Merge "Merge from upstream SVN r2699, git SHA 9b1cf54."
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..29ef23e
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,66 @@
+*.host.mk
+*.target.mk
+*.Makefile
+*.ncb
+*.ninja
+*.props
+*.pyc
+*.rules
+*.scons
+*.sdf
+*.sln
+*.suo
+*.targets
+*.user
+*.vcproj
+*.vcxproj
+*.vcxproj.filters
+*.vpj
+*.vpw
+*.vpwhistu
+*.vtg
+*.xcodeproj
+*~
+.*.sw?
+.DS_Store
+.cproject
+.gdb_history
+.gdbinit
+.metadata
+.project
+.pydevproject
+.settings
+/build
+/chromium_deps
+/gyp-mac-tool
+/Makefile
+/out
+/resources
+/src/supplement.gypi
+/testing
+/third_party/asan
+/third_party/cygwin
+/third_party/expat
+/third_party/gaeunit
+/third_party/gold
+/third_party/google-gflags/src
+/third_party/google-visualization-python
+/third_party/jsoncpp
+/third_party/libjingle
+/third_party/libjpeg
+/third_party/libjpeg_turbo
+/third_party/libsrtp
+/third_party/libvpx
+/third_party/libyuv
+/third_party/llvm-build
+/third_party/oauth2
+/third_party/protobuf
+/third_party/valgrind
+/third_party/yasm
+/tools/clang
+/tools/gyp
+/tools/python
+/tools/valgrind
+/tools/win
+/x86-generic_out/
+/xcodebuild
diff --git a/AUTHORS b/AUTHORS
new file mode 100644
index 0000000..30d7be8
--- /dev/null
+++ b/AUTHORS
@@ -0,0 +1,8 @@
+# Names should be added to this file like so:
+# Name or Organization <email address>
+
+Google Inc.
+Mozilla Foundation
+Ben Strong <bstrong@gmail.com>
+Petar Jovanovic <petarj@mips.com>
+Martin Storsjo <martin@martin.st>
diff --git a/Android.mk b/Android.mk
index 9ac8947..ee918eb 100644
--- a/Android.mk
+++ b/Android.mk
@@ -103,7 +103,7 @@
 include $(webrtc_path)/src/common_audio/resampler/Android.mk
 include $(webrtc_path)/src/common_audio/signal_processing/Android.mk
 include $(webrtc_path)/src/common_audio/vad/Android.mk
-include $(webrtc_path)/src/modules/audio_coding/codecs/isac/fix/Android.mk
+include $(webrtc_path)/src/modules/audio_coding/codecs/isac/fix/source/Android.mk
 include $(webrtc_path)/src/modules/audio_coding/codecs/isac/main/source/Android.mk
 include $(webrtc_path)/src/modules/audio_processing/aec/Android.mk
 include $(webrtc_path)/src/modules/audio_processing/aecm/Android.mk
@@ -111,12 +111,11 @@
 include $(webrtc_path)/src/modules/audio_processing/Android.mk
 include $(webrtc_path)/src/modules/audio_processing/ns/Android.mk
 include $(webrtc_path)/src/modules/audio_processing/utility/Android.mk
-#include $(webrtc_path)/src/modules/utility/source/Android.mk
 include $(webrtc_path)/src/system_wrappers/source/Android.mk
 
 # libwebrtc_audio_coding_gnustl_static dependencies
 WEBRTC_STL := gnustl_static
 include $(webrtc_path)/src/system_wrappers/source/Android.mk
+include $(webrtc_path)/src/modules/audio_coding/codecs/isac/fix/source/Android.mk
 include $(webrtc_path)/src/modules/audio_coding/codecs/isac/main/source/Android.mk
-include $(webrtc_path)/src/modules/audio_coding/codecs/isac/fix/Android.mk
 include $(webrtc_path)/src/common_audio/signal_processing/Android.mk
diff --git a/DEPS b/DEPS
new file mode 100644
index 0000000..fe2d635
--- /dev/null
+++ b/DEPS
@@ -0,0 +1,138 @@
+use_relative_paths = True
+
+vars = {
+  # Override root_dir in your .gclient's custom_vars to specify a custom root
+  # folder name.
+  "root_dir": "trunk",
+  "extra_gyp_flag": "-Dextra_gyp_flag=0",
+
+  # Use this googlecode_url variable only if there is an internal mirror for it.
+  # If you do not know, use the full path while defining your new deps entry.
+  "googlecode_url": "http://%s.googlecode.com/svn",
+  "chromium_trunk" : "http://src.chromium.org/svn/trunk",
+  "chromium_revision": "152335",
+
+  # External resources like video and audio files used for testing purposes.
+  # Downloaded on demand when needed.
+  "webrtc_resources_revision": "10",
+}
+
+# NOTE: Prefer revision numbers to tags for svn deps. Use http rather than
+# https; the latter can cause problems for users behind proxies.
+deps = {
+  "../chromium_deps":
+    File(Var("chromium_trunk") + "/src/DEPS@" + Var("chromium_revision")),
+
+  "build":
+    Var("chromium_trunk") + "/src/build@" + Var("chromium_revision"),
+
+  "testing":
+    Var("chromium_trunk") + "/src/testing@" + Var("chromium_revision"),
+
+  "testing/gmock":
+    From("chromium_deps", "src/testing/gmock"),
+
+  "testing/gtest":
+    From("chromium_deps", "src/testing/gtest"),
+
+  "third_party/expat":
+    Var("chromium_trunk") + "/src/third_party/expat@" + Var("chromium_revision"),
+
+  "third_party/google-gflags/src":
+    (Var("googlecode_url") % "google-gflags") + "/trunk/src@45",
+
+  "third_party/libjpeg":
+    Var("chromium_trunk") + "/src/third_party/libjpeg@" + Var("chromium_revision"),
+
+  "third_party/libjpeg_turbo":
+    From("chromium_deps", "src/third_party/libjpeg_turbo"),
+
+  "third_party/libvpx/source/libvpx":
+    "http://git.chromium.org/webm/libvpx.git@c522217d",
+
+  "third_party/libyuv":
+    (Var("googlecode_url") % "libyuv") + "/trunk@338",
+
+  "third_party/protobuf":
+    Var("chromium_trunk") + "/src/third_party/protobuf@" + Var("chromium_revision"),
+
+  "third_party/yasm":
+    Var("chromium_trunk") + "/src/third_party/yasm@" + Var("chromium_revision"),
+
+  "third_party/yasm/source/patched-yasm":
+    From("chromium_deps", "src/third_party/yasm/source/patched-yasm"),
+
+  "tools/clang":
+    Var("chromium_trunk") + "/src/tools/clang@" + Var("chromium_revision"),
+
+  "tools/gyp":
+    From("chromium_deps", "src/tools/gyp"),
+
+  "tools/python":
+    Var("chromium_trunk") + "/src/tools/python@" + Var("chromium_revision"),
+
+  "tools/valgrind":
+    Var("chromium_trunk") + "/src/tools/valgrind@" + Var("chromium_revision"),
+
+  # Needed by build/common.gypi.
+  "tools/win/supalink":
+    Var("chromium_trunk") + "/src/tools/win/supalink@" + Var("chromium_revision"),
+}
+
+deps_os = {
+  "win": {
+    # Use our own, stripped down, version of Cygwin (required by GYP).
+    "third_party/cygwin":
+      (Var("googlecode_url") % "webrtc") + "/deps/third_party/cygwin@2672",
+
+    # Used by libjpeg-turbo.
+    "third_party/yasm/binaries":
+      From("chromium_deps", "src/third_party/yasm/binaries"),
+  },
+  "unix": {
+    "third_party/gold":
+      From("chromium_deps", "src/third_party/gold"),
+  },
+}
+
+hooks = [
+  {
+    # Create a supplement.gypi file under trunk/src.  This file will be picked
+    # up by gyp and used to enable the standalone build.
+    "pattern": ".",
+    "action": ["python", Var("root_dir") + "/tools/create_supplement_gypi.py",
+               Var("root_dir") + "/src/supplement.gypi"],
+  },
+  {
+    # Pull clang on mac. If nothing changed, or on non-mac platforms, this takes
+    # zero seconds to run. If something changed, it downloads a prebuilt clang.
+    "pattern": ".",
+    "action": ["python", Var("root_dir") + "/tools/clang/scripts/update.py",
+               "--mac-only"],
+  },
+  {
+    # Update the cygwin mount on Windows.
+    # This is necessary to get the correct mapping between e.g. /bin and the
+    # cygwin path on Windows. Without it we can't run bash scripts in actions.
+    # Ideally this should be solved in "pylib/gyp/msvs_emulation.py".
+    "pattern": ".",
+    "action": ["python", Var("root_dir") + "/build/win/setup_cygwin_mount.py",
+               "--win-only"],
+  },
+  {
+    # Download test resources, i.e. video and audio files. If the latest
+    # version is already downloaded, this takes zero seconds to run.
+    # If a newer version or no current download exists, it will download
+    # the resources and extract them.
+    "pattern": ".",
+    "action": ["python", Var("root_dir") + "/tools/resources/update.py"],
+  },
+  {
+    # A change to a .gyp, .gypi, or to GYP itself should run the generator.
+    "pattern": ".",
+    "action": ["python", Var("root_dir") + "/build/gyp_chromium",
+               "--depth=" + Var("root_dir"), Var("root_dir") + "/webrtc.gyp",
+               Var("extra_gyp_flag")],
+  },
+]
+
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..dd4a345
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1 @@
+Refer to src/LICENSE.
diff --git a/LICENSE_THIRD_PARTY b/LICENSE_THIRD_PARTY
new file mode 100644
index 0000000..d47c055
--- /dev/null
+++ b/LICENSE_THIRD_PARTY
@@ -0,0 +1 @@
+Refer to src/LICENSE_THIRD_PARTY.
diff --git a/OWNERS b/OWNERS
new file mode 100644
index 0000000..b110a52
--- /dev/null
+++ b/OWNERS
@@ -0,0 +1,5 @@
+henrika@webrtc.org

+niklas.enbom@webrtc.org

+andrew@webrtc.org

+tina.legrand@webrtc.org

+tommi@webrtc.org
\ No newline at end of file
diff --git a/PATENTS b/PATENTS
new file mode 100644
index 0000000..5cb83ec
--- /dev/null
+++ b/PATENTS
@@ -0,0 +1 @@
+Refer to src/PATENTS.
diff --git a/PRESUBMIT.py b/PRESUBMIT.py
new file mode 100644
index 0000000..6eaa408
--- /dev/null
+++ b/PRESUBMIT.py
@@ -0,0 +1,161 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import os.path
+
+# All folders in LINT_FOLDERS will be scanned by cpplint by the presubmit
+# script. Note that subfolders are not included.
+LINT_FOLDERS = ['src/video_engine']
+
+def _LicenseHeader(input_api):
+  """Returns the license header regexp."""
+  # Accept any year number from 2011 to the current year
+  current_year = int(input_api.time.strftime('%Y'))
+  allowed_years = (str(s) for s in reversed(xrange(2011, current_year + 1)))
+  years_re = '(' + '|'.join(allowed_years) + ')'
+  license_header = (
+      r'.*? Copyright \(c\) %(year)s The WebRTC project authors\. '
+        r'All Rights Reserved\.\n'
+      r'.*?\n'
+      r'.*? Use of this source code is governed by a BSD-style license\n'
+      r'.*? that can be found in the LICENSE file in the root of the source\n'
+      r'.*? tree\. An additional intellectual property rights grant can be '
+        r'found\n'
+      r'.*? in the file PATENTS\.  All contributing project authors may\n'
+      r'.*? be found in the AUTHORS file in the root of the source tree\.\n'
+  ) % {
+      'year': years_re,
+  }
+  return license_header
+
+def _CheckNoIOStreamInHeaders(input_api, output_api):
+  """Checks to make sure no .h files include <iostream>."""
+  files = []
+  pattern = input_api.re.compile(r'^#include\s*<iostream>',
+                                 input_api.re.MULTILINE)
+  for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
+    if not f.LocalPath().endswith('.h'):
+      continue
+    contents = input_api.ReadFile(f)
+    if pattern.search(contents):
+      files.append(f)
+
+  if len(files):
+    return [ output_api.PresubmitError(
+        'Do not #include <iostream> in header files, since it inserts static ' +
+        'initialization into every file including the header. Instead, ' +
+        '#include <ostream>. See http://crbug.com/94794',
+        files) ]
+  return []
+
+def _CheckNoFRIEND_TEST(input_api, output_api):
+  """Make sure that gtest's FRIEND_TEST() macro is not used, the
+  FRIEND_TEST_ALL_PREFIXES() macro from testsupport/gtest_prod_util.h should be
+  used instead since that allows for FLAKY_, FAILS_ and DISABLED_ prefixes."""
+  problems = []
+
+  file_filter = lambda f: f.LocalPath().endswith(('.cc', '.h'))
+  for f in input_api.AffectedFiles(file_filter=file_filter):
+    for line_num, line in f.ChangedContents():
+      if 'FRIEND_TEST(' in line:
+        problems.append('    %s:%d' % (f.LocalPath(), line_num))
+
+  if not problems:
+    return []
+  return [output_api.PresubmitPromptWarning('WebRTC\'s code should not use '
+      'gtest\'s FRIEND_TEST() macro. Include testsupport/gtest_prod_util.h and '
+      'use FRIEND_TEST_ALL_PREFIXES() instead.\n' + '\n'.join(problems))]
+
+def _IsLintWhitelisted(file_name):
+  """ Checks if a file is whitelisted for lint check."""
+  # TODO(mflodman) Include subfolders in the check.
+  return (os.path.dirname(file_name) in LINT_FOLDERS)
+
+def _CheckApprovedFilesLintClean(input_api, output_api,
+                                 source_file_filter=None):
+  """Checks that all new or whitelisted .cc and .h files pass cpplint.py.
+  This check is based on _CheckChangeLintsClean in
+  depot_tools/presubmit_canned_checks.py but has less filters and only checks
+  added files."""
+  result = []
+
+  # Initialize cpplint.
+  import cpplint
+  # Access to a protected member _XX of a client class
+  # pylint: disable=W0212
+  cpplint._cpplint_state.ResetErrorCounts()
+
+  # Justifications for each filter:
+  #
+  # - build/header_guard  : WebRTC coding style says they should be prefixed
+  #                         with WEBRTC_, which is not possible to configure in
+  #                         cpplint.py.
+  cpplint._SetFilters('-build/header_guard')
+
+  # Use the strictest verbosity level for cpplint.py (level 1) which is the
+  # default when running cpplint.py from command line.
+  # To make it possible to work with not-yet-converted code, we're only applying
+  # it to new (or moved/renamed) files and files listed in LINT_FOLDERS.
+  verbosity_level = 1
+  files = []
+  for f in input_api.AffectedSourceFiles(source_file_filter):
+    # Note that moved/renamed files also count as added for svn.
+    if (f.Action() == 'A' or _IsLintWhitelisted(f.LocalPath())):
+      files.append(f.AbsoluteLocalPath())
+
+  for file_name in files:
+    cpplint.ProcessFile(file_name, verbosity_level)
+
+  if cpplint._cpplint_state.error_count > 0:
+    if input_api.is_committing:
+      # TODO(kjellander): Change back to PresubmitError below when we're
+      # confident with the lint settings.
+      res_type = output_api.PresubmitPromptWarning
+    else:
+      res_type = output_api.PresubmitPromptWarning
+    result = [res_type('Changelist failed cpplint.py check.')]
+
+  return result
+
+def _CommonChecks(input_api, output_api):
+  """Checks common to both upload and commit."""
+  # TODO(kjellander): Use presubmit_canned_checks.PanProjectChecks too.
+  results = []
+  results.extend(input_api.canned_checks.CheckLongLines(
+      input_api, output_api))
+  results.extend(input_api.canned_checks.CheckChangeHasNoTabs(
+      input_api, output_api))
+  results.extend(input_api.canned_checks.CheckChangeHasNoStrayWhitespace(
+      input_api, output_api))
+  results.extend(input_api.canned_checks.CheckChangeTodoHasOwner(
+      input_api, output_api))
+  results.extend(_CheckApprovedFilesLintClean(input_api, output_api))
+  results.extend(input_api.canned_checks.CheckLicense(
+      input_api, output_api, _LicenseHeader(input_api)))
+  results.extend(_CheckNoIOStreamInHeaders(input_api, output_api))
+  results.extend(_CheckNoFRIEND_TEST(input_api, output_api))
+  return results
+
+def CheckChangeOnUpload(input_api, output_api):
+  results = []
+  results.extend(_CommonChecks(input_api, output_api))
+  return results
+
+def CheckChangeOnCommit(input_api, output_api):
+  results = []
+  results.extend(_CommonChecks(input_api, output_api))
+  results.extend(input_api.canned_checks.CheckOwners(input_api, output_api))
+  results.extend(input_api.canned_checks.CheckChangeWasUploaded(
+      input_api, output_api))
+  results.extend(input_api.canned_checks.CheckChangeHasDescription(
+      input_api, output_api))
+  results.extend(input_api.canned_checks.CheckChangeHasBugField(
+      input_api, output_api))
+  results.extend(input_api.canned_checks.CheckChangeHasTestField(
+      input_api, output_api))
+  return results
diff --git a/WATCHLISTS b/WATCHLISTS
new file mode 100644
index 0000000..13551a8
--- /dev/null
+++ b/WATCHLISTS
@@ -0,0 +1,114 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# Inspired by chromium.org:
+# http://dev.chromium.org/developers/contributing-code/watchlists
+
+{
+  'WATCHLIST_DEFINITIONS': {
+    'this_file': {
+      'filepath': '^WATCHLISTS$',
+    },
+    'all_src': {
+      'filepath': 'src/.*',
+    },
+    'root_files': {
+      # src/build/ and non-recursive contents of ./ and src/
+      'filepath': '^[^/]*$|src/[^/]*$|src/build/.*',
+    },
+    'documented_interfaces': {
+      'filepath': 'src/[^/]*\.h$|'\
+                  'src/video_engine/main/interface/.*|'\
+                  'src/voice_engine/main/interface/.*',
+    },
+    'build_files': {
+      'filepath': '\.gyp$|\.gypi$|Android\.mk$',
+    },
+    'java_files': {
+      'filepath': '\.java$|\.xml$',
+    },
+    'video_engine': {
+      'filepath': 'src/video_engine/.*',
+    },
+    'voice_engine': {
+      'filepath': 'src/voice_engine/.*',
+    },
+    'common_audio': {
+      'filepath': 'src/common_audio/.*',
+    },
+    'video_capture': {
+      'filepath': 'src/modules/video_capture/.*',
+    },
+    'video_render': {
+      'filepath': 'src/modules/video_render/.*',
+    },
+    'audio_device': {
+      'filepath': 'src/modules/audio_device/.*',
+    },
+    'audio_coding': {
+      'filepath': 'src/modules/audio_coding/.*',
+    },
+    'neteq': {
+      'filepath': 'src/modules/audio_coding/neteq/.*',
+    },
+    'audio_processing': {
+      'filepath': 'src/modules/audio_processing/.*',
+    },
+    'video_codecs': {
+      'filepath': 'src/modules/video_coding/codecs/.*',
+    },
+    'video_coding': {
+      'filepath': 'src/modules/video_coding/.*',
+    },
+    'rtp_rtcp': {
+      'filepath': 'src/modules/rtp_rtcp/.*'
+    },
+    'system_wrappers': {
+      'filepath': 'src/system_wrappers/.*',
+    },
+  },
+
+  'WATCHLISTS': {
+    'this_file': [''],
+    'all_src': ['tterriberry@mozilla.com',
+                'giles@mozilla.com'],
+    'root_files': ['andrew@webrtc.org',
+                   'niklas.enbom@webrtc.org'],
+    'documented_interfaces': ['interface-changes@webrtc.org',
+                              'rwolff@gocast.it'],
+    'build_files': ['leozwang@webrtc.org'],
+    'java_files': ['leozwang@webrtc.org'],
+    'common_audio': ['bjornv@webrtc.org',
+                     'andrew@webrtc.org'],
+    'video_engine': ['mflodman@webrtc.org',
+                     'perkj@webrtc.org'],
+    'voice_engine': ['henrika@webrtc.org'],
+    'video_capture': ['mflodman@webrtc.org',
+                      'perkj@webrtc.org',
+                      'leozwang@webrtc.org'],
+    'video_render': ['mflodman@webrtc.org',
+                     'perkj@webrtc.org',
+                     'leozwang@webrtc.org'],
+    'audio_device': ['henrika@webrtc.org',
+                     'leozwang@webrtc.org'],
+    'audio_coding': ['tina.legrand@webrtc.org'],
+    'neteq': ['henrik.lundin@webrtc.org'],
+    'audio_processing': ['andrew@webrtc.org',
+                         'bjornv@webrtc.org',
+                         'leozwang@webrtc.org'],
+    'video_codecs': ['henrik.lundin@webrtc.org',
+                     'pwestin@webrtc.org'],
+    'video_coding': ['stefan@webrtc.org'],
+    'rtp_rtcp': ['mflodman@webrtc.org',
+                 'pwestin@webrtc.org'],
+    'system_wrappers': ['mflodman@webrtc.org',
+                        'henrika@webrtc.org',
+                        'andrew@webrtc.org',
+                        'leozwang@webrtc.org'],
+  },
+}
diff --git a/android-webrtc.mk b/android-webrtc.mk
index f131004..2b8b5b9 100644
--- a/android-webrtc.mk
+++ b/android-webrtc.mk
@@ -9,7 +9,6 @@
 # These defines will apply to all source files
 # Think again before changing it
 MY_WEBRTC_COMMON_DEFS := \
-    '-DWEBRTC_TARGET_PC' \
     '-DWEBRTC_LINUX' \
     '-DWEBRTC_THREAD_RR' \
     '-DWEBRTC_CLOCK_TYPE_REALTIME' \
@@ -24,23 +23,23 @@
 #    '-DWEBRTC_DETECT_ARM_NEON' # only used in a build configuration without Neon
 # TODO(kma): figure out if the above define could be moved to NDK build only.
 
-# TODO(kma): test if the code under next two macros works with generic GCC compilers
-ifeq ($(ARCH_ARM_HAVE_NEON),true)
-MY_WEBRTC_COMMON_DEFS_arm += \
-    '-DWEBRTC_ARCH_ARM_NEON'
-MY_ARM_CFLAGS_NEON := \
-    -flax-vector-conversions
-endif
 
-ifneq (,$(filter '-DWEBRTC_DETECT_ARM_NEON' '-DWEBRTC_ARCH_ARM_NEON', \
-    $(MY_WEBRTC_COMMON_DEFS_arm)))
-WEBRTC_BUILD_NEON_LIBS := true
-endif
-
-ifeq ($(ARCH_ARM_HAVE_ARMV7A),true)
-MY_WEBRTC_COMMON_DEFS_arm += \
-    '-DWEBRTC_ARCH_ARM_V7A'
-endif
-
-MY_WEBRTC_COMMON_DEFS_x86 := \
-    '-DWEBRTC_USE_SSE2'
+# The following upstram ARM_NEON and ARMV7A defintions do not work on AOSP yet.
+# They should be fixed in the next merge from upstream.
+## TODO(kma): test if the code under next two macros works with generic GCC compilers
+#ifeq ($(ARCH_ARM_HAVE_NEON),true)
+#MY_WEBRTC_COMMON_DEFS_arm += \
+#    '-DWEBRTC_ARCH_ARM_NEON'
+#MY_ARM_CFLAGS_NEON := \
+#    -flax-vector-conversions
+#endif
+#
+#ifneq (,$(filter '-DWEBRTC_DETECT_ARM_NEON' '-DWEBRTC_ARCH_ARM_NEON', \
+#    $(MY_WEBRTC_COMMON_DEFS_arm)))
+#WEBRTC_BUILD_NEON_LIBS := true
+#endif
+#
+#ifeq ($(ARCH_ARM_HAVE_ARMV7A),true)
+#MY_WEBRTC_COMMON_DEFS_arm += \
+#    '-DWEBRTC_ARCH_ARM_V7A'
+#endif
diff --git a/codereview.settings b/codereview.settings
new file mode 100644
index 0000000..1c0ceaa
--- /dev/null
+++ b/codereview.settings
@@ -0,0 +1,11 @@
+# This file is used by gcl to get repository specific information.
+CODE_REVIEW_SERVER: webrtc-codereview.appspot.com
+#CC_LIST:
+#VIEW_VC:
+#STATUS:
+TRY_ON_UPLOAD: False
+TRYSERVER_HTTP_HOST: webrtc-cb-linux-master.cbf.corp.google.com
+TRYSERVER_HTTP_PORT: 9018
+#TRYSERVER_SVN_URL:
+#GITCL_PREUPLOAD:
+#GITCL_PREDCOMMIT:
diff --git a/test/data/audio_processing/android/output_data_fixed.pb b/data/audio_processing/android/output_data_fixed.pb
similarity index 100%
rename from test/data/audio_processing/android/output_data_fixed.pb
rename to data/audio_processing/android/output_data_fixed.pb
Binary files differ
diff --git a/test/data/audio_processing/android/output_data_float.pb b/data/audio_processing/android/output_data_float.pb
similarity index 100%
rename from test/data/audio_processing/android/output_data_float.pb
rename to data/audio_processing/android/output_data_float.pb
Binary files differ
diff --git a/data/audio_processing/output_data_fixed.pb b/data/audio_processing/output_data_fixed.pb
new file mode 100644
index 0000000..2ca82e3
--- /dev/null
+++ b/data/audio_processing/output_data_fixed.pb
Binary files differ
diff --git a/data/audio_processing/output_data_float.pb b/data/audio_processing/output_data_float.pb
new file mode 100644
index 0000000..cadc1d3
--- /dev/null
+++ b/data/audio_processing/output_data_float.pb
Binary files differ
diff --git a/data/common_video/jpeg/webrtc_logo.jpg b/data/common_video/jpeg/webrtc_logo.jpg
new file mode 100644
index 0000000..ddb6192
--- /dev/null
+++ b/data/common_video/jpeg/webrtc_logo.jpg
Binary files differ
diff --git a/data/rtp_rtcp/H263Foreman_CIF_Iframe.bin b/data/rtp_rtcp/H263Foreman_CIF_Iframe.bin
new file mode 100644
index 0000000..00e3f80
--- /dev/null
+++ b/data/rtp_rtcp/H263Foreman_CIF_Iframe.bin
Binary files differ
diff --git a/data/rtp_rtcp/H263Foreman_CIF_Pframe.bin b/data/rtp_rtcp/H263Foreman_CIF_Pframe.bin
new file mode 100644
index 0000000..57f94c3
--- /dev/null
+++ b/data/rtp_rtcp/H263Foreman_CIF_Pframe.bin
Binary files differ
diff --git a/data/rtp_rtcp/H263_CIF_IFRAME.bin b/data/rtp_rtcp/H263_CIF_IFRAME.bin
new file mode 100644
index 0000000..00e3f80
--- /dev/null
+++ b/data/rtp_rtcp/H263_CIF_IFRAME.bin
Binary files differ
diff --git a/data/rtp_rtcp/H263_CIF_PFRAME.bin b/data/rtp_rtcp/H263_CIF_PFRAME.bin
new file mode 100644
index 0000000..248f3a1
--- /dev/null
+++ b/data/rtp_rtcp/H263_CIF_PFRAME.bin
Binary files differ
diff --git a/data/rtp_rtcp/H263_QCIF_IFRAME.bin b/data/rtp_rtcp/H263_QCIF_IFRAME.bin
new file mode 100644
index 0000000..0fa144c
--- /dev/null
+++ b/data/rtp_rtcp/H263_QCIF_IFRAME.bin
Binary files differ
diff --git a/data/rtp_rtcp/RTCPPacketTMMBR0.bin b/data/rtp_rtcp/RTCPPacketTMMBR0.bin
new file mode 100644
index 0000000..19df13c
--- /dev/null
+++ b/data/rtp_rtcp/RTCPPacketTMMBR0.bin
Binary files differ
diff --git a/data/rtp_rtcp/RTCPPacketTMMBR1.bin b/data/rtp_rtcp/RTCPPacketTMMBR1.bin
new file mode 100644
index 0000000..b7b7c941
--- /dev/null
+++ b/data/rtp_rtcp/RTCPPacketTMMBR1.bin
Binary files differ
diff --git a/data/rtp_rtcp/RTCPPacketTMMBR2.bin b/data/rtp_rtcp/RTCPPacketTMMBR2.bin
new file mode 100644
index 0000000..257835c
--- /dev/null
+++ b/data/rtp_rtcp/RTCPPacketTMMBR2.bin
Binary files differ
diff --git a/data/rtp_rtcp/RTCPPacketTMMBR3.bin b/data/rtp_rtcp/RTCPPacketTMMBR3.bin
new file mode 100644
index 0000000..4a8e375
--- /dev/null
+++ b/data/rtp_rtcp/RTCPPacketTMMBR3.bin
Binary files differ
diff --git a/data/rtp_rtcp/RTCPPacketTMMBR4.bin b/data/rtp_rtcp/RTCPPacketTMMBR4.bin
new file mode 100644
index 0000000..28cd99c
--- /dev/null
+++ b/data/rtp_rtcp/RTCPPacketTMMBR4.bin
Binary files differ
diff --git a/data/rtp_rtcp/RTCPPacketTMMBR4_1.bin b/data/rtp_rtcp/RTCPPacketTMMBR4_1.bin
new file mode 100644
index 0000000..5080b88
--- /dev/null
+++ b/data/rtp_rtcp/RTCPPacketTMMBR4_1.bin
Binary files differ
diff --git a/data/rtp_rtcp/RTCPPacketTMMBR4_2.bin b/data/rtp_rtcp/RTCPPacketTMMBR4_2.bin
new file mode 100644
index 0000000..2c2f288
--- /dev/null
+++ b/data/rtp_rtcp/RTCPPacketTMMBR4_2.bin
Binary files differ
diff --git a/data/rtp_rtcp/RTCPPacketTMMBR5.bin b/data/rtp_rtcp/RTCPPacketTMMBR5.bin
new file mode 100644
index 0000000..da7235a
--- /dev/null
+++ b/data/rtp_rtcp/RTCPPacketTMMBR5.bin
Binary files differ
diff --git a/data/voice_engine/audio_long16.pcm b/data/voice_engine/audio_long16.pcm
new file mode 100644
index 0000000..853e0df
--- /dev/null
+++ b/data/voice_engine/audio_long16.pcm
Binary files differ
diff --git a/data/voice_engine/audio_long16.wav b/data/voice_engine/audio_long16.wav
new file mode 100644
index 0000000..ebe91c4
--- /dev/null
+++ b/data/voice_engine/audio_long16.wav
Binary files differ
diff --git a/data/voice_engine/audio_long16big_endian.pcm b/data/voice_engine/audio_long16big_endian.pcm
new file mode 100644
index 0000000..563e4e9
--- /dev/null
+++ b/data/voice_engine/audio_long16big_endian.pcm
Binary files differ
diff --git a/data/voice_engine/audio_long16noise.pcm b/data/voice_engine/audio_long16noise.pcm
new file mode 100644
index 0000000..a7be537
--- /dev/null
+++ b/data/voice_engine/audio_long16noise.pcm
Binary files differ
diff --git a/data/voice_engine/audio_long8.pcm b/data/voice_engine/audio_long8.pcm
new file mode 100644
index 0000000..85d17e5
--- /dev/null
+++ b/data/voice_engine/audio_long8.pcm
Binary files differ
diff --git a/data/voice_engine/audio_long8mulaw.wav b/data/voice_engine/audio_long8mulaw.wav
new file mode 100644
index 0000000..2d3d8b3
--- /dev/null
+++ b/data/voice_engine/audio_long8mulaw.wav
Binary files differ
diff --git a/data/voice_engine/audio_short16.pcm b/data/voice_engine/audio_short16.pcm
new file mode 100644
index 0000000..15a0f18
--- /dev/null
+++ b/data/voice_engine/audio_short16.pcm
Binary files differ
diff --git a/data/voice_engine/audio_tiny11.wav b/data/voice_engine/audio_tiny11.wav
new file mode 100644
index 0000000..6db80d5
--- /dev/null
+++ b/data/voice_engine/audio_tiny11.wav
Binary files differ
diff --git a/data/voice_engine/audio_tiny16.wav b/data/voice_engine/audio_tiny16.wav
new file mode 100644
index 0000000..baab0ac
--- /dev/null
+++ b/data/voice_engine/audio_tiny16.wav
Binary files differ
diff --git a/data/voice_engine/audio_tiny22.wav b/data/voice_engine/audio_tiny22.wav
new file mode 100644
index 0000000..b421867
--- /dev/null
+++ b/data/voice_engine/audio_tiny22.wav
Binary files differ
diff --git a/data/voice_engine/audio_tiny32.wav b/data/voice_engine/audio_tiny32.wav
new file mode 100644
index 0000000..773ac23
--- /dev/null
+++ b/data/voice_engine/audio_tiny32.wav
Binary files differ
diff --git a/data/voice_engine/audio_tiny44.wav b/data/voice_engine/audio_tiny44.wav
new file mode 100644
index 0000000..c9faa45
--- /dev/null
+++ b/data/voice_engine/audio_tiny44.wav
Binary files differ
diff --git a/data/voice_engine/audio_tiny48.wav b/data/voice_engine/audio_tiny48.wav
new file mode 100644
index 0000000..8ebf11a
--- /dev/null
+++ b/data/voice_engine/audio_tiny48.wav
Binary files differ
diff --git a/data/voice_engine/audio_tiny8.wav b/data/voice_engine/audio_tiny8.wav
new file mode 100644
index 0000000..d71c65e
--- /dev/null
+++ b/data/voice_engine/audio_tiny8.wav
Binary files differ
diff --git a/data/voice_engine/stereo_rtp_files/HRTF_pcm16wb.rtp b/data/voice_engine/stereo_rtp_files/HRTF_pcm16wb.rtp
new file mode 100644
index 0000000..02abbc2
--- /dev/null
+++ b/data/voice_engine/stereo_rtp_files/HRTF_pcm16wb.rtp
Binary files differ
diff --git a/data/voice_engine/stereo_rtp_files/HRTF_pcm16wb_jitter.rtp b/data/voice_engine/stereo_rtp_files/HRTF_pcm16wb_jitter.rtp
new file mode 100644
index 0000000..4ed110b
--- /dev/null
+++ b/data/voice_engine/stereo_rtp_files/HRTF_pcm16wb_jitter.rtp
Binary files differ
diff --git a/data/voice_engine/stereo_rtp_files/README.txt b/data/voice_engine/stereo_rtp_files/README.txt
new file mode 100644
index 0000000..976ac56
--- /dev/null
+++ b/data/voice_engine/stereo_rtp_files/README.txt
@@ -0,0 +1,4 @@
+Use RTP Play tool with command 'rtpplay.exe -v -T -f <path>\<file.rtp> 127.0.0.1/1236' 
+Example: rtpplay.exe -v -T -f hrtf_g722_1C_48.rtp 127.0.0.1/1236.  
+This sends the stereo rtp file to port 1236.  
+You can hear the voice getting panned from left, right and center.   
diff --git a/data/voice_engine/stereo_rtp_files/hrtf_g722_1C_48.rtp b/data/voice_engine/stereo_rtp_files/hrtf_g722_1C_48.rtp
new file mode 100644
index 0000000..b96d59b
--- /dev/null
+++ b/data/voice_engine/stereo_rtp_files/hrtf_g722_1C_48.rtp
Binary files differ
diff --git a/data/voice_engine/stereo_rtp_files/hrtf_g722_1C_48_jitterT2.rtp b/data/voice_engine/stereo_rtp_files/hrtf_g722_1C_48_jitterT2.rtp
new file mode 100644
index 0000000..527a50a
--- /dev/null
+++ b/data/voice_engine/stereo_rtp_files/hrtf_g722_1C_48_jitterT2.rtp
Binary files differ
diff --git a/data/voice_engine/stereo_rtp_files/rtpplay.exe b/data/voice_engine/stereo_rtp_files/rtpplay.exe
new file mode 100755
index 0000000..6f938c8
--- /dev/null
+++ b/data/voice_engine/stereo_rtp_files/rtpplay.exe
Binary files differ
diff --git a/data/voice_engine/stereo_rtp_files/stereo_g729.rtp b/data/voice_engine/stereo_rtp_files/stereo_g729.rtp
new file mode 100644
index 0000000..3c36e30
--- /dev/null
+++ b/data/voice_engine/stereo_rtp_files/stereo_g729.rtp
Binary files differ
diff --git a/data/voice_engine/stereo_rtp_files/stereo_g729_jitter.rtp b/data/voice_engine/stereo_rtp_files/stereo_g729_jitter.rtp
new file mode 100644
index 0000000..913226c
--- /dev/null
+++ b/data/voice_engine/stereo_rtp_files/stereo_g729_jitter.rtp
Binary files differ
diff --git a/data/voice_engine/stereo_rtp_files/stereo_pcm16wb.rtp b/data/voice_engine/stereo_rtp_files/stereo_pcm16wb.rtp
new file mode 100644
index 0000000..729b565
--- /dev/null
+++ b/data/voice_engine/stereo_rtp_files/stereo_pcm16wb.rtp
Binary files differ
diff --git a/data/voice_engine/stereo_rtp_files/stereo_pcm16wb_jitter.rtp b/data/voice_engine/stereo_rtp_files/stereo_pcm16wb_jitter.rtp
new file mode 100644
index 0000000..efa2800
--- /dev/null
+++ b/data/voice_engine/stereo_rtp_files/stereo_pcm16wb_jitter.rtp
Binary files differ
diff --git a/data/voice_engine/stereo_rtp_files/stereo_pcmu.rtp b/data/voice_engine/stereo_rtp_files/stereo_pcmu.rtp
new file mode 100644
index 0000000..bb2d93c
--- /dev/null
+++ b/data/voice_engine/stereo_rtp_files/stereo_pcmu.rtp
Binary files differ
diff --git a/data/voice_engine/stereo_rtp_files/stereo_pcmu_jitter.rtp b/data/voice_engine/stereo_rtp_files/stereo_pcmu_jitter.rtp
new file mode 100644
index 0000000..fb79378
--- /dev/null
+++ b/data/voice_engine/stereo_rtp_files/stereo_pcmu_jitter.rtp
Binary files differ
diff --git a/data/voice_engine/stereo_rtp_files/stereo_pcmu_vad.rtp b/data/voice_engine/stereo_rtp_files/stereo_pcmu_vad.rtp
new file mode 100644
index 0000000..eebcf34
--- /dev/null
+++ b/data/voice_engine/stereo_rtp_files/stereo_pcmu_vad.rtp
Binary files differ
diff --git a/data/voice_engine/stereo_rtp_files/stereo_pcmu_vad_jitter.rtp b/data/voice_engine/stereo_rtp_files/stereo_pcmu_vad_jitter.rtp
new file mode 100644
index 0000000..5c368b4
--- /dev/null
+++ b/data/voice_engine/stereo_rtp_files/stereo_pcmu_vad_jitter.rtp
Binary files differ
diff --git a/data/voice_engine/stereo_rtp_files/toggling_stereo_g729_pt18_pt125.rtp b/data/voice_engine/stereo_rtp_files/toggling_stereo_g729_pt18_pt125.rtp
new file mode 100644
index 0000000..1f713f6
--- /dev/null
+++ b/data/voice_engine/stereo_rtp_files/toggling_stereo_g729_pt18_pt125.rtp
Binary files differ
diff --git a/libvpx.mk b/libvpx.mk
new file mode 100644
index 0000000..07c04dc
--- /dev/null
+++ b/libvpx.mk
@@ -0,0 +1,107 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+LOCAL_PATH := $(call my-dir)
+include $(CLEAR_VARS)
+
+MY_LIBVPX_PATH = ../libvpx
+
+LOCAL_SRC_FILES = \
+     $(MY_LIBVPX_PATH)/vp8/common/alloccommon.c \
+     $(MY_LIBVPX_PATH)/vp8/common/blockd.c \
+     $(MY_LIBVPX_PATH)/vp8/common/debugmodes.c \
+     $(MY_LIBVPX_PATH)/vp8/common/entropy.c \
+     $(MY_LIBVPX_PATH)/vp8/common/entropymode.c \
+     $(MY_LIBVPX_PATH)/vp8/common/entropymv.c \
+     $(MY_LIBVPX_PATH)/vp8/common/extend.c \
+     $(MY_LIBVPX_PATH)/vp8/common/filter.c \
+     $(MY_LIBVPX_PATH)/vp8/common/findnearmv.c \
+     $(MY_LIBVPX_PATH)/vp8/common/generic/systemdependent.c \
+     $(MY_LIBVPX_PATH)/vp8/common/idctllm.c \
+     $(MY_LIBVPX_PATH)/vp8/common/invtrans.c \
+     $(MY_LIBVPX_PATH)/vp8/common/loopfilter.c \
+     $(MY_LIBVPX_PATH)/vp8/common/loopfilter_filters.c \
+     $(MY_LIBVPX_PATH)/vp8/common/mbpitch.c \
+     $(MY_LIBVPX_PATH)/vp8/common/modecont.c \
+     $(MY_LIBVPX_PATH)/vp8/common/modecontext.c \
+     $(MY_LIBVPX_PATH)/vp8/common/quant_common.c \
+     $(MY_LIBVPX_PATH)/vp8/common/recon.c \
+     $(MY_LIBVPX_PATH)/vp8/common/reconinter.c \
+     $(MY_LIBVPX_PATH)/vp8/common/reconintra.c \
+     $(MY_LIBVPX_PATH)/vp8/common/reconintra4x4.c \
+     $(MY_LIBVPX_PATH)/vp8/common/setupintrarecon.c \
+     $(MY_LIBVPX_PATH)/vp8/common/swapyv12buffer.c \
+     $(MY_LIBVPX_PATH)/vp8/common/textblit.c \
+     $(MY_LIBVPX_PATH)/vp8/common/treecoder.c \
+     $(MY_LIBVPX_PATH)/vp8/vp8_cx_iface.c \
+     $(MY_LIBVPX_PATH)/vp8/vp8_dx_iface.c \
+     $(MY_LIBVPX_PATH)/vpx_config.c \
+     $(MY_LIBVPX_PATH)/vpx/src/vpx_codec.c \
+     $(MY_LIBVPX_PATH)/vpx/src/vpx_decoder.c \
+     $(MY_LIBVPX_PATH)/vpx/src/vpx_image.c \
+     $(MY_LIBVPX_PATH)/vpx_mem/vpx_mem.c \
+     $(MY_LIBVPX_PATH)/vpx_scale/generic/vpxscale.c \
+     $(MY_LIBVPX_PATH)/vpx_scale/generic/yv12config.c \
+     $(MY_LIBVPX_PATH)/vpx_scale/generic/yv12extend.c \
+     $(MY_LIBVPX_PATH)/vpx_scale/generic/gen_scalers.c \
+     $(MY_LIBVPX_PATH)/vpx_scale/generic/scalesystemdependent.c \
+     $(MY_LIBVPX_PATH)/vpx/src/vpx_encoder.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/bitstream.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/boolhuff.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/dct.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/encodeframe.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/encodeintra.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/encodemb.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/encodemv.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/ethreading.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/firstpass.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/generic/csystemdependent.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/mcomp.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/modecosts.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/pickinter.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/picklpf.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/psnr.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/quantize.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/ratectrl.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/rdopt.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/sad_c.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/segmentation.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/tokenize.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/treewriter.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/onyx_if.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/temporal_filter.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/variance_c.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/dboolhuff.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/decodemv.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/decodframe.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/dequantize.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/detokenize.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/generic/dsystemdependent.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/onyxd_if.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/reconintra_mt.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/threading.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/idct_blk.c \
+     $(MY_LIBVPX_PATH)/vp8/common/arm/arm_systemdependent.c \
+     $(MY_LIBVPX_PATH)/vp8/encoder/arm/arm_csystemdependent.c \
+     $(MY_LIBVPX_PATH)/vp8/decoder/arm/arm_dsystemdependent.c \
+
+LOCAL_CFLAGS := \
+    -DHAVE_CONFIG_H=vpx_config.h \
+    -include $(LOCAL_PATH)/third_party/libvpx/source/config/android/vpx_config.h
+
+LOCAL_MODULE := libwebrtc_vpx
+
+LOCAL_C_INCLUDES := \
+    external/libvpx \
+    external/libvpx/vpx_ports \
+    external/libvpx/vp8/common \
+    external/libvpx/vp8/encoder \
+    external/libvpx/vp8 \
+    external/libvpx/vpx_codec 
+
+include $(BUILD_STATIC_LIBRARY)
diff --git a/license_template.txt b/license_template.txt
new file mode 100644
index 0000000..5a3e653
--- /dev/null
+++ b/license_template.txt
@@ -0,0 +1,10 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
diff --git a/peerconnection/DEPS b/peerconnection/DEPS
new file mode 100644
index 0000000..efce4f0
--- /dev/null
+++ b/peerconnection/DEPS
@@ -0,0 +1,110 @@
+vars = {
+  # Use this googlecode_url variable only if there is an internal mirror for it.
+  # If you do not know, use the full path while defining your new deps entry.
+  "googlecode_url": "http://%s.googlecode.com/svn",
+  "chromium_trunk" : "http://src.chromium.org/svn/trunk",
+  "chromium_revision": "153489",
+  # Still needs the libjingle_revision here because some of
+  # the deps have to be pulled from libjingle repository.
+  "libjingle_revision": "175",
+}
+
+# NOTE: Prefer revision numbers to tags for svn deps. Use http rather than
+# https; the latter can cause problems for users behind proxies.
+deps = {
+  "trunk/chromium_deps":
+    File(Var("chromium_trunk") + "/src/DEPS@" + Var("chromium_revision")),
+
+  "trunk/third_party/webrtc":
+    From("trunk/chromium_deps", "src/third_party/webrtc"),
+
+  # WebRTC deps.
+  "trunk/third_party/libvpx":
+    From("trunk/chromium_deps", "src/third_party/libvpx"),
+
+  "trunk/build":
+    Var("chromium_trunk") + "/src/build@" + Var("chromium_revision"),
+
+  "trunk/testing/gtest":
+    From("trunk/chromium_deps", "src/testing/gtest"),
+
+  "trunk/tools/gyp":
+    From("trunk/chromium_deps", "src/tools/gyp"),
+
+  "trunk/tools/clang":
+    Var("chromium_trunk") + "/src/tools/clang@" + Var("chromium_revision"),
+
+  # Needed by build/common.gypi.
+  "trunk/tools/win/supalink":
+    Var("chromium_trunk") + "/src/tools/win/supalink@" + Var("chromium_revision"),
+
+  "trunk/third_party/protobuf":
+    Var("chromium_trunk") + "/src/third_party/protobuf@" + Var("chromium_revision"),
+
+  "trunk/third_party/libjpeg_turbo/":
+    From("trunk/chromium_deps", "src/third_party/libjpeg_turbo"),
+
+  "trunk/third_party/libjpeg":
+    Var("chromium_trunk") + "/src/third_party/libjpeg@" + Var("chromium_revision"),
+
+  "trunk/third_party/yasm":
+    Var("chromium_trunk") + "/src/third_party/yasm@" + Var("chromium_revision"),
+
+  "trunk/third_party/expat":
+    Var("chromium_trunk") + "/src/third_party/expat@" + Var("chromium_revision"),
+
+  "trunk/third_party/yasm/source/patched-yasm":
+    From("trunk/chromium_deps", "src/third_party/yasm/source/patched-yasm"),
+
+  "trunk/third_party/libyuv":
+    From("trunk/chromium_deps", "src/third_party/libyuv"),
+
+  # libjingle deps.
+  "trunk/third_party/libjingle/":
+    File(Var("chromium_trunk") + "/src/third_party/libjingle/libjingle.gyp@" + Var("chromium_revision")),
+
+  "trunk/third_party/libjingle/source":
+    From("trunk/chromium_deps", "src/third_party/libjingle/source"),
+
+  "trunk/third_party/libjingle/overrides/talk/base":
+    (Var("googlecode_url") % "libjingle") + "/trunk/talk/base@" + Var("libjingle_revision"),
+
+  "trunk/third_party/libsrtp/":
+    From("trunk/chromium_deps", "src/third_party/libsrtp"),
+
+  "trunk/third_party/jsoncpp/":
+    Var("chromium_trunk") + "/src/third_party/jsoncpp@" + Var("chromium_revision"),
+
+  "trunk/third_party/jsoncpp/source":
+    "http://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/trunk/jsoncpp@248",
+}
+
+deps_os = {
+  "win": {
+    "trunk/third_party/cygwin/":
+      Var("chromium_trunk") + "/deps/third_party/cygwin@66844",
+
+    # Used by libjpeg-turbo
+    "trunk/third_party/yasm/binaries":
+      From("trunk/chromium_deps", "src/third_party/yasm/binaries"),
+  },
+  "unix": {
+    "trunk/third_party/gold":
+      From("trunk/chromium_deps", "src/third_party/gold"),
+  },
+}
+
+hooks = [
+  {
+    # Pull clang on mac. If nothing changed, or on non-mac platforms, this takes
+    # zero seconds to run. If something changed, it downloads a prebuilt clang.
+    "pattern": ".",
+    "action": ["python", "trunk/tools/clang/scripts/update.py", "--mac-only"],
+  },
+  {
+    # A change to a .gyp, .gypi, or to GYP itself should run the generator.
+    "pattern": ".",
+    "action": ["python", "trunk/build/gyp_chromium", "--depth=trunk", "trunk/peerconnection.gyp"],
+  },
+]
+
diff --git a/peerconnection/OWNERS b/peerconnection/OWNERS
new file mode 100644
index 0000000..1527445
--- /dev/null
+++ b/peerconnection/OWNERS
@@ -0,0 +1,5 @@
+henrike@webrtc.org
+mallinath@webrtc.org
+perkj@webrtc.org
+wu@webrtc.org
+tommi@webrtc.org
diff --git a/peerconnection/README b/peerconnection/README
new file mode 100644
index 0000000..b7bafb2
--- /dev/null
+++ b/peerconnection/README
@@ -0,0 +1,12 @@
+This folder can be used to pull together the chromium version of webrtc
+and libjingle, and build the peerconnection sample client and server. This will
+check out a new repository in which you can build peerconnection_server.
+
+Steps:
+1) Create a new directory for the new repository (outside the webrtc repo):
+   mkdir peerconnection
+   cd peerconnection
+2) gclient config --name trunk http://webrtc.googlecode.com/svn/trunk/peerconnection
+3) gclient sync
+4) cd trunk
+5) make peerconnection_server peerconnection_client
diff --git a/peerconnection/base/base.gyp b/peerconnection/base/base.gyp
new file mode 100644
index 0000000..b077b34
--- /dev/null
+++ b/peerconnection/base/base.gyp
@@ -0,0 +1,25 @@
+# Below are normally provided by Chromium's base.gyp and required for
+# libjingle.gyp.
+{
+  'targets': [
+    {
+      'target_name': 'base',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '..',
+        ],
+      },
+      'conditions': [
+        ['OS == "linux"', {
+          'link_settings': {
+            'libraries': [
+              # We need rt for clock_gettime() used in libjingle.
+              '-lrt',
+            ],
+          },
+        }],
+      ],
+    },
+  ],
+}
diff --git a/peerconnection/net/net.gyp b/peerconnection/net/net.gyp
new file mode 100644
index 0000000..d653158
--- /dev/null
+++ b/peerconnection/net/net.gyp
@@ -0,0 +1,9 @@
+# This is a dummy gyp file to satisfy libjingle.gyp.
+{
+  'targets': [
+    {
+      'target_name': 'net',
+      'type': 'none',
+    },
+  ],
+}
diff --git a/peerconnection/peerconnection.gyp b/peerconnection/peerconnection.gyp
new file mode 100644
index 0000000..e9c1df9
--- /dev/null
+++ b/peerconnection/peerconnection.gyp
@@ -0,0 +1,92 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'includes': [ 'third_party/webrtc/build/common.gypi', ],
+  'variables': {
+    'peerconnection_sample': 'third_party/libjingle/source/talk/examples/peerconnection',
+  },  
+  'conditions': [
+    ['OS=="win"', {
+      'targets': [
+        {
+          'target_name': 'peerconnection_client',
+          'type': 'executable',
+          'sources': [
+            '<(peerconnection_sample)/client/conductor.cc',
+            '<(peerconnection_sample)/client/conductor.h',
+            '<(peerconnection_sample)/client/defaults.cc',
+            '<(peerconnection_sample)/client/defaults.h',
+            '<(peerconnection_sample)/client/main.cc',
+            '<(peerconnection_sample)/client/main_wnd.cc',
+            '<(peerconnection_sample)/client/main_wnd.h',
+            '<(peerconnection_sample)/client/peer_connection_client.cc',
+            '<(peerconnection_sample)/client/peer_connection_client.h',
+            'third_party/libjingle/source/talk/base/win32socketinit.cc',
+            'third_party/libjingle/source/talk/base/win32socketserver.cc',
+          ],
+          'msvs_settings': {
+            'VCLinkerTool': {
+             'SubSystem': '2',  # Windows
+            },
+          },
+          'dependencies': [
+            'third_party/libjingle/libjingle.gyp:libjingle_peerconnection',
+          ],
+          'include_dirs': [
+            'src',
+            'src/modules/interface',
+            'third_party/libjingle/source',
+          ],
+        },
+      ],  # targets
+    }, ],  # OS="win"
+    ['OS=="linux"', {
+      'targets': [
+        {
+          'target_name': 'peerconnection_client',
+          'type': 'executable',
+          'sources': [
+            '<(peerconnection_sample)/client/conductor.cc',
+            '<(peerconnection_sample)/client/conductor.h',
+            '<(peerconnection_sample)/client/defaults.cc',
+            '<(peerconnection_sample)/client/defaults.h',
+            '<(peerconnection_sample)/client/linux/main.cc',
+            '<(peerconnection_sample)/client/linux/main_wnd.cc',
+            '<(peerconnection_sample)/client/linux/main_wnd.h',
+            '<(peerconnection_sample)/client/peer_connection_client.cc',
+            '<(peerconnection_sample)/client/peer_connection_client.h',
+          ],
+          'dependencies': [
+            'third_party/jsoncpp/jsoncpp.gyp:jsoncpp',
+            'third_party/libjingle/libjingle.gyp:libjingle_peerconnection',
+            # TODO(tommi): Switch to this and remove specific gtk dependency
+            # sections below for cflags and link_settings.
+            # '<(DEPTH)/build/linux/system.gyp:gtk',
+          ],
+          'include_dirs': [
+            'third_party/libjingle/source',
+          ],
+          'cflags': [
+            '<!@(pkg-config --cflags gtk+-2.0)',
+          ],
+          'link_settings': {
+            'ldflags': [
+              '<!@(pkg-config --libs-only-L --libs-only-other gtk+-2.0 gthread-2.0)',
+            ],
+            'libraries': [
+              '<!@(pkg-config --libs-only-l gtk+-2.0 gthread-2.0)',
+              '-lX11',
+              '-lXext',
+            ],
+          },
+        },
+      ],  # targets
+    }, ],  # OS="linux"
+  ],
+}
diff --git a/peerconnection/supplement/supplement.gypi b/peerconnection/supplement/supplement.gypi
new file mode 100644
index 0000000..1fd012a
--- /dev/null
+++ b/peerconnection/supplement/supplement.gypi
@@ -0,0 +1,12 @@
+# This file will be picked up by gyp to initialize some global settings.
+{
+  'variables': {
+    'build_with_chromium': 1,
+    'enable_protobuf': 1,
+    'enabled_libjingle_device_manager': 1,
+    'include_internal_audio_device': 1,
+    'include_internal_video_capture': 1,
+    'include_internal_video_render': 1,
+    'include_pulse_audio': 1,
+  }
+}
diff --git a/src/LICENSE_THIRD_PARTY b/src/LICENSE_THIRD_PARTY
new file mode 100644
index 0000000..e19708a
--- /dev/null
+++ b/src/LICENSE_THIRD_PARTY
@@ -0,0 +1,27 @@
+This source tree contains third party source code which is governed by third 
+party licenses. This file contains references to files which are under other 
+licenses than the one provided in the LICENSE file in the root of the source
+tree.
+
+Files governed by third party licenses:
+common_audio/signal_processing/spl_sqrt_floor.c
+common_audio/signal_processing/spl_sqrt_floor.s
+modules/audio_coding/codecs/G711/main/source/g711.h
+modules/audio_coding/codecs/G711/main/source/g711.c
+modules/audio_coding/codecs/G722/main/source/g722_decode.h
+modules/audio_coding/codecs/G722/main/source/g722_decode.c
+modules/audio_coding/codecs/G722/main/source/g722_encode.h
+modules/audio_coding/codecs/G722/main/source/g722_encode.c
+modules/audio_coding/codecs/iSAC/main/source/fft.c
+modules/audio_device/main/source/Mac/portaudio/pa_memorybarrier.h
+modules/audio_device/main/source/Mac/portaudio/pa_ringbuffer.h
+modules/audio_device/main/source/Mac/portaudio/pa_ringbuffer.c
+modules/audio_processing/utility/fft4g.c
+modules/audio_processing/aec/aec_rdft.c
+system_wrappers/interface/fix_interlocked_exchange_pointer_windows.h
+system_wrappers/interface/scoped_ptr.h
+system_wrappers/interface/scoped_refptr.h
+system_wrappers/source/condition_variable_windows.cc
+system_wrappers/source/spreadsortlib/constants.hpp
+system_wrappers/source/spreadsortlib/spreadsort.hpp
+system_wrappers/source/thread_windows_set_name.h
diff --git a/src/README.chromium b/src/README.chromium
new file mode 100644
index 0000000..246c13d
--- /dev/null
+++ b/src/README.chromium
@@ -0,0 +1,13 @@
+Name: WebRTC

+URL: http://www.webrtc.org

+Version: 90

+License: BSD

+License File: LICENSE

+

+Description:

+WebRTC provides real time voice and video processing

+functionality to enable the implementation of 

+PeerConnection/MediaStream.

+

+Third party code used in this project is described 

+in the file LICENSE_THIRD_PARTY.

diff --git a/src/build/arm_neon.gypi b/src/build/arm_neon.gypi
new file mode 100644
index 0000000..53c3b53
--- /dev/null
+++ b/src/build/arm_neon.gypi
@@ -0,0 +1,35 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# This file sets correct neon flags. Include it if you want to build
+# source with neon intrinsics.
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_lib',
+#   'type': 'static_library',
+#   'sources': [
+#     'foo.c',
+#     'bar.cc',
+#   ],
+#   'includes': ['path/to/this/gypi/file'],
+# }
+
+{
+  'conditions': [
+    ['OS=="android"', {
+      'cflags!': [
+        '-mfpu=vfpv3-d16',
+      ],
+      'cflags': [
+        '-mfpu=neon',
+        '-mfloat-abi=softfp',
+        '-flax-vector-conversions',
+      ],
+    }],
+  ],
+}
diff --git a/src/build/common.gypi b/src/build/common.gypi
new file mode 100644
index 0000000..aa72ea5
--- /dev/null
+++ b/src/build/common.gypi
@@ -0,0 +1,208 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# This file contains common settings for building WebRTC components.
+
+{
+  # Nesting is required in order to use variables for setting other variables.
+  'variables': {
+    'variables': {
+      'variables': {
+        'variables': {
+          # This will be set to zero in the supplement.gypi triggered by a
+          # gclient hook in the standalone build.
+          'build_with_chromium%': 1,
+        },
+        'build_with_chromium%': '<(build_with_chromium)',
+
+        'conditions': [
+          ['build_with_chromium==1', {
+            'webrtc_root%': '<(DEPTH)/third_party/webrtc',
+          }, {
+            'webrtc_root%': '<(DEPTH)/src',
+          }],
+        ],
+      },
+      'build_with_chromium%': '<(build_with_chromium)',
+      'webrtc_root%': '<(webrtc_root)',
+
+      'webrtc_vp8_dir%': '<(webrtc_root)/modules/video_coding/codecs/vp8',
+    },
+    'build_with_chromium%': '<(build_with_chromium)',
+    'webrtc_root%': '<(webrtc_root)',
+    'webrtc_vp8_dir%': '<(webrtc_vp8_dir)',
+
+    # The Chromium common.gypi we use treats all gyp files without
+    # chromium_code==1 as third party code. This disables many of the
+    # preferred warning settings.
+    #
+    # We can set this here to have WebRTC code treated as Chromium code. Our
+    # third party code will still have the reduced warning settings.
+    'chromium_code': 1,
+
+    # Adds video support to dependencies shared by voice and video engine.
+    # This should normally be enabled; the intended use is to disable only
+    # when building voice engine exclusively.
+    'enable_video%': 1,
+
+    # Selects fixed-point code where possible.
+    'prefer_fixed_point%': 0,
+
+    # Enable data logging. Produces text files with data logged within engines
+    # which can be easily parsed for offline processing.
+    'enable_data_logging%': 0,
+
+    # Disable these to not build components which can be externally provided.
+    'build_libjpeg%': 1,
+    'build_libyuv%': 1,
+
+    'libyuv_dir%': '<(DEPTH)/third_party/libyuv',
+
+    'conditions': [
+      ['build_with_chromium==1', {
+        # Exclude pulse audio on Chromium since its prerequisites don't require
+        # pulse audio.
+        'include_pulse_audio%': 0,
+
+        # Exclude internal ADM since Chromium uses its own IO handling.
+        'include_internal_audio_device%': 0,
+
+        # Exclude internal VCM in Chromium build.
+        'include_internal_video_capture%': 0,
+
+        # Exclude internal video render module in Chromium build.
+        'include_internal_video_render%': 0,
+
+        'include_video_engine_file_api%': 0,
+
+        'include_tests%': 0,
+
+        # Disable the use of protocol buffers in production code.
+        'enable_protobuf%': 0,
+      }, {  # Settings for the standalone (not-in-Chromium) build.
+        'include_pulse_audio%': 1,
+        'include_internal_audio_device%': 1,
+        'include_internal_video_capture%': 1,
+        'include_internal_video_render%': 1,
+        'include_video_engine_file_api%': 1,
+        'enable_protobuf%': 1,
+        'include_tests%': 1,
+
+        # TODO(andrew): For now, disable the Chrome plugins, which causes a
+        # flood of chromium-style warnings. Investigate enabling them:
+        # http://code.google.com/p/webrtc/issues/detail?id=163
+        'clang_use_chrome_plugins%': 0,
+      }],
+    ], # conditions
+  },
+  'target_defaults': {
+    'include_dirs': [
+      # TODO(andrew): we should be able to just use <(webrtc_root) here.
+      '..','../..',
+    ],
+    'defines': [
+      # TODO(leozwang): Run this as a gclient hook rather than at build-time:
+      # http://code.google.com/p/webrtc/issues/detail?id=687
+      'WEBRTC_SVNREVISION="Unavailable(issue687)"',
+      #'WEBRTC_SVNREVISION="<!(python <(webrtc_root)/build/version.py)"',
+    ],
+    'conditions': [
+      ['build_with_chromium==1', {
+        'defines': [
+          # Changes settings for Chromium build.
+          'WEBRTC_CHROMIUM_BUILD',
+        ],
+      }, {
+        'conditions': [
+          ['os_posix==1', {
+            'cflags': [
+              '-Wextra',
+              # We need to repeat some flags from Chromium's common.gypi here
+              # that get overridden by -Wextra.
+              '-Wno-unused-parameter',
+              '-Wno-missing-field-initializers',
+            ],
+            'cflags_cc': [
+              # This is enabled for clang; enable for gcc as well.
+              '-Woverloaded-virtual',
+            ],
+          }],
+        ],
+      }],
+      ['target_arch=="arm"', {
+        'prefer_fixed_point%': 1,
+        'defines': [
+          'WEBRTC_ARCH_ARM',
+        ],
+        'conditions': [
+          ['armv7==1', {
+            'defines': [
+              'WEBRTC_ARCH_ARM_V7',
+              'WEBRTC_DETECT_ARM_NEON',
+            ],
+          }],
+          ['arm_neon==1', {
+            'defines': [
+              'WEBRTC_ARCH_ARM_NEON',
+            ],
+            'defines!': [
+              'WEBRTC_DETECT_ARM_NEON',
+            ],
+          }],
+        ],
+      }],
+      ['OS=="linux"', {
+        'defines': [
+          'WEBRTC_LINUX',
+          'WEBRTC_THREAD_RR',
+          # TODO(andrew): can we select this automatically?
+          # Define this if the Linux system does not support CLOCK_MONOTONIC.
+          #'WEBRTC_CLOCK_TYPE_REALTIME',
+        ],
+      }],
+      ['OS=="mac"', {
+        'defines': [
+          'WEBRTC_MAC',
+          'WEBRTC_MAC_INTEL',  # TODO(andrew): remove this.
+          'WEBRTC_THREAD_RR',
+          'WEBRTC_CLOCK_TYPE_REALTIME',
+        ],
+      }],
+      ['OS=="win"', {
+        'defines': [
+          'WEBRTC_WIN',
+        ],
+        # TODO(andrew): enable all warnings when possible.
+        # 4389: Signed/unsigned mismatch.
+        # 4373: MSVC legacy warning for ignoring const / volatile in
+        # signatures. TODO(phoglund): get rid of 4373 supression when
+        # http://code.google.com/p/webrtc/issues/detail?id=261 is solved.
+        'msvs_disabled_warnings': [4389, 4373],
+
+        # Re-enable some warnings that Chromium disables.
+        'msvs_disabled_warnings!': [4189,],
+      }],
+      ['OS=="android"', {
+        # TODO(kma): Remove prefer_fixed_point for Android.
+        'prefer_fixed_point%': 1,
+        'defines': [
+          'WEBRTC_LINUX',
+          'WEBRTC_ANDROID',
+          # TODO(leozwang): Investigate CLOCK_REALTIME and CLOCK_MONOTONIC
+          # support on Android. Keep WEBRTC_CLOCK_TYPE_REALTIME for now,
+          # remove it after I verify that CLOCK_MONOTONIC is fully functional
+          # with condition and event functions in system_wrappers.
+          'WEBRTC_CLOCK_TYPE_REALTIME',
+          'WEBRTC_THREAD_RR',
+          'WEBRTC_ANDROID_OPENSLES',
+         ],
+      }],
+    ], # conditions
+  }, # target_defaults
+}
+
diff --git a/src/build/generate_asm_header.py b/src/build/generate_asm_header.py
new file mode 100644
index 0000000..7d275ef
--- /dev/null
+++ b/src/build/generate_asm_header.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+"""This script generates a C header file of offsets from an ARM assembler file.
+
+It parses an ARM assembler generated .S file, finds declarations of variables
+whose names start with the string specified as the third argument in the
+command-line, translates the variable names and values into constant defines and
+writes them into a header file.
+"""
+
+import sys
+
+def usage():
+  print("Usage: generate_asm_header.py " +
+     "<input filename> <output filename> <variable name pattern>")
+  sys.exit(1)
+
+def main(argv):
+  if len(argv) != 3:
+    usage()
+
+  infile = open(argv[0])
+  outfile = open(argv[1], 'w')
+
+  for line in infile:  # Iterate though all the lines in the input file.
+    if line.startswith(argv[2]):
+      outfile.write('#define ')
+      outfile.write(line.split(':')[0])  # Write the constant name.
+      outfile.write(' ')
+
+    if line.find('.word') >= 0:
+      outfile.write(line.split('.word')[1])  # Write the constant value.
+
+  infile.close()
+  outfile.close()
+
+if __name__ == "__main__":
+  main(sys.argv[1:])
diff --git a/src/build/merge_libs.gyp b/src/build/merge_libs.gyp
new file mode 100644
index 0000000..d247189
--- /dev/null
+++ b/src/build/merge_libs.gyp
@@ -0,0 +1,43 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'includes': [ 'common.gypi', ],
+  'targets': [
+    {
+      'target_name': 'no_op',
+      'type': 'executable',
+      'dependencies': [
+        '../video_engine/video_engine.gyp:video_engine_core',
+      ],
+      'sources': [ 'no_op.cc', ],
+    },
+    {
+      'target_name': 'merged_lib',
+      'type': 'none',
+      'dependencies': [
+        'no_op',
+      ],
+      'actions': [
+        {
+          'variables': {
+            'output_lib_name': 'webrtc',
+            'output_lib': '<(PRODUCT_DIR)/<(STATIC_LIB_PREFIX)<(output_lib_name)<(STATIC_LIB_SUFFIX)',
+          },
+          'action_name': 'merge_libs',
+          'inputs': ['<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)no_op<(EXECUTABLE_SUFFIX)'],
+          'outputs': ['<(output_lib)'],
+          'action': ['python',
+                     'merge_libs.py',
+                     '<(PRODUCT_DIR)',
+                     '<(output_lib)',],
+        },
+      ],
+    },
+  ],
+}
diff --git a/src/build/merge_libs.py b/src/build/merge_libs.py
new file mode 100644
index 0000000..31c5efb
--- /dev/null
+++ b/src/build/merge_libs.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# Searches for libraries and/or object files on the specified path and
+# merges them into a single library.
+
+import subprocess
+import sys
+
+if __name__ == '__main__':
+  if len(sys.argv) != 3:
+    sys.stderr.write('Usage: ' + sys.argv[0] + ' <search_path> <output_lib>\n')
+    sys.exit(2)
+
+  search_path = sys.argv[1]
+  output_lib = sys.argv[2]
+
+  from subprocess import call, PIPE
+  if sys.platform.startswith('linux'):
+    call(["rm -f " + output_lib], shell=True)
+    call(["rm -rf " + search_path + "/obj.target/*do_not_use*"], shell=True)
+    call(["ar crs " + output_lib + " $(find " + search_path +
+          "/obj.target -name *\.o)"], shell=True)
+    call(["ar crs " + output_lib + " $(find " + search_path +
+          "/obj/gen -name *\.o)"], shell=True)
+
+  elif sys.platform == 'darwin':
+    call(["rm -f " + output_lib], shell=True)
+    call(["rm -f " + search_path + "/*do_not_use*"], shell=True)
+    call(["libtool -static -v -o " + output_lib + " " + search_path + "/*.a"],
+         shell=True)
+
+  elif sys.platform == 'win32':
+    # We need to execute a batch file to set some environment variables for the
+    # lib command. VS 8 uses vsvars.bat and VS 9 uses vsvars32.bat. It's
+    # required that at least one of them is in the system PATH. We try both and
+    # suppress stderr and stdout to fail silently.
+    call(["vsvars.bat"], stderr=PIPE, stdout=PIPE, shell=True)
+    call(["vsvars32.bat"], stderr=PIPE, stdout=PIPE, shell=True)
+    call(["del " + output_lib], shell=True)
+    call(["del /F /S /Q " + search_path + "/lib/*do_not_use*"],
+          shell=True)
+    call(["lib /OUT:" + output_lib + " " + search_path + "/lib/*.lib"],
+         shell=True)
+
+  else:
+    sys.stderr.write('Platform not supported: %r\n\n' % sys.platform)
+    sys.exit(1)
+
+  sys.exit(0)
+
diff --git a/src/build/no_op.cc b/src/build/no_op.cc
new file mode 100644
index 0000000..7508b9d
--- /dev/null
+++ b/src/build/no_op.cc
@@ -0,0 +1,14 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// No-op main() to provide a dummy executable target.
+int main() {
+  return 0;
+}
diff --git a/src/build/protoc.gypi b/src/build/protoc.gypi
new file mode 100644
index 0000000..70bf71e
--- /dev/null
+++ b/src/build/protoc.gypi
@@ -0,0 +1,97 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# It was necessary to copy this file to WebRTC, because the path to
+# build/common.gypi is different for the standalone and Chromium builds. Gyp
+# doesn't permit conditional inclusion or variable expansion in include paths.
+# http://code.google.com/p/gyp/wiki/InputFormatReference#Including_Other_Files
+
+# This file is meant to be included into a target to provide a rule
+# to invoke protoc in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_proto_lib',
+#   'type': 'static_library',
+#   'sources': [
+#     'foo.proto',
+#     'bar.proto',
+#   ],
+#   'variables': {
+#     # Optional, see below: 'proto_in_dir': '.'
+#     'proto_out_dir': 'dir/for/my_proto_lib'
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+# If necessary, you may add normal .cc files to the sources list or other gyp
+# dependencies.  The proto headers are guaranteed to be generated before any
+# source files, even within this target, are compiled.
+#
+# The 'proto_in_dir' variable must be the relative path to the
+# directory containing the .proto files.  If left out, it defaults to '.'.
+#
+# The 'proto_out_dir' variable specifies the path suffix that output
+# files are generated under.  Targets that gyp-depend on my_proto_lib
+# will be able to include the resulting proto headers with an include
+# like:
+#   #include "dir/for/my_proto_lib/foo.pb.h"
+#
+# Implementation notes:
+# A proto_out_dir of foo/bar produces
+#   <(SHARED_INTERMEDIATE_DIR)/protoc_out/foo/bar/{file1,file2}.pb.{cc,h}
+#   <(SHARED_INTERMEDIATE_DIR)/pyproto/foo/bar/{file1,file2}_pb2.py
+
+{
+  'variables': {
+    'protoc': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)protoc<(EXECUTABLE_SUFFIX)',
+    'cc_dir': '<(SHARED_INTERMEDIATE_DIR)/protoc_out/<(proto_out_dir)',
+    'py_dir': '<(PRODUCT_DIR)/pyproto/<(proto_out_dir)',
+    'proto_in_dir%': '.',
+  },
+  'rules': [
+    {
+      'rule_name': 'genproto',
+      'extension': 'proto',
+      'inputs': [
+        '<(protoc)',
+      ],
+      'outputs': [
+        '<(py_dir)/<(RULE_INPUT_ROOT)_pb2.py',
+        '<(cc_dir)/<(RULE_INPUT_ROOT).pb.cc',
+        '<(cc_dir)/<(RULE_INPUT_ROOT).pb.h',
+      ],
+      'action': [
+        '<(protoc)',
+        '--proto_path=<(proto_in_dir)',
+        # Naively you'd use <(RULE_INPUT_PATH) here, but protoc requires
+        # --proto_path is a strict prefix of the path given as an argument.
+        '<(proto_in_dir)/<(RULE_INPUT_ROOT)<(RULE_INPUT_EXT)',
+        '--cpp_out=<(cc_dir)',
+        '--python_out=<(py_dir)',
+        ],
+      'message': 'Generating C++ and Python code from <(RULE_INPUT_PATH)',
+      'process_outputs_as_sources': 1,
+    },
+  ],
+  'dependencies': [
+    '<(DEPTH)/third_party/protobuf/protobuf.gyp:protoc#host',
+    '<(DEPTH)/third_party/protobuf/protobuf.gyp:protobuf_lite',
+  ],
+  'include_dirs': [
+    '<(SHARED_INTERMEDIATE_DIR)/protoc_out',
+  ],
+  'direct_dependent_settings': {
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)/protoc_out',
+    ]
+  },
+  'export_dependent_settings': [
+    # The generated headers reference headers within protobuf_lite,
+    # so dependencies must be able to find those headers too.
+    '<(DEPTH)/third_party/protobuf/protobuf.gyp:protobuf_lite',
+  ],
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/src/build/version.py b/src/build/version.py
new file mode 100755
index 0000000..ff67565
--- /dev/null
+++ b/src/build/version.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+"""Get svn revision of working copy
+
+This script tries to get the svn revision as much as it can. It supports
+both git-svn and svn. It will fail if not in a git-svn or svn repository;
+in this case the script will return "n/a".
+This script is a simplified version of lastchange.py which is in Chromium's
+src/build/util folder.
+"""
+
+import os
+import subprocess
+import sys
+
+def popen_cmd_and_get_output(cmd, directory):
+  """Return (status, output) of executing cmd in a shell."""
+  try:
+    proc = subprocess.Popen(cmd,
+                            stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE,
+                            cwd=directory,
+                            shell=(sys.platform=='win32'))
+  except OSError:
+    # command is apparently either not installed or not executable.
+    return None
+  if not proc:
+    return None
+
+  for line in proc.stdout:
+    line = line.strip()
+    if not line:
+      continue
+    words = line.split()
+    for index, word in enumerate(words):
+      if word == "Revision:":
+        return words[index+1]
+  # return None if cannot find keyword Revision
+  return None
+
+def main():
+  directory = os.path.dirname(sys.argv[0]);
+  version = popen_cmd_and_get_output(['git', 'svn', 'info'], directory)
+  if version == None:
+    version = popen_cmd_and_get_output(['svn', 'info'], directory)
+    if version == None:
+      print "n/a"
+      return 0
+  print version
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/common_audio/resampler/resampler.gypi b/src/common_audio/resampler/resampler.gypi
index 69f9b0e..75997fd 100644
--- a/src/common_audio/resampler/resampler.gypi
+++ b/src/common_audio/resampler/resampler.gypi
@@ -29,22 +29,22 @@
     },
   ], # targets
   'conditions': [
-    ['build_with_chromium==0', {
+    ['include_tests==1', {
       'targets' : [
         {
           'target_name': 'resampler_unittests',
           'type': 'executable',
           'dependencies': [
             'resampler',
-            '<(webrtc_root)/../test/test.gyp:test_support_main',
-            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
           ],
           'sources': [
             'resampler_unittest.cc',
           ],
         }, # resampler_unittests
       ], # targets
-    }], # build_with_chromium
+    }], # include_tests
   ], # conditions
 }
 
diff --git a/src/common_audio/signal_processing/Android.mk b/src/common_audio/signal_processing/Android.mk
index 552e836..c28cced 100644
--- a/src/common_audio/signal_processing/Android.mk
+++ b/src/common_audio/signal_processing/Android.mk
@@ -1,4 +1,4 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 #
 # Use of this source code is governed by a BSD-style license
 # that can be found in the LICENSE file in the root of the source
@@ -20,15 +20,11 @@
     auto_corr_to_refl_coef.c \
     auto_correlation.c \
     complex_fft.c \
-    complex_bit_reverse.c \
     copy_set_operations.c \
-    cross_correlation.c \
     division_operations.c \
     dot_product_with_scale.c \
-    downsample_fast.c \
     energy.c \
     filter_ar.c \
-    filter_ar_fast_q12.c \
     filter_ma_fast_q12.c \
     get_hanning_window.c \
     get_scaling_square.c \
@@ -64,21 +60,39 @@
     $(LOCAL_PATH)/include \
     $(LOCAL_PATH)/../..
 
-ifeq ($(ARCH_ARM_HAVE_NEON),true)
-LOCAL_SRC_FILES_arm += \
-    min_max_operations_neon.c
-LOCAL_CFLAGS_arm += \
-    $(MY_ARM_CFLAGS_NEON)
-endif
+# Some new .s files have compilation error with AOSP configuration,
+# so they are not used. The next merge of upstream .S file might work.
+#ifeq ($(ARCH_ARM_HAVE_NEON),true)
+#LOCAL_SRC_FILES += \
+#    cross_correlation_neon.s \
+#    downsample_fast_neon.s \
+#    min_max_operations_neon.s \
+#    vector_scaling_operations_neon.s
+#LOCAL_CFLAGS += \
+#    $(MY_ARM_CFLAGS_NEON)
+#else
+LOCAL_SRC_FILES += \
+    cross_correlation.c \
+    downsample_fast.c
+#endif
 
-my_as_src := spl_sqrt_floor.s
-my_c_src := spl_sqrt_floor.c
-LOCAL_SRC_FILES_arm += $(my_as_src)
-LOCAL_SRC_FILES_x86 += $(my_c_src)
-LOCAL_SRC_FILES_mips += $(my_c_src)
-LOCAL_SRC_FILES_arm64 += $(my_c_src)
-LOCAL_SRC_FILES_x86_64 += $(my_c_src)
-LOCAL_SRC_FILES_mips64 += $(my_c_src)
+#ifeq ($(ARCH_ARM_HAVE_ARMV7A),true_skip)
+#LOCAL_SRC_FILES += \
+#    filter_ar_fast_q12_armv7.s
+#else
+LOCAL_SRC_FILES += \
+    filter_ar_fast_q12.c
+#endif
+
+ifeq ($(TARGET_ARCH),arm)
+LOCAL_SRC_FILES += \
+    complex_bit_reverse_arm.s \
+    spl_sqrt_floor_arm.s
+else
+LOCAL_SRC_FILES += \
+    complex_bit_reverse.c \
+    spl_sqrt_floor.c
+endif
 
 ifdef WEBRTC_STL
 LOCAL_NDK_STL_VARIANT := $(WEBRTC_STL)
diff --git a/src/common_audio/signal_processing/auto_correlation.c b/src/common_audio/signal_processing/auto_correlation.c
index a00fde4..bd954cf 100644
--- a/src/common_audio/signal_processing/auto_correlation.c
+++ b/src/common_audio/signal_processing/auto_correlation.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -8,134 +8,61 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-
-/*
- * This file contains the function WebRtcSpl_AutoCorrelation().
- * The description header can be found in signal_processing_library.h
- *
- */
-
 #include "signal_processing_library.h"
 
-int WebRtcSpl_AutoCorrelation(G_CONST WebRtc_Word16* in_vector,
+int WebRtcSpl_AutoCorrelation(const int16_t* in_vector,
                               int in_vector_length,
                               int order,
-                              WebRtc_Word32* result,
-                              int* scale)
-{
-    WebRtc_Word32 sum;
-    int i, j;
-    WebRtc_Word16 smax; // Sample max
-    G_CONST WebRtc_Word16* xptr1;
-    G_CONST WebRtc_Word16* xptr2;
-    WebRtc_Word32* resultptr;
-    int scaling = 0;
+                              int32_t* result,
+                              int* scale) {
+  int32_t sum = 0;
+  int i = 0, j = 0;
+  int16_t smax = 0;
+  int scaling = 0;
 
-#ifdef _ARM_OPT_
-#pragma message("NOTE: _ARM_OPT_ optimizations are used")
-    WebRtc_Word16 loops4;
-#endif
+  if (order > in_vector_length) {
+    /* Undefined */
+    return -1;
+  } else if (order < 0) {
+    order = in_vector_length;
+  }
 
-    if (order < 0)
-        order = in_vector_length;
+  // Find the maximum absolute value of the samples.
+  smax = WebRtcSpl_MaxAbsValueW16(in_vector, in_vector_length);
 
-    // Find the max. sample
-    smax = WebRtcSpl_MaxAbsValueW16(in_vector, in_vector_length);
+  // In order to avoid overflow when computing the sum we should scale the
+  // samples so that (in_vector_length * smax * smax) will not overflow.
+  if (smax == 0) {
+    scaling = 0;
+  } else {
+    // Number of bits in the sum loop.
+    int nbits = WebRtcSpl_GetSizeInBits(in_vector_length);
+    // Number of bits to normalize smax.
+    int t = WebRtcSpl_NormW32(WEBRTC_SPL_MUL(smax, smax));
 
-    // In order to avoid overflow when computing the sum we should scale the samples so that
-    // (in_vector_length * smax * smax) will not overflow.
-
-    if (smax == 0)
-    {
-        scaling = 0;
-    } else
-    {
-        int nbits = WebRtcSpl_GetSizeInBits(in_vector_length); // # of bits in the sum loop
-        int t = WebRtcSpl_NormW32(WEBRTC_SPL_MUL(smax, smax)); // # of bits to normalize smax
-
-        if (t > nbits)
-        {
-            scaling = 0;
-        } else
-        {
-            scaling = nbits - t;
-        }
-
+    if (t > nbits) {
+      scaling = 0;
+    } else {
+      scaling = nbits - t;
     }
+  }
 
-    resultptr = result;
-
-    // Perform the actual correlation calculation
-    for (i = 0; i < order + 1; i++)
-    {
-        int loops = (in_vector_length - i);
-        sum = 0;
-        xptr1 = in_vector;
-        xptr2 = &in_vector[i];
-#ifndef _ARM_OPT_
-        for (j = loops; j > 0; j--)
-        {
-            sum += WEBRTC_SPL_MUL_16_16_RSFT(*xptr1++, *xptr2++, scaling);
-        }
-#else
-        loops4 = (loops >> 2) << 2;
-
-        if (scaling == 0)
-        {
-            for (j = 0; j < loops4; j = j + 4)
-            {
-                sum += WEBRTC_SPL_MUL_16_16(*xptr1, *xptr2);
-                xptr1++;
-                xptr2++;
-                sum += WEBRTC_SPL_MUL_16_16(*xptr1, *xptr2);
-                xptr1++;
-                xptr2++;
-                sum += WEBRTC_SPL_MUL_16_16(*xptr1, *xptr2);
-                xptr1++;
-                xptr2++;
-                sum += WEBRTC_SPL_MUL_16_16(*xptr1, *xptr2);
-                xptr1++;
-                xptr2++;
-            }
-
-            for (j = loops4; j < loops; j++)
-            {
-                sum += WEBRTC_SPL_MUL_16_16(*xptr1, *xptr2);
-                xptr1++;
-                xptr2++;
-            }
-        }
-        else
-        {
-            for (j = 0; j < loops4; j = j + 4)
-            {
-                sum += WEBRTC_SPL_MUL_16_16_RSFT(*xptr1, *xptr2, scaling);
-                xptr1++;
-                xptr2++;
-                sum += WEBRTC_SPL_MUL_16_16_RSFT(*xptr1, *xptr2, scaling);
-                xptr1++;
-                xptr2++;
-                sum += WEBRTC_SPL_MUL_16_16_RSFT(*xptr1, *xptr2, scaling);
-                xptr1++;
-                xptr2++;
-                sum += WEBRTC_SPL_MUL_16_16_RSFT(*xptr1, *xptr2, scaling);
-                xptr1++;
-                xptr2++;
-            }
-
-            for (j = loops4; j < loops; j++)
-            {
-                sum += WEBRTC_SPL_MUL_16_16_RSFT(*xptr1, *xptr2, scaling);
-                xptr1++;
-                xptr2++;
-            }
-        }
-
-#endif
-        *resultptr++ = sum;
+  // Perform the actual correlation calculation.
+  for (i = 0; i < order + 1; i++) {
+    sum = 0;
+    /* Unroll the loop to improve performance. */
+    for (j = 0; j < in_vector_length - i - 3; j += 4) {
+      sum += (in_vector[j + 0] * in_vector[i + j + 0]) >> scaling;
+      sum += (in_vector[j + 1] * in_vector[i + j + 1]) >> scaling;
+      sum += (in_vector[j + 2] * in_vector[i + j + 2]) >> scaling;
+      sum += (in_vector[j + 3] * in_vector[i + j + 3]) >> scaling;
     }
+    for (; j < in_vector_length - i; j++) {
+      sum += (in_vector[j] * in_vector[i + j]) >> scaling;
+    }
+    *result++ = sum;
+  }
 
-    *scale = scaling;
-
-    return order + 1;
+  *scale = scaling;
+  return order + 1;
 }
diff --git a/src/common_audio/signal_processing/complex_bit_reverse.c b/src/common_audio/signal_processing/complex_bit_reverse.c
index 85c76f8..02fde1e 100644
--- a/src/common_audio/signal_processing/complex_bit_reverse.c
+++ b/src/common_audio/signal_processing/complex_bit_reverse.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -8,44 +8,102 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-
-/*
- * This file contains the function WebRtcSpl_ComplexBitReverse().
- * The description header can be found in signal_processing_library.h
- *
- */
-
 #include "signal_processing_library.h"
 
-void WebRtcSpl_ComplexBitReverse(WebRtc_Word16 frfi[], int stages)
-{
-    int mr, nn, n, l, m;
-    WebRtc_Word16 tr, ti;
+/* Tables for data buffer indexes that are bit reversed and thus need to be
+ * swapped. Note that, index_7[{0, 2, 4, ...}] are for the left side of the swap
+ * operations, while index_7[{1, 3, 5, ...}] are for the right side of the
+ * operation. Same for index_8.
+ */
 
-    n = 1 << stages;
+/* Indexes for the case of stages == 7. */
+static const int16_t index_7[112] = {
+  1, 64, 2, 32, 3, 96, 4, 16, 5, 80, 6, 48, 7, 112, 9, 72, 10, 40, 11, 104,
+  12, 24, 13, 88, 14, 56, 15, 120, 17, 68, 18, 36, 19, 100, 21, 84, 22, 52,
+  23, 116, 25, 76, 26, 44, 27, 108, 29, 92, 30, 60, 31, 124, 33, 66, 35, 98,
+  37, 82, 38, 50, 39, 114, 41, 74, 43, 106, 45, 90, 46, 58, 47, 122, 49, 70,
+  51, 102, 53, 86, 55, 118, 57, 78, 59, 110, 61, 94, 63, 126, 67, 97, 69,
+  81, 71, 113, 75, 105, 77, 89, 79, 121, 83, 101, 87, 117, 91, 109, 95, 125,
+  103, 115, 111, 123
+};
 
-    mr = 0;
-    nn = n - 1;
+/* Indexes for the case of stages == 8. */
+static const int16_t index_8[240] = {
+  1, 128, 2, 64, 3, 192, 4, 32, 5, 160, 6, 96, 7, 224, 8, 16, 9, 144, 10, 80,
+  11, 208, 12, 48, 13, 176, 14, 112, 15, 240, 17, 136, 18, 72, 19, 200, 20,
+  40, 21, 168, 22, 104, 23, 232, 25, 152, 26, 88, 27, 216, 28, 56, 29, 184,
+  30, 120, 31, 248, 33, 132, 34, 68, 35, 196, 37, 164, 38, 100, 39, 228, 41,
+  148, 42, 84, 43, 212, 44, 52, 45, 180, 46, 116, 47, 244, 49, 140, 50, 76,
+  51, 204, 53, 172, 54, 108, 55, 236, 57, 156, 58, 92, 59, 220, 61, 188, 62,
+  124, 63, 252, 65, 130, 67, 194, 69, 162, 70, 98, 71, 226, 73, 146, 74, 82,
+  75, 210, 77, 178, 78, 114, 79, 242, 81, 138, 83, 202, 85, 170, 86, 106, 87,
+  234, 89, 154, 91, 218, 93, 186, 94, 122, 95, 250, 97, 134, 99, 198, 101,
+  166, 103, 230, 105, 150, 107, 214, 109, 182, 110, 118, 111, 246, 113, 142,
+  115, 206, 117, 174, 119, 238, 121, 158, 123, 222, 125, 190, 127, 254, 131,
+  193, 133, 161, 135, 225, 137, 145, 139, 209, 141, 177, 143, 241, 147, 201,
+  149, 169, 151, 233, 155, 217, 157, 185, 159, 249, 163, 197, 167, 229, 171,
+  213, 173, 181, 175, 245, 179, 205, 183, 237, 187, 221, 191, 253, 199, 227,
+  203, 211, 207, 243, 215, 235, 223, 251, 239, 247
+};
 
-    // decimation in time - re-order data
-    for (m = 1; m <= nn; ++m)
-    {
-        l = n;
-        do
-        {
-            l >>= 1;
-        } while (mr + l > nn);
-        mr = (mr & (l - 1)) + l;
+void WebRtcSpl_ComplexBitReverse(int16_t* __restrict complex_data, int stages) {
+  /* For any specific value of stages, we know exactly the indexes that are
+   * bit reversed. Currently (Feb. 2012) in WebRTC the only possible values of
+   * stages are 7 and 8, so we use tables to save unnecessary iterations and
+   * calculations for these two cases.
+   */
+  if (stages == 7 || stages == 8) {
+    int m = 0;
+    int length = 112;
+    const int16_t* index = index_7;
 
-        if (mr <= m)
-            continue;
-
-        tr = frfi[2 * m];
-        frfi[2 * m] = frfi[2 * mr];
-        frfi[2 * mr] = tr;
-
-        ti = frfi[2 * m + 1];
-        frfi[2 * m + 1] = frfi[2 * mr + 1];
-        frfi[2 * mr + 1] = ti;
+    if (stages == 8) {
+      length = 240;
+      index = index_8;
     }
+
+    /* Decimation in time. Swap the elements with bit-reversed indexes. */
+    for (m = 0; m < length; m += 2) {
+      /* We declare a int32_t* type pointer, to load both the 16-bit real
+       * and imaginary elements from complex_data in one instruction, reducing
+       * complexity.
+       */
+      int32_t* complex_data_ptr = (int32_t*)complex_data;
+      int32_t temp = 0;
+
+      temp = complex_data_ptr[index[m]];  /* Real and imaginary */
+      complex_data_ptr[index[m]] = complex_data_ptr[index[m + 1]];
+      complex_data_ptr[index[m + 1]] = temp;
+    }
+  }
+  else {
+    int m = 0, mr = 0, l = 0;
+    int n = 1 << stages;
+    int nn = n - 1;
+
+    /* Decimation in time - re-order data */
+    for (m = 1; m <= nn; ++m) {
+      int32_t* complex_data_ptr = (int32_t*)complex_data;
+      int32_t temp = 0;
+
+      /* Find out indexes that are bit-reversed. */
+      l = n;
+      do {
+        l >>= 1;
+      } while (l > nn - mr);
+      mr = (mr & (l - 1)) + l;
+
+      if (mr <= m) {
+        continue;
+      }
+
+      /* Swap the elements with bit-reversed indexes.
+       * This is similar to the loop in the stages == 7 or 8 cases.
+       */
+      temp = complex_data_ptr[m];  /* Real and imaginary */
+      complex_data_ptr[m] = complex_data_ptr[mr];
+      complex_data_ptr[mr] = temp;
+    }
+  }
 }
+
diff --git a/src/common_audio/signal_processing/complex_bit_reverse_arm.s b/src/common_audio/signal_processing/complex_bit_reverse_arm.s
new file mode 100644
index 0000000..4828077
--- /dev/null
+++ b/src/common_audio/signal_processing/complex_bit_reverse_arm.s
@@ -0,0 +1,126 @@
+@
+@ Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+@
+@ Use of this source code is governed by a BSD-style license
+@ that can be found in the LICENSE file in the root of the source
+@ tree. An additional intellectual property rights grant can be found
+@ in the file PATENTS.  All contributing project authors may
+@ be found in the AUTHORS file in the root of the source tree.
+@
+
+@ This file contains the function WebRtcSpl_ComplexBitReverse(), optimized
+@ for ARMv5 platforms.
+@ Reference C code is in file complex_bit_reverse.c. Bit-exact.
+
+.arch armv5
+
+.global WebRtcSpl_ComplexBitReverse
+
+.align  2
+
+WebRtcSpl_ComplexBitReverse:
+.fnstart
+
+  push {r4-r7}
+
+  cmp r1, #7
+  adr r3, index_7                 @ Table pointer.
+  mov r4, #112                    @ Number of interations.
+  beq PRE_LOOP_STAGES_7_OR_8
+
+  cmp r1, #8
+  adr r3, index_8                 @ Table pointer.
+  mov r4, #240                    @ Number of interations.
+  beq PRE_LOOP_STAGES_7_OR_8
+
+  mov r3, #1                      @ Initialize m.
+  mov r1, r3, asl r1              @ n = 1 << stages;
+  subs r6, r1, #1                 @ nn = n - 1;
+  ble END
+
+  mov r5, r0                      @ &complex_data
+  mov r4, #0                      @ ml
+
+LOOP_GENERIC:
+  rsb r12, r4, r6                 @ l > nn - mr
+  mov r2, r1                      @ n
+
+LOOP_SHIFT:
+  asr r2, #1                      @ l >>= 1;
+  cmp r2, r12
+  bgt LOOP_SHIFT
+
+  sub r12, r2, #1
+  and r4, r12, r4
+  add r4, r2                      @ mr = (mr & (l - 1)) + l;
+  cmp r4, r3                      @ mr <= m ?
+  ble UPDATE_REGISTERS
+
+  mov r12, r4, asl #2
+  ldr r7, [r5, #4]                @ complex_data[2 * m, 2 * m + 1].
+                                  @   Offset 4 due to m incrementing from 1.
+  ldr r2, [r0, r12]               @ complex_data[2 * mr, 2 * mr + 1].
+  str r7, [r0, r12]
+  str r2, [r5, #4]
+
+UPDATE_REGISTERS:
+  add r3, r3, #1
+  add r5, #4
+  cmp r3, r1
+  bne LOOP_GENERIC
+
+  b END
+
+PRE_LOOP_STAGES_7_OR_8:
+  add r4, r3, r4, asl #1
+
+LOOP_STAGES_7_OR_8:
+  ldrsh r2, [r3], #2              @ index[m]
+  ldrsh r5, [r3], #2              @ index[m + 1]
+  ldr r1, [r0, r2]                @ complex_data[index[m], index[m] + 1]
+  ldr r12, [r0, r5]               @ complex_data[index[m + 1], index[m + 1] + 1]
+  cmp r3, r4
+  str r1, [r0, r5]
+  str r12, [r0, r2]
+  bne LOOP_STAGES_7_OR_8
+
+END:
+  pop {r4-r7}
+  bx lr
+
+.fnend
+
+
+@ The index tables. Note the values are doubles of the actual indexes for 16-bit
+@ elements, different from the generic C code. It actually provides byte offsets
+@ for the indexes.
+
+.align  2
+index_7:  @ Indexes for stages == 7.
+  .hword 4, 256, 8, 128, 12, 384, 16, 64, 20, 320, 24, 192, 28, 448, 36, 288
+  .hword 40, 160, 44, 416, 48, 96, 52, 352, 56, 224, 60, 480, 68, 272, 72, 144
+  .hword 76, 400, 84, 336, 88, 208, 92, 464, 100, 304, 104, 176, 108, 432, 116
+  .hword 368, 120, 240, 124, 496, 132, 264, 140, 392, 148, 328, 152, 200, 156
+  .hword 456, 164, 296, 172, 424, 180, 360, 184, 232, 188, 488, 196, 280, 204
+  .hword 408, 212, 344, 220, 472, 228, 312, 236, 440, 244, 376, 252, 504, 268
+  .hword 388, 276, 324, 284, 452, 300, 420, 308, 356, 316, 484, 332, 404, 348
+  .hword 468, 364, 436, 380, 500, 412, 460, 444, 492
+
+index_8:  @ Indexes for stages == 8.
+  .hword 4, 512, 8, 256, 12, 768, 16, 128, 20, 640, 24, 384, 28, 896, 32, 64
+  .hword 36, 576, 40, 320, 44, 832, 48, 192, 52, 704, 56, 448, 60, 960, 68, 544
+  .hword 72, 288, 76, 800, 80, 160, 84, 672, 88, 416, 92, 928, 100, 608, 104
+  .hword 352, 108, 864, 112, 224, 116, 736, 120, 480, 124, 992, 132, 528, 136
+  .hword 272, 140, 784, 148, 656, 152, 400, 156, 912, 164, 592, 168, 336, 172
+  .hword 848, 176, 208, 180, 720, 184, 464, 188, 976, 196, 560, 200, 304, 204
+  .hword 816, 212, 688, 216, 432, 220, 944, 228, 624, 232, 368, 236, 880, 244
+  .hword 752, 248, 496, 252, 1008, 260, 520, 268, 776, 276, 648, 280, 392, 284
+  .hword 904, 292, 584, 296, 328, 300, 840, 308, 712, 312, 456, 316, 968, 324
+  .hword 552, 332, 808, 340, 680, 344, 424, 348, 936, 356, 616, 364, 872, 372
+  .hword 744, 376, 488, 380, 1000, 388, 536, 396, 792, 404, 664, 412, 920, 420
+  .hword 600, 428, 856, 436, 728, 440, 472, 444, 984, 452, 568, 460, 824, 468
+  .hword 696, 476, 952, 484, 632, 492, 888, 500, 760, 508, 1016, 524, 772, 532
+  .hword 644, 540, 900, 548, 580, 556, 836, 564, 708, 572, 964, 588, 804, 596
+  .hword 676, 604, 932, 620, 868, 628, 740, 636, 996, 652, 788, 668, 916, 684
+  .hword 852, 692, 724, 700, 980, 716, 820, 732, 948, 748, 884, 764, 1012, 796
+  .hword 908, 812, 844, 828, 972, 860, 940, 892, 1004, 956, 988
diff --git a/src/common_audio/signal_processing/complex_fft.c b/src/common_audio/signal_processing/complex_fft.c
index 1e8503c..3f06ab3 100644
--- a/src/common_audio/signal_processing/complex_fft.c
+++ b/src/common_audio/signal_processing/complex_fft.c
@@ -233,7 +233,7 @@
                 wr = kSinTable1024[j + 256];
                 wi = -kSinTable1024[j];
 
-#ifdef WEBRTC_ARCH_ARM_V7A
+#ifdef WEBRTC_ARCH_ARM_V7
                 WebRtc_Word32 wri;
                 WebRtc_Word32 frfi_r;
                 __asm__("pkhbt %0, %1, %2, lsl #16" : "=r"(wri) :
@@ -244,7 +244,7 @@
                 {
                     j = i + l;
 
-#ifdef WEBRTC_ARCH_ARM_V7A
+#ifdef WEBRTC_ARCH_ARM_V7
                     __asm__("pkhbt %0, %1, %2, lsl #16" : "=r"(frfi_r) :
                         "r"((WebRtc_Word32)frfi[2*j]), "r"((WebRtc_Word32)frfi[2*j +1]));
                     __asm__("smlsd %0, %1, %2, %3" : "=r"(tr32) :
@@ -374,7 +374,7 @@
                 wr = kSinTable1024[j + 256];
                 wi = kSinTable1024[j];
 
-#ifdef WEBRTC_ARCH_ARM_V7A
+#ifdef WEBRTC_ARCH_ARM_V7
                 WebRtc_Word32 wri;
                 WebRtc_Word32 frfi_r;
                 __asm__("pkhbt %0, %1, %2, lsl #16" : "=r"(wri) :
@@ -385,7 +385,7 @@
                 {
                     j = i + l;
 
-#ifdef WEBRTC_ARCH_ARM_V7A
+#ifdef WEBRTC_ARCH_ARM_V7
                     __asm__("pkhbt %0, %1, %2, lsl #16" : "=r"(frfi_r) :
                         "r"((WebRtc_Word32)frfi[2*j]), "r"((WebRtc_Word32)frfi[2*j +1]));
                     __asm__("smlsd %0, %1, %2, %3" : "=r"(tr32) :
diff --git a/src/common_audio/signal_processing/cross_correlation.c b/src/common_audio/signal_processing/cross_correlation.c
index 1133d09..cf7705c 100644
--- a/src/common_audio/signal_processing/cross_correlation.c
+++ b/src/common_audio/signal_processing/cross_correlation.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -8,260 +8,23 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-
-/*
- * This file contains the function WebRtcSpl_CrossCorrelation().
- * The description header can be found in signal_processing_library.h
- *
- */
-
 #include "signal_processing_library.h"
 
-void WebRtcSpl_CrossCorrelation(WebRtc_Word32* cross_correlation, WebRtc_Word16* seq1,
-                                WebRtc_Word16* seq2, WebRtc_Word16 dim_seq,
-                                WebRtc_Word16 dim_cross_correlation,
-                                WebRtc_Word16 right_shifts,
-                                WebRtc_Word16 step_seq2)
-{
-    int i, j;
-    WebRtc_Word16* seq1Ptr;
-    WebRtc_Word16* seq2Ptr;
-    WebRtc_Word32* CrossCorrPtr;
+void WebRtcSpl_CrossCorrelation(int32_t* cross_correlation,
+                                const int16_t* seq1,
+                                const int16_t* seq2,
+                                int16_t dim_seq,
+                                int16_t dim_cross_correlation,
+                                int16_t right_shifts,
+                                int16_t step_seq2) {
+  int i = 0, j = 0;
 
-#ifdef _XSCALE_OPT_
-
-#ifdef _WIN32
-#pragma message("NOTE: _XSCALE_OPT_ optimizations are used (overrides _ARM_OPT_ and requires /QRxscale compiler flag)")
-#endif
-
-    __int64 macc40;
-
-    int iseq1[250];
-    int iseq2[250];
-    int iseq3[250];
-    int * iseq1Ptr;
-    int * iseq2Ptr;
-    int * iseq3Ptr;
-    int len, i_len;
-
-    seq1Ptr = seq1;
-    iseq1Ptr = iseq1;
-    for(i = 0; i < ((dim_seq + 1) >> 1); i++)
-    {
-        *iseq1Ptr = (unsigned short)*seq1Ptr++;
-        *iseq1Ptr++ |= (WebRtc_Word32)*seq1Ptr++ << 16;
-
+  for (i = 0; i < dim_cross_correlation; i++) {
+    *cross_correlation = 0;
+    /* Unrolling doesn't seem to improve performance. */
+    for (j = 0; j < dim_seq; j++) {
+      *cross_correlation += (seq1[j] * seq2[step_seq2 * i + j]) >> right_shifts;
     }
-
-    if(dim_seq%2)
-    {
-        *(iseq1Ptr-1) &= 0x0000ffff;
-    }
-    *iseq1Ptr = 0;
-    iseq1Ptr++;
-    *iseq1Ptr = 0;
-    iseq1Ptr++;
-    *iseq1Ptr = 0;
-
-    if(step_seq2 < 0)
-    {
-        seq2Ptr = seq2 - dim_cross_correlation + 1;
-        CrossCorrPtr = &cross_correlation[dim_cross_correlation - 1];
-    }
-    else
-    {
-        seq2Ptr = seq2;
-        CrossCorrPtr = cross_correlation;
-    }
-
-    len = dim_seq + dim_cross_correlation - 1;
-    i_len = (len + 1) >> 1;
-    iseq2Ptr = iseq2;
-
-    iseq3Ptr = iseq3;
-    for(i = 0; i < i_len; i++)
-    {
-        *iseq2Ptr = (unsigned short)*seq2Ptr++;
-        *iseq3Ptr = (unsigned short)*seq2Ptr;
-        *iseq2Ptr++ |= (WebRtc_Word32)*seq2Ptr++ << 16;
-        *iseq3Ptr++ |= (WebRtc_Word32)*seq2Ptr << 16;
-    }
-
-    if(len % 2)
-    {
-        iseq2[i_len - 1] &= 0x0000ffff;
-        iseq3[i_len - 1] = 0;
-    }
-    else
-    iseq3[i_len - 1] &= 0x0000ffff;
-
-    iseq2[i_len] = 0;
-    iseq3[i_len] = 0;
-    iseq2[i_len + 1] = 0;
-    iseq3[i_len + 1] = 0;
-    iseq2[i_len + 2] = 0;
-    iseq3[i_len + 2] = 0;
-
-    // Set pointer to start value
-    iseq2Ptr = iseq2;
-    iseq3Ptr = iseq3;
-
-    i_len = (dim_seq + 7) >> 3;
-    for (i = 0; i < dim_cross_correlation; i++)
-    {
-
-        iseq1Ptr = iseq1;
-
-        macc40 = 0;
-
-        _WriteCoProcessor(macc40, 0);
-
-        if((i & 1))
-        {
-            iseq3Ptr = iseq3 + (i >> 1);
-            for (j = i_len; j > 0; j--)
-            {
-                _SmulAddPack_2SW_ACC(*iseq1Ptr++, *iseq3Ptr++);
-                _SmulAddPack_2SW_ACC(*iseq1Ptr++, *iseq3Ptr++);
-                _SmulAddPack_2SW_ACC(*iseq1Ptr++, *iseq3Ptr++);
-                _SmulAddPack_2SW_ACC(*iseq1Ptr++, *iseq3Ptr++);
-            }
-        }
-        else
-        {
-            iseq2Ptr = iseq2 + (i >> 1);
-            for (j = i_len; j > 0; j--)
-            {
-                _SmulAddPack_2SW_ACC(*iseq1Ptr++, *iseq2Ptr++);
-                _SmulAddPack_2SW_ACC(*iseq1Ptr++, *iseq2Ptr++);
-                _SmulAddPack_2SW_ACC(*iseq1Ptr++, *iseq2Ptr++);
-                _SmulAddPack_2SW_ACC(*iseq1Ptr++, *iseq2Ptr++);
-            }
-
-        }
-
-        macc40 = _ReadCoProcessor(0);
-        *CrossCorrPtr = (WebRtc_Word32)(macc40 >> right_shifts);
-        CrossCorrPtr += step_seq2;
-    }
-#else // #ifdef _XSCALE_OPT_
-#ifdef _ARM_OPT_
-    WebRtc_Word16 dim_seq8 = (dim_seq >> 3) << 3;
-#endif
-
-    CrossCorrPtr = cross_correlation;
-
-    for (i = 0; i < dim_cross_correlation; i++)
-    {
-        // Set the pointer to the static vector, set the pointer to the sliding vector
-        // and initialize cross_correlation
-        seq1Ptr = seq1;
-        seq2Ptr = seq2 + (step_seq2 * i);
-        (*CrossCorrPtr) = 0;
-
-#ifndef _ARM_OPT_ 
-#ifdef _WIN32
-#pragma message("NOTE: default implementation is used")
-#endif
-        // Perform the cross correlation
-        for (j = 0; j < dim_seq; j++)
-        {
-            (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16_RSFT((*seq1Ptr), (*seq2Ptr), right_shifts);
-            seq1Ptr++;
-            seq2Ptr++;
-        }
-#else
-#ifdef _WIN32
-#pragma message("NOTE: _ARM_OPT_ optimizations are used")
-#endif
-        if (right_shifts == 0)
-        {
-            // Perform the optimized cross correlation
-            for (j = 0; j < dim_seq8; j = j + 8)
-            {
-                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16((*seq1Ptr), (*seq2Ptr));
-                seq1Ptr++;
-                seq2Ptr++;
-                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16((*seq1Ptr), (*seq2Ptr));
-                seq1Ptr++;
-                seq2Ptr++;
-                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16((*seq1Ptr), (*seq2Ptr));
-                seq1Ptr++;
-                seq2Ptr++;
-                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16((*seq1Ptr), (*seq2Ptr));
-                seq1Ptr++;
-                seq2Ptr++;
-                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16((*seq1Ptr), (*seq2Ptr));
-                seq1Ptr++;
-                seq2Ptr++;
-                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16((*seq1Ptr), (*seq2Ptr));
-                seq1Ptr++;
-                seq2Ptr++;
-                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16((*seq1Ptr), (*seq2Ptr));
-                seq1Ptr++;
-                seq2Ptr++;
-                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16((*seq1Ptr), (*seq2Ptr));
-                seq1Ptr++;
-                seq2Ptr++;
-            }
-
-            for (j = dim_seq8; j < dim_seq; j++)
-            {
-                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16((*seq1Ptr), (*seq2Ptr));
-                seq1Ptr++;
-                seq2Ptr++;
-            }
-        }
-        else // right_shifts != 0
-
-        {
-            // Perform the optimized cross correlation
-            for (j = 0; j < dim_seq8; j = j + 8)
-            {
-                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16_RSFT((*seq1Ptr), (*seq2Ptr),
-                                                             right_shifts);
-                seq1Ptr++;
-                seq2Ptr++;
-                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16_RSFT((*seq1Ptr), (*seq2Ptr),
-                                                             right_shifts);
-                seq1Ptr++;
-                seq2Ptr++;
-                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16_RSFT((*seq1Ptr), (*seq2Ptr),
-                                                             right_shifts);
-                seq1Ptr++;
-                seq2Ptr++;
-                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16_RSFT((*seq1Ptr), (*seq2Ptr),
-                                                             right_shifts);
-                seq1Ptr++;
-                seq2Ptr++;
-                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16_RSFT((*seq1Ptr), (*seq2Ptr),
-                                                             right_shifts);
-                seq1Ptr++;
-                seq2Ptr++;
-                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16_RSFT((*seq1Ptr), (*seq2Ptr),
-                                                             right_shifts);
-                seq1Ptr++;
-                seq2Ptr++;
-                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16_RSFT((*seq1Ptr), (*seq2Ptr),
-                                                             right_shifts);
-                seq1Ptr++;
-                seq2Ptr++;
-                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16_RSFT((*seq1Ptr), (*seq2Ptr),
-                                                             right_shifts);
-                seq1Ptr++;
-                seq2Ptr++;
-            }
-
-            for (j = dim_seq8; j < dim_seq; j++)
-            {
-                (*CrossCorrPtr) += WEBRTC_SPL_MUL_16_16_RSFT((*seq1Ptr), (*seq2Ptr),
-                                                             right_shifts);
-                seq1Ptr++;
-                seq2Ptr++;
-            }
-        }
-#endif
-        CrossCorrPtr++;
-    }
-#endif
+    cross_correlation++;
+  }
 }
diff --git a/src/common_audio/signal_processing/cross_correlation_neon.s b/src/common_audio/signal_processing/cross_correlation_neon.s
new file mode 100644
index 0000000..e9b1c69
--- /dev/null
+++ b/src/common_audio/signal_processing/cross_correlation_neon.s
@@ -0,0 +1,168 @@
+@
+@ Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+@
+@ Use of this source code is governed by a BSD-style license
+@ that can be found in the LICENSE file in the root of the source
+@ tree. An additional intellectual property rights grant can be found
+@ in the file PATENTS.  All contributing project authors may
+@ be found in the AUTHORS file in the root of the source tree.
+@
+
+@ cross_correlation_neon.s
+@ This file contains the function WebRtcSpl_CrossCorrelation(),
+@ optimized for ARM Neon platform.
+@
+@ Reference Ccode at end of this file.
+@ Output is bit-exact with the reference C code, but not with the generic
+@ C code in file cross_correlation.c, due to reduction of shift operations
+@ from using Neon registers.
+
+@ Register usage:
+@
+@ r0: *cross_correlation (function argument)
+@ r1: *seq1 (function argument)
+@ r2: *seq2 (function argument)
+@ r3: dim_seq (function argument); then, total iteration of LOOP_DIM_SEQ
+@ r4: counter for LOOP_DIM_CROSS_CORRELATION
+@ r5: seq2_ptr
+@ r6: seq1_ptr
+@ r7: Total iteration of LOOP_DIM_SEQ_RESIDUAL
+@ r8, r9, r10, r11, r12: scratch
+
+.arch armv7-a
+.fpu neon
+
+.align  2
+.global WebRtcSpl_CrossCorrelation
+
+WebRtcSpl_CrossCorrelation:
+
+.fnstart
+
+.save {r4-r11}
+  push {r4-r11}
+
+  @ Put the shift value (-right_shifts) into a Neon register.
+  ldrsh r10, [sp, #36]
+  rsb r10, r10, #0
+  mov r8, r10, asr #31
+  vmov.32 d16, r10, r8
+
+  @ Initialize loop counters.
+  and r7, r3, #7              @ inner_loop_len2 = dim_seq % 8;
+  asr r3, r3, #3              @ inner_loop_len1 = dim_seq / 8;
+  ldrsh r4, [sp, #32]         @ dim_cross_correlation
+
+LOOP_DIM_CROSS_CORRELATION:
+  vmov.i32 q9, #0
+  vmov.i32 q14, #0
+  movs r8, r3                 @ inner_loop_len1
+  mov r6, r1                  @ seq1_ptr
+  mov r5, r2                  @ seq2_ptr
+  ble POST_LOOP_DIM_SEQ
+
+LOOP_DIM_SEQ:
+  vld1.16 {d20, d21}, [r6]!   @ seq1_ptr
+  vld1.16 {d22, d23}, [r5]!   @ seq2_ptr 
+  subs r8, r8, #1
+  vmull.s16 q12, d20, d22
+  vmull.s16 q13, d21, d23
+  vpadal.s32 q9, q12
+  vpadal.s32 q14, q13
+  bgt LOOP_DIM_SEQ
+
+POST_LOOP_DIM_SEQ:
+  movs r10, r7                @ Loop counter
+  mov r12, #0
+  mov r8, #0
+  ble POST_LOOP_DIM_SEQ_RESIDUAL
+
+LOOP_DIM_SEQ_RESIDUAL:
+  ldrh r11, [r6], #2
+  ldrh r9, [r5], #2
+  smulbb r11, r11, r9
+  adds r8, r8, r11
+  adc r12, r12, r11, asr #31
+  subs r10, #1
+  bgt LOOP_DIM_SEQ_RESIDUAL
+
+POST_LOOP_DIM_SEQ_RESIDUAL:   @ Sum the results up and do the shift.
+  vadd.i64 d18, d19
+  vadd.i64 d28, d29
+  vadd.i64 d18, d28
+  vmov.32 d17[0], r8
+  vmov.32 d17[1], r12
+  vadd.i64 d17, d18
+  vshl.s64 d17, d16
+  vst1.32 d17[0], [r0]!       @ Store the output
+
+  ldr r8, [sp, #40]           @ step_seq2
+  add r2, r8, lsl #1          @ prepare for seq2_ptr(r5) in the next loop.
+
+  subs r4, #1
+  bgt LOOP_DIM_CROSS_CORRELATION
+
+  pop {r4-r11}
+  bx  lr
+
+.fnend
+
+
+@ TODO(kma): Place this piece of reference code into a C code file.
+@ void WebRtcSpl_CrossCorrelation(WebRtc_Word32* cross_correlation,
+@                                 WebRtc_Word16* seq1,
+@                                 WebRtc_Word16* seq2,
+@                                 WebRtc_Word16 dim_seq,
+@                                 WebRtc_Word16 dim_cross_correlation,
+@                                 WebRtc_Word16 right_shifts,
+@                                 WebRtc_Word16 step_seq2) {
+@   int i = 0;
+@   int j = 0;
+@   int inner_loop_len1 = dim_seq >> 3;
+@   int inner_loop_len2 = dim_seq - (inner_loop_len1 << 3);
+@ 
+@   assert(dim_cross_correlation > 0);
+@   assert(dim_seq > 0);
+@ 
+@   for (i = 0; i < dim_cross_correlation; i++) {
+@     int16_t *seq1_ptr = seq1;
+@     int16_t *seq2_ptr = seq2 + (step_seq2 * i);
+@     int64_t sum = 0;
+@ 
+@     for (j = inner_loop_len1; j > 0; j -= 1) {
+@       sum += WEBRTC_SPL_MUL_16_16(*seq1_ptr, *seq2_ptr);
+@       seq1_ptr++;
+@       seq2_ptr++;
+@       sum += WEBRTC_SPL_MUL_16_16(*seq1_ptr, *seq2_ptr);
+@       seq1_ptr++;
+@       seq2_ptr++;
+@       sum += WEBRTC_SPL_MUL_16_16(*seq1_ptr, *seq2_ptr);
+@       seq1_ptr++;
+@       seq2_ptr++;
+@       sum += WEBRTC_SPL_MUL_16_16(*seq1_ptr, *seq2_ptr);
+@       seq1_ptr++;
+@       seq2_ptr++;
+@       sum += WEBRTC_SPL_MUL_16_16(*seq1_ptr, *seq2_ptr);
+@       seq1_ptr++;
+@       seq2_ptr++;
+@       sum += WEBRTC_SPL_MUL_16_16(*seq1_ptr, *seq2_ptr);
+@       seq1_ptr++;
+@       seq2_ptr++;
+@       sum += WEBRTC_SPL_MUL_16_16(*seq1_ptr, *seq2_ptr);
+@       seq1_ptr++;
+@       seq2_ptr++;
+@       sum += WEBRTC_SPL_MUL_16_16(*seq1_ptr, *seq2_ptr);
+@       seq1_ptr++;
+@       seq2_ptr++;
+@     }
+@ 
+@     // Calculate the rest of the samples.
+@     for (j = inner_loop_len2; j > 0; j -= 1) {
+@       sum += WEBRTC_SPL_MUL_16_16(*seq1_ptr, *seq2_ptr);
+@       seq1_ptr++;
+@       seq2_ptr++;
+@     }
+@ 
+@     *cross_correlation++ = (int32_t)(sum >> right_shifts);
+@   }
+@ }
diff --git a/src/common_audio/signal_processing/dot_product_with_scale.c b/src/common_audio/signal_processing/dot_product_with_scale.c
index 6e085fd..4868260 100644
--- a/src/common_audio/signal_processing/dot_product_with_scale.c
+++ b/src/common_audio/signal_processing/dot_product_with_scale.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -8,84 +8,25 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-
-/*
- * This file contains the function WebRtcSpl_DotProductWithScale().
- * The description header can be found in signal_processing_library.h
- *
- */
-
 #include "signal_processing_library.h"
 
-WebRtc_Word32 WebRtcSpl_DotProductWithScale(WebRtc_Word16 *vector1, WebRtc_Word16 *vector2,
-                                            int length, int scaling)
-{
-    WebRtc_Word32 sum;
-    int i;
-#ifdef _ARM_OPT_
-#pragma message("NOTE: _ARM_OPT_ optimizations are used")
-    WebRtc_Word16 len4 = (length >> 2) << 2;
-#endif
+int32_t WebRtcSpl_DotProductWithScale(const int16_t* vector1,
+                                      const int16_t* vector2,
+                                      int length,
+                                      int scaling) {
+  int32_t sum = 0;
+  int i = 0;
 
-    sum = 0;
+  /* Unroll the loop to improve performance. */
+  for (i = 0; i < length - 3; i += 4) {
+    sum += (vector1[i + 0] * vector2[i + 0]) >> scaling;
+    sum += (vector1[i + 1] * vector2[i + 1]) >> scaling;
+    sum += (vector1[i + 2] * vector2[i + 2]) >> scaling;
+    sum += (vector1[i + 3] * vector2[i + 3]) >> scaling;
+  }
+  for (; i < length; i++) {
+    sum += (vector1[i] * vector2[i]) >> scaling;
+  }
 
-#ifndef _ARM_OPT_
-    for (i = 0; i < length; i++)
-    {
-        sum += WEBRTC_SPL_MUL_16_16_RSFT(*vector1++, *vector2++, scaling);
-    }
-#else
-    if (scaling == 0)
-    {
-        for (i = 0; i < len4; i = i + 4)
-        {
-            sum += WEBRTC_SPL_MUL_16_16(*vector1, *vector2);
-            vector1++;
-            vector2++;
-            sum += WEBRTC_SPL_MUL_16_16(*vector1, *vector2);
-            vector1++;
-            vector2++;
-            sum += WEBRTC_SPL_MUL_16_16(*vector1, *vector2);
-            vector1++;
-            vector2++;
-            sum += WEBRTC_SPL_MUL_16_16(*vector1, *vector2);
-            vector1++;
-            vector2++;
-        }
-
-        for (i = len4; i < length; i++)
-        {
-            sum += WEBRTC_SPL_MUL_16_16(*vector1, *vector2);
-            vector1++;
-            vector2++;
-        }
-    }
-    else
-    {
-        for (i = 0; i < len4; i = i + 4)
-        {
-            sum += WEBRTC_SPL_MUL_16_16_RSFT(*vector1, *vector2, scaling);
-            vector1++;
-            vector2++;
-            sum += WEBRTC_SPL_MUL_16_16_RSFT(*vector1, *vector2, scaling);
-            vector1++;
-            vector2++;
-            sum += WEBRTC_SPL_MUL_16_16_RSFT(*vector1, *vector2, scaling);
-            vector1++;
-            vector2++;
-            sum += WEBRTC_SPL_MUL_16_16_RSFT(*vector1, *vector2, scaling);
-            vector1++;
-            vector2++;
-        }
-
-        for (i = len4; i < length; i++)
-        {
-            sum += WEBRTC_SPL_MUL_16_16_RSFT(*vector1, *vector2, scaling);
-            vector1++;
-            vector2++;
-        }
-    }
-#endif
-
-    return sum;
+  return sum;
 }
diff --git a/src/common_audio/signal_processing/downsample_fast.c b/src/common_audio/signal_processing/downsample_fast.c
index cce463c..526cdca 100644
--- a/src/common_audio/signal_processing/downsample_fast.c
+++ b/src/common_audio/signal_processing/downsample_fast.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -8,52 +8,40 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-
-/*
- * This file contains the function WebRtcSpl_DownsampleFast().
- * The description header can be found in signal_processing_library.h
- *
- */
-
 #include "signal_processing_library.h"
 
-int WebRtcSpl_DownsampleFast(WebRtc_Word16 *in_ptr, WebRtc_Word16 in_length,
-                             WebRtc_Word16 *out_ptr, WebRtc_Word16 out_length,
-                             WebRtc_Word16 *B, WebRtc_Word16 B_length, WebRtc_Word16 factor,
-                             WebRtc_Word16 delay)
-{
-    WebRtc_Word32 o;
-    int i, j;
+// TODO(Bjornv): Change the function parameter order to WebRTC code style.
+int WebRtcSpl_DownsampleFast(const int16_t* data_in,
+                             int data_in_length,
+                             int16_t* data_out,
+                             int data_out_length,
+                             const int16_t* __restrict coefficients,
+                             int coefficients_length,
+                             int factor,
+                             int delay) {
+  int i = 0;
+  int j = 0;
+  int32_t out_s32 = 0;
+  int endpos = delay + factor * (data_out_length - 1) + 1;
 
-    WebRtc_Word16 *downsampled_ptr = out_ptr;
-    WebRtc_Word16 *b_ptr;
-    WebRtc_Word16 *x_ptr;
-    WebRtc_Word16 endpos = delay
-            + (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(factor, (out_length - 1)) + 1;
+  // Return error if any of the running conditions doesn't meet.
+  if (data_out_length <= 0 || coefficients_length <= 0
+                           || data_in_length < endpos) {
+    return -1;
+  }
 
-    if (in_length < endpos)
-    {
-        return -1;
+  for (i = delay; i < endpos; i += factor) {
+    out_s32 = 2048;  // Round value, 0.5 in Q12.
+
+    for (j = 0; j < coefficients_length; j++) {
+      out_s32 += coefficients[j] * data_in[i - j];  // Q12.
     }
 
-    for (i = delay; i < endpos; i += factor)
-    {
-        b_ptr = &B[0];
-        x_ptr = &in_ptr[i];
+    out_s32 >>= 12;  // Q0.
 
-        o = (WebRtc_Word32)2048; // Round val
+    // Saturate and store the output.
+    *data_out++ = WebRtcSpl_SatW32ToW16(out_s32);
+  }
 
-        for (j = 0; j < B_length; j++)
-        {
-            o += WEBRTC_SPL_MUL_16_16(*b_ptr++, *x_ptr--);
-        }
-
-        o = WEBRTC_SPL_RSHIFT_W32(o, 12);
-
-        // If output is higher than 32768, saturate it. Same with negative side
-
-        *downsampled_ptr++ = WebRtcSpl_SatW32ToW16(o);
-    }
-
-    return 0;
+  return 0;
 }
diff --git a/src/common_audio/signal_processing/downsample_fast_neon.s b/src/common_audio/signal_processing/downsample_fast_neon.s
new file mode 100644
index 0000000..906b0a1
--- /dev/null
+++ b/src/common_audio/signal_processing/downsample_fast_neon.s
@@ -0,0 +1,222 @@
+@
+@ Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+@
+@ Use of this source code is governed by a BSD-style license
+@ that can be found in the LICENSE file in the root of the source
+@ tree. An additional intellectual property rights grant can be found
+@ in the file PATENTS.  All contributing project authors may
+@ be found in the AUTHORS file in the root of the source tree.
+@
+
+@ This file contains the function WebRtcSpl_DownsampleFast(), optimized for
+@ ARM Neon platform. The description header can be found in
+@ signal_processing_library.h
+@
+@ The reference C code is in file downsample_fast.c. Bit-exact.
+
+.arch armv7-a
+.fpu neon
+
+.align  2
+.global WebRtcSpl_DownsampleFast
+
+WebRtcSpl_DownsampleFast:
+
+.fnstart
+
+.save {r4-r11}
+  push {r4-r11}
+
+  cmp r3, #0                                @ data_out_length <= 0?
+  movle r0, #-1
+  ble END
+
+  ldrsh r12, [sp, #44]
+  ldr r5, [sp, #40]                         @ r5: factor
+  add r4, r12, #1                           @ r4: delay + 1
+  sub r3, r3, #1                            @ r3: data_out_length - 1
+  smulbb r3, r5, r3
+  ldr r8, [sp, #32]                         @ &coefficients[0]
+  mov r9, r12                               @ Iteration counter for outer loops.
+  add r3, r4                                @ delay + factor * (out_length-1) +1
+
+  cmp r3, r1                                @ data_in_length < endpos?
+  movgt r0, #-1
+  bgt END
+
+  @ Initializations.
+  sub r3, r5, asl #3
+  add r11, r0, r12, asl #1                  @ &data_in[delay]
+  ldr r0, [sp, #36]                         @ coefficients_length
+  add r3, r5                                @ endpos - factor * 7
+
+  cmp r0, #0                                @ coefficients_length <= 0 ?
+  movle r0, #-1
+  ble END
+
+  add r8, r0, asl #1                        @ &coeffieient[coefficients_length]
+  cmp r9, r3
+  bge POST_LOOP_ENDPOS                      @ branch when Iteration < 8 times.
+
+@
+@ First part, unroll the loop 8 times, with 3 subcases (factor == 2, 4, others)
+@
+  mov r4, #-2
+
+  @ Direct program flow to the right channel.
+
+  @ r10 is an offset to &data_in[] in the loop. After an iteration, we need to
+  @ move the pointer back to original after advancing 16 bytes by a vld1, and
+  @ then move 2 bytes forward to increment one more sample.
+  cmp r5, #2
+  moveq r10, #-14
+  beq LOOP_ENDPOS_FACTOR2                   @ Branch when factor == 2
+
+  @ Similar here, for r10, we need to move the pointer back to original after
+  @ advancing 32 bytes, then move 2 bytes forward to increment one sample.
+  cmp r5, #4
+  moveq r10, #-30
+  beq LOOP_ENDPOS_FACTOR4                   @ Branch when factor == 4
+
+  @ For r10, we need to move the pointer back to original after advancing
+  @ (factor * 7 * 2) bytes, then move 2 bytes forward to increment one sample.
+  mov r10, r5, asl #4
+  rsb r10, #2
+  add r10, r5, asl #1
+  lsl r5, #1                                @ r5 = factor * sizeof(data_in)
+
+@ The general case (factor != 2 && factor != 4)
+LOOP_ENDPOS_GENERAL:
+  @ Initializations.
+  vmov.i32 q2, #2048
+  vmov.i32 q3, #2048
+  sub r7, r8, #2
+  sub r12, r0, #1                           @ coefficients_length - 1
+  sub r1, r11, r12, asl #1                  @ &data_in[i - j]
+
+LOOP_COEFF_LENGTH_GENERAL:
+  vld1.16 {d2[], d3[]}, [r7], r4            @ coefficients[j]
+  vld1.16 d0[0], [r1], r5                   @ data_in[i - j]
+  vld1.16 d0[1], [r1], r5                   @ data_in[i + factor - j]
+  vld1.16 d0[2], [r1], r5                   @ data_in[i + factor * 2 - j]
+  vld1.16 d0[3], [r1], r5                   @ data_in[i + factor * 3 - j]
+  vld1.16 d1[0], [r1], r5                   @ data_in[i + factor * 4 - j]
+  vld1.16 d1[1], [r1], r5                   @ data_in[i + factor * 5 - j]
+  vld1.16 d1[2], [r1], r5                   @ data_in[i + factor * 6 - j]
+  vld1.16 d1[3], [r1], r10                  @ data_in[i + factor * 7 - j]
+  subs r12, #1
+  vmlal.s16 q2, d0, d2
+  vmlal.s16 q3, d1, d3
+  bge LOOP_COEFF_LENGTH_GENERAL
+
+  @ Shift, saturate, and store the result.
+  vqshrn.s32 d0, q2, #12
+  vqshrn.s32 d1, q3, #12
+  vst1.16 {d0, d1}, [r2]!
+
+  add r11, r5, asl #3                       @ r11 -> &data_in[i + factor * 8]
+  add r9, r5, asl #2                        @ Counter i = delay + factor * 8.
+  cmp r9, r3                                @ i < endpos - factor * 7 ?
+  blt LOOP_ENDPOS_GENERAL
+  asr r5, #1                                @ Restore r5 to the value of factor.
+  b POST_LOOP_ENDPOS
+
+@ The case for factor == 2.
+LOOP_ENDPOS_FACTOR2:
+  @ Initializations.
+  vmov.i32 q2, #2048
+  vmov.i32 q3, #2048
+  sub r7, r8, #2
+  sub r12, r0, #1                           @ coefficients_length - 1
+  sub r1, r11, r12, asl #1                  @ &data_in[i - j]
+
+LOOP_COEFF_LENGTH_FACTOR2:
+  vld1.16 {d16[], d17[]}, [r7], r4          @ coefficients[j]
+  vld2.16 {d0, d1}, [r1]!                   @ data_in[]
+  vld2.16 {d2, d3}, [r1], r10               @ data_in[]
+  subs r12, #1
+  vmlal.s16 q2, d0, d16
+  vmlal.s16 q3, d2, d17
+  bge LOOP_COEFF_LENGTH_FACTOR2
+
+  @ Shift, saturate, and store the result.
+  vqshrn.s32 d0, q2, #12
+  vqshrn.s32 d1, q3, #12
+  vst1.16 {d0, d1}, [r2]!
+
+  add r11, r5, asl #4                       @ r11 -> &data_in[i + factor * 8]
+  add r9, r5, asl #3                        @ Counter i = delay + factor * 8.
+  cmp r9, r3                                @ i < endpos - factor * 7 ?
+  blt LOOP_ENDPOS_FACTOR2
+  b POST_LOOP_ENDPOS
+
+@ The case for factor == 4.
+LOOP_ENDPOS_FACTOR4:
+  @ Initializations.
+  vmov.i32 q2, #2048
+  vmov.i32 q3, #2048
+  sub r7, r8, #2
+  sub r12, r0, #1                           @ coefficients_length - 1
+  sub r1, r11, r12, asl #1                  @ &data_in[i - j]
+
+LOOP_COEFF_LENGTH_FACTOR4:
+  vld1.16 {d16[], d17[]}, [r7], r4          @ coefficients[j]
+  vld4.16 {d0, d1, d2, d3}, [r1]!           @ data_in[]
+  vld4.16 {d18, d19, d20, d21}, [r1], r10   @ data_in[]
+  subs r12, #1
+  vmlal.s16 q2, d0, d16
+  vmlal.s16 q3, d18, d17
+  bge LOOP_COEFF_LENGTH_FACTOR4
+
+  @ Shift, saturate, and store the result.
+  vqshrn.s32 d0, q2, #12
+  vqshrn.s32 d1, q3, #12
+  vst1.16 {d0, d1}, [r2]!
+
+  add r11, r5, asl #4                       @ r11 -> &data_in[i + factor * 8]
+  add r9, r5, asl #3                        @ Counter i = delay + factor * 8.
+  cmp r9, r3                                @ i < endpos - factor * 7 ?
+  blt LOOP_ENDPOS_FACTOR4
+
+@
+@ Second part, do the rest iterations (if any).
+@
+
+POST_LOOP_ENDPOS:
+  add r3, r5, asl #3
+  sub r3, r5                                @ Restore r3 to endpos.
+  cmp r9, r3
+  movge r0, #0
+  bge END
+
+LOOP2_ENDPOS:
+  @ Initializations.
+  mov r7, r8
+  sub r12, r0, #1                           @ coefficients_length - 1
+  sub r6, r11, r12, asl #1                  @ &data_in[i - j]
+
+  mov r1, #2048
+
+LOOP2_COEFF_LENGTH:
+  ldrsh r4, [r7, #-2]!                      @ coefficients[j]
+  ldrsh r10, [r6], #2                       @ data_in[i - j]
+  smlabb r1, r4, r10, r1
+  subs r12, #1
+  bge LOOP2_COEFF_LENGTH
+
+  @ Shift, saturate, and store the result.
+  ssat r1, #16, r1, asr #12
+  strh r1, [r2], #2
+
+  add r11, r5, asl #1                       @ r11 -> &data_in[i + factor]
+  add r9, r5                                @ Counter i = delay + factor.
+  cmp r9, r3                                @ i < endpos?
+  blt LOOP2_ENDPOS
+
+  mov r0, #0
+
+END:
+  pop {r4-r11}
+  bx  lr
+
+.fnend
diff --git a/src/common_audio/signal_processing/filter_ar_fast_q12.c b/src/common_audio/signal_processing/filter_ar_fast_q12.c
index 6184da3..0402302 100644
--- a/src/common_audio/signal_processing/filter_ar_fast_q12.c
+++ b/src/common_audio/signal_processing/filter_ar_fast_q12.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -7,43 +7,37 @@
  *  in the file PATENTS.  All contributing project authors may
  *  be found in the AUTHORS file in the root of the source tree.
  */
-
-
-/*
- * This file contains the function WebRtcSpl_FilterARFastQ12().
- * The description header can be found in signal_processing_library.h
- *
- */
+#include <assert.h>
 
 #include "signal_processing_library.h"
 
-void WebRtcSpl_FilterARFastQ12(WebRtc_Word16 *in, WebRtc_Word16 *out, WebRtc_Word16 *A,
-                               WebRtc_Word16 A_length, WebRtc_Word16 length)
-{
-    WebRtc_Word32 o;
-    int i, j;
+// TODO(bjornv): Change the return type to report errors.
 
-    WebRtc_Word16 *x_ptr = &in[0];
-    WebRtc_Word16 *filtered_ptr = &out[0];
+void WebRtcSpl_FilterARFastQ12(const int16_t* data_in,
+                               int16_t* data_out,
+                               const int16_t* __restrict coefficients,
+                               int coefficients_length,
+                               int data_length) {
+  int i = 0;
+  int j = 0;
 
-    for (i = 0; i < length; i++)
-    {
-        // Calculate filtered[i]
-        G_CONST WebRtc_Word16 *a_ptr = &A[0];
-        WebRtc_Word16 *state_ptr = &out[i - 1];
+  assert(data_length > 0);
+  assert(coefficients_length > 1);
 
-        o = WEBRTC_SPL_MUL_16_16(*x_ptr++, *a_ptr++);
+  for (i = 0; i < data_length; i++) {
+    int32_t output = 0;
+    int32_t sum = 0;
 
-        for (j = 1; j < A_length; j++)
-        {
-            o -= WEBRTC_SPL_MUL_16_16(*a_ptr++,*state_ptr--);
-        }
-
-        // Saturate the output
-        o = WEBRTC_SPL_SAT((WebRtc_Word32)134215679, o, (WebRtc_Word32)-134217728);
-
-        *filtered_ptr++ = (WebRtc_Word16)((o + (WebRtc_Word32)2048) >> 12);
+    for (j = coefficients_length - 1; j > 0; j--) {
+      sum += coefficients[j] * data_out[i - j];
     }
 
-    return;
+    output = coefficients[0] * data_in[i];
+    output -= sum;
+
+    // Saturate and store the output.
+    output = WEBRTC_SPL_SAT(134215679, output, -134217728);
+    data_out[i] = (int16_t)((output + 2048) >> 12);
+  }
 }
+
diff --git a/src/common_audio/signal_processing/filter_ar_fast_q12_armv7.s b/src/common_audio/signal_processing/filter_ar_fast_q12_armv7.s
new file mode 100644
index 0000000..5591bb8
--- /dev/null
+++ b/src/common_audio/signal_processing/filter_ar_fast_q12_armv7.s
@@ -0,0 +1,223 @@
+@
+@ Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+@
+@ Use of this source code is governed by a BSD-style license
+@ that can be found in the LICENSE file in the root of the source
+@ tree. An additional intellectual property rights grant can be found
+@ in the file PATENTS.  All contributing project authors may
+@ be found in the AUTHORS file in the root of the source tree.
+@
+
+@ This file contains the function WebRtcSpl_FilterARFastQ12(), optimized for
+@ ARMv7  platform. The description header can be found in
+@ signal_processing_library.h
+@
+@ Output is bit-exact with the generic C code as in filter_ar_fast_q12.c, and
+@ the reference C code at end of this file.
+
+@ Assumptions:
+@ (1) data_length > 0
+@ (2) coefficients_length > 1
+
+@ Register usage:
+@
+@ r0:  &data_in[i]
+@ r1:  &data_out[i], for result ouput
+@ r2:  &coefficients[0]
+@ r3:  coefficients_length
+@ r4:  Iteration counter for the outer loop.
+@ r5:  data_out[j] as multiplication inputs
+@ r6:  Calculated value for output data_out[]; interation counter for inner loop
+@ r7:  Partial sum of a filtering multiplication results
+@ r8:  Partial sum of a filtering multiplication results
+@ r9:  &data_out[], for filtering input; data_in[i]
+@ r10: coefficients[j]
+@ r11: Scratch
+@ r12: &coefficients[j]
+
+.arch armv7-a
+
+.align  2
+.global WebRtcSpl_FilterARFastQ12
+
+WebRtcSpl_FilterARFastQ12:
+
+.fnstart
+
+.save {r4-r11}
+  push {r4-r11}
+
+  ldrsh r12, [sp, #32]         @ data_length
+  subs r4, r12, #1
+  beq ODD_LENGTH               @ jump if data_length == 1
+
+LOOP_LENGTH:
+  add r12, r2, r3, lsl #1
+  sub r12, #4                  @ &coefficients[coefficients_length - 2]
+  sub r9, r1, r3, lsl #1
+  add r9, #2                   @ &data_out[i - coefficients_length + 1]
+  ldr r5, [r9], #4             @ data_out[i - coefficients_length + {1,2}]
+
+  mov r7, #0                   @ sum1
+  mov r8, #0                   @ sum2
+  subs r6, r3, #3              @ Iteration counter for inner loop.
+  beq ODD_A_LENGTH             @ branch if coefficients_length == 3
+  blt POST_LOOP_A_LENGTH       @ branch if coefficients_length == 2
+
+LOOP_A_LENGTH:
+  ldr r10, [r12], #-4          @ coefficients[j - 1], coefficients[j]
+  subs r6, #2
+  smlatt r8, r10, r5, r8       @ sum2 += coefficients[j] * data_out[i - j + 1];
+  smlatb r7, r10, r5, r7       @ sum1 += coefficients[j] * data_out[i - j];
+  smlabt r7, r10, r5, r7       @ coefficients[j - 1] * data_out[i - j + 1];
+  ldr r5, [r9], #4             @ data_out[i - j + 2],  data_out[i - j + 3]
+  smlabb r8, r10, r5, r8       @ coefficients[j - 1] * data_out[i - j + 2];
+  bgt LOOP_A_LENGTH
+  blt POST_LOOP_A_LENGTH
+
+ODD_A_LENGTH:
+  ldrsh r10, [r12, #2]         @ Filter coefficients coefficients[2]
+  sub r12, #2                  @ &coefficients[0]
+  smlabb r7, r10, r5, r7       @ sum1 += coefficients[2] * data_out[i - 2];
+  smlabt r8, r10, r5, r8       @ sum2 += coefficients[2] * data_out[i - 1];
+  ldr r5, [r9, #-2]            @ data_out[i - 1],  data_out[i]
+
+POST_LOOP_A_LENGTH:
+  ldr r10, [r12]               @ coefficients[0], coefficients[1]
+  smlatb r7, r10, r5, r7       @ sum1 += coefficients[1] * data_out[i - 1];
+
+  ldr r9, [r0], #4             @ data_in[i], data_in[i + 1]
+  smulbb r6, r10, r9           @ output1 = coefficients[0] * data_in[i];
+  sub r6, r7                   @ output1 -= sum1;
+
+  sbfx r11, r6, #12, #16
+  ssat r7, #16, r6, asr #12
+  cmp r7, r11
+  addeq r6, r6, #2048
+  ssat r6, #16, r6, asr #12
+  strh r6, [r1], #2            @ Store data_out[i]
+
+  smlatb r8, r10, r6, r8       @ sum2 += coefficients[1] * data_out[i];
+  smulbt r6, r10, r9           @ output2 = coefficients[0] * data_in[i + 1];
+  sub r6, r8                   @ output1 -= sum1;
+
+  sbfx r11, r6, #12, #16
+  ssat r7, #16, r6, asr #12
+  cmp r7, r11
+  addeq r6, r6, #2048
+  ssat r6, #16, r6, asr #12
+  strh r6, [r1], #2            @ Store data_out[i + 1]
+
+  subs r4, #2
+  bgt LOOP_LENGTH
+  blt END                      @ For even data_length, it's done. Jump to END.
+
+@ Process i = data_length -1, for the case of an odd length.
+ODD_LENGTH:
+  add r12, r2, r3, lsl #1
+  sub r12, #4                  @ &coefficients[coefficients_length - 2]
+  sub r9, r1, r3, lsl #1
+  add r9, #2                   @ &data_out[i - coefficients_length + 1]
+  mov r7, #0                   @ sum1
+  mov r8, #0                   @ sum1
+  subs r6, r3, #2              @ inner loop counter
+  beq EVEN_A_LENGTH            @ branch if coefficients_length == 2
+
+LOOP2_A_LENGTH:
+  ldr r10, [r12], #-4          @ coefficients[j - 1], coefficients[j]
+  ldr r5, [r9], #4             @ data_out[i - j],  data_out[i - j + 1]
+  subs r6, #2
+  smlatb r7, r10, r5, r7       @ sum1 += coefficients[j] * data_out[i - j];
+  smlabt r8, r10, r5, r8       @ coefficients[j - 1] * data_out[i - j + 1];
+  bgt LOOP2_A_LENGTH
+  addlt r12, #2
+  blt POST_LOOP2_A_LENGTH
+
+EVEN_A_LENGTH:
+  ldrsh r10, [r12, #2]         @ Filter coefficients coefficients[1]
+  ldrsh r5, [r9]               @ data_out[i - 1]
+  smlabb r7, r10, r5, r7       @ sum1 += coefficients[1] * data_out[i - 1];
+
+POST_LOOP2_A_LENGTH:
+  ldrsh r10, [r12]             @ Filter coefficients coefficients[0]
+  ldrsh r9, [r0]               @ data_in[i]
+  smulbb r6, r10, r9           @ output1 = coefficients[0] * data_in[i];
+  sub r6, r7                   @ output1 -= sum1;
+  sub r6, r8                   @ output1 -= sum1;
+  sbfx r8, r6, #12, #16
+  ssat r7, #16, r6, asr #12
+  cmp r7, r8
+  addeq r6, r6, #2048
+  ssat r6, #16, r6, asr #12
+  strh r6, [r1]                @ Store the data_out[i]
+
+END:
+  pop {r4-r11}
+  bx  lr
+
+.fnend
+
+
+@Reference C code:
+@
+@void WebRtcSpl_FilterARFastQ12(int16_t* data_in,
+@                               int16_t* data_out,
+@                               int16_t* __restrict coefficients,
+@                               int coefficients_length,
+@                               int data_length) {
+@  int i = 0;
+@  int j = 0;
+@
+@  for (i = 0; i < data_length - 1; i += 2) {
+@    int32_t output1 = 0;
+@    int32_t sum1 = 0;
+@    int32_t output2 = 0;
+@    int32_t sum2 = 0;
+@
+@    for (j = coefficients_length - 1; j > 2; j -= 2) {
+@      sum1 += coefficients[j]      * data_out[i - j];
+@      sum1 += coefficients[j - 1]  * data_out[i - j + 1];
+@      sum2 += coefficients[j]     * data_out[i - j + 1];
+@      sum2 += coefficients[j - 1] * data_out[i - j + 2];
+@    }
+@
+@    if (j == 2) {
+@      sum1 += coefficients[2] * data_out[i - 2];
+@      sum2 += coefficients[2] * data_out[i - 1];
+@    }
+@
+@    sum1 += coefficients[1] * data_out[i - 1];
+@    output1 = coefficients[0] * data_in[i];
+@    output1 -= sum1;
+@    // Saturate and store the output.
+@    output1 = WEBRTC_SPL_SAT(134215679, output1, -134217728);
+@    data_out[i] = (int16_t)((output1 + 2048) >> 12);
+@
+@    sum2 += coefficients[1] * data_out[i];
+@    output2 = coefficients[0] * data_in[i + 1];
+@    output2 -= sum2;
+@    // Saturate and store the output.
+@    output2 = WEBRTC_SPL_SAT(134215679, output2, -134217728);
+@    data_out[i + 1] = (int16_t)((output2 + 2048) >> 12);
+@  }
+@
+@  if (i == data_length - 1) {
+@    int32_t output1 = 0;
+@    int32_t sum1 = 0;
+@
+@    for (j = coefficients_length - 1; j > 1; j -= 2) {
+@      sum1 += coefficients[j]      * data_out[i - j];
+@      sum1 += coefficients[j - 1]  * data_out[i - j + 1];
+@    }
+@
+@    if (j == 1) {
+@      sum1 += coefficients[1] * data_out[i - 1];
+@    }
+@
+@    output1 = coefficients[0] * data_in[i];
+@    output1 -= sum1;
+@    // Saturate and store the output.
+@    output1 = WEBRTC_SPL_SAT(134215679, output1, -134217728);
+@    data_out[i] = (int16_t)((output1 + 2048) >> 12);
+@  }
+@}
diff --git a/src/common_audio/signal_processing/ilbc_specific_functions.c b/src/common_audio/signal_processing/ilbc_specific_functions.c
index 5a9e577..3588ba4 100644
--- a/src/common_audio/signal_processing/ilbc_specific_functions.c
+++ b/src/common_audio/signal_processing/ilbc_specific_functions.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -11,35 +11,16 @@
 
 /*
  * This file contains implementations of the iLBC specific functions
- * WebRtcSpl_ScaleAndAddVectorsWithRound()
  * WebRtcSpl_ReverseOrderMultArrayElements()
  * WebRtcSpl_ElementwiseVectorMult()
  * WebRtcSpl_AddVectorsAndShift()
  * WebRtcSpl_AddAffineVectorToVector()
  * WebRtcSpl_AffineTransformVector()
  *
- * The description header can be found in signal_processing_library.h
- *
  */
 
 #include "signal_processing_library.h"
 
-void WebRtcSpl_ScaleAndAddVectorsWithRound(WebRtc_Word16 *vector1, WebRtc_Word16 scale1,
-                                           WebRtc_Word16 *vector2, WebRtc_Word16 scale2,
-                                           WebRtc_Word16 right_shifts, WebRtc_Word16 *out,
-                                           WebRtc_Word16 vector_length)
-{
-    int i;
-    WebRtc_Word16 roundVal;
-    roundVal = 1 << right_shifts;
-    roundVal = roundVal >> 1;
-    for (i = 0; i < vector_length; i++)
-    {
-        out[i] = (WebRtc_Word16)((WEBRTC_SPL_MUL_16_16(vector1[i], scale1)
-                + WEBRTC_SPL_MUL_16_16(vector2[i], scale2) + roundVal) >> right_shifts);
-    }
-}
-
 void WebRtcSpl_ReverseOrderMultArrayElements(WebRtc_Word16 *out, G_CONST WebRtc_Word16 *in,
                                              G_CONST WebRtc_Word16 *win,
                                              WebRtc_Word16 vector_length,
diff --git a/src/common_audio/signal_processing/include/signal_processing_library.h b/src/common_audio/signal_processing/include/signal_processing_library.h
index 651a68c..1da9078 100644
--- a/src/common_audio/signal_processing/include/signal_processing_library.h
+++ b/src/common_audio/signal_processing/include/signal_processing_library.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -34,30 +34,24 @@
 #define WEBRTC_SPL_MAX_SEED_USED    0x80000000L
 #define WEBRTC_SPL_MIN(A, B)        (A < B ? A : B) // Get min value
 #define WEBRTC_SPL_MAX(A, B)        (A > B ? A : B) // Get max value
+// TODO(kma/bjorn): For the next two macros, investigate how to correct the code
+// for inputs of a = WEBRTC_SPL_WORD16_MIN or WEBRTC_SPL_WORD32_MIN.
 #define WEBRTC_SPL_ABS_W16(a) \
     (((WebRtc_Word16)a >= 0) ? ((WebRtc_Word16)a) : -((WebRtc_Word16)a))
 #define WEBRTC_SPL_ABS_W32(a) \
     (((WebRtc_Word32)a >= 0) ? ((WebRtc_Word32)a) : -((WebRtc_Word32)a))
 
-#if (defined WEBRTC_TARGET_PC)||(defined __TARGET_XSCALE)
+#ifdef WEBRTC_ARCH_LITTLE_ENDIAN
 #define WEBRTC_SPL_GET_BYTE(a, nr)  (((WebRtc_Word8 *)a)[nr])
 #define WEBRTC_SPL_SET_BYTE(d_ptr, val, index) \
     (((WebRtc_Word8 *)d_ptr)[index] = (val))
-#elif defined WEBRTC_BIG_ENDIAN
+#else
 #define WEBRTC_SPL_GET_BYTE(a, nr) \
     ((((WebRtc_Word16 *)a)[nr >> 1]) >> (((nr + 1) & 0x1) * 8) & 0x00ff)
 #define WEBRTC_SPL_SET_BYTE(d_ptr, val, index) \
     ((WebRtc_Word16 *)d_ptr)[index >> 1] = \
     ((((WebRtc_Word16 *)d_ptr)[index >> 1]) \
     & (0x00ff << (8 * ((index) & 0x1)))) | (val << (8 * ((index + 1) & 0x1)))
-#else
-#define WEBRTC_SPL_GET_BYTE(a,nr) \
-    ((((WebRtc_Word16 *)(a))[(nr) >> 1]) >> (((nr) & 0x1) * 8) & 0x00ff)
-#define WEBRTC_SPL_SET_BYTE(d_ptr, val, index) \
-    ((WebRtc_Word16 *)(d_ptr))[(index) >> 1] = \
-    ((((WebRtc_Word16 *)(d_ptr))[(index) >> 1]) \
-    & (0x00ff << (8 * (((index) + 1) & 0x1)))) | \
-    ((val) << (8 * ((index) & 0x1)))
 #endif
 
 #define WEBRTC_SPL_MUL(a, b) \
@@ -81,7 +75,7 @@
 #define WEBRTC_SPL_UDIV(a, b) \
     ((WebRtc_UWord32) ((WebRtc_UWord32)(a) / (WebRtc_UWord32)(b)))
 
-#ifndef WEBRTC_ARCH_ARM_V7A
+#ifndef WEBRTC_ARCH_ARM_V7
 // For ARMv7 platforms, these are inline functions in spl_inl_armv7.h
 #define WEBRTC_SPL_MUL_16_16(a, b) \
     ((WebRtc_Word32) (((WebRtc_Word16)(a)) * ((WebRtc_Word16)(b))))
@@ -210,33 +204,137 @@
                                      WebRtc_Word16 vector_length);
 // End: Copy and set operations.
 
-// Minimum and maximum operations. Implementation in min_max_operations.c.
-// Descriptions at bottom of file.
-WebRtc_Word16 WebRtcSpl_MaxAbsValueW16(const WebRtc_Word16* vector,
-                                       WebRtc_Word16 length);
-WebRtc_Word32 WebRtcSpl_MaxAbsValueW32(G_CONST WebRtc_Word32* vector,
-                                       WebRtc_Word16 length);
-WebRtc_Word16 WebRtcSpl_MinValueW16(G_CONST WebRtc_Word16* vector,
-                                    WebRtc_Word16 length);
-WebRtc_Word32 WebRtcSpl_MinValueW32(G_CONST WebRtc_Word32* vector,
-                                    WebRtc_Word16 length);
-WebRtc_Word16 WebRtcSpl_MaxValueW16(G_CONST WebRtc_Word16* vector,
-                                    WebRtc_Word16 length);
 
-WebRtc_Word16 WebRtcSpl_MaxAbsIndexW16(G_CONST WebRtc_Word16* vector,
-                                       WebRtc_Word16 length);
-WebRtc_Word32 WebRtcSpl_MaxValueW32(G_CONST WebRtc_Word32* vector,
-                                    WebRtc_Word16 length);
-WebRtc_Word16 WebRtcSpl_MinIndexW16(G_CONST WebRtc_Word16* vector,
-                                    WebRtc_Word16 length);
-WebRtc_Word16 WebRtcSpl_MinIndexW32(G_CONST WebRtc_Word32* vector,
-                                    WebRtc_Word16 length);
-WebRtc_Word16 WebRtcSpl_MaxIndexW16(G_CONST WebRtc_Word16* vector,
-                                    WebRtc_Word16 length);
-WebRtc_Word16 WebRtcSpl_MaxIndexW32(G_CONST WebRtc_Word32* vector,
-                                    WebRtc_Word16 length);
+// Minimum and maximum operations. Implementation in min_max_operations.c.
+
+// Returns the largest absolute value in a signed 16-bit vector.
+//
+// Input:
+//      - vector : 16-bit input vector.
+//      - length : Number of samples in vector.
+//
+// Return value  : Maximum absolute value in vector;
+//                 or -1, if (vector == NULL || length <= 0).
+int16_t WebRtcSpl_MaxAbsValueW16(const int16_t* vector, int length);
+
+// Returns the largest absolute value in a signed 32-bit vector.
+//
+// Input:
+//      - vector : 32-bit input vector.
+//      - length : Number of samples in vector.
+//
+// Return value  : Maximum absolute value in vector;
+//                 or -1, if (vector == NULL || length <= 0).
+int32_t WebRtcSpl_MaxAbsValueW32(const int32_t* vector, int length);
+
+// Returns the maximum value of a 16-bit vector.
+//
+// Input:
+//      - vector : 16-bit input vector.
+//      - length : Number of samples in vector.
+//
+// Return value  : Maximum sample value in |vector|.
+//                 If (vector == NULL || length <= 0) WEBRTC_SPL_WORD16_MIN
+//                 is returned. Note that WEBRTC_SPL_WORD16_MIN is a feasible
+//                 value and we can't catch errors purely based on it.
+int16_t WebRtcSpl_MaxValueW16(const int16_t* vector, int length);
+
+// Returns the maximum value of a 32-bit vector.
+//
+// Input:
+//      - vector : 32-bit input vector.
+//      - length : Number of samples in vector.
+//
+// Return value  : Maximum sample value in |vector|.
+//                 If (vector == NULL || length <= 0) WEBRTC_SPL_WORD32_MIN
+//                 is returned. Note that WEBRTC_SPL_WORD32_MIN is a feasible
+//                 value and we can't catch errors purely based on it.
+int32_t WebRtcSpl_MaxValueW32(const int32_t* vector, int length);
+
+// Returns the minimum value of a 16-bit vector.
+//
+// Input:
+//      - vector : 16-bit input vector.
+//      - length : Number of samples in vector.
+//
+// Return value  : Minimum sample value in |vector|.
+//                 If (vector == NULL || length <= 0) WEBRTC_SPL_WORD16_MAX
+//                 is returned. Note that WEBRTC_SPL_WORD16_MAX is a feasible
+//                 value and we can't catch errors purely based on it.
+int16_t WebRtcSpl_MinValueW16(const int16_t* vector, int length);
+
+// Returns the minimum value of a 32-bit vector.
+//
+// Input:
+//      - vector : 32-bit input vector.
+//      - length : Number of samples in vector.
+//
+// Return value  : Minimum sample value in |vector|.
+//                 If (vector == NULL || length <= 0) WEBRTC_SPL_WORD32_MAX
+//                 is returned. Note that WEBRTC_SPL_WORD32_MAX is a feasible
+//                 value and we can't catch errors purely based on it.
+int32_t WebRtcSpl_MinValueW32(const int32_t* vector, int length);
+
+// Returns the vector index to the largest absolute value of a 16-bit vector.
+//
+// Input:
+//      - vector : 16-bit input vector.
+//      - length : Number of samples in vector.
+//
+// Return value  : Index to the maximum absolute value in vector, or -1,
+//                 if (vector == NULL || length <= 0).
+//                 If there are multiple equal maxima, return the index of the
+//                 first. -32768 will always have precedence over 32767 (despite
+//                 -32768 presenting an int16 absolute value of 32767);
+int WebRtcSpl_MaxAbsIndexW16(const int16_t* vector, int length);
+
+// Returns the vector index to the maximum sample value of a 16-bit vector.
+//
+// Input:
+//      - vector : 16-bit input vector.
+//      - length : Number of samples in vector.
+//
+// Return value  : Index to the maximum value in vector (if multiple
+//                 indexes have the maximum, return the first);
+//                 or -1, if (vector == NULL || length <= 0).
+int WebRtcSpl_MaxIndexW16(const int16_t* vector, int length);
+
+// Returns the vector index to the maximum sample value of a 32-bit vector.
+//
+// Input:
+//      - vector : 32-bit input vector.
+//      - length : Number of samples in vector.
+//
+// Return value  : Index to the maximum value in vector (if multiple
+//                 indexes have the maximum, return the first);
+//                 or -1, if (vector == NULL || length <= 0).
+int WebRtcSpl_MaxIndexW32(const int32_t* vector, int length);
+
+// Returns the vector index to the minimum sample value of a 16-bit vector.
+//
+// Input:
+//      - vector : 16-bit input vector.
+//      - length : Number of samples in vector.
+//
+// Return value  : Index to the mimimum value in vector  (if multiple
+//                 indexes have the minimum, return the first);
+//                 or -1, if (vector == NULL || length <= 0).
+int WebRtcSpl_MinIndexW16(const int16_t* vector, int length);
+
+// Returns the vector index to the minimum sample value of a 32-bit vector.
+//
+// Input:
+//      - vector : 32-bit input vector.
+//      - length : Number of samples in vector.
+//
+// Return value  : Index to the mimimum value in vector  (if multiple
+//                 indexes have the minimum, return the first);
+//                 or -1, if (vector == NULL || length <= 0).
+int WebRtcSpl_MinIndexW32(const int32_t* vector, int length);
+
 // End: Minimum and maximum operations.
 
+
 // Vector scaling operations. Implementation in vector_scaling_operations.c.
 // Description at bottom of file.
 void WebRtcSpl_VectorBitShiftW16(WebRtc_Word16* out_vector,
@@ -268,17 +366,37 @@
                                   WebRtc_Word16 gain2, int right_shifts2,
                                   WebRtc_Word16* out_vector,
                                   int vector_length);
+
+// Performs the vector operation:
+//   out_vector[k] = ((scale1 * in_vector1[k]) + (scale2 * in_vector2[k])
+//        + round_value) >> right_shifts,
+//   where  round_value = (1 << right_shifts) >> 1.
+//
+// Input:
+//      - in_vector1       : Input vector 1
+//      - in_vector1_scale : Gain to be used for vector 1
+//      - in_vector2       : Input vector 2
+//      - in_vector2_scale : Gain to be used for vector 2
+//      - right_shifts     : Number of right bit shifts to be applied
+//      - length           : Number of elements in the input vectors
+//
+// Output:
+//      - out_vector       : Output vector
+// Return value            : 0 if OK, -1 if (in_vector1 == NULL
+//                           || in_vector2 == NULL || out_vector == NULL
+//                           || length <= 0 || right_shift < 0).
+int WebRtcSpl_ScaleAndAddVectorsWithRound(const int16_t* in_vector1,
+                                          int16_t in_vector1_scale,
+                                          const int16_t* in_vector2,
+                                          int16_t in_vector2_scale,
+                                          int right_shifts,
+                                          int16_t* out_vector,
+                                          int length);
+
 // End: Vector scaling operations.
 
 // iLBC specific functions. Implementations in ilbc_specific_functions.c.
 // Description at bottom of file.
-void WebRtcSpl_ScaleAndAddVectorsWithRound(WebRtc_Word16* in_vector1,
-                                           WebRtc_Word16 scale1,
-                                           WebRtc_Word16* in_vector2,
-                                           WebRtc_Word16 scale2,
-                                           WebRtc_Word16 right_shifts,
-                                           WebRtc_Word16* out_vector,
-                                           WebRtc_Word16 vector_length);
 void WebRtcSpl_ReverseOrderMultArrayElements(WebRtc_Word16* out_vector,
                                              G_CONST WebRtc_Word16* in_vector,
                                              G_CONST WebRtc_Word16* window,
@@ -308,32 +426,146 @@
                                      int vector_length);
 // End: iLBC specific functions.
 
-// Signal processing operations. Descriptions at bottom of this file.
-int WebRtcSpl_AutoCorrelation(G_CONST WebRtc_Word16* vector,
-                              int vector_length, int order,
-                              WebRtc_Word32* result_vector,
+// Signal processing operations.
+
+// A 32-bit fix-point implementation of auto-correlation computation
+//
+// Input:
+//      - in_vector        : Vector to calculate autocorrelation upon
+//      - in_vector_length : Length (in samples) of |vector|
+//      - order            : The order up to which the autocorrelation should be
+//                           calculated
+//
+// Output:
+//      - result           : auto-correlation values (values should be seen
+//                           relative to each other since the absolute values
+//                           might have been down shifted to avoid overflow)
+//
+//      - scale            : The number of left shifts required to obtain the
+//                           auto-correlation in Q0
+//
+// Return value            :
+//      - -1, if |order| > |in_vector_length|;
+//      - Number of samples in |result|, i.e. (order+1), otherwise.
+int WebRtcSpl_AutoCorrelation(const int16_t* in_vector,
+                              int in_vector_length,
+                              int order,
+                              int32_t* result,
                               int* scale);
+
+// A 32-bit fix-point implementation of the Levinson-Durbin algorithm that
+// does NOT use the 64 bit class
+//
+// Input:
+//      - auto_corr : Vector with autocorrelation values of length >=
+//                    |use_order|+1
+//      - use_order : The LPC filter order (support up to order 20)
+//
+// Output:
+//      - lpc_coef  : lpc_coef[0..use_order] LPC coefficients in Q12
+//      - refl_coef : refl_coef[0...use_order-1]| Reflection coefficients in
+//                    Q15
+//
+// Return value     : 1 for stable 0 for unstable
 WebRtc_Word16 WebRtcSpl_LevinsonDurbin(WebRtc_Word32* auto_corr,
                                        WebRtc_Word16* lpc_coef,
                                        WebRtc_Word16* refl_coef,
                                        WebRtc_Word16 order);
+
+// Converts reflection coefficients |refl_coef| to LPC coefficients |lpc_coef|.
+// This version is a 16 bit operation.
+//
+// NOTE: The 16 bit refl_coef -> lpc_coef conversion might result in a
+// "slightly unstable" filter (i.e., a pole just outside the unit circle) in
+// "rare" cases even if the reflection coefficients are stable.
+//
+// Input:
+//      - refl_coef : Reflection coefficients in Q15 that should be converted
+//                    to LPC coefficients
+//      - use_order : Number of coefficients in |refl_coef|
+//
+// Output:
+//      - lpc_coef  : LPC coefficients in Q12
 void WebRtcSpl_ReflCoefToLpc(G_CONST WebRtc_Word16* refl_coef,
                              int use_order,
                              WebRtc_Word16* lpc_coef);
+
+// Converts LPC coefficients |lpc_coef| to reflection coefficients |refl_coef|.
+// This version is a 16 bit operation.
+// The conversion is implemented by the step-down algorithm.
+//
+// Input:
+//      - lpc_coef  : LPC coefficients in Q12, that should be converted to
+//                    reflection coefficients
+//      - use_order : Number of coefficients in |lpc_coef|
+//
+// Output:
+//      - refl_coef : Reflection coefficients in Q15.
 void WebRtcSpl_LpcToReflCoef(WebRtc_Word16* lpc_coef,
                              int use_order,
                              WebRtc_Word16* refl_coef);
+
+// Calculates reflection coefficients (16 bit) from auto-correlation values
+//
+// Input:
+//      - auto_corr : Auto-correlation values
+//      - use_order : Number of coefficients wanted be calculated
+//
+// Output:
+//      - refl_coef : Reflection coefficients in Q15.
 void WebRtcSpl_AutoCorrToReflCoef(G_CONST WebRtc_Word32* auto_corr,
                                   int use_order,
                                   WebRtc_Word16* refl_coef);
-void WebRtcSpl_CrossCorrelation(WebRtc_Word32* cross_corr,
-                                WebRtc_Word16* vector1,
-                                WebRtc_Word16* vector2,
-                                WebRtc_Word16 dim_vector,
-                                WebRtc_Word16 dim_cross_corr,
-                                WebRtc_Word16 right_shifts,
-                                WebRtc_Word16 step_vector2);
+
+// Calculates the cross-correlation between two sequences |seq1| and |seq2|.
+// |seq1| is fixed and |seq2| slides as the pointer is increased with the
+// amount |step_seq2|. Note the arguments should obey the relationship:
+// |dim_seq| - 1 + |step_seq2| * (|dim_cross_correlation| - 1) <
+//      buffer size of |seq2|
+//
+// Input:
+//      - seq1           : First sequence (fixed throughout the correlation)
+//      - seq2           : Second sequence (slides |step_vector2| for each
+//                            new correlation)
+//      - dim_seq        : Number of samples to use in the cross-correlation
+//      - dim_cross_correlation : Number of cross-correlations to calculate (the
+//                            start position for |vector2| is updated for each
+//                            new one)
+//      - right_shifts   : Number of right bit shifts to use. This will
+//                            become the output Q-domain.
+//      - step_seq2      : How many (positive or negative) steps the
+//                            |vector2| pointer should be updated for each new
+//                            cross-correlation value.
+//
+// Output:
+//      - cross_correlation : The cross-correlation in Q(-right_shifts)
+void WebRtcSpl_CrossCorrelation(int32_t* cross_correlation,
+                                const int16_t* seq1,
+                                const int16_t* seq2,
+                                int16_t dim_seq,
+                                int16_t dim_cross_correlation,
+                                int16_t right_shifts,
+                                int16_t step_seq2);
+
+// Creates (the first half of) a Hanning window. Size must be at least 1 and
+// at most 512.
+//
+// Input:
+//      - size      : Length of the requested Hanning window (1 to 512)
+//
+// Output:
+//      - window    : Hanning vector in Q14.
 void WebRtcSpl_GetHanningWindow(WebRtc_Word16* window, WebRtc_Word16 size);
+
+// Calculates y[k] = sqrt(1 - x[k]^2) for each element of the input vector
+// |in_vector|. Input and output values are in Q15.
+//
+// Inputs:
+//      - in_vector     : Values to calculate sqrt(1 - x^2) of
+//      - vector_length : Length of vector |in_vector|
+//
+// Output:
+//      - out_vector    : Output values in Q15
 void WebRtcSpl_SqrtOfOneMinusXSquared(WebRtc_Word16* in_vector,
                                       int vector_length,
                                       WebRtc_Word16* out_vector);
@@ -367,10 +599,21 @@
                                int vector_length,
                                int* scale_factor);
 
-WebRtc_Word32 WebRtcSpl_DotProductWithScale(WebRtc_Word16* vector1,
-                                            WebRtc_Word16* vector2,
-                                            int vector_length,
-                                            int scaling);
+// Calculates the dot product between two (WebRtc_Word16) vectors.
+//
+// Input:
+//      - vector1       : Vector 1
+//      - vector2       : Vector 2
+//      - vector_length : Number of samples used in the dot product
+//      - scaling       : The number of right bit shifts to apply on each term
+//                        during calculation to avoid overflow, i.e., the
+//                        output will be in Q(-|scaling|)
+//
+// Return value         : The dot product in Q(-scaling)
+int32_t WebRtcSpl_DotProductWithScale(const int16_t* vector1,
+                                      const int16_t* vector2,
+                                      int length,
+                                      int scaling);
 
 // Filter operations.
 int WebRtcSpl_FilterAR(G_CONST WebRtc_Word16* ar_coef, int ar_coef_length,
@@ -385,25 +628,70 @@
                                WebRtc_Word16* ma_coef,
                                WebRtc_Word16 ma_coef_length,
                                WebRtc_Word16 vector_length);
-void WebRtcSpl_FilterARFastQ12(WebRtc_Word16* in_vector,
-                               WebRtc_Word16* out_vector,
-                               WebRtc_Word16* ar_coef,
-                               WebRtc_Word16 ar_coef_length,
-                               WebRtc_Word16 vector_length);
-int WebRtcSpl_DownsampleFast(WebRtc_Word16* in_vector,
-                             WebRtc_Word16 in_vector_length,
-                             WebRtc_Word16* out_vector,
-                             WebRtc_Word16 out_vector_length,
-                             WebRtc_Word16* ma_coef,
-                             WebRtc_Word16 ma_coef_length,
-                             WebRtc_Word16 factor,
-                             WebRtc_Word16 delay);
+
+// Performs a AR filtering on a vector in Q12
+// Input:
+//      - data_in            : Input samples
+//      - data_out           : State information in positions
+//                               data_out[-order] .. data_out[-1]
+//      - coefficients       : Filter coefficients (in Q12)
+//      - coefficients_length: Number of coefficients (order+1)
+//      - data_length        : Number of samples to be filtered
+// Output:
+//      - data_out           : Filtered samples
+void WebRtcSpl_FilterARFastQ12(const int16_t* data_in,
+                               int16_t* data_out,
+                               const int16_t* __restrict coefficients,
+                               int coefficients_length,
+                               int data_length);
+
+// Performs a MA down sampling filter on a vector
+// Input:
+//      - data_in            : Input samples (state in positions
+//                               data_in[-order] .. data_in[-1])
+//      - data_in_length     : Number of samples in |data_in| to be filtered.
+//                               This must be at least
+//                               |delay| + |factor|*(|out_vector_length|-1) + 1)
+//      - data_out_length    : Number of down sampled samples desired
+//      - coefficients       : Filter coefficients (in Q12)
+//      - coefficients_length: Number of coefficients (order+1)
+//      - factor             : Decimation factor
+//      - delay              : Delay of filter (compensated for in out_vector)
+// Output:
+//      - data_out           : Filtered samples
+// Return value              : 0 if OK, -1 if |in_vector| is too short
+int WebRtcSpl_DownsampleFast(const int16_t* data_in,
+                             int data_in_length,
+                             int16_t* data_out,
+                             int data_out_length,
+                             const int16_t* __restrict coefficients,
+                             int coefficients_length,
+                             int factor,
+                             int delay);
+
 // End: Filter operations.
 
 // FFT operations
+
 int WebRtcSpl_ComplexFFT(WebRtc_Word16 vector[], int stages, int mode);
 int WebRtcSpl_ComplexIFFT(WebRtc_Word16 vector[], int stages, int mode);
-void WebRtcSpl_ComplexBitReverse(WebRtc_Word16 vector[], int stages);
+
+// Treat a 16-bit complex data buffer |complex_data| as an array of 32-bit
+// values, and swap elements whose indexes are bit-reverses of each other.
+//
+// Input:
+//      - complex_data  : Complex data buffer containing 2^|stages| real
+//                        elements interleaved with 2^|stages| imaginary
+//                        elements: [Re Im Re Im Re Im....]
+//      - stages        : Number of FFT stages. Must be at least 3 and at most
+//                        10, since the table WebRtcSpl_kSinTable1024[] is 1024
+//                        elements long.
+//
+// Output:
+//      - complex_data  : The complex data buffer.
+
+void WebRtcSpl_ComplexBitReverse(int16_t* __restrict complex_data, int stages);
+
 // End: FFT operations
 
 /************************************************************
@@ -785,83 +1073,6 @@
 //
 
 //
-// WebRtcSpl_MinValueW16(...)
-// WebRtcSpl_MinValueW32(...)
-//
-// Returns the minimum value of a vector
-//
-// Input:
-//      - vector        : Input vector
-//      - vector_length : Number of samples in vector
-//
-// Return value         : Minimum sample value in vector
-//
-
-//
-// WebRtcSpl_MaxValueW16(...)
-// WebRtcSpl_MaxValueW32(...)
-//
-// Returns the maximum value of a vector
-//
-// Input:
-//      - vector        : Input vector
-//      - vector_length : Number of samples in vector
-//
-// Return value         : Maximum sample value in vector
-//
-
-//
-// WebRtcSpl_MaxAbsValueW16(...)
-// WebRtcSpl_MaxAbsValueW32(...)
-//
-// Returns the largest absolute value of a vector
-//
-// Input:
-//      - vector        : Input vector
-//      - vector_length : Number of samples in vector
-//
-// Return value         : Maximum absolute value in vector
-//
-
-//
-// WebRtcSpl_MaxAbsIndexW16(...)
-//
-// Returns the vector index to the largest absolute value of a vector
-//
-// Input:
-//      - vector        : Input vector
-//      - vector_length : Number of samples in vector
-//
-// Return value         : Index to maximum absolute value in vector
-//
-
-//
-// WebRtcSpl_MinIndexW16(...)
-// WebRtcSpl_MinIndexW32(...)
-//
-// Returns the vector index to the minimum sample value of a vector
-//
-// Input:
-//      - vector        : Input vector
-//      - vector_length : Number of samples in vector
-//
-// Return value         : Index to minimum sample value in vector
-//
-
-//
-// WebRtcSpl_MaxIndexW16(...)
-// WebRtcSpl_MaxIndexW32(...)
-//
-// Returns the vector index to the maximum sample value of a vector
-//
-// Input:
-//      - vector        : Input vector
-//      - vector_length : Number of samples in vector
-//
-// Return value         : Index to maximum sample value in vector
-//
-
-//
 // WebRtcSpl_VectorBitShiftW16(...)
 // WebRtcSpl_VectorBitShiftW32(...)
 //
@@ -949,30 +1160,6 @@
 //
 
 //
-// WebRtcSpl_ScaleAndAddVectorsWithRound(...)
-//
-// Performs the vector operation:
-//
-//  out_vector[k] = ((scale1*in_vector1[k]) + (scale2*in_vector2[k])
-//                      + round_value) >> right_shifts
-//
-//      where:
-//
-//  round_value = (1<<right_shifts)>>1
-//
-// Input:
-//      - in_vector1    : Input vector 1
-//      - scale1        : Gain to be used for vector 1
-//      - in_vector2    : Input vector 2
-//      - scale2        : Gain to be used for vector 2
-//      - right_shifts  : Number of right bit shifts to be applied
-//      - vector_length : Number of elements in the input vectors
-//
-// Output:
-//      - out_vector    : Output vector
-//
-
-//
 // WebRtcSpl_ReverseOrderMultArrayElements(...)
 //
 // Performs the vector operation:
@@ -1062,147 +1249,6 @@
 //
 
 //
-// WebRtcSpl_AutoCorrelation(...)
-//
-// A 32-bit fix-point implementation of auto-correlation computation
-//
-// Input:
-//      - vector        : Vector to calculate autocorrelation upon
-//      - vector_length : Length (in samples) of |vector|
-//      - order         : The order up to which the autocorrelation should be
-//                        calculated
-//
-// Output:
-//      - result_vector : auto-correlation values (values should be seen
-//                        relative to each other since the absolute values
-//                        might have been down shifted to avoid overflow)
-//
-//      - scale         : The number of left shifts required to obtain the
-//                        auto-correlation in Q0
-//
-// Return value         : Number of samples in |result_vector|, i.e., (order+1)
-//
-
-//
-// WebRtcSpl_LevinsonDurbin(...)
-//
-// A 32-bit fix-point implementation of the Levinson-Durbin algorithm that
-// does NOT use the 64 bit class
-//
-// Input:
-//      - auto_corr : Vector with autocorrelation values of length >=
-//                    |use_order|+1
-//      - use_order : The LPC filter order (support up to order 20)
-//
-// Output:
-//      - lpc_coef  : lpc_coef[0..use_order] LPC coefficients in Q12
-//      - refl_coef : refl_coef[0...use_order-1]| Reflection coefficients in
-//                    Q15
-//
-// Return value     : 1 for stable 0 for unstable
-//
-
-//
-// WebRtcSpl_ReflCoefToLpc(...)
-//
-// Converts reflection coefficients |refl_coef| to LPC coefficients |lpc_coef|.
-// This version is a 16 bit operation.
-//
-// NOTE: The 16 bit refl_coef -> lpc_coef conversion might result in a
-// "slightly unstable" filter (i.e., a pole just outside the unit circle) in
-// "rare" cases even if the reflection coefficients are stable.
-//
-// Input:
-//      - refl_coef : Reflection coefficients in Q15 that should be converted
-//                    to LPC coefficients
-//      - use_order : Number of coefficients in |refl_coef|
-//
-// Output:
-//      - lpc_coef  : LPC coefficients in Q12
-//
-
-//
-// WebRtcSpl_LpcToReflCoef(...)
-//
-// Converts LPC coefficients |lpc_coef| to reflection coefficients |refl_coef|.
-// This version is a 16 bit operation.
-// The conversion is implemented by the step-down algorithm.
-//
-// Input:
-//      - lpc_coef  : LPC coefficients in Q12, that should be converted to
-//                    reflection coefficients
-//      - use_order : Number of coefficients in |lpc_coef|
-//
-// Output:
-//      - refl_coef : Reflection coefficients in Q15.
-//
-
-//
-// WebRtcSpl_AutoCorrToReflCoef(...)
-//
-// Calculates reflection coefficients (16 bit) from auto-correlation values
-//
-// Input:
-//      - auto_corr : Auto-correlation values
-//      - use_order : Number of coefficients wanted be calculated
-//
-// Output:
-//      - refl_coef : Reflection coefficients in Q15.
-//
-
-//
-// WebRtcSpl_CrossCorrelation(...)
-//
-// Calculates the cross-correlation between two sequences |vector1| and
-// |vector2|. |vector1| is fixed and |vector2| slides as the pointer is
-// increased with the amount |step_vector2|
-//
-// Input:
-//      - vector1           : First sequence (fixed throughout the correlation)
-//      - vector2           : Second sequence (slides |step_vector2| for each
-//                            new correlation)
-//      - dim_vector        : Number of samples to use in the cross-correlation
-//      - dim_cross_corr    : Number of cross-correlations to calculate (the
-//                            start position for |vector2| is updated for each
-//                            new one)
-//      - right_shifts      : Number of right bit shifts to use. This will
-//                            become the output Q-domain.
-//      - step_vector2      : How many (positive or negative) steps the
-//                            |vector2| pointer should be updated for each new
-//                            cross-correlation value.
-//
-// Output:
-//      - cross_corr        : The cross-correlation in Q(-right_shifts)
-//
-
-//
-// WebRtcSpl_GetHanningWindow(...)
-//
-// Creates (the first half of) a Hanning window. Size must be at least 1 and
-// at most 512.
-//
-// Input:
-//      - size      : Length of the requested Hanning window (1 to 512)
-//
-// Output:
-//      - window    : Hanning vector in Q14.
-//
-
-//
-// WebRtcSpl_SqrtOfOneMinusXSquared(...)
-//
-// Calculates y[k] = sqrt(1 - x[k]^2) for each element of the input vector
-// |in_vector|. Input and output values are in Q15.
-//
-// Inputs:
-//      - in_vector     : Values to calculate sqrt(1 - x^2) of
-//      - vector_length : Length of vector |in_vector|
-//
-// Output:
-//      - out_vector    : Output values in Q15
-//
-
-//
 // WebRtcSpl_IncreaseSeed(...)
 //
 // Increases the seed (and returns the new value)
@@ -1438,62 +1484,6 @@
 //
 
 //
-// WebRtcSpl_FilterARFastQ12(...)
-//
-// Performs a AR filtering on a vector in Q12
-//
-// Input:
-//      - in_vector         : Input samples
-//      - out_vector        : State information in positions
-//                            out_vector[-order] .. out_vector[-1]
-//      - ar_coef           : Filter coefficients (in Q12)
-//      - ar_coef_length    : Number of B coefficients (order+1)
-//      - vector_length     : Number of samples to be filtered
-//
-// Output:
-//      - out_vector        : Filtered samples
-//
-
-//
-// WebRtcSpl_DownsampleFast(...)
-//
-// Performs a MA down sampling filter on a vector
-//
-// Input:
-//      - in_vector         : Input samples (state in positions
-//                            in_vector[-order] .. in_vector[-1])
-//      - in_vector_length  : Number of samples in |in_vector| to be filtered.
-//                            This must be at least
-//                            |delay| + |factor|*(|out_vector_length|-1) + 1)
-//      - out_vector_length : Number of down sampled samples desired
-//      - ma_coef           : Filter coefficients (in Q12)
-//      - ma_coef_length    : Number of B coefficients (order+1)
-//      - factor            : Decimation factor
-//      - delay             : Delay of filter (compensated for in out_vector)
-//
-// Output:
-//      - out_vector        : Filtered samples
-//
-// Return value             : 0 if OK, -1 if |in_vector| is too short
-//
-
-//
-// WebRtcSpl_DotProductWithScale(...)
-//
-// Calculates the dot product between two (WebRtc_Word16) vectors
-//
-// Input:
-//      - vector1       : Vector 1
-//      - vector2       : Vector 2
-//      - vector_length : Number of samples used in the dot product
-//      - scaling       : The number of right bit shifts to apply on each term
-//                        during calculation to avoid overflow, i.e., the
-//                        output will be in Q(-|scaling|)
-//
-// Return value         : The dot product in Q(-scaling)
-//
-
-//
 // WebRtcSpl_ComplexIFFT(...)
 //
 // Complex Inverse FFT
@@ -1585,31 +1575,6 @@
 //
 
 //
-// WebRtcSpl_ComplexBitReverse(...)
-//
-// Complex Bit Reverse
-//
-// This function bit-reverses the position of elements in the complex input
-// vector into the output vector.
-//
-// If you bit-reverse a linear-order array, you obtain a bit-reversed order
-// array. If you bit-reverse a bit-reversed order array, you obtain a
-// linear-order array.
-//
-// Input:
-//      - vector    : In pointer to complex vector containing 2^|stages| real
-//                    elements interleaved with 2^|stages| imaginary elements.
-//                    [ReImReImReIm....]
-//      - stages    : Number of FFT stages. Must be at least 3 and at most 10,
-//                    since the table WebRtcSpl_kSinTable1024[] is 1024
-//                    elements long.
-//
-// Output:
-//      - vector    : Out pointer to complex vector in bit-reversed order.
-//                    The input vector is over written.
-//
-
-//
 // WebRtcSpl_AnalysisQMF(...)
 //
 // Splits a 0-2*F Hz signal into two sub bands: 0-F Hz and F-2*F Hz. The
@@ -1652,7 +1617,7 @@
 // WebRtc_Word16 WebRtcSpl_SatW32ToW16(...)
 //
 // This function saturates a 32-bit word into a 16-bit word.
-// 
+//
 // Input:
 //      - value32   : The value of a 32-bit word.
 //
@@ -1664,7 +1629,7 @@
 //
 // This function multiply a 16-bit word by a 16-bit word, and accumulate this
 // value to a 32-bit integer.
-// 
+//
 // Input:
 //      - a    : The value of the first 16-bit word.
 //      - b    : The value of the second 16-bit word.
diff --git a/src/common_audio/signal_processing/include/spl_inl.h b/src/common_audio/signal_processing/include/spl_inl.h
index 23b3209..1cde181 100644
--- a/src/common_audio/signal_processing/include/spl_inl.h
+++ b/src/common_audio/signal_processing/include/spl_inl.h
@@ -15,7 +15,7 @@
 #ifndef WEBRTC_SPL_SPL_INL_H_
 #define WEBRTC_SPL_SPL_INL_H_
 
-#ifdef WEBRTC_ARCH_ARM_V7A
+#ifdef WEBRTC_ARCH_ARM_V7
 #include "spl_inl_armv7.h"
 #else
 
@@ -35,49 +35,11 @@
   return WebRtcSpl_SatW32ToW16((WebRtc_Word32) a + (WebRtc_Word32) b);
 }
 
-static __inline WebRtc_Word32 WebRtcSpl_AddSatW32(WebRtc_Word32 l_var1,
-                                                  WebRtc_Word32 l_var2) {
-  WebRtc_Word32 l_sum;
-
-  // perform long addition
-  l_sum = l_var1 + l_var2;
-
-  // check for under or overflow
-  if (WEBRTC_SPL_IS_NEG(l_var1)) {
-    if (WEBRTC_SPL_IS_NEG(l_var2) && !WEBRTC_SPL_IS_NEG(l_sum)) {
-        l_sum = (WebRtc_Word32)0x80000000;
-    }
-  } else {
-    if (!WEBRTC_SPL_IS_NEG(l_var2) && WEBRTC_SPL_IS_NEG(l_sum)) {
-        l_sum = (WebRtc_Word32)0x7FFFFFFF;
-    }
-  }
-
-  return l_sum;
-}
-
 static __inline WebRtc_Word16 WebRtcSpl_SubSatW16(WebRtc_Word16 var1,
                                                   WebRtc_Word16 var2) {
   return WebRtcSpl_SatW32ToW16((WebRtc_Word32) var1 - (WebRtc_Word32) var2);
 }
 
-static __inline WebRtc_Word32 WebRtcSpl_SubSatW32(WebRtc_Word32 l_var1,
-                                                  WebRtc_Word32 l_var2) {
-  WebRtc_Word32 l_diff;
-
-  // perform subtraction
-  l_diff = l_var1 - l_var2;
-
-  // check for underflow
-  if ((l_var1 < 0) && (l_var2 > 0) && (l_diff > 0))
-    l_diff = (WebRtc_Word32)0x80000000;
-  // check for overflow
-  if ((l_var1 > 0) && (l_var2 < 0) && (l_diff < 0))
-    l_diff = (WebRtc_Word32)0x7FFFFFFF;
-
-  return l_diff;
-}
-
 static __inline WebRtc_Word16 WebRtcSpl_GetSizeInBits(WebRtc_UWord32 n) {
   int bits;
 
@@ -154,6 +116,48 @@
   return (a * b + c);
 }
 
-#endif  // WEBRTC_ARCH_ARM_V7A
+#endif  // WEBRTC_ARCH_ARM_V7
+
+// The following functions have no optimized versions.
+// TODO(kma): Consider saturating add/sub instructions in X86 platform.
+static __inline WebRtc_Word32 WebRtcSpl_AddSatW32(WebRtc_Word32 l_var1,
+                                                  WebRtc_Word32 l_var2) {
+  WebRtc_Word32 l_sum;
+
+  // Perform long addition
+  l_sum = l_var1 + l_var2;
+
+  if (l_var1 < 0) {  // Check for underflow.
+    if ((l_var2 < 0) && (l_sum >= 0)) {
+        l_sum = (WebRtc_Word32)0x80000000;
+    }
+  } else {  // Check for overflow.
+    if ((l_var2 > 0) && (l_sum < 0)) {
+        l_sum = (WebRtc_Word32)0x7FFFFFFF;
+    }
+  }
+
+  return l_sum;
+}
+
+static __inline WebRtc_Word32 WebRtcSpl_SubSatW32(WebRtc_Word32 l_var1,
+                                                  WebRtc_Word32 l_var2) {
+  WebRtc_Word32 l_diff;
+
+  // Perform subtraction.
+  l_diff = l_var1 - l_var2;
+
+  if (l_var1 < 0) {  // Check for underflow.
+    if ((l_var2 > 0) && (l_diff > 0)) {
+      l_diff = (WebRtc_Word32)0x80000000;
+    }
+  } else {  // Check for overflow.
+    if ((l_var2 < 0) && (l_diff < 0)) {
+      l_diff = (WebRtc_Word32)0x7FFFFFFF;
+    }
+  }
+
+  return l_diff;
+}
 
 #endif  // WEBRTC_SPL_SPL_INL_H_
diff --git a/src/common_audio/signal_processing/include/spl_inl_armv7.h b/src/common_audio/signal_processing/include/spl_inl_armv7.h
index 67fc4f9..2eff496 100644
--- a/src/common_audio/signal_processing/include/spl_inl_armv7.h
+++ b/src/common_audio/signal_processing/include/spl_inl_armv7.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -9,39 +9,58 @@
  */
 
 
-// This header file includes the inline functions for ARM processors in
-// the fix point signal processing library.
+/* This header file includes the inline functions for ARM processors in
+ * the fix point signal processing library.
+ */
 
 #ifndef WEBRTC_SPL_SPL_INL_ARMV7_H_
 #define WEBRTC_SPL_SPL_INL_ARMV7_H_
 
+/* TODO(kma): Replace some assembly code with GCC intrinsics
+ * (e.g. __builtin_clz).
+ */
+
+/* This function produces result that is not bit exact with that by the generic
+ * C version in some cases, although the former is at least as accurate as the
+ * later.
+ */
 static __inline WebRtc_Word32 WEBRTC_SPL_MUL_16_32_RSFT16(WebRtc_Word16 a,
                                                           WebRtc_Word32 b) {
-  WebRtc_Word32 tmp;
-  __asm__("smulwb %0, %1, %2":"=r"(tmp):"r"(b), "r"(a));
+  WebRtc_Word32 tmp = 0;
+  __asm __volatile ("smulwb %0, %1, %2":"=r"(tmp):"r"(b), "r"(a));
   return tmp;
 }
 
+/* This function produces result that is not bit exact with that by the generic
+ * C version in some cases, although the former is at least as accurate as the
+ * later.
+ */
 static __inline WebRtc_Word32 WEBRTC_SPL_MUL_32_32_RSFT32(WebRtc_Word16 a,
                                                           WebRtc_Word16 b,
                                                           WebRtc_Word32 c) {
-  WebRtc_Word32 tmp;
-  __asm__("pkhbt %0, %1, %2, lsl #16" : "=r"(tmp) : "r"(b), "r"(a));
-  __asm__("smmul %0, %1, %2":"=r"(tmp):"r"(tmp), "r"(c));
+  WebRtc_Word32 tmp = 0;
+  __asm __volatile (
+    "pkhbt %[tmp], %[b], %[a], lsl #16\n\t"
+    "smmulr %[tmp], %[tmp], %[c]\n\t"
+    :[tmp]"+r"(tmp)
+    :[a]"r"(a),
+     [b]"r"(b),
+     [c]"r"(c)
+  );
   return tmp;
 }
 
 static __inline WebRtc_Word32 WEBRTC_SPL_MUL_32_32_RSFT32BI(WebRtc_Word32 a,
                                                             WebRtc_Word32 b) {
-  WebRtc_Word32 tmp;
-  __asm__("smmul %0, %1, %2":"=r"(tmp):"r"(a), "r"(b));
+  WebRtc_Word32 tmp = 0;
+  __asm volatile ("smmulr %0, %1, %2":"=r"(tmp):"r"(a), "r"(b));
   return tmp;
 }
 
 static __inline WebRtc_Word32 WEBRTC_SPL_MUL_16_16(WebRtc_Word16 a,
                                                    WebRtc_Word16 b) {
-  WebRtc_Word32 tmp;
-  __asm__("smulbb %0, %1, %2":"=r"(tmp):"r"(a), "r"(b));
+  WebRtc_Word32 tmp = 0;
+  __asm __volatile ("smulbb %0, %1, %2":"=r"(tmp):"r"(a), "r"(b));
   return tmp;
 }
 
@@ -49,88 +68,93 @@
                                           int16_t b,
                                           int32_t c) {
   int32_t tmp = 0;
-  __asm__("smlabb %0, %1, %2, %3":"=r"(tmp):"r"(a), "r"(b), "r"(c));
+  __asm __volatile ("smlabb %0, %1, %2, %3":"=r"(tmp):"r"(a), "r"(b), "r"(c));
   return tmp;
 }
 
 static __inline WebRtc_Word16 WebRtcSpl_AddSatW16(WebRtc_Word16 a,
                                                   WebRtc_Word16 b) {
-  WebRtc_Word32 s_sum;
+  WebRtc_Word32 s_sum = 0;
 
-  __asm__("qadd16 %0, %1, %2":"=r"(s_sum):"r"(a), "r"(b));
+  __asm __volatile ("qadd16 %0, %1, %2":"=r"(s_sum):"r"(a), "r"(b));
 
   return (WebRtc_Word16) s_sum;
 }
 
+/* TODO(kma): find the cause of unittest errors by the next two functions:
+ * http://code.google.com/p/webrtc/issues/detail?id=740.
+ */
+#if 0
 static __inline WebRtc_Word32 WebRtcSpl_AddSatW32(WebRtc_Word32 l_var1,
                                                   WebRtc_Word32 l_var2) {
-  WebRtc_Word32 l_sum;
+  WebRtc_Word32 l_sum = 0;
 
-  __asm__("qadd %0, %1, %2":"=r"(l_sum):"r"(l_var1), "r"(l_var2));
+  __asm __volatile ("qadd %0, %1, %2":"=r"(l_sum):"r"(l_var1), "r"(l_var2));
 
   return l_sum;
 }
 
+static __inline WebRtc_Word32 WebRtcSpl_SubSatW32(WebRtc_Word32 l_var1,
+                                                  WebRtc_Word32 l_var2) {
+  WebRtc_Word32 l_sub = 0;
+
+  __asm __volatile ("qsub %0, %1, %2":"=r"(l_sub):"r"(l_var1), "r"(l_var2));
+
+  return l_sub;
+}
+#endif
+
 static __inline WebRtc_Word16 WebRtcSpl_SubSatW16(WebRtc_Word16 var1,
                                                   WebRtc_Word16 var2) {
-  WebRtc_Word32 s_sub;
+  WebRtc_Word32 s_sub = 0;
 
-  __asm__("qsub16 %0, %1, %2":"=r"(s_sub):"r"(var1), "r"(var2));
+  __asm __volatile ("qsub16 %0, %1, %2":"=r"(s_sub):"r"(var1), "r"(var2));
 
   return (WebRtc_Word16)s_sub;
 }
 
-static __inline WebRtc_Word32 WebRtcSpl_SubSatW32(WebRtc_Word32 l_var1,
-                                                  WebRtc_Word32 l_var2) {
-  WebRtc_Word32 l_sub;
-
-  __asm__("qsub %0, %1, %2":"=r"(l_sub):"r"(l_var1), "r"(l_var2));
-
-  return l_sub;
-}
-
 static __inline WebRtc_Word16 WebRtcSpl_GetSizeInBits(WebRtc_UWord32 n) {
-  WebRtc_Word32 tmp;
+  WebRtc_Word32 tmp = 0;
 
-  __asm__("clz %0, %1":"=r"(tmp):"r"(n));
+  __asm __volatile ("clz %0, %1":"=r"(tmp):"r"(n));
 
   return (WebRtc_Word16)(32 - tmp);
 }
 
 static __inline int WebRtcSpl_NormW32(WebRtc_Word32 a) {
-  WebRtc_Word32 tmp;
+  WebRtc_Word32 tmp = 0;
 
   if (a <= 0) a ^= 0xFFFFFFFF;
 
-  __asm__("clz %0, %1":"=r"(tmp):"r"(a));
+  __asm __volatile ("clz %0, %1":"=r"(tmp):"r"(a));
 
   return tmp - 1;
 }
 
 static __inline int WebRtcSpl_NormU32(WebRtc_UWord32 a) {
-  int tmp;
+  int tmp = 0;
 
   if (a == 0) return 0;
 
-  __asm__("clz %0, %1":"=r"(tmp):"r"(a));
+  __asm __volatile ("clz %0, %1":"=r"(tmp):"r"(a));
 
   return tmp;
 }
 
 static __inline int WebRtcSpl_NormW16(WebRtc_Word16 a) {
-  WebRtc_Word32 tmp;
+  WebRtc_Word32 tmp = 0;
 
   if (a <= 0) a ^= 0xFFFFFFFF;
 
-  __asm__("clz %0, %1":"=r"(tmp):"r"(a));
+  __asm __volatile ("clz %0, %1":"=r"(tmp):"r"(a));
 
   return tmp - 17;
 }
 
 static __inline WebRtc_Word16 WebRtcSpl_SatW32ToW16(WebRtc_Word32 value32) {
-  WebRtc_Word16 out16;
+  WebRtc_Word16 out16 = 0;
 
-  __asm__("ssat %0, #16, %1" : "=r"(out16) : "r"(value32));
+  __asm __volatile ("ssat %r0, #16, %r1" : "=r"(out16) : "r"(value32));
 
   return out16;
 }
diff --git a/src/common_audio/signal_processing/min_max_operations.c b/src/common_audio/signal_processing/min_max_operations.c
index 57eaff7..d3539d7 100644
--- a/src/common_audio/signal_processing/min_max_operations.c
+++ b/src/common_audio/signal_processing/min_max_operations.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -11,255 +11,237 @@
 /*
  * This file contains the implementation of functions
  * WebRtcSpl_MaxAbsValueW16()
- * WebRtcSpl_MaxAbsIndexW16()
  * WebRtcSpl_MaxAbsValueW32()
  * WebRtcSpl_MaxValueW16()
- * WebRtcSpl_MaxIndexW16()
  * WebRtcSpl_MaxValueW32()
- * WebRtcSpl_MaxIndexW32()
  * WebRtcSpl_MinValueW16()
- * WebRtcSpl_MinIndexW16()
  * WebRtcSpl_MinValueW32()
+ * WebRtcSpl_MaxAbsIndexW16()
+ * WebRtcSpl_MaxIndexW16()
+ * WebRtcSpl_MaxIndexW32()
+ * WebRtcSpl_MinIndexW16()
  * WebRtcSpl_MinIndexW32()
  *
- * The description header can be found in signal_processing_library.h.
- *
  */
 
 #include "signal_processing_library.h"
 
-#if !(defined(WEBRTC_ANDROID) && defined(WEBRTC_ARCH_ARM_NEON))
+#include <stdlib.h>
+
+// TODO(bjorn/kma): Consolidate function pairs (e.g. combine
+// WebRtcSpl_MaxAbsValueW16 and WebRtcSpl_MaxAbsIndexW16 into a single one.)
+
+// TODO(kma): Move the code in the #ifndef block into min_max_operations_c.c.
+#ifndef WEBRTC_ARCH_ARM_NEON
 
 // Maximum absolute value of word16 vector.
-WebRtc_Word16 WebRtcSpl_MaxAbsValueW16(const WebRtc_Word16 *vector, WebRtc_Word16 length)
-{
-    WebRtc_Word32 tempMax = 0;
-    WebRtc_Word32 absVal;
-    WebRtc_Word16 totMax;
-    int i;
-    G_CONST WebRtc_Word16 *tmpvector = vector;
+int16_t WebRtcSpl_MaxAbsValueW16(const int16_t* vector, int length) {
+  int i = 0, absolute = 0, maximum = 0;
 
-    for (i = 0; i < length; i++)
-    {
-        absVal = WEBRTC_SPL_ABS_W32((*tmpvector));
-        if (absVal > tempMax)
-        {
-            tempMax = absVal;
-        }
-        tmpvector++;
+  if (vector == NULL || length <= 0) {
+    return -1;
+  }
+
+  for (i = 0; i < length; i++) {
+    absolute = abs((int)vector[i]);
+
+    if (absolute > maximum) {
+      maximum = absolute;
     }
-    totMax = (WebRtc_Word16)WEBRTC_SPL_MIN(tempMax, WEBRTC_SPL_WORD16_MAX);
-    return totMax;
-}
+  }
 
-#endif
+  // Guard the case for abs(-32768).
+  if (maximum > WEBRTC_SPL_WORD16_MAX) {
+    maximum = WEBRTC_SPL_WORD16_MAX;
+  }
 
-// Index of maximum absolute value in a  word16 vector.
-WebRtc_Word16 WebRtcSpl_MaxAbsIndexW16(G_CONST WebRtc_Word16* vector, WebRtc_Word16 length)
-{
-    WebRtc_Word16 tempMax;
-    WebRtc_Word16 absTemp;
-    WebRtc_Word16 tempMaxIndex = 0;
-    WebRtc_Word16 i = 0;
-    G_CONST WebRtc_Word16 *tmpvector = vector;
-
-    tempMax = WEBRTC_SPL_ABS_W16(*tmpvector);
-    tmpvector++;
-    for (i = 1; i < length; i++)
-    {
-        absTemp = WEBRTC_SPL_ABS_W16(*tmpvector);
-        tmpvector++;
-        if (absTemp > tempMax)
-        {
-            tempMax = absTemp;
-            tempMaxIndex = i;
-        }
-    }
-    return tempMaxIndex;
+  return (int16_t)maximum;
 }
 
 // Maximum absolute value of word32 vector.
-WebRtc_Word32 WebRtcSpl_MaxAbsValueW32(G_CONST WebRtc_Word32 *vector, WebRtc_Word16 length)
-{
-    WebRtc_UWord32 tempMax = 0;
-    WebRtc_UWord32 absVal;
-    WebRtc_Word32 retval;
-    int i;
-    G_CONST WebRtc_Word32 *tmpvector = vector;
+int32_t WebRtcSpl_MaxAbsValueW32(const int32_t* vector, int length) {
+  // Use uint32_t for the local variables, to accommodate the return value
+  // of abs(0x80000000), which is 0x80000000.
 
-    for (i = 0; i < length; i++)
-    {
-        absVal = WEBRTC_SPL_ABS_W32((*tmpvector));
-        if (absVal > tempMax)
-        {
-            tempMax = absVal;
-        }
-        tmpvector++;
+  uint32_t absolute = 0, maximum = 0;
+  int i = 0;
+
+  if (vector == NULL || length <= 0) {
+    return -1;
+  }
+
+  for (i = 0; i < length; i++) {
+    absolute = abs((int)vector[i]);
+    if (absolute > maximum) {
+      maximum = absolute;
     }
-    retval = (WebRtc_Word32)(WEBRTC_SPL_MIN(tempMax, WEBRTC_SPL_WORD32_MAX));
-    return retval;
+  }
+
+  maximum = WEBRTC_SPL_MIN(maximum, WEBRTC_SPL_WORD32_MAX);
+
+  return (int32_t)maximum;
 }
 
 // Maximum value of word16 vector.
-#ifndef XSCALE_OPT
-WebRtc_Word16 WebRtcSpl_MaxValueW16(G_CONST WebRtc_Word16* vector, WebRtc_Word16 length)
-{
-    WebRtc_Word16 tempMax;
-    WebRtc_Word16 i;
-    G_CONST WebRtc_Word16 *tmpvector = vector;
+int16_t WebRtcSpl_MaxValueW16(const int16_t* vector, int length) {
+  int16_t maximum = WEBRTC_SPL_WORD16_MIN;
+  int i = 0;
 
-    tempMax = *tmpvector++;
-    for (i = 1; i < length; i++)
-    {
-        if (*tmpvector++ > tempMax)
-            tempMax = vector[i];
-    }
-    return tempMax;
-}
-#else
-#pragma message(">> WebRtcSpl_MaxValueW16 is excluded from this build")
-#endif
+  if (vector == NULL || length <= 0) {
+    return maximum;
+  }
 
-// Index of maximum value in a word16 vector.
-WebRtc_Word16 WebRtcSpl_MaxIndexW16(G_CONST WebRtc_Word16 *vector, WebRtc_Word16 length)
-{
-    WebRtc_Word16 tempMax;
-    WebRtc_Word16 tempMaxIndex = 0;
-    WebRtc_Word16 i = 0;
-    G_CONST WebRtc_Word16 *tmpvector = vector;
-
-    tempMax = *tmpvector++;
-    for (i = 1; i < length; i++)
-    {
-        if (*tmpvector++ > tempMax)
-        {
-            tempMax = vector[i];
-            tempMaxIndex = i;
-        }
-    }
-    return tempMaxIndex;
+  for (i = 0; i < length; i++) {
+    if (vector[i] > maximum)
+      maximum = vector[i];
+  }
+  return maximum;
 }
 
 // Maximum value of word32 vector.
-#ifndef XSCALE_OPT
-WebRtc_Word32 WebRtcSpl_MaxValueW32(G_CONST WebRtc_Word32* vector, WebRtc_Word16 length)
-{
-    WebRtc_Word32 tempMax;
-    WebRtc_Word16 i;
-    G_CONST WebRtc_Word32 *tmpvector = vector;
+int32_t WebRtcSpl_MaxValueW32(const int32_t* vector, int length) {
+  int32_t maximum = WEBRTC_SPL_WORD32_MIN;
+  int i = 0;
 
-    tempMax = *tmpvector++;
-    for (i = 1; i < length; i++)
-    {
-        if (*tmpvector++ > tempMax)
-            tempMax = vector[i];
-    }
-    return tempMax;
-}
-#else
-#pragma message(">> WebRtcSpl_MaxValueW32 is excluded from this build")
-#endif
+  if (vector == NULL || length <= 0) {
+    return maximum;
+  }
 
-// Index of maximum value in a word32 vector.
-WebRtc_Word16 WebRtcSpl_MaxIndexW32(G_CONST WebRtc_Word32* vector, WebRtc_Word16 length)
-{
-    WebRtc_Word32 tempMax;
-    WebRtc_Word16 tempMaxIndex = 0;
-    WebRtc_Word16 i = 0;
-    G_CONST WebRtc_Word32 *tmpvector = vector;
-
-    tempMax = *tmpvector++;
-    for (i = 1; i < length; i++)
-    {
-        if (*tmpvector++ > tempMax)
-        {
-            tempMax = vector[i];
-            tempMaxIndex = i;
-        }
-    }
-    return tempMaxIndex;
+  for (i = 0; i < length; i++) {
+    if (vector[i] > maximum)
+      maximum = vector[i];
+  }
+  return maximum;
 }
 
 // Minimum value of word16 vector.
-WebRtc_Word16 WebRtcSpl_MinValueW16(G_CONST WebRtc_Word16 *vector, WebRtc_Word16 length)
-{
-    WebRtc_Word16 tempMin;
-    WebRtc_Word16 i;
-    G_CONST WebRtc_Word16 *tmpvector = vector;
+int16_t WebRtcSpl_MinValueW16(const int16_t* vector, int length) {
+  int16_t minimum = WEBRTC_SPL_WORD16_MAX;
+  int i = 0;
 
-    // Find the minimum value
-    tempMin = *tmpvector++;
-    for (i = 1; i < length; i++)
-    {
-        if (*tmpvector++ < tempMin)
-            tempMin = (vector[i]);
+  if (vector == NULL || length <= 0) {
+    return minimum;
+  }
+
+  for (i = 0; i < length; i++) {
+    if (vector[i] < minimum)
+      minimum = vector[i];
+  }
+  return minimum;
+}
+
+// Minimum value of word32 vector.
+int32_t WebRtcSpl_MinValueW32(const int32_t* vector, int length) {
+  int32_t minimum = WEBRTC_SPL_WORD32_MAX;
+  int i = 0;
+
+  if (vector == NULL || length <= 0) {
+    return minimum;
+  }
+
+  for (i = 0; i < length; i++) {
+    if (vector[i] < minimum)
+      minimum = vector[i];
+  }
+  return minimum;
+}
+#endif  // WEBRTC_ARCH_ARM_NEON
+
+
+// Index of maximum absolute value in a word16 vector.
+int WebRtcSpl_MaxAbsIndexW16(const int16_t* vector, int length) {
+  // Use type int for local variables, to accomodate the value of abs(-32768).
+
+  int i = 0, absolute = 0, maximum = 0, index = 0;
+
+  if (vector == NULL || length <= 0) {
+    return -1;
+  }
+
+  for (i = 0; i < length; i++) {
+    absolute = abs((int)vector[i]);
+
+    if (absolute > maximum) {
+      maximum = absolute;
+      index = i;
     }
-    return tempMin;
+  }
+
+  return index;
+}
+
+// Index of maximum value in a word16 vector.
+int WebRtcSpl_MaxIndexW16(const int16_t* vector, int length) {
+  int i = 0, index = 0;
+  int16_t maximum = WEBRTC_SPL_WORD16_MIN;
+
+  if (vector == NULL || length <= 0) {
+    return -1;
+  }
+
+  for (i = 0; i < length; i++) {
+    if (vector[i] > maximum) {
+      maximum = vector[i];
+      index = i;
+    }
+  }
+
+  return index;
+}
+
+// Index of maximum value in a word32 vector.
+int WebRtcSpl_MaxIndexW32(const int32_t* vector, int length) {
+  int i = 0, index = 0;
+  int32_t maximum = WEBRTC_SPL_WORD32_MIN;
+
+  if (vector == NULL || length <= 0) {
+    return -1;
+  }
+
+  for (i = 0; i < length; i++) {
+    if (vector[i] > maximum) {
+      maximum = vector[i];
+      index = i;
+    }
+  }
+
+  return index;
 }
 
 // Index of minimum value in a word16 vector.
-#ifndef XSCALE_OPT
-WebRtc_Word16 WebRtcSpl_MinIndexW16(G_CONST WebRtc_Word16* vector, WebRtc_Word16 length)
-{
-    WebRtc_Word16 tempMin;
-    WebRtc_Word16 tempMinIndex = 0;
-    WebRtc_Word16 i = 0;
-    G_CONST WebRtc_Word16* tmpvector = vector;
+int WebRtcSpl_MinIndexW16(const int16_t* vector, int length) {
+  int i = 0, index = 0;
+  int16_t minimum = WEBRTC_SPL_WORD16_MAX;
 
-    // Find index of smallest value
-    tempMin = *tmpvector++;
-    for (i = 1; i < length; i++)
-    {
-        if (*tmpvector++ < tempMin)
-        {
-            tempMin = vector[i];
-            tempMinIndex = i;
-        }
+  if (vector == NULL || length <= 0) {
+    return -1;
+  }
+
+  for (i = 0; i < length; i++) {
+    if (vector[i] < minimum) {
+      minimum = vector[i];
+      index = i;
     }
-    return tempMinIndex;
-}
-#else
-#pragma message(">> WebRtcSpl_MinIndexW16 is excluded from this build")
-#endif
+  }
 
-// Minimum value of word32 vector.
-WebRtc_Word32 WebRtcSpl_MinValueW32(G_CONST WebRtc_Word32 *vector, WebRtc_Word16 length)
-{
-    WebRtc_Word32 tempMin;
-    WebRtc_Word16 i;
-    G_CONST WebRtc_Word32 *tmpvector = vector;
-
-    // Find the minimum value
-    tempMin = *tmpvector++;
-    for (i = 1; i < length; i++)
-    {
-        if (*tmpvector++ < tempMin)
-            tempMin = (vector[i]);
-    }
-    return tempMin;
+  return index;
 }
 
 // Index of minimum value in a word32 vector.
-#ifndef XSCALE_OPT
-WebRtc_Word16 WebRtcSpl_MinIndexW32(G_CONST WebRtc_Word32* vector, WebRtc_Word16 length)
-{
-    WebRtc_Word32 tempMin;
-    WebRtc_Word16 tempMinIndex = 0;
-    WebRtc_Word16 i = 0;
-    G_CONST WebRtc_Word32 *tmpvector = vector;
+int WebRtcSpl_MinIndexW32(const int32_t* vector, int length) {
+  int i = 0, index = 0;
+  int32_t minimum = WEBRTC_SPL_WORD32_MAX;
 
-    // Find index of smallest value
-    tempMin = *tmpvector++;
-    for (i = 1; i < length; i++)
-    {
-        if (*tmpvector++ < tempMin)
-        {
-            tempMin = vector[i];
-            tempMinIndex = i;
-        }
+  if (vector == NULL || length <= 0) {
+    return -1;
+  }
+
+  for (i = 0; i < length; i++) {
+    if (vector[i] < minimum) {
+      minimum = vector[i];
+      index = i;
     }
-    return tempMinIndex;
+  }
+
+  return index;
 }
-#else
-#pragma message(">> WebRtcSpl_MinIndexW32 is excluded from this build")
-#endif
diff --git a/src/common_audio/signal_processing/min_max_operations_neon.c b/src/common_audio/signal_processing/min_max_operations_neon.c
deleted file mode 100644
index 158bcc1..0000000
--- a/src/common_audio/signal_processing/min_max_operations_neon.c
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#if (defined(WEBRTC_ANDROID) && defined(WEBRTC_ARCH_ARM_NEON))
-
-#include <arm_neon.h>
-
-#include "signal_processing_library.h"
-
-// Maximum absolute value of word16 vector.
-WebRtc_Word16 WebRtcSpl_MaxAbsValueW16(const WebRtc_Word16* vector,
-                                       WebRtc_Word16 length) {
-  WebRtc_Word32 temp_max = 0;
-  WebRtc_Word32 abs_val;
-  WebRtc_Word16 tot_max;
-  int i;
-
-  __asm__("vmov.i16 d25, #0" : : : "d25");
-
-  for (i = 0; i < length - 7; i += 8) {
-    __asm__("vld1.16 {d26, d27}, [%0]" : : "r"(&vector[i]) : "q13");
-    __asm__("vabs.s16 q13, q13" : : : "q13");
-    __asm__("vpmax.s16 d26, d27" : : : "q13");
-    __asm__("vpmax.s16 d25, d26" : : : "d25", "d26");
-  }
-  __asm__("vpmax.s16 d25, d25" : : : "d25");
-  __asm__("vpmax.s16 d25, d25" : : : "d25");
-  __asm__("vmov.s16 %0, d25[0]" : "=r"(temp_max): : "d25");
-
-  for (; i < length; i++) {
-    abs_val = WEBRTC_SPL_ABS_W32((vector[i]));
-    if (abs_val > temp_max) {
-      temp_max = abs_val;
-    }
-  }
-  tot_max = (WebRtc_Word16)WEBRTC_SPL_MIN(temp_max, WEBRTC_SPL_WORD16_MAX);
-  return tot_max;
-}
-
-#endif
diff --git a/src/common_audio/signal_processing/min_max_operations_neon.s b/src/common_audio/signal_processing/min_max_operations_neon.s
new file mode 100644
index 0000000..01831ef
--- /dev/null
+++ b/src/common_audio/signal_processing/min_max_operations_neon.s
@@ -0,0 +1,305 @@
+@
+@ Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+@
+@ Use of this source code is governed by a BSD-style license
+@ that can be found in the LICENSE file in the root of the source
+@ tree. An additional intellectual property rights grant can be found
+@ in the file PATENTS.  All contributing project authors may
+@ be found in the AUTHORS file in the root of the source tree.
+@
+
+@ This file contains the function WebRtcSpl_MaxAbsValueW16(), optimized for
+@ ARM Neon platform. The description header can be found in
+@ signal_processing_library.h
+@
+@ The reference C code is in file min_max_operations.c. Code here is basically
+@ a loop unrolling by 8 with Neon instructions. Bit-exact.
+
+.arch armv7-a
+.fpu neon
+.global WebRtcSpl_MaxAbsValueW16
+.global WebRtcSpl_MaxAbsValueW32
+.global WebRtcSpl_MaxValueW16
+.global WebRtcSpl_MaxValueW32
+.global WebRtcSpl_MinValueW16
+.global WebRtcSpl_MinValueW32
+.align  2
+
+@ int16_t WebRtcSpl_MaxAbsValueW16(const int16_t* vector, int length);
+WebRtcSpl_MaxAbsValueW16:
+.fnstart
+
+  mov r2, #-1                 @ Initialize the return value.
+  cmp r0, #0
+  beq END_MAX_ABS_VALUE_W16
+  cmp r1, #0
+  ble END_MAX_ABS_VALUE_W16
+
+  cmp r1, #8
+  blt LOOP_MAX_ABS_VALUE_W16
+
+  vmov.i16 q12, #0
+  sub r1, #8                  @ Counter for loops
+
+LOOP_UNROLLED_BY_8_MAX_ABS_VALUE_W16:
+  vld1.16 {q13}, [r0]!
+  subs r1, #8
+  vabs.s16 q13, q13           @ Note vabs doesn't change the value of -32768.
+  vmax.u16 q12, q13           @ Use u16 so we don't lose the value -32768.
+  bge LOOP_UNROLLED_BY_8_MAX_ABS_VALUE_W16
+
+  @ Find the maximum value in the Neon registers and move it to r2.
+  vmax.u16 d24, d25
+  vpmax.u16 d24, d24
+  vpmax.u16 d24, d24
+  adds r1, #8
+  vmov.u16 r2, d24[0]
+  beq END_MAX_ABS_VALUE_W16
+
+LOOP_MAX_ABS_VALUE_W16:
+  ldrsh r3, [r0], #2
+  eor r12, r3, r3, asr #31    @ eor and then sub, to get absolute value.
+  sub r12, r12, r3, asr #31
+  cmp r2, r12
+  movlt r2, r12
+  subs r1, #1
+  bne LOOP_MAX_ABS_VALUE_W16
+
+END_MAX_ABS_VALUE_W16:
+  cmp r2, #0x8000             @ Guard against the case for -32768.
+  subeq r2, #1
+  mov r0, r2
+  bx  lr
+
+.fnend
+
+@ int32_t WebRtcSpl_MaxAbsValueW32(const int32_t* vector, int length);
+WebRtcSpl_MaxAbsValueW32:
+.fnstart
+
+  cmp r0, #0
+  moveq r0, #-1
+  beq EXIT                    @ Return -1 for a NULL pointer.
+  cmp r1, #0                  @ length
+  movle r0, #-1
+  ble EXIT                    @ Return -1 if length <= 0.
+
+  vmov.i32 q11, #0
+  vmov.i32 q12, #0
+  cmp r1, #8
+  blt LOOP_MAX_ABS_VALUE_W32
+
+  sub r1, #8                  @ Counter for loops
+
+LOOP_UNROLLED_BY_8_MAX_ABS_VALUE_W32:
+  vld1.32 {q13, q14}, [r0]!
+  subs r1, #8                 @ Counter for loops
+  vabs.s32 q13, q13           @ vabs doesn't change the value of 0x80000000.
+  vabs.s32 q14, q14
+  vmax.u32 q11, q13           @ Use u32 so we don't lose the value 0x80000000.
+  vmax.u32 q12, q14
+  bge LOOP_UNROLLED_BY_8_MAX_ABS_VALUE_W32
+
+  @ Find the maximum value in the Neon registers and move it to r2.
+  vmax.u32 q12, q11
+  vmax.u32 d24, d25
+  vpmax.u32 d24, d24
+  adds r1, #8
+  vmov.u32 r2, d24[0]
+  beq END_MAX_ABS_VALUE_W32
+
+LOOP_MAX_ABS_VALUE_W32:
+  ldr r3, [r0], #4
+  eor r12, r3, r3, asr #31    @ eor and then sub, to get absolute value.
+  sub r12, r12, r3, asr #31
+  cmp r2, r12
+  movcc r2, r12
+  subs r1, #1
+  bne LOOP_MAX_ABS_VALUE_W32
+
+END_MAX_ABS_VALUE_W32:
+  mvn r0, #0x80000000         @ Guard against the case for 0x80000000.
+  cmp r2, r0
+  movcc r0, r2
+
+EXIT:
+  bx  lr
+
+.fnend
+
+@ int16_t WebRtcSpl_MaxValueW16(const int16_t* vector, int length);
+WebRtcSpl_MaxValueW16:
+.fnstart
+
+  mov r2, #0x8000             @ Initialize the return value.
+  cmp r0, #0
+  beq END_MAX_VALUE_W16
+  cmp r1, #0
+  ble END_MAX_VALUE_W16
+
+  vmov.i16 q12, #0x8000
+  cmp r1, #8
+  blt LOOP_MAX_VALUE_W16
+
+  sub r1, #8                  @ Counter for loops
+
+LOOP_UNROLLED_BY_8_MAX_VALUE_W16:
+  vld1.16 {q13}, [r0]!
+  subs r1, #8
+  vmax.s16 q12, q13
+  bge LOOP_UNROLLED_BY_8_MAX_VALUE_W16
+
+  @ Find the maximum value in the Neon registers and move it to r2.
+  vmax.s16 d24, d25
+  vpmax.s16 d24, d24
+  vpmax.s16 d24, d24
+  adds r1, #8
+  vmov.u16 r2, d24[0]
+  beq END_MAX_VALUE_W16
+
+LOOP_MAX_VALUE_W16:
+  ldrsh r3, [r0], #2
+  cmp r2, r3
+  movlt r2, r3
+  subs r1, #1
+  bne LOOP_MAX_VALUE_W16
+
+END_MAX_VALUE_W16:
+  mov r0, r2
+  bx  lr
+
+.fnend
+
+@ int32_t WebRtcSpl_MaxValueW32(const int32_t* vector, int length);
+WebRtcSpl_MaxValueW32:
+.fnstart
+
+  mov r2, #0x80000000         @ Initialize the return value.
+  cmp r0, #0
+  beq END_MAX_VALUE_W32
+  cmp r1, #0
+  ble END_MAX_VALUE_W32
+
+  vmov.i32 q11, #0x80000000
+  vmov.i32 q12, #0x80000000
+  cmp r1, #8
+  blt LOOP_MAX_VALUE_W32
+
+  sub r1, #8                  @ Counter for loops
+
+LOOP_UNROLLED_BY_8_MAX_VALUE_W32:
+  vld1.32 {q13, q14}, [r0]!
+  subs r1, #8
+  vmax.s32 q11, q13
+  vmax.s32 q12, q14
+  bge LOOP_UNROLLED_BY_8_MAX_VALUE_W32
+
+  @ Find the maximum value in the Neon registers and move it to r2.
+  vmax.s32 q12, q11
+  vpmax.s32 d24, d25
+  vpmax.s32 d24, d24
+  adds r1, #8
+  vmov.s32 r2, d24[0]
+  beq END_MAX_VALUE_W32
+
+LOOP_MAX_VALUE_W32:
+  ldr r3, [r0], #4
+  cmp r2, r3
+  movlt r2, r3
+  subs r1, #1
+  bne LOOP_MAX_VALUE_W32
+
+END_MAX_VALUE_W32:
+  mov r0, r2
+  bx  lr
+
+.fnend
+
+@ int16_t WebRtcSpl_MinValueW16(const int16_t* vector, int length);
+WebRtcSpl_MinValueW16:
+.fnstart
+
+  movw r2, #0x7FFF            @ Initialize the return value.
+  cmp r0, #0
+  beq END_MIN_VALUE_W16
+  cmp r1, #0
+  ble END_MIN_VALUE_W16
+
+  vmov.i16 q12, #0x7FFF
+  cmp r1, #8
+  blt LOOP_MIN_VALUE_W16
+
+  sub r1, #8                  @ Counter for loops
+
+LOOP_UNROLLED_BY_8_MIN_VALUE_W16:
+  vld1.16 {q13}, [r0]!
+  subs r1, #8
+  vmin.s16 q12, q13
+  bge LOOP_UNROLLED_BY_8_MIN_VALUE_W16
+
+  @ Find the maximum value in the Neon registers and move it to r2.
+  vmin.s16 d24, d25
+  vpmin.s16 d24, d24
+  vpmin.s16 d24, d24
+  adds r1, #8
+  vmov.s16 r2, d24[0]
+  sxth  r2, r2
+  beq END_MIN_VALUE_W16
+
+LOOP_MIN_VALUE_W16:
+  ldrsh r3, [r0], #2
+  cmp r2, r3
+  movge r2, r3
+  subs r1, #1
+  bne LOOP_MIN_VALUE_W16
+
+END_MIN_VALUE_W16:
+  mov r0, r2
+  bx  lr
+
+.fnend
+
+@ int32_t WebRtcSpl_MinValueW32(const int32_t* vector, int length);
+WebRtcSpl_MinValueW32:
+.fnstart
+
+  mov r2, #0x7FFFFFFF         @ Initialize the return value.
+  cmp r0, #0
+  beq END_MIN_VALUE_W32
+  cmp r1, #0
+  ble END_MIN_VALUE_W32
+
+  vdup.32 q11, r2
+  vdup.32 q12, r2
+  cmp r1, #8
+  blt LOOP_MIN_VALUE_W32
+
+  sub r1, #8                  @ Counter for loops
+
+LOOP_UNROLLED_BY_8_MIN_VALUE_W32:
+  vld1.32 {q13, q14}, [r0]!
+  subs r1, #8
+  vmin.s32 q11, q13
+  vmin.s32 q12, q14
+  bge LOOP_UNROLLED_BY_8_MIN_VALUE_W32
+
+  @ Find the maximum value in the Neon registers and move it to r2.
+  vmin.s32 q12, q11
+  vpmin.s32 d24, d25
+  vpmin.s32 d24, d24
+  adds r1, #8
+  vmov.s32 r2, d24[0]
+  beq END_MIN_VALUE_W32
+
+LOOP_MIN_VALUE_W32:
+  ldr r3, [r0], #4
+  cmp r2, r3
+  movge r2, r3
+  subs r1, #1
+  bne LOOP_MIN_VALUE_W32
+
+END_MIN_VALUE_W32:
+  mov r0, r2
+  bx  lr
+
+.fnend
diff --git a/src/common_audio/signal_processing/resample_by_2.c b/src/common_audio/signal_processing/resample_by_2.c
index a0da428..c1d8b37 100644
--- a/src/common_audio/signal_processing/resample_by_2.c
+++ b/src/common_audio/signal_processing/resample_by_2.c
@@ -17,7 +17,7 @@
 
 #include "signal_processing_library.h"
 
-#ifdef WEBRTC_ARCH_ARM_V7A
+#ifdef WEBRTC_ARCH_ARM_V7
 
 // allpass filter coefficients.
 static const WebRtc_UWord32 kResampleAllpass1[3] = {3284, 24441, 49528 << 15};
@@ -31,8 +31,8 @@
                                           WebRtc_Word32 diff,
                                           WebRtc_Word32 state) {
   WebRtc_Word32 result;
-  __asm__("smlawb %0, %1, %2, %3": "=r"(result): "r"(diff),
-                                   "r"(tbl_value), "r"(state));
+  __asm__("smlawb %r0, %r1, %r2, %r3": "=r"(result): "r"(diff),
+                                       "r"(tbl_value), "r"(state));
   return result;
 }
 
@@ -47,8 +47,8 @@
                                           WebRtc_Word32 diff,
                                           WebRtc_Word32 state) {
   WebRtc_Word32 result;
-  __asm__("smmla %0, %1, %2, %3": "=r"(result): "r"(diff << 1),
-                                  "r"(tbl_value), "r"(state));
+  __asm__("smmla %r0, %r1, %r2, %r3": "=r"(result): "r"(diff << 1),
+                                      "r"(tbl_value), "r"(state));
   return result;
 }
 
@@ -62,7 +62,7 @@
 #define MUL_ACCUM_1(a, b, c) WEBRTC_SPL_SCALEDIFF32(a, b, c)
 #define MUL_ACCUM_2(a, b, c) WEBRTC_SPL_SCALEDIFF32(a, b, c)
 
-#endif  // WEBRTC_ARCH_ARM_V7A
+#endif  // WEBRTC_ARCH_ARM_V7
 
 
 // decimator
diff --git a/src/common_audio/signal_processing/signal_processing.gypi b/src/common_audio/signal_processing/signal_processing.gypi
index c67bf7c..2edc808 100644
--- a/src/common_audio/signal_processing/signal_processing.gypi
+++ b/src/common_audio/signal_processing/signal_processing.gypi
@@ -56,30 +56,44 @@
         'sqrt_of_one_minus_x_squared.c',
         'vector_scaling_operations.c',
       ],
+      'conditions': [
+        ['target_arch=="arm"', {
+          'sources': [
+            'spl_sqrt_floor_arm.s',
+          ],
+          'sources!': [
+            'spl_sqrt_floor.c',
+          ],
+          'conditions': [
+            ['armv7==1', {
+              'sources': [
+                'filter_ar_fast_q12_armv7.s',
+              ],
+              'sources!': [
+                'filter_ar_fast_q12.c',
+              ],
+            }],
+          ],
+        }],
+      ],
     }, # spl
   ], # targets
   'conditions': [
-    ['build_with_chromium==0', {
+    ['include_tests==1', {
       'targets': [
         {
           'target_name': 'signal_processing_unittests',
           'type': 'executable',
           'dependencies': [
             'signal_processing',
-            '<(webrtc_root)/../test/test.gyp:test_support_main',
-            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
           ],
           'sources': [
             'signal_processing_unittest.cc',
           ],
         }, # spl_unittests
       ], # targets
-    }], # build_with_chromium
+    }], # include_tests
   ], # conditions
 }
-
-# Local Variables:
-# tab-width:2
-# indent-tabs-mode:nil
-# End:
-# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/common_audio/signal_processing/signal_processing_unittest.cc b/src/common_audio/signal_processing/signal_processing_unittest.cc
index b2e8281..a7c69b2 100644
--- a/src/common_audio/signal_processing/signal_processing_unittest.cc
+++ b/src/common_audio/signal_processing/signal_processing_unittest.cc
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -11,6 +11,10 @@
 #include "signal_processing_library.h"
 #include "gtest/gtest.h"
 
+static const int kVector16Size = 9;
+static const int16_t vector16[kVector16Size] = {1, -15511, 4323, 1963,
+  WEBRTC_SPL_WORD16_MAX, 0, WEBRTC_SPL_WORD16_MIN + 5, -3333, 345};
+
 class SplTest : public testing::Test {
  protected:
   virtual ~SplTest() {
@@ -41,12 +45,12 @@
 
     EXPECT_EQ(-63, WEBRTC_SPL_MUL(a, B));
     EXPECT_EQ(-2147483645, WEBRTC_SPL_MUL(a, b));
-    EXPECT_EQ(-2147483645u, WEBRTC_SPL_UMUL(a, b));
+    EXPECT_EQ(2147483651u, WEBRTC_SPL_UMUL(a, b));
     b = WEBRTC_SPL_WORD16_MAX >> 1;
     EXPECT_EQ(65535u, WEBRTC_SPL_UMUL_RSFT16(a, b));
     EXPECT_EQ(1073627139u, WEBRTC_SPL_UMUL_16_16(a, b));
     EXPECT_EQ(16382u, WEBRTC_SPL_UMUL_16_16_RSFT16(a, b));
-    EXPECT_EQ(-49149u, WEBRTC_SPL_UMUL_32_16(a, b));
+    EXPECT_EQ(4294918147u, WEBRTC_SPL_UMUL_32_16(a, b));
     EXPECT_EQ(65535u, WEBRTC_SPL_UMUL_32_16_RSFT16(a, b));
     EXPECT_EQ(-49149, WEBRTC_SPL_MUL_16_U16(a, b));
 
@@ -66,9 +70,7 @@
     EXPECT_EQ(5, WEBRTC_SPL_MUL_32_32_RSFT32(a32a, a32b, A));
     EXPECT_EQ(5, WEBRTC_SPL_MUL_32_32_RSFT32BI(a32, A));
 
-    EXPECT_EQ(-49149, WEBRTC_SPL_MUL_16_16(a, b));
     EXPECT_EQ(-12288, WEBRTC_SPL_MUL_16_16_RSFT(a, b, 2));
-
     EXPECT_EQ(-12287, WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(a, b, 2));
     EXPECT_EQ(-1, WEBRTC_SPL_MUL_16_16_RSFT_WITH_FIXROUND(a, b));
 
@@ -102,36 +104,77 @@
     EXPECT_EQ(32766u, WEBRTC_SPL_LSHIFT_U32(a, 1));
 
     EXPECT_EQ(1470, WEBRTC_SPL_RAND(A));
+
+    EXPECT_EQ(-49149, WEBRTC_SPL_MUL_16_16(a, b));
+    EXPECT_EQ(1073676289, WEBRTC_SPL_MUL_16_16(WEBRTC_SPL_WORD16_MAX,
+                                               WEBRTC_SPL_WORD16_MAX));
+    EXPECT_EQ(1073709055, WEBRTC_SPL_MUL_16_32_RSFT16(WEBRTC_SPL_WORD16_MAX,
+                                                      WEBRTC_SPL_WORD32_MAX));
+    EXPECT_EQ(1073741824, WEBRTC_SPL_MUL_16_32_RSFT16(WEBRTC_SPL_WORD16_MIN,
+                                                      WEBRTC_SPL_WORD32_MIN));
+#ifdef WEBRTC_ARCH_ARM_V7
+    EXPECT_EQ(-1073741824,
+              WEBRTC_SPL_MUL_16_32_RSFT16(WEBRTC_SPL_WORD16_MIN,
+                                          WEBRTC_SPL_WORD32_MAX));
+    EXPECT_EQ(0x3fffffff, WEBRTC_SPL_MUL_32_32_RSFT32(WEBRTC_SPL_WORD16_MAX,
+              0xffff, WEBRTC_SPL_WORD32_MAX));
+    EXPECT_EQ(0x3fffffff, WEBRTC_SPL_MUL_32_32_RSFT32BI(WEBRTC_SPL_WORD32_MAX,
+              WEBRTC_SPL_WORD32_MAX));
+#else
+    EXPECT_EQ(-1073741823,
+              WEBRTC_SPL_MUL_16_32_RSFT16(WEBRTC_SPL_WORD16_MIN,
+                                          WEBRTC_SPL_WORD32_MAX));
+    EXPECT_EQ(0x3fff7ffe, WEBRTC_SPL_MUL_32_32_RSFT32(WEBRTC_SPL_WORD16_MAX,
+              0xffff, WEBRTC_SPL_WORD32_MAX));
+    EXPECT_EQ(0x3ffffffd, WEBRTC_SPL_MUL_32_32_RSFT32BI(WEBRTC_SPL_WORD32_MAX,
+                                                        WEBRTC_SPL_WORD32_MAX));
+#endif
 }
 
 TEST_F(SplTest, InlineTest) {
-    WebRtc_Word16 a = 121;
-    WebRtc_Word16 b = -17;
-    WebRtc_Word32 A = 111121;
-    WebRtc_Word32 B = -1711;
+    WebRtc_Word16 a16 = 121;
+    WebRtc_Word16 b16 = -17;
+    WebRtc_Word32 a32 = 111121;
+    WebRtc_Word32 b32 = -1711;
     char bVersion[8];
 
-    EXPECT_EQ(104, WebRtcSpl_AddSatW16(a, b));
-    EXPECT_EQ(138, WebRtcSpl_SubSatW16(a, b));
+    EXPECT_EQ(17, WebRtcSpl_GetSizeInBits(a32));
+    EXPECT_EQ(14, WebRtcSpl_NormW32(a32));
+    EXPECT_EQ(4, WebRtcSpl_NormW16(b32));
+    EXPECT_EQ(15, WebRtcSpl_NormU32(a32));
 
-    EXPECT_EQ(109410, WebRtcSpl_AddSatW32(A, B));
-    EXPECT_EQ(112832, WebRtcSpl_SubSatW32(A, B));
+    EXPECT_EQ(104, WebRtcSpl_AddSatW16(a16, b16));
+    EXPECT_EQ(138, WebRtcSpl_SubSatW16(a16, b16));
 
-    EXPECT_EQ(17, WebRtcSpl_GetSizeInBits(A));
-    EXPECT_EQ(14, WebRtcSpl_NormW32(A));
-    EXPECT_EQ(4, WebRtcSpl_NormW16(B));
-    EXPECT_EQ(15, WebRtcSpl_NormU32(A));
+    EXPECT_EQ(109410, WebRtcSpl_AddSatW32(a32, b32));
+    EXPECT_EQ(112832, WebRtcSpl_SubSatW32(a32, b32));
+    a32 = 0x80000000;
+    b32 = 0x80000000;
+    // Cast to signed int to avoid compiler complaint on gtest.h.
+    EXPECT_EQ(static_cast<int>(0x80000000), WebRtcSpl_AddSatW32(a32, b32));
+    a32 = 0x7fffffff;
+    b32 = 0x7fffffff;
+    EXPECT_EQ(0x7fffffff, WebRtcSpl_AddSatW32(a32, b32));
+    a32 = 0;
+    b32 = 0x80000000;
+    EXPECT_EQ(0x7fffffff, WebRtcSpl_SubSatW32(a32, b32));
+    a32 = 0x7fffffff;
+    b32 = 0x80000000;
+    EXPECT_EQ(0x7fffffff, WebRtcSpl_SubSatW32(a32, b32));
+    a32 = 0x80000000;
+    b32 = 0x7fffffff;
+    EXPECT_EQ(static_cast<int>(0x80000000), WebRtcSpl_SubSatW32(a32, b32));
 
     EXPECT_EQ(0, WebRtcSpl_get_version(bVersion, 8));
 }
 
 TEST_F(SplTest, MathOperationsTest) {
-    int A = 117;
+    int A = 1134567892;
     WebRtc_Word32 num = 117;
     WebRtc_Word32 den = -5;
     WebRtc_UWord16 denU = 5;
-    EXPECT_EQ(10, WebRtcSpl_Sqrt(A));
-    EXPECT_EQ(10, WebRtcSpl_SqrtFloor(A));
+    EXPECT_EQ(33700, WebRtcSpl_Sqrt(A));
+    EXPECT_EQ(33683, WebRtcSpl_SqrtFloor(A));
 
 
     EXPECT_EQ(-91772805, WebRtcSpl_DivResultInQ31(den, num));
@@ -221,31 +264,97 @@
     }
 }
 
+TEST_F(SplTest, ExeptionsHandlingMinMaxOperationsTest) {
+  // Test how the functions handle exceptional cases.
+  const int kVectorSize = 2;
+  int16_t vector16[kVectorSize] = {0};
+  int32_t vector32[kVectorSize] = {0};
+
+  EXPECT_EQ(-1, WebRtcSpl_MaxAbsValueW16(vector16, 0));
+  EXPECT_EQ(-1, WebRtcSpl_MaxAbsValueW16(NULL, kVectorSize));
+  EXPECT_EQ(WEBRTC_SPL_WORD16_MIN, WebRtcSpl_MaxValueW16(vector16, 0));
+  EXPECT_EQ(WEBRTC_SPL_WORD16_MIN, WebRtcSpl_MaxValueW16(NULL, kVectorSize));
+  EXPECT_EQ(WEBRTC_SPL_WORD16_MAX, WebRtcSpl_MinValueW16(vector16, 0));
+  EXPECT_EQ(WEBRTC_SPL_WORD16_MAX, WebRtcSpl_MinValueW16(NULL, kVectorSize));
+  EXPECT_EQ(-1, WebRtcSpl_MaxAbsValueW32(vector32, 0));
+  EXPECT_EQ(-1, WebRtcSpl_MaxAbsValueW32(NULL, kVectorSize));
+  EXPECT_EQ(WEBRTC_SPL_WORD32_MIN, WebRtcSpl_MaxValueW32(vector32, 0));
+  EXPECT_EQ(WEBRTC_SPL_WORD32_MIN, WebRtcSpl_MaxValueW32(NULL, kVectorSize));
+  EXPECT_EQ(WEBRTC_SPL_WORD32_MAX, WebRtcSpl_MinValueW32(vector32, 0));
+  EXPECT_EQ(WEBRTC_SPL_WORD32_MAX, WebRtcSpl_MinValueW32(NULL, kVectorSize));
+  EXPECT_EQ(-1, WebRtcSpl_MaxAbsIndexW16(vector16, 0));
+  EXPECT_EQ(-1, WebRtcSpl_MaxAbsIndexW16(NULL, kVectorSize));
+  EXPECT_EQ(-1, WebRtcSpl_MaxIndexW16(vector16, 0));
+  EXPECT_EQ(-1, WebRtcSpl_MaxIndexW16(NULL, kVectorSize));
+  EXPECT_EQ(-1, WebRtcSpl_MaxIndexW32(vector32, 0));
+  EXPECT_EQ(-1, WebRtcSpl_MaxIndexW32(NULL, kVectorSize));
+  EXPECT_EQ(-1, WebRtcSpl_MinIndexW16(vector16, 0));
+  EXPECT_EQ(-1, WebRtcSpl_MinIndexW16(NULL, kVectorSize));
+  EXPECT_EQ(-1, WebRtcSpl_MinIndexW32(vector32, 0));
+  EXPECT_EQ(-1, WebRtcSpl_MinIndexW32(NULL, kVectorSize));
+}
+
 TEST_F(SplTest, MinMaxOperationsTest) {
-    const int kVectorSize = 4;
-    int B[] = {4, 12, 133, -1100};
-    WebRtc_Word16 b16[kVectorSize];
-    WebRtc_Word32 b32[kVectorSize];
+  const int kVectorSize = 17;
 
-    for (int kk = 0; kk < kVectorSize; ++kk) {
-        b16[kk] = B[kk];
-        b32[kk] = B[kk];
-    }
+  // Vectors to test the cases where minimum values have to be caught
+  // outside of the unrolled loops in ARM-Neon.
+  int16_t vector16[kVectorSize] = {-1, 7485, 0, 3333,
+      -18283, 0, 12334, -29871, 988, -3333,
+      345, -456, 222, 999,  888, 8774, WEBRTC_SPL_WORD16_MIN};
+  int32_t vector32[kVectorSize] = {-1, 0, 283211, 3333,
+      8712345, 0, -3333, 89345, -374585456, 222, 999, 122345334,
+      -12389756, -987329871, 888, -2, WEBRTC_SPL_WORD32_MIN};
 
-    EXPECT_EQ(1100, WebRtcSpl_MaxAbsValueW16(b16, kVectorSize));
-    EXPECT_EQ(1100, WebRtcSpl_MaxAbsValueW32(b32, kVectorSize));
-    EXPECT_EQ(133, WebRtcSpl_MaxValueW16(b16, kVectorSize));
-    EXPECT_EQ(133, WebRtcSpl_MaxValueW32(b32, kVectorSize));
-    EXPECT_EQ(3, WebRtcSpl_MaxAbsIndexW16(b16, kVectorSize));
-    EXPECT_EQ(2, WebRtcSpl_MaxIndexW16(b16, kVectorSize));
-    EXPECT_EQ(2, WebRtcSpl_MaxIndexW32(b32, kVectorSize));
+  EXPECT_EQ(WEBRTC_SPL_WORD16_MIN,
+            WebRtcSpl_MinValueW16(vector16, kVectorSize));
+  EXPECT_EQ(WEBRTC_SPL_WORD32_MIN,
+            WebRtcSpl_MinValueW32(vector32, kVectorSize));
+  EXPECT_EQ(kVectorSize - 1, WebRtcSpl_MinIndexW16(vector16, kVectorSize));
+  EXPECT_EQ(kVectorSize - 1, WebRtcSpl_MinIndexW32(vector32, kVectorSize));
 
-    EXPECT_EQ(-1100, WebRtcSpl_MinValueW16(b16, kVectorSize));
-    EXPECT_EQ(-1100, WebRtcSpl_MinValueW32(b32, kVectorSize));
-    EXPECT_EQ(3, WebRtcSpl_MinIndexW16(b16, kVectorSize));
-    EXPECT_EQ(3, WebRtcSpl_MinIndexW32(b32, kVectorSize));
+  // Test the cases where maximum values have to be caught
+  // outside of the unrolled loops in ARM-Neon.
+  vector16[kVectorSize - 1] = WEBRTC_SPL_WORD16_MAX;
+  vector32[kVectorSize - 1] = WEBRTC_SPL_WORD32_MAX;
 
-    EXPECT_EQ(0, WebRtcSpl_GetScalingSquare(b16, kVectorSize, 1));
+  EXPECT_EQ(WEBRTC_SPL_WORD16_MAX,
+            WebRtcSpl_MaxAbsValueW16(vector16, kVectorSize));
+  EXPECT_EQ(WEBRTC_SPL_WORD16_MAX,
+            WebRtcSpl_MaxValueW16(vector16, kVectorSize));
+  EXPECT_EQ(WEBRTC_SPL_WORD32_MAX,
+            WebRtcSpl_MaxAbsValueW32(vector32, kVectorSize));
+  EXPECT_EQ(WEBRTC_SPL_WORD32_MAX,
+            WebRtcSpl_MaxValueW32(vector32, kVectorSize));
+  EXPECT_EQ(kVectorSize - 1, WebRtcSpl_MaxAbsIndexW16(vector16, kVectorSize));
+  EXPECT_EQ(kVectorSize - 1, WebRtcSpl_MaxIndexW16(vector16, kVectorSize));
+  EXPECT_EQ(kVectorSize - 1, WebRtcSpl_MaxIndexW32(vector32, kVectorSize));
+
+  // Test the cases where multiple maximum and minimum values are present.
+  vector16[1] = WEBRTC_SPL_WORD16_MAX;
+  vector16[6] = WEBRTC_SPL_WORD16_MIN;
+  vector16[11] = WEBRTC_SPL_WORD16_MIN;
+  vector32[1] = WEBRTC_SPL_WORD32_MAX;
+  vector32[6] = WEBRTC_SPL_WORD32_MIN;
+  vector32[11] = WEBRTC_SPL_WORD32_MIN;
+
+  EXPECT_EQ(WEBRTC_SPL_WORD16_MAX,
+            WebRtcSpl_MaxAbsValueW16(vector16, kVectorSize));
+  EXPECT_EQ(WEBRTC_SPL_WORD16_MAX,
+            WebRtcSpl_MaxValueW16(vector16, kVectorSize));
+  EXPECT_EQ(WEBRTC_SPL_WORD16_MIN,
+            WebRtcSpl_MinValueW16(vector16, kVectorSize));
+  EXPECT_EQ(WEBRTC_SPL_WORD32_MAX,
+            WebRtcSpl_MaxAbsValueW32(vector32, kVectorSize));
+  EXPECT_EQ(WEBRTC_SPL_WORD32_MAX,
+            WebRtcSpl_MaxValueW32(vector32, kVectorSize));
+  EXPECT_EQ(WEBRTC_SPL_WORD32_MIN,
+            WebRtcSpl_MinValueW32(vector32, kVectorSize));
+  EXPECT_EQ(6, WebRtcSpl_MaxAbsIndexW16(vector16, kVectorSize));
+  EXPECT_EQ(1, WebRtcSpl_MaxIndexW16(vector16, kVectorSize));
+  EXPECT_EQ(1, WebRtcSpl_MaxIndexW32(vector32, kVectorSize));
+  EXPECT_EQ(6, WebRtcSpl_MinIndexW16(vector16, kVectorSize));
+  EXPECT_EQ(6, WebRtcSpl_MinIndexW32(vector32, kVectorSize));
 }
 
 TEST_F(SplTest, VectorOperationsTest) {
@@ -253,7 +362,6 @@
     int B[] = {4, 12, 133, 1100};
     WebRtc_Word16 a16[kVectorSize];
     WebRtc_Word16 b16[kVectorSize];
-    WebRtc_Word32 b32[kVectorSize];
     WebRtc_Word16 bTmp16[kVectorSize];
 
     for (int kk = 0; kk < kVectorSize; ++kk) {
@@ -275,13 +383,6 @@
         EXPECT_EQ(((B[kk]*3+B[kk]*2+2)>>2)+((b16[kk]*3+7)>>2), bTmp16[kk]);
     }
 
-    WebRtcSpl_CrossCorrelation(b32, b16, bTmp16, kVectorSize, 2, 2, 0);
-    for (int kk = 0; kk < 2; ++kk) {
-        EXPECT_EQ(614236, b32[kk]);
-    }
-//    EXPECT_EQ(, WebRtcSpl_DotProduct(b16, bTmp16, 4));
-    EXPECT_EQ(306962, WebRtcSpl_DotProductWithScale(b16, b16, kVectorSize, 2));
-
     WebRtcSpl_ScaleVector(b16, bTmp16, 13, kVectorSize, 2);
     for (int kk = 0; kk < kVectorSize; ++kk) {
         EXPECT_EQ((b16[kk]*13)>>2, bTmp16[kk]);
@@ -313,6 +414,8 @@
         EXPECT_EQ(32767, bTmp16[kk]);
     }
     EXPECT_EQ(32749, bTmp16[kVectorSize - 1]);
+
+    EXPECT_EQ(0, WebRtcSpl_GetScalingSquare(b16, kVectorSize, 1));
 }
 
 TEST_F(SplTest, EstimatorsTest) {
@@ -332,11 +435,12 @@
 
 TEST_F(SplTest, FilterTest) {
     const int kVectorSize = 4;
+    const int kFilterOrder = 3;
     WebRtc_Word16 A[] = {1, 2, 33, 100};
     WebRtc_Word16 A5[] = {1, 2, 33, 100, -5};
     WebRtc_Word16 B[] = {4, 12, 133, 110};
-    WebRtc_Word16 b16[kVectorSize];
-    WebRtc_Word16 bTmp16[kVectorSize];
+    WebRtc_Word16 data_in[kVectorSize];
+    WebRtc_Word16 data_out[kVectorSize];
     WebRtc_Word16 bTmp16Low[kVectorSize];
     WebRtc_Word16 bState[kVectorSize];
     WebRtc_Word16 bStateLow[kVectorSize];
@@ -345,28 +449,32 @@
     WebRtcSpl_ZerosArrayW16(bStateLow, kVectorSize);
 
     for (int kk = 0; kk < kVectorSize; ++kk) {
-        b16[kk] = A[kk];
+        data_in[kk] = A[kk];
+        data_out[kk] = 0;
     }
 
-    // MA filters
-    WebRtcSpl_FilterMAFastQ12(b16, bTmp16, B, kVectorSize, kVectorSize);
-    for (int kk = 0; kk < kVectorSize; ++kk) {
-        //EXPECT_EQ(aTmp16[kk], bTmp16[kk]);
-    }
-    // AR filters
-    WebRtcSpl_FilterARFastQ12(b16, bTmp16, A, kVectorSize, kVectorSize);
-    for (int kk = 0; kk < kVectorSize; ++kk) {
-//        EXPECT_EQ(aTmp16[kk], bTmp16[kk]);
-    }
+    // MA filters.
+    // Note that the input data has |kFilterOrder| states before the actual
+    // data (one sample).
+    WebRtcSpl_FilterMAFastQ12(&data_in[kFilterOrder], data_out, B,
+                              kFilterOrder + 1, 1);
+    EXPECT_EQ(0, data_out[0]);
+    // AR filters.
+    // Note that the output data has |kFilterOrder| states before the actual
+    // data (one sample).
+    WebRtcSpl_FilterARFastQ12(data_in, &data_out[kFilterOrder], A,
+                              kFilterOrder + 1, 1);
+    EXPECT_EQ(0, data_out[kFilterOrder]);
+
     EXPECT_EQ(kVectorSize, WebRtcSpl_FilterAR(A5,
                                               5,
-                                              b16,
+                                              data_in,
                                               kVectorSize,
                                               bState,
                                               kVectorSize,
                                               bStateLow,
                                               kVectorSize,
-                                              bTmp16,
+                                              data_out,
                                               bTmp16Low,
                                               kVectorSize));
 }
@@ -386,39 +494,79 @@
     }
 }
 
+TEST_F(SplTest, DotProductWithScaleTest) {
+  EXPECT_EQ(605362796, WebRtcSpl_DotProductWithScale(vector16,
+      vector16, kVector16Size, 2));
+}
+
+TEST_F(SplTest, CrossCorrelationTest) {
+  // Note the function arguments relation specificed by API.
+  const int kCrossCorrelationDimension = 3;
+  const int kShift = 2;
+  const int kStep = 1;
+  const int kSeqDimension = 6;
+
+  const int16_t vector16_b[kVector16Size] = {1, 4323, 1963,
+    WEBRTC_SPL_WORD16_MAX, WEBRTC_SPL_WORD16_MIN + 5, -3333, -876, 8483, 142};
+  const int32_t expected[3] = {-266947903, -15579555, -171282001};
+  int32_t vector32[kCrossCorrelationDimension] = {0};
+
+  WebRtcSpl_CrossCorrelation(vector32, vector16, vector16_b, kSeqDimension,
+                             kCrossCorrelationDimension, kShift, kStep);
+
+  for (int i = 0; i < kCrossCorrelationDimension; ++i) {
+    EXPECT_EQ(expected[i], vector32[i]);
+  }
+}
+
+TEST_F(SplTest, AutoCorrelationTest) {
+  int scale = 0;
+  int32_t vector32[kVector16Size];
+  const int32_t expected[kVector16Size] = {302681398, 14223410, -121705063,
+    -85221647, -17104971, 61806945, 6644603, -669329, 43};
+
+  EXPECT_EQ(-1, WebRtcSpl_AutoCorrelation(vector16,
+      kVector16Size, kVector16Size + 1, vector32, &scale));
+  EXPECT_EQ(kVector16Size, WebRtcSpl_AutoCorrelation(vector16,
+      kVector16Size, kVector16Size - 1, vector32, &scale));
+  EXPECT_EQ(3, scale);
+  for (int i = 0; i < kVector16Size; ++i) {
+    EXPECT_EQ(expected[i], vector32[i]);
+  }
+}
+
 TEST_F(SplTest, SignalProcessingTest) {
     const int kVectorSize = 4;
     int A[] = {1, 2, 33, 100};
+    const WebRtc_Word16 kHanning[4] = { 2399, 8192, 13985, 16384 };
     WebRtc_Word16 b16[kVectorSize];
-    WebRtc_Word32 b32[kVectorSize];
 
     WebRtc_Word16 bTmp16[kVectorSize];
-    WebRtc_Word32 bTmp32[kVectorSize];
 
     int bScale = 0;
 
     for (int kk = 0; kk < kVectorSize; ++kk) {
         b16[kk] = A[kk];
-        b32[kk] = A[kk];
     }
 
-    EXPECT_EQ(2, WebRtcSpl_AutoCorrelation(b16, kVectorSize, 1, bTmp32, &bScale));
-    WebRtcSpl_ReflCoefToLpc(b16, kVectorSize, bTmp16);
-//    for (int kk = 0; kk < kVectorSize; ++kk) {
-//        EXPECT_EQ(aTmp16[kk], bTmp16[kk]);
-//    }
-    WebRtcSpl_LpcToReflCoef(bTmp16, kVectorSize, b16);
-//    for (int kk = 0; kk < kVectorSize; ++kk) {
-//        EXPECT_EQ(a16[kk], b16[kk]);
-//    }
-    WebRtcSpl_AutoCorrToReflCoef(b32, kVectorSize, bTmp16);
-//    for (int kk = 0; kk < kVectorSize; ++kk) {
-//        EXPECT_EQ(aTmp16[kk], bTmp16[kk]);
-//    }
+    // TODO(bjornv): Activate the Reflection Coefficient tests when refactoring.
+//    WebRtcSpl_ReflCoefToLpc(b16, kVectorSize, bTmp16);
+////    for (int kk = 0; kk < kVectorSize; ++kk) {
+////        EXPECT_EQ(aTmp16[kk], bTmp16[kk]);
+////    }
+//    WebRtcSpl_LpcToReflCoef(bTmp16, kVectorSize, b16);
+////    for (int kk = 0; kk < kVectorSize; ++kk) {
+////        EXPECT_EQ(a16[kk], b16[kk]);
+////    }
+//    WebRtcSpl_AutoCorrToReflCoef(b32, kVectorSize, bTmp16);
+////    for (int kk = 0; kk < kVectorSize; ++kk) {
+////        EXPECT_EQ(aTmp16[kk], bTmp16[kk]);
+////    }
+
     WebRtcSpl_GetHanningWindow(bTmp16, kVectorSize);
-//    for (int kk = 0; kk < kVectorSize; ++kk) {
-//        EXPECT_EQ(aTmp16[kk], bTmp16[kk]);
-//    }
+    for (int kk = 0; kk < kVectorSize; ++kk) {
+        EXPECT_EQ(kHanning[kk], bTmp16[kk]);
+    }
 
     for (int kk = 0; kk < kVectorSize; ++kk) {
         b16[kk] = A[kk];
diff --git a/src/common_audio/signal_processing/spl_sqrt_floor.c b/src/common_audio/signal_processing/spl_sqrt_floor.c
index 62041b3..f0e8ae2 100644
--- a/src/common_audio/signal_processing/spl_sqrt_floor.c
+++ b/src/common_audio/signal_processing/spl_sqrt_floor.c
@@ -1,6 +1,6 @@
 /*
- * Written by Wilco Dijkstra, 1996.
- * Refer to NOTICE file at the root of git project.
+ * Written by Wilco Dijkstra, 1996. Refer to file LICENSE under
+ * trunk/third_party_mods/sqrt_floor.
  *
  * Minor modifications in code style for WebRTC, 2012.
  */
diff --git a/src/common_audio/signal_processing/spl_sqrt_floor.s b/src/common_audio/signal_processing/spl_sqrt_floor.s
deleted file mode 100644
index 425993d..0000000
--- a/src/common_audio/signal_processing/spl_sqrt_floor.s
+++ /dev/null
@@ -1,88 +0,0 @@
-@ Written by Wilco Dijkstra, 1996.
-@ Refer to NOTICE file at the root of git project.
-@
-@ Minor modifications in code style for WebRTC, 2012.
-@ Output is bit-exact with the reference C code in spl_sqrt_floor.c.
-
-@ Input :             r0 32 bit unsigned integer
-@ Output:             r0 = INT (SQRT (r0)), precision is 16 bits
-@ Registers touched:  r1, r2
-
-.global WebRtcSpl_SqrtFloor
-
-.align  2
-WebRtcSpl_SqrtFloor:
-.fnstart
-  mov    r1, #3 << 30
-  mov    r2, #1 << 30
-
-  @ unroll for i = 0 .. 15
-
-  cmp    r0, r2, ror #2 * 0
-  subhs  r0, r0, r2, ror #2 * 0
-  adc    r2, r1, r2, lsl #1
-
-  cmp    r0, r2, ror #2 * 1
-  subhs  r0, r0, r2, ror #2 * 1
-  adc    r2, r1, r2, lsl #1
-
-  cmp    r0, r2, ror #2 * 2
-  subhs  r0, r0, r2, ror #2 * 2
-  adc    r2, r1, r2, lsl #1
-
-  cmp    r0, r2, ror #2 * 3
-  subhs  r0, r0, r2, ror #2 * 3
-  adc    r2, r1, r2, lsl #1
-
-  cmp    r0, r2, ror #2 * 4
-  subhs  r0, r0, r2, ror #2 * 4
-  adc    r2, r1, r2, lsl #1
-
-  cmp    r0, r2, ror #2 * 5
-  subhs  r0, r0, r2, ror #2 * 5
-  adc    r2, r1, r2, lsl #1
-
-  cmp    r0, r2, ror #2 * 6
-  subhs  r0, r0, r2, ror #2 * 6
-  adc    r2, r1, r2, lsl #1
-
-  cmp    r0, r2, ror #2 * 7
-  subhs  r0, r0, r2, ror #2 * 7
-  adc    r2, r1, r2, lsl #1
-
-  cmp    r0, r2, ror #2 * 8
-  subhs  r0, r0, r2, ror #2 * 8
-  adc    r2, r1, r2, lsl #1
-
-  cmp    r0, r2, ror #2 * 9
-  subhs  r0, r0, r2, ror #2 * 9
-  adc    r2, r1, r2, lsl #1
-
-  cmp    r0, r2, ror #2 * 10
-  subhs  r0, r0, r2, ror #2 * 10
-  adc    r2, r1, r2, lsl #1
-
-  cmp    r0, r2, ror #2 * 11
-  subhs  r0, r0, r2, ror #2 * 11
-  adc    r2, r1, r2, lsl #1
-
-  cmp    r0, r2, ror #2 * 12
-  subhs  r0, r0, r2, ror #2 * 12
-  adc    r2, r1, r2, lsl #1
-
-  cmp    r0, r2, ror #2 * 13
-  subhs  r0, r0, r2, ror #2 * 13
-  adc    r2, r1, r2, lsl #1
-
-  cmp    r0, r2, ror #2 * 14
-  subhs  r0, r0, r2, ror #2 * 14
-  adc    r2, r1, r2, lsl #1
-
-  cmp    r0, r2, ror #2 * 15
-  subhs  r0, r0, r2, ror #2 * 15
-  adc    r2, r1, r2, lsl #1
-
-  bic    r0, r2, #3 << 30  @ for rounding add: cmp r0, r2  adc r2, #1
-  bx lr
-
-.fnend
diff --git a/src/common_audio/signal_processing/spl_sqrt_floor_arm.s b/src/common_audio/signal_processing/spl_sqrt_floor_arm.s
new file mode 100644
index 0000000..cfd9ed0
--- /dev/null
+++ b/src/common_audio/signal_processing/spl_sqrt_floor_arm.s
@@ -0,0 +1,88 @@
+@ Written by Wilco Dijkstra, 1996. Refer to file LICENSE under
+@ trunk/third_party_mods/sqrt_floor.
+@
+@ Minor modifications in code style for WebRTC, 2012.
+@ Output is bit-exact with the reference C code in spl_sqrt_floor.c.
+
+@ Input :             r0 32 bit unsigned integer
+@ Output:             r0 = INT (SQRT (r0)), precision is 16 bits
+@ Registers touched:  r1, r2
+
+.global WebRtcSpl_SqrtFloor
+
+.align  2
+WebRtcSpl_SqrtFloor:
+.fnstart
+  mov    r1, #3 << 30
+  mov    r2, #1 << 30
+
+  @ unroll for i = 0 .. 15
+
+  cmp    r0, r2, ror #2 * 0
+  subhs  r0, r0, r2, ror #2 * 0
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 1
+  subhs  r0, r0, r2, ror #2 * 1
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 2
+  subhs  r0, r0, r2, ror #2 * 2
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 3
+  subhs  r0, r0, r2, ror #2 * 3
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 4
+  subhs  r0, r0, r2, ror #2 * 4
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 5
+  subhs  r0, r0, r2, ror #2 * 5
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 6
+  subhs  r0, r0, r2, ror #2 * 6
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 7
+  subhs  r0, r0, r2, ror #2 * 7
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 8
+  subhs  r0, r0, r2, ror #2 * 8
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 9
+  subhs  r0, r0, r2, ror #2 * 9
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 10
+  subhs  r0, r0, r2, ror #2 * 10
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 11
+  subhs  r0, r0, r2, ror #2 * 11
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 12
+  subhs  r0, r0, r2, ror #2 * 12
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 13
+  subhs  r0, r0, r2, ror #2 * 13
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 14
+  subhs  r0, r0, r2, ror #2 * 14
+  adc    r2, r1, r2, lsl #1
+
+  cmp    r0, r2, ror #2 * 15
+  subhs  r0, r0, r2, ror #2 * 15
+  adc    r2, r1, r2, lsl #1
+
+  bic    r0, r2, #3 << 30  @ for rounding add: cmp r0, r2  adc r2, #1
+  bx lr
+
+.fnend
diff --git a/src/common_audio/signal_processing/vector_scaling_operations.c b/src/common_audio/signal_processing/vector_scaling_operations.c
index 20d239c..91d9671 100644
--- a/src/common_audio/signal_processing/vector_scaling_operations.c
+++ b/src/common_audio/signal_processing/vector_scaling_operations.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -17,9 +17,7 @@
  * WebRtcSpl_ScaleVector()
  * WebRtcSpl_ScaleVectorWithSat()
  * WebRtcSpl_ScaleAndAddVectors()
- *
- * The description header can be found in signal_processing_library.h
- *
+ * WebRtcSpl_ScaleAndAddVectorsWithRound()
  */
 
 #include "signal_processing_library.h"
@@ -149,3 +147,30 @@
                 + (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(gain2, *in2ptr++, shift2);
     }
 }
+
+#ifndef WEBRTC_ARCH_ARM_NEON
+int WebRtcSpl_ScaleAndAddVectorsWithRound(const int16_t* in_vector1,
+                                          int16_t in_vector1_scale,
+                                          const int16_t* in_vector2,
+                                          int16_t in_vector2_scale,
+                                          int right_shifts,
+                                          int16_t* out_vector,
+                                          int length) {
+  int i = 0;
+  int round_value = (1 << right_shifts) >> 1;
+
+  if (in_vector1 == NULL || in_vector2 == NULL || out_vector == NULL ||
+      length <= 0 || right_shifts < 0) {
+    return -1;
+  }
+
+  for (i = 0; i < length; i++) {
+    out_vector[i] = (int16_t)((
+        WEBRTC_SPL_MUL_16_16(in_vector1[i], in_vector1_scale)
+        + WEBRTC_SPL_MUL_16_16(in_vector2[i], in_vector2_scale)
+        + round_value) >> right_shifts);
+  }
+
+  return 0;
+}
+#endif  // WEBRTC_ARCH_ARM_NEON
diff --git a/src/common_audio/signal_processing/vector_scaling_operations_neon.s b/src/common_audio/signal_processing/vector_scaling_operations_neon.s
new file mode 100644
index 0000000..003943b
--- /dev/null
+++ b/src/common_audio/signal_processing/vector_scaling_operations_neon.s
@@ -0,0 +1,88 @@
+@
+@ Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+@
+@ Use of this source code is governed by a BSD-style license
+@ that can be found in the LICENSE file in the root of the source
+@ tree. An additional intellectual property rights grant can be found
+@ in the file PATENTS.  All contributing project authors may
+@ be found in the AUTHORS file in the root of the source tree.
+@
+
+@ vector_scaling_operations_neon.s
+@ This file contains the function WebRtcSpl_ScaleAndAddVectorsWithRound(),
+@ optimized for ARM Neon platform. Output is bit-exact with the reference
+@ C code in vector_scaling_operations.c.
+
+.arch armv7-a
+.fpu neon
+
+.align  2
+.global WebRtcSpl_ScaleAndAddVectorsWithRound
+
+WebRtcSpl_ScaleAndAddVectorsWithRound:
+.fnstart
+
+  push {r4-r9}
+
+  ldr r4, [sp, #32]           @ length
+  ldr r5, [sp, #28]           @ out_vector
+  ldrsh r6, [sp, #24]         @ right_shifts
+
+  cmp r4, #0
+  ble END                     @ Return if length <= 0.
+
+  cmp r4, #8
+  blt SET_ROUND_VALUE
+
+  vdup.16 d26, r1             @ in_vector1_scale
+  vdup.16 d27, r3             @ in_vector2_scale
+
+  @ Neon instructions can only right shift by an immediate value. To shift right
+  @ by a register value, we have to do a left shift left by the negative value.
+  rsb r7, r6, #0
+  vdup.16 q12, r7             @ -right_shifts
+
+  bic r7, r4, #7              @ Counter for LOOP_UNROLLED_BY_8: length / 8 * 8.
+
+LOOP_UNROLLED_BY_8:
+  vld1.16 {d28, d29}, [r0]!   @ in_vector1[]
+  vld1.16 {d30, d31}, [r2]!   @ in_vector2[]
+  vmull.s16 q0, d28, d26
+  vmull.s16 q1, d29, d26
+  vmull.s16 q2, d30, d27
+  vmull.s16 q3, d31, d27
+  vadd.s32 q0, q2
+  vadd.s32 q1, q3
+  vrshl.s32 q0, q12           @ Round shift right by right_shifts.
+  vrshl.s32 q1, q12
+  vmovn.i32 d0, q0            @ Cast to 16 bit values.
+  vmovn.i32 d1, q1
+  subs r7, #8
+  vst1.16 {d0, d1}, [r5]!
+  bgt LOOP_UNROLLED_BY_8
+
+  ands r4, #0xFF              @ Counter for LOOP_NO_UNROLLING: length % 8.
+  beq END
+
+SET_ROUND_VALUE:
+  mov r9, #1
+  lsl r9, r6
+  lsr r9, #1
+
+LOOP_NO_UNROLLING:
+  ldrh  r7, [r0], #2
+  ldrh  r8, [r2], #2
+  smulbb r7, r7, r1
+  smulbb r8, r8, r3
+  subs r4, #1
+  add r7, r9
+  add r7, r8
+  asr r7, r6
+  strh r7, [r5], #2
+  bne LOOP_NO_UNROLLING
+
+END:
+  pop {r4-r9}
+  bx  lr
+
+.fnend
diff --git a/src/common_audio/vad/include/webrtc_vad.h b/src/common_audio/vad/include/webrtc_vad.h
index 6e3eb74..edc7494 100644
--- a/src/common_audio/vad/include/webrtc_vad.h
+++ b/src/common_audio/vad/include/webrtc_vad.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -13,147 +13,78 @@
  * This header file includes the VAD API calls. Specific function calls are given below.
  */
 
-#ifndef WEBRTC_VAD_WEBRTC_VAD_H_
-#define WEBRTC_VAD_WEBRTC_VAD_H_
+#ifndef WEBRTC_COMMON_AUDIO_VAD_INCLUDE_WEBRTC_VAD_H_  // NOLINT
+#define WEBRTC_COMMON_AUDIO_VAD_INCLUDE_WEBRTC_VAD_H_
 
-#include "typedefs.h"
+#include "typedefs.h"  // NOLINT
 
 typedef struct WebRtcVadInst VadInst;
 
 #ifdef __cplusplus
-extern "C"
-{
+extern "C" {
 #endif
 
-/****************************************************************************
- * WebRtcVad_get_version(...)
- *
- * This function returns the version number of the code.
- *
- * Output:
- *      - version       : Pointer to a buffer where the version info will
- *                        be stored.
- * Input:
- *      - size_bytes    : Size of the buffer.
- *
- */
-WebRtc_Word16 WebRtcVad_get_version(char *version, size_t size_bytes);
+// Creates an instance to the VAD structure.
+//
+// - handle [o] : Pointer to the VAD instance that should be created.
+//
+// returns      : 0 - (OK), -1 - (Error)
+int WebRtcVad_Create(VadInst** handle);
 
-/****************************************************************************
- * WebRtcVad_AssignSize(...) 
- *
- * This functions get the size needed for storing the instance for encoder
- * and decoder, respectively
- *
- * Input/Output:
- *      - size_in_bytes : Pointer to integer where the size is returned
- *
- * Return value         : 0
- */
-WebRtc_Word16 WebRtcVad_AssignSize(int *size_in_bytes);
+// Frees the dynamic memory of a specified VAD instance.
+//
+// - handle [i] : Pointer to VAD instance that should be freed.
+//
+// returns      : 0 - (OK), -1 - (NULL pointer in)
+int WebRtcVad_Free(VadInst* handle);
 
-/****************************************************************************
- * WebRtcVad_Assign(...) 
- *
- * This functions Assigns memory for the instances.
- *
- * Input:
- *        - vad_inst_addr :  Address to where to assign memory
- * Output:
- *        - vad_inst      :  Pointer to the instance that should be created
- *
- * Return value           :  0 - Ok
- *                          -1 - Error
- */
-WebRtc_Word16 WebRtcVad_Assign(VadInst **vad_inst, void *vad_inst_addr);
+// Initializes a VAD instance.
+//
+// - handle [i/o] : Instance that should be initialized.
+//
+// returns        : 0 - (OK),
+//                 -1 - (NULL pointer or Default mode could not be set).
+int WebRtcVad_Init(VadInst* handle);
 
-/****************************************************************************
- * WebRtcVad_Create(...)
- *
- * This function creates an instance to the VAD structure
- *
- * Input:
- *      - vad_inst      : Pointer to VAD instance that should be created
- *
- * Output:
- *      - vad_inst      : Pointer to created VAD instance
- *
- * Return value         :  0 - Ok
- *                        -1 - Error
- */
-WebRtc_Word16 WebRtcVad_Create(VadInst **vad_inst);
+// Sets the VAD operating mode. A more aggressive (higher mode) VAD is more
+// restrictive in reporting speech. Put in other words the probability of being
+// speech when the VAD returns 1 is increased with increasing mode. As a
+// consequence also the missed detection rate goes up.
+//
+// - handle [i/o] : VAD instance.
+// - mode   [i]   : Aggressiveness mode (0, 1, 2, or 3).
+//
+// returns        : 0 - (OK),
+//                 -1 - (NULL pointer, mode could not be set or the VAD instance
+//                       has not been initialized).
+int WebRtcVad_set_mode(VadInst* handle, int mode);
 
-/****************************************************************************
- * WebRtcVad_Free(...)
- *
- * This function frees the dynamic memory of a specified VAD instance
- *
- * Input:
- *      - vad_inst      : Pointer to VAD instance that should be freed
- *
- * Return value         :  0 - Ok
- *                        -1 - Error
- */
-WebRtc_Word16 WebRtcVad_Free(VadInst *vad_inst);
+// Calculates a VAD decision for the |audio_frame|. For valid sampling rates
+// frame lengths, see the description of WebRtcVad_ValidRatesAndFrameLengths().
+//
+// - handle       [i/o] : VAD Instance. Needs to be initialized by
+//                        WebRtcVad_Init() before call.
+// - fs           [i]   : Sampling frequency (Hz): 8000, 16000, or 32000
+// - audio_frame  [i]   : Audio frame buffer.
+// - frame_length [i]   : Length of audio frame buffer in number of samples.
+//
+// returns              : 1 - (Active Voice),
+//                        0 - (Non-active Voice),
+//                       -1 - (Error)
+int WebRtcVad_Process(VadInst* handle, int fs, int16_t* audio_frame,
+                      int frame_length);
 
-/****************************************************************************
- * WebRtcVad_Init(...)
- *
- * This function initializes a VAD instance
- *
- * Input:
- *      - vad_inst      : Instance that should be initialized
- *
- * Output:
- *      - vad_inst      : Initialized instance
- *
- * Return value         :  0 - Ok
- *                        -1 - Error
- */
-WebRtc_Word16 WebRtcVad_Init(VadInst *vad_inst);
-
-/****************************************************************************
- * WebRtcVad_set_mode(...)
- *
- * This function initializes a VAD instance
- *
- * Input:
- *      - vad_inst      : VAD instance
- *      - mode          : Aggressiveness setting (0, 1, 2, or 3) 
- *
- * Output:
- *      - vad_inst      : Initialized instance
- *
- * Return value         :  0 - Ok
- *                        -1 - Error
- */
-WebRtc_Word16 WebRtcVad_set_mode(VadInst *vad_inst, WebRtc_Word16 mode);
-
-/****************************************************************************
- * WebRtcVad_Process(...)
- * 
- * This functions does a VAD for the inserted speech frame
- *
- * Input
- *        - vad_inst     : VAD Instance. Needs to be initiated before call.
- *        - fs           : sampling frequency (Hz): 8000, 16000, or 32000
- *        - speech_frame : Pointer to speech frame buffer
- *        - frame_length : Length of speech frame buffer in number of samples
- *
- * Output:
- *        - vad_inst     : Updated VAD instance
- *
- * Return value          :  1 - Active Voice
- *                          0 - Non-active Voice
- *                         -1 - Error
- */
-WebRtc_Word16 WebRtcVad_Process(VadInst *vad_inst,
-                                WebRtc_Word16 fs,
-                                WebRtc_Word16 *speech_frame,
-                                WebRtc_Word16 frame_length);
+// Checks for valid combinations of |rate| and |frame_length|. We support 10,
+// 20 and 30 ms frames and the rates 8000, 16000 and 32000 Hz.
+//
+// - rate         [i] : Sampling frequency (Hz).
+// - frame_length [i] : Speech frame buffer length in number of samples.
+//
+// returns            : 0 - (valid combination), -1 - (invalid combination)
+int WebRtcVad_ValidRateAndFrameLength(int rate, int frame_length);
 
 #ifdef __cplusplus
 }
 #endif
 
-#endif // WEBRTC_VAD_WEBRTC_VAD_H_
+#endif  // WEBRTC_COMMON_AUDIO_VAD_INCLUDE_WEBRTC_VAD_H_  // NOLINT
diff --git a/src/common_audio/vad/vad.gypi b/src/common_audio/vad/vad.gypi
index 4b12db0..5a9466c 100644
--- a/src/common_audio/vad/vad.gypi
+++ b/src/common_audio/vad/vad.gypi
@@ -27,7 +27,6 @@
         'webrtc_vad.c',
         'vad_core.c',
         'vad_core.h',
-        'vad_defines.h',
         'vad_filterbank.c',
         'vad_filterbank.h',
         'vad_gmm.c',
@@ -38,22 +37,27 @@
     },
   ], # targets
    'conditions': [
-    ['build_with_chromium==0', {
+    ['include_tests==1', {
       'targets' : [
         {
           'target_name': 'vad_unittests',
           'type': 'executable',
           'dependencies': [
             'vad',
-            '<(webrtc_root)/../test/test.gyp:test_support_main',
-            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
           ],
           'sources': [
+            'vad_core_unittest.cc',
+            'vad_filterbank_unittest.cc',
+            'vad_gmm_unittest.cc',
+            'vad_sp_unittest.cc',
             'vad_unittest.cc',
+            'vad_unittest.h',
           ],
         }, # vad_unittests
       ], # targets
-    }], # build_with_chromium
+    }], # include_tests
   ], # conditions
 }
 
diff --git a/src/common_audio/vad/vad_core.c b/src/common_audio/vad/vad_core.c
index e05c296..1e9053f 100644
--- a/src/common_audio/vad/vad_core.c
+++ b/src/common_audio/vad/vad_core.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -8,276 +8,604 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-
-/*
- * This file includes the implementation of the core functionality in VAD.
- * For function description, see vad_core.h.
- */
-
 #include "vad_core.h"
 
 #include "signal_processing_library.h"
 #include "typedefs.h"
-#include "vad_defines.h"
 #include "vad_filterbank.h"
 #include "vad_gmm.h"
 #include "vad_sp.h"
 
 // Spectrum Weighting
-static const WebRtc_Word16 kSpectrumWeight[6] = { 6, 8, 10, 12, 14, 16 };
-static const WebRtc_Word16 kNoiseUpdateConst = 655; // Q15
-static const WebRtc_Word16 kSpeechUpdateConst = 6554; // Q15
-static const WebRtc_Word16 kBackEta = 154; // Q8
+static const int16_t kSpectrumWeight[kNumChannels] = { 6, 8, 10, 12, 14, 16 };
+static const int16_t kNoiseUpdateConst = 655; // Q15
+static const int16_t kSpeechUpdateConst = 6554; // Q15
+static const int16_t kBackEta = 154; // Q8
 // Minimum difference between the two models, Q5
-static const WebRtc_Word16 kMinimumDifference[6] = {
+static const int16_t kMinimumDifference[kNumChannels] = {
     544, 544, 576, 576, 576, 576 };
 // Upper limit of mean value for speech model, Q7
-static const WebRtc_Word16 kMaximumSpeech[6] = {
+static const int16_t kMaximumSpeech[kNumChannels] = {
     11392, 11392, 11520, 11520, 11520, 11520 };
 // Minimum value for mean value
-static const WebRtc_Word16 kMinimumMean[2] = { 640, 768 };
+static const int16_t kMinimumMean[kNumGaussians] = { 640, 768 };
 // Upper limit of mean value for noise model, Q7
-static const WebRtc_Word16 kMaximumNoise[6] = {
+static const int16_t kMaximumNoise[kNumChannels] = {
     9216, 9088, 8960, 8832, 8704, 8576 };
 // Start values for the Gaussian models, Q7
 // Weights for the two Gaussians for the six channels (noise)
-static const WebRtc_Word16 kNoiseDataWeights[12] = {
+static const int16_t kNoiseDataWeights[kTableSize] = {
     34, 62, 72, 66, 53, 25, 94, 66, 56, 62, 75, 103 };
 // Weights for the two Gaussians for the six channels (speech)
-static const WebRtc_Word16 kSpeechDataWeights[12] = {
+static const int16_t kSpeechDataWeights[kTableSize] = {
     48, 82, 45, 87, 50, 47, 80, 46, 83, 41, 78, 81 };
 // Means for the two Gaussians for the six channels (noise)
-static const WebRtc_Word16 kNoiseDataMeans[12] = {
+static const int16_t kNoiseDataMeans[kTableSize] = {
     6738, 4892, 7065, 6715, 6771, 3369, 7646, 3863, 7820, 7266, 5020, 4362 };
 // Means for the two Gaussians for the six channels (speech)
-static const WebRtc_Word16 kSpeechDataMeans[12] = {
+static const int16_t kSpeechDataMeans[kTableSize] = {
     8306, 10085, 10078, 11823, 11843, 6309, 9473, 9571, 10879, 7581, 8180, 7483
 };
 // Stds for the two Gaussians for the six channels (noise)
-static const WebRtc_Word16 kNoiseDataStds[12] = {
+static const int16_t kNoiseDataStds[kTableSize] = {
     378, 1064, 493, 582, 688, 593, 474, 697, 475, 688, 421, 455 };
 // Stds for the two Gaussians for the six channels (speech)
-static const WebRtc_Word16 kSpeechDataStds[12] = {
+static const int16_t kSpeechDataStds[kTableSize] = {
     555, 505, 567, 524, 585, 1231, 509, 828, 492, 1540, 1079, 850 };
 
+// Constants used in GmmProbability().
+//
+// Maximum number of counted speech (VAD = 1) frames in a row.
+static const int16_t kMaxSpeechFrames = 6;
+// Minimum standard deviation for both speech and noise.
+static const int16_t kMinStd = 384;
+
+// Constants in WebRtcVad_InitCore().
+// Default aggressiveness mode.
+static const short kDefaultMode = 0;
 static const int kInitCheck = 42;
 
-// Initialize VAD
-int WebRtcVad_InitCore(VadInstT *inst, short mode)
-{
-    int i;
+// Constants used in WebRtcVad_set_mode_core().
+//
+// Thresholds for different frame lengths (10 ms, 20 ms and 30 ms).
+//
+// Mode 0, Quality.
+static const int16_t kOverHangMax1Q[3] = { 8, 4, 3 };
+static const int16_t kOverHangMax2Q[3] = { 14, 7, 5 };
+static const int16_t kLocalThresholdQ[3] = { 24, 21, 24 };
+static const int16_t kGlobalThresholdQ[3] = { 57, 48, 57 };
+// Mode 1, Low bitrate.
+static const int16_t kOverHangMax1LBR[3] = { 8, 4, 3 };
+static const int16_t kOverHangMax2LBR[3] = { 14, 7, 5 };
+static const int16_t kLocalThresholdLBR[3] = { 37, 32, 37 };
+static const int16_t kGlobalThresholdLBR[3] = { 100, 80, 100 };
+// Mode 2, Aggressive.
+static const int16_t kOverHangMax1AGG[3] = { 6, 3, 2 };
+static const int16_t kOverHangMax2AGG[3] = { 9, 5, 3 };
+static const int16_t kLocalThresholdAGG[3] = { 82, 78, 82 };
+static const int16_t kGlobalThresholdAGG[3] = { 285, 260, 285 };
+// Mode 3, Very aggressive.
+static const int16_t kOverHangMax1VAG[3] = { 6, 3, 2 };
+static const int16_t kOverHangMax2VAG[3] = { 9, 5, 3 };
+static const int16_t kLocalThresholdVAG[3] = { 94, 94, 94 };
+static const int16_t kGlobalThresholdVAG[3] = { 1100, 1050, 1100 };
 
-    // Initialization of struct
-    inst->vad = 1;
-    inst->frame_counter = 0;
-    inst->over_hang = 0;
-    inst->num_of_speech = 0;
+// Calculates the weighted average w.r.t. number of Gaussians. The |data| are
+// updated with an |offset| before averaging.
+//
+// - data     [i/o] : Data to average.
+// - offset   [i]   : An offset added to |data|.
+// - weights  [i]   : Weights used for averaging.
+//
+// returns          : The weighted average.
+static int32_t WeightedAverage(int16_t* data, int16_t offset,
+                               const int16_t* weights) {
+  int k;
+  int32_t weighted_average = 0;
 
-    // Initialization of downsampling filter state
-    inst->downsampling_filter_states[0] = 0;
-    inst->downsampling_filter_states[1] = 0;
-    inst->downsampling_filter_states[2] = 0;
-    inst->downsampling_filter_states[3] = 0;
+  for (k = 0; k < kNumGaussians; k++) {
+    data[k * kNumChannels] += offset;
+    weighted_average += data[k * kNumChannels] * weights[k * kNumChannels];
+  }
+  return weighted_average;
+}
 
-    // Read initial PDF parameters
-    for (i = 0; i < NUM_TABLE_VALUES; i++)
-    {
-        inst->noise_means[i] = kNoiseDataMeans[i];
-        inst->speech_means[i] = kSpeechDataMeans[i];
-        inst->noise_stds[i] = kNoiseDataStds[i];
-        inst->speech_stds[i] = kSpeechDataStds[i];
+// Calculates the probabilities for both speech and background noise using
+// Gaussian Mixture Models (GMM). A hypothesis-test is performed to decide which
+// type of signal is most probable.
+//
+// - self           [i/o] : Pointer to VAD instance
+// - features       [i]   : Feature vector of length |kNumChannels|
+//                          = log10(energy in frequency band)
+// - total_power    [i]   : Total power in audio frame.
+// - frame_length   [i]   : Number of input samples
+//
+// - returns              : the VAD decision (0 - noise, 1 - speech).
+static int16_t GmmProbability(VadInstT* self, int16_t* features,
+                              int16_t total_power, int frame_length) {
+  int channel, k;
+  int16_t feature_minimum;
+  int16_t h0, h1;
+  int16_t log_likelihood_ratio;
+  int16_t vadflag = 0;
+  int16_t shifts_h0, shifts_h1;
+  int16_t tmp_s16, tmp1_s16, tmp2_s16;
+  int16_t diff;
+  int gaussian;
+  int16_t nmk, nmk2, nmk3, smk, smk2, nsk, ssk;
+  int16_t delt, ndelt;
+  int16_t maxspe, maxmu;
+  int16_t deltaN[kTableSize], deltaS[kTableSize];
+  int16_t ngprvec[kTableSize] = { 0 };  // Conditional probability = 0.
+  int16_t sgprvec[kTableSize] = { 0 };  // Conditional probability = 0.
+  int32_t h0_test, h1_test;
+  int32_t tmp1_s32, tmp2_s32;
+  int32_t sum_log_likelihood_ratios = 0;
+  int32_t noise_global_mean, speech_global_mean;
+  int32_t noise_probability[kNumGaussians], speech_probability[kNumGaussians];
+  int16_t overhead1, overhead2, individualTest, totalTest;
+
+  // Set various thresholds based on frame lengths (80, 160 or 240 samples).
+  if (frame_length == 80) {
+    overhead1 = self->over_hang_max_1[0];
+    overhead2 = self->over_hang_max_2[0];
+    individualTest = self->individual[0];
+    totalTest = self->total[0];
+  } else if (frame_length == 160) {
+    overhead1 = self->over_hang_max_1[1];
+    overhead2 = self->over_hang_max_2[1];
+    individualTest = self->individual[1];
+    totalTest = self->total[1];
+  } else {
+    overhead1 = self->over_hang_max_1[2];
+    overhead2 = self->over_hang_max_2[2];
+    individualTest = self->individual[2];
+    totalTest = self->total[2];
+  }
+
+  if (total_power > kMinEnergy) {
+    // The signal power of current frame is large enough for processing. The
+    // processing consists of two parts:
+    // 1) Calculating the likelihood of speech and thereby a VAD decision.
+    // 2) Updating the underlying model, w.r.t., the decision made.
+
+    // The detection scheme is an LRT with hypothesis
+    // H0: Noise
+    // H1: Speech
+    //
+    // We combine a global LRT with local tests, for each frequency sub-band,
+    // here defined as |channel|.
+    for (channel = 0; channel < kNumChannels; channel++) {
+      // For each channel we model the probability with a GMM consisting of
+      // |kNumGaussians|, with different means and standard deviations depending
+      // on H0 or H1.
+      h0_test = 0;
+      h1_test = 0;
+      for (k = 0; k < kNumGaussians; k++) {
+        gaussian = channel + k * kNumChannels;
+        // Probability under H0, that is, probability of frame being noise.
+        // Value given in Q27 = Q7 * Q20.
+        tmp1_s32 = WebRtcVad_GaussianProbability(features[channel],
+                                                 self->noise_means[gaussian],
+                                                 self->noise_stds[gaussian],
+                                                 &deltaN[gaussian]);
+        noise_probability[k] = kNoiseDataWeights[gaussian] * tmp1_s32;
+        h0_test += noise_probability[k];  // Q27
+
+        // Probability under H1, that is, probability of frame being speech.
+        // Value given in Q27 = Q7 * Q20.
+        tmp1_s32 = WebRtcVad_GaussianProbability(features[channel],
+                                                 self->speech_means[gaussian],
+                                                 self->speech_stds[gaussian],
+                                                 &deltaS[gaussian]);
+        speech_probability[k] = kSpeechDataWeights[gaussian] * tmp1_s32;
+        h1_test += speech_probability[k];  // Q27
+      }
+
+      // Calculate the log likelihood ratio: log2(Pr{X|H1} / Pr{X|H1}).
+      // Approximation:
+      // log2(Pr{X|H1} / Pr{X|H1}) = log2(Pr{X|H1}*2^Q) - log2(Pr{X|H1}*2^Q)
+      //                           = log2(h1_test) - log2(h0_test)
+      //                           = log2(2^(31-shifts_h1)*(1+b1))
+      //                             - log2(2^(31-shifts_h0)*(1+b0))
+      //                           = shifts_h0 - shifts_h1
+      //                             + log2(1+b1) - log2(1+b0)
+      //                          ~= shifts_h0 - shifts_h1
+      //
+      // Note that b0 and b1 are values less than 1, hence, 0 <= log2(1+b0) < 1.
+      // Further, b0 and b1 are independent and on the average the two terms
+      // cancel.
+      shifts_h0 = WebRtcSpl_NormW32(h0_test);
+      shifts_h1 = WebRtcSpl_NormW32(h1_test);
+      if (h0_test == 0) {
+        shifts_h0 = 31;
+      }
+      if (h1_test == 0) {
+        shifts_h1 = 31;
+      }
+      log_likelihood_ratio = shifts_h0 - shifts_h1;
+
+      // Update |sum_log_likelihood_ratios| with spectrum weighting. This is
+      // used for the global VAD decision.
+      sum_log_likelihood_ratios +=
+          (int32_t) (log_likelihood_ratio * kSpectrumWeight[channel]);
+
+      // Local VAD decision.
+      if ((log_likelihood_ratio << 2) > individualTest) {
+        vadflag = 1;
+      }
+
+      // TODO(bjornv): The conditional probabilities below are applied on the
+      // hard coded number of Gaussians set to two. Find a way to generalize.
+      // Calculate local noise probabilities used later when updating the GMM.
+      h0 = (int16_t) (h0_test >> 12);  // Q15
+      if (h0 > 0) {
+        // High probability of noise. Assign conditional probabilities for each
+        // Gaussian in the GMM.
+        tmp1_s32 = (noise_probability[0] & 0xFFFFF000) << 2;  // Q29
+        ngprvec[channel] = (int16_t) WebRtcSpl_DivW32W16(tmp1_s32, h0);  // Q14
+        ngprvec[channel + kNumChannels] = 16384 - ngprvec[channel];
+      } else {
+        // Low noise probability. Assign conditional probability 1 to the first
+        // Gaussian and 0 to the rest (which is already set at initialization).
+        ngprvec[channel] = 16384;
+      }
+
+      // Calculate local speech probabilities used later when updating the GMM.
+      h1 = (int16_t) (h1_test >> 12);  // Q15
+      if (h1 > 0) {
+        // High probability of speech. Assign conditional probabilities for each
+        // Gaussian in the GMM. Otherwise use the initialized values, i.e., 0.
+        tmp1_s32 = (speech_probability[0] & 0xFFFFF000) << 2;  // Q29
+        sgprvec[channel] = (int16_t) WebRtcSpl_DivW32W16(tmp1_s32, h1);  // Q14
+        sgprvec[channel + kNumChannels] = 16384 - sgprvec[channel];
+      }
     }
 
-    // Index and Minimum value vectors are initialized
-    for (i = 0; i < 16 * NUM_CHANNELS; i++)
-    {
-        inst->low_value_vector[i] = 10000;
-        inst->index_vector[i] = 0;
+    // Make a global VAD decision.
+    vadflag |= (sum_log_likelihood_ratios >= totalTest);
+
+    // Update the model parameters.
+    maxspe = 12800;
+    for (channel = 0; channel < kNumChannels; channel++) {
+
+      // Get minimum value in past which is used for long term correction in Q4.
+      feature_minimum = WebRtcVad_FindMinimum(self, features[channel], channel);
+
+      // Compute the "global" mean, that is the sum of the two means weighted.
+      noise_global_mean = WeightedAverage(&self->noise_means[channel], 0,
+                                          &kNoiseDataWeights[channel]);
+      tmp1_s16 = (int16_t) (noise_global_mean >> 6);  // Q8
+
+      for (k = 0; k < kNumGaussians; k++) {
+        gaussian = channel + k * kNumChannels;
+
+        nmk = self->noise_means[gaussian];
+        smk = self->speech_means[gaussian];
+        nsk = self->noise_stds[gaussian];
+        ssk = self->speech_stds[gaussian];
+
+        // Update noise mean vector if the frame consists of noise only.
+        nmk2 = nmk;
+        if (!vadflag) {
+          // deltaN = (x-mu)/sigma^2
+          // ngprvec[k] = |noise_probability[k]| /
+          //   (|noise_probability[0]| + |noise_probability[1]|)
+
+          // (Q14 * Q11 >> 11) = Q14.
+          delt = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(ngprvec[gaussian],
+                                                     deltaN[gaussian],
+                                                     11);
+          // Q7 + (Q14 * Q15 >> 22) = Q7.
+          nmk2 = nmk + (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(delt,
+                                                           kNoiseUpdateConst,
+                                                           22);
+        }
+
+        // Long term correction of the noise mean.
+        // Q8 - Q8 = Q8.
+        ndelt = (feature_minimum << 4) - tmp1_s16;
+        // Q7 + (Q8 * Q8) >> 9 = Q7.
+        nmk3 = nmk2 + (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(ndelt, kBackEta, 9);
+
+        // Control that the noise mean does not drift to much.
+        tmp_s16 = (int16_t) ((k + 5) << 7);
+        if (nmk3 < tmp_s16) {
+          nmk3 = tmp_s16;
+        }
+        tmp_s16 = (int16_t) ((72 + k - channel) << 7);
+        if (nmk3 > tmp_s16) {
+          nmk3 = tmp_s16;
+        }
+        self->noise_means[gaussian] = nmk3;
+
+        if (vadflag) {
+          // Update speech mean vector:
+          // |deltaS| = (x-mu)/sigma^2
+          // sgprvec[k] = |speech_probability[k]| /
+          //   (|speech_probability[0]| + |speech_probability[1]|)
+
+          // (Q14 * Q11) >> 11 = Q14.
+          delt = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(sgprvec[gaussian],
+                                                     deltaS[gaussian],
+                                                     11);
+          // Q14 * Q15 >> 21 = Q8.
+          tmp_s16 = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(delt,
+                                                        kSpeechUpdateConst,
+                                                        21);
+          // Q7 + (Q8 >> 1) = Q7. With rounding.
+          smk2 = smk + ((tmp_s16 + 1) >> 1);
+
+          // Control that the speech mean does not drift to much.
+          maxmu = maxspe + 640;
+          if (smk2 < kMinimumMean[k]) {
+            smk2 = kMinimumMean[k];
+          }
+          if (smk2 > maxmu) {
+            smk2 = maxmu;
+          }
+          self->speech_means[gaussian] = smk2;  // Q7.
+
+          // (Q7 >> 3) = Q4. With rounding.
+          tmp_s16 = ((smk + 4) >> 3);
+
+          tmp_s16 = features[channel] - tmp_s16;  // Q4
+          // (Q11 * Q4 >> 3) = Q12.
+          tmp1_s32 = WEBRTC_SPL_MUL_16_16_RSFT(deltaS[gaussian], tmp_s16, 3);
+          tmp2_s32 = tmp1_s32 - 4096;
+          tmp_s16 = sgprvec[gaussian] >> 2;
+          // (Q14 >> 2) * Q12 = Q24.
+          tmp1_s32 = tmp_s16 * tmp2_s32;
+
+          tmp2_s32 = tmp1_s32 >> 4;  // Q20
+
+          // 0.1 * Q20 / Q7 = Q13.
+          if (tmp2_s32 > 0) {
+            tmp_s16 = (int16_t) WebRtcSpl_DivW32W16(tmp2_s32, ssk * 10);
+          } else {
+            tmp_s16 = (int16_t) WebRtcSpl_DivW32W16(-tmp2_s32, ssk * 10);
+            tmp_s16 = -tmp_s16;
+          }
+          // Divide by 4 giving an update factor of 0.025 (= 0.1 / 4).
+          // Note that division by 4 equals shift by 2, hence,
+          // (Q13 >> 8) = (Q13 >> 6) / 4 = Q7.
+          tmp_s16 += 128;  // Rounding.
+          ssk += (tmp_s16 >> 8);
+          if (ssk < kMinStd) {
+            ssk = kMinStd;
+          }
+          self->speech_stds[gaussian] = ssk;
+        } else {
+          // Update GMM variance vectors.
+          // deltaN * (features[channel] - nmk) - 1
+          // Q4 - (Q7 >> 3) = Q4.
+          tmp_s16 = features[channel] - (nmk >> 3);
+          // (Q11 * Q4 >> 3) = Q12.
+          tmp1_s32 = WEBRTC_SPL_MUL_16_16_RSFT(deltaN[gaussian], tmp_s16, 3);
+          tmp1_s32 -= 4096;
+
+          // (Q14 >> 2) * Q12 = Q24.
+          tmp_s16 = (ngprvec[gaussian] + 2) >> 2;
+          tmp2_s32 = tmp_s16 * tmp1_s32;
+          // Q20  * approx 0.001 (2^-10=0.0009766), hence,
+          // (Q24 >> 14) = (Q24 >> 4) / 2^10 = Q20.
+          tmp1_s32 = tmp2_s32 >> 14;
+
+          // Q20 / Q7 = Q13.
+          if (tmp1_s32 > 0) {
+            tmp_s16 = (int16_t) WebRtcSpl_DivW32W16(tmp1_s32, nsk);
+          } else {
+            tmp_s16 = (int16_t) WebRtcSpl_DivW32W16(-tmp1_s32, nsk);
+            tmp_s16 = -tmp_s16;
+          }
+          tmp_s16 += 32;  // Rounding
+          nsk += tmp_s16 >> 6;  // Q13 >> 6 = Q7.
+          if (nsk < kMinStd) {
+            nsk = kMinStd;
+          }
+          self->noise_stds[gaussian] = nsk;
+        }
+      }
+
+      // Separate models if they are too close.
+      // |noise_global_mean| in Q14 (= Q7 * Q7).
+      noise_global_mean = WeightedAverage(&self->noise_means[channel], 0,
+                                          &kNoiseDataWeights[channel]);
+
+      // |speech_global_mean| in Q14 (= Q7 * Q7).
+      speech_global_mean = WeightedAverage(&self->speech_means[channel], 0,
+                                           &kSpeechDataWeights[channel]);
+
+      // |diff| = "global" speech mean - "global" noise mean.
+      // (Q14 >> 9) - (Q14 >> 9) = Q5.
+      diff = (int16_t) (speech_global_mean >> 9) -
+          (int16_t) (noise_global_mean >> 9);
+      if (diff < kMinimumDifference[channel]) {
+        tmp_s16 = kMinimumDifference[channel] - diff;
+
+        // |tmp1_s16| = ~0.8 * (kMinimumDifference - diff) in Q7.
+        // |tmp2_s16| = ~0.2 * (kMinimumDifference - diff) in Q7.
+        tmp1_s16 = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(13, tmp_s16, 2);
+        tmp2_s16 = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(3, tmp_s16, 2);
+
+        // Move Gaussian means for speech model by |tmp1_s16| and update
+        // |speech_global_mean|. Note that |self->speech_means[channel]| is
+        // changed after the call.
+        speech_global_mean = WeightedAverage(&self->speech_means[channel],
+                                             tmp1_s16,
+                                             &kSpeechDataWeights[channel]);
+
+        // Move Gaussian means for noise model by -|tmp2_s16| and update
+        // |noise_global_mean|. Note that |self->noise_means[channel]| is
+        // changed after the call.
+        noise_global_mean = WeightedAverage(&self->noise_means[channel],
+                                            -tmp2_s16,
+                                            &kNoiseDataWeights[channel]);
+      }
+
+      // Control that the speech & noise means do not drift to much.
+      maxspe = kMaximumSpeech[channel];
+      tmp2_s16 = (int16_t) (speech_global_mean >> 7);
+      if (tmp2_s16 > maxspe) {
+        // Upper limit of speech model.
+        tmp2_s16 -= maxspe;
+
+        for (k = 0; k < kNumGaussians; k++) {
+          self->speech_means[channel + k * kNumChannels] -= tmp2_s16;
+        }
+      }
+
+      tmp2_s16 = (int16_t) (noise_global_mean >> 7);
+      if (tmp2_s16 > kMaximumNoise[channel]) {
+        tmp2_s16 -= kMaximumNoise[channel];
+
+        for (k = 0; k < kNumGaussians; k++) {
+          self->noise_means[channel + k * kNumChannels] -= tmp2_s16;
+        }
+      }
     }
+    self->frame_counter++;
+  }
 
-    for (i = 0; i < 5; i++)
-    {
-        inst->upper_state[i] = 0;
-        inst->lower_state[i] = 0;
+  // Smooth with respect to transition hysteresis.
+  if (!vadflag) {
+    if (self->over_hang > 0) {
+      vadflag = 2 + self->over_hang;
+      self->over_hang--;
     }
-
-    for (i = 0; i < 4; i++)
-    {
-        inst->hp_filter_state[i] = 0;
+    self->num_of_speech = 0;
+  } else {
+    self->num_of_speech++;
+    if (self->num_of_speech > kMaxSpeechFrames) {
+      self->num_of_speech = kMaxSpeechFrames;
+      self->over_hang = overhead2;
+    } else {
+      self->over_hang = overhead1;
     }
+  }
+  return vadflag;
+}
 
-    // Init mean value memory, for FindMin function
-    inst->mean_value[0] = 1600;
-    inst->mean_value[1] = 1600;
-    inst->mean_value[2] = 1600;
-    inst->mean_value[3] = 1600;
-    inst->mean_value[4] = 1600;
-    inst->mean_value[5] = 1600;
+// Initialize the VAD. Set aggressiveness mode to default value.
+int WebRtcVad_InitCore(VadInstT* self) {
+  int i;
 
-    if (mode == 0)
-    {
-        // Quality mode
-        inst->over_hang_max_1[0] = OHMAX1_10MS_Q; // Overhang short speech burst
-        inst->over_hang_max_1[1] = OHMAX1_20MS_Q; // Overhang short speech burst
-        inst->over_hang_max_1[2] = OHMAX1_30MS_Q; // Overhang short speech burst
-        inst->over_hang_max_2[0] = OHMAX2_10MS_Q; // Overhang long speech burst
-        inst->over_hang_max_2[1] = OHMAX2_20MS_Q; // Overhang long speech burst
-        inst->over_hang_max_2[2] = OHMAX2_30MS_Q; // Overhang long speech burst
+  if (self == NULL) {
+    return -1;
+  }
 
-        inst->individual[0] = INDIVIDUAL_10MS_Q;
-        inst->individual[1] = INDIVIDUAL_20MS_Q;
-        inst->individual[2] = INDIVIDUAL_30MS_Q;
+  // Initialization of general struct variables.
+  self->vad = 1;  // Speech active (=1).
+  self->frame_counter = 0;
+  self->over_hang = 0;
+  self->num_of_speech = 0;
 
-        inst->total[0] = TOTAL_10MS_Q;
-        inst->total[1] = TOTAL_20MS_Q;
-        inst->total[2] = TOTAL_30MS_Q;
-    } else if (mode == 1)
-    {
-        // Low bitrate mode
-        inst->over_hang_max_1[0] = OHMAX1_10MS_LBR; // Overhang short speech burst
-        inst->over_hang_max_1[1] = OHMAX1_20MS_LBR; // Overhang short speech burst
-        inst->over_hang_max_1[2] = OHMAX1_30MS_LBR; // Overhang short speech burst
-        inst->over_hang_max_2[0] = OHMAX2_10MS_LBR; // Overhang long speech burst
-        inst->over_hang_max_2[1] = OHMAX2_20MS_LBR; // Overhang long speech burst
-        inst->over_hang_max_2[2] = OHMAX2_30MS_LBR; // Overhang long speech burst
+  // Initialization of downsampling filter state.
+  memset(self->downsampling_filter_states, 0,
+         sizeof(self->downsampling_filter_states));
 
-        inst->individual[0] = INDIVIDUAL_10MS_LBR;
-        inst->individual[1] = INDIVIDUAL_20MS_LBR;
-        inst->individual[2] = INDIVIDUAL_30MS_LBR;
+  // Read initial PDF parameters.
+  for (i = 0; i < kTableSize; i++) {
+    self->noise_means[i] = kNoiseDataMeans[i];
+    self->speech_means[i] = kSpeechDataMeans[i];
+    self->noise_stds[i] = kNoiseDataStds[i];
+    self->speech_stds[i] = kSpeechDataStds[i];
+  }
 
-        inst->total[0] = TOTAL_10MS_LBR;
-        inst->total[1] = TOTAL_20MS_LBR;
-        inst->total[2] = TOTAL_30MS_LBR;
-    } else if (mode == 2)
-    {
-        // Aggressive mode
-        inst->over_hang_max_1[0] = OHMAX1_10MS_AGG; // Overhang short speech burst
-        inst->over_hang_max_1[1] = OHMAX1_20MS_AGG; // Overhang short speech burst
-        inst->over_hang_max_1[2] = OHMAX1_30MS_AGG; // Overhang short speech burst
-        inst->over_hang_max_2[0] = OHMAX2_10MS_AGG; // Overhang long speech burst
-        inst->over_hang_max_2[1] = OHMAX2_20MS_AGG; // Overhang long speech burst
-        inst->over_hang_max_2[2] = OHMAX2_30MS_AGG; // Overhang long speech burst
+  // Initialize Index and Minimum value vectors.
+  for (i = 0; i < 16 * kNumChannels; i++) {
+    self->low_value_vector[i] = 10000;
+    self->index_vector[i] = 0;
+  }
 
-        inst->individual[0] = INDIVIDUAL_10MS_AGG;
-        inst->individual[1] = INDIVIDUAL_20MS_AGG;
-        inst->individual[2] = INDIVIDUAL_30MS_AGG;
+  // Initialize splitting filter states.
+  memset(self->upper_state, 0, sizeof(self->upper_state));
+  memset(self->lower_state, 0, sizeof(self->lower_state));
 
-        inst->total[0] = TOTAL_10MS_AGG;
-        inst->total[1] = TOTAL_20MS_AGG;
-        inst->total[2] = TOTAL_30MS_AGG;
-    } else
-    {
-        // Very aggressive mode
-        inst->over_hang_max_1[0] = OHMAX1_10MS_VAG; // Overhang short speech burst
-        inst->over_hang_max_1[1] = OHMAX1_20MS_VAG; // Overhang short speech burst
-        inst->over_hang_max_1[2] = OHMAX1_30MS_VAG; // Overhang short speech burst
-        inst->over_hang_max_2[0] = OHMAX2_10MS_VAG; // Overhang long speech burst
-        inst->over_hang_max_2[1] = OHMAX2_20MS_VAG; // Overhang long speech burst
-        inst->over_hang_max_2[2] = OHMAX2_30MS_VAG; // Overhang long speech burst
+  // Initialize high pass filter states.
+  memset(self->hp_filter_state, 0, sizeof(self->hp_filter_state));
 
-        inst->individual[0] = INDIVIDUAL_10MS_VAG;
-        inst->individual[1] = INDIVIDUAL_20MS_VAG;
-        inst->individual[2] = INDIVIDUAL_30MS_VAG;
+  // Initialize mean value memory, for WebRtcVad_FindMinimum().
+  for (i = 0; i < kNumChannels; i++) {
+    self->mean_value[i] = 1600;
+  }
 
-        inst->total[0] = TOTAL_10MS_VAG;
-        inst->total[1] = TOTAL_20MS_VAG;
-        inst->total[2] = TOTAL_30MS_VAG;
-    }
+  // Set aggressiveness mode to default (=|kDefaultMode|).
+  if (WebRtcVad_set_mode_core(self, kDefaultMode) != 0) {
+    return -1;
+  }
 
-    inst->init_flag = kInitCheck;
+  self->init_flag = kInitCheck;
 
-    return 0;
+  return 0;
 }
 
 // Set aggressiveness mode
-int WebRtcVad_set_mode_core(VadInstT *inst, short mode)
-{
+int WebRtcVad_set_mode_core(VadInstT* self, int mode) {
+  int return_value = 0;
 
-    if (mode == 0)
-    {
-        // Quality mode
-        inst->over_hang_max_1[0] = OHMAX1_10MS_Q; // Overhang short speech burst
-        inst->over_hang_max_1[1] = OHMAX1_20MS_Q; // Overhang short speech burst
-        inst->over_hang_max_1[2] = OHMAX1_30MS_Q; // Overhang short speech burst
-        inst->over_hang_max_2[0] = OHMAX2_10MS_Q; // Overhang long speech burst
-        inst->over_hang_max_2[1] = OHMAX2_20MS_Q; // Overhang long speech burst
-        inst->over_hang_max_2[2] = OHMAX2_30MS_Q; // Overhang long speech burst
+  switch (mode) {
+    case 0:
+      // Quality mode.
+      memcpy(self->over_hang_max_1, kOverHangMax1Q,
+             sizeof(self->over_hang_max_1));
+      memcpy(self->over_hang_max_2, kOverHangMax2Q,
+             sizeof(self->over_hang_max_2));
+      memcpy(self->individual, kLocalThresholdQ,
+             sizeof(self->individual));
+      memcpy(self->total, kGlobalThresholdQ,
+             sizeof(self->total));
+      break;
+    case 1:
+      // Low bitrate mode.
+      memcpy(self->over_hang_max_1, kOverHangMax1LBR,
+             sizeof(self->over_hang_max_1));
+      memcpy(self->over_hang_max_2, kOverHangMax2LBR,
+             sizeof(self->over_hang_max_2));
+      memcpy(self->individual, kLocalThresholdLBR,
+             sizeof(self->individual));
+      memcpy(self->total, kGlobalThresholdLBR,
+             sizeof(self->total));
+      break;
+    case 2:
+      // Aggressive mode.
+      memcpy(self->over_hang_max_1, kOverHangMax1AGG,
+             sizeof(self->over_hang_max_1));
+      memcpy(self->over_hang_max_2, kOverHangMax2AGG,
+             sizeof(self->over_hang_max_2));
+      memcpy(self->individual, kLocalThresholdAGG,
+             sizeof(self->individual));
+      memcpy(self->total, kGlobalThresholdAGG,
+             sizeof(self->total));
+      break;
+    case 3:
+      // Very aggressive mode.
+      memcpy(self->over_hang_max_1, kOverHangMax1VAG,
+             sizeof(self->over_hang_max_1));
+      memcpy(self->over_hang_max_2, kOverHangMax2VAG,
+             sizeof(self->over_hang_max_2));
+      memcpy(self->individual, kLocalThresholdVAG,
+             sizeof(self->individual));
+      memcpy(self->total, kGlobalThresholdVAG,
+             sizeof(self->total));
+      break;
+    default:
+      return_value = -1;
+      break;
+  }
 
-        inst->individual[0] = INDIVIDUAL_10MS_Q;
-        inst->individual[1] = INDIVIDUAL_20MS_Q;
-        inst->individual[2] = INDIVIDUAL_30MS_Q;
-
-        inst->total[0] = TOTAL_10MS_Q;
-        inst->total[1] = TOTAL_20MS_Q;
-        inst->total[2] = TOTAL_30MS_Q;
-    } else if (mode == 1)
-    {
-        // Low bitrate mode
-        inst->over_hang_max_1[0] = OHMAX1_10MS_LBR; // Overhang short speech burst
-        inst->over_hang_max_1[1] = OHMAX1_20MS_LBR; // Overhang short speech burst
-        inst->over_hang_max_1[2] = OHMAX1_30MS_LBR; // Overhang short speech burst
-        inst->over_hang_max_2[0] = OHMAX2_10MS_LBR; // Overhang long speech burst
-        inst->over_hang_max_2[1] = OHMAX2_20MS_LBR; // Overhang long speech burst
-        inst->over_hang_max_2[2] = OHMAX2_30MS_LBR; // Overhang long speech burst
-
-        inst->individual[0] = INDIVIDUAL_10MS_LBR;
-        inst->individual[1] = INDIVIDUAL_20MS_LBR;
-        inst->individual[2] = INDIVIDUAL_30MS_LBR;
-
-        inst->total[0] = TOTAL_10MS_LBR;
-        inst->total[1] = TOTAL_20MS_LBR;
-        inst->total[2] = TOTAL_30MS_LBR;
-    } else if (mode == 2)
-    {
-        // Aggressive mode
-        inst->over_hang_max_1[0] = OHMAX1_10MS_AGG; // Overhang short speech burst
-        inst->over_hang_max_1[1] = OHMAX1_20MS_AGG; // Overhang short speech burst
-        inst->over_hang_max_1[2] = OHMAX1_30MS_AGG; // Overhang short speech burst
-        inst->over_hang_max_2[0] = OHMAX2_10MS_AGG; // Overhang long speech burst
-        inst->over_hang_max_2[1] = OHMAX2_20MS_AGG; // Overhang long speech burst
-        inst->over_hang_max_2[2] = OHMAX2_30MS_AGG; // Overhang long speech burst
-
-        inst->individual[0] = INDIVIDUAL_10MS_AGG;
-        inst->individual[1] = INDIVIDUAL_20MS_AGG;
-        inst->individual[2] = INDIVIDUAL_30MS_AGG;
-
-        inst->total[0] = TOTAL_10MS_AGG;
-        inst->total[1] = TOTAL_20MS_AGG;
-        inst->total[2] = TOTAL_30MS_AGG;
-    } else if (mode == 3)
-    {
-        // Very aggressive mode
-        inst->over_hang_max_1[0] = OHMAX1_10MS_VAG; // Overhang short speech burst
-        inst->over_hang_max_1[1] = OHMAX1_20MS_VAG; // Overhang short speech burst
-        inst->over_hang_max_1[2] = OHMAX1_30MS_VAG; // Overhang short speech burst
-        inst->over_hang_max_2[0] = OHMAX2_10MS_VAG; // Overhang long speech burst
-        inst->over_hang_max_2[1] = OHMAX2_20MS_VAG; // Overhang long speech burst
-        inst->over_hang_max_2[2] = OHMAX2_30MS_VAG; // Overhang long speech burst
-
-        inst->individual[0] = INDIVIDUAL_10MS_VAG;
-        inst->individual[1] = INDIVIDUAL_20MS_VAG;
-        inst->individual[2] = INDIVIDUAL_30MS_VAG;
-
-        inst->total[0] = TOTAL_10MS_VAG;
-        inst->total[1] = TOTAL_20MS_VAG;
-        inst->total[2] = TOTAL_30MS_VAG;
-    } else
-    {
-        return -1;
-    }
-
-    return 0;
+  return return_value;
 }
 
 // Calculate VAD decision by first extracting feature values and then calculate
 // probability for both speech and background noise.
 
-WebRtc_Word16 WebRtcVad_CalcVad32khz(VadInstT *inst, WebRtc_Word16 *speech_frame,
-                                     int frame_length)
+int WebRtcVad_CalcVad32khz(VadInstT* inst, int16_t* speech_frame,
+                           int frame_length)
 {
-    WebRtc_Word16 len, vad;
-    WebRtc_Word16 speechWB[480]; // Downsampled speech frame: 960 samples (30ms in SWB)
-    WebRtc_Word16 speechNB[240]; // Downsampled speech frame: 480 samples (30ms in WB)
+    int len, vad;
+    int16_t speechWB[480]; // Downsampled speech frame: 960 samples (30ms in SWB)
+    int16_t speechNB[240]; // Downsampled speech frame: 480 samples (30ms in WB)
 
 
     // Downsample signal 32->16->8 before doing VAD
@@ -294,11 +622,11 @@
     return vad;
 }
 
-WebRtc_Word16 WebRtcVad_CalcVad16khz(VadInstT *inst, WebRtc_Word16 *speech_frame,
-                                     int frame_length)
+int WebRtcVad_CalcVad16khz(VadInstT* inst, int16_t* speech_frame,
+                           int frame_length)
 {
-    WebRtc_Word16 len, vad;
-    WebRtc_Word16 speechNB[240]; // Downsampled speech frame: 480 samples (30ms in WB)
+    int len, vad;
+    int16_t speechNB[240]; // Downsampled speech frame: 480 samples (30ms in WB)
 
     // Wideband: Downsample signal before doing VAD
     WebRtcVad_Downsampling(speech_frame, speechNB, inst->downsampling_filter_states,
@@ -310,414 +638,17 @@
     return vad;
 }
 
-WebRtc_Word16 WebRtcVad_CalcVad8khz(VadInstT *inst, WebRtc_Word16 *speech_frame,
-                                    int frame_length)
+int WebRtcVad_CalcVad8khz(VadInstT* inst, int16_t* speech_frame,
+                          int frame_length)
 {
-    WebRtc_Word16 feature_vector[NUM_CHANNELS], total_power;
+    int16_t feature_vector[kNumChannels], total_power;
 
     // Get power in the bands
-    total_power = WebRtcVad_get_features(inst, speech_frame, frame_length, feature_vector);
+    total_power = WebRtcVad_CalculateFeatures(inst, speech_frame, frame_length,
+                                              feature_vector);
 
     // Make a VAD
-    inst->vad = WebRtcVad_GmmProbability(inst, feature_vector, total_power, frame_length);
+    inst->vad = GmmProbability(inst, feature_vector, total_power, frame_length);
 
     return inst->vad;
 }
-
-// Calculate probability for both speech and background noise, and perform a
-// hypothesis-test.
-WebRtc_Word16 WebRtcVad_GmmProbability(VadInstT *inst, WebRtc_Word16 *feature_vector,
-                                       WebRtc_Word16 total_power, int frame_length)
-{
-    int n, k;
-    WebRtc_Word16 backval;
-    WebRtc_Word16 h0, h1;
-    WebRtc_Word16 ratvec, xval;
-    WebRtc_Word16 vadflag;
-    WebRtc_Word16 shifts0, shifts1;
-    WebRtc_Word16 tmp16, tmp16_1, tmp16_2;
-    WebRtc_Word16 diff, nr, pos;
-    WebRtc_Word16 nmk, nmk2, nmk3, smk, smk2, nsk, ssk;
-    WebRtc_Word16 delt, ndelt;
-    WebRtc_Word16 maxspe, maxmu;
-    WebRtc_Word16 deltaN[NUM_TABLE_VALUES], deltaS[NUM_TABLE_VALUES];
-    WebRtc_Word16 ngprvec[NUM_TABLE_VALUES], sgprvec[NUM_TABLE_VALUES];
-    WebRtc_Word32 h0test, h1test;
-    WebRtc_Word32 tmp32_1, tmp32_2;
-    WebRtc_Word32 dotVal;
-    WebRtc_Word32 nmid, smid;
-    WebRtc_Word32 probn[NUM_MODELS], probs[NUM_MODELS];
-    WebRtc_Word16 *nmean1ptr, *nmean2ptr, *smean1ptr, *smean2ptr, *nstd1ptr, *nstd2ptr,
-            *sstd1ptr, *sstd2ptr;
-    WebRtc_Word16 overhead1, overhead2, individualTest, totalTest;
-
-    // Set the thresholds to different values based on frame length
-    if (frame_length == 80)
-    {
-        // 80 input samples
-        overhead1 = inst->over_hang_max_1[0];
-        overhead2 = inst->over_hang_max_2[0];
-        individualTest = inst->individual[0];
-        totalTest = inst->total[0];
-    } else if (frame_length == 160)
-    {
-        // 160 input samples
-        overhead1 = inst->over_hang_max_1[1];
-        overhead2 = inst->over_hang_max_2[1];
-        individualTest = inst->individual[1];
-        totalTest = inst->total[1];
-    } else
-    {
-        // 240 input samples
-        overhead1 = inst->over_hang_max_1[2];
-        overhead2 = inst->over_hang_max_2[2];
-        individualTest = inst->individual[2];
-        totalTest = inst->total[2];
-    }
-
-    if (total_power > MIN_ENERGY)
-    { // If signal present at all
-
-        // Set pointers to the gaussian parameters
-        nmean1ptr = &inst->noise_means[0];
-        nmean2ptr = &inst->noise_means[NUM_CHANNELS];
-        smean1ptr = &inst->speech_means[0];
-        smean2ptr = &inst->speech_means[NUM_CHANNELS];
-        nstd1ptr = &inst->noise_stds[0];
-        nstd2ptr = &inst->noise_stds[NUM_CHANNELS];
-        sstd1ptr = &inst->speech_stds[0];
-        sstd2ptr = &inst->speech_stds[NUM_CHANNELS];
-
-        vadflag = 0;
-        dotVal = 0;
-        for (n = 0; n < NUM_CHANNELS; n++)
-        { // For all channels
-
-            pos = WEBRTC_SPL_LSHIFT_W16(n, 1);
-            xval = feature_vector[n];
-
-            // Probability for Noise, Q7 * Q20 = Q27
-            tmp32_1 = WebRtcVad_GaussianProbability(xval, *nmean1ptr++, *nstd1ptr++,
-                                                    &deltaN[pos]);
-            probn[0] = (WebRtc_Word32)(kNoiseDataWeights[n] * tmp32_1);
-            tmp32_1 = WebRtcVad_GaussianProbability(xval, *nmean2ptr++, *nstd2ptr++,
-                                                    &deltaN[pos + 1]);
-            probn[1] = (WebRtc_Word32)(kNoiseDataWeights[n + NUM_CHANNELS] * tmp32_1);
-            h0test = probn[0] + probn[1]; // Q27
-            h0 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(h0test, 12); // Q15
-
-            // Probability for Speech
-            tmp32_1 = WebRtcVad_GaussianProbability(xval, *smean1ptr++, *sstd1ptr++,
-                                                    &deltaS[pos]);
-            probs[0] = (WebRtc_Word32)(kSpeechDataWeights[n] * tmp32_1);
-            tmp32_1 = WebRtcVad_GaussianProbability(xval, *smean2ptr++, *sstd2ptr++,
-                                                    &deltaS[pos + 1]);
-            probs[1] = (WebRtc_Word32)(kSpeechDataWeights[n + NUM_CHANNELS] * tmp32_1);
-            h1test = probs[0] + probs[1]; // Q27
-            h1 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(h1test, 12); // Q15
-
-            // Get likelihood ratio. Approximate log2(H1/H0) with shifts0 - shifts1
-            shifts0 = WebRtcSpl_NormW32(h0test);
-            shifts1 = WebRtcSpl_NormW32(h1test);
-
-            if ((h0test > 0) && (h1test > 0))
-            {
-                ratvec = shifts0 - shifts1;
-            } else if (h1test > 0)
-            {
-                ratvec = 31 - shifts1;
-            } else if (h0test > 0)
-            {
-                ratvec = shifts0 - 31;
-            } else
-            {
-                ratvec = 0;
-            }
-
-            // VAD decision with spectrum weighting
-            dotVal += WEBRTC_SPL_MUL_16_16(ratvec, kSpectrumWeight[n]);
-
-            // Individual channel test
-            if ((ratvec << 2) > individualTest)
-            {
-                vadflag = 1;
-            }
-
-            // Probabilities used when updating model
-            if (h0 > 0)
-            {
-                tmp32_1 = probn[0] & 0xFFFFF000; // Q27
-                tmp32_2 = WEBRTC_SPL_LSHIFT_W32(tmp32_1, 2); // Q29
-                ngprvec[pos] = (WebRtc_Word16)WebRtcSpl_DivW32W16(tmp32_2, h0);
-                ngprvec[pos + 1] = 16384 - ngprvec[pos];
-            } else
-            {
-                ngprvec[pos] = 16384;
-                ngprvec[pos + 1] = 0;
-            }
-
-            // Probabilities used when updating model
-            if (h1 > 0)
-            {
-                tmp32_1 = probs[0] & 0xFFFFF000;
-                tmp32_2 = WEBRTC_SPL_LSHIFT_W32(tmp32_1, 2);
-                sgprvec[pos] = (WebRtc_Word16)WebRtcSpl_DivW32W16(tmp32_2, h1);
-                sgprvec[pos + 1] = 16384 - sgprvec[pos];
-            } else
-            {
-                sgprvec[pos] = 0;
-                sgprvec[pos + 1] = 0;
-            }
-        }
-
-        // Overall test
-        if (dotVal >= totalTest)
-        {
-            vadflag |= 1;
-        }
-
-        // Set pointers to the means and standard deviations.
-        nmean1ptr = &inst->noise_means[0];
-        smean1ptr = &inst->speech_means[0];
-        nstd1ptr = &inst->noise_stds[0];
-        sstd1ptr = &inst->speech_stds[0];
-
-        maxspe = 12800;
-
-        // Update the model's parameters
-        for (n = 0; n < NUM_CHANNELS; n++)
-        {
-
-            pos = WEBRTC_SPL_LSHIFT_W16(n, 1);
-
-            // Get min value in past which is used for long term correction
-            backval = WebRtcVad_FindMinimum(inst, feature_vector[n], n); // Q4
-
-            // Compute the "global" mean, that is the sum of the two means weighted
-            nmid = WEBRTC_SPL_MUL_16_16(kNoiseDataWeights[n], *nmean1ptr); // Q7 * Q7
-            nmid += WEBRTC_SPL_MUL_16_16(kNoiseDataWeights[n+NUM_CHANNELS],
-                    *(nmean1ptr+NUM_CHANNELS));
-            tmp16_1 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(nmid, 6); // Q8
-
-            for (k = 0; k < NUM_MODELS; k++)
-            {
-
-                nr = pos + k;
-
-                nmean2ptr = nmean1ptr + k * NUM_CHANNELS;
-                smean2ptr = smean1ptr + k * NUM_CHANNELS;
-                nstd2ptr = nstd1ptr + k * NUM_CHANNELS;
-                sstd2ptr = sstd1ptr + k * NUM_CHANNELS;
-                nmk = *nmean2ptr;
-                smk = *smean2ptr;
-                nsk = *nstd2ptr;
-                ssk = *sstd2ptr;
-
-                // Update noise mean vector if the frame consists of noise only
-                nmk2 = nmk;
-                if (!vadflag)
-                {
-                    // deltaN = (x-mu)/sigma^2
-                    // ngprvec[k] = probn[k]/(probn[0] + probn[1])
-
-                    delt = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ngprvec[nr],
-                            deltaN[nr], 11); // Q14*Q11
-                    nmk2 = nmk + (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(delt,
-                            kNoiseUpdateConst,
-                            22); // Q7+(Q14*Q15>>22)
-                }
-
-                // Long term correction of the noise mean
-                ndelt = WEBRTC_SPL_LSHIFT_W16(backval, 4);
-                ndelt -= tmp16_1; // Q8 - Q8
-                nmk3 = nmk2 + (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(ndelt,
-                        kBackEta,
-                        9); // Q7+(Q8*Q8)>>9
-
-                // Control that the noise mean does not drift to much
-                tmp16 = WEBRTC_SPL_LSHIFT_W16(k+5, 7);
-                if (nmk3 < tmp16)
-                    nmk3 = tmp16;
-                tmp16 = WEBRTC_SPL_LSHIFT_W16(72+k-n, 7);
-                if (nmk3 > tmp16)
-                    nmk3 = tmp16;
-                *nmean2ptr = nmk3;
-
-                if (vadflag)
-                {
-                    // Update speech mean vector:
-                    // deltaS = (x-mu)/sigma^2
-                    // sgprvec[k] = probn[k]/(probn[0] + probn[1])
-
-                    delt = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(sgprvec[nr],
-                            deltaS[nr],
-                            11); // (Q14*Q11)>>11=Q14
-                    tmp16 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(delt,
-                            kSpeechUpdateConst,
-                            21) + 1;
-                    smk2 = smk + (tmp16 >> 1); // Q7 + (Q14 * Q15 >> 22)
-
-                    // Control that the speech mean does not drift to much
-                    maxmu = maxspe + 640;
-                    if (smk2 < kMinimumMean[k])
-                        smk2 = kMinimumMean[k];
-                    if (smk2 > maxmu)
-                        smk2 = maxmu;
-
-                    *smean2ptr = smk2;
-
-                    // (Q7>>3) = Q4
-                    tmp16 = WEBRTC_SPL_RSHIFT_W16((smk + 4), 3);
-
-                    tmp16 = feature_vector[n] - tmp16; // Q4
-                    tmp32_1 = WEBRTC_SPL_MUL_16_16_RSFT(deltaS[nr], tmp16, 3);
-                    tmp32_2 = tmp32_1 - (WebRtc_Word32)4096; // Q12
-                    tmp16 = WEBRTC_SPL_RSHIFT_W16((sgprvec[nr]), 2);
-                    tmp32_1 = (WebRtc_Word32)(tmp16 * tmp32_2);// (Q15>>3)*(Q14>>2)=Q12*Q12=Q24
-
-                    tmp32_2 = WEBRTC_SPL_RSHIFT_W32(tmp32_1, 4); // Q20
-
-                    // 0.1 * Q20 / Q7 = Q13
-                    if (tmp32_2 > 0)
-                        tmp16 = (WebRtc_Word16)WebRtcSpl_DivW32W16(tmp32_2, ssk * 10);
-                    else
-                    {
-                        tmp16 = (WebRtc_Word16)WebRtcSpl_DivW32W16(-tmp32_2, ssk * 10);
-                        tmp16 = -tmp16;
-                    }
-                    // divide by 4 giving an update factor of 0.025
-                    tmp16 += 128; // Rounding
-                    ssk += WEBRTC_SPL_RSHIFT_W16(tmp16, 8);
-                    // Division with 8 plus Q7
-                    if (ssk < MIN_STD)
-                        ssk = MIN_STD;
-                    *sstd2ptr = ssk;
-                } else
-                {
-                    // Update GMM variance vectors
-                    // deltaN * (feature_vector[n] - nmk) - 1, Q11 * Q4
-                    tmp16 = feature_vector[n] - WEBRTC_SPL_RSHIFT_W16(nmk, 3);
-
-                    // (Q15>>3) * (Q14>>2) = Q12 * Q12 = Q24
-                    tmp32_1 = WEBRTC_SPL_MUL_16_16_RSFT(deltaN[nr], tmp16, 3) - 4096;
-                    tmp16 = WEBRTC_SPL_RSHIFT_W16((ngprvec[nr]+2), 2);
-                    tmp32_2 = (WebRtc_Word32)(tmp16 * tmp32_1);
-                    tmp32_1 = WEBRTC_SPL_RSHIFT_W32(tmp32_2, 14);
-                    // Q20  * approx 0.001 (2^-10=0.0009766)
-
-                    // Q20 / Q7 = Q13
-                    tmp16 = (WebRtc_Word16)WebRtcSpl_DivW32W16(tmp32_1, nsk);
-                    if (tmp32_1 > 0)
-                        tmp16 = (WebRtc_Word16)WebRtcSpl_DivW32W16(tmp32_1, nsk);
-                    else
-                    {
-                        tmp16 = (WebRtc_Word16)WebRtcSpl_DivW32W16(-tmp32_1, nsk);
-                        tmp16 = -tmp16;
-                    }
-                    tmp16 += 32; // Rounding
-                    nsk += WEBRTC_SPL_RSHIFT_W16(tmp16, 6);
-
-                    if (nsk < MIN_STD)
-                        nsk = MIN_STD;
-
-                    *nstd2ptr = nsk;
-                }
-            }
-
-            // Separate models if they are too close - nmid in Q14
-            nmid = WEBRTC_SPL_MUL_16_16(kNoiseDataWeights[n], *nmean1ptr);
-            nmid += WEBRTC_SPL_MUL_16_16(kNoiseDataWeights[n+NUM_CHANNELS], *nmean2ptr);
-
-            // smid in Q14
-            smid = WEBRTC_SPL_MUL_16_16(kSpeechDataWeights[n], *smean1ptr);
-            smid += WEBRTC_SPL_MUL_16_16(kSpeechDataWeights[n+NUM_CHANNELS], *smean2ptr);
-
-            // diff = "global" speech mean - "global" noise mean
-            diff = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(smid, 9);
-            tmp16 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(nmid, 9);
-            diff -= tmp16;
-
-            if (diff < kMinimumDifference[n])
-            {
-
-                tmp16 = kMinimumDifference[n] - diff; // Q5
-
-                // tmp16_1 = ~0.8 * (kMinimumDifference - diff) in Q7
-                // tmp16_2 = ~0.2 * (kMinimumDifference - diff) in Q7
-                tmp16_1 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(13, tmp16, 2);
-                tmp16_2 = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(3, tmp16, 2);
-
-                // First Gauss, speech model
-                tmp16 = tmp16_1 + *smean1ptr;
-                *smean1ptr = tmp16;
-                smid = WEBRTC_SPL_MUL_16_16(tmp16, kSpeechDataWeights[n]);
-
-                // Second Gauss, speech model
-                tmp16 = tmp16_1 + *smean2ptr;
-                *smean2ptr = tmp16;
-                smid += WEBRTC_SPL_MUL_16_16(tmp16, kSpeechDataWeights[n+NUM_CHANNELS]);
-
-                // First Gauss, noise model
-                tmp16 = *nmean1ptr - tmp16_2;
-                *nmean1ptr = tmp16;
-
-                nmid = WEBRTC_SPL_MUL_16_16(tmp16, kNoiseDataWeights[n]);
-
-                // Second Gauss, noise model
-                tmp16 = *nmean2ptr - tmp16_2;
-                *nmean2ptr = tmp16;
-                nmid += WEBRTC_SPL_MUL_16_16(tmp16, kNoiseDataWeights[n+NUM_CHANNELS]);
-            }
-
-            // Control that the speech & noise means do not drift to much
-            maxspe = kMaximumSpeech[n];
-            tmp16_2 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(smid, 7);
-            if (tmp16_2 > maxspe)
-            { // Upper limit of speech model
-                tmp16_2 -= maxspe;
-
-                *smean1ptr -= tmp16_2;
-                *smean2ptr -= tmp16_2;
-            }
-
-            tmp16_2 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(nmid, 7);
-            if (tmp16_2 > kMaximumNoise[n])
-            {
-                tmp16_2 -= kMaximumNoise[n];
-
-                *nmean1ptr -= tmp16_2;
-                *nmean2ptr -= tmp16_2;
-            }
-
-            nmean1ptr++;
-            smean1ptr++;
-            nstd1ptr++;
-            sstd1ptr++;
-        }
-        inst->frame_counter++;
-    } else
-    {
-        vadflag = 0;
-    }
-
-    // Hangover smoothing
-    if (!vadflag)
-    {
-        if (inst->over_hang > 0)
-        {
-            vadflag = 2 + inst->over_hang;
-            inst->over_hang = inst->over_hang - 1;
-        }
-        inst->num_of_speech = 0;
-    } else
-    {
-        inst->num_of_speech = inst->num_of_speech + 1;
-        if (inst->num_of_speech > NSP_MAX)
-        {
-            inst->num_of_speech = NSP_MAX;
-            inst->over_hang = overhead2;
-        } else
-            inst->over_hang = overhead1;
-    }
-    return vadflag;
-}
diff --git a/src/common_audio/vad/vad_core.h b/src/common_audio/vad/vad_core.h
index cad6ca4..00d39a4 100644
--- a/src/common_audio/vad/vad_core.h
+++ b/src/common_audio/vad/vad_core.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -13,59 +13,54 @@
  * This header file includes the descriptions of the core VAD calls.
  */
 
-#ifndef WEBRTC_VAD_CORE_H_
-#define WEBRTC_VAD_CORE_H_
+#ifndef WEBRTC_COMMON_AUDIO_VAD_VAD_CORE_H_
+#define WEBRTC_COMMON_AUDIO_VAD_VAD_CORE_H_
 
 #include "typedefs.h"
-#include "vad_defines.h"
+
+enum { kNumChannels = 6 };  // Number of frequency bands (named channels).
+enum { kNumGaussians = 2 };  // Number of Gaussians per channel in the GMM.
+enum { kTableSize = kNumChannels * kNumGaussians };
+enum { kMinEnergy = 10 };  // Minimum energy required to trigger audio signal.
 
 typedef struct VadInstT_
 {
 
-    WebRtc_Word16 vad;
-    WebRtc_Word32 downsampling_filter_states[4];
-    WebRtc_Word16 noise_means[NUM_TABLE_VALUES];
-    WebRtc_Word16 speech_means[NUM_TABLE_VALUES];
-    WebRtc_Word16 noise_stds[NUM_TABLE_VALUES];
-    WebRtc_Word16 speech_stds[NUM_TABLE_VALUES];
+    int vad;
+    int32_t downsampling_filter_states[4];
+    int16_t noise_means[kTableSize];
+    int16_t speech_means[kTableSize];
+    int16_t noise_stds[kTableSize];
+    int16_t speech_stds[kTableSize];
     // TODO(bjornv): Change to |frame_count|.
-    WebRtc_Word32 frame_counter;
-    WebRtc_Word16 over_hang; // Over Hang
-    WebRtc_Word16 num_of_speech;
+    int32_t frame_counter;
+    int16_t over_hang; // Over Hang
+    int16_t num_of_speech;
     // TODO(bjornv): Change to |age_vector|.
-    WebRtc_Word16 index_vector[16 * NUM_CHANNELS];
-    WebRtc_Word16 low_value_vector[16 * NUM_CHANNELS];
+    int16_t index_vector[16 * kNumChannels];
+    int16_t low_value_vector[16 * kNumChannels];
     // TODO(bjornv): Change to |median|.
-    WebRtc_Word16 mean_value[NUM_CHANNELS];
-    WebRtc_Word16 upper_state[5];
-    WebRtc_Word16 lower_state[5];
-    WebRtc_Word16 hp_filter_state[4];
-    WebRtc_Word16 over_hang_max_1[3];
-    WebRtc_Word16 over_hang_max_2[3];
-    WebRtc_Word16 individual[3];
-    WebRtc_Word16 total[3];
+    int16_t mean_value[kNumChannels];
+    int16_t upper_state[5];
+    int16_t lower_state[5];
+    int16_t hp_filter_state[4];
+    int16_t over_hang_max_1[3];
+    int16_t over_hang_max_2[3];
+    int16_t individual[3];
+    int16_t total[3];
 
-    short init_flag;
+    int init_flag;
 
 } VadInstT;
 
-/****************************************************************************
- * WebRtcVad_InitCore(...)
- *
- * This function initializes a VAD instance
- *
- * Input:
- *      - inst      : Instance that should be initialized
- *      - mode      : Aggressiveness degree
- *                    0 (High quality) - 3 (Highly aggressive)
- *
- * Output:
- *      - inst      : Initialized instance
- *
- * Return value     :  0 - Ok
- *                    -1 - Error
- */
-int WebRtcVad_InitCore(VadInstT* inst, short mode);
+// Initializes the core VAD component. The default aggressiveness mode is
+// controlled by |kDefaultMode| in vad_core.c.
+//
+// - self [i/o] : Instance that should be initialized
+//
+// returns      : 0 (OK), -1 (NULL pointer in or if the default mode can't be
+//                set)
+int WebRtcVad_InitCore(VadInstT* self);
 
 /****************************************************************************
  * WebRtcVad_set_mode_core(...)
@@ -84,7 +79,7 @@
  *                    -1 - Error
  */
 
-int WebRtcVad_set_mode_core(VadInstT* inst, short mode);
+int WebRtcVad_set_mode_core(VadInstT* self, int mode);
 
 /****************************************************************************
  * WebRtcVad_CalcVad32khz(...) 
@@ -105,31 +100,11 @@
  *                        0 - No active speech
  *                        1-6 - Active speech
  */
-WebRtc_Word16 WebRtcVad_CalcVad32khz(VadInstT* inst, WebRtc_Word16* speech_frame,
-                                     int frame_length);
-WebRtc_Word16 WebRtcVad_CalcVad16khz(VadInstT* inst, WebRtc_Word16* speech_frame,
-                                     int frame_length);
-WebRtc_Word16 WebRtcVad_CalcVad8khz(VadInstT* inst, WebRtc_Word16* speech_frame,
-                                    int frame_length);
+int WebRtcVad_CalcVad32khz(VadInstT* inst, int16_t* speech_frame,
+                           int frame_length);
+int WebRtcVad_CalcVad16khz(VadInstT* inst, int16_t* speech_frame,
+                           int frame_length);
+int WebRtcVad_CalcVad8khz(VadInstT* inst, int16_t* speech_frame,
+                          int frame_length);
 
-/****************************************************************************
- * WebRtcVad_GmmProbability(...)
- *
- * This function calculates the probabilities for background noise and
- * speech using Gaussian Mixture Models. A hypothesis-test is performed to decide
- * which type of signal is most probable.
- *
- * Input:
- *      - inst              : Pointer to VAD instance
- *      - feature_vector    : Feature vector = log10(energy in frequency band)
- *      - total_power       : Total power in frame.
- *      - frame_length      : Number of input samples
- *
- * Output:
- *      VAD decision        : 0 - noise, 1 - speech
- *    
- */
-WebRtc_Word16 WebRtcVad_GmmProbability(VadInstT* inst, WebRtc_Word16* feature_vector,
-                                       WebRtc_Word16 total_power, int frame_length);
-
-#endif // WEBRTC_VAD_CORE_H_
+#endif  // WEBRTC_COMMON_AUDIO_VAD_VAD_CORE_H_
diff --git a/src/common_audio/vad/vad_core_unittest.cc b/src/common_audio/vad/vad_core_unittest.cc
new file mode 100644
index 0000000..141b796
--- /dev/null
+++ b/src/common_audio/vad/vad_core_unittest.cc
@@ -0,0 +1,99 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdlib.h>
+
+#include "gtest/gtest.h"
+#include "typedefs.h"
+#include "vad_unittest.h"
+
+extern "C" {
+#include "vad_core.h"
+}
+
+namespace {
+
+TEST_F(VadTest, InitCore) {
+  // Test WebRtcVad_InitCore().
+  VadInstT* self = reinterpret_cast<VadInstT*>(malloc(sizeof(VadInstT)));
+
+  // NULL pointer test.
+  EXPECT_EQ(-1, WebRtcVad_InitCore(NULL));
+
+  // Verify return = 0 for non-NULL pointer.
+  EXPECT_EQ(0, WebRtcVad_InitCore(self));
+  // Verify init_flag is set.
+  EXPECT_EQ(42, self->init_flag);
+
+  free(self);
+}
+
+TEST_F(VadTest, set_mode_core) {
+  VadInstT* self = reinterpret_cast<VadInstT*>(malloc(sizeof(VadInstT)));
+
+  // TODO(bjornv): Add NULL pointer check if we take care of it in
+  // vad_core.c
+
+  ASSERT_EQ(0, WebRtcVad_InitCore(self));
+  // Test WebRtcVad_set_mode_core().
+  // Invalid modes should return -1.
+  EXPECT_EQ(-1, WebRtcVad_set_mode_core(self, -1));
+  EXPECT_EQ(-1, WebRtcVad_set_mode_core(self, 1000));
+  // Valid modes should return 0.
+  for (size_t j = 0; j < kModesSize; ++j) {
+    EXPECT_EQ(0, WebRtcVad_set_mode_core(self, kModes[j]));
+  }
+
+  free(self);
+}
+
+TEST_F(VadTest, CalcVad) {
+  VadInstT* self = reinterpret_cast<VadInstT*>(malloc(sizeof(VadInstT)));
+  int16_t speech[kMaxFrameLength];
+
+  // TODO(bjornv): Add NULL pointer check if we take care of it in
+  // vad_core.c
+
+  // Test WebRtcVad_CalcVadXXkhz()
+  // Verify that all zeros in gives VAD = 0 out.
+  memset(speech, 0, sizeof(speech));
+  ASSERT_EQ(0, WebRtcVad_InitCore(self));
+  for (size_t j = 0; j < kFrameLengthsSize; ++j) {
+    if (ValidRatesAndFrameLengths(8000, kFrameLengths[j])) {
+      EXPECT_EQ(0, WebRtcVad_CalcVad8khz(self, speech, kFrameLengths[j]));
+    }
+    if (ValidRatesAndFrameLengths(16000, kFrameLengths[j])) {
+      EXPECT_EQ(0, WebRtcVad_CalcVad16khz(self, speech, kFrameLengths[j]));
+    }
+    if (ValidRatesAndFrameLengths(32000, kFrameLengths[j])) {
+      EXPECT_EQ(0, WebRtcVad_CalcVad32khz(self, speech, kFrameLengths[j]));
+    }
+  }
+
+  // Construct a speech signal that will trigger the VAD in all modes. It is
+  // known that (i * i) will wrap around, but that doesn't matter in this case.
+  for (int16_t i = 0; i < kMaxFrameLength; ++i) {
+    speech[i] = (i * i);
+  }
+  for (size_t j = 0; j < kFrameLengthsSize; ++j) {
+    if (ValidRatesAndFrameLengths(8000, kFrameLengths[j])) {
+      EXPECT_EQ(1, WebRtcVad_CalcVad8khz(self, speech, kFrameLengths[j]));
+    }
+    if (ValidRatesAndFrameLengths(16000, kFrameLengths[j])) {
+      EXPECT_EQ(1, WebRtcVad_CalcVad16khz(self, speech, kFrameLengths[j]));
+    }
+    if (ValidRatesAndFrameLengths(32000, kFrameLengths[j])) {
+      EXPECT_EQ(1, WebRtcVad_CalcVad32khz(self, speech, kFrameLengths[j]));
+    }
+  }
+
+  free(self);
+}
+}  // namespace
diff --git a/src/common_audio/vad/vad_defines.h b/src/common_audio/vad/vad_defines.h
deleted file mode 100644
index b33af2e..0000000
--- a/src/common_audio/vad/vad_defines.h
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-
-/*
- * This header file includes the macros used in VAD.
- */
-
-#ifndef WEBRTC_VAD_DEFINES_H_
-#define WEBRTC_VAD_DEFINES_H_
-
-#define NUM_CHANNELS        6   // Eight frequency bands
-#define NUM_MODELS          2   // Number of Gaussian models
-#define NUM_TABLE_VALUES    NUM_CHANNELS * NUM_MODELS
-
-#define MIN_ENERGY          10
-#define ALPHA1              6553    // 0.2 in Q15
-#define ALPHA2              32439   // 0.99 in Q15
-#define NSP_MAX             6       // Maximum number of VAD=1 frames in a row counted
-#define MIN_STD             384     // Minimum standard deviation
-// Mode 0, Quality thresholds - Different thresholds for the different frame lengths
-#define INDIVIDUAL_10MS_Q   24
-#define INDIVIDUAL_20MS_Q   21      // (log10(2)*66)<<2 ~=16
-#define INDIVIDUAL_30MS_Q   24
-
-#define TOTAL_10MS_Q        57
-#define TOTAL_20MS_Q        48
-#define TOTAL_30MS_Q        57
-
-#define OHMAX1_10MS_Q       8  // Max Overhang 1
-#define OHMAX2_10MS_Q       14 // Max Overhang 2
-#define OHMAX1_20MS_Q       4  // Max Overhang 1
-#define OHMAX2_20MS_Q       7  // Max Overhang 2
-#define OHMAX1_30MS_Q       3
-#define OHMAX2_30MS_Q       5
-
-// Mode 1, Low bitrate thresholds - Different thresholds for the different frame lengths
-#define INDIVIDUAL_10MS_LBR 37
-#define INDIVIDUAL_20MS_LBR 32
-#define INDIVIDUAL_30MS_LBR 37
-
-#define TOTAL_10MS_LBR      100
-#define TOTAL_20MS_LBR      80
-#define TOTAL_30MS_LBR      100
-
-#define OHMAX1_10MS_LBR     8  // Max Overhang 1
-#define OHMAX2_10MS_LBR     14 // Max Overhang 2
-#define OHMAX1_20MS_LBR     4
-#define OHMAX2_20MS_LBR     7
-
-#define OHMAX1_30MS_LBR     3
-#define OHMAX2_30MS_LBR     5
-
-// Mode 2, Very aggressive thresholds - Different thresholds for the different frame lengths
-#define INDIVIDUAL_10MS_AGG 82
-#define INDIVIDUAL_20MS_AGG 78
-#define INDIVIDUAL_30MS_AGG 82
-
-#define TOTAL_10MS_AGG      285 //580
-#define TOTAL_20MS_AGG      260
-#define TOTAL_30MS_AGG      285
-
-#define OHMAX1_10MS_AGG     6  // Max Overhang 1
-#define OHMAX2_10MS_AGG     9  // Max Overhang 2
-#define OHMAX1_20MS_AGG     3
-#define OHMAX2_20MS_AGG     5
-
-#define OHMAX1_30MS_AGG     2
-#define OHMAX2_30MS_AGG     3
-
-// Mode 3, Super aggressive thresholds - Different thresholds for the different frame lengths
-#define INDIVIDUAL_10MS_VAG 94
-#define INDIVIDUAL_20MS_VAG 94
-#define INDIVIDUAL_30MS_VAG 94
-
-#define TOTAL_10MS_VAG      1100 //1700
-#define TOTAL_20MS_VAG      1050
-#define TOTAL_30MS_VAG      1100
-
-#define OHMAX1_10MS_VAG     6  // Max Overhang 1
-#define OHMAX2_10MS_VAG     9  // Max Overhang 2
-#define OHMAX1_20MS_VAG     3
-#define OHMAX2_20MS_VAG     5
-
-#define OHMAX1_30MS_VAG     2
-#define OHMAX2_30MS_VAG     3
-
-#endif // WEBRTC_VAD_DEFINES_H_
diff --git a/src/common_audio/vad/vad_filterbank.c b/src/common_audio/vad/vad_filterbank.c
index 63eef5b..b626ad0 100644
--- a/src/common_audio/vad/vad_filterbank.c
+++ b/src/common_audio/vad/vad_filterbank.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -8,38 +8,41 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-/*
- * This file includes the implementation of the internal filterbank associated functions.
- * For function description, see vad_filterbank.h.
- */
-
 #include "vad_filterbank.h"
 
+#include <assert.h>
+
 #include "signal_processing_library.h"
 #include "typedefs.h"
-#include "vad_defines.h"
 
-// Constant 160*log10(2) in Q9
-static const int16_t kLogConst = 24660;
+// Constants used in LogOfEnergy().
+static const int16_t kLogConst = 24660;  // 160*log10(2) in Q9.
+static const int16_t kLogEnergyIntPart = 14336;  // 14 in Q10
 
-// Coefficients used by WebRtcVad_HpOutput, Q14
+// Coefficients used by HighPassFilter, Q14.
 static const int16_t kHpZeroCoefs[3] = { 6631, -13262, 6631 };
 static const int16_t kHpPoleCoefs[3] = { 16384, -7756, 5620 };
 
-// Allpass filter coefficients, upper and lower, in Q15
+// Allpass filter coefficients, upper and lower, in Q15.
 // Upper: 0.64, Lower: 0.17
 static const int16_t kAllPassCoefsQ15[2] = { 20972, 5571 };
 
-// Adjustment for division with two in WebRtcVad_SplitFilter
+// Adjustment for division with two in SplitFilter.
 static const int16_t kOffsetVector[6] = { 368, 368, 272, 176, 176, 176 };
 
-void WebRtcVad_HpOutput(int16_t* in_vector,
-                        int in_vector_length,
-                        int16_t* filter_state,
-                        int16_t* out_vector) {
+// High pass filtering, with a cut-off frequency at 80 Hz, if the |data_in| is
+// sampled at 500 Hz.
+//
+// - data_in      [i]   : Input audio data sampled at 500 Hz.
+// - data_length  [i]   : Length of input and output data.
+// - filter_state [i/o] : State of the filter.
+// - data_out     [o]   : Output audio data in the frequency interval
+//                        80 - 250 Hz.
+static void HighPassFilter(const int16_t* data_in, int data_length,
+                           int16_t* filter_state, int16_t* data_out) {
   int i;
-  int16_t* in_ptr = in_vector;
-  int16_t* out_ptr = out_vector;
+  const int16_t* in_ptr = data_in;
+  int16_t* out_ptr = data_out;
   int32_t tmp32 = 0;
 
 
@@ -51,228 +54,281 @@
   // The all-pole section has a max amplification of a single sample of: 1.9931
   // Impulse response: 1.0000  0.4734 -0.1189 -0.2187 -0.0627   0.04532
 
-  for (i = 0; i < in_vector_length; i++) {
-    // all-zero section (filter coefficients in Q14)
-    tmp32 = (int32_t) WEBRTC_SPL_MUL_16_16(kHpZeroCoefs[0], (*in_ptr));
-    tmp32 += (int32_t) WEBRTC_SPL_MUL_16_16(kHpZeroCoefs[1], filter_state[0]);
-    tmp32 += (int32_t) WEBRTC_SPL_MUL_16_16(kHpZeroCoefs[2],
-                                            filter_state[1]);  // Q14
+  for (i = 0; i < data_length; i++) {
+    // All-zero section (filter coefficients in Q14).
+    tmp32 = WEBRTC_SPL_MUL_16_16(kHpZeroCoefs[0], *in_ptr);
+    tmp32 += WEBRTC_SPL_MUL_16_16(kHpZeroCoefs[1], filter_state[0]);
+    tmp32 += WEBRTC_SPL_MUL_16_16(kHpZeroCoefs[2], filter_state[1]);
     filter_state[1] = filter_state[0];
     filter_state[0] = *in_ptr++;
 
-    // all-pole section
-    tmp32 -= (int32_t) WEBRTC_SPL_MUL_16_16(kHpPoleCoefs[1],
-                                            filter_state[2]);  // Q14
-    tmp32 -= (int32_t) WEBRTC_SPL_MUL_16_16(kHpPoleCoefs[2], filter_state[3]);
+    // All-pole section (filter coefficients in Q14).
+    tmp32 -= WEBRTC_SPL_MUL_16_16(kHpPoleCoefs[1], filter_state[2]);
+    tmp32 -= WEBRTC_SPL_MUL_16_16(kHpPoleCoefs[2], filter_state[3]);
     filter_state[3] = filter_state[2];
-    filter_state[2] = (int16_t) WEBRTC_SPL_RSHIFT_W32 (tmp32, 14);
+    filter_state[2] = (int16_t) (tmp32 >> 14);
     *out_ptr++ = filter_state[2];
   }
 }
 
-void WebRtcVad_Allpass(int16_t* in_vector,
-                       int16_t filter_coefficients,
-                       int vector_length,
-                       int16_t* filter_state,
-                       int16_t* out_vector) {
+// All pass filtering of |data_in|, used before splitting the signal into two
+// frequency bands (low pass vs high pass).
+// Note that |data_in| and |data_out| can NOT correspond to the same address.
+//
+// - data_in            [i]   : Input audio signal given in Q0.
+// - data_length        [i]   : Length of input and output data.
+// - filter_coefficient [i]   : Given in Q15.
+// - filter_state       [i/o] : State of the filter given in Q(-1).
+// - data_out           [o]   : Output audio signal given in Q(-1).
+static void AllPassFilter(const int16_t* data_in, int data_length,
+                          int16_t filter_coefficient, int16_t* filter_state,
+                          int16_t* data_out) {
   // The filter can only cause overflow (in the w16 output variable)
   // if more than 4 consecutive input numbers are of maximum value and
   // has the the same sign as the impulse responses first taps.
-  // First 6 taps of the impulse response: 0.6399 0.5905 -0.3779
-  // 0.2418 -0.1547 0.0990
+  // First 6 taps of the impulse response:
+  // 0.6399 0.5905 -0.3779 0.2418 -0.1547 0.0990
 
   int i;
   int16_t tmp16 = 0;
-  int32_t tmp32 = 0, in32 = 0;
-  int32_t state32 = WEBRTC_SPL_LSHIFT_W32((int32_t) (*filter_state), 16); // Q31
+  int32_t tmp32 = 0;
+  int32_t state32 = ((int32_t) (*filter_state) << 16);  // Q15
 
-  for (i = 0; i < vector_length; i++) {
-    tmp32 = state32 + WEBRTC_SPL_MUL_16_16(filter_coefficients, (*in_vector));
-    tmp16 = (int16_t) WEBRTC_SPL_RSHIFT_W32(tmp32, 16);
-    *out_vector++ = tmp16;
-    in32 = WEBRTC_SPL_LSHIFT_W32(((int32_t) (*in_vector)), 14);
-    state32 = in32 - WEBRTC_SPL_MUL_16_16(filter_coefficients, tmp16);
-    state32 = WEBRTC_SPL_LSHIFT_W32(state32, 1);
-    in_vector += 2;
+  for (i = 0; i < data_length; i++) {
+    tmp32 = state32 + WEBRTC_SPL_MUL_16_16(filter_coefficient, *data_in);
+    tmp16 = (int16_t) (tmp32 >> 16);  // Q(-1)
+    *data_out++ = tmp16;
+    state32 = (((int32_t) (*data_in)) << 14); // Q14
+    state32 -= WEBRTC_SPL_MUL_16_16(filter_coefficient, tmp16);  // Q14
+    state32 <<= 1;  // Q15.
+    data_in += 2;
   }
 
-  *filter_state = (int16_t) WEBRTC_SPL_RSHIFT_W32(state32, 16);
+  *filter_state = (int16_t) (state32 >> 16);  // Q(-1)
 }
 
-void WebRtcVad_SplitFilter(int16_t* in_vector,
-                           int in_vector_length,
-                           int16_t* upper_state,
-                           int16_t* lower_state,
-                           int16_t* out_vector_hp,
-                           int16_t* out_vector_lp) {
-  int16_t tmp_out;
+// Splits |data_in| into |hp_data_out| and |lp_data_out| corresponding to
+// an upper (high pass) part and a lower (low pass) part respectively.
+//
+// - data_in      [i]   : Input audio data to be split into two frequency bands.
+// - data_length  [i]   : Length of |data_in|.
+// - upper_state  [i/o] : State of the upper filter, given in Q(-1).
+// - lower_state  [i/o] : State of the lower filter, given in Q(-1).
+// - hp_data_out  [o]   : Output audio data of the upper half of the spectrum.
+//                        The length is |data_length| / 2.
+// - lp_data_out  [o]   : Output audio data of the lower half of the spectrum.
+//                        The length is |data_length| / 2.
+static void SplitFilter(const int16_t* data_in, int data_length,
+                        int16_t* upper_state, int16_t* lower_state,
+                        int16_t* hp_data_out, int16_t* lp_data_out) {
   int i;
-  int half_length = WEBRTC_SPL_RSHIFT_W16(in_vector_length, 1);
+  int half_length = data_length >> 1;  // Downsampling by 2.
+  int16_t tmp_out;
 
-  // All-pass filtering upper branch
-  WebRtcVad_Allpass(&in_vector[0], kAllPassCoefsQ15[0], half_length,
-                    upper_state, out_vector_hp);
+  // All-pass filtering upper branch.
+  AllPassFilter(&data_in[0], half_length, kAllPassCoefsQ15[0], upper_state,
+                hp_data_out);
 
-  // All-pass filtering lower branch
-  WebRtcVad_Allpass(&in_vector[1], kAllPassCoefsQ15[1], half_length,
-                    lower_state, out_vector_lp);
+  // All-pass filtering lower branch.
+  AllPassFilter(&data_in[1], half_length, kAllPassCoefsQ15[1], lower_state,
+                lp_data_out);
 
-  // Make LP and HP signals
+  // Make LP and HP signals.
   for (i = 0; i < half_length; i++) {
-    tmp_out = *out_vector_hp;
-    *out_vector_hp++ -= *out_vector_lp;
-    *out_vector_lp++ += tmp_out;
+    tmp_out = *hp_data_out;
+    *hp_data_out++ -= *lp_data_out;
+    *lp_data_out++ += tmp_out;
   }
 }
 
-int16_t WebRtcVad_get_features(VadInstT* inst,
-                               int16_t* in_vector,
-                               int frame_size,
-                               int16_t* out_vector) {
-  int16_t power = 0;
-  // We expect |frame_size| to be 80, 160 or 240 samples, which corresponds to
-  // 10, 20 or 30 ms in 8 kHz. Therefore, the intermediate downsampled data will
-  // have at most 120 samples after the first split and at most 60 samples after
-  // the second split.
-  int16_t hp_120[120], lp_120[120];
-  int16_t hp_60[60], lp_60[60];
-  // Initialize variables for the first SplitFilter().
-  int length = frame_size;
-  int frequency_band = 0;
-  int16_t* in_ptr = in_vector;
-  int16_t* hp_out_ptr = hp_120;
-  int16_t* lp_out_ptr = lp_120;
+// Calculates the energy of |data_in| in dB, and also updates an overall
+// |total_energy| if necessary.
+//
+// - data_in      [i]   : Input audio data for energy calculation.
+// - data_length  [i]   : Length of input data.
+// - offset       [i]   : Offset value added to |log_energy|.
+// - total_energy [i/o] : An external energy updated with the energy of
+//                        |data_in|.
+//                        NOTE: |total_energy| is only updated if
+//                        |total_energy| <= |kMinEnergy|.
+// - log_energy   [o]   : 10 * log10("energy of |data_in|") given in Q4.
+static void LogOfEnergy(const int16_t* data_in, int data_length,
+                        int16_t offset, int16_t* total_energy,
+                        int16_t* log_energy) {
+  // |tot_rshifts| accumulates the number of right shifts performed on |energy|.
+  int tot_rshifts = 0;
+  // The |energy| will be normalized to 15 bits. We use unsigned integer because
+  // we eventually will mask out the fractional part.
+  uint32_t energy = 0;
 
-  // Split at 2000 Hz and downsample
-  WebRtcVad_SplitFilter(in_ptr, length, &inst->upper_state[frequency_band],
-                        &inst->lower_state[frequency_band], hp_out_ptr,
-                        lp_out_ptr);
+  assert(data_in != NULL);
+  assert(data_length > 0);
 
-  // Split at 3000 Hz and downsample
-  frequency_band = 1;
-  in_ptr = hp_120;
-  hp_out_ptr = hp_60;
-  lp_out_ptr = lp_60;
-  length = WEBRTC_SPL_RSHIFT_W16(frame_size, 1);
+  energy = (uint32_t) WebRtcSpl_Energy((int16_t*) data_in, data_length,
+                                       &tot_rshifts);
 
-  WebRtcVad_SplitFilter(in_ptr, length, &inst->upper_state[frequency_band],
-                        &inst->lower_state[frequency_band], hp_out_ptr,
-                        lp_out_ptr);
+  if (energy != 0) {
+    // By construction, normalizing to 15 bits is equivalent with 17 leading
+    // zeros of an unsigned 32 bit value.
+    int normalizing_rshifts = 17 - WebRtcSpl_NormU32(energy);
+    // In a 15 bit representation the leading bit is 2^14. log2(2^14) in Q10 is
+    // (14 << 10), which is what we initialize |log2_energy| with. For a more
+    // detailed derivations, see below.
+    int16_t log2_energy = kLogEnergyIntPart;
 
-  // Energy in 3000 Hz - 4000 Hz
-  length = WEBRTC_SPL_RSHIFT_W16(length, 1);
-  WebRtcVad_LogOfEnergy(hp_60, length, kOffsetVector[5], &power,
-                        &out_vector[5]);
+    tot_rshifts += normalizing_rshifts;
+    // Normalize |energy| to 15 bits.
+    // |tot_rshifts| is now the total number of right shifts performed on
+    // |energy| after normalization. This means that |energy| is in
+    // Q(-tot_rshifts).
+    if (normalizing_rshifts < 0) {
+      energy <<= -normalizing_rshifts;
+    } else {
+      energy >>= normalizing_rshifts;
+    }
 
-  // Energy in 2000 Hz - 3000 Hz
-  WebRtcVad_LogOfEnergy(lp_60, length, kOffsetVector[4], &power,
-                        &out_vector[4]);
+    // Calculate the energy of |data_in| in dB, in Q4.
+    //
+    // 10 * log10("true energy") in Q4 = 2^4 * 10 * log10("true energy") =
+    // 160 * log10(|energy| * 2^|tot_rshifts|) =
+    // 160 * log10(2) * log2(|energy| * 2^|tot_rshifts|) =
+    // 160 * log10(2) * (log2(|energy|) + log2(2^|tot_rshifts|)) =
+    // (160 * log10(2)) * (log2(|energy|) + |tot_rshifts|) =
+    // |kLogConst| * (|log2_energy| + |tot_rshifts|)
+    //
+    // We know by construction that |energy| is normalized to 15 bits. Hence,
+    // |energy| = 2^14 + frac_Q15, where frac_Q15 is a fractional part in Q15.
+    // Further, we'd like |log2_energy| in Q10
+    // log2(|energy|) in Q10 = 2^10 * log2(2^14 + frac_Q15) =
+    // 2^10 * log2(2^14 * (1 + frac_Q15 * 2^-14)) =
+    // 2^10 * (14 + log2(1 + frac_Q15 * 2^-14)) ~=
+    // (14 << 10) + 2^10 * (frac_Q15 * 2^-14) =
+    // (14 << 10) + (frac_Q15 * 2^-4) = (14 << 10) + (frac_Q15 >> 4)
+    //
+    // Note that frac_Q15 = (|energy| & 0x00003FFF)
 
-  // Split at 1000 Hz and downsample
-  frequency_band = 2;
-  in_ptr = lp_120;
-  hp_out_ptr = hp_60;
-  lp_out_ptr = lp_60;
-  length = WEBRTC_SPL_RSHIFT_W16(frame_size, 1);
-  WebRtcVad_SplitFilter(in_ptr, length, &inst->upper_state[frequency_band],
-                        &inst->lower_state[frequency_band], hp_out_ptr,
-                        lp_out_ptr);
+    // Calculate and add the fractional part to |log2_energy|.
+    log2_energy += (int16_t) ((energy & 0x00003FFF) >> 4);
 
-  // Energy in 1000 Hz - 2000 Hz
-  length = WEBRTC_SPL_RSHIFT_W16(length, 1);
-  WebRtcVad_LogOfEnergy(hp_60, length, kOffsetVector[3], &power,
-                        &out_vector[3]);
-
-  // Split at 500 Hz
-  frequency_band = 3;
-  in_ptr = lp_60;
-  hp_out_ptr = hp_120;
-  lp_out_ptr = lp_120;
-
-  WebRtcVad_SplitFilter(in_ptr, length, &inst->upper_state[frequency_band],
-                        &inst->lower_state[frequency_band], hp_out_ptr,
-                        lp_out_ptr);
-
-  // Energy in 500 Hz - 1000 Hz
-  length = WEBRTC_SPL_RSHIFT_W16(length, 1);
-  WebRtcVad_LogOfEnergy(hp_120, length, kOffsetVector[2], &power,
-                        &out_vector[2]);
-
-  // Split at 250 Hz
-  frequency_band = 4;
-  in_ptr = lp_120;
-  hp_out_ptr = hp_60;
-  lp_out_ptr = lp_60;
-
-  WebRtcVad_SplitFilter(in_ptr, length, &inst->upper_state[frequency_band],
-                        &inst->lower_state[frequency_band], hp_out_ptr,
-                        lp_out_ptr);
-
-  // Energy in 250 Hz - 500 Hz
-  length = WEBRTC_SPL_RSHIFT_W16(length, 1);
-  WebRtcVad_LogOfEnergy(hp_60, length, kOffsetVector[1], &power,
-                        &out_vector[1]);
-
-  // Remove DC and LFs
-  WebRtcVad_HpOutput(lp_60, length, inst->hp_filter_state, hp_120);
-
-  // Power in 80 Hz - 250 Hz
-  WebRtcVad_LogOfEnergy(hp_120, length, kOffsetVector[0], &power,
-                        &out_vector[0]);
-
-  return power;
-}
-
-void WebRtcVad_LogOfEnergy(int16_t* vector,
-                           int vector_length,
-                           int16_t offset,
-                           int16_t* power,
-                           int16_t* log_energy) {
-  int shfts = 0, shfts2 = 0;
-  int16_t energy_s16 = 0;
-  int16_t zeros = 0, frac = 0, log2 = 0;
-  int32_t energy = WebRtcSpl_Energy(vector, vector_length, &shfts);
-
-  if (energy > 0) {
-
-    shfts2 = 16 - WebRtcSpl_NormW32(energy);
-    shfts += shfts2;
-    // "shfts" is the total number of right shifts that has been done to
-    // energy_s16.
-    energy_s16 = (int16_t) WEBRTC_SPL_SHIFT_W32(energy, -shfts2);
-
-    // Find:
-    // 160*log10(energy_s16*2^shfts) = 160*log10(2)*log2(energy_s16*2^shfts) =
-    // 160*log10(2)*(log2(energy_s16) + log2(2^shfts)) =
-    // 160*log10(2)*(log2(energy_s16) + shfts)
-
-    zeros = WebRtcSpl_NormU32(energy_s16);
-    frac = (int16_t) (((uint32_t) ((int32_t) (energy_s16) << zeros)
-        & 0x7FFFFFFF) >> 21);
-    log2 = (int16_t) (((31 - zeros) << 10) + frac);
-
-    *log_energy = (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(kLogConst, log2, 19)
-        + (int16_t) WEBRTC_SPL_MUL_16_16_RSFT(shfts, kLogConst, 9);
+    // |kLogConst| is in Q9, |log2_energy| in Q10 and |tot_rshifts| in Q0.
+    // Note that we in our derivation above have accounted for an output in Q4.
+    *log_energy = (int16_t) (WEBRTC_SPL_MUL_16_16_RSFT(
+        kLogConst, log2_energy, 19) +
+        WEBRTC_SPL_MUL_16_16_RSFT(tot_rshifts, kLogConst, 9));
 
     if (*log_energy < 0) {
       *log_energy = 0;
     }
   } else {
-    *log_energy = 0;
-    shfts = -15;
-    energy_s16 = 0;
+    *log_energy = offset;
+    return;
   }
 
   *log_energy += offset;
 
-  // Total power in frame
-  if (*power <= MIN_ENERGY) {
-    if (shfts > 0) {
-      *power += MIN_ENERGY + 1;
-    } else if (WEBRTC_SPL_SHIFT_W16(energy_s16, shfts) > MIN_ENERGY) {
-      *power += MIN_ENERGY + 1;
+  // Update the approximate |total_energy| with the energy of |data_in|, if
+  // |total_energy| has not exceeded |kMinEnergy|. |total_energy| is used as an
+  // energy indicator in WebRtcVad_GmmProbability() in vad_core.c.
+  if (*total_energy <= kMinEnergy) {
+    if (tot_rshifts >= 0) {
+      // We know by construction that the |energy| > |kMinEnergy| in Q0, so add
+      // an arbitrary value such that |total_energy| exceeds |kMinEnergy|.
+      *total_energy += kMinEnergy + 1;
     } else {
-      *power += WEBRTC_SPL_SHIFT_W16(energy_s16, shfts);
+      // By construction |energy| is represented by 15 bits, hence any number of
+      // right shifted |energy| will fit in an int16_t. In addition, adding the
+      // value to |total_energy| is wrap around safe as long as
+      // |kMinEnergy| < 8192.
+      *total_energy += (int16_t) (energy >> -tot_rshifts);  // Q0.
     }
   }
 }
+
+int16_t WebRtcVad_CalculateFeatures(VadInstT* self, const int16_t* data_in,
+                                    int data_length, int16_t* features) {
+  int16_t total_energy = 0;
+  // We expect |data_length| to be 80, 160 or 240 samples, which corresponds to
+  // 10, 20 or 30 ms in 8 kHz. Therefore, the intermediate downsampled data will
+  // have at most 120 samples after the first split and at most 60 samples after
+  // the second split.
+  int16_t hp_120[120], lp_120[120];
+  int16_t hp_60[60], lp_60[60];
+  const int half_data_length = data_length >> 1;
+  int length = half_data_length;  // |data_length| / 2, corresponds to
+                                  // bandwidth = 2000 Hz after downsampling.
+
+  // Initialize variables for the first SplitFilter().
+  int frequency_band = 0;
+  const int16_t* in_ptr = data_in;  // [0 - 4000] Hz.
+  int16_t* hp_out_ptr = hp_120;  // [2000 - 4000] Hz.
+  int16_t* lp_out_ptr = lp_120;  // [0 - 2000] Hz.
+
+  assert(data_length >= 0);
+  assert(data_length <= 240);
+  assert(4 < kNumChannels - 1);  // Checking maximum |frequency_band|.
+
+  // Split at 2000 Hz and downsample.
+  SplitFilter(in_ptr, data_length, &self->upper_state[frequency_band],
+              &self->lower_state[frequency_band], hp_out_ptr, lp_out_ptr);
+
+  // For the upper band (2000 Hz - 4000 Hz) split at 3000 Hz and downsample.
+  frequency_band = 1;
+  in_ptr = hp_120;  // [2000 - 4000] Hz.
+  hp_out_ptr = hp_60;  // [3000 - 4000] Hz.
+  lp_out_ptr = lp_60;  // [2000 - 3000] Hz.
+  SplitFilter(in_ptr, length, &self->upper_state[frequency_band],
+              &self->lower_state[frequency_band], hp_out_ptr, lp_out_ptr);
+
+  // Energy in 3000 Hz - 4000 Hz.
+  length >>= 1;  // |data_length| / 4 <=> bandwidth = 1000 Hz.
+
+  LogOfEnergy(hp_60, length, kOffsetVector[5], &total_energy, &features[5]);
+
+  // Energy in 2000 Hz - 3000 Hz.
+  LogOfEnergy(lp_60, length, kOffsetVector[4], &total_energy, &features[4]);
+
+  // For the lower band (0 Hz - 2000 Hz) split at 1000 Hz and downsample.
+  frequency_band = 2;
+  in_ptr = lp_120;  // [0 - 2000] Hz.
+  hp_out_ptr = hp_60;  // [1000 - 2000] Hz.
+  lp_out_ptr = lp_60;  // [0 - 1000] Hz.
+  length = half_data_length;  // |data_length| / 2 <=> bandwidth = 2000 Hz.
+  SplitFilter(in_ptr, length, &self->upper_state[frequency_band],
+              &self->lower_state[frequency_band], hp_out_ptr, lp_out_ptr);
+
+  // Energy in 1000 Hz - 2000 Hz.
+  length >>= 1;  // |data_length| / 4 <=> bandwidth = 1000 Hz.
+  LogOfEnergy(hp_60, length, kOffsetVector[3], &total_energy, &features[3]);
+
+  // For the lower band (0 Hz - 1000 Hz) split at 500 Hz and downsample.
+  frequency_band = 3;
+  in_ptr = lp_60;  // [0 - 1000] Hz.
+  hp_out_ptr = hp_120;  // [500 - 1000] Hz.
+  lp_out_ptr = lp_120;  // [0 - 500] Hz.
+  SplitFilter(in_ptr, length, &self->upper_state[frequency_band],
+              &self->lower_state[frequency_band], hp_out_ptr, lp_out_ptr);
+
+  // Energy in 500 Hz - 1000 Hz.
+  length >>= 1;  // |data_length| / 8 <=> bandwidth = 500 Hz.
+  LogOfEnergy(hp_120, length, kOffsetVector[2], &total_energy, &features[2]);
+
+  // For the lower band (0 Hz - 500 Hz) split at 250 Hz and downsample.
+  frequency_band = 4;
+  in_ptr = lp_120;  // [0 - 500] Hz.
+  hp_out_ptr = hp_60;  // [250 - 500] Hz.
+  lp_out_ptr = lp_60;  // [0 - 250] Hz.
+  SplitFilter(in_ptr, length, &self->upper_state[frequency_band],
+              &self->lower_state[frequency_band], hp_out_ptr, lp_out_ptr);
+
+  // Energy in 250 Hz - 500 Hz.
+  length >>= 1;  // |data_length| / 16 <=> bandwidth = 250 Hz.
+  LogOfEnergy(hp_60, length, kOffsetVector[1], &total_energy, &features[1]);
+
+  // Remove 0 Hz - 80 Hz, by high pass filtering the lower band.
+  HighPassFilter(lp_60, length, self->hp_filter_state, hp_120);
+
+  // Energy in 80 Hz - 250 Hz.
+  LogOfEnergy(hp_120, length, kOffsetVector[0], &total_energy, &features[0]);
+
+  return total_energy;
+}
diff --git a/src/common_audio/vad/vad_filterbank.h b/src/common_audio/vad/vad_filterbank.h
index 1285c47..b5fd69e 100644
--- a/src/common_audio/vad/vad_filterbank.h
+++ b/src/common_audio/vad/vad_filterbank.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -9,8 +9,7 @@
  */
 
 /*
- * This header file includes the description of the internal VAD call
- * WebRtcVad_GaussianProbability.
+ * This file includes feature calculating functionality used in vad_core.c.
  */
 
 #ifndef WEBRTC_COMMON_AUDIO_VAD_VAD_FILTERBANK_H_
@@ -19,126 +18,27 @@
 #include "typedefs.h"
 #include "vad_core.h"
 
-// TODO(bjornv): Move local functions to vad_filterbank.c and make static.
-/****************************************************************************
- * WebRtcVad_HpOutput(...)
- *
- * This function removes DC from the lowest frequency band
- *
- * Input:
- *      - in_vector         : Samples in the frequency interval 0 - 250 Hz
- *      - in_vector_length  : Length of input and output vector
- *      - filter_state      : Current state of the filter
- *
- * Output:
- *      - out_vector        : Samples in the frequency interval 80 - 250 Hz
- *      - filter_state      : Updated state of the filter
- *
- */
-void WebRtcVad_HpOutput(int16_t* in_vector,
-                        int in_vector_length,
-                        int16_t* filter_state,
-                        int16_t* out_vector);
-
-/****************************************************************************
- * WebRtcVad_Allpass(...)
- *
- * This function is used when before splitting a speech file into 
- * different frequency bands
- *
- * Note! Do NOT let the arrays in_vector and out_vector correspond to the same address.
- *
- * Input:
- *      - in_vector             : (Q0)
- *      - filter_coefficients   : (Q15)
- *      - vector_length         : Length of input and output vector
- *      - filter_state          : Current state of the filter (Q(-1))
- *
- * Output:
- *      - out_vector            : Output speech signal (Q(-1))
- *      - filter_state          : Updated state of the filter (Q(-1))
- *
- */
-void WebRtcVad_Allpass(int16_t* in_vector,
-                       int16_t filter_coefficients,
-                       int vector_length,
-                       int16_t* filter_state,
-                       int16_t* outw16);
-
-/****************************************************************************
- * WebRtcVad_SplitFilter(...)
- *
- * This function is used when before splitting a speech file into 
- * different frequency bands
- *
- * Input:
- *      - in_vector         : Input signal to be split into two frequency bands.
- *      - upper_state       : Current state of the upper filter
- *      - lower_state       : Current state of the lower filter
- *      - in_vector_length  : Length of input vector
- *
- * Output:
- *      - out_vector_hp     : Upper half of the spectrum
- *      - out_vector_lp     : Lower half of the spectrum
- *      - upper_state       : Updated state of the upper filter
- *      - lower_state       : Updated state of the lower filter
- *
- */
-void WebRtcVad_SplitFilter(int16_t* in_vector,
-                           int in_vector_length,
-                           int16_t* upper_state,
-                           int16_t* lower_state,
-                           int16_t* out_vector_hp,
-                           int16_t* out_vector_lp);
-
-/****************************************************************************
- * WebRtcVad_get_features(...)
- *
- * This function is used to get the logarithm of the power of each of the 
- * 6 frequency bands used by the VAD:
- *        80 Hz - 250 Hz
- *        250 Hz - 500 Hz
- *        500 Hz - 1000 Hz
- *        1000 Hz - 2000 Hz
- *        2000 Hz - 3000 Hz
- *        3000 Hz - 4000 Hz 
- *
- * Input:
- *      - inst        : Pointer to VAD instance
- *      - in_vector   : Input speech signal
- *      - frame_size  : Frame size, in number of samples
- *
- * Output:
- *      - out_vector  : 10*log10(power in each freq. band), Q4
- *    
- * Return: total power in the signal (NOTE! This value is not exact since it
- *         is only used in a comparison.
- */
-int16_t WebRtcVad_get_features(VadInstT* inst,
-                               int16_t* in_vector,
-                               int frame_size,
-                               int16_t* out_vector);
-
-/****************************************************************************
- * WebRtcVad_LogOfEnergy(...)
- *
- * This function is used to get the logarithm of the power of one frequency band.
- *
- * Input:
- *      - vector            : Input speech samples for one frequency band
- *      - offset            : Offset value for the current frequency band
- *      - vector_length     : Length of input vector
- *
- * Output:
- *      - log_energy        : 10*log10(energy);
- *      - power             : Update total power in speech frame. NOTE! This value
- *                            is not exact since it is only used in a comparison.
- *     
- */
-void WebRtcVad_LogOfEnergy(int16_t* vector,
-                           int vector_length,
-                           int16_t offset,
-                           int16_t* power,
-                           int16_t* log_energy);
+// Takes |data_length| samples of |data_in| and calculates the logarithm of the
+// energy of each of the |kNumChannels| = 6 frequency bands used by the VAD:
+//        80 Hz - 250 Hz
+//        250 Hz - 500 Hz
+//        500 Hz - 1000 Hz
+//        1000 Hz - 2000 Hz
+//        2000 Hz - 3000 Hz
+//        3000 Hz - 4000 Hz
+//
+// The values are given in Q4 and written to |features|. Further, an approximate
+// overall energy is returned. The return value is used in
+// WebRtcVad_GmmProbability() as a signal indicator, hence it is arbitrary above
+// the threshold |kMinEnergy|.
+//
+// - self         [i/o] : State information of the VAD.
+// - data_in      [i]   : Input audio data, for feature extraction.
+// - data_length  [i]   : Audio data size, in number of samples.
+// - features     [o]   : 10 * log10(energy in each frequency band), Q4.
+// - returns            : Total energy of the signal (NOTE! This value is not
+//                        exact. It is only used in a comparison.)
+int16_t WebRtcVad_CalculateFeatures(VadInstT* self, const int16_t* data_in,
+                                    int data_length, int16_t* features);
 
 #endif  // WEBRTC_COMMON_AUDIO_VAD_VAD_FILTERBANK_H_
diff --git a/src/common_audio/vad/vad_filterbank_unittest.cc b/src/common_audio/vad/vad_filterbank_unittest.cc
new file mode 100644
index 0000000..ef01146
--- /dev/null
+++ b/src/common_audio/vad/vad_filterbank_unittest.cc
@@ -0,0 +1,92 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdlib.h>
+
+#include "gtest/gtest.h"
+#include "typedefs.h"
+#include "vad_unittest.h"
+
+extern "C" {
+#include "vad_core.h"
+#include "vad_filterbank.h"
+}
+
+namespace {
+
+enum { kNumValidFrameLengths = 3 };
+
+TEST_F(VadTest, vad_filterbank) {
+  VadInstT* self = reinterpret_cast<VadInstT*>(malloc(sizeof(VadInstT)));
+  static const int16_t kReference[kNumValidFrameLengths] = { 48, 11, 11 };
+  static const int16_t kFeatures[kNumValidFrameLengths * kNumChannels] = {
+      1213, 759, 587, 462, 434, 272,
+      1479, 1385, 1291, 1200, 1103, 1099,
+      1732, 1692, 1681, 1629, 1436, 1436
+  };
+  static const int16_t kOffsetVector[kNumChannels] = {
+      368, 368, 272, 176, 176, 176 };
+  int16_t features[kNumChannels];
+
+  // Construct a speech signal that will trigger the VAD in all modes. It is
+  // known that (i * i) will wrap around, but that doesn't matter in this case.
+  int16_t speech[kMaxFrameLength];
+  for (int16_t i = 0; i < kMaxFrameLength; ++i) {
+    speech[i] = (i * i);
+  }
+
+  int frame_length_index = 0;
+  ASSERT_EQ(0, WebRtcVad_InitCore(self));
+  for (size_t j = 0; j < kFrameLengthsSize; ++j) {
+    if (ValidRatesAndFrameLengths(8000, kFrameLengths[j])) {
+      EXPECT_EQ(kReference[frame_length_index],
+                WebRtcVad_CalculateFeatures(self, speech, kFrameLengths[j],
+                                            features));
+      for (int k = 0; k < kNumChannels; ++k) {
+        EXPECT_EQ(kFeatures[k + frame_length_index * kNumChannels],
+                  features[k]);
+      }
+      frame_length_index++;
+    }
+  }
+  EXPECT_EQ(kNumValidFrameLengths, frame_length_index);
+
+  // Verify that all zeros in gives kOffsetVector out.
+  memset(speech, 0, sizeof(speech));
+  ASSERT_EQ(0, WebRtcVad_InitCore(self));
+  for (size_t j = 0; j < kFrameLengthsSize; ++j) {
+    if (ValidRatesAndFrameLengths(8000, kFrameLengths[j])) {
+      EXPECT_EQ(0, WebRtcVad_CalculateFeatures(self, speech, kFrameLengths[j],
+                                               features));
+      for (int k = 0; k < kNumChannels; ++k) {
+        EXPECT_EQ(kOffsetVector[k], features[k]);
+      }
+    }
+  }
+
+  // Verify that all ones in gives kOffsetVector out. Any other constant input
+  // will have a small impact in the sub bands.
+  for (int16_t i = 0; i < kMaxFrameLength; ++i) {
+    speech[i] = 1;
+  }
+  for (size_t j = 0; j < kFrameLengthsSize; ++j) {
+    if (ValidRatesAndFrameLengths(8000, kFrameLengths[j])) {
+      ASSERT_EQ(0, WebRtcVad_InitCore(self));
+      EXPECT_EQ(0, WebRtcVad_CalculateFeatures(self, speech, kFrameLengths[j],
+                                               features));
+      for (int k = 0; k < kNumChannels; ++k) {
+        EXPECT_EQ(kOffsetVector[k], features[k]);
+      }
+    }
+  }
+
+  free(self);
+}
+}  // namespace
diff --git a/src/common_audio/vad/vad_gmm_unittest.cc b/src/common_audio/vad/vad_gmm_unittest.cc
new file mode 100644
index 0000000..205435a
--- /dev/null
+++ b/src/common_audio/vad/vad_gmm_unittest.cc
@@ -0,0 +1,43 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gtest/gtest.h"
+#include "typedefs.h"
+#include "vad_unittest.h"
+
+extern "C" {
+#include "vad_gmm.h"
+}
+
+namespace {
+
+TEST_F(VadTest, vad_gmm) {
+  int16_t delta = 0;
+  // Input value at mean.
+  EXPECT_EQ(1048576, WebRtcVad_GaussianProbability(0, 0, 128, &delta));
+  EXPECT_EQ(0, delta);
+  EXPECT_EQ(1048576, WebRtcVad_GaussianProbability(16, 128, 128, &delta));
+  EXPECT_EQ(0, delta);
+  EXPECT_EQ(1048576, WebRtcVad_GaussianProbability(-16, -128, 128, &delta));
+  EXPECT_EQ(0, delta);
+
+  // Largest possible input to give non-zero probability.
+  EXPECT_EQ(1024, WebRtcVad_GaussianProbability(59, 0, 128, &delta));
+  EXPECT_EQ(7552, delta);
+  EXPECT_EQ(1024, WebRtcVad_GaussianProbability(75, 128, 128, &delta));
+  EXPECT_EQ(7552, delta);
+  EXPECT_EQ(1024, WebRtcVad_GaussianProbability(-75, -128, 128, &delta));
+  EXPECT_EQ(-7552, delta);
+
+  // Too large input, should give zero probability.
+  EXPECT_EQ(0, WebRtcVad_GaussianProbability(105, 0, 128, &delta));
+  EXPECT_EQ(13440, delta);
+}
+}  // namespace
diff --git a/src/common_audio/vad/vad_sp.c b/src/common_audio/vad/vad_sp.c
index 4fface3..9e531c4 100644
--- a/src/common_audio/vad/vad_sp.c
+++ b/src/common_audio/vad/vad_sp.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -8,17 +8,19 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#include "vad_sp.h"
+#include "common_audio/vad/vad_sp.h"
 
 #include <assert.h>
 
-#include "signal_processing_library.h"
+#include "common_audio/signal_processing/include/signal_processing_library.h"
+#include "common_audio/vad/vad_core.h"
 #include "typedefs.h"
-#include "vad_defines.h"
 
 // Allpass filter coefficients, upper and lower, in Q13.
 // Upper: 0.64, Lower: 0.17.
-static const int16_t kAllPassCoefsQ13[2] = { 5243, 1392 };  // Q13
+static const int16_t kAllPassCoefsQ13[2] = { 5243, 1392 };  // Q13.
+static const int16_t kSmoothingDown = 6553;  // 0.2 in Q15.
+static const int16_t kSmoothingUp = 32439;  // 0.99 in Q15.
 
 // TODO(bjornv): Move this function to vad_filterbank.c.
 // Downsampling filter based on splitting filter and allpass functions.
@@ -62,114 +64,110 @@
   int i = 0, j = 0;
   int position = -1;
   // Offset to beginning of the 16 minimum values in memory.
-  int offset = (channel << 4);
+  const int offset = (channel << 4);
   int16_t current_median = 1600;
   int16_t alpha = 0;
   int32_t tmp32 = 0;
   // Pointer to memory for the 16 minimum values and the age of each value of
   // the |channel|.
-  int16_t* age_ptr = &self->index_vector[offset];
-  int16_t* value_ptr = &self->low_value_vector[offset];
-  int16_t *p1, *p2, *p3;
+  int16_t* age = &self->index_vector[offset];
+  int16_t* smallest_values = &self->low_value_vector[offset];
 
-  assert(channel < NUM_CHANNELS);
+  assert(channel < kNumChannels);
 
-  // Each value in |low_value_vector| is getting 1 loop older.
-  // Update age of each value in |age_ptr|, and remove old values.
+  // Each value in |smallest_values| is getting 1 loop older. Update |age|, and
+  // remove old values.
   for (i = 0; i < 16; i++) {
-    p3 = age_ptr + i;
-    if (*p3 != 100) {
-      *p3 += 1;
+    if (age[i] != 100) {
+      age[i]++;
     } else {
-      p1 = value_ptr + i + 1;
-      p2 = p3 + 1;
+      // Too old value. Remove from memory and shift larger values downwards.
       for (j = i; j < 16; j++) {
-        *(value_ptr + j) = *p1++;
-        *(age_ptr + j) = *p2++;
+        smallest_values[j] = smallest_values[j + 1];
+        age[j] = age[j + 1];
       }
-      *(age_ptr + 15) = 101;
-      *(value_ptr + 15) = 10000;
+      age[15] = 101;
+      smallest_values[15] = 10000;
     }
   }
 
   // Check if |feature_value| is smaller than any of the values in
-  // |low_value_vector|. If so, find the |position| where to insert the new
-  // value.
-  if (feature_value < *(value_ptr + 7)) {
-    if (feature_value < *(value_ptr + 3)) {
-      if (feature_value < *(value_ptr + 1)) {
-        if (feature_value < *value_ptr) {
+  // |smallest_values|. If so, find the |position| where to insert the new value
+  // (|feature_value|).
+  if (feature_value < smallest_values[7]) {
+    if (feature_value < smallest_values[3]) {
+      if (feature_value < smallest_values[1]) {
+        if (feature_value < smallest_values[0]) {
           position = 0;
         } else {
           position = 1;
         }
-      } else if (feature_value < *(value_ptr + 2)) {
+      } else if (feature_value < smallest_values[2]) {
         position = 2;
       } else {
         position = 3;
       }
-    } else if (feature_value < *(value_ptr + 5)) {
-      if (feature_value < *(value_ptr + 4)) {
+    } else if (feature_value < smallest_values[5]) {
+      if (feature_value < smallest_values[4]) {
         position = 4;
       } else {
         position = 5;
       }
-    } else if (feature_value < *(value_ptr + 6)) {
+    } else if (feature_value < smallest_values[6]) {
       position = 6;
     } else {
       position = 7;
     }
-  } else if (feature_value < *(value_ptr + 15)) {
-    if (feature_value < *(value_ptr + 11)) {
-      if (feature_value < *(value_ptr + 9)) {
-        if (feature_value < *(value_ptr + 8)) {
+  } else if (feature_value < smallest_values[15]) {
+    if (feature_value < smallest_values[11]) {
+      if (feature_value < smallest_values[9]) {
+        if (feature_value < smallest_values[8]) {
           position = 8;
         } else {
           position = 9;
         }
-      } else if (feature_value < *(value_ptr + 10)) {
+      } else if (feature_value < smallest_values[10]) {
         position = 10;
       } else {
         position = 11;
       }
-    } else if (feature_value < *(value_ptr + 13)) {
-      if (feature_value < *(value_ptr + 12)) {
+    } else if (feature_value < smallest_values[13]) {
+      if (feature_value < smallest_values[12]) {
         position = 12;
       } else {
         position = 13;
       }
-    } else if (feature_value < *(value_ptr + 14)) {
+    } else if (feature_value < smallest_values[14]) {
       position = 14;
     } else {
       position = 15;
     }
   }
 
-  // If we have a new small value, put it in the correct position and shift
-  // larger values up.
+  // If we have detected a new small value, insert it at the correct position
+  // and shift larger values up.
   if (position > -1) {
     for (i = 15; i > position; i--) {
-      j = i - 1;
-      *(value_ptr + i) = *(value_ptr + j);
-      *(age_ptr + i) = *(age_ptr + j);
+      smallest_values[i] = smallest_values[i - 1];
+      age[i] = age[i - 1];
     }
-    *(value_ptr + position) = feature_value;
-    *(age_ptr + position) = 1;
+    smallest_values[position] = feature_value;
+    age[position] = 1;
   }
 
   // Get |current_median|.
   if (self->frame_counter > 2) {
-    current_median = *(value_ptr + 2);
+    current_median = smallest_values[2];
   } else if (self->frame_counter > 0) {
-    current_median = *value_ptr;
+    current_median = smallest_values[0];
   }
 
   // Smooth the median value.
   if (self->frame_counter > 0) {
     if (current_median < self->mean_value[channel]) {
-      alpha = (int16_t) ALPHA1;  // 0.2 in Q15.
+      alpha = kSmoothingDown;  // 0.2 in Q15.
     } else {
-      alpha = (int16_t) ALPHA2;  // 0.99 in Q15.
+      alpha = kSmoothingUp;  // 0.99 in Q15.
     }
   }
   tmp32 = WEBRTC_SPL_MUL_16_16(alpha + 1, self->mean_value[channel]);
diff --git a/src/common_audio/vad/vad_sp.h b/src/common_audio/vad/vad_sp.h
index 95c3b4c..9e8b204 100644
--- a/src/common_audio/vad/vad_sp.h
+++ b/src/common_audio/vad/vad_sp.h
@@ -37,6 +37,8 @@
 
 // Updates and returns the smoothed feature minimum. As minimum we use the
 // median of the five smallest feature values in a 100 frames long window.
+// As long as |handle->frame_counter| is zero, that is, we haven't received any
+// "valid" data, FindMinimum() outputs the default value of 1600.
 //
 // Inputs:
 //      - feature_value : New feature value to update with.
diff --git a/src/common_audio/vad/vad_sp_unittest.cc b/src/common_audio/vad/vad_sp_unittest.cc
new file mode 100644
index 0000000..2b25316
--- /dev/null
+++ b/src/common_audio/vad/vad_sp_unittest.cc
@@ -0,0 +1,75 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdlib.h>
+
+#include "gtest/gtest.h"
+#include "typedefs.h"
+#include "vad_unittest.h"
+
+extern "C" {
+#include "vad_core.h"
+#include "vad_sp.h"
+}
+
+namespace {
+
+TEST_F(VadTest, vad_sp) {
+  VadInstT* self = reinterpret_cast<VadInstT*>(malloc(sizeof(VadInstT)));
+  int16_t zeros[kMaxFrameLength] = { 0 };
+  int32_t state[2] = { 0 };
+  int16_t data_in[kMaxFrameLength];
+  int16_t data_out[kMaxFrameLength];
+
+  // We expect the first value to be 1600 as long as |frame_counter| is zero,
+  // which is true for the first iteration.
+  static const int16_t kReferenceMin[32] = {
+      1600, 720, 509, 512, 532, 552, 570, 588,
+       606, 624, 642, 659, 675, 691, 707, 723,
+      1600, 544, 502, 522, 542, 561, 579, 597,
+       615, 633, 651, 667, 683, 699, 715, 731
+  };
+
+  // Construct a speech signal that will trigger the VAD in all modes. It is
+  // known that (i * i) will wrap around, but that doesn't matter in this case.
+  for (int16_t i = 0; i < kMaxFrameLength; ++i) {
+    data_in[i] = (i * i);
+  }
+  // Input values all zeros, expect all zeros out.
+  WebRtcVad_Downsampling(zeros, data_out, state,
+                         static_cast<int>(kMaxFrameLength));
+  EXPECT_EQ(0, state[0]);
+  EXPECT_EQ(0, state[1]);
+  for (int16_t i = 0; i < kMaxFrameLength / 2; ++i) {
+    EXPECT_EQ(0, data_out[i]);
+  }
+  // Make a simple non-zero data test.
+  WebRtcVad_Downsampling(data_in, data_out, state,
+                         static_cast<int>(kMaxFrameLength));
+  EXPECT_EQ(207, state[0]);
+  EXPECT_EQ(2270, state[1]);
+
+  ASSERT_EQ(0, WebRtcVad_InitCore(self));
+  // TODO(bjornv): Replace this part of the test with taking values from an
+  // array and calculate the reference value here. Make sure the values are not
+  // ordered.
+  for (int16_t i = 0; i < 16; ++i) {
+    int16_t value = 500 * (i + 1);
+    for (int j = 0; j < kNumChannels; ++j) {
+      // Use values both above and below initialized value.
+      EXPECT_EQ(kReferenceMin[i], WebRtcVad_FindMinimum(self, value, j));
+      EXPECT_EQ(kReferenceMin[i + 16], WebRtcVad_FindMinimum(self, 12000, j));
+    }
+    self->frame_counter++;
+  }
+
+  free(self);
+}
+}  // namespace
diff --git a/src/common_audio/vad/vad_unittest.cc b/src/common_audio/vad/vad_unittest.cc
index 54a397a..b31217c 100644
--- a/src/common_audio/vad/vad_unittest.cc
+++ b/src/common_audio/vad/vad_unittest.cc
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -8,36 +8,24 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#include <stddef.h>  // size_t
+#include "vad_unittest.h"
+
 #include <stdlib.h>
 
 #include "gtest/gtest.h"
+
+#include "common_audio/signal_processing/include/signal_processing_library.h"
+#include "common_audio/vad/include/webrtc_vad.h"
 #include "typedefs.h"
-#include "webrtc_vad.h"
 
-// TODO(bjornv): Move the internal unit tests to separate files.
-extern "C" {
-#include "vad_core.h"
-#include "vad_gmm.h"
-#include "vad_sp.h"
-}
+VadTest::VadTest() {}
 
-namespace webrtc {
-namespace {
-const int16_t kModes[] = { 0, 1, 2, 3 };
-const size_t kModesSize = sizeof(kModes) / sizeof(*kModes);
+void VadTest::SetUp() {}
 
-// Rates we support.
-const int16_t kRates[] = { 8000, 12000, 16000, 24000, 32000 };
-const size_t kRatesSize = sizeof(kRates) / sizeof(*kRates);
-// Frame lengths we support.
-const int16_t kMaxFrameLength = 960;
-const int16_t kFrameLengths[] = { 80, 120, 160, 240, 320, 480, 640,
-    kMaxFrameLength };
-const size_t kFrameLengthsSize = sizeof(kFrameLengths) / sizeof(*kFrameLengths);
+void VadTest::TearDown() {}
 
 // Returns true if the rate and frame length combination is valid.
-bool ValidRatesAndFrameLengths(int16_t rate, int16_t frame_length) {
+bool VadTest::ValidRatesAndFrameLengths(int rate, int frame_length) {
   if (rate == 8000) {
     if (frame_length == 80 || frame_length == 160 || frame_length == 240) {
       return true;
@@ -59,21 +47,7 @@
   return false;
 }
 
-class VadTest : public ::testing::Test {
- protected:
-  VadTest();
-  virtual void SetUp();
-  virtual void TearDown();
-};
-
-VadTest::VadTest() {
-}
-
-void VadTest::SetUp() {
-}
-
-void VadTest::TearDown() {
-}
+namespace {
 
 TEST_F(VadTest, ApiTest) {
   // This API test runs through the APIs for all possible valid and invalid
@@ -89,32 +63,13 @@
     speech[i] = (i * i);
   }
 
-  // WebRtcVad_get_version() tests
-  char version[32];
-  EXPECT_EQ(-1, WebRtcVad_get_version(NULL, sizeof(version)));
-  EXPECT_EQ(-1, WebRtcVad_get_version(version, 1));
-  EXPECT_EQ(0, WebRtcVad_get_version(version, sizeof(version)));
-
-  // Null instance tests
+  // NULL instance tests
   EXPECT_EQ(-1, WebRtcVad_Create(NULL));
   EXPECT_EQ(-1, WebRtcVad_Init(NULL));
-  EXPECT_EQ(-1, WebRtcVad_Assign(NULL, NULL));
   EXPECT_EQ(-1, WebRtcVad_Free(NULL));
   EXPECT_EQ(-1, WebRtcVad_set_mode(NULL, kModes[0]));
   EXPECT_EQ(-1, WebRtcVad_Process(NULL, kRates[0], speech, kFrameLengths[0]));
 
-  // WebRtcVad_AssignSize tests
-  int handle_size_bytes = 0;
-  EXPECT_EQ(0, WebRtcVad_AssignSize(&handle_size_bytes));
-  EXPECT_EQ(576, handle_size_bytes);
-
-  // WebRtcVad_Assign tests
-  void* tmp_handle = malloc(handle_size_bytes);
-  EXPECT_EQ(-1, WebRtcVad_Assign(&handle, NULL));
-  EXPECT_EQ(0, WebRtcVad_Assign(&handle, tmp_handle));
-  EXPECT_EQ(handle, tmp_handle);
-  free(tmp_handle);
-
   // WebRtcVad_Create()
   ASSERT_EQ(0, WebRtcVad_Create(&handle));
 
@@ -125,9 +80,14 @@
   // WebRtcVad_Init() test
   ASSERT_EQ(0, WebRtcVad_Init(handle));
 
-  // WebRtcVad_set_mode() invalid modes tests
-  EXPECT_EQ(-1, WebRtcVad_set_mode(handle, kModes[0] - 1));
-  EXPECT_EQ(-1, WebRtcVad_set_mode(handle, kModes[kModesSize - 1] + 1));
+  // WebRtcVad_set_mode() invalid modes tests. Tries smallest supported value
+  // minus one and largest supported value plus one.
+  EXPECT_EQ(-1, WebRtcVad_set_mode(handle,
+                                   WebRtcSpl_MinValueW32(kModes,
+                                                         kModesSize) - 1));
+  EXPECT_EQ(-1, WebRtcVad_set_mode(handle,
+                                   WebRtcSpl_MaxValueW32(kModes,
+                                                         kModesSize) + 1));
 
   // WebRtcVad_Process() tests
   // NULL speech pointer
@@ -160,75 +120,22 @@
   EXPECT_EQ(0, WebRtcVad_Free(handle));
 }
 
-TEST_F(VadTest, GMMTests) {
-  int16_t delta = 0;
-  // Input value at mean.
-  EXPECT_EQ(1048576, WebRtcVad_GaussianProbability(0, 0, 128, &delta));
-  EXPECT_EQ(0, delta);
-  EXPECT_EQ(1048576, WebRtcVad_GaussianProbability(16, 128, 128, &delta));
-  EXPECT_EQ(0, delta);
-  EXPECT_EQ(1048576, WebRtcVad_GaussianProbability(-16, -128, 128, &delta));
-  EXPECT_EQ(0, delta);
-
-  // Largest possible input to give non-zero probability.
-  EXPECT_EQ(1024, WebRtcVad_GaussianProbability(59, 0, 128, &delta));
-  EXPECT_EQ(7552, delta);
-  EXPECT_EQ(1024, WebRtcVad_GaussianProbability(75, 128, 128, &delta));
-  EXPECT_EQ(7552, delta);
-  EXPECT_EQ(1024, WebRtcVad_GaussianProbability(-75, -128, 128, &delta));
-  EXPECT_EQ(-7552, delta);
-
-  // Too large input, should give zero probability.
-  EXPECT_EQ(0, WebRtcVad_GaussianProbability(105, 0, 128, &delta));
-  EXPECT_EQ(13440, delta);
-}
-
-TEST_F(VadTest, SPTests) {
-  VadInstT* handle = (VadInstT*) malloc(sizeof(VadInstT));
-  int16_t zeros[kMaxFrameLength] = { 0 };
-  int32_t state[2] = { 0 };
-  int16_t data_in[kMaxFrameLength];
-  int16_t data_out[kMaxFrameLength];
-
-  const int16_t kReferenceMin[32] = {
-      1600, 720, 509, 512, 532, 552, 570, 588,
-      606, 624, 642, 659, 675, 691, 707, 723,
-      1600, 544, 502, 522, 542, 561, 579, 597,
-      615, 633, 651, 667, 683, 699, 715, 731
-  };
-
-  // Construct a speech signal that will trigger the VAD in all modes. It is
-  // known that (i * i) will wrap around, but that doesn't matter in this case.
-  for (int16_t i = 0; i < kMaxFrameLength; ++i) {
-    data_in[i] = (i * i);
-  }
-  // Input values all zeros, expect all zeros out.
-  WebRtcVad_Downsampling(zeros, data_out, state, (int) kMaxFrameLength);
-  EXPECT_EQ(0, state[0]);
-  EXPECT_EQ(0, state[1]);
-  for (int16_t i = 0; i < kMaxFrameLength / 2; ++i) {
-    EXPECT_EQ(0, data_out[i]);
-  }
-  // Make a simple non-zero data test.
-  WebRtcVad_Downsampling(data_in, data_out, state, (int) kMaxFrameLength);
-  EXPECT_EQ(207, state[0]);
-  EXPECT_EQ(2270, state[1]);
-
-  ASSERT_EQ(0, WebRtcVad_InitCore(handle, 0));
-  for (int16_t i = 0; i < 16; ++i) {
-    int16_t value = 500 * (i + 1);
-    for (int j = 0; j < NUM_CHANNELS; ++j) {
-      // Use values both above and below initialized value.
-      EXPECT_EQ(kReferenceMin[i], WebRtcVad_FindMinimum(handle, value, j));
-      EXPECT_EQ(kReferenceMin[i + 16], WebRtcVad_FindMinimum(handle, 12000, j));
+TEST_F(VadTest, ValidRatesFrameLengths) {
+  // This test verifies valid and invalid rate/frame_length combinations. We
+  // loop through sampling rates and frame lengths from negative values to
+  // values larger than possible.
+  for (int16_t rate = -1; rate <= kRates[kRatesSize - 1] + 1; rate++) {
+    for (int16_t frame_length = -1; frame_length <= kMaxFrameLength + 1;
+        frame_length++) {
+      if (ValidRatesAndFrameLengths(rate, frame_length)) {
+        EXPECT_EQ(0, WebRtcVad_ValidRateAndFrameLength(rate, frame_length));
+      } else {
+        EXPECT_EQ(-1, WebRtcVad_ValidRateAndFrameLength(rate, frame_length));
+      }
     }
-    handle->frame_counter++;
   }
-
-  free(handle);
 }
 
 // TODO(bjornv): Add a process test, run on file.
 
 }  // namespace
-}  // namespace webrtc
diff --git a/src/common_audio/vad/vad_unittest.h b/src/common_audio/vad/vad_unittest.h
new file mode 100644
index 0000000..3069801
--- /dev/null
+++ b/src/common_audio/vad/vad_unittest.h
@@ -0,0 +1,48 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_COMMON_AUDIO_VAD_VAD_UNITTEST_H
+#define WEBRTC_COMMON_AUDIO_VAD_VAD_UNITTEST_H
+
+#include <stddef.h>  // size_t
+
+#include "gtest/gtest.h"
+
+#include "typedefs.h"
+
+namespace {
+
+// Modes we support
+const int kModes[] = { 0, 1, 2, 3 };
+const size_t kModesSize = sizeof(kModes) / sizeof(*kModes);
+
+// Rates we support.
+const int kRates[] = { 8000, 12000, 16000, 24000, 32000 };
+const size_t kRatesSize = sizeof(kRates) / sizeof(*kRates);
+
+// Frame lengths we support.
+const int kMaxFrameLength = 960;
+const int kFrameLengths[] = { 80, 120, 160, 240, 320, 480, 640,
+    kMaxFrameLength };
+const size_t kFrameLengthsSize = sizeof(kFrameLengths) / sizeof(*kFrameLengths);
+
+}  // namespace
+
+class VadTest : public ::testing::Test {
+ protected:
+  VadTest();
+  virtual void SetUp();
+  virtual void TearDown();
+
+  // Returns true if the rate and frame length combination is valid.
+  bool ValidRatesAndFrameLengths(int rate, int frame_length);
+};
+
+#endif  // WEBRTC_COMMON_AUDIO_VAD_VAD_UNITTEST_H
diff --git a/src/common_audio/vad/webrtc_vad.c b/src/common_audio/vad/webrtc_vad.c
index dcfbda1..ab2e492 100644
--- a/src/common_audio/vad/webrtc_vad.c
+++ b/src/common_audio/vad/webrtc_vad.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -8,190 +8,123 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-
-/*
- * This file includes the VAD API calls. For a specific function call description,
- * see webrtc_vad.h
- */
+#include "common_audio/vad/include/webrtc_vad.h"
 
 #include <stdlib.h>
 #include <string.h>
 
-#include "webrtc_vad.h"
-#include "vad_core.h"
+#include "common_audio/vad/vad_core.h"
+#include "typedefs.h"
 
 static const int kInitCheck = 42;
+static const int kValidRates[] = { 8000, 16000, 32000 };
+static const size_t kRatesSize = sizeof(kValidRates) / sizeof(*kValidRates);
+static const int kMaxFrameLengthMs = 30;
 
-WebRtc_Word16 WebRtcVad_get_version(char *version, size_t size_bytes)
-{
-    const char my_version[] = "VAD 1.2.0";
+int WebRtcVad_Create(VadInst** handle) {
+  VadInstT* self = NULL;
 
-    if (version == NULL)
-    {
-        return -1;
-    }
+  if (handle == NULL) {
+    return -1;
+  }
 
-    if (size_bytes < sizeof(my_version))
-    {
-        return -1;
-    }
+  *handle = NULL;
+  self = (VadInstT*) malloc(sizeof(VadInstT));
+  *handle = (VadInst*) self;
 
-    memcpy(version, my_version, sizeof(my_version));
-    return 0;
+  if (self == NULL) {
+    return -1;
+  }
+
+  self->init_flag = 0;
+
+  return 0;
 }
 
-WebRtc_Word16 WebRtcVad_AssignSize(int *size_in_bytes)
-{
-    *size_in_bytes = sizeof(VadInstT) * 2 / sizeof(WebRtc_Word16);
-    return 0;
+int WebRtcVad_Free(VadInst* handle) {
+  if (handle == NULL) {
+    return -1;
+  }
+
+  free(handle);
+
+  return 0;
 }
 
-WebRtc_Word16 WebRtcVad_Assign(VadInst **vad_inst, void *vad_inst_addr)
-{
-
-    if (vad_inst == NULL)
-    {
-        return -1;
-    }
-
-    if (vad_inst_addr != NULL)
-    {
-        *vad_inst = (VadInst*)vad_inst_addr;
-        return 0;
-    } else
-    {
-        return -1;
-    }
+// TODO(bjornv): Move WebRtcVad_InitCore() code here.
+int WebRtcVad_Init(VadInst* handle) {
+  // Initialize the core VAD component.
+  return WebRtcVad_InitCore((VadInstT*) handle);
 }
 
-WebRtc_Word16 WebRtcVad_Create(VadInst **vad_inst)
-{
+// TODO(bjornv): Move WebRtcVad_set_mode_core() code here.
+int WebRtcVad_set_mode(VadInst* handle, int mode) {
+  VadInstT* self = (VadInstT*) handle;
 
-    VadInstT *vad_ptr = NULL;
+  if (handle == NULL) {
+    return -1;
+  }
+  if (self->init_flag != kInitCheck) {
+    return -1;
+  }
 
-    if (vad_inst == NULL)
-    {
-        return -1;
-    }
-
-    *vad_inst = NULL;
-
-    vad_ptr = (VadInstT *)malloc(sizeof(VadInstT));
-    *vad_inst = (VadInst *)vad_ptr;
-
-    if (vad_ptr == NULL)
-    {
-        return -1;
-    }
-
-    vad_ptr->init_flag = 0;
-
-    return 0;
+  return WebRtcVad_set_mode_core(self, mode);
 }
 
-WebRtc_Word16 WebRtcVad_Free(VadInst *vad_inst)
-{
+int WebRtcVad_Process(VadInst* handle, int fs, int16_t* audio_frame,
+                      int frame_length) {
+  int vad = -1;
+  VadInstT* self = (VadInstT*) handle;
 
-    if (vad_inst == NULL)
-    {
-        return -1;
-    }
+  if (handle == NULL) {
+    return -1;
+  }
 
-    free(vad_inst);
-    return 0;
+  if (self->init_flag != kInitCheck) {
+    return -1;
+  }
+  if (audio_frame == NULL) {
+    return -1;
+  }
+  if (WebRtcVad_ValidRateAndFrameLength(fs, frame_length) != 0) {
+    return -1;
+  }
+
+  if (fs == 32000) {
+    vad = WebRtcVad_CalcVad32khz(self, audio_frame, frame_length);
+  } else if (fs == 16000) {
+    vad = WebRtcVad_CalcVad16khz(self, audio_frame, frame_length);
+  } else if (fs == 8000) {
+    vad = WebRtcVad_CalcVad8khz(self, audio_frame, frame_length);
+  }
+
+  if (vad > 0) {
+    vad = 1;
+  }
+  return vad;
 }
 
-WebRtc_Word16 WebRtcVad_Init(VadInst *vad_inst)
-{
-    short mode = 0; // Default high quality
+int WebRtcVad_ValidRateAndFrameLength(int rate, int frame_length) {
+  int return_value = -1;
+  size_t i;
+  int valid_length_ms;
+  int valid_length;
 
-    if (vad_inst == NULL)
-    {
-        return -1;
-    }
-
-    return WebRtcVad_InitCore((VadInstT*)vad_inst, mode);
-}
-
-WebRtc_Word16 WebRtcVad_set_mode(VadInst *vad_inst, WebRtc_Word16 mode)
-{
-    VadInstT* vad_ptr;
-
-    if (vad_inst == NULL)
-    {
-        return -1;
-    }
-
-    vad_ptr = (VadInstT*)vad_inst;
-    if (vad_ptr->init_flag != kInitCheck)
-    {
-        return -1;
-    }
-
-    return WebRtcVad_set_mode_core((VadInstT*)vad_inst, mode);
-}
-
-WebRtc_Word16 WebRtcVad_Process(VadInst *vad_inst,
-                                WebRtc_Word16 fs,
-                                WebRtc_Word16 *speech_frame,
-                                WebRtc_Word16 frame_length)
-{
-    WebRtc_Word16 vad;
-    VadInstT* vad_ptr;
-
-    if (vad_inst == NULL)
-    {
-        return -1;
-    }
-
-    vad_ptr = (VadInstT*)vad_inst;
-    if (vad_ptr->init_flag != kInitCheck)
-    {
-        return -1;
-    }
-
-    if (speech_frame == NULL)
-    {
-        return -1;
-    }
-
-    if (fs == 32000)
-    {
-        if ((frame_length != 320) && (frame_length != 640) && (frame_length != 960))
-        {
-            return -1;
+  // We only allow 10, 20 or 30 ms frames. Loop through valid frame rates and
+  // see if we have a matching pair.
+  for (i = 0; i < kRatesSize; i++) {
+    if (kValidRates[i] == rate) {
+      for (valid_length_ms = 10; valid_length_ms <= kMaxFrameLengthMs;
+          valid_length_ms += 10) {
+        valid_length = (kValidRates[i] / 1000 * valid_length_ms);
+        if (frame_length == valid_length) {
+          return_value = 0;
+          break;
         }
-        vad = WebRtcVad_CalcVad32khz((VadInstT*)vad_inst, speech_frame, frame_length);
-
-    } else if (fs == 16000)
-    {
-        if ((frame_length != 160) && (frame_length != 320) && (frame_length != 480))
-        {
-            return -1;
-        }
-        vad = WebRtcVad_CalcVad16khz((VadInstT*)vad_inst, speech_frame, frame_length);
-
-    } else if (fs == 8000)
-    {
-        if ((frame_length != 80) && (frame_length != 160) && (frame_length != 240))
-        {
-            return -1;
-        }
-        vad = WebRtcVad_CalcVad8khz((VadInstT*)vad_inst, speech_frame, frame_length);
-
-    } else
-    {
-        return -1; // Not a supported sampling frequency
+      }
+      break;
     }
+  }
 
-    if (vad > 0)
-    {
-        return 1;
-    } else if (vad == 0)
-    {
-        return 0;
-    } else
-    {
-        return -1;
-    }
+  return return_value;
 }
diff --git a/src/common_types.h b/src/common_types.h
index 02d712e..122c7ee 100644
--- a/src/common_types.h
+++ b/src/common_types.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -13,18 +13,26 @@
 
 #include "typedefs.h"
 
+#if defined(_MSC_VER)
+// Disable "new behavior: elements of array will be default initialized"
+// warning. Affects OverUseDetectorOptions.
+#pragma warning(disable:4351)
+#endif
+
 #ifdef WEBRTC_EXPORT
-    #define WEBRTC_DLLEXPORT _declspec(dllexport)
+#define WEBRTC_DLLEXPORT _declspec(dllexport)
 #elif WEBRTC_DLL
-    #define WEBRTC_DLLEXPORT _declspec(dllimport)
+#define WEBRTC_DLLEXPORT _declspec(dllimport)
 #else
-    #define WEBRTC_DLLEXPORT
+#define WEBRTC_DLLEXPORT
 #endif
 
 #ifndef NULL
-    #define NULL 0
+#define NULL 0
 #endif
 
+#define RTP_PAYLOAD_NAME_SIZE 32
+
 namespace webrtc {
 
 class InStream
@@ -147,32 +155,61 @@
     kEncryptionAndAuthentication     = 3
 };
 
+// Interface for encrypting and decrypting regular data and rtp/rtcp packets.
+// Implement this interface if you wish to provide an encryption scheme to
+// the voice or video engines.
 class Encryption
 {
 public:
+    // Encrypt the given data.
+    //
+    // Args:
+    //   channel: The channel to encrypt data for.
+    //   in_data: The data to encrypt. This data is bytes_in bytes long.
+    //   out_data: The buffer to write the encrypted data to. You may write more
+    //       bytes of encrypted data than what you got as input, up to a maximum
+    //       of webrtc::kViEMaxMtu if you are encrypting in the video engine, or
+    //       webrtc::kVoiceEngineMaxIpPacketSizeBytes for the voice engine.
+    //   bytes_in: The number of bytes in the input buffer.
+    //   bytes_out: The number of bytes written in out_data.
     virtual void encrypt(
-        int channel_no,
+        int channel,
         unsigned char* in_data,
         unsigned char* out_data,
         int bytes_in,
         int* bytes_out) = 0;
 
+    // Decrypts the given data. This should reverse the effects of encrypt().
+    //
+    // Args:
+    //   channel_no: The channel to decrypt data for.
+    //   in_data: The data to decrypt. This data is bytes_in bytes long.
+    //   out_data: The buffer to write the decrypted data to. You may write more
+    //       bytes of decrypted data than what you got as input, up to a maximum
+    //       of webrtc::kViEMaxMtu if you are encrypting in the video engine, or
+    //       webrtc::kVoiceEngineMaxIpPacketSizeBytes for the voice engine.
+    //   bytes_in: The number of bytes in the input buffer.
+    //   bytes_out: The number of bytes written in out_data.
     virtual void decrypt(
-        int channel_no,
+        int channel,
         unsigned char* in_data,
         unsigned char* out_data,
         int bytes_in,
         int* bytes_out) = 0;
 
+    // Encrypts a RTCP packet. Otherwise, this method has the same contract as
+    // encrypt().
     virtual void encrypt_rtcp(
-        int channel_no,
+        int channel,
         unsigned char* in_data,
         unsigned char* out_data,
         int bytes_in,
         int* bytes_out) = 0;
 
+    // Decrypts a RTCP packet. Otherwise, this method has the same contract as
+    // decrypt().
     virtual void decrypt_rtcp(
-        int channel_no,
+        int channel,
         unsigned char* in_data,
         unsigned char* out_data,
         int bytes_in,
@@ -203,7 +240,7 @@
 struct CodecInst
 {
     int pltype;
-    char plname[32];
+    char plname[RTP_PAYLOAD_NAME_SIZE];
     int plfreq;
     int pacsize;
     int channels;
@@ -271,6 +308,8 @@
     int meanWaitingTimeMs;
     // median packet waiting time in the jitter buffer (ms)
     int medianWaitingTimeMs;
+    // min packet waiting time in the jitter buffer (ms)
+    int minWaitingTimeMs;
     // max packet waiting time in the jitter buffer (ms)
     int maxWaitingTimeMs;
 };
@@ -327,7 +366,7 @@
     // scaling takes place in the digital domain (e.g. for conference servers
     // and embedded devices)
     kAgcAdaptiveDigital,
-    // can be used on embedded devices where the the capture signal is level
+    // can be used on embedded devices where the capture signal level
     // is predictable
     kAgcFixedDigital
 };
@@ -447,19 +486,6 @@
 enum { kMaxSimulcastStreams = 4};
 enum { kMaxTemporalStreams = 4};
 
-// H.263 specific
-struct VideoCodecH263
-{
-    char quality;
-};
-
-// H.264 specific
-enum H264Packetization
-{
-    kH264SingleMode         = 0,
-    kH264NonInterleavedMode = 1
-};
-
 enum VideoCodecComplexity
 {
     kComplexityNormal = 0,
@@ -485,20 +511,6 @@
                      // within a frame.
 };
 
-struct VideoCodecH264
-{
-    H264Packetization          packetization;
-    VideoCodecComplexity       complexity;
-    VideoCodecProfile          profile;
-    char                       level;
-    char                       quality;
-
-    bool                       useFMO;
-
-    unsigned char              configParameters[kConfigParameterSize];
-    unsigned char              configParametersSize;
-};
-
 // VP8 specific
 struct VideoCodecVP8
 {
@@ -507,14 +519,10 @@
     VideoCodecComplexity complexity;
     VP8ResilienceMode    resilience;
     unsigned char        numberOfTemporalLayers;
-};
-
-// MPEG-4 specific
-struct VideoCodecMPEG4
-{
-    unsigned char   configParameters[kConfigParameterSize];
-    unsigned char   configParametersSize;
-    char            level;
+    bool                 denoisingOn;
+    bool                 errorConcealmentOn;
+    bool                 automaticResizeOn;
+    bool                 frameDroppingOn;
 };
 
 // Unknown specific
@@ -525,10 +533,7 @@
 // Video codec types
 enum VideoCodecType
 {
-    kVideoCodecH263,
-    kVideoCodecH264,
     kVideoCodecVP8,
-    kVideoCodecMPEG4,
     kVideoCodecI420,
     kVideoCodecRED,
     kVideoCodecULPFEC,
@@ -537,17 +542,13 @@
 
 union VideoCodecUnion
 {
-    VideoCodecH263      H263;
-    VideoCodecH264      H264;
     VideoCodecVP8       VP8;
-    VideoCodecMPEG4     MPEG4;
     VideoCodecGeneric   Generic;
 };
 
-/*
-*  Simulcast is when the same stream is encoded multiple times with different
-*  settings such as resolution.  
-*/
+
+// Simulcast is when the same stream is encoded multiple times with different
+// settings such as resolution.
 struct SimulcastStream
 {
     unsigned short      width;
@@ -578,5 +579,32 @@
     unsigned char       numberOfSimulcastStreams;
     SimulcastStream     simulcastStream[kMaxSimulcastStreams];
 };
+
+// Bandwidth over-use detector options.  These are used to drive
+// experimentation with bandwidth estimation parameters.
+// See modules/remote_bitrate_estimator/overuse_detector.h
+struct OverUseDetectorOptions {
+  OverUseDetectorOptions()
+      : initial_slope(8.0/512.0),
+        initial_offset(0),
+        initial_e(),
+        initial_process_noise(),
+        initial_avg_noise(0.0),
+        initial_var_noise(500),
+        initial_threshold(25.0) {
+    initial_e[0][0] = 100;
+    initial_e[1][1] = 1e-1;
+    initial_e[0][1] = initial_e[1][0] = 0;
+    initial_process_noise[0] = 1e-10;
+    initial_process_noise[1] = 1e-2;
+  }
+  double initial_slope;
+  double initial_offset;
+  double initial_e[2][2];
+  double initial_process_noise[2];
+  double initial_avg_noise;
+  double initial_var_noise;
+  double initial_threshold;
+};
 }  // namespace webrtc
 #endif  // WEBRTC_COMMON_TYPES_H
diff --git a/src/common_video/OWNERS b/src/common_video/OWNERS
new file mode 100644
index 0000000..7183cf2
--- /dev/null
+++ b/src/common_video/OWNERS
@@ -0,0 +1,4 @@
+stefan@webrtc.org
+mikhal@webrtc.org
+marpan@webrtc.org
+henrik.lundin@webrtc.org
diff --git a/src/common_video/common_video.gyp b/src/common_video/common_video.gyp
new file mode 100644
index 0000000..3adb56c
--- /dev/null
+++ b/src/common_video/common_video.gyp
@@ -0,0 +1,15 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'includes': [
+    '../build/common.gypi',
+    'libyuv/libyuv.gypi',
+    'jpeg/jpeg.gypi',
+  ],
+}
diff --git a/src/common_video/interface/video_image.h b/src/common_video/interface/video_image.h
new file mode 100644
index 0000000..8e48b44
--- /dev/null
+++ b/src/common_video/interface/video_image.h
@@ -0,0 +1,69 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef COMMON_VIDEO_INTERFACE_VIDEO_IMAGE_H
+#define COMMON_VIDEO_INTERFACE_VIDEO_IMAGE_H
+
+#include "typedefs.h"
+#include <stdlib.h>
+
+namespace webrtc
+{
+
+enum VideoFrameType
+{
+    kKeyFrame = 0,
+    kDeltaFrame = 1,
+    kGoldenFrame = 2,
+    kAltRefFrame = 3,
+    kSkipFrame = 4
+};
+
+class EncodedImage
+{
+public:
+    EncodedImage()
+        : _encodedWidth(0),
+          _encodedHeight(0),
+          _timeStamp(0),
+          capture_time_ms_(0),
+          _frameType(kDeltaFrame),
+          _buffer(NULL),
+          _length(0),
+          _size(0),
+          _completeFrame(false) {}
+
+    EncodedImage(WebRtc_UWord8* buffer,
+                 WebRtc_UWord32 length,
+                 WebRtc_UWord32 size)
+        : _encodedWidth(0),
+          _encodedHeight(0),
+          _timeStamp(0),
+          capture_time_ms_(0),
+          _frameType(kDeltaFrame),
+          _buffer(buffer),
+          _length(length),
+          _size(size),
+          _completeFrame(false) {}
+
+    WebRtc_UWord32              _encodedWidth;
+    WebRtc_UWord32              _encodedHeight;
+    WebRtc_UWord32              _timeStamp;
+    int64_t                     capture_time_ms_;
+    VideoFrameType              _frameType;
+    WebRtc_UWord8*              _buffer;
+    WebRtc_UWord32              _length;
+    WebRtc_UWord32              _size;
+    bool                        _completeFrame;
+};
+
+} // namespace webrtc
+
+#endif // COMMON_VIDEO_INTERFACE_VIDEO_IMAGE_H
diff --git a/src/common_video/jpeg/data_manager.cc b/src/common_video/jpeg/data_manager.cc
new file mode 100644
index 0000000..a5a7a48
--- /dev/null
+++ b/src/common_video/jpeg/data_manager.cc
@@ -0,0 +1,86 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "common_video/jpeg/data_manager.h"
+
+namespace webrtc
+{
+
+typedef struct
+{
+    jpeg_source_mgr  mgr;
+    JOCTET* next_input_byte;
+    size_t bytes_in_buffer;      /* # of byte spaces remaining in buffer */
+} DataSrcMgr;
+
+void
+jpegSetSrcBuffer(j_decompress_ptr cinfo, JOCTET* srcBuffer, size_t bufferSize)
+{
+    DataSrcMgr* src;
+    if (cinfo->src == NULL)
+    {  /* first time for this JPEG object? */
+        cinfo->src = (struct jpeg_source_mgr *)
+                   (*cinfo->mem->alloc_small) ((j_common_ptr) cinfo,
+                       JPOOL_PERMANENT, sizeof(DataSrcMgr));
+    }
+
+    // Setting required functionality
+    src = (DataSrcMgr*) cinfo->src;
+    src->mgr.init_source = initSrc;;
+    src->mgr.fill_input_buffer = fillInputBuffer;
+    src->mgr.skip_input_data = skipInputData;
+    src->mgr.resync_to_restart = jpeg_resync_to_restart; // use default
+    src->mgr.term_source = termSource;
+    // setting buffer/src
+    src->bytes_in_buffer = bufferSize;
+    src->next_input_byte = srcBuffer;
+
+}
+
+
+void
+initSrc(j_decompress_ptr cinfo)
+{
+    DataSrcMgr  *src = (DataSrcMgr*)cinfo->src;
+    src->mgr.next_input_byte = src->next_input_byte;
+    src->mgr.bytes_in_buffer = src->bytes_in_buffer;
+}
+
+boolean
+fillInputBuffer(j_decompress_ptr cinfo)
+{
+    return false;
+}
+
+
+void
+skipInputData(j_decompress_ptr cinfo, long num_bytes)
+{
+    DataSrcMgr* src = (DataSrcMgr*)cinfo->src;
+    if (num_bytes > 0)
+    {
+          if ((unsigned long)num_bytes > src->mgr.bytes_in_buffer)
+              src->mgr.bytes_in_buffer = 0;
+          else
+          {
+              src->mgr.next_input_byte += num_bytes;
+              src->mgr.bytes_in_buffer -= num_bytes;
+          }
+    }
+}
+
+
+void
+termSource (j_decompress_ptr cinfo)
+{
+  //
+}
+
+} // end of namespace webrtc
diff --git a/src/common_video/jpeg/data_manager.h b/src/common_video/jpeg/data_manager.h
new file mode 100644
index 0000000..61609ec
--- /dev/null
+++ b/src/common_video/jpeg/data_manager.h
@@ -0,0 +1,68 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Jpeg source data manager
+ */
+
+#ifndef WEBRTC_COMMON_VIDEO_JPEG_DATA_MANAGER
+#define WEBRTC_COMMON_VIDEO_JPEG_DATA_MANAGER
+
+#include <stdio.h>
+extern "C" {
+#if defined(USE_SYSTEM_LIBJPEG)
+#include <jpeglib.h>
+#else
+#include "jpeglib.h"
+#endif
+}
+
+namespace webrtc
+{
+
+// Source manager:
+
+
+// a general function that will set these values
+void
+jpegSetSrcBuffer(j_decompress_ptr cinfo, JOCTET* srcBuffer, size_t bufferSize);
+
+
+// Initialize source.  This is called by jpeg_read_header() before any
+//  data is actually read.
+
+void
+initSrc(j_decompress_ptr cinfo);
+
+
+// Fill input buffer
+// This is called whenever bytes_in_buffer has reached zero and more
+//  data is wanted.
+
+boolean
+fillInputBuffer(j_decompress_ptr cinfo);
+
+// Skip input data
+// Skip num_bytes worth of data.
+
+void
+skipInputData(j_decompress_ptr cinfo, long num_bytes);
+
+
+
+
+// Terminate source
+void
+termSource (j_decompress_ptr cinfo);
+
+} // end of namespace webrtc
+
+
+#endif /* WEBRTC_COMMON_VIDEO_JPEG_DATA_MANAGER */
diff --git a/src/common_video/jpeg/include/jpeg.h b/src/common_video/jpeg/include/jpeg.h
new file mode 100644
index 0000000..10c0461
--- /dev/null
+++ b/src/common_video/jpeg/include/jpeg.h
@@ -0,0 +1,81 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_COMMON_VIDEO_JPEG
+#define WEBRTC_COMMON_VIDEO_JPEG
+
+#include "typedefs.h"
+#include "modules/interface/module_common_types.h"  // VideoFrame
+#include "common_video/interface/video_image.h"  // EncodedImage
+
+// jpeg forward declaration
+struct jpeg_compress_struct;
+struct jpeg_decompress_struct;
+
+namespace webrtc
+{
+
+class JpegEncoder
+{
+public:
+    JpegEncoder();
+    ~JpegEncoder();
+
+// SetFileName
+// Input:
+//  - fileName - Pointer to input vector (should be less than 256) to which the
+//               compressed  file will be written to
+//    Output:
+//    - 0             : OK
+//    - (-1)          : Error
+    WebRtc_Word32 SetFileName(const char* fileName);
+
+// Encode an I420 image. The encoded image is saved to a file
+//
+// Input:
+//          - inputImage        : Image to be encoded
+//
+//    Output:
+//    - 0             : OK
+//    - (-1)          : Error
+    WebRtc_Word32 Encode(const VideoFrame& inputImage);
+
+private:
+
+    jpeg_compress_struct*   _cinfo;
+    char                    _fileName[257];
+};
+
+class JpegDecoder
+{
+ public:
+    JpegDecoder();
+    ~JpegDecoder();
+
+// Decodes a JPEG-stream
+// Supports 1 image component. 3 interleaved image components,
+// YCbCr sub-sampling  4:4:4, 4:2:2, 4:2:0.
+//
+// Input:
+//    - inputImage        : encoded image to be decoded.
+//    - outputImage       : VideoFrame to store decoded output.
+//
+//    Output:
+//    - 0             : OK
+//    - (-1)          : Error
+    WebRtc_Word32 Decode(const EncodedImage& inputImage,
+                         VideoFrame& outputImage);
+ private:
+    jpeg_decompress_struct*    _cinfo;
+};
+
+
+}
+#endif /* WEBRTC_COMMON_VIDEO_JPEG  */
diff --git a/src/common_video/jpeg/jpeg.cc b/src/common_video/jpeg/jpeg.cc
new file mode 100644
index 0000000..93bc251
--- /dev/null
+++ b/src/common_video/jpeg/jpeg.cc
@@ -0,0 +1,366 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#if defined(WIN32)
+ #include <basetsd.h>
+#endif
+#include <setjmp.h>
+#include <stdio.h>
+#include <string.h>
+
+#include "common_video/jpeg/include/jpeg.h"
+#include "common_video/jpeg/data_manager.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+
+extern "C" {
+#if defined(USE_SYSTEM_LIBJPEG)
+#include <jpeglib.h>
+#else
+#include "jpeglib.h"
+#endif
+}
+
+
+namespace webrtc
+{
+
+// Error handler
+struct myErrorMgr {
+
+    struct jpeg_error_mgr pub;
+    jmp_buf setjmp_buffer;
+};
+typedef struct myErrorMgr * myErrorPtr;
+
+METHODDEF(void)
+MyErrorExit (j_common_ptr cinfo)
+{
+    myErrorPtr myerr = (myErrorPtr) cinfo->err;
+
+    // Return control to the setjmp point
+    longjmp(myerr->setjmp_buffer, 1);
+}
+
+JpegEncoder::JpegEncoder()
+{
+   _cinfo = new jpeg_compress_struct;
+    strcpy(_fileName, "Snapshot.jpg");
+}
+
+JpegEncoder::~JpegEncoder()
+{
+    if (_cinfo != NULL)
+    {
+        delete _cinfo;
+        _cinfo = NULL;
+    }
+}
+
+
+WebRtc_Word32
+JpegEncoder::SetFileName(const char* fileName)
+{
+    if (!fileName)
+    {
+        return -1;
+    }
+
+    if (fileName)
+    {
+        strncpy(_fileName, fileName, 256);
+        _fileName[256] = 0;
+    }
+    return 0;
+}
+
+
+WebRtc_Word32
+JpegEncoder::Encode(const VideoFrame& inputImage)
+{
+    if (inputImage.Buffer() == NULL || inputImage.Size() == 0)
+    {
+        return -1;
+    }
+    if (inputImage.Width() < 1 || inputImage.Height() < 1)
+    {
+        return -1;
+    }
+
+    FILE* outFile = NULL;
+
+    const WebRtc_UWord32 width = inputImage.Width();
+    const WebRtc_UWord32 height = inputImage.Height();
+
+    // Set error handler
+    myErrorMgr      jerr;
+    _cinfo->err = jpeg_std_error(&jerr.pub);
+    jerr.pub.error_exit = MyErrorExit;
+    // Establish the setjmp return context
+    if (setjmp(jerr.setjmp_buffer))
+    {
+        // If we get here, the JPEG code has signaled an error.
+        jpeg_destroy_compress(_cinfo);
+        if (outFile != NULL)
+        {
+            fclose(outFile);
+        }
+        return -1;
+    }
+
+    if ((outFile = fopen(_fileName, "wb")) == NULL)
+    {
+        return -2;
+    }
+    // Create a compression object
+    jpeg_create_compress(_cinfo);
+
+    // Setting destination file
+    jpeg_stdio_dest(_cinfo, outFile);
+
+    // Set parameters for compression
+    _cinfo->in_color_space = JCS_YCbCr;
+    jpeg_set_defaults(_cinfo);
+
+    _cinfo->image_width = width;
+    _cinfo->image_height = height;
+    _cinfo->input_components = 3;
+
+    _cinfo->comp_info[0].h_samp_factor = 2;   // Y
+    _cinfo->comp_info[0].v_samp_factor = 2;
+    _cinfo->comp_info[1].h_samp_factor = 1;   // U
+    _cinfo->comp_info[1].v_samp_factor = 1;
+    _cinfo->comp_info[2].h_samp_factor = 1;   // V
+    _cinfo->comp_info[2].v_samp_factor = 1;
+    _cinfo->raw_data_in = TRUE;
+
+    WebRtc_UWord32 height16 = (height + 15) & ~15;
+    WebRtc_UWord8* imgPtr = inputImage.Buffer();
+    WebRtc_UWord8* origImagePtr = NULL;
+    if (height16 != height)
+    {
+        // Copy image to an adequate size buffer
+        WebRtc_UWord32 requiredSize = CalcBufferSize(kI420, width, height16);
+        origImagePtr = new WebRtc_UWord8[requiredSize];
+        memset(origImagePtr, 0, requiredSize);
+        memcpy(origImagePtr, inputImage.Buffer(), inputImage.Length());
+        imgPtr = origImagePtr;
+    }
+
+    jpeg_start_compress(_cinfo, TRUE);
+
+    JSAMPROW y[16],u[8],v[8];
+    JSAMPARRAY data[3];
+
+    data[0] = y;
+    data[1] = u;
+    data[2] = v;
+
+    WebRtc_UWord32 i, j;
+
+    for (j = 0; j < height; j += 16)
+    {
+        for (i = 0; i < 16; i++)
+        {
+            y[i] = (JSAMPLE*)imgPtr + width * (i + j);
+
+            if (i % 2 == 0)
+            {
+                u[i / 2] = (JSAMPLE*) imgPtr + width * height +
+                            width / 2 * ((i + j) / 2);
+                v[i / 2] = (JSAMPLE*) imgPtr + width * height +
+                            width * height / 4 + width / 2 * ((i + j) / 2);
+            }
+        }
+        jpeg_write_raw_data(_cinfo, data, 16);
+    }
+
+    jpeg_finish_compress(_cinfo);
+    jpeg_destroy_compress(_cinfo);
+
+    fclose(outFile);
+
+    if (origImagePtr != NULL)
+    {
+        delete [] origImagePtr;
+    }
+
+    return 0;
+}
+
+JpegDecoder::JpegDecoder()
+{
+    _cinfo = new jpeg_decompress_struct;
+}
+
+JpegDecoder::~JpegDecoder()
+{
+    if (_cinfo != NULL)
+    {
+        delete _cinfo;
+        _cinfo = NULL;
+    }
+}
+
+WebRtc_Word32
+JpegDecoder::Decode(const EncodedImage& inputImage,
+                    VideoFrame& outputImage)
+{
+
+    WebRtc_UWord8* tmpBuffer = NULL;
+    // Set error handler
+    myErrorMgr    jerr;
+    _cinfo->err = jpeg_std_error(&jerr.pub);
+    jerr.pub.error_exit = MyErrorExit;
+
+    // Establish the setjmp return context
+    if (setjmp(jerr.setjmp_buffer))
+    {
+        if (_cinfo->is_decompressor)
+        {
+            jpeg_destroy_decompress(_cinfo);
+        }
+        if (tmpBuffer != NULL)
+        {
+            delete [] tmpBuffer;
+        }
+        return -1;
+    }
+
+    _cinfo->out_color_space = JCS_YCbCr;
+
+    // Create decompression object
+    jpeg_create_decompress(_cinfo);
+
+    // Specify data source
+    jpegSetSrcBuffer(_cinfo, (JOCTET*) inputImage._buffer, inputImage._size);
+
+    // Read header data
+    jpeg_read_header(_cinfo, TRUE);
+
+    _cinfo->raw_data_out = TRUE;
+    jpeg_start_decompress(_cinfo);
+
+    // Check header
+    if (_cinfo->num_components == 4)
+    {
+        return -2; // not supported
+    }
+    if (_cinfo->progressive_mode == 1)
+    {
+        return -2; // not supported
+    }
+
+
+    WebRtc_UWord32 height = _cinfo->image_height;
+    WebRtc_UWord32 width = _cinfo->image_width;
+
+    // Making sure width and height are even
+    if (height % 2)
+    {
+        height++;
+    }
+    if (width % 2)
+    {
+         width++;
+    }
+
+    WebRtc_UWord32 height16 = (height + 15) & ~15;
+    WebRtc_UWord32 stride = (width + 15) & ~15;
+    WebRtc_UWord32 uvStride = ((((stride + 1) >> 1) + 15) & ~15);
+
+    WebRtc_UWord32 tmpRequiredSize =  stride * height16 +
+                                      2 * (uvStride * ((height16 + 1) >> 1));
+    WebRtc_UWord32 requiredSize = width * height * 3 >> 1;
+
+    // Verify sufficient buffer size.
+    outputImage.VerifyAndAllocate(requiredSize);
+    WebRtc_UWord8* outPtr = outputImage.Buffer();
+
+    if (tmpRequiredSize > requiredSize)
+    {
+        tmpBuffer = new WebRtc_UWord8[(int) (tmpRequiredSize)];
+        outPtr = tmpBuffer;
+    }
+
+    JSAMPROW y[16],u[8],v[8];
+    JSAMPARRAY data[3];
+    data[0] = y;
+    data[1] = u;
+    data[2] = v;
+
+    WebRtc_UWord32 hInd, i;
+    WebRtc_UWord32 numScanLines = 16;
+    WebRtc_UWord32 numLinesProcessed = 0;
+
+    while (_cinfo->output_scanline < _cinfo->output_height)
+    {
+        hInd = _cinfo->output_scanline;
+        for (i = 0; i < numScanLines; i++)
+        {
+            y[i] = outPtr + stride * (i + hInd);
+
+            if (i % 2 == 0)
+            {
+                 u[i / 2] = outPtr + stride * height16 +
+                            stride / 2 * ((i + hInd) / 2);
+                 v[i / 2] = outPtr + stride * height16 +
+                            stride * height16 / 4 +
+                            stride / 2 * ((i + hInd) / 2);
+            }
+        }
+        // Processes exactly one iMCU row per call
+        numLinesProcessed = jpeg_read_raw_data(_cinfo, data, numScanLines);
+        // Error in read
+        if (numLinesProcessed == 0)
+        {
+            jpeg_abort((j_common_ptr)_cinfo);
+            return -1;
+        }
+    }
+
+    if (tmpRequiredSize > requiredSize)
+    {
+         WebRtc_UWord8* dstFramePtr = outputImage.Buffer();
+         WebRtc_UWord8* tmpPtr = outPtr;
+
+         for (WebRtc_UWord32 p = 0; p < 3; p++)
+         {
+             const WebRtc_UWord32 h = (p == 0) ? height : height >> 1;
+             const WebRtc_UWord32 h16 = (p == 0) ? height16 : height16 >> 1;
+             const WebRtc_UWord32 w = (p == 0) ? width : width >> 1;
+             const WebRtc_UWord32 s = (p == 0) ? stride : stride >> 1;
+
+             for (WebRtc_UWord32 i = 0; i < h; i++)
+             {
+                 memcpy(dstFramePtr, tmpPtr, w);
+                 dstFramePtr += w;
+                 tmpPtr += s;
+             }
+             tmpPtr += (h16 - h) * s;
+         }
+    }
+
+    if (tmpBuffer != NULL)
+    {
+        delete [] tmpBuffer;
+    }
+    // Setting output Image parameter
+    outputImage.SetWidth(width);
+    outputImage.SetHeight(height);
+    outputImage.SetLength(requiredSize);
+    outputImage.SetTimeStamp(inputImage._timeStamp);
+
+    jpeg_finish_decompress(_cinfo);
+    jpeg_destroy_decompress(_cinfo);
+    return 0;
+}
+
+
+}
diff --git a/src/common_video/jpeg/jpeg.gypi b/src/common_video/jpeg/jpeg.gypi
new file mode 100644
index 0000000..80f3f62
--- /dev/null
+++ b/src/common_video/jpeg/jpeg.gypi
@@ -0,0 +1,81 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'variables': {
+    'use_libjpeg_turbo%': '<(use_libjpeg_turbo)',
+    'conditions': [
+      ['use_libjpeg_turbo==1', {
+        'libjpeg_include_dir%': [ '<(DEPTH)/third_party/libjpeg_turbo', ],
+      }, {
+        'libjpeg_include_dir%': [ '<(DEPTH)/third_party/libjpeg', ],
+      }],
+    ],
+  },
+  'targets': [
+    {
+      'target_name': 'webrtc_jpeg',
+      'type': '<(library)',
+      'dependencies': [
+        'webrtc_libyuv',
+      ],
+      'include_dirs': [
+        'include',
+        '<(webrtc_root)',
+        '<(webrtc_root)/common_video/interface',
+        '<(webrtc_root)/modules/interface/',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include',
+          '<(webrtc_root)/common_video/interface',
+        ],
+      },
+      'conditions': [
+        ['build_libjpeg==1', {
+          'dependencies': [
+            '<(libjpeg_gyp_path):libjpeg',
+          ],
+        }, {
+          # Need to add a directory normally exported by libjpeg.gyp.
+          'include_dirs': [ '<(libjpeg_include_dir)' ],
+        }],
+      ],
+      'sources': [
+        'include/jpeg.h',
+        'data_manager.cc',
+        'data_manager.h',
+        'jpeg.cc',
+      ],
+    },
+  ], # targets
+  'conditions': [
+    ['include_tests==1', {
+      'targets': [
+        {
+          'target_name': 'jpeg_unittests',
+          'type': 'executable',
+          'dependencies': [
+             'webrtc_jpeg',
+             '<(DEPTH)/testing/gtest.gyp:gtest',
+             '<(webrtc_root)/test/test.gyp:test_support_main',
+          ],
+          'sources': [
+            'jpeg_unittest.cc',
+          ],
+        },
+      ] # targets
+    }], # include_tests
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/common_video/jpeg/jpeg_unittest.cc b/src/common_video/jpeg/jpeg_unittest.cc
new file mode 100644
index 0000000..ee5d0b0
--- /dev/null
+++ b/src/common_video/jpeg/jpeg_unittest.cc
@@ -0,0 +1,126 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cstdio>
+#include <string>
+
+#include "common_video/jpeg/include/jpeg.h"
+#include "common_video/interface/video_image.h"
+#include "gtest/gtest.h"
+#include "testsupport/fileutils.h"
+#include "modules/interface/module_common_types.h"
+
+namespace webrtc {
+
+const unsigned int kImageWidth = 640;
+const unsigned int kImageHeight = 480;
+
+class JpegTest: public testing::Test {
+ protected:
+  JpegTest()
+      : input_filename_(webrtc::test::ProjectRootPath() +
+                       "data/common_video/jpeg/webrtc_logo.jpg"),
+        decoded_filename_(webrtc::test::OutputPath() + "TestJpegDec.yuv"),
+        encoded_filename_(webrtc::test::OutputPath() + "TestJpegEnc.jpg"),
+        encoded_buffer_(NULL) {}
+  virtual ~JpegTest() {}
+
+  void SetUp() {
+    encoder_ = new JpegEncoder();
+    decoder_ = new JpegDecoder();
+  }
+
+  void TearDown() {
+    if (encoded_buffer_ != NULL) {
+      if (encoded_buffer_->_buffer != NULL) {
+        delete [] encoded_buffer_->_buffer;
+      }
+      delete encoded_buffer_;
+    }
+    delete encoder_;
+    delete decoder_;
+  }
+
+  // Reads an encoded image. Caller will have to deallocate the memory of this
+  // object and it's _buffer byte array.
+  EncodedImage* ReadEncodedImage(std::string input_filename) {
+    FILE* open_file = fopen(input_filename.c_str(), "rb");
+    assert(open_file != NULL);
+    size_t length = webrtc::test::GetFileSize(input_filename);
+    EncodedImage* encoded_buffer = new EncodedImage();
+    encoded_buffer->_buffer = new WebRtc_UWord8[length];
+    encoded_buffer->_size = length;
+    encoded_buffer->_length = length;
+    if (fread(encoded_buffer->_buffer, 1, length, open_file) != length) {
+      ADD_FAILURE() << "Error reading file:" << input_filename;
+    }
+    fclose(open_file);
+    return encoded_buffer;
+  }
+
+  std::string input_filename_;
+  std::string decoded_filename_;
+  std::string encoded_filename_;
+  EncodedImage* encoded_buffer_;
+  JpegEncoder* encoder_;
+  JpegDecoder* decoder_;
+};
+
+TEST_F(JpegTest, Decode) {
+  encoded_buffer_ = ReadEncodedImage(input_filename_);
+  VideoFrame image_buffer;
+  EXPECT_EQ(0, decoder_->Decode(*encoded_buffer_, image_buffer));
+  EXPECT_GT(image_buffer.Length(), 0u);
+  EXPECT_EQ(kImageWidth, image_buffer.Width());
+  EXPECT_EQ(kImageHeight, image_buffer.Height());
+  image_buffer.Free();
+}
+
+TEST_F(JpegTest, EncodeInvalidInputs) {
+  VideoFrame empty;
+  empty.SetWidth(164);
+  empty.SetHeight(164);
+  EXPECT_EQ(-1, encoder_->SetFileName(0));
+  EXPECT_EQ(-1, encoder_->Encode(empty));
+
+  empty.VerifyAndAllocate(0);
+  EXPECT_EQ(-1, encoder_->Encode(empty));
+
+  empty.VerifyAndAllocate(10);
+  empty.SetHeight(0);
+  EXPECT_EQ(-1, encoder_->Encode(empty));
+
+  empty.SetHeight(164);
+  empty.SetWidth(0);
+  EXPECT_EQ(-1, encoder_->Encode(empty));
+  empty.Free();
+}
+
+TEST_F(JpegTest, Encode) {
+  // Decode our input image then encode it again to a new file:
+  encoded_buffer_ = ReadEncodedImage(input_filename_);
+  VideoFrame image_buffer;
+  EXPECT_EQ(0, decoder_->Decode(*encoded_buffer_, image_buffer));
+
+  EXPECT_EQ(0, encoder_->SetFileName(encoded_filename_.c_str()));
+  EXPECT_EQ(0, encoder_->Encode(image_buffer));
+
+  // Save decoded image to file.
+  FILE* save_file = fopen(decoded_filename_.c_str(), "wb");
+  if (fwrite(image_buffer.Buffer(), 1,
+             image_buffer.Length(), save_file) != image_buffer.Length()) {
+    return;
+  }
+  fclose(save_file);
+
+  image_buffer.Free();
+}
+
+}  // namespace webrtc
diff --git a/src/common_video/libyuv/include/scaler.h b/src/common_video/libyuv/include/scaler.h
new file mode 100644
index 0000000..40fa660
--- /dev/null
+++ b/src/common_video/libyuv/include/scaler.h
@@ -0,0 +1,71 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Interface to the LibYuv scaling functionality
+ */
+
+#ifndef WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_SCALER_H_
+#define WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_SCALER_H_
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+// Supported scaling types
+enum ScaleMethod {
+  kScalePoint,  // no interpolation
+  kScaleBilinear,
+  kScaleBox
+};
+
+// TODO (mikhal): Have set return the expected value of the dst_frame, such
+// that the user can allocate memory for Scale().
+class Scaler {
+ public:
+  Scaler();
+  ~Scaler();
+
+  // Set interpolation properties:
+  //
+  // Return value: 0 - OK
+  //              -1 - parameter error
+  int Set(int src_width, int src_height,
+          int dst_width, int dst_height,
+          VideoType src_video_type, VideoType dst_video_type,
+          ScaleMethod method);
+
+  // Scale frame
+  // Memory is allocated by user. If dst_frame is not of sufficient size,
+  // the frame will be reallocated to the appropriate size.
+  // Return value: 0 - OK,
+  //               -1 - parameter error
+  //               -2 - scaler not set
+  int Scale(const uint8_t* src_frame,
+            uint8_t*& dst_frame,
+            int& dst_size);
+
+ private:
+  // Determine if the VideoTypes are currently supported.
+  bool SupportedVideoType(VideoType src_video_type,
+                          VideoType dst_video_type);
+
+  ScaleMethod   method_;
+  int           src_width_;
+  int           src_height_;
+  int           dst_width_;
+  int           dst_height_;
+  bool          set_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_SCALER_H_
diff --git a/src/common_video/libyuv/include/webrtc_libyuv.h b/src/common_video/libyuv/include/webrtc_libyuv.h
new file mode 100644
index 0000000..87fbdb1
--- /dev/null
+++ b/src/common_video/libyuv/include/webrtc_libyuv.h
@@ -0,0 +1,150 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * WebRTC's wrapper to libyuv.
+ */
+
+#ifndef WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_WEBRTC_LIBYUV_H_
+#define WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_WEBRTC_LIBYUV_H_
+
+#include "common_types.h"  // RawVideoTypes.
+#include "typedefs.h"
+
+namespace webrtc {
+
+// TODO(mikhal): 1. Sync libyuv and WebRtc meaning of stride.
+//               2. Reorder parameters for consistency.
+
+// Supported video types.
+enum VideoType {
+  kUnknown,
+  kI420,
+  kIYUV,
+  kRGB24,
+  kABGR,
+  kARGB,
+  kARGB4444,
+  kRGB565,
+  kARGB1555,
+  kYUY2,
+  kYV12,
+  kUYVY,
+  kMJPG,
+  kNV21,
+  kNV12,
+  kBGRA,
+};
+
+// Conversion between the RawVideoType and the LibYuv videoType.
+// TODO(wu): Consolidate types into one type throughout WebRtc.
+VideoType RawVideoTypeToCommonVideoVideoType(RawVideoType type);
+
+// Supported rotation
+// Direction of rotation - clockwise.
+enum VideoRotationMode {
+  kRotateNone = 0,
+  kRotate90 = 90,
+  kRotate180 = 180,
+  kRotate270 = 270,
+};
+
+// Calculate the required buffer size.
+// Input:
+//   - type - The type of the designated video frame.
+//   - width - frame width in pixels.
+//   - height - frame height in pixels.
+// Return value:  The required size in bytes to accommodate the specified
+//                video frame or -1 in case of an error .
+int CalcBufferSize(VideoType type, int width, int height);
+
+// Convert To I420
+// Input:
+//   - src_video_type   : Type of input video.
+//   - src_frame        : Pointer to a source frame.
+//   - crop_x/crop_y    : Starting positions for cropping (0 for no crop).
+//   - src/dst_width    : src/dst width in pixels.
+//   - src/dst_height   : src/dst height in pixels.
+//   - sample_size      : Required only for the parsing of MJPG (set to 0 else).
+//   - dst_stride       : Number of bytes in a row of the dst Y plane.
+//   - rotate           : Rotation mode of output image.
+// Output:
+//   - dst_frame        : Pointer to a destination frame.
+// Return value: 0 if OK, < 0 otherwise.
+
+int ConvertToI420(VideoType src_video_type,
+                  const uint8_t* src_frame,
+                  int crop_x, int crop_y,
+                  int src_width, int src_height,
+                  int sample_size,
+                  int dst_width, int dst_height, int dst_stride,
+                  VideoRotationMode rotation,
+                  uint8_t* dst_frame);
+
+// Convert From I420
+// Input:
+//   - src_frame        : Pointer to a source frame.
+//   - src_stride       : Number of bytes in a row of the src Y plane.
+//   - dst_video_type   : Type of output video.
+//   - dst_sample_size  : Required only for the parsing of MJPG.
+//   - width            : Width in pixels.
+//   - height           : Height in pixels.
+//   - dst_frame        : Pointer to a destination frame.
+// Return value: 0 if OK, < 0 otherwise.
+int ConvertFromI420(const uint8_t* src_frame, int src_stride,
+                    VideoType dst_video_type, int dst_sample_size,
+                    int width, int height,
+                    uint8_t* dst_frame);
+// ConvertFrom YV12.
+// Interface - same as above.
+int ConvertFromYV12(const uint8_t* src_frame, int src_stride,
+                    VideoType dst_video_type, int dst_sample_size,
+                    int width, int height,
+                    uint8_t* dst_frame);
+
+// The following list describes designated conversion functions which
+// are not covered by the previous general functions.
+// Input and output descriptions mostly match the above descriptions, and are
+// therefore omitted.
+int ConvertRGB24ToARGB(const uint8_t* src_frame,
+                       uint8_t* dst_frame,
+                       int width, int height,
+                       int dst_stride);
+int ConvertNV12ToRGB565(const uint8_t* src_frame,
+                        uint8_t* dst_frame,
+                        int width, int height);
+
+// Mirror functions
+// The following 2 functions perform mirroring on a given image
+// (LeftRight/UpDown).
+// Input:
+//    - width       : Image width in pixels.
+//    - height      : Image height in pixels.
+//    - src_frame   : Pointer to a source frame.
+//    - dst_frame   : Pointer to a destination frame.
+// Return value: 0 if OK, < 0 otherwise.
+int MirrorI420LeftRight(const uint8_t* src_frame,
+                        uint8_t* dst_frame,
+                        int width, int height);
+int MirrorI420UpDown(const uint8_t* src_frame,
+                     uint8_t* dst_frame,
+                     int width, int height);
+
+// Compute PSNR for an I420 frame (all planes).
+double I420PSNR(const uint8_t* ref_frame,
+                const uint8_t* test_frame,
+                int width, int height);
+// Compute SSIM for an I420 frame (all planes).
+double I420SSIM(const uint8_t* ref_frame,
+                const uint8_t* test_frame,
+                int width, int height);
+}
+
+#endif  // WEBRTC_COMMON_VIDEO_LIBYUV_INCLUDE_WEBRTC_LIBYUV_H_
diff --git a/src/common_video/libyuv/libyuv.gypi b/src/common_video/libyuv/libyuv.gypi
new file mode 100644
index 0000000..77aba19
--- /dev/null
+++ b/src/common_video/libyuv/libyuv.gypi
@@ -0,0 +1,52 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'webrtc_libyuv',
+      'type': '<(library)',
+      'conditions': [
+        ['build_libyuv==1', {
+          'dependencies': [
+            '<(DEPTH)/third_party/libyuv/libyuv.gyp:libyuv'
+          ],
+        }, {
+          # Need to add a directory normally exported by libyuv.gyp.
+          'include_dirs': [ '<(libyuv_dir)/include', ],
+        }],
+      ],
+      'sources': [
+        'include/webrtc_libyuv.h',
+        'include/scaler.h',
+        'webrtc_libyuv.cc',
+        'scaler.cc',
+      ],
+    },
+  ], # targets
+  'conditions': [
+    ['include_tests==1', {
+      'targets': [
+        {
+          'target_name': 'libyuv_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'webrtc_libyuv',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+          ],
+          'sources': [
+            'libyuv_unittest.cc',
+            'scaler_unittest.cc',
+          ],
+        },
+      ], # targets
+    }], # include_tests
+  ], # conditions
+}
diff --git a/src/common_video/libyuv/libyuv_unittest.cc b/src/common_video/libyuv/libyuv_unittest.cc
new file mode 100644
index 0000000..3e58314
--- /dev/null
+++ b/src/common_video/libyuv/libyuv_unittest.cc
@@ -0,0 +1,304 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <math.h>
+#include <string.h>
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "gtest/gtest.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "testsupport/fileutils.h"
+
+namespace webrtc {
+
+int PrintFrame(const uint8_t* frame, int width, int height) {
+  if (frame == NULL)
+    return -1;
+  int k = 0;
+  for (int i = 0; i < height; i++) {
+    for (int j = 0; j < width; j++) {
+      printf("%d ", frame[k++]);
+    }
+    printf(" \n");
+  }
+  printf(" \n");
+  return 0;
+}
+
+int PrintFrame(const uint8_t* frame, int width,
+                int height, const char* str) {
+  if (frame == NULL)
+     return -1;
+  printf("%s %dx%d \n", str, width, height);
+
+  const uint8_t* frame_y = frame;
+  const uint8_t* frame_u = frame_y + width * height;
+  const uint8_t* frame_v = frame_u + width * height / 4;
+
+  int ret = 0;
+  ret += PrintFrame(frame_y, width, height);
+  ret += PrintFrame(frame_u, width / 2, height / 2);
+  ret += PrintFrame(frame_v, width / 2, height / 2);
+
+  return ret;
+}
+
+void CreateImage(int width, int height,
+                 uint8_t* frame, int offset,
+                 int height_factor, int width_factor) {
+  if (frame == NULL)
+    return;
+  for (int i = 0; i < height; i++) {
+    for (int j = 0; j < width; j++) {
+      *frame = static_cast<uint8_t>((i + offset) * height_factor
+                                     + j * width_factor);
+      frame++;
+    }
+  }
+}
+
+class TestLibYuv : public ::testing::Test {
+ protected:
+  TestLibYuv();
+  virtual void SetUp();
+  virtual void TearDown();
+
+  FILE* source_file_;
+  const int width_;
+  const int height_;
+  const int frame_length_;
+};
+
+// TODO (mikhal): Use scoped_ptr when handling buffers.
+TestLibYuv::TestLibYuv()
+    : source_file_(NULL),
+      width_(352),
+      height_(288),
+      frame_length_(CalcBufferSize(kI420, 352, 288)) {
+}
+
+void TestLibYuv::SetUp() {
+  const std::string input_file_name = webrtc::test::ProjectRootPath() +
+                                      "resources/foreman_cif.yuv";
+  source_file_  = fopen(input_file_name.c_str(), "rb");
+  ASSERT_TRUE(source_file_ != NULL) << "Cannot read file: "<<
+                                       input_file_name << "\n";
+}
+
+void TestLibYuv::TearDown() {
+  if (source_file_ != NULL) {
+    ASSERT_EQ(0, fclose(source_file_));
+  }
+  source_file_ = NULL;
+}
+
+TEST_F(TestLibYuv, ConvertSanityTest) {
+  // TODO(mikhal)
+}
+
+TEST_F(TestLibYuv, ConvertTest) {
+  // Reading YUV frame - testing on the first frame of the foreman sequence
+  int j = 0;
+  std::string output_file_name = webrtc::test::OutputPath() +
+                                 "LibYuvTest_conversion.yuv";
+  FILE*  output_file = fopen(output_file_name.c_str(), "wb");
+  ASSERT_TRUE(output_file != NULL);
+
+  double psnr = 0;
+
+  uint8_t* orig_buffer = new uint8_t[frame_length_];
+  EXPECT_GT(fread(orig_buffer, 1, frame_length_, source_file_), 0U);
+
+  // printf("\nConvert #%d I420 <-> RGB24\n", j);
+  uint8_t* res_rgb_buffer2  = new uint8_t[width_ * height_ * 3];
+  uint8_t* res_i420_buffer = new uint8_t[frame_length_];
+
+  EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_, kRGB24, 0,
+                               width_, height_, res_rgb_buffer2));
+
+  EXPECT_EQ(0, ConvertToI420(kRGB24, res_rgb_buffer2, 0, 0, width_, height_,
+                             0, width_, height_, width_, kRotateNone,
+                             res_i420_buffer));
+
+  if (fwrite(res_i420_buffer, 1, frame_length_,
+             output_file) != static_cast<unsigned int>(frame_length_)) {
+    return;
+  }
+  psnr = I420PSNR(orig_buffer, res_i420_buffer, width_, height_);
+  // Optimization Speed- quality trade-off => 45 dB only (platform dependant).
+  EXPECT_GT(ceil(psnr), 44);
+  j++;
+  delete [] res_rgb_buffer2;
+
+  // printf("\nConvert #%d I420 <-> UYVY\n", j);
+  uint8_t* out_uyvy_buffer = new uint8_t[width_ * height_ * 2];
+  EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_,
+                               kUYVY, 0, width_, height_, out_uyvy_buffer));
+  EXPECT_EQ(0, ConvertToI420(kUYVY, out_uyvy_buffer, 0, 0, width_, height_,
+            0, width_, height_, width_,kRotateNone, res_i420_buffer));
+  psnr = I420PSNR(orig_buffer, res_i420_buffer, width_, height_);
+  EXPECT_EQ(48.0, psnr);
+  if (fwrite(res_i420_buffer, 1, frame_length_,
+             output_file) !=  static_cast<unsigned int>(frame_length_)) {
+    return;
+  }
+
+  j++;
+  delete [] out_uyvy_buffer;
+
+  // printf("\nConvert #%d I420 <-> I420 \n", j);
+  uint8_t* out_i420_buffer = new uint8_t[width_ * height_ * 3 / 2 ];
+  EXPECT_EQ(0, ConvertToI420(kI420, orig_buffer, 0, 0, width_, height_,
+                             0, width_, height_, width_,
+                             kRotateNone, out_i420_buffer));
+  EXPECT_EQ(0, ConvertFromI420(out_i420_buffer, width_, kI420, 0,
+                               width_, height_, res_i420_buffer));
+  if (fwrite(res_i420_buffer, 1, frame_length_,
+             output_file) != static_cast<unsigned int>(frame_length_)) {
+    return;
+  }
+  psnr = I420PSNR(orig_buffer, res_i420_buffer, width_, height_);
+  EXPECT_EQ(48.0, psnr);
+  j++;
+  delete [] out_i420_buffer;
+
+  // printf("\nConvert #%d I420 <-> YV12\n", j);
+  uint8_t* outYV120Buffer = new uint8_t[frame_length_];
+
+  EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_, kYV12, 0,
+                               width_, height_, outYV120Buffer));
+  EXPECT_EQ(0, ConvertFromYV12(outYV120Buffer, width_,
+                               kI420, 0,
+                               width_, height_,
+                               res_i420_buffer));
+  if (fwrite(res_i420_buffer, 1, frame_length_,
+             output_file) !=  static_cast<unsigned int>(frame_length_)) {
+    return;
+  }
+
+  psnr = I420PSNR(orig_buffer, res_i420_buffer, width_, height_);
+  EXPECT_EQ(48.0, psnr);
+  j++;
+  delete [] outYV120Buffer;
+
+  // printf("\nConvert #%d I420 <-> YUY2\n", j);
+  uint8_t* out_yuy2_buffer = new uint8_t[width_ * height_ * 2];
+  EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_,
+                               kYUY2, 0, width_, height_, out_yuy2_buffer));
+
+  EXPECT_EQ(0, ConvertToI420(kYUY2, out_yuy2_buffer, 0, 0, width_, height_,
+                             0, width_, height_, width_,
+                             kRotateNone, res_i420_buffer));
+
+  if (fwrite(res_i420_buffer, 1, frame_length_,
+             output_file) !=  static_cast<unsigned int>(frame_length_)) {
+    return;
+  }
+  psnr = I420PSNR(orig_buffer, res_i420_buffer, width_, height_);
+  EXPECT_EQ(48.0, psnr);
+
+  // printf("\nConvert #%d I420 <-> RGB565\n", j);
+  uint8_t* out_rgb565_buffer = new uint8_t[width_ * height_ * 2];
+  EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_,
+                               kRGB565, 0, width_, height_, out_rgb565_buffer));
+
+  EXPECT_EQ(0, ConvertToI420(kRGB565, out_rgb565_buffer, 0, 0, width_, height_,
+                             0, width_, height_, width_,
+                             kRotateNone, res_i420_buffer));
+
+  if (fwrite(res_i420_buffer, 1, frame_length_,
+             output_file) !=  static_cast<unsigned int>(frame_length_)) {
+    return;
+  }
+  psnr = I420PSNR(orig_buffer, res_i420_buffer, width_, height_);
+  // TODO(leozwang) Investigate the right psnr should be set for I420ToRGB565,
+  // Another example is I420ToRGB24, the psnr is 44
+  EXPECT_GT(ceil(psnr), 40);
+
+  // printf("\nConvert #%d I420 <-> ARGB8888\n", j);
+  uint8_t* out_argb8888_buffer = new uint8_t[width_ * height_ * 4];
+  EXPECT_EQ(0, ConvertFromI420(orig_buffer, width_,
+                               kARGB, 0, width_, height_, out_argb8888_buffer));
+
+  EXPECT_EQ(0, ConvertToI420(kARGB, out_argb8888_buffer, 0, 0, width_, height_,
+                             0, width_, height_, width_,
+                             kRotateNone, res_i420_buffer));
+
+  if (fwrite(res_i420_buffer, 1, frame_length_,
+             output_file) !=  static_cast<unsigned int>(frame_length_)) {
+    return;
+  }
+  psnr = I420PSNR(orig_buffer, res_i420_buffer, width_, height_);
+  // TODO(leozwang) Investigate the right psnr should be set for I420ToARGB8888,
+  EXPECT_GT(ceil(psnr), 42);
+
+  ASSERT_EQ(0, fclose(output_file));
+
+  delete [] out_argb8888_buffer;
+  delete [] out_rgb565_buffer;
+  delete [] out_yuy2_buffer;
+  delete [] res_i420_buffer;
+  delete [] orig_buffer;
+}
+
+// TODO(holmer): Disabled for now due to crashes on Linux 32 bit. The theory
+// is that it crashes due to the fact that the buffers are not 16 bit aligned.
+// See http://code.google.com/p/webrtc/issues/detail?id=335 for more info.
+TEST_F(TestLibYuv, DISABLED_MirrorTest) {
+  // TODO (mikhal): Add an automated test to confirm output.
+  std::string str;
+  int width = 16;
+  int height = 8;
+  int factor_y = 1;
+  int factor_u = 1;
+  int factor_v = 1;
+  int start_buffer_offset = 10;
+  int length = webrtc::CalcBufferSize(kI420, width, height);
+
+  uint8_t* test_frame = new uint8_t[length];
+  memset(test_frame, 255, length);
+
+  // Create input frame
+  uint8_t* in_frame = test_frame;
+  uint8_t* in_frame_cb = in_frame + width * height;
+  uint8_t* in_frame_cr = in_frame_cb + (width * height) / 4;
+  CreateImage(width, height, in_frame, 10, factor_y, 1);  // Y
+  CreateImage(width / 2, height / 2, in_frame_cb, 100, factor_u, 1);  // Cb
+  CreateImage(width / 2, height / 2, in_frame_cr, 200, factor_v, 1);  // Cr
+  EXPECT_EQ(0, PrintFrame(test_frame, width, height, "InputFrame"));
+
+  uint8_t* test_frame2 = new uint8_t[length + start_buffer_offset * 2];
+  memset(test_frame2, 255, length + start_buffer_offset * 2);
+  uint8_t* out_frame = test_frame2;
+
+  // LeftRight
+  std::cout << "Test Mirror function: LeftRight" << std::endl;
+  EXPECT_EQ(0, MirrorI420LeftRight(in_frame, out_frame, width, height));
+  EXPECT_EQ(0, PrintFrame(test_frame2, width, height, "OutputFrame"));
+  EXPECT_EQ(0, MirrorI420LeftRight(out_frame, test_frame, width, height));
+
+  EXPECT_EQ(0, memcmp(in_frame, test_frame, length));
+
+  // UpDown
+  std::cout << "Test Mirror function: UpDown" << std::endl;
+  EXPECT_EQ(0, MirrorI420UpDown(in_frame, out_frame, width, height));
+  EXPECT_EQ(0, PrintFrame(test_frame2, width, height, "OutputFrame"));
+  EXPECT_EQ(0, MirrorI420UpDown(out_frame, test_frame, width, height));
+
+  EXPECT_EQ(0, memcmp(in_frame, test_frame, length));
+
+  // TODO(mikhal): Write to a file, and ask to look at the file.
+
+  std::cout << "Do the mirrored frames look correct?" << std::endl;
+  delete [] test_frame;
+  delete [] test_frame2;
+}
+
+}  // namespace
diff --git a/src/common_video/libyuv/scaler.cc b/src/common_video/libyuv/scaler.cc
new file mode 100644
index 0000000..b27c75c
--- /dev/null
+++ b/src/common_video/libyuv/scaler.cc
@@ -0,0 +1,106 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "common_video/libyuv/include/scaler.h"
+
+#include "libyuv.h"
+
+namespace webrtc {
+
+Scaler::Scaler()
+    : method_(kScaleBox),
+      src_width_(0),
+      src_height_(0),
+      dst_width_(0),
+      dst_height_(0),
+      set_(false) {}
+
+Scaler::~Scaler() {}
+
+int Scaler::Set(int src_width, int src_height,
+                int dst_width, int dst_height,
+                VideoType src_video_type, VideoType dst_video_type,
+                ScaleMethod method) {
+  set_ = false;
+  if (src_width < 1 || src_height < 1 || dst_width < 1 || dst_height < 1)
+    return -1;
+
+  if (!SupportedVideoType(src_video_type, dst_video_type))
+    return -1;
+
+  src_width_ = src_width;
+  src_height_ = src_height;
+  dst_width_ = dst_width;
+  dst_height_ = dst_height;
+  method_ = method;
+  set_ = true;
+  return 0;
+}
+
+int Scaler::Scale(const uint8_t* src_frame,
+                  uint8_t*& dst_frame,
+                  int& dst_size) {
+  if (src_frame == NULL)
+    return -1;
+  if (!set_)
+    return -2;
+
+  // Making sure that destination frame is of sufficient size
+  int dst_half_width = (dst_width_ + 1) >> 1;
+  int dst_half_height = (dst_height_ + 1) >> 1;
+  int required_dst_size = dst_width_ * dst_height_ + 2 * (dst_half_width *
+      dst_half_height);
+  if (dst_frame && required_dst_size > dst_size) {
+    // allocated buffer is too small
+    delete [] dst_frame;
+    dst_frame = NULL;
+  }
+  if (dst_frame == NULL) {
+    // TODO(mikhal): align this buffer to 16 bytes.
+    dst_frame = new uint8_t[required_dst_size];
+    dst_size = required_dst_size;
+  }
+
+  int src_half_width = (src_width_ + 1) >> 1;
+  int src_half_height = (src_height_ + 1) >> 1;
+  // Converting to planes:
+  const uint8_t* src_yplane = src_frame;
+  const uint8_t* src_uplane = src_frame + src_width_ * src_height_;
+  const uint8_t* src_vplane = src_uplane + src_half_width * src_half_height;
+
+  uint8_t* dst_yplane = dst_frame;
+  uint8_t* dst_uplane = dst_frame + dst_width_ * dst_height_;
+  uint8_t* dst_vplane = dst_uplane + dst_half_width * dst_half_height;
+
+  return libyuv::I420Scale(src_yplane, src_width_,
+                           src_uplane, src_half_width,
+                           src_vplane, src_half_width,
+                           src_width_, src_height_,
+                           dst_yplane, dst_width_,
+                           dst_uplane, dst_half_width,
+                           dst_vplane, dst_half_width,
+                           dst_width_, dst_height_,
+                           libyuv::FilterMode(method_));
+}
+
+// TODO(mikhal): Add support for more types.
+bool Scaler::SupportedVideoType(VideoType src_video_type,
+                                VideoType dst_video_type) {
+  if (src_video_type != dst_video_type)
+    return false;
+
+  if ((src_video_type == kI420) || (src_video_type == kIYUV) ||
+      (src_video_type == kYV12))
+    return true;
+
+  return false;
+}
+
+}  // namespace webrtc
diff --git a/src/common_video/libyuv/scaler_unittest.cc b/src/common_video/libyuv/scaler_unittest.cc
new file mode 100644
index 0000000..f6ee15d
--- /dev/null
+++ b/src/common_video/libyuv/scaler_unittest.cc
@@ -0,0 +1,469 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <math.h>
+#include <string.h>
+
+#include "common_video/libyuv/include/scaler.h"
+#include "gtest/gtest.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "testsupport/fileutils.h"
+
+namespace webrtc {
+
+class TestScaler : public ::testing::Test {
+ protected:
+  TestScaler();
+  virtual void SetUp();
+  virtual void TearDown();
+
+  void ScaleSequence(ScaleMethod method,
+                     FILE* source_file, std::string out_name,
+                     int src_width, int src_height,
+                     int dst_width, int dst_height);
+
+  // TODO(mikhal): add a sequence reader to libyuv.
+
+  // Computes the sequence average PSNR between an input sequence in
+  // |input_file| and an output sequence with filename |out_name|. |width| and
+  // |height| are the frame sizes of both sequences.
+  double ComputeAvgSequencePSNR(FILE* input_file, std::string out_name,
+                                int width, int height);
+
+  Scaler test_scaler_;
+  FILE* source_file_;
+  uint8_t* test_buffer_;
+  const int width_;
+  const int height_;
+  const int frame_length_;
+};
+
+
+// TODO (mikhal): Use scoped_ptr when handling buffers.
+TestScaler::TestScaler()
+    : source_file_(NULL),
+      width_(352),
+      height_(288),
+      frame_length_(CalcBufferSize(kI420, 352, 288)) {
+}
+
+void TestScaler::SetUp() {
+  const std::string input_file_name =
+      webrtc::test::ResourcePath("foreman_cif", "yuv");
+  source_file_  = fopen(input_file_name.c_str(), "rb");
+  ASSERT_TRUE(source_file_ != NULL) << "Cannot read file: "<<
+                                       input_file_name << "\n";
+  test_buffer_ = new uint8_t[frame_length_];
+}
+
+void TestScaler::TearDown() {
+  if (source_file_ != NULL) {
+    ASSERT_EQ(0, fclose(source_file_));
+  }
+  source_file_ = NULL;
+  delete [] test_buffer_;
+}
+
+TEST_F(TestScaler, ScaleWithoutSettingValues) {
+  int size = 100;
+  EXPECT_EQ(-2, test_scaler_.Scale(test_buffer_, test_buffer_, size));
+}
+
+TEST_F(TestScaler, ScaleBadInitialValues) {
+  EXPECT_EQ(-1, test_scaler_.Set(0, 288, 352, 288, kI420, kI420, kScalePoint));
+  EXPECT_EQ(-1, test_scaler_.Set(704, 0, 352, 288, kI420, kI420, kScaleBox));
+  EXPECT_EQ(-1, test_scaler_.Set(704, 576, 352, 0, kI420, kI420,
+                                 kScaleBilinear));
+  EXPECT_EQ(-1, test_scaler_.Set(704, 576, 0, 288, kI420, kI420, kScalePoint));
+}
+
+TEST_F(TestScaler, ScaleSendingNullSourcePointer) {
+  int size = 0;
+  EXPECT_EQ(-1, test_scaler_.Scale(NULL, test_buffer_, size));
+}
+
+TEST_F(TestScaler, ScaleSendingBufferTooSmall) {
+  // Sending a buffer which is too small (should reallocate and update size)
+  EXPECT_EQ(0, test_scaler_.Set(352, 288, 144, 288, kI420, kI420, kScalePoint));
+  uint8_t* test_buffer2 = NULL;
+  int size = 0;
+  EXPECT_GT(fread(test_buffer_, 1, frame_length_, source_file_), 0U);
+  EXPECT_EQ(0, test_scaler_.Scale(test_buffer_, test_buffer2, size));
+  EXPECT_EQ(144 * 288 * 3 / 2, size);
+  delete [] test_buffer2;
+}
+
+//TODO (mikhal): Converge the test into one function that accepts the method.
+TEST_F(TestScaler, PointScaleTest) {
+  double avg_psnr;
+  FILE* source_file2;
+  ScaleMethod method = kScalePoint;
+  std::string out_name = webrtc::test::OutputPath() +
+                         "LibYuvTest_PointScale_176_144.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                width_ / 2, height_ / 2);
+  // Upsample back up and check PSNR.
+  source_file2 = fopen(out_name.c_str(), "rb");
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_352_288_"
+      "upfrom_176_144.yuv";
+  ScaleSequence(method,
+                source_file2, out_name,
+                176, 144,
+                352, 288);
+  avg_psnr = ComputeAvgSequencePSNR(source_file_, out_name, width_, height_);
+  printf("PSNR for scaling from: %d %d, down/up to: %d %d, and back to "
+      "original size: %f \n", width_, height_, 176, 144, avg_psnr);
+  // Average PSNR for lower bound in assert is ~0.1dB lower than the actual
+  // average PSNR under same conditions.
+  ASSERT_GT(avg_psnr, 27.9);
+  ASSERT_EQ(0, fclose(source_file2));
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_320_240.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                320, 240);
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_704_576.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                width_ * 2, height_ * 2);
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_300_200.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                300, 200);
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_400_300.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                400, 300);
+  // Dowsample to odd size frame and scale back up.
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_282_231.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                282, 231);
+  source_file2 = fopen(out_name.c_str(), "rb");
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_352_288_"
+      "upfrom_282_231.yuv";
+  ScaleSequence(method,
+                source_file2, out_name,
+                282, 231,
+                352, 288);
+  avg_psnr = ComputeAvgSequencePSNR(source_file_, out_name, width_, height_);
+  printf("PSNR for scaling from: %d %d, down/up to: %d %d, and back to "
+      "original size: %f \n", width_, height_, 282, 231, avg_psnr);
+  // Average PSNR for lower bound in assert is ~0.1dB lower than the actual
+  // average PSNR under same conditions.
+  ASSERT_GT(avg_psnr, 27.8);
+  ASSERT_EQ(0, fclose(source_file2));
+  // Upsample to odd size frame and scale back down.
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_699_531.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                699, 531);
+  source_file2 = fopen(out_name.c_str(), "rb");
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_PointScale_352_288_"
+      "downfrom_699_531.yuv";
+  ScaleSequence(method,
+                source_file2, out_name,
+                699, 531,
+                352, 288);
+  avg_psnr = ComputeAvgSequencePSNR(source_file_, out_name, width_, height_);
+  printf("PSNR for scaling from: %d %d, down/up to: %d %d, and back to "
+      "original size: %f \n", width_, height_, 699, 531, avg_psnr);
+  // Average PSNR for lower bound in assert is ~0.1dB lower than the actual
+  // average PSNR under same conditions.
+  ASSERT_GT(avg_psnr, 37.8);
+  ASSERT_EQ(0, fclose(source_file2));
+}
+
+TEST_F(TestScaler, BiLinearScaleTest) {
+  double avg_psnr;
+  FILE* source_file2;
+  ScaleMethod method = kScaleBilinear;
+  std::string out_name = webrtc::test::OutputPath() +
+                         "LibYuvTest_BilinearScale_176_144.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                width_ / 2, height_ / 2);
+  // Upsample back up and check PSNR.
+  source_file2 = fopen(out_name.c_str(), "rb");
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_BilinearScale_352_288_"
+      "upfrom_176_144.yuv";
+  ScaleSequence(method,
+                source_file2, out_name,
+                176, 144,
+                352, 288);
+  avg_psnr = ComputeAvgSequencePSNR(source_file_, out_name, width_, height_);
+  printf("PSNR for scaling from: %d %d, down/up to: %d %d, and back to "
+      "original size: %f \n", width_, height_, 176, 144, avg_psnr);
+  // Average PSNR for lower bound in assert is ~0.1dB lower than the actual
+  // average PSNR under same conditions.
+  ASSERT_GT(avg_psnr, 27.5);
+  ComputeAvgSequencePSNR(source_file_, out_name, width_, height_);
+  ASSERT_EQ(0, fclose(source_file2));
+  out_name = webrtc::test::OutputPath() +
+             "LibYuvTest_BilinearScale_320_240.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                320, 240);
+  out_name = webrtc::test::OutputPath() +
+             "LibYuvTest_BilinearScale_704_576.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                width_ * 2, height_ * 2);
+  out_name = webrtc::test::OutputPath() +
+             "LibYuvTest_BilinearScale_300_200.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                300, 200);
+  out_name = webrtc::test::OutputPath() +
+             "LibYuvTest_BilinearScale_400_300.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                400, 300);
+  // Downsample to odd size frame and scale back up.
+  out_name = webrtc::test::OutputPath() +
+      "LibYuvTest_BilinearScale_282_231.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                282, 231);
+  source_file2 = fopen(out_name.c_str(), "rb");
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_BilinearScale_352_288_"
+      "upfrom_282_231.yuv";
+  ScaleSequence(method,
+                source_file2, out_name,
+                282, 231,
+                width_, height_);
+  avg_psnr = ComputeAvgSequencePSNR(source_file_, out_name, width_, height_);
+  printf("PSNR for scaling from: %d %d, down/up to: %d %d, and back to "
+      "original size: %f \n", width_, height_, 282, 231, avg_psnr);
+  // Average PSNR for lower bound in assert is ~0.1dB lower than the actual
+  // average PSNR under same conditions.
+  ASSERT_GT(avg_psnr, 29.7);
+  ASSERT_EQ(0, fclose(source_file2));
+  // Upsample to odd size frame and scale back down.
+  out_name = webrtc::test::OutputPath() +
+      "LibYuvTest_BilinearScale_699_531.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                699, 531);
+  source_file2 = fopen(out_name.c_str(), "rb");
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_BilinearScale_352_288_"
+      "downfrom_699_531.yuv";
+  ScaleSequence(method,
+                source_file2, out_name,
+                699, 531,
+                width_, height_);
+  avg_psnr = ComputeAvgSequencePSNR(source_file_, out_name, width_, height_);
+  printf("PSNR for scaling from: %d %d, down/up to: %d %d, and back to "
+      "original size: %f \n", width_, height_, 699, 531, avg_psnr);
+  // Average PSNR for lower bound in assert is ~0.1dB lower than the actual
+  // average PSNR under same conditions.
+  ASSERT_GT(avg_psnr, 31.4);
+  ASSERT_EQ(0, fclose(source_file2));
+}
+
+TEST_F(TestScaler, BoxScaleTest) {
+  double avg_psnr;
+  FILE* source_file2;
+  ScaleMethod method = kScaleBox;
+  std::string out_name = webrtc::test::OutputPath() +
+                         "LibYuvTest_BoxScale_176_144.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                width_ / 2, height_ / 2);
+  // Upsample back up and check PSNR.
+  source_file2 = fopen(out_name.c_str(), "rb");
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_BoxScale_352_288_"
+      "upfrom_176_144.yuv";
+  ScaleSequence(method,
+                source_file2, out_name,
+                176, 144,
+                352, 288);
+  avg_psnr = ComputeAvgSequencePSNR(source_file_, out_name, width_, height_);
+  printf("PSNR for scaling from: %d %d, down/up to: %d %d, and back to "
+      "original size: %f \n", width_, height_, 176, 144, avg_psnr);
+  // Average PSNR for lower bound in assert is ~0.1dB lower than the actual
+  // average PSNR under same conditions.
+  ASSERT_GT(avg_psnr, 27.5);
+  ASSERT_EQ(0, fclose(source_file2));
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_BoxScale_320_240.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                320, 240);
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_BoxScale_704_576.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                width_ * 2, height_ * 2);
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_BoxScale_300_200.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                300, 200);
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_BoxScale_400_300.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                400, 300);
+  // Downsample to odd size frame and scale back up.
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_BoxScale_282_231.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                282, 231);
+  source_file2 = fopen(out_name.c_str(), "rb");
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_BoxScale_352_288_"
+       "upfrom_282_231.yuv";
+  ScaleSequence(method,
+                source_file2, out_name,
+                282, 231,
+                width_, height_);
+  avg_psnr = ComputeAvgSequencePSNR(source_file_, out_name, width_, height_);
+  printf("PSNR for scaling from: %d %d, down/up to: %d %d, and back to "
+      "original size: %f \n", width_, height_, 282, 231, avg_psnr);
+  // Average PSNR for lower bound in assert is ~0.1dB lower than the actual
+  // average PSNR under same conditions.
+  ASSERT_GT(avg_psnr, 29.7);
+  ASSERT_EQ(0, fclose(source_file2));
+  // Upsample to odd size frame and scale back down.
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_BoxScale_699_531.yuv";
+  ScaleSequence(method,
+                source_file_, out_name,
+                width_, height_,
+                699, 531);
+  source_file2 = fopen(out_name.c_str(), "rb");
+  out_name = webrtc::test::OutputPath() + "LibYuvTest_BoxScale_352_288_"
+       "downfrom_699_531.yuv";
+  ScaleSequence(method,
+                source_file2, out_name,
+                699, 531,
+                width_, height_);
+  avg_psnr = ComputeAvgSequencePSNR(source_file_, out_name, width_, height_);
+  printf("PSNR for scaling from: %d %d, down/up to: %d %d, and back to "
+       "original size: %f \n", width_, height_, 699, 531, avg_psnr);
+  // Average PSNR for lower bound in assert is ~0.1dB lower than the actual
+  // average PSNR under same conditions.
+  ASSERT_GT(avg_psnr, 31.4);
+  ASSERT_EQ(0, fclose(source_file2));
+}
+
+double TestScaler::ComputeAvgSequencePSNR(FILE* input_file,
+                                          std::string out_name,
+                                          int width, int height) {
+  FILE* output_file;
+  output_file = fopen(out_name.c_str(), "rb");
+  assert(output_file != NULL);
+  rewind(input_file);
+  rewind(output_file);
+
+  int half_width = (width + 1) >> 1;
+  int half_height = (height + 1) >> 1;
+  int required_size = height * width + 2 * (half_width * half_height);
+  uint8_t* input_buffer = new uint8_t[required_size];
+  uint8_t* output_buffer = new uint8_t[required_size];
+
+  int frame_count = 0;
+  double avg_psnr = 0;
+  while (feof(input_file) == 0) {
+    if ((size_t)required_size !=
+        fread(input_buffer, 1, required_size, input_file)) {
+      break;
+    }
+    if ((size_t)required_size !=
+        fread(output_buffer, 1, required_size, output_file)) {
+      break;
+    }
+    frame_count++;
+    double psnr = I420PSNR(input_buffer, output_buffer, width, height);
+    avg_psnr += psnr;
+  }
+  avg_psnr = avg_psnr / frame_count;
+  assert(0 == fclose(output_file));
+  delete [] input_buffer;
+  delete [] output_buffer;
+  return avg_psnr;
+}
+
+// TODO (mikhal): Move part to a separate scale test.
+void TestScaler::ScaleSequence(ScaleMethod method,
+                   FILE* source_file, std::string out_name,
+                   int src_width, int src_height,
+                   int dst_width, int dst_height) {
+  FILE* output_file;
+  EXPECT_EQ(0, test_scaler_.Set(src_width, src_height,
+                               dst_width, dst_height,
+                               kI420, kI420, method));
+
+  output_file = fopen(out_name.c_str(), "wb");
+  ASSERT_TRUE(output_file != NULL);
+
+  rewind(source_file);
+
+  int src_half_width = (src_width + 1) >> 1;
+  int src_half_height = (src_height + 1) >> 1;
+  int dst_half_width = (dst_width + 1) >> 1;
+  int dst_half_height = (dst_height + 1) >> 1;
+
+  int out_required_size = dst_width * dst_height + 2 * (dst_half_width *
+      dst_half_height);
+  int in_required_size = src_height * src_width + 2 * (src_half_width *
+      src_half_height);
+
+  uint8_t* input_buffer = new uint8_t[in_required_size];
+  uint8_t* output_buffer = new uint8_t[out_required_size];
+
+  int64_t start_clock, total_clock;
+  total_clock = 0;
+  int frame_count = 0;
+
+  // Running through entire sequence
+  while (feof(source_file) == 0) {
+      if ((size_t)in_required_size !=
+          fread(input_buffer, 1, in_required_size, source_file))
+        break;
+
+    start_clock = TickTime::MillisecondTimestamp();
+    EXPECT_EQ(0, test_scaler_.Scale(input_buffer, output_buffer,
+                                   out_required_size));
+    total_clock += TickTime::MillisecondTimestamp() - start_clock;
+    if (fwrite(output_buffer, 1, out_required_size,
+               output_file) !=  static_cast<unsigned int>(out_required_size)) {
+      return;
+    }
+    frame_count++;
+  }
+
+  if (frame_count) {
+    printf("Scaling[%d %d] => [%d %d]: ",
+           src_width, src_height, dst_width, dst_height);
+    printf("Average time per frame[ms]: %.2lf\n",
+             (static_cast<double>(total_clock) / frame_count));
+  }
+  ASSERT_EQ(0, fclose(output_file));
+  delete [] input_buffer;
+  delete [] output_buffer;
+}
+
+}  // namespace webrtc
diff --git a/src/common_video/libyuv/webrtc_libyuv.cc b/src/common_video/libyuv/webrtc_libyuv.cc
new file mode 100644
index 0000000..9c370f7
--- /dev/null
+++ b/src/common_video/libyuv/webrtc_libyuv.cc
@@ -0,0 +1,323 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+
+#include <assert.h>
+
+#include "libyuv.h"
+
+namespace webrtc {
+
+VideoType RawVideoTypeToCommonVideoVideoType(RawVideoType type) {
+  switch (type) {
+    case kVideoI420:
+      return kI420;
+    case kVideoIYUV:
+      return kIYUV;
+    case kVideoRGB24:
+      return kRGB24;
+    case kVideoARGB:
+      return kARGB;
+    case kVideoARGB4444:
+      return kARGB4444;
+    case kVideoRGB565:
+      return kRGB565;
+    case kVideoARGB1555:
+      return kARGB1555;
+    case kVideoYUY2:
+      return kYUY2;
+    case kVideoYV12:
+      return kYV12;
+    case kVideoUYVY:
+      return kUYVY;
+    case kVideoNV21:
+      return kNV21;
+    case kVideoNV12:
+      return kNV12;
+    case kVideoBGRA:
+      return kBGRA;
+    case kVideoMJPEG:
+      return kMJPG;
+    default:
+      assert(false);
+  }
+  return kUnknown;
+}
+
+int CalcBufferSize(VideoType type, int width, int height) {
+  int buffer_size = 0;
+  switch (type) {
+    case kI420:
+    case kNV12:
+    case kNV21:
+    case kIYUV:
+    case kYV12: {
+      int half_width = (width + 1) >> 1;
+      int half_height = (height + 1) >> 1;
+      buffer_size = width * height + half_width * half_height * 2;
+      break;
+    }
+    case kARGB4444:
+    case kRGB565:
+    case kARGB1555:
+    case kYUY2:
+    case kUYVY:
+      buffer_size = width * height * 2;
+      break;
+    case kRGB24:
+      buffer_size = width * height * 3;
+      break;
+    case kBGRA:
+    case kARGB:
+      buffer_size = width * height * 4;
+      break;
+    default:
+      assert(false);
+      return -1;
+  }
+  return buffer_size;
+}
+
+int ConvertNV12ToRGB565(const uint8_t* src_frame,
+                        uint8_t* dst_frame,
+                        int width, int height) {
+  int abs_height = (height < 0) ? -height : height;
+  const uint8_t* yplane = src_frame;
+  const uint8_t* uvInterlaced = src_frame + (width * abs_height);
+
+  return libyuv::NV12ToRGB565(yplane, width,
+                              uvInterlaced, (width + 1) >> 1,
+                              dst_frame, width,
+                              width, height);
+}
+
+int ConvertRGB24ToARGB(const uint8_t* src_frame, uint8_t* dst_frame,
+                       int width, int height, int dst_stride) {
+  if (dst_stride == 0)
+    dst_stride = width;
+  return libyuv::RGB24ToARGB(src_frame, width,
+                             dst_frame, dst_stride,
+                             width, height);
+}
+
+libyuv::RotationMode ConvertRotationMode(VideoRotationMode rotation) {
+  switch(rotation) {
+    case kRotateNone:
+      return libyuv::kRotate0;
+    case kRotate90:
+      return libyuv::kRotate90;
+    case kRotate180:
+      return libyuv::kRotate180;
+    case kRotate270:
+      return libyuv::kRotate270;
+  }
+  assert(false);
+  return libyuv::kRotate0;
+}
+
+int ConvertVideoType(VideoType video_type) {
+  switch(video_type) {
+    case kUnknown:
+      return libyuv::FOURCC_ANY;
+    case  kI420:
+      return libyuv::FOURCC_I420;
+    case kIYUV:  // same as KYV12
+    case kYV12:
+      return libyuv::FOURCC_YV12;
+    case kRGB24:
+      return libyuv::FOURCC_24BG;
+    case kABGR:
+      return libyuv::FOURCC_ABGR;
+    case kRGB565:
+      return libyuv::FOURCC_RGBP;
+    case kYUY2:
+      return libyuv::FOURCC_YUY2;
+    case kUYVY:
+      return libyuv::FOURCC_UYVY;
+    case kMJPG:
+      return libyuv::FOURCC_MJPG;
+    case kNV21:
+      return libyuv::FOURCC_NV21;
+    case kNV12:
+      return libyuv::FOURCC_NV12;
+    case kARGB:
+      return libyuv::FOURCC_ARGB;
+    case kBGRA:
+      return libyuv::FOURCC_BGRA;
+    case kARGB4444:
+      return libyuv::FOURCC_R444;
+    case kARGB1555:
+      return libyuv::FOURCC_RGBO;
+  }
+  assert(false);
+  return libyuv::FOURCC_ANY;
+}
+
+int ConvertToI420(VideoType src_video_type,
+                  const uint8_t* src_frame,
+                  int crop_x, int crop_y,
+                  int src_width, int src_height,
+                  int sample_size,
+                  int dst_width, int dst_height, int dst_stride,
+                  VideoRotationMode rotation,
+                  uint8_t* dst_frame) {
+  // All sanity tests are conducted within LibYuv.
+  int abs_dst_height = (dst_height < 0) ? -dst_height : dst_height;
+  int half_dst_width = (dst_width + 1) >> 1;
+  int half_dst_height = (abs_dst_height + 1) >> 1;
+  uint8_t* dst_yplane = dst_frame;
+  uint8_t* dst_uplane = dst_yplane + dst_width * abs_dst_height;
+  uint8_t* dst_vplane = dst_uplane + half_dst_width * half_dst_height;
+  return libyuv::ConvertToI420(src_frame, sample_size,
+                               dst_yplane, dst_stride,
+                               dst_uplane, (dst_stride + 1) / 2,
+                               dst_vplane, (dst_stride + 1) / 2,
+                               crop_x, crop_y,
+                               src_width, src_height,
+                               dst_width, dst_height,
+                               ConvertRotationMode(rotation),
+                               ConvertVideoType(src_video_type));
+}
+
+int ConvertFromI420(const uint8_t* src_frame, int src_stride,
+                    VideoType dst_video_type, int dst_sample_size,
+                    int width, int height,
+                    uint8_t* dst_frame) {
+  int abs_height = (height < 0) ? -height : height;
+  int half_width = (width + 1) >> 1;
+  int half_height = (abs_height + 1) >> 1;
+  const uint8_t* src_yplane = src_frame;
+  const uint8_t* src_uplane = src_yplane + width * abs_height;
+  const uint8_t* src_vplane = src_uplane + half_width * half_height;
+  return libyuv::ConvertFromI420(src_yplane, src_stride,
+                                 src_uplane, (src_stride + 1) / 2,
+                                 src_vplane, (src_stride + 1) / 2,
+                                 dst_frame, dst_sample_size,
+                                 width, height,
+                                 ConvertVideoType(dst_video_type));
+}
+
+int ConvertFromYV12(const uint8_t* src_frame, int src_stride,
+                    VideoType dst_video_type, int dst_sample_size,
+                    int width, int height,
+                    uint8_t* dst_frame) {
+  int half_src_stride = (src_stride + 1) >> 1;
+  int abs_height = (height < 0) ? -height : height;
+  int half_height = (abs_height + 1) >> 1;
+  const uint8_t* src_yplane = src_frame;
+  const uint8_t* src_uplane = src_yplane + width * abs_height;
+  const uint8_t* src_vplane = src_uplane + half_src_stride * half_height;
+  // YV12 = Y, V, U
+  return libyuv::ConvertFromI420(src_yplane, src_stride,
+                                 src_vplane, half_src_stride,
+                                 src_uplane, half_src_stride,
+                                 dst_frame, dst_sample_size,
+                                 width, height,
+                                 ConvertVideoType(dst_video_type));
+}
+
+int MirrorI420LeftRight(const uint8_t* src_frame,
+                        uint8_t* dst_frame,
+                        int width, int height) {
+  int half_width = (width + 1) >> 1;
+  int half_height = (height + 1) >> 1;
+  const uint8_t* src_yplane = src_frame;
+  const uint8_t* src_uplane = src_yplane + width * height;
+  const uint8_t* src_vplane = src_uplane + half_width * half_height;
+  uint8_t* dst_yplane = dst_frame;
+  uint8_t* dst_uplane = dst_yplane + width * height;
+  uint8_t* dst_vplane = dst_uplane + half_width * half_height;
+  return libyuv::I420Mirror(src_yplane, width,
+                            src_uplane, half_width,
+                            src_vplane, half_width,
+                            dst_yplane, width,
+                            dst_uplane, half_width,
+                            dst_vplane, half_width,
+                            width, height);
+}
+
+int MirrorI420UpDown(const uint8_t* src_frame, uint8_t* dst_frame,
+                     int width, int height) {
+  int half_width = (width + 1) >> 1;
+  int half_height = (height + 1) >> 1;
+  const uint8_t* src_yplane = src_frame;
+  const uint8_t* src_uplane = src_frame + width * height;
+  const uint8_t* src_vplane = src_uplane + half_width * half_height;
+  uint8_t* dst_yplane = dst_frame;
+  uint8_t* dst_uplane = dst_frame + width * height;
+  uint8_t* dst_vplane = dst_uplane + half_width * half_height;
+
+  // Inserting negative height flips the frame.
+  return libyuv::I420Copy(src_yplane, width,
+                          src_uplane, half_width,
+                          src_vplane, half_width,
+                          dst_yplane, width,
+                          dst_uplane, half_width,
+                          dst_vplane, half_width,
+                          width, -height);
+}
+
+// Compute PSNR for an I420 frame (all planes)
+double I420PSNR(const uint8_t* ref_frame,
+                const uint8_t* test_frame,
+                int width, int height) {
+  if (!ref_frame || !test_frame)
+    return -1;
+  else if (height < 0 || width < 0)
+    return -1;
+  int half_width = (width + 1) >> 1;
+  int half_height = (height + 1) >> 1;
+  const uint8_t* src_y_a = ref_frame;
+  const uint8_t* src_u_a = src_y_a + width * height;
+  const uint8_t* src_v_a = src_u_a + half_width * half_height;
+  const uint8_t* src_y_b = test_frame;
+  const uint8_t* src_u_b = src_y_b + width * height;
+  const uint8_t* src_v_b = src_u_b + half_width * half_height;
+  // In the following: stride is determined by width.
+  double psnr = libyuv::I420Psnr(src_y_a, width,
+                                 src_u_a, half_width,
+                                 src_v_a, half_width,
+                                 src_y_b, width,
+                                 src_u_b, half_width,
+                                 src_v_b, half_width,
+                                 width, height);
+  // LibYuv sets the max psnr value to 128, we restrict it to 48.
+  // In case of 0 mse in one frame, 128 can skew the results significantly.
+  return (psnr > 48.0) ? 48.0 : psnr;
+}
+// Compute SSIM for an I420 frame (all planes)
+double I420SSIM(const uint8_t* ref_frame,
+                const uint8_t* test_frame,
+                int width, int height) {
+  if (!ref_frame || !test_frame)
+     return -1;
+  else if (height < 0 || width < 0)
+     return -1;
+  int half_width = (width + 1) >> 1;
+  int half_height = (height + 1) >> 1;
+  const uint8_t* src_y_a = ref_frame;
+  const uint8_t* src_u_a = src_y_a + width * height;
+  const uint8_t* src_v_a = src_u_a + half_width * half_height;
+  const uint8_t* src_y_b = test_frame;
+  const uint8_t* src_u_b = src_y_b + width * height;
+  const uint8_t* src_v_b = src_u_b + half_width * half_height;
+  int stride_y = width;
+  int stride_uv = half_width;
+  return libyuv::I420Ssim(src_y_a, stride_y,
+                          src_u_a, stride_uv,
+                          src_v_a, stride_uv,
+                          src_y_b, stride_y,
+                          src_u_b, stride_uv,
+                          src_v_b, stride_uv,
+                          width, height);
+}
+
+}  // namespace webrtc
diff --git a/src/engine_configurations.h b/src/engine_configurations.h
new file mode 100644
index 0000000..a1ed4a9
--- /dev/null
+++ b/src/engine_configurations.h
@@ -0,0 +1,151 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_ENGINE_CONFIGURATIONS_H_
+#define WEBRTC_ENGINE_CONFIGURATIONS_H_
+
+// ============================================================================
+//                              Voice and Video
+// ============================================================================
+
+// Don't link in socket support in Chrome
+#ifdef WEBRTC_CHROMIUM_BUILD
+#define WEBRTC_EXTERNAL_TRANSPORT
+#endif
+
+// Optional to enable stand-alone
+// #define WEBRTC_EXTERNAL_TRANSPORT
+
+// ----------------------------------------------------------------------------
+//  [Voice] Codec settings
+// ----------------------------------------------------------------------------
+
+#ifdef WEBRTC_ARCH_ARM
+#define WEBRTC_CODEC_ISACFX     // fix-point iSAC implementation
+#else
+#define WEBRTC_CODEC_ISAC       // floating-point iSAC implementation (default)
+#endif
+#define WEBRTC_CODEC_AVT
+
+#ifndef WEBRTC_CHROMIUM_BUILD
+#define WEBRTC_CODEC_ILBC
+#define WEBRTC_CODEC_G722
+#define WEBRTC_CODEC_PCM16
+#define WEBRTC_CODEC_RED
+#endif
+
+// ----------------------------------------------------------------------------
+//  [Video] Codec settings
+// ----------------------------------------------------------------------------
+
+#define VIDEOCODEC_I420
+#define VIDEOCODEC_VP8
+
+// ============================================================================
+//                                 VoiceEngine
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  Settings for VoiceEngine
+// ----------------------------------------------------------------------------
+
+#define WEBRTC_VOICE_ENGINE_AGC                 // Near-end AGC
+#define WEBRTC_VOICE_ENGINE_ECHO                // Near-end AEC
+#define WEBRTC_VOICE_ENGINE_NR                  // Near-end NS
+#define WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+
+#ifndef WEBRTC_CHROMIUM_BUILD
+#define WEBRTC_VOICE_ENGINE_TYPING_DETECTION    // Typing detection
+#endif
+
+// ----------------------------------------------------------------------------
+//  VoiceEngine sub-APIs
+// ----------------------------------------------------------------------------
+
+#define WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
+#define WEBRTC_VOICE_ENGINE_CODEC_API
+#define WEBRTC_VOICE_ENGINE_DTMF_API
+#define WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
+#define WEBRTC_VOICE_ENGINE_FILE_API
+#define WEBRTC_VOICE_ENGINE_HARDWARE_API
+#define WEBRTC_VOICE_ENGINE_NETEQ_STATS_API
+#define WEBRTC_VOICE_ENGINE_NETWORK_API
+#define WEBRTC_VOICE_ENGINE_RTP_RTCP_API
+#define WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
+#define WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
+
+#ifndef WEBRTC_CHROMIUM_BUILD
+#define WEBRTC_VOICE_ENGINE_CALL_REPORT_API
+#define WEBRTC_VOICE_ENGINE_ENCRYPTION_API
+#endif
+
+// ============================================================================
+//                                 VideoEngine
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  Settings for special VideoEngine configurations
+// ----------------------------------------------------------------------------
+// ----------------------------------------------------------------------------
+//  VideoEngine sub-API:s
+// ----------------------------------------------------------------------------
+
+#define WEBRTC_VIDEO_ENGINE_CAPTURE_API
+#define WEBRTC_VIDEO_ENGINE_CODEC_API
+#define WEBRTC_VIDEO_ENGINE_ENCRYPTION_API
+#define WEBRTC_VIDEO_ENGINE_IMAGE_PROCESS_API
+#define WEBRTC_VIDEO_ENGINE_NETWORK_API
+#define WEBRTC_VIDEO_ENGINE_RENDER_API
+#define WEBRTC_VIDEO_ENGINE_RTP_RTCP_API
+// #define WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
+
+// Now handled by gyp:
+// WEBRTC_VIDEO_ENGINE_FILE_API
+
+// ============================================================================
+//                       Platform specific configurations
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  VideoEngine Windows
+// ----------------------------------------------------------------------------
+
+#if defined(_WIN32)
+// #define DIRECTDRAW_RENDERING
+#define DIRECT3D9_RENDERING  // Requires DirectX 9.
+#endif
+
+// ----------------------------------------------------------------------------
+//  VideoEngine MAC
+// ----------------------------------------------------------------------------
+
+#if defined(WEBRTC_MAC) && !defined(MAC_IPHONE)
+// #define CARBON_RENDERING
+#define COCOA_RENDERING
+#endif
+
+// ----------------------------------------------------------------------------
+//  VideoEngine Mobile iPhone
+// ----------------------------------------------------------------------------
+
+#if defined(MAC_IPHONE)
+#define EAGL_RENDERING
+#endif
+
+// ----------------------------------------------------------------------------
+//  Deprecated
+// ----------------------------------------------------------------------------
+
+// #define WEBRTC_CODEC_G729
+// #define WEBRTC_DTMF_DETECTION
+// #define WEBRTC_SRTP
+// #define WEBRTC_SRTP_ALLOW_ROC_ITERATION
+
+#endif  // WEBRTC_ENGINE_CONFIGURATIONS_H_
diff --git a/src/modules/audio_coding/codecs/OWNERS b/src/modules/audio_coding/codecs/OWNERS
new file mode 100644
index 0000000..e1e6256
--- /dev/null
+++ b/src/modules/audio_coding/codecs/OWNERS
@@ -0,0 +1,3 @@
+tina.legrand@webrtc.org
+turaj@webrtc.org
+jan.skoglund@webrtc.org
diff --git a/src/modules/audio_coding/codecs/cng/cng.gypi b/src/modules/audio_coding/codecs/cng/cng.gypi
new file mode 100644
index 0000000..ce83e29
--- /dev/null
+++ b/src/modules/audio_coding/codecs/cng/cng.gypi
@@ -0,0 +1,57 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'CNG',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+      ],
+      'include_dirs': [
+        'include',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include',
+        ],
+      },
+      'sources': [
+        'include/webrtc_cng.h',
+        'webrtc_cng.c',
+        'cng_helpfuns.c',
+        'cng_helpfuns.h',
+      ],
+    },
+  ], # targets
+  'conditions': [
+    ['include_tests==1', {
+      'targets': [
+        {
+          'target_name': 'cng_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'CNG',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+          ],
+          'sources': [
+            'cng_unittest.cc',
+          ],
+        }, # CNG_unittests
+      ], # targets
+    }], # include_tests
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/modules/audio_coding/codecs/cng/cng_helpfuns.c b/src/modules/audio_coding/codecs/cng/cng_helpfuns.c
new file mode 100644
index 0000000..2e9029f
--- /dev/null
+++ b/src/modules/audio_coding/codecs/cng/cng_helpfuns.c
@@ -0,0 +1,64 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include "webrtc_cng.h"
+#include "signal_processing_library.h"
+#include "typedefs.h"
+#include "cng_helpfuns.h"
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+void WebRtcCng_K2a16( 
+    WebRtc_Word16 *k,           /* Q15.    */
+    int            useOrder,
+    WebRtc_Word16 *a            /* Q12.    */
+)
+{
+    WebRtc_Word16 any[WEBRTC_SPL_MAX_LPC_ORDER+1];
+    WebRtc_Word16 *aptr, *aptr2, *anyptr;
+    G_CONST WebRtc_Word16 *kptr;
+    int m, i;
+    
+    kptr = k;
+    *a   = 4096;  /* i.e., (Word16_MAX >> 3)+1 */
+     *any = *a;
+    a[1] = (*k+4) >> 3;
+    for( m=1; m<useOrder; m++ )
+    {
+        kptr++;
+        aptr = a;
+        aptr++;
+        aptr2 = &a[m];
+        anyptr = any;
+        anyptr++;
+
+        any[m+1] = (*kptr+4) >> 3;
+        for( i=0; i<m; i++ ) {
+            *anyptr++ = (*aptr++) + (WebRtc_Word16)( (( (WebRtc_Word32)(*aptr2--) * (WebRtc_Word32)*kptr )+16384) >> 15);
+        }
+
+        aptr   = a;
+        anyptr = any;
+        for( i=0; i<(m+2); i++ ){
+            *aptr++ = *anyptr++;
+        }
+    }
+}
+
+
+#ifdef __cplusplus
+}
+#endif
+
diff --git a/src/modules/audio_coding/codecs/cng/cng_helpfuns.h b/src/modules/audio_coding/codecs/cng/cng_helpfuns.h
new file mode 100644
index 0000000..fd8d6dc
--- /dev/null
+++ b/src/modules/audio_coding/codecs/cng/cng_helpfuns.h
@@ -0,0 +1,28 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_SOURCE_CNG_HELPFUNS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_SOURCE_CNG_HELPFUNS_H_
+
+extern WebRtc_Word32 lpc_lagwinTbl_fixw32[WEBRTC_CNG_MAX_LPC_ORDER + 1];
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+void WebRtcCng_K2a16(WebRtc_Word16 *k, int useOrder, WebRtc_Word16 *a);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_SOURCE_CNG_HELPFUNS_H_
diff --git a/src/modules/audio_coding/codecs/cng/cng_unittest.cc b/src/modules/audio_coding/codecs/cng/cng_unittest.cc
new file mode 100644
index 0000000..6a4edc0
--- /dev/null
+++ b/src/modules/audio_coding/codecs/cng/cng_unittest.cc
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Empty test just to get code coverage metrics for this dir.
+ */
+#include "webrtc_cng.h"
+#include "gtest/gtest.h"
+
+TEST(CngTest, EmptyTestToGetCodeCoverage) {}
diff --git a/src/modules/audio_coding/codecs/cng/include/webrtc_cng.h b/src/modules/audio_coding/codecs/cng/include/webrtc_cng.h
new file mode 100644
index 0000000..d405e3a
--- /dev/null
+++ b/src/modules/audio_coding/codecs/cng/include/webrtc_cng.h
@@ -0,0 +1,236 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_INTERFACE_WEBRTC_CNG_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_INTERFACE_WEBRTC_CNG_H_
+
+#include "typedefs.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define WEBRTC_CNG_MAX_LPC_ORDER 12
+#define WEBRTC_CNG_MAX_OUTSIZE_ORDER 640
+
+/* Define Error codes */
+
+/* 6100 Encoder */
+#define CNG_ENCODER_MEMORY_ALLOCATION_FAILED    6110
+#define CNG_ENCODER_NOT_INITIATED               6120
+#define CNG_DISALLOWED_LPC_ORDER                6130
+#define CNG_DISALLOWED_FRAME_SIZE               6140
+#define CNG_DISALLOWED_SAMPLING_FREQUENCY       6150
+/* 6200 Decoder */
+#define CNG_DECODER_MEMORY_ALLOCATION_FAILED    6210
+#define CNG_DECODER_NOT_INITIATED               6220
+
+
+typedef struct WebRtcCngEncInst         CNG_enc_inst;
+typedef struct WebRtcCngDecInst         CNG_dec_inst;
+
+
+/****************************************************************************
+ * WebRtcCng_Version(...)
+ *
+ * These functions returns the version name (string must be at least
+ * 500 characters long)
+ *
+ * Output:
+ *    - version    : Pointer to character string
+ *
+ * Return value    :  0 - Ok
+ *                   -1 - Error
+ */
+
+WebRtc_Word16 WebRtcCng_Version(char *version);
+
+/****************************************************************************
+ * WebRtcCng_AssignSizeEnc/Dec(...)
+ *
+ * These functions get the size needed for storing the instance for encoder
+ * and decoder, respectively
+ *
+ * Input/Output:
+ *    - sizeinbytes     : Pointer to integer where the size is returned
+ *
+ * Return value         :  0
+ */
+
+WebRtc_Word16 WebRtcCng_AssignSizeEnc(int *sizeinbytes);
+WebRtc_Word16 WebRtcCng_AssignSizeDec(int *sizeinbytes);
+
+
+/****************************************************************************
+ * WebRtcCng_AssignEnc/Dec(...)
+ *
+ * These functions Assignes memory for the instances.
+ *
+ * Input:
+ *    - CNG_inst_Addr :  Adress to where to assign memory
+ * Output:
+ *    - inst          :  Pointer to the instance that should be created
+ *
+ * Return value       :  0 - Ok
+ *                      -1 - Error
+ */
+
+WebRtc_Word16 WebRtcCng_AssignEnc(CNG_enc_inst **inst, void *CNG_inst_Addr);
+WebRtc_Word16 WebRtcCng_AssignDec(CNG_dec_inst **inst, void *CNG_inst_Addr);
+
+
+/****************************************************************************
+ * WebRtcCng_CreateEnc/Dec(...)
+ *
+ * These functions create an instance to the specified structure
+ *
+ * Input:
+ *    - XXX_inst      : Pointer to created instance that should be created
+ *
+ * Return value       :  0 - Ok
+ *                      -1 - Error
+ */
+
+WebRtc_Word16 WebRtcCng_CreateEnc(CNG_enc_inst **cng_inst);
+WebRtc_Word16 WebRtcCng_CreateDec(CNG_dec_inst **cng_inst);
+
+
+/****************************************************************************
+ * WebRtcCng_InitEnc/Dec(...)
+ *
+ * This function initializes a instance
+ *
+ * Input:
+ *    - cng_inst      : Instance that should be initialized
+ *
+ *    - fs            : 8000 for narrowband and 16000 for wideband
+ *    - interval      : generate SID data every interval ms
+ *    - quality       : Number of refl. coefs, maximum allowed is 12
+ *
+ * Output:
+ *    - cng_inst      : Initialized instance
+ *
+ * Return value       :  0 - Ok
+ *                      -1 - Error
+ */
+
+WebRtc_Word16 WebRtcCng_InitEnc(CNG_enc_inst *cng_inst,
+                                WebRtc_Word16 fs,
+                                WebRtc_Word16 interval,
+                                WebRtc_Word16 quality);
+WebRtc_Word16 WebRtcCng_InitDec(CNG_dec_inst *cng_dec_inst);
+
+ 
+/****************************************************************************
+ * WebRtcCng_FreeEnc/Dec(...)
+ *
+ * These functions frees the dynamic memory of a specified instance
+ *
+ * Input:
+ *    - cng_inst      : Pointer to created instance that should be freed
+ *
+ * Return value       :  0 - Ok
+ *                      -1 - Error
+ */
+
+
+WebRtc_Word16 WebRtcCng_FreeEnc(CNG_enc_inst *cng_inst);
+WebRtc_Word16 WebRtcCng_FreeDec(CNG_dec_inst *cng_inst);
+
+
+
+/****************************************************************************
+ * WebRtcCng_Encode(...)
+ *
+ * These functions analyzes background noise
+ *
+ * Input:
+ *    - cng_inst      : Pointer to created instance
+ *    - speech        : Signal to be analyzed
+ *    - nrOfSamples   : Size of speech vector
+ *    - forceSID      : not zero to force SID frame and reset
+ *
+ * Output:
+ *    - bytesOut      : Nr of bytes to transmit, might be 0
+ *
+ * Return value       :  0 - Ok
+ *                      -1 - Error
+ */
+
+WebRtc_Word16 WebRtcCng_Encode(CNG_enc_inst *cng_inst,
+                               WebRtc_Word16 *speech,
+                               WebRtc_Word16 nrOfSamples,
+                               WebRtc_UWord8* SIDdata,
+                               WebRtc_Word16 *bytesOut,
+                               WebRtc_Word16 forceSID);
+
+
+/****************************************************************************
+ * WebRtcCng_UpdateSid(...)
+ *
+ * These functions updates the CN state, when a new SID packet arrives
+ *
+ * Input:
+ *    - cng_inst      : Pointer to created instance that should be freed
+ *    - SID           : SID packet, all headers removed
+ *    - length        : Length in bytes of SID packet
+ *
+ * Return value       :  0 - Ok
+ *                      -1 - Error
+ */
+WebRtc_Word16 WebRtcCng_UpdateSid(CNG_dec_inst *cng_inst,
+                                  WebRtc_UWord8 *SID,
+                                  WebRtc_Word16 length);
+
+
+/****************************************************************************
+ * WebRtcCng_Generate(...)
+ *
+ * These functions generates CN data when needed
+ *
+ * Input:
+ *    - cng_inst      : Pointer to created instance that should be freed
+ *    - outData       : pointer to area to write CN data
+ *    - nrOfSamples   : How much data to generate
+ *    - new_period    : >0 if a new period of CNG, will reset history
+ *
+ * Return value       :  0 - Ok
+ *                      -1 - Error
+ */
+WebRtc_Word16 WebRtcCng_Generate(CNG_dec_inst *cng_inst,
+                                 WebRtc_Word16 * outData,
+                                 WebRtc_Word16 nrOfSamples,
+                                 WebRtc_Word16 new_period);
+
+
+/*****************************************************************************
+ * WebRtcCng_GetErrorCodeEnc/Dec(...)
+ *
+ * This functions can be used to check the error code of a CNG instance. When
+ * a function returns -1 a error code will be set for that instance. The 
+ * function below extract the code of the last error that occurred in the
+ * specified instance.
+ *
+ * Input:
+ *    - CNG_inst    : CNG enc/dec instance
+ *
+ * Return value     : Error code
+ */
+
+WebRtc_Word16 WebRtcCng_GetErrorCodeEnc(CNG_enc_inst *cng_inst);
+WebRtc_Word16 WebRtcCng_GetErrorCodeDec(CNG_dec_inst *cng_inst);
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_INTERFACE_WEBRTC_CNG_H_
diff --git a/src/modules/audio_coding/codecs/cng/test/CNG.cc b/src/modules/audio_coding/codecs/cng/test/CNG.cc
new file mode 100644
index 0000000..e3cabbb
--- /dev/null
+++ b/src/modules/audio_coding/codecs/cng/test/CNG.cc
@@ -0,0 +1,224 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * CNG.cpp : Defines the entry point for the console application.
+ */
+
+#include <stdlib.h>
+#include <string.h>
+#include "stdafx.h"
+#include "webrtc_cng.h"
+#include "webrtc_vad.h"
+
+CNG_enc_inst *e_inst; 
+CNG_dec_inst *d_inst;
+
+VadInst *vinst;
+//#define ASSIGN
+
+short anaSpeech[WEBRTC_CNG_MAX_OUTSIZE_ORDER], genSpeech[WEBRTC_CNG_MAX_OUTSIZE_ORDER], state[WEBRTC_CNG_MAX_OUTSIZE_ORDER];
+unsigned char SIDpkt[114];
+
+int main(int argc, char* argv[])
+{
+    FILE * infile, *outfile, *statefile;
+    short res=0,errtype;
+    /*float time=0.0;*/
+    
+    WebRtcVad_Create(&vinst);
+    WebRtcVad_Init(vinst);
+    
+    short size;
+    int samps=0;
+
+    if (argc < 5){
+        printf("Usage:\n CNG.exe infile outfile samplingfreq(Hz) interval(ms) order\n\n");
+        return(0);
+    }
+
+    infile=fopen(argv[1],"rb");
+    if (infile==NULL){
+        printf("file %s does not exist\n",argv[1]);
+        return(0);
+    }
+    outfile=fopen(argv[2],"wb");
+    statefile=fopen("CNGVAD.d","wb");
+    if (outfile==NULL){
+        printf("file %s could not be created\n",argv[2]);
+        return(0);
+    }
+
+    unsigned int fs=16000;
+    short frameLen=fs/50;
+
+#ifndef ASSIGN
+    res=WebRtcCng_CreateEnc(&e_inst);
+    if (res < 0) {
+        /* exit if returned with error */
+        errtype=WebRtcCng_GetErrorCodeEnc(e_inst);
+        fprintf(stderr,"\n\n Error in initialization: %d.\n\n", errtype);
+        exit(EXIT_FAILURE);
+    }
+    res=WebRtcCng_CreateDec(&d_inst);
+    if (res < 0) {
+        /* exit if returned with error */
+        errtype=WebRtcCng_GetErrorCodeDec(d_inst);
+        fprintf(stderr,"\n\n Error in initialization: %d.\n\n", errtype);
+        exit(EXIT_FAILURE);
+    }
+
+#else
+
+    // Test the Assign-functions
+    int Esize, Dsize;
+    void *Eaddr, *Daddr;
+
+    res=WebRtcCng_AssignSizeEnc(&Esize);
+    res=WebRtcCng_AssignSizeDec(&Dsize);
+    Eaddr=malloc(Esize);
+    Daddr=malloc(Dsize);
+
+    res=WebRtcCng_AssignEnc(&e_inst, Eaddr);
+    if (res < 0) {
+        /* exit if returned with error */
+        errtype=WebRtcCng_GetErrorCodeEnc(e_inst);
+        fprintf(stderr,"\n\n Error in initialization: %d.\n\n", errtype);
+        exit(EXIT_FAILURE);
+    }
+
+    res=WebRtcCng_AssignDec(&d_inst, Daddr);
+    if (res < 0) {
+        /* exit if returned with error */
+        errtype=WebRtcCng_GetErrorCodeDec(d_inst);
+        fprintf(stderr,"\n\n Error in initialization: %d.\n\n", errtype);
+        exit(EXIT_FAILURE);
+    }
+
+#endif
+
+    res=WebRtcCng_InitEnc(e_inst,atoi(argv[3]),atoi(argv[4]),atoi(argv[5]));
+    if (res < 0) {
+        /* exit if returned with error */
+        errtype=WebRtcCng_GetErrorCodeEnc(e_inst);
+        fprintf(stderr,"\n\n Error in initialization: %d.\n\n", errtype);
+        exit(EXIT_FAILURE);
+    }
+
+    res=WebRtcCng_InitDec(d_inst);
+    if (res < 0) {
+        /* exit if returned with error */
+        errtype=WebRtcCng_GetErrorCodeDec(d_inst);
+        fprintf(stderr,"\n\n Error in initialization: %d.\n\n", errtype);
+        exit(EXIT_FAILURE);
+    }
+
+    static bool firstSilent=true;
+
+    int numSamp=0;
+    int speech=0;
+    int silent=0;
+    long cnt=0;
+
+    while(fread(anaSpeech,2,frameLen,infile)==frameLen){
+
+        cnt++;
+        if (cnt==60){
+            cnt=60;
+        }
+        /*  time+=(float)frameLen/fs;
+        numSamp+=frameLen;
+        float temp[640];
+        for(unsigned int j=0;j<frameLen;j++)
+        temp[j]=(float)anaSpeech[j]; */
+
+        //        if(!WebRtcVad_Process(vinst, fs, anaSpeech, frameLen)){
+
+
+        if(1){ // Do CNG coding of entire file
+
+            //        if(!((anaSpeech[0]==0)&&(anaSpeech[1]==0)&&(anaSpeech[2]==0))){
+            if(firstSilent){
+                res = WebRtcCng_Encode(e_inst, anaSpeech, frameLen/2, SIDpkt,&size,1);
+                if (res < 0) {
+                    /* exit if returned with error */
+                    errtype=WebRtcCng_GetErrorCodeEnc(e_inst);
+                    fprintf(stderr,"\n\n Error in encoder: %d.\n\n", errtype);
+                    exit(EXIT_FAILURE);
+                }
+
+                firstSilent=false;
+
+                res=WebRtcCng_Encode(e_inst, &anaSpeech[frameLen/2], frameLen/2, SIDpkt,&size,1);
+                if (res < 0) {
+                    /* exit if returned with error */
+                    errtype=WebRtcCng_GetErrorCodeEnc(e_inst);
+                    fprintf(stderr,"\n\n Error in encoder: %d.\n\n", errtype);
+                    exit(EXIT_FAILURE);
+                }
+
+            }
+            else{
+                res=WebRtcCng_Encode(e_inst, anaSpeech, frameLen/2, SIDpkt,&size,0);
+                if (res < 0) {
+                    /* exit if returned with error */
+                    errtype=WebRtcCng_GetErrorCodeEnc(e_inst);
+                    fprintf(stderr,"\n\n Error in encoder: %d.\n\n", errtype);
+                    exit(EXIT_FAILURE);
+                }
+                res=WebRtcCng_Encode(e_inst, &anaSpeech[frameLen/2], frameLen/2, SIDpkt,&size,0);
+                if (res < 0) {
+                    /* exit if returned with error */
+                    errtype=WebRtcCng_GetErrorCodeEnc(e_inst);
+                    fprintf(stderr,"\n\n Error in encoder: %d.\n\n", errtype);
+                    exit(EXIT_FAILURE);
+                }
+            }
+
+            if(size>0){
+                res=WebRtcCng_UpdateSid(d_inst,SIDpkt, size);
+                if (res < 0) {
+                    /* exit if returned with error */
+                    errtype=WebRtcCng_GetErrorCodeDec(d_inst);
+                    fprintf(stderr,"\n\n Error in decoder: %d.\n\n", errtype);
+                    exit(EXIT_FAILURE);
+                }
+            }
+            res=WebRtcCng_Generate(d_inst,genSpeech, frameLen,0);
+                if (res < 0) {
+                    /* exit if returned with error */
+                    errtype=WebRtcCng_GetErrorCodeDec(d_inst);
+                    fprintf(stderr,"\n\n Error in decoder: %d.\n\n", errtype);
+                    exit(EXIT_FAILURE);
+                }
+            memcpy(state,anaSpeech,2*frameLen);
+        }
+        else{
+            firstSilent=true;
+            memcpy(genSpeech,anaSpeech,2*frameLen);
+
+            memset(anaSpeech,0,frameLen*2);
+            memset(state,0,frameLen*2);
+
+        }
+        if (fwrite(genSpeech, 2, frameLen,
+                   outfile) != static_cast<size_t>(frameLen)) {
+          return -1;
+        }
+        if (fwrite(state, 2, frameLen,
+                   statefile) != static_cast<size_t>(frameLen)) {
+          return -1;
+        }
+    }
+    fclose(infile);
+    fclose(outfile);
+    fclose(statefile);
+    return 0;
+}
diff --git a/src/modules/audio_coding/codecs/cng/test/StdAfx.cc b/src/modules/audio_coding/codecs/cng/test/StdAfx.cc
new file mode 100644
index 0000000..995e510
--- /dev/null
+++ b/src/modules/audio_coding/codecs/cng/test/StdAfx.cc
@@ -0,0 +1,18 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// stdafx.cpp : source file that includes just the standard includes
+//    CNG.pch will be the pre-compiled header
+//    stdafx.obj will contain the pre-compiled type information
+
+#include "stdafx.h"
+
+// TODO: reference any additional headers you need in STDAFX.H
+// and not in this file
diff --git a/src/modules/audio_coding/codecs/cng/test/StdAfx.h b/src/modules/audio_coding/codecs/cng/test/StdAfx.h
new file mode 100644
index 0000000..dd6c445
--- /dev/null
+++ b/src/modules/audio_coding/codecs/cng/test/StdAfx.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// stdafx.h : include file for standard system include files,
+//  or project specific include files that are used frequently, but
+//      are changed infrequently
+//
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_TEST_STDAFX_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_CNG_MAIN_TEST_STDAFX_H_
+
+#if _MSC_VER > 1000
+#pragma once
+#endif // _MSC_VER > 1000
+
+#define WIN32_LEAN_AND_MEAN        // Exclude rarely-used stuff from Windows headers
+
+#include <stdio.h>
+
+// TODO: reference additional headers your program requires here
+
+//{{AFX_INSERT_LOCATION}}
+// Microsoft Visual C++ will insert additional declarations immediately before the previous line.
+
+#endif // !defined(AFX_STDAFX_H__DE2097A7_569B_42A0_A615_41BF352D6FFB__INCLUDED_)
diff --git a/src/modules/audio_coding/codecs/cng/webrtc_cng.c b/src/modules/audio_coding/codecs/cng/webrtc_cng.c
new file mode 100644
index 0000000..6d95f11
--- /dev/null
+++ b/src/modules/audio_coding/codecs/cng/webrtc_cng.c
@@ -0,0 +1,732 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include <string.h>
+#include <stdlib.h>
+
+#include "webrtc_cng.h"
+#include "signal_processing_library.h"
+#include "cng_helpfuns.h"
+#include "stdio.h"
+
+
+typedef struct WebRtcCngDecInst_t_ {
+
+    WebRtc_UWord32 dec_seed;
+    WebRtc_Word32 dec_target_energy;
+    WebRtc_Word32 dec_used_energy;
+    WebRtc_Word16 dec_target_reflCoefs[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word16 dec_used_reflCoefs[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word16 dec_filtstate[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word16 dec_filtstateLow[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word16 dec_Efiltstate[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word16 dec_EfiltstateLow[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word16 dec_order;
+    WebRtc_Word16 dec_target_scale_factor; /*Q29*/
+    WebRtc_Word16 dec_used_scale_factor;  /*Q29*/
+    WebRtc_Word16 target_scale_factor; /* Q13 */
+    WebRtc_Word16 errorcode;
+    WebRtc_Word16 initflag; 
+
+} WebRtcCngDecInst_t;
+
+
+typedef struct WebRtcCngEncInst_t_ {
+
+    WebRtc_Word16 enc_nrOfCoefs;
+    WebRtc_Word16 enc_sampfreq;
+    WebRtc_Word16 enc_interval;
+    WebRtc_Word16 enc_msSinceSID;
+    WebRtc_Word32 enc_Energy;
+    WebRtc_Word16 enc_reflCoefs[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word32 enc_corrVector[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word16 enc_filtstate[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word16 enc_filtstateLow[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_UWord32 enc_seed;    
+    WebRtc_Word16 errorcode;
+    WebRtc_Word16 initflag;
+
+} WebRtcCngEncInst_t;
+
+const WebRtc_Word32 WebRtcCng_kDbov[94]={
+    1081109975,  858756178,  682134279,  541838517,  430397633,  341876992,
+    271562548,  215709799,  171344384,  136103682,  108110997,   85875618,
+    68213428,   54183852,   43039763,   34187699,   27156255,   21570980,
+    17134438,   13610368,   10811100,    8587562,    6821343,    5418385,
+    4303976,    3418770,    2715625,    2157098,    1713444,    1361037,
+    1081110,     858756,     682134,     541839,     430398,     341877,
+    271563,     215710,     171344,     136104,     108111,      85876,
+    68213,      54184,      43040,      34188,      27156,      21571,
+    17134,      13610,      10811,       8588,       6821,       5418,
+    4304,       3419,       2716,       2157,       1713,       1361,
+    1081,        859,        682,        542,        430,        342,
+    272,        216,        171,        136,        108,         86, 
+    68,         54,         43,         34,         27,         22, 
+    17,         14,         11,          9,          7,          5, 
+    4,          3,          3,          2,          2,           1, 
+    1,          1,          1,          1
+};
+const WebRtc_Word16 WebRtcCng_kCorrWindow[WEBRTC_CNG_MAX_LPC_ORDER] = {
+    32702, 32636, 32570, 32505, 32439, 32374, 
+    32309, 32244, 32179, 32114, 32049, 31985
+}; 
+
+/****************************************************************************
+ * WebRtcCng_Version(...)
+ *
+ * These functions returns the version name (string must be at least
+ * 500 characters long)
+ *
+ * Output:
+ *      - version       : Pointer to character string
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+
+WebRtc_Word16 WebRtcCng_Version(char *version)
+{
+    strcpy((char*)version,(const char*)"1.2.0\n");
+    return(0);
+}
+
+
+/****************************************************************************
+ * WebRtcCng_AssignSizeEnc/Dec(...)
+ *
+ * These functions get the size needed for storing the instance for encoder
+ * and decoder, respectively
+ *
+ * Input/Output:
+ *      - sizeinbytes   : Pointer to integer where the size is returned
+ *
+ * Return value         :  0
+ */
+
+WebRtc_Word16 WebRtcCng_AssignSizeEnc(int *sizeinbytes)
+{
+    *sizeinbytes=sizeof(WebRtcCngEncInst_t)*2/sizeof(WebRtc_Word16);
+    return(0);
+}
+
+WebRtc_Word16 WebRtcCng_AssignSizeDec(int *sizeinbytes)
+{
+    *sizeinbytes=sizeof(WebRtcCngDecInst_t)*2/sizeof(WebRtc_Word16);
+    return(0);
+}
+
+
+/****************************************************************************
+ * WebRtcCng_AssignEnc/Dec(...)
+ *
+ * These functions Assignes memory for the instances.
+ *
+ * Input:
+ *        - CNG_inst_Addr :  Adress to where to assign memory
+ * Output:
+ *        - inst          :  Pointer to the instance that should be created
+ *
+ * Return value           :  0 - Ok
+ *                          -1 - Error
+ */
+
+WebRtc_Word16 WebRtcCng_AssignEnc(CNG_enc_inst **inst, void *CNG_inst_Addr)
+{
+    if (CNG_inst_Addr!=NULL) {
+        *inst = (CNG_enc_inst*)CNG_inst_Addr;
+        (*(WebRtcCngEncInst_t**) inst)->errorcode = 0;
+        (*(WebRtcCngEncInst_t**) inst)->initflag = 0;
+        return(0);
+    } else {
+        /* The memory could not be allocated */
+        return(-1);
+    }
+}
+
+WebRtc_Word16 WebRtcCng_AssignDec(CNG_dec_inst **inst, void *CNG_inst_Addr)
+{
+    if (CNG_inst_Addr!=NULL) {
+        *inst = (CNG_dec_inst*)CNG_inst_Addr;
+        (*(WebRtcCngDecInst_t**) inst)->errorcode = 0;
+        (*(WebRtcCngDecInst_t**) inst)->initflag = 0;
+        return(0);
+    } else {
+        /* The memory could not be allocated */
+        return(-1);
+    }
+}
+
+
+/****************************************************************************
+ * WebRtcCng_CreateEnc/Dec(...)
+ *
+ * These functions create an instance to the specified structure
+ *
+ * Input:
+ *      - XXX_inst      : Pointer to created instance that should be created
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+
+WebRtc_Word16 WebRtcCng_CreateEnc(CNG_enc_inst **cng_inst)
+{
+    *cng_inst=(CNG_enc_inst*)malloc(sizeof(WebRtcCngEncInst_t));
+    if(*cng_inst!=NULL) {
+        (*(WebRtcCngEncInst_t**) cng_inst)->errorcode = 0;
+        (*(WebRtcCngEncInst_t**) cng_inst)->initflag = 0;
+        return(0);
+    }
+    else {
+        /* The memory could not be allocated */
+        return(-1);
+    }
+}
+
+WebRtc_Word16 WebRtcCng_CreateDec(CNG_dec_inst **cng_inst)
+{
+    *cng_inst=(CNG_dec_inst*)malloc(sizeof(WebRtcCngDecInst_t));
+    if(*cng_inst!=NULL) {
+        (*(WebRtcCngDecInst_t**) cng_inst)->errorcode = 0;
+        (*(WebRtcCngDecInst_t**) cng_inst)->initflag = 0;
+        return(0);
+    }
+    else {
+        /* The memory could not be allocated */
+        return(-1);
+    }
+}
+
+
+/****************************************************************************
+ * WebRtcCng_InitEnc/Dec(...)
+ *
+ * This function initializes a instance
+ *
+ * Input:
+ *    - cng_inst      : Instance that should be initialized
+ *
+ *    - fs            : 8000 for narrowband and 16000 for wideband
+ *    - interval      : generate SID data every interval ms
+ *    - quality       : TBD
+ *
+ * Output:
+ *    - cng_inst      : Initialized instance
+ *
+ * Return value       :  0 - Ok
+ *                      -1 - Error
+ */
+
+
+WebRtc_Word16 WebRtcCng_InitEnc(CNG_enc_inst *cng_inst,
+                                WebRtc_Word16 fs,
+                                WebRtc_Word16 interval,
+                                WebRtc_Word16 quality)
+{
+    int i;
+
+    WebRtcCngEncInst_t* inst=(WebRtcCngEncInst_t*)cng_inst;
+
+    memset(inst, 0, sizeof(WebRtcCngEncInst_t));
+
+     /* Check LPC order */
+
+    if (quality>WEBRTC_CNG_MAX_LPC_ORDER) {
+        inst->errorcode = CNG_DISALLOWED_LPC_ORDER;
+        return (-1);
+    }
+
+    if (fs<=0) {
+        inst->errorcode = CNG_DISALLOWED_SAMPLING_FREQUENCY;
+        return (-1);
+    }
+
+    inst->enc_sampfreq=fs;
+    inst->enc_interval=interval;
+    inst->enc_nrOfCoefs=quality;
+    inst->enc_msSinceSID=0;
+    inst->enc_seed=7777; /*For debugging only*/
+    inst->enc_Energy=0;
+    for(i=0;i<(WEBRTC_CNG_MAX_LPC_ORDER+1);i++){
+        inst->enc_reflCoefs[i]=0;
+        inst->enc_corrVector[i]=0;
+    }
+    inst->initflag=1;
+
+    return(0);
+}
+
+WebRtc_Word16 WebRtcCng_InitDec(CNG_dec_inst *cng_inst)
+{
+    int i;
+
+    WebRtcCngDecInst_t* inst=(WebRtcCngDecInst_t*)cng_inst;
+
+    memset(inst, 0, sizeof(WebRtcCngDecInst_t));
+    inst->dec_seed=7777; /*For debugging only*/
+    inst->dec_order=5;
+    inst->dec_target_scale_factor=0;
+    inst->dec_used_scale_factor=0;
+    for(i=0;i<(WEBRTC_CNG_MAX_LPC_ORDER+1);i++){
+        inst->dec_filtstate[i]=0;
+        inst->dec_target_reflCoefs[i]=0;
+        inst->dec_used_reflCoefs[i]=0;
+    }
+    inst->dec_target_reflCoefs[0]=0;
+    inst->dec_used_reflCoefs[0]=0;
+    inst ->dec_used_energy=0;
+    inst->initflag=1;
+
+    return(0);
+}
+
+/****************************************************************************
+ * WebRtcCng_FreeEnc/Dec(...)
+ *
+ * These functions frees the dynamic memory of a specified instance
+ *
+ * Input:
+ *    - cng_inst      : Pointer to created instance that should be freed
+ *
+ * Return value       :  0 - Ok
+ *                      -1 - Error
+ */
+
+
+WebRtc_Word16 WebRtcCng_FreeEnc(CNG_enc_inst *cng_inst)
+{
+    free(cng_inst);
+    return(0);
+}
+
+WebRtc_Word16 WebRtcCng_FreeDec(CNG_dec_inst *cng_inst)
+{
+    free(cng_inst);
+    return(0);
+}
+
+
+
+/****************************************************************************
+ * WebRtcCng_Encode(...)
+ *
+ * These functions analyzes background noise
+ *
+ * Input:
+ *    - cng_inst      : Pointer to created instance
+ *    - speech        : Signal (noise) to be analyzed
+ *    - nrOfSamples   : Size of speech vector
+ *    - bytesOut      : Nr of bytes to transmit, might be 0
+ *
+ * Return value       :  0 - Ok
+ *                      -1 - Error
+ */
+WebRtc_Word16 WebRtcCng_Encode(CNG_enc_inst *cng_inst, 
+                               WebRtc_Word16 *speech,
+                               WebRtc_Word16 nrOfSamples,
+                               WebRtc_UWord8* SIDdata,
+                               WebRtc_Word16* bytesOut,
+                               WebRtc_Word16 forceSID)
+{
+    WebRtcCngEncInst_t* inst=(WebRtcCngEncInst_t*)cng_inst;
+
+    WebRtc_Word16 arCoefs[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word32 corrVector[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word16 refCs[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word16 hanningW[WEBRTC_CNG_MAX_OUTSIZE_ORDER];
+    WebRtc_Word16 ReflBeta=19661; /*0.6 in q15*/
+    WebRtc_Word16 ReflBetaComp=13107; /*0.4 in q15*/ 
+    WebRtc_Word32 outEnergy;
+    int outShifts;
+    int i, stab;
+    int acorrScale;
+    int index;
+    WebRtc_Word16 ind,factor;
+    WebRtc_Word32 *bptr, blo, bhi;
+    WebRtc_Word16 negate;
+    const WebRtc_Word16 *aptr;
+
+    WebRtc_Word16 speechBuf[WEBRTC_CNG_MAX_OUTSIZE_ORDER];
+
+
+    /* check if encoder initiated */    
+    if (inst->initflag != 1) {
+        inst->errorcode = CNG_ENCODER_NOT_INITIATED;
+        return (-1);
+    }
+
+
+    /* check framesize */    
+    if (nrOfSamples>WEBRTC_CNG_MAX_OUTSIZE_ORDER) {
+        inst->errorcode = CNG_DISALLOWED_FRAME_SIZE;
+        return (-1);
+    }
+
+
+    for(i=0;i<nrOfSamples;i++){
+        speechBuf[i]=speech[i];
+    }
+
+    factor=nrOfSamples;
+
+    /* Calculate energy and a coefficients */
+    outEnergy =WebRtcSpl_Energy(speechBuf, nrOfSamples, &outShifts);
+    while(outShifts>0){
+        if(outShifts>5){ /*We can only do 5 shifts without destroying accuracy in division factor*/
+            outEnergy<<=(outShifts-5);
+            outShifts=5;
+        }
+        else{
+            factor/=2;
+            outShifts--;
+        }
+    }
+    outEnergy=WebRtcSpl_DivW32W16(outEnergy,factor);
+
+    if (outEnergy > 1){
+        /* Create Hanning Window */
+        WebRtcSpl_GetHanningWindow(hanningW, nrOfSamples/2);
+        for( i=0;i<(nrOfSamples/2);i++ )
+            hanningW[nrOfSamples-i-1]=hanningW[i];
+
+        WebRtcSpl_ElementwiseVectorMult(speechBuf, hanningW, speechBuf, nrOfSamples, 14);
+
+        WebRtcSpl_AutoCorrelation( speechBuf, nrOfSamples, inst->enc_nrOfCoefs, corrVector, &acorrScale );
+
+        if( *corrVector==0 )
+            *corrVector = WEBRTC_SPL_WORD16_MAX;
+
+        /* Adds the bandwidth expansion */
+        aptr = WebRtcCng_kCorrWindow;
+        bptr = corrVector;
+
+        // (zzz) lpc16_1 = 17+1+820+2+2 = 842 (ordo2=700) 
+        for( ind=0; ind<inst->enc_nrOfCoefs; ind++ )
+        {
+            // The below code multiplies the 16 b corrWindow values (Q15) with
+            // the 32 b corrvector (Q0) and shifts the result down 15 steps.
+                 
+            negate = *bptr<0;
+            if( negate )
+                *bptr = -*bptr;
+
+            blo = (WebRtc_Word32)*aptr * (*bptr & 0xffff);
+            bhi = ((blo >> 16) & 0xffff) + ((WebRtc_Word32)(*aptr++) * ((*bptr >> 16) & 0xffff));
+            blo = (blo & 0xffff) | ((bhi & 0xffff) << 16);
+
+            *bptr = (( (bhi>>16) & 0x7fff) << 17) | ((WebRtc_UWord32)blo >> 15);
+            if( negate )
+                *bptr = -*bptr;
+            bptr++;
+        }
+
+        // end of bandwidth expansion
+
+        stab=WebRtcSpl_LevinsonDurbin(corrVector, arCoefs, refCs, inst->enc_nrOfCoefs);
+        
+        if(!stab){
+            // disregard from this frame
+            *bytesOut=0;
+            return(0);
+        }
+
+    }
+    else {
+        for(i=0;i<inst->enc_nrOfCoefs; i++)
+            refCs[i]=0;
+    }
+
+    if(forceSID){
+        /*Read instantaneous values instead of averaged*/
+        for(i=0;i<inst->enc_nrOfCoefs;i++)
+            inst->enc_reflCoefs[i]=refCs[i];
+        inst->enc_Energy=outEnergy;
+    }
+    else{
+        /*Average history with new values*/
+        for(i=0;i<(inst->enc_nrOfCoefs);i++){
+            inst->enc_reflCoefs[i]=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(inst->enc_reflCoefs[i],ReflBeta,15);
+            inst->enc_reflCoefs[i]+=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(refCs[i],ReflBetaComp,15);
+        }
+        inst->enc_Energy=(outEnergy>>2)+(inst->enc_Energy>>1)+(inst->enc_Energy>>2);
+    }
+
+
+    if(inst->enc_Energy<1){
+        inst->enc_Energy=1;
+    }
+
+    if((inst->enc_msSinceSID>(inst->enc_interval-1))||forceSID){
+
+        /* Search for best dbov value */
+        index=0;
+        for(i=1;i<93;i++){
+            /* Always round downwards */
+            if((inst->enc_Energy-WebRtcCng_kDbov[i])>0){
+                index=i;
+                break;
+            }
+        }
+        if((i==93)&&(index==0))
+            index=94;
+        SIDdata[0]=index;
+
+
+        /* Quantize coefs with tweak for WebRtc implementation of RFC3389 */
+        if(inst->enc_nrOfCoefs==WEBRTC_CNG_MAX_LPC_ORDER){ 
+            for(i=0;i<inst->enc_nrOfCoefs;i++){
+                SIDdata[i+1]=((inst->enc_reflCoefs[i]+128)>>8); /* Q15 to Q7*/ /* +127 */
+            }
+        }else{
+            for(i=0;i<inst->enc_nrOfCoefs;i++){
+                SIDdata[i+1]=(127+((inst->enc_reflCoefs[i]+128)>>8)); /* Q15 to Q7*/ /* +127 */
+            }
+        }
+
+        inst->enc_msSinceSID=0;
+        *bytesOut=inst->enc_nrOfCoefs+1;
+
+        inst->enc_msSinceSID+=(1000*nrOfSamples)/inst->enc_sampfreq;
+        return(inst->enc_nrOfCoefs+1);
+    }else{
+        inst->enc_msSinceSID+=(1000*nrOfSamples)/inst->enc_sampfreq;
+        *bytesOut=0;
+    return(0);
+    }
+}
+
+
+/****************************************************************************
+ * WebRtcCng_UpdateSid(...)
+ *
+ * These functions updates the CN state, when a new SID packet arrives
+ *
+ * Input:
+ *    - cng_inst      : Pointer to created instance that should be freed
+ *    - SID           : SID packet, all headers removed
+ *    - length        : Length in bytes of SID packet
+ *
+ * Return value       :  0 - Ok
+ *                      -1 - Error
+ */
+
+WebRtc_Word16 WebRtcCng_UpdateSid(CNG_dec_inst *cng_inst,
+                                  WebRtc_UWord8 *SID,
+                                  WebRtc_Word16 length)
+{
+
+    WebRtcCngDecInst_t* inst=(WebRtcCngDecInst_t*)cng_inst;
+    WebRtc_Word16 refCs[WEBRTC_CNG_MAX_LPC_ORDER];
+    WebRtc_Word32 targetEnergy;
+    int i;
+
+    if (inst->initflag != 1) {
+        inst->errorcode = CNG_DECODER_NOT_INITIATED;
+        return (-1);
+    }
+
+    /*Throw away reflection coefficients of higher order than we can handle*/
+    if(length> (WEBRTC_CNG_MAX_LPC_ORDER+1))
+        length=WEBRTC_CNG_MAX_LPC_ORDER+1;
+
+    inst->dec_order=length-1;
+
+    if(SID[0]>93)
+        SID[0]=93;
+    targetEnergy=WebRtcCng_kDbov[SID[0]];
+    /* Take down target energy to 75% */
+    targetEnergy=targetEnergy>>1;
+    targetEnergy+=targetEnergy>>2;
+
+    inst->dec_target_energy=targetEnergy;
+
+    /* Reconstruct coeffs with tweak for WebRtc implementation of RFC3389 */
+    if(inst->dec_order==WEBRTC_CNG_MAX_LPC_ORDER){ 
+        for(i=0;i<(inst->dec_order);i++){
+            refCs[i]=SID[i+1]<<8; /* Q7 to Q15*/
+            inst->dec_target_reflCoefs[i]=refCs[i];
+        }
+    }else{
+        for(i=0;i<(inst->dec_order);i++){
+            refCs[i]=(SID[i+1]-127)<<8; /* Q7 to Q15*/
+            inst->dec_target_reflCoefs[i]=refCs[i];
+        }
+    }
+    
+    for(i=(inst->dec_order);i<WEBRTC_CNG_MAX_LPC_ORDER;i++){
+            refCs[i]=0; 
+            inst->dec_target_reflCoefs[i]=refCs[i];
+        }
+
+    return(0);
+}
+
+
+/****************************************************************************
+ * WebRtcCng_Generate(...)
+ *
+ * These functions generates CN data when needed
+ *
+ * Input:
+ *    - cng_inst      : Pointer to created instance that should be freed
+ *    - outData       : pointer to area to write CN data
+ *    - nrOfSamples   : How much data to generate
+ *
+ * Return value        :  0 - Ok
+ *                       -1 - Error
+ */
+WebRtc_Word16 WebRtcCng_Generate(CNG_dec_inst *cng_inst,
+                                 WebRtc_Word16 *outData,
+                                 WebRtc_Word16 nrOfSamples,
+                                 WebRtc_Word16 new_period)
+{
+    WebRtcCngDecInst_t* inst=(WebRtcCngDecInst_t*)cng_inst;
+    
+    int i;
+    WebRtc_Word16 excitation[WEBRTC_CNG_MAX_OUTSIZE_ORDER];
+    WebRtc_Word16 low[WEBRTC_CNG_MAX_OUTSIZE_ORDER];
+    WebRtc_Word16 lpPoly[WEBRTC_CNG_MAX_LPC_ORDER+1];
+    WebRtc_Word16 ReflBetaStd=26214; /*0.8 in q15*/
+    WebRtc_Word16 ReflBetaCompStd=6553; /*0.2in q15*/
+    WebRtc_Word16 ReflBetaNewP=19661; /*0.6 in q15*/
+    WebRtc_Word16 ReflBetaCompNewP=13107; /*0.4 in q15*/
+    WebRtc_Word16 Beta,BetaC, tmp1, tmp2, tmp3;
+    WebRtc_Word32 targetEnergy;
+    WebRtc_Word16 En;
+    WebRtc_Word16 temp16;
+
+    if (nrOfSamples>WEBRTC_CNG_MAX_OUTSIZE_ORDER) {
+        inst->errorcode = CNG_DISALLOWED_FRAME_SIZE;
+        return (-1);
+    }
+
+
+    if (new_period) {
+        inst->dec_used_scale_factor=inst->dec_target_scale_factor;
+        Beta=ReflBetaNewP;
+        BetaC=ReflBetaCompNewP;
+    } else {
+        Beta=ReflBetaStd;
+        BetaC=ReflBetaCompStd;
+    }
+
+    /*Here we use a 0.5 weighting, should possibly be modified to 0.6*/
+    tmp1=inst->dec_used_scale_factor<<2; /* Q13->Q15 */
+    tmp2=inst->dec_target_scale_factor<<2; /* Q13->Q15 */
+    tmp3=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(tmp1,Beta,15);
+    tmp3+=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(tmp2,BetaC,15);
+    inst->dec_used_scale_factor=tmp3>>2; /* Q15->Q13 */
+
+    inst->dec_used_energy=inst->dec_used_energy>>1;
+    inst->dec_used_energy+=inst->dec_target_energy>>1;
+
+    
+    /* Do the same for the reflection coeffs */
+    for (i=0;i<WEBRTC_CNG_MAX_LPC_ORDER;i++) {
+        inst->dec_used_reflCoefs[i]=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(inst->dec_used_reflCoefs[i],Beta,15);
+        inst->dec_used_reflCoefs[i]+=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(inst->dec_target_reflCoefs[i],BetaC,15);        
+    }
+
+    /* Compute the polynomial coefficients            */
+    WebRtcCng_K2a16(inst->dec_used_reflCoefs, WEBRTC_CNG_MAX_LPC_ORDER, lpPoly);
+
+    /***/ 
+
+    targetEnergy=inst->dec_used_energy;
+
+    // Calculate scaling factor based on filter energy
+    En=8192; //1.0 in Q13
+    for (i=0; i<(WEBRTC_CNG_MAX_LPC_ORDER); i++) {
+
+        // Floating point value for reference 
+        // E*=1.0-((float)inst->dec_used_reflCoefs[i]/32768.0)*((float)inst->dec_used_reflCoefs[i]/32768.0);
+
+        // Same in fixed point
+        temp16=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(inst->dec_used_reflCoefs[i],inst->dec_used_reflCoefs[i],15); // K(i).^2 in Q15
+        temp16=0x7fff - temp16; // 1 - K(i).^2 in Q15
+        En=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(En,temp16,15);
+
+    }
+
+    //float scaling= sqrt(E*inst->dec_target_energy/((1<<24)));
+
+    //Calculate sqrt(En*target_energy/exctiation energy)
+
+    targetEnergy=WebRtcSpl_Sqrt(inst->dec_used_energy);
+
+    En=(WebRtc_Word16)WebRtcSpl_Sqrt(En)<<6; //We are missing a factor sqrt(2) here
+    En=(En*3)>>1; //1.5 estimates sqrt(2)
+
+    inst->dec_used_scale_factor=(WebRtc_Word16)((En*targetEnergy)>>12);
+
+
+    /***/
+
+    /*Generate excitation*/
+    /*Excitation energy per sample is 2.^24 - Q13 N(0,1) */
+    for(i=0;i<nrOfSamples;i++){
+        excitation[i]=WebRtcSpl_RandN(&inst->dec_seed)>>1;
+    }
+
+    /*Scale to correct energy*/
+    WebRtcSpl_ScaleVector(excitation, excitation, inst->dec_used_scale_factor, nrOfSamples, 13);
+
+    WebRtcSpl_FilterAR(
+        lpPoly,    /* Coefficients in Q12 */
+        WEBRTC_CNG_MAX_LPC_ORDER+1, 
+        excitation,            /* Speech samples */
+        nrOfSamples, 
+        inst->dec_filtstate,        /* State preservation */
+        WEBRTC_CNG_MAX_LPC_ORDER, 
+        inst->dec_filtstateLow,        /* State preservation */
+        WEBRTC_CNG_MAX_LPC_ORDER, 
+        outData,    /* Filtered speech samples */
+        low,
+        nrOfSamples
+    );
+
+    return(0);
+
+}
+
+
+
+/****************************************************************************
+ * WebRtcCng_GetErrorCodeEnc/Dec(...)
+ *
+ * This functions can be used to check the error code of a CNG instance. When
+ * a function returns -1 a error code will be set for that instance. The 
+ * function below extract the code of the last error that occured in the 
+ * specified instance.
+ *
+ * Input:
+ *    - CNG_inst    : CNG enc/dec instance
+ *
+ * Return value     : Error code
+ */
+
+WebRtc_Word16 WebRtcCng_GetErrorCodeEnc(CNG_enc_inst *cng_inst)
+{
+
+    /* typecast pointer to real structure */
+    WebRtcCngEncInst_t* inst=(WebRtcCngEncInst_t*)cng_inst;
+
+    return inst->errorcode;
+}
+
+WebRtc_Word16 WebRtcCng_GetErrorCodeDec(CNG_dec_inst *cng_inst)
+{
+
+    /* typecast pointer to real structure */
+    WebRtcCngDecInst_t* inst=(WebRtcCngDecInst_t*)cng_inst;
+
+    return inst->errorcode;
+}
diff --git a/src/modules/audio_coding/codecs/g711/g711.c b/src/modules/audio_coding/codecs/g711/g711.c
new file mode 100644
index 0000000..954f377
--- /dev/null
+++ b/src/modules/audio_coding/codecs/g711/g711.c
@@ -0,0 +1,83 @@
+/*
+ * SpanDSP - a series of DSP components for telephony
+ *
+ * g711.c - A-law and u-law transcoding routines
+ *
+ * Written by Steve Underwood <steveu@coppice.org>
+ *
+ * Copyright (C) 2006 Steve Underwood
+ *
+ *  Despite my general liking of the GPL, I place this code in the
+ *  public domain for the benefit of all mankind - even the slimy
+ *  ones who might try to proprietize my work and use it to my
+ *  detriment.
+ *
+ * $Id: g711.c,v 1.1 2006/06/07 15:46:39 steveu Exp $
+ *
+ * Modifications for WebRtc, 2011/04/28, by tlegrand:
+ * -Removed unused include files
+ * -Changed to use WebRtc types
+ * -Added option to run encoder bitexact with ITU-T reference implementation
+ */
+
+/*! \file */
+
+#include "g711.h"
+#include "typedefs.h"
+
+/* Copied from the CCITT G.711 specification */
+static const WebRtc_UWord8 ulaw_to_alaw_table[256] =
+{
+     42,  43,  40,  41,  46,  47,  44,  45,  34,  35,  32,  33,  38,  39,  36,  37,
+     58,  59,  56,  57,  62,  63,  60,  61,  50,  51,  48,  49,  54,  55,  52,  53,
+     10,  11,   8,   9,  14,  15,  12,  13,   2,   3,   0,   1,   6,   7,   4,  26,
+     27,  24,  25,  30,  31,  28,  29,  18,  19,  16,  17,  22,  23,  20,  21, 106,
+    104, 105, 110, 111, 108, 109,  98,  99,  96,  97, 102, 103, 100, 101, 122, 120,
+    126, 127, 124, 125, 114, 115, 112, 113, 118, 119, 116, 117,  75,  73,  79,  77,
+     66,  67,  64,  65,  70,  71,  68,  69,  90,  91,  88,  89,  94,  95,  92,  93,
+     82,  82,  83,  83,  80,  80,  81,  81,  86,  86,  87,  87,  84,  84,  85,  85,
+    170, 171, 168, 169, 174, 175, 172, 173, 162, 163, 160, 161, 166, 167, 164, 165,
+    186, 187, 184, 185, 190, 191, 188, 189, 178, 179, 176, 177, 182, 183, 180, 181,
+    138, 139, 136, 137, 142, 143, 140, 141, 130, 131, 128, 129, 134, 135, 132, 154,
+    155, 152, 153, 158, 159, 156, 157, 146, 147, 144, 145, 150, 151, 148, 149, 234,
+    232, 233, 238, 239, 236, 237, 226, 227, 224, 225, 230, 231, 228, 229, 250, 248,
+    254, 255, 252, 253, 242, 243, 240, 241, 246, 247, 244, 245, 203, 201, 207, 205,
+    194, 195, 192, 193, 198, 199, 196, 197, 218, 219, 216, 217, 222, 223, 220, 221,
+    210, 210, 211, 211, 208, 208, 209, 209, 214, 214, 215, 215, 212, 212, 213, 213
+};
+
+/* These transcoding tables are copied from the CCITT G.711 specification. To achieve
+   optimal results, do not change them. */
+
+static const WebRtc_UWord8 alaw_to_ulaw_table[256] =
+{
+     42,  43,  40,  41,  46,  47,  44,  45,  34,  35,  32,  33,  38,  39,  36,  37,
+     57,  58,  55,  56,  61,  62,  59,  60,  49,  50,  47,  48,  53,  54,  51,  52,
+     10,  11,   8,   9,  14,  15,  12,  13,   2,   3,   0,   1,   6,   7,   4,   5,
+     26,  27,  24,  25,  30,  31,  28,  29,  18,  19,  16,  17,  22,  23,  20,  21,
+     98,  99,  96,  97, 102, 103, 100, 101,  93,  93,  92,  92,  95,  95,  94,  94,
+    116, 118, 112, 114, 124, 126, 120, 122, 106, 107, 104, 105, 110, 111, 108, 109,
+     72,  73,  70,  71,  76,  77,  74,  75,  64,  65,  63,  63,  68,  69,  66,  67,
+     86,  87,  84,  85,  90,  91,  88,  89,  79,  79,  78,  78,  82,  83,  80,  81,
+    170, 171, 168, 169, 174, 175, 172, 173, 162, 163, 160, 161, 166, 167, 164, 165,
+    185, 186, 183, 184, 189, 190, 187, 188, 177, 178, 175, 176, 181, 182, 179, 180,
+    138, 139, 136, 137, 142, 143, 140, 141, 130, 131, 128, 129, 134, 135, 132, 133,
+    154, 155, 152, 153, 158, 159, 156, 157, 146, 147, 144, 145, 150, 151, 148, 149,
+    226, 227, 224, 225, 230, 231, 228, 229, 221, 221, 220, 220, 223, 223, 222, 222,
+    244, 246, 240, 242, 252, 254, 248, 250, 234, 235, 232, 233, 238, 239, 236, 237,
+    200, 201, 198, 199, 204, 205, 202, 203, 192, 193, 191, 191, 196, 197, 194, 195,
+    214, 215, 212, 213, 218, 219, 216, 217, 207, 207, 206, 206, 210, 211, 208, 209
+};
+
+WebRtc_UWord8 alaw_to_ulaw(WebRtc_UWord8 alaw)
+{
+    return alaw_to_ulaw_table[alaw];
+}
+/*- End of function --------------------------------------------------------*/
+
+WebRtc_UWord8 ulaw_to_alaw(WebRtc_UWord8 ulaw)
+{
+    return ulaw_to_alaw_table[ulaw];
+}
+/*- End of function --------------------------------------------------------*/
+/*- End of file ------------------------------------------------------------*/
diff --git a/src/modules/audio_coding/codecs/g711/g711.gypi b/src/modules/audio_coding/codecs/g711/g711.gypi
new file mode 100644
index 0000000..66aa17a
--- /dev/null
+++ b/src/modules/audio_coding/codecs/g711/g711.gypi
@@ -0,0 +1,64 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'G711',
+      'type': '<(library)',
+      'include_dirs': [
+        'include',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include',
+        ],
+      },
+      'sources': [
+        'include/g711_interface.h',
+        'g711_interface.c',
+        'g711.c',
+        'g711.h',
+      ],
+    },
+  ], # targets
+  'conditions': [
+    ['include_tests==1', {
+      'targets': [
+        {
+          'target_name': 'g711_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'G711',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+          ],
+          'sources': [
+            'g711_unittest.cc',
+          ],
+        },
+        {
+          'target_name': 'g711_test',
+          'type': 'executable',
+          'dependencies': [
+            'G711',
+          ],
+          'sources': [
+            'test/testG711.cc',
+          ],
+        },
+      ], # targets
+    }], # include_tests
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/modules/audio_coding/codecs/g711/g711.h b/src/modules/audio_coding/codecs/g711/g711.h
new file mode 100644
index 0000000..cd5e3d7
--- /dev/null
+++ b/src/modules/audio_coding/codecs/g711/g711.h
@@ -0,0 +1,382 @@
+/*
+ * SpanDSP - a series of DSP components for telephony
+ *
+ * g711.h - In line A-law and u-law conversion routines
+ *
+ * Written by Steve Underwood <steveu@coppice.org>
+ *
+ * Copyright (C) 2001 Steve Underwood
+ *
+ *  Despite my general liking of the GPL, I place this code in the
+ *  public domain for the benefit of all mankind - even the slimy
+ *  ones who might try to proprietize my work and use it to my
+ *  detriment.
+ *
+ * $Id: g711.h,v 1.1 2006/06/07 15:46:39 steveu Exp $
+ *
+ * Modifications for WebRtc, 2011/04/28, by tlegrand:
+ * -Changed to use WebRtc types
+ * -Changed __inline__ to __inline
+ * -Two changes to make implementation bitexact with ITU-T reference implementation
+ */
+
+/*! \file */
+
+/*! \page g711_page A-law and mu-law handling
+Lookup tables for A-law and u-law look attractive, until you consider the impact
+on the CPU cache. If it causes a substantial area of your processor cache to get
+hit too often, cache sloshing will severely slow things down. The main reason
+these routines are slow in C, is the lack of direct access to the CPU's "find
+the first 1" instruction. A little in-line assembler fixes that, and the
+conversion routines can be faster than lookup tables, in most real world usage.
+A "find the first 1" instruction is available on most modern CPUs, and is a
+much underused feature. 
+
+If an assembly language method of bit searching is not available, these routines
+revert to a method that can be a little slow, so the cache thrashing might not
+seem so bad :(
+
+Feel free to submit patches to add fast "find the first 1" support for your own
+favourite processor.
+
+Look up tables are used for transcoding between A-law and u-law, since it is
+difficult to achieve the precise transcoding procedure laid down in the G.711
+specification by other means.
+*/
+
+#if !defined(_G711_H_)
+#define _G711_H_
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "typedefs.h"
+
+#if defined(__i386__)
+/*! \brief Find the bit position of the highest set bit in a word
+    \param bits The word to be searched
+    \return The bit number of the highest set bit, or -1 if the word is zero. */
+static __inline__ int top_bit(unsigned int bits)
+{
+    int res;
+
+    __asm__ __volatile__(" movl $-1,%%edx;\n"
+                         " bsrl %%eax,%%edx;\n"
+                         : "=d" (res)
+                         : "a" (bits));
+    return res;
+}
+/*- End of function --------------------------------------------------------*/
+
+/*! \brief Find the bit position of the lowest set bit in a word
+    \param bits The word to be searched
+    \return The bit number of the lowest set bit, or -1 if the word is zero. */
+static __inline__ int bottom_bit(unsigned int bits)
+{
+    int res;
+
+    __asm__ __volatile__(" movl $-1,%%edx;\n"
+                         " bsfl %%eax,%%edx;\n"
+                         : "=d" (res)
+                         : "a" (bits));
+    return res;
+}
+/*- End of function --------------------------------------------------------*/
+#elif defined(__x86_64__)
+static __inline__ int top_bit(unsigned int bits)
+{
+    int res;
+
+    __asm__ __volatile__(" movq $-1,%%rdx;\n"
+                         " bsrq %%rax,%%rdx;\n"
+                         : "=d" (res)
+                         : "a" (bits));
+    return res;
+}
+/*- End of function --------------------------------------------------------*/
+
+static __inline__ int bottom_bit(unsigned int bits)
+{
+    int res;
+
+    __asm__ __volatile__(" movq $-1,%%rdx;\n"
+                         " bsfq %%rax,%%rdx;\n"
+                         : "=d" (res)
+                         : "a" (bits));
+    return res;
+}
+/*- End of function --------------------------------------------------------*/
+#else
+static __inline int top_bit(unsigned int bits)
+{
+    int i;
+    
+    if (bits == 0)
+        return -1;
+    i = 0;
+    if (bits & 0xFFFF0000)
+    {
+        bits &= 0xFFFF0000;
+        i += 16;
+    }
+    if (bits & 0xFF00FF00)
+    {
+        bits &= 0xFF00FF00;
+        i += 8;
+    }
+    if (bits & 0xF0F0F0F0)
+    {
+        bits &= 0xF0F0F0F0;
+        i += 4;
+    }
+    if (bits & 0xCCCCCCCC)
+    {
+        bits &= 0xCCCCCCCC;
+        i += 2;
+    }
+    if (bits & 0xAAAAAAAA)
+    {
+        bits &= 0xAAAAAAAA;
+        i += 1;
+    }
+    return i;
+}
+/*- End of function --------------------------------------------------------*/
+
+static __inline int bottom_bit(unsigned int bits)
+{
+    int i;
+    
+    if (bits == 0)
+        return -1;
+    i = 32;
+    if (bits & 0x0000FFFF)
+    {
+        bits &= 0x0000FFFF;
+        i -= 16;
+    }
+    if (bits & 0x00FF00FF)
+    {
+        bits &= 0x00FF00FF;
+        i -= 8;
+    }
+    if (bits & 0x0F0F0F0F)
+    {
+        bits &= 0x0F0F0F0F;
+        i -= 4;
+    }
+    if (bits & 0x33333333)
+    {
+        bits &= 0x33333333;
+        i -= 2;
+    }
+    if (bits & 0x55555555)
+    {
+        bits &= 0x55555555;
+        i -= 1;
+    }
+    return i;
+}
+/*- End of function --------------------------------------------------------*/
+#endif
+
+/* N.B. It is tempting to use look-up tables for A-law and u-law conversion.
+ *      However, you should consider the cache footprint.
+ *
+ *      A 64K byte table for linear to x-law and a 512 byte table for x-law to
+ *      linear sound like peanuts these days, and shouldn't an array lookup be
+ *      real fast? No! When the cache sloshes as badly as this one will, a tight
+ *      calculation may be better. The messiest part is normally finding the
+ *      segment, but a little inline assembly can fix that on an i386, x86_64 and
+ *      many other modern processors.
+ */
+ 
+/*
+ * Mu-law is basically as follows:
+ *
+ *      Biased Linear Input Code        Compressed Code
+ *      ------------------------        ---------------
+ *      00000001wxyza                   000wxyz
+ *      0000001wxyzab                   001wxyz
+ *      000001wxyzabc                   010wxyz
+ *      00001wxyzabcd                   011wxyz
+ *      0001wxyzabcde                   100wxyz
+ *      001wxyzabcdef                   101wxyz
+ *      01wxyzabcdefg                   110wxyz
+ *      1wxyzabcdefgh                   111wxyz
+ *
+ * Each biased linear code has a leading 1 which identifies the segment
+ * number. The value of the segment number is equal to 7 minus the number
+ * of leading 0's. The quantization interval is directly available as the
+ * four bits wxyz.  * The trailing bits (a - h) are ignored.
+ *
+ * Ordinarily the complement of the resulting code word is used for
+ * transmission, and so the code word is complemented before it is returned.
+ *
+ * For further information see John C. Bellamy's Digital Telephony, 1982,
+ * John Wiley & Sons, pps 98-111 and 472-476.
+ */
+
+//#define ULAW_ZEROTRAP                 /* turn on the trap as per the MIL-STD */
+#define ULAW_BIAS        0x84           /* Bias for linear code. */
+
+/*! \brief Encode a linear sample to u-law
+    \param linear The sample to encode.
+    \return The u-law value.
+*/
+static __inline WebRtc_UWord8 linear_to_ulaw(int linear)
+{
+    WebRtc_UWord8 u_val;
+    int mask;
+    int seg;
+
+    /* Get the sign and the magnitude of the value. */
+    if (linear < 0)
+    {
+        /* WebRtc, tlegrand: -1 added to get bitexact to reference implementation */
+        linear = ULAW_BIAS - linear - 1;
+        mask = 0x7F;
+    }
+    else
+    {
+        linear = ULAW_BIAS + linear;
+        mask = 0xFF;
+    }
+
+    seg = top_bit(linear | 0xFF) - 7;
+
+    /*
+     * Combine the sign, segment, quantization bits,
+     * and complement the code word.
+     */
+    if (seg >= 8)
+        u_val = (WebRtc_UWord8) (0x7F ^ mask);
+    else
+        u_val = (WebRtc_UWord8) (((seg << 4) | ((linear >> (seg + 3)) & 0xF)) ^ mask);
+#ifdef ULAW_ZEROTRAP
+    /* Optional ITU trap */
+    if (u_val == 0)
+        u_val = 0x02;
+#endif
+    return  u_val;
+}
+/*- End of function --------------------------------------------------------*/
+
+/*! \brief Decode an u-law sample to a linear value.
+    \param ulaw The u-law sample to decode.
+    \return The linear value.
+*/
+static __inline WebRtc_Word16 ulaw_to_linear(WebRtc_UWord8 ulaw)
+{
+    int t;
+    
+    /* Complement to obtain normal u-law value. */
+    ulaw = ~ulaw;
+    /*
+     * Extract and bias the quantization bits. Then
+     * shift up by the segment number and subtract out the bias.
+     */
+    t = (((ulaw & 0x0F) << 3) + ULAW_BIAS) << (((int) ulaw & 0x70) >> 4);
+    return  (WebRtc_Word16) ((ulaw & 0x80)  ?  (ULAW_BIAS - t)  :  (t - ULAW_BIAS));
+}
+/*- End of function --------------------------------------------------------*/
+
+/*
+ * A-law is basically as follows:
+ *
+ *      Linear Input Code        Compressed Code
+ *      -----------------        ---------------
+ *      0000000wxyza             000wxyz
+ *      0000001wxyza             001wxyz
+ *      000001wxyzab             010wxyz
+ *      00001wxyzabc             011wxyz
+ *      0001wxyzabcd             100wxyz
+ *      001wxyzabcde             101wxyz
+ *      01wxyzabcdef             110wxyz
+ *      1wxyzabcdefg             111wxyz
+ *
+ * For further information see John C. Bellamy's Digital Telephony, 1982,
+ * John Wiley & Sons, pps 98-111 and 472-476.
+ */
+
+#define ALAW_AMI_MASK       0x55
+
+/*! \brief Encode a linear sample to A-law
+    \param linear The sample to encode.
+    \return The A-law value.
+*/
+static __inline WebRtc_UWord8 linear_to_alaw(int linear)
+{
+    int mask;
+    int seg;
+    
+    if (linear >= 0)
+    {
+        /* Sign (bit 7) bit = 1 */
+        mask = ALAW_AMI_MASK | 0x80;
+    }
+    else
+    {
+        /* Sign (bit 7) bit = 0 */
+        mask = ALAW_AMI_MASK;
+        /* WebRtc, tlegrand: Changed from -8 to -1 to get bitexact to reference
+         * implementation */
+        linear = -linear - 1;
+    }
+
+    /* Convert the scaled magnitude to segment number. */
+    seg = top_bit(linear | 0xFF) - 7;
+    if (seg >= 8)
+    {
+        if (linear >= 0)
+        {
+            /* Out of range. Return maximum value. */
+            return (WebRtc_UWord8) (0x7F ^ mask);
+        }
+        /* We must be just a tiny step below zero */
+        return (WebRtc_UWord8) (0x00 ^ mask);
+    }
+    /* Combine the sign, segment, and quantization bits. */
+    return (WebRtc_UWord8) (((seg << 4) | ((linear >> ((seg)  ?  (seg + 3)  :  4)) & 0x0F)) ^ mask);
+}
+/*- End of function --------------------------------------------------------*/
+
+/*! \brief Decode an A-law sample to a linear value.
+    \param alaw The A-law sample to decode.
+    \return The linear value.
+*/
+static __inline WebRtc_Word16 alaw_to_linear(WebRtc_UWord8 alaw)
+{
+    int i;
+    int seg;
+
+    alaw ^= ALAW_AMI_MASK;
+    i = ((alaw & 0x0F) << 4);
+    seg = (((int) alaw & 0x70) >> 4);
+    if (seg)
+        i = (i + 0x108) << (seg - 1);
+    else
+        i += 8;
+    return (WebRtc_Word16) ((alaw & 0x80)  ?  i  :  -i);
+}
+/*- End of function --------------------------------------------------------*/
+
+/*! \brief Transcode from A-law to u-law, using the procedure defined in G.711.
+    \param alaw The A-law sample to transcode.
+    \return The best matching u-law value.
+*/
+WebRtc_UWord8 alaw_to_ulaw(WebRtc_UWord8 alaw);
+
+/*! \brief Transcode from u-law to A-law, using the procedure defined in G.711.
+    \param alaw The u-law sample to transcode.
+    \return The best matching A-law value.
+*/
+WebRtc_UWord8 ulaw_to_alaw(WebRtc_UWord8 ulaw);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
+/*- End of file ------------------------------------------------------------*/
diff --git a/src/modules/audio_coding/codecs/g711/g711_interface.c b/src/modules/audio_coding/codecs/g711/g711_interface.c
new file mode 100644
index 0000000..a49abdb
--- /dev/null
+++ b/src/modules/audio_coding/codecs/g711/g711_interface.c
@@ -0,0 +1,171 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#include <string.h>
+#include "g711.h"
+#include "g711_interface.h"
+#include "typedefs.h"
+
+WebRtc_Word16 WebRtcG711_EncodeA(void *state,
+                                 WebRtc_Word16 *speechIn,
+                                 WebRtc_Word16 len,
+                                 WebRtc_Word16 *encoded)
+{
+    int n;
+    WebRtc_UWord16 tempVal,  tempVal2;
+
+    // Set and discard to avoid getting warnings
+    (void)(state = NULL);
+
+    // Sanity check of input length
+    if (len < 0) {
+        return (-1);
+    }
+
+    // Loop over all samples
+    for (n = 0; n < len; n++) {
+        tempVal = (WebRtc_UWord16)linear_to_alaw(speechIn[n]);
+
+#ifdef WEBRTC_BIG_ENDIAN
+        if ((n & 0x1) == 1) {
+            encoded[n>>1]|=((WebRtc_UWord16)tempVal);
+        } else {
+            encoded[n>>1]=((WebRtc_UWord16)tempVal)<<8;
+        }
+#else
+        if ((n & 0x1) == 1) {
+            tempVal2 |= ((WebRtc_UWord16) tempVal) << 8;
+            encoded[n >> 1] |= ((WebRtc_UWord16) tempVal) << 8;
+        } else {
+            tempVal2 = ((WebRtc_UWord16) tempVal);
+            encoded[n >> 1] = ((WebRtc_UWord16) tempVal);
+        }
+#endif
+    }
+    return (len);
+}
+
+WebRtc_Word16 WebRtcG711_EncodeU(void  *state,
+                                 WebRtc_Word16 *speechIn,
+                                 WebRtc_Word16 len,
+                                 WebRtc_Word16 *encoded)
+{
+    int n;
+    WebRtc_UWord16 tempVal;
+
+    // Set and discard to avoid getting warnings
+    (void)(state = NULL);
+
+    // Sanity check of input length
+    if (len < 0) {
+        return (-1);
+    }
+
+    // Loop over all samples
+    for (n = 0; n < len; n++) {
+        tempVal = (WebRtc_UWord16)linear_to_ulaw(speechIn[n]);
+
+ #ifdef WEBRTC_BIG_ENDIAN
+        if ((n & 0x1) == 1) {
+            encoded[n>>1]|=((WebRtc_UWord16)tempVal);
+        } else {
+            encoded[n>>1]=((WebRtc_UWord16)tempVal)<<8;
+        }
+ #else
+        if ((n & 0x1) == 1) {
+            encoded[n >> 1] |= ((WebRtc_UWord16) tempVal) << 8;
+        } else {
+            encoded[n >> 1] = ((WebRtc_UWord16) tempVal);
+        }
+ #endif
+    }
+    return (len);
+}
+
+WebRtc_Word16 WebRtcG711_DecodeA(void *state,
+                                 WebRtc_Word16 *encoded,
+                                 WebRtc_Word16 len,
+                                 WebRtc_Word16 *decoded,
+                                 WebRtc_Word16 *speechType)
+{
+    int n;
+    WebRtc_UWord16 tempVal;
+
+    // Set and discard to avoid getting warnings
+    (void)(state = NULL);
+
+    // Sanity check of input length
+    if (len < 0) {
+        return (-1);
+    }
+
+    for (n = 0; n < len; n++) {
+ #ifdef WEBRTC_BIG_ENDIAN
+        if ((n & 0x1) == 1) {
+            tempVal=((WebRtc_UWord16)encoded[n>>1] & 0xFF);
+        } else {
+            tempVal=((WebRtc_UWord16)encoded[n>>1] >> 8);
+        }
+ #else
+        if ((n & 0x1) == 1) {
+            tempVal = (encoded[n >> 1] >> 8);
+        } else {
+            tempVal = (encoded[n >> 1] & 0xFF);
+        }
+ #endif
+        decoded[n] = (WebRtc_Word16) alaw_to_linear(tempVal);
+    }
+
+    *speechType = 1;
+    return (len);
+}
+
+WebRtc_Word16 WebRtcG711_DecodeU(void *state,
+                                 WebRtc_Word16 *encoded,
+                                 WebRtc_Word16 len,
+                                 WebRtc_Word16 *decoded,
+                                 WebRtc_Word16 *speechType)
+{
+    int n;
+    WebRtc_UWord16 tempVal;
+
+    // Set and discard to avoid getting warnings
+    (void)(state = NULL);
+
+    // Sanity check of input length
+    if (len < 0) {
+        return (-1);
+    }
+
+    for (n = 0; n < len; n++) {
+ #ifdef WEBRTC_BIG_ENDIAN
+        if ((n & 0x1) == 1) {
+            tempVal=((WebRtc_UWord16)encoded[n>>1] & 0xFF);
+        } else {
+            tempVal=((WebRtc_UWord16)encoded[n>>1] >> 8);
+        }
+ #else
+        if ((n & 0x1) == 1) {
+            tempVal = (encoded[n >> 1] >> 8);
+        } else {
+            tempVal = (encoded[n >> 1] & 0xFF);
+        }
+ #endif
+        decoded[n] = (WebRtc_Word16) ulaw_to_linear(tempVal);
+    }
+
+    *speechType = 1;
+    return (len);
+}
+
+WebRtc_Word16 WebRtcG711_Version(char* version, WebRtc_Word16 lenBytes)
+{
+    strncpy(version, "2.0.0", lenBytes);
+    return 0;
+}
diff --git a/src/modules/audio_coding/codecs/g711/g711_unittest.cc b/src/modules/audio_coding/codecs/g711/g711_unittest.cc
new file mode 100644
index 0000000..c903bed
--- /dev/null
+++ b/src/modules/audio_coding/codecs/g711/g711_unittest.cc
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Empty test just to get code coverage metrics for this dir.
+ */
+#include "g711_interface.h"
+#include "gtest/gtest.h"
+
+TEST(G711Test, EmptyTestToGetCodeCoverage) {}
diff --git a/src/modules/audio_coding/codecs/g711/include/g711_interface.h b/src/modules/audio_coding/codecs/g711/include/g711_interface.h
new file mode 100644
index 0000000..25a9903
--- /dev/null
+++ b/src/modules/audio_coding/codecs/g711/include/g711_interface.h
@@ -0,0 +1,148 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_AUDIO_CODING_CODECS_G711_MAIN_INTERFACE_G711_INTERFACE_H_
+#define MODULES_AUDIO_CODING_CODECS_G711_MAIN_INTERFACE_G711_INTERFACE_H_
+
+#include "typedefs.h"
+
+// Comfort noise constants
+#define G711_WEBRTC_SPEECH    1
+#define G711_WEBRTC_CNG       2
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/****************************************************************************
+ * WebRtcG711_EncodeA(...)
+ *
+ * This function encodes a G711 A-law frame and inserts it into a packet.
+ * Input speech length has be of any length.
+ *
+ * Input:
+ *      - state              : Dummy state to make this codec look more like
+ *                             other codecs
+ *      - speechIn           : Input speech vector
+ *      - len                : Samples in speechIn
+ *
+ * Output:
+ *      - encoded            : The encoded data vector
+ *
+ * Return value              : >0 - Length (in bytes) of coded data
+ *                             -1 - Error
+ */
+
+WebRtc_Word16 WebRtcG711_EncodeA(void *state,
+                                 WebRtc_Word16 *speechIn,
+                                 WebRtc_Word16 len,
+                                 WebRtc_Word16 *encoded);
+
+/****************************************************************************
+ * WebRtcG711_EncodeU(...)
+ *
+ * This function encodes a G711 U-law frame and inserts it into a packet.
+ * Input speech length has be of any length.
+ *
+ * Input:
+ *      - state              : Dummy state to make this codec look more like
+ *                             other codecs
+ *      - speechIn           : Input speech vector
+ *      - len                : Samples in speechIn
+ *
+ * Output:
+ *      - encoded            : The encoded data vector
+ *
+ * Return value              : >0 - Length (in bytes) of coded data
+ *                             -1 - Error
+ */
+
+WebRtc_Word16 WebRtcG711_EncodeU(void *state,
+                                 WebRtc_Word16 *speechIn,
+                                 WebRtc_Word16 len,
+                                 WebRtc_Word16 *encoded);
+
+/****************************************************************************
+ * WebRtcG711_DecodeA(...)
+ *
+ * This function decodes a packet G711 A-law frame.
+ *
+ * Input:
+ *      - state              : Dummy state to make this codec look more like
+ *                             other codecs
+ *      - encoded            : Encoded data
+ *      - len                : Bytes in encoded vector
+ *
+ * Output:
+ *      - decoded            : The decoded vector
+ *      - speechType         : 1 normal, 2 CNG (for G711 it should
+ *                             always return 1 since G711 does not have a
+ *                             built-in DTX/CNG scheme)
+ *
+ * Return value              : >0 - Samples in decoded vector
+ *                             -1 - Error
+ */
+
+WebRtc_Word16 WebRtcG711_DecodeA(void *state,
+                                 WebRtc_Word16 *encoded,
+                                 WebRtc_Word16 len,
+                                 WebRtc_Word16 *decoded,
+                                 WebRtc_Word16 *speechType);
+
+/****************************************************************************
+ * WebRtcG711_DecodeU(...)
+ *
+ * This function decodes a packet G711 U-law frame.
+ *
+ * Input:
+ *      - state              : Dummy state to make this codec look more like
+ *                             other codecs
+ *      - encoded            : Encoded data
+ *      - len                : Bytes in encoded vector
+ *
+ * Output:
+ *      - decoded            : The decoded vector
+ *      - speechType         : 1 normal, 2 CNG (for G711 it should
+ *                             always return 1 since G711 does not have a
+ *                             built-in DTX/CNG scheme)
+ *
+ * Return value              : >0 - Samples in decoded vector
+ *                             -1 - Error
+ */
+
+WebRtc_Word16 WebRtcG711_DecodeU(void *state,
+                                 WebRtc_Word16 *encoded,
+                                 WebRtc_Word16 len,
+                                 WebRtc_Word16 *decoded,
+                                 WebRtc_Word16 *speechType);
+
+/**********************************************************************
+* WebRtcG711_Version(...)
+*
+* This function gives the version string of the G.711 codec.
+*
+* Input:
+*      - lenBytes:     the size of Allocated space (in Bytes) where
+*                      the version number is written to (in string format).
+*
+* Output:
+*      - version:      Pointer to a buffer where the version number is
+*                      written to.
+*
+*/
+
+WebRtc_Word16 WebRtcG711_Version(char* version, WebRtc_Word16 lenBytes);
+
+#ifdef __cplusplus
+}
+#endif
+
+
+#endif /* MODULES_AUDIO_CODING_CODECS_G711_MAIN_INTERFACE_G711_INTERFACE_H_ */
diff --git a/src/modules/audio_coding/codecs/g711/test/testG711.cc b/src/modules/audio_coding/codecs/g711/test/testG711.cc
new file mode 100644
index 0000000..fd54522
--- /dev/null
+++ b/src/modules/audio_coding/codecs/g711/test/testG711.cc
@@ -0,0 +1,178 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * testG711.cpp : Defines the entry point for the console application.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+/* include API */
+#include "g711_interface.h"
+
+/* Runtime statistics */
+#include <time.h>
+#define CLOCKS_PER_SEC_G711  1000
+
+
+/* function for reading audio data from PCM file */
+int readframe(WebRtc_Word16 *data, FILE *inp, int length) {
+
+    short k, rlen, status = 0;
+
+    rlen = (short)fread(data, sizeof(WebRtc_Word16), length, inp);
+    if (rlen < length) {
+        for (k = rlen; k < length; k++)
+            data[k] = 0;
+        status = 1;
+    }
+
+    return status;
+}
+
+int main(int argc, char* argv[])
+{
+    char inname[80], outname[40], bitname[40];
+    FILE *inp, *outp, *bitp = NULL;
+    int framecnt, endfile;
+
+    WebRtc_Word16 framelength = 80;
+
+    int err;
+
+    /* Runtime statistics */
+    double starttime;
+    double runtime;
+    double length_file;
+
+    WebRtc_Word16 stream_len = 0;
+    WebRtc_Word16 shortdata[480];
+    WebRtc_Word16 decoded[480];
+    WebRtc_Word16 streamdata[500];
+    WebRtc_Word16    speechType[1];
+    char law[2];
+    char versionNumber[40];
+
+    /* handling wrong input arguments in the command line */
+    if ((argc!=5) && (argc!=6))  {
+        printf("\n\nWrong number of arguments or flag values.\n\n");
+
+        printf("\n");
+        printf("\nG.711 test application\n\n");
+        printf("Usage:\n\n");
+        printf("./testG711.exe framelength law infile outfile \n\n");
+        printf("framelength: Framelength in samples.\n");
+        printf("law        : Coding law, A och u.\n");
+        printf("infile     : Normal speech input file\n");
+        printf("outfile    : Speech output file\n\n");
+        printf("outbits    : Output bitstream file [optional]\n\n");
+        exit(0);
+
+    }
+
+    /* Get version and print */
+    WebRtcG711_Version(versionNumber, 40);
+
+    printf("-----------------------------------\n");
+    printf("G.711 version: %s\n\n", versionNumber);
+    /* Get frame length */
+    framelength = atoi(argv[1]);
+    
+    /* Get compression law */
+    strcpy(law, argv[2]);
+
+    /* Get Input and Output files */
+    sscanf(argv[3], "%s", inname);
+    sscanf(argv[4], "%s", outname);
+    if (argc==6) {
+        sscanf(argv[5], "%s", bitname);
+        if ((bitp = fopen(bitname,"wb")) == NULL) {
+            printf("  G.711: Cannot read file %s.\n", bitname);
+            exit(1);
+        }
+    }
+
+    if ((inp = fopen(inname,"rb")) == NULL) {
+        printf("  G.711: Cannot read file %s.\n", inname);
+        exit(1);
+    }
+    if ((outp = fopen(outname,"wb")) == NULL) {
+        printf("  G.711: Cannot write file %s.\n", outname);
+        exit(1);
+    }
+    printf("\nInput:  %s\nOutput: %s\n", inname, outname);
+    if (argc==6) {
+        printf("\nBitfile:  %s\n", bitname);
+    }
+
+    starttime = clock()/(double)CLOCKS_PER_SEC_G711; /* Runtime statistics */
+
+     /* Initialize encoder and decoder */
+    framecnt= 0;
+    endfile    = 0;
+    while (endfile == 0) {
+        framecnt++;
+        /* Read speech block */
+        endfile = readframe(shortdata, inp, framelength);
+
+        /* G.711 encoding */
+        if (!strcmp(law,"A")) {
+            /* A-law encoding */
+            stream_len = WebRtcG711_EncodeA(NULL, shortdata, framelength, streamdata);
+            if (argc==6){
+              /* Write bits to file */
+              if (fwrite(streamdata, sizeof(unsigned char), stream_len,
+                         bitp) != static_cast<size_t>(stream_len)) {
+                return -1;
+              }
+            }
+            err = WebRtcG711_DecodeA(NULL, streamdata, stream_len, decoded, speechType);
+        } else if (!strcmp(law,"u")){
+            /* u-law encoding */
+            stream_len = WebRtcG711_EncodeU(NULL, shortdata, framelength, streamdata);
+            if (argc==6){
+              /* Write bits to file */
+              if (fwrite(streamdata, sizeof(unsigned char), stream_len,
+                         bitp) != static_cast<size_t>(stream_len)) {
+                return -1;
+              }
+            }
+            err = WebRtcG711_DecodeU(NULL, streamdata, stream_len, decoded, speechType);
+        } else {
+            printf("Wrong law mode\n");
+            exit (1);
+        }
+        if (stream_len < 0 || err < 0) {
+            /* exit if returned with error */
+            printf("Error in encoder/decoder\n");
+        } else {
+          /* Write coded speech to file */
+          if (fwrite(decoded, sizeof(short), framelength,
+                     outp) != static_cast<size_t>(framelength)) {
+            return -1;
+          }
+        }
+    }
+
+
+    runtime = (double)(clock()/(double)CLOCKS_PER_SEC_G711-starttime);
+    length_file = ((double)framecnt*(double)framelength/8000);
+    printf("\n\nLength of speech file: %.1f s\n", length_file);
+    printf("Time to run G.711:      %.2f s (%.2f %% of realtime)\n\n", runtime, (100*runtime/length_file));
+    printf("---------------------END----------------------\n");
+
+    fclose(inp);
+    fclose(outp);
+
+
+    return 0;
+}
diff --git a/src/modules/audio_coding/codecs/g722/g722.gypi b/src/modules/audio_coding/codecs/g722/g722.gypi
new file mode 100644
index 0000000..311b5a0
--- /dev/null
+++ b/src/modules/audio_coding/codecs/g722/g722.gypi
@@ -0,0 +1,64 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+  'targets': [
+    {
+      'target_name': 'G722',
+      'type': '<(library)',
+      'include_dirs': [
+        'include',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include',
+        ],
+      },
+      'sources': [
+        'include/g722_interface.h',
+        'g722_interface.c',
+        'g722_encode.c',
+        'g722_decode.c',
+        'g722_enc_dec.h',
+      ],
+    },
+  ], # targets
+  'conditions': [
+    ['include_tests==1', {
+      'targets': [
+        {
+          'target_name': 'g722_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'G722',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+          ],
+          'sources': [
+            'g722_unittest.cc',
+          ],
+        },
+        {
+          'target_name': 'G722Test',
+          'type': 'executable',
+          'dependencies': [
+            'G722',
+          ],
+          'sources': [
+            'test/testG722.cc',
+          ],
+        },
+      ], # targets
+    }], # include_tests
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/modules/audio_coding/codecs/g722/g722_decode.c b/src/modules/audio_coding/codecs/g722/g722_decode.c
new file mode 100644
index 0000000..499cc8f
--- /dev/null
+++ b/src/modules/audio_coding/codecs/g722/g722_decode.c
@@ -0,0 +1,410 @@
+/*
+ * SpanDSP - a series of DSP components for telephony
+ *
+ * g722_decode.c - The ITU G.722 codec, decode part.
+ *
+ * Written by Steve Underwood <steveu@coppice.org>
+ *
+ * Copyright (C) 2005 Steve Underwood
+ *
+ *  Despite my general liking of the GPL, I place my own contributions 
+ *  to this code in the public domain for the benefit of all mankind -
+ *  even the slimy ones who might try to proprietize my work and use it
+ *  to my detriment.
+ *
+ * Based in part on a single channel G.722 codec which is:
+ *
+ * Copyright (c) CMU 1993
+ * Computer Science, Speech Group
+ * Chengxiang Lu and Alex Hauptmann
+ *
+ * $Id: g722_decode.c,v 1.15 2006/07/07 16:37:49 steveu Exp $
+ *
+ * Modifications for WebRtc, 2011/04/28, by tlegrand:
+ * -Removed usage of inttypes.h and tgmath.h
+ * -Changed to use WebRtc types
+ * -Changed __inline__ to __inline
+ * -Added saturation check on output
+ */
+
+/*! \file */
+
+
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include <stdio.h>
+#include <memory.h>
+#include <stdlib.h>
+
+#include "typedefs.h"
+#include "g722_enc_dec.h"
+
+
+#if !defined(FALSE)
+#define FALSE 0
+#endif
+#if !defined(TRUE)
+#define TRUE (!FALSE)
+#endif
+
+static __inline WebRtc_Word16 saturate(WebRtc_Word32 amp)
+{
+    WebRtc_Word16 amp16;
+
+    /* Hopefully this is optimised for the common case - not clipping */
+    amp16 = (WebRtc_Word16) amp;
+    if (amp == amp16)
+        return amp16;
+    if (amp > WEBRTC_INT16_MAX)
+        return  WEBRTC_INT16_MAX;
+    return  WEBRTC_INT16_MIN;
+}
+/*- End of function --------------------------------------------------------*/
+
+static void block4(g722_decode_state_t *s, int band, int d);
+
+static void block4(g722_decode_state_t *s, int band, int d)
+{
+    int wd1;
+    int wd2;
+    int wd3;
+    int i;
+
+    /* Block 4, RECONS */
+    s->band[band].d[0] = d;
+    s->band[band].r[0] = saturate(s->band[band].s + d);
+
+    /* Block 4, PARREC */
+    s->band[band].p[0] = saturate(s->band[band].sz + d);
+
+    /* Block 4, UPPOL2 */
+    for (i = 0;  i < 3;  i++)
+        s->band[band].sg[i] = s->band[band].p[i] >> 15;
+    wd1 = saturate(s->band[band].a[1] << 2);
+
+    wd2 = (s->band[band].sg[0] == s->band[band].sg[1])  ?  -wd1  :  wd1;
+    if (wd2 > 32767)
+        wd2 = 32767;
+    wd3 = (s->band[band].sg[0] == s->band[band].sg[2])  ?  128  :  -128;
+    wd3 += (wd2 >> 7);
+    wd3 += (s->band[band].a[2]*32512) >> 15;
+    if (wd3 > 12288)
+        wd3 = 12288;
+    else if (wd3 < -12288)
+        wd3 = -12288;
+    s->band[band].ap[2] = wd3;
+
+    /* Block 4, UPPOL1 */
+    s->band[band].sg[0] = s->band[band].p[0] >> 15;
+    s->band[band].sg[1] = s->band[band].p[1] >> 15;
+    wd1 = (s->band[band].sg[0] == s->band[band].sg[1])  ?  192  :  -192;
+    wd2 = (s->band[band].a[1]*32640) >> 15;
+
+    s->band[band].ap[1] = saturate(wd1 + wd2);
+    wd3 = saturate(15360 - s->band[band].ap[2]);
+    if (s->band[band].ap[1] > wd3)
+        s->band[band].ap[1] = wd3;
+    else if (s->band[band].ap[1] < -wd3)
+        s->band[band].ap[1] = -wd3;
+
+    /* Block 4, UPZERO */
+    wd1 = (d == 0)  ?  0  :  128;
+    s->band[band].sg[0] = d >> 15;
+    for (i = 1;  i < 7;  i++)
+    {
+        s->band[band].sg[i] = s->band[band].d[i] >> 15;
+        wd2 = (s->band[band].sg[i] == s->band[band].sg[0])  ?  wd1  :  -wd1;
+        wd3 = (s->band[band].b[i]*32640) >> 15;
+        s->band[band].bp[i] = saturate(wd2 + wd3);
+    }
+
+    /* Block 4, DELAYA */
+    for (i = 6;  i > 0;  i--)
+    {
+        s->band[band].d[i] = s->band[band].d[i - 1];
+        s->band[band].b[i] = s->band[band].bp[i];
+    }
+    
+    for (i = 2;  i > 0;  i--)
+    {
+        s->band[band].r[i] = s->band[band].r[i - 1];
+        s->band[band].p[i] = s->band[band].p[i - 1];
+        s->band[band].a[i] = s->band[band].ap[i];
+    }
+
+    /* Block 4, FILTEP */
+    wd1 = saturate(s->band[band].r[1] + s->band[band].r[1]);
+    wd1 = (s->band[band].a[1]*wd1) >> 15;
+    wd2 = saturate(s->band[band].r[2] + s->band[band].r[2]);
+    wd2 = (s->band[band].a[2]*wd2) >> 15;
+    s->band[band].sp = saturate(wd1 + wd2);
+
+    /* Block 4, FILTEZ */
+    s->band[band].sz = 0;
+    for (i = 6;  i > 0;  i--)
+    {
+        wd1 = saturate(s->band[band].d[i] + s->band[band].d[i]);
+        s->band[band].sz += (s->band[band].b[i]*wd1) >> 15;
+    }
+    s->band[band].sz = saturate(s->band[band].sz);
+
+    /* Block 4, PREDIC */
+    s->band[band].s = saturate(s->band[band].sp + s->band[band].sz);
+}
+/*- End of function --------------------------------------------------------*/
+
+g722_decode_state_t *WebRtc_g722_decode_init(g722_decode_state_t *s,
+                                             int rate,
+                                             int options)
+{
+    if (s == NULL)
+    {
+        if ((s = (g722_decode_state_t *) malloc(sizeof(*s))) == NULL)
+            return NULL;
+    }
+    memset(s, 0, sizeof(*s));
+    if (rate == 48000)
+        s->bits_per_sample = 6;
+    else if (rate == 56000)
+        s->bits_per_sample = 7;
+    else
+        s->bits_per_sample = 8;
+    if ((options & G722_SAMPLE_RATE_8000))
+        s->eight_k = TRUE;
+    if ((options & G722_PACKED)  &&  s->bits_per_sample != 8)
+        s->packed = TRUE;
+    else
+        s->packed = FALSE;
+    s->band[0].det = 32;
+    s->band[1].det = 8;
+    return s;
+}
+/*- End of function --------------------------------------------------------*/
+
+int WebRtc_g722_decode_release(g722_decode_state_t *s)
+{
+    free(s);
+    return 0;
+}
+/*- End of function --------------------------------------------------------*/
+
+int WebRtc_g722_decode(g722_decode_state_t *s, WebRtc_Word16 amp[],
+                       const WebRtc_UWord8 g722_data[], int len)
+{
+    static const int wl[8] = {-60, -30, 58, 172, 334, 538, 1198, 3042 };
+    static const int rl42[16] = {0, 7, 6, 5, 4, 3, 2, 1,
+                                 7, 6, 5, 4, 3,  2, 1, 0 };
+    static const int ilb[32] =
+    {
+        2048, 2093, 2139, 2186, 2233, 2282, 2332,
+        2383, 2435, 2489, 2543, 2599, 2656, 2714,
+        2774, 2834, 2896, 2960, 3025, 3091, 3158,
+        3228, 3298, 3371, 3444, 3520, 3597, 3676,
+        3756, 3838, 3922, 4008
+    };
+    static const int wh[3] = {0, -214, 798};
+    static const int rh2[4] = {2, 1, 2, 1};
+    static const int qm2[4] = {-7408, -1616,  7408,   1616};
+    static const int qm4[16] = 
+    {
+              0, -20456, -12896,  -8968, 
+          -6288,  -4240,  -2584,  -1200,
+          20456,  12896,   8968,   6288,
+           4240,   2584,   1200,      0
+    };
+    static const int qm5[32] =
+    {
+           -280,   -280, -23352, -17560,
+         -14120, -11664,  -9752,  -8184,
+          -6864,  -5712,  -4696,  -3784,
+          -2960,  -2208,  -1520,   -880,
+          23352,  17560,  14120,  11664,
+           9752,   8184,   6864,   5712,
+           4696,   3784,   2960,   2208,
+           1520,    880,    280,   -280
+    };
+    static const int qm6[64] =
+    {
+           -136,   -136,   -136,   -136,
+         -24808, -21904, -19008, -16704,
+         -14984, -13512, -12280, -11192,
+         -10232,  -9360,  -8576,  -7856,
+          -7192,  -6576,  -6000,  -5456,
+          -4944,  -4464,  -4008,  -3576,
+          -3168,  -2776,  -2400,  -2032,
+          -1688,  -1360,  -1040,   -728,
+          24808,  21904,  19008,  16704,
+          14984,  13512,  12280,  11192,
+          10232,   9360,   8576,   7856,
+           7192,   6576,   6000,   5456,
+           4944,   4464,   4008,   3576,
+           3168,   2776,   2400,   2032,
+           1688,   1360,   1040,    728,
+            432,    136,   -432,   -136
+    };
+    static const int qmf_coeffs[12] =
+    {
+           3,  -11,   12,   32, -210,  951, 3876, -805,  362, -156,   53,  -11,
+    };
+
+    int dlowt;
+    int rlow;
+    int ihigh;
+    int dhigh;
+    int rhigh;
+    int xout1;
+    int xout2;
+    int wd1;
+    int wd2;
+    int wd3;
+    int code;
+    int outlen;
+    int i;
+    int j;
+
+    outlen = 0;
+    rhigh = 0;
+    for (j = 0;  j < len;  )
+    {
+        if (s->packed)
+        {
+            /* Unpack the code bits */
+            if (s->in_bits < s->bits_per_sample)
+            {
+                s->in_buffer |= (g722_data[j++] << s->in_bits);
+                s->in_bits += 8;
+            }
+            code = s->in_buffer & ((1 << s->bits_per_sample) - 1);
+            s->in_buffer >>= s->bits_per_sample;
+            s->in_bits -= s->bits_per_sample;
+        }
+        else
+        {
+            code = g722_data[j++];
+        }
+
+        switch (s->bits_per_sample)
+        {
+        default:
+        case 8:
+            wd1 = code & 0x3F;
+            ihigh = (code >> 6) & 0x03;
+            wd2 = qm6[wd1];
+            wd1 >>= 2;
+            break;
+        case 7:
+            wd1 = code & 0x1F;
+            ihigh = (code >> 5) & 0x03;
+            wd2 = qm5[wd1];
+            wd1 >>= 1;
+            break;
+        case 6:
+            wd1 = code & 0x0F;
+            ihigh = (code >> 4) & 0x03;
+            wd2 = qm4[wd1];
+            break;
+        }
+        /* Block 5L, LOW BAND INVQBL */
+        wd2 = (s->band[0].det*wd2) >> 15;
+        /* Block 5L, RECONS */
+        rlow = s->band[0].s + wd2;
+        /* Block 6L, LIMIT */
+        if (rlow > 16383)
+            rlow = 16383;
+        else if (rlow < -16384)
+            rlow = -16384;
+
+        /* Block 2L, INVQAL */
+        wd2 = qm4[wd1];
+        dlowt = (s->band[0].det*wd2) >> 15;
+
+        /* Block 3L, LOGSCL */
+        wd2 = rl42[wd1];
+        wd1 = (s->band[0].nb*127) >> 7;
+        wd1 += wl[wd2];
+        if (wd1 < 0)
+            wd1 = 0;
+        else if (wd1 > 18432)
+            wd1 = 18432;
+        s->band[0].nb = wd1;
+            
+        /* Block 3L, SCALEL */
+        wd1 = (s->band[0].nb >> 6) & 31;
+        wd2 = 8 - (s->band[0].nb >> 11);
+        wd3 = (wd2 < 0)  ?  (ilb[wd1] << -wd2)  :  (ilb[wd1] >> wd2);
+        s->band[0].det = wd3 << 2;
+
+        block4(s, 0, dlowt);
+        
+        if (!s->eight_k)
+        {
+            /* Block 2H, INVQAH */
+            wd2 = qm2[ihigh];
+            dhigh = (s->band[1].det*wd2) >> 15;
+            /* Block 5H, RECONS */
+            rhigh = dhigh + s->band[1].s;
+            /* Block 6H, LIMIT */
+            if (rhigh > 16383)
+                rhigh = 16383;
+            else if (rhigh < -16384)
+                rhigh = -16384;
+
+            /* Block 2H, INVQAH */
+            wd2 = rh2[ihigh];
+            wd1 = (s->band[1].nb*127) >> 7;
+            wd1 += wh[wd2];
+            if (wd1 < 0)
+                wd1 = 0;
+            else if (wd1 > 22528)
+                wd1 = 22528;
+            s->band[1].nb = wd1;
+            
+            /* Block 3H, SCALEH */
+            wd1 = (s->band[1].nb >> 6) & 31;
+            wd2 = 10 - (s->band[1].nb >> 11);
+            wd3 = (wd2 < 0)  ?  (ilb[wd1] << -wd2)  :  (ilb[wd1] >> wd2);
+            s->band[1].det = wd3 << 2;
+
+            block4(s, 1, dhigh);
+        }
+
+        if (s->itu_test_mode)
+        {
+            amp[outlen++] = (WebRtc_Word16) (rlow << 1);
+            amp[outlen++] = (WebRtc_Word16) (rhigh << 1);
+        }
+        else
+        {
+            if (s->eight_k)
+            {
+                amp[outlen++] = (WebRtc_Word16) (rlow << 1);
+            }
+            else
+            {
+                /* Apply the receive QMF */
+                for (i = 0;  i < 22;  i++)
+                    s->x[i] = s->x[i + 2];
+                s->x[22] = rlow + rhigh;
+                s->x[23] = rlow - rhigh;
+
+                xout1 = 0;
+                xout2 = 0;
+                for (i = 0;  i < 12;  i++)
+                {
+                    xout2 += s->x[2*i]*qmf_coeffs[i];
+                    xout1 += s->x[2*i + 1]*qmf_coeffs[11 - i];
+                }
+                /* We shift by 12 to allow for the QMF filters (DC gain = 4096), less 1
+                   to allow for the 15 bit input to the G.722 algorithm. */
+                /* WebRtc, tlegrand: added saturation */
+                amp[outlen++] = saturate(xout1 >> 11);
+                amp[outlen++] = saturate(xout2 >> 11);
+            }
+        }
+    }
+    return outlen;
+}
+/*- End of function --------------------------------------------------------*/
+/*- End of file ------------------------------------------------------------*/
diff --git a/src/modules/audio_coding/codecs/g722/g722_enc_dec.h b/src/modules/audio_coding/codecs/g722/g722_enc_dec.h
new file mode 100644
index 0000000..d2d19b0
--- /dev/null
+++ b/src/modules/audio_coding/codecs/g722/g722_enc_dec.h
@@ -0,0 +1,158 @@
+/*
+ * SpanDSP - a series of DSP components for telephony
+ *
+ * g722.h - The ITU G.722 codec.
+ *
+ * Written by Steve Underwood <steveu@coppice.org>
+ *
+ * Copyright (C) 2005 Steve Underwood
+ *
+ *  Despite my general liking of the GPL, I place my own contributions 
+ *  to this code in the public domain for the benefit of all mankind -
+ *  even the slimy ones who might try to proprietize my work and use it
+ *  to my detriment.
+ *
+ * Based on a single channel G.722 codec which is:
+ *
+ *****    Copyright (c) CMU    1993      *****
+ * Computer Science, Speech Group
+ * Chengxiang Lu and Alex Hauptmann
+ *
+ * $Id: g722.h,v 1.10 2006/06/16 12:45:53 steveu Exp $
+ *
+ * Modifications for WebRtc, 2011/04/28, by tlegrand:
+ * -Changed to use WebRtc types
+ * -Added new defines for minimum and maximum values of short int
+ */
+
+
+/*! \file */
+
+#if !defined(_G722_ENC_DEC_H_)
+#define _G722_ENC_DEC_H_
+
+/*! \page g722_page G.722 encoding and decoding
+\section g722_page_sec_1 What does it do?
+The G.722 module is a bit exact implementation of the ITU G.722 specification for all three
+specified bit rates - 64000bps, 56000bps and 48000bps. It passes the ITU tests.
+
+To allow fast and flexible interworking with narrow band telephony, the encoder and decoder
+support an option for the linear audio to be an 8k samples/second stream. In this mode the
+codec is considerably faster, and still fully compatible with wideband terminals using G.722.
+
+\section g722_page_sec_2 How does it work?
+???.
+*/
+
+#define WEBRTC_INT16_MAX 32767
+#define WEBRTC_INT16_MIN -32768
+
+enum
+{
+    G722_SAMPLE_RATE_8000 = 0x0001,
+    G722_PACKED = 0x0002
+};
+
+typedef struct
+{
+    /*! TRUE if the operating in the special ITU test mode, with the band split filters
+             disabled. */
+    int itu_test_mode;
+    /*! TRUE if the G.722 data is packed */
+    int packed;
+    /*! TRUE if encode from 8k samples/second */
+    int eight_k;
+    /*! 6 for 48000kbps, 7 for 56000kbps, or 8 for 64000kbps. */
+    int bits_per_sample;
+
+    /*! Signal history for the QMF */
+    int x[24];
+
+    struct
+    {
+        int s;
+        int sp;
+        int sz;
+        int r[3];
+        int a[3];
+        int ap[3];
+        int p[3];
+        int d[7];
+        int b[7];
+        int bp[7];
+        int sg[7];
+        int nb;
+        int det;
+    } band[2];
+
+    unsigned int in_buffer;
+    int in_bits;
+    unsigned int out_buffer;
+    int out_bits;
+} g722_encode_state_t;
+
+typedef struct
+{
+    /*! TRUE if the operating in the special ITU test mode, with the band split filters
+             disabled. */
+    int itu_test_mode;
+    /*! TRUE if the G.722 data is packed */
+    int packed;
+    /*! TRUE if decode to 8k samples/second */
+    int eight_k;
+    /*! 6 for 48000kbps, 7 for 56000kbps, or 8 for 64000kbps. */
+    int bits_per_sample;
+
+    /*! Signal history for the QMF */
+    int x[24];
+
+    struct
+    {
+        int s;
+        int sp;
+        int sz;
+        int r[3];
+        int a[3];
+        int ap[3];
+        int p[3];
+        int d[7];
+        int b[7];
+        int bp[7];
+        int sg[7];
+        int nb;
+        int det;
+    } band[2];
+    
+    unsigned int in_buffer;
+    int in_bits;
+    unsigned int out_buffer;
+    int out_bits;
+} g722_decode_state_t;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+g722_encode_state_t *WebRtc_g722_encode_init(g722_encode_state_t *s,
+                                             int rate,
+                                             int options);
+int WebRtc_g722_encode_release(g722_encode_state_t *s);
+int WebRtc_g722_encode(g722_encode_state_t *s,
+                       WebRtc_UWord8 g722_data[],
+                       const WebRtc_Word16 amp[],
+                       int len);
+
+g722_decode_state_t *WebRtc_g722_decode_init(g722_decode_state_t *s,
+                                             int rate,
+                                             int options);
+int WebRtc_g722_decode_release(g722_decode_state_t *s);
+int WebRtc_g722_decode(g722_decode_state_t *s,
+                       WebRtc_Word16 amp[],
+                       const WebRtc_UWord8 g722_data[],
+                       int len);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/src/modules/audio_coding/codecs/g722/g722_encode.c b/src/modules/audio_coding/codecs/g722/g722_encode.c
new file mode 100644
index 0000000..7487b64
--- /dev/null
+++ b/src/modules/audio_coding/codecs/g722/g722_encode.c
@@ -0,0 +1,434 @@
+/*
+ * SpanDSP - a series of DSP components for telephony
+ *
+ * g722_encode.c - The ITU G.722 codec, encode part.
+ *
+ * Written by Steve Underwood <steveu@coppice.org>
+ *
+ * Copyright (C) 2005 Steve Underwood
+ *
+ * All rights reserved.
+ *
+ *  Despite my general liking of the GPL, I place my own contributions 
+ *  to this code in the public domain for the benefit of all mankind -
+ *  even the slimy ones who might try to proprietize my work and use it
+ *  to my detriment.
+ *
+ * Based on a single channel 64kbps only G.722 codec which is:
+ *
+ *****    Copyright (c) CMU    1993      *****
+ * Computer Science, Speech Group
+ * Chengxiang Lu and Alex Hauptmann
+ *
+ * $Id: g722_encode.c,v 1.14 2006/07/07 16:37:49 steveu Exp $
+ *
+ * Modifications for WebRtc, 2011/04/28, by tlegrand:
+ * -Removed usage of inttypes.h and tgmath.h
+ * -Changed to use WebRtc types
+ * -Added option to run encoder bitexact with ITU-T reference implementation
+ */
+
+/*! \file */
+
+#ifdef HAVE_CONFIG_H
+#include <config.h>
+#endif
+
+#include <stdio.h>
+#include <memory.h>
+#include <stdlib.h>
+
+#include "typedefs.h"
+#include "g722_enc_dec.h"
+
+#if !defined(FALSE)
+#define FALSE 0
+#endif
+#if !defined(TRUE)
+#define TRUE (!FALSE)
+#endif
+
+static __inline WebRtc_Word16 saturate(WebRtc_Word32 amp)
+{
+    WebRtc_Word16 amp16;
+
+    /* Hopefully this is optimised for the common case - not clipping */
+    amp16 = (WebRtc_Word16) amp;
+    if (amp == amp16)
+        return amp16;
+    if (amp > WEBRTC_INT16_MAX)
+        return  WEBRTC_INT16_MAX;
+    return  WEBRTC_INT16_MIN;
+}
+/*- End of function --------------------------------------------------------*/
+
+static void block4(g722_encode_state_t *s, int band, int d)
+{
+    int wd1;
+    int wd2;
+    int wd3;
+    int i;
+
+    /* Block 4, RECONS */
+    s->band[band].d[0] = d;
+    s->band[band].r[0] = saturate(s->band[band].s + d);
+
+    /* Block 4, PARREC */
+    s->band[band].p[0] = saturate(s->band[band].sz + d);
+
+    /* Block 4, UPPOL2 */
+    for (i = 0;  i < 3;  i++)
+        s->band[band].sg[i] = s->band[band].p[i] >> 15;
+    wd1 = saturate(s->band[band].a[1] << 2);
+
+    wd2 = (s->band[band].sg[0] == s->band[band].sg[1])  ?  -wd1  :  wd1;
+    if (wd2 > 32767)
+        wd2 = 32767;
+    wd3 = (wd2 >> 7) + ((s->band[band].sg[0] == s->band[band].sg[2])  ?  128  :  -128);
+    wd3 += (s->band[band].a[2]*32512) >> 15;
+    if (wd3 > 12288)
+        wd3 = 12288;
+    else if (wd3 < -12288)
+        wd3 = -12288;
+    s->band[band].ap[2] = wd3;
+
+    /* Block 4, UPPOL1 */
+    s->band[band].sg[0] = s->band[band].p[0] >> 15;
+    s->band[band].sg[1] = s->band[band].p[1] >> 15;
+    wd1 = (s->band[band].sg[0] == s->band[band].sg[1])  ?  192  :  -192;
+    wd2 = (s->band[band].a[1]*32640) >> 15;
+
+    s->band[band].ap[1] = saturate(wd1 + wd2);
+    wd3 = saturate(15360 - s->band[band].ap[2]);
+    if (s->band[band].ap[1] > wd3)
+        s->band[band].ap[1] = wd3;
+    else if (s->band[band].ap[1] < -wd3)
+        s->band[band].ap[1] = -wd3;
+
+    /* Block 4, UPZERO */
+    wd1 = (d == 0)  ?  0  :  128;
+    s->band[band].sg[0] = d >> 15;
+    for (i = 1;  i < 7;  i++)
+    {
+        s->band[band].sg[i] = s->band[band].d[i] >> 15;
+        wd2 = (s->band[band].sg[i] == s->band[band].sg[0])  ?  wd1  :  -wd1;
+        wd3 = (s->band[band].b[i]*32640) >> 15;
+        s->band[band].bp[i] = saturate(wd2 + wd3);
+    }
+
+    /* Block 4, DELAYA */
+    for (i = 6;  i > 0;  i--)
+    {
+        s->band[band].d[i] = s->band[band].d[i - 1];
+        s->band[band].b[i] = s->band[band].bp[i];
+    }
+    
+    for (i = 2;  i > 0;  i--)
+    {
+        s->band[band].r[i] = s->band[band].r[i - 1];
+        s->band[band].p[i] = s->band[band].p[i - 1];
+        s->band[band].a[i] = s->band[band].ap[i];
+    }
+
+    /* Block 4, FILTEP */
+    wd1 = saturate(s->band[band].r[1] + s->band[band].r[1]);
+    wd1 = (s->band[band].a[1]*wd1) >> 15;
+    wd2 = saturate(s->band[band].r[2] + s->band[band].r[2]);
+    wd2 = (s->band[band].a[2]*wd2) >> 15;
+    s->band[band].sp = saturate(wd1 + wd2);
+
+    /* Block 4, FILTEZ */
+    s->band[band].sz = 0;
+    for (i = 6;  i > 0;  i--)
+    {
+        wd1 = saturate(s->band[band].d[i] + s->band[band].d[i]);
+        s->band[band].sz += (s->band[band].b[i]*wd1) >> 15;
+    }
+    s->band[band].sz = saturate(s->band[band].sz);
+
+    /* Block 4, PREDIC */
+    s->band[band].s = saturate(s->band[band].sp + s->band[band].sz);
+}
+/*- End of function --------------------------------------------------------*/
+
+g722_encode_state_t *WebRtc_g722_encode_init(g722_encode_state_t *s,
+                                             int rate, int options)
+{
+    if (s == NULL)
+    {
+        if ((s = (g722_encode_state_t *) malloc(sizeof(*s))) == NULL)
+            return NULL;
+    }
+    memset(s, 0, sizeof(*s));
+    if (rate == 48000)
+        s->bits_per_sample = 6;
+    else if (rate == 56000)
+        s->bits_per_sample = 7;
+    else
+        s->bits_per_sample = 8;
+    if ((options & G722_SAMPLE_RATE_8000))
+        s->eight_k = TRUE;
+    if ((options & G722_PACKED)  &&  s->bits_per_sample != 8)
+        s->packed = TRUE;
+    else
+        s->packed = FALSE;
+    s->band[0].det = 32;
+    s->band[1].det = 8;
+    return s;
+}
+/*- End of function --------------------------------------------------------*/
+
+int WebRtc_g722_encode_release(g722_encode_state_t *s)
+{
+    free(s);
+    return 0;
+}
+/*- End of function --------------------------------------------------------*/
+
+/* WebRtc, tlegrand:
+ * Only define the following if bit-exactness with reference implementation
+ * is needed. Will only have any effect if input signal is saturated.
+ */
+//#define RUN_LIKE_REFERENCE_G722
+#ifdef RUN_LIKE_REFERENCE_G722
+WebRtc_Word16 limitValues (WebRtc_Word16 rl)
+{
+
+    WebRtc_Word16 yl;
+
+    yl = (rl > 16383) ? 16383 : ((rl < -16384) ? -16384 : rl);
+
+    return (yl);
+}
+#endif
+
+int WebRtc_g722_encode(g722_encode_state_t *s, WebRtc_UWord8 g722_data[],
+                       const WebRtc_Word16 amp[], int len)
+{
+    static const int q6[32] =
+    {
+           0,   35,   72,  110,  150,  190,  233,  276,
+         323,  370,  422,  473,  530,  587,  650,  714,
+         786,  858,  940, 1023, 1121, 1219, 1339, 1458,
+        1612, 1765, 1980, 2195, 2557, 2919,    0,    0
+    };
+    static const int iln[32] =
+    {
+         0, 63, 62, 31, 30, 29, 28, 27,
+        26, 25, 24, 23, 22, 21, 20, 19,
+        18, 17, 16, 15, 14, 13, 12, 11,
+        10,  9,  8,  7,  6,  5,  4,  0
+    };
+    static const int ilp[32] =
+    {
+         0, 61, 60, 59, 58, 57, 56, 55,
+        54, 53, 52, 51, 50, 49, 48, 47,
+        46, 45, 44, 43, 42, 41, 40, 39,
+        38, 37, 36, 35, 34, 33, 32,  0
+    };
+    static const int wl[8] =
+    {
+        -60, -30, 58, 172, 334, 538, 1198, 3042
+    };
+    static const int rl42[16] =
+    {
+        0, 7, 6, 5, 4, 3, 2, 1, 7, 6, 5, 4, 3, 2, 1, 0
+    };
+    static const int ilb[32] =
+    {
+        2048, 2093, 2139, 2186, 2233, 2282, 2332,
+        2383, 2435, 2489, 2543, 2599, 2656, 2714,
+        2774, 2834, 2896, 2960, 3025, 3091, 3158,
+        3228, 3298, 3371, 3444, 3520, 3597, 3676,
+        3756, 3838, 3922, 4008
+    };
+    static const int qm4[16] =
+    {
+             0, -20456, -12896, -8968,
+         -6288,  -4240,  -2584, -1200,
+         20456,  12896,   8968,  6288,
+          4240,   2584,   1200,     0
+    };
+    static const int qm2[4] =
+    {
+        -7408,  -1616,   7408,   1616
+    };
+    static const int qmf_coeffs[12] =
+    {
+           3,  -11,   12,   32, -210,  951, 3876, -805,  362, -156,   53,  -11,
+    };
+    static const int ihn[3] = {0, 1, 0};
+    static const int ihp[3] = {0, 3, 2};
+    static const int wh[3] = {0, -214, 798};
+    static const int rh2[4] = {2, 1, 2, 1};
+
+    int dlow;
+    int dhigh;
+    int el;
+    int wd;
+    int wd1;
+    int ril;
+    int wd2;
+    int il4;
+    int ih2;
+    int wd3;
+    int eh;
+    int mih;
+    int i;
+    int j;
+    /* Low and high band PCM from the QMF */
+    int xlow;
+    int xhigh;
+    int g722_bytes;
+    /* Even and odd tap accumulators */
+    int sumeven;
+    int sumodd;
+    int ihigh;
+    int ilow;
+    int code;
+
+    g722_bytes = 0;
+    xhigh = 0;
+    for (j = 0;  j < len;  )
+    {
+        if (s->itu_test_mode)
+        {
+            xlow =
+            xhigh = amp[j++] >> 1;
+        }
+        else
+        {
+            if (s->eight_k)
+            {
+                /* We shift by 1 to allow for the 15 bit input to the G.722 algorithm. */
+                xlow = amp[j++] >> 1;
+            }
+            else
+            {
+                /* Apply the transmit QMF */
+                /* Shuffle the buffer down */
+                for (i = 0;  i < 22;  i++)
+                    s->x[i] = s->x[i + 2];
+                s->x[22] = amp[j++];
+                s->x[23] = amp[j++];
+    
+                /* Discard every other QMF output */
+                sumeven = 0;
+                sumodd = 0;
+                for (i = 0;  i < 12;  i++)
+                {
+                    sumodd += s->x[2*i]*qmf_coeffs[i];
+                    sumeven += s->x[2*i + 1]*qmf_coeffs[11 - i];
+                }
+                /* We shift by 12 to allow for the QMF filters (DC gain = 4096), plus 1
+                   to allow for us summing two filters, plus 1 to allow for the 15 bit
+                   input to the G.722 algorithm. */
+                xlow = (sumeven + sumodd) >> 14;
+                xhigh = (sumeven - sumodd) >> 14;
+
+#ifdef RUN_LIKE_REFERENCE_G722
+                /* The following lines are only used to verify bit-exactness
+                 * with reference implementation of G.722. Higher precision
+                 * is achieved without limiting the values.
+                 */
+                xlow = limitValues(xlow);
+                xhigh = limitValues(xhigh);
+#endif
+            }
+        }
+        /* Block 1L, SUBTRA */
+        el = saturate(xlow - s->band[0].s);
+
+        /* Block 1L, QUANTL */
+        wd = (el >= 0)  ?  el  :  -(el + 1);
+
+        for (i = 1;  i < 30;  i++)
+        {
+            wd1 = (q6[i]*s->band[0].det) >> 12;
+            if (wd < wd1)
+                break;
+        }
+        ilow = (el < 0)  ?  iln[i]  :  ilp[i];
+
+        /* Block 2L, INVQAL */
+        ril = ilow >> 2;
+        wd2 = qm4[ril];
+        dlow = (s->band[0].det*wd2) >> 15;
+
+        /* Block 3L, LOGSCL */
+        il4 = rl42[ril];
+        wd = (s->band[0].nb*127) >> 7;
+        s->band[0].nb = wd + wl[il4];
+        if (s->band[0].nb < 0)
+            s->band[0].nb = 0;
+        else if (s->band[0].nb > 18432)
+            s->band[0].nb = 18432;
+
+        /* Block 3L, SCALEL */
+        wd1 = (s->band[0].nb >> 6) & 31;
+        wd2 = 8 - (s->band[0].nb >> 11);
+        wd3 = (wd2 < 0)  ?  (ilb[wd1] << -wd2)  :  (ilb[wd1] >> wd2);
+        s->band[0].det = wd3 << 2;
+
+        block4(s, 0, dlow);
+        
+        if (s->eight_k)
+        {
+            /* Just leave the high bits as zero */
+            code = (0xC0 | ilow) >> (8 - s->bits_per_sample);
+        }
+        else
+        {
+            /* Block 1H, SUBTRA */
+            eh = saturate(xhigh - s->band[1].s);
+
+            /* Block 1H, QUANTH */
+            wd = (eh >= 0)  ?  eh  :  -(eh + 1);
+            wd1 = (564*s->band[1].det) >> 12;
+            mih = (wd >= wd1)  ?  2  :  1;
+            ihigh = (eh < 0)  ?  ihn[mih]  :  ihp[mih];
+
+            /* Block 2H, INVQAH */
+            wd2 = qm2[ihigh];
+            dhigh = (s->band[1].det*wd2) >> 15;
+
+            /* Block 3H, LOGSCH */
+            ih2 = rh2[ihigh];
+            wd = (s->band[1].nb*127) >> 7;
+            s->band[1].nb = wd + wh[ih2];
+            if (s->band[1].nb < 0)
+                s->band[1].nb = 0;
+            else if (s->band[1].nb > 22528)
+                s->band[1].nb = 22528;
+
+            /* Block 3H, SCALEH */
+            wd1 = (s->band[1].nb >> 6) & 31;
+            wd2 = 10 - (s->band[1].nb >> 11);
+            wd3 = (wd2 < 0)  ?  (ilb[wd1] << -wd2)  :  (ilb[wd1] >> wd2);
+            s->band[1].det = wd3 << 2;
+
+            block4(s, 1, dhigh);
+            code = ((ihigh << 6) | ilow) >> (8 - s->bits_per_sample);
+        }
+
+        if (s->packed)
+        {
+            /* Pack the code bits */
+            s->out_buffer |= (code << s->out_bits);
+            s->out_bits += s->bits_per_sample;
+            if (s->out_bits >= 8)
+            {
+                g722_data[g722_bytes++] = (WebRtc_UWord8) (s->out_buffer & 0xFF);
+                s->out_bits -= 8;
+                s->out_buffer >>= 8;
+            }
+        }
+        else
+        {
+            g722_data[g722_bytes++] = (WebRtc_UWord8) code;
+        }
+    }
+    return g722_bytes;
+}
+/*- End of function --------------------------------------------------------*/
+/*- End of file ------------------------------------------------------------*/
diff --git a/src/modules/audio_coding/codecs/g722/g722_interface.c b/src/modules/audio_coding/codecs/g722/g722_interface.c
new file mode 100644
index 0000000..d559014
--- /dev/null
+++ b/src/modules/audio_coding/codecs/g722/g722_interface.c
@@ -0,0 +1,115 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+
+#include <stdlib.h>
+#include <string.h>
+#include "g722_interface.h"
+#include "g722_enc_dec.h"
+#include "typedefs.h"
+
+
+WebRtc_Word16 WebRtcG722_CreateEncoder(G722EncInst **G722enc_inst)
+{
+    *G722enc_inst=(G722EncInst*)malloc(sizeof(g722_encode_state_t));
+    if (*G722enc_inst!=NULL) {
+      return(0);
+    } else {
+      return(-1);
+    }
+}
+
+WebRtc_Word16 WebRtcG722_EncoderInit(G722EncInst *G722enc_inst)
+{
+    // Create and/or reset the G.722 encoder
+    // Bitrate 64 kbps and wideband mode (2)
+    G722enc_inst = (G722EncInst *) WebRtc_g722_encode_init(
+        (g722_encode_state_t*) G722enc_inst, 64000, 2);
+    if (G722enc_inst == NULL) {
+        return -1;
+    } else {
+        return 0;
+    }
+}
+
+WebRtc_Word16 WebRtcG722_FreeEncoder(G722EncInst *G722enc_inst)
+{
+    // Free encoder memory
+    return WebRtc_g722_encode_release((g722_encode_state_t*) G722enc_inst);
+}
+
+WebRtc_Word16 WebRtcG722_Encode(G722EncInst *G722enc_inst,
+                                WebRtc_Word16 *speechIn,
+                                WebRtc_Word16 len,
+                                WebRtc_Word16 *encoded)
+{
+    unsigned char *codechar = (unsigned char*) encoded;
+    // Encode the input speech vector
+    return WebRtc_g722_encode((g722_encode_state_t*) G722enc_inst,
+                       codechar, speechIn, len);
+}
+
+WebRtc_Word16 WebRtcG722_CreateDecoder(G722DecInst **G722dec_inst)
+{
+    *G722dec_inst=(G722DecInst*)malloc(sizeof(g722_decode_state_t));
+    if (*G722dec_inst!=NULL) {
+      return(0);
+    } else {
+      return(-1);
+    }
+}
+
+WebRtc_Word16 WebRtcG722_DecoderInit(G722DecInst *G722dec_inst)
+{
+    // Create and/or reset the G.722 decoder
+    // Bitrate 64 kbps and wideband mode (2)
+    G722dec_inst = (G722DecInst *) WebRtc_g722_decode_init(
+        (g722_decode_state_t*) G722dec_inst, 64000, 2);
+    if (G722dec_inst == NULL) {
+        return -1;
+    } else {
+        return 0;
+    }
+}
+
+WebRtc_Word16 WebRtcG722_FreeDecoder(G722DecInst *G722dec_inst)
+{
+    // Free encoder memory
+    return WebRtc_g722_decode_release((g722_decode_state_t*) G722dec_inst);
+}
+
+WebRtc_Word16 WebRtcG722_Decode(G722DecInst *G722dec_inst,
+                                WebRtc_Word16 *encoded,
+                                WebRtc_Word16 len,
+                                WebRtc_Word16 *decoded,
+                                WebRtc_Word16 *speechType)
+{
+    // Decode the G.722 encoder stream
+    *speechType=G722_WEBRTC_SPEECH;
+    return WebRtc_g722_decode((g722_decode_state_t*) G722dec_inst,
+                              decoded, (WebRtc_UWord8*) encoded, len);
+}
+
+WebRtc_Word16 WebRtcG722_Version(char *versionStr, short len)
+{
+    // Get version string
+    char version[30] = "2.0.0\n";
+    if (strlen(version) < (unsigned int)len)
+    {
+        strcpy(versionStr, version);
+        return 0;
+    }
+    else
+    {
+        return -1;
+    }
+}
+
diff --git a/src/modules/audio_coding/codecs/g722/g722_unittest.cc b/src/modules/audio_coding/codecs/g722/g722_unittest.cc
new file mode 100644
index 0000000..a828edd
--- /dev/null
+++ b/src/modules/audio_coding/codecs/g722/g722_unittest.cc
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Empty test just to get code coverage metrics for this dir.
+ */
+#include "g722_interface.h"
+#include "gtest/gtest.h"
+
+TEST(G722Test, EmptyTestToGetCodeCoverage) {}
diff --git a/src/modules/audio_coding/codecs/g722/include/g722_interface.h b/src/modules/audio_coding/codecs/g722/include/g722_interface.h
new file mode 100644
index 0000000..e50d66f
--- /dev/null
+++ b/src/modules/audio_coding/codecs/g722/include/g722_interface.h
@@ -0,0 +1,190 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_AUDIO_CODING_CODECS_G722_MAIN_INTERFACE_G722_INTERFACE_H_
+#define MODULES_AUDIO_CODING_CODECS_G722_MAIN_INTERFACE_G722_INTERFACE_H_
+
+#include "typedefs.h"
+
+/*
+ * Solution to support multiple instances
+ */
+
+typedef struct WebRtcG722EncInst    G722EncInst;
+typedef struct WebRtcG722DecInst    G722DecInst;
+
+/*
+ * Comfort noise constants
+ */
+
+#define G722_WEBRTC_SPEECH     1
+#define G722_WEBRTC_CNG        2
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/****************************************************************************
+ * WebRtcG722_CreateEncoder(...)
+ *
+ * Create memory used for G722 encoder
+ *
+ * Input:
+ *     - G722enc_inst         : G722 instance for encoder
+ *
+ * Return value               :  0 - Ok
+ *                              -1 - Error
+ */
+WebRtc_Word16 WebRtcG722_CreateEncoder(G722EncInst **G722enc_inst);
+
+
+/****************************************************************************
+ * WebRtcG722_EncoderInit(...)
+ *
+ * This function initializes a G722 instance
+ *
+ * Input:
+ *     - G722enc_inst         : G722 instance, i.e. the user that should receive
+ *                             be initialized
+ *
+ * Return value               :  0 - Ok
+ *                              -1 - Error
+ */
+
+WebRtc_Word16 WebRtcG722_EncoderInit(G722EncInst *G722enc_inst);
+
+
+/****************************************************************************
+ * WebRtcG722_FreeEncoder(...)
+ *
+ * Free the memory used for G722 encoder
+ *
+ * Input:
+ *     - G722enc_inst         : G722 instance for encoder
+ *
+ * Return value               :  0 - Ok
+ *                              -1 - Error
+ */
+WebRtc_Word16 WebRtcG722_FreeEncoder(G722EncInst *G722enc_inst);
+
+
+
+/****************************************************************************
+ * WebRtcG722_Encode(...)
+ *
+ * This function encodes G722 encoded data.
+ *
+ * Input:
+ *     - G722enc_inst         : G722 instance, i.e. the user that should encode
+ *                              a packet
+ *     - speechIn             : Input speech vector
+ *     - len                  : Samples in speechIn
+ *
+ * Output:
+ *        - encoded           : The encoded data vector
+ *
+ * Return value               : >0 - Length (in bytes) of coded data
+ *                              -1 - Error
+ */
+
+WebRtc_Word16 WebRtcG722_Encode(G722EncInst *G722enc_inst,
+                                WebRtc_Word16 *speechIn,
+                                WebRtc_Word16 len,
+                                WebRtc_Word16 *encoded);
+
+
+/****************************************************************************
+ * WebRtcG722_CreateDecoder(...)
+ *
+ * Create memory used for G722 encoder
+ *
+ * Input:
+ *     - G722dec_inst         : G722 instance for decoder
+ *
+ * Return value               :  0 - Ok
+ *                              -1 - Error
+ */
+WebRtc_Word16 WebRtcG722_CreateDecoder(G722DecInst **G722dec_inst);
+
+
+/****************************************************************************
+ * WebRtcG722_DecoderInit(...)
+ *
+ * This function initializes a G729 instance
+ *
+ * Input:
+ *     - G729_decinst_t    : G729 instance, i.e. the user that should receive
+ *                           be initialized
+ *
+ * Return value            :  0 - Ok
+ *                           -1 - Error
+ */
+
+WebRtc_Word16 WebRtcG722_DecoderInit(G722DecInst *G722dec_inst);
+
+
+/****************************************************************************
+ * WebRtcG722_FreeDecoder(...)
+ *
+ * Free the memory used for G722 decoder
+ *
+ * Input:
+ *     - G722dec_inst         : G722 instance for decoder
+ *
+ * Return value               :  0 - Ok
+ *                              -1 - Error
+ */
+
+WebRtc_Word16 WebRtcG722_FreeDecoder(G722DecInst *G722dec_inst);
+
+
+/****************************************************************************
+ * WebRtcG722_Decode(...)
+ *
+ * This function decodes a packet with G729 frame(s). Output speech length
+ * will be a multiple of 80 samples (80*frames/packet).
+ *
+ * Input:
+ *     - G722dec_inst       : G722 instance, i.e. the user that should decode
+ *                            a packet
+ *     - encoded            : Encoded G722 frame(s)
+ *     - len                : Bytes in encoded vector
+ *
+ * Output:
+ *        - decoded         : The decoded vector
+ *      - speechType        : 1 normal, 2 CNG (Since G722 does not have its own
+ *                            DTX/CNG scheme it should always return 1)
+ *
+ * Return value             : >0 - Samples in decoded vector
+ *                            -1 - Error
+ */
+
+WebRtc_Word16 WebRtcG722_Decode(G722DecInst *G722dec_inst,
+                                WebRtc_Word16 *encoded,
+                                WebRtc_Word16 len,
+                                WebRtc_Word16 *decoded,
+                                WebRtc_Word16 *speechType);
+
+/****************************************************************************
+ * WebRtcG722_Version(...)
+ *
+ * Get a string with the current version of the codec
+ */
+
+WebRtc_Word16 WebRtcG722_Version(char *versionStr, short len);
+
+
+#ifdef __cplusplus
+}
+#endif
+
+
+#endif /* MODULES_AUDIO_CODING_CODECS_G722_MAIN_INTERFACE_G722_INTERFACE_H_ */
diff --git a/src/modules/audio_coding/codecs/g722/test/testG722.cc b/src/modules/audio_coding/codecs/g722/test/testG722.cc
new file mode 100644
index 0000000..d2fdca3
--- /dev/null
+++ b/src/modules/audio_coding/codecs/g722/test/testG722.cc
@@ -0,0 +1,162 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * testG722.cpp : Defines the entry point for the console application.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include "typedefs.h"
+
+/* include API */
+#include "g722_interface.h"
+
+/* Runtime statistics */
+#include <time.h>
+#define CLOCKS_PER_SEC_G722  100000
+
+// Forward declaration
+typedef struct WebRtcG722EncInst    G722EncInst;
+typedef struct WebRtcG722DecInst    G722DecInst;
+
+/* function for reading audio data from PCM file */
+int readframe(WebRtc_Word16 *data, FILE *inp, int length)
+{
+    short k, rlen, status = 0;
+
+    rlen = (short)fread(data, sizeof(WebRtc_Word16), length, inp);
+    if (rlen < length) {
+        for (k = rlen; k < length; k++)
+            data[k] = 0;
+        status = 1;
+    }
+
+    return status;
+}
+
+int main(int argc, char* argv[])
+{
+    char inname[60], outbit[40], outname[40];
+    FILE *inp, *outbitp, *outp;
+
+    int framecnt, endfile;
+    WebRtc_Word16 framelength = 160;
+    G722EncInst *G722enc_inst;
+    G722DecInst *G722dec_inst;
+    int err;
+
+    /* Runtime statistics */
+    double starttime;
+    double runtime = 0;
+    double length_file;
+
+    WebRtc_Word16 stream_len = 0;
+    WebRtc_Word16 shortdata[960];
+    WebRtc_Word16 decoded[960];
+    WebRtc_Word16 streamdata[80*3];
+    WebRtc_Word16 speechType[1];
+
+    /* handling wrong input arguments in the command line */
+    if (argc!=5)  {
+        printf("\n\nWrong number of arguments or flag values.\n\n");
+
+        printf("\n");
+        printf("Usage:\n\n");
+        printf("./testG722.exe framelength infile outbitfile outspeechfile \n\n");
+        printf("with:\n");
+        printf("framelength  :    Framelength in samples.\n\n");
+        printf("infile       :    Normal speech input file\n\n");
+        printf("outbitfile   :    Bitstream output file\n\n");
+        printf("outspeechfile:    Speech output file\n\n");
+        exit(0);
+
+    }
+
+    /* Get frame length */
+    framelength = atoi(argv[1]);
+
+    /* Get Input and Output files */
+    sscanf(argv[2], "%s", inname);
+    sscanf(argv[3], "%s", outbit);
+    sscanf(argv[4], "%s", outname);
+
+    if ((inp = fopen(inname,"rb")) == NULL) {
+        printf("  G.722: Cannot read file %s.\n", inname);
+        exit(1);
+    }
+    if ((outbitp = fopen(outbit,"wb")) == NULL) {
+        printf("  G.722: Cannot write file %s.\n", outbit);
+        exit(1);
+    }
+    if ((outp = fopen(outname,"wb")) == NULL) {
+        printf("  G.722: Cannot write file %s.\n", outname);
+        exit(1);
+    }
+    printf("\nInput:%s\nOutput bitstream:%s\nOutput:%s\n", inname, outbit, outname);
+
+    /* Create and init */
+    WebRtcG722_CreateEncoder((G722EncInst **)&G722enc_inst);
+    WebRtcG722_CreateDecoder((G722DecInst **)&G722dec_inst);
+    WebRtcG722_EncoderInit((G722EncInst *)G722enc_inst);
+    WebRtcG722_DecoderInit((G722DecInst *)G722dec_inst);
+
+
+    /* Initialize encoder and decoder */
+    framecnt = 0;
+    endfile = 0;
+    while (endfile == 0) {
+        framecnt++;
+
+        /* Read speech block */
+        endfile = readframe(shortdata, inp, framelength);
+
+        /* Start clock before call to encoder and decoder */
+        starttime = clock()/(double)CLOCKS_PER_SEC_G722;
+
+        /* G.722 encoding + decoding */
+        stream_len = WebRtcG722_Encode((G722EncInst *)G722enc_inst, shortdata, framelength, streamdata);
+        err = WebRtcG722_Decode((G722DecInst *)G722dec_inst, streamdata, stream_len, decoded, speechType);
+
+        /* Stop clock after call to encoder and decoder */
+        runtime += (double)((clock()/(double)CLOCKS_PER_SEC_G722)-starttime);
+
+        if (stream_len < 0 || err < 0) {
+            /* exit if returned with error */
+            printf("Error in encoder/decoder\n");
+        } else {
+          /* Write coded bits to file */
+          if (fwrite(streamdata, sizeof(short), stream_len/2,
+                     outbitp) != static_cast<size_t>(stream_len/2)) {
+            return -1;
+          }
+          /* Write coded speech to file */
+          if (fwrite(decoded, sizeof(short), framelength,
+                     outp) != static_cast<size_t>(framelength)) {
+            return -1;
+          }
+        }
+    }
+
+    WebRtcG722_FreeEncoder((G722EncInst *)G722enc_inst);
+    WebRtcG722_FreeDecoder((G722DecInst *)G722dec_inst);
+
+    length_file = ((double)framecnt*(double)framelength/16000);
+    printf("\n\nLength of speech file: %.1f s\n", length_file);
+    printf("Time to run G.722:      %.2f s (%.2f %% of realtime)\n\n", runtime, (100*runtime/length_file));
+    printf("---------------------END----------------------\n");
+
+    fclose(inp);
+    fclose(outbitp);
+    fclose(outp);
+
+    return 0;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/abs_quant.c b/src/modules/audio_coding/codecs/ilbc/abs_quant.c
new file mode 100644
index 0000000..4a70c8b
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/abs_quant.c
@@ -0,0 +1,80 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_AbsQuant.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "abs_quant_loop.h"
+
+
+/*----------------------------------------------------------------*
+ *  predictive noise shaping encoding of scaled start state
+ *  (subrutine for WebRtcIlbcfix_StateSearch)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_AbsQuant(
+    iLBC_Enc_Inst_t *iLBCenc_inst,
+    /* (i) Encoder instance */
+    iLBC_bits *iLBC_encbits, /* (i/o) Encoded bits (outputs idxForMax
+                                   and idxVec, uses state_first as
+                                   input) */
+    WebRtc_Word16 *in,     /* (i) vector to encode */
+    WebRtc_Word16 *weightDenum   /* (i) denominator of synthesis filter */
+                            ) {
+  WebRtc_Word16 *syntOut;
+  WebRtc_Word16 quantLen[2];
+
+  /* Stack based */
+  WebRtc_Word16 syntOutBuf[LPC_FILTERORDER+STATE_SHORT_LEN_30MS];
+  WebRtc_Word16 in_weightedVec[STATE_SHORT_LEN_30MS+LPC_FILTERORDER];
+  WebRtc_Word16 *in_weighted = &in_weightedVec[LPC_FILTERORDER];
+
+  /* Initialize the buffers */
+  WebRtcSpl_MemSetW16(syntOutBuf, 0, LPC_FILTERORDER+STATE_SHORT_LEN_30MS);
+  syntOut = &syntOutBuf[LPC_FILTERORDER];
+  /* Start with zero state */
+  WebRtcSpl_MemSetW16(in_weightedVec, 0, LPC_FILTERORDER);
+
+  /* Perform the quantization loop in two sections of length quantLen[i],
+     where the perceptual weighting filter is updated at the subframe
+     border */
+
+  if (iLBC_encbits->state_first) {
+    quantLen[0]=SUBL;
+    quantLen[1]=iLBCenc_inst->state_short_len-SUBL;
+  } else {
+    quantLen[0]=iLBCenc_inst->state_short_len-SUBL;
+    quantLen[1]=SUBL;
+  }
+
+  /* Calculate the weighted residual, switch perceptual weighting
+     filter at the subframe border */
+  WebRtcSpl_FilterARFastQ12(
+      in, in_weighted,
+      weightDenum, LPC_FILTERORDER+1, quantLen[0]);
+  WebRtcSpl_FilterARFastQ12(
+      &in[quantLen[0]], &in_weighted[quantLen[0]],
+      &weightDenum[LPC_FILTERORDER+1], LPC_FILTERORDER+1, quantLen[1]);
+
+  WebRtcIlbcfix_AbsQuantLoop(
+      syntOut,
+      in_weighted,
+      weightDenum,
+      quantLen,
+      iLBC_encbits->idxVec);
+
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/abs_quant.h b/src/modules/audio_coding/codecs/ilbc/abs_quant.h
new file mode 100644
index 0000000..fa59593
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/abs_quant.h
@@ -0,0 +1,39 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_AbsQuant.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ABS_QUANT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ABS_QUANT_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  predictive noise shaping encoding of scaled start state
+ *  (subrutine for WebRtcIlbcfix_StateSearch)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_AbsQuant(
+    iLBC_Enc_Inst_t *iLBCenc_inst,
+    /* (i) Encoder instance */
+    iLBC_bits *iLBC_encbits, /* (i/o) Encoded bits (outputs idxForMax
+                                   and idxVec, uses state_first as
+                                   input) */
+    WebRtc_Word16 *in,     /* (i) vector to encode */
+    WebRtc_Word16 *weightDenum   /* (i) denominator of synthesis filter */
+                            );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/abs_quant_loop.c b/src/modules/audio_coding/codecs/ilbc/abs_quant_loop.c
new file mode 100644
index 0000000..4eebc3e
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/abs_quant_loop.c
@@ -0,0 +1,95 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_AbsQuantLoop.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "sort_sq.h"
+
+void WebRtcIlbcfix_AbsQuantLoop(
+    WebRtc_Word16 *syntOutIN,
+    WebRtc_Word16 *in_weightedIN,
+    WebRtc_Word16 *weightDenumIN,
+    WebRtc_Word16 *quantLenIN,
+    WebRtc_Word16 *idxVecIN
+                                )
+{
+  int n, k1, k2;
+  WebRtc_Word16 index;
+  WebRtc_Word32 toQW32;
+  WebRtc_Word32 toQ32;
+  WebRtc_Word16 tmp16a;
+  WebRtc_Word16 xq;
+
+  WebRtc_Word16 *syntOut   = syntOutIN;
+  WebRtc_Word16 *in_weighted  = in_weightedIN;
+  WebRtc_Word16 *weightDenum  = weightDenumIN;
+  WebRtc_Word16 *quantLen  = quantLenIN;
+  WebRtc_Word16 *idxVec   = idxVecIN;
+
+  n=0;
+
+  for(k1=0;k1<2;k1++) {
+    for(k2=0;k2<quantLen[k1];k2++){
+
+      /* Filter to get the predicted value */
+      WebRtcSpl_FilterARFastQ12(
+          syntOut, syntOut,
+          weightDenum, LPC_FILTERORDER+1, 1);
+
+      /* the quantizer */
+      toQW32 = (WebRtc_Word32)(*in_weighted) - (WebRtc_Word32)(*syntOut);
+
+      toQ32 = (((WebRtc_Word32)toQW32)<<2);
+
+      if (toQ32 > 32767) {
+        toQ32 = (WebRtc_Word32) 32767;
+      } else if (toQ32 < -32768) {
+        toQ32 = (WebRtc_Word32) -32768;
+      }
+
+      /* Quantize the state */
+      if (toQW32<(-7577)) {
+        /* To prevent negative overflow */
+        index=0;
+      } else if (toQW32>8151) {
+        /* To prevent positive overflow */
+        index=7;
+      } else {
+        /* Find the best quantization index
+           (state_sq3Tbl is in Q13 and toQ is in Q11)
+        */
+        WebRtcIlbcfix_SortSq(&xq, &index,
+                             (WebRtc_Word16)toQ32,
+                             WebRtcIlbcfix_kStateSq3, 8);
+      }
+
+      /* Store selected index */
+      (*idxVec++) = index;
+
+      /* Compute decoded sample and update of the prediction filter */
+      tmp16a = ((WebRtcIlbcfix_kStateSq3[index] + 2 ) >> 2);
+
+      *syntOut     = (WebRtc_Word16) (tmp16a + (WebRtc_Word32)(*in_weighted) - toQW32);
+
+      n++;
+      syntOut++; in_weighted++;
+    }
+    /* Update perceptual weighting filter at subframe border */
+    weightDenum += 11;
+  }
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/abs_quant_loop.h b/src/modules/audio_coding/codecs/ilbc/abs_quant_loop.h
new file mode 100644
index 0000000..f506e8e
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/abs_quant_loop.h
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_AbsQuantLoop.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ABS_QUANT_LOOP_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ABS_QUANT_LOOP_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  predictive noise shaping encoding of scaled start state
+ *  (subrutine for WebRtcIlbcfix_StateSearch)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_AbsQuantLoop(
+    WebRtc_Word16 *syntOutIN,
+    WebRtc_Word16 *in_weightedIN,
+    WebRtc_Word16 *weightDenumIN,
+    WebRtc_Word16 *quantLenIN,
+    WebRtc_Word16 *idxVecIN
+                                );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/augmented_cb_corr.c b/src/modules/audio_coding/codecs/ilbc/augmented_cb_corr.c
new file mode 100644
index 0000000..6011e92
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/augmented_cb_corr.c
@@ -0,0 +1,63 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_AugmentedCbCorr.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "augmented_cb_corr.h"
+
+void WebRtcIlbcfix_AugmentedCbCorr(
+    WebRtc_Word16 *target,   /* (i) Target vector */
+    WebRtc_Word16 *buffer,   /* (i) Memory buffer */
+    WebRtc_Word16 *interpSamples, /* (i) buffer with
+                                     interpolated samples */
+    WebRtc_Word32 *crossDot,  /* (o) The cross correlation between
+                                 the target and the Augmented
+                                 vector */
+    WebRtc_Word16 low,    /* (i) Lag to start from (typically
+                             20) */
+    WebRtc_Word16 high,   /* (i) Lag to end at (typically 39) */
+    WebRtc_Word16 scale)   /* (i) Scale factor to use for
+                              the crossDot */
+{
+  int lagcount;
+  WebRtc_Word16 ilow;
+  WebRtc_Word16 *targetPtr;
+  WebRtc_Word32 *crossDotPtr;
+  WebRtc_Word16 *iSPtr=interpSamples;
+
+  /* Calculate the correlation between the target and the
+     interpolated codebook. The correlation is calculated in
+     3 sections with the interpolated part in the middle */
+  crossDotPtr=crossDot;
+  for (lagcount=low; lagcount<=high; lagcount++) {
+
+    ilow = (WebRtc_Word16) (lagcount-4);
+
+    /* Compute dot product for the first (lagcount-4) samples */
+    (*crossDotPtr) = WebRtcSpl_DotProductWithScale(target, buffer-lagcount, ilow, scale);
+
+    /* Compute dot product on the interpolated samples */
+    (*crossDotPtr) += WebRtcSpl_DotProductWithScale(target+ilow, iSPtr, 4, scale);
+    targetPtr = target + lagcount;
+    iSPtr += lagcount-ilow;
+
+    /* Compute dot product for the remaining samples */
+    (*crossDotPtr) += WebRtcSpl_DotProductWithScale(targetPtr, buffer-lagcount, SUBL-lagcount, scale);
+    crossDotPtr++;
+  }
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/augmented_cb_corr.h b/src/modules/audio_coding/codecs/ilbc/augmented_cb_corr.h
new file mode 100644
index 0000000..8e097fe
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/augmented_cb_corr.h
@@ -0,0 +1,42 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_AugmentedCbCorr.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_AUGMENTED_CB_CORR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_AUGMENTED_CB_CORR_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Calculate correlation between target and Augmented codebooks
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_AugmentedCbCorr(
+    WebRtc_Word16 *target,   /* (i) Target vector */
+    WebRtc_Word16 *buffer,   /* (i) Memory buffer */
+    WebRtc_Word16 *interpSamples, /* (i) buffer with
+                                           interpolated samples */
+    WebRtc_Word32 *crossDot,  /* (o) The cross correlation between
+                                           the target and the Augmented
+                                           vector */
+    WebRtc_Word16 low,    /* (i) Lag to start from (typically
+                                                   20) */
+    WebRtc_Word16 high,   /* (i) Lag to end at (typically 39 */
+    WebRtc_Word16 scale);   /* (i) Scale factor to use for
+                                                   the crossDot */
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/bw_expand.c b/src/modules/audio_coding/codecs/ilbc/bw_expand.c
new file mode 100644
index 0000000..a2287aa
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/bw_expand.c
@@ -0,0 +1,42 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_BwExpand.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  lpc bandwidth expansion
+ *---------------------------------------------------------------*/
+
+/* The output is in the same domain as the input */
+void WebRtcIlbcfix_BwExpand(
+    WebRtc_Word16 *out, /* (o) the bandwidth expanded lpc coefficients */
+    WebRtc_Word16 *in,  /* (i) the lpc coefficients before bandwidth
+                                   expansion */
+    WebRtc_Word16 *coef, /* (i) the bandwidth expansion factor Q15 */
+    WebRtc_Word16 length /* (i) the length of lpc coefficient vectors */
+                            ) {
+  int i;
+
+  out[0] = in[0];
+  for (i = 1; i < length; i++) {
+    /* out[i] = coef[i] * in[i] with rounding.
+       in[] and out[] are in Q12 and coef[] is in Q15
+    */
+    out[i] = (WebRtc_Word16)((WEBRTC_SPL_MUL_16_16(coef[i], in[i])+16384)>>15);
+  }
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/bw_expand.h b/src/modules/audio_coding/codecs/ilbc/bw_expand.h
new file mode 100644
index 0000000..c9f3fab
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/bw_expand.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_BwExpand.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_BW_EXPAND_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_BW_EXPAND_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  lpc bandwidth expansion
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_BwExpand(
+    WebRtc_Word16 *out, /* (o) the bandwidth expanded lpc coefficients */
+    WebRtc_Word16 *in,  /* (i) the lpc coefficients before bandwidth
+                                   expansion */
+    WebRtc_Word16 *coef, /* (i) the bandwidth expansion factor Q15 */
+    WebRtc_Word16 length /* (i) the length of lpc coefficient vectors */
+                            );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/cb_construct.c b/src/modules/audio_coding/codecs/ilbc/cb_construct.c
new file mode 100644
index 0000000..094a7e4
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/cb_construct.c
@@ -0,0 +1,67 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbConstruct.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "gain_dequant.h"
+#include "get_cd_vec.h"
+
+/*----------------------------------------------------------------*
+ *  Construct decoded vector from codebook and gains.
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_CbConstruct(
+    WebRtc_Word16 *decvector,  /* (o) Decoded vector */
+    WebRtc_Word16 *index,   /* (i) Codebook indices */
+    WebRtc_Word16 *gain_index,  /* (i) Gain quantization indices */
+    WebRtc_Word16 *mem,   /* (i) Buffer for codevector construction */
+    WebRtc_Word16 lMem,   /* (i) Length of buffer */
+    WebRtc_Word16 veclen   /* (i) Length of vector */
+                               ){
+  int j;
+  WebRtc_Word16 gain[CB_NSTAGES];
+  /* Stack based */
+  WebRtc_Word16 cbvec0[SUBL];
+  WebRtc_Word16 cbvec1[SUBL];
+  WebRtc_Word16 cbvec2[SUBL];
+  WebRtc_Word32 a32;
+  WebRtc_Word16 *gainPtr;
+
+  /* gain de-quantization */
+
+  gain[0] = WebRtcIlbcfix_GainDequant(gain_index[0], 16384, 0);
+  gain[1] = WebRtcIlbcfix_GainDequant(gain_index[1], gain[0], 1);
+  gain[2] = WebRtcIlbcfix_GainDequant(gain_index[2], gain[1], 2);
+
+  /* codebook vector construction and construction of total vector */
+
+  /* Stack based */
+  WebRtcIlbcfix_GetCbVec(cbvec0, mem, index[0], lMem, veclen);
+  WebRtcIlbcfix_GetCbVec(cbvec1, mem, index[1], lMem, veclen);
+  WebRtcIlbcfix_GetCbVec(cbvec2, mem, index[2], lMem, veclen);
+
+  gainPtr = &gain[0];
+  for (j=0;j<veclen;j++) {
+    a32  = WEBRTC_SPL_MUL_16_16(*gainPtr++, cbvec0[j]);
+    a32 += WEBRTC_SPL_MUL_16_16(*gainPtr++, cbvec1[j]);
+    a32 += WEBRTC_SPL_MUL_16_16(*gainPtr, cbvec2[j]);
+    gainPtr -= 2;
+    decvector[j] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(a32 + 8192, 14);
+  }
+
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/cb_construct.h b/src/modules/audio_coding/codecs/ilbc/cb_construct.h
new file mode 100644
index 0000000..bec759f
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/cb_construct.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbConstruct.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_CONSTRUCT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_CONSTRUCT_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Construct decoded vector from codebook and gains.
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_CbConstruct(
+    WebRtc_Word16 *decvector,  /* (o) Decoded vector */
+    WebRtc_Word16 *index,   /* (i) Codebook indices */
+    WebRtc_Word16 *gain_index,  /* (i) Gain quantization indices */
+    WebRtc_Word16 *mem,   /* (i) Buffer for codevector construction */
+    WebRtc_Word16 lMem,   /* (i) Length of buffer */
+    WebRtc_Word16 veclen   /* (i) Length of vector */
+                               );
+
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/cb_mem_energy.c b/src/modules/audio_coding/codecs/ilbc/cb_mem_energy.c
new file mode 100644
index 0000000..8613fa2
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/cb_mem_energy.c
@@ -0,0 +1,79 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbMemEnergy.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "cb_mem_energy_calc.h"
+
+/*----------------------------------------------------------------*
+ *  Function WebRtcIlbcfix_CbMemEnergy computes the energy of all
+ * the vectors in the codebook memory that will be used in the
+ * following search for the best match.
+ *----------------------------------------------------------------*/
+
+void WebRtcIlbcfix_CbMemEnergy(
+    WebRtc_Word16 range,
+    WebRtc_Word16 *CB,   /* (i) The CB memory (1:st section) */
+    WebRtc_Word16 *filteredCB,  /* (i) The filtered CB memory (2:nd section) */
+    WebRtc_Word16 lMem,   /* (i) Length of the CB memory */
+    WebRtc_Word16 lTarget,   /* (i) Length of the target vector */
+    WebRtc_Word16 *energyW16,  /* (o) Energy in the CB vectors */
+    WebRtc_Word16 *energyShifts, /* (o) Shift value of the energy */
+    WebRtc_Word16 scale,   /* (i) The scaling of all energy values */
+    WebRtc_Word16 base_size  /* (i) Index to where the energy values should be stored */
+                               ) {
+  WebRtc_Word16 *ppi, *ppo, *pp;
+  WebRtc_Word32 energy, tmp32;
+
+  /* Compute the energy and store it in a vector. Also the
+   * corresponding shift values are stored. The energy values
+   * are reused in all three stages. */
+
+  /* Calculate the energy in the first block of 'lTarget' sampels. */
+  ppi = CB+lMem-lTarget-1;
+  ppo = CB+lMem-1;
+
+  pp=CB+lMem-lTarget;
+  energy = WebRtcSpl_DotProductWithScale( pp, pp, lTarget, scale);
+
+  /* Normalize the energy and store the number of shifts */
+  energyShifts[0] = (WebRtc_Word16)WebRtcSpl_NormW32(energy);
+  tmp32 = WEBRTC_SPL_LSHIFT_W32(energy, energyShifts[0]);
+  energyW16[0] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 16);
+
+  /* Compute the energy of the rest of the cb memory
+   * by step wise adding and subtracting the next
+   * sample and the last sample respectively. */
+  WebRtcIlbcfix_CbMemEnergyCalc(energy, range, ppi, ppo, energyW16, energyShifts, scale, 0);
+
+  /* Next, precompute the energy values for the filtered cb section */
+  energy=0;
+  pp=filteredCB+lMem-lTarget;
+
+  energy = WebRtcSpl_DotProductWithScale( pp, pp, lTarget, scale);
+
+  /* Normalize the energy and store the number of shifts */
+  energyShifts[base_size] = (WebRtc_Word16)WebRtcSpl_NormW32(energy);
+  tmp32 = WEBRTC_SPL_LSHIFT_W32(energy, energyShifts[base_size]);
+  energyW16[base_size] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 16);
+
+  ppi = filteredCB + lMem - 1 - lTarget;
+  ppo = filteredCB + lMem - 1;
+
+  WebRtcIlbcfix_CbMemEnergyCalc(energy, range, ppi, ppo, energyW16, energyShifts, scale, base_size);
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/cb_mem_energy.h b/src/modules/audio_coding/codecs/ilbc/cb_mem_energy.h
new file mode 100644
index 0000000..1aa2b7b
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/cb_mem_energy.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbMemEnergy.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_H_
+
+void WebRtcIlbcfix_CbMemEnergy(
+    WebRtc_Word16 range,
+    WebRtc_Word16 *CB,   /* (i) The CB memory (1:st section) */
+    WebRtc_Word16 *filteredCB,  /* (i) The filtered CB memory (2:nd section) */
+    WebRtc_Word16 lMem,   /* (i) Length of the CB memory */
+    WebRtc_Word16 lTarget,   /* (i) Length of the target vector */
+    WebRtc_Word16 *energyW16,  /* (o) Energy in the CB vectors */
+    WebRtc_Word16 *energyShifts, /* (o) Shift value of the energy */
+    WebRtc_Word16 scale,   /* (i) The scaling of all energy values */
+    WebRtc_Word16 base_size  /* (i) Index to where the energy values should be stored */
+                               );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.c b/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.c
new file mode 100644
index 0000000..0c6f479
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.c
@@ -0,0 +1,67 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbMemEnergyAugmentation.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+void WebRtcIlbcfix_CbMemEnergyAugmentation(
+    WebRtc_Word16 *interpSamples, /* (i) The interpolated samples */
+    WebRtc_Word16 *CBmem,   /* (i) The CB memory */
+    WebRtc_Word16 scale,   /* (i) The scaling of all energy values */
+    WebRtc_Word16 base_size,  /* (i) Index to where the energy values should be stored */
+    WebRtc_Word16 *energyW16,  /* (o) Energy in the CB vectors */
+    WebRtc_Word16 *energyShifts /* (o) Shift value of the energy */
+                                           ){
+  WebRtc_Word32 energy, tmp32;
+  WebRtc_Word16 *ppe, *pp, *interpSamplesPtr;
+  WebRtc_Word16 *CBmemPtr, lagcount;
+  WebRtc_Word16 *enPtr=&energyW16[base_size-20];
+  WebRtc_Word16 *enShPtr=&energyShifts[base_size-20];
+  WebRtc_Word32 nrjRecursive;
+
+  CBmemPtr = CBmem+147;
+  interpSamplesPtr = interpSamples;
+
+  /* Compute the energy for the first (low-5) noninterpolated samples */
+  nrjRecursive = WebRtcSpl_DotProductWithScale( CBmemPtr-19, CBmemPtr-19, 15, scale);
+  ppe = CBmemPtr - 20;
+
+  for (lagcount=20; lagcount<=39; lagcount++) {
+
+    /* Update the energy recursively to save complexity */
+    nrjRecursive = nrjRecursive +
+        WEBRTC_SPL_MUL_16_16_RSFT(*ppe, *ppe, scale);
+    ppe--;
+    energy = nrjRecursive;
+
+    /* interpolation */
+    energy += WebRtcSpl_DotProductWithScale(interpSamplesPtr, interpSamplesPtr, 4, scale);
+    interpSamplesPtr += 4;
+
+    /* Compute energy for the remaining samples */
+    pp = CBmemPtr - lagcount;
+    energy += WebRtcSpl_DotProductWithScale(pp, pp, SUBL-lagcount, scale);
+
+    /* Normalize the energy and store the number of shifts */
+    (*enShPtr) = (WebRtc_Word16)WebRtcSpl_NormW32(energy);
+    tmp32 = WEBRTC_SPL_LSHIFT_W32(energy, (*enShPtr));
+    (*enPtr) = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32, 16);
+    enShPtr++;
+    enPtr++;
+  }
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.h b/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.h
new file mode 100644
index 0000000..938b87e
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_augmentation.h
@@ -0,0 +1,31 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbMemEnergyAugmentation.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_AUGMENTATION_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_AUGMENTATION_H_
+
+void WebRtcIlbcfix_CbMemEnergyAugmentation(
+    WebRtc_Word16 *interpSamples, /* (i) The interpolated samples */
+    WebRtc_Word16 *CBmem,   /* (i) The CB memory */
+    WebRtc_Word16 scale,   /* (i) The scaling of all energy values */
+    WebRtc_Word16 base_size,  /* (i) Index to where the energy values should be stored */
+    WebRtc_Word16 *energyW16,  /* (o) Energy in the CB vectors */
+    WebRtc_Word16 *energyShifts /* (o) Shift value of the energy */
+                                           );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.c b/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.c
new file mode 100644
index 0000000..40bb708
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.c
@@ -0,0 +1,65 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbMemEnergyCalc.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/* Compute the energy of the rest of the cb memory
+ * by step wise adding and subtracting the next
+ * sample and the last sample respectively */
+void WebRtcIlbcfix_CbMemEnergyCalc(
+    WebRtc_Word32 energy,   /* (i) input start energy */
+    WebRtc_Word16 range,   /* (i) number of iterations */
+    WebRtc_Word16 *ppi,   /* (i) input pointer 1 */
+    WebRtc_Word16 *ppo,   /* (i) input pointer 2 */
+    WebRtc_Word16 *energyW16,  /* (o) Energy in the CB vectors */
+    WebRtc_Word16 *energyShifts, /* (o) Shift value of the energy */
+    WebRtc_Word16 scale,   /* (i) The scaling of all energy values */
+    WebRtc_Word16 base_size  /* (i) Index to where the energy values should be stored */
+                                   )
+{
+  WebRtc_Word16 j,shft;
+  WebRtc_Word32 tmp;
+  WebRtc_Word16 *eSh_ptr;
+  WebRtc_Word16 *eW16_ptr;
+
+
+  eSh_ptr  = &energyShifts[1+base_size];
+  eW16_ptr = &energyW16[1+base_size];
+
+  for(j=0;j<range-1;j++) {
+
+    /* Calculate next energy by a +/-
+       operation on the edge samples */
+    tmp  = WEBRTC_SPL_MUL_16_16(*ppi, *ppi);
+    tmp -= WEBRTC_SPL_MUL_16_16(*ppo, *ppo);
+    energy += WEBRTC_SPL_RSHIFT_W32(tmp, scale);
+    energy = WEBRTC_SPL_MAX(energy, 0);
+
+    ppi--;
+    ppo--;
+
+    /* Normalize the energy into a WebRtc_Word16 and store
+       the number of shifts */
+
+    shft = (WebRtc_Word16)WebRtcSpl_NormW32(energy);
+    *eSh_ptr++ = shft;
+
+    tmp = WEBRTC_SPL_LSHIFT_W32(energy, shft);
+    *eW16_ptr++ = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp, 16);
+  }
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.h b/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.h
new file mode 100644
index 0000000..ee2e285
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/cb_mem_energy_calc.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbMemEnergyCalc.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_CALC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_MEM_ENERGY_CALC_H_
+
+void WebRtcIlbcfix_CbMemEnergyCalc(
+    WebRtc_Word32 energy,   /* (i) input start energy */
+    WebRtc_Word16 range,   /* (i) number of iterations */
+    WebRtc_Word16 *ppi,   /* (i) input pointer 1 */
+    WebRtc_Word16 *ppo,   /* (i) input pointer 2 */
+    WebRtc_Word16 *energyW16,  /* (o) Energy in the CB vectors */
+    WebRtc_Word16 *energyShifts, /* (o) Shift value of the energy */
+    WebRtc_Word16 scale,   /* (i) The scaling of all energy values */
+    WebRtc_Word16 base_size  /* (i) Index to where the energy values should be stored */
+                                   );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/cb_search.c b/src/modules/audio_coding/codecs/ilbc/cb_search.c
new file mode 100644
index 0000000..c51ccf7
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/cb_search.c
@@ -0,0 +1,396 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbSearch.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "gain_quant.h"
+#include "filtered_cb_vecs.h"
+#include "constants.h"
+#include "cb_mem_energy.h"
+#include "interpolate_samples.h"
+#include "cb_mem_energy_augmentation.h"
+#include "cb_search_core.h"
+#include "energy_inverse.h"
+#include "augmented_cb_corr.h"
+#include "cb_update_best_index.h"
+#include "create_augmented_vec.h"
+
+/*----------------------------------------------------------------*
+ *  Search routine for codebook encoding and gain quantization.
+ *----------------------------------------------------------------*/
+
+void WebRtcIlbcfix_CbSearch(
+    iLBC_Enc_Inst_t *iLBCenc_inst,
+    /* (i) the encoder state structure */
+    WebRtc_Word16 *index,  /* (o) Codebook indices */
+    WebRtc_Word16 *gain_index, /* (o) Gain quantization indices */
+    WebRtc_Word16 *intarget, /* (i) Target vector for encoding */
+    WebRtc_Word16 *decResidual,/* (i) Decoded residual for codebook construction */
+    WebRtc_Word16 lMem,  /* (i) Length of buffer */
+    WebRtc_Word16 lTarget,  /* (i) Length of vector */
+    WebRtc_Word16 *weightDenum,/* (i) weighting filter coefficients in Q12 */
+    WebRtc_Word16 block  /* (i) the subblock number */
+                            ) {
+  WebRtc_Word16 i, j, stage, range;
+  WebRtc_Word16 *pp, scale, tmp;
+  WebRtc_Word16 bits, temp1, temp2;
+  WebRtc_Word16 base_size;
+  WebRtc_Word32 codedEner, targetEner;
+  WebRtc_Word16 gains[CB_NSTAGES+1];
+  WebRtc_Word16 *cb_vecPtr;
+  WebRtc_Word16 indexOffset, sInd, eInd;
+  WebRtc_Word32 CritMax=0;
+  WebRtc_Word16 shTotMax=WEBRTC_SPL_WORD16_MIN;
+  WebRtc_Word16 bestIndex=0;
+  WebRtc_Word16 bestGain=0;
+  WebRtc_Word16 indexNew, CritNewSh;
+  WebRtc_Word32 CritNew;
+  WebRtc_Word32 *cDotPtr;
+  WebRtc_Word16 noOfZeros;
+  WebRtc_Word16 *gainPtr;
+  WebRtc_Word32 t32, tmpW32;
+  WebRtc_Word16 *WebRtcIlbcfix_kGainSq5_ptr;
+  /* Stack based */
+  WebRtc_Word16 CBbuf[CB_MEML+LPC_FILTERORDER+CB_HALFFILTERLEN];
+  WebRtc_Word32 cDot[128];
+  WebRtc_Word32 Crit[128];
+  WebRtc_Word16 targetVec[SUBL+LPC_FILTERORDER];
+  WebRtc_Word16 cbvectors[CB_MEML];
+  WebRtc_Word16 codedVec[SUBL];
+  WebRtc_Word16 interpSamples[20*4];
+  WebRtc_Word16 interpSamplesFilt[20*4];
+  WebRtc_Word16 energyW16[CB_EXPAND*128];
+  WebRtc_Word16 energyShifts[CB_EXPAND*128];
+  WebRtc_Word16 *inverseEnergy=energyW16;   /* Reuse memory */
+  WebRtc_Word16 *inverseEnergyShifts=energyShifts; /* Reuse memory */
+  WebRtc_Word16 *buf = &CBbuf[LPC_FILTERORDER];
+  WebRtc_Word16 *target = &targetVec[LPC_FILTERORDER];
+  WebRtc_Word16 *aug_vec = (WebRtc_Word16*)cDot;   /* length [SUBL], reuse memory */
+
+  /* Determine size of codebook sections */
+
+  base_size=lMem-lTarget+1;
+  if (lTarget==SUBL) {
+    base_size=lMem-19;
+  }
+
+  /* weighting of the CB memory */
+  noOfZeros=lMem-WebRtcIlbcfix_kFilterRange[block];
+  WebRtcSpl_MemSetW16(&buf[-LPC_FILTERORDER], 0, noOfZeros+LPC_FILTERORDER);
+  WebRtcSpl_FilterARFastQ12(
+      decResidual+noOfZeros, buf+noOfZeros,
+      weightDenum, LPC_FILTERORDER+1, WebRtcIlbcfix_kFilterRange[block]);
+
+  /* weighting of the target vector */
+  WEBRTC_SPL_MEMCPY_W16(&target[-LPC_FILTERORDER], buf+noOfZeros+WebRtcIlbcfix_kFilterRange[block]-LPC_FILTERORDER, LPC_FILTERORDER);
+  WebRtcSpl_FilterARFastQ12(
+      intarget, target,
+      weightDenum, LPC_FILTERORDER+1, lTarget);
+
+  /* Store target, towards the end codedVec is calculated as
+     the initial target minus the remaining target */
+  WEBRTC_SPL_MEMCPY_W16(codedVec, target, lTarget);
+
+  /* Find the highest absolute value to calculate proper
+     vector scale factor (so that it uses 12 bits) */
+  temp1 = WebRtcSpl_MaxAbsValueW16(buf, (WebRtc_Word16)lMem);
+  temp2 = WebRtcSpl_MaxAbsValueW16(target, (WebRtc_Word16)lTarget);
+
+  if ((temp1>0)&&(temp2>0)) {
+    temp1 = WEBRTC_SPL_MAX(temp1, temp2);
+    scale = WebRtcSpl_GetSizeInBits(WEBRTC_SPL_MUL_16_16(temp1, temp1));
+  } else {
+    /* temp1 or temp2 is negative (maximum was -32768) */
+    scale = 30;
+  }
+
+  /* Scale to so that a mul-add 40 times does not overflow */
+  scale = scale - 25;
+  scale = WEBRTC_SPL_MAX(0, scale);
+
+  /* Compute energy of the original target */
+  targetEner = WebRtcSpl_DotProductWithScale(target, target, lTarget, scale);
+
+  /* Prepare search over one more codebook section. This section
+     is created by filtering the original buffer with a filter. */
+  WebRtcIlbcfix_FilteredCbVecs(cbvectors, buf, lMem, WebRtcIlbcfix_kFilterRange[block]);
+
+  range = WebRtcIlbcfix_kSearchRange[block][0];
+
+  if(lTarget == SUBL) {
+    /* Create the interpolated samples and store them for use in all stages */
+
+    /* First section, non-filtered half of the cb */
+    WebRtcIlbcfix_InterpolateSamples(interpSamples, buf, lMem);
+
+    /* Second section, filtered half of the cb */
+    WebRtcIlbcfix_InterpolateSamples(interpSamplesFilt, cbvectors, lMem);
+
+    /* Compute the CB vectors' energies for the first cb section (non-filtered) */
+    WebRtcIlbcfix_CbMemEnergyAugmentation(interpSamples, buf,
+                                          scale, 20, energyW16, energyShifts);
+
+    /* Compute the CB vectors' energies for the second cb section (filtered cb) */
+    WebRtcIlbcfix_CbMemEnergyAugmentation(interpSamplesFilt, cbvectors,
+                                          scale, (WebRtc_Word16)(base_size+20), energyW16, energyShifts);
+
+    /* Compute the CB vectors' energies and store them in the vector
+     * energyW16. Also the corresponding shift values are stored. The
+     * energy values are used in all three stages. */
+    WebRtcIlbcfix_CbMemEnergy(range, buf, cbvectors, lMem,
+                              lTarget, energyW16+20, energyShifts+20, scale, base_size);
+
+  } else {
+    /* Compute the CB vectors' energies and store them in the vector
+     * energyW16. Also the corresponding shift values are stored. The
+     * energy values are used in all three stages. */
+    WebRtcIlbcfix_CbMemEnergy(range, buf, cbvectors, lMem,
+                              lTarget, energyW16, energyShifts, scale, base_size);
+
+    /* Set the energy positions 58-63 and 122-127 to zero
+       (otherwise they are uninitialized) */
+    WebRtcSpl_MemSetW16(energyW16+range, 0, (base_size-range));
+    WebRtcSpl_MemSetW16(energyW16+range+base_size, 0, (base_size-range));
+  }
+
+  /* Calculate Inverse Energy (energyW16 is already normalized
+     and will contain the inverse energy in Q29 after this call */
+  WebRtcIlbcfix_EnergyInverse(energyW16, base_size*CB_EXPAND);
+
+  /* The gain value computed in the previous stage is used
+   * as an upper limit to what the next stage gain value
+   * is allowed to be. In stage 0, 16384 (1.0 in Q14) is used as
+   * the upper limit. */
+  gains[0] = 16384;
+
+  for (stage=0; stage<CB_NSTAGES; stage++) {
+
+    /* Set up memories */
+    range = WebRtcIlbcfix_kSearchRange[block][stage];
+
+    /* initialize search measures */
+    CritMax=0;
+    shTotMax=-100;
+    bestIndex=0;
+    bestGain=0;
+
+    /* loop over lags 40+ in the first codebook section, full search */
+    cb_vecPtr = buf+lMem-lTarget;
+
+    /* Calculate all the cross correlations (augmented part of CB) */
+    if (lTarget==SUBL) {
+      WebRtcIlbcfix_AugmentedCbCorr(target, buf+lMem,
+                                    interpSamples, cDot,
+                                    20, 39, scale);
+      cDotPtr=&cDot[20];
+    } else {
+      cDotPtr=cDot;
+    }
+    /* Calculate all the cross correlations (main part of CB) */
+    WebRtcSpl_CrossCorrelation(cDotPtr, target, cb_vecPtr, lTarget, range, scale, -1);
+
+    /* Adjust the search range for the augmented vectors */
+    if (lTarget==SUBL) {
+      range=WebRtcIlbcfix_kSearchRange[block][stage]+20;
+    } else {
+      range=WebRtcIlbcfix_kSearchRange[block][stage];
+    }
+
+    indexOffset=0;
+
+    /* Search for best index in this part of the vector */
+    WebRtcIlbcfix_CbSearchCore(
+        cDot, range, stage, inverseEnergy,
+        inverseEnergyShifts, Crit,
+        &indexNew, &CritNew, &CritNewSh);
+
+    /* Update the global best index and the corresponding gain */
+    WebRtcIlbcfix_CbUpdateBestIndex(
+        CritNew, CritNewSh, (WebRtc_Word16)(indexNew+indexOffset), cDot[indexNew+indexOffset],
+        inverseEnergy[indexNew+indexOffset], inverseEnergyShifts[indexNew+indexOffset],
+        &CritMax, &shTotMax, &bestIndex, &bestGain);
+
+    sInd=bestIndex-(WebRtc_Word16)(CB_RESRANGE>>1);
+    eInd=sInd+CB_RESRANGE;
+    if (sInd<0) {
+      eInd-=sInd;
+      sInd=0;
+    }
+    if (eInd>=range) {
+      eInd=range-1;
+      sInd=eInd-CB_RESRANGE;
+    }
+
+    range = WebRtcIlbcfix_kSearchRange[block][stage];
+
+    if (lTarget==SUBL) {
+      i=sInd;
+      if (sInd<20) {
+        WebRtcIlbcfix_AugmentedCbCorr(target, cbvectors+lMem,
+                                      interpSamplesFilt, cDot,
+                                      (WebRtc_Word16)(sInd+20), (WebRtc_Word16)(WEBRTC_SPL_MIN(39, (eInd+20))), scale);
+        i=20;
+      }
+
+      cDotPtr=&cDot[WEBRTC_SPL_MAX(0,(20-sInd))];
+      cb_vecPtr = cbvectors+lMem-20-i;
+
+      /* Calculate the cross correlations (main part of the filtered CB) */
+      WebRtcSpl_CrossCorrelation(cDotPtr, target, cb_vecPtr, lTarget, (WebRtc_Word16)(eInd-i+1), scale, -1);
+
+    } else {
+      cDotPtr = cDot;
+      cb_vecPtr = cbvectors+lMem-lTarget-sInd;
+
+      /* Calculate the cross correlations (main part of the filtered CB) */
+      WebRtcSpl_CrossCorrelation(cDotPtr, target, cb_vecPtr, lTarget, (WebRtc_Word16)(eInd-sInd+1), scale, -1);
+
+    }
+
+    /* Adjust the search range for the augmented vectors */
+    indexOffset=base_size+sInd;
+
+    /* Search for best index in this part of the vector */
+    WebRtcIlbcfix_CbSearchCore(
+        cDot, (WebRtc_Word16)(eInd-sInd+1), stage, inverseEnergy+indexOffset,
+        inverseEnergyShifts+indexOffset, Crit,
+        &indexNew, &CritNew, &CritNewSh);
+
+    /* Update the global best index and the corresponding gain */
+    WebRtcIlbcfix_CbUpdateBestIndex(
+        CritNew, CritNewSh, (WebRtc_Word16)(indexNew+indexOffset), cDot[indexNew],
+        inverseEnergy[indexNew+indexOffset], inverseEnergyShifts[indexNew+indexOffset],
+        &CritMax, &shTotMax, &bestIndex, &bestGain);
+
+    index[stage] = bestIndex;
+
+
+    bestGain = WebRtcIlbcfix_GainQuant(bestGain,
+                                       (WebRtc_Word16)WEBRTC_SPL_ABS_W16(gains[stage]), stage, &gain_index[stage]);
+
+    /* Extract the best (according to measure) codebook vector
+       Also adjust the index, so that the augmented vectors are last.
+       Above these vectors were first...
+    */
+
+    if(lTarget==(STATE_LEN-iLBCenc_inst->state_short_len)) {
+
+      if(index[stage]<base_size) {
+        pp=buf+lMem-lTarget-index[stage];
+      } else {
+        pp=cbvectors+lMem-lTarget-
+            index[stage]+base_size;
+      }
+
+    } else {
+
+      if (index[stage]<base_size) {
+        if (index[stage]>=20) {
+          /* Adjust index and extract vector */
+          index[stage]-=20;
+          pp=buf+lMem-lTarget-index[stage];
+        } else {
+          /* Adjust index and extract vector */
+          index[stage]+=(base_size-20);
+
+          WebRtcIlbcfix_CreateAugmentedVec((WebRtc_Word16)(index[stage]-base_size+40),
+                                           buf+lMem, aug_vec);
+          pp = aug_vec;
+
+        }
+      } else {
+
+        if ((index[stage] - base_size) >= 20) {
+          /* Adjust index and extract vector */
+          index[stage]-=20;
+          pp=cbvectors+lMem-lTarget-
+              index[stage]+base_size;
+        } else {
+          /* Adjust index and extract vector */
+          index[stage]+=(base_size-20);
+          WebRtcIlbcfix_CreateAugmentedVec((WebRtc_Word16)(index[stage]-2*base_size+40),
+                                           cbvectors+lMem, aug_vec);
+          pp = aug_vec;
+        }
+      }
+    }
+
+    /* Subtract the best codebook vector, according
+       to measure, from the target vector */
+
+    WebRtcSpl_AddAffineVectorToVector(target, pp, (WebRtc_Word16)(-bestGain), (WebRtc_Word32)8192, (WebRtc_Word16)14, (int)lTarget);
+
+    /* record quantized gain */
+    gains[stage+1] = bestGain;
+
+  } /* end of Main Loop. for (stage=0;... */
+
+  /* Calculte the coded vector (original target - what's left) */
+  for (i=0;i<lTarget;i++) {
+    codedVec[i]-=target[i];
+  }
+
+  /* Gain adjustment for energy matching */
+  codedEner = WebRtcSpl_DotProductWithScale(codedVec, codedVec, lTarget, scale);
+
+  j=gain_index[0];
+
+  temp1 = (WebRtc_Word16)WebRtcSpl_NormW32(codedEner);
+  temp2 = (WebRtc_Word16)WebRtcSpl_NormW32(targetEner);
+
+  if(temp1 < temp2) {
+    bits = 16 - temp1;
+  } else {
+    bits = 16 - temp2;
+  }
+
+  tmp = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(gains[1],gains[1], 14);
+
+  targetEner = WEBRTC_SPL_MUL_16_16(
+      WEBRTC_SPL_SHIFT_W32(targetEner, -bits), tmp);
+
+  tmpW32 = ((WebRtc_Word32)(gains[1]-1))<<1;
+
+  /* Pointer to the table that contains
+     gain_sq5TblFIX * gain_sq5TblFIX in Q14 */
+  gainPtr=(WebRtc_Word16*)WebRtcIlbcfix_kGainSq5Sq+gain_index[0];
+  temp1 = (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(codedEner, -bits);
+
+  WebRtcIlbcfix_kGainSq5_ptr = (WebRtc_Word16*)&WebRtcIlbcfix_kGainSq5[j];
+
+  /* targetEner and codedEner are in Q(-2*scale) */
+  for (i=gain_index[0];i<32;i++) {
+
+    /* Change the index if
+       (codedEnergy*gainTbl[i]*gainTbl[i])<(targetEn*gain[0]*gain[0]) AND
+       gainTbl[i] < 2*gain[0]
+    */
+
+    t32 = WEBRTC_SPL_MUL_16_16(temp1, (*gainPtr));
+    t32 = t32 - targetEner;
+    if (t32 < 0) {
+      if ((*WebRtcIlbcfix_kGainSq5_ptr) < tmpW32) {
+        j=i;
+        WebRtcIlbcfix_kGainSq5_ptr = (WebRtc_Word16*)&WebRtcIlbcfix_kGainSq5[i];
+      }
+    }
+    gainPtr++;
+  }
+  gain_index[0]=j;
+
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/cb_search.h b/src/modules/audio_coding/codecs/ilbc/cb_search.h
new file mode 100644
index 0000000..e4ad4b5
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/cb_search.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbSearch.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_SEARCH_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_SEARCH_H_
+
+void WebRtcIlbcfix_CbSearch(
+    iLBC_Enc_Inst_t *iLBCenc_inst,
+    /* (i) the encoder state structure */
+    WebRtc_Word16 *index,  /* (o) Codebook indices */
+    WebRtc_Word16 *gain_index, /* (o) Gain quantization indices */
+    WebRtc_Word16 *intarget, /* (i) Target vector for encoding */
+    WebRtc_Word16 *decResidual,/* (i) Decoded residual for codebook construction */
+    WebRtc_Word16 lMem,  /* (i) Length of buffer */
+    WebRtc_Word16 lTarget,  /* (i) Length of vector */
+    WebRtc_Word16 *weightDenum,/* (i) weighting filter coefficients in Q12 */
+    WebRtc_Word16 block  /* (i) the subblock number */
+                            );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/cb_search_core.c b/src/modules/audio_coding/codecs/ilbc/cb_search_core.c
new file mode 100644
index 0000000..711e2df
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/cb_search_core.c
@@ -0,0 +1,113 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbSearchCore.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+void WebRtcIlbcfix_CbSearchCore(
+    WebRtc_Word32 *cDot,    /* (i) Cross Correlation */
+    WebRtc_Word16 range,    /* (i) Search range */
+    WebRtc_Word16 stage,    /* (i) Stage of this search */
+    WebRtc_Word16 *inverseEnergy,  /* (i) Inversed energy */
+    WebRtc_Word16 *inverseEnergyShift, /* (i) Shifts of inversed energy
+                                           with the offset 2*16-29 */
+    WebRtc_Word32 *Crit,    /* (o) The criteria */
+    WebRtc_Word16 *bestIndex,   /* (o) Index that corresponds to
+                                                   maximum criteria (in this
+                                                   vector) */
+    WebRtc_Word32 *bestCrit,   /* (o) Value of critera for the
+                                                   chosen index */
+    WebRtc_Word16 *bestCritSh)   /* (o) The domain of the chosen
+                                                   criteria */
+{
+  WebRtc_Word32 maxW32, tmp32;
+  WebRtc_Word16 max, sh, tmp16;
+  int i;
+  WebRtc_Word32 *cDotPtr;
+  WebRtc_Word16 cDotSqW16;
+  WebRtc_Word16 *inverseEnergyPtr;
+  WebRtc_Word32 *critPtr;
+  WebRtc_Word16 *inverseEnergyShiftPtr;
+
+  /* Don't allow negative values for stage 0 */
+  if (stage==0) {
+    cDotPtr=cDot;
+    for (i=0;i<range;i++) {
+      *cDotPtr=WEBRTC_SPL_MAX(0, (*cDotPtr));
+      cDotPtr++;
+    }
+  }
+
+  /* Normalize cDot to WebRtc_Word16, calculate the square of cDot and store the upper WebRtc_Word16 */
+  maxW32 = WebRtcSpl_MaxAbsValueW32(cDot, range);
+
+  sh = (WebRtc_Word16)WebRtcSpl_NormW32(maxW32);
+  cDotPtr = cDot;
+  inverseEnergyPtr = inverseEnergy;
+  critPtr = Crit;
+  inverseEnergyShiftPtr=inverseEnergyShift;
+  max=WEBRTC_SPL_WORD16_MIN;
+
+  for (i=0;i<range;i++) {
+    /* Calculate cDot*cDot and put the result in a WebRtc_Word16 */
+    tmp32 = WEBRTC_SPL_LSHIFT_W32(*cDotPtr,sh);
+    tmp16 = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp32,16);
+    cDotSqW16 = (WebRtc_Word16)(((WebRtc_Word32)(tmp16)*(tmp16))>>16);
+
+    /* Calculate the criteria (cDot*cDot/energy) */
+    *critPtr=WEBRTC_SPL_MUL_16_16(cDotSqW16, (*inverseEnergyPtr));
+
+    /* Extract the maximum shift value under the constraint
+       that the criteria is not zero */
+    if ((*critPtr)!=0) {
+      max = WEBRTC_SPL_MAX((*inverseEnergyShiftPtr), max);
+    }
+
+    inverseEnergyPtr++;
+    inverseEnergyShiftPtr++;
+    critPtr++;
+    cDotPtr++;
+  }
+
+  /* If no max shifts still at initialization value, set shift to zero */
+  if (max==WEBRTC_SPL_WORD16_MIN) {
+    max = 0;
+  }
+
+  /* Modify the criterias, so that all of them use the same Q domain */
+  critPtr=Crit;
+  inverseEnergyShiftPtr=inverseEnergyShift;
+  for (i=0;i<range;i++) {
+    /* Guarantee that the shift value is less than 16
+       in order to simplify for DSP's (and guard against >31) */
+    tmp16 = WEBRTC_SPL_MIN(16, max-(*inverseEnergyShiftPtr));
+
+    (*critPtr)=WEBRTC_SPL_SHIFT_W32((*critPtr),-tmp16);
+    critPtr++;
+    inverseEnergyShiftPtr++;
+  }
+
+  /* Find the index of the best value */
+  *bestIndex = WebRtcSpl_MaxIndexW32(Crit, range);
+  *bestCrit = Crit[*bestIndex];
+
+  /* Calculate total shifts of this criteria */
+  *bestCritSh = 32 - 2*sh + max;
+
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/cb_search_core.h b/src/modules/audio_coding/codecs/ilbc/cb_search_core.h
new file mode 100644
index 0000000..e074c52
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/cb_search_core.h
@@ -0,0 +1,40 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbSearchCore.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_SEARCH_CORE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_SEARCH_CORE_H_
+
+#include "defines.h"
+
+void WebRtcIlbcfix_CbSearchCore(
+    WebRtc_Word32 *cDot,    /* (i) Cross Correlation */
+    WebRtc_Word16 range,    /* (i) Search range */
+    WebRtc_Word16 stage,    /* (i) Stage of this search */
+    WebRtc_Word16 *inverseEnergy,  /* (i) Inversed energy */
+    WebRtc_Word16 *inverseEnergyShift, /* (i) Shifts of inversed energy
+                                          with the offset 2*16-29 */
+    WebRtc_Word32 *Crit,    /* (o) The criteria */
+    WebRtc_Word16 *bestIndex,   /* (o) Index that corresponds to
+                                   maximum criteria (in this
+                                   vector) */
+    WebRtc_Word32 *bestCrit,   /* (o) Value of critera for the
+                                  chosen index */
+    WebRtc_Word16 *bestCritSh);  /* (o) The domain of the chosen
+                                    criteria */
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/cb_update_best_index.c b/src/modules/audio_coding/codecs/ilbc/cb_update_best_index.c
new file mode 100644
index 0000000..bf85408
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/cb_update_best_index.c
@@ -0,0 +1,89 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbUpdateBestIndex.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "cb_update_best_index.h"
+#include "constants.h"
+
+void WebRtcIlbcfix_CbUpdateBestIndex(
+    WebRtc_Word32 CritNew,    /* (i) New Potentially best Criteria */
+    WebRtc_Word16 CritNewSh,   /* (i) Shift value of above Criteria */
+    WebRtc_Word16 IndexNew,   /* (i) Index of new Criteria */
+    WebRtc_Word32 cDotNew,    /* (i) Cross dot of new index */
+    WebRtc_Word16 invEnergyNew,  /* (i) Inversed energy new index */
+    WebRtc_Word16 energyShiftNew,  /* (i) Energy shifts of new index */
+    WebRtc_Word32 *CritMax,   /* (i/o) Maximum Criteria (so far) */
+    WebRtc_Word16 *shTotMax,   /* (i/o) Shifts of maximum criteria */
+    WebRtc_Word16 *bestIndex,   /* (i/o) Index that corresponds to
+                                                   maximum criteria */
+    WebRtc_Word16 *bestGain)   /* (i/o) Gain in Q14 that corresponds
+                                                   to maximum criteria */
+{
+  WebRtc_Word16 shOld, shNew, tmp16;
+  WebRtc_Word16 scaleTmp;
+  WebRtc_Word32 gainW32;
+
+  /* Normalize the new and old Criteria to the same domain */
+  if (CritNewSh>(*shTotMax)) {
+    shOld=WEBRTC_SPL_MIN(31,CritNewSh-(*shTotMax));
+    shNew=0;
+  } else {
+    shOld=0;
+    shNew=WEBRTC_SPL_MIN(31,(*shTotMax)-CritNewSh);
+  }
+
+  /* Compare the two criterias. If the new one is better,
+     calculate the gain and store this index as the new best one
+  */
+
+  if (WEBRTC_SPL_RSHIFT_W32(CritNew, shNew)>
+      WEBRTC_SPL_RSHIFT_W32((*CritMax),shOld)) {
+
+    tmp16 = (WebRtc_Word16)WebRtcSpl_NormW32(cDotNew);
+    tmp16 = 16 - tmp16;
+
+    /* Calculate the gain in Q14
+       Compensate for inverseEnergyshift in Q29 and that the energy
+       value was stored in a WebRtc_Word16 (shifted down 16 steps)
+       => 29-14+16 = 31 */
+
+    scaleTmp = -energyShiftNew-tmp16+31;
+    scaleTmp = WEBRTC_SPL_MIN(31, scaleTmp);
+
+    gainW32 = WEBRTC_SPL_MUL_16_16_RSFT(
+        ((WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(cDotNew, -tmp16)), invEnergyNew, scaleTmp);
+
+    /* Check if criteria satisfies Gain criteria (max 1.3)
+       if it is larger set the gain to 1.3
+       (slightly different from FLP version)
+    */
+    if (gainW32>21299) {
+      *bestGain=21299;
+    } else if (gainW32<-21299) {
+      *bestGain=-21299;
+    } else {
+      *bestGain=(WebRtc_Word16)gainW32;
+    }
+
+    *CritMax=CritNew;
+    *shTotMax=CritNewSh;
+    *bestIndex = IndexNew;
+  }
+
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/cb_update_best_index.h b/src/modules/audio_coding/codecs/ilbc/cb_update_best_index.h
new file mode 100644
index 0000000..9015187
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/cb_update_best_index.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CbUpdateBestIndex.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_UPDATE_BEST_INDEX_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CB_UPDATE_BEST_INDEX_H_
+
+#include "defines.h"
+
+void WebRtcIlbcfix_CbUpdateBestIndex(
+    WebRtc_Word32 CritNew,    /* (i) New Potentially best Criteria */
+    WebRtc_Word16 CritNewSh,   /* (i) Shift value of above Criteria */
+    WebRtc_Word16 IndexNew,   /* (i) Index of new Criteria */
+    WebRtc_Word32 cDotNew,    /* (i) Cross dot of new index */
+    WebRtc_Word16 invEnergyNew,  /* (i) Inversed energy new index */
+    WebRtc_Word16 energyShiftNew,  /* (i) Energy shifts of new index */
+    WebRtc_Word32 *CritMax,   /* (i/o) Maximum Criteria (so far) */
+    WebRtc_Word16 *shTotMax,   /* (i/o) Shifts of maximum criteria */
+    WebRtc_Word16 *bestIndex,   /* (i/o) Index that corresponds to
+                                   maximum criteria */
+    WebRtc_Word16 *bestGain);   /* (i/o) Gain in Q14 that corresponds
+                                   to maximum criteria */
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/chebyshev.c b/src/modules/audio_coding/codecs/ilbc/chebyshev.c
new file mode 100644
index 0000000..90108ff
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/chebyshev.c
@@ -0,0 +1,82 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Chebyshev.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*------------------------------------------------------------------*
+ *  Calculate the Chevyshev polynomial series
+ *  F(w) = 2*exp(-j5w)*C(x)
+ *   C(x) = (T_0(x) + f(1)T_1(x) + ... + f(4)T_1(x) + f(5)/2)
+ *   T_i(x) is the i:th order Chebyshev polynomial
+ *------------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_Chebyshev(
+    /* (o) Result of C(x) */
+    WebRtc_Word16 x,  /* (i) Value to the Chevyshev polynomial */
+    WebRtc_Word16 *f  /* (i) The coefficients in the polynomial */
+                                      ) {
+  WebRtc_Word16 b1_high, b1_low; /* Use the high, low format to increase the accuracy */
+  WebRtc_Word32 b2;
+  WebRtc_Word32 tmp1W32;
+  WebRtc_Word32 tmp2W32;
+  int i;
+
+  b2 = (WebRtc_Word32)0x1000000; /* b2 = 1.0 (Q23) */
+  /* Calculate b1 = 2*x + f[1] */
+  tmp1W32 = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)x, 10);
+  tmp1W32 += WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)f[1], 14);
+
+  for (i = 2; i < 5; i++) {
+    tmp2W32 = tmp1W32;
+
+    /* Split b1 (in tmp1W32) into a high and low part */
+    b1_high = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp1W32, 16);
+    b1_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp1W32-WEBRTC_SPL_LSHIFT_W32(((WebRtc_Word32)b1_high),16), 1);
+
+    /* Calculate 2*x*b1-b2+f[i] */
+    tmp1W32 = WEBRTC_SPL_LSHIFT_W32( (WEBRTC_SPL_MUL_16_16(b1_high, x) +
+                                      WEBRTC_SPL_MUL_16_16_RSFT(b1_low, x, 15)), 2);
+
+    tmp1W32 -= b2;
+    tmp1W32 += WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)f[i], 14);
+
+    /* Update b2 for next round */
+    b2 = tmp2W32;
+  }
+
+  /* Split b1 (in tmp1W32) into a high and low part */
+  b1_high = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp1W32, 16);
+  b1_low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp1W32-WEBRTC_SPL_LSHIFT_W32(((WebRtc_Word32)b1_high),16), 1);
+
+  /* tmp1W32 = x*b1 - b2 + f[i]/2 */
+  tmp1W32 = WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_16(b1_high, x), 1) +
+      WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_16_RSFT(b1_low, x, 15), 1);
+
+  tmp1W32 -= b2;
+  tmp1W32 += WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)f[i], 13);
+
+  /* Handle overflows and set to maximum or minimum WebRtc_Word16 instead */
+  if (tmp1W32>((WebRtc_Word32)33553408)) {
+    return(WEBRTC_SPL_WORD16_MAX);
+  } else if (tmp1W32<((WebRtc_Word32)-33554432)) {
+    return(WEBRTC_SPL_WORD16_MIN);
+  } else {
+    return((WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmp1W32, 10));
+  }
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/chebyshev.h b/src/modules/audio_coding/codecs/ilbc/chebyshev.h
new file mode 100644
index 0000000..57aab99
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/chebyshev.h
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Chebyshev.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CHEBYSHEV_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CHEBYSHEV_H_
+
+#include "defines.h"
+
+/*------------------------------------------------------------------*
+ *  Calculate the Chevyshev polynomial series
+ *  F(w) = 2*exp(-j5w)*C(x)
+ *   C(x) = (T_0(x) + f(1)T_1(x) + ... + f(4)T_1(x) + f(5)/2)
+ *   T_i(x) is the i:th order Chebyshev polynomial
+ *------------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_Chebyshev(
+    /* (o) Result of C(x) */
+    WebRtc_Word16 x,  /* (i) Value to the Chevyshev polynomial */
+    WebRtc_Word16 *f  /* (i) The coefficients in the polynomial */
+                                      );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/comp_corr.c b/src/modules/audio_coding/codecs/ilbc/comp_corr.c
new file mode 100644
index 0000000..3d7f93e
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/comp_corr.c
@@ -0,0 +1,49 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CompCorr.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Compute cross correlation and pitch gain for pitch prediction
+ *  of last subframe at given lag.
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_CompCorr(
+    WebRtc_Word32 *corr, /* (o) cross correlation */
+    WebRtc_Word32 *ener, /* (o) energy */
+    WebRtc_Word16 *buffer, /* (i) signal buffer */
+    WebRtc_Word16 lag,  /* (i) pitch lag */
+    WebRtc_Word16 bLen, /* (i) length of buffer */
+    WebRtc_Word16 sRange, /* (i) correlation search length */
+    WebRtc_Word16 scale /* (i) number of rightshifts to use */
+                            ){
+  WebRtc_Word16 *w16ptr;
+
+  w16ptr=&buffer[bLen-sRange-lag];
+
+  /* Calculate correlation and energy */
+  (*corr)=WebRtcSpl_DotProductWithScale(&buffer[bLen-sRange], w16ptr, sRange, scale);
+  (*ener)=WebRtcSpl_DotProductWithScale(w16ptr, w16ptr, sRange, scale);
+
+  /* For zero energy set the energy to 0 in order to avoid potential
+     problems for coming divisions */
+  if (*ener == 0) {
+    *corr = 0;
+    *ener = 1;
+  }
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/comp_corr.h b/src/modules/audio_coding/codecs/ilbc/comp_corr.h
new file mode 100644
index 0000000..cd46532
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/comp_corr.h
@@ -0,0 +1,39 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CompCorr.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_COMP_CORR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_COMP_CORR_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Compute cross correlation and pitch gain for pitch prediction
+ *  of last subframe at given lag.
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_CompCorr(
+    WebRtc_Word32 *corr, /* (o) cross correlation */
+    WebRtc_Word32 *ener, /* (o) energy */
+    WebRtc_Word16 *buffer, /* (i) signal buffer */
+    WebRtc_Word16 lag,  /* (i) pitch lag */
+    WebRtc_Word16 bLen, /* (i) length of buffer */
+    WebRtc_Word16 sRange, /* (i) correlation search length */
+    WebRtc_Word16 scale /* (i) number of rightshifts to use */
+                            );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/complexityMeasures.m b/src/modules/audio_coding/codecs/ilbc/complexityMeasures.m
new file mode 100644
index 0000000..f768194
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/complexityMeasures.m
@@ -0,0 +1,49 @@
+clear;
+pack;
+%
+% Enter the path to YOUR executable and remember to define the perprocessor
+% variable PRINT_MIPS te get the instructions printed to the screen.
+%
+command = '!iLBCtest.exe 30 speechAndBGnoise.pcm out1.bit out1.pcm tlm10_30ms.dat';
+cout=' > st.txt';   %saves to matlab variable 'st'
+eval(strcat(command,cout));
+if(length(cout)>3)
+    load st.txt
+else
+    disp('No cout file to load')
+end
+
+% initialize vector to zero
+index = find(st(1:end,1)==-1);
+indexnonzero = find(st(1:end,1)>0);
+frames = length(index)-indexnonzero(1)+1;
+start = indexnonzero(1) - 1;
+functionOrder=max(st(:,2));
+new=zeros(frames,functionOrder);
+
+for i = 1:frames,
+    for j = index(start-1+i)+1:(index(start+i)-1),
+        new(i,st(j,2)) = new(i,st(j,2)) + st(j,1);
+    end
+end
+
+result=zeros(functionOrder,3);
+for i=1:functionOrder
+    nonzeroelements = find(new(1:end,i)>0);
+    result(i,1)=i;
+    
+    % Compute each function's mean complexity
+    % result(i,2)=(sum(new(nonzeroelements,i))/(length(nonzeroelements)*0.03))/1000000;
+    
+    % Compute each function's maximum complexity in encoding
+    % and decoding respectively and then add it together:
+    % result(i,3)=(max(new(1:end,i))/0.03)/1000000;
+    result(i,3)=(max(new(1:size(new,1)/2,i))/0.03)/1000000 + (max(new(size(new,1)/2+1:end,i))/0.03)/1000000;
+end
+
+result
+
+% Compute maximum complexity for a single frame (enc/dec separately and together)
+maxEncComplexityInAFrame = (max(sum(new(1:size(new,1)/2,:),2))/0.03)/1000000
+maxDecComplexityInAFrame = (max(sum(new(size(new,1)/2+1:end,:),2))/0.03)/1000000
+totalComplexity = maxEncComplexityInAFrame + maxDecComplexityInAFrame
\ No newline at end of file
diff --git a/src/modules/audio_coding/codecs/ilbc/constants.c b/src/modules/audio_coding/codecs/ilbc/constants.c
new file mode 100644
index 0000000..5ebe9be
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/constants.c
@@ -0,0 +1,666 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ constants.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/* HP Filters {b[0] b[1] b[2] -a[1] -a[2]} */
+
+const WebRtc_Word16 WebRtcIlbcfix_kHpInCoefs[5] = {3798, -7596, 3798, 7807, -3733};
+const WebRtc_Word16 WebRtcIlbcfix_kHpOutCoefs[5] = {3849, -7699, 3849, 7918, -3833};
+
+/* Window in Q11 to window the energies of the 5 choises (3 for 20ms) in the choise for
+   the 80 sample start state
+*/
+const WebRtc_Word16 WebRtcIlbcfix_kStartSequenceEnrgWin[NSUB_MAX-1]= {
+  1638, 1843, 2048, 1843, 1638
+};
+
+/* LP Filter coeffs used for downsampling */
+const WebRtc_Word16 WebRtcIlbcfix_kLpFiltCoefs[FILTERORDER_DS_PLUS1]= {
+  -273, 512, 1297, 1696, 1297, 512, -273
+};
+
+/* Constants used in the LPC calculations */
+
+/* Hanning LPC window (in Q15) */
+const WebRtc_Word16 WebRtcIlbcfix_kLpcWin[BLOCKL_MAX] = {
+  6, 22, 50, 89, 139, 200, 272, 355, 449, 554, 669, 795,
+  932, 1079, 1237, 1405, 1583, 1771, 1969, 2177, 2395, 2622, 2858, 3104,
+  3359, 3622, 3894, 4175, 4464, 4761, 5066, 5379, 5699, 6026, 6361, 6702,
+  7050, 7404, 7764, 8130, 8502, 8879, 9262, 9649, 10040, 10436, 10836, 11240,
+  11647, 12058, 12471, 12887, 13306, 13726, 14148, 14572, 14997, 15423, 15850, 16277,
+  16704, 17131, 17558, 17983, 18408, 18831, 19252, 19672, 20089, 20504, 20916, 21325,
+  21730, 22132, 22530, 22924, 23314, 23698, 24078, 24452, 24821, 25185, 25542, 25893,
+  26238, 26575, 26906, 27230, 27547, 27855, 28156, 28450, 28734, 29011, 29279, 29538,
+  29788, 30029, 30261, 30483, 30696, 30899, 31092, 31275, 31448, 31611, 31764, 31906,
+  32037, 32158, 32268, 32367, 32456, 32533, 32600, 32655, 32700, 32733, 32755, 32767,
+  32767, 32755, 32733, 32700, 32655, 32600, 32533, 32456, 32367, 32268, 32158, 32037,
+  31906, 31764, 31611, 31448, 31275, 31092, 30899, 30696, 30483, 30261, 30029, 29788,
+  29538, 29279, 29011, 28734, 28450, 28156, 27855, 27547, 27230, 26906, 26575, 26238,
+  25893, 25542, 25185, 24821, 24452, 24078, 23698, 23314, 22924, 22530, 22132, 21730,
+  21325, 20916, 20504, 20089, 19672, 19252, 18831, 18408, 17983, 17558, 17131, 16704,
+  16277, 15850, 15423, 14997, 14572, 14148, 13726, 13306, 12887, 12471, 12058, 11647,
+  11240, 10836, 10436, 10040, 9649, 9262, 8879, 8502, 8130, 7764, 7404, 7050,
+  6702, 6361, 6026, 5699, 5379, 5066, 4761, 4464, 4175, 3894, 3622, 3359,
+  3104, 2858, 2622, 2395, 2177, 1969, 1771, 1583, 1405, 1237, 1079, 932,
+  795, 669, 554, 449, 355, 272, 200, 139, 89, 50, 22, 6
+};
+
+/* Asymmetric LPC window (in Q15)*/
+const WebRtc_Word16 WebRtcIlbcfix_kLpcAsymWin[BLOCKL_MAX] = {
+  2, 7, 15, 27, 42, 60, 81, 106, 135, 166, 201, 239,
+  280, 325, 373, 424, 478, 536, 597, 661, 728, 798, 872, 949,
+  1028, 1111, 1197, 1287, 1379, 1474, 1572, 1674, 1778, 1885, 1995, 2108,
+  2224, 2343, 2465, 2589, 2717, 2847, 2980, 3115, 3254, 3395, 3538, 3684,
+  3833, 3984, 4138, 4295, 4453, 4615, 4778, 4944, 5112, 5283, 5456, 5631,
+  5808, 5987, 6169, 6352, 6538, 6725, 6915, 7106, 7300, 7495, 7692, 7891,
+  8091, 8293, 8497, 8702, 8909, 9118, 9328, 9539, 9752, 9966, 10182, 10398,
+  10616, 10835, 11055, 11277, 11499, 11722, 11947, 12172, 12398, 12625, 12852, 13080,
+  13309, 13539, 13769, 14000, 14231, 14463, 14695, 14927, 15160, 15393, 15626, 15859,
+  16092, 16326, 16559, 16792, 17026, 17259, 17492, 17725, 17957, 18189, 18421, 18653,
+  18884, 19114, 19344, 19573, 19802, 20030, 20257, 20483, 20709, 20934, 21157, 21380,
+  21602, 21823, 22042, 22261, 22478, 22694, 22909, 23123, 23335, 23545, 23755, 23962,
+  24168, 24373, 24576, 24777, 24977, 25175, 25371, 25565, 25758, 25948, 26137, 26323,
+  26508, 26690, 26871, 27049, 27225, 27399, 27571, 27740, 27907, 28072, 28234, 28394,
+  28552, 28707, 28860, 29010, 29157, 29302, 29444, 29584, 29721, 29855, 29987, 30115,
+  30241, 30364, 30485, 30602, 30717, 30828, 30937, 31043, 31145, 31245, 31342, 31436,
+  31526, 31614, 31699, 31780, 31858, 31933, 32005, 32074, 32140, 32202, 32261, 32317,
+  32370, 32420, 32466, 32509, 32549, 32585, 32618, 32648, 32675, 32698, 32718, 32734,
+  32748, 32758, 32764, 32767, 32767, 32667, 32365, 31863, 31164, 30274, 29197, 27939,
+  26510, 24917, 23170, 21281, 19261, 17121, 14876, 12540, 10126, 7650, 5126, 2571
+};
+
+/* Lag window for LPC (Q31) */
+const WebRtc_Word32 WebRtcIlbcfix_kLpcLagWin[LPC_FILTERORDER + 1]={
+  2147483647,   2144885453,   2137754373,   2125918626,   2109459810,
+  2088483140,   2063130336,   2033564590,   1999977009,   1962580174,
+  1921610283};
+
+/* WebRtcIlbcfix_kLpcChirpSyntDenum vector in Q15 corresponding
+ * floating point vector {1 0.9025 0.9025^2 0.9025^3 ...}
+ */
+const WebRtc_Word16 WebRtcIlbcfix_kLpcChirpSyntDenum[LPC_FILTERORDER + 1] = {
+  32767, 29573, 26690, 24087,
+  21739, 19619, 17707, 15980,
+  14422, 13016, 11747};
+
+/* WebRtcIlbcfix_kLpcChirpWeightDenum in Q15 corresponding to
+ * floating point vector {1 0.4222 0.4222^2... }
+ */
+const WebRtc_Word16 WebRtcIlbcfix_kLpcChirpWeightDenum[LPC_FILTERORDER + 1] = {
+  32767, 13835, 5841, 2466, 1041, 440,
+  186, 78,  33,  14,  6};
+
+/* LSF quantization Q13 domain */
+const WebRtc_Word16 WebRtcIlbcfix_kLsfCb[64 * 3 + 128 * 3 + 128 * 4] = {
+  1273,       2238,       3696,
+  3199,       5309,       8209,
+  3606,       5671,       7829,
+  2815,       5262,       8778,
+  2608,       4027,       5493,
+  1582,       3076,       5945,
+  2983,       4181,       5396,
+  2437,       4322,       6902,
+  1861,       2998,       4613,
+  2007,       3250,       5214,
+  1388,       2459,       4262,
+  2563,       3805,       5269,
+  2036,       3522,       5129,
+  1935,       4025,       6694,
+  2744,       5121,       7338,
+  2810,       4248,       5723,
+  3054,       5405,       7745,
+  1449,       2593,       4763,
+  3411,       5128,       6596,
+  2484,       4659,       7496,
+  1668,       2879,       4818,
+  1812,       3072,       5036,
+  1638,       2649,       3900,
+  2464,       3550,       4644,
+  1853,       2900,       4158,
+  2458,       4163,       5830,
+  2556,       4036,       6254,
+  2703,       4432,       6519,
+  3062,       4953,       7609,
+  1725,       3703,       6187,
+  2221,       3877,       5427,
+  2339,       3579,       5197,
+  2021,       4633,       7037,
+  2216,       3328,       4535,
+  2961,       4739,       6667,
+  2807,       3955,       5099,
+  2788,       4501,       6088,
+  1642,       2755,       4431,
+  3341,       5282,       7333,
+  2414,       3726,       5727,
+  1582,       2822,       5269,
+  2259,       3447,       4905,
+  3117,       4986,       7054,
+  1825,       3491,       5542,
+  3338,       5736,       8627,
+  1789,       3090,       5488,
+  2566,       3720,       4923,
+  2846,       4682,       7161,
+  1950,       3321,       5976,
+  1834,       3383,       6734,
+  3238,       4769,       6094,
+  2031,       3978,       5903,
+  1877,       4068,       7436,
+  2131,       4644,       8296,
+  2764,       5010,       8013,
+  2194,       3667,       6302,
+  2053,       3127,       4342,
+  3523,       6595,      10010,
+  3134,       4457,       5748,
+  3142,       5819,       9414,
+  2223,       4334,       6353,
+  2022,       3224,       4822,
+  2186,       3458,       5544,
+  2552,       4757,       6870,
+  10905,      12917,      14578,
+  9503,      11485,      14485,
+  9518,      12494,      14052,
+  6222,       7487,       9174,
+  7759,       9186,      10506,
+  8315,      12755,      14786,
+  9609,      11486,      13866,
+  8909,      12077,      13643,
+  7369,       9054,      11520,
+  9408,      12163,      14715,
+  6436,       9911,      12843,
+  7109,       9556,      11884,
+  7557,      10075,      11640,
+  6482,       9202,      11547,
+  6463,       7914,      10980,
+  8611,      10427,      12752,
+  7101,       9676,      12606,
+  7428,      11252,      13172,
+  10197,      12955,      15842,
+  7487,      10955,      12613,
+  5575,       7858,      13621,
+  7268,      11719,      14752,
+  7476,      11744,      13795,
+  7049,       8686,      11922,
+  8234,      11314,      13983,
+  6560,      11173,      14984,
+  6405,       9211,      12337,
+  8222,      12054,      13801,
+  8039,      10728,      13255,
+  10066,      12733,      14389,
+  6016,       7338,      10040,
+  6896,       8648,      10234,
+  7538,       9170,      12175,
+  7327,      12608,      14983,
+  10516,      12643,      15223,
+  5538,       7644,      12213,
+  6728,      12221,      14253,
+  7563,       9377,      12948,
+  8661,      11023,      13401,
+  7280,       8806,      11085,
+  7723,       9793,      12333,
+  12225,      14648,      16709,
+  8768,      13389,      15245,
+  10267,      12197,      13812,
+  5301,       7078,      11484,
+  7100,      10280,      11906,
+  8716,      12555,      14183,
+  9567,      12464,      15434,
+  7832,      12305,      14300,
+  7608,      10556,      12121,
+  8913,      11311,      12868,
+  7414,       9722,      11239,
+  8666,      11641,      13250,
+  9079,      10752,      12300,
+  8024,      11608,      13306,
+  10453,      13607,      16449,
+  8135,       9573,      10909,
+  6375,       7741,      10125,
+  10025,      12217,      14874,
+  6985,      11063,      14109,
+  9296,      13051,      14642,
+  8613,      10975,      12542,
+  6583,      10414,      13534,
+  6191,       9368,      13430,
+  5742,       6859,       9260,
+  7723,       9813,      13679,
+  8137,      11291,      12833,
+  6562,       8973,      10641,
+  6062,       8462,      11335,
+  6928,       8784,      12647,
+  7501,       8784,      10031,
+  8372,      10045,      12135,
+  8191,       9864,      12746,
+  5917,       7487,      10979,
+  5516,       6848,      10318,
+  6819,       9899,      11421,
+  7882,      12912,      15670,
+  9558,      11230,      12753,
+  7752,       9327,      11472,
+  8479,       9980,      11358,
+  11418,      14072,      16386,
+  7968,      10330,      14423,
+  8423,      10555,      12162,
+  6337,      10306,      14391,
+  8850,      10879,      14276,
+  6750,      11885,      15710,
+  7037,       8328,       9764,
+  6914,       9266,      13476,
+  9746,      13949,      15519,
+  11032,      14444,      16925,
+  8032,      10271,      11810,
+  10962,      13451,      15833,
+  10021,      11667,      13324,
+  6273,       8226,      12936,
+  8543,      10397,      13496,
+  7936,      10302,      12745,
+  6769,       8138,      10446,
+  6081,       7786,      11719,
+  8637,      11795,      14975,
+  8790,      10336,      11812,
+  7040,       8490,      10771,
+  7338,      10381,      13153,
+  6598,       7888,       9358,
+  6518,       8237,      12030,
+  9055,      10763,      12983,
+  6490,      10009,      12007,
+  9589,      12023,      13632,
+  6867,       9447,      10995,
+  7930,       9816,      11397,
+  10241,      13300,      14939,
+  5830,       8670,      12387,
+  9870,      11915,      14247,
+  9318,      11647,      13272,
+  6721,      10836,      12929,
+  6543,       8233,       9944,
+  8034,      10854,      12394,
+  9112,      11787,      14218,
+  9302,      11114,      13400,
+  9022,      11366,      13816,
+  6962,      10461,      12480,
+  11288,      13333,      15222,
+  7249,       8974,      10547,
+  10566,      12336,      14390,
+  6697,      11339,      13521,
+  11851,      13944,      15826,
+  6847,       8381,      11349,
+  7509,       9331,      10939,
+  8029,       9618,      11909,
+  13973,      17644,      19647,      22474,
+  14722,      16522,      20035,      22134,
+  16305,      18179,      21106,      23048,
+  15150,      17948,      21394,      23225,
+  13582,      15191,      17687,      22333,
+  11778,      15546,      18458,      21753,
+  16619,      18410,      20827,      23559,
+  14229,      15746,      17907,      22474,
+  12465,      15327,      20700,      22831,
+  15085,      16799,      20182,      23410,
+  13026,      16935,      19890,      22892,
+  14310,      16854,      19007,      22944,
+  14210,      15897,      18891,      23154,
+  14633,      18059,      20132,      22899,
+  15246,      17781,      19780,      22640,
+  16396,      18904,      20912,      23035,
+  14618,      17401,      19510,      21672,
+  15473,      17497,      19813,      23439,
+  18851,      20736,      22323,      23864,
+  15055,      16804,      18530,      20916,
+  16490,      18196,      19990,      21939,
+  11711,      15223,      21154,      23312,
+  13294,      15546,      19393,      21472,
+  12956,      16060,      20610,      22417,
+  11628,      15843,      19617,      22501,
+  14106,      16872,      19839,      22689,
+  15655,      18192,      20161,      22452,
+  12953,      15244,      20619,      23549,
+  15322,      17193,      19926,      21762,
+  16873,      18676,      20444,      22359,
+  14874,      17871,      20083,      21959,
+  11534,      14486,      19194,      21857,
+  17766,      19617,      21338,      23178,
+  13404,      15284,      19080,      23136,
+  15392,      17527,      19470,      21953,
+  14462,      16153,      17985,      21192,
+  17734,      19750,      21903,      23783,
+  16973,      19096,      21675,      23815,
+  16597,      18936,      21257,      23461,
+  15966,      17865,      20602,      22920,
+  15416,      17456,      20301,      22972,
+  18335,      20093,      21732,      23497,
+  15548,      17217,      20679,      23594,
+  15208,      16995,      20816,      22870,
+  13890,      18015,      20531,      22468,
+  13211,      15377,      19951,      22388,
+  12852,      14635,      17978,      22680,
+  16002,      17732,      20373,      23544,
+  11373,      14134,      19534,      22707,
+  17329,      19151,      21241,      23462,
+  15612,      17296,      19362,      22850,
+  15422,      19104,      21285,      23164,
+  13792,      17111,      19349,      21370,
+  15352,      17876,      20776,      22667,
+  15253,      16961,      18921,      22123,
+  14108,      17264,      20294,      23246,
+  15785,      17897,      20010,      21822,
+  17399,      19147,      20915,      22753,
+  13010,      15659,      18127,      20840,
+  16826,      19422,      22218,      24084,
+  18108,      20641,      22695,      24237,
+  18018,      20273,      22268,      23920,
+  16057,      17821,      21365,      23665,
+  16005,      17901,      19892,      23016,
+  13232,      16683,      21107,      23221,
+  13280,      16615,      19915,      21829,
+  14950,      18575,      20599,      22511,
+  16337,      18261,      20277,      23216,
+  14306,      16477,      21203,      23158,
+  12803,      17498,      20248,      22014,
+  14327,      17068,      20160,      22006,
+  14402,      17461,      21599,      23688,
+  16968,      18834,      20896,      23055,
+  15070,      17157,      20451,      22315,
+  15419,      17107,      21601,      23946,
+  16039,      17639,      19533,      21424,
+  16326,      19261,      21745,      23673,
+  16489,      18534,      21658,      23782,
+  16594,      18471,      20549,      22807,
+  18973,      21212,      22890,      24278,
+  14264,      18674,      21123,      23071,
+  15117,      16841,      19239,      23118,
+  13762,      15782,      20478,      23230,
+  14111,      15949,      20058,      22354,
+  14990,      16738,      21139,      23492,
+  13735,      16971,      19026,      22158,
+  14676,      17314,      20232,      22807,
+  16196,      18146,      20459,      22339,
+  14747,      17258,      19315,      22437,
+  14973,      17778,      20692,      23367,
+  15715,      17472,      20385,      22349,
+  15702,      18228,      20829,      23410,
+  14428,      16188,      20541,      23630,
+  16824,      19394,      21365,      23246,
+  13069,      16392,      18900,      21121,
+  12047,      16640,      19463,      21689,
+  14757,      17433,      19659,      23125,
+  15185,      16930,      19900,      22540,
+  16026,      17725,      19618,      22399,
+  16086,      18643,      21179,      23472,
+  15462,      17248,      19102,      21196,
+  17368,      20016,      22396,      24096,
+  12340,      14475,      19665,      23362,
+  13636,      16229,      19462,      22728,
+  14096,      16211,      19591,      21635,
+  12152,      14867,      19943,      22301,
+  14492,      17503,      21002,      22728,
+  14834,      16788,      19447,      21411,
+  14650,      16433,      19326,      22308,
+  14624,      16328,      19659,      23204,
+  13888,      16572,      20665,      22488,
+  12977,      16102,      18841,      22246,
+  15523,      18431,      21757,      23738,
+  14095,      16349,      18837,      20947,
+  13266,      17809,      21088,      22839,
+  15427,      18190,      20270,      23143,
+  11859,      16753,      20935,      22486,
+  12310,      17667,      21736,      23319,
+  14021,      15926,      18702,      22002,
+  12286,      15299,      19178,      21126,
+  15703,      17491,      21039,      23151,
+  12272,      14018,      18213,      22570,
+  14817,      16364,      18485,      22598,
+  17109,      19683,      21851,      23677,
+  12657,      14903,      19039,      22061,
+  14713,      16487,      20527,      22814,
+  14635,      16726,      18763,      21715,
+  15878,      18550,      20718,      22906
+};
+
+const WebRtc_Word16 WebRtcIlbcfix_kLsfDimCb[LSF_NSPLIT] = {3, 3, 4};
+const WebRtc_Word16 WebRtcIlbcfix_kLsfSizeCb[LSF_NSPLIT] = {64,128,128};
+
+const WebRtc_Word16 WebRtcIlbcfix_kLsfMean[LPC_FILTERORDER] = {
+  2308,       3652,       5434,       7885,
+  10255,      12559,      15160,      17513,
+  20328,      22752};
+
+const WebRtc_Word16 WebRtcIlbcfix_kLspMean[LPC_FILTERORDER] = {
+  31476, 29565, 25819, 18725, 10276,
+  1236, -9049, -17600, -25884, -30618
+};
+
+/* Q14 */
+const WebRtc_Word16 WebRtcIlbcfix_kLsfWeight20ms[4] = {12288, 8192, 4096, 0};
+const WebRtc_Word16 WebRtcIlbcfix_kLsfWeight30ms[6] = {8192, 16384, 10923, 5461, 0, 0};
+
+/*
+   cos(x) in Q15
+   WebRtcIlbcfix_kCos[i] = cos(pi*i/64.0)
+   used in WebRtcIlbcfix_Lsp2Lsf()
+*/
+
+const WebRtc_Word16 WebRtcIlbcfix_kCos[64] = {
+  32767,  32729,  32610,  32413,  32138,  31786,  31357,  30853,
+  30274,  29622,  28899,  28106,  27246,  26320,  25330,  24279,
+  23170,  22006,  20788,  19520,  18205,  16846,  15447,  14010,
+  12540,  11039,   9512,   7962,   6393,   4808,   3212,   1608,
+  0,  -1608,  -3212,  -4808,  -6393,  -7962,  -9512, -11039,
+  -12540, -14010, -15447, -16846, -18205, -19520, -20788, -22006,
+  -23170, -24279, -25330, -26320, -27246, -28106, -28899, -29622,
+  -30274, -30853, -31357, -31786, -32138, -32413, -32610, -32729
+};
+
+/*
+   Derivative in Q19, used to interpolate between the
+   WebRtcIlbcfix_kCos[] values to get a more exact y = cos(x)
+*/
+const WebRtc_Word16 WebRtcIlbcfix_kCosDerivative[64] = {
+  -632,  -1893,  -3150,  -4399,  -5638,  -6863,  -8072,  -9261,
+  -10428, -11570, -12684, -13767, -14817, -15832, -16808, -17744,
+  -18637, -19486, -20287, -21039, -21741, -22390, -22986, -23526,
+  -24009, -24435, -24801, -25108, -25354, -25540, -25664, -25726,
+  -25726, -25664, -25540, -25354, -25108, -24801, -24435, -24009,
+  -23526, -22986, -22390, -21741, -21039, -20287, -19486, -18637,
+  -17744, -16808, -15832, -14817, -13767, -12684, -11570, -10428,
+  -9261,  -8072,  -6863,  -5638,  -4399,  -3150,  -1893,   -632};
+
+/*
+  Table in Q15, used for a2lsf conversion
+  WebRtcIlbcfix_kCosGrid[i] = cos((2*pi*i)/(float)(2*COS_GRID_POINTS));
+*/
+
+const WebRtc_Word16 WebRtcIlbcfix_kCosGrid[COS_GRID_POINTS + 1] = {
+  32760, 32723, 32588, 32364, 32051, 31651, 31164, 30591,
+  29935, 29196, 28377, 27481, 26509, 25465, 24351, 23170,
+  21926, 20621, 19260, 17846, 16384, 14876, 13327, 11743,
+  10125, 8480, 6812, 5126, 3425, 1714, 0, -1714, -3425,
+  -5126, -6812, -8480, -10125, -11743, -13327, -14876,
+  -16384, -17846, -19260, -20621, -21926, -23170, -24351,
+  -25465, -26509, -27481, -28377, -29196, -29935, -30591,
+  -31164, -31651, -32051, -32364, -32588, -32723, -32760
+};
+
+/*
+   Derivative of y = acos(x) in Q12
+   used in WebRtcIlbcfix_Lsp2Lsf()
+*/
+
+const WebRtc_Word16 WebRtcIlbcfix_kAcosDerivative[64] = {
+  -26887, -8812, -5323, -3813, -2979, -2444, -2081, -1811,
+  -1608, -1450, -1322, -1219, -1132, -1059, -998, -946,
+  -901, -861, -827, -797, -772, -750, -730, -713,
+  -699, -687, -677, -668, -662, -657, -654, -652,
+  -652, -654, -657, -662, -668, -677, -687, -699,
+  -713, -730, -750, -772, -797, -827, -861, -901,
+  -946, -998, -1059, -1132, -1219, -1322, -1450, -1608,
+  -1811, -2081, -2444, -2979, -3813, -5323, -8812, -26887
+};
+
+
+/* Tables for quantization of start state */
+
+/* State quantization tables */
+const WebRtc_Word16 WebRtcIlbcfix_kStateSq3[8] = { /* Values in Q13 */
+  -30473, -17838, -9257, -2537,
+  3639, 10893, 19958, 32636
+};
+
+/* This table defines the limits for the selection of the freqg
+   less or equal than value 0 => index = 0
+   less or equal than value k => index = k
+*/
+const WebRtc_Word32 WebRtcIlbcfix_kChooseFrgQuant[64] = {
+  118, 163, 222, 305, 425, 604,
+  851, 1174, 1617, 2222, 3080, 4191,
+  5525, 7215, 9193, 11540, 14397, 17604,
+  21204, 25209, 29863, 35720, 42531, 50375,
+  59162, 68845, 80108, 93754, 110326, 129488,
+  150654, 174328, 201962, 233195, 267843, 308239,
+  354503, 405988, 464251, 531550, 608652, 697516,
+  802526, 928793, 1080145, 1258120, 1481106, 1760881,
+  2111111, 2546619, 3078825, 3748642, 4563142, 5573115,
+  6887601, 8582108, 10797296, 14014513, 18625760, 25529599,
+  37302935, 58819185, 109782723, WEBRTC_SPL_WORD32_MAX
+};
+
+const WebRtc_Word16 WebRtcIlbcfix_kScale[64] = {
+  /* Values in Q16 */
+  29485, 25003, 21345, 18316, 15578, 13128, 10973, 9310, 7955,
+  6762, 5789, 4877, 4255, 3699, 3258, 2904, 2595, 2328,
+  2123, 1932, 1785, 1631, 1493, 1370, 1260, 1167, 1083,
+  /* Values in Q21 */
+  32081, 29611, 27262, 25229, 23432, 21803, 20226, 18883, 17609,
+  16408, 15311, 14327, 13390, 12513, 11693, 10919, 10163, 9435,
+  8739, 8100, 7424, 6813, 6192, 5648, 5122, 4639, 4207, 3798,
+  3404, 3048, 2706, 2348, 2036, 1713, 1393, 1087, 747
+};
+
+/*frgq in fixpoint, but already computed like this:
+  for(i=0; i<64; i++){
+  a = (pow(10,frgq[i])/4.5);
+  WebRtcIlbcfix_kFrgQuantMod[i] = round(a);
+  }
+
+  Value 0 :36 in Q8
+  37:58 in Q5
+  59:63 in Q3
+*/
+const WebRtc_Word16 WebRtcIlbcfix_kFrgQuantMod[64] = {
+  /* First 37 values in Q8 */
+  569, 671, 786, 916, 1077, 1278,
+  1529, 1802, 2109, 2481, 2898, 3440,
+  3943, 4535, 5149, 5778, 6464, 7208,
+  7904, 8682, 9397, 10285, 11240, 12246,
+  13313, 14382, 15492, 16735, 18131, 19693,
+  21280, 22912, 24624, 26544, 28432, 30488,
+  32720,
+  /* 22 values in Q5 */
+  4383, 4684, 5012, 5363, 5739, 6146,
+  6603, 7113, 7679, 8285, 9040, 9850,
+  10838, 11882, 13103, 14467, 15950, 17669,
+  19712, 22016, 24800, 28576,
+  /* 5 values in Q3 */
+  8240, 9792, 12040, 15440, 22472
+};
+
+/* Constants for codebook search and creation */
+
+/* Expansion filter to get additional cb section.
+ * Q12 and reversed compared to flp
+ */
+const WebRtc_Word16 WebRtcIlbcfix_kCbFiltersRev[CB_FILTERLEN]={
+  -140, 446, -755, 3302, 2922, -590, 343, -138};
+
+/* Weighting coefficients for short lags.
+ * [0.2 0.4 0.6 0.8] in Q15 */
+const WebRtc_Word16 WebRtcIlbcfix_kAlpha[4]={
+  6554, 13107, 19661, 26214};
+
+/* Ranges for search and filters at different subframes */
+
+const WebRtc_Word16 WebRtcIlbcfix_kSearchRange[5][CB_NSTAGES]={
+  {58,58,58}, {108,44,44}, {108,108,108}, {108,108,108}, {108,108,108}};
+
+const WebRtc_Word16 WebRtcIlbcfix_kFilterRange[5]={63, 85, 125, 147, 147};
+
+/* Gain Quantization for the codebook gains of the 3 stages */
+
+/* Q14 (one extra value (max WebRtc_Word16) to simplify for the search) */
+const WebRtc_Word16 WebRtcIlbcfix_kGainSq3[9]={
+  -16384, -10813, -5407, 0, 4096, 8192,
+  12288, 16384, 32767};
+
+/* Q14 (one extra value (max WebRtc_Word16) to simplify for the search) */
+const WebRtc_Word16 WebRtcIlbcfix_kGainSq4[17]={
+  -17203, -14746, -12288, -9830, -7373, -4915,
+  -2458, 0, 2458, 4915, 7373, 9830,
+  12288, 14746, 17203, 19661, 32767};
+
+/* Q14 (one extra value (max WebRtc_Word16) to simplify for the search) */
+const WebRtc_Word16 WebRtcIlbcfix_kGainSq5[33]={
+  614,        1229,        1843,        2458,        3072,       3686,
+  4301,        4915,        5530,        6144,        6758,        7373,
+  7987,        8602,        9216,        9830,       10445,       11059,
+  11674,       12288,       12902,       13517,       14131,       14746,
+  15360,       15974,       16589,       17203,       17818,       18432,
+  19046,       19661,    32767};
+
+/* Q14 gain_sq5Tbl squared in Q14 */
+const WebRtc_Word16 WebRtcIlbcfix_kGainSq5Sq[32] = {
+  23,   92,    207,  368,  576,  829,
+  1129,  1474,   1866,  2304,  2787,  3317,
+  3893,  4516,   5184,  5897,  6658,  7464,
+  8318,  9216,   10160,  11151,  12187,  13271,
+  14400,  15574,   16796,  18062,  19377,  20736,
+  22140,  23593
+};
+
+const WebRtc_Word16* const WebRtcIlbcfix_kGain[3] =
+{WebRtcIlbcfix_kGainSq5, WebRtcIlbcfix_kGainSq4, WebRtcIlbcfix_kGainSq3};
+
+
+/* Tables for the Enhancer, using upsamling factor 4 (ENH_UPS0 = 4) */
+
+const WebRtc_Word16 WebRtcIlbcfix_kEnhPolyPhaser[ENH_UPS0][ENH_FLO_MULT2_PLUS1]={
+  {0,    0,    0, 4096,    0,  0,   0},
+  {64, -315, 1181, 3531, -436, 77, -64},
+  {97, -509, 2464, 2464, -509, 97, -97},
+  {77, -436, 3531, 1181, -315, 64, -77}
+};
+
+const WebRtc_Word16 WebRtcIlbcfix_kEnhWt[3] = {
+  4800, 16384, 27968 /* Q16 */
+};
+
+const WebRtc_Word16 WebRtcIlbcfix_kEnhPlocs[ENH_NBLOCKS_TOT] = {
+  160, 480, 800, 1120, 1440, 1760, 2080, 2400  /* Q(-2) */
+};
+
+/* PLC table */
+
+const WebRtc_Word16 WebRtcIlbcfix_kPlcPerSqr[6] = { /* Grid points for square of periodiciy in Q15 */
+  839, 1343, 2048, 2998, 4247, 5849
+};
+
+const WebRtc_Word16 WebRtcIlbcfix_kPlcPitchFact[6] = { /* Value of y=(x^4-0.4)/(0.7-0.4) in grid points in Q15 */
+  0, 5462, 10922, 16384, 21846, 27306
+};
+
+const WebRtc_Word16 WebRtcIlbcfix_kPlcPfSlope[6] = { /* Slope of y=(x^4-0.4)/(0.7-0.4) in Q11 */
+  26667, 18729, 13653, 10258, 7901, 6214
+};
diff --git a/src/modules/audio_coding/codecs/ilbc/constants.h b/src/modules/audio_coding/codecs/ilbc/constants.h
new file mode 100644
index 0000000..f787f74
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/constants.h
@@ -0,0 +1,92 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ constants.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CONSTANTS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CONSTANTS_H_
+
+#include "defines.h"
+#include "typedefs.h"
+
+/* high pass filters */
+
+extern const WebRtc_Word16 WebRtcIlbcfix_kHpInCoefs[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kHpOutCoefs[];
+
+/* Window for start state decision */
+extern const WebRtc_Word16 WebRtcIlbcfix_kStartSequenceEnrgWin[];
+
+/* low pass filter used for downsampling */
+extern const WebRtc_Word16 WebRtcIlbcfix_kLpFiltCoefs[];
+
+/* LPC analysis and quantization */
+
+extern const WebRtc_Word16 WebRtcIlbcfix_kLpcWin[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kLpcAsymWin[];
+extern const WebRtc_Word32 WebRtcIlbcfix_kLpcLagWin[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kLpcChirpSyntDenum[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kLpcChirpWeightDenum[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kLsfDimCb[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kLsfSizeCb[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kLsfCb[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kLsfWeight20ms[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kLsfWeight30ms[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kLsfMean[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kLspMean[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kCos[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kCosDerivative[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kCosGrid[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kAcosDerivative[];
+
+/* state quantization tables */
+
+extern const WebRtc_Word16 WebRtcIlbcfix_kStateSq3[];
+extern const WebRtc_Word32 WebRtcIlbcfix_kChooseFrgQuant[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kScale[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kFrgQuantMod[];
+
+/* Ranges for search and filters at different subframes */
+
+extern const WebRtc_Word16 WebRtcIlbcfix_kSearchRange[5][CB_NSTAGES];
+extern const WebRtc_Word16 WebRtcIlbcfix_kFilterRange[];
+
+/* gain quantization tables */
+
+extern const WebRtc_Word16 WebRtcIlbcfix_kGainSq3[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kGainSq4[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kGainSq5[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kGainSq5Sq[];
+extern const WebRtc_Word16* const WebRtcIlbcfix_kGain[];
+
+/* adaptive codebook definitions */
+
+extern const WebRtc_Word16 WebRtcIlbcfix_kCbFiltersRev[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kAlpha[];
+
+/* enhancer definitions */
+
+extern const WebRtc_Word16 WebRtcIlbcfix_kEnhPolyPhaser[ENH_UPS0][ENH_FLO_MULT2_PLUS1];
+extern const WebRtc_Word16 WebRtcIlbcfix_kEnhWt[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kEnhPlocs[];
+
+/* PLC tables */
+
+extern const WebRtc_Word16 WebRtcIlbcfix_kPlcPerSqr[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kPlcPitchFact[];
+extern const WebRtc_Word16 WebRtcIlbcfix_kPlcPfSlope[];
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/create_augmented_vec.c b/src/modules/audio_coding/codecs/ilbc/create_augmented_vec.c
new file mode 100644
index 0000000..f021c4d
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/create_augmented_vec.c
@@ -0,0 +1,57 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CreateAugmentedVec.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  Recreate a specific codebook vector from the augmented part.
+ *
+ *----------------------------------------------------------------*/
+
+void WebRtcIlbcfix_CreateAugmentedVec(
+    WebRtc_Word16 index,  /* (i) Index for the augmented vector to be created */
+    WebRtc_Word16 *buffer,  /* (i) Pointer to the end of the codebook memory that
+                                           is used for creation of the augmented codebook */
+    WebRtc_Word16 *cbVec  /* (o) The construced codebook vector */
+                                      ) {
+  WebRtc_Word16 ilow;
+  WebRtc_Word16 *ppo, *ppi;
+  WebRtc_Word16 cbVecTmp[4];
+
+  ilow = index-4;
+
+  /* copy the first noninterpolated part */
+  ppo = buffer-index;
+  WEBRTC_SPL_MEMCPY_W16(cbVec, ppo, index);
+
+  /* interpolation */
+  ppo = buffer - 4;
+  ppi = buffer - index - 4;
+
+  /* perform cbVec[ilow+k] = ((ppi[k]*alphaTbl[k])>>15) + ((ppo[k]*alphaTbl[3-k])>>15);
+     for k = 0..3
+  */
+  WebRtcSpl_ElementwiseVectorMult(&cbVec[ilow], ppi, WebRtcIlbcfix_kAlpha, 4, 15);
+  WebRtcSpl_ReverseOrderMultArrayElements(cbVecTmp, ppo, &WebRtcIlbcfix_kAlpha[3], 4, 15);
+  WebRtcSpl_AddVectorsAndShift(&cbVec[ilow], &cbVec[ilow], cbVecTmp, 4, 0);
+
+  /* copy the second noninterpolated part */
+  ppo = buffer - index;
+  WEBRTC_SPL_MEMCPY_W16(cbVec+index,ppo,(SUBL-index));
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/create_augmented_vec.h b/src/modules/audio_coding/codecs/ilbc/create_augmented_vec.h
new file mode 100644
index 0000000..970a9be
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/create_augmented_vec.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_CreateAugmentedVec.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CREATE_AUGMENTED_VEC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_CREATE_AUGMENTED_VEC_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Recreate a specific codebook vector from the augmented part.
+ *
+ *----------------------------------------------------------------*/
+
+void WebRtcIlbcfix_CreateAugmentedVec(
+    WebRtc_Word16 index,  /* (i) Index for the augmented vector to be created */
+    WebRtc_Word16 *buffer,  /* (i) Pointer to the end of the codebook memory that
+                                           is used for creation of the augmented codebook */
+    WebRtc_Word16 *cbVec  /* (o) The construced codebook vector */
+                                      );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/decode.c b/src/modules/audio_coding/codecs/ilbc/decode.c
new file mode 100644
index 0000000..3bca764
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/decode.c
@@ -0,0 +1,243 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Decode.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "simple_lsf_dequant.h"
+#include "decoder_interpolate_lsf.h"
+#include "index_conv_dec.h"
+#include "do_plc.h"
+#include "constants.h"
+#include "enhancer_interface.h"
+#include "xcorr_coef.h"
+#include "lsf_check.h"
+#include "decode_residual.h"
+#include "unpack_bits.h"
+#include "hp_output.h"
+#ifndef WEBRTC_BIG_ENDIAN
+#include "swap_bytes.h"
+#endif
+
+/*----------------------------------------------------------------*
+ *  main decoder function
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_DecodeImpl(
+    WebRtc_Word16 *decblock,    /* (o) decoded signal block */
+    const WebRtc_UWord16 *bytes, /* (i) encoded signal bits */
+    iLBC_Dec_Inst_t *iLBCdec_inst, /* (i/o) the decoder state
+                                           structure */
+    WebRtc_Word16 mode      /* (i) 0: bad packet, PLC,
+                                                                   1: normal */
+                           ) {
+  int i;
+  WebRtc_Word16 order_plus_one;
+
+  WebRtc_Word16 last_bit;
+  WebRtc_Word16 *data;
+  /* Stack based */
+  WebRtc_Word16 decresidual[BLOCKL_MAX];
+  WebRtc_Word16 PLCresidual[BLOCKL_MAX + LPC_FILTERORDER];
+  WebRtc_Word16 syntdenum[NSUB_MAX*(LPC_FILTERORDER+1)];
+  WebRtc_Word16 PLClpc[LPC_FILTERORDER + 1];
+#ifndef WEBRTC_BIG_ENDIAN
+  WebRtc_UWord16 swapped[NO_OF_WORDS_30MS];
+#endif
+  iLBC_bits *iLBCbits_inst = (iLBC_bits*)PLCresidual;
+
+  /* Reuse some buffers that are non overlapping in order to save stack memory */
+  data = &PLCresidual[LPC_FILTERORDER];
+
+  if (mode) { /* the data are good */
+
+    /* decode data */
+
+    /* Unpacketize bits into parameters */
+
+#ifndef WEBRTC_BIG_ENDIAN
+    WebRtcIlbcfix_SwapBytes(bytes, iLBCdec_inst->no_of_words, swapped);
+    last_bit = WebRtcIlbcfix_UnpackBits(swapped, iLBCbits_inst, iLBCdec_inst->mode);
+#else
+    last_bit = WebRtcIlbcfix_UnpackBits(bytes, iLBCbits_inst, iLBCdec_inst->mode);
+#endif
+
+    /* Check for bit errors */
+    if (iLBCbits_inst->startIdx<1)
+      mode = 0;
+    if ((iLBCdec_inst->mode==20) && (iLBCbits_inst->startIdx>3))
+      mode = 0;
+    if ((iLBCdec_inst->mode==30) && (iLBCbits_inst->startIdx>5))
+      mode = 0;
+    if (last_bit==1)
+      mode = 0;
+
+    if (mode) { /* No bit errors was detected, continue decoding */
+      /* Stack based */
+      WebRtc_Word16 lsfdeq[LPC_FILTERORDER*LPC_N_MAX];
+      WebRtc_Word16 weightdenum[(LPC_FILTERORDER + 1)*NSUB_MAX];
+
+      /* adjust index */
+      WebRtcIlbcfix_IndexConvDec(iLBCbits_inst->cb_index);
+
+      /* decode the lsf */
+      WebRtcIlbcfix_SimpleLsfDeQ(lsfdeq, (WebRtc_Word16*)(iLBCbits_inst->lsf), iLBCdec_inst->lpc_n);
+      WebRtcIlbcfix_LsfCheck(lsfdeq, LPC_FILTERORDER, iLBCdec_inst->lpc_n);
+      WebRtcIlbcfix_DecoderInterpolateLsp(syntdenum, weightdenum,
+                                          lsfdeq, LPC_FILTERORDER, iLBCdec_inst);
+
+      /* Decode the residual using the cb and gain indexes */
+      WebRtcIlbcfix_DecodeResidual(iLBCdec_inst, iLBCbits_inst, decresidual, syntdenum);
+
+      /* preparing the plc for a future loss! */
+      WebRtcIlbcfix_DoThePlc( PLCresidual, PLClpc, 0,
+                              decresidual, syntdenum + (LPC_FILTERORDER + 1)*(iLBCdec_inst->nsub - 1),
+                              (WebRtc_Word16)(iLBCdec_inst->last_lag), iLBCdec_inst);
+
+      /* Use the output from doThePLC */
+      WEBRTC_SPL_MEMCPY_W16(decresidual, PLCresidual, iLBCdec_inst->blockl);
+    }
+
+  }
+
+  if (mode == 0) {
+    /* the data is bad (either a PLC call
+     * was made or a bit error was detected)
+     */
+
+    /* packet loss conceal */
+
+    WebRtcIlbcfix_DoThePlc( PLCresidual, PLClpc, 1,
+                            decresidual, syntdenum, (WebRtc_Word16)(iLBCdec_inst->last_lag), iLBCdec_inst);
+
+    WEBRTC_SPL_MEMCPY_W16(decresidual, PLCresidual, iLBCdec_inst->blockl);
+
+    order_plus_one = LPC_FILTERORDER + 1;
+
+    for (i = 0; i < iLBCdec_inst->nsub; i++) {
+      WEBRTC_SPL_MEMCPY_W16(syntdenum+(i*order_plus_one),
+                            PLClpc, order_plus_one);
+    }
+  }
+
+  if ((*iLBCdec_inst).use_enhancer == 1) { /* Enhancer activated */
+
+    /* Update the filter and filter coefficients if there was a packet loss */
+    if (iLBCdec_inst->prev_enh_pl==2) {
+      for (i=0;i<iLBCdec_inst->nsub;i++) {
+        WEBRTC_SPL_MEMCPY_W16(&(iLBCdec_inst->old_syntdenum[i*(LPC_FILTERORDER+1)]),
+                              syntdenum, (LPC_FILTERORDER+1));
+      }
+    }
+
+    /* post filtering */
+    (*iLBCdec_inst).last_lag =
+        WebRtcIlbcfix_EnhancerInterface(data, decresidual, iLBCdec_inst);
+
+    /* synthesis filtering */
+
+    /* Set up the filter state */
+    WEBRTC_SPL_MEMCPY_W16(&data[-LPC_FILTERORDER], iLBCdec_inst->syntMem, LPC_FILTERORDER);
+
+    if (iLBCdec_inst->mode==20) {
+      /* Enhancer has 40 samples delay */
+      i=0;
+      WebRtcSpl_FilterARFastQ12(
+          data, data,
+          iLBCdec_inst->old_syntdenum + (i+iLBCdec_inst->nsub-1)*(LPC_FILTERORDER+1),
+          LPC_FILTERORDER+1, SUBL);
+
+      for (i=1; i < iLBCdec_inst->nsub; i++) {
+        WebRtcSpl_FilterARFastQ12(
+            data+i*SUBL, data+i*SUBL,
+            syntdenum+(i-1)*(LPC_FILTERORDER+1),
+            LPC_FILTERORDER+1, SUBL);
+      }
+
+    } else if (iLBCdec_inst->mode==30) {
+      /* Enhancer has 80 samples delay */
+      for (i=0; i < 2; i++) {
+        WebRtcSpl_FilterARFastQ12(
+            data+i*SUBL, data+i*SUBL,
+            iLBCdec_inst->old_syntdenum + (i+4)*(LPC_FILTERORDER+1),
+            LPC_FILTERORDER+1, SUBL);
+      }
+      for (i=2; i < iLBCdec_inst->nsub; i++) {
+        WebRtcSpl_FilterARFastQ12(
+            data+i*SUBL, data+i*SUBL,
+            syntdenum+(i-2)*(LPC_FILTERORDER+1),
+            LPC_FILTERORDER+1, SUBL);
+      }
+    }
+
+    /* Save the filter state */
+    WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->syntMem, &data[iLBCdec_inst->blockl-LPC_FILTERORDER], LPC_FILTERORDER);
+
+  } else { /* Enhancer not activated */
+    WebRtc_Word16 lag;
+
+    /* Find last lag (since the enhancer is not called to give this info) */
+    lag = 20;
+    if (iLBCdec_inst->mode==20) {
+      lag = (WebRtc_Word16)WebRtcIlbcfix_XcorrCoef(
+          &decresidual[iLBCdec_inst->blockl-60],
+          &decresidual[iLBCdec_inst->blockl-60-lag],
+          60,
+          80, lag, -1);
+    } else {
+      lag = (WebRtc_Word16)WebRtcIlbcfix_XcorrCoef(
+          &decresidual[iLBCdec_inst->blockl-ENH_BLOCKL],
+          &decresidual[iLBCdec_inst->blockl-ENH_BLOCKL-lag],
+          ENH_BLOCKL,
+          100, lag, -1);
+    }
+
+    /* Store lag (it is needed if next packet is lost) */
+    (*iLBCdec_inst).last_lag = (int)lag;
+
+    /* copy data and run synthesis filter */
+    WEBRTC_SPL_MEMCPY_W16(data, decresidual, iLBCdec_inst->blockl);
+
+    /* Set up the filter state */
+    WEBRTC_SPL_MEMCPY_W16(&data[-LPC_FILTERORDER], iLBCdec_inst->syntMem, LPC_FILTERORDER);
+
+    for (i=0; i < iLBCdec_inst->nsub; i++) {
+      WebRtcSpl_FilterARFastQ12(
+          data+i*SUBL, data+i*SUBL,
+          syntdenum + i*(LPC_FILTERORDER+1),
+          LPC_FILTERORDER+1, SUBL);
+    }
+
+    /* Save the filter state */
+    WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->syntMem, &data[iLBCdec_inst->blockl-LPC_FILTERORDER], LPC_FILTERORDER);
+  }
+
+  WEBRTC_SPL_MEMCPY_W16(decblock,data,iLBCdec_inst->blockl);
+
+  /* High pass filter the signal (with upscaling a factor 2 and saturation) */
+  WebRtcIlbcfix_HpOutput(decblock, (WebRtc_Word16*)WebRtcIlbcfix_kHpOutCoefs,
+                         iLBCdec_inst->hpimemy, iLBCdec_inst->hpimemx,
+                         iLBCdec_inst->blockl);
+
+  WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->old_syntdenum,
+                        syntdenum, iLBCdec_inst->nsub*(LPC_FILTERORDER+1));
+
+  iLBCdec_inst->prev_enh_pl=0;
+
+  if (mode==0) { /* PLC was used */
+    iLBCdec_inst->prev_enh_pl=1;
+  }
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/decode.h b/src/modules/audio_coding/codecs/ilbc/decode.h
new file mode 100644
index 0000000..2c9b5a2
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/decode.h
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Decode.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DECODE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DECODE_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  main decoder function
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_DecodeImpl(
+    WebRtc_Word16 *decblock,    /* (o) decoded signal block */
+    const WebRtc_UWord16 *bytes, /* (i) encoded signal bits */
+    iLBC_Dec_Inst_t *iLBCdec_inst, /* (i/o) the decoder state
+                                           structure */
+    WebRtc_Word16 mode      /* (i) 0: bad packet, PLC,
+                                                                   1: normal */
+                           );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/decode_residual.c b/src/modules/audio_coding/codecs/ilbc/decode_residual.c
new file mode 100644
index 0000000..4bc1cd3
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/decode_residual.c
@@ -0,0 +1,189 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_DecodeResidual.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "state_construct.h"
+#include "cb_construct.h"
+#include "index_conv_dec.h"
+#include "do_plc.h"
+#include "constants.h"
+#include "enhancer_interface.h"
+#include "xcorr_coef.h"
+#include "lsf_check.h"
+
+
+/*----------------------------------------------------------------*
+ *  frame residual decoder function (subrutine to iLBC_decode)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_DecodeResidual(
+    iLBC_Dec_Inst_t *iLBCdec_inst,
+    /* (i/o) the decoder state structure */
+    iLBC_bits *iLBC_encbits, /* (i/o) Encoded bits, which are used
+                                for the decoding  */
+    WebRtc_Word16 *decresidual,  /* (o) decoded residual frame */
+    WebRtc_Word16 *syntdenum   /* (i) the decoded synthesis filter
+                                  coefficients */
+                                  ) {
+  WebRtc_Word16 meml_gotten, Nfor, Nback, diff, start_pos;
+  WebRtc_Word16 subcount, subframe;
+  WebRtc_Word16 *reverseDecresidual = iLBCdec_inst->enh_buf; /* Reversed decoded data, used for decoding backwards in time (reuse memory in state) */
+  WebRtc_Word16 *memVec = iLBCdec_inst->prevResidual;  /* Memory for codebook and filter state (reuse memory in state) */
+  WebRtc_Word16 *mem = &memVec[CB_HALFFILTERLEN];   /* Memory for codebook */
+
+  diff = STATE_LEN - iLBCdec_inst->state_short_len;
+
+  if (iLBC_encbits->state_first == 1) {
+    start_pos = (iLBC_encbits->startIdx-1)*SUBL;
+  } else {
+    start_pos = (iLBC_encbits->startIdx-1)*SUBL + diff;
+  }
+
+  /* decode scalar part of start state */
+
+  WebRtcIlbcfix_StateConstruct(iLBC_encbits->idxForMax,
+                               iLBC_encbits->idxVec, &syntdenum[(iLBC_encbits->startIdx-1)*(LPC_FILTERORDER+1)],
+                               &decresidual[start_pos], iLBCdec_inst->state_short_len
+                               );
+
+  if (iLBC_encbits->state_first) { /* put adaptive part in the end */
+
+    /* setup memory */
+
+    WebRtcSpl_MemSetW16(mem, 0, (WebRtc_Word16)(CB_MEML-iLBCdec_inst->state_short_len));
+    WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-iLBCdec_inst->state_short_len, decresidual+start_pos,
+                          iLBCdec_inst->state_short_len);
+
+    /* construct decoded vector */
+
+    WebRtcIlbcfix_CbConstruct(
+        &decresidual[start_pos+iLBCdec_inst->state_short_len],
+        iLBC_encbits->cb_index, iLBC_encbits->gain_index,
+        mem+CB_MEML-ST_MEM_L_TBL,
+        ST_MEM_L_TBL, (WebRtc_Word16)diff
+                              );
+
+  }
+  else {/* put adaptive part in the beginning */
+
+    /* create reversed vectors for prediction */
+
+    WebRtcSpl_MemCpyReversedOrder(reverseDecresidual+diff,
+                                  &decresidual[(iLBC_encbits->startIdx+1)*SUBL-1-STATE_LEN], diff);
+
+    /* setup memory */
+
+    meml_gotten = iLBCdec_inst->state_short_len;
+    WebRtcSpl_MemCpyReversedOrder(mem+CB_MEML-1,
+                                  decresidual+start_pos, meml_gotten);
+    WebRtcSpl_MemSetW16(mem, 0, (WebRtc_Word16)(CB_MEML-meml_gotten));
+
+    /* construct decoded vector */
+
+    WebRtcIlbcfix_CbConstruct(
+        reverseDecresidual,
+        iLBC_encbits->cb_index, iLBC_encbits->gain_index,
+        mem+CB_MEML-ST_MEM_L_TBL,
+        ST_MEM_L_TBL, diff
+                              );
+
+    /* get decoded residual from reversed vector */
+
+    WebRtcSpl_MemCpyReversedOrder(&decresidual[start_pos-1],
+                                  reverseDecresidual, diff);
+  }
+
+  /* counter for predicted subframes */
+
+  subcount=1;
+
+  /* forward prediction of subframes */
+
+  Nfor = iLBCdec_inst->nsub-iLBC_encbits->startIdx-1;
+
+  if( Nfor > 0 ) {
+
+    /* setup memory */
+    WebRtcSpl_MemSetW16(mem, 0, CB_MEML-STATE_LEN);
+    WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-STATE_LEN,
+                          decresidual+(iLBC_encbits->startIdx-1)*SUBL, STATE_LEN);
+
+    /* loop over subframes to encode */
+
+    for (subframe=0; subframe<Nfor; subframe++) {
+
+      /* construct decoded vector */
+      WebRtcIlbcfix_CbConstruct(
+          &decresidual[(iLBC_encbits->startIdx+1+subframe)*SUBL],
+          iLBC_encbits->cb_index+subcount*CB_NSTAGES,
+          iLBC_encbits->gain_index+subcount*CB_NSTAGES,
+          mem, MEM_LF_TBL, SUBL
+                                );
+
+      /* update memory */
+      WEBRTC_SPL_MEMMOVE_W16(mem, mem+SUBL, CB_MEML-SUBL);
+      WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-SUBL,
+                            &decresidual[(iLBC_encbits->startIdx+1+subframe)*SUBL], SUBL);
+
+      subcount++;
+    }
+
+  }
+
+  /* backward prediction of subframes */
+
+  Nback = iLBC_encbits->startIdx-1;
+
+  if( Nback > 0 ){
+
+    /* setup memory */
+
+    meml_gotten = SUBL*(iLBCdec_inst->nsub+1-iLBC_encbits->startIdx);
+    if( meml_gotten > CB_MEML ) {
+      meml_gotten=CB_MEML;
+    }
+
+    WebRtcSpl_MemCpyReversedOrder(mem+CB_MEML-1,
+                                  decresidual+(iLBC_encbits->startIdx-1)*SUBL, meml_gotten);
+    WebRtcSpl_MemSetW16(mem, 0, (WebRtc_Word16)(CB_MEML-meml_gotten));
+
+    /* loop over subframes to decode */
+
+    for (subframe=0; subframe<Nback; subframe++) {
+
+      /* construct decoded vector */
+      WebRtcIlbcfix_CbConstruct(
+          &reverseDecresidual[subframe*SUBL],
+          iLBC_encbits->cb_index+subcount*CB_NSTAGES,
+          iLBC_encbits->gain_index+subcount*CB_NSTAGES,
+          mem, MEM_LF_TBL, SUBL
+                                );
+
+      /* update memory */
+      WEBRTC_SPL_MEMMOVE_W16(mem, mem+SUBL, CB_MEML-SUBL);
+      WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-SUBL,
+                            &reverseDecresidual[subframe*SUBL], SUBL);
+
+      subcount++;
+    }
+
+    /* get decoded residual from reversed vector */
+    WebRtcSpl_MemCpyReversedOrder(decresidual+SUBL*Nback-1,
+                                  reverseDecresidual, SUBL*Nback);
+  }
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/decode_residual.h b/src/modules/audio_coding/codecs/ilbc/decode_residual.h
new file mode 100644
index 0000000..ea7208a
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/decode_residual.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_DecodeResidual.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DECODE_RESIDUAL_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DECODE_RESIDUAL_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  frame residual decoder function (subrutine to iLBC_decode)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_DecodeResidual(
+    iLBC_Dec_Inst_t *iLBCdec_inst,
+    /* (i/o) the decoder state structure */
+    iLBC_bits *iLBC_encbits, /* (i/o) Encoded bits, which are used
+                                   for the decoding  */
+    WebRtc_Word16 *decresidual,  /* (o) decoded residual frame */
+    WebRtc_Word16 *syntdenum   /* (i) the decoded synthesis filter
+                                                   coefficients */
+                                  );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.c b/src/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.c
new file mode 100644
index 0000000..eee3105
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.c
@@ -0,0 +1,82 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_DecoderInterpolateLsp.c
+
+******************************************************************/
+
+#include "lsf_interpolate_to_poly_dec.h"
+#include "bw_expand.h"
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  obtain synthesis and weighting filters form lsf coefficients
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_DecoderInterpolateLsp(
+    WebRtc_Word16 *syntdenum,  /* (o) synthesis filter coefficients */
+    WebRtc_Word16 *weightdenum, /* (o) weighting denumerator
+                                   coefficients */
+    WebRtc_Word16 *lsfdeq,   /* (i) dequantized lsf coefficients */
+    WebRtc_Word16 length,   /* (i) length of lsf coefficient vector */
+    iLBC_Dec_Inst_t *iLBCdec_inst
+    /* (i) the decoder state structure */
+                                          ){
+  int  i, pos, lp_length;
+  WebRtc_Word16  lp[LPC_FILTERORDER + 1], *lsfdeq2;
+
+  lsfdeq2 = lsfdeq + length;
+  lp_length = length + 1;
+
+  if (iLBCdec_inst->mode==30) {
+    /* subframe 1: Interpolation between old and first LSF */
+
+    WebRtcIlbcfix_LspInterpolate2PolyDec(lp, (*iLBCdec_inst).lsfdeqold, lsfdeq,
+                                         WebRtcIlbcfix_kLsfWeight30ms[0], length);
+    WEBRTC_SPL_MEMCPY_W16(syntdenum,lp,lp_length);
+    WebRtcIlbcfix_BwExpand(weightdenum, lp, (WebRtc_Word16*)WebRtcIlbcfix_kLpcChirpSyntDenum, (WebRtc_Word16)lp_length);
+
+    /* subframes 2 to 6: interpolation between first and last LSF */
+
+    pos = lp_length;
+    for (i = 1; i < 6; i++) {
+      WebRtcIlbcfix_LspInterpolate2PolyDec(lp, lsfdeq, lsfdeq2,
+                                           WebRtcIlbcfix_kLsfWeight30ms[i], length);
+      WEBRTC_SPL_MEMCPY_W16(syntdenum + pos,lp,lp_length);
+      WebRtcIlbcfix_BwExpand(weightdenum + pos, lp,
+                             (WebRtc_Word16*)WebRtcIlbcfix_kLpcChirpSyntDenum, (WebRtc_Word16)lp_length);
+      pos += lp_length;
+    }
+  } else { /* iLBCdec_inst->mode=20 */
+    /* subframes 1 to 4: interpolation between old and new LSF */
+    pos = 0;
+    for (i = 0; i < iLBCdec_inst->nsub; i++) {
+      WebRtcIlbcfix_LspInterpolate2PolyDec(lp, iLBCdec_inst->lsfdeqold, lsfdeq,
+                                           WebRtcIlbcfix_kLsfWeight20ms[i], length);
+      WEBRTC_SPL_MEMCPY_W16(syntdenum+pos,lp,lp_length);
+      WebRtcIlbcfix_BwExpand(weightdenum+pos, lp,
+                             (WebRtc_Word16*)WebRtcIlbcfix_kLpcChirpSyntDenum, (WebRtc_Word16)lp_length);
+      pos += lp_length;
+    }
+  }
+
+  /* update memory */
+
+  if (iLBCdec_inst->mode==30) {
+    WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->lsfdeqold, lsfdeq2, length);
+  } else {
+    WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->lsfdeqold, lsfdeq, length);
+  }
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.h b/src/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.h
new file mode 100644
index 0000000..3896ca9
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/decoder_interpolate_lsf.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_DecoderInterpolateLsp.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DECODER_INTERPOLATE_LSF_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DECODER_INTERPOLATE_LSF_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  obtain synthesis and weighting filters form lsf coefficients
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_DecoderInterpolateLsp(
+    WebRtc_Word16 *syntdenum,  /* (o) synthesis filter coefficients */
+    WebRtc_Word16 *weightdenum, /* (o) weighting denumerator
+                                   coefficients */
+    WebRtc_Word16 *lsfdeq,   /* (i) dequantized lsf coefficients */
+    WebRtc_Word16 length,   /* (i) length of lsf coefficient vector */
+    iLBC_Dec_Inst_t *iLBCdec_inst
+    /* (i) the decoder state structure */
+                                          );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/defines.h b/src/modules/audio_coding/codecs/ilbc/defines.h
new file mode 100644
index 0000000..bdeba01
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/defines.h
@@ -0,0 +1,219 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ define.h
+
+******************************************************************/
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DEFINES_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DEFINES_H_
+
+#include "typedefs.h"
+#include "signal_processing_library.h"
+#include <string.h>
+
+/* general codec settings */
+
+#define FS       8000
+#define BLOCKL_20MS     160
+#define BLOCKL_30MS     240
+#define BLOCKL_MAX     240
+#define NSUB_20MS     4
+#define NSUB_30MS     6
+#define NSUB_MAX     6
+#define NASUB_20MS     2
+#define NASUB_30MS     4
+#define NASUB_MAX     4
+#define SUBL      40
+#define STATE_LEN     80
+#define STATE_SHORT_LEN_30MS  58
+#define STATE_SHORT_LEN_20MS  57
+
+/* LPC settings */
+
+#define LPC_FILTERORDER    10
+#define LPC_LOOKBACK    60
+#define LPC_N_20MS     1
+#define LPC_N_30MS     2
+#define LPC_N_MAX     2
+#define LPC_ASYMDIFF    20
+#define LSF_NSPLIT     3
+#define LSF_NUMBER_OF_STEPS   4
+#define LPC_HALFORDER    5
+#define COS_GRID_POINTS 60
+
+/* cb settings */
+
+#define CB_NSTAGES     3
+#define CB_EXPAND     2
+#define CB_MEML      147
+#define CB_FILTERLEN    (2*4)
+#define CB_HALFFILTERLEN   4
+#define CB_RESRANGE     34
+#define CB_MAXGAIN_FIXQ6   83 /* error = -0.24% */
+#define CB_MAXGAIN_FIXQ14   21299
+
+/* enhancer */
+
+#define ENH_BLOCKL     80  /* block length */
+#define ENH_BLOCKL_HALF    (ENH_BLOCKL/2)
+#define ENH_HL      3  /* 2*ENH_HL+1 is number blocks
+                                                                           in said second sequence */
+#define ENH_SLOP     2  /* max difference estimated and
+                                                                           correct pitch period */
+#define ENH_PLOCSL     8  /* pitch-estimates and
+                                                                           pitch-locations buffer length */
+#define ENH_OVERHANG    2
+#define ENH_UPS0     4  /* upsampling rate */
+#define ENH_FL0      3  /* 2*FLO+1 is the length of each filter */
+#define ENH_FLO_MULT2_PLUS1   7
+#define ENH_VECTL     (ENH_BLOCKL+2*ENH_FL0)
+#define ENH_CORRDIM     (2*ENH_SLOP+1)
+#define ENH_NBLOCKS     (BLOCKL/ENH_BLOCKL)
+#define ENH_NBLOCKS_EXTRA   5
+#define ENH_NBLOCKS_TOT    8 /* ENH_NBLOCKS+ENH_NBLOCKS_EXTRA */
+#define ENH_BUFL     (ENH_NBLOCKS_TOT)*ENH_BLOCKL
+#define ENH_BUFL_FILTEROVERHEAD  3
+#define ENH_A0      819   /* Q14 */
+#define ENH_A0_MINUS_A0A0DIV4  848256041 /* Q34 */
+#define ENH_A0DIV2     26843546 /* Q30 */
+
+/* PLC */
+
+/* Down sampling */
+
+#define FILTERORDER_DS_PLUS1  7
+#define DELAY_DS     3
+#define FACTOR_DS     2
+
+/* bit stream defs */
+
+#define NO_OF_BYTES_20MS   38
+#define NO_OF_BYTES_30MS   50
+#define NO_OF_WORDS_20MS   19
+#define NO_OF_WORDS_30MS   25
+#define STATE_BITS     3
+#define BYTE_LEN     8
+#define ULP_CLASSES     3
+
+/* help parameters */
+
+#define TWO_PI_FIX     25736 /* Q12 */
+
+/* Constants for codebook search and creation */
+
+#define ST_MEM_L_TBL  85
+#define MEM_LF_TBL  147
+
+
+/* Struct for the bits */
+typedef struct iLBC_bits_t_ {
+  WebRtc_Word16 lsf[LSF_NSPLIT*LPC_N_MAX];
+  WebRtc_Word16 cb_index[CB_NSTAGES*(NASUB_MAX+1)];  /* First CB_NSTAGES values contains extra CB index */
+  WebRtc_Word16 gain_index[CB_NSTAGES*(NASUB_MAX+1)]; /* First CB_NSTAGES values contains extra CB gain */
+  WebRtc_Word16 idxForMax;
+  WebRtc_Word16 state_first;
+  WebRtc_Word16 idxVec[STATE_SHORT_LEN_30MS];
+  WebRtc_Word16 firstbits;
+  WebRtc_Word16 startIdx;
+} iLBC_bits;
+
+/* type definition encoder instance */
+typedef struct iLBC_Enc_Inst_t_ {
+
+  /* flag for frame size mode */
+  WebRtc_Word16 mode;
+
+  /* basic parameters for different frame sizes */
+  WebRtc_Word16 blockl;
+  WebRtc_Word16 nsub;
+  WebRtc_Word16 nasub;
+  WebRtc_Word16 no_of_bytes, no_of_words;
+  WebRtc_Word16 lpc_n;
+  WebRtc_Word16 state_short_len;
+
+  /* analysis filter state */
+  WebRtc_Word16 anaMem[LPC_FILTERORDER];
+
+  /* Fix-point old lsf parameters for interpolation */
+  WebRtc_Word16 lsfold[LPC_FILTERORDER];
+  WebRtc_Word16 lsfdeqold[LPC_FILTERORDER];
+
+  /* signal buffer for LP analysis */
+  WebRtc_Word16 lpc_buffer[LPC_LOOKBACK + BLOCKL_MAX];
+
+  /* state of input HP filter */
+  WebRtc_Word16 hpimemx[2];
+  WebRtc_Word16 hpimemy[4];
+
+#ifdef SPLIT_10MS
+  WebRtc_Word16 weightdenumbuf[66];
+  WebRtc_Word16 past_samples[160];
+  WebRtc_UWord16 bytes[25];
+  WebRtc_Word16 section;
+  WebRtc_Word16 Nfor_flag;
+  WebRtc_Word16 Nback_flag;
+  WebRtc_Word16 start_pos;
+  WebRtc_Word16 diff;
+#endif
+
+} iLBC_Enc_Inst_t;
+
+/* type definition decoder instance */
+typedef struct iLBC_Dec_Inst_t_ {
+
+  /* flag for frame size mode */
+  WebRtc_Word16 mode;
+
+  /* basic parameters for different frame sizes */
+  WebRtc_Word16 blockl;
+  WebRtc_Word16 nsub;
+  WebRtc_Word16 nasub;
+  WebRtc_Word16 no_of_bytes, no_of_words;
+  WebRtc_Word16 lpc_n;
+  WebRtc_Word16 state_short_len;
+
+  /* synthesis filter state */
+  WebRtc_Word16 syntMem[LPC_FILTERORDER];
+
+  /* old LSF for interpolation */
+  WebRtc_Word16 lsfdeqold[LPC_FILTERORDER];
+
+  /* pitch lag estimated in enhancer and used in PLC */
+  int last_lag;
+
+  /* PLC state information */
+  int consPLICount, prev_enh_pl;
+  WebRtc_Word16 perSquare;
+
+  WebRtc_Word16 prevScale, prevPLI;
+  WebRtc_Word16 prevLag, prevLpc[LPC_FILTERORDER+1];
+  WebRtc_Word16 prevResidual[NSUB_MAX*SUBL];
+  WebRtc_Word16 seed;
+
+  /* previous synthesis filter parameters */
+
+  WebRtc_Word16 old_syntdenum[(LPC_FILTERORDER + 1)*NSUB_MAX];
+
+  /* state of output HP filter */
+  WebRtc_Word16 hpimemx[2];
+  WebRtc_Word16 hpimemy[4];
+
+  /* enhancer state information */
+  int use_enhancer;
+  WebRtc_Word16 enh_buf[ENH_BUFL+ENH_BUFL_FILTEROVERHEAD];
+  WebRtc_Word16 enh_period[ENH_NBLOCKS_TOT];
+
+} iLBC_Dec_Inst_t;
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/do_plc.c b/src/modules/audio_coding/codecs/ilbc/do_plc.c
new file mode 100644
index 0000000..0dfae2b
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/do_plc.c
@@ -0,0 +1,308 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_DoThePlc.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "comp_corr.h"
+#include "bw_expand.h"
+
+/*----------------------------------------------------------------*
+ *  Packet loss concealment routine. Conceals a residual signal
+ *  and LP parameters. If no packet loss, update state.
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_DoThePlc(
+    WebRtc_Word16 *PLCresidual,  /* (o) concealed residual */
+    WebRtc_Word16 *PLClpc,    /* (o) concealed LP parameters */
+    WebRtc_Word16 PLI,     /* (i) packet loss indicator
+                                                           0 - no PL, 1 = PL */
+    WebRtc_Word16 *decresidual,  /* (i) decoded residual */
+    WebRtc_Word16 *lpc,    /* (i) decoded LPC (only used for no PL) */
+    WebRtc_Word16 inlag,    /* (i) pitch lag */
+    iLBC_Dec_Inst_t *iLBCdec_inst
+    /* (i/o) decoder instance */
+                            ){
+  WebRtc_Word16 i, pick;
+  WebRtc_Word32 cross, ener, cross_comp, ener_comp = 0;
+  WebRtc_Word32 measure, maxMeasure, energy;
+  WebRtc_Word16 max, crossSquareMax, crossSquare;
+  WebRtc_Word16 j, lag, tmp1, tmp2, randlag;
+  WebRtc_Word16 shift1, shift2, shift3, shiftMax;
+  WebRtc_Word16 scale3;
+  WebRtc_Word16 corrLen;
+  WebRtc_Word32 tmpW32, tmp2W32;
+  WebRtc_Word16 use_gain;
+  WebRtc_Word16 tot_gain;
+  WebRtc_Word16 max_perSquare;
+  WebRtc_Word16 scale1, scale2;
+  WebRtc_Word16 totscale;
+  WebRtc_Word32 nom;
+  WebRtc_Word16 denom;
+  WebRtc_Word16 pitchfact;
+  WebRtc_Word16 use_lag;
+  int ind;
+  WebRtc_Word16 randvec[BLOCKL_MAX];
+
+  /* Packet Loss */
+  if (PLI == 1) {
+
+    (*iLBCdec_inst).consPLICount += 1;
+
+    /* if previous frame not lost,
+       determine pitch pred. gain */
+
+    if (iLBCdec_inst->prevPLI != 1) {
+
+      /* Maximum 60 samples are correlated, preserve as high accuracy
+         as possible without getting overflow */
+      max = WebRtcSpl_MaxAbsValueW16((*iLBCdec_inst).prevResidual, (WebRtc_Word16)iLBCdec_inst->blockl);
+      scale3 = (WebRtcSpl_GetSizeInBits(max)<<1) - 25;
+      if (scale3 < 0) {
+        scale3 = 0;
+      }
+
+      /* Store scale for use when interpolating between the
+       * concealment and the received packet */
+      iLBCdec_inst->prevScale = scale3;
+
+      /* Search around the previous lag +/-3 to find the
+         best pitch period */
+      lag = inlag - 3;
+
+      /* Guard against getting outside the frame */
+      corrLen = WEBRTC_SPL_MIN(60, iLBCdec_inst->blockl-(inlag+3));
+
+      WebRtcIlbcfix_CompCorr( &cross, &ener,
+                              iLBCdec_inst->prevResidual, lag, iLBCdec_inst->blockl, corrLen, scale3);
+
+      /* Normalize and store cross^2 and the number of shifts */
+      shiftMax = WebRtcSpl_GetSizeInBits(WEBRTC_SPL_ABS_W32(cross))-15;
+      crossSquareMax = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(WEBRTC_SPL_SHIFT_W32(cross, -shiftMax),
+                                                                WEBRTC_SPL_SHIFT_W32(cross, -shiftMax), 15);
+
+      for (j=inlag-2;j<=inlag+3;j++) {
+        WebRtcIlbcfix_CompCorr( &cross_comp, &ener_comp,
+                                iLBCdec_inst->prevResidual, j, iLBCdec_inst->blockl, corrLen, scale3);
+
+        /* Use the criteria (corr*corr)/energy to compare if
+           this lag is better or not. To avoid the division,
+           do a cross multiplication */
+        shift1 = WebRtcSpl_GetSizeInBits(WEBRTC_SPL_ABS_W32(cross_comp))-15;
+        crossSquare = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(WEBRTC_SPL_SHIFT_W32(cross_comp, -shift1),
+                                                               WEBRTC_SPL_SHIFT_W32(cross_comp, -shift1), 15);
+
+        shift2 = WebRtcSpl_GetSizeInBits(ener)-15;
+        measure = WEBRTC_SPL_MUL_16_16(WEBRTC_SPL_SHIFT_W32(ener, -shift2),
+                                       crossSquare);
+
+        shift3 = WebRtcSpl_GetSizeInBits(ener_comp)-15;
+        maxMeasure = WEBRTC_SPL_MUL_16_16(WEBRTC_SPL_SHIFT_W32(ener_comp, -shift3),
+                                          crossSquareMax);
+
+        /* Calculate shift value, so that the two measures can
+           be put in the same Q domain */
+        if(((shiftMax<<1)+shift3) > ((shift1<<1)+shift2)) {
+          tmp1 = WEBRTC_SPL_MIN(31, (shiftMax<<1)+shift3-(shift1<<1)-shift2);
+          tmp2 = 0;
+        } else {
+          tmp1 = 0;
+          tmp2 = WEBRTC_SPL_MIN(31, (shift1<<1)+shift2-(shiftMax<<1)-shift3);
+        }
+
+        if ((measure>>tmp1) > (maxMeasure>>tmp2)) {
+          /* New lag is better => record lag, measure and domain */
+          lag = j;
+          crossSquareMax = crossSquare;
+          cross = cross_comp;
+          shiftMax = shift1;
+          ener = ener_comp;
+        }
+      }
+
+      /* Calculate the periodicity for the lag with the maximum correlation.
+
+         Definition of the periodicity:
+         abs(corr(vec1, vec2))/(sqrt(energy(vec1))*sqrt(energy(vec2)))
+
+         Work in the Square domain to simplify the calculations
+         max_perSquare is less than 1 (in Q15)
+      */
+      tmp2W32=WebRtcSpl_DotProductWithScale(&iLBCdec_inst->prevResidual[iLBCdec_inst->blockl-corrLen],
+                                            &iLBCdec_inst->prevResidual[iLBCdec_inst->blockl-corrLen],
+                                            corrLen, scale3);
+
+      if ((tmp2W32>0)&&(ener_comp>0)) {
+        /* norm energies to WebRtc_Word16, compute the product of the energies and
+           use the upper WebRtc_Word16 as the denominator */
+
+        scale1=(WebRtc_Word16)WebRtcSpl_NormW32(tmp2W32)-16;
+        tmp1=(WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(tmp2W32, scale1);
+
+        scale2=(WebRtc_Word16)WebRtcSpl_NormW32(ener)-16;
+        tmp2=(WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(ener, scale2);
+        denom=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(tmp1, tmp2, 16); /* denom in Q(scale1+scale2-16) */
+
+        /* Square the cross correlation and norm it such that max_perSquare
+           will be in Q15 after the division */
+
+        totscale = scale1+scale2-1;
+        tmp1 = (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(cross, (totscale>>1));
+        tmp2 = (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(cross, totscale-(totscale>>1));
+
+        nom = WEBRTC_SPL_MUL_16_16(tmp1, tmp2);
+        max_perSquare = (WebRtc_Word16)WebRtcSpl_DivW32W16(nom, denom);
+
+      } else {
+        max_perSquare = 0;
+      }
+    }
+
+    /* previous frame lost, use recorded lag and gain */
+
+    else {
+      lag = iLBCdec_inst->prevLag;
+      max_perSquare = iLBCdec_inst->perSquare;
+    }
+
+    /* Attenuate signal and scale down pitch pred gain if
+       several frames lost consecutively */
+
+    use_gain = 32767;   /* 1.0 in Q15 */
+
+    if (iLBCdec_inst->consPLICount*iLBCdec_inst->blockl>320) {
+      use_gain = 29491;  /* 0.9 in Q15 */
+    } else if (iLBCdec_inst->consPLICount*iLBCdec_inst->blockl>640) {
+      use_gain = 22938;  /* 0.7 in Q15 */
+    } else if (iLBCdec_inst->consPLICount*iLBCdec_inst->blockl>960) {
+      use_gain = 16384;  /* 0.5 in Q15 */
+    } else if (iLBCdec_inst->consPLICount*iLBCdec_inst->blockl>1280) {
+      use_gain = 0;   /* 0.0 in Q15 */
+    }
+
+    /* Compute mixing factor of picth repeatition and noise:
+       for max_per>0.7 set periodicity to 1.0
+       0.4<max_per<0.7 set periodicity to (maxper-0.4)/0.7-0.4)
+       max_per<0.4 set periodicity to 0.0
+    */
+
+    if (max_perSquare>7868) { /* periodicity > 0.7  (0.7^4=0.2401 in Q15) */
+      pitchfact = 32767;
+    } else if (max_perSquare>839) { /* 0.4 < periodicity < 0.7 (0.4^4=0.0256 in Q15) */
+      /* find best index and interpolate from that */
+      ind = 5;
+      while ((max_perSquare<WebRtcIlbcfix_kPlcPerSqr[ind])&&(ind>0)) {
+        ind--;
+      }
+      /* pitch fact is approximated by first order */
+      tmpW32 = (WebRtc_Word32)WebRtcIlbcfix_kPlcPitchFact[ind] +
+          WEBRTC_SPL_MUL_16_16_RSFT(WebRtcIlbcfix_kPlcPfSlope[ind], (max_perSquare-WebRtcIlbcfix_kPlcPerSqr[ind]), 11);
+
+      pitchfact = (WebRtc_Word16)WEBRTC_SPL_MIN(tmpW32, 32767); /* guard against overflow */
+
+    } else { /* periodicity < 0.4 */
+      pitchfact = 0;
+    }
+
+    /* avoid repetition of same pitch cycle (buzzyness) */
+    use_lag = lag;
+    if (lag<80) {
+      use_lag = 2*lag;
+    }
+
+    /* compute concealed residual */
+    energy = 0;
+
+    for (i=0; i<iLBCdec_inst->blockl; i++) {
+
+      /* noise component -  52 < randlagFIX < 117 */
+      iLBCdec_inst->seed = (WebRtc_Word16)(WEBRTC_SPL_MUL_16_16(iLBCdec_inst->seed, 31821)+(WebRtc_Word32)13849);
+      randlag = 53 + (WebRtc_Word16)(iLBCdec_inst->seed & 63);
+
+      pick = i - randlag;
+
+      if (pick < 0) {
+        randvec[i] = iLBCdec_inst->prevResidual[iLBCdec_inst->blockl+pick];
+      } else {
+        randvec[i] = iLBCdec_inst->prevResidual[pick];
+      }
+
+      /* pitch repeatition component */
+      pick = i - use_lag;
+
+      if (pick < 0) {
+        PLCresidual[i] = iLBCdec_inst->prevResidual[iLBCdec_inst->blockl+pick];
+      } else {
+        PLCresidual[i] = PLCresidual[pick];
+      }
+
+      /* Attinuate total gain for each 10 ms */
+      if (i<80) {
+        tot_gain=use_gain;
+      } else if (i<160) {
+        tot_gain=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(31130, use_gain, 15); /* 0.95*use_gain */
+      } else {
+        tot_gain=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(29491, use_gain, 15); /* 0.9*use_gain */
+      }
+
+
+      /* mix noise and pitch repeatition */
+
+      PLCresidual[i] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(tot_gain,
+                                                                (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32( (WEBRTC_SPL_MUL_16_16(pitchfact, PLCresidual[i]) +
+                                                                                                       WEBRTC_SPL_MUL_16_16((32767-pitchfact), randvec[i]) + 16384),
+                                                                                                      15),
+                                                                15);
+
+      /* Shifting down the result one step extra to ensure that no overflow
+         will occur */
+      energy += WEBRTC_SPL_MUL_16_16_RSFT(PLCresidual[i],
+                                          PLCresidual[i], (iLBCdec_inst->prevScale+1));
+
+    }
+
+    /* less than 30 dB, use only noise */
+    if (energy < (WEBRTC_SPL_SHIFT_W32(((WebRtc_Word32)iLBCdec_inst->blockl*900),-(iLBCdec_inst->prevScale+1)))) {
+      energy = 0;
+      for (i=0; i<iLBCdec_inst->blockl; i++) {
+        PLCresidual[i] = randvec[i];
+      }
+    }
+
+    /* use the old LPC */
+    WEBRTC_SPL_MEMCPY_W16(PLClpc, (*iLBCdec_inst).prevLpc, LPC_FILTERORDER+1);
+
+    /* Update state in case there are multiple frame losses */
+    iLBCdec_inst->prevLag = lag;
+    iLBCdec_inst->perSquare = max_perSquare;
+  }
+
+  /* no packet loss, copy input */
+
+  else {
+    WEBRTC_SPL_MEMCPY_W16(PLCresidual, decresidual, iLBCdec_inst->blockl);
+    WEBRTC_SPL_MEMCPY_W16(PLClpc, lpc, (LPC_FILTERORDER+1));
+    iLBCdec_inst->consPLICount = 0;
+  }
+
+  /* update state */
+  iLBCdec_inst->prevPLI = PLI;
+  WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->prevLpc, PLClpc, (LPC_FILTERORDER+1));
+  WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->prevResidual, PLCresidual, iLBCdec_inst->blockl);
+
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/do_plc.h b/src/modules/audio_coding/codecs/ilbc/do_plc.h
new file mode 100644
index 0000000..c5bcc52
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/do_plc.h
@@ -0,0 +1,41 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_DoThePlc.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DO_PLC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_DO_PLC_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Packet loss concealment routine. Conceals a residual signal
+ *  and LP parameters. If no packet loss, update state.
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_DoThePlc(
+    WebRtc_Word16 *PLCresidual,  /* (o) concealed residual */
+    WebRtc_Word16 *PLClpc,    /* (o) concealed LP parameters */
+    WebRtc_Word16 PLI,     /* (i) packet loss indicator
+                                                           0 - no PL, 1 = PL */
+    WebRtc_Word16 *decresidual,  /* (i) decoded residual */
+    WebRtc_Word16 *lpc,    /* (i) decoded LPC (only used for no PL) */
+    WebRtc_Word16 inlag,    /* (i) pitch lag */
+    iLBC_Dec_Inst_t *iLBCdec_inst
+    /* (i/o) decoder instance */
+                            );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/encode.c b/src/modules/audio_coding/codecs/ilbc/encode.c
new file mode 100644
index 0000000..d3d3ba8
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/encode.c
@@ -0,0 +1,518 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Encode.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "lpc_encode.h"
+#include "frame_classify.h"
+#include "state_search.h"
+#include "state_construct.h"
+#include "constants.h"
+#include "cb_search.h"
+#include "cb_construct.h"
+#include "index_conv_enc.h"
+#include "pack_bits.h"
+#include "hp_input.h"
+
+#ifdef SPLIT_10MS
+#include "unpack_bits.h"
+#include "index_conv_dec.h"
+#endif
+#ifndef WEBRTC_BIG_ENDIAN
+#include "swap_bytes.h"
+#endif
+
+/*----------------------------------------------------------------*
+ *  main encoder function
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_EncodeImpl(
+    WebRtc_UWord16 *bytes,     /* (o) encoded data bits iLBC */
+    const WebRtc_Word16 *block, /* (i) speech vector to encode */
+    iLBC_Enc_Inst_t *iLBCenc_inst /* (i/o) the general encoder
+                                     state */
+                          ){
+  int n, meml_gotten, Nfor, Nback;
+  WebRtc_Word16 diff, start_pos;
+  int index;
+  int subcount, subframe;
+  WebRtc_Word16 start_count, end_count;
+  WebRtc_Word16 *residual;
+  WebRtc_Word32 en1, en2;
+  WebRtc_Word16 scale, max;
+  WebRtc_Word16 *syntdenum;
+  WebRtc_Word16 *decresidual;
+  WebRtc_Word16 *reverseResidual;
+  WebRtc_Word16 *reverseDecresidual;
+  /* Stack based */
+  WebRtc_Word16 weightdenum[(LPC_FILTERORDER + 1)*NSUB_MAX];
+  WebRtc_Word16 dataVec[BLOCKL_MAX + LPC_FILTERORDER];
+  WebRtc_Word16 memVec[CB_MEML+CB_FILTERLEN];
+  WebRtc_Word16 bitsMemory[sizeof(iLBC_bits)/sizeof(WebRtc_Word16)];
+  iLBC_bits *iLBCbits_inst = (iLBC_bits*)bitsMemory;
+
+
+#ifdef SPLIT_10MS
+  WebRtc_Word16 *weightdenumbuf = iLBCenc_inst->weightdenumbuf;
+  WebRtc_Word16 last_bit;
+#endif
+
+  WebRtc_Word16 *data = &dataVec[LPC_FILTERORDER];
+  WebRtc_Word16 *mem = &memVec[CB_HALFFILTERLEN];
+
+  /* Reuse som buffers to save stack memory */
+  residual = &iLBCenc_inst->lpc_buffer[LPC_LOOKBACK+BLOCKL_MAX-iLBCenc_inst->blockl];
+  syntdenum = mem;      /* syntdenum[(LPC_FILTERORDER + 1)*NSUB_MAX] and mem are used non overlapping in the code */
+  decresidual = residual;     /* Already encoded residual is overwritten by the decoded version */
+  reverseResidual = data;     /* data and reverseResidual are used non overlapping in the code */
+  reverseDecresidual = reverseResidual; /* Already encoded residual is overwritten by the decoded version */
+
+#ifdef SPLIT_10MS
+
+  WebRtcSpl_MemSetW16 (  (WebRtc_Word16 *) iLBCbits_inst, 0,
+                         (WebRtc_Word16) (sizeof(iLBC_bits) / sizeof(WebRtc_Word16))  );
+
+  start_pos = iLBCenc_inst->start_pos;
+  diff = iLBCenc_inst->diff;
+
+  if (iLBCenc_inst->section != 0){
+    WEBRTC_SPL_MEMCPY_W16 (weightdenum, weightdenumbuf,
+                           SCRATCH_ENCODE_DATAVEC - SCRATCH_ENCODE_WEIGHTDENUM);
+    /* Un-Packetize the frame into parameters */
+    last_bit = WebRtcIlbcfix_UnpackBits (iLBCenc_inst->bytes, iLBCbits_inst, iLBCenc_inst->mode);
+    if (last_bit)
+      return;
+    /* adjust index */
+    WebRtcIlbcfix_IndexConvDec (iLBCbits_inst->cb_index);
+
+    if (iLBCenc_inst->section == 1){
+      /* Save first 80 samples of a 160/240 sample frame for 20/30msec */
+      WEBRTC_SPL_MEMCPY_W16 (iLBCenc_inst->past_samples, block, 80);
+    }
+    else{ // iLBCenc_inst->section == 2 AND mode = 30ms
+      /* Save second 80 samples of a 240 sample frame for 30msec */
+      WEBRTC_SPL_MEMCPY_W16 (iLBCenc_inst->past_samples + 80, block, 80);
+    }
+  }
+  else{ // iLBCenc_inst->section == 0
+    /* form a complete frame of 160/240 for 20msec/30msec mode */
+    WEBRTC_SPL_MEMCPY_W16 (data + (iLBCenc_inst->mode * 8) - 80, block, 80);
+    WEBRTC_SPL_MEMCPY_W16 (data, iLBCenc_inst->past_samples,
+                           (iLBCenc_inst->mode * 8) - 80);
+    iLBCenc_inst->Nfor_flag = 0;
+    iLBCenc_inst->Nback_flag = 0;
+#else
+    /* copy input block to data*/
+    WEBRTC_SPL_MEMCPY_W16(data,block,iLBCenc_inst->blockl);
+#endif
+
+    /* high pass filtering of input signal and scale down the residual (*0.5) */
+    WebRtcIlbcfix_HpInput(data, (WebRtc_Word16*)WebRtcIlbcfix_kHpInCoefs,
+                          iLBCenc_inst->hpimemy, iLBCenc_inst->hpimemx,
+                          iLBCenc_inst->blockl);
+
+    /* LPC of hp filtered input data */
+    WebRtcIlbcfix_LpcEncode(syntdenum, weightdenum, iLBCbits_inst->lsf, data,
+                            iLBCenc_inst);
+
+    /* Set up state */
+    WEBRTC_SPL_MEMCPY_W16(dataVec, iLBCenc_inst->anaMem, LPC_FILTERORDER);
+
+    /* inverse filter to get residual */
+    for (n=0; n<iLBCenc_inst->nsub; n++ ) {
+      WebRtcSpl_FilterMAFastQ12(
+          &data[n*SUBL], &residual[n*SUBL],
+          &syntdenum[n*(LPC_FILTERORDER+1)],
+          LPC_FILTERORDER+1, SUBL);
+    }
+
+    /* Copy the state for next frame */
+    WEBRTC_SPL_MEMCPY_W16(iLBCenc_inst->anaMem, &data[iLBCenc_inst->blockl-LPC_FILTERORDER], LPC_FILTERORDER);
+
+    /* find state location */
+
+    iLBCbits_inst->startIdx = WebRtcIlbcfix_FrameClassify(iLBCenc_inst,residual);
+
+    /* check if state should be in first or last part of the
+       two subframes */
+
+    index = (iLBCbits_inst->startIdx-1)*SUBL;
+    max=WebRtcSpl_MaxAbsValueW16(&residual[index], 2*SUBL);
+    scale=WebRtcSpl_GetSizeInBits(WEBRTC_SPL_MUL_16_16(max,max));
+
+    /* Scale to maximum 25 bits so that the MAC won't cause overflow */
+    scale = scale - 25;
+    if(scale < 0) {
+      scale = 0;
+    }
+
+    diff = STATE_LEN - iLBCenc_inst->state_short_len;
+    en1=WebRtcSpl_DotProductWithScale(&residual[index], &residual[index],
+                                      iLBCenc_inst->state_short_len, scale);
+    index += diff;
+    en2=WebRtcSpl_DotProductWithScale(&residual[index], &residual[index],
+                                      iLBCenc_inst->state_short_len, scale);
+    if (en1 > en2) {
+      iLBCbits_inst->state_first = 1;
+      start_pos = (iLBCbits_inst->startIdx-1)*SUBL;
+    } else {
+      iLBCbits_inst->state_first = 0;
+      start_pos = (iLBCbits_inst->startIdx-1)*SUBL + diff;
+    }
+
+    /* scalar quantization of state */
+
+    WebRtcIlbcfix_StateSearch(iLBCenc_inst, iLBCbits_inst, &residual[start_pos],
+                              &syntdenum[(iLBCbits_inst->startIdx-1)*(LPC_FILTERORDER+1)],
+                              &weightdenum[(iLBCbits_inst->startIdx-1)*(LPC_FILTERORDER+1)]);
+
+    WebRtcIlbcfix_StateConstruct(iLBCbits_inst->idxForMax, iLBCbits_inst->idxVec,
+                                 &syntdenum[(iLBCbits_inst->startIdx-1)*(LPC_FILTERORDER+1)],
+                                 &decresidual[start_pos], iLBCenc_inst->state_short_len
+                                 );
+
+    /* predictive quantization in state */
+
+    if (iLBCbits_inst->state_first) { /* put adaptive part in the end */
+
+      /* setup memory */
+
+      WebRtcSpl_MemSetW16(mem, 0, (WebRtc_Word16)(CB_MEML-iLBCenc_inst->state_short_len));
+      WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-iLBCenc_inst->state_short_len,
+                            decresidual+start_pos, iLBCenc_inst->state_short_len);
+
+      /* encode subframes */
+
+      WebRtcIlbcfix_CbSearch(iLBCenc_inst, iLBCbits_inst->cb_index, iLBCbits_inst->gain_index,
+                             &residual[start_pos+iLBCenc_inst->state_short_len],
+                             mem+CB_MEML-ST_MEM_L_TBL, ST_MEM_L_TBL, diff,
+                             &weightdenum[iLBCbits_inst->startIdx*(LPC_FILTERORDER+1)], 0);
+
+      /* construct decoded vector */
+
+      WebRtcIlbcfix_CbConstruct(&decresidual[start_pos+iLBCenc_inst->state_short_len],
+                                iLBCbits_inst->cb_index, iLBCbits_inst->gain_index,
+                                mem+CB_MEML-ST_MEM_L_TBL, ST_MEM_L_TBL,
+                                diff
+                                );
+
+    }
+    else { /* put adaptive part in the beginning */
+
+      /* create reversed vectors for prediction */
+
+      WebRtcSpl_MemCpyReversedOrder(&reverseResidual[diff-1],
+                                    &residual[(iLBCbits_inst->startIdx+1)*SUBL-STATE_LEN], diff);
+
+      /* setup memory */
+
+      meml_gotten = iLBCenc_inst->state_short_len;
+      WebRtcSpl_MemCpyReversedOrder(&mem[CB_MEML-1], &decresidual[start_pos], meml_gotten);
+      WebRtcSpl_MemSetW16(mem, 0, (WebRtc_Word16)(CB_MEML-iLBCenc_inst->state_short_len));
+
+      /* encode subframes */
+      WebRtcIlbcfix_CbSearch(iLBCenc_inst, iLBCbits_inst->cb_index, iLBCbits_inst->gain_index,
+                             reverseResidual, mem+CB_MEML-ST_MEM_L_TBL, ST_MEM_L_TBL, diff,
+                             &weightdenum[(iLBCbits_inst->startIdx-1)*(LPC_FILTERORDER+1)],
+                             0);
+
+      /* construct decoded vector */
+
+      WebRtcIlbcfix_CbConstruct(reverseDecresidual,
+                                iLBCbits_inst->cb_index, iLBCbits_inst->gain_index,
+                                mem+CB_MEML-ST_MEM_L_TBL, ST_MEM_L_TBL,
+                                diff
+                                );
+
+      /* get decoded residual from reversed vector */
+
+      WebRtcSpl_MemCpyReversedOrder(&decresidual[start_pos-1], reverseDecresidual, diff);
+    }
+
+#ifdef SPLIT_10MS
+    iLBCenc_inst->start_pos = start_pos;
+    iLBCenc_inst->diff = diff;
+    iLBCenc_inst->section++;
+    /* adjust index */
+    WebRtcIlbcfix_IndexConvEnc (iLBCbits_inst->cb_index);
+    /* Packetize the parameters into the frame */
+    WebRtcIlbcfix_PackBits (iLBCenc_inst->bytes, iLBCbits_inst, iLBCenc_inst->mode);
+    WEBRTC_SPL_MEMCPY_W16 (weightdenumbuf, weightdenum,
+                           SCRATCH_ENCODE_DATAVEC - SCRATCH_ENCODE_WEIGHTDENUM);
+    return;
+  }
+#endif
+
+  /* forward prediction of subframes */
+
+  Nfor = iLBCenc_inst->nsub-iLBCbits_inst->startIdx-1;
+
+  /* counter for predicted subframes */
+#ifdef SPLIT_10MS
+  if (iLBCenc_inst->mode == 20)
+  {
+    subcount = 1;
+  }
+  if (iLBCenc_inst->mode == 30)
+  {
+    if (iLBCenc_inst->section == 1)
+    {
+      subcount = 1;
+    }
+    if (iLBCenc_inst->section == 2)
+    {
+      subcount = 3;
+    }
+  }
+#else
+  subcount=1;
+#endif
+
+  if( Nfor > 0 ){
+
+    /* setup memory */
+
+    WebRtcSpl_MemSetW16(mem, 0, CB_MEML-STATE_LEN);
+    WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-STATE_LEN,
+                          decresidual+(iLBCbits_inst->startIdx-1)*SUBL, STATE_LEN);
+
+#ifdef SPLIT_10MS
+    if (iLBCenc_inst->Nfor_flag > 0)
+    {
+      for (subframe = 0; subframe < WEBRTC_SPL_MIN (Nfor, 2); subframe++)
+      {
+        /* update memory */
+        WEBRTC_SPL_MEMCPY_W16 (mem, mem + SUBL, (CB_MEML - SUBL));
+        WEBRTC_SPL_MEMCPY_W16 (mem + CB_MEML - SUBL,
+                               &decresidual[(iLBCbits_inst->startIdx + 1 +
+                                             subframe) * SUBL], SUBL);
+      }
+    }
+
+    iLBCenc_inst->Nfor_flag++;
+
+    if (iLBCenc_inst->mode == 20)
+    {
+      start_count = 0;
+      end_count = Nfor;
+    }
+    if (iLBCenc_inst->mode == 30)
+    {
+      if (iLBCenc_inst->section == 1)
+      {
+        start_count = 0;
+        end_count = WEBRTC_SPL_MIN (Nfor, 2);
+      }
+      if (iLBCenc_inst->section == 2)
+      {
+        start_count = WEBRTC_SPL_MIN (Nfor, 2);
+        end_count = Nfor;
+      }
+    }
+#else
+    start_count = 0;
+    end_count = (WebRtc_Word16)Nfor;
+#endif
+
+    /* loop over subframes to encode */
+
+    for (subframe = start_count; subframe < end_count; subframe++){
+
+      /* encode subframe */
+
+      WebRtcIlbcfix_CbSearch(iLBCenc_inst, iLBCbits_inst->cb_index+subcount*CB_NSTAGES,
+                             iLBCbits_inst->gain_index+subcount*CB_NSTAGES,
+                             &residual[(iLBCbits_inst->startIdx+1+subframe)*SUBL],
+                             mem, MEM_LF_TBL, SUBL,
+                             &weightdenum[(iLBCbits_inst->startIdx+1+subframe)*(LPC_FILTERORDER+1)],
+                             (WebRtc_Word16)subcount);
+
+      /* construct decoded vector */
+
+      WebRtcIlbcfix_CbConstruct(&decresidual[(iLBCbits_inst->startIdx+1+subframe)*SUBL],
+                                iLBCbits_inst->cb_index+subcount*CB_NSTAGES,
+                                iLBCbits_inst->gain_index+subcount*CB_NSTAGES,
+                                mem, MEM_LF_TBL,
+                                SUBL
+                                );
+
+      /* update memory */
+
+      WEBRTC_SPL_MEMMOVE_W16(mem, mem+SUBL, (CB_MEML-SUBL));
+      WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-SUBL,
+                            &decresidual[(iLBCbits_inst->startIdx+1+subframe)*SUBL], SUBL);
+
+      subcount++;
+    }
+  }
+
+#ifdef SPLIT_10MS
+  if ((iLBCenc_inst->section == 1) &&
+      (iLBCenc_inst->mode == 30) && (Nfor > 0) && (end_count == 2))
+  {
+    iLBCenc_inst->section++;
+    /* adjust index */
+    WebRtcIlbcfix_IndexConvEnc (iLBCbits_inst->cb_index);
+    /* Packetize the parameters into the frame */
+    WebRtcIlbcfix_PackBits (iLBCenc_inst->bytes, iLBCbits_inst, iLBCenc_inst->mode);
+    WEBRTC_SPL_MEMCPY_W16 (weightdenumbuf, weightdenum,
+                           SCRATCH_ENCODE_DATAVEC - SCRATCH_ENCODE_WEIGHTDENUM);
+    return;
+  }
+#endif
+
+  /* backward prediction of subframes */
+
+  Nback = iLBCbits_inst->startIdx-1;
+
+  if( Nback > 0 ){
+
+    /* create reverse order vectors
+       (The decresidual does not need to be copied since it is
+       contained in the same vector as the residual)
+    */
+
+    WebRtcSpl_MemCpyReversedOrder(&reverseResidual[Nback*SUBL-1], residual, Nback*SUBL);
+
+    /* setup memory */
+
+    meml_gotten = SUBL*(iLBCenc_inst->nsub+1-iLBCbits_inst->startIdx);
+    if( meml_gotten > CB_MEML ) {
+      meml_gotten=CB_MEML;
+    }
+
+    WebRtcSpl_MemCpyReversedOrder(&mem[CB_MEML-1], &decresidual[Nback*SUBL], meml_gotten);
+    WebRtcSpl_MemSetW16(mem, 0, (WebRtc_Word16)(CB_MEML-meml_gotten));
+
+#ifdef SPLIT_10MS
+    if (iLBCenc_inst->Nback_flag > 0)
+    {
+      for (subframe = 0; subframe < WEBRTC_SPL_MAX (2 - Nfor, 0); subframe++)
+      {
+        /* update memory */
+        WEBRTC_SPL_MEMCPY_W16 (mem, mem + SUBL, (CB_MEML - SUBL));
+        WEBRTC_SPL_MEMCPY_W16 (mem + CB_MEML - SUBL,
+                               &reverseDecresidual[subframe * SUBL], SUBL);
+      }
+    }
+
+    iLBCenc_inst->Nback_flag++;
+
+
+    if (iLBCenc_inst->mode == 20)
+    {
+      start_count = 0;
+      end_count = Nback;
+    }
+    if (iLBCenc_inst->mode == 30)
+    {
+      if (iLBCenc_inst->section == 1)
+      {
+        start_count = 0;
+        end_count = WEBRTC_SPL_MAX (2 - Nfor, 0);
+      }
+      if (iLBCenc_inst->section == 2)
+      {
+        start_count = WEBRTC_SPL_MAX (2 - Nfor, 0);
+        end_count = Nback;
+      }
+    }
+#else
+    start_count = 0;
+    end_count = (WebRtc_Word16)Nback;
+#endif
+
+    /* loop over subframes to encode */
+
+    for (subframe = start_count; subframe < end_count; subframe++){
+
+      /* encode subframe */
+
+      WebRtcIlbcfix_CbSearch(iLBCenc_inst, iLBCbits_inst->cb_index+subcount*CB_NSTAGES,
+                             iLBCbits_inst->gain_index+subcount*CB_NSTAGES, &reverseResidual[subframe*SUBL],
+                             mem, MEM_LF_TBL, SUBL,
+                             &weightdenum[(iLBCbits_inst->startIdx-2-subframe)*(LPC_FILTERORDER+1)],
+                             (WebRtc_Word16)subcount);
+
+      /* construct decoded vector */
+
+      WebRtcIlbcfix_CbConstruct(&reverseDecresidual[subframe*SUBL],
+                                iLBCbits_inst->cb_index+subcount*CB_NSTAGES,
+                                iLBCbits_inst->gain_index+subcount*CB_NSTAGES,
+                                mem, MEM_LF_TBL, SUBL
+                                );
+
+      /* update memory */
+
+      WEBRTC_SPL_MEMMOVE_W16(mem, mem+SUBL, (CB_MEML-SUBL));
+      WEBRTC_SPL_MEMCPY_W16(mem+CB_MEML-SUBL,
+                            &reverseDecresidual[subframe*SUBL], SUBL);
+
+      subcount++;
+
+    }
+
+    /* get decoded residual from reversed vector */
+
+    WebRtcSpl_MemCpyReversedOrder(&decresidual[SUBL*Nback-1], reverseDecresidual, SUBL*Nback);
+  }
+  /* end encoding part */
+
+  /* adjust index */
+
+  WebRtcIlbcfix_IndexConvEnc(iLBCbits_inst->cb_index);
+
+  /* Packetize the parameters into the frame */
+
+#ifdef SPLIT_10MS
+  if( (iLBCenc_inst->mode==30) && (iLBCenc_inst->section==1) ){
+    WebRtcIlbcfix_PackBits(iLBCenc_inst->bytes, iLBCbits_inst, iLBCenc_inst->mode);
+  }
+  else{
+    WebRtcIlbcfix_PackBits(bytes, iLBCbits_inst, iLBCenc_inst->mode);
+  }
+#else
+  WebRtcIlbcfix_PackBits(bytes, iLBCbits_inst, iLBCenc_inst->mode);
+#endif
+
+#ifndef WEBRTC_BIG_ENDIAN
+  /* Swap bytes for LITTLE ENDIAN since the packbits()
+     function assumes BIG_ENDIAN machine */
+#ifdef SPLIT_10MS
+  if (( (iLBCenc_inst->section == 1) && (iLBCenc_inst->mode == 20) ) ||
+      ( (iLBCenc_inst->section == 2) && (iLBCenc_inst->mode == 30) )){
+    WebRtcIlbcfix_SwapBytes(bytes, iLBCenc_inst->no_of_words, bytes);
+  }
+#else
+  WebRtcIlbcfix_SwapBytes(bytes, iLBCenc_inst->no_of_words, bytes);
+#endif
+#endif
+
+#ifdef SPLIT_10MS
+  if (subcount == (iLBCenc_inst->nsub - 1))
+  {
+    iLBCenc_inst->section = 0;
+  }
+  else
+  {
+    iLBCenc_inst->section++;
+    WEBRTC_SPL_MEMCPY_W16 (weightdenumbuf, weightdenum,
+                           SCRATCH_ENCODE_DATAVEC - SCRATCH_ENCODE_WEIGHTDENUM);
+  }
+#endif
+
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/encode.h b/src/modules/audio_coding/codecs/ilbc/encode.h
new file mode 100644
index 0000000..b7d93d7
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/encode.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Encode.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENCODE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENCODE_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  main encoder function
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_EncodeImpl(
+    WebRtc_UWord16 *bytes,     /* (o) encoded data bits iLBC */
+    const WebRtc_Word16 *block, /* (i) speech vector to encode */
+    iLBC_Enc_Inst_t *iLBCenc_inst /* (i/o) the general encoder
+                                           state */
+                          );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/energy_inverse.c b/src/modules/audio_coding/codecs/ilbc/energy_inverse.c
new file mode 100644
index 0000000..d56069b
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/energy_inverse.c
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_EnergyInverse.c
+
+******************************************************************/
+
+/* Inverses the in vector in into Q29 domain */
+
+#include "energy_inverse.h"
+
+void WebRtcIlbcfix_EnergyInverse(
+    WebRtc_Word16 *energy,    /* (i/o) Energy and inverse
+                                                           energy (in Q29) */
+    int noOfEnergies)  /* (i)   The length of the energy
+                                   vector */
+{
+  WebRtc_Word32 Nom=(WebRtc_Word32)0x1FFFFFFF;
+  WebRtc_Word16 *energyPtr;
+  int i;
+
+  /* Set the minimum energy value to 16384 to avoid overflow */
+  energyPtr=energy;
+  for (i=0; i<noOfEnergies; i++) {
+    (*energyPtr)=WEBRTC_SPL_MAX((*energyPtr),16384);
+    energyPtr++;
+  }
+
+  /* Calculate inverse energy in Q29 */
+  energyPtr=energy;
+  for (i=0; i<noOfEnergies; i++) {
+    (*energyPtr) = (WebRtc_Word16)WebRtcSpl_DivW32W16(Nom, (*energyPtr));
+    energyPtr++;
+  }
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/energy_inverse.h b/src/modules/audio_coding/codecs/ilbc/energy_inverse.h
new file mode 100644
index 0000000..db13589
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/energy_inverse.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_EnergyInverse.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENERGY_INVERSE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENERGY_INVERSE_H_
+
+#include "defines.h"
+
+/* Inverses the in vector in into Q29 domain */
+
+void WebRtcIlbcfix_EnergyInverse(
+    WebRtc_Word16 *energy,     /* (i/o) Energy and inverse
+                                                                   energy (in Q29) */
+    int noOfEnergies);   /* (i)   The length of the energy
+                                   vector */
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/enh_upsample.c b/src/modules/audio_coding/codecs/ilbc/enh_upsample.c
new file mode 100644
index 0000000..3343816
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/enh_upsample.c
@@ -0,0 +1,110 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_EnhUpsample.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ * upsample finite array assuming zeros outside bounds
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_EnhUpsample(
+    WebRtc_Word32 *useq1, /* (o) upsampled output sequence */
+    WebRtc_Word16 *seq1 /* (i) unupsampled sequence */
+                                ){
+  int j;
+  WebRtc_Word32 *pu1, *pu11;
+  WebRtc_Word16 *ps, *w16tmp;
+  const WebRtc_Word16 *pp;
+
+  /* filtering: filter overhangs left side of sequence */
+  pu1=useq1;
+  for (j=0;j<ENH_UPS0; j++) {
+    pu11=pu1;
+    /* i = 2 */
+    pp=WebRtcIlbcfix_kEnhPolyPhaser[j]+1;
+    ps=seq1+2;
+    (*pu11) = WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    pu11+=ENH_UPS0;
+    /* i = 3 */
+    pp=WebRtcIlbcfix_kEnhPolyPhaser[j]+1;
+    ps=seq1+3;
+    (*pu11) = WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    pu11+=ENH_UPS0;
+    /* i = 4 */
+    pp=WebRtcIlbcfix_kEnhPolyPhaser[j]+1;
+    ps=seq1+4;
+    (*pu11) = WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--,*pp++);
+    pu1++;
+  }
+
+  /* filtering: simple convolution=inner products
+     (not needed since the sequence is so short)
+  */
+
+  /* filtering: filter overhangs right side of sequence */
+
+  /* Code with loops, which is equivivalent to the expanded version below
+
+     filterlength = 5;
+     hf1 = 2;
+     for(j=0;j<ENH_UPS0; j++){
+     pu = useq1 + (filterlength-hfl)*ENH_UPS0 + j;
+     for(i=1; i<=hfl; i++){
+     *pu=0;
+     pp = polyp[j]+i;
+     ps = seq1+dim1-1;
+     for(k=0;k<filterlength-i;k++) {
+     *pu += WEBRTC_SPL_MUL_16_16(*ps--, *pp++);
+     }
+     pu+=ENH_UPS0;
+     }
+     }
+  */
+  pu1 = useq1 + 12;
+  w16tmp = seq1+4;
+  for (j=0;j<ENH_UPS0; j++) {
+    pu11 = pu1;
+    /* i = 1 */
+    pp = WebRtcIlbcfix_kEnhPolyPhaser[j]+2;
+    ps = w16tmp;
+    (*pu11) = WEBRTC_SPL_MUL_16_16(*ps--, *pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--, *pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--, *pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--, *pp++);
+    pu11+=ENH_UPS0;
+    /* i = 2 */
+    pp = WebRtcIlbcfix_kEnhPolyPhaser[j]+3;
+    ps = w16tmp;
+    (*pu11) = WEBRTC_SPL_MUL_16_16(*ps--, *pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--, *pp++);
+    (*pu11) += WEBRTC_SPL_MUL_16_16(*ps--, *pp++);
+    pu11+=ENH_UPS0;
+
+    pu1++;
+  }
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/enh_upsample.h b/src/modules/audio_coding/codecs/ilbc/enh_upsample.h
new file mode 100644
index 0000000..53534cc
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/enh_upsample.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_EnhUpsample.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENH_UPSAMPLE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENH_UPSAMPLE_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * upsample finite array assuming zeros outside bounds
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_EnhUpsample(
+    WebRtc_Word32 *useq1, /* (o) upsampled output sequence */
+    WebRtc_Word16 *seq1 /* (i) unupsampled sequence */
+                                );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/enhancer.c b/src/modules/audio_coding/codecs/ilbc/enhancer.c
new file mode 100644
index 0000000..b8f3335
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/enhancer.c
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Enhancer.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "get_sync_seq.h"
+#include "smooth.h"
+
+/*----------------------------------------------------------------*
+ * perform enhancement on idata+centerStartPos through
+ * idata+centerStartPos+ENH_BLOCKL-1
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Enhancer(
+    WebRtc_Word16 *odata,   /* (o) smoothed block, dimension blockl */
+    WebRtc_Word16 *idata,   /* (i) data buffer used for enhancing */
+    WebRtc_Word16 idatal,   /* (i) dimension idata */
+    WebRtc_Word16 centerStartPos, /* (i) first sample current block within idata */
+    WebRtc_Word16 *period,   /* (i) pitch period array (pitch bward-in time) */
+    WebRtc_Word16 *plocs,   /* (i) locations where period array values valid */
+    WebRtc_Word16 periodl   /* (i) dimension of period and plocs */
+                            ){
+  /* Stack based */
+  WebRtc_Word16 surround[ENH_BLOCKL];
+
+  WebRtcSpl_MemSetW16(surround, 0, ENH_BLOCKL);
+
+  /* get said second sequence of segments */
+
+  WebRtcIlbcfix_GetSyncSeq(idata, idatal, centerStartPos, period, plocs,
+                           periodl, ENH_HL, surround);
+
+  /* compute the smoothed output from said second sequence */
+
+  WebRtcIlbcfix_Smooth(odata, idata+centerStartPos, surround);
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/enhancer.h b/src/modules/audio_coding/codecs/ilbc/enhancer.h
new file mode 100644
index 0000000..e14f559
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/enhancer.h
@@ -0,0 +1,39 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Enhancer.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENHANCER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENHANCER_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * perform enhancement on idata+centerStartPos through
+ * idata+centerStartPos+ENH_BLOCKL-1
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Enhancer(
+    WebRtc_Word16 *odata,   /* (o) smoothed block, dimension blockl */
+    WebRtc_Word16 *idata,   /* (i) data buffer used for enhancing */
+    WebRtc_Word16 idatal,   /* (i) dimension idata */
+    WebRtc_Word16 centerStartPos, /* (i) first sample current block within idata */
+    WebRtc_Word16 *period,   /* (i) pitch period array (pitch bward-in time) */
+    WebRtc_Word16 *plocs,   /* (i) locations where period array values valid */
+    WebRtc_Word16 periodl   /* (i) dimension of period and plocs */
+                            );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/enhancer_interface.c b/src/modules/audio_coding/codecs/ilbc/enhancer_interface.c
new file mode 100644
index 0000000..61b71d1
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/enhancer_interface.c
@@ -0,0 +1,381 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_EnhancerInterface.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "xcorr_coef.h"
+#include "enhancer.h"
+#include "hp_output.h"
+
+
+
+/*----------------------------------------------------------------*
+ * interface for enhancer
+ *---------------------------------------------------------------*/
+
+int WebRtcIlbcfix_EnhancerInterface( /* (o) Estimated lag in end of in[] */
+    WebRtc_Word16 *out,     /* (o) enhanced signal */
+    WebRtc_Word16 *in,      /* (i) unenhanced signal */
+    iLBC_Dec_Inst_t *iLBCdec_inst /* (i) buffers etc */
+                                        ){
+  int iblock;
+  int lag=20, tlag=20;
+  int inLen=iLBCdec_inst->blockl+120;
+  WebRtc_Word16 scale, scale1, plc_blockl;
+  WebRtc_Word16 *enh_buf, *enh_period;
+  WebRtc_Word32 tmp1, tmp2, max, new_blocks;
+  WebRtc_Word16 *enh_bufPtr1;
+  int i, k;
+  WebRtc_Word16 EnChange;
+  WebRtc_Word16 SqrtEnChange;
+  WebRtc_Word16 inc;
+  WebRtc_Word16 win;
+  WebRtc_Word16 *tmpW16ptr;
+  WebRtc_Word16 startPos;
+  WebRtc_Word16 *plc_pred;
+  WebRtc_Word16 *target, *regressor;
+  WebRtc_Word16 max16;
+  int shifts;
+  WebRtc_Word32 ener;
+  WebRtc_Word16 enerSh;
+  WebRtc_Word16 corrSh;
+  WebRtc_Word16 ind, sh;
+  WebRtc_Word16 start, stop;
+  /* Stack based */
+  WebRtc_Word16 totsh[3];
+  WebRtc_Word16 downsampled[(BLOCKL_MAX+120)>>1]; /* length 180 */
+  WebRtc_Word32 corr32[50];
+  WebRtc_Word32 corrmax[3];
+  WebRtc_Word16 corr16[3];
+  WebRtc_Word16 en16[3];
+  WebRtc_Word16 lagmax[3];
+
+  plc_pred = downsampled; /* Reuse memory since plc_pred[ENH_BLOCKL] and
+                              downsampled are non overlapping */
+  enh_buf=iLBCdec_inst->enh_buf;
+  enh_period=iLBCdec_inst->enh_period;
+
+  /* Copy in the new data into the enhancer buffer */
+
+  WEBRTC_SPL_MEMMOVE_W16(enh_buf, &enh_buf[iLBCdec_inst->blockl],
+                         ENH_BUFL-iLBCdec_inst->blockl);
+
+  WEBRTC_SPL_MEMCPY_W16(&enh_buf[ENH_BUFL-iLBCdec_inst->blockl], in,
+                        iLBCdec_inst->blockl);
+
+  /* Set variables that are dependent on frame size */
+  if (iLBCdec_inst->mode==30) {
+    plc_blockl=ENH_BLOCKL;
+    new_blocks=3;
+    startPos=320;  /* Start position for enhancement
+                     (640-new_blocks*ENH_BLOCKL-80) */
+  } else {
+    plc_blockl=40;
+    new_blocks=2;
+    startPos=440;  /* Start position for enhancement
+                    (640-new_blocks*ENH_BLOCKL-40) */
+  }
+
+  /* Update the pitch prediction for each enhancer block, move the old ones */
+  WEBRTC_SPL_MEMMOVE_W16(enh_period, &enh_period[new_blocks],
+                         (ENH_NBLOCKS_TOT-new_blocks));
+
+  k=WebRtcSpl_DownsampleFast(
+      enh_buf+ENH_BUFL-inLen,    /* Input samples */
+      (WebRtc_Word16)(inLen+ENH_BUFL_FILTEROVERHEAD),
+      downsampled,
+      (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W16(inLen, 1),
+      (WebRtc_Word16*)WebRtcIlbcfix_kLpFiltCoefs,  /* Coefficients in Q12 */
+      FILTERORDER_DS_PLUS1,    /* Length of filter (order-1) */
+      FACTOR_DS,
+      DELAY_DS);
+
+  /* Estimate the pitch in the down sampled domain. */
+  for(iblock = 0; iblock<new_blocks; iblock++){
+
+    /* references */
+    i=60+WEBRTC_SPL_MUL_16_16(iblock,ENH_BLOCKL_HALF);
+    target=downsampled+i;
+    regressor=downsampled+i-10;
+
+    /* scaling */
+    max16=WebRtcSpl_MaxAbsValueW16(&regressor[-50],
+                                   (WebRtc_Word16)(ENH_BLOCKL_HALF+50-1));
+    shifts = WebRtcSpl_GetSizeInBits(WEBRTC_SPL_MUL_16_16(max16, max16)) - 25;
+    shifts = WEBRTC_SPL_MAX(0, shifts);
+
+    /* compute cross correlation */
+    WebRtcSpl_CrossCorrelation(corr32, target, regressor,
+                               ENH_BLOCKL_HALF, 50, (WebRtc_Word16)shifts, -1);
+
+    /* Find 3 highest correlations that should be compared for the
+       highest (corr*corr)/ener */
+
+    for (i=0;i<2;i++) {
+      lagmax[i] = WebRtcSpl_MaxIndexW32(corr32, 50);
+      corrmax[i] = corr32[lagmax[i]];
+      start = lagmax[i] - 2;
+      stop = lagmax[i] + 2;
+      start = WEBRTC_SPL_MAX(0,  start);
+      stop  = WEBRTC_SPL_MIN(49, stop);
+      for (k=start; k<=stop; k++) {
+        corr32[k] = 0;
+      }
+    }
+    lagmax[2] = WebRtcSpl_MaxIndexW32(corr32, 50);
+    corrmax[2] = corr32[lagmax[2]];
+
+    /* Calculate normalized corr^2 and ener */
+    for (i=0;i<3;i++) {
+      corrSh = 15-WebRtcSpl_GetSizeInBits(corrmax[i]);
+      ener = WebRtcSpl_DotProductWithScale(&regressor[-lagmax[i]],
+                                           &regressor[-lagmax[i]],
+                                           ENH_BLOCKL_HALF, shifts);
+      enerSh = 15-WebRtcSpl_GetSizeInBits(ener);
+      corr16[i] = (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(corrmax[i], corrSh);
+      corr16[i] = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(corr16[i],
+                                                           corr16[i], 16);
+      en16[i] = (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(ener, enerSh);
+      totsh[i] = enerSh - WEBRTC_SPL_LSHIFT_W32(corrSh, 1);
+    }
+
+    /* Compare lagmax[0..3] for the (corr^2)/ener criteria */
+    ind = 0;
+    for (i=1; i<3; i++) {
+      if (totsh[ind] > totsh[i]) {
+        sh = WEBRTC_SPL_MIN(31, totsh[ind]-totsh[i]);
+        if ( WEBRTC_SPL_MUL_16_16(corr16[ind], en16[i]) <
+            WEBRTC_SPL_MUL_16_16_RSFT(corr16[i], en16[ind], sh)) {
+          ind = i;
+        }
+      } else {
+        sh = WEBRTC_SPL_MIN(31, totsh[i]-totsh[ind]);
+        if (WEBRTC_SPL_MUL_16_16_RSFT(corr16[ind], en16[i], sh) <
+            WEBRTC_SPL_MUL_16_16(corr16[i], en16[ind])) {
+          ind = i;
+        }
+      }
+    }
+
+    lag = lagmax[ind] + 10;
+
+    /* Store the estimated lag in the non-downsampled domain */
+    enh_period[ENH_NBLOCKS_TOT-new_blocks+iblock] =
+        (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(lag, 8);
+
+    /* Store the estimated lag for backward PLC */
+    if (iLBCdec_inst->prev_enh_pl==1) {
+      if (!iblock) {
+        tlag = WEBRTC_SPL_MUL_16_16(lag, 2);
+      }
+    } else {
+      if (iblock==1) {
+        tlag = WEBRTC_SPL_MUL_16_16(lag, 2);
+      }
+    }
+
+    lag = WEBRTC_SPL_MUL_16_16(lag, 2);
+  }
+
+  if ((iLBCdec_inst->prev_enh_pl==1)||(iLBCdec_inst->prev_enh_pl==2)) {
+
+    /* Calculate the best lag of the new frame
+       This is used to interpolate backwards and mix with the PLC'd data
+    */
+
+    /* references */
+    target=in;
+    regressor=in+tlag-1;
+
+    /* scaling */
+    max16=WebRtcSpl_MaxAbsValueW16(regressor, (WebRtc_Word16)(plc_blockl+3-1));
+    if (max16>5000)
+      shifts=2;
+    else
+      shifts=0;
+
+    /* compute cross correlation */
+    WebRtcSpl_CrossCorrelation(corr32, target, regressor,
+                               plc_blockl, 3, (WebRtc_Word16)shifts, 1);
+
+    /* find lag */
+    lag=WebRtcSpl_MaxIndexW32(corr32, 3);
+    lag+=tlag-1;
+
+    /* Copy the backward PLC to plc_pred */
+
+    if (iLBCdec_inst->prev_enh_pl==1) {
+      if (lag>plc_blockl) {
+        WEBRTC_SPL_MEMCPY_W16(plc_pred, &in[lag-plc_blockl], plc_blockl);
+      } else {
+        WEBRTC_SPL_MEMCPY_W16(&plc_pred[plc_blockl-lag], in, lag);
+        WEBRTC_SPL_MEMCPY_W16(
+            plc_pred, &enh_buf[ENH_BUFL-iLBCdec_inst->blockl-plc_blockl+lag],
+            (plc_blockl-lag));
+      }
+    } else {
+      int pos;
+
+      pos = plc_blockl;
+
+      while (lag<pos) {
+        WEBRTC_SPL_MEMCPY_W16(&plc_pred[pos-lag], in, lag);
+        pos = pos - lag;
+      }
+      WEBRTC_SPL_MEMCPY_W16(plc_pred, &in[lag-pos], pos);
+
+    }
+
+    if (iLBCdec_inst->prev_enh_pl==1) {
+      /* limit energy change
+         if energy in backward PLC is more than 4 times higher than the forward
+         PLC, then reduce the energy in the backward PLC vector:
+         sample 1...len-16 set energy of the to 4 times forward PLC
+         sample len-15..len interpolate between 4 times fw PLC and bw PLC energy
+
+         Note: Compared to floating point code there is a slight change,
+         the window is 16 samples long instead of 10 samples to simplify the
+         calculations
+      */
+
+      max=WebRtcSpl_MaxAbsValueW16(
+          &enh_buf[ENH_BUFL-iLBCdec_inst->blockl-plc_blockl], plc_blockl);
+      max16=WebRtcSpl_MaxAbsValueW16(plc_pred, plc_blockl);
+      max = WEBRTC_SPL_MAX(max, max16);
+      scale=22-(WebRtc_Word16)WebRtcSpl_NormW32(max);
+      scale=WEBRTC_SPL_MAX(scale,0);
+
+      tmp2 = WebRtcSpl_DotProductWithScale(
+          &enh_buf[ENH_BUFL-iLBCdec_inst->blockl-plc_blockl],
+          &enh_buf[ENH_BUFL-iLBCdec_inst->blockl-plc_blockl],
+          plc_blockl, scale);
+      tmp1 = WebRtcSpl_DotProductWithScale(plc_pred, plc_pred,
+                                           plc_blockl, scale);
+
+      /* Check the energy difference */
+      if ((tmp1>0)&&((tmp1>>2)>tmp2)) {
+        /* EnChange is now guaranteed to be <0.5
+           Calculate EnChange=tmp2/tmp1 in Q16
+        */
+
+        scale1=(WebRtc_Word16)WebRtcSpl_NormW32(tmp1);
+        tmp1=WEBRTC_SPL_SHIFT_W32(tmp1, (scale1-16)); /* using 15 bits */
+
+        tmp2=WEBRTC_SPL_SHIFT_W32(tmp2, (scale1));
+        EnChange = (WebRtc_Word16)WebRtcSpl_DivW32W16(tmp2,
+                                                      (WebRtc_Word16)tmp1);
+
+        /* Calculate the Sqrt of the energy in Q15 ((14+16)/2) */
+        SqrtEnChange = (WebRtc_Word16)WebRtcSpl_SqrtFloor(
+            WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)EnChange, 14));
+
+
+        /* Multiply first part of vector with 2*SqrtEnChange */
+        WebRtcSpl_ScaleVector(plc_pred, plc_pred, SqrtEnChange,
+                              (WebRtc_Word16)(plc_blockl-16), 14);
+
+        /* Calculate increase parameter for window part (16 last samples) */
+        /* (1-2*SqrtEnChange)/16 in Q15 */
+        inc=(2048-WEBRTC_SPL_RSHIFT_W16(SqrtEnChange, 3));
+
+        win=0;
+        tmpW16ptr=&plc_pred[plc_blockl-16];
+
+        for (i=16;i>0;i--) {
+          (*tmpW16ptr)=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
+              (*tmpW16ptr), (SqrtEnChange+(win>>1)), 14);
+          /* multiply by (2.0*SqrtEnChange+win) */
+
+          win += inc;
+          tmpW16ptr++;
+        }
+      }
+
+      /* Make the linear interpolation between the forward PLC'd data
+         and the backward PLC'd data (from the new frame)
+      */
+
+      if (plc_blockl==40) {
+        inc=400; /* 1/41 in Q14 */
+      } else { /* plc_blockl==80 */
+        inc=202; /* 1/81 in Q14 */
+      }
+      win=0;
+      enh_bufPtr1=&enh_buf[ENH_BUFL-1-iLBCdec_inst->blockl];
+      for (i=0; i<plc_blockl; i++) {
+        win+=inc;
+        *enh_bufPtr1 =
+            (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT((*enh_bufPtr1), win, 14);
+        *enh_bufPtr1 += (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(
+                (16384-win), plc_pred[plc_blockl-1-i], 14);
+        enh_bufPtr1--;
+      }
+    } else {
+      WebRtc_Word16 *synt = &downsampled[LPC_FILTERORDER];
+
+      enh_bufPtr1=&enh_buf[ENH_BUFL-iLBCdec_inst->blockl-plc_blockl];
+      WEBRTC_SPL_MEMCPY_W16(enh_bufPtr1, plc_pred, plc_blockl);
+
+      /* Clear fileter memory */
+      WebRtcSpl_MemSetW16(iLBCdec_inst->syntMem, 0, LPC_FILTERORDER);
+      WebRtcSpl_MemSetW16(iLBCdec_inst->hpimemy, 0, 4);
+      WebRtcSpl_MemSetW16(iLBCdec_inst->hpimemx, 0, 2);
+
+      /* Initialize filter memory by filtering through 2 lags */
+      WEBRTC_SPL_MEMCPY_W16(&synt[-LPC_FILTERORDER], iLBCdec_inst->syntMem,
+                            LPC_FILTERORDER);
+      WebRtcSpl_FilterARFastQ12(
+          enh_bufPtr1,
+          synt,
+          &iLBCdec_inst->old_syntdenum[
+                                       (iLBCdec_inst->nsub-1)*(LPC_FILTERORDER+1)],
+                                       LPC_FILTERORDER+1, (WebRtc_Word16)lag);
+
+      WEBRTC_SPL_MEMCPY_W16(&synt[-LPC_FILTERORDER], &synt[lag-LPC_FILTERORDER],
+                            LPC_FILTERORDER);
+      WebRtcIlbcfix_HpOutput(synt, (WebRtc_Word16*)WebRtcIlbcfix_kHpOutCoefs,
+                             iLBCdec_inst->hpimemy, iLBCdec_inst->hpimemx,
+                             (WebRtc_Word16)lag);
+      WebRtcSpl_FilterARFastQ12(
+          enh_bufPtr1, synt,
+          &iLBCdec_inst->old_syntdenum[
+                                       (iLBCdec_inst->nsub-1)*(LPC_FILTERORDER+1)],
+                                       LPC_FILTERORDER+1, (WebRtc_Word16)lag);
+
+      WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->syntMem, &synt[lag-LPC_FILTERORDER],
+                            LPC_FILTERORDER);
+      WebRtcIlbcfix_HpOutput(synt, (WebRtc_Word16*)WebRtcIlbcfix_kHpOutCoefs,
+                             iLBCdec_inst->hpimemy, iLBCdec_inst->hpimemx,
+                             (WebRtc_Word16)lag);
+    }
+  }
+
+
+  /* Perform enhancement block by block */
+
+  for (iblock = 0; iblock<new_blocks; iblock++) {
+    WebRtcIlbcfix_Enhancer(out+WEBRTC_SPL_MUL_16_16(iblock, ENH_BLOCKL),
+                           enh_buf,
+                           ENH_BUFL,
+                           (WebRtc_Word16)(WEBRTC_SPL_MUL_16_16(iblock, ENH_BLOCKL)+startPos),
+                           enh_period,
+                           (WebRtc_Word16*)WebRtcIlbcfix_kEnhPlocs, ENH_NBLOCKS_TOT);
+  }
+
+  return (lag);
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/enhancer_interface.h b/src/modules/audio_coding/codecs/ilbc/enhancer_interface.h
new file mode 100644
index 0000000..37b27e2
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/enhancer_interface.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_EnhancerInterface.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENHANCER_INTERFACE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_ENHANCER_INTERFACE_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * interface for enhancer
+ *---------------------------------------------------------------*/
+
+int WebRtcIlbcfix_EnhancerInterface( /* (o) Estimated lag in end of in[] */
+    WebRtc_Word16 *out,     /* (o) enhanced signal */
+    WebRtc_Word16 *in,      /* (i) unenhanced signal */
+    iLBC_Dec_Inst_t *iLBCdec_inst /* (i) buffers etc */
+                                        );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.c b/src/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.c
new file mode 100644
index 0000000..7cece26
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.c
@@ -0,0 +1,48 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_FilteredCbVecs.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  Construct an additional codebook vector by filtering the
+ *  initial codebook buffer. This vector is then used to expand
+ *  the codebook with an additional section.
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_FilteredCbVecs(
+    WebRtc_Word16 *cbvectors, /* (o) Codebook vector for the higher section */
+    WebRtc_Word16 *CBmem,  /* (i) Codebook memory that is filtered to create a
+                                           second CB section */
+    int lMem,  /* (i) Length of codebook memory */
+    WebRtc_Word16 samples    /* (i) Number of samples to filter */
+                                  ) {
+
+  /* Set up the memory, start with zero state */
+  WebRtcSpl_MemSetW16(CBmem+lMem, 0, CB_HALFFILTERLEN);
+  WebRtcSpl_MemSetW16(CBmem-CB_HALFFILTERLEN, 0, CB_HALFFILTERLEN);
+  WebRtcSpl_MemSetW16(cbvectors, 0, lMem-samples);
+
+  /* Filter to obtain the filtered CB memory */
+
+  WebRtcSpl_FilterMAFastQ12(
+      CBmem+CB_HALFFILTERLEN+lMem-samples, cbvectors+lMem-samples,
+      (WebRtc_Word16*)WebRtcIlbcfix_kCbFiltersRev, CB_FILTERLEN, samples);
+
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.h b/src/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.h
new file mode 100644
index 0000000..c502e8f
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/filtered_cb_vecs.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_FilteredCbVecs.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_FILTERED_CB_VECS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_FILTERED_CB_VECS_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Construct an additional codebook vector by filtering the
+ *  initial codebook buffer. This vector is then used to expand
+ *  the codebook with an additional section.
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_FilteredCbVecs(
+    WebRtc_Word16 *cbvectors, /* (o) Codebook vector for the higher section */
+    WebRtc_Word16 *CBmem,  /* (i) Codebook memory that is filtered to create a
+                                           second CB section */
+    int lMem,  /* (i) Length of codebook memory */
+    WebRtc_Word16 samples    /* (i) Number of samples to filter */
+                                  );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/frame_classify.c b/src/modules/audio_coding/codecs/ilbc/frame_classify.c
new file mode 100644
index 0000000..ea3675e
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/frame_classify.c
@@ -0,0 +1,88 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_FrameClassify.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  Classification of subframes to localize start state
+ *---------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_FrameClassify(
+    /* (o) Index to the max-energy sub frame */
+    iLBC_Enc_Inst_t *iLBCenc_inst,
+    /* (i/o) the encoder state structure */
+    WebRtc_Word16 *residualFIX /* (i) lpc residual signal */
+                                                ){
+  WebRtc_Word16 max, scale;
+  WebRtc_Word32 ssqEn[NSUB_MAX-1];
+  WebRtc_Word16 *ssqPtr;
+  WebRtc_Word32 *seqEnPtr;
+  WebRtc_Word32 maxW32;
+  WebRtc_Word16 scale1;
+  WebRtc_Word16 pos;
+  int n;
+
+  /*
+    Calculate the energy of each of the 80 sample blocks
+    in the draft the 4 first and last samples are windowed with 1/5...4/5
+    and 4/5...1/5 respectively. To simplify for the fixpoint we have changed
+    this to 0 0 1 1 and 1 1 0 0
+  */
+
+  max = WebRtcSpl_MaxAbsValueW16(residualFIX, iLBCenc_inst->blockl);
+  scale=WebRtcSpl_GetSizeInBits(WEBRTC_SPL_MUL_16_16(max,max));
+
+  /* Scale to maximum 24 bits so that it won't overflow for 76 samples */
+  scale = scale-24;
+  scale1 = WEBRTC_SPL_MAX(0, scale);
+
+  /* Calculate energies */
+  ssqPtr=residualFIX + 2;
+  seqEnPtr=ssqEn;
+  for (n=(iLBCenc_inst->nsub-1); n>0; n--) {
+    (*seqEnPtr) = WebRtcSpl_DotProductWithScale(ssqPtr, ssqPtr, 76, scale1);
+    ssqPtr += 40;
+    seqEnPtr++;
+  }
+
+  /* Scale to maximum 20 bits in order to allow for the 11 bit window */
+  maxW32 = WebRtcSpl_MaxValueW32(ssqEn, (WebRtc_Word16)(iLBCenc_inst->nsub-1));
+  scale = WebRtcSpl_GetSizeInBits(maxW32) - 20;
+  scale1 = WEBRTC_SPL_MAX(0, scale);
+
+  /* Window each 80 block with the ssqEn_winTbl window to give higher probability for
+     the blocks in the middle
+  */
+  seqEnPtr=ssqEn;
+  if (iLBCenc_inst->mode==20) {
+    ssqPtr=(WebRtc_Word16*)WebRtcIlbcfix_kStartSequenceEnrgWin+1;
+  } else {
+    ssqPtr=(WebRtc_Word16*)WebRtcIlbcfix_kStartSequenceEnrgWin;
+  }
+  for (n=(iLBCenc_inst->nsub-1); n>0; n--) {
+    (*seqEnPtr)=WEBRTC_SPL_MUL(((*seqEnPtr)>>scale1), (*ssqPtr));
+    seqEnPtr++;
+    ssqPtr++;
+  }
+
+  /* Extract the best choise of start state */
+  pos = WebRtcSpl_MaxIndexW32(ssqEn, (WebRtc_Word16)(iLBCenc_inst->nsub-1)) + 1;
+
+  return(pos);
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/frame_classify.h b/src/modules/audio_coding/codecs/ilbc/frame_classify.h
new file mode 100644
index 0000000..faf4666
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/frame_classify.h
@@ -0,0 +1,29 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_FrameClassify.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_FRAME_CLASSIFY_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_FRAME_CLASSIFY_H_
+
+WebRtc_Word16 WebRtcIlbcfix_FrameClassify(
+    /* (o) Index to the max-energy sub frame */
+    iLBC_Enc_Inst_t *iLBCenc_inst,
+    /* (i/o) the encoder state structure */
+    WebRtc_Word16 *residualFIX /* (i) lpc residual signal */
+                                                );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/gain_dequant.c b/src/modules/audio_coding/codecs/ilbc/gain_dequant.c
new file mode 100644
index 0000000..9450a80
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/gain_dequant.c
@@ -0,0 +1,45 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_GainDequant.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  decoder for quantized gains in the gain-shape coding of
+ *  residual
+ *---------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_GainDequant(
+    /* (o) quantized gain value (Q14) */
+    WebRtc_Word16 index, /* (i) quantization index */
+    WebRtc_Word16 maxIn, /* (i) maximum of unquantized gain (Q14) */
+    WebRtc_Word16 stage /* (i) The stage of the search */
+                                                ){
+  WebRtc_Word16 scale;
+  const WebRtc_Word16 *gain;
+
+  /* obtain correct scale factor */
+
+  scale=WEBRTC_SPL_ABS_W16(maxIn);
+  scale = WEBRTC_SPL_MAX(1638, scale);  /* if lower than 0.1, set it to 0.1 */
+
+  /* select the quantization table and return the decoded value */
+  gain = WebRtcIlbcfix_kGain[stage];
+
+  return((WebRtc_Word16)((WEBRTC_SPL_MUL_16_16(scale, gain[index])+8192)>>14));
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/gain_dequant.h b/src/modules/audio_coding/codecs/ilbc/gain_dequant.h
new file mode 100644
index 0000000..28f2ceb
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/gain_dequant.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_GainDequant.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GAIN_DEQUANT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GAIN_DEQUANT_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  decoder for quantized gains in the gain-shape coding of
+ *  residual
+ *---------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_GainDequant(
+    /* (o) quantized gain value (Q14) */
+    WebRtc_Word16 index, /* (i) quantization index */
+    WebRtc_Word16 maxIn, /* (i) maximum of unquantized gain (Q14) */
+    WebRtc_Word16 stage /* (i) The stage of the search */
+                                         );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/gain_quant.c b/src/modules/audio_coding/codecs/ilbc/gain_quant.c
new file mode 100644
index 0000000..bdf88a5
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/gain_quant.c
@@ -0,0 +1,106 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_GainQuant.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  quantizer for the gain in the gain-shape coding of residual
+ *---------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_GainQuant( /* (o) quantized gain value */
+    WebRtc_Word16 gain, /* (i) gain value Q14 */
+    WebRtc_Word16 maxIn, /* (i) maximum of gain value Q14 */
+    WebRtc_Word16 stage, /* (i) The stage of the search */
+    WebRtc_Word16 *index /* (o) quantization index */
+                                        ) {
+
+  WebRtc_Word16 scale, returnVal, cblen;
+  WebRtc_Word32 gainW32, measure1, measure2;
+  const WebRtc_Word16 *cbPtr, *cb;
+  int loc, noMoves, noChecks, i;
+
+  /* ensure a lower bound (0.1) on the scaling factor */
+
+  scale = WEBRTC_SPL_MAX(1638, maxIn);
+
+  /* select the quantization table and calculate
+     the length of the table and the number of
+     steps in the binary search that are needed */
+  cb = WebRtcIlbcfix_kGain[stage];
+  cblen = 32>>stage;
+  noChecks = 4-stage;
+
+  /* Multiply the gain with 2^14 to make the comparison
+     easier and with higher precision */
+  gainW32 = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)gain, 14);
+
+  /* Do a binary search, starting in the middle of the CB
+     loc - defines the current position in the table
+     noMoves - defines the number of steps to move in the CB in order
+     to get next CB location
+  */
+
+  loc = cblen>>1;
+  noMoves = loc;
+  cbPtr = cb + loc; /* Centre of CB */
+
+  for (i=noChecks;i>0;i--) {
+    noMoves>>=1;
+    measure1=WEBRTC_SPL_MUL_16_16(scale, (*cbPtr));
+
+    /* Move up if gain is larger, otherwise move down in table */
+    measure1 = measure1 - gainW32;
+
+    if (0>measure1) {
+      cbPtr+=noMoves;
+      loc+=noMoves;
+    } else {
+      cbPtr-=noMoves;
+      loc-=noMoves;
+    }
+  }
+
+  /* Check which value is the closest one: loc-1, loc or loc+1 */
+
+  measure1=WEBRTC_SPL_MUL_16_16(scale, (*cbPtr));
+  if (gainW32>measure1) {
+    /* Check against value above loc */
+    measure2=WEBRTC_SPL_MUL_16_16(scale, (*(cbPtr+1)));
+    if ((measure2-gainW32)<(gainW32-measure1)) {
+      loc+=1;
+    }
+  } else {
+    /* Check against value below loc */
+    measure2=WEBRTC_SPL_MUL_16_16(scale, (*(cbPtr-1)));
+    if ((gainW32-measure2)<=(measure1-gainW32)) {
+      loc-=1;
+    }
+  }
+
+  /* Guard against getting outside the table. The calculation above can give a location
+     which is one above the maximum value (in very rare cases) */
+  loc=WEBRTC_SPL_MIN(loc, (cblen-1));
+  *index=loc;
+
+  /* Calculate the quantized gain value (in Q14) */
+  returnVal=(WebRtc_Word16)((WEBRTC_SPL_MUL_16_16(scale, cb[loc])+8192)>>14);
+
+  /* return the quantized value */
+  return(returnVal);
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/gain_quant.h b/src/modules/audio_coding/codecs/ilbc/gain_quant.h
new file mode 100644
index 0000000..a2f0596
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/gain_quant.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_GainQuant.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GAIN_QUANT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GAIN_QUANT_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  quantizer for the gain in the gain-shape coding of residual
+ *---------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_GainQuant( /* (o) quantized gain value */
+    WebRtc_Word16 gain, /* (i) gain value Q14 */
+    WebRtc_Word16 maxIn, /* (i) maximum of gain value Q14 */
+    WebRtc_Word16 stage, /* (i) The stage of the search */
+    WebRtc_Word16 *index /* (o) quantization index */
+                                       );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/get_cd_vec.c b/src/modules/audio_coding/codecs/ilbc/get_cd_vec.c
new file mode 100644
index 0000000..aba3e31
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/get_cd_vec.c
@@ -0,0 +1,111 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_GetCbVec.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "create_augmented_vec.h"
+
+/*----------------------------------------------------------------*
+ *  Construct codebook vector for given index.
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_GetCbVec(
+    WebRtc_Word16 *cbvec,   /* (o) Constructed codebook vector */
+    WebRtc_Word16 *mem,   /* (i) Codebook buffer */
+    WebRtc_Word16 index,   /* (i) Codebook index */
+    WebRtc_Word16 lMem,   /* (i) Length of codebook buffer */
+    WebRtc_Word16 cbveclen   /* (i) Codebook vector length */
+                            ){
+  WebRtc_Word16 k, base_size;
+  WebRtc_Word16 lag;
+  /* Stack based */
+  WebRtc_Word16 tempbuff2[SUBL+5];
+
+  /* Determine size of codebook sections */
+
+  base_size=lMem-cbveclen+1;
+
+  if (cbveclen==SUBL) {
+    base_size+=WEBRTC_SPL_RSHIFT_W16(cbveclen,1);
+  }
+
+  /* No filter -> First codebook section */
+
+  if (index<lMem-cbveclen+1) {
+
+    /* first non-interpolated vectors */
+
+    k=index+cbveclen;
+    /* get vector */
+    WEBRTC_SPL_MEMCPY_W16(cbvec, mem+lMem-k, cbveclen);
+
+  } else if (index < base_size) {
+
+    /* Calculate lag */
+
+    k=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16(2, (index-(lMem-cbveclen+1)))+cbveclen;
+
+    lag=WEBRTC_SPL_RSHIFT_W16(k, 1);
+
+    WebRtcIlbcfix_CreateAugmentedVec(lag, mem+lMem, cbvec);
+
+  }
+
+  /* Higher codebbok section based on filtering */
+
+  else {
+
+    WebRtc_Word16 memIndTest;
+
+    /* first non-interpolated vectors */
+
+    if (index-base_size<lMem-cbveclen+1) {
+
+      /* Set up filter memory, stuff zeros outside memory buffer */
+
+      memIndTest = lMem-(index-base_size+cbveclen);
+
+      WebRtcSpl_MemSetW16(mem-CB_HALFFILTERLEN, 0, CB_HALFFILTERLEN);
+      WebRtcSpl_MemSetW16(mem+lMem, 0, CB_HALFFILTERLEN);
+
+      /* do filtering to get the codebook vector */
+
+      WebRtcSpl_FilterMAFastQ12(
+          &mem[memIndTest+4], cbvec, (WebRtc_Word16*)WebRtcIlbcfix_kCbFiltersRev,
+          CB_FILTERLEN, cbveclen);
+    }
+
+    /* interpolated vectors */
+
+    else {
+      /* Stuff zeros outside memory buffer  */
+      memIndTest = lMem-cbveclen-CB_FILTERLEN;
+      WebRtcSpl_MemSetW16(mem+lMem, 0, CB_HALFFILTERLEN);
+
+      /* do filtering */
+      WebRtcSpl_FilterMAFastQ12(
+          &mem[memIndTest+7], tempbuff2, (WebRtc_Word16*)WebRtcIlbcfix_kCbFiltersRev,
+          CB_FILTERLEN, (WebRtc_Word16)(cbveclen+5));
+
+      /* Calculate lag index */
+      lag = (cbveclen<<1)-20+index-base_size-lMem-1;
+
+      WebRtcIlbcfix_CreateAugmentedVec(lag, tempbuff2+SUBL+5, cbvec);
+    }
+  }
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/get_cd_vec.h b/src/modules/audio_coding/codecs/ilbc/get_cd_vec.h
new file mode 100644
index 0000000..99b5d4e
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/get_cd_vec.h
@@ -0,0 +1,30 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_GetCbVec.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GET_CD_VEC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GET_CD_VEC_H_
+
+void WebRtcIlbcfix_GetCbVec(
+    WebRtc_Word16 *cbvec,   /* (o) Constructed codebook vector */
+    WebRtc_Word16 *mem,   /* (i) Codebook buffer */
+    WebRtc_Word16 index,   /* (i) Codebook index */
+    WebRtc_Word16 lMem,   /* (i) Length of codebook buffer */
+    WebRtc_Word16 cbveclen   /* (i) Codebook vector length */
+                            );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/get_lsp_poly.c b/src/modules/audio_coding/codecs/ilbc/get_lsp_poly.c
new file mode 100644
index 0000000..c55e918
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/get_lsp_poly.c
@@ -0,0 +1,83 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_GetLspPoly.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * Construct the polynomials F1(z) and F2(z) from the LSP
+ * (Computations are done in Q24)
+ *
+ * The expansion is performed using the following recursion:
+ *
+ * f[0] = 1;
+ * tmp = -2.0 * lsp[0];
+ * f[1] = tmp;
+ * for (i=2; i<=5; i++) {
+ *    b = -2.0 * lsp[2*i-2];
+ *    f[i] = tmp*f[i-1] + 2.0*f[i-2];
+ *    for (j=i; j>=2; j--) {
+ *       f[j] = f[j] + tmp*f[j-1] + f[j-2];
+ *    }
+ *    f[i] = f[i] + tmp;
+ * }
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_GetLspPoly(
+    WebRtc_Word16 *lsp, /* (i) LSP in Q15 */
+    WebRtc_Word32 *f)  /* (o) polonymial in Q24 */
+{
+  WebRtc_Word32 tmpW32;
+  int i, j;
+  WebRtc_Word16 high, low;
+  WebRtc_Word16 *lspPtr;
+  WebRtc_Word32 *fPtr;
+
+  lspPtr = lsp;
+  fPtr = f;
+  /* f[0] = 1.0 (Q24) */
+  (*fPtr) = (WebRtc_Word32)16777216;
+  fPtr++;
+
+  (*fPtr) = WEBRTC_SPL_MUL((*lspPtr), -1024);
+  fPtr++;
+  lspPtr+=2;
+
+  for(i=2; i<=5; i++)
+  {
+    (*fPtr) = fPtr[-2];
+
+    for(j=i; j>1; j--)
+    {
+      /* Compute f[j] = f[j] + tmp*f[j-1] + f[j-2]; */
+      high = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(fPtr[-1], 16);
+      low = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(fPtr[-1]-WEBRTC_SPL_LSHIFT_W32(((WebRtc_Word32)high),16), 1);
+
+      tmpW32 = WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_16(high, (*lspPtr)), 2) +
+          WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_16_RSFT(low, (*lspPtr), 15), 2);
+
+      (*fPtr) += fPtr[-2];
+      (*fPtr) -= tmpW32;
+      fPtr--;
+    }
+    (*fPtr) -= (WebRtc_Word32)WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)(*lspPtr), 10);
+
+    fPtr+=i;
+    lspPtr+=2;
+  }
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/get_lsp_poly.h b/src/modules/audio_coding/codecs/ilbc/get_lsp_poly.h
new file mode 100644
index 0000000..b0520b4
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/get_lsp_poly.h
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_GetLspPoly.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GET_LSP_POLY_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GET_LSP_POLY_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * Construct the polynomials F1(z) and F2(z) from the LSP
+ * (Computations are done in Q24)
+ *
+ * The expansion is performed using the following recursion:
+ *
+ * f[0] = 1;
+ * tmp = -2.0 * lsp[0];
+ * f[1] = tmp;
+ * for (i=2; i<=5; i++) {
+ *    b = -2.0 * lsp[2*i-2];
+ *    f[i] = tmp*f[i-1] + 2.0*f[i-2];
+ *    for (j=i; j>=2; j--) {
+ *       f[j] = f[j] + tmp*f[j-1] + f[j-2];
+ *    }
+ *    f[i] = f[i] + tmp;
+ * }
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_GetLspPoly(
+    WebRtc_Word16 *lsp, /* (i) LSP in Q15 */
+    WebRtc_Word32 *f);  /* (o) polonymial in Q24 */
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/get_sync_seq.c b/src/modules/audio_coding/codecs/ilbc/get_sync_seq.c
new file mode 100644
index 0000000..ce72865
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/get_sync_seq.c
@@ -0,0 +1,108 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_GetSyncSeq.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "refiner.h"
+#include "nearest_neighbor.h"
+
+/*----------------------------------------------------------------*
+ * get the pitch-synchronous sample sequence
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_GetSyncSeq(
+    WebRtc_Word16 *idata,   /* (i) original data */
+    WebRtc_Word16 idatal,   /* (i) dimension of data */
+    WebRtc_Word16 centerStartPos, /* (i) where current block starts */
+    WebRtc_Word16 *period,   /* (i) rough-pitch-period array       (Q-2) */
+    WebRtc_Word16 *plocs,   /* (i) where periods of period array are taken (Q-2) */
+    WebRtc_Word16 periodl,   /* (i) dimension period array */
+    WebRtc_Word16 hl,    /* (i) 2*hl+1 is the number of sequences */
+    WebRtc_Word16 *surround  /* (i/o) The contribution from this sequence
+                                summed with earlier contributions */
+                              ){
+  WebRtc_Word16 i,centerEndPos,q;
+  /* Stack based */
+  WebRtc_Word16 lagBlock[2*ENH_HL+1];
+  WebRtc_Word16 blockStartPos[2*ENH_HL+1]; /* Defines the position to search around (Q2) */
+  WebRtc_Word16 plocs2[ENH_PLOCSL];
+
+  centerEndPos=centerStartPos+ENH_BLOCKL-1;
+
+  /* present (find predicted lag from this position) */
+
+  WebRtcIlbcfix_NearestNeighbor(lagBlock+hl,plocs,
+                                (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(2, (centerStartPos+centerEndPos)),
+                                periodl);
+
+  blockStartPos[hl]=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16(4, centerStartPos);
+
+  /* past (find predicted position and perform a refined
+     search to find the best sequence) */
+
+  for(q=hl-1;q>=0;q--) {
+    blockStartPos[q]=blockStartPos[q+1]-period[lagBlock[q+1]];
+
+    WebRtcIlbcfix_NearestNeighbor(lagBlock+q, plocs,
+                                  (WebRtc_Word16)(blockStartPos[q] + (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(4, ENH_BLOCKL_HALF)-period[lagBlock[q+1]]),
+                                  periodl);
+
+    if((blockStartPos[q]-(WebRtc_Word16)WEBRTC_SPL_MUL_16_16(4, ENH_OVERHANG))>=0) {
+
+      /* Find the best possible sequence in the 4 times upsampled
+         domain around blockStartPos+q */
+      WebRtcIlbcfix_Refiner(blockStartPos+q,idata,idatal,
+                            centerStartPos,blockStartPos[q],surround,WebRtcIlbcfix_kEnhWt[q]);
+
+    } else {
+      /* Don't add anything since this sequence would
+         be outside the buffer */
+    }
+  }
+
+  /* future (find predicted position and perform a refined
+     search to find the best sequence) */
+
+  for(i=0;i<periodl;i++) {
+    plocs2[i]=(plocs[i]-period[i]);
+  }
+
+  for(q=hl+1;q<=WEBRTC_SPL_MUL_16_16(2, hl);q++) {
+
+    WebRtcIlbcfix_NearestNeighbor(lagBlock+q,plocs2,
+                                  (WebRtc_Word16)(blockStartPos[q-1]+
+                                                  (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(4, ENH_BLOCKL_HALF)),periodl);
+
+    blockStartPos[q]=blockStartPos[q-1]+period[lagBlock[q]];
+
+    if( (blockStartPos[q]+(WebRtc_Word16)WEBRTC_SPL_MUL_16_16(4, (ENH_BLOCKL+ENH_OVERHANG)))
+        <
+        (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(4, idatal)) {
+
+      /* Find the best possible sequence in the 4 times upsampled
+         domain around blockStartPos+q */
+      WebRtcIlbcfix_Refiner(blockStartPos+q, idata, idatal,
+                            centerStartPos,blockStartPos[q],surround,WebRtcIlbcfix_kEnhWt[2*hl-q]);
+
+    }
+    else {
+      /* Don't add anything since this sequence would
+         be outside the buffer */
+    }
+  }
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/get_sync_seq.h b/src/modules/audio_coding/codecs/ilbc/get_sync_seq.h
new file mode 100644
index 0000000..a0ffd39
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/get_sync_seq.h
@@ -0,0 +1,40 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_GetSyncSeq.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GET_SYNC_SEQ_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_GET_SYNC_SEQ_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * get the pitch-synchronous sample sequence
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_GetSyncSeq(
+    WebRtc_Word16 *idata,   /* (i) original data */
+    WebRtc_Word16 idatal,   /* (i) dimension of data */
+    WebRtc_Word16 centerStartPos, /* (i) where current block starts */
+    WebRtc_Word16 *period,   /* (i) rough-pitch-period array       (Q-2) */
+    WebRtc_Word16 *plocs,   /* (i) where periods of period array are taken (Q-2) */
+    WebRtc_Word16 periodl,   /* (i) dimension period array */
+    WebRtc_Word16 hl,    /* (i) 2*hl+1 is the number of sequences */
+    WebRtc_Word16 *surround  /* (i/o) The contribution from this sequence
+                                summed with earlier contributions */
+                              );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/hp_input.c b/src/modules/audio_coding/codecs/ilbc/hp_input.c
new file mode 100644
index 0000000..f202f62
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/hp_input.c
@@ -0,0 +1,88 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_HpInput.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  high-pass filter of input with *0.5 and saturation
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_HpInput(
+    WebRtc_Word16 *signal,     /* (i/o) signal vector */
+    WebRtc_Word16 *ba,      /* (i)   B- and A-coefficients (2:nd order)
+                                                                   {b[0] b[1] b[2] -a[1] -a[2]} a[0]
+                                                                   is assumed to be 1.0 */
+    WebRtc_Word16 *y,      /* (i/o) Filter state yhi[n-1] ylow[n-1]
+                                                                   yhi[n-2] ylow[n-2] */
+    WebRtc_Word16 *x,      /* (i/o) Filter state x[n-1] x[n-2] */
+    WebRtc_Word16 len)      /* (i)   Number of samples to filter */
+{
+  int i;
+  WebRtc_Word32 tmpW32;
+  WebRtc_Word32 tmpW32b;
+
+  for (i=0; i<len; i++) {
+
+    /*
+        y[i] = b[0]*x[i] + b[1]*x[i-1] + b[2]*x[i-2]
+        + (-a[1])*y[i-1] + (-a[2])*y[i-2];
+    */
+
+    tmpW32  = WEBRTC_SPL_MUL_16_16(y[1], ba[3]);     /* (-a[1])*y[i-1] (low part) */
+    tmpW32 += WEBRTC_SPL_MUL_16_16(y[3], ba[4]);     /* (-a[2])*y[i-2] (low part) */
+    tmpW32 = (tmpW32>>15);
+    tmpW32 += WEBRTC_SPL_MUL_16_16(y[0], ba[3]);     /* (-a[1])*y[i-1] (high part) */
+    tmpW32 += WEBRTC_SPL_MUL_16_16(y[2], ba[4]);     /* (-a[2])*y[i-2] (high part) */
+    tmpW32 = (tmpW32<<1);
+
+    tmpW32 += WEBRTC_SPL_MUL_16_16(signal[i], ba[0]);   /* b[0]*x[0] */
+    tmpW32 += WEBRTC_SPL_MUL_16_16(x[0],      ba[1]);   /* b[1]*x[i-1] */
+    tmpW32 += WEBRTC_SPL_MUL_16_16(x[1],      ba[2]);   /* b[2]*x[i-2] */
+
+    /* Update state (input part) */
+    x[1] = x[0];
+    x[0] = signal[i];
+
+    /* Rounding in Q(12+1), i.e. add 2^12 */
+    tmpW32b = tmpW32 + 4096;
+
+    /* Saturate (to 2^28) so that the HP filtered signal does not overflow */
+    tmpW32b = WEBRTC_SPL_SAT((WebRtc_Word32)268435455, tmpW32b, (WebRtc_Word32)-268435456);
+
+    /* Convert back to Q0 and multiply with 0.5 */
+    signal[i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmpW32b, 13);
+
+    /* Update state (filtered part) */
+    y[2] = y[0];
+    y[3] = y[1];
+
+    /* upshift tmpW32 by 3 with saturation */
+    if (tmpW32>268435455) {
+      tmpW32 = WEBRTC_SPL_WORD32_MAX;
+    } else if (tmpW32<-268435456) {
+      tmpW32 = WEBRTC_SPL_WORD32_MIN;
+    } else {
+      tmpW32 = WEBRTC_SPL_LSHIFT_W32(tmpW32, 3);
+    }
+
+    y[0] = (WebRtc_Word16)(tmpW32 >> 16);
+    y[1] = (WebRtc_Word16)((tmpW32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)y[0], 16))>>1);
+  }
+
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/hp_input.h b/src/modules/audio_coding/codecs/ilbc/hp_input.h
new file mode 100644
index 0000000..f56c4f7
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/hp_input.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_HpInput.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_HP_INPUT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_HP_INPUT_H_
+
+#include "defines.h"
+
+void WebRtcIlbcfix_HpInput(
+    WebRtc_Word16 *signal,     /* (i/o) signal vector */
+    WebRtc_Word16 *ba,      /* (i)   B- and A-coefficients (2:nd order)
+                                                                   {b[0] b[1] b[2] -a[1] -a[2]} a[0]
+                                                                   is assumed to be 1.0 */
+    WebRtc_Word16 *y,      /* (i/o) Filter state yhi[n-1] ylow[n-1]
+                                                                   yhi[n-2] ylow[n-2] */
+    WebRtc_Word16 *x,      /* (i/o) Filter state x[n-1] x[n-2] */
+    WebRtc_Word16 len);     /* (i)   Number of samples to filter */
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/hp_output.c b/src/modules/audio_coding/codecs/ilbc/hp_output.c
new file mode 100644
index 0000000..8e1c919
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/hp_output.c
@@ -0,0 +1,89 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_HpOutput.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  high-pass filter of output and *2 with saturation
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_HpOutput(
+    WebRtc_Word16 *signal,     /* (i/o) signal vector */
+    WebRtc_Word16 *ba,      /* (i)   B- and A-coefficients (2:nd order)
+                                                                   {b[0] b[1] b[2] -a[1] -a[2]} a[0]
+                                                                   is assumed to be 1.0 */
+    WebRtc_Word16 *y,      /* (i/o) Filter state yhi[n-1] ylow[n-1]
+                                                                   yhi[n-2] ylow[n-2] */
+    WebRtc_Word16 *x,      /* (i/o) Filter state x[n-1] x[n-2] */
+    WebRtc_Word16 len)      /* (i)   Number of samples to filter */
+{
+  int i;
+  WebRtc_Word32 tmpW32;
+  WebRtc_Word32 tmpW32b;
+
+  for (i=0; i<len; i++) {
+
+    /*
+      y[i] = b[0]*x[i] + b[1]*x[i-1] + b[2]*x[i-2]
+      + (-a[1])*y[i-1] + (-a[2])*y[i-2];
+    */
+
+    tmpW32  = WEBRTC_SPL_MUL_16_16(y[1], ba[3]);     /* (-a[1])*y[i-1] (low part) */
+    tmpW32 += WEBRTC_SPL_MUL_16_16(y[3], ba[4]);     /* (-a[2])*y[i-2] (low part) */
+    tmpW32 = (tmpW32>>15);
+    tmpW32 += WEBRTC_SPL_MUL_16_16(y[0], ba[3]);     /* (-a[1])*y[i-1] (high part) */
+    tmpW32 += WEBRTC_SPL_MUL_16_16(y[2], ba[4]);     /* (-a[2])*y[i-2] (high part) */
+    tmpW32 = (tmpW32<<1);
+
+    tmpW32 += WEBRTC_SPL_MUL_16_16(signal[i], ba[0]);   /* b[0]*x[0] */
+    tmpW32 += WEBRTC_SPL_MUL_16_16(x[0],      ba[1]);   /* b[1]*x[i-1] */
+    tmpW32 += WEBRTC_SPL_MUL_16_16(x[1],      ba[2]);   /* b[2]*x[i-2] */
+
+    /* Update state (input part) */
+    x[1] = x[0];
+    x[0] = signal[i];
+
+    /* Rounding in Q(12-1), i.e. add 2^10 */
+    tmpW32b = tmpW32 + 1024;
+
+    /* Saturate (to 2^26) so that the HP filtered signal does not overflow */
+    tmpW32b = WEBRTC_SPL_SAT((WebRtc_Word32)67108863, tmpW32b, (WebRtc_Word32)-67108864);
+
+    /* Convert back to Q0 and multiply with 2 */
+    signal[i] = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(tmpW32b, 11);
+
+    /* Update state (filtered part) */
+    y[2] = y[0];
+    y[3] = y[1];
+
+    /* upshift tmpW32 by 3 with saturation */
+    if (tmpW32>268435455) {
+      tmpW32 = WEBRTC_SPL_WORD32_MAX;
+    } else if (tmpW32<-268435456) {
+      tmpW32 = WEBRTC_SPL_WORD32_MIN;
+    } else {
+      tmpW32 = WEBRTC_SPL_LSHIFT_W32(tmpW32, 3);
+    }
+
+    y[0] = (WebRtc_Word16)(tmpW32 >> 16);
+    y[1] = (WebRtc_Word16)((tmpW32 - WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)y[0], 16))>>1);
+
+  }
+
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/hp_output.h b/src/modules/audio_coding/codecs/ilbc/hp_output.h
new file mode 100644
index 0000000..c9a7426
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/hp_output.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_HpOutput.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_HP_OUTPUT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_HP_OUTPUT_H_
+
+#include "defines.h"
+
+void WebRtcIlbcfix_HpOutput(
+    WebRtc_Word16 *signal,     /* (i/o) signal vector */
+    WebRtc_Word16 *ba,      /* (i)   B- and A-coefficients (2:nd order)
+                               {b[0] b[1] b[2] -a[1] -a[2]} a[0]
+                               is assumed to be 1.0 */
+    WebRtc_Word16 *y,      /* (i/o) Filter state yhi[n-1] ylow[n-1]
+                              yhi[n-2] ylow[n-2] */
+    WebRtc_Word16 *x,      /* (i/o) Filter state x[n-1] x[n-2] */
+    WebRtc_Word16 len);      /* (i)   Number of samples to filter */
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/ilbc.c b/src/modules/audio_coding/codecs/ilbc/ilbc.c
new file mode 100644
index 0000000..dbd32d1
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/ilbc.c
@@ -0,0 +1,255 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ iLBCInterface.c
+
+******************************************************************/
+
+#include "ilbc.h"
+#include "defines.h"
+#include "init_encode.h"
+#include "encode.h"
+#include "init_decode.h"
+#include "decode.h"
+#include <stdlib.h>
+
+
+WebRtc_Word16 WebRtcIlbcfix_EncoderAssign(iLBC_encinst_t **iLBC_encinst, WebRtc_Word16 *ILBCENC_inst_Addr, WebRtc_Word16 *size) {
+  *iLBC_encinst=(iLBC_encinst_t*)ILBCENC_inst_Addr;
+  *size=sizeof(iLBC_Enc_Inst_t)/sizeof(WebRtc_Word16);
+  if (*iLBC_encinst!=NULL) {
+    return(0);
+  } else {
+    return(-1);
+  }
+}
+
+WebRtc_Word16 WebRtcIlbcfix_DecoderAssign(iLBC_decinst_t **iLBC_decinst, WebRtc_Word16 *ILBCDEC_inst_Addr, WebRtc_Word16 *size) {
+  *iLBC_decinst=(iLBC_decinst_t*)ILBCDEC_inst_Addr;
+  *size=sizeof(iLBC_Dec_Inst_t)/sizeof(WebRtc_Word16);
+  if (*iLBC_decinst!=NULL) {
+    return(0);
+  } else {
+    return(-1);
+  }
+}
+
+WebRtc_Word16 WebRtcIlbcfix_EncoderCreate(iLBC_encinst_t **iLBC_encinst) {
+  *iLBC_encinst=(iLBC_encinst_t*)malloc(sizeof(iLBC_Enc_Inst_t));
+  if (*iLBC_encinst!=NULL) {
+    return(0);
+  } else {
+    return(-1);
+  }
+}
+
+WebRtc_Word16 WebRtcIlbcfix_DecoderCreate(iLBC_decinst_t **iLBC_decinst) {
+  *iLBC_decinst=(iLBC_decinst_t*)malloc(sizeof(iLBC_Dec_Inst_t));
+  if (*iLBC_decinst!=NULL) {
+    return(0);
+  } else {
+    return(-1);
+  }
+}
+
+WebRtc_Word16 WebRtcIlbcfix_EncoderFree(iLBC_encinst_t *iLBC_encinst) {
+  free(iLBC_encinst);
+  return(0);
+}
+
+WebRtc_Word16 WebRtcIlbcfix_DecoderFree(iLBC_decinst_t *iLBC_decinst) {
+  free(iLBC_decinst);
+  return(0);
+}
+
+
+WebRtc_Word16 WebRtcIlbcfix_EncoderInit(iLBC_encinst_t *iLBCenc_inst, WebRtc_Word16 mode)
+{
+  if ((mode==20)||(mode==30)) {
+    WebRtcIlbcfix_InitEncode((iLBC_Enc_Inst_t*) iLBCenc_inst, mode);
+    return(0);
+  } else {
+    return(-1);
+  }
+}
+
+WebRtc_Word16 WebRtcIlbcfix_Encode(iLBC_encinst_t *iLBCenc_inst, const WebRtc_Word16 *speechIn, WebRtc_Word16 len, WebRtc_Word16 *encoded) {
+
+  WebRtc_Word16 pos = 0;
+  WebRtc_Word16 encpos = 0;
+
+  if ((len != ((iLBC_Enc_Inst_t*)iLBCenc_inst)->blockl) &&
+#ifdef SPLIT_10MS
+      (len != 80) &&
+#endif
+      (len != 2*((iLBC_Enc_Inst_t*)iLBCenc_inst)->blockl) &&
+      (len != 3*((iLBC_Enc_Inst_t*)iLBCenc_inst)->blockl))
+  {
+    /* A maximum of 3 frames/packet is allowed */
+    return(-1);
+  } else {
+
+    /* call encoder */
+    while (pos<len) {
+      WebRtcIlbcfix_EncodeImpl((WebRtc_UWord16*) &encoded[encpos], &speechIn[pos], (iLBC_Enc_Inst_t*) iLBCenc_inst);
+#ifdef SPLIT_10MS
+      pos += 80;
+      if(((iLBC_Enc_Inst_t*)iLBCenc_inst)->section == 0)
+#else
+        pos += ((iLBC_Enc_Inst_t*)iLBCenc_inst)->blockl;
+#endif
+      encpos += ((iLBC_Enc_Inst_t*)iLBCenc_inst)->no_of_words;
+    }
+    return (encpos*2);
+  }
+}
+
+WebRtc_Word16 WebRtcIlbcfix_DecoderInit(iLBC_decinst_t *iLBCdec_inst, WebRtc_Word16 mode) {
+  if ((mode==20)||(mode==30)) {
+    WebRtcIlbcfix_InitDecode((iLBC_Dec_Inst_t*) iLBCdec_inst, mode, 1);
+    return(0);
+  } else {
+    return(-1);
+  }
+}
+WebRtc_Word16 WebRtcIlbcfix_DecoderInit20Ms(iLBC_decinst_t *iLBCdec_inst) {
+  WebRtcIlbcfix_InitDecode((iLBC_Dec_Inst_t*) iLBCdec_inst, 20, 1);
+  return(0);
+}
+WebRtc_Word16 WebRtcIlbcfix_Decoderinit30Ms(iLBC_decinst_t *iLBCdec_inst) {
+  WebRtcIlbcfix_InitDecode((iLBC_Dec_Inst_t*) iLBCdec_inst, 30, 1);
+  return(0);
+}
+
+
+WebRtc_Word16 WebRtcIlbcfix_Decode(iLBC_decinst_t *iLBCdec_inst,
+                                  const WebRtc_Word16 *encoded,
+                                  WebRtc_Word16 len,
+                                  WebRtc_Word16 *decoded,
+                                  WebRtc_Word16 *speechType)
+{
+  int i=0;
+  /* Allow for automatic switching between the frame sizes
+     (although you do get some discontinuity) */
+  if ((len==((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_bytes)||
+      (len==2*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_bytes)||
+      (len==3*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_bytes)) {
+    /* ok, do nothing */
+  } else {
+    /* Test if the mode has changed */
+    if (((iLBC_Dec_Inst_t*)iLBCdec_inst)->mode==20) {
+      if ((len==NO_OF_BYTES_30MS)||
+          (len==2*NO_OF_BYTES_30MS)||
+          (len==3*NO_OF_BYTES_30MS)) {
+        WebRtcIlbcfix_InitDecode(((iLBC_Dec_Inst_t*)iLBCdec_inst), 30, ((iLBC_Dec_Inst_t*)iLBCdec_inst)->use_enhancer);
+      } else {
+        /* Unsupported frame length */
+        return(-1);
+      }
+    } else {
+      if ((len==NO_OF_BYTES_20MS)||
+          (len==2*NO_OF_BYTES_20MS)||
+          (len==3*NO_OF_BYTES_20MS)) {
+        WebRtcIlbcfix_InitDecode(((iLBC_Dec_Inst_t*)iLBCdec_inst), 20, ((iLBC_Dec_Inst_t*)iLBCdec_inst)->use_enhancer);
+      } else {
+        /* Unsupported frame length */
+        return(-1);
+      }
+    }
+  }
+
+  while ((i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_bytes)<len) {
+    WebRtcIlbcfix_DecodeImpl(&decoded[i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->blockl], (const WebRtc_UWord16*) &encoded[i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_words], (iLBC_Dec_Inst_t*) iLBCdec_inst, 1);
+    i++;
+  }
+  /* iLBC does not support VAD/CNG yet */
+  *speechType=1;
+  return(i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->blockl);
+}
+
+WebRtc_Word16 WebRtcIlbcfix_Decode20Ms(iLBC_decinst_t *iLBCdec_inst,
+                                       const WebRtc_Word16 *encoded,
+                                       WebRtc_Word16 len,
+                                       WebRtc_Word16 *decoded,
+                                       WebRtc_Word16 *speechType)
+{
+  int i=0;
+  if ((len==((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_bytes)||
+      (len==2*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_bytes)||
+      (len==3*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_bytes)) {
+    /* ok, do nothing */
+  } else {
+    return(-1);
+  }
+
+  while ((i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_bytes)<len) {
+    WebRtcIlbcfix_DecodeImpl(&decoded[i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->blockl], (const WebRtc_UWord16*) &encoded[i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_words], (iLBC_Dec_Inst_t*) iLBCdec_inst, 1);
+    i++;
+  }
+  /* iLBC does not support VAD/CNG yet */
+  *speechType=1;
+  return(i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->blockl);
+}
+
+WebRtc_Word16 WebRtcIlbcfix_Decode30Ms(iLBC_decinst_t *iLBCdec_inst,
+                                       const WebRtc_Word16 *encoded,
+                                       WebRtc_Word16 len,
+                                       WebRtc_Word16 *decoded,
+                                       WebRtc_Word16 *speechType)
+{
+  int i=0;
+  if ((len==((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_bytes)||
+      (len==2*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_bytes)||
+      (len==3*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_bytes)) {
+    /* ok, do nothing */
+  } else {
+    return(-1);
+  }
+
+  while ((i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_bytes)<len) {
+    WebRtcIlbcfix_DecodeImpl(&decoded[i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->blockl], (const WebRtc_UWord16*) &encoded[i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->no_of_words], (iLBC_Dec_Inst_t*) iLBCdec_inst, 1);
+    i++;
+  }
+  /* iLBC does not support VAD/CNG yet */
+  *speechType=1;
+  return(i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->blockl);
+}
+
+WebRtc_Word16 WebRtcIlbcfix_DecodePlc(iLBC_decinst_t *iLBCdec_inst, WebRtc_Word16 *decoded, WebRtc_Word16 noOfLostFrames) {
+  int i;
+  WebRtc_UWord16 dummy;
+
+  for (i=0;i<noOfLostFrames;i++) {
+    /* call decoder */
+    WebRtcIlbcfix_DecodeImpl(&decoded[i*((iLBC_Dec_Inst_t*)iLBCdec_inst)->blockl], &dummy, (iLBC_Dec_Inst_t*) iLBCdec_inst, 0);
+  }
+  return (noOfLostFrames*((iLBC_Dec_Inst_t*)iLBCdec_inst)->blockl);
+}
+
+WebRtc_Word16 WebRtcIlbcfix_NetEqPlc(iLBC_decinst_t *iLBCdec_inst, WebRtc_Word16 *decoded, WebRtc_Word16 noOfLostFrames) {
+
+  /* Two input parameters not used, but needed for function pointers in NetEQ */
+  (void)(decoded = NULL);
+  (void)(noOfLostFrames = 0);
+
+  WebRtcSpl_MemSetW16(((iLBC_Dec_Inst_t*)iLBCdec_inst)->enh_buf, 0, ENH_BUFL);
+  ((iLBC_Dec_Inst_t*)iLBCdec_inst)->prev_enh_pl = 2;
+
+  return (0);
+}
+
+void WebRtcIlbcfix_version(char *version)
+{
+  strcpy((char*)version, "1.1.1");
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/ilbc.gypi b/src/modules/audio_coding/codecs/ilbc/ilbc.gypi
new file mode 100644
index 0000000..89ae27d
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/ilbc.gypi
@@ -0,0 +1,190 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'iLBC',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+      ],
+      'include_dirs': [
+        'interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'interface',
+        ],
+      },
+      'sources': [
+        'interface/ilbc.h',
+        'abs_quant.c',
+        'abs_quant_loop.c',
+        'augmented_cb_corr.c',
+        'bw_expand.c',
+        'cb_construct.c',
+        'cb_mem_energy.c',
+        'cb_mem_energy_augmentation.c',
+        'cb_mem_energy_calc.c',
+        'cb_search.c',
+        'cb_search_core.c',
+        'cb_update_best_index.c',
+        'chebyshev.c',
+        'comp_corr.c',
+        'constants.c',
+        'create_augmented_vec.c',
+        'decode.c',
+        'decode_residual.c',
+        'decoder_interpolate_lsf.c',
+        'do_plc.c',
+        'encode.c',
+        'energy_inverse.c',
+        'enh_upsample.c',
+        'enhancer.c',
+        'enhancer_interface.c',
+        'filtered_cb_vecs.c',
+        'frame_classify.c',
+        'gain_dequant.c',
+        'gain_quant.c',
+        'get_cd_vec.c',
+        'get_lsp_poly.c',
+        'get_sync_seq.c',
+        'hp_input.c',
+        'hp_output.c',
+        'ilbc.c',
+        'index_conv_dec.c',
+        'index_conv_enc.c',
+        'init_decode.c',
+        'init_encode.c',
+        'interpolate.c',
+        'interpolate_samples.c',
+        'lpc_encode.c',
+        'lsf_check.c',
+        'lsf_interpolate_to_poly_dec.c',
+        'lsf_interpolate_to_poly_enc.c',
+        'lsf_to_lsp.c',
+        'lsf_to_poly.c',
+        'lsp_to_lsf.c',
+        'my_corr.c',
+        'nearest_neighbor.c',
+        'pack_bits.c',
+        'poly_to_lsf.c',
+        'poly_to_lsp.c',
+        'refiner.c',
+        'simple_interpolate_lsf.c',
+        'simple_lpc_analysis.c',
+        'simple_lsf_dequant.c',
+        'simple_lsf_quant.c',
+        'smooth.c',
+        'smooth_out_data.c',
+        'sort_sq.c',
+        'split_vq.c',
+        'state_construct.c',
+        'state_search.c',
+        'swap_bytes.c',
+        'unpack_bits.c',
+        'vq3.c',
+        'vq4.c',
+        'window32_w32.c',
+        'xcorr_coef.c',
+        'abs_quant.h',
+        'abs_quant_loop.h',
+        'augmented_cb_corr.h',
+        'bw_expand.h',
+        'cb_construct.h',
+        'cb_mem_energy.h',
+        'cb_mem_energy_augmentation.h',
+        'cb_mem_energy_calc.h',
+        'cb_search.h',
+        'cb_search_core.h',
+        'cb_update_best_index.h',
+        'chebyshev.h',
+        'comp_corr.h',
+        'constants.h',
+        'create_augmented_vec.h',
+        'decode.h',
+        'decode_residual.h',
+        'decoder_interpolate_lsf.h',
+        'do_plc.h',
+        'encode.h',
+        'energy_inverse.h',
+        'enh_upsample.h',
+        'enhancer.h',
+        'enhancer_interface.h',
+        'filtered_cb_vecs.h',
+        'frame_classify.h',
+        'gain_dequant.h',
+        'gain_quant.h',
+        'get_cd_vec.h',
+        'get_lsp_poly.h',
+        'get_sync_seq.h',
+        'hp_input.h',
+        'hp_output.h',
+        'defines.h',
+        'index_conv_dec.h',
+        'index_conv_enc.h',
+        'init_decode.h',
+        'init_encode.h',
+        'interpolate.h',
+        'interpolate_samples.h',
+        'lpc_encode.h',
+        'lsf_check.h',
+        'lsf_interpolate_to_poly_dec.h',
+        'lsf_interpolate_to_poly_enc.h',
+        'lsf_to_lsp.h',
+        'lsf_to_poly.h',
+        'lsp_to_lsf.h',
+        'my_corr.h',
+        'nearest_neighbor.h',
+        'pack_bits.h',
+        'poly_to_lsf.h',
+        'poly_to_lsp.h',
+        'refiner.h',
+        'simple_interpolate_lsf.h',
+        'simple_lpc_analysis.h',
+        'simple_lsf_dequant.h',
+        'simple_lsf_quant.h',
+        'smooth.h',
+        'smooth_out_data.h',
+        'sort_sq.h',
+        'split_vq.h',
+        'state_construct.h',
+        'state_search.h',
+        'swap_bytes.h',
+        'unpack_bits.h',
+        'vq3.h',
+        'vq4.h',
+        'window32_w32.h',
+        'xcorr_coef.h',
+     ], # sources
+    }, # iLBC
+  ], # targets
+  'conditions': [
+    ['include_tests==1', {
+      'targets': [
+        {
+          'target_name': 'iLBCtest',
+          'type': 'executable',
+          'dependencies': [
+            'iLBC',
+          ],
+          'sources': [
+            'test/iLBC_test.c',
+          ],
+        }, # iLBCtest
+      ], # targets
+    }], # include_tests
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/modules/audio_coding/codecs/ilbc/index_conv_dec.c b/src/modules/audio_coding/codecs/ilbc/index_conv_dec.c
new file mode 100644
index 0000000..0d6346a
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/index_conv_dec.c
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_IndexConvDec.c
+
+******************************************************************/
+
+#include "defines.h"
+
+void WebRtcIlbcfix_IndexConvDec(
+    WebRtc_Word16 *index   /* (i/o) Codebook indexes */
+                                ){
+  int k;
+
+  for (k=4;k<6;k++) {
+    /* Readjust the second and third codebook index for the first 40 sample
+       so that they look the same as the first (in terms of lag)
+    */
+    if ((index[k]>=44)&&(index[k]<108)) {
+      index[k]+=64;
+    } else if ((index[k]>=108)&&(index[k]<128)) {
+      index[k]+=128;
+    } else {
+      /* ERROR */
+    }
+  }
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/index_conv_dec.h b/src/modules/audio_coding/codecs/ilbc/index_conv_dec.h
new file mode 100644
index 0000000..f29ee23
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/index_conv_dec.h
@@ -0,0 +1,28 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_IndexConvDec.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INDEX_CONV_DEC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INDEX_CONV_DEC_H_
+
+#include "defines.h"
+
+void WebRtcIlbcfix_IndexConvDec(
+    WebRtc_Word16 *index   /* (i/o) Codebook indexes */
+                                );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/index_conv_enc.c b/src/modules/audio_coding/codecs/ilbc/index_conv_enc.c
new file mode 100644
index 0000000..cbc04b6
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/index_conv_enc.c
@@ -0,0 +1,42 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ IiLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_IndexConvEnc.c
+
+******************************************************************/
+
+#include "defines.h"
+/*----------------------------------------------------------------*
+ *  Convert the codebook indexes to make the search easier
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_IndexConvEnc(
+    WebRtc_Word16 *index   /* (i/o) Codebook indexes */
+                                ){
+  int k;
+
+  for (k=4;k<6;k++) {
+    /* Readjust the second and third codebook index so that it is
+       packetized into 7 bits (before it was put in lag-wise the same
+       way as for the first codebook which uses 8 bits)
+    */
+    if ((index[k]>=108)&&(index[k]<172)) {
+      index[k]-=64;
+    } else if (index[k]>=236) {
+      index[k]-=128;
+    } else {
+      /* ERROR */
+    }
+  }
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/index_conv_enc.h b/src/modules/audio_coding/codecs/ilbc/index_conv_enc.h
new file mode 100644
index 0000000..d28a6e2
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/index_conv_enc.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_IndexConvEnc.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INDEX_CONV_ENC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INDEX_CONV_ENC_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Convert the codebook indexes to make the search easier
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_IndexConvEnc(
+    WebRtc_Word16 *index   /* (i/o) Codebook indexes */
+                                );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/init_decode.c b/src/modules/audio_coding/codecs/ilbc/init_decode.c
new file mode 100644
index 0000000..b654f1e
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/init_decode.c
@@ -0,0 +1,98 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+	iLBC Speech Coder ANSI-C Source Code
+
+	WebRtcIlbcfix_InitDecode.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  Initiation of decoder instance.
+ *---------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_InitDecode(		/* (o) Number of decoded samples */
+    iLBC_Dec_Inst_t *iLBCdec_inst,	/* (i/o) Decoder instance */
+    WebRtc_Word16 mode,					/* (i) frame size mode */
+    int use_enhancer           /* (i) 1 to use enhancer
+                                  0 to run without enhancer */
+                                                ) {
+  int i;
+
+  iLBCdec_inst->mode = mode;
+
+  /* Set all the variables that are dependent on the frame size mode */
+  if (mode==30) {
+    iLBCdec_inst->blockl = BLOCKL_30MS;
+    iLBCdec_inst->nsub = NSUB_30MS;
+    iLBCdec_inst->nasub = NASUB_30MS;
+    iLBCdec_inst->lpc_n = LPC_N_30MS;
+    iLBCdec_inst->no_of_bytes = NO_OF_BYTES_30MS;
+    iLBCdec_inst->no_of_words = NO_OF_WORDS_30MS;
+    iLBCdec_inst->state_short_len=STATE_SHORT_LEN_30MS;
+  }
+  else if (mode==20) {
+    iLBCdec_inst->blockl = BLOCKL_20MS;
+    iLBCdec_inst->nsub = NSUB_20MS;
+    iLBCdec_inst->nasub = NASUB_20MS;
+    iLBCdec_inst->lpc_n = LPC_N_20MS;
+    iLBCdec_inst->no_of_bytes = NO_OF_BYTES_20MS;
+    iLBCdec_inst->no_of_words = NO_OF_WORDS_20MS;
+    iLBCdec_inst->state_short_len=STATE_SHORT_LEN_20MS;
+  }
+  else {
+    return(-1);
+  }
+
+  /* Reset all the previous LSF to mean LSF */
+  WEBRTC_SPL_MEMCPY_W16(iLBCdec_inst->lsfdeqold, WebRtcIlbcfix_kLsfMean, LPC_FILTERORDER);
+
+  /* Clear the synthesis filter memory */
+  WebRtcSpl_MemSetW16(iLBCdec_inst->syntMem, 0, LPC_FILTERORDER);
+
+  /* Set the old synthesis filter to {1.0 0.0 ... 0.0} */
+  WebRtcSpl_MemSetW16(iLBCdec_inst->old_syntdenum, 0, ((LPC_FILTERORDER + 1)*NSUB_MAX));
+  for (i=0; i<NSUB_MAX; i++) {
+    iLBCdec_inst->old_syntdenum[i*(LPC_FILTERORDER+1)] = 4096;
+  }
+
+  /* Clear the variables that are used for the PLC */
+  iLBCdec_inst->last_lag = 20;
+  iLBCdec_inst->consPLICount = 0;
+  iLBCdec_inst->prevPLI = 0;
+  iLBCdec_inst->perSquare = 0;
+  iLBCdec_inst->prevLag = 120;
+  iLBCdec_inst->prevLpc[0] = 4096;
+  WebRtcSpl_MemSetW16(iLBCdec_inst->prevLpc+1, 0, LPC_FILTERORDER);
+  WebRtcSpl_MemSetW16(iLBCdec_inst->prevResidual, 0, BLOCKL_MAX);
+
+  /* Initialize the seed for the random number generator */
+  iLBCdec_inst->seed = 777;
+
+  /* Set the filter state of the HP filter to 0 */
+  WebRtcSpl_MemSetW16(iLBCdec_inst->hpimemx, 0, 2);
+  WebRtcSpl_MemSetW16(iLBCdec_inst->hpimemy, 0, 4);
+
+  /* Set the variables that are used in the ehnahcer */
+  iLBCdec_inst->use_enhancer = use_enhancer;
+  WebRtcSpl_MemSetW16(iLBCdec_inst->enh_buf, 0, (ENH_BUFL+ENH_BUFL_FILTEROVERHEAD));
+  for (i=0;i<ENH_NBLOCKS_TOT;i++) {
+    iLBCdec_inst->enh_period[i]=160; /* Q(-4) */
+  }
+
+  iLBCdec_inst->prev_enh_pl = 0;
+
+  return (iLBCdec_inst->blockl);
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/init_decode.h b/src/modules/audio_coding/codecs/ilbc/init_decode.h
new file mode 100644
index 0000000..3452f34
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/init_decode.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_InitDecode.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INIT_DECODE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INIT_DECODE_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Initiation of decoder instance.
+ *---------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_InitDecode(  /* (o) Number of decoded samples */
+    iLBC_Dec_Inst_t *iLBCdec_inst, /* (i/o) Decoder instance */
+    WebRtc_Word16 mode,     /* (i) frame size mode */
+    int use_enhancer           /* (i) 1 to use enhancer
+                                  0 to run without enhancer */
+                                         );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/init_encode.c b/src/modules/audio_coding/codecs/ilbc/init_encode.c
new file mode 100644
index 0000000..e034bb0
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/init_encode.c
@@ -0,0 +1,72 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_InitEncode.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  Initiation of encoder instance.
+ *---------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_InitEncode( /* (o) Number of bytes encoded */
+    iLBC_Enc_Inst_t *iLBCenc_inst,     /* (i/o) Encoder instance */
+    WebRtc_Word16 mode     /* (i) frame size mode */
+                                        ){
+  iLBCenc_inst->mode = mode;
+
+  /* Set all the variables that are dependent on the frame size mode */
+  if (mode==30) {
+    iLBCenc_inst->blockl = BLOCKL_30MS;
+    iLBCenc_inst->nsub = NSUB_30MS;
+    iLBCenc_inst->nasub = NASUB_30MS;
+    iLBCenc_inst->lpc_n = LPC_N_30MS;
+    iLBCenc_inst->no_of_bytes = NO_OF_BYTES_30MS;
+    iLBCenc_inst->no_of_words = NO_OF_WORDS_30MS;
+    iLBCenc_inst->state_short_len=STATE_SHORT_LEN_30MS;
+  }
+  else if (mode==20) {
+    iLBCenc_inst->blockl = BLOCKL_20MS;
+    iLBCenc_inst->nsub = NSUB_20MS;
+    iLBCenc_inst->nasub = NASUB_20MS;
+    iLBCenc_inst->lpc_n = LPC_N_20MS;
+    iLBCenc_inst->no_of_bytes = NO_OF_BYTES_20MS;
+    iLBCenc_inst->no_of_words = NO_OF_WORDS_20MS;
+    iLBCenc_inst->state_short_len=STATE_SHORT_LEN_20MS;
+  }
+  else {
+    return(-1);
+  }
+
+  /* Clear the buffers and set the previous LSF and LSP to the mean value */
+  WebRtcSpl_MemSetW16(iLBCenc_inst->anaMem, 0, LPC_FILTERORDER);
+  WEBRTC_SPL_MEMCPY_W16(iLBCenc_inst->lsfold, WebRtcIlbcfix_kLsfMean, LPC_FILTERORDER);
+  WEBRTC_SPL_MEMCPY_W16(iLBCenc_inst->lsfdeqold, WebRtcIlbcfix_kLsfMean, LPC_FILTERORDER);
+  WebRtcSpl_MemSetW16(iLBCenc_inst->lpc_buffer, 0, LPC_LOOKBACK + BLOCKL_MAX);
+
+  /* Set the filter state of the HP filter to 0 */
+  WebRtcSpl_MemSetW16(iLBCenc_inst->hpimemx, 0, 2);
+  WebRtcSpl_MemSetW16(iLBCenc_inst->hpimemy, 0, 4);
+
+#ifdef SPLIT_10MS
+  /*Zeroing the past samples for 10msec Split*/
+  WebRtcSpl_MemSetW16(iLBCenc_inst->past_samples,0,160);
+  iLBCenc_inst->section = 0;
+#endif
+
+  return (iLBCenc_inst->no_of_bytes);
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/init_encode.h b/src/modules/audio_coding/codecs/ilbc/init_encode.h
new file mode 100644
index 0000000..f1d1858
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/init_encode.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_InitEncode.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INIT_ENCODE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INIT_ENCODE_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Initiation of encoder instance.
+ *---------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_InitEncode(  /* (o) Number of bytes encoded */
+    iLBC_Enc_Inst_t *iLBCenc_inst, /* (i/o) Encoder instance */
+    WebRtc_Word16 mode     /* (i) frame size mode */
+                                         );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/interface/ilbc.h b/src/modules/audio_coding/codecs/ilbc/interface/ilbc.h
new file mode 100644
index 0000000..6208962
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/interface/ilbc.h
@@ -0,0 +1,259 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * ilbc.h
+ *
+ * This header file contains all of the API's for iLBC.
+ *
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_INTERFACE_ILBC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_INTERFACE_ILBC_H_
+
+/*
+ * Define the fixpoint numeric formats
+ */
+
+#include "typedefs.h"
+
+/*
+ * Solution to support multiple instances
+ * Customer has to cast instance to proper type
+ */
+
+typedef struct iLBC_encinst_t_ iLBC_encinst_t;
+
+typedef struct iLBC_decinst_t_ iLBC_decinst_t;
+
+/*
+ * Comfort noise constants
+ */
+
+#define ILBC_SPEECH 1
+#define ILBC_CNG  2
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+  /****************************************************************************
+   * WebRtcIlbcfix_XxxAssign(...)
+   *
+   * These functions assigns the encoder/decoder instance to the specified
+   * memory location
+   *
+   * Input:
+   *      - XXX_xxxinst       : Pointer to created instance that should be
+   *                            assigned
+   *      - ILBCXXX_inst_Addr : Pointer to the desired memory space
+   *      - size              : The size that this structure occupies (in Word16)
+   *
+   * Return value             :  0 - Ok
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIlbcfix_EncoderAssign(iLBC_encinst_t **iLBC_encinst,
+					    WebRtc_Word16 *ILBCENC_inst_Addr,
+					    WebRtc_Word16 *size);
+  WebRtc_Word16 WebRtcIlbcfix_DecoderAssign(iLBC_decinst_t **iLBC_decinst,
+					    WebRtc_Word16 *ILBCDEC_inst_Addr,
+					    WebRtc_Word16 *size);
+
+
+  /****************************************************************************
+   * WebRtcIlbcfix_XxxAssign(...)
+   *
+   * These functions create a instance to the specified structure
+   *
+   * Input:
+   *      - XXX_inst          : Pointer to created instance that should be created
+   *
+   * Return value             :  0 - Ok
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIlbcfix_EncoderCreate(iLBC_encinst_t **iLBC_encinst);
+  WebRtc_Word16 WebRtcIlbcfix_DecoderCreate(iLBC_decinst_t **iLBC_decinst);
+
+  /****************************************************************************
+   * WebRtcIlbcfix_XxxFree(...)
+   *
+   * These functions frees the dynamic memory of a specified instance
+   *
+   * Input:
+   *      - XXX_inst          : Pointer to created instance that should be freed
+   *
+   * Return value             :  0 - Ok
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIlbcfix_EncoderFree(iLBC_encinst_t *iLBC_encinst);
+  WebRtc_Word16 WebRtcIlbcfix_DecoderFree(iLBC_decinst_t *iLBC_decinst);
+
+
+  /****************************************************************************
+   * WebRtcIlbcfix_EncoderInit(...)
+   *
+   * This function initializes a iLBC instance
+   *
+   * Input:
+   *      - iLBCenc_inst      : iLBC instance, i.e. the user that should receive
+   *                            be initialized
+   *      - frameLen          : The frame length of the codec 20/30 (ms)
+   *
+   * Return value             :  0 - Ok
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIlbcfix_EncoderInit(iLBC_encinst_t *iLBCenc_inst,
+					  WebRtc_Word16 frameLen);
+
+  /****************************************************************************
+   * WebRtcIlbcfix_Encode(...)
+   *
+   * This function encodes one iLBC frame. Input speech length has be a
+   * multiple of the frame length.
+   *
+   * Input:
+   *      - iLBCenc_inst      : iLBC instance, i.e. the user that should encode
+   *                            a package
+   *      - speechIn          : Input speech vector
+   *      - len               : Samples in speechIn (160, 240, 320 or 480)
+   *
+   * Output:
+   *  - encoded               : The encoded data vector
+   *
+   * Return value             : >0 - Length (in bytes) of coded data
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIlbcfix_Encode(iLBC_encinst_t *iLBCenc_inst,
+                                     const WebRtc_Word16 *speechIn,
+                                     WebRtc_Word16 len,
+                                     WebRtc_Word16 *encoded);
+
+  /****************************************************************************
+   * WebRtcIlbcfix_DecoderInit(...)
+   *
+   * This function initializes a iLBC instance with either 20 or 30 ms frames
+   * Alternatively the WebRtcIlbcfix_DecoderInit_XXms can be used. Then it's
+   * not needed to specify the frame length with a variable.
+   *
+   * Input:
+   *      - iLBC_decinst_t    : iLBC instance, i.e. the user that should receive
+   *                            be initialized
+   *      - frameLen          : The frame length of the codec 20/30 (ms)
+   *
+   * Return value             :  0 - Ok
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIlbcfix_DecoderInit(iLBC_decinst_t *iLBCdec_inst,
+					  WebRtc_Word16 frameLen);
+  WebRtc_Word16 WebRtcIlbcfix_DecoderInit20Ms(iLBC_decinst_t *iLBCdec_inst);
+  WebRtc_Word16 WebRtcIlbcfix_Decoderinit30Ms(iLBC_decinst_t *iLBCdec_inst);
+
+  /****************************************************************************
+   * WebRtcIlbcfix_Decode(...)
+   *
+   * This function decodes a packet with iLBC frame(s). Output speech length
+   * will be a multiple of 160 or 240 samples ((160 or 240)*frames/packet).
+   *
+   * Input:
+   *      - iLBCdec_inst      : iLBC instance, i.e. the user that should decode
+   *                            a packet
+   *      - encoded           : Encoded iLBC frame(s)
+   *      - len               : Bytes in encoded vector
+   *
+   * Output:
+   *      - decoded           : The decoded vector
+   *      - speechType        : 1 normal, 2 CNG
+   *
+   * Return value             : >0 - Samples in decoded vector
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIlbcfix_Decode(iLBC_decinst_t *iLBCdec_inst,
+                                     const WebRtc_Word16* encoded,
+                                     WebRtc_Word16 len,
+                                     WebRtc_Word16 *decoded,
+                                     WebRtc_Word16 *speechType);
+  WebRtc_Word16 WebRtcIlbcfix_Decode20Ms(iLBC_decinst_t *iLBCdec_inst,
+                                         const WebRtc_Word16 *encoded,
+                                         WebRtc_Word16 len,
+                                         WebRtc_Word16 *decoded,
+                                         WebRtc_Word16 *speechType);
+  WebRtc_Word16 WebRtcIlbcfix_Decode30Ms(iLBC_decinst_t *iLBCdec_inst,
+                                         const WebRtc_Word16 *encoded,
+                                         WebRtc_Word16 len,
+                                         WebRtc_Word16 *decoded,
+                                         WebRtc_Word16 *speechType);
+
+  /****************************************************************************
+   * WebRtcIlbcfix_DecodePlc(...)
+   *
+   * This function conducts PLC for iLBC frame(s). Output speech length
+   * will be a multiple of 160 or 240 samples.
+   *
+   * Input:
+   *      - iLBCdec_inst      : iLBC instance, i.e. the user that should perform
+   *                            a PLC
+   *      - noOfLostFrames    : Number of PLC frames to produce
+   *
+   * Output:
+   *      - decoded           : The "decoded" vector
+   *
+   * Return value             : >0 - Samples in decoded PLC vector
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIlbcfix_DecodePlc(iLBC_decinst_t *iLBCdec_inst,
+					WebRtc_Word16 *decoded,
+					WebRtc_Word16 noOfLostFrames);
+
+  /****************************************************************************
+   * WebRtcIlbcfix_NetEqPlc(...)
+   *
+   * This function updates the decoder when a packet loss has occured, but it
+   * does not produce any PLC data. Function can be used if another PLC method
+   * is used (i.e NetEq).
+   *
+   * Input:
+   *      - iLBCdec_inst      : iLBC instance that should be updated
+   *      - noOfLostFrames    : Number of lost frames
+   *
+   * Output:
+   *      - decoded           : The "decoded" vector (nothing in this case)
+   *
+   * Return value             : >0 - Samples in decoded PLC vector
+   *                            -1 - Error
+   */
+
+  WebRtc_Word16 WebRtcIlbcfix_NetEqPlc(iLBC_decinst_t *iLBCdec_inst,
+				       WebRtc_Word16 *decoded,
+				       WebRtc_Word16 noOfLostFrames);
+
+  /****************************************************************************
+   * WebRtcIlbcfix_version(...)
+   *
+   * This function returns the version number of iLBC
+   *
+   * Output:
+   *      - version           : Version number of iLBC (maximum 20 char)
+   */
+
+  void WebRtcIlbcfix_version(char *version);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/interpolate.c b/src/modules/audio_coding/codecs/ilbc/interpolate.c
new file mode 100644
index 0000000..11cb33c
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/interpolate.c
@@ -0,0 +1,48 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Interpolate.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  interpolation between vectors
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Interpolate(
+    WebRtc_Word16 *out, /* (o) output vector */
+    WebRtc_Word16 *in1, /* (i) first input vector */
+    WebRtc_Word16 *in2, /* (i) second input vector */
+    WebRtc_Word16 coef, /* (i) weight coefficient in Q14 */
+    WebRtc_Word16 length)  /* (i) number of sample is vectors */
+{
+  int i;
+  WebRtc_Word16 invcoef;
+
+  /*
+    Performs the operation out[i] = in[i]*coef + (1-coef)*in2[i] (with rounding)
+  */
+
+  invcoef = 16384 - coef; /* 16384 = 1.0 (Q14)*/
+  for (i = 0; i < length; i++) {
+    out[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+        (WEBRTC_SPL_MUL_16_16(coef, in1[i]) + WEBRTC_SPL_MUL_16_16(invcoef, in2[i]))+8192,
+        14);
+  }
+
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/interpolate.h b/src/modules/audio_coding/codecs/ilbc/interpolate.h
new file mode 100644
index 0000000..a12021c
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/interpolate.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Interpolate.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INTERPOLATE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INTERPOLATE_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  interpolation between vectors
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Interpolate(
+    WebRtc_Word16 *out, /* (o) output vector */
+    WebRtc_Word16 *in1, /* (i) first input vector */
+    WebRtc_Word16 *in2, /* (i) second input vector */
+    WebRtc_Word16 coef, /* (i) weight coefficient in Q14 */
+    WebRtc_Word16 length); /* (i) number of sample is vectors */
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/interpolate_samples.c b/src/modules/audio_coding/codecs/ilbc/interpolate_samples.c
new file mode 100644
index 0000000..31eb52e
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/interpolate_samples.c
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_InterpolateSamples.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+void WebRtcIlbcfix_InterpolateSamples(
+    WebRtc_Word16 *interpSamples, /* (o) The interpolated samples */
+    WebRtc_Word16 *CBmem,   /* (i) The CB memory */
+    WebRtc_Word16 lMem    /* (i) Length of the CB memory */
+                                      ) {
+  WebRtc_Word16 *ppi, *ppo, i, j, temp1, temp2;
+  WebRtc_Word16 *tmpPtr;
+
+  /* Calculate the 20 vectors of interpolated samples (4 samples each)
+     that are used in the codebooks for lag 20 to 39 */
+  tmpPtr = interpSamples;
+  for (j=0; j<20; j++) {
+    temp1 = 0;
+    temp2 = 3;
+    ppo = CBmem+lMem-4;
+    ppi = CBmem+lMem-j-24;
+    for (i=0; i<4; i++) {
+
+      *tmpPtr++ = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(WebRtcIlbcfix_kAlpha[temp2],*ppo, 15) +
+          (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(WebRtcIlbcfix_kAlpha[temp1], *ppi, 15);
+
+      ppo++;
+      ppi++;
+      temp1++;
+      temp2--;
+    }
+  }
+
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/interpolate_samples.h b/src/modules/audio_coding/codecs/ilbc/interpolate_samples.h
new file mode 100644
index 0000000..5c98aaf
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/interpolate_samples.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_InterpolateSamples.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INTERPOLATE_SAMPLES_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_INTERPOLATE_SAMPLES_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Construct the interpolated samples for the Augmented CB
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_InterpolateSamples(
+    WebRtc_Word16 *interpSamples, /* (o) The interpolated samples */
+    WebRtc_Word16 *CBmem,   /* (i) The CB memory */
+    WebRtc_Word16 lMem    /* (i) Length of the CB memory */
+                                      );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/lpc_encode.c b/src/modules/audio_coding/codecs/ilbc/lpc_encode.c
new file mode 100644
index 0000000..73d67a0
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/lpc_encode.c
@@ -0,0 +1,60 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_LpcEncode.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "simple_lpc_analysis.h"
+#include "simple_interpolate_lsf.h"
+#include "simple_lsf_quant.h"
+#include "lsf_check.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  lpc encoder
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_LpcEncode(
+    WebRtc_Word16 *syntdenum,  /* (i/o) synthesis filter coefficients
+                                           before/after encoding */
+    WebRtc_Word16 *weightdenum, /* (i/o) weighting denumerator coefficients
+                                   before/after encoding */
+    WebRtc_Word16 *lsf_index,  /* (o) lsf quantization index */
+    WebRtc_Word16 *data,   /* (i) Speech to do LPC analysis on */
+    iLBC_Enc_Inst_t *iLBCenc_inst
+    /* (i/o) the encoder state structure */
+                              ) {
+  /* Stack based */
+  WebRtc_Word16 lsf[LPC_FILTERORDER * LPC_N_MAX];
+  WebRtc_Word16 lsfdeq[LPC_FILTERORDER * LPC_N_MAX];
+
+  /* Calculate LSF's from the input speech */
+  WebRtcIlbcfix_SimpleLpcAnalysis(lsf, data, iLBCenc_inst);
+
+  /* Quantize the LSF's */
+  WebRtcIlbcfix_SimpleLsfQ(lsfdeq, lsf_index, lsf, iLBCenc_inst->lpc_n);
+
+  /* Stableize the LSF's if needed */
+  WebRtcIlbcfix_LsfCheck(lsfdeq, LPC_FILTERORDER, iLBCenc_inst->lpc_n);
+
+  /* Calculate the synthesis and weighting filter coefficients from
+     the optimal LSF and the dequantized LSF */
+  WebRtcIlbcfix_SimpleInterpolateLsf(syntdenum, weightdenum,
+                                     lsf, lsfdeq, iLBCenc_inst->lsfold,
+                                     iLBCenc_inst->lsfdeqold, LPC_FILTERORDER, iLBCenc_inst);
+
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/lpc_encode.h b/src/modules/audio_coding/codecs/ilbc/lpc_encode.h
new file mode 100644
index 0000000..36967a3
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/lpc_encode.h
@@ -0,0 +1,39 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_LpcEncode.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LPC_ENCODE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LPC_ENCODE_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  lpc encoder
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_LpcEncode(
+    WebRtc_Word16 *syntdenum,  /* (i/o) synthesis filter coefficients
+                                  before/after encoding */
+    WebRtc_Word16 *weightdenum, /* (i/o) weighting denumerator coefficients
+                                   before/after encoding */
+    WebRtc_Word16 *lsf_index,  /* (o) lsf quantization index */
+    WebRtc_Word16 *data,   /* (i) Speech to do LPC analysis on */
+    iLBC_Enc_Inst_t *iLBCenc_inst
+    /* (i/o) the encoder state structure */
+                             );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/lsf_check.c b/src/modules/audio_coding/codecs/ilbc/lsf_check.c
new file mode 100644
index 0000000..7097d74
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/lsf_check.c
@@ -0,0 +1,71 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_LsfCheck.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  check for stability of lsf coefficients
+ *---------------------------------------------------------------*/
+
+int WebRtcIlbcfix_LsfCheck(
+    WebRtc_Word16 *lsf, /* LSF parameters */
+    int dim, /* dimension of LSF */
+    int NoAn)  /* No of analysis per frame */
+{
+  int k,n,m, Nit=2, change=0,pos;
+  const WebRtc_Word16 eps=319;  /* 0.039 in Q13 (50 Hz)*/
+  const WebRtc_Word16 eps2=160;  /* eps/2.0 in Q13;*/
+  const WebRtc_Word16 maxlsf=25723; /* 3.14; (4000 Hz)*/
+  const WebRtc_Word16 minlsf=82;  /* 0.01; (0 Hz)*/
+
+  /* LSF separation check*/
+  for (n=0;n<Nit;n++) {  /* Run through a 2 times */
+    for (m=0;m<NoAn;m++) { /* Number of analyses per frame */
+      for (k=0;k<(dim-1);k++) {
+        pos=m*dim+k;
+
+        /* Seperate coefficients with a safety margin of 50 Hz */
+        if ((lsf[pos+1]-lsf[pos])<eps) {
+
+          if (lsf[pos+1]<lsf[pos]) {
+            lsf[pos+1]= lsf[pos]+eps2;
+            lsf[pos]= lsf[pos+1]-eps2;
+          } else {
+            lsf[pos]-=eps2;
+            lsf[pos+1]+=eps2;
+          }
+          change=1;
+        }
+
+        /* Limit minimum and maximum LSF */
+        if (lsf[pos]<minlsf) {
+          lsf[pos]=minlsf;
+          change=1;
+        }
+
+        if (lsf[pos]>maxlsf) {
+          lsf[pos]=maxlsf;
+          change=1;
+        }
+      }
+    }
+  }
+
+  return change;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/lsf_check.h b/src/modules/audio_coding/codecs/ilbc/lsf_check.h
new file mode 100644
index 0000000..830bbed
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/lsf_check.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_LsfCheck.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_CHECK_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_CHECK_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  check for stability of lsf coefficients
+ *---------------------------------------------------------------*/
+
+int WebRtcIlbcfix_LsfCheck(
+    WebRtc_Word16 *lsf, /* LSF parameters */
+    int dim, /* dimension of LSF */
+    int NoAn); /* No of analysis per frame */
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.c b/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.c
new file mode 100644
index 0000000..3bb23d0
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.c
@@ -0,0 +1,42 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_LspInterpolate2PolyDec.c
+
+******************************************************************/
+
+#include "interpolate.h"
+#include "lsf_to_poly.h"
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  interpolation of lsf coefficients for the decoder
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_LspInterpolate2PolyDec(
+    WebRtc_Word16 *a,   /* (o) lpc coefficients Q12 */
+    WebRtc_Word16 *lsf1,  /* (i) first set of lsf coefficients Q13 */
+    WebRtc_Word16 *lsf2,  /* (i) second set of lsf coefficients Q13 */
+    WebRtc_Word16 coef,  /* (i) weighting coefficient to use between
+                                   lsf1 and lsf2 Q14 */
+    WebRtc_Word16 length  /* (i) length of coefficient vectors */
+                                          ){
+  WebRtc_Word16 lsftmp[LPC_FILTERORDER];
+
+  /* interpolate LSF */
+  WebRtcIlbcfix_Interpolate(lsftmp, lsf1, lsf2, coef, length);
+
+  /* Compute the filter coefficients from the LSF */
+  WebRtcIlbcfix_Lsf2Poly(a, lsftmp);
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.h b/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.h
new file mode 100644
index 0000000..23fe3a7
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_dec.h
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_LspInterpolate2PolyDec.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_INTERPOLATE_TO_POLY_DEC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_INTERPOLATE_TO_POLY_DEC_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  interpolation of lsf coefficients for the decoder
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_LspInterpolate2PolyDec(
+    WebRtc_Word16 *a,   /* (o) lpc coefficients Q12 */
+    WebRtc_Word16 *lsf1,  /* (i) first set of lsf coefficients Q13 */
+    WebRtc_Word16 *lsf2,  /* (i) second set of lsf coefficients Q13 */
+    WebRtc_Word16 coef,  /* (i) weighting coefficient to use between
+                                   lsf1 and lsf2 Q14 */
+    WebRtc_Word16 length  /* (i) length of coefficient vectors */
+                                          );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.c b/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.c
new file mode 100644
index 0000000..3b0a34d
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.c
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_LsfInterpolate2PloyEnc.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "interpolate.h"
+#include "lsf_to_poly.h"
+
+/*----------------------------------------------------------------*
+ *  lsf interpolator and conversion from lsf to a coefficients
+ *  (subrutine to SimpleInterpolateLSF)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_LsfInterpolate2PloyEnc(
+    WebRtc_Word16 *a,  /* (o) lpc coefficients Q12 */
+    WebRtc_Word16 *lsf1, /* (i) first set of lsf coefficients Q13 */
+    WebRtc_Word16 *lsf2, /* (i) second set of lsf coefficients Q13 */
+    WebRtc_Word16 coef, /* (i) weighting coefficient to use between
+                           lsf1 and lsf2 Q14 */
+    WebRtc_Word16 length /* (i) length of coefficient vectors */
+                                          ) {
+  /* Stack based */
+  WebRtc_Word16 lsftmp[LPC_FILTERORDER];
+
+  /* interpolate LSF */
+  WebRtcIlbcfix_Interpolate(lsftmp, lsf1, lsf2, coef, length);
+
+  /* Compute the filter coefficients from the LSF */
+  WebRtcIlbcfix_Lsf2Poly(a, lsftmp);
+
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.h b/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.h
new file mode 100644
index 0000000..1bbbb80
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/lsf_interpolate_to_poly_enc.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_LsfInterpolate2PloyEnc.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_INTERPOLATE_TO_POLY_ENC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_INTERPOLATE_TO_POLY_ENC_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  lsf interpolator and conversion from lsf to a coefficients
+ *  (subrutine to SimpleInterpolateLSF)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_LsfInterpolate2PloyEnc(
+    WebRtc_Word16 *a,  /* (o) lpc coefficients Q12 */
+    WebRtc_Word16 *lsf1, /* (i) first set of lsf coefficients Q13 */
+    WebRtc_Word16 *lsf2, /* (i) second set of lsf coefficients Q13 */
+    WebRtc_Word16 coef, /* (i) weighting coefficient to use between
+                           lsf1 and lsf2 Q14 */
+    WebRtc_Word16 length /* (i) length of coefficient vectors */
+                                          );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/lsf_to_lsp.c b/src/modules/audio_coding/codecs/ilbc/lsf_to_lsp.c
new file mode 100644
index 0000000..84278a4
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/lsf_to_lsp.c
@@ -0,0 +1,61 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Lsf2Lsp.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  conversion from lsf to lsp coefficients
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Lsf2Lsp(
+    WebRtc_Word16 *lsf, /* (i) lsf in Q13 values between 0 and pi */
+    WebRtc_Word16 *lsp, /* (o) lsp in Q15 values between -1 and 1 */
+    WebRtc_Word16 m  /* (i) number of coefficients */
+                           ) {
+  WebRtc_Word16 i, k;
+  WebRtc_Word16 diff; /* difference, which is used for the
+                           linear approximation (Q8) */
+  WebRtc_Word16 freq; /* normalized frequency in Q15 (0..1) */
+  WebRtc_Word32 tmpW32;
+
+  for(i=0; i<m; i++)
+  {
+    freq = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(lsf[i], 20861, 15);
+    /* 20861: 1.0/(2.0*PI) in Q17 */
+    /*
+       Upper 8 bits give the index k and
+       Lower 8 bits give the difference, which needs
+       to be approximated linearly
+    */
+    k = WEBRTC_SPL_RSHIFT_W16(freq, 8);
+    diff = (freq&0x00ff);
+
+    /* Guard against getting outside table */
+
+    if (k>63) {
+      k = 63;
+    }
+
+    /* Calculate linear approximation */
+    tmpW32 = WEBRTC_SPL_MUL_16_16(WebRtcIlbcfix_kCosDerivative[k], diff);
+    lsp[i] = WebRtcIlbcfix_kCos[k]+(WebRtc_Word16)(WEBRTC_SPL_RSHIFT_W32(tmpW32, 12));
+  }
+
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/lsf_to_lsp.h b/src/modules/audio_coding/codecs/ilbc/lsf_to_lsp.h
new file mode 100644
index 0000000..db6549b
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/lsf_to_lsp.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Lsf2Lsp.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_TO_LSP_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_TO_LSP_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  conversion from lsf to lsp coefficients
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Lsf2Lsp(
+    WebRtc_Word16 *lsf, /* (i) lsf in Q13 values between 0 and pi */
+    WebRtc_Word16 *lsp, /* (o) lsp in Q15 values between -1 and 1 */
+    WebRtc_Word16 m     /* (i) number of coefficients */
+                           );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/lsf_to_poly.c b/src/modules/audio_coding/codecs/ilbc/lsf_to_poly.c
new file mode 100644
index 0000000..f1c4a9e
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/lsf_to_poly.c
@@ -0,0 +1,86 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Lsf2Poly.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "lsf_to_lsp.h"
+#include "get_lsp_poly.h"
+#include "constants.h"
+
+void WebRtcIlbcfix_Lsf2Poly(
+    WebRtc_Word16 *a,     /* (o) predictor coefficients (order = 10) in Q12 */
+    WebRtc_Word16 *lsf    /* (i) line spectral frequencies in Q13 */
+                            ) {
+  WebRtc_Word32 f[2][6]; /* f[0][] and f[1][] corresponds to
+                            F1(z) and F2(z) respectivly */
+  WebRtc_Word32 *f1ptr, *f2ptr;
+  WebRtc_Word16 *a1ptr, *a2ptr;
+  WebRtc_Word32 tmpW32;
+  WebRtc_Word16 lsp[10];
+  int i;
+
+  /* Convert lsf to lsp */
+  WebRtcIlbcfix_Lsf2Lsp(lsf, lsp, LPC_FILTERORDER);
+
+  /* Get F1(z) and F2(z) from the lsp */
+  f1ptr=f[0];
+  f2ptr=f[1];
+  WebRtcIlbcfix_GetLspPoly(&lsp[0],f1ptr);
+  WebRtcIlbcfix_GetLspPoly(&lsp[1],f2ptr);
+
+  /* for i = 5 down to 1
+     Compute f1[i] += f1[i-1];
+     and     f2[i] += f2[i-1];
+  */
+  f1ptr=&f[0][5];
+  f2ptr=&f[1][5];
+  for (i=5; i>0; i--)
+  {
+    (*f1ptr) += (*(f1ptr-1));
+    (*f2ptr) -= (*(f2ptr-1));
+    f1ptr--;
+    f2ptr--;
+  }
+
+  /* Get the A(z) coefficients
+     a[0] = 1.0
+     for i = 1 to 5
+     a[i] = (f1[i] + f2[i] + round)>>13;
+     for i = 1 to 5
+     a[11-i] = (f1[i] - f2[i] + round)>>13;
+  */
+  a[0]=4096;
+  a1ptr=&a[1];
+  a2ptr=&a[10];
+  f1ptr=&f[0][1];
+  f2ptr=&f[1][1];
+  for (i=5; i>0; i--)
+  {
+    tmpW32 = (*f1ptr) + (*f2ptr);
+    (*a1ptr) = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((tmpW32+4096),13);
+
+    tmpW32 = (*f1ptr) - (*f2ptr);
+    (*a2ptr) = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32((tmpW32+4096),13);
+
+    a1ptr++;
+    a2ptr--;
+    f1ptr++;
+    f2ptr++;
+  }
+
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/lsf_to_poly.h b/src/modules/audio_coding/codecs/ilbc/lsf_to_poly.h
new file mode 100644
index 0000000..a00693b
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/lsf_to_poly.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Lsf2Poly.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_TO_POLY_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSF_TO_POLY_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  Convert from LSF coefficients to A coefficients
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Lsf2Poly(
+    WebRtc_Word16 *a,     /* (o) predictor coefficients (order = 10) in Q12 */
+    WebRtc_Word16 *lsf    /* (i) line spectral frequencies in Q13 */
+                            );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/lsp_to_lsf.c b/src/modules/audio_coding/codecs/ilbc/lsp_to_lsf.c
new file mode 100644
index 0000000..134afbb
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/lsp_to_lsf.c
@@ -0,0 +1,84 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Lsp2Lsf.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  conversion from LSP coefficients to LSF coefficients
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Lsp2Lsf(
+    WebRtc_Word16 *lsp, /* (i) lsp vector -1...+1 in Q15 */
+    WebRtc_Word16 *lsf, /* (o) Lsf vector 0...Pi in Q13
+                           (ordered, so that lsf[i]<lsf[i+1]) */
+    WebRtc_Word16 m  /* (i) Number of coefficients */
+                           )
+{
+  WebRtc_Word16 i, k;
+  WebRtc_Word16 diff; /* diff between table value and desired value (Q15) */
+  WebRtc_Word16 freq; /* lsf/(2*pi) (Q16) */
+  WebRtc_Word16 *lspPtr, *lsfPtr, *cosTblPtr;
+  WebRtc_Word16 tmp;
+
+  /* set the index to maximum index value in WebRtcIlbcfix_kCos */
+  k = 63;
+
+  /*
+     Start with the highest LSP and then work the way down
+     For each LSP the lsf is calculated by first order approximation
+     of the acos(x) function
+  */
+  lspPtr = &lsp[9];
+  lsfPtr = &lsf[9];
+  cosTblPtr=(WebRtc_Word16*)&WebRtcIlbcfix_kCos[k];
+  for(i=m-1; i>=0; i--)
+  {
+    /*
+       locate value in the table, which is just above lsp[i],
+       basically an approximation to acos(x)
+    */
+    while( (((WebRtc_Word32)(*cosTblPtr)-(*lspPtr)) < 0)&&(k>0) )
+    {
+      k-=1;
+      cosTblPtr--;
+    }
+
+    /* Calculate diff, which is used in the linear approximation of acos(x) */
+    diff = (*lspPtr)-(*cosTblPtr);
+
+    /*
+       The linear approximation of acos(lsp[i]) :
+       acos(lsp[i])= k*512 + (WebRtcIlbcfix_kAcosDerivative[ind]*offset >> 11)
+    */
+
+    /* tmp (linear offset) in Q16 */
+    tmp = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(WebRtcIlbcfix_kAcosDerivative[k],diff, 11);
+
+    /* freq in Q16 */
+    freq = (WebRtc_Word16)WEBRTC_SPL_LSHIFT_W16(k,9)+tmp;
+
+    /* lsf = freq*2*pi */
+    (*lsfPtr) = (WebRtc_Word16)(((WebRtc_Word32)freq*25736)>>15);
+
+    lsfPtr--;
+    lspPtr--;
+  }
+
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/lsp_to_lsf.h b/src/modules/audio_coding/codecs/ilbc/lsp_to_lsf.h
new file mode 100644
index 0000000..97ba7e4
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/lsp_to_lsf.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Lsp2Lsf.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSP_TO_LSF_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_LSP_TO_LSF_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  conversion from LSP coefficients to LSF coefficients
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Lsp2Lsf(
+    WebRtc_Word16 *lsp, /* (i) lsp vector -1...+1 in Q15 */
+    WebRtc_Word16 *lsf, /* (o) Lsf vector 0...Pi in Q13
+                           (ordered, so that lsf[i]<lsf[i+1]) */
+    WebRtc_Word16 m  /* (i) Number of coefficients */
+                           );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/my_corr.c b/src/modules/audio_coding/codecs/ilbc/my_corr.c
new file mode 100644
index 0000000..2162205
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/my_corr.c
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_MyCorr.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * compute cross correlation between sequences
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_MyCorr(
+    WebRtc_Word32 *corr,  /* (o) correlation of seq1 and seq2 */
+    WebRtc_Word16 *seq1,  /* (i) first sequence */
+    WebRtc_Word16 dim1,  /* (i) dimension first seq1 */
+    const WebRtc_Word16 *seq2, /* (i) second sequence */
+    WebRtc_Word16 dim2   /* (i) dimension seq2 */
+                          ){
+  WebRtc_Word16 max, scale, loops;
+
+  /* Calculate correlation between the two sequences. Scale the
+     result of the multiplcication to maximum 26 bits in order
+     to avoid overflow */
+  max=WebRtcSpl_MaxAbsValueW16(seq1, dim1);
+  scale=WebRtcSpl_GetSizeInBits(max);
+
+  scale = (WebRtc_Word16)(WEBRTC_SPL_MUL_16_16(2,scale)-26);
+  if (scale<0) {
+    scale=0;
+  }
+
+  loops=dim1-dim2+1;
+
+  /* Calculate the cross correlations */
+  WebRtcSpl_CrossCorrelation(corr, (WebRtc_Word16*)seq2, seq1, dim2, loops, scale, 1);
+
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/my_corr.h b/src/modules/audio_coding/codecs/ilbc/my_corr.h
new file mode 100644
index 0000000..f588c53
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/my_corr.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_MyCorr.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_MY_CORR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_MY_CORR_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * compute cross correlation between sequences
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_MyCorr(
+    WebRtc_Word32 *corr,  /* (o) correlation of seq1 and seq2 */
+    WebRtc_Word16 *seq1,  /* (i) first sequence */
+    WebRtc_Word16 dim1,  /* (i) dimension first seq1 */
+    const WebRtc_Word16 *seq2, /* (i) second sequence */
+    WebRtc_Word16 dim2   /* (i) dimension seq2 */
+                          );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/nearest_neighbor.c b/src/modules/audio_coding/codecs/ilbc/nearest_neighbor.c
new file mode 100644
index 0000000..ea9e1eb
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/nearest_neighbor.c
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_NearestNeighbor.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * Find index in array such that the array element with said
+ * index is the element of said array closest to "value"
+ * according to the squared-error criterion
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_NearestNeighbor(
+    WebRtc_Word16 *index, /* (o) index of array element closest to value */
+    WebRtc_Word16 *array, /* (i) data array (Q2) */
+    WebRtc_Word16 value, /* (i) value (Q2) */
+    WebRtc_Word16 arlength /* (i) dimension of data array (==8) */
+                                   ){
+  int i;
+  WebRtc_Word16 diff;
+  /* Stack based */
+  WebRtc_Word32 crit[8];
+
+  /* Calculate square distance */
+  for(i=0;i<arlength;i++){
+    diff=array[i]-value;
+    crit[i]=WEBRTC_SPL_MUL_16_16(diff, diff);
+  }
+
+  /* Find the minimum square distance */
+  *index=WebRtcSpl_MinIndexW32(crit, (WebRtc_Word16)arlength);
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/nearest_neighbor.h b/src/modules/audio_coding/codecs/ilbc/nearest_neighbor.h
new file mode 100644
index 0000000..705e17a
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/nearest_neighbor.h
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_NearestNeighbor.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_NEAREST_NEIGHBOR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_NEAREST_NEIGHBOR_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * Find index in array such that the array element with said
+ * index is the element of said array closest to "value"
+ * according to the squared-error criterion
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_NearestNeighbor(
+    WebRtc_Word16 *index, /* (o) index of array element closest to value */
+    WebRtc_Word16 *array, /* (i) data array (Q2) */
+    WebRtc_Word16 value, /* (i) value (Q2) */
+    WebRtc_Word16 arlength /* (i) dimension of data array (==8) */
+                                   );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/pack_bits.c b/src/modules/audio_coding/codecs/ilbc/pack_bits.c
new file mode 100644
index 0000000..3990fbe
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/pack_bits.c
@@ -0,0 +1,251 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_PackBits.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  unpacking of bits from bitstream, i.e., vector of bytes
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_PackBits(
+    WebRtc_UWord16 *bitstream,   /* (o) The packetized bitstream */
+    iLBC_bits *enc_bits,  /* (i) Encoded bits */
+    WebRtc_Word16 mode     /* (i) Codec mode (20 or 30) */
+                             ){
+  WebRtc_UWord16 *bitstreamPtr;
+  int i, k;
+  WebRtc_Word16 *tmpPtr;
+
+  bitstreamPtr=bitstream;
+
+  /* Class 1 bits of ULP */
+  /* First WebRtc_Word16 */
+  (*bitstreamPtr)  = ((WebRtc_UWord16)enc_bits->lsf[0])<<10;   /* Bit 0..5  */
+  (*bitstreamPtr) |= (enc_bits->lsf[1])<<3;     /* Bit 6..12 */
+  (*bitstreamPtr) |= (enc_bits->lsf[2]&0x70)>>4;    /* Bit 13..15 */
+  bitstreamPtr++;
+  /* Second WebRtc_Word16 */
+  (*bitstreamPtr)  = ((WebRtc_UWord16)enc_bits->lsf[2]&0xF)<<12;  /* Bit 0..3  */
+
+  if (mode==20) {
+    (*bitstreamPtr) |= (enc_bits->startIdx)<<10;    /* Bit 4..5  */
+    (*bitstreamPtr) |= (enc_bits->state_first)<<9;    /* Bit 6  */
+    (*bitstreamPtr) |= (enc_bits->idxForMax)<<3;    /* Bit 7..12 */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[0])&0x70)>>4;  /* Bit 13..15 */
+    bitstreamPtr++;
+    /* Third WebRtc_Word16 */
+    (*bitstreamPtr) = ((enc_bits->cb_index[0])&0xE)<<12;  /* Bit 0..2  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[0])&0x18)<<8;  /* Bit 3..4  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[1])&0x8)<<7;  /* Bit 5  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[3])&0xFE)<<2;  /* Bit 6..12 */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[3])&0x10)>>2;  /* Bit 13  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[4])&0x8)>>2;  /* Bit 14  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[6])&0x10)>>4;  /* Bit 15  */
+  } else { /* mode==30 */
+    (*bitstreamPtr) |= (enc_bits->lsf[3])<<6;     /* Bit 4..9  */
+    (*bitstreamPtr) |= (enc_bits->lsf[4]&0x7E)>>1;    /* Bit 10..15 */
+    bitstreamPtr++;
+    /* Third WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)enc_bits->lsf[4]&0x1)<<15;  /* Bit 0  */
+    (*bitstreamPtr) |= (enc_bits->lsf[5])<<8;     /* Bit 1..7  */
+    (*bitstreamPtr) |= (enc_bits->startIdx)<<5;     /* Bit 8..10 */
+    (*bitstreamPtr) |= (enc_bits->state_first)<<4;    /* Bit 11  */
+    (*bitstreamPtr) |= ((enc_bits->idxForMax)&0x3C)>>2;   /* Bit 12..15 */
+    bitstreamPtr++;
+    /* 4:th WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)enc_bits->idxForMax&0x3)<<14; /* Bit 0..1  */
+    (*bitstreamPtr) |= (enc_bits->cb_index[0]&0x78)<<7;   /* Bit 2..5  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[0]&0x10)<<5;  /* Bit 6  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[1]&0x8)<<5;  /* Bit 7  */
+    (*bitstreamPtr) |= (enc_bits->cb_index[3]&0xFC);   /* Bit 8..13 */
+    (*bitstreamPtr) |= (enc_bits->gain_index[3]&0x10)>>3;  /* Bit 14  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[4]&0x8)>>3;  /* Bit 15  */
+  }
+  /* Class 2 bits of ULP */
+  /* 4:th to 6:th WebRtc_Word16 for 20 ms case
+     5:th to 7:th WebRtc_Word16 for 30 ms case */
+  bitstreamPtr++;
+  tmpPtr=enc_bits->idxVec;
+  for (k=0; k<3; k++) {
+    (*bitstreamPtr) = 0;
+    for (i=15; i>=0; i--) {
+      (*bitstreamPtr) |= ((WebRtc_UWord16)((*tmpPtr)&0x4)>>2)<<i;
+      /* Bit 15-i  */
+      tmpPtr++;
+    }
+    bitstreamPtr++;
+  }
+
+  if (mode==20) {
+    /* 7:th WebRtc_Word16 */
+    (*bitstreamPtr) = 0;
+    for (i=15; i>6; i--) {
+      (*bitstreamPtr) |= ((WebRtc_UWord16)((*tmpPtr)&0x4)>>2)<<i;
+      /* Bit 15-i  */
+      tmpPtr++;
+    }
+    (*bitstreamPtr) |= (enc_bits->gain_index[1]&0x4)<<4;  /* Bit 9  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[3]&0xC)<<2;  /* Bit 10..11 */
+    (*bitstreamPtr) |= (enc_bits->gain_index[4]&0x4)<<1;  /* Bit 12  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[6]&0x8)>>1;  /* Bit 13  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[7]&0xC)>>2;  /* Bit 14..15 */
+
+  } else { /* mode==30 */
+    /* 8:th WebRtc_Word16 */
+    (*bitstreamPtr) = 0;
+    for (i=15; i>5; i--) {
+      (*bitstreamPtr) |= ((WebRtc_UWord16)((*tmpPtr)&0x4)>>2)<<i;
+      /* Bit 15-i  */
+      tmpPtr++;
+    }
+    (*bitstreamPtr) |= (enc_bits->cb_index[0]&0x6)<<3;   /* Bit 10..11 */
+    (*bitstreamPtr) |= (enc_bits->gain_index[0]&0x8);   /* Bit 12  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[1]&0x4);   /* Bit 13  */
+    (*bitstreamPtr) |= (enc_bits->cb_index[3]&0x2);    /* Bit 14  */
+    (*bitstreamPtr) |= (enc_bits->cb_index[6]&0x80)>>7;   /* Bit 15  */
+    bitstreamPtr++;
+    /* 9:th WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)enc_bits->cb_index[6]&0x7E)<<9;/* Bit 0..5  */
+    (*bitstreamPtr) |= (enc_bits->cb_index[9]&0xFE)<<2;   /* Bit 6..12 */
+    (*bitstreamPtr) |= (enc_bits->cb_index[12]&0xE0)>>5;  /* Bit 13..15 */
+    bitstreamPtr++;
+    /* 10:th WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)enc_bits->cb_index[12]&0x1E)<<11;/* Bit 0..3 */
+    (*bitstreamPtr) |= (enc_bits->gain_index[3]&0xC)<<8;  /* Bit 4..5  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[4]&0x6)<<7;  /* Bit 6..7  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[6]&0x18)<<3;  /* Bit 8..9  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[7]&0xC)<<2;  /* Bit 10..11 */
+    (*bitstreamPtr) |= (enc_bits->gain_index[9]&0x10)>>1;  /* Bit 12  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[10]&0x8)>>1;  /* Bit 13  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[12]&0x10)>>3;  /* Bit 14  */
+    (*bitstreamPtr) |= (enc_bits->gain_index[13]&0x8)>>3;  /* Bit 15  */
+  }
+  bitstreamPtr++;
+  /* Class 3 bits of ULP */
+  /*  8:th to 14:th WebRtc_Word16 for 20 ms case
+      11:th to 17:th WebRtc_Word16 for 30 ms case */
+  tmpPtr=enc_bits->idxVec;
+  for (k=0; k<7; k++) {
+    (*bitstreamPtr) = 0;
+    for (i=14; i>=0; i-=2) {
+      (*bitstreamPtr) |= ((WebRtc_UWord16)((*tmpPtr)&0x3))<<i; /* Bit 15-i..14-i*/
+      tmpPtr++;
+    }
+    bitstreamPtr++;
+  }
+
+  if (mode==20) {
+    /* 15:th WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)((enc_bits->idxVec[56])&0x3))<<14;/* Bit 0..1 */
+    (*bitstreamPtr) |= (((enc_bits->cb_index[0])&1))<<13;  /* Bit 2  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[1]))<<6;   /* Bit 3..9  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[2])&0x7E)>>1;  /* Bit 10..15 */
+    bitstreamPtr++;
+    /* 16:th WebRtc_Word16 */
+    (*bitstreamPtr) = ((WebRtc_UWord16)((enc_bits->cb_index[2])&0x1))<<15;
+    /* Bit 0  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[0])&0x7)<<12;  /* Bit 1..3  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[1])&0x3)<<10;  /* Bit 4..5  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[2]))<<7;   /* Bit 6..8  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[3])&0x1)<<6;  /* Bit 9  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[4])&0x7E)>>1;  /* Bit 10..15 */
+    bitstreamPtr++;
+    /* 17:th WebRtc_Word16 */
+    (*bitstreamPtr) = ((WebRtc_UWord16)((enc_bits->cb_index[4])&0x1))<<15;
+    /* Bit 0  */
+    (*bitstreamPtr) |= (enc_bits->cb_index[5])<<8;    /* Bit 1..7  */
+    (*bitstreamPtr) |= (enc_bits->cb_index[6]);     /* Bit 8..15 */
+    bitstreamPtr++;
+    /* 18:th WebRtc_Word16 */
+    (*bitstreamPtr) = ((WebRtc_UWord16)(enc_bits->cb_index[7]))<<8; /* Bit 0..7  */
+    (*bitstreamPtr) |= (enc_bits->cb_index[8]);     /* Bit 8..15 */
+    bitstreamPtr++;
+    /* 19:th WebRtc_Word16 */
+    (*bitstreamPtr) = ((WebRtc_UWord16)((enc_bits->gain_index[3])&0x3))<<14;
+    /* Bit 0..1  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[4])&0x3)<<12;  /* Bit 2..3  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[5]))<<9;   /* Bit 4..6  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[6])&0x7)<<6;  /* Bit 7..9  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[7])&0x3)<<4;  /* Bit 10..11 */
+    (*bitstreamPtr) |= (enc_bits->gain_index[8])<<1;   /* Bit 12..14 */
+  } else { /* mode==30 */
+    /* 18:th WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)((enc_bits->idxVec[56])&0x3))<<14;/* Bit 0..1 */
+    (*bitstreamPtr) |= (((enc_bits->idxVec[57])&0x3))<<12;  /* Bit 2..3  */
+    (*bitstreamPtr) |= (((enc_bits->cb_index[0])&1))<<11;  /* Bit 4  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[1]))<<4;   /* Bit 5..11 */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[2])&0x78)>>3;  /* Bit 12..15 */
+    bitstreamPtr++;
+    /* 19:th WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)(enc_bits->cb_index[2])&0x7)<<13;
+    /* Bit 0..2  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[0])&0x7)<<10;  /* Bit 3..5  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[1])&0x3)<<8;  /* Bit 6..7  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[2])&0x7)<<5;  /* Bit 8..10 */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[3])&0x1)<<4;  /* Bit 11  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[4])&0x78)>>3;  /* Bit 12..15 */
+    bitstreamPtr++;
+    /* 20:th WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)(enc_bits->cb_index[4])&0x7)<<13;
+    /* Bit 0..2  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[5]))<<6;   /* Bit 3..9  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[6])&0x1)<<5;  /* Bit 10  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[7])&0xF8)>>3;  /* Bit 11..15 */
+    bitstreamPtr++;
+    /* 21:st WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)(enc_bits->cb_index[7])&0x7)<<13;
+    /* Bit 0..2  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[8]))<<5;   /* Bit 3..10 */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[9])&0x1)<<4;  /* Bit 11  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[10])&0xF0)>>4;  /* Bit 12..15 */
+    bitstreamPtr++;
+    /* 22:nd WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)(enc_bits->cb_index[10])&0xF)<<12;
+    /* Bit 0..3  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[11]))<<4;   /* Bit 4..11 */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[12])&0x1)<<3;  /* Bit 12  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[13])&0xE0)>>5;  /* Bit 13..15 */
+    bitstreamPtr++;
+    /* 23:rd WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)(enc_bits->cb_index[13])&0x1F)<<11;
+    /* Bit 0..4  */
+    (*bitstreamPtr) |= ((enc_bits->cb_index[14]))<<3;   /* Bit 5..12 */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[3])&0x3)<<1;  /* Bit 13..14 */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[4])&0x1);   /* Bit 15  */
+    bitstreamPtr++;
+    /* 24:rd WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)(enc_bits->gain_index[5]))<<13;
+    /* Bit 0..2  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[6])&0x7)<<10;  /* Bit 3..5  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[7])&0x3)<<8;  /* Bit 6..7  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[8]))<<5;   /* Bit 8..10 */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[9])&0xF)<<1;  /* Bit 11..14 */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[10])&0x4)>>2;  /* Bit 15  */
+    bitstreamPtr++;
+    /* 25:rd WebRtc_Word16 */
+    (*bitstreamPtr)  = ((WebRtc_UWord16)(enc_bits->gain_index[10])&0x3)<<14;
+    /* Bit 0..1  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[11]))<<11;  /* Bit 2..4  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[12])&0xF)<<7;  /* Bit 5..8  */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[13])&0x7)<<4;  /* Bit 9..11 */
+    (*bitstreamPtr) |= ((enc_bits->gain_index[14]))<<1;   /* Bit 12..14 */
+  }
+  /* Last bit is automatically zero */
+
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/pack_bits.h b/src/modules/audio_coding/codecs/ilbc/pack_bits.h
new file mode 100644
index 0000000..ed3f224
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/pack_bits.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_PackBits.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_PACK_BITS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_PACK_BITS_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  unpacking of bits from bitstream, i.e., vector of bytes
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_PackBits( 
+    WebRtc_UWord16 *bitstream,   /* (o) The packetized bitstream */
+    iLBC_bits *enc_bits,  /* (i) Encoded bits */
+    WebRtc_Word16 mode     /* (i) Codec mode (20 or 30) */
+                             );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/poly_to_lsf.c b/src/modules/audio_coding/codecs/ilbc/poly_to_lsf.c
new file mode 100644
index 0000000..fe91851
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/poly_to_lsf.c
@@ -0,0 +1,31 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Poly2Lsf.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "poly_to_lsp.h"
+#include "lsp_to_lsf.h"
+
+void WebRtcIlbcfix_Poly2Lsf(
+    WebRtc_Word16 *lsf,   /* (o) lsf coefficients (Q13) */
+    WebRtc_Word16 *a    /* (i) A coefficients (Q12) */
+                            ) {
+  WebRtc_Word16 lsp[10];
+  WebRtcIlbcfix_Poly2Lsp(a, lsp, (WebRtc_Word16*)WebRtcIlbcfix_kLspMean);
+  WebRtcIlbcfix_Lsp2Lsf(lsp, lsf, 10);
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/poly_to_lsf.h b/src/modules/audio_coding/codecs/ilbc/poly_to_lsf.h
new file mode 100644
index 0000000..0ea595e
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/poly_to_lsf.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Poly2Lsf.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_POLY_TO_LSF_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_POLY_TO_LSF_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  conversion from lpc coefficients to lsf coefficients
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Poly2Lsf(
+    WebRtc_Word16 *lsf,   /* (o) lsf coefficients (Q13) */
+    WebRtc_Word16 *a    /* (i) A coefficients (Q12) */
+                            );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/poly_to_lsp.c b/src/modules/audio_coding/codecs/ilbc/poly_to_lsp.c
new file mode 100644
index 0000000..29b4213
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/poly_to_lsp.c
@@ -0,0 +1,156 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Poly2Lsp.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "chebyshev.h"
+
+/*----------------------------------------------------------------*
+ * conversion from lpc coefficients to lsp coefficients
+ * function is only for 10:th order LPC
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Poly2Lsp(
+    WebRtc_Word16 *a,  /* (o) A coefficients in Q12 */
+    WebRtc_Word16 *lsp, /* (i) LSP coefficients in Q15 */
+    WebRtc_Word16 *old_lsp /* (i) old LSP coefficients that are used if the new
+                              coefficients turn out to be unstable */
+                            ) {
+  WebRtc_Word16 f[2][6]; /* f[0][] represents f1 and f[1][] represents f2 */
+  WebRtc_Word16 *a_i_ptr, *a_10mi_ptr;
+  WebRtc_Word16 *f1ptr, *f2ptr;
+  WebRtc_Word32 tmpW32;
+  WebRtc_Word16 x, y, xlow, ylow, xmid, ymid, xhigh, yhigh, xint;
+  WebRtc_Word16 shifts, sign;
+  int i, j;
+  int foundFreqs;
+  int fi_select;
+
+  /*
+     Calculate the two polynomials f1(z) and f2(z)
+     (the sum and the diff polynomial)
+     f1[0] = f2[0] = 1.0;
+     f1[i+1] = a[i+1] + a[10-i] - f1[i];
+     f2[i+1] = a[i+1] - a[10-i] - f1[i];
+  */
+
+  a_i_ptr = a + 1;
+  a_10mi_ptr = a + 10;
+  f1ptr = f[0];
+  f2ptr = f[1];
+  (*f1ptr) = 1024; /* 1.0 in Q10 */
+  (*f2ptr) = 1024; /* 1.0 in Q10 */
+  for (i = 0; i < 5; i++) {
+    (*(f1ptr+1)) = (WebRtc_Word16)(WEBRTC_SPL_RSHIFT_W32(((WebRtc_Word32)(*a_i_ptr)+(*a_10mi_ptr)), 2) - (*f1ptr));
+    (*(f2ptr+1)) = (WebRtc_Word16)(WEBRTC_SPL_RSHIFT_W32(((WebRtc_Word32)(*a_i_ptr)-(*a_10mi_ptr)), 2) + (*f2ptr));
+    a_i_ptr++;
+    a_10mi_ptr--;
+    f1ptr++;
+    f2ptr++;
+  }
+
+  /*
+    find the LSPs using the Chebychev pol. evaluation
+  */
+
+  fi_select = 0; /* selector between f1 and f2, start with f1 */
+
+  foundFreqs = 0;
+
+  xlow = WebRtcIlbcfix_kCosGrid[0];
+  ylow = WebRtcIlbcfix_Chebyshev(xlow, f[fi_select]);
+
+  /*
+     Iterate until all the 10 LSP's have been found or
+     all the grid points have been tried. If the 10 LSP's can
+     not be found, set the LSP vector to previous LSP
+  */
+
+  for (j = 1; j < COS_GRID_POINTS && foundFreqs < 10; j++) {
+    xhigh = xlow;
+    yhigh = ylow;
+    xlow = WebRtcIlbcfix_kCosGrid[j];
+    ylow = WebRtcIlbcfix_Chebyshev(xlow, f[fi_select]);
+
+    if (WEBRTC_SPL_MUL_16_16(ylow, yhigh) <= 0) {
+      /* Run 4 times to reduce the interval */
+      for (i = 0; i < 4; i++) {
+        /* xmid =(xlow + xhigh)/2 */
+        xmid = WEBRTC_SPL_RSHIFT_W16(xlow, 1) + WEBRTC_SPL_RSHIFT_W16(xhigh, 1);
+        ymid = WebRtcIlbcfix_Chebyshev(xmid, f[fi_select]);
+
+        if (WEBRTC_SPL_MUL_16_16(ylow, ymid) <= 0) {
+          yhigh = ymid;
+          xhigh = xmid;
+        } else {
+          ylow = ymid;
+          xlow = xmid;
+        }
+      }
+
+      /*
+        Calculater xint by linear interpolation:
+        xint = xlow - ylow*(xhigh-xlow)/(yhigh-ylow);
+      */
+
+      x = xhigh - xlow;
+      y = yhigh - ylow;
+
+      if (y == 0) {
+        xint = xlow;
+      } else {
+        sign = y;
+        y = WEBRTC_SPL_ABS_W16(y);
+        shifts = (WebRtc_Word16)WebRtcSpl_NormW32(y)-16;
+        y = WEBRTC_SPL_LSHIFT_W16(y, shifts);
+        y = (WebRtc_Word16)WebRtcSpl_DivW32W16(536838144, y); /* 1/(yhigh-ylow) */
+
+        tmpW32 = WEBRTC_SPL_MUL_16_16_RSFT(x, y, (19-shifts));
+
+        /* y=(xhigh-xlow)/(yhigh-ylow) */
+        y = (WebRtc_Word16)(tmpW32&0xFFFF);
+
+        if (sign < 0) {
+          y = -y;
+        }
+        /* tmpW32 = ylow*(xhigh-xlow)/(yhigh-ylow) */
+        tmpW32 = WEBRTC_SPL_MUL_16_16_RSFT(ylow, y, 10);
+        xint = xlow-(WebRtc_Word16)(tmpW32&0xFFFF);
+      }
+
+      /* Store the calculated lsp */
+      lsp[foundFreqs] = (WebRtc_Word16)xint;
+      foundFreqs++;
+
+      /* if needed, set xlow and ylow for next recursion */
+      if (foundFreqs<10) {
+        xlow = xint;
+        /* Swap between f1 and f2 (f[0][] and f[1][]) */
+        fi_select = ((fi_select+1)&0x1);
+
+        ylow = WebRtcIlbcfix_Chebyshev(xlow, f[fi_select]);
+      }
+    }
+  }
+
+  /* Check if M roots found, if not then use the old LSP */
+  if (foundFreqs < 10) {
+    WEBRTC_SPL_MEMCPY_W16(lsp, old_lsp, 10);
+  }
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/poly_to_lsp.h b/src/modules/audio_coding/codecs/ilbc/poly_to_lsp.h
new file mode 100644
index 0000000..7eebb25
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/poly_to_lsp.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Poly2Lsp.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_POLY_TO_LSP_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_POLY_TO_LSP_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * conversion from lpc coefficients to lsp coefficients
+ * function is only for 10:th order LPC
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Poly2Lsp(
+    WebRtc_Word16 *a,  /* (o) A coefficients in Q12 */
+    WebRtc_Word16 *lsp, /* (i) LSP coefficients in Q15 */
+    WebRtc_Word16 *old_lsp /* (i) old LSP coefficients that are used if the new
+                              coefficients turn out to be unstable */
+                            );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/refiner.c b/src/modules/audio_coding/codecs/ilbc/refiner.c
new file mode 100644
index 0000000..9210092
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/refiner.c
@@ -0,0 +1,150 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Refiner.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "enh_upsample.h"
+#include "my_corr.h"
+
+/*----------------------------------------------------------------*
+ * find segment starting near idata+estSegPos that has highest
+ * correlation with idata+centerStartPos through
+ * idata+centerStartPos+ENH_BLOCKL-1 segment is found at a
+ * resolution of ENH_UPSO times the original of the original
+ * sampling rate
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Refiner(
+    WebRtc_Word16 *updStartPos, /* (o) updated start point (Q-2) */
+    WebRtc_Word16 *idata,   /* (i) original data buffer */
+    WebRtc_Word16 idatal,   /* (i) dimension of idata */
+    WebRtc_Word16 centerStartPos, /* (i) beginning center segment */
+    WebRtc_Word16 estSegPos,  /* (i) estimated beginning other segment (Q-2) */
+    WebRtc_Word16 *surround,  /* (i/o) The contribution from this sequence
+                                           summed with earlier contributions */
+    WebRtc_Word16 gain    /* (i) Gain to use for this sequence */
+                           ){
+  WebRtc_Word16 estSegPosRounded,searchSegStartPos,searchSegEndPos,corrdim;
+  WebRtc_Word16 tloc,tloc2,i,st,en,fraction;
+
+  WebRtc_Word32 maxtemp, scalefact;
+  WebRtc_Word16 *filtStatePtr, *polyPtr;
+  /* Stack based */
+  WebRtc_Word16 filt[7];
+  WebRtc_Word32 corrVecUps[ENH_CORRDIM*ENH_UPS0];
+  WebRtc_Word32 corrVecTemp[ENH_CORRDIM];
+  WebRtc_Word16 vect[ENH_VECTL];
+  WebRtc_Word16 corrVec[ENH_CORRDIM];
+
+  /* defining array bounds */
+
+  estSegPosRounded=WEBRTC_SPL_RSHIFT_W16((estSegPos - 2),2);
+
+  searchSegStartPos=estSegPosRounded-ENH_SLOP;
+
+  if (searchSegStartPos<0) {
+    searchSegStartPos=0;
+  }
+  searchSegEndPos=estSegPosRounded+ENH_SLOP;
+
+  if(searchSegEndPos+ENH_BLOCKL >= idatal) {
+    searchSegEndPos=idatal-ENH_BLOCKL-1;
+  }
+  corrdim=searchSegEndPos-searchSegStartPos+1;
+
+  /* compute upsampled correlation and find
+     location of max */
+
+  WebRtcIlbcfix_MyCorr(corrVecTemp,idata+searchSegStartPos,
+                       (WebRtc_Word16)(corrdim+ENH_BLOCKL-1),idata+centerStartPos,ENH_BLOCKL);
+
+  /* Calculate the rescaling factor for the correlation in order to
+     put the correlation in a WebRtc_Word16 vector instead */
+  maxtemp=WebRtcSpl_MaxAbsValueW32(corrVecTemp, (WebRtc_Word16)corrdim);
+
+  scalefact=WebRtcSpl_GetSizeInBits(maxtemp)-15;
+
+  if (scalefact>0) {
+    for (i=0;i<corrdim;i++) {
+      corrVec[i]=(WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(corrVecTemp[i], scalefact);
+    }
+  } else {
+    for (i=0;i<corrdim;i++) {
+      corrVec[i]=(WebRtc_Word16)corrVecTemp[i];
+    }
+  }
+  /* In order to guarantee that all values are initialized */
+  for (i=corrdim;i<ENH_CORRDIM;i++) {
+    corrVec[i]=0;
+  }
+
+  /* Upsample the correlation */
+  WebRtcIlbcfix_EnhUpsample(corrVecUps,corrVec);
+
+  /* Find maximum */
+  tloc=WebRtcSpl_MaxIndexW32(corrVecUps, (WebRtc_Word16) (ENH_UPS0*corrdim));
+
+  /* make vector can be upsampled without ever running outside
+     bounds */
+  *updStartPos = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(searchSegStartPos,4) + tloc + 4;
+
+  tloc2 = WEBRTC_SPL_RSHIFT_W16((tloc+3), 2);
+
+  st=searchSegStartPos+tloc2-ENH_FL0;
+
+  /* initialize the vector to be filtered, stuff with zeros
+     when data is outside idata buffer */
+  if(st<0){
+    WebRtcSpl_MemSetW16(vect, 0, (WebRtc_Word16)(-st));
+    WEBRTC_SPL_MEMCPY_W16(&vect[-st], idata, (ENH_VECTL+st));
+  }
+  else{
+    en=st+ENH_VECTL;
+
+    if(en>idatal){
+      WEBRTC_SPL_MEMCPY_W16(vect, &idata[st],
+                            (ENH_VECTL-(en-idatal)));
+      WebRtcSpl_MemSetW16(&vect[ENH_VECTL-(en-idatal)], 0,
+                          (WebRtc_Word16)(en-idatal));
+    }
+    else {
+      WEBRTC_SPL_MEMCPY_W16(vect, &idata[st], ENH_VECTL);
+    }
+  }
+  /* Calculate which of the 4 fractions to use */
+  fraction=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16(tloc2,ENH_UPS0)-tloc;
+
+  /* compute the segment (this is actually a convolution) */
+
+  filtStatePtr = filt + 6;
+  polyPtr = (WebRtc_Word16*)WebRtcIlbcfix_kEnhPolyPhaser[fraction];
+  for (i=0;i<7;i++) {
+    *filtStatePtr-- = *polyPtr++;
+  }
+
+  WebRtcSpl_FilterMAFastQ12(
+      &vect[6], vect, filt,
+      ENH_FLO_MULT2_PLUS1, ENH_BLOCKL);
+
+  /* Add the contribution from this vector (scaled with gain) to the total surround vector */
+  WebRtcSpl_AddAffineVectorToVector(
+      surround, vect, gain,
+      (WebRtc_Word32)32768, 16, ENH_BLOCKL);
+
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/refiner.h b/src/modules/audio_coding/codecs/ilbc/refiner.h
new file mode 100644
index 0000000..559555c
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/refiner.h
@@ -0,0 +1,43 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Refiner.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_REFINER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_REFINER_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * find segment starting near idata+estSegPos that has highest
+ * correlation with idata+centerStartPos through
+ * idata+centerStartPos+ENH_BLOCKL-1 segment is found at a
+ * resolution of ENH_UPSO times the original of the original
+ * sampling rate
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Refiner(
+    WebRtc_Word16 *updStartPos, /* (o) updated start point (Q-2) */
+    WebRtc_Word16 *idata,   /* (i) original data buffer */
+    WebRtc_Word16 idatal,   /* (i) dimension of idata */
+    WebRtc_Word16 centerStartPos, /* (i) beginning center segment */
+    WebRtc_Word16 estSegPos,  /* (i) estimated beginning other segment (Q-2) */
+    WebRtc_Word16 *surround,  /* (i/o) The contribution from this sequence
+                                 summed with earlier contributions */
+    WebRtc_Word16 gain    /* (i) Gain to use for this sequence */
+                           );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.c b/src/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.c
new file mode 100644
index 0000000..ee5e643
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.c
@@ -0,0 +1,130 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SimpleInterpolateLsf.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "lsf_interpolate_to_poly_enc.h"
+#include "bw_expand.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  lsf interpolator (subrutine to LPCencode)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SimpleInterpolateLsf(
+    WebRtc_Word16 *syntdenum, /* (o) the synthesis filter denominator
+                                   resulting from the quantized
+                                   interpolated lsf Q12 */
+    WebRtc_Word16 *weightdenum, /* (o) the weighting filter denominator
+                                   resulting from the unquantized
+                                   interpolated lsf Q12 */
+    WebRtc_Word16 *lsf,  /* (i) the unquantized lsf coefficients Q13 */
+    WebRtc_Word16 *lsfdeq,  /* (i) the dequantized lsf coefficients Q13 */
+    WebRtc_Word16 *lsfold,  /* (i) the unquantized lsf coefficients of
+                                           the previous signal frame Q13 */
+    WebRtc_Word16 *lsfdeqold, /* (i) the dequantized lsf coefficients of the
+                                   previous signal frame Q13 */
+    WebRtc_Word16 length,  /* (i) should equate FILTERORDER */
+    iLBC_Enc_Inst_t *iLBCenc_inst
+    /* (i/o) the encoder state structure */
+                                        ) {
+  int i, pos, lp_length;
+
+  WebRtc_Word16 *lsf2, *lsfdeq2;
+  /* Stack based */
+  WebRtc_Word16 lp[LPC_FILTERORDER + 1];
+
+  lsf2 = lsf + length;
+  lsfdeq2 = lsfdeq + length;
+  lp_length = length + 1;
+
+  if (iLBCenc_inst->mode==30) {
+    /* subframe 1: Interpolation between old and first set of
+       lsf coefficients */
+
+    /* Calculate Analysis/Syntehsis filter from quantized LSF */
+    WebRtcIlbcfix_LsfInterpolate2PloyEnc(lp, lsfdeqold, lsfdeq,
+                                         WebRtcIlbcfix_kLsfWeight30ms[0],
+                                         length);
+    WEBRTC_SPL_MEMCPY_W16(syntdenum, lp, lp_length);
+
+    /* Calculate Weighting filter from quantized LSF */
+    WebRtcIlbcfix_LsfInterpolate2PloyEnc(lp, lsfold, lsf,
+                                         WebRtcIlbcfix_kLsfWeight30ms[0],
+                                         length);
+    WebRtcIlbcfix_BwExpand(weightdenum, lp,
+                           (WebRtc_Word16*)WebRtcIlbcfix_kLpcChirpWeightDenum,
+                           (WebRtc_Word16)lp_length);
+
+    /* subframe 2 to 6: Interpolation between first and second
+       set of lsf coefficients */
+
+    pos = lp_length;
+    for (i = 1; i < iLBCenc_inst->nsub; i++) {
+
+      /* Calculate Analysis/Syntehsis filter from quantized LSF */
+      WebRtcIlbcfix_LsfInterpolate2PloyEnc(lp, lsfdeq, lsfdeq2,
+                                           WebRtcIlbcfix_kLsfWeight30ms[i],
+                                           length);
+      WEBRTC_SPL_MEMCPY_W16(syntdenum + pos, lp, lp_length);
+
+      /* Calculate Weighting filter from quantized LSF */
+      WebRtcIlbcfix_LsfInterpolate2PloyEnc(lp, lsf, lsf2,
+                                           WebRtcIlbcfix_kLsfWeight30ms[i],
+                                           length);
+      WebRtcIlbcfix_BwExpand(weightdenum + pos, lp,
+                             (WebRtc_Word16*)WebRtcIlbcfix_kLpcChirpWeightDenum,
+                             (WebRtc_Word16)lp_length);
+
+      pos += lp_length;
+    }
+
+    /* update memory */
+
+    WEBRTC_SPL_MEMCPY_W16(lsfold, lsf2, length);
+    WEBRTC_SPL_MEMCPY_W16(lsfdeqold, lsfdeq2, length);
+
+  } else { /* iLBCenc_inst->mode==20 */
+    pos = 0;
+    for (i = 0; i < iLBCenc_inst->nsub; i++) {
+
+      /* Calculate Analysis/Syntehsis filter from quantized LSF */
+      WebRtcIlbcfix_LsfInterpolate2PloyEnc(lp, lsfdeqold, lsfdeq,
+                                           WebRtcIlbcfix_kLsfWeight20ms[i],
+                                           length);
+      WEBRTC_SPL_MEMCPY_W16(syntdenum + pos, lp, lp_length);
+
+      /* Calculate Weighting filter from quantized LSF */
+      WebRtcIlbcfix_LsfInterpolate2PloyEnc(lp, lsfold, lsf,
+                                           WebRtcIlbcfix_kLsfWeight20ms[i],
+                                           length);
+      WebRtcIlbcfix_BwExpand(weightdenum+pos, lp,
+                             (WebRtc_Word16*)WebRtcIlbcfix_kLpcChirpWeightDenum,
+                             (WebRtc_Word16)lp_length);
+
+      pos += lp_length;
+    }
+
+    /* update memory */
+
+    WEBRTC_SPL_MEMCPY_W16(lsfold, lsf, length);
+    WEBRTC_SPL_MEMCPY_W16(lsfdeqold, lsfdeq, length);
+
+  }
+
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.h b/src/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.h
new file mode 100644
index 0000000..8cdd7da
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/simple_interpolate_lsf.h
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SimpleInterpolateLsf.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_INTERPOLATE_LSF_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_INTERPOLATE_LSF_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  lsf interpolator (subrutine to LPCencode)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SimpleInterpolateLsf(
+    WebRtc_Word16 *syntdenum, /* (o) the synthesis filter denominator
+                                   resulting from the quantized
+                                   interpolated lsf Q12 */
+    WebRtc_Word16 *weightdenum, /* (o) the weighting filter denominator
+                                   resulting from the unquantized
+                                   interpolated lsf Q12 */
+    WebRtc_Word16 *lsf,  /* (i) the unquantized lsf coefficients Q13 */
+    WebRtc_Word16 *lsfdeq,  /* (i) the dequantized lsf coefficients Q13 */
+    WebRtc_Word16 *lsfold,  /* (i) the unquantized lsf coefficients of
+                                           the previous signal frame Q13 */
+    WebRtc_Word16 *lsfdeqold, /* (i) the dequantized lsf coefficients of the
+                                   previous signal frame Q13 */
+    WebRtc_Word16 length,  /* (i) should equate FILTERORDER */
+    iLBC_Enc_Inst_t *iLBCenc_inst
+    /* (i/o) the encoder state structure */
+                                        );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.c b/src/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.c
new file mode 100644
index 0000000..2d19edd
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.c
@@ -0,0 +1,94 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SimpleLpcAnalysis.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "window32_w32.h"
+#include "bw_expand.h"
+#include "poly_to_lsf.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  lpc analysis (subrutine to LPCencode)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SimpleLpcAnalysis(
+    WebRtc_Word16 *lsf,   /* (o) lsf coefficients */
+    WebRtc_Word16 *data,   /* (i) new block of speech */
+    iLBC_Enc_Inst_t *iLBCenc_inst
+    /* (i/o) the encoder state structure */
+                                     ) {
+  int k;
+  int scale;
+  WebRtc_Word16 is;
+  WebRtc_Word16 stability;
+  /* Stack based */
+  WebRtc_Word16 A[LPC_FILTERORDER + 1];
+  WebRtc_Word32 R[LPC_FILTERORDER + 1];
+  WebRtc_Word16 windowedData[BLOCKL_MAX];
+  WebRtc_Word16 rc[LPC_FILTERORDER];
+
+  is=LPC_LOOKBACK+BLOCKL_MAX-iLBCenc_inst->blockl;
+  WEBRTC_SPL_MEMCPY_W16(iLBCenc_inst->lpc_buffer+is,data,iLBCenc_inst->blockl);
+
+  /* No lookahead, last window is asymmetric */
+
+  for (k = 0; k < iLBCenc_inst->lpc_n; k++) {
+
+    is = LPC_LOOKBACK;
+
+    if (k < (iLBCenc_inst->lpc_n - 1)) {
+
+      /* Hanning table WebRtcIlbcfix_kLpcWin[] is in Q15-domain so the output is right-shifted 15 */
+      WebRtcSpl_ElementwiseVectorMult(windowedData, iLBCenc_inst->lpc_buffer, WebRtcIlbcfix_kLpcWin, BLOCKL_MAX, 15);
+    } else {
+
+      /* Hanning table WebRtcIlbcfix_kLpcAsymWin[] is in Q15-domain so the output is right-shifted 15 */
+      WebRtcSpl_ElementwiseVectorMult(windowedData, iLBCenc_inst->lpc_buffer+is, WebRtcIlbcfix_kLpcAsymWin, BLOCKL_MAX, 15);
+    }
+
+    /* Compute autocorrelation */
+    WebRtcSpl_AutoCorrelation(windowedData, BLOCKL_MAX, LPC_FILTERORDER, R, &scale);
+
+    /* Window autocorrelation vector */
+    WebRtcIlbcfix_Window32W32(R, R, WebRtcIlbcfix_kLpcLagWin, LPC_FILTERORDER + 1 );
+
+    /* Calculate the A coefficients from the Autocorrelation using Levinson Durbin algorithm */
+    stability=WebRtcSpl_LevinsonDurbin(R, A, rc, LPC_FILTERORDER);
+
+    /*
+       Set the filter to {1.0, 0.0, 0.0,...} if filter from Levinson Durbin algorithm is unstable
+       This should basically never happen...
+    */
+    if (stability!=1) {
+      A[0]=4096;
+      WebRtcSpl_MemSetW16(&A[1], 0, LPC_FILTERORDER);
+    }
+
+    /* Bandwidth expand the filter coefficients */
+    WebRtcIlbcfix_BwExpand(A, A, (WebRtc_Word16*)WebRtcIlbcfix_kLpcChirpSyntDenum, LPC_FILTERORDER+1);
+
+    /* Convert from A to LSF representation */
+    WebRtcIlbcfix_Poly2Lsf(lsf + k*LPC_FILTERORDER, A);
+  }
+
+  is=LPC_LOOKBACK+BLOCKL_MAX-iLBCenc_inst->blockl;
+  WEBRTC_SPL_MEMCPY_W16(iLBCenc_inst->lpc_buffer,
+                        iLBCenc_inst->lpc_buffer+LPC_LOOKBACK+BLOCKL_MAX-is, is);
+
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.h b/src/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.h
new file mode 100644
index 0000000..83c1e5b
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/simple_lpc_analysis.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SimpleLpcAnalysis.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LPC_ANALYSIS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LPC_ANALYSIS_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  lpc analysis (subrutine to LPCencode)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SimpleLpcAnalysis(
+    WebRtc_Word16 *lsf,   /* (o) lsf coefficients */
+    WebRtc_Word16 *data,   /* (i) new block of speech */
+    iLBC_Enc_Inst_t *iLBCenc_inst
+    /* (i/o) the encoder state structure */
+                                     );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.c b/src/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.c
new file mode 100644
index 0000000..7b5efa0
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.c
@@ -0,0 +1,60 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SimpleLsfDeQ.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  obtain dequantized lsf coefficients from quantization index
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SimpleLsfDeQ(
+    WebRtc_Word16 *lsfdeq,  /* (o) dequantized lsf coefficients */
+    WebRtc_Word16 *index,  /* (i) quantization index */
+    WebRtc_Word16 lpc_n  /* (i) number of LPCs */
+                                ){
+  int i, j, pos, cb_pos;
+
+  /* decode first LSF */
+
+  pos = 0;
+  cb_pos = 0;
+  for (i = 0; i < LSF_NSPLIT; i++) {
+    for (j = 0; j < WebRtcIlbcfix_kLsfDimCb[i]; j++) {
+      lsfdeq[pos + j] = WebRtcIlbcfix_kLsfCb[cb_pos +
+                                             WEBRTC_SPL_MUL_16_16(index[i], WebRtcIlbcfix_kLsfDimCb[i]) + j];
+    }
+    pos += WebRtcIlbcfix_kLsfDimCb[i];
+    cb_pos += WEBRTC_SPL_MUL_16_16(WebRtcIlbcfix_kLsfSizeCb[i], WebRtcIlbcfix_kLsfDimCb[i]);
+  }
+
+  if (lpc_n>1) {
+    /* decode last LSF */
+    pos = 0;
+    cb_pos = 0;
+    for (i = 0; i < LSF_NSPLIT; i++) {
+      for (j = 0; j < WebRtcIlbcfix_kLsfDimCb[i]; j++) {
+        lsfdeq[LPC_FILTERORDER + pos + j] = WebRtcIlbcfix_kLsfCb[cb_pos +
+                                                                 WEBRTC_SPL_MUL_16_16(index[LSF_NSPLIT + i], WebRtcIlbcfix_kLsfDimCb[i]) + j];
+      }
+      pos += WebRtcIlbcfix_kLsfDimCb[i];
+      cb_pos += WEBRTC_SPL_MUL_16_16(WebRtcIlbcfix_kLsfSizeCb[i], WebRtcIlbcfix_kLsfDimCb[i]);
+    }
+  }
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.h b/src/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.h
new file mode 100644
index 0000000..efd3103
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/simple_lsf_dequant.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SimpleLsfDeQ.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LSF_DEQUANT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LSF_DEQUANT_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  obtain dequantized lsf coefficients from quantization index
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SimpleLsfDeQ(
+    WebRtc_Word16 *lsfdeq,  /* (o) dequantized lsf coefficients */
+    WebRtc_Word16 *index,  /* (i) quantization index */
+    WebRtc_Word16 lpc_n  /* (i) number of LPCs */
+                                );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/simple_lsf_quant.c b/src/modules/audio_coding/codecs/ilbc/simple_lsf_quant.c
new file mode 100644
index 0000000..aa27fb4
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/simple_lsf_quant.c
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SimpleLsfQ.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "split_vq.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  lsf quantizer (subrutine to LPCencode)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SimpleLsfQ(
+    WebRtc_Word16 *lsfdeq, /* (o) dequantized lsf coefficients
+                                   (dimension FILTERORDER) Q13 */
+    WebRtc_Word16 *index, /* (o) quantization index */
+    WebRtc_Word16 *lsf, /* (i) the lsf coefficient vector to be
+                           quantized (dimension FILTERORDER) Q13 */
+    WebRtc_Word16 lpc_n /* (i) number of lsf sets to quantize */
+                              ){
+
+  /* Quantize first LSF with memoryless split VQ */
+  WebRtcIlbcfix_SplitVq( lsfdeq, index, lsf,
+                         (WebRtc_Word16*)WebRtcIlbcfix_kLsfCb, (WebRtc_Word16*)WebRtcIlbcfix_kLsfDimCb, (WebRtc_Word16*)WebRtcIlbcfix_kLsfSizeCb);
+
+  if (lpc_n==2) {
+    /* Quantize second LSF with memoryless split VQ */
+    WebRtcIlbcfix_SplitVq( lsfdeq + LPC_FILTERORDER, index + LSF_NSPLIT,
+                           lsf + LPC_FILTERORDER, (WebRtc_Word16*)WebRtcIlbcfix_kLsfCb,
+                           (WebRtc_Word16*)WebRtcIlbcfix_kLsfDimCb, (WebRtc_Word16*)WebRtcIlbcfix_kLsfSizeCb);
+  }
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/simple_lsf_quant.h b/src/modules/audio_coding/codecs/ilbc/simple_lsf_quant.h
new file mode 100644
index 0000000..fd17b2e
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/simple_lsf_quant.h
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SimpleLsfQ.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LSF_QUANT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SIMPLE_LSF_QUANT_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  lsf quantizer (subrutine to LPCencode)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SimpleLsfQ(
+    WebRtc_Word16 *lsfdeq, /* (o) dequantized lsf coefficients
+                                   (dimension FILTERORDER) Q13 */
+    WebRtc_Word16 *index, /* (o) quantization index */
+    WebRtc_Word16 *lsf, /* (i) the lsf coefficient vector to be
+                           quantized (dimension FILTERORDER) Q13 */
+    WebRtc_Word16 lpc_n /* (i) number of lsf sets to quantize */
+                              );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/smooth.c b/src/modules/audio_coding/codecs/ilbc/smooth.c
new file mode 100644
index 0000000..b606077
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/smooth.c
@@ -0,0 +1,211 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Smooth.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "smooth_out_data.h"
+
+/*----------------------------------------------------------------*
+ * find the smoothed output data
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Smooth(
+    WebRtc_Word16 *odata,   /* (o) smoothed output */
+    WebRtc_Word16 *current,  /* (i) the un enhanced residual for
+                                this block */
+    WebRtc_Word16 *surround  /* (i) The approximation from the
+                                surrounding sequences */
+                          ) {
+  WebRtc_Word16 maxtot, scale, scale1, scale2;
+  WebRtc_Word16 A, B, C, denomW16;
+  WebRtc_Word32 B_W32, denom, num;
+  WebRtc_Word32 errs;
+  WebRtc_Word32 w00,w10,w11, endiff, crit;
+  WebRtc_Word32 w00prim, w10prim, w11_div_w00;
+  WebRtc_Word16 w11prim;
+  WebRtc_Word16 bitsw00, bitsw10, bitsw11;
+  WebRtc_Word32 w11w00, w10w10, w00w00;
+  WebRtc_Word16 max1, max2;
+
+  /* compute some inner products (ensure no overflow by first calculating proper scale factor) */
+
+  w00 = w10 = w11 = 0;
+
+  max1=WebRtcSpl_MaxAbsValueW16(current, ENH_BLOCKL);
+  max2=WebRtcSpl_MaxAbsValueW16(surround, ENH_BLOCKL);
+  maxtot=WEBRTC_SPL_MAX(max1, max2);
+
+  scale=WebRtcSpl_GetSizeInBits(maxtot);
+  scale = (WebRtc_Word16)WEBRTC_SPL_MUL_16_16(2,scale)-26;
+  scale=WEBRTC_SPL_MAX(0, scale);
+
+  w00=WebRtcSpl_DotProductWithScale(current,current,ENH_BLOCKL,scale);
+  w11=WebRtcSpl_DotProductWithScale(surround,surround,ENH_BLOCKL,scale);
+  w10=WebRtcSpl_DotProductWithScale(surround,current,ENH_BLOCKL,scale);
+
+  if (w00<0) w00 = WEBRTC_SPL_WORD32_MAX;
+  if (w11<0) w11 = WEBRTC_SPL_WORD32_MAX;
+
+  /* Rescale w00 and w11 to w00prim and w11prim, so that w00prim/w11prim
+     is in Q16 */
+
+  bitsw00 = WebRtcSpl_GetSizeInBits(w00);
+  bitsw11 = WebRtcSpl_GetSizeInBits(w11);
+  bitsw10 = WebRtcSpl_GetSizeInBits(WEBRTC_SPL_ABS_W32(w10));
+  scale1 = 31 - bitsw00;
+  scale2 = 15 - bitsw11;
+
+  if (scale2>(scale1-16)) {
+    scale2 = scale1 - 16;
+  } else {
+    scale1 = scale2 + 16;
+  }
+
+  w00prim = WEBRTC_SPL_LSHIFT_W32(w00, scale1);
+  w11prim = (WebRtc_Word16) WEBRTC_SPL_SHIFT_W32(w11, scale2);
+
+  /* Perform C = sqrt(w11/w00) (C is in Q11 since (16+6)/2=11) */
+  if (w11prim>64) {
+    endiff = WEBRTC_SPL_LSHIFT_W32(
+        (WebRtc_Word32)WebRtcSpl_DivW32W16(w00prim, w11prim), 6);
+    C = (WebRtc_Word16)WebRtcSpl_SqrtFloor(endiff); /* C is in Q11 */
+  } else {
+    C = 1;
+  }
+
+  /* first try enhancement without power-constraint */
+
+  errs = WebRtcIlbcfix_Smooth_odata(odata, current, surround, C);
+
+
+
+  /* if constraint violated by first try, add constraint */
+
+  if ( (6-scale+scale1) > 31) {
+    crit=0;
+  } else {
+    /* crit = 0.05 * w00 (Result in Q-6) */
+    crit = WEBRTC_SPL_SHIFT_W32(
+        WEBRTC_SPL_MUL(ENH_A0, WEBRTC_SPL_RSHIFT_W32(w00prim, 14)),
+        -(6-scale+scale1));
+  }
+
+  if (errs > crit) {
+
+    if( w00 < 1) {
+      w00=1;
+    }
+
+    /* Calculate w11*w00, w10*w10 and w00*w00 in the same Q domain */
+
+    scale1 = bitsw00-15;
+    scale2 = bitsw11-15;
+
+    if (scale2>scale1) {
+      scale = scale2;
+    } else {
+      scale = scale1;
+    }
+
+    w11w00 = WEBRTC_SPL_MUL_16_16(
+        (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(w11, -scale),
+        (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(w00, -scale));
+
+    w10w10 = WEBRTC_SPL_MUL_16_16(
+        (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(w10, -scale),
+        (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(w10, -scale));
+
+    w00w00 = WEBRTC_SPL_MUL_16_16(
+        (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(w00, -scale),
+        (WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(w00, -scale));
+
+    /* Calculate (w11*w00-w10*w10)/(w00*w00) in Q16 */
+    if (w00w00>65536) {
+      endiff = (w11w00-w10w10);
+      endiff = WEBRTC_SPL_MAX(0, endiff);
+      /* denom is in Q16 */
+      denom = WebRtcSpl_DivW32W16(endiff, (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(w00w00, 16));
+    } else {
+      denom = 65536;
+    }
+
+    if( denom > 7){ /* eliminates numerical problems
+                       for if smooth */
+
+      scale=WebRtcSpl_GetSizeInBits(denom)-15;
+
+      if (scale>0) {
+        /* denomW16 is in Q(16+scale) */
+        denomW16=(WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(denom, scale);
+
+        /* num in Q(34-scale) */
+        num=WEBRTC_SPL_RSHIFT_W32(ENH_A0_MINUS_A0A0DIV4, scale);
+      } else {
+        /* denomW16 is in Q16 */
+        denomW16=(WebRtc_Word16)denom;
+
+        /* num in Q34 */
+        num=ENH_A0_MINUS_A0A0DIV4;
+      }
+
+      /* A sqrt( (ENH_A0-(ENH_A0^2)/4)*(w00*w00)/(w11*w00 + w10*w10) ) in Q9 */
+      A = (WebRtc_Word16)WebRtcSpl_SqrtFloor(WebRtcSpl_DivW32W16(num, denomW16));
+
+      /* B_W32 is in Q30 ( B = 1 - ENH_A0/2 - A * w10/w00 ) */
+      scale1 = 31-bitsw10;
+      scale2 = 21-scale1;
+      w10prim = WEBRTC_SPL_LSHIFT_W32(w10, scale1);
+      w00prim = WEBRTC_SPL_SHIFT_W32(w00, -scale2);
+      scale = bitsw00-scale2-15;
+
+      if (scale>0) {
+        w10prim=WEBRTC_SPL_RSHIFT_W32(w10prim, scale);
+        w00prim=WEBRTC_SPL_RSHIFT_W32(w00prim, scale);
+      }
+
+      if ((w00prim>0)&&(w10prim>0)) {
+        w11_div_w00=WebRtcSpl_DivW32W16(w10prim, (WebRtc_Word16)w00prim);
+
+        if (WebRtcSpl_GetSizeInBits(w11_div_w00)+WebRtcSpl_GetSizeInBits(A)>31) {
+          B_W32 = 0;
+        } else {
+          B_W32 = (WebRtc_Word32)1073741824 - (WebRtc_Word32)ENH_A0DIV2 -
+              WEBRTC_SPL_MUL(A, w11_div_w00);
+        }
+        B = (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(B_W32, 16); /* B in Q14 */
+      } else {
+        /* No smoothing */
+        A = 0;
+        B = 16384; /* 1 in Q14 */
+      }
+    }
+    else{ /* essentially no difference between cycles;
+             smoothing not needed */
+
+      A = 0;
+      B = 16384; /* 1 in Q14 */
+    }
+
+    /* create smoothed sequence */
+
+    WebRtcSpl_ScaleAndAddVectors(surround, A, 9,
+                                current, B, 14,
+                                odata, ENH_BLOCKL);
+  }
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/smooth.h b/src/modules/audio_coding/codecs/ilbc/smooth.h
new file mode 100644
index 0000000..88ce805
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/smooth.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Smooth.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SMOOTH_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SMOOTH_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * find the smoothed output data
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Smooth(
+    WebRtc_Word16 *odata,   /* (o) smoothed output */
+    WebRtc_Word16 *current,  /* (i) the un enhanced residual for
+                                this block */
+    WebRtc_Word16 *surround  /* (i) The approximation from the
+                                surrounding sequences */
+                          );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/smooth_out_data.c b/src/modules/audio_coding/codecs/ilbc/smooth_out_data.c
new file mode 100644
index 0000000..9bacd85
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/smooth_out_data.c
@@ -0,0 +1,45 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Smooth_odata.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+WebRtc_Word32 WebRtcIlbcfix_Smooth_odata(
+    WebRtc_Word16 *odata,
+    WebRtc_Word16 *psseq,
+    WebRtc_Word16 *surround,
+    WebRtc_Word16 C)
+{
+  int i;
+
+  WebRtc_Word16 err;
+  WebRtc_Word32 errs;
+
+  for(i=0;i<80;i++) {
+    odata[i]= (WebRtc_Word16)WEBRTC_SPL_RSHIFT_W32(
+        (WEBRTC_SPL_MUL_16_16(C, surround[i])+1024), 11);
+  }
+
+  errs=0;
+  for(i=0;i<80;i++) {
+    err=(WebRtc_Word16)WEBRTC_SPL_RSHIFT_W16((psseq[i]-odata[i]), 3);
+    errs+=WEBRTC_SPL_MUL_16_16(err, err); /* errs in Q-6 */
+  }
+
+  return errs;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/smooth_out_data.h b/src/modules/audio_coding/codecs/ilbc/smooth_out_data.h
new file mode 100644
index 0000000..6fbe694
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/smooth_out_data.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Smooth_odata.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SMOOTH_OUT_DATA_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SMOOTH_OUT_DATA_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * help function to WebRtcIlbcfix_Smooth()
+ *---------------------------------------------------------------*/
+
+WebRtc_Word32 WebRtcIlbcfix_Smooth_odata(
+    WebRtc_Word16 *odata,
+    WebRtc_Word16 *psseq,
+    WebRtc_Word16 *surround,
+    WebRtc_Word16 C);
+
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/sort_sq.c b/src/modules/audio_coding/codecs/ilbc/sort_sq.c
new file mode 100644
index 0000000..9276a7b
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/sort_sq.c
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SortSq.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  scalar quantization
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SortSq(
+    WebRtc_Word16 *xq,   /* (o) the quantized value */
+    WebRtc_Word16 *index,  /* (o) the quantization index */
+    WebRtc_Word16 x,   /* (i) the value to quantize */
+    const WebRtc_Word16 *cb, /* (i) the quantization codebook */
+    WebRtc_Word16 cb_size  /* (i) the size of the quantization codebook */
+                          ){
+  int i;
+
+  if (x <= cb[0]) {
+    *index = 0;
+    *xq = cb[0];
+  } else {
+    i = 0;
+    while ((x > cb[i]) && (i < (cb_size-1))) {
+      i++;
+    }
+
+    if (x > WEBRTC_SPL_RSHIFT_W32(( (WebRtc_Word32)cb[i] + cb[i - 1] + 1),1)) {
+      *index = i;
+      *xq = cb[i];
+    } else {
+      *index = i - 1;
+      *xq = cb[i - 1];
+    }
+  }
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/sort_sq.h b/src/modules/audio_coding/codecs/ilbc/sort_sq.h
new file mode 100644
index 0000000..2863dc5
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/sort_sq.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SortSq.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SORT_SQ_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SORT_SQ_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  scalar quantization
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SortSq(
+    WebRtc_Word16 *xq,   /* (o) the quantized value */
+    WebRtc_Word16 *index,  /* (o) the quantization index */
+    WebRtc_Word16 x,   /* (i) the value to quantize */
+    const WebRtc_Word16 *cb, /* (i) the quantization codebook */
+    WebRtc_Word16 cb_size  /* (i) the size of the quantization codebook */
+                           );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/split_vq.c b/src/modules/audio_coding/codecs/ilbc/split_vq.c
new file mode 100644
index 0000000..d908fa2
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/split_vq.c
@@ -0,0 +1,61 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SplitVq.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "vq3.h"
+#include "vq4.h"
+
+/*----------------------------------------------------------------*
+ *  split vector quantization
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SplitVq(
+    WebRtc_Word16 *qX,  /* (o) the quantized vector in Q13 */
+    WebRtc_Word16 *index, /* (o) a vector of indexes for all vector
+                                   codebooks in the split */
+    WebRtc_Word16 *X,  /* (i) the vector to quantize */
+    WebRtc_Word16 *CB,  /* (i) the quantizer codebook in Q13 */
+    WebRtc_Word16 *dim, /* (i) the dimension of X and qX */
+    WebRtc_Word16 *cbsize /* (i) the number of vectors in the codebook */
+                           ) {
+
+  WebRtc_Word16 *qXPtr, *indexPtr, *CBPtr, *XPtr;
+
+  /* Quantize X with the 3 vectror quantization tables */
+
+  qXPtr=qX;
+  indexPtr=index;
+  CBPtr=CB;
+  XPtr=X;
+  WebRtcIlbcfix_Vq3(qXPtr, indexPtr, CBPtr, XPtr, cbsize[0]);
+
+  qXPtr+=3;
+  indexPtr+=1;
+  CBPtr+=(dim[0]*cbsize[0]);
+  XPtr+=3;
+  WebRtcIlbcfix_Vq3(qXPtr, indexPtr, CBPtr, XPtr, cbsize[1]);
+
+  qXPtr+=3;
+  indexPtr+=1;
+  CBPtr+=(dim[1]*cbsize[1]);
+  XPtr+=3;
+  WebRtcIlbcfix_Vq4(qXPtr, indexPtr, CBPtr, XPtr, cbsize[2]);
+
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/split_vq.h b/src/modules/audio_coding/codecs/ilbc/split_vq.h
new file mode 100644
index 0000000..7264a21
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/split_vq.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SplitVq.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SPLIT_VQ_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SPLIT_VQ_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  split vector quantization
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SplitVq(
+    WebRtc_Word16 *qX,  /* (o) the quantized vector in Q13 */
+    WebRtc_Word16 *index, /* (o) a vector of indexes for all vector
+                                   codebooks in the split */
+    WebRtc_Word16 *X,  /* (i) the vector to quantize */
+    WebRtc_Word16 *CB,  /* (i) the quantizer codebook in Q13 */
+    WebRtc_Word16 *dim, /* (i) the dimension of X and qX */
+    WebRtc_Word16 *cbsize /* (i) the number of vectors in the codebook */
+                           );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/state_construct.c b/src/modules/audio_coding/codecs/ilbc/state_construct.c
new file mode 100644
index 0000000..9d03cc3
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/state_construct.c
@@ -0,0 +1,111 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_StateConstruct.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  decoding of the start state
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_StateConstruct(
+    WebRtc_Word16 idxForMax,   /* (i) 6-bit index for the quantization of
+                                           max amplitude */
+    WebRtc_Word16 *idxVec,   /* (i) vector of quantization indexes */
+    WebRtc_Word16 *syntDenum,  /* (i) synthesis filter denumerator */
+    WebRtc_Word16 *Out_fix,  /* (o) the decoded state vector */
+    WebRtc_Word16 len    /* (i) length of a state vector */
+                                  ) {
+  int k;
+  WebRtc_Word16 maxVal;
+  WebRtc_Word16 *tmp1, *tmp2, *tmp3;
+  /* Stack based */
+  WebRtc_Word16 numerator[1+LPC_FILTERORDER];
+  WebRtc_Word16 sampleValVec[2*STATE_SHORT_LEN_30MS+LPC_FILTERORDER];
+  WebRtc_Word16 sampleMaVec[2*STATE_SHORT_LEN_30MS+LPC_FILTERORDER];
+  WebRtc_Word16 *sampleVal = &sampleValVec[LPC_FILTERORDER];
+  WebRtc_Word16 *sampleMa = &sampleMaVec[LPC_FILTERORDER];
+  WebRtc_Word16 *sampleAr = &sampleValVec[LPC_FILTERORDER];
+
+  /* initialization of coefficients */
+
+  for (k=0; k<LPC_FILTERORDER+1; k++){
+    numerator[k] = syntDenum[LPC_FILTERORDER-k];
+  }
+
+  /* decoding of the maximum value */
+
+  maxVal = WebRtcIlbcfix_kFrgQuantMod[idxForMax];
+
+  /* decoding of the sample values */
+  tmp1 = sampleVal;
+  tmp2 = &idxVec[len-1];
+
+  if (idxForMax<37) {
+    for(k=0; k<len; k++){
+      /*the shifting is due to the Q13 in sq4_fixQ13[i], also the adding of 2097152 (= 0.5 << 22)
+        maxVal is in Q8 and result is in Q(-1) */
+      (*tmp1) = (WebRtc_Word16) ((WEBRTC_SPL_MUL_16_16(maxVal,WebRtcIlbcfix_kStateSq3[(*tmp2)])+(WebRtc_Word32)2097152) >> 22);
+      tmp1++;
+      tmp2--;
+    }
+  } else if (idxForMax<59) {
+    for(k=0; k<len; k++){
+      /*the shifting is due to the Q13 in sq4_fixQ13[i], also the adding of 262144 (= 0.5 << 19)
+        maxVal is in Q5 and result is in Q(-1) */
+      (*tmp1) = (WebRtc_Word16) ((WEBRTC_SPL_MUL_16_16(maxVal,WebRtcIlbcfix_kStateSq3[(*tmp2)])+(WebRtc_Word32)262144) >> 19);
+      tmp1++;
+      tmp2--;
+    }
+  } else {
+    for(k=0; k<len; k++){
+      /*the shifting is due to the Q13 in sq4_fixQ13[i], also the adding of 65536 (= 0.5 << 17)
+        maxVal is in Q3 and result is in Q(-1) */
+      (*tmp1) = (WebRtc_Word16) ((WEBRTC_SPL_MUL_16_16(maxVal,WebRtcIlbcfix_kStateSq3[(*tmp2)])+(WebRtc_Word32)65536) >> 17);
+      tmp1++;
+      tmp2--;
+    }
+  }
+
+  /* Set the rest of the data to zero */
+  WebRtcSpl_MemSetW16(&sampleVal[len], 0, len);
+
+  /* circular convolution with all-pass filter */
+
+  /* Set the state to zero */
+  WebRtcSpl_MemSetW16(sampleValVec, 0, (LPC_FILTERORDER));
+
+  /* Run MA filter + AR filter */
+  WebRtcSpl_FilterMAFastQ12(
+      sampleVal, sampleMa,
+      numerator, LPC_FILTERORDER+1, (WebRtc_Word16)(len + LPC_FILTERORDER));
+  WebRtcSpl_MemSetW16(&sampleMa[len + LPC_FILTERORDER], 0, (len - LPC_FILTERORDER));
+  WebRtcSpl_FilterARFastQ12(
+      sampleMa, sampleAr,
+      syntDenum, LPC_FILTERORDER+1, (WebRtc_Word16)(2*len));
+
+  tmp1 = &sampleAr[len-1];
+  tmp2 = &sampleAr[2*len-1];
+  tmp3 = Out_fix;
+  for(k=0;k<len;k++){
+    (*tmp3) = (*tmp1) + (*tmp2);
+    tmp1--;
+    tmp2--;
+    tmp3++;
+  }
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/state_construct.h b/src/modules/audio_coding/codecs/ilbc/state_construct.h
new file mode 100644
index 0000000..465699b
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/state_construct.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_StateConstruct.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_STATE_CONSTRUCT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_STATE_CONSTRUCT_H_
+
+/*----------------------------------------------------------------*
+ *  Generate the start state from the quantized indexes
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_StateConstruct(
+    WebRtc_Word16 idxForMax,   /* (i) 6-bit index for the quantization of
+                                           max amplitude */
+    WebRtc_Word16 *idxVec,   /* (i) vector of quantization indexes */
+    WebRtc_Word16 *syntDenum,  /* (i) synthesis filter denumerator */
+    WebRtc_Word16 *Out_fix,  /* (o) the decoded state vector */
+    WebRtc_Word16 len    /* (i) length of a state vector */
+                                  );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/state_search.c b/src/modules/audio_coding/codecs/ilbc/state_search.c
new file mode 100644
index 0000000..824a0ba
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/state_search.c
@@ -0,0 +1,119 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_StateSearch.c
+
+******************************************************************/
+
+#include "defines.h"
+#include "constants.h"
+#include "abs_quant.h"
+
+/*----------------------------------------------------------------*
+ *  encoding of start state
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_StateSearch(
+    iLBC_Enc_Inst_t *iLBCenc_inst,
+    /* (i) Encoder instance */
+    iLBC_bits *iLBC_encbits,/* (i/o) Encoded bits (output idxForMax
+                               and idxVec, input state_first) */
+    WebRtc_Word16 *residual,   /* (i) target residual vector */
+    WebRtc_Word16 *syntDenum,  /* (i) lpc synthesis filter */
+    WebRtc_Word16 *weightDenum  /* (i) weighting filter denuminator */
+                               ) {
+  WebRtc_Word16 k, index;
+  WebRtc_Word16 maxVal;
+  WebRtc_Word16 scale, shift;
+  WebRtc_Word32 maxValsq;
+  WebRtc_Word16 scaleRes;
+  WebRtc_Word16 max;
+  int i;
+  /* Stack based */
+  WebRtc_Word16 numerator[1+LPC_FILTERORDER];
+  WebRtc_Word16 residualLongVec[2*STATE_SHORT_LEN_30MS+LPC_FILTERORDER];
+  WebRtc_Word16 sampleMa[2*STATE_SHORT_LEN_30MS];
+  WebRtc_Word16 *residualLong = &residualLongVec[LPC_FILTERORDER];
+  WebRtc_Word16 *sampleAr = residualLong;
+
+  /* Scale to maximum 12 bits to avoid saturation in circular convolution filter */
+  max = WebRtcSpl_MaxAbsValueW16(residual, iLBCenc_inst->state_short_len);
+  scaleRes = WebRtcSpl_GetSizeInBits(max)-12;
+  scaleRes = WEBRTC_SPL_MAX(0, scaleRes);
+  /* Set up the filter coefficients for the circular convolution */
+  for (i=0; i<LPC_FILTERORDER+1; i++) {
+    numerator[i] = (syntDenum[LPC_FILTERORDER-i]>>scaleRes);
+  }
+
+  /* Copy the residual to a temporary buffer that we can filter
+   * and set the remaining samples to zero.
+   */
+  WEBRTC_SPL_MEMCPY_W16(residualLong, residual, iLBCenc_inst->state_short_len);
+  WebRtcSpl_MemSetW16(residualLong + iLBCenc_inst->state_short_len, 0, iLBCenc_inst->state_short_len);
+
+  /* Run the Zero-Pole filter (Ciurcular convolution) */
+  WebRtcSpl_MemSetW16(residualLongVec, 0, LPC_FILTERORDER);
+  WebRtcSpl_FilterMAFastQ12(
+      residualLong, sampleMa,
+      numerator, LPC_FILTERORDER+1, (WebRtc_Word16)(iLBCenc_inst->state_short_len + LPC_FILTERORDER));
+  WebRtcSpl_MemSetW16(&sampleMa[iLBCenc_inst->state_short_len + LPC_FILTERORDER], 0, iLBCenc_inst->state_short_len - LPC_FILTERORDER);
+
+  WebRtcSpl_FilterARFastQ12(
+      sampleMa, sampleAr,
+      syntDenum, LPC_FILTERORDER+1, (WebRtc_Word16)(2*iLBCenc_inst->state_short_len));
+
+  for(k=0;k<iLBCenc_inst->state_short_len;k++){
+    sampleAr[k] += sampleAr[k+iLBCenc_inst->state_short_len];
+  }
+
+  /* Find maximum absolute value in the vector */
+  maxVal=WebRtcSpl_MaxAbsValueW16(sampleAr, iLBCenc_inst->state_short_len);
+
+  /* Find the best index */
+
+  if ((((WebRtc_Word32)maxVal)<<scaleRes)<23170) {
+    maxValsq=((WebRtc_Word32)maxVal*maxVal)<<(2+2*scaleRes);
+  } else {
+    maxValsq=(WebRtc_Word32)WEBRTC_SPL_WORD32_MAX;
+  }
+
+  index=0;
+  for (i=0;i<63;i++) {
+
+    if (maxValsq>=WebRtcIlbcfix_kChooseFrgQuant[i]) {
+      index=i+1;
+    } else {
+      i=63;
+    }
+  }
+  iLBC_encbits->idxForMax=index;
+
+  /* Rescale the vector before quantization */
+  scale=WebRtcIlbcfix_kScale[index];
+
+  if (index<27) { /* scale table is in Q16, fout[] is in Q(-1) and we want the result to be in Q11 */
+    shift=4;
+  } else { /* scale table is in Q21, fout[] is in Q(-1) and we want the result to be in Q11 */
+    shift=9;
+  }
+
+  /* Set up vectors for AbsQuant and rescale it with the scale factor */
+  WebRtcSpl_ScaleVectorWithSat(sampleAr, sampleAr, scale,
+                              iLBCenc_inst->state_short_len, (WebRtc_Word16)(shift-scaleRes));
+
+  /* Quantize the values in fout[] */
+  WebRtcIlbcfix_AbsQuant(iLBCenc_inst, iLBC_encbits, sampleAr, weightDenum);
+
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/state_search.h b/src/modules/audio_coding/codecs/ilbc/state_search.h
new file mode 100644
index 0000000..8b7f298
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/state_search.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_StateSearch.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_STATE_SEARCH_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_STATE_SEARCH_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  encoding of start state
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_StateSearch(
+    iLBC_Enc_Inst_t *iLBCenc_inst,
+    /* (i) Encoder instance */
+    iLBC_bits *iLBC_encbits,/* (i/o) Encoded bits (output idxForMax
+                               and idxVec, input state_first) */
+    WebRtc_Word16 *residual,   /* (i) target residual vector */
+    WebRtc_Word16 *syntDenum,  /* (i) lpc synthesis filter */
+    WebRtc_Word16 *weightDenum  /* (i) weighting filter denuminator */
+                               );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/swap_bytes.c b/src/modules/audio_coding/codecs/ilbc/swap_bytes.c
new file mode 100644
index 0000000..a48a066
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/swap_bytes.c
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SwapBytes.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * Swap bytes (to simplify operations on Little Endian machines)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SwapBytes(
+    const WebRtc_UWord16* input,   /* (i) the sequence to swap */
+    WebRtc_Word16 wordLength,      /* (i) number or WebRtc_UWord16 to swap */
+    WebRtc_UWord16* output         /* (o) the swapped sequence */
+                              ) {
+  int k;
+  for (k = wordLength; k > 0; k--) {
+    *output++ = (*input >> 8)|(*input << 8);
+    input++;
+  }
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/swap_bytes.h b/src/modules/audio_coding/codecs/ilbc/swap_bytes.h
new file mode 100644
index 0000000..1632311
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/swap_bytes.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_SwapBytes.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SWAP_BYTES_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_SWAP_BYTES_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * Swap bytes (to simplify operations on Little Endian machines)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_SwapBytes(
+    const WebRtc_UWord16* input,   /* (i) the sequence to swap */
+    WebRtc_Word16 wordLength,      /* (i) number or WebRtc_UWord16 to swap */
+    WebRtc_UWord16* output         /* (o) the swapped sequence */
+                              );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/test/iLBC_test.c b/src/modules/audio_coding/codecs/ilbc/test/iLBC_test.c
new file mode 100644
index 0000000..19569ac
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/test/iLBC_test.c
@@ -0,0 +1,232 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+	iLBC Speech Coder ANSI-C Source Code
+
+        iLBC_test.c
+
+******************************************************************/
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+#include "ilbc.h"
+
+/*---------------------------------------------------------------*
+ *  Main program to test iLBC encoding and decoding
+ *
+ *  Usage:
+ *	  exefile_name.exe <infile> <bytefile> <outfile> <channel>
+ *
+ *    <infile>   : Input file, speech for encoder (16-bit pcm file)
+ *    <bytefile> : Bit stream output from the encoder
+ *    <outfile>  : Output file, decoded speech (16-bit pcm file)
+ *    <channel>  : Bit error file, optional (16-bit)
+ *                     1 - Packet received correctly
+ *                     0 - Packet Lost
+ *
+ *--------------------------------------------------------------*/
+
+#define BLOCKL_MAX			240
+#define ILBCNOOFWORDS_MAX	25
+
+
+int main(int argc, char* argv[])
+{
+
+  FILE *ifileid,*efileid,*ofileid, *cfileid;
+  WebRtc_Word16 data[BLOCKL_MAX];
+  WebRtc_Word16 encoded_data[ILBCNOOFWORDS_MAX], decoded_data[BLOCKL_MAX];
+  int len;
+  short pli, mode;
+  int blockcount = 0;
+  int packetlosscount = 0;
+  int frameLen;
+  WebRtc_Word16 speechType;
+  iLBC_encinst_t *Enc_Inst;
+  iLBC_decinst_t *Dec_Inst;
+
+#ifdef __ILBC_WITH_40BITACC
+  /* Doublecheck that long long exists */
+  if (sizeof(long)>=sizeof(long long)) {
+    fprintf(stderr, "40-bit simulation is not be supported on this platform\n");
+    exit(0);
+  }
+#endif
+
+  /* get arguments and open files */
+
+  if ((argc!=5) && (argc!=6)) {
+    fprintf(stderr,
+            "\n*-----------------------------------------------*\n");
+    fprintf(stderr,
+            "   %s <20,30> input encoded decoded (channel)\n\n",
+            argv[0]);
+    fprintf(stderr,
+            "   mode    : Frame size for the encoding/decoding\n");
+    fprintf(stderr,
+            "                 20 - 20 ms\n");
+    fprintf(stderr,
+            "                 30 - 30 ms\n");
+    fprintf(stderr,
+            "   input   : Speech for encoder (16-bit pcm file)\n");
+    fprintf(stderr,
+            "   encoded : Encoded bit stream\n");
+    fprintf(stderr,
+            "   decoded : Decoded speech (16-bit pcm file)\n");
+    fprintf(stderr,
+            "   channel : Packet loss pattern, optional (16-bit)\n");
+    fprintf(stderr,
+            "                  1 - Packet received correctly\n");
+    fprintf(stderr,
+            "                  0 - Packet Lost\n");
+    fprintf(stderr,
+            "*-----------------------------------------------*\n\n");
+    exit(1);
+  }
+  mode=atoi(argv[1]);
+  if (mode != 20 && mode != 30) {
+    fprintf(stderr,"Wrong mode %s, must be 20, or 30\n",
+            argv[1]);
+    exit(2);
+  }
+  if ( (ifileid=fopen(argv[2],"rb")) == NULL) {
+    fprintf(stderr,"Cannot open input file %s\n", argv[2]);
+    exit(2);}
+  if ( (efileid=fopen(argv[3],"wb")) == NULL) {
+    fprintf(stderr, "Cannot open encoded file file %s\n",
+            argv[3]); exit(1);}
+  if ( (ofileid=fopen(argv[4],"wb")) == NULL) {
+    fprintf(stderr, "Cannot open decoded file %s\n",
+            argv[4]); exit(1);}
+  if (argc==6) {
+    if( (cfileid=fopen(argv[5],"rb")) == NULL) {
+      fprintf(stderr, "Cannot open channel file %s\n",
+              argv[5]);
+      exit(1);
+    }
+  } else {
+    cfileid=NULL;
+  }
+
+  /* print info */
+
+  fprintf(stderr, "\n");
+  fprintf(stderr,
+          "*---------------------------------------------------*\n");
+  fprintf(stderr,
+          "*                                                   *\n");
+  fprintf(stderr,
+          "*      iLBC test program                            *\n");
+  fprintf(stderr,
+          "*                                                   *\n");
+  fprintf(stderr,
+          "*                                                   *\n");
+  fprintf(stderr,
+          "*---------------------------------------------------*\n");
+  fprintf(stderr,"\nMode           : %2d ms\n", mode);
+  fprintf(stderr,"Input file     : %s\n", argv[2]);
+  fprintf(stderr,"Encoded file   : %s\n", argv[3]);
+  fprintf(stderr,"Output file    : %s\n", argv[4]);
+  if (argc==6) {
+    fprintf(stderr,"Channel file   : %s\n", argv[5]);
+  }
+  fprintf(stderr,"\n");
+
+  /* Create structs */
+  WebRtcIlbcfix_EncoderCreate(&Enc_Inst);
+  WebRtcIlbcfix_DecoderCreate(&Dec_Inst);
+
+
+  /* Initialization */
+
+  WebRtcIlbcfix_EncoderInit(Enc_Inst, mode);
+  WebRtcIlbcfix_DecoderInit(Dec_Inst, mode);
+  frameLen = mode*8;
+
+  /* loop over input blocks */
+
+  while (((WebRtc_Word16)fread(data,sizeof(WebRtc_Word16),frameLen,ifileid))==
+         frameLen) {
+
+    blockcount++;
+
+    /* encoding */
+
+    fprintf(stderr, "--- Encoding block %i --- ",blockcount);
+    len=WebRtcIlbcfix_Encode(Enc_Inst, data, (WebRtc_Word16)frameLen, encoded_data);
+    fprintf(stderr, "\r");
+
+    /* write byte file */
+
+    if (fwrite(encoded_data, sizeof(WebRtc_Word16),
+               ((len+1)/sizeof(WebRtc_Word16)), efileid) !=
+        (size_t)(((len+1)/sizeof(WebRtc_Word16)))) {
+      return -1;
+    }
+
+    /* get channel data if provided */
+    if (argc==6) {
+      if (fread(&pli, sizeof(WebRtc_Word16), 1, cfileid)) {
+        if ((pli!=0)&&(pli!=1)) {
+          fprintf(stderr, "Error in channel file\n");
+          exit(0);
+        }
+        if (pli==0) {
+          /* Packet loss -> remove info from frame */
+          memset(encoded_data, 0,
+                 sizeof(WebRtc_Word16)*ILBCNOOFWORDS_MAX);
+          packetlosscount++;
+        }
+      } else {
+        fprintf(stderr, "Error. Channel file too short\n");
+        exit(0);
+      }
+    } else {
+      pli=1;
+    }
+
+    /* decoding */
+
+    fprintf(stderr, "--- Decoding block %i --- ",blockcount);
+    if (pli==1) {
+      len=WebRtcIlbcfix_Decode(Dec_Inst, encoded_data,
+                               (WebRtc_Word16)len, decoded_data,&speechType);
+    } else {
+      len=WebRtcIlbcfix_DecodePlc(Dec_Inst, decoded_data, 1);
+    }
+    fprintf(stderr, "\r");
+
+    /* write output file */
+
+    if (fwrite(decoded_data, sizeof(WebRtc_Word16), len,
+               ofileid) != (size_t)len) {
+      return -1;
+    }
+  }
+
+  /* close files */
+
+  fclose(ifileid);  fclose(efileid); fclose(ofileid);
+  if (argc==6) {
+    fclose(cfileid);
+  }
+
+  /* Free structs */
+  WebRtcIlbcfix_EncoderFree(Enc_Inst);
+  WebRtcIlbcfix_DecoderFree(Dec_Inst);
+
+
+  printf("\nDone with simulation\n\n");
+
+  return(0);
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/test/iLBC_testLib.c b/src/modules/audio_coding/codecs/ilbc/test/iLBC_testLib.c
new file mode 100644
index 0000000..ee5e484
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/test/iLBC_testLib.c
@@ -0,0 +1,207 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+iLBC Speech Coder ANSI-C Source Code
+
+iLBC_test.c
+
+******************************************************************/
+
+#include <math.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+#include <time.h>
+#include "ilbc.h"
+
+//#define JUNK_DATA
+#ifdef JUNK_DATA
+#define SEED_FILE "randseed.txt"
+#endif
+
+
+/*----------------------------------------------------------------*
+*  Main program to test iLBC encoding and decoding
+*
+*  Usage:
+*		exefile_name.exe <infile> <bytefile> <outfile>
+*
+*---------------------------------------------------------------*/
+
+int main(int argc, char* argv[])
+{
+  FILE *ifileid,*efileid,*ofileid, *chfileid;
+  short encoded_data[55], data[240], speechType;
+  short len, mode, pli;
+  int blockcount = 0;
+
+  iLBC_encinst_t *Enc_Inst;
+  iLBC_decinst_t *Dec_Inst;
+#ifdef JUNK_DATA
+  int i;
+  FILE *seedfile;
+  unsigned int random_seed = (unsigned int) time(NULL);//1196764538
+#endif
+
+  /* Create structs */
+  WebRtcIlbcfix_EncoderCreate(&Enc_Inst);
+  WebRtcIlbcfix_DecoderCreate(&Dec_Inst);
+
+  /* get arguments and open files */
+
+  if (argc != 6 ) {
+    fprintf(stderr, "%s mode inputfile bytefile outputfile channelfile\n",
+            argv[0]);
+    fprintf(stderr, "Example:\n");
+    fprintf(stderr, "%s <30,20> in.pcm byte.dat out.pcm T30.0.dat\n", argv[0]);
+    exit(1);
+  }
+  mode=atoi(argv[1]);
+  if (mode != 20 && mode != 30) {
+    fprintf(stderr,"Wrong mode %s, must be 20, or 30\n", argv[1]);
+    exit(2);
+  }
+  if ( (ifileid=fopen(argv[2],"rb")) == NULL) {
+    fprintf(stderr,"Cannot open input file %s\n", argv[2]);
+    exit(2);}
+  if ( (efileid=fopen(argv[3],"wb")) == NULL) {
+    fprintf(stderr, "Cannot open channelfile file %s\n",
+            argv[3]); exit(3);}
+  if( (ofileid=fopen(argv[4],"wb")) == NULL) {
+    fprintf(stderr, "Cannot open output file %s\n",
+            argv[4]); exit(3);}
+  if ( (chfileid=fopen(argv[5],"rb")) == NULL) {
+    fprintf(stderr,"Cannot open channel file file %s\n", argv[5]);
+    exit(2);
+  }
+  /* print info */
+  fprintf(stderr, "\n");
+  fprintf(stderr,
+          "*---------------------------------------------------*\n");
+  fprintf(stderr,
+          "*                                                   *\n");
+  fprintf(stderr,
+          "*      iLBCtest                                     *\n");
+  fprintf(stderr,
+          "*                                                   *\n");
+  fprintf(stderr,
+          "*                                                   *\n");
+  fprintf(stderr,
+		"*---------------------------------------------------*\n");
+#ifdef SPLIT_10MS
+  fprintf(stderr,"\n10ms split with raw mode: %2d ms\n", mode);
+#else
+  fprintf(stderr,"\nMode          : %2d ms\n", mode);
+#endif
+  fprintf(stderr,"\nInput file    : %s\n", argv[2]);
+  fprintf(stderr,"Coded file    : %s\n", argv[3]);
+  fprintf(stderr,"Output file   : %s\n\n", argv[4]);
+  fprintf(stderr,"Channel file  : %s\n\n", argv[5]);
+
+#ifdef JUNK_DATA
+  srand(random_seed);
+
+  if ( (seedfile = fopen(SEED_FILE, "a+t") ) == NULL ) {
+    fprintf(stderr, "Error: Could not open file %s\n", SEED_FILE);
+  }
+  else {
+    fprintf(seedfile, "%u\n", random_seed);
+    fclose(seedfile);
+  }
+#endif
+
+  /* Initialization */
+  WebRtcIlbcfix_EncoderInit(Enc_Inst, mode);
+  WebRtcIlbcfix_DecoderInit(Dec_Inst, mode);
+
+  /* loop over input blocks */
+#ifdef SPLIT_10MS
+  while(fread(data, sizeof(short), 80, ifileid) == 80) {
+#else
+  while((short)fread(data,sizeof(short),(mode<<3),ifileid)==(mode<<3)) {
+#endif
+    blockcount++;
+
+    /* encoding */
+    fprintf(stderr, "--- Encoding block %i --- ",blockcount);
+#ifdef SPLIT_10MS
+    len=WebRtcIlbcfix_Encode(Enc_Inst, data, 80, encoded_data);
+#else
+    len=WebRtcIlbcfix_Encode(Enc_Inst, data, (short)(mode<<3), encoded_data);
+#endif
+    fprintf(stderr, "\r");
+
+#ifdef JUNK_DATA
+    for ( i = 0; i < len; i++) {
+      encoded_data[i] = (short) (encoded_data[i] + (short) rand());
+    }
+#endif
+    /* write byte file */
+    if(len != 0){ //len may be 0 in 10ms split case
+      fwrite(encoded_data,1,len,efileid);
+    }
+
+    if(len != 0){ //len may be 0 in 10ms split case
+      /* get channel data if provided */
+      if (argc==6) {
+        if (fread(&pli, sizeof(WebRtc_Word16), 1, chfileid)) {
+          if ((pli!=0)&&(pli!=1)) {
+            fprintf(stderr, "Error in channel file\n");
+            exit(0);
+          }
+          if (pli==0) {
+            /* Packet loss -> remove info from frame */
+            memset(encoded_data, 0, sizeof(WebRtc_Word16)*25);
+          }
+        } else {
+          fprintf(stderr, "Error. Channel file too short\n");
+          exit(0);
+        }
+      } else {
+        pli=1;
+      }
+
+      /* decoding */
+      fprintf(stderr, "--- Decoding block %i --- ",blockcount);
+      if (pli==1) {
+        len=WebRtcIlbcfix_Decode(Dec_Inst, encoded_data, len, data, &speechType);
+      } else {
+        len=WebRtcIlbcfix_DecodePlc(Dec_Inst, data, 1);
+      }
+      fprintf(stderr, "\r");
+
+      /* write output file */
+      fwrite(data,sizeof(short),len,ofileid);
+    }
+  }
+
+#ifdef JUNK_DATA
+  if ( (seedfile = fopen(SEED_FILE, "a+t") ) == NULL ) {
+    fprintf(stderr, "Error: Could not open file %s\n", SEED_FILE);
+  }
+  else {
+    fprintf(seedfile, "ok\n\n");
+    fclose(seedfile);
+  }
+#endif
+
+  /* free structs */
+  WebRtcIlbcfix_EncoderFree(Enc_Inst);
+  WebRtcIlbcfix_DecoderFree(Dec_Inst);
+
+  /* close files */
+  fclose(ifileid);
+  fclose(efileid);
+  fclose(ofileid);
+
+  return 0;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/test/iLBC_testprogram.c b/src/modules/audio_coding/codecs/ilbc/test/iLBC_testprogram.c
new file mode 100644
index 0000000..f67945e
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/test/iLBC_testprogram.c
@@ -0,0 +1,343 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+	iLBC Speech Coder ANSI-C Source Code
+
+        iLBC_test.c
+
+******************************************************************/
+
+#include <math.h>
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+
+#include "defines.h"
+#include "nit_encode.h"
+#include "encode.h"
+#include "init_decode.h"
+#include "decode.h"
+#include "constants.h"
+#include "ilbc.h"
+
+#define ILBCNOOFWORDS_MAX (NO_OF_BYTES_30MS)/2
+
+/* Runtime statistics */
+#include <time.h>
+/* #define CLOCKS_PER_SEC  1000 */
+
+/*----------------------------------------------------------------*
+ *  Encoder interface function
+ *---------------------------------------------------------------*/
+
+short encode(                         /* (o) Number of bytes encoded */
+    iLBC_Enc_Inst_t *iLBCenc_inst,    /* (i/o) Encoder instance */
+    WebRtc_Word16 *encoded_data,      /* (o) The encoded bytes */
+    WebRtc_Word16 *data               /* (i) The signal block to encode */
+                                                        ){
+
+  /* do the actual encoding */
+  WebRtcIlbcfix_Encode((WebRtc_UWord16 *)encoded_data, data, iLBCenc_inst);
+
+  return (iLBCenc_inst->no_of_bytes);
+}
+
+/*----------------------------------------------------------------*
+ *  Decoder interface function
+ *---------------------------------------------------------------*/
+
+short decode( /* (o) Number of decoded samples */
+    iLBC_Dec_Inst_t *iLBCdec_inst, /* (i/o) Decoder instance */
+    short *decoded_data, /* (o) Decoded signal block */
+    short *encoded_data, /* (i) Encoded bytes */
+    short mode           /* (i) 0=PL, 1=Normal */
+              ){
+
+  /* check if mode is valid */
+
+  if (mode<0 || mode>1) {
+    printf("\nERROR - Wrong mode - 0, 1 allowed\n"); exit(3);}
+
+  /* do actual decoding of block */
+
+  WebRtcIlbcfix_Decode(decoded_data, (WebRtc_UWord16 *)encoded_data,
+                       iLBCdec_inst, mode);
+
+  return (iLBCdec_inst->blockl);
+}
+
+/*----------------------------------------------------------------*
+ *  Main program to test iLBC encoding and decoding
+ *
+ *  Usage:
+ *		exefile_name.exe <infile> <bytefile> <outfile> <channelfile>
+ *
+ *---------------------------------------------------------------*/
+
+#define MAXFRAMES   10000
+#define MAXFILELEN (BLOCKL_MAX*MAXFRAMES)
+
+int main(int argc, char* argv[])
+{
+
+  /* Runtime statistics */
+
+  float starttime1, starttime2;
+  float runtime1, runtime2;
+  float outtime;
+
+  FILE *ifileid,*efileid,*ofileid, *chfileid;
+  short *inputdata, *encodeddata, *decodeddata;
+  short *channeldata;
+  int blockcount = 0, noOfBlocks=0, i, noOfLostBlocks=0;
+  short mode;
+  iLBC_Enc_Inst_t Enc_Inst;
+  iLBC_Dec_Inst_t Dec_Inst;
+
+  short frameLen;
+  short count;
+#ifdef SPLIT_10MS
+  short size;
+#endif
+
+  inputdata=(short*) malloc(MAXFILELEN*sizeof(short));
+  if (inputdata==NULL) {
+    fprintf(stderr,"Could not allocate memory for vector\n");
+    exit(0);
+  }
+  encodeddata=(short*) malloc(ILBCNOOFWORDS_MAX*MAXFRAMES*sizeof(short));
+  if (encodeddata==NULL) {
+    fprintf(stderr,"Could not allocate memory for vector\n");
+    free(inputdata);
+    exit(0);
+  }
+  decodeddata=(short*) malloc(MAXFILELEN*sizeof(short));
+  if (decodeddata==NULL) {
+    fprintf(stderr,"Could not allocate memory for vector\n");
+    free(inputdata);
+    free(encodeddata);
+    exit(0);
+  }
+  channeldata=(short*) malloc(MAXFRAMES*sizeof(short));
+  if (channeldata==NULL) {
+    fprintf(stderr,"Could not allocate memory for vector\n");
+    free(inputdata);
+    free(encodeddata);
+    free(decodeddata);
+    exit(0);
+  }
+
+  /* get arguments and open files */
+
+  if (argc != 6 ) {
+    fprintf(stderr, "%s mode inputfile bytefile outputfile channelfile\n",
+            argv[0]);
+    fprintf(stderr, "Example:\n");
+    fprintf(stderr, "%s <30,20> in.pcm byte.dat out.pcm T30.0.dat\n", argv[0]);
+    exit(1);
+  }
+  mode=atoi(argv[1]);
+  if (mode != 20 && mode != 30) {
+    fprintf(stderr,"Wrong mode %s, must be 20, or 30\n", argv[1]);
+    exit(2);
+  }
+  if ( (ifileid=fopen(argv[2],"rb")) == NULL) {
+    fprintf(stderr,"Cannot open input file %s\n", argv[2]);
+    exit(2);}
+  if ( (efileid=fopen(argv[3],"wb")) == NULL) {
+    fprintf(stderr, "Cannot open channelfile file %s\n",
+            argv[3]); exit(3);}
+  if( (ofileid=fopen(argv[4],"wb")) == NULL) {
+    fprintf(stderr, "Cannot open output file %s\n",
+            argv[4]); exit(3);}
+  if ( (chfileid=fopen(argv[5],"rb")) == NULL) {
+    fprintf(stderr,"Cannot open channel file file %s\n", argv[5]);
+    exit(2);}
+
+
+  /* print info */
+#ifndef PRINT_MIPS
+  fprintf(stderr, "\n");
+  fprintf(stderr,
+          "*---------------------------------------------------*\n");
+  fprintf(stderr,
+          "*                                                   *\n");
+  fprintf(stderr,
+          "*      iLBCtest                                     *\n");
+  fprintf(stderr,
+          "*                                                   *\n");
+  fprintf(stderr,
+          "*                                                   *\n");
+  fprintf(stderr,
+          "*---------------------------------------------------*\n");
+#ifdef SPLIT_10MS
+  fprintf(stderr,"\n10ms split with raw mode: %2d ms\n", mode);
+#else
+  fprintf(stderr,"\nMode          : %2d ms\n", mode);
+#endif
+  fprintf(stderr,"\nInput file    : %s\n", argv[2]);
+  fprintf(stderr,"Coded file    : %s\n", argv[3]);
+  fprintf(stderr,"Output file   : %s\n\n", argv[4]);
+  fprintf(stderr,"Channel file  : %s\n\n", argv[5]);
+#endif
+
+  /* Initialization */
+
+  WebRtcIlbcfix_EncoderInit(&Enc_Inst, mode);
+  WebRtcIlbcfix_DecoderInit(&Dec_Inst, mode, 1);
+
+  /* extract the input file and channel file */
+
+#ifdef SPLIT_10MS
+  frameLen = (mode==20)? 80:160;
+  fread(Enc_Inst.past_samples, sizeof(short), frameLen, ifileid);
+  Enc_Inst.section = 0;
+
+  while( fread(&inputdata[noOfBlocks*80], sizeof(short),
+               80, ifileid) == 80 ) {
+    noOfBlocks++;
+  }
+
+  noOfBlocks += frameLen/80;
+  frameLen = 80;
+#else
+  frameLen = Enc_Inst.blockl;
+
+  while( fread(&inputdata[noOfBlocks*Enc_Inst.blockl],sizeof(short),
+               Enc_Inst.blockl,ifileid)==(WebRtc_UWord16)Enc_Inst.blockl){
+    noOfBlocks++;
+  }
+#endif
+
+
+  while ((fread(&channeldata[blockcount],sizeof(short), 1,chfileid)==1)
+            && ( blockcount < noOfBlocks/(Enc_Inst.blockl/frameLen) )) {
+    blockcount++;
+  }
+
+  if ( blockcount < noOfBlocks/(Enc_Inst.blockl/frameLen) ) {
+    fprintf(stderr,"Channel file %s is too short\n", argv[4]);
+    free(inputdata);
+    free(encodeddata);
+    free(decodeddata);
+    free(channeldata);
+    exit(0);
+  }
+
+  count=0;
+
+  /* Runtime statistics */
+
+  starttime1 = clock()/(float)CLOCKS_PER_SEC;
+
+  /* Encoding loop */
+#ifdef PRINT_MIPS
+  printf("-1 -1\n");
+#endif
+
+#ifdef SPLIT_10MS
+  /* "Enc_Inst.section != 0" is to make sure we run through full
+     lengths of all vectors for 10ms split mode.
+  */
+  //   while( (count < noOfBlocks) || (Enc_Inst.section != 0) )    {
+  while( count < blockcount * (Enc_Inst.blockl/frameLen) )    {
+
+    encode(&Enc_Inst, &encodeddata[Enc_Inst.no_of_words *
+                                   (count/(Enc_Inst.nsub/2))],
+           &inputdata[frameLen * count] );
+#else
+    while (count < noOfBlocks) {
+      encode( &Enc_Inst, &encodeddata[Enc_Inst.no_of_words * count],
+              &inputdata[frameLen * count] );
+#endif
+
+#ifdef PRINT_MIPS
+      printf("-1 -1\n");
+#endif
+
+      count++;
+    }
+
+    count=0;
+
+    /* Runtime statistics */
+
+    starttime2=clock()/(float)CLOCKS_PER_SEC;
+    runtime1 = (float)(starttime2-starttime1);
+
+    /* Decoding loop */
+
+    while (count < blockcount) {
+      if (channeldata[count]==1) {
+        /* Normal decoding */
+        decode(&Dec_Inst, &decodeddata[count * Dec_Inst.blockl],
+               &encodeddata[Dec_Inst.no_of_words * count], 1);
+      } else if (channeldata[count]==0) {
+        /* PLC */
+        short emptydata[ILBCNOOFWORDS_MAX];
+        memset(emptydata, 0, Dec_Inst.no_of_words*sizeof(short));
+        decode(&Dec_Inst, &decodeddata[count*Dec_Inst.blockl],
+               emptydata, 0);
+        noOfLostBlocks++;
+      } else {
+        printf("Error in channel file (values have to be either 1 or 0)\n");
+        exit(0);
+      }
+#ifdef PRINT_MIPS
+      printf("-1 -1\n");
+#endif
+
+      count++;
+    }
+
+    /* Runtime statistics */
+
+    runtime2 = (float)(clock()/(float)CLOCKS_PER_SEC-starttime2);
+
+    outtime = (float)((float)blockcount*
+                      (float)mode/1000.0);
+
+#ifndef PRINT_MIPS
+    printf("\nLength of speech file: %.1f s\n", outtime);
+    printf("Lost frames          : %.1f%%\n\n", 100*(float)noOfLostBlocks/(float)blockcount);
+
+    printf("Time to run iLBC_encode+iLBC_decode:");
+    printf(" %.1f s (%.1f%% of realtime)\n", runtime1+runtime2,
+           (100*(runtime1+runtime2)/outtime));
+
+    printf("Time in iLBC_encode                :");
+    printf(" %.1f s (%.1f%% of total runtime)\n",
+           runtime1, 100.0*runtime1/(runtime1+runtime2));
+
+    printf("Time in iLBC_decode                :");
+    printf(" %.1f s (%.1f%% of total runtime)\n\n",
+           runtime2, 100.0*runtime2/(runtime1+runtime2));
+#endif
+
+    /* Write data to files */
+    for (i=0; i<blockcount; i++) {
+      fwrite(&encodeddata[i*Enc_Inst.no_of_words], sizeof(short),
+             Enc_Inst.no_of_words, efileid);
+    }
+    for (i=0;i<blockcount;i++) {
+      fwrite(&decodeddata[i*Enc_Inst.blockl],sizeof(short),Enc_Inst.blockl,ofileid);
+    }
+
+    /* return memory and close files */
+
+    free(inputdata);
+    free(encodeddata);
+    free(decodeddata);
+    free(channeldata);
+    fclose(ifileid);  fclose(efileid); fclose(ofileid);
+    return(0);
+  }
diff --git a/src/modules/audio_coding/codecs/ilbc/test/iLBCtestscript.txt b/src/modules/audio_coding/codecs/ilbc/test/iLBCtestscript.txt
new file mode 100644
index 0000000..db0e9a0
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/test/iLBCtestscript.txt
@@ -0,0 +1,73 @@
+#!/bin/bash
+(set -o igncr) 2>/dev/null && set -o igncr; # force bash to ignore \r character
+
+#
+# This script can be used to verify the bit exactness of iLBC fixed-point version 1.0.6
+#
+
+INP=../../../../../../../resources/audio_coding
+EXEP=../../../../../../../out/Release
+OUTP=./GeneratedFiles
+mkdir ./GeneratedFiles
+
+$EXEP/iLBCtest 20 $INP/F00.INP $OUTP/F00.BIT20 $OUTP/F00.OUT20 $INP/clean.chn
+$EXEP/iLBCtest 20 $INP/F01.INP $OUTP/F01.BIT20 $OUTP/F01.OUT20 $INP/clean.chn
+$EXEP/iLBCtest 20 $INP/F02.INP $OUTP/F02.BIT20 $OUTP/F02.OUT20 $INP/clean.chn
+$EXEP/iLBCtest 20 $INP/F03.INP $OUTP/F03.BIT20 $OUTP/F03.OUT20 $INP/clean.chn
+$EXEP/iLBCtest 20 $INP/F04.INP $OUTP/F04.BIT20 $OUTP/F04.OUT20 $INP/clean.chn
+$EXEP/iLBCtest 20 $INP/F05.INP $OUTP/F05.BIT20 $OUTP/F05.OUT20 $INP/clean.chn
+$EXEP/iLBCtest 20 $INP/F06.INP $OUTP/F06.BIT20 $OUTP/F06.OUT20 $INP/clean.chn
+
+$EXEP/iLBCtest 30 $INP/F00.INP $OUTP/F00.BIT30 $OUTP/F00.OUT30 $INP/clean.chn
+$EXEP/iLBCtest 30 $INP/F01.INP $OUTP/F01.BIT30 $OUTP/F01.OUT30 $INP/clean.chn
+$EXEP/iLBCtest 30 $INP/F02.INP $OUTP/F02.BIT30 $OUTP/F02.OUT30 $INP/clean.chn
+$EXEP/iLBCtest 30 $INP/F03.INP $OUTP/F03.BIT30 $OUTP/F03.OUT30 $INP/clean.chn
+$EXEP/iLBCtest 30 $INP/F04.INP $OUTP/F04.BIT30 $OUTP/F04.OUT30 $INP/clean.chn
+$EXEP/iLBCtest 30 $INP/F05.INP $OUTP/F05.BIT30 $OUTP/F05.OUT30 $INP/clean.chn
+$EXEP/iLBCtest 30 $INP/F06.INP $OUTP/F06.BIT30 $OUTP/F06.OUT30 $INP/clean.chn
+
+$EXEP/iLBCtest 20 $INP/F00.INP $OUTP/F00.BIT20 $OUTP/F00_tlm10.OUT20 $INP/tlm10.chn
+$EXEP/iLBCtest 20 $INP/F01.INP $OUTP/F01.BIT20 $OUTP/F01_tlm10.OUT20 $INP/tlm10.chn
+$EXEP/iLBCtest 20 $INP/F02.INP $OUTP/F02.BIT20 $OUTP/F02_tlm10.OUT20 $INP/tlm10.chn
+$EXEP/iLBCtest 30 $INP/F00.INP $OUTP/F00.BIT30 $OUTP/F00_tlm10.OUT30 $INP/tlm10.chn
+$EXEP/iLBCtest 30 $INP/F01.INP $OUTP/F01.BIT30 $OUTP/F01_tlm10.OUT30 $INP/tlm10.chn
+$EXEP/iLBCtest 30 $INP/F02.INP $OUTP/F02.BIT30 $OUTP/F02_tlm10.OUT30 $INP/tlm10.chn
+
+
+diff $OUTP/F00.BIT20 $INP/F00.BIT20
+diff $OUTP/F01.BIT20 $INP/F01.BIT20
+diff $OUTP/F02.BIT20 $INP/F02.BIT20
+diff $OUTP/F03.BIT20 $INP/F03.BIT20
+diff $OUTP/F04.BIT20 $INP/F04.BIT20
+diff $OUTP/F05.BIT20 $INP/F05.BIT20
+diff $OUTP/F06.BIT20 $INP/F06.BIT20
+diff $OUTP/F00.OUT20 $INP/F00.OUT20
+diff $OUTP/F01.OUT20 $INP/F01.OUT20
+diff $OUTP/F02.OUT20 $INP/F02.OUT20
+diff $OUTP/F03.OUT20 $INP/F03.OUT20
+diff $OUTP/F04.OUT20 $INP/F04.OUT20
+diff $OUTP/F05.OUT20 $INP/F05.OUT20
+diff $OUTP/F06.OUT20 $INP/F06.OUT20
+
+diff $OUTP/F00.BIT30 $INP/F00.BIT30
+diff $OUTP/F01.BIT30 $INP/F01.BIT30
+diff $OUTP/F02.BIT30 $INP/F02.BIT30
+diff $OUTP/F03.BIT30 $INP/F03.BIT30
+diff $OUTP/F04.BIT30 $INP/F04.BIT30
+diff $OUTP/F05.BIT30 $INP/F05.BIT30
+diff $OUTP/F06.BIT30 $INP/F06.BIT30
+diff $OUTP/F00.OUT30 $INP/F00.OUT30
+diff $OUTP/F01.OUT30 $INP/F01.OUT30
+diff $OUTP/F02.OUT30 $INP/F02.OUT30
+diff $OUTP/F03.OUT30 $INP/F03.OUT30
+diff $OUTP/F04.OUT30 $INP/F04.OUT30
+diff $OUTP/F05.OUT30 $INP/F05.OUT30
+diff $OUTP/F06.OUT30 $INP/F06.OUT30
+
+diff $OUTP/F00_tlm10.OUT20 $INP/F00_tlm10.OUT20
+diff $OUTP/F01_tlm10.OUT20 $INP/F01_tlm10.OUT20
+diff $OUTP/F02_tlm10.OUT20 $INP/F02_tlm10.OUT20
+diff $OUTP/F00_tlm10.OUT30 $INP/F00_tlm10.OUT30
+diff $OUTP/F01_tlm10.OUT30 $INP/F01_tlm10.OUT30
+diff $OUTP/F02_tlm10.OUT30 $INP/F02_tlm10.OUT30
+
diff --git a/src/modules/audio_coding/codecs/ilbc/unpack_bits.c b/src/modules/audio_coding/codecs/ilbc/unpack_bits.c
new file mode 100644
index 0000000..6ed9265
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/unpack_bits.c
@@ -0,0 +1,239 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_UnpackBits.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  unpacking of bits from bitstream, i.e., vector of bytes
+ *---------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_UnpackBits( /* (o) "Empty" frame indicator */
+    const WebRtc_UWord16 *bitstream,    /* (i) The packatized bitstream */
+    iLBC_bits *enc_bits,  /* (o) Paramerers from bitstream */
+    WebRtc_Word16 mode     /* (i) Codec mode (20 or 30) */
+                                        ) {
+  const WebRtc_UWord16 *bitstreamPtr;
+  int i, k;
+  WebRtc_Word16 *tmpPtr;
+
+  bitstreamPtr=bitstream;
+
+  /* First WebRtc_Word16 */
+  enc_bits->lsf[0]  =  (*bitstreamPtr)>>10;       /* Bit 0..5  */
+  enc_bits->lsf[1]  = ((*bitstreamPtr)>>3)&0x7F;      /* Bit 6..12 */
+  enc_bits->lsf[2]  = ((*bitstreamPtr)&0x7)<<4;      /* Bit 13..15 */
+  bitstreamPtr++;
+  /* Second WebRtc_Word16 */
+  enc_bits->lsf[2] |= ((*bitstreamPtr)>>12)&0xF;      /* Bit 0..3  */
+
+  if (mode==20) {
+    enc_bits->startIdx             = ((*bitstreamPtr)>>10)&0x3;  /* Bit 4..5  */
+    enc_bits->state_first          = ((*bitstreamPtr)>>9)&0x1;  /* Bit 6  */
+    enc_bits->idxForMax            = ((*bitstreamPtr)>>3)&0x3F;  /* Bit 7..12 */
+    enc_bits->cb_index[0]          = ((*bitstreamPtr)&0x7)<<4;  /* Bit 13..15 */
+    bitstreamPtr++;
+    /* Third WebRtc_Word16 */
+    enc_bits->cb_index[0]         |= ((*bitstreamPtr)>>12)&0xE;  /* Bit 0..2  */
+    enc_bits->gain_index[0]        = ((*bitstreamPtr)>>8)&0x18;  /* Bit 3..4  */
+    enc_bits->gain_index[1]        = ((*bitstreamPtr)>>7)&0x8;  /* Bit 5  */
+    enc_bits->cb_index[3]          = ((*bitstreamPtr)>>2)&0xFE;  /* Bit 6..12 */
+    enc_bits->gain_index[3]        = ((*bitstreamPtr)<<2)&0x10;  /* Bit 13  */
+    enc_bits->gain_index[4]        = ((*bitstreamPtr)<<2)&0x8;  /* Bit 14  */
+    enc_bits->gain_index[6]        = ((*bitstreamPtr)<<4)&0x10;  /* Bit 15  */
+  } else { /* mode==30 */
+    enc_bits->lsf[3]               = ((*bitstreamPtr)>>6)&0x3F;  /* Bit 4..9  */
+    enc_bits->lsf[4]               = ((*bitstreamPtr)<<1)&0x7E;  /* Bit 10..15 */
+    bitstreamPtr++;
+    /* Third WebRtc_Word16 */
+    enc_bits->lsf[4]              |= ((*bitstreamPtr)>>15)&0x1;  /* Bit 0  */
+    enc_bits->lsf[5]               = ((*bitstreamPtr)>>8)&0x7F;  /* Bit 1..7  */
+    enc_bits->startIdx             = ((*bitstreamPtr)>>5)&0x7;  /* Bit 8..10 */
+    enc_bits->state_first          = ((*bitstreamPtr)>>4)&0x1;  /* Bit 11  */
+    enc_bits->idxForMax            = ((*bitstreamPtr)<<2)&0x3C;  /* Bit 12..15 */
+    bitstreamPtr++;
+    /* 4:th WebRtc_Word16 */
+    enc_bits->idxForMax           |= ((*bitstreamPtr)>>14)&0x3;  /* Bit 0..1  */
+    enc_bits->cb_index[0]        = ((*bitstreamPtr)>>7)&0x78;  /* Bit 2..5  */
+    enc_bits->gain_index[0]        = ((*bitstreamPtr)>>5)&0x10;  /* Bit 6  */
+    enc_bits->gain_index[1]        = ((*bitstreamPtr)>>5)&0x8;  /* Bit 7  */
+    enc_bits->cb_index[3]          = ((*bitstreamPtr))&0xFC;  /* Bit 8..13 */
+    enc_bits->gain_index[3]        = ((*bitstreamPtr)<<3)&0x10;  /* Bit 14  */
+    enc_bits->gain_index[4]        = ((*bitstreamPtr)<<3)&0x8;  /* Bit 15  */
+  }
+  /* Class 2 bits of ULP */
+  /* 4:th to 6:th WebRtc_Word16 for 20 ms case
+     5:th to 7:th WebRtc_Word16 for 30 ms case */
+  bitstreamPtr++;
+  tmpPtr=enc_bits->idxVec;
+  for (k=0; k<3; k++) {
+    for (i=15; i>=0; i--) {
+      (*tmpPtr)                  = (((*bitstreamPtr)>>i)<<2)&0x4;
+      /* Bit 15-i  */
+      tmpPtr++;
+    }
+    bitstreamPtr++;
+  }
+
+  if (mode==20) {
+    /* 7:th WebRtc_Word16 */
+    for (i=15; i>6; i--) {
+      (*tmpPtr)                  = (((*bitstreamPtr)>>i)<<2)&0x4;
+      /* Bit 15-i  */
+      tmpPtr++;
+    }
+    enc_bits->gain_index[1]       |= ((*bitstreamPtr)>>4)&0x4; /* Bit 9  */
+    enc_bits->gain_index[3]       |= ((*bitstreamPtr)>>2)&0xC; /* Bit 10..11 */
+    enc_bits->gain_index[4]       |= ((*bitstreamPtr)>>1)&0x4; /* Bit 12  */
+    enc_bits->gain_index[6]       |= ((*bitstreamPtr)<<1)&0x8; /* Bit 13  */
+    enc_bits->gain_index[7]        = ((*bitstreamPtr)<<2)&0xC; /* Bit 14..15 */
+
+  } else { /* mode==30 */
+    /* 8:th WebRtc_Word16 */
+    for (i=15; i>5; i--) {
+      (*tmpPtr)                  = (((*bitstreamPtr)>>i)<<2)&0x4;
+      /* Bit 15-i  */
+      tmpPtr++;
+    }
+    enc_bits->cb_index[0]         |= ((*bitstreamPtr)>>3)&0x6; /* Bit 10..11 */
+    enc_bits->gain_index[0]       |= ((*bitstreamPtr))&0x8;  /* Bit 12  */
+    enc_bits->gain_index[1]       |= ((*bitstreamPtr))&0x4;  /* Bit 13  */
+    enc_bits->cb_index[3]         |= ((*bitstreamPtr))&0x2;  /* Bit 14  */
+    enc_bits->cb_index[6]          = ((*bitstreamPtr)<<7)&0x80; /* Bit 15  */
+    bitstreamPtr++;
+    /* 9:th WebRtc_Word16 */
+    enc_bits->cb_index[6]         |= ((*bitstreamPtr)>>9)&0x7E; /* Bit 0..5  */
+    enc_bits->cb_index[9]          = ((*bitstreamPtr)>>2)&0xFE; /* Bit 6..12 */
+    enc_bits->cb_index[12]         = ((*bitstreamPtr)<<5)&0xE0; /* Bit 13..15 */
+    bitstreamPtr++;
+    /* 10:th WebRtc_Word16 */
+    enc_bits->cb_index[12]         |= ((*bitstreamPtr)>>11)&0x1E;/* Bit 0..3 */
+    enc_bits->gain_index[3]       |= ((*bitstreamPtr)>>8)&0xC; /* Bit 4..5  */
+    enc_bits->gain_index[4]       |= ((*bitstreamPtr)>>7)&0x6; /* Bit 6..7  */
+    enc_bits->gain_index[6]        = ((*bitstreamPtr)>>3)&0x18; /* Bit 8..9  */
+    enc_bits->gain_index[7]        = ((*bitstreamPtr)>>2)&0xC; /* Bit 10..11 */
+    enc_bits->gain_index[9]        = ((*bitstreamPtr)<<1)&0x10; /* Bit 12  */
+    enc_bits->gain_index[10]       = ((*bitstreamPtr)<<1)&0x8; /* Bit 13  */
+    enc_bits->gain_index[12]       = ((*bitstreamPtr)<<3)&0x10; /* Bit 14  */
+    enc_bits->gain_index[13]       = ((*bitstreamPtr)<<3)&0x8; /* Bit 15  */
+  }
+  bitstreamPtr++;
+  /* Class 3 bits of ULP */
+  /*  8:th to 14:th WebRtc_Word16 for 20 ms case
+      11:th to 17:th WebRtc_Word16 for 30 ms case */
+  tmpPtr=enc_bits->idxVec;
+  for (k=0; k<7; k++) {
+    for (i=14; i>=0; i-=2) {
+      (*tmpPtr)                 |= ((*bitstreamPtr)>>i)&0x3; /* Bit 15-i..14-i*/
+      tmpPtr++;
+    }
+    bitstreamPtr++;
+  }
+
+  if (mode==20) {
+    /* 15:th WebRtc_Word16 */
+    enc_bits->idxVec[56]          |= ((*bitstreamPtr)>>14)&0x3; /* Bit 0..1  */
+    enc_bits->cb_index[0]         |= ((*bitstreamPtr)>>13)&0x1; /* Bit 2  */
+    enc_bits->cb_index[1]          = ((*bitstreamPtr)>>6)&0x7F; /* Bit 3..9  */
+    enc_bits->cb_index[2]          = ((*bitstreamPtr)<<1)&0x7E; /* Bit 10..15 */
+    bitstreamPtr++;
+    /* 16:th WebRtc_Word16 */
+    enc_bits->cb_index[2]         |= ((*bitstreamPtr)>>15)&0x1; /* Bit 0  */
+    enc_bits->gain_index[0]       |= ((*bitstreamPtr)>>12)&0x7; /* Bit 1..3  */
+    enc_bits->gain_index[1]       |= ((*bitstreamPtr)>>10)&0x3; /* Bit 4..5  */
+    enc_bits->gain_index[2]        = ((*bitstreamPtr)>>7)&0x7; /* Bit 6..8  */
+    enc_bits->cb_index[3]         |= ((*bitstreamPtr)>>6)&0x1; /* Bit 9  */
+    enc_bits->cb_index[4]          = ((*bitstreamPtr)<<1)&0x7E; /* Bit 10..15 */
+    bitstreamPtr++;
+    /* 17:th WebRtc_Word16 */
+    enc_bits->cb_index[4]         |= ((*bitstreamPtr)>>15)&0x1; /* Bit 0  */
+    enc_bits->cb_index[5]          = ((*bitstreamPtr)>>8)&0x7F; /* Bit 1..7  */
+    enc_bits->cb_index[6]          = ((*bitstreamPtr))&0xFF; /* Bit 8..15 */
+    bitstreamPtr++;
+    /* 18:th WebRtc_Word16 */
+    enc_bits->cb_index[7]          = (*bitstreamPtr)>>8;  /* Bit 0..7  */
+    enc_bits->cb_index[8]          = (*bitstreamPtr)&0xFF;  /* Bit 8..15 */
+    bitstreamPtr++;
+    /* 19:th WebRtc_Word16 */
+    enc_bits->gain_index[3]       |= ((*bitstreamPtr)>>14)&0x3; /* Bit 0..1  */
+    enc_bits->gain_index[4]       |= ((*bitstreamPtr)>>12)&0x3; /* Bit 2..3  */
+    enc_bits->gain_index[5]        = ((*bitstreamPtr)>>9)&0x7; /* Bit 4..6  */
+    enc_bits->gain_index[6]       |= ((*bitstreamPtr)>>6)&0x7; /* Bit 7..9  */
+    enc_bits->gain_index[7]       |= ((*bitstreamPtr)>>4)&0x3; /* Bit 10..11 */
+    enc_bits->gain_index[8]        = ((*bitstreamPtr)>>1)&0x7; /* Bit 12..14 */
+  } else { /* mode==30 */
+    /* 18:th WebRtc_Word16 */
+    enc_bits->idxVec[56]          |= ((*bitstreamPtr)>>14)&0x3; /* Bit 0..1  */
+    enc_bits->idxVec[57]          |= ((*bitstreamPtr)>>12)&0x3; /* Bit 2..3  */
+    enc_bits->cb_index[0]         |= ((*bitstreamPtr)>>11)&1; /* Bit 4  */
+    enc_bits->cb_index[1]          = ((*bitstreamPtr)>>4)&0x7F; /* Bit 5..11 */
+    enc_bits->cb_index[2]          = ((*bitstreamPtr)<<3)&0x78; /* Bit 12..15 */
+    bitstreamPtr++;
+    /* 19:th WebRtc_Word16 */
+    enc_bits->cb_index[2]         |= ((*bitstreamPtr)>>13)&0x7; /* Bit 0..2  */
+    enc_bits->gain_index[0]       |= ((*bitstreamPtr)>>10)&0x7; /* Bit 3..5  */
+    enc_bits->gain_index[1]       |= ((*bitstreamPtr)>>8)&0x3; /* Bit 6..7  */
+    enc_bits->gain_index[2]        = ((*bitstreamPtr)>>5)&0x7; /* Bit 8..10 */
+    enc_bits->cb_index[3]         |= ((*bitstreamPtr)>>4)&0x1; /* Bit 11  */
+    enc_bits->cb_index[4]          = ((*bitstreamPtr)<<3)&0x78; /* Bit 12..15 */
+    bitstreamPtr++;
+    /* 20:th WebRtc_Word16 */
+    enc_bits->cb_index[4]         |= ((*bitstreamPtr)>>13)&0x7; /* Bit 0..2  */
+    enc_bits->cb_index[5]          = ((*bitstreamPtr)>>6)&0x7F; /* Bit 3..9  */
+    enc_bits->cb_index[6]         |= ((*bitstreamPtr)>>5)&0x1; /* Bit 10  */
+    enc_bits->cb_index[7]          = ((*bitstreamPtr)<<3)&0xF8; /* Bit 11..15 */
+    bitstreamPtr++;
+    /* 21:st WebRtc_Word16 */
+    enc_bits->cb_index[7]         |= ((*bitstreamPtr)>>13)&0x7; /* Bit 0..2  */
+    enc_bits->cb_index[8]          = ((*bitstreamPtr)>>5)&0xFF; /* Bit 3..10 */
+    enc_bits->cb_index[9]         |= ((*bitstreamPtr)>>4)&0x1; /* Bit 11  */
+    enc_bits->cb_index[10]         = ((*bitstreamPtr)<<4)&0xF0; /* Bit 12..15 */
+    bitstreamPtr++;
+    /* 22:nd WebRtc_Word16 */
+    enc_bits->cb_index[10]        |= ((*bitstreamPtr)>>12)&0xF; /* Bit 0..3  */
+    enc_bits->cb_index[11]         = ((*bitstreamPtr)>>4)&0xFF; /* Bit 4..11 */
+    enc_bits->cb_index[12]        |= ((*bitstreamPtr)>>3)&0x1; /* Bit 12  */
+    enc_bits->cb_index[13]         = ((*bitstreamPtr)<<5)&0xE0; /* Bit 13..15 */
+    bitstreamPtr++;
+    /* 23:rd WebRtc_Word16 */
+    enc_bits->cb_index[13]        |= ((*bitstreamPtr)>>11)&0x1F;/* Bit 0..4  */
+    enc_bits->cb_index[14]         = ((*bitstreamPtr)>>3)&0xFF; /* Bit 5..12 */
+    enc_bits->gain_index[3]       |= ((*bitstreamPtr)>>1)&0x3; /* Bit 13..14 */
+    enc_bits->gain_index[4]       |= ((*bitstreamPtr)&0x1);  /* Bit 15  */
+    bitstreamPtr++;
+    /* 24:rd WebRtc_Word16 */
+    enc_bits->gain_index[5]        = ((*bitstreamPtr)>>13)&0x7; /* Bit 0..2  */
+    enc_bits->gain_index[6]       |= ((*bitstreamPtr)>>10)&0x7; /* Bit 3..5  */
+    enc_bits->gain_index[7]       |= ((*bitstreamPtr)>>8)&0x3; /* Bit 6..7  */
+    enc_bits->gain_index[8]        = ((*bitstreamPtr)>>5)&0x7; /* Bit 8..10 */
+    enc_bits->gain_index[9]       |= ((*bitstreamPtr)>>1)&0xF; /* Bit 11..14 */
+    enc_bits->gain_index[10]      |= ((*bitstreamPtr)<<2)&0x4; /* Bit 15  */
+    bitstreamPtr++;
+    /* 25:rd WebRtc_Word16 */
+    enc_bits->gain_index[10]      |= ((*bitstreamPtr)>>14)&0x3; /* Bit 0..1  */
+    enc_bits->gain_index[11]       = ((*bitstreamPtr)>>11)&0x7; /* Bit 2..4  */
+    enc_bits->gain_index[12]      |= ((*bitstreamPtr)>>7)&0xF; /* Bit 5..8  */
+    enc_bits->gain_index[13]      |= ((*bitstreamPtr)>>4)&0x7; /* Bit 9..11 */
+    enc_bits->gain_index[14]       = ((*bitstreamPtr)>>1)&0x7; /* Bit 12..14 */
+  }
+  /* Last bit should be zero, otherwise it's an "empty" frame */
+  if (((*bitstreamPtr)&0x1) == 1) {
+    return(1);
+  } else {
+    return(0);
+  }
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/unpack_bits.h b/src/modules/audio_coding/codecs/ilbc/unpack_bits.h
new file mode 100644
index 0000000..31c728e
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/unpack_bits.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_UnpackBits.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_UNPACK_BITS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_UNPACK_BITS_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  unpacking of bits from bitstream, i.e., vector of bytes
+ *---------------------------------------------------------------*/
+
+WebRtc_Word16 WebRtcIlbcfix_UnpackBits( /* (o) "Empty" frame indicator */
+    const WebRtc_UWord16 *bitstream,    /* (i) The packatized bitstream */
+    iLBC_bits *enc_bits,  /* (o) Paramerers from bitstream */
+    WebRtc_Word16 mode     /* (i) Codec mode (20 or 30) */
+                                        );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/vq3.c b/src/modules/audio_coding/codecs/ilbc/vq3.c
new file mode 100644
index 0000000..81d1bfa
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/vq3.c
@@ -0,0 +1,63 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Vq3.c
+
+******************************************************************/
+
+#include "vq3.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  vector quantization
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Vq3(
+    WebRtc_Word16 *Xq, /* quantized vector (Q13) */
+    WebRtc_Word16 *index,
+    WebRtc_Word16 *CB, /* codebook in Q13 */
+    WebRtc_Word16 *X,  /* vector to quantize (Q13) */
+    WebRtc_Word16 n_cb
+                       ){
+  WebRtc_Word16 i, j;
+  WebRtc_Word16 pos, minindex=0;
+  WebRtc_Word16 tmp;
+  WebRtc_Word32 dist, mindist;
+
+  pos = 0;
+  mindist = WEBRTC_SPL_WORD32_MAX; /* start value */
+
+  /* Find the codebook with the lowest square distance */
+  for (j = 0; j < n_cb; j++) {
+    tmp = X[0] - CB[pos];
+    dist = WEBRTC_SPL_MUL_16_16(tmp, tmp);
+    for (i = 1; i < 3; i++) {
+      tmp = X[i] - CB[pos + i];
+      dist += WEBRTC_SPL_MUL_16_16(tmp, tmp);
+    }
+
+    if (dist < mindist) {
+      mindist = dist;
+      minindex = j;
+    }
+    pos += 3;
+  }
+
+  /* Store the quantized codebook and the index */
+  for (i = 0; i < 3; i++) {
+    Xq[i] = CB[minindex*3 + i];
+  }
+  *index = minindex;
+
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/vq3.h b/src/modules/audio_coding/codecs/ilbc/vq3.h
new file mode 100644
index 0000000..f2628e0
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/vq3.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Vq3.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_VQ3_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_VQ3_H_
+
+#include "typedefs.h"
+
+/*----------------------------------------------------------------*
+ *  Vector quantization of order 3 (based on MSE)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Vq3(
+    WebRtc_Word16 *Xq,  /* (o) the quantized vector (Q13) */
+    WebRtc_Word16 *index, /* (o) the quantization index */
+    WebRtc_Word16 *CB,  /* (i) the vector quantization codebook (Q13) */
+    WebRtc_Word16 *X,  /* (i) the vector to quantize (Q13) */
+    WebRtc_Word16 n_cb  /* (i) the number of vectors in the codebook */
+                       );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/vq4.c b/src/modules/audio_coding/codecs/ilbc/vq4.c
new file mode 100644
index 0000000..3d4c26d
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/vq4.c
@@ -0,0 +1,62 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Vq4.c
+
+******************************************************************/
+
+#include "vq4.h"
+#include "constants.h"
+
+/*----------------------------------------------------------------*
+ *  vector quantization
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Vq4(
+    WebRtc_Word16 *Xq, /* quantized vector (Q13) */
+    WebRtc_Word16 *index,
+    WebRtc_Word16 *CB, /* codebook in Q13 */
+    WebRtc_Word16 *X,  /* vector to quantize (Q13) */
+    WebRtc_Word16 n_cb
+                       ){
+  WebRtc_Word16 i, j;
+  WebRtc_Word16 pos, minindex=0;
+  WebRtc_Word16 tmp;
+  WebRtc_Word32 dist, mindist;
+
+  pos = 0;
+  mindist = WEBRTC_SPL_WORD32_MAX; /* start value */
+
+  /* Find the codebook with the lowest square distance */
+  for (j = 0; j < n_cb; j++) {
+    tmp = X[0] - CB[pos];
+    dist = WEBRTC_SPL_MUL_16_16(tmp, tmp);
+    for (i = 1; i < 4; i++) {
+      tmp = X[i] - CB[pos + i];
+      dist += WEBRTC_SPL_MUL_16_16(tmp, tmp);
+    }
+
+    if (dist < mindist) {
+      mindist = dist;
+      minindex = j;
+    }
+    pos += 4;
+  }
+
+  /* Store the quantized codebook and the index */
+  for (i = 0; i < 4; i++) {
+    Xq[i] = CB[minindex*4 + i];
+  }
+  *index = minindex;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/vq4.h b/src/modules/audio_coding/codecs/ilbc/vq4.h
new file mode 100644
index 0000000..1b8cff2
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/vq4.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Vq4.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_VQ4_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_VQ4_H_
+
+#include "typedefs.h"
+
+/*----------------------------------------------------------------*
+ *  Vector quantization of order 4 (based on MSE)
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Vq4(
+    WebRtc_Word16 *Xq,  /* (o) the quantized vector (Q13) */
+    WebRtc_Word16 *index, /* (o) the quantization index */
+    WebRtc_Word16 *CB,  /* (i) the vector quantization codebook (Q13) */
+    WebRtc_Word16 *X,  /* (i) the vector to quantize (Q13) */
+    WebRtc_Word16 n_cb  /* (i) the number of vectors in the codebook */
+                       );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/window32_w32.c b/src/modules/audio_coding/codecs/ilbc/window32_w32.c
new file mode 100644
index 0000000..b0e8406
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/window32_w32.c
@@ -0,0 +1,65 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Window32W32.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  window multiplication
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Window32W32(
+    WebRtc_Word32 *z,    /* Output */
+    WebRtc_Word32 *x,    /* Input (same domain as Output)*/
+    const WebRtc_Word32  *y,  /* Q31 Window */
+    WebRtc_Word16 N     /* length to process */
+                               ) {
+  WebRtc_Word16 i;
+  WebRtc_Word16 x_low, x_hi, y_low, y_hi;
+  WebRtc_Word16 left_shifts;
+  WebRtc_Word32 temp;
+
+  left_shifts = (WebRtc_Word16)WebRtcSpl_NormW32(x[0]);
+  WebRtcSpl_VectorBitShiftW32(x, N, x, (WebRtc_Word16)(-left_shifts));
+
+
+  /* The double precision numbers use a special representation:
+   * w32 = hi<<16 + lo<<1
+   */
+  for (i = 0; i < N; i++) {
+    /* Extract higher bytes */
+    x_hi = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(x[i], 16);
+    y_hi = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(y[i], 16);
+
+    /* Extract lower bytes, defined as (w32 - hi<<16)>>1 */
+    temp = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)x_hi, 16);
+    x_low = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32((x[i] - temp), 1);
+
+    temp = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)y_hi, 16);
+    y_low = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32((y[i] - temp), 1);
+
+    /* Calculate z by a 32 bit multiplication using both low and high from x and y */
+    temp = WEBRTC_SPL_LSHIFT_W32(WEBRTC_SPL_MUL_16_16(x_hi, y_hi), 1);
+    temp = (temp + (WEBRTC_SPL_MUL_16_16_RSFT(x_hi, y_low, 14)));
+
+    z[i] = (temp + (WEBRTC_SPL_MUL_16_16_RSFT(x_low, y_hi, 14)));
+  }
+
+  WebRtcSpl_VectorBitShiftW32(z, N, z, left_shifts);
+
+  return;
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/window32_w32.h b/src/modules/audio_coding/codecs/ilbc/window32_w32.h
new file mode 100644
index 0000000..121188a
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/window32_w32.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_Window32W32.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_WINDOW32_W32_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_WINDOW32_W32_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ *  window multiplication
+ *---------------------------------------------------------------*/
+
+void WebRtcIlbcfix_Window32W32(
+    WebRtc_Word32 *z,    /* Output */
+    WebRtc_Word32 *x,    /* Input (same domain as Output)*/
+    const WebRtc_Word32  *y,  /* Q31 Window */
+    WebRtc_Word16 N     /* length to process */
+                               );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/ilbc/xcorr_coef.c b/src/modules/audio_coding/codecs/ilbc/xcorr_coef.c
new file mode 100644
index 0000000..04170ad
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/xcorr_coef.c
@@ -0,0 +1,142 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_XcorrCoef.c
+
+******************************************************************/
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * cross correlation which finds the optimal lag for the
+ * crossCorr*crossCorr/(energy) criteria
+ *---------------------------------------------------------------*/
+
+int WebRtcIlbcfix_XcorrCoef(
+    WebRtc_Word16 *target,  /* (i) first array */
+    WebRtc_Word16 *regressor, /* (i) second array */
+    WebRtc_Word16 subl,  /* (i) dimension arrays */
+    WebRtc_Word16 searchLen, /* (i) the search lenght */
+    WebRtc_Word16 offset,  /* (i) samples offset between arrays */
+    WebRtc_Word16 step   /* (i) +1 or -1 */
+                            ){
+  int k;
+  WebRtc_Word16 maxlag;
+  WebRtc_Word16 pos;
+  WebRtc_Word16 max;
+  WebRtc_Word16 crossCorrScale, Energyscale;
+  WebRtc_Word16 crossCorrSqMod, crossCorrSqMod_Max;
+  WebRtc_Word32 crossCorr, Energy;
+  WebRtc_Word16 crossCorrmod, EnergyMod, EnergyMod_Max;
+  WebRtc_Word16 *tp, *rp;
+  WebRtc_Word16 *rp_beg, *rp_end;
+  WebRtc_Word16 totscale, totscale_max;
+  WebRtc_Word16 scalediff;
+  WebRtc_Word32 newCrit, maxCrit;
+  int shifts;
+
+  /* Initializations, to make sure that the first one is selected */
+  crossCorrSqMod_Max=0;
+  EnergyMod_Max=WEBRTC_SPL_WORD16_MAX;
+  totscale_max=-500;
+  maxlag=0;
+  pos=0;
+
+  /* Find scale value and start position */
+  if (step==1) {
+    max=WebRtcSpl_MaxAbsValueW16(regressor, (WebRtc_Word16)(subl+searchLen-1));
+    rp_beg = regressor;
+    rp_end = &regressor[subl];
+  } else { /* step==-1 */
+    max=WebRtcSpl_MaxAbsValueW16(&regressor[-searchLen], (WebRtc_Word16)(subl+searchLen-1));
+    rp_beg = &regressor[-1];
+    rp_end = &regressor[subl-1];
+  }
+
+  /* Introduce a scale factor on the Energy in WebRtc_Word32 in
+     order to make sure that the calculation does not
+     overflow */
+
+  if (max>5000) {
+    shifts=2;
+  } else {
+    shifts=0;
+  }
+
+  /* Calculate the first energy, then do a +/- to get the other energies */
+  Energy=WebRtcSpl_DotProductWithScale(regressor, regressor, subl, shifts);
+
+  for (k=0;k<searchLen;k++) {
+    tp = target;
+    rp = &regressor[pos];
+
+    crossCorr=WebRtcSpl_DotProductWithScale(tp, rp, subl, shifts);
+
+    if ((Energy>0)&&(crossCorr>0)) {
+
+      /* Put cross correlation and energy on 16 bit word */
+      crossCorrScale=(WebRtc_Word16)WebRtcSpl_NormW32(crossCorr)-16;
+      crossCorrmod=(WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(crossCorr, crossCorrScale);
+      Energyscale=(WebRtc_Word16)WebRtcSpl_NormW32(Energy)-16;
+      EnergyMod=(WebRtc_Word16)WEBRTC_SPL_SHIFT_W32(Energy, Energyscale);
+
+      /* Square cross correlation and store upper WebRtc_Word16 */
+      crossCorrSqMod=(WebRtc_Word16)WEBRTC_SPL_MUL_16_16_RSFT(crossCorrmod, crossCorrmod, 16);
+
+      /* Calculate the total number of (dynamic) right shifts that have
+         been performed on (crossCorr*crossCorr)/energy
+      */
+      totscale=Energyscale-(crossCorrScale<<1);
+
+      /* Calculate the shift difference in order to be able to compare the two
+         (crossCorr*crossCorr)/energy in the same domain
+      */
+      scalediff=totscale-totscale_max;
+      scalediff=WEBRTC_SPL_MIN(scalediff,31);
+      scalediff=WEBRTC_SPL_MAX(scalediff,-31);
+
+      /* Compute the cross multiplication between the old best criteria
+         and the new one to be able to compare them without using a
+         division */
+
+      if (scalediff<0) {
+        newCrit = ((WebRtc_Word32)crossCorrSqMod*EnergyMod_Max)>>(-scalediff);
+        maxCrit = ((WebRtc_Word32)crossCorrSqMod_Max*EnergyMod);
+      } else {
+        newCrit = ((WebRtc_Word32)crossCorrSqMod*EnergyMod_Max);
+        maxCrit = ((WebRtc_Word32)crossCorrSqMod_Max*EnergyMod)>>scalediff;
+      }
+
+      /* Store the new lag value if the new criteria is larger
+         than previous largest criteria */
+
+      if (newCrit > maxCrit) {
+        crossCorrSqMod_Max = crossCorrSqMod;
+        EnergyMod_Max = EnergyMod;
+        totscale_max = totscale;
+        maxlag = k;
+      }
+    }
+    pos+=step;
+
+    /* Do a +/- to get the next energy */
+    Energy += step*(WEBRTC_SPL_RSHIFT_W32(
+        ((WebRtc_Word32)(*rp_end)*(*rp_end)) - ((WebRtc_Word32)(*rp_beg)*(*rp_beg)),
+        shifts));
+    rp_beg+=step;
+    rp_end+=step;
+  }
+
+  return(maxlag+offset);
+}
diff --git a/src/modules/audio_coding/codecs/ilbc/xcorr_coef.h b/src/modules/audio_coding/codecs/ilbc/xcorr_coef.h
new file mode 100644
index 0000000..ac885c4
--- /dev/null
+++ b/src/modules/audio_coding/codecs/ilbc/xcorr_coef.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/******************************************************************
+
+ iLBC Speech Coder ANSI-C Source Code
+
+ WebRtcIlbcfix_XcorrCoef.h
+
+******************************************************************/
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_XCORR_COEF_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ILBC_MAIN_SOURCE_XCORR_COEF_H_
+
+#include "defines.h"
+
+/*----------------------------------------------------------------*
+ * cross correlation which finds the optimal lag for the
+ * crossCorr*crossCorr/(energy) criteria
+ *---------------------------------------------------------------*/
+
+int WebRtcIlbcfix_XcorrCoef(
+    WebRtc_Word16 *target,  /* (i) first array */
+    WebRtc_Word16 *regressor, /* (i) second array */
+    WebRtc_Word16 subl,  /* (i) dimension arrays */
+    WebRtc_Word16 searchLen, /* (i) the search lenght */
+    WebRtc_Word16 offset,  /* (i) samples offset between arrays */
+    WebRtc_Word16 step   /* (i) +1 or -1 */
+                            );
+
+#endif
diff --git a/src/modules/audio_coding/codecs/isac/fix/Android.mk b/src/modules/audio_coding/codecs/isac/fix/Android.mk
deleted file mode 100644
index 7a16792..0000000
--- a/src/modules/audio_coding/codecs/isac/fix/Android.mk
+++ /dev/null
@@ -1,3 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-
-include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/src/modules/audio_coding/codecs/isac/fix/source/Android.mk b/src/modules/audio_coding/codecs/isac/fix/source/Android.mk
index aed687b..0138888 100644
--- a/src/modules/audio_coding/codecs/isac/fix/source/Android.mk
+++ b/src/modules/audio_coding/codecs/isac/fix/source/Android.mk
@@ -89,9 +89,6 @@
 
 include $(CLEAR_VARS)
 
-# filters_neon.c does not compile with Clang, http://b/17457665.
-LOCAL_CLANG := false
-
 LOCAL_ARM_MODE := arm
 LOCAL_MODULE_CLASS := STATIC_LIBRARIES
 LOCAL_MODULE := libwebrtc_isacfix_neon
diff --git a/src/modules/audio_coding/codecs/isac/fix/source/filters_neon.c b/src/modules/audio_coding/codecs/isac/fix/source/filters_neon.c
index 93143fe..7e7d74d 100644
--- a/src/modules/audio_coding/codecs/isac/fix/source/filters_neon.c
+++ b/src/modules/audio_coding/codecs/isac/fix/source/filters_neon.c
@@ -72,7 +72,7 @@
   } else if (zeros_low == 0) {
     scaling = 1;
   }
-  reg64x1b = -scaling;
+  reg64x1b = (int64x1_t) (int64_t) -scaling;
   reg64x1a = vshl_s64(reg64x1a, reg64x1b);
 
   // Record the result.
@@ -143,7 +143,7 @@
       "vmov.32 d17[1], %[prod_upper]\n\t"
       "vadd.i64 d17, d18\n\t"
       "mov %[tmp], %[scaling], asr #31\n\t"
-      "vmov.32 d16, %[scaling], %[tmp]\n\t"
+      "vmov d16, %[scaling], %[tmp]\n\t"
       "vshl.s64 d17, d16\n\t"
       "vmov.32 %[sum], d17[0]\n\t"
 
diff --git a/src/modules/audio_coding/codecs/isac/isac_test.gypi b/src/modules/audio_coding/codecs/isac/isac_test.gypi
new file mode 100644
index 0000000..abe2454
--- /dev/null
+++ b/src/modules/audio_coding/codecs/isac/isac_test.gypi
@@ -0,0 +1,71 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    # simple kenny
+    {
+      'target_name': 'iSACtest',
+      'type': 'executable',
+      'dependencies': [
+        'iSAC',
+      ],
+      'include_dirs': [
+        './main/test',
+        './main/interface',
+        './main/util',
+      ],
+      'sources': [
+        './main/test/simpleKenny.c',
+        './main/util/utility.c',
+      ],
+    },
+    # ReleaseTest-API
+    {
+      'target_name': 'iSACAPITest',
+      'type': 'executable',
+      'dependencies': [
+        'iSAC',
+      ],
+      'include_dirs': [
+        './main/test',
+        './main/interface',
+        './main/util',
+      ],
+      'sources': [
+        './main/test/ReleaseTest-API/ReleaseTest-API.cc',
+        './main/util/utility.c',
+      ],
+    },
+    # SwitchingSampRate
+    {
+      'target_name': 'iSACSwitchSampRateTest',
+      'type': 'executable',
+      'dependencies': [
+        'iSAC',
+      ],
+      'include_dirs': [
+        './main/test',
+        './main/interface',
+        '../../../../common_audio/signal_processing/include',
+        './main/util',
+      ],
+      'sources': [
+        './main/test/SwitchingSampRate/SwitchingSampRate.cc',
+        './main/util/utility.c',
+      ],    
+    },
+
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/modules/audio_coding/codecs/isac/isacfix_test.gypi b/src/modules/audio_coding/codecs/isac/isacfix_test.gypi
new file mode 100644
index 0000000..a6c59d0
--- /dev/null
+++ b/src/modules/audio_coding/codecs/isac/isacfix_test.gypi
@@ -0,0 +1,41 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    # kenny
+    {
+      'target_name': 'iSACFixtest',
+      'type': 'executable',
+      'dependencies': [
+        'iSACFix',
+      ],
+      'include_dirs': [
+        './fix/test',
+        './fix/interface',
+      ],
+      'sources': [
+        './fix/test/kenny.c',
+      ],
+    },
+    {
+      'target_name': 'isacfix_unittests',
+      'type': 'executable',
+      'dependencies': [
+        'iSACFix',
+        '<(DEPTH)/testing/gtest.gyp:gtest',
+        '<(webrtc_root)/test/test.gyp:test_support_main',
+      ],
+      'sources': [
+        'fix/test/isacfix_unittest.cc',
+      ],
+    },
+  ],
+}
+
+# TODO(kma): Add bit-exact test for iSAC-fix.
diff --git a/src/modules/audio_coding/codecs/isac/main/source/Android.mk b/src/modules/audio_coding/codecs/isac/main/source/Android.mk
index 889ba3f..f8d7f76 100644
--- a/src/modules/audio_coding/codecs/isac/main/source/Android.mk
+++ b/src/modules/audio_coding/codecs/isac/main/source/Android.mk
@@ -50,6 +50,7 @@
 LOCAL_CFLAGS := \
     $(MY_WEBRTC_COMMON_DEFS)
 
+# fft.c: error: comparison of array 'fftstate->Tmp0' equal to a null pointer
 LOCAL_CLANG_CFLAGS := \
     -Wno-tautological-pointer-compare
 
diff --git a/src/modules/audio_coding/codecs/isac/main/test/QA/runiSACLongtest.txt b/src/modules/audio_coding/codecs/isac/main/test/QA/runiSACLongtest.txt
new file mode 100644
index 0000000..3f05224
--- /dev/null
+++ b/src/modules/audio_coding/codecs/isac/main/test/QA/runiSACLongtest.txt
@@ -0,0 +1,433 @@
+#!/bin/bash
+(set -o igncr) 2>/dev/null && set -o igncr; # force bash to ignore \r character
+
+
+
+if  [ "$1" = "x64" ] || [ "$2" = "x64" ] || [ "$#" -eq 0 ]
+    then
+    PLATFORM=_X64
+    ISAC=../x64/Release/ReleaseTest-API_2005.exe
+elif [ "$1" = "LINUX" ] || [ "$2" = "LINUX" ]
+    then
+    PLATFORM=_linux
+    ISAC=../ReleaseTest-API/isacswtest
+else
+    PLATFORM=_2005
+    ISAC=../win32/Release/ReleaseTest-API_2005.exe
+fi
+
+if  [ "$#" -eq 0 ] || [ "$1" = "all" ] || [ "$1" = "wb" ]
+    then
+    LOGFILE=logNormal"$PLATFORM".txt
+    echo "START ISAC WB TEST" > $LOGFILE
+    echo >> $LOGFILE
+
+    INFILES=$(cat InputFiles.txt)
+    SUBSET=$(cat InputFilesFew.txt)
+    CHANNELFILES=$(cat ChannelFiles.txt)
+    CHANNELLIST=($(cat ChannelFiles.txt))
+    INDIR=../data/orig
+    OUTDIR=../dataqa"$PLATFORM"
+    mkdir -p $OUTDIR
+    rm -f $OUTDIR/*
+    
+    idx=0
+    RATE=10000
+    FRAMESIZE=30
+    
+
+    for file in $INFILES # loop over all input files
+      do
+      
+      echo "Input file: " $file
+      echo "-----------------------------------"
+      echo "Instantaneous with RATE " $RATE ", and Frame-size " $FRAMESIZE
+      $ISAC -I -B $RATE -FL $FRAMESIZE -FS 16 $INDIR/"$file" $OUTDIR/i_"$FRAMESIZE"_"$RATE"_"$file" >> $LOGFILE
+      echo
+      
+      name="${CHANNELLIST[$idx]}"
+      echo "Adaptive with channel file: " $name 
+      
+      $ISAC -B $INDIR/${CHANNELLIST[$idx]} -FS 16 $INDIR/"$file" $OUTDIR/a_${name%.*}_"$file" >> $LOGFILE
+      
+      echo
+      echo
+      
+#     alternate between 30 & 60 ms.
+      if [ $FRAMESIZE -eq 30 ]
+	  then
+	  FRAMESIZE=60
+      else
+	  FRAMESIZE=30
+      fi
+      
+#     rate between 10000 to 32000 bits/sec
+      if [ $RATE -le 30000 ]
+	  then
+	  let "RATE=RATE+2000"
+      else
+	  let "RATE=10000"
+      fi
+      
+#     there are only three channel file
+      if [ $idx -ge 2 ]; then
+	  idx=0
+      else
+	  let "idx=idx+1"
+      fi
+      
+    done
+
+    idx=0
+    
+#   loop over the subset of input files
+    for file in $SUBSET 
+      do
+      
+      if [ $idx -eq 0 ]; then
+	  $ISAC -B $INDIR/${CHANNELLIST[0]} -FL 30 -FIXED_FL -FS 16 $INDIR/"$file" $OUTDIR/a30_"$file" >> $LOGFILE
+	  idx=1
+      else
+	  $ISAC -B $INDIR/${CHANNELLIST[0]} -FL 60 -FIXED_FL -FS 16 $INDIR/"$file" $OUTDIR/a60_"$file" >> $LOGFILE
+	  idx=0
+      fi
+    done
+
+    $ISAC -B $INDIR/${CHANNELLIST[0]} -INITRATE 25000 -FL 30 -FS 16 $INDIR/"$file" $OUTDIR/a60_Init25kbps_"$file" >> $LOGFILE
+
+    echo
+    echo WIDEBAND DONE!
+    echo
+    echo
+fi
+
+if  [ "$#" -eq 0 ] || [ "$1" = "all" ] || [ "$1" = "swb" ]
+    then
+
+    LOGFILE=logNormal_SWB"$PLATFORM".txt
+    echo "START ISAC SWB TEST" > $LOGFILE
+    echo >> $LOGFILE
+
+    echo STARTING TO TEST SUPER-WIDEBAND
+    
+    INFILES=$(cat InputFilesSWB.txt)
+    INDIR=../data/origswb
+    OUTDIR=../dataqaswb"$PLATFORM"
+    mkdir -p $OUTDIR
+    rm -f $OUTDIR/*
+    
+    for file in $INFILES
+      do
+      echo
+      echo "Input file: " $file
+      echo "--------------------------------"
+      for RATE in 12000 20000 32000 38000 45000 50000 56000  
+	do
+	
+	echo "Rate " $RATE
+	$ISAC -I -B $RATE -FL 30 -FS 32 $INDIR/"$file" $OUTDIR/swb_"$RATE"_"$file" >> $LOGFILE
+	echo
+	
+      done
+  
+    done
+fi
+
+if  [ "$#" -eq 0 ] || [ "$1" = "all" ] || [ "$1" = "API" ]
+    then
+
+    LOGFILE_API=logNormal_API"$PLATFORM".txt
+    echo
+    echo
+    echo "START ISAC API TEST" > $LOGFILE_API
+    echo >> $LOGFILE_API
+    idx=1
+    echo "                            Test Enforcement of frame-size"
+    echo "========================================================================================"
+    mkdir -p ../FrameSizeLim"$PLATFORM"
+    rm -f ../FrameSizeLim"$PLATFORM"/*
+    echo
+    echo "-- No enforcement; BN 10000"
+    echo
+    $ISAC -B 10000 -FS 16 ../data/orig/speech_and_misc_WB.pcm \
+	../FrameSizeLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo "-- Now Enforce 30 ms frame size with the same bottleneck"
+    echo "There should not be any 60 ms frame"
+    echo
+    $ISAC -B 10000 -FL 30 -FIXED_FL -FS 16 ../data/orig/speech_and_misc_WB.pcm \
+	../FrameSizeLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo "-- No enforcement; BN 32000"
+    echo
+    $ISAC -B 32000 -FS 16 ../data/orig/speech_and_misc_WB.pcm \
+	../FrameSizeLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo "-- Now Enforce 60 ms frame size with the same bottleneck"
+    echo "There should not be any 30 ms frame"
+    echo 
+    $ISAC -B 32000 -FL 60 -FIXED_FL -FS 16 ../data/orig/speech_and_misc_WB.pcm \
+	../FrameSizeLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo
+    echo
+    echo
+
+    echo "                           Test Limiting of Payload Size and Rate"
+    echo "========================================================================================"
+    mkdir -p ../PayloadLim"$PLATFORM"
+    rm -f ../PayloadLim"$PLATFORM"/*
+    echo
+    echo
+    echo "-- No Limit, frame-size 60 ms, WIDEBAND"
+    echo 
+    $ISAC -I -B 32000 -FL 60 -FS 16                ../data/orig/speech_and_misc_WB.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo "-- Payload-size limit of 250, frame-size 60 ms, WIDEBAND"
+    echo 
+    $ISAC -I -B 32000 -FL 60 -FS 16 -MAX 250       ../data/orig/speech_and_misc_WB.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo "-- Rate limit of 33 kbps for 60 ms frame-size"
+    echo
+    $ISAC -I -B 32000 -FL 60 -FS 16 -MAXRATE 33000 ../data/orig/speech_and_misc_WB.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo 
+    echo "________________________________________________________"
+    echo
+    echo
+
+    echo "-- No Limit, frame-size 30 ms, WIDEBAND"
+    echo 
+    $ISAC -I -B 32000 -FL 30 -FS 16                ../data/orig/speech_and_misc_WB.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo    
+    echo "-- Payload-size limit of 130, frame-size 30 ms, WIDEBAND"
+    echo 
+    $ISAC -I -B 32000 -FL 30 -FS 16 -MAX 130       ../data/orig/speech_and_misc_WB.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo "-- Rate limit of 33 kbps for 30 ms frame-size, wideband"
+    echo
+    $ISAC -I -B 32000 -FL 30 -FS 16 -MAXRATE 33000 ../data/orig/speech_and_misc_WB.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo "________________________________________________________"
+    echo
+    echo
+
+    echo "-- No limit for 32 kbps, 30 ms, SUPER-WIDEBAND"
+    echo
+    $ISAC -I -B 32000 -FL 30 -FS 32          ../data/origswb/jstest_32.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo "-- Payload limit of 130 bytes for 32 kbps, 30 ms, SUPER-WIDEBAND"
+    echo
+    $ISAC -I -B 32000 -FL 30 -FS 32 -MAX 130 ../data/origswb/jstest_32.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo "________________________________________________________"
+    echo
+    echo
+
+    echo "-- No limit, Rate 45 kbps, 30 ms, SUPER-WIDEBAND, 12 kHz"
+    echo
+    $ISAC -I -B 45000 -FL 30 -FS 32               ../data/origswb/jstest_32.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo "-- Rate limit of 46 kbps for 42 kbps, 30 ms, SUPER-WIDEBAND, 12 kHz"
+    echo
+    $ISAC -I -B 45000 -FL 30 -FS 32 -MAXRATE 46000 ../data/origswb/jstest_32.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo "-- Payload limit of 170 bytes for 45 kbps, 30 ms, SUPER-WIDEBAND, 12 kHz"
+    echo
+    $ISAC -I -B 45000 -FL 30 -FS 32 -MAX 170       ../data/origswb/jstest_32.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo "________________________________________________________"
+    echo
+    echo
+  
+    echo "-- No limit for 56 kbps, 30 ms, SUPER-WIDEBAND, 16 kHz"
+    echo
+    $ISAC -I -B 56000 -FL 30 -FS 32                ../data/origswb/jstest_32.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo "-- Payload limit of 200 bytes for 56 kbps 30 ms, SUPER-WIDEBAND, 16 kHz"
+    echo
+    $ISAC -I -B 56000 -FL 30 -FS 32 -MAX 200       ../data/origswb/jstest_32.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    echo "-- Rate limit of 57 kbps for 56 kbps 30 ms, SUPER-WIDEBAND, 16 kHz"
+    echo
+    $ISAC -I -B 56000 -FL 30 -FS 32 -MAXRATE 57000 ../data/origswb/jstest_32.pcm \
+	../PayloadLim"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo "________________________________________________________"
+    echo
+    echo
+    echo
+    echo
+    echo
+    
+    echo "                                    Test Trans-Coding"
+    echo "========================================================================================"
+    mkdir -p ../Transcoding"$PLATFORM"
+    rm -f ../Transcoding"$PLATFORM"/*
+    echo
+    echo
+    echo "-- 20 kbps, 30 ms, WIDEBAND"
+    echo
+    $ISAC -I -B 20000 -FL 30 -FS 16          ../data/orig/speech_and_misc_WB.pcm \
+	../Transcoding"$PLATFORM"/APITest_refTrans20WB.pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    
+    echo "-- 32 kbps trans-coding to 20 kbps, 30 ms, WIDEBAND"
+    echo
+    $ISAC -I -B 32000 -FL 30 -FS 16  -T 20000  ../Transcoding"$PLATFORM"/APITest_32T20.pcm \
+	../data/orig/speech_and_misc_WB.pcm ../Transcoding"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo "________________________________________________________"
+
+    echo
+    echo
+    echo "-- 38 kbps, 30 ms, SUPER-WIDEBAND"
+    echo
+    $ISAC -I -B 38000 -FL 30 -FS 32          ../data/origswb/jstest_32.pcm \
+	../Transcoding"$PLATFORM"/APITest_refTrans38.pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    
+    echo "-- 45 kbps trans-coding to 38 kbps, 30 ms, SUPER-WIDEBAND"
+    echo
+    $ISAC -I -B 45000 -FL 30 -FS 32  -T 38000  ../Transcoding"$PLATFORM"/APITest_45T38.pcm \
+	../data/origswb/jstest_32.pcm ../Transcoding"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+ 
+    echo
+    echo
+    echo "-- 20 kbps, 30 ms, SUPER-WIDEBAND"
+    echo
+    $ISAC -I -B 20000 -FL 30 -FS 32          ../data/origswb/jstest_32.pcm \
+	../Transcoding"$PLATFORM"/APITest_refTrans20SWB.pcm >> $LOGFILE_API
+    let "idx=idx+1"
+
+    echo
+    echo
+    
+    echo "-- 45 kbps trans-coding to 20 kbps, 30 ms, SUPER-WIDEBAND"
+    echo
+    $ISAC -I -B 45000 -FL 30 -FS 32  -T 20000  ../Transcoding"$PLATFORM"/APITest_45T20.pcm \
+	../data/origswb/jstest_32.pcm ../Transcoding"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo "________________________________________________________"
+    echo
+    echo
+    echo "-- 50 kbps, 30 ms, SUPER-WIDEBAND"
+    echo
+    $ISAC -I -B 50000 -FL 30 -FS 32          ../data/origswb/jstest_32.pcm \
+	../Transcoding"$PLATFORM"/APITest_refTrans50.pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    
+    echo "-- 56 kbps trans-coding to 50 kbps, 30 ms, SUPER-WIDEBAND"
+    echo
+    $ISAC -I -B 56000 -FL 30 -FS 32  -T 50000  ../Transcoding"$PLATFORM"/APITest_56T50.pcm \
+	../data/origswb/jstest_32.pcm ../Transcoding"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    
+    echo "-- 56 kbps trans-coding to 20 kbps, 30 ms, SUPER-WIDEBAND"
+    echo
+    $ISAC -I -B 56000 -FL 30 -FS 32  -T 20000 ../Transcoding"$PLATFORM"/APITest_56T20.pcm \
+	../data/origswb/jstest_32.pcm ../Transcoding"$PLATFORM"/APITest_"$idx".pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo "________________________________________________________"
+    echo
+    echo
+    echo 
+    echo
+    echo
+    
+    echo "                                         Test FEC"
+    echo "========================================================================================"
+    mkdir -p ../FEC"$PLATFORM"
+    rm -f ../FEC"$PLATFORM"/*
+    echo
+    echo
+    echo "-- 32 kbps with transcoding to 20kbps, 30 ms, WIDEBAND, 10% packet loss"
+    $ISAC -I -B 32000 -FL 30 -FS 16 -PL 10 -T 20000 ../FEC"$PLATFORM"/APITest_PL10_WB30_T20.pcm \
+	../data/orig/speech_and_misc_WB.pcm ../FEC"$PLATFORM"/APITest_PL10_WB30.pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    
+    echo "-- 32 kbps, 60 ms, WIDEBAND, 10% packet loss"
+    $ISAC -I -B 32000 -FL 60 -FS 16 -PL 10 ../data/orig/speech_and_misc_WB.pcm \
+	../FEC"$PLATFORM"/APITest_PL10_WB60.pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+    
+    echo "-- 32 kbps with transcoding to 20 kbps, 30 ms, SUPER-WIDEBAND, 10% packet loss"
+    $ISAC -I -B 32000 -FL 30 -FS 32 -PL 10 -T 20000 ../FEC"$PLATFORM"/APITest_PL10_SWB_8kHz_T20.pcm \
+	../data/origswb/jstest_32.pcm ../FEC"$PLATFORM"/APITest_PL10_SWB_8kHz.pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+
+    echo "-- 45 kbps with Trascoding to 38 kbps, 30 ms, SUPER-WIDEBAND, 10% packet loss"
+    $ISAC -I -B 45000 -FL 30 -FS 32 -PL 10 -T 38000 ../FEC"$PLATFORM"/APITest_PL10_SWB_12kHz_T38.pcm \
+	../data/origswb/jstest_32.pcm ../FEC"$PLATFORM"/APITest_PL10_SWB_12kHz.pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+
+    echo "-- 56 kbps with transcoding to 50 kbps, 30 ms, SUPER-WIDEBAND, 10% packet loss"
+    $ISAC -I -B 56000 -FL 30 -FS 32 -PL 10 -T 50000 ../FEC"$PLATFORM"/APITest_PL10_SWB_16kHz_T50.pcm \
+	../data/origswb/jstest_32.pcm ../FEC"$PLATFORM"/APITest_PL10_SWB_16kHz.pcm >> $LOGFILE_API
+    let "idx=idx+1"
+    echo
+    echo
+fi
diff --git a/src/modules/audio_coding/codecs/isac/main/test/QA/runiSACfault.txt b/src/modules/audio_coding/codecs/isac/main/test/QA/runiSACfault.txt
new file mode 100644
index 0000000..63829a4
--- /dev/null
+++ b/src/modules/audio_coding/codecs/isac/main/test/QA/runiSACfault.txt
@@ -0,0 +1,80 @@
+#!/bin/bash
+(set -o igncr) 2>/dev/null && set -o igncr; # force bash to ignore \r character????? ?????? 
+if  [ "$1" = "x64" ] || [ "$#" -eq 0 ]
+    then
+    PLATFORM=_X64
+    ISAC=../x64/Release/ReleaseTest-API_2005.exe
+elif [ "$1" = "2005" ]
+	then
+    PLATFORM=_2005
+    ISAC=../win32/Release/ReleaseTest-API_2005.exe
+elif [ "$1" == "LINUX" ]
+	then
+    PLATFORM=_linux
+    ISAC=../ReleaseTest-API/isacswtest
+else
+	echo Unknown Platform
+	exit 2
+fi
+
+LOGFILE=logfault$PLATFORM.txt
+echo "START FAULT TEST" > $LOGFILE
+
+
+INFILES=$(cat InputFiles.txt)
+SUBSET=$(cat InputFilesFew.txt)
+CHANNELFILES=$(cat ChannelFiles.txt)
+CHANNELLIST=($(cat ChannelFiles.txt))
+INDIR=../data/orig
+INDIRSWB=../data/origswb
+OUTDIR=../dataqaft$PLATFORM
+mkdir -p $OUTDIR
+
+#maximum Target rate for different bandwidth
+TARGETRATE=( 32000 32000 44000 56000 )
+SAMPFREQ=( 16 32 32 32 )
+FAULTTEST=(1 2 3 4 5 6 7 9)
+
+index1=0
+
+file_wb=../data/orig/16kHz.pcm
+file_swb=../data/origswb/32kHz.pcm
+
+for idx in 0 1 2 3 
+  do
+# Fault test
+  echo
+  echo "Sampling Frequency " ${SAMPFREQ[idx]} "kHz, Rate " ${TARGETRATE[idx]} "bps."
+  echo "---------------------------------------------------"
+  if [ ${SAMPFREQ[idx]} -eq 16 ]; then
+    file=$file_wb
+  else
+    file=$file_swb
+  fi
+
+  for testnr in ${FAULTTEST[*]}
+    do
+    echo "Running Fault Test " $testnr
+    $ISAC -I -B "${TARGETRATE[idx]}" -F $testnr -FS "${SAMPFREQ[idx]}" "$file" \
+	$OUTDIR/ft"$testnr"_"${TARGETRATE[idx]}"_"${SAMPFREQ[idx]}".pcm >> LOGFILE
+    echo
+    
+  done
+  
+# Fault test number 10, error in bitstream
+  echo "Running Fault Test 10"
+  $ISAC -I -B "${TARGETRATE[idx]}" -F 10        -FS "${SAMPFREQ[idx]}" "$file" \
+    $OUTDIR/ft10_"${TARGETRATE[idx]}"_"${SAMPFREQ[idx]}".pcm >> LOGFILE
+  echo
+  echo "Running Fault Test 10 with packetloss"
+  $ISAC -I -B "${TARGETRATE[idx]}" -F 10 -PL 10 -FS "${SAMPFREQ[idx]}" "$file" \
+    $OUTDIR/ft10plc_"${TARGETRATE[idx]}"_"${SAMPFREQ[idx]}".pcm >> LOGFILE
+  echo
+done
+
+echo 
+echo
+echo DONE!
+  
+
+
diff --git a/src/modules/audio_coding/codecs/isac/main/test/QA/runiSACfixfloat.txt b/src/modules/audio_coding/codecs/isac/main/test/QA/runiSACfixfloat.txt
new file mode 100644
index 0000000..4cda78e
--- /dev/null
+++ b/src/modules/audio_coding/codecs/isac/main/test/QA/runiSACfixfloat.txt
@@ -0,0 +1,47 @@
+#!/bin/bash
+(set -o igncr) 2>/dev/null && set -o igncr; # force bash to ignore \r character
+
+LOGFILE=logfxfl.txt
+echo "START FIX-FLOAT TEST" > $LOGFILE
+
+
+ISACFIXFLOAT=../../../fix/test/testFixFloat.exe
+
+INFILES=$(cat InputFiles.txt)
+SUBSET=$(cat InputFilesFew.txt)
+CHANNELFILES=$(cat ChannelFiles.txt)
+CHANNELLIST=($(cat ChannelFiles.txt))
+INDIR=../data/orig
+OUTDIR=../dataqafxfl
+mkdir -p $OUTDIR
+
+index1=0
+
+for file in $INFILES # loop over all input files
+  do
+  
+  for channel in $CHANNELFILES
+	do
+	let "index1=index1+1"
+
+	$ISACFIXFLOAT $INDIR/$channel -m 1 -PLC $INDIR/"$file" $OUTDIR/flfx$index1"$file" >> $LOGFILE
+	$ISACFIXFLOAT $INDIR/$channel -m 2 -PLC $INDIR/"$file" $OUTDIR/fxfl$index1"$file" >> $LOGFILE
+  done
+
+done
+
+index1=0
+
+for file in $SUBSET # loop over the subset of input files
+  do
+	let "index1=index1+1"
+	$ISACFIXFLOAT $INDIR/$channel -m 1 -NB 1 $INDIR/"$file" $OUTDIR/flfxnb1_$index1"$file" >> $LOGFILE
+	$ISACFIXFLOAT $INDIR/$channel -m 2 -NB 1 $INDIR/"$file" $OUTDIR/fxflnb1_$index1"$file" >> $LOGFILE
+	$ISACFIXFLOAT $INDIR/$channel -m 1 -NB 2 -PLC $INDIR/"$file" $OUTDIR/flfxnb2_$index1"$file" >> $LOGFILE
+	$ISACFIXFLOAT $INDIR/$channel -m 2 -NB 2 -PLC $INDIR/"$file" $OUTDIR/fxflnb2_$index1"$file" >> $LOGFILE
+done
+
+echo DONE!
+
+
+
diff --git a/src/modules/audio_coding/codecs/isac/main/test/ReleaseTest-API/ReleaseTest-API.cc b/src/modules/audio_coding/codecs/isac/main/test/ReleaseTest-API/ReleaseTest-API.cc
new file mode 100644
index 0000000..04c5367
--- /dev/null
+++ b/src/modules/audio_coding/codecs/isac/main/test/ReleaseTest-API/ReleaseTest-API.cc
@@ -0,0 +1,1065 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// ReleaseTest-API.cpp : Defines the entry point for the console application.
+//
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+#include <ctype.h>
+#include <iostream>
+
+/* include API */
+#include "isac.h"
+#include "utility.h"
+
+/* Defines */
+#define SEED_FILE "randseed.txt"  /* Used when running decoder on garbage data  */
+#define MAX_FRAMESAMPLES     960  /* max number of samples per frame
+                                      (= 60 ms frame & 16 kHz) or
+                                      (= 30 ms frame & 32 kHz)                  */
+#define FRAMESAMPLES_10ms	 160   /* number of samples per 10ms frame          */
+#define SWBFRAMESAMPLES_10ms 320
+//#define FS		        	16000 /* sampling frequency (Hz) */
+
+#ifdef WIN32
+#define CLOCKS_PER_SEC      1000  /* Runtime statistics */
+#endif
+
+
+
+
+using namespace std;
+
+int main(int argc, char* argv[])
+{
+
+    char inname[100], outname[100], bottleneck_file[100], vadfile[100];
+	FILE *inp, *outp, *f_bn=NULL, *vadp, *bandwidthp;
+	int framecnt, endfile;
+
+	int i, errtype, VADusage = 0, packetLossPercent = 0;
+	WebRtc_Word16 CodingMode;
+	WebRtc_Word32 bottleneck;
+	WebRtc_Word16 framesize = 30;           /* ms */
+	int cur_framesmpls, err;
+
+	/* Runtime statistics */
+	double starttime, runtime, length_file;
+
+	WebRtc_Word16 stream_len = 0;
+	WebRtc_Word16 declen, lostFrame = 0, declenTC = 0;
+
+	WebRtc_Word16 shortdata[SWBFRAMESAMPLES_10ms];
+	WebRtc_Word16 vaddata[SWBFRAMESAMPLES_10ms*3];
+	WebRtc_Word16 decoded[MAX_FRAMESAMPLES << 1];
+	WebRtc_Word16 decodedTC[MAX_FRAMESAMPLES << 1];
+	WebRtc_UWord16 streamdata[500];
+	WebRtc_Word16	speechType[1];
+    WebRtc_Word16 rateBPS = 0;
+    WebRtc_Word16 fixedFL = 0;
+    WebRtc_Word16 payloadSize = 0;
+    WebRtc_Word32 payloadRate = 0;
+    int setControlBWE = 0;
+    short FL, testNum;
+	char version_number[20];
+    FILE  *plFile;
+    WebRtc_Word32 sendBN;
+
+#ifdef _DEBUG
+	FILE *fy;
+	double kbps;
+#endif /* _DEBUG */
+	int totalbits =0;
+	int totalsmpls =0;
+
+    /* If use GNS file */
+    FILE *fp_gns = NULL;
+	char gns_file[100];
+    short maxStreamLen30 = 0;
+    short maxStreamLen60 = 0;
+    short sampFreqKHz = 32;
+    short samplesIn10Ms;
+    short useAssign = 0;
+    //FILE logFile;
+    bool doTransCoding = false;
+    WebRtc_Word32 rateTransCoding = 0;
+    WebRtc_UWord16 streamDataTransCoding[600];
+    WebRtc_Word16 streamLenTransCoding = 0;
+    FILE* transCodingFile = NULL;
+    FILE* transcodingBitstream = NULL;
+    WebRtc_UWord32 numTransCodingBytes = 0;
+
+	/* only one structure used for ISAC encoder */
+	ISACStruct* ISAC_main_inst;
+    ISACStruct* decoderTransCoding;
+
+	BottleNeckModel       BN_data;
+
+#ifdef _DEBUG
+	fy = fopen("bit_rate.dat", "w");
+	fclose(fy);
+	fy = fopen("bytes_frames.dat", "w");
+	fclose(fy);
+#endif /* _DEBUG */
+
+	/* Handling wrong input arguments in the command line */
+	if((argc<3) || (argc>17))  {
+		printf("\n\nWrong number of arguments or flag values.\n\n");
+
+        printf("\n");
+        WebRtcIsac_version(version_number);
+        printf("iSAC-swb version %s \n\n", version_number);
+
+        printf("Usage:\n\n");
+        printf("./kenny.exe [-I] bottleneck_value infile outfile \n\n");
+        printf("with:\n");
+        printf("[-FS num]       :   sampling frequency in kHz, valid values are 16 & 32,\n");
+        printf("                    with 16 as default.\n");
+        printf("[-I]            :   if -I option is specified, the coder will use\n");
+        printf("                    an instantaneous Bottleneck value. If not, it\n");
+        printf("                    will be an adaptive Bottleneck value.\n\n");
+        printf("[-assign]       :   Use Assign API.\n");
+        printf("[-B num]        :   the value of the bottleneck provided either\n");
+        printf("                    as a fixed value in bits/sec (e.g. 25000) or\n");
+        printf("                    read from a file (e.g. bottleneck.txt)\n\n");
+        printf("[-INITRATE num] :   Set a new value for initial rate. Note! Only used in \n");
+        printf("                    adaptive mode.\n\n");
+        printf("[-FL num]       :   Set (initial) frame length in msec. Valid length are \n");
+        printf("                    30 and 60 msec.\n\n");
+        printf("[-FIXED_FL]     :   Frame length will be fixed to initial value.\n\n");
+        printf("[-MAX num]      :   Set the limit for the payload size of iSAC in bytes. \n");
+        printf("                    Minimum 100 maximum 400.\n\n");
+        printf("[-MAXRATE num]  :   Set the maxrate for iSAC in bits per second. \n");
+        printf("                    Minimum 32000, maximum 53400.\n\n");
+        printf("[-F num]        :   if -F option is specified, the test function\n");
+        printf("                    will run the iSAC API fault scenario specified by the\n");
+        printf("                    supplied number.\n");
+        printf("                    F 1 - Call encoder prior to init encoder call\n");
+        printf("                    F 2 - Call decoder prior to init decoder call\n");
+        printf("                    F 3 - Call decoder prior to encoder call\n");
+        printf("                    F 4 - Call decoder with a too short coded sequence\n");
+        printf("                    F 5 - Call decoder with a too long coded sequence\n");
+        printf("                    F 6 - Call decoder with random bit stream\n");
+        printf("                    F 7 - Call init encoder/decoder at random during a call\n");
+        printf("                    F 8 - Call encoder/decoder without having allocated memory \n");
+        printf("                          for encoder/decoder instance\n");
+        printf("                    F 9 - Call decodeB without calling decodeA\n");
+        printf("                    F 10 - Call decodeB with garbage data\n");
+        printf("[-PL num]       :   if -PL option is specified \n");
+        printf("[-T rate file]  :   test trans-coding with target bottleneck 'rate' bits/sec\n");
+        printf("                    the output file is written to 'file'\n");
+        printf("[-LOOP num]     :   number of times to repeat coding the input file for stress testing\n");
+        //printf("[-CE num]       :   Test of APIs used by Conference Engine.\n");
+        //printf("                    CE 1 - getNewBitstream, getBWE \n");
+        //printf("                    (CE 2 - RESERVED for transcoding)\n");
+        //printf("                    CE 3 - getSendBWE, setSendBWE.  \n\n");
+        //printf("-L filename     :   write the logging info into file (appending)\n");
+        printf("infile          :   Normal speech input file\n\n");
+        printf("outfile         :   Speech output file\n\n");
+    	exit(0);
+	}
+
+    /* Print version number */
+    printf("-------------------------------------------------\n");
+    WebRtcIsac_version(version_number);
+    printf("iSAC version %s \n\n", version_number);
+
+    /* Loop over all command line arguments */
+	CodingMode = 0;
+	testNum = 0;
+    useAssign = 0;
+    //logFile = NULL;
+    char transCodingFileName[500];
+    WebRtc_Word16 totFileLoop = 0;
+    WebRtc_Word16 numFileLoop = 0;
+	for (i = 1; i < argc-2;i++)
+    {
+        if(!strcmp("-LOOP", argv[i]))
+        {
+            i++;
+            totFileLoop = (WebRtc_Word16)atol(argv[i]);
+            if(totFileLoop <= 0)
+            {
+                fprintf(stderr, "Invalid number of runs for the given input file, %d.", totFileLoop);
+                exit(0);
+            }
+        }
+
+        if(!strcmp("-T", argv[i]))
+        {
+            doTransCoding = true;
+            i++;
+            rateTransCoding = atoi(argv[i]);
+            i++;
+            strcpy(transCodingFileName, argv[i]);
+        }
+
+        /*Should we use assign API*/
+        if(!strcmp("-assign", argv[i]))
+        {
+            useAssign = 1;
+        }
+
+        /* Set Sampling Rate */
+        if(!strcmp("-FS", argv[i]))
+        {
+            i++;
+            sampFreqKHz = atoi(argv[i]);
+        }
+
+        /* Instantaneous mode */
+		if(!strcmp ("-I", argv[i]))
+        {
+			printf("Instantaneous BottleNeck\n");
+			CodingMode = 1;
+		}
+
+        /* Set (initial) bottleneck value */
+        if(!strcmp ("-INITRATE", argv[i]))	{
+			rateBPS = atoi(argv[i + 1]);
+            setControlBWE = 1;
+            if((rateBPS < 10000) || (rateBPS > 32000))
+            {
+				printf("\n%d is not a initial rate. Valid values are in the range 10000 to 32000.\n", rateBPS);
+				exit(0);
+            }
+			printf("New initial rate: %d\n", rateBPS);
+			i++;
+		}
+
+        /* Set (initial) framelength */
+        if(!strcmp ("-FL", argv[i]))	{
+			framesize = atoi(argv[i + 1]);
+            if((framesize != 30) && (framesize != 60))
+            {
+				printf("\n%d is not a valid frame length. Valid length are 30 and 60 msec.\n", framesize);
+				exit(0);
+            }
+            setControlBWE = 1;
+			printf("Frame Length: %d\n", framesize);
+			i++;
+		}
+
+        /* Fixed frame length */
+        if(!strcmp ("-FIXED_FL", argv[i]))
+        {
+			fixedFL = 1;
+            setControlBWE = 1;
+			printf("Fixed Frame Length\n");
+		}
+
+        /* Set maximum allowed payload size in bytes */
+        if(!strcmp ("-MAX", argv[i]))	{
+			payloadSize = atoi(argv[i + 1]);
+            printf("Maximum Payload Size: %d\n", payloadSize);
+			i++;
+		}
+
+        /* Set maximum rate in bytes */
+        if(!strcmp ("-MAXRATE", argv[i]))	{
+			payloadRate = atoi(argv[i + 1]);
+            printf("Maximum Rate in kbps: %d\n", payloadRate);
+			i++;
+		}
+
+        /* Test of fault scenarious */
+        if(!strcmp ("-F", argv[i]))
+        {
+			testNum = atoi(argv[i + 1]);
+			printf("Fault test: %d\n", testNum);
+			if(testNum < 1 || testNum > 10)
+            {
+				printf("\n%d is not a valid Fault Scenario number. Valid Fault Scenarios are numbered 1-10.\n", testNum);
+				exit(0);
+			}
+			i++;
+		}
+
+        /* Packet loss test */
+		if(!strcmp ("-PL", argv[i]))
+        {
+			if( isdigit( *argv[i+1] ) )
+            {
+				packetLossPercent = atoi( argv[i+1] );
+				if( (packetLossPercent < 0) | (packetLossPercent > 100) )
+                {
+					printf( "\nInvalid packet loss perentage \n" );
+					exit( 0 );
+				}
+                if( packetLossPercent > 0 )
+                {
+					printf( "Simulating %d %% of independent packet loss\n", packetLossPercent );
+                }
+                else
+                {
+					printf( "\nNo Packet Loss Is Simulated \n" );
+                }
+            }
+            else
+            {
+				plFile = fopen( argv[i+1], "rb" );
+				if( plFile == NULL )
+                {
+					printf( "\n couldn't open the frameloss file: %s\n", argv[i+1] );
+					exit( 0 );
+				}
+				printf( "Simulating packet loss through the given channel file: %s\n", argv[i+1] );
+			}
+			i++;
+		}
+
+        /* Random packetlosses */
+		if(!strcmp ("-rnd", argv[i]))
+        {
+			srand((unsigned int)time(NULL) );
+			printf( "Random pattern in lossed packets \n" );
+		}
+
+        /* Use gns file */
+		if(!strcmp ("-G", argv[i]))
+        {
+			sscanf(argv[i + 1], "%s", gns_file);
+			fp_gns = fopen(gns_file, "rb");
+			if(fp_gns  == NULL)
+            {
+				printf("Cannot read file %s.\n", gns_file);
+				exit(0);
+			}
+			i++;
+		}
+
+
+        // make it with '-B'
+        /* Get Bottleneck value */
+        if(!strcmp("-B", argv[i]))
+        {
+            i++;
+            bottleneck = atoi(argv[i]);
+            if(bottleneck == 0)
+            {
+                sscanf(argv[i], "%s", bottleneck_file);
+                f_bn = fopen(bottleneck_file, "rb");
+                if(f_bn  == NULL)
+                {
+                    printf("Error No value provided for BottleNeck and cannot read file %s.\n", bottleneck_file);
+                    exit(0);
+                }
+                else
+                {
+                    printf("reading bottleneck rates from file %s\n\n",bottleneck_file);
+                    if(fscanf(f_bn, "%d", &bottleneck) == EOF)
+                    {
+                        /* Set pointer to beginning of file */
+                        fseek(f_bn, 0L, SEEK_SET);
+                        if (fscanf(f_bn, "%d", &bottleneck) == EOF) {
+                            exit(0);
+                        }
+                    }
+
+                    /*	Bottleneck is a cosine function
+                    *	Matlab code for writing the bottleneck file:
+                    *	BottleNeck_10ms = 20e3 + 10e3 * cos((0:5999)/5999*2*pi);
+                    *	fid = fopen('bottleneck.txt', 'wb');
+                    *	fprintf(fid, '%d\n', BottleNeck_10ms); fclose(fid);
+                    */
+                }
+            }
+            else
+            {
+                printf("\nfixed bottleneck rate of %d bits/s\n\n", bottleneck);
+            }
+        }
+        /* Run Conference Engine APIs */
+        //     Do not test it in the first release
+        //
+        //     if(!strcmp ("-CE", argv[i]))
+        //     {
+        //         testCE = atoi(argv[i + 1]);
+        //         if(testCE==1)
+        //         {
+        //             i++;
+        //             scale = (float)atof( argv[i+1] );
+        //         }
+        //         else if(testCE == 2)
+        //         {
+        //             printf("\nCE-test 2 (transcoding) not implemented.\n");
+        //             exit(0);
+        //         }
+        //         else if(testCE < 1 || testCE > 3)
+        //         {
+        //             printf("\n%d is not a valid CE-test number. Valid CE tests are 1-3.\n", testCE);
+        //             exit(0);
+        //         }
+        //         printf("CE-test number: %d\n", testCE);
+        //         i++;
+        //     }
+    }
+
+	if(CodingMode == 0)
+	{
+		printf("\nAdaptive BottleNeck\n");
+	}
+
+    switch(sampFreqKHz)
+    {
+    case 16:
+        {
+            printf("iSAC Wideband.\n");
+            samplesIn10Ms = FRAMESAMPLES_10ms;
+            break;
+        }
+    case 32:
+        {
+            printf("iSAC Supper-Wideband.\n");
+            samplesIn10Ms = SWBFRAMESAMPLES_10ms;
+            break;
+        }
+    default:
+            printf("Unsupported sampling frequency %d kHz", sampFreqKHz);
+            exit(0);
+    }
+
+
+
+
+	/* Get Input and Output files */
+	sscanf(argv[argc-2], "%s", inname);
+	sscanf(argv[argc-1], "%s", outname);
+    printf("\nInput file: %s\n", inname);
+    printf("Output file: %s\n\n", outname);
+	if((inp = fopen(inname,"rb")) == NULL)
+    {
+		printf("  Error iSAC Cannot read file %s.\n", inname);
+        cout << flush;
+		exit(1);
+	}
+
+	if((outp = fopen(outname,"wb")) == NULL)
+    {
+		printf("  Error iSAC Cannot write file %s.\n", outname);
+        cout << flush;
+        getchar();
+		exit(1);
+	}
+	if(VADusage)
+    {
+		if((vadp = fopen(vadfile,"rb")) == NULL)
+        {
+			printf("  Error iSAC Cannot read file %s.\n", vadfile);
+            cout << flush;
+			exit(1);
+		}
+	}
+
+    if((bandwidthp = fopen("bwe.pcm","wb")) == NULL)
+    {
+            printf("  Error iSAC Cannot read file %s.\n", "bwe.pcm");
+            cout << flush;
+            exit(1);
+    }
+
+
+	starttime = clock()/(double)CLOCKS_PER_SEC; /* Runtime statistics */
+
+    /* Initialize the ISAC and BN structs */
+    if(testNum != 8)
+    {
+        if(!useAssign)
+        {
+            err =WebRtcIsac_Create(&ISAC_main_inst);
+            WebRtcIsac_SetEncSampRate(ISAC_main_inst, (sampFreqKHz == 16)? kIsacWideband:kIsacSuperWideband);
+            WebRtcIsac_SetDecSampRate(ISAC_main_inst, (sampFreqKHz == 16)? kIsacWideband:kIsacSuperWideband);
+        }
+        else
+        {
+            /* Test the Assign functions */
+            int sss;
+            void *ppp;
+            err = WebRtcIsac_AssignSize(&sss);
+            ppp = malloc(sss);
+            err = WebRtcIsac_Assign(&ISAC_main_inst, ppp);
+            WebRtcIsac_SetEncSampRate(ISAC_main_inst, (sampFreqKHz == 16)? kIsacWideband:kIsacSuperWideband);
+            WebRtcIsac_SetDecSampRate(ISAC_main_inst, (sampFreqKHz == 16)? kIsacWideband:kIsacSuperWideband);
+        }
+        /* Error check */
+        if(err < 0)
+        {
+            printf("\n\n Error in create.\n\n");
+            cout << flush;
+            exit(EXIT_FAILURE);
+        }
+    }
+	BN_data.arrival_time  = 0;
+	BN_data.sample_count  = 0;
+	BN_data.rtp_number    = 0;
+
+	/* Initialize encoder and decoder */
+    framecnt= 0;
+    endfile	= 0;
+
+    if(doTransCoding)
+    {
+        WebRtcIsac_Create(&decoderTransCoding);
+        WebRtcIsac_SetEncSampRate(decoderTransCoding, (sampFreqKHz == 16)? kIsacWideband:kIsacSuperWideband);
+        WebRtcIsac_SetDecSampRate(decoderTransCoding, (sampFreqKHz == 16)? kIsacWideband:kIsacSuperWideband);
+        WebRtcIsac_DecoderInit(decoderTransCoding);
+        transCodingFile = fopen(transCodingFileName, "wb");
+        if(transCodingFile == NULL)
+        {
+            printf("Could not open %s to output trans-coding.\n", transCodingFileName);
+            exit(0);
+        }
+        strcat(transCodingFileName, ".bit");
+        transcodingBitstream = fopen(transCodingFileName, "wb");
+        if(transcodingBitstream == NULL)
+        {
+            printf("Could not open %s to write the bit-stream of transcoder.\n", transCodingFileName);
+            exit(0);
+        }
+    }
+
+    if(testNum != 1)
+    {
+		if(WebRtcIsac_EncoderInit(ISAC_main_inst, CodingMode) < 0)
+        {
+            printf("Error could not initialize the encoder \n");
+            cout << flush;
+            return 0;
+        }
+	}
+    if(testNum != 2)
+    {
+        if(WebRtcIsac_DecoderInit(ISAC_main_inst) < 0)
+        {
+            printf("Error could not initialize the decoder \n");
+            cout << flush;
+            return 0;
+        }
+	}
+	if(CodingMode == 1)
+    {
+        err = WebRtcIsac_Control(ISAC_main_inst, bottleneck, framesize);
+        if(err < 0)
+        {
+            /* exit if returned with error */
+            errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+            printf("\n\n Error in initialization (control): %d.\n\n", errtype);
+            cout << flush;
+            if(testNum == 0)
+            {
+                exit(EXIT_FAILURE);
+            }
+        }
+	}
+
+    if((setControlBWE) && (CodingMode == 0))
+    {
+        err = WebRtcIsac_ControlBwe(ISAC_main_inst, rateBPS, framesize, fixedFL);
+        if(err < 0)
+        {
+            /* exit if returned with error */
+            errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+
+            printf("\n\n Error in Control BWE: %d.\n\n", errtype);
+            cout << flush;
+            exit(EXIT_FAILURE);
+        }
+    }
+
+    if(payloadSize != 0)
+    {
+        err = WebRtcIsac_SetMaxPayloadSize(ISAC_main_inst, payloadSize);
+        if(err < 0)
+        {
+            /* exit if returned with error */
+            errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+            printf("\n\n Error in SetMaxPayloadSize: %d.\n\n", errtype);
+            cout << flush;
+            exit(EXIT_FAILURE);
+        }
+    }
+    if(payloadRate != 0)
+    {
+        err = WebRtcIsac_SetMaxRate(ISAC_main_inst, payloadRate);
+        if(err < 0)
+        {
+            /* exit if returned with error */
+            errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+            printf("\n\n Error in SetMaxRateInBytes: %d.\n\n", errtype);
+            cout << flush;
+            exit(EXIT_FAILURE);
+        }
+    }
+
+	*speechType = 1;
+
+    cout << "\n" << flush;
+
+    length_file = 0;
+    WebRtc_Word16 bnIdxTC;
+    WebRtc_Word16 jitterInfoTC;
+    while (endfile == 0)
+    {
+        /* Call init functions at random, fault test number 7 */
+		if(testNum == 7 && (rand()%2 == 0))
+        {
+            err = WebRtcIsac_EncoderInit(ISAC_main_inst, CodingMode);
+            /* Error check */
+            if(err < 0)
+            {
+                errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+                printf("\n\n Error in encoderinit: %d.\n\n", errtype);
+                cout << flush;
+            }
+
+            err = WebRtcIsac_DecoderInit(ISAC_main_inst);
+            /* Error check */
+            if(err < 0)
+            {
+                errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+                printf("\n\n Error in decoderinit: %d.\n\n", errtype);
+                cout << flush;
+            }
+        }
+
+		cur_framesmpls = 0;
+		while (1)
+        {
+            /* Read 10 ms speech block */
+            endfile = readframe(shortdata, inp, samplesIn10Ms);
+
+            if(endfile)
+            {
+                numFileLoop++;
+                if(numFileLoop < totFileLoop)
+                {
+                    rewind(inp);
+                    framecnt = 0;
+                    fprintf(stderr, "\n");
+                    endfile = readframe(shortdata, inp, samplesIn10Ms);
+                }
+            }
+
+            if(testNum == 7)
+            {
+		    	srand((unsigned int)time(NULL));
+		    }
+
+            /* iSAC encoding */
+            if(!(testNum == 3 && framecnt == 0))
+            {
+                stream_len = WebRtcIsac_Encode(ISAC_main_inst,
+                    shortdata,
+                    (WebRtc_Word16*)streamdata);
+                if((payloadSize != 0) && (stream_len > payloadSize))
+                {
+                    if(testNum == 0)
+                    {
+                        printf("\n\n");
+                    }
+
+                    printf("\nError: Streamsize out of range %d\n", stream_len - payloadSize);
+                    cout << flush;
+                }
+
+                WebRtcIsac_GetUplinkBw(ISAC_main_inst, &sendBN);
+
+                if(stream_len>0)
+                {
+                    if(doTransCoding)
+                    {
+                        WebRtc_Word16 indexStream;
+                        WebRtc_UWord8 auxUW8;
+
+                        /************************* Main Transcoding stream *******************************/
+                        WebRtcIsac_GetDownLinkBwIndex(ISAC_main_inst, &bnIdxTC, &jitterInfoTC);
+                        streamLenTransCoding = WebRtcIsac_GetNewBitStream(
+                            ISAC_main_inst, bnIdxTC, jitterInfoTC, rateTransCoding,
+                            (WebRtc_Word16*)streamDataTransCoding, false);
+                        if(streamLenTransCoding < 0)
+                        {
+                            fprintf(stderr, "Error in trans-coding\n");
+                            exit(0);
+                        }
+                        auxUW8 = (WebRtc_UWord8)(((streamLenTransCoding & 0xFF00) >> 8) &  0x00FF);
+                        if (fwrite(&auxUW8, sizeof(WebRtc_UWord8), 1,
+                                   transcodingBitstream) != 1) {
+                          return -1;
+                        }
+
+                        auxUW8 = (WebRtc_UWord8)(streamLenTransCoding & 0x00FF);
+                        if (fwrite(&auxUW8, sizeof(WebRtc_UWord8),
+                                   1, transcodingBitstream) != 1) {
+                          return -1;
+                        }
+
+                        if (fwrite((WebRtc_UWord8*)streamDataTransCoding,
+                                   sizeof(WebRtc_UWord8),
+                                   streamLenTransCoding,
+                                   transcodingBitstream) !=
+                            static_cast<size_t>(streamLenTransCoding)) {
+                          return -1;
+                        }
+
+                        WebRtcIsac_ReadBwIndex((WebRtc_Word16*)streamDataTransCoding, &indexStream);
+                        if(indexStream != bnIdxTC)
+                        {
+                            fprintf(stderr, "Error in inserting Bandwidth index into transcoding stream.\n");
+                            exit(0);
+                        }
+                        numTransCodingBytes += streamLenTransCoding;
+                    }
+                }
+            }
+            else
+            {
+                break;
+            }
+
+			if(stream_len < 0)
+            {
+				/* exit if returned with error */
+				errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+                printf("\n\nError in encoder: %d.\n\n", errtype);
+                cout << flush;
+			}
+			cur_framesmpls += samplesIn10Ms;
+			/* exit encoder loop if the encoder returned a bitstream */
+			if(stream_len != 0) break;
+		}
+
+        /* read next bottleneck rate */
+        if(f_bn != NULL)
+        {
+            if(fscanf(f_bn, "%d", &bottleneck) == EOF)
+            {
+                /* Set pointer to beginning of file */
+                fseek(f_bn, 0L, SEEK_SET);
+                if (fscanf(f_bn, "%d", &bottleneck) == EOF) {
+                    exit(0);
+                }
+            }
+            if(CodingMode == 1)
+            {
+                WebRtcIsac_Control(ISAC_main_inst, bottleneck, framesize);
+            }
+        }
+
+        length_file += cur_framesmpls;
+        if(cur_framesmpls == (3 * samplesIn10Ms))
+        {
+            maxStreamLen30 = (stream_len > maxStreamLen30)? stream_len:maxStreamLen30;
+        }
+        else
+        {
+            maxStreamLen60 = (stream_len > maxStreamLen60)? stream_len:maxStreamLen60;
+        }
+
+        if(!lostFrame)
+        {
+            lostFrame = ((rand()%100) < packetLossPercent);
+        }
+        else
+        {
+            lostFrame = 0;
+        }
+
+        // RED.
+        if(lostFrame)
+        {
+            stream_len = WebRtcIsac_GetRedPayload(ISAC_main_inst,
+                (WebRtc_Word16*)streamdata);
+
+            if(doTransCoding)
+            {
+                streamLenTransCoding = WebRtcIsac_GetNewBitStream(
+                    ISAC_main_inst, bnIdxTC, jitterInfoTC, rateTransCoding,
+                    (WebRtc_Word16*)streamDataTransCoding, true);
+                if(streamLenTransCoding < 0)
+                {
+                    fprintf(stderr, "Error in RED trans-coding\n");
+                    exit(0);
+                }
+            }
+        }
+
+        /* make coded sequence to short be inreasing */
+		/* the length the decoder expects */
+		if(testNum == 4)
+        {
+			stream_len += 10;
+		}
+
+		/* make coded sequence to long be decreasing */
+		/* the length the decoder expects */
+		if(testNum == 5)
+        {
+			stream_len -= 10;
+		}
+
+        if(testNum == 6)
+        {
+			srand((unsigned int)time(NULL));
+            for(i = 0; i < stream_len; i++)
+            {
+				streamdata[i] = rand();
+            }
+		}
+
+        if(VADusage){
+            readframe(vaddata, vadp, samplesIn10Ms*3);
+        }
+
+		/* simulate packet handling through NetEq and the modem */
+		if(!(testNum == 3 && framecnt == 0))
+        {
+            get_arrival_time(cur_framesmpls, stream_len, bottleneck, &BN_data,
+                sampFreqKHz*1000, sampFreqKHz*1000);
+        }
+
+		if(VADusage && (framecnt>10 && vaddata[0]==0))
+        {
+			BN_data.rtp_number--;
+		}
+        else
+        {
+            /* Error test number 10, garbage data */
+            if(testNum == 10)
+            {
+                /* Test to run decoder with garbage data */
+                for(i = 0; i < stream_len; i++)
+                {
+                    streamdata[i] = (short) (streamdata[i]) + (short) rand();
+                }
+            }
+
+            if(testNum != 9)
+            {
+                err = WebRtcIsac_UpdateBwEstimate(ISAC_main_inst, streamdata,
+                    stream_len, BN_data.rtp_number, BN_data.sample_count,
+                    BN_data.arrival_time);
+
+                if(err < 0)
+                {
+                    /* exit if returned with error */
+                    errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+                    if(testNum == 0)
+                    {
+                        printf("\n\n");
+                    }
+
+                    printf("Error: in decoder: %d.", errtype);
+                    cout << flush;
+                    if(testNum == 0)
+                    {
+                        printf("\n\n");
+                    }
+
+                }
+            }
+
+            /* Call getFramelen, only used here for function test */
+            err = WebRtcIsac_ReadFrameLen(ISAC_main_inst,
+                (WebRtc_Word16*)streamdata, &FL);
+            if(err < 0)
+            {
+                /* exit if returned with error */
+                errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+                if(testNum == 0)
+                {
+                    printf("\n\n");
+                }
+                printf("    Error: in getFrameLen %d.", errtype);
+                cout << flush;
+                if(testNum == 0)
+                {
+                    printf("\n\n");
+                }
+            }
+
+            // iSAC decoding
+
+            if(lostFrame)
+            {
+                declen = WebRtcIsac_DecodeRcu(ISAC_main_inst, streamdata,
+                    stream_len, decoded, speechType);
+
+                if(doTransCoding)
+                {
+                    declenTC = WebRtcIsac_DecodeRcu(decoderTransCoding,
+                        streamDataTransCoding, streamLenTransCoding,
+                        decodedTC, speechType);
+                }
+            }
+            else
+            {
+                declen = WebRtcIsac_Decode(ISAC_main_inst, streamdata,
+                    stream_len, decoded, speechType);
+
+                if(doTransCoding)
+                {
+                    declenTC = WebRtcIsac_Decode(decoderTransCoding,
+                        streamDataTransCoding, streamLenTransCoding,
+                        decodedTC, speechType);
+                }
+            }
+
+            if(declen < 0)
+            {
+                /* exit if returned with error */
+                errtype=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+                if(testNum == 0)
+                {
+                    printf("\n\n");
+                }
+                printf("    Error: in decoder %d.", errtype);
+                cout << flush;
+                if(testNum == 0)
+                {
+                    printf("\n\n");
+                }
+            }
+
+            if(declenTC < 0)
+            {
+                if(testNum == 0)
+                {
+                    printf("\n\n");
+                }
+                printf("    Error: in decoding the transcoded stream");
+                cout << flush;
+                if(testNum == 0)
+                {
+                    printf("\n\n");
+                }
+
+            }
+        }
+        /* Write decoded speech frame to file */
+        if((declen > 0) && (numFileLoop == 0))
+        {
+          if (fwrite(decoded, sizeof(WebRtc_Word16), declen,
+                     outp) != static_cast<size_t>(declen)) {
+            return -1;
+          }
+        }
+
+        if((declenTC > 0) && (numFileLoop == 0))
+        {
+          if (fwrite(decodedTC, sizeof(WebRtc_Word16), declen,
+                     transCodingFile) != static_cast<size_t>(declen)) {
+            return -1;
+          }
+        }
+
+
+		fprintf(stderr, "\rframe = %5d  ", framecnt);
+        fflush(stderr);
+		framecnt++;
+
+        /* Error test number 10, garbage data */
+        //if(testNum == 10)
+        //{
+        //    /* Test to run decoder with garbage data */
+        //    if( (seedfile = fopen(SEED_FILE, "a+t") ) == NULL )
+        //    {
+        //        fprintf(stderr, "Error: Could not open file %s\n", SEED_FILE);
+        //    }
+        //    else
+        //    {
+        //        fprintf(seedfile, "ok\n\n");
+        //        fclose(seedfile);
+        //    }
+        //}
+        /* Error test number 10, garbage data */
+        //if(testNum == 10)
+        //{
+        //    /* Test to run decoder with garbage data */
+        //    for ( i = 0; i < stream_len; i++)
+        //    {
+        //        streamdata[i] = (short) (streamdata[i] + (short) rand());
+        //    }
+        //}
+
+
+		totalsmpls += declen;
+		totalbits += 8 * stream_len;
+#ifdef _DEBUG
+        kbps = ((double) sampFreqKHz * 1000.) / ((double) cur_framesmpls) * 8.0 * stream_len / 1000.0;// kbits/s
+		fy = fopen("bit_rate.dat", "a");
+		fprintf(fy, "Frame %i = %0.14f\n", framecnt, kbps);
+		fclose(fy);
+
+#endif /* _DEBUG */
+
+	}
+	printf("\n");
+	printf("total bits               = %d bits\n", totalbits);
+	printf("measured average bitrate = %0.3f kbits/s\n",
+        (double)totalbits *(sampFreqKHz) / totalsmpls);
+    if(doTransCoding)
+    {
+        printf("Transcoding average bit-rate = %0.3f kbps\n",
+            (double)numTransCodingBytes * 8.0 *(sampFreqKHz) / totalsmpls);
+        fclose(transCodingFile);
+    }
+	printf("\n");
+
+	/* Runtime statistics */
+	runtime = (double)(clock()/(double)CLOCKS_PER_SEC-starttime);
+	length_file = length_file /(sampFreqKHz * 1000.);
+
+    printf("\n\nLength of speech file: %.1f s\n", length_file);
+	printf("Time to run iSAC:      %.2f s (%.2f %% of realtime)\n\n", runtime, (100*runtime/length_file));
+
+    if(maxStreamLen30 != 0)
+    {
+        printf("Maximum payload size 30ms Frames %d bytes (%0.3f kbps)\n",
+            maxStreamLen30,
+            maxStreamLen30 * 8 / 30.);
+    }
+    if(maxStreamLen60 != 0)
+    {
+        printf("Maximum payload size 60ms Frames %d bytes (%0.3f kbps)\n",
+            maxStreamLen60,
+            maxStreamLen60 * 8 / 60.);
+    }
+    //fprintf(stderr, "\n");
+
+	fprintf(stderr, "   %.1f s", length_file);
+    fprintf(stderr, "   %0.1f kbps", (double)totalbits *(sampFreqKHz) / totalsmpls);
+    if(maxStreamLen30 != 0)
+    {
+        fprintf(stderr, "   plmax-30ms %d bytes (%0.0f kbps)",
+            maxStreamLen30,
+            maxStreamLen30 * 8 / 30.);
+    }
+    if(maxStreamLen60 != 0)
+    {
+        fprintf(stderr, "   plmax-60ms %d bytes (%0.0f kbps)",
+            maxStreamLen60,
+            maxStreamLen60 * 8 / 60.);
+    }
+    if(doTransCoding)
+    {
+        fprintf(stderr, "  transcoding rate %.0f kbps",
+            (double)numTransCodingBytes * 8.0 *(sampFreqKHz) / totalsmpls);
+    }
+
+    fclose(inp);
+	fclose(outp);
+	WebRtcIsac_Free(ISAC_main_inst);
+
+
+	exit(0);
+}
diff --git a/src/modules/audio_coding/codecs/isac/main/test/SwitchingSampRate/SwitchingSampRate.cc b/src/modules/audio_coding/codecs/isac/main/test/SwitchingSampRate/SwitchingSampRate.cc
new file mode 100644
index 0000000..cccae28
--- /dev/null
+++ b/src/modules/audio_coding/codecs/isac/main/test/SwitchingSampRate/SwitchingSampRate.cc
@@ -0,0 +1,455 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// SwitchingSampRate.cpp : Defines the entry point for the console
+// application.
+//
+
+#include <iostream>
+#include "isac.h"
+#include "utility.h"
+#include "signal_processing_library.h"
+
+#define MAX_FILE_NAME  500
+#define MAX_NUM_CLIENTS 2
+
+
+#define NUM_CLIENTS 2
+
+using namespace std;
+
+int main(int argc, char* argv[])
+{
+  char fileNameWB[MAX_FILE_NAME];
+  char fileNameSWB[MAX_FILE_NAME];
+
+  char outFileName[MAX_NUM_CLIENTS][MAX_FILE_NAME];
+
+  FILE* inFile[MAX_NUM_CLIENTS];
+  FILE* outFile[MAX_NUM_CLIENTS];
+
+  ISACStruct* codecInstance[MAX_NUM_CLIENTS];
+  WebRtc_Word32 resamplerState[MAX_NUM_CLIENTS][8];
+
+  enum IsacSamplingRate encoderSampRate[MAX_NUM_CLIENTS];
+
+  int minBn = 16000;
+  int maxBn = 56000;
+
+  int bnWB = 32000;
+  int bnSWB = 56000;
+
+  strcpy(outFileName[0], "switchSampRate_out1.pcm");
+  strcpy(outFileName[1], "switchSampRate_out2.pcm");
+
+  short clientCntr;
+
+  unsigned int lenEncodedInBytes[MAX_NUM_CLIENTS];
+  unsigned int lenAudioIn10ms[MAX_NUM_CLIENTS];
+  unsigned int lenEncodedInBytesTmp[MAX_NUM_CLIENTS];
+  unsigned int lenAudioIn10msTmp[MAX_NUM_CLIENTS];
+  BottleNeckModel* packetData[MAX_NUM_CLIENTS];
+
+  char versionNumber[100];
+  short samplesIn10ms[MAX_NUM_CLIENTS];
+  int bottleneck[MAX_NUM_CLIENTS];
+
+  printf("\n\n");
+  printf("____________________________________________\n\n");
+  WebRtcIsac_version(versionNumber);
+  printf("    iSAC-swb version %s\n", versionNumber);
+  printf("____________________________________________\n");
+
+
+  fileNameWB[0]  = '\0';
+  fileNameSWB[0] = '\0';
+
+  char myFlag[20];
+  strcpy(myFlag, "-wb");
+  // READ THE WIDEBAND AND SUPER-WIDEBAND FILE NAMES
+  if(readParamString(argc, argv, myFlag, fileNameWB, MAX_FILE_NAME) <= 0)
+  {
+    printf("No wideband file is specified");
+  }
+
+  strcpy(myFlag, "-swb");
+  if(readParamString(argc, argv, myFlag, fileNameSWB, MAX_FILE_NAME) <= 0)
+  {
+    printf("No super-wideband file is specified");
+  }
+
+  // THE FIRST CLIENT STARTS IN WIDEBAND
+  encoderSampRate[0] = kIsacWideband;
+  OPEN_FILE_RB(inFile[0], fileNameWB);
+
+  // THE SECOND CLIENT STARTS IN SUPER-WIDEBAND
+  encoderSampRate[1] = kIsacSuperWideband;
+  OPEN_FILE_RB(inFile[1], fileNameSWB);
+
+  strcpy(myFlag, "-I");
+  short codingMode = readSwitch(argc, argv, myFlag);
+
+  for(clientCntr = 0; clientCntr < NUM_CLIENTS; clientCntr++)
+  {
+    codecInstance[clientCntr] = NULL;
+
+    printf("\n");
+    printf("Client %d\n", clientCntr + 1);
+    printf("---------\n");
+    printf("Starting %s",
+           (encoderSampRate[clientCntr] == kIsacWideband)
+           ? "wideband":"super-wideband");
+
+    // Open output File Name
+    OPEN_FILE_WB(outFile[clientCntr], outFileName[clientCntr]);
+    printf("Output File...................... %s\n", outFileName[clientCntr]);
+
+    samplesIn10ms[clientCntr] = encoderSampRate[clientCntr] * 10;
+
+    if(codingMode == 1)
+    {
+      bottleneck[clientCntr] = (clientCntr)? bnSWB:bnWB;
+    }
+    else
+    {
+      bottleneck[clientCntr] = (clientCntr)? minBn:maxBn;
+    }
+
+    printf("Bottleneck....................... %0.3f kbits/sec \n",
+           bottleneck[clientCntr] / 1000.0);
+
+    // coding-mode
+    printf("Encoding Mode.................... %s\n",
+           (codingMode == 1)? "Channel-Independent (Instantaneous)":"Adaptive");
+
+    lenEncodedInBytes[clientCntr] = 0;
+    lenAudioIn10ms[clientCntr] = 0;
+    lenEncodedInBytesTmp[clientCntr] = 0;
+    lenAudioIn10msTmp[clientCntr] = 0;
+
+    packetData[clientCntr] = (BottleNeckModel*)new(BottleNeckModel);
+    if(packetData[clientCntr] == NULL)
+    {
+      printf("Could not allocate memory for packetData \n");
+      return -1;
+    }
+    memset(packetData[clientCntr], 0, sizeof(BottleNeckModel));
+    memset(resamplerState[clientCntr], 0, sizeof(WebRtc_Word32) * 8);
+  }
+
+  for(clientCntr = 0; clientCntr < NUM_CLIENTS; clientCntr++)
+  {
+    // Create
+    if(WebRtcIsac_Create(&codecInstance[clientCntr]))
+    {
+      printf("Could not creat client %d\n", clientCntr + 1);
+      return -1;
+    }
+
+    WebRtcIsac_SetEncSampRate(codecInstance[clientCntr], encoderSampRate[clientCntr]);
+
+    WebRtcIsac_SetDecSampRate(codecInstance[clientCntr],
+                              encoderSampRate[clientCntr + (1 - ((clientCntr & 1)<<1))]);
+
+    // Initialize Encoder
+    if(WebRtcIsac_EncoderInit(codecInstance[clientCntr],
+                              codingMode) < 0)
+    {
+      printf("Could not initialize client, %d\n", clientCntr + 1);
+      return -1;
+    }
+
+    // Initialize Decoder
+    if(WebRtcIsac_DecoderInit(codecInstance[clientCntr]) < 0)
+    {
+      printf("Could not initialize decoder of client %d\n",
+             clientCntr + 1);
+      return -1;
+    }
+
+    // setup Rate if in Instantaneous mode
+    if(codingMode != 0)
+    {
+      // ONLY Clients who are not in Adaptive mode
+      if(WebRtcIsac_Control(codecInstance[clientCntr],
+                            bottleneck[clientCntr], 30) < 0)
+      {
+        printf("Could not setup bottleneck and frame-size for client %d\n",
+               clientCntr + 1);
+        return -1;
+      }
+    }
+  }
+
+
+  short streamLen;
+  short numSamplesRead;
+  short lenDecodedAudio;
+  short senderIdx;
+  short receiverIdx;
+
+  printf("\n");
+  short num10ms[MAX_NUM_CLIENTS];
+  memset(num10ms, 0, sizeof(short)*MAX_NUM_CLIENTS);
+  FILE* arrivalTimeFile1 = fopen("arrivalTime1.dat", "wb");
+  FILE* arrivalTimeFile2 = fopen("arrivalTime2.dat", "wb");
+  short numPrint[MAX_NUM_CLIENTS];
+  memset(numPrint, 0, sizeof(short) * MAX_NUM_CLIENTS);
+
+  // Audio Buffers
+  short silence10ms[10 * 32];
+  memset(silence10ms, 0, 320 * sizeof(short));
+  short audioBuff10ms[10 * 32];
+  short audioBuff60ms[60 * 32];
+  short resampledAudio60ms[60 * 32];
+
+  unsigned short bitStream[600+600];
+  short speechType[1];
+
+  short numSampFreqChanged = 0;
+  while(numSampFreqChanged < 10)
+  {
+    for(clientCntr = 0; clientCntr < NUM_CLIENTS; clientCntr++)
+    {
+      // Encoding/decoding for this pair of clients, if there is
+      // audio for any of them
+      //if(audioLeft[clientCntr] || audioLeft[clientCntr + 1])
+      //{
+      //for(pairCntr = 0; pairCntr < 2; pairCntr++)
+      //{
+      senderIdx = clientCntr; // + pairCntr;
+      receiverIdx = 1 - clientCntr;//  + (1 - pairCntr);
+
+      //if(num10ms[senderIdx] > 6)
+      //{
+      //    printf("Too many frames read for client %d",
+      //        senderIdx + 1);
+      //    return -1;
+      //}
+
+      numSamplesRead = (short)fread(audioBuff10ms, sizeof(short),
+                                    samplesIn10ms[senderIdx], inFile[senderIdx]);
+      if(numSamplesRead != samplesIn10ms[senderIdx])
+      {
+        // file finished switch encoder sampling frequency.
+        printf("Changing Encoder Sampling frequency in client %d to ", senderIdx+1);
+        fclose(inFile[senderIdx]);
+        numSampFreqChanged++;
+        if(encoderSampRate[senderIdx] == kIsacWideband)
+        {
+          printf("super-wideband.\n");
+          OPEN_FILE_RB(inFile[senderIdx], fileNameSWB);
+          encoderSampRate[senderIdx] = kIsacSuperWideband;
+        }
+        else
+        {
+          printf("wideband.\n");
+          OPEN_FILE_RB(inFile[senderIdx], fileNameWB);
+          encoderSampRate[senderIdx] = kIsacWideband;
+        }
+        WebRtcIsac_SetEncSampRate(codecInstance[senderIdx], encoderSampRate[senderIdx]);
+        WebRtcIsac_SetDecSampRate(codecInstance[receiverIdx], encoderSampRate[senderIdx]);
+
+        samplesIn10ms[clientCntr] = encoderSampRate[clientCntr] * 10;
+
+        numSamplesRead = (short)fread(audioBuff10ms, sizeof(short),
+                                      samplesIn10ms[senderIdx], inFile[senderIdx]);
+        if(numSamplesRead != samplesIn10ms[senderIdx])
+        {
+          printf(" File %s for client %d has not enough audio\n",
+                 (encoderSampRate[senderIdx]==kIsacWideband)? "wideband":"super-wideband",
+                 senderIdx + 1);
+          return -1;
+        }
+      }
+      num10ms[senderIdx]++;
+
+      // sanity check
+      //if(num10ms[senderIdx] > 6)
+      //{
+      //    printf("Client %d has got more than 60 ms audio and encoded no packet.\n",
+      //        senderIdx);
+      //    return -1;
+      //}
+
+      // Encode
+
+
+      streamLen = WebRtcIsac_Encode(codecInstance[senderIdx],
+                                    audioBuff10ms, (short*)bitStream);
+      WebRtc_Word16 ggg;
+      if (streamLen > 0) {
+        if((  WebRtcIsac_ReadFrameLen(codecInstance[receiverIdx],
+                                      (short *) bitStream, &ggg))<0)
+          printf("ERROR\n");
+      }
+
+      // Sanity check
+      if(streamLen < 0)
+      {
+        printf(" Encoder error in client %d \n", senderIdx + 1);
+        return -1;
+      }
+
+
+      if(streamLen > 0)
+      {
+        // Packet generated; model sending through a channel, do bandwidth
+        // estimation at the receiver and decode.
+        lenEncodedInBytes[senderIdx] += streamLen;
+        lenAudioIn10ms[senderIdx] += (unsigned int)num10ms[senderIdx];
+        lenEncodedInBytesTmp[senderIdx] += streamLen;
+        lenAudioIn10msTmp[senderIdx] += (unsigned int)num10ms[senderIdx];
+
+        // Print after ~5 sec.
+        if(lenAudioIn10msTmp[senderIdx] >= 100)
+        {
+          numPrint[senderIdx]++;
+          printf("  %d,  %6.3f => %6.3f ", senderIdx+1,
+                 bottleneck[senderIdx] / 1000.0,
+                 lenEncodedInBytesTmp[senderIdx] * 0.8 /
+                 lenAudioIn10msTmp[senderIdx]);
+
+          if(codingMode == 0)
+          {
+            WebRtc_Word32 bn;
+            WebRtcIsac_GetUplinkBw(codecInstance[senderIdx], &bn);
+            printf("[%d] ", bn);
+          }
+          //WebRtc_Word16 rateIndexLB;
+          //WebRtc_Word16 rateIndexUB;
+          //WebRtcIsac_GetDownLinkBwIndex(codecInstance[receiverIdx],
+          //    &rateIndexLB, &rateIndexUB);
+          //printf(" (%2d, %2d) ", rateIndexLB, rateIndexUB);
+
+          cout << flush;
+          lenEncodedInBytesTmp[senderIdx] = 0;
+          lenAudioIn10msTmp[senderIdx]    = 0;
+          //if(senderIdx == (NUM_CLIENTS - 1))
+          //{
+          printf("  %0.1f \n", lenAudioIn10ms[senderIdx] * 10. /1000);
+          //}
+
+          // After ~20 sec change the bottleneck.
+          //    if((numPrint[senderIdx] == 4) && (codingMode == 0))
+          //    {
+          //        numPrint[senderIdx] = 0;
+          //        if(codingMode == 0)
+          //        {
+          //            int newBottleneck = bottleneck[senderIdx] +
+          //                (bottleneckChange[senderIdx] * 1000);
+
+          //            if(bottleneckChange[senderIdx] > 0)
+          //            {
+          //                if(newBottleneck >maxBn)
+          //                {
+          //                    bottleneckChange[senderIdx] = -1;
+          //                    newBottleneck = bottleneck[senderIdx] +
+          //                        (bottleneckChange[senderIdx] * 1000);
+          //                    if(newBottleneck > minBn)
+          //                    {
+          //                        bottleneck[senderIdx] = newBottleneck;
+          //                    }
+          //                }
+          //                else
+          //                {
+          //                    bottleneck[senderIdx] = newBottleneck;
+          //                }
+          //            }
+          //            else
+          //            {
+          //                if(newBottleneck < minBn)
+          //                {
+          //                    bottleneckChange[senderIdx] = 1;
+          //                    newBottleneck = bottleneck[senderIdx] +
+          //                        (bottleneckChange[senderIdx] * 1000);
+          //                    if(newBottleneck < maxBn)
+          //                    {
+          //                        bottleneck[senderIdx] = newBottleneck;
+          //                    }
+          //                }
+          //                else
+          //                {
+          //                    bottleneck[senderIdx] = newBottleneck;
+          //                }
+          //            }
+          //        }
+          //    }
+        }
+
+        // model a channel of given bottleneck, to get the receive timestamp
+        get_arrival_time(num10ms[senderIdx] * samplesIn10ms[senderIdx],
+                         streamLen, bottleneck[senderIdx], packetData[senderIdx],
+                         encoderSampRate[senderIdx]*1000, encoderSampRate[senderIdx]*1000);
+
+        // Write the arrival time.
+        if(senderIdx == 0)
+        {
+          if (fwrite(&(packetData[senderIdx]->arrival_time),
+                     sizeof(unsigned int),
+                     1, arrivalTimeFile1) != 1) {
+            return -1;
+          }
+        }
+        else
+        {
+          if (fwrite(&(packetData[senderIdx]->arrival_time),
+                     sizeof(unsigned int),
+                     1, arrivalTimeFile2) != 1) {
+            return -1;
+          }
+        }
+
+        // BWE
+        if(WebRtcIsac_UpdateBwEstimate(codecInstance[receiverIdx],
+                                       bitStream,  streamLen, packetData[senderIdx]->rtp_number,
+                                       packetData[senderIdx]->sample_count,
+                                       packetData[senderIdx]->arrival_time) < 0)
+        {
+          printf(" BWE Error at client %d \n", receiverIdx + 1);
+          return -1;
+        }
+        /**/
+        // Decode
+        lenDecodedAudio = WebRtcIsac_Decode(
+            codecInstance[receiverIdx], bitStream, streamLen,
+            audioBuff60ms, speechType);
+        if(lenDecodedAudio < 0)
+        {
+          printf(" Decoder error in client %d \n", receiverIdx + 1);
+          return -1;
+        }
+
+
+        if(encoderSampRate[senderIdx] == kIsacWideband)
+        {
+          WebRtcSpl_UpsampleBy2(audioBuff60ms, lenDecodedAudio, resampledAudio60ms,
+                                resamplerState[receiverIdx]);
+          if (fwrite(resampledAudio60ms, sizeof(short), lenDecodedAudio << 1,
+                     outFile[receiverIdx]) !=
+              static_cast<size_t>(lenDecodedAudio << 1)) {
+            return -1;
+          }
+        }
+        else
+        {
+          if (fwrite(audioBuff60ms, sizeof(short), lenDecodedAudio,
+                     outFile[receiverIdx]) !=
+              static_cast<size_t>(lenDecodedAudio)) {
+            return -1;
+          }
+        }
+        num10ms[senderIdx] = 0;
+      }
+      //}
+      //}
+    }
+  }
+}
diff --git a/src/modules/audio_coding/codecs/isac/main/test/debugUtility.h b/src/modules/audio_coding/codecs/isac/main/test/debugUtility.h
new file mode 100644
index 0000000..d708ad1
--- /dev/null
+++ b/src/modules/audio_coding/codecs/isac/main/test/debugUtility.h
@@ -0,0 +1,72 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_TEST_DEBUGUTILITY_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_TEST_DEBUGUTILITY_H_
+
+#include <stdio.h>
+#include <string.h>
+#include "utility.h"
+
+typedef struct 
+{
+    FILE*  res0to4FilePtr;
+    FILE*  res4to8FilePtr;
+    FILE*  res8to12FilePtr;
+    FILE*  res8to16FilePtr;
+
+    FILE*  res0to4DecFilePtr;
+    FILE*  res4to8DecFilePtr;
+    FILE*  res8to12DecFilePtr;
+    FILE*  res8to16DecFilePtr;
+
+    FILE*  in0to4FilePtr;
+    FILE*  in4to8FilePtr;
+    FILE*  in8to12FilePtr;
+    FILE*  in8to16FilePtr;
+
+    FILE*  out0to4FilePtr;
+    FILE*  out4to8FilePtr;
+    FILE*  out8to12FilePtr;
+    FILE*  out8to16FilePtr;
+
+    FILE*  fftFilePtr;
+    FILE*  fftDecFilePtr;
+
+    FILE*  arrivalTime;
+    
+    float  lastArrivalTime;
+
+    int    prevPacketLost;
+    int    currPacketLost;
+    int    nextPacketLost;
+
+    //double residualSignal4kHZ[240];
+    int    packetLossPercent;
+
+    int maxPayloadLB;
+    int maxPayloadUB;
+    int lbBytes;
+    int ubBytes;
+    
+
+}debugStruct;
+
+
+#define PRINT_ENTROPY_INFO(obj)                                         \
+    do                                                                  \
+    {                                                                   \
+        printf("%10u, %u; ",                                            \
+            obj->bitstr_obj.streamval, obj->bitstr_obj.stream_index);   \
+    } while(0)  
+
+int setupDebugStruct(debugStruct* str);
+
+#endif
\ No newline at end of file
diff --git a/src/modules/audio_coding/codecs/isac/main/test/simpleKenny.c b/src/modules/audio_coding/codecs/isac/main/test/simpleKenny.c
new file mode 100644
index 0000000..be1588c
--- /dev/null
+++ b/src/modules/audio_coding/codecs/isac/main/test/simpleKenny.c
@@ -0,0 +1,644 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/* kenny.c  - Main function for the iSAC coder */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+
+#ifdef WIN32
+#include "windows.h"
+#define CLOCKS_PER_SEC  1000
+#endif
+
+#include <ctype.h>
+#include <math.h>
+
+/* include API */
+#include "isac.h"
+#include "utility.h"
+//#include "commonDefs.h"
+
+/* max number of samples per frame (= 60 ms frame) */
+#define MAX_FRAMESAMPLES_SWB                1920
+/* number of samples per 10ms frame */
+#define FRAMESAMPLES_SWB_10ms               320
+#define FRAMESAMPLES_WB_10ms                160
+
+/* sampling frequency (Hz) */
+#define FS_SWB                               32000
+#define FS_WB                                16000
+
+//#define CHANGE_OUTPUT_NAME
+
+#ifdef HAVE_DEBUG_INFO
+    #include "debugUtility.h"
+    debugStruct debugInfo;
+#endif
+
+unsigned long framecnt = 0;
+
+int main(int argc, char* argv[])
+{
+    //--- File IO ----
+    FILE* inp;
+    FILE* outp;
+    char inname[500];
+    char outname[500];
+
+    /* Runtime statistics */
+    double        rate;
+    double        rateRCU;
+    unsigned long totalbits = 0;
+    unsigned long totalBitsRCU = 0;
+    unsigned long totalsmpls =0;
+
+    WebRtc_Word32   bottleneck = 39;
+    WebRtc_Word16   frameSize = 30;           /* ms */
+    WebRtc_Word16   codingMode = 1;
+    WebRtc_Word16   shortdata[FRAMESAMPLES_SWB_10ms];
+    WebRtc_Word16   decoded[MAX_FRAMESAMPLES_SWB];
+    //WebRtc_UWord16  streamdata[1000];
+    WebRtc_Word16   speechType[1];
+    WebRtc_Word16   payloadLimit;
+    WebRtc_Word32   rateLimit;
+    ISACStruct*   ISAC_main_inst;
+
+    WebRtc_Word16   stream_len = 0;
+    WebRtc_Word16   declen;
+    WebRtc_Word16   err;
+    WebRtc_Word16   cur_framesmpls;
+    int           endfile;
+#ifdef WIN32
+    double        length_file;
+    double        runtime;
+    char          outDrive[10];
+    char          outPath[500];
+    char          outPrefix[500];
+    char          outSuffix[500];
+    char          bitrateFileName[500];
+    FILE*         bitrateFile;
+    double        starttime;
+    double        rateLB = 0;
+    double        rateUB = 0;
+#endif
+    FILE*         histFile;
+    FILE*         averageFile;
+    int           sampFreqKHz;
+    int           samplesIn10Ms;
+    WebRtc_Word16   maxStreamLen = 0;
+    char          histFileName[500];
+    char          averageFileName[500];
+    unsigned int  hist[600];
+    unsigned int  tmpSumStreamLen = 0;
+    unsigned int  packetCntr = 0;
+    unsigned int  lostPacketCntr = 0;
+    WebRtc_UWord16  payload[600];
+    WebRtc_UWord16  payloadRCU[600];
+    WebRtc_UWord16  packetLossPercent = 0;
+    WebRtc_Word16   rcuStreamLen = 0;
+	int onlyEncode;
+	int onlyDecode;
+
+
+    BottleNeckModel packetData;
+	packetData.arrival_time  = 0;
+	packetData.sample_count  = 0;
+	packetData.rtp_number    = 0;
+    memset(hist, 0, sizeof(hist));
+
+    /* handling wrong input arguments in the command line */
+    if(argc < 5)
+    {
+		int size;
+		WebRtcIsac_AssignSize(&size);
+
+        printf("\n\nWrong number of arguments or flag values.\n\n");
+
+        printf("Usage:\n\n");
+        printf("%s infile outfile -bn bottelneck [options] \n\n", argv[0]);
+        printf("with:\n");
+        printf("-I................... indicates encoding in instantaneous mode.\n");
+        printf("-bn bottleneck....... the value of the bottleneck in bit/sec, e.g. 39742,\n");
+		printf("                      in instantaneous (channel-independent) mode.\n\n");
+        printf("infile............... Normal speech input file\n\n");
+        printf("outfile.............. Speech output file\n\n");
+        printf("OPTIONS\n");
+        printf("-------\n");
+        printf("-fs sampFreq......... sampling frequency of codec 16 or 32 (default) kHz.\n");
+        printf("-plim payloadLim..... payload limit in bytes,\n");
+        printf("                      default is the maximum possible.\n");
+        printf("-rlim rateLim........ rate limit in bits/sec, \n");
+        printf("                      default is the maimum possible.\n");
+        printf("-h file.............. record histogram and *append* to 'file'.\n");
+        printf("-ave file............ record average rate of 3 sec intervales and *append* to 'file'.\n");
+        printf("-ploss............... packet-loss percentage.\n");
+		printf("-enc................. do only encoding and store the bit-stream\n");
+		printf("-dec................. the input file is a bit-stream, decode it.\n");
+
+        printf("\n");
+        printf("Example usage:\n\n");
+        printf("%s speechIn.pcm speechOut.pcm -B 40000 -fs 32 \n\n", argv[0]);
+
+		printf("structure size %d bytes\n", size);
+
+        exit(0);
+    }
+
+
+
+    /* Get Bottleneck value */
+    bottleneck = readParamInt(argc, argv, "-bn", 50000);
+    fprintf(stderr,"\nfixed bottleneck rate of %d bits/s\n\n", bottleneck);
+
+    /* Get Input and Output files */
+    sscanf(argv[1], "%s", inname);
+    sscanf(argv[2], "%s", outname);
+    codingMode = readSwitch(argc, argv, "-I");
+    sampFreqKHz = (WebRtc_Word16)readParamInt(argc, argv, "-fs", 32);
+    if(readParamString(argc, argv, "-h", histFileName, 500) > 0)
+    {
+        histFile = fopen(histFileName, "a");
+        if(histFile == NULL)
+        {
+            printf("cannot open hist file %s", histFileName);
+            exit(0);
+        }
+    }
+    else
+    {
+        // NO recording of hitstogram
+        histFile = NULL;
+    }
+
+
+    packetLossPercent = readParamInt(argc, argv, "-ploss", 0);
+
+    if(readParamString(argc, argv, "-ave", averageFileName, 500) > 0)
+    {
+        averageFile = fopen(averageFileName, "a");
+        if(averageFile == NULL)
+        {
+            printf("cannot open file to write rate %s", averageFileName);
+            exit(0);
+        }
+    }
+    else
+    {
+        averageFile = NULL;
+    }
+
+	onlyEncode = readSwitch(argc, argv, "-enc");
+	onlyDecode = readSwitch(argc, argv, "-dec");
+
+
+    switch(sampFreqKHz)
+    {
+    case 16:
+        {
+            samplesIn10Ms = 160;
+            break;
+        }
+    case 32:
+        {
+            samplesIn10Ms = 320;
+            break;
+        }
+    default:
+        printf("A sampling frequency of %d kHz is not supported,\
+valid values are 8 and 16.\n", sampFreqKHz);
+        exit(-1);
+    }
+    payloadLimit = (WebRtc_Word16)readParamInt(argc, argv, "-plim", 400);
+    rateLimit = readParamInt(argc, argv, "-rlim", 106800);
+
+    if ((inp = fopen(inname,"rb")) == NULL) {
+        printf("  iSAC: Cannot read file %s.\n", inname);
+        exit(1);
+    }
+    if ((outp = fopen(outname,"wb")) == NULL) {
+        printf("  iSAC: Cannot write file %s.\n", outname);
+        exit(1);
+    }
+
+#ifdef WIN32
+    _splitpath(outname, outDrive, outPath, outPrefix, outSuffix);
+    _makepath(bitrateFileName, outDrive, outPath, "bitrate", ".txt");
+
+    bitrateFile = fopen(bitrateFileName, "a");
+    fprintf(bitrateFile, "%  %%s  \n", inname);
+#endif
+
+    printf("\n");
+    printf("Input.................... %s\n", inname);
+    printf("Output................... %s\n", outname);
+    printf("Encoding Mode............ %s\n",
+        (codingMode == 1)? "Channel-Independent":"Channel-Adaptive");
+    printf("Bottleneck............... %d bits/sec\n", bottleneck);
+    printf("Packet-loss Percentage... %d\n", packetLossPercent);
+    printf("\n");
+
+#ifdef WIN32
+    starttime = clock()/(double)CLOCKS_PER_SEC; /* Runtime statistics */
+#endif
+
+    /* Initialize the ISAC and BN structs */
+    err = WebRtcIsac_Create(&ISAC_main_inst);
+
+    WebRtcIsac_SetEncSampRate(ISAC_main_inst, (sampFreqKHz == 16)? kIsacWideband: kIsacSuperWideband);
+    WebRtcIsac_SetDecSampRate(ISAC_main_inst, (sampFreqKHz == 16)? kIsacWideband: kIsacSuperWideband);
+    /* Error check */
+    if (err < 0) {
+        fprintf(stderr,"\n\n Error in create.\n\n");
+        exit(EXIT_FAILURE);
+    }
+
+    framecnt = 0;
+    endfile     = 0;
+
+    /* Initialize encoder and decoder */
+    if(WebRtcIsac_EncoderInit(ISAC_main_inst, codingMode) < 0)
+    {
+        printf("cannot initialize encoder\n");
+        return -1;
+    }
+    if(WebRtcIsac_DecoderInit(ISAC_main_inst) < 0)
+    {
+        printf("cannot initialize decoder\n");
+        return -1;
+    }
+
+    //{
+    //    WebRtc_Word32 b1, b2;
+    //    FILE* fileID = fopen("GetBNTest.txt", "w");
+    //    b2 = 32100;
+    //    while(b2 <= 52000)
+    //    {
+    //        WebRtcIsac_Control(ISAC_main_inst, b2, frameSize);
+    //        WebRtcIsac_GetUplinkBw(ISAC_main_inst, &b1);
+    //        fprintf(fileID, "%5d %5d\n", b2, b1);
+    //        b2 += 10;
+    //    }
+    //}
+
+    if(codingMode == 1)
+    {
+        if(WebRtcIsac_Control(ISAC_main_inst, bottleneck, frameSize) < 0)
+        {
+            printf("cannot set bottleneck\n");
+            return -1;
+        }
+    }
+    else
+    {
+        if(WebRtcIsac_ControlBwe(ISAC_main_inst, 15000, 30, 1) < 0)
+        {
+            printf("cannot configure BWE\n");
+            return -1;
+        }
+    }
+
+    if(WebRtcIsac_SetMaxPayloadSize(ISAC_main_inst, payloadLimit) < 0)
+    {
+        printf("cannot set maximum payload size %d.\n", payloadLimit);
+        return -1;
+    }
+
+    if (rateLimit < 106800) {
+        if(WebRtcIsac_SetMaxRate(ISAC_main_inst, rateLimit) < 0)
+        {
+            printf("cannot set the maximum rate %d.\n", rateLimit);
+            return -1;
+        }
+    }
+
+    //=====================================
+//#ifdef HAVE_DEBUG_INFO
+//    if(setupDebugStruct(&debugInfo) < 0)
+//    {
+//        exit(1);
+//    }
+//#endif
+
+    while (endfile == 0)
+    {
+        fprintf(stderr,"  \rframe = %7li", framecnt);
+
+        //============== Readind from the file and encoding =================
+        cur_framesmpls = 0;
+        stream_len = 0;
+
+
+		if(onlyDecode)
+		{
+			WebRtc_UWord8 auxUW8;
+                        size_t auxSizet;
+			if(fread(&auxUW8, sizeof(WebRtc_UWord8), 1, inp) < 1)
+			{
+				break;
+			}
+			stream_len = ((WebRtc_UWord8)auxUW8) << 8;
+			if(fread(&auxUW8, sizeof(WebRtc_UWord8), 1, inp) < 1)
+			{
+				break;
+			}
+			stream_len |= (WebRtc_UWord16)auxUW8;
+                        auxSizet = (size_t)stream_len;
+                        if(fread(payload, 1, auxSizet, inp) < auxSizet)
+			{
+				printf("last payload is corrupted\n");
+				break;
+			}
+		}
+		else
+		{
+			while(stream_len == 0)
+			{
+				// Read 10 ms speech block
+				endfile = readframe(shortdata, inp, samplesIn10Ms);
+				if(endfile)
+				{
+					break;
+				}
+				cur_framesmpls += samplesIn10Ms;
+
+				//-------- iSAC encoding ---------
+				stream_len = WebRtcIsac_Encode(ISAC_main_inst, shortdata,
+					(WebRtc_Word16*)payload);
+
+				if(stream_len < 0)
+				{
+					// exit if returned with error
+					//errType=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+					fprintf(stderr,"\nError in encoder\n");
+					getchar();
+					exit(EXIT_FAILURE);
+				}
+
+
+			}
+			//===================================================================
+			if(endfile)
+			{
+				break;
+			}
+
+			rcuStreamLen = WebRtcIsac_GetRedPayload(ISAC_main_inst, (WebRtc_Word16*)payloadRCU);
+
+			get_arrival_time(cur_framesmpls, stream_len, bottleneck, &packetData,
+				sampFreqKHz * 1000, sampFreqKHz * 1000);
+			if(WebRtcIsac_UpdateBwEstimate(ISAC_main_inst,
+				payload,  stream_len, packetData.rtp_number,
+				packetData.sample_count,
+				packetData.arrival_time) < 0)
+			{
+				printf(" BWE Error at client\n");
+				return -1;
+			}
+		}
+
+        if(endfile)
+        {
+            break;
+        }
+
+        maxStreamLen = (stream_len > maxStreamLen)? stream_len:maxStreamLen;
+        packetCntr++;
+
+        hist[stream_len]++;
+        if(averageFile != NULL)
+        {
+            tmpSumStreamLen += stream_len;
+            if(packetCntr == 100)
+            {
+                // kbps
+                fprintf(averageFile, "%8.3f ", (double)tmpSumStreamLen * 8.0 / (30.0 * packetCntr));
+                packetCntr = 0;
+                tmpSumStreamLen = 0;
+            }
+        }
+
+		if(onlyEncode)
+		{
+                  WebRtc_UWord8 auxUW8;
+                  auxUW8 = (WebRtc_UWord8)(((stream_len & 0x7F00) >> 8) & 0xFF);
+                  if (fwrite(&auxUW8, sizeof(WebRtc_UWord8), 1, outp) != 1) {
+                    return -1;
+                  }
+
+                  auxUW8 = (WebRtc_UWord8)(stream_len & 0xFF);
+                  if (fwrite(&auxUW8, sizeof(WebRtc_UWord8), 1, outp) != 1) {
+                    return -1;
+                  }
+                  if (fwrite(payload, 1, stream_len,
+                             outp) != (size_t)stream_len) {
+                    return -1;
+                  }
+		}
+		else
+		{
+
+			//======================= iSAC decoding ===========================
+
+			if((rand() % 100) < packetLossPercent)
+			{
+				declen = WebRtcIsac_DecodeRcu(ISAC_main_inst, payloadRCU,
+					rcuStreamLen, decoded, speechType);
+				lostPacketCntr++;
+			}
+			else
+			{
+				declen = WebRtcIsac_Decode(ISAC_main_inst, payload,
+					stream_len, decoded, speechType);
+			}
+			if(declen <= 0)
+			{
+				//errType=WebRtcIsac_GetErrorCode(ISAC_main_inst);
+				fprintf(stderr,"\nError in decoder.\n");
+				getchar();
+				exit(1);
+			}
+
+			// Write decoded speech frame to file
+                        if (fwrite(decoded, sizeof(WebRtc_Word16),
+                                   declen, outp) != (size_t)declen) {
+                          return -1;
+                        }
+			cur_framesmpls = declen;
+		}
+        // Update Statistics
+        framecnt++;
+        totalsmpls += cur_framesmpls;
+        if(stream_len > 0)
+        {
+            totalbits += 8 * stream_len;
+        }
+        if(rcuStreamLen > 0)
+        {
+            totalBitsRCU += 8 * rcuStreamLen;
+        }
+    }
+
+    rate =    ((double)totalbits    * (sampFreqKHz)) / (double)totalsmpls;
+    rateRCU = ((double)totalBitsRCU * (sampFreqKHz)) / (double)totalsmpls;
+
+    printf("\n\n");
+    printf("Sampling Rate......................... %d kHz\n", sampFreqKHz);
+    printf("Payload Limit......................... %d bytes \n", payloadLimit);
+    printf("Rate Limit............................ %d bits/sec \n", rateLimit);
+
+#ifdef WIN32
+#ifdef HAVE_DEBUG_INFO
+    rateLB = ((double)debugInfo.lbBytes * 8. *
+              (sampFreqKHz)) / (double)totalsmpls;
+    rateUB = ((double)debugInfo.ubBytes * 8. *
+              (sampFreqKHz)) / (double)totalsmpls;
+#endif
+
+    fprintf(bitrateFile, "%d  %10u     %d     %6.3f  %6.3f    %6.3f\n",
+        sampFreqKHz,
+        framecnt,
+        bottleneck,
+        rateLB,
+        rateUB,
+        rate);
+    fclose(bitrateFile);
+#endif   // WIN32
+
+    printf("\n");
+    printf("Measured bit-rate..................... %0.3f kbps\n", rate);
+    printf("Measured RCU bit-ratre................ %0.3f kbps\n", rateRCU);
+    printf("Maximum bit-rate/payloadsize.......... %0.3f / %d\n",
+        maxStreamLen * 8 / 0.03, maxStreamLen);
+    printf("Measured packet-loss.................. %0.1f%% \n",
+        100.0f * (float)lostPacketCntr / (float)packetCntr);
+
+//#ifdef HAVE_DEBUG_INFO
+//    printf("Measured lower-band bit-rate.......... %0.3f kbps (%.0f%%)\n",
+//        rateLB, (double)(rateLB) * 100. /(double)(rate));
+//    printf("Measured upper-band bit-rate.......... %0.3f kbps (%.0f%%)\n",
+//        rateUB, (double)(rateUB) * 100. /(double)(rate));
+//
+//    printf("Maximum payload lower-band............ %d bytes (%0.3f kbps)\n",
+//        debugInfo.maxPayloadLB, debugInfo.maxPayloadLB * 8.0 / 0.03);
+//    printf("Maximum payload upper-band............ %d bytes (%0.3f kbps)\n",
+//        debugInfo.maxPayloadUB, debugInfo.maxPayloadUB * 8.0 / 0.03);
+//#endif
+
+    printf("\n");
+
+    /* Runtime statistics */
+#ifdef WIN32
+    runtime = (double)(clock()/(double)CLOCKS_PER_SEC-starttime);
+    length_file = ((double)framecnt*(double)declen/(sampFreqKHz*1000));
+    printf("Length of speech file................ %.1f s\n", length_file);
+    printf("Time to run iSAC..................... %.2f s (%.2f %% of realtime)\n\n",
+        runtime, (100*runtime/length_file));
+#endif
+    printf("\n\n_______________________________________________\n");
+
+    if(histFile != NULL)
+    {
+        int n;
+        for(n = 0; n < 600; n++)
+        {
+            fprintf(histFile, "%6d ", hist[n]);
+        }
+        fprintf(histFile, "\n");
+        fclose(histFile);
+    }
+    if(averageFile != NULL)
+    {
+        if(packetCntr > 0)
+        {
+            fprintf(averageFile, "%8.3f ", (double)tmpSumStreamLen * 8.0 / (30.0 * packetCntr));
+        }
+        fprintf(averageFile, "\n");
+        fclose(averageFile);
+    }
+
+    fclose(inp);
+    fclose(outp);
+
+    WebRtcIsac_Free(ISAC_main_inst);
+
+
+#ifdef CHANGE_OUTPUT_NAME
+    {
+        char* p;
+        char myExt[50];
+        char bitRateStr[10];
+        char newOutName[500];
+        strcpy(newOutName, outname);
+
+        myExt[0] = '\0';
+        p = strchr(newOutName, '.');
+        if(p != NULL)
+        {
+            strcpy(myExt, p);
+            *p = '_';
+            p++;
+            *p = '\0';
+        }
+        else
+        {
+            strcat(newOutName, "_");
+        }
+        sprintf(bitRateStr, "%0.0fkbps", rate);
+        strcat(newOutName, bitRateStr);
+        strcat(newOutName, myExt);
+        rename(outname, newOutName);
+    }
+#endif
+    exit(0);
+}
+
+
+#ifdef HAVE_DEBUG_INFO
+int setupDebugStruct(debugStruct* str)
+{
+    str->prevPacketLost = 0;
+    str->currPacketLost = 0;
+
+    OPEN_FILE_WB(str->res0to4FilePtr,     "Res0to4.dat");
+    OPEN_FILE_WB(str->res4to8FilePtr,     "Res4to8.dat");
+    OPEN_FILE_WB(str->res8to12FilePtr,    "Res8to12.dat");
+    OPEN_FILE_WB(str->res8to16FilePtr,    "Res8to16.dat");
+
+    OPEN_FILE_WB(str->res0to4DecFilePtr,  "Res0to4Dec.dat");
+    OPEN_FILE_WB(str->res4to8DecFilePtr,  "Res4to8Dec.dat");
+    OPEN_FILE_WB(str->res8to12DecFilePtr, "Res8to12Dec.dat");
+    OPEN_FILE_WB(str->res8to16DecFilePtr, "Res8to16Dec.dat");
+
+    OPEN_FILE_WB(str->in0to4FilePtr,      "in0to4.dat");
+    OPEN_FILE_WB(str->in4to8FilePtr,      "in4to8.dat");
+    OPEN_FILE_WB(str->in8to12FilePtr,     "in8to12.dat");
+    OPEN_FILE_WB(str->in8to16FilePtr,     "in8to16.dat");
+
+    OPEN_FILE_WB(str->out0to4FilePtr,     "out0to4.dat");
+    OPEN_FILE_WB(str->out4to8FilePtr,     "out4to8.dat");
+    OPEN_FILE_WB(str->out8to12FilePtr,    "out8to12.dat");
+    OPEN_FILE_WB(str->out8to16FilePtr,    "out8to16.dat");
+    OPEN_FILE_WB(str->fftFilePtr,         "riFFT.dat");
+    OPEN_FILE_WB(str->fftDecFilePtr,      "riFFTDec.dat");
+
+    OPEN_FILE_WB(str->arrivalTime,        NULL/*"ArivalTime.dat"*/);
+    str->lastArrivalTime = 0;
+
+    str->maxPayloadLB = 0;
+    str->maxPayloadUB = 0;
+    str->lbBytes = 0;
+    str->ubBytes = 0;
+
+    return 0;
+};
+#endif
diff --git a/src/modules/audio_coding/codecs/isac/main/util/utility.c b/src/modules/audio_coding/codecs/isac/main/util/utility.c
new file mode 100644
index 0000000..0a2256a
--- /dev/null
+++ b/src/modules/audio_coding/codecs/isac/main/util/utility.c
@@ -0,0 +1,178 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+#include <math.h>
+#include "utility.h"
+
+/* function for reading audio data from PCM file */
+int
+readframe(
+    short* data,
+    FILE*  inp,
+    int    length)
+{
+    short k, rlen, status = 0;
+	unsigned char* ptrUChar;
+	ptrUChar = (unsigned char*)data;
+
+    rlen = (short)fread(data, sizeof(short), length, inp);
+    if (rlen < length) {
+        for (k = rlen; k < length; k++)
+            data[k] = 0;
+        status = 1;
+    }
+
+	// Assuming that our PCM files are written in Intel machines
+	for(k = 0; k < length; k++)
+	{
+		data[k] = (short)ptrUChar[k<<1] | ((((short)ptrUChar[(k<<1) + 1]) << 8) & 0xFF00);
+	}
+
+    return status;
+}
+
+short
+readSwitch(
+    int   argc,
+    char* argv[],
+    char* strID)
+{
+    short n;
+    for(n = 0; n < argc; n++)
+    {
+        if(strcmp(argv[n], strID) == 0)
+        {
+            return 1;
+        }
+    }
+    return 0;
+}
+
+double
+readParamDouble(
+    int    argc,
+    char*  argv[],
+    char*  strID,
+    double defaultVal)
+{
+    double returnVal = defaultVal;
+    short n;
+    for(n = 0; n < argc; n++)
+    {
+        if(strcmp(argv[n], strID) == 0)
+        {
+            n++;
+            if(n < argc)
+            {
+                returnVal = atof(argv[n]);
+            }
+            break;
+        }
+    }
+    return returnVal;
+}
+
+int
+readParamInt(
+    int   argc,
+    char* argv[],
+    char* strID,
+    int   defaultVal)
+{
+    int returnVal = defaultVal;
+    short n;
+    for(n = 0; n < argc; n++)
+    {
+        if(strcmp(argv[n], strID) == 0)
+        {
+            n++;
+            if(n < argc)
+            {
+                returnVal = atoi(argv[n]);
+            }
+            break;
+        }
+    }
+    return returnVal;
+}
+
+int
+readParamString(
+    int   argc,
+    char* argv[],
+    char* strID,
+    char* stringParam,
+    int   maxSize)
+{
+    int paramLenght = 0;
+    short n;
+    for(n = 0; n < argc; n++)
+    {
+        if(strcmp(argv[n], strID) == 0)
+        {
+            n++;
+            if(n < argc)
+            {
+                strncpy(stringParam, argv[n], maxSize);
+                paramLenght = (int)strlen(argv[n]);
+            }
+            break;
+        }
+    }
+    return paramLenght;
+}
+
+void
+get_arrival_time(
+    int              current_framesamples,   /* samples */
+    int              packet_size,            /* bytes */
+    int              bottleneck,             /* excluding headers; bits/s */
+    BottleNeckModel* BN_data,
+    short            senderSampFreqHz,
+    short            receiverSampFreqHz)
+{
+    unsigned int travelTimeMs;
+	const int headerSizeByte = 35;
+
+	int headerRate;
+
+    BN_data->whenPackGeneratedMs += (current_framesamples / (senderSampFreqHz / 1000));
+
+	headerRate = headerSizeByte * 8 * senderSampFreqHz / current_framesamples;     /* bits/s */
+
+	/* everything in samples */
+	BN_data->sample_count = BN_data->sample_count + current_framesamples;
+
+    //travelTimeMs = ((packet_size + HeaderSize) * 8 * sampFreqHz) /
+    //    (bottleneck + HeaderRate)
+    travelTimeMs = (unsigned int)floor((double)((packet_size + headerSizeByte) * 8 * 1000)
+        / (double)(bottleneck + headerRate) + 0.5);
+
+    if(BN_data->whenPrevPackLeftMs > BN_data->whenPackGeneratedMs)
+    {
+        BN_data->whenPrevPackLeftMs += travelTimeMs;
+    }
+    else
+    {
+        BN_data->whenPrevPackLeftMs = BN_data->whenPackGeneratedMs +
+            travelTimeMs;
+    }
+
+    BN_data->arrival_time = (BN_data->whenPrevPackLeftMs *
+        (receiverSampFreqHz / 1000));
+
+//	if (BN_data->arrival_time < BN_data->sample_count)
+//		BN_data->arrival_time = BN_data->sample_count;
+
+	BN_data->rtp_number++;
+}
diff --git a/src/modules/audio_coding/codecs/isac/main/util/utility.h b/src/modules/audio_coding/codecs/isac/main/util/utility.h
new file mode 100644
index 0000000..f9fba94
--- /dev/null
+++ b/src/modules/audio_coding/codecs/isac/main/util/utility.h
@@ -0,0 +1,144 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_UTIL_UTILITY_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_UTIL_UTILITY_H_
+
+#include <stdlib.h>
+#include <stdio.h>
+
+#if defined(__cplusplus)
+extern "C" {
+#endif
+
+#define OPEN_FILE_WB(filePtr, fullPath)                         \
+  do                                                            \
+  {                                                             \
+    if(fullPath != NULL)                                        \
+    {                                                           \
+      filePtr = fopen(fullPath, "wb");                          \
+      if(filePtr == NULL)                                       \
+      {                                                         \
+        printf("could not open %s to write to.", fullPath);     \
+        return -1;                                              \
+      }                                                         \
+    }                                                           \
+    else                                                        \
+    {                                                           \
+      filePtr = NULL;                                           \
+    }                                                           \
+  }while(0)
+
+#define OPEN_FILE_AB(filePtr, fullPath)                         \
+  do                                                            \
+  {                                                             \
+    if(fullPath != NULL)                                        \
+    {                                                           \
+      filePtr = fopen(fullPath, "ab");                          \
+      if(filePtr == NULL)                                       \
+      {                                                         \
+        printf("could not open %s to write to.", fullPath);     \
+        return -1;                                              \
+      }                                                         \
+    }                                                           \
+    else                                                        \
+    {                                                           \
+      filePtr = NULL;                                           \
+    }                                                           \
+  }while(0)
+
+#define OPEN_FILE_RB(filePtr, fullPath)                         \
+  do                                                            \
+  {                                                             \
+    if(fullPath != NULL)                                        \
+    {                                                           \
+      filePtr = fopen(fullPath, "rb");                          \
+      if(filePtr == NULL)                                       \
+      {                                                         \
+        printf("could not open %s to read from.", fullPath);    \
+        return -1;                                              \
+      }                                                         \
+    }                                                           \
+    else                                                        \
+    {                                                           \
+      filePtr = NULL;                                           \
+    }                                                           \
+  }while(0)
+
+#define WRITE_FILE_D(bufferPtr, len, filePtr)           \
+  do                                                    \
+  {                                                     \
+    if(filePtr != NULL)                                 \
+    {                                                   \
+      double dummy[1000];                               \
+      int cntr;                                         \
+      for(cntr = 0; cntr < (len); cntr++)               \
+      {                                                 \
+        dummy[cntr] = (double)bufferPtr[cntr];          \
+      }                                                 \
+      fwrite(dummy, sizeof(double), len, filePtr);      \
+      fflush(filePtr);                                  \
+    }                                                   \
+  } while(0)
+
+  typedef struct {
+    unsigned int whenPackGeneratedMs;
+    unsigned int whenPrevPackLeftMs;
+    unsigned int sendTimeMs ;          /* milisecond */
+    unsigned int arrival_time;         /* samples */
+    unsigned int sample_count;         /* samples, also used as "send time stamp" */
+    unsigned int rtp_number;
+  } BottleNeckModel;
+
+  void get_arrival_time(
+      int              current_framesamples,   /* samples */
+      int              packet_size,            /* bytes */
+      int              bottleneck,             /* excluding headers; bits/s */
+      BottleNeckModel* BN_data,
+      short            senderSampFreqHz,
+      short            receiverSampFreqHz);
+
+  /* function for reading audio data from PCM file */
+  int readframe(
+      short* data,
+      FILE*  inp,
+      int    length);
+
+  short readSwitch(
+      int   argc,
+      char* argv[],
+      char* strID);
+
+  double readParamDouble(
+      int    argc,
+      char*  argv[],
+      char*  strID,
+      double defaultVal);
+
+  int readParamInt(
+      int   argc,
+      char* argv[],
+      char* strID,
+      int   defaultVal);
+
+  int readParamString(
+      int   argc,
+      char* argv[],
+      char* strID,
+      char* stringParam,
+      int   maxSize);
+
+#if defined(__cplusplus)
+}
+#endif
+
+
+
+#endif
diff --git a/src/modules/audio_coding/codecs/pcm16b/include/pcm16b.h b/src/modules/audio_coding/codecs/pcm16b/include/pcm16b.h
new file mode 100644
index 0000000..e3cac4d
--- /dev/null
+++ b/src/modules/audio_coding/codecs/pcm16b/include/pcm16b.h
@@ -0,0 +1,106 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_MAIN_INTERFACE_PCM16B_H_
+#define WEBRTC_MODULES_AUDIO_CODING_CODECS_PCM16B_MAIN_INTERFACE_PCM16B_H_
+/*
+ * Define the fixpoint numeric formats
+ */
+
+#include "typedefs.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/****************************************************************************
+ * WebRtcPcm16b_EncodeW16(...)
+ *
+ * "Encode" a sample vector to 16 bit linear (Encoded standard is big endian)
+ *
+ * Input:
+ *		- speechIn16b	: Input speech vector
+ *		- len			: Number of samples in speech vector
+ *
+ * Output:
+ *		- speechOut16b	: Encoded data vector (big endian 16 bit)
+ *
+ * Returned value		: Size in bytes of speechOut16b
+ */
+
+WebRtc_Word16 WebRtcPcm16b_EncodeW16(WebRtc_Word16 *speechIn16b,
+                                     WebRtc_Word16 len,
+                                     WebRtc_Word16 *speechOut16b);
+
+/****************************************************************************
+ * WebRtcPcm16b_Encode(...)
+ *
+ * "Encode" a sample vector to 16 bit linear (Encoded standard is big endian)
+ *
+ * Input:
+ *		- speech16b		: Input speech vector
+ *		- len			: Number of samples in speech vector
+ *
+ * Output:
+ *		- speech8b		: Encoded data vector (big endian 16 bit)
+ *
+ * Returned value		: Size in bytes of speech8b
+ */
+
+WebRtc_Word16 WebRtcPcm16b_Encode(WebRtc_Word16 *speech16b,
+                                  WebRtc_Word16 len,
+                                  unsigned char *speech8b);
+
+/****************************************************************************
+ * WebRtcPcm16b_DecodeW16(...)
+ *
+ * "Decode" a vector to 16 bit linear (Encoded standard is big endian)
+ *
+ * Input:
+ *		- speechIn16b	: Encoded data vector (big endian 16 bit)
+ *		- len			: Number of bytes in speechIn16b
+ *
+ * Output:
+ *		- speechOut16b	: Decoded speech vector
+ *
+ * Returned value		: Samples in speechOut16b
+ */
+
+WebRtc_Word16 WebRtcPcm16b_DecodeW16(void *inst,
+                                     WebRtc_Word16 *speechIn16b,
+                                     WebRtc_Word16 len,
+                                     WebRtc_Word16 *speechOut16b,
+                                     WebRtc_Word16* speechType);
+
+/****************************************************************************
+ * WebRtcPcm16b_Decode(...)
+ *
+ * "Decode" a vector to 16 bit linear (Encoded standard is big endian)
+ *
+ * Input:
+ *		- speech8b		: Encoded data vector (big endian 16 bit)
+ *		- len			: Number of bytes in speech8b
+ *
+ * Output:
+ *		- speech16b		: Decoded speech vector
+ *
+ * Returned value		: Samples in speech16b
+ */
+
+
+WebRtc_Word16 WebRtcPcm16b_Decode(unsigned char *speech8b,
+                                  WebRtc_Word16 len,
+                                  WebRtc_Word16 *speech16b);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* PCM16B */
diff --git a/src/modules/audio_coding/codecs/pcm16b/pcm16b.c b/src/modules/audio_coding/codecs/pcm16b/pcm16b.c
new file mode 100644
index 0000000..0cff5dd
--- /dev/null
+++ b/src/modules/audio_coding/codecs/pcm16b/pcm16b.c
@@ -0,0 +1,105 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include "pcm16b.h"
+
+#include <stdlib.h>
+
+#include "typedefs.h"
+
+#ifdef WEBRTC_BIG_ENDIAN
+#include "signal_processing_library.h"
+#endif
+
+#define HIGHEND 0xFF00
+#define LOWEND    0xFF
+
+
+
+/* Encoder with WebRtc_Word16 Output */
+WebRtc_Word16 WebRtcPcm16b_EncodeW16(WebRtc_Word16 *speechIn16b,
+                                     WebRtc_Word16 len,
+                                     WebRtc_Word16 *speechOut16b)
+{
+#ifdef WEBRTC_BIG_ENDIAN
+    WEBRTC_SPL_MEMCPY_W16(speechOut16b, speechIn16b, len);
+#else
+    int i;
+    for (i=0;i<len;i++) {
+        speechOut16b[i]=(((WebRtc_UWord16)speechIn16b[i])>>8)|((((WebRtc_UWord16)speechIn16b[i])<<8)&0xFF00);
+    }
+#endif
+    return(len<<1);
+}
+
+
+/* Encoder with char Output (old version) */
+WebRtc_Word16 WebRtcPcm16b_Encode(WebRtc_Word16 *speech16b,
+                                  WebRtc_Word16 len,
+                                  unsigned char *speech8b)
+{
+    WebRtc_Word16 samples=len*2;
+    WebRtc_Word16 pos;
+    WebRtc_Word16 short1;
+    WebRtc_Word16 short2;
+    for (pos=0;pos<len;pos++) {
+        short1=HIGHEND & speech16b[pos];
+        short2=LOWEND & speech16b[pos];
+        short1=short1>>8;
+        speech8b[pos*2]=(unsigned char) short1;
+        speech8b[pos*2+1]=(unsigned char) short2;
+    }
+    return(samples);
+}
+
+
+/* Decoder with WebRtc_Word16 Input instead of char when the WebRtc_Word16 Encoder is used */
+WebRtc_Word16 WebRtcPcm16b_DecodeW16(void *inst,
+                                     WebRtc_Word16 *speechIn16b,
+                                     WebRtc_Word16 len,
+                                     WebRtc_Word16 *speechOut16b,
+                                     WebRtc_Word16* speechType)
+{
+#ifdef WEBRTC_BIG_ENDIAN
+    WEBRTC_SPL_MEMCPY_W8(speechOut16b, speechIn16b, ((len*sizeof(WebRtc_Word16)+1)>>1));
+#else
+    int i;
+    int samples=len>>1;
+
+    for (i=0;i<samples;i++) {
+        speechOut16b[i]=(((WebRtc_UWord16)speechIn16b[i])>>8)|(((WebRtc_UWord16)(speechIn16b[i]&0xFF))<<8);
+    }
+#endif
+
+    *speechType=1;
+
+    // Avoid warning.
+    (void)(inst = NULL);
+
+    return(len>>1);
+}
+
+/* "old" version of the decoder that uses char as input (not used in NetEq any more) */
+WebRtc_Word16 WebRtcPcm16b_Decode(unsigned char *speech8b,
+                                  WebRtc_Word16 len,
+                                  WebRtc_Word16 *speech16b)
+{
+    WebRtc_Word16 samples=len>>1;
+    WebRtc_Word16 pos;
+    WebRtc_Word16 shortval;
+    for (pos=0;pos<samples;pos++) {
+        shortval=((unsigned short) speech8b[pos*2]);
+        shortval=(shortval<<8)&HIGHEND;
+        shortval=shortval|(((unsigned short) speech8b[pos*2+1])&LOWEND);
+        speech16b[pos]=shortval;
+    }
+    return(samples);
+}
diff --git a/src/modules/audio_coding/codecs/pcm16b/pcm16b.gypi b/src/modules/audio_coding/codecs/pcm16b/pcm16b.gypi
new file mode 100644
index 0000000..9e196b8
--- /dev/null
+++ b/src/modules/audio_coding/codecs/pcm16b/pcm16b.gypi
@@ -0,0 +1,52 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'PCM16B',
+      'type': '<(library)',
+      'include_dirs': [
+        'include',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include',
+        ],
+      },
+      'sources': [
+        'include/pcm16b.h',
+        'pcm16b.c',
+      ],
+    },
+  ], # targets
+  'conditions': [
+    ['include_tests==1', {
+      'targets': [
+        {
+          'target_name': 'pcm16b_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'PCM16B',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+          ],
+          'sources': [
+            'pcm16b_unittest.cc',
+          ],
+        }, # PCM16B_unittests
+      ], # targets
+    }], # include_tests
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/modules/audio_coding/codecs/pcm16b/pcm16b_unittest.cc b/src/modules/audio_coding/codecs/pcm16b/pcm16b_unittest.cc
new file mode 100644
index 0000000..eb910b3
--- /dev/null
+++ b/src/modules/audio_coding/codecs/pcm16b/pcm16b_unittest.cc
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Empty test just to get code coverage metrics for this dir.
+ */
+#include "pcm16b.h"
+#include "gtest/gtest.h"
+
+TEST(Pcm16bTest, EmptyTestToGetCodeCoverage) {}
diff --git a/src/modules/audio_coding/main/OWNERS b/src/modules/audio_coding/main/OWNERS
new file mode 100644
index 0000000..e1e6256
--- /dev/null
+++ b/src/modules/audio_coding/main/OWNERS
@@ -0,0 +1,3 @@
+tina.legrand@webrtc.org
+turaj@webrtc.org
+jan.skoglund@webrtc.org
diff --git a/src/modules/audio_coding/main/interface/audio_coding_module.h b/src/modules/audio_coding/main/interface/audio_coding_module.h
new file mode 100644
index 0000000..687d9fe
--- /dev/null
+++ b/src/modules/audio_coding/main/interface/audio_coding_module.h
@@ -0,0 +1,895 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_INTERFACE_AUDIO_CODING_MODULE_H
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_INTERFACE_AUDIO_CODING_MODULE_H
+
+#include "audio_coding_module_typedefs.h"
+#include "module.h"
+#include "module_common_types.h"
+
+namespace webrtc {
+
+// forward declarations
+struct CodecInst;
+
+#define WEBRTC_10MS_PCM_AUDIO 960 // 16 bits super wideband 48 Khz
+
+// Callback class used for sending data ready to be packetized
+class AudioPacketizationCallback {
+ public:
+  virtual ~AudioPacketizationCallback() {}
+
+  virtual WebRtc_Word32 SendData(
+      FrameType frameType, WebRtc_UWord8 payloadType, WebRtc_UWord32 timeStamp,
+      const WebRtc_UWord8* payloadData, WebRtc_UWord16 payloadSize,
+      const RTPFragmentationHeader* fragmentation) = 0;
+};
+
+// Callback class used for inband Dtmf detection
+class AudioCodingFeedback {
+ public:
+  virtual ~AudioCodingFeedback() {}
+
+  virtual WebRtc_Word32 IncomingDtmf(const WebRtc_UWord8 digitDtmf,
+                                     const bool end) = 0;
+};
+
+// Callback class used for reporting VAD decision
+class ACMVADCallback {
+ public:
+  virtual ~ACMVADCallback() {}
+
+  virtual WebRtc_Word32 InFrameType(WebRtc_Word16 frameType) = 0;
+};
+
+// Callback class used for reporting receiver statistics
+class ACMVQMonCallback {
+ public:
+  virtual ~ACMVQMonCallback() {}
+
+  virtual WebRtc_Word32 NetEqStatistics(
+      const WebRtc_Word32 id, // current ACM id
+      const WebRtc_UWord16 MIUsValid, // valid voice duration in ms
+      const WebRtc_UWord16 MIUsReplaced, // concealed voice duration in ms
+      const WebRtc_UWord8 eventFlags, // concealed voice flags
+      const WebRtc_UWord16 delayMS) = 0; // average delay in ms
+};
+
+class AudioCodingModule: public Module {
+ protected:
+  AudioCodingModule() {}
+  virtual ~AudioCodingModule() {}
+
+ public:
+  ///////////////////////////////////////////////////////////////////////////
+  // Creation and destruction of a ACM
+  //
+  static AudioCodingModule* Create(const WebRtc_Word32 id);
+
+  static void Destroy(AudioCodingModule* module);
+
+  ///////////////////////////////////////////////////////////////////////////
+  //   Utility functions
+  //
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_UWord8 NumberOfCodecs()
+  // Returns number of supported codecs.
+  //
+  // Return value:
+  //   number of supported codecs.
+  ///
+  static WebRtc_UWord8 NumberOfCodecs();
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 Codec()
+  // Get supported codec with list number.
+  //
+  // Input:
+  //   -listId             : list number.
+  //
+  // Output:
+  //   -codec              : a structure where the parameters of the codec,
+  //                         given by list number is written to.
+  //
+  // Return value:
+  //   -1 if the list number (listId) is invalid.
+  //    0 if succeeded.
+  //
+  static WebRtc_Word32 Codec(const WebRtc_UWord8 listId, CodecInst& codec);
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 Codec()
+  // Get supported codec with the given codec name, sampling frequency, and
+  // a given number of channels.
+  //
+  // Input:
+  //   -payload_name       : name of the codec.
+  //   -sampling_freq_hz   : sampling frequency of the codec. Note! for RED
+  //                         a sampling frequency of -1 is a valid input.
+  //   -channels           : number of channels ( 1 - mono, 2 - stereo).
+  //
+  // Output:
+  //   -codec              : a structure where the function returns the
+  //                         default parameters of the codec.
+  //
+  // Return value:
+  //   -1 if the list number (listId) is invalid.
+  //    0 if succeeded.
+  //
+  static WebRtc_Word32 Codec(const char* payload_name, CodecInst& codec,
+                             int sampling_freq_hz, int channels);
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 Codec()
+  //
+  // Returns the list number of the given codec name, sampling frequency, and
+  // a given number of channels.
+  //
+  // Input:
+  //   -payload_name        : name of the codec.
+  //   -sampling_freq_hz    : sampling frequency of the codec. Note! for RED
+  //                          a sampling frequency of -1 is a valid input.
+  //   -channels            : number of channels ( 1 - mono, 2 - stereo).
+  //
+  // Return value:
+  //   if the codec is found, the index of the codec in the list,
+  //   -1 if the codec is not found.
+  //
+  static WebRtc_Word32 Codec(const char* payload_name, int sampling_freq_hz,
+                             int channels);
+
+  ///////////////////////////////////////////////////////////////////////////
+  // bool IsCodecValid()
+  // Checks the validity of the parameters of the given codec.
+  //
+  // Input:
+  //   -codec              : the structur which keeps the parameters of the
+  //                         codec.
+  //
+  // Reurn value:
+  //   true if the parameters are valid,
+  //   false if any parameter is not valid.
+  //
+  static bool IsCodecValid(const CodecInst& codec);
+
+  ///////////////////////////////////////////////////////////////////////////
+  //   Sender
+  //
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 InitializeSender()
+  // Any encoder-related state of ACM will be initialized to the
+  // same state when ACM is created. This will not interrupt or
+  // effect decoding functionality of ACM. ACM will lose all the
+  // encoding-related settings by calling this function.
+  // For instance, a send codec has to be registered again.
+  //
+  // Return value:
+  //   -1 if failed to initialize,
+  //    0 if succeeded.
+  //
+  virtual WebRtc_Word32 InitializeSender() = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 ResetEncoder()
+  // This API resets the states of encoder. All the encoder settings, such as
+  // send-codec or VAD/DTX, will be preserved.
+  //
+  // Return value:
+  //   -1 if failed to initialize,
+  //    0 if succeeded.
+  //
+  virtual WebRtc_Word32 ResetEncoder() = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 RegisterSendCodec()
+  // Registers a codec, specified by "sendCodec," as sending codec.
+  // This API can be called multiple of times to register Codec. The last codec
+  // registered overwrites the previous ones.
+  // The API can also be used to change payload type for CNG and RED, which are
+  // registered by default to default payload types.
+  // Note that registering CNG and RED won't overwrite speech codecs.
+  // This API can be called to set/change the send payload-type, frame-size
+  // or encoding rate (if applicable for the codec).
+  //
+  // Note: If a stereo codec is registered as send codec, VAD/DTX will
+  // automatically be turned off, since it is not supported for stereo sending.
+  //
+  // Input:
+  //   -sendCodec          : Parameters of the codec to be registered, c.f.
+  //                         common_types.h for the definition of
+  //                         CodecInst.
+  //
+  // Return value:
+  //   -1 if failed to initialize,
+  //    0 if succeeded.
+  //
+  virtual WebRtc_Word32 RegisterSendCodec(const CodecInst& sendCodec) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 SendCodec()
+  // Get parameters for the codec currently registered as send codec.
+  //
+  // Output:
+  //   -currentSendCodec          : parameters of the send codec.
+  //
+  // Return value:
+  //   -1 if failed to get send codec,
+  //    0 if succeeded.
+  //
+  virtual WebRtc_Word32 SendCodec(CodecInst& currentSendCodec) const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 SendFrequency()
+  // Get the sampling frequency of the current encoder in Hertz.
+  //
+  // Return value:
+  //   positive; sampling frequency [Hz] of the current encoder.
+  //   -1 if an error has happened.
+  //
+  virtual WebRtc_Word32 SendFrequency() const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 Bitrate()
+  // Get encoding bit-rate in bits per second.
+  //
+  // Return value:
+  //   positive; encoding rate in bits/sec,
+  //   -1 if an error is happened.
+  //
+  virtual WebRtc_Word32 SendBitrate() const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 SetReceivedEstimatedBandwidth()
+  // Set available bandwidth [bits/sec] of the up-link channel.
+  // This information is used for traffic shaping, and is currently only
+  // supported if iSAC is the send codec.
+  //
+  // Input:
+  //   -bw                 : bandwidth in bits/sec estimated for
+  //                         up-link.
+  // Return value
+  //   -1 if error occurred in setting the bandwidth,
+  //    0 bandwidth is set successfully.
+  //
+  virtual WebRtc_Word32 SetReceivedEstimatedBandwidth(
+      const WebRtc_Word32 bw) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 RegisterTransportCallback()
+  // Register a transport callback which will be called to deliver
+  // the encoded buffers whenever Process() is called and a
+  // bit-stream is ready.
+  //
+  // Input:
+  //   -transport          : pointer to the callback class
+  //                         transport->SendData() is called whenever
+  //                         Process() is called and bit-stream is ready
+  //                         to deliver.
+  //
+  // Return value:
+  //   -1 if the transport callback could not be registered
+  //    0 if registration is successful.
+  //
+  virtual WebRtc_Word32 RegisterTransportCallback(
+      AudioPacketizationCallback* transport) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 Add10MsData()
+  // Add 10MS of raw (PCM) audio data to the encoder. If the sampling
+  // frequency of the audio does not match the sampling frequency of the
+  // current encoder ACM will resample the audio.
+  //
+  // Input:
+  //   -audioFrame         : the input audio frame, containing raw audio
+  //                         sampling frequency etc.,
+  //                         c.f. module_common_types.h for definition of
+  //                         AudioFrame.
+  //
+  // Return value:
+  //      0   successfully added the frame.
+  //     -1   some error occurred and data is not added.
+  //   < -1   to add the frame to the buffer n samples had to be
+  //          overwritten, -n is the return value in this case.
+  //
+  virtual WebRtc_Word32 Add10MsData(const AudioFrame& audioFrame) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // (FEC) Forward Error Correction
+  //
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 SetFECStatus(const bool enable)
+  // configure FEC status i.e. on/off.
+  //
+  // RFC 2198 describes a solution which has a single payload type which
+  // signifies a packet with redundancy. That packet then becomes a container,
+  // encapsulating multiple payloads into a single RTP packet.
+  // Such a scheme is flexible, since any amount of redundancy may be
+  // encapsulated within a single packet.  There is, however, a small overhead
+  // since each encapsulated payload must be preceded by a header indicating
+  // the type of data enclosed.
+  //
+  // This means that FEC is actually a RED scheme.
+  //
+  // Input:
+  //   -enableFEC          : if true FEC is enabled, otherwise FEC is
+  //                         disabled.
+  //
+  // Return value:
+  //   -1 if failed to set FEC status,
+  //    0 if succeeded.
+  //
+  virtual WebRtc_Word32 SetFECStatus(const bool enableFEC) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // bool FECStatus()
+  // Get FEC status
+  //
+  // Return value
+  //   true if FEC is enabled,
+  //   false if FEC is disabled.
+  //
+  virtual bool FECStatus() const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  //   (VAD) Voice Activity Detection
+  //
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 SetVAD()
+  // If DTX is enabled & the codec does not have internal DTX/VAD
+  // WebRtc VAD will be automatically enabled and 'enableVAD' is ignored.
+  //
+  // If DTX is disabled but VAD is enabled no DTX packets are send,
+  // regardless of whether the codec has internal DTX/VAD or not. In this
+  // case, WebRtc VAD is running to label frames as active/in-active.
+  //
+  // NOTE! VAD/DTX is not supported when sending stereo.
+  //
+  // Inputs:
+  //   -enableDTX          : if true DTX is enabled,
+  //                         otherwise DTX is disabled.
+  //   -enableVAD          : if true VAD is enabled,
+  //                         otherwise VAD is disabled.
+  //   -vadMode            : determines the aggressiveness of VAD. A more
+  //                         aggressive mode results in more frames labeled
+  //                         as in-active, c.f. definition of
+  //                         ACMVADMode in audio_coding_module_typedefs.h
+  //                         for valid values.
+  //
+  // Return value:
+  //   -1 if failed to set up VAD/DTX,
+  //    0 if succeeded.
+  //
+  virtual WebRtc_Word32 SetVAD(const bool enableDTX = true,
+                               const bool enableVAD = false,
+                               const ACMVADMode vadMode = VADNormal) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 VAD()
+  // Get VAD status.
+  //
+  // Outputs:
+  //   -dtxEnabled         : is set to true if DTX is enabled, otherwise
+  //                         is set to false.
+  //   -vadEnabled         : is set to true if VAD is enabled, otherwise
+  //                         is set to false.
+  //   -vadMode            : is set to the current aggressiveness of VAD.
+  //
+  // Return value:
+  //   -1 if fails to retrieve the setting of DTX/VAD,
+  //    0 if succeeeded.
+  //
+  virtual WebRtc_Word32 VAD(bool& dtxEnabled, bool& vadEnabled,
+                            ACMVADMode& vadMode) const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 ReplaceInternalDTXWithWebRtc()
+  // Used to replace codec internal DTX scheme with WebRtc. This is only
+  // supported for G729, where this call replaces AnnexB with WebRtc DTX.
+  //
+  // Input:
+  //   -useWebRtcDTX         : if false (default) the codec built-in DTX/VAD
+  //                         scheme is used, otherwise the internal DTX is
+  //                         replaced with WebRtc DTX/VAD.
+  //
+  // Return value:
+  //   -1 if failed to replace codec internal DTX with WebRtc,
+  //    0 if succeeded.
+  //
+  virtual WebRtc_Word32 ReplaceInternalDTXWithWebRtc(
+      const bool useWebRtcDTX = false) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 IsInternalDTXReplacedWithWebRtc()
+  // Get status if the codec internal DTX (when such exists) is replaced with
+  // WebRtc DTX. This is only supported for G729.
+  //
+  // Output:
+  //   -usesWebRtcDTX        : is set to true if the codec internal DTX is
+  //                         replaced with WebRtc DTX/VAD, otherwise it is set
+  //                         to false.
+  //
+  // Return value:
+  //   -1 if failed to determine if codec internal DTX is replaced with WebRtc,
+  //    0 if succeeded.
+  //
+  virtual WebRtc_Word32 IsInternalDTXReplacedWithWebRtc(
+      bool& usesWebRtcDTX) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 RegisterVADCallback()
+  // Call this method to register a callback function which is called
+  // any time that ACM encounters an empty frame. That is a frame which is
+  // recognized inactive. Depending on the codec WebRtc VAD or internal codec
+  // VAD is employed to identify a frame as active/inactive.
+  //
+  // Input:
+  //   -vadCallback        : pointer to a callback function.
+  //
+  // Return value:
+  //   -1 if failed to register the callback function.
+  //    0 if the callback function is registered successfully.
+  //
+  virtual WebRtc_Word32 RegisterVADCallback(ACMVADCallback* vadCallback) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  //   Receiver
+  //
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 InitializeReceiver()
+  // Any decoder-related state of ACM will be initialized to the
+  // same state when ACM is created. This will not interrupt or
+  // effect encoding functionality of ACM. ACM would lose all the
+  // decoding-related settings by calling this function.
+  // For instance, all registered codecs are deleted and have to be
+  // registered again.
+  //
+  // Return value:
+  //   -1 if failed to initialize,
+  //    0 if succeeded.
+  //
+  virtual WebRtc_Word32 InitializeReceiver() = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 ResetDecoder()
+  // This API resets the states of decoders. ACM will not lose any
+  // decoder-related settings, such as registered codecs.
+  //
+  // Return value:
+  //   -1 if failed to initialize,
+  //    0 if succeeded.
+  //
+  virtual WebRtc_Word32 ResetDecoder() = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 ReceiveFrequency()
+  // Get sampling frequency of the last received payload.
+  //
+  // Return value:
+  //   non-negative the sampling frequency in Hertz.
+  //   -1 if an error has occurred.
+  //
+  virtual WebRtc_Word32 ReceiveFrequency() const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 PlayoutFrequency()
+  // Get sampling frequency of audio played out.
+  //
+  // Return value:
+  //   the sampling frequency in Hertz.
+  //
+  virtual WebRtc_Word32 PlayoutFrequency() const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 RegisterReceiveCodec()
+  // Register possible decoders, can be called multiple times for
+  // codecs, CNG-NB, CNG-WB, CNG-SWB, AVT and RED.
+  //
+  // Input:
+  //   -receiveCodec       : parameters of the codec to be registered, c.f.
+  //                         common_types.h for the definition of
+  //                         CodecInst.
+  //
+  // Return value:
+  //   -1 if failed to register the codec
+  //    0 if the codec registered successfully.
+  //
+  virtual WebRtc_Word32 RegisterReceiveCodec(const CodecInst& receiveCodec) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 UnregisterReceiveCodec()
+  // Unregister the codec currently registered with a specific payload type
+  // from the list of possible receive codecs.
+  //
+  // Input:
+  //   -payloadType        : The number representing the payload type to
+  //                         unregister.
+  //
+  // Output:
+  //   -1 if the unregistration fails.
+  //    0 if the given codec is successfully unregistered.
+  //
+  virtual WebRtc_Word32 UnregisterReceiveCodec(
+      const WebRtc_Word16 receiveCodec) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 ReceiveCodec()
+  // Get the codec associated with last received payload.
+  //
+  // Output:
+  //   -currRcvCodec       : parameters of the codec associated with the last
+  //                         received payload, c.f. common_types.h for
+  //                         the definition of CodecInst.
+  //
+  // Return value:
+  //   -1 if failed to retrieve the codec,
+  //    0 if the codec is successfully retrieved.
+  //
+  virtual WebRtc_Word32 ReceiveCodec(CodecInst& currRcvCodec) const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 IncomingPacket()
+  // Call this function to insert a parsed RTP packet into ACM.
+  //
+  // Inputs:
+  //   -incomingPayload    : received payload.
+  //   -payloadLengthByte  : the length of payload in bytes.
+  //   -rtpInfo            : the relevant information retrieved from RTP
+  //                         header.
+  //
+  // Return value:
+  //   -1 if failed to push in the payload
+  //    0 if payload is successfully pushed in.
+  //
+  virtual WebRtc_Word32 IncomingPacket(const WebRtc_UWord8* incomingPayload,
+                                       const WebRtc_Word32 payloadLengthByte,
+                                       const WebRtcRTPHeader& rtpInfo) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 IncomingPayload()
+  // Call this API to push incoming payloads when there is no rtp-info.
+  // The rtp-info will be created in ACM. One usage for this API is when
+  // pre-encoded files are pushed in ACM
+  //
+  // Inputs:
+  //   -incomingPayload    : received payload.
+  //   -payloadLenghtByte  : the length, in bytes, of the received payload.
+  //   -payloadType        : the payload-type. This specifies which codec has
+  //                         to be used to decode the payload.
+  //   -timestamp          : send timestamp of the payload. ACM starts with
+  //                         a random value and increment it by the
+  //                         packet-size, which is given when the codec in
+  //                         question is registered by RegisterReceiveCodec().
+  //                         Therefore, it is essential to have the timestamp
+  //                         if the frame-size differ from the registered
+  //                         value or if the incoming payload contains DTX
+  //                         packets.
+  //
+  // Return value:
+  //   -1 if failed to push in the payload
+  //    0 if payload is successfully pushed in.
+  //
+  virtual WebRtc_Word32 IncomingPayload(const WebRtc_UWord8* incomingPayload,
+                                        const WebRtc_Word32 payloadLengthByte,
+                                        const WebRtc_UWord8 payloadType,
+                                        const WebRtc_UWord32 timestamp = 0) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 SetMinimumPlayoutDelay()
+  // Set Minimum playout delay, used for lip-sync.
+  //
+  // Input:
+  //   -timeMs             : minimum delay in milliseconds.
+  //
+  // Return value:
+  //   -1 if failed to set the delay,
+  //    0 if the minimum delay is set.
+  //
+  virtual WebRtc_Word32 SetMinimumPlayoutDelay(const WebRtc_Word32 timeMs) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 RegisterIncomingMessagesCallback()
+  // Used by the module to deliver messages to the codec module/application
+  // when a Dtmf tone is detected, as well as when it stopped.
+  //
+  // Inputs:
+  //   -inMsgCallback      : pointer to callback function which will be called
+  //                         if Dtmf is detected.
+  //   -cpt                : enables CPT (Call Progress Tone) detection for the
+  //                         specified country. c.f. definition of ACMCountries
+  //                         in audio_coding_module_typedefs.h for valid
+  //                         entries. The default value disables CPT
+  //                         detection.
+  //
+  // Return value:
+  //   -1 if the message callback could not be registered
+  //    0 if registration is successful.
+  //
+  virtual WebRtc_Word32
+      RegisterIncomingMessagesCallback(
+          AudioCodingFeedback* inMsgCallback,
+          const ACMCountries cpt = ACMDisableCountryDetection) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 SetDtmfPlayoutStatus()
+  // Configure Dtmf playout, i.e. whether out-of-band
+  // Dtmf tones are played or not.
+  //
+  // Input:
+  //   -enable             : if true to enable playout out-of-band Dtmf tones,
+  //                         false to disable.
+  //
+  // Return value:
+  //   -1 if the method fails, e.g. Dtmf playout is not supported.
+  //    0 if the status is set successfully.
+  //
+  virtual WebRtc_Word32 SetDtmfPlayoutStatus(const bool enable) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // bool DtmfPlayoutStatus()
+  // Get Dtmf playout status.
+  //
+  // Return value:
+  //   true if out-of-band Dtmf tones are played,
+  //   false if playout of Dtmf tones is disabled.
+  //
+  virtual bool DtmfPlayoutStatus() const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 SetBackgroundNoiseMode()
+  // Sets the mode of the background noise playout in an event of long
+  // packetloss burst. For the valid modes see the declaration of
+  // ACMBackgroundNoiseMode in audio_coding_module_typedefs.h.
+  //
+  // Input:
+  //   -mode               : the mode for the background noise playout.
+  //
+  // Return value:
+  //   -1 if failed to set the mode.
+  //    0 if succeeded in setting the mode.
+  //
+  virtual WebRtc_Word32 SetBackgroundNoiseMode(
+      const ACMBackgroundNoiseMode mode) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 BackgroundNoiseMode()
+  // Call this method to get the mode of the background noise playout.
+  // Playout of background noise is a result of a long packetloss burst.
+  // See ACMBackgroundNoiseMode in audio_coding_module_typedefs.h for
+  // possible modes.
+  //
+  // Output:
+  //   -mode             : a reference to ACMBackgroundNoiseMode enumerator.
+  //
+  // Return value:
+  //    0 if the output is a valid mode.
+  //   -1 if ACM failed to output a valid mode.
+  //
+  virtual WebRtc_Word32 BackgroundNoiseMode(ACMBackgroundNoiseMode& mode) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 PlayoutTimestamp()
+  // The send timestamp of an RTP packet is associated with the decoded
+  // audio of the packet in question. This function returns the timestamp of
+  // the latest audio obtained by calling PlayoutData10ms().
+  //
+  // Input:
+  //   -timestamp          : a reference to a WebRtc_UWord32 to receive the
+  //                         timestamp.
+  // Return value:
+  //    0 if the output is a correct timestamp.
+  //   -1 if failed to output the correct timestamp.
+  //
+  //
+  virtual WebRtc_Word32 PlayoutTimestamp(WebRtc_UWord32& timestamp) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 DecoderEstimatedBandwidth()
+  // Get the estimate of the Bandwidth, in bits/second, based on the incoming
+  // stream. This API is useful in one-way communication scenarios, where
+  // the bandwidth information is sent in an out-of-band fashion.
+  // Currently only supported if iSAC is registered as a reciever.
+  //
+  // Return value:
+  //   >0 bandwidth in bits/second.
+  //   -1 if failed to get a bandwidth estimate.
+  //
+  virtual WebRtc_Word32 DecoderEstimatedBandwidth() const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 SetPlayoutMode()
+  // Call this API to set the playout mode. Playout mode could be optimized
+  // for i) voice, ii) FAX or iii) streaming. In Voice mode, NetEQ is
+  // optimized to deliver highest audio quality while maintaining a minimum
+  // delay. In FAX mode, NetEQ is optimized to have few delay changes as
+  // possible and maintain a constant delay, perhaps large relative to voice
+  // mode, to avoid PLC. In streaming mode, we tolerate a little more delay
+  // to acheive better jitter robustness.
+  //
+  // Input:
+  //   -mode               : playout mode. Possible inputs are:
+  //                         "voice",
+  //                         "fax" and
+  //                         "streaming".
+  //
+  // Return value:
+  //   -1 if failed to set the mode,
+  //    0 if succeeding.
+  //
+  virtual WebRtc_Word32 SetPlayoutMode(const AudioPlayoutMode mode) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // AudioPlayoutMode PlayoutMode()
+  // Get playout mode, i.e. whether it is speech, FAX or streaming. See
+  // audio_coding_module_typedefs.h for definition of AudioPlayoutMode.
+  //
+  // Return value:
+  //   voice:       is for voice output,
+  //   fax:         a mode that is optimized for receiving FAX signals.
+  //                In this mode NetEq tries to maintain a constant high
+  //                delay to avoid PLC if possible.
+  //   streaming:   a mode that is suitable for streaminq. In this mode we
+  //                accept longer delay to improve jitter robustness.
+  //
+  virtual AudioPlayoutMode PlayoutMode() const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 PlayoutData10Ms(
+  // Get 10 milliseconds of raw audio data for playout, at the given sampling
+  // frequency. ACM will perform a resampling if required.
+  //
+  // Input:
+  //   -desiredFreqHz      : the desired sampling frequency, in Hertz, of the
+  //                         output audio. If set to -1, the function returns the
+  //                         audio at the current sampling frequency.
+  //
+  // Output:
+  //   -audioFrame         : output audio frame which contains raw audio data
+  //                         and other relevant parameters, c.f.
+  //                         module_common_types.h for the definition of
+  //                         AudioFrame.
+  //
+  // Return value:
+  //   -1 if the function fails,
+  //    0 if the function succeeds.
+  //
+  virtual WebRtc_Word32
+      PlayoutData10Ms(const WebRtc_Word32 desiredFreqHz,
+                      AudioFrame &audioFrame) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  //   (CNG) Comfort Noise Generation
+  //   Generate comfort noise when receiving DTX packets
+  //
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word16 SetReceiveVADMode()
+  // Configure VAD aggressiveness on the incoming stream.
+  //
+  // Input:
+  //   -mode               : aggressiveness of the VAD on incoming stream.
+  //                         See audio_coding_module_typedefs.h for the
+  //                         definition of ACMVADMode, and possible
+  //                         values for aggressiveness.
+  //
+  // Return value:
+  //   -1 if fails to set the mode,
+  //    0 if the mode is set successfully.
+  //
+  virtual WebRtc_Word16 SetReceiveVADMode(const ACMVADMode mode) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // ACMVADMode ReceiveVADMode()
+  // Get VAD aggressiveness on the incoming stream.
+  //
+  // Return value:
+  //   aggressiveness of VAD, running on the incoming stream. A more
+  //   aggressive mode means more audio frames will be labeled as in-active.
+  //   See audio_coding_module_typedefs.h for the definition of
+  //   ACMVADMode.
+  //
+  virtual ACMVADMode ReceiveVADMode() const = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  //   Codec specific
+  //
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 SetISACMaxRate()
+  // Set the maximum instantaneous rate of iSAC. For a payload of B bits
+  // with a frame-size of T sec the instantaneous rate is B/T bist per
+  // second. Therefore, (B/T < maxRateBitPerSec) and
+  // (B < maxPayloadLenBytes * 8) are always satisfied for iSAC payloads,
+  // c.f SetISACMaxPayloadSize().
+  //
+  // Input:
+  //   -maxRateBitPerSec   : maximum instantaneous bit-rate given in bits/sec.
+  //
+  // Return value:
+  //   -1 if failed to set the maximum rate.
+  //    0 if the maximum rate is set successfully.
+  //
+  virtual WebRtc_Word32 SetISACMaxRate(
+      const WebRtc_UWord32 maxRateBitPerSec) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 SetISACMaxPayloadSize()
+  // Set the maximum payload size of iSAC packets. No iSAC payload,
+  // regardless of its frame-size, may exceed the given limit. For
+  // an iSAC payload of size B bits and frame-size T sec we have;
+  // (B < maxPayloadLenBytes * 8) and (B/T < maxRateBitPerSec), c.f.
+  // SetISACMaxRate().
+  //
+  // Input:
+  //   -maxPayloadLenBytes : maximum payload size in bytes.
+  //
+  // Return value:
+  //   -1 if failed to set the maximm  payload-size.
+  //    0 if the given linit is seet successfully.
+  //
+  virtual WebRtc_Word32 SetISACMaxPayloadSize(
+      const WebRtc_UWord16 maxPayloadLenBytes) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32 ConfigISACBandwidthEstimator()
+  // Call this function to configure the bandwidth estimator of ISAC.
+  // During the adaptation of bit-rate, iSAC atomatically adjusts the
+  // frame-size (either 30 or 60 ms) to save on RTP header. The initial
+  // frame-size can be specified by the first argument. The configuration also
+  // regards the initial estimate of bandwidths. The estimator starts from
+  // this point and converges to the actual bottleneck. This is given by the
+  // second parameter. Furthermore, it is also possible to control the
+  // adaptation of frame-size. This is specified by the last parameter.
+  //
+  // Input:
+  //   -initFrameSizeMsec  : initial frame-size in milisecods. For iSAC-wb
+  //                         30 ms and 60 ms (default) are acceptable values,
+  //                         and for iSAC-swb 30 ms is the only acceptable
+  //                         value. Zero indiates default value.
+  //   -initRateBitPerSec  : initial estimate of the bandwidth. Values
+  //                         between 10000 and 58000 are acceptable.
+  //   -enforceFrameSize   : if true, the frame-size will not be adapted.
+  //
+  // Return value:
+  //   -1 if failed to configure the bandwidth estimator,
+  //    0 if the configuration was successfully applied.
+  //
+  virtual WebRtc_Word32 ConfigISACBandwidthEstimator(
+      const WebRtc_UWord8 initFrameSizeMsec,
+      const WebRtc_UWord16 initRateBitPerSec,
+      const bool enforceFrameSize = false) = 0;
+
+  ///////////////////////////////////////////////////////////////////////////
+  //   statistics
+  //
+
+  ///////////////////////////////////////////////////////////////////////////
+  // WebRtc_Word32  NetworkStatistics()
+  // Get network statistics.
+  //
+  // Input:
+  //   -networkStatistics  : a structure that contains network statistics.
+  //
+  // Return value:
+  //   -1 if failed to set the network statistics,
+  //    0 if statistics are set successfully.
+  //
+  virtual WebRtc_Word32 NetworkStatistics(
+      ACMNetworkStatistics& networkStatistics) const = 0;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_INTERFACE_AUDIO_CODING_MODULE_H
diff --git a/src/modules/audio_coding/main/interface/audio_coding_module_typedefs.h b/src/modules/audio_coding/main/interface/audio_coding_module_typedefs.h
new file mode 100644
index 0000000..c0e06ef
--- /dev/null
+++ b/src/modules/audio_coding/main/interface/audio_coding_module_typedefs.h
@@ -0,0 +1,197 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_INTERFACE_AUDIO_CODING_MODULE_TYPEDEFS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_INTERFACE_AUDIO_CODING_MODULE_TYPEDEFS_H_
+
+#include "typedefs.h"
+
+namespace webrtc {
+
+///////////////////////////////////////////////////////////////////////////
+// enum AudioPlayoutMode
+// An enumerator for different playout modes.
+//
+// -voice       : This is the standard mode for VoIP calls. The trade-off
+//                between low delay and jitter robustness is optimized
+//                for high-quality two-way communication.
+//                NetEQs packet loss concealment and signal processing
+//                capabilities are fully employed.
+// -fax         : The fax mode is optimized for decodability of fax signals
+//                rather than for perceived audio quality. When this mode
+//                is selected, NetEQ will do as few delay changes as possible,
+//                trying to maintain a high and constant delay. Meanwhile,
+//                the packet loss concealment efforts are reduced.
+//
+// -streaming   : In the case of one-way communication such as passive
+//                conference participant, a webinar, or a streaming application,
+//                this mode can be used to improve the jitter robustness at
+//                the cost of increased delay.
+//
+enum AudioPlayoutMode {
+  voice = 0,
+  fax = 1,
+  streaming = 2
+};
+
+///////////////////////////////////////////////////////////////////////////
+// enum ACMSpeechType
+// An enumerator for possible labels of a decoded frame.
+//
+// -normal      : a normal speech frame. If VAD is enabled on the
+//                incoming stream this label indicate that the
+//                frame is active.
+// -PLC         : a PLC frame. The corresponding packet was lost
+//                and this frame generated by PLC techniques.
+// -CNG         : the frame is comfort noise. This happens if VAD
+//                is enabled at the sender and we have received
+//                SID.
+// -PLCCNG      : PLC will fade to comfort noise if the duration
+//                of PLC is long. This labels such a case.
+// -VADPassive  : the VAD at the receiver recognizes this frame as
+//                passive.
+//
+enum ACMSpeechType {
+  normal = 0,
+  PLC = 1,
+  CNG = 2,
+  PLCCNG = 3,
+  VADPassive = 4
+};
+
+///////////////////////////////////////////////////////////////////////////
+// enum ACMVADMode
+// An enumerator for aggressiveness of VAD
+// -VADNormal                : least aggressive mode.
+// -VADLowBitrate            : more aggressive than "VADNormal" to save on
+//                             bit-rate.
+// -VADAggr                  : an aggressive mode.
+// -VADVeryAggr              : the most agressive mode.
+//
+enum ACMVADMode {
+  VADNormal = 0,
+  VADLowBitrate = 1,
+  VADAggr = 2,
+  VADVeryAggr = 3
+};
+
+///////////////////////////////////////////////////////////////////////////
+// enum ACMCountries
+// An enumerator for countries, used when enabling CPT for a specific country.
+//
+enum ACMCountries {
+  ACMDisableCountryDetection = -1, // disable CPT detection
+  ACMUSA = 0,
+  ACMJapan,
+  ACMCanada,
+  ACMFrance,
+  ACMGermany,
+  ACMAustria,
+  ACMBelgium,
+  ACMUK,
+  ACMCzech,
+  ACMDenmark,
+  ACMFinland,
+  ACMGreece,
+  ACMHungary,
+  ACMIceland,
+  ACMIreland,
+  ACMItaly,
+  ACMLuxembourg,
+  ACMMexico,
+  ACMNorway,
+  ACMPoland,
+  ACMPortugal,
+  ACMSpain,
+  ACMSweden,
+  ACMTurkey,
+  ACMChina,
+  ACMHongkong,
+  ACMTaiwan,
+  ACMKorea,
+  ACMSingapore,
+  ACMNonStandard1
+// non-standard countries
+};
+
+///////////////////////////////////////////////////////////////////////////
+// enum ACMAMRPackingFormat
+// An enumerator for different bit-packing format of AMR codec according to
+// RFC 3267.
+//
+// -AMRUndefined           : undefined.
+// -AMRBandwidthEfficient  : bandwidth-efficient mode.
+// -AMROctetAlligned       : Octet-alligned mode.
+// -AMRFileStorage         : file-storage mode.
+//
+enum ACMAMRPackingFormat {
+  AMRUndefined = -1,
+  AMRBandwidthEfficient = 0,
+  AMROctetAlligned = 1,
+  AMRFileStorage = 2
+};
+
+
+///////////////////////////////////////////////////////////////////////////
+//
+//   Struct containing network statistics
+//
+// -currentBufferSize      : current jitter buffer size in ms
+// -preferredBufferSize    : preferred (optimal) buffer size in ms
+// -jitterPeaksFound       : indicate if peaky-jitter mode is engaged, that is,
+//                           if severe but sparse network delays have occurred.
+// -currentPacketLossRate  : loss rate (network + late) (in Q14)
+// -currentDiscardRate     : late loss rate (in Q14)
+// -currentExpandRate      : fraction (of original stream) of synthesized
+//                           speech inserted through expansion (in Q14)
+// -currentPreemptiveRate  : fraction of synthesized speech inserted through
+//                           pre-emptive expansion (in Q14)
+// -currentAccelerateRate  : fraction of data removed through acceleration
+//                           (in Q14)
+// -clockDriftPPM          : clock-drift between sender and receiver in parts-
+//                           per-million. Positive means that receiver sample
+//                           rate is higher than sender sample rate.
+// -meanWaitingTimeMs      : average packet waiting time in the buffer
+// -medianWaitingTimeMs    : median packet waiting time in the buffer
+// -minWaitingTimeMs       : min packet waiting time in the buffer
+// -maxWaitingTimeMs       : max packet waiting time in the buffer
+typedef struct {
+  WebRtc_UWord16 currentBufferSize;
+  WebRtc_UWord16 preferredBufferSize;
+  bool jitterPeaksFound;
+  WebRtc_UWord16 currentPacketLossRate;
+  WebRtc_UWord16 currentDiscardRate;
+  WebRtc_UWord16 currentExpandRate;
+  WebRtc_UWord16 currentPreemptiveRate;
+  WebRtc_UWord16 currentAccelerateRate;
+  int32_t clockDriftPPM;
+  int meanWaitingTimeMs;
+  int medianWaitingTimeMs;
+  int minWaitingTimeMs;
+  int maxWaitingTimeMs;
+} ACMNetworkStatistics;
+
+///////////////////////////////////////////////////////////////////////////
+//
+// Enumeration of background noise mode a mapping from NetEQ interface.
+//
+// -On                  : default "normal" behavior with eternal noise
+// -Fade                : noise fades to zero after some time
+// -Off                 : background noise is always zero
+//
+enum ACMBackgroundNoiseMode {
+  On,
+  Fade,
+  Off
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_INTERFACE_AUDIO_CODING_MODULE_TYPEDEFS_H_
diff --git a/src/modules/audio_coding/main/source/acm_amr.cc b/src/modules/audio_coding/main/source/acm_amr.cc
new file mode 100644
index 0000000..067e7f0
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_amr.cc
@@ -0,0 +1,424 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_amr.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "audio_coding_module_typedefs.h"
+#include "rw_lock_wrapper.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+#ifdef WEBRTC_CODEC_AMR
+// NOTE! GSM AMR is not included in the open-source package. The following
+// interface file is needed:
+//
+// /modules/audio_coding/codecs/amr/main/interface/amr_interface.h
+//
+// The API in the header file should match the one below.
+//
+// int16_t WebRtcAmr_CreateEnc(AMR_encinst_t_** encInst);
+// int16_t WebRtcAmr_CreateDec(AMR_decinst_t_** decInst);
+// int16_t WebRtcAmr_FreeEnc(AMR_encinst_t_* encInst);
+// int16_t WebRtcAmr_FreeDec(AMR_decinst_t_* decInst);
+// int16_t WebRtcAmr_Encode(AMR_encinst_t_* encInst,
+//                          int16_t* input,
+//                          int16_t len,
+//                          int16_t*output,
+//                          int16_t mode);
+//  int16_t WebRtcAmr_EncoderInit(AMR_encinst_t_* encInst,
+//                               int16_t dtxMode);
+// int16_t WebRtcAmr_EncodeBitmode(AMR_encinst_t_* encInst,
+//                                 int format);
+// int16_t WebRtcAmr_Decode(AMR_decinst_t_* decInst);
+// int16_t WebRtcAmr_DecodePlc(AMR_decinst_t_* decInst);
+// int16_t WebRtcAmr_DecoderInit(AMR_decinst_t_* decInst);
+// int16_t WebRtcAmr_DecodeBitmode(AMR_decinst_t_* decInst,
+//                                 int format);
+#include "amr_interface.h"
+#endif
+
+namespace webrtc {
+
+#ifndef WEBRTC_CODEC_AMR
+ACMAMR::ACMAMR(WebRtc_Word16 /* codecID */)
+    : _encoderInstPtr(NULL),
+      _decoderInstPtr(NULL),
+      _encodingMode(-1),  // Invalid value.
+      _encodingRate(0),  // Invalid value.
+      _encoderPackingFormat(AMRBandwidthEfficient),
+      _decoderPackingFormat(AMRBandwidthEfficient) {
+  return;
+}
+
+ACMAMR::~ACMAMR() {
+  return;
+}
+
+WebRtc_Word16 ACMAMR::InternalEncode(WebRtc_UWord8* /* bitStream */,
+                                     WebRtc_Word16* /* bitStreamLenByte */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMAMR::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                 WebRtc_Word16 /* bitStreamLenByte */,
+                                 WebRtc_Word16* /* audio */,
+                                 WebRtc_Word16* /* audioSamples */,
+                                 WebRtc_Word8* /* speechType */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMAMR::EnableDTX() {
+  return -1;
+}
+
+WebRtc_Word16 ACMAMR::DisableDTX() {
+  return -1;
+}
+
+WebRtc_Word16 ACMAMR::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMAMR::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word32 ACMAMR::CodecDef(WebRtcNetEQ_CodecDef& /* codecDef */,
+                               const CodecInst& /* codecInst */) {
+  return -1;
+}
+
+ACMGenericCodec* ACMAMR::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMAMR::InternalCreateEncoder() {
+  return -1;
+}
+
+void ACMAMR::DestructEncoderSafe() {
+  return;
+}
+
+WebRtc_Word16 ACMAMR::InternalCreateDecoder() {
+  return -1;
+}
+
+void ACMAMR::DestructDecoderSafe() {
+  return;
+}
+
+WebRtc_Word16 ACMAMR::SetBitRateSafe(const WebRtc_Word32 /* rate */) {
+  return -1;
+}
+
+void ACMAMR::InternalDestructEncoderInst(void* /* ptrInst */) {
+  return;
+}
+
+WebRtc_Word16 ACMAMR::SetAMREncoderPackingFormat(
+    ACMAMRPackingFormat /* packingFormat */) {
+  return -1;
+}
+
+ACMAMRPackingFormat ACMAMR::AMREncoderPackingFormat() const {
+  return AMRUndefined;
+}
+
+WebRtc_Word16 ACMAMR::SetAMRDecoderPackingFormat(
+    ACMAMRPackingFormat /* packingFormat */) {
+  return -1;
+}
+
+ACMAMRPackingFormat ACMAMR::AMRDecoderPackingFormat() const {
+  return AMRUndefined;
+}
+
+#else     //===================== Actual Implementation =======================
+
+#define WEBRTC_AMR_MR475  0
+#define WEBRTC_AMR_MR515  1
+#define WEBRTC_AMR_MR59   2
+#define WEBRTC_AMR_MR67   3
+#define WEBRTC_AMR_MR74   4
+#define WEBRTC_AMR_MR795  5
+#define WEBRTC_AMR_MR102  6
+#define WEBRTC_AMR_MR122  7
+
+ACMAMR::ACMAMR(WebRtc_Word16 codecID)
+    : _encoderInstPtr(NULL),
+      _decoderInstPtr(NULL),
+      _encodingMode(-1), // invalid value
+      _encodingRate(0) { // invalid value
+  _codecID = codecID;
+  _hasInternalDTX = true;
+  _encoderPackingFormat = AMRBandwidthEfficient;
+  _decoderPackingFormat = AMRBandwidthEfficient;
+  return;
+}
+
+ACMAMR::~ACMAMR() {
+  if (_encoderInstPtr != NULL) {
+    WebRtcAmr_FreeEnc(_encoderInstPtr);
+    _encoderInstPtr = NULL;
+  }
+  if (_decoderInstPtr != NULL) {
+    WebRtcAmr_FreeDec(_decoderInstPtr);
+    _decoderInstPtr = NULL;
+  }
+  return;
+}
+
+WebRtc_Word16 ACMAMR::InternalEncode(WebRtc_UWord8* bitStream,
+                                     WebRtc_Word16* bitStreamLenByte) {
+  WebRtc_Word16 vadDecision = 1;
+  // sanity check, if the rate is set correctly. we might skip this
+  // sanity check. if rate is not set correctly, initialization flag
+  // should be false and should not be here.
+  if ((_encodingMode < WEBRTC_AMR_MR475) ||
+      (_encodingMode > WEBRTC_AMR_MR122)) {
+    *bitStreamLenByte = 0;
+    return -1;
+  }
+  *bitStreamLenByte = WebRtcAmr_Encode(_encoderInstPtr,
+                                       &_inAudio[_inAudioIxRead],
+                                       _frameLenSmpl,
+                                       (WebRtc_Word16*) bitStream,
+                                       _encodingMode);
+
+  // Update VAD, if internal DTX is used
+  if (_hasInternalDTX && _dtxEnabled) {
+    if (*bitStreamLenByte <= (7 * _frameLenSmpl / 160)) {
+      vadDecision = 0;
+    }
+    for (WebRtc_Word16 n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
+      _vadLabel[n] = vadDecision;
+    }
+  }
+  // increment the read index
+  _inAudioIxRead += _frameLenSmpl;
+  return *bitStreamLenByte;
+}
+
+WebRtc_Word16 ACMAMR::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                 WebRtc_Word16 /* bitStreamLenByte */,
+                                 WebRtc_Word16* /* audio */,
+                                 WebRtc_Word16* /* audioSamples */,
+                                 WebRtc_Word8* /* speechType */) {
+  return 0;
+}
+
+WebRtc_Word16 ACMAMR::EnableDTX() {
+  if (_dtxEnabled) {
+    return 0;
+  } else if (_encoderExist) { // check if encoder exist
+    // enable DTX
+    if (WebRtcAmr_EncoderInit(_encoderInstPtr, 1) < 0) {
+      return -1;
+    }
+    _dtxEnabled = true;
+    return 0;
+  } else {
+    return -1;
+  }
+}
+
+WebRtc_Word16 ACMAMR::DisableDTX() {
+  if (!_dtxEnabled) {
+    return 0;
+  } else if (_encoderExist) { // check if encoder exist
+    // disable DTX
+    if (WebRtcAmr_EncoderInit(_encoderInstPtr, 0) < 0) {
+      return -1;
+    }
+    _dtxEnabled = false;
+    return 0;
+  } else {
+    // encoder doesn't exists, therefore disabling is harmless
+    return 0;
+  }
+}
+
+WebRtc_Word16 ACMAMR::InternalInitEncoder(WebRtcACMCodecParams* codecParams) {
+  WebRtc_Word16 status = SetBitRateSafe((codecParams->codecInstant).rate);
+  status += (WebRtcAmr_EncoderInit(
+      _encoderInstPtr, ((codecParams->enableDTX) ? 1 : 0)) < 0) ? -1 : 0;
+  status += (WebRtcAmr_EncodeBitmode(
+      _encoderInstPtr, _encoderPackingFormat) < 0) ? -1 : 0;
+  return (status < 0) ? -1 : 0;
+}
+
+WebRtc_Word16 ACMAMR::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  WebRtc_Word16 status =
+      ((WebRtcAmr_DecoderInit(_decoderInstPtr) < 0) ? -1 : 0);
+  status += WebRtcAmr_DecodeBitmode(_decoderInstPtr, _decoderPackingFormat);
+  return (status < 0) ? -1 : 0;
+}
+
+WebRtc_Word32 ACMAMR::CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                               const CodecInst& codecInst) {
+  if (!_decoderInitialized) {
+    // Todo:
+    // log error
+    return -1;
+  }
+  // Fill up the structure by calling
+  // "SET_CODEC_PAR" & "SET_AMR_FUNCTION."
+  // Then call NetEQ to add the codec to it's
+  // database.
+  SET_CODEC_PAR((codecDef), kDecoderAMR, codecInst.pltype, _decoderInstPtr,
+                8000);
+  SET_AMR_FUNCTIONS((codecDef));
+  return 0;
+}
+
+ACMGenericCodec* ACMAMR::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMAMR::InternalCreateEncoder() {
+  return WebRtcAmr_CreateEnc(&_encoderInstPtr);
+}
+
+void ACMAMR::DestructEncoderSafe() {
+  if (_encoderInstPtr != NULL) {
+    WebRtcAmr_FreeEnc(_encoderInstPtr);
+    _encoderInstPtr = NULL;
+  }
+  // there is no encoder set the following
+  _encoderExist = false;
+  _encoderInitialized = false;
+  _encodingMode = -1; // invalid value
+  _encodingRate = 0; // invalid value
+}
+
+WebRtc_Word16 ACMAMR::InternalCreateDecoder() {
+  return WebRtcAmr_CreateDec(&_decoderInstPtr);
+}
+
+void ACMAMR::DestructDecoderSafe() {
+  if (_decoderInstPtr != NULL) {
+    WebRtcAmr_FreeDec(_decoderInstPtr);
+    _decoderInstPtr = NULL;
+  }
+  // there is no encoder instance set the followings
+  _decoderExist = false;
+  _decoderInitialized = false;
+}
+
+WebRtc_Word16 ACMAMR::SetBitRateSafe(const WebRtc_Word32 rate) {
+  switch (rate) {
+    case 4750: {
+      _encodingMode = WEBRTC_AMR_MR475;
+      _encodingRate = 4750;
+      break;
+    }
+    case 5150: {
+      _encodingMode = WEBRTC_AMR_MR515;
+      _encodingRate = 5150;
+      break;
+    }
+    case 5900: {
+      _encodingMode = WEBRTC_AMR_MR59;
+      _encodingRate = 5900;
+      break;
+    }
+    case 6700: {
+      _encodingMode = WEBRTC_AMR_MR67;
+      _encodingRate = 6700;
+      break;
+    }
+    case 7400: {
+      _encodingMode = WEBRTC_AMR_MR74;
+      _encodingRate = 7400;
+      break;
+    }
+    case 7950: {
+      _encodingMode = WEBRTC_AMR_MR795;
+      _encodingRate = 7950;
+      break;
+    }
+    case 10200: {
+      _encodingMode = WEBRTC_AMR_MR102;
+      _encodingRate = 10200;
+      break;
+    }
+    case 12200: {
+      _encodingMode = WEBRTC_AMR_MR122;
+      _encodingRate = 12200;
+      break;
+    }
+    default: {
+      return -1;
+    }
+  }
+  return 0;
+}
+
+void ACMAMR::InternalDestructEncoderInst(void* ptrInst) {
+  // Free the memory where ptrInst is pointing to
+  if (ptrInst != NULL) {
+    WebRtcAmr_FreeEnc(reinterpret_cast<AMR_encinst_t_*>(ptrInst));
+  }
+  return;
+}
+
+WebRtc_Word16 ACMAMR::SetAMREncoderPackingFormat(
+    ACMAMRPackingFormat packingFormat) {
+  if ((packingFormat != AMRBandwidthEfficient) &&
+      (packingFormat != AMROctetAlligned) &&
+      (packingFormat != AMRFileStorage)) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "Invalid AMR Encoder packing-format.");
+    return -1;
+  } else {
+    if (WebRtcAmr_EncodeBitmode(_encoderInstPtr, packingFormat) < 0) {
+      return -1;
+    } else {
+      _encoderPackingFormat = packingFormat;
+      return 0;
+    }
+  }
+}
+
+ACMAMRPackingFormat ACMAMR::AMREncoderPackingFormat() const {
+  return _encoderPackingFormat;
+}
+
+WebRtc_Word16 ACMAMR::SetAMRDecoderPackingFormat(
+    ACMAMRPackingFormat packingFormat) {
+  if ((packingFormat != AMRBandwidthEfficient) &&
+      (packingFormat != AMROctetAlligned) &&
+      (packingFormat != AMRFileStorage)) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "Invalid AMR decoder packing-format.");
+    return -1;
+  } else {
+    if (WebRtcAmr_DecodeBitmode(_decoderInstPtr, packingFormat) < 0) {
+      return -1;
+    } else {
+      _decoderPackingFormat = packingFormat;
+      return 0;
+    }
+  }
+}
+
+ACMAMRPackingFormat ACMAMR::AMRDecoderPackingFormat() const {
+  return _decoderPackingFormat;
+}
+
+#endif
+
+}
diff --git a/src/modules/audio_coding/main/source/acm_amr.h b/src/modules/audio_coding/main/source/acm_amr.h
new file mode 100644
index 0000000..d3c175c
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_amr.h
@@ -0,0 +1,83 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMR_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct AMR_encinst_t_;
+struct AMR_decinst_t_;
+
+namespace webrtc {
+
+enum ACMAMRPackingFormat;
+
+class ACMAMR: public ACMGenericCodec {
+ public:
+  ACMAMR(WebRtc_Word16 codecID);
+  ~ACMAMR();
+  // for FEC
+  ACMGenericCodec* CreateInstance(void);
+
+  WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
+                               WebRtc_Word16* bitStreamLenByte);
+
+  WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codecParams);
+
+  WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codecParams);
+
+  WebRtc_Word16 SetAMREncoderPackingFormat(
+      const ACMAMRPackingFormat packingFormat);
+
+  ACMAMRPackingFormat AMREncoderPackingFormat() const;
+
+  WebRtc_Word16 SetAMRDecoderPackingFormat(
+      const ACMAMRPackingFormat packingFormat);
+
+  ACMAMRPackingFormat AMRDecoderPackingFormat() const;
+
+ protected:
+  WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitStream,
+                           WebRtc_Word16 bitStreamLenByte,
+                           WebRtc_Word16* audio, WebRtc_Word16* audioSamples,
+                           WebRtc_Word8* speechType);
+
+  WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                         const CodecInst& codecInst);
+
+  void DestructEncoderSafe();
+
+  void DestructDecoderSafe();
+
+  WebRtc_Word16 InternalCreateEncoder();
+
+  WebRtc_Word16 InternalCreateDecoder();
+
+  void InternalDestructEncoderInst(void* ptrInst);
+
+  WebRtc_Word16 SetBitRateSafe(const WebRtc_Word32 rate);
+
+  WebRtc_Word16 EnableDTX();
+
+  WebRtc_Word16 DisableDTX();
+
+  AMR_encinst_t_* _encoderInstPtr;
+  AMR_decinst_t_* _decoderInstPtr;
+  WebRtc_Word16 _encodingMode;
+  WebRtc_Word16 _encodingRate;
+  ACMAMRPackingFormat _encoderPackingFormat;
+  ACMAMRPackingFormat _decoderPackingFormat;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMR_H_
diff --git a/src/modules/audio_coding/main/source/acm_amrwb.cc b/src/modules/audio_coding/main/source/acm_amrwb.cc
new file mode 100644
index 0000000..4187a1f
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_amrwb.cc
@@ -0,0 +1,431 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_amrwb.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "audio_coding_module_typedefs.h"
+#include "rw_lock_wrapper.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+#ifdef WEBRTC_CODEC_AMRWB
+// NOTE! GSM AMR-wb is not included in the open-source package. The
+// following interface file is needed:
+//
+// /modules/audio_coding/codecs/amrwb/main/interface/amrwb_interface.h
+//
+// The API in the header file should match the one below.
+//
+// int16_t WebRtcAmrWb_CreateEnc(AMRWB_encinst_t_** encInst);
+// int16_t WebRtcAmrWb_CreateDec(AMRWB_decinst_t_** decInst);
+// int16_t WebRtcAmrWb_FreeEnc(AMRWB_encinst_t_* encInst);
+// int16_t WebRtcAmrWb_FreeDec(AMRWB_decinst_t_* decInst);
+// int16_t WebRtcAmrWb_Encode(AMRWB_encinst_t_* encInst, int16_t* input,
+//                            int16_t len, int16_t* output, int16_t mode);
+// int16_t WebRtcAmrWb_EncoderInit(AMRWB_encinst_t_* encInst,
+//                                 int16_t dtxMode);
+// int16_t WebRtcAmrWb_EncodeBitmode(AMRWB_encinst_t_* encInst,
+//                                    int format);
+// int16_t WebRtcAmrWb_Decode(AMRWB_decinst_t_* decInst);
+// int16_t WebRtcAmrWb_DecodePlc(AMRWB_decinst_t_* decInst);
+// int16_t WebRtcAmrWb_DecoderInit(AMRWB_decinst_t_* decInst);
+// int16_t WebRtcAmrWb_DecodeBitmode(AMRWB_decinst_t_* decInst,
+//                                   int format);
+#include "amrwb_interface.h"
+#endif
+
+namespace webrtc {
+
+#ifndef WEBRTC_CODEC_AMRWB
+ACMAMRwb::ACMAMRwb(WebRtc_Word16 /* codecID*/)
+    : _encoderInstPtr(NULL),
+      _decoderInstPtr(NULL),
+      _encodingMode(-1),  // invalid value
+      _encodingRate(0),  // invalid value
+      _encoderPackingFormat(AMRBandwidthEfficient),
+      _decoderPackingFormat(AMRBandwidthEfficient) {
+  return;
+}
+
+ACMAMRwb::~ACMAMRwb() {
+  return;
+}
+
+WebRtc_Word16 ACMAMRwb::InternalEncode(WebRtc_UWord8* /* bitStream */,
+                                       WebRtc_Word16* /* bitStreamLenByte */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMAMRwb::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                   WebRtc_Word16 /* bitStreamLenByte */,
+                                   WebRtc_Word16* /* audio */,
+                                   WebRtc_Word16* /* audioSamples */,
+                                   WebRtc_Word8* /* speechType */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMAMRwb::EnableDTX() {
+  return -1;
+}
+
+WebRtc_Word16 ACMAMRwb::DisableDTX() {
+  return -1;
+}
+
+WebRtc_Word16 ACMAMRwb::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMAMRwb::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word32 ACMAMRwb::CodecDef(WebRtcNetEQ_CodecDef& /* codecDef */,
+                                 const CodecInst& /* codecInst */) {
+  return -1;
+}
+
+ACMGenericCodec*
+ACMAMRwb::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMAMRwb::InternalCreateEncoder() {
+  return -1;
+}
+
+void ACMAMRwb::DestructEncoderSafe() {
+  return;
+}
+
+WebRtc_Word16 ACMAMRwb::InternalCreateDecoder() {
+  return -1;
+}
+
+void ACMAMRwb::DestructDecoderSafe() {
+  return;
+}
+
+WebRtc_Word16 ACMAMRwb::SetBitRateSafe(const WebRtc_Word32 /* rate */) {
+  return -1;
+}
+
+void ACMAMRwb::InternalDestructEncoderInst(void* /* ptrInst */) {
+  return;
+}
+
+WebRtc_Word16 ACMAMRwb::SetAMRwbEncoderPackingFormat(
+    ACMAMRPackingFormat /* packingFormat */) {
+  return -1;
+}
+
+ACMAMRPackingFormat ACMAMRwb::AMRwbEncoderPackingFormat() const {
+  return AMRUndefined;
+}
+
+WebRtc_Word16 ACMAMRwb::SetAMRwbDecoderPackingFormat(
+    ACMAMRPackingFormat /* packingFormat */) {
+  return -1;
+}
+
+ACMAMRPackingFormat ACMAMRwb::AMRwbDecoderPackingFormat() const {
+  return AMRUndefined;
+}
+
+#else     //===================== Actual Implementation =======================
+
+#define AMRWB_MODE_7k       0
+#define AMRWB_MODE_9k       1
+#define AMRWB_MODE_12k      2
+#define AMRWB_MODE_14k      3
+#define AMRWB_MODE_16k      4
+#define AMRWB_MODE_18k      5
+#define AMRWB_MODE_20k      6
+#define AMRWB_MODE_23k      7
+#define AMRWB_MODE_24k      8
+
+ACMAMRwb::ACMAMRwb(WebRtc_Word16 codecID)
+    : _encoderInstPtr(NULL),
+      _decoderInstPtr(NULL),
+      _encodingMode(-1), // invalid value
+      _encodingRate(0) { // invalid value
+  _codecID = codecID;
+  _hasInternalDTX = true;
+  _encoderPackingFormat = AMRBandwidthEfficient;
+  _decoderPackingFormat = AMRBandwidthEfficient;
+  return;
+}
+
+ACMAMRwb::~ACMAMRwb() {
+  if (_encoderInstPtr != NULL) {
+    WebRtcAmrWb_FreeEnc(_encoderInstPtr);
+    _encoderInstPtr = NULL;
+  }
+  if (_decoderInstPtr != NULL) {
+    WebRtcAmrWb_FreeDec(_decoderInstPtr);
+    _decoderInstPtr = NULL;
+  }
+  return;
+}
+
+WebRtc_Word16 ACMAMRwb::InternalEncode(WebRtc_UWord8* bitStream,
+                                       WebRtc_Word16* bitStreamLenByte) {
+  WebRtc_Word16 vadDecision = 1;
+  // sanity check, if the rate is set correctly. we might skip this
+  // sanity check. if rate is not set correctly, initialization flag
+  // should be false and should not be here.
+  if ((_encodingMode < AMRWB_MODE_7k) || (_encodingMode > AMRWB_MODE_24k)) {
+    *bitStreamLenByte = 0;
+    return -1;
+  }
+  *bitStreamLenByte = WebRtcAmrWb_Encode(_encoderInstPtr,
+                                         &_inAudio[_inAudioIxRead],
+                                         _frameLenSmpl,
+                                         (WebRtc_Word16*) bitStream,
+                                         _encodingMode);
+
+  // Update VAD, if internal DTX is used
+  if (_hasInternalDTX && _dtxEnabled) {
+    if (*bitStreamLenByte <= (7 * _frameLenSmpl / 160)) {
+      vadDecision = 0;
+    }
+    for (WebRtc_Word16 n = 0; n < MAX_FRAME_SIZE_10MSEC; n++) {
+      _vadLabel[n] = vadDecision;
+    }
+  }
+  // increment the read index this tell the caller that how far
+  // we have gone forward in reading the audio buffer
+  _inAudioIxRead += _frameLenSmpl;
+  return *bitStreamLenByte;
+}
+
+WebRtc_Word16 ACMAMRwb::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                   WebRtc_Word16 /* bitStreamLenByte */,
+                                   WebRtc_Word16* /* audio */,
+                                   WebRtc_Word16* /* audioSamples */,
+                                   WebRtc_Word8* /* speechType */) {
+  return 0;
+}
+
+WebRtc_Word16 ACMAMRwb::EnableDTX() {
+  if (_dtxEnabled) {
+    return 0;
+  } else if (_encoderExist) { // check if encoder exist
+    // enable DTX
+    if (WebRtcAmrWb_EncoderInit(_encoderInstPtr, 1) < 0) {
+      return -1;
+    }
+    _dtxEnabled = true;
+    return 0;
+  } else {
+    return -1;
+  }
+}
+
+WebRtc_Word16 ACMAMRwb::DisableDTX() {
+  if (!_dtxEnabled) {
+    return 0;
+  } else if (_encoderExist) { // check if encoder exist
+    // disable DTX
+    if (WebRtcAmrWb_EncoderInit(_encoderInstPtr, 0) < 0) {
+      return -1;
+    }
+    _dtxEnabled = false;
+    return 0;
+  } else {
+    // encoder doesn't exists, therefore disabling is harmless
+    return 0;
+  }
+}
+
+WebRtc_Word16 ACMAMRwb::InternalInitEncoder(WebRtcACMCodecParams* codecParams) {
+  // sanity check
+  if (_encoderInstPtr == NULL) {
+    return -1;
+  }
+
+  WebRtc_Word16 status = SetBitRateSafe((codecParams->codecInstant).rate);
+  status += (WebRtcAmrWb_EncoderInit(
+      _encoderInstPtr, ((codecParams->enableDTX) ? 1 : 0)) < 0) ? -1 : 0;
+  status += (WebRtcAmrWb_EncodeBitmode(
+      _encoderInstPtr, _encoderPackingFormat) < 0) ? -1 : 0;
+  return (status < 0) ? -1 : 0;
+}
+
+WebRtc_Word16 ACMAMRwb::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  WebRtc_Word16 status = WebRtcAmrWb_DecodeBitmode(_decoderInstPtr,
+                                                   _decoderPackingFormat);
+  status += ((WebRtcAmrWb_DecoderInit(_decoderInstPtr) < 0) ? -1 : 0);
+  return (status < 0) ? -1 : 0;
+}
+
+WebRtc_Word32 ACMAMRwb::CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                                 const CodecInst& codecInst) {
+  if (!_decoderInitialized) {
+    return -1;
+  }
+
+  // Fill up the structure by calling
+  // "SET_CODEC_PAR" & "SET_AMRWB_FUNCTION."
+  // Then call NetEQ to add the codec to it's
+  // database.
+  SET_CODEC_PAR((codecDef), kDecoderAMRWB, codecInst.pltype, _decoderInstPtr,
+                16000);
+  SET_AMRWB_FUNCTIONS((codecDef));
+  return 0;
+}
+
+ACMGenericCodec* ACMAMRwb::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMAMRwb::InternalCreateEncoder() {
+  return WebRtcAmrWb_CreateEnc(&_encoderInstPtr);
+}
+
+void ACMAMRwb::DestructEncoderSafe() {
+  if (_encoderInstPtr != NULL) {
+    WebRtcAmrWb_FreeEnc(_encoderInstPtr);
+    _encoderInstPtr = NULL;
+  }
+  // there is no encoder set the following
+  _encoderExist = false;
+  _encoderInitialized = false;
+  _encodingMode = -1; // invalid value
+  _encodingRate = 0;
+}
+
+WebRtc_Word16 ACMAMRwb::InternalCreateDecoder() {
+  return WebRtcAmrWb_CreateDec(&_decoderInstPtr);
+}
+
+void ACMAMRwb::DestructDecoderSafe() {
+  if (_decoderInstPtr != NULL) {
+    WebRtcAmrWb_FreeDec(_decoderInstPtr);
+    _decoderInstPtr = NULL;
+  }
+  // there is no encoder instance set the followings
+  _decoderExist = false;
+  _decoderInitialized = false;
+}
+
+WebRtc_Word16 ACMAMRwb::SetBitRateSafe(const WebRtc_Word32 rate) {
+  switch (rate) {
+    case 7000: {
+      _encodingMode = AMRWB_MODE_7k;
+      _encodingRate = 7000;
+      break;
+    }
+    case 9000: {
+      _encodingMode = AMRWB_MODE_9k;
+      _encodingRate = 9000;
+      break;
+    }
+    case 12000: {
+      _encodingMode = AMRWB_MODE_12k;
+      _encodingRate = 12000;
+      break;
+    }
+    case 14000: {
+      _encodingMode = AMRWB_MODE_14k;
+      _encodingRate = 14000;
+      break;
+    }
+    case 16000: {
+      _encodingMode = AMRWB_MODE_16k;
+      _encodingRate = 16000;
+      break;
+    }
+    case 18000: {
+      _encodingMode = AMRWB_MODE_18k;
+      _encodingRate = 18000;
+      break;
+    }
+    case 20000: {
+      _encodingMode = AMRWB_MODE_20k;
+      _encodingRate = 20000;
+      break;
+    }
+    case 23000: {
+      _encodingMode = AMRWB_MODE_23k;
+      _encodingRate = 23000;
+      break;
+    }
+    case 24000: {
+      _encodingMode = AMRWB_MODE_24k;
+      _encodingRate = 24000;
+      break;
+    }
+    default: {
+      return -1;
+    }
+  }
+  return 0;
+}
+
+void ACMAMRwb::InternalDestructEncoderInst(void* ptrInst) {
+  if (ptrInst != NULL) {
+    WebRtcAmrWb_FreeEnc(static_cast<AMRWB_encinst_t_*>(ptrInst));
+  }
+  return;
+}
+
+WebRtc_Word16 ACMAMRwb::SetAMRwbEncoderPackingFormat(
+    ACMAMRPackingFormat packingFormat) {
+  if ((packingFormat != AMRBandwidthEfficient) &&
+      (packingFormat != AMROctetAlligned) &&
+      (packingFormat != AMRFileStorage)) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "Invalid AMRwb encoder packing-format.");
+    return -1;
+  } else {
+    if (WebRtcAmrWb_EncodeBitmode(_encoderInstPtr, packingFormat) < 0) {
+      return -1;
+    } else {
+      _encoderPackingFormat = packingFormat;
+      return 0;
+    }
+  }
+}
+
+ACMAMRPackingFormat ACMAMRwb::AMRwbEncoderPackingFormat() const {
+  return _encoderPackingFormat;
+}
+
+WebRtc_Word16 ACMAMRwb::SetAMRwbDecoderPackingFormat(
+    ACMAMRPackingFormat packingFormat) {
+  if ((packingFormat != AMRBandwidthEfficient) &&
+      (packingFormat != AMROctetAlligned) &&
+      (packingFormat != AMRFileStorage)) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "Invalid AMRwb decoder packing-format.");
+    return -1;
+  } else {
+    if (WebRtcAmrWb_DecodeBitmode(_decoderInstPtr, packingFormat) < 0) {
+      return -1;
+    } else {
+      _decoderPackingFormat = packingFormat;
+      return 0;
+    }
+  }
+}
+
+ACMAMRPackingFormat ACMAMRwb::AMRwbDecoderPackingFormat() const {
+  return _decoderPackingFormat;
+}
+
+#endif
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/acm_amrwb.h b/src/modules/audio_coding/main/source/acm_amrwb.h
new file mode 100644
index 0000000..126302c
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_amrwb.h
@@ -0,0 +1,84 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMRWB_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMRWB_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct AMRWB_encinst_t_;
+struct AMRWB_decinst_t_;
+
+namespace webrtc {
+
+enum ACMAMRPackingFormat;
+
+class ACMAMRwb: public ACMGenericCodec {
+ public:
+  ACMAMRwb(WebRtc_Word16 codecID);
+  ~ACMAMRwb();
+  // for FEC
+  ACMGenericCodec* CreateInstance(void);
+
+  WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
+                               WebRtc_Word16* bitStreamLenByte);
+
+  WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams* codecParams);
+
+  WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams* codecParams);
+
+  WebRtc_Word16 SetAMRwbEncoderPackingFormat(
+      const ACMAMRPackingFormat packingFormat);
+
+  ACMAMRPackingFormat AMRwbEncoderPackingFormat() const;
+
+  WebRtc_Word16 SetAMRwbDecoderPackingFormat(
+      const ACMAMRPackingFormat packingFormat);
+
+  ACMAMRPackingFormat AMRwbDecoderPackingFormat() const;
+
+ protected:
+  WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitStream,
+                           WebRtc_Word16 bitStreamLenByte,
+                           WebRtc_Word16* audio, WebRtc_Word16* audioSamples,
+                           WebRtc_Word8* speechType);
+
+  WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                         const CodecInst& codecInst);
+
+  void DestructEncoderSafe();
+
+  void DestructDecoderSafe();
+
+  WebRtc_Word16 InternalCreateEncoder();
+
+  WebRtc_Word16 InternalCreateDecoder();
+
+  void InternalDestructEncoderInst(void* ptrInst);
+
+  WebRtc_Word16 SetBitRateSafe(const WebRtc_Word32 rate);
+
+  WebRtc_Word16 EnableDTX();
+
+  WebRtc_Word16 DisableDTX();
+
+  AMRWB_encinst_t_* _encoderInstPtr;
+  AMRWB_decinst_t_* _decoderInstPtr;
+
+  WebRtc_Word16 _encodingMode;
+  WebRtc_Word16 _encodingRate;
+  ACMAMRPackingFormat _encoderPackingFormat;
+  ACMAMRPackingFormat _decoderPackingFormat;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_AMRWB_H_
diff --git a/src/modules/audio_coding/main/source/acm_celt.cc b/src/modules/audio_coding/main/source/acm_celt.cc
new file mode 100644
index 0000000..d9678fd
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_celt.cc
@@ -0,0 +1,334 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "acm_celt.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+// TODO(tlegrand): Add full paths.
+
+#ifdef WEBRTC_CODEC_CELT
+// NOTE! Celt is not included in the open-source package. Modify this file or
+// your codec API to match the function call and name of used Celt API file.
+#include "celt_interface.h"
+#endif
+
+namespace webrtc {
+
+#ifndef WEBRTC_CODEC_CELT
+
+ACMCELT::ACMCELT(int16_t /* codecID */)
+    : enc_inst_ptr_(NULL),
+      dec_inst_ptr_(NULL),
+      sampling_freq_(0),
+      bitrate_(0),
+      channels_(1),
+      dec_channels_(1) {
+  return;
+}
+
+ACMCELT::~ACMCELT() {
+  return;
+}
+
+int16_t ACMCELT::InternalEncode(uint8_t* /* bitStream */,
+                                int16_t* /* bitStreamLenByte */) {
+  return -1;
+}
+
+int16_t ACMCELT::DecodeSafe(uint8_t* /* bitStream */,
+                            int16_t /* bitStreamLenByte */,
+                            int16_t* /* audio */,
+                            int16_t* /* audioSamples */,
+                            WebRtc_Word8* /* speechType */) {
+  return -1;
+}
+
+int16_t ACMCELT::InternalInitEncoder(WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+int16_t ACMCELT::InternalInitDecoder(WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+int32_t ACMCELT::CodecDef(WebRtcNetEQ_CodecDef& /* codecDef  */,
+                          const CodecInst& /* codecInst */) {
+  return -1;
+}
+
+ACMGenericCodec* ACMCELT::CreateInstance(void) {
+  return NULL;
+}
+
+int16_t ACMCELT::InternalCreateEncoder() {
+  return -1;
+}
+
+void ACMCELT::DestructEncoderSafe() {
+  return;
+}
+
+int16_t ACMCELT::InternalCreateDecoder() {
+  return -1;
+}
+
+void ACMCELT::DestructDecoderSafe() {
+  return;
+}
+
+void ACMCELT::InternalDestructEncoderInst(void* /* ptrInst */) {
+  return;
+}
+
+bool ACMCELT::IsTrueStereoCodec() {
+  return true;
+}
+
+int16_t ACMCELT::SetBitRateSafe(const int32_t /*rate*/) {
+  return -1;
+}
+
+void ACMCELT::SplitStereoPacket(uint8_t* /*payload*/,
+                                int32_t* /*payload_length*/) {}
+
+#else  //===================== Actual Implementation =======================
+
+ACMCELT::ACMCELT(int16_t codecID)
+    : enc_inst_ptr_(NULL),
+      dec_inst_ptr_(NULL),
+      sampling_freq_(32000),  // Default sampling frequency.
+      bitrate_(64000),  // Default rate.
+      channels_(1),  // Default send mono.
+      dec_channels_(1) {  // Default receive mono.
+  // TODO(tlegrand): remove later when ACMGenericCodec has a new constructor.
+  _codecID = codecID;
+
+  return;
+}
+
+ACMCELT::~ACMCELT() {
+  if (enc_inst_ptr_ != NULL) {
+    WebRtcCelt_FreeEnc(enc_inst_ptr_);
+    enc_inst_ptr_ = NULL;
+  }
+  if (dec_inst_ptr_ != NULL) {
+    WebRtcCelt_FreeDec(dec_inst_ptr_);
+    dec_inst_ptr_ = NULL;
+  }
+  return;
+}
+
+int16_t ACMCELT::InternalEncode(uint8_t* bitStream, int16_t* bitStreamLenByte) {
+  *bitStreamLenByte = 0;
+
+  // Call Encoder.
+  *bitStreamLenByte = WebRtcCelt_Encode(enc_inst_ptr_,
+                                        &_inAudio[_inAudioIxRead],
+                                        bitStream);
+
+  // Increment the read index this tell the caller that how far
+  // we have gone forward in reading the audio buffer.
+  _inAudioIxRead += _frameLenSmpl * channels_;
+
+  if (*bitStreamLenByte < 0) {
+    // Error reported from the encoder.
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "InternalEncode: Encode error for Celt");
+    *bitStreamLenByte = 0;
+    return -1;
+  }
+
+  return *bitStreamLenByte;
+}
+
+int16_t ACMCELT::DecodeSafe(uint8_t* /* bitStream */,
+                            int16_t /* bitStreamLenByte */,
+                            int16_t* /* audio */,
+                            int16_t* /* audioSamples */,
+                            WebRtc_Word8* /* speechType */) {
+  return 0;
+}
+
+int16_t ACMCELT::InternalInitEncoder(WebRtcACMCodecParams* codecParams) {
+  // Set bitrate and check that it is within the valid range.
+  int16_t status = SetBitRateSafe((codecParams->codecInstant).rate);
+  if (status < 0) {
+    return -1;
+  }
+
+  // If number of channels changed we need to re-create memory.
+  if (codecParams->codecInstant.channels != channels_) {
+    WebRtcCelt_FreeEnc(enc_inst_ptr_);
+    enc_inst_ptr_ = NULL;
+    // Store new number of channels.
+    channels_ = codecParams->codecInstant.channels;
+    if (WebRtcCelt_CreateEnc(&enc_inst_ptr_, channels_) < 0) {
+       return -1;
+    }
+  }
+
+  // Initiate encoder.
+  if (WebRtcCelt_EncoderInit(enc_inst_ptr_, channels_, bitrate_) >= 0) {
+    return 0;
+  } else {
+    return -1;
+  }
+}
+
+int16_t ACMCELT::InternalInitDecoder(WebRtcACMCodecParams* codecParams) {
+  // If number of channels changed we need to re-create memory.
+  if (codecParams->codecInstant.channels != dec_channels_) {
+    WebRtcCelt_FreeDec(dec_inst_ptr_);
+    dec_inst_ptr_ = NULL;
+    // Store new number of channels.
+    dec_channels_ = codecParams->codecInstant.channels;
+    if (WebRtcCelt_CreateDec(&dec_inst_ptr_, dec_channels_) < 0) {
+       return -1;
+    }
+  }
+
+  // Initiate decoder, both master and slave parts.
+  if (WebRtcCelt_DecoderInit(dec_inst_ptr_) < 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "InternalInitDecoder: init decoder failed for Celt.");
+    return -1;
+  }
+  if (WebRtcCelt_DecoderInitSlave(dec_inst_ptr_) < 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "InternalInitDecoder: init decoder failed for Celt.");
+    return -1;
+  }
+  return 0;
+}
+
+int32_t ACMCELT::CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                          const CodecInst& codecInst) {
+  if (!_decoderInitialized) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "CodecDef: Decoder uninitialized for Celt");
+    return -1;
+  }
+
+  // Fill up the structure by calling
+  // "SET_CODEC_PAR" and "SET_CELT_FUNCTIONS" or "SET_CELTSLAVE_FUNCTIONS".
+  // Then call NetEQ to add the codec to it's
+  // database.
+  if (codecInst.channels == 1) {
+    SET_CODEC_PAR(codecDef, kDecoderCELT_32, codecInst.pltype, dec_inst_ptr_,
+                  32000);
+  } else {
+    SET_CODEC_PAR(codecDef, kDecoderCELT_32_2ch, codecInst.pltype,
+                  dec_inst_ptr_, 32000);
+  }
+
+  // If this is the master of NetEQ, regular decoder will be added, otherwise
+  // the slave decoder will be used.
+  if (_isMaster) {
+    SET_CELT_FUNCTIONS(codecDef);
+  } else {
+    SET_CELTSLAVE_FUNCTIONS(codecDef);
+  }
+  return 0;
+}
+
+ACMGenericCodec* ACMCELT::CreateInstance(void) {
+  return NULL;
+}
+
+int16_t ACMCELT::InternalCreateEncoder() {
+  if (WebRtcCelt_CreateEnc(&enc_inst_ptr_, _noChannels) < 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "InternalCreateEncoder: create encoder failed for Celt");
+    return -1;
+  }
+  channels_ = _noChannels;
+  return 0;
+}
+
+void ACMCELT::DestructEncoderSafe() {
+  _encoderExist = false;
+  _encoderInitialized = false;
+  if (enc_inst_ptr_ != NULL) {
+    WebRtcCelt_FreeEnc(enc_inst_ptr_);
+    enc_inst_ptr_ = NULL;
+  }
+}
+
+int16_t ACMCELT::InternalCreateDecoder() {
+  if (WebRtcCelt_CreateDec(&dec_inst_ptr_, dec_channels_) < 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "InternalCreateDecoder: create decoder failed for Celt");
+    return -1;
+  }
+
+  return 0;
+}
+
+void ACMCELT::DestructDecoderSafe() {
+  _decoderExist = false;
+  _decoderInitialized = false;
+  if (dec_inst_ptr_ != NULL) {
+    WebRtcCelt_FreeDec(dec_inst_ptr_);
+    dec_inst_ptr_ = NULL;
+  }
+}
+
+void ACMCELT::InternalDestructEncoderInst(void* ptrInst) {
+  if (ptrInst != NULL) {
+    WebRtcCelt_FreeEnc(static_cast<CELT_encinst_t*>(ptrInst));
+  }
+  return;
+}
+
+bool ACMCELT::IsTrueStereoCodec() {
+  return true;
+}
+
+int16_t ACMCELT::SetBitRateSafe(const int32_t rate) {
+  // Check that rate is in the valid range.
+  if ((rate >= 48000) && (rate <= 128000)) {
+    // Store new rate.
+    bitrate_ = rate;
+
+    // Initiate encoder with new rate.
+    if (WebRtcCelt_EncoderInit(enc_inst_ptr_, channels_, bitrate_) >= 0) {
+      return 0;
+    } else {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "SetBitRateSafe: Failed to initiate Celt with rate %d",
+                   rate);
+      return -1;
+    }
+  } else {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "SetBitRateSafe: Invalid rate Celt, %d", rate);
+    return -1;
+  }
+}
+
+// Copy the stereo packet so that NetEq will insert into both master and slave.
+void ACMCELT::SplitStereoPacket(uint8_t* payload, int32_t* payload_length) {
+  // Check for valid inputs.
+  assert(payload != NULL);
+  assert(*payload_length > 0);
+
+  // Duplicate the payload.
+  memcpy(&payload[*payload_length], &payload[0],
+         sizeof(uint8_t) * (*payload_length));
+  // Double the size of the packet.
+  *payload_length *= 2;
+}
+
+#endif
+
+}  // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/acm_celt.h b/src/modules/audio_coding/main/source/acm_celt.h
new file mode 100644
index 0000000..18e36dd
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_celt.h
@@ -0,0 +1,74 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CELT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CELT_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct CELT_encinst_t_;
+struct CELT_decinst_t_;
+
+namespace webrtc {
+
+class ACMCELT : public ACMGenericCodec {
+ public:
+  ACMCELT(int16_t codecID);
+  ~ACMCELT();
+
+  ACMGenericCodec* CreateInstance(void);
+
+  int16_t InternalEncode(uint8_t* bitstream, int16_t* bitStreamLenByte);
+
+  int16_t InternalInitEncoder(WebRtcACMCodecParams *codecParams);
+
+  int16_t InternalInitDecoder(WebRtcACMCodecParams *codecParams);
+
+ protected:
+
+  WebRtc_Word16 DecodeSafe(
+      uint8_t* /* bitStream */,
+      int16_t /* bitStreamLenByte */,
+      int16_t* /* audio */,
+      int16_t* /* audioSamples */,
+      // TODO(leozwang): use int8_t here when WebRtc_Word8 is properly typed.
+      // http://code.google.com/p/webrtc/issues/detail?id=311
+      WebRtc_Word8* /* speechType */);
+
+  int32_t CodecDef(WebRtcNetEQ_CodecDef& codecDef, const CodecInst& codecInst);
+
+  void DestructEncoderSafe();
+
+  void DestructDecoderSafe();
+
+  int16_t InternalCreateEncoder();
+
+  int16_t InternalCreateDecoder();
+
+  void InternalDestructEncoderInst(void* ptrInst);
+
+  bool IsTrueStereoCodec();
+
+  int16_t SetBitRateSafe(const int32_t rate);
+
+  void SplitStereoPacket(uint8_t* payload, int32_t* payload_length);
+
+  CELT_encinst_t_* enc_inst_ptr_;
+  CELT_decinst_t_* dec_inst_ptr_;
+  uint16_t sampling_freq_;
+  int32_t bitrate_;
+  uint16_t channels_;
+  uint16_t dec_channels_;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CELT_H_
diff --git a/src/modules/audio_coding/main/source/acm_cng.cc b/src/modules/audio_coding/main/source/acm_cng.cc
new file mode 100644
index 0000000..2393346
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_cng.cc
@@ -0,0 +1,141 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_cng.h"
+#include "acm_codec_database.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_cng.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+namespace webrtc {
+
+ACMCNG::ACMCNG(WebRtc_Word16 codecID) {
+  _encoderInstPtr = NULL;
+  _decoderInstPtr = NULL;
+  _codecID = codecID;
+  _sampFreqHz = ACMCodecDB::CodecFreq(_codecID);
+  return;
+}
+
+ACMCNG::~ACMCNG() {
+  if (_encoderInstPtr != NULL) {
+    WebRtcCng_FreeEnc(_encoderInstPtr);
+    _encoderInstPtr = NULL;
+  }
+  if (_decoderInstPtr != NULL) {
+    WebRtcCng_FreeDec(_decoderInstPtr);
+    _decoderInstPtr = NULL;
+  }
+  return;
+}
+
+// CNG is not like a regular encoder, this function
+// should not be called normally
+// instead the following function is called from inside
+// ACMGenericCodec::ProcessFrameVADDTX
+WebRtc_Word16 ACMCNG::InternalEncode(WebRtc_UWord8* /* bitStream */,
+                                     WebRtc_Word16* /* bitStreamLenByte */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMCNG::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                 WebRtc_Word16 /* bitStreamLenByte */,
+                                 WebRtc_Word16* /* audio */,
+                                 WebRtc_Word16* /* audioSamples */,
+                                 WebRtc_Word8* /* speechType */) {
+  return 0;
+}
+
+// CNG is not like a regular encoder,
+// this function should not be called normally
+// instead the following function is called from inside
+// ACMGenericCodec::ProcessFrameVADDTX
+WebRtc_Word16 ACMCNG::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMCNG::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return WebRtcCng_InitDec(_decoderInstPtr);
+}
+
+WebRtc_Word32 ACMCNG::CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                               const CodecInst& codecInst) {
+  if (!_decoderInitialized) {
+    // TODO (tlegrand): log error
+    return -1;
+  }
+  // Fill up the structure by calling
+  // "SET_CODEC_PAR" & "SET_CNG_FUNCTION."
+  // Then return the structure back to NetEQ to add the codec to it's
+  // database.
+
+  if (_sampFreqHz == 8000 || _sampFreqHz == 16000 || _sampFreqHz == 32000) {
+    SET_CODEC_PAR((codecDef), kDecoderCNG, codecInst.pltype,
+        _decoderInstPtr, _sampFreqHz);
+    SET_CNG_FUNCTIONS((codecDef));
+    return 0;
+  } else {
+    return -1;
+  }
+}
+
+ACMGenericCodec* ACMCNG::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMCNG::InternalCreateEncoder() {
+  if (WebRtcCng_CreateEnc(&_encoderInstPtr) < 0) {
+    _encoderInstPtr = NULL;
+    return -1;
+  } else {
+    return 0;
+  }
+}
+
+void ACMCNG::DestructEncoderSafe() {
+  if (_encoderInstPtr != NULL) {
+    WebRtcCng_FreeEnc(_encoderInstPtr);
+    _encoderInstPtr = NULL;
+  }
+  _encoderExist = false;
+  _encoderInitialized = false;
+}
+
+WebRtc_Word16 ACMCNG::InternalCreateDecoder() {
+  if (WebRtcCng_CreateDec(&_decoderInstPtr) < 0) {
+    _decoderInstPtr = NULL;
+    return -1;
+  } else {
+    return 0;
+  }
+}
+
+void ACMCNG::DestructDecoderSafe() {
+  if (_decoderInstPtr != NULL) {
+    WebRtcCng_FreeDec(_decoderInstPtr);
+    _decoderInstPtr = NULL;
+  }
+  _decoderExist = false;
+  _decoderInitialized = false;
+}
+
+void ACMCNG::InternalDestructEncoderInst(void* ptrInst) {
+  if (ptrInst != NULL) {
+    WebRtcCng_FreeEnc(static_cast<CNG_enc_inst*>(ptrInst));
+  }
+  return;
+}
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/acm_cng.h b/src/modules/audio_coding/main/source/acm_cng.h
new file mode 100644
index 0000000..6276c44
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_cng.h
@@ -0,0 +1,70 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CNG_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CNG_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct WebRtcCngEncInst;
+struct WebRtcCngDecInst;
+
+namespace webrtc {
+
+class ACMCNG: public ACMGenericCodec {
+ public:
+  ACMCNG(WebRtc_Word16 codecID);
+  ~ACMCNG();
+  // for FEC
+  ACMGenericCodec* CreateInstance(void);
+
+  WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
+                               WebRtc_Word16* bitStreamLenByte);
+
+  WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codecParams);
+
+  WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codecParams);
+
+protected:
+  WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitStream,
+                           WebRtc_Word16 bitStreamLenByte,
+                           WebRtc_Word16* audio, WebRtc_Word16* audioSamples,
+                           WebRtc_Word8* speechType);
+
+  WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                         const CodecInst& codecInst);
+
+  void DestructEncoderSafe();
+
+  void DestructDecoderSafe();
+
+  WebRtc_Word16 InternalCreateEncoder();
+
+  WebRtc_Word16 InternalCreateDecoder();
+
+  void InternalDestructEncoderInst(void* ptrInst);
+
+  WebRtc_Word16 EnableDTX() {
+    return -1;
+  }
+
+  WebRtc_Word16 DisableDTX() {
+    return -1;
+  }
+
+  WebRtcCngEncInst* _encoderInstPtr;
+  WebRtcCngDecInst* _decoderInstPtr;
+  WebRtc_Word16 _sampFreqHz;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CNG_H_
diff --git a/src/modules/audio_coding/main/source/acm_codec_database.cc b/src/modules/audio_coding/main/source/acm_codec_database.cc
new file mode 100644
index 0000000..b782194
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_codec_database.cc
@@ -0,0 +1,948 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file generates databases with information about all supported audio
+ * codecs.
+ */
+
+// TODO(tlegrand): Change constant input pointers in all functions to constant
+// references, where appropriate.
+#include "acm_codec_database.h"
+
+#include <stdio.h>
+
+#include "acm_common_defs.h"
+#include "trace.h"
+
+// Includes needed to create the codecs.
+// G.711, PCM mu-law and A-law.
+#include "acm_pcma.h"
+#include "acm_pcmu.h"
+#include "g711_interface.h"
+// CNG.
+#include "acm_cng.h"
+#include "webrtc_cng.h"
+// NetEQ.
+#include "webrtc_neteq.h"
+#ifdef WEBRTC_CODEC_ISAC
+    #include "acm_isac.h"
+    #include "acm_isac_macros.h"
+    #include "isac.h"
+#endif
+#ifdef WEBRTC_CODEC_ISACFX
+    #include "acm_isac.h"
+    #include "acm_isac_macros.h"
+    #include "isacfix.h"
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+    #include "pcm16b.h"
+    #include "acm_pcm16b.h"
+#endif
+#ifdef WEBRTC_CODEC_ILBC
+    #include "acm_ilbc.h"
+    #include "ilbc.h"
+#endif
+#ifdef WEBRTC_CODEC_AMR
+    #include "acm_amr.h"
+    #include "amr_interface.h"
+#endif
+#ifdef WEBRTC_CODEC_AMRWB
+    #include "acm_amrwb.h"
+    #include "amrwb_interface.h"
+#endif
+#ifdef WEBRTC_CODEC_CELT
+    #include "acm_celt.h"
+    #include "celt_interface.h"
+#endif
+#ifdef WEBRTC_CODEC_G722
+    #include "acm_g722.h"
+    #include "g722_interface.h"
+#endif
+#ifdef WEBRTC_CODEC_G722_1
+    #include "acm_g7221.h"
+    #include "g7221_interface.h"
+#endif
+#ifdef WEBRTC_CODEC_G722_1C
+    #include "acm_g7221c.h"
+    #include "g7221c_interface.h"
+#endif
+#ifdef WEBRTC_CODEC_G729
+    #include "acm_g729.h"
+    #include "g729_interface.h"
+#endif
+#ifdef WEBRTC_CODEC_G729_1
+    #include "acm_g7291.h"
+    #include "g7291_interface.h"
+#endif
+#ifdef WEBRTC_CODEC_GSMFR
+    #include "acm_gsmfr.h"
+    #include "gsmfr_interface.h"
+#endif
+#ifdef WEBRTC_CODEC_SPEEX
+    #include "acm_speex.h"
+    #include "speex_interface.h"
+#endif
+#ifdef WEBRTC_CODEC_AVT
+    #include "acm_dtmf_playout.h"
+#endif
+#ifdef WEBRTC_CODEC_RED
+    #include "acm_red.h"
+#endif
+
+namespace webrtc {
+
+// We dynamically allocate some of the dynamic payload types to the defined
+// codecs. Note! There are a limited number of payload types. If more codecs
+// are defined they will receive reserved fixed payload types (values 69-95).
+const int kDynamicPayloadtypes[ACMCodecDB::kMaxNumCodecs] = {
+  105, 107, 108, 109, 111, 112, 113, 114, 115, 116, 117, 120,
+  121, 122, 123, 124, 125, 126, 101, 100,  97,  96,  95,  94,
+   93,  92,  91,  90,  89,  88,  87,  86,  85,  84,  83,  82,
+   81,  80,  79,  78,  77,  76,  75,  74,  73,  72,  71,  70,
+   69,
+};
+
+// Creates database with all supported codecs at compile time.
+// Each entry needs the following parameters in the given order:
+// payload type, name, sampling frequency, packet size in samples,
+// number of channels, and default rate.
+#if (defined(WEBRTC_CODEC_PCM16) || \
+     defined(WEBRTC_CODEC_AMR) || defined(WEBRTC_CODEC_AMRWB) || \
+     defined(WEBRTC_CODEC_CELT) || defined(WEBRTC_CODEC_G729_1) || \
+     defined(WEBRTC_CODEC_SPEEX) || defined(WEBRTC_CODEC_G722_1) || \
+     defined(WEBRTC_CODEC_G722_1C))
+static int count_database = 0;
+#endif
+
+const CodecInst ACMCodecDB::database_[] = {
+#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
+  {103, "ISAC", 16000, kIsacPacSize480, 1, kIsacWbDefaultRate},
+# if (defined(WEBRTC_CODEC_ISAC))
+  {104, "ISAC", 32000, kIsacPacSize960, 1, kIsacSwbDefaultRate},
+# endif
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+  // Mono
+  {kDynamicPayloadtypes[count_database++], "L16", 8000, 80, 1, 128000},
+  {kDynamicPayloadtypes[count_database++], "L16", 16000, 160, 1, 256000},
+  {kDynamicPayloadtypes[count_database++], "L16", 32000, 320, 1, 512000},
+  // Stereo
+  {kDynamicPayloadtypes[count_database++], "L16", 8000, 80, 2, 128000},
+  {kDynamicPayloadtypes[count_database++], "L16", 16000, 160, 2, 256000},
+  {kDynamicPayloadtypes[count_database++], "L16", 32000, 320, 2, 512000},
+#endif
+  // G.711, PCM mu-law and A-law.
+  // Mono
+  {0, "PCMU", 8000, 160, 1, 64000},
+  {8, "PCMA", 8000, 160, 1, 64000},
+  // Stereo
+  {110, "PCMU", 8000, 160, 2, 64000},
+  {118, "PCMA", 8000, 160, 2, 64000},
+#ifdef WEBRTC_CODEC_ILBC
+  {102, "ILBC", 8000, 240, 1, 13300},
+#endif
+#ifdef WEBRTC_CODEC_AMR
+  {kDynamicPayloadtypes[count_database++], "AMR", 8000, 160, 1, 12200},
+#endif
+#ifdef WEBRTC_CODEC_AMRWB
+  {kDynamicPayloadtypes[count_database++], "AMR-WB", 16000, 320, 1, 20000},
+#endif
+#ifdef WEBRTC_CODEC_CELT
+  // Mono
+  {kDynamicPayloadtypes[count_database++], "CELT", 32000, 640, 1, 64000},
+  // Stereo
+  {kDynamicPayloadtypes[count_database++], "CELT", 32000, 640, 2, 64000},
+#endif
+#ifdef WEBRTC_CODEC_G722
+  // Mono
+  {9, "G722", 16000, 320, 1, 64000},
+  // Stereo
+  {119, "G722", 16000, 320, 2, 64000},
+#endif
+#ifdef WEBRTC_CODEC_G722_1
+  {kDynamicPayloadtypes[count_database++], "G7221", 16000, 320, 1, 32000},
+  {kDynamicPayloadtypes[count_database++], "G7221", 16000, 320, 1, 24000},
+  {kDynamicPayloadtypes[count_database++], "G7221", 16000, 320, 1, 16000},
+#endif
+#ifdef WEBRTC_CODEC_G722_1C
+  {kDynamicPayloadtypes[count_database++], "G7221", 32000, 640, 1, 48000},
+  {kDynamicPayloadtypes[count_database++], "G7221", 32000, 640, 1, 32000},
+  {kDynamicPayloadtypes[count_database++], "G7221", 32000, 640, 1, 24000},
+#endif
+#ifdef WEBRTC_CODEC_G729
+  {18, "G729", 8000, 240, 1, 8000},
+#endif
+#ifdef WEBRTC_CODEC_G729_1
+  {kDynamicPayloadtypes[count_database++], "G7291", 16000, 320, 1, 32000},
+#endif
+#ifdef WEBRTC_CODEC_GSMFR
+  {3, "GSM", 8000, 160, 1, 13200},
+#endif
+#ifdef WEBRTC_CODEC_SPEEX
+  {kDynamicPayloadtypes[count_database++], "speex", 8000, 160, 1, 11000},
+  {kDynamicPayloadtypes[count_database++], "speex", 16000, 320, 1, 22000},
+#endif
+  // Comfort noise for three different sampling frequencies.
+  {13, "CN", 8000, 240, 1, 0},
+  {98, "CN", 16000, 480, 1, 0},
+  {99, "CN", 32000, 960, 1, 0},
+#ifdef WEBRTC_CODEC_AVT
+  {106, "telephone-event", 8000, 240, 1, 0},
+#endif
+#ifdef WEBRTC_CODEC_RED
+  {127, "red", 8000, 0, 1, 0},
+#endif
+  // To prevent compile errors due to trailing commas.
+  {-1, "Null", -1, -1, -1, -1}
+};
+
+// Create database with all codec settings at compile time.
+// Each entry needs the following parameters in the given order:
+// Number of allowed packet sizes, a vector with the allowed packet sizes,
+// Basic block samples, max number of channels that are supported.
+const ACMCodecDB::CodecSettings ACMCodecDB::codec_settings_[] = {
+#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
+  {2, {kIsacPacSize480, kIsacPacSize960}, 0, 1},
+# if (defined(WEBRTC_CODEC_ISAC))
+  {1, {kIsacPacSize960}, 0, 1},
+# endif
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+  // Mono
+  {4, {80, 160, 240, 320}, 0, 2},
+  {4, {160, 320, 480, 640}, 0, 2},
+  {2, {320, 640}, 0, 2},
+  // Stereo
+  {4, {80, 160, 240, 320}, 0, 2},
+  {4, {160, 320, 480, 640}, 0, 2},
+  {2, {320, 640}, 0, 2},
+#endif
+  // G.711, PCM mu-law and A-law.
+  // Mono
+  {6, {80, 160, 240, 320, 400, 480}, 0, 2},
+  {6, {80, 160, 240, 320, 400, 480}, 0, 2},
+  // Stereo
+  {6, {80, 160, 240, 320, 400, 480}, 0, 2},
+  {6, {80, 160, 240, 320, 400, 480}, 0, 2},
+#ifdef WEBRTC_CODEC_ILBC
+  {4, {160, 240, 320, 480}, 0, 1},
+#endif
+#ifdef WEBRTC_CODEC_AMR
+  {3, {160, 320, 480}, 0, 1},
+#endif
+#ifdef WEBRTC_CODEC_AMRWB
+  {3, {320, 640, 960}, 0, 1},
+#endif
+#ifdef WEBRTC_CODEC_CELT
+  // Mono
+  {1, {640}, 0, 2},
+  // Stereo
+  {1, {640}, 0, 2},
+#endif
+#ifdef WEBRTC_CODEC_G722
+  // Mono
+  {6, {160, 320, 480, 640, 800, 960}, 0, 2},
+  // Stereo
+  {6, {160, 320, 480, 640, 800, 960}, 0, 2},
+#endif
+#ifdef WEBRTC_CODEC_G722_1
+  {1, {320}, 320, 1},
+  {1, {320}, 320, 1},
+  {1, {320}, 320, 1},
+#endif
+#ifdef WEBRTC_CODEC_G722_1C
+  {1, {640}, 640, 1},
+  {1, {640}, 640, 1},
+  {1, {640}, 640, 1},
+#endif
+#ifdef WEBRTC_CODEC_G729
+  {6, {80, 160, 240, 320, 400, 480}, 0, 1},
+#endif
+#ifdef WEBRTC_CODEC_G729_1
+  {3, {320, 640, 960}, 0, 1},
+#endif
+#ifdef WEBRTC_CODEC_GSMFR
+  {3, {160, 320, 480}, 160, 1},
+#endif
+#ifdef WEBRTC_CODEC_SPEEX
+  {3, {160, 320, 480}, 0, 1},
+  {3, {320, 640, 960}, 0, 1},
+#endif
+  // Comfort noise for three different sampling frequencies.
+  {1, {240}, 240, 1},
+  {1, {480}, 480, 1},
+  {1, {960}, 960, 1},
+#ifdef WEBRTC_CODEC_AVT
+  {1, {240}, 240, 1},
+#endif
+#ifdef WEBRTC_CODEC_RED
+  {1, {0}, 0, 1},
+#endif
+  // To prevent compile errors due to trailing commas.
+  {-1, {-1}, -1, -1}
+};
+
+// Create a database of all NetEQ decoders at compile time.
+const WebRtcNetEQDecoder ACMCodecDB::neteq_decoders_[] = {
+#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
+  kDecoderISAC,
+# if (defined(WEBRTC_CODEC_ISAC))
+  kDecoderISACswb,
+# endif
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+  // Mono
+  kDecoderPCM16B,
+  kDecoderPCM16Bwb,
+  kDecoderPCM16Bswb32kHz,
+  // Stereo
+  kDecoderPCM16B_2ch,
+  kDecoderPCM16Bwb_2ch,
+  kDecoderPCM16Bswb32kHz_2ch,
+#endif
+  // G.711, PCM mu-las and A-law.
+  // Mono
+  kDecoderPCMu,
+  kDecoderPCMa,
+  // Stereo
+  kDecoderPCMu_2ch,
+  kDecoderPCMa_2ch,
+#ifdef WEBRTC_CODEC_ILBC
+  kDecoderILBC,
+#endif
+#ifdef WEBRTC_CODEC_AMR
+  kDecoderAMR,
+#endif
+#ifdef WEBRTC_CODEC_AMRWB
+  kDecoderAMRWB,
+#endif
+#ifdef WEBRTC_CODEC_CELT
+  // Mono
+  kDecoderCELT_32,
+  // Stereo
+  kDecoderCELT_32_2ch,
+#endif
+#ifdef WEBRTC_CODEC_G722
+  // Mono
+  kDecoderG722,
+  // Stereo
+  kDecoderG722_2ch,
+#endif
+#ifdef WEBRTC_CODEC_G722_1
+  kDecoderG722_1_32,
+  kDecoderG722_1_24,
+  kDecoderG722_1_16,
+#endif
+#ifdef WEBRTC_CODEC_G722_1C
+  kDecoderG722_1C_48,
+  kDecoderG722_1C_32,
+  kDecoderG722_1C_24,
+#endif
+#ifdef WEBRTC_CODEC_G729
+  kDecoderG729,
+#endif
+#ifdef WEBRTC_CODEC_G729_1
+  kDecoderG729_1,
+#endif
+#ifdef WEBRTC_CODEC_GSMFR
+  kDecoderGSMFR,
+#endif
+#ifdef WEBRTC_CODEC_SPEEX
+  kDecoderSPEEX_8,
+  kDecoderSPEEX_16,
+#endif
+  // Comfort noise for three different sampling frequencies.
+  kDecoderCNG,
+  kDecoderCNG,
+  kDecoderCNG,
+#ifdef WEBRTC_CODEC_AVT
+  kDecoderAVT,
+#endif
+#ifdef WEBRTC_CODEC_RED
+  kDecoderRED,
+#endif
+  kDecoderReservedEnd
+};
+
+// Get codec information from database.
+// TODO(tlegrand): replace memcpy with a pointer to the data base memory.
+int ACMCodecDB::Codec(int codec_id, CodecInst* codec_inst) {
+  // Error check to see that codec_id is not out of bounds.
+  if ((codec_id < 0) || (codec_id >= kNumCodecs)) {
+    return -1;
+  }
+
+  // Copy database information for the codec to the output.
+  memcpy(codec_inst, &database_[codec_id], sizeof(CodecInst));
+
+  return 0;
+}
+
+// Enumerator for error codes when asking for codec database id.
+enum {
+  kInvalidCodec = -10,
+  kInvalidPayloadtype = -30,
+  kInvalidPacketSize = -40,
+  kInvalidRate = -50
+};
+
+// Gets the codec id number from the database. If there is some mismatch in
+// the codec settings, an error message will be recorded in the error string.
+// NOTE! Only the first mismatch found will be recorded in the error string.
+int ACMCodecDB::CodecNumber(const CodecInst* codec_inst, int* mirror_id,
+                            char* err_message, int max_message_len_byte) {
+  int codec_id = ACMCodecDB::CodecNumber(codec_inst, mirror_id);
+
+  // Write error message if ACMCodecDB::CodecNumber() returned error.
+  if ((codec_id < 0) && (err_message != NULL)) {
+    char my_err_msg[1000];
+
+    if (codec_id == kInvalidCodec) {
+      sprintf(my_err_msg, "Call to ACMCodecDB::CodecNumber failed, Codec not "
+              "found");
+    } else if (codec_id == kInvalidPayloadtype) {
+      sprintf(my_err_msg, "Call to ACMCodecDB::CodecNumber failed, payload "
+              "number %d is out of range for %s", codec_inst->pltype,
+              codec_inst->plname);
+    } else if (codec_id == kInvalidPacketSize) {
+      sprintf(my_err_msg, "Call to ACMCodecDB::CodecNumber failed, Packet "
+              "size is out of range for %s", codec_inst->plname);
+    } else if (codec_id == kInvalidRate) {
+      sprintf(my_err_msg, "Call to ACMCodecDB::CodecNumber failed, rate=%d "
+              "is not a valid rate for %s", codec_inst->rate,
+              codec_inst->plname);
+    } else {
+      // Other error
+      sprintf(my_err_msg, "invalid codec parameters to be registered, "
+              "ACMCodecDB::CodecNumber failed");
+    }
+
+    strncpy(err_message, my_err_msg, max_message_len_byte - 1);
+    // make sure that the message is null-terminated.
+    err_message[max_message_len_byte - 1] = '\0';
+  }
+
+  return codec_id;
+}
+
+// Gets the codec id number from the database. If there is some mismatch in
+// the codec settings, the function will return an error code.
+// NOTE! The first mismatch found will generate the return value.
+int ACMCodecDB::CodecNumber(const CodecInst* codec_inst, int* mirror_id) {
+  // Look for a matching codec in the database.
+  int codec_id = CodecId(codec_inst);
+
+  // Checks if we found a matching codec.
+  if (codec_id == -1) {
+    return kInvalidCodec;
+  }
+
+  // Checks the validity of payload type
+  if (!ValidPayloadType(codec_inst->pltype)) {
+    return kInvalidPayloadtype;
+  }
+
+  // Comfort Noise is special case, packet-size & rate is not checked.
+  if (STR_CASE_CMP(database_[codec_id].plname, "CN") == 0) {
+    *mirror_id = codec_id;
+    return codec_id;
+  }
+
+  // RED is special case, packet-size & rate is not checked.
+  if (STR_CASE_CMP(database_[codec_id].plname, "red") == 0) {
+    *mirror_id = codec_id;
+    return codec_id;
+  }
+
+  // Checks the validity of packet size.
+  if (codec_settings_[codec_id].num_packet_sizes > 0) {
+    bool packet_size_ok = false;
+    int i;
+    int packet_size_samples;
+    for (i = 0; i < codec_settings_[codec_id].num_packet_sizes; i++) {
+      packet_size_samples =
+          codec_settings_[codec_id].packet_sizes_samples[i];
+      if (codec_inst->pacsize == packet_size_samples) {
+        packet_size_ok = true;
+        break;
+      }
+    }
+
+    if (!packet_size_ok) {
+      return kInvalidPacketSize;
+    }
+  }
+
+  if (codec_inst->pacsize < 1) {
+    return kInvalidPacketSize;
+  }
+
+  // Check the validity of rate. Codecs with multiple rates have their own
+  // function for this.
+  *mirror_id = codec_id;
+  if (STR_CASE_CMP("isac", codec_inst->plname) == 0) {
+    if (IsISACRateValid(codec_inst->rate)) {
+      // Set mirrorID to iSAC WB which is only created once to be used both for
+      // iSAC WB and SWB, because they need to share struct.
+      *mirror_id = kISAC;
+      return  codec_id;
+    } else {
+      return kInvalidRate;
+    }
+  } else if (STR_CASE_CMP("ilbc", codec_inst->plname) == 0) {
+    return IsILBCRateValid(codec_inst->rate, codec_inst->pacsize)
+        ? codec_id : kInvalidRate;
+  } else if (STR_CASE_CMP("amr", codec_inst->plname) == 0) {
+    return IsAMRRateValid(codec_inst->rate)
+        ? codec_id : kInvalidRate;
+  } else if (STR_CASE_CMP("amr-wb", codec_inst->plname) == 0) {
+    return IsAMRwbRateValid(codec_inst->rate)
+        ? codec_id : kInvalidRate;
+  } else if (STR_CASE_CMP("g7291", codec_inst->plname) == 0) {
+    return IsG7291RateValid(codec_inst->rate)
+        ? codec_id : kInvalidRate;
+  } else if (STR_CASE_CMP("speex", codec_inst->plname) == 0) {
+    return IsSpeexRateValid(codec_inst->rate)
+        ? codec_id : kInvalidRate;
+  } else if (STR_CASE_CMP("celt", codec_inst->plname) == 0) {
+    return IsCeltRateValid(codec_inst->rate)
+        ? codec_id : kInvalidRate;
+  }
+
+  return IsRateValid(codec_id, codec_inst->rate) ?
+      codec_id : kInvalidRate;
+}
+
+// Looks for a matching payload name, frequency, and channels in the
+// codec list. Need to check all three since some codecs have several codec
+// entries with different frequencies and/or channels.
+// Does not check other codec settings, such as payload type and packet size.
+// Returns the id of the codec, or -1 if no match is found.
+int ACMCodecDB::CodecId(const CodecInst* codec_inst) {
+  return (CodecId(codec_inst->plname, codec_inst->plfreq,
+                  codec_inst->channels));
+}
+
+int ACMCodecDB::CodecId(const char* payload_name, int frequency, int channels) {
+  for (int id = 0; id < kNumCodecs; id++) {
+    bool name_match = false;
+    bool frequency_match = false;
+    bool channels_match = false;
+
+    // Payload name, sampling frequency and number of channels need to match.
+    // NOTE! If |frequency| is -1, the frequency is not applicable, and is
+    // always treated as true, like for RED.
+    name_match = (STR_CASE_CMP(database_[id].plname, payload_name) == 0);
+    frequency_match = (frequency == database_[id].plfreq) || (frequency == -1);
+    channels_match = (channels == database_[id].channels);
+
+    if (name_match && frequency_match && channels_match) {
+      // We have found a matching codec in the list.
+      return id;
+    }
+  }
+
+  // We didn't find a matching codec.
+  return -1;
+}
+// Gets codec id number, and mirror id, from database for the receiver.
+int ACMCodecDB::ReceiverCodecNumber(const CodecInst* codec_inst,
+    int* mirror_id) {
+  // Look for a matching codec in the database.
+  int codec_id = CodecId(codec_inst);
+
+  // Set |mirror_id| to |codec_id|, except for iSAC. In case of iSAC we always
+  // set |mirror_id| to iSAC WB (kISAC) which is only created once to be used
+  // both for iSAC WB and SWB, because they need to share struct.
+  if (STR_CASE_CMP(codec_inst->plname, "ISAC") != 0) {
+    *mirror_id = codec_id;
+  } else {
+    *mirror_id = kISAC;
+  }
+
+  return codec_id;
+}
+
+// Returns the codec sampling frequency for codec with id = "codec_id" in
+// database.
+int ACMCodecDB::CodecFreq(int codec_id) {
+  // Error check to see that codec_id is not out of bounds.
+  if (codec_id < 0 || codec_id >= kNumCodecs) {
+    return -1;
+  }
+
+  return database_[codec_id].plfreq;
+}
+
+// Returns the codec's basic coding block size in samples.
+int ACMCodecDB::BasicCodingBlock(int codec_id) {
+  // Error check to see that codec_id is not out of bounds.
+  if (codec_id < 0 || codec_id >= kNumCodecs) {
+      return -1;
+  }
+
+  return codec_settings_[codec_id].basic_block_samples;
+}
+
+// Returns the NetEQ decoder database.
+const WebRtcNetEQDecoder* ACMCodecDB::NetEQDecoders() {
+  return neteq_decoders_;
+}
+
+// Gets mirror id. The Id is used for codecs sharing struct for settings that
+// need different payload types.
+int ACMCodecDB::MirrorID(int codec_id) {
+  if (STR_CASE_CMP(database_[codec_id].plname, "isac") == 0) {
+    return kISAC;
+  } else {
+    return codec_id;
+  }
+}
+
+// Creates memory/instance for storing codec state.
+ACMGenericCodec* ACMCodecDB::CreateCodecInstance(const CodecInst* codec_inst) {
+  // All we have support for right now.
+  if (!STR_CASE_CMP(codec_inst->plname, "ISAC")) {
+#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
+    return new ACMISAC(kISAC);
+#endif
+  } else if (!STR_CASE_CMP(codec_inst->plname, "PCMU")) {
+    if (codec_inst->channels == 1) {
+      return new ACMPCMU(kPCMU);
+    } else {
+      return new ACMPCMU(kPCMU_2ch);
+    }
+  } else if (!STR_CASE_CMP(codec_inst->plname, "PCMA")) {
+    if (codec_inst->channels == 1) {
+      return new ACMPCMA(kPCMA);
+    } else {
+      return new ACMPCMA(kPCMA_2ch);
+    }
+  } else if (!STR_CASE_CMP(codec_inst->plname, "ILBC")) {
+#ifdef WEBRTC_CODEC_ILBC
+    return new ACMILBC(kILBC);
+#endif
+  } else if (!STR_CASE_CMP(codec_inst->plname, "AMR")) {
+#ifdef WEBRTC_CODEC_AMR
+    return new ACMAMR(kGSMAMR);
+#endif
+  } else if (!STR_CASE_CMP(codec_inst->plname, "AMR-WB")) {
+#ifdef WEBRTC_CODEC_AMRWB
+    return new ACMAMRwb(kGSMAMRWB);
+#endif
+  } else if (!STR_CASE_CMP(codec_inst->plname, "CELT")) {
+#ifdef WEBRTC_CODEC_CELT
+    if (codec_inst->channels == 1) {
+      return new ACMCELT(kCELT32);
+    } else {
+      return new ACMCELT(kCELT32_2ch);
+    }
+#endif
+  } else if (!STR_CASE_CMP(codec_inst->plname, "G722")) {
+#ifdef WEBRTC_CODEC_G722
+    if (codec_inst->channels == 1) {
+      return new ACMG722(kG722);
+    } else {
+      return new ACMG722(kG722_2ch);
+    }
+#endif
+  } else if (!STR_CASE_CMP(codec_inst->plname, "G7221")) {
+    switch (codec_inst->plfreq) {
+      case 16000: {
+#ifdef WEBRTC_CODEC_G722_1
+        int codec_id;
+        switch (codec_inst->rate) {
+          case 16000 : {
+            codec_id = kG722_1_16;
+            break;
+          }
+          case 24000 : {
+            codec_id = kG722_1_24;
+            break;
+          }
+          case 32000 : {
+            codec_id = kG722_1_32;
+            break;
+          }
+          default: {
+            return NULL;
+          }
+          return new ACMG722_1(codec_id);
+        }
+#endif
+      }
+      case 32000: {
+#ifdef WEBRTC_CODEC_G722_1C
+        int codec_id;
+        switch (codec_inst->rate) {
+          case 24000 : {
+            codec_id = kG722_1C_24;
+            break;
+          }
+          case 32000 : {
+            codec_id = kG722_1C_32;
+            break;
+          }
+          case 48000 : {
+            codec_id = kG722_1C_48;
+            break;
+          }
+          default: {
+            return NULL;
+          }
+          return new ACMG722_1C(codec_id);
+        }
+#endif
+      }
+    }
+  } else if (!STR_CASE_CMP(codec_inst->plname, "CN")) {
+    // For CN we need to check sampling frequency to know what codec to create.
+    int codec_id;
+    switch (codec_inst->plfreq) {
+      case 8000: {
+        codec_id = kCNNB;
+        break;
+      }
+      case 16000: {
+        codec_id = kCNWB;
+        break;
+      }
+      case 32000: {
+        codec_id = kCNSWB;
+        break;
+      }
+      default: {
+        return NULL;
+      }
+    }
+    return new ACMCNG(codec_id);
+  } else if (!STR_CASE_CMP(codec_inst->plname, "G729")) {
+#ifdef WEBRTC_CODEC_G729
+    return new ACMG729(kG729);
+#endif
+  } else if (!STR_CASE_CMP(codec_inst->plname, "G7291")) {
+#ifdef WEBRTC_CODEC_G729_1
+    return new ACMG729_1(kG729_1);
+#endif
+  } else if (!STR_CASE_CMP(codec_inst->plname, "speex")) {
+#ifdef WEBRTC_CODEC_SPEEX
+    int codec_id;
+    switch (codec_inst->plfreq) {
+      case 8000: {
+        codec_id = kSPEEX8;
+        break;
+      }
+      case 16000: {
+        codec_id = kSPEEX16;
+        break;
+      }
+      default: {
+        return NULL;
+      }
+    }
+    return new ACMSPEEX(codec_id);
+#endif
+  } else if (!STR_CASE_CMP(codec_inst->plname, "CN")) {
+    // For CN we need to check sampling frequency to know what codec to create.
+    int codec_id;
+    switch (codec_inst->plfreq) {
+      case 8000: {
+        codec_id = kCNNB;
+        break;
+      }
+      case 16000: {
+        codec_id = kCNWB;
+        break;
+      }
+      case 32000: {
+        codec_id = kCNSWB;
+        break;
+      }
+      default: {
+        return NULL;
+      }
+    }
+    return new ACMCNG(codec_id);
+  } else if (!STR_CASE_CMP(codec_inst->plname, "L16")) {
+#ifdef WEBRTC_CODEC_PCM16
+    // For L16 we need to check sampling frequency to know what codec to create.
+    int codec_id;
+    if (codec_inst->channels == 1) {
+      switch (codec_inst->plfreq) {
+        case 8000: {
+          codec_id = kPCM16B;
+          break;
+        }
+        case 16000: {
+          codec_id = kPCM16Bwb;
+          break;
+        }
+        case 32000: {
+          codec_id = kPCM16Bswb32kHz;
+          break;
+        }
+        default: {
+          return NULL;
+        }
+      }
+    } else {
+      switch (codec_inst->plfreq) {
+        case 8000: {
+          codec_id = kPCM16B_2ch;
+          break;
+        }
+        case 16000: {
+          codec_id = kPCM16Bwb_2ch;
+          break;
+        }
+        case 32000: {
+          codec_id = kPCM16Bswb32kHz_2ch;
+          break;
+        }
+        default: {
+          return NULL;
+        }
+      }
+    }
+    return new ACMPCM16B(codec_id);
+#endif
+  } else if (!STR_CASE_CMP(codec_inst->plname, "telephone-event")) {
+#ifdef WEBRTC_CODEC_AVT
+    return new ACMDTMFPlayout(kAVT);
+#endif
+  } else if (!STR_CASE_CMP(codec_inst->plname, "red")) {
+#ifdef WEBRTC_CODEC_RED
+    return new ACMRED(kRED);
+#endif
+  }
+  return NULL;
+}
+
+// Checks if the bitrate is valid for the codec.
+bool ACMCodecDB::IsRateValid(int codec_id, int rate) {
+  if (database_[codec_id].rate == rate) {
+    return true;
+  } else {
+    return false;
+  }
+}
+
+// Checks if the bitrate is valid for iSAC.
+bool ACMCodecDB::IsISACRateValid(int rate) {
+  if ((rate == -1) || ((rate <= 56000) && (rate >= 10000))) {
+    return true;
+  } else {
+    return false;
+  }
+}
+
+// Checks if the bitrate is valid for iLBC.
+bool ACMCodecDB::IsILBCRateValid(int rate, int frame_size_samples) {
+  if (((frame_size_samples == 240) || (frame_size_samples == 480)) &&
+      (rate == 13300)) {
+    return true;
+  } else if (((frame_size_samples == 160) || (frame_size_samples == 320)) &&
+      (rate == 15200)) {
+    return true;
+  } else {
+    return false;
+  }
+}
+
+// Check if the bitrate is valid for the GSM-AMR.
+bool ACMCodecDB::IsAMRRateValid(int rate) {
+  switch (rate) {
+    case 4750:
+    case 5150:
+    case 5900:
+    case 6700:
+    case 7400:
+    case 7950:
+    case 10200:
+    case 12200: {
+      return true;
+    }
+    default: {
+      return false;
+    }
+  }
+}
+
+// Check if the bitrate is valid for GSM-AMR-WB.
+bool ACMCodecDB::IsAMRwbRateValid(int rate) {
+  switch (rate) {
+    case 7000:
+    case 9000:
+    case 12000:
+    case 14000:
+    case 16000:
+    case 18000:
+    case 20000:
+    case 23000:
+    case 24000: {
+      return true;
+    }
+    default: {
+      return false;
+    }
+  }
+}
+
+// Check if the bitrate is valid for G.729.1.
+bool ACMCodecDB::IsG7291RateValid(int rate) {
+  switch (rate) {
+    case 8000:
+    case 12000:
+    case 14000:
+    case 16000:
+    case 18000:
+    case 20000:
+    case 22000:
+    case 24000:
+    case 26000:
+    case 28000:
+    case 30000:
+    case 32000: {
+      return true;
+    }
+    default: {
+      return false;
+    }
+  }
+}
+
+// Checks if the bitrate is valid for Speex.
+bool ACMCodecDB::IsSpeexRateValid(int rate) {
+  if (rate > 2000) {
+    return true;
+  } else {
+    return false;
+  }
+}
+
+// Checks if the bitrate is valid for Celt.
+bool ACMCodecDB::IsCeltRateValid(int rate) {
+  if ((rate >= 48000) && (rate <= 128000)) {
+    return true;
+  } else {
+    return false;
+  }
+}
+
+// Checks if the payload type is in the valid range.
+bool ACMCodecDB::ValidPayloadType(int payload_type) {
+  if ((payload_type < 0) || (payload_type > 127)) {
+    return false;
+  }
+  return true;
+}
+
+}  // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/acm_codec_database.h b/src/modules/audio_coding/main/source/acm_codec_database.h
new file mode 100644
index 0000000..0fe3a5e
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_codec_database.h
@@ -0,0 +1,324 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file generates databases with information about all supported audio
+ * codecs.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CODEC_DATABASE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CODEC_DATABASE_H_
+
+#include "acm_generic_codec.h"
+#include "common_types.h"
+#include "webrtc_neteq.h"
+
+namespace webrtc {
+
+// TODO(tlegrand): replace class ACMCodecDB with a namespace.
+class ACMCodecDB {
+ public:
+  // Enum with array indexes for the supported codecs. NOTE! The order MUST
+  // be the same as when creating the database in acm_codec_database.cc.
+  enum {
+    kNone = -1
+#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
+    , kISAC
+# if (defined(WEBRTC_CODEC_ISAC))
+    , kISACSWB
+# endif
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+    // Mono
+    , kPCM16B
+    , kPCM16Bwb
+    , kPCM16Bswb32kHz
+    // Stereo
+    , kPCM16B_2ch
+    , kPCM16Bwb_2ch
+    , kPCM16Bswb32kHz_2ch
+#endif
+    // Mono
+    , kPCMU
+    , kPCMA
+    // Stereo
+    , kPCMU_2ch
+    , kPCMA_2ch
+#ifdef WEBRTC_CODEC_ILBC
+    , kILBC
+#endif
+#ifdef WEBRTC_CODEC_AMR
+    , kGSMAMR
+#endif
+#ifdef WEBRTC_CODEC_AMRWB
+    , kGSMAMRWB
+#endif
+#ifdef WEBRTC_CODEC_CELT
+    // Mono
+    , kCELT32
+    // Stereo
+    , kCELT32_2ch
+#endif
+#ifdef WEBRTC_CODEC_G722
+    // Mono
+    , kG722
+    // Stereo
+    , kG722_2ch
+#endif
+#ifdef WEBRTC_CODEC_G722_1
+    , kG722_1_32
+    , kG722_1_24
+    , kG722_1_16
+#endif
+#ifdef WEBRTC_CODEC_G722_1C
+    , kG722_1C_48
+    , kG722_1C_32
+    , kG722_1C_24
+#endif
+#ifdef WEBRTC_CODEC_G729
+    , kG729
+#endif
+#ifdef WEBRTC_CODEC_G729_1
+    , kG729_1
+#endif
+#ifdef WEBRTC_CODEC_GSMFR
+    , kGSMFR
+#endif
+#ifdef WEBRTC_CODEC_SPEEX
+    , kSPEEX8
+    , kSPEEX16
+#endif
+    , kCNNB
+    , kCNWB
+    , kCNSWB
+#ifdef WEBRTC_CODEC_AVT
+    , kAVT
+#endif
+#ifdef WEBRTC_CODEC_RED
+    , kRED
+#endif
+    , kNumCodecs
+  };
+
+  // Set unsupported codecs to -1
+#ifndef WEBRTC_CODEC_ISAC
+  enum {kISACSWB = -1};
+# ifndef WEBRTC_CODEC_ISACFX
+  enum {kISAC = -1};
+# endif
+#endif
+#ifndef WEBRTC_CODEC_PCM16
+  // Mono
+  enum {kPCM16B = -1};
+  enum {kPCM16Bwb = -1};
+  enum {kPCM16Bswb32kHz = -1};
+  // Stereo
+  enum {kPCM16B_2ch = -1};
+  enum {kPCM16Bwb_2ch = -1};
+  enum {kPCM16Bswb32kHz_2ch = -1};
+#endif
+  // 48 kHz not supported, always set to -1.
+  enum {kPCM16Bswb48kHz = -1};
+#ifndef WEBRTC_CODEC_ILBC
+  enum {kILBC = -1};
+#endif
+#ifndef WEBRTC_CODEC_AMR
+  enum {kGSMAMR = -1};
+#endif
+#ifndef WEBRTC_CODEC_AMRWB
+  enum {kGSMAMRWB = -1};
+#endif
+#ifndef WEBRTC_CODEC_CELT
+  // Mono
+  enum {kCELT32 = -1};
+  // Stereo
+  enum {kCELT32_2ch = -1};
+#endif
+#ifndef WEBRTC_CODEC_G722
+  // Mono
+  enum {kG722 = -1};
+  // Stereo
+  enum {kG722_2ch = -1};
+#endif
+#ifndef WEBRTC_CODEC_G722_1
+  enum {kG722_1_32 = -1};
+  enum {kG722_1_24 = -1};
+  enum {kG722_1_16 = -1};
+#endif
+#ifndef WEBRTC_CODEC_G722_1C
+  enum {kG722_1C_48 = -1};
+  enum {kG722_1C_32 = -1};
+  enum {kG722_1C_24 = -1};
+#endif
+#ifndef WEBRTC_CODEC_G729
+  enum {kG729 = -1};
+#endif
+#ifndef WEBRTC_CODEC_G729_1
+  enum {kG729_1 = -1};
+#endif
+#ifndef WEBRTC_CODEC_GSMFR
+  enum {kGSMFR = -1};
+#endif
+#ifndef WEBRTC_CODEC_SPEEX
+  enum {kSPEEX8 = -1};
+  enum {kSPEEX16 = -1};
+#endif
+#ifndef WEBRTC_CODEC_AVT
+  enum {kAVT = -1};
+#endif
+#ifndef WEBRTC_CODEC_RED
+  enum {kRED = -1};
+#endif
+
+  // kMaxNumCodecs - Maximum number of codecs that can be activated in one
+  //                 build.
+  // kMaxNumPacketSize - Maximum number of allowed packet sizes for one codec.
+  // These might need to be increased if adding a new codec to the database
+  static const int kMaxNumCodecs =  50;
+  static const int kMaxNumPacketSize = 6;
+
+  // Codec specific settings
+  //
+  // num_packet_sizes     - number of allowed packet sizes.
+  // packet_sizes_samples - list of the allowed packet sizes.
+  // basic_block_samples  - assigned a value different from 0 if the codec
+  //                        requires to be fed with a specific number of samples
+  //                        that can be different from packet size.
+  // channel_support      - number of channels supported to encode;
+  //                        1 = mono, 2 = stereo, etc.
+  struct CodecSettings {
+    int num_packet_sizes;
+    int packet_sizes_samples[kMaxNumPacketSize];
+    int basic_block_samples;
+    int channel_support;
+  };
+
+  // Gets codec information from database at the position in database given by
+  // [codec_id].
+  // Input:
+  //   [codec_id] - number that specifies at what position in the database to
+  //                get the information.
+  // Output:
+  //   [codec_inst] - filled with information about the codec.
+  // Return:
+  //   0 if successful, otherwise -1.
+  static int Codec(int codec_id, CodecInst* codec_inst);
+
+  // Returns codec id and mirror id from database, given the information
+  // received in the input [codec_inst]. Mirror id is a number that tells
+  // where to find the codec's memory (instance). The number is either the
+  // same as codec id (most common), or a number pointing at a different
+  // entry in the database, if the codec has several entries with different
+  // payload types. This is used for codecs that must share one struct even if
+  // the payload type differs.
+  // One example is the codec iSAC which has the same struct for both 16 and
+  // 32 khz, but they have different entries in the database. Let's say the
+  // function is called with iSAC 32kHz. The function will return 1 as that is
+  // the entry in the data base, and [mirror_id] = 0, as that is the entry for
+  // iSAC 16 kHz, which holds the shared memory.
+  // Input:
+  //   [codec_inst] - Information about the codec for which we require the
+  //                  database id.
+  // Output:
+  //   [mirror_id] - mirror id, which most often is the same as the return
+  //                 value, see above.
+  //   [err_message] - if present, in the event of a mismatch found between the
+  //                   input and the database, a descriptive error message is
+  //                   written here.
+  //   [err_message] - if present, the length of error message is returned here.
+  // Return:
+  //   codec id if successful, otherwise < 0.
+  static int CodecNumber(const CodecInst* codec_inst, int* mirror_id,
+                         char* err_message, int max_message_len_byte);
+  static int CodecNumber(const CodecInst* codec_inst, int* mirror_id);
+  static int CodecId(const CodecInst* codec_inst);
+  static int CodecId(const char* payload_name, int frequency, int channels);
+  static int ReceiverCodecNumber(const CodecInst* codec_inst, int* mirror_id);
+
+  // Returns the codec sampling frequency for codec with id = "codec_id" in
+  // database.
+  // TODO(tlegrand): Check if function is needed, or if we can change
+  // to access database directly.
+  // Input:
+  //   [codec_id] - number that specifies at what position in the database to
+  //                get the information.
+  // Return:
+  //   codec sampling frequency if successful, otherwise -1.
+  static int CodecFreq(int codec_id);
+
+  // Return the codec's basic coding block size in samples.
+  // TODO(tlegrand): Check if function is needed, or if we can change
+  // to access database directly.
+  // Input:
+  //   [codec_id] - number that specifies at what position in the database to
+  //                get the information.
+  // Return:
+  //   codec basic block size if successful, otherwise -1.
+  static int BasicCodingBlock(int codec_id);
+
+  // Returns the NetEQ decoder database.
+  static const WebRtcNetEQDecoder* NetEQDecoders();
+
+  // Returns mirror id, which is a number that tells where to find the codec's
+  // memory (instance). It is either the same as codec id (most common), or a
+  // number pointing at a different entry in the database, if the codec have
+  // several entries with different payload types. This is used for codecs that
+  // must share struct even if the payload type differs.
+  // TODO(tlegrand): Check if function is needed, or if we can change
+  // to access database directly.
+  // Input:
+  //   [codec_id] - number that specifies codec's position in the database.
+  // Return:
+  //   Mirror id on success, otherwise -1.
+  static int MirrorID(int codec_id);
+
+  // Create memory/instance for storing codec state.
+  // Input:
+  //   [codec_inst] - information about codec. Only name of codec, "plname", is
+  //                  used in this function.
+  static ACMGenericCodec* CreateCodecInstance(const CodecInst* codec_inst);
+
+  // Checks if the bitrate is valid for the codec.
+  // Input:
+  //   [codec_id] - number that specifies codec's position in the database.
+  //   [rate] - bitrate to check.
+  //   [frame_size_samples] - (used for iLBC) specifies which frame size to go
+  //                          with the rate.
+  static bool IsRateValid(int codec_id, int rate);
+  static bool IsISACRateValid(int rate);
+  static bool IsILBCRateValid(int rate, int frame_size_samples);
+  static bool IsAMRRateValid(int rate);
+  static bool IsAMRwbRateValid(int rate);
+  static bool IsG7291RateValid(int rate);
+  static bool IsSpeexRateValid(int rate);
+  static bool IsCeltRateValid(int rate);
+
+  // Check if the payload type is valid, meaning that it is in the valid range
+  // of 0 to 127.
+  // Input:
+  //   [payload_type] - payload type.
+  static bool ValidPayloadType(int payload_type);
+
+  // Databases with information about the supported codecs
+  // database_ - stored information about all codecs: payload type, name,
+  //             sampling frequency, packet size in samples, default channel
+  //             support, and default rate.
+  // codec_settings_ - stored codec settings: number of allowed packet sizes,
+  //                   a vector with the allowed packet sizes, basic block
+  //                   samples, and max number of channels that are supported.
+  // neteq_decoders_ - list of supported decoders in NetEQ.
+  static const CodecInst database_[kMaxNumCodecs];
+  static const CodecSettings codec_settings_[kMaxNumCodecs];
+  static const WebRtcNetEQDecoder neteq_decoders_[kMaxNumCodecs];
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_CODEC_DATABASE_H_
diff --git a/src/modules/audio_coding/main/source/acm_common_defs.h b/src/modules/audio_coding/main/source/acm_common_defs.h
new file mode 100644
index 0000000..fd8dbd6
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_common_defs.h
@@ -0,0 +1,115 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_COMMON_DEFS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_COMMON_DEFS_H_
+
+#include <string.h>
+
+#include "audio_coding_module_typedefs.h"
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "typedefs.h"
+
+// Checks for enabled codecs, we prevent enabling codecs which are not
+// compatible.
+#if ((defined WEBRTC_CODEC_ISAC) && (defined WEBRTC_CODEC_ISACFX))
+#error iSAC and iSACFX codecs cannot be enabled at the same time
+#endif
+
+#ifdef WIN32
+// OS-dependent case-insensitive string comparison
+#define STR_CASE_CMP(x,y) ::_stricmp(x,y)
+#else
+// OS-dependent case-insensitive string comparison
+#define STR_CASE_CMP(x,y) ::strcasecmp(x,y)
+#endif
+
+namespace webrtc {
+
+// 60 ms is the maximum block size we support. An extra 20 ms is considered
+// for safety if process() method is not called when it should be, i.e. we
+// accept 20 ms of jitter. 80 ms @ 32 kHz (super wide-band) is 2560 samples.
+#define AUDIO_BUFFER_SIZE_W16  2560
+
+// There is one timestamp per each 10 ms of audio
+// the audio buffer, at max, may contain 32 blocks of 10ms
+// audio if the sampling frequency is 8000 Hz (80 samples per block).
+// Therefore, The size of the buffer where we keep timestamps
+// is defined as follows
+#define TIMESTAMP_BUFFER_SIZE_W32  (AUDIO_BUFFER_SIZE_W16/80)
+
+// The maximum size of a payload, that is 60 ms of PCM-16 @ 32 kHz stereo
+#define MAX_PAYLOAD_SIZE_BYTE   7680
+
+// General codec specific defines
+const int kIsacWbDefaultRate = 32000;
+const int kIsacSwbDefaultRate = 56000;
+const int kIsacPacSize480 = 480;
+const int kIsacPacSize960 = 960;
+
+// An encoded bit-stream is labeled by one of the following enumerators.
+//
+//   kNoEncoding              : There has been no encoding.
+//   kActiveNormalEncoded     : Active audio frame coded by the codec.
+//   kPassiveNormalEncoded    : Passive audio frame coded by the codec.
+//   kPassiveDTXNB            : Passive audio frame coded by narrow-band CN.
+//   kPassiveDTXWB            : Passive audio frame coded by wide-band CN.
+//   kPassiveDTXSWB           : Passive audio frame coded by super-wide-band CN.
+//
+enum WebRtcACMEncodingType {
+  kNoEncoding,
+  kActiveNormalEncoded,
+  kPassiveNormalEncoded,
+  kPassiveDTXNB,
+  kPassiveDTXWB,
+  kPassiveDTXSWB
+};
+
+// A structure which contains codec parameters. For instance, used when
+// initializing encoder and decoder.
+//
+//   codecInstant            : c.f. common_types.h
+//   enableDTX               : set true to enable DTX. If codec does not have
+//                             internal DTX, this will enable VAD.
+//   enableVAD               : set true to enable VAD.
+//   vadMode                 : VAD mode, c.f. audio_coding_module_typedefs.h
+//                             for possible values.
+struct WebRtcACMCodecParams {
+  CodecInst codecInstant;
+  bool enableDTX;
+  bool enableVAD;
+  ACMVADMode vadMode;
+};
+
+// A structure that encapsulates audio buffer and related parameters
+// used for synchronization of audio of two ACMs.
+//
+//   inAudio                 : same as ACMGenericCodec::_inAudio
+//   inAudioIxRead           : same as ACMGenericCodec::_inAudioIxRead
+//   inAudioIxWrite          : same as ACMGenericCodec::_inAudioIxWrite
+//   inTimestamp             : same as ACMGenericCodec::_inTimestamp
+//   inTimestampIxWrite      : same as ACMGenericCodec::_inTImestampIxWrite
+//   lastTimestamp           : same as ACMGenericCodec::_lastTimestamp
+//   lastInTimestamp         : same as AudioCodingModuleImpl::_lastInTimestamp
+//
+struct WebRtcACMAudioBuff {
+  WebRtc_Word16 inAudio[AUDIO_BUFFER_SIZE_W16];
+  WebRtc_Word16 inAudioIxRead;
+  WebRtc_Word16 inAudioIxWrite;
+  WebRtc_UWord32 inTimestamp[TIMESTAMP_BUFFER_SIZE_W32];
+  WebRtc_Word16 inTimestampIxWrite;
+  WebRtc_UWord32 lastTimestamp;
+  WebRtc_UWord32 lastInTimestamp;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_COMMON_DEFS_H_
diff --git a/src/modules/audio_coding/main/source/acm_dtmf_detection.cc b/src/modules/audio_coding/main/source/acm_dtmf_detection.cc
new file mode 100644
index 0000000..e256186
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_dtmf_detection.cc
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_dtmf_detection.h"
+#include "audio_coding_module_typedefs.h"
+
+namespace webrtc {
+
+ACMDTMFDetection::ACMDTMFDetection() {}
+
+ACMDTMFDetection::~ACMDTMFDetection() {}
+
+WebRtc_Word16 ACMDTMFDetection::Enable(ACMCountries /* cpt */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMDTMFDetection::Disable() {
+  return -1;
+}
+
+WebRtc_Word16 ACMDTMFDetection::Detect(
+    const WebRtc_Word16* /* inAudioBuff */,
+    const WebRtc_UWord16 /* inBuffLenWord16 */,
+    const WebRtc_Word32 /* inFreqHz */,
+    bool& /* toneDetected */,
+    WebRtc_Word16& /* tone  */) {
+  return -1;
+}
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/acm_dtmf_detection.h b/src/modules/audio_coding/main/source/acm_dtmf_detection.h
new file mode 100644
index 0000000..fab6f18
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_dtmf_detection.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_DETECTION_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_DETECTION_H_
+
+#include "acm_resampler.h"
+#include "audio_coding_module_typedefs.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+class ACMDTMFDetection {
+ public:
+  ACMDTMFDetection();
+  ~ACMDTMFDetection();
+  WebRtc_Word16 Enable(ACMCountries cpt = ACMDisableCountryDetection);
+  WebRtc_Word16 Disable();
+  WebRtc_Word16 Detect(const WebRtc_Word16* inAudioBuff,
+                       const WebRtc_UWord16 inBuffLenWord16,
+                       const WebRtc_Word32 inFreqHz,
+                       bool& toneDetected,
+                       WebRtc_Word16& tone);
+
+ private:
+  ACMResampler _resampler;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_DETECTION_H_
diff --git a/src/modules/audio_coding/main/source/acm_dtmf_playout.cc b/src/modules/audio_coding/main/source/acm_dtmf_playout.cc
new file mode 100644
index 0000000..de69f10
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_dtmf_playout.cc
@@ -0,0 +1,164 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_dtmf_playout.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+namespace webrtc {
+
+#ifndef WEBRTC_CODEC_AVT
+
+ACMDTMFPlayout::ACMDTMFPlayout(
+    WebRtc_Word16 /* codecID */) {
+  return;
+}
+
+ACMDTMFPlayout::~ACMDTMFPlayout() {
+  return;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::InternalEncode(
+    WebRtc_UWord8* /* bitStream */,
+    WebRtc_Word16* /* bitStreamLenByte */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                         WebRtc_Word16 /* bitStreamLenByte */,
+                                         WebRtc_Word16* /* audio */,
+                                         WebRtc_Word16* /* audioSamples */,
+                                         WebRtc_Word8* /* speechType */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word32 ACMDTMFPlayout::CodecDef(WebRtcNetEQ_CodecDef& /* codecDef */,
+                                       const CodecInst& /* codecInst */) {
+  return -1;
+}
+
+ACMGenericCodec* ACMDTMFPlayout::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::InternalCreateEncoder() {
+  return -1;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::InternalCreateDecoder() {
+  return -1;
+}
+
+void ACMDTMFPlayout::InternalDestructEncoderInst(void* /* ptrInst */) {
+  return;
+}
+
+void ACMDTMFPlayout::DestructEncoderSafe() {
+  return;
+}
+
+void ACMDTMFPlayout::DestructDecoderSafe() {
+  return;
+}
+
+#else     //===================== Actual Implementation =======================
+
+ACMDTMFPlayout::ACMDTMFPlayout(WebRtc_Word16 codecID) {
+  _codecID = codecID;
+}
+
+ACMDTMFPlayout::~ACMDTMFPlayout() {
+  return;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::InternalEncode(
+    WebRtc_UWord8* /* bitStream */,
+    WebRtc_Word16* /* bitStreamLenByte */) {
+  return 0;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                         WebRtc_Word16 /* bitStreamLenByte */,
+                                         WebRtc_Word16* /* audio */,
+                                         WebRtc_Word16* /* audioSamples */,
+                                         WebRtc_Word8* /* speechType */) {
+  return 0;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  // This codec does not need initialization,
+  // DTMFPlayout has no instance
+  return 0;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  // This codec does not need initialization,
+  // DTMFPlayout has no instance
+  return 0;
+}
+
+WebRtc_Word32 ACMDTMFPlayout::CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                                       const CodecInst& codecInst) {
+  // Fill up the structure by calling
+  // "SET_CODEC_PAR" & "SET_AVT_FUNCTION."
+  // Then call NetEQ to add the codec to it's
+  // database.
+  SET_CODEC_PAR((codecDef), kDecoderAVT, codecInst.pltype, NULL, 8000);
+  SET_AVT_FUNCTIONS((codecDef));
+  return 0;
+}
+
+ACMGenericCodec* ACMDTMFPlayout::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::InternalCreateEncoder() {
+  // DTMFPlayout has no instance
+  return 0;
+}
+
+WebRtc_Word16 ACMDTMFPlayout::InternalCreateDecoder() {
+  // DTMFPlayout has no instance
+  return 0;
+}
+
+void ACMDTMFPlayout::InternalDestructEncoderInst(void* /* ptrInst */) {
+  // DTMFPlayout has no instance
+  return;
+}
+
+void ACMDTMFPlayout::DestructEncoderSafe() {
+  // DTMFPlayout has no instance
+  return;
+}
+
+void ACMDTMFPlayout::DestructDecoderSafe() {
+  // DTMFPlayout has no instance
+  return;
+}
+
+#endif
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/acm_dtmf_playout.h b/src/modules/audio_coding/main/source/acm_dtmf_playout.h
new file mode 100644
index 0000000..62b1501
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_dtmf_playout.h
@@ -0,0 +1,54 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_PLAYOUT_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_PLAYOUT_H_
+
+#include "acm_generic_codec.h"
+
+namespace webrtc {
+
+class ACMDTMFPlayout: public ACMGenericCodec {
+ public:
+  ACMDTMFPlayout(WebRtc_Word16 codecID);
+  ~ACMDTMFPlayout();
+  // for FEC
+  ACMGenericCodec* CreateInstance(void);
+
+  WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
+                               WebRtc_Word16* bitStreamLenByte);
+
+  WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codecParams);
+
+  WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codecParams);
+
+ protected:
+  WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitStream,
+                           WebRtc_Word16 bitStreamLenByte,
+                           WebRtc_Word16* audio, WebRtc_Word16* audioSamples,
+                           WebRtc_Word8* speechType);
+
+  WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                         const CodecInst& codecInst);
+
+  void DestructEncoderSafe();
+
+  void DestructDecoderSafe();
+
+  WebRtc_Word16 InternalCreateEncoder();
+
+  WebRtc_Word16 InternalCreateDecoder();
+
+  void InternalDestructEncoderInst(void* ptrInst);
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_DTMF_PLAYOUT_H_
diff --git a/src/modules/audio_coding/main/source/acm_g722.cc b/src/modules/audio_coding/main/source/acm_g722.cc
new file mode 100644
index 0000000..12397be
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_g722.cc
@@ -0,0 +1,353 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_g722.h"
+#include "acm_codec_database.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+#include "g722_interface.h"
+
+namespace webrtc {
+
+#ifndef WEBRTC_CODEC_G722
+
+ACMG722::ACMG722(WebRtc_Word16 /* codecID */)
+    : _ptrEncStr(NULL),
+      _ptrDecStr(NULL),
+      _encoderInstPtr(NULL),
+      _encoderInstPtrRight(NULL),
+      _decoderInstPtr(NULL) {
+  return;
+}
+
+ACMG722::~ACMG722() {
+  return;
+}
+
+WebRtc_Word32 ACMG722::Add10MsDataSafe(const WebRtc_UWord32 /* timestamp */,
+                                       const WebRtc_Word16* /* data */,
+                                       const WebRtc_UWord16 /* lengthSmpl */,
+                                       const WebRtc_UWord8 /* audioChannel */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMG722::InternalEncode(WebRtc_UWord8* /* bitStream */,
+                                      WebRtc_Word16* /* bitStreamLenByte */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMG722::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                  WebRtc_Word16 /* bitStreamLenByte */,
+                                  WebRtc_Word16* /* audio */,
+                                  WebRtc_Word16* /* audioSamples */,
+                                  WebRtc_Word8* /* speechType */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMG722::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMG722::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word32 ACMG722::CodecDef(WebRtcNetEQ_CodecDef& /* codecDef */,
+                                const CodecInst& /* codecInst */) {
+  return -1;
+}
+
+ACMGenericCodec* ACMG722::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMG722::InternalCreateEncoder() {
+  return -1;
+}
+
+void ACMG722::DestructEncoderSafe() {
+  return;
+}
+
+WebRtc_Word16 ACMG722::InternalCreateDecoder() {
+  return -1;
+}
+
+void ACMG722::DestructDecoderSafe() {
+  return;
+}
+
+void ACMG722::InternalDestructEncoderInst(void* /* ptrInst */) {
+  return;
+}
+
+void ACMG722::SplitStereoPacket(uint8_t* /*payload*/,
+                                int32_t* /*payload_length*/) {}
+
+#else     //===================== Actual Implementation =======================
+
+// Encoder and decoder memory
+struct ACMG722EncStr {
+  G722EncInst* inst; // instance for left channel in case of stereo
+  G722EncInst* instRight; // instance for right channel in case of stereo
+};
+struct ACMG722DecStr {
+  G722DecInst* inst; // instance for left channel in case of stereo
+  G722DecInst* instRight; // instance for right channel in case of stereo
+};
+
+ACMG722::ACMG722(WebRtc_Word16 codecID)
+    : _encoderInstPtr(NULL),
+      _encoderInstPtrRight(NULL),
+      _decoderInstPtr(NULL) {
+  // Encoder
+  _ptrEncStr = new ACMG722EncStr;
+  if (_ptrEncStr != NULL) {
+    _ptrEncStr->inst = NULL;
+    _ptrEncStr->instRight = NULL;
+  }
+  // Decoder
+  _ptrDecStr = new ACMG722DecStr;
+  if (_ptrDecStr != NULL) {
+    _ptrDecStr->inst = NULL;
+    _ptrDecStr->instRight = NULL; // Not used
+  }
+  _codecID = codecID;
+  return;
+}
+
+ACMG722::~ACMG722() {
+  // Encoder
+  if (_ptrEncStr != NULL) {
+    if (_ptrEncStr->inst != NULL) {
+      WebRtcG722_FreeEncoder(_ptrEncStr->inst);
+      _ptrEncStr->inst = NULL;
+    }
+    if (_ptrEncStr->instRight != NULL) {
+      WebRtcG722_FreeEncoder(_ptrEncStr->instRight);
+      _ptrEncStr->instRight = NULL;
+    }
+    delete _ptrEncStr;
+    _ptrEncStr = NULL;
+  }
+  // Decoder
+  if (_ptrDecStr != NULL) {
+    if (_ptrDecStr->inst != NULL) {
+      WebRtcG722_FreeDecoder(_ptrDecStr->inst);
+      _ptrDecStr->inst = NULL;
+    }
+    if (_ptrDecStr->instRight != NULL) {
+      WebRtcG722_FreeDecoder(_ptrDecStr->instRight);
+      _ptrDecStr->instRight = NULL;
+    }
+    delete _ptrDecStr;
+    _ptrDecStr = NULL;
+  }
+  return;
+}
+
+WebRtc_Word32 ACMG722::Add10MsDataSafe(const WebRtc_UWord32 timestamp,
+                                       const WebRtc_Word16* data,
+                                       const WebRtc_UWord16 lengthSmpl,
+                                       const WebRtc_UWord8 audioChannel) {
+  return ACMGenericCodec::Add10MsDataSafe((timestamp >> 1), data, lengthSmpl,
+                                          audioChannel);
+}
+
+WebRtc_Word16 ACMG722::InternalEncode(WebRtc_UWord8* bitStream,
+                                      WebRtc_Word16* bitStreamLenByte) {
+  // If stereo, split input signal in left and right channel before encoding
+  if (_noChannels == 2) {
+    WebRtc_Word16 leftChannel[960];
+    WebRtc_Word16 rightChannel[960];
+    WebRtc_UWord8 outLeft[480];
+    WebRtc_UWord8 outRight[480];
+    WebRtc_Word16 lenInBytes;
+    for (int i = 0, j = 0; i < _frameLenSmpl * 2; i += 2, j++) {
+      leftChannel[j] = _inAudio[_inAudioIxRead + i];
+      rightChannel[j] = _inAudio[_inAudioIxRead + i + 1];
+    }
+    lenInBytes = WebRtcG722_Encode(_encoderInstPtr, leftChannel, _frameLenSmpl,
+                                   (WebRtc_Word16*) outLeft);
+    lenInBytes += WebRtcG722_Encode(_encoderInstPtrRight, rightChannel,
+                                    _frameLenSmpl, (WebRtc_Word16*) outRight);
+    *bitStreamLenByte = lenInBytes;
+
+    // Interleave the 4 bits per sample from left and right channel
+    for (int i = 0, j = 0; i < lenInBytes; i += 2, j++) {
+      bitStream[i] = (outLeft[j] & 0xF0) + (outRight[j] >> 4);
+      bitStream[i + 1] = ((outLeft[j] & 0x0F) << 4) + (outRight[j] & 0x0F);
+    }
+  } else {
+    *bitStreamLenByte = WebRtcG722_Encode(_encoderInstPtr,
+                                          &_inAudio[_inAudioIxRead],
+                                          _frameLenSmpl,
+                                          (WebRtc_Word16*) bitStream);
+  }
+
+  // increment the read index this tell the caller how far
+  // we have gone forward in reading the audio buffer
+  _inAudioIxRead += _frameLenSmpl * _noChannels;
+  return *bitStreamLenByte;
+}
+
+WebRtc_Word16 ACMG722::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                  WebRtc_Word16 /* bitStreamLenByte */,
+                                  WebRtc_Word16* /* audio */,
+                                  WebRtc_Word16* /* audioSamples */,
+                                  WebRtc_Word8* /* speechType */) {
+  return 0;
+}
+
+WebRtc_Word16 ACMG722::InternalInitEncoder(WebRtcACMCodecParams* codecParams) {
+  if (codecParams->codecInstant.channels == 2) {
+    // Create codec struct for right channel
+    if (_ptrEncStr->instRight == NULL) {
+      WebRtcG722_CreateEncoder(&_ptrEncStr->instRight);
+      if (_ptrEncStr->instRight == NULL) {
+        return -1;
+      }
+    }
+    _encoderInstPtrRight = _ptrEncStr->instRight;
+    if (WebRtcG722_EncoderInit(_encoderInstPtrRight) < 0) {
+      return -1;
+    }
+  }
+
+  return WebRtcG722_EncoderInit(_encoderInstPtr);
+}
+
+WebRtc_Word16 ACMG722::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return WebRtcG722_DecoderInit(_decoderInstPtr);
+}
+
+WebRtc_Word32 ACMG722::CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                                const CodecInst& codecInst) {
+  if (!_decoderInitialized) {
+    // TODO: log error
+    return -1;
+  }
+  // Fill up the structure by calling
+  // "SET_CODEC_PAR" & "SET_G722_FUNCTION."
+  // Then call NetEQ to add the codec to it's
+  // database.
+  if (codecInst.channels == 1) {
+    SET_CODEC_PAR(codecDef, kDecoderG722, codecInst.pltype, _decoderInstPtr,
+                  16000);
+  } else {
+    SET_CODEC_PAR(codecDef, kDecoderG722_2ch, codecInst.pltype,
+                  _decoderInstPtr, 16000);
+  }
+  SET_G722_FUNCTIONS(codecDef);
+  return 0;
+}
+
+ACMGenericCodec* ACMG722::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMG722::InternalCreateEncoder() {
+  if (_ptrEncStr == NULL) {
+    // this structure must be created at the costructor
+    // if it is still NULL then there is a probelm and
+    // we dont continue
+    return -1;
+  }
+  WebRtcG722_CreateEncoder(&_ptrEncStr->inst);
+  if (_ptrEncStr->inst == NULL) {
+    return -1;
+  }
+  _encoderInstPtr = _ptrEncStr->inst;
+  return 0;
+}
+
+void ACMG722::DestructEncoderSafe() {
+  if (_ptrEncStr != NULL) {
+    if (_ptrEncStr->inst != NULL) {
+      WebRtcG722_FreeEncoder(_ptrEncStr->inst);
+      _ptrEncStr->inst = NULL;
+    }
+  }
+  _encoderExist = false;
+  _encoderInitialized = false;
+}
+
+WebRtc_Word16 ACMG722::InternalCreateDecoder() {
+  if (_ptrDecStr == NULL) {
+    // this structure must be created at the costructor
+    // if it is still NULL then there is a probelm and
+    // we dont continue
+    return -1;
+  }
+
+  WebRtcG722_CreateDecoder(&_ptrDecStr->inst);
+  if (_ptrDecStr->inst == NULL) {
+    return -1;
+  }
+  _decoderInstPtr = _ptrDecStr->inst;
+  return 0;
+}
+
+void ACMG722::DestructDecoderSafe() {
+  _decoderExist = false;
+  _decoderInitialized = false;
+  if (_ptrDecStr != NULL) {
+    if (_ptrDecStr->inst != NULL) {
+      WebRtcG722_FreeDecoder(_ptrDecStr->inst);
+      _ptrDecStr->inst = NULL;
+    }
+  }
+}
+
+void ACMG722::InternalDestructEncoderInst(void* ptrInst) {
+  if (ptrInst != NULL) {
+    WebRtcG722_FreeEncoder(static_cast<G722EncInst*>(ptrInst));
+  }
+  return;
+}
+
+// Split the stereo packet and place left and right channel after each other
+// in the payload vector.
+void ACMG722::SplitStereoPacket(uint8_t* payload, int32_t* payload_length) {
+  uint8_t right_byte;
+
+  // Check for valid inputs.
+  assert(payload != NULL);
+  assert(*payload_length > 0);
+
+  // Regroup the 4 bits/sample so to |l1 l2| |r1 r2| |l3 l4| |r3 r4| ...,
+  // where "lx" is 4 bits representing left sample number x, and "rx" right
+  // sample. Two samples fits in one byte, represented with |...|.
+  for (int i = 0; i < *payload_length; i += 2) {
+    right_byte = ((payload[i] & 0x0F) << 4) + (payload[i + 1] & 0x0F);
+    payload[i] = (payload[i] & 0xF0) + (payload[i + 1] >> 4);
+    payload[i + 1] = right_byte;
+  }
+
+  // Move one byte representing right channel each loop, and place it at the
+  // end of the bytestream vector. After looping the data is reordered to:
+  // |l1 l2| |l3 l4| ... |l(N-1) lN| |r1 r2| |r3 r4| ... |r(N-1) r(N)|,
+  // where N is the total number of samples.
+  for (int i = 0; i < *payload_length / 2; i++) {
+    right_byte = payload[i + 1];
+    memmove(&payload[i + 1], &payload[i + 2], *payload_length - i - 2);
+    payload[*payload_length - 1] = right_byte;
+  }
+}
+
+#endif
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/acm_g722.h b/src/modules/audio_coding/main/source/acm_g722.h
new file mode 100644
index 0000000..d718cdb
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_g722.h
@@ -0,0 +1,75 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_H_
+
+#include "acm_generic_codec.h"
+
+typedef struct WebRtcG722EncInst G722EncInst;
+typedef struct WebRtcG722DecInst G722DecInst;
+
+namespace webrtc {
+
+// forward declaration
+struct ACMG722EncStr;
+struct ACMG722DecStr;
+
+class ACMG722: public ACMGenericCodec {
+ public:
+  ACMG722(WebRtc_Word16 codecID);
+  ~ACMG722();
+  // for FEC
+  ACMGenericCodec* CreateInstance(void);
+
+  WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
+                               WebRtc_Word16* bitStreamLenByte);
+
+  WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codecParams);
+
+  WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codecParams);
+
+ protected:
+  WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitStream,
+                           WebRtc_Word16 bitStreamLenByte,
+                           WebRtc_Word16* audio, WebRtc_Word16* audioSamples,
+                           WebRtc_Word8* speechType);
+
+  WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                         const CodecInst& codecInst);
+
+  WebRtc_Word32 Add10MsDataSafe(const WebRtc_UWord32 timestamp,
+                                const WebRtc_Word16* data,
+                                const WebRtc_UWord16 lengthSmpl,
+                                const WebRtc_UWord8 audioChannel);
+
+  void DestructEncoderSafe();
+
+  void DestructDecoderSafe();
+
+  WebRtc_Word16 InternalCreateEncoder();
+
+  WebRtc_Word16 InternalCreateDecoder();
+
+  void InternalDestructEncoderInst(void* ptrInst);
+
+  void SplitStereoPacket(uint8_t* payload, int32_t* payload_length);
+
+  ACMG722EncStr* _ptrEncStr;
+  ACMG722DecStr* _ptrDecStr;
+
+  G722EncInst* _encoderInstPtr;
+  G722EncInst* _encoderInstPtrRight; // Prepared for stereo
+  G722DecInst* _decoderInstPtr;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_H_
diff --git a/src/modules/audio_coding/main/source/acm_g7221.cc b/src/modules/audio_coding/main/source/acm_g7221.cc
new file mode 100644
index 0000000..b7a8f58
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_g7221.cc
@@ -0,0 +1,492 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_g7221.h"
+#include "acm_codec_database.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+#ifdef WEBRTC_CODEC_G722_1
+// NOTE! G.722.1 is not included in the open-source package. The following
+// interface file is needed:
+//
+// /modules/audio_coding/codecs/g7221/main/interface/g7221_interface.h
+//
+// The API in the header file should match the one below.
+//
+// int16_t WebRtcG7221_CreateEnc16(G722_1_16_encinst_t_** encInst);
+// int16_t WebRtcG7221_CreateEnc24(G722_1_24_encinst_t_** encInst);
+// int16_t WebRtcG7221_CreateEnc32(G722_1_32_encinst_t_** encInst);
+// int16_t WebRtcG7221_CreateDec16(G722_1_16_decinst_t_** decInst);
+// int16_t WebRtcG7221_CreateDec24(G722_1_24_decinst_t_** decInst);
+// int16_t WebRtcG7221_CreateDec32(G722_1_32_decinst_t_** decInst);
+//
+// int16_t WebRtcG7221_FreeEnc16(G722_1_16_encinst_t_** encInst);
+// int16_t WebRtcG7221_FreeEnc24(G722_1_24_encinst_t_** encInst);
+// int16_t WebRtcG7221_FreeEnc32(G722_1_32_encinst_t_** encInst);
+// int16_t WebRtcG7221_FreeDec16(G722_1_16_decinst_t_** decInst);
+// int16_t WebRtcG7221_FreeDec24(G722_1_24_decinst_t_** decInst);
+// int16_t WebRtcG7221_FreeDec32(G722_1_32_decinst_t_** decInst);
+//
+// int16_t WebRtcG7221_EncoderInit16(G722_1_16_encinst_t_* encInst);
+// int16_t WebRtcG7221_EncoderInit24(G722_1_24_encinst_t_* encInst);
+// int16_t WebRtcG7221_EncoderInit32(G722_1_32_encinst_t_* encInst);
+// int16_t WebRtcG7221_DecoderInit16(G722_1_16_decinst_t_* decInst);
+// int16_t WebRtcG7221_DecoderInit24(G722_1_24_decinst_t_* decInst);
+// int16_t WebRtcG7221_DecoderInit32(G722_1_32_decinst_t_* decInst);
+//
+// int16_t WebRtcG7221_Encode16(G722_1_16_encinst_t_* encInst,
+//                              int16_t* input,
+//                              int16_t len,
+//                              int16_t* output);
+// int16_t WebRtcG7221_Encode24(G722_1_24_encinst_t_* encInst,
+//                              int16_t* input,
+//                              int16_t len,
+//                              int16_t* output);
+// int16_t WebRtcG7221_Encode32(G722_1_32_encinst_t_* encInst,
+//                              int16_t* input,
+//                              int16_t len,
+//                              int16_t* output);
+//
+// int16_t WebRtcG7221_Decode16(G722_1_16_decinst_t_* decInst,
+//                              int16_t* bitstream,
+//                              int16_t len,
+//                              int16_t* output);
+// int16_t WebRtcG7221_Decode24(G722_1_24_decinst_t_* decInst,
+//                              int16_t* bitstream,
+//                              int16_t len,
+//                              int16_t* output);
+// int16_t WebRtcG7221_Decode32(G722_1_32_decinst_t_* decInst,
+//                              int16_t* bitstream,
+//                              int16_t len,
+//                              int16_t* output);
+//
+// int16_t WebRtcG7221_DecodePlc16(G722_1_16_decinst_t_* decInst,
+//                                 int16_t* output,
+//                                 int16_t nrLostFrames);
+// int16_t WebRtcG7221_DecodePlc24(G722_1_24_decinst_t_* decInst,
+//                                 int16_t* output,
+//                                 int16_t nrLostFrames);
+// int16_t WebRtcG7221_DecodePlc32(G722_1_32_decinst_t_* decInst,
+//                                 int16_t* output,
+//                                 int16_t nrLostFrames);
+#include "g7221_interface.h"
+#endif
+
+namespace webrtc {
+
+#ifndef WEBRTC_CODEC_G722_1
+
+ACMG722_1::ACMG722_1(WebRtc_Word16 /* codecID */)
+    : _operationalRate(-1),
+      _encoderInstPtr(NULL),
+      _encoderInstPtrRight(NULL),
+      _decoderInstPtr(NULL),
+      _encoderInst16Ptr(NULL),
+      _encoderInst16PtrR(NULL),
+      _encoderInst24Ptr(NULL),
+      _encoderInst24PtrR(NULL),
+      _encoderInst32Ptr(NULL),
+      _encoderInst32PtrR(NULL),
+      _decoderInst16Ptr(NULL),
+      _decoderInst24Ptr(NULL),
+      _decoderInst32Ptr(NULL) {
+  return;
+}
+
+ACMG722_1::~ACMG722_1() {
+  return;
+}
+
+WebRtc_Word16 ACMG722_1::InternalEncode(WebRtc_UWord8* /* bitStream */,
+                                        WebRtc_Word16* /* bitStreamLenByte */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMG722_1::DecodeSafe(WebRtc_UWord8* /* bitStream  */,
+                                    WebRtc_Word16 /* bitStreamLenByte */,
+                                    WebRtc_Word16* /* audio */,
+                                    WebRtc_Word16* /* audioSamples */,
+                                    WebRtc_Word8* /* speechType */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMG722_1::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMG722_1::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word32 ACMG722_1::CodecDef(WebRtcNetEQ_CodecDef& /* codecDef  */,
+                                  const CodecInst& /* codecInst */) {
+  return -1;
+}
+
+ACMGenericCodec* ACMG722_1::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMG722_1::InternalCreateEncoder() {
+  return -1;
+}
+
+void ACMG722_1::DestructEncoderSafe() {
+  return;
+}
+
+WebRtc_Word16 ACMG722_1::InternalCreateDecoder() {
+  return -1;
+}
+
+void ACMG722_1::DestructDecoderSafe() {
+  return;
+}
+
+void ACMG722_1::InternalDestructEncoderInst(void* /* ptrInst */) {
+  return;
+}
+
+#else     //===================== Actual Implementation =======================
+ACMG722_1::ACMG722_1(
+    WebRtc_Word16 codecID):
+    _encoderInstPtr(NULL),
+    _encoderInstPtrRight(NULL),
+    _decoderInstPtr(NULL),
+    _encoderInst16Ptr(NULL),
+    _encoderInst16PtrR(NULL),
+    _encoderInst24Ptr(NULL),
+    _encoderInst24PtrR(NULL),
+    _encoderInst32Ptr(NULL),
+    _encoderInst32PtrR(NULL),
+    _decoderInst16Ptr(NULL),
+    _decoderInst24Ptr(NULL),
+    _decoderInst32Ptr(NULL) {
+  _codecID = codecID;
+  if (_codecID == ACMCodecDB::kG722_1_16) {
+    _operationalRate = 16000;
+  } else if (_codecID == ACMCodecDB::kG722_1_24) {
+    _operationalRate = 24000;
+  } else if (_codecID == ACMCodecDB::kG722_1_32) {
+    _operationalRate = 32000;
+  } else {
+    _operationalRate = -1;
+  }
+  return;
+}
+
+ACMG722_1::~ACMG722_1() {
+  if (_encoderInstPtr != NULL) {
+    delete _encoderInstPtr;
+    _encoderInstPtr = NULL;
+  }
+  if (_encoderInstPtrRight != NULL) {
+    delete _encoderInstPtrRight;
+    _encoderInstPtrRight = NULL;
+  }
+  if (_decoderInstPtr != NULL) {
+    delete _decoderInstPtr;
+    _decoderInstPtr = NULL;
+  }
+
+  switch (_operationalRate) {
+    case 16000: {
+      _encoderInst16Ptr = NULL;
+      _encoderInst16PtrR = NULL;
+      _decoderInst16Ptr = NULL;
+      break;
+    }
+    case 24000: {
+      _encoderInst24Ptr = NULL;
+      _encoderInst24PtrR = NULL;
+      _decoderInst24Ptr = NULL;
+      break;
+    }
+    case 32000: {
+      _encoderInst32Ptr = NULL;
+      _encoderInst32PtrR = NULL;
+      _decoderInst32Ptr = NULL;
+      break;
+    }
+    default: {
+      break;
+    }
+  }
+  return;
+}
+
+WebRtc_Word16 ACMG722_1::InternalEncode(WebRtc_UWord8* bitStream,
+                                        WebRtc_Word16* bitStreamLenByte) {
+  WebRtc_Word16 leftChannel[320];
+  WebRtc_Word16 rightChannel[320];
+  WebRtc_Word16 lenInBytes;
+  WebRtc_Word16 outB[160];
+
+  // If stereo, split input signal in left and right channel before encoding
+  if (_noChannels == 2) {
+    for (int i = 0, j = 0; i < _frameLenSmpl * 2; i += 2, j++) {
+      leftChannel[j] = _inAudio[_inAudioIxRead + i];
+      rightChannel[j] = _inAudio[_inAudioIxRead + i + 1];
+    }
+  } else {
+    memcpy(leftChannel, &_inAudio[_inAudioIxRead], 320);
+  }
+
+  switch (_operationalRate) {
+    case 16000: {
+      Inst lenInBytes = WebRtcG7221_Encode16(_encoderInst16Ptr, leftChannel,
+                                             320, &outB[0]);
+      if (_noChannels == 2) {
+        lenInBytes += WebRtcG7221_Encode16(_encoderInst16PtrR, rightChannel,
+                                           320, &outB[lenInBytes / 2]);
+      }
+      break;
+    }
+    case 24000: {
+      lenInBytes = WebRtcG7221_Encode24(_encoderInst24Ptr, leftChannel, 320,
+                                        &outB[0]);
+      if (_noChannels == 2) {
+        lenInBytes += WebRtcG7221_Encode24(_encoderInst24PtrR, rightChannel,
+                                           320, &outB[lenInBytes / 2]);
+      }
+      break;
+    }
+    case 32000: {
+      lenInBytes = WebRtcG7221_Encode32(_encoderInst32Ptr, leftChannel, 320,
+                                        &outB[0]);
+      if (_noChannels == 2) {
+        lenInBytes += WebRtcG7221_Encode32(_encoderInst32PtrR, rightChannel,
+                                           320, &outB[lenInBytes / 2]);
+      }
+      break;
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "InternalInitEncode: Wrong rate for G722_1.");
+      return -1;
+    }
+  }
+  memcpy(bitStream, outB, lenInBytes);
+  *bitStreamLenByte = lenInBytes;
+
+  // increment the read index this tell the caller that how far
+  // we have gone forward in reading the audio buffer
+  _inAudioIxRead += 320 * _noChannels;
+  return *bitStreamLenByte;
+}
+
+WebRtc_Word16 ACMG722_1::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                    WebRtc_Word16 /* bitStreamLenByte */,
+                                    WebRtc_Word16* /* audio */,
+                                    WebRtc_Word16* /* audioSamples */,
+                                    WebRtc_Word8* /* speechType */) {
+  return 0;
+}
+
+WebRtc_Word16 ACMG722_1::InternalInitEncoder(
+    WebRtcACMCodecParams* codecParams) {
+  WebRtc_Word16 ret;
+
+  switch (_operationalRate) {
+    case 16000: {
+      ret = WebRtcG7221_EncoderInit16(_encoderInst16PtrR);
+      if (ret < 0) {
+        return ret;
+      }
+      return WebRtcG7221_EncoderInit16(_encoderInst16Ptr);
+    }
+    case 24000: {
+      ret = WebRtcG7221_EncoderInit24(_encoderInst24PtrR);
+      if (ret < 0) {
+        return ret;
+      }
+      return WebRtcG7221_EncoderInit24(_encoderInst24Ptr);
+    }
+    case 32000: {
+      ret = WebRtcG7221_EncoderInit32(_encoderInst32PtrR);
+      if (ret < 0) {
+        return ret;
+      }
+      return WebRtcG7221_EncoderInit32(_encoderInst32Ptr);
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError,Inst webrtc::kTraceAudioCoding,
+                   _uniqueID, "InternalInitEncoder: Wrong rate for G722_1.");
+      return -1;
+    }
+  }
+}
+
+WebRtc_Word16 ACMG722_1::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  switch (_operationalRate) {
+    case 16000: {
+      return WebRtcG7221_DecoderInit16(_decoderInst16Ptr);
+    }
+    case 24000: {
+      return WebRtcG7221_DecoderInit24(_decoderInst24Ptr);
+    }
+    case 32000: {
+      return WebRtcG7221_DecoderInit32(_decoderInst32Ptr);
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "InternalInitDecoder: Wrong rate for G722_1.");
+      return -1;
+    }
+  }
+}
+
+WebRtc_Word32 ACMG722_1::CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                                  const CodecInst& codecInst) {
+  if (!_decoderInitialized) {
+    // Todo:
+    // log error
+    return -1;
+  }
+  // NetEq has an array of pointers to WebRtcNetEQ_CodecDef.
+  // Get an entry of that array (neteq wrapper will allocate memory)
+  // by calling "netEq->CodecDef", where "NETEQ_CODEC_G722_1_XX" would
+  // be the index of the entry.
+  // Fill up the given structure by calling
+  // "SET_CODEC_PAR" & "SET_G722_1_XX_FUNCTION."
+  // Then return the structure back to NetEQ to add the codec to it's
+  // database.
+  switch (_operationalRate) {
+    case 16000: {
+      SET_CODEC_PAR((codecDef), kDecoderG722_1_16, codecInst.pltype,
+          _decoderInst16Ptr, 16000);
+      SET_G722_1_16_FUNCTIONS((codecDef));
+      break;
+    }
+    case 24000: {
+      SET_CODEC_PAR((codecDef), kDecoderG722_1_24, codecInst.pltype,
+          _decoderInst24Ptr, 16000);
+      SET_G722_1_24_FUNCTIONS((codecDef));
+      break;
+    }
+    case 32000: {
+      SET_CODEC_PAR((codecDef), kDecoderG722_1_32, codecInst.pltype,
+          _decoderInst32Ptr, 16000);
+      SET_G722_1_32_FUNCTIONS((codecDef));
+      break;
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "CodecDef: Wrong rate for G722_1.");
+      return -1;
+    }
+  }
+  return 0;
+}
+
+ACMGenericCodec* ACMG722_1::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMG722_1::InternalCreateEncoder() {
+  if ((_encoderInstPtr == NULL) || (_encoderInstPtrRight == NULL)) {
+    return -1;
+  }
+  switch (_operationalRate) {
+    case 16000: {
+      WebRtcG7221_CreateEnc16(&_encoderInst16Ptr);
+      WebRtcG7221_CreateEnc16(&_encoderInst16PtrR);
+      break;
+    }
+    case 24000: {
+      WebRtcG7221_CreateEnc24(&_encoderInst24Ptr);
+      WebRtcG7221_CreateEnc24(&_encoderInst24PtrR);
+      break;
+    }
+    case 32000: {
+      WebRtcG7221_CreateEnc32(&_encoderInst32Ptr);
+      WebRtcG7221_CreateEnc32(&_encoderInst32PtrR);
+      break;
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "InternalCreateEncoder: Wrong rate for G722_1.");
+      return -1;
+    }
+  }
+  return 0;
+}
+
+void ACMG722_1::DestructEncoderSafe() {
+  _encoderExist = false;
+  _encoderInitialized = false;
+  if (_encoderInstPtr != NULL) {
+    delete _encoderInstPtr;
+    _encoderInstPtr = NULL;
+  }
+  if (_encoderInstPtrRight != NULL) {
+    delete _encoderInstPtrRight;
+    _encoderInstPtrRight = NULL;
+  }
+  _encoderInst16Ptr = NULL;
+  _encoderInst24Ptr = NULL;
+  _encoderInst32Ptr = NULL;
+}
+
+WebRtc_Word16 ACMG722_1::InternalCreateDecoder() {
+  if (_decoderInstPtr == NULL) {
+    return -1;
+  }
+  switch (_operationalRate) {
+    case 16000: {
+      WebRtcG7221_CreateDec16(&_decoderInst16Ptr);
+      break;
+    }
+    case 24000: {
+      WebRtcG7221_CreateDec24(&_decoderInst24Ptr);
+      break;
+    }
+    case 32000: {
+      WebRtcG7221_CreateDec32(&_decoderInst32Ptr);
+      break;
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "InternalCreateDecoder: Wrong rate for G722_1.");
+      return -1;
+    }
+  }
+  return 0;
+}
+
+void ACMG722_1::DestructDecoderSafe() {
+  _decoderExist = false;
+  _decoderInitialized = false;
+  if (_decoderInstPtr != NULL) {
+    delete _decoderInstPtr;
+    _decoderInstPtr = NULL;
+  }
+  _decoderInst16Ptr = NULL;
+  _decoderInst24Ptr = NULL;
+  _decoderInst32Ptr = NULL;
+}
+
+void ACMG722_1::InternalDestructEncoderInst(void* ptrInst) {
+  if (ptrInst != NULL) {
+    delete ptrInst;
+  }
+  return;
+}
+
+#endif
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/acm_g7221.h b/src/modules/audio_coding/main/source/acm_g7221.h
new file mode 100644
index 0000000..af12475
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_g7221.h
@@ -0,0 +1,82 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_1_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_1_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct G722_1_16_encinst_t_;
+struct G722_1_16_decinst_t_;
+struct G722_1_24_encinst_t_;
+struct G722_1_24_decinst_t_;
+struct G722_1_32_encinst_t_;
+struct G722_1_32_decinst_t_;
+struct G722_1_Inst_t_;
+
+namespace webrtc {
+
+class ACMG722_1: public ACMGenericCodec {
+ public:
+  ACMG722_1(WebRtc_Word16 codecID);
+  ~ACMG722_1();
+  // for FEC
+  ACMGenericCodec* CreateInstance(void);
+
+  WebRtc_Word16 InternalEncode(WebRtc_UWord8* bitstream,
+                               WebRtc_Word16* bitStreamLenByte);
+
+  WebRtc_Word16 InternalInitEncoder(WebRtcACMCodecParams *codecParams);
+
+  WebRtc_Word16 InternalInitDecoder(WebRtcACMCodecParams *codecParams);
+
+ protected:
+  WebRtc_Word16 DecodeSafe(WebRtc_UWord8* bitStream,
+                           WebRtc_Word16 bitStreamLenByte,
+                           WebRtc_Word16* audio, WebRtc_Word16* audioSamples,
+                           WebRtc_Word8* speechType);
+
+  WebRtc_Word32 CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                         const CodecInst& codecInst);
+
+  void DestructEncoderSafe();
+
+  void DestructDecoderSafe();
+
+  WebRtc_Word16 InternalCreateEncoder();
+
+  WebRtc_Word16 InternalCreateDecoder();
+
+  void InternalDestructEncoderInst(void* ptrInst);
+
+  WebRtc_Word32 _operationalRate;
+
+  G722_1_Inst_t_* _encoderInstPtr;
+  G722_1_Inst_t_* _encoderInstPtrRight; //Used in stereo mode
+  G722_1_Inst_t_* _decoderInstPtr;
+
+  // Only one set of these pointer is valid at any instance
+  G722_1_16_encinst_t_* _encoderInst16Ptr;
+  G722_1_16_encinst_t_* _encoderInst16PtrR;
+  G722_1_24_encinst_t_* _encoderInst24Ptr;
+  G722_1_24_encinst_t_* _encoderInst24PtrR;
+  G722_1_32_encinst_t_* _encoderInst32Ptr;
+  G722_1_32_encinst_t_* _encoderInst32PtrR;
+
+  // Only one of these pointer is valid at any instance
+  G722_1_16_decinst_t_* _decoderInst16Ptr;
+  G722_1_24_decinst_t_* _decoderInst24Ptr;
+  G722_1_32_decinst_t_* _decoderInst32Ptr;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_1_H_
diff --git a/src/modules/audio_coding/main/source/acm_g7221c.cc b/src/modules/audio_coding/main/source/acm_g7221c.cc
new file mode 100644
index 0000000..6d055d6
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_g7221c.cc
@@ -0,0 +1,494 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_g7221c.h"
+#include "acm_codec_database.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+#include "trace.h"
+
+#ifdef WEBRTC_CODEC_G722_1C
+// NOTE! G.722.1C is not included in the open-source package. The following
+// interface file is needed:
+//
+// /modules/audio_coding/codecs/g7221c/main/interface/g7221c_interface.h
+//
+// The API in the header file should match the one below.
+//
+// int16_t WebRtcG7221C_CreateEnc24(G722_1C_24_encinst_t_** encInst);
+// int16_t WebRtcG7221C_CreateEnc32(G722_1C_32_encinst_t_** encInst);
+// int16_t WebRtcG7221C_CreateEnc48(G722_1C_48_encinst_t_** encInst);
+// int16_t WebRtcG7221C_CreateDec24(G722_1C_24_decinst_t_** decInst);
+// int16_t WebRtcG7221C_CreateDec32(G722_1C_32_decinst_t_** decInst);
+// int16_t WebRtcG7221C_CreateDec48(G722_1C_48_decinst_t_** decInst);
+//
+// int16_t WebRtcG7221C_FreeEnc24(G722_1C_24_encinst_t_** encInst);
+// int16_t WebRtcG7221C_FreeEnc32(G722_1C_32_encinst_t_** encInst);
+// int16_t WebRtcG7221C_FreeEnc48(G722_1C_48_encinst_t_** encInst);
+// int16_t WebRtcG7221C_FreeDec24(G722_1C_24_decinst_t_** decInst);
+// int16_t WebRtcG7221C_FreeDec32(G722_1C_32_decinst_t_** decInst);
+// int16_t WebRtcG7221C_FreeDec48(G722_1C_48_decinst_t_** decInst);
+//
+// int16_t WebRtcG7221C_EncoderInit24(G722_1C_24_encinst_t_* encInst);
+// int16_t WebRtcG7221C_EncoderInit32(G722_1C_32_encinst_t_* encInst);
+// int16_t WebRtcG7221C_EncoderInit48(G722_1C_48_encinst_t_* encInst);
+// int16_t WebRtcG7221C_DecoderInit24(G722_1C_24_decinst_t_* decInst);
+// int16_t WebRtcG7221C_DecoderInit32(G722_1C_32_decinst_t_* decInst);
+// int16_t WebRtcG7221C_DecoderInit48(G722_1C_48_decinst_t_* decInst);
+//
+// int16_t WebRtcG7221C_Encode24(G722_1C_24_encinst_t_* encInst,
+//                               int16_t* input,
+//                               int16_t len,
+//                               int16_t* output);
+// int16_t WebRtcG7221C_Encode32(G722_1C_32_encinst_t_* encInst,
+//                               int16_t* input,
+//                               int16_t len,
+//                               int16_t* output);
+// int16_t WebRtcG7221C_Encode48(G722_1C_48_encinst_t_* encInst,
+//                               int16_t* input,
+//                               int16_t len,
+//                               int16_t* output);
+//
+// int16_t WebRtcG7221C_Decode24(G722_1C_24_decinst_t_* decInst,
+//                               int16_t* bitstream,
+//                               int16_t len,
+//                               int16_t* output);
+// int16_t WebRtcG7221C_Decode32(G722_1C_32_decinst_t_* decInst,
+//                               int16_t* bitstream,
+//                               int16_t len,
+//                               int16_t* output);
+// int16_t WebRtcG7221C_Decode48(G722_1C_48_decinst_t_* decInst,
+//                               int16_t* bitstream,
+//                               int16_t len,
+//                               int16_t* output);
+//
+// int16_t WebRtcG7221C_DecodePlc24(G722_1C_24_decinst_t_* decInst,
+//                                  int16_t* output,
+//                                  int16_t nrLostFrames);
+// int16_t WebRtcG7221C_DecodePlc32(G722_1C_32_decinst_t_* decInst,
+//                                  int16_t* output,
+//                                  int16_t nrLostFrames);
+// int16_t WebRtcG7221C_DecodePlc48(G722_1C_48_decinst_t_* decInst,
+//                                  int16_t* output,
+//                                  int16_t nrLostFrames);
+#include "g7221c_interface.h"
+#endif
+
+namespace webrtc {
+
+#ifndef WEBRTC_CODEC_G722_1C
+
+ACMG722_1C::ACMG722_1C(WebRtc_Word16 /* codecID */)
+    : _operationalRate(-1),
+      _encoderInstPtr(NULL),
+      _encoderInstPtrRight(NULL),
+      _decoderInstPtr(NULL),
+      _encoderInst24Ptr(NULL),
+      _encoderInst24PtrR(NULL),
+      _encoderInst32Ptr(NULL),
+      _encoderInst32PtrR(NULL),
+      _encoderInst48Ptr(NULL),
+      _encoderInst48PtrR(NULL),
+      _decoderInst24Ptr(NULL),
+      _decoderInst32Ptr(NULL),
+      _decoderInst48Ptr(NULL) {
+  return;
+}
+
+ACMG722_1C::~ACMG722_1C() {
+  return;
+}
+
+WebRtc_Word16 ACMG722_1C::InternalEncode(
+    WebRtc_UWord8* /* bitStream */,
+    WebRtc_Word16* /* bitStreamLenByte */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMG722_1C::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+    WebRtc_Word16 /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio */,
+    WebRtc_Word16* /* audioSamples */,
+    WebRtc_Word8* /* speechType */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMG722_1C::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMG722_1C::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word32 ACMG722_1C::CodecDef(WebRtcNetEQ_CodecDef& /* codecDef */,
+    const CodecInst& /* codecInst */) {
+  return -1;
+}
+
+ACMGenericCodec* ACMG722_1C::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMG722_1C::InternalCreateEncoder() {
+  return -1;
+}
+
+void ACMG722_1C::DestructEncoderSafe() {
+  return;
+}
+
+WebRtc_Word16 ACMG722_1C::InternalCreateDecoder() {
+  return -1;
+}
+
+void ACMG722_1C::DestructDecoderSafe() {
+  return;
+}
+
+void ACMG722_1C::InternalDestructEncoderInst(void* /* ptrInst */) {
+  return;
+}
+
+#else     //===================== Actual Implementation =======================
+ACMG722_1C::ACMG722_1C(WebRtc_Word16 codecID) :
+  _encoderInstPtr(NULL), _encoderInstPtrRight(NULL), _decoderInstPtr(NULL),
+      _encoderInst24Ptr(NULL), _encoderInst24PtrR(NULL), _encoderInst32Ptr(NULL),
+      _encoderInst32PtrR(NULL), _encoderInst48Ptr(NULL), _encoderInst48PtrR(NULL),
+      _decoderInst24Ptr(NULL), _decoderInst32Ptr(NULL), _decoderInst48Ptr(NULL) {
+  _codecID = codecID;
+  if (_codecID == ACMCodecDB::kG722_1C_24) {
+    _operationalRate = 24000;
+  } else if (_codecID == ACMCodecDB::kG722_1C_32) {
+    _operationalRate = 32000;
+  } else if (_codecID == ACMCodecDB::kG722_1C_48) {
+    _operationalRate = 48000;
+  } else {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "Wrong codec id for G722_1c.");
+    _operationalRate = -1;
+  }
+  return;
+}
+
+ACMG722_1C::~ACMG722_1C() {
+  if (_encoderInstPtr != NULL) {
+    delete _encoderInstPtr;
+    _encoderInstPtr = NULL;
+  }
+  if (_encoderInstPtrRight != NULL) {
+    delete _encoderInstPtrRight;
+    _encoderInstPtrRight = NULL;
+  }
+  if (_decoderInstPtr != NULL) {
+    delete _decoderInstPtr;
+    _decoderInstPtr = NULL;
+  }
+
+  switch (_operationalRate) {
+    case 24000: {
+      _encoderInst24Ptr = NULL;
+      _encoderInst24PtrR = NULL;
+      _decoderInst24Ptr = NULL;
+      break;
+    }
+    case 32000: {
+      _encoderInst32Ptr = NULL;
+      _encoderInst32PtrR = NULL;
+      _decoderInst32Ptr = NULL;
+      break;
+    }
+    case 48000: {
+      _encoderInst48Ptr = NULL;
+      _encoderInst48PtrR = NULL;
+      _decoderInst48Ptr = NULL;
+      break;
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "Wrong rate for G722_1c.");
+      break;
+    }
+  }
+  return;
+}
+
+WebRtc_Word16 ACMG722_1C::InternalEncode(WebRtc_UWord8* bitStream,
+                                         WebRtc_Word16* bitStreamLenByte) {
+  WebRtc_Word16 leftChannel[640];
+  WebRtc_Word16 rightChannel[640];
+  WebRtc_Word16 lenInBytes;
+  WebRtc_Word16 outB[240];
+
+  // If stereo, split input signal in left and right channel before encoding
+  if (_noChannels == 2) {
+    for (int i = 0, j = 0; i < _frameLenSmpl * 2; i += 2, j++) {
+      leftChannel[j] = _inAudio[_inAudioIxRead + i];
+      rightChannel[j] = _inAudio[_inAudioIxRead + i + 1];
+    }
+  } else {
+    memcpy(leftChannel, &_inAudio[_inAudioIxRead], 640);
+  }
+
+  switch (_operationalRate) {
+    case 24000: {
+      lenInBytes = WebRtcG7221C_Encode24(_encoderInst24Ptr, leftChannel, 640,
+                                         &outB[0]);
+      if (_noChannels == 2) {
+        lenInBytes += WebRtcG7221C_Encode24(_encoderInst24PtrR, rightChannel,
+                                            640, &outB[lenInBytes / 2]);
+      }
+      break;
+    }
+    case 32000: {
+      lenInBytes = WebRtcG7221C_Encode32(_encoderInst32Ptr, leftChannel, 640,
+                                         &outB[0]);
+      if (_noChannels == 2) {
+        lenInBytes += WebRtcG7221C_Encode32(_encoderInst32PtrR, rightChannel,
+                                            640, &outB[lenInBytes / 2]);
+      }
+      break;
+    }
+    case 48000: {
+      lenInBytes = WebRtcG7221C_Encode48(_encoderInst48Ptr, leftChannel, 640,
+                                         &outB[0]);
+      if (_noChannels == 2) {
+        lenInBytes += WebRtcG7221C_Encode48(_encoderInst48PtrR, rightChannel,
+                                            640, &outB[lenInBytes / 2]);
+      }
+      break;
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "InternalEncode: Wrong rate for G722_1c.");
+      return -1;
+    }
+  }
+
+  memcpy(bitStream, outB, lenInBytes);
+  *bitStreamLenByte = lenInBytes;
+
+  // increment the read index this tell the caller that how far
+  // we have gone forward in reading the audio buffer
+  _inAudioIxRead += 640 * _noChannels;
+
+  return *bitStreamLenByte;
+}
+
+WebRtc_Word16 ACMG722_1C::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                     WebRtc_Word16 /* bitStreamLenByte */,
+                                     WebRtc_Word16* /* audio */,
+                                     WebRtc_Word16* /* audioSamples */,
+                                     WebRtc_Word8* /* speechType */) {
+  return 0;
+}
+
+WebRtc_Word16 ACMG722_1C::InternalInitEncoder(
+    WebRtcACMCodecParams* codecParams) {
+  WebRtc_Word16 ret;
+
+  switch (_operationalRate) {
+    case 24000: {
+      ret = WebRtcG7221C_EncoderInit24(_encoderInst24PtrR);
+      if (ret < 0) {
+        return ret;
+      }
+      return WebRtcG7221C_EncoderInit24(_encoderInst24Ptr);
+    }
+    case 32000: {
+      ret = WebRtcG7221C_EncoderInit32(_encoderInst32PtrR);
+      if (ret < 0) {
+        return ret;
+      }
+      return WebRtcG7221C_EncoderInit32(_encoderInst32Ptr);
+    }
+    case 48000: {
+      ret = WebRtcG7221C_EncoderInit48(_encoderInst48PtrR);
+      if (ret < 0) {
+        return ret;
+      }
+      return WebRtcG7221C_EncoderInit48(_encoderInst48Ptr);
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "InternalInitEncode: Wrong rate for G722_1c.");
+      return -1;
+    }
+  }
+}
+
+WebRtc_Word16 ACMG722_1C::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  switch (_operationalRate) {
+    case 24000: {
+      return WebRtcG7221C_DecoderInit24(_decoderInst24Ptr);
+    }
+    case 32000: {
+      return WebRtcG7221C_DecoderInit32(_decoderInst32Ptr);
+    }
+    case 48000: {
+      return WebRtcG7221C_DecoderInit48(_decoderInst48Ptr);
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "InternalInitDecoder: Wrong rate for G722_1c.");
+      return -1;
+    }
+  }
+}
+
+WebRtc_Word32 ACMG722_1C::CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                                   const CodecInst& codecInst) {
+
+  if (!_decoderInitialized) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "CodeDef: decoder not initialized for G722_1c");
+    return -1;
+  }
+  // NetEq has an array of pointers to WebRtcNetEQ_CodecDef.
+  // get an entry of that array (neteq wrapper will allocate memory)
+  // by calling "netEq->CodecDef", where "NETEQ_CODEC_G722_1_XX" would
+  // be the index of the entry.
+  // Fill up the given structure by calling
+  // "SET_CODEC_PAR" & "SET_G722_1_XX_FUNCTION."
+  // Then return the structure back to NetEQ to add the codec to it's
+  // database.
+  switch (_operationalRate) {
+    case 24000: {
+      SET_CODEC_PAR((codecDef), kDecoderG722_1C_24, codecInst.pltype,
+          _decoderInst24Ptr, 32000);
+      SET_G722_1C_24_FUNCTIONS((codecDef));
+      break;
+    }
+    case 32000: {
+      SET_CODEC_PAR((codecDef), kDecoderG722_1C_32, codecInst.pltype,
+          _decoderInst32Ptr, 32000);
+      SET_G722_1C_32_FUNCTIONS((codecDef));
+      break;
+    }
+    case 48000: {
+      SET_CODEC_PAR((codecDef), kDecoderG722_1C_32, codecInst.pltype,
+          _decoderInst48Ptr, 32000);
+      SET_G722_1C_48_FUNCTIONS((codecDef));
+      break;
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "CodeDef: Wrong rate for G722_1c.");
+      return -1;
+    }
+  }
+  return 0;
+}
+
+ACMGenericCodec*
+ACMG722_1C::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMG722_1C::InternalCreateEncoder() {
+  if ((_encoderInstPtr == NULL) || (_encoderInstPtrRight == NULL)) {
+    return -1;
+  }
+  switch (_operationalRate) {
+    case 24000: {
+      WebRtcG7221C_CreateEnc24(&_encoderInst24Ptr);
+      WebRtcG7221C_CreateEnc24(&_encoderInst24PtrR);
+      break;
+    }
+    case 32000: {
+      WebRtcG7221C_CreateEnc32(&_encoderInst32Ptr);
+      WebRtcG7221C_CreateEnc32(&_encoderInst32PtrR);
+      break;
+    }
+    case 48000: {
+      WebRtcG7221C_CreateEnc48(&_encoderInst48Ptr);
+      WebRtcG7221C_CreateEnc48(&_encoderInst48PtrR);
+      break;
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "InternalCreateEncoder: Wrong rate for G722_1c.");
+      return -1;
+    }
+  }
+  return 0;
+}
+
+void ACMG722_1C::DestructEncoderSafe() {
+  _encoderExist = false;
+  _encoderInitialized = false;
+  if (_encoderInstPtr != NULL) {
+    delete _encoderInstPtr;
+    _encoderInstPtr = NULL;
+  }
+  if (_encoderInstPtrRight != NULL) {
+    delete _encoderInstPtrRight;
+    _encoderInstPtrRight = NULL;
+  }
+  _encoderInst24Ptr = NULL;
+  _encoderInst32Ptr = NULL;
+  _encoderInst48Ptr = NULL;
+}
+
+WebRtc_Word16 ACMG722_1C::InternalCreateDecoder() {
+  if (_decoderInstPtr == NULL) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                 "InternalCreateEncoder: cannot create decoder");
+    return -1;
+  }
+  switch (_operationalRate) {
+    case 24000: {
+      WebRtcG7221C_CreateDec24(&_decoderInst24Ptr);
+      break;
+    }
+    case 32000: {
+      WebRtcG7221C_CreateDec32(&_decoderInst32Ptr);
+      break;
+    }
+    case 48000: {
+      WebRtcG7221C_CreateDec48(&_decoderInst48Ptr);
+      break;
+    }
+    default: {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                   "InternalCreateEncoder: Wrong rate for G722_1c.");
+      return -1;
+    }
+  }
+  return 0;
+}
+
+void ACMG722_1C::DestructDecoderSafe() {
+  _decoderExist = false;
+  _decoderInitialized = false;
+  if (_decoderInstPtr != NULL) {
+    delete _decoderInstPtr;
+    _decoderInstPtr = NULL;
+  }
+  _decoderInst24Ptr = NULL;
+  _decoderInst32Ptr = NULL;
+  _decoderInst48Ptr = NULL;
+}
+
+void ACMG722_1C::InternalDestructEncoderInst(void* ptrInst) {
+  if (ptrInst != NULL) {
+    delete ptrInst;
+  }
+  return;
+}
+
+#endif
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/acm_g7221c.h b/src/modules/audio_coding/main/source/acm_g7221c.h
new file mode 100644
index 0000000..ef573dc
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_g7221c.h
@@ -0,0 +1,90 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_1C_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_1C_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct G722_1C_24_encinst_t_;
+struct G722_1C_24_decinst_t_;
+struct G722_1C_32_encinst_t_;
+struct G722_1C_32_decinst_t_;
+struct G722_1C_48_encinst_t_;
+struct G722_1C_48_decinst_t_;
+struct G722_1_Inst_t_;
+
+namespace webrtc {
+
+class ACMG722_1C : public ACMGenericCodec
+{
+public:
+    ACMG722_1C(WebRtc_Word16 codecID);
+    ~ACMG722_1C();
+    // for FEC
+    ACMGenericCodec* CreateInstance(void);
+
+    WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitstream,
+        WebRtc_Word16* bitStreamLenByte);
+
+    WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams);
+
+    WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams);
+
+protected:
+    WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+    WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst&      codecInst);
+
+    void DestructEncoderSafe();
+
+    void DestructDecoderSafe();
+
+    WebRtc_Word16 InternalCreateEncoder();
+
+    WebRtc_Word16 InternalCreateDecoder();
+
+    void InternalDestructEncoderInst(
+        void* ptrInst);
+
+    WebRtc_Word32    _operationalRate;
+
+    G722_1_Inst_t_*  _encoderInstPtr;
+    G722_1_Inst_t_*  _encoderInstPtrRight; //Used in stereo mode
+    G722_1_Inst_t_*  _decoderInstPtr;
+
+    // Only one set of these pointer is valid at any instance
+    G722_1C_24_encinst_t_* _encoderInst24Ptr;
+    G722_1C_24_encinst_t_* _encoderInst24PtrR;
+    G722_1C_32_encinst_t_* _encoderInst32Ptr;
+    G722_1C_32_encinst_t_* _encoderInst32PtrR;
+    G722_1C_48_encinst_t_* _encoderInst48Ptr;
+    G722_1C_48_encinst_t_* _encoderInst48PtrR;
+
+    // Only one of these pointer is valid at any instance
+    G722_1C_24_decinst_t_* _decoderInst24Ptr;
+    G722_1C_32_decinst_t_* _decoderInst32Ptr;
+    G722_1C_48_decinst_t_* _decoderInst48Ptr;
+};
+
+} // namespace webrtc;
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G722_1C_H_
diff --git a/src/modules/audio_coding/main/source/acm_g729.cc b/src/modules/audio_coding/main/source/acm_g729.cc
new file mode 100644
index 0000000..d668ae7
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_g729.cc
@@ -0,0 +1,515 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_g729.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+#ifdef WEBRTC_CODEC_G729
+    // NOTE! G.729 is not included in the open-source package. The following
+    // interface file is needed:
+    //
+    // /modules/audio_coding/codecs/g729/main/interface/g729_interface.h
+    //
+    // The API in the header file should match the one below.
+    //
+    // int16_t WebRtcG729_CreateEnc(G729_encinst_t_** inst);
+    // int16_t WebRtcG729_CreateDec(G729_decinst_t_** inst);
+    // int16_t WebRtcG729_FreeEnc(G729_encinst_t_* inst);
+    // int16_t WebRtcG729_FreeDec(G729_decinst_t_* inst);
+    // int16_t WebRtcG729_Encode(G729_encinst_t_* encInst, int16_t* input,
+    //                                       int16_t len, int16_t* output);
+    // int16_t WebRtcG729_EncoderInit(G729_encinst_t_* encInst, int16_t mode);
+    // int16_t WebRtcG729_Decode(G729_decinst_t_* decInst);
+    // int16_t WebRtcG729_DecodeBwe(G729_decinst_t_* decInst, int16_t* input);
+    // int16_t WebRtcG729_DecodePlc(G729_decinst_t_* decInst);
+    // int16_t WebRtcG729_DecoderInit(G729_decinst_t_* decInst);
+    #include "g729_interface.h"
+#endif
+
+namespace webrtc {
+
+#ifndef WEBRTC_CODEC_G729
+
+ACMG729::ACMG729(WebRtc_Word16 /* codecID */)
+    : _encoderInstPtr(NULL),
+      _decoderInstPtr(NULL) {
+  return;
+}
+
+
+ACMG729::~ACMG729()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMG729::InternalEncode(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16* /* bitStreamLenByte */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMG729::EnableDTX()
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMG729::DisableDTX()
+{
+    return -1;
+}
+
+WebRtc_Word32
+ACMG729::ReplaceInternalDTXSafe(
+    const bool /*replaceInternalDTX*/)
+{
+    return -1;
+}
+
+WebRtc_Word32
+ACMG729::IsInternalDTXReplacedSafe(
+    bool* /* internalDTXReplaced */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMG729::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMG729::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMG729::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+
+WebRtc_Word32
+ACMG729::CodecDef(
+    WebRtcNetEQ_CodecDef& /* codecDef  */,
+    const CodecInst&      /* codecInst */)
+{
+    return -1;
+}
+
+
+ACMGenericCodec*
+ACMG729::CreateInstance(void)
+{
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMG729::InternalCreateEncoder()
+{
+    return -1;
+}
+
+
+void
+ACMG729::DestructEncoderSafe()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMG729::InternalCreateDecoder()
+{
+    return -1;
+}
+
+
+void
+ACMG729::DestructDecoderSafe()
+{
+    return;
+}
+
+
+void
+ACMG729::InternalDestructEncoderInst(
+    void* /* ptrInst */)
+{
+    return;
+}
+
+#else     //===================== Actual Implementation =======================
+
+ACMG729::ACMG729(
+    WebRtc_Word16 codecID):
+_encoderInstPtr(NULL),
+_decoderInstPtr(NULL)
+{
+    _codecID = codecID;
+    _hasInternalDTX = true;
+    return;
+}
+
+
+ACMG729::~ACMG729()
+{
+    if(_encoderInstPtr != NULL)
+    {
+        // Delete encoder memory
+        WebRtcG729_FreeEnc(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+    if(_decoderInstPtr != NULL)
+    {
+        // Delete decoder memory
+        WebRtcG729_FreeDec(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+    return;
+}
+
+
+WebRtc_Word16
+ACMG729::InternalEncode(
+    WebRtc_UWord8* bitStream,
+    WebRtc_Word16* bitStreamLenByte)
+{
+    // Initialize before entering the loop
+    WebRtc_Word16 noEncodedSamples = 0;
+    WebRtc_Word16 tmpLenByte = 0;
+    WebRtc_Word16 vadDecision = 0;
+    *bitStreamLenByte = 0;
+    while(noEncodedSamples < _frameLenSmpl)
+    {
+        // Call G.729 encoder with pointer to encoder memory, input
+        // audio, number of samples and bitsream
+        tmpLenByte = WebRtcG729_Encode(_encoderInstPtr,
+            &_inAudio[_inAudioIxRead], 80,
+            (WebRtc_Word16*)(&(bitStream[*bitStreamLenByte])));
+
+        // increment the read index this tell the caller that how far
+        // we have gone forward in reading the audio buffer
+        _inAudioIxRead += 80;
+
+        // sanity check
+        if(tmpLenByte < 0)
+        {
+            // error has happened
+            *bitStreamLenByte = 0;
+            return -1;
+        }
+
+        // increment number of written bytes
+        *bitStreamLenByte += tmpLenByte;
+        switch(tmpLenByte)
+        {
+        case 0:
+            {
+                if(0 == noEncodedSamples)
+                {
+                    // this is the first 10 ms in this packet and there is
+                    // no data generated, perhaps DTX is enabled and the
+                    // codec is not generating any bit-stream for this 10 ms.
+                    // we do not continue encoding this frame.
+                    return 0;
+                }
+                break;
+            }
+        case 2:
+            {
+                // check if G.729 internal DTX is enabled
+                if(_hasInternalDTX && _dtxEnabled)
+                {
+                    vadDecision = 0;
+                    for(WebRtc_Word16 n = 0; n < MAX_FRAME_SIZE_10MSEC; n++)
+                    {
+                        _vadLabel[n] = vadDecision;
+                    }
+                }
+                // we got a SID and have to send out this packet no matter
+                // how much audio we have encoded
+                return *bitStreamLenByte;
+            }
+        case 10:
+            {
+                vadDecision = 1;
+                // this is a valid length just continue encoding
+                break;
+            }
+        default:
+            {
+                return -1;
+            }
+        }
+
+        // update number of encoded samples
+        noEncodedSamples += 80;
+    }
+
+    // update VAD decision vector
+    if(_hasInternalDTX && !vadDecision && _dtxEnabled)
+    {
+        for(WebRtc_Word16 n = 0; n < MAX_FRAME_SIZE_10MSEC; n++)
+        {
+            _vadLabel[n] = vadDecision;
+        }
+    }
+
+    // done encoding, return number of encoded bytes
+    return *bitStreamLenByte;
+}
+
+
+WebRtc_Word16
+ACMG729::EnableDTX()
+{
+    if(_dtxEnabled)
+    {
+        // DTX already enabled, do nothing
+        return 0;
+    }
+    else if(_encoderExist)
+    {
+        // Re-init the G.729 encoder to turn on DTX
+        if(WebRtcG729_EncoderInit(_encoderInstPtr, 1) < 0)
+        {
+            return -1;
+        }
+        _dtxEnabled = true;
+        return 0;
+    }
+    else
+    {
+        return -1;
+    }
+}
+
+
+WebRtc_Word16
+ACMG729::DisableDTX()
+{
+    if(!_dtxEnabled)
+    {
+        // DTX already dissabled, do nothing
+        return 0;
+    }
+    else if(_encoderExist)
+    {
+        // Re-init the G.729 decoder to turn off DTX
+        if(WebRtcG729_EncoderInit(_encoderInstPtr, 0) < 0)
+        {
+            return -1;
+        }
+        _dtxEnabled = false;
+        return 0;
+    }
+    else
+    {
+        // encoder doesn't exists, therefore disabling is harmless
+        return 0;
+    }
+}
+
+
+WebRtc_Word32
+ACMG729::ReplaceInternalDTXSafe(
+    const bool replaceInternalDTX)
+{
+    // This function is used to dissable the G.729 built in DTX and use an
+    // external instead.
+
+    if(replaceInternalDTX == _hasInternalDTX)
+    {
+        // Make sure we keep the DTX/VAD setting if possible
+        bool oldEnableDTX = _dtxEnabled;
+        bool oldEnableVAD = _vadEnabled;
+        ACMVADMode oldMode = _vadMode;
+        if (replaceInternalDTX)
+        {
+            // Disable internal DTX before enabling external DTX
+            DisableDTX();
+        }
+        else
+        {
+            // Disable external DTX before enabling internal
+            ACMGenericCodec::DisableDTX();
+        }
+        _hasInternalDTX = !replaceInternalDTX;
+        WebRtc_Word16 status = SetVADSafe(oldEnableDTX, oldEnableVAD, oldMode);
+        // Check if VAD status has changed from inactive to active, or if error was
+        // reported
+        if (status == 1) {
+            _vadEnabled = true;
+            return status;
+        } else if (status < 0) {
+            _hasInternalDTX = replaceInternalDTX;
+            return -1;
+        }
+    }
+    return 0;
+}
+
+
+WebRtc_Word32
+ACMG729::IsInternalDTXReplacedSafe(
+    bool* internalDTXReplaced)
+{
+    // Get status of wether DTX is replaced or not
+    *internalDTXReplaced = !_hasInternalDTX;
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMG729::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    // This function is not used. G.729 decoder is called from inside NetEQ
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMG729::InternalInitEncoder(
+    WebRtcACMCodecParams* codecParams)
+{
+    // Init G.729 encoder
+    return WebRtcG729_EncoderInit(_encoderInstPtr,
+        ((codecParams->enableDTX)? 1:0));
+}
+
+
+WebRtc_Word16
+ACMG729::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    // Init G.729 decoder
+    return WebRtcG729_DecoderInit(_decoderInstPtr);
+}
+
+
+WebRtc_Word32
+ACMG729::CodecDef(
+    WebRtcNetEQ_CodecDef& codecDef,
+    const CodecInst&      codecInst)
+{
+    if (!_decoderInitialized)
+    {
+        // Todo:
+        // log error
+        return -1;
+    }
+
+    // Fill up the structure by calling
+    // "SET_CODEC_PAR" & "SET_G729_FUNCTION."
+    // Then call NetEQ to add the codec to it's
+    // database.
+    SET_CODEC_PAR((codecDef), kDecoderG729, codecInst.pltype,
+        _decoderInstPtr, 8000);
+    SET_G729_FUNCTIONS((codecDef));
+    return 0;
+}
+
+
+ACMGenericCodec*
+ACMG729::CreateInstance(void)
+{
+    // Function not used
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMG729::InternalCreateEncoder()
+{
+    // Create encoder memory
+    return WebRtcG729_CreateEnc(&_encoderInstPtr);
+}
+
+
+void
+ACMG729::DestructEncoderSafe()
+{
+    // Free encoder memory
+    _encoderExist = false;
+    _encoderInitialized = false;
+    if(_encoderInstPtr != NULL)
+    {
+        WebRtcG729_FreeEnc(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+}
+
+
+WebRtc_Word16
+ACMG729::InternalCreateDecoder()
+{
+    // Create decoder memory
+    return WebRtcG729_CreateDec(&_decoderInstPtr);
+}
+
+
+void
+ACMG729::DestructDecoderSafe()
+{
+    // Free decoder memory
+    _decoderExist = false;
+    _decoderInitialized = false;
+    if(_decoderInstPtr != NULL)
+    {
+        WebRtcG729_FreeDec(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+}
+
+
+void
+ACMG729::InternalDestructEncoderInst(
+    void* ptrInst)
+{
+    if(ptrInst != NULL)
+    {
+        WebRtcG729_FreeEnc((G729_encinst_t_*)ptrInst);
+    }
+    return;
+}
+
+#endif
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/acm_g729.h b/src/modules/audio_coding/main/source/acm_g729.h
new file mode 100644
index 0000000..474dabd
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_g729.h
@@ -0,0 +1,80 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G729_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G729_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct G729_encinst_t_;
+struct G729_decinst_t_;
+
+namespace webrtc {
+
+class ACMG729 : public ACMGenericCodec
+{
+public:
+    ACMG729(WebRtc_Word16 codecID);
+    ~ACMG729();
+    // for FEC
+    ACMGenericCodec* CreateInstance(void);
+
+    WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitstream,
+        WebRtc_Word16* bitStreamLenByte);
+
+    WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams);
+
+    WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams);
+
+protected:
+    WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+    WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst&      codecInst);
+
+    void DestructEncoderSafe();
+
+    void DestructDecoderSafe();
+
+    WebRtc_Word16 InternalCreateEncoder();
+
+    WebRtc_Word16 InternalCreateDecoder();
+
+    void InternalDestructEncoderInst(
+        void* ptrInst);
+
+    WebRtc_Word16 EnableDTX();
+
+    WebRtc_Word16 DisableDTX();
+
+    WebRtc_Word32 ReplaceInternalDTXSafe(
+        const bool replaceInternalDTX);
+
+    WebRtc_Word32 IsInternalDTXReplacedSafe(
+        bool* internalDTXReplaced);
+
+    G729_encinst_t_* _encoderInstPtr;
+    G729_decinst_t_* _decoderInstPtr;
+
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G729_H_
diff --git a/src/modules/audio_coding/main/source/acm_g7291.cc b/src/modules/audio_coding/main/source/acm_g7291.cc
new file mode 100644
index 0000000..cc8783f
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_g7291.cc
@@ -0,0 +1,471 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_g7291.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+#ifdef WEBRTC_CODEC_G729_1
+    // NOTE! G.729.1 is not included in the open-source package. The following
+    // interface file is needed:
+    //
+    // /modules/audio_coding/codecs/g7291/main/interface/g7291_interface.h
+    //
+    // The API in the header file should match the one below.
+    //
+    // int16_t WebRtcG7291_Create(G729_1_inst_t_** inst);
+    // int16_t WebRtcG7291_Free(G729_1_inst_t_* inst);
+    // int16_t WebRtcG7291_Encode(G729_1_inst_t_* encInst, int16_t* input,
+    //                            int16_t* output, int16_t myRate,
+    //                            int16_t nrFrames);
+    // int16_t WebRtcG7291_EncoderInit(G729_1_inst_t_* encInst, int16_t myRate,
+    //                                 int16_t flag8kHz, int16_t flagG729mode);
+    // int16_t WebRtcG7291_Decode(G729_1_inst_t_* decInst);
+    // int16_t WebRtcG7291_DecodeBwe(G729_1_inst_t_* decInst, int16_t* input);
+    // int16_t WebRtcG7291_DecodePlc(G729_1_inst_t_* decInst);
+    // int16_t WebRtcG7291_DecoderInit(G729_1_inst_t_* decInst);
+    #include "g7291_interface.h"
+#endif
+
+namespace webrtc {
+
+#ifndef WEBRTC_CODEC_G729_1
+
+ACMG729_1::ACMG729_1( WebRtc_Word16 /* codecID */)
+    : _encoderInstPtr(NULL),
+      _decoderInstPtr(NULL),
+      _myRate(32000),
+      _flag8kHz(0),
+      _flagG729mode(0) {
+  return;
+}
+
+
+ACMG729_1::~ACMG729_1()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMG729_1::InternalEncode(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16* /* bitStreamLenByte */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMG729_1::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMG729_1::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMG729_1::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+
+WebRtc_Word32
+ACMG729_1::CodecDef(
+    WebRtcNetEQ_CodecDef& /* codecDef  */,
+    const CodecInst&      /* codecInst */)
+{
+    return -1;
+}
+
+
+ACMGenericCodec*
+ACMG729_1::CreateInstance(void)
+{
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMG729_1::InternalCreateEncoder()
+{
+    return -1;
+}
+
+
+void
+ACMG729_1::DestructEncoderSafe()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMG729_1::InternalCreateDecoder()
+{
+    return -1;
+}
+
+
+void
+ACMG729_1::DestructDecoderSafe()
+{
+    return;
+}
+
+
+void
+ACMG729_1::InternalDestructEncoderInst(
+    void* /* ptrInst */)
+{
+    return;
+}
+
+WebRtc_Word16
+ACMG729_1::SetBitRateSafe(
+    const WebRtc_Word32 /*rate*/ )
+{
+  return -1;
+}
+
+#else     //===================== Actual Implementation =======================
+
+struct G729_1_inst_t_;
+
+ACMG729_1::ACMG729_1(WebRtc_Word16 codecID)
+    : _encoderInstPtr(NULL),
+      _decoderInstPtr(NULL),
+      _myRate(32000),  // Default rate.
+      _flag8kHz(0),
+      _flagG729mode(0) {
+  // TODO(tlegrand): We should add codecID as a input variable to the
+  // constructor of ACMGenericCodec.
+  _codecID = codecID;
+  return;
+}
+
+ACMG729_1::~ACMG729_1()
+{
+    if(_encoderInstPtr != NULL)
+    {
+        WebRtcG7291_Free(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+    if(_decoderInstPtr != NULL)
+    {
+        WebRtcG7291_Free(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+    return;
+}
+
+
+WebRtc_Word16
+ACMG729_1::InternalEncode(
+    WebRtc_UWord8* bitStream,
+    WebRtc_Word16* bitStreamLenByte)
+{
+
+    // Initialize before entering the loop 
+    WebRtc_Word16 noEncodedSamples = 0;
+    *bitStreamLenByte = 0;
+
+  WebRtc_Word16 byteLengthFrame = 0;
+
+    // Derive number of 20ms frames per encoded packet.
+  // [1,2,3] <=> [20,40,60]ms <=> [320,640,960] samples
+    WebRtc_Word16 n20msFrames = (_frameLenSmpl / 320);
+    // Byte length for the frame. +1 is for rate information.
+    byteLengthFrame = _myRate/(8*50) * n20msFrames + (1 - _flagG729mode);
+
+    // The following might be revised if we have G729.1 Annex C (support for DTX);
+    do
+    {
+        *bitStreamLenByte = WebRtcG7291_Encode(_encoderInstPtr, &_inAudio[_inAudioIxRead],
+       (WebRtc_Word16*)bitStream, _myRate, n20msFrames);
+
+        // increment the read index this tell the caller that how far
+        // we have gone forward in reading the audio buffer
+    _inAudioIxRead += 160;
+
+        // sanity check
+        if(*bitStreamLenByte < 0)
+        {
+      // error has happened
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "InternalEncode: Encode error for G729_1");
+            *bitStreamLenByte = 0;
+            return -1;
+        }
+
+    noEncodedSamples += 160;
+    } while(*bitStreamLenByte == 0);
+
+
+    // This criteria will change if we have Annex C.
+    if(*bitStreamLenByte != byteLengthFrame)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InternalEncode: Encode error for G729_1");
+        *bitStreamLenByte = 0;
+        return -1;
+    }
+
+
+    if(noEncodedSamples != _frameLenSmpl)
+    {
+        *bitStreamLenByte = 0;
+        return -1;
+    }
+
+    return *bitStreamLenByte;
+}
+
+
+WebRtc_Word16
+ACMG729_1::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMG729_1::InternalInitEncoder(
+    WebRtcACMCodecParams* codecParams)
+{
+  //set the bit rate and initialize
+  _myRate = codecParams->codecInstant.rate;
+    return SetBitRateSafe( (WebRtc_UWord32)_myRate);
+}
+
+
+WebRtc_Word16
+ACMG729_1::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    if (WebRtcG7291_DecoderInit(_decoderInstPtr) < 0)
+  {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "InternalInitDecoder: init decoder failed for G729_1");
+    return -1;
+  }
+  return 0;
+}
+
+
+WebRtc_Word32
+ACMG729_1::CodecDef(
+    WebRtcNetEQ_CodecDef& codecDef,
+    const CodecInst&      codecInst)
+{
+    if (!_decoderInitialized)
+    {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+      "CodeDef: Decoder uninitialized for G729_1");
+      return -1;
+    }
+
+    // Fill up the structure by calling
+    // "SET_CODEC_PAR" & "SET_G729_FUNCTION."
+    // Then call NetEQ to add the codec to it's
+    // database.
+    SET_CODEC_PAR((codecDef), kDecoderG729_1, codecInst.pltype,
+        _decoderInstPtr, 16000);
+    SET_G729_1_FUNCTIONS((codecDef));
+    return 0;
+}
+
+
+ACMGenericCodec*
+ACMG729_1::CreateInstance(void)
+{
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMG729_1::InternalCreateEncoder()
+{
+    if (WebRtcG7291_Create(&_encoderInstPtr) < 0)
+  {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+      "InternalCreateEncoder: create encoder failed for G729_1");
+    return -1;
+  }
+  return 0;
+}
+
+
+void
+ACMG729_1::DestructEncoderSafe()
+{
+    _encoderExist = false;
+    _encoderInitialized = false;
+    if(_encoderInstPtr != NULL)
+    {
+        WebRtcG7291_Free(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+}
+
+
+WebRtc_Word16
+ACMG729_1::InternalCreateDecoder()
+{
+   if (WebRtcG7291_Create(&_decoderInstPtr) < 0)
+   {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+    "InternalCreateDecoder: create decoder failed for G729_1");
+     return -1;
+   }
+   return 0;
+}
+
+
+void
+ACMG729_1::DestructDecoderSafe()
+{
+    _decoderExist = false;
+    _decoderInitialized = false;
+    if(_decoderInstPtr != NULL)
+    {
+        WebRtcG7291_Free(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+}
+
+
+void
+ACMG729_1::InternalDestructEncoderInst(
+    void* ptrInst)
+{
+    if(ptrInst != NULL)
+    {
+        //WebRtcG7291_Free((G729_1_inst_t*)ptrInst);
+    }
+    return;
+}
+
+WebRtc_Word16
+ACMG729_1::SetBitRateSafe(
+    const WebRtc_Word32 rate)
+{
+    //allowed rates: { 8000, 12000, 14000, 16000, 18000, 20000,
+    //                22000, 24000, 26000, 28000, 30000, 32000};
+    // TODO(tlegrand): This check exists in one other place two. Should be
+    // possible to reuse code.
+    switch(rate)
+    {
+    case 8000:
+        {
+            _myRate = 8000;
+            break;
+        }
+  case 12000:
+        {
+            _myRate = 12000;
+            break;
+        }
+  case 14000:
+        {
+            _myRate = 14000;
+            break;
+        }
+  case 16000:
+        {
+            _myRate = 16000;
+            break;
+        }
+  case 18000:
+        {
+            _myRate = 18000;
+            break;
+        }
+  case 20000:
+        {
+            _myRate = 20000;
+            break;
+        }
+  case 22000:
+        {
+            _myRate = 22000;
+            break;
+        }
+  case 24000:
+        {
+            _myRate = 24000;
+            break;
+        }
+  case 26000:
+        {
+            _myRate = 26000;
+            break;
+        }
+  case 28000:
+        {
+            _myRate = 28000;
+            break;
+        }
+  case 30000:
+        {
+            _myRate = 30000;
+            break;
+        }
+  case 32000:
+        {
+            _myRate = 32000;
+            break;
+        }
+    default:
+        {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "SetBitRateSafe: Invalid rate G729_1");
+            return -1;
+        }
+    }
+
+    // Re-init with new rate
+    if (WebRtcG7291_EncoderInit(_encoderInstPtr, _myRate, _flag8kHz, _flagG729mode) >= 0)
+    {
+        _encoderParams.codecInstant.rate = _myRate;
+        return 0;
+    }
+    else
+    {
+        return -1;
+    }
+}
+
+
+#endif
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/acm_g7291.h b/src/modules/audio_coding/main/source/acm_g7291.h
new file mode 100644
index 0000000..1d03250
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_g7291.h
@@ -0,0 +1,77 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G729_1_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G729_1_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct G729_1_inst_t_;
+struct G729_1_inst_t_;
+
+namespace webrtc {
+
+class ACMG729_1: public ACMGenericCodec
+{
+public:
+    ACMG729_1(WebRtc_Word16 codecID);
+    ~ACMG729_1();
+    // for FEC
+    ACMGenericCodec* CreateInstance(void);
+
+    WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitstream,
+        WebRtc_Word16* bitStreamLenByte);
+
+    WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams);
+
+    WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams);
+
+protected:
+    WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+    WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst& codecInst);
+
+    void DestructEncoderSafe();
+
+    void DestructDecoderSafe();
+
+    WebRtc_Word16 InternalCreateEncoder();
+
+    WebRtc_Word16 InternalCreateDecoder();
+
+    void InternalDestructEncoderInst(
+        void* ptrInst);
+
+  WebRtc_Word16 SetBitRateSafe(
+    const WebRtc_Word32 rate);
+
+    G729_1_inst_t_* _encoderInstPtr;
+    G729_1_inst_t_* _decoderInstPtr;
+
+    WebRtc_UWord16     _myRate;
+  WebRtc_Word16     _flag8kHz;
+    WebRtc_Word16     _flagG729mode;
+
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_G729_1_H_
diff --git a/src/modules/audio_coding/main/source/acm_generic_codec.cc b/src/modules/audio_coding/main/source/acm_generic_codec.cc
new file mode 100644
index 0000000..f9a6a3a
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_generic_codec.cc
@@ -0,0 +1,1534 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+#include <string.h>
+
+#include "acm_codec_database.h"
+#include "acm_common_defs.h"
+#include "acm_generic_codec.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_vad.h"
+#include "webrtc_cng.h"
+
+namespace webrtc
+{
+
+// Enum for CNG
+enum
+{
+    kMaxPLCParamsCNG = WEBRTC_CNG_MAX_LPC_ORDER,
+    kNewCNGNumPLCParams = 8
+};
+
+#define ACM_SID_INTERVAL_MSEC 100
+
+// We set some of the variables to invalid values as a check point
+// if a proper initialization has happened. Another approach is
+// to initialize to a default codec that we are sure is always included.
+ACMGenericCodec::ACMGenericCodec()
+    : _inAudioIxWrite(0),
+      _inAudioIxRead(0),
+      _inTimestampIxWrite(0),
+      _inAudio(NULL),
+      _inTimestamp(NULL),
+      _frameLenSmpl(-1),  // invalid value
+      _noChannels(1),
+      _codecID(-1),  // invalid value
+      _noMissedSamples(0),
+      _encoderExist(false),
+      _decoderExist(false),
+      _encoderInitialized(false),
+      _decoderInitialized(false),
+      _registeredInNetEq(false),
+      _hasInternalDTX(false),
+      _ptrVADInst(NULL),
+      _vadEnabled(false),
+      _vadMode(VADNormal),
+      _dtxEnabled(false),
+      _ptrDTXInst(NULL),
+      _numLPCParams(kNewCNGNumPLCParams),
+      _sentCNPrevious(false),
+      _isMaster(true),
+      _netEqDecodeLock(NULL),
+      _codecWrapperLock(*RWLockWrapper::CreateRWLock()),
+      _lastEncodedTimestamp(0),
+      _lastTimestamp(0xD87F3F9F),
+      _isAudioBuffFresh(true),
+      _uniqueID(0) {
+  // Initialize VAD vector.
+  for (int i = 0; i < MAX_FRAME_SIZE_10MSEC; i++) {
+    _vadLabel[i] = 0;
+  }
+
+  // Nullify memory for encoder and decoder, and set payload type to an
+  // invalid value.
+  memset(&_encoderParams, 0, sizeof(WebRtcACMCodecParams));
+  _encoderParams.codecInstant.pltype = -1;
+  memset(&_decoderParams, 0, sizeof(WebRtcACMCodecParams));
+  _decoderParams.codecInstant.pltype = -1;
+}
+
+ACMGenericCodec::~ACMGenericCodec()
+{
+    // Check all the members which are pointers and
+    // if they are not NULL delete/free them.
+
+    if(_ptrVADInst != NULL)
+    {
+        WebRtcVad_Free(_ptrVADInst);
+        _ptrVADInst = NULL;
+    }
+
+    if (_inAudio != NULL)
+    {
+        delete [] _inAudio;
+        _inAudio = NULL;
+    }
+
+    if (_inTimestamp != NULL)
+    {
+        delete [] _inTimestamp;
+        _inTimestamp = NULL;
+    }
+    if(_ptrDTXInst != NULL)
+    {
+        WebRtcCng_FreeEnc(_ptrDTXInst);
+        _ptrDTXInst = NULL;
+    }
+    delete &_codecWrapperLock;
+}
+
+WebRtc_Word32
+ACMGenericCodec::Add10MsData(
+    const WebRtc_UWord32 timestamp,
+    const WebRtc_Word16* data,
+    const WebRtc_UWord16 lengthSmpl,
+    const WebRtc_UWord8  audioChannel)
+{
+    WriteLockScoped wl(_codecWrapperLock);
+    return Add10MsDataSafe(timestamp, data, lengthSmpl, audioChannel);
+}
+
+WebRtc_Word32
+ACMGenericCodec::Add10MsDataSafe(
+    const WebRtc_UWord32 timestamp,
+    const WebRtc_Word16* data,
+    const WebRtc_UWord16 lengthSmpl,
+    const WebRtc_UWord8  audioChannel)
+{
+    // The codec expects to get data in correct sampling rate.
+    // get the sampling frequency of the codec
+    WebRtc_UWord16 plFreqHz;
+
+    if(EncoderSampFreq(plFreqHz) < 0)
+    {
+        // _codecID is not correct, perhaps the codec is not initialized yet.
+        return -1;
+    }
+
+    // Sanity check, if the length of the input corresponds to 10 ms.
+    if((plFreqHz / 100) != lengthSmpl)
+    {
+        // This is not 10 ms of audio, given the sampling frequency of the
+        // codec
+        return -1;
+    }
+    if(_lastTimestamp == timestamp)
+    {
+        // Same timestamp as the last time, overwrite.
+        if((_inAudioIxWrite >= lengthSmpl * audioChannel) &&
+           (_inTimestampIxWrite > 0))
+        {
+            _inAudioIxWrite -= lengthSmpl * audioChannel;
+            _inTimestampIxWrite--;
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, _uniqueID,
+                "Adding 10ms with previous timestamp, \
+overwriting the previous 10ms");
+        }
+        else
+        {
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, _uniqueID,
+                "Adding 10ms with previous timestamp, this will sound bad");
+        }
+    }
+
+    _lastTimestamp = timestamp;
+
+    if ((_inAudioIxWrite + lengthSmpl*audioChannel) > AUDIO_BUFFER_SIZE_W16)
+    {
+        // Get the number of samples to be overwritten
+        WebRtc_Word16 missedSamples = _inAudioIxWrite + lengthSmpl*audioChannel -
+            AUDIO_BUFFER_SIZE_W16;
+
+        // Move the data (overwite the old data)
+        memmove(_inAudio, _inAudio + missedSamples,
+            (AUDIO_BUFFER_SIZE_W16 - lengthSmpl*audioChannel)*sizeof(WebRtc_Word16));
+        // Copy the new data
+        memcpy(_inAudio + (AUDIO_BUFFER_SIZE_W16 - lengthSmpl*audioChannel), data,
+            lengthSmpl*audioChannel * sizeof(WebRtc_Word16));
+
+        // Get the number of 10 ms blocks which are overwritten
+        WebRtc_Word16 missed10MsecBlocks =
+            (WebRtc_Word16)((missedSamples/audioChannel * 100) / plFreqHz);
+
+        // Move the timestamps
+        memmove(_inTimestamp, _inTimestamp + missed10MsecBlocks,
+            (_inTimestampIxWrite - missed10MsecBlocks) * sizeof(WebRtc_UWord32));
+        _inTimestampIxWrite -= missed10MsecBlocks;
+        _inTimestamp[_inTimestampIxWrite] = timestamp;
+        _inTimestampIxWrite++;
+
+        // Buffer is full
+        _inAudioIxWrite = AUDIO_BUFFER_SIZE_W16;
+        IncreaseNoMissedSamples(missedSamples);
+        _isAudioBuffFresh = false;
+        return -missedSamples;
+    }
+    memcpy(_inAudio + _inAudioIxWrite, data, lengthSmpl*audioChannel * sizeof(WebRtc_Word16));
+    _inAudioIxWrite += lengthSmpl*audioChannel;
+
+    assert(_inTimestampIxWrite < TIMESTAMP_BUFFER_SIZE_W32);
+    assert(_inTimestampIxWrite >= 0);
+
+    _inTimestamp[_inTimestampIxWrite] = timestamp;
+    _inTimestampIxWrite++;
+    _isAudioBuffFresh = false;
+    return 0;
+}
+
+WebRtc_Word16
+ACMGenericCodec::Encode(
+    WebRtc_UWord8*         bitStream,
+    WebRtc_Word16*         bitStreamLenByte,
+    WebRtc_UWord32*        timeStamp,
+    WebRtcACMEncodingType* encodingType)
+{
+    WriteLockScoped lockCodec(_codecWrapperLock);
+    ReadLockScoped lockNetEq(*_netEqDecodeLock);
+    return EncodeSafe(bitStream, bitStreamLenByte,
+        timeStamp, encodingType);
+}
+
+
+WebRtc_Word16
+ACMGenericCodec::EncodeSafe(
+    WebRtc_UWord8*         bitStream,
+    WebRtc_Word16*         bitStreamLenByte,
+    WebRtc_UWord32*        timeStamp,
+    WebRtcACMEncodingType* encodingType)
+{
+    // Do we have enough data to encode?
+    // we wait until we have a full frame to encode.
+    if(_inAudioIxWrite < _frameLenSmpl*_noChannels)
+    {
+        // There is not enough audio
+        *timeStamp = 0;
+        *bitStreamLenByte = 0;
+        // Doesn't really matter what this parameter set to
+        *encodingType = kNoEncoding;
+        return 0;
+    }
+
+    // Not all codecs accept the whole frame to be pushed into
+    // encoder at once.
+    const WebRtc_Word16 myBasicCodingBlockSmpl =
+        ACMCodecDB::BasicCodingBlock(_codecID);
+    if((myBasicCodingBlockSmpl < 0) ||
+        (!_encoderInitialized) ||
+        (!_encoderExist))
+    {
+        // This should not happen
+        *timeStamp = 0;
+        *bitStreamLenByte = 0;
+        *encodingType = kNoEncoding;
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "EncodeSafe: error, basic coding sample block is negative");
+        return -1;
+    }
+
+    // This makes the internal encoder read from the begining of the buffer
+    _inAudioIxRead = 0;
+    *timeStamp = _inTimestamp[0];
+
+    // Process the audio through VAD the function doesn't set _vadLabels.
+    // If VAD is disabled all labels are set to ONE (active)
+    WebRtc_Word16 status = 0;
+    WebRtc_Word16 dtxProcessedSamples = 0;
+
+    status = ProcessFrameVADDTX(bitStream, bitStreamLenByte,
+        &dtxProcessedSamples);
+
+    if(status < 0)
+    {
+        *timeStamp = 0;
+        *bitStreamLenByte = 0;
+        *encodingType = kNoEncoding;
+    }
+    else
+    {
+        if(dtxProcessedSamples > 0)
+        {
+            // Dtx have processed some samples may or may not a bit-stream
+            // is generated we should not do any encoding (normally there
+            // will be not enough data)
+
+            // Setting the following makes that the move of audio data
+            // and timestamps happen correctly
+            _inAudioIxRead = dtxProcessedSamples;
+            // This will let the owner of ACMGenericCodec to know that the
+            // generated bit-stream is DTX to use correct payload type
+            WebRtc_UWord16 sampFreqHz;
+            EncoderSampFreq(sampFreqHz);
+            if (sampFreqHz == 8000) {
+                *encodingType = kPassiveDTXNB;
+            } else if (sampFreqHz == 16000) {
+                *encodingType = kPassiveDTXWB;
+            } else if (sampFreqHz == 32000) {
+                *encodingType = kPassiveDTXSWB;
+            } else {
+                status = -1;
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                    "EncodeSafe: Wrong sampling frequency for DTX.");
+            }
+
+            // Transport empty frame if we have an empty bitstream
+            if ((*bitStreamLenByte == 0)
+                && (_sentCNPrevious || ((_inAudioIxWrite - _inAudioIxRead) <= 0))
+                )
+            {
+                // Makes sure we transmit an empty frame
+                *bitStreamLenByte = 1;
+                *encodingType = kNoEncoding;
+            }
+            _sentCNPrevious = true;
+        }
+        else
+        {
+            _sentCNPrevious = false;
+            // This will let the caller of the method to know if the frame is
+            // Active or non-Active The caller of the method knows that the
+            // stream is encoded by codec and can use the info for callbacks,
+            // if any registered.
+            if(myBasicCodingBlockSmpl == 0)
+            {
+                // This codec can handle all allowed frame sizes as basic
+                // coding block
+                status = InternalEncode(bitStream, bitStreamLenByte);
+
+                if(status < 0)
+                {
+                    // TODO:
+                    // Maybe reseting the encoder to be fresh for the next
+                    // frame
+                    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                        "EncodeSafe: error in internalEncode");
+                    *bitStreamLenByte = 0;
+                    *encodingType = kNoEncoding;
+                }
+            }
+            else
+            {
+                // A basic-coding-block for this codec is defined so we loop
+                // over the audio with the steps of the basic-coding-block.
+                // It is not necessary that in each itteration
+                WebRtc_Word16 tmpBitStreamLenByte;
+
+                // Reset the variables which will be increamented in the loop
+                *bitStreamLenByte = 0;
+                bool done = false;
+                while(!done)
+                {
+                    status = InternalEncode(&bitStream[*bitStreamLenByte],
+                        &tmpBitStreamLenByte);
+                    *bitStreamLenByte += tmpBitStreamLenByte;
+
+                    // Guard Against errors and too large payloads
+                    if((status < 0) ||
+                        (*bitStreamLenByte > MAX_PAYLOAD_SIZE_BYTE))
+                    {
+                        // Error has happened if we are in the middle of a full
+                        // frame we have to exit. Before exiting, whatever bits
+                        // are in the buffer are probably corruptred. Anyways
+                        // we ignore them.
+                        *bitStreamLenByte = 0;
+                        *encodingType = kNoEncoding;
+                        // We might have come here because of the second
+                        // condition.
+                        status = -1;
+                         WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding,
+                            _uniqueID, "EncodeSafe: error in InternalEncode");
+                        // break from the loop
+                        break;
+                    }
+
+                    // TODO(andrew): This should be multiplied by the number of
+                    //               channels, right?
+                    // http://code.google.com/p/webrtc/issues/detail?id=714
+                    done = _inAudioIxRead >= _frameLenSmpl;
+                }
+            }
+            if(status >= 0)
+            {
+                *encodingType = (_vadLabel[0] == 1)?
+                kActiveNormalEncoded:kPassiveNormalEncoded;
+                // Transport empty frame if we have an empty bitsteram
+                if ((*bitStreamLenByte == 0) && ((_inAudioIxWrite - _inAudioIxRead) <= 0))
+                {
+                    // Makes sure we transmit an empty frame
+                    *bitStreamLenByte = 1;
+                    *encodingType = kNoEncoding;
+                }
+            }
+        }
+    }
+
+    // Move the timestampe buffer according to the number of 10 ms blocks
+    // which are read.
+    WebRtc_UWord16 sampFreqHz;
+    EncoderSampFreq(sampFreqHz);
+
+    WebRtc_Word16 num10MsecBlocks =
+            (WebRtc_Word16)((_inAudioIxRead/_noChannels * 100) / sampFreqHz);
+    if(_inTimestampIxWrite > num10MsecBlocks)
+    {
+        memmove(_inTimestamp, _inTimestamp + num10MsecBlocks,
+            (_inTimestampIxWrite - num10MsecBlocks) * sizeof(WebRtc_Word32));
+    }
+    _inTimestampIxWrite -= num10MsecBlocks;
+
+    // We have to move the audio that is not encoded to the beginning
+    // of the buffer and accordingly adjust the read and write indices.
+    if(_inAudioIxRead < _inAudioIxWrite)
+    {
+        memmove(_inAudio, &_inAudio[_inAudioIxRead],
+            (_inAudioIxWrite - _inAudioIxRead)*sizeof(WebRtc_Word16));
+    }
+
+    _inAudioIxWrite -= _inAudioIxRead;
+
+    _inAudioIxRead = 0;
+    _lastEncodedTimestamp = *timeStamp;
+    return (status < 0) ? (-1):(*bitStreamLenByte);
+}
+
+WebRtc_Word16
+ACMGenericCodec::Decode(
+    WebRtc_UWord8* bitStream,
+    WebRtc_Word16  bitStreamLenByte,
+    WebRtc_Word16* audio,
+    WebRtc_Word16* audioSamples,
+    WebRtc_Word8*  speechType)
+{
+    WriteLockScoped wl(_codecWrapperLock);
+    return DecodeSafe(bitStream, bitStreamLenByte, audio,
+        audioSamples, speechType);
+}
+
+bool
+ACMGenericCodec::EncoderInitialized()
+{
+    ReadLockScoped rl(_codecWrapperLock);
+    return _encoderInitialized;
+}
+
+bool
+ACMGenericCodec::DecoderInitialized()
+{
+    ReadLockScoped rl(_codecWrapperLock);
+    return _decoderInitialized;
+}
+
+
+WebRtc_Word32
+ACMGenericCodec::RegisterInNetEq(
+    ACMNetEQ*   netEq,
+    const CodecInst& codecInst)
+{
+    WebRtcNetEQ_CodecDef codecDef;
+    WriteLockScoped wl(_codecWrapperLock);
+
+    if(CodecDef(codecDef, codecInst) < 0)
+    {
+        // Failed to register
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "RegisterInNetEq: error, failed to register");
+        _registeredInNetEq = false;
+        return -1;
+    }
+    else
+    {
+        if(netEq->AddCodec(&codecDef, _isMaster) < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                "RegisterInNetEq: error, failed to add codec");
+            _registeredInNetEq = false;
+            return -1;
+        }
+        // Registered
+        _registeredInNetEq = true;
+        return 0;
+    }
+}
+
+WebRtc_Word16
+ACMGenericCodec::EncoderParams(
+    WebRtcACMCodecParams* encParams)
+{
+    ReadLockScoped rl(_codecWrapperLock);
+    return EncoderParamsSafe(encParams);
+}
+
+WebRtc_Word16
+ACMGenericCodec::EncoderParamsSafe(
+    WebRtcACMCodecParams* encParams)
+{
+    // Codec parameters are valid only if the encoder is initialized
+    if(_encoderInitialized)
+    {
+        WebRtc_Word32 currentRate;
+        memcpy(encParams, &_encoderParams, sizeof(WebRtcACMCodecParams));
+        currentRate = encParams->codecInstant.rate;
+        CurrentRate(currentRate);
+        encParams->codecInstant.rate = currentRate;
+        return 0;
+    }
+    else
+    {
+        encParams->codecInstant.plname[0] = '\0';
+        encParams->codecInstant.pltype    = -1;
+        encParams->codecInstant.pacsize   = 0;
+        encParams->codecInstant.rate      = 0;
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "EncoderParamsSafe: error, encoder not initialized");
+        return -1;
+    }
+}
+
+bool
+ACMGenericCodec::DecoderParams(
+    WebRtcACMCodecParams* decParams,
+    const WebRtc_UWord8   payloadType)
+{
+    ReadLockScoped rl(_codecWrapperLock);
+    return DecoderParamsSafe(decParams, payloadType);
+}
+
+bool
+ACMGenericCodec::DecoderParamsSafe(
+    WebRtcACMCodecParams* decParams,
+    const WebRtc_UWord8   payloadType)
+{
+    // Decoder parameters are valid only if decoder is initialized
+    if(_decoderInitialized)
+    {
+        if(payloadType == _decoderParams.codecInstant.pltype)
+        {
+            memcpy(decParams, &_decoderParams, sizeof(WebRtcACMCodecParams));
+            return true;
+        }
+    }
+
+    decParams->codecInstant.plname[0] = '\0';
+    decParams->codecInstant.pltype    = -1;
+    decParams->codecInstant.pacsize   = 0;
+    decParams->codecInstant.rate      = 0;
+    return false;
+}
+
+WebRtc_Word16
+ACMGenericCodec::ResetEncoder()
+{
+    WriteLockScoped lockCodec(_codecWrapperLock);
+    ReadLockScoped lockNetEq(*_netEqDecodeLock);
+    return ResetEncoderSafe();
+}
+
+WebRtc_Word16
+ACMGenericCodec::ResetEncoderSafe()
+{
+    if(!_encoderExist || !_encoderInitialized)
+    {
+        // We don't reset if doesn't exists or not initialized yet
+        return 0;
+    }
+
+    _inAudioIxWrite     = 0;
+    _inAudioIxRead      = 0;
+    _inTimestampIxWrite = 0;
+    _noMissedSamples    = 0;
+    _isAudioBuffFresh   = true;
+    memset(_inAudio, 0, AUDIO_BUFFER_SIZE_W16 * sizeof(WebRtc_Word16));
+    memset(_inTimestamp, 0, TIMESTAMP_BUFFER_SIZE_W32 * sizeof(WebRtc_Word32));
+
+    // Store DTX/VAD params
+    bool enableVAD = _vadEnabled;
+    bool enableDTX = _dtxEnabled;
+    ACMVADMode mode = _vadMode;
+
+    // Reset the encoder
+    if(InternalResetEncoder() < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "ResetEncoderSafe: error in reset encoder");
+        return -1;
+    }
+
+    // Disable DTX & VAD this deletes the states
+    // we like to have fresh start
+    DisableDTX();
+    DisableVAD();
+
+    // Set DTX/VAD
+    return SetVADSafe(enableDTX, enableVAD, mode);
+}
+
+WebRtc_Word16
+ACMGenericCodec::InternalResetEncoder()
+{
+    // For most of the codecs it is sufficient to
+    // call their internal initialization.
+    // There are some exceptions.
+    // ----
+    // For iSAC we don't want to lose BWE history,
+    // so for iSAC we have to over-write this function.
+    // ----
+    return InternalInitEncoder(&_encoderParams);
+}
+
+WebRtc_Word16
+ACMGenericCodec::InitEncoder(
+    WebRtcACMCodecParams* codecParams,
+    bool                  forceInitialization)
+{
+    WriteLockScoped lockCodec(_codecWrapperLock);
+    ReadLockScoped lockNetEq(*_netEqDecodeLock);
+    return InitEncoderSafe(codecParams, forceInitialization);
+}
+
+WebRtc_Word16
+ACMGenericCodec::InitEncoderSafe(
+    WebRtcACMCodecParams* codecParams,
+    bool                  forceInitialization)
+{
+    // Check if we got a valid set of parameters
+    int mirrorID;
+    int codecNumber =
+        ACMCodecDB::CodecNumber(&(codecParams->codecInstant), &mirrorID);
+
+    if(codecNumber < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InitEncoderSafe: error, codec number negative");
+        return -1;
+    }
+    // Check if the parameters are for this codec
+    if((_codecID >= 0) && (_codecID != codecNumber) && (_codecID != mirrorID))
+    {
+        // The current codec is not the same as the one given by codecParams
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InitEncoderSafe: current codec is not the same as the one given by codecParams");
+        return -1;
+    }
+
+    if(!CanChangeEncodingParam(codecParams->codecInstant))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InitEncoderSafe: cannot change encoding parameters");
+        return -1;
+    }
+
+    if(_encoderInitialized && !forceInitialization)
+    {
+        // The encoder is already initialized
+        return 0;
+    }
+    WebRtc_Word16 status;
+    if(!_encoderExist)
+    {
+        _encoderInitialized = false;
+        status = CreateEncoder();
+        if(status < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InitEncoderSafe: cannot create encoder");
+            return -1;
+        }
+        else
+        {
+            _encoderExist = true;
+        }
+    }
+    _frameLenSmpl = (codecParams->codecInstant).pacsize;
+    _noChannels = codecParams->codecInstant.channels;
+    status = InternalInitEncoder(codecParams);
+    if(status < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InitEncoderSafe: error in init encoder");
+        _encoderInitialized = false;
+        return -1;
+    }
+    else
+    {
+        memcpy(&_encoderParams, codecParams, sizeof(WebRtcACMCodecParams));
+        _encoderInitialized = true;
+        if(_inAudio == NULL)
+        {
+            _inAudio = new WebRtc_Word16[AUDIO_BUFFER_SIZE_W16];
+            if(_inAudio == NULL)
+            {
+                return -1;
+            }
+            memset(_inAudio, 0, AUDIO_BUFFER_SIZE_W16 * sizeof(WebRtc_Word16));
+        }
+        if(_inTimestamp == NULL)
+        {
+            _inTimestamp = new WebRtc_UWord32[TIMESTAMP_BUFFER_SIZE_W32];
+            if(_inTimestamp == NULL)
+            {
+                return -1;
+            }
+            memset(_inTimestamp, 0, sizeof(WebRtc_UWord32) *
+                TIMESTAMP_BUFFER_SIZE_W32);
+        }
+        _isAudioBuffFresh = true;
+    }
+    status = SetVADSafe(codecParams->enableDTX, codecParams->enableVAD,
+        codecParams->vadMode);
+
+    return status;
+}
+
+bool
+ACMGenericCodec::CanChangeEncodingParam(
+    CodecInst& /*codecInst*/)
+{
+    return true;
+}
+
+WebRtc_Word16
+ACMGenericCodec::InitDecoder(
+    WebRtcACMCodecParams* codecParams,
+    bool                  forceInitialization)
+{
+    WriteLockScoped lockCodc(_codecWrapperLock);
+    WriteLockScoped lockNetEq(*_netEqDecodeLock);
+    return InitDecoderSafe(codecParams, forceInitialization);
+}
+
+WebRtc_Word16
+ACMGenericCodec::InitDecoderSafe(
+    WebRtcACMCodecParams* codecParams,
+    bool                  forceInitialization)
+{
+    int mirrorID;
+    // Check if we got a valid set of parameters
+    int codecNumber =
+        ACMCodecDB::ReceiverCodecNumber(&codecParams->codecInstant, &mirrorID);
+
+    if(codecNumber < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                    "InitDecoderSafe: error, invalid codec number");
+        return -1;
+    }
+    // Check if the parameters are for this codec
+    if((_codecID >= 0) && (_codecID != codecNumber) && (_codecID != mirrorID))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                    "InitDecoderSafe: current codec is not the same as the one given "
+                    "by codecParams");
+        // The current codec is not the same as the one given by codecParams
+        return -1;
+    }
+
+
+    if(_decoderInitialized && !forceInitialization)
+    {
+        // The encoder is already initialized
+        return 0;
+    }
+
+    WebRtc_Word16 status;
+    if(!_decoderExist)
+    {
+        _decoderInitialized = false;
+        status = CreateDecoder();
+        if(status < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                    "InitDecoderSafe: cannot create decoder");
+            return -1;
+        }
+        else
+        {
+            _decoderExist = true;
+        }
+    }
+
+    status = InternalInitDecoder(codecParams);
+    if(status < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                "InitDecoderSafe: cannot init decoder");
+        _decoderInitialized = false;
+        return -1;
+    }
+    else
+    {
+        // Store the parameters
+        SaveDecoderParamSafe(codecParams);
+        _decoderInitialized = true;
+    }
+    return 0;
+}
+
+WebRtc_Word16
+ACMGenericCodec::ResetDecoder(WebRtc_Word16 payloadType)
+{
+    WriteLockScoped lockCodec(_codecWrapperLock);
+    WriteLockScoped lockNetEq(*_netEqDecodeLock);
+    return ResetDecoderSafe(payloadType);
+}
+
+WebRtc_Word16
+ACMGenericCodec::ResetDecoderSafe(WebRtc_Word16 payloadType)
+{
+    WebRtcACMCodecParams decoderParams;
+    if(!_decoderExist || !_decoderInitialized)
+    {
+        return 0;
+    }
+    // Initialization of the decoder should work for all
+    // the codec. If there is a codec that has to keep
+    // some states then we need to define a virtual and
+    // overwrite in that codec
+    DecoderParamsSafe(&decoderParams, (WebRtc_UWord8) payloadType);
+    return InternalInitDecoder(&decoderParams);
+}
+
+void
+ACMGenericCodec::ResetNoMissedSamples()
+{
+    WriteLockScoped cs(_codecWrapperLock);
+    _noMissedSamples = 0;
+}
+
+void
+ACMGenericCodec::IncreaseNoMissedSamples(
+    const WebRtc_Word16 noSamples)
+{
+    _noMissedSamples += noSamples;
+}
+
+// Get the number of missed samples, this can be public
+WebRtc_UWord32
+ACMGenericCodec::NoMissedSamples() const
+{
+    ReadLockScoped cs(_codecWrapperLock);
+    return _noMissedSamples;
+}
+void
+ACMGenericCodec::DestructEncoder()
+{
+    WriteLockScoped wl(_codecWrapperLock);
+
+    // Disable VAD and delete the instance
+    if(_ptrVADInst != NULL)
+    {
+        WebRtcVad_Free(_ptrVADInst);
+        _ptrVADInst = NULL;
+    }
+    _vadEnabled = false;
+    _vadMode = VADNormal;
+
+    //Disable DTX and delete the instance
+    _dtxEnabled = false;
+    if(_ptrDTXInst != NULL)
+    {
+        WebRtcCng_FreeEnc(_ptrDTXInst);
+        _ptrDTXInst = NULL;
+    }
+    _numLPCParams = kNewCNGNumPLCParams;
+
+    DestructEncoderSafe();
+}
+
+void
+ACMGenericCodec::DestructDecoder()
+{
+    WriteLockScoped wl(_codecWrapperLock);
+    _decoderParams.codecInstant.pltype = -1;
+    DestructDecoderSafe();
+}
+
+WebRtc_Word16
+ACMGenericCodec::SetBitRate(
+    const WebRtc_Word32 bitRateBPS)
+{
+    WriteLockScoped wl(_codecWrapperLock);
+    return SetBitRateSafe(bitRateBPS);
+}
+
+WebRtc_Word16
+ACMGenericCodec::SetBitRateSafe(
+    const WebRtc_Word32 bitRateBPS)
+{
+    // If the codec can change the bit-rate this function
+    // should be overwritten, otherewise the only acceptable
+    // value is the one that is in database.
+    CodecInst codecParams;
+    if(ACMCodecDB::Codec(_codecID, &codecParams) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "SetBitRateSafe: error in ACMCodecDB::Codec");
+        return -1;
+    }
+    if(codecParams.rate != bitRateBPS)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "SetBitRateSafe: rate value is not acceptable");
+        return -1;
+    }
+    else
+    {
+        return 0;
+    }
+}
+
+WebRtc_Word32
+ACMGenericCodec::GetEstimatedBandwidth()
+{
+    WriteLockScoped wl(_codecWrapperLock);
+    return GetEstimatedBandwidthSafe();
+}
+
+WebRtc_Word32
+ACMGenericCodec::GetEstimatedBandwidthSafe()
+{
+    // All codecs but iSAC will return -1
+    return -1;
+}
+
+WebRtc_Word32
+ACMGenericCodec::SetEstimatedBandwidth(
+    WebRtc_Word32 estimatedBandwidth)
+{
+    WriteLockScoped wl(_codecWrapperLock);
+    return SetEstimatedBandwidthSafe(estimatedBandwidth);
+}
+
+WebRtc_Word32
+ACMGenericCodec::SetEstimatedBandwidthSafe(
+    WebRtc_Word32 /*estimatedBandwidth*/)
+{
+    // All codecs but iSAC will return -1
+    return -1;
+}
+
+WebRtc_Word32
+ACMGenericCodec::GetRedPayload(
+    WebRtc_UWord8* redPayload,
+    WebRtc_Word16* payloadBytes)
+{
+    WriteLockScoped wl(_codecWrapperLock);
+    return GetRedPayloadSafe(redPayload, payloadBytes);
+}
+
+WebRtc_Word32
+ACMGenericCodec::GetRedPayloadSafe(
+    WebRtc_UWord8* /* redPayload   */,
+    WebRtc_Word16* /* payloadBytes */)
+{
+    return -1; // Do nothing by default
+}
+
+WebRtc_Word16
+ACMGenericCodec::CreateEncoder()
+{
+    WebRtc_Word16 status = 0;
+    if(!_encoderExist)
+    {
+        status = InternalCreateEncoder();
+        // We just created the codec and obviously it is not initialized
+        _encoderInitialized = false;
+    }
+
+    if(status < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "CreateEncoder: error in internal create encoder");
+        _encoderExist = false;
+    }
+    else
+    {
+        _encoderExist = true;
+    }
+    return status;
+}
+
+WebRtc_Word16
+ACMGenericCodec::CreateDecoder()
+{
+    WebRtc_Word16 status = 0;
+    if(!_decoderExist)
+    {
+        status = InternalCreateDecoder();
+        // Decoder just created and obviously it is not initialized
+        _decoderInitialized = false;
+    }
+
+    if(status < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "CreateDecoder: error in internal create decoder");
+        _decoderExist = false;
+    }
+    else
+    {
+        _decoderExist = true;
+    }
+    return status;
+}
+
+
+void ACMGenericCodec::DestructEncoderInst(void* ptrInst)
+{
+    if(ptrInst != NULL)
+    {
+        WriteLockScoped lockCodec(_codecWrapperLock);
+        ReadLockScoped lockNetEq(*_netEqDecodeLock);
+        InternalDestructEncoderInst(ptrInst);
+    }
+}
+
+
+WebRtc_Word16
+ACMGenericCodec::AudioBuffer(
+    WebRtcACMAudioBuff& audioBuff)
+{
+    ReadLockScoped cs(_codecWrapperLock);
+    memcpy(audioBuff.inAudio, _inAudio,
+        AUDIO_BUFFER_SIZE_W16 * sizeof(WebRtc_Word16));
+    audioBuff.inAudioIxRead = _inAudioIxRead;
+    audioBuff.inAudioIxWrite = _inAudioIxWrite;
+    memcpy(audioBuff.inTimestamp, _inTimestamp,
+        TIMESTAMP_BUFFER_SIZE_W32*sizeof(WebRtc_UWord32));
+    audioBuff.inTimestampIxWrite = _inTimestampIxWrite;
+    audioBuff.lastTimestamp = _lastTimestamp;
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMGenericCodec::SetAudioBuffer(
+    WebRtcACMAudioBuff& audioBuff)
+{
+    WriteLockScoped cs(_codecWrapperLock);
+    memcpy(_inAudio, audioBuff.inAudio,
+        AUDIO_BUFFER_SIZE_W16 * sizeof(WebRtc_Word16));
+    _inAudioIxRead = audioBuff.inAudioIxRead;
+    _inAudioIxWrite = audioBuff.inAudioIxWrite;
+    memcpy(_inTimestamp, audioBuff.inTimestamp,
+        TIMESTAMP_BUFFER_SIZE_W32*sizeof(WebRtc_UWord32));
+    _inTimestampIxWrite = audioBuff.inTimestampIxWrite;
+    _lastTimestamp = audioBuff.lastTimestamp;
+    _isAudioBuffFresh = false;
+    return 0;
+}
+
+
+WebRtc_UWord32
+ACMGenericCodec::LastEncodedTimestamp() const
+{
+    ReadLockScoped cs(_codecWrapperLock);
+    return _lastEncodedTimestamp;
+}
+
+
+WebRtc_UWord32
+ACMGenericCodec::EarliestTimestamp() const
+{
+    ReadLockScoped cs(_codecWrapperLock);
+    return _inTimestamp[0];
+}
+
+
+WebRtc_Word16
+ACMGenericCodec::SetVAD(
+    const bool       enableDTX,
+    const bool       enableVAD,
+    const ACMVADMode mode)
+{
+    WriteLockScoped cs(_codecWrapperLock);
+    return SetVADSafe(enableDTX, enableVAD, mode);
+}
+
+
+WebRtc_Word16
+ACMGenericCodec::SetVADSafe(
+    const bool       enableDTX,
+    const bool       enableVAD,
+    const ACMVADMode mode)
+{
+    if(enableDTX)
+    {
+        // Make G729 AnnexB a special case
+        if (!STR_CASE_CMP(_encoderParams.codecInstant.plname, "G729") && !_hasInternalDTX)
+        {
+            if (ACMGenericCodec::EnableDTX() < 0)
+            {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                "SetVADSafe: error in enable DTX");
+                return -1;
+            }
+        }
+        else
+        {
+            if(EnableDTX() < 0)
+            {
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                    "SetVADSafe: error in enable DTX");
+                return -1;
+            }
+        }
+
+        if(_hasInternalDTX)
+        {
+            // Codec has internal DTX, practically we don't need WebRtc VAD,
+            // however, we let the user to turn it on if they need call-backs
+            // on silence. Store VAD mode for future even if VAD is off.
+            _vadMode = mode;
+            return (enableVAD)? EnableVAD(mode):DisableVAD();
+        }
+        else
+        {
+            // Codec does not have internal DTX so enabling DTX requires an
+            // active VAD. 'enableDTX == true' overwrites VAD status.
+            if(EnableVAD(mode) < 0)
+            {
+                // If we cannot create VAD we have to disable DTX
+                if(!_vadEnabled)
+                {
+                    DisableDTX();
+                }
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                    "SetVADSafe: error in enable VAD");
+                return -1;
+            }
+
+            // Return '1', to let the caller know VAD was turned on, even if the
+            // function was called with VAD='false'
+            if (enableVAD == false) {
+                return 1;
+            } else {
+                return 0;
+            }
+        }
+    }
+    else
+    {
+        // Make G729 AnnexB a special case
+        if (!STR_CASE_CMP(_encoderParams.codecInstant.plname, "G729") && !_hasInternalDTX)
+        {
+            ACMGenericCodec::DisableDTX();
+        }
+        else
+        {
+            DisableDTX();
+        }
+        return (enableVAD)? EnableVAD(mode):DisableVAD();
+    }
+}
+
+WebRtc_Word16
+ACMGenericCodec::EnableDTX()
+{
+    if(_hasInternalDTX)
+    {
+        // We should not be here if we have internal DTX
+        // this function should be overwritten by the derived
+        // class in this case
+        return -1;
+    }
+    if(!_dtxEnabled)
+    {
+        if(WebRtcCng_CreateEnc(&_ptrDTXInst) < 0)
+        {
+            _ptrDTXInst = NULL;
+            return -1;
+        }
+        WebRtc_UWord16 freqHz;
+        EncoderSampFreq(freqHz);
+        if(WebRtcCng_InitEnc(_ptrDTXInst, (WebRtc_Word16)freqHz,
+            ACM_SID_INTERVAL_MSEC, _numLPCParams) < 0)
+        {
+            // Couldn't initialize, has to return -1, and free the memory
+            WebRtcCng_FreeEnc(_ptrDTXInst);
+            _ptrDTXInst = NULL;
+            return -1;
+        }
+        _dtxEnabled = true;
+    }
+    return 0;
+}
+
+WebRtc_Word16
+ACMGenericCodec::DisableDTX()
+{
+    if(_hasInternalDTX)
+    {
+        // We should not be here if we have internal DTX
+        // this function should be overwritten by the derived
+        // class in this case
+        return -1;
+    }
+    if(_ptrDTXInst != NULL)
+    {
+        WebRtcCng_FreeEnc(_ptrDTXInst);
+        _ptrDTXInst = NULL;
+    }
+    _dtxEnabled = false;
+    return 0;
+}
+
+WebRtc_Word16
+ACMGenericCodec::EnableVAD(
+    ACMVADMode mode)
+{
+    if((mode < VADNormal) || (mode > VADVeryAggr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "EnableVAD: error in VAD mode range");
+        return -1;
+    }
+
+    if(!_vadEnabled)
+    {
+        if(WebRtcVad_Create(&_ptrVADInst) < 0)
+        {
+            _ptrVADInst = NULL;
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                "EnableVAD: error in create VAD");
+            return -1;
+        }
+        if(WebRtcVad_Init(_ptrVADInst) < 0)
+        {
+            WebRtcVad_Free(_ptrVADInst);
+            _ptrVADInst = NULL;
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                "EnableVAD: error in init VAD");
+            return -1;
+        }
+    }
+
+    // Set the vad mode to the given value
+    if(WebRtcVad_set_mode(_ptrVADInst, mode) < 0)
+    {
+        // We failed to set the mode and we have to return -1. If
+        // we already have a working VAD (_vadEnabled == true) then
+        // we leave it to work. otherwise, the following will be
+        // executed.
+        if(!_vadEnabled)
+        {
+            // We just created the instance but cannot set the mode
+            // we have to free the memomry.
+            WebRtcVad_Free(_ptrVADInst);
+            _ptrVADInst = NULL;
+        }
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, _uniqueID,
+            "EnableVAD: failed to set the VAD mode");
+        return -1;
+    }
+    _vadMode = mode;
+    _vadEnabled = true;
+    return 0;
+}
+
+WebRtc_Word16
+ACMGenericCodec::DisableVAD()
+{
+    if(_ptrVADInst != NULL)
+    {
+        WebRtcVad_Free(_ptrVADInst);
+        _ptrVADInst = NULL;
+    }
+    _vadEnabled = false;
+    return 0;
+}
+
+WebRtc_Word32
+ACMGenericCodec::ReplaceInternalDTX(
+    const bool replaceInternalDTX)
+{
+    WriteLockScoped cs(_codecWrapperLock);
+    return ReplaceInternalDTXSafe(replaceInternalDTX);
+}
+
+WebRtc_Word32
+ACMGenericCodec::ReplaceInternalDTXSafe(
+    const bool /* replaceInternalDTX */)
+{
+    return -1;
+}
+
+WebRtc_Word32
+ACMGenericCodec::IsInternalDTXReplaced(
+    bool* internalDTXReplaced)
+{
+    WriteLockScoped cs(_codecWrapperLock);
+    return IsInternalDTXReplacedSafe(internalDTXReplaced);
+}
+
+WebRtc_Word32
+ACMGenericCodec::IsInternalDTXReplacedSafe(
+    bool* internalDTXReplaced)
+{
+    *internalDTXReplaced = false;
+    return 0;
+}
+
+WebRtc_Word16
+ACMGenericCodec::ProcessFrameVADDTX(
+    WebRtc_UWord8* bitStream,
+    WebRtc_Word16* bitStreamLenByte,
+    WebRtc_Word16* samplesProcessed)
+{
+    if(!_vadEnabled)
+    {
+        // VAD not enabled, set all vadLable[] to 1 (speech detected)
+        for(WebRtc_Word16 n = 0; n < MAX_FRAME_SIZE_10MSEC; n++)
+        {
+            _vadLabel[n] = 1;
+        }
+        *samplesProcessed = 0;
+        return 0;
+    }
+    WebRtc_UWord16 freqHz;
+    EncoderSampFreq(freqHz);
+
+    // Calculate number of samples in 10 ms blocks, and number ms in one frame
+    WebRtc_Word16 samplesIn10Msec = (WebRtc_Word16)(freqHz / 100);
+    WebRtc_Word32 frameLenMsec = (((WebRtc_Word32)_frameLenSmpl * 1000) / freqHz);
+    WebRtc_Word16 status;
+
+    // Vector for storing maximum 30 ms of mono audio at 32 kHz
+    WebRtc_Word16 audio[960];
+
+    // Calculate number of VAD-blocks to process, and number of samples in each block.
+    int noSamplesToProcess[2];
+    if (frameLenMsec == 40)
+    {
+        // 20 ms in each VAD block
+        noSamplesToProcess[0] = noSamplesToProcess[1] = 2*samplesIn10Msec;
+    }
+    else
+    {
+        // For 10-30 ms framesizes, second VAD block will be size zero ms,
+        // for 50 and 60 ms first VAD block will be 30 ms.
+        noSamplesToProcess[0] = (frameLenMsec > 30)? 3*samplesIn10Msec : _frameLenSmpl;
+        noSamplesToProcess[1] = _frameLenSmpl-noSamplesToProcess[0];
+    }
+
+    int offSet = 0;
+    int loops = (noSamplesToProcess[1]>0) ? 2 : 1;
+    for (int i=0; i<loops; i++) {
+        // If stereo, calculate mean of the two channels
+        if(_noChannels == 2) {
+            for (int j=0; j<noSamplesToProcess[i]; j++) {
+                audio[j] = (_inAudio[(offSet+j)*2]+_inAudio[(offSet+j)*2+1])/2;
+        }
+        offSet = noSamplesToProcess[0];
+        } else {
+            // Mono, copy data from _inAudio to continue work on
+            memcpy(audio, _inAudio, sizeof(WebRtc_Word16)*noSamplesToProcess[i]);
+        }
+
+        // Call VAD
+        status = (WebRtc_Word16)WebRtcVad_Process(_ptrVADInst, (int)freqHz,
+            audio, noSamplesToProcess[i]);
+
+        _vadLabel[i] = status;
+
+        if(status < 0)
+        {
+            // This will force that the data be removed from the buffer
+            *samplesProcessed += noSamplesToProcess[i];
+            return -1;
+        }
+
+        // If VAD decision non-active, update DTX. NOTE! We only do this if the first part of
+        // a frame gets the VAD decision "inactive". Otherwise DTX might say it is time to
+        // transmit SID frame, but we will encode the whole frame, because the first part is
+        // active.
+        *samplesProcessed = 0;
+        if((status == 0) && (i==0) && _dtxEnabled && !_hasInternalDTX)
+        {
+            WebRtc_Word16 bitStreamLen;
+            WebRtc_Word16 num10MsecFrames = noSamplesToProcess[i] / samplesIn10Msec;
+            *bitStreamLenByte = 0;
+            for(WebRtc_Word16 n = 0; n < num10MsecFrames; n++)
+            {
+                // This block is (passive) && (vad enabled)
+                status = WebRtcCng_Encode(_ptrDTXInst, &audio[n*samplesIn10Msec],
+                    samplesIn10Msec, bitStream, &bitStreamLen, 0);
+                if (status < 0) {
+                    return -1;
+                }
+
+                *samplesProcessed += samplesIn10Msec*_noChannels;
+
+                // bitStreamLen will only be > 0 once per 100 ms
+                *bitStreamLenByte += bitStreamLen;
+            }
+
+
+            // Check if all samples got processed by the DTX
+            if(*samplesProcessed != noSamplesToProcess[i]*_noChannels) {
+                // Set to zero since something went wrong. Shouldn't happen.
+                *samplesProcessed = 0;
+            }
+        }
+
+        if(*samplesProcessed > 0)
+        {
+            // The block contains inactive speech, and is processed by DTX.
+            // Discontinue running VAD.
+            break;
+        }
+    }
+
+    return status;
+}
+
+WebRtc_Word16
+ACMGenericCodec::SamplesLeftToEncode()
+{
+    ReadLockScoped rl(_codecWrapperLock);
+    return (_frameLenSmpl <= _inAudioIxWrite)?
+        0:(_frameLenSmpl - _inAudioIxWrite);
+}
+
+void
+ACMGenericCodec::SetUniqueID(
+    const WebRtc_UWord32 id)
+{
+    _uniqueID = id;
+}
+
+bool
+ACMGenericCodec::IsAudioBufferFresh() const
+{
+    ReadLockScoped rl(_codecWrapperLock);
+    return _isAudioBuffFresh;
+}
+
+// This function is replaced by codec specific functions for some codecs
+WebRtc_Word16
+ACMGenericCodec::EncoderSampFreq(WebRtc_UWord16& sampFreqHz)
+{
+    WebRtc_Word32 f;
+    f = ACMCodecDB::CodecFreq(_codecID);
+    if(f < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                     "EncoderSampFreq: codec frequency is negative");
+        return -1;
+    }
+    else
+    {
+        sampFreqHz = (WebRtc_UWord16)f;
+        return 0;
+    }
+}
+
+
+WebRtc_Word32
+ACMGenericCodec::ConfigISACBandwidthEstimator(
+    const WebRtc_UWord8  /* initFrameSizeMsec */,
+    const WebRtc_UWord16 /* initRateBitPerSec */,
+    const bool           /* enforceFrameSize  */)
+{
+    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, _uniqueID,
+        "The send-codec is not iSAC, failed to config iSAC bandwidth estimator.");
+    return -1;
+}
+
+WebRtc_Word32
+ACMGenericCodec::SetISACMaxRate(
+    const WebRtc_UWord32 /* maxRateBitPerSec */)
+{
+    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, _uniqueID,
+        "The send-codec is not iSAC, failed to set iSAC max rate.");
+    return -1;
+}
+
+WebRtc_Word32
+ACMGenericCodec::SetISACMaxPayloadSize(
+    const WebRtc_UWord16 /* maxPayloadLenBytes */)
+{
+    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, _uniqueID,
+        "The send-codec is not iSAC, failed to set iSAC max payload-size.");
+    return -1;
+}
+
+
+void
+ACMGenericCodec::SaveDecoderParam(
+    const WebRtcACMCodecParams* codecParams)
+{
+    WriteLockScoped wl(_codecWrapperLock);
+    SaveDecoderParamSafe(codecParams);
+}
+
+
+void
+ACMGenericCodec::SaveDecoderParamSafe(
+    const WebRtcACMCodecParams* codecParams)
+{
+    memcpy(&_decoderParams, codecParams, sizeof(WebRtcACMCodecParams));
+}
+
+WebRtc_Word16
+ACMGenericCodec::UpdateEncoderSampFreq(
+    WebRtc_UWord16 /* encoderSampFreqHz */)
+{
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "It is asked for a change in smapling frequency while the \
+current send-codec supports only one sampling rate.");
+    return -1;
+}
+
+
+void
+ACMGenericCodec::SetIsMaster(
+    bool isMaster)
+{
+    WriteLockScoped wl(_codecWrapperLock);
+    _isMaster = isMaster;
+}
+
+
+
+WebRtc_Word16
+ACMGenericCodec::REDPayloadISAC(
+        const WebRtc_Word32  /* isacRate        */,
+        const WebRtc_Word16  /* isacBwEstimate  */,
+        WebRtc_UWord8*       /* payload         */,
+        WebRtc_Word16*       /* payloadLenBytes */)
+{
+   WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+       "Error: REDPayloadISAC is an iSAC specific function");
+    return -1;
+}
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/acm_generic_codec.h b/src/modules/audio_coding/main/source/acm_generic_codec.h
new file mode 100644
index 0000000..c138ed9
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_generic_codec.h
@@ -0,0 +1,1332 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GENERIC_CODEC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GENERIC_CODEC_H_
+
+#include "acm_common_defs.h"
+#include "audio_coding_module_typedefs.h"
+#include "rw_lock_wrapper.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+
+#define MAX_FRAME_SIZE_10MSEC 6
+
+// forward declaration
+struct WebRtcVadInst;
+struct WebRtcCngEncInst;
+
+namespace webrtc
+{
+
+// forward declaration
+struct CodecInst;
+class  ACMNetEQ;
+
+class ACMGenericCodec
+{
+public:
+    ///////////////////////////////////////////////////////////////////////////
+    // Constructor of the class
+    //
+    ACMGenericCodec();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // Destructor of the class.
+    //
+    virtual ~ACMGenericCodec();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // ACMGenericCodec* CreateInstance();
+    // The function will be used for FEC. It is not implemented yet.
+    //
+    virtual ACMGenericCodec* CreateInstance() = 0;
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 Encode()
+    // The function is called to perform an encoding of the audio stored in
+    // audio buffer. An encoding is performed only if enough audio, i.e. equal
+    // to the frame-size of the codec, exist. The audio frame will be processed
+    // by VAD and CN/DTX if required. There are few different cases.
+    //
+    // A) Neither VAD nor DTX is active; the frame is encoded by the encoder.
+    //
+    // B) VAD is enabled but not DTX; in this case the audio is processed by VAD
+    //    and encoded by the encoder. The "*encodingType" will be either
+    //    "activeNormalEncode" or "passiveNormalEncode" if frame is active or
+    //    passive, respectively.
+    //
+    // C) DTX is enabled; if the codec has internal VAD/DTX we just encode the
+    //    frame by the encoder. Otherwise, the frame is passed through VAD and
+    //    if identified as passive, then it will be processed by CN/DTX. If the
+    //    frame is active it will be encoded by the encoder.
+    //
+    // This function acquires the appropriate locks and calls EncodeSafe() for
+    // the actual processing.
+    //
+    // Outputs:
+    //   -bitStream          : a buffer where bit-stream will be written to.
+    //   -bitStreamLenByte   : contains the length of the bit-stream in
+    //                         bytes.
+    //   -timeStamp          : contains the RTP timestamp, this is the
+    //                         sampling time of the first sample encoded
+    //                         (measured in number of samples).
+    //   -encodingType       : contains the type of encoding applied on the
+    //                         audio samples. The alternatives are
+    //                         (c.f. acm_common_types.h)
+    //                         -kNoEncoding:
+    //                            there was not enough data to encode. or
+    //                            some error has happened that we could
+    //                            not do encoding.
+    //                         -kActiveNormalEncoded:
+    //                            the audio frame is active and encoded by
+    //                            the given codec.
+    //                         -kPassiveNormalEncoded:
+    //                            the audio frame is passive but coded with
+    //                            the given codec (NO DTX).
+    //                         -kPassiveDTXWB:
+    //                            The audio frame is passive and used
+    //                            wide-band CN to encode.
+    //                         -kPassiveDTXNB:
+    //                            The audio frame is passive and used
+    //                            narrow-band CN to encode.
+    //
+    // Return value:
+    //   -1 if error is occurred, otherwise the length of the bit-stream in
+    //      bytes.
+    //
+    WebRtc_Word16 Encode(
+        WebRtc_UWord8*         bitStream,
+        WebRtc_Word16*         bitStreamLenByte,
+        WebRtc_UWord32*        timeStamp,
+        WebRtcACMEncodingType* encodingType);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 Decode()
+    // This function is used to decode a given bit-stream, without engaging
+    // NetEQ.
+    //
+    // This function acquires the appropriate locks and calls DecodeSafe() for
+    // the actual processing. Please note that this is not functional yet.
+    //
+    // Inputs:
+    //   -bitStream          : a buffer where bit-stream will be read.
+    //   -bitStreamLenByte   : the length of the bit-stream in bytes.
+    //
+    // Outputs:
+    //   -audio              : pointer to a buffer where the audio will written.
+    //   -audioSamples       : number of audio samples out of decoding the given
+    //                         bit-stream.
+    //   -speechType         : speech type (for future use).
+    //
+    // Return value:
+    //   -1 if failed to decode,
+    //    0 if succeeded.
+    //
+    WebRtc_Word16 Decode(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // void SplitStereoPacket()
+    // This function is used to split stereo payloads in left and right channel.
+    // Codecs which has stereo support has there own implementation of the
+    // function.
+    //
+    // Input/Output:
+    //   -payload             : a vector with the received payload data.
+    //                          The function will reorder the data so that
+    //                          first half holds the left channel data, and the
+    //                          second half the right channel data.
+    //   -payload_length      : length of payload in bytes. Will be changed to
+    //                          twice the input in case of true stereo, where
+    //                          we simply copy the data and return it both for
+    //                          left channel and right channel decoding.
+    virtual void SplitStereoPacket(WebRtc_UWord8* /* payload */,
+                                   WebRtc_Word32* /* payload_length */) {}
+
+    ///////////////////////////////////////////////////////////////////////////
+    // bool EncoderInitialized();
+    //
+    // Return value:
+    //   True if the encoder is successfully initialized,
+    //   false otherwise.
+    //
+    bool EncoderInitialized();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // bool DecoderInitialized();
+    //
+    // Return value:
+    //   True if the decoder is successfully initialized,
+    //   false otherwise.
+    //
+    bool DecoderInitialized();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 EncoderParams()
+    // It is called to get encoder parameters. It will call
+    // EncoderParamsSafe() in turn.
+    //
+    // Output:
+    //   -encParams          : a buffer where the encoder parameters is
+    //                         written to. If the encoder is not
+    //                         initialized this buffer is filled with
+    //                         invalid values
+    // Return value:
+    //   -1 if the encoder is not initialized,
+    //    0 otherwise.
+    //
+    //
+    WebRtc_Word16 EncoderParams(
+        WebRtcACMCodecParams *encParams);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 DecoderParams(...)
+    // It is called to get decoder parameters. It will call DecoderParamsSafe()
+    // in turn.
+    //
+    // Output:
+    //   -decParams          : a buffer where the decoder parameters is
+    //                         written to. If the decoder is not initialized
+    //                         this buffer is filled with invalid values
+    //
+    // Return value:
+    //   -1 if the decoder is not initialized,
+    //    0 otherwise.
+    //
+    //
+    bool DecoderParams(
+        WebRtcACMCodecParams *decParams,
+        const WebRtc_UWord8  payloadType);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 InitEncoder(...)
+    // This function is called to initialize the encoder with the given
+    // parameters.
+    //
+    // Input:
+    //   -codecParams        : parameters of encoder.
+    //   -forceInitialization: if false the initialization is invoked only if
+    //                         the encoder is not initialized. If true the
+    //                         encoder is forced to (re)initialize.
+    //
+    // Return value:
+    //   0 if could initialize successfully,
+    //  -1 if failed to initialize.
+    //
+    //
+    WebRtc_Word16 InitEncoder(
+        WebRtcACMCodecParams* codecParams,
+        bool                  forceInitialization);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 InitDecoder()
+    // This function is called to initialize the decoder with the given
+    // parameters. (c.f. acm_common_defs.h & common_types.h for the
+    // definition of the structure)
+    //
+    // Input:
+    //   -codecParams        : parameters of decoder.
+    //   -forceInitialization: if false the initialization is invoked only
+    //                         if the decoder is not initialized. If true
+    //                         the encoder is forced to(re)initialize.
+    //
+    // Return value:
+    //   0 if could initialize successfully,
+    //  -1 if failed to initialize.
+    //
+    //
+    WebRtc_Word16 InitDecoder(
+        WebRtcACMCodecParams* codecParams,
+        bool                 forceInitialization);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word32 RegisterInNetEq(...)
+    // This function is called to register the decoder in NetEq, with the given
+    // payload-type.
+    //
+    // Inputs:
+    //   -netEq              : pointer to NetEq Instance
+    //   -codecInst          : instance with of the codec settings of the codec
+    //
+    // Return values
+    //   -1 if failed to register,
+    //    0 if successfully initialized.
+    //
+    WebRtc_Word32 RegisterInNetEq(
+        ACMNetEQ*             netEq,
+        const CodecInst& codecInst);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word32 Add10MsData(...)
+    // This function is called to add 10 ms of audio to the audio buffer of
+    // the codec.
+    //
+    // Inputs:
+    //   -timeStamp          : the timestamp of the 10 ms audio. the timestamp
+    //                         is the sampling time of the
+    //                         first sample measured in number of samples.
+    //   -data               : a buffer that contains the audio. The codec
+    //                         expects to get the audio in correct sampling
+    //                         frequency
+    //   -length             : the length of the audio buffer
+    //   -audioChannel       : 0 for mono, 1 for stereo (not supported yet)
+    //
+    // Return values:
+    //   -1 if failed
+    //    0 otherwise.
+    //
+    WebRtc_Word32 Add10MsData(
+        const WebRtc_UWord32 timeStamp,
+        const WebRtc_Word16* data,
+        const WebRtc_UWord16 length,
+        const WebRtc_UWord8  audioChannel);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_UWord32 NoMissedSamples()
+    // This function returns the number of samples which are overwritten in
+    // the audio buffer. The audio samples are overwritten if the input audio
+    // buffer is full, but Add10MsData() is called. (We might remove this
+    // function if it is not used)
+    //
+    // Return Value:
+    //   Number of samples which are overwritten.
+    //
+    WebRtc_UWord32 NoMissedSamples() const;
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // void ResetNoMissedSamples()
+    // This function resets the number of overwritten samples to zero.
+    // (We might remove this function if we remove NoMissedSamples())
+    //
+    void ResetNoMissedSamples();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 SetBitRate()
+    // The function is called to set the encoding rate.
+    //
+    // Input:
+    //   -bitRateBPS         : encoding rate in bits per second
+    //
+    // Return value:
+    //   -1 if failed to set the rate, due to invalid input or given
+    //      codec is not rate-adjustable.
+    //    0 if the rate is adjusted successfully
+    //
+    WebRtc_Word16 SetBitRate(const WebRtc_Word32 bitRateBPS);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // DestructEncoderInst()
+    // This API is used in conferencing. It will free the memory that is pointed
+    // by "ptrInst". "ptrInst" is a pointer to encoder instance, created and
+    // filled up by calling EncoderInst(...).
+    //
+    // Inputs:
+    //   -ptrInst            : pointer to an encoder instance to be deleted.
+    //
+    //
+    void DestructEncoderInst(
+        void* ptrInst);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 AudioBuffer()
+    // This is used when synchronization of codecs is required. There are cases
+    // that the audio buffers of two codecs have to be synched. By calling this
+    // function on can get the audio buffer and other related parameters, such
+    // as timestamps...
+    //
+    // Output:
+    //   -audioBuff          : a pointer to WebRtcACMAudioBuff where the audio
+    //                         buffer of this codec will be written to.
+    //
+    // Return value:
+    //   -1 if fails to copy the audio buffer,
+    //    0 if succeeded.
+    //
+    WebRtc_Word16 AudioBuffer(
+        WebRtcACMAudioBuff& audioBuff);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_UWord32 EarliestTimestamp()
+    // Returns the timestamp of the first 10 ms in audio buffer. This is used
+    // to identify if a synchronization of two encoders is required.
+    //
+    // Return value:
+    //   timestamp of the first 10 ms audio in the audio buffer.
+    //
+    WebRtc_UWord32 EarliestTimestamp() const;
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 SetAudioBuffer()
+    // This function is called to set the audio buffer and the associated
+    // parameters to a given value.
+    //
+    // Return value:
+    //   -1 if fails to copy the audio buffer,
+    //    0 if succeeded.
+    //
+    WebRtc_Word16 SetAudioBuffer(WebRtcACMAudioBuff& audioBuff);
+
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 SetVAD()
+    // This is called to set VAD & DTX. If the codec has internal DTX that will
+    // be used. If DTX is enabled and the codec does not have internal DTX,
+    // WebRtc-VAD will be used to decide if the frame is active. If DTX is
+    // disabled but VAD is enabled. The audio is passed through VAD to label it
+    // as active or passive, but the frame is  encoded normally. However the
+    // bit-stream is labeled properly so that ACM::Process() can use this
+    // information. In case of failure, the previous states of the VAD & DTX
+    // are kept.
+    //
+    // Inputs:
+    //   -enableDTX          : if true DTX will be enabled otherwise the DTX is
+    //                         disabled. If codec has internal DTX that will be
+    //                         used, otherwise WebRtc-CNG is used. In the latter
+    //                         case VAD is automatically activated.
+    //   -enableVAD          : if true WebRtc-VAD is enabled, otherwise VAD is
+    //                         disabled, except for the case that DTX is enabled
+    //                         but codec doesn't have internal DTX. In this case
+    //                         VAD is enabled regardless of the value of
+    //                         "enableVAD."
+    //   -mode               : this specifies the aggressiveness of VAD.
+    //
+    // Return value
+    //   -1 if failed to set DTX & VAD as specified,
+    //    0 if succeeded.
+    //
+    WebRtc_Word16 SetVAD(
+        const bool             enableDTX = true,
+        const bool             enableVAD = false,
+        const ACMVADMode mode      = VADNormal);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word32 ReplaceInternalDTX()
+    // This is called to replace the codec internal DTX with WebRtc DTX.
+    // This is only valid for G729 where the user has possibility to replace
+    // AnnexB with WebRtc DTX. For other codecs this function has no effect.
+    //
+    // Input:
+    //   -replaceInternalDTX : if true the internal DTX is replaced with WebRtc.
+    //
+    // Return value
+    //   -1 if failed to replace internal DTX,
+    //    0 if succeeded.
+    //
+    WebRtc_Word32 ReplaceInternalDTX(const bool replaceInternalDTX);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word32 IsInternalDTXReplaced()
+    // This is called to check if the codec internal DTX is replaced by WebRtc DTX.
+    // This is only valid for G729 where the user has possibility to replace
+    // AnnexB with WebRtc DTX. For other codecs this function has no effect.
+    //
+    // Output:
+    //   -internalDTXReplaced    : if true the internal DTX is replaced with WebRtc.
+    //
+    // Return value
+    //   -1 if failed to check if replace internal DTX or replacement not feasible,
+    //    0 if succeeded.
+    //
+    WebRtc_Word32 IsInternalDTXReplaced(bool* internalDTXReplaced);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // void SetNetEqDecodeLock()
+    // Passes the NetEq lock to the codec.
+    //
+    // Input:
+    //   -netEqDecodeLock    : pointer to the lock associated with NetEQ of ACM.
+    //
+    void SetNetEqDecodeLock(
+        RWLockWrapper* netEqDecodeLock)
+    {
+        _netEqDecodeLock = netEqDecodeLock;
+    }
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // bool HasInternalDTX()
+    // Used to check if the codec has internal DTX.
+    //
+    // Return value:
+    //   true if the codec has an internal DTX, e.g. G729,
+    //   false otherwise.
+    //
+    bool HasInternalDTX() const
+    {
+        return _hasInternalDTX;
+    }
+
+
+   ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word32 GetEstimatedBandwidth()
+    // Used to get decoder estimated bandwidth. Only iSAC will provide a value.
+    //
+    //
+    // Return value:
+    //   -1 if fails to get decoder estimated bandwidth,
+    //    >0 estimated bandwidth in bits/sec.
+    //
+    WebRtc_Word32 GetEstimatedBandwidth();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word32 SetEstimatedBandwidth()
+    // Used to set estiamted bandwidth sent out of band from other side. Only
+    // iSAC will have use for the value.
+    //
+    // Input:
+    //       -estimatedBandwidth:    estimated bandwidth in bits/sec
+    //
+    // Return value:
+    //   -1 if fails to set estimated bandwidth,
+    //    0 on success.
+    //
+    WebRtc_Word32 SetEstimatedBandwidth(WebRtc_Word32 estimatedBandwidth);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word32 GetRedPayload()
+    // Used to get codec specific RED payload (if such is implemented).
+    // Currently only done in iSAC.
+    //
+    // Outputs:
+    //   -redPayload        : a pointer to the data for RED payload.
+    //   -payloadBytes      : number of bytes in RED payload.
+    //
+    // Return value:
+    //   -1 if fails to get codec specific RED,
+    //    0 if succeeded.
+    //
+    WebRtc_Word32 GetRedPayload(
+        WebRtc_UWord8* redPayload,
+        WebRtc_Word16* payloadBytes);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 ResetEncoder()
+    // By calling this function you would re-initialize the encoder with the
+    // current parameters. All the settings, e.g. VAD/DTX, frame-size... should
+    // remain unchanged. (In case of iSAC we don't want to lose BWE history.)
+    //
+    // Return value
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    WebRtc_Word16 ResetEncoder();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 ResetEncoder()
+    // By calling this function you would re-initialize the decoder with the
+    // current parameters.
+    //
+    // Return value
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    WebRtc_Word16 ResetDecoder(
+        WebRtc_Word16 payloadType);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // void DestructEncoder()
+    // This function is called to delete the encoder instance, if possible, to
+    // have a fresh start. For codecs where encoder and decoder share the same
+    // instance we cannot delete the encoder and instead we will initialize the
+    // encoder. We also delete VAD and DTX if they have been created.
+    //
+    void DestructEncoder();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // void DestructDecoder()
+    // This function is called to delete the decoder instance, if possible, to
+    // have a fresh start. For codecs where encoder and decoder share the same
+    // instance we cannot delete the encoder and instead we will initialize the
+    // decoder. Before deleting decoder instance it has to be removed from the
+    // NetEq list.
+    //
+    void DestructDecoder();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 SamplesLeftToEncode()
+    // Returns the number of samples required to be able to do encoding.
+    //
+    // Return value:
+    //   Number of samples.
+    //
+    WebRtc_Word16 SamplesLeftToEncode();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_UWord32 LastEncodedTimestamp()
+    // Returns the timestamp of the last frame it encoded.
+    //
+    // Return value:
+    //   Timestamp.
+    //
+    WebRtc_UWord32 LastEncodedTimestamp() const;
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // SetUniqueID()
+    // Set a unique ID for the codec to be used for tracing and debuging
+    //
+    // Input
+    //   -id                 : A number to identify the codec.
+    //
+    void SetUniqueID(
+        const WebRtc_UWord32 id);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // IsAudioBufferFresh()
+    // Specifies if ever audio is injected to this codec.
+    //
+    // Return value
+    //   -true; no audio is feed into this codec
+    //   -false; audio has already been  fed to the codec.
+    //
+    bool IsAudioBufferFresh() const;
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // UpdateDecoderSampFreq()
+    // For most of the codecs this function does nothing. It must be
+    // implemented for those codecs that one codec instance serves as the
+    // decoder for different flavers of the codec. One example is iSAC. there,
+    // iSAC 16 kHz and iSAC 32 kHz are treated as two different codecs with
+    // different payload types, however, there is only one iSAC instance to
+    // decode. The reason for that is we would like to decode and encode with
+    // the same codec instance for bandwidth estimator to work.
+    //
+    // Each time that we receive a new payload type, we call this funtion to
+    // prepare the decoder associated with the new payload. Normally, decoders
+    // doesn't have to do anything. For iSAC the decoder has to change it's
+    // sampling rate. The input parameter specifies the current flaver of the
+    // codec in codec database. For instance, if we just got a SWB payload then
+    // the input parameter is ACMCodecDB::isacswb.
+    //
+    // Input:
+    //   -codecId            : the ID of the codec associated with the
+    //                         payload type that we just received.
+    //
+    // Return value:
+    //    0 if succeeded in updating the decoder.
+    //   -1 if failed to update.
+    //
+    virtual WebRtc_Word16 UpdateDecoderSampFreq(
+        WebRtc_Word16 /* codecId */)
+    {
+        return 0;
+    }
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // UpdateEncoderSampFreq()
+    // Call this function to update the encoder sampling frequency. This
+    // is for codecs where one payload-name supports several encoder sampling
+    // frequencies. Otherwise, to change the sampling frequency we need to
+    // register new codec. ACM will consider that as registration of a new
+    // codec, not a change in parameter. For iSAC, switching from WB to SWB
+    // is treated as a change in parameter. Therefore, we need this function.
+    //
+    // Input:
+    //   -encoderSampFreqHz  : encoder sampling frequency.
+    //
+    // Return value:
+    //   -1 if failed, or if this is meaningless for the given codec.
+    //    0 if succeeded.
+    //
+    virtual WebRtc_Word16 UpdateEncoderSampFreq(
+        WebRtc_UWord16 encoderSampFreqHz);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // EncoderSampFreq()
+    // Get the sampling frequency that the encoder (WebRtc wrapper) expects.
+    //
+    // Output:
+    //   -sampFreqHz         : sampling frequency, in Hertz, which the encoder
+    //                         should be fed with.
+    //
+    // Return value:
+    //   -1 if failed to output sampling rate.
+    //    0 if the sample rate is returned successfully.
+    //
+    virtual WebRtc_Word16 EncoderSampFreq(
+        WebRtc_UWord16& sampFreqHz);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word32 ConfigISACBandwidthEstimator()
+    // Call this function to configure the bandwidth estimator of ISAC.
+    // During the adaptation of bit-rate, iSAC atomatically adjusts the
+    // frame-size (either 30 or 60 ms) to save on RTP header. The initial
+    // frame-size can be specified by the first argument. The configuration also
+    // regards the initial estimate of bandwidths. The estimator starts from
+    // this point and converges to the actual bottleneck. This is given by the
+    // second parameter. Furthermore, it is also possible to control the
+    // adaptation of frame-size. This is specified by the last parameter.
+    //
+    // Input:
+    //   -initFrameSizeMsec  : initial frame-size in milisecods. For iSAC-wb
+    //                         30 ms and 60 ms (default) are acceptable values,
+    //                         and for iSAC-swb 30 ms is the only acceptable
+    //                         value. Zero indiates default value.
+    //   -initRateBitPerSec  : initial estimate of the bandwidth. Values
+    //                         between 10000 and 58000 are acceptable.
+    //   -enforceFrameSize   : if true, the frame-size will not be adapted.
+    //
+    // Return value:
+    //   -1 if failed to configure the bandwidth estimator,
+    //    0 if the configuration was successfully applied.
+    //
+    virtual WebRtc_Word32 ConfigISACBandwidthEstimator(
+        const WebRtc_UWord8  initFrameSizeMsec,
+        const WebRtc_UWord16 initRateBitPerSec,
+        const bool           enforceFrameSize);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // SetISACMaxPayloadSize()
+    // Set the maximum payload size of iSAC packets. No iSAC payload,
+    // regardless of its frame-size, may exceed the given limit. For
+    // an iSAC payload of size B bits and frame-size T sec we have;
+    // (B < maxPayloadLenBytes * 8) and (B/T < maxRateBitPerSec), c.f.
+    // SetISACMaxRate().
+    //
+    // Input:
+    //   -maxPayloadLenBytes : maximum payload size in bytes.
+    //
+    // Return value:
+    //   -1 if failed to set the maximm  payload-size.
+    //    0 if the given linit is seet successfully.
+    //
+    virtual WebRtc_Word32 SetISACMaxPayloadSize(
+        const WebRtc_UWord16 maxPayloadLenBytes);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // SetISACMaxRate()
+    // Set the maximum instantaneous rate of iSAC. For a payload of B bits
+    // with a frame-size of T sec the instantaneous rate is B/T bist per
+    // second. Therefore, (B/T < maxRateBitPerSec) and
+    // (B < maxPayloadLenBytes * 8) are always satisfied for iSAC payloads,
+    // c.f SetISACMaxPayloadSize().
+    //
+    // Input:
+    //   -maxRateBitPerSec   : maximum instantaneous bit-rate given in bits/sec.
+    //
+    // Return value:
+    //   -1 if failed to set the maximum rate.
+    //    0 if the maximum rate is set successfully.
+    //
+    virtual WebRtc_Word32 SetISACMaxRate(
+        const WebRtc_UWord32 maxRateBitPerSec);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // SaveDecoderParamS()
+    // Save the parameters of decoder.
+    //
+    // Input:
+    //   -codecParams        : pointer to a struct where the parameters of
+    //                         decoder is stored in.
+    //
+    void SaveDecoderParam(
+        const WebRtcACMCodecParams* codecParams);
+
+
+    WebRtc_Word32 FrameSize()
+    {
+        return _frameLenSmpl;
+    }
+
+    void SetIsMaster(bool isMaster);
+
+
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // REDPayloadISAC()
+    // This is an iSAC-specific function. The function is called to get RED
+    // paylaod from a default-encoder.
+    //
+    // Inputs:
+    //   -isacRate           : the target rate of the main payload. A RED
+    //                         paylaod is generated according to the rate of
+    //                         main paylaod. Note that we are not specifying the
+    //                         rate of RED payload, but the main payload.
+    //   -isacBwEstimate     : bandwidth information should be inserted in
+    //                         RED payload.
+    //
+    // Output:
+    //   -payload            : pointer to a buffer where the RED paylaod will
+    //                         written to.
+    //   -paylaodLenBytes    : a place-holder to write the length of the RED
+    //                         payload in Bytes.
+    //
+    // Return value:
+    //   -1 if an error occures, otherwise the length of the payload (in Bytes)
+    //   is returned.
+    //
+    //
+    virtual WebRtc_Word16 REDPayloadISAC(
+        const WebRtc_Word32 isacRate,
+        const WebRtc_Word16 isacBwEstimate,
+        WebRtc_UWord8*      payload,
+        WebRtc_Word16*      payloadLenBytes);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // IsTrueStereoCodec()
+    // Call to see if current encoder is a true stereo codec. This function
+    // should be overwritten for codecs which are true stereo codecs
+    // Return value:
+    //   -true  if stereo codec
+    //   -false if not stereo codec.
+    //
+    virtual bool IsTrueStereoCodec() {
+      return false;
+    }
+
+protected:
+    ///////////////////////////////////////////////////////////////////////////
+    // All the functions with FunctionNameSafe(...) contain the actual
+    // implementation of FunctionName(...). FunctionName() acquires an
+    // appropriate lock and calls FunctionNameSafe() to do the actual work.
+    // Therefore, for the description of functionality, input/output arguments
+    // and return value we refer to FunctionName()
+    //
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See Encode() for the description of function, input(s)/output(s) and
+    // return value.
+    //
+    WebRtc_Word16 EncodeSafe(
+        WebRtc_UWord8*         bitStream,
+        WebRtc_Word16*         bitStreamLenByte,
+        WebRtc_UWord32*        timeStamp,
+        WebRtcACMEncodingType* encodingType);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See Decode() for the description of function, input(s)/output(s) and
+    // return value.
+    //
+    virtual WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType) = 0;
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See Add10MsSafe() for the description of function, input(s)/output(s)
+    // and return value.
+    //
+    virtual WebRtc_Word32 Add10MsDataSafe(
+        const WebRtc_UWord32 timeStamp,
+        const WebRtc_Word16* data,
+        const WebRtc_UWord16 length,
+        const WebRtc_UWord8  audioChannel);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See RegisterInNetEq() for the description of function,
+    // input(s)/output(s) and  return value.
+    //
+    virtual WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst&  codecInst) = 0;
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See EncoderParam() for the description of function, input(s)/output(s)
+    // and return value.
+    //
+    WebRtc_Word16 EncoderParamsSafe(
+        WebRtcACMCodecParams *encParams);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See DecoderParam for the description of function, input(s)/output(s)
+    // and return value.
+    //
+    // Note:
+    // Any Class where a single instance handle several flavers of the
+    // same codec, therefore, several payload types are associated with
+    // the same instance have to implement this function.
+    //
+    // Currently only iSAC is implementing it. A single iSAC instance is
+    // used for decoding both WB & SWB stream. At one moment both WB & SWB
+    // can be registered as receive codec. Hence two payloads are associated
+    // with a single codec instance.
+    //
+    virtual bool  DecoderParamsSafe(
+        WebRtcACMCodecParams *decParams,
+        const WebRtc_UWord8  payloadType);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See ResetEncoder() for the description of function, input(s)/output(s)
+    // and return value.
+    //
+    WebRtc_Word16 ResetEncoderSafe();
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See InitEncoder() for the description of function, input(s)/output(s)
+    // and return value.
+    //
+    WebRtc_Word16 InitEncoderSafe(
+        WebRtcACMCodecParams *codecParams,
+        bool                 forceInitialization);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See InitDecoder() for the description of function, input(s)/output(s)
+    // and return value.
+    //
+    WebRtc_Word16 InitDecoderSafe(
+        WebRtcACMCodecParams *codecParams,
+        bool                 forceInitialization);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See ResetDecoder() for the description of function, input(s)/output(s)
+    // and return value.
+    //
+    WebRtc_Word16 ResetDecoderSafe(
+        WebRtc_Word16 payloadType);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See DestructEncoder() for the description of function,
+    // input(s)/output(s) and return value.
+    //
+    virtual void DestructEncoderSafe() = 0;
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See DestructDecoder() for the description of function,
+    // input(s)/output(s) and return value.
+    //
+    virtual void DestructDecoderSafe() = 0;
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See SetBitRate() for the description of function, input(s)/output(s)
+    // and return value.
+    //
+    // Any codec that can change the bit-rate has to implement this.
+    //
+    virtual WebRtc_Word16 SetBitRateSafe(
+        const WebRtc_Word32 bitRateBPS);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See GetEstimatedBandwidth() for the description of function, input(s)/output(s)
+    // and return value.
+    //
+    virtual WebRtc_Word32 GetEstimatedBandwidthSafe();
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See SetEstimatedBandwidth() for the description of function, input(s)/output(s)
+    // and return value.
+    //
+    virtual WebRtc_Word32 SetEstimatedBandwidthSafe(WebRtc_Word32 estimatedBandwidth);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See GetRedPayload() for the description of function, input(s)/output(s)
+    // and return value.
+    //
+    virtual WebRtc_Word32 GetRedPayloadSafe(
+        WebRtc_UWord8* redPayload,
+        WebRtc_Word16* payloadBytes);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See SetVAD() for the description of function, input(s)/output(s) and
+    // return value.
+    //
+    WebRtc_Word16 SetVADSafe(
+        const bool       enableDTX = true,
+        const bool       enableVAD = false,
+        const ACMVADMode mode      = VADNormal);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See ReplaceInternalDTX() for the description of function, input and
+    // return value.
+    //
+    virtual WebRtc_Word32 ReplaceInternalDTXSafe(
+        const bool replaceInternalDTX);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // See IsInternalDTXReplaced() for the description of function, input and
+    // return value.
+    //
+    virtual WebRtc_Word32 IsInternalDTXReplacedSafe(
+        bool* internalDTXReplaced);
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 CreateEncoder()
+    // Creates the encoder instance.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    WebRtc_Word16 CreateEncoder();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 CreateDecoder()
+    // Creates the decoder instance.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    WebRtc_Word16 CreateDecoder();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 EnableVAD();
+    // Enables VAD with the given mode. The VAD instance will be created if
+    // it does not exists.
+    //
+    // Input:
+    //   -mode               : VAD mode c.f. audio_coding_module_typedefs.h for
+    //                         the options.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    WebRtc_Word16 EnableVAD(ACMVADMode mode);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 DisableVAD()
+    // Disables VAD.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    WebRtc_Word16 DisableVAD();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 EnableDTX()
+    // Enables DTX. This method should be overwritten for codecs which have
+    // internal DTX.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    virtual WebRtc_Word16 EnableDTX();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 DisableDTX()
+    // Disables usage of DTX. This method should be overwritten for codecs which
+    // have internal DTX.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    virtual WebRtc_Word16 DisableDTX();
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 InternalEncode()
+    // This is a codec-specific function called in EncodeSafe() to actually
+    // encode a frame of audio.
+    //
+    // Outputs:
+    //   -bitStream          : pointer to a buffer where the bit-stream is
+    //                         written to.
+    //   -bitStreamLenByte   : the length of the bit-stream in bytes,
+    //                         a negative value indicates error.
+    //
+    // Return value:
+    //   -1 if failed,
+    //   otherwise the length of the bit-stream is returned.
+    //
+    virtual WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16* bitStreamLenByte) = 0;
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 InternalInitEncoder()
+    // This is a codec-specific function called in InitEncoderSafe(), it has to
+    // do all codec-specific operation to initialize the encoder given the
+    // encoder parameters.
+    //
+    // Input:
+    //   -codecParams        : pointer to a structure that contains parameters to
+    //                         initialize encoder.
+    //                         Set codecParam->CodecInst.rate to -1 for
+    //                         iSAC to operate in adaptive mode.
+    //                         (to do: if frame-length is -1 frame-length will be
+    //                         automatically adjusted, otherwise, given
+    //                         frame-length is forced)
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    virtual WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams) = 0;
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 InternalInitDecoder()
+    // This is a codec-specific function called in InitDecoderSafe(), it has to
+    // do all codec-specific operation to initialize the decoder given the
+    // decoder parameters.
+    //
+    // Input:
+    //   -codecParams        : pointer to a structure that contains parameters to
+    //                         initialize encoder.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    virtual WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams) = 0;
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // void IncreaseNoMissedSamples()
+    // This method is called to increase the number of samples that are
+    // overwritten in the audio buffer.
+    //
+    // Input:
+    //   -noSamples          : the number of overwritten samples is incremented
+    //                         by this value.
+    //
+    void IncreaseNoMissedSamples(
+        const WebRtc_Word16 noSamples);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 InternalCreateEncoder()
+    // This is a codec-specific method called in CreateEncoderSafe() it is
+    // supposed to perform all codec-specific operations to create encoder
+    // instance.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    virtual WebRtc_Word16 InternalCreateEncoder() = 0;
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 InternalCreateDecoder()
+    // This is a codec-specific method called in CreateDecoderSafe() it is
+    // supposed to perform all codec-specific operations to create decoder
+    // instance.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    virtual WebRtc_Word16 InternalCreateDecoder() = 0;
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // void InternalDestructEncoderInst()
+    // This is a codec-specific method, used in conferencing, called from
+    // DestructEncoderInst(). The input argument is pointer to encoder instance
+    // (codec instance for codecs that encoder and decoder share the same
+    // instance). This method is called to free the memory that "ptrInst" is
+    // pointing to.
+    //
+    // Input:
+    //   -ptrInst            : pointer to encoder instance.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    virtual void InternalDestructEncoderInst(
+        void* ptrInst) = 0;
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 InternalResetEncoder()
+    // This method is called to reset the states of encoder. However, the
+    // current parameters, e.g. frame-length, should remain as they are. For
+    // most of the codecs a re-initialization of the encoder is what needs to
+    // be down. But for iSAC we like to keep the BWE history so we cannot
+    // re-initialize. As soon as such an API is implemented in iSAC this method
+    // has to be overwritten in ACMISAC class.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    virtual WebRtc_Word16 InternalResetEncoder();
+
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // WebRtc_Word16 ProcessFrameVADDTX()
+    // This function is called when a full frame of audio is available. It will
+    // break the audio frame into blocks such that each block could be processed
+    // by VAD & CN/DTX. If a frame is divided into two blocks then there are two
+    // cases. First, the first block is active, the second block will not be
+    // processed by CN/DTX but only by VAD and return to caller with
+    // '*samplesProcessed' set to zero. There, the audio frame will be encoded
+    // by the encoder. Second, the first block is inactive and is processed by
+    // CN/DTX, then we stop processing the next block and return to the caller
+    // which is EncodeSafe(), with "*samplesProcessed" equal to the number of
+    // samples in first block.
+    //
+    // Output:
+    //   -bitStream          : pointer to a buffer where DTX frame, if
+    //                         generated, will be written to.
+    //   -bitStreamLenByte   : contains the length of bit-stream in bytes, if
+    //                         generated. Zero if no bit-stream is generated.
+    //   -noSamplesProcessed : contains no of samples that actually CN has
+    //                         processed. Those samples processed by CN will not
+    //                         be encoded by the encoder, obviously. If
+    //                         contains zero, it means that the frame has been
+    //                         identified as active by VAD. Note that
+    //                         "*noSamplesProcessed" might be non-zero but
+    //                         "*bitStreamLenByte" be zero.
+    //
+    // Return value:
+    //   -1 if failed,
+    //    0 if succeeded.
+    //
+    WebRtc_Word16 ProcessFrameVADDTX(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16* bitStreamLenByte,
+        WebRtc_Word16* samplesProcessed);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // CanChangeEncodingParam()
+    // Check if the codec parameters can be changed. In conferencing normally
+    // codec parametrs cannot be changed. The exception is bit-rate of isac.
+    //
+    // return value:
+    //   -true  if codec parameters are allowed to change.
+    //   -flase otherwise.
+    //
+    virtual bool CanChangeEncodingParam(CodecInst& codecInst);
+
+
+    ///////////////////////////////////////////////////////////////////////////
+    // CurrentRate()
+    // Call to get the current encoding rate of the encoder. This function
+    // should be overwritten for codecs whic automatically change their
+    // target rate. One example is iSAC. The output of the function is the
+    // current target rate.
+    //
+    // Output:
+    //   -rateBitPerSec      : the current target rate of the codec.
+    //
+    virtual void CurrentRate(
+        WebRtc_Word32& /* rateBitPerSec */)
+    {
+        return;
+    }
+
+    virtual void SaveDecoderParamSafe(
+        const WebRtcACMCodecParams* codecParams);
+
+    // &_inAudio[_inAudioIxWrite] always point to where new audio can be
+    // written to
+    WebRtc_Word16         _inAudioIxWrite;
+
+    // &_inAudio[_inAudioIxRead] points to where audio has to be read from
+    WebRtc_Word16         _inAudioIxRead;
+
+    WebRtc_Word16         _inTimestampIxWrite;
+
+    // Where the audio is stored before encoding,
+    // To save memory the following buffer can be allocated
+    // dynamically for 80ms depending on the sampling frequency
+    // of the codec.
+    WebRtc_Word16*        _inAudio;
+    WebRtc_UWord32*       _inTimestamp;
+
+    WebRtc_Word16         _frameLenSmpl;
+    WebRtc_UWord16        _noChannels;
+
+    // This will point to a static database of the supported codecs
+    WebRtc_Word16         _codecID;
+
+    // This will account for the No of samples  were not encoded
+    // the case is rare, either samples are missed due to overwite
+    // at input buffer or due to encoding error
+    WebRtc_UWord32        _noMissedSamples;
+
+    // True if the encoder instance created
+    bool                  _encoderExist;
+    bool                  _decoderExist;
+    // True if the ecncoder instance initialized
+    bool                  _encoderInitialized;
+    bool                  _decoderInitialized;
+
+    bool                  _registeredInNetEq;
+
+    // VAD/DTX
+    bool                  _hasInternalDTX;
+    WebRtcVadInst*        _ptrVADInst;
+    bool                  _vadEnabled;
+    ACMVADMode            _vadMode;
+    WebRtc_Word16         _vadLabel[MAX_FRAME_SIZE_10MSEC];
+    bool                  _dtxEnabled;
+    WebRtcCngEncInst*     _ptrDTXInst;
+    WebRtc_UWord8         _numLPCParams;
+    bool                  _sentCNPrevious;
+    bool                  _isMaster;
+
+    WebRtcACMCodecParams  _encoderParams;
+    WebRtcACMCodecParams  _decoderParams;
+
+    // Used as a global lock for all avaiable decoders
+    // so that no decoder is used when NetEQ decodes.
+    RWLockWrapper*        _netEqDecodeLock;
+    // Used to lock wrapper internal data
+    // such as buffers and state variables.
+    RWLockWrapper&        _codecWrapperLock;
+
+    WebRtc_UWord32        _lastEncodedTimestamp;
+    WebRtc_UWord32        _lastTimestamp;
+    bool                  _isAudioBuffFresh;
+    WebRtc_UWord32        _uniqueID;
+};
+
+} // namespace webrt
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GENERIC_CODEC_H_
diff --git a/src/modules/audio_coding/main/source/acm_gsmfr.cc b/src/modules/audio_coding/main/source/acm_gsmfr.cc
new file mode 100644
index 0000000..2343241
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_gsmfr.cc
@@ -0,0 +1,386 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_gsmfr.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+#ifdef WEBRTC_CODEC_GSMFR
+    // NOTE! GSM-FR is not included in the open-source package. The following
+    // interface file is needed:
+    //
+    // /modules/audio_coding/codecs/gsmfr/main/interface/gsmfr_interface.h
+    //
+    // The API in the header file should match the one below.
+    //
+    // int16_t WebRtcGSMFR_CreateEnc(GSMFR_encinst_t_** inst);
+    // int16_t WebRtcGSMFR_CreateDec(GSMFR_decinst_t_** inst);
+    // int16_t WebRtcGSMFR_FreeEnc(GSMFR_encinst_t_* inst);
+    // int16_t WebRtcGSMFR_FreeDec(GSMFR_decinst_t_* inst);
+    // int16_t WebRtcGSMFR_Encode(GSMFR_encinst_t_* encInst, int16_t* input,
+    //                            int16_t len, int16_t* output);
+    // int16_t WebRtcGSMFR_EncoderInit(GSMFR_encinst_t_* encInst, int16_t mode);
+    // int16_t WebRtcGSMFR_Decode(GSMFR_decinst_t_* decInst);
+    // int16_t WebRtcGSMFR_DecodeBwe(GSMFR_decinst_t_* decInst, int16_t* input);
+    // int16_t WebRtcGSMFR_DecodePlc(GSMFR_decinst_t_* decInst);
+    // int16_t WebRtcGSMFR_DecoderInit(GSMFR_decinst_t_* decInst);
+    #include "gsmfr_interface.h"
+#endif
+
+namespace webrtc {
+
+#ifndef WEBRTC_CODEC_GSMFR
+
+ACMGSMFR::ACMGSMFR(WebRtc_Word16 /* codecID */)
+    : _encoderInstPtr(NULL),
+      _decoderInstPtr(NULL) {
+  return;
+}
+
+
+ACMGSMFR::~ACMGSMFR()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::InternalEncode(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16* /* bitStreamLenByte */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::EnableDTX()
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::DisableDTX()
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+
+WebRtc_Word32
+ACMGSMFR::CodecDef(
+    WebRtcNetEQ_CodecDef& /* codecDef  */,
+    const CodecInst&      /* codecInst */)
+{
+    return -1;
+}
+
+
+ACMGenericCodec*
+ACMGSMFR::CreateInstance(void)
+{
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::InternalCreateEncoder()
+{
+    return -1;
+}
+
+
+void
+ACMGSMFR::DestructEncoderSafe()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::InternalCreateDecoder()
+{
+    return -1;
+}
+
+
+void
+ACMGSMFR::DestructDecoderSafe()
+{
+    return;
+}
+
+
+void
+ACMGSMFR::InternalDestructEncoderInst(
+    void* /* ptrInst */)
+{
+    return;
+}
+
+#else     //===================== Actual Implementation =======================
+
+ACMGSMFR::ACMGSMFR(
+    WebRtc_Word16 codecID):
+_encoderInstPtr(NULL),
+_decoderInstPtr(NULL)
+{
+    _codecID = codecID;
+    _hasInternalDTX = true;
+    return;
+}
+
+
+ACMGSMFR::~ACMGSMFR()
+{
+    if(_encoderInstPtr != NULL)
+    {
+        WebRtcGSMFR_FreeEnc(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+    if(_decoderInstPtr != NULL)
+    {
+        WebRtcGSMFR_FreeDec(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+    return;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::InternalEncode(
+    WebRtc_UWord8* bitStream,
+    WebRtc_Word16* bitStreamLenByte)
+{
+    *bitStreamLenByte = WebRtcGSMFR_Encode(_encoderInstPtr,
+        &_inAudio[_inAudioIxRead], _frameLenSmpl, (WebRtc_Word16*)bitStream);
+    // increment the read index this tell the caller that how far
+    // we have gone forward in reading the audio buffer
+    _inAudioIxRead += _frameLenSmpl;
+    return *bitStreamLenByte;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::EnableDTX()
+{
+    if(_dtxEnabled)
+    {
+        return 0;
+    }
+    else if(_encoderExist)
+    {
+        if(WebRtcGSMFR_EncoderInit(_encoderInstPtr, 1) < 0)
+        {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "EnableDTX: cannot init encoder for GSMFR");
+            return -1;
+        }
+        _dtxEnabled = true;
+        return 0;
+    }
+    else
+    {
+        return -1;
+    }
+}
+
+
+WebRtc_Word16
+ACMGSMFR::DisableDTX()
+{
+    if(!_dtxEnabled)
+    {
+        return 0;
+    }
+    else if(_encoderExist)
+    {
+        if(WebRtcGSMFR_EncoderInit(_encoderInstPtr, 0) < 0)
+        {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "DisableDTX: cannot init encoder for GSMFR");
+            return -1;
+        }
+        _dtxEnabled = false;
+        return 0;
+    }
+    else
+    {
+        // encoder doesn't exists, therefore disabling is harmless
+        return 0;
+    }
+}
+
+
+WebRtc_Word16
+ACMGSMFR::InternalInitEncoder(
+    WebRtcACMCodecParams* codecParams)
+{
+    if (WebRtcGSMFR_EncoderInit(_encoderInstPtr, ((codecParams->enableDTX)? 1:0)) < 0)
+  {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+      "InternalInitEncoder: cannot init encoder for GSMFR");
+  }
+  return 0;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    if (WebRtcGSMFR_DecoderInit(_decoderInstPtr) < 0)
+  {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+       "InternalInitDecoder: cannot init decoder for GSMFR");
+    return -1;
+  }
+  return 0;
+}
+
+
+WebRtc_Word32
+ACMGSMFR::CodecDef(
+    WebRtcNetEQ_CodecDef& codecDef,
+    const CodecInst&      codecInst)
+{
+    if (!_decoderInitialized)
+    {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+      "CodecDef: decoder is not initialized for GSMFR");
+        return -1;
+    }
+    // Fill up the structure by calling
+    // "SET_CODEC_PAR" & "SET_GSMFR_FUNCTION."
+    // Then call NetEQ to add the codec to it's
+    // database.
+    SET_CODEC_PAR((codecDef), kDecoderGSMFR, codecInst.pltype,
+        _decoderInstPtr, 8000);
+    SET_GSMFR_FUNCTIONS((codecDef));
+    return 0;
+}
+
+
+ACMGenericCodec*
+ACMGSMFR::CreateInstance(void)
+{
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::InternalCreateEncoder()
+{
+    if (WebRtcGSMFR_CreateEnc(&_encoderInstPtr) < 0)
+  {
+     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+       "InternalCreateEncoder: cannot create instance for GSMFR encoder");
+    return -1;
+  }
+  return 0;
+}
+
+
+void
+ACMGSMFR::DestructEncoderSafe()
+{
+    if(_encoderInstPtr != NULL)
+    {
+        WebRtcGSMFR_FreeEnc(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+    _encoderExist = false;
+    _encoderInitialized = false;
+}
+
+
+WebRtc_Word16
+ACMGSMFR::InternalCreateDecoder()
+{
+    if (WebRtcGSMFR_CreateDec(&_decoderInstPtr) < 0)
+  {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+       "InternalCreateDecoder: cannot create instance for GSMFR decoder");
+    return -1;
+  }
+  return 0;
+}
+
+
+void
+ACMGSMFR::DestructDecoderSafe()
+{
+    if(_decoderInstPtr != NULL)
+    {
+        WebRtcGSMFR_FreeDec(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+    _decoderExist = false;
+    _decoderInitialized = false;
+}
+
+
+void
+ACMGSMFR::InternalDestructEncoderInst(
+    void* ptrInst)
+{
+    if(ptrInst != NULL)
+    {
+        WebRtcGSMFR_FreeEnc((GSMFR_encinst_t_*)ptrInst);
+    }
+    return;
+}
+
+#endif
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/acm_gsmfr.h b/src/modules/audio_coding/main/source/acm_gsmfr.h
new file mode 100644
index 0000000..8991de8
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_gsmfr.h
@@ -0,0 +1,73 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GSMFR_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GSMFR_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct GSMFR_encinst_t_;
+struct GSMFR_decinst_t_;
+
+namespace webrtc {
+
+class ACMGSMFR : public ACMGenericCodec
+{
+public:
+    ACMGSMFR(WebRtc_Word16 codecID);
+    ~ACMGSMFR();
+    // for FEC
+    ACMGenericCodec* CreateInstance(void);
+
+    WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitstream,
+        WebRtc_Word16* bitStreamLenByte);
+
+    WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams);
+
+    WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams);
+
+protected:
+    WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+    WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst&      codecInst);
+
+    void DestructEncoderSafe();
+
+    void DestructDecoderSafe();
+
+    WebRtc_Word16 InternalCreateEncoder();
+
+    WebRtc_Word16 InternalCreateDecoder();
+
+    void InternalDestructEncoderInst(
+        void* ptrInst);
+
+    WebRtc_Word16 EnableDTX();
+
+    WebRtc_Word16 DisableDTX();
+
+    GSMFR_encinst_t_* _encoderInstPtr;
+    GSMFR_decinst_t_* _decoderInstPtr;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_GSMFR_H_
diff --git a/src/modules/audio_coding/main/source/acm_ilbc.cc b/src/modules/audio_coding/main/source/acm_ilbc.cc
new file mode 100644
index 0000000..0721619
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_ilbc.cc
@@ -0,0 +1,362 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_common_defs.h"
+#include "acm_ilbc.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+#ifdef WEBRTC_CODEC_ILBC
+    #include "ilbc.h"
+#endif
+
+namespace webrtc
+{
+
+#ifndef WEBRTC_CODEC_ILBC
+
+ACMILBC::ACMILBC(WebRtc_Word16 /* codecID */)
+    : _encoderInstPtr(NULL),
+      _decoderInstPtr(NULL) {
+  return;
+}
+
+
+ACMILBC::~ACMILBC()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMILBC::InternalEncode(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16* /* bitStreamLenByte */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMILBC::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMILBC::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMILBC::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+
+WebRtc_Word32
+ACMILBC::CodecDef(
+    WebRtcNetEQ_CodecDef& /* codecDef  */,
+    const CodecInst&      /* codecInst */)
+{
+    return -1;
+}
+
+
+ACMGenericCodec*
+ACMILBC::CreateInstance(void)
+{
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMILBC::InternalCreateEncoder()
+{
+    return -1;
+}
+
+
+void
+ACMILBC::DestructEncoderSafe()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMILBC::InternalCreateDecoder()
+{
+    return -1;
+}
+
+
+void
+ACMILBC::DestructDecoderSafe()
+{
+    return;
+}
+
+
+void
+ACMILBC::InternalDestructEncoderInst(
+    void* /* ptrInst */)
+{
+    return;
+}
+
+WebRtc_Word16
+ACMILBC::SetBitRateSafe(const WebRtc_Word32 /* rate */)
+{
+    return -1;
+}
+
+#else     //===================== Actual Implementation =======================
+
+
+ACMILBC::ACMILBC(
+    WebRtc_Word16 codecID):
+_encoderInstPtr(NULL),
+_decoderInstPtr(NULL)
+{
+    _codecID = codecID;
+    return;
+}
+
+
+ACMILBC::~ACMILBC()
+{
+    if(_encoderInstPtr != NULL)
+    {
+        WebRtcIlbcfix_EncoderFree(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+    if(_decoderInstPtr != NULL)
+    {
+        WebRtcIlbcfix_DecoderFree(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+    return;
+}
+
+
+WebRtc_Word16
+ACMILBC::InternalEncode(
+    WebRtc_UWord8* bitStream,
+    WebRtc_Word16* bitStreamLenByte)
+{
+    *bitStreamLenByte = WebRtcIlbcfix_Encode(_encoderInstPtr,
+        &_inAudio[_inAudioIxRead], _frameLenSmpl, (WebRtc_Word16*)bitStream);
+    if (*bitStreamLenByte < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InternalEncode: error in encode for ILBC");
+        return -1;
+    }
+    // increment the read index this tell the caller that how far
+    // we have gone forward in reading the audio buffer
+    _inAudioIxRead += _frameLenSmpl;
+    return *bitStreamLenByte;
+}
+
+
+WebRtc_Word16
+ACMILBC::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMILBC::InternalInitEncoder(
+    WebRtcACMCodecParams* codecParams)
+{
+    // initialize with a correct processing block length
+    if((160 == (codecParams->codecInstant).pacsize) ||
+        (320 == (codecParams->codecInstant).pacsize))
+    {
+        // processing block of 20ms
+        return WebRtcIlbcfix_EncoderInit(_encoderInstPtr, 20);
+    }
+    else if((240 == (codecParams->codecInstant).pacsize) ||
+        (480 == (codecParams->codecInstant).pacsize))
+    {
+        // processing block of 30ms
+        return WebRtcIlbcfix_EncoderInit(_encoderInstPtr, 30);
+    }
+    else
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InternalInitEncoder: invalid processing block");
+        return -1;
+    }
+}
+
+
+WebRtc_Word16
+ACMILBC::InternalInitDecoder(
+    WebRtcACMCodecParams* codecParams)
+{
+    // initialize with a correct processing block length
+    if((160 == (codecParams->codecInstant).pacsize) ||
+        (320 == (codecParams->codecInstant).pacsize))
+    {
+        // processing block of 20ms
+        return WebRtcIlbcfix_DecoderInit(_decoderInstPtr, 20);
+    }
+    else if((240 == (codecParams->codecInstant).pacsize) ||
+        (480 == (codecParams->codecInstant).pacsize))
+    {
+        // processing block of 30ms
+        return WebRtcIlbcfix_DecoderInit(_decoderInstPtr, 30);
+    }
+    else
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InternalInitDecoder: invalid processing block");
+        return -1;
+    }
+}
+
+
+WebRtc_Word32
+ACMILBC::CodecDef(
+    WebRtcNetEQ_CodecDef& codecDef,
+    const CodecInst&      codecInst)
+{
+    if (!_decoderInitialized)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "CodeDef: decoder not initialized for ILBC");
+        return -1;
+    }
+    // Fill up the structure by calling
+    // "SET_CODEC_PAR" & "SET_ILBC_FUNCTION."
+    // Then return the structure back to NetEQ to add the codec to it's
+    // database.
+    SET_CODEC_PAR((codecDef), kDecoderILBC, codecInst.pltype,
+        _decoderInstPtr, 8000);
+    SET_ILBC_FUNCTIONS((codecDef));
+    return 0;
+}
+
+
+ACMGenericCodec*
+ACMILBC::CreateInstance(void)
+{
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMILBC::InternalCreateEncoder()
+{
+    if (WebRtcIlbcfix_EncoderCreate(&_encoderInstPtr) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InternalCreateEncoder: cannot create instance for ILBC encoder");
+        return -1;
+    }
+    return 0;
+}
+
+
+void
+ACMILBC::DestructEncoderSafe()
+{
+    _encoderInitialized = false;
+    _encoderExist = false;
+    if(_encoderInstPtr != NULL)
+    {
+        WebRtcIlbcfix_EncoderFree(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+}
+
+
+WebRtc_Word16
+ACMILBC::InternalCreateDecoder()
+{
+    if (WebRtcIlbcfix_DecoderCreate(&_decoderInstPtr) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InternalCreateDecoder: cannot create instance for ILBC decoder");
+        return -1;
+    }
+    return 0;
+}
+
+
+void
+ACMILBC::DestructDecoderSafe()
+{
+    _decoderInitialized = false;
+    _decoderExist = false;
+    if(_decoderInstPtr != NULL)
+    {
+        WebRtcIlbcfix_DecoderFree(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+}
+
+
+void
+ACMILBC::InternalDestructEncoderInst(
+    void* ptrInst)
+{
+    if(ptrInst != NULL)
+    {
+        WebRtcIlbcfix_EncoderFree((iLBC_encinst_t_*)ptrInst);
+    }
+    return;
+}
+
+WebRtc_Word16
+ACMILBC::SetBitRateSafe(const WebRtc_Word32 rate)
+{
+    // Check that rate is valid. No need to store the value
+    if (rate == 13300)
+    {
+        WebRtcIlbcfix_EncoderInit(_encoderInstPtr, 30);
+    }
+    else if (rate == 15200)
+    {
+        WebRtcIlbcfix_EncoderInit(_encoderInstPtr, 20);
+    }
+    else
+    {
+        return -1;
+    }
+    _encoderParams.codecInstant.rate = rate;
+
+    return 0;
+}
+
+#endif
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/acm_ilbc.h b/src/modules/audio_coding/main/source/acm_ilbc.h
new file mode 100644
index 0000000..02eb6f1
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_ilbc.h
@@ -0,0 +1,74 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ILBC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ILBC_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct iLBC_encinst_t_;
+struct iLBC_decinst_t_;
+
+namespace webrtc
+{
+
+class ACMILBC : public ACMGenericCodec
+{
+public:
+    ACMILBC(WebRtc_Word16 codecID);
+    ~ACMILBC();
+    // for FEC
+    ACMGenericCodec* CreateInstance(void);
+
+    WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitstream,
+        WebRtc_Word16* bitStreamLenByte);
+
+    WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams);
+
+    WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams);
+
+protected:
+    WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+    WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst&      codecInst);
+
+
+    WebRtc_Word16 SetBitRateSafe(
+        const WebRtc_Word32 rate);
+
+    void DestructEncoderSafe();
+
+    void DestructDecoderSafe();
+
+    WebRtc_Word16 InternalCreateEncoder();
+
+    WebRtc_Word16 InternalCreateDecoder();
+
+    void InternalDestructEncoderInst(
+        void* ptrInst);
+
+    iLBC_encinst_t_* _encoderInstPtr;
+    iLBC_decinst_t_* _decoderInstPtr;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ILBC_H_
diff --git a/src/modules/audio_coding/main/source/acm_isac.cc b/src/modules/audio_coding/main/source/acm_isac.cc
new file mode 100644
index 0000000..b5ec6d2
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_isac.cc
@@ -0,0 +1,1202 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_codec_database.h"
+#include "acm_common_defs.h"
+#include "acm_isac.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+
+#ifdef WEBRTC_CODEC_ISAC
+    #include "acm_isac_macros.h"
+    #include "isac.h"
+#endif
+
+#ifdef WEBRTC_CODEC_ISACFX
+    #include "acm_isac_macros.h"
+    #include "isacfix.h"
+#endif
+
+namespace webrtc
+{
+
+// we need this otherwise we cannot use forward declaration
+// in the header file
+#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
+struct ACMISACInst
+{
+    ACM_ISAC_STRUCT *inst;
+};
+#endif
+
+#define ISAC_MIN_RATE 10000
+#define ISAC_MAX_RATE 56000
+
+
+// How the scaling is computed. iSAC computes a gain based on the
+// bottleneck. It follows the following expression for that
+//
+// G(BN_kbps) = pow(10, (a + b * BN_kbps + c * BN_kbps * BN_kbps) / 20.0)
+//              / 3.4641;
+//
+// Where for 30 ms framelength we have,
+//
+// a = -23; b = 0.48; c = 0;
+//
+// As the default encoder is operating at 32kbps we have the scale as
+//
+// S(BN_kbps) = G(BN_kbps) / G(32);
+
+#define ISAC_NUM_SUPPORTED_RATES 9
+const WebRtc_UWord16 isacSuportedRates[ISAC_NUM_SUPPORTED_RATES] = {
+    32000,    30000,    26000,   23000,   21000,
+    19000,    17000,   15000,    12000};
+
+const float isacScale[ISAC_NUM_SUPPORTED_RATES] = {
+     1.0f,    0.8954f,  0.7178f, 0.6081f, 0.5445f,
+     0.4875f, 0.4365f,  0.3908f, 0.3311f};
+
+// Tables for bandwidth estimates
+#define NR_ISAC_BANDWIDTHS 24
+const WebRtc_Word32 isacRatesWB[NR_ISAC_BANDWIDTHS] =
+{
+    10000, 11100, 12300, 13700, 15200, 16900,
+    18800, 20900, 23300, 25900, 28700, 31900,
+    10100, 11200, 12400, 13800, 15300, 17000,
+    18900, 21000, 23400, 26000, 28800, 32000};
+
+
+const WebRtc_Word32 isacRatesSWB[NR_ISAC_BANDWIDTHS] =
+{
+    10000, 11000, 12400, 13800, 15300, 17000,
+    18900, 21000, 23200, 25400, 27600, 29800,
+    32000, 34100, 36300, 38500, 40700, 42900,
+    45100, 47300, 49500, 51700, 53900, 56000,
+};
+
+#if (!defined(WEBRTC_CODEC_ISAC) && !defined(WEBRTC_CODEC_ISACFX))
+
+ACMISAC::ACMISAC(WebRtc_Word16 /* codecID */)
+    : _codecInstPtr(NULL),
+      _isEncInitialized(false),
+      _isacCodingMode(CHANNEL_INDEPENDENT),
+      _enforceFrameSize(false),
+      _isacCurrentBN(32000),
+      _samplesIn10MsAudio(160) {  // Initiates to 16 kHz mode.
+  // Initiate decoder parameters for the 32 kHz mode.
+  memset(&_decoderParams32kHz, 0, sizeof(WebRtcACMCodecParams));
+  _decoderParams32kHz.codecInstant.pltype = -1;
+
+  return;
+}
+
+
+ACMISAC::~ACMISAC()
+{
+    return;
+}
+
+
+ACMGenericCodec*
+ACMISAC::CreateInstance(void)
+{
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMISAC::InternalEncode(
+    WebRtc_UWord8* /* bitstream        */,
+    WebRtc_Word16* /* bitStreamLenByte */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMISAC::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMISAC::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMISAC::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMISAC::InternalCreateDecoder()
+{
+    return -1;
+}
+
+
+void
+ACMISAC::DestructDecoderSafe()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMISAC::InternalCreateEncoder()
+{
+    return -1;
+}
+
+
+void
+ACMISAC::DestructEncoderSafe()
+{
+    return;
+}
+
+
+WebRtc_Word32
+ACMISAC::CodecDef(
+    WebRtcNetEQ_CodecDef& /* codecDef  */,
+    const CodecInst&      /* codecInst */)
+{
+    return -1;
+}
+
+
+void
+ACMISAC::InternalDestructEncoderInst(
+    void* /* ptrInst */)
+{
+    return;
+}
+
+WebRtc_Word16
+ACMISAC::DeliverCachedIsacData(
+    WebRtc_UWord8*         /* bitStream        */,
+    WebRtc_Word16*         /* bitStreamLenByte */,
+    WebRtc_UWord32*        /* timestamp        */,
+    WebRtcACMEncodingType* /* encodingType     */,
+    const WebRtc_UWord16   /* isacRate         */,
+    const WebRtc_UWord8    /* isacBWestimate   */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMISAC::Transcode(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16* /* bitStreamLenByte */,
+    WebRtc_Word16  /* qBWE             */,
+    WebRtc_Word32  /* scale            */,
+    bool           /* isRED            */)
+{
+    return -1;
+}
+
+WebRtc_Word16
+ACMISAC::SetBitRateSafe(
+    WebRtc_Word32 /* bitRate */)
+{
+    return -1;
+}
+
+WebRtc_Word32
+ACMISAC::GetEstimatedBandwidthSafe()
+{
+    return -1;
+}
+
+WebRtc_Word32
+ACMISAC::SetEstimatedBandwidthSafe(
+    WebRtc_Word32 /* estimatedBandwidth */)
+{
+    return -1;
+}
+
+WebRtc_Word32
+ACMISAC::GetRedPayloadSafe(
+    WebRtc_UWord8* /* redPayload   */,
+    WebRtc_Word16* /* payloadBytes */)
+{
+    return -1;
+}
+
+WebRtc_Word16
+ACMISAC::UpdateDecoderSampFreq(
+    WebRtc_Word16 /* codecId */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMISAC::UpdateEncoderSampFreq(
+    WebRtc_UWord16 /* encoderSampFreqHz */)
+{
+    return -1;
+}
+
+WebRtc_Word16
+ACMISAC::EncoderSampFreq(
+        WebRtc_UWord16& /* sampFreqHz */)
+{
+    return -1;
+}
+
+WebRtc_Word32
+ACMISAC::ConfigISACBandwidthEstimator(
+    const WebRtc_UWord8  /* initFrameSizeMsec */,
+    const WebRtc_UWord16 /* initRateBitPerSec */,
+    const bool           /* enforceFrameSize  */)
+{
+    return -1;
+}
+
+WebRtc_Word32
+ACMISAC::SetISACMaxPayloadSize(
+    const WebRtc_UWord16 /* maxPayloadLenBytes */)
+{
+    return -1;
+}
+
+WebRtc_Word32
+ACMISAC::SetISACMaxRate(
+        const WebRtc_UWord32 /* maxRateBitPerSec */)
+{
+    return -1;
+}
+
+
+void
+ACMISAC::UpdateFrameLen()
+{
+    return;
+}
+
+void
+ACMISAC::CurrentRate(
+    WebRtc_Word32& /*rateBitPerSec */)
+{
+    return;
+}
+
+bool
+ACMISAC::DecoderParamsSafe(
+    WebRtcACMCodecParams* /* decParams   */,
+    const WebRtc_UWord8   /* payloadType */)
+{
+    return false;
+}
+
+void
+ACMISAC::SaveDecoderParamSafe(
+    const WebRtcACMCodecParams* /* codecParams */)
+{
+    return;
+}
+
+WebRtc_Word16
+ACMISAC::REDPayloadISAC(
+        const WebRtc_Word32 /* isacRate        */,
+        const WebRtc_Word16 /* isacBwEstimate  */,
+        WebRtc_UWord8*      /* payload         */,
+        WebRtc_Word16*      /* payloadLenBytes */)
+{
+    return -1;
+}
+
+
+#else     //===================== Actual Implementation =======================
+
+
+
+#ifdef WEBRTC_CODEC_ISACFX
+
+enum IsacSamplingRate
+{
+    kIsacWideband = 16,
+    kIsacSuperWideband = 32
+};
+
+static float
+ACMISACFixTranscodingScale(
+    WebRtc_UWord16 rate)
+{
+    // find the scale for transcoding, the scale is rounded
+    // downward
+    float  scale = -1;
+    for(WebRtc_Word16 n=0; n < ISAC_NUM_SUPPORTED_RATES; n++)
+    {
+        if(rate >= isacSuportedRates[n])
+        {
+            scale = isacScale[n];
+            break;
+        }
+    }
+    return scale;
+}
+
+static void
+ACMISACFixGetSendBitrate(
+    ACM_ISAC_STRUCT* inst,
+    WebRtc_Word32*   bottleNeck)
+{
+    *bottleNeck = WebRtcIsacfix_GetUplinkBw(inst);
+}
+
+static WebRtc_Word16
+ACMISACFixGetNewBitstream(
+    ACM_ISAC_STRUCT* inst,
+    WebRtc_Word16    BWEIndex,
+    WebRtc_Word16    /* jitterIndex */,
+    WebRtc_Word32    rate,
+    WebRtc_Word16*   bitStream,
+    bool             isRED)
+{
+    if (isRED)
+    {
+        // RED not supported with iSACFIX
+        return -1;
+    }
+    float scale = ACMISACFixTranscodingScale((WebRtc_UWord16)rate);
+    return WebRtcIsacfix_GetNewBitStream(inst, BWEIndex, scale, bitStream);
+}
+
+
+static WebRtc_Word16
+ACMISACFixGetSendBWE(
+    ACM_ISAC_STRUCT* inst,
+    WebRtc_Word16*   rateIndex,
+    WebRtc_Word16*   /* dummy */)
+{
+    WebRtc_Word16 localRateIndex;
+    WebRtc_Word16 status = WebRtcIsacfix_GetDownLinkBwIndex(inst, &localRateIndex);
+    if(status < 0)
+    {
+        return -1;
+    }
+    else
+    {
+        *rateIndex = localRateIndex;
+        return 0;
+    }
+}
+
+static WebRtc_Word16
+ACMISACFixControlBWE(
+    ACM_ISAC_STRUCT* inst,
+    WebRtc_Word32    rateBPS,
+    WebRtc_Word16    frameSizeMs,
+    WebRtc_Word16    enforceFrameSize)
+{
+    return WebRtcIsacfix_ControlBwe(inst, (WebRtc_Word16)rateBPS,
+        frameSizeMs, enforceFrameSize);
+}
+
+static WebRtc_Word16
+ACMISACFixControl(
+    ACM_ISAC_STRUCT* inst,
+    WebRtc_Word32    rateBPS,
+    WebRtc_Word16    frameSizeMs)
+{
+    return WebRtcIsacfix_Control(inst, (WebRtc_Word16)rateBPS,
+        frameSizeMs);
+}
+
+static IsacSamplingRate
+ACMISACFixGetEncSampRate(
+    ACM_ISAC_STRUCT* /* inst */)
+{
+    return kIsacWideband;
+}
+
+
+static IsacSamplingRate
+ACMISACFixGetDecSampRate(
+    ACM_ISAC_STRUCT* /* inst */)
+{
+    return kIsacWideband;
+}
+
+#endif
+
+
+
+
+
+
+ACMISAC::ACMISAC(WebRtc_Word16 codecID)
+    : _isEncInitialized(false),
+      _isacCodingMode(CHANNEL_INDEPENDENT),
+      _enforceFrameSize(false),
+      _isacCurrentBN(32000),
+      _samplesIn10MsAudio(160) {  // Initiates to 16 kHz mode.
+  _codecID = codecID;
+
+  // Create codec instance.
+  _codecInstPtr = new ACMISACInst;
+  if (_codecInstPtr == NULL) {
+    return;
+  }
+  _codecInstPtr->inst = NULL;
+
+  // Initiate decoder parameters for the 32 kHz mode.
+  memset(&_decoderParams32kHz, 0, sizeof(WebRtcACMCodecParams));
+  _decoderParams32kHz.codecInstant.pltype = -1;
+
+  // TODO(tlegrand): Check if the following is really needed, now that
+  // ACMGenericCodec has been updated to initialize this value.
+  // Initialize values that can be used uninitialized otherwise
+  _decoderParams.codecInstant.pltype = -1;
+}
+
+
+ACMISAC::~ACMISAC()
+{
+    if (_codecInstPtr != NULL)
+    {
+        if(_codecInstPtr->inst != NULL)
+        {
+            ACM_ISAC_FREE(_codecInstPtr->inst);
+            _codecInstPtr->inst = NULL;
+        }
+        delete _codecInstPtr;
+        _codecInstPtr = NULL;
+    }
+    return;
+}
+
+
+ACMGenericCodec*
+ACMISAC::CreateInstance(void)
+{
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMISAC::InternalEncode(
+    WebRtc_UWord8* bitstream,
+    WebRtc_Word16* bitStreamLenByte)
+{
+    // ISAC takes 10ms audio everytime we call encoder, therefor,
+    // it should be treated like codecs with 'basic coding block'
+    // non-zero, and the following 'while-loop' should not be necessary.
+    // However, due to a mistake in the codec the frame-size might change
+    // at the first 10ms pushed in to iSAC if the bit-rate is low, this is
+    // sort of a bug in iSAC. to address this we treat iSAC as the
+    // following.
+
+    if (_codecInstPtr == NULL)
+    {
+        return -1;
+    }
+    *bitStreamLenByte = 0;
+    while((*bitStreamLenByte == 0) && (_inAudioIxRead < _frameLenSmpl))
+    {
+        if(_inAudioIxRead > _inAudioIxWrite)
+        {
+            // something is wrong.
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                "The actual fram-size of iSAC appears to be larger that expected. All audio \
+pushed in but no bit-stream is generated.");
+            return -1;
+        }
+        *bitStreamLenByte = ACM_ISAC_ENCODE(_codecInstPtr->inst,
+            &_inAudio[_inAudioIxRead], (WebRtc_Word16*)bitstream);
+        // increment the read index this tell the caller that how far
+        // we have gone forward in reading the audio buffer
+        _inAudioIxRead += _samplesIn10MsAudio;
+    }
+    if(*bitStreamLenByte == 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, _uniqueID,
+            "ISAC Has encoded the whole frame but no bit-stream is generated.");
+    }
+
+    // a packet is generated iSAC, is set in adaptive mode may change
+    // the frame length and we like to update the bottleneck value as
+    // well, although updating bottleneck is not crucial
+    if((*bitStreamLenByte > 0) && (_isacCodingMode == ADAPTIVE))
+    {
+        //_frameLenSmpl = ACM_ISAC_GETNEWFRAMELEN(_codecInstPtr->inst);
+        ACM_ISAC_GETSENDBITRATE(_codecInstPtr->inst, &_isacCurrentBN);
+    }
+    UpdateFrameLen();
+    return *bitStreamLenByte;
+}
+
+
+WebRtc_Word16
+ACMISAC::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMISAC::InternalInitEncoder(
+    WebRtcACMCodecParams* codecParams)
+{
+    // if rate is set to -1 then iSAC has to be in adaptive mode
+    if(codecParams->codecInstant.rate == -1)
+    {
+        _isacCodingMode = ADAPTIVE;
+    }
+
+    // sanity check that rate is in acceptable range
+    else if((codecParams->codecInstant.rate >= ISAC_MIN_RATE) &&
+        (codecParams->codecInstant.rate <= ISAC_MAX_RATE))
+    {
+        _isacCodingMode = CHANNEL_INDEPENDENT;
+        _isacCurrentBN = codecParams->codecInstant.rate;
+    }
+    else
+    {
+        return -1;
+    }
+
+    // we need to set the encoder sampling frequency.
+    if(UpdateEncoderSampFreq((WebRtc_UWord16)codecParams->codecInstant.plfreq) < 0)
+    {
+        return -1;
+    }
+    if(ACM_ISAC_ENCODERINIT(_codecInstPtr->inst, _isacCodingMode) < 0)
+    {
+        return -1;
+    }
+
+    // apply the frame-size and rate if operating in
+    // channel-independent mode
+    if(_isacCodingMode == CHANNEL_INDEPENDENT)
+    {
+        if(ACM_ISAC_CONTROL(_codecInstPtr->inst,
+            codecParams->codecInstant.rate,
+            codecParams->codecInstant.pacsize /
+            (codecParams->codecInstant.plfreq / 1000)) < 0)
+        {
+            return -1;
+        }
+    }
+    else
+    {
+        // We need this for adaptive case and has to be called
+        // after initialization
+        ACM_ISAC_GETSENDBITRATE(
+            _codecInstPtr->inst, &_isacCurrentBN);
+    }
+    _frameLenSmpl = ACM_ISAC_GETNEWFRAMELEN(_codecInstPtr->inst);
+    return 0;
+}
+
+WebRtc_Word16
+ACMISAC::InternalInitDecoder(
+    WebRtcACMCodecParams*  codecParams)
+{
+    if (_codecInstPtr == NULL)
+    {
+        return -1;
+    }
+
+    // set decoder sampling frequency.
+    if(codecParams->codecInstant.plfreq == 32000)
+    {
+        UpdateDecoderSampFreq(ACMCodecDB::kISACSWB);
+    }
+    else
+    {
+        UpdateDecoderSampFreq(ACMCodecDB::kISAC);
+    }
+
+    // in a one-way communication we may never register send-codec.
+    // However we like that the BWE to work properly so it has to
+    // be initialized. The BWE is initialized when iSAC encoder is initialized.
+    // Therefore, we need this.
+    if(!_encoderInitialized)
+    {
+        // Since we don't require a valid rate or a valid packet size when initializing
+        // the decoder, we set valid values before initializing encoder
+        codecParams->codecInstant.rate = kIsacWbDefaultRate;
+        codecParams->codecInstant.pacsize = kIsacPacSize960;
+        if(InternalInitEncoder(codecParams) < 0)
+        {
+            return -1;
+        }
+        _encoderInitialized = true;
+    }
+
+    return ACM_ISAC_DECODERINIT(_codecInstPtr->inst);
+}
+
+
+WebRtc_Word16
+ACMISAC::InternalCreateDecoder()
+{
+    if (_codecInstPtr == NULL)
+    {
+        return -1;
+    }
+    WebRtc_Word16 status = ACM_ISAC_CREATE (&(_codecInstPtr->inst));
+
+    // specific to codecs with one instance for encoding and decoding
+    _encoderInitialized = false;
+    if(status < 0)
+    {
+        _encoderExist = false;
+    }
+    else
+    {
+        _encoderExist = true;
+    }
+    return status;
+}
+
+
+void
+ACMISAC::DestructDecoderSafe()
+{
+    // codec with shared instance cannot delete.
+    _decoderInitialized = false;
+    return;
+}
+
+
+WebRtc_Word16
+ACMISAC::InternalCreateEncoder()
+{
+    if (_codecInstPtr == NULL)
+    {
+        return -1;
+    }
+    WebRtc_Word16 status = ACM_ISAC_CREATE(&(_codecInstPtr->inst));
+
+    // specific to codecs with one instance for encoding and decoding
+    _decoderInitialized = false;
+    if(status < 0)
+    {
+        _decoderExist = false;
+    }
+    else
+    {
+        _decoderExist = true;
+    }
+    return status;
+}
+
+
+void
+ACMISAC::DestructEncoderSafe()
+{
+    // codec with shared instance cannot delete.
+    _encoderInitialized = false;
+    return;
+}
+
+
+WebRtc_Word32
+ACMISAC::CodecDef(
+    WebRtcNetEQ_CodecDef& codecDef,
+    const CodecInst&      codecInst)
+{
+    // Sanity checks
+    if (_codecInstPtr == NULL)
+    {
+        return -1;
+    }
+    if (!_decoderInitialized || !_decoderExist)
+    {
+        // Todo:
+        // log error
+        return -1;
+    }
+    // Fill up the structure by calling
+    // "SET_CODEC_PAR" & "SET_ISAC_FUNCTION."
+    // Then call NetEQ to add the codec to it's
+    // database.
+    if(codecInst.plfreq == 16000)
+    {
+        SET_CODEC_PAR((codecDef), kDecoderISAC, codecInst.pltype,
+            _codecInstPtr->inst, 16000);
+#ifdef WEBRTC_CODEC_ISAC
+        SET_ISAC_FUNCTIONS((codecDef));
+#else
+        SET_ISACfix_FUNCTIONS((codecDef));
+#endif
+    }
+    else
+    {
+#ifdef WEBRTC_CODEC_ISAC
+        SET_CODEC_PAR((codecDef), kDecoderISACswb, codecInst.pltype,
+            _codecInstPtr->inst, 32000);
+        SET_ISACSWB_FUNCTIONS((codecDef));
+#else
+        return -1;
+#endif
+    }
+
+    return 0;
+}
+
+
+void
+ACMISAC::InternalDestructEncoderInst(
+    void* ptrInst)
+{
+    if(ptrInst != NULL)
+    {
+        ACM_ISAC_FREE((ACM_ISAC_STRUCT *)ptrInst);
+    }
+    return;
+}
+
+WebRtc_Word16
+ACMISAC::Transcode(
+    WebRtc_UWord8* bitStream,
+    WebRtc_Word16* bitStreamLenByte,
+    WebRtc_Word16  qBWE,
+    WebRtc_Word32  rate,
+    bool           isRED)
+{
+    WebRtc_Word16 jitterInfo = 0;
+    // transcode from a higher rate to lower rate
+    // sanity check
+    if (_codecInstPtr == NULL)
+    {
+        return -1;
+    }
+
+    *bitStreamLenByte = ACM_ISAC_GETNEWBITSTREAM(_codecInstPtr->inst,
+        qBWE, jitterInfo, rate, (WebRtc_Word16*)bitStream, (isRED)? 1:0);
+
+    if(*bitStreamLenByte < 0)
+    {
+        // error happened
+        *bitStreamLenByte = 0;
+        return -1;
+    }
+    else
+    {
+        return *bitStreamLenByte;
+    }
+}
+
+WebRtc_Word16
+ACMISAC::SetBitRateSafe(
+    WebRtc_Word32 bitRate)
+{
+    if (_codecInstPtr == NULL)
+    {
+        return -1;
+    }
+    WebRtc_UWord16 encoderSampFreq;
+    EncoderSampFreq(encoderSampFreq);
+    bool reinit = false;
+    // change the BN of iSAC
+    if(bitRate == -1)
+    {
+        // ADAPTIVE MODE
+        // Check if it was already in adaptive mode
+        if(_isacCodingMode != ADAPTIVE)
+        {
+            // was not in adaptive, then set the mode to adaptive
+            // and flag for re-initialization
+            _isacCodingMode = ADAPTIVE;
+            reinit = true;
+        }
+    }
+    // Sanity check if the rate valid
+    else if((bitRate >= ISAC_MIN_RATE) &&
+        (bitRate <= ISAC_MAX_RATE))
+    {
+        //check if it was in channel-independent mode before
+        if(_isacCodingMode != CHANNEL_INDEPENDENT)
+        {
+            // was not in channel independent, set the mode to
+            // channel-independent and flag for re-initialization
+            _isacCodingMode = CHANNEL_INDEPENDENT;
+            reinit = true;
+        }
+        // store the bottleneck
+        _isacCurrentBN = (WebRtc_UWord16)bitRate;
+    }
+    else
+    {
+        // invlaid rate
+        return -1;
+    }
+
+    WebRtc_Word16 status = 0;
+    if(reinit)
+    {
+        // initialize and check if it is successful
+        if(ACM_ISAC_ENCODERINIT(_codecInstPtr->inst, _isacCodingMode) < 0)
+        {
+            // failed initialization
+            return -1;
+        }
+    }
+    if(_isacCodingMode == CHANNEL_INDEPENDENT)
+    {
+
+        status = ACM_ISAC_CONTROL(_codecInstPtr->inst, _isacCurrentBN,
+            (encoderSampFreq == 32000)? 30:(_frameLenSmpl / 16));
+        if(status < 0)
+        {
+            status = -1;
+        }
+    }
+
+    // Update encoder parameters
+    _encoderParams.codecInstant.rate = bitRate;
+
+    UpdateFrameLen();
+    return status;
+}
+
+
+WebRtc_Word32
+ACMISAC::GetEstimatedBandwidthSafe()
+{
+    WebRtc_Word16 bandwidthIndex = 0;
+    WebRtc_Word16 delayIndex = 0;
+    IsacSamplingRate sampRate;
+
+    // Get bandwidth information
+    ACM_ISAC_GETSENDBWE(_codecInstPtr->inst, &bandwidthIndex, &delayIndex);
+
+    // Validy check of index
+    if ((bandwidthIndex < 0) || (bandwidthIndex >= NR_ISAC_BANDWIDTHS))
+    {
+        return -1;
+    }
+
+    // Check sample frequency
+    sampRate = ACM_ISAC_GETDECSAMPRATE(_codecInstPtr->inst);
+    if(sampRate == kIsacWideband)
+    {
+        return isacRatesWB[bandwidthIndex];
+    }
+    else
+    {
+        return isacRatesSWB[bandwidthIndex];
+    }
+}
+
+WebRtc_Word32
+ACMISAC::SetEstimatedBandwidthSafe(
+    WebRtc_Word32 estimatedBandwidth)
+{
+    IsacSamplingRate sampRate;
+    WebRtc_Word16 bandwidthIndex;
+
+    // Check sample frequency and choose appropriate table
+    sampRate = ACM_ISAC_GETENCSAMPRATE(_codecInstPtr->inst);
+
+    if(sampRate == kIsacWideband)
+    {
+        // Search through the WB rate table to find the index
+
+        bandwidthIndex = NR_ISAC_BANDWIDTHS/2 - 1;
+        for (int i=0; i<(NR_ISAC_BANDWIDTHS/2); i++)
+        {
+            if (estimatedBandwidth == isacRatesWB[i])
+            {
+                bandwidthIndex = i;
+                break;
+            } else if (estimatedBandwidth == isacRatesWB[i+NR_ISAC_BANDWIDTHS/2])
+            {
+                bandwidthIndex = i + NR_ISAC_BANDWIDTHS/2;
+                break;
+            } else if (estimatedBandwidth < isacRatesWB[i])
+            {
+                bandwidthIndex = i;
+                break;
+             }
+        }
+    }
+    else
+    {
+        // Search through the SWB rate table to find the index
+        bandwidthIndex = NR_ISAC_BANDWIDTHS - 1;
+        for (int i=0; i<NR_ISAC_BANDWIDTHS; i++)
+        {
+            if(estimatedBandwidth <= isacRatesSWB[i])
+            {
+                bandwidthIndex = i;
+                break;
+            }
+        }
+    }
+
+    // Set iSAC Bandwidth Estimate
+    ACM_ISAC_SETBWE(_codecInstPtr->inst, bandwidthIndex);
+
+    return 0;
+}
+
+WebRtc_Word32
+ACMISAC::GetRedPayloadSafe(
+#if (!defined(WEBRTC_CODEC_ISAC))
+    WebRtc_UWord8* /* redPayload   */,
+    WebRtc_Word16* /* payloadBytes */)
+{
+    return -1;
+#else
+    WebRtc_UWord8* redPayload,
+    WebRtc_Word16* payloadBytes)
+{
+
+    WebRtc_Word16 bytes = WebRtcIsac_GetRedPayload(_codecInstPtr->inst, (WebRtc_Word16*)redPayload);
+    if (bytes < 0)
+    {
+        return -1;
+    }
+    *payloadBytes = bytes;
+    return 0;
+#endif
+}
+
+WebRtc_Word16
+ACMISAC::UpdateDecoderSampFreq(
+#ifdef WEBRTC_CODEC_ISAC
+    WebRtc_Word16 codecId)
+{
+    if(ACMCodecDB::kISAC == codecId)
+    {
+        return WebRtcIsac_SetDecSampRate(_codecInstPtr->inst, kIsacWideband);
+    }
+    else if(ACMCodecDB::kISACSWB == codecId)
+    {
+        return WebRtcIsac_SetDecSampRate(_codecInstPtr->inst, kIsacSuperWideband);
+    }
+    else
+    {
+        return -1;
+    }
+
+#else
+    WebRtc_Word16 /* codecId */)
+{
+    return 0;
+#endif
+}
+
+
+WebRtc_Word16
+ACMISAC::UpdateEncoderSampFreq(
+#ifdef WEBRTC_CODEC_ISAC
+    WebRtc_UWord16 encoderSampFreqHz)
+{
+    WebRtc_UWord16 currentSampRateHz;
+    EncoderSampFreq(currentSampRateHz);
+
+    if(currentSampRateHz != encoderSampFreqHz)
+    {
+        if((encoderSampFreqHz != 16000) && (encoderSampFreqHz != 32000))
+        {
+            return -1;
+        }
+        else
+        {
+            _inAudioIxRead = 0;
+            _inAudioIxWrite = 0;
+            _inTimestampIxWrite = 0;
+            if(encoderSampFreqHz == 16000)
+            {
+                if(WebRtcIsac_SetEncSampRate(_codecInstPtr->inst, kIsacWideband) < 0)
+                {
+                    return -1;
+                }
+                _samplesIn10MsAudio = 160;
+            }
+            else
+            {
+
+                if(WebRtcIsac_SetEncSampRate(_codecInstPtr->inst, kIsacSuperWideband) < 0)
+                {
+                    return -1;
+                }
+                _samplesIn10MsAudio = 320;
+            }
+            _frameLenSmpl = ACM_ISAC_GETNEWFRAMELEN(_codecInstPtr->inst);
+            _encoderParams.codecInstant.pacsize = _frameLenSmpl;
+            _encoderParams.codecInstant.plfreq = encoderSampFreqHz;
+            return 0;
+        }
+    }
+#else
+    WebRtc_UWord16 /* codecId */)
+{
+#endif
+    return 0;
+}
+
+WebRtc_Word16
+ACMISAC::EncoderSampFreq(
+    WebRtc_UWord16& sampFreqHz)
+{
+    IsacSamplingRate sampRate;
+    sampRate = ACM_ISAC_GETENCSAMPRATE(_codecInstPtr->inst);
+    if(sampRate == kIsacSuperWideband)
+    {
+        sampFreqHz = 32000;
+    }
+    else
+    {
+        sampFreqHz = 16000;
+    }
+    return 0;
+}
+
+WebRtc_Word32
+ACMISAC::ConfigISACBandwidthEstimator(
+    const WebRtc_UWord8  initFrameSizeMsec,
+    const WebRtc_UWord16 initRateBitPerSec,
+    const bool           enforceFrameSize)
+{
+    WebRtc_Word16 status;
+    {
+        WebRtc_UWord16 sampFreqHz;
+        EncoderSampFreq(sampFreqHz);
+        // @TODO: at 32kHz we hardcode calling with 30ms and enforce
+        // the frame-size otherwise we might get error. Revise if
+        // control-bwe is changed.
+        if(sampFreqHz == 32000)
+        {
+            status = ACM_ISAC_CONTROL_BWE(_codecInstPtr->inst,
+                initRateBitPerSec, 30, 1);
+        }
+        else
+        {
+            status = ACM_ISAC_CONTROL_BWE(_codecInstPtr->inst,
+                initRateBitPerSec, initFrameSizeMsec, enforceFrameSize? 1:0);
+        }
+    }
+    if(status < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Coutn't config iSAC BWE.");
+        return -1;
+    }
+    UpdateFrameLen();
+    ACM_ISAC_GETSENDBITRATE(_codecInstPtr->inst, &_isacCurrentBN);
+    return 0;
+}
+
+WebRtc_Word32
+ACMISAC::SetISACMaxPayloadSize(
+    const WebRtc_UWord16 maxPayloadLenBytes)
+{
+    return ACM_ISAC_SETMAXPAYLOADSIZE(_codecInstPtr->inst, maxPayloadLenBytes);
+}
+
+WebRtc_Word32
+ACMISAC::SetISACMaxRate(
+    const WebRtc_UWord32 maxRateBitPerSec)
+{
+    return ACM_ISAC_SETMAXRATE(_codecInstPtr->inst, maxRateBitPerSec);
+}
+
+
+void
+ACMISAC::UpdateFrameLen()
+{
+    _frameLenSmpl = ACM_ISAC_GETNEWFRAMELEN(_codecInstPtr->inst);
+    _encoderParams.codecInstant.pacsize = _frameLenSmpl;
+}
+
+void
+ACMISAC::CurrentRate(WebRtc_Word32& rateBitPerSec)
+{
+    if(_isacCodingMode == ADAPTIVE)
+    {
+        ACM_ISAC_GETSENDBITRATE(_codecInstPtr->inst, &rateBitPerSec);
+    }
+}
+
+
+bool
+ACMISAC::DecoderParamsSafe(
+    WebRtcACMCodecParams* decParams,
+    const WebRtc_UWord8   payloadType)
+{
+    if(_decoderInitialized)
+    {
+        if(payloadType == _decoderParams.codecInstant.pltype)
+        {
+            memcpy(decParams, &_decoderParams, sizeof(WebRtcACMCodecParams));
+            return true;
+        }
+        if(payloadType == _decoderParams32kHz.codecInstant.pltype)
+        {
+            memcpy(decParams, &_decoderParams32kHz,
+                sizeof(WebRtcACMCodecParams));
+            return true;
+        }
+    }
+    return false;
+}
+
+void
+ACMISAC::SaveDecoderParamSafe(
+    const WebRtcACMCodecParams* codecParams)
+{
+    // set decoder sampling frequency.
+    if(codecParams->codecInstant.plfreq == 32000)
+    {
+        memcpy(&_decoderParams32kHz, codecParams, sizeof(WebRtcACMCodecParams));
+    }
+    else
+    {
+        memcpy(&_decoderParams, codecParams, sizeof(WebRtcACMCodecParams));
+    }
+}
+
+
+WebRtc_Word16
+ACMISAC::REDPayloadISAC(
+    const WebRtc_Word32  isacRate,
+    const WebRtc_Word16  isacBwEstimate,
+    WebRtc_UWord8*       payload,
+    WebRtc_Word16*       payloadLenBytes)
+{
+    WebRtc_Word16 status;
+    ReadLockScoped rl(_codecWrapperLock);
+    status = Transcode(payload, payloadLenBytes, isacBwEstimate, isacRate, true);
+    return status;
+}
+
+#endif
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/acm_isac.h b/src/modules/audio_coding/main/source/acm_isac.h
new file mode 100644
index 0000000..ee56a6e
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_isac.h
@@ -0,0 +1,149 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_H_
+
+#include "acm_generic_codec.h"
+
+namespace webrtc
+{
+
+struct ACMISACInst;
+
+enum iSACCodingMode {ADAPTIVE, CHANNEL_INDEPENDENT};
+
+
+class ACMISAC : public ACMGenericCodec
+{
+public:
+    ACMISAC(WebRtc_Word16 codecID);
+    ~ACMISAC();
+    // for FEC
+    ACMGenericCodec* CreateInstance(void);
+
+    WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitstream,
+        WebRtc_Word16* bitStreamLenByte);
+
+    WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams);
+
+    WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams);
+
+    WebRtc_Word16 DeliverCachedIsacData(
+        WebRtc_UWord8*         bitStream,
+        WebRtc_Word16*         bitStreamLenByte,
+        WebRtc_UWord32*        timestamp,
+        WebRtcACMEncodingType* encodingType,
+        const WebRtc_UWord16   isacRate,
+        const WebRtc_UWord8    isacBWestimate);
+
+    WebRtc_Word16 DeliverCachedData(
+        WebRtc_UWord8*         /* bitStream        */,
+        WebRtc_Word16*         /* bitStreamLenByte */,
+        WebRtc_UWord32*        /* timestamp        */,
+        WebRtcACMEncodingType* /* encodingType     */)
+    {
+        return -1;
+    }
+
+    WebRtc_Word16 UpdateDecoderSampFreq(
+        WebRtc_Word16 codecId);
+
+    WebRtc_Word16 UpdateEncoderSampFreq(
+        WebRtc_UWord16 sampFreqHz);
+
+    WebRtc_Word16 EncoderSampFreq(
+        WebRtc_UWord16& sampFreqHz);
+
+    WebRtc_Word32 ConfigISACBandwidthEstimator(
+        const WebRtc_UWord8  initFrameSizeMsec,
+        const WebRtc_UWord16 initRateBitPerSec,
+        const bool           enforceFrameSize);
+
+    WebRtc_Word32 SetISACMaxPayloadSize(
+        const WebRtc_UWord16 maxPayloadLenBytes);
+
+    WebRtc_Word32 SetISACMaxRate(
+        const WebRtc_UWord32 maxRateBitPerSec);
+
+    WebRtc_Word16 REDPayloadISAC(
+        const WebRtc_Word32  isacRate,
+        const WebRtc_Word16  isacBwEstimate,
+        WebRtc_UWord8*       payload,
+        WebRtc_Word16*       payloadLenBytes);
+
+protected:
+    WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+    WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst&      codecInst);
+
+    void DestructEncoderSafe();
+
+    void DestructDecoderSafe();
+
+    WebRtc_Word16 SetBitRateSafe(
+        const WebRtc_Word32 bitRate);
+
+    WebRtc_Word32 GetEstimatedBandwidthSafe();
+
+    WebRtc_Word32 SetEstimatedBandwidthSafe(WebRtc_Word32 estimatedBandwidth);
+
+    WebRtc_Word32 GetRedPayloadSafe(
+        WebRtc_UWord8* redPayload,
+        WebRtc_Word16* payloadBytes);
+
+    WebRtc_Word16 InternalCreateEncoder();
+
+    WebRtc_Word16 InternalCreateDecoder();
+
+    void InternalDestructEncoderInst(
+        void* ptrInst);
+
+    WebRtc_Word16 Transcode(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16* bitStreamLenByte,
+        WebRtc_Word16  qBWE,
+        WebRtc_Word32  rate,
+        bool           isRED);
+
+    void CurrentRate(WebRtc_Word32& rateBitPerSec);
+
+    void UpdateFrameLen();
+
+    bool DecoderParamsSafe(
+        WebRtcACMCodecParams *decParams,
+        const WebRtc_UWord8  payloadType);
+
+    void SaveDecoderParamSafe(
+        const WebRtcACMCodecParams* codecParams);
+
+    ACMISACInst* _codecInstPtr;
+
+    bool                  _isEncInitialized;
+    iSACCodingMode        _isacCodingMode;
+    bool                  _enforceFrameSize;
+    WebRtc_Word32         _isacCurrentBN;
+    WebRtc_UWord16        _samplesIn10MsAudio;
+    WebRtcACMCodecParams  _decoderParams32kHz;
+};
+
+} //namespace
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_H_
diff --git a/src/modules/audio_coding/main/source/acm_isac_macros.h b/src/modules/audio_coding/main/source/acm_isac_macros.h
new file mode 100644
index 0000000..4c3842a
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_isac_macros.h
@@ -0,0 +1,74 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_MACROS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_MACROS_H_
+
+#include "engine_configurations.h"
+
+namespace webrtc
+{
+
+#ifdef WEBRTC_CODEC_ISAC
+#   define ACM_ISAC_CREATE            WebRtcIsac_Create
+#   define ACM_ISAC_FREE              WebRtcIsac_Free
+#   define ACM_ISAC_ENCODERINIT       WebRtcIsac_EncoderInit
+#   define ACM_ISAC_ENCODE            WebRtcIsac_Encode
+#   define ACM_ISAC_DECODERINIT       WebRtcIsac_DecoderInit
+#   define ACM_ISAC_DECODE_BWE        WebRtcIsac_UpdateBwEstimate
+#   define ACM_ISAC_DECODE_B          WebRtcIsac_Decode
+#   define ACM_ISAC_DECODEPLC         WebRtcIsac_DecodePlc
+#   define ACM_ISAC_CONTROL           WebRtcIsac_Control
+#   define ACM_ISAC_CONTROL_BWE       WebRtcIsac_ControlBwe
+#   define ACM_ISAC_GETFRAMELEN       WebRtcIsac_ReadFrameLen
+#   define ACM_ISAC_GETERRORCODE      WebRtcIsac_GetErrorCode
+#   define ACM_ISAC_GETSENDBITRATE    WebRtcIsac_GetUplinkBw
+#   define ACM_ISAC_SETMAXPAYLOADSIZE WebRtcIsac_SetMaxPayloadSize
+#   define ACM_ISAC_SETMAXRATE        WebRtcIsac_SetMaxRate
+#   define ACM_ISAC_GETNEWBITSTREAM   WebRtcIsac_GetNewBitStream
+#   define ACM_ISAC_GETSENDBWE        WebRtcIsac_GetDownLinkBwIndex
+#   define ACM_ISAC_SETBWE            WebRtcIsac_UpdateUplinkBw
+#   define ACM_ISAC_GETBWE            WebRtcIsac_ReadBwIndex
+#   define ACM_ISAC_GETNEWFRAMELEN    WebRtcIsac_GetNewFrameLen
+#   define ACM_ISAC_STRUCT            ISACStruct
+#   define ACM_ISAC_GETENCSAMPRATE    WebRtcIsac_EncSampRate
+#   define ACM_ISAC_GETDECSAMPRATE    WebRtcIsac_DecSampRate
+#endif
+
+#ifdef WEBRTC_CODEC_ISACFX
+#   define ACM_ISAC_CREATE            WebRtcIsacfix_Create
+#   define ACM_ISAC_FREE              WebRtcIsacfix_Free
+#   define ACM_ISAC_ENCODERINIT       WebRtcIsacfix_EncoderInit
+#   define ACM_ISAC_ENCODE            WebRtcIsacfix_Encode
+#   define ACM_ISAC_DECODERINIT       WebRtcIsacfix_DecoderInit
+#   define ACM_ISAC_DECODE_BWE        WebRtcIsacfix_UpdateBwEstimate
+#   define ACM_ISAC_DECODE_B          WebRtcIsacfix_Decode
+#   define ACM_ISAC_DECODEPLC         WebRtcIsacfix_DecodePlc
+#   define ACM_ISAC_CONTROL           ACMISACFixControl         // local Impl
+#   define ACM_ISAC_CONTROL_BWE       ACMISACFixControlBWE      // local Impl
+#   define ACM_ISAC_GETFRAMELEN       WebRtcIsacfix_ReadFrameLen
+#   define ACM_ISAC_GETERRORCODE      WebRtcIsacfix_GetErrorCode
+#   define ACM_ISAC_GETSENDBITRATE    ACMISACFixGetSendBitrate   // local Impl
+#   define ACM_ISAC_SETMAXPAYLOADSIZE WebRtcIsacfix_SetMaxPayloadSize
+#   define ACM_ISAC_SETMAXRATE        WebRtcIsacfix_SetMaxRate
+#   define ACM_ISAC_GETNEWBITSTREAM   ACMISACFixGetNewBitstream  // local Impl
+#   define ACM_ISAC_GETSENDBWE        ACMISACFixGetSendBWE       // local Impl
+#   define ACM_ISAC_SETBWE            WebRtcIsacfix_UpdateUplinkBw
+#   define ACM_ISAC_GETBWE            WebRtcIsacfix_ReadBwIndex
+#   define ACM_ISAC_GETNEWFRAMELEN    WebRtcIsacfix_GetNewFrameLen
+#   define ACM_ISAC_STRUCT            ISACFIX_MainStruct
+#   define ACM_ISAC_GETENCSAMPRATE    ACMISACFixGetEncSampRate   // local Impl
+#   define ACM_ISAC_GETDECSAMPRATE    ACMISACFixGetDecSampRate   // local Impl
+#endif
+
+} //namespace
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_ISAC_MACROS_H_
+
diff --git a/src/modules/audio_coding/main/source/acm_neteq.cc b/src/modules/audio_coding/main/source/acm_neteq.cc
new file mode 100644
index 0000000..71e541e
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_neteq.cc
@@ -0,0 +1,1245 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include <algorithm>  // sort
+#include <stdlib.h>  // malloc
+#include <vector>
+
+#include "acm_neteq.h"
+#include "common_types.h"
+#include "critical_section_wrapper.h"
+#include "rw_lock_wrapper.h"
+#include "signal_processing_library.h"
+#include "tick_util.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_internal.h"
+
+namespace webrtc
+{
+
+#define RTP_HEADER_SIZE 12
+#define NETEQ_INIT_FREQ 8000
+#define NETEQ_INIT_FREQ_KHZ (NETEQ_INIT_FREQ/1000)
+#define NETEQ_ERR_MSG_LEN_BYTE (WEBRTC_NETEQ_MAX_ERROR_NAME + 1)
+
+
+ACMNetEQ::ACMNetEQ()
+:
+_id(0),
+_currentSampFreqKHz(NETEQ_INIT_FREQ_KHZ),
+_avtPlayout(false),
+_playoutMode(voice),
+_netEqCritSect(CriticalSectionWrapper::CreateCriticalSection()),
+_vadStatus(false),
+_vadMode(VADNormal),
+_decodeLock(RWLockWrapper::CreateRWLock()),
+_numSlaves(0),
+_receivedStereo(false),
+_masterSlaveInfo(NULL),
+_previousAudioActivity(AudioFrame::kVadUnknown),
+_extraDelay(0),
+_callbackCritSect(CriticalSectionWrapper::CreateCriticalSection())
+{
+    for(int n = 0; n < MAX_NUM_SLAVE_NETEQ + 1; n++)
+    {
+        _isInitialized[n]     = false;
+        _ptrVADInst[n]        = NULL;
+        _inst[n]              = NULL;
+        _instMem[n]           = NULL;
+        _netEqPacketBuffer[n] = NULL;
+    }
+}
+
+ACMNetEQ::~ACMNetEQ() {
+  {
+    CriticalSectionScoped lock(_netEqCritSect);
+    RemoveNetEQSafe(0);  // Master.
+    RemoveSlavesSafe();
+  }
+  if (_netEqCritSect != NULL) {
+    delete _netEqCritSect;
+  }
+
+  if (_decodeLock != NULL) {
+    delete _decodeLock;
+  }
+
+  if (_callbackCritSect != NULL) {
+    delete _callbackCritSect;
+  }
+}
+
+WebRtc_Word32
+ACMNetEQ::Init()
+{
+    CriticalSectionScoped lock(_netEqCritSect);
+
+    for(WebRtc_Word16 idx = 0; idx < _numSlaves + 1; idx++)
+    {
+        if(InitByIdxSafe(idx) < 0)
+        {
+            return -1;
+        }
+        // delete VAD instance and start fresh if required.
+        if(_ptrVADInst[idx] != NULL)
+        {
+            WebRtcVad_Free(_ptrVADInst[idx]);
+            _ptrVADInst[idx] = NULL;
+        }
+        if(_vadStatus)
+        {
+            // Has to enable VAD
+            if(EnableVADByIdxSafe(idx) < 0)
+            {
+                // Failed to enable VAD.
+                // Delete VAD instance, if it is created
+                if(_ptrVADInst[idx] != NULL)
+                {
+                    WebRtcVad_Free(_ptrVADInst[idx]);
+                    _ptrVADInst[idx] = NULL;
+                }
+                // We are at initialization of NetEq, if failed to
+                // enable VAD, we delete the NetEq instance.
+                if (_instMem[idx] != NULL) {
+                    free(_instMem[idx]);
+                    _instMem[idx] = NULL;
+                    _inst[idx] = NULL;
+                }
+                _isInitialized[idx] = false;
+                return -1;
+            }
+        }
+        _isInitialized[idx] = true;
+    }
+    if (EnableVAD() == -1)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word16
+ACMNetEQ::InitByIdxSafe(
+    const WebRtc_Word16 idx)
+{
+    int memorySizeBytes;
+    if (WebRtcNetEQ_AssignSize(&memorySizeBytes) != 0)
+    {
+        LogError("AssignSize", idx);
+        return -1;
+    }
+
+    if(_instMem[idx] != NULL)
+    {
+        free(_instMem[idx]);
+        _instMem[idx] = NULL;
+    }
+    _instMem[idx] = malloc(memorySizeBytes);
+    if (_instMem[idx] == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "InitByIdxSafe: NetEq Initialization error: could not allocate memory for NetEq");
+        _isInitialized[idx] = false;
+        return -1;
+    }
+    if (WebRtcNetEQ_Assign(&_inst[idx], _instMem[idx]) != 0)
+    {
+        if (_instMem[idx] != NULL) {
+            free(_instMem[idx]);
+            _instMem[idx] = NULL;
+        }
+        LogError("Assign", idx);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "InitByIdxSafe: NetEq Initialization error: could not Assign");
+        _isInitialized[idx] = false;
+        return -1;
+    }
+    if (WebRtcNetEQ_Init(_inst[idx], NETEQ_INIT_FREQ) != 0)
+    {
+        if (_instMem[idx] != NULL) {
+            free(_instMem[idx]);
+            _instMem[idx] = NULL;
+        }
+        LogError("Init", idx);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "InitByIdxSafe: NetEq Initialization error: could not initialize NetEq");
+        _isInitialized[idx] = false;
+        return -1;
+    }
+    _isInitialized[idx] = true;
+    return 0;
+}
+
+WebRtc_Word16
+ACMNetEQ::EnableVADByIdxSafe(
+    const WebRtc_Word16 idx)
+{
+    if(_ptrVADInst[idx] == NULL)
+    {
+        if(WebRtcVad_Create(&_ptrVADInst[idx]) < 0)
+        {
+            _ptrVADInst[idx] = NULL;
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "EnableVADByIdxSafe: NetEq Initialization error: could not create VAD");
+            return -1;
+        }
+    }
+
+    if(WebRtcNetEQ_SetVADInstance(_inst[idx], _ptrVADInst[idx],
+        (WebRtcNetEQ_VADInitFunction)    WebRtcVad_Init,
+        (WebRtcNetEQ_VADSetmodeFunction) WebRtcVad_set_mode,
+        (WebRtcNetEQ_VADFunction)        WebRtcVad_Process) < 0)
+    {
+       LogError("setVADinstance", idx);
+       WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+           "EnableVADByIdxSafe: NetEq Initialization error: could not set VAD instance");
+        return -1;
+    }
+
+    if(WebRtcNetEQ_SetVADMode(_inst[idx], _vadMode) < 0)
+    {
+        LogError("setVADmode", idx);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "EnableVADByIdxSafe: NetEq Initialization error: could not set VAD mode");
+        return -1;
+    }
+    return 0;
+}
+
+
+
+
+WebRtc_Word32
+ACMNetEQ::AllocatePacketBuffer(
+    const WebRtcNetEQDecoder* usedCodecs,
+    WebRtc_Word16     noOfCodecs)
+{
+    // Due to WebRtcNetEQ_GetRecommendedBufferSize
+    // the following has to be int otherwise we will have compiler error
+    // if not casted
+
+    CriticalSectionScoped lock(_netEqCritSect);
+    for(WebRtc_Word16 idx = 0; idx < _numSlaves + 1; idx++)
+    {
+        if(AllocatePacketBufferByIdxSafe(usedCodecs, noOfCodecs, idx) < 0)
+        {
+            return -1;
+        }
+    }
+    return 0;
+}
+
+WebRtc_Word16
+ACMNetEQ::AllocatePacketBufferByIdxSafe(
+    const WebRtcNetEQDecoder*    usedCodecs,
+    WebRtc_Word16       noOfCodecs,
+    const WebRtc_Word16 idx)
+{
+    int maxNoPackets;
+    int bufferSizeInBytes;
+
+    if(!_isInitialized[idx])
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "AllocatePacketBufferByIdxSafe: NetEq is not initialized.");
+        return -1;
+    }
+    if (WebRtcNetEQ_GetRecommendedBufferSize(_inst[idx], usedCodecs, noOfCodecs,
+        kTCPLargeJitter , &maxNoPackets, &bufferSizeInBytes)
+        != 0)
+    {
+        LogError("GetRecommendedBufferSize", idx);
+        return -1;
+    }
+    if(_netEqPacketBuffer[idx] != NULL)
+    {
+        free(_netEqPacketBuffer[idx]);
+        _netEqPacketBuffer[idx] = NULL;
+    }
+
+    _netEqPacketBuffer[idx] = (WebRtc_Word16 *)malloc(bufferSizeInBytes);
+    if (_netEqPacketBuffer[idx] == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "AllocatePacketBufferByIdxSafe: NetEq Initialization error: could not allocate "
+            "memory for NetEq Packet Buffer");
+        return -1;
+
+    }
+    if (WebRtcNetEQ_AssignBuffer(_inst[idx], maxNoPackets, _netEqPacketBuffer[idx],
+        bufferSizeInBytes) != 0)
+    {
+        if (_netEqPacketBuffer[idx] != NULL) {
+            free(_netEqPacketBuffer[idx]);
+            _netEqPacketBuffer[idx] = NULL;
+        }
+        LogError("AssignBuffer", idx);
+        return -1;
+    }
+    return 0;
+}
+
+
+
+
+WebRtc_Word32
+ACMNetEQ::SetExtraDelay(
+    const WebRtc_Word32 delayInMS)
+{
+    CriticalSectionScoped lock(_netEqCritSect);
+
+    for(WebRtc_Word16 idx = 0; idx < _numSlaves + 1; idx++)
+    {
+        if(!_isInitialized[idx])
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "SetExtraDelay: NetEq is not initialized.");
+            return -1;
+        }
+        if(WebRtcNetEQ_SetExtraDelay(_inst[idx], delayInMS) < 0)
+        {
+            LogError("SetExtraDelay", idx);
+            return -1;
+        }
+    }
+    _extraDelay = delayInMS;
+    return 0;
+}
+
+
+WebRtc_Word32
+ACMNetEQ::SetAVTPlayout(
+    const bool enable)
+{
+    CriticalSectionScoped lock(_netEqCritSect);
+    if (_avtPlayout != enable)
+    {
+        for(WebRtc_Word16 idx = 0; idx < _numSlaves + 1; idx++)
+        {
+            if(!_isInitialized[idx])
+            {
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "SetAVTPlayout: NetEq is not initialized.");
+                return -1;
+            }
+            if(WebRtcNetEQ_SetAVTPlayout(_inst[idx], (enable) ? 1 : 0) < 0)
+            {
+                LogError("SetAVTPlayout", idx);
+                return -1;
+            }
+        }
+    }
+    _avtPlayout = enable;
+    return 0;
+}
+
+
+bool
+ACMNetEQ::AVTPlayout() const
+{
+    CriticalSectionScoped lock(_netEqCritSect);
+    return _avtPlayout;
+}
+
+WebRtc_Word32
+ACMNetEQ::CurrentSampFreqHz() const
+{
+    CriticalSectionScoped lock(_netEqCritSect);
+    if(!_isInitialized[0])
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "CurrentSampFreqHz: NetEq is not initialized.");
+        return -1;
+    }
+    return (WebRtc_Word32)(1000*_currentSampFreqKHz);
+}
+
+
+WebRtc_Word32
+ACMNetEQ::SetPlayoutMode(
+    const AudioPlayoutMode mode)
+{
+    CriticalSectionScoped lock(_netEqCritSect);
+    if(_playoutMode != mode)
+    {
+        for(WebRtc_Word16 idx = 0; idx < _numSlaves + 1; idx++)
+        {
+            if(!_isInitialized[idx])
+            {
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "SetPlayoutMode: NetEq is not initialized.");
+                return -1;
+            }
+
+            enum WebRtcNetEQPlayoutMode playoutMode = kPlayoutOff;
+            switch(mode)
+            {
+            case voice:
+                playoutMode = kPlayoutOn;
+                break;
+            case fax:
+                playoutMode = kPlayoutFax;
+                break;
+            case streaming:
+                playoutMode = kPlayoutStreaming;
+                break;
+            }
+            if(WebRtcNetEQ_SetPlayoutMode(_inst[idx], playoutMode) < 0)
+            {
+                LogError("SetPlayoutMode", idx);
+                return -1;
+            }
+        }
+        _playoutMode = mode;
+    }
+
+    return 0;
+}
+
+AudioPlayoutMode
+ACMNetEQ::PlayoutMode() const
+{
+    CriticalSectionScoped lock(_netEqCritSect);
+    return _playoutMode;
+}
+
+
+WebRtc_Word32
+ACMNetEQ::NetworkStatistics(
+    ACMNetworkStatistics* statistics) const
+{
+    WebRtcNetEQ_NetworkStatistics stats;
+    CriticalSectionScoped lock(_netEqCritSect);
+    if(!_isInitialized[0])
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "NetworkStatistics: NetEq is not initialized.");
+        return -1;
+    }
+    if(WebRtcNetEQ_GetNetworkStatistics(_inst[0], &stats) == 0)
+    {
+        statistics->currentAccelerateRate = stats.currentAccelerateRate;
+        statistics->currentBufferSize = stats.currentBufferSize;
+        statistics->jitterPeaksFound = (stats.jitterPeaksFound > 0);
+        statistics->currentDiscardRate = stats.currentDiscardRate;
+        statistics->currentExpandRate = stats.currentExpandRate;
+        statistics->currentPacketLossRate = stats.currentPacketLossRate;
+        statistics->currentPreemptiveRate = stats.currentPreemptiveRate;
+        statistics->preferredBufferSize = stats.preferredBufferSize;
+        statistics->clockDriftPPM = stats.clockDriftPPM;
+    }
+    else
+    {
+        LogError("getNetworkStatistics", 0);
+        return -1;
+    }
+    const int kArrayLen = 100;
+    int waiting_times[kArrayLen];
+    int waiting_times_len = WebRtcNetEQ_GetRawFrameWaitingTimes(
+        _inst[0], kArrayLen, waiting_times);
+    if (waiting_times_len > 0)
+    {
+        std::vector<int> waiting_times_vec(waiting_times,
+                                           waiting_times + waiting_times_len);
+        std::sort(waiting_times_vec.begin(), waiting_times_vec.end());
+        size_t size = waiting_times_vec.size();
+        assert(size == static_cast<size_t>(waiting_times_len));
+        if (size % 2 == 0)
+        {
+            statistics->medianWaitingTimeMs =
+                (waiting_times_vec[size / 2 - 1] +
+                    waiting_times_vec[size / 2]) / 2;
+        }
+        else
+        {
+            statistics->medianWaitingTimeMs = waiting_times_vec[size / 2];
+        }
+        statistics->minWaitingTimeMs = waiting_times_vec.front();
+        statistics->maxWaitingTimeMs = waiting_times_vec.back();
+        double sum = 0;
+        for (size_t i = 0; i < size; ++i) {
+          sum += waiting_times_vec[i];
+        }
+        statistics->meanWaitingTimeMs = static_cast<int>(sum / size);
+    }
+    else if (waiting_times_len == 0)
+    {
+        statistics->meanWaitingTimeMs = -1;
+        statistics->medianWaitingTimeMs = -1;
+        statistics->minWaitingTimeMs = -1;
+        statistics->maxWaitingTimeMs = -1;
+    }
+    else
+    {
+        LogError("getRawFrameWaitingTimes", 0);
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32
+ACMNetEQ::RecIn(
+    const WebRtc_UWord8*   incomingPayload,
+    const WebRtc_Word32    payloadLength,
+    const WebRtcRTPHeader& rtpInfo)
+{
+    WebRtc_Word16 payload_length = static_cast<WebRtc_Word16>(payloadLength);
+
+    // translate to NetEq struct
+    WebRtcNetEQ_RTPInfo netEqRTPInfo;
+    netEqRTPInfo.payloadType = rtpInfo.header.payloadType;
+    netEqRTPInfo.sequenceNumber = rtpInfo.header.sequenceNumber;
+    netEqRTPInfo.timeStamp = rtpInfo.header.timestamp;
+    netEqRTPInfo.SSRC = rtpInfo.header.ssrc;
+    netEqRTPInfo.markerBit = rtpInfo.header.markerBit;
+
+    CriticalSectionScoped lock(_netEqCritSect);
+    // Down-cast the time to (32-6)-bit since we only care about
+    // the least significant bits. (32-6) bits cover 2^(32-6) = 67108864 ms.
+    // we masked 6 most significant bits of 32-bit so we don't loose resolution
+    // when do the following multiplication.
+    const WebRtc_UWord32 nowInMs = static_cast<WebRtc_UWord32>(
+        TickTime::MillisecondTimestamp() & 0x03ffffff);
+    WebRtc_UWord32 recvTimestamp = static_cast<WebRtc_UWord32>
+        (_currentSampFreqKHz * nowInMs);
+
+    int status;
+    // In case of stereo payload, first half of the data should be pushed into
+    // master, and the second half into slave.
+    if (rtpInfo.type.Audio.channel == 2) {
+      payload_length = payload_length / 2;
+    }
+
+    // Check that master is initialized.
+    if(!_isInitialized[0])
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "RecIn: NetEq is not initialized.");
+        return -1;
+    }
+    // PUSH into Master
+    status = WebRtcNetEQ_RecInRTPStruct(_inst[0], &netEqRTPInfo,
+             incomingPayload, payload_length, recvTimestamp);
+    if(status < 0)
+    {
+        LogError("RecInRTPStruct", 0);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                     "RecIn: NetEq, error in pushing in Master");
+        return -1;
+    }
+
+    // If the received stream is stereo, insert second half of paket into slave.
+    if(rtpInfo.type.Audio.channel == 2)
+    {
+        if(!_isInitialized[1])
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "RecIn: NetEq is not initialized.");
+            return -1;
+        }
+        // PUSH into Slave
+        status = WebRtcNetEQ_RecInRTPStruct(_inst[1], &netEqRTPInfo,
+                                            &incomingPayload[payload_length],
+                                            payload_length,
+                                            recvTimestamp);
+        if(status < 0)
+        {
+            LogError("RecInRTPStruct", 1);
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "RecIn: NetEq, error in pushing in Slave");
+            return -1;
+        }
+    }
+
+    return 0;
+}
+
+WebRtc_Word32
+ACMNetEQ::RecOut(
+    AudioFrame& audioFrame)
+{
+    enum WebRtcNetEQOutputType type;
+    WebRtc_Word16 payloadLenSample;
+    enum WebRtcNetEQOutputType typeMaster;
+    enum WebRtcNetEQOutputType typeSlave;
+
+    WebRtc_Word16 payloadLenSampleSlave;
+
+    CriticalSectionScoped lockNetEq(_netEqCritSect);
+
+    if(!_receivedStereo)
+    {
+        if(!_isInitialized[0])
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "RecOut: NetEq is not initialized.");
+            return -1;
+        }
+        {
+            WriteLockScoped lockCodec(*_decodeLock);
+            if(WebRtcNetEQ_RecOut(_inst[0], &(audioFrame.data_[0]),
+                &payloadLenSample) != 0)
+            {
+                LogError("RecOut", 0);
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id, 
+                    "RecOut: NetEq, error in pulling out for mono case");
+
+                // Check for errors that can be recovered from:
+                // RECOUT_ERROR_SAMPLEUNDERRUN = 2003
+                int errorCode = WebRtcNetEQ_GetErrorCode(_inst[0]);
+                if(errorCode != 2003)
+                {
+                    // Cannot recover; return an error
+                    return -1;
+                }
+            }
+        }
+        WebRtcNetEQ_GetSpeechOutputType(_inst[0], &type);
+        audioFrame.num_channels_ = 1;
+    }
+    else
+    {
+        if(!_isInitialized[0] || !_isInitialized[1])
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "RecOut: NetEq is not initialized.");
+            return -1;
+        }
+        WebRtc_Word16 payloadMaster[480];
+        WebRtc_Word16 payloadSlave[480];
+        {
+            WriteLockScoped lockCodec(*_decodeLock);
+            if(WebRtcNetEQ_RecOutMasterSlave(_inst[0], payloadMaster,
+                &payloadLenSample, _masterSlaveInfo, 1) != 0)
+            {
+                LogError("RecOutMasterSlave", 0);
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "RecOut: NetEq, error in pulling out for master");
+
+                // Check for errors that can be recovered from:
+                // RECOUT_ERROR_SAMPLEUNDERRUN = 2003
+                int errorCode = WebRtcNetEQ_GetErrorCode(_inst[0]);
+                if(errorCode != 2003)
+                {
+                    // Cannot recover; return an error
+                    return -1;
+                }
+            }
+            if(WebRtcNetEQ_RecOutMasterSlave(_inst[1], payloadSlave,
+                &payloadLenSampleSlave, _masterSlaveInfo, 0) != 0)
+            {
+                LogError("RecOutMasterSlave", 1);
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "RecOut: NetEq, error in pulling out for slave");
+
+                // Check for errors that can be recovered from:
+                // RECOUT_ERROR_SAMPLEUNDERRUN = 2003
+                int errorCode = WebRtcNetEQ_GetErrorCode(_inst[1]);
+                if(errorCode != 2003)
+                {
+                    // Cannot recover; return an error
+                    return -1;
+                }
+            }
+        }
+
+        if(payloadLenSample != payloadLenSampleSlave)
+        {
+            WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, _id,
+                "RecOut: mismatch between the lenght of the decoded \
+audio by Master (%d samples) and Slave (%d samples).",
+            payloadLenSample, payloadLenSampleSlave);
+            if(payloadLenSample > payloadLenSampleSlave)
+            {
+                memset(&payloadSlave[payloadLenSampleSlave], 0,
+                    (payloadLenSample - payloadLenSampleSlave) * sizeof(WebRtc_Word16));
+            }
+        }
+
+        for(WebRtc_Word16 n = 0; n < payloadLenSample; n++)
+        {
+            audioFrame.data_[n<<1]     = payloadMaster[n];
+            audioFrame.data_[(n<<1)+1] = payloadSlave[n];
+        }
+        audioFrame.num_channels_ = 2;
+
+        WebRtcNetEQ_GetSpeechOutputType(_inst[0], &typeMaster);
+        WebRtcNetEQ_GetSpeechOutputType(_inst[1], &typeSlave);
+        if((typeMaster == kOutputNormal) ||
+            (typeSlave == kOutputNormal))
+        {
+            type = kOutputNormal;
+        }
+        else
+        {
+            type = typeMaster;
+        }
+    }
+
+    audioFrame.samples_per_channel_ = static_cast<WebRtc_UWord16>(payloadLenSample);
+    // NetEq always returns 10 ms of audio.
+    _currentSampFreqKHz = static_cast<float>(audioFrame.samples_per_channel_) / 10.0f;
+    audioFrame.sample_rate_hz_ = audioFrame.samples_per_channel_ * 100;
+    if(_vadStatus)
+    {
+        if(type == kOutputVADPassive)
+        {
+            audioFrame.vad_activity_ = AudioFrame::kVadPassive;
+            audioFrame.speech_type_ = AudioFrame::kNormalSpeech;
+        }
+        else if(type == kOutputNormal)
+        {
+            audioFrame.vad_activity_ = AudioFrame::kVadActive;
+            audioFrame.speech_type_ = AudioFrame::kNormalSpeech;
+        }
+        else if(type == kOutputPLC)
+        {
+            audioFrame.vad_activity_ = _previousAudioActivity;
+            audioFrame.speech_type_  = AudioFrame::kPLC;
+        }
+        else if(type == kOutputCNG)
+        {
+            audioFrame.vad_activity_ = AudioFrame::kVadPassive;
+            audioFrame.speech_type_  = AudioFrame::kCNG;
+        }
+        else
+        {
+            audioFrame.vad_activity_ = AudioFrame::kVadPassive;
+            audioFrame.speech_type_  = AudioFrame::kPLCCNG;
+        }
+    }
+    else
+    {
+        // Always return kVadUnknown when receive VAD is inactive
+        audioFrame.vad_activity_ = AudioFrame::kVadUnknown;
+
+        if(type == kOutputNormal)
+        {
+            audioFrame.speech_type_  = AudioFrame::kNormalSpeech;
+        }
+        else if(type == kOutputPLC)
+        {
+            audioFrame.speech_type_  = AudioFrame::kPLC;
+        }
+        else if(type == kOutputPLCtoCNG)
+        {
+            audioFrame.speech_type_  = AudioFrame::kPLCCNG;
+        }
+        else if(type == kOutputCNG)
+        {
+            audioFrame.speech_type_  = AudioFrame::kCNG;
+        }
+        else
+        {
+            // type is kOutputVADPassive which
+            // we don't expect to get if _vadStatus is false
+            WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, _id,
+                "RecOut: NetEq returned kVadPassive while _vadStatus is false.");
+            audioFrame.vad_activity_ = AudioFrame::kVadUnknown;
+            audioFrame.speech_type_  = AudioFrame::kNormalSpeech;
+        }
+    }
+    _previousAudioActivity = audioFrame.vad_activity_;
+
+    return 0;
+}
+
+// When ACMGenericCodec has set the codec specific parameters in codecDef
+// it calls AddCodec() to add the new codec to the NetEQ database.
+WebRtc_Word32
+ACMNetEQ::AddCodec(
+    WebRtcNetEQ_CodecDef* codecDef,
+    bool                  toMaster)
+{
+    if (codecDef == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "ACMNetEQ::AddCodec: error, codecDef is NULL");
+        return -1;
+    }
+    CriticalSectionScoped lock(_netEqCritSect);
+
+    WebRtc_Word16 idx;
+    if(toMaster)
+    {
+        idx = 0;
+    }
+    else
+    {
+        idx = 1;
+    }
+
+    if(!_isInitialized[idx])
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "ACMNetEQ::AddCodec: NetEq is not initialized.");
+        return -1;
+    }
+    if(WebRtcNetEQ_CodecDbAdd(_inst[idx], codecDef) < 0)
+    {
+        LogError("CodecDB_Add", idx);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "ACMNetEQ::AddCodec: NetEq, error in adding codec");
+        return -1;
+    }
+    else
+    {
+        return 0;
+    }
+}
+
+// Creates a Word16 RTP packet out of a Word8 payload and an rtp info struct.
+// Must be byte order safe.
+void
+ACMNetEQ::RTPPack(
+    WebRtc_Word16*         rtpPacket,
+    const WebRtc_Word8*    payload,
+    const WebRtc_Word32    payloadLengthW8,
+    const WebRtcRTPHeader& rtpInfo)
+{
+    WebRtc_Word32 idx = 0;
+    WEBRTC_SPL_SET_BYTE(rtpPacket, (WebRtc_Word8)0x80, idx);
+    idx++;
+
+    WEBRTC_SPL_SET_BYTE(rtpPacket, rtpInfo.header.payloadType, idx);
+    idx++;
+
+    WEBRTC_SPL_SET_BYTE(rtpPacket, WEBRTC_SPL_GET_BYTE(
+        &(rtpInfo.header.sequenceNumber), 1), idx);
+    idx++;
+
+    WEBRTC_SPL_SET_BYTE(rtpPacket, WEBRTC_SPL_GET_BYTE(
+        &(rtpInfo.header.sequenceNumber), 0), idx);
+    idx++;
+
+    WEBRTC_SPL_SET_BYTE(rtpPacket, WEBRTC_SPL_GET_BYTE(
+        &(rtpInfo.header.timestamp), 3), idx);
+    idx++;
+
+    WEBRTC_SPL_SET_BYTE(rtpPacket, WEBRTC_SPL_GET_BYTE(
+        &(rtpInfo.header.timestamp), 2), idx);
+    idx++;
+
+    WEBRTC_SPL_SET_BYTE(rtpPacket, WEBRTC_SPL_GET_BYTE(
+        &(rtpInfo.header.timestamp), 1), idx);
+    idx++;
+
+    WEBRTC_SPL_SET_BYTE(rtpPacket, WEBRTC_SPL_GET_BYTE(
+        &(rtpInfo.header.timestamp), 0), idx);
+    idx++;
+
+    WEBRTC_SPL_SET_BYTE(rtpPacket, WEBRTC_SPL_GET_BYTE(
+        &(rtpInfo.header.ssrc), 3), idx);
+    idx++;
+
+    WEBRTC_SPL_SET_BYTE(rtpPacket, WEBRTC_SPL_GET_BYTE(
+        &(rtpInfo.header.ssrc), 2), idx);
+    idx++;
+
+    WEBRTC_SPL_SET_BYTE(rtpPacket, WEBRTC_SPL_GET_BYTE(
+        &(rtpInfo.header.ssrc), 1), idx);
+    idx++;
+
+    WEBRTC_SPL_SET_BYTE(rtpPacket, WEBRTC_SPL_GET_BYTE(
+        &(rtpInfo.header.ssrc), 0), idx);
+    idx++;
+
+    for (WebRtc_Word16 i=0; i < payloadLengthW8; i++)
+    {
+        WEBRTC_SPL_SET_BYTE(rtpPacket, payload[i], idx);
+        idx++;
+    }
+    if (payloadLengthW8 & 1)
+    {
+        // Our 16 bits buffer is one byte too large, set that
+        // last byte to zero.
+        WEBRTC_SPL_SET_BYTE(rtpPacket, 0x0, idx);
+    }
+}
+
+WebRtc_Word16
+ACMNetEQ::EnableVAD()
+{
+    CriticalSectionScoped lock(_netEqCritSect);
+    if (_vadStatus)
+    {
+        return 0;
+    }
+    for(WebRtc_Word16 idx = 0; idx < _numSlaves + 1; idx++)
+    {
+        if(!_isInitialized[idx])
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "SetVADStatus: NetEq is not initialized.");
+            return -1;
+        }
+        // VAD was off and we have to turn it on
+        if(EnableVADByIdxSafe(idx) < 0)
+        {
+            return -1;
+        }
+
+        // Set previous VAD status to PASSIVE
+        _previousAudioActivity = AudioFrame::kVadPassive;
+    }
+    _vadStatus = true;
+    return 0;
+}
+
+
+ACMVADMode
+ACMNetEQ::VADMode() const
+{
+    CriticalSectionScoped lock(_netEqCritSect);
+    return _vadMode;
+}
+
+
+WebRtc_Word16
+ACMNetEQ::SetVADMode(
+    const ACMVADMode mode)
+{
+    CriticalSectionScoped lock(_netEqCritSect);
+    if((mode < VADNormal) || (mode > VADVeryAggr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "SetVADMode: NetEq error: could not set VAD mode, mode is not supported");
+        return -1;
+    }
+    else
+    {
+        for(WebRtc_Word16 idx = 0; idx < _numSlaves + 1; idx++)
+        {
+            if(!_isInitialized[idx])
+            {
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                    "SetVADMode: NetEq is not initialized.");
+                return -1;
+            }
+            if(WebRtcNetEQ_SetVADMode(_inst[idx], mode) < 0)
+            {
+                LogError("SetVADmode", idx);
+                return -1;
+            }
+        }
+        _vadMode = mode;
+        return 0;
+    }
+}
+
+
+WebRtc_Word32
+ACMNetEQ::FlushBuffers()
+{
+    CriticalSectionScoped lock(_netEqCritSect);
+    for(WebRtc_Word16 idx = 0; idx < _numSlaves + 1; idx++)
+    {
+        if(!_isInitialized[idx])
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "FlushBuffers: NetEq is not initialized.");
+            return -1;
+        }
+        if(WebRtcNetEQ_FlushBuffers(_inst[idx]) < 0)
+        {
+            LogError("FlushBuffers", idx);
+            return -1;
+        }
+    }
+    return 0;
+}
+
+WebRtc_Word16
+ACMNetEQ::RemoveCodec(
+    WebRtcNetEQDecoder codecIdx,
+    bool               isStereo)
+{
+    // sanity check
+    if((codecIdx <= kDecoderReservedStart) ||
+        (codecIdx >= kDecoderReservedEnd))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "RemoveCodec: NetEq error: could not Remove Codec, codec index out of range");
+        return -1;
+    }
+    CriticalSectionScoped lock(_netEqCritSect);
+    if(!_isInitialized[0])
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "RemoveCodec: NetEq is not initialized.");
+        return -1;
+    }
+
+    if(WebRtcNetEQ_CodecDbRemove(_inst[0], codecIdx) < 0)
+    {
+        LogError("CodecDB_Remove", 0);
+        return -1;
+    }
+
+    if(isStereo)
+    {
+        if(WebRtcNetEQ_CodecDbRemove(_inst[1], codecIdx) < 0)
+        {
+            LogError("CodecDB_Remove", 1);
+            return -1;
+        }
+    }
+
+    return 0;
+}
+
+WebRtc_Word16
+ACMNetEQ::SetBackgroundNoiseMode(
+    const ACMBackgroundNoiseMode mode)
+{
+    CriticalSectionScoped lock(_netEqCritSect);
+    for(WebRtc_Word16 idx = 0; idx < _numSlaves + 1; idx++)
+    {
+        if(!_isInitialized[idx])
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "SetBackgroundNoiseMode: NetEq is not initialized.");
+            return -1;
+        }
+        if(WebRtcNetEQ_SetBGNMode(_inst[idx], (WebRtcNetEQBGNMode)mode) < 0)
+        {
+            LogError("SetBGNMode", idx);
+            return -1;
+        }
+    }
+    return 0;
+}
+
+WebRtc_Word16
+ACMNetEQ::BackgroundNoiseMode(
+    ACMBackgroundNoiseMode& mode)
+{
+    WebRtcNetEQBGNMode myMode;
+    CriticalSectionScoped lock(_netEqCritSect);
+    if(!_isInitialized[0])
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+            "BackgroundNoiseMode: NetEq is not initialized.");
+        return -1;
+    }
+    if(WebRtcNetEQ_GetBGNMode(_inst[0], &myMode) < 0)
+    {
+        LogError("WebRtcNetEQ_GetBGNMode", 0);
+        return -1;
+    }
+    else
+    {
+        mode = (ACMBackgroundNoiseMode)myMode;
+    }
+    return 0;
+}
+
+void 
+ACMNetEQ::SetUniqueId(
+    WebRtc_Word32 id)
+{
+    CriticalSectionScoped lock(_netEqCritSect);
+    _id = id;
+}
+
+
+void
+ACMNetEQ::LogError(
+    const char* neteqFuncName,
+    const WebRtc_Word16 idx) const
+{
+    char errorName[NETEQ_ERR_MSG_LEN_BYTE];
+    char myFuncName[50];
+    int neteqErrorCode = WebRtcNetEQ_GetErrorCode(_inst[idx]);
+    WebRtcNetEQ_GetErrorName(neteqErrorCode, errorName, NETEQ_ERR_MSG_LEN_BYTE - 1);
+    strncpy(myFuncName, neteqFuncName, 49);
+    errorName[NETEQ_ERR_MSG_LEN_BYTE - 1] = '\0';
+    myFuncName[49] = '\0';
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+        "NetEq-%d Error in function %s, error-code: %d, error-string: %s",
+        idx,
+        myFuncName,
+        neteqErrorCode,
+        errorName);
+}
+
+
+WebRtc_Word32
+ACMNetEQ::PlayoutTimestamp(
+    WebRtc_UWord32& timestamp)
+{
+    CriticalSectionScoped lock(_netEqCritSect);
+    if(WebRtcNetEQ_GetSpeechTimeStamp(_inst[0], &timestamp) < 0)
+    {
+        LogError("GetSpeechTimeStamp", 0);
+        return -1;
+    }
+    else
+    {
+        return 0;
+    }
+}
+
+void ACMNetEQ::RemoveSlaves() {
+  CriticalSectionScoped lock(_netEqCritSect);
+  RemoveSlavesSafe();
+}
+
+void ACMNetEQ::RemoveSlavesSafe() {
+  for (int i = 1; i < _numSlaves + 1; i++) {
+    RemoveNetEQSafe(i);
+  }
+
+  if (_masterSlaveInfo != NULL) {
+    free(_masterSlaveInfo);
+    _masterSlaveInfo = NULL;
+  }
+  _numSlaves = 0;
+}
+
+void ACMNetEQ::RemoveNetEQSafe(int index) {
+  if (_instMem[index] != NULL) {
+    free(_instMem[index]);
+    _instMem[index] = NULL;
+  }
+  if (_netEqPacketBuffer[index] != NULL) {
+    free(_netEqPacketBuffer[index]);
+    _netEqPacketBuffer[index] = NULL;
+  }
+  if (_ptrVADInst[index] != NULL) {
+    WebRtcVad_Free(_ptrVADInst[index]);
+    _ptrVADInst[index] = NULL;
+  }
+}
+
+WebRtc_Word16
+ACMNetEQ::AddSlave(
+    const WebRtcNetEQDecoder* usedCodecs,
+    WebRtc_Word16       noOfCodecs)
+{
+    CriticalSectionScoped lock(_netEqCritSect);
+    const WebRtc_Word16 slaveIdx = 1;
+    if(_numSlaves < 1)
+    {
+        // initialize the receiver, this also sets up VAD.
+        if(InitByIdxSafe(slaveIdx) < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "AddSlave: AddSlave Failed, Could not Initialize");
+            return -1;
+        }
+
+        // Allocate buffer.
+        if(AllocatePacketBufferByIdxSafe(usedCodecs, noOfCodecs, slaveIdx) < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "AddSlave: AddSlave Failed, Could not Allocate Packet Buffer");
+            return -1;
+        }
+
+        if(_masterSlaveInfo != NULL)
+        {
+            free(_masterSlaveInfo);
+            _masterSlaveInfo = NULL;
+        }
+        int msInfoSize = WebRtcNetEQ_GetMasterSlaveInfoSize();
+        _masterSlaveInfo = malloc(msInfoSize);
+
+        if(_masterSlaveInfo == NULL)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "AddSlave: AddSlave Failed, Could not Allocate memory for Master-Slave Info");
+            return -1;
+        }
+
+        // We accept this as initialized NetEQ, the rest is to synchronize
+        // Slave with Master.
+        _numSlaves = 1;
+        _isInitialized[slaveIdx] = true;
+
+        // Set Slave delay as all other instances.
+        if(WebRtcNetEQ_SetExtraDelay(_inst[slaveIdx], _extraDelay) < 0)
+        {
+            LogError("SetExtraDelay", slaveIdx);
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "AddSlave: AddSlave Failed, Could not set delay");
+            return -1;
+        }
+
+        // Set AVT
+        if(WebRtcNetEQ_SetAVTPlayout(_inst[slaveIdx], (_avtPlayout) ? 1 : 0) < 0)
+        {
+            LogError("SetAVTPlayout", slaveIdx);
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "AddSlave: AddSlave Failed, Could not set AVT playout.");
+            return -1;
+        }
+
+        // Set Background Noise
+        WebRtcNetEQBGNMode currentMode;
+        if(WebRtcNetEQ_GetBGNMode(_inst[0], &currentMode) < 0)
+        {
+            LogError("GetBGNMode", 0);
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "AAddSlave: AddSlave Failed, Could not Get BGN form Master.");
+            return -1;
+        }
+
+        if(WebRtcNetEQ_SetBGNMode(_inst[slaveIdx], (WebRtcNetEQBGNMode)currentMode) < 0)
+        {
+            LogError("SetBGNMode", slaveIdx);
+             WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "AddSlave: AddSlave Failed, Could not set BGN mode.");
+           return -1;
+        }
+
+        enum WebRtcNetEQPlayoutMode playoutMode = kPlayoutOff;
+        switch(_playoutMode)
+        {
+        case voice:
+            playoutMode = kPlayoutOn;
+            break;
+        case fax:
+            playoutMode = kPlayoutFax;
+            break;
+        case streaming:
+            playoutMode = kPlayoutStreaming;
+            break;
+        }
+        if(WebRtcNetEQ_SetPlayoutMode(_inst[slaveIdx], playoutMode) < 0)
+        {
+            LogError("SetPlayoutMode", 1);
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                "AddSlave: AddSlave Failed, Could not Set Playout Mode.");
+            return -1;
+        }
+    }
+
+    return 0;
+}
+
+void
+ACMNetEQ::SetReceivedStereo(
+    bool receivedStereo)
+{
+    CriticalSectionScoped lock(_netEqCritSect);
+    _receivedStereo = receivedStereo;
+}
+
+WebRtc_UWord8
+ACMNetEQ::NumSlaves()
+{
+    CriticalSectionScoped lock(_netEqCritSect);
+    return _numSlaves;
+}
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/acm_neteq.h b/src/modules/audio_coding/main/source/acm_neteq.h
new file mode 100644
index 0000000..db6c0fd
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_neteq.h
@@ -0,0 +1,369 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_NETEQ_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_NETEQ_H_
+
+#include "audio_coding_module.h"
+#include "audio_coding_module_typedefs.h"
+#include "engine_configurations.h"
+#include "module_common_types.h"
+#include "typedefs.h"
+#include "webrtc_neteq.h"
+#include "webrtc_vad.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class RWLockWrapper;
+struct CodecInst;
+enum AudioPlayoutMode;
+enum ACMSpeechType;
+
+#define MAX_NUM_SLAVE_NETEQ 1
+
+class ACMNetEQ
+{
+public:
+    // Constructor of the class
+    ACMNetEQ();
+
+    // Destructor of the class.
+    ~ACMNetEQ();
+
+    //
+    // Init()
+    // Allocates memory for NetEQ and VAD and initializes them.
+    //
+    // Return value              : 0 if ok.
+    //                            -1 if NetEQ or VAD returned an error or
+    //                            if out of memory.
+    //
+    WebRtc_Word32 Init();
+
+    //
+    // RecIn()
+    // Gives the payload to NetEQ.
+    //
+    // Input:
+    //   - incomingPayload       : Incoming audio payload.
+    //   - payloadLength         : Length of incoming audio payload.
+    //   - rtpInfo               : RTP header for the incoming payload containing
+    //                             information about payload type, sequence number,
+    //                             timestamp, ssrc and marker bit.
+    //
+    // Return value              : 0 if ok.
+    //                            <0 if NetEQ returned an error.
+    //
+    WebRtc_Word32 RecIn(
+        const WebRtc_UWord8*    incomingPayload,
+        const WebRtc_Word32    payloadLength,
+        const WebRtcRTPHeader&   rtpInfo);
+
+    //
+    // RecOut()
+    // Asks NetEQ for 10 ms of decoded audio.
+    //
+    // Input:
+    //   -audioFrame             : an audio frame were output data and
+    //                             associated parameters are written to.
+    //
+    // Return value              : 0 if ok.
+    //                            -1 if NetEQ returned an error.
+    //
+    WebRtc_Word32 RecOut(
+        AudioFrame& audioFrame);
+
+    //
+    // AddCodec()
+    // Adds a new codec to the NetEQ codec database.
+    //
+    // Input:
+    //   - codecDef              : The codec to be added.
+    //   - toMaster              : true if the codec has to be added to Master
+    //                             NetEq, otherwise will be added to the Slave
+    //                             NetEQ.
+    //
+    // Return value              : 0 if ok.
+    //                            <0 if NetEQ returned an error.
+    //
+    WebRtc_Word32 AddCodec(
+        WebRtcNetEQ_CodecDef *codecDef,
+        bool                  toMaster = true);
+
+    //
+    // AllocatePacketBuffer()
+    // Allocates the NetEQ packet buffer.
+    //
+    // Input:
+    //   - usedCodecs            : An array of the codecs to be used by NetEQ.
+    //   - noOfCodecs            : Number of codecs in usedCodecs.
+    //
+    // Return value              : 0 if ok.
+    //                            <0 if NetEQ returned an error.
+    //
+    WebRtc_Word32 AllocatePacketBuffer(
+        const WebRtcNetEQDecoder* usedCodecs,
+        WebRtc_Word16    noOfCodecs);
+
+    //
+    // SetExtraDelay()
+    // Sets an delayInMS milliseconds extra delay in NetEQ.
+    //
+    // Input:
+    //   - delayInMS             : Extra delay in milliseconds.
+    //
+    // Return value              : 0 if ok.
+    //                            <0 if NetEQ returned an error.
+    //
+    WebRtc_Word32 SetExtraDelay(
+        const WebRtc_Word32 delayInMS);
+
+    //
+    // SetAVTPlayout()
+    // Enable/disable playout of AVT payloads.
+    //
+    // Input:
+    //   - enable                : Enable if true, disable if false.
+    //
+    // Return value              : 0 if ok.
+    //                            <0 if NetEQ returned an error.
+    //
+    WebRtc_Word32 SetAVTPlayout(
+        const bool enable);
+
+    //
+    // AVTPlayout()
+    // Get the current AVT playout state.
+    //
+    // Return value              : True if AVT playout is enabled.
+    //                             False if AVT playout is disabled.
+    //
+    bool AVTPlayout() const;
+
+    //
+    // CurrentSampFreqHz()
+    // Get the current sampling frequency in Hz.
+    //
+    // Return value              : Sampling frequency in Hz.
+    //
+    WebRtc_Word32 CurrentSampFreqHz() const;
+
+    //
+    // SetPlayoutMode()
+    // Sets the playout mode to voice or fax.
+    //
+    // Input:
+    //   - mode                  : The playout mode to be used, voice,
+    //                             fax, or streaming.
+    //
+    // Return value              : 0 if ok.
+    //                            <0 if NetEQ returned an error.
+    //
+    WebRtc_Word32 SetPlayoutMode(
+        const AudioPlayoutMode mode);
+
+    //
+    // PlayoutMode()
+    // Get the current playout mode.
+    //
+    // Return value              : The current playout mode.
+    //
+    AudioPlayoutMode PlayoutMode() const;
+
+    //
+    // NetworkStatistics()
+    // Get the current network statistics from NetEQ.
+    //
+    // Output:
+    //   - statistics            : The current network statistics.
+    //
+    // Return value              : 0 if ok.
+    //                            <0 if NetEQ returned an error.
+    //
+    WebRtc_Word32 NetworkStatistics(
+        ACMNetworkStatistics* statistics) const;
+
+    //
+    // VADMode()
+    // Get the current VAD Mode.
+    //
+    // Return value              : The current VAD mode.
+    //
+    ACMVADMode VADMode() const;
+
+    //
+    // SetVADMode()
+    // Set the VAD mode.
+    //
+    // Input:
+    //   - mode                  : The new VAD mode.
+    //
+    // Return value              : 0 if ok.
+    //                            -1 if an error occurred.
+    //
+    WebRtc_Word16 SetVADMode(
+        const ACMVADMode mode);
+
+    //
+    // DecodeLock()
+    // Get the decode lock used to protect decoder instances while decoding.
+    //
+    // Return value              : Pointer to the decode lock.
+    //
+    RWLockWrapper* DecodeLock() const
+    {
+        return _decodeLock;
+    }
+
+    //
+    // FlushBuffers()
+    // Flushes the NetEQ packet and speech buffers.
+    //
+    // Return value              : 0 if ok.
+    //                            -1 if NetEQ returned an error.
+    //
+    WebRtc_Word32 FlushBuffers();
+
+    //
+    // RemoveCodec()
+    // Removes a codec from the NetEQ codec database.
+    //
+    // Input:
+    //   - codecIdx              : Codec to be removed.
+    //
+    // Return value              : 0 if ok.
+    //                            -1 if an error occurred.
+    //
+    WebRtc_Word16 RemoveCodec(
+        WebRtcNetEQDecoder codecIdx,
+        bool isStereo = false);
+
+
+    //
+    // SetBackgroundNoiseMode()
+    // Set the mode of the background noise.
+    //
+    // Input:
+    //   - mode                  : an enumerator specifying the mode of the
+    //                             background noise.
+    //
+    // Return value              : 0 if succeeded,
+    //                            -1 if failed to set the mode.
+    //
+    WebRtc_Word16 SetBackgroundNoiseMode(
+        const ACMBackgroundNoiseMode mode);
+
+    //
+    // BackgroundNoiseMode()
+    // return the mode of the background noise.
+    //
+    // Return value              : The mode of background noise.
+    //
+    WebRtc_Word16 BackgroundNoiseMode(
+        ACMBackgroundNoiseMode& mode);
+
+    void SetUniqueId(
+        WebRtc_Word32 id);
+
+    WebRtc_Word32 PlayoutTimestamp(
+        WebRtc_UWord32& timestamp);
+
+    void SetReceivedStereo(
+        bool receivedStereo);
+
+    WebRtc_UWord8 NumSlaves();
+
+    enum JB {masterJB = 0, slaveJB = 1};
+
+    // Delete all slaves.
+    void RemoveSlaves();
+
+    WebRtc_Word16 AddSlave(
+        const WebRtcNetEQDecoder*    usedCodecs,
+        WebRtc_Word16       noOfCodecs);
+
+private:
+    //
+    // RTPPack()
+    // Creates a Word16 RTP packet out of the payload data in Word16 and
+    // a WebRtcRTPHeader.
+    //
+    // Input:
+    //   - payload               : Payload to be packetized.
+    //   - payloadLengthW8       : Length of the payload in bytes.
+    //   - rtpInfo               : RTP header struct.
+    //
+    // Output:
+    //   - rtpPacket             : The RTP packet.
+    //
+    static void RTPPack(
+        WebRtc_Word16*         rtpPacket,
+        const WebRtc_Word8*    payload,
+        const WebRtc_Word32    payloadLengthW8,
+        const WebRtcRTPHeader& rtpInfo);
+
+    void LogError(
+        const char* neteqFuncName,
+        const WebRtc_Word16 idx) const;
+
+    WebRtc_Word16 InitByIdxSafe(
+        const WebRtc_Word16 idx);
+
+    // EnableVAD()
+    // Enable VAD.
+    //
+    // Return value              : 0 if ok.
+    //                            -1 if an error occurred.
+    //
+    WebRtc_Word16 EnableVAD();
+
+    WebRtc_Word16 EnableVADByIdxSafe(
+        const WebRtc_Word16 idx);
+
+    WebRtc_Word16 AllocatePacketBufferByIdxSafe(
+        const WebRtcNetEQDecoder* usedCodecs,
+        WebRtc_Word16       noOfCodecs,
+        const WebRtc_Word16 idx);
+
+    // Delete the NetEQ corresponding to |index|.
+    void RemoveNetEQSafe(int index);
+
+    void RemoveSlavesSafe();
+
+    void*                   _inst[MAX_NUM_SLAVE_NETEQ + 1];
+    void*                   _instMem[MAX_NUM_SLAVE_NETEQ + 1];
+
+    WebRtc_Word16*          _netEqPacketBuffer[MAX_NUM_SLAVE_NETEQ + 1];
+
+    WebRtc_Word32           _id;
+    float                   _currentSampFreqKHz;
+    bool                    _avtPlayout;
+    AudioPlayoutMode        _playoutMode;
+    CriticalSectionWrapper* _netEqCritSect;
+
+    WebRtcVadInst*          _ptrVADInst[MAX_NUM_SLAVE_NETEQ + 1];
+
+    bool                    _vadStatus;
+    ACMVADMode              _vadMode;
+    RWLockWrapper*          _decodeLock;
+    bool                    _isInitialized[MAX_NUM_SLAVE_NETEQ + 1];
+    WebRtc_UWord8           _numSlaves;
+    bool                    _receivedStereo;
+    void*                   _masterSlaveInfo;
+    AudioFrame::VADActivity _previousAudioActivity;
+    WebRtc_Word32           _extraDelay;
+
+    CriticalSectionWrapper* _callbackCritSect;
+};
+
+} //namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_NETEQ_H_
diff --git a/src/modules/audio_coding/main/source/acm_neteq_unittest.cc b/src/modules/audio_coding/main/source/acm_neteq_unittest.cc
new file mode 100644
index 0000000..8f73592
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_neteq_unittest.cc
@@ -0,0 +1,147 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains unit tests for ACM's NetEQ wrapper (class ACMNetEQ).
+
+#include <stdlib.h>
+
+#include "gtest/gtest.h"
+#include "modules/audio_coding/codecs/pcm16b/include/pcm16b.h"
+#include "modules/audio_coding/main/interface/audio_coding_module_typedefs.h"
+#include "modules/audio_coding/main/source/acm_codec_database.h"
+#include "modules/audio_coding/main/source/acm_neteq.h"
+#include "modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
+#include "modules/interface/module_common_types.h"
+#include "typedefs.h"  // NOLINT(build/include)
+
+namespace webrtc {
+
+class AcmNetEqTest : public ::testing::Test {
+ protected:
+  static const size_t kMaxPayloadLen = 5760;  // 60 ms, 48 kHz, 16 bit samples.
+  static const int kPcm16WbPayloadType = 94;
+  AcmNetEqTest() {}
+  virtual void SetUp();
+  virtual void TearDown() {}
+
+  void InsertZeroPacket(uint16_t sequence_number,
+                        uint32_t timestamp,
+                        uint8_t payload_type,
+                        uint32_t ssrc,
+                        bool marker_bit,
+                        size_t len_payload_bytes);
+  void PullData(int expected_num_samples);
+
+  ACMNetEQ neteq_;
+};
+
+void AcmNetEqTest::SetUp() {
+  ASSERT_EQ(0, neteq_.Init());
+  ASSERT_EQ(0, neteq_.AllocatePacketBuffer(ACMCodecDB::NetEQDecoders(),
+                                           ACMCodecDB::kNumCodecs));
+  WebRtcNetEQ_CodecDef codec_def;
+  SET_CODEC_PAR(codec_def, kDecoderPCM16Bwb, kPcm16WbPayloadType, NULL, 16000);
+  SET_PCM16B_WB_FUNCTIONS(codec_def);
+  ASSERT_EQ(0, neteq_.AddCodec(&codec_def, true));
+}
+
+void AcmNetEqTest::InsertZeroPacket(uint16_t sequence_number,
+                                    uint32_t timestamp,
+                                    uint8_t payload_type,
+                                    uint32_t ssrc,
+                                    bool marker_bit,
+                                    size_t len_payload_bytes) {
+  ASSERT_TRUE(len_payload_bytes <= kMaxPayloadLen);
+  uint16_t payload[kMaxPayloadLen] = {0};
+  WebRtcRTPHeader rtp_header;
+  rtp_header.header.sequenceNumber = sequence_number;
+  rtp_header.header.timestamp = timestamp;
+  rtp_header.header.ssrc = ssrc;
+  rtp_header.header.payloadType = payload_type;
+  rtp_header.header.markerBit = marker_bit;
+  rtp_header.type.Audio.channel = 1;
+  ASSERT_EQ(0, neteq_.RecIn(reinterpret_cast<WebRtc_UWord8*>(payload),
+                            len_payload_bytes, rtp_header));
+}
+
+void AcmNetEqTest::PullData(int expected_num_samples) {
+  AudioFrame out_frame;
+  ASSERT_EQ(0, neteq_.RecOut(out_frame));
+  ASSERT_EQ(expected_num_samples, out_frame.samples_per_channel_);
+}
+
+TEST_F(AcmNetEqTest, NetworkStatistics) {
+  // Use fax mode to avoid time-scaling. This is to simplify the testing of
+  // packet waiting times in the packet buffer.
+  neteq_.SetPlayoutMode(fax);
+  // Insert 31 dummy packets at once. Each packet contains 10 ms 16 kHz audio.
+  int num_frames = 30;
+  const int kSamples = 10 * 16;
+  const int kPayloadBytes = kSamples * 2;
+  int i, j;
+  for (i = 0; i < num_frames; ++i) {
+    InsertZeroPacket(i, i * kSamples, kPcm16WbPayloadType, 0x1234, false,
+                     kPayloadBytes);
+  }
+  // Pull out data once.
+  PullData(kSamples);
+  // Insert one more packet (to produce different mean and median).
+  i = num_frames;
+  InsertZeroPacket(i, i * kSamples, kPcm16WbPayloadType, 0x1234, false,
+                   kPayloadBytes);
+  // Pull out all data.
+  for (j = 1; j < num_frames + 1; ++j) {
+    PullData(kSamples);
+  }
+
+  ACMNetworkStatistics stats;
+  ASSERT_EQ(0, neteq_.NetworkStatistics(&stats));
+  EXPECT_EQ(0, stats.currentBufferSize);
+  EXPECT_EQ(0, stats.preferredBufferSize);
+  EXPECT_FALSE(stats.jitterPeaksFound);
+  EXPECT_EQ(0, stats.currentPacketLossRate);
+  EXPECT_EQ(0, stats.currentDiscardRate);
+  EXPECT_EQ(0, stats.currentExpandRate);
+  EXPECT_EQ(0, stats.currentPreemptiveRate);
+  EXPECT_EQ(0, stats.currentAccelerateRate);
+  EXPECT_EQ(-916, stats.clockDriftPPM);  // Initial value is slightly off.
+  EXPECT_EQ(300, stats.maxWaitingTimeMs);
+  EXPECT_EQ(10, stats.minWaitingTimeMs);
+  EXPECT_EQ(159, stats.meanWaitingTimeMs);
+  EXPECT_EQ(160, stats.medianWaitingTimeMs);
+}
+
+TEST_F(AcmNetEqTest, TestZeroLengthWaitingTimesVector) {
+  // Insert one packet.
+  const int kSamples = 10 * 16;
+  const int kPayloadBytes = kSamples * 2;
+  int i = 0;
+  InsertZeroPacket(i, i * kSamples, kPcm16WbPayloadType, 0x1234, false,
+                   kPayloadBytes);
+  // Do not pull out any data.
+
+  ACMNetworkStatistics stats;
+  ASSERT_EQ(0, neteq_.NetworkStatistics(&stats));
+  EXPECT_EQ(0, stats.currentBufferSize);
+  EXPECT_EQ(0, stats.preferredBufferSize);
+  EXPECT_FALSE(stats.jitterPeaksFound);
+  EXPECT_EQ(0, stats.currentPacketLossRate);
+  EXPECT_EQ(0, stats.currentDiscardRate);
+  EXPECT_EQ(0, stats.currentExpandRate);
+  EXPECT_EQ(0, stats.currentPreemptiveRate);
+  EXPECT_EQ(0, stats.currentAccelerateRate);
+  EXPECT_EQ(-916, stats.clockDriftPPM);  // Initial value is slightly off.
+  EXPECT_EQ(-1, stats.minWaitingTimeMs);
+  EXPECT_EQ(-1, stats.maxWaitingTimeMs);
+  EXPECT_EQ(-1, stats.meanWaitingTimeMs);
+  EXPECT_EQ(-1, stats.medianWaitingTimeMs);
+}
+
+}  // namespace
diff --git a/src/modules/audio_coding/main/source/acm_opus.cc b/src/modules/audio_coding/main/source/acm_opus.cc
new file mode 100644
index 0000000..87bdd8b
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_opus.cc
@@ -0,0 +1,449 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "acm_opus.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+#ifdef WEBRTC_CODEC_OPUS
+    // NOTE! Opus is not included in the open-source package. Modify this file or your codec
+    // API to match the function call and name of used Opus API file.
+    // #include "opus_interface.h"
+#endif
+
+namespace webrtc
+{
+
+#ifndef WEBRTC_CODEC_OPUS
+
+ACMOPUS::ACMOPUS(WebRtc_Word16 /* codecID */)
+    : _encoderInstPtr(NULL),
+      _decoderInstPtr(NULL),
+      _mySampFreq(0),
+      _myRate(0),
+      _opusMode(0),
+      _flagVBR(0) {
+  return;
+}
+
+
+ACMOPUS::~ACMOPUS()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMOPUS::InternalEncode(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16* /* bitStreamLenByte */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMOPUS::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMOPUS::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+
+WebRtc_Word16
+ACMOPUS::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+
+WebRtc_Word32
+ACMOPUS::CodecDef(
+    WebRtcNetEQ_CodecDef& /* codecDef  */,
+    const CodecInst&      /* codecInst */)
+{
+    return -1;
+}
+
+
+ACMGenericCodec*
+ACMOPUS::CreateInstance(void)
+{
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMOPUS::InternalCreateEncoder()
+{
+    return -1;
+}
+
+
+void
+ACMOPUS::DestructEncoderSafe()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMOPUS::InternalCreateDecoder()
+{
+    return -1;
+}
+
+
+void
+ACMOPUS::DestructDecoderSafe()
+{
+    return;
+}
+
+
+void
+ACMOPUS::InternalDestructEncoderInst(
+    void* /* ptrInst */)
+{
+    return;
+}
+
+WebRtc_Word16
+ACMOPUS::SetBitRateSafe(
+    const WebRtc_Word32 /*rate*/ )
+{
+    return -1;
+}
+
+#else     //===================== Actual Implementation =======================
+
+// Remove when integrating a real Opus wrapper
+extern WebRtc_Word16 WebRtcOpus_CreateEnc(OPUS_inst_t_** inst, WebRtc_Word16 samplFreq);
+extern WebRtc_Word16 WebRtcOpus_CreateDec(OPUS_inst_t_** inst, WebRtc_Word16 samplFreq);
+extern WebRtc_Word16 WebRtcOpus_FreeEnc(OPUS_inst_t_* inst);
+extern WebRtc_Word16 WebRtcOpus_FreeDec(OPUS_inst_t_* inst);
+extern WebRtc_Word16 WebRtcOpus_Encode(OPUS_inst_t_* encInst,
+                                       WebRtc_Word16* input,
+                                       WebRtc_Word16* output,
+                                       WebRtc_Word16 len,
+                                       WebRtc_Word16 byteLen);
+extern WebRtc_Word16 WebRtcOpus_EncoderInit(OPUS_inst_t_* encInst,
+                                            WebRtc_Word16 samplFreq,
+                                            WebRtc_Word16 mode,
+                                            WebRtc_Word16 vbrFlag);
+extern WebRtc_Word16 WebRtcOpus_Decode(OPUS_inst_t_* decInst);
+extern WebRtc_Word16 WebRtcOpus_DecodeBwe(OPUS_inst_t_* decInst, WebRtc_Word16* input);
+extern WebRtc_Word16 WebRtcOpus_DecodePlc(OPUS_inst_t_* decInst);
+extern WebRtc_Word16 WebRtcOpus_DecoderInit(OPUS_inst_t_* decInst);
+
+ACMOPUS::ACMOPUS(WebRtc_Word16 codecID)
+    : _encoderInstPtr(NULL),
+      _decoderInstPtr(NULL),
+      _mySampFreq(48000),  // Default sampling frequency.
+      _myRate(50000),  // Default rate.
+      _opusMode(1),  // Default mode is the hybrid mode.
+      _flagVBR(0) {  // Default VBR off.
+  _codecID = codecID;
+
+  // Current implementation doesn't have DTX. That might change.
+  _hasInternalDTX = false;
+
+  return;
+}
+
+ACMOPUS::~ACMOPUS()
+{
+    if(_encoderInstPtr != NULL)
+    {
+        WebRtcOpus_FreeEnc(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+    if(_decoderInstPtr != NULL)
+    {
+        WebRtcOpus_FreeDec(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+    return;
+}
+
+
+WebRtc_Word16
+ACMOPUS::InternalEncode(
+    WebRtc_UWord8* bitStream,
+    WebRtc_Word16* bitStreamLenByte)
+{
+    WebRtc_Word16 noEncodedSamples = 0;
+    WebRtc_Word16 tmpLenByte = 0;
+    *bitStreamLenByte = 0;
+
+    WebRtc_Word16 byteLengthFrame = 0;
+
+    // Derive what byte-length is requested
+    byteLengthFrame = _myRate*_frameLenSmpl/(8*_mySampFreq);
+
+    // Call Encoder
+    *bitStreamLenByte = WebRtcOpus_Encode(_encoderInstPtr, &_inAudio[_inAudioIxRead],
+           (WebRtc_Word16*)bitStream, _frameLenSmpl, byteLengthFrame);
+
+    // increment the read index this tell the caller that how far
+    // we have gone forward in reading the audio buffer
+    _inAudioIxRead += _frameLenSmpl;
+
+    // sanity check
+    if(*bitStreamLenByte < 0)
+    {
+        // error has happened
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InternalEncode: Encode error for Opus");
+            *bitStreamLenByte = 0;
+            return -1;
+    }
+
+    return *bitStreamLenByte;
+}
+
+
+
+WebRtc_Word16
+ACMOPUS::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMOPUS::InternalInitEncoder(
+    WebRtcACMCodecParams* codecParams)
+{
+    //set the bit rate and initialize
+    _myRate = codecParams->codecInstant.rate;
+    return SetBitRateSafe( (WebRtc_UWord32)_myRate);
+}
+
+
+WebRtc_Word16
+ACMOPUS::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    if (WebRtcOpus_DecoderInit(_decoderInstPtr) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                "InternalInitDecoder: init decoder failed for Opus");
+        return -1;
+    }
+    return 0;
+}
+
+
+WebRtc_Word32
+ACMOPUS::CodecDef(
+    WebRtcNetEQ_CodecDef& codecDef,
+    const CodecInst&      codecInst)
+{
+    if (!_decoderInitialized)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "CodeDef: Decoder uninitialized for Opus");
+        return -1;
+    }
+
+    // Fill up the structure by calling
+    // "SET_CODEC_PAR" & "SET_G729_FUNCTION."
+    // Then call NetEQ to add the codec to it's
+    // database.
+    SET_CODEC_PAR((codecDef), kDecoderOpus, codecInst.pltype,
+        _decoderInstPtr, 16000);
+    SET_OPUS_FUNCTIONS((codecDef));
+    return 0;
+}
+
+
+ACMGenericCodec*
+ACMOPUS::CreateInstance(void)
+{
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMOPUS::InternalCreateEncoder()
+{
+    if (WebRtcOpus_CreateEnc(&_encoderInstPtr, _mySampFreq) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "InternalCreateEncoder: create encoder failed for Opus");
+        return -1;
+    }
+    return 0;
+}
+
+
+void
+ACMOPUS::DestructEncoderSafe()
+{
+    _encoderExist = false;
+    _encoderInitialized = false;
+    if(_encoderInstPtr != NULL)
+    {
+        WebRtcOpus_FreeEnc(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+}
+
+
+WebRtc_Word16
+ACMOPUS::InternalCreateDecoder()
+{
+   if (WebRtcOpus_CreateDec(&_decoderInstPtr, _mySampFreq) < 0)
+   {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "InternalCreateDecoder: create decoder failed for Opus");
+       return -1;
+   }
+   return 0;
+}
+
+
+void
+ACMOPUS::DestructDecoderSafe()
+{
+    _decoderExist = false;
+    _decoderInitialized = false;
+    if(_decoderInstPtr != NULL)
+    {
+        WebRtcOpus_FreeDec(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+}
+
+
+void
+ACMOPUS::InternalDestructEncoderInst(
+    void* ptrInst)
+{
+    if(ptrInst != NULL)
+    {
+        WebRtcOpus_FreeEnc((OPUS_inst_t*)ptrInst);
+    }
+    return;
+}
+
+WebRtc_Word16
+ACMOPUS::SetBitRateSafe(
+    const WebRtc_Word32 rate)
+{
+    //allowed rates: {8000, 12000, 14000, 16000, 18000, 20000,
+    //                22000, 24000, 26000, 28000, 30000, 32000};
+    switch(rate)
+    {
+    case 8000:
+        {
+            _myRate = 8000;
+            break;
+        }
+    case 12000:
+        {
+            _myRate = 12000;
+            break;
+        }
+    case 14000:
+        {
+            _myRate = 14000;
+            break;
+        }
+    case 16000:
+        {
+            _myRate = 16000;
+            break;
+        }
+    case 18000:
+        {
+            _myRate = 18000;
+            break;
+        }
+    case 20000:
+        {
+            _myRate = 20000;
+            break;
+        }
+    case 22000:
+        {
+            _myRate = 22000;
+            break;
+        }
+    case 24000:
+        {
+            _myRate = 24000;
+            break;
+        }
+    case 26000:
+        {
+            _myRate = 26000;
+            break;
+        }
+    case 28000:
+        {
+            _myRate = 28000;
+            break;
+        }
+    case 30000:
+        {
+            _myRate = 30000;
+            break;
+        }
+    case 32000:
+        {
+            _myRate = 32000;
+            break;
+        }
+    default:
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                "SetBitRateSafe: Invalid rate Opus");
+            return -1;
+        }
+    }
+
+    // Re-init with new rate
+    if (WebRtcOpus_EncoderInit(_encoderInstPtr, _mySampFreq, _opusMode, _flagVBR) >= 0)
+    {
+        _encoderParams.codecInstant.rate = _myRate;
+        return 0;
+    }
+    else
+    {
+        return -1;
+    }
+}
+
+#endif
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/acm_opus.h b/src/modules/audio_coding/main/source/acm_opus.h
new file mode 100644
index 0000000..c6832fa
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_opus.h
@@ -0,0 +1,79 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_OPUS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_OPUS_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct OPUS_inst_t_;
+struct OPUS_inst_t_;
+
+namespace webrtc
+{
+
+class ACMOPUS: public ACMGenericCodec
+{
+public:
+    ACMOPUS(WebRtc_Word16 codecID);
+    ~ACMOPUS();
+    // for FEC
+    ACMGenericCodec* CreateInstance(void);
+
+    WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitstream,
+        WebRtc_Word16* bitStreamLenByte);
+
+    WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams);
+
+    WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams);
+
+protected:
+    WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+    WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst& codecInst);
+
+    void DestructEncoderSafe();
+
+    void DestructDecoderSafe();
+
+    WebRtc_Word16 InternalCreateEncoder();
+
+    WebRtc_Word16 InternalCreateDecoder();
+
+    void InternalDestructEncoderInst(
+        void* ptrInst);
+
+    WebRtc_Word16 SetBitRateSafe(
+        const WebRtc_Word32 rate);
+
+    OPUS_inst_t_* _encoderInstPtr;
+    OPUS_inst_t_* _decoderInstPtr;
+
+    WebRtc_UWord16    _mySampFreq;
+    WebRtc_UWord16    _myRate;
+    WebRtc_Word16     _opusMode;
+    WebRtc_Word16     _flagVBR;
+
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_OPUS_H_
diff --git a/src/modules/audio_coding/main/source/acm_pcm16b.cc b/src/modules/audio_coding/main/source/acm_pcm16b.cc
new file mode 100644
index 0000000..e44be38
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_pcm16b.cc
@@ -0,0 +1,247 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_pcm16b.h"
+
+#include "acm_codec_database.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+#ifdef WEBRTC_CODEC_PCM16
+    #include "pcm16b.h"
+#endif
+
+namespace webrtc {
+
+#ifndef WEBRTC_CODEC_PCM16
+
+ACMPCM16B::ACMPCM16B(WebRtc_Word16 /* codecID */) {
+  return;
+}
+
+ACMPCM16B::~ACMPCM16B() {
+  return;
+}
+
+WebRtc_Word16 ACMPCM16B::InternalEncode(WebRtc_UWord8* /* bitStream */,
+                                        WebRtc_Word16* /* bitStreamLenByte */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMPCM16B::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                    WebRtc_Word16 /* bitStreamLenByte */,
+                                    WebRtc_Word16* /* audio */,
+                                    WebRtc_Word16* /* audioSamples */,
+                                    WebRtc_Word8* /* speechType */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMPCM16B::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word16 ACMPCM16B::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  return -1;
+}
+
+WebRtc_Word32 ACMPCM16B::CodecDef(WebRtcNetEQ_CodecDef& /* codecDef */,
+                                  const CodecInst& /* codecInst */) {
+  return -1;
+}
+
+ACMGenericCodec* ACMPCM16B::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMPCM16B::InternalCreateEncoder() {
+  return -1;
+}
+
+WebRtc_Word16 ACMPCM16B::InternalCreateDecoder() {
+  return -1;
+}
+
+void ACMPCM16B::InternalDestructEncoderInst(void* /* ptrInst */) {
+  return;
+}
+
+void ACMPCM16B::DestructEncoderSafe() {
+  return;
+}
+
+void ACMPCM16B::DestructDecoderSafe() {
+  return;
+}
+
+void ACMPCM16B::SplitStereoPacket(uint8_t* /*payload*/,
+                                  int32_t* /*payload_length*/) {
+}
+
+#else     //===================== Actual Implementation =======================
+
+ACMPCM16B::ACMPCM16B(WebRtc_Word16 codecID) {
+  _codecID = codecID;
+  _samplingFreqHz = ACMCodecDB::CodecFreq(_codecID);
+}
+
+ACMPCM16B::~ACMPCM16B() {
+  return;
+}
+
+WebRtc_Word16 ACMPCM16B::InternalEncode(WebRtc_UWord8* bitStream,
+                                        WebRtc_Word16* bitStreamLenByte) {
+  *bitStreamLenByte = WebRtcPcm16b_Encode(&_inAudio[_inAudioIxRead],
+                                          _frameLenSmpl * _noChannels,
+                                          bitStream);
+  // Increment the read index to tell the caller that how far
+  // we have gone forward in reading the audio buffer.
+  _inAudioIxRead += _frameLenSmpl * _noChannels;
+  return *bitStreamLenByte;
+}
+
+WebRtc_Word16 ACMPCM16B::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                    WebRtc_Word16 /* bitStreamLenByte */,
+                                    WebRtc_Word16* /* audio */,
+                                    WebRtc_Word16* /* audioSamples */,
+                                    WebRtc_Word8* /* speechType */) {
+  return 0;
+}
+
+WebRtc_Word16 ACMPCM16B::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  // This codec does not need initialization, PCM has no instance.
+  return 0;
+}
+
+WebRtc_Word16 ACMPCM16B::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  // This codec does not need initialization, PCM has no instance.
+  return 0;
+}
+
+WebRtc_Word32 ACMPCM16B::CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                                  const CodecInst& codecInst) {
+  // Fill up the structure by calling "SET_CODEC_PAR" & "SET_PCMU_FUNCTION".
+  // Then call NetEQ to add the codec to it's database.
+  if (codecInst.channels == 1) {
+    switch(_samplingFreqHz) {
+      case 8000: {
+        SET_CODEC_PAR(codecDef, kDecoderPCM16B, codecInst.pltype, NULL, 8000);
+        SET_PCM16B_FUNCTIONS(codecDef);
+        break;
+      }
+      case 16000: {
+        SET_CODEC_PAR(codecDef, kDecoderPCM16Bwb, codecInst.pltype, NULL,
+                      16000);
+        SET_PCM16B_WB_FUNCTIONS(codecDef);
+        break;
+      }
+      case 32000: {
+        SET_CODEC_PAR(codecDef, kDecoderPCM16Bswb32kHz, codecInst.pltype,
+                      NULL, 32000);
+        SET_PCM16B_SWB32_FUNCTIONS(codecDef);
+        break;
+      }
+      default: {
+        return -1;
+      }
+    }
+  } else {
+    switch(_samplingFreqHz) {
+      case 8000: {
+        SET_CODEC_PAR(codecDef, kDecoderPCM16B_2ch, codecInst.pltype, NULL,
+                      8000);
+        SET_PCM16B_FUNCTIONS(codecDef);
+        break;
+      }
+      case 16000: {
+        SET_CODEC_PAR(codecDef, kDecoderPCM16Bwb_2ch, codecInst.pltype,
+                      NULL, 16000);
+        SET_PCM16B_WB_FUNCTIONS(codecDef);
+        break;
+      }
+      case 32000: {
+        SET_CODEC_PAR(codecDef, kDecoderPCM16Bswb32kHz_2ch, codecInst.pltype,
+                      NULL, 32000);
+        SET_PCM16B_SWB32_FUNCTIONS(codecDef);
+        break;
+      }
+      default: {
+        return -1;
+      }
+    }
+  }
+  return 0;
+}
+
+ACMGenericCodec* ACMPCM16B::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMPCM16B::InternalCreateEncoder() {
+  // PCM has no instance.
+  return 0;
+}
+
+WebRtc_Word16 ACMPCM16B::InternalCreateDecoder() {
+  // PCM has no instance.
+  return 0;
+}
+
+void ACMPCM16B::InternalDestructEncoderInst(void* /* ptrInst */) {
+  // PCM has no instance.
+  return;
+}
+
+void ACMPCM16B::DestructEncoderSafe() {
+  // PCM has no instance.
+  _encoderExist = false;
+  _encoderInitialized = false;
+  return;
+}
+
+void ACMPCM16B::DestructDecoderSafe() {
+  // PCM has no instance.
+  _decoderExist = false;
+  _decoderInitialized = false;
+  return;
+}
+
+// Split the stereo packet and place left and right channel after each other
+// in the payload vector.
+void ACMPCM16B::SplitStereoPacket(uint8_t* payload, int32_t* payload_length) {
+  uint8_t right_byte_msb;
+  uint8_t right_byte_lsb;
+
+  // Check for valid inputs.
+  assert(payload != NULL);
+  assert(*payload_length > 0);
+
+  // Move two bytes representing right channel each loop, and place it at the
+  // end of the bytestream vector. After looping the data is reordered to:
+  // l1 l2 l3 l4 ... l(N-1) lN r1 r2 r3 r4 ... r(N-1) r(N),
+  // where N is the total number of samples.
+
+  for (int i = 0; i < *payload_length / 2; i += 2) {
+    right_byte_msb = payload[i + 2];
+    right_byte_lsb = payload[i + 3];
+    memmove(&payload[i + 2], &payload[i + 4], *payload_length - i - 4);
+    payload[*payload_length - 2] = right_byte_msb;
+    payload[*payload_length - 1] = right_byte_lsb;
+  }
+}
+#endif
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/acm_pcm16b.h b/src/modules/audio_coding/main/source/acm_pcm16b.h
new file mode 100644
index 0000000..09fbb05
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_pcm16b.h
@@ -0,0 +1,67 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCM16B_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCM16B_H_
+
+#include "acm_generic_codec.h"
+
+namespace webrtc
+{
+
+class ACMPCM16B : public ACMGenericCodec
+{
+public:
+    ACMPCM16B(WebRtc_Word16 codecID);
+    ~ACMPCM16B();
+    // for FEC
+    ACMGenericCodec* CreateInstance(void);
+
+    WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitstream,
+        WebRtc_Word16* bitStreamLenByte);
+
+    WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams);
+
+    WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams);
+
+protected:
+    WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+    WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst&      codecInst);
+
+    void DestructEncoderSafe();
+
+    void DestructDecoderSafe();
+
+    WebRtc_Word16 InternalCreateEncoder();
+
+    WebRtc_Word16 InternalCreateDecoder();
+
+    void InternalDestructEncoderInst(
+        void* ptrInst);
+
+    void SplitStereoPacket(uint8_t* payload, int32_t* payload_length);
+
+    WebRtc_Word32 _samplingFreqHz;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCM16B_H_
diff --git a/src/modules/audio_coding/main/source/acm_pcma.cc b/src/modules/audio_coding/main/source/acm_pcma.cc
new file mode 100644
index 0000000..c459d25
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_pcma.cc
@@ -0,0 +1,130 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_pcma.h"
+
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+// Codec interface
+#include "g711_interface.h"
+
+namespace webrtc {
+
+ACMPCMA::ACMPCMA(WebRtc_Word16 codecID) {
+  _codecID = codecID;
+}
+
+ACMPCMA::~ACMPCMA() {
+  return;
+}
+
+WebRtc_Word16 ACMPCMA::InternalEncode(WebRtc_UWord8* bitStream,
+                                      WebRtc_Word16* bitStreamLenByte) {
+  *bitStreamLenByte = WebRtcG711_EncodeA(NULL, &_inAudio[_inAudioIxRead],
+                                         _frameLenSmpl * _noChannels,
+                                         (WebRtc_Word16*) bitStream);
+  // Increment the read index this tell the caller that how far
+  // we have gone forward in reading the audio buffer.
+  _inAudioIxRead += _frameLenSmpl * _noChannels;
+  return *bitStreamLenByte;
+}
+
+WebRtc_Word16 ACMPCMA::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                  WebRtc_Word16 /* bitStreamLenByte */,
+                                  WebRtc_Word16* /* audio */,
+                                  WebRtc_Word16* /* audioSamples */,
+                                  WebRtc_Word8* /* speechType */) {
+  return 0;
+}
+
+WebRtc_Word16 ACMPCMA::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  // This codec does not need initialization, PCM has no instance.
+  return 0;
+}
+
+WebRtc_Word16 ACMPCMA::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  // This codec does not need initialization, PCM has no instance.
+  return 0;
+}
+
+WebRtc_Word32 ACMPCMA::CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                                const CodecInst& codecInst) {
+  // Fill up the structure by calling
+  // "SET_CODEC_PAR" & "SET_PCMA_FUNCTION."
+  // Then call NetEQ to add the codec to it's database.
+  if (codecInst.channels == 1) {
+    // Mono mode.
+    SET_CODEC_PAR(codecDef, kDecoderPCMa, codecInst.pltype, NULL, 8000);
+  } else {
+    // Stereo mode.
+    SET_CODEC_PAR(codecDef, kDecoderPCMa_2ch, codecInst.pltype, NULL, 8000);
+  }
+  SET_PCMA_FUNCTIONS(codecDef);
+  return 0;
+}
+
+ACMGenericCodec* ACMPCMA::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMPCMA::InternalCreateEncoder() {
+  // PCM has no instance.
+  return 0;
+}
+
+WebRtc_Word16 ACMPCMA::InternalCreateDecoder() {
+  // PCM has no instance.
+  return 0;
+}
+
+void ACMPCMA::InternalDestructEncoderInst(void* /* ptrInst */) {
+  // PCM has no instance.
+  return;
+}
+
+void ACMPCMA::DestructEncoderSafe() {
+  // PCM has no instance.
+  return;
+}
+
+void ACMPCMA::DestructDecoderSafe() {
+  // PCM has no instance.
+  _decoderInitialized = false;
+  _decoderExist = false;
+  return;
+}
+
+// Split the stereo packet and place left and right channel after each other
+// in the payload vector.
+void ACMPCMA::SplitStereoPacket(uint8_t* payload, int32_t* payload_length) {
+  uint8_t right_byte;
+
+  // Check for valid inputs.
+  assert(payload != NULL);
+  assert(*payload_length > 0);
+
+  // Move one bytes representing right channel each loop, and place it at the
+  // end of the bytestream vector. After looping the data is reordered to:
+  // l1 l2 l3 l4 ... l(N-1) lN r1 r2 r3 r4 ... r(N-1) r(N),
+  // where N is the total number of samples.
+  for (int i = 0; i < *payload_length / 2; i ++) {
+    right_byte = payload[i + 1];
+    memmove(&payload[i + 1], &payload[i + 2], *payload_length - i - 2);
+    payload[*payload_length - 1] = right_byte;
+  }
+}
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/acm_pcma.h b/src/modules/audio_coding/main/source/acm_pcma.h
new file mode 100644
index 0000000..a3cf220
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_pcma.h
@@ -0,0 +1,65 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMA_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMA_H_
+
+#include "acm_generic_codec.h"
+
+namespace webrtc
+{
+
+class ACMPCMA : public ACMGenericCodec
+{
+public:
+    ACMPCMA(WebRtc_Word16 codecID);
+    ~ACMPCMA();
+    // for FEC
+    ACMGenericCodec* CreateInstance(void);
+
+    WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitstream,
+        WebRtc_Word16* bitStreamLenByte);
+
+    WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams);
+
+    WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams);
+
+protected:
+    WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+    WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst&      codecInst);
+
+    void DestructEncoderSafe();
+
+    void DestructDecoderSafe();
+
+    WebRtc_Word16 InternalCreateEncoder();
+
+    WebRtc_Word16 InternalCreateDecoder();
+
+    void InternalDestructEncoderInst(
+        void* ptrInst);
+
+    void SplitStereoPacket(uint8_t* payload, int32_t* payload_length);
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMA_H_
diff --git a/src/modules/audio_coding/main/source/acm_pcmu.cc b/src/modules/audio_coding/main/source/acm_pcmu.cc
new file mode 100644
index 0000000..83240d4
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_pcmu.cc
@@ -0,0 +1,132 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_pcmu.h"
+
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+// Codec interface
+#include "g711_interface.h"
+
+namespace webrtc {
+
+ACMPCMU::ACMPCMU(WebRtc_Word16 codecID) {
+  _codecID = codecID;
+}
+
+ACMPCMU::~ACMPCMU() {
+  return;
+}
+
+WebRtc_Word16 ACMPCMU::InternalEncode(WebRtc_UWord8* bitStream,
+                                      WebRtc_Word16* bitStreamLenByte) {
+  *bitStreamLenByte = WebRtcG711_EncodeU(NULL, &_inAudio[_inAudioIxRead],
+                                         _frameLenSmpl * _noChannels,
+                                         (WebRtc_Word16*) bitStream);
+  // Increment the read index this tell the caller that how far
+  // we have gone forward in reading the audio buffer.
+  _inAudioIxRead += _frameLenSmpl * _noChannels;
+  return *bitStreamLenByte;
+}
+
+WebRtc_Word16 ACMPCMU::DecodeSafe(WebRtc_UWord8* /* bitStream */,
+                                  WebRtc_Word16 /* bitStreamLenByte */,
+                                  WebRtc_Word16* /* audio */,
+                                  WebRtc_Word16* /* audioSamples */,
+                                  WebRtc_Word8* /* speechType */) {
+  return 0;
+}
+
+WebRtc_Word16 ACMPCMU::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+  // This codec does not need initialization, PCM has no instance.
+  return 0;
+}
+
+WebRtc_Word16 ACMPCMU::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */) {
+   // This codec does not need initialization, PCM has no instance.
+   return 0;
+}
+
+WebRtc_Word32 ACMPCMU::CodecDef(WebRtcNetEQ_CodecDef& codecDef,
+                                const CodecInst& codecInst) {
+  // Fill up the structure by calling
+  // "SET_CODEC_PAR" & "SET_PCMU_FUNCTION."
+  // Then call NetEQ to add the codec to it's database.
+  if (codecInst.channels == 1) {
+    // Mono mode.
+    SET_CODEC_PAR(codecDef, kDecoderPCMu, codecInst.pltype, NULL, 8000);
+  } else {
+    // Stereo mode.
+    SET_CODEC_PAR(codecDef, kDecoderPCMu_2ch, codecInst.pltype, NULL, 8000);
+  }
+  SET_PCMU_FUNCTIONS(codecDef);
+  return 0;
+}
+
+ACMGenericCodec* ACMPCMU::CreateInstance(void) {
+  return NULL;
+}
+
+WebRtc_Word16 ACMPCMU::InternalCreateEncoder() {
+  // PCM has no instance.
+  return 0;
+}
+
+WebRtc_Word16 ACMPCMU::InternalCreateDecoder() {
+  // PCM has no instance.
+  return 0;
+}
+
+void ACMPCMU::InternalDestructEncoderInst(void* /* ptrInst */) {
+  // PCM has no instance.
+  return;
+}
+
+void ACMPCMU::DestructEncoderSafe() {
+  // PCM has no instance.
+  _encoderExist = false;
+  _encoderInitialized = false;
+  return;
+}
+
+void ACMPCMU::DestructDecoderSafe() {
+  // PCM has no instance.
+  _decoderInitialized = false;
+  _decoderExist = false;
+  return;
+}
+
+// Split the stereo packet and place left and right channel after each other
+// in the payload vector.
+void ACMPCMU::SplitStereoPacket(uint8_t* payload, int32_t* payload_length) {
+  uint8_t right_byte;
+
+  // Check for valid inputs.
+  assert(payload != NULL);
+  assert(*payload_length > 0);
+
+  // Move one bytes representing right channel each loop, and place it at the
+  // end of the bytestream vector. After looping the data is reordered to:
+  // l1 l2 l3 l4 ... l(N-1) lN r1 r2 r3 r4 ... r(N-1) r(N),
+  // where N is the total number of samples.
+  for (int i = 0; i < *payload_length / 2; i ++) {
+    right_byte = payload[i + 1];
+    memmove(&payload[i + 1], &payload[i + 2], *payload_length - i - 2);
+    payload[*payload_length - 1] = right_byte;
+  }
+}
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/acm_pcmu.h b/src/modules/audio_coding/main/source/acm_pcmu.h
new file mode 100644
index 0000000..716ac40
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_pcmu.h
@@ -0,0 +1,65 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMU_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMU_H_
+
+#include "acm_generic_codec.h"
+
+namespace webrtc
+{
+
+class ACMPCMU : public ACMGenericCodec
+{
+public:
+    ACMPCMU(WebRtc_Word16 codecID);
+    ~ACMPCMU();
+    // for FEC
+    ACMGenericCodec* CreateInstance(void);
+
+    WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitstream,
+        WebRtc_Word16* bitStreamLenByte);
+
+    WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams);
+
+    WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams);
+
+protected:
+    WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+    WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst&  codecInst);
+
+    void DestructEncoderSafe();
+
+    void DestructDecoderSafe();
+
+    WebRtc_Word16 InternalCreateEncoder();
+
+    WebRtc_Word16 InternalCreateDecoder();
+
+    void InternalDestructEncoderInst(
+        void* ptrInst);
+
+    void SplitStereoPacket(uint8_t* payload, int32_t* payload_length);
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_PCMU_H_
diff --git a/src/modules/audio_coding/main/source/acm_red.cc b/src/modules/audio_coding/main/source/acm_red.cc
new file mode 100644
index 0000000..232142d
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_red.cc
@@ -0,0 +1,143 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_red.h"
+#include "acm_neteq.h"
+#include "acm_common_defs.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+namespace webrtc
+{
+
+ACMRED::ACMRED(WebRtc_Word16 codecID)
+{
+    _codecID = codecID;
+}
+
+
+ACMRED::~ACMRED()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMRED::InternalEncode(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16* /* bitStreamLenByte */)
+{
+    // RED is never used as an encoder
+    // RED has no instance
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMRED::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMRED::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    // This codec does not need initialization,
+    // RED has no instance
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMRED::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+   // This codec does not need initialization,
+   // RED has no instance
+   return 0;
+}
+
+
+WebRtc_Word32
+ACMRED::CodecDef(
+    WebRtcNetEQ_CodecDef& codecDef,
+    const CodecInst&      codecInst)
+{
+    if (!_decoderInitialized)
+    {
+        // Todo:
+        // log error
+        return -1;
+    }
+
+    // Fill up the structure by calling
+    // "SET_CODEC_PAR" & "SET_PCMU_FUNCTION."
+    // Then call NetEQ to add the codec to it's
+    // database.
+    SET_CODEC_PAR((codecDef), kDecoderRED, codecInst.pltype, NULL, 8000);
+    SET_RED_FUNCTIONS((codecDef));
+    return 0;
+}
+
+
+ACMGenericCodec*
+ACMRED::CreateInstance(void)
+{
+    return NULL;
+}
+
+
+WebRtc_Word16
+ACMRED::InternalCreateEncoder()
+{
+    // RED has no instance
+    return 0;
+}
+
+
+WebRtc_Word16
+ACMRED::InternalCreateDecoder()
+{
+    // RED has no instance
+    return 0;
+}
+
+
+void
+ACMRED::InternalDestructEncoderInst(
+    void* /* ptrInst */)
+{
+    // RED has no instance
+    return;
+}
+
+
+void
+ACMRED::DestructEncoderSafe()
+{
+    // RED has no instance
+    return;
+}
+
+void ACMRED::DestructDecoderSafe()
+{
+    // RED has no instance
+    return;
+}
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/acm_red.h b/src/modules/audio_coding/main/source/acm_red.h
new file mode 100644
index 0000000..3aaae47
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_red.h
@@ -0,0 +1,63 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RED_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RED_H_
+
+#include "acm_generic_codec.h"
+
+namespace webrtc
+{
+
+class ACMRED : public ACMGenericCodec
+{
+public:
+    ACMRED(WebRtc_Word16 codecID);
+    ~ACMRED();
+    // for FEC
+    ACMGenericCodec* CreateInstance(void);
+
+    WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitstream,
+        WebRtc_Word16* bitStreamLenByte);
+
+    WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams);
+
+    WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams);
+
+protected:
+    WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+    WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst&      codecInst);
+
+    void DestructEncoderSafe();
+
+    void DestructDecoderSafe();
+
+    WebRtc_Word16 InternalCreateEncoder();
+
+    WebRtc_Word16 InternalCreateDecoder();
+
+    void InternalDestructEncoderInst(
+        void* ptrInst);
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RED_H_
diff --git a/src/modules/audio_coding/main/source/acm_resampler.cc b/src/modules/audio_coding/main/source/acm_resampler.cc
new file mode 100644
index 0000000..f8965ce
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_resampler.cc
@@ -0,0 +1,72 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h>
+
+#include "acm_resampler.h"
+
+#include "critical_section_wrapper.h"
+#include "resampler.h"
+#include "signal_processing_library.h"
+#include "trace.h"
+
+namespace webrtc {
+
+ACMResampler::ACMResampler()
+    : _resamplerCritSect(CriticalSectionWrapper::CreateCriticalSection()) {
+}
+
+ACMResampler::~ACMResampler() {
+  delete _resamplerCritSect;
+}
+
+WebRtc_Word16 ACMResampler::Resample10Msec(const WebRtc_Word16* inAudio,
+                                           WebRtc_Word32 inFreqHz,
+                                           WebRtc_Word16* outAudio,
+                                           WebRtc_Word32 outFreqHz,
+                                           WebRtc_UWord8 numAudioChannels) {
+  CriticalSectionScoped cs(_resamplerCritSect);
+
+  if (inFreqHz == outFreqHz) {
+    size_t length = static_cast<size_t>(inFreqHz * numAudioChannels / 100);
+    memcpy(outAudio, inAudio, length * sizeof(WebRtc_Word16));
+    return static_cast<WebRtc_Word16>(inFreqHz / 100);
+  }
+
+  // |maxLen| is maximum number of samples for 10ms at 48kHz.
+  int maxLen = 480 * numAudioChannels;
+  int lengthIn = (WebRtc_Word16)(inFreqHz / 100) * numAudioChannels;
+  int outLen;
+
+  WebRtc_Word32 ret;
+  ResamplerType type;
+  type = (numAudioChannels == 1) ? kResamplerSynchronous :
+      kResamplerSynchronousStereo;
+
+  ret = _resampler.ResetIfNeeded(inFreqHz, outFreqHz, type);
+  if (ret < 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, 0,
+                 "Error in reset of resampler");
+    return -1;
+  }
+
+  ret = _resampler.Push(inAudio, lengthIn, outAudio, maxLen, outLen);
+  if (ret < 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, 0,
+                 "Error in resampler: resampler.Push");
+    return -1;
+  }
+
+  WebRtc_Word16 outAudioLenSmpl = (WebRtc_Word16) outLen / numAudioChannels;
+
+  return outAudioLenSmpl;
+}
+
+}  // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/acm_resampler.h b/src/modules/audio_coding/main/source/acm_resampler.h
new file mode 100644
index 0000000..1586f02
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_resampler.h
@@ -0,0 +1,40 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RESAMPLER_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RESAMPLER_H_
+
+#include "resampler.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+
+class ACMResampler {
+ public:
+  ACMResampler();
+  ~ACMResampler();
+
+  WebRtc_Word16 Resample10Msec(const WebRtc_Word16* inAudio,
+                               const WebRtc_Word32 inFreqHz,
+                               WebRtc_Word16* outAudio,
+                               const WebRtc_Word32 outFreqHz,
+                               WebRtc_UWord8 numAudioChannels);
+
+ private:
+  // Use the Resampler class.
+  Resampler _resampler;
+  CriticalSectionWrapper* _resamplerCritSect;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_RESAMPLER_H_
diff --git a/src/modules/audio_coding/main/source/acm_speex.cc b/src/modules/audio_coding/main/source/acm_speex.cc
new file mode 100644
index 0000000..14554d0
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_speex.cc
@@ -0,0 +1,622 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "acm_speex.h"
+#include "acm_codec_database.h"
+#include "acm_common_defs.h"
+#include "acm_neteq.h"
+#include "trace.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_help_macros.h"
+
+#ifdef WEBRTC_CODEC_SPEEX
+    // NOTE! Speex is not included in the open-source package. The following
+    // interface file is needed:
+    //
+    // /modules/audio_coding/codecs/speex/main/interface/speex_interface.h
+    //
+    // The API in the header file should match the one below.
+    //
+    // int16_t WebRtcSpeex_CreateEnc(SPEEX_encinst_t **SPEEXenc_inst,
+    //                               int32_t fs);
+    // int16_t WebRtcSpeex_FreeEnc(SPEEX_encinst_t *SPEEXenc_inst);
+    // int16_t WebRtcSpeex_CreateDec(SPEEX_decinst_t **SPEEXdec_inst,
+    //                               int32_t fs,
+    //                               int16_t enh_enabled);
+    // int16_t WebRtcSpeex_FreeDec(SPEEX_decinst_t *SPEEXdec_inst);
+    // int16_t WebRtcSpeex_Encode(SPEEX_encinst_t *SPEEXenc_inst,
+    //                            int16_t *speechIn,
+    //                            int32_t rate);
+    // int16_t WebRtcSpeex_EncoderInit(SPEEX_encinst_t *SPEEXenc_inst,
+    //                                 int16_t vbr, int16_t complexity,
+    //                                 int16_t vad_enable);
+    // int16_t WebRtcSpeex_GetBitstream(SPEEX_encinst_t *SPEEXenc_inst,
+    //                                  int16_t *encoded);
+    // int16_t WebRtcSpeex_DecodePlc(SPEEX_decinst_t *SPEEXdec_inst,
+    //                               int16_t *decoded, int16_t noOfLostFrames);
+    // int16_t WebRtcSpeex_Decode(SPEEX_decinst_t *SPEEXdec_inst,
+    //                            int16_t *encoded, int16_t len,
+    //                            int16_t *decoded, int16_t *speechType);
+    // int16_t WebRtcSpeex_DecoderInit(SPEEX_decinst_t *SPEEXdec_inst);
+    #include "speex_interface.h"
+#endif
+
+namespace webrtc {
+
+#ifndef WEBRTC_CODEC_SPEEX
+ACMSPEEX::ACMSPEEX(WebRtc_Word16 /* codecID */)
+    : _encoderInstPtr(NULL),
+      _decoderInstPtr(NULL),
+      _complMode(0),
+      _vbrEnabled(false),
+      _encodingRate(-1),
+      _samplingFrequency(-1),
+      _samplesIn20MsAudio(-1) {
+  return;
+}
+
+ACMSPEEX::~ACMSPEEX()
+{
+    return;
+}
+
+WebRtc_Word16
+ACMSPEEX::InternalEncode(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16* /* bitStreamLenByte */)
+{
+    return -1;
+}
+
+WebRtc_Word16
+ACMSPEEX::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return -1;
+}
+
+WebRtc_Word16
+ACMSPEEX::EnableDTX()
+{
+    return -1;
+}
+
+WebRtc_Word16
+ACMSPEEX::DisableDTX()
+{
+    return -1;
+}
+
+WebRtc_Word16
+ACMSPEEX::InternalInitEncoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+WebRtc_Word16
+ACMSPEEX::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    return -1;
+}
+
+WebRtc_Word32
+ACMSPEEX::CodecDef(
+    WebRtcNetEQ_CodecDef& /* codecDef  */,
+    const CodecInst&      /* codecInst */)
+{
+    return -1;
+}
+
+ACMGenericCodec*
+ACMSPEEX::CreateInstance(void)
+{
+    return NULL;
+}
+
+WebRtc_Word16
+ACMSPEEX::InternalCreateEncoder()
+{
+    return -1;
+}
+
+void
+ACMSPEEX::DestructEncoderSafe()
+{
+    return;
+}
+
+
+WebRtc_Word16
+ACMSPEEX::InternalCreateDecoder()
+{
+    return -1;
+}
+
+void
+ACMSPEEX::DestructDecoderSafe()
+{
+    return;
+}
+
+WebRtc_Word16
+ACMSPEEX::SetBitRateSafe(
+    const WebRtc_Word32 /* rate */)
+{
+    return -1;
+}
+
+void
+ACMSPEEX::InternalDestructEncoderInst(
+    void* /* ptrInst */)
+{
+    return;
+}
+
+#ifdef UNUSEDSPEEX
+WebRtc_Word16
+ACMSPEEX::EnableVBR()
+{
+    return -1;
+}
+
+WebRtc_Word16
+ACMSPEEX::DisableVBR()
+{
+    return -1;
+}
+
+WebRtc_Word16
+ACMSPEEX::SetComplMode(
+    WebRtc_Word16 mode)
+{
+    return -1;
+}
+#endif
+
+#else     //===================== Actual Implementation =======================
+
+ACMSPEEX::ACMSPEEX(WebRtc_Word16 codecID):
+_encoderInstPtr(NULL),
+_decoderInstPtr(NULL)
+{
+    _codecID = codecID;
+
+    // Set sampling frequency, frame size and rate Speex
+    if(_codecID == ACMCodecDB::kSPEEX8)
+    {
+        _samplingFrequency = 8000;
+        _samplesIn20MsAudio = 160;
+        _encodingRate = 11000;
+    }
+    else if(_codecID == ACMCodecDB::kSPEEX16)
+    {
+        _samplingFrequency = 16000;
+        _samplesIn20MsAudio = 320;
+        _encodingRate = 22000;
+    }
+    else
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Wrong codec id for Speex.");
+
+        _samplingFrequency = -1;
+        _samplesIn20MsAudio = -1;
+        _encodingRate = -1;
+    }
+
+    _hasInternalDTX = true;
+    _dtxEnabled = false;
+    _vbrEnabled = false;
+    _complMode =  3; // default complexity value
+
+    return;
+}
+
+ACMSPEEX::~ACMSPEEX()
+{
+    if(_encoderInstPtr != NULL)
+    {
+        WebRtcSpeex_FreeEnc(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+    if(_decoderInstPtr != NULL)
+    {
+        WebRtcSpeex_FreeDec(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+    return;
+}
+
+WebRtc_Word16
+ACMSPEEX::InternalEncode(
+    WebRtc_UWord8* bitStream,
+    WebRtc_Word16* bitStreamLenByte)
+{
+    WebRtc_Word16 status;
+    WebRtc_Word16 numEncodedSamples = 0;
+    WebRtc_Word16 n = 0;
+
+    while( numEncodedSamples < _frameLenSmpl)
+    {
+        status = WebRtcSpeex_Encode(_encoderInstPtr, &_inAudio[_inAudioIxRead],
+            _encodingRate);
+
+        // increment the read index this tell the caller that how far
+        // we have gone forward in reading the audio buffer
+        _inAudioIxRead += _samplesIn20MsAudio;
+        numEncodedSamples += _samplesIn20MsAudio;
+
+        if(status < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+                "Error in Speex encoder");
+            return status;
+        }
+
+        // Update VAD, if internal DTX is used
+        if(_hasInternalDTX && _dtxEnabled)
+        {
+            _vadLabel[n++] = status;
+            _vadLabel[n++] = status;
+        }
+
+        if(status == 0)
+        {
+            // This frame is detected as inactive. We need send whatever
+            // encoded so far.
+            *bitStreamLenByte = WebRtcSpeex_GetBitstream(_encoderInstPtr,
+                (WebRtc_Word16*)bitStream);
+
+            return *bitStreamLenByte;
+        }
+    }
+
+    *bitStreamLenByte = WebRtcSpeex_GetBitstream(_encoderInstPtr,
+        (WebRtc_Word16*)bitStream);
+    return *bitStreamLenByte;
+}
+
+WebRtc_Word16
+ACMSPEEX::DecodeSafe(
+    WebRtc_UWord8* /* bitStream        */,
+    WebRtc_Word16  /* bitStreamLenByte */,
+    WebRtc_Word16* /* audio            */,
+    WebRtc_Word16* /* audioSamples     */,
+    WebRtc_Word8*  /* speechType       */)
+{
+    return 0;
+}
+
+WebRtc_Word16
+ACMSPEEX::EnableDTX()
+{
+    if(_dtxEnabled)
+    {
+        return 0;
+    }
+    else if(_encoderExist)  // check if encoder exist
+    {
+        // enable DTX
+        if(WebRtcSpeex_EncoderInit(_encoderInstPtr, (_vbrEnabled ? 1:0), _complMode, 1) < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Cannot enable DTX for Speex");
+            return -1;
+        }
+        _dtxEnabled = true;
+        return 0;
+    }
+    else
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word16
+ACMSPEEX::DisableDTX()
+{
+    if(!_dtxEnabled)
+    {
+        return 0;
+    }
+    else if(_encoderExist)  // check if encoder exist
+    {
+        // disable DTX
+        if(WebRtcSpeex_EncoderInit(_encoderInstPtr, (_vbrEnabled ? 1:0), _complMode, 0) < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Cannot disable DTX for Speex");
+            return -1;
+        }
+        _dtxEnabled = false;
+        return 0;
+    }
+    else
+    {
+        // encoder doesn't exists, therefore disabling is harmless
+        return 0;
+    }
+
+    return 0;
+}
+
+WebRtc_Word16
+ACMSPEEX::InternalInitEncoder(
+    WebRtcACMCodecParams* codecParams)
+{
+    // sanity check
+    if (_encoderInstPtr == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "Cannot initialize Speex encoder, instance does not exist");
+        return -1;
+    }
+
+    WebRtc_Word16 status = SetBitRateSafe((codecParams->codecInstant).rate);
+    status += (WebRtcSpeex_EncoderInit(_encoderInstPtr, _vbrEnabled, _complMode, ((codecParams->enableDTX)? 1:0)) < 0)? -1:0;
+
+    if (status >= 0) {
+        return 0;
+    } else {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "Error in initialization of Speex encoder");
+        return -1;
+    }
+}
+
+WebRtc_Word16
+ACMSPEEX::InternalInitDecoder(
+    WebRtcACMCodecParams* /* codecParams */)
+{
+    WebRtc_Word16 status;
+
+    // sanity check
+    if (_decoderInstPtr == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "Cannot initialize Speex decoder, instance does not exist");
+        return -1;
+    }
+    status = ((WebRtcSpeex_DecoderInit(_decoderInstPtr) < 0)? -1:0);
+
+    if (status >= 0) {
+        return 0;
+    } else {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "Error in initialization of Speex decoder");
+        return -1;
+    }
+}
+
+WebRtc_Word32
+ACMSPEEX::CodecDef(
+    WebRtcNetEQ_CodecDef& codecDef,
+    const CodecInst&      codecInst)
+{
+    if (!_decoderInitialized)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "Error, Speex decoder is not initialized");
+        return -1;
+    }
+
+    // Fill up the structure by calling
+    // "SET_CODEC_PAR" & "SET_SPEEX_FUNCTION."
+    // Then call NetEQ to add the codec to its
+    // database.
+
+    switch(_samplingFrequency)
+    {
+    case 8000:
+        {
+            SET_CODEC_PAR((codecDef), kDecoderSPEEX_8, codecInst.pltype,
+                _decoderInstPtr, 8000);
+            break;
+        }
+    case 16000:
+        {
+            SET_CODEC_PAR((codecDef), kDecoderSPEEX_16, codecInst.pltype,
+                _decoderInstPtr, 16000);
+            break;
+        }
+    default:
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Unsupported sampling frequency for Speex");
+
+            return -1;
+        }
+    }
+
+    SET_SPEEX_FUNCTIONS((codecDef));
+    return 0;
+}
+
+ACMGenericCodec*
+ACMSPEEX::CreateInstance(void)
+{
+    return NULL;
+}
+
+WebRtc_Word16
+ACMSPEEX::InternalCreateEncoder()
+{
+    return WebRtcSpeex_CreateEnc(&_encoderInstPtr, _samplingFrequency);
+}
+
+void
+ACMSPEEX::DestructEncoderSafe()
+{
+    if(_encoderInstPtr != NULL)
+    {
+        WebRtcSpeex_FreeEnc(_encoderInstPtr);
+        _encoderInstPtr = NULL;
+    }
+    // there is no encoder set the following
+    _encoderExist = false;
+    _encoderInitialized = false;
+    _encodingRate = 0;
+}
+
+
+WebRtc_Word16
+ACMSPEEX::InternalCreateDecoder()
+{
+    return WebRtcSpeex_CreateDec(&_decoderInstPtr, _samplingFrequency, 1);
+}
+
+void
+ACMSPEEX::DestructDecoderSafe()
+{
+    if(_decoderInstPtr != NULL)
+    {
+        WebRtcSpeex_FreeDec(_decoderInstPtr);
+        _decoderInstPtr = NULL;
+    }
+    // there is no encoder instance set the followings
+    _decoderExist = false;
+    _decoderInitialized = false;
+}
+
+WebRtc_Word16
+ACMSPEEX::SetBitRateSafe(
+    const WebRtc_Word32 rate)
+{
+    // Check if changed rate
+    if (rate == _encodingRate) {
+        return 0;
+    } else if (rate > 2000) {
+        _encodingRate = rate;
+        _encoderParams.codecInstant.rate = rate;
+    } else {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+        "Unsupported encoding rate for Speex");
+
+        return -1;
+    }
+
+    return 0;
+}
+
+
+void
+ACMSPEEX::InternalDestructEncoderInst(
+    void* ptrInst)
+{
+    if(ptrInst != NULL)
+    {
+        WebRtcSpeex_FreeEnc((SPEEX_encinst_t_*)ptrInst);
+    }
+    return;
+}
+
+#ifdef UNUSEDSPEEX
+
+// This API is currently not in use. If requested to be able to enable/disable VBR
+// an ACM API need to be added.
+WebRtc_Word16
+ACMSPEEX::EnableVBR()
+{
+    if(_vbrEnabled)
+    {
+        return 0;
+    }
+    else if(_encoderExist)  // check if encoder exist
+    {
+        // enable Variable Bit Rate (VBR)
+        if(WebRtcSpeex_EncoderInit(_encoderInstPtr, 1, _complMode, (_dtxEnabled? 1:0)) < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Cannot enable VBR mode for Speex");
+
+            return -1;
+        }
+        _vbrEnabled = true;
+        return 0;
+    }
+    else
+    {
+        return -1;
+    }
+}
+
+
+// This API is currently not in use. If requested to be able to enable/disable VBR
+// an ACM API need to be added.
+WebRtc_Word16
+ACMSPEEX::DisableVBR()
+{
+    if(!_vbrEnabled)
+    {
+        return 0;
+    }
+    else if(_encoderExist)  // check if encoder exist
+    {
+        // disable DTX
+        if(WebRtcSpeex_EncoderInit(_encoderInstPtr, 0, _complMode, (_dtxEnabled? 1:0)) < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Cannot disable DTX for Speex");
+
+            return -1;
+        }
+        _vbrEnabled = false;
+        return 0;
+    }
+    else
+    {
+        // encoder doesn't exists, therefore disabling is harmless
+        return 0;
+    }
+}
+
+// This API is currently not in use. If requested to be able to set complexity
+// an ACM API need to be added.
+WebRtc_Word16
+ACMSPEEX::SetComplMode(
+    WebRtc_Word16 mode)
+{
+    // Check if new mode
+    if(mode == _complMode)
+    {
+        return 0;
+    }
+    else if(_encoderExist)  // check if encoder exist
+    {
+        // Set new mode
+        if(WebRtcSpeex_EncoderInit(_encoderInstPtr, 0, mode, (_dtxEnabled? 1:0)) < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _uniqueID,
+            "Error in complexity mode for Speex");
+            return -1;
+        }
+        _complMode = mode;
+        return 0;
+    }
+    else
+    {
+        // encoder doesn't exists, therefore disabling is harmless
+        return 0;
+    }
+}
+
+#endif
+
+#endif
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/acm_speex.h b/src/modules/audio_coding/main/source/acm_speex.h
new file mode 100644
index 0000000..aabcec1
--- /dev/null
+++ b/src/modules/audio_coding/main/source/acm_speex.h
@@ -0,0 +1,90 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_SPEEX_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_SPEEX_H_
+
+#include "acm_generic_codec.h"
+
+// forward declaration
+struct SPEEX_encinst_t_;
+struct SPEEX_decinst_t_;
+
+namespace webrtc {
+
+class ACMSPEEX : public ACMGenericCodec
+{
+public:
+    ACMSPEEX(WebRtc_Word16 codecID);
+    ~ACMSPEEX();
+    // for FEC
+    ACMGenericCodec* CreateInstance(void);
+
+    WebRtc_Word16 InternalEncode(
+        WebRtc_UWord8* bitstream,
+        WebRtc_Word16* bitStreamLenByte);
+
+    WebRtc_Word16 InternalInitEncoder(
+        WebRtcACMCodecParams *codecParams);
+
+    WebRtc_Word16 InternalInitDecoder(
+        WebRtcACMCodecParams *codecParams);
+
+protected:
+    WebRtc_Word16 DecodeSafe(
+        WebRtc_UWord8* bitStream,
+        WebRtc_Word16  bitStreamLenByte,
+        WebRtc_Word16* audio,
+        WebRtc_Word16* audioSamples,
+        WebRtc_Word8*  speechType);
+
+    WebRtc_Word32 CodecDef(
+        WebRtcNetEQ_CodecDef& codecDef,
+        const CodecInst&      codecInst);
+
+    void DestructEncoderSafe();
+
+    void DestructDecoderSafe();
+
+    WebRtc_Word16 InternalCreateEncoder();
+
+    WebRtc_Word16 InternalCreateDecoder();
+
+    void InternalDestructEncoderInst(
+        void* ptrInst);
+
+    WebRtc_Word16 SetBitRateSafe(
+        const WebRtc_Word32 rate);
+
+    WebRtc_Word16 EnableDTX();
+
+    WebRtc_Word16 DisableDTX();
+
+#ifdef UNUSEDSPEEX
+    WebRtc_Word16 EnableVBR();
+
+    WebRtc_Word16 DisableVBR();
+
+    WebRtc_Word16 SetComplMode(
+        WebRtc_Word16 mode);
+#endif
+
+    SPEEX_encinst_t_* _encoderInstPtr;
+    SPEEX_decinst_t_* _decoderInstPtr;
+    WebRtc_Word16     _complMode;
+    bool              _vbrEnabled;
+    WebRtc_Word32     _encodingRate;
+    WebRtc_Word16     _samplingFrequency;
+    WebRtc_UWord16    _samplesIn20MsAudio;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_ACM_SPEEX_H_
diff --git a/src/modules/audio_coding/main/source/audio_coding_module.cc b/src/modules/audio_coding/main/source/audio_coding_module.cc
new file mode 100644
index 0000000..4fe6dad
--- /dev/null
+++ b/src/modules/audio_coding/main/source/audio_coding_module.cc
@@ -0,0 +1,106 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include "acm_dtmf_detection.h"
+#include "audio_coding_module.h"
+#include "audio_coding_module_impl.h"
+#include "trace.h"
+
+namespace webrtc
+{
+
+// Create module
+AudioCodingModule*
+AudioCodingModule::Create(
+    const WebRtc_Word32 id)
+{
+    return new AudioCodingModuleImpl(id);
+}
+
+// Destroy module
+void
+AudioCodingModule::Destroy(
+        AudioCodingModule* module)
+{
+    delete static_cast<AudioCodingModuleImpl*> (module);
+}
+
+// Get number of supported codecs
+WebRtc_UWord8 AudioCodingModule::NumberOfCodecs()
+{
+    return static_cast<WebRtc_UWord8>(ACMCodecDB::kNumCodecs);
+}
+
+// Get supported codec param with id
+WebRtc_Word32
+AudioCodingModule::Codec(
+    const WebRtc_UWord8 listId,
+    CodecInst&          codec)
+{
+    // Get the codec settings for the codec with the given list ID
+    return ACMCodecDB::Codec(listId, &codec);
+}
+
+// Get supported codec Param with name, frequency and number of channels.
+WebRtc_Word32 AudioCodingModule::Codec(const char* payload_name,
+                                       CodecInst& codec,
+                                       int sampling_freq_hz,
+                                       int channels) {
+  int codec_id;
+
+  // Get the id of the codec from the database.
+  codec_id = ACMCodecDB::CodecId(payload_name, sampling_freq_hz, channels);
+  if (codec_id < 0) {
+    // We couldn't find a matching codec, set the parameterss to unacceptable
+    // values and return.
+    codec.plname[0] = '\0';
+    codec.pltype    = -1;
+    codec.pacsize   = 0;
+    codec.rate      = 0;
+    codec.plfreq    = 0;
+    return -1;
+  }
+
+  // Get default codec settings.
+  ACMCodecDB::Codec(codec_id, &codec);
+
+  return 0;
+}
+
+// Get supported codec Index with name, frequency and number of channels.
+WebRtc_Word32 AudioCodingModule::Codec(const char* payload_name,
+                                       int sampling_freq_hz,
+                                       int channels) {
+  return ACMCodecDB::CodecId(payload_name, sampling_freq_hz, channels);
+}
+
+// Checks the validity of the parameters of the given codec
+bool
+AudioCodingModule::IsCodecValid(
+    const CodecInst& codec)
+{
+    int mirrorID;
+    char errMsg[500];
+
+    int codecNumber = ACMCodecDB::CodecNumber(&codec, &mirrorID, errMsg, 500);
+
+    if(codecNumber < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, -1, errMsg);
+        return false;
+    }
+    else
+    {
+        return true;
+    }
+}
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/audio_coding_module.gypi b/src/modules/audio_coding/main/source/audio_coding_module.gypi
new file mode 100644
index 0000000..bc9ea7d
--- /dev/null
+++ b/src/modules/audio_coding/main/source/audio_coding_module.gypi
@@ -0,0 +1,160 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'variables': {
+    'audio_coding_dependencies': [
+      'CNG',
+      'G711',
+      'G722',
+      'iLBC',
+      'iSAC',
+      'iSACFix',
+      'PCM16B',
+      'NetEq',
+      '<(webrtc_root)/common_audio/common_audio.gyp:resampler',
+      '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+      '<(webrtc_root)/common_audio/common_audio.gyp:vad',
+      '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+    ],
+    'audio_coding_defines': [],
+  },
+  'targets': [
+    {
+      'target_name': 'audio_coding_module',
+      'type': '<(library)',
+      'defines': [
+        '<@(audio_coding_defines)',
+      ],
+      'dependencies': [
+        '<@(audio_coding_dependencies)',
+      ],
+      'include_dirs': [
+        '../interface',
+        '../../../interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+        '../interface',
+        '../../../interface',
+        ],
+      },
+      'sources': [
+        '../interface/audio_coding_module.h',
+        '../interface/audio_coding_module_typedefs.h',
+        'acm_amr.cc',
+        'acm_amr.h',
+        'acm_amrwb.cc',
+        'acm_amrwb.h',
+        'acm_celt.cc',
+        'acm_celt.h',
+        'acm_cng.cc',
+        'acm_cng.h',
+        'acm_codec_database.cc',
+        'acm_codec_database.h',
+        'acm_dtmf_detection.cc',
+        'acm_dtmf_detection.h',
+        'acm_dtmf_playout.cc',
+        'acm_dtmf_playout.h',
+        'acm_g722.cc',
+        'acm_g722.h',
+        'acm_g7221.cc',
+        'acm_g7221.h',
+        'acm_g7221c.cc',
+        'acm_g7221c.h',
+        'acm_g729.cc',
+        'acm_g729.h',
+        'acm_g7291.cc',
+        'acm_g7291.h',
+        'acm_generic_codec.cc',
+        'acm_generic_codec.h',
+        'acm_gsmfr.cc',
+        'acm_gsmfr.h',
+        'acm_ilbc.cc',
+        'acm_ilbc.h',
+        'acm_isac.cc',
+        'acm_isac.h',
+        'acm_isac_macros.h',
+        'acm_neteq.cc',
+        'acm_neteq.h',
+        'acm_opus.cc',
+        'acm_opus.h',
+        'acm_speex.cc',
+        'acm_speex.h',
+        'acm_pcm16b.cc',
+        'acm_pcm16b.h',
+        'acm_pcma.cc',
+        'acm_pcma.h',
+        'acm_pcmu.cc',
+        'acm_pcmu.h',
+        'acm_red.cc',
+        'acm_red.h',
+        'acm_resampler.cc',
+        'acm_resampler.h',
+        'audio_coding_module.cc',
+        'audio_coding_module_impl.cc',
+        'audio_coding_module_impl.h',
+      ],
+    },
+  ],
+  'conditions': [
+    ['include_tests==1', {
+      'targets': [
+        {
+          'target_name': 'audio_coding_module_test',
+          'type': 'executable',
+          'dependencies': [
+            'audio_coding_module',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+          ],
+          'sources': [
+             '../test/ACMTest.cc',
+             '../test/APITest.cc',
+             '../test/Channel.cc',
+             '../test/EncodeDecodeTest.cc',
+             '../test/iSACTest.cc',
+             '../test/PCMFile.cc',
+             '../test/RTPFile.cc',
+             '../test/SpatialAudio.cc',
+             '../test/TestAllCodecs.cc',
+             '../test/Tester.cc',
+             '../test/TestFEC.cc',
+             '../test/TestStereo.cc',
+             '../test/TestVADDTX.cc',
+             '../test/TimedTrace.cc',
+             '../test/TwoWayCommunication.cc',
+             '../test/utility.cc',
+          ],
+        },
+        {
+          'target_name': 'audio_coding_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'audio_coding_module',
+            'NetEq',
+            '<(webrtc_root)/common_audio/common_audio.gyp:vad',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+          ],
+          'sources': [
+             'acm_neteq_unittest.cc',
+          ],
+        }, # audio_coding_unittests
+      ],
+    }],
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/modules/audio_coding/main/source/audio_coding_module_impl.cc b/src/modules/audio_coding/main/source/audio_coding_module_impl.cc
new file mode 100644
index 0000000..c1341b9
--- /dev/null
+++ b/src/modules/audio_coding/main/source/audio_coding_module_impl.cc
@@ -0,0 +1,2334 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_coding_module_impl.h"
+
+#include <assert.h>
+#include <stdlib.h>
+#ifdef ACM_QA_TEST
+#   include <stdio.h>
+#endif
+
+#include "acm_codec_database.h"
+#include "acm_common_defs.h"
+#include "acm_dtmf_detection.h"
+#include "acm_generic_codec.h"
+#include "acm_resampler.h"
+#include "critical_section_wrapper.h"
+#include "engine_configurations.h"
+#include "rw_lock_wrapper.h"
+#include "trace.h"
+
+namespace webrtc {
+
+enum {
+  kACMToneEnd = 999
+};
+
+// Maximum number of bytes in one packet (PCM16B, 20 ms packets, stereo).
+enum {
+  kMaxPacketSize = 2560
+};
+
+AudioCodingModuleImpl::AudioCodingModuleImpl(const WebRtc_Word32 id)
+    : _packetizationCallback(NULL),
+      _id(id),
+      _lastTimestamp(0),
+      _lastInTimestamp(0),
+      _cng_nb_pltype(255),
+      _cng_wb_pltype(255),
+      _cng_swb_pltype(255),
+      _red_pltype(255),
+      _vadEnabled(false),
+      _dtxEnabled(false),
+      _vadMode(VADNormal),
+      _stereoReceiveRegistered(false),
+      _stereoSend(false),
+      _prev_received_channel(0),
+      _expected_channels(1),
+      _currentSendCodecIdx(-1),
+      _current_receive_codec_idx(-1),
+      _sendCodecRegistered(false),
+      _acmCritSect(CriticalSectionWrapper::CreateCriticalSection()),
+      _vadCallback(NULL),
+      _lastRecvAudioCodecPlType(255),
+      _isFirstRED(true),
+      _fecEnabled(false),
+      _fragmentation(NULL),
+      _lastFECTimestamp(0),
+      _receiveREDPayloadType(255),
+      _previousPayloadType(255),
+      _dummyRTPHeader(NULL),
+      _recvPlFrameSizeSmpls(0),
+      _receiverInitialized(false),
+      _dtmfDetector(NULL),
+      _dtmfCallback(NULL),
+      _lastDetectedTone(kACMToneEnd),
+      _callbackCritSect(CriticalSectionWrapper::CreateCriticalSection()) {
+  _lastTimestamp = 0xD87F3F9F;
+  _lastInTimestamp = 0xD87F3F9F;
+
+  // Nullify send codec memory, set payload type and set codec name to
+  // invalid values.
+  memset(&_sendCodecInst, 0, sizeof(CodecInst));
+  strncpy(_sendCodecInst.plname, "noCodecRegistered", 31);
+  _sendCodecInst.pltype = -1;
+
+  for (int i = 0; i < ACMCodecDB::kMaxNumCodecs; i++) {
+    _codecs[i] = NULL;
+    _registeredPlTypes[i] = -1;
+    _stereoReceive[i] = false;
+    _slaveCodecs[i] = NULL;
+    _mirrorCodecIdx[i] = -1;
+  }
+
+  _netEq.SetUniqueId(_id);
+
+  // Allocate memory for RED.
+  _redBuffer = new WebRtc_UWord8[MAX_PAYLOAD_SIZE_BYTE];
+  _fragmentation = new RTPFragmentationHeader;
+  _fragmentation->fragmentationVectorSize = 2;
+  _fragmentation->fragmentationOffset = new WebRtc_UWord32[2];
+  _fragmentation->fragmentationLength = new WebRtc_UWord32[2];
+  _fragmentation->fragmentationTimeDiff = new WebRtc_UWord16[2];
+  _fragmentation->fragmentationPlType = new WebRtc_UWord8[2];
+
+  // Register the default payload type for RED and for CNG for the three
+  // frequencies 8, 16 and 32 kHz.
+  for (int i = (ACMCodecDB::kNumCodecs - 1); i >= 0; i--) {
+    if (IsCodecRED(i)) {
+      _red_pltype = static_cast<uint8_t>(ACMCodecDB::database_[i].pltype);
+    } else if (IsCodecCN(i)) {
+      if (ACMCodecDB::database_[i].plfreq == 8000) {
+        _cng_nb_pltype = static_cast<uint8_t>(ACMCodecDB::database_[i].pltype);
+      } else if (ACMCodecDB::database_[i].plfreq == 16000) {
+        _cng_wb_pltype = static_cast<uint8_t>(ACMCodecDB::database_[i].pltype);
+      } else if (ACMCodecDB::database_[i].plfreq == 32000) {
+        _cng_swb_pltype = static_cast<uint8_t>(ACMCodecDB::database_[i].pltype);
+      }
+    }
+  }
+
+  if (InitializeReceiverSafe() < 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "Cannot initialize reciever");
+  }
+#ifdef ACM_QA_TEST
+  char file_name[500];
+  sprintf(file_name, "ACM_QA_incomingPL_%03d_%d%d%d%d%d%d.dat", _id,
+          rand() % 10, rand() % 10, rand() % 10, rand() % 10, rand() % 10,
+          rand() % 10);
+  _incomingPL = fopen(file_name, "wb");
+  sprintf(file_name, "ACM_QA_outgoingPL_%03d_%d%d%d%d%d%d.dat", _id,
+          rand() % 10, rand() % 10, rand() % 10, rand() % 10, rand() % 10,
+          rand() % 10);
+  _outgoingPL = fopen(file_name, "wb");
+#endif
+
+  WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceAudioCoding, id, "Created");
+}
+
+AudioCodingModuleImpl::~AudioCodingModuleImpl() {
+  {
+    CriticalSectionScoped lock(_acmCritSect);
+    _currentSendCodecIdx = -1;
+
+    for (int i = 0; i < ACMCodecDB::kMaxNumCodecs; i++) {
+      if (_codecs[i] != NULL) {
+        // True stereo codecs share the same memory for master and
+        // slave, so slave codec need to be nullified here, since the
+        // memory will be deleted.
+        if (_slaveCodecs[i] == _codecs[i]) {
+          _slaveCodecs[i] = NULL;
+        }
+
+        // Mirror index holds the address of the codec memory.
+        assert(_mirrorCodecIdx[i] > -1);
+        if (_codecs[_mirrorCodecIdx[i]] != NULL) {
+          delete _codecs[_mirrorCodecIdx[i]];
+          _codecs[_mirrorCodecIdx[i]] = NULL;
+        }
+
+        _codecs[i] = NULL;
+      }
+
+      if (_slaveCodecs[i] != NULL) {
+        // Delete memory for stereo usage of mono codecs.
+        assert(_mirrorCodecIdx[i] > -1);
+        if (_slaveCodecs[_mirrorCodecIdx[i]] != NULL) {
+          delete _slaveCodecs[_mirrorCodecIdx[i]];
+          _slaveCodecs[_mirrorCodecIdx[i]] = NULL;
+        }
+        _slaveCodecs[i] = NULL;
+      }
+    }
+
+    if (_dtmfDetector != NULL) {
+      delete _dtmfDetector;
+      _dtmfDetector = NULL;
+    }
+    if (_dummyRTPHeader != NULL) {
+      delete _dummyRTPHeader;
+      _dummyRTPHeader = NULL;
+    }
+    if (_redBuffer != NULL) {
+      delete[] _redBuffer;
+      _redBuffer = NULL;
+    }
+    if (_fragmentation != NULL) {
+      // Only need to delete fragmentation header, it will clean
+      // up it's own memory.
+      delete _fragmentation;
+      _fragmentation = NULL;
+    }
+  }
+
+#ifdef ACM_QA_TEST
+  if(_incomingPL != NULL) {
+    fclose(_incomingPL);
+  }
+
+  if(_outgoingPL != NULL) {
+    fclose(_outgoingPL);
+  }
+#endif
+
+  delete _callbackCritSect;
+  _callbackCritSect = NULL;
+
+  delete _acmCritSect;
+  _acmCritSect = NULL;
+  WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceAudioCoding, _id,
+               "Destroyed");
+}
+
+WebRtc_Word32 AudioCodingModuleImpl::ChangeUniqueId(const WebRtc_Word32 id) {
+  {
+    CriticalSectionScoped lock(_acmCritSect);
+    _id = id;
+
+#ifdef ACM_QA_TEST
+    if (_incomingPL != NULL) {
+      fclose (_incomingPL);
+    }
+    if (_outgoingPL != NULL) {
+      fclose (_outgoingPL);
+    }
+    char fileName[500];
+    sprintf(fileName, "ACM_QA_incomingPL_%03d_%d%d%d%d%d%d.dat", _id,
+            rand() % 10, rand() % 10, rand() % 10, rand() % 10, rand() % 10,
+            rand() % 10);
+    _incomingPL = fopen(fileName, "wb");
+    sprintf(fileName, "ACM_QA_outgoingPL_%03d_%d%d%d%d%d%d.dat", _id,
+            rand() % 10, rand() % 10, rand() % 10, rand() % 10, rand() % 10,
+            rand() % 10);
+    _outgoingPL = fopen(fileName, "wb");
+#endif
+
+    for (int i = 0; i < ACMCodecDB::kMaxNumCodecs; i++) {
+      if (_codecs[i] != NULL) {
+        _codecs[i]->SetUniqueID(id);
+      }
+    }
+  }
+
+  _netEq.SetUniqueId(_id);
+  return 0;
+}
+
+// Returns the number of milliseconds until the module want a
+// worker thread to call Process.
+WebRtc_Word32 AudioCodingModuleImpl::TimeUntilNextProcess() {
+  CriticalSectionScoped lock(_acmCritSect);
+
+  if (!HaveValidEncoder("TimeUntilNextProcess")) {
+    return -1;
+  }
+  return _codecs[_currentSendCodecIdx]->SamplesLeftToEncode() /
+      (_sendCodecInst.plfreq / 1000);
+}
+
+// Process any pending tasks such as timeouts.
+WebRtc_Word32 AudioCodingModuleImpl::Process() {
+  // Make room for 1 RED payload.
+  WebRtc_UWord8 stream[2 * MAX_PAYLOAD_SIZE_BYTE];
+  WebRtc_Word16 length_bytes = 2 * MAX_PAYLOAD_SIZE_BYTE;
+  WebRtc_Word16 red_length_bytes = length_bytes;
+  WebRtc_UWord32 rtp_timestamp;
+  WebRtc_Word16 status;
+  WebRtcACMEncodingType encoding_type;
+  FrameType frame_type = kAudioFrameSpeech;
+  WebRtc_UWord8 current_payload_type = 0;
+  bool has_data_to_send = false;
+  bool fec_active = false;
+
+  // Keep the scope of the ACM critical section limited.
+  {
+    CriticalSectionScoped lock(_acmCritSect);
+    if (!HaveValidEncoder("Process")) {
+      return -1;
+    }
+
+    status = _codecs[_currentSendCodecIdx]->Encode(stream, &length_bytes,
+                                                   &rtp_timestamp,
+                                                   &encoding_type);
+    if (status < 0) {
+      // Encode failed.
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                   "Process(): Encoding Failed");
+      length_bytes = 0;
+      return -1;
+    } else if (status == 0) {
+      // Not enough data.
+      return 0;
+    } else {
+      switch (encoding_type) {
+        case kNoEncoding: {
+          current_payload_type = _previousPayloadType;
+          frame_type = kFrameEmpty;
+          length_bytes = 0;
+          break;
+        }
+        case kActiveNormalEncoded:
+        case kPassiveNormalEncoded: {
+          current_payload_type = (WebRtc_UWord8) _sendCodecInst.pltype;
+          frame_type = kAudioFrameSpeech;
+          break;
+        }
+        case kPassiveDTXNB: {
+          current_payload_type = _cng_nb_pltype;
+          frame_type = kAudioFrameCN;
+          _isFirstRED = true;
+          break;
+        }
+        case kPassiveDTXWB: {
+          current_payload_type = _cng_wb_pltype;
+          frame_type = kAudioFrameCN;
+          _isFirstRED = true;
+          break;
+        }
+        case kPassiveDTXSWB: {
+          current_payload_type = _cng_swb_pltype;
+          frame_type = kAudioFrameCN;
+          _isFirstRED = true;
+          break;
+        }
+      }
+      has_data_to_send = true;
+      _previousPayloadType = current_payload_type;
+
+      // Redundancy encode is done here. The two bitstreams packetized into
+      // one RTP packet and the fragmentation points are set.
+      // Only apply RED on speech data.
+      if ((_fecEnabled) &&
+          ((encoding_type == kActiveNormalEncoded) ||
+              (encoding_type == kPassiveNormalEncoded))) {
+        // FEC is enabled within this scope.
+        //
+        // Note that, a special solution exists for iSAC since it is the only
+        // codec for which getRedPayload has a non-empty implementation.
+        //
+        // Summary of the FEC scheme below (use iSAC as example):
+        //
+        //  1st (_firstRED is true) encoded iSAC frame (primary #1) =>
+        //      - call getRedPayload() and store redundancy for packet #1 in
+        //        second fragment of RED buffer (old data)
+        //      - drop the primary iSAC frame
+        //      - don't call SendData
+        //  2nd (_firstRED is false) encoded iSAC frame (primary #2) =>
+        //      - store primary #2 in 1st fragment of RED buffer and send the
+        //        combined packet
+        //      - the transmitted packet contains primary #2 (new) and
+        //        reduncancy for packet #1 (old)
+        //      - call getRedPayload() and store redundancy for packet #2 in
+        //        second fragment of RED buffer
+        //
+        //  ...
+        //
+        //  Nth encoded iSAC frame (primary #N) =>
+        //      - store primary #N in 1st fragment of RED buffer and send the
+        //        combined packet
+        //      - the transmitted packet contains primary #N (new) and
+        //        reduncancy for packet #(N-1) (old)
+        //      - call getRedPayload() and store redundancy for packet #N in
+        //        second fragment of RED buffer
+        //
+        //  For all other codecs, getRedPayload does nothing and returns -1 =>
+        //  redundant data is only a copy.
+        //
+        //  First combined packet contains : #2 (new) and #1 (old)
+        //  Second combined packet contains: #3 (new) and #2 (old)
+        //  Third combined packet contains : #4 (new) and #3 (old)
+        //
+        //  Hence, even if every second packet is dropped, perfect
+        //  reconstruction is possible.
+        fec_active = true;
+
+        has_data_to_send = false;
+        // Skip the following part for the first packet in a RED session.
+        if (!_isFirstRED) {
+          // Rearrange stream such that FEC packets are included.
+          // Replace stream now that we have stored current stream.
+          memcpy(stream + _fragmentation->fragmentationOffset[1], _redBuffer,
+                 _fragmentation->fragmentationLength[1]);
+          // Update the fragmentation time difference vector, in number of
+          // timestamps.
+          WebRtc_UWord16 time_since_last = WebRtc_UWord16(
+              rtp_timestamp - _lastFECTimestamp);
+
+          // Update fragmentation vectors.
+          _fragmentation->fragmentationPlType[1] = _fragmentation
+              ->fragmentationPlType[0];
+          _fragmentation->fragmentationTimeDiff[1] = time_since_last;
+          has_data_to_send = true;
+        }
+
+        // Insert new packet length.
+        _fragmentation->fragmentationLength[0] = length_bytes;
+
+        // Insert new packet payload type.
+        _fragmentation->fragmentationPlType[0] = current_payload_type;
+        _lastFECTimestamp = rtp_timestamp;
+
+        // Can be modified by the GetRedPayload() call if iSAC is utilized.
+        red_length_bytes = length_bytes;
+
+        // A fragmentation header is provided => packetization according to
+        // RFC 2198 (RTP Payload for Redundant Audio Data) will be used.
+        // First fragment is the current data (new).
+        // Second fragment is the previous data (old).
+        length_bytes = static_cast<WebRtc_Word16>(
+            _fragmentation->fragmentationLength[0] +
+            _fragmentation->fragmentationLength[1]);
+
+        // Get, and store, redundant data from the encoder based on the recently
+        // encoded frame.
+        // NOTE - only iSAC contains an implementation; all other codecs does
+        // nothing and returns -1.
+        if (_codecs[_currentSendCodecIdx]->GetRedPayload(
+            _redBuffer,
+            &red_length_bytes) == -1) {
+          // The codec was not iSAC => use current encoder output as redundant
+          // data instead (trivial FEC scheme).
+          memcpy(_redBuffer, stream, red_length_bytes);
+        }
+
+        _isFirstRED = false;
+        // Update payload type with RED payload type.
+        current_payload_type = _red_pltype;
+      }
+    }
+  }
+
+  if (has_data_to_send) {
+    CriticalSectionScoped lock(_callbackCritSect);
+#ifdef ACM_QA_TEST
+    if(_outgoingPL != NULL) {
+      if (fwrite(&rtp_timestamp, sizeof(WebRtc_UWord32), 1, _outgoingPL) != 1) {
+        return -1;
+      }
+      if (fwrite(&current_payload_type, sizeof(WebRtc_UWord8),
+                 1, _outgoingPL) != 1) {
+        return -1;
+      }
+      if (fwrite(&length_bytes, sizeof(WebRtc_Word16), 1, _outgoingPL) != 1) {
+        return -1;
+      }
+    }
+#endif
+
+    if (_packetizationCallback != NULL) {
+      if (fec_active) {
+        // Callback with payload data, including redundant data (FEC/RED).
+        _packetizationCallback->SendData(frame_type, current_payload_type,
+                                         rtp_timestamp, stream, length_bytes,
+                                         _fragmentation);
+      } else {
+        // Callback with payload data.
+        _packetizationCallback->SendData(frame_type, current_payload_type,
+                                         rtp_timestamp, stream, length_bytes,
+                                         NULL);
+      }
+    }
+
+    if (_vadCallback != NULL) {
+      // Callback with VAD decision.
+      _vadCallback->InFrameType(((WebRtc_Word16) encoding_type));
+    }
+  }
+  if (fec_active) {
+    // Store RED length in bytes.
+    _fragmentation->fragmentationLength[1] = red_length_bytes;
+  }
+  return length_bytes;
+}
+
+/////////////////////////////////////////
+//   Sender
+//
+
+// Initialize send codec.
+WebRtc_Word32 AudioCodingModuleImpl::InitializeSender() {
+  CriticalSectionScoped lock(_acmCritSect);
+
+  // Start with invalid values.
+  _sendCodecRegistered = false;
+  _currentSendCodecIdx = -1;
+  _sendCodecInst.plname[0] = '\0';
+
+  // Delete all encoders to start fresh.
+  for (int id = 0; id < ACMCodecDB::kMaxNumCodecs; id++) {
+    if (_codecs[id] != NULL) {
+      _codecs[id]->DestructEncoder();
+    }
+  }
+
+  // Initialize FEC/RED.
+  _isFirstRED = true;
+  if (_fecEnabled) {
+    if (_redBuffer != NULL) {
+      memset(_redBuffer, 0, MAX_PAYLOAD_SIZE_BYTE);
+    }
+    if (_fragmentation != NULL) {
+      _fragmentation->fragmentationVectorSize = 2;
+      _fragmentation->fragmentationOffset[0] = 0;
+      _fragmentation->fragmentationOffset[0] = MAX_PAYLOAD_SIZE_BYTE;
+      memset(_fragmentation->fragmentationLength, 0,
+             sizeof(WebRtc_UWord32) * 2);
+      memset(_fragmentation->fragmentationTimeDiff, 0,
+             sizeof(WebRtc_UWord16) * 2);
+      memset(_fragmentation->fragmentationPlType, 0, sizeof(WebRtc_UWord8) * 2);
+    }
+  }
+
+  return 0;
+}
+
+WebRtc_Word32 AudioCodingModuleImpl::ResetEncoder() {
+  CriticalSectionScoped lock(_acmCritSect);
+  if (!HaveValidEncoder("ResetEncoder")) {
+    return -1;
+  }
+  return _codecs[_currentSendCodecIdx]->ResetEncoder();
+}
+
+void AudioCodingModuleImpl::UnregisterSendCodec() {
+  CriticalSectionScoped lock(_acmCritSect);
+  _sendCodecRegistered = false;
+  _currentSendCodecIdx = -1;
+  return;
+}
+
+ACMGenericCodec* AudioCodingModuleImpl::CreateCodec(const CodecInst& codec) {
+  ACMGenericCodec* my_codec = NULL;
+
+  my_codec = ACMCodecDB::CreateCodecInstance(&codec);
+  if (my_codec == NULL) {
+    // Error, could not create the codec.
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "ACMCodecDB::CreateCodecInstance() failed in CreateCodec()");
+    return my_codec;
+  }
+  my_codec->SetUniqueID(_id);
+  my_codec->SetNetEqDecodeLock(_netEq.DecodeLock());
+
+  return my_codec;
+}
+
+// Can be called multiple times for Codec, CNG, RED.
+WebRtc_Word32 AudioCodingModuleImpl::RegisterSendCodec(
+    const CodecInst& send_codec) {
+  if ((send_codec.channels != 1) && (send_codec.channels != 2)) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "Registering Send codec failed due to wrong number of "
+                 "channels, %d. Only mono codecs are supported, i.e. "
+                 "channels=1.", send_codec.channels);
+    return -1;
+  }
+
+  char error_message[500];
+  int mirror_id;
+  int codec_id = ACMCodecDB::CodecNumber(&send_codec, &mirror_id, error_message,
+                                        sizeof(error_message));
+  CriticalSectionScoped lock(_acmCritSect);
+
+  // Check for reported errors from function CodecNumber().
+  if (codec_id < 0) {
+    if (!_sendCodecRegistered) {
+      // This values has to be NULL if there is no codec registered.
+      _currentSendCodecIdx = -1;
+    }
+    // Failed to register Send Codec.
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 error_message);
+    return -1;
+  }
+
+  // Telephone-event cannot be a send codec.
+  if (!STR_CASE_CMP(send_codec.plname, "telephone-event")) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "telephone-event cannot be registered as send codec");
+    return -1;
+  }
+
+  // RED can be registered with other payload type. If not registered a default
+  // payload type is used.
+  if (IsCodecRED(&send_codec)) {
+    // TODO(tlegrand): Remove this check. Already taken care of in
+    // ACMCodecDB::CodecNumber().
+    // Check if the payload-type is valid
+    if (!ACMCodecDB::ValidPayloadType(send_codec.pltype)) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                   "Invalid payload-type %d for %s.", send_codec.pltype,
+                   send_codec.plname);
+      return -1;
+    }
+    // Set RED payload type.
+    _red_pltype = static_cast<uint8_t>(send_codec.pltype);
+    return 0;
+  }
+
+  // CNG can be registered with other payload type. If not registered the
+  // default payload types from codec database will be used.
+  if (IsCodecCN(&send_codec)) {
+    // CNG is registered.
+    switch (send_codec.plfreq) {
+      case 8000: {
+        _cng_nb_pltype = static_cast<uint8_t>(send_codec.pltype);
+        break;
+      }
+      case 16000: {
+        _cng_wb_pltype = static_cast<uint8_t>(send_codec.pltype);
+        break;
+      }
+      case 32000: {
+        _cng_swb_pltype = static_cast<uint8_t>(send_codec.pltype);
+        break;
+      }
+      default: {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                     "RegisterSendCodec() failed, invalid frequency for CNG "
+                     "registration");
+        return -1;
+      }
+    }
+
+    return 0;
+  }
+
+  // TODO(tlegrand): Remove this check. Already taken care of in
+  // ACMCodecDB::CodecNumber().
+  // Check if the payload-type is valid
+  if (!ACMCodecDB::ValidPayloadType(send_codec.pltype)) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "Invalid payload-type %d for %s.", send_codec.pltype,
+                 send_codec.plname);
+    return -1;
+  }
+
+  // Check if codec supports the number of channels.
+  if (ACMCodecDB::codec_settings_[codec_id].channel_support
+      < send_codec.channels) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "%d number of channels not supportedn for %s.",
+                 send_codec.channels, send_codec.plname);
+    return -1;
+  }
+
+  // Set Stereo, and make sure VAD and DTX is turned off.
+  if (send_codec.channels == 2) {
+    _stereoSend = true;
+    if (_vadEnabled || _dtxEnabled) {
+      WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, _id,
+                   "VAD/DTX is turned off, not supported when sending stereo.");
+    }
+    _vadEnabled = false;
+    _dtxEnabled = false;
+  } else {
+    _stereoSend = false;
+  }
+
+  // Check if the codec is already registered as send codec.
+  bool is_send_codec;
+  if (_sendCodecRegistered) {
+    int send_codec_mirror_id;
+    int send_codec_id = ACMCodecDB::CodecNumber(&_sendCodecInst,
+                                              &send_codec_mirror_id);
+    assert(send_codec_id >= 0);
+    is_send_codec = (send_codec_id == codec_id) ||
+        (mirror_id == send_codec_mirror_id);
+  } else {
+    is_send_codec = false;
+  }
+
+  // If new codec, or new settings, register.
+  if (!is_send_codec) {
+    if (_codecs[mirror_id] == NULL) {
+
+      _codecs[mirror_id] = CreateCodec(send_codec);
+      if (_codecs[mirror_id] == NULL) {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                     "Cannot Create the codec");
+        return -1;
+      }
+      _mirrorCodecIdx[mirror_id] = mirror_id;
+    }
+
+    if (mirror_id != codec_id) {
+      _codecs[codec_id] = _codecs[mirror_id];
+      _mirrorCodecIdx[codec_id] = mirror_id;
+    }
+
+    ACMGenericCodec* codec_ptr = _codecs[codec_id];
+    WebRtc_Word16 status;
+    WebRtcACMCodecParams codec_params;
+
+    memcpy(&(codec_params.codecInstant), &send_codec, sizeof(CodecInst));
+    codec_params.enableVAD = _vadEnabled;
+    codec_params.enableDTX = _dtxEnabled;
+    codec_params.vadMode = _vadMode;
+    // Force initialization.
+    status = codec_ptr->InitEncoder(&codec_params, true);
+
+    // Check if VAD was turned on, or if error is reported.
+    if (status == 1) {
+      _vadEnabled = true;
+    } else if (status < 0) {
+      // Could not initialize the encoder.
+
+      // Check if already have a registered codec.
+      // Depending on that different messages are logged.
+      if (!_sendCodecRegistered) {
+        _currentSendCodecIdx = -1;
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                     "Cannot Initialize the encoder No Encoder is registered");
+      } else {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                     "Cannot Initialize the encoder, continue encoding with "
+                     "the previously registered codec");
+      }
+      return -1;
+    }
+
+    // Everything is fine so we can replace the previous codec with this one.
+    if (_sendCodecRegistered) {
+      // If we change codec we start fresh with FEC.
+      // This is not strictly required by the standard.
+      _isFirstRED = true;
+
+      if (codec_ptr->SetVAD(_dtxEnabled, _vadEnabled, _vadMode) < 0) {
+        // SetVAD failed.
+        _vadEnabled = false;
+        _dtxEnabled = false;
+      }
+    }
+
+    _currentSendCodecIdx = codec_id;
+    _sendCodecRegistered = true;
+    memcpy(&_sendCodecInst, &send_codec, sizeof(CodecInst));
+    _previousPayloadType = _sendCodecInst.pltype;
+    return 0;
+  } else {
+    // If codec is the same as already registered check if any parameters
+    // has changed compared to the current values.
+    // If any parameter is valid then apply it and record.
+    bool force_init = false;
+
+    if (mirror_id != codec_id) {
+      _codecs[codec_id] = _codecs[mirror_id];
+      _mirrorCodecIdx[codec_id] = mirror_id;
+    }
+
+    // Check the payload type.
+    if (send_codec.pltype != _sendCodecInst.pltype) {
+      // At this point check if the given payload type is valid.
+      // Record it later when the sampling frequency is changed
+      // successfully.
+      if (!ACMCodecDB::ValidPayloadType(send_codec.pltype)) {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                     "Out of range payload type");
+        return -1;
+      }
+    }
+
+    // If there is a codec that ONE instance of codec supports multiple
+    // sampling frequencies, then we need to take care of it here.
+    // one such a codec is iSAC. Both WB and SWB are encoded and decoded
+    // with one iSAC instance. Therefore, we need to update the encoder
+    // frequency if required.
+    if (_sendCodecInst.plfreq != send_codec.plfreq) {
+      force_init = true;
+
+      // If sampling frequency is changed we have to start fresh with RED.
+      _isFirstRED = true;
+    }
+
+    // If packet size or number of channels has changed, we need to
+    // re-initialize the encoder.
+    if (_sendCodecInst.pacsize != send_codec.pacsize) {
+      force_init = true;
+    }
+    if (_sendCodecInst.channels != send_codec.channels) {
+      force_init = true;
+    }
+
+    if (force_init) {
+      WebRtcACMCodecParams codec_params;
+
+      memcpy(&(codec_params.codecInstant), &send_codec, sizeof(CodecInst));
+      codec_params.enableVAD = _vadEnabled;
+      codec_params.enableDTX = _dtxEnabled;
+      codec_params.vadMode = _vadMode;
+
+      // Force initialization.
+      if (_codecs[_currentSendCodecIdx]->InitEncoder(&codec_params, true) < 0) {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                     "Could not change the codec packet-size.");
+        return -1;
+      }
+
+      _sendCodecInst.plfreq = send_codec.plfreq;
+      _sendCodecInst.pacsize = send_codec.pacsize;
+      _sendCodecInst.channels = send_codec.channels;
+    }
+
+    // If the change of sampling frequency has been successful then
+    // we store the payload-type.
+    _sendCodecInst.pltype = send_codec.pltype;
+
+    // Check if a change in Rate is required.
+    if (send_codec.rate != _sendCodecInst.rate) {
+      if (_codecs[codec_id]->SetBitRate(send_codec.rate) < 0) {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                     "Could not change the codec rate.");
+        return -1;
+      }
+      _sendCodecInst.rate = send_codec.rate;
+    }
+    _previousPayloadType = _sendCodecInst.pltype;
+
+    return 0;
+  }
+}
+
+// Get current send codec.
+WebRtc_Word32 AudioCodingModuleImpl::SendCodec(
+    CodecInst& current_codec) const {
+  WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, _id,
+               "SendCodec()");
+  CriticalSectionScoped lock(_acmCritSect);
+
+  if (!_sendCodecRegistered) {
+    WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, _id,
+                 "SendCodec Failed, no codec is registered");
+
+    return -1;
+  }
+  WebRtcACMCodecParams encoder_param;
+  _codecs[_currentSendCodecIdx]->EncoderParams(&encoder_param);
+  encoder_param.codecInstant.pltype = _sendCodecInst.pltype;
+  memcpy(&current_codec, &(encoder_param.codecInstant), sizeof(CodecInst));
+
+  return 0;
+}
+
+// Get current send frequency.
+WebRtc_Word32 AudioCodingModuleImpl::SendFrequency() const {
+  WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, _id,
+               "SendFrequency()");
+  CriticalSectionScoped lock(_acmCritSect);
+
+  if (!_sendCodecRegistered) {
+    WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, _id,
+                 "SendFrequency Failed, no codec is registered");
+
+    return -1;
+  }
+
+  return _sendCodecInst.plfreq;
+}
+
+// Get encode bitrate.
+// Adaptive rate codecs return their current encode target rate, while other
+// codecs return there longterm avarage or their fixed rate.
+WebRtc_Word32 AudioCodingModuleImpl::SendBitrate() const {
+  CriticalSectionScoped lock(_acmCritSect);
+
+  if (!_sendCodecRegistered) {
+    WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, _id,
+                 "SendBitrate Failed, no codec is registered");
+
+    return -1;
+  }
+
+  WebRtcACMCodecParams encoder_param;
+  _codecs[_currentSendCodecIdx]->EncoderParams(&encoder_param);
+
+  return encoder_param.codecInstant.rate;
+}
+
+// Set available bandwidth, inform the encoder about the estimated bandwidth
+// received from the remote party.
+WebRtc_Word32 AudioCodingModuleImpl::SetReceivedEstimatedBandwidth(
+    const WebRtc_Word32 bw) {
+  return _codecs[_currentSendCodecIdx]->SetEstimatedBandwidth(bw);
+}
+
+// Register a transport callback which will be called to deliver
+// the encoded buffers.
+WebRtc_Word32 AudioCodingModuleImpl::RegisterTransportCallback(
+    AudioPacketizationCallback* transport) {
+  CriticalSectionScoped lock(_callbackCritSect);
+  _packetizationCallback = transport;
+  return 0;
+}
+
+// Used by the module to deliver messages to the codec module/application
+// AVT(DTMF).
+WebRtc_Word32 AudioCodingModuleImpl::RegisterIncomingMessagesCallback(
+#ifndef WEBRTC_DTMF_DETECTION
+    AudioCodingFeedback* /* incoming_message */,
+    const ACMCountries /* cpt */) {
+  return -1;
+#else
+    AudioCodingFeedback* incoming_message,
+    const ACMCountries cpt) {
+  WebRtc_Word16 status = 0;
+
+  // Enter the critical section for callback.
+  {
+    CriticalSectionScoped lock(_callbackCritSect);
+    _dtmfCallback = incoming_message;
+  }
+  // Enter the ACM critical section to set up the DTMF class.
+  {
+    CriticalSectionScoped lock(_acmCritSect);
+    // Check if the call is to disable or enable the callback.
+    if (incoming_message == NULL) {
+      // Callback is disabled, delete DTMF-detector class.
+      if (_dtmfDetector != NULL) {
+        delete _dtmfDetector;
+        _dtmfDetector = NULL;
+      }
+      status = 0;
+    } else {
+      status = 0;
+      if (_dtmfDetector == NULL) {
+        _dtmfDetector = new (ACMDTMFDetection);
+        if (_dtmfDetector == NULL) {
+          status = -1;
+        }
+      }
+      if (status >= 0) {
+        status = _dtmfDetector->Enable(cpt);
+        if (status < 0) {
+          // Failed to initialize if DTMF-detection was not enabled before,
+          // delete the class, and set the callback to NULL and return -1.
+          delete _dtmfDetector;
+          _dtmfDetector = NULL;
+        }
+      }
+    }
+  }
+  // Check if we failed in setting up the DTMF-detector class.
+  if ((status < 0)) {
+    // We failed, we cannot have the callback.
+    CriticalSectionScoped lock(_callbackCritSect);
+    _dtmfCallback = NULL;
+  }
+
+  return status;
+#endif
+}
+
+// Add 10MS of raw (PCM) audio data to the encoder.
+WebRtc_Word32 AudioCodingModuleImpl::Add10MsData(
+    const AudioFrame& audio_frame) {
+  // Do we have a codec registered?
+  CriticalSectionScoped lock(_acmCritSect);
+  if (!HaveValidEncoder("Add10MsData")) {
+    return -1;
+  }
+
+  if (audio_frame.samples_per_channel_ == 0) {
+    assert(false);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "Cannot Add 10 ms audio, payload length is zero");
+    return -1;
+  }
+  // Allow for 8, 16, 32 and 48kHz input audio.
+  if ((audio_frame.sample_rate_hz_ != 8000)
+      && (audio_frame.sample_rate_hz_ != 16000)
+      && (audio_frame.sample_rate_hz_ != 32000)
+      && (audio_frame.sample_rate_hz_ != 48000)) {
+    assert(false);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "Cannot Add 10 ms audio, input frequency not valid");
+    return -1;
+  }
+
+  // If the length and frequency matches. We currently just support raw PCM.
+  if ((audio_frame.sample_rate_hz_ / 100)
+      != audio_frame.samples_per_channel_) {
+    WEBRTC_TRACE(
+        webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+        "Cannot Add 10 ms audio, input frequency and length doesn't match");
+    return -1;
+  }
+
+  // Calculate the timestamp that should be pushed to codec.
+  // This might be different from the timestamp of the frame
+  // due to re-sampling.
+  bool resample = ((WebRtc_Word32) audio_frame.sample_rate_hz_
+      != _sendCodecInst.plfreq);
+
+  // If number of channels in audio doesn't match codec mode, we need
+  // either mono-to-stereo or stereo-to-mono conversion.
+  WebRtc_Word16 audio[WEBRTC_10MS_PCM_AUDIO];
+  int audio_channels = _sendCodecInst.channels;
+  // TODO(andrew): reuse RemixAndResample here? The upmixing should be done
+  // after resampling. (Would require moving it somewhere common).
+  if (audio_frame.num_channels_ != audio_channels) {
+    if (audio_channels == 2) {
+      // Do mono-to-stereo conversion by copying each sample.
+      for (int k = 0; k < audio_frame.samples_per_channel_; k++) {
+        audio[k * 2] = audio_frame.data_[k];
+        audio[(k * 2) + 1] = audio_frame.data_[k];
+      }
+    } else if (audio_channels == 1) {
+      // Do stereo-to-mono conversion by creating the average of the stereo
+      // samples.
+      for (int k = 0; k < audio_frame.samples_per_channel_; k++) {
+        audio[k] = (audio_frame.data_[k * 2]
+            + audio_frame.data_[(k * 2) + 1]) >> 1;
+      }
+    }
+  } else {
+    // Copy payload data for future use.
+    size_t length = static_cast<size_t>(audio_frame.samples_per_channel_
+        * audio_channels);
+    memcpy(audio, audio_frame.data_, length * sizeof(WebRtc_UWord16));
+  }
+
+  WebRtc_UWord32 current_timestamp;
+  WebRtc_Word32 status;
+  // If it is required, we have to do a resampling.
+  if (resample) {
+    WebRtc_Word16 resampled_audio[WEBRTC_10MS_PCM_AUDIO];
+    WebRtc_Word32 send_freq = _sendCodecInst.plfreq;
+    WebRtc_UWord32 timestamp_diff;
+    WebRtc_Word16 new_length;
+
+    // Calculate the timestamp of this frame.
+    if (_lastInTimestamp > audio_frame.timestamp_) {
+      // A wrap around has happened.
+      timestamp_diff = ((WebRtc_UWord32) 0xFFFFFFFF - _lastInTimestamp)
+          + audio_frame.timestamp_;
+    } else {
+      timestamp_diff = audio_frame.timestamp_ - _lastInTimestamp;
+    }
+    current_timestamp = _lastTimestamp + (WebRtc_UWord32)(timestamp_diff *
+        ((double) _sendCodecInst.plfreq / (double) audio_frame.sample_rate_hz_));
+
+    new_length = _inputResampler.Resample10Msec(audio,
+                                                audio_frame.sample_rate_hz_,
+                                                resampled_audio, send_freq,
+                                                audio_channels);
+
+    if (new_length < 0) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                   "Cannot add 10 ms audio, resmapling failed");
+      return -1;
+    }
+    status = _codecs[_currentSendCodecIdx]->Add10MsData(current_timestamp,
+                                                        resampled_audio,
+                                                        new_length,
+                                                        audio_channels);
+  } else {
+    current_timestamp = audio_frame.timestamp_;
+
+    status = _codecs[_currentSendCodecIdx]->Add10MsData(
+        current_timestamp, audio, audio_frame.samples_per_channel_,
+        audio_channels);
+  }
+  _lastInTimestamp = audio_frame.timestamp_;
+  _lastTimestamp = current_timestamp;
+  return status;
+}
+
+/////////////////////////////////////////
+//   (FEC) Forward Error Correction
+//
+
+bool AudioCodingModuleImpl::FECStatus() const {
+  CriticalSectionScoped lock(_acmCritSect);
+  return _fecEnabled;
+}
+
+// Configure FEC status i.e on/off.
+WebRtc_Word32
+AudioCodingModuleImpl::SetFECStatus(
+#ifdef WEBRTC_CODEC_RED
+    const bool enable_fec) {
+  CriticalSectionScoped lock(_acmCritSect);
+
+  if (_fecEnabled != enable_fec) {
+    // Reset the RED buffer.
+    memset(_redBuffer, 0, MAX_PAYLOAD_SIZE_BYTE);
+
+    // Reset fragmentation buffers.
+    _fragmentation->fragmentationVectorSize = 2;
+    _fragmentation->fragmentationOffset[0] = 0;
+    _fragmentation->fragmentationOffset[1] = MAX_PAYLOAD_SIZE_BYTE;
+    memset(_fragmentation->fragmentationLength, 0, sizeof(WebRtc_UWord32) * 2);
+    memset(_fragmentation->fragmentationTimeDiff, 0,
+           sizeof(WebRtc_UWord16) * 2);
+    memset(_fragmentation->fragmentationPlType, 0, sizeof(WebRtc_UWord8) * 2);
+
+    // Set _fecEnabled.
+    _fecEnabled = enable_fec;
+  }
+  _isFirstRED = true;  // Make sure we restart FEC.
+  return 0;
+#else
+    const bool /* enable_fec */) {
+  _fecEnabled = false;
+  WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, _id,
+               "  WEBRTC_CODEC_RED is undefined => _fecEnabled = %d",
+               _fecEnabled);
+  return -1;
+#endif
+}
+
+/////////////////////////////////////////
+//   (VAD) Voice Activity Detection
+//
+
+WebRtc_Word32 AudioCodingModuleImpl::SetVAD(const bool enable_dtx,
+                                            const bool enable_vad,
+                                            const ACMVADMode mode) {
+  CriticalSectionScoped lock(_acmCritSect);
+
+  // Sanity check of the mode.
+  if ((mode != VADNormal) && (mode != VADLowBitrate)
+      && (mode != VADAggr) && (mode != VADVeryAggr)) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "Invalid VAD Mode %d, no change is made to VAD/DTX status",
+                 (int) mode);
+    return -1;
+  }
+
+  // Check that the send codec is mono. We don't support VAD/DTX for stereo
+  // sending.
+  if ((enable_dtx || enable_vad) && _stereoSend) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "VAD/DTX not supported for stereo sending");
+    return -1;
+  }
+
+  // If a send codec is registered, set VAD/DTX for the codec.
+  if (HaveValidEncoder("SetVAD")) {
+    WebRtc_Word16 status = _codecs[_currentSendCodecIdx]->SetVAD(enable_dtx,
+                                                                 enable_vad,
+                                                                 mode);
+    if (status == 1) {
+      // Vad was enabled.
+      _vadEnabled = true;
+      _dtxEnabled = enable_dtx;
+      _vadMode = mode;
+
+      return 0;
+    } else if (status < 0) {
+      // SetVAD failed.
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                   "SetVAD failed");
+
+      _vadEnabled = false;
+      _dtxEnabled = false;
+
+      return -1;
+    }
+  }
+
+  _vadEnabled = enable_vad;
+  _dtxEnabled = enable_dtx;
+  _vadMode = mode;
+
+  return 0;
+}
+
+// Get VAD/DTX settings.
+WebRtc_Word32 AudioCodingModuleImpl::VAD(bool& dtx_enabled, bool& vad_enabled,
+                                         ACMVADMode& mode) const {
+  CriticalSectionScoped lock(_acmCritSect);
+
+  dtx_enabled = _dtxEnabled;
+  vad_enabled = _vadEnabled;
+  mode = _vadMode;
+
+  return 0;
+}
+
+/////////////////////////////////////////
+//   Receiver
+//
+
+WebRtc_Word32 AudioCodingModuleImpl::InitializeReceiver() {
+  CriticalSectionScoped lock(_acmCritSect);
+  return InitializeReceiverSafe();
+}
+
+// Initialize receiver, resets codec database etc.
+WebRtc_Word32 AudioCodingModuleImpl::InitializeReceiverSafe() {
+  // If the receiver is already initialized then we want to destroy any
+  // existing decoders. After a call to this function, we should have a clean
+  // start-up.
+  if (_receiverInitialized) {
+    for (int i = 0; i < ACMCodecDB::kNumCodecs; i++) {
+      if (UnregisterReceiveCodecSafe(i) < 0) {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                     "InitializeReceiver() failed, Could not unregister codec");
+        return -1;
+      }
+    }
+  }
+  if (_netEq.Init() != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "InitializeReceiver() failed, Could not initialize NetEQ");
+    return -1;
+  }
+  _netEq.SetUniqueId(_id);
+  if (_netEq.AllocatePacketBuffer(ACMCodecDB::NetEQDecoders(),
+                                  ACMCodecDB::kNumCodecs) != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "NetEQ cannot allocatePacket Buffer");
+    return -1;
+  }
+
+  // Register RED and CN.
+  for (int i = 0; i < ACMCodecDB::kNumCodecs; i++) {
+    if (IsCodecRED(i) || IsCodecCN(i)) {
+      if (RegisterRecCodecMSSafe(ACMCodecDB::database_[i], i, i,
+                                 ACMNetEQ::masterJB) < 0) {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                     "Cannot register master codec.");
+        return -1;
+      }
+      _registeredPlTypes[i] = ACMCodecDB::database_[i].pltype;
+    }
+  }
+
+  _receiverInitialized = true;
+  return 0;
+}
+
+// Reset the decoder state.
+WebRtc_Word32 AudioCodingModuleImpl::ResetDecoder() {
+  CriticalSectionScoped lock(_acmCritSect);
+
+  for (int id = 0; id < ACMCodecDB::kMaxNumCodecs; id++) {
+    if ((_codecs[id] != NULL) && (_registeredPlTypes[id] != -1)) {
+      if (_codecs[id]->ResetDecoder(_registeredPlTypes[id]) < 0) {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                     "ResetDecoder failed:");
+        return -1;
+      }
+    }
+  }
+  return _netEq.FlushBuffers();
+}
+
+// Get current receive frequency.
+WebRtc_Word32 AudioCodingModuleImpl::ReceiveFrequency() const {
+  WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, _id,
+               "ReceiveFrequency()");
+  WebRtcACMCodecParams codec_params;
+
+  CriticalSectionScoped lock(_acmCritSect);
+  if (DecoderParamByPlType(_lastRecvAudioCodecPlType, codec_params) < 0) {
+    return _netEq.CurrentSampFreqHz();
+  } else {
+    return codec_params.codecInstant.plfreq;
+  }
+}
+
+// Get current playout frequency.
+WebRtc_Word32 AudioCodingModuleImpl::PlayoutFrequency() const {
+  WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, _id,
+               "PlayoutFrequency()");
+
+  CriticalSectionScoped lock(_acmCritSect);
+
+  return _netEq.CurrentSampFreqHz();
+}
+
+// Register possible reveive codecs, can be called multiple times,
+// for codecs, CNG (NB, WB and SWB), DTMF, RED.
+WebRtc_Word32 AudioCodingModuleImpl::RegisterReceiveCodec(
+    const CodecInst& receive_codec) {
+  CriticalSectionScoped lock(_acmCritSect);
+
+  if (receive_codec.channels > 2) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "More than 2 audio channel is not supported.");
+    return -1;
+  }
+
+  int mirror_id;
+  int codec_id = ACMCodecDB::ReceiverCodecNumber(&receive_codec, &mirror_id);
+
+  if (codec_id < 0 || codec_id >= ACMCodecDB::kNumCodecs) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "Wrong codec params to be registered as receive codec");
+    return -1;
+  }
+  // Check if the payload-type is valid.
+  if (!ACMCodecDB::ValidPayloadType(receive_codec.pltype)) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "Invalid payload-type %d for %s.", receive_codec.pltype,
+                 receive_codec.plname);
+    return -1;
+  }
+
+  if (!_receiverInitialized) {
+    if (InitializeReceiverSafe() < 0) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                   "Cannot initialize reciver, so failed registering a codec.");
+      return -1;
+    }
+  }
+
+  // If codec already registered, unregister. Except for CN where we only
+  // unregister if payload type is changing.
+  if ((_registeredPlTypes[codec_id] == receive_codec.pltype)
+      && IsCodecCN(&receive_codec)) {
+    // Codec already registered as receiver with this payload type. Nothing
+    // to be done.
+    return 0;
+  } else if (_registeredPlTypes[codec_id] != -1) {
+    if (UnregisterReceiveCodecSafe(codec_id) < 0) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                   "Cannot register master codec.");
+      return -1;
+    }
+  }
+
+  if (RegisterRecCodecMSSafe(receive_codec, codec_id, mirror_id,
+                             ACMNetEQ::masterJB) < 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "Cannot register master codec.");
+    return -1;
+  }
+
+  // TODO(andrew): Refactor how the slave is initialized. Can we instead
+  // always start up a slave and pre-register CN and RED? We should be able
+  // to get rid of _stereoReceiveRegistered.
+  // http://code.google.com/p/webrtc/issues/detail?id=453
+
+  // Register stereo codecs with the slave, or, if we've had already seen a
+  // stereo codec, register CN or RED as a special case.
+  if (receive_codec.channels == 2 ||
+      (_stereoReceiveRegistered && (IsCodecCN(&receive_codec) ||
+          IsCodecRED(&receive_codec)))) {
+    // TODO(andrew): refactor this block to combine with InitStereoSlave().
+
+    if (!_stereoReceiveRegistered) {
+      // This is the first time a stereo codec has been registered. Make
+      // some stereo preparations.
+
+      // Add a stereo slave.
+      assert(_netEq.NumSlaves() == 0);
+      if (_netEq.AddSlave(ACMCodecDB::NetEQDecoders(),
+                          ACMCodecDB::kNumCodecs) < 0) {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                     "Cannot add slave jitter buffer to NetEQ.");
+        return -1;
+      }
+
+      // Register any existing CN or RED codecs with the slave and as stereo.
+      for (int i = 0; i < ACMCodecDB::kNumCodecs; i++) {
+        if (_registeredPlTypes[i] != -1 && (IsCodecRED(i) || IsCodecCN(i))) {
+          _stereoReceive[i] = true;
+
+          CodecInst codec;
+          memcpy(&codec, &ACMCodecDB::database_[i], sizeof(CodecInst));
+          codec.pltype = _registeredPlTypes[i];
+          if (RegisterRecCodecMSSafe(codec, i, i, ACMNetEQ::slaveJB) < 0) {
+            WEBRTC_TRACE(kTraceError, kTraceAudioCoding, _id,
+                         "Cannot register slave codec.");
+            return -1;
+          }
+        }
+      }
+    }
+
+    if (RegisterRecCodecMSSafe(receive_codec, codec_id, mirror_id,
+                               ACMNetEQ::slaveJB) < 0) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                   "Cannot register slave codec.");
+      return -1;
+    }
+
+    if (!_stereoReceive[codec_id]
+        && (_lastRecvAudioCodecPlType == receive_codec.pltype)) {
+      // The last received payload type is the same as the current one, but
+      // was marked as mono. Reset to avoid problems.
+      _lastRecvAudioCodecPlType = -1;
+    }
+
+    _stereoReceive[codec_id] = true;
+    _stereoReceiveRegistered = true;
+  } else {
+    _stereoReceive[codec_id] = false;
+  }
+
+  _registeredPlTypes[codec_id] = receive_codec.pltype;
+
+  if (IsCodecRED(&receive_codec)) {
+    _receiveREDPayloadType = receive_codec.pltype;
+  }
+  return 0;
+}
+
+WebRtc_Word32 AudioCodingModuleImpl::RegisterRecCodecMSSafe(
+    const CodecInst& receive_codec, WebRtc_Word16 codec_id,
+    WebRtc_Word16 mirror_id, ACMNetEQ::JB jitter_buffer) {
+  ACMGenericCodec** codecs;
+  if (jitter_buffer == ACMNetEQ::masterJB) {
+    codecs = &_codecs[0];
+  } else if (jitter_buffer == ACMNetEQ::slaveJB) {
+    codecs = &_slaveCodecs[0];
+    if (_codecs[codec_id]->IsTrueStereoCodec()) {
+      // True stereo codecs need to use the same codec memory
+      // for both master and slave.
+      _slaveCodecs[mirror_id] = _codecs[mirror_id];
+      _mirrorCodecIdx[mirror_id] = mirror_id;
+    }
+  } else {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "RegisterReceiveCodecMSSafe failed, jitter_buffer is neither "
+                 "master or slave ");
+    return -1;
+  }
+
+  if (codecs[mirror_id] == NULL) {
+    codecs[mirror_id] = CreateCodec(receive_codec);
+    if (codecs[mirror_id] == NULL) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                   "Cannot create codec to register as receive codec");
+      return -1;
+    }
+    _mirrorCodecIdx[mirror_id] = mirror_id;
+  }
+  if (mirror_id != codec_id) {
+    codecs[codec_id] = codecs[mirror_id];
+    _mirrorCodecIdx[codec_id] = mirror_id;
+  }
+
+  codecs[codec_id]->SetIsMaster(jitter_buffer == ACMNetEQ::masterJB);
+
+  WebRtc_Word16 status = 0;
+  WebRtcACMCodecParams codec_params;
+  memcpy(&(codec_params.codecInstant), &receive_codec, sizeof(CodecInst));
+  codec_params.enableVAD = false;
+  codec_params.enableDTX = false;
+  codec_params.vadMode = VADNormal;
+  if (!codecs[codec_id]->DecoderInitialized()) {
+    // Force initialization.
+    status = codecs[codec_id]->InitDecoder(&codec_params, true);
+    if (status < 0) {
+      // Could not initialize the decoder, we don't want to
+      // continue if we could not initialize properly.
+      WEBRTC_TRACE(
+          webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+          "could not initialize the receive codec, codec not registered");
+
+      return -1;
+    }
+  } else if (mirror_id != codec_id) {
+    // Currently this only happens for iSAC.
+    // We have to store the decoder parameters.
+    codecs[codec_id]->SaveDecoderParam(&codec_params);
+  }
+
+  if (codecs[codec_id]->RegisterInNetEq(&_netEq, receive_codec) != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "Receive codec could not be registered in NetEQ");
+      return -1;
+  }
+  // Guarantee that the same payload-type that is
+  // registered in NetEQ is stored in the codec.
+  codecs[codec_id]->SaveDecoderParam(&codec_params);
+
+  return status;
+}
+
+// Get current received codec.
+WebRtc_Word32 AudioCodingModuleImpl::ReceiveCodec(
+    CodecInst& current_codec) const {
+  WebRtcACMCodecParams decoderParam;
+  CriticalSectionScoped lock(_acmCritSect);
+
+  for (int id = 0; id < ACMCodecDB::kMaxNumCodecs; id++) {
+    if (_codecs[id] != NULL) {
+      if (_codecs[id]->DecoderInitialized()) {
+        if (_codecs[id]->DecoderParams(&decoderParam,
+                                       _lastRecvAudioCodecPlType)) {
+          memcpy(&current_codec, &decoderParam.codecInstant, sizeof(CodecInst));
+          return 0;
+        }
+      }
+    }
+  }
+
+  // If we are here then we haven't found any codec. Set codec pltype to -1 to
+  // indicate that the structure is invalid and return -1.
+  current_codec.pltype = -1;
+  return -1;
+}
+
+// Incoming packet from network parsed and ready for decode.
+WebRtc_Word32 AudioCodingModuleImpl::IncomingPacket(
+    const WebRtc_UWord8* incoming_payload,
+    const WebRtc_Word32 payload_length,
+    const WebRtcRTPHeader& rtp_info) {
+  WebRtcRTPHeader rtp_header;
+
+  memcpy(&rtp_header, &rtp_info, sizeof(WebRtcRTPHeader));
+
+  if (payload_length < 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "IncomingPacket() Error, payload-length cannot be negative");
+    return -1;
+  }
+  {
+    // Store the payload Type. This will be used to retrieve "received codec"
+    // and "received frequency."
+    CriticalSectionScoped lock(_acmCritSect);
+#ifdef ACM_QA_TEST
+    if(_incomingPL != NULL) {
+      if (fwrite(&rtp_info.header.timestamp, sizeof(WebRtc_UWord32),
+                 1, _incomingPL) != 1) {
+        return -1;
+      }
+      if (fwrite(&rtp_info.header.payloadType, sizeof(WebRtc_UWord8),
+                 1, _incomingPL) != 1) {
+        return -1;
+      }
+      if (fwrite(&payload_length, sizeof(WebRtc_Word16),
+                 1, _incomingPL) != 1) {
+        return -1;
+      }
+    }
+#endif
+
+    WebRtc_UWord8 myPayloadType;
+
+    // Check if this is an RED payload.
+    if (rtp_info.header.payloadType == _receiveREDPayloadType) {
+      // Get the primary payload-type.
+      myPayloadType = incoming_payload[0] & 0x7F;
+    } else {
+      myPayloadType = rtp_info.header.payloadType;
+    }
+
+    // If payload is audio, check if received payload is different from
+    // previous.
+    if (!rtp_info.type.Audio.isCNG) {
+      // This is Audio not CNG.
+
+      if (myPayloadType != _lastRecvAudioCodecPlType) {
+        // We detect a change in payload type. It is necessary for iSAC
+        // we are going to use ONE iSAC instance for decoding both WB and
+        // SWB payloads. If payload is changed there might be a need to reset
+        // sampling rate of decoder. depending what we have received "now".
+        for (int i = 0; i < ACMCodecDB::kMaxNumCodecs; i++) {
+          if (_registeredPlTypes[i] == myPayloadType) {
+            if (UpdateUponReceivingCodec(i) != 0)
+              return -1;
+            break;
+          }
+        }
+      }
+      _lastRecvAudioCodecPlType = myPayloadType;
+    }
+  }
+
+  // Split the payload for stereo packets, so that first half of payload
+  // vector holds left channel, and second half holds right channel.
+  if (_expected_channels == 2) {
+    if (!rtp_info.type.Audio.isCNG) {
+      // Create a new vector for the payload, maximum payload size.
+      WebRtc_Word32 length = payload_length;
+      WebRtc_UWord8 payload[kMaxPacketSize];
+      assert(payload_length <= kMaxPacketSize);
+      memcpy(payload, incoming_payload, payload_length);
+      _codecs[_current_receive_codec_idx]->SplitStereoPacket(payload, &length);
+      rtp_header.type.Audio.channel = 2;
+      // Insert packet into NetEQ.
+      return _netEq.RecIn(payload, length, rtp_header);
+    } else {
+      // If we receive a CNG packet while expecting stereo, we ignore the packet
+      // and continue. CNG is not supported for stereo.
+      return 0;
+    }
+  } else {
+    return _netEq.RecIn(incoming_payload, payload_length, rtp_header);
+  }
+}
+
+int AudioCodingModuleImpl::UpdateUponReceivingCodec(int index) {
+  if (_codecs[index] == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceAudioCoding, _id,
+        "IncomingPacket() error: payload type found but corresponding codec "
+        "is NULL");
+    return -1;
+  }
+  _codecs[index]->UpdateDecoderSampFreq(index);
+  _netEq.SetReceivedStereo(_stereoReceive[index]);
+  _current_receive_codec_idx = index;
+
+  // If we have a change in the expected number of channels, flush packet
+  // buffers in NetEQ.
+  if ((_stereoReceive[index] && (_expected_channels == 1)) ||
+      (!_stereoReceive[index] && (_expected_channels == 2))) {
+    _netEq.FlushBuffers();
+    _codecs[index]->ResetDecoder(_registeredPlTypes[index]);
+  }
+
+  if (_stereoReceive[index] && (_expected_channels == 1)) {
+    // When switching from a mono to stereo codec reset the slave.
+    if (InitStereoSlave() != 0)
+      return -1;
+  }
+
+  // Store number of channels we expect to receive for the current payload type.
+  if (_stereoReceive[index]) {
+    _expected_channels = 2;
+  } else {
+    _expected_channels = 1;
+  }
+
+  // Reset previous received channel.
+  _prev_received_channel = 0;
+  return 0;
+}
+
+bool AudioCodingModuleImpl::IsCodecForSlave(int index) const {
+  return (_registeredPlTypes[index] != -1 && _stereoReceive[index]);
+}
+
+bool AudioCodingModuleImpl::IsCodecRED(int index) const {
+  return (IsCodecRED(&ACMCodecDB::database_[index]));
+}
+
+bool AudioCodingModuleImpl::IsCodecRED(const CodecInst* codec) const {
+  return (STR_CASE_CMP(codec->plname, "RED") == 0);
+}
+
+bool AudioCodingModuleImpl::IsCodecCN(int index) const {
+  return (IsCodecCN(&ACMCodecDB::database_[index]));
+}
+
+bool AudioCodingModuleImpl::IsCodecCN(const CodecInst* codec) const {
+  return (STR_CASE_CMP(codec->plname, "CN") == 0);
+}
+
+int AudioCodingModuleImpl::InitStereoSlave() {
+  _netEq.RemoveSlaves();
+
+  if (_netEq.AddSlave(ACMCodecDB::NetEQDecoders(),
+                      ACMCodecDB::kNumCodecs) < 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "Cannot add slave jitter buffer to NetEQ.");
+    return -1;
+  }
+
+  // Register all needed codecs with slave.
+  for (int i = 0; i < ACMCodecDB::kNumCodecs; i++) {
+    if (_codecs[i] != NULL && IsCodecForSlave(i)) {
+      WebRtcACMCodecParams decoder_params;
+      if (_codecs[i]->DecoderParams(&decoder_params, _registeredPlTypes[i])) {
+        if (RegisterRecCodecMSSafe(decoder_params.codecInstant,
+                                   i, ACMCodecDB::MirrorID(i),
+                                   ACMNetEQ::slaveJB) < 0) {
+            WEBRTC_TRACE(kTraceError, kTraceAudioCoding, _id,
+                         "Cannot register slave codec.");
+            return -1;
+        }
+      }
+    }
+  }
+  return 0;
+}
+
+// Minimum playout delay (Used for lip-sync).
+WebRtc_Word32 AudioCodingModuleImpl::SetMinimumPlayoutDelay(
+    const WebRtc_Word32 time_ms) {
+  if ((time_ms < 0) || (time_ms > 1000)) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "Delay must be in the range of 0-1000 milliseconds.");
+    return -1;
+  }
+  return _netEq.SetExtraDelay(time_ms);
+}
+
+// Get Dtmf playout status.
+bool AudioCodingModuleImpl::DtmfPlayoutStatus() const {
+#ifndef WEBRTC_CODEC_AVT
+  return false;
+#else
+  return _netEq.AVTPlayout();
+#endif
+}
+
+// Configure Dtmf playout status i.e on/off playout the incoming outband
+// Dtmf tone.
+WebRtc_Word32 AudioCodingModuleImpl::SetDtmfPlayoutStatus(
+#ifndef WEBRTC_CODEC_AVT
+    const bool /* enable */) {
+  WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioCoding, _id,
+               "SetDtmfPlayoutStatus() failed: AVT is not supported.");
+  return -1;
+#else
+    const bool enable) {
+  return _netEq.SetAVTPlayout(enable);
+#endif
+}
+
+// Estimate the Bandwidth based on the incoming stream, needed for one way
+// audio where the RTCP send the BW estimate.
+// This is also done in the RTP module.
+WebRtc_Word32 AudioCodingModuleImpl::DecoderEstimatedBandwidth() const {
+  CodecInst codec;
+  WebRtc_Word16 codec_id = -1;
+  int payloadtype_wb;
+  int payloadtype_swb;
+
+  // Get iSAC settings.
+  for (int id = 0; id < ACMCodecDB::kNumCodecs; id++) {
+    // Store codec settings for codec number "codeCntr" in the output struct.
+    ACMCodecDB::Codec(id, &codec);
+
+    if (!STR_CASE_CMP(codec.plname, "isac")) {
+      codec_id = 1;
+      payloadtype_wb = codec.pltype;
+
+      ACMCodecDB::Codec(id + 1, &codec);
+      payloadtype_swb = codec.pltype;
+
+      break;
+    }
+  }
+
+  if (codec_id < 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "DecoderEstimatedBandwidth failed");
+    return -1;
+  }
+
+  if ((_lastRecvAudioCodecPlType == payloadtype_wb) ||
+      (_lastRecvAudioCodecPlType == payloadtype_swb)) {
+    return _codecs[codec_id]->GetEstimatedBandwidth();
+  } else {
+    return -1;
+  }
+}
+
+// Set playout mode for: voice, fax, or streaming.
+WebRtc_Word32 AudioCodingModuleImpl::SetPlayoutMode(
+    const AudioPlayoutMode mode) {
+  if ((mode != voice) && (mode != fax) && (mode != streaming)) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "Invalid playout mode.");
+    return -1;
+  }
+  return _netEq.SetPlayoutMode(mode);
+}
+
+// Get playout mode voice, fax.
+AudioPlayoutMode AudioCodingModuleImpl::PlayoutMode() const {
+  return _netEq.PlayoutMode();
+}
+
+// Get 10 milliseconds of raw audio data to play out.
+// Automatic resample to the requested frequency.
+WebRtc_Word32 AudioCodingModuleImpl::PlayoutData10Ms(
+    const WebRtc_Word32 desired_freq_hz, AudioFrame& audio_frame) {
+  bool stereo_mode;
+
+  // RecOut always returns 10 ms.
+  if (_netEq.RecOut(_audioFrame) != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "PlayoutData failed, RecOut Failed");
+    return -1;
+  }
+
+  audio_frame.num_channels_ = _audioFrame.num_channels_;
+  audio_frame.vad_activity_ = _audioFrame.vad_activity_;
+  audio_frame.speech_type_ = _audioFrame.speech_type_;
+
+  stereo_mode = (_audioFrame.num_channels_ > 1);
+  // For stereo playout:
+  // Master and Slave samples are interleaved starting with Master.
+
+  const WebRtc_UWord16 receive_freq =
+      static_cast<WebRtc_UWord16>(_audioFrame.sample_rate_hz_);
+  bool tone_detected = false;
+  WebRtc_Word16 last_detected_tone;
+  WebRtc_Word16 tone;
+
+  // Limit the scope of ACM Critical section.
+  {
+    CriticalSectionScoped lock(_acmCritSect);
+
+    if ((receive_freq != desired_freq_hz) && (desired_freq_hz != -1)) {
+      // Resample payloadData.
+      WebRtc_Word16 temp_len = _outputResampler.Resample10Msec(
+          _audioFrame.data_, receive_freq, audio_frame.data_,
+          desired_freq_hz, _audioFrame.num_channels_);
+
+      if (temp_len < 0) {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                     "PlayoutData failed, resampler failed");
+        return -1;
+      }
+
+      // Set the payload data length from the resampler.
+      audio_frame.samples_per_channel_ = (WebRtc_UWord16) temp_len;
+      // Set the sampling frequency.
+      audio_frame.sample_rate_hz_ = desired_freq_hz;
+    } else {
+      memcpy(audio_frame.data_, _audioFrame.data_,
+             _audioFrame.samples_per_channel_ * audio_frame.num_channels_
+             * sizeof(WebRtc_Word16));
+      // Set the payload length.
+      audio_frame.samples_per_channel_ =
+          _audioFrame.samples_per_channel_;
+      // Set the sampling frequency.
+      audio_frame.sample_rate_hz_ = receive_freq;
+    }
+
+    // Tone detection done for master channel.
+    if (_dtmfDetector != NULL) {
+      // Dtmf Detection.
+      if (audio_frame.sample_rate_hz_ == 8000) {
+        // Use audio_frame.data_ then Dtmf detector doesn't
+        // need resampling.
+        if (!stereo_mode) {
+          _dtmfDetector->Detect(audio_frame.data_,
+                                audio_frame.samples_per_channel_,
+                                audio_frame.sample_rate_hz_, tone_detected,
+                                tone);
+        } else {
+          // We are in 8 kHz so the master channel needs only 80 samples.
+          WebRtc_Word16 master_channel[80];
+          for (int n = 0; n < 80; n++) {
+            master_channel[n] = audio_frame.data_[n << 1];
+          }
+          _dtmfDetector->Detect(master_channel,
+                                audio_frame.samples_per_channel_,
+                                audio_frame.sample_rate_hz_, tone_detected,
+                                tone);
+        }
+      } else {
+        // Do the detection on the audio that we got from NetEQ (_audioFrame).
+        if (!stereo_mode) {
+          _dtmfDetector->Detect(_audioFrame.data_,
+                                _audioFrame.samples_per_channel_,
+                                receive_freq, tone_detected, tone);
+        } else {
+          WebRtc_Word16 master_channel[WEBRTC_10MS_PCM_AUDIO];
+          for (int n = 0; n < _audioFrame.samples_per_channel_; n++) {
+            master_channel[n] = _audioFrame.data_[n << 1];
+          }
+          _dtmfDetector->Detect(master_channel,
+                                _audioFrame.samples_per_channel_,
+                                receive_freq, tone_detected, tone);
+        }
+      }
+    }
+
+    // We want to do this while we are in _acmCritSect.
+    // (Doesn't really need to initialize the following
+    // variable but Linux complains if we don't.)
+    last_detected_tone = kACMToneEnd;
+    if (tone_detected) {
+      last_detected_tone = _lastDetectedTone;
+      _lastDetectedTone = tone;
+    }
+  }
+
+  if (tone_detected) {
+    // We will deal with callback here, so enter callback critical section.
+    CriticalSectionScoped lock(_callbackCritSect);
+
+    if (_dtmfCallback != NULL) {
+      if (tone != kACMToneEnd) {
+        // just a tone
+        _dtmfCallback->IncomingDtmf((WebRtc_UWord8) tone, false);
+      } else if ((tone == kACMToneEnd) && (last_detected_tone != kACMToneEnd)) {
+        // The tone is "END" and the previously detected tone is
+        // not "END," so call fir an end.
+        _dtmfCallback->IncomingDtmf((WebRtc_UWord8) last_detected_tone, true);
+      }
+    }
+  }
+
+  audio_frame.id_ = _id;
+  audio_frame.energy_ = -1;
+  audio_frame.timestamp_ = 0;
+
+  return 0;
+}
+
+/////////////////////////////////////////
+//   (CNG) Comfort Noise Generation
+//   Generate comfort noise when receiving DTX packets
+//
+
+// Get VAD aggressiveness on the incoming stream
+ACMVADMode AudioCodingModuleImpl::ReceiveVADMode() const {
+  return _netEq.VADMode();
+}
+
+// Configure VAD aggressiveness on the incoming stream.
+WebRtc_Word16 AudioCodingModuleImpl::SetReceiveVADMode(const ACMVADMode mode) {
+  return _netEq.SetVADMode(mode);
+}
+
+/////////////////////////////////////////
+//   Statistics
+//
+
+WebRtc_Word32 AudioCodingModuleImpl::NetworkStatistics(
+    ACMNetworkStatistics& statistics) const {
+  WebRtc_Word32 status;
+  status = _netEq.NetworkStatistics(&statistics);
+  return status;
+}
+
+
+void AudioCodingModuleImpl::DestructEncoderInst(void* inst) {
+  WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, _id,
+               "DestructEncoderInst()");
+  if (!HaveValidEncoder("DestructEncoderInst")) {
+    return;
+  }
+
+  _codecs[_currentSendCodecIdx]->DestructEncoderInst(inst);
+}
+
+WebRtc_Word16 AudioCodingModuleImpl::AudioBuffer(
+    WebRtcACMAudioBuff& buffer) {
+  WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, _id,
+               "AudioBuffer()");
+  if (!HaveValidEncoder("AudioBuffer")) {
+    return -1;
+  }
+  buffer.lastInTimestamp = _lastInTimestamp;
+  return _codecs[_currentSendCodecIdx]->AudioBuffer(buffer);
+}
+
+WebRtc_Word16 AudioCodingModuleImpl::SetAudioBuffer(
+    WebRtcACMAudioBuff& buffer) {
+  WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, _id,
+               "SetAudioBuffer()");
+  if (!HaveValidEncoder("SetAudioBuffer")) {
+    return -1;
+  }
+  return _codecs[_currentSendCodecIdx]->SetAudioBuffer(buffer);
+}
+
+WebRtc_UWord32 AudioCodingModuleImpl::EarliestTimestamp() const {
+  WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, _id,
+               "EarliestTimestamp()");
+  if (!HaveValidEncoder("EarliestTimestamp")) {
+    return -1;
+  }
+  return _codecs[_currentSendCodecIdx]->EarliestTimestamp();
+}
+
+WebRtc_Word32 AudioCodingModuleImpl::RegisterVADCallback(
+    ACMVADCallback* vad_callback) {
+  WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceAudioCoding, _id,
+               "RegisterVADCallback()");
+  CriticalSectionScoped lock(_callbackCritSect);
+  _vadCallback = vad_callback;
+  return 0;
+}
+
+// TODO(tlegrand): Modify this function to work for stereo, and add tests.
+WebRtc_Word32 AudioCodingModuleImpl::IncomingPayload(
+    const WebRtc_UWord8* incoming_payload, const WebRtc_Word32 payload_length,
+    const WebRtc_UWord8 payload_type, const WebRtc_UWord32 timestamp) {
+  if (payload_length < 0) {
+    // Log error in trace file.
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "IncomingPacket() Error, payload-length cannot be negative");
+    return -1;
+  }
+
+  if (_dummyRTPHeader == NULL) {
+    // This is the first time that we are using _dummyRTPHeader
+    // so we have to create it.
+    WebRtcACMCodecParams codec_params;
+    _dummyRTPHeader = new WebRtcRTPHeader;
+    if (_dummyRTPHeader == NULL) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                   "IncomingPacket() Error, out of memory");
+      return -1;
+    }
+    _dummyRTPHeader->header.payloadType = payload_type;
+    // Don't matter in this case.
+    _dummyRTPHeader->header.ssrc = 0;
+    _dummyRTPHeader->header.markerBit = false;
+    // Start with random numbers.
+    _dummyRTPHeader->header.sequenceNumber = rand();
+    _dummyRTPHeader->header.timestamp = (((WebRtc_UWord32) rand()) << 16)
+        + (WebRtc_UWord32) rand();
+    _dummyRTPHeader->type.Audio.channel = 1;
+
+    if (DecoderParamByPlType(payload_type, codec_params) < 0) {
+      // We didn't find a codec with the given payload.
+      // Something is wrong we exit, but we delete _dummyRTPHeader
+      // and set it to NULL to start clean next time.
+      delete _dummyRTPHeader;
+      _dummyRTPHeader = NULL;
+      return -1;
+    }
+    _recvPlFrameSizeSmpls = codec_params.codecInstant.pacsize;
+  }
+
+  if (payload_type != _dummyRTPHeader->header.payloadType) {
+    // Payload type has changed since the last time we might need to
+    // update the frame-size.
+    WebRtcACMCodecParams codec_params;
+    if (DecoderParamByPlType(payload_type, codec_params) < 0) {
+      // We didn't find a codec with the given payload.
+      return -1;
+    }
+    _recvPlFrameSizeSmpls = codec_params.codecInstant.pacsize;
+    _dummyRTPHeader->header.payloadType = payload_type;
+  }
+
+  if (timestamp > 0) {
+    _dummyRTPHeader->header.timestamp = timestamp;
+  }
+
+  // Store the payload Type. this will be used to retrieve "received codec"
+  // and "received frequency."
+  _lastRecvAudioCodecPlType = payload_type;
+
+  // Insert in NetEQ.
+  if (_netEq.RecIn(incoming_payload, payload_length, (*_dummyRTPHeader)) < 0) {
+    return -1;
+  }
+
+  // Get ready for the next payload.
+  _dummyRTPHeader->header.sequenceNumber++;
+  _dummyRTPHeader->header.timestamp += _recvPlFrameSizeSmpls;
+  return 0;
+}
+
+WebRtc_Word16 AudioCodingModuleImpl::DecoderParamByPlType(
+    const WebRtc_UWord8 payload_type,
+    WebRtcACMCodecParams& codec_params) const {
+  CriticalSectionScoped lock(_acmCritSect);
+  for (WebRtc_Word16 id = 0; id < ACMCodecDB::kMaxNumCodecs;
+      id++) {
+    if (_codecs[id] != NULL) {
+      if (_codecs[id]->DecoderInitialized()) {
+        if (_codecs[id]->DecoderParams(&codec_params, payload_type)) {
+          return 0;
+        }
+      }
+    }
+  }
+  // If we are here it means that we could not find a
+  // codec with that payload type. reset the values to
+  // not acceptable values and return -1.
+  codec_params.codecInstant.plname[0] = '\0';
+  codec_params.codecInstant.pacsize = 0;
+  codec_params.codecInstant.rate = 0;
+  codec_params.codecInstant.pltype = -1;
+  return -1;
+}
+
+WebRtc_Word16 AudioCodingModuleImpl::DecoderListIDByPlName(
+    const char* name, const WebRtc_UWord16 frequency) const {
+  WebRtcACMCodecParams codec_params;
+  CriticalSectionScoped lock(_acmCritSect);
+  for (WebRtc_Word16 id = 0; id < ACMCodecDB::kMaxNumCodecs; id++) {
+    if ((_codecs[id] != NULL)) {
+      if (_codecs[id]->DecoderInitialized()) {
+        assert(_registeredPlTypes[id] >= 0);
+        assert(_registeredPlTypes[id] <= 255);
+        _codecs[id]->DecoderParams(
+            &codec_params, (WebRtc_UWord8) _registeredPlTypes[id]);
+        if (!STR_CASE_CMP(codec_params.codecInstant.plname, name)) {
+          // Check if the given sampling frequency matches.
+          // A zero sampling frequency means we matching the names
+          // is sufficient and we don't need to check for the
+          // frequencies.
+          // Currently it is only iSAC which has one name but two
+          // sampling frequencies.
+          if ((frequency == 0)||
+              (codec_params.codecInstant.plfreq == frequency)) {
+            return id;
+          }
+        }
+      }
+    }
+  }
+  // If we are here it means that we could not find a
+  // codec with that payload type. return -1.
+  return -1;
+}
+
+WebRtc_Word32 AudioCodingModuleImpl::LastEncodedTimestamp(
+    WebRtc_UWord32& timestamp) const {
+  CriticalSectionScoped lock(_acmCritSect);
+  if (!HaveValidEncoder("LastEncodedTimestamp")) {
+    return -1;
+  }
+  timestamp = _codecs[_currentSendCodecIdx]->LastEncodedTimestamp();
+  return 0;
+}
+
+WebRtc_Word32 AudioCodingModuleImpl::ReplaceInternalDTXWithWebRtc(
+    bool use_webrtc_dtx) {
+  CriticalSectionScoped lock(_acmCritSect);
+
+  if (!HaveValidEncoder("ReplaceInternalDTXWithWebRtc")) {
+    WEBRTC_TRACE(
+        webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+        "Cannot replace codec internal DTX when no send codec is registered.");
+    return -1;
+  }
+
+  WebRtc_Word32 res = _codecs[_currentSendCodecIdx]->ReplaceInternalDTX(
+      use_webrtc_dtx);
+  // Check if VAD is turned on, or if there is any error.
+  if (res == 1) {
+    _vadEnabled = true;
+  } else if (res < 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "Failed to set ReplaceInternalDTXWithWebRtc(%d)",
+                 use_webrtc_dtx);
+    return res;
+  }
+
+  return 0;
+}
+
+WebRtc_Word32 AudioCodingModuleImpl::IsInternalDTXReplacedWithWebRtc(
+    bool& uses_webrtc_dtx) {
+  CriticalSectionScoped lock(_acmCritSect);
+
+  if (!HaveValidEncoder("IsInternalDTXReplacedWithWebRtc")) {
+    return -1;
+  }
+  if (_codecs[_currentSendCodecIdx]->IsInternalDTXReplaced(&uses_webrtc_dtx)
+      < 0) {
+    return -1;
+  }
+  return 0;
+}
+
+WebRtc_Word32 AudioCodingModuleImpl::SetISACMaxRate(
+    const WebRtc_UWord32 max_bit_per_sec) {
+  CriticalSectionScoped lock(_acmCritSect);
+
+  if (!HaveValidEncoder("SetISACMaxRate")) {
+    return -1;
+  }
+
+  return _codecs[_currentSendCodecIdx]->SetISACMaxRate(max_bit_per_sec);
+}
+
+WebRtc_Word32 AudioCodingModuleImpl::SetISACMaxPayloadSize(
+    const WebRtc_UWord16 max_size_bytes) {
+  CriticalSectionScoped lock(_acmCritSect);
+
+  if (!HaveValidEncoder("SetISACMaxPayloadSize")) {
+    return -1;
+  }
+
+  return _codecs[_currentSendCodecIdx]->SetISACMaxPayloadSize(
+      max_size_bytes);
+}
+
+WebRtc_Word32 AudioCodingModuleImpl::ConfigISACBandwidthEstimator(
+    const WebRtc_UWord8 frame_size_ms,
+    const WebRtc_UWord16 rate_bit_per_sec,
+    const bool enforce_frame_size) {
+  CriticalSectionScoped lock(_acmCritSect);
+
+  if (!HaveValidEncoder("ConfigISACBandwidthEstimator")) {
+    return -1;
+  }
+
+  return _codecs[_currentSendCodecIdx]->ConfigISACBandwidthEstimator(
+      frame_size_ms, rate_bit_per_sec, enforce_frame_size);
+}
+
+WebRtc_Word32 AudioCodingModuleImpl::SetBackgroundNoiseMode(
+    const ACMBackgroundNoiseMode mode) {
+  if ((mode < On) || (mode > Off)) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "The specified background noise is out of range.\n");
+    return -1;
+  }
+  return _netEq.SetBackgroundNoiseMode(mode);
+}
+
+WebRtc_Word32 AudioCodingModuleImpl::BackgroundNoiseMode(
+    ACMBackgroundNoiseMode& mode) {
+  return _netEq.BackgroundNoiseMode(mode);
+}
+
+WebRtc_Word32 AudioCodingModuleImpl::PlayoutTimestamp(
+    WebRtc_UWord32& timestamp) {
+  WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceAudioCoding, _id,
+               "PlayoutTimestamp()");
+  return _netEq.PlayoutTimestamp(timestamp);
+}
+
+bool AudioCodingModuleImpl::HaveValidEncoder(const char* caller_name) const {
+  if ((!_sendCodecRegistered) || (_currentSendCodecIdx < 0) ||
+      (_currentSendCodecIdx >= ACMCodecDB::kNumCodecs)) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "%s failed: No send codec is registered.", caller_name);
+    return false;
+  }
+  if ((_currentSendCodecIdx < 0) ||
+      (_currentSendCodecIdx >= ACMCodecDB::kNumCodecs)) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "%s failed: Send codec index out of range.", caller_name);
+    return false;
+  }
+  if (_codecs[_currentSendCodecIdx] == NULL) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                 "%s failed: Send codec is NULL pointer.", caller_name);
+    return false;
+  }
+  return true;
+}
+
+WebRtc_Word32 AudioCodingModuleImpl::UnregisterReceiveCodec(
+    const WebRtc_Word16 payload_type) {
+  CriticalSectionScoped lock(_acmCritSect);
+  int id;
+
+  // Search through the list of registered payload types.
+  for (id = 0; id < ACMCodecDB::kMaxNumCodecs; id++) {
+    if (_registeredPlTypes[id] == payload_type) {
+      // We have found the id registered with the payload type.
+      break;
+    }
+  }
+
+  if (id >= ACMCodecDB::kNumCodecs) {
+    // Payload type was not registered. No need to unregister.
+    return 0;
+  }
+
+  // Unregister the codec with the given payload type.
+  return UnregisterReceiveCodecSafe(id);
+}
+
+WebRtc_Word32 AudioCodingModuleImpl::UnregisterReceiveCodecSafe(
+    const WebRtc_Word16 codec_id) {
+  const WebRtcNetEQDecoder *neteq_decoder = ACMCodecDB::NetEQDecoders();
+  WebRtc_Word16 mirror_id = ACMCodecDB::MirrorID(codec_id);
+  bool stereo_receiver = false;
+
+  if (_codecs[codec_id] != NULL) {
+    if (_registeredPlTypes[codec_id] != -1) {
+      // Store stereo information for future use.
+      stereo_receiver = _stereoReceive[codec_id];
+
+      // Before deleting the decoder instance unregister from NetEQ.
+      if (_netEq.RemoveCodec(neteq_decoder[codec_id],
+                             _stereoReceive[codec_id])  < 0) {
+        CodecInst codec;
+        ACMCodecDB::Codec(codec_id, &codec);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioCoding, _id,
+                     "Unregistering %s-%d from NetEQ failed.", codec.plname,
+                     codec.plfreq);
+        return -1;
+      }
+
+      // CN is a special case for NetEQ, all three sampling frequencies
+      // are unregistered if one is deleted.
+      if (IsCodecCN(codec_id)) {
+        for (int i = 0; i < ACMCodecDB::kNumCodecs; i++) {
+          if (IsCodecCN(i)) {
+            _stereoReceive[i] = false;
+            _registeredPlTypes[i] = -1;
+          }
+        }
+      } else {
+        if (codec_id == mirror_id) {
+          _codecs[codec_id]->DestructDecoder();
+          if (_stereoReceive[codec_id]) {
+            _slaveCodecs[codec_id]->DestructDecoder();
+            _stereoReceive[codec_id] = false;
+
+          }
+        }
+      }
+
+      // Check if this is the last registered stereo receive codec.
+      if (stereo_receiver) {
+        bool no_stereo = true;
+
+        for (int i = 0; i < ACMCodecDB::kNumCodecs; i++) {
+          if (_stereoReceive[i]) {
+            // We still have stereo codecs registered.
+            no_stereo = false;
+            break;
+          }
+        }
+
+        // If we don't have any stereo codecs left, change status.
+        if (no_stereo) {
+          _netEq.RemoveSlaves();  // No longer need the slave.
+          _stereoReceiveRegistered = false;
+        }
+      }
+    }
+  }
+
+  if (_registeredPlTypes[codec_id] == _receiveREDPayloadType) {
+    // RED is going to be unregistered, set to an invalid value.
+    _receiveREDPayloadType = 255;
+  }
+  _registeredPlTypes[codec_id] = -1;
+
+  return 0;
+}
+
+WebRtc_Word32 AudioCodingModuleImpl::REDPayloadISAC(
+    const WebRtc_Word32 isac_rate, const WebRtc_Word16 isac_bw_estimate,
+    WebRtc_UWord8* payload, WebRtc_Word16* length_bytes) {
+  if (!HaveValidEncoder("EncodeData")) {
+    return -1;
+  }
+  WebRtc_Word16 status;
+  status = _codecs[_currentSendCodecIdx]->REDPayloadISAC(isac_rate,
+                                                         isac_bw_estimate,
+                                                         payload,
+                                                         length_bytes);
+  return status;
+}
+
+}  // namespace webrtc
diff --git a/src/modules/audio_coding/main/source/audio_coding_module_impl.h b/src/modules/audio_coding/main/source/audio_coding_module_impl.h
new file mode 100644
index 0000000..145faf6
--- /dev/null
+++ b/src/modules/audio_coding/main/source/audio_coding_module_impl.h
@@ -0,0 +1,346 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_AUDIO_CODING_MODULE_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_AUDIO_CODING_MODULE_IMPL_H_
+
+#include "acm_codec_database.h"
+#include "acm_neteq.h"
+#include "acm_resampler.h"
+#include "common_types.h"
+#include "engine_configurations.h"
+
+namespace webrtc {
+
+class ACMDTMFDetection;
+class ACMGenericCodec;
+class CriticalSectionWrapper;
+class RWLockWrapper;
+
+#ifdef ACM_QA_TEST
+#   include <stdio.h>
+#endif
+
+class AudioCodingModuleImpl : public AudioCodingModule {
+ public:
+  // Constructor
+  AudioCodingModuleImpl(const WebRtc_Word32 id);
+
+  // Destructor
+  ~AudioCodingModuleImpl();
+
+  // Change the unique identifier of this object.
+  virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+  // Returns the number of milliseconds until the module want a worker thread
+  // to call Process.
+  WebRtc_Word32 TimeUntilNextProcess();
+
+  // Process any pending tasks such as timeouts.
+  WebRtc_Word32 Process();
+
+  /////////////////////////////////////////
+  //   Sender
+  //
+
+  // Initialize send codec.
+  WebRtc_Word32 InitializeSender();
+
+  // Reset send codec.
+  WebRtc_Word32 ResetEncoder();
+
+  // Can be called multiple times for Codec, CNG, RED.
+  WebRtc_Word32 RegisterSendCodec(const CodecInst& send_codec);
+
+  // Get current send codec.
+  WebRtc_Word32 SendCodec(CodecInst& current_codec) const;
+
+  // Get current send frequency.
+  WebRtc_Word32 SendFrequency() const;
+
+  // Get encode bitrate.
+  // Adaptive rate codecs return their current encode target rate, while other
+  // codecs return there longterm avarage or their fixed rate.
+  WebRtc_Word32 SendBitrate() const;
+
+  // Set available bandwidth, inform the encoder about the
+  // estimated bandwidth received from the remote party.
+  virtual WebRtc_Word32 SetReceivedEstimatedBandwidth(const WebRtc_Word32 bw);
+
+  // Register a transport callback which will be
+  // called to deliver the encoded buffers.
+  WebRtc_Word32 RegisterTransportCallback(
+      AudioPacketizationCallback* transport);
+
+  // Used by the module to deliver messages to the codec module/application
+  // AVT(DTMF).
+  WebRtc_Word32 RegisterIncomingMessagesCallback(
+      AudioCodingFeedback* incoming_message, const ACMCountries cpt);
+
+  // Add 10MS of raw (PCM) audio data to the encoder.
+  WebRtc_Word32 Add10MsData(const AudioFrame& audio_frame);
+
+  // Set background noise mode for NetEQ, on, off or fade.
+  WebRtc_Word32 SetBackgroundNoiseMode(const ACMBackgroundNoiseMode mode);
+
+  // Get current background noise mode.
+  WebRtc_Word32 BackgroundNoiseMode(ACMBackgroundNoiseMode& mode);
+
+  /////////////////////////////////////////
+  // (FEC) Forward Error Correction
+  //
+
+  // Configure FEC status i.e on/off.
+  WebRtc_Word32 SetFECStatus(const bool enable_fec);
+
+  // Get FEC status.
+  bool FECStatus() const;
+
+  /////////////////////////////////////////
+  //   (VAD) Voice Activity Detection
+  //   and
+  //   (CNG) Comfort Noise Generation
+  //
+
+  WebRtc_Word32 SetVAD(const bool enable_dtx = true,
+                       const bool enable_vad = false,
+                       const ACMVADMode mode = VADNormal);
+
+  WebRtc_Word32 VAD(bool& dtx_enabled, bool& vad_enabled,
+                    ACMVADMode& mode) const;
+
+  WebRtc_Word32 RegisterVADCallback(ACMVADCallback* vadCallback);
+
+  // Get VAD aggressiveness on the incoming stream.
+  ACMVADMode ReceiveVADMode() const;
+
+  // Configure VAD aggressiveness on the incoming stream.
+  WebRtc_Word16 SetReceiveVADMode(const ACMVADMode mode);
+
+  /////////////////////////////////////////
+  //   Receiver
+  //
+
+  // Initialize receiver, resets codec database etc.
+  WebRtc_Word32 InitializeReceiver();
+
+  // Reset the decoder state.
+  WebRtc_Word32 ResetDecoder();
+
+  // Get current receive frequency.
+  WebRtc_Word32 ReceiveFrequency() const;
+
+  // Get current playout frequency.
+  WebRtc_Word32 PlayoutFrequency() const;
+
+  // Register possible reveive codecs, can be called multiple times,
+  // for codecs, CNG, DTMF, RED.
+  WebRtc_Word32 RegisterReceiveCodec(const CodecInst& receive_codec);
+
+  // Get current received codec.
+  WebRtc_Word32 ReceiveCodec(CodecInst& current_codec) const;
+
+  // Incoming packet from network parsed and ready for decode.
+  WebRtc_Word32 IncomingPacket(const WebRtc_UWord8* incoming_payload,
+                               const WebRtc_Word32 payload_length,
+                               const WebRtcRTPHeader& rtp_info);
+
+  // Incoming payloads, without rtp-info, the rtp-info will be created in ACM.
+  // One usage for this API is when pre-encoded files are pushed in ACM.
+  WebRtc_Word32 IncomingPayload(const WebRtc_UWord8* incoming_payload,
+                                const WebRtc_Word32 payload_length,
+                                const WebRtc_UWord8 payload_type,
+                                const WebRtc_UWord32 timestamp = 0);
+
+  // Minimum playout dealy (used for lip-sync).
+  WebRtc_Word32 SetMinimumPlayoutDelay(const WebRtc_Word32 time_ms);
+
+  // Configure Dtmf playout status i.e on/off playout the incoming outband Dtmf
+  // tone.
+  WebRtc_Word32 SetDtmfPlayoutStatus(const bool enable);
+
+  // Get Dtmf playout status.
+  bool DtmfPlayoutStatus() const;
+
+  // Estimate the Bandwidth based on the incoming stream, needed
+  // for one way audio where the RTCP send the BW estimate.
+  // This is also done in the RTP module .
+  WebRtc_Word32 DecoderEstimatedBandwidth() const;
+
+  // Set playout mode voice, fax.
+  WebRtc_Word32 SetPlayoutMode(const AudioPlayoutMode mode);
+
+  // Get playout mode voice, fax.
+  AudioPlayoutMode PlayoutMode() const;
+
+  // Get playout timestamp.
+  WebRtc_Word32 PlayoutTimestamp(WebRtc_UWord32& timestamp);
+
+  // Get 10 milliseconds of raw audio data to play out, and
+  // automatic resample to the requested frequency if > 0.
+  WebRtc_Word32 PlayoutData10Ms(const WebRtc_Word32 desired_freq_hz,
+                                AudioFrame &audio_frame);
+
+  /////////////////////////////////////////
+  //   Statistics
+  //
+
+  WebRtc_Word32 NetworkStatistics(ACMNetworkStatistics& statistics) const;
+
+  void DestructEncoderInst(void* inst);
+
+  WebRtc_Word16 AudioBuffer(WebRtcACMAudioBuff& buffer);
+
+  // GET RED payload for iSAC. The method id called when 'this' ACM is
+  // the default ACM.
+  WebRtc_Word32 REDPayloadISAC(const WebRtc_Word32 isac_rate,
+                               const WebRtc_Word16 isac_bw_estimate,
+                               WebRtc_UWord8* payload,
+                               WebRtc_Word16* length_bytes);
+
+  WebRtc_Word16 SetAudioBuffer(WebRtcACMAudioBuff& buffer);
+
+  WebRtc_UWord32 EarliestTimestamp() const;
+
+  WebRtc_Word32 LastEncodedTimestamp(WebRtc_UWord32& timestamp) const;
+
+  WebRtc_Word32 ReplaceInternalDTXWithWebRtc(const bool use_webrtc_dtx);
+
+  WebRtc_Word32 IsInternalDTXReplacedWithWebRtc(bool& uses_webrtc_dtx);
+
+  WebRtc_Word32 SetISACMaxRate(const WebRtc_UWord32 max_bit_per_sec);
+
+  WebRtc_Word32 SetISACMaxPayloadSize(const WebRtc_UWord16 max_size_bytes);
+
+  WebRtc_Word32 ConfigISACBandwidthEstimator(
+      const WebRtc_UWord8 frame_size_ms,
+      const WebRtc_UWord16 rate_bit_per_sec,
+      const bool enforce_frame_size = false);
+
+  WebRtc_Word32 UnregisterReceiveCodec(const WebRtc_Word16 payload_type);
+
+ protected:
+  void UnregisterSendCodec();
+
+  WebRtc_Word32 UnregisterReceiveCodecSafe(const WebRtc_Word16 id);
+
+  ACMGenericCodec* CreateCodec(const CodecInst& codec);
+
+  WebRtc_Word16 DecoderParamByPlType(const WebRtc_UWord8 payload_type,
+                                     WebRtcACMCodecParams& codec_params) const;
+
+  WebRtc_Word16 DecoderListIDByPlName(
+      const char* name, const WebRtc_UWord16 frequency = 0) const;
+
+  WebRtc_Word32 InitializeReceiverSafe();
+
+  bool HaveValidEncoder(const char* caller_name) const;
+
+  WebRtc_Word32 RegisterRecCodecMSSafe(const CodecInst& receive_codec,
+                                       WebRtc_Word16 codec_id,
+                                       WebRtc_Word16 mirror_id,
+                                       ACMNetEQ::JB jitter_buffer);
+
+ private:
+  // Change required states after starting to receive the codec corresponding
+  // to |index|.
+  int UpdateUponReceivingCodec(int index);
+
+  // Remove all slaves and initialize a stereo slave with required codecs
+  // from the master.
+  int InitStereoSlave();
+
+  // Returns true if the codec's |index| is registered with the master and
+  // is a stereo codec, RED or CN.
+  bool IsCodecForSlave(int index) const;
+
+  // Returns true if the |codec| is RED.
+  bool IsCodecRED(const CodecInst* codec) const;
+  // ...or if its |index| is RED.
+  bool IsCodecRED(int index) const;
+
+  // Returns true if the |codec| is CN.
+  bool IsCodecCN(int index) const;
+  // ...or if its |index| is CN.
+  bool IsCodecCN(const CodecInst* codec) const;
+
+  AudioPacketizationCallback* _packetizationCallback;
+  WebRtc_Word32 _id;
+  WebRtc_UWord32 _lastTimestamp;
+  WebRtc_UWord32 _lastInTimestamp;
+  CodecInst _sendCodecInst;
+  uint8_t _cng_nb_pltype;
+  uint8_t _cng_wb_pltype;
+  uint8_t _cng_swb_pltype;
+  uint8_t _red_pltype;
+  bool _vadEnabled;
+  bool _dtxEnabled;
+  ACMVADMode _vadMode;
+  ACMGenericCodec* _codecs[ACMCodecDB::kMaxNumCodecs];
+  ACMGenericCodec* _slaveCodecs[ACMCodecDB::kMaxNumCodecs];
+  WebRtc_Word16 _mirrorCodecIdx[ACMCodecDB::kMaxNumCodecs];
+  bool _stereoReceive[ACMCodecDB::kMaxNumCodecs];
+  bool _stereoReceiveRegistered;
+  bool _stereoSend;
+  int _prev_received_channel;
+  int _expected_channels;
+  WebRtc_Word32 _currentSendCodecIdx;
+  int _current_receive_codec_idx;
+  bool _sendCodecRegistered;
+  ACMResampler _inputResampler;
+  ACMResampler _outputResampler;
+  ACMNetEQ _netEq;
+  CriticalSectionWrapper* _acmCritSect;
+  ACMVADCallback* _vadCallback;
+  WebRtc_UWord8 _lastRecvAudioCodecPlType;
+
+  // RED/FEC.
+  bool _isFirstRED;
+  bool _fecEnabled;
+  WebRtc_UWord8* _redBuffer;
+  RTPFragmentationHeader* _fragmentation;
+  WebRtc_UWord32 _lastFECTimestamp;
+  // If no RED is registered as receive codec this
+  // will have an invalid value.
+  WebRtc_UWord8 _receiveREDPayloadType;
+
+  // This is to keep track of CN instances where we can send DTMFs.
+  WebRtc_UWord8 _previousPayloadType;
+
+  // This keeps track of payload types associated with _codecs[].
+  // We define it as signed variable and initialize with -1 to indicate
+  // unused elements.
+  WebRtc_Word16 _registeredPlTypes[ACMCodecDB::kMaxNumCodecs];
+
+  // Used when payloads are pushed into ACM without any RTP info
+  // One example is when pre-encoded bit-stream is pushed from
+  // a file.
+  WebRtcRTPHeader* _dummyRTPHeader;
+  WebRtc_UWord16 _recvPlFrameSizeSmpls;
+
+  bool _receiverInitialized;
+  ACMDTMFDetection* _dtmfDetector;
+
+  AudioCodingFeedback* _dtmfCallback;
+  WebRtc_Word16 _lastDetectedTone;
+  CriticalSectionWrapper* _callbackCritSect;
+
+  AudioFrame _audioFrame;
+
+#ifdef ACM_QA_TEST
+  FILE* _outgoingPL;
+  FILE* _incomingPL;
+#endif
+
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_SOURCE_AUDIO_CODING_MODULE_IMPL_H_
diff --git a/src/modules/audio_coding/main/test/ACMTest.cc b/src/modules/audio_coding/main/test/ACMTest.cc
new file mode 100644
index 0000000..1bbac0e
--- /dev/null
+++ b/src/modules/audio_coding/main/test/ACMTest.cc
@@ -0,0 +1,16 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "ACMTest.h"
+
+ACMTest::~ACMTest()
+{
+}
+
diff --git a/src/modules/audio_coding/main/test/ACMTest.h b/src/modules/audio_coding/main/test/ACMTest.h
new file mode 100644
index 0000000..e965671
--- /dev/null
+++ b/src/modules/audio_coding/main/test/ACMTest.h
@@ -0,0 +1,21 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef ACMTEST_H
+#define ACMTEST_H
+
+class ACMTest
+{
+public:
+    virtual ~ACMTest() =0;
+    virtual void Perform() =0;
+};
+
+#endif
diff --git a/src/modules/audio_coding/main/test/APITest.cc b/src/modules/audio_coding/main/test/APITest.cc
new file mode 100644
index 0000000..3cf9bc1
--- /dev/null
+++ b/src/modules/audio_coding/main/test/APITest.cc
@@ -0,0 +1,1567 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <cctype>
+#include <iostream>
+#include <ostream>
+#include <string>
+
+#include "gtest/gtest.h"
+
+#include "APITest.h"
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+#include "testsupport/fileutils.h"
+#include "tick_util.h"
+#include "trace.h"
+#include "utility.h"
+
+namespace webrtc {
+
+#define TEST_DURATION_SEC 600
+
+#define NUMBER_OF_SENDER_TESTS 6
+
+#define MAX_FILE_NAME_LENGTH_BYTE 500
+#define CHECK_THREAD_NULLITY(myThread, S)                                      \
+    if(myThread != NULL)                                                       \
+    {                                                                          \
+        unsigned int i;                                                        \
+        (myThread)->Start(i);                                                  \
+    }                                                                          \
+    else                                                                       \
+    {                                                                          \
+      ADD_FAILURE() << S;                                                      \
+    }
+
+
+void
+APITest::Wait(WebRtc_UWord32 waitLengthMs)
+{
+    if(_randomTest)
+    {
+        return;
+    }
+    else
+    {
+        EventWrapper* myEvent = EventWrapper::Create();
+        myEvent->Wait(waitLengthMs);
+        delete myEvent;
+        return;
+    }
+}
+
+
+
+APITest::APITest():
+_acmA(NULL),
+_acmB(NULL),
+_channel_A2B(NULL),
+_channel_B2A(NULL),
+_writeToFile(true),
+_pullEventA(NULL),
+_pushEventA(NULL),
+_processEventA(NULL),
+_apiEventA(NULL),
+_pullEventB(NULL),
+_pushEventB(NULL),
+_processEventB(NULL),
+_apiEventB(NULL),
+_codecCntrA(0),
+_codecCntrB(0),
+_thereIsEncoderA(false),
+_thereIsEncoderB(false),
+_thereIsDecoderA(false),
+_thereIsDecoderB(false),
+_sendVADA(false),
+_sendDTXA(false),
+_sendVADModeA(VADNormal),
+_sendVADB(false),
+_sendDTXB(false),
+_sendVADModeB(VADNormal),
+_minDelayA(0),
+_minDelayB(0),
+_dotPositionA(0),
+_dotMoveDirectionA(1),
+_dotPositionB(39),
+_dotMoveDirectionB(-1),
+_dtmfCallback(NULL),
+_vadCallbackA(NULL),
+_vadCallbackB(NULL),
+_apiTestRWLock(*RWLockWrapper::CreateRWLock()),
+_randomTest(false),
+_testNumA(0),
+_testNumB(1)
+{
+    int n;
+    for( n = 0; n < 32; n++)
+    {
+        _payloadUsed[n] = false;
+    }
+
+    for(n = 0; n < 3; n++)
+    {
+        _receiveVADActivityA[n] = 0;
+        _receiveVADActivityB[n] = 0;
+    }
+
+    _movingDot[40] = '\0';
+
+    for(int n = 0; n <40; n++)
+    {
+        _movingDot[n]  = ' ';
+    }
+}
+
+APITest::~APITest()
+{
+    DESTROY_ACM(_acmA);
+    DESTROY_ACM(_acmB);
+
+    DELETE_POINTER(_channel_A2B);
+    DELETE_POINTER(_channel_B2A);
+
+    DELETE_POINTER(_pushEventA);
+    DELETE_POINTER(_pullEventA);
+    DELETE_POINTER(_processEventA);
+    DELETE_POINTER(_apiEventA);
+
+    DELETE_POINTER(_pushEventB);
+    DELETE_POINTER(_pullEventB);
+    DELETE_POINTER(_processEventB);
+    DELETE_POINTER(_apiEventB);
+
+    _inFileA.Close();
+    _outFileA.Close();
+
+    _inFileB.Close();
+    _outFileB.Close();
+
+    DELETE_POINTER(_dtmfCallback);
+    DELETE_POINTER(_vadCallbackA);
+    DELETE_POINTER(_vadCallbackB);
+
+    delete &_apiTestRWLock;
+}
+
+
+
+//WebRtc_Word16
+//APITest::SetInFile(char* fileName, WebRtc_UWord16 frequencyHz)
+//{
+//    return _inFile.Open(fileName, frequencyHz, "rb");
+//}
+//
+//WebRtc_Word16
+//APITest::SetOutFile(char* fileName, WebRtc_UWord16 frequencyHz)
+//{
+//    return _outFile.Open(fileName, frequencyHz, "wb");
+//}
+
+WebRtc_Word16
+APITest::SetUp()
+{
+    _acmA = AudioCodingModule::Create(1);
+    _acmB = AudioCodingModule::Create(2);
+
+    CodecInst dummyCodec;
+    int lastPayloadType = 0;
+
+    WebRtc_Word16 numCodecs = _acmA->NumberOfCodecs();
+    for(WebRtc_UWord8 n = 0; n < numCodecs; n++)
+    {
+        AudioCodingModule::Codec(n, dummyCodec);
+        if((STR_CASE_CMP(dummyCodec.plname, "CN") == 0) &&
+            (dummyCodec.plfreq == 32000))
+        {
+            continue;
+        }
+
+        printf("Register Receive Codec %s  ", dummyCodec.plname);
+
+        if((n != 0) && !FixedPayloadTypeCodec(dummyCodec.plname))
+        {
+            // Check registration with an already occupied payload type
+            int currentPayloadType = dummyCodec.pltype;
+            dummyCodec.pltype = 97; //lastPayloadType;
+            CHECK_ERROR(_acmB->RegisterReceiveCodec(dummyCodec));
+            dummyCodec.pltype = currentPayloadType;
+        }
+
+        if((n < numCodecs - 1) && !FixedPayloadTypeCodec(dummyCodec.plname))
+        {
+            // test if re-registration works;
+            CodecInst nextCodec;
+            int currentPayloadType = dummyCodec.pltype;
+            AudioCodingModule::Codec(n + 1, nextCodec);
+            dummyCodec.pltype = nextCodec.pltype;
+            if(!FixedPayloadTypeCodec(nextCodec.plname))
+            {
+                _acmB->RegisterReceiveCodec(dummyCodec);
+            }
+            dummyCodec.pltype = currentPayloadType;
+        }
+
+        if((n < numCodecs - 1) && !FixedPayloadTypeCodec(dummyCodec.plname))
+        {
+            // test if un-registration works;
+            CodecInst nextCodec;
+            AudioCodingModule::Codec(n + 1, nextCodec);
+            nextCodec.pltype = dummyCodec.pltype;
+            if(!FixedPayloadTypeCodec(nextCodec.plname))
+            {
+                CHECK_ERROR_MT(_acmA->RegisterReceiveCodec(nextCodec));
+                CHECK_ERROR_MT(_acmA->UnregisterReceiveCodec(nextCodec.pltype));
+            }
+        }
+
+
+        CHECK_ERROR_MT(_acmA->RegisterReceiveCodec(dummyCodec));
+        printf("   side A done!");
+        CHECK_ERROR_MT(_acmB->RegisterReceiveCodec(dummyCodec));
+        printf("   side B done!\n");
+
+        if(!strcmp(dummyCodec.plname, "CN"))
+        {
+            CHECK_ERROR_MT(_acmA->RegisterSendCodec(dummyCodec));
+            CHECK_ERROR_MT(_acmB->RegisterSendCodec(dummyCodec));
+        }
+        lastPayloadType = dummyCodec.pltype;
+        if((lastPayloadType >= 96) && (lastPayloadType <= 127))
+        {
+            _payloadUsed[lastPayloadType - 96] = true;
+        }
+    }
+    _thereIsDecoderA = true;
+    _thereIsDecoderB = true;
+
+    // Register Send Codec
+    AudioCodingModule::Codec((WebRtc_UWord8)_codecCntrA, dummyCodec);
+    CHECK_ERROR_MT(_acmA->RegisterSendCodec(dummyCodec));
+    _thereIsEncoderA = true;
+    //
+    AudioCodingModule::Codec((WebRtc_UWord8)_codecCntrB, dummyCodec);
+    CHECK_ERROR_MT(_acmB->RegisterSendCodec(dummyCodec));
+    _thereIsEncoderB = true;
+
+    WebRtc_UWord16 frequencyHz;
+
+    printf("\n\nAPI Test\n");
+    printf("========\n");
+    printf("Hit enter to accept the default values indicated in []\n\n");
+
+    //--- Input A
+    std::string file_name =
+        webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
+    frequencyHz = 32000;
+    printf("Enter input file at side A [%s]: ", file_name.c_str());
+    PCMFile::ChooseFile(&file_name, 499, &frequencyHz);
+    _inFileA.Open(file_name, frequencyHz, "rb", true);
+
+    //--- Output A
+    std::string out_file_a = webrtc::test::OutputPath() + "outA.pcm";
+    printf("Enter output file at side A [%s]: ", out_file_a.c_str());
+    PCMFile::ChooseFile(&out_file_a, 499, &frequencyHz);
+    _outFileA.Open(out_file_a, frequencyHz, "wb");
+
+    //--- Input B
+    file_name = webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
+    printf("\n\nEnter input file at side B [%s]: ", file_name.c_str());
+    PCMFile::ChooseFile(&file_name, 499, &frequencyHz);
+    _inFileB.Open(file_name, frequencyHz, "rb", true);
+
+    //--- Output B
+    std::string out_file_b = webrtc::test::OutputPath() + "outB.pcm";
+    printf("Enter output file at side B [%s]: ", out_file_b.c_str());
+    PCMFile::ChooseFile(&out_file_b, 499, &frequencyHz);
+    _outFileB.Open(out_file_b, frequencyHz, "wb");
+
+    //--- Set A-to-B channel
+    _channel_A2B = new Channel(2);
+    CHECK_ERROR_MT(_acmA->RegisterTransportCallback(_channel_A2B));
+    _channel_A2B->RegisterReceiverACM(_acmB);
+
+    //--- Set B-to-A channel
+    _channel_B2A = new Channel(1);
+    CHECK_ERROR_MT(_acmB->RegisterTransportCallback(_channel_B2A));
+    _channel_B2A->RegisterReceiverACM(_acmA);
+
+    //--- EVENT TIMERS
+    // A
+    _pullEventA    = EventWrapper::Create();
+    _pushEventA    = EventWrapper::Create();
+    _processEventA = EventWrapper::Create();
+    _apiEventA     = EventWrapper::Create();
+    // B
+    _pullEventB    = EventWrapper::Create();
+    _pushEventB    = EventWrapper::Create();
+    _processEventB = EventWrapper::Create();
+    _apiEventB     = EventWrapper::Create();
+
+    //--- I/O params
+    // A
+    _outFreqHzA = _outFileA.SamplingFrequency();
+    // B
+    _outFreqHzB = _outFileB.SamplingFrequency();
+
+
+    //Trace::SetEncryptedTraceFile("ACMAPITestEncrypted.txt");
+
+    char print[11];
+
+    // Create a trace file.
+    Trace::CreateTrace();
+    Trace::SetTraceFile((webrtc::test::OutputPath() +
+        "acm_api_trace.txt").c_str());
+
+    printf("\nRandom Test (y/n)?");
+    EXPECT_TRUE(fgets(print, 10, stdin) != NULL);
+    print[10] = '\0';
+    if(strstr(print, "y") != NULL)
+    {
+        _randomTest = true;
+        _verbose = false;
+        _writeToFile = false;
+    }
+    else
+    {
+        _randomTest = false;
+        printf("\nPrint Tests (y/n)? ");
+        EXPECT_TRUE(fgets(print, 10, stdin) != NULL);
+        print[10] = '\0';
+        if(strstr(print, "y") == NULL)
+        {
+            EXPECT_TRUE(freopen("APITest_log.txt", "w", stdout) != 0);
+            _verbose = false;
+        }
+    }
+
+#ifdef WEBRTC_DTMF_DETECTION
+    _dtmfCallback = new DTMFDetector;
+#endif
+    _vadCallbackA = new VADCallback;
+    _vadCallbackB = new VADCallback;
+
+    return 0;
+}
+
+bool
+APITest::PushAudioThreadA(void* obj)
+{
+    return static_cast<APITest*>(obj)->PushAudioRunA();
+}
+
+bool
+APITest::PushAudioThreadB(void* obj)
+{
+    return static_cast<APITest*>(obj)->PushAudioRunB();
+}
+
+bool
+APITest::PullAudioThreadA(void* obj)
+{
+    return static_cast<APITest*>(obj)->PullAudioRunA();
+}
+
+bool
+APITest::PullAudioThreadB(void* obj)
+{
+    return static_cast<APITest*>(obj)->PullAudioRunB();
+}
+
+bool
+APITest::ProcessThreadA(void* obj)
+{
+    return static_cast<APITest*>(obj)->ProcessRunA();
+}
+
+bool
+APITest::ProcessThreadB(void* obj)
+{
+    return static_cast<APITest*>(obj)->ProcessRunB();
+}
+
+bool
+APITest::APIThreadA(void* obj)
+{
+    return static_cast<APITest*>(obj)->APIRunA();
+}
+
+bool
+APITest::APIThreadB(void* obj)
+{
+    return static_cast<APITest*>(obj)->APIRunB();
+}
+
+bool
+APITest::PullAudioRunA()
+{
+    _pullEventA->Wait(100);
+    AudioFrame audioFrame;
+    if(_acmA->PlayoutData10Ms(_outFreqHzA, audioFrame) < 0)
+    {
+        bool thereIsDecoder;
+        {
+            ReadLockScoped rl(_apiTestRWLock);
+            thereIsDecoder = _thereIsDecoderA;
+        }
+        if(thereIsDecoder)
+        {
+            fprintf(stderr, "\n>>>>>>    cannot pull audio A       <<<<<<<< \n");
+        }
+    }
+    else
+    {
+        if(_writeToFile)
+        {
+            _outFileA.Write10MsData(audioFrame);
+        }
+        _receiveVADActivityA[(int)audioFrame.vad_activity_]++;
+    }
+    return true;
+}
+
+bool
+APITest::PullAudioRunB()
+{
+    _pullEventB->Wait(100);
+    AudioFrame audioFrame;
+    if(_acmB->PlayoutData10Ms(_outFreqHzB, audioFrame) < 0)
+    {
+        bool thereIsDecoder;
+        {
+            ReadLockScoped rl(_apiTestRWLock);
+            thereIsDecoder = _thereIsDecoderB;
+        }
+        if(thereIsDecoder)
+        {
+            fprintf(stderr, "\n>>>>>>    cannot pull audio B       <<<<<<<< \n");
+            fprintf(stderr, "%d %d\n", _testNumA, _testNumB);
+        }
+    }
+    else
+    {
+        if(_writeToFile)
+        {
+            _outFileB.Write10MsData(audioFrame);
+        }
+        _receiveVADActivityB[(int)audioFrame.vad_activity_]++;
+    }
+    return true;
+}
+
+bool
+APITest::PushAudioRunA()
+{
+    _pushEventA->Wait(100);
+    AudioFrame audioFrame;
+    _inFileA.Read10MsData(audioFrame);
+    if(_acmA->Add10MsData(audioFrame) < 0)
+    {
+        bool thereIsEncoder;
+        {
+            ReadLockScoped rl(_apiTestRWLock);
+            thereIsEncoder = _thereIsEncoderA;
+        }
+        if(thereIsEncoder)
+        {
+            fprintf(stderr, "\n>>>>        add10MsData at A failed       <<<<\n");
+        }
+    }
+    return true;
+}
+
+bool
+APITest::PushAudioRunB()
+{
+    _pushEventB->Wait(100);
+    AudioFrame audioFrame;
+    _inFileB.Read10MsData(audioFrame);
+    if(_acmB->Add10MsData(audioFrame) < 0)
+    {
+        bool thereIsEncoder;
+        {
+            ReadLockScoped rl(_apiTestRWLock);
+            thereIsEncoder = _thereIsEncoderB;
+        }
+
+        if(thereIsEncoder)
+        {
+            fprintf(stderr, "\n>>>>   cannot add audio to B    <<<<");
+        }
+    }
+
+    return true;
+}
+
+bool
+APITest::ProcessRunA()
+{
+    _processEventA->Wait(100);
+    if(_acmA->Process() < 0)
+    {
+        // do not print error message if there is no encoder
+        bool thereIsEncoder;
+        {
+            ReadLockScoped rl(_apiTestRWLock);
+            thereIsEncoder = _thereIsEncoderA;
+        }
+
+        if(thereIsEncoder)
+        {
+            fprintf(stderr, "\n>>>>>      Process Failed at A     <<<<<\n");
+        }
+    }
+    return true;
+}
+
+bool
+APITest::ProcessRunB()
+{
+    _processEventB->Wait(100);
+    if(_acmB->Process() < 0)
+    {
+        bool thereIsEncoder;
+        {
+            ReadLockScoped rl(_apiTestRWLock);
+            thereIsEncoder = _thereIsEncoderB;
+        }
+        if(thereIsEncoder)
+        {
+            fprintf(stderr, "\n>>>>>      Process Failed at B     <<<<<\n");
+        }
+    }
+    return true;
+}
+
+/*/
+ *
+ * In side A we test the APIs which are related to sender Side.
+ *
+/*/
+
+
+void
+APITest::RunTest(char thread)
+{
+    int testNum;
+    {
+        WriteLockScoped cs(_apiTestRWLock);
+        if(thread == 'A')
+        {
+            _testNumA = (_testNumB + 1 + (rand() % 6)) % 7;
+            testNum = _testNumA;
+
+            _movingDot[_dotPositionA] = ' ';
+            if(_dotPositionA == 0)
+            {
+                _dotMoveDirectionA = 1;
+            }
+            if(_dotPositionA == 19)
+            {
+                _dotMoveDirectionA = -1;
+            }
+            _dotPositionA += _dotMoveDirectionA;
+            _movingDot[_dotPositionA] = (_dotMoveDirectionA > 0)? '>':'<';
+        }
+        else
+        {
+            _testNumB = (_testNumA + 1 + (rand() % 6)) % 7;
+            testNum = _testNumB;
+
+            _movingDot[_dotPositionB] = ' ';
+            if(_dotPositionB == 20)
+            {
+                _dotMoveDirectionB = 1;
+            }
+            if(_dotPositionB == 39)
+            {
+                _dotMoveDirectionB = -1;
+            }
+            _dotPositionB += _dotMoveDirectionB;
+            _movingDot[_dotPositionB] = (_dotMoveDirectionB > 0)? '>':'<';
+        }
+        //fprintf(stderr, "%c: %d \n", thread, testNum);
+        //fflush(stderr);
+    }
+    switch(testNum)
+    {
+    case 0:
+        CurrentCodec('A');
+        ChangeCodec('A');
+        break;
+    case 1:
+        TestPlayout('B');
+        break;
+    case 2:
+        if(!_randomTest)
+        {
+            fprintf(stdout, "\nTesting Delay ...\n");
+        }
+        TestDelay('A');
+        break;
+    case 3:
+        TestSendVAD('A');
+        break;
+    case 4:
+        TestRegisteration('A');
+        break;
+    case 5:
+        TestReceiverVAD('A');
+        break;
+    case 6:
+#ifdef WEBRTC_DTMF_DETECTION
+        LookForDTMF('A');
+#endif
+        break;
+    default:
+        fprintf(stderr, "Wrong Test Number\n");
+        getchar();
+        exit(1);
+    }
+}
+
+
+
+bool
+APITest::APIRunA()
+{
+    _apiEventA->Wait(50);
+
+    bool randomTest;
+    {
+        ReadLockScoped rl(_apiTestRWLock);
+        randomTest = _randomTest;
+    }
+    if(randomTest)
+    {
+        RunTest('A');
+    }
+    else
+    {
+        CurrentCodec('A');
+        ChangeCodec('A');
+        TestPlayout('B');
+        if(_codecCntrA == 0)
+        {
+            fprintf(stdout, "\nTesting Delay ...\n");
+            TestDelay('A');
+        }
+        // VAD TEST
+        TestSendVAD('A');
+        TestRegisteration('A');
+        TestReceiverVAD('A');
+#ifdef WEBRTC_DTMF_DETECTION
+        LookForDTMF('A');
+#endif
+    }
+    return true;
+}
+
+bool
+APITest::APIRunB()
+{
+    _apiEventB->Wait(50);
+    bool randomTest;
+    {
+        ReadLockScoped rl(_apiTestRWLock);
+        randomTest = _randomTest;
+    }
+    //_apiEventB->Wait(2000);
+    if(randomTest)
+    {
+        RunTest('B');
+    }
+
+    return true;
+}
+
+void
+APITest::Perform()
+{
+    SetUp();
+
+    //--- THREADS
+    // A
+    // PUSH
+    ThreadWrapper* myPushAudioThreadA = ThreadWrapper::CreateThread(PushAudioThreadA,
+        this, kNormalPriority, "PushAudioThreadA");
+    CHECK_THREAD_NULLITY(myPushAudioThreadA, "Unable to start A::PUSH thread");
+    // PULL
+    ThreadWrapper* myPullAudioThreadA = ThreadWrapper::CreateThread(PullAudioThreadA,
+        this, kNormalPriority, "PullAudioThreadA");
+    CHECK_THREAD_NULLITY(myPullAudioThreadA, "Unable to start A::PULL thread");
+    // Process
+    ThreadWrapper* myProcessThreadA = ThreadWrapper::CreateThread(ProcessThreadA,
+        this, kNormalPriority, "ProcessThreadA");
+    CHECK_THREAD_NULLITY(myProcessThreadA, "Unable to start A::Process thread");
+    // API
+    ThreadWrapper* myAPIThreadA = ThreadWrapper::CreateThread(APIThreadA,
+        this, kNormalPriority, "APIThreadA");
+    CHECK_THREAD_NULLITY(myAPIThreadA, "Unable to start A::API thread");
+    // B
+    // PUSH
+    ThreadWrapper* myPushAudioThreadB = ThreadWrapper::CreateThread(PushAudioThreadB,
+        this, kNormalPriority, "PushAudioThreadB");
+    CHECK_THREAD_NULLITY(myPushAudioThreadB, "Unable to start B::PUSH thread");
+    // PULL
+    ThreadWrapper* myPullAudioThreadB = ThreadWrapper::CreateThread(PullAudioThreadB,
+        this, kNormalPriority, "PullAudioThreadB");
+    CHECK_THREAD_NULLITY(myPullAudioThreadB, "Unable to start B::PULL thread");
+    // Process
+    ThreadWrapper* myProcessThreadB = ThreadWrapper::CreateThread(ProcessThreadB,
+        this, kNormalPriority, "ProcessThreadB");
+    CHECK_THREAD_NULLITY(myProcessThreadB, "Unable to start B::Process thread");
+    // API
+    ThreadWrapper* myAPIThreadB = ThreadWrapper::CreateThread(APIThreadB,
+        this, kNormalPriority, "APIThreadB");
+    CHECK_THREAD_NULLITY(myAPIThreadB, "Unable to start B::API thread");
+
+
+    //_apiEventA->StartTimer(true, 5000);
+    //_apiEventB->StartTimer(true, 5000);
+
+    _processEventA->StartTimer(true, 10);
+    _processEventB->StartTimer(true, 10);
+
+    _pullEventA->StartTimer(true, 10);
+    _pullEventB->StartTimer(true, 10);
+
+    _pushEventA->StartTimer(true, 10);
+    _pushEventB->StartTimer(true, 10);
+
+    // Keep main thread waiting for sender/receiver
+    // threads to complete
+    EventWrapper* completeEvent = EventWrapper::Create();
+    WebRtc_UWord64 startTime = TickTime::MillisecondTimestamp();
+    WebRtc_UWord64 currentTime;
+    do
+    {
+        {
+            //ReadLockScoped rl(_apiTestRWLock);
+            //fprintf(stderr, "\r%s", _movingDot);
+        }
+        //fflush(stderr);
+        completeEvent->Wait(50);
+        currentTime = TickTime::MillisecondTimestamp();
+    } while((currentTime - startTime) < 120000); // Run test in 2 minutes (120000 ms)
+
+    //completeEvent->Wait(0xFFFFFFFF);//(unsigned long)((unsigned long)TEST_DURATION_SEC * (unsigned long)1000));
+    delete completeEvent;
+
+    myPushAudioThreadA->Stop();
+    myPullAudioThreadA->Stop();
+    myProcessThreadA->Stop();
+    myAPIThreadA->Stop();
+
+    delete myPushAudioThreadA;
+    delete myPullAudioThreadA;
+    delete myProcessThreadA;
+    delete myAPIThreadA;
+
+
+    myPushAudioThreadB->Stop();
+    myPullAudioThreadB->Stop();
+    myProcessThreadB->Stop();
+    myAPIThreadB->Stop();
+
+    delete myPushAudioThreadB;
+    delete myPullAudioThreadB;
+    delete myProcessThreadB;
+    delete myAPIThreadB;
+}
+
+
+void
+APITest::CheckVADStatus(char side)
+{
+
+    bool dtxEnabled;
+    bool vadEnabled;
+    ACMVADMode vadMode;
+
+    if(side == 'A')
+    {
+        _acmA->VAD(dtxEnabled, vadEnabled, vadMode);
+        _acmA->RegisterVADCallback(NULL);
+        _vadCallbackA->Reset();
+        _acmA->RegisterVADCallback(_vadCallbackA);
+
+        if(!_randomTest)
+        {
+            if(_verbose)
+            {
+                fprintf(stdout, "DTX %3s, VAD %3s, Mode %d",
+                    dtxEnabled? "ON":"OFF",
+                    vadEnabled? "ON":"OFF",
+                    (int)vadMode);
+                Wait(5000);
+                fprintf(stdout, " => bit-rate %3.0f kbps\n",
+                    _channel_A2B->BitRate());
+            }
+            else
+            {
+                Wait(5000);
+                fprintf(stdout, "DTX %3s, VAD %3s, Mode %d => bit-rate %3.0f kbps\n",
+                    dtxEnabled? "ON":"OFF",
+                    vadEnabled? "ON":"OFF",
+                    (int)vadMode,
+                    _channel_A2B->BitRate());
+            }
+            _vadCallbackA->PrintFrameTypes();
+        }
+
+        if(dtxEnabled != _sendDTXA)
+        {
+            fprintf(stderr, ">>>   Error Enabling DTX    <<<\n");
+        }
+        if((vadEnabled != _sendVADA) && (!dtxEnabled))
+        {
+            fprintf(stderr, ">>>   Error Enabling VAD    <<<\n");
+        }
+        if((vadMode != _sendVADModeA) && vadEnabled)
+        {
+            fprintf(stderr, ">>>   Error setting VAD-mode    <<<\n");
+        }
+    }
+    else
+    {
+        _acmB->VAD(dtxEnabled, vadEnabled, vadMode);
+
+        _acmB->RegisterVADCallback(NULL);
+        _vadCallbackB->Reset();
+        _acmB->RegisterVADCallback(_vadCallbackB);
+
+        if(!_randomTest)
+        {
+            if(_verbose)
+            {
+                fprintf(stdout, "DTX %3s, VAD %3s, Mode %d",
+                    dtxEnabled? "ON":"OFF",
+                    vadEnabled? "ON":"OFF",
+                    (int)vadMode);
+                Wait(5000);
+                fprintf(stdout, " => bit-rate %3.0f kbps\n",
+                    _channel_B2A->BitRate());
+            }
+            else
+            {
+                Wait(5000);
+                fprintf(stdout, "DTX %3s, VAD %3s, Mode %d => bit-rate %3.0f kbps\n",
+                    dtxEnabled? "ON":"OFF",
+                    vadEnabled? "ON":"OFF",
+                    (int)vadMode,
+                    _channel_B2A->BitRate());
+            }
+            _vadCallbackB->PrintFrameTypes();
+        }
+
+        if(dtxEnabled != _sendDTXB)
+        {
+            fprintf(stderr, ">>>   Error Enabling DTX    <<<\n");
+        }
+        if((vadEnabled != _sendVADB) && (!dtxEnabled))
+        {
+            fprintf(stderr, ">>>   Error Enabling VAD    <<<\n");
+        }
+        if((vadMode != _sendVADModeB) && vadEnabled)
+        {
+            fprintf(stderr, ">>>   Error setting VAD-mode    <<<\n");
+        }
+    }
+}
+
+// Set Min delay, get delay, playout timestamp
+void
+APITest::TestDelay(char side)
+{
+    AudioCodingModule* myACM;
+    Channel* myChannel;
+    WebRtc_Word32* myMinDelay;
+    EventWrapper* myEvent = EventWrapper::Create();
+
+    WebRtc_UWord32 inTimestamp = 0;
+    WebRtc_UWord32 outTimestamp = 0;
+    double estimDelay = 0;
+
+    double averageEstimDelay = 0;
+    double averageDelay = 0;
+
+    CircularBuffer estimDelayCB(100);
+    estimDelayCB.SetArithMean(true);
+
+    if(side == 'A')
+    {
+        myACM = _acmA;
+        myChannel = _channel_B2A;
+        myMinDelay = &_minDelayA;
+    }
+    else
+    {
+        myACM = _acmB;
+        myChannel = _channel_A2B;
+        myMinDelay = &_minDelayB;
+    }
+
+
+    CHECK_ERROR_MT(myACM->SetMinimumPlayoutDelay(*myMinDelay));
+
+
+    inTimestamp = myChannel->LastInTimestamp();
+    CHECK_ERROR_MT(myACM->PlayoutTimestamp(outTimestamp));
+
+    if(!_randomTest)
+    {
+        myEvent->StartTimer(true, 30);
+        int n = 0;
+        int settlePoint = 5000;
+        while(n < settlePoint + 400)
+        {
+            myEvent->Wait(1000);
+
+            inTimestamp = myChannel->LastInTimestamp();
+            CHECK_ERROR_MT(myACM->PlayoutTimestamp(outTimestamp));
+
+            //std::cout << outTimestamp << std::endl << std::flush;
+            estimDelay = (double)((WebRtc_UWord32)(inTimestamp - outTimestamp)) /
+                ((double)myACM->ReceiveFrequency() / 1000.0);
+
+            estimDelayCB.Update(estimDelay);
+
+            estimDelayCB.ArithMean(averageEstimDelay);
+            //printf("\n %6.1f \n", estimDelay);
+            //std::cout << " " << std::flush;
+
+            if(_verbose)
+            {
+                fprintf(stdout, "\rExpected: %4d,    retreived: %6.1f,   measured: %6.1f",
+                    *myMinDelay, averageDelay, averageEstimDelay);
+                std::cout << " " << std::flush;
+            }
+            if((averageDelay > *myMinDelay) && (n < settlePoint))
+            {
+                settlePoint = n;
+            }
+            n++;
+        }
+        myEvent->StopTimer();
+    }
+
+    if((!_verbose) && (!_randomTest))
+    {
+        fprintf(stdout, "\nExpected: %4d,    retreived: %6.1f,   measured: %6.1f",
+            *myMinDelay, averageDelay, averageEstimDelay);
+    }
+
+    *myMinDelay = (rand() % 1000) + 1;
+
+    ACMNetworkStatistics networkStat;
+    CHECK_ERROR_MT(myACM->NetworkStatistics(networkStat));
+
+    if(!_randomTest)
+    {
+        fprintf(stdout, "\n\nJitter Statistics at Side %c\n", side);
+        fprintf(stdout, "--------------------------------------\n");
+        fprintf(stdout, "buffer-size............. %d\n", networkStat.currentBufferSize);
+        fprintf(stdout, "Preferred buffer-size... %d\n", networkStat.preferredBufferSize);
+        fprintf(stdout, "Peaky jitter mode........%d\n", networkStat.jitterPeaksFound);
+        fprintf(stdout, "packet-size rate........ %d\n", networkStat.currentPacketLossRate);
+        fprintf(stdout, "discard rate............ %d\n", networkStat.currentDiscardRate);
+        fprintf(stdout, "expand rate............. %d\n", networkStat.currentExpandRate);
+        fprintf(stdout, "Preemptive rate......... %d\n", networkStat.currentPreemptiveRate);
+        fprintf(stdout, "Accelerate rate......... %d\n", networkStat.currentAccelerateRate);
+        fprintf(stdout, "Clock-drift............. %d\n", networkStat.clockDriftPPM);
+        fprintf(stdout, "Mean waiting time....... %d\n", networkStat.meanWaitingTimeMs);
+        fprintf(stdout, "Median waiting time..... %d\n", networkStat.medianWaitingTimeMs);
+        fprintf(stdout, "Min waiting time........ %d\n", networkStat.minWaitingTimeMs);
+        fprintf(stdout, "Max waiting time........ %d\n", networkStat.maxWaitingTimeMs);
+    }
+
+    CHECK_ERROR_MT(myACM->SetMinimumPlayoutDelay(*myMinDelay));
+
+    if(!_randomTest)
+    {
+        myEvent->Wait(500);
+        fprintf(stdout, "\n");
+        fprintf(stdout, "\n");
+    }
+    delete myEvent;
+}
+
+// Unregister a codec & register again.
+void
+APITest::TestRegisteration(char sendSide)
+{
+    AudioCodingModule* sendACM;
+    AudioCodingModule* receiveACM;
+    bool* thereIsDecoder;
+    EventWrapper* myEvent = EventWrapper::Create();
+
+    if(!_randomTest)
+    {
+        fprintf(stdout, "\n\n");
+        fprintf(stdout, "---------------------------------------------------------\n");
+        fprintf(stdout, "           Unregister/register Receive Codec\n");
+        fprintf(stdout, "---------------------------------------------------------\n");
+    }
+
+    switch(sendSide)
+    {
+    case 'A':
+        {
+            sendACM = _acmA;
+            receiveACM = _acmB;
+            thereIsDecoder = &_thereIsDecoderB;
+            break;
+        }
+    case 'B':
+        {
+            sendACM = _acmB;
+            receiveACM = _acmA;
+            thereIsDecoder = &_thereIsDecoderA;
+            break;
+        }
+    default:
+        fprintf(stderr, "Invalid sender-side in TestRegistration(%c)\n", sendSide);
+        exit(-1);
+    }
+
+    CodecInst myCodec;
+    if(sendACM->SendCodec(myCodec) < 0)
+    {
+        AudioCodingModule::Codec(_codecCntrA, myCodec);
+    }
+
+    if(!_randomTest)
+    {
+        fprintf(stdout, "Unregistering reveive codec, NO AUDIO.\n");
+        fflush(stdout);
+    }
+    {
+        WriteLockScoped wl(_apiTestRWLock);
+        *thereIsDecoder = false;
+    }
+    //myEvent->Wait(20);
+    CHECK_ERROR_MT(receiveACM->UnregisterReceiveCodec(myCodec.pltype));
+    Wait(1000);
+
+    int currentPayload = myCodec.pltype;
+
+    if(!FixedPayloadTypeCodec(myCodec.plname))
+    {
+        WebRtc_Word32 i;
+        for(i = 0; i < 32; i++)
+        {
+            if(!_payloadUsed[i])
+            {
+                if(!_randomTest)
+                {
+                    fprintf(stdout, "Register receive codec with new Payload, AUDIO BACK.\n");
+                }
+                //myCodec.pltype = i + 96;
+                //CHECK_ERROR_MT(receiveACM->RegisterReceiveCodec(myCodec));
+                //CHECK_ERROR_MT(sendACM->RegisterSendCodec(myCodec));
+                //myEvent->Wait(20);
+                //{
+                //    WriteLockScoped wl(_apiTestRWLock);
+                //    *thereIsDecoder = true;
+                //}
+                Wait(1000);
+
+                if(!_randomTest)
+                {
+                    fprintf(stdout, "Unregistering reveive codec, NO AUDIO.\n");
+                }
+                //{
+                //    WriteLockScoped wl(_apiTestRWLock);
+                //    *thereIsDecoder = false;
+                //}
+                //myEvent->Wait(20);
+                //CHECK_ERROR_MT(receiveACM->UnregisterReceiveCodec(myCodec.pltype));
+                Wait(1000);
+
+                myCodec.pltype = currentPayload;
+                if(!_randomTest)
+                {
+                    fprintf(stdout, "Register receive codec with default Payload, AUDIO BACK.\n");
+                    fflush(stdout);
+                }
+                CHECK_ERROR_MT(receiveACM->RegisterReceiveCodec(myCodec));
+                //CHECK_ERROR_MT(sendACM->RegisterSendCodec(myCodec));
+                myEvent->Wait(20);
+                {
+                    WriteLockScoped wl(_apiTestRWLock);
+                    *thereIsDecoder = true;
+                }
+                Wait(1000);
+
+                break;
+            }
+        }
+        if(i == 32)
+        {
+            CHECK_ERROR_MT(receiveACM->RegisterReceiveCodec(myCodec));
+            {
+                WriteLockScoped wl(_apiTestRWLock);
+                *thereIsDecoder = true;
+            }
+        }
+    }
+    else
+    {
+        if(!_randomTest)
+        {
+            fprintf(stdout, "Register receive codec with fixed Payload, AUDIO BACK.\n");
+            fflush(stdout);
+        }
+        CHECK_ERROR_MT(receiveACM->RegisterReceiveCodec(myCodec));
+        //CHECK_ERROR_MT(receiveACM->UnregisterReceiveCodec(myCodec.pltype));
+        //CHECK_ERROR_MT(receiveACM->RegisterReceiveCodec(myCodec));
+        myEvent->Wait(20);
+        {
+            WriteLockScoped wl(_apiTestRWLock);
+            *thereIsDecoder = true;
+        }
+    }
+    delete myEvent;
+    if(!_randomTest)
+    {
+        fprintf(stdout, "---------------------------------------------------------\n");
+    }
+}
+
+// Playout Mode, background noise mode.
+// Receiver Frequency, playout frequency.
+void
+APITest::TestPlayout(char receiveSide)
+{
+    AudioCodingModule* receiveACM;
+    AudioPlayoutMode* playoutMode = NULL;
+    ACMBackgroundNoiseMode* bgnMode = NULL;
+    switch(receiveSide)
+    {
+        case 'A':
+            {
+                receiveACM = _acmA;
+                playoutMode = &_playoutModeA;
+                bgnMode = &_bgnModeA;
+                break;
+            }
+        case 'B':
+            {
+                receiveACM = _acmB;
+                playoutMode = &_playoutModeB;
+                bgnMode = &_bgnModeB;
+                break;
+            }
+        default:
+            receiveACM = _acmA;
+    }
+
+    WebRtc_Word32 receiveFreqHz = receiveACM->ReceiveFrequency();
+    WebRtc_Word32 playoutFreqHz = receiveACM->PlayoutFrequency();
+
+    CHECK_ERROR_MT(receiveFreqHz);
+    CHECK_ERROR_MT(playoutFreqHz);
+
+    char bgnString[25];
+    switch(*bgnMode)
+    {
+    case On:
+        {
+            *bgnMode = Fade;
+            strncpy(bgnString, "Fade", 25);
+            break;
+        }
+    case Fade:
+        {
+            *bgnMode = Off;
+            strncpy(bgnString, "OFF", 25);
+            break;
+        }
+    case Off:
+        {
+            *bgnMode = On;
+            strncpy(bgnString, "ON", 25);
+            break;
+        }
+    default:
+        *bgnMode = On;
+        strncpy(bgnString, "ON", 25);
+    }
+    CHECK_ERROR_MT(receiveACM->SetBackgroundNoiseMode(*bgnMode));
+    bgnString[24] = '\0';
+
+    char playoutString[25];
+    switch(*playoutMode)
+    {
+    case voice:
+        {
+            *playoutMode = fax;
+            strncpy(playoutString, "FAX", 25);
+            break;
+        }
+    case fax:
+        {
+            *playoutMode = streaming;
+            strncpy(playoutString, "Streaming", 25);
+            break;
+        }
+    case streaming:
+        {
+            *playoutMode = voice;
+            strncpy(playoutString, "Voice", 25);
+            break;
+        }
+    default:
+        *playoutMode = voice;
+        strncpy(playoutString, "Voice", 25);
+    }
+    CHECK_ERROR_MT(receiveACM->SetPlayoutMode(*playoutMode));
+    playoutString[24] = '\0';
+
+    if(!_randomTest)
+    {
+        fprintf(stdout, "\n");
+        fprintf(stdout, "In Side %c\n", receiveSide);
+        fprintf(stdout, "---------------------------------\n");
+        fprintf(stdout, "Receive Frequency....... %d Hz\n", receiveFreqHz);
+        fprintf(stdout, "Playout Frequency....... %d Hz\n", playoutFreqHz);
+        fprintf(stdout, "Audio Playout Mode...... %s\n", playoutString);
+        fprintf(stdout, "Background Noise Mode... %s\n", bgnString);
+    }
+}
+
+// set/get receiver VAD status & mode.
+void
+APITest::TestReceiverVAD(char side)
+{
+    AudioCodingModule* myACM;
+    int* myReceiveVADActivity;
+
+    if(side == 'A')
+    {
+        myACM = _acmA;
+        myReceiveVADActivity = _receiveVADActivityA;
+    }
+    else
+    {
+        myACM = _acmB;
+        myReceiveVADActivity = _receiveVADActivityB;
+    }
+
+    ACMVADMode mode = myACM->ReceiveVADMode();
+
+    CHECK_ERROR_MT(mode);
+
+    if(!_randomTest)
+    {
+        fprintf(stdout, "\n\nCurrent Receive VAD at side %c\n", side);
+        fprintf(stdout, "----------------------------------\n");
+        fprintf(stdout, "mode.......... %d\n", (int)mode);
+        fprintf(stdout, "VAD Active.... %d\n", myReceiveVADActivity[0]);
+        fprintf(stdout, "VAD Passive... %d\n", myReceiveVADActivity[1]);
+        fprintf(stdout, "VAD Unknown... %d\n", myReceiveVADActivity[2]);
+    }
+
+    if(!_randomTest)
+    {
+        fprintf(stdout, "\nChange Receive VAD at side %c\n\n", side);
+    }
+
+    switch(mode)
+    {
+      case VADNormal:
+          mode = VADAggr;
+          break;
+      case VADLowBitrate:
+          mode = VADVeryAggr;
+          break;
+      case VADAggr:
+          mode = VADLowBitrate;
+          break;
+      case VADVeryAggr:
+          mode = VADNormal;
+          break;
+      default:
+          mode = VADNormal;
+
+          CHECK_ERROR_MT(myACM->SetReceiveVADMode(mode));
+    }
+    for(int n = 0; n < 3; n++)
+    {
+        myReceiveVADActivity[n] = 0;
+    }
+}
+
+
+void
+APITest::TestSendVAD(char side)
+{
+    if(_randomTest)
+    {
+        return;
+    }
+
+    bool* vad;
+    bool* dtx;
+    ACMVADMode* mode;
+    Channel* myChannel;
+    AudioCodingModule* myACM;
+
+    CodecInst myCodec;
+    if(!_randomTest)
+    {
+        fprintf(stdout, "\n\n");
+        fprintf(stdout, "-----------------------------------------------\n");
+        fprintf(stdout, "                Test VAD API\n");
+        fprintf(stdout, "-----------------------------------------------\n");
+    }
+
+    if(side == 'A')
+    {
+        AudioCodingModule::Codec(_codecCntrA, myCodec);
+        vad = &_sendVADA;
+        dtx = &_sendDTXA;
+        mode = &_sendVADModeA;
+        myChannel = _channel_A2B;
+        myACM = _acmA;
+    }
+    else
+    {
+        AudioCodingModule::Codec(_codecCntrB, myCodec);
+        vad = &_sendVADB;
+        dtx = &_sendDTXB;
+        mode = &_sendVADModeB;
+        myChannel = _channel_B2A;
+        myACM = _acmB;
+    }
+
+    CheckVADStatus(side);
+    if(!_randomTest)
+    {
+        fprintf(stdout, "\n\n");
+    }
+
+    switch(*mode)
+    {
+    case VADNormal:
+        *vad = true;
+        *dtx = true;
+        *mode = VADAggr;
+        break;
+    case VADLowBitrate:
+        *vad = true;
+        *dtx = true;
+        *mode = VADVeryAggr;
+        break;
+    case VADAggr:
+        *vad = true;
+        *dtx = true;
+        *mode = VADLowBitrate;
+        break;
+    case VADVeryAggr:
+        *vad = false;
+        *dtx = false;
+        *mode = VADNormal;
+        break;
+    default:
+        *mode = VADNormal;
+    }
+
+    *dtx = (myCodec.plfreq == 32000)? false:*dtx;
+
+    CHECK_ERROR_MT(myACM->SetVAD(*dtx, *vad, *mode));
+    myChannel->ResetStats();
+
+    CheckVADStatus(side);
+    if(!_randomTest)
+    {
+        fprintf(stdout, "\n");
+        fprintf(stdout, "-----------------------------------------------\n");
+    }
+
+    // Fault Test
+    CHECK_PROTECTED_MT(myACM->SetVAD(false, true, (ACMVADMode)-1));
+    CHECK_PROTECTED_MT(myACM->SetVAD(false, true, (ACMVADMode)4));
+
+
+
+}
+
+
+void
+APITest::CurrentCodec(char side)
+{
+    CodecInst myCodec;
+    if(side == 'A')
+    {
+        _acmA->SendCodec(myCodec);
+    }
+    else
+    {
+        _acmB->SendCodec(myCodec);
+    }
+
+    if(!_randomTest)
+    {
+        fprintf(stdout, "\n\n");
+        fprintf(stdout, "Send codec in Side A\n");
+        fprintf(stdout, "----------------------------\n");
+        fprintf(stdout, "Name................. %s\n", myCodec.plname);
+        fprintf(stdout, "Sampling Frequency... %d\n", myCodec.plfreq);
+        fprintf(stdout, "Rate................. %d\n", myCodec.rate);
+        fprintf(stdout, "Payload-type......... %d\n", myCodec.pltype);
+        fprintf(stdout, "Packet-size.......... %d\n", myCodec.pacsize);
+    }
+
+    Wait(100);
+}
+
+void
+APITest::ChangeCodec(char side)
+{
+    CodecInst myCodec;
+    AudioCodingModule* myACM;
+    WebRtc_UWord8* codecCntr;
+    bool* thereIsEncoder;
+    bool* vad;
+    bool* dtx;
+    ACMVADMode* mode;
+    Channel* myChannel;
+    // Reset and Wait
+    if(!_randomTest)
+    {
+        fprintf(stdout, "Reset Encoder Side A \n");
+    }
+    if(side == 'A')
+    {
+        myACM = _acmA;
+        codecCntr = &_codecCntrA;
+        {
+            WriteLockScoped wl(_apiTestRWLock);
+            thereIsEncoder = &_thereIsEncoderA;
+        }
+        vad = &_sendVADA;
+        dtx = &_sendDTXA;
+        mode = &_sendVADModeA;
+        myChannel = _channel_A2B;
+    }
+    else
+    {
+        myACM = _acmB;
+        codecCntr = &_codecCntrB;
+        {
+            WriteLockScoped wl(_apiTestRWLock);
+            thereIsEncoder = &_thereIsEncoderB;
+        }
+        vad = &_sendVADB;
+        dtx = &_sendDTXB;
+        mode = &_sendVADModeB;
+        myChannel = _channel_B2A;
+    }
+
+    myACM->ResetEncoder();
+    Wait(100);
+
+    // Register the next codec
+    do
+    {
+        *codecCntr = (*codecCntr < AudioCodingModule::NumberOfCodecs() - 1)?
+            (*codecCntr + 1):0;
+
+        if(*codecCntr == 0)
+        {
+            //printf("Initialize Sender Side A \n");
+            {
+                WriteLockScoped wl(_apiTestRWLock);
+                *thereIsEncoder = false;
+            }
+            CHECK_ERROR_MT(myACM->InitializeSender());
+            Wait(1000);
+
+            // After Initialization CN is lost, re-register them
+            if(AudioCodingModule::Codec("CN", myCodec, 8000, 1) >= 0)
+            {
+                CHECK_ERROR_MT(myACM->RegisterSendCodec(myCodec));
+            }
+            if(AudioCodingModule::Codec("CN", myCodec, 16000, 1) >= 0)
+            {
+                CHECK_ERROR_MT(myACM->RegisterSendCodec(myCodec));
+            }
+            // VAD & DTX are disabled after initialization
+            *vad = false;
+            *dtx = false;
+            _writeToFile = false;
+        }
+
+        AudioCodingModule::Codec(*codecCntr, myCodec);
+    } while(!STR_CASE_CMP(myCodec.plname, "CN")          ||
+        !STR_CASE_CMP(myCodec.plname, "telephone-event") ||
+        !STR_CASE_CMP(myCodec.plname, "RED"));
+
+    if(!_randomTest)
+    {
+        fprintf(stdout, "\n====================================================================\n");
+        fprintf(stdout, "      Registering New Codec %s, %d kHz, %d kbps\n",
+            myCodec.plname, myCodec.plfreq / 1000, myCodec.rate / 1000);
+    }
+    //std::cout<< std::flush;
+
+    // NO DTX for supe-wideband codec at this point
+    if(myCodec.plfreq == 32000)
+    {
+        *dtx = false;
+        CHECK_ERROR_MT(myACM->SetVAD(*dtx, *vad, *mode));
+
+    }
+
+    CHECK_ERROR_MT(myACM->RegisterSendCodec(myCodec));
+    myChannel->ResetStats();
+    {
+        WriteLockScoped wl(_apiTestRWLock);
+        *thereIsEncoder = true;
+    }
+    Wait(500);
+}
+
+
+void
+APITest::LookForDTMF(char side)
+{
+    if(!_randomTest)
+    {
+        fprintf(stdout, "\n\nLooking for DTMF Signal in Side %c\n", side);
+        fprintf(stdout, "----------------------------------------\n");
+    }
+
+    if(side == 'A')
+    {
+        _acmB->RegisterIncomingMessagesCallback(NULL);
+        _acmA->RegisterIncomingMessagesCallback(_dtmfCallback);
+        Wait(1000);
+        _acmA->RegisterIncomingMessagesCallback(NULL);
+    }
+    else
+    {
+        _acmA->RegisterIncomingMessagesCallback(NULL);
+        _acmB->RegisterIncomingMessagesCallback(_dtmfCallback);
+        Wait(1000);
+        _acmB->RegisterIncomingMessagesCallback(NULL);
+    }
+}
+
+} // namespace webrtc
+
diff --git a/src/modules/audio_coding/main/test/APITest.h b/src/modules/audio_coding/main/test/APITest.h
new file mode 100644
index 0000000..ee3f5e6
--- /dev/null
+++ b/src/modules/audio_coding/main/test/APITest.h
@@ -0,0 +1,173 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef API_TEST_H
+#define API_TEST_H
+
+#include "ACMTest.h"
+#include "Channel.h"
+#include "PCMFile.h"
+#include "event_wrapper.h"
+#include "utility.h"
+
+namespace webrtc {
+
+enum APITESTAction {TEST_CHANGE_CODEC_ONLY = 0, DTX_TEST = 1};
+
+class APITest : public ACMTest
+{
+public:
+    APITest();
+    ~APITest();
+
+    void Perform();
+private:
+    WebRtc_Word16 SetUp();
+
+    static bool PushAudioThreadA(void* obj);
+    static bool PullAudioThreadA(void* obj);
+    static bool ProcessThreadA(void* obj);
+    static bool APIThreadA(void* obj);
+
+    static bool PushAudioThreadB(void* obj);
+    static bool PullAudioThreadB(void* obj);
+    static bool ProcessThreadB(void* obj);
+    static bool APIThreadB(void* obj);
+
+    void CheckVADStatus(char side);
+
+    // Set Min delay, get delay, playout timestamp
+    void TestDelay(char side);
+
+    // Unregister a codec & register again.
+    void TestRegisteration(char side);
+
+    // Playout Mode, background noise mode.
+    // Receiver Frequency, playout frequency.
+    void TestPlayout(char receiveSide);
+
+    // set/get receiver VAD status & mode.
+    void TestReceiverVAD(char side);
+
+    //
+    void TestSendVAD(char side);
+
+    void CurrentCodec(char side);
+
+    void ChangeCodec(char side);
+
+    void Wait(WebRtc_UWord32 waitLengthMs);
+
+    void LookForDTMF(char side);
+
+    void RunTest(char thread);
+
+    bool PushAudioRunA();
+    bool PullAudioRunA();
+    bool ProcessRunA();
+    bool APIRunA();
+
+    bool PullAudioRunB();
+    bool PushAudioRunB();
+    bool ProcessRunB();
+    bool APIRunB();
+
+
+
+    //--- ACMs
+    AudioCodingModule* _acmA;
+    AudioCodingModule* _acmB;
+
+    //--- Channels
+    Channel* _channel_A2B;
+    Channel* _channel_B2A;
+
+    //--- I/O files
+    // A
+    PCMFile _inFileA;
+    PCMFile _outFileA;
+    // B
+    PCMFile _outFileB;
+    PCMFile _inFileB;
+
+    //--- I/O params
+    // A
+    WebRtc_Word32 _outFreqHzA;
+    // B
+    WebRtc_Word32 _outFreqHzB;
+
+    // Should we write to file.
+    // we might skip writing to file if we
+    // run the test for a long time.
+    bool _writeToFile;
+    //--- Events
+    // A
+    EventWrapper* _pullEventA;      // pulling data from ACM
+    EventWrapper* _pushEventA;      // pushing data to ACM
+    EventWrapper* _processEventA;   // process
+    EventWrapper* _apiEventA;       // API calls
+    // B
+    EventWrapper* _pullEventB;      // pulling data from ACM
+    EventWrapper* _pushEventB;      // pushing data to ACM
+    EventWrapper* _processEventB;   // process
+    EventWrapper* _apiEventB;       // API calls
+
+    // keep track of the codec in either side.
+    WebRtc_UWord8 _codecCntrA;
+    WebRtc_UWord8 _codecCntrB;
+
+    // Is set to true if there is no encoder in either side
+    bool _thereIsEncoderA;
+    bool _thereIsEncoderB;
+    bool _thereIsDecoderA;
+    bool _thereIsDecoderB;
+
+    bool             _sendVADA;
+    bool             _sendDTXA;
+    ACMVADMode       _sendVADModeA;
+
+    bool             _sendVADB;
+    bool             _sendDTXB;
+    ACMVADMode       _sendVADModeB;
+
+    WebRtc_Word32    _minDelayA;
+    WebRtc_Word32    _minDelayB;
+    bool             _payloadUsed[32];
+
+    AudioPlayoutMode    _playoutModeA;
+    AudioPlayoutMode    _playoutModeB;
+
+    ACMBackgroundNoiseMode _bgnModeA;
+    ACMBackgroundNoiseMode _bgnModeB;
+
+
+    int            _receiveVADActivityA[3];
+    int            _receiveVADActivityB[3];
+    bool           _verbose;
+
+    int            _dotPositionA;
+    int            _dotMoveDirectionA;
+    int            _dotPositionB;
+    int            _dotMoveDirectionB;
+
+    char           _movingDot[41];
+
+    DTMFDetector*  _dtmfCallback;
+    VADCallback*   _vadCallbackA;
+    VADCallback*   _vadCallbackB;
+    RWLockWrapper&    _apiTestRWLock;
+    bool           _randomTest;
+    int            _testNumA;
+    int            _testNumB;
+};
+
+} // namespace webrtc
+
+#endif
diff --git a/src/modules/audio_coding/main/test/Channel.cc b/src/modules/audio_coding/main/test/Channel.cc
new file mode 100644
index 0000000..d7e387a
--- /dev/null
+++ b/src/modules/audio_coding/main/test/Channel.cc
@@ -0,0 +1,477 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+#include <iostream>
+
+#include "audio_coding_module.h"
+#include "Channel.h"
+#include "tick_util.h"
+#include "typedefs.h"
+#include "common_types.h"
+
+namespace webrtc {
+
+WebRtc_Word32
+Channel::SendData(
+        const FrameType       frameType,
+        const WebRtc_UWord8   payloadType,
+        const WebRtc_UWord32  timeStamp,
+        const WebRtc_UWord8*  payloadData,
+        const WebRtc_UWord16  payloadSize,
+        const RTPFragmentationHeader* fragmentation)
+{
+    WebRtcRTPHeader rtpInfo;
+    WebRtc_Word32   status;
+    WebRtc_UWord16  payloadDataSize = payloadSize;
+
+    rtpInfo.header.markerBit = false;
+    rtpInfo.header.ssrc = 0;
+    rtpInfo.header.sequenceNumber = _seqNo++;
+    rtpInfo.header.payloadType = payloadType;
+    rtpInfo.header.timestamp = timeStamp;
+    if(frameType == kAudioFrameCN)
+    {
+        rtpInfo.type.Audio.isCNG = true;
+    }
+    else
+    {
+        rtpInfo.type.Audio.isCNG = false;
+    }
+    if(frameType == kFrameEmpty)
+    {
+        // Skip this frame
+        return 0;
+    }
+
+    rtpInfo.type.Audio.channel = 1;
+    // Treat fragmentation separately
+    if(fragmentation != NULL)
+    {
+        if((fragmentation->fragmentationTimeDiff[1] <= 0x3fff) && // silence for too long send only new data
+            (fragmentation->fragmentationVectorSize == 2))
+        {
+            // only 0x80 if we have multiple blocks
+            _payloadData[0] = 0x80 + fragmentation->fragmentationPlType[1];
+            WebRtc_UWord32 REDheader =  (((WebRtc_UWord32)fragmentation->fragmentationTimeDiff[1]) << 10) + fragmentation->fragmentationLength[1];
+            _payloadData[1] = WebRtc_UWord8((REDheader >> 16) & 0x000000FF);
+            _payloadData[2] = WebRtc_UWord8((REDheader >> 8) & 0x000000FF);
+            _payloadData[3] = WebRtc_UWord8(REDheader & 0x000000FF);
+
+            _payloadData[4] = fragmentation->fragmentationPlType[0];
+            // copy the RED data
+            memcpy(_payloadData + 5,
+                payloadData + fragmentation->fragmentationOffset[1],
+                fragmentation->fragmentationLength[1]);
+            // copy the normal data
+            memcpy(_payloadData + 5 + fragmentation->fragmentationLength[1],
+                payloadData + fragmentation->fragmentationOffset[0],
+                fragmentation->fragmentationLength[0]);
+            payloadDataSize += 5;
+        } else
+        {
+            // single block (newest one)
+            memcpy(_payloadData,
+                payloadData + fragmentation->fragmentationOffset[0],
+                fragmentation->fragmentationLength[0]);
+            payloadDataSize = WebRtc_UWord16(fragmentation->fragmentationLength[0]);
+            rtpInfo.header.payloadType = fragmentation->fragmentationPlType[0];
+        }
+    }
+    else
+    {
+        memcpy(_payloadData, payloadData, payloadDataSize);
+        if(_isStereo)
+        {
+            if(_leftChannel)
+            {
+                memcpy(&_rtpInfo, &rtpInfo, sizeof(WebRtcRTPHeader));
+                _leftChannel = false;
+                rtpInfo.type.Audio.channel = 1;
+            }
+            else
+            {
+                memcpy(&rtpInfo, &_rtpInfo, sizeof(WebRtcRTPHeader));
+                _leftChannel = true;
+                rtpInfo.type.Audio.channel = 2;
+            }
+        }
+    }
+
+    _channelCritSect->Enter();
+    if(_saveBitStream)
+    {
+        //fwrite(payloadData, sizeof(WebRtc_UWord8), payloadSize, _bitStreamFile);
+    }
+
+    if(!_isStereo)
+    {
+        CalcStatistics(rtpInfo, payloadSize);
+    }
+    _lastInTimestamp = timeStamp;
+    _totalBytes += payloadDataSize;
+    _channelCritSect->Leave();
+
+    if(_useFECTestWithPacketLoss)
+    {
+        _packetLoss += 1;
+        if(_packetLoss == 3)
+        {
+            _packetLoss = 0;
+            return 0;
+        }
+    }
+
+    status = _receiverACM->IncomingPacket(_payloadData, payloadDataSize,
+                                          rtpInfo);
+
+    return status;
+}
+
+void
+Channel::CalcStatistics(
+    WebRtcRTPHeader& rtpInfo,
+    WebRtc_UWord16   payloadSize)
+{
+    int n;
+    if((rtpInfo.header.payloadType != _lastPayloadType) &&
+        (_lastPayloadType != -1))
+    {
+        // payload-type is changed.
+        // we have to terminate the calculations on the previous payload type
+        // we ignore the last packet in that payload type just to make things
+        // easier.
+        for(n = 0; n < MAX_NUM_PAYLOADS; n++)
+        {
+            if(_lastPayloadType == _payloadStats[n].payloadType)
+            {
+                _payloadStats[n].newPacket = true;
+                break;
+            }
+        }
+    }
+    _lastPayloadType = rtpInfo.header.payloadType;
+
+    bool newPayload = true;
+    ACMTestPayloadStats* currentPayloadStr;
+    for(n = 0; n < MAX_NUM_PAYLOADS; n++)
+    {
+        if(rtpInfo.header.payloadType == _payloadStats[n].payloadType)
+        {
+            newPayload = false;
+            currentPayloadStr = &_payloadStats[n];
+            break;
+        }
+    }
+
+    if(!newPayload)
+    {
+        if(!currentPayloadStr->newPacket)
+        {
+            WebRtc_UWord32 lastFrameSizeSample = (WebRtc_UWord32)((WebRtc_UWord32)rtpInfo.header.timestamp -
+                (WebRtc_UWord32)currentPayloadStr->lastTimestamp);
+            assert(lastFrameSizeSample > 0);
+            int k = 0;
+            while((currentPayloadStr->frameSizeStats[k].frameSizeSample !=
+                lastFrameSizeSample) &&
+                (currentPayloadStr->frameSizeStats[k].frameSizeSample != 0))
+            {
+                k++;
+            }
+            ACMTestFrameSizeStats* currentFrameSizeStats =
+                &(currentPayloadStr->frameSizeStats[k]);
+            currentFrameSizeStats->frameSizeSample = (WebRtc_Word16)lastFrameSizeSample;
+
+            // increment the number of encoded samples.
+            currentFrameSizeStats->totalEncodedSamples +=
+                lastFrameSizeSample;
+            // increment the number of recveived packets
+            currentFrameSizeStats->numPackets++;
+            // increment the total number of bytes (this is based on
+            // the previous payload we don't know the frame-size of
+            // the current payload.
+            currentFrameSizeStats->totalPayloadLenByte +=
+                currentPayloadStr->lastPayloadLenByte;
+            // store the maximum payload-size (this is based on
+            // the previous payload we don't know the frame-size of
+            // the current payload.
+            if(currentFrameSizeStats->maxPayloadLen <
+                currentPayloadStr->lastPayloadLenByte)
+            {
+                currentFrameSizeStats->maxPayloadLen =
+                    currentPayloadStr->lastPayloadLenByte;
+            }
+            // store the current values for the next time
+            currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
+            currentPayloadStr->lastPayloadLenByte = payloadSize;
+        }
+        else
+        {
+            currentPayloadStr->newPacket          = false;
+            currentPayloadStr->lastPayloadLenByte = payloadSize;
+            currentPayloadStr->lastTimestamp      = rtpInfo.header.timestamp;
+            currentPayloadStr->payloadType        = rtpInfo.header.payloadType;
+        }
+    }
+    else
+    {
+        n = 0;
+        while(_payloadStats[n].payloadType != -1)
+        {
+            n++;
+        }
+        // first packet
+        _payloadStats[n].newPacket          = false;
+        _payloadStats[n].lastPayloadLenByte = payloadSize;
+        _payloadStats[n].lastTimestamp      = rtpInfo.header.timestamp;
+        _payloadStats[n].payloadType        = rtpInfo.header.payloadType;
+    }
+}
+
+Channel::Channel(WebRtc_Word16 chID) :
+_receiverACM(NULL),
+_seqNo(0),
+_channelCritSect(CriticalSectionWrapper::CreateCriticalSection()),
+_bitStreamFile(NULL),
+_saveBitStream(false),
+_lastPayloadType(-1),
+_isStereo(false),
+_leftChannel(true),
+_lastInTimestamp(0),
+_packetLoss(0),
+_useFECTestWithPacketLoss(false),
+_beginTime(TickTime::MillisecondTimestamp()),
+_totalBytes(0)
+{
+    int n;
+    int k;
+    for(n = 0; n < MAX_NUM_PAYLOADS; n++)
+    {
+        _payloadStats[n].payloadType = -1;
+        _payloadStats[n].newPacket   = true;
+        for(k = 0; k < MAX_NUM_FRAMESIZES; k++)
+        {
+            _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
+            _payloadStats[n].frameSizeStats[k].maxPayloadLen   = 0;
+            _payloadStats[n].frameSizeStats[k].numPackets      = 0;
+            _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
+            _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
+        }
+    }
+    if(chID >= 0)
+    {
+        _saveBitStream = true;
+        char bitStreamFileName[500];
+        sprintf(bitStreamFileName, "bitStream_%d.dat", chID);
+        _bitStreamFile = fopen(bitStreamFileName, "wb");
+    }
+    else
+    {
+        _saveBitStream = false;
+    }
+}
+
+Channel::~Channel()
+{
+    delete _channelCritSect;
+}
+
+void
+Channel::RegisterReceiverACM(AudioCodingModule* acm)
+{
+    _receiverACM = acm;
+    return;
+}
+
+void
+Channel::ResetStats()
+{
+    int n;
+    int k;
+    _channelCritSect->Enter();
+    _lastPayloadType = -1;
+    for(n = 0; n < MAX_NUM_PAYLOADS; n++)
+    {
+        _payloadStats[n].payloadType = -1;
+        _payloadStats[n].newPacket   = true;
+        for(k = 0; k < MAX_NUM_FRAMESIZES; k++)
+        {
+            _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
+            _payloadStats[n].frameSizeStats[k].maxPayloadLen   = 0;
+            _payloadStats[n].frameSizeStats[k].numPackets      = 0;
+            _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
+            _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
+        }
+    }
+    _beginTime = TickTime::MillisecondTimestamp();
+    _totalBytes = 0;
+    _channelCritSect->Leave();
+}
+
+WebRtc_Word16
+Channel::Stats(CodecInst& codecInst, ACMTestPayloadStats& payloadStats)
+{
+    _channelCritSect->Enter();
+    int n;
+    payloadStats.payloadType = -1;
+    for(n = 0; n < MAX_NUM_PAYLOADS; n++)
+    {
+        if(_payloadStats[n].payloadType == codecInst.pltype)
+        {
+            memcpy(&payloadStats, &_payloadStats[n], sizeof(ACMTestPayloadStats));
+            break;
+        }
+    }
+    if(payloadStats.payloadType == -1)
+    {
+        _channelCritSect->Leave();
+        return -1;
+    }
+    for(n = 0; n < MAX_NUM_FRAMESIZES; n++)
+    {
+        if(payloadStats.frameSizeStats[n].frameSizeSample == 0)
+        {
+            _channelCritSect->Leave();
+            return 0;
+        }
+        payloadStats.frameSizeStats[n].usageLenSec =
+            (double)payloadStats.frameSizeStats[n].totalEncodedSamples
+            / (double)codecInst.plfreq;
+
+        payloadStats.frameSizeStats[n].rateBitPerSec =
+            payloadStats.frameSizeStats[n].totalPayloadLenByte * 8 /
+            payloadStats.frameSizeStats[n].usageLenSec;
+
+    }
+    _channelCritSect->Leave();
+    return 0;
+}
+
+void
+Channel::Stats(WebRtc_UWord32* numPackets)
+{
+    _channelCritSect->Enter();
+    int k;
+    int n;
+    memset(numPackets, 0, MAX_NUM_PAYLOADS * sizeof(WebRtc_UWord32));
+    for(k = 0; k < MAX_NUM_PAYLOADS; k++)
+    {
+        if(_payloadStats[k].payloadType == -1)
+        {
+            break;
+        }
+        numPackets[k] = 0;
+        for(n = 0; n < MAX_NUM_FRAMESIZES; n++)
+        {
+            if(_payloadStats[k].frameSizeStats[n].frameSizeSample == 0)
+            {
+                break;
+            }
+            numPackets[k] +=
+                _payloadStats[k].frameSizeStats[n].numPackets;
+        }
+    }
+    _channelCritSect->Leave();
+}
+
+void
+Channel::Stats(WebRtc_UWord8* payloadType, WebRtc_UWord32* payloadLenByte)
+{
+    _channelCritSect->Enter();
+
+    int k;
+    int n;
+    memset(payloadLenByte, 0, MAX_NUM_PAYLOADS * sizeof(WebRtc_UWord32));
+    for(k = 0; k < MAX_NUM_PAYLOADS; k++)
+    {
+        if(_payloadStats[k].payloadType == -1)
+        {
+            break;
+        }
+        payloadType[k] = (WebRtc_UWord8)_payloadStats[k].payloadType;
+        payloadLenByte[k] = 0;
+        for(n = 0; n < MAX_NUM_FRAMESIZES; n++)
+        {
+            if(_payloadStats[k].frameSizeStats[n].frameSizeSample == 0)
+            {
+                break;
+            }
+            payloadLenByte[k] += (WebRtc_UWord16)
+                _payloadStats[k].frameSizeStats[n].totalPayloadLenByte;
+        }
+    }
+
+    _channelCritSect->Leave();
+}
+
+
+void
+Channel::PrintStats(CodecInst& codecInst)
+{
+    ACMTestPayloadStats payloadStats;
+    Stats(codecInst, payloadStats);
+    printf("%s %d kHz\n",
+        codecInst.plname,
+        codecInst.plfreq / 1000);
+    printf("=====================================================\n");
+    if(payloadStats.payloadType == -1)
+    {
+        printf("No Packets are sent with payload-type %d (%s)\n\n",
+            codecInst.pltype,
+            codecInst.plname);
+        return;
+    }
+    for(int k = 0; k < MAX_NUM_FRAMESIZES; k++)
+    {
+        if(payloadStats.frameSizeStats[k].frameSizeSample == 0)
+        {
+            break;
+        }
+        printf("Frame-size.................... %d samples\n",
+            payloadStats.frameSizeStats[k].frameSizeSample);
+        printf("Average Rate.................. %.0f bits/sec\n",
+            payloadStats.frameSizeStats[k].rateBitPerSec);
+        printf("Maximum Payload-Size.......... %d Bytes\n",
+            payloadStats.frameSizeStats[k].maxPayloadLen);
+        printf("Maximum Instantaneous Rate.... %.0f bits/sec\n",
+            ((double)payloadStats.frameSizeStats[k].maxPayloadLen * 8.0 *
+            (double)codecInst.plfreq) /
+            (double)payloadStats.frameSizeStats[k].frameSizeSample);
+        printf("Number of Packets............. %u\n",
+               (unsigned int)payloadStats.frameSizeStats[k].numPackets);
+        printf("Duration...................... %0.3f sec\n\n",
+            payloadStats.frameSizeStats[k].usageLenSec);
+
+    }
+
+}
+
+WebRtc_UWord32
+Channel::LastInTimestamp()
+{
+    WebRtc_UWord32 timestamp;
+    _channelCritSect->Enter();
+    timestamp = _lastInTimestamp;
+    _channelCritSect->Leave();
+    return timestamp;
+}
+
+double
+Channel::BitRate()
+{
+    double rate;
+    WebRtc_UWord64 currTime = TickTime::MillisecondTimestamp();
+    _channelCritSect->Enter();
+    rate =   ((double)_totalBytes * 8.0)/ (double)(currTime - _beginTime);
+    _channelCritSect->Leave();
+    return rate;
+}
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/test/Channel.h b/src/modules/audio_coding/main/test/Channel.h
new file mode 100644
index 0000000..617027e
--- /dev/null
+++ b/src/modules/audio_coding/main/test/Channel.h
@@ -0,0 +1,124 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef CHANNEL_H
+#define CHANNEL_H
+
+#include <stdio.h>
+
+#include "audio_coding_module.h"
+#include "critical_section_wrapper.h"
+#include "rw_lock_wrapper.h"
+
+namespace webrtc {
+
+#define MAX_NUM_PAYLOADS   50
+#define MAX_NUM_FRAMESIZES  6
+
+
+struct ACMTestFrameSizeStats
+{
+    WebRtc_UWord16 frameSizeSample;
+    WebRtc_Word16  maxPayloadLen;
+    WebRtc_UWord32 numPackets;
+    WebRtc_UWord64 totalPayloadLenByte;
+    WebRtc_UWord64 totalEncodedSamples;
+    double         rateBitPerSec;
+    double         usageLenSec;
+
+};
+
+struct ACMTestPayloadStats
+{
+    bool                  newPacket;
+    WebRtc_Word16         payloadType;
+    WebRtc_Word16         lastPayloadLenByte;
+    WebRtc_UWord32        lastTimestamp;
+    ACMTestFrameSizeStats frameSizeStats[MAX_NUM_FRAMESIZES];
+};
+
+class Channel: public AudioPacketizationCallback
+{
+public:
+
+    Channel(
+        WebRtc_Word16 chID = -1);
+    ~Channel();
+
+    WebRtc_Word32 SendData(
+        const FrameType       frameType,
+        const WebRtc_UWord8   payloadType,
+        const WebRtc_UWord32  timeStamp,
+        const WebRtc_UWord8*  payloadData,
+        const WebRtc_UWord16  payloadSize,
+        const RTPFragmentationHeader* fragmentation);
+
+    void RegisterReceiverACM(
+        AudioCodingModule *acm);
+
+    void ResetStats();
+
+    WebRtc_Word16 Stats(
+        CodecInst&           codecInst,
+        ACMTestPayloadStats& payloadStats);
+
+    void Stats(
+        WebRtc_UWord32* numPackets);
+
+    void Stats(
+        WebRtc_UWord8*  payloadLenByte,
+        WebRtc_UWord32* payloadType);
+
+    void PrintStats(
+        CodecInst& codecInst);
+
+    void SetIsStereo(bool isStereo)
+    {
+        _isStereo = isStereo;
+    }
+
+    WebRtc_UWord32 LastInTimestamp();
+
+    void SetFECTestWithPacketLoss(bool usePacketLoss)
+    {
+        _useFECTestWithPacketLoss = usePacketLoss;
+    }
+
+    double BitRate();
+
+private:
+    void CalcStatistics(
+        WebRtcRTPHeader& rtpInfo,
+        WebRtc_UWord16   payloadSize);
+
+    AudioCodingModule*      _receiverACM;
+    WebRtc_UWord16          _seqNo;
+    // 60msec * 32 sample(max)/msec * 2 description (maybe) * 2 bytes/sample
+    WebRtc_UWord8           _payloadData[60 * 32 * 2 * 2];
+
+    CriticalSectionWrapper* _channelCritSect;
+    FILE*                   _bitStreamFile;
+    bool                    _saveBitStream;
+    WebRtc_Word16           _lastPayloadType;
+    ACMTestPayloadStats     _payloadStats[MAX_NUM_PAYLOADS];
+    bool                    _isStereo;
+    WebRtcRTPHeader         _rtpInfo;
+    bool                    _leftChannel;
+    WebRtc_UWord32          _lastInTimestamp;
+    // FEC Test variables
+    WebRtc_Word16           _packetLoss;
+    bool                    _useFECTestWithPacketLoss;
+    WebRtc_UWord64          _beginTime;
+    WebRtc_UWord64          _totalBytes;
+};
+
+} // namespace webrtc
+
+#endif
diff --git a/src/modules/audio_coding/main/test/EncodeDecodeTest.cc b/src/modules/audio_coding/main/test/EncodeDecodeTest.cc
new file mode 100644
index 0000000..09ff58e
--- /dev/null
+++ b/src/modules/audio_coding/main/test/EncodeDecodeTest.cc
@@ -0,0 +1,392 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "EncodeDecodeTest.h"
+
+#include <sstream>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <string>
+
+#include "gtest/gtest.h"
+
+#include "audio_coding_module.h"
+#include "common_types.h"
+#include "testsupport/fileutils.h"
+#include "trace.h"
+#include "utility.h"
+
+namespace webrtc {
+
+TestPacketization::TestPacketization(RTPStream *rtpStream,
+                                     WebRtc_UWord16 frequency)
+    : _rtpStream(rtpStream),
+      _frequency(frequency),
+      _seqNo(0) {
+}
+
+TestPacketization::~TestPacketization() { }
+
+WebRtc_Word32 TestPacketization::SendData(
+    const FrameType /* frameType */,
+    const WebRtc_UWord8 payloadType,
+    const WebRtc_UWord32 timeStamp,
+    const WebRtc_UWord8* payloadData,
+    const WebRtc_UWord16 payloadSize,
+    const RTPFragmentationHeader* /* fragmentation */) {
+  _rtpStream->Write(payloadType, timeStamp, _seqNo++, payloadData, payloadSize,
+                    _frequency);
+  return 1;
+}
+
+Sender::Sender()
+    : _acm(NULL),
+      _pcmFile(),
+      _audioFrame(),
+      _packetization(NULL) {
+}
+
+void Sender::Setup(AudioCodingModule *acm, RTPStream *rtpStream) {
+  acm->InitializeSender();
+  struct CodecInst sendCodec;
+  int noOfCodecs = acm->NumberOfCodecs();
+  int codecNo;
+
+  // Open input file
+  const std::string file_name =
+      webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
+  _pcmFile.Open(file_name, 32000, "rb");
+
+  // Set the codec for the current test.
+  if ((testMode == 0) || (testMode == 1)) {
+    // Set the codec id.
+    codecNo = codeId;
+  } else {
+    // Choose codec on command line.
+    printf("List of supported codec.\n");
+    for (int n = 0; n < noOfCodecs; n++) {
+      acm->Codec(n, sendCodec);
+      printf("%d %s\n", n, sendCodec.plname);
+    }
+    printf("Choose your codec:");
+    ASSERT_GT(scanf("%d", &codecNo), 0);
+  }
+
+  acm->Codec(codecNo, sendCodec);
+  if (!strcmp(sendCodec.plname, "CELT")) {
+    sendCodec.channels = 1;
+  }
+  acm->RegisterSendCodec(sendCodec);
+  _packetization = new TestPacketization(rtpStream, sendCodec.plfreq);
+  if (acm->RegisterTransportCallback(_packetization) < 0) {
+    printf("Registering Transport Callback failed, for run: codecId: %d: --\n",
+           codeId);
+  }
+
+  _acm = acm;
+}
+
+void Sender::Teardown() {
+  _pcmFile.Close();
+  delete _packetization;
+}
+
+bool Sender::Add10MsData() {
+  if (!_pcmFile.EndOfFile()) {
+    _pcmFile.Read10MsData(_audioFrame);
+    WebRtc_Word32 ok = _acm->Add10MsData(_audioFrame);
+    if (ok != 0) {
+      printf("Error calling Add10MsData: for run: codecId: %d\n", codeId);
+      exit(1);
+    }
+    return true;
+  }
+  return false;
+}
+
+bool Sender::Process() {
+  WebRtc_Word32 ok = _acm->Process();
+  if (ok < 0) {
+    printf("Error calling Add10MsData: for run: codecId: %d\n", codeId);
+    exit(1);
+  }
+  return true;
+}
+
+void Sender::Run() {
+  while (true) {
+    if (!Add10MsData()) {
+      break;
+    }
+    if (!Process()) { // This could be done in a processing thread
+      break;
+    }
+  }
+}
+
+Receiver::Receiver()
+    : _playoutLengthSmpls(WEBRTC_10MS_PCM_AUDIO),
+      _payloadSizeBytes(MAX_INCOMING_PAYLOAD) {
+}
+
+void Receiver::Setup(AudioCodingModule *acm, RTPStream *rtpStream) {
+  struct CodecInst recvCodec;
+  int noOfCodecs;
+  acm->InitializeReceiver();
+
+  noOfCodecs = acm->NumberOfCodecs();
+  for (int i = 0; i < noOfCodecs; i++) {
+    acm->Codec((WebRtc_UWord8) i, recvCodec);
+    if (acm->RegisterReceiveCodec(recvCodec) != 0) {
+      printf("Unable to register codec: for run: codecId: %d\n", codeId);
+      exit(1);
+    }
+  }
+
+  int playSampFreq;
+  std::string file_name;
+  std::stringstream file_stream;
+  file_stream << webrtc::test::OutputPath() << "encodeDecode_out" <<
+      static_cast<int>(codeId) << ".pcm";
+  file_name = file_stream.str();
+  _rtpStream = rtpStream;
+
+  if (testMode == 1) {
+    playSampFreq=recvCodec.plfreq;
+    _pcmFile.Open(file_name, recvCodec.plfreq, "wb+");
+  } else if (testMode == 0) {
+    playSampFreq=32000;
+    _pcmFile.Open(file_name, 32000, "wb+");
+  } else {
+    printf("\nValid output frequencies:\n");
+    printf("8000\n16000\n32000\n-1,");
+    printf("which means output frequency equal to received signal frequency");
+    printf("\n\nChoose output sampling frequency: ");
+    ASSERT_GT(scanf("%d", &playSampFreq), 0);
+    file_name =  webrtc::test::OutputPath() + "encodeDecode_out.pcm";
+    _pcmFile.Open(file_name, playSampFreq, "wb+");
+  }
+
+  _realPayloadSizeBytes = 0;
+  _playoutBuffer = new WebRtc_Word16[WEBRTC_10MS_PCM_AUDIO];
+  _frequency = playSampFreq;
+  _acm = acm;
+  _firstTime = true;
+}
+
+void Receiver::Teardown() {
+  delete [] _playoutBuffer;
+  _pcmFile.Close();
+  if (testMode > 1)
+    Trace::ReturnTrace();
+}
+
+bool Receiver::IncomingPacket() {
+  if (!_rtpStream->EndOfFile()) {
+    if (_firstTime) {
+      _firstTime = false;
+      _realPayloadSizeBytes = _rtpStream->Read(&_rtpInfo, _incomingPayload,
+                                               _payloadSizeBytes, &_nextTime);
+      if (_realPayloadSizeBytes == 0) {
+        if (_rtpStream->EndOfFile()) {
+          _firstTime = true;
+          return true;
+        } else {
+          printf("Error in reading incoming payload.\n");
+          return false;
+        }
+      }
+   }
+
+   WebRtc_Word32 ok = _acm->IncomingPacket(_incomingPayload,
+                                           _realPayloadSizeBytes, _rtpInfo);
+   if (ok != 0) {
+     printf("Error when inserting packet to ACM, for run: codecId: %d\n",
+            codeId);
+   }
+   _realPayloadSizeBytes = _rtpStream->Read(&_rtpInfo, _incomingPayload,
+                                            _payloadSizeBytes, &_nextTime);
+    if (_realPayloadSizeBytes == 0 && _rtpStream->EndOfFile()) {
+      _firstTime = true;
+    }
+  }
+  return true;
+}
+
+bool Receiver::PlayoutData() {
+  AudioFrame audioFrame;
+
+  if (_acm->PlayoutData10Ms(_frequency, audioFrame) != 0) {
+    printf("Error when calling PlayoutData10Ms, for run: codecId: %d\n",
+           codeId);
+    exit(1);
+  }
+  if (_playoutLengthSmpls == 0) {
+    return false;
+  }
+  _pcmFile.Write10MsData(audioFrame.data_,
+                         audioFrame.samples_per_channel_);
+  return true;
+}
+
+void Receiver::Run() {
+  WebRtc_UWord8 counter500Ms = 50;
+  WebRtc_UWord32 clock = 0;
+
+  while (counter500Ms > 0) {
+    if (clock == 0 || clock >= _nextTime) {
+      IncomingPacket();
+      if (clock == 0) {
+        clock = _nextTime;
+      }
+    }
+    if ((clock % 10) == 0) {
+      if (!PlayoutData()) {
+        clock++;
+        continue;
+      }
+    }
+    if (_rtpStream->EndOfFile()) {
+      counter500Ms--;
+    }
+    clock++;
+  }
+}
+
+EncodeDecodeTest::EncodeDecodeTest() {
+  _testMode = 2;
+  Trace::CreateTrace();
+  Trace::SetTraceFile((webrtc::test::OutputPath() +
+      "acm_encdec_trace.txt").c_str());
+}
+
+EncodeDecodeTest::EncodeDecodeTest(int testMode) {
+  //testMode == 0 for autotest
+  //testMode == 1 for testing all codecs/parameters
+  //testMode > 1 for specific user-input test (as it was used before)
+ _testMode = testMode;
+ if(_testMode != 0) {
+   Trace::CreateTrace();
+   Trace::SetTraceFile((webrtc::test::OutputPath() +
+       "acm_encdec_trace.txt").c_str());
+ }
+}
+
+void EncodeDecodeTest::Perform() {
+  if (_testMode == 0) {
+    printf("Running Encode/Decode Test");
+    WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceAudioCoding, -1,
+                 "---------- EncodeDecodeTest ----------");
+  }
+
+  int numCodecs = 1;
+  int codePars[3]; // Frequency, packet size, rate.
+  int numPars[52]; // Number of codec parameters sets (freq, pacsize, rate)
+                   // to test, for a given codec.
+
+  codePars[0] = 0;
+  codePars[1] = 0;
+  codePars[2] = 0;
+
+  AudioCodingModule* acm = AudioCodingModule::Create(0);
+  struct CodecInst sendCodecTmp;
+  numCodecs = acm->NumberOfCodecs();
+
+  if (_testMode == 1) {
+    printf("List of supported codec.\n");
+  }
+  if (_testMode != 2) {
+    for (int n = 0; n < numCodecs; n++) {
+      acm->Codec(n, sendCodecTmp);
+      if (STR_CASE_CMP(sendCodecTmp.plname, "telephone-event") == 0) {
+        numPars[n] = 0;
+      } else if (STR_CASE_CMP(sendCodecTmp.plname, "cn") == 0) {
+        numPars[n] = 0;
+      } else if (STR_CASE_CMP(sendCodecTmp.plname, "red") == 0) {
+        numPars[n] = 0;
+      } else if (sendCodecTmp.channels == 2) {
+        numPars[n] = 0;
+      } else {
+        numPars[n] = 1;
+        if (_testMode == 1) {
+          printf("%d %s\n", n, sendCodecTmp.plname);
+        }
+      }
+    }
+  } else {
+    numCodecs = 1;
+    numPars[0] = 1;
+  }
+
+  _receiver.testMode = _testMode;
+
+  // Loop over all mono codecs:
+  for (int codeId = 0; codeId < numCodecs; codeId++) {
+    // Only encode using real mono encoders, not telephone-event and cng.
+    for (int loopPars = 1; loopPars <= numPars[codeId]; loopPars++) {
+      if (_testMode == 1) {
+        printf("\n");
+        printf("***FOR RUN: codeId: %d\n", codeId);
+        printf("\n");
+      } else if (_testMode == 0) {
+        printf(".");
+      }
+
+      EncodeToFile(1, codeId, codePars, _testMode);
+
+      RTPFile rtpFile;
+      std::string fileName = webrtc::test::OutputPath() + "outFile.rtp";
+      rtpFile.Open(fileName.c_str(), "rb");
+
+      _receiver.codeId = codeId;
+
+      rtpFile.ReadHeader();
+      _receiver.Setup(acm, &rtpFile);
+      _receiver.Run();
+      _receiver.Teardown();
+      rtpFile.Close();
+
+      if (_testMode == 1) {
+        printf("***COMPLETED RUN FOR: codecID: %d ***\n", codeId);
+      }
+    }
+  }
+  AudioCodingModule::Destroy(acm);
+  if (_testMode == 0) {
+    printf("Done!\n");
+  }
+  if (_testMode == 1)
+    Trace::ReturnTrace();
+}
+
+void EncodeDecodeTest::EncodeToFile(int fileType, int codeId, int* codePars,
+                                    int testMode) {
+  AudioCodingModule* acm = AudioCodingModule::Create(1);
+  RTPFile rtpFile;
+  std::string fileName = webrtc::test::OutputPath() + "outFile.rtp";
+  rtpFile.Open(fileName.c_str(), "wb+");
+  rtpFile.WriteHeader();
+
+  //for auto_test and logging
+  _sender.testMode = testMode;
+  _sender.codeId = codeId;
+
+  _sender.Setup(acm, &rtpFile);
+  struct CodecInst sendCodecInst;
+  if (acm->SendCodec(sendCodecInst) >= 0) {
+    _sender.Run();
+  }
+  _sender.Teardown();
+  rtpFile.Close();
+  AudioCodingModule::Destroy(acm);
+}
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/test/EncodeDecodeTest.h b/src/modules/audio_coding/main/test/EncodeDecodeTest.h
new file mode 100644
index 0000000..f407a6b
--- /dev/null
+++ b/src/modules/audio_coding/main/test/EncodeDecodeTest.h
@@ -0,0 +1,114 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_ENCODEDECODETEST_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_ENCODEDECODETEST_H_
+
+#include <stdio.h>
+
+#include "ACMTest.h"
+#include "audio_coding_module.h"
+#include "RTPFile.h"
+#include "PCMFile.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+#define MAX_INCOMING_PAYLOAD 8096
+
+// TestPacketization callback which writes the encoded payloads to file
+class TestPacketization: public AudioPacketizationCallback {
+ public:
+  TestPacketization(RTPStream *rtpStream, WebRtc_UWord16 frequency);
+  ~TestPacketization();
+  virtual WebRtc_Word32 SendData(const FrameType frameType,
+                                 const WebRtc_UWord8 payloadType,
+                                 const WebRtc_UWord32 timeStamp,
+                                 const WebRtc_UWord8* payloadData,
+                                 const WebRtc_UWord16 payloadSize,
+                                 const RTPFragmentationHeader* fragmentation);
+
+ private:
+  static void MakeRTPheader(WebRtc_UWord8* rtpHeader, WebRtc_UWord8 payloadType,
+                            WebRtc_Word16 seqNo, WebRtc_UWord32 timeStamp,
+                            WebRtc_UWord32 ssrc);
+  RTPStream* _rtpStream;
+  WebRtc_Word32 _frequency;
+  WebRtc_Word16 _seqNo;
+};
+
+class Sender {
+ public:
+  Sender();
+  void Setup(AudioCodingModule *acm, RTPStream *rtpStream);
+  void Teardown();
+  void Run();
+  bool Add10MsData();
+  bool Process();
+
+  //for auto_test and logging
+  WebRtc_UWord8 testMode;
+  WebRtc_UWord8 codeId;
+
+ private:
+  AudioCodingModule* _acm;
+  PCMFile _pcmFile;
+  AudioFrame _audioFrame;
+  TestPacketization* _packetization;
+};
+
+class Receiver {
+ public:
+  Receiver();
+  void Setup(AudioCodingModule *acm, RTPStream *rtpStream);
+  void Teardown();
+  void Run();
+  bool IncomingPacket();
+  bool PlayoutData();
+
+  //for auto_test and logging
+  WebRtc_UWord8 codeId;
+  WebRtc_UWord8 testMode;
+
+ private:
+  AudioCodingModule* _acm;
+  RTPStream* _rtpStream;
+  PCMFile _pcmFile;
+  WebRtc_Word16* _playoutBuffer;
+  WebRtc_UWord16 _playoutLengthSmpls;
+  WebRtc_UWord8 _incomingPayload[MAX_INCOMING_PAYLOAD];
+  WebRtc_UWord16 _payloadSizeBytes;
+  WebRtc_UWord16 _realPayloadSizeBytes;
+  WebRtc_Word32 _frequency;
+  bool _firstTime;
+  WebRtcRTPHeader _rtpInfo;
+  WebRtc_UWord32 _nextTime;
+};
+
+class EncodeDecodeTest: public ACMTest {
+ public:
+  EncodeDecodeTest();
+  EncodeDecodeTest(int testMode);
+  virtual void Perform();
+
+  WebRtc_UWord16 _playoutFreq;
+  WebRtc_UWord8 _testMode;
+
+ private:
+  void EncodeToFile(int fileType, int codeId, int* codePars, int testMode);
+
+ protected:
+  Sender _sender;
+  Receiver _receiver;
+};
+
+} // namespace webrtc
+
+#endif
diff --git a/src/modules/audio_coding/main/test/PCMFile.cc b/src/modules/audio_coding/main/test/PCMFile.cc
new file mode 100644
index 0000000..fbe73f5
--- /dev/null
+++ b/src/modules/audio_coding/main/test/PCMFile.cc
@@ -0,0 +1,248 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "PCMFile.h"
+
+#include <cctype>
+#include <stdio.h>
+#include <string.h>
+
+#include "gtest/gtest.h"
+#include "module_common_types.h"
+
+namespace webrtc {
+
+#define MAX_FILE_NAME_LENGTH_BYTE 500
+
+PCMFile::PCMFile()
+    : pcm_file_(NULL),
+      samples_10ms_(160),
+      frequency_(16000),
+      end_of_file_(false),
+      auto_rewind_(false),
+      rewinded_(false),
+      read_stereo_(false),
+      save_stereo_(false) {
+  timestamp_ = (((WebRtc_UWord32)rand() & 0x0000FFFF) << 16) |
+      ((WebRtc_UWord32)rand() & 0x0000FFFF);
+}
+
+PCMFile::PCMFile(WebRtc_UWord32 timestamp)
+    : pcm_file_(NULL),
+      samples_10ms_(160),
+      frequency_(16000),
+      end_of_file_(false),
+      auto_rewind_(false),
+      rewinded_(false),
+      read_stereo_(false),
+      save_stereo_(false) {
+  timestamp_ = timestamp;
+}
+
+WebRtc_Word16 PCMFile::ChooseFile(std::string* file_name,
+                                  WebRtc_Word16 max_len) {
+  char tmp_name[MAX_FILE_NAME_LENGTH_BYTE];
+
+  EXPECT_TRUE(fgets(tmp_name, MAX_FILE_NAME_LENGTH_BYTE, stdin) != NULL);
+  tmp_name[MAX_FILE_NAME_LENGTH_BYTE - 1] = '\0';
+  WebRtc_Word16 n = 0;
+
+  // Removing leading spaces.
+  while ((isspace(tmp_name[n]) || iscntrl(tmp_name[n])) && (tmp_name[n] != 0)
+      && (n < MAX_FILE_NAME_LENGTH_BYTE)) {
+    n++;
+  }
+  if (n > 0) {
+    memmove(tmp_name, &tmp_name[n], MAX_FILE_NAME_LENGTH_BYTE - n);
+  }
+
+  // Removing trailing spaces.
+  n = (WebRtc_Word16)(strlen(tmp_name) - 1);
+  if (n >= 0) {
+    while ((isspace(tmp_name[n]) || iscntrl(tmp_name[n])) && (n >= 0)) {
+      n--;
+    }
+  }
+  if (n >= 0) {
+    tmp_name[n + 1] = '\0';
+  }
+
+  WebRtc_Word16 len = (WebRtc_Word16) strlen(tmp_name);
+  if (len > max_len) {
+    return -1;
+  }
+  if (len > 0) {
+    std::string tmp_string(tmp_name, len + 1);
+    *file_name = tmp_string;
+  }
+  return 0;
+}
+
+WebRtc_Word16 PCMFile::ChooseFile(std::string* file_name,
+                                  WebRtc_Word16 max_len,
+                                  WebRtc_UWord16* frequency_hz) {
+  char tmp_name[MAX_FILE_NAME_LENGTH_BYTE];
+
+  EXPECT_TRUE(fgets(tmp_name, MAX_FILE_NAME_LENGTH_BYTE, stdin) != NULL);
+  tmp_name[MAX_FILE_NAME_LENGTH_BYTE - 1] = '\0';
+  WebRtc_Word16 n = 0;
+
+  // Removing trailing spaces.
+  while ((isspace(tmp_name[n]) || iscntrl(tmp_name[n])) && (tmp_name[n] != 0)
+      && (n < MAX_FILE_NAME_LENGTH_BYTE)) {
+    n++;
+  }
+  if (n > 0) {
+    memmove(tmp_name, &tmp_name[n], MAX_FILE_NAME_LENGTH_BYTE - n);
+  }
+
+  // Removing trailing spaces.
+  n = (WebRtc_Word16)(strlen(tmp_name) - 1);
+  if (n >= 0) {
+    while ((isspace(tmp_name[n]) || iscntrl(tmp_name[n])) && (n >= 0)) {
+      n--;
+    }
+  }
+  if (n >= 0) {
+    tmp_name[n + 1] = '\0';
+  }
+
+  WebRtc_Word16 len = (WebRtc_Word16) strlen(tmp_name);
+  if (len > max_len) {
+    return -1;
+  }
+  if (len > 0) {
+    std::string tmp_string(tmp_name, len + 1);
+    *file_name = tmp_string;
+  }
+  printf("Enter the sampling frequency (in Hz) of the above file [%u]: ",
+         *frequency_hz);
+  EXPECT_TRUE(fgets(tmp_name, 10, stdin) != NULL);
+  WebRtc_UWord16 tmp_frequency = (WebRtc_UWord16) atoi(tmp_name);
+  if (tmp_frequency > 0) {
+    *frequency_hz = tmp_frequency;
+  }
+  return 0;
+}
+
+void PCMFile::Open(const std::string& file_name, WebRtc_UWord16 frequency,
+                   const char* mode, bool auto_rewind) {
+  if ((pcm_file_ = fopen(file_name.c_str(), mode)) == NULL) {
+    printf("Cannot open file %s.\n", file_name.c_str());
+    ADD_FAILURE() << "Unable to read file";
+  }
+  frequency_ = frequency;
+  samples_10ms_ = (WebRtc_UWord16)(frequency_ / 100);
+  auto_rewind_ = auto_rewind;
+  end_of_file_ = false;
+  rewinded_ = false;
+}
+
+WebRtc_Word32 PCMFile::SamplingFrequency() const {
+  return frequency_;
+}
+
+WebRtc_UWord16 PCMFile::PayloadLength10Ms() const {
+  return samples_10ms_;
+}
+
+WebRtc_Word32 PCMFile::Read10MsData(AudioFrame& audio_frame) {
+  WebRtc_UWord16 channels = 1;
+  if (read_stereo_) {
+    channels = 2;
+  }
+
+  WebRtc_Word32 payload_size = (WebRtc_Word32) fread(audio_frame.data_,
+                                                    sizeof(WebRtc_UWord16),
+                                                    samples_10ms_ * channels,
+                                                    pcm_file_);
+  if (payload_size < samples_10ms_ * channels) {
+    for (int k = payload_size; k < samples_10ms_ * channels; k++) {
+      audio_frame.data_[k] = 0;
+    }
+    if (auto_rewind_) {
+      rewind(pcm_file_);
+      rewinded_ = true;
+    } else {
+      end_of_file_ = true;
+    }
+  }
+  audio_frame.samples_per_channel_ = samples_10ms_;
+  audio_frame.sample_rate_hz_ = frequency_;
+  audio_frame.num_channels_ = channels;
+  audio_frame.timestamp_ = timestamp_;
+  timestamp_ += samples_10ms_;
+  return samples_10ms_;
+}
+
+void PCMFile::Write10MsData(AudioFrame& audio_frame) {
+  if (audio_frame.num_channels_ == 1) {
+    if (!save_stereo_) {
+      if (fwrite(audio_frame.data_, sizeof(WebRtc_UWord16),
+                 audio_frame.samples_per_channel_, pcm_file_) !=
+          static_cast<size_t>(audio_frame.samples_per_channel_)) {
+        return;
+      }
+    } else {
+      WebRtc_Word16* stereo_audio =
+          new WebRtc_Word16[2 * audio_frame.samples_per_channel_];
+      int k;
+      for (k = 0; k < audio_frame.samples_per_channel_; k++) {
+        stereo_audio[k << 1] = audio_frame.data_[k];
+        stereo_audio[(k << 1) + 1] = audio_frame.data_[k];
+      }
+      if (fwrite(stereo_audio, sizeof(WebRtc_Word16),
+                 2 * audio_frame.samples_per_channel_, pcm_file_) !=
+          static_cast<size_t>(2 * audio_frame.samples_per_channel_)) {
+        return;
+      }
+      delete[] stereo_audio;
+    }
+  } else {
+    if (fwrite(audio_frame.data_, sizeof(WebRtc_Word16),
+               audio_frame.num_channels_ * audio_frame.samples_per_channel_,
+               pcm_file_) != static_cast<size_t>(
+            audio_frame.num_channels_ * audio_frame.samples_per_channel_)) {
+      return;
+    }
+  }
+}
+
+void PCMFile::Write10MsData(WebRtc_Word16* playout_buffer,
+                            WebRtc_UWord16 length_smpls) {
+  if (fwrite(playout_buffer, sizeof(WebRtc_UWord16),
+             length_smpls, pcm_file_) != length_smpls) {
+    return;
+  }
+}
+
+void PCMFile::Close() {
+  fclose(pcm_file_);
+  pcm_file_ = NULL;
+}
+
+void PCMFile::Rewind() {
+  rewind(pcm_file_);
+  end_of_file_ = false;
+}
+
+bool PCMFile::Rewinded() {
+  return rewinded_;
+}
+
+void PCMFile::SaveStereo(bool is_stereo) {
+  save_stereo_ = is_stereo;
+}
+
+void PCMFile::ReadStereo(bool is_stereo) {
+  read_stereo_ = is_stereo;
+}
+
+}  // namespace webrtc
diff --git a/src/modules/audio_coding/main/test/PCMFile.h b/src/modules/audio_coding/main/test/PCMFile.h
new file mode 100644
index 0000000..2dbfecd
--- /dev/null
+++ b/src/modules/audio_coding/main/test/PCMFile.h
@@ -0,0 +1,71 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_PCMFILE_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_PCMFILE_H_
+
+#include <cstdio>
+#include <cstdlib>
+#include <string>
+
+#include "module_common_types.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+class PCMFile {
+ public:
+  PCMFile();
+  PCMFile(WebRtc_UWord32 timestamp);
+  ~PCMFile() {
+    if (pcm_file_ != NULL) {
+      fclose(pcm_file_);
+    }
+  }
+
+  void Open(const std::string& filename, WebRtc_UWord16 frequency,
+                  const char* mode, bool auto_rewind = false);
+
+  WebRtc_Word32 Read10MsData(AudioFrame& audio_frame);
+
+  void Write10MsData(WebRtc_Word16 *playout_buffer,
+                     WebRtc_UWord16 length_smpls);
+  void Write10MsData(AudioFrame& audio_frame);
+
+  WebRtc_UWord16 PayloadLength10Ms() const;
+  WebRtc_Word32 SamplingFrequency() const;
+  void Close();
+  bool EndOfFile() const {
+    return end_of_file_;
+  }
+  void Rewind();
+  static WebRtc_Word16 ChooseFile(std::string* file_name,
+                                  WebRtc_Word16 max_len,
+                                  WebRtc_UWord16* frequency_hz);
+  static WebRtc_Word16 ChooseFile(std::string* file_name,
+                                  WebRtc_Word16 max_len);
+  bool Rewinded();
+  void SaveStereo(bool is_stereo = true);
+  void ReadStereo(bool is_stereo = true);
+ private:
+  FILE* pcm_file_;
+  WebRtc_UWord16 samples_10ms_;
+  WebRtc_Word32 frequency_;
+  bool end_of_file_;
+  bool auto_rewind_;
+  bool rewinded_;
+  WebRtc_UWord32 timestamp_;
+  bool read_stereo_;
+  bool save_stereo_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_PCMFILE_H_
diff --git a/src/modules/audio_coding/main/test/RTPFile.cc b/src/modules/audio_coding/main/test/RTPFile.cc
new file mode 100644
index 0000000..37f9d3c
--- /dev/null
+++ b/src/modules/audio_coding/main/test/RTPFile.cc
@@ -0,0 +1,281 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "RTPFile.h"
+
+#include <stdlib.h>
+
+#ifdef WIN32
+#   include <Winsock2.h>
+#else
+#   include <arpa/inet.h>
+#endif
+
+#include "audio_coding_module.h"
+#include "engine_configurations.h"
+#include "gtest/gtest.h" // TODO (tlegrand): Consider removing usage of gtest.
+#include "rw_lock_wrapper.h"
+
+namespace webrtc {
+
+void RTPStream::ParseRTPHeader(WebRtcRTPHeader* rtpInfo, const WebRtc_UWord8* rtpHeader)
+{
+    rtpInfo->header.payloadType = rtpHeader[1];
+    rtpInfo->header.sequenceNumber = (static_cast<WebRtc_UWord16>(rtpHeader[2])<<8) | rtpHeader[3];
+    rtpInfo->header.timestamp = (static_cast<WebRtc_UWord32>(rtpHeader[4])<<24) |
+                         (static_cast<WebRtc_UWord32>(rtpHeader[5])<<16) |
+                         (static_cast<WebRtc_UWord32>(rtpHeader[6])<<8) |
+                         rtpHeader[7];
+    rtpInfo->header.ssrc = (static_cast<WebRtc_UWord32>(rtpHeader[8])<<24) |
+                    (static_cast<WebRtc_UWord32>(rtpHeader[9])<<16) |
+                    (static_cast<WebRtc_UWord32>(rtpHeader[10])<<8) |
+                    rtpHeader[11];
+}
+
+void RTPStream::MakeRTPheader(WebRtc_UWord8* rtpHeader, 
+                              WebRtc_UWord8 payloadType, WebRtc_Word16 seqNo,
+                              WebRtc_UWord32 timeStamp, WebRtc_UWord32 ssrc)
+{
+    rtpHeader[0]=(unsigned char)0x80;
+    rtpHeader[1]=(unsigned char)(payloadType & 0xFF);
+    rtpHeader[2]=(unsigned char)((seqNo>>8)&0xFF);
+    rtpHeader[3]=(unsigned char)((seqNo)&0xFF);
+    rtpHeader[4]=(unsigned char)((timeStamp>>24)&0xFF);
+    rtpHeader[5]=(unsigned char)((timeStamp>>16)&0xFF);
+
+    rtpHeader[6]=(unsigned char)((timeStamp>>8)&0xFF); 
+    rtpHeader[7]=(unsigned char)(timeStamp & 0xFF);
+
+    rtpHeader[8]=(unsigned char)((ssrc>>24)&0xFF);
+    rtpHeader[9]=(unsigned char)((ssrc>>16)&0xFF);
+
+    rtpHeader[10]=(unsigned char)((ssrc>>8)&0xFF);
+    rtpHeader[11]=(unsigned char)(ssrc & 0xFF);
+}
+
+
+RTPPacket::RTPPacket(WebRtc_UWord8 payloadType, WebRtc_UWord32 timeStamp,
+                                    WebRtc_Word16 seqNo, const WebRtc_UWord8* payloadData,
+                                    WebRtc_UWord16 payloadSize, WebRtc_UWord32 frequency)
+                                    :
+payloadType(payloadType),
+timeStamp(timeStamp),
+seqNo(seqNo),
+payloadSize(payloadSize),
+frequency(frequency)
+{
+    if (payloadSize > 0)
+    {
+        this->payloadData = new WebRtc_UWord8[payloadSize];
+        memcpy(this->payloadData, payloadData, payloadSize);
+    }
+}
+
+RTPPacket::~RTPPacket()
+{
+    delete [] payloadData;
+}
+
+RTPBuffer::RTPBuffer()
+{
+    _queueRWLock = RWLockWrapper::CreateRWLock();
+}
+
+RTPBuffer::~RTPBuffer()
+{
+    delete _queueRWLock;
+}
+
+void
+RTPBuffer::Write(const WebRtc_UWord8 payloadType, const WebRtc_UWord32 timeStamp,
+                                    const WebRtc_Word16 seqNo, const WebRtc_UWord8* payloadData,
+                                    const WebRtc_UWord16 payloadSize, WebRtc_UWord32 frequency)
+{
+    RTPPacket *packet = new RTPPacket(payloadType, timeStamp, seqNo, payloadData, payloadSize, frequency);
+    _queueRWLock->AcquireLockExclusive();
+    _rtpQueue.push(packet);
+    _queueRWLock->ReleaseLockExclusive();
+}
+
+WebRtc_UWord16
+RTPBuffer::Read(WebRtcRTPHeader* rtpInfo,
+                WebRtc_UWord8* payloadData,
+                WebRtc_UWord16 payloadSize,
+                WebRtc_UWord32* offset)
+{
+    _queueRWLock->AcquireLockShared();
+    RTPPacket *packet = _rtpQueue.front();
+    _rtpQueue.pop();
+    _queueRWLock->ReleaseLockShared();
+    rtpInfo->header.markerBit = 1;
+    rtpInfo->header.payloadType = packet->payloadType;
+    rtpInfo->header.sequenceNumber = packet->seqNo;
+    rtpInfo->header.ssrc = 0;
+    rtpInfo->header.timestamp = packet->timeStamp;
+    if (packet->payloadSize > 0 && payloadSize >= packet->payloadSize)
+    {
+        memcpy(payloadData, packet->payloadData, packet->payloadSize);
+    }
+    else
+    {
+        return 0;
+    }
+    *offset = (packet->timeStamp/(packet->frequency/1000));
+
+    return packet->payloadSize;
+}
+
+bool
+RTPBuffer::EndOfFile() const
+{
+    _queueRWLock->AcquireLockShared();
+    bool eof = _rtpQueue.empty();
+    _queueRWLock->ReleaseLockShared();
+    return eof;
+}
+
+void RTPFile::Open(const char *filename, const char *mode)
+{
+    if ((_rtpFile = fopen(filename, mode)) == NULL)
+    {
+        printf("Cannot write file %s.\n", filename);
+        ADD_FAILURE() << "Unable to write file";
+        exit(1);
+    }
+}
+
+void RTPFile::Close()
+{
+    if (_rtpFile != NULL)
+    {
+        fclose(_rtpFile);
+        _rtpFile = NULL;
+    }
+}
+
+
+void RTPFile::WriteHeader()
+{
+    // Write data in a format that NetEQ and RTP Play can parse
+    fprintf(_rtpFile, "#!RTPencode%s\n", "1.0");
+    WebRtc_UWord32 dummy_variable = 0;
+    // should be converted to network endian format, but does not matter when 0
+    if (fwrite(&dummy_variable, 4, 1, _rtpFile) != 1) {
+      return;
+    }
+    if (fwrite(&dummy_variable, 4, 1, _rtpFile) != 1) {
+      return;
+    }
+    if (fwrite(&dummy_variable, 4, 1, _rtpFile) != 1) {
+      return;
+    }
+    if (fwrite(&dummy_variable, 2, 1, _rtpFile) != 1) {
+      return;
+    }
+    if (fwrite(&dummy_variable, 2, 1, _rtpFile) != 1) {
+      return;
+    }
+    fflush(_rtpFile);
+}
+
+void RTPFile::ReadHeader()
+{
+    WebRtc_UWord32 start_sec, start_usec, source;
+    WebRtc_UWord16 port, padding;
+    char fileHeader[40];
+    EXPECT_TRUE(fgets(fileHeader, 40, _rtpFile) != 0);
+    EXPECT_EQ(1u, fread(&start_sec, 4, 1, _rtpFile));
+    start_sec=ntohl(start_sec);
+    EXPECT_EQ(1u, fread(&start_usec, 4, 1, _rtpFile));
+    start_usec=ntohl(start_usec);
+    EXPECT_EQ(1u, fread(&source, 4, 1, _rtpFile));
+    source=ntohl(source);
+    EXPECT_EQ(1u, fread(&port, 2, 1, _rtpFile));
+    port=ntohs(port);
+    EXPECT_EQ(1u, fread(&padding, 2, 1, _rtpFile));
+    padding=ntohs(padding);
+}
+
+void RTPFile::Write(const WebRtc_UWord8 payloadType, const WebRtc_UWord32 timeStamp,
+                    const WebRtc_Word16 seqNo, const WebRtc_UWord8* payloadData,
+                    const WebRtc_UWord16 payloadSize, WebRtc_UWord32 frequency)
+{
+    /* write RTP packet to file */
+    WebRtc_UWord8 rtpHeader[12];
+    MakeRTPheader(rtpHeader, payloadType, seqNo, timeStamp, 0);
+    WebRtc_UWord16 lengthBytes = htons(12 + payloadSize + 8);
+    WebRtc_UWord16 plen = htons(12 + payloadSize);
+    WebRtc_UWord32 offsetMs;
+
+    offsetMs = (timeStamp/(frequency/1000));
+    offsetMs = htonl(offsetMs);
+    if (fwrite(&lengthBytes, 2, 1, _rtpFile) != 1) {
+      return;
+    }
+    if (fwrite(&plen, 2, 1, _rtpFile) != 1) {
+      return;
+    }
+    if (fwrite(&offsetMs, 4, 1, _rtpFile) != 1) {
+      return;
+    }
+    if (fwrite(rtpHeader, 12, 1, _rtpFile) != 1) {
+      return;
+    }
+    if (fwrite(payloadData, 1, payloadSize, _rtpFile) != payloadSize) {
+      return;
+    }
+}
+
+WebRtc_UWord16 RTPFile::Read(WebRtcRTPHeader* rtpInfo,
+                   WebRtc_UWord8* payloadData,
+                   WebRtc_UWord16 payloadSize,
+                   WebRtc_UWord32* offset)
+{
+    WebRtc_UWord16 lengthBytes;
+    WebRtc_UWord16 plen;
+    WebRtc_UWord8 rtpHeader[12];
+    size_t read_len = fread(&lengthBytes, 2, 1, _rtpFile);
+    /* Check if we have reached end of file. */
+    if ((read_len == 0) && feof(_rtpFile))
+    {
+        _rtpEOF = true;
+        return 0;
+    }
+    EXPECT_EQ(1u, fread(&plen, 2, 1, _rtpFile));
+    EXPECT_EQ(1u, fread(offset, 4, 1, _rtpFile));
+    lengthBytes = ntohs(lengthBytes);
+    plen = ntohs(plen);
+    *offset = ntohl(*offset);
+    EXPECT_GT(plen, 11);
+
+    EXPECT_EQ(1u, fread(rtpHeader, 12, 1, _rtpFile));
+    ParseRTPHeader(rtpInfo, rtpHeader);
+    rtpInfo->type.Audio.isCNG = false;
+    rtpInfo->type.Audio.channel = 1;
+    EXPECT_EQ(lengthBytes, plen + 8);
+
+    if (plen == 0)
+    {
+        return 0;
+    }
+    if (payloadSize < (lengthBytes - 20))
+    {
+      return -1;
+    }
+    if (lengthBytes < 20)
+    {
+      return -1;
+    }
+    lengthBytes -= 20;
+    EXPECT_EQ(lengthBytes, fread(payloadData, 1, lengthBytes, _rtpFile));
+    return lengthBytes;
+}
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/test/RTPFile.h b/src/modules/audio_coding/main/test/RTPFile.h
new file mode 100644
index 0000000..b5f5299
--- /dev/null
+++ b/src/modules/audio_coding/main/test/RTPFile.h
@@ -0,0 +1,103 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef RTPFILE_H
+#define RTPFILE_H
+
+#include "audio_coding_module.h"
+#include "module_common_types.h"
+#include "typedefs.h"
+#include "rw_lock_wrapper.h"
+#include <stdio.h>
+#include <queue>
+
+namespace webrtc {
+
+class RTPStream
+{
+public:
+    virtual ~RTPStream(){}
+
+    virtual void Write(const WebRtc_UWord8 payloadType, const WebRtc_UWord32 timeStamp,
+                                     const WebRtc_Word16 seqNo, const WebRtc_UWord8* payloadData,
+                                     const WebRtc_UWord16 payloadSize, WebRtc_UWord32 frequency) = 0;
+
+    // Returns the packet's payload size. Zero should be treated as an
+    // end-of-stream (in the case that EndOfFile() is true) or an error.
+    virtual WebRtc_UWord16 Read(WebRtcRTPHeader* rtpInfo,
+                    WebRtc_UWord8* payloadData,
+                    WebRtc_UWord16 payloadSize,
+                    WebRtc_UWord32* offset) = 0;
+    virtual bool EndOfFile() const = 0;
+
+protected:
+    void MakeRTPheader(WebRtc_UWord8* rtpHeader, 
+                                      WebRtc_UWord8 payloadType, WebRtc_Word16 seqNo, 
+                                      WebRtc_UWord32 timeStamp, WebRtc_UWord32 ssrc);
+    void ParseRTPHeader(WebRtcRTPHeader* rtpInfo, const WebRtc_UWord8* rtpHeader);
+};
+
+class RTPPacket
+{
+public:
+    RTPPacket(WebRtc_UWord8 payloadType, WebRtc_UWord32 timeStamp,
+                                     WebRtc_Word16 seqNo, const WebRtc_UWord8* payloadData,
+                                     WebRtc_UWord16 payloadSize, WebRtc_UWord32 frequency);
+    ~RTPPacket();
+    WebRtc_UWord8 payloadType;
+    WebRtc_UWord32 timeStamp;
+    WebRtc_Word16 seqNo;
+    WebRtc_UWord8* payloadData;
+    WebRtc_UWord16 payloadSize;
+    WebRtc_UWord32 frequency;
+};
+
+class RTPBuffer : public RTPStream
+{
+public:
+    RTPBuffer();
+    ~RTPBuffer();
+    void Write(const WebRtc_UWord8 payloadType, const WebRtc_UWord32 timeStamp,
+                                     const WebRtc_Word16 seqNo, const WebRtc_UWord8* payloadData,
+                                     const WebRtc_UWord16 payloadSize, WebRtc_UWord32 frequency);
+    WebRtc_UWord16 Read(WebRtcRTPHeader* rtpInfo,
+                    WebRtc_UWord8* payloadData,
+                    WebRtc_UWord16 payloadSize,
+                    WebRtc_UWord32* offset);
+    virtual bool EndOfFile() const;
+private:
+    RWLockWrapper*             _queueRWLock;
+    std::queue<RTPPacket *>   _rtpQueue;
+};
+
+class RTPFile : public RTPStream
+{
+public:
+    ~RTPFile(){}
+    RTPFile() : _rtpFile(NULL),_rtpEOF(false) {}
+    void Open(const char *outFilename, const char *mode);
+    void Close();
+    void WriteHeader();
+    void ReadHeader();
+    void Write(const WebRtc_UWord8 payloadType, const WebRtc_UWord32 timeStamp,
+                                     const WebRtc_Word16 seqNo, const WebRtc_UWord8* payloadData,
+                                     const WebRtc_UWord16 payloadSize, WebRtc_UWord32 frequency);
+    WebRtc_UWord16 Read(WebRtcRTPHeader* rtpInfo,
+                    WebRtc_UWord8* payloadData,
+                    WebRtc_UWord16 payloadSize,
+                    WebRtc_UWord32* offset);
+    bool EndOfFile() const { return _rtpEOF; }
+private:
+    FILE*   _rtpFile;
+    bool    _rtpEOF;
+};
+
+} // namespace webrtc
+#endif
diff --git a/src/modules/audio_coding/main/test/SpatialAudio.cc b/src/modules/audio_coding/main/test/SpatialAudio.cc
new file mode 100644
index 0000000..923eefe
--- /dev/null
+++ b/src/modules/audio_coding/main/test/SpatialAudio.cc
@@ -0,0 +1,230 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <string.h>
+
+#include <math.h>
+
+#include "common_types.h"
+#include "SpatialAudio.h"
+#include "trace.h"
+#include "testsupport/fileutils.h"
+#include "utility.h"
+
+namespace webrtc {
+
+#define NUM_PANN_COEFFS 10
+
+SpatialAudio::SpatialAudio(int testMode)
+{
+    _testMode = testMode;
+}
+
+SpatialAudio::~SpatialAudio()
+{
+    AudioCodingModule::Destroy(_acmLeft);
+    AudioCodingModule::Destroy(_acmRight);
+    AudioCodingModule::Destroy(_acmReceiver);
+    delete _channel;
+    _inFile.Close();
+    _outFile.Close();
+}
+
+WebRtc_Word16 
+SpatialAudio::Setup()
+{
+    // Create ACMs and the Channel;
+    _acmLeft = AudioCodingModule::Create(1);
+    _acmRight = AudioCodingModule::Create(2);
+    _acmReceiver = AudioCodingModule::Create(3);
+    _channel = new Channel;
+
+    // Register callback for the sender side.
+    CHECK_ERROR(_acmLeft->RegisterTransportCallback(_channel));
+    CHECK_ERROR(_acmRight->RegisterTransportCallback(_channel));
+    // Register the receiver ACM in channel
+    _channel->RegisterReceiverACM(_acmReceiver);
+
+    WebRtc_UWord16 sampFreqHz = 32000;
+
+    const std::string file_name =
+        webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
+    _inFile.Open(file_name, sampFreqHz, "rb", false);
+
+    std::string output_file = webrtc::test::OutputPath() +
+        "out_spatial_autotest.pcm";
+    if(_testMode == 1)
+    {
+        output_file = webrtc::test::OutputPath() + "testspatial_out.pcm";
+        printf("\n");
+        printf("Enter the output file [%s]: ", output_file.c_str());
+        PCMFile::ChooseFile(&output_file, MAX_FILE_NAME_LENGTH_BYTE,
+                            &sampFreqHz);
+    }
+    else
+    {
+        output_file = webrtc::test::OutputPath() + "testspatial_out.pcm";
+    }
+    _outFile.Open(output_file, sampFreqHz, "wb", false);
+    _outFile.SaveStereo(true);
+
+    // Register all available codes as receiving codecs.
+    CodecInst codecInst;
+    int status;
+    WebRtc_UWord8 num_encoders = _acmReceiver->NumberOfCodecs();
+    // Register all available codes as receiving codecs once more.
+    for (WebRtc_UWord8 n = 0; n < num_encoders; n++) {
+      status = _acmReceiver->Codec(n, codecInst);
+      if (status < 0) {
+        printf("Error in Codec(), no matching codec found");
+      }
+      status = _acmReceiver->RegisterReceiveCodec(codecInst);
+      if (status < 0) {
+        printf("Error in RegisterReceiveCodec() for payload type %d",
+               codecInst.pltype);
+      }
+    }
+
+    return 0;
+}
+
+void
+SpatialAudio::Perform()
+{
+    if(_testMode == 0)
+    {
+        printf("Running SpatialAudio Test");
+        WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceAudioCoding, -1,
+                     "---------- SpatialAudio ----------");
+    }
+
+    Setup();
+
+    CodecInst codecInst;
+    _acmLeft->Codec((WebRtc_UWord8)1, codecInst);
+    CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
+    EncodeDecode();
+
+    WebRtc_Word16 pannCntr = 0;
+
+    double leftPanning[NUM_PANN_COEFFS] =  
+        {1.00, 0.95, 0.90, 0.85, 0.80, 0.75, 0.70, 0.60, 0.55, 0.50};
+    double rightPanning[NUM_PANN_COEFFS] = 
+        {0.50, 0.55, 0.60, 0.70, 0.75, 0.80, 0.85, 0.90, 0.95, 1.00};
+
+    while((pannCntr + 1) < NUM_PANN_COEFFS)
+    {
+        _acmLeft->Codec((WebRtc_UWord8)0, codecInst);    
+        codecInst.pacsize = 480;
+        CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
+        CHECK_ERROR(_acmRight->RegisterSendCodec(codecInst));
+
+        EncodeDecode(leftPanning[pannCntr], rightPanning[pannCntr]);
+        pannCntr++;
+
+        // Change codec    
+        _acmLeft->Codec((WebRtc_UWord8)3, codecInst);    
+        codecInst.pacsize = 320;
+        CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
+        CHECK_ERROR(_acmRight->RegisterSendCodec(codecInst));
+
+        EncodeDecode(leftPanning[pannCntr], rightPanning[pannCntr]);
+        pannCntr++;
+        if(_testMode == 0)
+        {
+            printf(".");
+        }
+    }
+
+    _acmLeft->Codec((WebRtc_UWord8)4, codecInst);
+    CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
+    EncodeDecode();
+
+    _acmLeft->Codec((WebRtc_UWord8)0, codecInst);    
+    codecInst.pacsize = 480;
+    CHECK_ERROR(_acmLeft->RegisterSendCodec(codecInst));
+    CHECK_ERROR(_acmRight->RegisterSendCodec(codecInst));
+    pannCntr = NUM_PANN_COEFFS -1;
+    while(pannCntr >= 0)
+    {
+        EncodeDecode(leftPanning[pannCntr], rightPanning[pannCntr]);
+        pannCntr--;
+        if(_testMode == 0)
+        {
+            printf(".");
+        }
+    }
+    if(_testMode == 0)
+    {
+        printf("Done!\n");
+    }
+}
+
+void 
+SpatialAudio::EncodeDecode(
+    const double leftPanning, 
+    const double rightPanning)
+{
+    AudioFrame audioFrame;
+    WebRtc_Word32 outFileSampFreq = _outFile.SamplingFrequency();
+
+    const double rightToLeftRatio = rightPanning / leftPanning;
+
+    _channel->SetIsStereo(true);
+
+    while(!_inFile.EndOfFile())
+    {
+        _inFile.Read10MsData(audioFrame);
+        for(int n = 0; n < audioFrame.samples_per_channel_; n++)
+        {
+            audioFrame.data_[n] = (WebRtc_Word16)floor(
+                audioFrame.data_[n] * leftPanning + 0.5);
+        }
+        CHECK_ERROR(_acmLeft->Add10MsData(audioFrame));
+
+        for(int n = 0; n < audioFrame.samples_per_channel_; n++)
+        {
+            audioFrame.data_[n] = (WebRtc_Word16)floor(
+                audioFrame.data_[n] * rightToLeftRatio + 0.5);
+        }
+        CHECK_ERROR(_acmRight->Add10MsData(audioFrame));
+
+        CHECK_ERROR(_acmLeft->Process());
+        CHECK_ERROR(_acmRight->Process());
+
+        CHECK_ERROR(_acmReceiver->PlayoutData10Ms(outFileSampFreq, audioFrame));
+        _outFile.Write10MsData(audioFrame);
+    }
+    _inFile.Rewind();
+}
+
+void 
+SpatialAudio::EncodeDecode()
+{
+    AudioFrame audioFrame;
+    WebRtc_Word32 outFileSampFreq = _outFile.SamplingFrequency();
+
+    _channel->SetIsStereo(false);
+
+    while(!_inFile.EndOfFile())
+    {
+        _inFile.Read10MsData(audioFrame);
+        CHECK_ERROR(_acmLeft->Add10MsData(audioFrame));
+
+        CHECK_ERROR(_acmLeft->Process());
+
+        CHECK_ERROR(_acmReceiver->PlayoutData10Ms(outFileSampFreq, audioFrame));
+        _outFile.Write10MsData(audioFrame);
+    }
+    _inFile.Rewind();
+}
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/test/SpatialAudio.h b/src/modules/audio_coding/main/test/SpatialAudio.h
new file mode 100644
index 0000000..6a88327
--- /dev/null
+++ b/src/modules/audio_coding/main/test/SpatialAudio.h
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef ACM_TEST_SPATIAL_AUDIO_H
+#define ACM_TEST_SPATIAL_AUDIO_H
+
+#include "ACMTest.h"
+#include "Channel.h"
+#include "PCMFile.h"
+#include "audio_coding_module.h"
+#include "utility.h"
+
+#define MAX_FILE_NAME_LENGTH_BYTE 500
+
+namespace webrtc {
+
+class SpatialAudio : public ACMTest
+{
+public:
+    SpatialAudio(int testMode);
+    ~SpatialAudio();
+
+    void Perform();
+private:
+    WebRtc_Word16 Setup();
+    void EncodeDecode(double leftPanning, double rightPanning);
+    void EncodeDecode();
+
+    AudioCodingModule* _acmLeft;
+    AudioCodingModule* _acmRight;
+    AudioCodingModule* _acmReceiver;
+    Channel*               _channel;
+    PCMFile                _inFile;
+    PCMFile                _outFile;
+    int                    _testMode;
+};
+
+} // namespace webrtc
+
+#endif
diff --git a/src/modules/audio_coding/main/test/TestAllCodecs.cc b/src/modules/audio_coding/main/test/TestAllCodecs.cc
new file mode 100644
index 0000000..b312390
--- /dev/null
+++ b/src/modules/audio_coding/main/test/TestAllCodecs.cc
@@ -0,0 +1,809 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "TestAllCodecs.h"
+
+#include <stdio.h>
+#include <string>
+
+#include "gtest/gtest.h"
+
+#include "audio_coding_module.h"
+#include "audio_coding_module_typedefs.h"
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "testsupport/fileutils.h"
+#include "trace.h"
+#include "typedefs.h"
+#include "utility.h"
+
+// Description of the test:
+// In this test we set up a one-way communication channel from a participant
+// called "a" to a participant called "b".
+// a -> channel_a_to_b -> b
+//
+// The test loops through all available mono codecs, encode at "a" sends over
+// the channel, and decodes at "b".
+
+namespace webrtc {
+
+// Class for simulating packet handling.
+TestPack::TestPack()
+    : receiver_acm_(NULL),
+      sequence_number_(0),
+      timestamp_diff_(0),
+      last_in_timestamp_(0),
+      total_bytes_(0),
+      payload_size_(0) {
+}
+
+TestPack::~TestPack() {
+}
+
+void TestPack::RegisterReceiverACM(AudioCodingModule* acm) {
+  receiver_acm_ = acm;
+  return;
+}
+
+int32_t TestPack::SendData(FrameType frame_type, uint8_t payload_type,
+                           uint32_t timestamp, const uint8_t* payload_data,
+                           uint16_t payload_size,
+                           const RTPFragmentationHeader* fragmentation) {
+  WebRtcRTPHeader rtp_info;
+  int32_t status;
+
+  rtp_info.header.markerBit = false;
+  rtp_info.header.ssrc = 0;
+  rtp_info.header.sequenceNumber = sequence_number_++;
+  rtp_info.header.payloadType = payload_type;
+  rtp_info.header.timestamp = timestamp;
+  if (frame_type == kAudioFrameCN) {
+    rtp_info.type.Audio.isCNG = true;
+  } else {
+    rtp_info.type.Audio.isCNG = false;
+  }
+  if (frame_type == kFrameEmpty) {
+    // Skip this frame.
+    return 0;
+  }
+
+  // Only run mono for all test cases.
+  rtp_info.type.Audio.channel = 1;
+  memcpy(payload_data_, payload_data, payload_size);
+
+  status =  receiver_acm_->IncomingPacket(payload_data_, payload_size,
+                                          rtp_info);
+
+  payload_size_ = payload_size;
+  timestamp_diff_ = timestamp - last_in_timestamp_;
+  last_in_timestamp_ = timestamp;
+  total_bytes_ += payload_size;
+  return status;
+}
+
+uint16_t TestPack::payload_size() {
+  return payload_size_;
+}
+
+uint32_t TestPack::timestamp_diff() {
+  return timestamp_diff_;
+}
+
+void TestPack::reset_payload_size() {
+  payload_size_ = 0;
+}
+
+TestAllCodecs::TestAllCodecs(int test_mode)
+    : acm_a_(NULL),
+      acm_b_(NULL),
+      channel_a_to_b_(NULL),
+      test_count_(0),
+      packet_size_samples_(0),
+      packet_size_bytes_(0) {
+  // test_mode = 0 for silent test (auto test)
+  test_mode_ = test_mode;
+}
+
+TestAllCodecs::~TestAllCodecs() {
+  if (acm_a_ != NULL) {
+    AudioCodingModule::Destroy(acm_a_);
+    acm_a_ = NULL;
+  }
+  if (acm_b_ != NULL) {
+    AudioCodingModule::Destroy(acm_b_);
+    acm_b_ = NULL;
+  }
+  if (channel_a_to_b_ != NULL) {
+    delete channel_a_to_b_;
+    channel_a_to_b_ = NULL;
+  }
+}
+
+void TestAllCodecs::Perform() {
+
+  const std::string file_name =
+      webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
+  infile_a_.Open(file_name, 32000, "rb");
+
+  if (test_mode_ == 0) {
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioCoding, -1,
+                 "---------- TestAllCodecs ----------");
+  }
+
+  acm_a_ = AudioCodingModule::Create(0);
+  acm_b_ = AudioCodingModule::Create(1);
+
+  acm_a_->InitializeReceiver();
+  acm_b_->InitializeReceiver();
+
+  uint8_t num_encoders = acm_a_->NumberOfCodecs();
+  CodecInst my_codec_param;
+  for (uint8_t n = 0; n < num_encoders; n++) {
+    acm_b_->Codec(n, my_codec_param);
+    acm_b_->RegisterReceiveCodec(my_codec_param);
+  }
+
+  // Create and connect the channel
+  channel_a_to_b_ = new TestPack;
+  acm_a_->RegisterTransportCallback(channel_a_to_b_);
+  channel_a_to_b_->RegisterReceiverACM(acm_b_);
+
+  // All codecs are tested for all allowed sampling frequencies, rates and
+  // packet sizes.
+#ifdef WEBRTC_CODEC_AMR
+  if (test_mode_ != 0) {
+    printf("===============================================================\n");
+  }
+  test_count_++;
+  OpenOutFile(test_count_);
+  char codec_amr[] = "AMR";
+  RegisterSendCodec('A', codec_amr, 8000, 4750, 160, 2);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amr, 8000, 4750, 320, 2);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amr, 8000, 4750, 480, 3);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amr, 8000, 5150, 160, 2);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amr, 8000, 5150, 320, 2);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amr, 8000, 5150, 480, 3);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amr, 8000, 5900, 160, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amr, 8000, 5900, 320, 2);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amr, 8000, 5900, 480, 2);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amr, 8000, 6700, 160, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amr, 8000, 6700, 320, 2);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amr, 8000, 6700, 480, 2);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amr, 8000, 7400, 160, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amr, 8000, 7400, 320, 2);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amr, 8000, 7400, 480, 3);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amr, 8000, 7950, 160, 2);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amr, 8000, 7950, 320, 2);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amr, 8000, 7950, 480, 3);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amr, 8000, 10200, 160, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amr, 8000, 10200, 320, 2);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amr, 8000, 10200, 480, 3);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amr, 8000, 12200, 160, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amr, 8000, 12200, 320, 2);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amr, 8000, 12200, 480, 3);
+  Run(channel_a_to_b_);
+  outfile_b_.Close();
+#endif
+#ifdef WEBRTC_CODEC_AMRWB
+  if (test_mode_ != 0) {
+    printf("===============================================================\n");
+  }
+  test_count_++;
+  char codec_amrwb[] = "AMR-WB";
+  OpenOutFile(test_count_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 7000, 320, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 7000, 640, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 7000, 960, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 9000, 320, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 9000, 640, 2);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 9000, 960, 2);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 12000, 320, 3);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 12000, 640, 6);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 12000, 960, 8);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 14000, 320, 2);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 14000, 640, 4);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 14000, 960, 5);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 16000, 320, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 16000, 640, 2);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 16000, 960, 2);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 18000, 320, 2);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 18000, 640, 4);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 18000, 960, 5);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 20000, 320, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 20000, 640, 2);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 20000, 960, 2);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 23000, 320, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 23000, 640, 3);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 23000, 960, 3);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 24000, 320, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 24000, 640, 2);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_amrwb, 16000, 24000, 960, 2);
+  Run(channel_a_to_b_);
+  outfile_b_.Close();
+#endif
+#ifdef WEBRTC_CODEC_G722
+  if (test_mode_ != 0) {
+    printf("===============================================================\n");
+  }
+  test_count_++;
+  OpenOutFile(test_count_);
+  char codec_g722[] = "G722";
+  RegisterSendCodec('A', codec_g722, 16000, 64000, 160, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g722, 16000, 64000, 320, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g722, 16000, 64000, 480, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g722, 16000, 64000, 640, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g722, 16000, 64000, 800, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g722, 16000, 64000, 960, 0);
+  Run(channel_a_to_b_);
+  outfile_b_.Close();
+#endif
+#ifdef WEBRTC_CODEC_G722_1
+  if (test_mode_ != 0) {
+    printf("===============================================================\n");
+  }
+  test_count_++;
+  OpenOutFile(test_count_);
+  char codec_g722_1[] = "G7221";
+  RegisterSendCodec('A', codec_g722_1, 16000, 32000, 320, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g722_1, 16000, 24000, 320, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g722_1, 16000, 16000, 320, 0);
+  Run(channel_a_to_b_);
+  outfile_b_.Close();
+#endif
+#ifdef WEBRTC_CODEC_G722_1C
+  if (test_mode_ != 0) {
+    printf("===============================================================\n");
+  }
+  test_count_++;
+  OpenOutFile(test_count_);
+  char codec_g722_1c[] = "G7221";
+  RegisterSendCodec('A', codec_g722_1c, 32000, 48000, 640, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g722_1c, 32000, 32000, 640, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g722_1c, 32000, 24000, 640, 0);
+  Run(channel_a_to_b_);
+  outfile_b_.Close();
+#endif
+#ifdef WEBRTC_CODEC_G729
+  if (test_mode_ != 0) {
+    printf("===============================================================\n");
+  }
+  test_count_++;
+  OpenOutFile(test_count_);
+  char codec_g729[] = "G729";
+  RegisterSendCodec('A', codec_g729, 8000, 8000, 80, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729, 8000, 8000, 160, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729, 8000, 8000, 240, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729, 8000, 8000, 320, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729, 8000, 8000, 400, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729, 8000, 8000, 480, 0);
+  Run(channel_a_to_b_);
+  outfile_b_.Close();
+#endif
+#ifdef WEBRTC_CODEC_G729_1
+  if (test_mode_ != 0) {
+    printf("===============================================================\n");
+  }
+  test_count_++;
+  OpenOutFile(test_count_);
+  char codec_g729_1[] = "G7291";
+  RegisterSendCodec('A', codec_g729_1, 16000, 8000, 320, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 8000, 640, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 8000, 960, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 12000, 320, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 12000, 640, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 12000, 960, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 14000, 320, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 14000, 640, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 14000, 960, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 16000, 320, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 16000, 640, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 16000, 960, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 18000, 320, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 18000, 640, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 18000, 960, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 20000, 320, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 20000, 640, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 20000, 960, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 22000, 320, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 22000, 640, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 22000, 960, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 24000, 320, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 24000, 640, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 24000, 960, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 26000, 320, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 26000, 640, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 26000, 960, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 28000, 320, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 28000, 640, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 28000, 960, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 30000, 320, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 30000, 640, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 30000, 960, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 32000, 320, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 32000, 640, 1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_g729_1, 16000, 32000, 960, 1);
+  Run(channel_a_to_b_);
+  outfile_b_.Close();
+#endif
+#ifdef WEBRTC_CODEC_GSMFR
+  if (test_mode_ != 0) {
+    printf("===============================================================\n");
+  }
+  test_count_++;
+  OpenOutFile(test_count_);
+  char codec_gsmfr[] = "GSM";
+  RegisterSendCodec('A', codec_gsmfr, 8000, 13200, 160, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_gsmfr, 8000, 13200, 320, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_gsmfr, 8000, 13200, 480, 0);
+  Run(channel_a_to_b_);
+  outfile_b_.Close();
+#endif
+#ifdef WEBRTC_CODEC_ILBC
+  if (test_mode_ != 0) {
+    printf("===============================================================\n");
+  }
+  test_count_++;
+  OpenOutFile(test_count_);
+  char codec_ilbc[] = "ILBC";
+  RegisterSendCodec('A', codec_ilbc, 8000, 13300, 240, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_ilbc, 8000, 13300, 480, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_ilbc, 8000, 15200, 160, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_ilbc, 8000, 15200, 320, 0);
+  Run(channel_a_to_b_);
+  outfile_b_.Close();
+#endif
+#if (defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
+  if (test_mode_ != 0) {
+    printf("===============================================================\n");
+  }
+  test_count_++;
+  OpenOutFile(test_count_);
+  char codec_isac[] = "ISAC";
+  RegisterSendCodec('A', codec_isac, 16000, -1, 480, -1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_isac, 16000, -1, 960, -1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_isac, 16000, 15000, 480, -1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_isac, 16000, 32000, 960, -1);
+  Run(channel_a_to_b_);
+  outfile_b_.Close();
+#endif
+#ifdef WEBRTC_CODEC_ISAC
+  if (test_mode_ != 0) {
+    printf("===============================================================\n");
+  }
+  test_count_++;
+  OpenOutFile(test_count_);
+  RegisterSendCodec('A', codec_isac, 32000, -1, 960, -1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_isac, 32000, 56000, 960, -1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_isac, 32000, 37000, 960, -1);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_isac, 32000, 32000, 960, -1);
+  Run(channel_a_to_b_);
+  outfile_b_.Close();
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+  if (test_mode_ != 0) {
+    printf("===============================================================\n");
+  }
+  test_count_++;
+  OpenOutFile(test_count_);
+  char codec_l16[] = "L16";
+  RegisterSendCodec('A', codec_l16, 8000, 128000, 80, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_l16, 8000, 128000, 160, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_l16, 8000, 128000, 240, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_l16, 8000, 128000, 320, 0);
+  Run(channel_a_to_b_);
+  outfile_b_.Close();
+  if (test_mode_ != 0) {
+    printf("===============================================================\n");
+  }
+  test_count_++;
+  OpenOutFile(test_count_);
+  RegisterSendCodec('A', codec_l16, 16000, 256000, 160, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_l16, 16000, 256000, 320, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_l16, 16000, 256000, 480, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_l16, 16000, 256000, 640, 0);
+  Run(channel_a_to_b_);
+  outfile_b_.Close();
+  if (test_mode_ != 0) {
+    printf("===============================================================\n");
+  }
+  test_count_++;
+  OpenOutFile(test_count_);
+  RegisterSendCodec('A', codec_l16, 32000, 512000, 320, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_l16, 32000, 512000, 640, 0);
+  Run(channel_a_to_b_);
+  outfile_b_.Close();
+#endif
+  if (test_mode_ != 0) {
+    printf("===============================================================\n");
+  }
+  test_count_++;
+  OpenOutFile(test_count_);
+  char codec_pcma[] = "PCMA";
+  RegisterSendCodec('A', codec_pcma, 8000, 64000, 80, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_pcma, 8000, 64000, 160, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_pcma, 8000, 64000, 240, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_pcma, 8000, 64000, 320, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_pcma, 8000, 64000, 400, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_pcma, 8000, 64000, 480, 0);
+  Run(channel_a_to_b_);
+  if (test_mode_ != 0) {
+    printf("===============================================================\n");
+  }
+  char codec_pcmu[] = "PCMU";
+  RegisterSendCodec('A', codec_pcmu, 8000, 64000, 80, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_pcmu, 8000, 64000, 160, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_pcmu, 8000, 64000, 240, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_pcmu, 8000, 64000, 320, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_pcmu, 8000, 64000, 400, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_pcmu, 8000, 64000, 480, 0);
+  Run(channel_a_to_b_);
+  outfile_b_.Close();
+#ifdef WEBRTC_CODEC_SPEEX
+  if (test_mode_ != 0) {
+    printf("===============================================================\n");
+  }
+  test_count_++;
+  OpenOutFile(test_count_);
+  char codec_speex[] = "SPEEX";
+  RegisterSendCodec('A', codec_speex, 8000, 2400, 160, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_speex, 8000, 8000, 320, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_speex, 8000, 18200, 480, 0);
+  Run(channel_a_to_b_);
+  outfile_b_.Close();
+
+  if (test_mode_ != 0) {
+    printf("===============================================================\n");
+  }
+  test_count_++;
+  OpenOutFile(test_count_);
+  RegisterSendCodec('A', codec_speex, 16000, 4000, 320, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_speex, 16000, 12800, 640, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_speex, 16000, 34200, 960, 0);
+  Run(channel_a_to_b_);
+  outfile_b_.Close();
+#endif
+#ifdef WEBRTC_CODEC_CELT
+  if (test_mode_ != 0) {
+    printf("===============================================================\n");
+  }
+  test_count_++;
+  OpenOutFile(test_count_);
+  char codec_celt[] = "CELT";
+  RegisterSendCodec('A', codec_celt, 32000, 48000, 640, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_celt, 32000, 64000, 640, 0);
+  Run(channel_a_to_b_);
+  RegisterSendCodec('A', codec_celt, 32000, 128000, 640, 0);
+  Run(channel_a_to_b_);
+  outfile_b_.Close();
+#endif
+  if (test_mode_ != 0) {
+    printf("===============================================================\n");
+
+    /* Print out all codecs that were not tested in the run */
+    printf("The following codecs was not included in the test:\n");
+#ifndef WEBRTC_CODEC_AMR
+    printf("   GSMAMR\n");
+#endif
+#ifndef WEBRTC_CODEC_AMRWB
+    printf("   GSMAMR-wb\n");
+#endif
+#ifndef WEBRTC_CODEC_G722
+    printf("   G.722\n");
+#endif
+#ifndef WEBRTC_CODEC_G722_1
+    printf("   G.722.1\n");
+#endif
+#ifndef WEBRTC_CODEC_G722_1C
+    printf("   G.722.1C\n");
+#endif
+#ifndef WEBRTC_CODEC_G729
+    printf("   G.729\n");
+#endif
+#ifndef WEBRTC_CODEC_G729_1
+    printf("   G.729.1\n");
+#endif
+#ifndef WEBRTC_CODEC_GSMFR
+    printf("   GSMFR\n");
+#endif
+#ifndef WEBRTC_CODEC_ILBC
+    printf("   iLBC\n");
+#endif
+#ifndef WEBRTC_CODEC_ISAC
+    printf("   ISAC float\n");
+#endif
+#ifndef WEBRTC_CODEC_ISACFX
+    printf("   ISAC fix\n");
+#endif
+#ifndef WEBRTC_CODEC_PCM16
+    printf("   PCM16\n");
+#endif
+#ifndef WEBRTC_CODEC_SPEEX
+    printf("   Speex\n");
+#endif
+
+    printf("\nTo complete the test, listen to the %d number of output files.\n",
+           test_count_);
+  }
+}
+
+// Register Codec to use in the test
+//
+// Input:  side             - which ACM to use, 'A' or 'B'
+//         codec_name       - name to use when register the codec
+//         sampling_freq_hz - sampling frequency in Herz
+//         rate             - bitrate in bytes
+//         packet_size      - packet size in samples
+//         extra_byte       - if extra bytes needed compared to the bitrate
+//                            used when registering, can be an internal header
+//                            set to -1 if the codec is a variable rate codec
+void TestAllCodecs::RegisterSendCodec(char side, char* codec_name,
+                                      int32_t sampling_freq_hz, int rate,
+                                      int packet_size, int extra_byte) {
+  if (test_mode_ != 0) {
+    // Print out codec and settings.
+    printf("codec: %s Freq: %d Rate: %d PackSize: %d\n", codec_name,
+           sampling_freq_hz, rate, packet_size);
+  }
+
+  // Store packet-size in samples, used to validate the received packet.
+  // If G.722, store half the size to compensate for the timestamp bug in the
+  // RFC for G.722.
+  // If iSAC runs in adaptive mode, packet size in samples can change on the
+  // fly, so we exclude this test by setting |packet_size_samples_| to -1.
+  if (!strcmp(codec_name, "G722")) {
+    packet_size_samples_ = packet_size / 2;
+  } else if (!strcmp(codec_name, "ISAC") && (rate == -1)) {
+    packet_size_samples_ = -1;
+  } else {
+    packet_size_samples_ = packet_size;
+  }
+
+  // Store the expected packet size in bytes, used to validate the received
+  // packet. If variable rate codec (extra_byte == -1), set to -1 (65535).
+  if (extra_byte != -1) {
+    // Add 0.875 to always round up to a whole byte
+    packet_size_bytes_ =
+        static_cast<uint16_t>(static_cast<float>(packet_size * rate) /
+        static_cast<float>(sampling_freq_hz * 8) + 0.875) + extra_byte;
+  } else {
+    // Packets will have a variable size.
+    packet_size_bytes_ = -1;
+  }
+
+  // Set pointer to the ACM where to register the codec.
+  AudioCodingModule* my_acm = NULL;
+  switch (side) {
+    case 'A': {
+      my_acm = acm_a_;
+      break;
+    }
+    case 'B': {
+      my_acm = acm_b_;
+      break;
+    }
+    default: {
+      break;
+    }
+  }
+  ASSERT_TRUE(my_acm != NULL);
+
+  // Get all codec parameters before registering
+  CodecInst my_codec_param;
+  CHECK_ERROR(AudioCodingModule::Codec(codec_name, my_codec_param,
+                                       sampling_freq_hz, 1));
+  my_codec_param.rate = rate;
+  my_codec_param.pacsize = packet_size;
+  CHECK_ERROR(my_acm->RegisterSendCodec(my_codec_param));
+}
+
+void TestAllCodecs::Run(TestPack* channel) {
+  AudioFrame audio_frame;
+
+  int32_t out_freq_hz = outfile_b_.SamplingFrequency();
+  uint16_t receive_size;
+  uint32_t timestamp_diff;
+  channel->reset_payload_size();
+  int error_count = 0;
+
+  int counter = 0;
+  while (!infile_a_.EndOfFile()) {
+    // Add 10 msec to ACM.
+    infile_a_.Read10MsData(audio_frame);
+    CHECK_ERROR(acm_a_->Add10MsData(audio_frame));
+
+    // Run sender side of ACM.
+    CHECK_ERROR(acm_a_->Process());
+
+    // Verify that the received packet size matches the settings.
+    receive_size = channel->payload_size();
+    if (receive_size) {
+      if ((receive_size != packet_size_bytes_) &&
+          (packet_size_bytes_ < 65535)) {
+        error_count++;
+      }
+
+      // Verify that the timestamp is updated with expected length. The counter
+      // is used to avoid problems when switching codec or frame size in the
+      // test.
+      timestamp_diff = channel->timestamp_diff();
+      if ((counter > 10) && (timestamp_diff != packet_size_samples_) &&
+          (packet_size_samples_ < 65535))
+        error_count++;
+    }
+
+    // Run received side of ACM.
+    CHECK_ERROR(acm_b_->PlayoutData10Ms(out_freq_hz, audio_frame));
+
+    // Write output speech to file.
+    outfile_b_.Write10MsData(audio_frame.data_,
+                             audio_frame.samples_per_channel_);
+
+    // Update loop counter
+    counter++;
+  }
+
+  EXPECT_EQ(0, error_count);
+
+  if (infile_a_.EndOfFile()) {
+    infile_a_.Rewind();
+  }
+}
+
+void TestAllCodecs::OpenOutFile(int test_number) {
+  std::string filename = webrtc::test::OutputPath();
+  std::ostringstream test_number_str;
+  test_number_str << test_number;
+  filename += "testallcodecs_out_";
+  filename += test_number_str.str();
+  filename += ".pcm";
+  outfile_b_.Open(filename, 32000, "wb");
+}
+
+void TestAllCodecs::DisplaySendReceiveCodec() {
+  CodecInst my_codec_param;
+  acm_a_->SendCodec(my_codec_param);
+  printf("%s -> ", my_codec_param.plname);
+  acm_b_->ReceiveCodec(my_codec_param);
+  printf("%s\n", my_codec_param.plname);
+}
+
+}  // namespace webrtc
+
diff --git a/src/modules/audio_coding/main/test/TestAllCodecs.h b/src/modules/audio_coding/main/test/TestAllCodecs.h
new file mode 100644
index 0000000..ef91913
--- /dev/null
+++ b/src/modules/audio_coding/main/test/TestAllCodecs.h
@@ -0,0 +1,79 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TEST_ALL_CODECS_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TEST_ALL_CODECS_H_
+
+#include "ACMTest.h"
+#include "Channel.h"
+#include "PCMFile.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+class TestPack : public AudioPacketizationCallback {
+ public:
+  TestPack();
+  ~TestPack();
+
+  void RegisterReceiverACM(AudioCodingModule* acm);
+
+  int32_t SendData(FrameType frame_type, uint8_t payload_type,
+                   uint32_t timestamp, const uint8_t* payload_data,
+                   uint16_t payload_size,
+                   const RTPFragmentationHeader* fragmentation);
+
+  uint16_t payload_size();
+  uint32_t timestamp_diff();
+  void reset_payload_size();
+
+ private:
+  AudioCodingModule*  receiver_acm_;
+  uint16_t sequence_number_;
+  uint8_t payload_data_[60 * 32 * 2 * 2];
+  uint32_t timestamp_diff_;
+  uint32_t last_in_timestamp_;
+  uint64_t total_bytes_;
+  uint16_t payload_size_;
+};
+
+class TestAllCodecs : public ACMTest {
+ public:
+  TestAllCodecs(int test_mode);
+  ~TestAllCodecs();
+
+  void Perform();
+
+ private:
+  // The default value of '-1' indicates that the registration is based only on
+  // codec name, and a sampling frequency matching is not required.
+  // This is useful for codecs which support several sampling frequency.
+  // Note! Only mono mode is tested in this test.
+  void RegisterSendCodec(char side, char* codec_name, int32_t sampling_freq_hz,
+                         int rate, int packet_size, int extra_byte);
+
+  void Run(TestPack* channel);
+  void OpenOutFile(int test_number);
+  void DisplaySendReceiveCodec();
+
+  int test_mode_;
+  AudioCodingModule* acm_a_;
+  AudioCodingModule* acm_b_;
+  TestPack* channel_a_to_b_;
+  PCMFile infile_a_;
+  PCMFile outfile_b_;
+  int test_count_;
+  uint16_t packet_size_samples_;
+  uint16_t packet_size_bytes_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TEST_ALL_CODECS_H_
diff --git a/src/modules/audio_coding/main/test/TestFEC.cc b/src/modules/audio_coding/main/test/TestFEC.cc
new file mode 100644
index 0000000..bdbd97a
--- /dev/null
+++ b/src/modules/audio_coding/main/test/TestFEC.cc
@@ -0,0 +1,625 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "TestFEC.h"
+
+#include <cassert>
+#include <iostream>
+
+#include "audio_coding_module_typedefs.h"
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "trace.h"
+#include "testsupport/fileutils.h"
+#include "utility.h"
+
+namespace webrtc {
+
+TestFEC::TestFEC(int testMode):
+_acmA(NULL),
+_acmB(NULL),
+_channelA2B(NULL),
+_testCntr(0)
+{
+    _testMode = testMode;
+}
+
+TestFEC::~TestFEC()
+{
+    if(_acmA != NULL)
+    {
+        AudioCodingModule::Destroy(_acmA);
+        _acmA = NULL;
+    }
+    if(_acmB != NULL)
+    {
+        AudioCodingModule::Destroy(_acmB);
+        _acmB = NULL;
+    }
+    if(_channelA2B != NULL)
+    {
+        delete _channelA2B;
+        _channelA2B = NULL;
+    }
+}
+
+void TestFEC::Perform()
+{
+
+    if(_testMode == 0)
+    {
+        printf("Running FEC Test");
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioCoding, -1,
+                     "---------- TestFEC ----------");
+    }
+    const std::string file_name =
+        webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
+    _inFileA.Open(file_name, 32000, "rb");
+
+    bool fecEnabled;
+
+    _acmA = AudioCodingModule::Create(0);
+    _acmB = AudioCodingModule::Create(1);
+
+    _acmA->InitializeReceiver();
+    _acmB->InitializeReceiver();
+
+    WebRtc_UWord8 numEncoders = _acmA->NumberOfCodecs();
+    CodecInst myCodecParam;
+    if(_testMode != 0)
+    {
+        printf("Registering codecs at receiver... \n");
+    }
+    for(WebRtc_UWord8 n = 0; n < numEncoders; n++)
+    {
+        _acmB->Codec(n, myCodecParam);
+        if(_testMode != 0)
+        {
+            printf("%s\n", myCodecParam.plname);
+        }
+        _acmB->RegisterReceiveCodec(myCodecParam);
+    }
+
+    // Create and connect the channel
+    _channelA2B = new Channel;    
+    _acmA->RegisterTransportCallback(_channelA2B);
+    _channelA2B->RegisterReceiverACM(_acmB);
+
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+#ifndef WEBRTC_CODEC_G722
+    printf("G722 needs to be activated to run this test\n");
+    exit(-1);
+#endif
+    char nameG722[] = "G722";
+    RegisterSendCodec('A', nameG722, 16000);
+    char nameCN[] = "CN";
+    RegisterSendCodec('A', nameCN, 16000);
+    char nameRED[] = "RED";
+    RegisterSendCodec('A', nameRED);
+    OpenOutFile(_testCntr);
+    SetVAD(true, true, VADAggr);
+    _acmA->SetFECStatus(false);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+    _outFileB.Close();
+
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+    _acmA->SetFECStatus(true);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    OpenOutFile(_testCntr);
+    Run();
+    _outFileB.Close();
+
+
+
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+    char nameISAC[] = "iSAC";
+    RegisterSendCodec('A',nameISAC, 16000);
+    OpenOutFile(_testCntr);
+    SetVAD(true, true, VADVeryAggr);
+    _acmA->SetFECStatus(false);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+    _outFileB.Close();
+
+
+
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+    _acmA->SetFECStatus(true);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    OpenOutFile(_testCntr);
+    Run();
+    _outFileB.Close();
+
+
+
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+
+    RegisterSendCodec('A', nameISAC, 32000);
+    OpenOutFile(_testCntr);
+    SetVAD(true, true, VADVeryAggr);
+    _acmA->SetFECStatus(false);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+    _outFileB.Close();
+
+
+
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+    _acmA->SetFECStatus(true);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    OpenOutFile(_testCntr);
+    Run();
+    _outFileB.Close();
+
+
+
+
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+
+    RegisterSendCodec('A', nameISAC, 32000);
+    OpenOutFile(_testCntr);
+    SetVAD(false, false, VADNormal);
+    _acmA->SetFECStatus(true);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+
+    
+    RegisterSendCodec('A', nameISAC, 16000);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+
+    RegisterSendCodec('A', nameISAC, 32000);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+
+    RegisterSendCodec('A', nameISAC, 16000);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+    _outFileB.Close();
+
+
+
+
+
+
+
+    _channelA2B->SetFECTestWithPacketLoss(true);
+
+
+
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+
+    RegisterSendCodec('A',nameG722);
+    RegisterSendCodec('A', nameCN, 16000);
+    OpenOutFile(_testCntr);
+    SetVAD(true, true, VADAggr);
+    _acmA->SetFECStatus(false);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+    _outFileB.Close();
+
+
+
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+    _acmA->SetFECStatus(true);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    OpenOutFile(_testCntr);
+    Run();
+    _outFileB.Close();
+
+
+
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+    RegisterSendCodec('A', nameISAC, 16000);
+    OpenOutFile(_testCntr);
+    SetVAD(true, true, VADVeryAggr);
+    _acmA->SetFECStatus(false);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+    _outFileB.Close();
+
+
+
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+    _acmA->SetFECStatus(true);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    OpenOutFile(_testCntr);
+    Run();
+    _outFileB.Close();
+
+
+
+
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+    RegisterSendCodec('A', nameISAC, 32000);
+    OpenOutFile(_testCntr);
+    SetVAD(true, true, VADVeryAggr);
+    _acmA->SetFECStatus(false);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+    _outFileB.Close();
+
+
+
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+    _acmA->SetFECStatus(true);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    OpenOutFile(_testCntr);
+    Run();
+    _outFileB.Close();
+
+
+
+
+
+
+    if(_testMode != 0)
+    {
+        printf("=======================================================================\n");
+        printf("%d ",_testCntr++);
+    }
+    else
+    {
+        printf(".");
+    }
+    RegisterSendCodec('A', nameISAC, 32000);
+    OpenOutFile(_testCntr);
+    SetVAD(false, false, VADNormal);
+    _acmA->SetFECStatus(true);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+
+    
+    RegisterSendCodec('A', nameISAC, 16000);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+
+    RegisterSendCodec('A', nameISAC, 32000);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+
+    RegisterSendCodec('A', nameISAC, 16000);
+    fecEnabled = _acmA->FECStatus();
+    if(_testMode != 0)
+    {
+        printf("FEC currently %s\n",(fecEnabled?"ON":"OFF"));
+        DisplaySendReceiveCodec();
+    }
+    Run();
+    _outFileB.Close();
+
+
+
+    if(_testMode == 0)
+    {
+        printf("Done!\n");
+    }
+}
+
+WebRtc_Word32 TestFEC::SetVAD(bool enableDTX, bool enableVAD, ACMVADMode vadMode)
+{
+    if(_testMode != 0)
+    {
+        printf("DTX %s; VAD %s; VAD-Mode %d\n", 
+            enableDTX? "ON":"OFF", 
+            enableVAD? "ON":"OFF", 
+            (WebRtc_Word16)vadMode);
+    }
+    return _acmA->SetVAD(enableDTX, enableVAD, vadMode);
+}
+
+WebRtc_Word16 TestFEC::RegisterSendCodec(char side, char* codecName, WebRtc_Word32 samplingFreqHz)
+{
+    if(_testMode != 0)
+    {
+        if(samplingFreqHz > 0)
+        {
+            printf("Registering %s-%d for side %c\n", codecName, samplingFreqHz, side);
+        }
+        else
+        {
+            printf("Registering %s for side %c\n", codecName, side);
+        }
+    }
+    std::cout << std::flush;
+    AudioCodingModule* myACM;
+    switch(side)
+    {
+    case 'A':
+        {
+            myACM = _acmA;
+            break;
+        }
+    case 'B':
+        {
+            myACM = _acmB;
+            break;
+        }
+    default:
+        return -1;
+    }
+
+    if(myACM == NULL)
+    {
+        assert(false);
+        return -1;
+    }
+    CodecInst myCodecParam;
+
+    CHECK_ERROR(AudioCodingModule::Codec(codecName, myCodecParam,
+                                         samplingFreqHz, 1));
+
+    CHECK_ERROR(myACM->RegisterSendCodec(myCodecParam));
+
+    // initialization was succesful
+    return 0;
+}
+
+void TestFEC::Run()
+{
+    AudioFrame audioFrame;
+
+    WebRtc_UWord16 msecPassed = 0;
+    WebRtc_UWord32 secPassed  = 0;
+    WebRtc_Word32 outFreqHzB = _outFileB.SamplingFrequency();
+
+    while(!_inFileA.EndOfFile())
+    {
+        _inFileA.Read10MsData(audioFrame);
+        CHECK_ERROR(_acmA->Add10MsData(audioFrame));
+        CHECK_ERROR(_acmA->Process());
+        CHECK_ERROR(_acmB->PlayoutData10Ms(outFreqHzB, audioFrame));
+        _outFileB.Write10MsData(audioFrame.data_, audioFrame.samples_per_channel_);
+        msecPassed += 10;
+        if(msecPassed >= 1000)
+        {
+            msecPassed = 0;
+            secPassed++;
+        }
+        if(((secPassed%5) == 4) && (msecPassed == 0) && (_testCntr > 14))
+        {
+            printf("%3u:%3u  ", secPassed, msecPassed);
+            _acmA->SetFECStatus(false);
+            printf("FEC currently %s\n",(_acmA->FECStatus()?"ON":"OFF"));
+        }
+        if(((secPassed%5) == 4) && (msecPassed >= 990) && (_testCntr > 14))
+        {
+            printf("%3u:%3u  ", secPassed, msecPassed);
+            _acmA->SetFECStatus(true);
+            printf("FEC currently %s\n",(_acmA->FECStatus()?"ON":"OFF"));
+        }
+    }
+    _inFileA.Rewind();
+}
+
+void TestFEC::OpenOutFile(WebRtc_Word16 test_number) {
+  std::string file_name;
+  std::stringstream file_stream;
+  file_stream << webrtc::test::OutputPath();
+  if (_testMode == 0) {
+    file_stream << "TestFEC_autoFile_";
+  } else {
+    file_stream << "TestFEC_outFile_";
+  }
+  file_stream << test_number << ".pcm";
+  file_name = file_stream.str();
+  _outFileB.Open(file_name, 16000, "wb");
+}
+
+void TestFEC::DisplaySendReceiveCodec()
+{
+    CodecInst myCodecParam;
+    _acmA->SendCodec(myCodecParam);
+    printf("%s -> ", myCodecParam.plname);
+    _acmB->ReceiveCodec(myCodecParam);
+    printf("%s\n", myCodecParam.plname);
+}
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/test/TestFEC.h b/src/modules/audio_coding/main/test/TestFEC.h
new file mode 100644
index 0000000..00e951f
--- /dev/null
+++ b/src/modules/audio_coding/main/test/TestFEC.h
@@ -0,0 +1,49 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_FEC_H
+#define TEST_FEC_H
+
+#include "ACMTest.h"
+#include "Channel.h"
+#include "PCMFile.h"
+
+namespace webrtc {
+
+class TestFEC : public ACMTest
+{
+public:
+    TestFEC(int testMode);
+    ~TestFEC();
+
+    void Perform();
+private:
+    // The default value of '-1' indicates that the registration is based only on codec name
+    // and a sampling frequncy matching is not required. This is useful for codecs which support
+    // several sampling frequency.
+    WebRtc_Word16 RegisterSendCodec(char side, char* codecName, WebRtc_Word32 sampFreqHz = -1);
+    void Run();
+    void OpenOutFile(WebRtc_Word16 testNumber);
+    void DisplaySendReceiveCodec();
+    WebRtc_Word32 SetVAD(bool enableDTX, bool enableVAD, ACMVADMode vadMode);
+    AudioCodingModule* _acmA;
+    AudioCodingModule* _acmB;
+
+    Channel*               _channelA2B;
+
+    PCMFile                _inFileA;
+    PCMFile                _outFileB;
+    WebRtc_Word16            _testCntr;
+    int                    _testMode;
+};
+
+} // namespace webrtc
+
+#endif
diff --git a/src/modules/audio_coding/main/test/TestStereo.cc b/src/modules/audio_coding/main/test/TestStereo.cc
new file mode 100644
index 0000000..9910ead
--- /dev/null
+++ b/src/modules/audio_coding/main/test/TestStereo.cc
@@ -0,0 +1,798 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "TestStereo.h"
+
+#include <cassert>
+#include <iostream>
+
+#include "gtest/gtest.h"
+
+#include "audio_coding_module_typedefs.h"
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "testsupport/fileutils.h"
+#include "trace.h"
+#include "utility.h"
+
+namespace webrtc {
+
+// Class for simulating packet handling
+TestPackStereo::TestPackStereo()
+    : receiver_acm_(NULL),
+      seq_no_(0),
+      timestamp_diff_(0),
+      last_in_timestamp_(0),
+      total_bytes_(0),
+      payload_size_(0),
+      codec_mode_(kNotSet),
+      lost_packet_(false) {}
+
+TestPackStereo::~TestPackStereo() {}
+
+void TestPackStereo::RegisterReceiverACM(AudioCodingModule* acm) {
+  receiver_acm_ = acm;
+  return;
+}
+
+WebRtc_Word32 TestPackStereo::SendData(
+    const FrameType frame_type,
+    const WebRtc_UWord8 payload_type,
+    const WebRtc_UWord32 timestamp,
+    const WebRtc_UWord8* payload_data,
+    const WebRtc_UWord16 payload_size,
+    const RTPFragmentationHeader* fragmentation) {
+  WebRtcRTPHeader rtp_info;
+  WebRtc_Word32 status = 0;
+
+  rtp_info.header.markerBit = false;
+  rtp_info.header.ssrc = 0;
+  rtp_info.header.sequenceNumber = seq_no_++;
+  rtp_info.header.payloadType = payload_type;
+  rtp_info.header.timestamp = timestamp;
+  if (frame_type == kFrameEmpty) {
+    // Skip this frame
+    return 0;
+  }
+
+  if (lost_packet_ == false) {
+    if (frame_type != kAudioFrameCN) {
+      rtp_info.type.Audio.isCNG = false;
+      rtp_info.type.Audio.channel = (int) codec_mode_;
+    } else {
+      rtp_info.type.Audio.isCNG = true;
+      rtp_info.type.Audio.channel = (int) kMono;
+    }
+    status = receiver_acm_->IncomingPacket(payload_data, payload_size,
+                                           rtp_info);
+
+    if (frame_type != kAudioFrameCN) {
+      payload_size_ = payload_size;
+    } else {
+      payload_size_ = -1;
+    }
+
+    timestamp_diff_ = timestamp - last_in_timestamp_;
+    last_in_timestamp_ = timestamp;
+    total_bytes_ += payload_size;
+  }
+  return status;
+}
+
+WebRtc_UWord16 TestPackStereo::payload_size() {
+  return payload_size_;
+}
+
+WebRtc_UWord32 TestPackStereo::timestamp_diff() {
+  return timestamp_diff_;
+}
+
+void TestPackStereo::reset_payload_size() {
+  payload_size_ = 0;
+}
+
+void TestPackStereo::set_codec_mode(enum StereoMonoMode mode) {
+  codec_mode_ = mode;
+}
+
+void TestPackStereo::set_lost_packet(bool lost) {
+  lost_packet_ = lost;
+}
+
+TestStereo::TestStereo(int test_mode)
+    : acm_a_(NULL),
+      acm_b_(NULL),
+      channel_a2b_(NULL),
+      test_cntr_(0),
+      pack_size_samp_(0),
+      pack_size_bytes_(0),
+      counter_(0),
+      g722_pltype_(0),
+      l16_8khz_pltype_(-1),
+      l16_16khz_pltype_(-1),
+      l16_32khz_pltype_(-1),
+      pcma_pltype_(-1),
+      pcmu_pltype_(-1),
+      celt_pltype_(-1),
+      cn_8khz_pltype_(-1),
+      cn_16khz_pltype_(-1),
+      cn_32khz_pltype_(-1) {
+  // test_mode = 0 for silent test (auto test)
+  test_mode_ = test_mode;
+}
+
+TestStereo::~TestStereo() {
+  if (acm_a_ != NULL) {
+    AudioCodingModule::Destroy(acm_a_);
+    acm_a_ = NULL;
+  }
+  if (acm_b_ != NULL) {
+    AudioCodingModule::Destroy(acm_b_);
+    acm_b_ = NULL;
+  }
+  if (channel_a2b_ != NULL) {
+    delete channel_a2b_;
+    channel_a2b_ = NULL;
+  }
+}
+
+void TestStereo::Perform() {
+  WebRtc_UWord16 frequency_hz;
+  int audio_channels;
+  int codec_channels;
+  bool dtx;
+  bool vad;
+  ACMVADMode vad_mode;
+
+  if (test_mode_ == 0) {
+    printf("Running Stereo Test");
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioCoding, -1,
+                 "---------- TestStereo ----------");
+  }
+
+  // Open both mono and stereo test files in 32 kHz.
+  const std::string file_name_stereo =
+      webrtc::test::ResourcePath("audio_coding/teststereo32kHz", "pcm");
+  const std::string file_name_mono =
+      webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
+  frequency_hz = 32000;
+  in_file_stereo_ = new PCMFile();
+  in_file_mono_ = new PCMFile();
+  in_file_stereo_->Open(file_name_stereo, frequency_hz, "rb");
+  in_file_stereo_->ReadStereo(true);
+  in_file_mono_->Open(file_name_mono, frequency_hz, "rb");
+  in_file_mono_->ReadStereo(false);
+
+  // Create and initialize two ACMs, one for each side of a one-to-one call.
+  acm_a_ = AudioCodingModule::Create(0);
+  acm_b_ = AudioCodingModule::Create(1);
+  ASSERT_TRUE((acm_a_ != NULL) && (acm_b_ != NULL));
+  EXPECT_EQ(0, acm_a_->InitializeReceiver());
+  EXPECT_EQ(0, acm_b_->InitializeReceiver());
+
+  // Register all available codes as receiving codecs.
+  WebRtc_UWord8 num_encoders = acm_a_->NumberOfCodecs();
+  CodecInst my_codec_param;
+  for (WebRtc_UWord8 n = 0; n < num_encoders; n++) {
+    EXPECT_EQ(0, acm_b_->Codec(n, my_codec_param));
+    EXPECT_EQ(0, acm_b_->RegisterReceiveCodec(my_codec_param));
+  }
+
+  // Test that unregister all receive codecs works.
+  for (WebRtc_UWord8 n = 0; n < num_encoders; n++) {
+    EXPECT_EQ(0, acm_b_->Codec(n, my_codec_param));
+    EXPECT_EQ(0, acm_b_->UnregisterReceiveCodec(my_codec_param.pltype));
+  }
+
+  // Register all available codes as receiving codecs once more.
+  for (WebRtc_UWord8 n = 0; n < num_encoders; n++) {
+    EXPECT_EQ(0, acm_b_->Codec(n, my_codec_param));
+    EXPECT_EQ(0, acm_b_->RegisterReceiveCodec(my_codec_param));
+  }
+
+  // TODO(tlegrand): Take care of return values of all function calls.
+
+  // TODO(tlegrand): Re-register all stereo codecs needed in the test,
+  // with new payload numbers.
+  // g722_pltype_ = 117;
+  // l16_8khz_pltype_ = 120;
+  // l16_16khz_pltype_ = 121;
+  // l16_32khz_pltype_ = 122;
+  // pcma_pltype_ = 110;
+  // pcmu_pltype_ = 118;
+  // celt_pltype_ = 119;
+  // cn_8khz_pltype_ = 123;
+  // cn_16khz_pltype_ = 124;
+  // cn_32khz_pltype_ = 125;
+
+  // Create and connect the channel.
+  channel_a2b_ = new TestPackStereo;
+  EXPECT_EQ(0, acm_a_->RegisterTransportCallback(channel_a2b_));
+  channel_a2b_->RegisterReceiverACM(acm_b_);
+
+  // Start with setting VAD/DTX, before we know we will send stereo.
+  // Continue with setting a stereo codec as send codec and verify that
+  // VAD/DTX gets turned off.
+  EXPECT_EQ(0, acm_a_->SetVAD(true, true, VADNormal));
+  EXPECT_EQ(0, acm_a_->VAD(dtx, vad, vad_mode));
+  EXPECT_TRUE(dtx);
+  EXPECT_TRUE(vad);
+  char codec_pcma_temp[] = "PCMA";
+  RegisterSendCodec('A', codec_pcma_temp, 8000, 64000, 80, 2, pcma_pltype_);
+  EXPECT_EQ(0, acm_a_->VAD(dtx, vad, vad_mode));
+  EXPECT_FALSE(dtx);
+  EXPECT_FALSE(vad);
+  if(test_mode_ != 0) {
+    printf("\n");
+  }
+
+  //
+  // Test Stereo-To-Stereo for all codecs.
+  //
+  audio_channels = 2;
+  codec_channels = 2;
+
+  // All codecs are tested for all allowed sampling frequencies, rates and
+  // packet sizes.
+#ifdef WEBRTC_CODEC_G722
+  if(test_mode_ != 0) {
+    printf("===========================================================\n");
+    printf("Test number: %d\n",test_cntr_ + 1);
+    printf("Test type: Stereo-to-stereo\n");
+  }
+  channel_a2b_->set_codec_mode(kStereo);
+  test_cntr_++;
+  OpenOutFile(test_cntr_);
+  char codec_g722[] = "G722";
+  RegisterSendCodec('A', codec_g722, 16000, 64000, 160, codec_channels,
+      g722_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_g722, 16000, 64000, 320, codec_channels,
+      g722_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_g722, 16000, 64000, 480, codec_channels,
+      g722_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_g722, 16000, 64000, 640, codec_channels,
+      g722_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_g722, 16000, 64000, 800, codec_channels,
+      g722_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_g722, 16000, 64000, 960, codec_channels,
+      g722_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  out_file_.Close();
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+  if(test_mode_ != 0) {
+    printf("===========================================================\n");
+    printf("Test number: %d\n",test_cntr_ + 1);
+    printf("Test type: Stereo-to-stereo\n");
+  }
+  channel_a2b_->set_codec_mode(kStereo);
+  test_cntr_++;
+  OpenOutFile(test_cntr_);
+  char codec_l16[] = "L16";
+  RegisterSendCodec('A', codec_l16, 8000, 128000, 80, codec_channels,
+      l16_8khz_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_l16, 8000, 128000, 160, codec_channels,
+      l16_8khz_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_l16, 8000, 128000, 240, codec_channels,
+      l16_8khz_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_l16, 8000, 128000, 320, codec_channels,
+      l16_8khz_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  out_file_.Close();
+
+  if(test_mode_ != 0) {
+    printf("===========================================================\n");
+    printf("Test number: %d\n",test_cntr_ + 1);
+    printf("Test type: Stereo-to-stereo\n");
+  }
+  test_cntr_++;
+  OpenOutFile(test_cntr_);
+  RegisterSendCodec('A', codec_l16, 16000, 256000, 160, codec_channels,
+      l16_16khz_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_l16, 16000, 256000, 320, codec_channels,
+      l16_16khz_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_l16, 16000, 256000, 480, codec_channels,
+      l16_16khz_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_l16, 16000, 256000, 640, codec_channels,
+      l16_16khz_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  out_file_.Close();
+
+  if(test_mode_ != 0) {
+    printf("===========================================================\n");
+    printf("Test number: %d\n",test_cntr_ + 1);
+    printf("Test type: Stereo-to-stereo\n");
+  }
+  test_cntr_++;
+  OpenOutFile(test_cntr_);
+  RegisterSendCodec('A', codec_l16, 32000, 512000, 320, codec_channels,
+      l16_32khz_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_l16, 32000, 512000, 640, codec_channels,
+      l16_32khz_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  out_file_.Close();
+#endif
+#define PCMA_AND_PCMU
+#ifdef PCMA_AND_PCMU
+  if (test_mode_ != 0) {
+    printf("===========================================================\n");
+    printf("Test number: %d\n", test_cntr_ + 1);
+    printf("Test type: Stereo-to-stereo\n");
+  }
+  channel_a2b_->set_codec_mode(kStereo);
+  audio_channels = 2;
+  codec_channels = 2;
+  test_cntr_++;
+  OpenOutFile(test_cntr_);
+  char codec_pcma[] = "PCMA";
+  RegisterSendCodec('A', codec_pcma, 8000, 64000, 80, codec_channels,
+                    pcma_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_pcma, 8000, 64000, 160, codec_channels,
+                    pcma_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_pcma, 8000, 64000, 240, codec_channels,
+                    pcma_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_pcma, 8000, 64000, 320, codec_channels,
+                    pcma_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_pcma, 8000, 64000, 400, codec_channels,
+                    pcma_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_pcma, 8000, 64000, 480, codec_channels,
+                    pcma_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+
+  // Test that VAD/DTX cannot be turned on while sending stereo.
+  EXPECT_EQ(-1, acm_a_->SetVAD(true, true, VADNormal));
+  EXPECT_EQ(0, acm_a_->VAD(dtx, vad, vad_mode));
+  EXPECT_FALSE(dtx);
+  EXPECT_FALSE(vad);
+  EXPECT_EQ(-1, acm_a_->SetVAD(true, false, VADNormal));
+  EXPECT_EQ(0, acm_a_->VAD(dtx, vad, vad_mode));
+  EXPECT_FALSE(dtx);
+  EXPECT_FALSE(vad);
+  EXPECT_EQ(-1, acm_a_->SetVAD(false, true, VADNormal));
+  EXPECT_EQ(0, acm_a_->VAD(dtx, vad, vad_mode));
+  EXPECT_FALSE(dtx);
+  EXPECT_FALSE(vad);
+  EXPECT_EQ(0, acm_a_->SetVAD(false, false, VADNormal));
+  EXPECT_EQ(0, acm_a_->VAD(dtx, vad, vad_mode));
+  EXPECT_FALSE(dtx);
+  EXPECT_FALSE(vad);
+
+  out_file_.Close();
+  if (test_mode_ != 0) {
+    printf("===========================================================\n");
+    printf("Test number: %d\n", test_cntr_ + 1);
+    printf("Test type: Stereo-to-stereo\n");
+  }
+  test_cntr_++;
+  OpenOutFile(test_cntr_);
+  char codec_pcmu[] = "PCMU";
+  RegisterSendCodec('A', codec_pcmu, 8000, 64000, 80, codec_channels,
+                    pcmu_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_pcmu, 8000, 64000, 160, codec_channels,
+                    pcmu_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_pcmu, 8000, 64000, 240, codec_channels,
+                    pcmu_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_pcmu, 8000, 64000, 320, codec_channels,
+                    pcmu_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_pcmu, 8000, 64000, 400, codec_channels,
+                    pcmu_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_pcmu, 8000, 64000, 480, codec_channels,
+                    pcmu_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  out_file_.Close();
+#endif
+#ifdef WEBRTC_CODEC_CELT
+  if(test_mode_ != 0) {
+    printf("===========================================================\n");
+    printf("Test number: %d\n",test_cntr_ + 1);
+    printf("Test type: Stereo-to-stereo\n");
+  }
+  channel_a2b_->set_codec_mode(kStereo);
+  audio_channels = 2;
+  codec_channels = 2;
+  test_cntr_++;
+  OpenOutFile(test_cntr_);
+  char codec_celt[] = "CELT";
+  RegisterSendCodec('A', codec_celt, 32000, 48000, 640, codec_channels,
+      celt_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_celt, 32000, 64000, 640, codec_channels,
+      celt_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_celt, 32000, 128000, 640, codec_channels,
+      celt_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  out_file_.Close();
+#endif
+  //
+  // Test Mono-To-Stereo for all codecs.
+  //
+  audio_channels = 1;
+  codec_channels = 2;
+
+#ifdef WEBRTC_CODEC_G722
+  if(test_mode_ != 0) {
+    printf("===============================================================\n");
+    printf("Test number: %d\n",test_cntr_ + 1);
+    printf("Test type: Mono-to-stereo\n");
+  }
+  test_cntr_++;
+  channel_a2b_->set_codec_mode(kStereo);
+  OpenOutFile(test_cntr_);
+  RegisterSendCodec('A', codec_g722, 16000, 64000, 160, codec_channels,
+                    g722_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  out_file_.Close();
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+  if(test_mode_ != 0) {
+    printf("===============================================================\n");
+    printf("Test number: %d\n",test_cntr_ + 1);
+    printf("Test type: Mono-to-stereo\n");
+  }
+  test_cntr_++;
+  channel_a2b_->set_codec_mode(kStereo);
+  OpenOutFile(test_cntr_);
+  RegisterSendCodec('A', codec_l16, 8000, 128000, 80, codec_channels,
+                    l16_8khz_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  out_file_.Close();
+  if(test_mode_ != 0) {
+    printf("===============================================================\n");
+    printf("Test number: %d\n",test_cntr_ + 1);
+    printf("Test type: Mono-to-stereo\n");
+  }
+  test_cntr_++;
+  OpenOutFile(test_cntr_);
+  RegisterSendCodec('A', codec_l16, 16000, 256000, 160, codec_channels,
+                    l16_16khz_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  out_file_.Close();
+  if(test_mode_ != 0) {
+    printf("===============================================================\n");
+    printf("Test number: %d\n",test_cntr_ + 1);
+    printf("Test type: Mono-to-stereo\n");
+  }
+  test_cntr_++;
+  OpenOutFile(test_cntr_);
+  RegisterSendCodec('A', codec_l16, 32000, 512000, 320, codec_channels,
+                    l16_32khz_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  out_file_.Close();
+#endif
+#ifdef PCMA_AND_PCMU
+  if(test_mode_ != 0) {
+    printf("===============================================================\n");
+    printf("Test number: %d\n",test_cntr_ + 1);
+    printf("Test type: Mono-to-stereo\n");
+  }
+  test_cntr_++;
+  channel_a2b_->set_codec_mode(kStereo);
+  OpenOutFile(test_cntr_);
+  RegisterSendCodec('A', codec_pcmu, 8000, 64000, 80, codec_channels,
+                    pcmu_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_pcma, 8000, 64000, 80, codec_channels,
+                    pcma_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  out_file_.Close();
+#endif
+#ifdef WEBRTC_CODEC_CELT
+  if(test_mode_ != 0) {
+    printf("===============================================================\n");
+    printf("Test number: %d\n",test_cntr_ + 1);
+    printf("Test type: Mono-to-stereo\n");
+  }
+  test_cntr_++;
+  channel_a2b_->set_codec_mode(kStereo);
+  OpenOutFile(test_cntr_);
+  RegisterSendCodec('A', codec_celt, 32000, 64000, 640, codec_channels,
+                    celt_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  out_file_.Close();
+#endif
+
+  //
+  // Test Stereo-To-Mono for all codecs.
+  //
+  audio_channels = 2;
+  codec_channels = 1;
+  channel_a2b_->set_codec_mode(kMono);
+
+#ifdef WEBRTC_CODEC_G722
+  // Run stereo audio and mono codec.
+  if(test_mode_ != 0) {
+    printf("===============================================================\n");
+    printf("Test number: %d\n",test_cntr_ + 1);
+    printf("Test type: Stereo-to-mono\n");
+  }
+  test_cntr_++;
+  OpenOutFile(test_cntr_);
+  RegisterSendCodec('A', codec_g722, 16000, 64000, 160, codec_channels,
+                    g722_pltype_);
+
+
+  // Make sure it is possible to set VAD/CNG, now that we are sending mono
+  // again.
+  EXPECT_EQ(0, acm_a_->SetVAD(true, true, VADNormal));
+  EXPECT_EQ(0, acm_a_->VAD(dtx, vad, vad_mode));
+  EXPECT_TRUE(dtx);
+  EXPECT_TRUE(vad);
+  EXPECT_EQ(0, acm_a_->SetVAD(false, false, VADNormal));
+  Run(channel_a2b_, audio_channels, codec_channels);
+  out_file_.Close();
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+  if(test_mode_ != 0) {
+    printf("===============================================================\n");
+    printf("Test number: %d\n",test_cntr_ + 1);
+    printf("Test type: Stereo-to-mono\n");
+  }
+  test_cntr_++;
+  OpenOutFile(test_cntr_);
+  RegisterSendCodec('A', codec_l16, 8000, 128000, 80, codec_channels,
+                    l16_8khz_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  out_file_.Close();
+  if(test_mode_ != 0) {
+    printf("===============================================================\n");
+    printf("Test number: %d\n",test_cntr_ + 1);
+    printf("Test type: Stereo-to-mono\n");
+   }
+  test_cntr_++;
+  OpenOutFile(test_cntr_);
+  RegisterSendCodec('A', codec_l16, 16000, 256000, 160, codec_channels,
+                    l16_16khz_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  out_file_.Close();
+  if(test_mode_ != 0) {
+     printf("==============================================================\n");
+     printf("Test number: %d\n",test_cntr_ + 1);
+     printf("Test type: Stereo-to-mono\n");
+   }
+   test_cntr_++;
+   OpenOutFile(test_cntr_);
+   RegisterSendCodec('A', codec_l16, 32000, 512000, 320, codec_channels,
+                     l16_32khz_pltype_);
+   Run(channel_a2b_, audio_channels, codec_channels);
+   out_file_.Close();
+#endif
+#ifdef PCMA_AND_PCMU
+  if(test_mode_ != 0) {
+    printf("===============================================================\n");
+    printf("Test number: %d\n",test_cntr_ + 1);
+    printf("Test type: Stereo-to-mono\n");
+  }
+  test_cntr_++;
+  OpenOutFile(test_cntr_);
+  RegisterSendCodec('A', codec_pcmu, 8000, 64000, 80, codec_channels,
+                    pcmu_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  RegisterSendCodec('A', codec_pcma, 8000, 64000, 80, codec_channels,
+                    pcma_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  out_file_.Close();
+#endif
+#ifdef WEBRTC_CODEC_CELT
+  if(test_mode_ != 0) {
+    printf("===============================================================\n");
+    printf("Test number: %d\n",test_cntr_ + 1);
+    printf("Test type: Stereo-to-mono\n");
+  }
+  test_cntr_++;
+  OpenOutFile(test_cntr_);
+  RegisterSendCodec('A', codec_celt, 32000, 64000, 640, codec_channels,
+                    celt_pltype_);
+  Run(channel_a2b_, audio_channels, codec_channels);
+  out_file_.Close();
+#endif
+
+  // Print out which codecs were tested, and which were not, in the run.
+  if (test_mode_ != 0) {
+    printf("\nThe following codecs was INCLUDED in the test:\n");
+#ifdef WEBRTC_CODEC_G722
+    printf("   G.722\n");
+#endif
+#ifdef WEBRTC_CODEC_PCM16
+    printf("   PCM16\n");
+#endif
+    printf("   G.711\n");
+#ifdef WEBRTC_CODEC_CELT
+    printf("   CELT\n");
+#endif
+    printf("\nTo complete the test, listen to the %d number of output "
+           "files.\n",
+           test_cntr_);
+  }
+
+  // Delete the file pointers.
+  delete in_file_stereo_;
+  delete in_file_mono_;
+}
+
+// Register Codec to use in the test
+//
+// Input:   side             - which ACM to use, 'A' or 'B'
+//          codec_name       - name to use when register the codec
+//          sampling_freq_hz - sampling frequency in Herz
+//          rate             - bitrate in bytes
+//          pack_size        - packet size in samples
+//          channels         - number of channels; 1 for mono, 2 for stereo
+//          payload_type     - payload type for the codec
+void TestStereo::RegisterSendCodec(char side, char* codec_name,
+                                   WebRtc_Word32 sampling_freq_hz, int rate,
+                                   int pack_size, int channels,
+                                   int payload_type) {
+  if (test_mode_ != 0) {
+    // Print out codec and settings
+    printf("Codec: %s Freq: %d Rate: %d PackSize: %d\n", codec_name,
+           sampling_freq_hz, rate, pack_size);
+  }
+
+  // Store packet size in samples, used to validate the received packet
+  pack_size_samp_ = pack_size;
+
+  // Store the expected packet size in bytes, used to validate the received
+  // packet. Add 0.875 to always round up to a whole byte.
+  // For Celt the packet size in bytes is already counting the stereo part.
+  if (!strcmp(codec_name, "CELT")) {
+    pack_size_bytes_ = (WebRtc_UWord16)(
+        (float) (pack_size * rate) / (float) (sampling_freq_hz * 8) + 0.875)
+        / channels;
+  } else {
+    pack_size_bytes_ = (WebRtc_UWord16)(
+        (float) (pack_size * rate) / (float) (sampling_freq_hz * 8) + 0.875);
+  }
+
+  // Set pointer to the ACM where to register the codec
+  AudioCodingModule* my_acm = NULL;
+  switch (side) {
+    case 'A': {
+      my_acm = acm_a_;
+      break;
+    }
+    case 'B': {
+      my_acm = acm_b_;
+      break;
+    }
+    default:
+      break;
+  }
+  ASSERT_TRUE(my_acm != NULL);
+
+  CodecInst my_codec_param;
+  // Get all codec parameters before registering
+  CHECK_ERROR(AudioCodingModule::Codec(codec_name, my_codec_param,
+                                       sampling_freq_hz, channels));
+  my_codec_param.rate = rate;
+  my_codec_param.pacsize = pack_size;
+  CHECK_ERROR(my_acm->RegisterSendCodec(my_codec_param));
+}
+
+void TestStereo::Run(TestPackStereo* channel, int in_channels, int out_channels,
+                     int percent_loss) {
+  AudioFrame audio_frame;
+
+  WebRtc_Word32 out_freq_hz_b = out_file_.SamplingFrequency();
+  WebRtc_UWord16 rec_size;
+  WebRtc_UWord32 time_stamp_diff;
+  channel->reset_payload_size();
+  int error_count = 0;
+
+  while (1) {
+    // Simulate packet loss by setting |packet_loss_| to "true" in
+    // |percent_loss| percent of the loops.
+    if (percent_loss > 0) {
+      if (counter_ == floor((100 / percent_loss) + 0.5)) {
+        counter_ = 0;
+        channel->set_lost_packet(true);
+      } else {
+        channel->set_lost_packet(false);
+      }
+      counter_++;
+    }
+
+    // Add 10 msec to ACM
+    if (in_channels == 1) {
+      if (in_file_mono_->EndOfFile()) {
+        break;
+      }
+      in_file_mono_->Read10MsData(audio_frame);
+    } else {
+      if (in_file_stereo_->EndOfFile()) {
+        break;
+      }
+      in_file_stereo_->Read10MsData(audio_frame);
+    }
+    CHECK_ERROR(acm_a_->Add10MsData(audio_frame));
+
+    // Run sender side of ACM
+    CHECK_ERROR(acm_a_->Process());
+
+    // Verify that the received packet size matches the settings
+    rec_size = channel->payload_size();
+    if ((0 < rec_size) & (rec_size < 65535)) {
+      if ((rec_size != pack_size_bytes_ * out_channels)
+          && (pack_size_bytes_ < 65535)) {
+        error_count++;
+      }
+
+      // Verify that the timestamp is updated with expected length
+      time_stamp_diff = channel->timestamp_diff();
+      if ((counter_ > 10) && (time_stamp_diff != pack_size_samp_)) {
+        error_count++;
+      }
+    }
+
+    // Run received side of ACM
+    CHECK_ERROR(acm_b_->PlayoutData10Ms(out_freq_hz_b, audio_frame));
+
+    // Write output speech to file
+    out_file_.Write10MsData(
+        audio_frame.data_,
+        audio_frame.samples_per_channel_ * audio_frame.num_channels_);
+  }
+
+  EXPECT_EQ(0, error_count);
+
+  if (in_file_mono_->EndOfFile()) {
+    in_file_mono_->Rewind();
+  }
+  if (in_file_stereo_->EndOfFile()) {
+    in_file_stereo_->Rewind();
+  }
+  // Reset in case we ended with a lost packet
+  channel->set_lost_packet(false);
+}
+
+void TestStereo::OpenOutFile(WebRtc_Word16 test_number) {
+  std::string file_name;
+  std::stringstream file_stream;
+  file_stream << webrtc::test::OutputPath() << "teststereo_out_"
+      << test_number << ".pcm";
+  file_name = file_stream.str();
+  out_file_.Open(file_name, 32000, "wb");
+}
+
+void TestStereo::DisplaySendReceiveCodec() {
+  CodecInst my_codec_param;
+  acm_a_->SendCodec(my_codec_param);
+  if (test_mode_ != 0) {
+    printf("%s -> ", my_codec_param.plname);
+  }
+  acm_b_->ReceiveCodec(my_codec_param);
+  if (test_mode_ != 0) {
+    printf("%s\n", my_codec_param.plname);
+  }
+}
+
+}  // namespace webrtc
diff --git a/src/modules/audio_coding/main/test/TestStereo.h b/src/modules/audio_coding/main/test/TestStereo.h
new file mode 100644
index 0000000..3023139
--- /dev/null
+++ b/src/modules/audio_coding/main/test/TestStereo.h
@@ -0,0 +1,116 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TEST_STEREO_H_
+#define WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TEST_STEREO_H_
+
+#include <math.h>
+
+#include "ACMTest.h"
+#include "Channel.h"
+#include "PCMFile.h"
+
+namespace webrtc {
+
+enum StereoMonoMode {
+  kNotSet,
+  kMono,
+  kStereo
+};
+
+class TestPackStereo : public AudioPacketizationCallback {
+ public:
+  TestPackStereo();
+  ~TestPackStereo();
+
+  void RegisterReceiverACM(AudioCodingModule* acm);
+
+  virtual WebRtc_Word32 SendData(const FrameType frame_type,
+                                 const WebRtc_UWord8 payload_type,
+                                 const WebRtc_UWord32 timestamp,
+                                 const WebRtc_UWord8* payload_data,
+                                 const WebRtc_UWord16 payload_size,
+                                 const RTPFragmentationHeader* fragmentation);
+
+  WebRtc_UWord16 payload_size();
+  WebRtc_UWord32 timestamp_diff();
+  void reset_payload_size();
+  void set_codec_mode(StereoMonoMode mode);
+  void set_lost_packet(bool lost);
+
+ private:
+  AudioCodingModule* receiver_acm_;
+  WebRtc_Word16 seq_no_;
+  WebRtc_UWord32 timestamp_diff_;
+  WebRtc_UWord32 last_in_timestamp_;
+  WebRtc_UWord64 total_bytes_;
+  WebRtc_UWord16 payload_size_;
+  StereoMonoMode codec_mode_;
+  // Simulate packet losses
+  bool lost_packet_;
+};
+
+class TestStereo : public ACMTest {
+ public:
+  TestStereo(int test_mode);
+  ~TestStereo();
+
+  void Perform();
+ private:
+  // The default value of '-1' indicates that the registration is based only on
+  // codec name and a sampling frequncy matching is not required. This is useful
+  // for codecs which support several sampling frequency.
+  void RegisterSendCodec(char side, char* codec_name,
+                         WebRtc_Word32 samp_freq_hz, int rate, int pack_size,
+                         int channels, int payload_type);
+
+  void Run(TestPackStereo* channel, int in_channels, int out_channels,
+           int percent_loss = 0);
+  void OpenOutFile(WebRtc_Word16 test_number);
+  void DisplaySendReceiveCodec();
+
+  WebRtc_Word32 SendData(const FrameType frame_type,
+                         const WebRtc_UWord8 payload_type,
+                         const WebRtc_UWord32 timestamp,
+                         const WebRtc_UWord8* payload_data,
+                         const WebRtc_UWord16 payload_size,
+                         const RTPFragmentationHeader* fragmentation);
+
+  int test_mode_;
+
+  AudioCodingModule* acm_a_;
+  AudioCodingModule* acm_b_;
+
+  TestPackStereo* channel_a2b_;
+
+  PCMFile* in_file_stereo_;
+  PCMFile* in_file_mono_;
+  PCMFile out_file_;
+  WebRtc_Word16 test_cntr_;
+  WebRtc_UWord16 pack_size_samp_;
+  WebRtc_UWord16 pack_size_bytes_;
+  int counter_;
+
+  // Payload types for stereo codecs and CNG
+  int g722_pltype_;
+  int l16_8khz_pltype_;
+  int l16_16khz_pltype_;
+  int l16_32khz_pltype_;
+  int pcma_pltype_;
+  int pcmu_pltype_;
+  int celt_pltype_;
+  int cn_8khz_pltype_;
+  int cn_16khz_pltype_;
+  int cn_32khz_pltype_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_CODING_MAIN_TEST_TEST_STEREO_H_
diff --git a/src/modules/audio_coding/main/test/TestVADDTX.cc b/src/modules/audio_coding/main/test/TestVADDTX.cc
new file mode 100644
index 0000000..793ab579
--- /dev/null
+++ b/src/modules/audio_coding/main/test/TestVADDTX.cc
@@ -0,0 +1,511 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "TestVADDTX.h"
+
+#include <iostream>
+
+#include "audio_coding_module_typedefs.h"
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "testsupport/fileutils.h"
+#include "trace.h"
+#include "utility.h"
+
+namespace webrtc {
+
+TestVADDTX::TestVADDTX(int testMode):
+_acmA(NULL),
+_acmB(NULL),
+_channelA2B(NULL),
+_testResults(0)
+{
+    //testMode == 1 for more extensive testing
+    //testMode == 0 for quick test (autotest)
+   _testMode = testMode;
+}
+
+TestVADDTX::~TestVADDTX()
+{
+    if(_acmA != NULL)
+    {
+        AudioCodingModule::Destroy(_acmA);
+        _acmA = NULL;
+    }
+    if(_acmB != NULL)
+    {
+        AudioCodingModule::Destroy(_acmB);
+        _acmB = NULL;
+    }
+    if(_channelA2B != NULL)
+    {
+        delete _channelA2B;
+        _channelA2B = NULL;
+    }
+}
+
+void TestVADDTX::Perform()
+{
+    if(_testMode == 0)
+    {
+        printf("Running VAD/DTX Test");
+        WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceAudioCoding, -1,
+                     "---------- TestVADDTX ----------");
+    }
+
+    const std::string file_name =
+        webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
+    _inFileA.Open(file_name, 32000, "rb");
+
+    _acmA = AudioCodingModule::Create(0);
+    _acmB = AudioCodingModule::Create(1);
+
+    _acmA->InitializeReceiver();
+    _acmB->InitializeReceiver();
+
+    WebRtc_UWord8 numEncoders = _acmA->NumberOfCodecs();
+    CodecInst myCodecParam;
+    if(_testMode != 0)
+    {
+        printf("Registering codecs at receiver... \n");
+    }
+    for(WebRtc_UWord8 n = 0; n < numEncoders; n++)
+    {
+        _acmB->Codec(n, myCodecParam);
+        if(_testMode != 0)
+        {
+            printf("%s\n", myCodecParam.plname);
+        }
+        _acmB->RegisterReceiveCodec(myCodecParam);
+    }
+
+    // Create and connect the channel
+    _channelA2B = new Channel;
+    _acmA->RegisterTransportCallback(_channelA2B);
+    _channelA2B->RegisterReceiverACM(_acmB);
+
+    _acmA->RegisterVADCallback(&_monitor);
+
+
+    WebRtc_Word16 testCntr = 1;
+    WebRtc_Word16 testResults = 0;
+
+#ifdef WEBRTC_CODEC_ISAC
+    // Open outputfile
+    OpenOutFile(testCntr++);
+
+    // Register iSAC WB as send codec
+    char nameISAC[] = "ISAC";
+    RegisterSendCodec('A', nameISAC, 16000);
+
+    // Run the five test cased
+    runTestCases();
+
+    // Close file
+    _outFileB.Close();
+
+   // Open outputfile
+    OpenOutFile(testCntr++);
+
+    // Register iSAC SWB as send codec
+    RegisterSendCodec('A', nameISAC, 32000);
+
+    // Run the five test cased
+    runTestCases();
+
+    // Close file
+    _outFileB.Close();
+#endif
+#ifdef WEBRTC_CODEC_ILBC
+    // Open outputfile
+    OpenOutFile(testCntr++);
+
+    // Register iLBC as send codec
+    char nameILBC[] = "ilbc";
+    RegisterSendCodec('A', nameILBC);
+
+    // Run the five test cased
+    runTestCases();
+
+    // Close file
+    _outFileB.Close();
+
+#endif
+    if(_testMode) {
+        printf("Done!\n");
+    }
+
+    printf("VAD/DTX test completed with %d subtests failed\n", testResults);
+    if (testResults > 0)
+    {
+        printf("Press return\n\n");
+        getchar();
+    }
+}
+
+void TestVADDTX::runTestCases()
+{
+    if(_testMode != 0)
+    {
+        CodecInst myCodecParam;
+        _acmA->SendCodec(myCodecParam);
+        printf("%s\n", myCodecParam.plname);
+    }
+    else
+    {
+        printf(".");
+    }
+    // #1 DTX = OFF, VAD = ON, VADNormal
+    if(_testMode != 0)
+        printf("Test #1 ");
+    SetVAD(false, true, VADNormal);
+    Run();
+    _testResults += VerifyTest();
+
+    // #2 DTX = OFF, VAD = ON, VADAggr
+    if(_testMode != 0)
+        printf("Test #2 ");
+    SetVAD(false, true, VADAggr);
+    Run();
+    _testResults += VerifyTest();
+
+    // #3 DTX = ON, VAD = ON, VADLowBitrate
+    if(_testMode != 0)
+        printf("Test #3 ");
+    SetVAD(true, true, VADLowBitrate);
+    Run();
+    _testResults += VerifyTest();
+
+    // #4 DTX = ON, VAD = ON, VADVeryAggr
+    if(_testMode != 0)
+        printf("Test #4 ");
+    SetVAD(true, true, VADVeryAggr);
+    Run();
+    _testResults += VerifyTest();
+
+    // #5 DTX = ON, VAD = OFF, VADNormal
+    if(_testMode != 0)
+        printf("Test #5 ");
+    SetVAD(true, false, VADNormal);
+    Run();
+    _testResults += VerifyTest();
+
+}
+void TestVADDTX::runTestInternalDTX()
+{
+    // #6 DTX = ON, VAD = ON, VADNormal
+    if(_testMode != 0)
+        printf("Test #6 ");
+
+    SetVAD(true, true, VADNormal);
+    if(_acmA->ReplaceInternalDTXWithWebRtc(true) < 0) {
+        printf("Was not able to replace DTX since CN was not registered\n");
+     }
+    Run();
+    _testResults += VerifyTest();
+}
+
+void TestVADDTX::SetVAD(bool statusDTX, bool statusVAD, WebRtc_Word16 vadMode)
+{
+    bool dtxEnabled, vadEnabled;
+    ACMVADMode vadModeSet;
+
+    if (_acmA->SetVAD(statusDTX, statusVAD, (ACMVADMode) vadMode) < 0) {
+      assert(false);
+    }
+    if (_acmA->VAD(dtxEnabled, vadEnabled, vadModeSet) < 0) {
+      assert(false);
+    }
+
+    if(_testMode != 0)
+    {
+        if(statusDTX != dtxEnabled)
+        {
+            printf("DTX: %s not the same as requested: %s\n",
+            dtxEnabled? "ON":"OFF", dtxEnabled? "OFF":"ON");
+        }
+        if(((statusVAD == true) && (vadEnabled == false)) ||
+           ((statusVAD == false) && (vadEnabled == false) &&
+               (statusDTX == true)))
+        {
+            printf("VAD: %s not the same as requested: %s\n",
+            vadEnabled? "ON":"OFF", vadEnabled? "OFF":"ON");
+        }
+        if(vadModeSet != vadMode)
+        {
+            printf("VAD mode: %d not the same as requested: %d\n",
+            (WebRtc_Word16)vadModeSet, (WebRtc_Word16)vadMode);
+        }
+    }
+
+    // Requested VAD/DTX settings
+    _setStruct.statusDTX = statusDTX;
+    _setStruct.statusVAD = statusVAD;
+    _setStruct.vadMode = (ACMVADMode) vadMode;
+
+    // VAD settings after setting VAD in ACM
+    _getStruct.statusDTX = dtxEnabled;
+    _getStruct.statusVAD = vadEnabled;
+    _getStruct.vadMode = vadModeSet;
+
+}
+
+VADDTXstruct TestVADDTX::GetVAD()
+{
+    VADDTXstruct retStruct;
+    bool dtxEnabled, vadEnabled;
+    ACMVADMode vadModeSet;
+
+    if (_acmA->VAD(dtxEnabled, vadEnabled, vadModeSet) < 0) {
+      assert(false);
+    }
+
+    retStruct.statusDTX = dtxEnabled;
+    retStruct.statusVAD = vadEnabled;
+    retStruct.vadMode = vadModeSet;
+    return retStruct;
+}
+
+WebRtc_Word16 TestVADDTX::RegisterSendCodec(char side,
+                                          char* codecName,
+                                          WebRtc_Word32 samplingFreqHz,
+                                          WebRtc_Word32 rateKbps)
+{
+    if(_testMode != 0)
+    {
+        printf("Registering %s for side %c\n", codecName, side);
+    }
+    std::cout << std::flush;
+    AudioCodingModule* myACM;
+    switch(side)
+    {
+    case 'A':
+        {
+            myACM = _acmA;
+            break;
+        }
+    case 'B':
+        {
+            myACM = _acmB;
+            break;
+        }
+    default:
+        return -1;
+    }
+
+    if(myACM == NULL)
+    {
+        return -1;
+    }
+
+    CodecInst myCodecParam;
+    for(WebRtc_Word16 codecCntr = 0; codecCntr < myACM->NumberOfCodecs();
+        codecCntr++)
+    {
+        CHECK_ERROR(myACM->Codec((WebRtc_UWord8)codecCntr, myCodecParam));
+        if(!STR_CASE_CMP(myCodecParam.plname, codecName))
+        {
+            if((samplingFreqHz == -1) || (myCodecParam.plfreq == samplingFreqHz))
+            {
+                if((rateKbps == -1) || (myCodecParam.rate == rateKbps))
+                {
+                    break;
+                }
+            }
+        }
+    }
+
+    CHECK_ERROR(myACM->RegisterSendCodec(myCodecParam));
+
+    // initialization was succesful
+    return 0;
+}
+
+void TestVADDTX::Run()
+{
+    AudioFrame audioFrame;
+
+    WebRtc_UWord16 SamplesIn10MsecA = _inFileA.PayloadLength10Ms();
+    WebRtc_UWord32 timestampA = 1;
+    WebRtc_Word32 outFreqHzB = _outFileB.SamplingFrequency();
+
+    while(!_inFileA.EndOfFile())
+    {
+        _inFileA.Read10MsData(audioFrame);
+        audioFrame.timestamp_ = timestampA;
+        timestampA += SamplesIn10MsecA;
+        CHECK_ERROR(_acmA->Add10MsData(audioFrame));
+
+        CHECK_ERROR(_acmA->Process());
+
+        CHECK_ERROR(_acmB->PlayoutData10Ms(outFreqHzB, audioFrame));
+        _outFileB.Write10MsData(audioFrame.data_, audioFrame.samples_per_channel_);
+    }
+#ifdef PRINT_STAT
+    _monitor.PrintStatistics(_testMode);
+#endif
+    _inFileA.Rewind();
+    _monitor.GetStatistics(_statCounter);
+    _monitor.ResetStatistics();
+}
+
+void TestVADDTX::OpenOutFile(WebRtc_Word16 test_number) {
+  std::string file_name;
+  std::stringstream file_stream;
+  file_stream << webrtc::test::OutputPath();
+  if (_testMode == 0) {
+    file_stream << "testVADDTX_autoFile_";
+  } else {
+    file_stream << "testVADDTX_outFile_";
+  }
+  file_stream << test_number << ".pcm";
+  file_name = file_stream.str();
+  _outFileB.Open(file_name, 16000, "wb");
+}
+
+
+WebRtc_Word16 TestVADDTX::VerifyTest()
+{
+    // Verify empty frame result
+    WebRtc_UWord8 statusEF = 0;
+    WebRtc_UWord8 vadPattern = 0;
+    WebRtc_UWord8 emptyFramePattern[6];
+    CodecInst myCodecParam;
+    _acmA->SendCodec(myCodecParam);
+    bool dtxInUse = true;
+    bool isReplaced = false;
+    if ((STR_CASE_CMP(myCodecParam.plname,"G729") == 0) ||
+        (STR_CASE_CMP(myCodecParam.plname,"G723") == 0) ||
+        (STR_CASE_CMP(myCodecParam.plname,"AMR") == 0) ||
+        (STR_CASE_CMP(myCodecParam.plname,"AMR-wb") == 0) ||
+        (STR_CASE_CMP(myCodecParam.plname,"speex") == 0))
+    {
+        _acmA->IsInternalDTXReplacedWithWebRtc(isReplaced);
+        if (!isReplaced)
+        {
+            dtxInUse = false;
+        }
+    }
+
+    // Check for error in VAD/DTX settings
+    if (_getStruct.statusDTX != _setStruct.statusDTX){
+        // DTX status doesn't match expected
+        vadPattern |= 4;
+    }
+    if (_getStruct.statusDTX){
+        if ((!_getStruct.statusVAD && dtxInUse) || (!dtxInUse && (_getStruct.statusVAD !=_setStruct.statusVAD)))
+        {
+            // Missmatch in VAD setting
+            vadPattern |= 2;
+        }
+    } else {
+        if (_getStruct.statusVAD != _setStruct.statusVAD){
+            // VAD status doesn't match expected
+            vadPattern |= 2;
+        }
+    }
+    if (_getStruct.vadMode != _setStruct.vadMode){
+        // VAD Mode doesn't match expected
+        vadPattern |= 1;
+    }
+
+    // Set expected empty frame pattern
+    int ii;
+    for (ii = 0; ii < 6; ii++) {
+        emptyFramePattern[ii] = 0;
+    }
+    emptyFramePattern[0] = 1; // "kNoEncoding", not important to check. Codecs with packetsize != 80 samples will get this output.
+    emptyFramePattern[1] = 1; // Expect to always receive some frames labeled "kActiveNormalEncoded"
+    emptyFramePattern[2] = (((!_getStruct.statusDTX && _getStruct.statusVAD) || (!dtxInUse && _getStruct.statusDTX))); // "kPassiveNormalEncoded"
+    emptyFramePattern[3] = ((_getStruct.statusDTX && dtxInUse && (_acmA->SendFrequency() == 8000))); // "kPassiveDTXNB"
+    emptyFramePattern[4] = ((_getStruct.statusDTX && dtxInUse && (_acmA->SendFrequency() == 16000))); // "kPassiveDTXWB"
+    emptyFramePattern[5] = ((_getStruct.statusDTX && dtxInUse && (_acmA->SendFrequency() == 32000))); // "kPassiveDTXSWB"
+
+    // Check pattern 1-5 (skip 0)
+    for (int ii = 1; ii < 6; ii++)
+    {
+        if (emptyFramePattern[ii])
+        {
+            statusEF |= (_statCounter[ii] == 0);
+        }
+        else
+        {
+            statusEF |= (_statCounter[ii] > 0);
+        }
+    }
+    if ((statusEF == 0) && (vadPattern == 0))
+    {
+        if(_testMode != 0)
+        {
+            printf(" Test OK!\n");
+        }
+        return 0;
+    }
+    else
+    {
+        if (statusEF)
+        {
+            printf("\t\t\tUnexpected empty frame result!\n");
+        }
+        if (vadPattern)
+        {
+            printf("\t\t\tUnexpected SetVAD() result!\tDTX: %d\tVAD: %d\tMode: %d\n", (vadPattern >> 2) & 1, (vadPattern >> 1) & 1, vadPattern & 1);
+        }
+        return 1;
+    }
+}
+
+ActivityMonitor::ActivityMonitor()
+{
+    _counter[0] = _counter[1] = _counter[2] = _counter[3] = _counter[4] = _counter[5] = 0;
+}
+
+ActivityMonitor::~ActivityMonitor()
+{
+}
+
+WebRtc_Word32 ActivityMonitor::InFrameType(WebRtc_Word16 frameType)
+{
+    _counter[frameType]++;
+    return 0;
+}
+
+void ActivityMonitor::PrintStatistics(int testMode)
+{
+    if(testMode != 0)
+    {
+        printf("\n");
+        printf("kActiveNormalEncoded  kPassiveNormalEncoded  kPassiveDTXWB  kPassiveDTXNB kPassiveDTXSWB kFrameEmpty\n");
+
+        printf("%19u", _counter[1]);
+        printf("%22u", _counter[2]);
+        printf("%14u", _counter[3]);
+        printf("%14u", _counter[4]);
+        printf("%14u", _counter[5]);
+        printf("%11u", _counter[0]);
+
+        printf("\n\n");
+    }
+}
+
+void ActivityMonitor::ResetStatistics()
+{
+    _counter[0] = _counter[1] = _counter[2] = _counter[3] = _counter[4] = _counter[5] = 0;
+}
+
+void ActivityMonitor::GetStatistics(WebRtc_UWord32* getCounter)
+{
+    for (int ii = 0; ii < 6; ii++)
+    {
+        getCounter[ii] = _counter[ii];
+    }
+}
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/test/TestVADDTX.h b/src/modules/audio_coding/main/test/TestVADDTX.h
new file mode 100644
index 0000000..e8f9e1e
--- /dev/null
+++ b/src/modules/audio_coding/main/test/TestVADDTX.h
@@ -0,0 +1,90 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TEST_VAD_DTX_H
+#define TEST_VAD_DTX_H
+
+#include "ACMTest.h"
+#include "Channel.h"
+#include "PCMFile.h"
+
+namespace webrtc {
+
+typedef struct 
+{
+    bool statusDTX;
+    bool statusVAD;
+    ACMVADMode vadMode;
+} VADDTXstruct;
+
+class ActivityMonitor : public ACMVADCallback
+{
+public:
+    ActivityMonitor();
+    ~ActivityMonitor();
+    WebRtc_Word32 InFrameType(WebRtc_Word16 frameType);
+    void PrintStatistics(int testMode);
+    void ResetStatistics();
+    void GetStatistics(WebRtc_UWord32* getCounter);
+private:
+    // counting according to
+    /*enum WebRtcACMEncodingType
+    {
+        kNoEncoding,
+        kActiveNormalEncoded,
+        kPassiveNormalEncoded,
+        kPassiveDTXNB,
+        kPassiveDTXWB,
+        kPassiveDTXSWB
+    };*/
+    WebRtc_UWord32 _counter[6];
+};
+
+class TestVADDTX : public ACMTest
+{
+public:
+    TestVADDTX(int testMode);
+    ~TestVADDTX();
+
+    void Perform();
+private:
+    // Registration can be based on codec name only, codec name and sampling frequency, or 
+    // codec name, sampling frequency and rate.
+    WebRtc_Word16 RegisterSendCodec(char side, 
+        char* codecName, 
+        WebRtc_Word32 samplingFreqHz = -1,
+        WebRtc_Word32 rateKhz = -1);
+    void Run();
+    void OpenOutFile(WebRtc_Word16 testNumber);
+    void runTestCases();
+    void runTestInternalDTX();
+    void SetVAD(bool statusDTX, bool statusVAD, WebRtc_Word16 vadMode);
+    VADDTXstruct GetVAD();
+    WebRtc_Word16 VerifyTest();//VADDTXstruct setDTX, VADDTXstruct getDTX);
+    AudioCodingModule* _acmA;
+    AudioCodingModule* _acmB;
+
+    Channel*               _channelA2B;
+
+    PCMFile                _inFileA;
+    PCMFile                _outFileB;
+
+    ActivityMonitor        _monitor;
+    WebRtc_UWord32           _statCounter[6];
+
+    int                    _testMode;
+    int                    _testResults;
+    VADDTXstruct           _setStruct;
+    VADDTXstruct           _getStruct;
+};
+
+} // namespace webrtc
+
+#endif
diff --git a/src/modules/audio_coding/main/test/Tester.cc b/src/modules/audio_coding/main/test/Tester.cc
new file mode 100644
index 0000000..c6ac601
--- /dev/null
+++ b/src/modules/audio_coding/main/test/Tester.cc
@@ -0,0 +1,142 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <string>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "APITest.h"
+#include "audio_coding_module.h"
+#include "EncodeDecodeTest.h"
+#include "iSACTest.h"
+#include "TestAllCodecs.h"
+#include "TestFEC.h"
+#include "TestStereo.h"
+#include "testsupport/fileutils.h"
+#include "TestVADDTX.h"
+#include "trace.h"
+#include "TwoWayCommunication.h"
+
+using webrtc::AudioCodingModule;
+using webrtc::Trace;
+
+// This parameter is used to describe how to run the tests. It is normally
+// set to 1, but in auto test all printing is turned off, and the parameter is
+// set to 0.
+#define ACM_TEST_MODE 1
+
+// TODO(tlegrand): Add all tests as individual gtests, like already done for
+// TestAllCodecs (ACM_TEST_ALL_ENC_DEC).
+
+// Choose what tests to run by defining one or more of the following:
+//
+// ACM_AUTO_TEST - Most common codecs and settings will be tested. All the
+//                 other tests will be activated.
+// ACM_TEST_ENC_DEC - You decide what to test in run time. Used for debugging
+//                    and for testing while implementing.
+// ACM_TEST_TWO_WAY - Mainly for debugging.
+// ACM_TEST_ALL_CODECS - Loop through all defined codecs and settings.
+// ACM_TEST_STEREO - Run stereo and spatial audio tests.
+// ACM_TEST_VAD_DTX - Run all VAD/DTX tests.
+// ACM_TEST_FEC - Test FEC (also called RED).
+// ACM_TEST_CODEC_SPEC_API - Test the iSAC has codec specfic APIs.
+// ACM_TEST_FULL_API - Test all APIs with threads (long test).
+
+#define ACM_AUTO_TEST
+//#define ACM_TEST_ENC_DEC
+//#define ACM_TEST_TWO_WAY
+//#define ACM_TEST_ALL_CODECS
+//#define ACM_TEST_STEREO
+//#define ACM_TEST_VAD_DTX
+//#define ACM_TEST_FEC
+//#define ACM_TEST_CODEC_SPEC_API
+//#define ACM_TEST_FULL_API
+
+// If Auto test is active, we activate all tests.
+#ifdef ACM_AUTO_TEST
+#undef ACM_TEST_MODE
+#define ACM_TEST_MODE 0
+#ifndef ACM_TEST_ALL_CODECS
+#define ACM_TEST_ALL_CODECS
+#endif
+#endif
+
+void PopulateTests(std::vector<ACMTest*>* tests) {
+  Trace::CreateTrace();
+  Trace::SetTraceFile((webrtc::test::OutputPath() + "acm_trace.txt").c_str());
+
+  printf("The following tests will be executed:\n");
+#ifdef ACM_AUTO_TEST
+  printf("  ACM auto test\n");
+  tests->push_back(new webrtc::EncodeDecodeTest(0));
+  tests->push_back(new webrtc::TwoWayCommunication(0));
+  tests->push_back(new webrtc::TestStereo(0));
+  tests->push_back(new webrtc::TestVADDTX(0));
+  tests->push_back(new webrtc::TestFEC(0));
+  tests->push_back(new webrtc::ISACTest(0));
+#endif
+#ifdef ACM_TEST_ENC_DEC
+  printf("  ACM encode-decode test\n");
+  tests->push_back(new webrtc::EncodeDecodeTest(2));
+#endif
+#ifdef ACM_TEST_TWO_WAY
+  printf("  ACM two-way communication test\n");
+  tests->push_back(new webrtc::TwoWayCommunication(1));
+#endif
+#ifdef ACM_TEST_STEREO
+  printf("  ACM stereo test\n");
+  tests->push_back(new webrtc::TestStereo(1));
+#endif
+#ifdef ACM_TEST_VAD_DTX
+  printf("  ACM VAD-DTX test\n");
+  tests->push_back(new webrtc::TestVADDTX(1));
+#endif
+#ifdef ACM_TEST_FEC
+  printf("  ACM FEC test\n");
+  tests->push_back(new webrtc::TestFEC(1));
+#endif
+#ifdef ACM_TEST_CODEC_SPEC_API
+  printf("  ACM codec API test\n");
+  tests->push_back(new webrtc::ISACTest(1));
+#endif
+#ifdef ACM_TEST_FULL_API
+  printf("  ACM full API test\n");
+  tests->push_back(new webrtc::APITest());
+#endif
+  printf("\n");
+}
+
+// TODO(kjellander): Make this a proper gtest instead of using this single test
+// to run all the tests.
+
+#ifdef ACM_TEST_ALL_CODECS
+TEST(AudioCodingModuleTest, TestAllCodecs) {
+  Trace::CreateTrace();
+  Trace::SetTraceFile((webrtc::test::OutputPath() +
+      "acm_allcodecs_trace.txt").c_str());
+  webrtc::TestAllCodecs(ACM_TEST_MODE).Perform();
+  Trace::ReturnTrace();
+}
+#endif
+
+TEST(AudioCodingModuleTest, RunAllTests) {
+  std::vector<ACMTest*> tests;
+  PopulateTests(&tests);
+  std::vector<ACMTest*>::iterator it;
+  for (it = tests.begin(); it < tests.end(); it++) {
+    (*it)->Perform();
+    delete (*it);
+  }
+
+  Trace::ReturnTrace();
+  printf("ACM test completed\n");
+}
diff --git a/src/modules/audio_coding/main/test/TimedTrace.cc b/src/modules/audio_coding/main/test/TimedTrace.cc
new file mode 100644
index 0000000..6bf301f
--- /dev/null
+++ b/src/modules/audio_coding/main/test/TimedTrace.cc
@@ -0,0 +1,77 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "TimedTrace.h"
+#include <math.h>
+
+double TimedTrace::_timeEllapsedSec = 0;
+FILE*  TimedTrace::_timedTraceFile = NULL;
+
+TimedTrace::TimedTrace()
+{
+
+}
+
+TimedTrace::~TimedTrace()
+{
+    if(_timedTraceFile != NULL)
+    {
+        fclose(_timedTraceFile);
+    }
+    _timedTraceFile = NULL;
+}
+
+WebRtc_Word16
+TimedTrace::SetUp(char* fileName)
+{
+    if(_timedTraceFile == NULL)
+    {
+        _timedTraceFile = fopen(fileName, "w");
+    }
+    if(_timedTraceFile == NULL)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+void
+TimedTrace::SetTimeEllapsed(double timeEllapsedSec)
+{
+    _timeEllapsedSec = timeEllapsedSec;
+}
+
+double
+TimedTrace::TimeEllapsed()
+{
+    return _timeEllapsedSec;
+}
+
+void
+TimedTrace::Tick10Msec()
+{
+    _timeEllapsedSec += 0.010;
+}
+
+void
+TimedTrace::TimedLogg(char* message)
+{    
+    unsigned int minutes = (WebRtc_UWord32)floor(_timeEllapsedSec / 60.0);
+    double seconds = _timeEllapsedSec - minutes * 60;
+    //char myFormat[100] = "%8.2f, %3u:%05.2f: %s\n";
+    if(_timedTraceFile != NULL)
+    {
+        fprintf(_timedTraceFile, "%8.2f, %3u:%05.2f: %s\n", 
+            _timeEllapsedSec, 
+            minutes, 
+            seconds, 
+            message);
+    }
+}
diff --git a/src/modules/audio_coding/main/test/TimedTrace.h b/src/modules/audio_coding/main/test/TimedTrace.h
new file mode 100644
index 0000000..d37d287
--- /dev/null
+++ b/src/modules/audio_coding/main/test/TimedTrace.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TIMED_TRACE_H
+#define TIMED_TRACE_H
+
+#include "typedefs.h"
+
+#include <cstdio>
+#include <cstdlib>
+
+
+class TimedTrace
+{
+public:
+    TimedTrace();
+    ~TimedTrace();
+
+    void SetTimeEllapsed(double myTime);
+    double TimeEllapsed();
+    void Tick10Msec();
+    WebRtc_Word16 SetUp(char* fileName);
+    void TimedLogg(char* message);
+
+private:
+    static double _timeEllapsedSec;
+    static FILE*  _timedTraceFile;
+
+};
+
+#endif
diff --git a/src/modules/audio_coding/main/test/TwoWayCommunication.cc b/src/modules/audio_coding/main/test/TwoWayCommunication.cc
new file mode 100644
index 0000000..2e580cb
--- /dev/null
+++ b/src/modules/audio_coding/main/test/TwoWayCommunication.cc
@@ -0,0 +1,452 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "TwoWayCommunication.h"
+
+#include <cctype>
+#include <stdio.h>
+#include <string.h>
+
+#ifdef WIN32
+#include <Windows.h>
+#endif
+
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "gtest/gtest.h"
+#include "PCMFile.h"
+#include "trace.h"
+#include "testsupport/fileutils.h"
+#include "utility.h"
+
+namespace webrtc {
+
+#define MAX_FILE_NAME_LENGTH_BYTE 500
+
+TwoWayCommunication::TwoWayCommunication(int testMode)
+{
+    _testMode = testMode;
+}
+
+TwoWayCommunication::~TwoWayCommunication()
+{
+    AudioCodingModule::Destroy(_acmA);
+    AudioCodingModule::Destroy(_acmB);
+
+    AudioCodingModule::Destroy(_acmRefA);
+    AudioCodingModule::Destroy(_acmRefB);
+
+    delete _channel_A2B;
+    delete _channel_B2A;
+
+    delete _channelRef_A2B;
+    delete _channelRef_B2A;
+#ifdef WEBRTC_DTMF_DETECTION
+    if(_dtmfDetectorA != NULL)
+    {
+        delete _dtmfDetectorA;
+    }
+    if(_dtmfDetectorB != NULL)
+    {
+        delete _dtmfDetectorB;
+    }
+#endif
+    _inFileA.Close();
+    _inFileB.Close();
+    _outFileA.Close();
+    _outFileB.Close();
+    _outFileRefA.Close();
+    _outFileRefB.Close();
+}
+
+
+WebRtc_UWord8
+TwoWayCommunication::ChooseCodec(WebRtc_UWord8* codecID_A,
+                                 WebRtc_UWord8* codecID_B)
+{
+    AudioCodingModule* tmpACM = AudioCodingModule::Create(0);
+    WebRtc_UWord8 noCodec = tmpACM->NumberOfCodecs();
+    CodecInst codecInst;
+    printf("List of Supported Codecs\n");
+    printf("========================\n");
+    for(WebRtc_UWord8 codecCntr = 0; codecCntr < noCodec; codecCntr++)
+    {
+        tmpACM->Codec(codecCntr, codecInst);
+        printf("%d- %s\n", codecCntr, codecInst.plname);
+    }
+    printf("\nChoose a send codec for side A [0]: ");
+    char myStr[15] = "";
+    EXPECT_TRUE(fgets(myStr, 10, stdin) != NULL);
+    *codecID_A = (WebRtc_UWord8)atoi(myStr);
+
+    printf("\nChoose a send codec for side B [0]: ");
+    EXPECT_TRUE(fgets(myStr, 10, stdin) != NULL);
+    *codecID_B = (WebRtc_UWord8)atoi(myStr);
+
+    AudioCodingModule::Destroy(tmpACM);
+    printf("\n");
+    return 0;
+}
+
+WebRtc_Word16 TwoWayCommunication::SetUp()
+{
+    _acmA = AudioCodingModule::Create(1);
+    _acmB = AudioCodingModule::Create(2);
+
+    _acmRefA = AudioCodingModule::Create(3);
+    _acmRefB = AudioCodingModule::Create(4);
+
+    WebRtc_UWord8 codecID_A;
+    WebRtc_UWord8 codecID_B;
+
+    ChooseCodec(&codecID_A, &codecID_B);
+    CodecInst codecInst_A;
+    CodecInst codecInst_B;
+    CodecInst dummyCodec;
+    _acmA->Codec(codecID_A, codecInst_A);
+    _acmB->Codec(codecID_B, codecInst_B);
+
+    _acmA->Codec(6, dummyCodec);
+
+    //--- Set A codecs
+    CHECK_ERROR(_acmA->RegisterSendCodec(codecInst_A));
+    CHECK_ERROR(_acmA->RegisterReceiveCodec(codecInst_B));
+#ifdef WEBRTC_DTMF_DETECTION
+    _dtmfDetectorA = new(DTMFDetector);
+    CHECK_ERROR(_acmA->RegisterIncomingMessagesCallback(_dtmfDetectorA,
+                                                        ACMUSA));
+#endif
+    //--- Set ref-A codecs
+    CHECK_ERROR(_acmRefA->RegisterSendCodec(codecInst_A));
+    CHECK_ERROR(_acmRefA->RegisterReceiveCodec(codecInst_B));
+
+    //--- Set B codecs
+    CHECK_ERROR(_acmB->RegisterSendCodec(codecInst_B));
+    CHECK_ERROR(_acmB->RegisterReceiveCodec(codecInst_A));
+#ifdef WEBRTC_DTMF_DETECTION
+    _dtmfDetectorB = new(DTMFDetector);
+    CHECK_ERROR(_acmB->RegisterIncomingMessagesCallback(_dtmfDetectorB,
+                                                        ACMUSA));
+#endif
+
+    //--- Set ref-B codecs
+    CHECK_ERROR(_acmRefB->RegisterSendCodec(codecInst_B));
+    CHECK_ERROR(_acmRefB->RegisterReceiveCodec(codecInst_A));
+
+    WebRtc_UWord16 frequencyHz;
+    
+    //--- Input A
+    std::string in_file_name =
+        webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
+    frequencyHz = 32000;
+    printf("Enter input file at side A [%s]: ", in_file_name.c_str());
+    PCMFile::ChooseFile(&in_file_name, 499, &frequencyHz);
+    _inFileA.Open(in_file_name, frequencyHz, "rb");
+
+    //--- Output A
+    std::string out_file_a = webrtc::test::OutputPath() + "outA.pcm";
+    printf("Output file at side A: %s\n", out_file_a.c_str());
+    printf("Sampling frequency (in Hz) of the above file: %u\n",
+           frequencyHz);
+    _outFileA.Open(out_file_a, frequencyHz, "wb");
+    std::string ref_file_name = webrtc::test::OutputPath() + "ref_outA.pcm";
+    _outFileRefA.Open(ref_file_name, frequencyHz, "wb");
+
+    //--- Input B
+    in_file_name =
+        webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
+    frequencyHz = 32000;
+    printf("\n\nEnter input file at side B [%s]: ", in_file_name.c_str());
+    PCMFile::ChooseFile(&in_file_name, 499, &frequencyHz);
+    _inFileB.Open(in_file_name, frequencyHz, "rb");
+
+    //--- Output B
+    std::string out_file_b = webrtc::test::OutputPath() + "outB.pcm";
+    printf("Output file at side B: %s\n", out_file_b.c_str());
+    printf("Sampling frequency (in Hz) of the above file: %u\n",
+           frequencyHz);
+    _outFileB.Open(out_file_b, frequencyHz, "wb");
+    ref_file_name = webrtc::test::OutputPath() + "ref_outB.pcm";
+    _outFileRefB.Open(ref_file_name, frequencyHz, "wb");
+    
+    //--- Set A-to-B channel
+    _channel_A2B = new Channel;
+    _acmA->RegisterTransportCallback(_channel_A2B);
+    _channel_A2B->RegisterReceiverACM(_acmB);
+    //--- Do the same for the reference
+    _channelRef_A2B = new Channel;
+    _acmRefA->RegisterTransportCallback(_channelRef_A2B);
+    _channelRef_A2B->RegisterReceiverACM(_acmRefB);
+
+    //--- Set B-to-A channel
+    _channel_B2A = new Channel;
+    _acmB->RegisterTransportCallback(_channel_B2A);
+    _channel_B2A->RegisterReceiverACM(_acmA);
+    //--- Do the same for reference
+    _channelRef_B2A = new Channel;
+    _acmRefB->RegisterTransportCallback(_channelRef_B2A);
+    _channelRef_B2A->RegisterReceiverACM(_acmRefA);
+
+    // The clicks will be more obvious when we 
+    // are in FAX mode.
+    _acmB->SetPlayoutMode(fax);
+    _acmRefB->SetPlayoutMode(fax);
+
+    return 0;
+}
+
+WebRtc_Word16 TwoWayCommunication::SetUpAutotest()
+{
+    _acmA = AudioCodingModule::Create(1);
+    _acmB = AudioCodingModule::Create(2);
+
+    _acmRefA = AudioCodingModule::Create(3);
+    _acmRefB = AudioCodingModule::Create(4);
+
+    CodecInst codecInst_A;
+    CodecInst codecInst_B;
+    CodecInst dummyCodec;
+
+    _acmA->Codec("ISAC", codecInst_A, 16000, 1);
+    _acmB->Codec("L16", codecInst_B, 8000, 1);
+    _acmA->Codec(6, dummyCodec);
+
+    //--- Set A codecs
+    CHECK_ERROR(_acmA->RegisterSendCodec(codecInst_A));
+    CHECK_ERROR(_acmA->RegisterReceiveCodec(codecInst_B));
+#ifdef WEBRTC_DTMF_DETECTION
+    _dtmfDetectorA = new(DTMFDetector);
+    CHECK_ERROR(_acmA->RegisterIncomingMessagesCallback(_dtmfDetectorA,
+                                                        ACMUSA));
+#endif
+
+    //--- Set ref-A codecs
+    CHECK_ERROR(_acmRefA->RegisterSendCodec(codecInst_A));
+    CHECK_ERROR(_acmRefA->RegisterReceiveCodec(codecInst_B));
+
+    //--- Set B codecs
+    CHECK_ERROR(_acmB->RegisterSendCodec(codecInst_B));
+    CHECK_ERROR(_acmB->RegisterReceiveCodec(codecInst_A));
+#ifdef WEBRTC_DTMF_DETECTION
+    _dtmfDetectorB = new(DTMFDetector);
+    CHECK_ERROR(_acmB->RegisterIncomingMessagesCallback(_dtmfDetectorB,
+                                                        ACMUSA));
+#endif
+
+    //--- Set ref-B codecs
+    CHECK_ERROR(_acmRefB->RegisterSendCodec(codecInst_B));
+    CHECK_ERROR(_acmRefB->RegisterReceiveCodec(codecInst_A));
+
+    WebRtc_UWord16 frequencyHz;
+
+    //--- Input A and B
+    std::string in_file_name =
+        webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
+    frequencyHz = 16000;
+    _inFileA.Open(in_file_name, frequencyHz, "rb");
+    _inFileB.Open(in_file_name, frequencyHz, "rb");
+
+    //--- Output A
+    std::string output_file_a = webrtc::test::OutputPath() + "outAutotestA.pcm";
+    frequencyHz = 16000;
+    _outFileA.Open(output_file_a, frequencyHz, "wb");
+    std::string output_ref_file_a = webrtc::test::OutputPath() +
+        "ref_outAutotestA.pcm";
+    _outFileRefA.Open(output_ref_file_a, frequencyHz, "wb");
+
+    //--- Output B
+    std::string output_file_b = webrtc::test::OutputPath() + "outAutotestB.pcm";
+    frequencyHz = 16000;
+    _outFileB.Open(output_file_b, frequencyHz, "wb");
+    std::string output_ref_file_b = webrtc::test::OutputPath() +
+        "ref_outAutotestB.pcm";
+    _outFileRefB.Open(output_ref_file_b, frequencyHz, "wb");
+
+    //--- Set A-to-B channel
+    _channel_A2B = new Channel;
+    _acmA->RegisterTransportCallback(_channel_A2B);
+    _channel_A2B->RegisterReceiverACM(_acmB);
+    //--- Do the same for the reference
+    _channelRef_A2B = new Channel;
+    _acmRefA->RegisterTransportCallback(_channelRef_A2B);
+    _channelRef_A2B->RegisterReceiverACM(_acmRefB);
+
+    //--- Set B-to-A channel
+    _channel_B2A = new Channel;
+    _acmB->RegisterTransportCallback(_channel_B2A);
+    _channel_B2A->RegisterReceiverACM(_acmA);
+    //--- Do the same for reference
+    _channelRef_B2A = new Channel;
+    _acmRefB->RegisterTransportCallback(_channelRef_B2A);
+    _channelRef_B2A->RegisterReceiverACM(_acmRefA);
+
+    // The clicks will be more obvious when we 
+    // are in FAX mode.
+    _acmB->SetPlayoutMode(fax);
+    _acmRefB->SetPlayoutMode(fax);
+
+    return 0;
+}
+
+void
+TwoWayCommunication::Perform()
+{
+    if(_testMode == 0)
+    {
+        printf("Running TwoWayCommunication Test");
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioCoding, -1,
+                     "---------- TwoWayCommunication ----------");
+        SetUpAutotest();
+    }
+    else
+    {
+        SetUp();
+    }
+    unsigned int msecPassed = 0;
+    unsigned int secPassed  = 0;
+
+    WebRtc_Word32 outFreqHzA = _outFileA.SamplingFrequency();
+    WebRtc_Word32 outFreqHzB = _outFileB.SamplingFrequency();
+
+    AudioFrame audioFrame;
+
+    CodecInst codecInst_B;
+    CodecInst dummy;
+
+    _acmB->SendCodec(codecInst_B);
+
+    if(_testMode != 0)
+    {
+        printf("\n");
+        printf("sec:msec                   A                              B\n");
+        printf("--------                 -----                        -----\n");
+    }
+
+    while(!_inFileA.EndOfFile() && !_inFileB.EndOfFile())
+    {
+        _inFileA.Read10MsData(audioFrame);
+        _acmA->Add10MsData(audioFrame);
+        _acmRefA->Add10MsData(audioFrame);
+
+        _inFileB.Read10MsData(audioFrame);
+        _acmB->Add10MsData(audioFrame);
+        _acmRefB->Add10MsData(audioFrame);
+
+
+        _acmA->Process();
+        _acmB->Process();
+        _acmRefA->Process();
+        _acmRefB->Process();
+
+        _acmA->PlayoutData10Ms(outFreqHzA, audioFrame);
+        _outFileA.Write10MsData(audioFrame);
+
+        _acmRefA->PlayoutData10Ms(outFreqHzA, audioFrame);
+        _outFileRefA.Write10MsData(audioFrame);
+
+        _acmB->PlayoutData10Ms(outFreqHzB, audioFrame);
+        _outFileB.Write10MsData(audioFrame);
+
+        _acmRefB->PlayoutData10Ms(outFreqHzB, audioFrame);
+        _outFileRefB.Write10MsData(audioFrame);
+
+        msecPassed += 10;
+        if(msecPassed >= 1000)
+        {
+            msecPassed = 0;
+            secPassed++;
+        }
+        if(((secPassed%5) == 4) && (msecPassed == 0))
+        {
+            if(_testMode != 0)
+            {
+                printf("%3u:%3u  ", secPassed, msecPassed);
+            }
+            _acmA->ResetEncoder();
+            if(_testMode == 0)
+            {
+                WEBRTC_TRACE(kTraceStateInfo, kTraceAudioCoding, -1,
+                             "---------- Errors expected");
+                printf(".");
+            }
+            else
+            {
+                printf("Reset Encoder (click in side B)               ");
+                printf("Initialize Sender (no audio in side A)\n");
+            }
+            CHECK_ERROR(_acmB->InitializeSender());
+        }
+        if(((secPassed%5) == 4) && (msecPassed >= 990))
+        {
+            if(_testMode == 0)
+            {
+                WEBRTC_TRACE(kTraceStateInfo, kTraceAudioCoding, -1,
+                             "----- END: Errors expected");
+                printf(".");
+            }
+            else
+            {
+                printf("%3u:%3u  ", secPassed, msecPassed);
+                printf("                                              ");
+                printf("Register Send Codec (audio back in side A)\n");
+            }
+            CHECK_ERROR(_acmB->RegisterSendCodec(codecInst_B));
+            CHECK_ERROR(_acmB->SendCodec(dummy));
+        }
+        if(((secPassed%7) == 6) && (msecPassed == 0))
+        {
+            CHECK_ERROR(_acmB->ResetDecoder());
+            if(_testMode == 0)
+            {
+                WEBRTC_TRACE(kTraceStateInfo, kTraceAudioCoding, -1,
+                             "---------- Errors expected");
+                printf(".");
+            }
+            else
+            {
+                printf("%3u:%3u  ", secPassed, msecPassed);
+                printf("Initialize Receiver (no audio in side A)      ");
+                printf("Reset Decoder\n");
+            }
+            CHECK_ERROR(_acmA->InitializeReceiver());
+        }
+        if(((secPassed%7) == 6) && (msecPassed >= 990))
+        {
+            if(_testMode == 0)
+            {
+                WEBRTC_TRACE(kTraceStateInfo, kTraceAudioCoding, -1,
+                             "----- END: Errors expected");
+                printf(".");
+            }
+            else
+            {
+                printf("%3u:%3u  ", secPassed, msecPassed);
+                printf("Register Receive Coded (audio back in side A)\n");
+            }
+            CHECK_ERROR(_acmA->RegisterReceiveCodec(codecInst_B));
+        }
+        //Sleep(9);
+    }
+    if(_testMode == 0)
+    {
+        printf("Done!\n");
+    }
+
+#ifdef WEBRTC_DTMF_DETECTION
+    printf("\nDTMF at Side A\n");
+    _dtmfDetectorA->PrintDetectedDigits();
+
+    printf("\nDTMF at Side B\n");
+    _dtmfDetectorB->PrintDetectedDigits();
+#endif
+
+}
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/test/TwoWayCommunication.h b/src/modules/audio_coding/main/test/TwoWayCommunication.h
new file mode 100644
index 0000000..fe53532
--- /dev/null
+++ b/src/modules/audio_coding/main/test/TwoWayCommunication.h
@@ -0,0 +1,60 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef TWO_WAY_COMMUNICATION_H
+#define TWO_WAY_COMMUNICATION_H
+
+#include "ACMTest.h"
+#include "Channel.h"
+#include "PCMFile.h"
+#include "audio_coding_module.h"
+#include "utility.h"
+
+namespace webrtc {
+
+class TwoWayCommunication : public ACMTest
+{
+public:
+    TwoWayCommunication(int testMode = 1);
+    ~TwoWayCommunication();
+
+    void Perform();
+private:
+    WebRtc_UWord8 ChooseCodec(WebRtc_UWord8* codecID_A, WebRtc_UWord8* codecID_B);
+    WebRtc_Word16 SetUp();
+    WebRtc_Word16 SetUpAutotest();
+
+    AudioCodingModule* _acmA;
+    AudioCodingModule* _acmB;
+
+    AudioCodingModule* _acmRefA;
+    AudioCodingModule* _acmRefB;
+
+    Channel* _channel_A2B;
+    Channel* _channel_B2A;
+
+    Channel* _channelRef_A2B;
+    Channel* _channelRef_B2A;
+
+    PCMFile _inFileA;
+    PCMFile _inFileB;
+
+    PCMFile _outFileA;
+    PCMFile _outFileB;
+
+    PCMFile _outFileRefA;
+    PCMFile _outFileRefB;
+
+    int _testMode;
+};
+
+} // namespace webrtc
+
+#endif
diff --git a/src/modules/audio_coding/main/test/iSACTest.cc b/src/modules/audio_coding/main/test/iSACTest.cc
new file mode 100644
index 0000000..28cc942
--- /dev/null
+++ b/src/modules/audio_coding/main/test/iSACTest.cc
@@ -0,0 +1,565 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cctype>
+#include <stdio.h>
+#include <string.h>
+
+#if _WIN32
+#include <windows.h>
+#elif WEBRTC_LINUX
+#include <ctime>
+#else
+#include <sys/time.h>
+#include <time.h>
+#endif 
+
+#include "event_wrapper.h"
+#include "iSACTest.h"
+#include "utility.h"
+#include "trace.h"
+#include "testsupport/fileutils.h"
+#include "tick_util.h"
+
+namespace webrtc {
+
+void SetISACConfigDefault(
+    ACMTestISACConfig& isacConfig)
+{
+    isacConfig.currentRateBitPerSec = 0;
+    isacConfig.currentFrameSizeMsec = 0;
+    isacConfig.maxRateBitPerSec     = 0;
+    isacConfig.maxPayloadSizeByte   = 0;
+    isacConfig.encodingMode         = -1;
+    isacConfig.initRateBitPerSec    = 0;
+    isacConfig.initFrameSizeInMsec  = 0;
+    isacConfig.enforceFrameSize     = false;
+    return;
+}
+
+
+WebRtc_Word16 SetISAConfig(
+    ACMTestISACConfig& isacConfig,
+    AudioCodingModule* acm,
+    int testMode)
+{
+
+    if((isacConfig.currentRateBitPerSec != 0) ||
+        (isacConfig.currentFrameSizeMsec != 0))
+    {
+        CodecInst sendCodec;
+        acm->SendCodec(sendCodec);
+        if(isacConfig.currentRateBitPerSec < 0)
+        {
+            sendCodec.rate = -1;
+            CHECK_ERROR(acm->RegisterSendCodec(sendCodec));
+            if(testMode != 0)
+            {
+                printf("ISAC-%s Registered in adaptive (channel-dependent) mode.\n", 
+                    (sendCodec.plfreq == 32000)? "swb":"wb");
+            }
+        }
+        else
+        {
+
+            if(isacConfig.currentRateBitPerSec != 0)
+            {
+                sendCodec.rate = isacConfig.currentRateBitPerSec;
+            }
+            if(isacConfig.currentFrameSizeMsec != 0)
+            {
+                sendCodec.pacsize = isacConfig.currentFrameSizeMsec *
+                    (sendCodec.plfreq / 1000);
+            }
+            CHECK_ERROR(acm->RegisterSendCodec(sendCodec));
+            if(testMode != 0)
+            {
+                printf("Target rate is set to %d bit/sec with frame-size %d ms \n",
+                    (int)isacConfig.currentRateBitPerSec,
+                    (int)sendCodec.pacsize / (sendCodec.plfreq / 1000));
+            }
+        }
+    }
+
+    if(isacConfig.maxRateBitPerSec > 0)
+    {
+        CHECK_ERROR(acm->SetISACMaxRate(isacConfig.maxRateBitPerSec));
+        if(testMode != 0)
+        {
+            printf("Max rate is set to %u bit/sec\n",
+                isacConfig.maxRateBitPerSec);
+        }
+    }
+    if(isacConfig.maxPayloadSizeByte > 0)
+    {
+        CHECK_ERROR(acm->SetISACMaxPayloadSize(isacConfig.maxPayloadSizeByte));
+        if(testMode != 0)
+        {
+            printf("Max payload-size is set to %u bit/sec\n",
+                isacConfig.maxPayloadSizeByte);
+        }
+    }
+    if((isacConfig.initFrameSizeInMsec != 0) ||
+        (isacConfig.initRateBitPerSec != 0))
+    {
+        CHECK_ERROR(acm->ConfigISACBandwidthEstimator(
+            (WebRtc_UWord8)isacConfig.initFrameSizeInMsec,
+            (WebRtc_UWord16)isacConfig.initRateBitPerSec, 
+            isacConfig.enforceFrameSize));
+        if((isacConfig.initFrameSizeInMsec != 0) && (testMode != 0))
+        {
+            printf("Initialize BWE to %d msec frame-size\n",
+                isacConfig.initFrameSizeInMsec);
+        }
+        if((isacConfig.initRateBitPerSec != 0) && (testMode != 0))
+        {
+            printf("Initialize BWE to %u bit/sec send-bandwidth\n",
+                isacConfig.initRateBitPerSec);
+        }
+    }
+
+    return 0;
+}
+
+
+ISACTest::ISACTest(int testMode)
+{
+    _testMode = testMode;
+}
+
+ISACTest::~ISACTest()
+{
+    AudioCodingModule::Destroy(_acmA);
+    AudioCodingModule::Destroy(_acmB);
+
+    delete _channel_A2B;
+    delete _channel_B2A;
+}
+
+
+WebRtc_Word16
+ISACTest::Setup()
+{
+    int codecCntr;
+    CodecInst codecParam;
+
+    _acmA = AudioCodingModule::Create(1);
+    _acmB = AudioCodingModule::Create(2);
+
+    for(codecCntr = 0; codecCntr < AudioCodingModule::NumberOfCodecs(); codecCntr++)
+    {
+        AudioCodingModule::Codec(codecCntr, codecParam);
+        if(!STR_CASE_CMP(codecParam.plname, "ISAC") && codecParam.plfreq == 16000)
+        {
+            memcpy(&_paramISAC16kHz, &codecParam, sizeof(CodecInst));
+            _idISAC16kHz = codecCntr;
+        }
+        if(!STR_CASE_CMP(codecParam.plname, "ISAC") && codecParam.plfreq == 32000)
+        {
+            memcpy(&_paramISAC32kHz, &codecParam, sizeof(CodecInst));
+            _idISAC32kHz = codecCntr;
+        }        
+    }
+
+    // register both iSAC-wb & iSAC-swb in both sides as receiver codecs
+    CHECK_ERROR(_acmA->RegisterReceiveCodec(_paramISAC16kHz));
+    CHECK_ERROR(_acmA->RegisterReceiveCodec(_paramISAC32kHz));
+    CHECK_ERROR(_acmB->RegisterReceiveCodec(_paramISAC16kHz));
+    CHECK_ERROR(_acmB->RegisterReceiveCodec(_paramISAC32kHz));
+
+    //--- Set A-to-B channel
+    _channel_A2B = new Channel;
+    CHECK_ERROR(_acmA->RegisterTransportCallback(_channel_A2B));
+    _channel_A2B->RegisterReceiverACM(_acmB);
+
+    //--- Set B-to-A channel
+    _channel_B2A = new Channel;
+    CHECK_ERROR(_acmB->RegisterTransportCallback(_channel_B2A));
+    _channel_B2A->RegisterReceiverACM(_acmA);
+
+    file_name_swb_ =
+        webrtc::test::ResourcePath("audio_coding/testfile32kHz", "pcm");
+
+    _acmB->RegisterSendCodec(_paramISAC16kHz);
+    _acmA->RegisterSendCodec(_paramISAC32kHz);
+
+    if(_testMode != 0)
+    {
+        printf("Side A Send Codec\n");
+        printf("%s %d\n", _paramISAC32kHz.plname, _paramISAC32kHz.plfreq);
+
+        printf("Side B Send Codec\n");
+        printf("%s %d\n", _paramISAC16kHz.plname, _paramISAC16kHz.plfreq);
+    }
+
+    _inFileA.Open(file_name_swb_, 32000, "rb");
+    std::string fileNameA = webrtc::test::OutputPath() + "testisac_a.pcm";
+    std::string fileNameB = webrtc::test::OutputPath() + "testisac_b.pcm";
+    _outFileA.Open(fileNameA, 32000, "wb");
+    _outFileB.Open(fileNameB, 32000, "wb");
+
+    while(!_inFileA.EndOfFile())
+    {
+        Run10ms();
+    }
+    CodecInst receiveCodec;
+    CHECK_ERROR(_acmA->ReceiveCodec(receiveCodec));
+    if(_testMode != 0)
+    {
+        printf("Side A Receive Codec\n");
+        printf("%s %d\n", receiveCodec.plname, receiveCodec.plfreq);
+    }
+
+    CHECK_ERROR(_acmB->ReceiveCodec(receiveCodec));
+    if(_testMode != 0)
+    {
+        printf("Side B Receive Codec\n");
+        printf("%s %d\n", receiveCodec.plname, receiveCodec.plfreq);
+    }
+
+    _inFileA.Close();
+    _outFileA.Close();
+    _outFileB.Close();
+
+    return 0;
+}
+
+
+void
+ISACTest::Perform()
+{
+    if(_testMode == 0)
+    {
+        printf("Running iSAC Test");
+        WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceAudioCoding, -1, "---------- iSACTest ----------");
+    }
+
+    Setup();
+
+    WebRtc_Word16 testNr = 0;
+    ACMTestISACConfig wbISACConfig;
+    ACMTestISACConfig swbISACConfig;
+
+    SetISACConfigDefault(wbISACConfig);
+    SetISACConfigDefault(swbISACConfig);
+
+    wbISACConfig.currentRateBitPerSec = -1;
+    swbISACConfig.currentRateBitPerSec = -1;
+    testNr++;
+    EncodeDecode(testNr, wbISACConfig, swbISACConfig);
+
+    if (_testMode != 0)
+    {
+        SetISACConfigDefault(wbISACConfig);
+        SetISACConfigDefault(swbISACConfig);
+
+        wbISACConfig.currentRateBitPerSec = -1;
+        swbISACConfig.currentRateBitPerSec = -1;
+        wbISACConfig.initRateBitPerSec = 13000;
+        wbISACConfig.initFrameSizeInMsec = 60;
+        swbISACConfig.initRateBitPerSec = 20000;
+        swbISACConfig.initFrameSizeInMsec = 30;
+        testNr++;
+        EncodeDecode(testNr, wbISACConfig, swbISACConfig);
+
+        SetISACConfigDefault(wbISACConfig);
+        SetISACConfigDefault(swbISACConfig);
+
+        wbISACConfig.currentRateBitPerSec = 20000;
+        swbISACConfig.currentRateBitPerSec = 48000;
+        testNr++;
+        EncodeDecode(testNr, wbISACConfig, swbISACConfig);
+
+        wbISACConfig.currentRateBitPerSec = 16000;
+        swbISACConfig.currentRateBitPerSec = 30000;
+        wbISACConfig.currentFrameSizeMsec = 60;
+        testNr++;
+        EncodeDecode(testNr, wbISACConfig, swbISACConfig);
+    }
+
+    SetISACConfigDefault(wbISACConfig);
+    SetISACConfigDefault(swbISACConfig);
+    testNr++;
+    EncodeDecode(testNr, wbISACConfig, swbISACConfig);
+    
+    int user_input;
+    if((_testMode == 0) || (_testMode == 1))
+    {
+        swbISACConfig.maxPayloadSizeByte = (WebRtc_UWord16)200;
+        wbISACConfig.maxPayloadSizeByte = (WebRtc_UWord16)200;
+    }
+    else
+    {
+        printf("Enter the max payload-size for side A: ");
+        CHECK_ERROR(scanf("%d", &user_input));
+        swbISACConfig.maxPayloadSizeByte = (WebRtc_UWord16)user_input;
+        printf("Enter the max payload-size for side B: ");
+        CHECK_ERROR(scanf("%d", &user_input));
+        wbISACConfig.maxPayloadSizeByte = (WebRtc_UWord16)user_input;
+    }
+    testNr++;
+    EncodeDecode(testNr, wbISACConfig, swbISACConfig);
+
+    _acmA->ResetEncoder();
+    _acmB->ResetEncoder();
+    SetISACConfigDefault(wbISACConfig);
+    SetISACConfigDefault(swbISACConfig);
+
+    if((_testMode == 0) || (_testMode == 1))
+    {
+        swbISACConfig.maxRateBitPerSec = (WebRtc_UWord32)48000;
+        wbISACConfig.maxRateBitPerSec = (WebRtc_UWord32)48000;
+    }
+    else
+    {
+        printf("Enter the max rate for side A: ");
+        CHECK_ERROR(scanf("%d", &user_input));
+        swbISACConfig.maxRateBitPerSec = (WebRtc_UWord32)user_input;
+        printf("Enter the max rate for side B: ");
+        CHECK_ERROR(scanf("%d", &user_input));
+        wbISACConfig.maxRateBitPerSec = (WebRtc_UWord32)user_input;
+    }
+ 
+    testNr++;
+    EncodeDecode(testNr, wbISACConfig, swbISACConfig);
+
+
+    testNr++;
+    if(_testMode == 0)
+    {
+        SwitchingSamplingRate(testNr, 4);
+        printf("Done!\n");
+    }
+    else
+    {
+        SwitchingSamplingRate(testNr, 80);
+    }
+}
+
+
+void
+ISACTest::Run10ms()
+{
+    AudioFrame audioFrame;
+
+    _inFileA.Read10MsData(audioFrame);
+    CHECK_ERROR(_acmA->Add10MsData(audioFrame));
+
+    CHECK_ERROR(_acmB->Add10MsData(audioFrame));
+
+    CHECK_ERROR(_acmA->Process());
+    CHECK_ERROR(_acmB->Process());
+
+    CHECK_ERROR(_acmA->PlayoutData10Ms(32000, audioFrame));
+    _outFileA.Write10MsData(audioFrame);
+
+    CHECK_ERROR(_acmB->PlayoutData10Ms(32000, audioFrame));
+    _outFileB.Write10MsData(audioFrame);
+}
+
+void
+ISACTest::EncodeDecode(
+    int                testNr,
+    ACMTestISACConfig& wbISACConfig,
+    ACMTestISACConfig& swbISACConfig)
+{
+    if(_testMode == 0)
+    {
+        printf(".");
+    }
+    else
+    {
+        printf("\nTest %d:\n\n", testNr);
+    }
+
+    // Files in Side A and B
+    _inFileA.Open(file_name_swb_, 32000, "rb", true);
+    _inFileB.Open(file_name_swb_, 32000, "rb", true);
+
+    std::string file_name_out;
+    std::stringstream file_stream_a;
+    std::stringstream file_stream_b;
+    file_stream_a << webrtc::test::OutputPath();
+    file_stream_b << webrtc::test::OutputPath();
+    if(_testMode == 0)
+    {
+        file_stream_a << "out_iSACTest_A_" << testNr << ".pcm";
+        file_stream_b << "out_iSACTest_B_" << testNr << ".pcm";
+
+    }
+    else
+    {
+        file_stream_a << "outA_" << testNr << ".pcm";
+        file_stream_b << "outB_" << testNr << ".pcm";
+    }
+    file_name_out = file_stream_a.str();
+    _outFileA.Open(file_name_out, 32000, "wb");
+    file_name_out = file_stream_b.str();
+    _outFileB.Open(file_name_out, 32000, "wb");
+
+    CHECK_ERROR(_acmA->RegisterSendCodec(_paramISAC16kHz));
+    CHECK_ERROR(_acmA->RegisterSendCodec(_paramISAC32kHz));
+    
+    CHECK_ERROR(_acmB->RegisterSendCodec(_paramISAC32kHz));
+    CHECK_ERROR(_acmB->RegisterSendCodec(_paramISAC16kHz));
+    if(_testMode != 0)
+    {
+        printf("Side A Sending Super-Wideband \n");
+        printf("Side B Sending Wideband\n\n");
+    }
+
+    SetISAConfig(swbISACConfig, _acmA, _testMode);
+    SetISAConfig(wbISACConfig,  _acmB, _testMode);
+
+    bool adaptiveMode = false;
+    if((swbISACConfig.currentRateBitPerSec == -1) ||
+        (wbISACConfig.currentRateBitPerSec == -1))
+    {
+        adaptiveMode = true;
+    }
+    _myTimer.Reset();
+    _channel_A2B->ResetStats();
+    _channel_B2A->ResetStats();
+
+    char currentTime[500];
+    if(_testMode == 2) printf("\n");
+    CodecInst sendCodec;
+    EventWrapper* myEvent = EventWrapper::Create();
+    myEvent->StartTimer(true, 10);
+    while(!(_inFileA.EndOfFile() || _inFileA.Rewinded()))
+    {
+        Run10ms();
+        _myTimer.Tick10ms();
+        _myTimer.CurrentTimeHMS(currentTime);
+        if(_testMode == 2) printf("\r%s   ", currentTime);
+
+        if((adaptiveMode) && (_testMode != 0))
+        {
+            myEvent->Wait(5000);
+
+            _acmA->SendCodec(sendCodec);
+            if(_testMode == 2) printf("[%d]  ", sendCodec.rate);
+            _acmB->SendCodec(sendCodec);
+            if(_testMode == 2) printf("[%d]  ", sendCodec.rate);
+        }
+    }
+
+    if(_testMode != 0)
+    {
+        printf("\n\nSide A statistics\n\n");
+        _channel_A2B->PrintStats(_paramISAC32kHz);
+
+        printf("\n\nSide B statistics\n\n");
+        _channel_B2A->PrintStats(_paramISAC16kHz);
+    }
+    
+    _channel_A2B->ResetStats();
+    _channel_B2A->ResetStats();
+
+    if(_testMode != 0) printf("\n");
+    _outFileA.Close();
+    _outFileB.Close();
+    _inFileA.Close();
+    _inFileB.Close();
+}
+
+void
+ISACTest::SwitchingSamplingRate(
+    int testNr, 
+    int maxSampRateChange)
+{
+    // Files in Side A 
+    _inFileA.Open(file_name_swb_, 32000, "rb");
+    _inFileB.Open(file_name_swb_, 32000, "rb");
+
+    std::string file_name_out;
+    std::stringstream file_stream_a;
+    std::stringstream file_stream_b;
+    file_stream_a << webrtc::test::OutputPath();
+    file_stream_b << webrtc::test::OutputPath();
+    if(_testMode == 0)
+    {
+        file_stream_a << "out_iSACTest_A_" << testNr << ".pcm";
+        file_stream_b << "out_iSACTest_B_" << testNr << ".pcm";
+    }
+    else
+    {
+        printf("\nTest %d", testNr);
+        printf("    Alternate between WB and SWB at the sender Side\n\n");
+        file_stream_a << "outA_" << testNr << ".pcm";
+        file_stream_b << "outB_" << testNr << ".pcm";
+    }
+    file_name_out = file_stream_a.str();
+    _outFileA.Open(file_name_out, 32000, "wb");
+    file_name_out = file_stream_b.str();
+    _outFileB.Open(file_name_out, 32000, "wb");
+
+    CHECK_ERROR(_acmA->RegisterSendCodec(_paramISAC32kHz));
+    CHECK_ERROR(_acmB->RegisterSendCodec(_paramISAC16kHz));
+    if(_testMode != 0)
+    {
+        printf("Side A Sending Super-Wideband \n");
+        printf("Side B Sending Wideband\n");
+    }
+
+    int numSendCodecChanged = 0;
+    _myTimer.Reset();
+    char currentTime[50];
+    while(numSendCodecChanged < (maxSampRateChange<<1))
+    {
+        Run10ms();
+        _myTimer.Tick10ms();
+        _myTimer.CurrentTimeHMS(currentTime);
+        if(_testMode == 2) printf("\r%s", currentTime);
+        if(_inFileA.EndOfFile())
+        {
+            if(_inFileA.SamplingFrequency() == 16000)
+            {
+                if(_testMode != 0) printf("\nSide A switched to Send Super-Wideband\n");
+                _inFileA.Close();
+                _inFileA.Open(file_name_swb_, 32000, "rb");
+                CHECK_ERROR(_acmA->RegisterSendCodec(_paramISAC32kHz));
+            }
+            else
+            {
+                if(_testMode != 0) printf("\nSide A switched to Send Wideband\n");
+                _inFileA.Close();
+                _inFileA.Open(file_name_swb_, 32000, "rb");
+                CHECK_ERROR(_acmA->RegisterSendCodec(_paramISAC16kHz));
+            }
+            numSendCodecChanged++;
+        }
+
+        if(_inFileB.EndOfFile())
+        {
+            if(_inFileB.SamplingFrequency() == 16000)
+            {
+                if(_testMode != 0) printf("\nSide B switched to Send Super-Wideband\n");
+                _inFileB.Close();
+                _inFileB.Open(file_name_swb_, 32000, "rb");
+                CHECK_ERROR(_acmB->RegisterSendCodec(_paramISAC32kHz));
+            }
+            else
+            {
+                if(_testMode != 0) printf("\nSide B switched to Send Wideband\n");
+                _inFileB.Close();
+                _inFileB.Open(file_name_swb_, 32000, "rb");
+                CHECK_ERROR(_acmB->RegisterSendCodec(_paramISAC16kHz));
+            }
+            numSendCodecChanged++;
+        }
+    }
+    _outFileA.Close();
+    _outFileB.Close();
+    _inFileA.Close();
+    _inFileB.Close();
+}
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/test/iSACTest.h b/src/modules/audio_coding/main/test/iSACTest.h
new file mode 100644
index 0000000..96d3fb6
--- /dev/null
+++ b/src/modules/audio_coding/main/test/iSACTest.h
@@ -0,0 +1,104 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef ACM_ISAC_TEST_H
+#define ACM_ISAC_TEST_H
+
+#include <string.h>
+
+#include "ACMTest.h"
+#include "Channel.h"
+#include "PCMFile.h"
+#include "audio_coding_module.h"
+#include "utility.h"
+#include "common_types.h"
+
+#define MAX_FILE_NAME_LENGTH_BYTE 500
+#define NO_OF_CLIENTS             15
+
+namespace webrtc {
+
+struct ACMTestISACConfig
+{
+    WebRtc_Word32  currentRateBitPerSec;
+    WebRtc_Word16  currentFrameSizeMsec;
+    WebRtc_UWord32 maxRateBitPerSec;
+    WebRtc_Word16  maxPayloadSizeByte;
+    WebRtc_Word16  encodingMode;
+    WebRtc_UWord32 initRateBitPerSec;
+    WebRtc_Word16  initFrameSizeInMsec;
+    bool           enforceFrameSize;
+};
+
+
+
+class ISACTest : public ACMTest
+{
+public:
+    ISACTest(int testMode);
+    ~ISACTest();
+
+    void Perform();
+private:
+    WebRtc_Word16 Setup();
+    WebRtc_Word16 SetupConference();
+    WebRtc_Word16 RunConference();    
+    
+
+    void Run10ms();
+
+    void EncodeDecode(
+        int                testNr,
+        ACMTestISACConfig& wbISACConfig,
+        ACMTestISACConfig& swbISACConfig);
+    
+    void TestBWE(
+        int testNr);
+
+    void SwitchingSamplingRate(
+        int testNr, 
+        int maxSampRateChange);
+
+    AudioCodingModule* _acmA;
+    AudioCodingModule* _acmB;
+
+    Channel* _channel_A2B;
+    Channel* _channel_B2A;
+
+    PCMFile _inFileA;
+    PCMFile _inFileB;
+
+    PCMFile _outFileA;
+    PCMFile _outFileB;
+
+    WebRtc_UWord8 _idISAC16kHz;
+    WebRtc_UWord8 _idISAC32kHz;
+    CodecInst _paramISAC16kHz;
+    CodecInst _paramISAC32kHz;
+
+    std::string file_name_swb_;
+
+    ACMTestTimer _myTimer;
+    int _testMode;
+    
+    AudioCodingModule* _defaultACM32;
+    AudioCodingModule* _defaultACM16;
+    
+    AudioCodingModule* _confACM[NO_OF_CLIENTS];
+    AudioCodingModule* _clientACM[NO_OF_CLIENTS];
+    Channel*               _conf2Client[NO_OF_CLIENTS];
+    Channel*               _client2Conf[NO_OF_CLIENTS];
+
+    PCMFile                _clientOutFile[NO_OF_CLIENTS];
+};
+
+} // namespace webrtc
+
+#endif
diff --git a/src/modules/audio_coding/main/test/utility.cc b/src/modules/audio_coding/main/test/utility.cc
new file mode 100644
index 0000000..56acbf7
--- /dev/null
+++ b/src/modules/audio_coding/main/test/utility.cc
@@ -0,0 +1,434 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "utility.h"
+
+#include <assert.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "audio_coding_module.h"
+#include "common_types.h"
+#include "gtest/gtest.h"
+
+#define NUM_CODECS_WITH_FIXED_PAYLOAD_TYPE 13
+
+namespace webrtc {
+
+ACMTestTimer::ACMTestTimer() :
+_msec(0),
+_sec(0),
+_min(0),
+_hour(0)
+{
+    return;
+}
+
+ACMTestTimer::~ACMTestTimer()
+{
+    return;
+}
+
+void ACMTestTimer::Reset()
+{
+    _msec = 0;
+    _sec = 0;
+    _min = 0;
+    _hour = 0;
+    return;
+}
+void ACMTestTimer::Tick10ms()
+{
+    _msec += 10;
+    Adjust();
+    return;
+}
+
+void ACMTestTimer::Tick1ms()
+{
+    _msec++;
+    Adjust();
+    return;
+}
+
+void ACMTestTimer::Tick100ms()
+{
+    _msec += 100;
+    Adjust();
+    return;
+}
+
+void ACMTestTimer::Tick1sec()
+{
+    _sec++;
+    Adjust();
+    return;
+}
+
+void ACMTestTimer::CurrentTimeHMS(char* currTime)
+{
+    sprintf(currTime, "%4lu:%02u:%06.3f", _hour, _min, (double)_sec + (double)_msec / 1000.); 
+    return;
+}
+
+void ACMTestTimer::CurrentTime(
+        unsigned long&  h, 
+        unsigned char&  m,
+        unsigned char&  s,
+        unsigned short& ms)
+{
+    h = _hour;
+    m = _min;
+    s = _sec;
+    ms = _msec;
+    return;
+}
+
+void ACMTestTimer::Adjust()
+{
+    unsigned int n;
+    if(_msec >= 1000)
+    {
+        n = _msec / 1000;
+        _msec -= (1000 * n);
+        _sec += n;
+    }
+    if(_sec >= 60)
+    {
+        n = _sec / 60;
+        _sec -= (n * 60);
+        _min += n;
+    }
+    if(_min >= 60)
+    {
+        n = _min / 60;
+        _min -= (n * 60);
+        _hour += n;
+    }
+}
+
+
+WebRtc_Word16
+ChooseCodec(
+    CodecInst& codecInst)
+{
+
+    PrintCodecs();
+    //AudioCodingModule* tmpACM = AudioCodingModule::Create(0);
+    WebRtc_UWord8 noCodec = AudioCodingModule::NumberOfCodecs();
+    WebRtc_Word8 codecID;
+    bool outOfRange = false;
+    char myStr[15] = "";
+    do
+    {
+        printf("\nChoose a codec [0]: ");
+        EXPECT_TRUE(fgets(myStr, 10, stdin) != NULL);
+        codecID = atoi(myStr);
+        if((codecID < 0) || (codecID >= noCodec))
+        {
+            printf("\nOut of range.\n");
+            outOfRange = true;
+        }
+    } while(outOfRange);
+
+    CHECK_ERROR(AudioCodingModule::Codec((WebRtc_UWord8)codecID, codecInst));
+    return 0;
+}
+
+void
+PrintCodecs()
+{
+    WebRtc_UWord8 noCodec = AudioCodingModule::NumberOfCodecs();
+        
+    CodecInst codecInst;
+    printf("No  Name                [Hz]    [bps]\n");     
+    for(WebRtc_UWord8 codecCntr = 0; codecCntr < noCodec; codecCntr++)
+    {
+        AudioCodingModule::Codec(codecCntr, codecInst);
+        printf("%2d- %-18s %5d   %6d\n", 
+            codecCntr, codecInst.plname, codecInst.plfreq, codecInst.rate);
+    }
+
+}
+
+CircularBuffer::CircularBuffer(WebRtc_UWord32 len):
+_buff(NULL),
+_idx(0),
+_buffIsFull(false),
+_calcAvg(false),
+_calcVar(false),
+_sum(0),
+_sumSqr(0)
+{
+    _buff = new double[len];
+    if(_buff == NULL)
+    {
+        _buffLen = 0;
+    }
+    else
+    {
+        for(WebRtc_UWord32 n = 0; n < len; n++)
+        {
+            _buff[n] = 0;
+        }
+        _buffLen = len;
+    }
+}
+
+CircularBuffer::~CircularBuffer()
+{
+    if(_buff != NULL)
+    {
+        delete [] _buff;
+        _buff = NULL;
+    }
+}
+
+void
+CircularBuffer::Update(
+    const double newVal)
+{
+    assert(_buffLen > 0);
+    
+    // store the value that is going to be overwritten
+    double oldVal = _buff[_idx];
+    // record the new value
+    _buff[_idx] = newVal;
+    // increment the index, to point to where we would
+    // write next
+    _idx++;
+    // it is a circular buffer, if we are at the end
+    // we have to cycle to the beginning 
+    if(_idx >= _buffLen)
+    {
+        // flag that the buffer is filled up.
+        _buffIsFull = true;
+        _idx = 0;
+    }
+    
+    // Update 
+
+    if(_calcAvg)
+    {
+        // for the average we have to update
+        // the sum
+        _sum += (newVal - oldVal);
+    }
+
+    if(_calcVar)
+    {
+        // to calculate variance we have to update
+        // the sum of squares 
+        _sumSqr += (double)(newVal - oldVal) * (double)(newVal + oldVal);
+    }
+}
+
+void 
+CircularBuffer::SetArithMean(
+    bool enable)
+{
+    assert(_buffLen > 0);
+
+    if(enable && !_calcAvg)
+    {
+        WebRtc_UWord32 lim;
+        if(_buffIsFull)
+        {
+            lim = _buffLen;
+        }
+        else
+        {
+            lim = _idx;
+        }
+        _sum = 0;
+        for(WebRtc_UWord32 n = 0; n < lim; n++)
+        {
+            _sum += _buff[n];
+        }
+    }
+    _calcAvg = enable;
+}
+
+void
+CircularBuffer::SetVariance(
+    bool enable)
+{
+    assert(_buffLen > 0);
+
+    if(enable && !_calcVar)
+    {
+        WebRtc_UWord32 lim;
+        if(_buffIsFull)
+        {
+            lim = _buffLen;
+        }
+        else
+        {
+            lim = _idx;
+        }
+        _sumSqr = 0;
+        for(WebRtc_UWord32 n = 0; n < lim; n++)
+        {
+            _sumSqr += _buff[n] * _buff[n];
+        }
+    }
+    _calcAvg = enable;
+}
+
+WebRtc_Word16
+CircularBuffer::ArithMean(double& mean)
+{
+    assert(_buffLen > 0);
+
+    if(_buffIsFull)
+    {
+
+        mean = _sum / (double)_buffLen;
+        return 0;
+    }
+    else
+    {
+        if(_idx > 0)
+        {
+            mean = _sum / (double)_idx;
+            return 0;
+        }
+        else
+        {
+            return -1;
+        }
+
+    }
+}
+
+WebRtc_Word16
+CircularBuffer::Variance(double& var)
+{
+    assert(_buffLen > 0);
+
+    if(_buffIsFull)
+    {
+        var = _sumSqr / (double)_buffLen;
+        return 0;
+    }
+    else
+    {
+        if(_idx > 0)
+        {
+            var = _sumSqr / (double)_idx;
+            return 0;
+        }
+        else
+        {
+            return -1;
+        }
+    }
+}
+
+
+
+bool
+FixedPayloadTypeCodec(const char* payloadName)
+{
+    char fixPayloadTypeCodecs[NUM_CODECS_WITH_FIXED_PAYLOAD_TYPE][32] = {
+        "PCMU",
+        "PCMA",
+        "GSM",
+        "G723",
+        "DVI4",
+        "LPC",
+        "PCMA",
+        "G722",
+        "QCELP",
+        "CN",
+        "MPA",
+        "G728",
+        "G729"
+    };
+
+    for(int n = 0; n < NUM_CODECS_WITH_FIXED_PAYLOAD_TYPE; n++)
+    {
+        if(!STR_CASE_CMP(payloadName, fixPayloadTypeCodecs[n]))
+        {
+            return true;
+        }
+    }
+    return false;
+}
+
+DTMFDetector::DTMFDetector()
+{
+    for(WebRtc_Word16 n = 0; n < 1000; n++)
+    {
+        _toneCntr[n] = 0;
+    }
+}
+
+DTMFDetector::~DTMFDetector()
+{
+}
+
+WebRtc_Word32 DTMFDetector::IncomingDtmf(const WebRtc_UWord8 digitDtmf, const bool /* toneEnded */)
+{
+    fprintf(stdout, "%d-",digitDtmf);
+    _toneCntr[digitDtmf]++;
+    return 0;
+}
+
+void DTMFDetector::PrintDetectedDigits()
+{
+    for(WebRtc_Word16 n = 0; n < 1000; n++)
+    {
+        if(_toneCntr[n] > 0)
+        {
+            fprintf(stdout, "%d %u  msec, \n", n, _toneCntr[n]*10);
+        }
+    }
+    fprintf(stdout, "\n");
+    return;
+}
+
+void 
+VADCallback::Reset()
+{
+    for(int n = 0; n < 6; n++)
+    {
+        _numFrameTypes[n] = 0;
+    }
+}
+
+VADCallback::VADCallback()
+{
+    for(int n = 0; n < 6; n++)
+    {
+        _numFrameTypes[n] = 0;
+    }
+}
+
+void
+VADCallback::PrintFrameTypes()
+{
+    fprintf(stdout, "No encoding.................. %d\n", _numFrameTypes[0]);
+    fprintf(stdout, "Active normal encoded........ %d\n", _numFrameTypes[1]);
+    fprintf(stdout, "Passive normal encoded....... %d\n", _numFrameTypes[2]);
+    fprintf(stdout, "Passive DTX wideband......... %d\n", _numFrameTypes[3]);
+    fprintf(stdout, "Passive DTX narrowband....... %d\n", _numFrameTypes[4]);
+    fprintf(stdout, "Passive DTX super-wideband... %d\n", _numFrameTypes[5]);
+}
+
+WebRtc_Word32 
+VADCallback::InFrameType(
+    WebRtc_Word16 frameType)
+{
+    _numFrameTypes[frameType]++;
+    return 0;
+}
+
+} // namespace webrtc
diff --git a/src/modules/audio_coding/main/test/utility.h b/src/modules/audio_coding/main/test/utility.h
new file mode 100644
index 0000000..887c735
--- /dev/null
+++ b/src/modules/audio_coding/main/test/utility.h
@@ -0,0 +1,195 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef ACM_TEST_UTILITY_H
+#define ACM_TEST_UTILITY_H
+
+#include "audio_coding_module.h"
+#include "gtest/gtest.h"
+
+namespace webrtc {
+
+//-----------------------------
+#define CHECK_ERROR(f)                                                                      \
+    do {                                                                                    \
+        EXPECT_GE(f, 0) << "Error Calling API";                                             \
+    }while(0)
+
+//-----------------------------
+#define CHECK_PROTECTED(f)                                                                  \
+    do {                                                                                    \
+        if(f >= 0) {                                                                        \
+            ADD_FAILURE() << "Error Calling API";                                           \
+        }                                                                                   \
+        else {                                                                              \
+            printf("An expected error is caught.\n");                                       \
+        }                                                                                   \
+    }while(0)
+
+//----------------------------
+#define CHECK_ERROR_MT(f)                                                                   \
+    do {                                                                                    \
+        if(f < 0) {                                                                         \
+            fprintf(stderr, "Error Calling API in file %s at line %d \n",                   \
+                __FILE__, __LINE__);                                                        \
+        }                                                                                   \
+    }while(0)
+
+//----------------------------
+#define CHECK_PROTECTED_MT(f)                                                               \
+    do {                                                                                    \
+        if(f >= 0) {                                                                        \
+            fprintf(stderr, "Error Calling API in file %s at line %d \n",                   \
+                __FILE__, __LINE__);                                                        \
+        }                                                                                   \
+        else {                                                                              \
+            printf("An expected error is caught.\n");                                       \
+        }                                                                                   \
+    }while(0)
+
+
+
+#ifdef WIN32
+    /* Exclude rarely-used stuff from Windows headers */
+    //#define WIN32_LEAN_AND_MEAN 
+    /* OS-dependent case-insensitive string comparison */
+    #define STR_CASE_CMP(x,y) ::_stricmp(x,y)
+#else
+    /* OS-dependent case-insensitive string comparison */
+    #define STR_CASE_CMP(x,y) ::strcasecmp(x,y)
+#endif
+
+#define DESTROY_ACM(acm)                                                                    \
+    do {                                                                                    \
+        if(acm != NULL) {                                                                   \
+            AudioCodingModule::Destroy(acm);                       \
+            acm = NULL;                                                                     \
+        }                                                                                   \
+    } while(0)
+
+
+#define DELETE_POINTER(p)                                                                   \
+    do {                                                                                    \
+        if(p != NULL) {                                                                     \
+            delete p;                                                                       \
+            p = NULL;                                                                       \
+        }                                                                                   \
+    } while(0)
+
+class ACMTestTimer
+{
+public:
+    ACMTestTimer();
+    ~ACMTestTimer();
+
+    void Reset();
+    void Tick10ms();
+    void Tick1ms();
+    void Tick100ms();
+    void Tick1sec();
+    void CurrentTimeHMS(
+        char* currTime);
+    void CurrentTime(
+        unsigned long&  h, 
+        unsigned char&  m,
+        unsigned char&  s,
+        unsigned short& ms);
+
+private:
+    void Adjust();
+
+    unsigned short _msec;
+    unsigned char  _sec;
+    unsigned char  _min;
+    unsigned long  _hour;  
+};
+
+
+
+class CircularBuffer
+{
+public:
+    CircularBuffer(WebRtc_UWord32 len);
+    ~CircularBuffer();
+
+    void SetArithMean(
+        bool enable);
+    void SetVariance(
+        bool enable);
+
+    void Update(
+        const double newVal);
+    void IsBufferFull();
+    
+    WebRtc_Word16 Variance(double& var);
+    WebRtc_Word16 ArithMean(double& mean);
+
+protected:
+    double* _buff;
+    WebRtc_UWord32 _idx;
+    WebRtc_UWord32 _buffLen;
+
+    bool         _buffIsFull;
+    bool         _calcAvg;
+    bool         _calcVar;
+    double       _sum;
+    double       _sumSqr;
+};
+
+
+
+
+
+WebRtc_Word16 ChooseCodec(
+    CodecInst& codecInst);
+
+void PrintCodecs();
+
+bool FixedPayloadTypeCodec(const char* payloadName);
+
+
+
+
+class DTMFDetector: public AudioCodingFeedback
+{
+public:
+    DTMFDetector();
+    ~DTMFDetector();
+    // used for inband DTMF detection
+    WebRtc_Word32 IncomingDtmf(const WebRtc_UWord8 digitDtmf, const bool toneEnded);
+    void PrintDetectedDigits();
+
+private:
+    WebRtc_UWord32 _toneCntr[1000];
+
+};
+
+
+
+
+class VADCallback : public ACMVADCallback
+{
+public:
+    VADCallback();
+    ~VADCallback(){}
+
+    WebRtc_Word32 InFrameType(
+        WebRtc_Word16 frameType);
+
+    void PrintFrameTypes();
+    void Reset();
+
+private:
+    WebRtc_UWord32 _numFrameTypes[6];
+};
+
+} // namespace webrtc
+
+#endif // ACM_TEST_UTILITY_H
diff --git a/src/modules/audio_coding/neteq/OWNERS b/src/modules/audio_coding/neteq/OWNERS
new file mode 100644
index 0000000..1d25542
--- /dev/null
+++ b/src/modules/audio_coding/neteq/OWNERS
@@ -0,0 +1,2 @@
+henrik.lundin@webrtc.org
+tina.legrand@webrtc.org
diff --git a/src/modules/audio_coding/neteq/accelerate.c b/src/modules/audio_coding/neteq/accelerate.c
new file mode 100644
index 0000000..285de4d
--- /dev/null
+++ b/src/modules/audio_coding/neteq/accelerate.c
@@ -0,0 +1,489 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the Accelerate algorithm that is used to reduce
+ * the delay by removing a part of the audio stream.
+ */
+
+#include "dsp.h"
+
+#include "signal_processing_library.h"
+
+#include "dsp_helpfunctions.h"
+#include "neteq_error_codes.h"
+
+#define ACCELERATE_CORR_LEN 50
+#define ACCELERATE_MIN_LAG 10
+#define ACCELERATE_MAX_LAG 60
+#define ACCELERATE_DOWNSAMPLED_LEN (ACCELERATE_CORR_LEN + ACCELERATE_MAX_LAG)
+
+/* Scratch usage:
+
+ Type	        Name                size    startpos    endpos
+ WebRtc_Word16  pw16_downSampSpeech 110     0           109
+ WebRtc_Word32  pw32_corr           2*50    110         209
+ WebRtc_Word16  pw16_corr           50      0           49
+
+ Total: 110+2*50
+ */
+
+#define	 SCRATCH_PW16_DS_SPEECH			0
+#define	 SCRATCH_PW32_CORR				ACCELERATE_DOWNSAMPLED_LEN
+#define	 SCRATCH_PW16_CORR				0
+
+/****************************************************************************
+ * WebRtcNetEQ_Accelerate(...)
+ *
+ * This function tries to shorten the audio data by removing one or several
+ * pitch periods. The operation is only carried out if the correlation is
+ * strong or if the signal energy is very low.
+ *
+ * Input:
+ *		- inst			: NetEQ DSP instance
+ *      - scratchPtr    : Pointer to scratch vector.
+ *		- decoded	    : Pointer to newly decoded speech.
+ *		- len           : Length of decoded speech.
+ *      - BGNonly       : If non-zero, Accelerate will only remove the last 
+ *                        DEFAULT_TIME_ADJUST seconds of the input.
+ *                        No signal matching is done.
+ *
+ * Output:
+ *		- inst			: Updated instance
+ *		- outData		: Pointer to a memory space where the output data
+ *						  should be stored
+ *		- pw16_len		: Number of samples written to outData.
+ *
+ * Return value			:  0 - Ok
+ *						  <0 - Error
+ */
+
+int WebRtcNetEQ_Accelerate(DSPInst_t *inst,
+#ifdef SCRATCH
+                           WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                           const WebRtc_Word16 *pw16_decoded, int len,
+                           WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
+                           WebRtc_Word16 BGNonly)
+{
+
+#ifdef SCRATCH
+    /* Use scratch memory for internal temporary vectors */
+    WebRtc_Word16 *pw16_downSampSpeech = pw16_scratchPtr + SCRATCH_PW16_DS_SPEECH;
+    WebRtc_Word32 *pw32_corr = (WebRtc_Word32*) (pw16_scratchPtr + SCRATCH_PW32_CORR);
+    WebRtc_Word16 *pw16_corr = pw16_scratchPtr + SCRATCH_PW16_CORR;
+#else
+    /* Allocate memory for temporary vectors */
+    WebRtc_Word16 pw16_downSampSpeech[ACCELERATE_DOWNSAMPLED_LEN];
+    WebRtc_Word32 pw32_corr[ACCELERATE_CORR_LEN];
+    WebRtc_Word16 pw16_corr[ACCELERATE_CORR_LEN];
+#endif
+    WebRtc_Word16 w16_decodedMax = 0;
+    WebRtc_Word16 w16_tmp;
+    WebRtc_Word16 w16_tmp2;
+    WebRtc_Word32 w32_tmp;
+    WebRtc_Word32 w32_tmp2;
+
+    const WebRtc_Word16 w16_startLag = ACCELERATE_MIN_LAG;
+    const WebRtc_Word16 w16_endLag = ACCELERATE_MAX_LAG;
+    const WebRtc_Word16 w16_corrLen = ACCELERATE_CORR_LEN;
+    const WebRtc_Word16 *pw16_vec1, *pw16_vec2;
+    WebRtc_Word16 *pw16_vectmp;
+    WebRtc_Word16 w16_inc, w16_startfact;
+    WebRtc_Word16 w16_bestIndex, w16_bestVal;
+    WebRtc_Word16 w16_VAD = 1;
+    WebRtc_Word16 fsMult;
+    WebRtc_Word16 fsMult120;
+    WebRtc_Word32 w32_en1, w32_en2, w32_cc;
+    WebRtc_Word16 w16_en1, w16_en2;
+    WebRtc_Word16 w16_en1Scale, w16_en2Scale;
+    WebRtc_Word16 w16_sqrtEn1En2;
+    WebRtc_Word16 w16_bestCorr = 0;
+    int ok;
+
+#ifdef NETEQ_STEREO
+    MasterSlaveInfo *msInfo = inst->msInfo;
+#endif
+
+    fsMult = WebRtcNetEQ_CalcFsMult(inst->fs); /* Calculate fs/8000 */
+
+    /* Pre-calculate common multiplication with fsMult */
+    fsMult120 = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16(fsMult, 120); /* 15 ms */
+
+    inst->ExpandInst.w16_consecExp = 0; /* Last was not expand any more */
+
+    /* Sanity check for len variable; must be (almost) 30 ms 
+     (120*fsMult + max(bestIndex)) */
+    if (len < (WebRtc_Word16) WEBRTC_SPL_MUL_16_16((120 + 119), fsMult))
+    {
+        /* Length of decoded data too short */
+        inst->w16_mode = MODE_UNSUCCESS_ACCELERATE;
+        *pw16_len = len;
+
+        /* simply move all data from decoded to outData */
+        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+
+        return NETEQ_OTHER_ERROR;
+    }
+
+    /***********************************/
+    /* Special operations for BGN only */
+    /***********************************/
+
+    /* Check if "background noise only" flag is set */
+    if (BGNonly)
+    {
+        /* special operation for BGN only; simply remove a chunk of data */
+        w16_bestIndex = DEFAULT_TIME_ADJUST * WEBRTC_SPL_LSHIFT_W16(fsMult, 3); /* X*fs/1000 */
+
+        /* Sanity check for bestIndex */
+        if (w16_bestIndex > len)
+        { /* not good, do nothing instead */
+            inst->w16_mode = MODE_UNSUCCESS_ACCELERATE;
+            *pw16_len = len;
+
+            /* simply move all data from decoded to outData */
+            WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+
+            return NETEQ_OTHER_ERROR;
+        }
+
+        /* set length parameter */
+        *pw16_len = len - w16_bestIndex; /* we remove bestIndex samples */
+
+        /* copy to output */
+        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, *pw16_len);
+
+        /* set mode */
+        inst->w16_mode = MODE_LOWEN_ACCELERATE;
+
+        /* update statistics */
+        inst->statInst.accelerateLength += w16_bestIndex;
+
+        return 0;
+    } /* end of special code for BGN mode */
+
+#ifdef NETEQ_STEREO
+
+    /* Sanity for msInfo */
+    if (msInfo == NULL)
+    {
+        /* this should not happen here */
+        return MASTER_SLAVE_ERROR;
+    }
+
+    if (msInfo->msMode != NETEQ_SLAVE)
+    {
+        /* Find correlation lag only for non-slave instances */
+
+#endif
+
+        /****************************************************************/
+        /* Find the strongest correlation lag by downsampling to 4 kHz, */
+        /* calculating correlation for downsampled signal and finding   */
+        /* the strongest correlation peak.                              */
+        /****************************************************************/
+
+        /* find maximum absolute value */
+        w16_decodedMax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (WebRtc_Word16) len);
+
+        /* downsample the decoded speech to 4 kHz */
+        ok = WebRtcNetEQ_DownSampleTo4kHz(pw16_decoded, len, inst->fs, pw16_downSampSpeech,
+            ACCELERATE_DOWNSAMPLED_LEN, 1 /* compensate delay*/);
+        if (ok != 0)
+        {
+            /* error */
+            inst->w16_mode = MODE_UNSUCCESS_ACCELERATE;
+            *pw16_len = len;
+            /* simply move all data from decoded to outData */
+            WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+            return NETEQ_OTHER_ERROR;
+        }
+
+        /*
+         * Set scaling factor for cross correlation to protect against overflow
+         * (log2(50) => 6)
+         */
+        w16_tmp = 6 - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax));
+        w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
+
+        /* Perform correlation from lag 10 to lag 60 in 4 kHz domain */
+        WebRtcNetEQ_CrossCorr(
+            pw32_corr, &pw16_downSampSpeech[w16_endLag],
+            &pw16_downSampSpeech[w16_endLag - w16_startLag], w16_corrLen,
+            (WebRtc_Word16) (w16_endLag - w16_startLag), w16_tmp, -1);
+
+        /* Normalize correlation to 14 bits and put in a WebRtc_Word16 vector */
+        w32_tmp = WebRtcSpl_MaxAbsValueW32(pw32_corr, w16_corrLen);
+        w16_tmp = 17 - WebRtcSpl_NormW32(w32_tmp);
+        w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
+
+        WebRtcSpl_VectorBitShiftW32ToW16(pw16_corr, w16_corrLen, pw32_corr, w16_tmp);
+
+#ifdef NETEQ_STEREO
+    } /* end if (msInfo->msMode != NETEQ_SLAVE) */
+
+    if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
+    {
+        /* Find the strongest correlation peak by using the parabolic fit method */
+        WebRtcNetEQ_PeakDetection(pw16_corr, (WebRtc_Word16) w16_corrLen, 1, fsMult,
+            &w16_bestIndex, &w16_bestVal);
+        /* 0 <= bestIndex <= (2*corrLen - 1)*fsMult = 99*fsMult */
+
+        /* Compensate bestIndex for displaced starting position */
+        w16_bestIndex = w16_bestIndex + w16_startLag * WEBRTC_SPL_LSHIFT_W16(fsMult, 1);
+        /* 20*fsMult <= bestIndex <= 119*fsMult */
+
+        msInfo->bestIndex = w16_bestIndex;
+    }
+    else if (msInfo->msMode == NETEQ_SLAVE)
+    {
+        if (msInfo->extraInfo == ACC_FAIL)
+        {
+            /* Master has signaled an unsuccessful accelerate */
+            w16_bestIndex = 0;
+        }
+        else
+        {
+            /* Get best index from master */
+            w16_bestIndex = msInfo->bestIndex;
+        }
+    }
+    else
+    {
+        /* Invalid mode */
+        return MASTER_SLAVE_ERROR;
+    }
+
+#else /* NETEQ_STEREO */
+
+    /* Find the strongest correlation peak by using the parabolic fit method */
+    WebRtcNetEQ_PeakDetection(pw16_corr, (WebRtc_Word16) w16_corrLen, 1, fsMult,
+        &w16_bestIndex, &w16_bestVal);
+    /* 0 <= bestIndex <= (2*corrLen - 1)*fsMult = 99*fsMult */
+
+    /* Compensate bestIndex for displaced starting position */
+    w16_bestIndex = w16_bestIndex + w16_startLag * WEBRTC_SPL_LSHIFT_W16(fsMult, 1);
+    /* 20*fsMult <= bestIndex <= 119*fsMult */
+
+#endif /* NETEQ_STEREO */
+
+#ifdef NETEQ_STEREO
+
+    if (msInfo->msMode != NETEQ_SLAVE)
+    {
+        /* Calculate correlation only for non-slave instances */
+
+#endif /* NETEQ_STEREO */
+
+        /*****************************************************/
+        /* Calculate correlation bestCorr for the found lag. */
+        /* Also do a simple VAD decision.                    */
+        /*****************************************************/
+
+        /*
+         * Calculate scaling to ensure that bestIndex samples can be square-summed
+         * without overflowing
+         */
+        w16_tmp = (31
+            - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax)));
+        w16_tmp += (31 - WebRtcSpl_NormW32(w16_bestIndex));
+        w16_tmp -= 31;
+        w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
+
+        /* vec1 starts at 15 ms minus one pitch period */
+        pw16_vec1 = &pw16_decoded[fsMult120 - w16_bestIndex];
+        /* vec2 start at 15 ms */
+        pw16_vec2 = &pw16_decoded[fsMult120];
+
+        /* Calculate energies for vec1 and vec2 */
+        w32_en1 = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec1,
+            (WebRtc_Word16*) pw16_vec1, w16_bestIndex, w16_tmp);
+        w32_en2 = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec2,
+            (WebRtc_Word16*) pw16_vec2, w16_bestIndex, w16_tmp);
+
+        /* Calculate cross-correlation at the found lag */
+        w32_cc = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec1, (WebRtc_Word16*) pw16_vec2,
+            w16_bestIndex, w16_tmp);
+
+        /* Check VAD constraint 
+         ((en1+en2)/(2*bestIndex)) <= 8*inst->BGNInst.energy */
+        w32_tmp = WEBRTC_SPL_RSHIFT_W32(w32_en1 + w32_en2, 4); /* (en1+en2)/(2*8) */
+        if (inst->BGNInst.w16_initialized == 1)
+        {
+            w32_tmp2 = inst->BGNInst.w32_energy;
+        }
+        else
+        {
+            /* if BGN parameters have not been estimated, use a fixed threshold */
+            w32_tmp2 = 75000;
+        }
+        w16_tmp2 = 16 - WebRtcSpl_NormW32(w32_tmp2);
+        w16_tmp2 = WEBRTC_SPL_MAX(0, w16_tmp2);
+        w32_tmp = WEBRTC_SPL_RSHIFT_W32(w32_tmp, w16_tmp2);
+        w16_tmp2 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_tmp2, w16_tmp2);
+        w32_tmp2 = WEBRTC_SPL_MUL_16_16(w16_bestIndex, w16_tmp2);
+
+        /* Scale w32_tmp properly before comparing with w32_tmp2 */
+        /* (w16_tmp is scaling before energy calculation, thus 2*w16_tmp) */
+        if (WebRtcSpl_NormW32(w32_tmp) < WEBRTC_SPL_LSHIFT_W32(w16_tmp,1))
+        {
+            /* Cannot scale only w32_tmp, must scale w32_temp2 too */
+            WebRtc_Word16 tempshift = WebRtcSpl_NormW32(w32_tmp);
+            w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, tempshift);
+            w32_tmp2 = WEBRTC_SPL_RSHIFT_W32(w32_tmp2,
+                WEBRTC_SPL_LSHIFT_W32(w16_tmp,1) - tempshift);
+        }
+        else
+        {
+            w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp,
+                WEBRTC_SPL_LSHIFT_W32(w16_tmp,1));
+        }
+
+        if (w32_tmp <= w32_tmp2) /*((en1+en2)/(2*bestIndex)) <= 8*inst->BGNInst.energy */
+        {
+            /* The signal seems to be passive speech */
+            w16_VAD = 0;
+            w16_bestCorr = 0; /* Correlation does not matter */
+        }
+        else
+        {
+            /* The signal is active speech */
+            w16_VAD = 1;
+
+            /* Calculate correlation (cc/sqrt(en1*en2)) */
+
+            /* Start with calculating scale values */
+            w16_en1Scale = 16 - WebRtcSpl_NormW32(w32_en1);
+            w16_en1Scale = WEBRTC_SPL_MAX(0, w16_en1Scale);
+            w16_en2Scale = 16 - WebRtcSpl_NormW32(w32_en2);
+            w16_en2Scale = WEBRTC_SPL_MAX(0, w16_en2Scale);
+
+            /* Make sure total scaling is even (to simplify scale factor after sqrt) */
+            if ((w16_en1Scale + w16_en2Scale) & 1)
+            {
+                w16_en1Scale += 1;
+            }
+
+            /* Convert energies to WebRtc_Word16 */
+            w16_en1 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_en1, w16_en1Scale);
+            w16_en2 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale);
+
+            /* Calculate energy product */
+            w32_tmp = WEBRTC_SPL_MUL_16_16(w16_en1, w16_en2);
+
+            /* Calculate square-root of energy product */
+            w16_sqrtEn1En2 = (WebRtc_Word16) WebRtcSpl_SqrtFloor(w32_tmp);
+
+            /* Calculate cc/sqrt(en1*en2) in Q14 */
+            w16_tmp = 14 - WEBRTC_SPL_RSHIFT_W16(w16_en1Scale+w16_en2Scale, 1);
+            w32_cc = WEBRTC_SPL_SHIFT_W32(w32_cc, w16_tmp);
+            w32_cc = WEBRTC_SPL_MAX(0, w32_cc); /* Don't divide with negative number */
+            w16_bestCorr = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_cc, w16_sqrtEn1En2);
+            w16_bestCorr = WEBRTC_SPL_MIN(16384, w16_bestCorr); /* set maximum to 1.0 */
+        }
+
+#ifdef NETEQ_STEREO
+
+    } /* end if (msInfo->msMode != NETEQ_SLAVE) */
+
+#endif /* NETEQ_STEREO */
+
+    /************************************************/
+    /* Check accelerate criteria and remove samples */
+    /************************************************/
+
+    /* Check for strong correlation (>0.9) or passive speech */
+#ifdef NETEQ_STEREO
+    if ((((w16_bestCorr > 14746) || (w16_VAD == 0)) && (msInfo->msMode != NETEQ_SLAVE))
+        || ((msInfo->msMode == NETEQ_SLAVE) && (msInfo->extraInfo != ACC_FAIL)))
+#else
+    if ((w16_bestCorr > 14746) || (w16_VAD == 0))
+#endif
+    {
+        /* Do accelerate operation by overlap add */
+
+        /*
+         * Calculate cross-fading slope so that the fading factor goes from
+         * 1 (16384 in Q14) to 0 in one pitch period (bestIndex).
+         */
+        w16_inc = (WebRtc_Word16) WebRtcSpl_DivW32W16((WebRtc_Word32) 16384,
+            (WebRtc_Word16) (w16_bestIndex + 1)); /* in Q14 */
+
+        /* Initiate fading factor */
+        w16_startfact = 16384 - w16_inc;
+
+        /* vec1 starts at 15 ms minus one pitch period */
+        pw16_vec1 = &pw16_decoded[fsMult120 - w16_bestIndex];
+        /* vec2 start at 15 ms */
+        pw16_vec2 = &pw16_decoded[fsMult120];
+
+        /* Copy unmodified part [0 to 15 ms minus 1 pitch period] */
+        w16_tmp = (fsMult120 - w16_bestIndex);
+        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, w16_tmp);
+
+        /* Generate interpolated part of length bestIndex (1 pitch period) */
+        pw16_vectmp = pw16_outData + w16_tmp; /* start of interpolation output */
+        /* Reuse mixing function from Expand */
+        WebRtcNetEQ_MixVoiceUnvoice(pw16_vectmp, (WebRtc_Word16*) pw16_vec1,
+            (WebRtc_Word16*) pw16_vec2, &w16_startfact, w16_inc, w16_bestIndex);
+
+        /* Move the last part (also unmodified) */
+        /* Take from decoded at 15 ms + 1 pitch period */
+        pw16_vec2 = &pw16_decoded[fsMult120 + w16_bestIndex];
+        WEBRTC_SPL_MEMMOVE_W16(&pw16_outData[fsMult120], pw16_vec2,
+            (WebRtc_Word16) (len - fsMult120 - w16_bestIndex));
+
+        /* Set the mode flag */
+        if (w16_VAD)
+        {
+            inst->w16_mode = MODE_SUCCESS_ACCELERATE;
+        }
+        else
+        {
+            inst->w16_mode = MODE_LOWEN_ACCELERATE;
+        }
+
+        /* Calculate resulting length = original length - pitch period */
+        *pw16_len = len - w16_bestIndex;
+
+        /* Update in-call statistics */
+        inst->statInst.accelerateLength += w16_bestIndex;
+
+        return 0;
+    }
+    else
+    {
+        /* Accelerate not allowed */
+
+#ifdef NETEQ_STEREO
+        /* Signal to slave(s) that this was unsuccessful */
+        if (msInfo->msMode == NETEQ_MASTER)
+        {
+            msInfo->extraInfo = ACC_FAIL;
+        }
+#endif
+
+        /* Set mode flag to unsuccessful accelerate */
+        inst->w16_mode = MODE_UNSUCCESS_ACCELERATE;
+
+        /* Length is unmodified */
+        *pw16_len = len;
+
+        /* Simply move all data from decoded to outData */
+        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+
+        return 0;
+    }
+}
+
+#undef SCRATCH_PW16_DS_SPEECH
+#undef SCRATCH_PW32_CORR
+#undef SCRATCH_PW16_CORR
diff --git a/src/modules/audio_coding/neteq/automode.c b/src/modules/audio_coding/neteq/automode.c
new file mode 100644
index 0000000..d8d56c6
--- /dev/null
+++ b/src/modules/audio_coding/neteq/automode.c
@@ -0,0 +1,740 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the implementation of automatic buffer level optimization.
+ */
+
+#include "automode.h"
+
+#include <assert.h>
+
+#include "signal_processing_library.h"
+
+#include "neteq_defines.h"
+
+#ifdef NETEQ_DELAY_LOGGING
+/* special code for offline delay logging */
+#include <stdio.h>
+#include "delay_logging.h"
+
+extern FILE *delay_fid2; /* file pointer to delay log file */
+#endif /* NETEQ_DELAY_LOGGING */
+
+
+int WebRtcNetEQ_UpdateIatStatistics(AutomodeInst_t *inst, int maxBufLen,
+                                    WebRtc_UWord16 seqNumber, WebRtc_UWord32 timeStamp,
+                                    WebRtc_Word32 fsHz, int mdCodec, int streamingMode)
+{
+    WebRtc_UWord32 timeIat; /* inter-arrival time */
+    int i;
+    WebRtc_Word32 tempsum = 0; /* temp summation */
+    WebRtc_Word32 tempvar; /* temporary variable */
+    int retval = 0; /* return value */
+    WebRtc_Word16 packetLenSamp; /* packet speech length in samples */
+
+    /****************/
+    /* Sanity check */
+    /****************/
+
+    if (maxBufLen <= 1 || fsHz <= 0)
+    {
+        /* maxBufLen must be at least 2 and fsHz must both be strictly positive */
+        return -1;
+    }
+
+    /****************************/
+    /* Update packet statistics */
+    /****************************/
+
+    /* Try calculating packet length from current and previous timestamps */
+    if ((timeStamp <= inst->lastTimeStamp) || (seqNumber <= inst->lastSeqNo))
+    {
+        /* Wrong timestamp or sequence order; revert to backup plan */
+        packetLenSamp = inst->packetSpeechLenSamp; /* use stored value */
+    }
+    else
+    {
+        /* calculate timestamps per packet */
+        packetLenSamp = (WebRtc_Word16) WebRtcSpl_DivU32U16(timeStamp - inst->lastTimeStamp,
+            seqNumber - inst->lastSeqNo);
+    }
+
+    /* Check that the packet size is positive; if not, the statistics cannot be updated. */
+    if (packetLenSamp > 0)
+    { /* packet size ok */
+
+        /* calculate inter-arrival time in integer packets (rounding down) */
+        timeIat = WebRtcSpl_DivW32W16(inst->packetIatCountSamp, packetLenSamp);
+
+        /* Special operations for streaming mode */
+        if (streamingMode != 0)
+        {
+            /*
+             * Calculate IAT in Q8, including fractions of a packet (i.e., more accurate
+             * than timeIat).
+             */
+            WebRtc_Word16 timeIatQ8 = (WebRtc_Word16) WebRtcSpl_DivW32W16(
+                WEBRTC_SPL_LSHIFT_W32(inst->packetIatCountSamp, 8), packetLenSamp);
+
+            /*
+             * Calculate cumulative sum iat with sequence number compensation (ideal arrival
+             * times makes this sum zero).
+             */
+            inst->cSumIatQ8 += (timeIatQ8
+                - WEBRTC_SPL_LSHIFT_W32(seqNumber - inst->lastSeqNo, 8));
+
+            /* subtract drift term */
+            inst->cSumIatQ8 -= CSUM_IAT_DRIFT;
+
+            /* ensure not negative */
+            inst->cSumIatQ8 = WEBRTC_SPL_MAX(inst->cSumIatQ8, 0);
+
+            /* remember max */
+            if (inst->cSumIatQ8 > inst->maxCSumIatQ8)
+            {
+                inst->maxCSumIatQ8 = inst->cSumIatQ8;
+                inst->maxCSumUpdateTimer = 0;
+            }
+
+            /* too long since the last maximum was observed; decrease max value */
+            if (inst->maxCSumUpdateTimer > (WebRtc_UWord32) WEBRTC_SPL_MUL_32_16(fsHz,
+                MAX_STREAMING_PEAK_PERIOD))
+            {
+                inst->maxCSumIatQ8 -= 4; /* remove 1000*4/256 = 15.6 ms/s */
+            }
+        } /* end of streaming mode */
+
+        /* check for discontinuous packet sequence and re-ordering */
+        if (seqNumber > inst->lastSeqNo + 1)
+        {
+            /* Compensate for gap in the sequence numbers.
+             * Reduce IAT with expected extra time due to lost packets, but ensure that
+             * the IAT is not negative.
+             */
+            timeIat -= WEBRTC_SPL_MIN(timeIat,
+                (WebRtc_UWord32) (seqNumber - inst->lastSeqNo - 1));
+        }
+        else if (seqNumber < inst->lastSeqNo)
+        {
+            /* compensate for re-ordering */
+            timeIat += (WebRtc_UWord32) (inst->lastSeqNo + 1 - seqNumber);
+        }
+
+        /* saturate IAT at maximum value */
+        timeIat = WEBRTC_SPL_MIN( timeIat, MAX_IAT );
+
+        /* update iatProb = forgetting_factor * iatProb for all elements */
+        for (i = 0; i <= MAX_IAT; i++)
+        {
+            WebRtc_Word32 tempHi, tempLo; /* Temporary variables */
+
+            /*
+             * Multiply iatProbFact (Q15) with iatProb (Q30) and right-shift 15 steps
+             * to come back to Q30. The operation is done in two steps:
+             */
+
+            /*
+             * 1) Multiply the high 16 bits (15 bits + sign) of iatProb. Shift iatProb
+             * 16 steps right to get the high 16 bits in a WebRtc_Word16 prior to
+             * multiplication, and left-shift with 1 afterwards to come back to
+             * Q30 = (Q15 * (Q30>>16)) << 1.
+             */
+            tempHi = WEBRTC_SPL_MUL_16_16(inst->iatProbFact,
+                (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(inst->iatProb[i], 16));
+            tempHi = WEBRTC_SPL_LSHIFT_W32(tempHi, 1); /* left-shift 1 step */
+
+            /*
+             * 2) Isolate and multiply the low 16 bits of iatProb. Right-shift 15 steps
+             * afterwards to come back to Q30 = (Q15 * Q30) >> 15.
+             */
+            tempLo = inst->iatProb[i] & 0x0000FFFF; /* sift out the 16 low bits */
+            tempLo = WEBRTC_SPL_MUL_16_U16(inst->iatProbFact,
+                (WebRtc_UWord16) tempLo);
+            tempLo = WEBRTC_SPL_RSHIFT_W32(tempLo, 15);
+
+            /* Finally, add the high and low parts */
+            inst->iatProb[i] = tempHi + tempLo;
+
+            /* Sum all vector elements while we are at it... */
+            tempsum += inst->iatProb[i];
+        }
+
+        /*
+         * Increase the probability for the currently observed inter-arrival time
+         * with 1 - iatProbFact. The factor is in Q15, iatProb in Q30;
+         * hence, left-shift 15 steps to obtain result in Q30.
+         */
+        inst->iatProb[timeIat] += (32768 - inst->iatProbFact) << 15;
+
+        tempsum += (32768 - inst->iatProbFact) << 15; /* add to vector sum */
+
+        /*
+         * Update iatProbFact (changes only during the first seconds after reset)
+         * The factor converges to IAT_PROB_FACT.
+         */
+        inst->iatProbFact += (IAT_PROB_FACT - inst->iatProbFact + 3) >> 2;
+
+        /* iatProb should sum up to 1 (in Q30). */
+        tempsum -= 1 << 30; /* should be zero */
+
+        /* Check if it does, correct if it doesn't. */
+        if (tempsum > 0)
+        {
+            /* tempsum too large => decrease a few values in the beginning */
+            i = 0;
+            while (i <= MAX_IAT && tempsum > 0)
+            {
+                /* Remove iatProb[i] / 16 from iatProb, but not more than tempsum */
+                tempvar = WEBRTC_SPL_MIN(tempsum, inst->iatProb[i] >> 4);
+                inst->iatProb[i++] -= tempvar;
+                tempsum -= tempvar;
+            }
+        }
+        else if (tempsum < 0)
+        {
+            /* tempsum too small => increase a few values in the beginning */
+            i = 0;
+            while (i <= MAX_IAT && tempsum < 0)
+            {
+                /* Add iatProb[i] / 16 to iatProb, but not more than tempsum */
+                tempvar = WEBRTC_SPL_MIN(-tempsum, inst->iatProb[i] >> 4);
+                inst->iatProb[i++] += tempvar;
+                tempsum += tempvar;
+            }
+        }
+
+        /* Calculate optimal buffer level based on updated statistics */
+        tempvar = (WebRtc_Word32) WebRtcNetEQ_CalcOptimalBufLvl(inst, fsHz, mdCodec, timeIat,
+            streamingMode);
+        if (tempvar > 0)
+        {
+            inst->optBufLevel = (WebRtc_UWord16) tempvar;
+
+            if (streamingMode != 0)
+            {
+                inst->optBufLevel = WEBRTC_SPL_MAX(inst->optBufLevel,
+                    inst->maxCSumIatQ8);
+            }
+
+            /*********/
+            /* Limit */
+            /*********/
+
+            /* Subtract extra delay from maxBufLen */
+            if (inst->extraDelayMs > 0 && inst->packetSpeechLenSamp > 0)
+            {
+                maxBufLen -= inst->extraDelayMs / inst->packetSpeechLenSamp * fsHz / 1000;
+                maxBufLen = WEBRTC_SPL_MAX(maxBufLen, 1); // sanity: at least one packet
+            }
+
+            maxBufLen = WEBRTC_SPL_LSHIFT_W32(maxBufLen, 8); /* shift to Q8 */
+
+            /* Enforce upper limit; 75% of maxBufLen */
+            inst->optBufLevel = (WebRtc_UWord16) WEBRTC_SPL_MIN( inst->optBufLevel,
+                (maxBufLen >> 1) + (maxBufLen >> 2) ); /* 1/2 + 1/4 = 75% */
+        }
+        else
+        {
+            retval = (int) tempvar;
+        }
+
+    } /* end if */
+
+    /*******************************/
+    /* Update post-call statistics */
+    /*******************************/
+
+    /* Calculate inter-arrival time in ms = packetIatCountSamp / (fsHz / 1000) */
+    timeIat = WEBRTC_SPL_UDIV(
+        WEBRTC_SPL_UMUL_32_16(inst->packetIatCountSamp, (WebRtc_Word16) 1000),
+        (WebRtc_UWord32) fsHz);
+
+    /* Increase counter corresponding to current inter-arrival time */
+    if (timeIat > 2000)
+    {
+        inst->countIAT2000ms++;
+    }
+    else if (timeIat > 1000)
+    {
+        inst->countIAT1000ms++;
+    }
+    else if (timeIat > 500)
+    {
+        inst->countIAT500ms++;
+    }
+
+    if (timeIat > inst->longestIATms)
+    {
+        /* update maximum value */
+        inst->longestIATms = timeIat;
+    }
+
+    /***********************************/
+    /* Prepare for next packet arrival */
+    /***********************************/
+
+    inst->packetIatCountSamp = 0; /* reset inter-arrival time counter */
+
+    inst->lastSeqNo = seqNumber; /* remember current sequence number */
+
+    inst->lastTimeStamp = timeStamp; /* remember current timestamp */
+
+    return retval;
+}
+
+
+WebRtc_Word16 WebRtcNetEQ_CalcOptimalBufLvl(AutomodeInst_t *inst, WebRtc_Word32 fsHz,
+                                            int mdCodec, WebRtc_UWord32 timeIatPkts,
+                                            int streamingMode)
+{
+
+    WebRtc_Word32 sum1 = 1 << 30; /* assign to 1 in Q30 */
+    WebRtc_Word16 B;
+    WebRtc_UWord16 Bopt;
+    int i;
+    WebRtc_Word32 betaInv; /* optimization parameter */
+
+#ifdef NETEQ_DELAY_LOGGING
+    /* special code for offline delay logging */
+    int temp_var;
+#endif
+
+    /****************/
+    /* Sanity check */
+    /****************/
+
+    if (fsHz <= 0)
+    {
+        /* fsHz must be strictly positive */
+        return -1;
+    }
+
+    /***********************************************/
+    /* Get betaInv parameter based on playout mode */
+    /***********************************************/
+
+    if (streamingMode)
+    {
+        /* streaming (listen-only) mode */
+        betaInv = AUTOMODE_STREAMING_BETA_INV_Q30;
+    }
+    else
+    {
+        /* normal mode */
+        betaInv = AUTOMODE_BETA_INV_Q30;
+    }
+
+    /*******************************************************************/
+    /* Calculate optimal buffer level without considering jitter peaks */
+    /*******************************************************************/
+
+    /*
+     * Find the B for which the probability of observing an inter-arrival time larger
+     * than or equal to B is less than or equal to betaInv.
+     */
+    B = 0; /* start from the beginning of iatProb */
+    sum1 -= inst->iatProb[B]; /* ensure that optimal level is not less than 1 */
+
+    do
+    {
+        /*
+         * Subtract the probabilities one by one until the sum is no longer greater
+         * than betaInv.
+         */
+        sum1 -= inst->iatProb[++B];
+    }
+    while ((sum1 > betaInv) && (B < MAX_IAT));
+
+    Bopt = B; /* This is our primary value for the optimal buffer level Bopt */
+
+    if (mdCodec)
+    {
+        /*
+         * Use alternative cost function when multiple description codec is in use.
+         * Do not have to re-calculate all points, just back off a few steps from
+         * previous value of B.
+         */
+        WebRtc_Word32 sum2 = sum1; /* copy sum1 */
+
+        while ((sum2 <= betaInv + inst->iatProb[Bopt]) && (Bopt > 0))
+        {
+            /* Go backwards in the sum until the modified cost function solution is found */
+            sum2 += inst->iatProb[Bopt--];
+        }
+
+        Bopt++; /* This is the optimal level when using an MD codec */
+
+        /* Now, Bopt and B can have different values. */
+    }
+
+#ifdef NETEQ_DELAY_LOGGING
+    /* special code for offline delay logging */
+    temp_var = NETEQ_DELAY_LOGGING_SIGNAL_OPTBUF;
+    if (fwrite( &temp_var, sizeof(int), 1, delay_fid2 ) != 1) {
+      return -1;
+    }
+    temp_var = (int) (Bopt * inst->packetSpeechLenSamp);
+#endif
+
+    /******************************************************************/
+    /* Make levelFiltFact adaptive: Larger B <=> larger levelFiltFact */
+    /******************************************************************/
+
+    switch (B)
+    {
+        case 0:
+        case 1:
+        {
+            inst->levelFiltFact = 251;
+            break;
+        }
+        case 2:
+        case 3:
+        {
+            inst->levelFiltFact = 252;
+            break;
+        }
+        case 4:
+        case 5:
+        case 6:
+        case 7:
+        {
+            inst->levelFiltFact = 253;
+            break;
+        }
+        default: /* B > 7 */
+        {
+            inst->levelFiltFact = 254;
+            break;
+        }
+    }
+
+    /************************/
+    /* Peak mode operations */
+    /************************/
+
+    /* Compare current IAT with peak threshold
+     *
+     * If IAT > optimal level + threshold (+1 for MD codecs)
+     * or if IAT > 2 * optimal level (note: optimal level is in Q8):
+     */
+    if (timeIatPkts > (WebRtc_UWord32) (Bopt + inst->peakThresholdPkt + (mdCodec != 0))
+        || timeIatPkts > (WebRtc_UWord32) WEBRTC_SPL_LSHIFT_U16(Bopt, 1))
+    {
+        /* A peak is observed */
+
+        if (inst->peakIndex == -1)
+        {
+            /* this is the first peak; prepare for next peak */
+            inst->peakIndex = 0;
+            /* set the mode-disable counter */
+            inst->peakModeDisabled = WEBRTC_SPL_LSHIFT_W16(1, NUM_PEAKS_REQUIRED-2);
+        }
+        else if (inst->peakIatCountSamp
+            <=
+            (WebRtc_UWord32) WEBRTC_SPL_MUL_32_16(fsHz, MAX_PEAK_PERIOD))
+        {
+            /* This is not the first peak and the period time is valid */
+
+            /* store time elapsed since last peak */
+            inst->peakPeriodSamp[inst->peakIndex] = inst->peakIatCountSamp;
+
+            /* saturate height to 16 bits */
+            inst->peakHeightPkt[inst->peakIndex]
+                =
+                (WebRtc_Word16) WEBRTC_SPL_MIN(timeIatPkts, WEBRTC_SPL_WORD16_MAX);
+
+            /* increment peakIndex and wrap/modulo */
+            inst->peakIndex = (inst->peakIndex + 1) & PEAK_INDEX_MASK;
+
+            /* process peak vectors */
+            inst->curPeakHeight = 0;
+            inst->curPeakPeriod = 0;
+
+            for (i = 0; i < NUM_PEAKS; i++)
+            {
+                /* Find maximum of peak heights and peak periods */
+                inst->curPeakHeight
+                    = WEBRTC_SPL_MAX(inst->curPeakHeight, inst->peakHeightPkt[i]);
+                inst->curPeakPeriod
+                    = WEBRTC_SPL_MAX(inst->curPeakPeriod, inst->peakPeriodSamp[i]);
+
+            }
+
+            inst->peakModeDisabled >>= 1; /* decrease mode-disable "counter" */
+
+        }
+        else if (inst->peakIatCountSamp > (WebRtc_UWord32) WEBRTC_SPL_MUL_32_16(fsHz,
+            WEBRTC_SPL_LSHIFT_W16(MAX_PEAK_PERIOD, 1)))
+        {
+            /*
+             * More than 2 * MAX_PEAK_PERIOD has elapsed since last peak;
+             * too long time => reset peak statistics
+             */
+            inst->curPeakHeight = 0;
+            inst->curPeakPeriod = 0;
+            for (i = 0; i < NUM_PEAKS; i++)
+            {
+                inst->peakHeightPkt[i] = 0;
+                inst->peakPeriodSamp[i] = 0;
+            }
+
+            inst->peakIndex = -1; /* Next peak is first peak */
+            inst->peakIatCountSamp = 0;
+        }
+
+        inst->peakIatCountSamp = 0; /* Reset peak interval timer */
+    } /* end if peak is observed */
+
+    /* Evaluate peak mode conditions */
+
+    /*
+     * If not disabled (enough peaks have been observed) and
+     * time since last peak is less than two peak periods.
+     */
+    inst->peakFound = 0;
+    if ((!inst->peakModeDisabled) && (inst->peakIatCountSamp
+        <= WEBRTC_SPL_LSHIFT_W32(inst->curPeakPeriod , 1)))
+    {
+        /* Engage peak mode */
+        inst->peakFound = 1;
+        /* Set optimal buffer level to curPeakHeight (if it's not already larger) */
+        Bopt = WEBRTC_SPL_MAX(Bopt, inst->curPeakHeight);
+
+#ifdef NETEQ_DELAY_LOGGING
+        /* special code for offline delay logging */
+        temp_var = (int) -(Bopt * inst->packetSpeechLenSamp);
+#endif
+    }
+
+    /* Scale Bopt to Q8 */
+    Bopt = WEBRTC_SPL_LSHIFT_U16(Bopt,8);
+
+#ifdef NETEQ_DELAY_LOGGING
+    /* special code for offline delay logging */
+    if (fwrite( &temp_var, sizeof(int), 1, delay_fid2 ) != 1) {
+      return -1;
+    }
+#endif
+
+    /* Sanity check: Bopt must be strictly positive */
+    if (Bopt <= 0)
+    {
+        Bopt = WEBRTC_SPL_LSHIFT_W16(1, 8); /* 1 in Q8 */
+    }
+
+    return Bopt; /* return value in Q8 */
+}
+
+
+int WebRtcNetEQ_BufferLevelFilter(WebRtc_Word32 curSizeMs8, AutomodeInst_t *inst,
+                                  int sampPerCall, WebRtc_Word16 fsMult)
+{
+
+    WebRtc_Word16 curSizeFrames;
+
+    /****************/
+    /* Sanity check */
+    /****************/
+
+    if (sampPerCall <= 0 || fsMult <= 0)
+    {
+        /* sampPerCall and fsMult must both be strictly positive */
+        return -1;
+    }
+
+    /* Check if packet size has been detected */
+    if (inst->packetSpeechLenSamp > 0)
+    {
+        /*
+         * Current buffer level in packet lengths
+         * = (curSizeMs8 * fsMult) / packetSpeechLenSamp
+         */
+        curSizeFrames = (WebRtc_Word16) WebRtcSpl_DivW32W16(
+            WEBRTC_SPL_MUL_32_16(curSizeMs8, fsMult), inst->packetSpeechLenSamp);
+    }
+    else
+    {
+        curSizeFrames = 0;
+    }
+
+    /* Filter buffer level */
+    if (inst->levelFiltFact > 0) /* check that filter factor is set */
+    {
+        /* Filter:
+         * buffLevelFilt = levelFiltFact * buffLevelFilt
+         *                  + (1-levelFiltFact) * curSizeFrames
+         *
+         * levelFiltFact is in Q8
+         */
+        inst->buffLevelFilt = (WebRtc_UWord16) (WEBRTC_SPL_RSHIFT_W32(
+            WEBRTC_SPL_MUL_16_U16(inst->levelFiltFact, inst->buffLevelFilt), 8)
+            + WEBRTC_SPL_MUL_16_16(256 - inst->levelFiltFact, curSizeFrames));
+    }
+
+    /* Account for time-scale operations (accelerate and pre-emptive expand) */
+    if (inst->prevTimeScale)
+    {
+        /*
+         * Time-scaling has been performed since last filter update.
+         * Subtract the sampleMemory from buffLevelFilt after converting sampleMemory
+         * from samples to packets in Q8. Make sure that the filtered value is
+         * non-negative.
+         */
+        inst->buffLevelFilt = (WebRtc_UWord16) WEBRTC_SPL_MAX( inst->buffLevelFilt -
+            WebRtcSpl_DivW32W16(
+                WEBRTC_SPL_LSHIFT_W32(inst->sampleMemory, 8), /* sampleMemory in Q8 */
+                inst->packetSpeechLenSamp ), /* divide by packetSpeechLenSamp */
+            0);
+
+        /*
+         * Reset flag and set timescaleHoldOff timer to prevent further time-scaling
+         * for some time.
+         */
+        inst->prevTimeScale = 0;
+        inst->timescaleHoldOff = AUTOMODE_TIMESCALE_LIMIT;
+    }
+
+    /* Update time counters and HoldOff timer */
+    inst->packetIatCountSamp += sampPerCall; /* packet inter-arrival time */
+    inst->peakIatCountSamp += sampPerCall; /* peak inter-arrival time */
+    inst->timescaleHoldOff >>= 1; /* time-scaling limiter */
+    inst->maxCSumUpdateTimer += sampPerCall; /* cumulative-sum timer */
+
+    return 0;
+
+}
+
+
+int WebRtcNetEQ_SetPacketSpeechLen(AutomodeInst_t *inst, WebRtc_Word16 newLenSamp,
+                                   WebRtc_Word32 fsHz)
+{
+
+    /* Sanity check for newLenSamp and fsHz */
+    if (newLenSamp <= 0 || fsHz <= 0)
+    {
+        return -1;
+    }
+
+    inst->packetSpeechLenSamp = newLenSamp; /* Store packet size in instance */
+
+    /* Make NetEQ wait for first regular packet before starting the timer */
+    inst->lastPackCNGorDTMF = 1;
+
+    inst->packetIatCountSamp = 0; /* Reset packet time counter */
+
+    /*
+     * Calculate peak threshold from packet size. The threshold is defined as
+     * the (fractional) number of packets that corresponds to PEAK_HEIGHT
+     * (in Q8 seconds). That is, threshold = PEAK_HEIGHT/256 * fsHz / packLen.
+     */
+    inst->peakThresholdPkt = (WebRtc_UWord16) WebRtcSpl_DivW32W16ResW16(
+        WEBRTC_SPL_MUL_16_16_RSFT(PEAK_HEIGHT,
+            (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(fsHz, 6), 2), inst->packetSpeechLenSamp);
+
+    return 0;
+}
+
+
+int WebRtcNetEQ_ResetAutomode(AutomodeInst_t *inst, int maxBufLenPackets)
+{
+
+    int i;
+    WebRtc_UWord16 tempprob = 0x4002; /* 16384 + 2 = 100000000000010 binary; */
+
+    /* Sanity check for maxBufLenPackets */
+    if (maxBufLenPackets <= 1)
+    {
+        /* Invalid value; set to 10 instead (arbitary small number) */
+        maxBufLenPackets = 10;
+    }
+
+    /* Reset filtered buffer level */
+    inst->buffLevelFilt = 0;
+
+    /* Reset packet size to unknown */
+    inst->packetSpeechLenSamp = 0;
+
+    /*
+     * Flag that last packet was special payload, so that automode will treat the next speech
+     * payload as the first payload received.
+     */
+    inst->lastPackCNGorDTMF = 1;
+
+    /* Reset peak detection parameters */
+    inst->peakModeDisabled = 1; /* disable peak mode */
+    inst->peakIatCountSamp = 0;
+    inst->peakIndex = -1; /* indicates that no peak is registered */
+    inst->curPeakHeight = 0;
+    inst->curPeakPeriod = 0;
+    for (i = 0; i < NUM_PEAKS; i++)
+    {
+        inst->peakHeightPkt[i] = 0;
+        inst->peakPeriodSamp[i] = 0;
+    }
+
+    /*
+     * Set the iatProb PDF vector to an exponentially decaying distribution
+     * iatProb[i] = 0.5^(i+1), i = 0, 1, 2, ...
+     * iatProb is in Q30.
+     */
+    for (i = 0; i <= MAX_IAT; i++)
+    {
+        /* iatProb[i] = 0.5^(i+1) = iatProb[i-1] / 2 */
+        tempprob = WEBRTC_SPL_RSHIFT_U16(tempprob, 1);
+        /* store in PDF vector */
+        inst->iatProb[i] = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32) tempprob, 16);
+    }
+
+    /*
+     * Calculate the optimal buffer level corresponding to the initial PDF.
+     * No need to call WebRtcNetEQ_CalcOptimalBufLvl() since we have just hard-coded
+     * all the variables that the buffer level depends on => we know the result
+     */
+    inst->optBufLevel = WEBRTC_SPL_MIN(4,
+        (maxBufLenPackets >> 1) + (maxBufLenPackets >> 1)); /* 75% of maxBufLenPackets */
+    inst->levelFiltFact = 253;
+
+    /*
+     * Reset the iat update forgetting factor to 0 to make the impact of the first
+     * incoming packets greater.
+     */
+    inst->iatProbFact = 0;
+
+    /* Reset packet inter-arrival time counter */
+    inst->packetIatCountSamp = 0;
+
+    /* Clear time-scaling related variables */
+    inst->prevTimeScale = 0;
+    inst->timescaleHoldOff = AUTOMODE_TIMESCALE_LIMIT; /* don't allow time-scaling immediately */
+
+    inst->cSumIatQ8 = 0;
+    inst->maxCSumIatQ8 = 0;
+
+    return 0;
+}
+
+int32_t WebRtcNetEQ_AverageIAT(const AutomodeInst_t *inst) {
+  int i;
+  int32_t sum_q24 = 0;
+  assert(inst);
+  for (i = 0; i <= MAX_IAT; ++i) {
+    /* Shift 6 to fit worst case: 2^30 * 64. */
+    sum_q24 += (inst->iatProb[i] >> 6) * i;
+  }
+  /* Subtract the nominal inter-arrival time 1 = 2^24 in Q24. */
+  sum_q24 -= (1 << 24);
+  /*
+   * Multiply with 1000000 / 2^24 = 15625 / 2^18 to get in parts-per-million.
+   * Shift 7 to Q17 first, then multiply with 15625 and shift another 11.
+   */
+  return ((sum_q24 >> 7) * 15625) >> 11;
+}
diff --git a/src/modules/audio_coding/neteq/automode.h b/src/modules/audio_coding/neteq/automode.h
new file mode 100644
index 0000000..dbd09cf
--- /dev/null
+++ b/src/modules/audio_coding/neteq/automode.h
@@ -0,0 +1,264 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the functionality for automatic buffer level optimization.
+ */
+
+#ifndef AUTOMODE_H
+#define AUTOMODE_H
+
+#include "typedefs.h"
+
+/*************/
+/* Constants */
+/*************/
+
+/* The beta parameter defines the trade-off between delay and underrun probability. */
+/* It is defined through its inverse in Q30 */
+#define AUTOMODE_BETA_INV_Q30 53687091  /* 1/20 in Q30 */
+#define AUTOMODE_STREAMING_BETA_INV_Q30 536871 /* 1/2000 in Q30 */
+
+/* Forgetting factor for the inter-arrival time statistics */
+#define IAT_PROB_FACT 32745       /* 0.9993 in Q15 */
+
+/* Maximum inter-arrival time to register (in "packet-times") */
+#define MAX_IAT 64
+#define PEAK_HEIGHT 20            /* 0.08s in Q8 */
+
+/* The value (1<<5) sets maximum accelerate "speed" to about 100 ms/s */
+#define AUTOMODE_TIMESCALE_LIMIT (1<<5)
+
+/* Peak mode related parameters */
+/* Number of peaks in peak vector; must be a power of 2 */
+#define NUM_PEAKS 8
+
+/* Must be NUM_PEAKS-1 */
+#define PEAK_INDEX_MASK 0x0007
+
+/* Longest accepted peak distance */
+#define MAX_PEAK_PERIOD 10
+#define MAX_STREAMING_PEAK_PERIOD 600 /* 10 minutes */
+
+/* Number of peaks required before peak mode can be engaged */
+#define NUM_PEAKS_REQUIRED 3
+
+/* Drift term for cumulative sum */
+#define CSUM_IAT_DRIFT 2
+
+/*******************/
+/* Automode struct */
+/*******************/
+
+/* The automode struct is a sub-struct of the
+ bufstats-struct (BufstatsInst_t). */
+
+typedef struct
+{
+
+    /* Filtered current buffer level */
+    WebRtc_UWord16 levelFiltFact; /* filter forgetting factor in Q8 */
+    WebRtc_UWord16 buffLevelFilt; /* filtered buffer level in Q8 */
+
+    /* Inter-arrival time (iat) statistics */
+    WebRtc_Word32 iatProb[MAX_IAT + 1]; /* iat probabilities in Q30 */
+    WebRtc_Word16 iatProbFact; /* iat forgetting factor in Q15 */
+    WebRtc_UWord32 packetIatCountSamp; /* time (in timestamps) elapsed since last
+     packet arrival, based on RecOut calls */
+    WebRtc_UWord16 optBufLevel; /* current optimal buffer level in Q8 */
+
+    /* Packet related information */
+    WebRtc_Word16 packetSpeechLenSamp; /* speech samples per incoming packet */
+    WebRtc_Word16 lastPackCNGorDTMF; /* indicates that the last received packet
+     contained special information */
+    WebRtc_UWord16 lastSeqNo; /* sequence number for last packet received */
+    WebRtc_UWord32 lastTimeStamp; /* timestamp for the last packet received */
+    WebRtc_Word32 sampleMemory; /* memory position for keeping track of how many
+     samples we cut during expand */
+    WebRtc_Word16 prevTimeScale; /* indicates that the last mode was an accelerate
+     or pre-emptive expand operation */
+    WebRtc_UWord32 timescaleHoldOff; /* counter that is shifted one step right each
+     RecOut call; time-scaling allowed when it has
+     reached 0 */
+    WebRtc_Word16 extraDelayMs; /* extra delay for sync with video */
+
+    /* Peak-detection */
+    /* vector with the latest peak periods (peak spacing in samples) */
+    WebRtc_UWord32 peakPeriodSamp[NUM_PEAKS];
+    /* vector with the latest peak heights (in packets) */
+    WebRtc_Word16 peakHeightPkt[NUM_PEAKS];
+    WebRtc_Word16 peakIndex; /* index for the vectors peakPeriodSamp and peakHeightPkt;
+     -1 if still waiting for first peak */
+    WebRtc_UWord16 peakThresholdPkt; /* definition of peak (in packets);
+     calculated from PEAK_HEIGHT */
+    WebRtc_UWord32 peakIatCountSamp; /* samples elapsed since last peak was observed */
+    WebRtc_UWord32 curPeakPeriod; /* current maximum of peakPeriodSamp vector */
+    WebRtc_Word16 curPeakHeight; /* derived from peakHeightPkt vector;
+     used as optimal buffer level in peak mode */
+    WebRtc_Word16 peakModeDisabled; /* ==0 if peak mode can be engaged; >0 if not */
+    uint16_t peakFound; /* 1 if peaks are detected and extra delay is applied;
+                        * 0 otherwise. */
+
+    /* Post-call statistics */
+    WebRtc_UWord32 countIAT500ms; /* number of times we got small network outage */
+    WebRtc_UWord32 countIAT1000ms; /* number of times we got medium network outage */
+    WebRtc_UWord32 countIAT2000ms; /* number of times we got large network outage */
+    WebRtc_UWord32 longestIATms; /* mSec duration of longest network outage */
+
+    WebRtc_Word16 cSumIatQ8; /* cumulative sum of inter-arrival times */
+    WebRtc_Word16 maxCSumIatQ8; /* max cumulative sum IAT */
+    WebRtc_UWord32 maxCSumUpdateTimer;/* time elapsed since maximum was observed */
+
+} AutomodeInst_t;
+
+/*************/
+/* Functions */
+/*************/
+
+/****************************************************************************
+ * WebRtcNetEQ_UpdateIatStatistics(...)
+ *
+ * Update the packet inter-arrival time statistics when a new packet arrives.
+ * This function should be called for every arriving packet, with some
+ * exceptions when using DTX/VAD and DTMF. A new optimal buffer level is
+ * calculated after the update.
+ *
+ * Input:
+ *		- inst	        : Automode instance
+ *		- maxBufLen		: Maximum number of packets the buffer can hold
+ *		- seqNumber     : RTP sequence number of incoming packet
+ *      - timeStamp     : RTP timestamp of incoming packet
+ *      - fsHz          : Sample rate in Hz
+ *      - mdCodec       : Non-zero if the current codec is a multiple-
+ *                        description codec
+ *      - streamingMode : A non-zero value will increase jitter robustness (and delay)
+ *
+ * Output:
+ *      - inst          : Updated automode instance
+ *
+ * Return value			:  0 - Ok
+ *                        <0 - Error
+ */
+
+int WebRtcNetEQ_UpdateIatStatistics(AutomodeInst_t *inst, int maxBufLen,
+                                    WebRtc_UWord16 seqNumber, WebRtc_UWord32 timeStamp,
+                                    WebRtc_Word32 fsHz, int mdCodec, int streamingMode);
+
+/****************************************************************************
+ * WebRtcNetEQ_CalcOptimalBufLvl(...)
+ *
+ * Calculate the optimal buffer level based on packet inter-arrival time
+ * statistics.
+ *
+ * Input:
+ *		- inst	        : Automode instance
+ *      - fsHz          : Sample rate in Hz
+ *      - mdCodec       : Non-zero if the current codec is a multiple-
+ *                        description codec
+ *      - timeIatPkts   : Currently observed inter-arrival time in packets
+ *      - streamingMode : A non-zero value will increase jitter robustness (and delay)
+ *
+ * Output:
+ *      - inst          : Updated automode instance
+ *
+ * Return value			: >0 - Optimal buffer level
+ *                        <0 - Error
+ */
+
+WebRtc_Word16 WebRtcNetEQ_CalcOptimalBufLvl(AutomodeInst_t *inst, WebRtc_Word32 fsHz,
+                                            int mdCodec, WebRtc_UWord32 timeIatPkts,
+                                            int streamingMode);
+
+/****************************************************************************
+ * WebRtcNetEQ_BufferLevelFilter(...)
+ *
+ * Update filtered buffer level. The function must be called once for each
+ * RecOut call, since the timing of automode hinges on counters that are
+ * updated by this function.
+ *
+ * Input:
+ *      - curSizeMs8    : Total length of unused speech data in packet buffer
+ *                        and sync buffer, in ms * 8
+ *		- inst	        : Automode instance
+ *		- sampPerCall	: Number of samples per RecOut call
+ *      - fsMult        : Sample rate in Hz divided by 8000
+ *
+ * Output:
+ *      - inst          : Updated automode instance
+ *
+ * Return value			:  0 - Ok
+ *                      : <0 - Error
+ */
+
+int WebRtcNetEQ_BufferLevelFilter(WebRtc_Word32 curSizeMs8, AutomodeInst_t *inst,
+                                  int sampPerCall, WebRtc_Word16 fsMult);
+
+/****************************************************************************
+ * WebRtcNetEQ_SetPacketSpeechLen(...)
+ *
+ * Provide the number of speech samples extracted from a packet to the
+ * automode instance. Several of the calculations within automode depend
+ * on knowing the packet size.
+ *
+ *
+ * Input:
+ *		- inst	        : Automode instance
+ *		- newLenSamp    : Number of samples per RecOut call
+ *      - fsHz          : Sample rate in Hz
+ *
+ * Output:
+ *      - inst          : Updated automode instance
+ *
+ * Return value			:  0 - Ok
+ *                        <0 - Error
+ */
+
+int WebRtcNetEQ_SetPacketSpeechLen(AutomodeInst_t *inst, WebRtc_Word16 newLenSamp,
+                                   WebRtc_Word32 fsHz);
+
+/****************************************************************************
+ * WebRtcNetEQ_ResetAutomode(...)
+ *
+ * Reset the automode instance.
+ *
+ *
+ * Input:
+ *		- inst	            : Automode instance
+ *		- maxBufLenPackets  : Maximum number of packets that the packet
+ *                            buffer can hold (>1)
+ *
+ * Output:
+ *      - inst              : Updated automode instance
+ *
+ * Return value			    :  0 - Ok
+ */
+
+int WebRtcNetEQ_ResetAutomode(AutomodeInst_t *inst, int maxBufLenPackets);
+
+/****************************************************************************
+ * WebRtcNetEQ_AverageIAT(...)
+ *
+ * Calculate the average inter-arrival time based on current statistics.
+ * The average is expressed in parts per million relative the nominal. That is,
+ * if the average inter-arrival time is equal to the nominal frame time,
+ * the return value is zero. A positive value corresponds to packet spacing
+ * being too large, while a negative value means that the packets arrive with
+ * less spacing than expected.
+ *
+ *
+ * Input:
+ *    - inst              : Automode instance.
+ *
+ * Return value           : Average relative inter-arrival time in samples.
+ */
+
+int32_t WebRtcNetEQ_AverageIAT(const AutomodeInst_t *inst);
+
+#endif /* AUTOMODE_H */
diff --git a/src/modules/audio_coding/neteq/bgn_update.c b/src/modules/audio_coding/neteq/bgn_update.c
new file mode 100644
index 0000000..05956c2
--- /dev/null
+++ b/src/modules/audio_coding/neteq/bgn_update.c
@@ -0,0 +1,247 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the function for updating the background noise estimate.
+ */
+
+#include "dsp.h"
+
+#include "signal_processing_library.h"
+
+#include "dsp_helpfunctions.h"
+
+/* Scratch usage: 
+ Designed for BGN_LPC_ORDER <= 10
+
+ Type           Name            size   startpos  endpos
+ WebRtc_Word32  pw32_autoCorr   22     0         21  (Length (BGN_LPC_ORDER + 1)*2)
+ WebRtc_Word16  pw16_tempVec    10     22        31	(Length BGN_LPC_ORDER)
+ WebRtc_Word16  pw16_rc         10     32        41	(Length BGN_LPC_ORDER)
+ WebRtc_Word16  pw16_outVec     74     0         73  (Length BGN_LPC_ORDER + 64)
+
+ Total: 74
+ */
+
+#if (BGN_LPC_ORDER > 10) && (defined SCRATCH)
+#error BGN_LPC_ORDER is too large for current scratch memory allocation
+#endif
+
+#define	 SCRATCH_PW32_AUTO_CORR			0
+#define	 SCRATCH_PW16_TEMP_VEC			22
+#define	 SCRATCH_PW16_RC				32
+#define	 SCRATCH_PW16_OUT_VEC			0
+
+#define NETEQFIX_BGNFRAQINCQ16	229 /* 0.0035 in Q16 */
+
+/****************************************************************************
+ * WebRtcNetEQ_BGNUpdate(...)
+ *
+ * This function updates the background noise parameter estimates.
+ *
+ * Input:
+ *		- inst			: NetEQ instance, where the speech history is stored.
+ *      - scratchPtr    : Pointer to scratch vector.
+ *
+ * Output:
+ *		- inst			: Updated information about the BGN characteristics.
+ *
+ * Return value			: No return value
+ */
+
+void WebRtcNetEQ_BGNUpdate(
+#ifdef SCRATCH
+                           DSPInst_t *inst, WebRtc_Word16 *pw16_scratchPtr
+#else
+                           DSPInst_t *inst
+#endif
+)
+{
+    const WebRtc_Word16 w16_vecLen = 256;
+    BGNInst_t *BGN_Inst = &(inst->BGNInst);
+#ifdef SCRATCH
+    WebRtc_Word32 *pw32_autoCorr = (WebRtc_Word32*) (pw16_scratchPtr + SCRATCH_PW32_AUTO_CORR);
+    WebRtc_Word16 *pw16_tempVec = pw16_scratchPtr + SCRATCH_PW16_TEMP_VEC;
+    WebRtc_Word16 *pw16_rc = pw16_scratchPtr + SCRATCH_PW16_RC;
+    WebRtc_Word16 *pw16_outVec = pw16_scratchPtr + SCRATCH_PW16_OUT_VEC;
+#else
+    WebRtc_Word32 pw32_autoCorr[BGN_LPC_ORDER + 1];
+    WebRtc_Word16 pw16_tempVec[BGN_LPC_ORDER];
+    WebRtc_Word16 pw16_outVec[BGN_LPC_ORDER + 64];
+    WebRtc_Word16 pw16_rc[BGN_LPC_ORDER];
+#endif
+    WebRtc_Word16 pw16_A[BGN_LPC_ORDER + 1];
+    WebRtc_Word32 w32_tmp;
+    WebRtc_Word16 *pw16_vec;
+    WebRtc_Word16 w16_maxSample;
+    WebRtc_Word16 w16_tmp, w16_tmp2;
+    WebRtc_Word16 w16_enSampleShift;
+    WebRtc_Word32 w32_en, w32_enBGN;
+    WebRtc_Word32 w32_enUpdateThreashold;
+    WebRtc_Word16 stability;
+
+    pw16_vec = inst->pw16_speechHistory + inst->w16_speechHistoryLen - w16_vecLen;
+
+#ifdef NETEQ_VAD
+    if( !inst->VADInst.VADEnabled /* we are not using post-decode VAD */
+        || inst->VADInst.VADDecision == 0 )
+    { /* ... or, post-decode VAD says passive speaker */
+#endif /* NETEQ_VAD */
+
+    /*Insert zeros to guarantee that boundary values do not distort autocorrelation */
+    WEBRTC_SPL_MEMCPY_W16(pw16_tempVec, pw16_vec - BGN_LPC_ORDER, BGN_LPC_ORDER);
+    WebRtcSpl_MemSetW16(pw16_vec - BGN_LPC_ORDER, 0, BGN_LPC_ORDER);
+
+    w16_maxSample = WebRtcSpl_MaxAbsValueW16(pw16_vec, w16_vecLen);
+    w16_tmp = 8 /* log2(w16_veclen) = 8 */
+        - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_maxSample, w16_maxSample));
+    w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
+
+    WebRtcNetEQ_CrossCorr(pw32_autoCorr, pw16_vec, pw16_vec, w16_vecLen, BGN_LPC_ORDER + 1,
+        w16_tmp, -1);
+
+    /* Copy back data */
+    WEBRTC_SPL_MEMCPY_W16(pw16_vec - BGN_LPC_ORDER, pw16_tempVec, BGN_LPC_ORDER);
+
+    w16_enSampleShift = 8 - w16_tmp; /* Number of shifts to get energy/sample */
+    /* pw32_autoCorr[0]>>w16_enSampleShift */
+    w32_en = WEBRTC_SPL_RSHIFT_W32(pw32_autoCorr[0], w16_enSampleShift);
+    if ((w32_en < BGN_Inst->w32_energyUpdate
+#ifdef NETEQ_VAD
+        /* post-decode VAD disabled and w32_en sufficiently low */
+         && !inst->VADInst.VADEnabled)
+    /* ... or, post-decode VAD says passive speaker */
+    || (inst->VADInst.VADEnabled && inst->VADInst.VADDecision == 0)
+#else
+    ) /* just close the extra parenthesis */
+#endif /* NETEQ_VAD */
+    )
+    {
+        /* Generate LPC coefficients */
+        if (pw32_autoCorr[0] > 0)
+        {
+            /* regardless of whether the filter is actually updated or not,
+             update energy threshold levels, since we have in fact observed
+             a low energy signal */
+            if (w32_en < BGN_Inst->w32_energyUpdate)
+            {
+                /* Never get under 1.0 in average sample energy */
+                BGN_Inst->w32_energyUpdate = WEBRTC_SPL_MAX(w32_en, 1);
+                BGN_Inst->w32_energyUpdateLow = 0;
+            }
+
+            stability = WebRtcSpl_LevinsonDurbin(pw32_autoCorr, pw16_A, pw16_rc, BGN_LPC_ORDER);
+            /* Only update BGN if filter is stable */
+            if (stability != 1)
+            {
+                return;
+            }
+        }
+        else
+        {
+            /* Do not update */
+            return;
+        }
+        /* Generate the CNG gain factor by looking at the energy of the residual */
+        WebRtcSpl_FilterMAFastQ12(pw16_vec + w16_vecLen - 64, pw16_outVec, pw16_A,
+            BGN_LPC_ORDER + 1, 64);
+        w32_enBGN = WebRtcNetEQ_DotW16W16(pw16_outVec, pw16_outVec, 64, 0);
+        /* Dot product should never overflow since it is BGN and residual! */
+
+        /*
+         * Check spectral flatness
+         * Comparing the residual variance with the input signal variance tells
+         * if the spectrum is flat or not.
+         * (20*w32_enBGN) >= (w32_en<<6)
+         * Also ensure that the energy is non-zero.
+         */
+        if ((WEBRTC_SPL_MUL_32_16(w32_enBGN, 20) >= WEBRTC_SPL_LSHIFT_W32(w32_en, 6))
+            && (w32_en > 0))
+        {
+            /* spectrum is flat enough; save filter parameters */
+
+            WEBRTC_SPL_MEMCPY_W16(BGN_Inst->pw16_filter, pw16_A, BGN_LPC_ORDER+1);
+            WEBRTC_SPL_MEMCPY_W16(BGN_Inst->pw16_filterState,
+                pw16_vec + w16_vecLen - BGN_LPC_ORDER, BGN_LPC_ORDER);
+
+            /* Save energy level */
+            BGN_Inst->w32_energy = WEBRTC_SPL_MAX(w32_en, 1);
+
+            /* Update energy threshold levels */
+            /* Never get under 1.0 in average sample energy */
+            BGN_Inst->w32_energyUpdate = WEBRTC_SPL_MAX(w32_en, 1);
+            BGN_Inst->w32_energyUpdateLow = 0;
+
+            /* Normalize w32_enBGN to 29 or 30 bits before sqrt */
+            w16_tmp2 = WebRtcSpl_NormW32(w32_enBGN) - 1;
+            if (w16_tmp2 & 0x1)
+            {
+                w16_tmp2 -= 1; /* Even number of shifts required */
+            }
+            w32_enBGN = WEBRTC_SPL_SHIFT_W32(w32_enBGN, w16_tmp2);
+
+            /* Calculate scale and shift factor */
+            BGN_Inst->w16_scale = (WebRtc_Word16) WebRtcSpl_SqrtFloor(w32_enBGN);
+            BGN_Inst->w16_scaleShift = 13 + ((6 + w16_tmp2) >> 1); /* RANDN table is in Q13, */
+            /* 6=log2(64) */
+
+            BGN_Inst->w16_initialized = 1;
+        }
+
+    }
+    else
+    {
+        /*
+         * Will only happen if post-decode VAD is disabled and w32_en is not low enough.
+         * Increase the threshold for update so that it increases by a factor 4 in four
+         * seconds.
+         * energy = energy * 1.0035
+         */
+        w32_tmp = WEBRTC_SPL_MUL_16_16_RSFT(NETEQFIX_BGNFRAQINCQ16,
+            BGN_Inst->w32_energyUpdateLow, 16);
+        w32_tmp += WEBRTC_SPL_MUL_16_16(NETEQFIX_BGNFRAQINCQ16,
+            (WebRtc_Word16)(BGN_Inst->w32_energyUpdate & 0xFF));
+        w32_tmp += (WEBRTC_SPL_MUL_16_16(NETEQFIX_BGNFRAQINCQ16,
+            (WebRtc_Word16)((BGN_Inst->w32_energyUpdate>>8) & 0xFF)) << 8);
+        BGN_Inst->w32_energyUpdateLow += w32_tmp;
+
+        BGN_Inst->w32_energyUpdate += WEBRTC_SPL_MUL_16_16(NETEQFIX_BGNFRAQINCQ16,
+            (WebRtc_Word16)(BGN_Inst->w32_energyUpdate>>16));
+        BGN_Inst->w32_energyUpdate += BGN_Inst->w32_energyUpdateLow >> 16;
+        BGN_Inst->w32_energyUpdateLow = (BGN_Inst->w32_energyUpdateLow & 0x0FFFF);
+
+        /* Update maximum energy */
+        /* Decrease by a factor 1/1024 each time */
+        BGN_Inst->w32_energyMax = BGN_Inst->w32_energyMax - (BGN_Inst->w32_energyMax >> 10);
+        if (w32_en > BGN_Inst->w32_energyMax)
+        {
+            BGN_Inst->w32_energyMax = w32_en;
+        }
+
+        /* Set update level to at the minimum 60.21dB lower then the maximum energy */
+        w32_enUpdateThreashold = (BGN_Inst->w32_energyMax + 524288) >> 20;
+        if (w32_enUpdateThreashold > BGN_Inst->w32_energyUpdate)
+        {
+            BGN_Inst->w32_energyUpdate = w32_enUpdateThreashold;
+        }
+    }
+
+#ifdef NETEQ_VAD
+} /* closing initial if-statement */
+#endif /* NETEQ_VAD */
+
+    return;
+}
+
+#undef	 SCRATCH_PW32_AUTO_CORR
+#undef	 SCRATCH_PW16_TEMP_VEC
+#undef	 SCRATCH_PW16_RC
+#undef	 SCRATCH_PW16_OUT_VEC
+
diff --git a/src/modules/audio_coding/neteq/buffer_stats.h b/src/modules/audio_coding/neteq/buffer_stats.h
new file mode 100644
index 0000000..9820519
--- /dev/null
+++ b/src/modules/audio_coding/neteq/buffer_stats.h
@@ -0,0 +1,95 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Calculates and stores the packet buffer statistics.
+ */
+
+#ifndef BUFFER_STATS_H
+#define BUFFER_STATS_H
+
+#include "automode.h"
+#include "webrtc_neteq.h" /* to define enum WebRtcNetEQPlayoutMode */
+
+/* NetEQ related decisions */
+#define BUFSTATS_DO_NORMAL					0
+#define BUFSTATS_DO_ACCELERATE				1
+#define BUFSTATS_DO_MERGE					2
+#define BUFSTATS_DO_EXPAND					3
+#define BUFSTAT_REINIT						4
+#define BUFSTATS_DO_RFC3389CNG_PACKET		5
+#define BUFSTATS_DO_RFC3389CNG_NOPACKET		6
+#define BUFSTATS_DO_INTERNAL_CNG_NOPACKET	7
+#define BUFSTATS_DO_PREEMPTIVE_EXPAND		8
+#define BUFSTAT_REINIT_DECODER              9
+#define BUFSTATS_DO_DTMF_ONLY               10
+/* Decisions related to when NetEQ is switched off (or in FAX mode) */
+#define BUFSTATS_DO_ALTERNATIVE_PLC				   11
+#define BUFSTATS_DO_ALTERNATIVE_PLC_INC_TS		   12
+#define BUFSTATS_DO_AUDIO_REPETITION			   13
+#define BUFSTATS_DO_AUDIO_REPETITION_INC_TS		   14
+
+/* Reinit decoder states after this number of expands (upon arrival of new packet) */
+#define REINIT_AFTER_EXPANDS 100
+
+/* Wait no longer than this number of RecOut calls before using an "early" packet */
+#define MAX_WAIT_FOR_PACKET 10
+
+/* CNG modes */
+#define CNG_OFF 0
+#define CNG_RFC3389_ON 1
+#define CNG_INTERNAL_ON 2
+
+typedef struct
+{
+
+    /* store statistical data here */
+    WebRtc_Word16 w16_cngOn; /* remember if CNG is interrupted by other event (e.g. DTMF) */
+    WebRtc_Word16 w16_noExpand;
+    WebRtc_Word32 uw32_CNGplayedTS;
+
+    /* VQmon data */
+    WebRtc_UWord16 avgDelayMsQ8;
+    WebRtc_Word16 maxDelayMs;
+
+    AutomodeInst_t Automode_inst;
+
+} BufstatsInst_t;
+
+/****************************************************************************
+ * WebRtcNetEQ_BufstatsDecision()
+ *
+ * Gives a decision about what action that is currently desired 
+ *
+ *
+ *	Input:
+ *		inst:			    The bufstat instance
+ *		cur_size:		    Current buffer size in ms in Q3 domain
+ *		targetTS:		    The desired timestamp to start playout from
+ *		availableTS:	    The closest future value available in buffer
+ *		noPacket		    1 if no packet is available, makes availableTS undefined
+ *		prevPlayMode	    mode of last NetEq playout
+ *		timestampsPerCall	number of timestamp for 10ms
+ *
+ *	Output:
+ *		Returns:		    A decision, as defined above (see top of file)
+ *
+ */
+
+WebRtc_UWord16 WebRtcNetEQ_BufstatsDecision(BufstatsInst_t *inst, WebRtc_Word16 frameSize,
+                                            WebRtc_Word32 cur_size, WebRtc_UWord32 targetTS,
+                                            WebRtc_UWord32 availableTS, int noPacket,
+                                            int cngPacket, int prevPlayMode,
+                                            enum WebRtcNetEQPlayoutMode playoutMode,
+                                            int timestampsPerCall, int NoOfExpandCalls,
+                                            WebRtc_Word16 fs_mult,
+                                            WebRtc_Word16 lastModeBGNonly, int playDtmf);
+
+#endif
diff --git a/src/modules/audio_coding/neteq/bufstats_decision.c b/src/modules/audio_coding/neteq/bufstats_decision.c
new file mode 100644
index 0000000..3d37e17
--- /dev/null
+++ b/src/modules/audio_coding/neteq/bufstats_decision.c
@@ -0,0 +1,426 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the function where the main decision logic for buffer level
+ * adaptation happens.
+ */
+
+#include "buffer_stats.h"
+
+#include <assert.h>
+
+#include "signal_processing_library.h"
+
+#include "automode.h"
+#include "neteq_defines.h"
+#include "neteq_error_codes.h"
+#include "webrtc_neteq.h"
+
+#define NETEQ_BUFSTAT_20MS_Q7 2560 /* = 20 ms in Q7  */
+
+WebRtc_UWord16 WebRtcNetEQ_BufstatsDecision(BufstatsInst_t *inst, WebRtc_Word16 frameSize,
+                                            WebRtc_Word32 cur_size, WebRtc_UWord32 targetTS,
+                                            WebRtc_UWord32 availableTS, int noPacket,
+                                            int cngPacket, int prevPlayMode,
+                                            enum WebRtcNetEQPlayoutMode playoutMode,
+                                            int timestampsPerCall, int NoOfExpandCalls,
+                                            WebRtc_Word16 fs_mult,
+                                            WebRtc_Word16 lastModeBGNonly, int playDtmf)
+{
+
+    int currentDelayMs;
+    WebRtc_Word32 currSizeSamples = cur_size;
+    WebRtc_Word16 extraDelayPacketsQ8 = 0;
+
+    /* Avoid overflow if the buffer size should be really large (cur_size is limited 256ms) */
+    WebRtc_Word32 curr_sizeQ7 = WEBRTC_SPL_LSHIFT_W32(cur_size, 4);
+    WebRtc_UWord16 level_limit_hi, level_limit_lo;
+
+    inst->Automode_inst.prevTimeScale &= (prevPlayMode == MODE_SUCCESS_ACCELERATE
+        || prevPlayMode == MODE_LOWEN_ACCELERATE || prevPlayMode == MODE_SUCCESS_PREEMPTIVE
+        || prevPlayMode == MODE_LOWEN_PREEMPTIVE);
+
+    if ((prevPlayMode != MODE_RFC3389CNG) && (prevPlayMode != MODE_CODEC_INTERNAL_CNG))
+    {
+        /*
+         * Do not update buffer history if currently playing CNG
+         * since it will bias the filtered buffer level.
+         */
+        WebRtcNetEQ_BufferLevelFilter(cur_size, &(inst->Automode_inst), timestampsPerCall,
+            fs_mult);
+    }
+    else
+    {
+        /* only update time counters */
+        inst->Automode_inst.packetIatCountSamp += timestampsPerCall; /* packet inter-arrival time */
+        inst->Automode_inst.peakIatCountSamp += timestampsPerCall; /* peak inter-arrival time */
+        inst->Automode_inst.timescaleHoldOff >>= 1; /* time-scaling limiter */
+    }
+    cur_size = WEBRTC_SPL_MIN(curr_sizeQ7, WEBRTC_SPL_WORD16_MAX);
+
+    /* Calculate VQmon related variables */
+    /* avgDelay = avgDelay*(511/512) + currentDelay*(1/512) (sample ms delay in Q8) */
+    inst->avgDelayMsQ8 = (WebRtc_Word16) (WEBRTC_SPL_MUL_16_16_RSFT(inst->avgDelayMsQ8,511,9)
+        + (cur_size >> 9));
+
+    /* Update maximum delay if needed */
+    currentDelayMs = (curr_sizeQ7 >> 7);
+    if (currentDelayMs > inst->maxDelayMs)
+    {
+        inst->maxDelayMs = currentDelayMs;
+    }
+
+    /* NetEQ is on with normal or steaming mode */
+    if (playoutMode == kPlayoutOn || playoutMode == kPlayoutStreaming)
+    {
+        /* Guard for errors, so that it should not get stuck in error mode */
+        if (prevPlayMode == MODE_ERROR)
+        {
+            if (noPacket)
+            {
+                return BUFSTATS_DO_EXPAND;
+            }
+            else
+            {
+                return BUFSTAT_REINIT;
+            }
+        }
+
+        if (prevPlayMode != MODE_EXPAND && prevPlayMode != MODE_FADE_TO_BGN)
+        {
+            inst->w16_noExpand = 1;
+        }
+        else
+        {
+            inst->w16_noExpand = 0;
+        }
+
+        if (cngPacket)
+        {
+            /* signed difference between wanted and available TS */
+            WebRtc_Word32 diffTS = (inst->uw32_CNGplayedTS + targetTS) - availableTS;
+            int32_t optimal_level_samp = (inst->Automode_inst.optBufLevel *
+                inst->Automode_inst.packetSpeechLenSamp) >> 8;
+            int32_t excess_waiting_time_samp = -diffTS - optimal_level_samp;
+
+            if (excess_waiting_time_samp > optimal_level_samp / 2)
+            {
+                /* The waiting time for this packet will be longer than 1.5
+                 * times the wanted buffer delay. Advance the clock to cut
+                 * waiting time down to the optimal.
+                 */
+                inst->uw32_CNGplayedTS += excess_waiting_time_samp;
+                diffTS += excess_waiting_time_samp;
+            }
+
+            if ((diffTS) < 0 && (prevPlayMode == MODE_RFC3389CNG))
+            {
+                /* Not time to play this packet yet. Wait another round before using this
+                 * packet. Keep on playing CNG from previous CNG parameters. */
+                return BUFSTATS_DO_RFC3389CNG_NOPACKET;
+            }
+
+            /* otherwise, go for the CNG packet now */
+            return BUFSTATS_DO_RFC3389CNG_PACKET;
+        }
+
+        /*Check for expand/cng */
+        if (noPacket)
+        {
+            if (inst->w16_cngOn == CNG_RFC3389_ON)
+            {
+                /* keep on playing CNG */
+                return BUFSTATS_DO_RFC3389CNG_NOPACKET;
+            }
+            else if (inst->w16_cngOn == CNG_INTERNAL_ON)
+            {
+                /* keep on playing internal CNG */
+                return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
+            }
+            else if (playDtmf == 1)
+            {
+                /* we have not audio data, but can play DTMF */
+                return BUFSTATS_DO_DTMF_ONLY;
+            }
+            else
+            {
+                /* nothing to play => do Expand */
+                return BUFSTATS_DO_EXPAND;
+            }
+        }
+
+        /*
+         * If the expand period was very long, reset NetEQ since it is likely that the
+         * sender was restarted.
+         */
+        if (NoOfExpandCalls > REINIT_AFTER_EXPANDS) return BUFSTAT_REINIT_DECODER;
+
+        /* Calculate extra delay in Q8 packets */
+        if (inst->Automode_inst.extraDelayMs > 0 && inst->Automode_inst.packetSpeechLenSamp
+            > 0)
+        {
+            extraDelayPacketsQ8 = WebRtcSpl_DivW32W16ResW16(
+                (WEBRTC_SPL_MUL(inst->Automode_inst.extraDelayMs, 8 * fs_mult) << 8),
+                inst->Automode_inst.packetSpeechLenSamp);
+            /* (extra delay in samples in Q8) */
+        }
+
+        /* Check if needed packet is available */
+        if (targetTS == availableTS)
+        {
+
+            /* If last mode was not expand, and there is no DTMF to play */
+            if (inst->w16_noExpand == 1 && playDtmf == 0)
+            {
+                /* If so check for accelerate */
+
+                level_limit_lo = ((inst->Automode_inst.optBufLevel) >> 1) /* 50 % */
+                    + ((inst->Automode_inst.optBufLevel) >> 2); /* ... + 25% = 75% */
+
+                /* set upper limit to optBufLevel, but make sure that window is at least 20ms */
+                level_limit_hi = WEBRTC_SPL_MAX(inst->Automode_inst.optBufLevel,
+                    level_limit_lo +
+                    WebRtcSpl_DivW32W16ResW16((WEBRTC_SPL_MUL(20*8, fs_mult) << 8),
+                        inst->Automode_inst.packetSpeechLenSamp));
+
+                /* if extra delay is non-zero, add it */
+                if (extraDelayPacketsQ8 > 0)
+                {
+                    level_limit_hi += extraDelayPacketsQ8;
+                    level_limit_lo += extraDelayPacketsQ8;
+                }
+
+                if (((inst->Automode_inst.buffLevelFilt >= level_limit_hi) &&
+                    (inst->Automode_inst.timescaleHoldOff == 0)) ||
+                    (inst->Automode_inst.buffLevelFilt >= level_limit_hi << 2))
+                {
+                    /*
+                     * Buffer level higher than limit and time-scaling allowed,
+                     * OR buffer level _really_ high.
+                     */
+                    return BUFSTATS_DO_ACCELERATE;
+                }
+                else if ((inst->Automode_inst.buffLevelFilt < level_limit_lo)
+                    && (inst->Automode_inst.timescaleHoldOff == 0))
+                {
+                    return BUFSTATS_DO_PREEMPTIVE_EXPAND;
+                }
+            }
+            return BUFSTATS_DO_NORMAL;
+        }
+
+        /* Check for Merge */
+        else if (availableTS > targetTS)
+        {
+
+            /* Check that we do not play a packet "too early" */
+            if ((prevPlayMode == MODE_EXPAND)
+                && (availableTS - targetTS
+                    < (WebRtc_UWord32) WEBRTC_SPL_MUL_16_16((WebRtc_Word16)timestampsPerCall,
+                        (WebRtc_Word16)REINIT_AFTER_EXPANDS))
+                && (NoOfExpandCalls < MAX_WAIT_FOR_PACKET)
+                && (availableTS
+                    > targetTS
+                        + WEBRTC_SPL_MUL_16_16((WebRtc_Word16)timestampsPerCall,
+                            (WebRtc_Word16)NoOfExpandCalls))
+                && (inst->Automode_inst.buffLevelFilt <= inst->Automode_inst.optBufLevel
+                    + extraDelayPacketsQ8))
+            {
+                if (playDtmf == 1)
+                {
+                    /* we still have DTMF to play, so do not perform expand */
+                    return BUFSTATS_DO_DTMF_ONLY;
+                }
+                else
+                {
+                    /* nothing to play */
+                    return BUFSTATS_DO_EXPAND;
+                }
+            }
+
+            /* If previous was CNG period or BGNonly then no merge is needed */
+            if ((prevPlayMode == MODE_RFC3389CNG) || (prevPlayMode == MODE_CODEC_INTERNAL_CNG)
+                || lastModeBGNonly)
+            {
+                /*
+                 * Keep the same delay as before the CNG (or maximum 70 ms in buffer as safety
+                 * precaution), but make sure that the number of samples in buffer is no
+                 * higher than 4 times the optimal level.
+                 */
+                WebRtc_Word32 diffTS = (inst->uw32_CNGplayedTS + targetTS) - availableTS;
+                if (diffTS >= 0
+                    || (WEBRTC_SPL_MUL_16_16_RSFT( inst->Automode_inst.optBufLevel
+                        + extraDelayPacketsQ8,
+                        inst->Automode_inst.packetSpeechLenSamp, 6) < currSizeSamples))
+                {
+                    /* it is time to play this new packet */
+                    return BUFSTATS_DO_NORMAL;
+                }
+                else
+                {
+                    /* it is too early to play this new packet => keep on playing CNG */
+                    if (prevPlayMode == MODE_RFC3389CNG)
+                    {
+                        return BUFSTATS_DO_RFC3389CNG_NOPACKET;
+                    }
+                    else if (prevPlayMode == MODE_CODEC_INTERNAL_CNG)
+                    {
+                        return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
+                    }
+                    else if (playDtmf == 1)
+                    {
+                        /* we have not audio data, but can play DTMF */
+                        return BUFSTATS_DO_DTMF_ONLY;
+                    }
+                    else /* lastModeBGNonly */
+                    {
+                        /* signal expand, but this will result in BGN again */
+                        return BUFSTATS_DO_EXPAND;
+                    }
+                }
+            }
+
+            /* Do not merge unless we have done a Expand before (for complexity reasons) */
+            if ((inst->w16_noExpand == 0) || ((frameSize < timestampsPerCall) && (cur_size
+                > NETEQ_BUFSTAT_20MS_Q7)))
+            {
+                return BUFSTATS_DO_MERGE;
+            }
+            else if (playDtmf == 1)
+            {
+                /* play DTMF instead of expand */
+                return BUFSTATS_DO_DTMF_ONLY;
+            }
+            else
+            {
+                return BUFSTATS_DO_EXPAND;
+            }
+        }
+    }
+    else
+    { /* kPlayoutOff or kPlayoutFax */
+        if (cngPacket)
+        {
+            if (((WebRtc_Word32) ((inst->uw32_CNGplayedTS + targetTS) - availableTS)) >= 0)
+            {
+                /* time to play this packet now */
+                return BUFSTATS_DO_RFC3389CNG_PACKET;
+            }
+            else
+            {
+                /* wait before playing this packet */
+                return BUFSTATS_DO_RFC3389CNG_NOPACKET;
+            }
+        }
+        if (noPacket)
+        {
+            /*
+             * No packet =>
+             * 1. If in CNG mode play as usual
+             * 2. Otherwise use other method to generate data and hold TS value
+             */
+            if (inst->w16_cngOn == CNG_RFC3389_ON)
+            {
+                /* keep on playing CNG */
+                return BUFSTATS_DO_RFC3389CNG_NOPACKET;
+            }
+            else if (inst->w16_cngOn == CNG_INTERNAL_ON)
+            {
+                /* keep on playing internal CNG */
+                return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
+            }
+            else
+            {
+                /* nothing to play => invent some data to play out */
+                if (playoutMode == kPlayoutOff)
+                {
+                    return BUFSTATS_DO_ALTERNATIVE_PLC;
+                }
+                else if (playoutMode == kPlayoutFax)
+                {
+                    return BUFSTATS_DO_AUDIO_REPETITION;
+                }
+                else
+                {
+                    /* UNDEFINED, should not get here... */
+                    assert(0);
+                    return BUFSTAT_REINIT;
+                }
+            }
+        }
+        else if (targetTS == availableTS)
+        {
+            return BUFSTATS_DO_NORMAL;
+        }
+        else
+        {
+            if (((WebRtc_Word32) ((inst->uw32_CNGplayedTS + targetTS) - availableTS)) >= 0)
+            {
+                return BUFSTATS_DO_NORMAL;
+            }
+            else if (playoutMode == kPlayoutOff)
+            {
+                /*
+                 * If currently playing CNG, continue with that. Don't increase TS
+                 * since uw32_CNGplayedTS will be increased.
+                 */
+                if (inst->w16_cngOn == CNG_RFC3389_ON)
+                {
+                    return BUFSTATS_DO_RFC3389CNG_NOPACKET;
+                }
+                else if (inst->w16_cngOn == CNG_INTERNAL_ON)
+                {
+                    return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
+                }
+                else
+                {
+                    /*
+                     * Otherwise, do PLC and increase TS while waiting for the time to
+                     * play this packet.
+                     */
+                    return BUFSTATS_DO_ALTERNATIVE_PLC_INC_TS;
+                }
+            }
+            else if (playoutMode == kPlayoutFax)
+            {
+                /*
+                 * If currently playing CNG, continue with that don't increase TS since
+                 * uw32_CNGplayedTS will be increased.
+                 */
+                if (inst->w16_cngOn == CNG_RFC3389_ON)
+                {
+                    return BUFSTATS_DO_RFC3389CNG_NOPACKET;
+                }
+                else if (inst->w16_cngOn == CNG_INTERNAL_ON)
+                {
+                    return BUFSTATS_DO_INTERNAL_CNG_NOPACKET;
+                }
+                else
+                {
+                    /*
+                     * Otherwise, do audio repetition and increase TS while waiting for the
+                     * time to play this packet.
+                     */
+                    return BUFSTATS_DO_AUDIO_REPETITION_INC_TS;
+                }
+            }
+            else
+            {
+                /* UNDEFINED, should not get here... */
+                assert(0);
+                return BUFSTAT_REINIT;
+            }
+        }
+    }
+    /* We should not get here (but sometimes we do anyway...) */
+    return BUFSTAT_REINIT;
+}
+
diff --git a/src/modules/audio_coding/neteq/cng_internal.c b/src/modules/audio_coding/neteq/cng_internal.c
new file mode 100644
index 0000000..f3a10dc
--- /dev/null
+++ b/src/modules/audio_coding/neteq/cng_internal.c
@@ -0,0 +1,155 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the function for obtaining comfort noise from noise parameters
+ * according to IETF RFC 3389.
+ */
+
+#include "dsp.h"
+
+#include "signal_processing_library.h"
+#include "webrtc_cng.h"
+
+#include "dsp_helpfunctions.h"
+#include "neteq_error_codes.h"
+
+/****************************************************************************
+ * WebRtcNetEQ_Cng(...)
+ *
+ * This function produces CNG according to RFC 3389.
+ *
+ * Input:
+ *      - inst          : NetEQ DSP instance
+ *      - len           : Number of samples to produce (max 640 or
+ *                        640 - fsHz*5/8000 for first-time CNG, governed by
+ *                        the definition of WEBRTC_CNG_MAX_OUTSIZE_ORDER in
+ *                        webrtc_cng.h)
+ *
+ * Output:
+ *      - pw16_outData  : Output CNG
+ *
+ * Return value         :  0 - Ok
+ *                        <0 - Error
+ */
+
+#ifdef NETEQ_CNG_CODEC
+/* Must compile NetEQ with CNG support to enable this function */
+
+int WebRtcNetEQ_Cng(DSPInst_t *inst, WebRtc_Word16 *pw16_outData, int len)
+{
+    WebRtc_Word16 w16_winMute = 0; /* mixing factor for overlap data */
+    WebRtc_Word16 w16_winUnMute = 0; /* mixing factor for comfort noise */
+    WebRtc_Word16 w16_winMuteInc = 0; /* mixing factor increment (negative) */
+    WebRtc_Word16 w16_winUnMuteInc = 0; /* mixing factor increment */
+    int i;
+
+    /*
+     * Check if last RecOut call was other than RFC3389,
+     * that is, this call is the first of a CNG period.
+     */
+    if (inst->w16_mode != MODE_RFC3389CNG)
+    {
+        /* Reset generation and overlap slightly with old data */
+
+        /* Generate len samples + overlap */
+        if (WebRtcCng_Generate(inst->CNG_Codec_inst, pw16_outData,
+            (WebRtc_Word16) (len + inst->ExpandInst.w16_overlap), 1) < 0)
+        {
+            /* error returned */
+            return -WebRtcCng_GetErrorCodeDec(inst->CNG_Codec_inst);
+        }
+
+        /* Set windowing parameters depending on sample rate */
+        if (inst->fs == 8000)
+        {
+            /* Windowing in Q15 */
+            w16_winMute = NETEQ_OVERLAP_WINMUTE_8KHZ_START;
+            w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_8KHZ_INC;
+            w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_8KHZ_START;
+            w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_8KHZ_INC;
+#ifdef NETEQ_WIDEBAND
+        }
+        else if (inst->fs == 16000)
+        {
+            /* Windowing in Q15 */
+            w16_winMute = NETEQ_OVERLAP_WINMUTE_16KHZ_START;
+            w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_16KHZ_INC;
+            w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_16KHZ_START;
+            w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_16KHZ_INC;
+#endif
+#ifdef NETEQ_32KHZ_WIDEBAND
+        }
+        else if (inst->fs == 32000)
+        {
+            /* Windowing in Q15 */
+            w16_winMute = NETEQ_OVERLAP_WINMUTE_32KHZ_START;
+            w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_32KHZ_INC;
+            w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_32KHZ_START;
+            w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_32KHZ_INC;
+#endif
+#ifdef NETEQ_48KHZ_WIDEBAND
+        }
+        else if (inst->fs == 48000)
+        {
+            /* Windowing in Q15 */
+            w16_winMute = NETEQ_OVERLAP_WINMUTE_48KHZ_START;
+            w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_48KHZ_INC;
+            w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_48KHZ_START;
+            w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_48KHZ_INC;
+#endif
+        }
+        else
+        {
+            /* Unsupported sample rate (should not be possible) */
+            return NETEQ_OTHER_ERROR;
+        }
+
+        /* Do overlap add between new vector and overlap */
+        for (i = 0; i < inst->ExpandInst.w16_overlap; i++)
+        {
+            /* overlapVec[i] = WinMute * overlapVec[i] + WinUnMute * outData[i] */
+            inst->ExpandInst.pw16_overlapVec[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+                WEBRTC_SPL_MUL_16_16(
+                    inst->ExpandInst.pw16_overlapVec[i], w16_winMute) +
+                WEBRTC_SPL_MUL_16_16(pw16_outData[i], w16_winUnMute)
+                + 16384, 15); /* shift with proper rounding */
+
+            w16_winMute += w16_winMuteInc; /* decrease mute factor (inc<0) */
+            w16_winUnMute += w16_winUnMuteInc; /* increase unmute factor (inc>0) */
+
+        }
+
+        /*
+         * Shift the contents of the outData buffer by overlap samples, since we
+         * already used these first samples in the overlapVec above
+         */
+
+        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_outData+inst->ExpandInst.w16_overlap, len);
+
+    }
+    else
+    {
+        /* This is a subsequent CNG call; no special overlap needed */
+
+        /* Generate len samples */
+        if (WebRtcCng_Generate(inst->CNG_Codec_inst, pw16_outData, (WebRtc_Word16) len, 0) < 0)
+        {
+            /* error returned */
+            return -WebRtcCng_GetErrorCodeDec(inst->CNG_Codec_inst);
+        }
+    }
+
+    return 0;
+
+}
+
+#endif /* NETEQ_CNG_CODEC */
+
diff --git a/src/modules/audio_coding/neteq/codec_db.c b/src/modules/audio_coding/neteq/codec_db.c
new file mode 100644
index 0000000..e91e372
--- /dev/null
+++ b/src/modules/audio_coding/neteq/codec_db.c
@@ -0,0 +1,757 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Implementation of the codec database.
+ */
+
+#include "codec_db.h"
+
+#include <string.h> /* to define NULL */
+
+#include "signal_processing_library.h"
+
+#include "neteq_error_codes.h"
+
+/*
+ * Resets the codec database.
+ */
+
+int WebRtcNetEQ_DbReset(CodecDbInst_t *inst)
+{
+    int i;
+
+    WebRtcSpl_MemSetW16((WebRtc_Word16*) inst, 0,
+        sizeof(CodecDbInst_t) / sizeof(WebRtc_Word16));
+
+    for (i = 0; i < NUM_TOTAL_CODECS; i++)
+    {
+        inst->position[i] = -1;
+    }
+
+    for (i = 0; i < NUM_CODECS; i++)
+    {
+        inst->payloadType[i] = -1;
+    }
+
+    for (i = 0; i < NUM_CNG_CODECS; i++)
+    {
+        inst->CNGpayloadType[i] = -1;
+    }
+
+    return 0;
+}
+
+/*
+ * Adds a new codec to the database.
+ */
+
+int WebRtcNetEQ_DbAdd(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec,
+                      WebRtc_Word16 payloadType, FuncDecode funcDecode,
+                      FuncDecode funcDecodeRCU, FuncDecodePLC funcDecodePLC,
+                      FuncDecodeInit funcDecodeInit, FuncAddLatePkt funcAddLatePkt,
+                      FuncGetMDinfo funcGetMDinfo, FuncGetPitchInfo funcGetPitch,
+                      FuncUpdBWEst funcUpdBWEst, FuncGetErrorCode funcGetErrorCode,
+                      void* codec_state, WebRtc_UWord16 codec_fs)
+{
+
+    int temp;
+    int insertCNGcodec = 0, overwriteCNGcodec = 0, CNGpos = -1;
+
+#ifndef NETEQ_RED_CODEC
+    if (codec == kDecoderRED)
+    {
+        return CODEC_DB_UNSUPPORTED_CODEC;
+    }
+#endif
+    if (((int) codec <= (int) kDecoderReservedStart) || ((int) codec
+        >= (int) kDecoderReservedEnd))
+    {
+        return CODEC_DB_UNSUPPORTED_CODEC;
+    }
+
+    if ((codec_fs != 8000)
+#ifdef NETEQ_WIDEBAND
+    &&(codec_fs!=16000)
+#endif
+#ifdef NETEQ_32KHZ_WIDEBAND
+    &&(codec_fs!=32000)
+#endif
+#ifdef NETEQ_48KHZ_WIDEBAND
+    &&(codec_fs!=48000)
+#endif
+    )
+    {
+        return CODEC_DB_UNSUPPORTED_FS;
+    }
+
+    /* Ensure that the codec type is supported */
+    switch (codec)
+    {
+#ifdef NETEQ_PCM16B_CODEC
+        case kDecoderPCM16B :
+        case kDecoderPCM16B_2ch :
+#endif
+#ifdef NETEQ_G711_CODEC
+        case kDecoderPCMu :
+        case kDecoderPCMa :
+        case kDecoderPCMu_2ch :
+        case kDecoderPCMa_2ch :
+#endif
+#ifdef NETEQ_ILBC_CODEC
+        case kDecoderILBC :
+#endif
+#ifdef NETEQ_ISAC_CODEC
+        case kDecoderISAC :
+#endif
+#ifdef NETEQ_ISAC_SWB_CODEC
+        case kDecoderISACswb :
+#endif
+#ifdef NETEQ_G722_CODEC
+        case kDecoderG722 :
+        case kDecoderG722_2ch :
+#endif
+#ifdef NETEQ_WIDEBAND
+        case kDecoderPCM16Bwb :
+        case kDecoderPCM16Bwb_2ch :
+#endif
+#ifdef NETEQ_32KHZ_WIDEBAND
+        case kDecoderPCM16Bswb32kHz :
+        case kDecoderPCM16Bswb32kHz_2ch :
+#endif
+#ifdef NETEQ_CNG_CODEC
+        case kDecoderCNG :
+#endif
+#ifdef NETEQ_ATEVENT_DECODE
+        case kDecoderAVT :
+#endif
+#ifdef NETEQ_RED_CODEC
+        case kDecoderRED :
+#endif
+#ifdef NETEQ_48KHZ_WIDEBAND
+        case kDecoderPCM16Bswb48kHz :
+#endif
+#ifdef NETEQ_ARBITRARY_CODEC
+        case kDecoderArbitrary:
+#endif
+#ifdef NETEQ_G729_CODEC
+        case kDecoderG729:
+#endif
+#ifdef NETEQ_G729_1_CODEC
+        case kDecoderG729_1 :
+#endif
+#ifdef NETEQ_G726_CODEC
+        case kDecoderG726_16 :
+        case kDecoderG726_24 :
+        case kDecoderG726_32 :
+        case kDecoderG726_40 :
+#endif
+#ifdef NETEQ_G722_1_CODEC
+        case kDecoderG722_1_16 :
+        case kDecoderG722_1_24 :
+        case kDecoderG722_1_32 :
+#endif
+#ifdef NETEQ_G722_1C_CODEC
+        case kDecoderG722_1C_24 :
+        case kDecoderG722_1C_32 :
+        case kDecoderG722_1C_48 :
+#endif
+#ifdef NETEQ_SPEEX_CODEC
+        case kDecoderSPEEX_8 :
+        case kDecoderSPEEX_16 :
+#endif
+#ifdef NETEQ_CELT_CODEC
+        case kDecoderCELT_32 :
+        case kDecoderCELT_32_2ch :
+#endif
+#ifdef NETEQ_GSMFR_CODEC
+        case kDecoderGSMFR :
+#endif
+#ifdef NETEQ_AMR_CODEC
+        case kDecoderAMR :
+#endif
+#ifdef NETEQ_AMRWB_CODEC
+        case kDecoderAMRWB :
+#endif
+        {
+            /* If we end up here, the inserted codec is supported => Do nothing */
+            break;
+        }
+    default:
+    {
+        /* If we get to this point, the inserted codec is not supported */
+        return CODEC_DB_UNSUPPORTED_CODEC;
+    }
+    }
+
+    /* Check to see if payload type is taken */
+    if (WebRtcNetEQ_DbGetCodec(inst, payloadType) > 0)
+    {
+        return CODEC_DB_PAYLOAD_TAKEN;
+    }
+
+    /* Special case for CNG codecs */
+    if (codec == kDecoderCNG)
+    {
+        /* check if this is first CNG codec to be registered */
+        if (WebRtcNetEQ_DbGetPayload(inst, codec) == CODEC_DB_NOT_EXIST2)
+        {
+            /* no other CNG codec found */
+            insertCNGcodec = 1;
+        }
+
+        /* find the appropriate insert position in CNG payload vector */
+        switch (codec_fs)
+        {
+#ifdef NETEQ_WIDEBAND
+            case 16000:
+            CNGpos = 1;
+            break;
+#endif
+#ifdef NETEQ_32KHZ_WIDEBAND
+            case 32000:
+            CNGpos = 2;
+            break;
+#endif
+#ifdef NETEQ_48KHZ_WIDEBAND
+            case 48000:
+            CNGpos = 3;
+            break;
+#endif
+            default: /* 8000 Hz case */
+                CNGpos = 0;
+                /*
+                 * The 8 kHz CNG payload type is the one associated with the regular codec DB
+                 * should override any other setting.
+                 * Overwrite if this isn't the first CNG
+                 */
+                overwriteCNGcodec = !insertCNGcodec;
+                break;
+        }
+
+        /* insert CNG payload type */
+        inst->CNGpayloadType[CNGpos] = payloadType;
+
+    }
+
+    if ((codec != kDecoderCNG) || (insertCNGcodec == 1) || (overwriteCNGcodec == 1))
+    {
+        /* Check if we have reached the maximum numbers of simultaneous codecs */
+        if (inst->nrOfCodecs == NUM_CODECS) return CODEC_DB_FULL;
+
+        /* Check that codec has not already been initialized to DB =>
+         remove it and reinitialize according to new spec */
+        if ((inst->position[codec] != -1) && (overwriteCNGcodec != 1))
+        { /* if registering multiple CNG codecs, don't remove, just overwrite */
+            WebRtcNetEQ_DbRemove(inst, codec);
+        }
+
+        if (overwriteCNGcodec == 1)
+        {
+            temp = inst->position[codec];
+        }
+        else
+        {
+            temp = inst->nrOfCodecs; /* Store this codecs position */
+            inst->position[codec] = temp;
+            inst->nrOfCodecs++;
+        }
+
+        inst->payloadType[temp] = payloadType;
+
+        /* Copy to database */
+        inst->codec_state[temp] = codec_state;
+        inst->funcDecode[temp] = funcDecode;
+        inst->funcDecodeRCU[temp] = funcDecodeRCU;
+        inst->funcAddLatePkt[temp] = funcAddLatePkt;
+        inst->funcDecodeInit[temp] = funcDecodeInit;
+        inst->funcDecodePLC[temp] = funcDecodePLC;
+        inst->funcGetMDinfo[temp] = funcGetMDinfo;
+        inst->funcGetPitch[temp] = funcGetPitch;
+        inst->funcUpdBWEst[temp] = funcUpdBWEst;
+        inst->funcGetErrorCode[temp] = funcGetErrorCode;
+        inst->codec_fs[temp] = codec_fs;
+
+    }
+
+    return 0;
+}
+
+/*
+ * Removes a codec from the database.
+ */
+
+int WebRtcNetEQ_DbRemove(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec)
+{
+    int i;
+    int pos = -1;
+
+#ifndef NETEQ_RED_CODEC
+    if (codec == kDecoderRED)
+    {
+        return CODEC_DB_UNSUPPORTED_CODEC;
+    }
+#endif
+    if (((int) codec <= (int) kDecoderReservedStart) || ((int) codec
+        >= (int) kDecoderReservedEnd))
+    {
+        return CODEC_DB_UNSUPPORTED_CODEC;
+    }
+
+    pos = inst->position[codec];
+    if (pos == -1)
+    {
+        return CODEC_DB_NOT_EXIST4;
+    }
+    else
+    {
+        /* Remove this codec */
+        inst->position[codec] = -1;
+        for (i = pos; i < (inst->nrOfCodecs - 1); i++)
+        {
+            inst->payloadType[i] = inst->payloadType[i + 1];
+            inst->codec_state[i] = inst->codec_state[i + 1];
+            inst->funcDecode[i] = inst->funcDecode[i + 1];
+            inst->funcDecodeRCU[i] = inst->funcDecodeRCU[i + 1];
+            inst->funcAddLatePkt[i] = inst->funcAddLatePkt[i + 1];
+            inst->funcDecodeInit[i] = inst->funcDecodeInit[i + 1];
+            inst->funcDecodePLC[i] = inst->funcDecodePLC[i + 1];
+            inst->funcGetMDinfo[i] = inst->funcGetMDinfo[i + 1];
+            inst->funcGetPitch[i] = inst->funcGetPitch[i + 1];
+            inst->funcUpdBWEst[i] = inst->funcUpdBWEst[i + 1];
+            inst->funcGetErrorCode[i] = inst->funcGetErrorCode[i + 1];
+            inst->codec_fs[i] = inst->codec_fs[i + 1];
+        }
+        inst->payloadType[i] = -1;
+        inst->codec_state[i] = NULL;
+        inst->funcDecode[i] = NULL;
+        inst->funcDecodeRCU[i] = NULL;
+        inst->funcAddLatePkt[i] = NULL;
+        inst->funcDecodeInit[i] = NULL;
+        inst->funcDecodePLC[i] = NULL;
+        inst->funcGetMDinfo[i] = NULL;
+        inst->funcGetPitch[i] = NULL;
+        inst->funcUpdBWEst[i] = NULL;
+        inst->funcGetErrorCode[i] = NULL;
+        inst->codec_fs[i] = 0;
+        /* Move down all the codecs above this one */
+        for (i = 0; i < NUM_TOTAL_CODECS; i++)
+        {
+            if (inst->position[i] >= pos)
+            {
+                inst->position[i] = inst->position[i] - 1;
+            }
+        }
+        inst->nrOfCodecs--;
+
+        if (codec == kDecoderCNG)
+        {
+            /* also remove all registered CNG payload types */
+            for (i = 0; i < NUM_CNG_CODECS; i++)
+            {
+                inst->CNGpayloadType[i] = -1;
+            }
+        }
+    }
+    return 0;
+}
+
+/*
+ * Get the decoder function pointers for a codec.
+ */
+
+int WebRtcNetEQ_DbGetPtrs(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec,
+                          CodecFuncInst_t *ptr_inst)
+{
+
+    int pos = inst->position[codec];
+    if ((codec <= kDecoderReservedStart) || (codec >= kDecoderReservedEnd) || (codec
+        > NUM_TOTAL_CODECS))
+    {
+        /* ERROR */
+        pos = -1;
+    }
+    if (pos >= 0)
+    {
+        ptr_inst->codec_state = inst->codec_state[pos];
+        ptr_inst->funcAddLatePkt = inst->funcAddLatePkt[pos];
+        ptr_inst->funcDecode = inst->funcDecode[pos];
+        ptr_inst->funcDecodeRCU = inst->funcDecodeRCU[pos];
+        ptr_inst->funcDecodeInit = inst->funcDecodeInit[pos];
+        ptr_inst->funcDecodePLC = inst->funcDecodePLC[pos];
+        ptr_inst->funcGetMDinfo = inst->funcGetMDinfo[pos];
+        ptr_inst->funcUpdBWEst = inst->funcUpdBWEst[pos];
+        ptr_inst->funcGetErrorCode = inst->funcGetErrorCode[pos];
+        ptr_inst->codec_fs = inst->codec_fs[pos];
+        return 0;
+    }
+    else
+    {
+        WebRtcSpl_MemSetW16((WebRtc_Word16*) ptr_inst, 0,
+            sizeof(CodecFuncInst_t) / sizeof(WebRtc_Word16));
+        return CODEC_DB_NOT_EXIST1;
+    }
+}
+
+/*
+ * Returns payload number given a codec identifier.
+ */
+
+int WebRtcNetEQ_DbGetPayload(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codecID)
+{
+    if (inst->position[codecID] == -1)
+        return CODEC_DB_NOT_EXIST2;
+    else
+        return (inst->payloadType[inst->position[codecID]]);
+
+}
+
+/*
+ * Returns codec identifier given a payload number.
+ * Returns -1 if the payload type does not exist.
+ */
+
+int WebRtcNetEQ_DbGetCodec(CodecDbInst_t *inst, int payloadType)
+{
+    int i, pos;
+
+    for (i = 0; i < NUM_TOTAL_CODECS; i++)
+    {
+        pos = inst->position[i];
+        if (pos != -1)
+        {
+            if (inst->payloadType[pos] == payloadType) return i;
+        }
+    }
+
+    /* did not find payload type */
+    /* check if it's a CNG codec */
+    if (WebRtcNetEQ_DbIsCNGPayload(inst, payloadType))
+    {
+        return kDecoderCNG;
+    }
+
+    /* found no match */
+    return CODEC_DB_NOT_EXIST3;
+}
+
+/*
+ * Extracts the Payload Split information of the codec with the specified payloadType.
+ */
+
+int WebRtcNetEQ_DbGetSplitInfo(SplitInfo_t *inst, enum WebRtcNetEQDecoder codecID,
+                               int codedsize)
+{
+
+    switch (codecID)
+    {
+#ifdef NETEQ_ISAC_CODEC
+        case kDecoderISAC:
+#endif
+#ifdef NETEQ_ISAC_SWB_CODEC
+        case kDecoderISACswb:
+#endif
+#ifdef NETEQ_ARBITRARY_CODEC
+        case kDecoderArbitrary:
+#endif
+#ifdef NETEQ_AMR_CODEC
+        case kDecoderAMR:
+#endif
+#ifdef NETEQ_AMRWB_CODEC
+        case kDecoderAMRWB:
+#endif
+#ifdef NETEQ_G726_CODEC
+            /* Treat G726 as non-splittable to simplify the implementation */
+        case kDecoderG726_16:
+        case kDecoderG726_24:
+        case kDecoderG726_32:
+        case kDecoderG726_40:
+#endif
+#ifdef NETEQ_SPEEX_CODEC
+        case kDecoderSPEEX_8:
+        case kDecoderSPEEX_16:
+#endif
+#ifdef NETEQ_CELT_CODEC
+        case kDecoderCELT_32 :
+        case kDecoderCELT_32_2ch :
+#endif
+#ifdef NETEQ_G729_1_CODEC
+        case kDecoderG729_1:
+#endif
+        {
+            /* These codecs' payloads are not splittable */
+            inst->deltaBytes = NO_SPLIT;
+            return 0;
+        }
+
+            /*
+             * Sample based coders are a special case.
+             * In this case, deltaTime signals the number of bytes per timestamp unit times 2
+             * in log2 domain.
+             */
+#if (defined NETEQ_G711_CODEC)
+        case kDecoderPCMu:
+        case kDecoderPCMa:
+        case kDecoderPCMu_2ch:
+        case kDecoderPCMa_2ch:
+        {
+            inst->deltaBytes = -12;
+            inst->deltaTime = 1;
+            return 0;
+        }
+#endif
+#if (defined NETEQ_G722_CODEC)
+        case kDecoderG722:
+        case kDecoderG722_2ch:
+        {
+            inst->deltaBytes = -14;
+            inst->deltaTime = 0;
+            return 0;
+        }
+#endif
+#if (defined NETEQ_PCM16B_CODEC)
+        case kDecoderPCM16B:
+        case kDecoderPCM16B_2ch:
+        {
+            inst->deltaBytes = -12;
+            inst->deltaTime = 2;
+            return 0;
+        }
+#endif
+#if ((defined NETEQ_PCM16B_CODEC)&&(defined NETEQ_WIDEBAND))
+        case kDecoderPCM16Bwb:
+        case kDecoderPCM16Bwb_2ch:
+        {
+            inst->deltaBytes = -14;
+            inst->deltaTime = 2;
+            return 0;
+        }
+#endif
+#if ((defined NETEQ_PCM16B_CODEC)&&(defined NETEQ_32KHZ_WIDEBAND))
+        case kDecoderPCM16Bswb32kHz:
+        case kDecoderPCM16Bswb32kHz_2ch:
+        {
+            inst->deltaBytes = -18;
+            inst->deltaTime = 2;
+            return 0;
+        }
+#endif
+#if ((defined NETEQ_PCM16B_CODEC)&&(defined NETEQ_48KHZ_WIDEBAND))
+        case kDecoderPCM16Bswb48kHz:
+        {
+            inst->deltaBytes = -22;
+            inst->deltaTime = 2;
+            return 0;
+        }
+#endif
+
+            /* Splittable payloads */
+#ifdef NETEQ_G722_1_CODEC
+        case kDecoderG722_1_16:
+        {
+            inst->deltaBytes = 40;
+            inst->deltaTime = 320;
+            return 0;
+        }
+        case kDecoderG722_1_24:
+        {
+            inst->deltaBytes = 60;
+            inst->deltaTime = 320;
+            return 0;
+        }
+        case kDecoderG722_1_32:
+        {
+            inst->deltaBytes = 80;
+            inst->deltaTime = 320;
+            return 0;
+        }
+#endif
+#ifdef NETEQ_G722_1C_CODEC
+        case kDecoderG722_1C_24:
+        {
+            inst->deltaBytes = 60;
+            inst->deltaTime = 640;
+            return 0;
+        }
+        case kDecoderG722_1C_32:
+        {
+            inst->deltaBytes = 80;
+            inst->deltaTime = 640;
+            return 0;
+        }
+        case kDecoderG722_1C_48:
+        {
+            inst->deltaBytes = 120;
+            inst->deltaTime = 640;
+            return 0;
+        }
+#endif
+#ifdef NETEQ_G729_CODEC
+        case kDecoderG729:
+        {
+            inst->deltaBytes = 10;
+            inst->deltaTime = 80;
+            return 0;
+        }
+#endif
+#ifdef NETEQ_ILBC_CODEC
+        case kDecoderILBC:
+        {
+            /* Check for splitting of iLBC packets.
+             * If payload size is a multiple of 50 bytes it should be split into 30ms frames.
+             * If payload size is a multiple of 38 bytes it should be split into 20ms frames.
+             * Least common multiplier between 38 and 50 is 950, so the payload size must be less than
+             * 950 bytes in order to resolve the frames unambiguously.
+             * Currently max 12 frames in one bundle.
+             */
+            switch (codedsize)
+            {
+                case 50:
+                case 100:
+                case 150:
+                case 200:
+                case 250:
+                case 300:
+                case 350:
+                case 400:
+                case 450:
+                case 500:
+                case 550:
+                case 600:
+                {
+                    inst->deltaBytes = 50;
+                    inst->deltaTime = 240;
+                    break;
+                }
+                case 38:
+                case 76:
+                case 114:
+                case 152:
+                case 190:
+                case 228:
+                case 266:
+                case 304:
+                case 342:
+                case 380:
+                case 418:
+                case 456:
+                {
+                    inst->deltaBytes = 38;
+                    inst->deltaTime = 160;
+                    break;
+                }
+                default:
+                {
+                    return AMBIGUOUS_ILBC_FRAME_SIZE; /* Something not supported... */
+                }
+            }
+            return 0;
+        }
+#endif
+#ifdef NETEQ_GSMFR_CODEC
+        case kDecoderGSMFR:
+        {
+            inst->deltaBytes = 33;
+            inst->deltaTime = 160;
+            return 0;
+        }
+#endif
+        default:
+        { /*Unknown codec */
+            inst->deltaBytes = NO_SPLIT;
+            return CODEC_DB_UNKNOWN_CODEC;
+        }
+    } /* end of switch */
+}
+
+/*
+ * Returns 1 if codec is multiple description, 0 otherwise.
+ * NOTE: This function is a stub, since there currently are no MD codecs.
+ */
+int WebRtcNetEQ_DbIsMDCodec(enum WebRtcNetEQDecoder codecID)
+{
+    if (0) /* Add test for MD codecs here */
+        return 1;
+    else
+        return 0;
+}
+
+/*
+ * Returns 1 if payload type is registered as a CNG codec, 0 otherwise
+ */
+int WebRtcNetEQ_DbIsCNGPayload(CodecDbInst_t *inst, int payloadType)
+{
+#ifdef NETEQ_CNG_CODEC
+    int i;
+
+    for(i=0; i<NUM_CNG_CODECS; i++)
+    {
+        if( (inst->CNGpayloadType[i] != -1) && (inst->CNGpayloadType[i] == payloadType) )
+        {
+            return 1;
+        }
+    }
+#endif
+
+    return 0;
+
+}
+
+/*
+ * Return the sample rate for the codec with the given payload type, 0 if error
+ */
+WebRtc_UWord16 WebRtcNetEQ_DbGetSampleRate(CodecDbInst_t *inst, int payloadType)
+{
+    int i;
+    CodecFuncInst_t codecInst;
+
+    /* Sanity */
+    if (inst == NULL)
+    {
+        /* return 0 Hz */
+        return 0;
+    }
+
+    /* Check among CNG payloads */
+    for (i = 0; i < NUM_CNG_CODECS; i++)
+    {
+        if ((inst->CNGpayloadType[i] != -1) && (inst->CNGpayloadType[i] == payloadType))
+        {
+            switch (i)
+            {
+                case 1:
+                    return 16000;
+                case 2:
+                    return 32000;
+                case 3:
+                    return 48000;
+                default:
+                    return 8000;
+            }
+        }
+    }
+
+    /* Not a CNG payload, check the other payloads */
+    i = WebRtcNetEQ_DbGetCodec(inst, payloadType);
+    if (i >= 0)
+    {
+        if (WebRtcNetEQ_DbGetPtrs(inst, (enum WebRtcNetEQDecoder) i, &codecInst) != 0)
+        {
+            /* Unexpected error, return 0 Hz */
+            return 0;
+        }
+        return codecInst.codec_fs;
+    }
+
+    /* If we end up here, we got an error, return 0 Hz */
+    return 0;
+
+}
+
diff --git a/src/modules/audio_coding/neteq/codec_db.h b/src/modules/audio_coding/neteq/codec_db.h
new file mode 100644
index 0000000..7f42980
--- /dev/null
+++ b/src/modules/audio_coding/neteq/codec_db.h
@@ -0,0 +1,126 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Interface for the codec database.
+ */
+
+#ifndef CODEC_DB_H
+#define CODEC_DB_H
+
+#include "typedefs.h"
+
+#include "webrtc_neteq.h"
+#include "codec_db_defines.h"
+#include "neteq_defines.h"
+
+#if defined(NETEQ_48KHZ_WIDEBAND)
+    #define NUM_CNG_CODECS 4
+#elif defined(NETEQ_32KHZ_WIDEBAND)
+    #define NUM_CNG_CODECS 3
+#elif defined(NETEQ_WIDEBAND)
+    #define NUM_CNG_CODECS 2
+#else
+    #define NUM_CNG_CODECS 1
+#endif
+
+typedef struct
+{
+
+    WebRtc_Word16 position[NUM_TOTAL_CODECS];
+    WebRtc_Word16 nrOfCodecs;
+
+    WebRtc_Word16 payloadType[NUM_CODECS];
+    FuncDecode funcDecode[NUM_CODECS];
+    FuncDecode funcDecodeRCU[NUM_CODECS];
+    FuncDecodePLC funcDecodePLC[NUM_CODECS];
+    FuncDecodeInit funcDecodeInit[NUM_CODECS];
+    FuncAddLatePkt funcAddLatePkt[NUM_CODECS];
+    FuncGetMDinfo funcGetMDinfo[NUM_CODECS];
+    FuncGetPitchInfo funcGetPitch[NUM_CODECS];
+    FuncUpdBWEst funcUpdBWEst[NUM_CODECS];
+    FuncGetErrorCode funcGetErrorCode[NUM_CODECS];
+    void * codec_state[NUM_CODECS];
+    WebRtc_UWord16 codec_fs[NUM_CODECS];
+    WebRtc_Word16 CNGpayloadType[NUM_CNG_CODECS];
+
+} CodecDbInst_t;
+
+#define NO_SPLIT -1 /* codec payload cannot be split */
+
+typedef struct
+{
+    WebRtc_Word16 deltaBytes;
+    WebRtc_Word16 deltaTime;
+} SplitInfo_t;
+
+/*
+ * Resets the codec database.
+ */
+int WebRtcNetEQ_DbReset(CodecDbInst_t *inst);
+
+/*
+ * Adds a new codec to the database.
+ */
+int WebRtcNetEQ_DbAdd(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec,
+                      WebRtc_Word16 payloadType, FuncDecode funcDecode,
+                      FuncDecode funcDecodeRCU, FuncDecodePLC funcDecodePLC,
+                      FuncDecodeInit funcDecodeInit, FuncAddLatePkt funcAddLatePkt,
+                      FuncGetMDinfo funcGetMDinfo, FuncGetPitchInfo funcGetPitch,
+                      FuncUpdBWEst funcUpdBWEst, FuncGetErrorCode funcGetErrorCode,
+                      void* codec_state, WebRtc_UWord16 codec_fs);
+
+/*
+ * Removes a codec from the database.
+ */
+int WebRtcNetEQ_DbRemove(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codec);
+
+/*
+ * Get the decoder function pointers for a codec.
+ */
+int WebRtcNetEQ_DbGetPtrs(CodecDbInst_t *inst, enum WebRtcNetEQDecoder,
+                          CodecFuncInst_t *ptr_inst);
+
+/*
+ * Returns payload number given a codec identifier.
+ */
+
+int WebRtcNetEQ_DbGetPayload(CodecDbInst_t *inst, enum WebRtcNetEQDecoder codecID);
+
+/*
+ * Returns codec identifier given a payload number.
+ */
+
+int WebRtcNetEQ_DbGetCodec(CodecDbInst_t *inst, int payloadType);
+
+/*
+ * Extracts the Payload Split information of the codec with the specified payloadType.
+ */
+
+int WebRtcNetEQ_DbGetSplitInfo(SplitInfo_t *inst, enum WebRtcNetEQDecoder codecID,
+                               int codedsize);
+
+/*
+ * Returns 1 if codec is multiple description type, 0 otherwise.
+ */
+int WebRtcNetEQ_DbIsMDCodec(enum WebRtcNetEQDecoder codecID);
+
+/*
+ * Returns 1 if payload type is registered as a CNG codec, 0 otherwise.
+ */
+int WebRtcNetEQ_DbIsCNGPayload(CodecDbInst_t *inst, int payloadType);
+
+/*
+ * Return the sample rate for the codec with the given payload type, 0 if error.
+ */
+WebRtc_UWord16 WebRtcNetEQ_DbGetSampleRate(CodecDbInst_t *inst, int payloadType);
+
+#endif
+
diff --git a/src/modules/audio_coding/neteq/codec_db_defines.h b/src/modules/audio_coding/neteq/codec_db_defines.h
new file mode 100644
index 0000000..9b78b86
--- /dev/null
+++ b/src/modules/audio_coding/neteq/codec_db_defines.h
@@ -0,0 +1,89 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Some definitions related to the codec database.
+ */
+
+#ifndef CODEC_DB_DEFINES_H
+#define CODEC_DB_DEFINES_H
+
+#include "typedefs.h"
+
+#define NUM_CODECS 47 /* probably too large with the limited set of supported codecs*/
+#define NUM_TOTAL_CODECS	kDecoderReservedEnd
+
+/*
+ * Pointer to decoder function.
+ */
+typedef WebRtc_Word16 (*FuncDecode)(void* state, WebRtc_Word16* encoded, WebRtc_Word16 len,
+                                    WebRtc_Word16* decoded, WebRtc_Word16* speechType);
+
+/*
+ * Pointer to PLC function.
+ */
+typedef WebRtc_Word16 (*FuncDecodePLC)(void* state, WebRtc_Word16* decodec,
+                                       WebRtc_Word16 frames);
+
+/*
+ * Pointer to decoder init function.
+ */
+typedef WebRtc_Word16 (*FuncDecodeInit)(void* state);
+
+/*
+ * Pointer to add late packet function.
+ */
+typedef WebRtc_Word16
+                (*FuncAddLatePkt)(void* state, WebRtc_Word16* encoded, WebRtc_Word16 len);
+
+/*
+ * Pointer to get MD infofunction.
+ */
+typedef WebRtc_Word16 (*FuncGetMDinfo)(void* state);
+
+/*
+ * Pointer to pitch info function.
+ * Return 0 for unvoiced, -1 if pitch not availiable.
+ */
+typedef WebRtc_Word16 (*FuncGetPitchInfo)(void* state, WebRtc_Word16* encoded,
+                                          WebRtc_Word16* length);
+
+/*
+ *  Pointer to the update bandwidth estimate function
+ */
+typedef WebRtc_Word16 (*FuncUpdBWEst)(void* state, const WebRtc_UWord16 *encoded,
+                                      WebRtc_Word32 packet_size,
+                                      WebRtc_UWord16 rtp_seq_number, WebRtc_UWord32 send_ts,
+                                      WebRtc_UWord32 arr_ts);
+
+/*
+ *  Pointer to error code function
+ */
+typedef WebRtc_Word16 (*FuncGetErrorCode)(void* state);
+
+typedef struct CodecFuncInst_t_
+{
+
+    FuncDecode funcDecode;
+    FuncDecode funcDecodeRCU;
+    FuncDecodePLC funcDecodePLC;
+    FuncDecodeInit funcDecodeInit;
+    FuncAddLatePkt funcAddLatePkt;
+    FuncGetMDinfo funcGetMDinfo;
+    FuncUpdBWEst funcUpdBWEst; /* Currently in use for the ISAC family (without LC) only*/
+    FuncGetErrorCode funcGetErrorCode;
+    void * codec_state;
+    WebRtc_UWord16 codec_fs;
+    WebRtc_UWord32 timeStamp;
+
+} CodecFuncInst_t;
+
+#endif
+
diff --git a/src/modules/audio_coding/neteq/correlator.c b/src/modules/audio_coding/neteq/correlator.c
new file mode 100644
index 0000000..97c41da
--- /dev/null
+++ b/src/modules/audio_coding/neteq/correlator.c
@@ -0,0 +1,132 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "dsp.h"
+
+#include "signal_processing_library.h"
+
+#include "dsp_helpfunctions.h"
+
+/* Scratch usage:
+
+ Type           Name                size  startpos  endpos
+ WebRtc_Word16  pw16_corrVec        62    0         61
+ WebRtc_Word16  pw16_data_ds        124   0         123
+ WebRtc_Word32  pw32_corr           2*54  124       231
+
+ Total:  232
+ */
+
+#define	 SCRATCH_pw16_corrVec			0
+#define	 SCRATCH_pw16_data_ds			0
+#define	 SCRATCH_pw32_corr				124
+
+#define NETEQ_CORRELATOR_DSVECLEN 		124	/* 124 = 60 + 10 + 54 */
+
+WebRtc_Word16 WebRtcNetEQ_Correlator(DSPInst_t *inst,
+#ifdef SCRATCH
+                                     WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                                     WebRtc_Word16 *pw16_data,
+                                     WebRtc_Word16 w16_dataLen,
+                                     WebRtc_Word16 *pw16_corrOut,
+                                     WebRtc_Word16 *pw16_corrScale)
+{
+    WebRtc_Word16 w16_corrLen = 60;
+#ifdef SCRATCH
+    WebRtc_Word16 *pw16_data_ds = pw16_scratchPtr + SCRATCH_pw16_corrVec;
+    WebRtc_Word32 *pw32_corr = (WebRtc_Word32*) (pw16_scratchPtr + SCRATCH_pw32_corr);
+    /*	WebRtc_Word16 *pw16_corrVec = pw16_scratchPtr + SCRATCH_pw16_corrVec;*/
+#else
+    WebRtc_Word16 pw16_data_ds[NETEQ_CORRELATOR_DSVECLEN];
+    WebRtc_Word32 pw32_corr[54];
+    /*	WebRtc_Word16 pw16_corrVec[4+54+4];*/
+#endif
+    /*	WebRtc_Word16 *pw16_corr=&pw16_corrVec[4];*/
+    WebRtc_Word16 w16_maxVal;
+    WebRtc_Word32 w32_maxVal;
+    WebRtc_Word16 w16_normVal;
+    WebRtc_Word16 w16_normVal2;
+    /*	WebRtc_Word16 w16_corrUpsLen;*/
+    WebRtc_Word16 *pw16_B = NULL;
+    WebRtc_Word16 w16_Blen = 0;
+    WebRtc_Word16 w16_factor = 0;
+
+    /* Set constants depending on frequency used */
+    if (inst->fs == 8000)
+    {
+        w16_Blen = 3;
+        w16_factor = 2;
+        pw16_B = (WebRtc_Word16*) WebRtcNetEQ_kDownsample8kHzTbl;
+#ifdef NETEQ_WIDEBAND
+    }
+    else if (inst->fs==16000)
+    {
+        w16_Blen = 5;
+        w16_factor = 4;
+        pw16_B = (WebRtc_Word16*)WebRtcNetEQ_kDownsample16kHzTbl;
+#endif
+#ifdef NETEQ_32KHZ_WIDEBAND
+    }
+    else if (inst->fs==32000)
+    {
+        w16_Blen = 7;
+        w16_factor = 8;
+        pw16_B = (WebRtc_Word16*)WebRtcNetEQ_kDownsample32kHzTbl;
+#endif
+#ifdef NETEQ_48KHZ_WIDEBAND
+    }
+    else /* if inst->fs==48000 */
+    {
+        w16_Blen = 7;
+        w16_factor = 12;
+        pw16_B = (WebRtc_Word16*)WebRtcNetEQ_kDownsample48kHzTbl;
+#endif
+    }
+
+    /* Downsample data in order to work on a 4 kHz sampled signal */
+    WebRtcSpl_DownsampleFast(
+        pw16_data + w16_dataLen - (NETEQ_CORRELATOR_DSVECLEN * w16_factor),
+        (WebRtc_Word16) (NETEQ_CORRELATOR_DSVECLEN * w16_factor), pw16_data_ds,
+        NETEQ_CORRELATOR_DSVECLEN, pw16_B, w16_Blen, w16_factor, (WebRtc_Word16) 0);
+
+    /* Normalize downsampled vector to using entire 16 bit */
+    w16_maxVal = WebRtcSpl_MaxAbsValueW16(pw16_data_ds, 124);
+    w16_normVal = 16 - WebRtcSpl_NormW32((WebRtc_Word32) w16_maxVal);
+    WebRtcSpl_VectorBitShiftW16(pw16_data_ds, NETEQ_CORRELATOR_DSVECLEN, pw16_data_ds,
+        w16_normVal);
+
+    /* Correlate from lag 10 to lag 60 (20..120 in NB and 40..240 in WB) */
+
+    WebRtcNetEQ_CrossCorr(
+        pw32_corr, &pw16_data_ds[NETEQ_CORRELATOR_DSVECLEN - w16_corrLen],
+        &pw16_data_ds[NETEQ_CORRELATOR_DSVECLEN - w16_corrLen - 10], 60, 54,
+        6 /*maxValue... shifts*/, -1);
+
+    /*
+     * Move data from w32 to w16 vector.
+     * Normalize downsampled vector to using all 14 bits
+     */
+    w32_maxVal = WebRtcSpl_MaxAbsValueW32(pw32_corr, 54);
+    w16_normVal2 = 18 - WebRtcSpl_NormW32(w32_maxVal);
+    w16_normVal2 = WEBRTC_SPL_MAX(w16_normVal2, 0);
+
+    WebRtcSpl_VectorBitShiftW32ToW16(pw16_corrOut, 54, pw32_corr, w16_normVal2);
+
+    /* Total scale factor (right shifts) of correlation value */
+    *pw16_corrScale = 2 * w16_normVal + 6 + w16_normVal2;
+
+    return (50 + 1);
+}
+
+#undef	 SCRATCH_pw16_corrVec
+#undef	 SCRATCH_pw16_data_ds
+#undef	 SCRATCH_pw32_corr
+
diff --git a/src/modules/audio_coding/neteq/delay_logging.h b/src/modules/audio_coding/neteq/delay_logging.h
new file mode 100644
index 0000000..04b1c40
--- /dev/null
+++ b/src/modules/audio_coding/neteq/delay_logging.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Contains definitions for the delay logging functionality. Only used for debugging and
+ * tracing purposes.
+ */
+
+#ifndef DELAY_LOGGING_H
+#define DELAY_LOGGING_H
+
+#define NETEQ_DELAY_LOGGING_VERSION_STRING "2.0"
+
+#define NETEQ_DELAY_LOGGING_SIGNAL_RECIN 1
+#define NETEQ_DELAY_LOGGING_SIGNAL_FLUSH 2
+#define NETEQ_DELAY_LOGGING_SIGNAL_CLOCK 3
+#define NETEQ_DELAY_LOGGING_SIGNAL_EOF 4
+#define NETEQ_DELAY_LOGGING_SIGNAL_DECODE 5
+#define NETEQ_DELAY_LOGGING_SIGNAL_CHANGE_FS 6
+#define NETEQ_DELAY_LOGGING_SIGNAL_MERGE_INFO 7
+#define NETEQ_DELAY_LOGGING_SIGNAL_EXPAND_INFO 8
+#define NETEQ_DELAY_LOGGING_SIGNAL_ACCELERATE_INFO 9
+#define NETEQ_DELAY_LOGGING_SIGNAL_PREEMPTIVE_INFO 10
+#define NETEQ_DELAY_LOGGING_SIGNAL_OPTBUF 11
+#define NETEQ_DELAY_LOGGING_SIGNAL_DECODE_ONE_DESC 12
+
+#endif
diff --git a/src/modules/audio_coding/neteq/dsp.c b/src/modules/audio_coding/neteq/dsp.c
new file mode 100644
index 0000000..d9873da
--- /dev/null
+++ b/src/modules/audio_coding/neteq/dsp.c
@@ -0,0 +1,522 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains some DSP initialization functions and 
+ * constant table definitions.
+ */
+
+#include "dsp.h"
+
+#include "signal_processing_library.h"
+
+#include "neteq_error_codes.h"
+
+/* Filter coefficients used when downsampling from the indicated 
+ sample rates (8, 16, 32, 48 kHz) to 4 kHz.
+ Coefficients are in Q12. */
+
+/* {0.3, 0.4, 0.3} */
+const WebRtc_Word16 WebRtcNetEQ_kDownsample8kHzTbl[] = { 1229, 1638, 1229 };
+
+#ifdef NETEQ_WIDEBAND
+/* {0.15, 0.2, 0.3, 0.2, 0.15} */
+const WebRtc_Word16 WebRtcNetEQ_kDownsample16kHzTbl[] =
+{   614, 819, 1229, 819, 614};
+#endif
+
+#ifdef NETEQ_32KHZ_WIDEBAND
+/* {0.1425, 0.1251, 0.1525, 0.1628, 0.1525, 0.1251, 0.1425} */
+const WebRtc_Word16 WebRtcNetEQ_kDownsample32kHzTbl[] =
+{   584, 512, 625, 667, 625, 512, 584};
+#endif
+
+#ifdef NETEQ_48KHZ_WIDEBAND
+/* {0.2487, 0.0952, 0.1042, 0.1074, 0.1042, 0.0952, 0.2487} */
+const WebRtc_Word16 WebRtcNetEQ_kDownsample48kHzTbl[] =
+{   1019, 390, 427, 440, 427, 390, 1019};
+#endif
+
+/* Constants used in expand function WebRtcNetEQ_Expand */
+
+/* Q12: -1.264421 + 4.8659148*x - 4.0092827*x^2 + 1.4100529*x^3 */
+const WebRtc_Word16 WebRtcNetEQ_kMixFractionFuncTbl[4] = { -5179, 19931, -16422, 5776 };
+
+/* Tabulated divisions to save complexity */
+/* 1049/{0, .., 6} */
+const WebRtc_Word16 WebRtcNetEQ_k1049div[7] = { 0, 1049, 524, 349, 262, 209, 174 };
+
+/* 2097/{0, .., 6} */
+const WebRtc_Word16 WebRtcNetEQ_k2097div[7] = { 0, 2097, 1048, 699, 524, 419, 349 };
+
+/* 5243/{0, .., 6} */
+const WebRtc_Word16 WebRtcNetEQ_k5243div[7] = { 0, 5243, 2621, 1747, 1310, 1048, 873 };
+
+#ifdef WEBRTC_NETEQ_40BITACC_TEST
+/*
+ * Run NetEQ with simulated 40-bit accumulator to run bit-exact to a DSP
+ * implementation where the main (spl and NetEQ) functions have been
+ * 40-bit optimized. For testing purposes.
+ */
+
+/****************************************************************************
+ * WebRtcNetEQ_40BitAccCrossCorr(...)
+ *
+ * Calculates the Cross correlation between two sequences seq1 and seq2. Seq1
+ * is fixed and seq2 slides as the pointer is increased with step
+ *
+ * Input:
+ *		- seq1			: First sequence (fixed throughout the correlation)
+ *		- seq2			: Second sequence (slided step_seq2 for each 
+ *						  new correlation)
+ *		- dimSeq		: Number of samples to use in the cross correlation.
+ *                        Should be no larger than 1024 to avoid overflow.
+ *		- dimCrossCorr	: Number of CrossCorrelations to calculate (start 
+ *						  position for seq2 is updated for each new one)
+ *		- rShift			: Number of right shifts to use
+ *		- step_seq2		: How many (positive or negative) steps the seq2 
+ *						  pointer should be updated for each new cross 
+ *						  correlation value
+ *
+ * Output:
+ *		- crossCorr		: The cross correlation in Q-rShift
+ */
+
+void WebRtcNetEQ_40BitAccCrossCorr(WebRtc_Word32 *crossCorr,
+    WebRtc_Word16 *seq1,
+    WebRtc_Word16 *seq2,
+    WebRtc_Word16 dimSeq,
+    WebRtc_Word16 dimCrossCorr,
+    WebRtc_Word16 rShift,
+    WebRtc_Word16 step_seq2)
+{
+    int i, j;
+    WebRtc_Word16 *seq1Ptr, *seq2Ptr;
+    WebRtc_Word64 acc;
+
+    for (i = 0; i < dimCrossCorr; i++)
+    {
+        /* Set the pointer to the static vector, set the pointer to
+         the sliding vector and initialize crossCorr */
+        seq1Ptr = seq1;
+        seq2Ptr = seq2 + (step_seq2 * i);
+        acc = 0;
+
+        /* Perform the cross correlation */
+        for (j = 0; j < dimSeq; j++)
+        {
+            acc += WEBRTC_SPL_MUL_16_16((*seq1Ptr), (*seq2Ptr));
+            seq1Ptr++;
+            seq2Ptr++;
+        }
+
+        (*crossCorr) = (WebRtc_Word32) (acc >> rShift);
+        crossCorr++;
+    }
+}
+
+/****************************************************************************
+ * WebRtcNetEQ_40BitAccDotW16W16(...)
+ *
+ * Calculates the dot product between two vectors (WebRtc_Word16)
+ *
+ * Input:
+ *		- vector1		: Vector 1
+ *		- vector2		: Vector 2
+ *		- len			: Number of samples in vector
+ *                        Should be no larger than 1024 to avoid overflow.
+ *		- scaling		: The number of left shifts required to avoid overflow 
+ *						  in the dot product
+ * Return value			: The dot product
+ */
+
+WebRtc_Word32 WebRtcNetEQ_40BitAccDotW16W16(WebRtc_Word16 *vector1,
+    WebRtc_Word16 *vector2,
+    int len,
+    int scaling)
+{
+    WebRtc_Word32 sum;
+    int i;
+    WebRtc_Word64 acc;
+
+    acc = 0;
+    for (i = 0; i < len; i++)
+    {
+        acc += WEBRTC_SPL_MUL_16_16(*vector1++, *vector2++);
+    }
+
+    sum = (WebRtc_Word32) (acc >> scaling);
+
+    return(sum);
+}
+
+#endif /* WEBRTC_NETEQ_40BITACC_TEST */
+
+/****************************************************************************
+ * WebRtcNetEQ_DSPInit(...)
+ *
+ * Initializes DSP side of NetEQ.
+ *
+ * Input:
+ *		- inst			: NetEq DSP instance 
+ *      - fs            : Initial sample rate (may change when decoding data)
+ *
+ * Output:
+ *		- inst			: Updated instance
+ *
+ * Return value			: 0 - ok
+ *                      : non-zero - error
+ */
+
+int WebRtcNetEQ_DSPInit(DSPInst_t *inst, WebRtc_UWord16 fs)
+{
+
+    int res = 0;
+    WebRtc_Word16 fs_mult;
+
+    /* Pointers and values to save before clearing the instance */
+#ifdef NETEQ_CNG_CODEC
+    void *savedPtr1 = inst->CNG_Codec_inst;
+#endif
+    void *savedPtr2 = inst->pw16_readAddress;
+    void *savedPtr3 = inst->pw16_writeAddress;
+    void *savedPtr4 = inst->main_inst;
+#ifdef NETEQ_VAD
+    void *savedVADptr = inst->VADInst.VADState;
+    VADInitFunction savedVADinit = inst->VADInst.initFunction;
+    VADSetmodeFunction savedVADsetmode = inst->VADInst.setmodeFunction;
+    VADFunction savedVADfunc = inst->VADInst.VADFunction;
+    WebRtc_Word16 savedVADEnabled = inst->VADInst.VADEnabled;
+    int savedVADMode = inst->VADInst.VADMode;
+#endif /* NETEQ_VAD */
+    DSPStats_t saveStats;
+    WebRtc_Word16 saveMsPerCall = inst->millisecondsPerCall;
+    enum BGNMode saveBgnMode = inst->BGNInst.bgnMode;
+#ifdef NETEQ_STEREO
+    MasterSlaveInfo saveMSinfo;
+#endif
+
+    /* copy contents of statInst to avoid clearing */WEBRTC_SPL_MEMCPY_W16(&saveStats, &(inst->statInst),
+        sizeof(DSPStats_t)/sizeof(WebRtc_Word16));
+
+#ifdef NETEQ_STEREO
+    /* copy contents of msInfo to avoid clearing */WEBRTC_SPL_MEMCPY_W16(&saveMSinfo, &(inst->msInfo),
+        sizeof(MasterSlaveInfo)/sizeof(WebRtc_Word16));
+#endif
+
+    /* check that the sample rate is valid */
+    if ((fs != 8000)
+#ifdef NETEQ_WIDEBAND
+    &&(fs!=16000)
+#endif
+#ifdef NETEQ_32KHZ_WIDEBAND
+    &&(fs!=32000)
+#endif
+#ifdef NETEQ_48KHZ_WIDEBAND
+    &&(fs!=48000)
+#endif
+    )
+    {
+        /* invalid rate */
+        return (CODEC_DB_UNSUPPORTED_FS);
+    }
+
+    /* calcualte fs/8000 */
+    fs_mult = WebRtcSpl_DivW32W16ResW16(fs, 8000);
+
+    /* Set everything to zero since most variables should be zero at start */
+    WebRtcSpl_MemSetW16((WebRtc_Word16 *) inst, 0, sizeof(DSPInst_t) / sizeof(WebRtc_Word16));
+
+    /* Restore saved pointers  */
+#ifdef NETEQ_CNG_CODEC
+    inst->CNG_Codec_inst = (CNG_dec_inst *)savedPtr1;
+#endif
+    inst->pw16_readAddress = (WebRtc_Word16 *) savedPtr2;
+    inst->pw16_writeAddress = (WebRtc_Word16 *) savedPtr3;
+    inst->main_inst = savedPtr4;
+#ifdef NETEQ_VAD
+    inst->VADInst.VADState = savedVADptr;
+    inst->VADInst.initFunction = savedVADinit;
+    inst->VADInst.setmodeFunction = savedVADsetmode;
+    inst->VADInst.VADFunction = savedVADfunc;
+    inst->VADInst.VADEnabled = savedVADEnabled;
+    inst->VADInst.VADMode = savedVADMode;
+#endif /* NETEQ_VAD */
+
+    /* Initialize main part */
+    inst->fs = fs;
+    inst->millisecondsPerCall = saveMsPerCall;
+    inst->timestampsPerCall = inst->millisecondsPerCall * 8 * fs_mult;
+    inst->ExpandInst.w16_overlap = 5 * fs_mult;
+    inst->endPosition = 565 * fs_mult;
+    inst->curPosition = inst->endPosition - inst->ExpandInst.w16_overlap;
+    inst->w16_seedInc = 1;
+    inst->uw16_seed = 777;
+    inst->w16_muteFactor = 16384; /* 1.0 in Q14 */
+    inst->w16_frameLen = 3 * inst->timestampsPerCall; /* Dummy initialize to 30ms */
+
+    inst->w16_speechHistoryLen = 256 * fs_mult;
+    inst->pw16_speechHistory = &inst->speechBuffer[inst->endPosition
+        - inst->w16_speechHistoryLen];
+    inst->ExpandInst.pw16_overlapVec = &(inst->pw16_speechHistory[inst->w16_speechHistoryLen
+        - inst->ExpandInst.w16_overlap]);
+
+    /* Reusage of memory in speechBuffer inside Expand */
+    inst->ExpandInst.pw16_expVecs[0] = &inst->speechBuffer[0];
+    inst->ExpandInst.pw16_expVecs[1] = &inst->speechBuffer[126 * fs_mult];
+    inst->ExpandInst.pw16_arState = &inst->speechBuffer[2 * 126 * fs_mult];
+    inst->ExpandInst.pw16_arFilter = &inst->speechBuffer[2 * 126 * fs_mult
+        + UNVOICED_LPC_ORDER];
+    /* Ends at 2*126*fs_mult+UNVOICED_LPC_ORDER+(UNVOICED_LPC_ORDER+1) */
+
+    inst->ExpandInst.w16_expandMuteFactor = 16384; /* 1.0 in Q14 */
+
+    /* Initialize BGN part */
+    inst->BGNInst.pw16_filter[0] = 4096;
+    inst->BGNInst.w16_scale = 20000;
+    inst->BGNInst.w16_scaleShift = 24;
+    inst->BGNInst.w32_energyUpdate = 500000;
+    inst->BGNInst.w32_energyUpdateLow = 0;
+    inst->BGNInst.w32_energy = 2500;
+    inst->BGNInst.w16_initialized = 0;
+    inst->BGNInst.bgnMode = saveBgnMode;
+
+    /* Recreate statistics counters */WEBRTC_SPL_MEMCPY_W16(&(inst->statInst), &saveStats,
+        sizeof(DSPStats_t)/sizeof(WebRtc_Word16));
+
+#ifdef NETEQ_STEREO
+    /* Recreate MSinfo */WEBRTC_SPL_MEMCPY_W16(&(inst->msInfo), &saveMSinfo,
+        sizeof(MasterSlaveInfo)/sizeof(WebRtc_Word16));
+#endif
+
+#ifdef NETEQ_CNG_CODEC
+    if (inst->CNG_Codec_inst!=NULL)
+    {
+        /* initialize comfort noise generator */
+        res |= WebRtcCng_InitDec(inst->CNG_Codec_inst);
+    }
+#endif
+
+#ifdef NETEQ_VAD
+    /* initialize PostDecode VAD instance
+     (don't bother checking for NULL instance, this is done inside init function) */
+    res |= WebRtcNetEQ_InitVAD(&inst->VADInst, fs);
+#endif /* NETEQ_VAD */
+
+    return (res);
+}
+
+/****************************************************************************
+ * WebRtcNetEQ_AddressInit(...)
+ *
+ * Initializes the shared-memory communication on the DSP side.
+ *
+ * Input:
+ *		- inst			    : NetEQ DSP instance 
+ *      - data2McuAddress   : Pointer to memory where DSP writes / MCU reads
+ *      - data2DspAddress   : Pointer to memory where MCU writes / DSP reads
+ *      - mainInst          : NetEQ main instance
+ *
+ * Output:
+ *		- inst			    : Updated instance
+ *
+ * Return value			    : 0 - ok
+ */
+
+int WebRtcNetEQ_AddressInit(DSPInst_t *inst, const void *data2McuAddress,
+                            const void *data2DspAddress, const void *mainInst)
+{
+
+    /* set shared-memory addresses in the DSP instance */
+    inst->pw16_readAddress = (WebRtc_Word16 *) data2DspAddress;
+    inst->pw16_writeAddress = (WebRtc_Word16 *) data2McuAddress;
+
+    /* set pointer to main NetEQ instance */
+    inst->main_inst = (void *) mainInst;
+
+    /* set output frame size to 10 ms = 80 samples in narrowband */
+    inst->millisecondsPerCall = 10;
+    inst->timestampsPerCall = 80;
+
+    return (0);
+
+}
+
+/****************************************************************************
+ * NETEQDSP_clearInCallStats(...)
+ *
+ * Reset in-call statistics variables on DSP side.
+ *
+ * Input:
+ *		- inst			    : NetEQ DSP instance 
+ *
+ * Output:
+ *		- inst			    : Updated instance
+ *
+ * Return value			    : 0 - ok
+ */
+
+int WebRtcNetEQ_ClearInCallStats(DSPInst_t *inst)
+{
+
+    /* Reset statistics counters */
+    inst->statInst.accelerateLength = 0;
+    inst->statInst.expandLength = 0;
+    inst->statInst.preemptiveLength = 0;
+
+    return (0);
+}
+
+/****************************************************************************
+ * WebRtcNetEQ_ClearPostCallStats(...)
+ *
+ * Reset post-call statistics variables on DSP side.
+ *
+ * Input:
+ *		- inst			    : NetEQ DSP instance 
+ *
+ * Output:
+ *		- inst			    : Updated instance
+ *
+ * Return value			    : 0 - ok
+ */
+
+int WebRtcNetEQ_ClearPostCallStats(DSPInst_t *inst)
+{
+
+    /* Reset statistics counters */
+    inst->statInst.expandedVoiceSamples = 0;
+    inst->statInst.expandedNoiseSamples = 0;
+
+    return (0);
+}
+
+#ifdef NETEQ_VAD
+
+/****************************************************************************
+ * WebRtcNetEQ_InitVAD(...)
+ *
+ * Initializes post-decode VAD instance.
+ *
+ * Input:
+ *		- VADinst		: PostDecodeVAD instance
+ *      - fs            : Initial sample rate
+ *
+ * Output:
+ *		- VADinst		: Updated instance
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_InitVAD(PostDecodeVAD_t *VADInst, WebRtc_UWord16 fs)
+{
+
+    int res = 0;
+
+    /* initially, disable the post-decode VAD */
+    VADInst->VADEnabled = 0;
+
+    if (VADInst->VADState != NULL /* if VAD state is provided */
+        && VADInst->initFunction != NULL /* and all function ... */
+        && VADInst->setmodeFunction != NULL /* ... pointers ... */
+        && VADInst->VADFunction != NULL) /* ... are defined */
+    {
+        res = VADInst->initFunction( VADInst->VADState ); /* call VAD init function */
+        res |= WebRtcNetEQ_SetVADModeInternal( VADInst, VADInst->VADMode );
+
+        if (res!=0)
+        {
+            /* something is wrong; play it safe and set the VADstate to NULL */
+            VADInst->VADState = NULL;
+        }
+        else if (fs<=16000)
+        {
+            /* enable VAD if NB or WB (VAD cannot handle SWB) */
+            VADInst->VADEnabled = 1;
+        }
+    }
+
+    /* reset SID/CNG interval counter */
+    VADInst->SIDintervalCounter = 0;
+
+    /* initialize with active-speaker decision */
+    VADInst->VADDecision = 1;
+
+    return(res);
+
+}
+
+/****************************************************************************
+ * WebRtcNetEQ_SetVADModeInternal(...)
+ *
+ * Set the VAD mode in the VAD struct, and communicate it to the VAD instance 
+ * if it exists.
+ *
+ * Input:
+ *		- VADinst		: PostDecodeVAD instance
+ *      - mode          : Mode number passed on to the VAD function
+ *
+ * Output:
+ *		- VADinst		: Updated instance
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_SetVADModeInternal(PostDecodeVAD_t *VADInst, int mode)
+{
+
+    int res = 0;
+
+    VADInst->VADMode = mode;
+
+    if (VADInst->VADState != NULL)
+    {
+        /* call setmode function */
+        res = VADInst->setmodeFunction(VADInst->VADState, mode);
+    }
+
+    return(res);
+
+}
+
+#endif /* NETEQ_VAD */
+
+/****************************************************************************
+ * WebRtcNetEQ_FlushSpeechBuffer(...)
+ *
+ * Flush the speech buffer.
+ *
+ * Input:
+ *		- inst			: NetEq DSP instance 
+ *
+ * Output:
+ *		- inst			: Updated instance
+ *
+ * Return value			: 0 - ok
+ *                      : non-zero - error
+ */
+
+int WebRtcNetEQ_FlushSpeechBuffer(DSPInst_t *inst)
+{
+    WebRtc_Word16 fs_mult;
+
+    /* calcualte fs/8000 */
+    fs_mult = WebRtcSpl_DivW32W16ResW16(inst->fs, 8000);
+
+    /* clear buffer */
+    WebRtcSpl_MemSetW16(inst->speechBuffer, 0, SPEECH_BUF_SIZE);
+    inst->endPosition = 565 * fs_mult;
+    inst->curPosition = inst->endPosition - inst->ExpandInst.w16_overlap;
+
+    return 0;
+}
+
diff --git a/src/modules/audio_coding/neteq/dsp.h b/src/modules/audio_coding/neteq/dsp.h
new file mode 100644
index 0000000..40b7831
--- /dev/null
+++ b/src/modules/audio_coding/neteq/dsp.h
@@ -0,0 +1,788 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains some DSP initialization functions,
+ * constant table definitions and other parameters.
+ * Also contains definitions of all DSP-side data structures. 
+ */
+
+
+#ifndef DSP_H
+#define DSP_H
+
+#include "typedefs.h"
+
+#include "webrtc_cng.h"
+
+#include "codec_db_defines.h"
+#include "neteq_defines.h"
+#include "neteq_statistics.h"
+
+#ifdef NETEQ_ATEVENT_DECODE
+#include "dtmf_tonegen.h"
+#endif
+
+
+
+/*****************************/
+/* Pre-processor definitions */
+/*****************************/
+
+/* FSMULT is the sample rate divided by 8000 */
+#if defined(NETEQ_48KHZ_WIDEBAND)
+	#define FSMULT	6
+#elif defined(NETEQ_32KHZ_WIDEBAND)
+	#define FSMULT	4
+#elif defined(NETEQ_WIDEBAND)
+	#define FSMULT 2
+#else
+	#define FSMULT 1
+#endif
+
+/* Size of the speech buffer (or synchronization buffer). */
+/* 60 ms decoding + 10 ms syncbuff + 0.625ms lookahead */
+#define SPEECH_BUF_SIZE (565 * FSMULT)
+
+/* Misc definitions */
+#define BGN_LPC_ORDER				(4 + FSMULT)  /* 5, 6, 8, or 10 */
+#define UNVOICED_LPC_ORDER			6
+#define RANDVEC_NO_OF_SAMPLES		256
+
+/* Number of milliseconds to remove/add during accelerate/pre-emptive expand
+   under BGNonly operation */
+#define DEFAULT_TIME_ADJUST 8
+
+/* Number of RecOut calls without CNG/SID before re-enabling post-decode VAD */
+#define POST_DECODE_VAD_AUTO_ENABLE 3000  
+
+/* 8kHz windowing in Q15 (over 5 samples) */
+#define NETEQ_OVERLAP_WINMUTE_8KHZ_START	27307
+#define NETEQ_OVERLAP_WINMUTE_8KHZ_INC		-5461
+#define NETEQ_OVERLAP_WINUNMUTE_8KHZ_START	 5461
+#define NETEQ_OVERLAP_WINUNMUTE_8KHZ_INC	 5461
+/* 16kHz windowing in Q15 (over 10 samples) */
+#define NETEQ_OVERLAP_WINMUTE_16KHZ_START	29789
+#define NETEQ_OVERLAP_WINMUTE_16KHZ_INC		-2979
+#define NETEQ_OVERLAP_WINUNMUTE_16KHZ_START	 2979
+#define NETEQ_OVERLAP_WINUNMUTE_16KHZ_INC	 2979
+/* 32kHz windowing in Q15 (over 20 samples) */
+#define NETEQ_OVERLAP_WINMUTE_32KHZ_START	31208
+#define NETEQ_OVERLAP_WINMUTE_32KHZ_INC		-1560
+#define NETEQ_OVERLAP_WINUNMUTE_32KHZ_START	 1560
+#define NETEQ_OVERLAP_WINUNMUTE_32KHZ_INC	 1560
+/* 48kHz windowing in Q15 (over 30 samples) */
+#define NETEQ_OVERLAP_WINMUTE_48KHZ_START	31711
+#define NETEQ_OVERLAP_WINMUTE_48KHZ_INC		-1057
+#define NETEQ_OVERLAP_WINUNMUTE_48KHZ_START	 1057
+#define NETEQ_OVERLAP_WINUNMUTE_48KHZ_INC	 1057
+
+/* Fade BGN towards zero after this many Expand calls */
+#define FADE_BGN_TIME 200
+
+
+/*******************/
+/* Constant tables */
+/*******************/
+
+extern const WebRtc_Word16 WebRtcNetEQ_kDownsample8kHzTbl[];
+extern const WebRtc_Word16 WebRtcNetEQ_kDownsample16kHzTbl[];
+extern const WebRtc_Word16 WebRtcNetEQ_kDownsample32kHzTbl[];
+extern const WebRtc_Word16 WebRtcNetEQ_kDownsample48kHzTbl[];
+extern const WebRtc_Word16 WebRtcNetEQ_kRandnTbl[];
+extern const WebRtc_Word16 WebRtcNetEQ_kMixFractionFuncTbl[];
+extern const WebRtc_Word16 WebRtcNetEQ_k1049div[];
+extern const WebRtc_Word16 WebRtcNetEQ_k2097div[];
+extern const WebRtc_Word16 WebRtcNetEQ_k5243div[];
+
+
+
+/************/
+/* Typedefs */
+/************/
+
+enum BGNMode
+{
+    BGN_ON,     /* default "normal" behavior with eternal noise */
+    BGN_FADE,   /* noise fades to zero after some time */
+    BGN_OFF     /* background noise is always zero */
+};
+
+#ifdef NETEQ_STEREO
+enum MasterSlaveMode
+{
+    NETEQ_MONO,     /* stand-alone instance */
+    NETEQ_MASTER,   /* master instance in a spatial/stereo configuration */
+    NETEQ_SLAVE     /* slave instance in a spatial/stereo configuration */
+};
+
+enum MasterSlaveExtraInfo
+{
+    NO_INFO,        /* no info to convey */
+    ACC_FAIL,       /* signal that accelerate failed */
+    PE_EXP_FAIL,    /* signal that pre-emptive expand failed */
+    DTMF_OVERDUB,   /* signal that DTMF overdub is generated */
+    DTMF_ONLY       /* signal that DTMF only is played */
+};
+#endif
+
+/****************************/
+/* DSP-side data structures */
+/****************************/
+
+/* Background noise (BGN) instance for storing BGN parameters 
+ (sub-instance of NETEQDSP_inst) */
+typedef struct BGNInst_t_
+{
+
+    WebRtc_Word32 w32_energy;
+    WebRtc_Word32 w32_energyMax;
+    WebRtc_Word32 w32_energyUpdate;
+    WebRtc_Word32 w32_energyUpdateLow;
+    WebRtc_Word16 pw16_filterState[BGN_LPC_ORDER];
+    WebRtc_Word16 pw16_filter[BGN_LPC_ORDER + 1];
+    WebRtc_Word16 w16_mutefactor;
+    WebRtc_Word16 w16_scale;
+    WebRtc_Word16 w16_scaleShift;
+    WebRtc_Word16 w16_initialized;
+    enum BGNMode bgnMode;
+
+} BGNInst_t;
+
+/* Expansion instance (sub-instance of NETEQDSP_inst) */
+typedef struct ExpandInst_t_
+{
+
+    WebRtc_Word16 w16_overlap; /* Constant, 5 for NB and 10 for WB */
+    WebRtc_Word16 w16_consecExp; /* Number of consecutive expand calls */
+    WebRtc_Word16 *pw16_arFilter; /* length [UNVOICED_LPC_ORDER+1]	*/
+    WebRtc_Word16 *pw16_arState; /* length [UNVOICED_LPC_ORDER]		*/
+    WebRtc_Word16 w16_arGain;
+    WebRtc_Word16 w16_arGainScale;
+    WebRtc_Word16 w16_vFraction; /* Q14 */
+    WebRtc_Word16 w16_currentVFraction; /* Q14 */
+    WebRtc_Word16 *pw16_expVecs[2];
+    WebRtc_Word16 w16_lags[3];
+    WebRtc_Word16 w16_maxLag;
+    WebRtc_Word16 *pw16_overlapVec; /* last samples of speech history */
+    WebRtc_Word16 w16_lagsDirection;
+    WebRtc_Word16 w16_lagsPosition;
+    WebRtc_Word16 w16_expandMuteFactor; /* Q14 */
+    WebRtc_Word16 w16_stopMuting;
+    WebRtc_Word16 w16_onset;
+    WebRtc_Word16 w16_muteSlope; /* Q20 */
+
+} ExpandInst_t;
+
+#ifdef NETEQ_VAD
+
+/*
+ * VAD function pointer types, replicating the typedefs in webrtc_neteq_internal.h.
+ * These function pointers match the definitions of WebRtc VAD functions WebRtcVad_Init,
+ * WebRtcVad_set_mode and WebRtcVad_Process, respectively, all found in webrtc_vad.h.
+ */
+typedef int (*VADInitFunction)(void *VAD_inst);
+typedef int (*VADSetmodeFunction)(void *VAD_inst, int mode);
+typedef int (*VADFunction)(void *VAD_inst, int fs, WebRtc_Word16 *frame,
+                           int frameLen);
+
+/* Post-decode VAD instance (sub-instance of NETEQDSP_inst) */
+typedef struct PostDecodeVAD_t_
+{
+
+    void *VADState; /* pointer to a VAD instance */
+
+    WebRtc_Word16 VADEnabled; /* 1 if enabled, 0 if disabled */
+    int VADMode; /* mode parameter to pass to the VAD function */
+    int VADDecision; /* 1 for active, 0 for passive */
+    WebRtc_Word16 SIDintervalCounter; /* reset when decoding CNG/SID frame,
+     increment for each recout call */
+
+    /* Function pointers */
+    VADInitFunction initFunction; /* VAD init function */
+    VADSetmodeFunction setmodeFunction; /* VAD setmode function */
+    VADFunction VADFunction; /* VAD function */
+
+} PostDecodeVAD_t;
+
+#endif /* NETEQ_VAD */
+
+#ifdef NETEQ_STEREO
+#define MAX_MS_DECODES 10
+
+typedef struct 
+{
+    /* Stand-alone, master, or slave */
+    enum MasterSlaveMode    msMode;
+
+    enum MasterSlaveExtraInfo  extraInfo;
+
+    WebRtc_UWord16 instruction;
+    WebRtc_Word16 distLag;
+    WebRtc_Word16 corrLag;
+    WebRtc_Word16 bestIndex;
+
+    WebRtc_UWord32 endTimestamp;
+    WebRtc_UWord16 samplesLeftWithOverlap;
+
+} MasterSlaveInfo;
+#endif
+
+
+/* "Main" NetEQ DSP instance */
+typedef struct DSPInst_t_
+{
+
+    /* MCU/DSP Communication layer */
+    WebRtc_Word16 *pw16_readAddress;
+    WebRtc_Word16 *pw16_writeAddress;
+    void *main_inst;
+
+    /* Output frame size in ms and samples */
+    WebRtc_Word16 millisecondsPerCall;
+    WebRtc_Word16 timestampsPerCall;
+
+    /*
+     *	Example of speech buffer
+     *
+     *  -----------------------------------------------------------
+     *  |            History  T-60 to T         |     Future      |
+     *	-----------------------------------------------------------
+     *						                    ^			      ^
+     *					                    	|			      |
+     *					                   curPosition	   endPosition
+     *
+     *		History is gradually shifted out to the left when inserting
+     *      new data at the end.
+     */
+
+    WebRtc_Word16 speechBuffer[SPEECH_BUF_SIZE]; /* History/future speech buffer */
+    int curPosition; /* Next sample to play */
+    int endPosition; /* Position that ends future data */
+    WebRtc_UWord32 endTimestamp; /* Timestamp value at end of future data */
+    WebRtc_UWord32 videoSyncTimestamp; /* (Estimated) timestamp of the last
+     played sample (usually same as
+     endTimestamp-(endPosition-curPosition)
+     except during Expand and CNG) */
+    WebRtc_UWord16 fs; /* sample rate in Hz */
+    WebRtc_Word16 w16_frameLen; /* decoder frame length in samples */
+    WebRtc_Word16 w16_mode; /* operation used during last RecOut call */
+    WebRtc_Word16 w16_muteFactor; /* speech mute factor in Q14 */
+    WebRtc_Word16 *pw16_speechHistory; /* beginning of speech history during Expand */
+    WebRtc_Word16 w16_speechHistoryLen; /* 256 for NB and 512 for WB */
+
+    /* random noise seed parameters */
+    WebRtc_Word16 w16_seedInc;
+    WebRtc_UWord32 uw16_seed;
+
+    /* VQmon related variable */
+    WebRtc_Word16 w16_concealedTS;
+
+    /*****************/
+    /* Sub-instances */
+    /*****************/
+
+    /* Decoder data */
+    CodecFuncInst_t codec_ptr_inst;
+
+#ifdef NETEQ_CNG_CODEC
+    /* CNG "decoder" instance */
+    CNG_dec_inst *CNG_Codec_inst;
+#endif /* NETEQ_CNG_CODEC */
+
+#ifdef NETEQ_ATEVENT_DECODE
+    /* DTMF generator instance */
+    dtmf_tone_inst_t DTMFInst;
+#endif /* NETEQ_CNG_CODEC */
+
+#ifdef NETEQ_VAD
+    /* Post-decode VAD instance */
+    PostDecodeVAD_t VADInst;
+#endif /* NETEQ_VAD */
+
+    /* Expand instance (defined above) */
+    ExpandInst_t ExpandInst;
+
+    /* Background noise instance (defined above) */
+    BGNInst_t BGNInst;
+
+    /* Internal statistics instance */
+    DSPStats_t statInst;
+
+#ifdef NETEQ_STEREO
+    /* Pointer to Master/Slave info */
+    MasterSlaveInfo *msInfo;
+#endif
+
+} DSPInst_t;
+
+
+/*************************/
+/* Function declarations */
+/*************************/
+
+/****************************************************************************
+ * WebRtcNetEQ_DSPInit(...)
+ *
+ * Initializes DSP side of NetEQ.
+ *
+ * Input:
+ *		- inst			: NetEq DSP instance 
+ *      - fs            : Initial sample rate (may change when decoding data)
+ *
+ * Output:
+ *		- inst			: Updated instance
+ *
+ * Return value			: 0 - ok
+ *                      : non-zero - error
+ */
+
+int WebRtcNetEQ_DSPInit(DSPInst_t *inst, WebRtc_UWord16 fs);
+
+/****************************************************************************
+ * WebRtcNetEQ_AddressInit(...)
+ *
+ * Initializes the shared-memory communication on the DSP side.
+ *
+ * Input:
+ *		- inst			    : NetEQ DSP instance 
+ *      - data2McuAddress   : Pointer to memory where DSP writes / MCU reads
+ *      - data2DspAddress   : Pointer to memory where MCU writes / DSP reads
+ *      - mainInst          : NetEQ main instance
+ *
+ * Output:
+ *		- inst			    : Updated instance
+ *
+ * Return value			    : 0 - ok
+ */
+
+int WebRtcNetEQ_AddressInit(DSPInst_t *inst, const void *data2McuAddress,
+                            const void *data2DspAddress, const void *mainInst);
+
+/****************************************************************************
+ * WebRtcNetEQ_ClearInCallStats(...)
+ *
+ * Reset in-call statistics variables on DSP side.
+ *
+ * Input:
+ *		- inst			    : NetEQ DSP instance 
+ *
+ * Output:
+ *		- inst			    : Updated instance
+ *
+ * Return value			    : 0 - ok
+ */
+
+int WebRtcNetEQ_ClearInCallStats(DSPInst_t *inst);
+
+/****************************************************************************
+ * WebRtcNetEQ_ClearPostCallStats(...)
+ *
+ * Reset post-call statistics variables on DSP side.
+ *
+ * Input:
+ *		- inst			    : NetEQ DSP instance 
+ *
+ * Output:
+ *		- inst			    : Updated instance
+ *
+ * Return value			    : 0 - ok
+ */
+
+int WebRtcNetEQ_ClearPostCallStats(DSPInst_t *inst);
+
+/****************************************************************************
+ * WebRtcNetEQ_RecOutInternal(...)
+ *
+ * This function asks NetEQ for more speech/audio data.
+ *
+ * Input:
+ *		- inst			: NetEQ instance, i.e. the user that requests more 
+ *						  speech/audio data.
+ *		- outdata		: Pointer to a memory space where the output data
+ *						  should be stored.
+ *      - BGNonly       : If non-zero, RecOut will only produce background
+ *                        noise. It will still draw packets from the packet
+ *                        buffer, but they will never be decoded.
+ *
+ * Output:
+ *		- inst			: Updated user information
+ *		- len			: Number of samples that were outputted from NetEq
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_RecOutInternal(DSPInst_t *inst, WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
+                       WebRtc_Word16 BGNonly);
+
+/****************************************************************************
+ * WebRtcNetEQ_Normal(...)
+ *
+ * This function has the possibility to modify data that is played out in Normal
+ * mode, for example adjust the gain of the signal. The length of the signal 
+ * can not be changed.
+ *
+ * Input:
+ *		- inst			: NetEQ DSP instance
+ *      - scratchPtr    : Pointer to scratch vector
+ *		- decoded		: Pointer to vector of new data from decoder
+ *      - len           : Number of input samples
+ *
+ * Output:
+ *		- inst			: Updated user information
+ *		- pw16_len		: Pointer to varibale where the number of samples 
+ *                        produced will be written
+ *
+ * Return value			: >=0 - Number of samples written to outData
+ *						   -1 - Error
+ */
+
+int WebRtcNetEQ_Normal(DSPInst_t *inst,
+#ifdef SCRATCH
+                       WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                       WebRtc_Word16 *pw16_decoded, WebRtc_Word16 len,
+                       WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len);
+
+/****************************************************************************
+ * WebRtcNetEQ_Expand(...)
+ *
+ * This function produces one "chunk" of expansion data (PLC audio). The
+ * lenght of the produced audio depends on the speech history.
+ *
+ * Input:
+ *		- inst			: NetEQ DSP instance
+ *      - scratchPtr    : Pointer to scratch vector
+ *      - BGNonly       : If non-zero, Expand will only produce background
+ *                        noise.
+ *      - pw16_len      : Desired number of samples (only for BGN mode).
+ *
+ * Output:
+ *		- inst			: Updated user information
+ *		- outdata		: Pointer to a memory space where the output data
+ *						  should be stored
+ *		- pw16_len		: Number of samples that were outputted from NetEq
+ *
+ * Return value			:  0 - Ok
+ *						  <0 - Error
+ */
+
+int WebRtcNetEQ_Expand(DSPInst_t *inst,
+#ifdef SCRATCH
+                       WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                       WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
+                       WebRtc_Word16 BGNonly);
+
+/****************************************************************************
+ * WebRtcNetEQ_GenerateBGN(...)
+ *
+ * This function generates and writes len samples of background noise to the
+ * output vector. The Expand function will be called repeteadly until the
+ * correct number of samples is produced.
+ *
+ * Input:
+ *		- inst			: NetEQ DSP instance
+ *      - scratchPtr    : Pointer to scratch vector
+ *      - len           : Desired length of produced BGN.
+ *						  
+ *
+ * Output:
+ *		- pw16_outData	: Pointer to a memory space where the output data
+ *						  should be stored
+ *
+ * Return value			: >=0 - Number of noise samples produced and written
+ *                              to output
+ *						  -1  - Error
+ */
+
+int WebRtcNetEQ_GenerateBGN(DSPInst_t *inst,
+#ifdef SCRATCH
+                            WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                            WebRtc_Word16 *pw16_outData, WebRtc_Word16 len);
+
+/****************************************************************************
+ * WebRtcNetEQ_PreEmptiveExpand(...)
+ *
+ * This function tries to extend the audio data by repeating one or several
+ * pitch periods. The operation is only carried out if the correlation is
+ * strong or if the signal energy is very low. The algorithm is the
+ * reciprocal of the Accelerate algorithm.
+ *
+ * Input:
+ *		- inst			: NetEQ DSP instance
+ *      - scratchPtr    : Pointer to scratch vector.
+ *		- decoded	    : Pointer to newly decoded speech.
+ *		- len           : Length of decoded speech.
+ *      - oldDataLen    : Length of the part of decoded that has already been played out.
+ *      - BGNonly       : If non-zero, Pre-emptive Expand will only copy 
+ *                        the first DEFAULT_TIME_ADJUST seconds of the
+ *                        input and append to the end. No signal matching is
+ *                        done.
+ *
+ * Output:
+ *		- inst			: Updated instance
+ *		- outData		: Pointer to a memory space where the output data
+ *						  should be stored. The vector must be at least
+ *						  min(len + 120*fs/8000, NETEQ_MAX_OUTPUT_SIZE)
+ *						  elements long.
+ *		- pw16_len		: Number of samples written to outData.
+ *
+ * Return value			:  0 - Ok
+ *						  <0 - Error
+ */
+
+int WebRtcNetEQ_PreEmptiveExpand(DSPInst_t *inst,
+#ifdef SCRATCH
+                                 WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                                 const WebRtc_Word16 *pw16_decoded, int len, int oldDataLen,
+                                 WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
+                                 WebRtc_Word16 BGNonly);
+
+/****************************************************************************
+ * WebRtcNetEQ_Accelerate(...)
+ *
+ * This function tries to shorten the audio data by removing one or several
+ * pitch periods. The operation is only carried out if the correlation is
+ * strong or if the signal energy is very low.
+ *
+ * Input:
+ *		- inst			: NetEQ DSP instance
+ *      - scratchPtr    : Pointer to scratch vector.
+ *		- decoded	    : Pointer to newly decoded speech.
+ *		- len           : Length of decoded speech.
+ *      - BGNonly       : If non-zero, Accelerate will only remove the last 
+ *                        DEFAULT_TIME_ADJUST seconds of the intput.
+ *                        No signal matching is done.
+ *
+ *
+ * Output:
+ *		- inst			: Updated instance
+ *		- outData		: Pointer to a memory space where the output data
+ *						  should be stored
+ *		- pw16_len		: Number of samples written to outData.
+ *
+ * Return value			:  0 - Ok
+ *						  <0 - Error
+ */
+
+int WebRtcNetEQ_Accelerate(DSPInst_t *inst,
+#ifdef SCRATCH
+                           WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                           const WebRtc_Word16 *pw16_decoded, int len,
+                           WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
+                           WebRtc_Word16 BGNonly);
+
+/****************************************************************************
+ * WebRtcNetEQ_Merge(...)
+ *
+ * This function is used to merge new data from the decoder to the exisiting
+ * stream in the synchronization buffer. The merge operation is typically
+ * done after a packet loss, where the end of the expanded data does not
+ * fit naturally with the new decoded data.
+ *
+ * Input:
+ *		- inst			: NetEQ DSP instance
+ *      - scratchPtr    : Pointer to scratch vector.
+ *		- decoded	    : Pointer to new decoded speech.
+ *      - len           : Number of samples in pw16_decoded.
+ *
+ *
+ * Output:
+ *		- inst			: Updated user information
+ *		- outData	    : Pointer to a memory space where the output data
+ *						  should be stored
+ *		- pw16_len		: Number of samples written to pw16_outData
+ *
+ * Return value			:  0 - Ok
+ *						  <0 - Error
+ */
+
+int WebRtcNetEQ_Merge(DSPInst_t *inst,
+#ifdef SCRATCH
+                      WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                      WebRtc_Word16 *pw16_decoded, int len, WebRtc_Word16 *pw16_outData,
+                      WebRtc_Word16 *pw16_len);
+
+/****************************************************************************
+ * WebRtcNetEQ_Cng(...)
+ *
+ * This function produces CNG according to RFC 3389
+ *
+ * Input:
+ *		- inst			: NetEQ DSP instance
+ *		- len			: Number of samples to produce
+ *
+ * Output:
+ *		- pw16_outData	: Output CNG
+ *
+ * Return value			:  0 - Ok
+ *						  <0 - Error
+ */
+
+#ifdef NETEQ_CNG_CODEC
+/* Must compile NetEQ with CNG support to enable this function */
+
+int WebRtcNetEQ_Cng(DSPInst_t *inst, WebRtc_Word16 *pw16_outData, int len);
+
+#endif /* NETEQ_CNG_CODEC */
+
+/****************************************************************************
+ * WebRtcNetEQ_BGNUpdate(...)
+ *
+ * This function updates the background noise parameter estimates.
+ *
+ * Input:
+ *		- inst			: NetEQ instance, where the speech history is stored.
+ *      - scratchPtr    : Pointer to scratch vector.
+ *
+ * Output:
+ *		- inst			: Updated information about the BGN characteristics.
+ *
+ * Return value			: No return value
+ */
+
+void WebRtcNetEQ_BGNUpdate(
+#ifdef SCRATCH
+                           DSPInst_t *inst, WebRtc_Word16 *pw16_scratchPtr
+#else
+                           DSPInst_t *inst
+#endif
+                );
+
+#ifdef NETEQ_VAD
+/* Functions used by post-decode VAD */
+
+/****************************************************************************
+ * WebRtcNetEQ_InitVAD(...)
+ *
+ * Initializes post-decode VAD instance.
+ *
+ * Input:
+ *		- VADinst		: PostDecodeVAD instance
+ *      - fs            : Initial sample rate
+ *
+ * Output:
+ *		- VADinst		: Updated instance
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_InitVAD(PostDecodeVAD_t *VADInst, WebRtc_UWord16 fs);
+
+/****************************************************************************
+ * WebRtcNetEQ_SetVADModeInternal(...)
+ *
+ * Set the VAD mode in the VAD struct, and communicate it to the VAD instance 
+ * if it exists.
+ *
+ * Input:
+ *		- VADinst		: PostDecodeVAD instance
+ *      - mode          : Mode number passed on to the VAD function
+ *
+ * Output:
+ *		- VADinst		: Updated instance
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_SetVADModeInternal(PostDecodeVAD_t *VADInst, int mode);
+
+#endif /* NETEQ_VAD */
+
+/****************************************************************************
+ * WebRtcNetEQ_FlushSpeechBuffer(...)
+ *
+ * Flush the speech buffer.
+ *
+ * Input:
+ *		- inst			: NetEq DSP instance 
+ *
+ * Output:
+ *		- inst			: Updated instance
+ *
+ * Return value			: 0 - ok
+ *                      : non-zero - error
+ */
+
+int WebRtcNetEQ_FlushSpeechBuffer(DSPInst_t *inst);
+
+#ifndef WEBRTC_NETEQ_40BITACC_TEST
+
+#include "signal_processing_library.h"
+/* Map to regular SPL functions */
+#define WebRtcNetEQ_CrossCorr   WebRtcSpl_CrossCorrelation
+#define WebRtcNetEQ_DotW16W16   WebRtcSpl_DotProductWithScale
+
+#else /* WEBRTC_NETEQ_40BITACC_TEST defined */
+/* Run NetEQ with simulated 40-bit accumulator to run bit-exact to a DSP 
+ implementation where the main (splib and NetEQ) functions have been
+ 40-bit optimized. */
+
+/* Map to special 40-bit optimized functions, defined below */
+#define WebRtcNetEQ_CrossCorr		WebRtcNetEQ_40BitAccCrossCorr
+#define WebRtcNetEQ_DotW16W16	    WebRtcNetEQ_40BitAccDotW16W16
+
+/****************************************************************************
+ * WebRtcNetEQ_40BitAccCrossCorr(...)
+ *
+ * Calculates the Cross correlation between two sequences seq1 and seq2. Seq1
+ * is fixed and seq2 slides as the pointer is increased with step
+ *
+ * Input:
+ *		- seq1			: First sequence (fixed throughout the correlation)
+ *		- seq2			: Second sequence (slided step_seq2 for each 
+ *						  new correlation)
+ *		- dimSeq		: Number of samples to use in the cross correlation.
+ *                        Should be no larger than 1024 to avoid overflow.
+ *		- dimCrossCorr	: Number of CrossCorrelations to calculate (start 
+ *						  position for seq2 is updated for each new one)
+ *		- rShift			: Number of right shifts to use
+ *		- step_seq2		: How many (positive or negative) steps the seq2 
+ *						  pointer should be updated for each new cross 
+ *						  correlation value
+ *
+ * Output:
+ *		- crossCorr		: The cross correlation in Q-rShift
+ */
+
+void WebRtcNetEQ_40BitAccCrossCorr(WebRtc_Word32 *crossCorr, WebRtc_Word16 *seq1,
+                                   WebRtc_Word16 *seq2, WebRtc_Word16 dimSeq,
+                                   WebRtc_Word16 dimCrossCorr, WebRtc_Word16 rShift,
+                                   WebRtc_Word16 step_seq2);
+
+/****************************************************************************
+ * WebRtcNetEQ_40BitAccDotW16W16(...)
+ *
+ * Calculates the dot product between two vectors (WebRtc_Word16)
+ *
+ * Input:
+ *		- vector1		: Vector 1
+ *		- vector2		: Vector 2
+ *		- len			: Number of samples in vector
+ *                        Should be no larger than 1024 to avoid overflow.
+ *		- scaling		: The number of right shifts (after multiplication)
+ *                        required to avoid overflow in the dot product.
+ * Return value			: The dot product
+ */
+
+WebRtc_Word32 WebRtcNetEQ_40BitAccDotW16W16(WebRtc_Word16 *vector1, WebRtc_Word16 *vector2,
+                                            int len, int scaling);
+
+#endif /* WEBRTC_NETEQ_40BITACC_TEST */
+
+#endif /* DSP_H */
diff --git a/src/modules/audio_coding/neteq/dsp_helpfunctions.c b/src/modules/audio_coding/neteq/dsp_helpfunctions.c
new file mode 100644
index 0000000..6e9a283
--- /dev/null
+++ b/src/modules/audio_coding/neteq/dsp_helpfunctions.c
@@ -0,0 +1,120 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains some help functions that did not fit elsewhere.
+ */
+
+#include "dsp_helpfunctions.h"
+
+
+WebRtc_Word16 WebRtcNetEQ_CalcFsMult(WebRtc_UWord16 fsHz)
+{
+    switch (fsHz)
+    {
+        case 8000:
+        {
+            return 1;
+        }
+        case 16000:
+        {
+            return 2;
+        }
+        case 32000:
+        {
+            return 4;
+        }
+        case 48000:
+        {
+            return 6;
+        }
+        default:
+        {
+            return 1;
+        }
+    }
+}
+
+
+int WebRtcNetEQ_DownSampleTo4kHz(const WebRtc_Word16 *in, int inLen, WebRtc_UWord16 inFsHz,
+                                 WebRtc_Word16 *out, int outLen, int compensateDelay)
+{
+    WebRtc_Word16 *B; /* filter coefficients */
+    WebRtc_Word16 Blen; /* number of coefficients */
+    WebRtc_Word16 filterDelay; /* phase delay in samples */
+    WebRtc_Word16 factor; /* conversion rate (inFsHz/8000) */
+    int ok;
+
+    /* Set constants depending on frequency used */
+    /* NOTE: The phase delay values are wrong compared to the true phase delay
+     of the filters. However, the error is preserved (through the +1 term)
+     for consistency. */
+    switch (inFsHz)
+    {
+        case 8000:
+        {
+            Blen = 3;
+            factor = 2;
+            B = (WebRtc_Word16*) WebRtcNetEQ_kDownsample8kHzTbl;
+            filterDelay = 1 + 1;
+            break;
+        }
+#ifdef NETEQ_WIDEBAND
+            case 16000:
+            {
+                Blen = 5;
+                factor = 4;
+                B = (WebRtc_Word16*) WebRtcNetEQ_kDownsample16kHzTbl;
+                filterDelay = 2 + 1;
+                break;
+            }
+#endif
+#ifdef NETEQ_32KHZ_WIDEBAND
+            case 32000:
+            {
+                Blen = 7;
+                factor = 8;
+                B = (WebRtc_Word16*) WebRtcNetEQ_kDownsample32kHzTbl;
+                filterDelay = 3 + 1;
+                break;
+            }
+#endif
+#ifdef NETEQ_48KHZ_WIDEBAND
+            case 48000:
+            {
+                Blen = 7;
+                factor = 12;
+                B = (WebRtc_Word16*) WebRtcNetEQ_kDownsample48kHzTbl;
+                filterDelay = 3 + 1;
+                break;
+            }
+#endif
+        default:
+        {
+            /* unsupported or wrong sample rate */
+            return -1;
+        }
+    }
+
+    if (!compensateDelay)
+    {
+        /* disregard delay compensation */
+        filterDelay = 0;
+    }
+
+    ok = WebRtcSpl_DownsampleFast((WebRtc_Word16*) &in[Blen - 1],
+        (WebRtc_Word16) (inLen - (Blen - 1)), /* number of input samples */
+        out, (WebRtc_Word16) outLen, /* number of output samples to produce */
+        B, Blen, factor, filterDelay); /* filter parameters */
+
+    return ok; /* return value is -1 if input signal is too short */
+
+}
+
diff --git a/src/modules/audio_coding/neteq/dsp_helpfunctions.h b/src/modules/audio_coding/neteq/dsp_helpfunctions.h
new file mode 100644
index 0000000..f728c09
--- /dev/null
+++ b/src/modules/audio_coding/neteq/dsp_helpfunctions.h
@@ -0,0 +1,220 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Various help functions used by the DSP functions.
+ */
+
+#ifndef DSP_HELPFUNCTIONS_H
+#define DSP_HELPFUNCTIONS_H
+
+#include "typedefs.h"
+
+#include "dsp.h"
+
+/****************************************************************************
+ * WebRtcNetEQ_Correlator(...)
+ *
+ * Calculate signal correlation.
+ *
+ * Input:
+ *      - inst          : DSP instance
+ *      - data          : Speech history to do expand from (older history in data[-4..-1])
+ *      - dataLen       : Length of data
+ *
+ * Output:
+ *      - corrOut       : CC of downsampled signal
+ *      - corrScale     : Scale factor for correlation (-Qdomain)
+ *
+ * Return value         : Length of correlated data
+ */
+
+WebRtc_Word16 WebRtcNetEQ_Correlator(DSPInst_t *inst,
+#ifdef SCRATCH
+                                     WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                                     WebRtc_Word16 *pw16_data, WebRtc_Word16 w16_dataLen,
+                                     WebRtc_Word16 *pw16_corrOut,
+                                     WebRtc_Word16 *pw16_corrScale);
+
+/****************************************************************************
+ * WebRtcNetEQ_PeakDetection(...)
+ *
+ * Peak detection with parabolic fit.
+ *
+ * Input:
+ *      - data          : Data sequence for peak detection
+ *      - dataLen       : Length of data
+ *      - nmbPeaks      : Number of peaks to detect
+ *      - fs_mult       : Sample rate multiplier
+ *
+ * Output:
+ *      - corrIndex     : Index of the peak
+ *      - winner        : Value of the peak
+ *
+ * Return value         : 0 for ok
+ */
+
+WebRtc_Word16 WebRtcNetEQ_PeakDetection(WebRtc_Word16 *pw16_data, WebRtc_Word16 w16_dataLen,
+                                        WebRtc_Word16 w16_nmbPeaks, WebRtc_Word16 fs_mult,
+                                        WebRtc_Word16 *pw16_corrIndex,
+                                        WebRtc_Word16 *pw16_winners);
+
+/****************************************************************************
+ * WebRtcNetEQ_PrblFit(...)
+ *
+ * Three-point parbola fit.
+ *
+ * Input:
+ *      - 3pts          : Three input samples
+ *      - fs_mult       : Sample rate multiplier
+ *
+ * Output:
+ *      - Ind           : Index of the peak
+ *      - outVal        : Value of the peak
+ *
+ * Return value         : 0 for ok
+ */
+
+WebRtc_Word16 WebRtcNetEQ_PrblFit(WebRtc_Word16 *pw16_3pts, WebRtc_Word16 *pw16_Ind,
+                                  WebRtc_Word16 *pw16_outVal, WebRtc_Word16 fs_mult);
+
+/****************************************************************************
+ * WebRtcNetEQ_MinDistortion(...)
+ *
+ * Find the lag that results in minimum distortion.
+ *
+ * Input:
+ *      - data          : Start of speech to perform distortion on, second vector is assumed
+ *                        to be data[-Lag]
+ *      - minLag        : Start lag
+ *      - maxLag        : End lag
+ *      - len           : Length to correlate
+ *
+ * Output:
+ *      - dist          : Distorion value
+ *
+ * Return value         : Lag for minimum distortion
+ */
+
+WebRtc_Word16 WebRtcNetEQ_MinDistortion(const WebRtc_Word16 *pw16_data,
+                                        WebRtc_Word16 w16_minLag, WebRtc_Word16 w16_maxLag,
+                                        WebRtc_Word16 len, WebRtc_Word32 *pw16_dist);
+
+/****************************************************************************
+ * WebRtcNetEQ_RandomVec(...)
+ *
+ * Generate random vector.
+ *
+ * Input:
+ *      - seed          : Current seed (input/output)
+ *      - len           : Number of samples to generate
+ *      - incVal        : Jump step
+ *
+ * Output:
+ *      - randVec       : Generated random vector
+ */
+
+void WebRtcNetEQ_RandomVec(WebRtc_UWord32 *w32_seed, WebRtc_Word16 *pw16_randVec,
+                           WebRtc_Word16 w16_len, WebRtc_Word16 w16_incval);
+
+/****************************************************************************
+ * WebRtcNetEQ_MixVoiceUnvoice(...)
+ *
+ * Mix voiced and unvoiced signal.
+ *
+ * Input:
+ *      - voicedVec         : Voiced input signal
+ *      - unvoicedVec       : Unvoiced input signal
+ *      - current_vfraction : Current mixing factor
+ *      - vfraction_change  : Mixing factor change per sample
+ *      - N                 : Number of samples
+ *
+ * Output:
+ *      - outData           : Mixed signal
+ */
+
+void WebRtcNetEQ_MixVoiceUnvoice(WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_voicedVec,
+                                 WebRtc_Word16 *pw16_unvoicedVec,
+                                 WebRtc_Word16 *w16_current_vfraction,
+                                 WebRtc_Word16 w16_vfraction_change, WebRtc_Word16 N);
+
+/****************************************************************************
+ * WebRtcNetEQ_UnmuteSignal(...)
+ *
+ * Gradually reduce attenuation.
+ *
+ * Input:
+ *      - inVec         : Input signal
+ *      - startMuteFact : Starting attenuation
+ *      - unmuteFact    : Factor to "unmute" with (Q20)
+ *      - N             : Number of samples
+ *
+ * Output:
+ *      - outVec        : Output signal
+ */
+
+void WebRtcNetEQ_UnmuteSignal(WebRtc_Word16 *pw16_inVec, WebRtc_Word16 *startMuteFact,
+                              WebRtc_Word16 *pw16_outVec, WebRtc_Word16 unmuteFact,
+                              WebRtc_Word16 N);
+
+/****************************************************************************
+ * WebRtcNetEQ_MuteSignal(...)
+ *
+ * Gradually increase attenuation.
+ *
+ * Input:
+ *      - inout         : Input/output signal
+ *      - muteSlope     : Slope of muting
+ *      - N             : Number of samples
+ */
+
+void WebRtcNetEQ_MuteSignal(WebRtc_Word16 *pw16_inout, WebRtc_Word16 muteSlope,
+                            WebRtc_Word16 N);
+
+/****************************************************************************
+ * WebRtcNetEQ_CalcFsMult(...)
+ *
+ * Calculate the sample rate divided by 8000.
+ *
+ * Input:
+ *		- fsHz			: Sample rate in Hz in {8000, 16000, 32000, 48000}.
+ *
+ * Return value			: fsHz/8000 for the valid values, 1 for other inputs
+ */
+
+WebRtc_Word16 WebRtcNetEQ_CalcFsMult(WebRtc_UWord16 fsHz);
+
+/****************************************************************************
+ * WebRtcNetEQ_DownSampleTo4kHz(...)
+ *
+ * Lowpass filter and downsample a signal to 4 kHz sample rate.
+ *
+ * Input:
+ *      - in                : Input signal samples.
+ *      - inLen             : Number of input samples.
+ *		- inFsHz		    : Input sample rate in Hz.
+ *      - outLen            : Desired number of samples in decimated signal.
+ *      - compensateDelay   : If non-zero, compensate for the phase delay of
+ *                            of the anti-alias filter.
+ *
+ * Output:
+ *      - out               : Output signal samples.
+ *
+ * Return value			    : 0 - Ok
+ *                           -1 - Error
+ *
+ */
+
+int WebRtcNetEQ_DownSampleTo4kHz(const WebRtc_Word16 *in, int inLen, WebRtc_UWord16 inFsHz,
+                                 WebRtc_Word16 *out, int outLen, int compensateDelay);
+
+#endif
+
diff --git a/src/modules/audio_coding/neteq/dtmf_buffer.c b/src/modules/audio_coding/neteq/dtmf_buffer.c
new file mode 100644
index 0000000..f00f9c9
--- /dev/null
+++ b/src/modules/audio_coding/neteq/dtmf_buffer.c
@@ -0,0 +1,232 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Implementation of packet buffer for DTMF messages.
+ */
+
+#include "dtmf_buffer.h"
+
+#include "typedefs.h" /* to define endianness */
+#include "signal_processing_library.h"
+
+#include "neteq_error_codes.h"
+
+
+#ifdef NETEQ_ATEVENT_DECODE
+
+WebRtc_Word16 WebRtcNetEQ_DtmfRemoveEvent(dtmf_inst_t *DTMFdec_inst)
+{
+
+    int i;
+    for (i = 0; i < 3; i++)
+    {
+        DTMFdec_inst->EventQueue[i] = DTMFdec_inst->EventQueue[i + 1];
+        DTMFdec_inst->EventQueueVolume[i] = DTMFdec_inst->EventQueueVolume[i + 1];
+        DTMFdec_inst->EventQueueEnded[i] = DTMFdec_inst->EventQueueEnded[i + 1];
+        DTMFdec_inst->EventQueueStartTime[i] = DTMFdec_inst->EventQueueStartTime[i + 1];
+        DTMFdec_inst->EventQueueEndTime[i] = DTMFdec_inst->EventQueueEndTime[i + 1];
+    }
+    DTMFdec_inst->EventBufferSize--;
+    DTMFdec_inst->EventQueue[3] = -1;
+    DTMFdec_inst->EventQueueVolume[3] = 0;
+    DTMFdec_inst->EventQueueEnded[3] = 0;
+    DTMFdec_inst->EventQueueStartTime[3] = 0;
+    DTMFdec_inst->EventQueueEndTime[3] = 0;
+
+    return 0;
+}
+
+WebRtc_Word16 WebRtcNetEQ_DtmfDecoderInit(dtmf_inst_t *DTMFdec_inst, WebRtc_UWord16 fs,
+                                          WebRtc_Word16 MaxPLCtime)
+{
+    int i;
+    if (((fs != 8000) && (fs != 16000) && (fs != 32000) && (fs != 48000)) || (MaxPLCtime < 0))
+    {
+        return DTMF_DEC_PARAMETER_ERROR;
+    }
+    if (fs == 8000)
+        DTMFdec_inst->framelen = 80;
+    else if (fs == 16000)
+        DTMFdec_inst->framelen = 160;
+    else if (fs == 32000)
+        DTMFdec_inst->framelen = 320;
+    else
+        /* fs == 48000 */
+        DTMFdec_inst->framelen = 480;
+
+    DTMFdec_inst->MaxPLCtime = MaxPLCtime;
+    DTMFdec_inst->CurrentPLCtime = 0;
+    DTMFdec_inst->EventBufferSize = 0;
+    for (i = 0; i < 4; i++)
+    {
+        DTMFdec_inst->EventQueue[i] = -1;
+        DTMFdec_inst->EventQueueVolume[i] = 0;
+        DTMFdec_inst->EventQueueEnded[i] = 0;
+        DTMFdec_inst->EventQueueStartTime[i] = 0;
+        DTMFdec_inst->EventQueueEndTime[i] = 0;
+    }
+    return 0;
+}
+
+WebRtc_Word16 WebRtcNetEQ_DtmfInsertEvent(dtmf_inst_t *DTMFdec_inst,
+                                          const WebRtc_Word16 *encoded, WebRtc_Word16 len,
+                                          WebRtc_UWord32 timeStamp)
+{
+
+    int i;
+    WebRtc_Word16 value;
+    const WebRtc_Word16 *EventStart;
+    WebRtc_Word16 endEvent;
+    WebRtc_Word16 Volume;
+    WebRtc_Word16 Duration;
+    WebRtc_Word16 position = -1;
+
+    /* Extract event */
+    if (len == 4)
+    {
+        EventStart = encoded;
+#ifdef WEBRTC_BIG_ENDIAN
+        value=((*EventStart)>>8);
+        endEvent=((*EventStart)&0x80)>>7;
+        Volume=((*EventStart)&0x3F);
+        Duration=EventStart[1];
+#else
+        value = ((*EventStart) & 0xFF);
+        endEvent = ((*EventStart) & 0x8000) >> 15;
+        Volume = ((*EventStart) & 0x3F00) >> 8;
+        Duration = (((((WebRtc_UWord16) EventStart[1]) >> 8) & 0xFF)
+            | (((WebRtc_UWord16) (EventStart[1] & 0xFF)) << 8));
+#endif
+        /* Only events between 0-15 are supported (DTMF tones) */
+        if ((value < 0) || (value > 15))
+        {
+            return 0;
+        }
+
+        /* Discard all DTMF tones with really low volume (<-36dbm0) */
+        if (Volume > 36)
+        {
+            return 0;
+        }
+
+        /*Are there any unended events of the same type? */
+        for (i = 0; i < DTMFdec_inst->EventBufferSize; i++)
+        {
+            /* Going through the whole queue even when we have found a match will
+             ensure that we add to the latest applicable event  */
+            if ((DTMFdec_inst->EventQueue[i] == value) && (!DTMFdec_inst->EventQueueEnded[i]
+                || endEvent)) position = i;
+        }
+        if (position > -1)
+        {
+            DTMFdec_inst->EventQueueVolume[position] = Volume;
+            if ((timeStamp + Duration) > DTMFdec_inst->EventQueueEndTime[position]) DTMFdec_inst->EventQueueEndTime[position]
+                = DTMFdec_inst->EventQueueStartTime[position] + Duration;
+            if (endEvent) DTMFdec_inst->EventQueueEnded[position] = 1;
+        }
+        else
+        {
+            if (DTMFdec_inst->EventBufferSize == MAX_DTMF_QUEUE_SIZE)
+            { /* Buffer full */
+                /* Remove one event */
+                DTMFdec_inst->EventBufferSize--;
+            }
+            /* Store data in the instance on a new position*/
+            DTMFdec_inst->EventQueue[DTMFdec_inst->EventBufferSize] = value;
+            DTMFdec_inst->EventQueueVolume[DTMFdec_inst->EventBufferSize] = Volume;
+            DTMFdec_inst->EventQueueEnded[DTMFdec_inst->EventBufferSize] = endEvent;
+            DTMFdec_inst->EventQueueStartTime[DTMFdec_inst->EventBufferSize] = timeStamp;
+            DTMFdec_inst->EventQueueEndTime[DTMFdec_inst->EventBufferSize] = timeStamp
+                + Duration;
+            DTMFdec_inst->EventBufferSize++;
+        }
+        return 0;
+    }
+    return DTMF_INSERT_ERROR;
+}
+
+WebRtc_Word16 WebRtcNetEQ_DtmfDecode(dtmf_inst_t *DTMFdec_inst, WebRtc_Word16 *event,
+                                     WebRtc_Word16 *volume, WebRtc_UWord32 currTimeStamp)
+{
+
+    if (DTMFdec_inst->EventBufferSize < 1) return 0; /* No events to play */
+
+    /* We have events, is it time to play them? */
+    if (currTimeStamp < DTMFdec_inst->EventQueueStartTime[0])
+    {
+        /*No, just return zero */
+        return 0;
+    }
+
+    /* Continue on the event that is currently ongoing */
+    *event = DTMFdec_inst->EventQueue[0];
+    *volume = DTMFdec_inst->EventQueueVolume[0];
+
+    if (DTMFdec_inst->EventQueueEndTime[0] >= (currTimeStamp + DTMFdec_inst->framelen))
+    {
+
+        /* Still at least framLen to play */
+
+        DTMFdec_inst->CurrentPLCtime = 0;
+        if ((DTMFdec_inst->EventQueueEndTime[0] == (currTimeStamp + DTMFdec_inst->framelen))
+            && (DTMFdec_inst->EventQueueEnded[0]))
+        { /* We are done */
+            /*Remove the event from Queue*/
+            WebRtcNetEQ_DtmfRemoveEvent(DTMFdec_inst);
+        }
+        return DTMFdec_inst->framelen;
+
+    }
+    else
+    {
+        if ((DTMFdec_inst->EventQueueEnded[0]) || (DTMFdec_inst->EventQueue[1] > -1))
+        {
+            /*
+             * Less than frameLen to play and end of event or already received next event.
+             * Give our a whole frame size of audio to simplify things.
+             */
+
+            /*Remove the event from Queue*/
+            WebRtcNetEQ_DtmfRemoveEvent(DTMFdec_inst);
+            DTMFdec_inst->CurrentPLCtime = 0;
+
+            return DTMFdec_inst->framelen;
+
+        }
+        else
+        {
+            /* Less than frameLen to play and not end of event. */
+            DTMFdec_inst->CurrentPLCtime = (WebRtc_Word16) (currTimeStamp
+                - DTMFdec_inst->EventQueueEndTime[0]);
+
+            if ((DTMFdec_inst->CurrentPLCtime > DTMFdec_inst->MaxPLCtime)
+                || (DTMFdec_inst->CurrentPLCtime < -DTMFdec_inst->MaxPLCtime))
+            {
+                /*Remove the event from queue*/
+                WebRtcNetEQ_DtmfRemoveEvent(DTMFdec_inst);
+                DTMFdec_inst->CurrentPLCtime = 0;
+            }
+
+            /* If we have a new event that it's time to play */
+            if ((DTMFdec_inst->EventQueue[1] > -1) && (DTMFdec_inst->EventQueueStartTime[1]
+                >= (currTimeStamp + DTMFdec_inst->framelen)))
+            {
+                /*Remove the event from queue*/
+                WebRtcNetEQ_DtmfRemoveEvent(DTMFdec_inst);
+                DTMFdec_inst->CurrentPLCtime = 0;
+            }
+
+            return DTMFdec_inst->framelen;
+        }
+    }
+}
+
+#endif
diff --git a/src/modules/audio_coding/neteq/dtmf_buffer.h b/src/modules/audio_coding/neteq/dtmf_buffer.h
new file mode 100644
index 0000000..e185411
--- /dev/null
+++ b/src/modules/audio_coding/neteq/dtmf_buffer.h
@@ -0,0 +1,101 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Packet buffer for DTMF messages.
+ */
+
+#ifndef DTMF_BUFFER_H
+#define DTMF_BUFFER_H
+
+#include "typedefs.h"
+
+#include "neteq_defines.h"
+
+/* Include this code only if ATEVENT (DTMF) is defined in */
+#ifdef NETEQ_ATEVENT_DECODE
+
+#define MAX_DTMF_QUEUE_SIZE 4 
+
+typedef struct dtmf_inst_t_
+{
+    WebRtc_Word16 MaxPLCtime;
+    WebRtc_Word16 CurrentPLCtime;
+    WebRtc_Word16 EventQueue[MAX_DTMF_QUEUE_SIZE];
+    WebRtc_Word16 EventQueueVolume[MAX_DTMF_QUEUE_SIZE];
+    WebRtc_Word16 EventQueueEnded[MAX_DTMF_QUEUE_SIZE];
+    WebRtc_UWord32 EventQueueStartTime[MAX_DTMF_QUEUE_SIZE];
+    WebRtc_UWord32 EventQueueEndTime[MAX_DTMF_QUEUE_SIZE];
+    WebRtc_Word16 EventBufferSize;
+    WebRtc_Word16 framelen;
+} dtmf_inst_t;
+
+/****************************************************************************
+ * WebRtcNetEQ_DtmfDecoderInit(...)
+ *
+ * This function initializes a DTMF instance.
+ *
+ * Input:
+ *      - DTMF_decinst_t    : DTMF instance
+ *      - fs                : The sample rate used for the DTMF
+ *      - MaxPLCtime        : Maximum length for a PLC before zeros should be inserted
+ *
+ * Return value             :  0 - Ok
+ *                            -1 - Error
+ */
+
+WebRtc_Word16 WebRtcNetEQ_DtmfDecoderInit(dtmf_inst_t *DTMFdec_inst, WebRtc_UWord16 fs,
+                                          WebRtc_Word16 MaxPLCtime);
+
+/****************************************************************************
+ * WebRtcNetEQ_DtmfInsertEvent(...)
+ *
+ * This function decodes a packet with DTMF frames.
+ *
+ * Input:
+ *      - DTMFdec_inst      : DTMF instance
+ *      - encoded           : Encoded DTMF frame(s)
+ *      - len               : Bytes in encoded vector
+ *
+ *
+ * Return value             :  0 - Ok
+ *                            -1 - Error
+ */
+
+WebRtc_Word16 WebRtcNetEQ_DtmfInsertEvent(dtmf_inst_t *DTMFdec_inst,
+                                          const WebRtc_Word16 *encoded, WebRtc_Word16 len,
+                                          WebRtc_UWord32 timeStamp);
+
+/****************************************************************************
+ * WebRtcNetEQ_DtmfDecode(...)
+ *
+ * This function decodes a packet with DTMF frame(s). Output will be the
+ * event that should be played for next 10 ms. 
+ *
+ * Input:
+ *      - DTMFdec_inst      : DTMF instance
+ *      - currTimeStamp     : The current playout timestamp
+ *
+ * Output:
+ *      - event             : Event number to be played
+ *      - volume            : Event volume to be played
+ *
+ * Return value             : >0 - There is a event to be played
+ *                             0 - No event to be played
+ *                            -1 - Error
+ */
+
+WebRtc_Word16 WebRtcNetEQ_DtmfDecode(dtmf_inst_t *DTMFdec_inst, WebRtc_Word16 *event,
+                                     WebRtc_Word16 *volume, WebRtc_UWord32 currTimeStamp);
+
+#endif    /* NETEQ_ATEVENT_DECODE */
+
+#endif    /* DTMF_BUFFER_H */
+
diff --git a/src/modules/audio_coding/neteq/dtmf_tonegen.c b/src/modules/audio_coding/neteq/dtmf_tonegen.c
new file mode 100644
index 0000000..a52f9bc
--- /dev/null
+++ b/src/modules/audio_coding/neteq/dtmf_tonegen.c
@@ -0,0 +1,371 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the DTMF tone generator and its parameters.
+ *
+ * A sinusoid is generated using the recursive oscillator model
+ *
+ *      y[n] = sin(w*n + phi) = 2*cos(w) * y[n-1] - y[n-2]
+ *                            = a * y[n-1] - y[n-2]
+ *
+ * initialized with 
+ *      y[-2] = 0
+ *      y[-1] = sin(w)
+ *
+ * A DTMF signal is a combination of two sinusoids, depending
+ * on which event is sent (i.e, which key is pressed). The following
+ * table maps each key (event codes in parentheses) into two tones:
+ *
+ *  	    1209 Hz     1336 Hz     1477 Hz     1633 Hz
+ * 697 Hz   1 (ev. 1)   2 (ev. 2) 	3 (ev. 3) 	A (ev. 12)
+ * 770 Hz 	4 (ev. 4)	5 (ev. 5) 	6 (ev. 6)	B (ev. 13)
+ * 852 Hz 	7 (ev. 7) 	8 (ev. 8) 	9 (ev. 9)	C (ev. 14)
+ * 941 Hz 	* (ev. 10) 	0 (ev. 0)	# (ev. 11)	D (ev. 15)
+ *
+ * The two tones are added to form the DTMF signal.
+ *
+ */
+
+#include "dtmf_tonegen.h"
+
+#include "signal_processing_library.h"
+
+#include "neteq_error_codes.h"
+
+#ifdef NETEQ_ATEVENT_DECODE
+/* Must compile NetEQ with DTMF support to enable the functionality */
+
+/*******************/
+/* Constant tables */
+/*******************/
+
+/*
+ * All tables corresponding to the oscillator model are organized so that
+ * the coefficients for a specific frequency is found in the same position
+ * in every table. The positions for the tones follow this layout:
+ *
+ *  dummyVector[8] =
+ *  {
+ *      697 Hz,	    770 Hz,	    852 Hz,     941 Hz,
+ *      1209 Hz,    1336 Hz,    1477 Hz,    1633 Hz
+ *  };
+ */
+
+/*
+ * Tables for the constant a = 2*cos(w) = 2*cos(2*pi*f/fs)
+ * in the oscillator model, for 8, 16, 32 and 48 kHz sample rate.
+ * Table values in Q14.
+ */
+
+const WebRtc_Word16 WebRtcNetEQ_dtfm_aTbl8Khz[8] =
+{
+    27980, 26956, 25701, 24219,
+    19073, 16325, 13085, 9315
+};
+
+#ifdef NETEQ_WIDEBAND
+const WebRtc_Word16 WebRtcNetEQ_dtfm_aTbl16Khz[8]=
+{
+    31548, 31281, 30951, 30556,
+    29144, 28361, 27409, 26258
+};
+#endif
+
+#ifdef NETEQ_32KHZ_WIDEBAND
+const WebRtc_Word16 WebRtcNetEQ_dtfm_aTbl32Khz[8]=
+{
+    32462, 32394, 32311, 32210,
+    31849, 31647, 31400, 31098
+};
+#endif
+
+#ifdef NETEQ_48KHZ_WIDEBAND
+const WebRtc_Word16 WebRtcNetEQ_dtfm_aTbl48Khz[8]=
+{
+    32632, 32602, 32564, 32520,
+    32359, 32268, 32157, 32022
+};
+#endif
+
+/*
+ * Initialization values y[-1] = sin(w) = sin(2*pi*f/fs), for 8, 16, 32 and 48 kHz sample rate.
+ * Table values in Q14.
+ */
+
+const WebRtc_Word16 WebRtcNetEQ_dtfm_yInitTab8Khz[8] =
+{
+    8528, 9315, 10163, 11036,
+    13323, 14206,15021, 15708
+};
+
+#ifdef NETEQ_WIDEBAND
+const WebRtc_Word16 WebRtcNetEQ_dtfm_yInitTab16Khz[8]=
+{
+    4429, 4879, 5380, 5918,
+    7490, 8207, 8979, 9801
+};
+#endif
+
+#ifdef NETEQ_32KHZ_WIDEBAND
+const WebRtc_Word16 WebRtcNetEQ_dtfm_yInitTab32Khz[8]=
+{
+    2235, 2468, 2728, 3010,
+    3853, 4249, 4685, 5164
+};
+#endif
+
+#ifdef NETEQ_48KHZ_WIDEBAND
+const WebRtc_Word16 WebRtcNetEQ_dtfm_yInitTab48Khz[8]=
+{
+    1493, 1649, 1823, 2013,
+    2582, 2851, 3148, 3476
+};
+#endif
+
+/* Volume in dBm0 from 0 to -63, where 0 is the first table entry.
+ Everything below -36 is discarded, wherefore the table stops at -36.
+ Table entries are in Q14.
+ */
+
+const WebRtc_Word16 WebRtcNetEQ_dtfm_dBm0[37] = { 16141, 14386, 12821, 11427, 10184, 9077, 8090,
+                                                7210, 6426, 5727, 5104, 4549, 4054, 3614,
+                                                3221, 2870, 2558, 2280, 2032, 1811, 1614,
+                                                1439, 1282, 1143, 1018, 908, 809, 721, 643,
+                                                573, 510, 455, 405, 361, 322, 287, 256 };
+
+/****************************************************************************
+ * WebRtcNetEQ_DTMFGenerate(...)
+ *
+ * Generate 10 ms DTMF signal according to input parameters.
+ *
+ * Input:
+ *		- DTMFdecInst	: DTMF instance
+ *      - value         : DTMF event number (0-15)
+ *      - volume        : Volume of generated signal (0-36)
+ *                        Volume is given in negative dBm0, i.e., volume == 0
+ *                        means 0 dBm0 while volume == 36 mean -36 dBm0.
+ *      - sampFreq      : Sample rate in Hz
+ *
+ * Output:
+ *      - signal        : Pointer to vector where DTMF signal is stored;
+ *                        Vector must be at least sampFreq/100 samples long.
+ *		- DTMFdecInst	: Updated DTMF instance
+ *
+ * Return value			: >0 - Number of samples written to signal
+ *                      : <0 - error
+ */
+
+WebRtc_Word16 WebRtcNetEQ_DTMFGenerate(dtmf_tone_inst_t *DTMFdecInst, WebRtc_Word16 value,
+                                       WebRtc_Word16 volume, WebRtc_Word16 *signal,
+                                       WebRtc_UWord16 sampFreq, WebRtc_Word16 extFrameLen)
+{
+    const WebRtc_Word16 *aTbl; /* pointer to a-coefficient table */
+    const WebRtc_Word16 *yInitTable; /* pointer to initialization value table */
+    WebRtc_Word16 a1 = 0; /* a-coefficient for first tone (low tone) */
+    WebRtc_Word16 a2 = 0; /* a-coefficient for second tone (high tone) */
+    int i;
+    int frameLen; /* number of samples to generate */
+    int lowIndex;
+    int highIndex;
+    WebRtc_Word32 tempVal;
+    WebRtc_Word16 tempValLow;
+    WebRtc_Word16 tempValHigh;
+
+    /* Sanity check for volume */
+    if ((volume < 0) || (volume > 36))
+    {
+        return DTMF_DEC_PARAMETER_ERROR;
+    }
+
+    /* Sanity check for extFrameLen */
+    if (extFrameLen < -1)
+    {
+        return DTMF_DEC_PARAMETER_ERROR;
+    }
+
+    /* Select oscillator coefficient tables based on sample rate */
+    if (sampFreq == 8000)
+    {
+        aTbl = WebRtcNetEQ_dtfm_aTbl8Khz;
+        yInitTable = WebRtcNetEQ_dtfm_yInitTab8Khz;
+        frameLen = 80;
+#ifdef NETEQ_WIDEBAND
+    }
+    else if (sampFreq == 16000)
+    {
+        aTbl = WebRtcNetEQ_dtfm_aTbl16Khz;
+        yInitTable = WebRtcNetEQ_dtfm_yInitTab16Khz;
+        frameLen = 160;
+#endif
+#ifdef NETEQ_32KHZ_WIDEBAND
+    }
+    else if (sampFreq == 32000)
+    {
+        aTbl = WebRtcNetEQ_dtfm_aTbl32Khz;
+        yInitTable = WebRtcNetEQ_dtfm_yInitTab32Khz;
+        frameLen = 320;
+#endif
+#ifdef NETEQ_48KHZ_WIDEBAND
+    }
+    else if (sampFreq == 48000)
+    {
+        aTbl = WebRtcNetEQ_dtfm_aTbl48Khz;
+        yInitTable = WebRtcNetEQ_dtfm_yInitTab48Khz;
+        frameLen = 480;
+#endif
+    }
+    else
+    {
+        /* unsupported sample rate */
+        return DTMF_GEN_UNKNOWN_SAMP_FREQ;
+    }
+
+    if (extFrameLen >= 0)
+    {
+        frameLen = extFrameLen;
+    }
+
+    /* select low frequency based on event value */
+    switch (value)
+    {
+        case 1:
+        case 2:
+        case 3:
+        case 12: /* first row on keypad */
+        {
+            lowIndex = 0; /* low frequency: 697 Hz */
+            break;
+        }
+        case 4:
+        case 5:
+        case 6:
+        case 13: /* second row on keypad */
+        {
+            lowIndex = 1; /* low frequency: 770 Hz */
+            break;
+        }
+        case 7:
+        case 8:
+        case 9:
+        case 14: /* third row on keypad */
+        {
+            lowIndex = 2; /* low frequency: 852 Hz */
+            break;
+        }
+        case 0:
+        case 10:
+        case 11:
+        case 15: /* fourth row on keypad */
+        {
+            lowIndex = 3; /* low frequency: 941 Hz */
+            break;
+        }
+        default:
+        {
+            return DTMF_DEC_PARAMETER_ERROR;
+        }
+    } /* end switch */
+
+    /* select high frequency based on event value */
+    switch (value)
+    {
+        case 1:
+        case 4:
+        case 7:
+        case 10: /* first column on keypad */
+        {
+            highIndex = 4; /* high frequency: 1209 Hz */
+            break;
+        }
+        case 2:
+        case 5:
+        case 8:
+        case 0: /* second column on keypad */
+        {
+            highIndex = 5;/* high frequency: 1336 Hz */
+            break;
+        }
+        case 3:
+        case 6:
+        case 9:
+        case 11: /* third column on keypad */
+        {
+            highIndex = 6;/* high frequency: 1477 Hz */
+            break;
+        }
+        case 12:
+        case 13:
+        case 14:
+        case 15: /* fourth column on keypad (special) */
+        {
+            highIndex = 7;/* high frequency: 1633 Hz */
+            break;
+        }
+        default:
+        {
+            return DTMF_DEC_PARAMETER_ERROR;
+        }
+    } /* end switch */
+
+    /* select coefficients based on results from switches above */
+    a1 = aTbl[lowIndex]; /* coefficient for first (low) tone */
+    a2 = aTbl[highIndex]; /* coefficient for second (high) tone */
+
+    if (DTMFdecInst->reinit)
+    {
+        /* set initial values for the recursive model */
+        DTMFdecInst->oldOutputLow[0] = yInitTable[lowIndex];
+        DTMFdecInst->oldOutputLow[1] = 0;
+        DTMFdecInst->oldOutputHigh[0] = yInitTable[highIndex];
+        DTMFdecInst->oldOutputHigh[1] = 0;
+
+        /* reset reinit flag */
+        DTMFdecInst->reinit = 0;
+    }
+
+    /* generate signal sample by sample */
+    for (i = 0; i < frameLen; i++)
+    {
+
+        /* Use rescursion formula y[n] = a*y[n-1] - y[n-2] */
+        tempValLow
+                        = (WebRtc_Word16) (((WEBRTC_SPL_MUL_16_16(a1, DTMFdecInst->oldOutputLow[1])
+                                        + 8192) >> 14) - DTMFdecInst->oldOutputLow[0]);
+        tempValHigh
+                        = (WebRtc_Word16) (((WEBRTC_SPL_MUL_16_16(a2, DTMFdecInst->oldOutputHigh[1])
+                                        + 8192) >> 14) - DTMFdecInst->oldOutputHigh[0]);
+
+        /* Update recursion memory */
+        DTMFdecInst->oldOutputLow[0] = DTMFdecInst->oldOutputLow[1];
+        DTMFdecInst->oldOutputLow[1] = tempValLow;
+        DTMFdecInst->oldOutputHigh[0] = DTMFdecInst->oldOutputHigh[1];
+        DTMFdecInst->oldOutputHigh[1] = tempValHigh;
+
+        /* scale high tone with 32768 (15 left shifts) 
+         and low tone with 23171 (3dB lower than high tone) */
+        tempVal = WEBRTC_SPL_MUL_16_16(DTMF_AMP_LOW, tempValLow)
+                        + WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)tempValHigh, 15);
+
+        /* Norm the signal to Q14 (with proper rounding) */
+        tempVal = (tempVal + 16384) >> 15;
+
+        /* Scale the signal to correct dbM0 value */
+        signal[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+                               (WEBRTC_SPL_MUL_16_16(tempVal, WebRtcNetEQ_dtfm_dBm0[volume])
+                               + 8192), 14); /* volume value is in Q14; use proper rounding */
+    }
+
+    return frameLen;
+
+}
+
+#endif /* NETEQ_ATEVENT_DECODE */
+
diff --git a/src/modules/audio_coding/neteq/dtmf_tonegen.h b/src/modules/audio_coding/neteq/dtmf_tonegen.h
new file mode 100644
index 0000000..add6eb1
--- /dev/null
+++ b/src/modules/audio_coding/neteq/dtmf_tonegen.h
@@ -0,0 +1,73 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the DTMF tone generator function.
+ */
+
+#ifndef DTMF_TONEGEN_H
+#define DTMF_TONEGEN_H
+
+#include "typedefs.h"
+
+#include "neteq_defines.h"
+
+#ifdef NETEQ_ATEVENT_DECODE
+/* Must compile NetEQ with DTMF support to enable the functionality */
+
+#define DTMF_AMP_LOW	23171	/* 3 dB lower than the high frequency */
+
+/* The DTMF generator struct (part of DSP main struct DSPInst_t) */
+typedef struct dtmf_tone_inst_t_
+{
+
+    WebRtc_Word16 reinit; /* non-zero if the oscillator model should
+     be reinitialized for next event */
+    WebRtc_Word16 oldOutputLow[2]; /* oscillator recursion history (low tone) */
+    WebRtc_Word16 oldOutputHigh[2]; /* oscillator recursion history (high tone) */
+
+    int lastDtmfSample; /* index to the first non-DTMF sample in the
+     speech history, if non-negative */
+}dtmf_tone_inst_t;
+
+/****************************************************************************
+ * WebRtcNetEQ_DTMFGenerate(...)
+ *
+ * Generate 10 ms DTMF signal according to input parameters.
+ *
+ * Input:
+ *		- DTMFdecInst	: DTMF instance
+ *      - value         : DTMF event number (0-15)
+ *      - volume        : Volume of generated signal (0-36)
+ *                        Volume is given in negative dBm0, i.e., volume == 0
+ *                        means 0 dBm0 while volume == 36 mean -36 dBm0.
+ *      - sampFreq      : Sample rate in Hz
+ *
+ * Output:
+ *      - signal        : Pointer to vector where DTMF signal is stored;
+ *                        Vector must be at least sampFreq/100 samples long.
+ *		- DTMFdecInst	: Updated DTMF instance
+ *
+ * Return value			: >0 - Number of samples written to signal
+ *                      : <0 - Error
+ */
+
+WebRtc_Word16 WebRtcNetEQ_DTMFGenerate(dtmf_tone_inst_t *DTMFdecInst,
+                WebRtc_Word16 value,
+                WebRtc_Word16 volume,
+                WebRtc_Word16 *signal,
+                WebRtc_UWord16 sampFreq,
+                WebRtc_Word16 frameLen
+);
+
+#endif /* NETEQ_ATEVENT_DECODE */
+
+#endif /* DTMF_TONEGEN_H */
+
diff --git a/src/modules/audio_coding/neteq/expand.c b/src/modules/audio_coding/neteq/expand.c
new file mode 100644
index 0000000..3db7a2a
--- /dev/null
+++ b/src/modules/audio_coding/neteq/expand.c
@@ -0,0 +1,1216 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This is the function to expand from the speech history, to produce concealment data or
+ * increasing delay.
+ */
+
+#include "dsp.h"
+
+#include <assert.h>
+
+#include "signal_processing_library.h"
+
+#include "dsp_helpfunctions.h"
+#include "neteq_error_codes.h"
+
+#define CHECK_NO_OF_CORRMAX        3
+#define DISTLEN                    20
+#define LPCANALASYSLEN           160
+
+/* Scratch usage:
+
+ Type            Name                    size            startpos        endpos
+ (First part of first expand)
+ WebRtc_Word16  pw16_bestCorrIndex      3               0               2
+ WebRtc_Word16  pw16_bestCorr           3               3               5
+ WebRtc_Word16  pw16_bestDistIndex      3               6               8
+ WebRtc_Word16  pw16_bestDist           3               9               11
+ WebRtc_Word16  pw16_corrVec            102*fs/8000     12              11+102*fs/8000
+ func           WebRtcNetEQ_Correlator  232             12+102*fs/8000  243+102*fs/8000
+
+ (Second part of first expand)
+ WebRtc_Word32  pw32_corr2              99*fs/8000+1    0               99*fs/8000
+ WebRtc_Word32  pw32_autoCorr           2*7             0               13
+ WebRtc_Word16  pw16_rc                 6               14              19
+
+ Signal combination:
+ WebRtc_Word16  pw16_randVec            30+120*fs/8000  0               29+120*fs/8000
+ WebRtc_Word16  pw16_scaledRandVec      125*fs/8000     30+120*fs/8000  29+245*fs/8000
+ WebRtc_Word16  pw16_unvoicedVecSpace   10+125*fs/8000  30+245*fs/8000  39+370*fs/8000
+
+ Total: 40+370*fs/8000 (size depends on UNVOICED_LPC_ORDER and BGN_LPC_ORDER)
+ */
+
+#if ((BGN_LPC_ORDER > 10) || (UNVOICED_LPC_ORDER > 10)) && (defined SCRATCH)
+#error BGN_LPC_ORDER and/or BGN_LPC_ORDER are too large for current scratch memory allocation
+#endif
+
+#define     SCRATCH_PW16_BEST_CORR_INDEX    0
+#define     SCRATCH_PW16_BEST_CORR          3
+#define     SCRATCH_PW16_BEST_DIST_INDEX    6
+#define     SCRATCH_PW16_BEST_DIST          9
+#define     SCRATCH_PW16_CORR_VEC           12
+#define     SCRATCH_PW16_CORR2              0
+#define     SCRATCH_PW32_AUTO_CORR          0
+#define     SCRATCH_PW16_RC                 14
+#define     SCRATCH_PW16_RAND_VEC           0
+
+#if (defined(NETEQ_48KHZ_WIDEBAND)) 
+#define     SCRATCH_NETEQDSP_CORRELATOR     624
+#define     SCRATCH_PW16_SCALED_RAND_VEC    750
+#define     SCRATCH_PW16_UNVOICED_VEC_SPACE 1500
+#elif (defined(NETEQ_32KHZ_WIDEBAND)) 
+#define     SCRATCH_NETEQDSP_CORRELATOR     420
+#define     SCRATCH_PW16_SCALED_RAND_VEC    510
+#define     SCRATCH_PW16_UNVOICED_VEC_SPACE 1010
+#elif (defined(NETEQ_WIDEBAND)) 
+#define     SCRATCH_NETEQDSP_CORRELATOR     216
+#define     SCRATCH_PW16_SCALED_RAND_VEC    270
+#define     SCRATCH_PW16_UNVOICED_VEC_SPACE 520
+#else    /* NB */
+#define     SCRATCH_NETEQDSP_CORRELATOR     114
+#define     SCRATCH_PW16_SCALED_RAND_VEC    150
+#define     SCRATCH_PW16_UNVOICED_VEC_SPACE 275
+#endif
+
+/****************************************************************************
+ * WebRtcNetEQ_Expand(...)
+ *
+ * This function produces one "chunk" of expansion data (PLC audio). The
+ * length of the produced audio depends on the speech history.
+ *
+ * Input:
+ *      - inst          : DSP instance
+ *      - scratchPtr    : Pointer to scratch vector
+ *      - outdata       : Pointer to a memory space where the output data
+ *                        should be stored
+ *      - BGNonly       : If non-zero, "expand" will only produce background noise.
+ *      - pw16_len      : Desired number of samples (only for BGN mode).
+ *
+ * Output:
+ *      - inst          : Updated instance
+ *      - pw16_len      : Number of samples that were output from NetEq
+ *
+ * Return value         :  0 - Ok
+ *                        <0 - Error
+ */
+
+int WebRtcNetEQ_Expand(DSPInst_t *inst,
+#ifdef SCRATCH
+                       WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                       WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
+                       WebRtc_Word16 BGNonly)
+{
+
+    WebRtc_Word16 fs_mult;
+    ExpandInst_t *ExpandState = &(inst->ExpandInst);
+    BGNInst_t *BGNState = &(inst->BGNInst);
+    int i;
+#ifdef SCRATCH
+    WebRtc_Word16 *pw16_randVec = pw16_scratchPtr + SCRATCH_PW16_RAND_VEC;
+    WebRtc_Word16 *pw16_scaledRandVec = pw16_scratchPtr + SCRATCH_PW16_SCALED_RAND_VEC;
+    WebRtc_Word16 *pw16_unvoicedVecSpace = pw16_scratchPtr + SCRATCH_PW16_UNVOICED_VEC_SPACE;
+#else
+    WebRtc_Word16 pw16_randVec[FSMULT * 120 + 30]; /* 150 for NB and 270 for WB */
+    WebRtc_Word16 pw16_scaledRandVec[FSMULT * 125]; /* 125 for NB and 250 for WB */
+    WebRtc_Word16 pw16_unvoicedVecSpace[BGN_LPC_ORDER + FSMULT * 125];
+#endif
+    /* 125 for NB and 250 for WB etc. Reuse pw16_outData[] for this vector */
+    WebRtc_Word16 *pw16_voicedVecStorage = pw16_outData;
+    WebRtc_Word16 *pw16_voicedVec = &pw16_voicedVecStorage[ExpandState->w16_overlap];
+    WebRtc_Word16 *pw16_unvoicedVec = pw16_unvoicedVecSpace + UNVOICED_LPC_ORDER;
+    WebRtc_Word16 *pw16_cngVec = pw16_unvoicedVecSpace + BGN_LPC_ORDER;
+    WebRtc_Word16 w16_expVecsLen, w16_lag = 0, w16_expVecPos;
+    WebRtc_Word16 w16_randLen;
+    WebRtc_Word16 w16_vfractionChange; /* in Q14 */
+    WebRtc_Word16 w16_winMute = 0, w16_winMuteInc = 0, w16_winUnMute = 0, w16_winUnMuteInc = 0;
+    WebRtc_Word32 w32_tmp;
+    WebRtc_Word16 w16_tmp, w16_tmp2;
+    WebRtc_Word16 stability;
+    enum BGNMode bgnMode = inst->BGNInst.bgnMode;
+
+    /* Pre-calculate common multiplications with fs_mult */
+    WebRtc_Word16 fsMult4;
+    WebRtc_Word16 fsMult20;
+    WebRtc_Word16 fsMult120;
+    WebRtc_Word16 fsMultDistLen;
+    WebRtc_Word16 fsMultLPCAnalasysLen;
+
+#ifdef NETEQ_STEREO
+    MasterSlaveInfo *msInfo = inst->msInfo;
+#endif
+
+    /* fs is WebRtc_UWord16 (to hold fs=48000) */
+    fs_mult = WebRtcNetEQ_CalcFsMult(inst->fs); /* calculate fs/8000 */
+
+    /* Pre-calculate common multiplications with fs_mult */
+    fsMult4 = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16(fs_mult, 4);
+    fsMult20 = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16(fs_mult, 20);
+    fsMult120 = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16(fs_mult, 120);
+    fsMultDistLen = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16(fs_mult, DISTLEN);
+    fsMultLPCAnalasysLen = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16(fs_mult, LPCANALASYSLEN);
+
+    /*
+     * Perform all the initial setup if it's the first expansion.
+     * If background noise (BGN) only, this setup is not needed.
+     */
+    if (ExpandState->w16_consecExp == 0 && !BGNonly)
+    {
+        /* Setup more variables */
+#ifdef SCRATCH
+        WebRtc_Word32 *pw32_autoCorr = (WebRtc_Word32*) (pw16_scratchPtr
+            + SCRATCH_PW32_AUTO_CORR);
+        WebRtc_Word16 *pw16_rc = pw16_scratchPtr + SCRATCH_PW16_RC;
+        WebRtc_Word16 *pw16_bestCorrIndex = pw16_scratchPtr + SCRATCH_PW16_BEST_CORR_INDEX;
+        WebRtc_Word16 *pw16_bestCorr = pw16_scratchPtr + SCRATCH_PW16_BEST_CORR;
+        WebRtc_Word16 *pw16_bestDistIndex = pw16_scratchPtr + SCRATCH_PW16_BEST_DIST_INDEX;
+        WebRtc_Word16 *pw16_bestDist = pw16_scratchPtr + SCRATCH_PW16_BEST_DIST;
+        WebRtc_Word16 *pw16_corrVec = pw16_scratchPtr + SCRATCH_PW16_CORR_VEC;
+        WebRtc_Word32 *pw32_corr2 = (WebRtc_Word32*) (pw16_scratchPtr + SCRATCH_PW16_CORR2);
+#else
+        WebRtc_Word32 pw32_autoCorr[UNVOICED_LPC_ORDER+1];
+        WebRtc_Word16 pw16_rc[UNVOICED_LPC_ORDER];
+        WebRtc_Word16 pw16_corrVec[FSMULT*102]; /* 102 for NB */
+        WebRtc_Word16 pw16_bestCorrIndex[CHECK_NO_OF_CORRMAX];
+        WebRtc_Word16 pw16_bestCorr[CHECK_NO_OF_CORRMAX];
+        WebRtc_Word16 pw16_bestDistIndex[CHECK_NO_OF_CORRMAX];
+        WebRtc_Word16 pw16_bestDist[CHECK_NO_OF_CORRMAX];
+        WebRtc_Word32 pw32_corr2[(99*FSMULT)+1];
+#endif
+        WebRtc_Word32 pw32_bestDist[CHECK_NO_OF_CORRMAX];
+        WebRtc_Word16 w16_ind = 0;
+        WebRtc_Word16 w16_corrVecLen;
+        WebRtc_Word16 w16_corrScale;
+        WebRtc_Word16 w16_distScale;
+        WebRtc_Word16 w16_indMin, w16_indMax;
+        WebRtc_Word16 w16_len;
+        WebRtc_Word32 w32_en1, w32_en2, w32_cc;
+        WebRtc_Word16 w16_en1Scale, w16_en2Scale;
+        WebRtc_Word16 w16_en1, w16_en2;
+        WebRtc_Word32 w32_en1_mul_en2;
+        WebRtc_Word16 w16_sqrt_en1en2;
+        WebRtc_Word16 w16_ccShiftL;
+        WebRtc_Word16 w16_bestcorr; /* Correlation in Q14 */
+        WebRtc_Word16 *pw16_vec1, *pw16_vec2;
+        WebRtc_Word16 w16_factor;
+        WebRtc_Word16 w16_DistLag, w16_CorrLag, w16_diffLag;
+        WebRtc_Word16 w16_energyLen;
+        WebRtc_Word16 w16_slope;
+        WebRtc_Word16 w16_startInd;
+        WebRtc_Word16 w16_noOfcorr2;
+        WebRtc_Word16 w16_scale;
+
+        /* Initialize some variables */
+        ExpandState->w16_lagsDirection = 1;
+        ExpandState->w16_lagsPosition = -1;
+        ExpandState->w16_expandMuteFactor = 16384; /* Start from 1.0 (Q14) */
+        BGNState->w16_mutefactor = 0; /* Start with 0 gain for BGN (value in Q14) */
+        inst->w16_seedInc = 1;
+
+#ifdef NETEQ_STEREO
+        /* Sanity for msInfo */
+        if (msInfo == NULL)
+        {
+            /* this should not happen here */
+            return MASTER_SLAVE_ERROR;
+        }
+
+        /*
+         * Do not calculate correlations for slave instance(s)
+         * unless lag info from master is corrupt
+         */
+        if ((msInfo->msMode != NETEQ_SLAVE)
+        || ((msInfo->distLag <= 0) || (msInfo->corrLag <= 0)))
+        {
+#endif
+            /* Calculate correlation vector in downsampled domain (4 kHz sample rate) */
+            w16_corrVecLen = WebRtcNetEQ_Correlator(inst,
+#ifdef SCRATCH
+                pw16_scratchPtr + SCRATCH_NETEQDSP_CORRELATOR,
+#endif
+                inst->pw16_speechHistory, inst->w16_speechHistoryLen, pw16_corrVec,
+                &w16_corrScale);
+
+            /* Find peaks in correlation vector using parabolic fit method */
+            WebRtcNetEQ_PeakDetection(pw16_corrVec, w16_corrVecLen, CHECK_NO_OF_CORRMAX, fs_mult,
+                pw16_bestCorrIndex, pw16_bestCorr);
+
+            /*
+             * Adjust peak locations; cross-correlation lags start at 2.5 ms
+             * (20*fs_mult samples)
+             */
+            pw16_bestCorrIndex[0] += fsMult20;
+            pw16_bestCorrIndex[1] += fsMult20;
+            pw16_bestCorrIndex[2] += fsMult20;
+
+            /* Calculate distortion around the 3 (CHECK_NO_OF_CORRMAX) best lags */
+            w16_distScale = 0;
+            for (i = 0; i < CHECK_NO_OF_CORRMAX; i++)
+            {
+                w16_tmp = fsMult20;
+                w16_tmp2 = pw16_bestCorrIndex[i] - fsMult4;
+                w16_indMin = WEBRTC_SPL_MAX(w16_tmp, w16_tmp2);
+                w16_tmp = fsMult120 - 1;
+                w16_tmp2 = pw16_bestCorrIndex[i] + fsMult4;
+                w16_indMax = WEBRTC_SPL_MIN(w16_tmp, w16_tmp2);
+
+                pw16_bestDistIndex[i] = WebRtcNetEQ_MinDistortion(
+                    &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - fsMultDistLen]),
+                    w16_indMin, w16_indMax, fsMultDistLen, &pw32_bestDist[i]);
+
+                w16_distScale
+                    = WEBRTC_SPL_MAX(16 - WebRtcSpl_NormW32(pw32_bestDist[i]), w16_distScale);
+
+            }
+
+            /* Shift the distortion values to fit in WebRtc_Word16 */
+            WebRtcSpl_VectorBitShiftW32ToW16(pw16_bestDist, CHECK_NO_OF_CORRMAX, pw32_bestDist,
+                w16_distScale);
+
+            /*
+             * Find index of maximum criteria, where crit[i] = bestCorr[i])/(bestDist[i])
+             * Do this by a cross multiplication.
+             */
+
+            w32_en1 = WEBRTC_SPL_MUL_16_16((WebRtc_Word32) pw16_bestCorr[0],pw16_bestDist[1]);
+            w32_en2 = WEBRTC_SPL_MUL_16_16((WebRtc_Word32) pw16_bestCorr[1],pw16_bestDist[0]);
+            if (w32_en1 >= w32_en2)
+            {
+                /* 0 wins over 1 */
+                w32_en1
+                    = WEBRTC_SPL_MUL_16_16((WebRtc_Word32) pw16_bestCorr[0], pw16_bestDist[2]);
+                w32_en2
+                    = WEBRTC_SPL_MUL_16_16((WebRtc_Word32) pw16_bestCorr[2], pw16_bestDist[0]);
+                if (w32_en1 >= w32_en2)
+                {
+                    /* 0 wins over 2 */
+                    w16_ind = 0;
+                }
+                else
+                {
+                    /* 2 wins over 0 */
+                    w16_ind = 2;
+                }
+            }
+            else
+            {
+                /* 1 wins over 0 */
+                w32_en1
+                    = WEBRTC_SPL_MUL_16_16((WebRtc_Word32) pw16_bestCorr[1],pw16_bestDist[2]);
+                w32_en2
+                    = WEBRTC_SPL_MUL_16_16((WebRtc_Word32) pw16_bestCorr[2],pw16_bestDist[1]);
+                if ((WebRtc_Word32) w32_en1 >= (WebRtc_Word32) w32_en2)
+                {
+                    /* 1 wins over 2 */
+                    w16_ind = 1;
+                }
+                else
+                {
+                    /* 2 wins over 1 */
+                    w16_ind = 2;
+                }
+            }
+
+#ifdef NETEQ_STEREO
+        }
+
+        /* Store DistLag and CorrLag of the position with highest criteria */
+        if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO)
+            || ((msInfo->msMode == NETEQ_SLAVE) && (msInfo->distLag <= 0 || msInfo->corrLag
+                <= 0)))
+        {
+            /* lags not provided externally */
+            w16_DistLag = pw16_bestDistIndex[w16_ind];
+            w16_CorrLag = pw16_bestCorrIndex[w16_ind];
+            if (msInfo->msMode == NETEQ_MASTER)
+            {
+                msInfo->distLag = w16_DistLag;
+                msInfo->corrLag = w16_CorrLag;
+            }
+        }
+        else if (msInfo->msMode == NETEQ_SLAVE)
+        {
+            /* lags provided externally (from master) */
+            w16_DistLag = msInfo->distLag;
+            w16_CorrLag = msInfo->corrLag;
+
+            /* sanity for lag values */
+            if ((w16_DistLag <= 0) || (w16_CorrLag <= 0))
+            {
+                return MASTER_SLAVE_ERROR;
+            }
+        }
+        else
+        {
+            /* Invalid mode */
+            return MASTER_SLAVE_ERROR;
+        }
+#else /* not NETEQ_STEREO */
+        w16_DistLag = pw16_bestDistIndex[w16_ind];
+        w16_CorrLag = pw16_bestCorrIndex[w16_ind];
+#endif
+
+        ExpandState->w16_maxLag = WEBRTC_SPL_MAX(w16_DistLag, w16_CorrLag);
+
+        /* Calculate the exact best correlation (in the range within CorrLag-DistLag) */
+        w16_len = w16_DistLag + 10;
+        w16_len = WEBRTC_SPL_MIN(w16_len, fsMult120);
+        w16_len = WEBRTC_SPL_MAX(w16_len, 60 * fs_mult);
+
+        w16_startInd = WEBRTC_SPL_MIN(w16_DistLag, w16_CorrLag);
+        w16_noOfcorr2 = WEBRTC_SPL_ABS_W16((w16_DistLag-w16_CorrLag)) + 1;
+        /* w16_noOfcorr2 maximum value is 99*fs_mult + 1 */
+
+        /* Calculate suitable scaling */
+        w16_tmp
+            = WebRtcSpl_MaxAbsValueW16(
+                &inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len - w16_startInd
+                    - w16_noOfcorr2],
+                (WebRtc_Word16) (w16_len + w16_startInd + w16_noOfcorr2 - 1));
+        w16_corrScale = ((31 - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_tmp, w16_tmp)))
+            + (31 - WebRtcSpl_NormW32(w16_len))) - 31;
+        w16_corrScale = WEBRTC_SPL_MAX(0, w16_corrScale);
+
+        /*
+         * Perform the correlation, store in pw32_corr2
+         */
+
+        WebRtcNetEQ_CrossCorr(pw32_corr2,
+            &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len]),
+            &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len - w16_startInd]),
+            w16_len, w16_noOfcorr2, w16_corrScale, -1);
+
+        /* Find maximizing index */
+        w16_ind = WebRtcSpl_MaxIndexW32(pw32_corr2, w16_noOfcorr2);
+        w32_cc = pw32_corr2[w16_ind]; /* this is maximum correlation */
+        w16_ind = w16_ind + w16_startInd; /* correct index for start offset */
+
+        /* Calculate energies */
+        w32_en1 = WebRtcNetEQ_DotW16W16(
+            &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len]),
+            &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len]), w16_len,
+            w16_corrScale);
+        w32_en2 = WebRtcNetEQ_DotW16W16(
+            &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len - w16_ind]),
+            &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_len - w16_ind]),
+            w16_len, w16_corrScale);
+
+        /* Calculate the correlation value w16_bestcorr */
+        if ((w32_en1 > 0) && (w32_en2 > 0))
+        {
+            w16_en1Scale = 16 - WebRtcSpl_NormW32(w32_en1);
+            w16_en1Scale = WEBRTC_SPL_MAX(0, w16_en1Scale);
+            w16_en2Scale = 16 - WebRtcSpl_NormW32(w32_en2);
+            w16_en2Scale = WEBRTC_SPL_MAX(0, w16_en2Scale);
+            /* Make sure total scaling is even (to simplify scale factor after sqrt) */
+            if ((w16_en1Scale + w16_en2Scale) & 1)
+            {
+                /* if sum is odd */
+                w16_en1Scale += 1;
+            }
+            w16_en1 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_en1, w16_en1Scale);
+            w16_en2 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale);
+            w32_en1_mul_en2 = WEBRTC_SPL_MUL_16_16(w16_en1, w16_en2);
+            w16_sqrt_en1en2 = (WebRtc_Word16) WebRtcSpl_SqrtFloor(w32_en1_mul_en2);
+
+            /* Calculate cc/sqrt(en1*en2) in Q14 */
+            w16_ccShiftL = 14 - ((w16_en1Scale + w16_en2Scale) >> 1);
+            w32_cc = WEBRTC_SPL_SHIFT_W32(w32_cc, w16_ccShiftL);
+            w16_bestcorr = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_cc, w16_sqrt_en1en2);
+            w16_bestcorr = WEBRTC_SPL_MIN(16384, w16_bestcorr); /* set maximum to 1.0 */
+
+        }
+        else
+        {
+            /* if either en1 or en2 is zero */
+            w16_bestcorr = 0;
+        }
+
+        /*
+         * Extract the two vectors, pw16_expVecs[0][] and pw16_expVecs[1][],
+         * from the SpeechHistory[]
+         */
+        w16_expVecsLen = ExpandState->w16_maxLag + ExpandState->w16_overlap;
+        pw16_vec1 = &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - w16_expVecsLen]);
+        pw16_vec2 = pw16_vec1 - w16_DistLag;
+        /* Normalize the second vector to the same energy as the first */
+        w32_en1 = WebRtcNetEQ_DotW16W16(pw16_vec1, pw16_vec1, w16_expVecsLen, w16_corrScale);
+        w32_en2 = WebRtcNetEQ_DotW16W16(pw16_vec2, pw16_vec2, w16_expVecsLen, w16_corrScale);
+
+        /*
+         * Confirm that energy factor sqrt(w32_en1/w32_en2) is within difference 0.5 - 2.0
+         * w32_en1/w32_en2 within 0.25 - 4
+         */
+        if (((w32_en1 >> 2) < w32_en2) && ((w32_en1) > (w32_en2 >> 2)))
+        {
+
+            /* Energy constraint fulfilled => use both vectors and scale them accordingly */
+            w16_en2Scale = 16 - WebRtcSpl_NormW32(w32_en2);
+            w16_en2Scale = WEBRTC_SPL_MAX(0, w16_en2Scale);
+            w16_en1Scale = w16_en2Scale - 13;
+
+            /* calculate w32_en1/w32_en2 in Q13 */
+            w32_en1_mul_en2 = WebRtcSpl_DivW32W16(
+                WEBRTC_SPL_SHIFT_W32(w32_en1, -w16_en1Scale),
+                (WebRtc_Word16) (WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale)));
+
+            /* calculate factor in Q13 (sqrt of en1/en2 in Q26) */
+            w16_factor = (WebRtc_Word16) WebRtcSpl_SqrtFloor(
+                WEBRTC_SPL_LSHIFT_W32(w32_en1_mul_en2, 13));
+
+            /* Copy the two vectors and give them the same energy */
+
+            WEBRTC_SPL_MEMCPY_W16(ExpandState->pw16_expVecs[0], pw16_vec1, w16_expVecsLen);
+            WebRtcSpl_AffineTransformVector(ExpandState->pw16_expVecs[1], pw16_vec2,
+                w16_factor, 4096, 13, w16_expVecsLen);
+
+        }
+        else
+        {
+            /* Energy change constraint not fulfilled => only use last vector */
+
+            WEBRTC_SPL_MEMCPY_W16(ExpandState->pw16_expVecs[0], pw16_vec1, w16_expVecsLen);
+            WEBRTC_SPL_MEMCPY_W16(ExpandState->pw16_expVecs[1], ExpandState->pw16_expVecs[0],
+                w16_expVecsLen);
+
+            /* Set the w16_factor since it is used by muting slope */
+            if (((w32_en1 >> 2) < w32_en2) || (w32_en2 == 0))
+            {
+                w16_factor = 4096; /* 0.5 in Q13*/
+            }
+            else
+            {
+                w16_factor = 16384; /* 2.0 in Q13*/
+            }
+        }
+
+        /* Set the 3 lag values */
+        w16_diffLag = w16_DistLag - w16_CorrLag;
+        if (w16_diffLag == 0)
+        {
+            /* DistLag and CorrLag are equal */
+            ExpandState->w16_lags[0] = w16_DistLag;
+            ExpandState->w16_lags[1] = w16_DistLag;
+            ExpandState->w16_lags[2] = w16_DistLag;
+        }
+        else
+        {
+            /* DistLag and CorrLag are not equal; use different combinations of the two */
+            ExpandState->w16_lags[0] = w16_DistLag; /* DistLag only */
+            ExpandState->w16_lags[1] = ((w16_DistLag + w16_CorrLag) >> 1); /* 50/50 */
+            /* Third lag, move one half-step towards CorrLag (in both cases) */
+            if (w16_diffLag > 0)
+            {
+                ExpandState->w16_lags[2] = (w16_DistLag + w16_CorrLag - 1) >> 1;
+            }
+            else
+            {
+                ExpandState->w16_lags[2] = (w16_DistLag + w16_CorrLag + 1) >> 1;
+            }
+        }
+
+        /*************************************************
+         * Calculate the LPC and the gain of the filters *
+         *************************************************/
+
+        /* Calculate scale value needed for autocorrelation */
+        w16_tmp = WebRtcSpl_MaxAbsValueW16(
+            &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - fsMultLPCAnalasysLen]),
+            fsMultLPCAnalasysLen);
+
+        w16_tmp = 16 - WebRtcSpl_NormW32(w16_tmp);
+        w16_tmp = WEBRTC_SPL_MIN(w16_tmp,0);
+        w16_tmp = (w16_tmp << 1) + 7;
+        w16_tmp = WEBRTC_SPL_MAX(w16_tmp,0);
+
+        /* set w16_ind to simplify the following expressions */
+        w16_ind = inst->w16_speechHistoryLen - fsMultLPCAnalasysLen - UNVOICED_LPC_ORDER;
+
+        /* store first UNVOICED_LPC_ORDER samples in pw16_rc */
+
+        WEBRTC_SPL_MEMCPY_W16(pw16_rc, &inst->pw16_speechHistory[w16_ind], UNVOICED_LPC_ORDER);
+
+        /* set first samples to zero */
+        WebRtcSpl_MemSetW16(&inst->pw16_speechHistory[w16_ind], 0, UNVOICED_LPC_ORDER);
+
+        /* Calculate UNVOICED_LPC_ORDER+1 lags of the ACF */
+
+        WebRtcNetEQ_CrossCorr(
+            pw32_autoCorr, &(inst->pw16_speechHistory[w16_ind + UNVOICED_LPC_ORDER]),
+            &(inst->pw16_speechHistory[w16_ind + UNVOICED_LPC_ORDER]), fsMultLPCAnalasysLen,
+            UNVOICED_LPC_ORDER + 1, w16_tmp, -1);
+
+        /* Recover the stored samples from pw16_rc */
+
+        WEBRTC_SPL_MEMCPY_W16(&inst->pw16_speechHistory[w16_ind], pw16_rc, UNVOICED_LPC_ORDER);
+
+        if (pw32_autoCorr[0] > 0)
+        { /* check that variance is positive */
+
+            /* estimate AR filter parameters using Levinson-Durbin algorithm
+             (UNVOICED_LPC_ORDER+1 filter coefficients) */
+            stability = WebRtcSpl_LevinsonDurbin(pw32_autoCorr, ExpandState->pw16_arFilter,
+                pw16_rc, UNVOICED_LPC_ORDER);
+
+            /* Only update BGN if filter is stable */
+            if (stability != 1)
+            {
+                /* Set first coefficient to 4096 (1.0 in Q12)*/
+                ExpandState->pw16_arFilter[0] = 4096;
+                /* Set remaining UNVOICED_LPC_ORDER coefficients to zero */
+                WebRtcSpl_MemSetW16(ExpandState->pw16_arFilter + 1, 0, UNVOICED_LPC_ORDER);
+            }
+
+        }
+
+        if (w16_DistLag < 40)
+        {
+            w16_energyLen = 2 * w16_DistLag;
+        }
+        else
+        {
+            w16_energyLen = w16_DistLag;
+        }
+        w16_randLen = w16_energyLen + 30; /* Startup part */
+
+        /* Extract a noise segment */
+        if (w16_randLen <= RANDVEC_NO_OF_SAMPLES)
+        {
+            WEBRTC_SPL_MEMCPY_W16(pw16_randVec,
+                (WebRtc_Word16*) WebRtcNetEQ_kRandnTbl, w16_randLen);
+        }
+        else
+        { /* only applies to SWB where length could be larger than 256 */
+#if FSMULT >= 2  /* Makes pw16_randVec longer than RANDVEC_NO_OF_SAMPLES. */
+            WEBRTC_SPL_MEMCPY_W16(pw16_randVec, (WebRtc_Word16*) WebRtcNetEQ_kRandnTbl,
+                RANDVEC_NO_OF_SAMPLES);
+            inst->w16_seedInc = (inst->w16_seedInc + 2) & (RANDVEC_NO_OF_SAMPLES - 1);
+            assert(w16_randLen <= FSMULT * 120 + 30);
+            WebRtcNetEQ_RandomVec(&inst->uw16_seed, &pw16_randVec[RANDVEC_NO_OF_SAMPLES],
+                (WebRtc_Word16) (w16_randLen - RANDVEC_NO_OF_SAMPLES), inst->w16_seedInc);
+#else
+            assert(0);
+#endif
+        }
+
+        /* Set up state vector and calculate scale factor for unvoiced filtering */
+
+        WEBRTC_SPL_MEMCPY_W16(ExpandState->pw16_arState,
+            &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - UNVOICED_LPC_ORDER]),
+            UNVOICED_LPC_ORDER);
+        WEBRTC_SPL_MEMCPY_W16(pw16_unvoicedVec - UNVOICED_LPC_ORDER,
+            &(inst->pw16_speechHistory[inst->w16_speechHistoryLen - 128 - UNVOICED_LPC_ORDER]),
+            UNVOICED_LPC_ORDER);
+        WebRtcSpl_FilterMAFastQ12(&inst->pw16_speechHistory[inst->w16_speechHistoryLen - 128],
+            pw16_unvoicedVec, ExpandState->pw16_arFilter, UNVOICED_LPC_ORDER + 1, 128);
+        if (WebRtcSpl_MaxAbsValueW16(pw16_unvoicedVec, 128) > 4000)
+        {
+            w16_scale = 4;
+        }
+        else
+        {
+            w16_scale = 0;
+        }
+        w32_tmp = WebRtcNetEQ_DotW16W16(pw16_unvoicedVec, pw16_unvoicedVec, 128, w16_scale);
+
+        /* Normalize w32_tmp to 28 or 29 bits to preserve sqrt() accuracy */
+        w16_tmp = WebRtcSpl_NormW32(w32_tmp) - 3;
+        w16_tmp += ((w16_tmp & 0x1) ^ 0x1); /* Make sure we do an odd number of shifts since we
+         from earlier have 7 shifts from dividing with 128.*/
+        w32_tmp = WEBRTC_SPL_SHIFT_W32(w32_tmp, w16_tmp);
+        w32_tmp = WebRtcSpl_SqrtFloor(w32_tmp);
+        ExpandState->w16_arGainScale = 13 + ((w16_tmp + 7 - w16_scale) >> 1);
+        ExpandState->w16_arGain = (WebRtc_Word16) w32_tmp;
+
+        /********************************************************************
+         * Calculate vfraction from bestcorr                                *
+         * if (bestcorr>0.480665)                                           *
+         *     vfraction = ((bestcorr-0.4)/(1-0.4)).^2                      *
+         * else    vfraction = 0                                            *
+         *                                                                  *
+         * approximation (coefficients in Q12):                             *
+         * if (x>0.480665)    (y(x)<0.3)                                    *
+         *   y(x) = -1.264421 + 4.8659148*x - 4.0092827*x^2 + 1.4100529*x^3 *
+         * else y(x) = 0;                                                   *
+         ********************************************************************/
+
+        if (w16_bestcorr > 7875)
+        {
+            /* if x>0.480665 */
+            WebRtc_Word16 w16_x1, w16_x2, w16_x3;
+            w16_x1 = w16_bestcorr;
+            w32_tmp = WEBRTC_SPL_MUL_16_16((WebRtc_Word32) w16_x1, w16_x1);
+            w16_x2 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 14);
+            w32_tmp = WEBRTC_SPL_MUL_16_16(w16_x1, w16_x2);
+            w16_x3 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 14);
+            w32_tmp
+                = (WebRtc_Word32) WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32) WebRtcNetEQ_kMixFractionFuncTbl[0], 14);
+            w32_tmp
+                += (WebRtc_Word32) WEBRTC_SPL_MUL_16_16(WebRtcNetEQ_kMixFractionFuncTbl[1], w16_x1);
+            w32_tmp
+                += (WebRtc_Word32) WEBRTC_SPL_MUL_16_16(WebRtcNetEQ_kMixFractionFuncTbl[2], w16_x2);
+            w32_tmp
+                += (WebRtc_Word32) WEBRTC_SPL_MUL_16_16(WebRtcNetEQ_kMixFractionFuncTbl[3], w16_x3);
+            ExpandState->w16_vFraction = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 12);
+            ExpandState->w16_vFraction = WEBRTC_SPL_MIN(ExpandState->w16_vFraction, 16384);
+            ExpandState->w16_vFraction = WEBRTC_SPL_MAX(ExpandState->w16_vFraction, 0);
+        }
+        else
+        {
+            ExpandState->w16_vFraction = 0;
+        }
+
+        /***********************************************************************
+         * Calculate muting slope, reuse value from earlier scaling of ExpVecs *
+         ***********************************************************************/
+        w16_slope = w16_factor;
+
+        if (w16_slope > 12288)
+        {
+            /* w16_slope > 1.5 ? */
+            /* Calculate (1-(1/slope))/w16_DistLag = (slope-1)/(w16_DistLag*slope) */
+            w32_tmp = w16_slope - 8192;
+            w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, 12); /* Value in Q25 (13+12=25) */
+            w16_tmp = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(w16_DistLag,
+                w16_slope, 8); /* Value in Q5  (13-8=5)  */
+            w16_tmp = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_tmp,
+                w16_tmp); /* Res in Q20 (25-5=20) */
+
+            if (w16_slope > 14746)
+            { /* w16_slope > 1.8 ? */
+                ExpandState->w16_muteSlope = (w16_tmp + 1) >> 1;
+            }
+            else
+            {
+                ExpandState->w16_muteSlope = (w16_tmp + 4) >> 3;
+            }
+            ExpandState->w16_onset = 1;
+        }
+        else if (ExpandState->w16_vFraction > 13107)
+        {
+            /* w16_vFraction > 0.8 ? */
+            if (w16_slope > 8028)
+            {
+                /* w16_vFraction > 0.98 ? */
+                ExpandState->w16_muteSlope = 0;
+            }
+            else
+            {
+                /* Calculate (1-slope)/w16_DistLag */
+                w32_tmp = 8192 - w16_slope;
+                w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, 7); /* Value in Q20 (13+7=20) */
+                ExpandState->w16_muteSlope = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_tmp,
+                    w16_DistLag); /* Res in Q20 (20-0=20) */
+            }
+            ExpandState->w16_onset = 0;
+        }
+        else
+        {
+            /*
+             * Use the minimum of 0.005 (0.9 on 50 samples in NB and the slope)
+             * and ((1-slope)/w16_DistLag)
+             */
+            w32_tmp = 8192 - w16_slope;
+            w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, 7); /* Value in Q20 (13+7=20) */
+            w32_tmp = WEBRTC_SPL_MAX(w32_tmp, 0);
+            ExpandState->w16_muteSlope = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_tmp,
+                w16_DistLag); /* Res   in Q20    (20-0=20) */
+            w16_tmp = WebRtcNetEQ_k5243div[fs_mult]; /* 0.005/fs_mult = 5243/fs_mult */
+            ExpandState->w16_muteSlope = WEBRTC_SPL_MAX(w16_tmp, ExpandState->w16_muteSlope);
+            ExpandState->w16_onset = 0;
+        }
+    }
+    else
+    {
+        /* This is not the first Expansion, parameters are already estimated. */
+
+        /* Extract a noise segment */
+        if (BGNonly) /* If we should produce nothing but background noise */
+        {
+            if (*pw16_len > 0)
+            {
+                /*
+                 * Set length to input parameter length, but not more than length
+                 * of pw16_randVec
+                 */
+                w16_lag = WEBRTC_SPL_MIN(*pw16_len, FSMULT * 120 + 30);
+            }
+            else
+            {
+                /* set length to 15 ms */
+                w16_lag = fsMult120;
+            }
+            w16_randLen = w16_lag;
+        }
+        else
+        {
+            w16_randLen = ExpandState->w16_maxLag;
+        }
+
+        if (w16_randLen <= RANDVEC_NO_OF_SAMPLES)
+        {
+            inst->w16_seedInc = (inst->w16_seedInc + 2) & (RANDVEC_NO_OF_SAMPLES - 1);
+            WebRtcNetEQ_RandomVec(&inst->uw16_seed, pw16_randVec, w16_randLen,
+                inst->w16_seedInc);
+        }
+        else
+        { /* only applies to SWB where length could be larger than 256 */
+#if FSMULT >= 2  /* Makes pw16_randVec longer than RANDVEC_NO_OF_SAMPLES. */
+            inst->w16_seedInc = (inst->w16_seedInc + 2) & (RANDVEC_NO_OF_SAMPLES - 1);
+            WebRtcNetEQ_RandomVec(&inst->uw16_seed, pw16_randVec, RANDVEC_NO_OF_SAMPLES,
+                inst->w16_seedInc);
+            inst->w16_seedInc = (inst->w16_seedInc + 2) & (RANDVEC_NO_OF_SAMPLES - 1);
+            assert(w16_randLen <= FSMULT * 120 + 30);
+            WebRtcNetEQ_RandomVec(&inst->uw16_seed, &pw16_randVec[RANDVEC_NO_OF_SAMPLES],
+                (WebRtc_Word16) (w16_randLen - RANDVEC_NO_OF_SAMPLES), inst->w16_seedInc);
+#else
+            assert(0);
+#endif
+        }
+    } /* end if(first expand or BGNonly) ... else ... */
+
+    if (!BGNonly) /* Voiced and unvoiced parts not used if generating BGN only */
+    {
+
+        /*************************************************
+         * Generate signal                               *
+         *************************************************/
+
+        /*
+         * Voiced part
+         */
+
+        /* Linearly mute the use_vfraction value from 1 to vfraction */
+        if (ExpandState->w16_consecExp == 0)
+        {
+            ExpandState->w16_currentVFraction = 16384; /* 1.0 in Q14 */
+        }
+
+        ExpandState->w16_lagsPosition = ExpandState->w16_lagsPosition
+            + ExpandState->w16_lagsDirection;
+
+        /* Change direction if needed */
+        if (ExpandState->w16_lagsPosition == 0)
+        {
+            ExpandState->w16_lagsDirection = 1;
+        }
+        if (ExpandState->w16_lagsPosition == 2)
+        {
+            ExpandState->w16_lagsDirection = -1;
+        }
+
+        /* Generate a weighted vector with the selected lag */
+        w16_expVecsLen = ExpandState->w16_maxLag + ExpandState->w16_overlap;
+        w16_lag = ExpandState->w16_lags[ExpandState->w16_lagsPosition];
+        /* Copy lag+overlap data */
+        w16_expVecPos = w16_expVecsLen - w16_lag - ExpandState->w16_overlap;
+        w16_tmp = w16_lag + ExpandState->w16_overlap;
+        if (ExpandState->w16_lagsPosition == 0)
+        {
+            WEBRTC_SPL_MEMCPY_W16(pw16_voicedVecStorage,
+                &(ExpandState->pw16_expVecs[0][w16_expVecPos]), w16_tmp);
+        }
+        else if (ExpandState->w16_lagsPosition == 1)
+        {
+            WebRtcSpl_ScaleAndAddVectorsWithRound(&ExpandState->pw16_expVecs[0][w16_expVecPos], 3,
+                &ExpandState->pw16_expVecs[1][w16_expVecPos], 1, 2, pw16_voicedVecStorage,
+                w16_tmp);
+
+        }
+        else if (ExpandState->w16_lagsPosition == 2)
+        {
+            WebRtcSpl_ScaleAndAddVectorsWithRound(&ExpandState->pw16_expVecs[0][w16_expVecPos], 1,
+                &ExpandState->pw16_expVecs[1][w16_expVecPos], 1, 1, pw16_voicedVecStorage,
+                w16_tmp);
+        }
+
+        if (inst->fs == 8000)
+        {
+            /* Windowing in Q15 */
+            w16_winMute = NETEQ_OVERLAP_WINMUTE_8KHZ_START;
+            w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_8KHZ_INC;
+            w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_8KHZ_START;
+            w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_8KHZ_INC;
+#ifdef NETEQ_WIDEBAND
+        }
+        else if (inst->fs == 16000)
+        {
+            /* Windowing in Q15 */
+            w16_winMute = NETEQ_OVERLAP_WINMUTE_16KHZ_START;
+            w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_16KHZ_INC;
+            w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_16KHZ_START;
+            w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_16KHZ_INC;
+#endif
+#ifdef NETEQ_32KHZ_WIDEBAND
+        }
+        else if (inst->fs == 32000)
+        {
+            /* Windowing in Q15 */
+            w16_winMute = NETEQ_OVERLAP_WINMUTE_32KHZ_START;
+            w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_32KHZ_INC;
+            w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_32KHZ_START;
+            w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_32KHZ_INC;
+#endif
+#ifdef NETEQ_48KHZ_WIDEBAND
+        }
+        else /* if (inst->fs==48000) */
+        {
+            /* Windowing in Q15 */
+            w16_winMute = NETEQ_OVERLAP_WINMUTE_48KHZ_START;
+            w16_winMuteInc = NETEQ_OVERLAP_WINMUTE_48KHZ_INC;
+            w16_winUnMute = NETEQ_OVERLAP_WINUNMUTE_48KHZ_START;
+            w16_winUnMuteInc = NETEQ_OVERLAP_WINUNMUTE_48KHZ_INC;
+#endif
+        }
+
+        /* Smooth the expanded if it has not been muted to or vfraction is larger than 0.5 */
+        if ((ExpandState->w16_expandMuteFactor > 819) && (ExpandState->w16_currentVFraction
+            > 8192))
+        {
+            for (i = 0; i < ExpandState->w16_overlap; i++)
+            {
+                /* Do overlap add between new vector and overlap */
+                ExpandState->pw16_overlapVec[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+                    WEBRTC_SPL_MUL_16_16(ExpandState->pw16_overlapVec[i], w16_winMute) +
+                    WEBRTC_SPL_MUL_16_16(
+                        WEBRTC_SPL_MUL_16_16_RSFT(ExpandState->w16_expandMuteFactor,
+                            pw16_voicedVecStorage[i], 14), w16_winUnMute) + 16384, 15);
+                w16_winMute += w16_winMuteInc;
+                w16_winUnMute += w16_winUnMuteInc;
+            }
+        }
+        else if (ExpandState->w16_expandMuteFactor == 0
+#ifdef NETEQ_STEREO
+            && msInfo->msMode == NETEQ_MONO /* only if mono mode is selected */
+#endif
+        )
+        {
+            /* if ExpandState->w16_expandMuteFactor = 0 => all is CNG component 
+             set the output length to 15ms (for best CNG production) */
+            w16_tmp = fsMult120;
+            ExpandState->w16_maxLag = w16_tmp;
+            ExpandState->w16_lags[0] = w16_tmp;
+            ExpandState->w16_lags[1] = w16_tmp;
+            ExpandState->w16_lags[2] = w16_tmp;
+        }
+
+        /*
+         * Unvoiced part
+         */
+
+        WEBRTC_SPL_MEMCPY_W16(pw16_unvoicedVec - UNVOICED_LPC_ORDER,
+            ExpandState->pw16_arState,
+            UNVOICED_LPC_ORDER);
+        if (ExpandState->w16_arGainScale > 0)
+        {
+            w32_tmp = ((WebRtc_Word32) 1) << (ExpandState->w16_arGainScale - 1);
+        }
+        else
+        {
+            w32_tmp = 0;
+        }
+
+        /* Note that shift value can be >16 which complicates things for some DSPs */
+        WebRtcSpl_AffineTransformVector(pw16_scaledRandVec, pw16_randVec,
+            ExpandState->w16_arGain, w32_tmp, ExpandState->w16_arGainScale, w16_lag);
+
+        WebRtcSpl_FilterARFastQ12(pw16_scaledRandVec, pw16_unvoicedVec,
+            ExpandState->pw16_arFilter, UNVOICED_LPC_ORDER + 1, w16_lag);
+
+        WEBRTC_SPL_MEMCPY_W16(ExpandState->pw16_arState,
+            &(pw16_unvoicedVec[w16_lag - UNVOICED_LPC_ORDER]),
+            UNVOICED_LPC_ORDER);
+
+        /*
+         * Voiced + Unvoiced
+         */
+
+        /* For lag = 
+         <=31*fs_mult         => go from 1 to 0 in about 8 ms
+         (>=31..<=63)*fs_mult => go from 1 to 0 in about 16 ms
+         >=64*fs_mult         => go from 1 to 0 in about 32 ms
+         */
+        w16_tmp = (31 - WebRtcSpl_NormW32(ExpandState->w16_maxLag)) - 5; /* getbits(w16_maxLag) -5 */
+        w16_vfractionChange = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(256, w16_tmp);
+        if (ExpandState->w16_stopMuting == 1)
+        {
+            w16_vfractionChange = 0;
+        }
+
+        /* Create combined signal (unmuted) by shifting in more and more of unvoiced part */
+        w16_tmp = 8 - w16_tmp; /* getbits(w16_vfractionChange) */
+        w16_tmp = (ExpandState->w16_currentVFraction - ExpandState->w16_vFraction) >> w16_tmp;
+        w16_tmp = WEBRTC_SPL_MIN(w16_tmp, w16_lag);
+        WebRtcNetEQ_MixVoiceUnvoice(pw16_outData, pw16_voicedVec, pw16_unvoicedVec,
+            &ExpandState->w16_currentVFraction, w16_vfractionChange, w16_tmp);
+
+        if (w16_tmp < w16_lag)
+        {
+            if (w16_vfractionChange != 0)
+            {
+                ExpandState->w16_currentVFraction = ExpandState->w16_vFraction;
+            }
+            w16_tmp2 = 16384 - ExpandState->w16_currentVFraction;
+            WebRtcSpl_ScaleAndAddVectorsWithRound(pw16_voicedVec + w16_tmp,
+                ExpandState->w16_currentVFraction, pw16_unvoicedVec + w16_tmp, w16_tmp2, 14,
+                pw16_outData + w16_tmp, (WebRtc_Word16) (w16_lag - w16_tmp));
+        }
+
+        /* Select muting factor */
+        if (ExpandState->w16_consecExp == 3)
+        {
+            /* 0.95 on 50 samples in NB (0.0010/fs_mult in Q20) */
+            ExpandState->w16_muteSlope = WEBRTC_SPL_MAX(ExpandState->w16_muteSlope,
+                WebRtcNetEQ_k1049div[fs_mult]);
+        }
+        if (ExpandState->w16_consecExp == 7)
+        {
+            /* 0.90 on 50 samples in NB (0.0020/fs_mult in Q20) */
+            ExpandState->w16_muteSlope = WEBRTC_SPL_MAX(ExpandState->w16_muteSlope,
+                WebRtcNetEQ_k2097div[fs_mult]);
+        }
+
+        /* Mute segment according to slope value */
+        if ((ExpandState->w16_consecExp != 0) || (ExpandState->w16_onset != 1))
+        {
+            /* Mute to the previous level, then continue with the muting */
+            WebRtcSpl_AffineTransformVector(pw16_outData, pw16_outData,
+                ExpandState->w16_expandMuteFactor, 8192, 14, w16_lag);
+
+            if ((ExpandState->w16_stopMuting != 1))
+            {
+                WebRtcNetEQ_MuteSignal(pw16_outData, ExpandState->w16_muteSlope, w16_lag);
+
+                w16_tmp = 16384 - (WebRtc_Word16) ((WEBRTC_SPL_MUL_16_16(w16_lag,
+                    ExpandState->w16_muteSlope) + 8192) >> 6); /* 20-14 = 6 */
+                w16_tmp = (WebRtc_Word16) ((WEBRTC_SPL_MUL_16_16(w16_tmp,
+                    ExpandState->w16_expandMuteFactor) + 8192) >> 14);
+
+                /* Guard against getting stuck with very small (but sometimes audible) gain */
+                if ((ExpandState->w16_consecExp > 3) && (w16_tmp
+                    >= ExpandState->w16_expandMuteFactor))
+                {
+                    ExpandState->w16_expandMuteFactor = 0;
+                }
+                else
+                {
+                    ExpandState->w16_expandMuteFactor = w16_tmp;
+                }
+            }
+        }
+
+    } /* end if(!BGNonly) */
+
+    /*
+     * BGN
+     */
+
+    if (BGNState->w16_initialized == 1)
+    {
+        /* BGN parameters are initialized; use them */
+
+        WEBRTC_SPL_MEMCPY_W16(pw16_cngVec - BGN_LPC_ORDER,
+            BGNState->pw16_filterState,
+            BGN_LPC_ORDER);
+
+        if (BGNState->w16_scaleShift > 1)
+        {
+            w32_tmp = ((WebRtc_Word32) 1) << (BGNState->w16_scaleShift - 1);
+        }
+        else
+        {
+            w32_tmp = 0;
+        }
+
+        /* Scale random vector to correct energy level */
+        /* Note that shift value can be >16 which complicates things for some DSPs */
+        WebRtcSpl_AffineTransformVector(pw16_scaledRandVec, pw16_randVec,
+            BGNState->w16_scale, w32_tmp, BGNState->w16_scaleShift, w16_lag);
+
+        WebRtcSpl_FilterARFastQ12(pw16_scaledRandVec, pw16_cngVec, BGNState->pw16_filter,
+            BGN_LPC_ORDER + 1, w16_lag);
+
+        WEBRTC_SPL_MEMCPY_W16(BGNState->pw16_filterState,
+            &(pw16_cngVec[w16_lag-BGN_LPC_ORDER]),
+            BGN_LPC_ORDER);
+
+        /* Unmute the insertion of background noise */
+
+        if (bgnMode == BGN_FADE && ExpandState->w16_consecExp >= FADE_BGN_TIME
+            && BGNState->w16_mutefactor > 0)
+        {
+            /* fade BGN to zero */
+            /* calculate muting slope, approx 2^18/fsHz */
+            WebRtc_Word16 muteFactor;
+            if (fs_mult == 1)
+            {
+                muteFactor = -32;
+            }
+            else if (fs_mult == 2)
+            {
+                muteFactor = -16;
+            }
+            else if (fs_mult == 4)
+            {
+                muteFactor = -8;
+            }
+            else
+            {
+                muteFactor = -5;
+            }
+            /* use UnmuteSignal function with negative slope */
+            WebRtcNetEQ_UnmuteSignal(pw16_cngVec, &BGNState->w16_mutefactor, /* In Q14 */
+            pw16_cngVec, muteFactor, /* In Q20 */
+            w16_lag);
+        }
+        else if (BGNState->w16_mutefactor < 16384 && !BGNonly)
+        {
+            /* if (w16_mutefactor < 1)  and not BGN only (since then we use no muting) */
+
+            /*
+             * If BGN_OFF, or if BNG_FADE has started fading,
+             * mutefactor should not be increased.
+             */
+            if (ExpandState->w16_stopMuting != 1 && bgnMode != BGN_OFF && !(bgnMode
+                == BGN_FADE && ExpandState->w16_consecExp >= FADE_BGN_TIME))
+            {
+                WebRtcNetEQ_UnmuteSignal(pw16_cngVec, &BGNState->w16_mutefactor, /* In Q14 */
+                pw16_cngVec, ExpandState->w16_muteSlope, /* In Q20 */
+                w16_lag);
+            }
+            else
+            {
+                /* BGN_ON and stop muting, or
+                 * BGN_OFF (mute factor is always 0), or
+                 * BGN_FADE has reached 0 */
+                WebRtcSpl_AffineTransformVector(pw16_cngVec, pw16_cngVec,
+                    BGNState->w16_mutefactor, 8192, 14, w16_lag);
+            }
+        }
+    }
+    else
+    {
+        /* BGN parameters have not been initialized; use zero noise */
+        WebRtcSpl_MemSetW16(pw16_cngVec, 0, w16_lag);
+    }
+
+    if (BGNonly)
+    {
+        /* Copy BGN to outdata */
+        for (i = 0; i < w16_lag; i++)
+        {
+            pw16_outData[i] = pw16_cngVec[i];
+        }
+    }
+    else
+    {
+        /* Add CNG vector to the Voiced + Unvoiced vectors */
+        for (i = 0; i < w16_lag; i++)
+        {
+            pw16_outData[i] = pw16_outData[i] + pw16_cngVec[i];
+        }
+
+        /* increase call number */
+        ExpandState->w16_consecExp = ExpandState->w16_consecExp + 1;
+        if (ExpandState->w16_consecExp < 0) /* Guard against overflow */
+            ExpandState->w16_consecExp = FADE_BGN_TIME; /* "Arbitrary" large num of expands */
+    }
+
+    inst->w16_mode = MODE_EXPAND;
+    *pw16_len = w16_lag;
+
+    /* Update in-call and post-call statistics */
+    if (ExpandState->w16_stopMuting != 1 || BGNonly)
+    {
+        /*
+         * Only do this if StopMuting != 1 or if explicitly BGNonly, otherwise Expand is
+         * called from Merge or Normal and special measures must be taken.
+         */
+        inst->statInst.expandLength += (WebRtc_UWord32) *pw16_len;
+        if (ExpandState->w16_expandMuteFactor == 0 || BGNonly)
+        {
+            /* Only noise expansion */
+            inst->statInst.expandedNoiseSamples += *pw16_len;
+        }
+        else
+        {
+            /* Voice expand (note: not necessarily _voiced_) */
+            inst->statInst.expandedVoiceSamples += *pw16_len;
+        }
+    }
+
+    return 0;
+}
+
+/****************************************************************************
+ * WebRtcNetEQ_GenerateBGN(...)
+ *
+ * This function generates and writes len samples of background noise to the
+ * output vector. The Expand function will be called repeatedly until the
+ * correct number of samples is produced.
+ *
+ * Input:
+ *      - inst          : NetEq instance, i.e. the user that requests more
+ *                        speech/audio data
+ *      - scratchPtr    : Pointer to scratch vector
+ *      - len           : Desired length of produced BGN.
+ *
+ *
+ * Output:
+ *      - pw16_outData  : Pointer to a memory space where the output data
+ *                        should be stored
+ *
+ * Return value         : >=0 - Number of noise samples produced and written
+ *                              to output
+ *                        -1  - Error
+ */
+
+int WebRtcNetEQ_GenerateBGN(DSPInst_t *inst,
+#ifdef SCRATCH
+                            WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                            WebRtc_Word16 *pw16_outData, WebRtc_Word16 len)
+{
+
+    WebRtc_Word16 pos = 0;
+    WebRtc_Word16 tempLen = len;
+
+    while (tempLen > 0)
+    {
+        /* while we still need more noise samples, call Expand to obtain background noise */
+        WebRtcNetEQ_Expand(inst,
+#ifdef SCRATCH
+            pw16_scratchPtr,
+#endif
+            &pw16_outData[pos], &tempLen, 1 /*BGNonly*/);
+
+        pos += tempLen; /* we got this many samples */
+        tempLen = len - pos; /* this is the number of samples we still need */
+    }
+
+    return pos;
+}
+
+#undef   SCRATCH_PW16_BEST_CORR_INDEX
+#undef   SCRATCH_PW16_BEST_CORR
+#undef   SCRATCH_PW16_BEST_DIST_INDEX
+#undef   SCRATCH_PW16_BEST_DIST
+#undef   SCRATCH_PW16_CORR_VEC
+#undef   SCRATCH_PW16_CORR2
+#undef   SCRATCH_PW32_AUTO_CORR
+#undef   SCRATCH_PW16_RC
+#undef   SCRATCH_PW16_RAND_VEC
+#undef   SCRATCH_NETEQDSP_CORRELATOR
+#undef   SCRATCH_PW16_SCALED_RAND_VEC
+#undef   SCRATCH_PW16_UNVOICED_VEC_SPACE
+
diff --git a/src/modules/audio_coding/neteq/interface/webrtc_neteq.h b/src/modules/audio_coding/neteq/interface/webrtc_neteq.h
new file mode 100644
index 0000000..aacfaeb
--- /dev/null
+++ b/src/modules/audio_coding/neteq/interface/webrtc_neteq.h
@@ -0,0 +1,222 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This is the main API for NetEQ. Helper macros are located in webrtc_neteq_help_macros.h,
+ * while some internal API functions are found in webrtc_neteq_internal.h.
+ */
+
+#include "typedefs.h"
+
+#ifndef WEBRTC_NETEQ_H
+#define WEBRTC_NETEQ_H
+
+#ifdef __cplusplus 
+extern "C"
+{
+#endif
+
+/**********************************************************
+ * Definitions
+ */
+
+enum WebRtcNetEQDecoder
+{
+    kDecoderReservedStart,
+    kDecoderPCMu,
+    kDecoderPCMa,
+    kDecoderPCMu_2ch,
+    kDecoderPCMa_2ch,
+    kDecoderILBC,
+    kDecoderISAC,
+    kDecoderISACswb,
+    kDecoderPCM16B,
+    kDecoderPCM16Bwb,
+    kDecoderPCM16Bswb32kHz,
+    kDecoderPCM16Bswb48kHz,
+    kDecoderPCM16B_2ch,
+    kDecoderPCM16Bwb_2ch,
+    kDecoderPCM16Bswb32kHz_2ch,
+    kDecoderG722,
+    kDecoderG722_2ch,
+    kDecoderRED,
+    kDecoderAVT,
+    kDecoderCNG,
+    kDecoderArbitrary,
+    kDecoderG729,
+    kDecoderG729_1,
+    kDecoderG726_16,
+    kDecoderG726_24,
+    kDecoderG726_32,
+    kDecoderG726_40,
+    kDecoderG722_1_16,
+    kDecoderG722_1_24,
+    kDecoderG722_1_32,
+    kDecoderG722_1C_24,
+    kDecoderG722_1C_32,
+    kDecoderG722_1C_48,
+    kDecoderSPEEX_8,
+    kDecoderSPEEX_16,
+    kDecoderCELT_32,
+    kDecoderCELT_32_2ch,
+    kDecoderGSMFR,
+    kDecoderAMR,
+    kDecoderAMRWB,
+    kDecoderReservedEnd
+};
+
+enum WebRtcNetEQNetworkType
+{
+    kUDPNormal,
+    kUDPVideoSync,
+    kTCPNormal,
+    kTCPLargeJitter,
+    kTCPXLargeJitter
+};
+
+enum WebRtcNetEQOutputType
+{
+    kOutputNormal,
+    kOutputPLC,
+    kOutputCNG,
+    kOutputPLCtoCNG,
+    kOutputVADPassive
+};
+
+enum WebRtcNetEQPlayoutMode
+{
+    kPlayoutOn, kPlayoutOff, kPlayoutFax, kPlayoutStreaming
+};
+
+/* Available modes for background noise (inserted after long expands) */
+enum WebRtcNetEQBGNMode
+{
+    kBGNOn, /* default "normal" behavior with eternal noise */
+    kBGNFade, /* noise fades to zero after some time */
+    kBGNOff
+/* background noise is always zero */
+};
+
+/*************************************************
+ * Definitions of decoder calls and the default 
+ * API function calls for each codec
+ */
+
+typedef WebRtc_Word16 (*WebRtcNetEQ_FuncDecode)(void* state, WebRtc_Word16* encoded,
+                                                WebRtc_Word16 len, WebRtc_Word16* decoded,
+                                                WebRtc_Word16* speechType);
+typedef WebRtc_Word16 (*WebRtcNetEQ_FuncDecodePLC)(void* state, WebRtc_Word16* decoded,
+                                                   WebRtc_Word16 frames);
+typedef WebRtc_Word16 (*WebRtcNetEQ_FuncDecodeInit)(void* state);
+typedef WebRtc_Word16 (*WebRtcNetEQ_FuncAddLatePkt)(void* state, WebRtc_Word16* encoded,
+                                                    WebRtc_Word16 len);
+typedef WebRtc_Word16 (*WebRtcNetEQ_FuncGetMDinfo)(void* state);
+typedef WebRtc_Word16 (*WebRtcNetEQ_FuncGetPitchInfo)(void* state, WebRtc_Word16* encoded,
+                                                      WebRtc_Word16* length);
+typedef WebRtc_Word16 (*WebRtcNetEQ_FuncUpdBWEst)(void* state, const WebRtc_UWord16 *encoded,
+                                                  WebRtc_Word32 packet_size,
+                                                  WebRtc_UWord16 rtp_seq_number,
+                                                  WebRtc_UWord32 send_ts,
+                                                  WebRtc_UWord32 arr_ts);
+typedef WebRtc_Word16 (*WebRtcNetEQ_FuncGetErrorCode)(void* state);
+
+/**********************************************************
+ * Structures
+ */
+
+typedef struct
+{
+    enum WebRtcNetEQDecoder codec;
+    WebRtc_Word16 payloadType;
+    WebRtcNetEQ_FuncDecode funcDecode;
+    WebRtcNetEQ_FuncDecode funcDecodeRCU;
+    WebRtcNetEQ_FuncDecodePLC funcDecodePLC;
+    WebRtcNetEQ_FuncDecodeInit funcDecodeInit;
+    WebRtcNetEQ_FuncAddLatePkt funcAddLatePkt;
+    WebRtcNetEQ_FuncGetMDinfo funcGetMDinfo;
+    WebRtcNetEQ_FuncGetPitchInfo funcGetPitch;
+    WebRtcNetEQ_FuncUpdBWEst funcUpdBWEst;
+    WebRtcNetEQ_FuncGetErrorCode funcGetErrorCode;
+    void* codec_state;
+    WebRtc_UWord16 codec_fs;
+} WebRtcNetEQ_CodecDef;
+
+typedef struct
+{
+    WebRtc_UWord16 fraction_lost;
+    WebRtc_UWord32 cum_lost;
+    WebRtc_UWord32 ext_max;
+    WebRtc_UWord32 jitter;
+} WebRtcNetEQ_RTCPStat;
+
+/**********************************************************
+ * NETEQ Functions
+ */
+
+/* Info functions */
+
+#define WEBRTC_NETEQ_MAX_ERROR_NAME 40
+int WebRtcNetEQ_GetVersion(char *version);
+int WebRtcNetEQ_GetErrorCode(void *inst);
+int WebRtcNetEQ_GetErrorName(int errorCode, char *errorName, int maxStrLen);
+
+/* Instance memory assign functions */
+
+int WebRtcNetEQ_AssignSize(int *sizeinbytes);
+int WebRtcNetEQ_Assign(void **inst, void *NETEQ_inst_Addr);
+int WebRtcNetEQ_GetRecommendedBufferSize(void *inst, const enum WebRtcNetEQDecoder *codec,
+                                         int noOfCodecs, enum WebRtcNetEQNetworkType nwType,
+                                         int *MaxNoOfPackets, int *sizeinbytes);
+int WebRtcNetEQ_AssignBuffer(void *inst, int MaxNoOfPackets, void *NETEQ_Buffer_Addr,
+                             int sizeinbytes);
+
+/* Init functions */
+
+int WebRtcNetEQ_Init(void *inst, WebRtc_UWord16 fs);
+int WebRtcNetEQ_SetAVTPlayout(void *inst, int PlayoutAVTon);
+int WebRtcNetEQ_SetExtraDelay(void *inst, int DelayInMs);
+int WebRtcNetEQ_SetPlayoutMode(void *inst, enum WebRtcNetEQPlayoutMode playoutMode);
+int WebRtcNetEQ_SetBGNMode(void *inst, enum WebRtcNetEQBGNMode bgnMode);
+int WebRtcNetEQ_GetBGNMode(const void *inst, enum WebRtcNetEQBGNMode *bgnMode);
+
+/* Codec Database functions */
+
+int WebRtcNetEQ_CodecDbReset(void *inst);
+int WebRtcNetEQ_CodecDbAdd(void *inst, WebRtcNetEQ_CodecDef *codecInst);
+int WebRtcNetEQ_CodecDbRemove(void *inst, enum WebRtcNetEQDecoder codec);
+int WebRtcNetEQ_CodecDbGetSizeInfo(void *inst, WebRtc_Word16 *UsedEntries,
+                                   WebRtc_Word16 *MaxEntries);
+int WebRtcNetEQ_CodecDbGetCodecInfo(void *inst, WebRtc_Word16 Entry,
+                                    enum WebRtcNetEQDecoder *codec);
+
+/* Real-time functions */
+
+int WebRtcNetEQ_RecIn(void *inst, WebRtc_Word16 *p_w16datagramstart, WebRtc_Word16 w16_RTPlen,
+                      WebRtc_UWord32 uw32_timeRec);
+int WebRtcNetEQ_RecOut(void *inst, WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len);
+int WebRtcNetEQ_GetRTCPStats(void *inst, WebRtcNetEQ_RTCPStat *RTCP_inst);
+int WebRtcNetEQ_GetRTCPStatsNoReset(void *inst, WebRtcNetEQ_RTCPStat *RTCP_inst);
+int WebRtcNetEQ_GetSpeechTimeStamp(void *inst, WebRtc_UWord32 *timestamp);
+int WebRtcNetEQ_GetSpeechOutputType(void *inst, enum WebRtcNetEQOutputType *outputType);
+
+/* VQmon related functions */
+int WebRtcNetEQ_VQmonRecOutStatistics(void *inst, WebRtc_UWord16 *validVoiceDurationMs,
+                                      WebRtc_UWord16 *concealedVoiceDurationMs,
+                                      WebRtc_UWord8 *concealedVoiceFlags);
+int WebRtcNetEQ_VQmonGetConfiguration(void *inst, WebRtc_UWord16 *absMaxDelayMs,
+                                      WebRtc_UWord8 *adaptationRate);
+int WebRtcNetEQ_VQmonGetRxStatistics(void *inst, WebRtc_UWord16 *avgDelayMs,
+                                     WebRtc_UWord16 *maxDelayMs);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/src/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h b/src/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h
new file mode 100644
index 0000000..c6f19bb
--- /dev/null
+++ b/src/modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h
@@ -0,0 +1,387 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains some helper macros that can be used when loading the
+ * NetEQ codec database.
+ */
+
+#ifndef WEBRTC_NETEQ_HELP_MACROS_H
+#define WEBRTC_NETEQ_HELP_MACROS_H
+
+#ifndef NULL
+#define NULL 0
+#endif
+
+/**********************************************************
+ * Help macros for NetEQ initialization
+ */
+
+#define SET_CODEC_PAR(inst,decoder,pt,state,fs) \
+                    inst.codec=decoder; \
+                    inst.payloadType=pt; \
+                    inst.codec_state=state; \
+                    inst.codec_fs=fs;
+
+#define SET_PCMU_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG711_DecodeU; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=NULL; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_PCMA_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG711_DecodeA; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=NULL; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_ILBC_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcIlbcfix_Decode; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcIlbcfix_NetEqPlc; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcIlbcfix_Decoderinit30Ms; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_ISAC_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcIsac_Decode; \
+                    inst.funcDecodeRCU=(WebRtcNetEQ_FuncDecode)WebRtcIsac_DecodeRcu; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcIsac_DecoderInit; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=(WebRtcNetEQ_FuncUpdBWEst)WebRtcIsac_UpdateBwEstimate; \
+                    inst.funcGetErrorCode=(WebRtcNetEQ_FuncGetErrorCode)WebRtcIsac_GetErrorCode;
+
+#define SET_ISACfix_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcIsacfix_Decode; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcIsacfix_DecoderInit; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=(WebRtcNetEQ_FuncUpdBWEst)WebRtcIsacfix_UpdateBwEstimate; \
+                    inst.funcGetErrorCode=(WebRtcNetEQ_FuncGetErrorCode)WebRtcIsacfix_GetErrorCode;
+
+#define SET_ISACSWB_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcIsac_Decode; \
+                    inst.funcDecodeRCU=(WebRtcNetEQ_FuncDecode)WebRtcIsac_DecodeRcu; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcIsac_DecoderInit; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=(WebRtcNetEQ_FuncUpdBWEst)WebRtcIsac_UpdateBwEstimate; \
+                    inst.funcGetErrorCode=(WebRtcNetEQ_FuncGetErrorCode)WebRtcIsac_GetErrorCode;
+
+#define SET_G729_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG729_Decode; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG729_DecodePlc; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG729_DecoderInit; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G729_1_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7291_Decode; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7291_DecoderInit; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=(WebRtcNetEQ_FuncUpdBWEst)WebRtcG7291_DecodeBwe; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_PCM16B_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcPcm16b_DecodeW16; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=NULL; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_PCM16B_WB_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcPcm16b_DecodeW16; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=NULL; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_PCM16B_SWB32_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcPcm16b_DecodeW16; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=NULL; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+
+#define SET_PCM16B_SWB48_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcPcm16b_DecodeW16; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=NULL; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G722_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG722_Decode; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG722_DecoderInit;\
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G722_1_16_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221_Decode16; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221_DecodePlc16; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221_DecoderInit16; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G722_1_24_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221_Decode24; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221_DecodePlc24; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221_DecoderInit24; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G722_1_32_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221_Decode32; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221_DecodePlc32; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221_DecoderInit32; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G722_1C_24_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221C_Decode24; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221C_DecodePlc24; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221C_DecoderInit24; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G722_1C_32_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221C_Decode32; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221C_DecodePlc32; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221C_DecoderInit32; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G722_1C_48_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG7221C_Decode48; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcG7221C_DecodePlc48; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG7221C_DecoderInit48; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_AMR_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcAmr_Decode; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcAmr_DecodePlc; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcAmr_DecoderInit; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_AMRWB_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcAmrWb_Decode; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcAmrWb_DecodePlc; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcAmrWb_DecoderInit; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_GSMFR_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcGSMFR_Decode; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcGSMFR_DecodePlc; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcGSMFR_DecoderInit; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G726_16_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG726_decode16; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG726_decoderinit16; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G726_24_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG726_decode24; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG726_decoderinit24; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G726_32_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG726_decode32; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG726_decoderinit32; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_G726_40_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcG726_decode40; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcG726_decoderinit40; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_SPEEX_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcSpeex_Decode; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=(WebRtcNetEQ_FuncDecodePLC)WebRtcSpeex_DecodePlc; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcSpeex_DecoderInit; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_CELT_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcCelt_Decode; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcCelt_DecoderInit; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_CELTSLAVE_FUNCTIONS(inst) \
+                    inst.funcDecode=(WebRtcNetEQ_FuncDecode)WebRtcCelt_DecodeSlave; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=(WebRtcNetEQ_FuncDecodeInit)WebRtcCelt_DecoderInitSlave; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_RED_FUNCTIONS(inst) \
+                    inst.funcDecode=NULL; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=NULL; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_AVT_FUNCTIONS(inst) \
+                    inst.funcDecode=NULL; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=NULL; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#define SET_CNG_FUNCTIONS(inst) \
+                    inst.funcDecode=NULL; \
+                    inst.funcDecodeRCU=NULL; \
+                    inst.funcDecodePLC=NULL; \
+                    inst.funcDecodeInit=NULL; \
+                    inst.funcAddLatePkt=NULL; \
+                    inst.funcGetMDinfo=NULL; \
+                    inst.funcGetPitch=NULL; \
+                    inst.funcUpdBWEst=NULL; \
+                    inst.funcGetErrorCode=NULL;
+
+#endif /* WEBRTC_NETEQ_HELP_MACROS_H */
+
diff --git a/src/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h b/src/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h
new file mode 100644
index 0000000..8ca6181
--- /dev/null
+++ b/src/modules/audio_coding/neteq/interface/webrtc_neteq_internal.h
@@ -0,0 +1,236 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the internal API functions.
+ */
+
+#include "typedefs.h"
+
+#ifndef WEBRTC_NETEQ_INTERNAL_H
+#define WEBRTC_NETEQ_INTERNAL_H
+
+#ifdef __cplusplus 
+extern "C"
+{
+#endif
+
+typedef struct
+{
+    WebRtc_UWord8 payloadType;
+    WebRtc_UWord16 sequenceNumber;
+    WebRtc_UWord32 timeStamp;
+    WebRtc_UWord32 SSRC;
+    WebRtc_UWord8 markerBit;
+} WebRtcNetEQ_RTPInfo;
+
+/****************************************************************************
+ * WebRtcNetEQ_RecInRTPStruct(...)
+ *
+ * Alternative RecIn function, used when the RTP data has already been
+ * parsed into an RTP info struct (WebRtcNetEQ_RTPInfo).
+ *
+ * Input:
+ *		- inst	            : NetEQ instance
+ *		- rtpInfo		    : Pointer to RTP info
+ *		- payloadPtr        : Pointer to the RTP payload (first byte after header)
+ *      - payloadLenBytes   : Length (in bytes) of the payload in payloadPtr
+ *      - timeRec           : Receive time (in timestamps of the used codec)
+ *
+ * Return value			    :  0 - Ok
+ *                            -1 - Error
+ */
+int WebRtcNetEQ_RecInRTPStruct(void *inst, WebRtcNetEQ_RTPInfo *rtpInfo,
+                               const WebRtc_UWord8 *payloadPtr, WebRtc_Word16 payloadLenBytes,
+                               WebRtc_UWord32 timeRec);
+
+/****************************************************************************
+ * WebRtcNetEQ_GetMasterSlaveInfoSize(...)
+ *
+ * Get size in bytes for master/slave struct msInfo used in 
+ * WebRtcNetEQ_RecOutMasterSlave.
+ *
+ * Return value			    :  Struct size in bytes
+ * 
+ */
+
+int WebRtcNetEQ_GetMasterSlaveInfoSize();
+
+/****************************************************************************
+ * WebRtcNetEQ_RecOutMasterSlave(...)
+ *
+ * RecOut function for running several NetEQ instances in master/slave mode.
+ * One master can be used to control several slaves. 
+ * The MasterSlaveInfo struct must be allocated outside NetEQ.
+ * Use function WebRtcNetEQ_GetMasterSlaveInfoSize to get the size needed.
+ *
+ * Input:
+ *      - inst          : NetEQ instance
+ *      - isMaster      : Non-zero indicates that this is the master channel
+ *      - msInfo        : (slave only) Information from master
+ *
+ * Output:
+ *		- inst	        : Updated NetEQ instance
+ *      - pw16_outData  : Pointer to vector where output should be written
+ *      - pw16_len      : Pointer to variable where output length is returned
+ *      - msInfo        : (master only) Information to slave(s)
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_RecOutMasterSlave(void *inst, WebRtc_Word16 *pw16_outData,
+                                  WebRtc_Word16 *pw16_len, void *msInfo,
+                                  WebRtc_Word16 isMaster);
+
+typedef struct
+{
+    uint16_t currentBufferSize;         /* Current jitter buffer size in ms. */
+    uint16_t preferredBufferSize;       /* Preferred buffer size in ms. */
+    uint16_t jitterPeaksFound;          /* 1 if adding extra delay due to peaky
+                                         * jitter; 0 otherwise. */
+    uint16_t currentPacketLossRate;     /* Loss rate (network + late) (Q14). */
+    uint16_t currentDiscardRate;        /* Late loss rate (Q14). */
+    uint16_t currentExpandRate;         /* Fraction (of original stream) of
+                                         * synthesized speech inserted through
+                                         * expansion (in Q14). */
+    uint16_t currentPreemptiveRate;     /* Fraction of data inserted through
+                                         * pre-emptive expansion (in Q14). */
+    uint16_t currentAccelerateRate;     /* Fraction of data removed through
+                                         * acceleration (in Q14). */
+    int32_t clockDriftPPM;              /* Average clock-drift in parts-per-
+                                         * million (positive or negative). */
+} WebRtcNetEQ_NetworkStatistics;
+
+/*
+ * Get the "in-call" statistics from NetEQ.
+ * The statistics are reset after the query.
+ */
+int WebRtcNetEQ_GetNetworkStatistics(void *inst, WebRtcNetEQ_NetworkStatistics *stats);
+
+/*
+ * Get the raw waiting times for decoded frames. The function writes the last
+ * recorded waiting times (from frame arrival to frame decoding) to the memory
+ * pointed to by waitingTimeMs. The number of elements written is in the return
+ * value. No more than maxLength elements are written. Statistics are reset on
+ * each query.
+ */
+int WebRtcNetEQ_GetRawFrameWaitingTimes(void *inst,
+                                        int max_length,
+                                        int* waiting_times_ms);
+
+/***********************************************/
+/* Functions for post-decode VAD functionality */
+/***********************************************/
+
+/* NetEQ must be compiled with the flag NETEQ_VAD enabled for these functions to work. */
+
+/*
+ * VAD function pointer types
+ *
+ * These function pointers match the definitions of webrtc VAD functions WebRtcVad_Init,
+ * WebRtcVad_set_mode and WebRtcVad_Process, respectively, all found in webrtc_vad.h.
+ */
+typedef int (*WebRtcNetEQ_VADInitFunction)(void *VAD_inst);
+typedef int (*WebRtcNetEQ_VADSetmodeFunction)(void *VAD_inst, int mode);
+typedef int (*WebRtcNetEQ_VADFunction)(void *VAD_inst, int fs,
+    WebRtc_Word16 *frame, int frameLen);
+
+/****************************************************************************
+ * WebRtcNetEQ_SetVADInstance(...)
+ *
+ * Provide a pointer to an allocated VAD instance. If function is never 
+ * called or it is called with NULL pointer as VAD_inst, the post-decode
+ * VAD functionality is disabled. Also provide pointers to init, setmode
+ * and VAD functions. These are typically pointers to WebRtcVad_Init,
+ * WebRtcVad_set_mode and WebRtcVad_Process, respectively, all found in the
+ * interface file webrtc_vad.h.
+ *
+ * Input:
+ *      - NetEQ_inst        : NetEQ instance
+ *		- VADinst		    : VAD instance
+ *		- initFunction	    : Pointer to VAD init function
+ *		- setmodeFunction   : Pointer to VAD setmode function
+ *		- VADfunction	    : Pointer to VAD function
+ *
+ * Output:
+ *		- NetEQ_inst	    : Updated NetEQ instance
+ *
+ * Return value			    :  0 - Ok
+ *						      -1 - Error
+ */
+
+int WebRtcNetEQ_SetVADInstance(void *NetEQ_inst, void *VAD_inst,
+                               WebRtcNetEQ_VADInitFunction initFunction,
+                               WebRtcNetEQ_VADSetmodeFunction setmodeFunction,
+                               WebRtcNetEQ_VADFunction VADFunction);
+
+/****************************************************************************
+ * WebRtcNetEQ_SetVADMode(...)
+ *
+ * Pass an aggressiveness mode parameter to the post-decode VAD instance.
+ * If this function is never called, mode 0 (quality mode) is used as default.
+ *
+ * Input:
+ *      - inst          : NetEQ instance
+ *		- mode  		: mode parameter (same range as WebRtc VAD mode)
+ *
+ * Output:
+ *		- inst	        : Updated NetEQ instance
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_SetVADMode(void *NetEQ_inst, int mode);
+
+/****************************************************************************
+ * WebRtcNetEQ_RecOutNoDecode(...)
+ *
+ * Special RecOut that does not do any decoding.
+ *
+ * Input:
+ *      - inst          : NetEQ instance
+ *
+ * Output:
+ *		- inst	        : Updated NetEQ instance
+ *      - pw16_outData  : Pointer to vector where output should be written
+ *      - pw16_len      : Pointer to variable where output length is returned
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_RecOutNoDecode(void *inst, WebRtc_Word16 *pw16_outData,
+                               WebRtc_Word16 *pw16_len);
+
+/****************************************************************************
+ * WebRtcNetEQ_FlushBuffers(...)
+ *
+ * Flush packet and speech buffers. Does not reset codec database or 
+ * jitter statistics.
+ *
+ * Input:
+ *      - inst          : NetEQ instance
+ *
+ * Output:
+ *		- inst	        : Updated NetEQ instance
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_FlushBuffers(void *inst);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/src/modules/audio_coding/neteq/mcu.h b/src/modules/audio_coding/neteq/mcu.h
new file mode 100644
index 0000000..499684a
--- /dev/null
+++ b/src/modules/audio_coding/neteq/mcu.h
@@ -0,0 +1,284 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * MCU struct and functions related to the MCU side operations.
+ */
+
+#ifndef MCU_H
+#define MCU_H
+
+#include "typedefs.h"
+
+#include "codec_db.h"
+#include "rtcp.h"
+#include "packet_buffer.h"
+#include "buffer_stats.h"
+#include "neteq_statistics.h"
+
+#ifdef NETEQ_ATEVENT_DECODE
+#include "dtmf_buffer.h"
+#endif
+
+#define MAX_ONE_DESC 5 /* cannot do more than this many consecutive one-descriptor decodings */
+#define MAX_LOSS_REPORT_PERIOD 60   /* number of seconds between auto-reset */
+
+enum TsScaling
+{
+    kTSnoScaling = 0,
+    kTSscalingTwo,
+    kTSscalingTwoThirds,
+    kTSscalingFourThirds
+};
+
+enum { kLenWaitingTimes = 100 };
+
+typedef struct
+{
+
+    WebRtc_Word16 current_Codec;
+    WebRtc_Word16 current_Payload;
+    WebRtc_UWord32 timeStamp; /* Next timestamp that should be played */
+    WebRtc_Word16 millisecondsPerCall;
+    WebRtc_UWord16 timestampsPerCall; /* Output chunk size */
+    WebRtc_UWord16 fs;
+    WebRtc_UWord32 ssrc; /* Current ssrc */
+    WebRtc_Word16 new_codec;
+    WebRtc_Word16 first_packet;
+
+    /* MCU/DSP Communication layer */
+    WebRtc_Word16 *pw16_readAddress;
+    WebRtc_Word16 *pw16_writeAddress;
+    void *main_inst;
+
+    CodecDbInst_t codec_DB_inst; /* Information about all the codecs, i.e. which
+     functions to use and which codpoints that
+     have been assigned */
+    SplitInfo_t PayloadSplit_inst; /* Information about how the current codec
+     payload should be splitted */
+    WebRtcNetEQ_RTCP_t RTCP_inst; /* RTCP statistics */
+    PacketBuf_t PacketBuffer_inst; /* The packet buffer */
+    BufstatsInst_t BufferStat_inst; /* Statistics that are used to make decision
+     for what the DSP should perform */
+#ifdef NETEQ_ATEVENT_DECODE
+    dtmf_inst_t DTMF_inst;
+#endif
+    int NoOfExpandCalls;
+    WebRtc_Word16 AVT_PlayoutOn;
+    enum WebRtcNetEQPlayoutMode NetEqPlayoutMode;
+
+    WebRtc_Word16 one_desc; /* Number of times running on one desc */
+
+    WebRtc_UWord32 lostTS; /* Number of timestamps lost */
+    WebRtc_UWord32 lastReportTS; /* Timestamp elapsed since last report was given */
+
+    int waiting_times[kLenWaitingTimes];  /* Waiting time statistics storage. */
+    int len_waiting_times;
+    int next_waiting_time_index;
+
+    WebRtc_UWord32 externalTS;
+    WebRtc_UWord32 internalTS;
+    WebRtc_Word16 TSscalingInitialized;
+    enum TsScaling scalingFactor;
+
+#ifdef NETEQ_STEREO
+    int usingStereo;
+#endif
+
+} MCUInst_t;
+
+/****************************************************************************
+ * WebRtcNetEQ_McuReset(...)
+ *
+ * Reset the MCU instance.
+ *
+ * Input:
+ *      - inst          : MCU instance
+ *
+ * Return value         :  0 - Ok
+ *                        <0 - Error
+ */
+int WebRtcNetEQ_McuReset(MCUInst_t *inst);
+
+/****************************************************************************
+ * WebRtcNetEQ_ResetMcuInCallStats(...)
+ *
+ * Reset MCU-side statistics variables for the in-call statistics.
+ *
+ * Input:
+ *      - inst          : MCU instance
+ *
+ * Return value         :  0 - Ok
+ *                        <0 - Error
+ */
+int WebRtcNetEQ_ResetMcuInCallStats(MCUInst_t *inst);
+
+/****************************************************************************
+ * WebRtcNetEQ_ResetWaitingTimeStats(...)
+ *
+ * Reset waiting-time statistics.
+ *
+ * Input:
+ *      - inst          : MCU instance.
+ *
+ * Return value         : n/a
+ */
+void WebRtcNetEQ_ResetWaitingTimeStats(MCUInst_t *inst);
+
+/****************************************************************************
+ * WebRtcNetEQ_LogWaitingTime(...)
+ *
+ * Log waiting-time to the statistics.
+ *
+ * Input:
+ *      - inst          : MCU instance.
+ *      - waiting_time  : Waiting time in "RecOut calls" (i.e., 1 call = 10 ms).
+ *
+ * Return value         : n/a
+ */
+void WebRtcNetEQ_StoreWaitingTime(MCUInst_t *inst, int waiting_time);
+
+/****************************************************************************
+ * WebRtcNetEQ_ResetMcuJitterStat(...)
+ *
+ * Reset MCU-side statistics variables for the post-call statistics.
+ *
+ * Input:
+ *      - inst          : MCU instance
+ *
+ * Return value         :  0 - Ok
+ *                        <0 - Error
+ */
+int WebRtcNetEQ_ResetMcuJitterStat(MCUInst_t *inst);
+
+/****************************************************************************
+ * WebRtcNetEQ_McuAddressInit(...)
+ *
+ * Initializes MCU with read address and write address.
+ *
+ * Input:
+ *      - inst              : MCU instance
+ *      - Data2McuAddress   : Pointer to MCU address
+ *      - Data2DspAddress   : Pointer to DSP address
+ *      - main_inst         : Pointer to NetEQ main instance
+ *
+ * Return value         :  0 - Ok
+ *                        <0 - Error
+ */
+int WebRtcNetEQ_McuAddressInit(MCUInst_t *inst, void * Data2McuAddress,
+                               void * Data2DspAddress, void *main_inst);
+
+/****************************************************************************
+ * WebRtcNetEQ_McuSetFs(...)
+ *
+ * Initializes MCU with read address and write address.
+ *
+ * Input:
+ *      - inst          : MCU instance
+ *      - fs_hz         : Sample rate in Hz -- 8000, 16000, 32000, (48000)
+ *
+ * Return value         :  0 - Ok
+ *                        <0 - Error
+ */
+int WebRtcNetEQ_McuSetFs(MCUInst_t *inst, WebRtc_UWord16 fs_hz);
+
+/****************************************************************************
+ * WebRtcNetEQ_SignalMcu(...)
+ *
+ * Signal the MCU that data is available and ask for a RecOut decision.
+ *
+ * Input:
+ *      - inst          : MCU instance
+ *
+ * Return value         :  0 - Ok
+ *                        <0 - Error
+ */
+int WebRtcNetEQ_SignalMcu(MCUInst_t *inst);
+
+/****************************************************************************
+ * WebRtcNetEQ_RecInInternal(...)
+ *
+ * This function inserts a packet into the jitter buffer.
+ *
+ * Input:
+ *		- MCU_inst		: MCU instance
+ *		- RTPpacket	    : The RTP packet, parsed into NetEQ's internal RTP struct
+ *		- uw32_timeRec	: Time stamp for the arrival of the packet (not RTP timestamp)
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_RecInInternal(MCUInst_t *MCU_inst, RTPPacket_t *RTPpacket,
+                              WebRtc_UWord32 uw32_timeRec);
+
+/****************************************************************************
+ * WebRtcNetEQ_RecInInternal(...)
+ *
+ * Split the packet according to split_inst and inserts the parts into
+ * Buffer_inst.
+ *
+ * Input:
+ *      - MCU_inst      : MCU instance
+ *      - RTPpacket     : The RTP packet, parsed into NetEQ's internal RTP struct
+ *      - uw32_timeRec  : Time stamp for the arrival of the packet (not RTP timestamp)
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+int WebRtcNetEQ_SplitAndInsertPayload(RTPPacket_t *packet, PacketBuf_t *Buffer_inst,
+                                      SplitInfo_t *split_inst, WebRtc_Word16 *flushed);
+
+/****************************************************************************
+ * WebRtcNetEQ_GetTimestampScaling(...)
+ *
+ * Update information about timestamp scaling for a payload type
+ * in MCU_inst->scalingFactor.
+ *
+ * Input:
+ *      - MCU_inst          : MCU instance
+ *      - rtpPayloadType    : RTP payload number
+ *
+ * Return value             :  0 - Ok
+ *                            -1 - Error
+ */
+
+int WebRtcNetEQ_GetTimestampScaling(MCUInst_t *MCU_inst, int rtpPayloadType);
+
+/****************************************************************************
+ * WebRtcNetEQ_ScaleTimestampExternalToInternal(...)
+ *
+ * Convert from external to internal timestamp using current scaling info.
+ *
+ * Input:
+ *      - MCU_inst      : MCU instance
+ *      - externalTS    : External timestamp
+ *
+ * Return value         : Internal timestamp
+ */
+
+WebRtc_UWord32 WebRtcNetEQ_ScaleTimestampExternalToInternal(const MCUInst_t *MCU_inst,
+                                                            WebRtc_UWord32 externalTS);
+
+/****************************************************************************
+ * WebRtcNetEQ_ScaleTimestampInternalToExternal(...)
+ *
+ * Convert from external to internal timestamp using current scaling info.
+ *
+ * Input:
+ *      - MCU_inst      : MCU instance
+ *      - externalTS    : Internal timestamp
+ *
+ * Return value         : External timestamp
+ */
+
+WebRtc_UWord32 WebRtcNetEQ_ScaleTimestampInternalToExternal(const MCUInst_t *MCU_inst,
+                                                            WebRtc_UWord32 internalTS);
+#endif
diff --git a/src/modules/audio_coding/neteq/mcu_address_init.c b/src/modules/audio_coding/neteq/mcu_address_init.c
new file mode 100644
index 0000000..0306a85
--- /dev/null
+++ b/src/modules/audio_coding/neteq/mcu_address_init.c
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "mcu.h"
+
+#include <string.h> /* to define NULL */
+
+/*
+ * Initializes MCU with read address and write address
+ */
+int WebRtcNetEQ_McuAddressInit(MCUInst_t *inst, void * Data2McuAddress,
+                               void * Data2DspAddress, void *main_inst)
+{
+
+    inst->pw16_readAddress = (WebRtc_Word16*) Data2McuAddress;
+    inst->pw16_writeAddress = (WebRtc_Word16*) Data2DspAddress;
+    inst->main_inst = main_inst;
+
+    inst->millisecondsPerCall = 10;
+
+    /* Do expansions in the beginning */
+    if (inst->pw16_writeAddress != NULL) inst->pw16_writeAddress[0] = DSP_INSTR_EXPAND;
+
+    return (0);
+}
+
diff --git a/src/modules/audio_coding/neteq/mcu_dsp_common.c b/src/modules/audio_coding/neteq/mcu_dsp_common.c
new file mode 100644
index 0000000..13025d4
--- /dev/null
+++ b/src/modules/audio_coding/neteq/mcu_dsp_common.c
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Communication between MCU and DSP sides.
+ */
+
+#include "mcu_dsp_common.h"
+
+#include <string.h>
+
+/* Initialize instances with read and write address */
+int WebRtcNetEQ_DSPinit(MainInst_t *inst)
+{
+    int res = 0;
+
+    res |= WebRtcNetEQ_AddressInit(&inst->DSPinst, NULL, NULL, inst);
+    res |= WebRtcNetEQ_McuAddressInit(&inst->MCUinst, NULL, NULL, inst);
+
+    return res;
+
+}
+
+/* The DSP side will call this function to interrupt the MCU side */
+int WebRtcNetEQ_DSP2MCUinterrupt(MainInst_t *inst, WebRtc_Word16 *pw16_shared_mem)
+{
+    inst->MCUinst.pw16_readAddress = pw16_shared_mem;
+    inst->MCUinst.pw16_writeAddress = pw16_shared_mem;
+    return WebRtcNetEQ_SignalMcu(&inst->MCUinst);
+}
diff --git a/src/modules/audio_coding/neteq/mcu_dsp_common.h b/src/modules/audio_coding/neteq/mcu_dsp_common.h
new file mode 100644
index 0000000..e3f4213
--- /dev/null
+++ b/src/modules/audio_coding/neteq/mcu_dsp_common.h
@@ -0,0 +1,61 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * The main NetEQ instance, which is where the DSP and MCU sides join.
+ */
+
+#ifndef MCU_DSP_COMMON_H
+#define MCU_DSP_COMMON_H
+
+#include "typedefs.h"
+
+#include "dsp.h"
+#include "mcu.h"
+
+/* Define size of shared memory area. */
+#if defined(NETEQ_48KHZ_WIDEBAND)
+    #define SHARED_MEM_SIZE (6*640)
+#elif defined(NETEQ_32KHZ_WIDEBAND)
+    #define SHARED_MEM_SIZE (4*640)
+#elif defined(NETEQ_WIDEBAND)
+    #define SHARED_MEM_SIZE (2*640)
+#else
+    #define SHARED_MEM_SIZE 640
+#endif
+
+/* Struct to hold the NetEQ instance */
+typedef struct
+{
+    DSPInst_t DSPinst; /* DSP part of the NetEQ instance */
+    MCUInst_t MCUinst; /* MCU part of the NetEQ instance */
+    WebRtc_Word16 ErrorCode; /* Store last error code */
+#ifdef NETEQ_STEREO
+    WebRtc_Word16 masterSlave; /* 0 = not set, 1 = master, 2 = slave */
+#endif /* NETEQ_STEREO */
+} MainInst_t;
+
+/* Struct used for communication between DSP and MCU sides of NetEQ */
+typedef struct
+{
+    WebRtc_UWord32 playedOutTS; /* Timestamp position at end of DSP data */
+    WebRtc_UWord16 samplesLeft; /* Number of samples stored */
+    WebRtc_Word16 MD; /* Multiple description codec information */
+    WebRtc_Word16 lastMode; /* Latest mode of NetEQ playout */
+    WebRtc_Word16 frameLen; /* Frame length of previously decoded packet */
+} DSP2MCU_info_t;
+
+/* Initialize instances with read and write address */
+int WebRtcNetEQ_DSPinit(MainInst_t *inst);
+
+/* The DSP side will call this function to interrupt the MCU side */
+int WebRtcNetEQ_DSP2MCUinterrupt(MainInst_t *inst, WebRtc_Word16 *pw16_shared_mem);
+
+#endif
diff --git a/src/modules/audio_coding/neteq/mcu_reset.c b/src/modules/audio_coding/neteq/mcu_reset.c
new file mode 100644
index 0000000..3aae4ce
--- /dev/null
+++ b/src/modules/audio_coding/neteq/mcu_reset.c
@@ -0,0 +1,128 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Reset MCU side data.
+ */
+
+#include "mcu.h"
+
+#include <assert.h>
+#include <string.h>
+
+#include "automode.h"
+
+int WebRtcNetEQ_McuReset(MCUInst_t *inst)
+{
+
+#ifdef NETEQ_ATEVENT_DECODE
+    int ok;
+#endif
+
+    /* MCU/DSP Communication layer */
+    inst->pw16_readAddress = NULL;
+    inst->pw16_writeAddress = NULL;
+    inst->main_inst = NULL;
+    inst->one_desc = 0;
+    inst->BufferStat_inst.Automode_inst.extraDelayMs = 0;
+    inst->NetEqPlayoutMode = kPlayoutOn;
+
+    WebRtcNetEQ_DbReset(&inst->codec_DB_inst);
+    memset(&inst->PayloadSplit_inst, 0, sizeof(SplitInfo_t));
+
+    /* Clear the Packet buffer and the pointer to memory storage */
+    WebRtcNetEQ_PacketBufferFlush(&inst->PacketBuffer_inst);
+    inst->PacketBuffer_inst.memorySizeW16 = 0;
+    inst->PacketBuffer_inst.maxInsertPositions = 0;
+
+    /* Clear the decision and delay history */
+    memset(&inst->BufferStat_inst, 0, sizeof(BufstatsInst_t));
+#ifdef NETEQ_ATEVENT_DECODE
+    ok = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 8000, 560);
+    if (ok != 0)
+    {
+        return ok;
+    }
+#endif
+    inst->NoOfExpandCalls = 0;
+    inst->current_Codec = -1;
+    inst->current_Payload = -1;
+
+    inst->millisecondsPerCall = 10;
+    inst->timestampsPerCall = inst->millisecondsPerCall * 8;
+    inst->fs = 8000;
+    inst->first_packet = 1;
+
+    WebRtcNetEQ_ResetMcuInCallStats(inst);
+
+    WebRtcNetEQ_ResetWaitingTimeStats(inst);
+
+    WebRtcNetEQ_ResetMcuJitterStat(inst);
+
+    WebRtcNetEQ_ResetAutomode(&(inst->BufferStat_inst.Automode_inst),
+        inst->PacketBuffer_inst.maxInsertPositions);
+
+    return 0;
+}
+
+/*
+ * Reset MCU-side statistics variables for the in-call statistics.
+ */
+
+int WebRtcNetEQ_ResetMcuInCallStats(MCUInst_t *inst)
+{
+    inst->lostTS = 0;
+    inst->lastReportTS = 0;
+    inst->PacketBuffer_inst.discardedPackets = 0;
+
+    return 0;
+}
+
+/*
+ * Reset waiting-time statistics.
+ */
+
+void WebRtcNetEQ_ResetWaitingTimeStats(MCUInst_t *inst) {
+  memset(inst->waiting_times, 0,
+         kLenWaitingTimes * sizeof(inst->waiting_times[0]));
+  inst->len_waiting_times = 0;
+  inst->next_waiting_time_index = 0;
+}
+
+/*
+ * Store waiting-time in the statistics.
+ */
+
+void WebRtcNetEQ_StoreWaitingTime(MCUInst_t *inst, int waiting_time) {
+  assert(inst->next_waiting_time_index < kLenWaitingTimes);
+  inst->waiting_times[inst->next_waiting_time_index] = waiting_time;
+  inst->next_waiting_time_index++;
+  if (inst->next_waiting_time_index >= kLenWaitingTimes) {
+    inst->next_waiting_time_index = 0;
+  }
+  if (inst->len_waiting_times < kLenWaitingTimes) {
+    inst->len_waiting_times++;
+  }
+}
+
+/*
+ * Reset all MCU-side statistics variables for the post-call statistics.
+ */
+
+int WebRtcNetEQ_ResetMcuJitterStat(MCUInst_t *inst)
+{
+    inst->BufferStat_inst.Automode_inst.countIAT500ms = 0;
+    inst->BufferStat_inst.Automode_inst.countIAT1000ms = 0;
+    inst->BufferStat_inst.Automode_inst.countIAT2000ms = 0;
+    inst->BufferStat_inst.Automode_inst.longestIATms = 0;
+
+    return 0;
+}
+
diff --git a/src/modules/audio_coding/neteq/merge.c b/src/modules/audio_coding/neteq/merge.c
new file mode 100644
index 0000000..5f020a9
--- /dev/null
+++ b/src/modules/audio_coding/neteq/merge.c
@@ -0,0 +1,564 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This is the function to merge a new packet with expanded data after a packet loss.
+ */
+
+#include "dsp.h"
+
+#include "signal_processing_library.h"
+
+#include "dsp_helpfunctions.h"
+#include "neteq_error_codes.h"
+
+/****************************************************************************
+ * WebRtcNetEQ_Merge(...)
+ *
+ * This function...
+ *
+ * Input:
+ *      - inst          : NetEQ DSP instance
+ *      - scratchPtr    : Pointer to scratch vector.
+ *      - decoded       : Pointer to new decoded speech.
+ *      - len           : Number of samples in pw16_decoded.
+ *
+ *
+ * Output:
+ *      - inst          : Updated user information
+ *      - outData       : Pointer to a memory space where the output data
+ *                        should be stored
+ *      - pw16_len      : Number of samples written to pw16_outData
+ *
+ * Return value         :  0 - Ok
+ *                        <0 - Error
+ */
+
+/* Scratch usage:
+
+ Type           Name                    size            startpos        endpos
+ WebRtc_Word16  pw16_expanded           210*fs/8000     0               209*fs/8000
+ WebRtc_Word16  pw16_expandedLB         100             210*fs/8000     99+210*fs/8000
+ WebRtc_Word16  pw16_decodedLB          40              100+210*fs/8000 139+210*fs/8000
+ WebRtc_Word32  pw32_corr               2*60            140+210*fs/8000 260+210*fs/8000
+ WebRtc_Word16  pw16_corrVec            68              210*fs/8000     67+210*fs/8000
+
+ [gap in scratch vector]
+
+ func           WebRtcNetEQ_Expand      40+370*fs/8000  126*fs/8000     39+496*fs/8000
+
+ Total:  40+496*fs/8000
+ */
+
+#define SCRATCH_pw16_expanded          0
+#if (defined(NETEQ_48KHZ_WIDEBAND)) 
+#define SCRATCH_pw16_expandedLB        1260
+#define SCRATCH_pw16_decodedLB         1360
+#define SCRATCH_pw32_corr              1400
+#define SCRATCH_pw16_corrVec           1260
+#define SCRATCH_NETEQ_EXPAND            756
+#elif (defined(NETEQ_32KHZ_WIDEBAND)) 
+#define SCRATCH_pw16_expandedLB        840
+#define SCRATCH_pw16_decodedLB         940
+#define SCRATCH_pw32_corr              980
+#define SCRATCH_pw16_corrVec           840
+#define SCRATCH_NETEQ_EXPAND            504
+#elif (defined(NETEQ_WIDEBAND)) 
+#define SCRATCH_pw16_expandedLB        420
+#define SCRATCH_pw16_decodedLB         520
+#define SCRATCH_pw32_corr              560
+#define SCRATCH_pw16_corrVec           420
+#define SCRATCH_NETEQ_EXPAND            252
+#else    /* NB */
+#define SCRATCH_pw16_expandedLB        210
+#define SCRATCH_pw16_decodedLB         310
+#define SCRATCH_pw32_corr              350
+#define SCRATCH_pw16_corrVec           210
+#define SCRATCH_NETEQ_EXPAND            126
+#endif
+
+int WebRtcNetEQ_Merge(DSPInst_t *inst,
+#ifdef SCRATCH
+                      WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                      WebRtc_Word16 *pw16_decoded, int len, WebRtc_Word16 *pw16_outData,
+                      WebRtc_Word16 *pw16_len)
+{
+
+    WebRtc_Word16 fs_mult;
+    WebRtc_Word16 fs_shift;
+    WebRtc_Word32 w32_En_new_frame, w32_En_old_frame;
+    WebRtc_Word16 w16_expmax, w16_newmax;
+    WebRtc_Word16 w16_tmp, w16_tmp2;
+    WebRtc_Word32 w32_tmp;
+#ifdef SCRATCH
+    WebRtc_Word16 *pw16_expanded = pw16_scratchPtr + SCRATCH_pw16_expanded;
+    WebRtc_Word16 *pw16_expandedLB = pw16_scratchPtr + SCRATCH_pw16_expandedLB;
+    WebRtc_Word16 *pw16_decodedLB = pw16_scratchPtr + SCRATCH_pw16_decodedLB;
+    WebRtc_Word32 *pw32_corr = (WebRtc_Word32*) (pw16_scratchPtr + SCRATCH_pw32_corr);
+    WebRtc_Word16 *pw16_corrVec = pw16_scratchPtr + SCRATCH_pw16_corrVec;
+#else
+    WebRtc_Word16 pw16_expanded[(125+80+5)*FSMULT];
+    WebRtc_Word16 pw16_expandedLB[100];
+    WebRtc_Word16 pw16_decodedLB[40];
+    WebRtc_Word32 pw32_corr[60];
+    WebRtc_Word16 pw16_corrVec[4+60+4];
+#endif
+    WebRtc_Word16 *pw16_corr = &pw16_corrVec[4];
+    WebRtc_Word16 w16_stopPos = 0, w16_bestIndex, w16_interpLen;
+    WebRtc_Word16 w16_bestVal; /* bestVal is dummy */
+    WebRtc_Word16 w16_startfact, w16_inc;
+    WebRtc_Word16 w16_expandedLen;
+    WebRtc_Word16 w16_startPos;
+    WebRtc_Word16 w16_expLen, w16_newLen = 0;
+    WebRtc_Word16 *pw16_decodedOut;
+    WebRtc_Word16 w16_muted;
+
+    int w16_decodedLen = len;
+
+#ifdef NETEQ_STEREO
+    MasterSlaveInfo *msInfo = inst->msInfo;
+#endif
+
+    fs_mult = WebRtcSpl_DivW32W16ResW16(inst->fs, 8000);
+    fs_shift = 30 - WebRtcSpl_NormW32(fs_mult); /* Note that this is not "exact" for 48kHz */
+
+    /*************************************
+     * Generate data to merge with
+     *************************************/
+    /*
+     * Check how much data that is left since earlier
+     * (at least there should be the overlap)...
+     */
+    w16_startPos = inst->endPosition - inst->curPosition;
+    /* Get one extra expansion to merge and overlap with */
+    inst->ExpandInst.w16_stopMuting = 1;
+    inst->ExpandInst.w16_lagsDirection = 1; /* make sure we get the "optimal" lag */
+    inst->ExpandInst.w16_lagsPosition = -1; /* out of the 3 possible ones */
+    w16_expandedLen = 0; /* Does not fill any function currently */
+
+    if (w16_startPos >= 210 * FSMULT)
+    {
+        /*
+         * The number of samples available in the sync buffer is more than what fits in
+         * pw16_expanded.Keep the first 210*FSMULT samples, but shift them towards the end of
+         * the buffer. This is ok, since all of the buffer will be expand data anyway, so as
+         * long as the beginning is left untouched, we're fine.
+         */
+
+        w16_tmp = w16_startPos - 210 * FSMULT; /* length difference */
+
+        WEBRTC_SPL_MEMMOVE_W16(&inst->speechBuffer[inst->curPosition+w16_tmp] ,
+                               &inst->speechBuffer[inst->curPosition], 210*FSMULT);
+
+        inst->curPosition += w16_tmp; /* move start position of sync buffer accordingly */
+        w16_startPos = 210 * FSMULT; /* this is the truncated length */
+    }
+
+    WebRtcNetEQ_Expand(inst,
+#ifdef SCRATCH
+        pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
+#endif
+        pw16_expanded, /* let Expand write to beginning of pw16_expanded to avoid overflow */
+        &w16_newLen, 0);
+
+    /*
+     * Now shift the data in pw16_expanded to where it belongs.
+     * Truncate all that ends up outside the vector.
+     */
+
+    WEBRTC_SPL_MEMMOVE_W16(&pw16_expanded[w16_startPos], pw16_expanded,
+                           WEBRTC_SPL_MIN(w16_newLen,
+                               WEBRTC_SPL_MAX(210*FSMULT - w16_startPos, 0) ) );
+
+    inst->ExpandInst.w16_stopMuting = 0;
+
+    /* Copy what is left since earlier into the expanded vector */
+
+    WEBRTC_SPL_MEMCPY_W16(pw16_expanded, &inst->speechBuffer[inst->curPosition], w16_startPos);
+
+    /*
+     * Do "ugly" copy and paste from the expanded in order to generate more data
+     * to correlate (but not interpolate) with.
+     */
+    w16_expandedLen = (120 + 80 + 2) * fs_mult;
+    w16_expLen = w16_startPos + w16_newLen;
+
+    if (w16_expLen < w16_expandedLen)
+    {
+        while ((w16_expLen + w16_newLen) < w16_expandedLen)
+        {
+            WEBRTC_SPL_MEMCPY_W16(&pw16_expanded[w16_expLen], &pw16_expanded[w16_startPos],
+                w16_newLen);
+            w16_expLen += w16_newLen;
+        }
+
+        /* Copy last part (fraction of a whole expansion) */
+
+        WEBRTC_SPL_MEMCPY_W16(&pw16_expanded[w16_expLen], &pw16_expanded[w16_startPos],
+                              (w16_expandedLen-w16_expLen));
+    }
+    w16_expLen = w16_expandedLen;
+
+    /* Adjust muting factor (main muting factor times expand muting factor) */
+    inst->w16_muteFactor
+        = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(inst->w16_muteFactor,
+            inst->ExpandInst.w16_expandMuteFactor, 14);
+
+    /* Adjust muting factor if new vector is more or less of the BGN energy */
+    len = WEBRTC_SPL_MIN(64*fs_mult, w16_decodedLen);
+    w16_expmax = WebRtcSpl_MaxAbsValueW16(pw16_expanded, (WebRtc_Word16) len);
+    w16_newmax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (WebRtc_Word16) len);
+
+    /* Calculate energy of old data */
+    w16_tmp = 6 + fs_shift - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_expmax, w16_expmax));
+    w16_tmp = WEBRTC_SPL_MAX(w16_tmp,0);
+    w32_En_old_frame = WebRtcNetEQ_DotW16W16(pw16_expanded, pw16_expanded, len, w16_tmp);
+
+    /* Calculate energy of new data */
+    w16_tmp2 = 6 + fs_shift - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_newmax, w16_newmax));
+    w16_tmp2 = WEBRTC_SPL_MAX(w16_tmp2,0);
+    w32_En_new_frame = WebRtcNetEQ_DotW16W16(pw16_decoded, pw16_decoded, len, w16_tmp2);
+
+    /* Align to same Q-domain */
+    if (w16_tmp2 > w16_tmp)
+    {
+        w32_En_old_frame = WEBRTC_SPL_RSHIFT_W32(w32_En_old_frame, (w16_tmp2-w16_tmp));
+    }
+    else
+    {
+        w32_En_new_frame = WEBRTC_SPL_RSHIFT_W32(w32_En_new_frame, (w16_tmp-w16_tmp2));
+    }
+
+    /* Calculate muting factor to use for new frame */
+    if (w32_En_new_frame > w32_En_old_frame)
+    {
+        /* Normalize w32_En_new_frame to 14 bits */
+        w16_tmp = WebRtcSpl_NormW32(w32_En_new_frame) - 17;
+        w32_En_new_frame = WEBRTC_SPL_SHIFT_W32(w32_En_new_frame, w16_tmp);
+
+        /*
+         * Put w32_En_old_frame in a domain 14 higher, so that
+         * w32_En_old_frame/w32_En_new_frame is in Q14
+         */
+        w16_tmp = w16_tmp + 14;
+        w32_En_old_frame = WEBRTC_SPL_SHIFT_W32(w32_En_old_frame, w16_tmp);
+        w16_tmp
+            = WebRtcSpl_DivW32W16ResW16(w32_En_old_frame, (WebRtc_Word16) w32_En_new_frame);
+        /* Calculate sqrt(w32_En_old_frame/w32_En_new_frame) in Q14 */
+        w16_muted = (WebRtc_Word16) WebRtcSpl_SqrtFloor(
+            WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)w16_tmp,14));
+    }
+    else
+    {
+        w16_muted = 16384; /* Set = 1.0 when old frame has higher energy than new */
+    }
+
+    /* Set the raise the continued muting factor w16_muted if w16_muteFactor is lower */
+    if (w16_muted > inst->w16_muteFactor)
+    {
+        inst->w16_muteFactor = WEBRTC_SPL_MIN(w16_muted, 16384);
+    }
+
+#ifdef NETEQ_STEREO
+
+    /* Sanity for msInfo */
+    if (msInfo == NULL)
+    {
+        /* this should not happen here */
+        return MASTER_SLAVE_ERROR;
+    }
+
+    /* do not downsample and calculate correlations for slave instance(s) */
+    if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
+    {
+#endif
+
+        /*********************************************
+         * Downsample to 4kHz and find best overlap
+         *********************************************/
+
+        /* Downsample to 4 kHz */
+        if (inst->fs == 8000)
+        {
+            WebRtcSpl_DownsampleFast(&pw16_expanded[2], (WebRtc_Word16) (w16_expandedLen - 2),
+                pw16_expandedLB, (WebRtc_Word16) (100),
+                (WebRtc_Word16*) WebRtcNetEQ_kDownsample8kHzTbl, (WebRtc_Word16) 3,
+                (WebRtc_Word16) 2, (WebRtc_Word16) 0);
+            if (w16_decodedLen <= 80)
+            {
+                /* Not quite long enough, so we have to cheat a bit... */
+                WebRtc_Word16 temp_len = w16_decodedLen - 2;
+                w16_tmp = temp_len / 2;
+                WebRtcSpl_DownsampleFast(&pw16_decoded[2], temp_len,
+                                         pw16_decodedLB, w16_tmp,
+                                         (WebRtc_Word16*) WebRtcNetEQ_kDownsample8kHzTbl,
+                    (WebRtc_Word16) 3, (WebRtc_Word16) 2, (WebRtc_Word16) 0);
+                WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40 - w16_tmp));
+            }
+            else
+            {
+                WebRtcSpl_DownsampleFast(&pw16_decoded[2],
+                    (WebRtc_Word16) (w16_decodedLen - 2), pw16_decodedLB,
+                    (WebRtc_Word16) (40), (WebRtc_Word16*) WebRtcNetEQ_kDownsample8kHzTbl,
+                    (WebRtc_Word16) 3, (WebRtc_Word16) 2, (WebRtc_Word16) 0);
+            }
+#ifdef NETEQ_WIDEBAND
+        }
+        else if (inst->fs==16000)
+        {
+            WebRtcSpl_DownsampleFast(
+                &pw16_expanded[4], (WebRtc_Word16)(w16_expandedLen-4),
+                pw16_expandedLB, (WebRtc_Word16)(100),
+                (WebRtc_Word16*)WebRtcNetEQ_kDownsample16kHzTbl, (WebRtc_Word16)5,
+                (WebRtc_Word16)4, (WebRtc_Word16)0);
+            if (w16_decodedLen<=160)
+            {
+                /* Not quite long enough, so we have to cheat a bit... */
+                WebRtc_Word16 temp_len = w16_decodedLen - 4;
+                w16_tmp = temp_len / 4;
+                WebRtcSpl_DownsampleFast(
+                    &pw16_decoded[4], temp_len,
+                    pw16_decodedLB, w16_tmp,
+                    (WebRtc_Word16*)WebRtcNetEQ_kDownsample16kHzTbl, (WebRtc_Word16)5,
+                    (WebRtc_Word16)4, (WebRtc_Word16)0);
+                WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40-w16_tmp));
+            }
+            else
+            {
+                WebRtcSpl_DownsampleFast(
+                    &pw16_decoded[4], (WebRtc_Word16)(w16_decodedLen-4),
+                    pw16_decodedLB, (WebRtc_Word16)(40),
+                    (WebRtc_Word16*)WebRtcNetEQ_kDownsample16kHzTbl, (WebRtc_Word16)5,
+                    (WebRtc_Word16)4, (WebRtc_Word16)0);
+            }
+#endif
+#ifdef NETEQ_32KHZ_WIDEBAND
+        }
+        else if (inst->fs==32000)
+        {
+            /*
+             * TODO(hlundin) Why is the offset into pw16_expanded 6?
+             */
+            WebRtcSpl_DownsampleFast(
+                &pw16_expanded[6], (WebRtc_Word16)(w16_expandedLen-6),
+                pw16_expandedLB, (WebRtc_Word16)(100),
+                (WebRtc_Word16*)WebRtcNetEQ_kDownsample32kHzTbl, (WebRtc_Word16)7,
+                (WebRtc_Word16)8, (WebRtc_Word16)0);
+            if (w16_decodedLen<=320)
+            {
+                /* Not quite long enough, so we have to cheat a bit... */
+                WebRtc_Word16 temp_len = w16_decodedLen - 6;
+                w16_tmp = temp_len / 8;
+                WebRtcSpl_DownsampleFast(
+                      &pw16_decoded[6], temp_len,
+                      pw16_decodedLB, w16_tmp,
+                      (WebRtc_Word16*)WebRtcNetEQ_kDownsample32kHzTbl, (WebRtc_Word16)7,
+                      (WebRtc_Word16)8, (WebRtc_Word16)0);
+                WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40-w16_tmp));
+            }
+            else
+            {
+                WebRtcSpl_DownsampleFast(
+                    &pw16_decoded[6], (WebRtc_Word16)(w16_decodedLen-6),
+                    pw16_decodedLB, (WebRtc_Word16)(40),
+                    (WebRtc_Word16*)WebRtcNetEQ_kDownsample32kHzTbl, (WebRtc_Word16)7,
+                    (WebRtc_Word16)8, (WebRtc_Word16)0);
+            }
+#endif
+#ifdef NETEQ_48KHZ_WIDEBAND
+        }
+        else /* if (inst->fs==48000) */
+        {
+            /*
+             * TODO(hlundin) Why is the offset into pw16_expanded 6?
+             */
+            WebRtcSpl_DownsampleFast(
+                &pw16_expanded[6], (WebRtc_Word16)(w16_expandedLen-6),
+                pw16_expandedLB, (WebRtc_Word16)(100),
+                (WebRtc_Word16*)WebRtcNetEQ_kDownsample48kHzTbl, (WebRtc_Word16)7,
+                (WebRtc_Word16)12, (WebRtc_Word16)0);
+            if (w16_decodedLen<=320)
+            {
+                /* Not quite long enough, so we have to cheat a bit... */
+                /*
+                 * TODO(hlundin): Is this correct? Downsampling is a factor 12
+                 * but w16_tmp = temp_len / 8.
+                 * (Was w16_tmp = ((w16_decodedLen-6)>>3) before re-write.)
+                 */
+                WebRtc_Word16 temp_len = w16_decodedLen - 6;
+                w16_tmp = temp_len / 8;
+                WebRtcSpl_DownsampleFast(
+                    &pw16_decoded[6], temp_len,
+                    pw16_decodedLB, w16_tmp,
+                    (WebRtc_Word16*)WebRtcNetEQ_kDownsample48kHzTbl, (WebRtc_Word16)7,
+                    (WebRtc_Word16)12, (WebRtc_Word16)0);
+                WebRtcSpl_MemSetW16(&pw16_decodedLB[w16_tmp], 0, (40-w16_tmp));
+            }
+            else
+            {
+                WebRtcSpl_DownsampleFast(
+                    &pw16_decoded[6], (WebRtc_Word16)(w16_decodedLen-6),
+                    pw16_decodedLB, (WebRtc_Word16)(40),
+                    (WebRtc_Word16*)WebRtcNetEQ_kDownsample48kHzTbl, (WebRtc_Word16)7,
+                    (WebRtc_Word16)12, (WebRtc_Word16)0);
+            }
+#endif
+        }
+
+        /* Calculate correlation without any normalization (40 samples) */
+        w16_tmp = WebRtcSpl_DivW32W16ResW16((WebRtc_Word32) inst->ExpandInst.w16_maxLag,
+            (WebRtc_Word16) (fs_mult * 2)) + 1;
+        w16_stopPos = WEBRTC_SPL_MIN(60, w16_tmp);
+        w32_tmp = WEBRTC_SPL_MUL_16_16(w16_expmax, w16_newmax);
+        if (w32_tmp > 26843546)
+        {
+            w16_tmp = 3;
+        }
+        else
+        {
+            w16_tmp = 0;
+        }
+
+        WebRtcNetEQ_CrossCorr(pw32_corr, pw16_decodedLB, pw16_expandedLB, 40,
+            (WebRtc_Word16) w16_stopPos, w16_tmp, 1);
+
+        /* Normalize correlation to 14 bits and put in a WebRtc_Word16 vector */
+        WebRtcSpl_MemSetW16(pw16_corrVec, 0, (4 + 60 + 4));
+        w32_tmp = WebRtcSpl_MaxAbsValueW32(pw32_corr, w16_stopPos);
+        w16_tmp = 17 - WebRtcSpl_NormW32(w32_tmp);
+        w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
+
+        WebRtcSpl_VectorBitShiftW32ToW16(pw16_corr, w16_stopPos, pw32_corr, w16_tmp);
+
+        /* Calculate allowed starting point for peak finding.
+         The peak location bestIndex must fulfill two criteria:
+         (1) w16_bestIndex+w16_decodedLen < inst->timestampsPerCall+inst->ExpandInst.w16_overlap
+         (2) w16_bestIndex+w16_decodedLen < w16_startPos */
+        w16_tmp = WEBRTC_SPL_MAX(0, WEBRTC_SPL_MAX(w16_startPos,
+                inst->timestampsPerCall+inst->ExpandInst.w16_overlap) - w16_decodedLen);
+        /* Downscale starting index to 4kHz domain */
+        w16_tmp2 = WebRtcSpl_DivW32W16ResW16((WebRtc_Word32) w16_tmp,
+            (WebRtc_Word16) (fs_mult << 1));
+
+#ifdef NETEQ_STEREO
+    } /* end if (msInfo->msMode != NETEQ_SLAVE)  */
+
+    if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
+    {
+        /* This is master or mono instance; find peak */
+        WebRtcNetEQ_PeakDetection(&pw16_corr[w16_tmp2], w16_stopPos, 1, fs_mult, &w16_bestIndex,
+            &w16_bestVal);
+        w16_bestIndex += w16_tmp; /* compensate for modified starting index */
+        msInfo->bestIndex = w16_bestIndex;
+    }
+    else if (msInfo->msMode == NETEQ_SLAVE)
+    {
+        /* Get peak location from master instance */
+        w16_bestIndex = msInfo->bestIndex;
+    }
+    else
+    {
+        /* Invalid mode */
+        return MASTER_SLAVE_ERROR;
+    }
+
+#else /* NETEQ_STEREO */
+
+    /* Find peak */
+    WebRtcNetEQ_PeakDetection(&pw16_corr[w16_tmp2], w16_stopPos, 1, fs_mult, &w16_bestIndex,
+        &w16_bestVal);
+    w16_bestIndex += w16_tmp; /* compensate for modified starting index */
+
+#endif /* NETEQ_STEREO */
+
+    /*
+     * Ensure that underrun does not occur for 10ms case => we have to get at least
+     * 10ms + overlap . (This should never happen thanks to the above modification of
+     * peak-finding starting point.)
+     * */
+    while ((w16_bestIndex + w16_decodedLen) < (inst->timestampsPerCall
+        + inst->ExpandInst.w16_overlap) || w16_bestIndex + w16_decodedLen < w16_startPos)
+    {
+        w16_bestIndex += w16_newLen; /* Jump one lag ahead */
+    }
+    pw16_decodedOut = pw16_outData + w16_bestIndex;
+
+    /* Mute the new decoded data if needed (and unmute it linearly) */
+    w16_interpLen = WEBRTC_SPL_MIN(60*fs_mult,
+        w16_expandedLen-w16_bestIndex); /* this is the overlapping part of pw16_expanded */
+    w16_interpLen = WEBRTC_SPL_MIN(w16_interpLen, w16_decodedLen);
+    w16_inc = WebRtcSpl_DivW32W16ResW16(4194,
+        fs_mult); /* in Q20, 0.004 for NB and 0.002 for WB */
+    if (inst->w16_muteFactor < 16384)
+    {
+        WebRtcNetEQ_UnmuteSignal(pw16_decoded, &inst->w16_muteFactor, pw16_decoded, w16_inc,
+            (WebRtc_Word16) w16_interpLen);
+        WebRtcNetEQ_UnmuteSignal(&pw16_decoded[w16_interpLen], &inst->w16_muteFactor,
+            &pw16_decodedOut[w16_interpLen], w16_inc,
+            (WebRtc_Word16) (w16_decodedLen - w16_interpLen));
+    }
+    else
+    {
+        /* No muting needed */
+
+        WEBRTC_SPL_MEMMOVE_W16(&pw16_decodedOut[w16_interpLen], &pw16_decoded[w16_interpLen],
+            (w16_decodedLen-w16_interpLen));
+    }
+
+    /* Do overlap and interpolate linearly */
+    w16_inc = WebRtcSpl_DivW32W16ResW16(16384, (WebRtc_Word16) (w16_interpLen + 1)); /* Q14 */
+    w16_startfact = (16384 - w16_inc);
+    WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_expanded, w16_bestIndex);
+    WebRtcNetEQ_MixVoiceUnvoice(pw16_decodedOut, &pw16_expanded[w16_bestIndex], pw16_decoded,
+        &w16_startfact, w16_inc, w16_interpLen);
+
+    inst->w16_mode = MODE_MERGE;
+    inst->ExpandInst.w16_consecExp = 0; /* Last was not expand any more */
+
+    /* New added length (w16_startPos samples were borrowed) */
+    *pw16_len = w16_bestIndex + w16_decodedLen - w16_startPos;
+
+    /* Update VQmon parameter */
+    inst->w16_concealedTS += (*pw16_len - w16_decodedLen);
+    inst->w16_concealedTS = WEBRTC_SPL_MAX(0, inst->w16_concealedTS);
+
+    /* Update in-call and post-call statistics */
+    if (inst->ExpandInst.w16_expandMuteFactor == 0)
+    {
+        /* expansion generates noise only */
+        inst->statInst.expandedNoiseSamples += (*pw16_len - w16_decodedLen);
+    }
+    else
+    {
+        /* expansion generates more than only noise */
+        inst->statInst.expandedVoiceSamples += (*pw16_len - w16_decodedLen);
+    }
+    inst->statInst.expandLength += (*pw16_len - w16_decodedLen);
+
+
+    /* Copy back the first part of the data to the speechHistory */
+
+    WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->curPosition], pw16_outData, w16_startPos);
+
+
+    /* Move data to within outData */
+
+    WEBRTC_SPL_MEMMOVE_W16(pw16_outData, &pw16_outData[w16_startPos], (*pw16_len));
+
+    return 0;
+}
+
+#undef     SCRATCH_pw16_expanded
+#undef     SCRATCH_pw16_expandedLB
+#undef     SCRATCH_pw16_decodedLB
+#undef     SCRATCH_pw32_corr
+#undef     SCRATCH_pw16_corrVec
+#undef     SCRATCH_NETEQ_EXPAND
diff --git a/src/modules/audio_coding/neteq/min_distortion.c b/src/modules/audio_coding/neteq/min_distortion.c
new file mode 100644
index 0000000..4c9ee1c
--- /dev/null
+++ b/src/modules/audio_coding/neteq/min_distortion.c
@@ -0,0 +1,55 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Calculate best overlap fit according to distortion measure.
+ */
+
+#include "dsp_helpfunctions.h"
+
+#include "signal_processing_library.h"
+
+WebRtc_Word16 WebRtcNetEQ_MinDistortion(const WebRtc_Word16 *pw16_data,
+                                        WebRtc_Word16 w16_minLag, WebRtc_Word16 w16_maxLag,
+                                        WebRtc_Word16 len, WebRtc_Word32 *pw16_dist)
+{
+    int i, j;
+    const WebRtc_Word16 *pw16_data1;
+    const WebRtc_Word16 *pw16_data2;
+    WebRtc_Word32 w32_diff;
+    WebRtc_Word32 w32_sumdiff;
+    WebRtc_Word16 bestIndex = -1;
+    WebRtc_Word32 minDist = WEBRTC_SPL_WORD32_MAX;
+
+    for (i = w16_minLag; i <= w16_maxLag; i++)
+    {
+        w32_sumdiff = 0;
+        pw16_data1 = pw16_data;
+        pw16_data2 = pw16_data - i;
+
+        for (j = 0; j < len; j++)
+        {
+            w32_diff = pw16_data1[j] - pw16_data2[j];
+            w32_sumdiff += WEBRTC_SPL_ABS_W32(w32_diff);
+        }
+
+        /* Compare with previous minimum */
+        if (w32_sumdiff < minDist)
+        {
+            minDist = w32_sumdiff;
+            bestIndex = i;
+        }
+    }
+
+    *pw16_dist = minDist;
+
+    return bestIndex;
+}
+
diff --git a/src/modules/audio_coding/neteq/mix_voice_unvoice.c b/src/modules/audio_coding/neteq/mix_voice_unvoice.c
new file mode 100644
index 0000000..9895630
--- /dev/null
+++ b/src/modules/audio_coding/neteq/mix_voice_unvoice.c
@@ -0,0 +1,41 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This function mixes a voiced signal with an unvoiced signal and
+ * updates the weight on a sample by sample basis.
+ */
+
+#include "dsp_helpfunctions.h"
+
+#include "signal_processing_library.h"
+
+void WebRtcNetEQ_MixVoiceUnvoice(WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_voicedVec,
+                                 WebRtc_Word16 *pw16_unvoicedVec,
+                                 WebRtc_Word16 *w16_current_vfraction,
+                                 WebRtc_Word16 w16_vfraction_change, WebRtc_Word16 N)
+{
+    int i;
+    WebRtc_Word16 w16_tmp2;
+    WebRtc_Word16 vfraction = *w16_current_vfraction;
+
+    w16_tmp2 = 16384 - vfraction;
+    for (i = 0; i < N; i++)
+    {
+        pw16_outData[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+            WEBRTC_SPL_MUL_16_16(vfraction, pw16_voicedVec[i]) +
+            WEBRTC_SPL_MUL_16_16(w16_tmp2, pw16_unvoicedVec[i]) + 8192,
+            14);
+        vfraction -= w16_vfraction_change;
+        w16_tmp2 += w16_vfraction_change;
+    }
+    *w16_current_vfraction = vfraction;
+}
+
diff --git a/src/modules/audio_coding/neteq/mute_signal.c b/src/modules/audio_coding/neteq/mute_signal.c
new file mode 100644
index 0000000..ee899cf
--- /dev/null
+++ b/src/modules/audio_coding/neteq/mute_signal.c
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This function mutes a signal linearly on a sample by sample basis.
+ */
+
+#include "dsp_helpfunctions.h"
+
+#include "signal_processing_library.h"
+
+void WebRtcNetEQ_MuteSignal(WebRtc_Word16 *pw16_inout, WebRtc_Word16 muteSlope,
+                            WebRtc_Word16 N)
+{
+    int i;
+    WebRtc_Word32 w32_tmp = 1048608; /* (16384<<6 + 32) */
+
+    for (i = 0; i < N; i++)
+    {
+        pw16_inout[i]
+            = (WebRtc_Word16) ((WEBRTC_SPL_MUL_16_16((WebRtc_Word16)(w32_tmp>>6), pw16_inout[i])
+                + 8192) >> 14);
+        w32_tmp -= muteSlope;
+    }
+}
+
diff --git a/src/modules/audio_coding/neteq/neteq.gypi b/src/modules/audio_coding/neteq/neteq.gypi
new file mode 100644
index 0000000..0d19c89
--- /dev/null
+++ b/src/modules/audio_coding/neteq/neteq.gypi
@@ -0,0 +1,299 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'NetEq',
+      'type': '<(library)',
+      'dependencies': [
+        'CNG',
+        '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+      ],
+      'defines': [
+        'NETEQ_VOICEENGINE_CODECS', # TODO: Should create a Chrome define which
+        'SCRATCH',                  # specifies a subset of codecs to support.
+      ],
+      'include_dirs': [
+        'interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'interface',
+        ],
+      },
+      'sources': [
+        'interface/webrtc_neteq.h',
+        'interface/webrtc_neteq_help_macros.h',
+        'interface/webrtc_neteq_internal.h',
+        'accelerate.c',
+        'automode.c',
+        'automode.h',
+        'bgn_update.c',
+        'buffer_stats.h',
+        'bufstats_decision.c',
+        'cng_internal.c',
+        'codec_db.c',
+        'codec_db.h',
+        'codec_db_defines.h',
+        'correlator.c',
+        'delay_logging.h',
+        'dsp.c',
+        'dsp.h',
+        'dsp_helpfunctions.c',
+        'dsp_helpfunctions.h',
+        'dtmf_buffer.c',
+        'dtmf_buffer.h',
+        'dtmf_tonegen.c',
+        'dtmf_tonegen.h',
+        'expand.c',
+        'mcu.h',
+        'mcu_address_init.c',
+        'mcu_dsp_common.c',
+        'mcu_dsp_common.h',
+        'mcu_reset.c',
+        'merge.c',
+        'min_distortion.c',
+        'mix_voice_unvoice.c',
+        'mute_signal.c',
+        'neteq_defines.h',
+        'neteq_error_codes.h',
+        'neteq_statistics.h',
+        'normal.c',
+        'packet_buffer.c',
+        'packet_buffer.h',
+        'peak_detection.c',
+        'preemptive_expand.c',
+        'random_vector.c',
+        'recin.c',
+        'recout.c',
+        'rtcp.c',
+        'rtcp.h',
+        'rtp.c',
+        'rtp.h',
+        'set_fs.c',
+        'signal_mcu.c',
+        'split_and_insert.c',
+        'unmute_signal.c',
+        'webrtc_neteq.c',
+      ],
+    },
+  ], # targets
+  'conditions': [
+    ['include_tests==1', {
+      'targets': [
+        {
+          'target_name': 'neteq_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'NetEq',
+            'NetEqTestTools',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+          ],
+          'sources': [
+            'webrtc_neteq_unittest.cc',
+          ],
+        }, # neteq_unittests
+        {
+          'target_name': 'NetEqRTPplay',
+          'type': 'executable',
+          'dependencies': [
+            'NetEq',          # NetEQ library defined above
+            'NetEqTestTools', # Test helpers
+            'G711',
+            'G722',
+            'PCM16B',
+            'iLBC',
+            'iSAC',
+            'CNG',
+          ],
+          'defines': [
+            # TODO: Make codec selection conditional on definitions in target NetEq
+            'CODEC_ILBC',
+            'CODEC_PCM16B',
+            'CODEC_G711',
+            'CODEC_G722',
+            'CODEC_ISAC',
+            'CODEC_PCM16B_WB',
+            'CODEC_ISAC_SWB',
+            'CODEC_PCM16B_32KHZ',
+            'CODEC_CNGCODEC8',
+            'CODEC_CNGCODEC16',
+            'CODEC_CNGCODEC32',
+            'CODEC_ATEVENT_DECODE',
+            'CODEC_RED',
+          ],
+          'include_dirs': [
+            '.',
+            'test',
+          ],
+          'sources': [
+            'test/NetEqRTPplay.cc',
+          ],
+        },
+       {
+          'target_name': 'RTPencode',
+          'type': 'executable',
+          'dependencies': [
+            'NetEqTestTools',# Test helpers
+            'G711',
+            'G722',
+            'PCM16B',
+            'iLBC',
+            'iSAC',
+            'CNG',
+            '<(webrtc_root)/common_audio/common_audio.gyp:vad',
+          ],
+          'defines': [
+            # TODO: Make codec selection conditional on definitions in target NetEq
+            'CODEC_ILBC',
+            'CODEC_PCM16B',
+            'CODEC_G711',
+            'CODEC_G722',
+            'CODEC_ISAC',
+            'CODEC_PCM16B_WB',
+            'CODEC_ISAC_SWB',
+            'CODEC_PCM16B_32KHZ',
+            'CODEC_CNGCODEC8',
+            'CODEC_CNGCODEC16',
+            'CODEC_CNGCODEC32',
+            'CODEC_ATEVENT_DECODE',
+            'CODEC_RED',
+          ],
+          'include_dirs': [
+            'interface',
+            'test',
+          ],
+          'sources': [
+            'test/RTPencode.cc',
+          ],
+        },
+        {
+          'target_name': 'RTPjitter',
+          'type': 'executable',
+          'dependencies': [
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+          ],
+          'sources': [
+            'test/RTPjitter.cc',
+          ],
+        },
+        {
+          'target_name': 'RTPanalyze',
+          'type': 'executable',
+          'dependencies': [
+            'NetEqTestTools',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+          ],
+          'sources': [
+            'test/RTPanalyze.cc',
+          ],
+        },
+        {
+          'target_name': 'RTPchange',
+          'type': 'executable',
+          'dependencies': [
+            'NetEqTestTools',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+          ],
+          'sources': [
+           'test/RTPchange.cc',
+          ],
+        },
+        {
+          'target_name': 'RTPtimeshift',
+          'type': 'executable',
+          'dependencies': [
+           'NetEqTestTools',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+          ],
+          'sources': [
+            'test/RTPtimeshift.cc',
+          ],
+        },
+        {
+          'target_name': 'RTPcat',
+          'type': 'executable',
+          'dependencies': [
+            'NetEqTestTools',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+          ],
+          'sources': [
+            'test/RTPcat.cc',
+          ],
+        },
+        {
+          'target_name': 'rtp_to_text',
+          'type': 'executable',
+          'dependencies': [
+            'NetEqTestTools',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+          ],
+          'sources': [
+            'test/rtp_to_text.cc',
+          ],
+        },
+        {
+         'target_name': 'NetEqTestTools',
+          # Collection of useful functions used in other tests
+          'type': '<(library)',
+          'variables': {
+            # Expects RTP packets without payloads when enabled.
+            'neteq_dummy_rtp%': 0,
+          },
+          'dependencies': [
+            'G711',
+            'G722',
+            'PCM16B',
+            'iLBC',
+            'iSAC',
+            'CNG',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+          ],
+          'direct_dependent_settings': {
+            'include_dirs': [
+              'interface',
+              'test',
+            ],
+          },
+          'defines': [
+            # TODO: Make codec selection conditional on definitions in target NetEq
+            'CODEC_ILBC',
+            'CODEC_PCM16B',
+            'CODEC_G711',
+            'CODEC_G722',
+            'CODEC_ISAC',
+            'CODEC_PCM16B_WB',
+            'CODEC_ISAC_SWB',
+            'CODEC_PCM16B_32KHZ',
+            'CODEC_CNGCODEC8',
+            'CODEC_CNGCODEC16',
+            'CODEC_CNGCODEC32',
+            'CODEC_ATEVENT_DECODE',
+            'CODEC_RED',
+          ],
+          'include_dirs': [
+            'interface',
+            'test',
+          ],
+          'sources': [
+            'test/NETEQTEST_CodecClass.cc',
+            'test/NETEQTEST_CodecClass.h',
+            'test/NETEQTEST_DummyRTPpacket.cc',
+            'test/NETEQTEST_DummyRTPpacket.h',
+            'test/NETEQTEST_NetEQClass.cc',
+            'test/NETEQTEST_NetEQClass.h',
+            'test/NETEQTEST_RTPpacket.cc',
+            'test/NETEQTEST_RTPpacket.h',
+          ],
+        },
+      ], # targets
+    }], # include_tests
+  ], # conditions
+}
diff --git a/src/modules/audio_coding/neteq/neteq_defines.h b/src/modules/audio_coding/neteq/neteq_defines.h
new file mode 100644
index 0000000..318e6bb
--- /dev/null
+++ b/src/modules/audio_coding/neteq/neteq_defines.h
@@ -0,0 +1,356 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*****************************************************************************************
+ *
+ * Compilation flags in NetEQ:
+ *
+ *****************************************************************************************
+ *
+ ***** Platform flags ******
+ *
+ * SCRATCH                        Run NetEQ with "Scratch memory" to save some stack memory.
+ *                                Definition can be used on all platforms
+ *
+ ***** Summary flags ******
+ *
+ * NETEQ_ALL_SPECIAL_CODECS       Add support for special codecs (CN/RED/DTMF)
+ *
+ * NETEQ_ALL_NB_CODECS            Add support for all NB codecs (except CN/RED/DTMF)
+ *
+ * NETEQ_ALL_WB_CODECS            Add support for all WB codecs (except CN/RED/DTMF)
+ *
+ * NETEQ_VOICEENGINE_CODECS       Support for all NB, WB and SWB32 codecs and CN, RED and DTMF
+ *
+ * NETEQ_ALL_CODECS               Support for all NB, WB, SWB 32kHz and SWB 48kHz as well as
+ *                                CN, RED and DTMF
+ *
+ ***** Sampling frequency ****** 
+ * (Note: usually not needed when Summary flags are used)
+ *
+ * NETEQ_WIDEBAND                 Wideband enabled
+ *
+ * NETEQ_32KHZ_WIDEBAND           Super wideband @ 32kHz enabled
+ *
+ * NETEQ_48KHZ_WIDEBAND           Super wideband @ 48kHz enabled
+ *
+ ***** Special Codec ****** 
+ * (Note: not needed if NETEQ_ALL_CODECS is used)
+ *
+ * NETEQ_RED_CODEC                With this flag you enable NetEQ to understand redundancy in
+ *                                the RTP. NetEQ will use the redundancy if it's the same
+ *                                codec
+ *
+ * NETEQ_CNG_CODEC                Enable DTX with the CN payload
+ *
+ * NETEQ_ATEVENT_DECODE           Enable AVT event and play out the corresponding DTMF tone
+ *
+ ***** Speech Codecs *****
+ * (Note: Not needed if Summary flags are used)
+ *
+ * NETEQ_G711_CODEC               Enable G.711 u- and A-law
+ *
+ * NETEQ_PCM16B_CODEC             Enable uncompressed 16-bit
+ *
+ * NETEQ_ILBC_CODEC               Enable iLBC
+ *
+ * NETEQ_ISAC_CODEC               Enable iSAC
+ *
+ * NETEQ_ISAC_SWB_CODEC           Enable iSAC-SWB
+ *
+ * NETEQ_G722_CODEC               Enable G.722
+ *
+ * NETEQ_G729_CODEC               Enable G.729
+ *
+ * NETEQ_G729_1_CODEC             Enable G.729.1
+ *
+ * NETEQ_G726_CODEC               Enable G.726
+ *
+ * NETEQ_G722_1_CODEC             Enable G722.1
+ *
+ * NETEQ_G722_1C_CODEC            Enable G722.1 Annex C
+ *
+ * NETEQ_SPEEX_CODEC              Enable Speex (at 8 and 16 kHz sample rate)
+ *
+ * NETEQ_CELT_CODEC               Enable Celt (at 32 kHz sample rate)
+ *
+ * NETEQ_GSMFR_CODEC              Enable GSM-FR
+ *
+ * NETEQ_AMR_CODEC                Enable AMR (narrowband)
+ *
+ * NETEQ_AMRWB_CODEC              Enable AMR-WB
+ *
+ * NETEQ_CNG_CODEC                Enable DTX with the CNG payload
+ *
+ * NETEQ_ATEVENT_DECODE           Enable AVT event and play out the corresponding DTMF tone
+ *
+ ***** Test flags ******
+ *
+ * WEBRTC_NETEQ_40BITACC_TEST     Run NetEQ with simulated 40-bit accumulator to run
+ *                                bit-exact to a DSP implementation where the main (splib
+ *                                and NetEQ) functions have been 40-bit optimized
+ *
+ *****************************************************************************************
+ */
+
+#if !defined NETEQ_DEFINES_H
+#define NETEQ_DEFINES_H
+
+/* Data block structure for MCU to DSP communication:
+ *
+ *
+ *  First 3 16-bit words are pre-header that contains instructions and timestamp update
+ *  Fourth 16-bit word is length of data block 1
+ *  Rest is payload data
+ *
+ *  0               48          64          80
+ *  -------------...----------------------------------------------------------------------
+ *  |  PreHeader ... | Length 1 |  Payload data 1 ...... | Lenght 2| Data block 2....    | ...
+ *  -------------...----------------------------------------------------------------------
+ *
+ *
+ *  Preheader:
+ *  4 MSB can be either of:
+ */
+
+#define DSP_INSTR_NORMAL                         0x1000
+/* Payload data will contain the encoded frames */
+
+#define DSP_INSTR_MERGE                          0x2000
+/* Payload data block 1 will contain the encoded frame */
+/* Info block will contain the number of missing samples */
+
+#define DSP_INSTR_EXPAND                         0x3000
+/* Payload data will be empty */
+
+#define DSP_INSTR_ACCELERATE                     0x4000
+/* Payload data will contain the encoded frame */
+
+#define DSP_INSTR_DO_RFC3389CNG                  0x5000
+/* Payload data will contain the SID frame if there is one*/
+
+#define DSP_INSTR_DTMF_GENERATE                  0x6000
+/* Payload data will be one WebRtc_Word16 with the current DTMF value and one
+ * WebRtc_Word16 with the current volume value
+ */
+#define DSP_INSTR_NORMAL_ONE_DESC                0x7000
+/* No encoded frames */
+
+#define DSP_INSTR_DO_CODEC_INTERNAL_CNG          0x8000
+/* Codec has a built-in VAD/DTX scheme (use the above for "no transmission") */
+
+#define DSP_INSTR_PREEMPTIVE_EXPAND              0x9000
+/* Payload data will contain the encoded frames, if any */
+
+#define DSP_INSTR_DO_ALTERNATIVE_PLC             0xB000
+/* NetEQ switched off and packet missing... */
+
+#define DSP_INSTR_DO_ALTERNATIVE_PLC_INC_TS      0xC000
+/* NetEQ switched off and packet missing... */
+
+#define DSP_INSTR_DO_AUDIO_REPETITION            0xD000
+/* NetEQ switched off and packet missing... */
+
+#define DSP_INSTR_DO_AUDIO_REPETITION_INC_TS     0xE000
+/* NetEQ switched off and packet missing... */
+
+#define DSP_INSTR_FADE_TO_BGN                    0xF000
+/* Exception handling: fade out to BGN (expand) */
+
+/*
+ * Next 4 bits signal additional data that needs to be transmitted
+ */
+
+#define DSP_CODEC_NO_CHANGE                      0x0100
+#define DSP_CODEC_NEW_CODEC                      0x0200
+#define DSP_CODEC_ADD_LATE_PKT                   0x0300
+#define DSP_CODEC_RESET                          0x0400
+#define DSP_DTMF_PAYLOAD                         0x0010
+
+/*
+ * The most significant bit of the payload-length
+ * is used to flag whether the associated payload
+ * is redundant payload. This currently useful only for
+ * iSAC, where redundant payloads have to be treated 
+ * differently. Every time the length is read it must be
+ * masked by DSP_CODEC_MASK_RED_FLAG to ignore the flag.
+ * Use DSP_CODEC_RED_FLAG to set or retrieve the flag.
+ */
+#define DSP_CODEC_MASK_RED_FLAG                  0x7FFF
+#define DSP_CODEC_RED_FLAG                       0x8000
+
+/*
+ * The first block of payload data consist of decode function pointers,
+ * and then the speech blocks.
+ *
+ */
+
+
+/*
+ * The playout modes that NetEq produced (i.e. gives more info about if the 
+ * Accelerate was successful or not)
+ */
+
+#define MODE_NORMAL                    0x0000
+#define MODE_EXPAND                    0x0001
+#define MODE_MERGE                     0x0002
+#define MODE_SUCCESS_ACCELERATE        0x0003
+#define MODE_UNSUCCESS_ACCELERATE      0x0004
+#define MODE_RFC3389CNG                0x0005
+#define MODE_LOWEN_ACCELERATE          0x0006
+#define MODE_DTMF                      0x0007
+#define MODE_ONE_DESCRIPTOR            0x0008
+#define MODE_CODEC_INTERNAL_CNG        0x0009
+#define MODE_SUCCESS_PREEMPTIVE        0x000A
+#define MODE_UNSUCCESS_PREEMPTIVE      0x000B
+#define MODE_LOWEN_PREEMPTIVE          0x000C
+#define MODE_FADE_TO_BGN               0x000D
+
+#define MODE_ERROR                     0x0010
+
+#define MODE_AWAITING_CODEC_PTR        0x0100
+
+#define MODE_BGN_ONLY                  0x0200
+
+#define MODE_MASTER_DTMF_SIGNAL        0x0400
+
+#define MODE_USING_STEREO              0x0800
+
+
+
+/***********************/
+/* Group codec defines */
+/***********************/
+
+#if (defined(NETEQ_ALL_SPECIAL_CODECS))
+    #define NETEQ_CNG_CODEC
+    #define NETEQ_ATEVENT_DECODE
+    #define NETEQ_RED_CODEC
+    #define NETEQ_VAD
+    #define NETEQ_ARBITRARY_CODEC
+#endif
+
+#if (defined(NETEQ_ALL_NB_CODECS))        /* Except RED, DTMF and CNG */
+    #define NETEQ_PCM16B_CODEC
+    #define NETEQ_G711_CODEC
+    #define NETEQ_ILBC_CODEC
+    #define NETEQ_G729_CODEC
+    #define NETEQ_G726_CODEC
+    #define NETEQ_GSMFR_CODEC
+    #define NETEQ_AMR_CODEC
+#endif
+
+#if (defined(NETEQ_ALL_WB_CODECS))        /* Except RED, DTMF and CNG */
+    #define NETEQ_ISAC_CODEC
+    #define NETEQ_G722_CODEC
+    #define NETEQ_G722_1_CODEC
+    #define NETEQ_G729_1_CODEC
+    #define NETEQ_SPEEX_CODEC
+    #define NETEQ_AMRWB_CODEC
+    #define NETEQ_WIDEBAND
+#endif
+
+#if (defined(NETEQ_ALL_WB32_CODECS))        /* AAC, RED, DTMF and CNG */
+    #define NETEQ_ISAC_SWB_CODEC
+    #define NETEQ_32KHZ_WIDEBAND
+    #define NETEQ_G722_1C_CODEC
+    #define NETEQ_CELT_CODEC
+#endif
+
+#if (defined(NETEQ_VOICEENGINE_CODECS))
+    /* Special codecs */
+    #define NETEQ_CNG_CODEC
+    #define NETEQ_ATEVENT_DECODE
+    #define NETEQ_RED_CODEC
+    #define NETEQ_VAD
+    #define NETEQ_ARBITRARY_CODEC
+
+    /* Narrowband codecs */
+    #define NETEQ_PCM16B_CODEC
+    #define NETEQ_G711_CODEC
+    #define NETEQ_ILBC_CODEC
+    #define NETEQ_AMR_CODEC
+    #define NETEQ_G729_CODEC
+    #define NETEQ_GSMFR_CODEC
+
+    /* Wideband codecs */
+    #define NETEQ_WIDEBAND
+    #define NETEQ_ISAC_CODEC
+    #define NETEQ_G722_CODEC
+    #define NETEQ_G722_1_CODEC
+    #define NETEQ_G729_1_CODEC
+    #define NETEQ_AMRWB_CODEC
+    #define NETEQ_SPEEX_CODEC
+
+    /* Super wideband 32kHz codecs */
+    #define NETEQ_ISAC_SWB_CODEC
+    #define NETEQ_32KHZ_WIDEBAND
+    #define NETEQ_G722_1C_CODEC
+    #define NETEQ_CELT_CODEC
+
+#endif 
+
+#if (defined(NETEQ_ALL_CODECS))
+    /* Special codecs */
+    #define NETEQ_CNG_CODEC
+    #define NETEQ_ATEVENT_DECODE
+    #define NETEQ_RED_CODEC
+    #define NETEQ_VAD
+    #define NETEQ_ARBITRARY_CODEC
+
+    /* Narrowband codecs */
+    #define NETEQ_PCM16B_CODEC
+    #define NETEQ_G711_CODEC
+    #define NETEQ_ILBC_CODEC
+    #define NETEQ_G729_CODEC
+    #define NETEQ_G726_CODEC
+    #define NETEQ_GSMFR_CODEC
+    #define NETEQ_AMR_CODEC
+
+    /* Wideband codecs */
+    #define NETEQ_WIDEBAND
+    #define NETEQ_ISAC_CODEC
+    #define NETEQ_G722_CODEC
+    #define NETEQ_G722_1_CODEC
+    #define NETEQ_G729_1_CODEC
+    #define NETEQ_SPEEX_CODEC
+    #define NETEQ_AMRWB_CODEC
+
+    /* Super wideband 32kHz codecs */
+    #define NETEQ_ISAC_SWB_CODEC
+    #define NETEQ_32KHZ_WIDEBAND
+    #define NETEQ_G722_1C_CODEC
+    #define NETEQ_CELT_CODEC
+
+    /* Super wideband 48kHz codecs */
+    #define NETEQ_48KHZ_WIDEBAND
+#endif
+
+/* Max output size from decoding one frame */
+#if defined(NETEQ_48KHZ_WIDEBAND)
+    #define NETEQ_MAX_FRAME_SIZE     2880    /* 60 ms super wideband */
+    #define NETEQ_MAX_OUTPUT_SIZE    3600    /* 60+15 ms super wideband (60 ms decoded + 15 ms for merge overlap) */
+#elif defined(NETEQ_32KHZ_WIDEBAND)
+    #define NETEQ_MAX_FRAME_SIZE     1920    /* 60 ms super wideband */
+    #define NETEQ_MAX_OUTPUT_SIZE    2400    /* 60+15 ms super wideband (60 ms decoded + 15 ms for merge overlap) */
+#elif defined(NETEQ_WIDEBAND)
+    #define NETEQ_MAX_FRAME_SIZE     960        /* 60 ms wideband */
+    #define NETEQ_MAX_OUTPUT_SIZE    1200    /* 60+15 ms wideband (60 ms decoded + 10 ms for merge overlap) */
+#else
+    #define NETEQ_MAX_FRAME_SIZE     480        /* 60 ms narrowband */
+    #define NETEQ_MAX_OUTPUT_SIZE    600        /* 60+15 ms narrowband (60 ms decoded + 10 ms for merge overlap) */
+#endif
+
+
+/* Enable stereo */
+#define NETEQ_STEREO
+
+#endif /* #if !defined NETEQ_DEFINES_H */
+
diff --git a/src/modules/audio_coding/neteq/neteq_error_codes.h b/src/modules/audio_coding/neteq/neteq_error_codes.h
new file mode 100644
index 0000000..1ce4680
--- /dev/null
+++ b/src/modules/audio_coding/neteq/neteq_error_codes.h
@@ -0,0 +1,79 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Definition of error codes.
+ *
+ * NOTE: When modifying the error codes,
+ * also modify the function WebRtcNetEQ_GetErrorCode!
+ */
+
+#if !defined NETEQ_ERROR_CODES_H
+#define NETEQ_ERROR_CODES_H
+
+/* Misc Error */
+#define NETEQ_OTHER_ERROR               -1000
+
+/* Misc Recout Errors */
+#define FAULTY_INSTRUCTION              -1001
+#define FAULTY_NETWORK_TYPE             -1002
+#define FAULTY_DELAYVALUE               -1003
+#define FAULTY_PLAYOUTMODE              -1004
+#define CORRUPT_INSTANCE                -1005
+#define ILLEGAL_MASTER_SLAVE_SWITCH     -1006
+#define MASTER_SLAVE_ERROR              -1007
+
+/* Misc Recout problems */
+#define UNKNOWN_BUFSTAT_DECISION        -2001
+#define RECOUT_ERROR_DECODING           -2002
+#define RECOUT_ERROR_SAMPLEUNDERRUN     -2003
+#define RECOUT_ERROR_DECODED_TOO_MUCH   -2004
+
+/* Misc RecIn problems */
+#define RECIN_CNG_ERROR                 -3001
+#define RECIN_UNKNOWNPAYLOAD            -3002
+#define RECIN_BUFFERINSERT_ERROR        -3003
+
+/* PBUFFER/BUFSTAT ERRORS */
+#define PBUFFER_INIT_ERROR              -4001
+#define PBUFFER_INSERT_ERROR1           -4002
+#define PBUFFER_INSERT_ERROR2           -4003
+#define PBUFFER_INSERT_ERROR3           -4004
+#define PBUFFER_INSERT_ERROR4           -4005
+#define PBUFFER_INSERT_ERROR5           -4006
+#define UNKNOWN_G723_HEADER             -4007
+#define PBUFFER_NONEXISTING_PACKET      -4008
+#define PBUFFER_NOT_INITIALIZED         -4009
+#define AMBIGUOUS_ILBC_FRAME_SIZE       -4010
+
+/* CODEC DATABASE ERRORS */
+#define CODEC_DB_FULL                   -5001
+#define CODEC_DB_NOT_EXIST1             -5002
+#define CODEC_DB_NOT_EXIST2             -5003
+#define CODEC_DB_NOT_EXIST3             -5004
+#define CODEC_DB_NOT_EXIST4             -5005
+#define CODEC_DB_UNKNOWN_CODEC          -5006
+#define CODEC_DB_PAYLOAD_TAKEN          -5007
+#define CODEC_DB_UNSUPPORTED_CODEC      -5008
+#define CODEC_DB_UNSUPPORTED_FS         -5009
+
+/* DTMF ERRORS */
+#define DTMF_DEC_PARAMETER_ERROR        -6001
+#define DTMF_INSERT_ERROR               -6002
+#define DTMF_GEN_UNKNOWN_SAMP_FREQ      -6003
+#define DTMF_NOT_SUPPORTED              -6004
+
+/* RTP/PACKET ERRORS */
+#define RED_SPLIT_ERROR1                -7001
+#define RED_SPLIT_ERROR2                -7002
+#define RTP_TOO_SHORT_PACKET            -7003
+#define RTP_CORRUPT_PACKET              -7004
+
+#endif
diff --git a/src/modules/audio_coding/neteq/neteq_statistics.h b/src/modules/audio_coding/neteq/neteq_statistics.h
new file mode 100644
index 0000000..d07f330
--- /dev/null
+++ b/src/modules/audio_coding/neteq/neteq_statistics.h
@@ -0,0 +1,40 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Definitions of statistics data structures for MCU and DSP sides.
+ */
+
+#include "typedefs.h"
+
+#ifndef NETEQ_STATISTICS_H
+#define NETEQ_STATISTICS_H
+
+/*
+ * Statistics struct on DSP side
+ */
+typedef struct
+{
+
+    /* variables for in-call statistics; queried through WebRtcNetEQ_GetNetworkStatistics */
+    WebRtc_UWord32 expandLength; /* number of samples produced through expand */
+    WebRtc_UWord32 preemptiveLength; /* number of samples produced through pre-emptive
+     expand */
+    WebRtc_UWord32 accelerateLength; /* number of samples removed through accelerate */
+
+    /* variables for post-call statistics; queried through WebRtcNetEQ_GetJitterStatistics */
+    WebRtc_UWord32 expandedVoiceSamples; /* number of voice samples produced through expand */
+    WebRtc_UWord32 expandedNoiseSamples; /* number of noise (background) samples produced
+     through expand */
+
+} DSPStats_t;
+
+#endif
+
diff --git a/src/modules/audio_coding/neteq/normal.c b/src/modules/audio_coding/neteq/normal.c
new file mode 100644
index 0000000..b33940a
--- /dev/null
+++ b/src/modules/audio_coding/neteq/normal.c
@@ -0,0 +1,279 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the function for handling "normal" speech operation.
+ */
+#include "dsp.h"
+
+#include "signal_processing_library.h"
+
+#include "dsp_helpfunctions.h"
+
+/* Scratch usage:
+
+ Type           Name                    size            startpos        endpos
+ WebRtc_Word16  pw16_expanded           125*fs/8000     0               125*fs/8000-1
+
+ func           WebRtcNetEQ_Expand      40+370*fs/8000  125*fs/8000     39+495*fs/8000
+
+ Total:  40+495*fs/8000
+ */
+
+#define     SCRATCH_PW16_EXPANDED           0
+#if (defined(NETEQ_48KHZ_WIDEBAND)) 
+#define     SCRATCH_NETEQ_EXPAND    756
+#elif (defined(NETEQ_32KHZ_WIDEBAND)) 
+#define     SCRATCH_NETEQ_EXPAND    504
+#elif (defined(NETEQ_WIDEBAND)) 
+#define     SCRATCH_NETEQ_EXPAND    252
+#else    /* NB */
+#define     SCRATCH_NETEQ_EXPAND    126
+#endif
+
+/****************************************************************************
+ * WebRtcNetEQ_Normal(...)
+ *
+ * This function has the possibility to modify data that is played out in Normal
+ * mode, for example adjust the gain of the signal. The length of the signal 
+ * can not be changed.
+ *
+ * Input:
+ *      - inst          : NetEq instance, i.e. the user that requests more
+ *                        speech/audio data
+ *      - scratchPtr    : Pointer to scratch vector
+ *      - decoded       : Pointer to vector of new data from decoder
+ *                        (Vector contents may be altered by the function)
+ *      - len           : Number of input samples
+ *
+ * Output:
+ *      - inst          : Updated user information
+ *      - outData       : Pointer to a memory space where the output data
+ *                        should be stored
+ *      - pw16_len      : Pointer to variable where the number of samples
+ *                        produced will be written
+ *
+ * Return value         : >=0 - Number of samples written to outData
+ *                         -1 - Error
+ */
+
+int WebRtcNetEQ_Normal(DSPInst_t *inst,
+#ifdef SCRATCH
+                       WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                       WebRtc_Word16 *pw16_decoded, WebRtc_Word16 len,
+                       WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len)
+{
+
+    int i;
+    WebRtc_Word16 fs_mult;
+    WebRtc_Word16 fs_shift;
+    WebRtc_Word32 w32_En_speech;
+    WebRtc_Word16 enLen;
+    WebRtc_Word16 w16_muted;
+    WebRtc_Word16 w16_inc, w16_frac;
+    WebRtc_Word16 w16_tmp;
+    WebRtc_Word32 w32_tmp;
+
+    /* Sanity check */
+    if (len < 0)
+    {
+        /* Cannot have negative length of input vector */
+        return (-1);
+    }
+
+    if (len == 0)
+    {
+        /* Still got some data to play => continue with the same mode */
+        *pw16_len = len;
+        return (len);
+    }
+
+    fs_mult = WebRtcSpl_DivW32W16ResW16(inst->fs, 8000);
+    fs_shift = 30 - WebRtcSpl_NormW32(fs_mult); /* Note that this is not "exact" for 48kHz */
+
+    /*
+     * Check if last RecOut call resulted in an Expand or a FadeToBGN. If so, we have to take
+     * care of some cross-fading and unmuting.
+     */
+    if (inst->w16_mode == MODE_EXPAND || inst->w16_mode == MODE_FADE_TO_BGN)
+    {
+
+        /* Define memory where temporary result from Expand algorithm can be stored. */
+#ifdef SCRATCH
+        WebRtc_Word16 *pw16_expanded = pw16_scratchPtr + SCRATCH_PW16_EXPANDED;
+#else
+        WebRtc_Word16 pw16_expanded[FSMULT * 125];
+#endif
+        WebRtc_Word16 expandedLen = 0;
+        WebRtc_Word16 w16_decodedMax;
+
+        /* Find largest value in new data */
+        w16_decodedMax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (WebRtc_Word16) len);
+
+        /* Generate interpolation data using Expand */
+        /* First, set Expand parameters to appropriate values. */
+        inst->ExpandInst.w16_lagsPosition = 0;
+        inst->ExpandInst.w16_lagsDirection = 0;
+        inst->ExpandInst.w16_stopMuting = 1; /* Do not mute signal any more */
+
+        /* Call Expand */
+        WebRtcNetEQ_Expand(inst,
+#ifdef SCRATCH
+            pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
+#endif
+            pw16_expanded, &expandedLen, (WebRtc_Word16) (inst->w16_mode == MODE_FADE_TO_BGN));
+
+        inst->ExpandInst.w16_stopMuting = 0; /* Restore value */
+        inst->ExpandInst.w16_consecExp = 0; /* Last was not Expand any more */
+
+        /* Adjust muting factor (main muting factor times expand muting factor) */
+        if (inst->w16_mode == MODE_FADE_TO_BGN)
+        {
+            /* If last mode was FadeToBGN, the mute factor should be zero. */
+            inst->w16_muteFactor = 0;
+        }
+        else
+        {
+            /* w16_muteFactor * w16_expandMuteFactor */
+            inst->w16_muteFactor
+                = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16_RSFT(inst->w16_muteFactor,
+                    inst->ExpandInst.w16_expandMuteFactor, 14);
+        }
+
+        /* Adjust muting factor if needed (to BGN level) */
+        enLen = WEBRTC_SPL_MIN(fs_mult<<6, len); /* min( fs_mult * 64, len ) */
+        w16_tmp = 6 + fs_shift - WebRtcSpl_NormW32(
+            WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax));
+        w16_tmp = WEBRTC_SPL_MAX(w16_tmp, 0);
+        w32_En_speech = WebRtcNetEQ_DotW16W16(pw16_decoded, pw16_decoded, enLen, w16_tmp);
+        w32_En_speech = WebRtcSpl_DivW32W16(w32_En_speech, (WebRtc_Word16) (enLen >> w16_tmp));
+
+        if ((w32_En_speech != 0) && (w32_En_speech > inst->BGNInst.w32_energy))
+        {
+            /* Normalize new frame energy to 15 bits */
+            w16_tmp = WebRtcSpl_NormW32(w32_En_speech) - 16;
+            /* we want inst->BGNInst.energy/En_speech in Q14 */
+            w32_tmp = WEBRTC_SPL_SHIFT_W32(inst->BGNInst.w32_energy, (w16_tmp+14));
+            w16_tmp = (WebRtc_Word16) WEBRTC_SPL_SHIFT_W32(w32_En_speech, w16_tmp);
+            w16_tmp = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_tmp, w16_tmp);
+            w16_muted = (WebRtc_Word16) WebRtcSpl_SqrtFloor(
+                WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32) w16_tmp,
+                    14)); /* w16_muted in Q14 (sqrt(Q28)) */
+        }
+        else
+        {
+            w16_muted = 16384; /* 1.0 in Q14 */
+        }
+        if (w16_muted > inst->w16_muteFactor)
+        {
+            inst->w16_muteFactor = WEBRTC_SPL_MIN(w16_muted, 16384);
+        }
+
+        /* If muted increase by 0.64 for every 20 ms (NB/WB 0.0040/0.0020 in Q14) */
+        w16_inc = WebRtcSpl_DivW32W16ResW16(64, fs_mult);
+        for (i = 0; i < len; i++)
+        {
+            /* scale with mute factor */
+            w32_tmp = WEBRTC_SPL_MUL_16_16(pw16_decoded[i], inst->w16_muteFactor);
+            /* shift 14 with proper rounding */
+            pw16_decoded[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32((w32_tmp + 8192), 14);
+            /* increase mute_factor towards 16384 */
+            inst->w16_muteFactor = WEBRTC_SPL_MIN(16384, (inst->w16_muteFactor+w16_inc));
+        }
+
+        /*
+         * Interpolate the expanded data into the new vector
+         * (NB/WB/SWB32/SWB40 8/16/32/32 samples)
+         */
+        fs_shift = WEBRTC_SPL_MIN(3, fs_shift); /* Set to 3 for >32kHz */
+        w16_inc = 4 >> fs_shift;
+        w16_frac = w16_inc;
+        for (i = 0; i < 8 * fs_mult; i++)
+        {
+            pw16_decoded[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+                (WEBRTC_SPL_MUL_16_16(w16_frac, pw16_decoded[i]) +
+                    WEBRTC_SPL_MUL_16_16((32 - w16_frac), pw16_expanded[i]) + 8),
+                5);
+            w16_frac += w16_inc;
+        }
+
+#ifdef NETEQ_CNG_CODEC
+    }
+    else if (inst->w16_mode==MODE_RFC3389CNG)
+    { /* previous was RFC 3389 CNG...*/
+        WebRtc_Word16 pw16_CngInterp[32];
+        /* Reset mute factor and start up fresh */
+        inst->w16_muteFactor = 16384;
+        if (inst->CNG_Codec_inst != NULL)
+        {
+            /* Generate long enough for 32kHz */
+            if(WebRtcCng_Generate(inst->CNG_Codec_inst,pw16_CngInterp, 32, 0)<0)
+            {
+                /* error returned; set return vector to all zeros */
+                WebRtcSpl_MemSetW16(pw16_CngInterp, 0, 32);
+            }
+        }
+        else
+        {
+            /*
+             * If no CNG instance is defined, just copy from the decoded data.
+             * (This will result in interpolating the decoded with itself.)
+             */
+            WEBRTC_SPL_MEMCPY_W16(pw16_CngInterp, pw16_decoded, fs_mult * 8);
+        }
+        /*
+         * Interpolate the CNG into the new vector
+         * (NB/WB/SWB32kHz/SWB48kHz 8/16/32/32 samples)
+         */
+        fs_shift = WEBRTC_SPL_MIN(3, fs_shift); /* Set to 3 for >32kHz */
+        w16_inc = 4>>fs_shift;
+        w16_frac = w16_inc;
+        for (i = 0; i < 8 * fs_mult; i++)
+        {
+            pw16_decoded[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+                (WEBRTC_SPL_MUL_16_16(w16_frac, pw16_decoded[i]) +
+                    WEBRTC_SPL_MUL_16_16((32-w16_frac), pw16_CngInterp[i]) + 8),
+                5);
+            w16_frac += w16_inc;
+        }
+#endif
+
+    }
+    else if (inst->w16_muteFactor < 16384)
+    {
+        /*
+         * Previous was neither of Expand, FadeToBGN or RFC3389_CNG, but we are still
+         * ramping up from previous muting.
+         * If muted increase by 0.64 for every 20 ms (NB/WB 0.0040/0.0020 in Q14)
+         */
+        w16_inc = WebRtcSpl_DivW32W16ResW16(64, fs_mult);
+        for (i = 0; i < len; i++)
+        {
+            /* scale with mute factor */
+            w32_tmp = WEBRTC_SPL_MUL_16_16(pw16_decoded[i], inst->w16_muteFactor);
+            /* shift 14 with proper rounding */
+            pw16_decoded[i] = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32((w32_tmp + 8192), 14);
+            /* increase mute_factor towards 16384 */
+            inst->w16_muteFactor = WEBRTC_SPL_MIN(16384, (inst->w16_muteFactor+w16_inc));
+        }
+    }
+
+    /* Copy data to other buffer */WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, len);
+
+    inst->w16_mode = MODE_NORMAL;
+    *pw16_len = len;
+    return (len);
+
+}
+
+#undef SCRATCH_PW16_EXPANDED
+#undef SCRATCH_NETEQ_EXPAND
+
diff --git a/src/modules/audio_coding/neteq/packet_buffer.c b/src/modules/audio_coding/neteq/packet_buffer.c
new file mode 100644
index 0000000..8f09b07
--- /dev/null
+++ b/src/modules/audio_coding/neteq/packet_buffer.c
@@ -0,0 +1,750 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Implementation of the actual packet buffer data structure.
+ */
+
+#include "packet_buffer.h"
+
+#include <string.h> /* to define NULL */
+
+#include "signal_processing_library.h"
+
+#include "neteq_error_codes.h"
+
+#ifdef NETEQ_DELAY_LOGGING
+/* special code for offline delay logging */
+#include "delay_logging.h"
+#include <stdio.h>
+
+extern FILE *delay_fid2; /* file pointer to delay log file */
+extern WebRtc_UWord32 tot_received_packets;
+#endif /* NETEQ_DELAY_LOGGING */
+
+
+int WebRtcNetEQ_PacketBufferInit(PacketBuf_t *bufferInst, int maxNoOfPackets,
+                                 WebRtc_Word16 *pw16_memory, int memorySize)
+{
+    int i;
+    int pos = 0;
+
+    /* Sanity check */
+    if ((memorySize < PBUFFER_MIN_MEMORY_SIZE) || (pw16_memory == NULL)
+        || (maxNoOfPackets < 2) || (maxNoOfPackets > 600))
+    {
+        /* Invalid parameters */
+        return (PBUFFER_INIT_ERROR);
+    }
+
+    /* Clear the buffer instance */
+    WebRtcSpl_MemSetW16((WebRtc_Word16*) bufferInst, 0,
+        sizeof(PacketBuf_t) / sizeof(WebRtc_Word16));
+
+    /* Clear the buffer memory */
+    WebRtcSpl_MemSetW16((WebRtc_Word16*) pw16_memory, 0, memorySize);
+
+    /* Set maximum number of packets */
+    bufferInst->maxInsertPositions = maxNoOfPackets;
+
+    /* Initialize array pointers */
+    /* After each pointer has been set, the index pos is advanced to point immediately
+     * after the the recently allocated vector. Note that one step for the pos index
+     * corresponds to a WebRtc_Word16.
+     */
+
+    bufferInst->timeStamp = (WebRtc_UWord32*) &pw16_memory[pos];
+    pos += maxNoOfPackets << 1; /* advance maxNoOfPackets * WebRtc_UWord32 */
+
+    bufferInst->payloadLocation = (WebRtc_Word16**) &pw16_memory[pos];
+    pos += maxNoOfPackets * (sizeof(WebRtc_Word16*) / sizeof(WebRtc_Word16)); /* advance */
+
+    bufferInst->seqNumber = (WebRtc_UWord16*) &pw16_memory[pos];
+    pos += maxNoOfPackets; /* advance maxNoOfPackets * WebRtc_UWord16 */
+
+    bufferInst->payloadType = &pw16_memory[pos];
+    pos += maxNoOfPackets; /* advance maxNoOfPackets * WebRtc_Word16 */
+
+    bufferInst->payloadLengthBytes = &pw16_memory[pos];
+    pos += maxNoOfPackets; /* advance maxNoOfPackets * WebRtc_Word16 */
+
+    bufferInst->rcuPlCntr = &pw16_memory[pos];
+    pos += maxNoOfPackets; /* advance maxNoOfPackets * WebRtc_Word16 */
+
+    bufferInst->waitingTime = (int*) (&pw16_memory[pos]);
+    /* Advance maxNoOfPackets * sizeof(waitingTime element). */
+    pos += maxNoOfPackets *
+        sizeof(*bufferInst->waitingTime) / sizeof(*pw16_memory);
+
+    /* The payload memory starts after the slot arrays */
+    bufferInst->startPayloadMemory = &pw16_memory[pos];
+    bufferInst->currentMemoryPos = bufferInst->startPayloadMemory;
+    bufferInst->memorySizeW16 = (memorySize - pos); /* Remaining memory */
+
+    /* Initialize each payload slot as empty with infinite delay */
+    for (i = 0; i < bufferInst->maxInsertPositions; i++)
+    {
+        bufferInst->payloadType[i] = -1;
+    }
+
+    /* Reset buffer parameters */
+    bufferInst->numPacketsInBuffer = 0;
+    bufferInst->packSizeSamples = 0;
+    bufferInst->insertPosition = 0;
+
+    /* Reset buffer statistics */
+    bufferInst->discardedPackets = 0;
+
+    return (0);
+}
+
+
+int WebRtcNetEQ_PacketBufferFlush(PacketBuf_t *bufferInst)
+{
+    int i;
+
+    /* Sanity check */
+    if (bufferInst->startPayloadMemory == NULL)
+    {
+        /* Packet buffer has not been initialized */
+        /* Don't do the flushing operation, since we do not
+         know the state of the struct variables */
+        return (0);
+    }
+
+    /* Set all payload lengths to zero */
+    WebRtcSpl_MemSetW16(bufferInst->payloadLengthBytes, 0, bufferInst->maxInsertPositions);
+
+    /* Reset buffer variables */
+    bufferInst->numPacketsInBuffer = 0;
+    bufferInst->currentMemoryPos = bufferInst->startPayloadMemory;
+    bufferInst->insertPosition = 0;
+
+    /* Clear all slots, starting with the last one */
+    for (i = (bufferInst->maxInsertPositions - 1); i >= 0; i--)
+    {
+        bufferInst->payloadType[i] = -1;
+        bufferInst->timeStamp[i] = 0;
+        bufferInst->seqNumber[i] = 0;
+    }
+
+    return (0);
+}
+
+
+int WebRtcNetEQ_PacketBufferInsert(PacketBuf_t *bufferInst, const RTPPacket_t *RTPpacket,
+                                   WebRtc_Word16 *flushed)
+{
+    int nextPos;
+    int i;
+
+#ifdef NETEQ_DELAY_LOGGING
+    /* special code for offline delay logging */
+    int temp_var;
+#endif /* NETEQ_DELAY_LOGGING */
+
+    /* Initialize to "no flush" */
+    *flushed = 0;
+
+    /* Sanity check */
+    if (bufferInst->startPayloadMemory == NULL)
+    {
+        /* packet buffer has not been initialized */
+        return (-1);
+    }
+
+    /* Sanity check for payload length
+     (payloadLen in bytes and memory size in WebRtc_Word16) */
+    if ((RTPpacket->payloadLen > (bufferInst->memorySizeW16 << 1)) || (RTPpacket->payloadLen
+        <= 0))
+    {
+        /* faulty or too long payload length */
+        return (-1);
+    }
+
+    /* Find a position in the buffer for this packet */
+    if (bufferInst->numPacketsInBuffer != 0)
+    {
+        /* Get the next slot */
+        bufferInst->insertPosition++;
+        if (bufferInst->insertPosition >= bufferInst->maxInsertPositions)
+        {
+            /* "Wrap around" and start from the beginning */
+            bufferInst->insertPosition = 0;
+        }
+
+        /* Check if there is enough space for the new packet */
+        if (bufferInst->currentMemoryPos + ((RTPpacket->payloadLen + 1) >> 1)
+            >= &bufferInst->startPayloadMemory[bufferInst->memorySizeW16])
+        {
+            WebRtc_Word16 *tempMemAddress;
+
+            /*
+             * Payload does not fit at the end of the memory, put it in the beginning
+             * instead
+             */
+            bufferInst->currentMemoryPos = bufferInst->startPayloadMemory;
+
+            /*
+             * Now, we must search for the next non-empty payload,
+             * finding the one with the lowest start address for the payload
+             */
+            tempMemAddress = &bufferInst->startPayloadMemory[bufferInst->memorySizeW16];
+            nextPos = -1;
+
+            /* Loop through all slots again */
+            for (i = 0; i < bufferInst->maxInsertPositions; i++)
+            {
+                /* Look for the non-empty slot with the lowest
+                 payload location address */
+                if (bufferInst->payloadLengthBytes[i] != 0 && bufferInst->payloadLocation[i]
+                    < tempMemAddress)
+                {
+                    tempMemAddress = bufferInst->payloadLocation[i];
+                    nextPos = i;
+                }
+            }
+
+            /* Check that we did find a previous payload */
+            if (nextPos == -1)
+            {
+                /* The buffer is corrupt => flush and return error */
+                WebRtcNetEQ_PacketBufferFlush(bufferInst);
+                *flushed = 1;
+                return (-1);
+            }
+        }
+        else
+        {
+            /* Payload fits at the end of memory. */
+
+            /* Find the next non-empty slot. */
+            nextPos = bufferInst->insertPosition + 1;
+
+            /* Increase nextPos until a non-empty slot is found or end of array is encountered*/
+            while ((bufferInst->payloadLengthBytes[nextPos] == 0) && (nextPos
+                < bufferInst->maxInsertPositions))
+            {
+                nextPos++;
+            }
+
+            if (nextPos == bufferInst->maxInsertPositions)
+            {
+                /*
+                 * Reached the end of the array, so there must be a packet in the first
+                 * position instead
+                 */
+                nextPos = 0;
+
+                /* Increase nextPos until a non-empty slot is found */
+                while (bufferInst->payloadLengthBytes[nextPos] == 0)
+                {
+                    nextPos++;
+                }
+            }
+        } /* end if-else */
+
+        /*
+         * Check if the new payload will extend into a payload later in memory.
+         * If so, the buffer is full.
+         */
+        if ((bufferInst->currentMemoryPos <= bufferInst->payloadLocation[nextPos])
+            && ((&bufferInst->currentMemoryPos[(RTPpacket->payloadLen + 1) >> 1])
+                > bufferInst->payloadLocation[nextPos]))
+        {
+            /* Buffer is full, so the buffer must be flushed */
+            WebRtcNetEQ_PacketBufferFlush(bufferInst);
+            *flushed = 1;
+        }
+
+        if (bufferInst->payloadLengthBytes[bufferInst->insertPosition] != 0)
+        {
+            /* All positions are already taken and entire buffer should be flushed */
+            WebRtcNetEQ_PacketBufferFlush(bufferInst);
+            *flushed = 1;
+        }
+
+    }
+    else
+    {
+        /* Buffer is empty, just insert the packet at the beginning */
+        bufferInst->currentMemoryPos = bufferInst->startPayloadMemory;
+        bufferInst->insertPosition = 0;
+    }
+
+    /* Insert packet in the found position */
+    if (RTPpacket->starts_byte1 == 0)
+    {
+        /* Payload is 16-bit aligned => just copy it */
+
+        WEBRTC_SPL_MEMCPY_W16(bufferInst->currentMemoryPos,
+            RTPpacket->payload, (RTPpacket->payloadLen + 1) >> 1);
+    }
+    else
+    {
+        /* Payload is not 16-bit aligned => align it during copy operation */
+        for (i = 0; i < RTPpacket->payloadLen; i++)
+        {
+            /* copy the (i+1)-th byte to the i-th byte */
+
+            WEBRTC_SPL_SET_BYTE(bufferInst->currentMemoryPos,
+                (WEBRTC_SPL_GET_BYTE(RTPpacket->payload, (i + 1))), i);
+        }
+    }
+
+    /* Copy the packet information */
+    bufferInst->payloadLocation[bufferInst->insertPosition] = bufferInst->currentMemoryPos;
+    bufferInst->payloadLengthBytes[bufferInst->insertPosition] = RTPpacket->payloadLen;
+    bufferInst->payloadType[bufferInst->insertPosition] = RTPpacket->payloadType;
+    bufferInst->seqNumber[bufferInst->insertPosition] = RTPpacket->seqNumber;
+    bufferInst->timeStamp[bufferInst->insertPosition] = RTPpacket->timeStamp;
+    bufferInst->rcuPlCntr[bufferInst->insertPosition] = RTPpacket->rcuPlCntr;
+    bufferInst->rcuPlCntr[bufferInst->insertPosition] = 0;
+    bufferInst->waitingTime[bufferInst->insertPosition] = 0;
+    /* Update buffer parameters */
+    bufferInst->numPacketsInBuffer++;
+    bufferInst->currentMemoryPos += (RTPpacket->payloadLen + 1) >> 1;
+
+#ifdef NETEQ_DELAY_LOGGING
+    /* special code for offline delay logging */
+    if (*flushed)
+    {
+        temp_var = NETEQ_DELAY_LOGGING_SIGNAL_FLUSH;
+        if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
+          return -1;
+        }
+    }
+    temp_var = NETEQ_DELAY_LOGGING_SIGNAL_RECIN;
+    if ((fwrite(&temp_var, sizeof(int),
+                1, delay_fid2) != 1) ||
+        (fwrite(&RTPpacket->timeStamp, sizeof(WebRtc_UWord32),
+                1, delay_fid2) != 1) ||
+        (fwrite(&RTPpacket->seqNumber, sizeof(WebRtc_UWord16),
+                1, delay_fid2) != 1) ||
+        (fwrite(&RTPpacket->payloadType, sizeof(int),
+                1, delay_fid2) != 1) ||
+        (fwrite(&RTPpacket->payloadLen, sizeof(WebRtc_Word16),
+                1, delay_fid2) != 1)) {
+      return -1;
+    }
+    tot_received_packets++;
+#endif /* NETEQ_DELAY_LOGGING */
+
+    return (0);
+}
+
+
+int WebRtcNetEQ_PacketBufferExtract(PacketBuf_t *bufferInst, RTPPacket_t *RTPpacket,
+                                    int bufferPosition, int *waitingTime)
+{
+
+    /* Sanity check */
+    if (bufferInst->startPayloadMemory == NULL)
+    {
+        /* packet buffer has not been initialized */
+        return (PBUFFER_NOT_INITIALIZED);
+    }
+
+    if (bufferPosition < 0 || bufferPosition >= bufferInst->maxInsertPositions)
+    {
+        /* buffer position is outside valid range */
+        return (NETEQ_OTHER_ERROR);
+    }
+
+    /* Check that there is a valid payload in the specified position */
+    if (bufferInst->payloadLengthBytes[bufferPosition] <= 0)
+    {
+        /* The position does not contain a valid payload */
+        RTPpacket->payloadLen = 0; /* Set zero length */
+        return (PBUFFER_NONEXISTING_PACKET); /* Return error */
+    }
+
+    /* Payload exists => extract payload data */
+
+    /* Copy the actual data payload to RTP packet struct */
+
+    WEBRTC_SPL_MEMCPY_W16((WebRtc_Word16*) RTPpacket->payload,
+        bufferInst->payloadLocation[bufferPosition],
+        (bufferInst->payloadLengthBytes[bufferPosition] + 1) >> 1); /*length in WebRtc_Word16*/
+
+    /* Copy payload parameters */
+    RTPpacket->payloadLen = bufferInst->payloadLengthBytes[bufferPosition];
+    RTPpacket->payloadType = bufferInst->payloadType[bufferPosition];
+    RTPpacket->seqNumber = bufferInst->seqNumber[bufferPosition];
+    RTPpacket->timeStamp = bufferInst->timeStamp[bufferPosition];
+    RTPpacket->rcuPlCntr = bufferInst->rcuPlCntr[bufferPosition];
+    *waitingTime = bufferInst->waitingTime[bufferPosition];
+    RTPpacket->starts_byte1 = 0; /* payload is 16-bit aligned */
+
+    /* Clear the position in the packet buffer */
+    bufferInst->payloadType[bufferPosition] = -1;
+    bufferInst->payloadLengthBytes[bufferPosition] = 0;
+    bufferInst->seqNumber[bufferPosition] = 0;
+    bufferInst->timeStamp[bufferPosition] = 0;
+    bufferInst->waitingTime[bufferPosition] = 0;
+    bufferInst->payloadLocation[bufferPosition] = bufferInst->startPayloadMemory;
+
+    /* Reduce packet counter with one */
+    bufferInst->numPacketsInBuffer--;
+
+    return (0);
+}
+
+int WebRtcNetEQ_PacketBufferFindLowestTimestamp(PacketBuf_t* buffer_inst,
+                                                uint32_t current_time_stamp,
+                                                uint32_t* time_stamp,
+                                                int* buffer_position,
+                                                int erase_old_packets,
+                                                int16_t* payload_type) {
+  int32_t time_stamp_diff = WEBRTC_SPL_WORD32_MAX;  /* Smallest diff found. */
+  int32_t new_diff;
+  int i;
+  int16_t rcu_payload_cntr;
+
+  if (buffer_inst->startPayloadMemory == NULL) {
+    /* Packet buffer has not been initialized. */
+    return PBUFFER_NOT_INITIALIZED;
+  }
+
+  /* Initialize all return values. */
+  *time_stamp = 0;
+  *payload_type = -1;  /* Indicates that no packet was found. */
+  *buffer_position = -1;  /* Indicates that no packet was found. */
+  rcu_payload_cntr = WEBRTC_SPL_WORD16_MAX;  /* Indicates no packet found. */
+
+  /* Check if buffer is empty. */
+  if (buffer_inst->numPacketsInBuffer <= 0) {
+    return 0;
+  }
+
+  /* Loop through all slots in buffer. */
+  if (erase_old_packets) {  /* If old payloads should be discarded. */
+    for (i = 0; i < buffer_inst->maxInsertPositions; ++i) {
+      /* Calculate difference between this slot and current_time_stamp. */
+      new_diff = (int32_t)(buffer_inst->timeStamp[i] - current_time_stamp);
+
+      /* Check if payload should be discarded. */
+      if ((new_diff < 0)  /* Payload is too old */
+          && (new_diff > -30000)  /* Account for TS wrap-around. */
+          && (buffer_inst->payloadLengthBytes[i] > 0)) {  /* Payload exists. */
+        /* Throw away old packet. */
+
+        /* Clear the position in the buffer. */
+        buffer_inst->payloadType[i] = -1;
+        buffer_inst->payloadLengthBytes[i] = 0;
+
+        /* Reduce packet counter by one. */
+        buffer_inst->numPacketsInBuffer--;
+        /* Increase discard counter for in-call statistics. */
+        buffer_inst->discardedPackets++;
+      } else if (((new_diff < time_stamp_diff) 
+                  || ((new_diff == time_stamp_diff)
+                      && (buffer_inst->rcuPlCntr[i] < rcu_payload_cntr)))
+                      && (buffer_inst->payloadLengthBytes[i] > 0)) {
+        /* New diff is smaller than previous diffs or we have a candidate with a
+         * time stamp as previous candidate but better RCU-counter; 
+         * and the payload exists. 
+         */ 
+        /* Save this position as the best candidate. */
+        *buffer_position = i;
+        time_stamp_diff = new_diff;
+        *payload_type = buffer_inst->payloadType[i];
+        rcu_payload_cntr = buffer_inst->rcuPlCntr[i];
+      }
+    }
+  } else {
+    for (i = 0; i < buffer_inst->maxInsertPositions; ++i) {
+      /* Calculate difference between this slot and current_time_stamp. */
+      new_diff = (int32_t)(buffer_inst->timeStamp[i] - current_time_stamp);
+
+      /* Check if this is the oldest packet. */
+      if (((new_diff < time_stamp_diff) 
+           || ((new_diff == time_stamp_diff)
+               && (buffer_inst->rcuPlCntr[i] < rcu_payload_cntr)))
+               && (buffer_inst->payloadLengthBytes[i] > 0)) {
+        /* New diff is smaller than previous diffs or we have a candidate with a
+         * time_stamp as previous candidate but better RCU-counter; 
+         * and the payload exists. 
+         */ 
+        /* Save this position as the best candidate. */
+        *buffer_position = i;
+        time_stamp_diff = new_diff;
+        *payload_type = buffer_inst->payloadType[i];
+        rcu_payload_cntr = buffer_inst->rcuPlCntr[i];
+      }
+    }
+  }
+
+  /* Check that we did find a real position. */
+  if (*buffer_position >= 0) {
+    /* Get the time_stamp for the best position. */
+    *time_stamp = buffer_inst->timeStamp[*buffer_position];
+  }
+
+  return 0;
+}
+
+WebRtc_Word32 WebRtcNetEQ_PacketBufferGetSize(const PacketBuf_t *bufferInst)
+{
+    int i, count;
+    WebRtc_Word32 sizeSamples;
+
+    count = 0;
+
+    /* Loop through all slots in the buffer */
+    for (i = 0; i < bufferInst->maxInsertPositions; i++)
+    {
+        /* Only count the packets with non-zero size */
+        if (bufferInst->payloadLengthBytes[i] != 0)
+        {
+            count++;
+        }
+    }
+
+    /*
+     * Calculate buffer size as number of packets times packet size
+     * (packet size is that of the latest decoded packet)
+     */
+    sizeSamples = WEBRTC_SPL_MUL_16_16(bufferInst->packSizeSamples, count);
+
+    /* Sanity check; size cannot be negative */
+    if (sizeSamples < 0)
+    {
+        sizeSamples = 0;
+    }
+
+    return sizeSamples;
+}
+
+void WebRtcNetEQ_IncrementWaitingTimes(PacketBuf_t *buffer_inst) {
+  int i;
+  /* Loop through all slots in the buffer. */
+  for (i = 0; i < buffer_inst->maxInsertPositions; ++i) {
+    /* Only increment waiting time for the packets with non-zero size. */
+    if (buffer_inst->payloadLengthBytes[i] != 0) {
+      buffer_inst->waitingTime[i]++;
+    }
+  }
+}
+
+int WebRtcNetEQ_GetDefaultCodecSettings(const enum WebRtcNetEQDecoder *codecID,
+                                        int noOfCodecs, int *maxBytes, int *maxSlots)
+{
+    int i;
+    int ok = 0;
+    WebRtc_Word16 w16_tmp;
+    WebRtc_Word16 codecBytes;
+    WebRtc_Word16 codecBuffers;
+
+    /* Initialize return variables to zero */
+    *maxBytes = 0;
+    *maxSlots = 0;
+
+    /* Loop through all codecs supplied to function */
+    for (i = 0; i < noOfCodecs; i++)
+    {
+        /* Find current codec and set parameters accordingly */
+
+        if ((codecID[i] == kDecoderPCMu) || (codecID[i] == kDecoderPCMu_2ch))
+        {
+            codecBytes = 1680; /* Up to 210ms @ 64kbps */
+            codecBuffers = 30; /* Down to 5ms frames */
+        }
+        else if ((codecID[i] == kDecoderPCMa) ||
+            (codecID[i] == kDecoderPCMa_2ch))
+        {
+            codecBytes = 1680; /* Up to 210ms @ 64kbps */
+            codecBuffers = 30; /* Down to 5ms frames */
+        }
+        else if (codecID[i] == kDecoderILBC)
+        {
+            codecBytes = 380; /* 200ms @ 15.2kbps (20ms frames) */
+            codecBuffers = 10;
+        }
+        else if (codecID[i] == kDecoderISAC)
+        {
+            codecBytes = 960; /* 240ms @ 32kbps (60ms frames) */
+            codecBuffers = 8;
+        }
+        else if (codecID[i] == kDecoderISACswb)
+        {
+            codecBytes = 1560; /* 240ms @ 52kbps (30ms frames) */
+            codecBuffers = 8;
+        }
+        else if ((codecID[i] == kDecoderPCM16B) ||
+            (codecID[i] == kDecoderPCM16B_2ch))
+        {
+            codecBytes = 3360; /* 210ms */
+            codecBuffers = 15;
+        }
+        else if ((codecID[i] == kDecoderPCM16Bwb) ||
+            (codecID[i] == kDecoderPCM16Bwb_2ch))
+        {
+            codecBytes = 6720; /* 210ms */
+            codecBuffers = 15;
+        }
+        else if ((codecID[i] == kDecoderPCM16Bswb32kHz) ||
+            (codecID[i] == kDecoderPCM16Bswb32kHz_2ch))
+        {
+            codecBytes = 13440; /* 210ms */
+            codecBuffers = 15;
+        }
+        else if (codecID[i] == kDecoderPCM16Bswb48kHz)
+        {
+            codecBytes = 20160; /* 210ms */
+            codecBuffers = 15;
+        }
+        else if ((codecID[i] == kDecoderG722) ||
+            (codecID[i] == kDecoderG722_2ch))
+        {
+            codecBytes = 1680; /* 210ms @ 64kbps */
+            codecBuffers = 15;
+        }
+        else if (codecID[i] == kDecoderRED)
+        {
+            codecBytes = 0; /* Should not be max... */
+            codecBuffers = 0;
+        }
+        else if (codecID[i] == kDecoderAVT)
+        {
+            codecBytes = 0; /* Should not be max... */
+            codecBuffers = 0;
+        }
+        else if (codecID[i] == kDecoderCNG)
+        {
+            codecBytes = 0; /* Should not be max... */
+            codecBuffers = 0;
+        }
+        else if (codecID[i] == kDecoderG729)
+        {
+            codecBytes = 210; /* 210ms @ 8kbps */
+            codecBuffers = 20; /* max 200ms supported for 10ms frames */
+        }
+        else if (codecID[i] == kDecoderG729_1)
+        {
+            codecBytes = 840; /* 210ms @ 32kbps */
+            codecBuffers = 10; /* max 200ms supported for 20ms frames */
+        }
+        else if (codecID[i] == kDecoderG726_16)
+        {
+            codecBytes = 400; /* 200ms @ 16kbps */
+            codecBuffers = 10;
+        }
+        else if (codecID[i] == kDecoderG726_24)
+        {
+            codecBytes = 600; /* 200ms @ 24kbps */
+            codecBuffers = 10;
+        }
+        else if (codecID[i] == kDecoderG726_32)
+        {
+            codecBytes = 800; /* 200ms @ 32kbps */
+            codecBuffers = 10;
+        }
+        else if (codecID[i] == kDecoderG726_40)
+        {
+            codecBytes = 1000; /* 200ms @ 40kbps */
+            codecBuffers = 10;
+        }
+        else if (codecID[i] == kDecoderG722_1_16)
+        {
+            codecBytes = 420; /* 210ms @ 16kbps */
+            codecBuffers = 10;
+        }
+        else if (codecID[i] == kDecoderG722_1_24)
+        {
+            codecBytes = 630; /* 210ms @ 24kbps */
+            codecBuffers = 10;
+        }
+        else if (codecID[i] == kDecoderG722_1_32)
+        {
+            codecBytes = 840; /* 210ms @ 32kbps */
+            codecBuffers = 10;
+        }
+        else if (codecID[i] == kDecoderG722_1C_24)
+        {
+            codecBytes = 630; /* 210ms @ 24kbps */
+            codecBuffers = 10;
+        }
+        else if (codecID[i] == kDecoderG722_1C_32)
+        {
+            codecBytes = 840; /* 210ms @ 32kbps */
+            codecBuffers = 10;
+        }
+        else if (codecID[i] == kDecoderG722_1C_48)
+        {
+            codecBytes = 1260; /* 210ms @ 48kbps */
+            codecBuffers = 10;
+        }
+        else if (codecID[i] == kDecoderSPEEX_8)
+        {
+            codecBytes = 1250; /* 210ms @ 50kbps */
+            codecBuffers = 10;
+        }
+        else if (codecID[i] == kDecoderSPEEX_16)
+        {
+            codecBytes = 1250; /* 210ms @ 50kbps */
+            codecBuffers = 10;
+        }
+        else if ((codecID[i] == kDecoderCELT_32) ||
+            (codecID[i] == kDecoderCELT_32_2ch))
+        {
+            codecBytes = 1250; /* 210ms @ 50kbps */
+            codecBuffers = 10;
+        }
+        else if (codecID[i] == kDecoderGSMFR)
+        {
+            codecBytes = 340; /* 200ms */
+            codecBuffers = 10;
+        }
+        else if (codecID[i] == kDecoderAMR)
+        {
+            codecBytes = 384; /* 240ms @ 12.2kbps+headers (60ms frames) */
+            codecBuffers = 10;
+        }
+        else if (codecID[i] == kDecoderAMRWB)
+        {
+            codecBytes = 744;
+            codecBuffers = 10;
+        }
+        else if (codecID[i] == kDecoderArbitrary)
+        {
+            codecBytes = 6720; /* Assume worst case uncompressed WB 210ms */
+            codecBuffers = 15;
+        }
+        else
+        {
+            /*Unknow codec */
+            codecBytes = 0;
+            codecBuffers = 0;
+            ok = CODEC_DB_UNKNOWN_CODEC;
+        }
+
+        /* Update max variables */
+        *maxBytes = WEBRTC_SPL_MAX((*maxBytes), codecBytes);
+        *maxSlots = WEBRTC_SPL_MAX((*maxSlots), codecBuffers);
+
+    } /* end of for loop */
+
+    /*
+     * Add size needed by the additional pointers for each slot inside struct,
+     * as indicated on each line below.
+     */
+    w16_tmp = (sizeof(WebRtc_UWord32) /* timeStamp */
+    + sizeof(WebRtc_Word16*) /* payloadLocation */
+    + sizeof(WebRtc_UWord16) /* seqNumber */
+    + sizeof(WebRtc_Word16)  /* payloadType */
+    + sizeof(WebRtc_Word16)  /* payloadLengthBytes */
+    + sizeof(WebRtc_Word16)  /* rcuPlCntr   */
+    + sizeof(int));          /* waitingTime */
+    /* Add the extra size per slot to the memory count */
+    *maxBytes += w16_tmp * (*maxSlots);
+
+    return ok;
+}
diff --git a/src/modules/audio_coding/neteq/packet_buffer.h b/src/modules/audio_coding/neteq/packet_buffer.h
new file mode 100644
index 0000000..662f8af
--- /dev/null
+++ b/src/modules/audio_coding/neteq/packet_buffer.h
@@ -0,0 +1,220 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Interface for the actual packet buffer data structure.
+ */
+
+#ifndef PACKET_BUFFER_H
+#define PACKET_BUFFER_H
+
+#include "typedefs.h"
+
+#include "webrtc_neteq.h"
+#include "rtp.h"
+
+/* Define minimum allowed buffer memory, in 16-bit words */
+#define PBUFFER_MIN_MEMORY_SIZE	150
+
+/****************************/
+/* The packet buffer struct */
+/****************************/
+
+typedef struct
+{
+
+    /* Variables common to the entire buffer */
+    WebRtc_UWord16 packSizeSamples; /* packet size in samples of last decoded packet */
+    WebRtc_Word16 *startPayloadMemory; /* pointer to the payload memory */
+    int memorySizeW16; /* the size (in WebRtc_Word16) of the payload memory */
+    WebRtc_Word16 *currentMemoryPos; /* The memory position to insert next payload */
+    int numPacketsInBuffer; /* The number of packets in the buffer */
+    int insertPosition; /* The position to insert next packet */
+    int maxInsertPositions; /* Maximum number of packets allowed */
+
+    /* Arrays with one entry per packet slot */
+    /* NOTE: If these are changed, the changes must be accounted for at the end of
+     the function WebRtcNetEQ_GetDefaultCodecSettings(). */
+    WebRtc_UWord32 *timeStamp; /* Timestamp in slot n */
+    WebRtc_Word16 **payloadLocation; /* Memory location of payload in slot n */
+    WebRtc_UWord16 *seqNumber; /* Sequence number in slot n */
+    WebRtc_Word16 *payloadType; /* Payload type of packet in slot n */
+    WebRtc_Word16 *payloadLengthBytes; /* Payload length of packet in slot n */
+    WebRtc_Word16 *rcuPlCntr; /* zero for non-RCU payload, 1 for main payload
+     2 for redundant payload */
+    int *waitingTime;
+
+
+    /* Statistics counter */
+    WebRtc_UWord16 discardedPackets; /* Number of discarded packets */
+
+} PacketBuf_t;
+
+/*************************/
+/* Function declarations */
+/*************************/
+
+/****************************************************************************
+ * WebRtcNetEQ_PacketBufferInit(...)
+ *
+ * This function initializes the packet buffer.
+ *
+ * Input:
+ *		- bufferInst	: Buffer instance to be initialized
+ *		- noOfPackets	: Maximum number of packets that buffer should hold
+ *		- memory		: Pointer to the storage memory for the payloads
+ *		- memorySize	: The size of the payload memory (in WebRtc_Word16)
+ *
+ * Output:
+ *      - bufferInst    : Updated buffer instance
+ *
+ * Return value			:  0 - Ok
+ *						  <0 - Error
+ */
+
+int WebRtcNetEQ_PacketBufferInit(PacketBuf_t *bufferInst, int maxNoOfPackets,
+                                 WebRtc_Word16 *pw16_memory, int memorySize);
+
+/****************************************************************************
+ * WebRtcNetEQ_PacketBufferFlush(...)
+ *
+ * This function flushes all the packets in the buffer.
+ *
+ * Input:
+ *		- bufferInst	: Buffer instance to be flushed
+ *
+ * Output:
+ *      - bufferInst    : Flushed buffer instance
+ *
+ * Return value			:  0 - Ok
+ */
+
+int WebRtcNetEQ_PacketBufferFlush(PacketBuf_t *bufferInst);
+
+/****************************************************************************
+ * WebRtcNetEQ_PacketBufferInsert(...)
+ *
+ * This function inserts an RTP packet into the packet buffer.
+ *
+ * Input:
+ *		- bufferInst	: Buffer instance
+ *		- RTPpacket		: An RTP packet struct (with payload, sequence
+ *						  number, etc.)
+ *
+ * Output:
+ *      - bufferInst    : Updated buffer instance
+ *		- flushed		: 1 if buffer was flushed, 0 otherwise
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_PacketBufferInsert(PacketBuf_t *bufferInst, const RTPPacket_t *RTPpacket,
+                                   WebRtc_Word16 *flushed);
+
+/****************************************************************************
+ * WebRtcNetEQ_PacketBufferExtract(...)
+ *
+ * This function extracts a payload from the buffer.
+ *
+ * Input:
+ *		- bufferInst	: Buffer instance
+ *		- bufferPosition: Position of the packet that should be extracted
+ *
+ * Output:
+ *		- RTPpacket		: An RTP packet struct (with payload, sequence 
+ *						  number, etc)
+ *      - bufferInst    : Updated buffer instance
+ *
+ * Return value			:  0 - Ok
+ *						  <0 - Error
+ */
+
+int WebRtcNetEQ_PacketBufferExtract(PacketBuf_t *bufferInst, RTPPacket_t *RTPpacket,
+                                    int bufferPosition, int *waitingTime);
+
+/****************************************************************************
+ * WebRtcNetEQ_PacketBufferFindLowestTimestamp(...)
+ *
+ * This function finds the next packet with the lowest timestamp.
+ *
+ * Input:
+ *       - buffer_inst        : Buffer instance.
+ *       - current_time_stamp : The timestamp to compare packet timestamps with.
+ *       - erase_old_packets  : If non-zero, erase packets older than currentTS.
+ *
+ * Output:
+ *       - time_stamp         : Lowest timestamp that was found.
+ *       - buffer_position    : Position of this packet (-1 if there are no
+ *                              packets in the buffer).
+ *       - payload_type       : Payload type of the found payload.
+ *
+ * Return value               :  0 - Ok;
+ *                             < 0 - Error.
+ */
+
+int WebRtcNetEQ_PacketBufferFindLowestTimestamp(PacketBuf_t* buffer_inst,
+                                                uint32_t current_time_stamp,
+                                                uint32_t* time_stamp,
+                                                int* buffer_position,
+                                                int erase_old_packets,
+                                                int16_t* payload_type);
+
+/****************************************************************************
+ * WebRtcNetEQ_PacketBufferGetSize(...)
+ *
+ * Calculate and return an estimate of the total data length (in samples)
+ * currently in the buffer. The estimate is calculated as the number of
+ * packets currently in the buffer (which does not have any remaining waiting
+ * time), multiplied with the number of samples obtained from the last
+ * decoded packet.
+ *
+ * Input:
+ *		- bufferInst	: Buffer instance
+ *
+ * Return value			: The buffer size in samples
+ */
+
+WebRtc_Word32 WebRtcNetEQ_PacketBufferGetSize(const PacketBuf_t *bufferInst);
+
+/****************************************************************************
+ * WebRtcNetEQ_IncrementWaitingTimes(...)
+ *
+ * Increment the waiting time for all packets in the buffer by one.
+ *
+ * Input:
+ *    - bufferInst  : Buffer instance
+ *
+ * Return value     : n/a
+ */
+
+void WebRtcNetEQ_IncrementWaitingTimes(PacketBuf_t *buffer_inst);
+
+/****************************************************************************
+ * WebRtcNetEQ_GetDefaultCodecSettings(...)
+ *
+ * Calculates a recommended buffer size for a specific set of codecs.
+ *
+ * Input:
+ *		- codecID	    : An array of codec types that will be used
+ *      - noOfCodecs    : Number of codecs in array codecID
+ *
+ * Output:
+ *		- maxBytes	    : Recommended buffer memory size in bytes
+ *      - maxSlots      : Recommended number of slots in buffer
+ *
+ * Return value			:  0 - Ok
+ *						  <0 - Error
+ */
+
+int WebRtcNetEQ_GetDefaultCodecSettings(const enum WebRtcNetEQDecoder *codecID,
+                                        int noOfCodecs, int *maxBytes, int *maxSlots);
+
+#endif /* PACKET_BUFFER_H */
diff --git a/src/modules/audio_coding/neteq/peak_detection.c b/src/modules/audio_coding/neteq/peak_detection.c
new file mode 100644
index 0000000..678c7f9
--- /dev/null
+++ b/src/modules/audio_coding/neteq/peak_detection.c
@@ -0,0 +1,232 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Implementation of the peak detection used for finding correlation peaks.
+ */
+
+#include "dsp_helpfunctions.h"
+
+#include "signal_processing_library.h"
+
+/* Table of constants used in parabolic fit function WebRtcNetEQ_PrblFit */
+const WebRtc_Word16 WebRtcNetEQ_kPrblCf[17][3] = { { 120, 32, 64 }, { 140, 44, 75 },
+                                                    { 150, 50, 80 }, { 160, 57, 85 },
+                                                    { 180, 72, 96 }, { 200, 89, 107 },
+                                                    { 210, 98, 112 }, { 220, 108, 117 },
+                                                    { 240, 128, 128 }, { 260, 150, 139 },
+                                                    { 270, 162, 144 }, { 280, 174, 149 },
+                                                    { 300, 200, 160 }, { 320, 228, 171 },
+                                                    { 330, 242, 176 }, { 340, 257, 181 },
+                                                    { 360, 288, 192 } };
+
+WebRtc_Word16 WebRtcNetEQ_PeakDetection(WebRtc_Word16 *pw16_data, WebRtc_Word16 w16_dataLen,
+                                        WebRtc_Word16 w16_nmbPeaks, WebRtc_Word16 fs_mult,
+                                        WebRtc_Word16 *pw16_winIndex,
+                                        WebRtc_Word16 *pw16_winValue)
+{
+    /* Local variables */
+    int i;
+    WebRtc_Word16 w16_tmp;
+    WebRtc_Word16 w16_tmp2;
+    WebRtc_Word16 indMin = 0;
+    WebRtc_Word16 indMax = 0;
+
+    /* Peak detection */
+
+    for (i = 0; i <= (w16_nmbPeaks - 1); i++)
+    {
+        if (w16_nmbPeaks == 1)
+        {
+            /*
+             * Single peak
+             * The parabola fit assumes that an extra point is available; worst case it gets
+             * a zero on the high end of the signal.
+             */
+            w16_dataLen++;
+        }
+
+        pw16_winIndex[i] = WebRtcSpl_MaxIndexW16(pw16_data, (WebRtc_Word16) (w16_dataLen - 1));
+
+        if (i != w16_nmbPeaks - 1)
+        {
+            w16_tmp = pw16_winIndex[i] - 2; /* *fs_mult; */
+            indMin = WEBRTC_SPL_MAX(0, w16_tmp);
+            w16_tmp = pw16_winIndex[i] + 2; /* *fs_mult; */
+            w16_tmp2 = w16_dataLen - 1;
+            indMax = WEBRTC_SPL_MIN(w16_tmp2, w16_tmp);
+        }
+
+        if ((pw16_winIndex[i] != 0) && (pw16_winIndex[i] != (w16_dataLen - 2)))
+        {
+            /* Parabola fit*/
+            WebRtcNetEQ_PrblFit(&(pw16_data[pw16_winIndex[i] - 1]), &(pw16_winIndex[i]),
+                &(pw16_winValue[i]), fs_mult);
+        }
+        else
+        {
+            if (pw16_winIndex[i] == (w16_dataLen - 2))
+            {
+                if (pw16_data[pw16_winIndex[i]] > pw16_data[pw16_winIndex[i] + 1])
+                {
+                    WebRtcNetEQ_PrblFit(&(pw16_data[pw16_winIndex[i] - 1]),
+                        &(pw16_winIndex[i]), &(pw16_winValue[i]), fs_mult);
+                }
+                else if (pw16_data[pw16_winIndex[i]] <= pw16_data[pw16_winIndex[i] + 1])
+                {
+                    pw16_winValue[i] = (pw16_data[pw16_winIndex[i]]
+                        + pw16_data[pw16_winIndex[i] + 1]) >> 1; /* lin approx */
+                    pw16_winIndex[i] = (pw16_winIndex[i] * 2 + 1) * fs_mult;
+                }
+            }
+            else
+            {
+                pw16_winValue[i] = pw16_data[pw16_winIndex[i]];
+                pw16_winIndex[i] = pw16_winIndex[i] * 2 * fs_mult;
+            }
+        }
+
+        if (i != w16_nmbPeaks - 1)
+        {
+            WebRtcSpl_MemSetW16(&(pw16_data[indMin]), 0, (indMax - indMin + 1));
+            /* for (j=indMin; j<=indMax; j++) pw16_data[j] = 0; */
+        }
+    }
+
+    return 0;
+}
+
+WebRtc_Word16 WebRtcNetEQ_PrblFit(WebRtc_Word16 *pw16_3pts, WebRtc_Word16 *pw16_Ind,
+                                  WebRtc_Word16 *pw16_outVal, WebRtc_Word16 fs_mult)
+{
+    /* Variables */
+    WebRtc_Word32 Num, Den;
+    WebRtc_Word32 temp;
+    WebRtc_Word16 flag, stp, strt, lmt;
+    WebRtc_UWord16 PFind[13];
+
+    if (fs_mult == 1)
+    {
+        PFind[0] = 0;
+        PFind[1] = 8;
+        PFind[2] = 16;
+    }
+    else if (fs_mult == 2)
+    {
+        PFind[0] = 0;
+        PFind[1] = 4;
+        PFind[2] = 8;
+        PFind[3] = 12;
+        PFind[4] = 16;
+    }
+    else if (fs_mult == 4)
+    {
+        PFind[0] = 0;
+        PFind[1] = 2;
+        PFind[2] = 4;
+        PFind[3] = 6;
+        PFind[4] = 8;
+        PFind[5] = 10;
+        PFind[6] = 12;
+        PFind[7] = 14;
+        PFind[8] = 16;
+    }
+    else
+    {
+        PFind[0] = 0;
+        PFind[1] = 1;
+        PFind[2] = 3;
+        PFind[3] = 4;
+        PFind[4] = 5;
+        PFind[5] = 7;
+        PFind[6] = 8;
+        PFind[7] = 9;
+        PFind[8] = 11;
+        PFind[9] = 12;
+        PFind[10] = 13;
+        PFind[11] = 15;
+        PFind[12] = 16;
+    }
+
+    /*	Num = -3*pw16_3pts[0] + 4*pw16_3pts[1] - pw16_3pts[2]; */
+    /*	Den =    pw16_3pts[0] - 2*pw16_3pts[1] + pw16_3pts[2]; */
+    Num = WEBRTC_SPL_MUL_16_16(pw16_3pts[0],-3) + WEBRTC_SPL_MUL_16_16(pw16_3pts[1],4)
+        - pw16_3pts[2];
+
+    Den = pw16_3pts[0] + WEBRTC_SPL_MUL_16_16(pw16_3pts[1],-2) + pw16_3pts[2];
+
+    temp = (WebRtc_Word32) WEBRTC_SPL_MUL(Num, (WebRtc_Word32)120); /* need 32_16 really */
+    flag = 1;
+    stp = WebRtcNetEQ_kPrblCf[PFind[fs_mult]][0] - WebRtcNetEQ_kPrblCf[PFind[fs_mult - 1]][0];
+    strt = (WebRtcNetEQ_kPrblCf[PFind[fs_mult]][0]
+        + WebRtcNetEQ_kPrblCf[PFind[fs_mult - 1]][0]) >> 1;
+
+    if (temp < (WebRtc_Word32) WEBRTC_SPL_MUL(-Den,(WebRtc_Word32)strt))
+    {
+        lmt = strt - stp;
+        while (flag)
+        {
+            if ((flag == fs_mult) || (temp
+                > (WebRtc_Word32) WEBRTC_SPL_MUL(-Den,(WebRtc_Word32)lmt)))
+            {
+                *pw16_outVal
+                    = (WebRtc_Word16)
+                    (((WebRtc_Word32) ((WebRtc_Word32) WEBRTC_SPL_MUL(Den,(WebRtc_Word32)WebRtcNetEQ_kPrblCf[PFind[fs_mult-flag]][1])
+                        + (WebRtc_Word32) WEBRTC_SPL_MUL(Num,(WebRtc_Word32)WebRtcNetEQ_kPrblCf[PFind[fs_mult-flag]][2])
+                        + WEBRTC_SPL_MUL_16_16(pw16_3pts[0],256))) >> 8);
+                *pw16_Ind = (*pw16_Ind) * (fs_mult << 1) - flag;
+                flag = 0;
+            }
+            else
+            {
+                flag++;
+                lmt -= stp;
+            }
+        }
+    }
+    else if (temp > (WebRtc_Word32) WEBRTC_SPL_MUL(-Den,(WebRtc_Word32)(strt+stp)))
+    {
+        lmt = strt + (stp << 1);
+        while (flag)
+        {
+            if ((flag == fs_mult) || (temp
+                < (WebRtc_Word32) WEBRTC_SPL_MUL(-Den,(WebRtc_Word32)lmt)))
+            {
+                WebRtc_Word32 temp_term_1, temp_term_2, temp_term_3;
+
+                temp_term_1 = WEBRTC_SPL_MUL(Den,
+                    (WebRtc_Word32) WebRtcNetEQ_kPrblCf[PFind[fs_mult+flag]][1]);
+                temp_term_2 = WEBRTC_SPL_MUL(Num,
+                    (WebRtc_Word32) WebRtcNetEQ_kPrblCf[PFind[fs_mult+flag]][2]);
+                temp_term_3 = WEBRTC_SPL_MUL_16_16(pw16_3pts[0],256);
+
+                *pw16_outVal
+                    = (WebRtc_Word16) ((temp_term_1 + temp_term_2 + temp_term_3) >> 8);
+
+                *pw16_Ind = (*pw16_Ind) * (fs_mult << 1) + flag;
+                flag = 0;
+            }
+            else
+            {
+                flag++;
+                lmt += stp;
+            }
+        }
+
+    }
+    else
+    {
+        *pw16_outVal = pw16_3pts[1];
+        *pw16_Ind = (*pw16_Ind) * 2 * fs_mult;
+    }
+
+    return 0;
+}
+
diff --git a/src/modules/audio_coding/neteq/preemptive_expand.c b/src/modules/audio_coding/neteq/preemptive_expand.c
new file mode 100644
index 0000000..167bc3a
--- /dev/null
+++ b/src/modules/audio_coding/neteq/preemptive_expand.c
@@ -0,0 +1,524 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the Pre-emptive Expand algorithm that is used to increase
+ * the delay by repeating a part of the audio stream.
+ */
+
+#include "dsp.h"
+
+#include "signal_processing_library.h"
+
+#include "dsp_helpfunctions.h"
+#include "neteq_error_codes.h"
+
+#define PREEMPTIVE_CORR_LEN 50
+#define PREEMPTIVE_MIN_LAG 10
+#define PREEMPTIVE_MAX_LAG 60
+#define PREEMPTIVE_DOWNSAMPLED_LEN (PREEMPTIVE_CORR_LEN + PREEMPTIVE_MAX_LAG)
+
+/* Scratch usage:
+
+ Type             Name                 size            startpos         endpos
+ WebRtc_Word16    pw16_downSampSpeech  110             0                109
+ WebRtc_Word32    pw32_corr            2*50            110              209
+ WebRtc_Word16    pw16_corr            50              0                49
+
+ Total: 110+2*50
+ */
+
+#define     SCRATCH_PW16_DS_SPEECH           0
+#define     SCRATCH_PW32_CORR                PREEMPTIVE_DOWNSAMPLED_LEN
+#define     SCRATCH_PW16_CORR                0
+
+/****************************************************************************
+ * WebRtcNetEQ_PreEmptiveExpand(...)
+ *
+ * This function tries to extend the audio data by repeating one or several
+ * pitch periods. The operation is only carried out if the correlation is
+ * strong or if the signal energy is very low. The algorithm is the
+ * reciprocal of the Accelerate algorithm.
+ *
+ * Input:
+ *      - inst          : NetEQ DSP instance
+ *      - scratchPtr    : Pointer to scratch vector.
+ *      - decoded       : Pointer to newly decoded speech.
+ *      - len           : Length of decoded speech.
+ *      - oldDataLen    : Length of the part of decoded that has already been played out.
+ *      - BGNonly       : If non-zero, Pre-emptive Expand will only copy 
+ *                        the first DEFAULT_TIME_ADJUST seconds of the
+ *                        input and append to the end. No signal matching is
+ *                        done.
+ *
+ * Output:
+ *      - inst          : Updated instance
+ *      - outData       : Pointer to a memory space where the output data
+ *                        should be stored. The vector must be at least
+ *                        min(len + 120*fs/8000, NETEQ_MAX_OUTPUT_SIZE)
+ *                        elements long.
+ *      - pw16_len      : Number of samples written to outData.
+ *
+ * Return value         :  0 - Ok
+ *                        <0 - Error
+ */
+
+int WebRtcNetEQ_PreEmptiveExpand(DSPInst_t *inst,
+#ifdef SCRATCH
+                                 WebRtc_Word16 *pw16_scratchPtr,
+#endif
+                                 const WebRtc_Word16 *pw16_decoded, int len, int oldDataLen,
+                                 WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len,
+                                 WebRtc_Word16 BGNonly)
+{
+
+#ifdef SCRATCH
+    /* Use scratch memory for internal temporary vectors */
+    WebRtc_Word16 *pw16_downSampSpeech = pw16_scratchPtr + SCRATCH_PW16_DS_SPEECH;
+    WebRtc_Word32 *pw32_corr = (WebRtc_Word32*) (pw16_scratchPtr + SCRATCH_PW32_CORR);
+    WebRtc_Word16 *pw16_corr = pw16_scratchPtr + SCRATCH_PW16_CORR;
+#else
+    /* Allocate memory for temporary vectors */
+    WebRtc_Word16 pw16_downSampSpeech[PREEMPTIVE_DOWNSAMPLED_LEN];
+    WebRtc_Word32 pw32_corr[PREEMPTIVE_CORR_LEN];
+    WebRtc_Word16 pw16_corr[PREEMPTIVE_CORR_LEN];
+#endif
+    WebRtc_Word16 w16_decodedMax = 0;
+    WebRtc_Word16 w16_tmp = 0;
+    WebRtc_Word16 w16_tmp2;
+    WebRtc_Word32 w32_tmp;
+    WebRtc_Word32 w32_tmp2;
+
+    const WebRtc_Word16 w16_startLag = PREEMPTIVE_MIN_LAG;
+    const WebRtc_Word16 w16_endLag = PREEMPTIVE_MAX_LAG;
+    const WebRtc_Word16 w16_corrLen = PREEMPTIVE_CORR_LEN;
+    const WebRtc_Word16 *pw16_vec1, *pw16_vec2;
+    WebRtc_Word16 *pw16_vectmp;
+    WebRtc_Word16 w16_inc, w16_startfact;
+    WebRtc_Word16 w16_bestIndex, w16_bestVal;
+    WebRtc_Word16 w16_VAD = 1;
+    WebRtc_Word16 fsMult;
+    WebRtc_Word16 fsMult120;
+    WebRtc_Word32 w32_en1, w32_en2, w32_cc;
+    WebRtc_Word16 w16_en1, w16_en2;
+    WebRtc_Word16 w16_en1Scale, w16_en2Scale;
+    WebRtc_Word16 w16_sqrtEn1En2;
+    WebRtc_Word16 w16_bestCorr = 0;
+    int ok;
+
+#ifdef NETEQ_STEREO
+    MasterSlaveInfo *msInfo = inst->msInfo;
+#endif
+
+    fsMult = WebRtcNetEQ_CalcFsMult(inst->fs); /* Calculate fs/8000 */
+
+    /* Pre-calculate common multiplication with fsMult */
+    fsMult120 = (WebRtc_Word16) WEBRTC_SPL_MUL_16_16(fsMult, 120); /* 15 ms */
+
+    inst->ExpandInst.w16_consecExp = 0; /* Last was not expand any more */
+
+    /*
+     * Sanity check for len variable; must be (almost) 30 ms (120*fsMult + max(bestIndex)).
+     * Also, the new part must be at least .625 ms (w16_overlap).
+     */
+    if (len < (WebRtc_Word16) WEBRTC_SPL_MUL_16_16((120 + 119), fsMult) || oldDataLen >= len
+        - inst->ExpandInst.w16_overlap)
+    {
+        /* Length of decoded data too short */
+        inst->w16_mode = MODE_UNSUCCESS_PREEMPTIVE;
+        *pw16_len = len;
+
+        
+        /* simply move all data from decoded to outData */
+
+        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+
+        return NETEQ_OTHER_ERROR;
+    }
+
+    /***********************************/
+    /* Special operations for BGN only */
+    /***********************************/
+
+    /* Check if "background noise only" flag is set */
+    if (BGNonly)
+    {
+        /* special operation for BGN only; simply insert a chunk of data */
+        w16_bestIndex = DEFAULT_TIME_ADJUST * (fsMult << 3); /* X*fs/1000 */
+
+        /* Sanity check for bestIndex */
+        if (w16_bestIndex > len)
+        { /* not good, do nothing instead */
+            inst->w16_mode = MODE_UNSUCCESS_PREEMPTIVE;
+            *pw16_len = len;
+
+
+            /* simply move all data from decoded to outData */
+
+            WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+
+            return NETEQ_OTHER_ERROR;
+        }
+
+        /* set length parameter */
+        *pw16_len = len + w16_bestIndex;
+
+
+        /* copy to output */
+
+        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, len);
+        WEBRTC_SPL_MEMCPY_W16(&pw16_outData[len], pw16_decoded, w16_bestIndex);
+
+        /* set mode */
+        inst->w16_mode = MODE_LOWEN_PREEMPTIVE;
+
+        /* update statistics */
+        inst->statInst.preemptiveLength += w16_bestIndex;
+
+        return 0;
+    } /* end of special code for BGN mode */
+
+#ifdef NETEQ_STEREO
+
+    /* Sanity for msInfo */
+    if (msInfo == NULL)
+    {
+        /* this should not happen here */
+        return MASTER_SLAVE_ERROR;
+    }
+
+    if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
+    {
+        /* Find correlation lag only for non-slave instances */
+
+#endif
+
+        /****************************************************************/
+        /* Find the strongest correlation lag by downsampling to 4 kHz, */
+        /* calculating correlation for downsampled signal and finding   */
+        /* the strongest correlation peak.                              */
+        /****************************************************************/
+
+        /* find maximum absolute value */
+        w16_decodedMax = WebRtcSpl_MaxAbsValueW16(pw16_decoded, (WebRtc_Word16) len);
+
+        /* downsample the decoded speech to 4 kHz */
+        ok = WebRtcNetEQ_DownSampleTo4kHz(pw16_decoded, len, inst->fs, pw16_downSampSpeech,
+            PREEMPTIVE_DOWNSAMPLED_LEN, 1 /* compensate delay*/);
+        if (ok != 0)
+        {
+            /* error */
+            inst->w16_mode = MODE_UNSUCCESS_PREEMPTIVE;
+            *pw16_len = len;
+
+
+            /* simply move all data from decoded to outData */
+
+            WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+
+            return NETEQ_OTHER_ERROR;
+        }
+
+        /*
+         * Set scaling factor for cross correlation to protect against
+         * overflow (log2(50) => 6)
+         */
+        w16_tmp = 6 - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax));
+        w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
+
+        /* Perform correlation from lag 10 to lag 60 in 4 kHz domain */WebRtcNetEQ_CrossCorr(
+            pw32_corr, &pw16_downSampSpeech[w16_endLag],
+            &pw16_downSampSpeech[w16_endLag - w16_startLag], w16_corrLen,
+            (WebRtc_Word16) (w16_endLag - w16_startLag), w16_tmp, -1);
+
+        /* Normalize correlation to 14 bits and put in a WebRtc_Word16 vector */
+        w32_tmp = WebRtcSpl_MaxAbsValueW32(pw32_corr, w16_corrLen);
+        w16_tmp = 17 - WebRtcSpl_NormW32(w32_tmp);
+        w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
+
+        WebRtcSpl_VectorBitShiftW32ToW16(pw16_corr, w16_corrLen, pw32_corr, w16_tmp);
+
+        /* Find limits for peak finding, in order to avoid overful NetEQ algorithm buffer. */
+        /* Calculate difference between MAX_OUTPUT_SIZE and len in 4 kHz domain. */
+        w16_tmp = WebRtcSpl_DivW32W16ResW16((WebRtc_Word32) (NETEQ_MAX_OUTPUT_SIZE - len),
+            (WebRtc_Word16) (fsMult << 1)) - w16_startLag;
+        w16_tmp = WEBRTC_SPL_MIN(w16_corrLen, w16_tmp); /* no more than corrLen = 50 */
+
+#ifdef NETEQ_STEREO
+    } /* end if (msInfo->msMode != NETEQ_SLAVE) */
+
+    if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
+    {
+        /* Find the strongest correlation peak by using the parabolic fit method */
+        WebRtcNetEQ_PeakDetection(pw16_corr, w16_tmp, 1, fsMult, &w16_bestIndex, &w16_bestVal);
+        /* 0 <= bestIndex <= (2*w16_tmp - 1)*fsMult <= (2*corrLen - 1)*fsMult = 99*fsMult */
+
+        /* Compensate bestIndex for displaced starting position */
+        w16_bestIndex = w16_bestIndex + w16_startLag * WEBRTC_SPL_LSHIFT_W16(fsMult, 1);
+        /* 20*fsMult <= bestIndex <= 119*fsMult */
+
+        msInfo->bestIndex = w16_bestIndex;
+    }
+    else if (msInfo->msMode == NETEQ_SLAVE)
+    {
+        if (msInfo->extraInfo == PE_EXP_FAIL)
+        {
+            /* Master has signaled an unsuccessful preemptive expand */
+            w16_bestIndex = 0;
+        }
+        else
+        {
+            /* Get best index from master */
+            w16_bestIndex = msInfo->bestIndex;
+        }
+    }
+    else
+    {
+        /* Invalid mode */
+        return (MASTER_SLAVE_ERROR);
+    }
+
+#else /* NETEQ_STEREO */
+
+    /* Find the strongest correlation peak by using the parabolic fit method */
+    WebRtcNetEQ_PeakDetection(pw16_corr, w16_tmp, 1, fsMult, &w16_bestIndex, &w16_bestVal);
+    /* 0 <= bestIndex <= (2*w16_tmp - 1)*fsMult <= (2*corrLen - 1)*fsMult = 99*fsMult */
+
+    /* Compensate bestIndex for displaced starting position */
+    w16_bestIndex = w16_bestIndex + w16_startLag * WEBRTC_SPL_LSHIFT_W16(fsMult, 1);
+    /* 20*fsMult <= bestIndex <= 119*fsMult */
+
+#endif /* NETEQ_STEREO */
+
+#ifdef NETEQ_STEREO
+
+    if ((msInfo->msMode == NETEQ_MASTER) || (msInfo->msMode == NETEQ_MONO))
+    {
+        /* Calculate correlation only for non-slave instances */
+
+#endif /* NETEQ_STEREO */
+
+        /*****************************************************/
+        /* Calculate correlation bestCorr for the found lag. */
+        /* Also do a simple VAD decision.                    */
+        /*****************************************************/
+
+        /*
+         * Calculate scaling to ensure that bestIndex samples can be square-summed
+         * without overflowing
+         */
+        w16_tmp = (31
+            - WebRtcSpl_NormW32(WEBRTC_SPL_MUL_16_16(w16_decodedMax, w16_decodedMax)));
+        w16_tmp += (31 - WebRtcSpl_NormW32(w16_bestIndex));
+        w16_tmp -= 31;
+        w16_tmp = WEBRTC_SPL_MAX(0, w16_tmp);
+
+        /* vec1 starts at 15 ms minus one pitch period */
+        pw16_vec1 = &pw16_decoded[fsMult120 - w16_bestIndex];
+        /* vec2 start at 15 ms */
+        pw16_vec2 = &pw16_decoded[fsMult120];
+
+        /* Calculate energies for vec1 and vec2 */
+        w32_en1 = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec1,
+            (WebRtc_Word16*) pw16_vec1, w16_bestIndex, w16_tmp);
+        w32_en2 = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec2,
+            (WebRtc_Word16*) pw16_vec2, w16_bestIndex, w16_tmp);
+
+        /* Calculate cross-correlation at the found lag */
+        w32_cc = WebRtcNetEQ_DotW16W16((WebRtc_Word16*) pw16_vec1, (WebRtc_Word16*) pw16_vec2,
+            w16_bestIndex, w16_tmp);
+
+        /* Check VAD constraint 
+         ((en1+en2)/(2*bestIndex)) <= 8*inst->BGNInst.energy */
+        w32_tmp = WEBRTC_SPL_RSHIFT_W32(w32_en1 + w32_en2, 4); /* (en1+en2)/(2*8) */
+        if (inst->BGNInst.w16_initialized == 1)
+        {
+            w32_tmp2 = inst->BGNInst.w32_energy;
+        }
+        else
+        {
+            /* if BGN parameters have not been estimated, use a fixed threshold */
+            w32_tmp2 = 75000;
+        }
+        w16_tmp2 = 16 - WebRtcSpl_NormW32(w32_tmp2);
+        w16_tmp2 = WEBRTC_SPL_MAX(0, w16_tmp2);
+        w32_tmp = WEBRTC_SPL_RSHIFT_W32(w32_tmp, w16_tmp2);
+        w16_tmp2 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_tmp2, w16_tmp2);
+        w32_tmp2 = WEBRTC_SPL_MUL_16_16(w16_bestIndex, w16_tmp2);
+
+        /* Scale w32_tmp properly before comparing with w32_tmp2 */
+        /* (w16_tmp is scaling before energy calculation, thus 2*w16_tmp) */
+        if (WebRtcSpl_NormW32(w32_tmp) < WEBRTC_SPL_LSHIFT_W32(w16_tmp,1))
+        {
+            /* Cannot scale only w32_tmp, must scale w32_temp2 too */
+            WebRtc_Word16 tempshift = WebRtcSpl_NormW32(w32_tmp);
+            w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp, tempshift);
+            w32_tmp2 = WEBRTC_SPL_RSHIFT_W32(w32_tmp2,
+                WEBRTC_SPL_LSHIFT_W32(w16_tmp,1) - tempshift);
+        }
+        else
+        {
+            w32_tmp = WEBRTC_SPL_LSHIFT_W32(w32_tmp,
+                WEBRTC_SPL_LSHIFT_W32(w16_tmp,1));
+        }
+
+        if (w32_tmp <= w32_tmp2) /*((en1+en2)/(2*bestIndex)) <= 8*inst->BGNInst.energy */
+        {
+            /* The signal seems to be passive speech */
+            w16_VAD = 0;
+            w16_bestCorr = 0; /* Correlation does not matter */
+
+            /* For low energy expansion, the new data can be less than 15 ms,
+             but we must ensure that bestIndex is not larger than the new data. */
+            w16_bestIndex = WEBRTC_SPL_MIN( w16_bestIndex, len - oldDataLen );
+        }
+        else
+        {
+            /* The signal is active speech */
+            w16_VAD = 1;
+
+            /* Calculate correlation (cc/sqrt(en1*en2)) */
+
+            /* Start with calculating scale values */
+            w16_en1Scale = 16 - WebRtcSpl_NormW32(w32_en1);
+            w16_en1Scale = WEBRTC_SPL_MAX(0, w16_en1Scale);
+            w16_en2Scale = 16 - WebRtcSpl_NormW32(w32_en2);
+            w16_en2Scale = WEBRTC_SPL_MAX(0, w16_en2Scale);
+
+            /* Make sure total scaling is even (to simplify scale factor after sqrt) */
+            if ((w16_en1Scale + w16_en2Scale) & 1)
+            {
+                w16_en1Scale += 1;
+            }
+
+            /* Convert energies to WebRtc_Word16 */
+            w16_en1 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_en1, w16_en1Scale);
+            w16_en2 = (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(w32_en2, w16_en2Scale);
+
+            /* Calculate energy product */
+            w32_tmp = WEBRTC_SPL_MUL_16_16(w16_en1, w16_en2);
+
+            /* Calculate square-root of energy product */
+            w16_sqrtEn1En2 = (WebRtc_Word16) WebRtcSpl_SqrtFloor(w32_tmp);
+
+            /* Calculate cc/sqrt(en1*en2) in Q14 */
+            w16_tmp = 14 - ((w16_en1Scale + w16_en2Scale) >> 1);
+            w32_cc = WEBRTC_SPL_SHIFT_W32(w32_cc, w16_tmp);
+            w32_cc = WEBRTC_SPL_MAX(0, w32_cc); /* Don't divide with negative number */
+            w16_bestCorr = (WebRtc_Word16) WebRtcSpl_DivW32W16(w32_cc, w16_sqrtEn1En2);
+            w16_bestCorr = WEBRTC_SPL_MIN(16384, w16_bestCorr); /* set maximum to 1.0 */
+        }
+
+#ifdef NETEQ_STEREO
+
+    } /* end if (msInfo->msMode != NETEQ_SLAVE) */
+
+#endif /* NETEQ_STEREO */
+
+    /*******************************************************/
+    /* Check preemptive expand criteria and insert samples */
+    /*******************************************************/
+
+    /* Check for strong correlation (>0.9) and at least 15 ms new data, 
+     or passive speech */
+#ifdef NETEQ_STEREO
+    if (((((w16_bestCorr > 14746) && (oldDataLen <= fsMult120)) || (w16_VAD == 0))
+        && (msInfo->msMode != NETEQ_SLAVE)) || ((msInfo->msMode == NETEQ_SLAVE)
+        && (msInfo->extraInfo != PE_EXP_FAIL)))
+#else
+    if (((w16_bestCorr > 14746) && (oldDataLen <= fsMult120))
+        || (w16_VAD == 0))
+#endif
+    {
+        /* Do expand operation by overlap add */
+
+        /* Set length of the first part, not to be modified */
+        WebRtc_Word16 w16_startIndex = WEBRTC_SPL_MAX(oldDataLen, fsMult120);
+
+        /*
+         * Calculate cross-fading slope so that the fading factor goes from
+         * 1 (16384 in Q14) to 0 in one pitch period (bestIndex).
+         */
+        w16_inc = (WebRtc_Word16) WebRtcSpl_DivW32W16((WebRtc_Word32) 16384,
+            (WebRtc_Word16) (w16_bestIndex + 1)); /* in Q14 */
+
+        /* Initiate fading factor */
+        w16_startfact = 16384 - w16_inc;
+
+        /* vec1 starts at 15 ms minus one pitch period */
+        pw16_vec1 = &pw16_decoded[w16_startIndex - w16_bestIndex];
+        /* vec2 start at 15 ms */
+        pw16_vec2 = &pw16_decoded[w16_startIndex];
+
+
+        /* Copy unmodified part [0 to 15 ms] */
+
+        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, w16_startIndex);
+
+        /* Generate interpolated part of length bestIndex (1 pitch period) */
+        pw16_vectmp = pw16_outData + w16_startIndex;
+        /* Reuse mixing function from Expand */
+        WebRtcNetEQ_MixVoiceUnvoice(pw16_vectmp, (WebRtc_Word16*) pw16_vec2,
+            (WebRtc_Word16*) pw16_vec1, &w16_startfact, w16_inc, w16_bestIndex);
+
+        /* Move the last part (also unmodified) */
+        /* Take from decoded at 15 ms */
+        pw16_vec2 = &pw16_decoded[w16_startIndex];
+        WEBRTC_SPL_MEMMOVE_W16(&pw16_outData[w16_startIndex + w16_bestIndex], pw16_vec2,
+            (WebRtc_Word16) (len - w16_startIndex));
+
+        /* Set the mode flag */
+        if (w16_VAD)
+        {
+            inst->w16_mode = MODE_SUCCESS_PREEMPTIVE;
+        }
+        else
+        {
+            inst->w16_mode = MODE_LOWEN_PREEMPTIVE;
+        }
+
+        /* Calculate resulting length = original length + pitch period */
+        *pw16_len = len + w16_bestIndex;
+
+        /* Update in-call statistics */
+        inst->statInst.preemptiveLength += w16_bestIndex;
+
+        return 0;
+    }
+    else
+    {
+        /* Preemptive Expand not allowed */
+
+#ifdef NETEQ_STEREO
+        /* Signal to slave(s) that this was unsuccessful */
+        if (msInfo->msMode == NETEQ_MASTER)
+        {
+            msInfo->extraInfo = PE_EXP_FAIL;
+        }
+#endif
+
+        /* Set mode flag to unsuccessful preemptive expand */
+        inst->w16_mode = MODE_UNSUCCESS_PREEMPTIVE;
+
+        /* Length is unmodified */
+        *pw16_len = len;
+
+
+        /* Simply move all data from decoded to outData */
+
+        WEBRTC_SPL_MEMMOVE_W16(pw16_outData, pw16_decoded, (WebRtc_Word16) len);
+
+        return 0;
+    }
+}
+
+#undef     SCRATCH_PW16_DS_SPEECH
+#undef     SCRATCH_PW32_CORR
+#undef     SCRATCH_PW16_CORR
diff --git a/src/modules/audio_coding/neteq/random_vector.c b/src/modules/audio_coding/neteq/random_vector.c
new file mode 100644
index 0000000..217bacd
--- /dev/null
+++ b/src/modules/audio_coding/neteq/random_vector.c
@@ -0,0 +1,54 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This function generates a pseudo-random vector.
+ */
+
+#include "dsp_helpfunctions.h"
+
+/*
+ * Values are normalized so that
+ * sqrt(dot(pw16_NETEQFIX_RANDN_TBL,pw16_NETEQFIX_RANDN_TBL)/256)=2^13
+ */
+const WebRtc_Word16 WebRtcNetEQ_kRandnTbl[RANDVEC_NO_OF_SAMPLES] =
+{
+	2680, 5532, 441, 5520, 16170, -5146, -1024, -8733, 3115, 9598, -10380, -4959, -1280, -21716, 7133, -1522, 
+	13458, -3902, 2789, -675, 3441, 5016, -13599, -4003, -2739, 3922, -7209, 13352, -11617, -7241, 12905, -2314, 
+	5426, 10121, -9702, 11207, -13542, 1373, 816, -5934, -12504, 4798, 1811, 4112, -613, 201, -10367, -2960, 
+	-2419, 3442, 4299, -6116, -6092, 1552, -1650, -480, -1237, 18720, -11858, -8303, -8212, 865, -2890, -16968, 
+	12052, -5845, -5912, 9777, -5665, -6294, 5426, -4737, -6335, 1652, 761, 3832, 641, -8552, -9084, -5753, 
+	8146, 12156, -4915, 15086, -1231, -1869, 11749, -9319, -6403, 11407, 6232, -1683, 24340, -11166, 4017, -10448, 
+	3153, -2936, 6212, 2891, -866, -404, -4807, -2324, -1917, -2388, -6470, -3895, -10300, 5323, -5403, 2205, 
+	4640, 7022, -21186, -6244, -882, -10031, -3395, -12885, 7155, -5339, 5079, -2645, -9515, 6622, 14651, 15852, 
+	359, 122, 8246, -3502, -6696, -3679, -13535, -1409, -704, -7403, -4007, 1798, 279, -420, -12796, -14219, 
+	1141, 3359, 11434, 7049, -6684, -7473, 14283, -4115, -9123, -8969, 4152, 4117, 13792, 5742, 16168, 8661, 
+	-1609, -6095, 1881, 14380, -5588, 6758, -6425, -22969, -7269, 7031, 1119, -1611, -5850, -11281, 3559, -8952, 
+	-10146, -4667, -16251, -1538, 2062, -1012, -13073, 227, -3142, -5265, 20, 5770, -7559, 4740, -4819, 992, 
+	-8208, -7130, -4652, 6725, 7369, -1036, 13144, -1588, -5304, -2344, -449, -5705, -8894, 5205, -17904, -11188, 
+	-1022, 4852, 10101, -5255, -4200, -752, 7941, -1543, 5959, 14719, 13346, 17045, -15605, -1678, -1600, -9230, 
+	68, 23348, 1172, 7750, 11212, -18227, 9956, 4161, 883, 3947, 4341, 1014, -4889, -2603, 1246, -5630, 
+	-3596, -870, -1298, 2784, -3317, -6612, -20541, 4166, 4181, -8625, 3562, 12890, 4761, 3205, -12259, -8579
+};
+
+
+void WebRtcNetEQ_RandomVec(WebRtc_UWord32 *w32_seed, WebRtc_Word16 *pw16_randVec,
+                           WebRtc_Word16 w16_len, WebRtc_Word16 w16_incval)
+{
+    int i;
+    WebRtc_Word16 w16_pos;
+    for (i = 0; i < w16_len; i++)
+    {
+        *w32_seed = (*w32_seed) + w16_incval;
+        w16_pos = (WebRtc_Word16) ((*w32_seed) & (RANDVEC_NO_OF_SAMPLES - 1));
+        pw16_randVec[i] = WebRtcNetEQ_kRandnTbl[w16_pos];
+    }
+}
+
diff --git a/src/modules/audio_coding/neteq/recin.c b/src/modules/audio_coding/neteq/recin.c
new file mode 100644
index 0000000..bce7c48
--- /dev/null
+++ b/src/modules/audio_coding/neteq/recin.c
@@ -0,0 +1,473 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Implementation of the RecIn function, which is the main function for inserting RTP
+ * packets into NetEQ.
+ */
+
+#include "mcu.h"
+
+#include <string.h>
+
+#include "signal_processing_library.h"
+
+#include "automode.h"
+#include "dtmf_buffer.h"
+#include "neteq_defines.h"
+#include "neteq_error_codes.h"
+
+
+int WebRtcNetEQ_RecInInternal(MCUInst_t *MCU_inst, RTPPacket_t *RTPpacketInput,
+                              WebRtc_UWord32 uw32_timeRec)
+{
+    RTPPacket_t RTPpacket[2];
+    int i_k;
+    int i_ok = 0, i_No_Of_Payloads = 1;
+    WebRtc_Word16 flushed = 0;
+    WebRtc_Word16 codecPos;
+    int curr_Codec;
+    WebRtc_Word16 isREDPayload = 0;
+    WebRtc_Word32 temp_bufsize = MCU_inst->PacketBuffer_inst.numPacketsInBuffer;
+#ifdef NETEQ_RED_CODEC
+    RTPPacket_t* RTPpacketPtr[2]; /* Support for redundancy up to 2 payloads */
+    RTPpacketPtr[0] = &RTPpacket[0];
+    RTPpacketPtr[1] = &RTPpacket[1];
+#endif
+
+    /*
+     * Copy from input RTP packet to local copy
+     * (mainly to enable multiple payloads using RED)
+     */
+
+    WEBRTC_SPL_MEMCPY_W8(&RTPpacket[0], RTPpacketInput, sizeof(RTPPacket_t));
+
+    /* Reinitialize NetEq if it's needed (changed SSRC or first call) */
+
+    if ((RTPpacket[0].ssrc != MCU_inst->ssrc) || (MCU_inst->first_packet == 1))
+    {
+        WebRtcNetEQ_RTCPInit(&MCU_inst->RTCP_inst, RTPpacket[0].seqNumber);
+        MCU_inst->first_packet = 0;
+
+        /* Flush the buffer */
+        WebRtcNetEQ_PacketBufferFlush(&MCU_inst->PacketBuffer_inst);
+
+        /* Store new SSRC */
+        MCU_inst->ssrc = RTPpacket[0].ssrc;
+
+        /* Update codecs */
+        MCU_inst->timeStamp = RTPpacket[0].timeStamp;
+        MCU_inst->current_Payload = RTPpacket[0].payloadType;
+
+        /*Set MCU to update codec on next SignalMCU call */
+        MCU_inst->new_codec = 1;
+
+        /* Reset timestamp scaling */
+        MCU_inst->TSscalingInitialized = 0;
+
+    }
+
+    /* Call RTCP statistics */
+    i_ok |= WebRtcNetEQ_RTCPUpdate(&(MCU_inst->RTCP_inst), RTPpacket[0].seqNumber,
+        RTPpacket[0].timeStamp, uw32_timeRec);
+
+    /* If Redundancy is supported and this is the redundancy payload, separate the payloads */
+#ifdef NETEQ_RED_CODEC
+    if (RTPpacket[0].payloadType == WebRtcNetEQ_DbGetPayload(&MCU_inst->codec_DB_inst,
+        kDecoderRED))
+    {
+
+        /* Split the payload into a main and a redundancy payloads */
+        i_ok = WebRtcNetEQ_RedundancySplit(RTPpacketPtr, 2, &i_No_Of_Payloads);
+        if (i_ok < 0)
+        {
+            /* error returned */
+            return i_ok;
+        }
+
+        /*
+         * Only accept a few redundancies of the same type as the main data,
+         * AVT events and CNG.
+         */
+        if ((i_No_Of_Payloads > 1) && (RTPpacket[0].payloadType != RTPpacket[1].payloadType)
+            && (RTPpacket[0].payloadType != WebRtcNetEQ_DbGetPayload(&MCU_inst->codec_DB_inst,
+                kDecoderAVT)) && (RTPpacket[1].payloadType != WebRtcNetEQ_DbGetPayload(
+            &MCU_inst->codec_DB_inst, kDecoderAVT)) && (!WebRtcNetEQ_DbIsCNGPayload(
+            &MCU_inst->codec_DB_inst, RTPpacket[0].payloadType))
+            && (!WebRtcNetEQ_DbIsCNGPayload(&MCU_inst->codec_DB_inst, RTPpacket[1].payloadType)))
+        {
+            i_No_Of_Payloads = 1;
+        }
+        isREDPayload = 1;
+    }
+#endif
+
+    /* loop over the number of payloads */
+    for (i_k = 0; i_k < i_No_Of_Payloads; i_k++)
+    {
+
+        if (isREDPayload == 1)
+        {
+            RTPpacket[i_k].rcuPlCntr = i_k;
+        }
+        else
+        {
+            RTPpacket[i_k].rcuPlCntr = 0;
+        }
+
+        /* Force update of SplitInfo if it's iLBC because of potential change between 20/30ms */
+        if (RTPpacket[i_k].payloadType == WebRtcNetEQ_DbGetPayload(&MCU_inst->codec_DB_inst,
+            kDecoderILBC))
+        {
+            i_ok = WebRtcNetEQ_DbGetSplitInfo(
+                &MCU_inst->PayloadSplit_inst,
+                (enum WebRtcNetEQDecoder) WebRtcNetEQ_DbGetCodec(&MCU_inst->codec_DB_inst,
+                    RTPpacket[i_k].payloadType), RTPpacket[i_k].payloadLen);
+            if (i_ok < 0)
+            {
+                /* error returned */
+                return i_ok;
+            }
+        }
+
+        /* Get information about timestamp scaling for this payload type */
+        i_ok = WebRtcNetEQ_GetTimestampScaling(MCU_inst, RTPpacket[i_k].payloadType);
+        if (i_ok < 0)
+        {
+            /* error returned */
+            return i_ok;
+        }
+
+        if (MCU_inst->TSscalingInitialized == 0 && MCU_inst->scalingFactor != kTSnoScaling)
+        {
+            /* Must initialize scaling with current timestamps */
+            MCU_inst->externalTS = RTPpacket[i_k].timeStamp;
+            MCU_inst->internalTS = RTPpacket[i_k].timeStamp;
+            MCU_inst->TSscalingInitialized = 1;
+        }
+
+        /* Adjust timestamp if timestamp scaling is needed (e.g. SILK or G.722) */
+        if (MCU_inst->TSscalingInitialized == 1)
+        {
+            WebRtc_UWord32 newTS = WebRtcNetEQ_ScaleTimestampExternalToInternal(MCU_inst,
+                RTPpacket[i_k].timeStamp);
+
+            /* save the incoming timestamp for next time */
+            MCU_inst->externalTS = RTPpacket[i_k].timeStamp;
+
+            /* add the scaled difference to last scaled timestamp and save ... */
+            MCU_inst->internalTS = newTS;
+
+            RTPpacket[i_k].timeStamp = newTS;
+        }
+
+        /* Is this a DTMF packet?*/
+        if (RTPpacket[i_k].payloadType == WebRtcNetEQ_DbGetPayload(&MCU_inst->codec_DB_inst,
+            kDecoderAVT))
+        {
+#ifdef NETEQ_ATEVENT_DECODE
+            if (MCU_inst->AVT_PlayoutOn)
+            {
+                i_ok = WebRtcNetEQ_DtmfInsertEvent(&MCU_inst->DTMF_inst,
+                    RTPpacket[i_k].payload, RTPpacket[i_k].payloadLen,
+                    RTPpacket[i_k].timeStamp);
+                if (i_ok != 0)
+                {
+                    return i_ok;
+                }
+            }
+#endif
+#ifdef NETEQ_STEREO
+            if (MCU_inst->usingStereo == 0)
+            {
+                /* do not set this for DTMF packets when using stereo mode */
+                MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = 1;
+            }
+#else
+            MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = 1;
+#endif
+        }
+        else if (WebRtcNetEQ_DbIsCNGPayload(&MCU_inst->codec_DB_inst,
+            RTPpacket[i_k].payloadType))
+        {
+            /* Is this a CNG packet? how should we handle this?*/
+#ifdef NETEQ_CNG_CODEC
+            /* Get CNG sample rate */
+            WebRtc_UWord16 fsCng = WebRtcNetEQ_DbGetSampleRate(&MCU_inst->codec_DB_inst,
+                RTPpacket[i_k].payloadType);
+            if ((fsCng != MCU_inst->fs) && (fsCng > 8000))
+            {
+                /*
+                 * We have received CNG with a different sample rate from what we are using
+                 * now (must be > 8000, since we may use only one CNG type (default) for all
+                 * frequencies). Flush buffer and signal new codec.
+                 */
+                WebRtcNetEQ_PacketBufferFlush(&MCU_inst->PacketBuffer_inst);
+                MCU_inst->new_codec = 1;
+                MCU_inst->current_Codec = -1;
+            }
+            i_ok = WebRtcNetEQ_PacketBufferInsert(&MCU_inst->PacketBuffer_inst,
+                &RTPpacket[i_k], &flushed);
+            if (i_ok < 0)
+            {
+                return RECIN_CNG_ERROR;
+            }
+            MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = 1;
+#else /* NETEQ_CNG_CODEC not defined */
+            return RECIN_UNKNOWNPAYLOAD;
+#endif /* NETEQ_CNG_CODEC */
+        }
+        else
+        {
+            /* Reinitialize the splitting if the payload and/or the payload length has changed */
+            curr_Codec = WebRtcNetEQ_DbGetCodec(&MCU_inst->codec_DB_inst,
+                RTPpacket[i_k].payloadType);
+            if (curr_Codec != MCU_inst->current_Codec)
+            {
+                if (curr_Codec < 0)
+                {
+                    return RECIN_UNKNOWNPAYLOAD;
+                }
+                MCU_inst->current_Codec = curr_Codec;
+                MCU_inst->current_Payload = RTPpacket[i_k].payloadType;
+                i_ok = WebRtcNetEQ_DbGetSplitInfo(&MCU_inst->PayloadSplit_inst,
+                    (enum WebRtcNetEQDecoder) MCU_inst->current_Codec,
+                    RTPpacket[i_k].payloadLen);
+                if (i_ok < 0)
+                { /* error returned */
+                    return i_ok;
+                }
+                WebRtcNetEQ_PacketBufferFlush(&MCU_inst->PacketBuffer_inst);
+                MCU_inst->new_codec = 1;
+            }
+
+            /* Parse the payload and insert it into the buffer */
+            i_ok = WebRtcNetEQ_SplitAndInsertPayload(&RTPpacket[i_k],
+                &MCU_inst->PacketBuffer_inst, &MCU_inst->PayloadSplit_inst, &flushed);
+            if (i_ok < 0)
+            {
+                return i_ok;
+            }
+            if (MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF != 0)
+            {
+                /* first normal packet after CNG or DTMF */
+                MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = -1;
+            }
+        }
+        /* Reset DSP timestamp etc. if packet buffer flushed */
+        if (flushed)
+        {
+            MCU_inst->new_codec = 1;
+        }
+    }
+
+    /*
+     * Update Bandwidth Estimate
+     * Only send the main payload to BWE
+     */
+    if ((curr_Codec = WebRtcNetEQ_DbGetCodec(&MCU_inst->codec_DB_inst,
+        RTPpacket[0].payloadType)) >= 0)
+    {
+        codecPos = MCU_inst->codec_DB_inst.position[curr_Codec];
+        if (MCU_inst->codec_DB_inst.funcUpdBWEst[codecPos] != NULL) /* codec has BWE function */
+        {
+            if (RTPpacket[0].starts_byte1) /* check for shifted byte alignment */
+            {
+                /* re-align to 16-bit alignment */
+                for (i_k = 0; i_k < RTPpacket[0].payloadLen; i_k++)
+                {
+                    WEBRTC_SPL_SET_BYTE(RTPpacket[0].payload,
+                        WEBRTC_SPL_GET_BYTE(RTPpacket[0].payload, i_k+1),
+                        i_k);
+                }
+                RTPpacket[0].starts_byte1 = 0;
+            }
+
+            MCU_inst->codec_DB_inst.funcUpdBWEst[codecPos](
+                MCU_inst->codec_DB_inst.codec_state[codecPos],
+                (G_CONST WebRtc_UWord16 *) RTPpacket[0].payload,
+                (WebRtc_Word32) RTPpacket[0].payloadLen, RTPpacket[0].seqNumber,
+                (WebRtc_UWord32) RTPpacket[0].timeStamp, (WebRtc_UWord32) uw32_timeRec);
+        }
+    }
+
+    if (MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF == 0)
+    {
+        /* Calculate the total speech length carried in each packet */
+        temp_bufsize = MCU_inst->PacketBuffer_inst.numPacketsInBuffer - temp_bufsize;
+        temp_bufsize *= MCU_inst->PacketBuffer_inst.packSizeSamples;
+
+        if ((temp_bufsize > 0) && (MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF
+            == 0) && (temp_bufsize
+            != MCU_inst->BufferStat_inst.Automode_inst.packetSpeechLenSamp))
+        {
+            /* Change the auto-mode parameters if packet length has changed */
+            WebRtcNetEQ_SetPacketSpeechLen(&(MCU_inst->BufferStat_inst.Automode_inst),
+                (WebRtc_Word16) temp_bufsize, MCU_inst->fs);
+        }
+
+        /* update statistics */
+        if ((WebRtc_Word32) (RTPpacket[0].timeStamp - MCU_inst->timeStamp) >= 0
+            && !MCU_inst->new_codec)
+        {
+            /*
+             * Only update statistics if incoming packet is not older than last played out
+             * packet, and if new codec flag is not set.
+             */
+            WebRtcNetEQ_UpdateIatStatistics(&MCU_inst->BufferStat_inst.Automode_inst,
+                MCU_inst->PacketBuffer_inst.maxInsertPositions, RTPpacket[0].seqNumber,
+                RTPpacket[0].timeStamp, MCU_inst->fs,
+                WebRtcNetEQ_DbIsMDCodec((enum WebRtcNetEQDecoder) MCU_inst->current_Codec),
+                (MCU_inst->NetEqPlayoutMode == kPlayoutStreaming));
+        }
+    }
+    else if (MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF == -1)
+    {
+        /*
+         * This is first "normal" packet after CNG or DTMF.
+         * Reset packet time counter and measure time until next packet,
+         * but don't update statistics.
+         */
+        MCU_inst->BufferStat_inst.Automode_inst.lastPackCNGorDTMF = 0;
+        MCU_inst->BufferStat_inst.Automode_inst.packetIatCountSamp = 0;
+    }
+    return 0;
+
+}
+
+int WebRtcNetEQ_GetTimestampScaling(MCUInst_t *MCU_inst, int rtpPayloadType)
+{
+    enum WebRtcNetEQDecoder codec;
+    int codecNumber;
+
+    codecNumber = WebRtcNetEQ_DbGetCodec(&MCU_inst->codec_DB_inst, rtpPayloadType);
+    if (codecNumber < 0)
+    {
+        /* error */
+        return codecNumber;
+    }
+
+    /* cast to enumerator */
+    codec = (enum WebRtcNetEQDecoder) codecNumber;
+
+    /*
+     * The factor obtained below is the number with which the RTP timestamp must be
+     * multiplied to get the true sample count.
+     */
+    switch (codec)
+    {
+        case kDecoderG722:
+        case kDecoderG722_2ch:
+        {
+            /* Use timestamp scaling with factor 2 (two output samples per RTP timestamp) */
+            MCU_inst->scalingFactor = kTSscalingTwo;
+            break;
+        }
+        case kDecoderAVT:
+        case kDecoderCNG:
+        {
+            /* do not change the timestamp scaling settings */
+            break;
+        }
+        default:
+        {
+            /* do not use timestamp scaling */
+            MCU_inst->scalingFactor = kTSnoScaling;
+            break;
+        }
+    }
+    return 0;
+}
+
+WebRtc_UWord32 WebRtcNetEQ_ScaleTimestampExternalToInternal(const MCUInst_t *MCU_inst,
+                                                            WebRtc_UWord32 externalTS)
+{
+    WebRtc_Word32 timestampDiff;
+    WebRtc_UWord32 internalTS;
+
+    /* difference between this and last incoming timestamp */
+    timestampDiff = externalTS - MCU_inst->externalTS;
+
+    switch (MCU_inst->scalingFactor)
+    {
+        case kTSscalingTwo:
+        {
+            /* multiply with 2 */
+            timestampDiff = WEBRTC_SPL_LSHIFT_W32(timestampDiff, 1);
+            break;
+        }
+        case kTSscalingTwoThirds:
+        {
+            /* multiply with 2/3 */
+            timestampDiff = WEBRTC_SPL_LSHIFT_W32(timestampDiff, 1);
+            timestampDiff = WebRtcSpl_DivW32W16(timestampDiff, 3);
+            break;
+        }
+        case kTSscalingFourThirds:
+        {
+            /* multiply with 4/3 */
+            timestampDiff = WEBRTC_SPL_LSHIFT_W32(timestampDiff, 2);
+            timestampDiff = WebRtcSpl_DivW32W16(timestampDiff, 3);
+            break;
+        }
+        default:
+        {
+            /* no scaling */
+        }
+    }
+
+    /* add the scaled difference to last scaled timestamp and save ... */
+    internalTS = MCU_inst->internalTS + timestampDiff;
+
+    return internalTS;
+}
+
+WebRtc_UWord32 WebRtcNetEQ_ScaleTimestampInternalToExternal(const MCUInst_t *MCU_inst,
+                                                            WebRtc_UWord32 internalTS)
+{
+    WebRtc_Word32 timestampDiff;
+    WebRtc_UWord32 externalTS;
+
+    /* difference between this and last incoming timestamp */
+    timestampDiff = (WebRtc_Word32) internalTS - MCU_inst->internalTS;
+
+    switch (MCU_inst->scalingFactor)
+    {
+        case kTSscalingTwo:
+        {
+            /* divide by 2 */
+            timestampDiff = WEBRTC_SPL_RSHIFT_W32(timestampDiff, 1);
+            break;
+        }
+        case kTSscalingTwoThirds:
+        {
+            /* multiply with 3/2 */
+            timestampDiff = WEBRTC_SPL_MUL_32_16(timestampDiff, 3);
+            timestampDiff = WEBRTC_SPL_RSHIFT_W32(timestampDiff, 1);
+            break;
+        }
+        case kTSscalingFourThirds:
+        {
+            /* multiply with 3/4 */
+            timestampDiff = WEBRTC_SPL_MUL_32_16(timestampDiff, 3);
+            timestampDiff = WEBRTC_SPL_RSHIFT_W32(timestampDiff, 2);
+            break;
+        }
+        default:
+        {
+            /* no scaling */
+        }
+    }
+
+    /* add the scaled difference to last scaled timestamp and save ... */
+    externalTS = MCU_inst->externalTS + timestampDiff;
+
+    return externalTS;
+}
diff --git a/src/modules/audio_coding/neteq/recout.c b/src/modules/audio_coding/neteq/recout.c
new file mode 100644
index 0000000..eb80f2d
--- /dev/null
+++ b/src/modules/audio_coding/neteq/recout.c
@@ -0,0 +1,1486 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Implementation of RecOut function, which is the main function for the audio output
+ * process. This function must be called (through the NetEQ API) once every 10 ms.
+ */
+
+#include "dsp.h"
+
+#include <assert.h>
+#include <string.h> /* to define NULL */
+
+#include "signal_processing_library.h"
+
+#include "dsp_helpfunctions.h"
+#include "neteq_error_codes.h"
+#include "neteq_defines.h"
+#include "mcu_dsp_common.h"
+
+/* Audio types */
+#define TYPE_SPEECH 1
+#define TYPE_CNG 2
+
+#ifdef NETEQ_DELAY_LOGGING
+#include "delay_logging.h"
+#include <stdio.h>
+#pragma message("*******************************************************************")
+#pragma message("You have specified to use NETEQ_DELAY_LOGGING in the NetEQ library.")
+#pragma message("Make sure that your test application supports this.")
+#pragma message("*******************************************************************")
+#endif
+
+/* Scratch usage:
+
+ Type           Name                            size             startpos      endpos
+ WebRtc_Word16  pw16_NetEqAlgorithm_buffer      600*fs/8000      0             600*fs/8000-1
+ struct         dspInfo                         6                600*fs/8000   605*fs/8000
+
+ func           WebRtcNetEQ_Normal              40+495*fs/8000   0             39+495*fs/8000
+ func           WebRtcNetEQ_Merge               40+496*fs/8000   0             39+496*fs/8000
+ func           WebRtcNetEQ_Expand              40+370*fs/8000   126*fs/800    39+496*fs/8000
+ func           WebRtcNetEQ_Accelerate          210              240*fs/8000   209+240*fs/8000
+ func           WebRtcNetEQ_BGNUpdate           69               480*fs/8000   68+480*fs/8000
+
+ Total:  605*fs/8000
+ */
+
+#define SCRATCH_ALGORITHM_BUFFER            0
+#define SCRATCH_NETEQ_NORMAL                0
+#define SCRATCH_NETEQ_MERGE                 0
+
+#if (defined(NETEQ_48KHZ_WIDEBAND)) 
+#define SCRATCH_DSP_INFO                     3600
+#define SCRATCH_NETEQ_ACCELERATE            1440
+#define SCRATCH_NETEQ_BGN_UPDATE            2880
+#define SCRATCH_NETEQ_EXPAND                756
+#elif (defined(NETEQ_32KHZ_WIDEBAND)) 
+#define SCRATCH_DSP_INFO                     2400
+#define SCRATCH_NETEQ_ACCELERATE            960
+#define SCRATCH_NETEQ_BGN_UPDATE            1920
+#define SCRATCH_NETEQ_EXPAND                504
+#elif (defined(NETEQ_WIDEBAND)) 
+#define SCRATCH_DSP_INFO                     1200
+#define SCRATCH_NETEQ_ACCELERATE            480
+#define SCRATCH_NETEQ_BGN_UPDATE            960
+#define SCRATCH_NETEQ_EXPAND                252
+#else    /* NB */
+#define SCRATCH_DSP_INFO                     600
+#define SCRATCH_NETEQ_ACCELERATE            240
+#define SCRATCH_NETEQ_BGN_UPDATE            480
+#define SCRATCH_NETEQ_EXPAND                126
+#endif
+
+#if (defined(NETEQ_48KHZ_WIDEBAND)) 
+#define SIZE_SCRATCH_BUFFER                 3636
+#elif (defined(NETEQ_32KHZ_WIDEBAND)) 
+#define SIZE_SCRATCH_BUFFER                 2424
+#elif (defined(NETEQ_WIDEBAND)) 
+#define SIZE_SCRATCH_BUFFER                 1212
+#else    /* NB */
+#define SIZE_SCRATCH_BUFFER                 606
+#endif
+
+#ifdef NETEQ_DELAY_LOGGING
+extern FILE *delay_fid2; /* file pointer to delay log file */
+extern WebRtc_UWord32 tot_received_packets;
+#endif
+
+
+int WebRtcNetEQ_RecOutInternal(DSPInst_t *inst, WebRtc_Word16 *pw16_outData,
+                               WebRtc_Word16 *pw16_len, WebRtc_Word16 BGNonly)
+{
+
+    WebRtc_Word16 blockLen, payloadLen, len = 0, pos;
+    WebRtc_Word16 w16_tmp1, w16_tmp2, w16_tmp3, DataEnough;
+    WebRtc_Word16 *blockPtr;
+    WebRtc_Word16 MD = 0;
+
+    WebRtc_Word16 speechType = TYPE_SPEECH;
+    WebRtc_UWord16 instr;
+    WebRtc_UWord16 uw16_tmp;
+#ifdef SCRATCH
+    char pw8_ScratchBuffer[((SIZE_SCRATCH_BUFFER + 1) * 2)];
+    WebRtc_Word16 *pw16_scratchPtr = (WebRtc_Word16*) pw8_ScratchBuffer;
+    WebRtc_Word16 pw16_decoded_buffer[NETEQ_MAX_FRAME_SIZE];
+    WebRtc_Word16 *pw16_NetEqAlgorithm_buffer = pw16_scratchPtr
+        + SCRATCH_ALGORITHM_BUFFER;
+    DSP2MCU_info_t *dspInfo = (DSP2MCU_info_t*) (pw16_scratchPtr + SCRATCH_DSP_INFO);
+#else
+    WebRtc_Word16 pw16_decoded_buffer[NETEQ_MAX_FRAME_SIZE];
+    WebRtc_Word16 pw16_NetEqAlgorithm_buffer[NETEQ_MAX_OUTPUT_SIZE];
+    DSP2MCU_info_t dspInfoStruct;
+    DSP2MCU_info_t *dspInfo = &dspInfoStruct;
+#endif
+    WebRtc_Word16 fs_mult;
+    int borrowedSamples;
+    int oldBorrowedSamples;
+    int return_value = 0;
+    WebRtc_Word16 lastModeBGNonly = (inst->w16_mode & MODE_BGN_ONLY) != 0; /* check BGN flag */
+    void *mainInstBackup = inst->main_inst;
+
+#ifdef NETEQ_DELAY_LOGGING
+    int temp_var;
+#endif
+    WebRtc_Word16 dtmfValue = -1;
+    WebRtc_Word16 dtmfVolume = -1;
+    int playDtmf = 0;
+#ifdef NETEQ_ATEVENT_DECODE
+    int dtmfSwitch = 0;
+#endif
+#ifdef NETEQ_STEREO
+    MasterSlaveInfo *msInfo = inst->msInfo;
+#endif
+    WebRtc_Word16 *sharedMem = pw16_NetEqAlgorithm_buffer; /* Reuse memory SHARED_MEM_SIZE size */
+    inst->pw16_readAddress = sharedMem;
+    inst->pw16_writeAddress = sharedMem;
+
+    /* Get information about if there is one descriptor left */
+    if (inst->codec_ptr_inst.funcGetMDinfo != NULL)
+    {
+        MD = inst->codec_ptr_inst.funcGetMDinfo(inst->codec_ptr_inst.codec_state);
+        if (MD > 0)
+            MD = 1;
+        else
+            MD = 0;
+    }
+
+#ifdef NETEQ_STEREO
+    if ((msInfo->msMode == NETEQ_SLAVE) && (inst->codec_ptr_inst.funcDecode != NULL))
+    {
+        /*
+         * Valid function pointers indicate that we have decoded something,
+         * and that the timestamp information is correct.
+         */
+
+        /* Get the information from master to correct synchronization */
+        WebRtc_UWord32 currentMasterTimestamp;
+        WebRtc_UWord32 currentSlaveTimestamp;
+
+        currentMasterTimestamp = msInfo->endTimestamp - msInfo->samplesLeftWithOverlap;
+        currentSlaveTimestamp = inst->endTimestamp - (inst->endPosition - inst->curPosition);
+
+        /* Partition the uint32_t space in three: [0 0.25) [0.25 0.75] (0.75 1]
+         * We consider a wrap to have occurred if the timestamps are in
+         * different edge partitions.
+         */
+        if (currentSlaveTimestamp < 0x40000000 &&
+            currentMasterTimestamp > 0xc0000000) {
+          // Slave has wrapped.
+          currentSlaveTimestamp += (0xffffffff - currentMasterTimestamp) + 1;
+          currentMasterTimestamp = 0;
+        } else if (currentMasterTimestamp < 0x40000000 &&
+            currentSlaveTimestamp > 0xc0000000) {
+          // Master has wrapped.
+          currentMasterTimestamp += (0xffffffff - currentSlaveTimestamp) + 1;
+          currentSlaveTimestamp = 0;
+        }
+
+        if (currentSlaveTimestamp < currentMasterTimestamp)
+        {
+            /* brute-force discard a number of samples to catch up */
+            inst->curPosition += currentMasterTimestamp - currentSlaveTimestamp;
+
+        }
+        else if (currentSlaveTimestamp > currentMasterTimestamp)
+        {
+            /* back off current position to slow down */
+            inst->curPosition -= currentSlaveTimestamp - currentMasterTimestamp;
+        }
+
+        /* make sure we have at least "overlap" samples left */
+        inst->curPosition = WEBRTC_SPL_MIN(inst->curPosition,
+            inst->endPosition - inst->ExpandInst.w16_overlap);
+
+        /* make sure we do not end up outside the speech history */
+        inst->curPosition = WEBRTC_SPL_MAX(inst->curPosition, 0);
+    }
+#endif
+
+    /* Write status data to shared memory */
+    dspInfo->playedOutTS = inst->endTimestamp;
+    dspInfo->samplesLeft = inst->endPosition - inst->curPosition
+        - inst->ExpandInst.w16_overlap;
+    dspInfo->MD = MD;
+    dspInfo->lastMode = inst->w16_mode;
+    dspInfo->frameLen = inst->w16_frameLen;
+
+    /* Force update of codec if codec function is NULL */
+    if (inst->codec_ptr_inst.funcDecode == NULL)
+    {
+        dspInfo->lastMode |= MODE_AWAITING_CODEC_PTR;
+    }
+
+#ifdef NETEQ_STEREO
+    if (msInfo->msMode == NETEQ_SLAVE && (msInfo->extraInfo == DTMF_OVERDUB
+        || msInfo->extraInfo == DTMF_ONLY))
+    {
+        /* Signal that the master instance generated DTMF tones */
+        dspInfo->lastMode |= MODE_MASTER_DTMF_SIGNAL;
+    }
+
+    if (msInfo->msMode != NETEQ_MONO)
+    {
+        /* We are using stereo mode; signal this to MCU side */
+        dspInfo->lastMode |= MODE_USING_STEREO;
+    }
+#endif
+
+    WEBRTC_SPL_MEMCPY_W8(inst->pw16_writeAddress,dspInfo,sizeof(DSP2MCU_info_t));
+
+    /* Signal MCU with "interrupt" call to main inst*/
+#ifdef NETEQ_STEREO
+    assert(msInfo != NULL);
+    if (msInfo->msMode == NETEQ_MASTER)
+    {
+        /* clear info to slave */
+        WebRtcSpl_MemSetW16((WebRtc_Word16 *) msInfo, 0,
+            sizeof(MasterSlaveInfo) / sizeof(WebRtc_Word16));
+        /* re-set mode */
+        msInfo->msMode = NETEQ_MASTER;
+
+        /* Store some information to slave */
+        msInfo->endTimestamp = inst->endTimestamp;
+        msInfo->samplesLeftWithOverlap = inst->endPosition - inst->curPosition;
+    }
+#endif
+
+    /*
+     * This call will trigger the MCU side to make a decision based on buffer contents and
+     * decision history. Instructions, encoded data and function pointers will be written
+     * to the shared memory.
+     */
+    return_value = WebRtcNetEQ_DSP2MCUinterrupt((MainInst_t *) inst->main_inst, sharedMem);
+
+    /* Read MCU data and instructions */
+    instr = (WebRtc_UWord16) (inst->pw16_readAddress[0] & 0xf000);
+
+#ifdef NETEQ_STEREO
+    if (msInfo->msMode == NETEQ_MASTER)
+    {
+        msInfo->instruction = instr;
+    }
+    else if (msInfo->msMode == NETEQ_SLAVE)
+    {
+        /* Nothing to do */
+    }
+#endif
+
+    /* check for error returned from MCU side, if so, return error */
+    if (return_value < 0)
+    {
+        inst->w16_mode = MODE_ERROR;
+        dspInfo->lastMode = MODE_ERROR;
+        return return_value;
+    }
+
+    blockPtr = &((inst->pw16_readAddress)[3]);
+
+    /* Check for DTMF payload flag */
+    if ((inst->pw16_readAddress[0] & DSP_DTMF_PAYLOAD) != 0)
+    {
+        playDtmf = 1;
+        dtmfValue = blockPtr[1];
+        dtmfVolume = blockPtr[2];
+        blockPtr += 3;
+
+#ifdef NETEQ_STEREO
+        if (msInfo->msMode == NETEQ_MASTER)
+        {
+            /* signal to slave that master is using DTMF */
+            msInfo->extraInfo = DTMF_OVERDUB;
+        }
+#endif
+    }
+
+    blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1; /* In # of WebRtc_Word16 */
+    payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
+    blockPtr++;
+
+    /* Do we have to change our decoder? */
+    if ((inst->pw16_readAddress[0] & 0x0f00) == DSP_CODEC_NEW_CODEC)
+    {
+        WEBRTC_SPL_MEMCPY_W16(&inst->codec_ptr_inst,blockPtr,(payloadLen+1)>>1);
+        if (inst->codec_ptr_inst.codec_fs != 0)
+        {
+            return_value = WebRtcNetEQ_DSPInit(inst, inst->codec_ptr_inst.codec_fs);
+            if (return_value != 0)
+            { /* error returned */
+                instr = DSP_INSTR_FADE_TO_BGN; /* emergency instruction */
+            }
+#ifdef NETEQ_DELAY_LOGGING
+            temp_var = NETEQ_DELAY_LOGGING_SIGNAL_CHANGE_FS;
+            if ((fwrite(&temp_var, sizeof(int),
+                        1, delay_fid2) != 1) ||
+                (fwrite(&inst->fs, sizeof(WebRtc_UWord16),
+                        1, delay_fid2) != 1)) {
+              return -1;
+            }
+#endif
+        }
+
+        /* Copy it again since the init destroys this part */
+
+        WEBRTC_SPL_MEMCPY_W16(&inst->codec_ptr_inst,blockPtr,(payloadLen+1)>>1);
+        inst->endTimestamp = inst->codec_ptr_inst.timeStamp;
+        inst->videoSyncTimestamp = inst->codec_ptr_inst.timeStamp;
+        blockPtr += blockLen;
+        blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1;
+        payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
+        blockPtr++;
+        if (inst->codec_ptr_inst.funcDecodeInit != NULL)
+        {
+            inst->codec_ptr_inst.funcDecodeInit(inst->codec_ptr_inst.codec_state);
+        }
+
+#ifdef NETEQ_CNG_CODEC
+
+        /* Also update the CNG state as this might be uninitialized */
+
+        WEBRTC_SPL_MEMCPY_W16(&inst->CNG_Codec_inst,blockPtr,(payloadLen+1)>>1);
+        blockPtr += blockLen;
+        blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1;
+        payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
+        blockPtr++;
+        if (inst->CNG_Codec_inst != NULL)
+        {
+            WebRtcCng_InitDec(inst->CNG_Codec_inst);
+        }
+#endif
+    }
+    else if ((inst->pw16_readAddress[0] & 0x0f00) == DSP_CODEC_RESET)
+    {
+        /* Reset the current codec (but not DSP struct) */
+        if (inst->codec_ptr_inst.funcDecodeInit != NULL)
+        {
+            inst->codec_ptr_inst.funcDecodeInit(inst->codec_ptr_inst.codec_state);
+        }
+
+#ifdef NETEQ_CNG_CODEC
+        /* And reset CNG */
+        if (inst->CNG_Codec_inst != NULL)
+        {
+            WebRtcCng_InitDec(inst->CNG_Codec_inst);
+        }
+#endif /*NETEQ_CNG_CODEC*/
+    }
+
+    fs_mult = WebRtcNetEQ_CalcFsMult(inst->fs);
+
+    /* Add late packet? */
+    if ((inst->pw16_readAddress[0] & 0x0f00) == DSP_CODEC_ADD_LATE_PKT)
+    {
+        if (inst->codec_ptr_inst.funcAddLatePkt != NULL)
+        {
+            /* Only do this if the codec has support for Add Late Pkt */
+            inst->codec_ptr_inst.funcAddLatePkt(inst->codec_ptr_inst.codec_state, blockPtr,
+                payloadLen);
+        }
+        blockPtr += blockLen;
+        blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1; /* In # of Word16 */
+        payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
+        blockPtr++;
+    }
+
+    /* Do we have to decode data? */
+    if ((instr == DSP_INSTR_NORMAL) || (instr == DSP_INSTR_ACCELERATE) || (instr
+        == DSP_INSTR_MERGE) || (instr == DSP_INSTR_PREEMPTIVE_EXPAND))
+    {
+        /* Do we need to update codec-internal PLC state? */
+        if ((instr == DSP_INSTR_MERGE) && (inst->codec_ptr_inst.funcDecodePLC != NULL))
+        {
+            len = 0;
+            len = inst->codec_ptr_inst.funcDecodePLC(inst->codec_ptr_inst.codec_state,
+                &pw16_decoded_buffer[len], 1);
+        }
+        len = 0;
+
+        /* Do decoding */
+        while ((blockLen > 0) && (len < (240 * fs_mult))) /* Guard somewhat against overflow */
+        {
+            if (inst->codec_ptr_inst.funcDecode != NULL)
+            {
+                WebRtc_Word16 dec_Len;
+                if (!BGNonly)
+                {
+                    /* Do decoding as normal
+                     *
+                     * blockPtr is pointing to payload, at this point,
+                     * the most significant bit of *(blockPtr - 1) is a flag if set to 1
+                     * indicates that the following payload is the redundant payload.
+                     */
+                    if (((*(blockPtr - 1) & DSP_CODEC_RED_FLAG) != 0)
+                        && (inst->codec_ptr_inst.funcDecodeRCU != NULL))
+                    {
+                        dec_Len = inst->codec_ptr_inst.funcDecodeRCU(
+                            inst->codec_ptr_inst.codec_state, blockPtr, payloadLen,
+                            &pw16_decoded_buffer[len], &speechType);
+                    }
+                    else
+                    {
+                        dec_Len = inst->codec_ptr_inst.funcDecode(
+                            inst->codec_ptr_inst.codec_state, blockPtr, payloadLen,
+                            &pw16_decoded_buffer[len], &speechType);
+                    }
+                }
+                else
+                {
+                    /*
+                     * Background noise mode: don't decode, just produce the same length BGN.
+                     * Don't call Expand for BGN here, since Expand uses the memory where the
+                     * bitstreams are stored (sharemem).
+                     */
+                    dec_Len = inst->w16_frameLen;
+                }
+
+                if (dec_Len > 0)
+                {
+                    len += dec_Len;
+                    /* Update frameLen */
+                    inst->w16_frameLen = dec_Len;
+                }
+                else if (dec_Len < 0)
+                {
+                    /* Error */
+                    len = -1;
+                    break;
+                }
+                /*
+                 * Sanity check (although we might still write outside memory when this
+                 * happens...)
+                 */
+                if (len > NETEQ_MAX_FRAME_SIZE)
+                {
+                    WebRtcSpl_MemSetW16(pw16_outData, 0, inst->timestampsPerCall);
+                    *pw16_len = inst->timestampsPerCall;
+                    inst->w16_mode = MODE_ERROR;
+                    dspInfo->lastMode = MODE_ERROR;
+                    return RECOUT_ERROR_DECODED_TOO_MUCH;
+                }
+
+                /* Verify that instance was not corrupted by decoder */
+                if (mainInstBackup != inst->main_inst)
+                {
+                    /* Instance is corrupt */
+                    return CORRUPT_INSTANCE;
+                }
+
+            }
+            blockPtr += blockLen;
+            blockLen = (((*blockPtr) & DSP_CODEC_MASK_RED_FLAG) + 1) >> 1; /* In # of Word16 */
+            payloadLen = ((*blockPtr) & DSP_CODEC_MASK_RED_FLAG);
+            blockPtr++;
+        }
+
+        if (len < 0)
+        {
+            len = 0;
+            inst->endTimestamp += inst->w16_frameLen; /* advance one frame */
+            if (inst->codec_ptr_inst.funcGetErrorCode != NULL)
+            {
+                return_value = -inst->codec_ptr_inst.funcGetErrorCode(
+                    inst->codec_ptr_inst.codec_state);
+            }
+            else
+            {
+                return_value = RECOUT_ERROR_DECODING;
+            }
+            instr = DSP_INSTR_FADE_TO_BGN;
+        }
+        if (speechType != TYPE_CNG)
+        {
+            /*
+             * Don't increment timestamp if codec returned CNG speech type
+             * since in this case, the MCU side will increment the CNGplayedTS counter.
+             */
+            inst->endTimestamp += len;
+        }
+    }
+    else if (instr == DSP_INSTR_NORMAL_ONE_DESC)
+    {
+        if (inst->codec_ptr_inst.funcDecode != NULL)
+        {
+            len = inst->codec_ptr_inst.funcDecode(inst->codec_ptr_inst.codec_state, NULL, 0,
+                pw16_decoded_buffer, &speechType);
+#ifdef NETEQ_DELAY_LOGGING
+            temp_var = NETEQ_DELAY_LOGGING_SIGNAL_DECODE_ONE_DESC;
+            if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
+              return -1;
+            }
+            if (fwrite(&inst->endTimestamp, sizeof(WebRtc_UWord32),
+                       1, delay_fid2) != 1) {
+              return -1;
+            }
+            if (fwrite(&dspInfo->samplesLeft, sizeof(WebRtc_UWord16),
+                       1, delay_fid2) != 1) {
+              return -1;
+            }
+            tot_received_packets++;
+#endif
+        }
+        if (speechType != TYPE_CNG)
+        {
+            /*
+             * Don't increment timestamp if codec returned CNG speech type
+             * since in this case, the MCU side will increment the CNGplayedTS counter.
+             */
+            inst->endTimestamp += len;
+        }
+
+        /* Verify that instance was not corrupted by decoder */
+        if (mainInstBackup != inst->main_inst)
+        {
+            /* Instance is corrupt */
+            return CORRUPT_INSTANCE;
+        }
+
+        if (len <= 0)
+        {
+            len = 0;
+            if (inst->codec_ptr_inst.funcGetErrorCode != NULL)
+            {
+                return_value = -inst->codec_ptr_inst.funcGetErrorCode(
+                    inst->codec_ptr_inst.codec_state);
+            }
+            else
+            {
+                return_value = RECOUT_ERROR_DECODING;
+            }
+            if ((inst->codec_ptr_inst.funcDecodeInit != NULL)
+                && (inst->codec_ptr_inst.codec_state != NULL))
+            {
+                /* Reinitialize codec state as something is obviously wrong */
+                inst->codec_ptr_inst.funcDecodeInit(inst->codec_ptr_inst.codec_state);
+            }
+            inst->endTimestamp += inst->w16_frameLen; /* advance one frame */
+            instr = DSP_INSTR_FADE_TO_BGN;
+        }
+    }
+
+    if (len == 0 && lastModeBGNonly) /* no new data */
+    {
+        BGNonly = 1; /* force BGN this time too */
+    }
+
+#ifdef NETEQ_VAD
+    if ((speechType == TYPE_CNG) /* decoder responded with codec-internal CNG */
+    || ((instr == DSP_INSTR_DO_RFC3389CNG) && (blockLen > 0)) /* ... or, SID frame */
+    || (inst->fs > 16000)) /* ... or, if not NB or WB */
+    {
+        /* disable post-decode VAD upon first sign of send-side DTX/VAD active, or if SWB */
+        inst->VADInst.VADEnabled = 0;
+        inst->VADInst.VADDecision = 1; /* set to always active, just to be on the safe side */
+        inst->VADInst.SIDintervalCounter = 0; /* reset SID interval counter */
+    }
+    else if (!inst->VADInst.VADEnabled) /* VAD disabled and no SID/CNG data observed this time */
+    {
+        inst->VADInst.SIDintervalCounter++; /* increase counter */
+    }
+
+    /* check for re-enabling the VAD */
+    if (inst->VADInst.SIDintervalCounter >= POST_DECODE_VAD_AUTO_ENABLE)
+    {
+        /*
+         * It's been a while since the last CNG/SID frame was observed => re-enable VAD.
+         * (Do not care to look for a VAD instance, since this is done inside the init
+         * function)
+         */
+        WebRtcNetEQ_InitVAD(&inst->VADInst, inst->fs);
+    }
+
+    if (len > 0 /* if we decoded any data */
+    && inst->VADInst.VADEnabled /* and VAD enabled */
+    && inst->fs <= 16000) /* can only do VAD for NB and WB */
+    {
+        int VADframeSize; /* VAD frame size in ms */
+        int VADSamplePtr = 0;
+
+        inst->VADInst.VADDecision = 0;
+
+        if (inst->VADInst.VADFunction != NULL) /* make sure that VAD function is provided */
+        {
+            /* divide the data into groups, as large as possible */
+            for (VADframeSize = 30; VADframeSize >= 10; VADframeSize -= 10)
+            {
+                /* loop through 30, 20, 10 */
+
+                while (inst->VADInst.VADDecision == 0
+                    && len - VADSamplePtr >= VADframeSize * fs_mult * 8)
+                {
+                    /*
+                     * Only continue until first active speech found, and as long as there is
+                     * one VADframeSize left.
+                     */
+
+                    /* call VAD with new decoded data */
+                    inst->VADInst.VADDecision |= inst->VADInst.VADFunction(
+                        inst->VADInst.VADState, (int) inst->fs,
+                        (WebRtc_Word16 *) &pw16_decoded_buffer[VADSamplePtr],
+                        (VADframeSize * fs_mult * 8));
+
+                    VADSamplePtr += VADframeSize * fs_mult * 8; /* increment sample counter */
+                }
+            }
+        }
+        else
+        { /* VAD function is NULL */
+            inst->VADInst.VADDecision = 1; /* set decision to active */
+            inst->VADInst.VADEnabled = 0; /* disable VAD since we have no VAD function */
+        }
+
+    }
+#endif /* NETEQ_VAD */
+
+    /* Adjust timestamp if needed */
+    uw16_tmp = (WebRtc_UWord16) inst->pw16_readAddress[1];
+    inst->endTimestamp += (((WebRtc_UWord32) uw16_tmp) << 16);
+    uw16_tmp = (WebRtc_UWord16) inst->pw16_readAddress[2];
+    inst->endTimestamp += uw16_tmp;
+
+    if (BGNonly && len > 0)
+    {
+        /*
+         * If BGN mode, we did not produce any data at decoding.
+         * Do it now instead.
+         */
+
+        WebRtcNetEQ_GenerateBGN(inst,
+#ifdef SCRATCH
+            pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
+#endif
+            pw16_decoded_buffer, len);
+    }
+
+    /* Switch on the instruction received from the MCU side. */
+    switch (instr)
+    {
+        case DSP_INSTR_NORMAL:
+
+            /* Allow for signal processing to apply gain-back etc */
+            WebRtcNetEQ_Normal(inst,
+#ifdef SCRATCH
+                pw16_scratchPtr + SCRATCH_NETEQ_NORMAL,
+#endif
+                pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len);
+
+            /* If last packet was decoded as a inband CNG set mode to CNG instead */
+            if ((speechType == TYPE_CNG) || ((inst->w16_mode == MODE_CODEC_INTERNAL_CNG)
+                && (len == 0)))
+            {
+                inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
+            }
+
+#ifdef NETEQ_ATEVENT_DECODE
+            if (playDtmf == 0)
+            {
+                inst->DTMFInst.reinit = 1;
+            }
+#endif
+            break;
+        case DSP_INSTR_NORMAL_ONE_DESC:
+
+            /* Allow for signal processing to apply gain-back etc */
+            WebRtcNetEQ_Normal(inst,
+#ifdef SCRATCH
+                pw16_scratchPtr + SCRATCH_NETEQ_NORMAL,
+#endif
+                pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len);
+#ifdef NETEQ_ATEVENT_DECODE
+            if (playDtmf == 0)
+            {
+                inst->DTMFInst.reinit = 1;
+            }
+#endif
+            inst->w16_mode = MODE_ONE_DESCRIPTOR;
+            break;
+        case DSP_INSTR_MERGE:
+#ifdef NETEQ_DELAY_LOGGING
+            temp_var = NETEQ_DELAY_LOGGING_SIGNAL_MERGE_INFO;
+            if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
+              return -1;
+            }
+            temp_var = -len;
+#endif
+            /* Call Merge with history*/
+            return_value = WebRtcNetEQ_Merge(inst,
+#ifdef SCRATCH
+                pw16_scratchPtr + SCRATCH_NETEQ_MERGE,
+#endif
+                pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len);
+
+            if (return_value < 0)
+            {
+                /* error */
+                return return_value;
+            }
+
+#ifdef NETEQ_DELAY_LOGGING
+            temp_var += len;
+            if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
+              return -1;
+            }
+#endif
+            /* If last packet was decoded as a inband CNG set mode to CNG instead */
+            if (speechType == TYPE_CNG) inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
+#ifdef NETEQ_ATEVENT_DECODE
+            if (playDtmf == 0)
+            {
+                inst->DTMFInst.reinit = 1;
+            }
+#endif
+            break;
+
+        case DSP_INSTR_EXPAND:
+            len = 0;
+            pos = 0;
+            while ((inst->endPosition - inst->curPosition - inst->ExpandInst.w16_overlap + pos)
+                < (inst->timestampsPerCall))
+            {
+                return_value = WebRtcNetEQ_Expand(inst,
+#ifdef SCRATCH
+                    pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
+#endif
+                    pw16_NetEqAlgorithm_buffer, &len, BGNonly);
+                if (return_value < 0)
+                {
+                    /* error */
+                    return return_value;
+                }
+
+                /*
+                 * Update buffer, but only end part (otherwise expand state is destroyed
+                 * since it reuses speechBuffer[] memory
+                 */
+
+                WEBRTC_SPL_MEMMOVE_W16(inst->pw16_speechHistory,
+                                       inst->pw16_speechHistory + len,
+                                       (inst->w16_speechHistoryLen-len));
+                WEBRTC_SPL_MEMCPY_W16(&inst->pw16_speechHistory[inst->w16_speechHistoryLen-len],
+                                      pw16_NetEqAlgorithm_buffer, len);
+
+                inst->curPosition -= len;
+
+                /* Update variables for VQmon */
+                inst->w16_concealedTS += len;
+#ifdef NETEQ_DELAY_LOGGING
+                temp_var = NETEQ_DELAY_LOGGING_SIGNAL_EXPAND_INFO;
+                if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
+                  return -1;
+                }
+                temp_var = len;
+                if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
+                  return -1;
+                }
+#endif
+                len = 0; /* already written the data, so do not write it again further down. */
+            }
+#ifdef NETEQ_ATEVENT_DECODE
+            if (playDtmf == 0)
+            {
+                inst->DTMFInst.reinit = 1;
+            }
+#endif
+            break;
+
+        case DSP_INSTR_ACCELERATE:
+            if (len < 3 * 80 * fs_mult)
+            {
+                /* We need to move data from the speechBuffer[] in order to get 30 ms */
+                borrowedSamples = 3 * 80 * fs_mult - len;
+
+                WEBRTC_SPL_MEMMOVE_W16(&pw16_decoded_buffer[borrowedSamples],
+                                       pw16_decoded_buffer, len);
+                WEBRTC_SPL_MEMCPY_W16(pw16_decoded_buffer,
+                                      &(inst->speechBuffer[inst->endPosition-borrowedSamples]),
+                                      borrowedSamples);
+
+                return_value = WebRtcNetEQ_Accelerate(inst,
+#ifdef SCRATCH
+                    pw16_scratchPtr + SCRATCH_NETEQ_ACCELERATE,
+#endif
+                    pw16_decoded_buffer, 3 * inst->timestampsPerCall,
+                    pw16_NetEqAlgorithm_buffer, &len, BGNonly);
+
+                if (return_value < 0)
+                {
+                    /* error */
+                    return return_value;
+                }
+
+                /* Copy back samples to the buffer */
+                if (len < borrowedSamples)
+                {
+                    /*
+                     * This destroys the beginning of the buffer, but will not cause any
+                     * problems
+                     */
+
+                    WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->endPosition-borrowedSamples],
+                        pw16_NetEqAlgorithm_buffer, len);
+                    WEBRTC_SPL_MEMMOVE_W16(&inst->speechBuffer[borrowedSamples-len],
+                                           inst->speechBuffer,
+                                           (inst->endPosition-(borrowedSamples-len)));
+
+                    inst->curPosition += (borrowedSamples - len);
+#ifdef NETEQ_DELAY_LOGGING
+                    temp_var = NETEQ_DELAY_LOGGING_SIGNAL_ACCELERATE_INFO;
+                    if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
+                      return -1;
+                    }
+                    temp_var = 3 * inst->timestampsPerCall - len;
+                    if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
+                      return -1;
+                    }
+#endif
+                    len = 0;
+                }
+                else
+                {
+                    WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->endPosition-borrowedSamples],
+                        pw16_NetEqAlgorithm_buffer, borrowedSamples);
+                    WEBRTC_SPL_MEMMOVE_W16(pw16_NetEqAlgorithm_buffer,
+                                           &pw16_NetEqAlgorithm_buffer[borrowedSamples],
+                                           (len-borrowedSamples));
+#ifdef NETEQ_DELAY_LOGGING
+                    temp_var = NETEQ_DELAY_LOGGING_SIGNAL_ACCELERATE_INFO;
+                    if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
+                      return -1;
+                    }
+                    temp_var = 3 * inst->timestampsPerCall - len;
+                    if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
+                      return -1;
+                    }
+#endif
+                    len = len - borrowedSamples;
+                }
+
+            }
+            else
+            {
+#ifdef NETEQ_DELAY_LOGGING
+                temp_var = NETEQ_DELAY_LOGGING_SIGNAL_ACCELERATE_INFO;
+                if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
+                  return -1;
+                }
+                temp_var = len;
+#endif
+                return_value = WebRtcNetEQ_Accelerate(inst,
+#ifdef SCRATCH
+                    pw16_scratchPtr + SCRATCH_NETEQ_ACCELERATE,
+#endif
+                    pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len, BGNonly);
+
+                if (return_value < 0)
+                {
+                    /* error */
+                    return return_value;
+                }
+
+#ifdef NETEQ_DELAY_LOGGING
+                temp_var -= len;
+                if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
+                  return -1;
+                }
+#endif
+            }
+            /* If last packet was decoded as a inband CNG set mode to CNG instead */
+            if (speechType == TYPE_CNG) inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
+#ifdef NETEQ_ATEVENT_DECODE
+            if (playDtmf == 0)
+            {
+                inst->DTMFInst.reinit = 1;
+            }
+#endif
+            break;
+
+        case DSP_INSTR_DO_RFC3389CNG:
+#ifdef NETEQ_CNG_CODEC
+            if (blockLen > 0)
+            {
+                if (WebRtcCng_UpdateSid(inst->CNG_Codec_inst, (WebRtc_UWord8*) blockPtr,
+                    payloadLen) < 0)
+                {
+                    /* error returned from CNG function */
+                    return_value = -WebRtcCng_GetErrorCodeDec(inst->CNG_Codec_inst);
+                    len = inst->timestampsPerCall;
+                    WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
+                    break;
+                }
+            }
+
+            if (BGNonly)
+            {
+                /* Get data from BGN function instead of CNG */
+                len = WebRtcNetEQ_GenerateBGN(inst,
+#ifdef SCRATCH
+                    pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
+#endif
+                    pw16_NetEqAlgorithm_buffer, inst->timestampsPerCall);
+                if (len != inst->timestampsPerCall)
+                {
+                    /* this is not good, treat this as an error */
+                    return_value = -1;
+                }
+            }
+            else
+            {
+                return_value = WebRtcNetEQ_Cng(inst, pw16_NetEqAlgorithm_buffer,
+                    inst->timestampsPerCall);
+            }
+            len = inst->timestampsPerCall;
+            inst->ExpandInst.w16_consecExp = 0;
+            inst->w16_mode = MODE_RFC3389CNG;
+#ifdef NETEQ_ATEVENT_DECODE
+            if (playDtmf == 0)
+            {
+                inst->DTMFInst.reinit = 1;
+            }
+#endif
+
+            if (return_value < 0)
+            {
+                /* error returned */
+                WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
+            }
+
+            break;
+#else
+            return FAULTY_INSTRUCTION;
+#endif
+        case DSP_INSTR_DO_CODEC_INTERNAL_CNG:
+            /*
+             * This represents the case when there is no transmission and the decoder should
+             * do internal CNG.
+             */
+            len = 0;
+            if (inst->codec_ptr_inst.funcDecode != NULL && !BGNonly)
+            {
+                len = inst->codec_ptr_inst.funcDecode(inst->codec_ptr_inst.codec_state,
+                    blockPtr, 0, pw16_decoded_buffer, &speechType);
+            }
+            else
+            {
+                /* get BGN data */
+                len = WebRtcNetEQ_GenerateBGN(inst,
+#ifdef SCRATCH
+                    pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
+#endif
+                    pw16_decoded_buffer, inst->timestampsPerCall);
+            }
+            WebRtcNetEQ_Normal(inst,
+#ifdef SCRATCH
+                pw16_scratchPtr + SCRATCH_NETEQ_NORMAL,
+#endif
+                pw16_decoded_buffer, len, pw16_NetEqAlgorithm_buffer, &len);
+            inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
+            inst->ExpandInst.w16_consecExp = 0;
+            break;
+
+        case DSP_INSTR_DTMF_GENERATE:
+#ifdef NETEQ_ATEVENT_DECODE
+            dtmfSwitch = 0;
+            if ((inst->w16_mode != MODE_DTMF) && (inst->DTMFInst.reinit == 0))
+            {
+                /* Special case; see below.
+                 * We must catch this before calling DTMFGenerate,
+                 * since reinit is set to 0 in that call.
+                 */
+                dtmfSwitch = 1;
+            }
+
+            len = WebRtcNetEQ_DTMFGenerate(&inst->DTMFInst, dtmfValue, dtmfVolume,
+                pw16_NetEqAlgorithm_buffer, inst->fs, -1);
+            if (len < 0)
+            {
+                /* error occurred */
+                return_value = len;
+                len = inst->timestampsPerCall;
+                WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
+            }
+
+            if (dtmfSwitch == 1)
+            {
+                /*
+                 * This is the special case where the previous operation was DTMF overdub.
+                 * but the current instruction is "regular" DTMF. We must make sure that the
+                 * DTMF does not have any discontinuities. The first DTMF sample that we
+                 * generate now must be played out immediately, wherefore it must be copied to
+                 * the speech buffer.
+                 */
+
+                /*
+                 * Generate extra DTMF data to fill the space between
+                 * curPosition and endPosition
+                 */
+                WebRtc_Word16 tempLen;
+
+                tempLen = WebRtcNetEQ_DTMFGenerate(&inst->DTMFInst, dtmfValue, dtmfVolume,
+                    &pw16_NetEqAlgorithm_buffer[len], inst->fs,
+                    inst->endPosition - inst->curPosition);
+                if (tempLen < 0)
+                {
+                    /* error occurred */
+                    return_value = tempLen;
+                    len = inst->endPosition - inst->curPosition;
+                    WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0,
+                        inst->endPosition - inst->curPosition);
+                }
+
+                /* Add to total length */
+                len += tempLen;
+
+                /* Overwrite the "future" part of the speech buffer with the new DTMF data */
+
+                WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->curPosition],
+                                      pw16_NetEqAlgorithm_buffer,
+                                      inst->endPosition - inst->curPosition);
+
+                /* Shuffle the remaining data to the beginning of algorithm buffer */
+                len -= (inst->endPosition - inst->curPosition);
+                WEBRTC_SPL_MEMMOVE_W16(pw16_NetEqAlgorithm_buffer,
+                    &pw16_NetEqAlgorithm_buffer[inst->endPosition - inst->curPosition],
+                    len);
+            }
+
+            inst->endTimestamp += inst->timestampsPerCall;
+            inst->DTMFInst.reinit = 0;
+            inst->ExpandInst.w16_consecExp = 0;
+            inst->w16_mode = MODE_DTMF;
+            BGNonly = 0; /* override BGN only and let DTMF through */
+
+            playDtmf = 0; /* set to zero because the DTMF is already in the Algorithm buffer */
+            /*
+             * If playDtmf is 1, an extra DTMF vector will be generated and overdubbed
+             * on the output.
+             */
+
+#ifdef NETEQ_STEREO
+            if (msInfo->msMode == NETEQ_MASTER)
+            {
+                /* signal to slave that master is using DTMF only */
+                msInfo->extraInfo = DTMF_ONLY;
+            }
+#endif
+
+            break;
+#else
+            inst->w16_mode = MODE_ERROR;
+            dspInfo->lastMode = MODE_ERROR;
+            return FAULTY_INSTRUCTION;
+#endif
+
+        case DSP_INSTR_DO_ALTERNATIVE_PLC:
+            if (inst->codec_ptr_inst.funcDecodePLC != 0)
+            {
+                len = inst->codec_ptr_inst.funcDecodePLC(inst->codec_ptr_inst.codec_state,
+                    pw16_NetEqAlgorithm_buffer, 1);
+            }
+            else
+            {
+                len = inst->timestampsPerCall;
+                /* ZeroStuffing... */
+                WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
+            }
+            inst->ExpandInst.w16_consecExp = 0;
+            break;
+        case DSP_INSTR_DO_ALTERNATIVE_PLC_INC_TS:
+            if (inst->codec_ptr_inst.funcDecodePLC != 0)
+            {
+                len = inst->codec_ptr_inst.funcDecodePLC(inst->codec_ptr_inst.codec_state,
+                    pw16_NetEqAlgorithm_buffer, 1);
+            }
+            else
+            {
+                len = inst->timestampsPerCall;
+                /* ZeroStuffing... */
+                WebRtcSpl_MemSetW16(pw16_NetEqAlgorithm_buffer, 0, len);
+            }
+            inst->ExpandInst.w16_consecExp = 0;
+            inst->endTimestamp += len;
+            break;
+        case DSP_INSTR_DO_AUDIO_REPETITION:
+            len = inst->timestampsPerCall;
+            /* copy->paste... */
+            WEBRTC_SPL_MEMCPY_W16(pw16_NetEqAlgorithm_buffer,
+                                  &inst->speechBuffer[inst->endPosition-len], len);
+            inst->ExpandInst.w16_consecExp = 0;
+            break;
+        case DSP_INSTR_DO_AUDIO_REPETITION_INC_TS:
+            len = inst->timestampsPerCall;
+            /* copy->paste... */
+            WEBRTC_SPL_MEMCPY_W16(pw16_NetEqAlgorithm_buffer,
+                                  &inst->speechBuffer[inst->endPosition-len], len);
+            inst->ExpandInst.w16_consecExp = 0;
+            inst->endTimestamp += len;
+            break;
+
+        case DSP_INSTR_PREEMPTIVE_EXPAND:
+            if (len < 3 * inst->timestampsPerCall)
+            {
+                /* borrow samples from sync buffer if necessary */
+                borrowedSamples = 3 * inst->timestampsPerCall - len; /* borrow this many samples */
+                /* calculate how many of these are already played out */
+                oldBorrowedSamples = WEBRTC_SPL_MAX(0,
+                    borrowedSamples - (inst->endPosition - inst->curPosition));
+                WEBRTC_SPL_MEMMOVE_W16(&pw16_decoded_buffer[borrowedSamples],
+                                       pw16_decoded_buffer, len);
+                WEBRTC_SPL_MEMCPY_W16(pw16_decoded_buffer,
+                                      &(inst->speechBuffer[inst->endPosition-borrowedSamples]),
+                                      borrowedSamples);
+            }
+            else
+            {
+                borrowedSamples = 0;
+                oldBorrowedSamples = 0;
+            }
+
+#ifdef NETEQ_DELAY_LOGGING
+            w16_tmp1 = len;
+#endif
+            /* do the expand */
+            return_value = WebRtcNetEQ_PreEmptiveExpand(inst,
+#ifdef SCRATCH
+                /* use same scratch memory as Accelerate */
+                pw16_scratchPtr + SCRATCH_NETEQ_ACCELERATE,
+#endif
+                pw16_decoded_buffer, len + borrowedSamples, oldBorrowedSamples,
+                pw16_NetEqAlgorithm_buffer, &len, BGNonly);
+
+            if (return_value < 0)
+            {
+                /* error */
+                return return_value;
+            }
+
+            if (borrowedSamples > 0)
+            {
+                /* return borrowed samples */
+
+                /* Copy back to last part of speechBuffer from beginning of output buffer */
+                WEBRTC_SPL_MEMCPY_W16( &(inst->speechBuffer[inst->endPosition-borrowedSamples]),
+                    pw16_NetEqAlgorithm_buffer,
+                    borrowedSamples);
+
+                len -= borrowedSamples; /* remove the borrowed samples from new total length */
+
+                /* Move to beginning of output buffer from end of output buffer */
+                WEBRTC_SPL_MEMMOVE_W16( pw16_NetEqAlgorithm_buffer,
+                    &pw16_NetEqAlgorithm_buffer[borrowedSamples],
+                    len);
+            }
+
+#ifdef NETEQ_DELAY_LOGGING
+            temp_var = NETEQ_DELAY_LOGGING_SIGNAL_PREEMPTIVE_INFO;
+            if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
+              return -1;
+            }
+            temp_var = len - w16_tmp1; /* number of samples added */
+            if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
+              return -1;
+            }
+#endif
+            /* If last packet was decoded as inband CNG, set mode to CNG instead */
+            if (speechType == TYPE_CNG) inst->w16_mode = MODE_CODEC_INTERNAL_CNG;
+#ifdef NETEQ_ATEVENT_DECODE
+            if (playDtmf == 0)
+            {
+                inst->DTMFInst.reinit = 1;
+            }
+#endif
+            break;
+
+        case DSP_INSTR_FADE_TO_BGN:
+        {
+            int tempReturnValue;
+            /* do not overwrite return_value, since it likely contains an error code */
+
+            /* calculate interpolation length */
+            w16_tmp3 = WEBRTC_SPL_MIN(inst->endPosition - inst->curPosition,
+                    inst->timestampsPerCall);
+            /* check that it will fit in pw16_NetEqAlgorithm_buffer */
+            if (w16_tmp3 + inst->w16_frameLen > NETEQ_MAX_OUTPUT_SIZE)
+            {
+                w16_tmp3 = NETEQ_MAX_OUTPUT_SIZE - inst->w16_frameLen;
+            }
+
+            /* call Expand */
+            len = inst->timestampsPerCall + inst->ExpandInst.w16_overlap;
+            pos = 0;
+
+            tempReturnValue = WebRtcNetEQ_Expand(inst,
+#ifdef SCRATCH
+                pw16_scratchPtr + SCRATCH_NETEQ_EXPAND,
+#endif
+                pw16_NetEqAlgorithm_buffer, &len, 1);
+
+            if (tempReturnValue < 0)
+            {
+                /* error */
+                /* this error value will override return_value */
+                return tempReturnValue;
+            }
+
+            pos += len; /* got len samples from expand */
+
+            /* copy to fill the demand */
+            while (pos + len <= inst->w16_frameLen + w16_tmp3)
+            {
+                WEBRTC_SPL_MEMCPY_W16(&pw16_NetEqAlgorithm_buffer[pos],
+                    pw16_NetEqAlgorithm_buffer, len);
+                pos += len;
+            }
+
+            /* fill with fraction of the expand vector if needed */
+            if (pos < inst->w16_frameLen + w16_tmp3)
+            {
+                WEBRTC_SPL_MEMCPY_W16(&pw16_NetEqAlgorithm_buffer[pos], pw16_NetEqAlgorithm_buffer,
+                    inst->w16_frameLen + w16_tmp3 - pos);
+            }
+
+            len = inst->w16_frameLen + w16_tmp3; /* truncate any surplus samples since we don't want these */
+
+            /*
+             * Mix with contents in sync buffer. Find largest power of two that is less than
+             * interpolate length divide 16384 with this number; result is in w16_tmp2.
+             */
+            w16_tmp1 = 2;
+            w16_tmp2 = 16384;
+            while (w16_tmp1 <= w16_tmp3)
+            {
+                w16_tmp2 >>= 1; /* divide with 2 */
+                w16_tmp1 <<= 1; /* increase with a factor of 2 */
+            }
+
+            w16_tmp1 = 0;
+            pos = 0;
+            while (w16_tmp1 < 16384)
+            {
+                inst->speechBuffer[inst->curPosition + pos]
+                    =
+                    (WebRtc_Word16) WEBRTC_SPL_RSHIFT_W32(
+                        WEBRTC_SPL_MUL_16_16( inst->speechBuffer[inst->endPosition - w16_tmp3 + pos],
+                            16384-w16_tmp1 ) +
+                        WEBRTC_SPL_MUL_16_16( pw16_NetEqAlgorithm_buffer[pos], w16_tmp1 ),
+                        14 );
+                w16_tmp1 += w16_tmp2;
+                pos++;
+            }
+
+            /* overwrite remainder of speech buffer */
+
+            WEBRTC_SPL_MEMCPY_W16( &inst->speechBuffer[inst->endPosition - w16_tmp3 + pos],
+                &pw16_NetEqAlgorithm_buffer[pos], w16_tmp3 - pos);
+
+            len -= w16_tmp3;
+            /* shift algorithm buffer */
+
+            WEBRTC_SPL_MEMMOVE_W16( pw16_NetEqAlgorithm_buffer,
+                &pw16_NetEqAlgorithm_buffer[w16_tmp3],
+                len );
+
+            /* Update variables for VQmon */
+            inst->w16_concealedTS += len;
+
+            inst->w16_mode = MODE_FADE_TO_BGN;
+#ifdef NETEQ_DELAY_LOGGING
+            temp_var = NETEQ_DELAY_LOGGING_SIGNAL_EXPAND_INFO;
+            if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
+              return -1;
+            }
+            temp_var = len;
+            if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
+              return -1;
+            }
+#endif
+
+            break;
+        }
+
+        default:
+            inst->w16_mode = MODE_ERROR;
+            dspInfo->lastMode = MODE_ERROR;
+            return FAULTY_INSTRUCTION;
+    } /* end of grand switch */
+
+    /* Copy data directly to output buffer */
+
+    w16_tmp2 = 0;
+    if ((inst->endPosition + len - inst->curPosition - inst->ExpandInst.w16_overlap)
+        >= inst->timestampsPerCall)
+    {
+        w16_tmp2 = inst->endPosition - inst->curPosition;
+        w16_tmp2 = WEBRTC_SPL_MAX(w16_tmp2, 0); /* Additional error protection, just in case */
+        w16_tmp1 = WEBRTC_SPL_MIN(w16_tmp2, inst->timestampsPerCall);
+        w16_tmp2 = inst->timestampsPerCall - w16_tmp1;
+        WEBRTC_SPL_MEMCPY_W16(pw16_outData, &inst->speechBuffer[inst->curPosition], w16_tmp1);
+        WEBRTC_SPL_MEMCPY_W16(&pw16_outData[w16_tmp1], pw16_NetEqAlgorithm_buffer, w16_tmp2);
+        DataEnough = 1;
+    }
+    else
+    {
+        DataEnough = 0;
+    }
+
+    if (playDtmf != 0)
+    {
+#ifdef NETEQ_ATEVENT_DECODE
+        WebRtc_Word16 outDataIndex = 0;
+        WebRtc_Word16 overdubLen = -1; /* default len */
+        WebRtc_Word16 dtmfLen;
+
+        /*
+         * Overdub the output with DTMF. Note that this is not executed if the
+         * DSP_INSTR_DTMF_GENERATE operation is performed above.
+         */
+        if (inst->DTMFInst.lastDtmfSample - inst->curPosition > 0)
+        {
+            /* special operation for transition from "DTMF only" to "DTMF overdub" */
+            outDataIndex
+                = WEBRTC_SPL_MIN(inst->DTMFInst.lastDtmfSample - inst->curPosition,
+                    inst->timestampsPerCall);
+            overdubLen = inst->timestampsPerCall - outDataIndex;
+        }
+
+        dtmfLen = WebRtcNetEQ_DTMFGenerate(&inst->DTMFInst, dtmfValue, dtmfVolume,
+            &pw16_outData[outDataIndex], inst->fs, overdubLen);
+        if (dtmfLen < 0)
+        {
+            /* error occurred */
+            return_value = dtmfLen;
+        }
+        inst->DTMFInst.reinit = 0;
+#else
+        inst->w16_mode = MODE_ERROR;
+        dspInfo->lastMode = MODE_ERROR;
+        return FAULTY_INSTRUCTION;
+#endif
+    }
+
+    /*
+     * Shuffle speech buffer to allow more data. Move data from pw16_NetEqAlgorithm_buffer
+     * to speechBuffer.
+     */
+    if (instr != DSP_INSTR_EXPAND)
+    {
+        w16_tmp1 = WEBRTC_SPL_MIN(inst->endPosition, len);
+        WEBRTC_SPL_MEMMOVE_W16(inst->speechBuffer, inst->speechBuffer + w16_tmp1,
+                               (inst->endPosition-w16_tmp1));
+        WEBRTC_SPL_MEMCPY_W16(&inst->speechBuffer[inst->endPosition-w16_tmp1],
+                              &pw16_NetEqAlgorithm_buffer[len-w16_tmp1], w16_tmp1);
+#ifdef NETEQ_ATEVENT_DECODE
+        /* Update index to end of DTMF data in speech buffer */
+        if (instr == DSP_INSTR_DTMF_GENERATE)
+        {
+            /* We have written DTMF data to the end of speech buffer */
+            inst->DTMFInst.lastDtmfSample = inst->endPosition;
+        }
+        else if (inst->DTMFInst.lastDtmfSample > 0)
+        {
+            /* The end of DTMF data in speech buffer has been shuffled */
+            inst->DTMFInst.lastDtmfSample -= w16_tmp1;
+        }
+#endif
+        /*
+         * Update the BGN history if last operation was not expand (nor Merge, Accelerate
+         * or Pre-emptive expand, to save complexity).
+         */
+        if ((inst->w16_mode != MODE_EXPAND) && (inst->w16_mode != MODE_MERGE)
+            && (inst->w16_mode != MODE_SUCCESS_ACCELERATE) && (inst->w16_mode
+            != MODE_LOWEN_ACCELERATE) && (inst->w16_mode != MODE_SUCCESS_PREEMPTIVE)
+            && (inst->w16_mode != MODE_LOWEN_PREEMPTIVE) && (inst->w16_mode
+            != MODE_FADE_TO_BGN) && (inst->w16_mode != MODE_DTMF) && (!BGNonly))
+        {
+            WebRtcNetEQ_BGNUpdate(inst
+#ifdef SCRATCH
+                , pw16_scratchPtr + SCRATCH_NETEQ_BGN_UPDATE
+#endif
+            );
+        }
+    }
+    else /* instr == DSP_INSTR_EXPAND */
+    {
+        /* Nothing should be done since data is already copied to output. */
+    }
+
+    inst->curPosition -= len;
+
+    /*
+     * Extra protection in case something should go totally wrong in terms of sizes...
+     * If everything is ok this should NEVER happen.
+     */
+    if (inst->curPosition < -inst->timestampsPerCall)
+    {
+        inst->curPosition = -inst->timestampsPerCall;
+    }
+
+    if ((instr != DSP_INSTR_EXPAND) && (instr != DSP_INSTR_MERGE) && (instr
+        != DSP_INSTR_FADE_TO_BGN))
+    {
+        /* Reset concealed TS parameter if it does not seem to have been flushed */
+        if (inst->w16_concealedTS > inst->timestampsPerCall)
+        {
+            inst->w16_concealedTS = 0;
+        }
+    }
+
+    /*
+     * Double-check that we actually have 10 ms to play. If we haven't, there has been a
+     * serious error.The decoder might have returned way too few samples
+     */
+    if (!DataEnough)
+    {
+        /* This should not happen. Set outdata to zeros, and return error. */
+        WebRtcSpl_MemSetW16(pw16_outData, 0, inst->timestampsPerCall);
+        *pw16_len = inst->timestampsPerCall;
+        inst->w16_mode = MODE_ERROR;
+        dspInfo->lastMode = MODE_ERROR;
+        return RECOUT_ERROR_SAMPLEUNDERRUN;
+    }
+
+    /*
+     * Update Videosync timestamp (this special timestamp is needed since the endTimestamp
+     * stops during CNG and Expand periods.
+     */
+    if ((inst->w16_mode != MODE_EXPAND) && (inst->w16_mode != MODE_RFC3389CNG))
+    {
+        WebRtc_UWord32 uw32_tmpTS;
+        uw32_tmpTS = inst->endTimestamp - (inst->endPosition - inst->curPosition);
+        if ((WebRtc_Word32) (uw32_tmpTS - inst->videoSyncTimestamp) > 0)
+        {
+            inst->videoSyncTimestamp = uw32_tmpTS;
+        }
+    }
+    else
+    {
+        inst->videoSyncTimestamp += inst->timestampsPerCall;
+    }
+
+    /* After this, regardless of what has happened, deliver 10 ms of future data */
+    inst->curPosition += inst->timestampsPerCall;
+    *pw16_len = inst->timestampsPerCall;
+
+    /* Remember if BGNonly was used */
+    if (BGNonly)
+    {
+        inst->w16_mode |= MODE_BGN_ONLY;
+    }
+
+    return return_value;
+}
+
+#undef    SCRATCH_ALGORITHM_BUFFER
+#undef    SCRATCH_NETEQ_NORMAL
+#undef    SCRATCH_NETEQ_MERGE
+#undef    SCRATCH_NETEQ_BGN_UPDATE
+#undef    SCRATCH_NETEQ_EXPAND
+#undef    SCRATCH_DSP_INFO
+#undef    SCRATCH_NETEQ_ACCELERATE
+#undef    SIZE_SCRATCH_BUFFER
diff --git a/src/modules/audio_coding/neteq/rtcp.c b/src/modules/audio_coding/neteq/rtcp.c
new file mode 100644
index 0000000..35f73da
--- /dev/null
+++ b/src/modules/audio_coding/neteq/rtcp.c
@@ -0,0 +1,134 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Implementation of RTCP statistics reporting.
+ */
+
+#include "rtcp.h"
+
+#include <string.h>
+
+#include "signal_processing_library.h"
+
+int WebRtcNetEQ_RTCPInit(WebRtcNetEQ_RTCP_t *RTCP_inst, WebRtc_UWord16 uw16_seqNo)
+{
+    /*
+     * Initialize everything to zero and then set the start values for the RTP packet stream.
+     */
+    WebRtcSpl_MemSetW16((WebRtc_Word16*) RTCP_inst, 0,
+        sizeof(WebRtcNetEQ_RTCP_t) / sizeof(WebRtc_Word16));
+    RTCP_inst->base_seq = uw16_seqNo;
+    RTCP_inst->max_seq = uw16_seqNo;
+    return 0;
+}
+
+int WebRtcNetEQ_RTCPUpdate(WebRtcNetEQ_RTCP_t *RTCP_inst, WebRtc_UWord16 uw16_seqNo,
+                           WebRtc_UWord32 uw32_timeStamp, WebRtc_UWord32 uw32_recTime)
+{
+    WebRtc_Word16 w16_SeqDiff;
+    WebRtc_Word32 w32_TimeDiff;
+    WebRtc_Word32 w32_JitterDiff;
+
+    /*
+     * Update number of received packets, and largest packet number received.
+     */
+    RTCP_inst->received++;
+    w16_SeqDiff = uw16_seqNo - RTCP_inst->max_seq;
+    if (w16_SeqDiff >= 0)
+    {
+        if (uw16_seqNo < RTCP_inst->max_seq)
+        {
+            /* Wrap around detected */
+            RTCP_inst->cycles++;
+        }
+        RTCP_inst->max_seq = uw16_seqNo;
+    }
+
+    /* Calculate Jitter, and update previous timestamps */
+    /* Note that the value in RTCP_inst->jitter is in Q4. */
+    if (RTCP_inst->received > 1)
+    {
+        w32_TimeDiff = (uw32_recTime - (uw32_timeStamp - RTCP_inst->transit));
+        w32_TimeDiff = WEBRTC_SPL_ABS_W32(w32_TimeDiff);
+        w32_JitterDiff = WEBRTC_SPL_LSHIFT_W16(w32_TimeDiff, 4) - RTCP_inst->jitter;
+        RTCP_inst->jitter = RTCP_inst->jitter + WEBRTC_SPL_RSHIFT_W32((w32_JitterDiff + 8), 4);
+    }
+    RTCP_inst->transit = (uw32_timeStamp - uw32_recTime);
+    return 0;
+}
+
+int WebRtcNetEQ_RTCPGetStats(WebRtcNetEQ_RTCP_t *RTCP_inst,
+                             WebRtc_UWord16 *puw16_fraction_lost,
+                             WebRtc_UWord32 *puw32_cum_lost, WebRtc_UWord32 *puw32_ext_max,
+                             WebRtc_UWord32 *puw32_jitter, WebRtc_Word16 doNotReset)
+{
+    WebRtc_UWord32 uw32_exp_nr, uw32_exp_interval, uw32_rec_interval;
+    WebRtc_Word32 w32_lost;
+
+    /* Extended highest sequence number received */
+    *puw32_ext_max
+        = (WebRtc_UWord32) WEBRTC_SPL_LSHIFT_W32((WebRtc_UWord32)RTCP_inst->cycles, 16)
+            + RTCP_inst->max_seq;
+
+    /*
+     * Calculate expected number of packets and compare it to the number of packets that
+     * were actually received => the cumulative number of packets lost can be extracted.
+     */
+    uw32_exp_nr = *puw32_ext_max - RTCP_inst->base_seq + 1;
+    if (RTCP_inst->received == 0)
+    {
+        /* no packets received, assume none lost */
+        *puw32_cum_lost = 0;
+    }
+    else if (uw32_exp_nr > RTCP_inst->received)
+    {
+        *puw32_cum_lost = uw32_exp_nr - RTCP_inst->received;
+        if (*puw32_cum_lost > (WebRtc_UWord32) 0xFFFFFF)
+        {
+            *puw32_cum_lost = 0xFFFFFF;
+        }
+    }
+    else
+    {
+        *puw32_cum_lost = 0;
+    }
+
+    /* Fraction lost (Since last report) */
+    uw32_exp_interval = uw32_exp_nr - RTCP_inst->exp_prior;
+    if (!doNotReset)
+    {
+        RTCP_inst->exp_prior = uw32_exp_nr;
+    }
+    uw32_rec_interval = RTCP_inst->received - RTCP_inst->rec_prior;
+    if (!doNotReset)
+    {
+        RTCP_inst->rec_prior = RTCP_inst->received;
+    }
+    w32_lost = (WebRtc_Word32) (uw32_exp_interval - uw32_rec_interval);
+    if (uw32_exp_interval == 0 || w32_lost <= 0 || RTCP_inst->received == 0)
+    {
+        *puw16_fraction_lost = 0;
+    }
+    else
+    {
+        *puw16_fraction_lost = (WebRtc_UWord16) (WEBRTC_SPL_LSHIFT_W32(w32_lost, 8)
+            / uw32_exp_interval);
+    }
+    if (*puw16_fraction_lost > 0xFF)
+    {
+        *puw16_fraction_lost = 0xFF;
+    }
+
+    /* Inter-arrival jitter */
+    *puw32_jitter = (RTCP_inst->jitter) >> 4; /* scaling from Q4 */
+    return 0;
+}
+
diff --git a/src/modules/audio_coding/neteq/rtcp.h b/src/modules/audio_coding/neteq/rtcp.h
new file mode 100644
index 0000000..009e019
--- /dev/null
+++ b/src/modules/audio_coding/neteq/rtcp.h
@@ -0,0 +1,102 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * RTCP statistics reporting.
+ */
+
+#ifndef RTCP_H
+#define RTCP_H
+
+#include "typedefs.h"
+
+typedef struct
+{
+    WebRtc_UWord16 cycles; /* The number of wrap-arounds for the sequence number */
+    WebRtc_UWord16 max_seq; /* The maximum sequence number received
+     (starts from 0 again after wrap around) */
+    WebRtc_UWord16 base_seq; /* The sequence number of the first packet that arrived */
+    WebRtc_UWord32 received; /* The number of packets that has been received */
+    WebRtc_UWord32 rec_prior; /* Number of packets received when last report was generated */
+    WebRtc_UWord32 exp_prior; /* Number of packets that should have been received if no
+     packets were lost. Stored value from last report. */
+    WebRtc_UWord32 jitter; /* Jitter statistics at this instance (calculated according to RFC) */
+    WebRtc_Word32 transit; /* Clock difference for previous packet (RTPtimestamp - LOCALtime_rec) */
+} WebRtcNetEQ_RTCP_t;
+
+/****************************************************************************
+ * WebRtcNetEQ_RTCPInit(...)
+ *
+ * This function calculates the parameters that are needed for the RTCP 
+ * report.
+ *
+ * Input:
+ *		- RTCP_inst		: RTCP instance, that contains information about the 
+ *						  packets that have been received etc.
+ *		- seqNo			: Packet number of the first received frame.
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_RTCPInit(WebRtcNetEQ_RTCP_t *RTCP_inst, WebRtc_UWord16 uw16_seqNo);
+
+/****************************************************************************
+ * WebRtcNetEQ_RTCPUpdate(...)
+ *
+ * This function calculates the parameters that are needed for the RTCP 
+ * report.
+ *
+ * Input:
+ *		- RTCP_inst		: RTCP instance, that contains information about the 
+ *						  packets that have been received etc.
+ *		- seqNo			: Packet number of the first received frame.
+ *		- timeStamp		: Time stamp from the RTP header.
+ *		- recTime		: Time (in RTP timestamps) when this packet was received.
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_RTCPUpdate(WebRtcNetEQ_RTCP_t *RTCP_inst, WebRtc_UWord16 uw16_seqNo,
+                           WebRtc_UWord32 uw32_timeStamp, WebRtc_UWord32 uw32_recTime);
+
+/****************************************************************************
+ * WebRtcNetEQ_RTCPGetStats(...)
+ *
+ * This function calculates the parameters that are needed for the RTCP 
+ * report.
+ *
+ * Input:
+ *		- RTCP_inst		: RTCP instance, that contains information about the 
+ *						  packets that have been received etc.
+ *      - doNotReset    : If non-zero, the fraction lost statistics will not
+ *                        be reset.
+ *
+ * Output:
+ *		- RTCP_inst		: Updated RTCP information (some statistics are 
+ *						  reset when generating this report)
+ *		- fraction_lost : Number of lost RTP packets divided by the number of
+ *						  expected packets, since the last RTCP Report.
+ *		- cum_lost		: Cumulative number of lost packets during this 
+ *						  session.
+ *		- ext_max		: Extended highest sequence number received.
+ *		- jitter		: Inter-arrival jitter.
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_RTCPGetStats(WebRtcNetEQ_RTCP_t *RTCP_inst,
+                             WebRtc_UWord16 *puw16_fraction_lost,
+                             WebRtc_UWord32 *puw32_cum_lost, WebRtc_UWord32 *puw32_ext_max,
+                             WebRtc_UWord32 *puw32_jitter, WebRtc_Word16 doNotReset);
+
+#endif
diff --git a/src/modules/audio_coding/neteq/rtp.c b/src/modules/audio_coding/neteq/rtp.c
new file mode 100644
index 0000000..bd4f9a2
--- /dev/null
+++ b/src/modules/audio_coding/neteq/rtp.c
@@ -0,0 +1,240 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * RTP related functions.
+ */
+
+#include "rtp.h"
+
+#include "typedefs.h" /* to define endianness */
+
+#include "neteq_error_codes.h"
+
+int WebRtcNetEQ_RTPPayloadInfo(WebRtc_Word16* pw16_Datagram, int i_DatagramLen,
+                               RTPPacket_t* RTPheader)
+{
+    int i_P, i_X, i_CC, i_startPosition;
+    int i_IPver;
+    int i_extlength = -1; /* Default value is there is no extension */
+    int i_padlength = 0; /* Default value if there is no padding */
+
+    if (i_DatagramLen < 12)
+    {
+        return RTP_TOO_SHORT_PACKET;
+    }
+
+#ifdef WEBRTC_BIG_ENDIAN
+    i_IPver = (((WebRtc_UWord16) (pw16_Datagram[0] & 0xC000)) >> 14); /* Extract the version */
+    i_P = (((WebRtc_UWord16) (pw16_Datagram[0] & 0x2000)) >> 13); /* Extract the P bit */
+    i_X = (((WebRtc_UWord16) (pw16_Datagram[0] & 0x1000)) >> 12); /* Extract the X bit */
+    i_CC = ((WebRtc_UWord16) (pw16_Datagram[0] >> 8) & 0xF); /* Get the CC number */
+    RTPheader->payloadType = pw16_Datagram[0] & 0x7F; /* Get the coder type	*/
+    RTPheader->seqNumber = pw16_Datagram[1]; /* Get the sequence number	*/
+    RTPheader->timeStamp = ((((WebRtc_UWord32) ((WebRtc_UWord16) pw16_Datagram[2])) << 16)
+        | (WebRtc_UWord16) (pw16_Datagram[3])); /* Get timestamp */
+    RTPheader->ssrc = (((WebRtc_UWord32) pw16_Datagram[4]) << 16)
+        + (((WebRtc_UWord32) pw16_Datagram[5])); /* Get the SSRC */
+
+    if (i_X == 1)
+    {
+        /* Extension header exists. Find out how many WebRtc_Word32 it consists of. */
+        i_extlength = pw16_Datagram[7 + 2 * i_CC];
+    }
+    if (i_P == 1)
+    {
+        /* Padding exists. Find out how many bytes the padding consists of. */
+        if (i_DatagramLen & 0x1)
+        {
+            /* odd number of bytes => last byte in higher byte */
+            i_padlength = (((WebRtc_UWord16) pw16_Datagram[i_DatagramLen >> 1]) >> 8);
+        }
+        else
+        {
+            /* even number of bytes => last byte in lower byte */
+            i_padlength = ((pw16_Datagram[(i_DatagramLen >> 1) - 1]) & 0xFF);
+        }
+    }
+#else /* WEBRTC_LITTLE_ENDIAN */
+    i_IPver = (((WebRtc_UWord16) (pw16_Datagram[0] & 0xC0)) >> 6); /* Extract the IP version */
+    i_P = (((WebRtc_UWord16) (pw16_Datagram[0] & 0x20)) >> 5); /* Extract the P bit */
+    i_X = (((WebRtc_UWord16) (pw16_Datagram[0] & 0x10)) >> 4); /* Extract the X bit */
+    i_CC = (WebRtc_UWord16) (pw16_Datagram[0] & 0xF); /* Get the CC number */
+    RTPheader->payloadType = (pw16_Datagram[0] >> 8) & 0x7F; /* Get the coder type */
+    RTPheader->seqNumber = (((((WebRtc_UWord16) pw16_Datagram[1]) >> 8) & 0xFF)
+        | (((WebRtc_UWord16) (pw16_Datagram[1] & 0xFF)) << 8)); /* Get the packet number */
+    RTPheader->timeStamp = ((((WebRtc_UWord16) pw16_Datagram[2]) & 0xFF) << 24)
+        | ((((WebRtc_UWord16) pw16_Datagram[2]) & 0xFF00) << 8)
+        | ((((WebRtc_UWord16) pw16_Datagram[3]) >> 8) & 0xFF)
+        | ((((WebRtc_UWord16) pw16_Datagram[3]) & 0xFF) << 8); /* Get timestamp */
+    RTPheader->ssrc = ((((WebRtc_UWord16) pw16_Datagram[4]) & 0xFF) << 24)
+        | ((((WebRtc_UWord16) pw16_Datagram[4]) & 0xFF00) << 8)
+        | ((((WebRtc_UWord16) pw16_Datagram[5]) >> 8) & 0xFF)
+        | ((((WebRtc_UWord16) pw16_Datagram[5]) & 0xFF) << 8); /* Get the SSRC */
+
+    if (i_X == 1)
+    {
+        /* Extension header exists. Find out how many WebRtc_Word32 it consists of. */
+        i_extlength = (((((WebRtc_UWord16) pw16_Datagram[7 + 2 * i_CC]) >> 8) & 0xFF)
+            | (((WebRtc_UWord16) (pw16_Datagram[7 + 2 * i_CC] & 0xFF)) << 8));
+    }
+    if (i_P == 1)
+    {
+        /* Padding exists. Find out how many bytes the padding consists of. */
+        if (i_DatagramLen & 0x1)
+        {
+            /* odd number of bytes => last byte in higher byte */
+            i_padlength = (pw16_Datagram[i_DatagramLen >> 1] & 0xFF);
+        }
+        else
+        {
+            /* even number of bytes => last byte in lower byte */
+            i_padlength = (((WebRtc_UWord16) pw16_Datagram[(i_DatagramLen >> 1) - 1]) >> 8);
+        }
+    }
+#endif
+
+    i_startPosition = 12 + 4 * (i_extlength + 1) + 4 * i_CC;
+    RTPheader->payload = &pw16_Datagram[i_startPosition >> 1];
+    RTPheader->payloadLen = i_DatagramLen - i_startPosition - i_padlength;
+    RTPheader->starts_byte1 = 0;
+
+    if ((i_IPver != 2) || (RTPheader->payloadLen <= 0) || (RTPheader->payloadLen >= 16000)
+        || (i_startPosition < 12) || (i_startPosition > i_DatagramLen))
+    {
+        return RTP_CORRUPT_PACKET;
+    }
+
+    return 0;
+}
+
+#ifdef NETEQ_RED_CODEC
+
+int WebRtcNetEQ_RedundancySplit(RTPPacket_t* RTPheader[], int i_MaximumPayloads,
+                                int *i_No_Of_Payloads)
+{
+    const WebRtc_Word16 *pw16_data = RTPheader[0]->payload; /* Pointer to the data */
+    WebRtc_UWord16 uw16_offsetTimeStamp = 65535, uw16_secondPayload = 65535;
+    int i_blockLength, i_k;
+    int i_discardedBlockLength = 0;
+    int singlePayload = 0;
+
+#ifdef WEBRTC_BIG_ENDIAN
+    if ((pw16_data[0] & 0x8000) == 0)
+    {
+        /* Only one payload in this packet*/
+        singlePayload = 1;
+        /* set the blocklength to -4 to deduce the non-existent 4-byte RED header */
+        i_blockLength = -4;
+        RTPheader[0]->payloadType = ((((WebRtc_UWord16)pw16_data[0]) & 0x7F00) >> 8);
+    }
+    else
+    {
+        /* Discard all but the two last payloads. */
+        while (((pw16_data[2] & 0x8000) == 1)&&
+            (pw16_data<((RTPheader[0]->payload)+((RTPheader[0]->payloadLen+1)>>1))))
+        {
+            i_discardedBlockLength += (4+(((WebRtc_UWord16)pw16_data[1]) & 0x3FF));
+            pw16_data+=2;
+        }
+        if (pw16_data>=(RTPheader[0]->payload+((RTPheader[0]->payloadLen+1)>>1)))
+        {
+            return RED_SPLIT_ERROR2; /* Error, we are outside the packet */
+        }
+        singlePayload = 0; /* the packet contains more than one payload */
+        uw16_secondPayload = ((((WebRtc_UWord16)pw16_data[0]) & 0x7F00) >> 8);
+        RTPheader[0]->payloadType = ((((WebRtc_UWord16)pw16_data[2]) & 0x7F00) >> 8);
+        uw16_offsetTimeStamp = ((((WebRtc_UWord16)pw16_data[0]) & 0xFF) << 6) +
+        ((((WebRtc_UWord16)pw16_data[1]) & 0xFC00) >> 10);
+        i_blockLength = (((WebRtc_UWord16)pw16_data[1]) & 0x3FF);
+    }
+#else /* WEBRTC_LITTLE_ENDIAN */
+    if ((pw16_data[0] & 0x80) == 0)
+    {
+        /* Only one payload in this packet */
+        singlePayload = 1;
+        /* set the blocklength to -4 to deduce the non-existent 4-byte RED header */
+        i_blockLength = -4;
+        RTPheader[0]->payloadType = (((WebRtc_UWord16) pw16_data[0]) & 0x7F);
+    }
+    else
+    {
+        /* Discard all but the two last payloads. */
+        while (((pw16_data[2] & 0x80) == 1) && (pw16_data < ((RTPheader[0]->payload)
+            + ((RTPheader[0]->payloadLen + 1) >> 1))))
+        {
+            i_discardedBlockLength += (4 + ((((WebRtc_UWord16) pw16_data[1]) & 0x3) << 8)
+                + ((((WebRtc_UWord16) pw16_data[1]) & 0xFF00) >> 8));
+            pw16_data += 2;
+        }
+        if (pw16_data >= (RTPheader[0]->payload + ((RTPheader[0]->payloadLen + 1) >> 1)))
+        {
+            return RED_SPLIT_ERROR2; /* Error, we are outside the packet */;
+        }
+        singlePayload = 0; /* the packet contains more than one payload */
+        uw16_secondPayload = (((WebRtc_UWord16) pw16_data[0]) & 0x7F);
+        RTPheader[0]->payloadType = (((WebRtc_UWord16) pw16_data[2]) & 0x7F);
+        uw16_offsetTimeStamp = ((((WebRtc_UWord16) pw16_data[0]) & 0xFF00) >> 2)
+            + ((((WebRtc_UWord16) pw16_data[1]) & 0xFC) >> 2);
+        i_blockLength = ((((WebRtc_UWord16) pw16_data[1]) & 0x3) << 8)
+            + ((((WebRtc_UWord16) pw16_data[1]) & 0xFF00) >> 8);
+    }
+#endif
+
+    if (i_MaximumPayloads < 2 || singlePayload == 1)
+    {
+        /* Reject the redundancy; or no redundant payload present. */
+        for (i_k = 1; i_k < i_MaximumPayloads; i_k++)
+        {
+            RTPheader[i_k]->payloadType = -1;
+            RTPheader[i_k]->payloadLen = 0;
+        }
+
+        /* update the pointer for the main data */
+        pw16_data = &pw16_data[(5 + i_blockLength) >> 1];
+        RTPheader[0]->starts_byte1 = (5 + i_blockLength) & 0x1;
+        RTPheader[0]->payloadLen = RTPheader[0]->payloadLen - (i_blockLength + 5)
+            - i_discardedBlockLength;
+        RTPheader[0]->payload = pw16_data;
+
+        *i_No_Of_Payloads = 1;
+
+    }
+    else
+    {
+        /* Redundancy accepted, put the redundancy in second RTPheader. */
+        RTPheader[1]->payloadType = uw16_secondPayload;
+        RTPheader[1]->payload = &pw16_data[5 >> 1];
+        RTPheader[1]->starts_byte1 = 5 & 0x1;
+        RTPheader[1]->seqNumber = RTPheader[0]->seqNumber;
+        RTPheader[1]->timeStamp = RTPheader[0]->timeStamp - uw16_offsetTimeStamp;
+        RTPheader[1]->ssrc = RTPheader[0]->ssrc;
+        RTPheader[1]->payloadLen = i_blockLength;
+
+        /* Modify first RTP packet, so that it contains the main data. */
+        RTPheader[0]->payload = &pw16_data[(5 + i_blockLength) >> 1];
+        RTPheader[0]->starts_byte1 = (5 + i_blockLength) & 0x1;
+        RTPheader[0]->payloadLen = RTPheader[0]->payloadLen - (i_blockLength + 5)
+            - i_discardedBlockLength;
+
+        /* Clear the following payloads. */
+        for (i_k = 2; i_k < i_MaximumPayloads; i_k++)
+        {
+            RTPheader[i_k]->payloadType = -1;
+            RTPheader[i_k]->payloadLen = 0;
+        }
+
+        *i_No_Of_Payloads = 2;
+    }
+    return 0;
+}
+
+#endif
+
diff --git a/src/modules/audio_coding/neteq/rtp.h b/src/modules/audio_coding/neteq/rtp.h
new file mode 100644
index 0000000..8490d62
--- /dev/null
+++ b/src/modules/audio_coding/neteq/rtp.h
@@ -0,0 +1,78 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * RTP data struct and related functions.
+ */
+
+#ifndef RTP_H
+#define RTP_H
+
+#include "typedefs.h"
+
+#include "codec_db.h"
+
+typedef struct
+{
+    WebRtc_UWord16 seqNumber;
+    WebRtc_UWord32 timeStamp;
+    WebRtc_UWord32 ssrc;
+    int payloadType;
+    const WebRtc_Word16 *payload;
+    WebRtc_Word16 payloadLen;
+    WebRtc_Word16 starts_byte1;
+    WebRtc_Word16 rcuPlCntr;
+} RTPPacket_t;
+
+/****************************************************************************
+ * WebRtcNetEQ_RTPPayloadInfo(...)
+ *
+ * Converts a datagram into an RTP header struct.
+ *
+ * Input:
+ *		- Datagram		: UDP datagram from the network
+ *		- DatagramLen	: Length in bytes of the datagram
+ *
+ * Output:
+ *		- RTPheader		: Structure with the datagram info
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_RTPPayloadInfo(WebRtc_Word16* pw16_Datagram, int i_DatagramLen,
+                               RTPPacket_t* RTPheader);
+
+/****************************************************************************
+ * WebRtcNetEQ_RedundancySplit(...)
+ *
+ * Splits a Redundancy RTP struct into two RTP structs. User has to check 
+ * that it's really the redundancy payload. No such check is done inside this
+ * function.
+ *
+ * Input:
+ *		- RTPheader		: First header holds the whole RTP packet (with the redundancy payload)
+ *		- MaximumPayloads: 
+ *						  The maximum number of RTP payloads that should be
+ *						  extracted (1+maximum_no_of_Redundancies).
+ *
+ * Output:
+ *		- RTPheader		: First header holds the main RTP data, while 2..N 
+ *						  holds the redundancy data.
+ *		- No_Of
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_RedundancySplit(RTPPacket_t* RTPheader[], int i_MaximumPayloads,
+                                int *i_No_Of_Payloads);
+
+#endif
diff --git a/src/modules/audio_coding/neteq/set_fs.c b/src/modules/audio_coding/neteq/set_fs.c
new file mode 100644
index 0000000..b2ad5ca
--- /dev/null
+++ b/src/modules/audio_coding/neteq/set_fs.c
@@ -0,0 +1,78 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Function were the sample rate is set.
+ */
+
+#include "mcu.h"
+
+#include "dtmf_buffer.h"
+#include "neteq_error_codes.h"
+
+int WebRtcNetEQ_McuSetFs(MCUInst_t *inst, WebRtc_UWord16 fs)
+{
+    WebRtc_Word16 ok = 0;
+
+    switch (fs)
+    {
+        case 8000:
+        {
+#ifdef NETEQ_ATEVENT_DECODE
+            ok = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 8000, 560);
+#endif
+            inst->timestampsPerCall = inst->millisecondsPerCall * 8;
+            break;
+        }
+
+#ifdef NETEQ_WIDEBAND
+        case 16000:
+        {
+#ifdef NETEQ_ATEVENT_DECODE
+            ok = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 16000, 1120);
+#endif
+            inst->timestampsPerCall = inst->millisecondsPerCall * 16;
+            break;
+        }
+#endif
+
+#ifdef NETEQ_32KHZ_WIDEBAND
+        case 32000:
+        {
+#ifdef NETEQ_ATEVENT_DECODE
+            ok = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 32000, 2240);
+#endif
+            inst->timestampsPerCall = inst->millisecondsPerCall * 32;
+            break;
+        }
+#endif
+
+#ifdef NETEQ_48KHZ_WIDEBAND
+        case 48000:
+        {
+#ifdef NETEQ_ATEVENT_DECODE
+            ok = WebRtcNetEQ_DtmfDecoderInit(&inst->DTMF_inst, 48000, 3360);
+#endif
+            inst->timestampsPerCall = inst->millisecondsPerCall * 48;
+            break;
+        }
+#endif
+
+        default:
+        {
+            /* Not supported yet */
+            return CODEC_DB_UNSUPPORTED_FS;
+        }
+    } /* end switch */
+
+    inst->fs = fs;
+
+    return ok;
+}
diff --git a/src/modules/audio_coding/neteq/signal_mcu.c b/src/modules/audio_coding/neteq/signal_mcu.c
new file mode 100644
index 0000000..b28f39c
--- /dev/null
+++ b/src/modules/audio_coding/neteq/signal_mcu.c
@@ -0,0 +1,769 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Signal the MCU that data is available and ask for a RecOut decision.
+ */
+
+#include "mcu.h"
+
+#include <string.h>
+
+#include "signal_processing_library.h"
+
+#include "automode.h"
+#include "dtmf_buffer.h"
+#include "mcu_dsp_common.h"
+#include "neteq_error_codes.h"
+
+#ifdef NETEQ_DELAY_LOGGING
+#include "delay_logging.h"
+#include <stdio.h>
+
+extern FILE *delay_fid2; /* file pointer to delay log file */
+#endif
+
+
+/*
+ * Signals the MCU that DSP status data is available.
+ */
+int WebRtcNetEQ_SignalMcu(MCUInst_t *inst)
+{
+
+    int i_bufferpos, i_res;
+    WebRtc_UWord16 uw16_instr;
+    DSP2MCU_info_t dspInfo;
+    WebRtc_Word16 *blockPtr, blockLen;
+    WebRtc_UWord32 uw32_availableTS;
+    RTPPacket_t temp_pkt;
+    WebRtc_Word32 w32_bufsize, w32_tmp;
+    WebRtc_Word16 payloadType = -1;
+    WebRtc_Word16 wantedNoOfTimeStamps;
+    WebRtc_Word32 totalTS;
+    WebRtc_Word16 oldPT, latePacketExist = 0;
+    WebRtc_UWord32 oldTS, prevTS, uw32_tmp;
+    WebRtc_UWord16 prevSeqNo;
+    WebRtc_Word16 nextSeqNoAvail;
+    WebRtc_Word16 fs_mult, w16_tmp;
+    WebRtc_Word16 lastModeBGNonly = 0;
+#ifdef NETEQ_DELAY_LOGGING
+    int temp_var;
+#endif
+    int playDtmf = 0;
+
+    fs_mult = WebRtcSpl_DivW32W16ResW16(inst->fs, 8000);
+
+    /* Increment counter since last statistics report */
+    inst->lastReportTS += inst->timestampsPerCall;
+
+    /* Increment waiting time for all packets. */
+    WebRtcNetEQ_IncrementWaitingTimes(&inst->PacketBuffer_inst);
+
+    /* Read info from DSP so we now current status */
+
+    WEBRTC_SPL_MEMCPY_W8(&dspInfo,inst->pw16_readAddress,sizeof(DSP2MCU_info_t));
+
+    /* Set blockPtr to first payload block */
+    blockPtr = &inst->pw16_writeAddress[3];
+
+    /* Clear instruction word and number of lost samples (2*WebRtc_Word16) */
+    inst->pw16_writeAddress[0] = 0;
+    inst->pw16_writeAddress[1] = 0;
+    inst->pw16_writeAddress[2] = 0;
+
+    if ((dspInfo.lastMode & MODE_AWAITING_CODEC_PTR) != 0)
+    {
+        /*
+         * Make sure state is adjusted so that a codec update is
+         * performed when first packet arrives.
+         */
+        if (inst->new_codec != 1)
+        {
+            inst->current_Codec = -1;
+        }
+        dspInfo.lastMode = (dspInfo.lastMode ^ MODE_AWAITING_CODEC_PTR);
+    }
+
+#ifdef NETEQ_STEREO
+    if ((dspInfo.lastMode & MODE_MASTER_DTMF_SIGNAL) != 0)
+    {
+        playDtmf = 1; /* force DTMF decision */
+        dspInfo.lastMode = (dspInfo.lastMode ^ MODE_MASTER_DTMF_SIGNAL);
+    }
+
+    if ((dspInfo.lastMode & MODE_USING_STEREO) != 0)
+    {
+        if (inst->usingStereo == 0)
+        {
+            /* stereo mode changed; reset automode instance to re-synchronize statistics */
+            WebRtcNetEQ_ResetAutomode(&(inst->BufferStat_inst.Automode_inst),
+                inst->PacketBuffer_inst.maxInsertPositions);
+        }
+        inst->usingStereo = 1;
+        dspInfo.lastMode = (dspInfo.lastMode ^ MODE_USING_STEREO);
+    }
+    else
+    {
+        inst->usingStereo = 0;
+    }
+#endif
+
+    /* detect if BGN_ONLY flag is set in lastMode */
+    if ((dspInfo.lastMode & MODE_BGN_ONLY) != 0)
+    {
+        lastModeBGNonly = 1; /* remember flag */
+        dspInfo.lastMode ^= MODE_BGN_ONLY; /* clear the flag */
+    }
+
+    if ((dspInfo.lastMode == MODE_RFC3389CNG) || (dspInfo.lastMode == MODE_CODEC_INTERNAL_CNG)
+        || (dspInfo.lastMode == MODE_EXPAND))
+    {
+        /*
+         * If last mode was CNG (or Expand, since this could be covering up for a lost CNG
+         * packet), increase the CNGplayedTS counter.
+         */
+        inst->BufferStat_inst.uw32_CNGplayedTS += inst->timestampsPerCall;
+
+        if (dspInfo.lastMode == MODE_RFC3389CNG)
+        {
+            /* remember that RFC3389CNG is on (needed if CNG is interrupted by DTMF) */
+            inst->BufferStat_inst.w16_cngOn = CNG_RFC3389_ON;
+        }
+        else if (dspInfo.lastMode == MODE_CODEC_INTERNAL_CNG)
+        {
+            /* remember that internal CNG is on (needed if CNG is interrupted by DTMF) */
+            inst->BufferStat_inst.w16_cngOn = CNG_INTERNAL_ON;
+        }
+
+    }
+
+    /* Update packet size from previously decoded packet */
+    if (dspInfo.frameLen > 0)
+    {
+        inst->PacketBuffer_inst.packSizeSamples = dspInfo.frameLen;
+    }
+
+    /* Look for late packet (unless codec has changed) */
+    if (inst->new_codec != 1)
+    {
+        if (WebRtcNetEQ_DbIsMDCodec((enum WebRtcNetEQDecoder) inst->current_Codec))
+        {
+            WebRtcNetEQ_PacketBufferFindLowestTimestamp(&inst->PacketBuffer_inst,
+                inst->timeStamp, &uw32_availableTS, &i_bufferpos, 1, &payloadType);
+            if ((inst->new_codec != 1) && (inst->timeStamp == uw32_availableTS)
+                && (inst->timeStamp < dspInfo.playedOutTS) && (i_bufferpos != -1)
+                && (WebRtcNetEQ_DbGetPayload(&(inst->codec_DB_inst),
+                    (enum WebRtcNetEQDecoder) inst->current_Codec) == payloadType))
+            {
+                int waitingTime;
+                temp_pkt.payload = blockPtr + 1;
+                i_res = WebRtcNetEQ_PacketBufferExtract(&inst->PacketBuffer_inst, &temp_pkt,
+                    i_bufferpos, &waitingTime);
+                if (i_res < 0)
+                { /* error returned */
+                    return i_res;
+                }
+                WebRtcNetEQ_StoreWaitingTime(inst, waitingTime);
+                *blockPtr = temp_pkt.payloadLen;
+                /* set the flag if this is a redundant payload */
+                if (temp_pkt.rcuPlCntr > 0)
+                {
+                    *blockPtr = (*blockPtr) | (DSP_CODEC_RED_FLAG);
+                }
+                blockPtr += ((temp_pkt.payloadLen + 1) >> 1) + 1;
+
+                /*
+                 * Close the data with a zero size block, in case we will not write any
+                 * more data.
+                 */
+                *blockPtr = 0;
+                inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0xf0ff)
+                        | DSP_CODEC_ADD_LATE_PKT;
+                latePacketExist = 1;
+            }
+        }
+    }
+
+    i_res = WebRtcNetEQ_PacketBufferFindLowestTimestamp(&inst->PacketBuffer_inst,
+        dspInfo.playedOutTS, &uw32_availableTS, &i_bufferpos, (inst->new_codec == 0),
+        &payloadType);
+    if (i_res < 0)
+    { /* error returned */
+        return i_res;
+    }
+
+    if (inst->BufferStat_inst.w16_cngOn == CNG_RFC3389_ON)
+    {
+        /*
+         * Because of timestamp peculiarities, we have to "manually" disallow using a CNG
+         * packet with the same timestamp as the one that was last played. This can happen
+         * when using redundancy and will cause the timing to shift.
+         */
+        while (i_bufferpos != -1 && WebRtcNetEQ_DbIsCNGPayload(&inst->codec_DB_inst,
+            payloadType) && dspInfo.playedOutTS >= uw32_availableTS)
+        {
+
+            /* Don't use this packet, discard it */
+            inst->PacketBuffer_inst.payloadType[i_bufferpos] = -1;
+            inst->PacketBuffer_inst.payloadLengthBytes[i_bufferpos] = 0;
+            inst->PacketBuffer_inst.numPacketsInBuffer--;
+
+            /* Check buffer again */
+            WebRtcNetEQ_PacketBufferFindLowestTimestamp(&inst->PacketBuffer_inst,
+                dspInfo.playedOutTS, &uw32_availableTS, &i_bufferpos, (inst->new_codec == 0),
+                &payloadType);
+        }
+    }
+
+    /* Check packet buffer */
+    w32_bufsize = WebRtcNetEQ_PacketBufferGetSize(&inst->PacketBuffer_inst);
+
+    if (dspInfo.lastMode == MODE_SUCCESS_ACCELERATE || dspInfo.lastMode
+        == MODE_LOWEN_ACCELERATE || dspInfo.lastMode == MODE_SUCCESS_PREEMPTIVE
+        || dspInfo.lastMode == MODE_LOWEN_PREEMPTIVE)
+    {
+        /* Subtract (dspInfo.samplesLeft + inst->timestampsPerCall) from sampleMemory */
+        inst->BufferStat_inst.Automode_inst.sampleMemory -= dspInfo.samplesLeft
+            + inst->timestampsPerCall;
+    }
+
+    /* calculate total current buffer size (in ms*8), including sync buffer */
+    w32_bufsize = WebRtcSpl_DivW32W16((w32_bufsize + dspInfo.samplesLeft), fs_mult);
+
+#ifdef NETEQ_ATEVENT_DECODE
+    /* DTMF data will affect the decision */
+    if (WebRtcNetEQ_DtmfDecode(&inst->DTMF_inst, blockPtr + 1, blockPtr + 2,
+        dspInfo.playedOutTS + inst->BufferStat_inst.uw32_CNGplayedTS) > 0)
+    {
+        playDtmf = 1;
+
+        /* Flag DTMF payload */
+        inst->pw16_writeAddress[0] = inst->pw16_writeAddress[0] | DSP_DTMF_PAYLOAD;
+
+        /* Block Length in bytes */
+        blockPtr[0] = 4;
+        /* Advance to next payload position */
+        blockPtr += 3;
+    }
+#endif
+
+    /* Update statistics and make decision */
+    uw16_instr = WebRtcNetEQ_BufstatsDecision(&inst->BufferStat_inst,
+        inst->PacketBuffer_inst.packSizeSamples, w32_bufsize, dspInfo.playedOutTS,
+        uw32_availableTS, i_bufferpos == -1,
+        WebRtcNetEQ_DbIsCNGPayload(&inst->codec_DB_inst, payloadType), dspInfo.lastMode,
+        inst->NetEqPlayoutMode, inst->timestampsPerCall, inst->NoOfExpandCalls, fs_mult,
+        lastModeBGNonly, playDtmf);
+
+    /* Check if time to reset loss counter */
+    if (inst->lastReportTS > WEBRTC_SPL_UMUL(inst->fs, MAX_LOSS_REPORT_PERIOD))
+    {
+        /* reset loss counter */
+        WebRtcNetEQ_ResetMcuInCallStats(inst);
+    }
+
+    /* Check sync buffer size */
+    if ((dspInfo.samplesLeft >= inst->timestampsPerCall) && (uw16_instr
+        != BUFSTATS_DO_ACCELERATE) && (uw16_instr != BUFSTATS_DO_MERGE) && (uw16_instr
+            != BUFSTATS_DO_PREEMPTIVE_EXPAND))
+    {
+        *blockPtr = 0;
+        inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff) | DSP_INSTR_NORMAL;
+        return 0;
+    }
+
+    if (uw16_instr == BUFSTATS_DO_EXPAND)
+    {
+        inst->NoOfExpandCalls++;
+    }
+    else
+    {
+        /* reset counter */
+        inst->NoOfExpandCalls = 0;
+    }
+
+    /* New codec or big change in packet number? */
+    if ((inst->new_codec) || (uw16_instr == BUFSTAT_REINIT))
+    {
+        CodecFuncInst_t cinst;
+
+        /* Clear other instructions */
+        blockPtr = &inst->pw16_writeAddress[3];
+        /* Clear instruction word */
+        inst->pw16_writeAddress[0] = 0;
+
+        inst->timeStamp = uw32_availableTS;
+        dspInfo.playedOutTS = uw32_availableTS;
+        if (inst->current_Codec != -1)
+        {
+            i_res = WebRtcNetEQ_DbGetPtrs(&inst->codec_DB_inst,
+                (enum WebRtcNetEQDecoder) inst->current_Codec, &cinst);
+            if (i_res < 0)
+            { /* error returned */
+                return i_res;
+            }
+        }
+        else
+        {
+            /* The main codec has not been initialized yet (first packets are DTMF or CNG). */
+            if (WebRtcNetEQ_DbIsCNGPayload(&inst->codec_DB_inst, payloadType))
+            {
+                /* The currently extracted packet is CNG; get CNG fs */
+                WebRtc_UWord16 tempFs;
+
+                tempFs = WebRtcNetEQ_DbGetSampleRate(&inst->codec_DB_inst, payloadType);
+                if (tempFs > 0)
+                {
+                    inst->fs = tempFs;
+                }
+            }
+            WebRtcSpl_MemSetW16((WebRtc_Word16*) &cinst, 0,
+                                sizeof(CodecFuncInst_t) / sizeof(WebRtc_Word16));
+            cinst.codec_fs = inst->fs;
+        }
+        cinst.timeStamp = inst->timeStamp;
+        blockLen = (sizeof(CodecFuncInst_t)) >> (sizeof(WebRtc_Word16) - 1); /* in Word16 */
+        *blockPtr = blockLen * 2;
+        blockPtr++;
+        WEBRTC_SPL_MEMCPY_W8(blockPtr,&cinst,sizeof(CodecFuncInst_t));
+        blockPtr += blockLen;
+        inst->new_codec = 0;
+
+        /* Reinitialize the MCU fs */
+        i_res = WebRtcNetEQ_McuSetFs(inst, cinst.codec_fs);
+        if (i_res < 0)
+        { /* error returned */
+            return i_res;
+        }
+
+        /* Set the packet size by guessing */
+        inst->PacketBuffer_inst.packSizeSamples = inst->timestampsPerCall * 3;
+
+        WebRtcNetEQ_ResetAutomode(&(inst->BufferStat_inst.Automode_inst),
+                                  inst->PacketBuffer_inst.maxInsertPositions);
+
+#ifdef NETEQ_CNG_CODEC
+        /* Also insert CNG state as this might be needed by DSP */
+        i_res = WebRtcNetEQ_DbGetPtrs(&inst->codec_DB_inst, kDecoderCNG, &cinst);
+        if ((i_res < 0) && (i_res != CODEC_DB_NOT_EXIST1))
+        {
+            /* other error returned */
+            /* (CODEC_DB_NOT_EXIST1 simply indicates that CNG is not used */
+            return i_res;
+        }
+        else
+        {
+            /* CNG exists */
+            blockLen = (sizeof(cinst.codec_state)) >> (sizeof(WebRtc_Word16) - 1);
+            *blockPtr = blockLen * 2;
+            blockPtr++;
+            WEBRTC_SPL_MEMCPY_W8(blockPtr,&cinst.codec_state,sizeof(cinst.codec_state));
+            blockPtr += blockLen;
+        }
+#endif
+
+        inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0xf0ff)
+                | DSP_CODEC_NEW_CODEC;
+
+        if (uw16_instr == BUFSTATS_DO_RFC3389CNG_NOPACKET)
+        {
+            /*
+             * Change decision to CNG packet, since we do have a CNG packet, but it was
+             * considered too early to use. Now, use it anyway.
+             */
+            uw16_instr = BUFSTATS_DO_RFC3389CNG_PACKET;
+        }
+        else if (uw16_instr != BUFSTATS_DO_RFC3389CNG_PACKET)
+        {
+            uw16_instr = BUFSTATS_DO_NORMAL;
+        }
+
+        /* reset loss counter */
+        WebRtcNetEQ_ResetMcuInCallStats(inst);
+    }
+
+    /* Should we just reset the decoder? */
+    if (uw16_instr == BUFSTAT_REINIT_DECODER)
+    {
+        /* Change decision to normal and flag decoder reset */
+        uw16_instr = BUFSTATS_DO_NORMAL;
+        inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0xf0ff) | DSP_CODEC_RESET;
+    }
+
+    /* Expand requires no new packet */
+    if (uw16_instr == BUFSTATS_DO_EXPAND)
+    {
+
+        inst->timeStamp = dspInfo.playedOutTS;
+
+        /* Have we got one descriptor left? */
+        if (WebRtcNetEQ_DbIsMDCodec((enum WebRtcNetEQDecoder) inst->current_Codec)
+            && (dspInfo.MD || latePacketExist))
+        {
+
+            if (dspInfo.lastMode != MODE_ONE_DESCRIPTOR)
+            {
+                /* this is the first "consecutive" one-descriptor decoding; reset counter */
+                inst->one_desc = 0;
+            }
+            if (inst->one_desc < MAX_ONE_DESC)
+            {
+                /* use that one descriptor */
+                inst->one_desc++; /* increase counter */
+                inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                        | DSP_INSTR_NORMAL_ONE_DESC;
+
+                /* decrease counter since we did no Expand */
+                inst->NoOfExpandCalls = WEBRTC_SPL_MAX(inst->NoOfExpandCalls - 1, 0);
+                return 0;
+            }
+            else
+            {
+                /* too many consecutive one-descriptor decodings; do expand instead */
+                inst->one_desc = 0; /* reset counter */
+            }
+
+        }
+
+        inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff) | DSP_INSTR_EXPAND;
+        return 0;
+    }
+
+    /* Merge is not needed if we still have a descriptor */
+    if ((uw16_instr == BUFSTATS_DO_MERGE) && (dspInfo.MD != 0))
+    {
+        inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                | DSP_INSTR_NORMAL_ONE_DESC;
+        *blockPtr = 0;
+        return 0;
+    }
+
+    /* Do CNG without trying to extract any packets from buffer */
+    if (uw16_instr == BUFSTATS_DO_RFC3389CNG_NOPACKET)
+    {
+        inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                | DSP_INSTR_DO_RFC3389CNG;
+        *blockPtr = 0;
+        return 0;
+    }
+
+    /* Do built-in CNG without extracting any new packets from buffer */
+    if (uw16_instr == BUFSTATS_DO_INTERNAL_CNG_NOPACKET)
+    {
+        inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                | DSP_INSTR_DO_CODEC_INTERNAL_CNG;
+        *blockPtr = 0;
+        return 0;
+    }
+
+    /* Do DTMF without extracting any new packets from buffer */
+    if (uw16_instr == BUFSTATS_DO_DTMF_ONLY)
+    {
+        WebRtc_UWord32 timeStampJump = 0;
+
+        /* Update timestamp */
+        if ((inst->BufferStat_inst.uw32_CNGplayedTS > 0) && (dspInfo.lastMode != MODE_DTMF))
+        {
+            /* Jump in timestamps if needed */
+            timeStampJump = inst->BufferStat_inst.uw32_CNGplayedTS;
+            inst->pw16_writeAddress[1] = (WebRtc_UWord16) (timeStampJump >> 16);
+            inst->pw16_writeAddress[2] = (WebRtc_UWord16) (timeStampJump & 0xFFFF);
+        }
+
+        inst->timeStamp = dspInfo.playedOutTS + timeStampJump;
+
+        inst->BufferStat_inst.uw32_CNGplayedTS = 0;
+        inst->NoOfExpandCalls = 0;
+
+        inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                | DSP_INSTR_DTMF_GENERATE;
+        *blockPtr = 0;
+        return 0;
+    }
+
+    if (uw16_instr == BUFSTATS_DO_ACCELERATE)
+    {
+        /* In order to do a Accelerate we need at least 30 ms of data */
+        if (dspInfo.samplesLeft >= (3 * 80 * fs_mult))
+        {
+            /* Already have enough data, so we do not need to extract any more */
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                    | DSP_INSTR_ACCELERATE;
+            *blockPtr = 0;
+            inst->BufferStat_inst.Automode_inst.sampleMemory
+            = (WebRtc_Word32) dspInfo.samplesLeft;
+            inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
+            return 0;
+        }
+        else if ((dspInfo.samplesLeft >= (1 * 80 * fs_mult))
+            && (inst->PacketBuffer_inst.packSizeSamples >= (240 * fs_mult)))
+        {
+            /* Avoid decoding more data as it might overflow playout buffer */
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                    | DSP_INSTR_NORMAL;
+            *blockPtr = 0;
+            return 0;
+        }
+        else if ((dspInfo.samplesLeft < (1 * 80 * fs_mult))
+            && (inst->PacketBuffer_inst.packSizeSamples >= (240 * fs_mult)))
+        {
+            /* For >= 30ms allow Accelerate with a decoding to avoid overflow in playout buffer */
+            wantedNoOfTimeStamps = inst->timestampsPerCall;
+        }
+        else if (dspInfo.samplesLeft >= (2 * 80 * fs_mult))
+        {
+            /* We need to decode another 10 ms in order to do an Accelerate */
+            wantedNoOfTimeStamps = inst->timestampsPerCall;
+        }
+        else
+        {
+            /*
+             * Build up decoded data by decoding at least 20 ms of data.
+             * Do not perform Accelerate yet, but wait until we only need to do one decoding.
+             */
+            wantedNoOfTimeStamps = 2 * inst->timestampsPerCall;
+            uw16_instr = BUFSTATS_DO_NORMAL;
+        }
+    }
+    else if (uw16_instr == BUFSTATS_DO_PREEMPTIVE_EXPAND)
+    {
+        /* In order to do a Preemptive Expand we need at least 30 ms of data */
+        if (dspInfo.samplesLeft >= (3 * 80 * fs_mult))
+        {
+            /* Already have enough data, so we do not need to extract any more */
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                    | DSP_INSTR_PREEMPTIVE_EXPAND;
+            *blockPtr = 0;
+            inst->BufferStat_inst.Automode_inst.sampleMemory
+            = (WebRtc_Word32) dspInfo.samplesLeft;
+            inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
+            return 0;
+        }
+        else if ((dspInfo.samplesLeft >= (1 * 80 * fs_mult))
+            && (inst->PacketBuffer_inst.packSizeSamples >= (240 * fs_mult)))
+        {
+            /*
+             * Avoid decoding more data as it might overflow playout buffer;
+             * still try Preemptive Expand though.
+             */
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                    | DSP_INSTR_PREEMPTIVE_EXPAND;
+            *blockPtr = 0;
+            inst->BufferStat_inst.Automode_inst.sampleMemory
+            = (WebRtc_Word32) dspInfo.samplesLeft;
+            inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
+            return 0;
+        }
+        else if ((dspInfo.samplesLeft < (1 * 80 * fs_mult))
+            && (inst->PacketBuffer_inst.packSizeSamples >= (240 * fs_mult)))
+        {
+            /*
+             * For >= 30ms allow Preemptive Expand with a decoding to avoid overflow in
+             * playout buffer
+             */
+            wantedNoOfTimeStamps = inst->timestampsPerCall;
+        }
+        else if (dspInfo.samplesLeft >= (2 * 80 * fs_mult))
+        {
+            /* We need to decode another 10 ms in order to do an Preemptive Expand */
+            wantedNoOfTimeStamps = inst->timestampsPerCall;
+        }
+        else
+        {
+            /*
+             * Build up decoded data by decoding at least 20 ms of data,
+             * Still try to perform Preemptive Expand.
+             */
+            wantedNoOfTimeStamps = 2 * inst->timestampsPerCall;
+        }
+    }
+    else
+    {
+        wantedNoOfTimeStamps = inst->timestampsPerCall;
+    }
+
+    /* Otherwise get data from buffer, try to get at least 10ms */
+    totalTS = 0;
+    oldTS = uw32_availableTS;
+    if ((i_bufferpos > -1) && (uw16_instr != BUFSTATS_DO_ALTERNATIVE_PLC) && (uw16_instr
+        != BUFSTATS_DO_ALTERNATIVE_PLC_INC_TS) && (uw16_instr != BUFSTATS_DO_AUDIO_REPETITION)
+        && (uw16_instr != BUFSTATS_DO_AUDIO_REPETITION_INC_TS))
+    {
+        uw32_tmp = (uw32_availableTS - dspInfo.playedOutTS);
+        inst->pw16_writeAddress[1] = (WebRtc_UWord16) (uw32_tmp >> 16);
+        inst->pw16_writeAddress[2] = (WebRtc_UWord16) (uw32_tmp & 0xFFFF);
+        if (inst->BufferStat_inst.w16_cngOn == CNG_OFF)
+        {
+            /*
+             * Adjustment of TS only corresponds to an actual packet loss
+             * if comfort noise is not played. If comfort noise was just played,
+             * this adjustment of TS is only done to get back in sync with the
+             * stream TS; no loss to report.
+             */
+            inst->lostTS += uw32_tmp;
+        }
+
+        if (uw16_instr != BUFSTATS_DO_RFC3389CNG_PACKET)
+        {
+            /* We are about to decode and use a non-CNG packet => CNG period is ended */
+            inst->BufferStat_inst.w16_cngOn = CNG_OFF;
+        }
+
+        /*
+         * Reset CNG timestamp as a new packet will be delivered.
+         * (Also if CNG packet, since playedOutTS is updated.)
+         */
+        inst->BufferStat_inst.uw32_CNGplayedTS = 0;
+
+        prevSeqNo = inst->PacketBuffer_inst.seqNumber[i_bufferpos];
+        prevTS = inst->PacketBuffer_inst.timeStamp[i_bufferpos];
+        oldPT = inst->PacketBuffer_inst.payloadType[i_bufferpos];
+
+        /* clear flag bits */
+        inst->pw16_writeAddress[0] = inst->pw16_writeAddress[0] & 0xFF3F;
+        do
+        {
+            int waitingTime;
+            inst->timeStamp = uw32_availableTS;
+            /* Write directly to shared memory */
+            temp_pkt.payload = blockPtr + 1;
+            i_res = WebRtcNetEQ_PacketBufferExtract(&inst->PacketBuffer_inst, &temp_pkt,
+                i_bufferpos, &waitingTime);
+
+            if (i_res < 0)
+            {
+                /* error returned */
+                return i_res;
+            }
+            WebRtcNetEQ_StoreWaitingTime(inst, waitingTime);
+
+#ifdef NETEQ_DELAY_LOGGING
+            temp_var = NETEQ_DELAY_LOGGING_SIGNAL_DECODE;
+            if ((fwrite(&temp_var, sizeof(int),
+                        1, delay_fid2) != 1) ||
+                (fwrite(&temp_pkt.timeStamp, sizeof(WebRtc_UWord32),
+                        1, delay_fid2) != 1) ||
+                (fwrite(&dspInfo.samplesLeft, sizeof(WebRtc_UWord16),
+                        1, delay_fid2) != 1)) {
+              return -1;
+            }
+#endif
+
+            *blockPtr = temp_pkt.payloadLen;
+            /* set the flag if this is a redundant payload */
+            if (temp_pkt.rcuPlCntr > 0)
+            {
+                *blockPtr = (*blockPtr) | (DSP_CODEC_RED_FLAG);
+            }
+            blockPtr += ((temp_pkt.payloadLen + 1) >> 1) + 1;
+
+            if (i_bufferpos > -1)
+            {
+                /*
+                 * Store number of TS extracted (last extracted is assumed to be of
+                 * packSizeSamples).
+                 */
+                totalTS = uw32_availableTS - oldTS + inst->PacketBuffer_inst.packSizeSamples;
+            }
+            /* Check what next packet is available */
+            WebRtcNetEQ_PacketBufferFindLowestTimestamp(&inst->PacketBuffer_inst,
+                inst->timeStamp, &uw32_availableTS, &i_bufferpos, 0, &payloadType);
+
+            nextSeqNoAvail = 0;
+            if ((i_bufferpos > -1) && (oldPT
+                == inst->PacketBuffer_inst.payloadType[i_bufferpos]))
+            {
+                w16_tmp = inst->PacketBuffer_inst.seqNumber[i_bufferpos] - prevSeqNo;
+                w32_tmp = inst->PacketBuffer_inst.timeStamp[i_bufferpos] - prevTS;
+                if ((w16_tmp == 1) || /* Next packet */
+                    ((w16_tmp == 0) && (w32_tmp == inst->PacketBuffer_inst.packSizeSamples)))
+                { /* or packet split into frames */
+                    nextSeqNoAvail = 1;
+                }
+                prevSeqNo = inst->PacketBuffer_inst.seqNumber[i_bufferpos];
+            }
+
+        }
+        while ((totalTS < wantedNoOfTimeStamps) && (nextSeqNoAvail == 1));
+    }
+
+    if ((uw16_instr == BUFSTATS_DO_ACCELERATE)
+        || (uw16_instr == BUFSTATS_DO_PREEMPTIVE_EXPAND))
+    {
+        /* Check that we have enough data (30ms) to do the Accelearate */
+        if ((totalTS + dspInfo.samplesLeft) < WEBRTC_SPL_MUL(3,inst->timestampsPerCall)
+            && (uw16_instr == BUFSTATS_DO_ACCELERATE))
+        {
+            /* Not enough, do normal operation instead */
+            uw16_instr = BUFSTATS_DO_NORMAL;
+        }
+        else
+        {
+            inst->BufferStat_inst.Automode_inst.sampleMemory
+            = (WebRtc_Word32) dspInfo.samplesLeft + totalTS;
+            inst->BufferStat_inst.Automode_inst.prevTimeScale = 1;
+        }
+    }
+
+    /* Close the data with a zero size block */
+    *blockPtr = 0;
+
+    /* Write data to DSP */
+    switch (uw16_instr)
+    {
+        case BUFSTATS_DO_NORMAL:
+            /* Normal with decoding included */
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+            | DSP_INSTR_NORMAL;
+            break;
+        case BUFSTATS_DO_ACCELERATE:
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+            | DSP_INSTR_ACCELERATE;
+            break;
+        case BUFSTATS_DO_MERGE:
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+            | DSP_INSTR_MERGE;
+            break;
+        case BUFSTATS_DO_RFC3389CNG_PACKET:
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+            | DSP_INSTR_DO_RFC3389CNG;
+            break;
+        case BUFSTATS_DO_ALTERNATIVE_PLC:
+            inst->pw16_writeAddress[1] = 0;
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                    | DSP_INSTR_DO_ALTERNATIVE_PLC;
+            break;
+        case BUFSTATS_DO_ALTERNATIVE_PLC_INC_TS:
+            inst->pw16_writeAddress[1] = 0;
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                    | DSP_INSTR_DO_ALTERNATIVE_PLC_INC_TS;
+            break;
+        case BUFSTATS_DO_AUDIO_REPETITION:
+            inst->pw16_writeAddress[1] = 0;
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                    | DSP_INSTR_DO_AUDIO_REPETITION;
+            break;
+        case BUFSTATS_DO_AUDIO_REPETITION_INC_TS:
+            inst->pw16_writeAddress[1] = 0;
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+                    | DSP_INSTR_DO_AUDIO_REPETITION_INC_TS;
+            break;
+        case BUFSTATS_DO_PREEMPTIVE_EXPAND:
+            inst->pw16_writeAddress[0] = (inst->pw16_writeAddress[0] & 0x0fff)
+            | DSP_INSTR_PREEMPTIVE_EXPAND;
+            break;
+        default:
+            return UNKNOWN_BUFSTAT_DECISION;
+    }
+
+    inst->timeStamp = dspInfo.playedOutTS;
+    return 0;
+
+}
diff --git a/src/modules/audio_coding/neteq/split_and_insert.c b/src/modules/audio_coding/neteq/split_and_insert.c
new file mode 100644
index 0000000..03c1569
--- /dev/null
+++ b/src/modules/audio_coding/neteq/split_and_insert.c
@@ -0,0 +1,141 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Split an RTP payload (if possible and suitable) and insert into packet buffer.
+ */
+
+#include "mcu.h"
+
+#include <string.h>
+
+#include "signal_processing_library.h"
+
+#include "neteq_error_codes.h"
+
+int WebRtcNetEQ_SplitAndInsertPayload(RTPPacket_t *packet, PacketBuf_t *Buffer_inst,
+                                      SplitInfo_t *split_inst, WebRtc_Word16 *flushed)
+{
+
+    int i_ok;
+    int len;
+    int i;
+    RTPPacket_t temp_packet;
+    WebRtc_Word16 localFlushed = 0;
+    const WebRtc_Word16 *pw16_startPayload;
+    *flushed = 0;
+
+    len = packet->payloadLen;
+
+    /* Copy to temp packet that can be modified. */
+
+    WEBRTC_SPL_MEMCPY_W8(&temp_packet,packet,sizeof(RTPPacket_t));
+
+    if (split_inst->deltaBytes == NO_SPLIT)
+    {
+        /* Not splittable codec */
+        i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, packet, &localFlushed);
+        *flushed |= localFlushed;
+        if (i_ok < 0)
+        {
+            return PBUFFER_INSERT_ERROR5;
+        }
+    }
+    else if (split_inst->deltaBytes < -10)
+    {
+        /* G711, PCM16B or G722, use "soft splitting" */
+        int split_size = packet->payloadLen;
+        int mult = WEBRTC_SPL_ABS_W32(split_inst->deltaBytes) - 10;
+
+        /* Find "chunk size" >= 20 ms and < 40 ms
+         * split_inst->deltaTime in this case contains the number of bytes per
+         * timestamp unit times 2
+         */
+        while (split_size >= ((80 << split_inst->deltaTime) * mult))
+        {
+            split_size >>= 1;
+        }
+
+        /* Make the size an even value. */
+        if (split_size > 1)
+        {
+            split_size >>= 1;
+            split_size *= 2;
+        }
+
+        temp_packet.payloadLen = split_size;
+        pw16_startPayload = temp_packet.payload;
+        i = 0;
+        while (len >= (2 * split_size))
+        {
+            /* insert every chunk */
+            i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, &temp_packet, &localFlushed);
+            *flushed |= localFlushed;
+            temp_packet.timeStamp += ((2 * split_size) >> split_inst->deltaTime);
+            i++;
+            temp_packet.payload = &(pw16_startPayload[(i * split_size) >> 1]);
+            temp_packet.starts_byte1 = temp_packet.starts_byte1 ^ (split_size & 0x1);
+
+            len -= split_size;
+            if (i_ok < 0)
+            {
+                return PBUFFER_INSERT_ERROR1;
+            }
+        }
+
+        /* Insert the rest */
+        temp_packet.payloadLen = len;
+        i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, &temp_packet, &localFlushed);
+        *flushed |= localFlushed;
+        if (i_ok < 0)
+        {
+            return PBUFFER_INSERT_ERROR2;
+        }
+    }
+    else
+    {
+        /* Frame based codec, use hard splitting. */
+        i = 0;
+        pw16_startPayload = temp_packet.payload;
+        while (len >= split_inst->deltaBytes)
+        {
+
+            temp_packet.payloadLen = split_inst->deltaBytes;
+            i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, &temp_packet, &localFlushed);
+            *flushed |= localFlushed;
+            i++;
+            temp_packet.payload = &(pw16_startPayload[(i * split_inst->deltaBytes) >> 1]);
+            temp_packet.timeStamp += split_inst->deltaTime;
+            temp_packet.starts_byte1 = temp_packet.starts_byte1 ^ (split_inst->deltaBytes
+                & 0x1);
+
+            if (i_ok < 0)
+            {
+                return PBUFFER_INSERT_ERROR3;
+            }
+            len -= split_inst->deltaBytes;
+
+        }
+        if (len > 0)
+        {
+            /* Must be a either an error or a SID frame at the end of the packet. */
+            temp_packet.payloadLen = len;
+            i_ok = WebRtcNetEQ_PacketBufferInsert(Buffer_inst, &temp_packet, &localFlushed);
+            *flushed |= localFlushed;
+            if (i_ok < 0)
+            {
+                return PBUFFER_INSERT_ERROR4;
+            }
+        }
+    }
+
+    return 0;
+}
+
diff --git a/src/modules/audio_coding/neteq/test/NETEQTEST_CodecClass.cc b/src/modules/audio_coding/neteq/test/NETEQTEST_CodecClass.cc
new file mode 100644
index 0000000..0056ddc
--- /dev/null
+++ b/src/modules/audio_coding/neteq/test/NETEQTEST_CodecClass.cc
@@ -0,0 +1,678 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "NETEQTEST_CodecClass.h"
+
+#include <stdlib.h>  // exit
+
+#include "webrtc_neteq_help_macros.h"
+
+NETEQTEST_Decoder::NETEQTEST_Decoder(enum WebRtcNetEQDecoder type, WebRtc_UWord16 fs, const char * name, WebRtc_UWord8 pt)
+:
+_decoder(NULL),
+_decoderType(type),
+_pt(pt),
+_fs(fs),
+_name(name)
+{
+}
+
+int NETEQTEST_Decoder::loadToNetEQ(NETEQTEST_NetEQClass & neteq, WebRtcNetEQ_CodecDef & codecInst)
+{
+    SET_CODEC_PAR(codecInst, _decoderType, _pt, _decoder, _fs);
+    int err = neteq.loadCodec(codecInst);
+    
+    if (err)
+    {
+        printf("Error loading codec %s into NetEQ database\n", _name.c_str());
+    }
+
+    return(err);
+}
+
+
+// iSAC
+#ifdef CODEC_ISAC
+#include "isac.h"
+
+decoder_iSAC::decoder_iSAC(WebRtc_UWord8 pt) 
+:
+NETEQTEST_Decoder(kDecoderISAC, 16000, "iSAC", pt)
+{
+    WebRtc_Word16 err = WebRtcIsac_Create((ISACStruct **) &_decoder);
+    if (err)
+    {
+        exit(EXIT_FAILURE);
+    }
+
+    WebRtcIsac_EncoderInit((ISACStruct *) _decoder, 0);
+    WebRtcIsac_SetDecSampRate((ISACStruct *) _decoder, kIsacWideband);
+}
+
+
+decoder_iSAC::~decoder_iSAC()
+{
+    if (_decoder)
+    {
+        WebRtcIsac_Free((ISACStruct *) _decoder);
+        _decoder = NULL;
+    }
+}
+
+
+int decoder_iSAC::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_ISAC_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+
+}
+#endif
+
+#ifdef CODEC_ISAC_SWB
+decoder_iSACSWB::decoder_iSACSWB(WebRtc_UWord8 pt) 
+:
+NETEQTEST_Decoder(kDecoderISACswb, 32000, "iSAC swb", pt)
+{
+    WebRtc_Word16 err = WebRtcIsac_Create((ISACStruct **) &_decoder);
+    if (err)
+    {
+        exit(EXIT_FAILURE);
+    }
+
+    WebRtcIsac_EncoderInit((ISACStruct *) _decoder, 0);
+    WebRtcIsac_SetDecSampRate((ISACStruct *) _decoder, kIsacSuperWideband);
+}
+
+decoder_iSACSWB::~decoder_iSACSWB()
+{
+    if (_decoder)
+    {
+        WebRtcIsac_Free((ISACStruct *) _decoder);
+        _decoder = NULL;
+    }
+}
+
+int decoder_iSACSWB::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_ISACSWB_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+
+}
+#endif
+
+// PCM u/A
+#ifdef CODEC_G711
+#include "g711_interface.h"
+
+decoder_PCMU::decoder_PCMU(WebRtc_UWord8 pt) 
+:
+NETEQTEST_Decoder(kDecoderPCMu, 8000, "G.711-u", pt)
+{
+    // no state to crate or init
+}
+
+int decoder_PCMU::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_PCMU_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+
+}
+
+decoder_PCMA::decoder_PCMA(WebRtc_UWord8 pt) 
+:
+NETEQTEST_Decoder(kDecoderPCMa, 8000, "G.711-A", pt)
+{
+    // no state to crate or init
+}
+
+int decoder_PCMA::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_PCMA_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
+
+// Linear PCM16b
+#if (defined(CODEC_PCM16B) || defined(CODEC_PCM16B_WB) || \
+    defined(CODEC_PCM16B_32KHZ) || defined(CODEC_PCM16B_48KHZ))
+#include "pcm16b.h"
+#endif
+
+#ifdef CODEC_PCM16B
+int decoder_PCM16B_NB::loadToNetEQ(NETEQTEST_NetEQClass &neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_PCM16B_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
+
+#ifdef CODEC_PCM16B_WB
+int decoder_PCM16B_WB::loadToNetEQ(NETEQTEST_NetEQClass &neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_PCM16B_WB_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
+
+#ifdef CODEC_PCM16B_32KHZ
+int decoder_PCM16B_SWB32::loadToNetEQ(NETEQTEST_NetEQClass &neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_PCM16B_SWB32_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
+
+#ifdef CODEC_PCM16B_48KHZ
+int decoder_PCM16B_SWB48::loadToNetEQ(NETEQTEST_NetEQClass &neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_PCM16B_SWB48_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
+
+#ifdef CODEC_ILBC
+#include "ilbc.h"
+decoder_ILBC::decoder_ILBC(WebRtc_UWord8 pt)
+:
+NETEQTEST_Decoder(kDecoderILBC, 8000, "iLBC", pt)
+{
+    WebRtc_Word16 err = WebRtcIlbcfix_DecoderCreate((iLBC_decinst_t **) &_decoder);
+    if (err)
+    {
+        exit(EXIT_FAILURE);
+    }
+}
+
+decoder_ILBC::~decoder_ILBC()
+{
+    WebRtcIlbcfix_DecoderFree((iLBC_decinst_t *) _decoder);
+}
+
+int decoder_ILBC::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_ILBC_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
+
+#ifdef CODEC_G729
+#include "G729Interface.h"
+decoder_G729::decoder_G729(WebRtc_UWord8 pt)
+:
+NETEQTEST_Decoder(kDecoderG729, 8000, "G.729", pt)
+{
+    WebRtc_Word16 err = WebRtcG729_CreateDec((G729_decinst_t **) &_decoder);
+    if (err)
+    {
+        exit(EXIT_FAILURE);
+    }
+}
+
+decoder_G729::~decoder_G729()
+{
+    WebRtcG729_FreeDec((G729_decinst_t *) _decoder);
+}
+
+int decoder_G729::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_G729_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
+
+#ifdef CODEC_G729_1
+#include "G729_1Interface.h"
+decoder_G729_1::decoder_G729_1(WebRtc_UWord8 pt)
+:
+NETEQTEST_Decoder(kDecoderG729_1, 16000, "G.729.1", pt)
+{
+    WebRtc_Word16 err = WebRtcG7291_Create((G729_1_inst_t **) &_decoder);
+    if (err)
+    {
+        exit(EXIT_FAILURE);
+    }
+}
+
+decoder_G729_1::~decoder_G729_1()
+{
+    WebRtcG7291_Free((G729_1_inst_t *) _decoder);
+}
+
+int decoder_G729_1::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_G729_1_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
+
+#ifdef CODEC_G722
+#include "g722_interface.h"
+decoder_G722::decoder_G722(WebRtc_UWord8 pt)
+:
+NETEQTEST_Decoder(kDecoderG722, 16000, "G.722", pt)
+{
+     WebRtc_Word16 err = WebRtcG722_CreateDecoder((G722DecInst **) &_decoder);
+     if (err)
+     {
+         exit(EXIT_FAILURE);
+     }
+}
+
+decoder_G722::~decoder_G722()
+{
+    WebRtcG722_FreeDecoder((G722DecInst *) _decoder);
+}
+
+int decoder_G722::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_G722_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
+
+#if (defined(CODEC_G722_1_16) || defined(CODEC_G722_1_24) || \
+    defined(CODEC_G722_1_32) || defined(CODEC_G722_1C_24) || \
+    defined(CODEC_G722_1C_32) || defined(CODEC_G722_1C_48))
+#include "G722_1Interface.h"
+#endif
+
+#ifdef CODEC_G722_1_16
+decoder_G722_1_16::decoder_G722_1_16(WebRtc_UWord8 pt)
+:
+NETEQTEST_Decoder(kDecoderG722_1_16, 16000, "G.722.1 (16 kbps)", pt)
+{
+    if (WebRtcG7221_CreateDec16((G722_1_16_decinst_t **) &_decoder))
+    {
+        exit(EXIT_FAILURE);
+    }
+}
+
+decoder_G722_1_16::~decoder_G722_1_16()
+{
+    WebRtcG7221_FreeDec16((G722_1_16_decinst_t *) _decoder);
+}
+
+int decoder_G722_1_16::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_G722_1_16_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
+
+#ifdef CODEC_G722_1_24
+decoder_G722_1_24::decoder_G722_1_24(WebRtc_UWord8 pt)
+:
+NETEQTEST_Decoder(kDecoderG722_1_24, 16000, "G.722.1 (24 kbps)", pt)
+{
+    if (WebRtcG7221_CreateDec24((G722_1_24_decinst_t **) &_decoder))
+    {
+        exit(EXIT_FAILURE);
+    }
+}
+
+decoder_G722_1_24::~decoder_G722_1_24()
+{
+    WebRtcG7221_FreeDec24((G722_1_24_decinst_t *) _decoder);
+}
+
+int decoder_G722_1_24::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_G722_1_24_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
+
+#ifdef CODEC_G722_1_32
+decoder_G722_1_32::decoder_G722_1_32(WebRtc_UWord8 pt)
+:
+NETEQTEST_Decoder(kDecoderG722_1_32, 16000, "G.722.1 (32 kbps)", pt)
+{
+    if (WebRtcG7221_CreateDec32((G722_1_32_decinst_t **) &_decoder))
+    {
+        exit(EXIT_FAILURE);
+    }
+}
+
+decoder_G722_1_32::~decoder_G722_1_32()
+{
+    WebRtcG7221_FreeDec32((G722_1_32_decinst_t *) _decoder);
+}
+
+int decoder_G722_1_32::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_G722_1_32_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
+
+#ifdef CODEC_G722_1C_24
+decoder_G722_1C_24::decoder_G722_1C_24(WebRtc_UWord8 pt)
+:
+NETEQTEST_Decoder(kDecoderG722_1C_24, 32000, "G.722.1C (24 kbps)", pt)
+{
+    if (WebRtcG7221C_CreateDec24((G722_1C_24_decinst_t **) &_decoder))
+        exit(EXIT_FAILURE);
+}
+
+decoder_G722_1C_24::~decoder_G722_1C_24()
+{
+    WebRtcG7221C_FreeDec24((G722_1C_24_decinst_t *) _decoder);
+}
+
+int decoder_G722_1C_24::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_G722_1C_24_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
+
+#ifdef CODEC_G722_1C_32
+decoder_G722_1C_32::decoder_G722_1C_32(WebRtc_UWord8 pt)
+:
+NETEQTEST_Decoder(kDecoderG722_1C_32, 32000, "G.722.1C (32 kbps)", pt)
+{
+    if (WebRtcG7221C_CreateDec32((G722_1C_32_decinst_t **) &_decoder))
+        exit(EXIT_FAILURE);
+}
+
+decoder_G722_1C_32::~decoder_G722_1C_32()
+{
+    WebRtcG7221C_FreeDec32((G722_1C_32_decinst_t *) _decoder);
+}
+
+int decoder_G722_1C_32::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_G722_1C_32_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
+
+#ifdef CODEC_G722_1C_48
+decoder_G722_1C_48::decoder_G722_1C_48(WebRtc_UWord8 pt)
+:
+NETEQTEST_Decoder(kDecoderG722_1C_48, 32000, "G.722.1C (48 kbps)", pt)
+{
+    if (WebRtcG7221C_CreateDec48((G722_1C_48_decinst_t **) &_decoder))
+        exit(EXIT_FAILURE);
+}
+
+decoder_G722_1C_48::~decoder_G722_1C_48()
+{
+    WebRtcG7221C_FreeDec48((G722_1C_48_decinst_t *) _decoder);
+}
+
+int decoder_G722_1C_48::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_G722_1C_48_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
+
+#ifdef CODEC_AMR
+#include "AMRInterface.h"
+#include "AMRCreation.h"
+decoder_AMR::decoder_AMR(WebRtc_UWord8 pt)
+:
+NETEQTEST_Decoder(kDecoderAMR, 8000, "AMR", pt)
+{
+    if (WebRtcAmr_CreateDec((AMR_decinst_t **) &_decoder))
+        exit(EXIT_FAILURE);
+
+    WebRtcAmr_DecodeBitmode((AMR_decinst_t *) _decoder, AMRBandwidthEfficient);
+}
+
+decoder_AMR::~decoder_AMR()
+{
+    WebRtcAmr_FreeDec((AMR_decinst_t *) _decoder);
+}
+
+int decoder_AMR::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_AMR_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
+
+#ifdef CODEC_AMRWB
+#include "AMRWBInterface.h"
+#include "AMRWBCreation.h"
+decoder_AMRWB::decoder_AMRWB(WebRtc_UWord8 pt)
+:
+NETEQTEST_Decoder(kDecoderAMRWB, 16000, "AMR wb", pt)
+{
+    if (WebRtcAmrWb_CreateDec((AMRWB_decinst_t **) &_decoder))
+        exit(EXIT_FAILURE);
+
+    WebRtcAmrWb_DecodeBitmode((AMRWB_decinst_t *) _decoder, AMRBandwidthEfficient);
+}
+
+decoder_AMRWB::~decoder_AMRWB()
+{
+    WebRtcAmrWb_FreeDec((AMRWB_decinst_t *) _decoder);
+}
+
+int decoder_AMRWB::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_AMRWB_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
+
+#ifdef CODEC_GSMFR
+#include "GSMFRInterface.h"
+#include "GSMFRCreation.h"
+decoder_GSMFR::decoder_GSMFR(WebRtc_UWord8 pt)
+:
+NETEQTEST_Decoder(kDecoderGSMFR, 8000, "GSM-FR", pt)
+{
+    if (WebRtcGSMFR_CreateDec((GSMFR_decinst_t **) &_decoder))
+        exit(EXIT_FAILURE);
+}
+
+decoder_GSMFR::~decoder_GSMFR()
+{
+    WebRtcGSMFR_FreeDec((GSMFR_decinst_t *) _decoder);
+}
+
+int decoder_GSMFR::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_GSMFR_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
+
+#if (defined(CODEC_SPEEX_8) || defined (CODEC_SPEEX_16))
+#include "SpeexInterface.h"
+decoder_SPEEX::decoder_SPEEX(WebRtc_UWord8 pt, WebRtc_UWord16 fs)
+:
+NETEQTEST_Decoder(fs == 8000 ? kDecoderSPEEX_8 : kDecoderSPEEX_16, 
+                  fs, "SPEEX", pt)
+{
+    if (fs != 8000 && fs != 16000)
+        throw std::exception("Wrong sample rate for SPEEX");
+
+    if (WebRtcSpeex_CreateDec((SPEEX_decinst_t **) &_decoder, fs, 1))
+        exit(EXIT_FAILURE);
+}
+
+decoder_SPEEX::~decoder_SPEEX()
+{
+    WebRtcSpeex_FreeDec((SPEEX_decinst_t *) _decoder);
+}
+
+int decoder_SPEEX::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_SPEEX_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
+
+#ifdef CODEC_CELT_32
+#include "celt_interface.h"
+decoder_CELT::decoder_CELT(WebRtc_UWord8 pt, WebRtc_UWord16 fs)
+:
+NETEQTEST_Decoder(kDecoderCELT_32, fs, "CELT", pt)
+{
+   if (WebRtcCelt_CreateDec((CELT_decinst_t **) &_decoder, 2))
+        exit(EXIT_FAILURE);
+}
+
+decoder_CELT::~decoder_CELT()
+{
+    WebRtcCelt_FreeDec((CELT_decinst_t *) _decoder);
+}
+
+int decoder_CELT::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_CELT_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+
+decoder_CELTslave::decoder_CELTslave(WebRtc_UWord8 pt, WebRtc_UWord16 fs)
+:
+NETEQTEST_Decoder(kDecoderCELT_32, fs, "CELT", pt)
+{
+   if (WebRtcCelt_CreateDec((CELT_decinst_t **) &_decoder, 2))
+        exit(EXIT_FAILURE);
+}
+
+decoder_CELTslave::~decoder_CELTslave()
+{
+    WebRtcCelt_FreeDec((CELT_decinst_t *) _decoder);
+}
+
+int decoder_CELTslave::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_CELTSLAVE_FUNCTIONS(codecInst);
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
+
+#ifdef CODEC_RED
+int decoder_RED::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_RED_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
+
+#ifdef CODEC_ATEVENT_DECODE
+int decoder_AVT::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_AVT_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
+
+#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
+    defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
+#include "webrtc_cng.h"
+decoder_CNG::decoder_CNG(WebRtc_UWord8 pt, WebRtc_UWord16 fs)
+:
+NETEQTEST_Decoder(kDecoderCNG, fs, "CNG", pt)
+{
+    if (fs != 8000 && fs != 16000 && fs != 32000 && fs != 48000)
+        exit(EXIT_FAILURE);
+
+    if (WebRtcCng_CreateDec((CNG_dec_inst **) &_decoder))
+        exit(EXIT_FAILURE);
+}
+
+decoder_CNG::~decoder_CNG()
+{
+    WebRtcCng_FreeDec((CNG_dec_inst *) _decoder);
+}
+
+int decoder_CNG::loadToNetEQ(NETEQTEST_NetEQClass & neteq)
+{
+    WebRtcNetEQ_CodecDef codecInst;
+
+    SET_CNG_FUNCTIONS(codecInst);
+
+    return(NETEQTEST_Decoder::loadToNetEQ(neteq, codecInst));
+}
+#endif
diff --git a/src/modules/audio_coding/neteq/test/NETEQTEST_CodecClass.h b/src/modules/audio_coding/neteq/test/NETEQTEST_CodecClass.h
new file mode 100644
index 0000000..6990794
--- /dev/null
+++ b/src/modules/audio_coding/neteq/test/NETEQTEST_CodecClass.h
@@ -0,0 +1,308 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef NETEQTEST_CODECCLASS_H
+#define NETEQTEST_CODECCLASS_H
+
+#include <string>
+#include <string.h>
+
+#include "typedefs.h"
+#include "webrtc_neteq.h"
+#include "NETEQTEST_NetEQClass.h"
+
+class NETEQTEST_Decoder
+{
+public:
+    NETEQTEST_Decoder(enum WebRtcNetEQDecoder type, WebRtc_UWord16 fs, const char * name, WebRtc_UWord8 pt = 0);
+    virtual ~NETEQTEST_Decoder() {};
+
+    virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq) = 0;
+
+    int getName(char * name, int maxLen) const { strncpy( name, _name.c_str(), maxLen ); return 0;};
+
+    void setPT(WebRtc_UWord8 pt) { _pt = pt; };
+    WebRtc_UWord16 getFs() const { return (_fs); };
+    enum WebRtcNetEQDecoder getType() const { return (_decoderType); };
+    WebRtc_UWord8 getPT() const { return (_pt); };
+
+protected:
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq, WebRtcNetEQ_CodecDef & codecInst);
+
+    void * _decoder;
+    enum WebRtcNetEQDecoder _decoderType;
+    WebRtc_UWord8 _pt;
+    WebRtc_UWord16 _fs;
+    std::string _name;
+
+private:
+};
+
+
+class decoder_iSAC : public NETEQTEST_Decoder
+{
+public:
+    decoder_iSAC(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_iSAC();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+
+class decoder_iSACSWB : public NETEQTEST_Decoder
+{
+public:
+    decoder_iSACSWB(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_iSACSWB();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+
+class decoder_PCMU : public NETEQTEST_Decoder
+{
+public:
+    decoder_PCMU(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_PCMU() {};
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+
+class decoder_PCMA : public NETEQTEST_Decoder
+{
+public:
+    decoder_PCMA(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_PCMA() {};
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_PCM16B_NB : public NETEQTEST_Decoder
+{
+public:
+    decoder_PCM16B_NB(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderPCM16B, 8000, "PCM16 nb", pt) {};
+    virtual ~decoder_PCM16B_NB() {};
+    virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+class decoder_PCM16B_WB : public NETEQTEST_Decoder
+{
+public:
+    decoder_PCM16B_WB(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderPCM16Bwb, 16000, "PCM16 wb", pt) {};
+    virtual ~decoder_PCM16B_WB() {};
+    virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+class decoder_PCM16B_SWB32 : public NETEQTEST_Decoder
+{
+public:
+    decoder_PCM16B_SWB32(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderPCM16Bswb32kHz, 32000, "PCM16 swb32", pt) {};
+    virtual ~decoder_PCM16B_SWB32() {};
+    virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+class decoder_PCM16B_SWB48 : public NETEQTEST_Decoder
+{
+public:
+    decoder_PCM16B_SWB48(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderPCM16Bswb48kHz, 48000, "PCM16 swb48", pt) {};
+    virtual ~decoder_PCM16B_SWB48() {};
+    virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+
+class decoder_ILBC : public NETEQTEST_Decoder
+{
+public:
+    decoder_ILBC(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_ILBC();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+
+class decoder_G729 : public NETEQTEST_Decoder
+{
+public:
+    decoder_G729(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_G729();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_G729_1 : public NETEQTEST_Decoder
+{
+public:
+    decoder_G729_1(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_G729_1();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+
+class decoder_G722 : public NETEQTEST_Decoder
+{
+public:
+    decoder_G722(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_G722();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+
+class decoder_G722_1_16 : public NETEQTEST_Decoder
+{
+public:
+    decoder_G722_1_16(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_G722_1_16();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_G722_1_24 : public NETEQTEST_Decoder
+{
+public:
+    decoder_G722_1_24(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_G722_1_24();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_G722_1_32 : public NETEQTEST_Decoder
+{
+public:
+    decoder_G722_1_32(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_G722_1_32();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+
+class decoder_G722_1C_24 : public NETEQTEST_Decoder
+{
+public:
+    decoder_G722_1C_24(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_G722_1C_24();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_G722_1C_32 : public NETEQTEST_Decoder
+{
+public:
+    decoder_G722_1C_32(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_G722_1C_32();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_G722_1C_48 : public NETEQTEST_Decoder
+{
+public:
+    decoder_G722_1C_48(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_G722_1C_48();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+
+class decoder_AMR : public NETEQTEST_Decoder
+{
+public:
+    decoder_AMR(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_AMR();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_AMRWB : public NETEQTEST_Decoder
+{
+public:
+    decoder_AMRWB(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_AMRWB();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_GSMFR : public NETEQTEST_Decoder
+{
+public:
+    decoder_GSMFR(WebRtc_UWord8 pt = 0);
+    virtual ~decoder_GSMFR();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_G726 : public NETEQTEST_Decoder
+{
+public:
+    //virtual decoder_G726(WebRtc_UWord8 pt = 0) = 0;
+    decoder_G726(enum WebRtcNetEQDecoder type, const char * name, WebRtc_UWord8 pt = 0);
+    virtual ~decoder_G726();
+    virtual int loadToNetEQ(NETEQTEST_NetEQClass & neteq) = 0;
+};
+
+class decoder_G726_16 : public decoder_G726
+{
+public:
+    decoder_G726_16(WebRtc_UWord8 pt = 0) : decoder_G726(kDecoderG726_16, "G.726 (16 kbps)", pt) {};
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_G726_24 : public decoder_G726
+{
+public:
+    decoder_G726_24(WebRtc_UWord8 pt = 0) : decoder_G726(kDecoderG726_24, "G.726 (24 kbps)", pt) {};
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_G726_32 : public decoder_G726
+{
+public:
+    decoder_G726_32(WebRtc_UWord8 pt = 0) : decoder_G726(kDecoderG726_32, "G.726 (32 kbps)", pt) {};
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_G726_40 : public decoder_G726
+{
+public:
+    decoder_G726_40(WebRtc_UWord8 pt = 0) : decoder_G726(kDecoderG726_40, "G.726 (40 kbps)", pt) {};
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_SPEEX : public NETEQTEST_Decoder
+{
+public:
+    decoder_SPEEX(WebRtc_UWord8 pt = 0, WebRtc_UWord16 fs = 8000);
+    virtual ~decoder_SPEEX();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_CELT : public NETEQTEST_Decoder
+{
+public:
+    decoder_CELT(WebRtc_UWord8 pt = 0, WebRtc_UWord16 fs = 32000);
+    virtual ~decoder_CELT();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+class decoder_CELTslave : public NETEQTEST_Decoder
+{
+public:
+    decoder_CELTslave(WebRtc_UWord8 pt = 0, WebRtc_UWord16 fs = 32000);
+    virtual ~decoder_CELTslave();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_RED : public NETEQTEST_Decoder
+{
+public:
+    decoder_RED(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderRED, 8000, "RED", pt) {};
+    virtual ~decoder_RED() {};
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+class decoder_AVT : public NETEQTEST_Decoder
+{
+public:
+    decoder_AVT(WebRtc_UWord8 pt = 0) : NETEQTEST_Decoder(kDecoderAVT, 8000, "AVT", pt) {};
+    virtual ~decoder_AVT() {};
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+
+class decoder_CNG : public NETEQTEST_Decoder
+{
+public:
+    decoder_CNG(WebRtc_UWord8 pt = 0, WebRtc_UWord16 fs = 8000);
+    virtual ~decoder_CNG();
+    int loadToNetEQ(NETEQTEST_NetEQClass & neteq);
+};
+
+#endif //NETEQTEST_CODECCLASS_H
diff --git a/src/modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.cc b/src/modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.cc
new file mode 100644
index 0000000..e8d153b
--- /dev/null
+++ b/src/modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.cc
@@ -0,0 +1,191 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "NETEQTEST_DummyRTPpacket.h"
+
+#include <assert.h>
+#include <stdio.h>
+#include <string.h>
+
+#ifdef WIN32
+#include <winsock2.h>
+#else
+#include <netinet/in.h> // for htons, htonl, etc
+#endif
+
+int NETEQTEST_DummyRTPpacket::readFromFile(FILE *fp)
+{
+    if (!fp)
+    {
+        return -1;
+    }
+
+    WebRtc_UWord16 length, plen;
+    WebRtc_UWord32 offset;
+
+    if (fread(&length, 2, 1, fp) == 0)
+    {
+        reset();
+        return -2;
+    }
+    length = ntohs(length);
+
+    if (fread(&plen, 2, 1, fp) == 0)
+    {
+        reset();
+        return -1;
+    }
+    int packetLen = ntohs(plen);
+
+    if (fread(&offset, 4, 1, fp) == 0)
+    {
+        reset();
+        return -1;
+    }
+    // Store in local variable until we have passed the reset below.
+    WebRtc_UWord32 receiveTime = ntohl(offset);
+
+    // Use length here because a plen of 0 specifies rtcp.
+    length = (WebRtc_UWord16) (length - _kRDHeaderLen);
+
+    // check buffer size
+    if (_datagram && _memSize < length)
+    {
+        reset();
+    }
+
+    if (!_datagram)
+    {
+        _datagram = new WebRtc_UWord8[length];
+        _memSize = length;
+    }
+    memset(_datagram, 0, length);
+
+    if (length == 0)
+    {
+        _datagramLen = 0;
+        return packetLen;
+    }
+
+    // Read basic header
+    if (fread((unsigned short *) _datagram, 1, _kBasicHeaderLen, fp)
+        != (size_t)_kBasicHeaderLen)
+    {
+        reset();
+        return -1;
+    }
+    _receiveTime = receiveTime;
+    _datagramLen = _kBasicHeaderLen;
+
+    // Parse the basic header
+    WebRtcNetEQ_RTPInfo tempRTPinfo;
+    int P, X, CC;
+    parseBasicHeader(&tempRTPinfo, &P, &X, &CC);
+
+    // Check if we have to extend the header
+    if (X != 0 || CC != 0)
+    {
+        int newLen = _kBasicHeaderLen + CC * 4 + X * 4;
+        assert(_memSize >= newLen);
+
+        // Read extension from file
+        size_t readLen = newLen - _kBasicHeaderLen;
+        if (fread((unsigned short *) _datagram + _kBasicHeaderLen, 1, readLen,
+            fp) != readLen)
+        {
+            reset();
+            return -1;
+        }
+        _datagramLen = newLen;
+
+        if (X != 0)
+        {
+            int totHdrLen = calcHeaderLength(X, CC);
+            assert(_memSize >= totHdrLen);
+
+            // Read extension from file
+            size_t readLen = totHdrLen - newLen;
+            if (fread((unsigned short *) _datagram + newLen, 1, readLen, fp)
+                != readLen)
+            {
+                reset();
+                return -1;
+            }
+            _datagramLen = totHdrLen;
+        }
+    }
+    _datagramLen = length;
+
+    if (!_blockList.empty() && _blockList.count(payloadType()) > 0)
+    {
+        // discard this payload
+        return readFromFile(fp);
+    }
+
+    return packetLen;
+
+}
+
+int NETEQTEST_DummyRTPpacket::writeToFile(FILE *fp)
+{
+    if (!fp)
+    {
+        return -1;
+    }
+
+    WebRtc_UWord16 length, plen;
+    WebRtc_UWord32 offset;
+
+    // length including RTPplay header
+    length = htons(_datagramLen + _kRDHeaderLen);
+    if (fwrite(&length, 2, 1, fp) != 1)
+    {
+        return -1;
+    }
+
+    // payload length
+    plen = htons(_datagramLen);
+    if (fwrite(&plen, 2, 1, fp) != 1)
+    {
+        return -1;
+    }
+
+    // offset (=receive time)
+    offset = htonl(_receiveTime);
+    if (fwrite(&offset, 4, 1, fp) != 1)
+    {
+        return -1;
+    }
+
+    // Figure out the length of the RTP header.
+    int headerLen;
+    if (_datagramLen == 0)
+    {
+        // No payload at all; we are done writing to file.
+        headerLen = 0;
+    }
+    else
+    {
+        parseHeader();
+        headerLen = _payloadPtr - _datagram;
+        assert(headerLen >= 0);
+    }
+
+    // write RTP header
+    if (fwrite((unsigned short *) _datagram, 1, headerLen, fp) !=
+        static_cast<size_t>(headerLen))
+    {
+        return -1;
+    }
+
+    return (headerLen + _kRDHeaderLen); // total number of bytes written
+
+}
+
diff --git a/src/modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.h b/src/modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.h
new file mode 100644
index 0000000..ef74421
--- /dev/null
+++ b/src/modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.h
@@ -0,0 +1,23 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef NETEQTEST_DUMMYRTPPACKET_H
+#define NETEQTEST_DUMMYRTPPACKET_H
+
+#include "NETEQTEST_RTPpacket.h"
+
+class NETEQTEST_DummyRTPpacket : public NETEQTEST_RTPpacket
+{
+public:
+    virtual int readFromFile(FILE *fp);
+    virtual int writeToFile(FILE *fp);
+};
+
+#endif //NETEQTEST_DUMMYRTPPACKET_H
diff --git a/src/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.cc b/src/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.cc
new file mode 100644
index 0000000..0d8be00
--- /dev/null
+++ b/src/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.cc
@@ -0,0 +1,393 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <memory.h>
+
+#include "NETEQTEST_NetEQClass.h"
+
+
+NETEQTEST_NetEQClass::NETEQTEST_NetEQClass()
+    :
+    _inst(NULL),
+    _instMem(NULL),
+    _bufferMem(NULL),
+    _preparseRTP(false),
+    _fsmult(1),
+    _isMaster(true),
+    _noDecode(false)
+{
+#ifdef WINDOWS_TIMING
+    _totTimeRecIn.QuadPart = 0;
+    _totTimeRecOut.QuadPart = 0;
+#endif
+}
+
+NETEQTEST_NetEQClass::NETEQTEST_NetEQClass(enum WebRtcNetEQDecoder *usedCodec, int noOfCodecs,
+        WebRtc_UWord16 fs, WebRtcNetEQNetworkType nwType)
+    :
+    _inst(NULL),
+    _instMem(NULL),
+    _bufferMem(NULL),
+    _preparseRTP(false),
+    _fsmult(1),
+    _isMaster(true),
+    _noDecode(false)
+{
+#ifdef WINDOWS_TIMING
+    _totTimeRecIn.QuadPart = 0;
+    _totTimeRecOut.QuadPart = 0;
+#endif
+
+    if (assign() == 0)
+    {
+        if (init(fs) == 0)
+        {
+            assignBuffer(usedCodec, noOfCodecs, nwType);
+        }
+    }
+}
+
+
+NETEQTEST_NetEQClass::~NETEQTEST_NetEQClass()
+{
+    if (_instMem)
+    {
+        delete [] _instMem;
+        _instMem = NULL;
+    }
+
+    if (_bufferMem)
+    {
+        delete [] _bufferMem;
+        _bufferMem = NULL;
+    }
+
+    _inst = NULL;
+}
+
+int NETEQTEST_NetEQClass::assign()
+{
+    int memSize;
+
+    WebRtcNetEQ_AssignSize(&memSize);
+
+    if (_instMem)
+    {
+        delete [] _instMem;
+        _instMem = NULL;
+    }
+
+    _instMem = new WebRtc_Word8[memSize];
+
+    int ret = WebRtcNetEQ_Assign(&_inst, _instMem);
+
+    if (ret)
+    {
+        printError();
+    }
+
+    return (ret);
+}
+
+
+int NETEQTEST_NetEQClass::init(WebRtc_UWord16 fs)
+{
+    int ret;
+
+    if (!_inst)
+    {
+        // not assigned
+        ret = assign();
+
+        if (ret != 0)
+        {
+            printError();
+            return (ret);
+        }
+    }
+
+    ret = WebRtcNetEQ_Init(_inst, fs);
+
+    if (ret != 0)
+    {
+        printError();
+    }
+
+    return (ret);
+
+}
+
+
+int NETEQTEST_NetEQClass::assignBuffer(enum WebRtcNetEQDecoder *usedCodec, int noOfCodecs, WebRtcNetEQNetworkType nwType)
+{
+    int numPackets, memSize, ret;
+
+    if (!_inst)
+    {
+        // not assigned
+        ret = assign();
+
+        if (ret != 0)
+        {
+            printError();
+            return (ret);
+        }
+
+        ret = init();
+
+        if (ret != 0)
+        {
+            printError();
+            return (ret);
+        }
+    }
+
+    ret = WebRtcNetEQ_GetRecommendedBufferSize(_inst, usedCodec, noOfCodecs, nwType, &numPackets, &memSize);
+
+    if (ret != 0)
+    {
+        printError();
+        return (ret);
+    }
+
+    if (_bufferMem)
+    {
+        delete [] _bufferMem;
+        _bufferMem = NULL;
+    }
+
+    _bufferMem = new WebRtc_Word8[memSize];
+
+    memset(_bufferMem, -1, memSize);
+
+    ret = WebRtcNetEQ_AssignBuffer(_inst, numPackets, _bufferMem, memSize);
+
+    if (ret != 0)
+    {
+        printError();
+    }
+
+    return (ret);
+}
+
+int NETEQTEST_NetEQClass::loadCodec(WebRtcNetEQ_CodecDef &codecInst)
+{
+    int err = WebRtcNetEQ_CodecDbAdd(_inst, &codecInst);
+
+    if (err)
+    {
+        printError();
+    }
+
+    return (err);
+}
+
+void NETEQTEST_NetEQClass::printError()
+{
+    if (_inst)
+    {
+        int errorCode = WebRtcNetEQ_GetErrorCode(_inst);
+
+        if (errorCode)
+        {
+            char errorName[WEBRTC_NETEQ_MAX_ERROR_NAME];
+
+            WebRtcNetEQ_GetErrorName(errorCode, errorName, WEBRTC_NETEQ_MAX_ERROR_NAME);
+
+            printf("Error %i: %s\n", errorCode, errorName);
+        }
+    }
+}
+
+void NETEQTEST_NetEQClass::printError(NETEQTEST_RTPpacket &rtp)
+{
+    // print regular error info
+    printError();
+
+    // print extra info from packet
+    printf("\tRTP: TS=%u, SN=%u, PT=%u, M=%i, len=%i\n",
+           rtp.timeStamp(), rtp.sequenceNumber(), rtp.payloadType(),
+           rtp.markerBit(), rtp.payloadLen());
+
+}
+
+int NETEQTEST_NetEQClass::recIn(NETEQTEST_RTPpacket &rtp)
+{
+
+    int err;
+#ifdef WINDOWS_TIMING
+    LARGE_INTEGER countA, countB;
+#endif
+
+    if (_preparseRTP)
+    {
+        WebRtcNetEQ_RTPInfo rtpInfo;
+        // parse RTP header
+        rtp.parseHeader(rtpInfo);
+
+#ifdef WINDOWS_TIMING
+        QueryPerformanceCounter(&countA); // get start count for processor
+#endif
+
+        err = WebRtcNetEQ_RecInRTPStruct(_inst, &rtpInfo, rtp.payload(), rtp.payloadLen(), rtp.time() * _fsmult * 8);
+
+#ifdef WINDOWS_TIMING
+        QueryPerformanceCounter(&countB); // get stop count for processor
+        _totTimeRecIn.QuadPart += (countB.QuadPart - countA.QuadPart);
+#endif
+
+    }
+    else
+    {
+
+#ifdef WINDOWS_TIMING
+        QueryPerformanceCounter(&countA); // get start count for processor
+#endif
+
+        err = WebRtcNetEQ_RecIn(_inst, (WebRtc_Word16 *) rtp.datagram(), rtp.dataLen(), rtp.time() * _fsmult * 8);
+
+#ifdef WINDOWS_TIMING
+        QueryPerformanceCounter(&countB); // get stop count for processor
+        _totTimeRecIn.QuadPart += (countB.QuadPart - countA.QuadPart);
+#endif
+
+    }
+
+    if (err)
+    {
+        printError(rtp);
+    }
+
+    return (err);
+
+}
+
+
+WebRtc_Word16 NETEQTEST_NetEQClass::recOut(WebRtc_Word16 *outData, void *msInfo, enum WebRtcNetEQOutputType *outputType)
+{
+    int err;
+    WebRtc_Word16 outLen = 0;
+#ifdef WINDOWS_TIMING
+    LARGE_INTEGER countA, countB;
+#endif
+
+#ifdef WINDOWS_TIMING
+    QueryPerformanceCounter(&countA); // get start count for processor
+#endif
+
+    if (!msInfo)
+    {
+        // no msInfo given, do mono mode
+        if (_noDecode)
+        {
+            err = WebRtcNetEQ_RecOutNoDecode(_inst, outData, &outLen);
+        }
+        else
+        {
+            err = WebRtcNetEQ_RecOut(_inst, outData, &outLen);
+        }
+    }
+    else
+    {
+        // master/slave mode
+        err = WebRtcNetEQ_RecOutMasterSlave(_inst, outData, &outLen, msInfo, static_cast<WebRtc_Word16>(_isMaster));
+    }
+
+#ifdef WINDOWS_TIMING
+    QueryPerformanceCounter(&countB); // get stop count for processor
+    _totTimeRecOut.QuadPart += (countB.QuadPart - countA.QuadPart);
+#endif
+
+    if (err)
+    {
+        printError();
+    }
+    else
+    {
+        int newfsmult = static_cast<int>(outLen / 80);
+
+        if (newfsmult != _fsmult)
+        {
+#ifdef NETEQTEST_PRINT_WARNINGS
+            printf("Warning: output sample rate changed\n");
+#endif  // NETEQTEST_PRINT_WARNINGS
+            _fsmult = newfsmult;
+        }
+    }
+
+    if (outputType != NULL)
+    {
+        err = WebRtcNetEQ_GetSpeechOutputType(_inst, outputType);
+
+        if (err)
+        {
+            printError();
+        }
+    }
+
+    return (outLen);
+}
+
+
+WebRtc_UWord32 NETEQTEST_NetEQClass::getSpeechTimeStamp()
+{
+
+    WebRtc_UWord32 ts = 0;
+    int err;
+
+    err = WebRtcNetEQ_GetSpeechTimeStamp(_inst, &ts);
+
+    if (err)
+    {
+        printError();
+        ts = 0;
+    }
+
+    return (ts);
+
+}
+
+WebRtcNetEQOutputType NETEQTEST_NetEQClass::getOutputType() {
+  WebRtcNetEQOutputType type;
+
+  int err = WebRtcNetEQ_GetSpeechOutputType(_inst, &type);
+  if (err)
+  {
+    printError();
+    type = kOutputNormal;
+  }
+  return (type);
+}
+
+//NETEQTEST_NetEQVector::NETEQTEST_NetEQVector(int numChannels)
+//:
+//channels(numChannels, new NETEQTEST_NetEQClass())
+//{
+//    //for (int i = 0; i < numChannels; i++)
+//    //{
+//    //    channels.push_back(new NETEQTEST_NetEQClass());
+//    //}
+//}
+//
+//NETEQTEST_NetEQVector::NETEQTEST_NetEQVector(int numChannels, enum WebRtcNetEQDecoder *usedCodec, int noOfCodecs,
+//                      WebRtc_UWord16 fs, WebRtcNetEQNetworkType nwType)
+//                      :
+//channels(numChannels, new NETEQTEST_NetEQClass(usedCodec, noOfCodecs, fs, nwType))
+//{
+//    //for (int i = 0; i < numChannels; i++)
+//    //{
+//    //    channels.push_back(new NETEQTEST_NetEQClass(usedCodec, noOfCodecs, fs, nwType));
+//    //}
+//}
+//
+//NETEQTEST_NetEQVector::~NETEQTEST_NetEQVector()
+//{
+//}
+
diff --git a/src/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.h b/src/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.h
new file mode 100644
index 0000000..3e43125
--- /dev/null
+++ b/src/modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.h
@@ -0,0 +1,81 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef NETEQTEST_NETEQCLASS_H
+#define NETEQTEST_NETEQCLASS_H
+
+#include <stdio.h>
+#include <vector>
+
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_internal.h"
+
+#include "NETEQTEST_RTPpacket.h"
+
+#ifdef WIN32
+#define WINDOWS_TIMING // complexity measurement only implemented for windows
+//TODO(hlundin):Add complexity testing for Linux.
+#include <windows.h>
+#endif
+
+class NETEQTEST_NetEQClass
+{
+public:
+    NETEQTEST_NetEQClass();
+    NETEQTEST_NetEQClass(enum WebRtcNetEQDecoder *usedCodec, int noOfCodecs, 
+        WebRtc_UWord16 fs = 8000, WebRtcNetEQNetworkType nwType = kTCPLargeJitter);
+    ~NETEQTEST_NetEQClass();
+
+    int assign();
+    int init(WebRtc_UWord16 fs = 8000);
+    int assignBuffer(enum WebRtcNetEQDecoder *usedCodec, int noOfCodecs, WebRtcNetEQNetworkType nwType = kTCPLargeJitter);
+    int loadCodec(WebRtcNetEQ_CodecDef & codecInst);
+    int recIn(NETEQTEST_RTPpacket & rtp);
+    WebRtc_Word16 recOut(WebRtc_Word16 *outData, void *msInfo = NULL, enum WebRtcNetEQOutputType *outputType = NULL);
+    WebRtc_UWord32 getSpeechTimeStamp();
+    WebRtcNetEQOutputType getOutputType();
+
+    void * instance() { return (_inst); };
+    void usePreparseRTP( bool useIt = true ) { _preparseRTP = useIt; };
+    bool usingPreparseRTP() { return (_preparseRTP); };
+    void setMaster( bool isMaster = true ) { _isMaster = isMaster; };
+    void setSlave() { _isMaster = false; };
+    void setNoDecode(bool noDecode = true) { _noDecode = noDecode; };
+    bool isMaster() { return (_isMaster); };
+    bool isSlave() { return (!_isMaster); };
+    bool isNoDecode() { return _noDecode; };
+
+#ifdef WINDOWS_TIMING
+    double getRecInTime() { return (static_cast<double>( _totTimeRecIn.QuadPart )); };
+    double getRecOutTime() { return (static_cast<double>( _totTimeRecOut.QuadPart )); };
+#else
+    double getRecInTime() { return (0.0); };
+    double getRecOutTime() { return (0.0); };
+
+#endif
+
+    void printError();
+    void printError(NETEQTEST_RTPpacket & rtp);
+
+private:
+    void *          _inst;
+    WebRtc_Word8 *    _instMem;
+    WebRtc_Word8 *    _bufferMem;
+    bool            _preparseRTP;
+    int             _fsmult;
+    bool            _isMaster;
+    bool            _noDecode;
+#ifdef WINDOWS_TIMING
+    LARGE_INTEGER   _totTimeRecIn;
+    LARGE_INTEGER   _totTimeRecOut;
+#endif
+};
+
+#endif //NETEQTEST_NETEQCLASS_H
diff --git a/src/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.cc b/src/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.cc
new file mode 100644
index 0000000..fdc9662
--- /dev/null
+++ b/src/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.cc
@@ -0,0 +1,865 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "NETEQTEST_RTPpacket.h"
+
+#include <assert.h>
+#include <stdlib.h>  // rand
+#include <string.h>
+
+#ifdef WIN32
+#include <winsock2.h>
+#else
+#include <netinet/in.h> // for htons, htonl, etc
+#endif
+
+const int NETEQTEST_RTPpacket::_kRDHeaderLen = 8;
+const int NETEQTEST_RTPpacket::_kBasicHeaderLen = 12;
+
+NETEQTEST_RTPpacket::NETEQTEST_RTPpacket()
+:
+_datagram(NULL),
+_payloadPtr(NULL),
+_memSize(0),
+_datagramLen(-1),
+_payloadLen(0),
+_rtpParsed(false),
+_receiveTime(0),
+_lost(false)
+{
+    memset(&_rtpInfo, 0, sizeof(_rtpInfo));
+    _blockList.clear();
+}
+
+NETEQTEST_RTPpacket::~NETEQTEST_RTPpacket()
+{
+    if(_datagram)
+    {
+        delete [] _datagram;
+    }
+}
+
+void NETEQTEST_RTPpacket::reset()
+{
+    if(_datagram) {
+        delete [] _datagram;
+    }
+    _datagram = NULL;
+    _memSize = 0;
+    _datagramLen = -1;
+    _payloadLen = 0;
+    _payloadPtr = NULL;
+    _receiveTime = 0;
+    memset(&_rtpInfo, 0, sizeof(_rtpInfo));
+    _rtpParsed = false;
+
+}
+
+int NETEQTEST_RTPpacket::skipFileHeader(FILE *fp)
+{
+    if (!fp) {
+        return -1;
+    }
+
+    const int kFirstLineLength = 40;
+    char firstline[kFirstLineLength];
+    if (fgets(firstline, kFirstLineLength, fp) == NULL) {
+        return -1;
+    }
+    if (strncmp(firstline, "#!rtpplay", 9) == 0) {
+        if (strncmp(firstline, "#!rtpplay1.0", 12) != 0) {
+            return -1;
+        }
+    }
+    else if (strncmp(firstline, "#!RTPencode", 11) == 0) {
+        if (strncmp(firstline, "#!RTPencode1.0", 14) != 0) {
+            return -1;
+        }
+    }
+    else
+    {
+        return -1;
+    }
+
+    const int kRtpDumpHeaderSize = 4 + 4 + 4 + 2 + 2;
+    if (fseek(fp, kRtpDumpHeaderSize, SEEK_CUR) != 0)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+int NETEQTEST_RTPpacket::readFromFile(FILE *fp)
+{
+    if(!fp)
+    {
+        return(-1);
+    }
+
+    WebRtc_UWord16 length, plen;
+    WebRtc_UWord32 offset;
+
+    if (fread(&length,2,1,fp)==0)
+    {
+        reset();
+        return(-2);
+    }
+    length = ntohs(length);
+
+    if (fread(&plen,2,1,fp)==0)
+    {
+        reset();
+        return(-1);
+    }
+    int packetLen = ntohs(plen);
+
+    if (fread(&offset,4,1,fp)==0)
+    {
+        reset();
+        return(-1);
+    }
+    WebRtc_UWord32 receiveTime = ntohl(offset); // store in local variable until we have passed the reset below
+
+    // Use length here because a plen of 0 specifies rtcp
+    length = (WebRtc_UWord16) (length - _kRDHeaderLen);
+
+    // check buffer size
+    if (_datagram && _memSize < length)
+    {
+        reset();
+    }
+
+    if (!_datagram)
+    {
+        _datagram = new WebRtc_UWord8[length];
+        _memSize = length;
+    }
+
+    if (fread((unsigned short *) _datagram,1,length,fp) != length)
+    {
+        reset();
+        return(-1);
+    }
+
+    _datagramLen = length;
+    _receiveTime = receiveTime;
+
+    if (!_blockList.empty() && _blockList.count(payloadType()) > 0)
+    {
+        // discard this payload
+        return(readFromFile(fp));
+    }
+
+    return(packetLen);
+
+}
+
+
+int NETEQTEST_RTPpacket::readFixedFromFile(FILE *fp, size_t length)
+{
+    if (!fp)
+    {
+        return -1;
+    }
+
+    // check buffer size
+    if (_datagram && _memSize < static_cast<int>(length))
+    {
+        reset();
+    }
+
+    if (!_datagram)
+    {
+        _datagram = new WebRtc_UWord8[length];
+        _memSize = length;
+    }
+
+    if (fread(_datagram, 1, length, fp) != length)
+    {
+        reset();
+        return -1;
+    }
+
+    _datagramLen = length;
+    _receiveTime = 0;
+
+    if (!_blockList.empty() && _blockList.count(payloadType()) > 0)
+    {
+        // discard this payload
+        return readFromFile(fp);
+    }
+
+    return length;
+
+}
+
+
+int NETEQTEST_RTPpacket::writeToFile(FILE *fp)
+{
+    if (!fp)
+    {
+        return -1;
+    }
+
+    WebRtc_UWord16 length, plen;
+    WebRtc_UWord32 offset;
+
+    // length including RTPplay header
+    length = htons(_datagramLen + _kRDHeaderLen);
+    if (fwrite(&length, 2, 1, fp) != 1)
+    {
+        return -1;
+    }
+
+    // payload length
+    plen = htons(_datagramLen);
+    if (fwrite(&plen, 2, 1, fp) != 1)
+    {
+        return -1;
+    }
+
+    // offset (=receive time)
+    offset = htonl(_receiveTime);
+    if (fwrite(&offset, 4, 1, fp) != 1)
+    {
+        return -1;
+    }
+
+
+    // write packet data
+    if (fwrite(_datagram, 1, _datagramLen, fp) !=
+            static_cast<size_t>(_datagramLen))
+    {
+        return -1;
+    }
+
+    return _datagramLen + _kRDHeaderLen; // total number of bytes written
+
+}
+
+
+void NETEQTEST_RTPpacket::blockPT(WebRtc_UWord8 pt)
+{
+    _blockList[pt] = true;
+}
+
+
+void NETEQTEST_RTPpacket::parseHeader()
+{
+    if (_rtpParsed)
+    {
+        // nothing to do
+        return;
+    }
+
+    if (_datagramLen < _kBasicHeaderLen)
+    {
+        // corrupt packet?
+        return;
+    }
+
+    _payloadLen = parseRTPheader(&_payloadPtr);
+
+    _rtpParsed = true;
+
+    return;
+
+}
+
+void NETEQTEST_RTPpacket::parseHeader(WebRtcNetEQ_RTPInfo & rtpInfo)
+{
+    if (!_rtpParsed)
+    {
+        // parse the header
+        parseHeader();
+    }
+
+    memcpy(&rtpInfo, &_rtpInfo, sizeof(WebRtcNetEQ_RTPInfo));
+}
+
+WebRtcNetEQ_RTPInfo const * NETEQTEST_RTPpacket::RTPinfo() const
+{
+    if (_rtpParsed)
+    {
+        return &_rtpInfo;
+    }
+    else
+    {
+        return NULL;
+    }
+}
+
+WebRtc_UWord8 * NETEQTEST_RTPpacket::datagram() const
+{
+    if (_datagramLen > 0)
+    {
+        return _datagram;
+    }
+    else
+    {
+        return NULL;
+    }
+}
+
+WebRtc_UWord8 * NETEQTEST_RTPpacket::payload() const
+{
+    if (_payloadLen > 0)
+    {
+        return _payloadPtr;
+    }
+    else
+    {
+        return NULL;
+    }
+}
+
+WebRtc_Word16 NETEQTEST_RTPpacket::payloadLen()
+{
+    parseHeader();
+    return _payloadLen;
+}
+
+WebRtc_Word16 NETEQTEST_RTPpacket::dataLen() const
+{
+    return _datagramLen;
+}
+
+bool NETEQTEST_RTPpacket::isParsed() const
+{
+    return _rtpParsed;
+}
+
+bool NETEQTEST_RTPpacket::isLost() const
+{
+    return _lost;
+}
+
+WebRtc_UWord8  NETEQTEST_RTPpacket::payloadType() const
+{
+    WebRtcNetEQ_RTPInfo tempRTPinfo;
+
+    if(_datagram && _datagramLen >= _kBasicHeaderLen)
+    {
+        parseRTPheader(&tempRTPinfo);
+    }
+    else
+    {
+        return 0;
+    }
+
+    return tempRTPinfo.payloadType;
+}
+
+WebRtc_UWord16 NETEQTEST_RTPpacket::sequenceNumber() const
+{
+    WebRtcNetEQ_RTPInfo tempRTPinfo;
+
+    if(_datagram && _datagramLen >= _kBasicHeaderLen)
+    {
+        parseRTPheader(&tempRTPinfo);
+    }
+    else
+    {
+        return 0;
+    }
+
+    return tempRTPinfo.sequenceNumber;
+}
+
+WebRtc_UWord32 NETEQTEST_RTPpacket::timeStamp() const
+{
+    WebRtcNetEQ_RTPInfo tempRTPinfo;
+
+    if(_datagram && _datagramLen >= _kBasicHeaderLen)
+    {
+        parseRTPheader(&tempRTPinfo);
+    }
+    else
+    {
+        return 0;
+    }
+
+    return tempRTPinfo.timeStamp;
+}
+
+WebRtc_UWord32 NETEQTEST_RTPpacket::SSRC() const
+{
+    WebRtcNetEQ_RTPInfo tempRTPinfo;
+
+    if(_datagram && _datagramLen >= _kBasicHeaderLen)
+    {
+        parseRTPheader(&tempRTPinfo);
+    }
+    else
+    {
+        return 0;
+    }
+
+    return tempRTPinfo.SSRC;
+}
+
+WebRtc_UWord8  NETEQTEST_RTPpacket::markerBit() const
+{
+    WebRtcNetEQ_RTPInfo tempRTPinfo;
+
+    if(_datagram && _datagramLen >= _kBasicHeaderLen)
+    {
+        parseRTPheader(&tempRTPinfo);
+    }
+    else
+    {
+        return 0;
+    }
+
+    return tempRTPinfo.markerBit;
+}
+
+
+
+int NETEQTEST_RTPpacket::setPayloadType(WebRtc_UWord8 pt)
+{
+
+    if (_datagramLen < 12)
+    {
+        return -1;
+    }
+
+    if (!_rtpParsed)
+    {
+        _rtpInfo.payloadType = pt;
+    }
+
+    _datagram[1]=(unsigned char)(pt & 0xFF);
+
+    return 0;
+
+}
+
+int NETEQTEST_RTPpacket::setSequenceNumber(WebRtc_UWord16 sn)
+{
+
+    if (_datagramLen < 12)
+    {
+        return -1;
+    }
+
+    if (!_rtpParsed)
+    {
+        _rtpInfo.sequenceNumber = sn;
+    }
+
+    _datagram[2]=(unsigned char)((sn>>8)&0xFF);
+    _datagram[3]=(unsigned char)((sn)&0xFF);
+
+    return 0;
+
+}
+
+int NETEQTEST_RTPpacket::setTimeStamp(WebRtc_UWord32 ts)
+{
+
+    if (_datagramLen < 12)
+    {
+        return -1;
+    }
+
+    if (!_rtpParsed)
+    {
+        _rtpInfo.timeStamp = ts;
+    }
+
+    _datagram[4]=(unsigned char)((ts>>24)&0xFF);
+    _datagram[5]=(unsigned char)((ts>>16)&0xFF);
+    _datagram[6]=(unsigned char)((ts>>8)&0xFF);
+    _datagram[7]=(unsigned char)(ts & 0xFF);
+
+    return 0;
+
+}
+
+int NETEQTEST_RTPpacket::setSSRC(WebRtc_UWord32 ssrc)
+{
+
+    if (_datagramLen < 12)
+    {
+        return -1;
+    }
+
+    if (!_rtpParsed)
+    {
+        _rtpInfo.SSRC = ssrc;
+    }
+
+    _datagram[8]=(unsigned char)((ssrc>>24)&0xFF);
+    _datagram[9]=(unsigned char)((ssrc>>16)&0xFF);
+    _datagram[10]=(unsigned char)((ssrc>>8)&0xFF);
+    _datagram[11]=(unsigned char)(ssrc & 0xFF);
+
+    return 0;
+
+}
+
+int NETEQTEST_RTPpacket::setMarkerBit(WebRtc_UWord8 mb)
+{
+
+    if (_datagramLen < 12)
+    {
+        return -1;
+    }
+
+    if (_rtpParsed)
+    {
+        _rtpInfo.markerBit = mb;
+    }
+
+    if (mb)
+    {
+        _datagram[0] |= 0x01;
+    }
+    else
+    {
+        _datagram[0] &= 0xFE;
+    }
+
+    return 0;
+
+}
+
+int NETEQTEST_RTPpacket::setRTPheader(const WebRtcNetEQ_RTPInfo *RTPinfo)
+{
+    if (_datagramLen < 12)
+    {
+        // this packet is not ok
+        return -1;
+    }
+
+    makeRTPheader(_datagram,
+        RTPinfo->payloadType,
+        RTPinfo->sequenceNumber,
+        RTPinfo->timeStamp,
+        RTPinfo->SSRC,
+        RTPinfo->markerBit);
+
+    return 0;
+}
+
+
+int NETEQTEST_RTPpacket::splitStereo(NETEQTEST_RTPpacket* slaveRtp,
+                                     enum stereoModes mode)
+{
+    // if mono, do nothing
+    if (mode == stereoModeMono)
+    {
+        return 0;
+    }
+
+    // check that the RTP header info is parsed
+    parseHeader();
+
+    // start by copying the main rtp packet
+    *slaveRtp = *this;
+
+    if(_payloadLen == 0)
+    {
+        // do no more
+        return 0;
+    }
+
+    if(_payloadLen%2 != 0)
+    {
+        // length must be a factor of 2
+        return -1;
+    }
+
+    switch(mode)
+    {
+    case stereoModeSample1:
+        {
+            // sample based codec with 1-byte samples
+            splitStereoSample(slaveRtp, 1 /* 1 byte/sample */);
+            break;
+        }
+    case stereoModeSample2:
+        {
+            // sample based codec with 2-byte samples
+            splitStereoSample(slaveRtp, 2 /* 2 bytes/sample */);
+            break;
+        }
+    case stereoModeFrame:
+        {
+            // frame based codec
+            splitStereoFrame(slaveRtp);
+            break;
+        }
+    case stereoModeDuplicate:
+        {
+            // frame based codec, send the whole packet to both master and slave
+            splitStereoDouble(slaveRtp);
+            break;
+        }
+    case stereoModeMono:
+        {
+            assert(false);
+            return -1;
+        }
+    }
+
+    return 0;
+}
+
+
+void NETEQTEST_RTPpacket::makeRTPheader(unsigned char* rtp_data, WebRtc_UWord8 payloadType, WebRtc_UWord16 seqNo, WebRtc_UWord32 timestamp, WebRtc_UWord32 ssrc, WebRtc_UWord8 markerBit) const
+{
+    rtp_data[0]=(unsigned char)0x80;
+    if (markerBit)
+    {
+        rtp_data[0] |= 0x01;
+    }
+    else
+    {
+        rtp_data[0] &= 0xFE;
+    }
+    rtp_data[1]=(unsigned char)(payloadType & 0xFF);
+    rtp_data[2]=(unsigned char)((seqNo>>8)&0xFF);
+    rtp_data[3]=(unsigned char)((seqNo)&0xFF);
+    rtp_data[4]=(unsigned char)((timestamp>>24)&0xFF);
+    rtp_data[5]=(unsigned char)((timestamp>>16)&0xFF);
+
+    rtp_data[6]=(unsigned char)((timestamp>>8)&0xFF);
+    rtp_data[7]=(unsigned char)(timestamp & 0xFF);
+
+    rtp_data[8]=(unsigned char)((ssrc>>24)&0xFF);
+    rtp_data[9]=(unsigned char)((ssrc>>16)&0xFF);
+
+    rtp_data[10]=(unsigned char)((ssrc>>8)&0xFF);
+    rtp_data[11]=(unsigned char)(ssrc & 0xFF);
+}
+
+WebRtc_UWord16
+    NETEQTEST_RTPpacket::parseRTPheader(WebRtcNetEQ_RTPInfo *RTPinfo,
+                                        WebRtc_UWord8 **payloadPtr) const
+{
+    WebRtc_Word16 *rtp_data = (WebRtc_Word16 *) _datagram;
+    int i_P, i_X, i_CC;
+
+    assert(_datagramLen >= 12);
+    parseBasicHeader(RTPinfo, &i_P, &i_X, &i_CC);
+
+    int i_startPosition = calcHeaderLength(i_X, i_CC);
+
+    int i_padlength = calcPadLength(i_P);
+
+    if (payloadPtr)
+    {
+        *payloadPtr = (WebRtc_UWord8*) &rtp_data[i_startPosition >> 1];
+    }
+
+    return (WebRtc_UWord16) (_datagramLen - i_startPosition - i_padlength);
+}
+
+
+void NETEQTEST_RTPpacket::parseBasicHeader(WebRtcNetEQ_RTPInfo *RTPinfo,
+                                           int *i_P, int *i_X, int *i_CC) const
+{
+    WebRtc_Word16 *rtp_data = (WebRtc_Word16 *) _datagram;
+    if (_datagramLen < 12)
+    {
+        assert(false);
+        return;
+    }
+
+    *i_P=(((WebRtc_UWord16)(rtp_data[0] & 0x20))>>5); /* Extract the P bit */
+    *i_X=(((WebRtc_UWord16)(rtp_data[0] & 0x10))>>4); /* Extract the X bit */
+    *i_CC=(WebRtc_UWord16)(rtp_data[0] & 0xF); /* Get the CC number  */
+    /* Get the marker bit */
+    RTPinfo->markerBit = (WebRtc_UWord8) ((rtp_data[0] >> 15) & 0x01);
+    /* Get the coder type */
+    RTPinfo->payloadType = (WebRtc_UWord8) ((rtp_data[0] >> 8) & 0x7F);
+    /* Get the packet number */
+    RTPinfo->sequenceNumber = ((( ((WebRtc_UWord16)rtp_data[1]) >> 8) & 0xFF) |
+        ( ((WebRtc_UWord16)(rtp_data[1] & 0xFF)) << 8));
+    /* Get timestamp */
+    RTPinfo->timeStamp = ((((WebRtc_UWord16)rtp_data[2]) & 0xFF) << 24) |
+        ((((WebRtc_UWord16)rtp_data[2]) & 0xFF00) << 8) |
+        ((((WebRtc_UWord16)rtp_data[3]) >> 8) & 0xFF) |
+        ((((WebRtc_UWord16)rtp_data[3]) & 0xFF) << 8);
+    /* Get the SSRC */
+    RTPinfo->SSRC=((((WebRtc_UWord16)rtp_data[4]) & 0xFF) << 24) |
+        ((((WebRtc_UWord16)rtp_data[4]) & 0xFF00) << 8) |
+        ((((WebRtc_UWord16)rtp_data[5]) >> 8) & 0xFF) |
+        ((((WebRtc_UWord16)rtp_data[5]) & 0xFF) << 8);
+}
+
+int NETEQTEST_RTPpacket::calcHeaderLength(int i_X, int i_CC) const
+{
+    int i_extlength = 0;
+    WebRtc_Word16 *rtp_data = (WebRtc_Word16 *) _datagram;
+
+    if (i_X == 1)
+    {
+        // Extension header exists.
+        // Find out how many WebRtc_Word32 it consists of.
+        assert(_datagramLen > 2 * (7 + 2 * i_CC));
+        if (_datagramLen > 2 * (7 + 2 * i_CC))
+        {
+            i_extlength = (((((WebRtc_UWord16) rtp_data[7 + 2 * i_CC]) >> 8)
+                & 0xFF) | (((WebRtc_UWord16) (rtp_data[7 + 2 * i_CC] & 0xFF))
+                << 8)) + 1;
+        }
+    }
+
+    return 12 + 4 * i_extlength + 4 * i_CC;
+}
+
+int NETEQTEST_RTPpacket::calcPadLength(int i_P) const
+{
+    WebRtc_Word16 *rtp_data = (WebRtc_Word16 *) _datagram;
+    if (i_P == 1)
+    {
+        /* Padding exists. Find out how many bytes the padding consists of. */
+        if (_datagramLen & 0x1)
+        {
+            /* odd number of bytes => last byte in higher byte */
+            return rtp_data[_datagramLen >> 1] & 0xFF;
+        }
+        else
+        {
+            /* even number of bytes => last byte in lower byte */
+            return ((WebRtc_UWord16) rtp_data[(_datagramLen >> 1) - 1]) >> 8;
+        }
+    }
+    return 0;
+}
+
+void NETEQTEST_RTPpacket::splitStereoSample(NETEQTEST_RTPpacket* slaveRtp,
+                                            int stride)
+{
+    if(!_payloadPtr || !slaveRtp || !slaveRtp->_payloadPtr
+        || _payloadLen <= 0 || slaveRtp->_memSize < _memSize)
+    {
+        return;
+    }
+
+    WebRtc_UWord8 *readDataPtr = _payloadPtr;
+    WebRtc_UWord8 *writeDataPtr = _payloadPtr;
+    WebRtc_UWord8 *slaveData = slaveRtp->_payloadPtr;
+
+    while (readDataPtr - _payloadPtr < _payloadLen)
+    {
+        // master data
+        for (int ix = 0; ix < stride; ix++) {
+            *writeDataPtr = *readDataPtr;
+            writeDataPtr++;
+            readDataPtr++;
+        }
+
+        // slave data
+        for (int ix = 0; ix < stride; ix++) {
+            *slaveData = *readDataPtr;
+            slaveData++;
+            readDataPtr++;
+        }
+    }
+
+    _payloadLen /= 2;
+    slaveRtp->_payloadLen = _payloadLen;
+}
+
+
+void NETEQTEST_RTPpacket::splitStereoFrame(NETEQTEST_RTPpacket* slaveRtp)
+{
+    if(!_payloadPtr || !slaveRtp || !slaveRtp->_payloadPtr
+        || _payloadLen <= 0 || slaveRtp->_memSize < _memSize)
+    {
+        return;
+    }
+
+    memmove(slaveRtp->_payloadPtr, _payloadPtr + _payloadLen/2, _payloadLen/2);
+
+    _payloadLen /= 2;
+    slaveRtp->_payloadLen = _payloadLen;
+}
+void NETEQTEST_RTPpacket::splitStereoDouble(NETEQTEST_RTPpacket* slaveRtp)
+{
+    if(!_payloadPtr || !slaveRtp || !slaveRtp->_payloadPtr
+        || _payloadLen <= 0 || slaveRtp->_memSize < _memSize)
+    {
+        return;
+    }
+
+    memcpy(slaveRtp->_payloadPtr, _payloadPtr, _payloadLen);
+    slaveRtp->_payloadLen = _payloadLen;
+}
+
+// Get the RTP header for the RED payload indicated by argument index.
+// The first RED payload is index = 0.
+int NETEQTEST_RTPpacket::extractRED(int index, WebRtcNetEQ_RTPInfo& red)
+{
+//
+//  0                   1                    2                   3
+//  0 1 2 3 4 5 6 7 8 9 0 1 2 3  4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |1|   block PT  |  timestamp offset         |   block length    |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |1|    ...                                                      |
+// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+// |0|   block PT  |
+// +-+-+-+-+-+-+-+-+
+//
+
+    parseHeader();
+
+    WebRtc_UWord8* ptr = payload();
+    WebRtc_UWord8* payloadEndPtr = ptr + payloadLen();
+    int num_encodings = 0;
+    int total_len = 0;
+
+    while ((ptr < payloadEndPtr) && (*ptr & 0x80))
+    {
+        int len = ((ptr[2] & 0x03) << 8) + ptr[3];
+        if (num_encodings == index)
+        {
+            // Header found.
+            red.payloadType = ptr[0] & 0x7F;
+            WebRtc_UWord32 offset = (ptr[1] << 6) + ((ptr[2] & 0xFC) >> 2);
+            red.sequenceNumber = sequenceNumber();
+            red.timeStamp = timeStamp() - offset;
+            red.markerBit = markerBit();
+            red.SSRC = SSRC();
+            return len;
+        }
+        ++num_encodings;
+        total_len += len;
+        ptr += 4;
+    }
+    if ((ptr < payloadEndPtr) && (num_encodings == index))
+    {
+        // Last header.
+        red.payloadType = ptr[0] & 0x7F;
+        red.sequenceNumber = sequenceNumber();
+        red.timeStamp = timeStamp();
+        red.markerBit = markerBit();
+        red.SSRC = SSRC();
+        ++ptr;
+        return payloadLen() - (ptr - payload()) - total_len;
+    }
+    return -1;
+}
+
+// Randomize the payload, not the RTP header.
+void NETEQTEST_RTPpacket::scramblePayload(void)
+{
+    parseHeader();
+
+    for (int i = 0; i < _payloadLen; ++i)
+    {
+        _payloadPtr[i] = static_cast<WebRtc_UWord8>(rand());
+    }
+}
diff --git a/src/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h b/src/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h
new file mode 100644
index 0000000..a6d32dc
--- /dev/null
+++ b/src/modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h
@@ -0,0 +1,105 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef NETEQTEST_RTPPACKET_H
+#define NETEQTEST_RTPPACKET_H
+
+#include <map>
+#include <stdio.h>
+#include "typedefs.h"
+#include "webrtc_neteq_internal.h"
+
+enum stereoModes {
+    stereoModeMono,
+    stereoModeSample1,
+    stereoModeSample2,
+    stereoModeFrame,
+    stereoModeDuplicate
+};
+
+class NETEQTEST_RTPpacket
+{
+public:
+    NETEQTEST_RTPpacket();
+    bool operator !() const { return (dataLen() < 0); };
+    virtual ~NETEQTEST_RTPpacket();
+    void reset();
+    static int skipFileHeader(FILE *fp);
+    virtual int readFromFile(FILE *fp);
+    int readFixedFromFile(FILE *fp, size_t len);
+    virtual int writeToFile(FILE *fp);
+    void blockPT(WebRtc_UWord8 pt);
+    //WebRtc_Word16 payloadType();
+    void parseHeader();
+    void parseHeader(WebRtcNetEQ_RTPInfo & rtpInfo);
+    WebRtcNetEQ_RTPInfo const * RTPinfo() const;
+    WebRtc_UWord8 * datagram() const;
+    WebRtc_UWord8 * payload() const;
+    WebRtc_Word16 payloadLen();
+    WebRtc_Word16 dataLen() const;
+    bool isParsed() const;
+    bool isLost() const;
+    WebRtc_UWord32 time() const { return _receiveTime; };
+
+    WebRtc_UWord8  payloadType() const;
+    WebRtc_UWord16 sequenceNumber() const;
+    WebRtc_UWord32 timeStamp() const;
+    WebRtc_UWord32 SSRC() const;
+    WebRtc_UWord8  markerBit() const;
+
+    int setPayloadType(WebRtc_UWord8 pt);
+    int setSequenceNumber(WebRtc_UWord16 sn);
+    int setTimeStamp(WebRtc_UWord32 ts);
+    int setSSRC(WebRtc_UWord32 ssrc);
+    int setMarkerBit(WebRtc_UWord8 mb);
+    void setTime(WebRtc_UWord32 receiveTime) { _receiveTime = receiveTime; };
+
+    int setRTPheader(const WebRtcNetEQ_RTPInfo *RTPinfo);
+
+    int splitStereo(NETEQTEST_RTPpacket* slaveRtp, enum stereoModes mode);
+
+    int extractRED(int index, WebRtcNetEQ_RTPInfo& red);
+
+    void scramblePayload(void);
+
+    WebRtc_UWord8 *       _datagram;
+    WebRtc_UWord8 *       _payloadPtr;
+    int                 _memSize;
+    WebRtc_Word16         _datagramLen;
+    WebRtc_Word16         _payloadLen;
+    WebRtcNetEQ_RTPInfo  _rtpInfo;
+    bool                _rtpParsed;
+    WebRtc_UWord32        _receiveTime;
+    bool                _lost;
+    std::map<WebRtc_UWord8, bool> _blockList;
+
+protected:
+    static const int _kRDHeaderLen;
+    static const int _kBasicHeaderLen;
+
+    void parseBasicHeader(WebRtcNetEQ_RTPInfo *RTPinfo, int *i_P, int *i_X,
+                          int *i_CC) const;
+    int calcHeaderLength(int i_X, int i_CC) const;
+
+private:
+    void makeRTPheader(unsigned char* rtp_data, WebRtc_UWord8 payloadType,
+                       WebRtc_UWord16 seqNo, WebRtc_UWord32 timestamp,
+                       WebRtc_UWord32 ssrc, WebRtc_UWord8 markerBit) const;
+    WebRtc_UWord16 parseRTPheader(WebRtcNetEQ_RTPInfo *RTPinfo,
+                                  WebRtc_UWord8 **payloadPtr = NULL) const;
+    WebRtc_UWord16 parseRTPheader(WebRtc_UWord8 **payloadPtr = NULL)
+        { return parseRTPheader(&_rtpInfo, payloadPtr);};
+    int calcPadLength(int i_P) const;
+    void splitStereoSample(NETEQTEST_RTPpacket* slaveRtp, int stride);
+    void splitStereoFrame(NETEQTEST_RTPpacket* slaveRtp);
+    void splitStereoDouble(NETEQTEST_RTPpacket* slaveRtp);
+};
+
+#endif //NETEQTEST_RTPPACKET_H
diff --git a/src/modules/audio_coding/neteq/test/NetEqRTPplay.cc b/src/modules/audio_coding/neteq/test/NetEqRTPplay.cc
new file mode 100644
index 0000000..cb03baf
--- /dev/null
+++ b/src/modules/audio_coding/neteq/test/NetEqRTPplay.cc
@@ -0,0 +1,1790 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//TODO(hlundin): Reformat file to meet style guide.
+
+#include <assert.h>
+#include <stdio.h>
+
+/* header includes */
+#include "typedefs.h"
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_internal.h"
+#include "webrtc_neteq_help_macros.h"
+#include "neteq_error_codes.h" // for the API test
+
+#include "NETEQTEST_RTPpacket.h"
+#include "NETEQTEST_DummyRTPpacket.h"
+#include "NETEQTEST_NetEQClass.h"
+#include "NETEQTEST_CodecClass.h"
+
+#ifdef WEBRTC_ANDROID
+#include <ctype.h> // isalpha
+#endif
+#include <string.h>
+#include <stdlib.h>
+#include <time.h>
+#include <map>
+#include <vector>
+
+#ifdef WIN32
+#include <cassert>
+#include <windows.h>
+#endif
+
+#ifdef WEBRTC_LINUX
+#include <netinet/in.h>
+#include <libgen.h>
+#include <cassert>
+#endif
+
+//#include "vld.h"
+
+//#define NETEQ_DELAY_LOGGING
+//#define DUMMY_SLAVE_CHANNEL
+
+#ifdef NETEQ_DELAY_LOGGING
+#include "delay_logging.h"
+#define DUMMY_SLAVE_CHANNEL // do not use a slave channel, only generate zeros instead
+#endif
+
+
+/************************/
+/* Define payload types */
+/************************/
+
+// Payload types are defined in the textfile ptypes.txt, and can be changed after compilation.
+
+
+
+/*********************/
+/* Misc. definitions */
+/*********************/
+
+#define TIME_STEP 1
+#define FIRSTLINELEN 40
+#define MAX_NETEQ_BUFFERSIZE    170000 //100000
+#define CHECK_ZERO(a) {int errCode = a; char tempErrName[WEBRTC_NETEQ_MAX_ERROR_NAME]; if((errCode)!=0){errCode = WebRtcNetEQ_GetErrorCode(inst); WebRtcNetEQ_GetErrorName(errCode, tempErrName, WEBRTC_NETEQ_MAX_ERROR_NAME); printf("\n %s \n line: %d \n error at %s\n Error Code = %d\n",__FILE__,__LINE__,#a, errCode); exit(0);}}
+#define CHECK_NOT_NULL(a) if((a)==NULL){printf("\n %s \n line: %d \nerror at %s\n",__FILE__,__LINE__,#a );return(-1);}
+//#define PLAY_CLEAN // ignore arrival times and let the packets arrive according to RTP timestamps
+#define HDR_SIZE 8 // rtpplay packet header size in bytes
+//#define JUNK_DATA   // scramble the payloads to test error resilience
+//#define ZERO_TS_START
+
+#ifdef JUNK_DATA
+    #define SEED_FILE "randseed.txt"
+#endif
+
+#ifdef WIN32
+#define MY_MAX_DRIVE _MAX_DRIVE
+#define MY_MAX_PATH _MAX_PATH
+#define MY_MAX_FNAME _MAX_FNAME
+#define MY_MAX_EXT _MAX_EXT
+
+#elif defined(WEBRTC_LINUX)
+#include <linux/limits.h>
+#define MY_MAX_PATH PATH_MAX
+
+#elif defined(WEBRTC_MAC)
+#include <sys/syslimits.h>
+#define MY_MAX_PATH PATH_MAX
+#endif // WEBRTC_MAC
+
+/************/
+/* Typedefs */
+/************/
+
+typedef struct {
+    enum WebRtcNetEQDecoder  codec;
+    enum stereoModes    stereo;
+    NETEQTEST_Decoder * decoder[2];
+    int            fs;
+} decoderStruct;
+
+
+/*************************/
+/* Function declarations */
+/*************************/
+
+void stereoInterleave(WebRtc_Word16 *data, WebRtc_Word16 totalLen);
+int getNextRecoutTime(FILE *fp, WebRtc_UWord32 *nextTime);
+void getNextExtraDelay(FILE *fp, WebRtc_UWord32 *t, int *d);
+bool splitStereo(NETEQTEST_RTPpacket* rtp, NETEQTEST_RTPpacket* rtpSlave,
+                 const WebRtc_Word16 *stereoPtype, const enum stereoModes *stereoMode, int noOfStereoCodecs,
+                 const WebRtc_Word16 *cngPtype, int noOfCngCodecs,
+                 bool *isStereo);
+void parsePtypeFile(FILE *ptypeFile, std::map<WebRtc_UWord8, decoderStruct>* decoders);
+int populateUsedCodec(std::map<WebRtc_UWord8, decoderStruct>* decoders, enum WebRtcNetEQDecoder *usedCodec);
+void createAndInsertDecoders (NETEQTEST_NetEQClass *neteq, std::map<WebRtc_UWord8, decoderStruct>* decoders, int channelNumber);
+void free_coders(std::map<WebRtc_UWord8, decoderStruct> & decoders);
+int doAPItest();
+bool changeStereoMode(NETEQTEST_RTPpacket & rtp, std::map<WebRtc_UWord8, decoderStruct> & decoders, enum stereoModes *stereoMode);
+
+
+
+/********************/
+/* Global variables */
+/********************/
+
+WebRtc_Word16 NetEqPacketBuffer[MAX_NETEQ_BUFFERSIZE>>1];
+WebRtc_Word16 NetEqPacketBufferSlave[MAX_NETEQ_BUFFERSIZE>>1];
+
+#ifdef NETEQ_DELAY_LOGGING
+extern "C" {
+    FILE *delay_fid2;   /* file pointer */
+    WebRtc_UWord32 tot_received_packets=0;
+}
+#endif
+
+#ifdef DEF_BUILD_DATE
+extern char BUILD_DATE;
+#endif
+
+WebRtc_UWord32 writtenSamples = 0;
+WebRtc_UWord32 simClock=0;
+
+int main(int argc, char* argv[])
+{
+    std::vector<NETEQTEST_NetEQClass *> NetEQvector;
+    char   version[20];
+
+    enum WebRtcNetEQDecoder usedCodec[kDecoderReservedEnd-1];
+    int noOfCodecs;
+    int ok;
+    WebRtc_Word16 out_data[640*2];
+    WebRtc_Word16 outLen, writeLen;
+    int fs = 8000;
+    WebRtcNetEQ_RTCPStat RTCPstat;
+#ifdef WIN32
+    char outdrive[MY_MAX_DRIVE];
+    char outpath[MY_MAX_PATH];
+    char outfile[MY_MAX_FNAME];
+    char outext[MY_MAX_EXT];
+#endif
+    char outfilename[MY_MAX_PATH];
+#ifdef NETEQ_DELAY_LOGGING
+    float clock_float;
+    int temp_var;
+#endif
+#ifdef JUNK_DATA
+    FILE *seedfile;
+#endif
+    FILE *recoutTimes = NULL;
+    FILE *extraDelays = NULL;
+    WebRtcNetEQPlayoutMode streamingMode = kPlayoutOn;
+    bool preParseRTP = false;
+    bool rtpOnly = false;
+    int packetLen = 0;
+    int packetCount = 0;
+    std::map<WebRtc_UWord8, decoderStruct> decoders;
+    bool dummyRtp = false;
+    bool noDecode = false;
+
+    /* get the version string */
+    WebRtcNetEQ_GetVersion(version);
+    printf("\n\nNetEq version: %s\n", version);
+#ifdef DEF_BUILD_DATE
+    printf("Build time: %s\n", __BUILD_DATE);
+#endif
+
+    /* check number of parameters */
+    if ((argc < 3)
+#ifdef WIN32 // implicit output file name possible for windows
+        && (argc < 2)
+#endif
+        ) {
+        /* print help text and exit */
+        printf("Test program for NetEQ.\n");
+        printf("The program reads an RTP stream from file and inserts it into NetEQ.\n");
+        printf("The format of the RTP stream file should be the same as for rtpplay,\n");
+        printf("and can be obtained e.g., from Ethereal by using\n");
+        printf("Statistics -> RTP -> Show All Streams -> [select a stream] -> Save As\n\n");
+        printf("Usage:\n\n");
+#ifdef WIN32
+        printf("%s RTPfile [outfile] [-options]\n", argv[0]);
+#else
+        printf("%s RTPfile outfile [-options]\n", argv[0]);
+#endif
+        printf("where:\n");
+
+        printf("RTPfile      : RTP stream input file\n\n");
+
+        printf("outfile      : PCM speech output file\n");
+        printf("               Output file name is derived from RTP file name if omitted\n\n");
+
+        printf("-options are optional switches:\n");
+        printf("\t-recout datfile        : supply recout times\n");
+        printf("\t-extradelay datfile    : supply extra delay settings and timing\n");
+        printf("\t-streaming             : engage streaming mode\n");
+        printf("\t-fax                   : engage fax mode\n");
+        printf("\t-preparsertp           : use RecIn with pre-parsed RTP\n");
+        printf("\t-rtponly packLenBytes  : input file consists of constant size RTP packets without RTPplay headers\n");
+        printf("\t-dummyrtp              : input file contains only RTP headers\n");
+        printf("\t-nodecode              : no decoding will be done\n");
+        //printf("\t-switchms              : switch from mono to stereo (copy channel) after 10 seconds\n");
+        //printf("\t-duplicate             : use two instances with identical input (2-channel mono)\n");
+
+        return(0);
+    }
+
+    if (strcmp(argv[1], "-apitest")==0) {
+        // do API test and then return
+        ok=doAPItest();
+
+        if (ok==0)
+            printf("API test successful!\n");
+        else
+            printf("API test failed!\n");
+
+        return(ok);
+    }
+
+    FILE* in_file=fopen(argv[1],"rb");
+    CHECK_NOT_NULL(in_file);
+    printf("Input file: %s\n",argv[1]);
+
+    int argIx = 2; // index of next argument from command line
+
+    if ( argc >= 3 && argv[2][0] != '-' ) { // output name given on command line
+        strcpy(outfilename, argv[2]);
+        argIx++;
+    } else { // derive output name from input name
+#ifdef WIN32
+        _splitpath(argv[1],outdrive,outpath,outfile,outext);
+        _makepath(outfilename,outdrive,outpath,outfile,"pcm");
+#else
+        fprintf(stderr,"Output file name must be specified.\n");
+        return(-1);
+#endif
+    }
+    FILE* out_file=fopen(outfilename,"wb");
+    if (out_file==NULL) {
+        fprintf(stderr,"Could not open file %s for writing\n", outfilename);
+        return(-1);
+    }
+    printf("Output file: %s\n",outfilename);
+
+    // Parse for more arguments, all beginning with '-'
+    while( argIx < argc ) {
+        if (argv[argIx][0] != '-') {
+            fprintf(stderr,"Unknown input argument %s\n", argv[argIx]);
+            return(-1);
+        }
+
+        if( strcmp(argv[argIx], "-recout") == 0 ) {
+            argIx++;
+            recoutTimes = fopen(argv[argIx], "rb");
+            CHECK_NOT_NULL(recoutTimes);
+            argIx++;
+        }
+        else if( strcmp(argv[argIx], "-extradelay") == 0 ) {
+            argIx++;
+            extraDelays = fopen(argv[argIx], "rb");
+            CHECK_NOT_NULL(extraDelays);
+            argIx++;
+        }
+        else if( strcmp(argv[argIx], "-streaming") == 0 ) {
+            argIx++;
+            streamingMode = kPlayoutStreaming;
+        }
+        else if( strcmp(argv[argIx], "-fax") == 0 ) {
+            argIx++;
+            streamingMode = kPlayoutFax;
+        }
+        else if( strcmp(argv[argIx], "-preparsertp") == 0 ) {
+            argIx++;
+            preParseRTP = true;
+        }
+        else if( strcmp(argv[argIx], "-rtponly") == 0 ) {
+            argIx++;
+            rtpOnly = true;
+            packetLen = atoi(argv[argIx]);
+            argIx++;
+            if (packetLen <= 0)
+            {
+                printf("Wrong packet size used with argument -rtponly.\n");
+                exit(1);
+            }
+        }
+        else if (strcmp(argv[argIx], "-dummyrtp") == 0
+            || strcmp(argv[argIx], "-dummy") == 0)
+        {
+            argIx++;
+            dummyRtp = true;
+            noDecode = true; // force noDecode since there are no payloads
+        }
+        else if (strcmp(argv[argIx], "-nodecode") == 0)
+        {
+            argIx++;
+            noDecode = true;
+        }
+        //else if( strcmp(argv[argIx], "-switchms") == 0 ) {
+        //    argIx++;
+        //    switchMS = true;
+        //}
+        //else if( strcmp(argv[argIx], "-duplicate") == 0 ) {
+        //    argIx++;
+        //    duplicatePayload = true;
+        //}
+        else {
+            fprintf(stderr,"Unknown input argument %s\n", argv[argIx]);
+            return(-1);
+        }
+    }
+
+
+
+#ifdef NETEQ_DELAY_LOGGING
+    char delayfile[MY_MAX_PATH];
+#ifdef WIN32
+    _splitpath(outfilename,outdrive,outpath,outfile,outext);
+    _makepath(delayfile,outdrive,outpath,outfile,"d");
+#else
+    sprintf(delayfile, "%s.d", outfilename);
+#endif
+    delay_fid2 = fopen(delayfile,"wb");
+    fprintf(delay_fid2, "#!NetEQ_Delay_Logging%s\n", NETEQ_DELAY_LOGGING_VERSION_STRING);
+#endif
+
+    char ptypesfile[MY_MAX_PATH];
+#ifdef WIN32
+    _splitpath(argv[0],outdrive,outpath,outfile,outext);
+    _makepath(ptypesfile,outdrive,outpath,"ptypes","txt");
+#elif defined(WEBRTC_ANDROID)
+  strcpy(ptypesfile, "/sdcard/ptypes.txt");
+#else
+    // TODO(hlundin): Include path to ptypes, as for WIN32 above.
+  strcpy(ptypesfile, "ptypes.txt");
+#endif
+    FILE *ptypeFile = fopen(ptypesfile,"rt");
+    if (!ptypeFile) {
+        // Check if we can find the file at the usual place in the trunk.
+        if (strstr(argv[0], "out/Debug/")) {
+            int path_len = strstr(argv[0], "out/Debug/") - argv[0];
+            strncpy(ptypesfile, argv[0], path_len);
+            ptypesfile[path_len] = '\0';
+            strcat(ptypesfile,
+                   "src/modules/audio_coding/NetEQ/main/test/ptypes.txt");
+            ptypeFile = fopen(ptypesfile,"rt");
+        }
+    }
+    CHECK_NOT_NULL(ptypeFile);
+    printf("Ptypes file: %s\n\n", ptypesfile);
+
+    parsePtypeFile(ptypeFile, &decoders);
+    fclose(ptypeFile);
+
+    noOfCodecs = populateUsedCodec(&decoders, usedCodec);
+
+
+    /* read RTP file header */
+    if (!rtpOnly)
+    {
+        if (NETEQTEST_RTPpacket::skipFileHeader(in_file) != 0)
+        {
+            fprintf(stderr, "Wrong format in RTP file.\n");
+            return -1;
+        }
+    }
+
+    /* check payload type for first speech packet */
+    long tempFilePos = ftell(in_file);
+    enum stereoModes stereoMode = stereoModeMono;
+
+    NETEQTEST_RTPpacket *rtp;
+    NETEQTEST_RTPpacket *slaveRtp;
+    if (!dummyRtp)
+    {
+        rtp = new NETEQTEST_RTPpacket();
+        slaveRtp = new NETEQTEST_RTPpacket();
+    }
+    else
+    {
+        rtp = new NETEQTEST_DummyRTPpacket();
+        slaveRtp = new NETEQTEST_DummyRTPpacket();
+    }
+
+    if (!rtpOnly)
+    {
+        while (rtp->readFromFile(in_file) >= 0)
+        {
+            if (decoders.count(rtp->payloadType()) > 0
+                && decoders[rtp->payloadType()].codec != kDecoderRED
+                && decoders[rtp->payloadType()].codec != kDecoderAVT
+                && decoders[rtp->payloadType()].codec != kDecoderCNG )
+            {
+                stereoMode = decoders[rtp->payloadType()].stereo;
+                fs = decoders[rtp->payloadType()].fs;
+                break;
+            }
+        }
+    }
+    else
+    {
+        while (rtp->readFixedFromFile(in_file, packetLen) >= 0)
+        {
+            if (decoders.count(rtp->payloadType()) > 0
+                && decoders[rtp->payloadType()].codec != kDecoderRED
+                && decoders[rtp->payloadType()].codec != kDecoderAVT
+                && decoders[rtp->payloadType()].codec != kDecoderCNG )
+            {
+                stereoMode = decoders[rtp->payloadType()].stereo;
+                fs = decoders[rtp->payloadType()].fs;
+                break;
+            }
+        }
+    }
+
+    fseek(in_file, tempFilePos, SEEK_SET /* from beginning */);
+
+
+    /* block some payload types */
+    //rtp->blockPT(72);
+    //rtp->blockPT(23);
+
+    /* read first packet */
+    if (!rtpOnly)
+    {
+        rtp->readFromFile(in_file);
+    }
+    else
+    {
+        rtp->readFixedFromFile(in_file, packetLen);
+        rtp->setTime((1000 * rtp->timeStamp()) / fs);
+    }
+    if (!rtp)
+    {
+        printf("\nWarning: RTP file is empty\n\n");
+    }
+
+
+    /* Initialize NetEQ instances */
+    int numInst = 1;
+    if (stereoMode > stereoModeMono)
+    {
+        numInst = 2;
+    }
+
+    for (int i = 0; i < numInst; i++)
+    {
+        // create memory, allocate, initialize, and allocate packet buffer memory
+        NetEQvector.push_back (new NETEQTEST_NetEQClass(usedCodec, noOfCodecs, static_cast<WebRtc_UWord16>(fs), kTCPLargeJitter));
+
+        createAndInsertDecoders (NetEQvector[i], &decoders, i /* channel */);
+
+        WebRtcNetEQ_SetAVTPlayout(NetEQvector[i]->instance(),1); // enable DTMF playout
+
+        WebRtcNetEQ_SetPlayoutMode(NetEQvector[i]->instance(), streamingMode);
+
+        NetEQvector[i]->usePreparseRTP(preParseRTP);
+
+        NetEQvector[i]->setNoDecode(noDecode);
+
+        if (numInst > 1)
+        {
+            // we are using master/slave mode
+            if (i == 0)
+            {
+                // first instance is master
+                NetEQvector[i]->setMaster();
+            }
+            else
+            {
+                // all other are slaves
+                NetEQvector[i]->setSlave();
+            }
+        }
+    }
+
+
+#ifdef ZERO_TS_START
+    WebRtc_UWord32 firstTS = rtp->timeStamp();
+    rtp->setTimeStamp(0);
+#else
+    WebRtc_UWord32 firstTS = 0;
+#endif
+
+    // check stereo mode
+    if (stereoMode > stereoModeMono)
+    {
+        if(rtp->splitStereo(slaveRtp, stereoMode))
+        {
+            printf("Error in splitStereo\n");
+        }
+    }
+
+#ifdef PLAY_CLEAN
+    WebRtc_UWord32 prevTS = rtp->timeStamp();
+    WebRtc_UWord32 currTS, prev_time;
+#endif
+
+#ifdef JUNK_DATA
+    unsigned int random_seed = (unsigned int) /*1196764538; */time(NULL);
+    srand(random_seed);
+
+    if ( (seedfile = fopen(SEED_FILE, "a+t") ) == NULL ) {
+        fprintf(stderr, "Error: Could not open file %s\n", SEED_FILE);
+    }
+    else {
+        fprintf(seedfile, "%u\n", random_seed);
+        fclose(seedfile);
+    }
+#endif
+
+    WebRtc_UWord32 nextRecoutTime;
+    int lastRecout = getNextRecoutTime(recoutTimes, &nextRecoutTime); // does nothing if recoutTimes == NULL
+
+    if (recoutTimes)
+        simClock = (rtp->time() < nextRecoutTime ? rtp->time(): nextRecoutTime);
+    else
+        simClock = rtp->time(); // start immediately with first packet
+
+    WebRtc_UWord32 start_clock = simClock;
+
+    WebRtc_UWord32 nextExtraDelayTime;
+    int extraDelay = -1;
+    getNextExtraDelay(extraDelays, &nextExtraDelayTime, &extraDelay);
+
+    void *msInfo;
+    msInfo = malloc(WebRtcNetEQ_GetMasterSlaveInfoSize());
+    if(msInfo == NULL)
+        return(-1);
+
+    while(rtp->dataLen() >= 0 || (recoutTimes && !lastRecout)) {
+//        printf("simClock = %Lu\n", simClock);
+
+#ifdef NETEQ_DELAY_LOGGING
+        temp_var = NETEQ_DELAY_LOGGING_SIGNAL_CLOCK;
+        clock_float = (float) simClock;
+        if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
+          return -1;
+        }
+        if (fwrite(&clock_float, sizeof(float), 1, delay_fid2) != 1) {
+          return -1;
+        }
+#endif
+        /* time to set extra delay */
+        if (extraDelay > -1 && simClock >= nextExtraDelayTime) {
+            // set extra delay for all instances
+            for (int i = 0; i < numInst; i++)
+            {
+                WebRtcNetEQ_SetExtraDelay(NetEQvector[i]->instance(), extraDelay);
+            }
+            getNextExtraDelay(extraDelays, &nextExtraDelayTime, &extraDelay);
+        }
+
+        /* check if time to receive */
+        while (simClock >= rtp->time() && rtp->dataLen() >= 0)
+        {
+            if (rtp->dataLen() > 0)
+            {
+
+                // insert main packet
+                NetEQvector[0]->recIn(*rtp);
+
+                if (stereoMode > stereoModeMono
+                    && slaveRtp->dataLen() > 0)
+                {
+                    // insert slave packet
+                    NetEQvector[1]->recIn(*slaveRtp);
+                }
+
+            }
+
+            /* get next packet */
+#ifdef PLAY_CLEAN
+            prev_time = rtp->time();
+#endif
+            if (!rtpOnly)
+            {
+                rtp->readFromFile(in_file);
+            }
+            else
+            {
+                rtp->readFixedFromFile(in_file, packetLen);
+                rtp->setTime((1000 * rtp->timeStamp()) / fs);
+            }
+
+            if (rtp->dataLen() >= 0)
+            {
+                rtp->setTimeStamp(rtp->timeStamp() - firstTS);
+            }
+
+            packetCount++;
+
+            if (changeStereoMode(*rtp, decoders, &stereoMode))
+            {
+                printf("Warning: stereo mode changed\n");
+            }
+
+            if (stereoMode > stereoModeMono)
+            {
+                if(rtp->splitStereo(slaveRtp, stereoMode))
+                {
+                    printf("Error in splitStereo\n");
+                }
+            }
+
+#ifdef PLAY_CLEAN
+            currTS = rtp->timeStamp();
+            rtp->setTime(prev_time + (currTS-prevTS)/(fs/1000));
+            prevTS = currTS;
+#endif
+        }
+
+        /* check if time to RecOut */
+        if ( (!recoutTimes && (simClock%10)==0) // recout times not given from file
+        || ( recoutTimes && (simClock >= nextRecoutTime) ) ) // recout times given from file
+        {
+            if (stereoMode > stereoModeMono)
+            {
+                // stereo
+                WebRtc_Word16 tempLen;
+                tempLen = NetEQvector[0]->recOut( out_data, msInfo ); // master
+                outLen = NetEQvector[1]->recOut( &out_data[tempLen], msInfo ); // slave
+
+                assert(tempLen == outLen);
+
+                writeLen = outLen * 2;
+                stereoInterleave(out_data, writeLen);
+            }
+            else
+            {
+                // mono
+                outLen = NetEQvector[0]->recOut( out_data );
+                writeLen = outLen;
+            }
+
+            // write to file
+            if (fwrite(out_data, writeLen, 2, out_file) != 2) {
+              return -1;
+            }
+            writtenSamples += writeLen;
+
+
+            lastRecout = getNextRecoutTime(recoutTimes, &nextRecoutTime); // does nothing if recoutTimes == NULL
+
+            /* ask for statistics */
+            WebRtcNetEQ_NetworkStatistics inCallStats;
+            WebRtcNetEQ_GetNetworkStatistics(NetEQvector[0]->instance(), &inCallStats);
+
+        }
+
+        /* increase time */
+        simClock+=TIME_STEP;
+    }
+
+    fclose(in_file);
+    fclose(out_file);
+
+#ifdef NETEQ_DELAY_LOGGING
+    temp_var = NETEQ_DELAY_LOGGING_SIGNAL_EOF;
+    if (fwrite(&temp_var, sizeof(int), 1, delay_fid2) != 1) {
+      return -1;
+    }
+    if (fwrite(&tot_received_packets, sizeof(WebRtc_UWord32),
+               1, delay_fid2) != 1) {
+      return -1;
+    }
+    fprintf(delay_fid2,"End of file\n");
+    fclose(delay_fid2);
+#endif
+
+    WebRtcNetEQ_GetRTCPStats(NetEQvector[0]->instance(), &RTCPstat);
+    printf("RTCP statistics:\n");
+    printf("    cum_lost        : %d\n", (int) RTCPstat.cum_lost);
+    printf("    ext_max         : %d\n", (int) RTCPstat.ext_max);
+    printf("    fraction_lost   : %d (%f%%)\n", RTCPstat.fraction_lost, (float)(100.0*RTCPstat.fraction_lost/256.0));
+    printf("    jitter          : %d\n", (int) RTCPstat.jitter);
+
+    printf("\n    Call duration ms    : %u\n", simClock-start_clock);
+
+    printf("\nComplexity estimates (including sub-components):\n");
+    printf("    RecIn complexity    : %.2f MCPS\n", NetEQvector[0]->getRecInTime() / ((float) 1000*(simClock-start_clock)));
+    printf("    RecOut complexity   : %.2f MCPS\n", NetEQvector[0]->getRecOutTime() / ((float) 1000*(simClock-start_clock)));
+
+    free_coders(decoders);
+    //free_coders(0 /* first channel */);
+ //   if (stereoMode > stereoModeMono) {
+ //       free_coders(1 /* second channel */);
+ //   }
+    free(msInfo);
+
+    for (std::vector<NETEQTEST_NetEQClass *>::iterator it = NetEQvector.begin();
+        it < NetEQvector.end(); delete *it++) {
+    }
+
+    printf("\nSimulation done!\n");
+
+#ifdef JUNK_DATA
+    if ( (seedfile = fopen(SEED_FILE, "a+t") ) == NULL ) {
+        fprintf(stderr, "Error: Could not open file %s\n", SEED_FILE);
+    }
+    else {
+        fprintf(seedfile, "ok\n\n");
+        fclose(seedfile);
+    }
+#endif
+
+
+    // Log complexity to file
+/*    FILE *statfile;
+    statfile = fopen("complexity.txt","at");
+    fprintf(statfile,"%.4f, %.4f\n", (float) totTime_RecIn.QuadPart / ((float) 1000*(simClock-start_clock)), (float) totTime_RecOut.QuadPart / ((float) 1000*(simClock-start_clock)));
+    fclose(statfile);*/
+
+    return(0);
+
+}
+
+
+
+
+
+/****************/
+/* Subfunctions */
+/****************/
+
+bool splitStereo(NETEQTEST_RTPpacket* rtp, NETEQTEST_RTPpacket* rtpSlave,
+                 const WebRtc_Word16 *stereoPtype, const enum stereoModes *stereoMode, int noOfStereoCodecs,
+                 const WebRtc_Word16 *cngPtype, int noOfCngCodecs,
+                 bool *isStereo)
+{
+
+    // init
+    //bool isStereo = false;
+    enum stereoModes tempStereoMode = stereoModeMono;
+    bool isCng = false;
+
+    // check payload length
+    if (rtp->dataLen() <= 0) {
+        //*isStereo = false; // don't change
+        return(*isStereo);
+    }
+
+    // check payload type
+    WebRtc_Word16 ptype = rtp->payloadType();
+
+    // is this a cng payload?
+    for (int k = 0; k < noOfCngCodecs; k++) {
+        if (ptype == cngPtype[k]) {
+            // do not change stereo state
+            isCng = true;
+            tempStereoMode = stereoModeFrame;
+        }
+    }
+
+    if (!isCng)
+    {
+        *isStereo = false;
+
+        // is this payload type a stereo codec? which type?
+        for (int k = 0; k < noOfStereoCodecs; k++) {
+            if (ptype == stereoPtype[k]) {
+                tempStereoMode = stereoMode[k];
+                *isStereo = true;
+                break; // exit for loop
+            }
+        }
+    }
+
+    if (*isStereo)
+    {
+        // split the payload if stereo
+
+        if(rtp->splitStereo(rtpSlave, tempStereoMode))
+        {
+            printf("Error in splitStereo\n");
+        }
+
+    }
+
+    return(*isStereo);
+
+}
+
+void stereoInterleave(WebRtc_Word16 *data, WebRtc_Word16 totalLen)
+{
+    int k;
+
+    for(k = totalLen/2; k < totalLen; k++) {
+        WebRtc_Word16 temp = data[k];
+        memmove(&data[2*k - totalLen + 2], &data[2*k - totalLen + 1], (totalLen - k -1) *  sizeof(WebRtc_Word16));
+        data[2*k - totalLen + 1] = temp;
+    }
+}
+
+
+int getNextRecoutTime(FILE *fp, WebRtc_UWord32 *nextTime) {
+
+    float tempTime;
+
+    if (!fp) {
+        return -1;
+    }
+
+    if (fread(&tempTime, sizeof(float), 1, fp) != 0) {
+        // not end of file
+        *nextTime = (WebRtc_UWord32) tempTime;
+        return 0;
+    }
+
+    *nextTime = 0;
+    fclose(fp);
+
+    return 1;
+}
+
+void getNextExtraDelay(FILE *fp, WebRtc_UWord32 *t, int *d) {
+
+    float temp[2];
+
+    if(!fp) {
+        *d = -1;
+        return;
+    }
+
+    if (fread(&temp, sizeof(float), 2, fp) != 0) {
+        // not end of file
+        *t = (WebRtc_UWord32) temp[0];
+        *d = (int) temp[1];
+        return;
+    }
+
+    *d = -1;
+    fclose(fp);
+
+    return;
+}
+
+
+void parsePtypeFile(FILE *ptypeFile, std::map<WebRtc_UWord8, decoderStruct>* decoders)
+{
+    int n, pt;
+    char codec[100];
+    decoderStruct tempDecoder;
+
+    // read first line
+    n = fscanf(ptypeFile, "%s %i\n", codec, &pt);
+
+    while (n==2)
+    {
+        memset(&tempDecoder, 0, sizeof(decoderStruct));
+        tempDecoder.stereo = stereoModeMono;
+
+        if( pt >= 0  // < 0 disables this codec
+            && isalpha(codec[0]) ) // and is a letter
+        {
+
+            /* check for stereo */
+            int L = strlen(codec);
+            bool isStereo = false;
+
+            if (codec[L-1] == '*') {
+                // stereo codec
+                isStereo = true;
+
+                // remove '*'
+                codec[L-1] = '\0';
+            }
+
+#ifdef CODEC_G711
+            if(strcmp(codec, "pcmu") == 0) {
+                tempDecoder.codec = kDecoderPCMu;
+                tempDecoder.fs = 8000;
+            }
+            else if(strcmp(codec, "pcma") == 0) {
+                tempDecoder.codec = kDecoderPCMa;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_IPCMU
+            else if(strcmp(codec, "eg711u") == 0) {
+                tempDecoder.codec = kDecoderEG711u;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_IPCMA
+            else if(strcmp(codec, "eg711a") == 0) {
+                tempDecoder.codec = kDecoderEG711a;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_ILBC
+            else if(strcmp(codec, "ilbc") == 0) {
+                tempDecoder.codec = kDecoderILBC;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_ISAC
+            else if(strcmp(codec, "isac") == 0) {
+                tempDecoder.codec = kDecoderISAC;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_ISACLC
+            else if(strcmp(codec, "isaclc") == 0) {
+                tempDecoder.codec = NETEQ_CODEC_ISACLC;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_ISAC_SWB
+            else if(strcmp(codec, "isacswb") == 0) {
+                tempDecoder.codec = kDecoderISACswb;
+                tempDecoder.fs = 32000;
+            }
+#endif
+#ifdef CODEC_IPCMWB
+            else if(strcmp(codec, "ipcmwb") == 0) {
+                tempDecoder.codec = kDecoderIPCMwb;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_G722
+            else if(strcmp(codec, "g722") == 0) {
+                tempDecoder.codec = kDecoderG722;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_G722_1_16
+            else if(strcmp(codec, "g722_1_16") == 0) {
+                tempDecoder.codec = kDecoderG722_1_16;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_G722_1_24
+            else if(strcmp(codec, "g722_1_24") == 0) {
+                tempDecoder.codec = kDecoderG722_1_24;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_G722_1_32
+            else if(strcmp(codec, "g722_1_32") == 0) {
+                tempDecoder.codec = kDecoderG722_1_32;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_G722_1C_24
+            else if(strcmp(codec, "g722_1c_24") == 0) {
+                tempDecoder.codec = kDecoderG722_1C_24;
+                tempDecoder.fs = 32000;
+            }
+#endif
+#ifdef CODEC_G722_1C_32
+            else if(strcmp(codec, "g722_1c_32") == 0) {
+                tempDecoder.codec = kDecoderG722_1C_32;
+                tempDecoder.fs = 32000;
+            }
+#endif
+#ifdef CODEC_G722_1C_48
+            else if(strcmp(codec, "g722_1c_48") == 0) {
+                tempDecoder.codec = kDecoderG722_1C_48;
+                tempDecoder.fs = 32000;
+            }
+#endif
+#ifdef CODEC_G723
+            else if(strcmp(codec, "g723") == 0) {
+                tempDecoder.codec = NETEQ_CODEC_G723;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_G726
+            else if(strcmp(codec, "g726_16") == 0) {
+                tempDecoder.codec = kDecoderG726_16;
+                tempDecoder.fs = 8000;
+            }
+            else if(strcmp(codec, "g726_24") == 0) {
+                tempDecoder.codec = kDecoderG726_24;
+                tempDecoder.fs = 8000;
+            }
+            else if(strcmp(codec, "g726_32") == 0) {
+                tempDecoder.codec = kDecoderG726_32;
+                tempDecoder.fs = 8000;
+            }
+            else if(strcmp(codec, "g726_40") == 0) {
+                tempDecoder.codec = kDecoderG726_40;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_G729
+            else if(strcmp(codec, "g729") == 0) {
+                tempDecoder.codec = kDecoderG729;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_G729D
+            else if(strcmp(codec, "g729d") == 0) {
+                tempDecoder.codec = NETEQ_CODEC_G729D;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_G729_1
+            else if(strcmp(codec, "g729_1") == 0) {
+                tempDecoder.codec = kDecoderG729_1;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_GSMFR
+            else if(strcmp(codec, "gsmfr") == 0) {
+                tempDecoder.codec = kDecoderGSMFR;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_GSMEFR
+            else if(strcmp(codec, "gsmefr") == 0) {
+                tempDecoder.codec = NETEQ_CODEC_GSMEFR;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_AMR
+            else if(strcmp(codec, "amr") == 0) {
+                tempDecoder.codec = kDecoderAMR;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_AMRWB
+            else if(strcmp(codec, "amrwb") == 0) {
+                tempDecoder.codec = kDecoderAMRWB;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_DVI4
+            else if(strcmp(codec, "dvi4") == 0) {
+                tempDecoder.codec = NETEQ_CODEC_DVI4;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_SPEEX_8
+            else if(strcmp(codec, "speex8") == 0) {
+                tempDecoder.codec = kDecoderSPEEX_8;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_SPEEX_16
+            else if(strcmp(codec, "speex16") == 0) {
+                tempDecoder.codec = kDecoderSPEEX_16;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_CELT_32
+            else if(strcmp(codec, "celt32") == 0) {
+                tempDecoder.codec = kDecoderCELT_32;
+                tempDecoder.fs = 32000;
+            }
+#endif
+#ifdef CODEC_SILK_NB
+            else if(strcmp(codec, "silk8") == 0) {
+                tempDecoder.codec = NETEQ_CODEC_SILK_8;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_SILK_WB
+            else if(strcmp(codec, "silk12") == 0) {
+                tempDecoder.codec = NETEQ_CODEC_SILK_12;
+                tempDecoder.fs = 16000;
+            }
+            else if(strcmp(codec, "silk16") == 0) {
+                tempDecoder.codec = NETEQ_CODEC_SILK_16;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_SILK_SWB
+            else if(strcmp(codec, "silk24") == 0) {
+                tempDecoder.codec = NETEQ_CODEC_SILK_24;
+                tempDecoder.fs = 32000;
+            }
+#endif
+#ifdef CODEC_MELPE
+            else if(strcmp(codec, "melpe") == 0) {
+                tempDecoder.codec = NETEQ_CODEC_MELPE;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_PCM16B
+            else if(strcmp(codec, "pcm16b") == 0) {
+                tempDecoder.codec = kDecoderPCM16B;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_PCM16B_WB
+            else if(strcmp(codec, "pcm16b_wb") == 0) {
+                tempDecoder.codec = kDecoderPCM16Bwb;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_PCM16B_32KHZ
+            else if(strcmp(codec, "pcm16b_swb32khz") == 0) {
+                tempDecoder.codec = kDecoderPCM16Bswb32kHz;
+                tempDecoder.fs = 32000;
+            }
+#endif
+#ifdef CODEC_PCM16B_48KHZ
+            else if(strcmp(codec, "pcm16b_swb48khz") == 0) {
+                tempDecoder.codec = kDecoderPCM16Bswb48kHz;
+                tempDecoder.fs = 48000;
+            }
+#endif
+#ifdef CODEC_CNGCODEC8
+            else if(strcmp(codec, "cn") == 0) {
+                tempDecoder.codec = kDecoderCNG;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_CNGCODEC16
+            else if(strcmp(codec, "cn_wb") == 0) {
+                tempDecoder.codec = kDecoderCNG;
+                tempDecoder.fs = 16000;
+            }
+#endif
+#ifdef CODEC_CNGCODEC32
+            else if(strcmp(codec, "cn_swb32") == 0) {
+                tempDecoder.codec = kDecoderCNG;
+                tempDecoder.fs = 32000;
+            }
+#endif
+#ifdef CODEC_CNGCODEC48
+            else if(strcmp(codec, "cn_swb48") == 0) {
+                tempDecoder.codec = kDecoderCNG;
+                tempDecoder.fs = 48000;
+            }
+#endif
+#ifdef CODEC_ATEVENT_DECODE
+            else if(strcmp(codec, "avt") == 0) {
+                tempDecoder.codec = kDecoderAVT;
+                tempDecoder.fs = 8000;
+            }
+#endif
+#ifdef CODEC_RED
+            else if(strcmp(codec, "red") == 0) {
+                tempDecoder.codec = kDecoderRED;
+                tempDecoder.fs = 8000;
+            }
+#endif
+            else if(isalpha(codec[0])) {
+                printf("Unsupported codec %s\n", codec);
+                // read next line and continue while loop
+                n = fscanf(ptypeFile, "%s %i\n", codec, &pt);
+                continue;
+            }
+            else {
+                // name is not recognized, and does not start with a letter
+                // hence, it is commented out
+                // read next line and continue while loop
+                n = fscanf(ptypeFile, "%s %i\n", codec, &pt);
+                continue;
+            }
+
+            // handle stereo
+            if (tempDecoder.codec == kDecoderCNG)
+            {
+                // always set stereo mode for CNG, even if it is not marked at stereo
+                tempDecoder.stereo = stereoModeFrame;
+            }
+            else if(isStereo)
+            {
+                switch(tempDecoder.codec) {
+                    // sample based codecs
+                    case kDecoderPCMu:
+                    case kDecoderPCMa:
+                    case kDecoderG722:
+                        {
+                            // 1 octet per sample
+                            tempDecoder.stereo = stereoModeSample1;
+                            break;
+                        }
+                    case kDecoderPCM16B:
+                    case kDecoderPCM16Bwb:
+                    case kDecoderPCM16Bswb32kHz:
+                    case kDecoderPCM16Bswb48kHz:
+                        {
+                            // 2 octets per sample
+                            tempDecoder.stereo = stereoModeSample2;
+                            break;
+                        }
+
+                    case kDecoderCELT_32:
+                    {
+                      tempDecoder.stereo = stereoModeDuplicate;
+                      break;
+                    }
+                        // fixed-rate frame codecs
+//                    case kDecoderG729:
+//                    case NETEQ_CODEC_G729D:
+//                    case NETEQ_CODEC_G729E:
+//                    case kDecoderG722_1_16:
+//                    case kDecoderG722_1_24:
+//                    case kDecoderG722_1_32:
+//                    case kDecoderG722_1C_24:
+//                    case kDecoderG722_1C_32:
+//                    case kDecoderG722_1C_48:
+//                    case NETEQ_CODEC_MELPE:
+//                        {
+//                            tempDecoder.stereo = stereoModeFrame;
+//                            break;
+//                        }
+                    default:
+                        {
+                            printf("Cannot use codec %s as stereo codec\n", codec);
+                            exit(0);
+                        }
+                }
+            }
+
+            if (pt > 127)
+            {
+                printf("Payload type must be less than 128\n");
+                exit(0);
+            }
+
+            // insert into codecs map
+            (*decoders)[static_cast<WebRtc_UWord8>(pt)] = tempDecoder;
+
+        }
+
+        n = fscanf(ptypeFile, "%s %i\n", codec, &pt);
+    } // end while
+
+}
+
+
+bool changeStereoMode(NETEQTEST_RTPpacket & rtp, std::map<WebRtc_UWord8, decoderStruct> & decoders, enum stereoModes *stereoMode)
+{
+        if (decoders.count(rtp.payloadType()) > 0
+            && decoders[rtp.payloadType()].codec != kDecoderRED
+            && decoders[rtp.payloadType()].codec != kDecoderAVT
+            && decoders[rtp.payloadType()].codec != kDecoderCNG )
+        {
+            if (decoders[rtp.payloadType()].stereo != *stereoMode)
+            {
+                *stereoMode = decoders[rtp.payloadType()].stereo;
+                return true; // stereo mode did change
+            }
+        }
+
+        return false; // stereo mode did not change
+}
+
+
+int populateUsedCodec(std::map<WebRtc_UWord8, decoderStruct>* decoders, enum WebRtcNetEQDecoder *usedCodec)
+{
+    int numCodecs = 0;
+
+    std::map<WebRtc_UWord8, decoderStruct>::iterator it;
+
+    it = decoders->begin();
+
+    for (int i = 0; i < static_cast<int>(decoders->size()); i++, it++)
+    {
+        usedCodec[numCodecs] = (*it).second.codec;
+        numCodecs++;
+    }
+
+    return numCodecs;
+}
+
+
+void createAndInsertDecoders (NETEQTEST_NetEQClass *neteq, std::map<WebRtc_UWord8, decoderStruct>* decoders, int channelNumber)
+{
+    std::map<WebRtc_UWord8, decoderStruct>::iterator it;
+
+    for (it = decoders->begin(); it != decoders->end();  it++)
+    {
+        if (channelNumber == 0 ||
+            ((*it).second.stereo > stereoModeMono ))
+        {
+            // create decoder instance
+            WebRtc_UWord8 pt = static_cast<WebRtc_UWord8>( (*it).first );
+            NETEQTEST_Decoder **dec = &((*it).second.decoder[channelNumber]);
+            enum WebRtcNetEQDecoder type = (*it).second.codec;
+
+            switch (type)
+            {
+#ifdef CODEC_G711
+            case kDecoderPCMu:
+                *dec = new decoder_PCMU( pt );
+                break;
+            case kDecoderPCMa:
+                *dec = new decoder_PCMA( pt );
+                break;
+#endif
+#ifdef CODEC_IPCMU
+            case kDecoderEG711u:
+                *dec = new decoder_IPCMU( pt );
+                break;
+#endif
+#ifdef CODEC_IPCMA
+            case kDecoderEG711a:
+                *dec = new decoder_IPCMA( pt );
+                break;
+#endif
+#ifdef CODEC_IPCMWB
+            case kDecoderIPCMwb:
+                *dec = new decoder_IPCMWB( pt );
+                break;
+#endif
+#ifdef CODEC_ILBC
+            case kDecoderILBC:
+                *dec = new decoder_ILBC( pt );
+                break;
+#endif
+#ifdef CODEC_ISAC
+            case kDecoderISAC:
+                *dec = new decoder_iSAC( pt );
+                break;
+#endif
+#ifdef CODEC_ISAC_SWB
+            case kDecoderISACswb:
+                *dec = new decoder_iSACSWB( pt );
+                break;
+#endif
+#ifdef CODEC_G729
+            case kDecoderG729:
+                *dec = new decoder_G729( pt );
+                break;
+            case NETEQ_CODEC_G729D:
+                printf("Error: G729D not supported\n");
+                break;
+#endif
+#ifdef CODEC_G729E
+            case NETEQ_CODEC_G729E:
+                *dec = new decoder_G729E( pt );
+                break;
+#endif
+#ifdef CODEC_G729_1
+            case kDecoderG729_1:
+                *dec = new decoder_G729_1( pt );
+                break;
+#endif
+#ifdef CODEC_G723
+            case NETEQ_CODEC_G723:
+                *dec = new decoder_G723( pt );
+                break;
+#endif
+#ifdef CODEC_PCM16B
+            case kDecoderPCM16B:
+                *dec = new decoder_PCM16B_NB( pt );
+                break;
+#endif
+#ifdef CODEC_PCM16B_WB
+            case kDecoderPCM16Bwb:
+                *dec = new decoder_PCM16B_WB( pt );
+                break;
+#endif
+#ifdef CODEC_PCM16B_32KHZ
+            case kDecoderPCM16Bswb32kHz:
+                *dec = new decoder_PCM16B_SWB32( pt );
+                break;
+#endif
+#ifdef CODEC_PCM16B_48KHZ
+            case kDecoderPCM16Bswb48kHz:
+                *dec = new decoder_PCM16B_SWB48( pt );
+                break;
+#endif
+#ifdef CODEC_DVI4
+            case NETEQ_CODEC_DVI4:
+                *dec = new decoder_DVI4( pt );
+                break;
+#endif
+#ifdef CODEC_G722
+            case kDecoderG722:
+                *dec = new decoder_G722( pt );
+                break;
+#endif
+#ifdef CODEC_G722_1_16
+            case kDecoderG722_1_16:
+                *dec = new decoder_G722_1_16( pt );
+                break;
+#endif
+#ifdef CODEC_G722_1_24
+            case kDecoderG722_1_24:
+                *dec = new decoder_G722_1_24( pt );
+                break;
+#endif
+#ifdef CODEC_G722_1_32
+            case kDecoderG722_1_32:
+                *dec = new decoder_G722_1_32( pt );
+                break;
+#endif
+#ifdef CODEC_G722_1C_24
+            case kDecoderG722_1C_24:
+                *dec = new decoder_G722_1C_24( pt );
+                break;
+#endif
+#ifdef CODEC_G722_1C_32
+            case kDecoderG722_1C_32:
+                *dec = new decoder_G722_1C_32( pt );
+                break;
+#endif
+#ifdef CODEC_G722_1C_48
+            case kDecoderG722_1C_48:
+                *dec = new decoder_G722_1C_48( pt );
+                break;
+#endif
+#ifdef CODEC_AMR
+            case kDecoderAMR:
+                *dec = new decoder_AMR( pt );
+                break;
+#endif
+#ifdef CODEC_AMRWB
+            case kDecoderAMRWB:
+                *dec = new decoder_AMRWB( pt );
+                break;
+#endif
+#ifdef CODEC_GSMFR
+            case kDecoderGSMFR:
+                *dec = new decoder_GSMFR( pt );
+                break;
+#endif
+#ifdef CODEC_GSMEFR
+            case NETEQ_CODEC_GSMEFR:
+                *dec = new decoder_GSMEFR( pt );
+                break;
+#endif
+#ifdef CODEC_G726
+            case kDecoderG726_16:
+                *dec = new decoder_G726_16( pt );
+                break;
+            case kDecoderG726_24:
+                *dec = new decoder_G726_24( pt );
+                break;
+            case kDecoderG726_32:
+                *dec = new decoder_G726_32( pt );
+                break;
+            case kDecoderG726_40:
+                *dec = new decoder_G726_40( pt );
+                break;
+#endif
+#ifdef CODEC_MELPE
+            case NETEQ_CODEC_MELPE:
+#if (_MSC_VER >= 1400) && !defined(_WIN64) // only for Visual 2005 or later, and not for x64
+                *dec = new decoder_MELPE( pt );
+#endif
+                break;
+#endif
+#ifdef CODEC_SPEEX_8
+            case kDecoderSPEEX_8:
+                *dec = new decoder_SPEEX( pt, 8000 );
+                break;
+#endif
+#ifdef CODEC_SPEEX_16
+            case kDecoderSPEEX_16:
+                *dec = new decoder_SPEEX( pt, 16000 );
+                break;
+#endif
+#ifdef CODEC_CELT_32
+            case kDecoderCELT_32:
+              if (channelNumber == 0)
+                *dec = new decoder_CELT( pt, 32000 );
+              else
+                *dec = new decoder_CELTslave( pt, 32000 );
+                break;
+#endif
+#ifdef CODEC_RED
+            case kDecoderRED:
+                *dec = new decoder_RED( pt );
+                break;
+#endif
+#ifdef CODEC_ATEVENT_DECODE
+            case kDecoderAVT:
+                *dec = new decoder_AVT( pt );
+                break;
+#endif
+#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
+    defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
+            case kDecoderCNG:
+                *dec = new decoder_CNG( pt, static_cast<WebRtc_UWord16>((*it).second.fs) );
+                break;
+#endif
+#ifdef CODEC_ISACLC
+            case NETEQ_CODEC_ISACLC:
+                *dec = new decoder_iSACLC( pt );
+                break;
+#endif
+#ifdef CODEC_SILK_NB
+            case NETEQ_CODEC_SILK_8:
+#if (_MSC_VER >= 1400) && !defined(_WIN64) // only for Visual 2005 or later, and not for x64
+                *dec = new decoder_SILK8( pt );
+#endif
+                break;
+#endif
+#ifdef CODEC_SILK_WB
+            case NETEQ_CODEC_SILK_12:
+#if (_MSC_VER >= 1400) && !defined(_WIN64) // only for Visual 2005 or later, and not for x64
+                *dec = new decoder_SILK12( pt );
+#endif
+                break;
+#endif
+#ifdef CODEC_SILK_WB
+            case NETEQ_CODEC_SILK_16:
+#if (_MSC_VER >= 1400) && !defined(_WIN64) // only for Visual 2005 or later, and not for x64
+                *dec = new decoder_SILK16( pt );
+#endif
+                break;
+#endif
+#ifdef CODEC_SILK_SWB
+            case NETEQ_CODEC_SILK_24:
+#if (_MSC_VER >= 1400) && !defined(_WIN64) // only for Visual 2005 or later, and not for x64
+                *dec = new decoder_SILK24( pt );
+#endif
+                break;
+#endif
+
+            default:
+                printf("Unknown codec type encountered in createAndInsertDecoders\n");
+                exit(0);
+            }
+
+            // insert into codec DB
+            if (*dec)
+            {
+                (*dec)->loadToNetEQ(*neteq);
+            }
+        }
+    }
+
+}
+
+
+void free_coders(std::map<WebRtc_UWord8, decoderStruct> & decoders)
+{
+    std::map<WebRtc_UWord8, decoderStruct>::iterator it;
+
+    for (it = decoders.begin(); it != decoders.end();  it++)
+    {
+        if ((*it).second.decoder[0])
+        {
+            delete (*it).second.decoder[0];
+        }
+
+        if ((*it).second.decoder[1])
+        {
+            delete (*it).second.decoder[1];
+        }
+    }
+}
+
+
+
+#include "pcm16b.h"
+#include "g711_interface.h"
+#include "isac.h"
+
+int doAPItest() {
+
+    char   version[20];
+    void *inst;
+    enum WebRtcNetEQDecoder usedCodec;
+    int NetEqBufferMaxPackets, BufferSizeInBytes;
+    WebRtcNetEQ_CodecDef codecInst;
+    WebRtcNetEQ_RTCPStat RTCPstat;
+    WebRtc_UWord32 timestamp;
+    int memorySize;
+    int ok;
+
+    printf("API-test:\n");
+
+    /* get the version string */
+    WebRtcNetEQ_GetVersion(version);
+    printf("NetEq version: %s\n\n", version);
+
+    /* test that API functions return -1 if instance is NULL */
+#define CHECK_MINUS_ONE(x) {int errCode = x; if((errCode)!=-1){printf("\n API test failed at line %d: %s. Function did not return -1 as expected\n",__LINE__,#x); return(-1);}}
+//#define RESET_ERROR(x) ((MainInst_t*) x)->ErrorCode = 0;
+    inst = NULL;
+
+    CHECK_MINUS_ONE(WebRtcNetEQ_GetErrorCode(inst))
+    CHECK_MINUS_ONE(WebRtcNetEQ_Assign(&inst, NULL))
+//  printf("WARNING: Test of WebRtcNetEQ_Assign() is disabled due to a bug.\n");
+    usedCodec=kDecoderPCMu;
+    CHECK_MINUS_ONE(WebRtcNetEQ_GetRecommendedBufferSize(inst, &usedCodec, 1, kTCPLargeJitter,  &NetEqBufferMaxPackets, &BufferSizeInBytes))
+    CHECK_MINUS_ONE(WebRtcNetEQ_AssignBuffer(inst, NetEqBufferMaxPackets, NetEqPacketBuffer, BufferSizeInBytes))
+
+    CHECK_MINUS_ONE(WebRtcNetEQ_Init(inst, 8000))
+    CHECK_MINUS_ONE(WebRtcNetEQ_SetAVTPlayout(inst, 0))
+    CHECK_MINUS_ONE(WebRtcNetEQ_SetExtraDelay(inst, 17))
+    CHECK_MINUS_ONE(WebRtcNetEQ_SetPlayoutMode(inst, kPlayoutOn))
+
+    CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbReset(inst))
+    CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbAdd(inst, &codecInst))
+    CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbRemove(inst, usedCodec))
+    WebRtc_Word16 temp1, temp2;
+    CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbGetSizeInfo(inst, &temp1, &temp2))
+    CHECK_MINUS_ONE(WebRtcNetEQ_CodecDbGetCodecInfo(inst, 0, &usedCodec))
+
+    CHECK_MINUS_ONE(WebRtcNetEQ_RecIn(inst, &temp1, 17, 4711))
+    CHECK_MINUS_ONE(WebRtcNetEQ_RecOut(inst, &temp1, &temp2))
+    CHECK_MINUS_ONE(WebRtcNetEQ_GetRTCPStats(inst, &RTCPstat)); // error here!!!
+    CHECK_MINUS_ONE(WebRtcNetEQ_GetSpeechTimeStamp(inst, &timestamp))
+    WebRtcNetEQOutputType temptype;
+    CHECK_MINUS_ONE(WebRtcNetEQ_GetSpeechOutputType(inst, &temptype))
+
+    WebRtc_UWord8 tempFlags;
+    WebRtc_UWord16 utemp1, utemp2;
+    CHECK_MINUS_ONE(WebRtcNetEQ_VQmonRecOutStatistics(inst, &utemp1, &utemp2, &tempFlags))
+    CHECK_MINUS_ONE(WebRtcNetEQ_VQmonGetRxStatistics(inst, &utemp1, &utemp2))
+
+    WebRtcNetEQ_AssignSize(&memorySize);
+    CHECK_ZERO(WebRtcNetEQ_Assign(&inst, malloc(memorySize)))
+
+    /* init with wrong sample frequency */
+    CHECK_MINUS_ONE(WebRtcNetEQ_Init(inst, 17))
+
+    /* init with correct fs */
+    CHECK_ZERO(WebRtcNetEQ_Init(inst, 8000))
+
+    /* GetRecommendedBufferSize with wrong codec */
+    usedCodec=kDecoderReservedStart;
+    ok = WebRtcNetEQ_GetRecommendedBufferSize(inst, &usedCodec, 1, kTCPLargeJitter , &NetEqBufferMaxPackets, &BufferSizeInBytes);
+    if((ok!=-1) || ((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_UNKNOWN_CODEC))){
+        printf("WebRtcNetEQ_GetRecommendedBufferSize() did not return proper error code for wrong codec.\n");
+        printf("return value = %d; error code = %d\n", ok, WebRtcNetEQ_GetErrorCode(inst));
+    }
+    //RESET_ERROR(inst)
+
+    /* GetRecommendedBufferSize with wrong network type */
+    usedCodec = kDecoderPCMu;
+    ok=WebRtcNetEQ_GetRecommendedBufferSize(inst, &usedCodec, 1, (enum WebRtcNetEQNetworkType) 4711 , &NetEqBufferMaxPackets, &BufferSizeInBytes);
+    if ((ok!=-1) || ((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-FAULTY_NETWORK_TYPE))) {
+        printf("WebRtcNetEQ_GetRecommendedBufferSize() did not return proper error code for wrong network type.\n");
+        printf("return value = %d; error code = %d\n", ok, WebRtcNetEQ_GetErrorCode(inst));
+        //RESET_ERROR(inst)
+    }
+    CHECK_ZERO(WebRtcNetEQ_GetRecommendedBufferSize(inst, &usedCodec, 1, kTCPLargeJitter , &NetEqBufferMaxPackets, &BufferSizeInBytes))
+
+    /* try to do RecIn before assigning the packet buffer */
+/*  makeRTPheader(rtp_data, NETEQ_CODEC_AVT_PT, 17,4711, 1235412312);
+    makeDTMFpayload(&rtp_data[12], 1, 1, 10, 100);
+    ok = WebRtcNetEQ_RecIn(inst, (short *) rtp_data, 12+4, 4711);
+    printf("return value = %d; error code = %d\n", ok, WebRtcNetEQ_GetErrorCode(inst));*/
+
+    /* check all limits of WebRtcNetEQ_AssignBuffer */
+    ok=WebRtcNetEQ_AssignBuffer(inst, NetEqBufferMaxPackets, NetEqPacketBuffer, 149<<1);
+    if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-PBUFFER_INIT_ERROR))) {
+        printf("WebRtcNetEQ_AssignBuffer() did not return proper error code for wrong sizeinbytes\n");
+    }
+    ok=WebRtcNetEQ_AssignBuffer(inst, NetEqBufferMaxPackets, NULL, BufferSizeInBytes);
+    if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-PBUFFER_INIT_ERROR))) {
+        printf("WebRtcNetEQ_AssignBuffer() did not return proper error code for NULL memory pointer\n");
+    }
+    ok=WebRtcNetEQ_AssignBuffer(inst, 1, NetEqPacketBuffer, BufferSizeInBytes);
+    if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-PBUFFER_INIT_ERROR))) {
+        printf("WebRtcNetEQ_AssignBuffer() did not return proper error code for wrong MaxNoOfPackets\n");
+    }
+    ok=WebRtcNetEQ_AssignBuffer(inst, 601, NetEqPacketBuffer, BufferSizeInBytes);
+    if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-PBUFFER_INIT_ERROR))) {
+        printf("WebRtcNetEQ_AssignBuffer() did not return proper error code for wrong MaxNoOfPackets\n");
+    }
+
+    /* do correct assignbuffer */
+    CHECK_ZERO(WebRtcNetEQ_AssignBuffer(inst, NetEqBufferMaxPackets, NetEqPacketBuffer, BufferSizeInBytes))
+
+    ok=WebRtcNetEQ_SetExtraDelay(inst, -1);
+    if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-FAULTY_DELAYVALUE))) {
+        printf("WebRtcNetEQ_SetExtraDelay() did not return proper error code for too small delay\n");
+    }
+    ok=WebRtcNetEQ_SetExtraDelay(inst, 1001);
+    if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-FAULTY_DELAYVALUE))) {
+        printf("WebRtcNetEQ_SetExtraDelay() did not return proper error code for too large delay\n");
+    }
+
+    ok=WebRtcNetEQ_SetPlayoutMode(inst,(enum WebRtcNetEQPlayoutMode) 4711);
+    if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-FAULTY_PLAYOUTMODE))) {
+        printf("WebRtcNetEQ_SetPlayoutMode() did not return proper error code for wrong mode\n");
+    }
+
+    /* number of codecs should return zero before adding any codecs */
+    WebRtcNetEQ_CodecDbGetSizeInfo(inst, &temp1, &temp2);
+    if(temp1!=0)
+        printf("WebRtcNetEQ_CodecDbGetSizeInfo() return non-zero number of codecs in DB before adding any codecs\n");
+
+    /* get info from empty database */
+    ok=WebRtcNetEQ_CodecDbGetCodecInfo(inst, 17, &usedCodec);
+    if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_NOT_EXIST1))) {
+        printf("WebRtcNetEQ_CodecDbGetCodecInfo() did not return proper error code for out-of-range entry number\n");
+    }
+
+    /* remove codec from empty database */
+    ok=WebRtcNetEQ_CodecDbRemove(inst,kDecoderPCMa);
+    if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_NOT_EXIST4))) {
+        printf("WebRtcNetEQ_CodecDbRemove() did not return proper error code when removing codec that has not been added\n");
+    }
+
+    /* add codec with unsupported fs */
+#ifdef CODEC_PCM16B
+#ifndef NETEQ_48KHZ_WIDEBAND
+    SET_CODEC_PAR(codecInst,kDecoderPCM16Bswb48kHz,77,NULL,48000);
+    SET_PCM16B_SWB48_FUNCTIONS(codecInst);
+    ok=WebRtcNetEQ_CodecDbAdd(inst, &codecInst);
+    if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_UNSUPPORTED_FS))) {
+        printf("WebRtcNetEQ_CodecDbAdd() did not return proper error code when adding codec with unsupported sample freq\n");
+    }
+#else
+    printf("Could not test adding codec with unsupported sample frequency since NetEQ is compiled with 48kHz support.\n");
+#endif
+#else
+    printf("Could not test adding codec with unsupported sample frequency since NetEQ is compiled without PCM16B support.\n");
+#endif
+
+    /* add two codecs with identical payload types */
+    SET_CODEC_PAR(codecInst,kDecoderPCMa,17,NULL,8000);
+    SET_PCMA_FUNCTIONS(codecInst);
+    CHECK_ZERO(WebRtcNetEQ_CodecDbAdd(inst, &codecInst))
+
+    SET_CODEC_PAR(codecInst,kDecoderPCMu,17,NULL,8000);
+    SET_PCMU_FUNCTIONS(codecInst);
+    ok=WebRtcNetEQ_CodecDbAdd(inst, &codecInst);
+    if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_PAYLOAD_TAKEN))) {
+        printf("WebRtcNetEQ_CodecDbAdd() did not return proper error code when adding two codecs with identical payload types\n");
+    }
+
+    /* try adding several payload types for CNG codecs */
+    SET_CODEC_PAR(codecInst,kDecoderCNG,105,NULL,16000);
+    SET_CNG_FUNCTIONS(codecInst);
+    CHECK_ZERO(WebRtcNetEQ_CodecDbAdd(inst, &codecInst))
+    SET_CODEC_PAR(codecInst,kDecoderCNG,13,NULL,8000);
+    SET_CNG_FUNCTIONS(codecInst);
+    CHECK_ZERO(WebRtcNetEQ_CodecDbAdd(inst, &codecInst))
+
+    /* try adding a speech codec over a CNG codec */
+    SET_CODEC_PAR(codecInst,kDecoderISAC,105,NULL,16000); /* same as WB CNG above */
+    SET_ISAC_FUNCTIONS(codecInst);
+    ok=WebRtcNetEQ_CodecDbAdd(inst, &codecInst);
+    if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_PAYLOAD_TAKEN))) {
+        printf("WebRtcNetEQ_CodecDbAdd() did not return proper error code when adding a speech codec over a CNG codec\n");
+    }
+
+    /* try adding a CNG codec over a speech codec */
+    SET_CODEC_PAR(codecInst,kDecoderCNG,17,NULL,32000); /* same as PCMU above */
+    SET_CNG_FUNCTIONS(codecInst);
+    ok=WebRtcNetEQ_CodecDbAdd(inst, &codecInst);
+    if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_PAYLOAD_TAKEN))) {
+        printf("WebRtcNetEQ_CodecDbAdd() did not return proper error code when adding a speech codec over a CNG codec\n");
+    }
+
+
+    /* remove codec out of range */
+    ok=WebRtcNetEQ_CodecDbRemove(inst,kDecoderReservedStart);
+    if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_UNSUPPORTED_CODEC))) {
+        printf("WebRtcNetEQ_CodecDbRemove() did not return proper error code when removing codec that is out of range\n");
+    }
+    ok=WebRtcNetEQ_CodecDbRemove(inst,kDecoderReservedEnd);
+    if((ok!=-1)||((ok==-1)&&(WebRtcNetEQ_GetErrorCode(inst)!=-CODEC_DB_UNSUPPORTED_CODEC))) {
+        printf("WebRtcNetEQ_CodecDbRemove() did not return proper error code when removing codec that is out of range\n");
+    }
+
+    /*SET_CODEC_PAR(codecInst,kDecoderEG711a,NETEQ_CODEC_EG711A_PT,NetEqiPCMAState,8000);
+    SET_IPCMA_FUNCTIONS(codecInst);
+    CHECK_ZERO(WebRtcNetEQ_CodecDbAdd(inst, &codecInst))
+*/
+    free(inst);
+
+    return(0);
+
+}
diff --git a/src/modules/audio_coding/neteq/test/PayloadTypes.h b/src/modules/audio_coding/neteq/test/PayloadTypes.h
new file mode 100644
index 0000000..f6cc3da
--- /dev/null
+++ b/src/modules/audio_coding/neteq/test/PayloadTypes.h
@@ -0,0 +1,77 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/* PayloadTypes.h */
+/* Used by NetEqRTPplay application */
+
+/* RTP defined codepoints */
+#define NETEQ_CODEC_PCMU_PT				0
+#define NETEQ_CODEC_GSMFR_PT			3
+#define NETEQ_CODEC_G723_PT				4
+#define NETEQ_CODEC_DVI4_PT				125 // 8 kHz version
+//#define NETEQ_CODEC_DVI4_16_PT			6  // 16 kHz version
+#define NETEQ_CODEC_PCMA_PT				8
+#define NETEQ_CODEC_G722_PT				9
+#define NETEQ_CODEC_CN_PT				13
+//#define NETEQ_CODEC_G728_PT				15
+//#define NETEQ_CODEC_DVI4_11_PT			16  // 11.025 kHz version
+//#define NETEQ_CODEC_DVI4_22_PT			17  // 22.050 kHz version
+#define NETEQ_CODEC_G729_PT				18
+
+/* Dynamic RTP codepoints as defined in VoiceEngine (file VEAPI.cpp) */
+#define NETEQ_CODEC_IPCMWB_PT			97
+#define NETEQ_CODEC_SPEEX8_PT			98
+#define NETEQ_CODEC_SPEEX16_PT			99
+#define NETEQ_CODEC_EG711U_PT			100
+#define NETEQ_CODEC_EG711A_PT			101
+#define NETEQ_CODEC_ILBC_PT				102
+#define NETEQ_CODEC_ISAC_PT				103
+#define NETEQ_CODEC_ISACLC_PT			119
+#define NETEQ_CODEC_ISACSWB_PT			104
+#define NETEQ_CODEC_AVT_PT				106
+#define NETEQ_CODEC_G722_1_16_PT		108
+#define NETEQ_CODEC_G722_1_24_PT		109
+#define NETEQ_CODEC_G722_1_32_PT		110
+#define NETEQ_CODEC_SC3_PT				111
+#define NETEQ_CODEC_AMR_PT				112
+#define NETEQ_CODEC_GSMEFR_PT			113
+//#define NETEQ_CODEC_ILBCRCU_PT			114
+#define NETEQ_CODEC_G726_16_PT			115
+#define NETEQ_CODEC_G726_24_PT			116
+#define NETEQ_CODEC_G726_32_PT			121
+#define NETEQ_CODEC_RED_PT				117
+#define NETEQ_CODEC_G726_40_PT			118
+//#define NETEQ_CODEC_ENERGY_PT			120
+#define NETEQ_CODEC_CN_WB_PT			105
+#define NETEQ_CODEC_CN_SWB_PT           126
+#define NETEQ_CODEC_G729_1_PT			107
+#define NETEQ_CODEC_G729D_PT			123
+#define NETEQ_CODEC_MELPE_PT			124
+#define NETEQ_CODEC_CELT32_PT     114
+
+/* Extra dynamic codepoints */
+#define NETEQ_CODEC_AMRWB_PT			120
+#define NETEQ_CODEC_PCM16B_PT			93
+#define NETEQ_CODEC_PCM16B_WB_PT		94
+#define NETEQ_CODEC_PCM16B_SWB32KHZ_PT	95
+#define NETEQ_CODEC_PCM16B_SWB48KHZ_PT	96
+#define NETEQ_CODEC_MPEG4AAC_PT			122
+
+
+/* Not default in VoiceEngine */
+#define NETEQ_CODEC_G722_1C_24_PT		84
+#define NETEQ_CODEC_G722_1C_32_PT		85
+#define NETEQ_CODEC_G722_1C_48_PT		86
+
+#define NETEQ_CODEC_SILK_8_PT			80
+#define NETEQ_CODEC_SILK_12_PT			81
+#define NETEQ_CODEC_SILK_16_PT			82
+#define NETEQ_CODEC_SILK_24_PT			83
+
diff --git a/src/modules/audio_coding/neteq/test/RTPanalyze.cc b/src/modules/audio_coding/neteq/test/RTPanalyze.cc
new file mode 100644
index 0000000..4d7d573
--- /dev/null
+++ b/src/modules/audio_coding/neteq/test/RTPanalyze.cc
@@ -0,0 +1,72 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+#include <stdio.h>
+#include <vector>
+
+#include "modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h"
+#include "modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.h"
+
+//#define WEBRTC_DUMMY_RTP
+
+enum {
+  kRedPayloadType = 127
+};
+
+int main(int argc, char* argv[]) {
+  FILE* in_file = fopen(argv[1], "rb");
+  if (!in_file) {
+    printf("Cannot open input file %s\n", argv[1]);
+    return -1;
+  }
+  printf("Input file: %s\n", argv[1]);
+
+  FILE* out_file = fopen(argv[2], "wt");
+  if (!out_file) {
+    printf("Cannot open output file %s\n", argv[2]);
+    return -1;
+  }
+  printf("Output file: %s\n\n", argv[2]);
+
+  // Print file header.
+  fprintf(out_file, "SeqNo  TimeStamp   SendTime  Size    PT  M\n");
+
+  // Read file header.
+  NETEQTEST_RTPpacket::skipFileHeader(in_file);
+#ifdef WEBRTC_DUMMY_RTP
+  NETEQTEST_DummyRTPpacket packet;
+#else
+  NETEQTEST_RTPpacket packet;
+#endif
+
+  while (packet.readFromFile(in_file) >= 0) {
+    // Write packet data to file.
+    fprintf(out_file, "%5u %10u %10u %5i %5i %2i\n",
+            packet.sequenceNumber(), packet.timeStamp(), packet.time(),
+            packet.dataLen(), packet.payloadType(), packet.markerBit());
+    if (packet.payloadType() == kRedPayloadType) {
+      WebRtcNetEQ_RTPInfo red_header;
+      int len;
+      int red_index = 0;
+      while ((len = packet.extractRED(red_index++, red_header)) >= 0) {
+        fprintf(out_file, "* %5u %10u %10u %5i %5i\n",
+                red_header.sequenceNumber, red_header.timeStamp,
+                packet.time(), len, red_header.payloadType);
+      }
+      assert(red_index > 1);  // We must get at least one payload.
+    }
+  }
+
+  fclose(in_file);
+  fclose(out_file);
+
+  return 0;
+}
diff --git a/src/modules/audio_coding/neteq/test/RTPcat.cc b/src/modules/audio_coding/neteq/test/RTPcat.cc
new file mode 100644
index 0000000..001b00b
--- /dev/null
+++ b/src/modules/audio_coding/neteq/test/RTPcat.cc
@@ -0,0 +1,75 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+
+#include <algorithm>
+#include <vector>
+
+#include "gtest/gtest.h"
+#include "modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h"
+
+#define FIRSTLINELEN 40
+
+int main(int argc, char* argv[]) {
+  if (argc < 3) {
+    printf("Usage: RTPcat in1.rtp int2.rtp [...] out.rtp\n");
+    exit(1);
+  }
+
+  FILE* in_file = fopen(argv[1], "rb");
+  if (!in_file) {
+    printf("Cannot open input file %s\n", argv[1]);
+    return -1;
+  }
+
+  FILE* out_file = fopen(argv[argc - 1], "wb");  // Last parameter is out file.
+  if (!out_file) {
+    printf("Cannot open output file %s\n", argv[argc - 1]);
+    return -1;
+  }
+  printf("Output RTP file: %s\n\n", argv[argc - 1]);
+
+  // Read file header and write directly to output file.
+  char firstline[FIRSTLINELEN];
+  const unsigned int kRtpDumpHeaderSize = 4 + 4 + 4 + 2 + 2;
+  EXPECT_TRUE(fgets(firstline, FIRSTLINELEN, in_file) != NULL);
+  EXPECT_GT(fputs(firstline, out_file), 0);
+  EXPECT_EQ(kRtpDumpHeaderSize, fread(firstline, 1, kRtpDumpHeaderSize,
+                                      in_file));
+  EXPECT_EQ(kRtpDumpHeaderSize, fwrite(firstline, 1, kRtpDumpHeaderSize,
+                                       out_file));
+
+  // Close input file and re-open it later (easier to write the loop below).
+  fclose(in_file);
+
+  for (int i = 1; i < argc - 1; i++) {
+    in_file = fopen(argv[i], "rb");
+    if (!in_file) {
+      printf("Cannot open input file %s\n", argv[i]);
+      return -1;
+    }
+    printf("Input RTP file: %s\n", argv[i]);
+
+    NETEQTEST_RTPpacket::skipFileHeader(in_file);
+    NETEQTEST_RTPpacket packet;
+    int pack_len = packet.readFromFile(in_file);
+    if (pack_len < 0) {
+      exit(1);
+    }
+    while (pack_len >= 0) {
+      packet.writeToFile(out_file);
+      pack_len = packet.readFromFile(in_file);
+    }
+    fclose(in_file);
+  }
+  fclose(out_file);
+  return 0;
+}
diff --git a/src/modules/audio_coding/neteq/test/RTPchange.cc b/src/modules/audio_coding/neteq/test/RTPchange.cc
new file mode 100644
index 0000000..259a773
--- /dev/null
+++ b/src/modules/audio_coding/neteq/test/RTPchange.cc
@@ -0,0 +1,133 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+
+#include <algorithm>
+#include <vector>
+
+#include "modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h"
+#include "modules/audio_coding/neteq/test/NETEQTEST_DummyRTPpacket.h"
+
+#define FIRSTLINELEN 40
+//#define WEBRTC_DUMMY_RTP
+
+static bool pktCmp(NETEQTEST_RTPpacket *a, NETEQTEST_RTPpacket *b) {
+  return (a->time() < b->time());
+}
+
+int main(int argc, char* argv[]) {
+  FILE* in_file = fopen(argv[1], "rb");
+  if (!in_file) {
+    printf("Cannot open input file %s\n", argv[1]);
+    return -1;
+  }
+  printf("Input RTP file: %s\n", argv[1]);
+
+  FILE* stat_file = fopen(argv[2], "rt");
+  if (!stat_file) {
+    printf("Cannot open timing file %s\n", argv[2]);
+    return -1;
+  }
+  printf("Timing file: %s\n", argv[2]);
+
+  FILE* out_file = fopen(argv[3], "wb");
+  if (!out_file) {
+    printf("Cannot open output file %s\n", argv[3]);
+    return -1;
+  }
+  printf("Output RTP file: %s\n\n", argv[3]);
+
+  // Read all statistics and insert into map.
+  // Read first line.
+  char temp_str[100];
+  if (fgets(temp_str, 100, stat_file) == NULL) {
+    printf("Failed to read timing file %s\n", argv[2]);
+    return -1;
+  }
+  // Define map.
+  std::map<std::pair<uint16_t, uint32_t>, uint32_t> packet_stats;
+  uint16_t seq_no;
+  uint32_t ts;
+  uint32_t send_time;
+
+  while (fscanf(stat_file,
+                "%hu %u %u %*i %*i\n", &seq_no, &ts, &send_time) == 3) {
+    std::pair<uint16_t, uint32_t>
+        temp_pair = std::pair<uint16_t, uint32_t>(seq_no, ts);
+
+    packet_stats[temp_pair] = send_time;
+  }
+
+  fclose(stat_file);
+
+  // Read file header and write directly to output file.
+  char first_line[FIRSTLINELEN];
+  if (fgets(first_line, FIRSTLINELEN, in_file) == NULL) {
+    printf("Failed to read first line of input file %s\n", argv[1]);
+    return -1;
+  }
+  fputs(first_line, out_file);
+  // start_sec + start_usec + source + port + padding
+  const unsigned int kRtpDumpHeaderSize = 4 + 4 + 4 + 2 + 2;
+  if (fread(first_line, 1, kRtpDumpHeaderSize, in_file)
+      != kRtpDumpHeaderSize) {
+    printf("Failed to read RTP dump header from input file %s\n", argv[1]);
+    return -1;
+  }
+  if (fwrite(first_line, 1, kRtpDumpHeaderSize, out_file)
+      != kRtpDumpHeaderSize) {
+    printf("Failed to write RTP dump header to output file %s\n", argv[3]);
+    return -1;
+  }
+
+  std::vector<NETEQTEST_RTPpacket *> packet_vec;
+
+  while (1) {
+    // Insert in vector.
+#ifdef WEBRTC_DUMMY_RTP
+    NETEQTEST_RTPpacket *new_packet = new NETEQTEST_DummyRTPpacket();
+#else
+    NETEQTEST_RTPpacket *new_packet = new NETEQTEST_RTPpacket();
+#endif
+    if (new_packet->readFromFile(in_file) < 0) {
+      // End of file.
+      break;
+    }
+
+    // Look for new send time in statistics vector.
+    std::pair<uint16_t, uint32_t> temp_pair =
+        std::pair<uint16_t, uint32_t>(new_packet->sequenceNumber(),
+                                      new_packet->timeStamp());
+
+    uint32_t new_send_time = packet_stats[temp_pair];
+    new_packet->setTime(new_send_time);  // Set new send time.
+    packet_vec.push_back(new_packet);  // Insert in vector.
+  }
+
+  // Sort the vector according to send times.
+  std::sort(packet_vec.begin(), packet_vec.end(), pktCmp);
+
+  std::vector<NETEQTEST_RTPpacket *>::iterator it;
+  for (it = packet_vec.begin(); it != packet_vec.end(); it++) {
+    // Write to out file.
+    if ((*it)->writeToFile(out_file) < 0) {
+      printf("Error writing to file\n");
+      return -1;
+    }
+    // Delete packet.
+    delete *it;
+  }
+
+  fclose(in_file);
+  fclose(out_file);
+
+  return 0;
+}
diff --git a/src/modules/audio_coding/neteq/test/RTPencode.cc b/src/modules/audio_coding/neteq/test/RTPencode.cc
new file mode 100644
index 0000000..3aaaf6c
--- /dev/null
+++ b/src/modules/audio_coding/neteq/test/RTPencode.cc
@@ -0,0 +1,2070 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//TODO(hlundin): Reformat file to meet style guide.
+
+/* header includes */
+#include "typedefs.h"
+#include "stdio.h"
+#include "webrtc_neteq.h" // needed for enum WebRtcNetEQDecoder
+#include <string.h>
+#include <stdlib.h>
+#include <cassert>
+
+#ifdef WIN32
+#include <winsock2.h>
+#endif
+#ifdef WEBRTC_LINUX
+#include <netinet/in.h>
+#endif
+
+
+/************************/
+/* Define payload types */
+/************************/
+
+#include "PayloadTypes.h"
+
+
+
+/*********************/
+/* Misc. definitions */
+/*********************/
+
+#define STOPSENDTIME 3000
+#define RESTARTSENDTIME 0 //162500
+#define FIRSTLINELEN 40
+#define CHECK_NOT_NULL(a) if((a)==0){printf("\n %s \n line: %d \nerror at %s\n",__FILE__,__LINE__,#a );return(-1);}
+
+//#define MULTIPLE_SAME_TIMESTAMP
+#define REPEAT_PACKET_DISTANCE 17
+#define REPEAT_PACKET_COUNT 1  // number of extra packets to send
+
+//#define INSERT_OLD_PACKETS
+#define OLD_PACKET 5 // how many seconds too old should the packet be?
+
+//#define TIMESTAMP_WRAPAROUND
+
+//#define RANDOM_DATA
+//#define RANDOM_PAYLOAD_DATA
+#define RANDOM_SEED 10
+
+//#define INSERT_DTMF_PACKETS
+//#define NO_DTMF_OVERDUB
+#define DTMF_PACKET_INTERVAL 2000
+#define DTMF_DURATION 500
+
+#define STEREO_MODE_FRAME 0
+#define STEREO_MODE_SAMPLE_1 1 //1 octet per sample
+#define STEREO_MODE_SAMPLE_2 2 //2 octets per sample
+
+/*************************/
+/* Function declarations */
+/*************************/
+
+void NetEQTest_GetCodec_and_PT(char * name, enum WebRtcNetEQDecoder *codec, int *PT, int frameLen, int *fs, int *bitrate, int *useRed);
+int NetEQTest_init_coders(enum WebRtcNetEQDecoder coder, int enc_frameSize, int bitrate, int sampfreq , int vad, int numChannels);
+void defineCodecs(enum WebRtcNetEQDecoder *usedCodec, int *noOfCodecs );
+int NetEQTest_free_coders(enum WebRtcNetEQDecoder coder, int numChannels);
+int NetEQTest_encode(int coder, WebRtc_Word16 *indata, int frameLen, unsigned char * encoded,int sampleRate , int * vad, int useVAD, int bitrate, int numChannels);
+void makeRTPheader(unsigned char* rtp_data, int payloadType, int seqNo, WebRtc_UWord32 timestamp, WebRtc_UWord32 ssrc);
+int makeRedundantHeader(unsigned char* rtp_data, int *payloadType, int numPayloads, WebRtc_UWord32 *timestamp, WebRtc_UWord16 *blockLen,
+                        int seqNo, WebRtc_UWord32 ssrc);
+int makeDTMFpayload(unsigned char* payload_data, int Event, int End, int Volume, int Duration);
+void stereoDeInterleave(WebRtc_Word16* audioSamples, int numSamples);
+void stereoInterleave(unsigned char* data, int dataLen, int stride);
+
+/*********************/
+/* Codec definitions */
+/*********************/
+
+#include "webrtc_vad.h"
+
+#if ((defined CODEC_PCM16B)||(defined NETEQ_ARBITRARY_CODEC))
+	#include "pcm16b.h"
+#endif
+#ifdef CODEC_G711
+	#include "g711_interface.h"
+#endif
+#ifdef CODEC_G729
+	#include "G729Interface.h"
+#endif
+#ifdef CODEC_G729_1
+	#include "G729_1Interface.h"
+#endif
+#ifdef CODEC_AMR
+	#include "AMRInterface.h"
+	#include "AMRCreation.h"
+#endif
+#ifdef CODEC_AMRWB
+	#include "AMRWBInterface.h"
+	#include "AMRWBCreation.h"
+#endif
+#ifdef CODEC_ILBC
+	#include "ilbc.h"
+#endif
+#if (defined CODEC_ISAC || defined CODEC_ISAC_SWB) 
+	#include "isac.h"
+#endif
+#ifdef NETEQ_ISACFIX_CODEC
+	#include "isacfix.h"
+	#ifdef CODEC_ISAC
+		#error Cannot have both ISAC and ISACfix defined. Please de-select one in the beginning of RTPencode.cpp
+	#endif
+#endif
+#ifdef CODEC_G722
+	#include "g722_interface.h"
+#endif
+#ifdef CODEC_G722_1_24
+	#include "G722_1Interface.h"
+#endif
+#ifdef CODEC_G722_1_32
+	#include "G722_1Interface.h"
+#endif
+#ifdef CODEC_G722_1_16
+	#include "G722_1Interface.h"
+#endif
+#ifdef CODEC_G722_1C_24
+	#include "G722_1Interface.h"
+#endif
+#ifdef CODEC_G722_1C_32
+	#include "G722_1Interface.h"
+#endif
+#ifdef CODEC_G722_1C_48
+	#include "G722_1Interface.h"
+#endif
+#ifdef CODEC_G726
+    #include "G726Creation.h"
+    #include "G726Interface.h"
+#endif
+#ifdef CODEC_GSMFR
+	#include "GSMFRInterface.h"
+	#include "GSMFRCreation.h"
+#endif
+#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
+    defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
+  #include "webrtc_cng.h"
+#endif
+#if ((defined CODEC_SPEEX_8)||(defined CODEC_SPEEX_16))
+	#include "SpeexInterface.h"
+#endif
+#ifdef CODEC_CELT_32
+#include "celt_interface.h"
+#endif
+
+
+/***********************************/
+/* Global codec instance variables */
+/***********************************/
+
+WebRtcVadInst *VAD_inst[2];
+
+#ifdef CODEC_G722
+    G722EncInst *g722EncState[2];
+#endif
+
+#ifdef CODEC_G722_1_24
+	G722_1_24_encinst_t *G722_1_24enc_inst[2];
+#endif
+#ifdef CODEC_G722_1_32
+	G722_1_32_encinst_t *G722_1_32enc_inst[2];
+#endif
+#ifdef CODEC_G722_1_16
+	G722_1_16_encinst_t *G722_1_16enc_inst[2];
+#endif
+#ifdef CODEC_G722_1C_24
+	G722_1C_24_encinst_t *G722_1C_24enc_inst[2];
+#endif
+#ifdef CODEC_G722_1C_32
+	G722_1C_32_encinst_t *G722_1C_32enc_inst[2];
+#endif
+#ifdef CODEC_G722_1C_48
+	G722_1C_48_encinst_t *G722_1C_48enc_inst[2];
+#endif
+#ifdef CODEC_G726
+    G726_encinst_t *G726enc_inst[2];
+#endif
+#ifdef CODEC_G729
+	G729_encinst_t *G729enc_inst[2];
+#endif
+#ifdef CODEC_G729_1
+	G729_1_inst_t *G729_1_inst[2];
+#endif
+#ifdef CODEC_AMR
+	AMR_encinst_t *AMRenc_inst[2];
+	WebRtc_Word16		  AMR_bitrate;
+#endif
+#ifdef CODEC_AMRWB
+	AMRWB_encinst_t *AMRWBenc_inst[2];
+	WebRtc_Word16		  AMRWB_bitrate;
+#endif
+#ifdef CODEC_ILBC
+	iLBC_encinst_t *iLBCenc_inst[2];
+#endif
+#ifdef CODEC_ISAC
+	ISACStruct *ISAC_inst[2];
+#endif
+#ifdef NETEQ_ISACFIX_CODEC
+	ISACFIX_MainStruct *ISAC_inst[2];
+#endif
+#ifdef CODEC_ISAC_SWB
+	ISACStruct *ISACSWB_inst[2];
+#endif
+#ifdef CODEC_GSMFR
+	GSMFR_encinst_t *GSMFRenc_inst[2];
+#endif
+#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
+    defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
+	CNG_enc_inst *CNGenc_inst[2];
+#endif
+#ifdef CODEC_SPEEX_8
+	SPEEX_encinst_t *SPEEX8enc_inst[2];
+#endif
+#ifdef CODEC_SPEEX_16
+	SPEEX_encinst_t *SPEEX16enc_inst[2];
+#endif
+#ifdef CODEC_CELT_32
+  CELT_encinst_t *CELT32enc_inst[2];
+#endif
+#ifdef CODEC_G711
+    void *G711state[2]={NULL, NULL};
+#endif
+
+
+int main(int argc, char* argv[])
+{
+	int packet_size, fs;
+	enum WebRtcNetEQDecoder usedCodec;
+	int payloadType;
+	int bitrate = 0;
+	int useVAD, vad;
+    int useRed=0;
+	int len, enc_len;
+	WebRtc_Word16 org_data[4000];
+	unsigned char rtp_data[8000];
+	WebRtc_Word16 seqNo=0xFFF;
+	WebRtc_UWord32 ssrc=1235412312;
+	WebRtc_UWord32 timestamp=0xAC1245;
+        WebRtc_UWord16 length, plen;
+	WebRtc_UWord32 offset;
+	double sendtime = 0;
+    int red_PT[2] = {0};
+    WebRtc_UWord32 red_TS[2] = {0};
+    WebRtc_UWord16 red_len[2] = {0};
+    int RTPheaderLen=12;
+	unsigned char red_data[8000];
+#ifdef INSERT_OLD_PACKETS
+	WebRtc_UWord16 old_length, old_plen;
+	int old_enc_len;
+	int first_old_packet=1;
+	unsigned char old_rtp_data[8000];
+	int packet_age=0;
+#endif
+#ifdef INSERT_DTMF_PACKETS
+	int NTone = 1;
+	int DTMFfirst = 1;
+	WebRtc_UWord32 DTMFtimestamp;
+    bool dtmfSent = false;
+#endif
+    bool usingStereo = false;
+    int stereoMode = 0;
+    int numChannels = 1;
+
+	/* check number of parameters */
+	if ((argc != 6) && (argc != 7)) {
+		/* print help text and exit */
+		printf("Application to encode speech into an RTP stream.\n");
+		printf("The program reads a PCM file and encodes is using the specified codec.\n");
+		printf("The coded speech is packetized in RTP packest and written to the output file.\n");
+		printf("The format of the RTP stream file is simlilar to that of rtpplay,\n");
+		printf("but with the receive time euqal to 0 for all packets.\n");
+		printf("Usage:\n\n");
+		printf("%s PCMfile RTPfile frameLen codec useVAD bitrate\n", argv[0]);
+		printf("where:\n");
+
+		printf("PCMfile      : PCM speech input file\n\n");
+
+		printf("RTPfile      : RTP stream output file\n\n");
+
+		printf("frameLen     : 80...960...  Number of samples per packet (limit depends on codec)\n\n");
+
+		printf("codecName\n");
+#ifdef CODEC_PCM16B
+		printf("             : pcm16b       16 bit PCM (8kHz)\n");
+#endif
+#ifdef CODEC_PCM16B_WB
+		printf("             : pcm16b_wb   16 bit PCM (16kHz)\n");
+#endif
+#ifdef CODEC_PCM16B_32KHZ
+		printf("             : pcm16b_swb32 16 bit PCM (32kHz)\n");
+#endif
+#ifdef CODEC_PCM16B_48KHZ
+		printf("             : pcm16b_swb48 16 bit PCM (48kHz)\n");
+#endif
+#ifdef CODEC_G711
+		printf("             : pcma         g711 A-law (8kHz)\n");
+#endif
+#ifdef CODEC_G711
+		printf("             : pcmu         g711 u-law (8kHz)\n");
+#endif
+#ifdef CODEC_G729
+		printf("             : g729         G729 (8kHz and 8kbps) CELP (One-Three frame(s)/packet)\n");
+#endif
+#ifdef CODEC_G729_1
+		printf("             : g729.1       G729.1 (16kHz) variable rate (8--32 kbps)\n");
+#endif
+#ifdef CODEC_G722_1_16
+		printf("             : g722.1_16    G722.1 coder (16kHz) (g722.1 with 16kbps)\n");
+#endif
+#ifdef CODEC_G722_1_24
+		printf("             : g722.1_24    G722.1 coder (16kHz) (the 24kbps version)\n");
+#endif
+#ifdef CODEC_G722_1_32
+		printf("             : g722.1_32    G722.1 coder (16kHz) (the 32kbps version)\n");
+#endif
+#ifdef CODEC_G722_1C_24
+		printf("             : g722.1C_24    G722.1 C coder (32kHz) (the 24kbps version)\n");
+#endif
+#ifdef CODEC_G722_1C_32
+		printf("             : g722.1C_32    G722.1 C coder (32kHz) (the 32kbps version)\n");
+#endif
+#ifdef CODEC_G722_1C_48
+		printf("             : g722.1C_48    G722.1 C coder (32kHz) (the 48kbps)\n");
+#endif
+
+#ifdef CODEC_G726
+        printf("             : g726_16      G726 coder (8kHz) 16kbps\n");
+        printf("             : g726_24      G726 coder (8kHz) 24kbps\n");
+        printf("             : g726_32      G726 coder (8kHz) 32kbps\n");
+        printf("             : g726_40      G726 coder (8kHz) 40kbps\n");
+#endif
+#ifdef CODEC_AMR
+		printf("             : AMRXk        Adaptive Multi Rate CELP codec (8kHz)\n");
+		printf("                            X = 4.75, 5.15, 5.9, 6.7, 7.4, 7.95, 10.2 or 12.2\n");
+#endif
+#ifdef CODEC_AMRWB
+		printf("             : AMRwbXk      Adaptive Multi Rate Wideband CELP codec (16kHz)\n");
+		printf("                            X = 7, 9, 12, 14, 16, 18, 20, 23 or 24\n");
+#endif
+#ifdef CODEC_ILBC
+		printf("             : ilbc         iLBC codec (8kHz and 13.8kbps)\n");
+#endif
+#ifdef CODEC_ISAC
+		printf("             : isac         iSAC (16kHz and 32.0 kbps). To set rate specify a rate parameter as last parameter\n");
+#endif
+#ifdef CODEC_ISAC_SWB
+		printf("             : isacswb       iSAC SWB (32kHz and 32.0-52.0 kbps). To set rate specify a rate parameter as last parameter\n");
+#endif
+#ifdef CODEC_GSMFR
+		printf("             : gsmfr        GSM FR codec (8kHz and 13kbps)\n");
+#endif
+#ifdef CODEC_G722
+		printf("             : g722         g722 coder (16kHz) (the 64kbps version)\n");
+#endif
+#ifdef CODEC_SPEEX_8
+		printf("             : speex8       speex coder (8 kHz)\n");
+#endif
+#ifdef CODEC_SPEEX_16
+		printf("             : speex16      speex coder (16 kHz)\n");
+#endif
+#ifdef CODEC_CELT_32
+    printf("             : celt32       celt coder (32 kHz)\n");
+#endif
+#ifdef CODEC_RED
+#ifdef CODEC_G711
+		printf("             : red_pcm      Redundancy RTP packet with 2*G711A frames\n");
+#endif
+#ifdef CODEC_ISAC
+		printf("             : red_isac     Redundancy RTP packet with 2*iSAC frames\n");
+#endif
+#endif
+        printf("\n");
+
+#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
+    defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
+		printf("useVAD       : 0 Voice Activity Detection is switched off\n");
+		printf("             : 1 Voice Activity Detection is switched on\n\n");
+#else
+		printf("useVAD       : 0 Voice Activity Detection switched off (on not supported)\n\n");
+#endif
+		printf("bitrate      : Codec bitrate in bps (only applies to vbr codecs)\n\n");
+
+		return(0);
+	}
+
+	FILE* in_file=fopen(argv[1],"rb");
+	CHECK_NOT_NULL(in_file);
+	printf("Input file: %s\n",argv[1]);
+	FILE* out_file=fopen(argv[2],"wb");
+	CHECK_NOT_NULL(out_file);
+	printf("Output file: %s\n\n",argv[2]);
+	packet_size=atoi(argv[3]);
+	CHECK_NOT_NULL(packet_size);
+	printf("Packet size: %i\n",packet_size);
+
+    // check for stereo
+    if(argv[4][strlen(argv[4])-1] == '*') {
+        // use stereo
+        usingStereo = true;
+        numChannels = 2;
+        argv[4][strlen(argv[4])-1] = '\0';
+    }
+
+	NetEQTest_GetCodec_and_PT(argv[4], &usedCodec, &payloadType, packet_size, &fs, &bitrate, &useRed);
+
+    if(useRed) {
+        RTPheaderLen = 12 + 4 + 1; /* standard RTP = 12; 4 bytes per redundant payload, except last one which is 1 byte */
+    }
+
+	useVAD=atoi(argv[5]);
+#if !(defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
+    defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
+	if (useVAD!=0) {
+		printf("Error: this simulation does not support VAD/DTX/CNG\n");
+	}
+#endif
+	
+    // check stereo type
+    if(usingStereo)
+    {
+        switch(usedCodec) 
+        {
+            // sample based codecs 
+        case kDecoderPCMu:
+        case kDecoderPCMa:
+        case kDecoderG722:
+            {
+                // 1 octet per sample
+                stereoMode = STEREO_MODE_SAMPLE_1;
+                break;
+            }
+        case kDecoderPCM16B:
+        case kDecoderPCM16Bwb:
+        case kDecoderPCM16Bswb32kHz:
+        case kDecoderPCM16Bswb48kHz:
+            {
+                // 2 octets per sample
+                stereoMode = STEREO_MODE_SAMPLE_2;
+                break;
+            }
+
+            // fixed-rate frame codecs (with internal VAD)
+        case kDecoderG729:
+            {
+                if(useVAD) {
+                    printf("Cannot use codec-internal VAD and stereo\n");
+                    exit(0);
+                }
+                // break intentionally omitted
+            }
+        case kDecoderG722_1_16:
+        case kDecoderG722_1_24:
+        case kDecoderG722_1_32:
+        case kDecoderG722_1C_24:
+        case kDecoderG722_1C_32:
+        case kDecoderG722_1C_48:
+            {
+                stereoMode = STEREO_MODE_FRAME;
+                break;
+            }
+        default:
+            {
+                printf("Cannot use codec %s as stereo codec\n", argv[4]);
+                exit(0);
+            }
+        }
+    }
+
+	if ((usedCodec == kDecoderISAC) || (usedCodec == kDecoderISACswb))
+    {
+        if (argc != 7)
+        {
+            if (usedCodec == kDecoderISAC)
+            {
+                bitrate = 32000;
+                printf(
+                    "Running iSAC at default bitrate of 32000 bps (to specify explicitly add the bps as last parameter)\n");
+            }
+            else // (usedCodec==kDecoderISACswb)
+            {
+                bitrate = 56000;
+                printf(
+                    "Running iSAC at default bitrate of 56000 bps (to specify explicitly add the bps as last parameter)\n");
+            }
+        }
+        else
+        {
+            bitrate = atoi(argv[6]);
+            if (usedCodec == kDecoderISAC)
+            {
+                if ((bitrate < 10000) || (bitrate > 32000))
+                {
+                    printf(
+                        "Error: iSAC bitrate must be between 10000 and 32000 bps (%i is invalid)\n",
+                        bitrate);
+                    exit(0);
+                }
+                printf("Running iSAC at bitrate of %i bps\n", bitrate);
+            }
+            else // (usedCodec==kDecoderISACswb)
+            {
+                if ((bitrate < 32000) || (bitrate > 56000))
+                {
+                    printf(
+                        "Error: iSAC SWB bitrate must be between 32000 and 56000 bps (%i is invalid)\n",
+                        bitrate);
+                    exit(0);
+                }
+            }
+        }
+    }
+    else
+    {
+        if (argc == 7)
+        {
+            printf(
+                "Error: Bitrate parameter can only be specified for iSAC, G.723, and G.729.1\n");
+            exit(0);
+        }
+    }
+	
+    if(useRed) {
+        printf("Redundancy engaged. ");
+    }
+	printf("Used codec: %i\n",usedCodec);
+	printf("Payload type: %i\n",payloadType);
+	
+	NetEQTest_init_coders(usedCodec, packet_size, bitrate, fs, useVAD, numChannels);
+
+	/* write file header */
+	//fprintf(out_file, "#!RTPencode%s\n", "1.0");
+	fprintf(out_file, "#!rtpplay%s \n", "1.0"); // this is the string that rtpplay needs
+	WebRtc_UWord32 dummy_variable = 0; // should be converted to network endian format, but does not matter when 0
+        if (fwrite(&dummy_variable, 4, 1, out_file) != 1) {
+          return -1;
+        }
+        if (fwrite(&dummy_variable, 4, 1, out_file) != 1) {
+          return -1;
+        }
+        if (fwrite(&dummy_variable, 4, 1, out_file) != 1) {
+          return -1;
+        }
+        if (fwrite(&dummy_variable, 2, 1, out_file) != 1) {
+          return -1;
+        }
+        if (fwrite(&dummy_variable, 2, 1, out_file) != 1) {
+          return -1;
+        }
+
+#ifdef TIMESTAMP_WRAPAROUND
+	timestamp = 0xFFFFFFFF - fs*10; /* should give wrap-around in 10 seconds */
+#endif
+#if defined(RANDOM_DATA) | defined(RANDOM_PAYLOAD_DATA)
+	srand(RANDOM_SEED);
+#endif
+
+    /* if redundancy is used, the first redundant payload is zero length */
+    red_len[0] = 0;
+
+	/* read first frame */
+	len=fread(org_data,2,packet_size * numChannels,in_file) / numChannels;
+
+    /* de-interleave if stereo */
+    if ( usingStereo )
+    {
+        stereoDeInterleave(org_data, len * numChannels);
+    }
+
+	while (len==packet_size) {
+
+#ifdef INSERT_DTMF_PACKETS
+        dtmfSent = false;
+
+        if ( sendtime >= NTone * DTMF_PACKET_INTERVAL ) {
+            if ( sendtime < NTone * DTMF_PACKET_INTERVAL + DTMF_DURATION ) {
+                // tone has not ended
+                if (DTMFfirst==1) {
+                    DTMFtimestamp = timestamp; // save this timestamp
+                    DTMFfirst=0;
+                }
+                makeRTPheader(rtp_data, NETEQ_CODEC_AVT_PT, seqNo,DTMFtimestamp, ssrc);
+                enc_len = makeDTMFpayload(&rtp_data[12], NTone % 12, 0, 4, (int) (sendtime - NTone * DTMF_PACKET_INTERVAL)*(fs/1000) + len);
+            }
+            else {
+                // tone has ended
+                makeRTPheader(rtp_data, NETEQ_CODEC_AVT_PT, seqNo,DTMFtimestamp, ssrc);
+                enc_len = makeDTMFpayload(&rtp_data[12], NTone % 12, 1, 4, DTMF_DURATION*(fs/1000));
+                NTone++;
+                DTMFfirst=1;
+            }
+
+            /* write RTP packet to file */
+            length = htons(12 + enc_len + 8);
+            plen = htons(12 + enc_len);
+            offset = (WebRtc_UWord32) sendtime; //(timestamp/(fs/1000));
+            offset = htonl(offset);
+            if (fwrite(&length, 2, 1, out_file) != 1) {
+              return -1;
+            }
+            if (fwrite(&plen, 2, 1, out_file) != 1) {
+              return -1;
+            }
+            if (fwrite(&offset, 4, 1, out_file) != 1) {
+              return -1;
+            }
+            if (fwrite(rtp_data, 12 + enc_len, 1, out_file) != 1) {
+              return -1;
+            }
+
+            dtmfSent = true;
+        }
+#endif
+
+#ifdef NO_DTMF_OVERDUB
+        /* If DTMF is sent, we should not send any speech packets during the same time */
+        if (dtmfSent) {
+            enc_len = 0;
+        }
+        else {
+#endif
+		/* encode frame */
+		enc_len=NetEQTest_encode(usedCodec, org_data, packet_size, &rtp_data[12] ,fs,&vad, useVAD, bitrate, numChannels);
+		if (enc_len==-1) {
+			printf("Error encoding frame\n");
+			exit(0);
+		}
+
+        if ( usingStereo &&
+            stereoMode != STEREO_MODE_FRAME &&
+            vad == 1 )
+        {
+            // interleave the encoded payload for sample-based codecs (not for CNG)
+            stereoInterleave(&rtp_data[12], enc_len, stereoMode);
+        }
+#ifdef NO_DTMF_OVERDUB
+        }
+#endif
+		
+		if (enc_len > 0 && (sendtime <= STOPSENDTIME || sendtime > RESTARTSENDTIME)) {
+            if(useRed) {
+                if(red_len[0] > 0) {
+                    memmove(&rtp_data[RTPheaderLen+red_len[0]], &rtp_data[12], enc_len);
+                    memcpy(&rtp_data[RTPheaderLen], red_data, red_len[0]);
+
+                    red_len[1] = enc_len;
+                    red_TS[1] = timestamp;
+                    if(vad)
+                        red_PT[1] = payloadType;
+                    else
+                        red_PT[1] = NETEQ_CODEC_CN_PT;
+
+                    makeRedundantHeader(rtp_data, red_PT, 2, red_TS, red_len, seqNo++, ssrc);
+
+
+                    enc_len += red_len[0] + RTPheaderLen - 12;
+                }
+                else { // do not use redundancy payload for this packet, i.e., only last payload
+                    memmove(&rtp_data[RTPheaderLen-4], &rtp_data[12], enc_len);
+                    //memcpy(&rtp_data[RTPheaderLen], red_data, red_len[0]);
+
+                    red_len[1] = enc_len;
+                    red_TS[1] = timestamp;
+                    if(vad)
+                        red_PT[1] = payloadType;
+                    else
+                        red_PT[1] = NETEQ_CODEC_CN_PT;
+
+                    makeRedundantHeader(rtp_data, red_PT, 2, red_TS, red_len, seqNo++, ssrc);
+
+
+                    enc_len += red_len[0] + RTPheaderLen - 4 - 12; // 4 is length of redundancy header (not used)
+                }
+            }
+            else {
+                
+                /* make RTP header */
+                if (vad) // regular speech data
+                    makeRTPheader(rtp_data, payloadType, seqNo++,timestamp, ssrc);
+                else // CNG data
+                    makeRTPheader(rtp_data, NETEQ_CODEC_CN_PT, seqNo++,timestamp, ssrc);
+                
+            }
+#ifdef MULTIPLE_SAME_TIMESTAMP
+			int mult_pack=0;
+			do {
+#endif //MULTIPLE_SAME_TIMESTAMP
+			/* write RTP packet to file */
+                          length = htons(12 + enc_len + 8);
+                          plen = htons(12 + enc_len);
+                          offset = (WebRtc_UWord32) sendtime;
+                          //(timestamp/(fs/1000));
+                          offset = htonl(offset);
+                          if (fwrite(&length, 2, 1, out_file) != 1) {
+                            return -1;
+                          }
+                          if (fwrite(&plen, 2, 1, out_file) != 1) {
+                            return -1;
+                          }
+                          if (fwrite(&offset, 4, 1, out_file) != 1) {
+                            return -1;
+                          }
+#ifdef RANDOM_DATA
+			for (int k=0; k<12+enc_len; k++) {
+				rtp_data[k] = rand() + rand();
+			}
+#endif
+#ifdef RANDOM_PAYLOAD_DATA
+			for (int k=12; k<12+enc_len; k++) {
+				rtp_data[k] = rand() + rand();
+			}
+#endif
+                        if (fwrite(rtp_data, 12 + enc_len, 1, out_file) != 1) {
+                          return -1;
+                        }
+#ifdef MULTIPLE_SAME_TIMESTAMP
+			} while ( (seqNo%REPEAT_PACKET_DISTANCE == 0) && (mult_pack++ < REPEAT_PACKET_COUNT) );
+#endif //MULTIPLE_SAME_TIMESTAMP
+
+#ifdef INSERT_OLD_PACKETS
+			if (packet_age >= OLD_PACKET*fs) {
+				if (!first_old_packet) {
+                                  // send the old packet
+                                  if (fwrite(&old_length, 2, 1,
+                                             out_file) != 1) {
+                                    return -1;
+                                  }
+                                  if (fwrite(&old_plen, 2, 1,
+                                             out_file) != 1) {
+                                    return -1;
+                                  }
+                                  if (fwrite(&offset, 4, 1,
+                                             out_file) != 1) {
+                                    return -1;
+                                  }
+                                  if (fwrite(old_rtp_data, 12 + old_enc_len,
+                                             1, out_file) != 1) {
+                                    return -1;
+                                  }
+				}
+				// store current packet as old
+				old_length=length;
+				old_plen=plen;
+				memcpy(old_rtp_data,rtp_data,12+enc_len);
+				old_enc_len=enc_len;
+				first_old_packet=0;
+				packet_age=0;
+
+			}
+			packet_age += packet_size;
+#endif
+			
+            if(useRed) {
+                /* move data to redundancy store */
+#ifdef CODEC_ISAC
+                if(usedCodec==kDecoderISAC)
+                {
+                    assert(!usingStereo); // Cannot handle stereo yet
+                    red_len[0] = WebRtcIsac_GetRedPayload(ISAC_inst[0], (WebRtc_Word16*)red_data);
+                }
+                else
+                {
+#endif
+                    memcpy(red_data, &rtp_data[RTPheaderLen+red_len[0]], enc_len);
+                    red_len[0]=red_len[1];
+#ifdef CODEC_ISAC
+                }
+#endif
+                red_TS[0]=red_TS[1];
+                red_PT[0]=red_PT[1];
+            }
+            
+		}
+
+		/* read next frame */
+        len=fread(org_data,2,packet_size * numChannels,in_file) / numChannels;
+        /* de-interleave if stereo */
+        if ( usingStereo )
+        {
+            stereoDeInterleave(org_data, len * numChannels);
+        }
+
+        if (payloadType==NETEQ_CODEC_G722_PT)
+            timestamp+=len>>1;
+        else
+            timestamp+=len;
+
+		sendtime += (double) len/(fs/1000);
+	}
+	
+	NetEQTest_free_coders(usedCodec, numChannels);
+	fclose(in_file);
+	fclose(out_file);
+    printf("Done!\n");
+
+	return(0);
+}
+
+
+
+
+/****************/
+/* Subfunctions */
+/****************/
+
+void NetEQTest_GetCodec_and_PT(char * name, enum WebRtcNetEQDecoder *codec, int *PT, int frameLen, int *fs, int *bitrate, int *useRed) {
+
+	*bitrate = 0; /* Default bitrate setting */
+    *useRed = 0; /* Default no redundancy */
+
+	if(!strcmp(name,"pcmu")){
+		*codec=kDecoderPCMu;
+		*PT=NETEQ_CODEC_PCMU_PT;
+		*fs=8000;
+	}
+	else if(!strcmp(name,"pcma")){
+		*codec=kDecoderPCMa;
+		*PT=NETEQ_CODEC_PCMA_PT;
+		*fs=8000;
+	}
+	else if(!strcmp(name,"pcm16b")){
+		*codec=kDecoderPCM16B;
+		*PT=NETEQ_CODEC_PCM16B_PT;
+		*fs=8000;
+	}
+	else if(!strcmp(name,"pcm16b_wb")){
+		*codec=kDecoderPCM16Bwb;
+		*PT=NETEQ_CODEC_PCM16B_WB_PT;
+		*fs=16000;
+	}
+	else if(!strcmp(name,"pcm16b_swb32")){
+		*codec=kDecoderPCM16Bswb32kHz;
+		*PT=NETEQ_CODEC_PCM16B_SWB32KHZ_PT;
+		*fs=32000;
+	}
+	else if(!strcmp(name,"pcm16b_swb48")){
+		*codec=kDecoderPCM16Bswb48kHz;
+		*PT=NETEQ_CODEC_PCM16B_SWB48KHZ_PT;
+		*fs=48000;
+	}
+	else if(!strcmp(name,"g722")){
+		*codec=kDecoderG722;
+		*PT=NETEQ_CODEC_G722_PT;
+		*fs=16000;
+	}
+	else if(!strcmp(name,"g722.1_16")){
+		*codec=kDecoderG722_1_16;
+		*PT=NETEQ_CODEC_G722_1_16_PT;
+		*fs=16000;
+	}
+	else if(!strcmp(name,"g722.1_24")){
+		*codec=kDecoderG722_1_24;
+		*PT=NETEQ_CODEC_G722_1_24_PT;
+		*fs=16000;
+	}
+	else if(!strcmp(name,"g722.1_32")){
+		*codec=kDecoderG722_1_32;
+		*PT=NETEQ_CODEC_G722_1_32_PT;
+		*fs=16000;
+	}
+	else if(!strcmp(name,"g722.1C_24")){
+		*codec=kDecoderG722_1C_24;
+		*PT=NETEQ_CODEC_G722_1C_24_PT;
+		*fs=32000;
+	}
+	else if(!strcmp(name,"g722.1C_32")){
+		*codec=kDecoderG722_1C_32;
+		*PT=NETEQ_CODEC_G722_1C_32_PT;
+		*fs=32000;
+	}
+    else if(!strcmp(name,"g722.1C_48")){
+		*codec=kDecoderG722_1C_48;
+		*PT=NETEQ_CODEC_G722_1C_48_PT;
+		*fs=32000;
+	}
+    else if(!strcmp(name,"g726_16")){
+        *fs=8000;
+        *codec=kDecoderG726_16;
+        *PT=NETEQ_CODEC_G726_16_PT;
+        *bitrate=16;
+    }
+    else if(!strcmp(name,"g726_24")){
+        *fs=8000;
+        *codec=kDecoderG726_24;
+        *PT=NETEQ_CODEC_G726_24_PT;
+        *bitrate=24;
+    }
+    else if(!strcmp(name,"g726_32")){
+        *fs=8000;
+        *codec=kDecoderG726_32;
+        *PT=NETEQ_CODEC_G726_32_PT;
+        *bitrate=32;
+    }
+    else if(!strcmp(name,"g726_40")){
+        *fs=8000;
+        *codec=kDecoderG726_40;
+        *PT=NETEQ_CODEC_G726_40_PT;
+        *bitrate=40;
+    }
+	else if((!strcmp(name,"amr4.75k"))||(!strcmp(name,"amr5.15k"))||(!strcmp(name,"amr5.9k"))||
+			(!strcmp(name,"amr6.7k"))||(!strcmp(name,"amr7.4k"))||(!strcmp(name,"amr7.95k"))||
+			(!strcmp(name,"amr10.2k"))||(!strcmp(name,"amr12.2k"))) {
+		*fs=8000;
+		if (!strcmp(name,"amr4.75k"))
+			*bitrate = 0;
+		if (!strcmp(name,"amr5.15k"))
+			*bitrate = 1;
+		if (!strcmp(name,"amr5.9k"))
+			*bitrate = 2;
+		if (!strcmp(name,"amr6.7k"))
+			*bitrate = 3;
+		if (!strcmp(name,"amr7.4k"))
+			*bitrate = 4;
+		if (!strcmp(name,"amr7.95k"))
+			*bitrate = 5;
+		if (!strcmp(name,"amr10.2k"))
+			*bitrate = 6;
+		if (!strcmp(name,"amr12.2k"))
+			*bitrate = 7;
+		*codec=kDecoderAMR;
+		*PT=NETEQ_CODEC_AMR_PT;
+	}
+	else if((!strcmp(name,"amrwb7k"))||(!strcmp(name,"amrwb9k"))||(!strcmp(name,"amrwb12k"))||
+			(!strcmp(name,"amrwb14k"))||(!strcmp(name,"amrwb16k"))||(!strcmp(name,"amrwb18k"))||
+			(!strcmp(name,"amrwb20k"))||(!strcmp(name,"amrwb23k"))||(!strcmp(name,"amrwb24k"))) {
+		*fs=16000;
+		if (!strcmp(name,"amrwb7k"))
+			*bitrate = 7000;
+		if (!strcmp(name,"amrwb9k"))
+			*bitrate = 9000;
+		if (!strcmp(name,"amrwb12k"))
+			*bitrate = 12000;
+		if (!strcmp(name,"amrwb14k"))
+			*bitrate = 14000;
+		if (!strcmp(name,"amrwb16k"))
+			*bitrate = 16000;
+		if (!strcmp(name,"amrwb18k"))
+			*bitrate = 18000;
+		if (!strcmp(name,"amrwb20k"))
+			*bitrate = 20000;
+		if (!strcmp(name,"amrwb23k"))
+			*bitrate = 23000;
+		if (!strcmp(name,"amrwb24k"))
+			*bitrate = 24000;
+		*codec=kDecoderAMRWB;
+		*PT=NETEQ_CODEC_AMRWB_PT;
+	}
+	else if((!strcmp(name,"ilbc"))&&((frameLen%240==0)||(frameLen%160==0))){
+		*fs=8000;
+		*codec=kDecoderILBC;
+		*PT=NETEQ_CODEC_ILBC_PT;
+	}
+	else if(!strcmp(name,"isac")){
+		*fs=16000;
+		*codec=kDecoderISAC;
+		*PT=NETEQ_CODEC_ISAC_PT;
+	}
+    else if(!strcmp(name,"isacswb")){
+		*fs=32000;
+		*codec=kDecoderISACswb;
+		*PT=NETEQ_CODEC_ISACSWB_PT;
+	}
+	else if(!strcmp(name,"g729")){
+		*fs=8000;
+		*codec=kDecoderG729;
+		*PT=NETEQ_CODEC_G729_PT;
+	}
+	else if(!strcmp(name,"g729.1")){
+		*fs=16000;
+		*codec=kDecoderG729_1;
+		*PT=NETEQ_CODEC_G729_1_PT;
+	}
+	else if(!strcmp(name,"gsmfr")){
+		*fs=8000;
+		*codec=kDecoderGSMFR;
+		*PT=NETEQ_CODEC_GSMFR_PT;
+	}
+	else if(!strcmp(name,"speex8")){
+		*fs=8000;
+		*codec=kDecoderSPEEX_8;
+		*PT=NETEQ_CODEC_SPEEX8_PT;
+	}
+	else if(!strcmp(name,"speex16")){
+		*fs=16000;
+		*codec=kDecoderSPEEX_16;
+		*PT=NETEQ_CODEC_SPEEX16_PT;
+	}
+  else if(!strcmp(name,"celt32")){
+    *fs=32000;
+    *codec=kDecoderCELT_32;
+    *PT=NETEQ_CODEC_CELT32_PT;
+  }
+    else if(!strcmp(name,"red_pcm")){
+		*codec=kDecoderPCMa;
+		*PT=NETEQ_CODEC_PCMA_PT; /* this will be the PT for the sub-headers */
+		*fs=8000;
+        *useRed = 1;
+	} else if(!strcmp(name,"red_isac")){
+		*codec=kDecoderISAC;
+		*PT=NETEQ_CODEC_ISAC_PT; /* this will be the PT for the sub-headers */
+		*fs=16000;
+        *useRed = 1;
+    } else {
+		printf("Error: Not a supported codec (%s)\n", name);
+		exit(0);
+	}
+
+}
+
+
+
+
+int NetEQTest_init_coders(enum WebRtcNetEQDecoder coder, int enc_frameSize, int bitrate, int sampfreq , int vad, int numChannels){
+	
+	int ok=0;
+	
+    for (int k = 0; k < numChannels; k++) 
+    {
+        ok=WebRtcVad_Create(&VAD_inst[k]);
+        if (ok!=0) {
+            printf("Error: Couldn't allocate memory for VAD instance\n");
+            exit(0);
+        }
+        ok=WebRtcVad_Init(VAD_inst[k]);
+        if (ok==-1) {
+            printf("Error: Initialization of VAD struct failed\n");	
+            exit(0); 
+        }
+
+
+#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
+    defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
+        ok=WebRtcCng_CreateEnc(&CNGenc_inst[k]);
+        if (ok!=0) {
+            printf("Error: Couldn't allocate memory for CNG encoding instance\n");
+            exit(0);
+        }
+        if(sampfreq <= 16000) {
+            ok=WebRtcCng_InitEnc(CNGenc_inst[k],sampfreq, 200, 5);
+            if (ok==-1) {
+                printf("Error: Initialization of CNG struct failed. Error code %d\n", 
+                    WebRtcCng_GetErrorCodeEnc(CNGenc_inst[k]));	
+                exit(0); 
+            }
+        }
+#endif
+
+        switch (coder) {
+    case kDecoderReservedStart : // dummy codec
+#ifdef CODEC_PCM16B
+    case kDecoderPCM16B :
+#endif
+#ifdef CODEC_PCM16B_WB
+    case kDecoderPCM16Bwb :
+#endif
+#ifdef CODEC_PCM16B_32KHZ
+    case kDecoderPCM16Bswb32kHz :
+#endif
+#ifdef CODEC_PCM16B_48KHZ
+    case kDecoderPCM16Bswb48kHz :
+#endif
+#ifdef CODEC_G711
+    case kDecoderPCMu :
+    case kDecoderPCMa :
+#endif
+        // do nothing
+        break;
+#ifdef CODEC_G729
+    case kDecoderG729:
+        if (sampfreq==8000) {
+            if ((enc_frameSize==80)||(enc_frameSize==160)||(enc_frameSize==240)||(enc_frameSize==320)||(enc_frameSize==400)||(enc_frameSize==480)) {
+                ok=WebRtcG729_CreateEnc(&G729enc_inst[k]);
+                if (ok!=0) {
+                    printf("Error: Couldn't allocate memory for G729 encoding instance\n");
+                    exit(0);
+                }
+            } else {
+                printf("\nError: g729 only supports 10, 20, 30, 40, 50 or 60 ms!!\n\n");
+                exit(0);
+            }
+            WebRtcG729_EncoderInit(G729enc_inst[k], vad);
+            if ((vad==1)&&(enc_frameSize!=80)) {
+                printf("\nError - This simulation only supports VAD for G729 at 10ms packets (not %dms)\n", (enc_frameSize>>3));
+            }
+        } else {
+            printf("\nError - g729 is only developed for 8kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_G729_1
+    case kDecoderG729_1:
+        if (sampfreq==16000) {
+            if ((enc_frameSize==320)||(enc_frameSize==640)||(enc_frameSize==960)
+                ) {
+                    ok=WebRtcG7291_Create(&G729_1_inst[k]);
+                    if (ok!=0) {
+                        printf("Error: Couldn't allocate memory for G.729.1 codec instance\n");
+                        exit(0);
+                    }
+                } else {
+                    printf("\nError: G.729.1 only supports 20, 40 or 60 ms!!\n\n");
+                    exit(0);
+                }
+                if (!(((bitrate >= 12000) && (bitrate <= 32000) && (bitrate%2000 == 0)) || (bitrate == 8000))) {
+                    /* must be 8, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, or 32 kbps */
+                    printf("\nError: G.729.1 bitrate must be 8000 or 12000--32000 in steps of 2000 bps\n");
+                    exit(0);
+                }
+                WebRtcG7291_EncoderInit(G729_1_inst[k], bitrate, 0 /* flag8kHz*/, 0 /*flagG729mode*/);
+        } else {
+            printf("\nError - G.729.1 input is always 16 kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_SPEEX_8
+    case kDecoderSPEEX_8 :
+        if (sampfreq==8000) {
+            if ((enc_frameSize==160)||(enc_frameSize==320)||(enc_frameSize==480)) {
+                ok=WebRtcSpeex_CreateEnc(&SPEEX8enc_inst[k], sampfreq);
+                if (ok!=0) {
+                    printf("Error: Couldn't allocate memory for Speex encoding instance\n");
+                    exit(0);
+                }
+            } else {
+                printf("\nError: Speex only supports 20, 40, and 60 ms!!\n\n");
+                exit(0);
+            }
+            if ((vad==1)&&(enc_frameSize!=160)) {
+                printf("\nError - This simulation only supports VAD for Speex at 20ms packets (not %dms)\n", (enc_frameSize>>3));
+                vad=0;
+            }
+            ok=WebRtcSpeex_EncoderInit(SPEEX8enc_inst[k], 0/*vbr*/, 3 /*complexity*/, vad);
+            if (ok!=0) exit(0);
+        } else {
+            printf("\nError - Speex8 called with sample frequency other than 8 kHz.\n\n");
+        }
+        break;
+#endif
+#ifdef CODEC_SPEEX_16
+    case kDecoderSPEEX_16 :
+        if (sampfreq==16000) {
+            if ((enc_frameSize==320)||(enc_frameSize==640)||(enc_frameSize==960)) {
+                ok=WebRtcSpeex_CreateEnc(&SPEEX16enc_inst[k], sampfreq);
+                if (ok!=0) {
+                    printf("Error: Couldn't allocate memory for Speex encoding instance\n");
+                    exit(0);
+                }
+            } else {
+                printf("\nError: Speex only supports 20, 40, and 60 ms!!\n\n");
+                exit(0);
+            }
+            if ((vad==1)&&(enc_frameSize!=320)) {
+                printf("\nError - This simulation only supports VAD for Speex at 20ms packets (not %dms)\n", (enc_frameSize>>4));
+                vad=0;
+            }
+            ok=WebRtcSpeex_EncoderInit(SPEEX16enc_inst[k], 0/*vbr*/, 3 /*complexity*/, vad);
+            if (ok!=0) exit(0);
+        } else {
+            printf("\nError - Speex16 called with sample frequency other than 16 kHz.\n\n");
+        }
+        break;
+#endif
+#ifdef CODEC_CELT_32
+    case kDecoderCELT_32 :
+        if (sampfreq==32000) {
+            if (enc_frameSize==320) {
+                ok=WebRtcCelt_CreateEnc(&CELT32enc_inst[k], 1 /*mono*/);
+                if (ok!=0) {
+                    printf("Error: Couldn't allocate memory for Celt encoding instance\n");
+                    exit(0);
+                }
+            } else {
+                printf("\nError: Celt only supports 10 ms!!\n\n");
+                exit(0);
+            }
+            ok=WebRtcCelt_EncoderInit(CELT32enc_inst[k],  1 /*mono*/, 48000 /*bitrate*/);
+            if (ok!=0) exit(0);
+        } else {
+          printf("\nError - Celt32 called with sample frequency other than 32 kHz.\n\n");
+        }
+        break;
+#endif
+
+#ifdef CODEC_G722_1_16
+    case kDecoderG722_1_16 :
+        if (sampfreq==16000) {
+            ok=WebRtcG7221_CreateEnc16(&G722_1_16enc_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for G.722.1 instance\n");
+                exit(0);
+            }
+            if (enc_frameSize==320) {				
+            } else {
+                printf("\nError: G722.1 only supports 20 ms!!\n\n");
+                exit(0);
+            }
+            WebRtcG7221_EncoderInit16((G722_1_16_encinst_t*)G722_1_16enc_inst[k]);
+        } else {
+            printf("\nError - G722.1 is only developed for 16kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_G722_1_24
+    case kDecoderG722_1_24 :
+        if (sampfreq==16000) {
+            ok=WebRtcG7221_CreateEnc24(&G722_1_24enc_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for G.722.1 instance\n");
+                exit(0);
+            }
+            if (enc_frameSize==320) {
+            } else {
+                printf("\nError: G722.1 only supports 20 ms!!\n\n");
+                exit(0);
+            }
+            WebRtcG7221_EncoderInit24((G722_1_24_encinst_t*)G722_1_24enc_inst[k]);
+        } else {
+            printf("\nError - G722.1 is only developed for 16kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_G722_1_32
+    case kDecoderG722_1_32 :
+        if (sampfreq==16000) {
+            ok=WebRtcG7221_CreateEnc32(&G722_1_32enc_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for G.722.1 instance\n");
+                exit(0);
+            }
+            if (enc_frameSize==320) {
+            } else {
+                printf("\nError: G722.1 only supports 20 ms!!\n\n");
+                exit(0);
+            }
+            WebRtcG7221_EncoderInit32((G722_1_32_encinst_t*)G722_1_32enc_inst[k]);
+        } else {
+            printf("\nError - G722.1 is only developed for 16kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_G722_1C_24
+    case kDecoderG722_1C_24 :
+        if (sampfreq==32000) {
+            ok=WebRtcG7221C_CreateEnc24(&G722_1C_24enc_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for G.722.1C instance\n");
+                exit(0);
+            }
+            if (enc_frameSize==640) {
+            } else {
+                printf("\nError: G722.1 C only supports 20 ms!!\n\n");
+                exit(0);
+            }
+            WebRtcG7221C_EncoderInit24((G722_1C_24_encinst_t*)G722_1C_24enc_inst[k]);
+        } else {
+            printf("\nError - G722.1 C is only developed for 32kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_G722_1C_32
+    case kDecoderG722_1C_32 :
+        if (sampfreq==32000) {
+            ok=WebRtcG7221C_CreateEnc32(&G722_1C_32enc_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for G.722.1C instance\n");
+                exit(0);
+            }
+            if (enc_frameSize==640) {
+            } else {
+                printf("\nError: G722.1 C only supports 20 ms!!\n\n");
+                exit(0);
+            }
+            WebRtcG7221C_EncoderInit32((G722_1C_32_encinst_t*)G722_1C_32enc_inst[k]);
+        } else {
+            printf("\nError - G722.1 C is only developed for 32kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_G722_1C_48
+    case kDecoderG722_1C_48 :
+        if (sampfreq==32000) {
+            ok=WebRtcG7221C_CreateEnc48(&G722_1C_48enc_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for G.722.1C instance\n");
+                exit(0);
+            }
+            if (enc_frameSize==640) {
+            } else {
+                printf("\nError: G722.1 C only supports 20 ms!!\n\n");
+                exit(0);
+            }
+            WebRtcG7221C_EncoderInit48((G722_1C_48_encinst_t*)G722_1C_48enc_inst[k]);
+        } else {
+            printf("\nError - G722.1 C is only developed for 32kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_G722
+    case kDecoderG722 :
+        if (sampfreq==16000) {
+            if (enc_frameSize%2==0) {				
+            } else {
+                printf("\nError - g722 frames must have an even number of enc_frameSize\n");
+                exit(0);
+            }
+            WebRtcG722_CreateEncoder(&g722EncState[k]);
+            WebRtcG722_EncoderInit(g722EncState[k]);
+        } else {
+            printf("\nError - g722 is only developed for 16kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_AMR
+    case kDecoderAMR :
+        if (sampfreq==8000) {
+            ok=WebRtcAmr_CreateEnc(&AMRenc_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for AMR encoding instance\n");
+                exit(0);
+            }if ((enc_frameSize==160)||(enc_frameSize==320)||(enc_frameSize==480)) {				
+            } else {
+                printf("\nError - AMR must have a multiple of 160 enc_frameSize\n");
+                exit(0);
+            }
+            WebRtcAmr_EncoderInit(AMRenc_inst[k], vad);
+            WebRtcAmr_EncodeBitmode(AMRenc_inst[k], AMRBandwidthEfficient);
+            AMR_bitrate = bitrate;
+        } else {
+            printf("\nError - AMR is only developed for 8kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_AMRWB
+    case kDecoderAMRWB : 
+        if (sampfreq==16000) {
+            ok=WebRtcAmrWb_CreateEnc(&AMRWBenc_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for AMRWB encoding instance\n");
+                exit(0);
+            }
+            if (((enc_frameSize/320)<0)||((enc_frameSize/320)>3)||((enc_frameSize%320)!=0)) {
+                printf("\nError - AMRwb must have frameSize of 20, 40 or 60ms\n");
+                exit(0);
+            }
+            WebRtcAmrWb_EncoderInit(AMRWBenc_inst[k], vad);
+            if (bitrate==7000) {
+                AMRWB_bitrate = AMRWB_MODE_7k;
+            } else if (bitrate==9000) {
+                AMRWB_bitrate = AMRWB_MODE_9k;
+            } else if (bitrate==12000) {
+                AMRWB_bitrate = AMRWB_MODE_12k;
+            } else if (bitrate==14000) {
+                AMRWB_bitrate = AMRWB_MODE_14k;
+            } else if (bitrate==16000) {
+                AMRWB_bitrate = AMRWB_MODE_16k;
+            } else if (bitrate==18000) {
+                AMRWB_bitrate = AMRWB_MODE_18k;
+            } else if (bitrate==20000) {
+                AMRWB_bitrate = AMRWB_MODE_20k;
+            } else if (bitrate==23000) {
+                AMRWB_bitrate = AMRWB_MODE_23k;
+            } else if (bitrate==24000) {
+                AMRWB_bitrate = AMRWB_MODE_24k;
+            }
+            WebRtcAmrWb_EncodeBitmode(AMRWBenc_inst[k], AMRBandwidthEfficient);
+
+        } else {
+            printf("\nError - AMRwb is only developed for 16kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_ILBC
+    case kDecoderILBC :
+        if (sampfreq==8000) {
+            ok=WebRtcIlbcfix_EncoderCreate(&iLBCenc_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for iLBC encoding instance\n");
+                exit(0);
+            }
+            if ((enc_frameSize==160)||(enc_frameSize==240)||(enc_frameSize==320)||(enc_frameSize==480)) {				
+            } else {
+                printf("\nError - iLBC only supports 160, 240, 320 and 480 enc_frameSize (20, 30, 40 and 60 ms)\n");
+                exit(0);
+            }
+            if ((enc_frameSize==160)||(enc_frameSize==320)) {
+                /* 20 ms version */
+                WebRtcIlbcfix_EncoderInit(iLBCenc_inst[k], 20);
+            } else {
+                /* 30 ms version */
+                WebRtcIlbcfix_EncoderInit(iLBCenc_inst[k], 30);
+            }
+        } else {
+            printf("\nError - iLBC is only developed for 8kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_ISAC
+    case kDecoderISAC:
+        if (sampfreq==16000) {
+            ok=WebRtcIsac_Create(&ISAC_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for iSAC instance\n");
+                exit(0);
+            }if ((enc_frameSize==480)||(enc_frameSize==960)) {
+            } else {
+                printf("\nError - iSAC only supports frameSize (30 and 60 ms)\n");
+                exit(0);
+            }
+            WebRtcIsac_EncoderInit(ISAC_inst[k],1);
+            if ((bitrate<10000)||(bitrate>32000)) {
+                printf("\nError - iSAC bitrate has to be between 10000 and 32000 bps (not %i)\n", bitrate);
+                exit(0);
+            }
+            WebRtcIsac_Control(ISAC_inst[k], bitrate, enc_frameSize>>4);
+        } else {
+            printf("\nError - iSAC only supports 480 or 960 enc_frameSize (30 or 60 ms)\n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef NETEQ_ISACFIX_CODEC
+    case kDecoderISAC:
+        if (sampfreq==16000) {
+            ok=WebRtcIsacfix_Create(&ISAC_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for iSAC instance\n");
+                exit(0);
+            }if ((enc_frameSize==480)||(enc_frameSize==960)) {
+            } else {
+                printf("\nError - iSAC only supports frameSize (30 and 60 ms)\n");
+                exit(0);
+            }
+            WebRtcIsacfix_EncoderInit(ISAC_inst[k],1);
+            if ((bitrate<10000)||(bitrate>32000)) {
+                printf("\nError - iSAC bitrate has to be between 10000 and 32000 bps (not %i)\n", bitrate);
+                exit(0);
+            }
+            WebRtcIsacfix_Control(ISAC_inst[k], bitrate, enc_frameSize>>4);
+        } else {
+            printf("\nError - iSAC only supports 480 or 960 enc_frameSize (30 or 60 ms)\n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_ISAC_SWB
+    case kDecoderISACswb:
+        if (sampfreq==32000) {
+            ok=WebRtcIsac_Create(&ISACSWB_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for iSAC SWB instance\n");
+                exit(0);
+            }if (enc_frameSize==960) {
+            } else {
+                printf("\nError - iSAC SWB only supports frameSize 30 ms\n");
+                exit(0);
+            }
+            ok = WebRtcIsac_SetEncSampRate(ISACSWB_inst[k], kIsacSuperWideband);
+            if (ok!=0) {
+                printf("Error: Couldn't set sample rate for iSAC SWB instance\n");
+                exit(0);
+            }
+            WebRtcIsac_EncoderInit(ISACSWB_inst[k],1);
+            if ((bitrate<32000)||(bitrate>56000)) {
+                printf("\nError - iSAC SWB bitrate has to be between 32000 and 56000 bps (not %i)\n", bitrate);
+                exit(0);
+            }
+            WebRtcIsac_Control(ISACSWB_inst[k], bitrate, enc_frameSize>>5);
+        } else {
+            printf("\nError - iSAC SWB only supports 960 enc_frameSize (30 ms)\n");
+            exit(0);
+        }
+        break;
+#endif
+#ifdef CODEC_GSMFR
+    case kDecoderGSMFR:
+        if (sampfreq==8000) {
+            ok=WebRtcGSMFR_CreateEnc(&GSMFRenc_inst[k]);
+            if (ok!=0) {
+                printf("Error: Couldn't allocate memory for GSM FR encoding instance\n");
+                exit(0);
+            }
+            if ((enc_frameSize==160)||(enc_frameSize==320)||(enc_frameSize==480)) {			
+            } else {
+                printf("\nError - GSM FR must have a multiple of 160 enc_frameSize\n");
+                exit(0);
+            }
+            WebRtcGSMFR_EncoderInit(GSMFRenc_inst[k], 0);
+        } else {
+            printf("\nError - GSM FR is only developed for 8kHz \n");
+            exit(0);
+        }
+        break;
+#endif
+    default :
+        printf("Error: unknown codec in call to NetEQTest_init_coders.\n");
+        exit(0);
+        break;
+        }
+
+        if (ok != 0) {
+            return(ok);
+        }
+    } // end for
+
+    return(0);
+}			
+
+
+
+
+int NetEQTest_free_coders(enum WebRtcNetEQDecoder coder, int numChannels) {
+
+    for (int k = 0; k < numChannels; k++)
+    {
+        WebRtcVad_Free(VAD_inst[k]);
+#if (defined(CODEC_CNGCODEC8) || defined(CODEC_CNGCODEC16) || \
+    defined(CODEC_CNGCODEC32) || defined(CODEC_CNGCODEC48))
+        WebRtcCng_FreeEnc(CNGenc_inst[k]);
+#endif
+
+        switch (coder) 
+        {
+        case kDecoderReservedStart : // dummy codec
+#ifdef CODEC_PCM16B
+        case kDecoderPCM16B :
+#endif
+#ifdef CODEC_PCM16B_WB
+        case kDecoderPCM16Bwb :
+#endif
+#ifdef CODEC_PCM16B_32KHZ
+        case kDecoderPCM16Bswb32kHz :
+#endif
+#ifdef CODEC_PCM16B_48KHZ
+        case kDecoderPCM16Bswb48kHz :
+#endif
+#ifdef CODEC_G711
+        case kDecoderPCMu :
+        case kDecoderPCMa :
+#endif
+            // do nothing
+            break;
+#ifdef CODEC_G729
+        case kDecoderG729:
+            WebRtcG729_FreeEnc(G729enc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_G729_1
+        case kDecoderG729_1:
+            WebRtcG7291_Free(G729_1_inst[k]);
+            break;
+#endif
+#ifdef CODEC_SPEEX_8
+        case kDecoderSPEEX_8 :
+            WebRtcSpeex_FreeEnc(SPEEX8enc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_SPEEX_16
+        case kDecoderSPEEX_16 :
+            WebRtcSpeex_FreeEnc(SPEEX16enc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_CELT_32
+        case kDecoderCELT_32 :
+            WebRtcCelt_FreeEnc(CELT32enc_inst[k]);
+            break;
+#endif
+
+#ifdef CODEC_G722_1_16
+        case kDecoderG722_1_16 :
+            WebRtcG7221_FreeEnc16(G722_1_16enc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_G722_1_24
+        case kDecoderG722_1_24 :
+            WebRtcG7221_FreeEnc24(G722_1_24enc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_G722_1_32
+        case kDecoderG722_1_32 :
+            WebRtcG7221_FreeEnc32(G722_1_32enc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_G722_1C_24
+        case kDecoderG722_1C_24 :
+            WebRtcG7221C_FreeEnc24(G722_1C_24enc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_G722_1C_32
+        case kDecoderG722_1C_32 :
+            WebRtcG7221C_FreeEnc32(G722_1C_32enc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_G722_1C_48
+        case kDecoderG722_1C_48 :
+            WebRtcG7221C_FreeEnc48(G722_1C_48enc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_G722
+        case kDecoderG722 :
+            WebRtcG722_FreeEncoder(g722EncState[k]);
+            break;
+#endif
+#ifdef CODEC_AMR
+        case kDecoderAMR :
+            WebRtcAmr_FreeEnc(AMRenc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_AMRWB
+        case kDecoderAMRWB : 
+            WebRtcAmrWb_FreeEnc(AMRWBenc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_ILBC
+        case kDecoderILBC :
+            WebRtcIlbcfix_EncoderFree(iLBCenc_inst[k]);
+            break;
+#endif
+#ifdef CODEC_ISAC
+        case kDecoderISAC:
+            WebRtcIsac_Free(ISAC_inst[k]);
+            break;
+#endif
+#ifdef NETEQ_ISACFIX_CODEC
+        case kDecoderISAC:
+            WebRtcIsacfix_Free(ISAC_inst[k]);
+            break;
+#endif
+#ifdef CODEC_ISAC_SWB
+        case kDecoderISACswb:
+            WebRtcIsac_Free(ISACSWB_inst[k]);
+            break;
+#endif
+#ifdef CODEC_GSMFR
+        case kDecoderGSMFR:
+            WebRtcGSMFR_FreeEnc(GSMFRenc_inst[k]);
+            break;
+#endif
+        default :
+            printf("Error: unknown codec in call to NetEQTest_init_coders.\n");
+            exit(0);
+            break;
+        }
+    }
+
+	return(0);
+}
+
+
+
+
+
+
+int NetEQTest_encode(int coder, WebRtc_Word16 *indata, int frameLen, unsigned char * encoded,int sampleRate , 
+						  int * vad, int useVAD, int bitrate, int numChannels){
+
+	short cdlen = 0;
+	WebRtc_Word16 *tempdata;
+	static int first_cng=1;
+	WebRtc_Word16 tempLen;
+
+	*vad =1;
+
+    // check VAD first
+	if(useVAD&&
+			   (coder!=kDecoderG729)&&(coder!=kDecoderAMR)&&
+			   (coder!=kDecoderSPEEX_8)&&(coder!=kDecoderSPEEX_16))
+    {
+        *vad = 0;
+
+        for (int k = 0; k < numChannels; k++)
+        {
+            tempLen = frameLen;
+            tempdata = &indata[k*frameLen];
+            int localVad=0;
+            /* Partition the signal and test each chunk for VAD.
+            All chunks must be VAD=0 to produce a total VAD=0. */
+            while (tempLen >= 10*sampleRate/1000) {
+                if ((tempLen % 30*sampleRate/1000) == 0) { // tempLen is multiple of 30ms
+                    localVad |= WebRtcVad_Process(VAD_inst[k] ,sampleRate, tempdata, 30*sampleRate/1000);
+                    tempdata += 30*sampleRate/1000;
+                    tempLen -= 30*sampleRate/1000;
+                }
+                else if (tempLen >= 20*sampleRate/1000) { // tempLen >= 20ms
+                    localVad |= WebRtcVad_Process(VAD_inst[k] ,sampleRate, tempdata, 20*sampleRate/1000);
+                    tempdata += 20*sampleRate/1000;
+                    tempLen -= 20*sampleRate/1000;
+                }
+                else { // use 10ms
+                    localVad |= WebRtcVad_Process(VAD_inst[k] ,sampleRate, tempdata, 10*sampleRate/1000);
+                    tempdata += 10*sampleRate/1000;
+                    tempLen -= 10*sampleRate/1000;
+                }
+            }
+
+            // aggregate all VAD decisions over all channels
+            *vad |= localVad;
+        }
+
+        if(!*vad){
+            // all channels are silent
+            cdlen = 0;
+            for (int k = 0; k < numChannels; k++)
+            {
+                WebRtcCng_Encode(CNGenc_inst[k],&indata[k*frameLen], (frameLen <= 640 ? frameLen : 640) /* max 640 */,
+                    encoded,&tempLen,first_cng);
+                encoded += tempLen;
+                cdlen += tempLen;
+            }
+            *vad=0;
+            first_cng=0;
+            return(cdlen);
+        }
+	}
+
+
+    // loop over all channels
+    int totalLen = 0;
+
+    for (int k = 0; k < numChannels; k++)
+    {
+        /* Encode with the selected coder type */
+        if (coder==kDecoderPCMu) { /*g711 u-law */
+#ifdef CODEC_G711
+            cdlen = WebRtcG711_EncodeU(G711state[k], indata, frameLen, (WebRtc_Word16*) encoded);
+#endif
+        }  
+        else if (coder==kDecoderPCMa) { /*g711 A-law */
+#ifdef CODEC_G711
+            cdlen = WebRtcG711_EncodeA(G711state[k], indata, frameLen, (WebRtc_Word16*) encoded);
+        }
+#endif
+#ifdef CODEC_PCM16B
+        else if ((coder==kDecoderPCM16B)||(coder==kDecoderPCM16Bwb)||
+            (coder==kDecoderPCM16Bswb32kHz)||(coder==kDecoderPCM16Bswb48kHz)) { /*pcm16b (8kHz, 16kHz, 32kHz or 48kHz) */
+                cdlen = WebRtcPcm16b_EncodeW16(indata, frameLen, (WebRtc_Word16*) encoded);
+            }
+#endif
+#ifdef CODEC_G722
+        else if (coder==kDecoderG722) { /*g722 */
+            cdlen=WebRtcG722_Encode(g722EncState[k], indata, frameLen, (WebRtc_Word16*)encoded);
+            cdlen=frameLen>>1;
+        }
+#endif
+#ifdef CODEC_G722_1_16
+        else if (coder==kDecoderG722_1_16) { /* g722.1 16kbit/s mode */
+            cdlen=WebRtcG7221_Encode16((G722_1_16_encinst_t*)G722_1_16enc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded);
+        }
+#endif
+#ifdef CODEC_G722_1_24
+        else if (coder==kDecoderG722_1_24) { /* g722.1 24kbit/s mode*/
+            cdlen=WebRtcG7221_Encode24((G722_1_24_encinst_t*)G722_1_24enc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded);
+        }
+#endif
+#ifdef CODEC_G722_1_32
+        else if (coder==kDecoderG722_1_32) { /* g722.1 32kbit/s mode */
+            cdlen=WebRtcG7221_Encode32((G722_1_32_encinst_t*)G722_1_32enc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded);
+        }
+#endif
+#ifdef CODEC_G722_1C_24
+        else if (coder==kDecoderG722_1C_24) { /* g722.1 32 kHz 24kbit/s mode*/
+            cdlen=WebRtcG7221C_Encode24((G722_1C_24_encinst_t*)G722_1C_24enc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded);
+        }
+#endif
+#ifdef CODEC_G722_1C_32
+        else if (coder==kDecoderG722_1C_32) { /* g722.1 32 kHz 32kbit/s mode */
+            cdlen=WebRtcG7221C_Encode32((G722_1C_32_encinst_t*)G722_1C_32enc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded);
+        }
+#endif
+#ifdef CODEC_G722_1C_48
+        else if (coder==kDecoderG722_1C_48) { /* g722.1 32 kHz 48kbit/s mode */
+            cdlen=WebRtcG7221C_Encode48((G722_1C_48_encinst_t*)G722_1C_48enc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded);
+        }
+#endif
+#ifdef CODEC_G729
+        else if (coder==kDecoderG729) { /*g729 */
+            WebRtc_Word16 dataPos=0;
+            WebRtc_Word16 len=0;
+            cdlen = 0;
+            for (dataPos=0;dataPos<frameLen;dataPos+=80) {
+                len=WebRtcG729_Encode(G729enc_inst[k], &indata[dataPos], 80, (WebRtc_Word16*)(&encoded[cdlen]));
+                cdlen += len;
+            }
+        }
+#endif
+#ifdef CODEC_G729_1
+        else if (coder==kDecoderG729_1) { /*g729.1 */
+            WebRtc_Word16 dataPos=0;
+            WebRtc_Word16 len=0;
+            cdlen = 0;
+            for (dataPos=0;dataPos<frameLen;dataPos+=160) {
+                len=WebRtcG7291_Encode(G729_1_inst[k], &indata[dataPos], (WebRtc_Word16*)(&encoded[cdlen]), bitrate, frameLen/320 /* num 20ms frames*/);
+                cdlen += len;
+            }
+        }
+#endif
+#ifdef CODEC_AMR
+        else if (coder==kDecoderAMR) { /*AMR */
+            cdlen=WebRtcAmr_Encode(AMRenc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded, AMR_bitrate);
+        }
+#endif
+#ifdef CODEC_AMRWB
+        else if (coder==kDecoderAMRWB) { /*AMR-wb */
+            cdlen=WebRtcAmrWb_Encode(AMRWBenc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded, AMRWB_bitrate);
+        }
+#endif
+#ifdef CODEC_ILBC
+        else if (coder==kDecoderILBC) { /*iLBC */
+            cdlen=WebRtcIlbcfix_Encode(iLBCenc_inst[k], indata,frameLen,(WebRtc_Word16*)encoded);
+        }
+#endif
+#if (defined(CODEC_ISAC) || defined(NETEQ_ISACFIX_CODEC)) // TODO(hlundin): remove all NETEQ_ISACFIX_CODEC
+        else if (coder==kDecoderISAC) { /*iSAC */
+            int noOfCalls=0;
+            cdlen=0;
+            while (cdlen<=0) {
+#ifdef CODEC_ISAC /* floating point */
+                cdlen=WebRtcIsac_Encode(ISAC_inst[k],&indata[noOfCalls*160],(WebRtc_Word16*)encoded);
+#else /* fixed point */
+                cdlen=WebRtcIsacfix_Encode(ISAC_inst[k],&indata[noOfCalls*160],(WebRtc_Word16*)encoded);
+#endif
+                noOfCalls++;
+            }
+        }
+#endif
+#ifdef CODEC_ISAC_SWB
+        else if (coder==kDecoderISACswb) { /* iSAC SWB */
+            int noOfCalls=0;
+            cdlen=0;
+            while (cdlen<=0) {
+                cdlen=WebRtcIsac_Encode(ISACSWB_inst[k],&indata[noOfCalls*320],(WebRtc_Word16*)encoded);
+                noOfCalls++;
+            }
+        }
+#endif
+#ifdef CODEC_GSMFR
+        else if (coder==kDecoderGSMFR) { /* GSM FR */
+            cdlen=WebRtcGSMFR_Encode(GSMFRenc_inst[k], indata, frameLen, (WebRtc_Word16*)encoded);
+        }
+#endif
+#ifdef CODEC_SPEEX_8
+        else if (coder==kDecoderSPEEX_8) { /* Speex */
+            int encodedLen = 0;
+            int retVal = 1;
+            while (retVal == 1 && encodedLen < frameLen) {
+                retVal = WebRtcSpeex_Encode(SPEEX8enc_inst[k], &indata[encodedLen], 15000);
+                encodedLen += 20*8; /* 20 ms */
+            }
+            if( (retVal == 0 && encodedLen != frameLen) || retVal < 0) {
+                printf("Error encoding speex frame!\n");
+                exit(0);
+            }
+            cdlen=WebRtcSpeex_GetBitstream(SPEEX8enc_inst[k], (WebRtc_Word16*)encoded);
+        }
+#endif
+#ifdef CODEC_SPEEX_16
+        else if (coder==kDecoderSPEEX_16) { /* Speex */
+            int encodedLen = 0;
+            int retVal = 1;
+            while (retVal == 1 && encodedLen < frameLen) {
+                retVal = WebRtcSpeex_Encode(SPEEX16enc_inst[k], &indata[encodedLen], 15000);
+                encodedLen += 20*16; /* 20 ms */
+            }
+            if( (retVal == 0 && encodedLen != frameLen) || retVal < 0) {
+                printf("Error encoding speex frame!\n");
+                exit(0);
+            }
+            cdlen=WebRtcSpeex_GetBitstream(SPEEX16enc_inst[k], (WebRtc_Word16*)encoded);
+        }
+#endif
+#ifdef CODEC_CELT_32
+        else if (coder==kDecoderCELT_32) { /* Celt */
+            int encodedLen = 0;
+            cdlen = 0;
+            while (cdlen <= 0) {
+                cdlen = WebRtcCelt_Encode(CELT32enc_inst[k], &indata[encodedLen], encoded);
+                encodedLen += 10*32; /* 10 ms */
+            }
+            if( (encodedLen != frameLen) || cdlen < 0) {
+                printf("Error encoding Celt frame!\n");
+                exit(0);
+            }
+        }
+#endif
+
+        indata += frameLen;
+        encoded += cdlen;
+        totalLen += cdlen;
+
+    } // end for
+
+	first_cng=1;
+	return(totalLen);
+}
+
+
+
+void makeRTPheader(unsigned char* rtp_data, int payloadType, int seqNo, WebRtc_UWord32 timestamp, WebRtc_UWord32 ssrc){
+			
+			rtp_data[0]=(unsigned char)0x80;
+			rtp_data[1]=(unsigned char)(payloadType & 0xFF);
+			rtp_data[2]=(unsigned char)((seqNo>>8)&0xFF);
+			rtp_data[3]=(unsigned char)((seqNo)&0xFF);
+			rtp_data[4]=(unsigned char)((timestamp>>24)&0xFF);
+			rtp_data[5]=(unsigned char)((timestamp>>16)&0xFF);
+
+			rtp_data[6]=(unsigned char)((timestamp>>8)&0xFF); 
+			rtp_data[7]=(unsigned char)(timestamp & 0xFF);
+
+			rtp_data[8]=(unsigned char)((ssrc>>24)&0xFF);
+			rtp_data[9]=(unsigned char)((ssrc>>16)&0xFF);
+
+			rtp_data[10]=(unsigned char)((ssrc>>8)&0xFF);
+			rtp_data[11]=(unsigned char)(ssrc & 0xFF);
+}
+
+
+int makeRedundantHeader(unsigned char* rtp_data, int *payloadType, int numPayloads, WebRtc_UWord32 *timestamp, WebRtc_UWord16 *blockLen,
+                        int seqNo, WebRtc_UWord32 ssrc)
+{
+
+    int i;
+    unsigned char *rtpPointer;
+    WebRtc_UWord16 offset;
+
+    /* first create "standard" RTP header */
+    makeRTPheader(rtp_data, NETEQ_CODEC_RED_PT, seqNo, timestamp[numPayloads-1], ssrc);
+
+    rtpPointer = &rtp_data[12];
+
+    /* add one sub-header for each redundant payload (not the primary) */
+    for(i=0; i<numPayloads-1; i++) {                                            /* |0 1 2 3 4 5 6 7| */
+        if(blockLen[i] > 0) {
+            offset = (WebRtc_UWord16) (timestamp[numPayloads-1] - timestamp[i]);
+
+            rtpPointer[0] = (unsigned char) ( 0x80 | (0x7F & payloadType[i]) ); /* |F|   block PT  | */
+            rtpPointer[1] = (unsigned char) ((offset >> 6) & 0xFF);             /* |  timestamp-   | */
+            rtpPointer[2] = (unsigned char) ( ((offset & 0x3F)<<2) |
+                ( (blockLen[i]>>8) & 0x03 ) );                                  /* | -offset   |bl-| */
+            rtpPointer[3] = (unsigned char) ( blockLen[i] & 0xFF );             /* | -ock length   | */
+
+            rtpPointer += 4;
+        }
+    }
+
+    /* last sub-header */
+    rtpPointer[0]= (unsigned char) (0x00 | (0x7F&payloadType[numPayloads-1]));/* |F|   block PT  | */
+    rtpPointer += 1;
+
+    return(rtpPointer - rtp_data); /* length of header in bytes */
+}
+
+
+
+int makeDTMFpayload(unsigned char* payload_data, int Event, int End, int Volume, int Duration) {
+	unsigned char E,R,V;
+	R=0;
+	V=(unsigned char)Volume;
+	if (End==0) {
+		E = 0x00;
+	} else {
+		E = 0x80;
+	}
+	payload_data[0]=(unsigned char)Event;
+	payload_data[1]=(unsigned char)(E|R|V);
+	//Duration equals 8 times time_ms, default is 8000 Hz.
+	payload_data[2]=(unsigned char)((Duration>>8)&0xFF);
+	payload_data[3]=(unsigned char)(Duration&0xFF);
+	return(4);
+}
+
+void stereoDeInterleave(WebRtc_Word16* audioSamples, int numSamples)
+{
+
+    WebRtc_Word16 *tempVec;
+    WebRtc_Word16 *readPtr, *writeL, *writeR;
+
+    if (numSamples <= 0)
+        return;
+
+    tempVec = (WebRtc_Word16 *) malloc(sizeof(WebRtc_Word16) * numSamples);
+    if (tempVec == NULL) {
+        printf("Error allocating memory\n");
+        exit(0);
+    }
+
+    memcpy(tempVec, audioSamples, numSamples*sizeof(WebRtc_Word16));
+
+    writeL = audioSamples;
+    writeR = &audioSamples[numSamples/2];
+    readPtr = tempVec;
+
+    for (int k = 0; k < numSamples; k += 2)
+    {
+        *writeL = *readPtr;
+        readPtr++;
+        *writeR = *readPtr;
+        readPtr++;
+        writeL++;
+        writeR++;
+    }
+
+    free(tempVec);
+
+}
+
+
+void stereoInterleave(unsigned char* data, int dataLen, int stride)
+{
+
+    unsigned char *ptrL, *ptrR;
+    unsigned char temp[10];
+
+    if (stride > 10)
+    {
+        exit(0);
+    }
+
+    if (dataLen%1 != 0)
+    {
+        // must be even number of samples
+        printf("Error: cannot interleave odd sample number\n");
+        exit(0);
+    }
+
+    ptrL = data + stride;
+    ptrR = &data[dataLen/2];
+
+    while (ptrL < ptrR) {
+        // copy from right pointer to temp
+        memcpy(temp, ptrR, stride);
+
+        // shift data between pointers
+        memmove(ptrL + stride, ptrL, ptrR - ptrL);
+
+        // copy from temp to left pointer
+        memcpy(ptrL, temp, stride);
+
+        // advance pointers
+        ptrL += stride*2;
+        ptrR += stride;
+    }
+
+}
diff --git a/src/modules/audio_coding/neteq/test/RTPjitter.cc b/src/modules/audio_coding/neteq/test/RTPjitter.cc
new file mode 100644
index 0000000..e3270be
--- /dev/null
+++ b/src/modules/audio_coding/neteq/test/RTPjitter.cc
@@ -0,0 +1,200 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//TODO(hlundin): Reformat file to meet style guide.
+
+/* header includes */
+#include "typedefs.h"
+#include <stdio.h>
+#include <stdlib.h>
+#ifdef WIN32
+#include <winsock2.h>
+#include <io.h>
+#endif
+#ifdef WEBRTC_LINUX
+#include <netinet/in.h>
+#endif
+#include <float.h>
+
+#include "gtest/gtest.h"
+
+/*********************/
+/* Misc. definitions */
+/*********************/
+
+#define FIRSTLINELEN 40
+#define CHECK_ZERO(a) {int errCode = a; if((errCode)!=0){fprintf(stderr,"\n %s \n line: %d \n error at %s\n Error Code = %d\n",__FILE__,__LINE__,#a, WebRtcNetEQ_GetErrorCode(inst)); exit(0);}}
+#define CHECK_NOT_NULL(a) if((a)==NULL){fprintf(stderr,"\n %s \n line: %d \nerror at %s\n",__FILE__,__LINE__,#a );return(-1);}
+
+struct arr_time {
+	float time;
+	WebRtc_UWord32 ix;
+};
+
+int filelen(FILE *fid)
+{
+  fpos_t cur_pos;
+  int len;
+
+  if (!fid || fgetpos(fid, &cur_pos)) {
+    return(-1);
+  }
+
+  fseek(fid, 0, SEEK_END);
+  len = ftell(fid);
+
+  fsetpos(fid, &cur_pos);
+
+  return (len);
+}
+
+int compare_arr_time(const void *x, const void *y);
+
+int main(int argc, char* argv[])
+{
+	unsigned int	dat_len, rtp_len, Npack, k;
+	arr_time		*time_vec;
+	char			firstline[FIRSTLINELEN];
+	unsigned char	*rtp_vec = NULL, **packet_ptr, *temp_packet;
+	const unsigned int kRtpDumpHeaderSize = 4 + 4 + 4 + 2 + 2;
+	WebRtc_UWord16			len;
+	WebRtc_UWord32			*offset;
+
+/* check number of parameters */
+	if (argc != 4) {
+		/* print help text and exit */
+		printf("Apply jitter on RTP stream.\n");
+		printf("The program reads an RTP stream and packet timing from two files.\n");
+		printf("The RTP stream is modified to have the same jitter as described in the timing files.\n");
+		printf("The format of the RTP stream file should be the same as for rtpplay,\n");
+		printf("and can be obtained e.g., from Ethereal by using\n");
+		printf("Statistics -> RTP -> Show All Streams -> [select a stream] -> Save As\n\n");
+		printf("Usage:\n\n");
+		printf("%s RTP_infile dat_file RTP_outfile\n", argv[0]);
+		printf("where:\n");
+
+		printf("RTP_infile       : RTP stream input file\n\n");
+
+		printf("dat_file         : file with packet arrival times in ms\n\n");
+
+		printf("RTP_outfile      : RTP stream output file\n\n");
+
+		return(0);
+	}
+
+	FILE* in_file=fopen(argv[1],"rb");
+	CHECK_NOT_NULL(in_file);
+	printf("Input file: %s\n",argv[1]);
+	FILE* dat_file=fopen(argv[2],"rb");
+	CHECK_NOT_NULL(dat_file);
+	printf("Dat-file: %s\n",argv[2]);
+	FILE* out_file=fopen(argv[3],"wb");
+	CHECK_NOT_NULL(out_file);
+	printf("Output file: %s\n\n",argv[3]);
+	
+	time_vec = (arr_time *) malloc(sizeof(arr_time)*(filelen(dat_file)/sizeof(float)) + 1000); // add 1000 bytes to avoid (rare) strange error
+	if (time_vec==NULL) {
+		fprintf(stderr, "Error: could not allocate memory for reading dat file\n");
+		goto closing;
+	}
+
+	dat_len=0;
+	while(fread(&(time_vec[dat_len].time),sizeof(float),1,dat_file)>0) {
+		time_vec[dat_len].ix=dat_len;
+		dat_len++;
+	}
+	
+	qsort(time_vec,dat_len,sizeof(arr_time),compare_arr_time);
+
+
+	rtp_vec = (unsigned char *) malloc(sizeof(unsigned char)*filelen(in_file));
+	if (rtp_vec==NULL) {
+		fprintf(stderr,"Error: could not allocate memory for reading rtp file\n");
+		goto closing;
+	}
+
+	// read file header and write directly to output file
+	EXPECT_TRUE(fgets(firstline, FIRSTLINELEN, in_file) != NULL);
+	EXPECT_GT(fputs(firstline, out_file), 0);
+	EXPECT_EQ(kRtpDumpHeaderSize, fread(firstline, 1, kRtpDumpHeaderSize,
+	                                    in_file));
+	EXPECT_EQ(kRtpDumpHeaderSize, fwrite(firstline, 1, kRtpDumpHeaderSize,
+	                                     out_file));
+
+	// read all RTP packets into vector
+	rtp_len=0;
+	Npack=0;
+	len=(WebRtc_UWord16) fread(&rtp_vec[rtp_len], sizeof(unsigned char), 2, in_file); // read length of first packet
+	while(len==2) {
+		len = ntohs(*((WebRtc_UWord16 *)(rtp_vec + rtp_len)));
+		rtp_len += 2;
+		if(fread(&rtp_vec[rtp_len], sizeof(unsigned char), len-2, in_file)!=(unsigned) (len-2)) {
+			fprintf(stderr,"Error: currupt packet length\n");
+			goto closing;
+		}
+		rtp_len += len-2;
+		Npack++;
+		len=(WebRtc_UWord16) fread(&rtp_vec[rtp_len], sizeof(unsigned char), 2, in_file); // read length of next packet
+	}
+
+	packet_ptr = (unsigned char **) malloc(Npack*sizeof(unsigned char*));
+
+	packet_ptr[0]=rtp_vec;
+	k=1;
+	while(k<Npack) {
+		len = ntohs(*((WebRtc_UWord16 *) packet_ptr[k-1]));
+		packet_ptr[k]=packet_ptr[k-1]+len;
+		k++;
+	}
+
+	for(k=0; k<dat_len && k<Npack; k++) {
+		if(time_vec[k].time < FLT_MAX && time_vec[k].ix < Npack){ 
+			temp_packet = packet_ptr[time_vec[k].ix];
+			offset = (WebRtc_UWord32 *) (temp_packet+4);
+			if ( time_vec[k].time >= 0 ) {
+				*offset = htonl((WebRtc_UWord32) time_vec[k].time);
+			}
+			else {
+				*offset = htonl((WebRtc_UWord32) 0);
+				fprintf(stderr, "Warning: negative receive time in dat file transformed to 0.\n");
+			}
+
+			// write packet to file
+                        if (fwrite(temp_packet, sizeof(unsigned char),
+                                   ntohs(*((WebRtc_UWord16*) temp_packet)),
+                                   out_file) !=
+                            ntohs(*((WebRtc_UWord16*) temp_packet))) {
+                          return -1;
+                        }
+		}
+	}
+
+
+closing:
+	free(time_vec);
+	free(rtp_vec);
+	fclose(in_file);
+	fclose(dat_file);
+	fclose(out_file);
+
+	return(0);
+}
+
+
+
+int compare_arr_time(const void *xp, const void *yp) {
+
+	if(((arr_time *)xp)->time == ((arr_time *)yp)->time)
+		return(0);
+	else if(((arr_time *)xp)->time > ((arr_time *)yp)->time)
+		return(1);
+
+	return(-1);
+}
diff --git a/src/modules/audio_coding/neteq/test/RTPtimeshift.cc b/src/modules/audio_coding/neteq/test/RTPtimeshift.cc
new file mode 100644
index 0000000..dc7ff9f
--- /dev/null
+++ b/src/modules/audio_coding/neteq/test/RTPtimeshift.cc
@@ -0,0 +1,97 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+#include <stdio.h>
+#include <vector>
+
+#include "NETEQTEST_RTPpacket.h"
+#include "gtest/gtest.h"
+
+/*********************/
+/* Misc. definitions */
+/*********************/
+
+#define FIRSTLINELEN 40
+
+
+int main(int argc, char* argv[])
+{
+    if(argc < 4 || argc > 6)
+    {
+        printf("Usage: RTPtimeshift in.rtp out.rtp newStartTS [newStartSN [newStartArrTime]]\n");
+        exit(1);
+    }
+
+	FILE *inFile=fopen(argv[1],"rb");
+	if (!inFile)
+    {
+        printf("Cannot open input file %s\n", argv[1]);
+        return(-1);
+    }
+    printf("Input RTP file: %s\n",argv[1]);
+
+	FILE *outFile=fopen(argv[2],"wb");
+	if (!outFile)
+    {
+        printf("Cannot open output file %s\n", argv[2]);
+        return(-1);
+    }
+	printf("Output RTP file: %s\n\n",argv[2]);
+
+    // read file header and write directly to output file
+	const unsigned int kRtpDumpHeaderSize = 4 + 4 + 4 + 2 + 2;
+	char firstline[FIRSTLINELEN];
+	EXPECT_TRUE(fgets(firstline, FIRSTLINELEN, inFile) != NULL);
+	EXPECT_GT(fputs(firstline, outFile), 0);
+	EXPECT_EQ(kRtpDumpHeaderSize,
+	          fread(firstline, 1, kRtpDumpHeaderSize, inFile));
+	EXPECT_EQ(kRtpDumpHeaderSize,
+	          fwrite(firstline, 1, kRtpDumpHeaderSize, outFile));
+	NETEQTEST_RTPpacket packet;
+	int packLen = packet.readFromFile(inFile);
+	if (packLen < 0)
+	{
+	    exit(1);
+	}
+
+    // get new start TS and start SeqNo from arguments
+	WebRtc_UWord32 TSdiff = atoi(argv[3]) - packet.timeStamp();
+	WebRtc_UWord16 SNdiff = 0;
+	WebRtc_UWord32 ATdiff = 0;
+    if (argc > 4)
+    {
+        if (argv[4] >= 0)
+            SNdiff = atoi(argv[4]) - packet.sequenceNumber();
+        if (argc > 5)
+        {
+            if (argv[5] >= 0)
+                ATdiff = atoi(argv[5]) - packet.time();
+        }
+    }
+
+    while (packLen >= 0)
+    {
+        
+        packet.setTimeStamp(packet.timeStamp() + TSdiff);
+        packet.setSequenceNumber(packet.sequenceNumber() + SNdiff);
+        packet.setTime(packet.time() + ATdiff);
+
+        packet.writeToFile(outFile);
+
+        packLen = packet.readFromFile(inFile);
+
+    }
+
+    fclose(inFile);
+    fclose(outFile);
+
+    return 0;
+}
diff --git a/src/modules/audio_coding/neteq/test/delay_tool/parse_delay_file.m b/src/modules/audio_coding/neteq/test/delay_tool/parse_delay_file.m
new file mode 100644
index 0000000..77b394f
--- /dev/null
+++ b/src/modules/audio_coding/neteq/test/delay_tool/parse_delay_file.m
@@ -0,0 +1,191 @@
+function outStruct = parse_delay_file(file)
+
+fid = fopen(file, 'rb');
+if fid == -1
+    error('Cannot open file %s', file);
+end
+
+textline = fgetl(fid);
+if ~strncmp(textline, '#!NetEQ_Delay_Logging', 21)
+    error('Wrong file format');
+end
+
+ver = sscanf(textline, '#!NetEQ_Delay_Logging%d.%d');
+if ~all(ver == [2; 0])
+    error('Wrong version of delay logging function')
+end
+
+
+start_pos = ftell(fid);
+fseek(fid, -12, 'eof');
+textline = fgetl(fid);
+if ~strncmp(textline, 'End of file', 21)
+    error('File ending is not correct. Seems like the simulation ended abnormally.');
+end
+
+fseek(fid,-12-4, 'eof');
+Npackets = fread(fid, 1, 'int32');
+fseek(fid, start_pos, 'bof');
+
+rtpts = zeros(Npackets, 1);
+seqno = zeros(Npackets, 1);
+pt = zeros(Npackets, 1);
+plen = zeros(Npackets, 1);
+recin_t = nan*ones(Npackets, 1);
+decode_t = nan*ones(Npackets, 1);
+playout_delay = zeros(Npackets, 1);
+optbuf = zeros(Npackets, 1);
+
+fs_ix = 1;
+clock = 0;
+ts_ix = 1;
+ended = 0;
+late_packets = 0;
+fs_now = 8000;
+last_decode_k = 0;
+tot_expand = 0;
+tot_accelerate = 0;
+tot_preemptive = 0;
+
+while not(ended)
+    signal = fread(fid, 1, '*int32');
+    
+    switch signal
+        case 3 % NETEQ_DELAY_LOGGING_SIGNAL_CLOCK
+            clock = fread(fid, 1, '*float32');
+            
+            % keep on reading batches of M until the signal is no longer "3"
+            % read int32 + float32 in one go
+            % this is to save execution time
+            temp = [3; 0];
+            M = 120;
+            while all(temp(1,:) == 3)
+                fp = ftell(fid);
+                temp = fread(fid, [2 M], '*int32');
+            end
+            
+            % back up to last clock event
+            fseek(fid, fp - ftell(fid) + ...
+                (find(temp(1,:) ~= 3, 1 ) - 2) * 2 * 4 + 4, 'cof');
+            % read the last clock value
+            clock = fread(fid, 1, '*float32');
+            
+        case 1 % NETEQ_DELAY_LOGGING_SIGNAL_RECIN
+            temp_ts = fread(fid, 1, 'uint32');
+            
+            if late_packets > 0
+                temp_ix = ts_ix - 1;
+                while (temp_ix >= 1) && (rtpts(temp_ix) ~= temp_ts)
+                    % TODO(hlundin): use matlab vector search instead?
+                    temp_ix = temp_ix - 1;
+                end
+                
+                if temp_ix >= 1
+                    % the ts was found in the vector
+                    late_packets = late_packets - 1;
+                else
+                    temp_ix = ts_ix;
+                    ts_ix = ts_ix + 1;
+                end
+            else
+                temp_ix = ts_ix;
+                ts_ix = ts_ix + 1;
+            end
+            
+            rtpts(temp_ix) = temp_ts;
+            seqno(temp_ix) = fread(fid, 1, 'uint16');
+            pt(temp_ix) = fread(fid, 1, 'int32');
+            plen(temp_ix) = fread(fid, 1, 'int16');
+            recin_t(temp_ix) = clock;
+            
+        case 2 % NETEQ_DELAY_LOGGING_SIGNAL_FLUSH
+            % do nothing
+            
+        case 4 % NETEQ_DELAY_LOGGING_SIGNAL_EOF
+            ended = 1;
+            
+        case 5 % NETEQ_DELAY_LOGGING_SIGNAL_DECODE
+            last_decode_ts = fread(fid, 1, 'uint32');
+            temp_delay = fread(fid, 1, 'uint16');
+            
+            k = find(rtpts(1:(ts_ix - 1))==last_decode_ts,1,'last');
+            if ~isempty(k)
+                decode_t(k) = clock;
+                playout_delay(k) = temp_delay + ...
+                    5 *  fs_now / 8000; % add overlap length
+                last_decode_k = k;
+            end
+            
+        case 6 % NETEQ_DELAY_LOGGING_SIGNAL_CHANGE_FS
+            fsvec(fs_ix) = fread(fid, 1, 'uint16');
+            fschange_ts(fs_ix) = last_decode_ts;
+            fs_now = fsvec(fs_ix);
+            fs_ix = fs_ix + 1;
+            
+        case 7 % NETEQ_DELAY_LOGGING_SIGNAL_MERGE_INFO
+            playout_delay(last_decode_k) = playout_delay(last_decode_k) ...
+                + fread(fid, 1, 'int32');
+            
+        case 8 % NETEQ_DELAY_LOGGING_SIGNAL_EXPAND_INFO
+            temp = fread(fid, 1, 'int32');
+            if last_decode_k ~= 0
+                tot_expand = tot_expand + temp / (fs_now / 1000);
+            end                
+            
+        case 9 % NETEQ_DELAY_LOGGING_SIGNAL_ACCELERATE_INFO
+            temp = fread(fid, 1, 'int32');
+            if last_decode_k ~= 0
+                tot_accelerate = tot_accelerate + temp / (fs_now / 1000);
+            end                
+
+        case 10 % NETEQ_DELAY_LOGGING_SIGNAL_PREEMPTIVE_INFO
+            temp = fread(fid, 1, 'int32');
+            if last_decode_k ~= 0
+                tot_preemptive = tot_preemptive + temp / (fs_now / 1000);
+            end                
+            
+        case 11 % NETEQ_DELAY_LOGGING_SIGNAL_OPTBUF
+            optbuf(last_decode_k) = fread(fid, 1, 'int32');
+            
+        case 12 % NETEQ_DELAY_LOGGING_SIGNAL_DECODE_ONE_DESC
+            last_decode_ts = fread(fid, 1, 'uint32');
+            k = ts_ix - 1;
+            
+            while (k >= 1) && (rtpts(k) ~= last_decode_ts)
+                % TODO(hlundin): use matlab vector search instead?
+                k = k - 1;
+            end
+            
+            if k < 1
+                % packet not received yet
+                k = ts_ix;
+                rtpts(ts_ix) = last_decode_ts;
+                late_packets = late_packets + 1;
+            end
+            
+            decode_t(k) = clock;
+            playout_delay(k) = fread(fid, 1, 'uint16') + ...
+                5 *  fs_now / 8000; % add overlap length
+            last_decode_k = k;
+             
+    end
+    
+end
+
+
+fclose(fid);
+
+outStruct = struct(...
+    'ts', rtpts, ...
+    'sn', seqno, ...
+    'pt', pt,...
+    'plen', plen,...
+    'arrival', recin_t,...
+    'decode', decode_t,...
+    'fs', fsvec(:),...
+    'fschange_ts', fschange_ts(:),...
+    'playout_delay', playout_delay,...
+    'tot_expand', tot_expand,...
+    'tot_accelerate', tot_accelerate,...
+    'tot_preemptive', tot_preemptive,...
+    'optbuf', optbuf);
diff --git a/src/modules/audio_coding/neteq/test/delay_tool/plot_neteq_delay.m b/src/modules/audio_coding/neteq/test/delay_tool/plot_neteq_delay.m
new file mode 100644
index 0000000..bc1c85a
--- /dev/null
+++ b/src/modules/audio_coding/neteq/test/delay_tool/plot_neteq_delay.m
@@ -0,0 +1,187 @@
+function [delay_struct, delayvalues] = plot_neteq_delay(delayfile, varargin)
+
+% InfoStruct = plot_neteq_delay(delayfile)
+% InfoStruct = plot_neteq_delay(delayfile, 'skipdelay', skip_seconds)
+%
+% Henrik Lundin, 2006-11-17
+% Henrik Lundin, 2011-05-17
+%
+
+try
+    s = parse_delay_file(delayfile);
+catch
+    error(lasterr);
+end
+
+delayskip=0;
+noplot=0;
+arg_ptr=1;
+delaypoints=[];
+
+s.sn=unwrap_seqno(s.sn);
+
+while arg_ptr+1 <= nargin
+    switch lower(varargin{arg_ptr})
+    case {'skipdelay', 'delayskip'}
+        % skip a number of seconds in the beginning when calculating delays
+        delayskip = varargin{arg_ptr+1};
+        arg_ptr = arg_ptr + 2;
+    case 'noplot'
+        noplot=1;
+        arg_ptr = arg_ptr + 1;
+    case {'get_delay', 'getdelay'}
+        % return a vector of delay values for the points in the given vector
+        delaypoints = varargin{arg_ptr+1};
+        arg_ptr = arg_ptr + 2;
+    otherwise
+        warning('Unknown switch %s\n', varargin{arg_ptr});
+        arg_ptr = arg_ptr + 1;
+    end
+end
+
+% find lost frames that were covered by one-descriptor decoding
+one_desc_ix=find(isnan(s.arrival));
+for k=1:length(one_desc_ix)
+    ix=find(s.ts==max(s.ts(s.ts(one_desc_ix(k))>s.ts)));
+    s.sn(one_desc_ix(k))=s.sn(ix)+1;
+    s.pt(one_desc_ix(k))=s.pt(ix);
+    s.arrival(one_desc_ix(k))=s.arrival(ix)+s.decode(one_desc_ix(k))-s.decode(ix);
+end
+
+% remove duplicate received frames that were never decoded (RED codec)
+if length(unique(s.ts(isfinite(s.ts)))) < length(s.ts(isfinite(s.ts)))
+    ix=find(isfinite(s.decode));
+    s.sn=s.sn(ix);
+    s.ts=s.ts(ix);
+    s.arrival=s.arrival(ix);
+    s.playout_delay=s.playout_delay(ix);
+    s.pt=s.pt(ix);
+    s.optbuf=s.optbuf(ix);
+    plen=plen(ix);
+    s.decode=s.decode(ix);
+end
+
+% find non-unique sequence numbers
+[~,un_ix]=unique(s.sn);
+nonun_ix=setdiff(1:length(s.sn),un_ix);
+if ~isempty(nonun_ix)
+    warning('RTP sequence numbers are in error');
+end
+            
+% sort vectors
+[s.sn,sort_ix]=sort(s.sn);
+s.ts=s.ts(sort_ix);
+s.arrival=s.arrival(sort_ix);
+s.decode=s.decode(sort_ix);
+s.playout_delay=s.playout_delay(sort_ix);
+s.pt=s.pt(sort_ix);
+
+send_t=s.ts-s.ts(1);
+if length(s.fs)<1
+    warning('No info about sample rate found in file. Using default 8000.');
+    s.fs(1)=8000;
+    s.fschange_ts(1)=min(s.ts);
+elseif s.fschange_ts(1)>min(s.ts)
+    s.fschange_ts(1)=min(s.ts);
+end
+
+end_ix=length(send_t);
+for k=length(s.fs):-1:1
+    start_ix=find(s.ts==s.fschange_ts(k));
+    send_t(start_ix:end_ix)=send_t(start_ix:end_ix)/s.fs(k)*1000;
+    s.playout_delay(start_ix:end_ix)=s.playout_delay(start_ix:end_ix)/s.fs(k)*1000;
+    s.optbuf(start_ix:end_ix)=s.optbuf(start_ix:end_ix)/s.fs(k)*1000;
+    end_ix=start_ix-1;
+end
+
+tot_time=max(send_t)-min(send_t);
+
+seq_ix=s.sn-min(s.sn)+1;
+send_t=send_t+max(min(s.arrival-send_t),0);
+
+plot_send_t=nan*ones(max(seq_ix),1);
+plot_send_t(seq_ix)=send_t;
+plot_nw_delay=nan*ones(max(seq_ix),1);
+plot_nw_delay(seq_ix)=s.arrival-send_t;
+
+cng_ix=find(s.pt~=13); % find those packets that are not CNG/SID
+    
+if noplot==0
+    h=plot(plot_send_t/1000,plot_nw_delay);
+    set(h,'color',0.75*[1 1 1]);
+    hold on
+    if any(s.optbuf~=0)
+        peak_ix=find(s.optbuf(cng_ix)<0); % peak mode is labeled with negative values
+        no_peak_ix=find(s.optbuf(cng_ix)>0); %setdiff(1:length(cng_ix),peak_ix);
+        h1=plot(send_t(cng_ix(peak_ix))/1000,...
+            s.arrival(cng_ix(peak_ix))+abs(s.optbuf(cng_ix(peak_ix)))-send_t(cng_ix(peak_ix)),...
+            'r.');
+        h2=plot(send_t(cng_ix(no_peak_ix))/1000,...
+            s.arrival(cng_ix(no_peak_ix))+abs(s.optbuf(cng_ix(no_peak_ix)))-send_t(cng_ix(no_peak_ix)),...
+            'g.');
+        set([h1, h2],'markersize',1)
+    end
+    %h=plot(send_t(seq_ix)/1000,s.decode+s.playout_delay-send_t(seq_ix));
+    h=plot(send_t(cng_ix)/1000,s.decode(cng_ix)+s.playout_delay(cng_ix)-send_t(cng_ix));
+    set(h,'linew',1.5);
+    hold off
+    ax1=axis;
+    axis tight
+    ax2=axis;
+    axis([ax2(1:3) ax1(4)])
+end
+
+
+% calculate delays and other parameters
+
+delayskip_ix = find(send_t-send_t(1)>=delayskip*1000, 1 );
+
+use_ix = intersect(cng_ix,... % use those that are not CNG/SID frames...
+    intersect(find(isfinite(s.decode)),... % ... that did arrive ...
+    (delayskip_ix:length(s.decode))')); % ... and are sent after delayskip seconds
+
+mean_delay = mean(s.decode(use_ix)+s.playout_delay(use_ix)-send_t(use_ix));
+neteq_delay = mean(s.decode(use_ix)+s.playout_delay(use_ix)-s.arrival(use_ix));
+
+Npack=max(s.sn(delayskip_ix:end))-min(s.sn(delayskip_ix:end))+1;
+nw_lossrate=(Npack-length(s.sn(delayskip_ix:end)))/Npack;
+neteq_lossrate=(length(s.sn(delayskip_ix:end))-length(use_ix))/Npack;
+
+delay_struct=struct('mean_delay',mean_delay,'neteq_delay',neteq_delay,...
+    'nw_lossrate',nw_lossrate,'neteq_lossrate',neteq_lossrate,...
+    'tot_expand',round(s.tot_expand),'tot_accelerate',round(s.tot_accelerate),...
+    'tot_preemptive',round(s.tot_preemptive),'tot_time',tot_time,...
+    'filename',delayfile,'units','ms','fs',unique(s.fs));
+    
+if not(isempty(delaypoints))
+    delayvalues=interp1(send_t(cng_ix),...
+        s.decode(cng_ix)+s.playout_delay(cng_ix)-send_t(cng_ix),...
+        delaypoints,'nearest',NaN);
+else
+    delayvalues=[];
+end
+
+
+
+% SUBFUNCTIONS %
+
+function y=unwrap_seqno(x)
+
+jumps=find(abs((diff(x)-1))>65000);
+
+while ~isempty(jumps)
+    n=jumps(1);
+    if x(n+1)-x(n) < 0
+        % negative jump
+        x(n+1:end)=x(n+1:end)+65536;
+    else
+        % positive jump
+        x(n+1:end)=x(n+1:end)-65536;
+    end
+    
+    jumps=find(abs((diff(x(n+1:end))-1))>65000);
+end
+
+y=x;
+
+return;
diff --git a/src/modules/audio_coding/neteq/test/ptypes.txt b/src/modules/audio_coding/neteq/test/ptypes.txt
new file mode 100644
index 0000000..c3d4e25
--- /dev/null
+++ b/src/modules/audio_coding/neteq/test/ptypes.txt
@@ -0,0 +1,20 @@
+pcmu 0
+pcma 8
+cn 13
+//ipcmwb 97
+//eg711u 100
+//eg711a 101
+ilbc 102
+isac 103
+isacswb 104
+avt 106
+red 117
+cn_wb 98
+cn_swb32 99
+pcm16b 93
+pcm16b_wb 94
+pcm16b_swb32khz 95
+//pcm16b_swb48khz 96
+//mpeg4aac 122
+g722 9
+celt32 114
diff --git a/src/modules/audio_coding/neteq/test/rtp_to_text.cc b/src/modules/audio_coding/neteq/test/rtp_to_text.cc
new file mode 100644
index 0000000..1112d79
--- /dev/null
+++ b/src/modules/audio_coding/neteq/test/rtp_to_text.cc
@@ -0,0 +1,124 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Parses an rtpdump file and outputs a text table parsable by parseLog.m.
+ * The output file will have .txt appended to the specified base name.
+ * $ rtp_to_text [-d] <input_rtp_file> <output_base_name>
+ *
+ * -d   RTP headers only
+ *
+ */
+
+#include "data_log.h"
+#include "NETEQTEST_DummyRTPpacket.h"
+#include "NETEQTEST_RTPpacket.h"
+
+#include <stdio.h>
+#include <string.h>
+
+#include <iostream>
+#include <string>
+#include <vector>
+
+/*********************/
+/* Misc. definitions */
+/*********************/
+
+#define FIRSTLINELEN 40
+
+using ::webrtc::DataLog;
+
+int main(int argc, char* argv[])
+{
+    int arg_count = 1;
+    NETEQTEST_RTPpacket* packet;
+
+    if (argc < 3)
+    {
+      printf("Usage: %s [-d] <input_rtp_file> <output_base_name>\n", argv[0]);
+      return -1;
+    }
+
+    // Parse dummy option
+    if (argc >= 3 && strcmp(argv[arg_count], "-d") == 0)
+    {
+        packet = new NETEQTEST_DummyRTPpacket;
+        ++arg_count;
+    }
+    else
+    {
+        packet = new NETEQTEST_RTPpacket;
+    }
+
+    std::string input_filename = argv[arg_count++];
+    std::string table_name = argv[arg_count];
+
+    std::cout << "Input file: " << input_filename << std::endl;
+    std::cout << "Output file: " << table_name << ".txt" << std::endl;
+
+    FILE *inFile=fopen(input_filename.c_str(),"rb");
+    if (!inFile)
+    {
+        std::cout << "Cannot open input file " << input_filename << std::endl;
+        return -1;
+    }
+
+    // Set up the DataLog and define the table
+    DataLog::CreateLog();
+    if (DataLog::AddTable(table_name) < 0)
+    {
+        std::cout << "Error adding table " << table_name << ".txt" << std::endl;
+        return -1;
+    }
+
+    DataLog::AddColumn(table_name, "seq", 1);
+    DataLog::AddColumn(table_name, "ssrc", 1);
+    DataLog::AddColumn(table_name, "payload type", 1);
+    DataLog::AddColumn(table_name, "length", 1);
+    DataLog::AddColumn(table_name, "timestamp", 1);
+    DataLog::AddColumn(table_name, "marker bit", 1);
+    DataLog::AddColumn(table_name, "arrival", 1);
+
+    // read file header
+    char firstline[FIRSTLINELEN];
+    if (fgets(firstline, FIRSTLINELEN, inFile) == NULL)
+    {
+        std::cout << "Error reading file " << input_filename << std::endl;
+        return -1;
+    }
+
+    // start_sec + start_usec + source + port + padding
+    if (fread(firstline, 4+4+4+2+2, 1, inFile) != 1)
+    {
+        std::cout << "Error reading file " << input_filename << std::endl;
+        return -1;
+    }
+
+    while (packet->readFromFile(inFile) >= 0)
+    {
+        // write packet headers to
+        DataLog::InsertCell(table_name, "seq", packet->sequenceNumber());
+        DataLog::InsertCell(table_name, "ssrc", packet->SSRC());
+        DataLog::InsertCell(table_name, "payload type", packet->payloadType());
+        DataLog::InsertCell(table_name, "length", packet->dataLen());
+        DataLog::InsertCell(table_name, "timestamp", packet->timeStamp());
+        DataLog::InsertCell(table_name, "marker bit", packet->markerBit());
+        DataLog::InsertCell(table_name, "arrival", packet->time());
+        DataLog::NextRow(table_name);
+        return -1;
+    }
+
+    DataLog::ReturnLog();
+
+    fclose(inFile);
+
+    return 0;
+}
diff --git a/src/modules/audio_coding/neteq/unmute_signal.c b/src/modules/audio_coding/neteq/unmute_signal.c
new file mode 100644
index 0000000..ee9daa8
--- /dev/null
+++ b/src/modules/audio_coding/neteq/unmute_signal.c
@@ -0,0 +1,41 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This function "unmutes" a vector on a sample by sample basis.
+ */
+
+#include "dsp_helpfunctions.h"
+
+#include "signal_processing_library.h"
+
+
+void WebRtcNetEQ_UnmuteSignal(WebRtc_Word16 *pw16_inVec, WebRtc_Word16 *startMuteFact,
+                              WebRtc_Word16 *pw16_outVec, WebRtc_Word16 unmuteFact,
+                              WebRtc_Word16 N)
+{
+    int i;
+    WebRtc_UWord16 w16_tmp;
+    WebRtc_Word32 w32_tmp;
+
+    w16_tmp = (WebRtc_UWord16) *startMuteFact;
+    w32_tmp = WEBRTC_SPL_LSHIFT_W32((WebRtc_Word32)w16_tmp,6) + 32;
+    for (i = 0; i < N; i++)
+    {
+        pw16_outVec[i]
+            = (WebRtc_Word16) ((WEBRTC_SPL_MUL_16_16(w16_tmp, pw16_inVec[i]) + 8192) >> 14);
+        w32_tmp += unmuteFact;
+        w32_tmp = WEBRTC_SPL_MAX(0, w32_tmp);
+        w16_tmp = (WebRtc_UWord16) WEBRTC_SPL_RSHIFT_W32(w32_tmp, 6); /* 20 - 14 = 6 */
+        w16_tmp = WEBRTC_SPL_MIN(16384, w16_tmp);
+    }
+    *startMuteFact = (WebRtc_Word16) w16_tmp;
+}
+
diff --git a/src/modules/audio_coding/neteq/webrtc_neteq.c b/src/modules/audio_coding/neteq/webrtc_neteq.c
new file mode 100644
index 0000000..5e99fd8
--- /dev/null
+++ b/src/modules/audio_coding/neteq/webrtc_neteq.c
@@ -0,0 +1,1654 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Implementation of main NetEQ API.
+ */
+
+#include "webrtc_neteq.h"
+#include "webrtc_neteq_internal.h"
+
+#include <assert.h>
+#include <string.h>
+
+#include "typedefs.h"
+#include "signal_processing_library.h"
+
+#include "neteq_error_codes.h"
+#include "mcu_dsp_common.h"
+#include "rtcp.h"
+
+#define RETURN_ON_ERROR( macroExpr, macroInstPtr )  { \
+    if ((macroExpr) != 0) { \
+    if ((macroExpr) == -1) { \
+    (macroInstPtr)->ErrorCode = - (NETEQ_OTHER_ERROR); \
+    } else { \
+    (macroInstPtr)->ErrorCode = -((WebRtc_Word16) (macroExpr)); \
+    } \
+    return(-1); \
+    } }
+
+int WebRtcNetEQ_strncpy(char *strDest, int numberOfElements,
+                        const char *strSource, int count)
+{
+    /* check vector lengths */
+    if (count > numberOfElements)
+    {
+        strDest[0] = '\0';
+        return (-1);
+    }
+    else
+    {
+        strncpy(strDest, strSource, count);
+        return (0);
+    }
+}
+
+/**********************************************************
+ * NETEQ Functions
+ */
+
+/*****************************************
+ * Info functions
+ */
+
+int WebRtcNetEQ_GetVersion(char *version)
+{
+    char versionString[] = "3.3.0\0    ";
+    char endChar[] = " ";
+    int i = 0;
+    while ((versionString[i] != endChar[0]) && (i <= 20))
+    {
+        version[i] = versionString[i]; /* To avoid using strcpy */
+        i++;
+    }
+    return (0);
+}
+
+int WebRtcNetEQ_GetErrorCode(void *inst)
+{
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    if (NetEqMainInst == NULL) return (-1);
+    return (NetEqMainInst->ErrorCode);
+}
+
+int WebRtcNetEQ_GetErrorName(int errorCode, char *errorName, int maxStrLen)
+{
+    if ((errorName == NULL) || (maxStrLen <= 0))
+    {
+        return (-1);
+    }
+
+    if (errorCode < 0)
+    {
+        errorCode = -errorCode; // absolute value
+    }
+
+    switch (errorCode)
+    {
+        case 1: // could be -1
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "OTHER_ERROR", maxStrLen);
+            break;
+        }
+        case 1001:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "FAULTY_INSTRUCTION", maxStrLen);
+            break;
+        }
+        case 1002:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "FAULTY_NETWORK_TYPE", maxStrLen);
+            break;
+        }
+        case 1003:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "FAULTY_DELAYVALUE", maxStrLen);
+            break;
+        }
+        case 1004:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "FAULTY_PLAYOUTMODE", maxStrLen);
+            break;
+        }
+        case 1005:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "CORRUPT_INSTANCE", maxStrLen);
+            break;
+        }
+        case 1006:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "ILLEGAL_MASTER_SLAVE_SWITCH", maxStrLen);
+            break;
+        }
+        case 1007:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "MASTER_SLAVE_ERROR", maxStrLen);
+            break;
+        }
+        case 2001:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "UNKNOWN_BUFSTAT_DECISION", maxStrLen);
+            break;
+        }
+        case 2002:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "RECOUT_ERROR_DECODING", maxStrLen);
+            break;
+        }
+        case 2003:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "RECOUT_ERROR_SAMPLEUNDERRUN", maxStrLen);
+            break;
+        }
+        case 2004:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "RECOUT_ERROR_DECODED_TOO_MUCH",
+                maxStrLen);
+            break;
+        }
+        case 3001:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "RECIN_CNG_ERROR", maxStrLen);
+            break;
+        }
+        case 3002:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "RECIN_UNKNOWNPAYLOAD", maxStrLen);
+            break;
+        }
+        case 3003:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "RECIN_BUFFERINSERT_ERROR", maxStrLen);
+            break;
+        }
+        case 4001:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "PBUFFER_INIT_ERROR", maxStrLen);
+            break;
+        }
+        case 4002:
+        case 4003:
+        case 4004:
+        case 4005:
+        case 4006:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "PBUFFER_INSERT_ERROR1", maxStrLen);
+            break;
+        }
+        case 4007:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "UNKNOWN_G723_HEADER", maxStrLen);
+            break;
+        }
+        case 4008:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "PBUFFER_NONEXISTING_PACKET", maxStrLen);
+            break;
+        }
+        case 4009:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "PBUFFER_NOT_INITIALIZED", maxStrLen);
+            break;
+        }
+        case 4010:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "AMBIGUOUS_ILBC_FRAME_SIZE", maxStrLen);
+            break;
+        }
+        case 5001:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "CODEC_DB_FULL", maxStrLen);
+            break;
+        }
+        case 5002:
+        case 5003:
+        case 5004:
+        case 5005:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "CODEC_DB_NOT_EXIST", maxStrLen);
+            break;
+        }
+        case 5006:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "CODEC_DB_UNKNOWN_CODEC", maxStrLen);
+            break;
+        }
+        case 5007:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "CODEC_DB_PAYLOAD_TAKEN", maxStrLen);
+            break;
+        }
+        case 5008:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "CODEC_DB_UNSUPPORTED_CODEC", maxStrLen);
+            break;
+        }
+        case 5009:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "CODEC_DB_UNSUPPORTED_FS", maxStrLen);
+            break;
+        }
+        case 6001:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "DTMF_DEC_PARAMETER_ERROR", maxStrLen);
+            break;
+        }
+        case 6002:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "DTMF_INSERT_ERROR", maxStrLen);
+            break;
+        }
+        case 6003:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "DTMF_GEN_UNKNOWN_SAMP_FREQ", maxStrLen);
+            break;
+        }
+        case 6004:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "DTMF_NOT_SUPPORTED", maxStrLen);
+            break;
+        }
+        case 7001:
+        case 7002:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "RED_SPLIT_ERROR", maxStrLen);
+            break;
+        }
+        case 7003:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "RTP_TOO_SHORT_PACKET", maxStrLen);
+            break;
+        }
+        case 7004:
+        {
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "RTP_CORRUPT_PACKET", maxStrLen);
+            break;
+        }
+        default:
+        {
+            /* check for decoder error ranges */
+            if (errorCode >= 6010 && errorCode <= 6810)
+            {
+                /* iSAC error code */
+                WebRtcNetEQ_strncpy(errorName, maxStrLen, "iSAC ERROR", maxStrLen);
+                break;
+            }
+
+            WebRtcNetEQ_strncpy(errorName, maxStrLen, "UNKNOWN_ERROR", maxStrLen);
+            return (-1);
+        }
+    }
+
+    return (0);
+}
+
+/* Assign functions (create not allowed in order to avoid malloc in lib) */
+int WebRtcNetEQ_AssignSize(int *sizeinbytes)
+{
+    *sizeinbytes = (sizeof(MainInst_t) * 2) / sizeof(WebRtc_Word16);
+    return (0);
+}
+
+int WebRtcNetEQ_Assign(void **inst, void *NETEQ_inst_Addr)
+{
+    int ok = 0;
+    MainInst_t *NetEqMainInst = (MainInst_t*) NETEQ_inst_Addr;
+    *inst = NETEQ_inst_Addr;
+    if (*inst == NULL) return (-1);
+    /* Clear memory */
+    WebRtcSpl_MemSetW16((WebRtc_Word16*) NetEqMainInst, 0,
+        (sizeof(MainInst_t) / sizeof(WebRtc_Word16)));
+    ok = WebRtcNetEQ_McuReset(&NetEqMainInst->MCUinst);
+    if (ok != 0)
+    {
+        NetEqMainInst->ErrorCode = -ok;
+        return (-1);
+    }
+    return (0);
+}
+
+int WebRtcNetEQ_GetRecommendedBufferSize(void *inst, const enum WebRtcNetEQDecoder *codec,
+                                         int noOfCodecs, enum WebRtcNetEQNetworkType nwType,
+                                         int *MaxNoOfPackets, int *sizeinbytes)
+{
+    int ok = 0;
+    int multiplier;
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    if (NetEqMainInst == NULL) return (-1);
+    *MaxNoOfPackets = 0;
+    *sizeinbytes = 0;
+    ok = WebRtcNetEQ_GetDefaultCodecSettings(codec, noOfCodecs, sizeinbytes, MaxNoOfPackets);
+    if (ok != 0)
+    {
+        NetEqMainInst->ErrorCode = -ok;
+        return (-1);
+    }
+    if (nwType == kUDPNormal)
+    {
+        multiplier = 1;
+    }
+    else if (nwType == kUDPVideoSync)
+    {
+        multiplier = 4;
+    }
+    else if (nwType == kTCPNormal)
+    {
+        multiplier = 4;
+    }
+    else if (nwType == kTCPLargeJitter)
+    {
+        multiplier = 8;
+    }
+    else if (nwType == kTCPXLargeJitter)
+    {
+        multiplier = 20;
+    }
+    else
+    {
+        NetEqMainInst->ErrorCode = -FAULTY_NETWORK_TYPE;
+        return (-1);
+    }
+    *MaxNoOfPackets = (*MaxNoOfPackets) * multiplier;
+    *sizeinbytes = (*sizeinbytes) * multiplier;
+    if (ok != 0)
+    {
+        NetEqMainInst->ErrorCode = -ok;
+        return (-1);
+    }
+    return (ok);
+}
+
+int WebRtcNetEQ_AssignBuffer(void *inst, int MaxNoOfPackets, void *NETEQ_Buffer_Addr,
+                             int sizeinbytes)
+{
+    int ok;
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    if (NetEqMainInst == NULL) return (-1);
+    ok = WebRtcNetEQ_PacketBufferInit(&NetEqMainInst->MCUinst.PacketBuffer_inst,
+        MaxNoOfPackets, (WebRtc_Word16*) NETEQ_Buffer_Addr, (sizeinbytes >> 1));
+    if (ok != 0)
+    {
+        NetEqMainInst->ErrorCode = -ok;
+        return (-1);
+    }
+    return (ok);
+}
+
+/************************************************
+ * Init functions
+ */
+
+/****************************************************************************
+ * WebRtcNetEQ_Init(...)
+ *
+ * Initialize NetEQ.
+ *
+ * Input:
+ *      - inst          : NetEQ instance
+ *		- fs            : Initial sample rate in Hz (may change with payload)
+ *
+ * Output:
+ *		- inst	        : Initialized NetEQ instance
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_Init(void *inst, WebRtc_UWord16 fs)
+{
+    int ok = 0;
+
+    /* Typecast inst to internal instance format */
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+
+    if (NetEqMainInst == NULL)
+    {
+        return (-1);
+    }
+
+#ifdef NETEQ_VAD
+    /* Start out with no PostDecode VAD instance */
+    NetEqMainInst->DSPinst.VADInst.VADState = NULL;
+    /* Also set all VAD function pointers to NULL */
+    NetEqMainInst->DSPinst.VADInst.initFunction = NULL;
+    NetEqMainInst->DSPinst.VADInst.setmodeFunction = NULL;
+    NetEqMainInst->DSPinst.VADInst.VADFunction = NULL;
+#endif /* NETEQ_VAD */
+
+    ok = WebRtcNetEQ_DSPinit(NetEqMainInst); /* Init addresses between MCU and DSP */
+    RETURN_ON_ERROR(ok, NetEqMainInst);
+
+    ok = WebRtcNetEQ_DSPInit(&NetEqMainInst->DSPinst, fs); /* Init dsp side */
+    RETURN_ON_ERROR(ok, NetEqMainInst);
+    /* set BGN mode to default, since it is not cleared by DSP init function */
+    NetEqMainInst->DSPinst.BGNInst.bgnMode = BGN_ON;
+
+    /* init statistics functions and counters */
+    ok = WebRtcNetEQ_ClearInCallStats(&NetEqMainInst->DSPinst);
+    RETURN_ON_ERROR(ok, NetEqMainInst);
+    ok = WebRtcNetEQ_ClearPostCallStats(&NetEqMainInst->DSPinst);
+    RETURN_ON_ERROR(ok, NetEqMainInst);
+    ok = WebRtcNetEQ_ResetMcuJitterStat(&NetEqMainInst->MCUinst);
+    RETURN_ON_ERROR(ok, NetEqMainInst);
+
+    /* flush packet buffer */
+    ok = WebRtcNetEQ_PacketBufferFlush(&NetEqMainInst->MCUinst.PacketBuffer_inst);
+    RETURN_ON_ERROR(ok, NetEqMainInst);
+
+    /* set some variables to initial values */
+    NetEqMainInst->MCUinst.current_Codec = -1;
+    NetEqMainInst->MCUinst.current_Payload = -1;
+    NetEqMainInst->MCUinst.first_packet = 1;
+    NetEqMainInst->MCUinst.one_desc = 0;
+    NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.extraDelayMs = 0;
+    NetEqMainInst->MCUinst.NoOfExpandCalls = 0;
+    NetEqMainInst->MCUinst.fs = fs;
+
+#ifdef NETEQ_ATEVENT_DECODE
+    /* init DTMF decoder */
+    ok = WebRtcNetEQ_DtmfDecoderInit(&(NetEqMainInst->MCUinst.DTMF_inst),fs,560);
+    RETURN_ON_ERROR(ok, NetEqMainInst);
+#endif
+
+    /* init RTCP statistics */
+    WebRtcNetEQ_RTCPInit(&(NetEqMainInst->MCUinst.RTCP_inst), 0);
+
+    /* set BufferStat struct to zero */
+    WebRtcSpl_MemSetW16((WebRtc_Word16*) &(NetEqMainInst->MCUinst.BufferStat_inst), 0,
+        sizeof(BufstatsInst_t) / sizeof(WebRtc_Word16));
+
+    /* reset automode */
+    WebRtcNetEQ_ResetAutomode(&(NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst),
+        NetEqMainInst->MCUinst.PacketBuffer_inst.maxInsertPositions);
+
+    NetEqMainInst->ErrorCode = 0;
+
+#ifdef NETEQ_STEREO
+    /* set master/slave info to undecided */
+    NetEqMainInst->masterSlave = 0;
+#endif
+
+    return (ok);
+}
+
+int WebRtcNetEQ_FlushBuffers(void *inst)
+{
+    int ok = 0;
+
+    /* Typecast inst to internal instance format */
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+
+    if (NetEqMainInst == NULL)
+    {
+        return (-1);
+    }
+
+    /* Flush packet buffer */
+    ok = WebRtcNetEQ_PacketBufferFlush(&NetEqMainInst->MCUinst.PacketBuffer_inst);
+    RETURN_ON_ERROR(ok, NetEqMainInst);
+
+    /* Set MCU to wait for new codec */
+    NetEqMainInst->MCUinst.first_packet = 1;
+
+    /* Flush speech buffer */
+    ok = WebRtcNetEQ_FlushSpeechBuffer(&NetEqMainInst->DSPinst);
+    RETURN_ON_ERROR(ok, NetEqMainInst);
+
+    return 0;
+}
+
+int WebRtcNetEQ_SetAVTPlayout(void *inst, int PlayoutAVTon)
+{
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    if (NetEqMainInst == NULL) return (-1);
+#ifdef NETEQ_ATEVENT_DECODE
+    NetEqMainInst->MCUinst.AVT_PlayoutOn = PlayoutAVTon;
+    return(0);
+#else
+    if (PlayoutAVTon != 0)
+    {
+        NetEqMainInst->ErrorCode = -DTMF_NOT_SUPPORTED;
+        return (-1);
+    }
+    else
+    {
+        return (0);
+    }
+#endif
+}
+
+int WebRtcNetEQ_SetExtraDelay(void *inst, int DelayInMs)
+{
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    if (NetEqMainInst == NULL) return (-1);
+    if ((DelayInMs < 0) || (DelayInMs > 1000))
+    {
+        NetEqMainInst->ErrorCode = -FAULTY_DELAYVALUE;
+        return (-1);
+    }
+    NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.extraDelayMs = DelayInMs;
+    return (0);
+}
+
+int WebRtcNetEQ_SetPlayoutMode(void *inst, enum WebRtcNetEQPlayoutMode playoutMode)
+{
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    if (NetEqMainInst == NULL) return (-1);
+    if ((playoutMode != kPlayoutOn) && (playoutMode != kPlayoutOff) && (playoutMode
+        != kPlayoutFax) && (playoutMode != kPlayoutStreaming))
+    {
+        NetEqMainInst->ErrorCode = -FAULTY_PLAYOUTMODE;
+        return (-1);
+    }
+    else
+    {
+        NetEqMainInst->MCUinst.NetEqPlayoutMode = playoutMode;
+        return (0);
+    }
+}
+
+int WebRtcNetEQ_SetBGNMode(void *inst, enum WebRtcNetEQBGNMode bgnMode)
+{
+
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+
+    /* Instance sanity */
+    if (NetEqMainInst == NULL) return (-1);
+
+    /* Check for corrupt/cleared instance */
+    if (NetEqMainInst->MCUinst.main_inst != NetEqMainInst)
+    {
+        /* Instance is corrupt */
+        NetEqMainInst->ErrorCode = CORRUPT_INSTANCE;
+        return (-1);
+    }
+
+    NetEqMainInst->DSPinst.BGNInst.bgnMode = (enum BGNMode) bgnMode;
+
+    return (0);
+}
+
+int WebRtcNetEQ_GetBGNMode(const void *inst, enum WebRtcNetEQBGNMode *bgnMode)
+{
+
+    const MainInst_t *NetEqMainInst = (const MainInst_t*) inst;
+
+    /* Instance sanity */
+    if (NetEqMainInst == NULL) return (-1);
+
+    *bgnMode = (enum WebRtcNetEQBGNMode) NetEqMainInst->DSPinst.BGNInst.bgnMode;
+
+    return (0);
+}
+
+/************************************************
+ * CodecDB functions
+ */
+
+int WebRtcNetEQ_CodecDbReset(void *inst)
+{
+    int ok = 0;
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    if (NetEqMainInst == NULL) return (-1);
+    ok = WebRtcNetEQ_DbReset(&NetEqMainInst->MCUinst.codec_DB_inst);
+    if (ok != 0)
+    {
+        NetEqMainInst->ErrorCode = -ok;
+        return (-1);
+    }
+
+    /* set function pointers to NULL to prevent RecOut from using the codec */
+    NetEqMainInst->DSPinst.codec_ptr_inst.funcDecode = NULL;
+    NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodeRCU = NULL;
+    NetEqMainInst->DSPinst.codec_ptr_inst.funcAddLatePkt = NULL;
+    NetEqMainInst->DSPinst.codec_ptr_inst.funcDecode = NULL;
+    NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodeInit = NULL;
+    NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodePLC = NULL;
+    NetEqMainInst->DSPinst.codec_ptr_inst.funcGetMDinfo = NULL;
+    NetEqMainInst->DSPinst.codec_ptr_inst.funcUpdBWEst = NULL;
+    NetEqMainInst->DSPinst.codec_ptr_inst.funcGetErrorCode = NULL;
+
+    return (0);
+}
+
+int WebRtcNetEQ_CodecDbGetSizeInfo(void *inst, WebRtc_Word16 *UsedEntries,
+                                   WebRtc_Word16 *MaxEntries)
+{
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    if (NetEqMainInst == NULL) return (-1);
+    *MaxEntries = NUM_CODECS;
+    *UsedEntries = NetEqMainInst->MCUinst.codec_DB_inst.nrOfCodecs;
+    return (0);
+}
+
+int WebRtcNetEQ_CodecDbGetCodecInfo(void *inst, WebRtc_Word16 Entry,
+                                    enum WebRtcNetEQDecoder *codec)
+{
+    int i;
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    if (NetEqMainInst == NULL) return (-1);
+    *codec = (enum WebRtcNetEQDecoder) 0;
+    if ((Entry >= 0) && (Entry < NetEqMainInst->MCUinst.codec_DB_inst.nrOfCodecs))
+    {
+        for (i = 0; i < NUM_TOTAL_CODECS; i++)
+        {
+            if (NetEqMainInst->MCUinst.codec_DB_inst.position[i] == Entry)
+            {
+                *codec = (enum WebRtcNetEQDecoder) i;
+            }
+        }
+    }
+    else
+    {
+        NetEqMainInst->ErrorCode = -(CODEC_DB_NOT_EXIST1);
+        return (-1);
+    }
+    return (0);
+}
+
+int WebRtcNetEQ_CodecDbAdd(void *inst, WebRtcNetEQ_CodecDef *codecInst)
+{
+    int ok = 0;
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    if (NetEqMainInst == NULL) return (-1);
+    ok = WebRtcNetEQ_DbAdd(&NetEqMainInst->MCUinst.codec_DB_inst, codecInst->codec,
+        codecInst->payloadType, codecInst->funcDecode, codecInst->funcDecodeRCU,
+        codecInst->funcDecodePLC, codecInst->funcDecodeInit, codecInst->funcAddLatePkt,
+        codecInst->funcGetMDinfo, codecInst->funcGetPitch, codecInst->funcUpdBWEst,
+        codecInst->funcGetErrorCode, codecInst->codec_state, codecInst->codec_fs);
+    if (ok != 0)
+    {
+        NetEqMainInst->ErrorCode = -ok;
+        return (-1);
+    }
+    return (ok);
+}
+
+int WebRtcNetEQ_CodecDbRemove(void *inst, enum WebRtcNetEQDecoder codec)
+{
+    int ok = 0;
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    if (NetEqMainInst == NULL) return (-1);
+
+    /* check if currently used codec is being removed */
+    if (NetEqMainInst->MCUinst.current_Codec == (WebRtc_Word16) codec)
+    {
+        /* set function pointers to NULL to prevent RecOut from using the codec */
+        NetEqMainInst->DSPinst.codec_ptr_inst.funcDecode = NULL;
+        NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodeRCU = NULL;
+        NetEqMainInst->DSPinst.codec_ptr_inst.funcAddLatePkt = NULL;
+        NetEqMainInst->DSPinst.codec_ptr_inst.funcDecode = NULL;
+        NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodeInit = NULL;
+        NetEqMainInst->DSPinst.codec_ptr_inst.funcDecodePLC = NULL;
+        NetEqMainInst->DSPinst.codec_ptr_inst.funcGetMDinfo = NULL;
+        NetEqMainInst->DSPinst.codec_ptr_inst.funcUpdBWEst = NULL;
+        NetEqMainInst->DSPinst.codec_ptr_inst.funcGetErrorCode = NULL;
+    }
+
+    ok = WebRtcNetEQ_DbRemove(&NetEqMainInst->MCUinst.codec_DB_inst, codec);
+    if (ok != 0)
+    {
+        NetEqMainInst->ErrorCode = -ok;
+        return (-1);
+    }
+    return (ok);
+}
+
+/*********************************
+ * Real-time functions
+ */
+
+int WebRtcNetEQ_RecIn(void *inst, WebRtc_Word16 *p_w16datagramstart, WebRtc_Word16 w16_RTPlen,
+                      WebRtc_UWord32 uw32_timeRec)
+{
+    int ok = 0;
+    RTPPacket_t RTPpacket;
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    if (NetEqMainInst == NULL) return (-1);
+
+    /* Check for corrupt/cleared instance */
+    if (NetEqMainInst->MCUinst.main_inst != NetEqMainInst)
+    {
+        /* Instance is corrupt */
+        NetEqMainInst->ErrorCode = CORRUPT_INSTANCE;
+        return (-1);
+    }
+
+    /* Parse RTP header */
+    ok = WebRtcNetEQ_RTPPayloadInfo(p_w16datagramstart, w16_RTPlen, &RTPpacket);
+    if (ok != 0)
+    {
+        NetEqMainInst->ErrorCode = -ok;
+        return (-1);
+    }
+
+    ok = WebRtcNetEQ_RecInInternal(&NetEqMainInst->MCUinst, &RTPpacket, uw32_timeRec);
+    if (ok != 0)
+    {
+        NetEqMainInst->ErrorCode = -ok;
+        return (-1);
+    }
+    return (ok);
+}
+
+/****************************************************************************
+ * WebRtcNetEQ_RecInRTPStruct(...)
+ *
+ * Alternative RecIn function, used when the RTP data has already been
+ * parsed into an RTP info struct (WebRtcNetEQ_RTPInfo).
+ *
+ * Input:
+ *		- inst	            : NetEQ instance
+ *		- rtpInfo		    : Pointer to RTP info
+ *		- payloadPtr        : Pointer to the RTP payload (first byte after header)
+ *      - payloadLenBytes   : Length (in bytes) of the payload in payloadPtr
+ *      - timeRec           : Receive time (in timestamps of the used codec)
+ *
+ * Return value			    :  0 - Ok
+ *                            -1 - Error
+ */
+int WebRtcNetEQ_RecInRTPStruct(void *inst, WebRtcNetEQ_RTPInfo *rtpInfo,
+                               const WebRtc_UWord8 *payloadPtr, WebRtc_Word16 payloadLenBytes,
+                               WebRtc_UWord32 uw32_timeRec)
+{
+    int ok = 0;
+    RTPPacket_t RTPpacket;
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    if (NetEqMainInst == NULL)
+    {
+        return (-1);
+    }
+
+    /* Check for corrupt/cleared instance */
+    if (NetEqMainInst->MCUinst.main_inst != NetEqMainInst)
+    {
+        /* Instance is corrupt */
+        NetEqMainInst->ErrorCode = CORRUPT_INSTANCE;
+        return (-1);
+    }
+
+    /* Load NetEQ's RTP struct from Module RTP struct */
+    RTPpacket.payloadType = rtpInfo->payloadType;
+    RTPpacket.seqNumber = rtpInfo->sequenceNumber;
+    RTPpacket.timeStamp = rtpInfo->timeStamp;
+    RTPpacket.ssrc = rtpInfo->SSRC;
+    RTPpacket.payload = (const WebRtc_Word16*) payloadPtr;
+    RTPpacket.payloadLen = payloadLenBytes;
+    RTPpacket.starts_byte1 = 0;
+
+    ok = WebRtcNetEQ_RecInInternal(&NetEqMainInst->MCUinst, &RTPpacket, uw32_timeRec);
+    if (ok != 0)
+    {
+        NetEqMainInst->ErrorCode = -ok;
+        return (-1);
+    }
+    return (ok);
+}
+
+int WebRtcNetEQ_RecOut(void *inst, WebRtc_Word16 *pw16_outData, WebRtc_Word16 *pw16_len)
+{
+    int ok = 0;
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+#ifdef NETEQ_STEREO
+    MasterSlaveInfo msInfo;
+    msInfo.msMode = NETEQ_MONO;
+#endif
+
+    if (NetEqMainInst == NULL) return (-1);
+
+    /* Check for corrupt/cleared instance */
+    if (NetEqMainInst->DSPinst.main_inst != NetEqMainInst)
+    {
+        /* Instance is corrupt */
+        NetEqMainInst->ErrorCode = CORRUPT_INSTANCE;
+        return (-1);
+    }
+
+#ifdef NETEQ_STEREO
+    NetEqMainInst->DSPinst.msInfo = &msInfo;
+#endif
+
+    ok = WebRtcNetEQ_RecOutInternal(&NetEqMainInst->DSPinst, pw16_outData,
+        pw16_len, 0 /* not BGN only */);
+    if (ok != 0)
+    {
+        NetEqMainInst->ErrorCode = -ok;
+        return (-1);
+    }
+    return (ok);
+}
+
+/****************************************************************************
+ * WebRtcNetEQ_RecOutMasterSlave(...)
+ *
+ * RecOut function for running several NetEQ instances in master/slave mode.
+ * One master can be used to control several slaves.
+ *
+ * Input:
+ *      - inst          : NetEQ instance
+ *      - isMaster      : Non-zero indicates that this is the master channel
+ *      - msInfo        : (slave only) Information from master
+ *
+ * Output:
+ *		- inst	        : Updated NetEQ instance
+ *      - pw16_outData  : Pointer to vector where output should be written
+ *      - pw16_len      : Pointer to variable where output length is returned
+ *      - msInfo        : (master only) Information to slave(s)
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_RecOutMasterSlave(void *inst, WebRtc_Word16 *pw16_outData,
+                                  WebRtc_Word16 *pw16_len, void *msInfo,
+                                  WebRtc_Word16 isMaster)
+{
+#ifndef NETEQ_STEREO
+    /* Stereo not supported */
+    return(-1);
+#else
+    int ok = 0;
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+
+    if (NetEqMainInst == NULL) return (-1);
+
+    /* Check for corrupt/cleared instance */
+    if (NetEqMainInst->DSPinst.main_inst != NetEqMainInst)
+    {
+        /* Instance is corrupt */
+        NetEqMainInst->ErrorCode = CORRUPT_INSTANCE;
+        return (-1);
+    }
+
+    if (msInfo == NULL)
+    {
+        /* msInfo not provided */
+        NetEqMainInst->ErrorCode = NETEQ_OTHER_ERROR;
+        return (-1);
+    }
+
+    /* translate from external to internal Master/Slave information */
+    NetEqMainInst->DSPinst.msInfo = (MasterSlaveInfo *) msInfo;
+
+    /* check that we have not done a master/slave switch without first re-initializing */
+    if ((NetEqMainInst->masterSlave == 1 && !isMaster) || /* switch from master to slave */
+    (NetEqMainInst->masterSlave == 2 && isMaster)) /* switch from slave to master */
+    {
+        NetEqMainInst->ErrorCode = ILLEGAL_MASTER_SLAVE_SWITCH;
+        return (-1);
+    }
+
+    if (!isMaster)
+    {
+        /* this is the slave */
+        NetEqMainInst->masterSlave = 2;
+        NetEqMainInst->DSPinst.msInfo->msMode = NETEQ_SLAVE;
+    }
+    else
+    {
+        NetEqMainInst->DSPinst.msInfo->msMode = NETEQ_MASTER;
+    }
+
+    ok  = WebRtcNetEQ_RecOutInternal(&NetEqMainInst->DSPinst, pw16_outData,
+        pw16_len, 0 /* not BGN only */);
+    if (ok != 0)
+    {
+        NetEqMainInst->ErrorCode = -ok;
+        return (-1);
+    }
+
+    if (isMaster)
+    {
+        /* this is the master */
+        NetEqMainInst->masterSlave = 1;
+    }
+
+    return (ok);
+#endif
+}
+
+int WebRtcNetEQ_GetMasterSlaveInfoSize()
+{
+#ifdef NETEQ_STEREO
+    return (sizeof(MasterSlaveInfo));
+#else
+    return(-1);
+#endif
+}
+
+/* Special RecOut that does not do any decoding. */
+int WebRtcNetEQ_RecOutNoDecode(void *inst, WebRtc_Word16 *pw16_outData,
+                               WebRtc_Word16 *pw16_len)
+{
+    int ok = 0;
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+#ifdef NETEQ_STEREO
+    MasterSlaveInfo msInfo;
+#endif
+
+    if (NetEqMainInst == NULL) return (-1);
+
+    /* Check for corrupt/cleared instance */
+    if (NetEqMainInst->DSPinst.main_inst != NetEqMainInst)
+    {
+        /* Instance is corrupt */
+        NetEqMainInst->ErrorCode = CORRUPT_INSTANCE;
+        return (-1);
+    }
+
+#ifdef NETEQ_STEREO
+    /* keep same mode as before */
+    switch (NetEqMainInst->masterSlave)
+    {
+        case 1:
+        {
+            msInfo.msMode = NETEQ_MASTER;
+            break;
+        }
+        case 2:
+        {
+            msInfo.msMode = NETEQ_SLAVE;
+            break;
+        }
+        default:
+        {
+            msInfo.msMode = NETEQ_MONO;
+            break;
+        }
+    }
+
+    NetEqMainInst->DSPinst.msInfo = &msInfo;
+#endif
+
+    ok = WebRtcNetEQ_RecOutInternal(&NetEqMainInst->DSPinst, pw16_outData,
+        pw16_len, 1 /* BGN only */);
+    if (ok != 0)
+    {
+        NetEqMainInst->ErrorCode = -ok;
+        return (-1);
+    }
+    return (ok);
+}
+
+int WebRtcNetEQ_GetRTCPStats(void *inst, WebRtcNetEQ_RTCPStat *RTCP_inst)
+{
+    int ok = 0;
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    if (NetEqMainInst == NULL) return (-1);
+    ok = WebRtcNetEQ_RTCPGetStats(&NetEqMainInst->MCUinst.RTCP_inst,
+        &RTCP_inst->fraction_lost, &RTCP_inst->cum_lost, &RTCP_inst->ext_max,
+        &RTCP_inst->jitter, 0);
+    if (ok != 0)
+    {
+        NetEqMainInst->ErrorCode = -ok;
+        return (-1);
+    }
+    return (ok);
+}
+
+int WebRtcNetEQ_GetRTCPStatsNoReset(void *inst, WebRtcNetEQ_RTCPStat *RTCP_inst)
+{
+    int ok = 0;
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    if (NetEqMainInst == NULL) return (-1);
+    ok = WebRtcNetEQ_RTCPGetStats(&NetEqMainInst->MCUinst.RTCP_inst,
+        &RTCP_inst->fraction_lost, &RTCP_inst->cum_lost, &RTCP_inst->ext_max,
+        &RTCP_inst->jitter, 1);
+    if (ok != 0)
+    {
+        NetEqMainInst->ErrorCode = -ok;
+        return (-1);
+    }
+    return (ok);
+}
+
+int WebRtcNetEQ_GetSpeechTimeStamp(void *inst, WebRtc_UWord32 *timestamp)
+{
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    if (NetEqMainInst == NULL) return (-1);
+
+    if (NetEqMainInst->MCUinst.TSscalingInitialized)
+    {
+        *timestamp = WebRtcNetEQ_ScaleTimestampInternalToExternal(&NetEqMainInst->MCUinst,
+            NetEqMainInst->DSPinst.videoSyncTimestamp);
+    }
+    else
+    {
+        *timestamp = NetEqMainInst->DSPinst.videoSyncTimestamp;
+    }
+
+    return (0);
+}
+
+/****************************************************************************
+ * WebRtcNetEQ_GetSpeechOutputType(...)
+ *
+ * Get the output type for the audio provided by the latest call to
+ * WebRtcNetEQ_RecOut().
+ *
+ * kOutputNormal = normal audio (possibly processed)
+ * kOutputPLC = loss concealment through stretching audio
+ * kOutputCNG = comfort noise (codec-internal or RFC3389)
+ * kOutputPLCtoCNG = background noise only due to long expand or error
+ * kOutputVADPassive = PostDecode VAD signalling passive speaker
+ *
+ * Input:
+ *      - inst          : NetEQ instance
+ *
+ * Output:
+ *		- outputType    : Output type from enum list WebRtcNetEQOutputType
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_GetSpeechOutputType(void *inst, enum WebRtcNetEQOutputType *outputType)
+{
+    /* Typecast to internal instance type */
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+
+    if (NetEqMainInst == NULL)
+    {
+        return (-1);
+    }
+
+    if ((NetEqMainInst->DSPinst.w16_mode & MODE_BGN_ONLY) != 0)
+    {
+        /* If last mode was background noise only */
+        *outputType = kOutputPLCtoCNG;
+
+    }
+    else if ((NetEqMainInst->DSPinst.w16_mode == MODE_CODEC_INTERNAL_CNG)
+        || (NetEqMainInst->DSPinst.w16_mode == MODE_RFC3389CNG))
+    {
+        /* If CN or internal CNG */
+        *outputType = kOutputCNG;
+
+#ifdef NETEQ_VAD
+    }
+    else if ( NetEqMainInst->DSPinst.VADInst.VADDecision == 0 )
+    {
+        /* post-decode VAD says passive speaker */
+        *outputType = kOutputVADPassive;
+#endif /* NETEQ_VAD */
+
+    }
+    else if ((NetEqMainInst->DSPinst.w16_mode == MODE_EXPAND)
+        && (NetEqMainInst->DSPinst.ExpandInst.w16_expandMuteFactor == 0))
+    {
+        /* Expand mode has faded down to background noise only (very long expand) */
+        *outputType = kOutputPLCtoCNG;
+
+    }
+    else if (NetEqMainInst->DSPinst.w16_mode == MODE_EXPAND)
+    {
+        /* PLC mode */
+        *outputType = kOutputPLC;
+
+    }
+    else
+    {
+        /* Normal speech output type (can still be manipulated, e.g., accelerated) */
+        *outputType = kOutputNormal;
+    }
+
+    return (0);
+}
+
+/**********************************
+ * Functions related to VQmon 
+ */
+
+#define WEBRTC_NETEQ_CONCEALMENTFLAG_LOST       0x01
+#define WEBRTC_NETEQ_CONCEALMENTFLAG_DISCARDED  0x02
+#define WEBRTC_NETEQ_CONCEALMENTFLAG_SUPRESS    0x04
+#define WEBRTC_NETEQ_CONCEALMENTFLAG_CNGACTIVE  0x80
+
+int WebRtcNetEQ_VQmonRecOutStatistics(void *inst, WebRtc_UWord16 *validVoiceDurationMs,
+                                      WebRtc_UWord16 *concealedVoiceDurationMs,
+                                      WebRtc_UWord8 *concealedVoiceFlags)
+{
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    WebRtc_Word16 fs_mult;
+    WebRtc_Word16 ms_lost;
+    if (NetEqMainInst == NULL) return (-1);
+    fs_mult = WebRtcSpl_DivW32W16ResW16(NetEqMainInst->MCUinst.fs, 8000);
+
+    ms_lost = WebRtcSpl_DivW32W16ResW16(
+        (WebRtc_Word32) NetEqMainInst->DSPinst.w16_concealedTS, (WebRtc_Word16) (8 * fs_mult));
+    if (ms_lost > NetEqMainInst->DSPinst.millisecondsPerCall) ms_lost
+        = NetEqMainInst->DSPinst.millisecondsPerCall;
+
+    *validVoiceDurationMs = NetEqMainInst->DSPinst.millisecondsPerCall - ms_lost;
+    *concealedVoiceDurationMs = ms_lost;
+    if (ms_lost > 0)
+    {
+        *concealedVoiceFlags = WEBRTC_NETEQ_CONCEALMENTFLAG_LOST;
+    }
+    else
+    {
+        *concealedVoiceFlags = 0;
+    }
+    NetEqMainInst->DSPinst.w16_concealedTS -= ms_lost * (8 * fs_mult);
+
+    return (0);
+}
+
+int WebRtcNetEQ_VQmonGetConfiguration(void *inst, WebRtc_UWord16 *absMaxDelayMs,
+                                      WebRtc_UWord8 *adaptationRate)
+{
+    /* Dummy check the inst, just to avoid compiler warnings. */
+    if (inst == NULL)
+    {
+        /* Do nothing. */
+    }
+
+    /* Hardcoded variables that are used for VQmon as jitter buffer parameters */
+    *absMaxDelayMs = 240;
+    *adaptationRate = 1;
+    return (0);
+}
+
+int WebRtcNetEQ_VQmonGetRxStatistics(void *inst, WebRtc_UWord16 *avgDelayMs,
+                                     WebRtc_UWord16 *maxDelayMs)
+{
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    if (NetEqMainInst == NULL) return (-1);
+    *avgDelayMs = (WebRtc_UWord16) (NetEqMainInst->MCUinst.BufferStat_inst.avgDelayMsQ8 >> 8);
+    *maxDelayMs = (WebRtc_UWord16) NetEqMainInst->MCUinst.BufferStat_inst.maxDelayMs;
+    return (0);
+}
+
+/*************************************
+ * Statistics functions
+ */
+
+/* Get the "in-call" statistics from NetEQ.
+ * The statistics are reset after the query. */
+int WebRtcNetEQ_GetNetworkStatistics(void *inst, WebRtcNetEQ_NetworkStatistics *stats)
+
+{
+
+    WebRtc_UWord16 tempU16;
+    WebRtc_UWord32 tempU32, tempU32_2;
+    int numShift;
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+
+    /* Instance sanity */
+    if (NetEqMainInst == NULL) return (-1);
+
+    /*******************/
+    /* Get buffer size */
+    /*******************/
+
+    if (NetEqMainInst->MCUinst.fs != 0)
+    {
+        WebRtc_Word32 temp32;
+        /* Query packet buffer for number of samples. */
+        temp32 = WebRtcNetEQ_PacketBufferGetSize(
+            &NetEqMainInst->MCUinst.PacketBuffer_inst);
+
+        /* Divide by sample rate.
+         * Calculate temp32 * 1000 / fs to get result in ms. */
+        stats->currentBufferSize = (WebRtc_UWord16)
+            WebRtcSpl_DivU32U16(temp32 * 1000, NetEqMainInst->MCUinst.fs);
+
+        /* Add number of samples yet to play in sync buffer. */
+        temp32 = (WebRtc_Word32) (NetEqMainInst->DSPinst.endPosition -
+            NetEqMainInst->DSPinst.curPosition);
+        stats->currentBufferSize += (WebRtc_UWord16)
+            WebRtcSpl_DivU32U16(temp32 * 1000, NetEqMainInst->MCUinst.fs);
+    }
+    else
+    {
+        /* Sample rate not initialized. */
+        stats->currentBufferSize = 0;
+    }
+
+    /***************************/
+    /* Get optimal buffer size */
+    /***************************/
+
+    if (NetEqMainInst->MCUinst.fs != 0 && NetEqMainInst->MCUinst.fs <= WEBRTC_SPL_WORD16_MAX)
+    {
+        /* preferredBufferSize = Bopt * packSizeSamples / (fs/1000) */
+        stats->preferredBufferSize
+            = (WebRtc_UWord16) WEBRTC_SPL_MUL_16_16(
+                (WebRtc_Word16) ((NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.optBufLevel) >> 8), /* optimal buffer level in packets shifted to Q0 */
+                WebRtcSpl_DivW32W16ResW16(
+                    (WebRtc_Word32) NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.packetSpeechLenSamp, /* samples per packet */
+                    WebRtcSpl_DivW32W16ResW16( (WebRtc_Word32) NetEqMainInst->MCUinst.fs, (WebRtc_Word16) 1000 ) /* samples per ms */
+                ) );
+
+        /* add extra delay */
+        if (NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.extraDelayMs > 0)
+        {
+            stats->preferredBufferSize
+                += NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.extraDelayMs;
+        }
+    }
+    else
+    {
+        /* sample rate not initialized */
+        stats->preferredBufferSize = 0;
+    }
+
+    /***********************************/
+    /* Check if jitter peaks are found */
+    /***********************************/
+
+    stats->jitterPeaksFound =
+        NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst.peakFound;
+
+    /***********************/
+    /* Calculate loss rate */
+    /***********************/
+
+    /* timestamps elapsed since last report */
+    tempU32 = NetEqMainInst->MCUinst.lastReportTS;
+
+    if (NetEqMainInst->MCUinst.lostTS == 0)
+    {
+        /* no losses */
+        stats->currentPacketLossRate = 0;
+    }
+    else if (NetEqMainInst->MCUinst.lostTS < tempU32)
+    {
+        /* calculate shifts; we want the result in Q14 */
+        numShift = WebRtcSpl_NormU32(NetEqMainInst->MCUinst.lostTS); /* numerator shift for normalize */
+
+        if (numShift < 14)
+        {
+            /* cannot shift numerator 14 steps; shift denominator too */
+            tempU32 = WEBRTC_SPL_RSHIFT_U32(tempU32, 14-numShift); /* right-shift */
+        }
+        else
+        {
+            /* shift no more than 14 steps */
+            numShift = 14;
+        }
+
+        if (tempU32 == 0)
+        {
+            /* check for zero denominator; result should be zero in this case */
+            stats->currentPacketLossRate = 0;
+        }
+        else
+        {
+            /* check that denominator fits in signed 16-bit */
+            while (tempU32 > WEBRTC_SPL_WORD16_MAX)
+            {
+                tempU32 >>= 1; /* right-shift 1 step */
+                numShift--; /* compensate in numerator */
+            }
+            tempU16 = (WebRtc_UWord16) tempU32;
+
+            /* do the shift of numerator */
+            tempU32
+                = WEBRTC_SPL_SHIFT_W32( (WebRtc_UWord32) NetEqMainInst->MCUinst.lostTS, numShift);
+
+            stats->currentPacketLossRate = (WebRtc_UWord16) WebRtcSpl_DivU32U16(tempU32,
+                tempU16);
+        }
+    }
+    else
+    {
+        /* lost count is larger than elapsed time count; probably timestamp wrap-around or something else wrong */
+        /* set loss rate = 1 */
+        stats->currentPacketLossRate = 1 << 14; /* 1 in Q14 */
+    }
+
+    /**************************/
+    /* Calculate discard rate */
+    /**************************/
+
+    /* timestamps elapsed since last report */
+    tempU32 = NetEqMainInst->MCUinst.lastReportTS;
+
+    /* number of discarded samples */
+    tempU32_2
+        = WEBRTC_SPL_MUL_16_U16( (WebRtc_Word16) NetEqMainInst->MCUinst.PacketBuffer_inst.packSizeSamples,
+            NetEqMainInst->MCUinst.PacketBuffer_inst.discardedPackets);
+
+    if (tempU32_2 == 0)
+    {
+        /* no discarded samples */
+        stats->currentDiscardRate = 0;
+    }
+    else if (tempU32_2 < tempU32)
+    {
+        /* calculate shifts; we want the result in Q14 */
+        numShift = WebRtcSpl_NormU32(tempU32_2); /* numerator shift for normalize */
+
+        if (numShift < 14)
+        {
+            /* cannot shift numerator 14 steps; shift denominator too */
+            tempU32 = WEBRTC_SPL_RSHIFT_U32(tempU32, 14-numShift); /* right-shift */
+        }
+        else
+        {
+            /* shift no more than 14 steps */
+            numShift = 14;
+        }
+
+        if (tempU32 == 0)
+        {
+            /* check for zero denominator; result should be zero in this case */
+            stats->currentDiscardRate = 0;
+        }
+        else
+        {
+            /* check that denominator fits in signed 16-bit */
+            while (tempU32 > WEBRTC_SPL_WORD16_MAX)
+            {
+                tempU32 >>= 1; /* right-shift 1 step */
+                numShift--; /* compensate in numerator */
+            }
+            tempU16 = (WebRtc_UWord16) tempU32;
+
+            /* do the shift of numerator */
+            tempU32 = WEBRTC_SPL_SHIFT_W32( tempU32_2, numShift);
+
+            stats->currentDiscardRate = (WebRtc_UWord16) WebRtcSpl_DivU32U16(tempU32, tempU16);
+        }
+    }
+    else
+    {
+        /* lost count is larger than elapsed time count; probably timestamp wrap-around or something else wrong */
+        /* set loss rate = 1 */
+        stats->currentDiscardRate = 1 << 14; /* 1 in Q14 */
+    }
+
+    /*************************************************************/
+    /* Calculate Accelerate, Expand and Pre-emptive Expand rates */
+    /*************************************************************/
+
+    /* timestamps elapsed since last report */
+    tempU32 = NetEqMainInst->MCUinst.lastReportTS;
+
+    if (NetEqMainInst->DSPinst.statInst.accelerateLength == 0)
+    {
+        /* no accelerate */
+        stats->currentAccelerateRate = 0;
+    }
+    else if (NetEqMainInst->DSPinst.statInst.accelerateLength < tempU32)
+    {
+        /* calculate shifts; we want the result in Q14 */
+        numShift = WebRtcSpl_NormU32(NetEqMainInst->DSPinst.statInst.accelerateLength); /* numerator shift for normalize */
+
+        if (numShift < 14)
+        {
+            /* cannot shift numerator 14 steps; shift denominator too */
+            tempU32 = WEBRTC_SPL_RSHIFT_U32(tempU32, 14-numShift); /* right-shift */
+        }
+        else
+        {
+            /* shift no more than 14 steps */
+            numShift = 14;
+        }
+
+        if (tempU32 == 0)
+        {
+            /* check for zero denominator; result should be zero in this case */
+            stats->currentAccelerateRate = 0;
+        }
+        else
+        {
+            /* check that denominator fits in signed 16-bit */
+            while (tempU32 > WEBRTC_SPL_WORD16_MAX)
+            {
+                tempU32 >>= 1; /* right-shift 1 step */
+                numShift--; /* compensate in numerator */
+            }
+            tempU16 = (WebRtc_UWord16) tempU32;
+
+            /* do the shift of numerator */
+            tempU32
+                = WEBRTC_SPL_SHIFT_W32( NetEqMainInst->DSPinst.statInst.accelerateLength, numShift);
+
+            stats->currentAccelerateRate = (WebRtc_UWord16) WebRtcSpl_DivU32U16(tempU32,
+                tempU16);
+        }
+    }
+    else
+    {
+        /* lost count is larger than elapsed time count; probably timestamp wrap-around or something else wrong */
+        /* set loss rate = 1 */
+        stats->currentAccelerateRate = 1 << 14; /* 1 in Q14 */
+    }
+
+    /* timestamps elapsed since last report */
+    tempU32 = NetEqMainInst->MCUinst.lastReportTS;
+
+    if (NetEqMainInst->DSPinst.statInst.expandLength == 0)
+    {
+        /* no expand */
+        stats->currentExpandRate = 0;
+    }
+    else if (NetEqMainInst->DSPinst.statInst.expandLength < tempU32)
+    {
+        /* calculate shifts; we want the result in Q14 */
+        numShift = WebRtcSpl_NormU32(NetEqMainInst->DSPinst.statInst.expandLength); /* numerator shift for normalize */
+
+        if (numShift < 14)
+        {
+            /* cannot shift numerator 14 steps; shift denominator too */
+            tempU32 = WEBRTC_SPL_RSHIFT_U32(tempU32, 14-numShift); /* right-shift */
+        }
+        else
+        {
+            /* shift no more than 14 steps */
+            numShift = 14;
+        }
+
+        if (tempU32 == 0)
+        {
+            /* check for zero denominator; result should be zero in this case */
+            stats->currentExpandRate = 0;
+        }
+        else
+        {
+            /* check that denominator fits in signed 16-bit */
+            while (tempU32 > WEBRTC_SPL_WORD16_MAX)
+            {
+                tempU32 >>= 1; /* right-shift 1 step */
+                numShift--; /* compensate in numerator */
+            }
+            tempU16 = (WebRtc_UWord16) tempU32;
+
+            /* do the shift of numerator */
+            tempU32
+                = WEBRTC_SPL_SHIFT_W32( NetEqMainInst->DSPinst.statInst.expandLength, numShift);
+
+            stats->currentExpandRate = (WebRtc_UWord16) WebRtcSpl_DivU32U16(tempU32, tempU16);
+        }
+    }
+    else
+    {
+        /* lost count is larger than elapsed time count; probably timestamp wrap-around or something else wrong */
+        /* set loss rate = 1 */
+        stats->currentExpandRate = 1 << 14; /* 1 in Q14 */
+    }
+
+    /* timestamps elapsed since last report */
+    tempU32 = NetEqMainInst->MCUinst.lastReportTS;
+
+    if (NetEqMainInst->DSPinst.statInst.preemptiveLength == 0)
+    {
+        /* no pre-emptive expand */
+        stats->currentPreemptiveRate = 0;
+    }
+    else if (NetEqMainInst->DSPinst.statInst.preemptiveLength < tempU32)
+    {
+        /* calculate shifts; we want the result in Q14 */
+        numShift = WebRtcSpl_NormU32(NetEqMainInst->DSPinst.statInst.preemptiveLength); /* numerator shift for normalize */
+
+        if (numShift < 14)
+        {
+            /* cannot shift numerator 14 steps; shift denominator too */
+            tempU32 = WEBRTC_SPL_RSHIFT_U32(tempU32, 14-numShift); /* right-shift */
+        }
+        else
+        {
+            /* shift no more than 14 steps */
+            numShift = 14;
+        }
+
+        if (tempU32 == 0)
+        {
+            /* check for zero denominator; result should be zero in this case */
+            stats->currentPreemptiveRate = 0;
+        }
+        else
+        {
+            /* check that denominator fits in signed 16-bit */
+            while (tempU32 > WEBRTC_SPL_WORD16_MAX)
+            {
+                tempU32 >>= 1; /* right-shift 1 step */
+                numShift--; /* compensate in numerator */
+            }
+            tempU16 = (WebRtc_UWord16) tempU32;
+
+            /* do the shift of numerator */
+            tempU32
+                = WEBRTC_SPL_SHIFT_W32( NetEqMainInst->DSPinst.statInst.preemptiveLength, numShift);
+
+            stats->currentPreemptiveRate = (WebRtc_UWord16) WebRtcSpl_DivU32U16(tempU32,
+                tempU16);
+        }
+    }
+    else
+    {
+        /* lost count is larger than elapsed time count; probably timestamp wrap-around or something else wrong */
+        /* set loss rate = 1 */
+        stats->currentPreemptiveRate = 1 << 14; /* 1 in Q14 */
+    }
+
+    stats->clockDriftPPM = WebRtcNetEQ_AverageIAT(
+        &NetEqMainInst->MCUinst.BufferStat_inst.Automode_inst);
+
+    /* reset counters */
+    WebRtcNetEQ_ResetMcuInCallStats(&(NetEqMainInst->MCUinst));
+    WebRtcNetEQ_ClearInCallStats(&(NetEqMainInst->DSPinst));
+
+    return (0);
+}
+
+int WebRtcNetEQ_GetRawFrameWaitingTimes(void *inst,
+                                        int max_length,
+                                        int* waiting_times_ms) {
+  int i = 0;
+  MainInst_t *main_inst = (MainInst_t*) inst;
+  if (main_inst == NULL) return -1;
+
+  while ((i < max_length) && (i < main_inst->MCUinst.len_waiting_times)) {
+    waiting_times_ms[i] = main_inst->MCUinst.waiting_times[i] *
+        main_inst->DSPinst.millisecondsPerCall;
+    ++i;
+  }
+  assert(i <= kLenWaitingTimes);
+  WebRtcNetEQ_ResetWaitingTimeStats(&main_inst->MCUinst);
+  return i;
+}
+
+/****************************************************************************
+ * WebRtcNetEQ_SetVADInstance(...)
+ *
+ * Provide a pointer to an allocated VAD instance. If function is never 
+ * called or it is called with NULL pointer as VAD_inst, the post-decode
+ * VAD functionality is disabled. Also provide pointers to init, setmode
+ * and VAD functions. These are typically pointers to WebRtcVad_Init,
+ * WebRtcVad_set_mode and WebRtcVad_Process, respectively, all found in the
+ * interface file webrtc_vad.h.
+ *
+ * Input:
+ *      - NetEQ_inst        : NetEQ instance
+ *		- VADinst		    : VAD instance
+ *		- initFunction	    : Pointer to VAD init function
+ *		- setmodeFunction   : Pointer to VAD setmode function
+ *		- VADfunction	    : Pointer to VAD function
+ *
+ * Output:
+ *		- NetEQ_inst	    : Updated NetEQ instance
+ *
+ * Return value			    :  0 - Ok
+ *						      -1 - Error
+ */
+
+int WebRtcNetEQ_SetVADInstance(void *NetEQ_inst, void *VAD_inst,
+                               WebRtcNetEQ_VADInitFunction initFunction,
+                               WebRtcNetEQ_VADSetmodeFunction setmodeFunction,
+                               WebRtcNetEQ_VADFunction VADFunction)
+{
+
+    /* Typecast to internal instance type */
+    MainInst_t *NetEqMainInst = (MainInst_t*) NetEQ_inst;
+    if (NetEqMainInst == NULL)
+    {
+        return (-1);
+    }
+
+#ifdef NETEQ_VAD
+
+    /* Store pointer in PostDecode VAD struct */
+    NetEqMainInst->DSPinst.VADInst.VADState = VAD_inst;
+
+    /* Store function pointers */
+    NetEqMainInst->DSPinst.VADInst.initFunction = initFunction;
+    NetEqMainInst->DSPinst.VADInst.setmodeFunction = setmodeFunction;
+    NetEqMainInst->DSPinst.VADInst.VADFunction = VADFunction;
+
+    /* Call init function and return the result (ok or fail) */
+    return(WebRtcNetEQ_InitVAD(&NetEqMainInst->DSPinst.VADInst, NetEqMainInst->DSPinst.fs));
+
+#else /* NETEQ_VAD not defined */
+    return (-1);
+#endif /* NETEQ_VAD */
+
+}
+
+/****************************************************************************
+ * WebRtcNetEQ_SetVADMode(...)
+ *
+ * Pass an aggressiveness mode parameter to the post-decode VAD instance.
+ * If this function is never called, mode 0 (quality mode) is used as default.
+ *
+ * Input:
+ *      - inst          : NetEQ instance
+ *		- mode  		: mode parameter (same range as WebRtc VAD mode)
+ *
+ * Output:
+ *		- inst	        : Updated NetEQ instance
+ *
+ * Return value			:  0 - Ok
+ *						  -1 - Error
+ */
+
+int WebRtcNetEQ_SetVADMode(void *inst, int mode)
+{
+
+    /* Typecast to internal instance type */
+    MainInst_t *NetEqMainInst = (MainInst_t*) inst;
+    if (NetEqMainInst == NULL)
+    {
+        return (-1);
+    }
+
+#ifdef NETEQ_VAD
+
+    /* Set mode and return result */
+    return(WebRtcNetEQ_SetVADModeInternal(&NetEqMainInst->DSPinst.VADInst, mode));
+
+#else /* NETEQ_VAD not defined */
+    return (-1);
+#endif /* NETEQ_VAD */
+
+}
diff --git a/src/modules/audio_coding/neteq/webrtc_neteq_unittest.cc b/src/modules/audio_coding/neteq/webrtc_neteq_unittest.cc
new file mode 100644
index 0000000..fbb9cc2
--- /dev/null
+++ b/src/modules/audio_coding/neteq/webrtc_neteq_unittest.cc
@@ -0,0 +1,605 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file includes unit tests for NetEQ.
+ */
+
+#include <stdlib.h>
+#include <string.h>  // memset
+
+#include <string>
+#include <vector>
+
+#include "gtest/gtest.h"
+
+#include "modules/audio_coding/neteq/interface/webrtc_neteq.h"
+#include "modules/audio_coding/neteq/interface/webrtc_neteq_help_macros.h"
+#include "modules/audio_coding/neteq/interface/webrtc_neteq_internal.h"
+#include "modules/audio_coding/neteq/test/NETEQTEST_CodecClass.h"
+#include "modules/audio_coding/neteq/test/NETEQTEST_NetEQClass.h"
+#include "modules/audio_coding/neteq/test/NETEQTEST_RTPpacket.h"
+#include "testsupport/fileutils.h"
+#include "typedefs.h"  // NOLINT(build/include)
+
+namespace webrtc {
+
+class RefFiles {
+ public:
+  RefFiles(const std::string& input_file, const std::string& output_file);
+  ~RefFiles();
+  template<class T> void ProcessReference(const T& test_results);
+  template<typename T, size_t n> void ProcessReference(
+      const T (&test_results)[n],
+      size_t length);
+  template<typename T, size_t n> void WriteToFile(
+      const T (&test_results)[n],
+      size_t length);
+  template<typename T, size_t n> void ReadFromFileAndCompare(
+      const T (&test_results)[n],
+      size_t length);
+  void WriteToFile(const WebRtcNetEQ_NetworkStatistics& stats);
+  void ReadFromFileAndCompare(const WebRtcNetEQ_NetworkStatistics& stats);
+  void WriteToFile(const WebRtcNetEQ_RTCPStat& stats);
+  void ReadFromFileAndCompare(const WebRtcNetEQ_RTCPStat& stats);
+
+  FILE* input_fp_;
+  FILE* output_fp_;
+};
+
+RefFiles::RefFiles(const std::string &input_file,
+                   const std::string &output_file)
+    : input_fp_(NULL),
+      output_fp_(NULL) {
+  if (!input_file.empty()) {
+    input_fp_ = fopen(input_file.c_str(), "rb");
+    EXPECT_TRUE(input_fp_ != NULL);
+  }
+  if (!output_file.empty()) {
+    output_fp_ = fopen(output_file.c_str(), "wb");
+    EXPECT_TRUE(output_fp_ != NULL);
+  }
+}
+
+RefFiles::~RefFiles() {
+  if (input_fp_) {
+    EXPECT_EQ(EOF, fgetc(input_fp_));  // Make sure that we reached the end.
+    fclose(input_fp_);
+  }
+  if (output_fp_) fclose(output_fp_);
+}
+
+template<class T>
+void RefFiles::ProcessReference(const T& test_results) {
+  WriteToFile(test_results);
+  ReadFromFileAndCompare(test_results);
+}
+
+template<typename T, size_t n>
+void RefFiles::ProcessReference(const T (&test_results)[n], size_t length) {
+  WriteToFile(test_results, length);
+  ReadFromFileAndCompare(test_results, length);
+}
+
+template<typename T, size_t n>
+void RefFiles::WriteToFile(const T (&test_results)[n], size_t length) {
+  if (output_fp_) {
+    ASSERT_EQ(length, fwrite(&test_results, sizeof(T), length, output_fp_));
+  }
+}
+
+template<typename T, size_t n>
+void RefFiles::ReadFromFileAndCompare(const T (&test_results)[n],
+                                      size_t length) {
+  if (input_fp_) {
+    // Read from ref file.
+    T* ref = new T[length];
+    ASSERT_EQ(length, fread(ref, sizeof(T), length, input_fp_));
+    // Compare
+    EXPECT_EQ(0, memcmp(&test_results, ref, sizeof(T) * length));
+    delete [] ref;
+  }
+}
+
+void RefFiles::WriteToFile(const WebRtcNetEQ_NetworkStatistics& stats) {
+  if (output_fp_) {
+    ASSERT_EQ(1u, fwrite(&stats, sizeof(WebRtcNetEQ_NetworkStatistics), 1,
+                         output_fp_));
+  }
+}
+
+void RefFiles::ReadFromFileAndCompare(
+    const WebRtcNetEQ_NetworkStatistics& stats) {
+  if (input_fp_) {
+    // Read from ref file.
+    size_t stat_size = sizeof(WebRtcNetEQ_NetworkStatistics);
+    WebRtcNetEQ_NetworkStatistics ref_stats;
+    ASSERT_EQ(1u, fread(&ref_stats, stat_size, 1, input_fp_));
+    // Compare
+    EXPECT_EQ(0, memcmp(&stats, &ref_stats, stat_size));
+  }
+}
+
+void RefFiles::WriteToFile(const WebRtcNetEQ_RTCPStat& stats) {
+  if (output_fp_) {
+    ASSERT_EQ(1u, fwrite(&(stats.fraction_lost), sizeof(stats.fraction_lost), 1,
+                         output_fp_));
+    ASSERT_EQ(1u, fwrite(&(stats.cum_lost), sizeof(stats.cum_lost), 1,
+                         output_fp_));
+    ASSERT_EQ(1u, fwrite(&(stats.ext_max), sizeof(stats.ext_max), 1,
+                         output_fp_));
+    ASSERT_EQ(1u, fwrite(&(stats.jitter), sizeof(stats.jitter), 1,
+                         output_fp_));
+  }
+}
+
+void RefFiles::ReadFromFileAndCompare(
+    const WebRtcNetEQ_RTCPStat& stats) {
+  if (input_fp_) {
+    // Read from ref file.
+    WebRtcNetEQ_RTCPStat ref_stats;
+    ASSERT_EQ(1u, fread(&(ref_stats.fraction_lost),
+                        sizeof(ref_stats.fraction_lost), 1, input_fp_));
+    ASSERT_EQ(1u, fread(&(ref_stats.cum_lost), sizeof(ref_stats.cum_lost), 1,
+                        input_fp_));
+    ASSERT_EQ(1u, fread(&(ref_stats.ext_max), sizeof(ref_stats.ext_max), 1,
+                        input_fp_));
+    ASSERT_EQ(1u, fread(&(ref_stats.jitter), sizeof(ref_stats.jitter), 1,
+                        input_fp_));
+    // Compare
+    EXPECT_EQ(ref_stats.fraction_lost, stats.fraction_lost);
+    EXPECT_EQ(ref_stats.cum_lost, stats.cum_lost);
+    EXPECT_EQ(ref_stats.ext_max, stats.ext_max);
+    EXPECT_EQ(ref_stats.jitter, stats.jitter);
+  }
+}
+
+class NetEqDecodingTest : public ::testing::Test {
+ protected:
+  // NetEQ must be polled for data once every 10 ms. Thus, neither of the
+  // constants below can be changed.
+  static const int kTimeStepMs = 10;
+  static const int kBlockSize8kHz = kTimeStepMs * 8;
+  static const int kBlockSize16kHz = kTimeStepMs * 16;
+  static const int kBlockSize32kHz = kTimeStepMs * 32;
+  static const int kMaxBlockSize = kBlockSize32kHz;
+
+  NetEqDecodingTest();
+  virtual void SetUp();
+  virtual void TearDown();
+  void SelectDecoders(WebRtcNetEQDecoder* used_codec);
+  void LoadDecoders();
+  void OpenInputFile(const std::string &rtp_file);
+  void Process(NETEQTEST_RTPpacket* rtp_ptr, int16_t* out_len);
+  void DecodeAndCompare(const std::string &rtp_file,
+                        const std::string &ref_file);
+  void DecodeAndCheckStats(const std::string &rtp_file,
+                           const std::string &stat_ref_file,
+                           const std::string &rtcp_ref_file);
+  static void PopulateRtpInfo(int frame_index,
+                              int timestamp,
+                              WebRtcNetEQ_RTPInfo* rtp_info);
+  static void PopulateCng(int frame_index,
+                          int timestamp,
+                          WebRtcNetEQ_RTPInfo* rtp_info,
+                          uint8_t* payload,
+                          int* payload_len);
+
+  NETEQTEST_NetEQClass* neteq_inst_;
+  std::vector<NETEQTEST_Decoder*> dec_;
+  FILE* rtp_fp_;
+  unsigned int sim_clock_;
+  int16_t out_data_[kMaxBlockSize];
+};
+
+NetEqDecodingTest::NetEqDecodingTest()
+    : neteq_inst_(NULL),
+      rtp_fp_(NULL),
+      sim_clock_(0) {
+  memset(out_data_, 0, sizeof(out_data_));
+}
+
+void NetEqDecodingTest::SetUp() {
+  WebRtcNetEQDecoder usedCodec[kDecoderReservedEnd - 1];
+
+  SelectDecoders(usedCodec);
+  neteq_inst_ = new NETEQTEST_NetEQClass(usedCodec, dec_.size(), 8000,
+                                         kTCPLargeJitter);
+  ASSERT_TRUE(neteq_inst_);
+  LoadDecoders();
+}
+
+void NetEqDecodingTest::TearDown() {
+  if (neteq_inst_)
+    delete neteq_inst_;
+  for (size_t i = 0; i < dec_.size(); ++i) {
+    if (dec_[i])
+      delete dec_[i];
+  }
+  if (rtp_fp_)
+    fclose(rtp_fp_);
+}
+
+void NetEqDecodingTest::SelectDecoders(WebRtcNetEQDecoder* used_codec) {
+  *used_codec++ = kDecoderPCMu;
+  dec_.push_back(new decoder_PCMU(0));
+  *used_codec++ = kDecoderPCMa;
+  dec_.push_back(new decoder_PCMA(8));
+  *used_codec++ = kDecoderILBC;
+  dec_.push_back(new decoder_ILBC(102));
+  *used_codec++ = kDecoderISAC;
+  dec_.push_back(new decoder_iSAC(103));
+  *used_codec++ = kDecoderISACswb;
+  dec_.push_back(new decoder_iSACSWB(104));
+  *used_codec++ = kDecoderPCM16B;
+  dec_.push_back(new decoder_PCM16B_NB(93));
+  *used_codec++ = kDecoderPCM16Bwb;
+  dec_.push_back(new decoder_PCM16B_WB(94));
+  *used_codec++ = kDecoderPCM16Bswb32kHz;
+  dec_.push_back(new decoder_PCM16B_SWB32(95));
+  *used_codec++ = kDecoderCNG;
+  dec_.push_back(new decoder_CNG(13, 8000));
+  *used_codec++ = kDecoderCNG;
+  dec_.push_back(new decoder_CNG(98, 16000));
+}
+
+void NetEqDecodingTest::LoadDecoders() {
+  for (size_t i = 0; i < dec_.size(); ++i) {
+    ASSERT_EQ(0, dec_[i]->loadToNetEQ(*neteq_inst_));
+  }
+}
+
+void NetEqDecodingTest::OpenInputFile(const std::string &rtp_file) {
+  rtp_fp_ = fopen(rtp_file.c_str(), "rb");
+  ASSERT_TRUE(rtp_fp_ != NULL);
+  ASSERT_EQ(0, NETEQTEST_RTPpacket::skipFileHeader(rtp_fp_));
+}
+
+void NetEqDecodingTest::Process(NETEQTEST_RTPpacket* rtp, int16_t* out_len) {
+  // Check if time to receive.
+  while ((sim_clock_ >= rtp->time()) &&
+         (rtp->dataLen() >= 0)) {
+    if (rtp->dataLen() > 0) {
+      ASSERT_EQ(0, neteq_inst_->recIn(*rtp));
+    }
+    // Get next packet.
+    ASSERT_NE(-1, rtp->readFromFile(rtp_fp_));
+  }
+
+  // RecOut
+  *out_len = neteq_inst_->recOut(out_data_);
+  ASSERT_TRUE((*out_len == kBlockSize8kHz) ||
+              (*out_len == kBlockSize16kHz) ||
+              (*out_len == kBlockSize32kHz));
+
+  // Increase time.
+  sim_clock_ += kTimeStepMs;
+}
+
+void NetEqDecodingTest::DecodeAndCompare(const std::string &rtp_file,
+                                         const std::string &ref_file) {
+  OpenInputFile(rtp_file);
+
+  std::string ref_out_file = "";
+  if (ref_file.empty()) {
+    ref_out_file = webrtc::test::OutputPath() + "neteq_out.pcm";
+  }
+  RefFiles ref_files(ref_file, ref_out_file);
+
+  NETEQTEST_RTPpacket rtp;
+  ASSERT_GT(rtp.readFromFile(rtp_fp_), 0);
+  while (rtp.dataLen() >= 0) {
+    int16_t out_len;
+    Process(&rtp, &out_len);
+    ref_files.ProcessReference(out_data_, out_len);
+  }
+}
+
+void NetEqDecodingTest::DecodeAndCheckStats(const std::string &rtp_file,
+                                            const std::string &stat_ref_file,
+                                            const std::string &rtcp_ref_file) {
+  OpenInputFile(rtp_file);
+  std::string stat_out_file = "";
+  if (stat_ref_file.empty()) {
+    stat_out_file = webrtc::test::OutputPath() +
+        "neteq_network_stats.dat";
+  }
+  RefFiles network_stat_files(stat_ref_file, stat_out_file);
+
+  std::string rtcp_out_file = "";
+  if (rtcp_ref_file.empty()) {
+    rtcp_out_file = webrtc::test::OutputPath() +
+        "neteq_rtcp_stats.dat";
+  }
+  RefFiles rtcp_stat_files(rtcp_ref_file, rtcp_out_file);
+
+  NETEQTEST_RTPpacket rtp;
+  ASSERT_GT(rtp.readFromFile(rtp_fp_), 0);
+  while (rtp.dataLen() >= 0) {
+    int16_t out_len;
+    Process(&rtp, &out_len);
+
+    // Query the network statistics API once per second
+    if (sim_clock_ % 1000 == 0) {
+      // Process NetworkStatistics.
+      WebRtcNetEQ_NetworkStatistics network_stats;
+      ASSERT_EQ(0, WebRtcNetEQ_GetNetworkStatistics(neteq_inst_->instance(),
+                                                    &network_stats));
+      network_stat_files.ProcessReference(network_stats);
+
+      // Process RTCPstat.
+      WebRtcNetEQ_RTCPStat rtcp_stats;
+      ASSERT_EQ(0, WebRtcNetEQ_GetRTCPStats(neteq_inst_->instance(),
+                                            &rtcp_stats));
+      rtcp_stat_files.ProcessReference(rtcp_stats);
+    }
+  }
+}
+
+void NetEqDecodingTest::PopulateRtpInfo(int frame_index,
+                                        int timestamp,
+                                        WebRtcNetEQ_RTPInfo* rtp_info) {
+  rtp_info->sequenceNumber = frame_index;
+  rtp_info->timeStamp = timestamp;
+  rtp_info->SSRC = 0x1234;  // Just an arbitrary SSRC.
+  rtp_info->payloadType = 94;  // PCM16b WB codec.
+  rtp_info->markerBit = 0;
+}
+
+void NetEqDecodingTest::PopulateCng(int frame_index,
+                                    int timestamp,
+                                    WebRtcNetEQ_RTPInfo* rtp_info,
+                                    uint8_t* payload,
+                                    int* payload_len) {
+  rtp_info->sequenceNumber = frame_index;
+  rtp_info->timeStamp = timestamp;
+  rtp_info->SSRC = 0x1234;  // Just an arbitrary SSRC.
+  rtp_info->payloadType = 98;  // WB CNG.
+  rtp_info->markerBit = 0;
+  payload[0] = 64;  // Noise level -64 dBov, quite arbitrarily chosen.
+  *payload_len = 1;  // Only noise level, no spectral parameters.
+}
+
+TEST_F(NetEqDecodingTest, TestBitExactness) {
+  const std::string kInputRtpFile = webrtc::test::ProjectRootPath() +
+      "resources/neteq_universal.rtp";
+  const std::string kInputRefFile =
+      webrtc::test::ResourcePath("neteq_universal_ref", "pcm");
+  DecodeAndCompare(kInputRtpFile, kInputRefFile);
+}
+
+TEST_F(NetEqDecodingTest, TestNetworkStatistics) {
+  const std::string kInputRtpFile = webrtc::test::ProjectRootPath() +
+      "resources/neteq_universal.rtp";
+  const std::string kNetworkStatRefFile =
+      webrtc::test::ResourcePath("neteq_network_stats", "dat");
+  const std::string kRtcpStatRefFile =
+      webrtc::test::ResourcePath("neteq_rtcp_stats", "dat");
+  DecodeAndCheckStats(kInputRtpFile, kNetworkStatRefFile, kRtcpStatRefFile);
+}
+
+TEST_F(NetEqDecodingTest, TestFrameWaitingTimeStatistics) {
+  // Use fax mode to avoid time-scaling. This is to simplify the testing of
+  // packet waiting times in the packet buffer.
+  ASSERT_EQ(0,
+            WebRtcNetEQ_SetPlayoutMode(neteq_inst_->instance(), kPlayoutFax));
+  // Insert 30 dummy packets at once. Each packet contains 10 ms 16 kHz audio.
+  int num_frames = 30;
+  const int kSamples = 10 * 16;
+  const int kPayloadBytes = kSamples * 2;
+  for (int i = 0; i < num_frames; ++i) {
+    uint16_t payload[kSamples] = {0};
+    WebRtcNetEQ_RTPInfo rtp_info;
+    rtp_info.sequenceNumber = i;
+    rtp_info.timeStamp = i * kSamples;
+    rtp_info.SSRC = 0x1234;  // Just an arbitrary SSRC.
+    rtp_info.payloadType = 94;  // PCM16b WB codec.
+    rtp_info.markerBit = 0;
+    ASSERT_EQ(0, WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(), &rtp_info,
+                                            reinterpret_cast<uint8_t*>(payload),
+                                            kPayloadBytes, 0));
+  }
+  // Pull out all data.
+  for (int i = 0; i < num_frames; ++i) {
+    ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
+  }
+  const int kVecLen = 110;  // More than kLenWaitingTimes in mcu.h.
+  int waiting_times[kVecLen];
+  int len = WebRtcNetEQ_GetRawFrameWaitingTimes(neteq_inst_->instance(),
+                                                kVecLen, waiting_times);
+  EXPECT_EQ(num_frames, len);
+  // Since all frames are dumped into NetEQ at once, but pulled out with 10 ms
+  // spacing (per definition), we expect the delay to increase with 10 ms for
+  // each packet.
+  for (int i = 0; i < len; ++i) {
+    EXPECT_EQ((i + 1) * 10, waiting_times[i]);
+  }
+
+  // Check statistics again and make sure it's been reset.
+  EXPECT_EQ(0, WebRtcNetEQ_GetRawFrameWaitingTimes(neteq_inst_->instance(),
+                                                   kVecLen, waiting_times));
+
+  // Process > 100 frames, and make sure that that we get statistics
+  // only for 100 frames. Note the new SSRC, causing NetEQ to reset.
+  num_frames = 110;
+  for (int i = 0; i < num_frames; ++i) {
+    uint16_t payload[kSamples] = {0};
+    WebRtcNetEQ_RTPInfo rtp_info;
+    rtp_info.sequenceNumber = i;
+    rtp_info.timeStamp = i * kSamples;
+    rtp_info.SSRC = 0x1235;  // Just an arbitrary SSRC.
+    rtp_info.payloadType = 94;  // PCM16b WB codec.
+    rtp_info.markerBit = 0;
+    ASSERT_EQ(0, WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(), &rtp_info,
+                                            reinterpret_cast<uint8_t*>(payload),
+                                            kPayloadBytes, 0));
+    ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
+  }
+
+  len = WebRtcNetEQ_GetRawFrameWaitingTimes(neteq_inst_->instance(),
+                                            kVecLen, waiting_times);
+  EXPECT_EQ(100, len);
+}
+
+TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimeNegative) {
+  const int kNumFrames = 3000;  // Needed for convergence.
+  int frame_index = 0;
+  const int kSamples = 10 * 16;
+  const int kPayloadBytes = kSamples * 2;
+  while (frame_index < kNumFrames) {
+    // Insert one packet each time, except every 10th time where we insert two
+    // packets at once. This will create a negative clock-drift of approx. 10%.
+    int num_packets = (frame_index % 10 == 0 ? 2 : 1);
+    for (int n = 0; n < num_packets; ++n) {
+      uint8_t payload[kPayloadBytes] = {0};
+      WebRtcNetEQ_RTPInfo rtp_info;
+      PopulateRtpInfo(frame_index, frame_index * kSamples, &rtp_info);
+      ASSERT_EQ(0,
+                WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
+                                           &rtp_info,
+                                           payload,
+                                           kPayloadBytes, 0));
+      ++frame_index;
+    }
+
+    // Pull out data once.
+    ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
+  }
+
+  WebRtcNetEQ_NetworkStatistics network_stats;
+  ASSERT_EQ(0, WebRtcNetEQ_GetNetworkStatistics(neteq_inst_->instance(),
+                                                &network_stats));
+  EXPECT_EQ(-106911, network_stats.clockDriftPPM);
+}
+
+TEST_F(NetEqDecodingTest, TestAverageInterArrivalTimePositive) {
+  const int kNumFrames = 5000;  // Needed for convergence.
+  int frame_index = 0;
+  const int kSamples = 10 * 16;
+  const int kPayloadBytes = kSamples * 2;
+  for (int i = 0; i < kNumFrames; ++i) {
+    // Insert one packet each time, except every 10th time where we don't insert
+    // any packet. This will create a positive clock-drift of approx. 11%.
+    int num_packets = (i % 10 == 9 ? 0 : 1);
+    for (int n = 0; n < num_packets; ++n) {
+      uint8_t payload[kPayloadBytes] = {0};
+      WebRtcNetEQ_RTPInfo rtp_info;
+      PopulateRtpInfo(frame_index, frame_index * kSamples, &rtp_info);
+      ASSERT_EQ(0,
+                WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
+                                           &rtp_info,
+                                           payload,
+                                           kPayloadBytes, 0));
+      ++frame_index;
+    }
+
+    // Pull out data once.
+    ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
+  }
+
+  WebRtcNetEQ_NetworkStatistics network_stats;
+  ASSERT_EQ(0, WebRtcNetEQ_GetNetworkStatistics(neteq_inst_->instance(),
+                                                &network_stats));
+  EXPECT_EQ(108352, network_stats.clockDriftPPM);
+}
+
+TEST_F(NetEqDecodingTest, LongCngWithClockDrift) {
+  uint16_t seq_no = 0;
+  uint32_t timestamp = 0;
+  const int kFrameSizeMs = 30;
+  const int kSamples = kFrameSizeMs * 16;
+  const int kPayloadBytes = kSamples * 2;
+  // Apply a clock drift of -25 ms / s (sender faster than receiver).
+  const double kDriftFactor = 1000.0 / (1000.0 + 25.0);
+  double next_input_time_ms = 0.0;
+  double t_ms;
+
+  // Insert speech for 5 seconds.
+  const int kSpeechDurationMs = 5000;
+  for (t_ms = 0; t_ms < kSpeechDurationMs; t_ms += 10) {
+    // Each turn in this for loop is 10 ms.
+    while (next_input_time_ms <= t_ms) {
+      // Insert one 30 ms speech frame.
+      uint8_t payload[kPayloadBytes] = {0};
+      WebRtcNetEQ_RTPInfo rtp_info;
+      PopulateRtpInfo(seq_no, timestamp, &rtp_info);
+      ASSERT_EQ(0,
+                WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
+                                           &rtp_info,
+                                           payload,
+                                           kPayloadBytes, 0));
+      ++seq_no;
+      timestamp += kSamples;
+      next_input_time_ms += static_cast<double>(kFrameSizeMs) * kDriftFactor;
+    }
+    // Pull out data once.
+    ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
+  }
+
+  EXPECT_EQ(kOutputNormal, neteq_inst_->getOutputType());
+  int32_t delay_before = timestamp - neteq_inst_->getSpeechTimeStamp();
+
+  // Insert CNG for 1 minute (= 60000 ms).
+  const int kCngPeriodMs = 100;
+  const int kCngPeriodSamples = kCngPeriodMs * 16;  // Period in 16 kHz samples.
+  const int kCngDurationMs = 60000;
+  for (; t_ms < kSpeechDurationMs + kCngDurationMs; t_ms += 10) {
+    // Each turn in this for loop is 10 ms.
+    while (next_input_time_ms <= t_ms) {
+      // Insert one CNG frame each 100 ms.
+      uint8_t payload[kPayloadBytes];
+      int payload_len;
+      WebRtcNetEQ_RTPInfo rtp_info;
+      PopulateCng(seq_no, timestamp, &rtp_info, payload, &payload_len);
+      ASSERT_EQ(0,
+                WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
+                                           &rtp_info,
+                                           payload,
+                                           payload_len, 0));
+      ++seq_no;
+      timestamp += kCngPeriodSamples;
+      next_input_time_ms += static_cast<double>(kCngPeriodMs) * kDriftFactor;
+    }
+    // Pull out data once.
+    ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
+  }
+
+  EXPECT_EQ(kOutputCNG, neteq_inst_->getOutputType());
+
+  // Insert speech again until output type is speech.
+  while (neteq_inst_->getOutputType() != kOutputNormal) {
+    // Each turn in this for loop is 10 ms.
+    while (next_input_time_ms <= t_ms) {
+      // Insert one 30 ms speech frame.
+      uint8_t payload[kPayloadBytes] = {0};
+      WebRtcNetEQ_RTPInfo rtp_info;
+      PopulateRtpInfo(seq_no, timestamp, &rtp_info);
+      ASSERT_EQ(0,
+                WebRtcNetEQ_RecInRTPStruct(neteq_inst_->instance(),
+                                           &rtp_info,
+                                           payload,
+                                           kPayloadBytes, 0));
+      ++seq_no;
+      timestamp += kSamples;
+      next_input_time_ms += static_cast<double>(kFrameSizeMs) * kDriftFactor;
+    }
+    // Pull out data once.
+    ASSERT_TRUE(kBlockSize16kHz == neteq_inst_->recOut(out_data_));
+    // Increase clock.
+    t_ms += 10;
+  }
+
+  int32_t delay_after = timestamp - neteq_inst_->getSpeechTimeStamp();
+  // Compare delay before and after, and make sure it differs less than 20 ms.
+  EXPECT_LE(delay_after, delay_before + 20 * 16);
+  EXPECT_GE(delay_after, delay_before - 20 * 16);
+}
+
+}  // namespace
diff --git a/src/modules/audio_conference_mixer/OWNERS b/src/modules/audio_conference_mixer/OWNERS
new file mode 100644
index 0000000..7dc791e
--- /dev/null
+++ b/src/modules/audio_conference_mixer/OWNERS
@@ -0,0 +1,3 @@
+henrike@webrtc.org
+pwestin@webrtc.org
+andrew@webrtc.org
\ No newline at end of file
diff --git a/src/modules/audio_conference_mixer/interface/audio_conference_mixer.h b/src/modules/audio_conference_mixer/interface/audio_conference_mixer.h
new file mode 100644
index 0000000..4ece1bf
--- /dev/null
+++ b/src/modules/audio_conference_mixer/interface/audio_conference_mixer.h
@@ -0,0 +1,86 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INTERFACE_AUDIO_CONFERENCE_MIXER_H_
+#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INTERFACE_AUDIO_CONFERENCE_MIXER_H_
+
+#include "audio_conference_mixer_defines.h"
+#include "module.h"
+#include "module_common_types.h"
+
+namespace webrtc {
+class AudioMixerOutputReceiver;
+class AudioMixerStatusReceiver;
+class MixerParticipant;
+class Trace;
+
+class AudioConferenceMixer : public Module
+{
+public:
+    enum {kMaximumAmountOfMixedParticipants = 3};
+    enum Frequency
+    {
+        kNbInHz           = 8000,
+        kWbInHz           = 16000,
+        kSwbInHz          = 32000,
+        kLowestPossible   = -1,
+        kDefaultFrequency = kWbInHz
+    };
+
+    // Factory method. Constructor disabled.
+    static AudioConferenceMixer* Create(int id);
+    virtual ~AudioConferenceMixer() {}
+
+    // Module functions
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id) = 0;
+    virtual WebRtc_Word32 TimeUntilNextProcess() = 0 ;
+    virtual WebRtc_Word32 Process() = 0;
+
+    // Register/unregister a callback class for receiving the mixed audio.
+    virtual WebRtc_Word32 RegisterMixedStreamCallback(
+        AudioMixerOutputReceiver& receiver) = 0;
+    virtual WebRtc_Word32 UnRegisterMixedStreamCallback() = 0;
+
+    // Register/unregister a callback class for receiving status information.
+    virtual WebRtc_Word32 RegisterMixerStatusCallback(
+        AudioMixerStatusReceiver& mixerStatusCallback,
+        const WebRtc_UWord32 amountOf10MsBetweenCallbacks) = 0;
+    virtual WebRtc_Word32 UnRegisterMixerStatusCallback() = 0;
+
+    // Add/remove participants as candidates for mixing.
+    virtual WebRtc_Word32 SetMixabilityStatus(
+        MixerParticipant& participant,
+        const bool mixable) = 0;
+    // mixable is set to true if a participant is a candidate for mixing.
+    virtual WebRtc_Word32 MixabilityStatus(
+        MixerParticipant& participant,
+        bool& mixable) = 0;
+
+    // Inform the mixer that the participant should always be mixed and not
+    // count toward the number of mixed participants. Note that a participant
+    // must have been added to the mixer (by calling SetMixabilityStatus())
+    // before this function can be successfully called.
+    virtual WebRtc_Word32 SetAnonymousMixabilityStatus(
+        MixerParticipant& participant, const bool mixable) = 0;
+    // mixable is set to true if the participant is mixed anonymously.
+    virtual WebRtc_Word32 AnonymousMixabilityStatus(
+        MixerParticipant& participant, bool& mixable) = 0;
+
+    // Set the minimum sampling frequency at which to mix. The mixing algorithm
+    // may still choose to mix at a higher samling frequency to avoid
+    // downsampling of audio contributing to the mixed audio.
+    virtual WebRtc_Word32 SetMinimumMixingFrequency(Frequency freq) = 0;
+
+protected:
+    AudioConferenceMixer() {}
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INTERFACE_AUDIO_CONFERENCE_MIXER_H_
diff --git a/src/modules/audio_conference_mixer/interface/audio_conference_mixer_defines.h b/src/modules/audio_conference_mixer/interface/audio_conference_mixer_defines.h
new file mode 100644
index 0000000..718470d
--- /dev/null
+++ b/src/modules/audio_conference_mixer/interface/audio_conference_mixer_defines.h
@@ -0,0 +1,106 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INTERFACE_AUDIO_CONFERENCE_MIXER_DEFINES_H_
+#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INTERFACE_AUDIO_CONFERENCE_MIXER_DEFINES_H_
+
+#include "map_wrapper.h"
+#include "module_common_types.h"
+#include "typedefs.h"
+
+namespace webrtc {
+class MixHistory;
+
+// A callback class that all mixer participants must inherit from/implement.
+class MixerParticipant
+{
+public:
+    // The implementation of this function should update audioFrame with new
+    // audio every time it's called.
+    //
+    // If it returns -1, the frame will not be added to the mix.
+    virtual WebRtc_Word32 GetAudioFrame(const WebRtc_Word32 id,
+                                        AudioFrame& audioFrame) = 0;
+
+    // mixed will be set to true if the participant was mixed this mix iteration
+    WebRtc_Word32 IsMixed(bool& mixed) const;
+
+    // This function specifies the sampling frequency needed for the AudioFrame
+    // for future GetAudioFrame(..) calls.
+    virtual WebRtc_Word32 NeededFrequency(const WebRtc_Word32 id) = 0;
+
+    MixHistory* _mixHistory;
+protected:
+    MixerParticipant();
+    virtual ~MixerParticipant();
+};
+
+// Container struct for participant statistics.
+struct ParticipantStatistics
+{
+    WebRtc_Word32 participant;
+    WebRtc_Word32 level;
+};
+
+class AudioMixerStatusReceiver
+{
+public:
+    // Callback function that provides an array of ParticipantStatistics for the
+    // participants that were mixed last mix iteration.
+    virtual void MixedParticipants(
+        const WebRtc_Word32 id,
+        const ParticipantStatistics* participantStatistics,
+        const WebRtc_UWord32 size) = 0;
+    // Callback function that provides an array of the ParticipantStatistics for
+    // the participants that had a positiv VAD last mix iteration.
+    virtual void VADPositiveParticipants(
+        const WebRtc_Word32 id,
+        const ParticipantStatistics* participantStatistics,
+        const WebRtc_UWord32 size) = 0;
+    // Callback function that provides the audio level of the mixed audio frame
+    // from the last mix iteration.
+    virtual void MixedAudioLevel(
+        const WebRtc_Word32  id,
+        const WebRtc_UWord32 level) = 0;
+protected:
+    AudioMixerStatusReceiver() {}
+    virtual ~AudioMixerStatusReceiver() {}
+};
+
+class AudioMixerOutputReceiver
+{
+public:
+    // This callback function provides the mixed audio for this mix iteration.
+    // Note that uniqueAudioFrames is an array of AudioFrame pointers with the
+    // size according to the size parameter.
+    virtual void NewMixedAudio(const WebRtc_Word32 id,
+                               const AudioFrame& generalAudioFrame,
+                               const AudioFrame** uniqueAudioFrames,
+                               const WebRtc_UWord32 size) = 0;
+protected:
+    AudioMixerOutputReceiver() {}
+    virtual ~AudioMixerOutputReceiver() {}
+};
+
+class AudioRelayReceiver
+{
+public:
+    // This callback function provides the mix decision for this mix iteration.
+    // mixerList is a list of elements of the type
+    // [int,MixerParticipant*]
+    virtual void NewAudioToRelay(const WebRtc_Word32 id,
+                                 const MapWrapper& mixerList) = 0;
+protected:
+    AudioRelayReceiver() {}
+    virtual ~AudioRelayReceiver() {}
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_INTERFACE_AUDIO_CONFERENCE_MIXER_DEFINES_H_
diff --git a/src/modules/audio_conference_mixer/source/audio_conference_mixer.gypi b/src/modules/audio_conference_mixer/source/audio_conference_mixer.gypi
new file mode 100644
index 0000000..c00885b
--- /dev/null
+++ b/src/modules/audio_conference_mixer/source/audio_conference_mixer.gypi
@@ -0,0 +1,71 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'audio_conference_mixer',
+      'type': '<(library)',
+      'dependencies': [
+        'audio_processing',
+        'webrtc_utility',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../interface',
+        '../../interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../interface',
+          '../../interface',
+        ],
+      },
+      'sources': [
+        '../interface/audio_conference_mixer.h',
+        '../interface/audio_conference_mixer_defines.h',
+        'audio_frame_manipulator.cc',
+        'audio_frame_manipulator.h',
+        'level_indicator.cc',
+        'level_indicator.h',
+        'memory_pool.h',
+        'memory_pool_posix.h',
+        'memory_pool_win.h',
+        'audio_conference_mixer_impl.cc',
+        'audio_conference_mixer_impl.h',
+        'time_scheduler.cc',
+        'time_scheduler.h',
+      ],
+    },
+  ], # targets
+  'conditions': [
+    ['include_tests==1', {
+      'targets': [
+        {
+          'target_name': 'audio_conference_mixer_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'audio_conference_mixer',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+          ],
+          'sources': [
+            'audio_conference_mixer_unittest.cc',
+          ],
+        }, # audio_conference_mixer_unittests
+      ], # targets
+    }], # include_tests
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc b/src/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc
new file mode 100644
index 0000000..851642c
--- /dev/null
+++ b/src/modules/audio_conference_mixer/source/audio_conference_mixer_impl.cc
@@ -0,0 +1,1214 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_conference_mixer_defines.h"
+#include "audio_conference_mixer_impl.h"
+#include "audio_frame_manipulator.h"
+#include "audio_processing.h"
+#include "critical_section_wrapper.h"
+#include "map_wrapper.h"
+#include "modules/utility/interface/audio_frame_operations.h"
+#include "trace.h"
+
+namespace webrtc {
+namespace {
+
+// Mix |frame| into |mixed_frame|, with saturation protection and upmixing.
+// These effects are applied to |frame| itself prior to mixing. Assumes that
+// |mixed_frame| always has at least as many channels as |frame|. Supports
+// stereo at most.
+//
+// TODO(andrew): consider not modifying |frame| here.
+void MixFrames(AudioFrame* mixed_frame, AudioFrame* frame) {
+  assert(mixed_frame->num_channels_ >= frame->num_channels_);
+  // Divide by two to avoid saturation in the mixing.
+  *frame >>= 1;
+  if (mixed_frame->num_channels_ > frame->num_channels_) {
+    // We only support mono-to-stereo.
+    assert(mixed_frame->num_channels_ == 2 &&
+           frame->num_channels_ == 1);
+    AudioFrameOperations::MonoToStereo(frame);
+  }
+
+  *mixed_frame += *frame;
+}
+
+// Return the max number of channels from a |list| composed of AudioFrames.
+int MaxNumChannels(const ListWrapper& list) {
+  ListItem* item = list.First();
+  int max_num_channels = 1;
+  while (item) {
+    AudioFrame* frame = static_cast<AudioFrame*>(item->GetItem());
+    max_num_channels = std::max(max_num_channels, frame->num_channels_);
+    item = list.Next(item);
+  }
+  return max_num_channels;
+}
+
+void SetParticipantStatistics(ParticipantStatistics* stats,
+                              const AudioFrame& frame)
+{
+    stats->participant = frame.id_;
+    stats->level = 0;  // TODO(andrew): to what should this be set?
+}
+
+}  // namespace
+
+MixerParticipant::MixerParticipant()
+    : _mixHistory(new MixHistory())
+{
+}
+
+MixerParticipant::~MixerParticipant()
+{
+    delete _mixHistory;
+}
+
+WebRtc_Word32 MixerParticipant::IsMixed(bool& mixed) const
+{
+    return _mixHistory->IsMixed(mixed);
+}
+
+MixHistory::MixHistory()
+    : _isMixed(0)
+{
+}
+
+MixHistory::~MixHistory()
+{
+}
+
+WebRtc_Word32 MixHistory::IsMixed(bool& mixed) const
+{
+    mixed = _isMixed;
+    return 0;
+}
+
+WebRtc_Word32 MixHistory::WasMixed(bool& wasMixed) const
+{
+    // Was mixed is the same as is mixed depending on perspective. This function
+    // is for the perspective of AudioConferenceMixerImpl.
+    return IsMixed(wasMixed);
+}
+
+WebRtc_Word32 MixHistory::SetIsMixed(const bool mixed)
+{
+    _isMixed = mixed;
+    return 0;
+}
+
+void MixHistory::ResetMixedStatus()
+{
+    _isMixed = false;
+}
+
+AudioConferenceMixer* AudioConferenceMixer::Create(int id)
+{
+    AudioConferenceMixerImpl* mixer = new AudioConferenceMixerImpl(id);
+    if(!mixer->Init())
+    {
+        delete mixer;
+        return NULL;
+    }
+    return mixer;
+}
+
+AudioConferenceMixerImpl::AudioConferenceMixerImpl(int id)
+    : _scratchParticipantsToMixAmount(0),
+      _scratchMixedParticipants(),
+      _scratchVadPositiveParticipantsAmount(0),
+      _scratchVadPositiveParticipants(),
+      _crit(NULL),
+      _cbCrit(NULL),
+      _id(id),
+      _minimumMixingFreq(kLowestPossible),
+      _mixReceiver(NULL),
+      _mixerStatusCallback(NULL),
+      _amountOf10MsBetweenCallbacks(1),
+      _amountOf10MsUntilNextCallback(0),
+      _mixerStatusCb(false),
+      _outputFrequency(kDefaultFrequency),
+      _sampleSize(0),
+      _audioFramePool(NULL),
+      _participantList(),
+      _additionalParticipantList(),
+      _numMixedParticipants(0),
+      _timeStamp(0),
+      _timeScheduler(kProcessPeriodicityInMs),
+      _mixedAudioLevel(),
+      _processCalls(0),
+      _limiter(NULL)
+{}
+
+bool AudioConferenceMixerImpl::Init()
+{
+    _crit.reset(CriticalSectionWrapper::CreateCriticalSection());
+    if (_crit.get() == NULL)
+        return false;
+
+    _cbCrit.reset(CriticalSectionWrapper::CreateCriticalSection());
+    if(_cbCrit.get() == NULL)
+        return false;
+
+    _limiter.reset(AudioProcessing::Create(_id));
+    if(_limiter.get() == NULL)
+        return false;
+
+    MemoryPool<AudioFrame>::CreateMemoryPool(_audioFramePool,
+                                             DEFAULT_AUDIO_FRAME_POOLSIZE);
+    if(_audioFramePool == NULL)
+        return false;
+
+    if(SetOutputFrequency(kDefaultFrequency) == -1)
+        return false;
+
+    // Assume mono.
+    if (!SetNumLimiterChannels(1))
+        return false;
+
+    if(_limiter->gain_control()->set_mode(GainControl::kFixedDigital) != 
+        _limiter->kNoError)
+        return false;
+
+    // We smoothly limit the mixed frame to -7 dbFS. -6 would correspond to the
+    // divide-by-2 but -7 is used instead to give a bit of headroom since the
+    // AGC is not a hard limiter.
+    if(_limiter->gain_control()->set_target_level_dbfs(7) != _limiter->kNoError)
+        return false;
+
+    if(_limiter->gain_control()->set_compression_gain_db(0)
+        != _limiter->kNoError)
+        return false;
+
+    if(_limiter->gain_control()->enable_limiter(true) != _limiter->kNoError)
+        return false;
+
+    if(_limiter->gain_control()->Enable(true) != _limiter->kNoError)
+        return false;
+
+    return true;
+}
+
+AudioConferenceMixerImpl::~AudioConferenceMixerImpl()
+{
+    MemoryPool<AudioFrame>::DeleteMemoryPool(_audioFramePool);
+    assert(_audioFramePool == NULL);
+}
+
+WebRtc_Word32 AudioConferenceMixerImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    return 0;
+}
+
+// Process should be called every kProcessPeriodicityInMs ms
+WebRtc_Word32 AudioConferenceMixerImpl::TimeUntilNextProcess()
+{
+    WebRtc_Word32 timeUntilNextProcess = 0;
+    CriticalSectionScoped cs(_crit.get());
+    if(_timeScheduler.TimeToNextUpdate(timeUntilNextProcess) != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
+                     "failed in TimeToNextUpdate() call");
+        // Sanity check
+        assert(false);
+        return -1;
+    }
+    return timeUntilNextProcess;
+}
+
+WebRtc_Word32 AudioConferenceMixerImpl::Process()
+{
+    WebRtc_UWord32 remainingParticipantsAllowedToMix =
+        kMaximumAmountOfMixedParticipants;
+    {
+        CriticalSectionScoped cs(_crit.get());
+        assert(_processCalls == 0);
+        _processCalls++;
+
+        // Let the scheduler know that we are running one iteration.
+        _timeScheduler.UpdateScheduler();
+    }
+
+    ListWrapper mixList;
+    ListWrapper rampOutList;
+    ListWrapper additionalFramesList;
+    MapWrapper mixedParticipantsMap;
+    {
+        CriticalSectionScoped cs(_cbCrit.get());
+
+        WebRtc_Word32 lowFreq = GetLowestMixingFrequency();
+        // SILK can run in 12 kHz and 24 kHz. These frequencies are not
+        // supported so use the closest higher frequency to not lose any
+        // information.
+        // TODO(henrike): this is probably more appropriate to do in
+        //                GetLowestMixingFrequency().
+        if (lowFreq == 12000)
+        {
+            lowFreq = 16000;
+        } else if (lowFreq == 24000) {
+            lowFreq = 32000;
+        }
+        if(lowFreq <= 0)
+        {
+            CriticalSectionScoped cs(_crit.get());
+            _processCalls--;
+            return 0;
+        } else  {
+            switch(lowFreq)
+            {
+            case 8000:
+                if(OutputFrequency() != kNbInHz)
+                {
+                    SetOutputFrequency(kNbInHz);
+                }
+                break;
+            case 16000:
+                if(OutputFrequency() != kWbInHz)
+                {
+                    SetOutputFrequency(kWbInHz);
+                }
+                break;
+            case 32000:
+                if(OutputFrequency() != kSwbInHz)
+                {
+                    SetOutputFrequency(kSwbInHz);
+                }
+                break;
+            default:
+                assert(false);
+
+                CriticalSectionScoped cs(_crit.get());
+                _processCalls--;
+                return -1;
+            }
+        }
+
+        UpdateToMix(mixList, rampOutList, mixedParticipantsMap,
+                    remainingParticipantsAllowedToMix);
+
+        GetAdditionalAudio(additionalFramesList);
+        UpdateMixedStatus(mixedParticipantsMap);
+        _scratchParticipantsToMixAmount = mixedParticipantsMap.Size();
+    }
+
+    // Clear mixedParticipantsMap to avoid memory leak warning.
+    // Please note that the mixedParticipantsMap doesn't own any dynamically
+    // allocated memory.
+    while(mixedParticipantsMap.Erase(mixedParticipantsMap.First()) == 0) {}
+
+    // Get an AudioFrame for mixing from the memory pool.
+    AudioFrame* mixedAudio = NULL;
+    if(_audioFramePool->PopMemory(mixedAudio) == -1)
+    {
+        WEBRTC_TRACE(kTraceMemory, kTraceAudioMixerServer, _id,
+                     "failed PopMemory() call");
+        assert(false);
+        return -1;
+    }
+
+    bool timeForMixerCallback = false;
+    int retval = 0;
+    WebRtc_Word32 audioLevel = 0;
+    {
+        CriticalSectionScoped cs(_crit.get());
+
+        // TODO(henrike): it might be better to decide the number of channels
+        //                with an API instead of dynamically.
+
+        // Find the max channels over all mixing lists.
+        const int num_mixed_channels = std::max(MaxNumChannels(mixList),
+            std::max(MaxNumChannels(additionalFramesList),
+                     MaxNumChannels(rampOutList)));
+
+        if (!SetNumLimiterChannels(num_mixed_channels))
+            retval = -1;
+
+        mixedAudio->UpdateFrame(-1, _timeStamp, NULL, 0, _outputFrequency,
+                                AudioFrame::kNormalSpeech,
+                                AudioFrame::kVadPassive, num_mixed_channels);
+
+        _timeStamp += _sampleSize;
+
+        MixFromList(*mixedAudio, mixList);
+        MixAnonomouslyFromList(*mixedAudio, additionalFramesList);
+        MixAnonomouslyFromList(*mixedAudio, rampOutList);
+
+        if(mixedAudio->samples_per_channel_ == 0)
+        {
+            // Nothing was mixed, set the audio samples to silence.
+            memset(mixedAudio->data_, 0, _sampleSize);
+            mixedAudio->samples_per_channel_ = _sampleSize;
+        }
+        else
+        {
+            // Only call the limiter if we have something to mix.
+            if(!LimitMixedAudio(*mixedAudio))
+                retval = -1;
+        }
+
+        _mixedAudioLevel.ComputeLevel(mixedAudio->data_,_sampleSize);
+        audioLevel = _mixedAudioLevel.GetLevel();
+
+        if(_mixerStatusCb)
+        {
+            _scratchVadPositiveParticipantsAmount = 0;
+            UpdateVADPositiveParticipants(mixList);
+            if(_amountOf10MsUntilNextCallback-- == 0)
+            {
+                _amountOf10MsUntilNextCallback = _amountOf10MsBetweenCallbacks;
+                timeForMixerCallback = true;
+            }
+        }
+    }
+
+    {
+        CriticalSectionScoped cs(_cbCrit.get());
+        if(_mixReceiver != NULL)
+        {
+            const AudioFrame** dummy = NULL;
+            _mixReceiver->NewMixedAudio(
+                _id,
+                *mixedAudio,
+                dummy,
+                0);
+        }
+
+        if((_mixerStatusCallback != NULL) &&
+            timeForMixerCallback)
+        {
+            _mixerStatusCallback->MixedParticipants(
+                _id,
+                _scratchMixedParticipants,
+                _scratchParticipantsToMixAmount);
+
+            _mixerStatusCallback->VADPositiveParticipants(
+                _id,
+                _scratchVadPositiveParticipants,
+                _scratchVadPositiveParticipantsAmount);
+            _mixerStatusCallback->MixedAudioLevel(_id,audioLevel);
+        }
+    }
+
+    // Reclaim all outstanding memory.
+    _audioFramePool->PushMemory(mixedAudio);
+    ClearAudioFrameList(mixList);
+    ClearAudioFrameList(rampOutList);
+    ClearAudioFrameList(additionalFramesList);
+    {
+        CriticalSectionScoped cs(_crit.get());
+        _processCalls--;
+    }
+    return retval;
+}
+
+WebRtc_Word32 AudioConferenceMixerImpl::RegisterMixedStreamCallback(
+    AudioMixerOutputReceiver& mixReceiver)
+{
+    CriticalSectionScoped cs(_cbCrit.get());
+    if(_mixReceiver != NULL)
+    {
+        return -1;
+    }
+    _mixReceiver = &mixReceiver;
+    return 0;
+}
+
+WebRtc_Word32 AudioConferenceMixerImpl::UnRegisterMixedStreamCallback()
+{
+    CriticalSectionScoped cs(_cbCrit.get());
+    if(_mixReceiver == NULL)
+    {
+        return -1;
+    }
+    _mixReceiver = NULL;
+    return 0;
+}
+
+WebRtc_Word32 AudioConferenceMixerImpl::SetOutputFrequency(
+    const Frequency frequency)
+{
+    CriticalSectionScoped cs(_crit.get());
+    const int error = _limiter->set_sample_rate_hz(frequency);
+    if(error != _limiter->kNoError)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
+                     "Error from AudioProcessing: %d", error);
+        return -1;
+    }
+
+    _outputFrequency = frequency;
+    _sampleSize = (_outputFrequency*kProcessPeriodicityInMs) / 1000;
+
+    return 0;
+}
+
+AudioConferenceMixer::Frequency
+AudioConferenceMixerImpl::OutputFrequency() const
+{
+    CriticalSectionScoped cs(_crit.get());
+    return _outputFrequency;
+}
+
+bool AudioConferenceMixerImpl::SetNumLimiterChannels(int numChannels)
+{
+    if(_limiter->num_input_channels() != numChannels)
+    {
+        const int error = _limiter->set_num_channels(numChannels,
+                                                     numChannels);
+        if(error != _limiter->kNoError)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
+                         "Error from AudioProcessing: %d", error);
+            assert(false);
+            return false;
+        }
+    }
+
+    return true;
+}
+
+WebRtc_Word32 AudioConferenceMixerImpl::RegisterMixerStatusCallback(
+    AudioMixerStatusReceiver& mixerStatusCallback,
+    const WebRtc_UWord32 amountOf10MsBetweenCallbacks)
+{
+    if(amountOf10MsBetweenCallbacks == 0)
+    {
+        WEBRTC_TRACE(
+            kTraceWarning,
+            kTraceAudioMixerServer,
+            _id,
+            "amountOf10MsBetweenCallbacks(%d) needs to be larger than 0");
+        return -1;
+    }
+    {
+        CriticalSectionScoped cs(_cbCrit.get());
+        if(_mixerStatusCallback != NULL)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
+                         "Mixer status callback already registered");
+            return -1;
+        }
+        _mixerStatusCallback = &mixerStatusCallback;
+    }
+    {
+        CriticalSectionScoped cs(_crit.get());
+        _amountOf10MsBetweenCallbacks  = amountOf10MsBetweenCallbacks;
+        _amountOf10MsUntilNextCallback = 0;
+        _mixerStatusCb                 = true;
+    }
+    return 0;
+}
+
+WebRtc_Word32 AudioConferenceMixerImpl::UnRegisterMixerStatusCallback()
+{
+    {
+        CriticalSectionScoped cs(_crit.get());
+        if(!_mixerStatusCb)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
+                         "Mixer status callback not registered");
+            return -1;
+        }
+        _mixerStatusCb = false;
+    }
+    {
+        CriticalSectionScoped cs(_cbCrit.get());
+        _mixerStatusCallback = NULL;
+    }
+    return 0;
+}
+
+WebRtc_Word32 AudioConferenceMixerImpl::SetMixabilityStatus(
+    MixerParticipant& participant,
+    const bool mixable)
+{
+    if (!mixable)
+    {
+        // Anonymous participants are in a separate list. Make sure that the
+        // participant is in the _participantList if it is being mixed.
+        SetAnonymousMixabilityStatus(participant, false);
+    }
+    WebRtc_UWord32 numMixedParticipants;
+    {
+        CriticalSectionScoped cs(_cbCrit.get());
+        const bool isMixed =
+            IsParticipantInList(participant,_participantList);
+        // API must be called with a new state.
+        if(!(mixable ^ isMixed))
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
+                         "Mixable is aready %s",
+                         isMixed ? "ON" : "off");
+            return -1;
+        }
+        bool success = false;
+        if(mixable)
+        {
+            success = AddParticipantToList(participant,_participantList);
+        }
+        else
+        {
+            success = RemoveParticipantFromList(participant,_participantList);
+        }
+        if(!success)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
+                         "failed to %s participant",
+                         mixable ? "add" : "remove");
+            assert(false);
+            return -1;
+        }
+
+        int numMixedNonAnonymous = _participantList.GetSize();
+        if (numMixedNonAnonymous > kMaximumAmountOfMixedParticipants)
+        {
+            numMixedNonAnonymous = kMaximumAmountOfMixedParticipants;
+        }
+        numMixedParticipants = numMixedNonAnonymous +
+                               _additionalParticipantList.GetSize();
+    }
+    // A MixerParticipant was added or removed. Make sure the scratch
+    // buffer is updated if necessary.
+    // Note: The scratch buffer may only be updated in Process().
+    CriticalSectionScoped cs(_crit.get());
+    _numMixedParticipants = numMixedParticipants;
+    return 0;
+}
+
+WebRtc_Word32 AudioConferenceMixerImpl::MixabilityStatus(
+    MixerParticipant& participant,
+    bool& mixable)
+{
+    CriticalSectionScoped cs(_cbCrit.get());
+    mixable = IsParticipantInList(participant, _participantList);
+    return 0;
+}
+
+WebRtc_Word32 AudioConferenceMixerImpl::SetAnonymousMixabilityStatus(
+    MixerParticipant& participant, const bool anonymous)
+{
+    CriticalSectionScoped cs(_cbCrit.get());
+    if(IsParticipantInList(participant, _additionalParticipantList))
+    {
+        if(anonymous)
+        {
+            return 0;
+        }
+        if(!RemoveParticipantFromList(participant, _additionalParticipantList))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
+                         "unable to remove participant from anonymous list");
+            assert(false);
+            return -1;
+        }
+        return AddParticipantToList(participant, _participantList) ? 0 : -1;
+    }
+    if(!anonymous)
+    {
+        return 0;
+    }
+    const bool mixable = RemoveParticipantFromList(participant,
+                                                   _participantList);
+    if(!mixable)
+    {
+        WEBRTC_TRACE(
+            kTraceWarning,
+            kTraceAudioMixerServer,
+            _id,
+            "participant must be registered before turning it into anonymous");
+        // Setting anonymous status is only possible if MixerParticipant is
+        // already registered.
+        return -1;
+    }
+    return AddParticipantToList(participant, _additionalParticipantList) ?
+        0 : -1;
+}
+
+WebRtc_Word32 AudioConferenceMixerImpl::AnonymousMixabilityStatus(
+    MixerParticipant& participant, bool& mixable)
+{
+    CriticalSectionScoped cs(_cbCrit.get());
+    mixable = IsParticipantInList(participant,
+                                  _additionalParticipantList);
+    return 0;
+}
+
+WebRtc_Word32 AudioConferenceMixerImpl::SetMinimumMixingFrequency(
+    Frequency freq)
+{
+    // Make sure that only allowed sampling frequencies are used. Use closest
+    // higher sampling frequency to avoid losing information.
+    if (static_cast<int>(freq) == 12000)
+    {
+         freq = kWbInHz;
+    } else if (static_cast<int>(freq) == 24000) {
+        freq = kSwbInHz;
+    }
+
+    if((freq == kNbInHz) || (freq == kWbInHz) || (freq == kSwbInHz) ||
+       (freq == kLowestPossible))
+    {
+        _minimumMixingFreq=freq;
+        return 0;
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
+                     "SetMinimumMixingFrequency incorrect frequency: %i",freq);
+        assert(false);
+        return -1;
+    }
+}
+
+// Check all AudioFrames that are to be mixed. The highest sampling frequency
+// found is the lowest that can be used without losing information.
+WebRtc_Word32 AudioConferenceMixerImpl::GetLowestMixingFrequency()
+{
+    const int participantListFrequency =
+        GetLowestMixingFrequencyFromList(_participantList);
+    const int anonymousListFrequency =
+        GetLowestMixingFrequencyFromList(_additionalParticipantList);
+    const int highestFreq =
+        (participantListFrequency > anonymousListFrequency) ?
+            participantListFrequency : anonymousListFrequency;
+    // Check if the user specified a lowest mixing frequency.
+    if(_minimumMixingFreq != kLowestPossible)
+    {
+        if(_minimumMixingFreq > highestFreq)
+        {
+            return _minimumMixingFreq;
+        }
+    }
+    return highestFreq;
+}
+
+WebRtc_Word32 AudioConferenceMixerImpl::GetLowestMixingFrequencyFromList(
+    ListWrapper& mixList)
+{
+    WebRtc_Word32 highestFreq = 8000;
+    ListItem* item = mixList.First();
+    while(item)
+    {
+        MixerParticipant* participant =
+            static_cast<MixerParticipant*>(item->GetItem());
+        const WebRtc_Word32 neededFrequency = participant->NeededFrequency(_id);
+        if(neededFrequency > highestFreq)
+        {
+            highestFreq = neededFrequency;
+        }
+        item = mixList.Next(item);
+    }
+    return highestFreq;
+}
+
+void AudioConferenceMixerImpl::UpdateToMix(
+    ListWrapper& mixList,
+    ListWrapper& rampOutList,
+    MapWrapper& mixParticipantList,
+    WebRtc_UWord32& maxAudioFrameCounter)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
+                 "UpdateToMix(mixList,rampOutList,mixParticipantList,%d)",
+                 maxAudioFrameCounter);
+    const WebRtc_UWord32 mixListStartSize = mixList.GetSize();
+    ListWrapper activeList; // Elements are AudioFrames
+    // Struct needed by the passive lists to keep track of which AudioFrame
+    // belongs to which MixerParticipant.
+    struct ParticipantFramePair
+    {
+        MixerParticipant* participant;
+        AudioFrame* audioFrame;
+    };
+    ListWrapper passiveWasNotMixedList; // Elements are MixerParticipant
+    ListWrapper passiveWasMixedList;    // Elements are MixerParticipant
+    ListItem* item = _participantList.First();
+    while(item)
+    {
+        // Stop keeping track of passive participants if there are already
+        // enough participants available (they wont be mixed anyway).
+        bool mustAddToPassiveList = (maxAudioFrameCounter >
+                                    (activeList.GetSize() +
+                                     passiveWasMixedList.GetSize() +
+                                     passiveWasNotMixedList.GetSize()));
+
+        MixerParticipant* participant = static_cast<MixerParticipant*>(
+            item->GetItem());
+        bool wasMixed = false;
+        participant->_mixHistory->WasMixed(wasMixed);
+        AudioFrame* audioFrame = NULL;
+        if(_audioFramePool->PopMemory(audioFrame) == -1)
+        {
+            WEBRTC_TRACE(kTraceMemory, kTraceAudioMixerServer, _id,
+                         "failed PopMemory() call");
+            assert(false);
+            return;
+        }
+        audioFrame->sample_rate_hz_ = _outputFrequency;
+
+        if(participant->GetAudioFrame(_id,*audioFrame) != 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
+                         "failed to GetAudioFrame() from participant");
+            _audioFramePool->PushMemory(audioFrame);
+            item = _participantList.Next(item);
+            continue;
+        }
+        // TODO(henrike): this assert triggers in some test cases where SRTP is
+        // used which prevents NetEQ from making a VAD. Temporarily disable this
+        // assert until the problem is fixed on a higher level.
+        // assert(audioFrame->vad_activity_ != AudioFrame::kVadUnknown);
+        if (audioFrame->vad_activity_ == AudioFrame::kVadUnknown)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
+                         "invalid VAD state from participant");
+        }
+
+        if(audioFrame->vad_activity_ == AudioFrame::kVadActive)
+        {
+            if(!wasMixed)
+            {
+                RampIn(*audioFrame);
+            }
+
+            if(activeList.GetSize() >= maxAudioFrameCounter)
+            {
+                // There are already more active participants than should be
+                // mixed. Only keep the ones with the highest energy.
+                ListItem* replaceItem = NULL;
+                CalculateEnergy(*audioFrame);
+                WebRtc_UWord32 lowestEnergy = audioFrame->energy_;
+
+                ListItem* activeItem = activeList.First();
+                while(activeItem)
+                {
+                    AudioFrame* replaceFrame = static_cast<AudioFrame*>(
+                        activeItem->GetItem());
+                    CalculateEnergy(*replaceFrame);
+                    if(replaceFrame->energy_ < lowestEnergy)
+                    {
+                        replaceItem = activeItem;
+                        lowestEnergy = replaceFrame->energy_;
+                    }
+                    activeItem = activeList.Next(activeItem);
+                }
+                if(replaceItem != NULL)
+                {
+                    AudioFrame* replaceFrame = static_cast<AudioFrame*>(
+                        replaceItem->GetItem());
+
+                    bool replaceWasMixed = false;
+                    MapItem* replaceParticipant = mixParticipantList.Find(
+                        replaceFrame->id_);
+                    // When a frame is pushed to |activeList| it is also pushed
+                    // to mixParticipantList with the frame's id. This means
+                    // that the Find call above should never fail.
+                    if(replaceParticipant == NULL)
+                    {
+                        assert(false);
+                    } else {
+                        static_cast<MixerParticipant*>(
+                            replaceParticipant->GetItem())->_mixHistory->
+                            WasMixed(replaceWasMixed);
+
+                        mixParticipantList.Erase(replaceFrame->id_);
+                        activeList.Erase(replaceItem);
+
+                        activeList.PushFront(static_cast<void*>(audioFrame));
+                        mixParticipantList.Insert(
+                            audioFrame->id_,
+                            static_cast<void*>(participant));
+                        assert(mixParticipantList.Size() <=
+                               kMaximumAmountOfMixedParticipants);
+
+                        if(replaceWasMixed)
+                        {
+                            RampOut(*replaceFrame);
+                            rampOutList.PushBack(
+                                static_cast<void*>(replaceFrame));
+                            assert(rampOutList.GetSize() <=
+                                   kMaximumAmountOfMixedParticipants);
+                        } else {
+                            _audioFramePool->PushMemory(replaceFrame);
+                        }
+                    }
+                } else {
+                    if(wasMixed)
+                    {
+                        RampOut(*audioFrame);
+                        rampOutList.PushBack(static_cast<void*>(audioFrame));
+                        assert(rampOutList.GetSize() <=
+                               kMaximumAmountOfMixedParticipants);
+                    } else {
+                        _audioFramePool->PushMemory(audioFrame);
+                    }
+                }
+            } else {
+                activeList.PushFront(static_cast<void*>(audioFrame));
+                mixParticipantList.Insert(audioFrame->id_,
+                                          static_cast<void*>(participant));
+                assert(mixParticipantList.Size() <=
+                       kMaximumAmountOfMixedParticipants);
+            }
+        } else {
+            if(wasMixed)
+            {
+                ParticipantFramePair* pair = new ParticipantFramePair;
+                pair->audioFrame  = audioFrame;
+                pair->participant = participant;
+                passiveWasMixedList.PushBack(static_cast<void*>(pair));
+            } else if(mustAddToPassiveList) {
+                RampIn(*audioFrame);
+                ParticipantFramePair* pair = new ParticipantFramePair;
+                pair->audioFrame  = audioFrame;
+                pair->participant = participant;
+                passiveWasNotMixedList.PushBack(static_cast<void*>(pair));
+            } else {
+                _audioFramePool->PushMemory(audioFrame);
+            }
+        }
+        item = _participantList.Next(item);
+    }
+    assert(activeList.GetSize() <= maxAudioFrameCounter);
+    // At this point it is known which participants should be mixed. Transfer
+    // this information to this functions output parameters.
+    while(!activeList.Empty())
+    {
+        ListItem* mixItem = activeList.First();
+        mixList.PushBack(mixItem->GetItem());
+        activeList.Erase(mixItem);
+    }
+    // Always mix a constant number of AudioFrames. If there aren't enough
+    // active participants mix passive ones. Starting with those that was mixed
+    // last iteration.
+    while(!passiveWasMixedList.Empty())
+    {
+        ListItem* mixItem = passiveWasMixedList.First();
+        ParticipantFramePair* pair = static_cast<ParticipantFramePair*>(
+            mixItem->GetItem());
+        if(mixList.GetSize() <  maxAudioFrameCounter + mixListStartSize)
+        {
+            mixList.PushBack(pair->audioFrame);
+            mixParticipantList.Insert(pair->audioFrame->id_,
+                                      static_cast<void*>(pair->participant));
+            assert(mixParticipantList.Size() <=
+                   kMaximumAmountOfMixedParticipants);
+        }
+        else
+        {
+            _audioFramePool->PushMemory(pair->audioFrame);
+        }
+        delete pair;
+        passiveWasMixedList.Erase(mixItem);
+    }
+    // And finally the ones that have not been mixed for a while.
+    while(!passiveWasNotMixedList.Empty())
+    {
+        ListItem* mixItem = passiveWasNotMixedList.First();
+        ParticipantFramePair* pair = static_cast<ParticipantFramePair*>(
+            mixItem->GetItem());
+        if(mixList.GetSize() <  maxAudioFrameCounter + mixListStartSize)
+        {
+            mixList.PushBack(pair->audioFrame);
+            mixParticipantList.Insert(pair->audioFrame->id_,
+                                      static_cast<void*>(pair->participant));
+            assert(mixParticipantList.Size() <=
+                   kMaximumAmountOfMixedParticipants);
+        }
+        else
+        {
+            _audioFramePool->PushMemory(pair->audioFrame);
+        }
+        delete pair;
+        passiveWasNotMixedList.Erase(mixItem);
+    }
+    assert(maxAudioFrameCounter + mixListStartSize >= mixList.GetSize());
+    maxAudioFrameCounter += mixListStartSize - mixList.GetSize();
+}
+
+void AudioConferenceMixerImpl::GetAdditionalAudio(
+    ListWrapper& additionalFramesList)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
+                 "GetAdditionalAudio(additionalFramesList)");
+    ListItem* item = _additionalParticipantList.First();
+    while(item)
+    {
+        // The GetAudioFrame() callback may remove the current item. Store the
+        // next item just in case that happens.
+        ListItem* nextItem = _additionalParticipantList.Next(item);
+
+        MixerParticipant* participant = static_cast<MixerParticipant*>(
+            item->GetItem());
+        AudioFrame* audioFrame = NULL;
+        if(_audioFramePool->PopMemory(audioFrame) == -1)
+        {
+            WEBRTC_TRACE(kTraceMemory, kTraceAudioMixerServer, _id,
+                         "failed PopMemory() call");
+            assert(false);
+            return;
+        }
+        audioFrame->sample_rate_hz_ = _outputFrequency;
+        if(participant->GetAudioFrame(_id, *audioFrame) != 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioMixerServer, _id,
+                         "failed to GetAudioFrame() from participant");
+            _audioFramePool->PushMemory(audioFrame);
+            item = nextItem;
+            continue;
+        }
+        if(audioFrame->samples_per_channel_ == 0)
+        {
+            // Empty frame. Don't use it.
+            _audioFramePool->PushMemory(audioFrame);
+            item = nextItem;
+            continue;
+        }
+        additionalFramesList.PushBack(static_cast<void*>(audioFrame));
+        item = nextItem;
+    }
+}
+
+void AudioConferenceMixerImpl::UpdateMixedStatus(
+    MapWrapper& mixedParticipantsMap)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
+                 "UpdateMixedStatus(mixedParticipantsMap)");
+    assert(mixedParticipantsMap.Size() <= kMaximumAmountOfMixedParticipants);
+
+    // Loop through all participants. If they are in the mix map they
+    // were mixed.
+    ListItem* participantItem = _participantList.First();
+    while(participantItem != NULL)
+    {
+        bool isMixed = false;
+        MixerParticipant* participant =
+            static_cast<MixerParticipant*>(participantItem->GetItem());
+
+        MapItem* mixedItem = mixedParticipantsMap.First();
+        while(mixedItem)
+        {
+            if(participant == mixedItem->GetItem())
+            {
+                isMixed = true;
+                break;
+            }
+            mixedItem = mixedParticipantsMap.Next(mixedItem);
+        }
+        participant->_mixHistory->SetIsMixed(isMixed);
+        participantItem = _participantList.Next(participantItem);
+    }
+}
+
+void AudioConferenceMixerImpl::ClearAudioFrameList(ListWrapper& audioFrameList)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
+                 "ClearAudioFrameList(audioFrameList)");
+    ListItem* item = audioFrameList.First();
+    while(item)
+    {
+        AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
+        _audioFramePool->PushMemory(audioFrame);
+        audioFrameList.Erase(item);
+        item = audioFrameList.First();
+    }
+}
+
+void AudioConferenceMixerImpl::UpdateVADPositiveParticipants(
+    ListWrapper& mixList)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
+                 "UpdateVADPositiveParticipants(mixList)");
+
+    ListItem* item = mixList.First();
+    while(item != NULL)
+    {
+        AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
+        CalculateEnergy(*audioFrame);
+        if(audioFrame->vad_activity_ == AudioFrame::kVadActive)
+        {
+            _scratchVadPositiveParticipants[
+                _scratchVadPositiveParticipantsAmount].participant =
+                audioFrame->id_;
+            // TODO(andrew): to what should this be set?
+            _scratchVadPositiveParticipants[
+                _scratchVadPositiveParticipantsAmount].level = 0;
+            _scratchVadPositiveParticipantsAmount++;
+        }
+        item = mixList.Next(item);
+    }
+}
+
+bool AudioConferenceMixerImpl::IsParticipantInList(
+    MixerParticipant& participant,
+    ListWrapper& participantList)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
+                 "IsParticipantInList(participant,participantList)");
+    ListItem* item = participantList.First();
+    while(item != NULL)
+    {
+        MixerParticipant* rhsParticipant =
+            static_cast<MixerParticipant*>(item->GetItem());
+        if(&participant == rhsParticipant)
+        {
+            return true;
+        }
+        item = participantList.Next(item);
+    }
+    return false;
+}
+
+bool AudioConferenceMixerImpl::AddParticipantToList(
+    MixerParticipant& participant,
+    ListWrapper& participantList)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
+                 "AddParticipantToList(participant, participantList)");
+    if(participantList.PushBack(static_cast<void*>(&participant)) == -1)
+    {
+        return false;
+    }
+    // Make sure that the mixed status is correct for new MixerParticipant.
+    participant._mixHistory->ResetMixedStatus();
+    return true;
+}
+
+bool AudioConferenceMixerImpl::RemoveParticipantFromList(
+    MixerParticipant& participant,
+    ListWrapper& participantList)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
+                 "RemoveParticipantFromList(participant, participantList)");
+    ListItem* item = participantList.First();
+    while(item)
+    {
+        if(item->GetItem() == &participant)
+        {
+            participantList.Erase(item);
+            // Participant is no longer mixed, reset to default.
+            participant._mixHistory->ResetMixedStatus();
+            return true;
+        }
+        item = participantList.Next(item);
+    }
+    return false;
+}
+
+WebRtc_Word32 AudioConferenceMixerImpl::MixFromList(
+    AudioFrame& mixedAudio,
+    const ListWrapper& audioFrameList)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
+                 "MixFromList(mixedAudio, audioFrameList)");
+    WebRtc_UWord32 position = 0;
+    ListItem* item = audioFrameList.First();
+    if(item == NULL)
+    {
+        return 0;
+    }
+
+    if(_numMixedParticipants == 1)
+    {
+        // No mixing required here; skip the saturation protection.
+        AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
+        mixedAudio = *audioFrame;
+        SetParticipantStatistics(&_scratchMixedParticipants[position],
+                                 *audioFrame);
+        return 0;
+    }
+
+    while(item != NULL)
+    {
+        if(position >= kMaximumAmountOfMixedParticipants)
+        {
+            WEBRTC_TRACE(
+                kTraceMemory,
+                kTraceAudioMixerServer,
+                _id,
+                "Trying to mix more than max amount of mixed participants:%d!",
+                kMaximumAmountOfMixedParticipants);
+            // Assert and avoid crash
+            assert(false);
+            position = 0;
+        }
+        AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
+        MixFrames(&mixedAudio, audioFrame);
+
+        SetParticipantStatistics(&_scratchMixedParticipants[position],
+                                 *audioFrame);
+
+        position++;
+        item = audioFrameList.Next(item);
+    }
+
+    return 0;
+}
+
+// TODO(andrew): consolidate this function with MixFromList.
+WebRtc_Word32 AudioConferenceMixerImpl::MixAnonomouslyFromList(
+    AudioFrame& mixedAudio,
+    const ListWrapper& audioFrameList)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioMixerServer, _id,
+                 "MixAnonomouslyFromList(mixedAudio, audioFrameList)");
+    ListItem* item = audioFrameList.First();
+    if(item == NULL)
+        return 0;
+
+    if(_numMixedParticipants == 1)
+    {
+        // No mixing required here; skip the saturation protection.
+        AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
+        mixedAudio = *audioFrame;
+        return 0;
+    }
+
+    while(item != NULL)
+    {
+        AudioFrame* audioFrame = static_cast<AudioFrame*>(item->GetItem());
+        MixFrames(&mixedAudio, audioFrame);
+        item = audioFrameList.Next(item);
+    }
+    return 0;
+}
+
+bool AudioConferenceMixerImpl::LimitMixedAudio(AudioFrame& mixedAudio)
+{
+    if(_numMixedParticipants == 1)
+    {
+        return true;
+    }
+
+    // Smoothly limit the mixed frame.
+    const int error = _limiter->ProcessStream(&mixedAudio);
+
+    // And now we can safely restore the level. This procedure results in
+    // some loss of resolution, deemed acceptable.
+    //
+    // It's possible to apply the gain in the AGC (with a target level of 0 dbFS
+    // and compression gain of 6 dB). However, in the transition frame when this
+    // is enabled (moving from one to two participants) it has the potential to
+    // create discontinuities in the mixed frame.
+    //
+    // Instead we double the frame (with addition since left-shifting a
+    // negative value is undefined).
+    mixedAudio += mixedAudio;
+
+    if(error != _limiter->kNoError)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioMixerServer, _id,
+                     "Error from AudioProcessing: %d", error);
+        assert(false);
+        return false;
+    }
+    return true;
+}
+} // namespace webrtc
diff --git a/src/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h b/src/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
new file mode 100644
index 0000000..c38afd0
--- /dev/null
+++ b/src/modules/audio_conference_mixer/source/audio_conference_mixer_impl.h
@@ -0,0 +1,208 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_AUDIO_CONFERENCE_MIXER_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_AUDIO_CONFERENCE_MIXER_IMPL_H_
+
+#include "audio_conference_mixer.h"
+#include "engine_configurations.h"
+#include "level_indicator.h"
+#include "list_wrapper.h"
+#include "memory_pool.h"
+#include "module_common_types.h"
+#include "scoped_ptr.h"
+#include "time_scheduler.h"
+
+namespace webrtc {
+class AudioProcessing;
+class CriticalSectionWrapper;
+
+// Cheshire cat implementation of MixerParticipant's non virtual functions.
+class MixHistory
+{
+public:
+    MixHistory();
+    ~MixHistory();
+
+    // MixerParticipant function
+    WebRtc_Word32 IsMixed(bool& mixed) const;
+
+    // Sets wasMixed to true if the participant was mixed previous mix
+    // iteration.
+    WebRtc_Word32 WasMixed(bool& wasMixed) const;
+
+    // Updates the mixed status.
+    WebRtc_Word32 SetIsMixed(const bool mixed);
+
+    void ResetMixedStatus();
+private:
+    bool _isMixed;
+};
+
+class AudioConferenceMixerImpl : public AudioConferenceMixer
+{
+public:
+    // AudioProcessing only accepts 10 ms frames.
+    enum {kProcessPeriodicityInMs = 10};
+
+    AudioConferenceMixerImpl(int id);
+    ~AudioConferenceMixerImpl();
+
+    // Must be called after ctor.
+    bool Init();
+
+    // Module functions
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+    virtual WebRtc_Word32 TimeUntilNextProcess();
+    virtual WebRtc_Word32 Process();
+
+    // AudioConferenceMixer functions
+    virtual WebRtc_Word32 RegisterMixedStreamCallback(
+        AudioMixerOutputReceiver& mixReceiver);
+    virtual WebRtc_Word32 UnRegisterMixedStreamCallback();
+    virtual WebRtc_Word32 RegisterMixerStatusCallback(
+        AudioMixerStatusReceiver& mixerStatusCallback,
+        const WebRtc_UWord32 amountOf10MsBetweenCallbacks);
+    virtual WebRtc_Word32 UnRegisterMixerStatusCallback();
+    virtual WebRtc_Word32 SetMixabilityStatus(MixerParticipant& participant,
+                                              const bool mixable);
+    virtual WebRtc_Word32 MixabilityStatus(MixerParticipant& participant,
+                                           bool& mixable);
+    virtual WebRtc_Word32 SetMinimumMixingFrequency(Frequency freq);
+    virtual WebRtc_Word32 SetAnonymousMixabilityStatus(
+        MixerParticipant& participant, const bool mixable);
+    virtual WebRtc_Word32 AnonymousMixabilityStatus(
+        MixerParticipant& participant, bool& mixable);
+private:
+    enum{DEFAULT_AUDIO_FRAME_POOLSIZE = 50};
+
+    // Set/get mix frequency
+    WebRtc_Word32 SetOutputFrequency(const Frequency frequency);
+    Frequency OutputFrequency() const;
+
+    // Must be called whenever an audio frame indicates the number of channels
+    // has changed.
+    bool SetNumLimiterChannels(int numChannels);
+
+    // Fills mixList with the AudioFrames pointers that should be used when
+    // mixing. Fills mixParticipantList with ParticipantStatistics for the
+    // participants who's AudioFrames are inside mixList.
+    // maxAudioFrameCounter both input and output specifies how many more
+    // AudioFrames that are allowed to be mixed.
+    // rampOutList contain AudioFrames corresponding to an audio stream that
+    // used to be mixed but shouldn't be mixed any longer. These AudioFrames
+    // should be ramped out over this AudioFrame to avoid audio discontinuities.
+    void UpdateToMix(ListWrapper& mixList, ListWrapper& rampOutList,
+                     MapWrapper& mixParticipantList,
+                     WebRtc_UWord32& maxAudioFrameCounter);
+
+    // Return the lowest mixing frequency that can be used without having to
+    // downsample any audio.
+    WebRtc_Word32 GetLowestMixingFrequency();
+    WebRtc_Word32 GetLowestMixingFrequencyFromList(ListWrapper& mixList);
+
+    // Return the AudioFrames that should be mixed anonymously.
+    void GetAdditionalAudio(ListWrapper& additionalFramesList);
+
+    // Update the MixHistory of all MixerParticipants. mixedParticipantsList
+    // should contain a map of MixerParticipants that have been mixed.
+    void UpdateMixedStatus(MapWrapper& mixedParticipantsList);
+
+    // Clears audioFrameList and reclaims all memory associated with it.
+    void ClearAudioFrameList(ListWrapper& audioFrameList);
+
+    // Update the list of MixerParticipants who have a positive VAD. mixList
+    // should be a list of AudioFrames
+    void UpdateVADPositiveParticipants(
+        ListWrapper& mixList);
+
+    // This function returns true if it finds the MixerParticipant in the
+    // specified list of MixerParticipants.
+    bool IsParticipantInList(
+        MixerParticipant& participant,
+        ListWrapper& participantList);
+
+    // Add/remove the MixerParticipant to the specified
+    // MixerParticipant list.
+    bool AddParticipantToList(
+        MixerParticipant& participant,
+        ListWrapper& participantList);
+    bool RemoveParticipantFromList(
+        MixerParticipant& removeParticipant,
+        ListWrapper& participantList);
+
+    // Mix the AudioFrames stored in audioFrameList into mixedAudio.
+    WebRtc_Word32 MixFromList(
+        AudioFrame& mixedAudio,
+        const ListWrapper& audioFrameList);
+    // Mix the AudioFrames stored in audioFrameList into mixedAudio. No
+    // record will be kept of this mix (e.g. the corresponding MixerParticipants
+    // will not be marked as IsMixed()
+    WebRtc_Word32 MixAnonomouslyFromList(AudioFrame& mixedAudio,
+                                         const ListWrapper& audioFrameList);
+
+    bool LimitMixedAudio(AudioFrame& mixedAudio);
+
+    // Scratch memory
+    // Note that the scratch memory may only be touched in the scope of
+    // Process().
+    WebRtc_UWord32         _scratchParticipantsToMixAmount;
+    ParticipantStatistics  _scratchMixedParticipants[
+        kMaximumAmountOfMixedParticipants];
+    WebRtc_UWord32         _scratchVadPositiveParticipantsAmount;
+    ParticipantStatistics  _scratchVadPositiveParticipants[
+        kMaximumAmountOfMixedParticipants];
+
+    scoped_ptr<CriticalSectionWrapper> _crit;
+    scoped_ptr<CriticalSectionWrapper> _cbCrit;
+
+    WebRtc_Word32 _id;
+
+    Frequency _minimumMixingFreq;
+
+    // Mix result callback
+    AudioMixerOutputReceiver* _mixReceiver;
+
+    AudioMixerStatusReceiver* _mixerStatusCallback;
+    WebRtc_UWord32            _amountOf10MsBetweenCallbacks;
+    WebRtc_UWord32            _amountOf10MsUntilNextCallback;
+    bool                      _mixerStatusCb;
+
+    // The current sample frequency and sample size when mixing.
+    Frequency _outputFrequency;
+    WebRtc_UWord16 _sampleSize;
+
+    // Memory pool to avoid allocating/deallocating AudioFrames
+    MemoryPool<AudioFrame>* _audioFramePool;
+
+    // List of all participants. Note all lists are disjunct
+    ListWrapper _participantList;              // May be mixed.
+    ListWrapper _additionalParticipantList;    // Always mixed, anonomously.
+
+    WebRtc_UWord32 _numMixedParticipants;
+
+    WebRtc_UWord32 _timeStamp;
+
+    // Metronome class.
+    TimeScheduler _timeScheduler;
+
+    // Smooth level indicator.
+    LevelIndicator _mixedAudioLevel;
+
+    // Counter keeping track of concurrent calls to process.
+    // Note: should never be higher than 1 or lower than 0.
+    WebRtc_Word16 _processCalls;
+
+    // Used for inhibiting saturation in mixing.
+    scoped_ptr<AudioProcessing> _limiter;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_AUDIO_CONFERENCE_MIXER_IMPL_H_
diff --git a/src/modules/audio_conference_mixer/source/audio_conference_mixer_unittest.cc b/src/modules/audio_conference_mixer/source/audio_conference_mixer_unittest.cc
new file mode 100644
index 0000000..f895fbd
--- /dev/null
+++ b/src/modules/audio_conference_mixer/source/audio_conference_mixer_unittest.cc
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Empty test just to get code coverage metrics for this dir.
+ */
+#include "audio_conference_mixer.h"
+#include "gtest/gtest.h"
+
+TEST(AudioConferenceMixerTest, EmptyTestToGetCodeCoverage) {}
diff --git a/src/modules/audio_conference_mixer/source/audio_frame_manipulator.cc b/src/modules/audio_conference_mixer/source/audio_frame_manipulator.cc
new file mode 100644
index 0000000..65f8dc0
--- /dev/null
+++ b/src/modules/audio_conference_mixer/source/audio_frame_manipulator.cc
@@ -0,0 +1,81 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_frame_manipulator.h"
+#include "module_common_types.h"
+#include "typedefs.h"
+
+namespace {
+// Linear ramping over 80 samples.
+// TODO(hellner): ramp using fix point?
+const float rampArray[] = {0.0000f, 0.0127f, 0.0253f, 0.0380f,
+                           0.0506f, 0.0633f, 0.0759f, 0.0886f,
+                           0.1013f, 0.1139f, 0.1266f, 0.1392f,
+                           0.1519f, 0.1646f, 0.1772f, 0.1899f,
+                           0.2025f, 0.2152f, 0.2278f, 0.2405f,
+                           0.2532f, 0.2658f, 0.2785f, 0.2911f,
+                           0.3038f, 0.3165f, 0.3291f, 0.3418f,
+                           0.3544f, 0.3671f, 0.3797f, 0.3924f,
+                           0.4051f, 0.4177f, 0.4304f, 0.4430f,
+                           0.4557f, 0.4684f, 0.4810f, 0.4937f,
+                           0.5063f, 0.5190f, 0.5316f, 0.5443f,
+                           0.5570f, 0.5696f, 0.5823f, 0.5949f,
+                           0.6076f, 0.6203f, 0.6329f, 0.6456f,
+                           0.6582f, 0.6709f, 0.6835f, 0.6962f,
+                           0.7089f, 0.7215f, 0.7342f, 0.7468f,
+                           0.7595f, 0.7722f, 0.7848f, 0.7975f,
+                           0.8101f, 0.8228f, 0.8354f, 0.8481f,
+                           0.8608f, 0.8734f, 0.8861f, 0.8987f,
+                           0.9114f, 0.9241f, 0.9367f, 0.9494f,
+                           0.9620f, 0.9747f, 0.9873f, 1.0000f};
+const int rampSize = sizeof(rampArray)/sizeof(rampArray[0]);
+} // namespace
+
+namespace webrtc {
+void CalculateEnergy(AudioFrame& audioFrame)
+{
+    if(audioFrame.energy_ != 0xffffffff)
+    {
+        return;
+    }
+    audioFrame.energy_ = 0;
+    for(int position = 0; position < audioFrame.samples_per_channel_;
+        position++)
+    {
+        // TODO(andrew): this can easily overflow.
+        audioFrame.energy_ += audioFrame.data_[position] *
+                              audioFrame.data_[position];
+    }
+}
+
+void RampIn(AudioFrame& audioFrame)
+{
+    assert(rampSize <= audioFrame.samples_per_channel_);
+    for(int i = 0; i < rampSize; i++)
+    {
+        audioFrame.data_[i] = static_cast<WebRtc_Word16>
+            (rampArray[i] * audioFrame.data_[i]);
+    }
+}
+
+void RampOut(AudioFrame& audioFrame)
+{
+    assert(rampSize <= audioFrame.samples_per_channel_);
+    for(int i = 0; i < rampSize; i++)
+    {
+        const int rampPos = rampSize - 1 - i;
+        audioFrame.data_[i] = static_cast<WebRtc_Word16>
+            (rampArray[rampPos] * audioFrame.data_[i]);
+    }
+    memset(&audioFrame.data_[rampSize], 0,
+           (audioFrame.samples_per_channel_ - rampSize) *
+           sizeof(audioFrame.data_[0]));
+}
+} // namespace webrtc
diff --git a/src/modules/audio_conference_mixer/source/audio_frame_manipulator.h b/src/modules/audio_conference_mixer/source/audio_frame_manipulator.h
new file mode 100644
index 0000000..fdf5d33
--- /dev/null
+++ b/src/modules/audio_conference_mixer/source/audio_frame_manipulator.h
@@ -0,0 +1,26 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_AUDIO_FRAME_MANIPULATOR_H_
+#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_AUDIO_FRAME_MANIPULATOR_H_
+
+namespace webrtc {
+class AudioFrame;
+
+// Updates the audioFrame's energy (based on its samples).
+void CalculateEnergy(AudioFrame& audioFrame);
+
+// Apply linear step function that ramps in/out the audio samples in audioFrame
+void RampIn(AudioFrame& audioFrame);
+void RampOut(AudioFrame& audioFrame);
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_AUDIO_FRAME_MANIPULATOR_H_
diff --git a/src/modules/audio_conference_mixer/source/level_indicator.cc b/src/modules/audio_conference_mixer/source/level_indicator.cc
new file mode 100644
index 0000000..799a47d
--- /dev/null
+++ b/src/modules/audio_conference_mixer/source/level_indicator.cc
@@ -0,0 +1,76 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "level_indicator.h"
+
+namespace webrtc {
+// Array for adding smothing to level changes (ad-hoc).
+const WebRtc_UWord32 perm[] =
+    {0,1,2,3,4,4,5,5,5,5,6,6,6,6,6,7,7,7,7,8,8,8,9,9,9,9,9,9,9,9,9,9,9};
+
+LevelIndicator::LevelIndicator()
+    : _max(0),
+      _count(0),
+      _currentLevel(0)
+{
+}
+
+LevelIndicator::~LevelIndicator()
+{
+}
+
+// Level is based on the highest absolute value for all samples.
+void LevelIndicator::ComputeLevel(const WebRtc_Word16* speech,
+                                  const WebRtc_UWord16 nrOfSamples)
+{
+    WebRtc_Word32 min = 0;
+    for(WebRtc_UWord32 i = 0; i < nrOfSamples; i++)
+    {
+        if(_max < speech[i])
+        {
+            _max = speech[i];
+        }
+        if(min > speech[i])
+        {
+            min = speech[i];
+        }
+    }
+
+    // Absolute max value.
+    if(-min > _max)
+    {
+        _max = -min;
+    }
+
+    if(_count == TICKS_BEFORE_CALCULATION)
+    {
+        // Highest sample value maps directly to a level.
+        WebRtc_Word32 position = _max / 1000;
+        if ((position == 0) &&
+            (_max > 250))
+        {
+            position = 1;
+        }
+        _currentLevel = perm[position];
+        // The max value is decayed and stored so that it can be reused to slow
+        // down decreases in level.
+        _max = _max >> 1;
+        _count = 0;
+    } else {
+        _count++;
+    }
+}
+
+WebRtc_Word32 LevelIndicator::GetLevel()
+{
+    return _currentLevel;
+}
+
+} // namespace webrtc
diff --git a/src/modules/audio_conference_mixer/source/level_indicator.h b/src/modules/audio_conference_mixer/source/level_indicator.h
new file mode 100644
index 0000000..bdcdf8e
--- /dev/null
+++ b/src/modules/audio_conference_mixer/source/level_indicator.h
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_LEVEL_INDICATOR_H_
+#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_LEVEL_INDICATOR_H_
+
+#include "typedefs.h"
+
+namespace webrtc {
+class LevelIndicator
+{
+public:
+    enum{TICKS_BEFORE_CALCULATION = 10};
+
+    LevelIndicator();
+    ~LevelIndicator();
+
+    // Updates the level.
+    void ComputeLevel(const WebRtc_Word16* speech,
+                      const WebRtc_UWord16 nrOfSamples);
+
+    WebRtc_Word32 GetLevel();
+private:
+    WebRtc_Word32  _max;
+    WebRtc_UWord32 _count;
+    WebRtc_UWord32 _currentLevel;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_LEVEL_INDICATOR_H_
diff --git a/src/modules/audio_conference_mixer/source/memory_pool.h b/src/modules/audio_conference_mixer/source/memory_pool.h
new file mode 100644
index 0000000..caf5d93
--- /dev/null
+++ b/src/modules/audio_conference_mixer/source/memory_pool.h
@@ -0,0 +1,122 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_MEMORY_POOL_H_
+#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_MEMORY_POOL_H_
+
+#include <assert.h>
+
+#include "typedefs.h"
+
+#if _WIN32
+#include "memory_pool_win.h"
+#else
+#include "memory_pool_posix.h"
+#endif
+
+namespace webrtc {
+
+template<class MemoryType>
+class MemoryPool
+{
+public:
+    // Factory method, constructor disabled.
+    static WebRtc_Word32 CreateMemoryPool(MemoryPool*& memoryPool,
+                                          WebRtc_UWord32 initialPoolSize);
+
+    // Try to delete the memory pool. Fail with return value -1 if there is
+    // outstanding memory.
+    static WebRtc_Word32 DeleteMemoryPool(
+        MemoryPool*& memoryPool);
+
+    // Get/return unused memory.
+    WebRtc_Word32 PopMemory(MemoryType*&  memory);
+    WebRtc_Word32 PushMemory(MemoryType*& memory);
+private:
+    MemoryPool(WebRtc_Word32 initialPoolSize);
+    ~MemoryPool();
+
+    MemoryPoolImpl<MemoryType>* _ptrImpl;
+};
+
+template<class MemoryType>
+MemoryPool<MemoryType>::MemoryPool(WebRtc_Word32 initialPoolSize)
+{
+    _ptrImpl = new MemoryPoolImpl<MemoryType>(initialPoolSize);
+}
+
+template<class MemoryType>
+MemoryPool<MemoryType>::~MemoryPool()
+{
+    delete _ptrImpl;
+}
+
+template<class MemoryType> WebRtc_Word32
+MemoryPool<MemoryType>::CreateMemoryPool(MemoryPool*&   memoryPool,
+                                         WebRtc_UWord32 initialPoolSize)
+{
+    memoryPool = new MemoryPool(initialPoolSize);
+    if(memoryPool == NULL)
+    {
+        return -1;
+    }
+    if(memoryPool->_ptrImpl == NULL)
+    {
+        delete memoryPool;
+        memoryPool = NULL;
+        return -1;
+    }
+    if(!memoryPool->_ptrImpl->Initialize())
+    {
+        delete memoryPool;
+        memoryPool = NULL;
+        return -1;
+    }
+    return 0;
+}
+
+template<class MemoryType>
+WebRtc_Word32 MemoryPool<MemoryType>::DeleteMemoryPool(MemoryPool*& memoryPool)
+{
+    if(memoryPool == NULL)
+    {
+        return -1;
+    }
+    if(memoryPool->_ptrImpl == NULL)
+    {
+        return -1;
+    }
+    if(memoryPool->_ptrImpl->Terminate() == -1)
+    {
+        return -1;
+    }
+    delete memoryPool;
+    memoryPool = NULL;
+    return 0;
+}
+
+template<class MemoryType>
+WebRtc_Word32 MemoryPool<MemoryType>::PopMemory(MemoryType*& memory)
+{
+    return _ptrImpl->PopMemory(memory);
+}
+
+template<class MemoryType>
+WebRtc_Word32 MemoryPool<MemoryType>::PushMemory(MemoryType*& memory)
+{
+    if(memory == NULL)
+    {
+        return -1;
+    }
+    return _ptrImpl->PushMemory(memory);
+}
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_MEMORY_POOL_H_
diff --git a/src/modules/audio_conference_mixer/source/memory_pool_posix.h b/src/modules/audio_conference_mixer/source/memory_pool_posix.h
new file mode 100644
index 0000000..45f800b
--- /dev/null
+++ b/src/modules/audio_conference_mixer/source/memory_pool_posix.h
@@ -0,0 +1,168 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_MEMORY_POOL_GENERIC_H_
+#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_MEMORY_POOL_GENERIC_H_
+
+#include <assert.h>
+
+#include "critical_section_wrapper.h"
+#include "list_wrapper.h"
+#include "typedefs.h"
+
+namespace webrtc {
+template<class MemoryType>
+class MemoryPoolImpl
+{
+public:
+    // MemoryPool functions.
+    WebRtc_Word32 PopMemory(MemoryType*&  memory);
+    WebRtc_Word32 PushMemory(MemoryType*& memory);
+
+    MemoryPoolImpl(WebRtc_Word32 initialPoolSize);
+    ~MemoryPoolImpl();
+
+    // Atomic functions
+    WebRtc_Word32 Terminate();
+    bool Initialize();
+private:
+    // Non-atomic function.
+    WebRtc_Word32 CreateMemory(WebRtc_UWord32 amountToCreate);
+
+    CriticalSectionWrapper* _crit;
+
+    bool _terminate;
+
+    ListWrapper _memoryPool;
+
+    WebRtc_UWord32 _initialPoolSize;
+    WebRtc_UWord32 _createdMemory;
+    WebRtc_UWord32 _outstandingMemory;
+};
+
+template<class MemoryType>
+MemoryPoolImpl<MemoryType>::MemoryPoolImpl(WebRtc_Word32 initialPoolSize)
+    : _crit(CriticalSectionWrapper::CreateCriticalSection()),
+      _terminate(false),
+      _memoryPool(),
+      _initialPoolSize(initialPoolSize),
+      _createdMemory(0),
+      _outstandingMemory(0)
+{
+}
+
+template<class MemoryType>
+MemoryPoolImpl<MemoryType>::~MemoryPoolImpl()
+{
+    // Trigger assert if there is outstanding memory.
+    assert(_createdMemory == 0);
+    assert(_outstandingMemory == 0);
+    delete _crit;
+}
+
+template<class MemoryType>
+WebRtc_Word32 MemoryPoolImpl<MemoryType>::PopMemory(MemoryType*& memory)
+{
+    CriticalSectionScoped cs(_crit);
+    if(_terminate)
+    {
+        memory = NULL;
+        return -1;
+    }
+    ListItem* item = _memoryPool.First();
+    if(item == NULL)
+    {
+        // _memoryPool empty create new memory.
+        CreateMemory(_initialPoolSize);
+        item = _memoryPool.First();
+        if(item == NULL)
+        {
+            memory = NULL;
+            return -1;
+        }
+    }
+    memory = static_cast<MemoryType*>(item->GetItem());
+    _memoryPool.Erase(item);
+    _outstandingMemory++;
+    return 0;
+}
+
+template<class MemoryType>
+WebRtc_Word32 MemoryPoolImpl<MemoryType>::PushMemory(MemoryType*& memory)
+{
+    if(memory == NULL)
+    {
+        return -1;
+    }
+    CriticalSectionScoped cs(_crit);
+    _outstandingMemory--;
+    if(_memoryPool.GetSize() > (_initialPoolSize << 1))
+    {
+        // Reclaim memory if less than half of the pool is unused.
+        _createdMemory--;
+        delete memory;
+        memory = NULL;
+        return 0;
+    }
+    _memoryPool.PushBack(static_cast<void*>(memory));
+    memory = NULL;
+    return 0;
+}
+
+template<class MemoryType>
+bool MemoryPoolImpl<MemoryType>::Initialize()
+{
+    CriticalSectionScoped cs(_crit);
+    return CreateMemory(_initialPoolSize) == 0;
+}
+
+template<class MemoryType>
+WebRtc_Word32 MemoryPoolImpl<MemoryType>::Terminate()
+{
+    CriticalSectionScoped cs(_crit);
+    assert(_createdMemory == _outstandingMemory + _memoryPool.GetSize());
+
+    _terminate = true;
+    // Reclaim all memory.
+    while(_createdMemory > 0)
+    {
+        ListItem* item = _memoryPool.First();
+        if(item == NULL)
+        {
+            // There is memory that hasn't been returned yet.
+            return -1;
+        }
+        MemoryType* memory = static_cast<MemoryType*>(item->GetItem());
+        delete memory;
+        _memoryPool.Erase(item);
+        _createdMemory--;
+    }
+    return 0;
+}
+
+template<class MemoryType>
+WebRtc_Word32 MemoryPoolImpl<MemoryType>::CreateMemory(
+    WebRtc_UWord32 amountToCreate)
+{
+    for(WebRtc_UWord32 i = 0; i < amountToCreate; i++)
+    {
+        MemoryType* memory = new MemoryType();
+        if(memory == NULL)
+        {
+            return -1;
+        }
+        _memoryPool.PushBack(static_cast<void*>(memory));
+        _createdMemory++;
+    }
+    return 0;
+}
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_MEMORY_POOL_GENERIC_H_
diff --git a/src/modules/audio_conference_mixer/source/memory_pool_win.h b/src/modules/audio_conference_mixer/source/memory_pool_win.h
new file mode 100644
index 0000000..8ff97f8
--- /dev/null
+++ b/src/modules/audio_conference_mixer/source/memory_pool_win.h
@@ -0,0 +1,199 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_MEMORY_POOL_WINDOWS_H_
+#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_MEMORY_POOL_WINDOWS_H_
+
+#include <assert.h>
+#include <windows.h>
+
+#include "aligned_malloc.h"
+#include "atomic32.h"
+#include "typedefs.h"
+
+namespace webrtc {
+template<class MemoryType> struct MemoryPoolItem;
+
+template<class MemoryType>
+struct MemoryPoolItemPayload
+{
+    MemoryPoolItemPayload()
+        : memoryType(),
+          base(NULL)
+    {
+    }
+    MemoryType                  memoryType;
+    MemoryPoolItem<MemoryType>* base;
+};
+
+template<class MemoryType>
+struct MemoryPoolItem
+{
+    // Atomic single linked list entry header.
+    SLIST_ENTRY itemEntry;
+    // Atomic single linked list payload.
+    MemoryPoolItemPayload<MemoryType>* payload;
+};
+
+template<class MemoryType>
+class MemoryPoolImpl
+{
+public:
+    // MemoryPool functions.
+    WebRtc_Word32 PopMemory(MemoryType*&  memory);
+    WebRtc_Word32 PushMemory(MemoryType*& memory);
+
+    MemoryPoolImpl(WebRtc_Word32 /*initialPoolSize*/);
+    ~MemoryPoolImpl();
+
+    // Atomic functions.
+    WebRtc_Word32 Terminate();
+    bool Initialize();
+private:
+    // Non-atomic function.
+    MemoryPoolItem<MemoryType>* CreateMemory();
+
+    // Windows implementation of single linked atomic list, documented here:
+    // http://msdn.microsoft.com/en-us/library/ms686962(VS.85).aspx
+
+    // Atomic single linked list head.
+    PSLIST_HEADER _pListHead;
+
+    Atomic32 _createdMemory;
+    Atomic32 _outstandingMemory;
+};
+
+template<class MemoryType>
+MemoryPoolImpl<MemoryType>::MemoryPoolImpl(
+    WebRtc_Word32 /*initialPoolSize*/)
+    : _pListHead(NULL),
+      _createdMemory(0),
+      _outstandingMemory(0)
+{
+}
+
+template<class MemoryType>
+MemoryPoolImpl<MemoryType>::~MemoryPoolImpl()
+{
+    Terminate();
+    if(_pListHead != NULL)
+    {
+        AlignedFree(reinterpret_cast<void*>(_pListHead));
+        _pListHead = NULL;
+    }
+    // Trigger assert if there is outstanding memory.
+    assert(_createdMemory.Value() == 0);
+    assert(_outstandingMemory.Value() == 0);
+}
+
+template<class MemoryType>
+WebRtc_Word32 MemoryPoolImpl<MemoryType>::PopMemory(MemoryType*& memory)
+{
+    PSLIST_ENTRY pListEntry = InterlockedPopEntrySList(_pListHead);
+    if(pListEntry == NULL)
+    {
+        MemoryPoolItem<MemoryType>* item = CreateMemory();
+        if(item == NULL)
+        {
+            return -1;
+        }
+        pListEntry = &(item->itemEntry);
+    }
+    ++_outstandingMemory;
+    memory = &((MemoryPoolItem<MemoryType>*)pListEntry)->payload->memoryType;
+    return 0;
+}
+
+template<class MemoryType>
+WebRtc_Word32 MemoryPoolImpl<MemoryType>::PushMemory(MemoryType*& memory)
+{
+    if(memory == NULL)
+    {
+        return -1;
+    }
+
+    MemoryPoolItem<MemoryType>* item =
+        ((MemoryPoolItemPayload<MemoryType>*)memory)->base;
+
+    const WebRtc_Word32 usedItems  = --_outstandingMemory;
+    const WebRtc_Word32 totalItems = _createdMemory.Value();
+    const WebRtc_Word32 freeItems  = totalItems - usedItems;
+    if(freeItems < 0)
+    {
+        assert(false);
+        delete item->payload;
+        AlignedFree(item);
+        return -1;
+    }
+    if(freeItems >= totalItems>>1)
+    {
+        delete item->payload;
+        AlignedFree(item);
+        --_createdMemory;
+        return 0;
+    }
+    InterlockedPushEntrySList(_pListHead,&(item->itemEntry));
+    return 0;
+}
+
+template<class MemoryType>
+bool MemoryPoolImpl<MemoryType>::Initialize()
+{
+    _pListHead = (PSLIST_HEADER)AlignedMalloc(sizeof(SLIST_HEADER),
+                                              MEMORY_ALLOCATION_ALIGNMENT);
+    if(_pListHead == NULL)
+    {
+        return false;
+    }
+    InitializeSListHead(_pListHead);
+    return true;
+}
+
+template<class MemoryType>
+WebRtc_Word32 MemoryPoolImpl<MemoryType>::Terminate()
+{
+    WebRtc_Word32 itemsFreed = 0;
+    PSLIST_ENTRY pListEntry = InterlockedPopEntrySList(_pListHead);
+    while(pListEntry != NULL)
+    {
+        MemoryPoolItem<MemoryType>* item = ((MemoryPoolItem<MemoryType>*)pListEntry);
+        delete item->payload;
+        AlignedFree(item);
+        --_createdMemory;
+        itemsFreed++;
+        pListEntry = InterlockedPopEntrySList(_pListHead);
+    }
+    return itemsFreed;
+}
+
+template<class MemoryType>
+MemoryPoolItem<MemoryType>* MemoryPoolImpl<MemoryType>::CreateMemory()
+{
+    MemoryPoolItem<MemoryType>* returnValue = (MemoryPoolItem<MemoryType>*)
+        AlignedMalloc(sizeof(MemoryPoolItem<MemoryType>),
+                      MEMORY_ALLOCATION_ALIGNMENT);
+    if(returnValue == NULL)
+    {
+        return NULL;
+    }
+
+    returnValue->payload = new MemoryPoolItemPayload<MemoryType>();
+    if(returnValue->payload == NULL)
+    {
+        delete returnValue;
+        return NULL;
+    }
+    returnValue->payload->base = returnValue;
+    ++_createdMemory;
+    return returnValue;
+}
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_MEMORY_POOL_WINDOWS_H_
diff --git a/src/modules/audio_conference_mixer/source/time_scheduler.cc b/src/modules/audio_conference_mixer/source/time_scheduler.cc
new file mode 100644
index 0000000..183005e
--- /dev/null
+++ b/src/modules/audio_conference_mixer/source/time_scheduler.cc
@@ -0,0 +1,102 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "critical_section_wrapper.h"
+#include "time_scheduler.h"
+
+namespace webrtc {
+TimeScheduler::TimeScheduler(const WebRtc_UWord32 periodicityInMs)
+    : _crit(CriticalSectionWrapper::CreateCriticalSection()),
+      _isStarted(false),
+      _lastPeriodMark(),
+      _periodicityInMs(periodicityInMs),
+      _periodicityInTicks(TickTime::MillisecondsToTicks(periodicityInMs)),
+      _missedPeriods(0)
+ {
+ }
+
+TimeScheduler::~TimeScheduler()
+{
+    delete _crit;
+}
+
+WebRtc_Word32 TimeScheduler::UpdateScheduler()
+{
+    CriticalSectionScoped cs(_crit);
+    if(!_isStarted)
+    {
+        _isStarted = true;
+        _lastPeriodMark = TickTime::Now();
+        return 0;
+    }
+    // Don't perform any calculations until the debt of pending periods have
+    // been worked off.
+    if(_missedPeriods > 0)
+    {
+        _missedPeriods--;
+        return 0;
+    }
+
+    // Calculate the time that has past since previous call to this function.
+    TickTime tickNow = TickTime::Now();
+    TickInterval amassedTicks = tickNow - _lastPeriodMark;
+    WebRtc_Word64 amassedMs = amassedTicks.Milliseconds();
+
+    // Calculate the number of periods the time that has passed correspond to.
+    WebRtc_Word32 periodsToClaim = (WebRtc_Word32)amassedMs /
+        ((WebRtc_Word32)_periodicityInMs);
+
+    // One period will be worked off by this call. Make sure that the number of
+    // pending periods don't end up being negative (e.g. if this function is
+    // called to often).
+    if(periodsToClaim < 1)
+    {
+        periodsToClaim = 1;
+    }
+
+    // Update the last period mark without introducing any drifting.
+    // Note that if this fuunction is called to often _lastPeriodMark can
+    // refer to a time in the future which in turn will yield TimeToNextUpdate
+    // that is greater than the periodicity
+    for(WebRtc_Word32 i = 0; i < periodsToClaim; i++)
+    {
+        _lastPeriodMark += _periodicityInTicks;
+    }
+
+    // Update the total amount of missed periods note that we have processed
+    // one period hence the - 1
+    _missedPeriods += periodsToClaim - 1;
+    return 0;
+}
+
+WebRtc_Word32 TimeScheduler::TimeToNextUpdate(
+    WebRtc_Word32& updateTimeInMS) const
+{
+    CriticalSectionScoped cs(_crit);
+    // Missed periods means that the next UpdateScheduler() should happen
+    // immediately.
+    if(_missedPeriods > 0)
+    {
+        updateTimeInMS = 0;
+        return 0;
+    }
+
+    // Calculate the time (in ms) that has past since last call to
+    // UpdateScheduler()
+    TickTime tickNow = TickTime::Now();
+    TickInterval ticksSinceLastUpdate = tickNow - _lastPeriodMark;
+    const WebRtc_Word32 millisecondsSinceLastUpdate =
+        (WebRtc_Word32) ticksSinceLastUpdate.Milliseconds();
+
+    updateTimeInMS = _periodicityInMs - millisecondsSinceLastUpdate;
+    updateTimeInMS =  (updateTimeInMS < 0) ? 0 : updateTimeInMS;
+    return 0;
+}
+} // namespace webrtc
diff --git a/src/modules/audio_conference_mixer/source/time_scheduler.h b/src/modules/audio_conference_mixer/source/time_scheduler.h
new file mode 100644
index 0000000..e2674d9
--- /dev/null
+++ b/src/modules/audio_conference_mixer/source/time_scheduler.h
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// The TimeScheduler class keeps track of periodic events. It is non-drifting
+// and keeps track of any missed periods so that it is possible to catch up.
+// (compare to a metronome)
+
+#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_TIME_SCHEDULER_H_
+#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_TIME_SCHEDULER_H_
+
+#include "tick_util.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class TimeScheduler
+{
+public:
+    TimeScheduler(const WebRtc_UWord32 periodicityInMs);
+    ~TimeScheduler();
+
+    // Signal that a periodic event has been triggered.
+    WebRtc_Word32 UpdateScheduler();
+
+    // Set updateTimeInMs to the amount of time until UpdateScheduler() should
+    // be called. This time will never be negative.
+    WebRtc_Word32 TimeToNextUpdate(WebRtc_Word32& updateTimeInMS) const;
+
+private:
+    CriticalSectionWrapper* _crit;
+
+    bool _isStarted;
+    TickTime _lastPeriodMark;
+
+    WebRtc_UWord32 _periodicityInMs;
+    WebRtc_Word64  _periodicityInTicks;
+    WebRtc_UWord32 _missedPeriods;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_SOURCE_TIME_SCHEDULER_H_
diff --git a/src/modules/audio_conference_mixer/test/FunctionTest/functionTest.cc b/src/modules/audio_conference_mixer/test/FunctionTest/functionTest.cc
new file mode 100644
index 0000000..f79898c
--- /dev/null
+++ b/src/modules/audio_conference_mixer/test/FunctionTest/functionTest.cc
@@ -0,0 +1,1098 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+#include <iostream>
+#include <time.h>
+
+#include "functionTest.h"
+#include "event_wrapper.h"
+#include "trace.h"
+#include "thread_wrapper.h"
+#include "webrtc_vad.h"
+
+#if (defined(WEBRTC_LINUX) || defined(WEBRTC_MAC))
+   #include <sys/stat.h>
+   #define MY_PERMISSION_MASK S_IRWXU | S_IRWXG | S_IRWXO
+   #define MKDIR(directory) mkdir(directory,MY_PERMISSION_MASK)
+#else // defined(WINDOWS)
+   #include <direct.h>
+   #define MKDIR(directory) mkdir(directory)
+#endif
+
+int main(int /*argc*/, char* /*argv[]*/)
+{
+    // Initialize random number generator
+    //unsigned int seed = 1220716312; // just a seed that can be used
+    unsigned int seed = (unsigned)time( NULL );
+    srand(seed);
+    std::cout << "Starting function test. Seed = " << seed << std::endl;
+    std::cout << "Press enter to continue" << std::endl;
+    getchar();
+    MixerWrapper* testInstance1 = MixerWrapper::CreateMixerWrapper();
+    MixerWrapper* testInstance2 = MixerWrapper::CreateMixerWrapper();
+    if((testInstance1 == NULL) ||
+       (testInstance2 == NULL))
+    {
+        assert(false);
+        return 0;
+    }
+
+    char versionString[256] = "";
+    WebRtc_UWord32 remainingBufferInBytes = 256;
+    WebRtc_UWord32 position = 0;
+    AudioConferenceMixer::GetVersion(versionString,remainingBufferInBytes,position);
+
+    int read = 1;
+    while(read != 0)
+    {
+        std::cout << versionString << std::endl;
+        std::cout << "--------Menu-----------" << std::endl;
+        std::cout << std::endl;
+        std::cout << "0. Quit" << std::endl;
+        std::cout << "2. StartMixing" << std::endl;
+        std::cout << "3. StopMixing" << std::endl;
+        std::cout << "4. Create participant(s)" << std::endl;
+        std::cout << "5. Delete participant(s)" << std::endl;
+        std::cout << "6. List participants " << std::endl;
+        std::cout << "7. Print mix status " << std::endl;
+        std::cout << "8. Run identical scenario:" << std::endl;
+        std::cout << "   a. 1 VIP,       3 regular, amount of mixed = 3"  << std::endl;
+        std::cout << "   b. 1 anonymous, 3 regular, amount of mixed = 2"  << std::endl;
+        scanf("%i",&read);
+        getchar();
+        MixerParticipant::ParticipantType participantType;
+        int option = 0;
+        WebRtc_UWord32 id = 0;
+        ListItem* item = NULL;
+        ListWrapper participants;
+        if(read == 0)
+        {
+            // donothing
+        }
+        else if(read == 1)
+        {
+        }
+        else if(read == 2)
+        {
+            testInstance1->StartMixing();
+        }
+        else if(read == 3)
+        {
+            testInstance1->StopMixing();
+        }
+        else if(read == 4)
+        {
+            while(true)
+            {
+                std::cout << "VIP(music)       = " << MixerParticipant::VIP << std::endl;
+                std::cout << "Regular(speech)  = " << MixerParticipant::REGULAR << std::endl;
+                std::cout << "Anonymous(music) = " << MixerParticipant::MIXED_ANONYMOUS << std::endl;
+                std::cout << "Select type of participant: ";
+                scanf("%i",&option);
+                if(option == MixerParticipant::VIP ||
+                   option == MixerParticipant::REGULAR ||
+                   option == MixerParticipant::MIXED_ANONYMOUS)
+                {
+                    break;
+                }
+            }
+            participantType = (MixerParticipant::ParticipantType)option;
+            testInstance1->CreateParticipant(participantType);
+        }
+        else if(read == 5)
+        {
+            std::cout << "Select participant to delete: ";
+            scanf("%i",&option);
+            id = option;
+            testInstance1->DeleteParticipant(id);
+            break;
+        }
+        else if(read == 6)
+        {
+            testInstance1->GetParticipantList(participants);
+            item = participants.First();
+            std::cout << "The following participants have been created: " << std::endl;
+            while(item)
+            {
+                WebRtc_UWord32 id = item->GetUnsignedItem();
+                std::cout << id;
+                item = participants.Next(item);
+                if(item != NULL)
+                {
+                    std::cout << ", ";
+                }
+                else
+                {
+                    std::cout << std::endl;
+                }
+            }
+        }
+        else if(read == 7)
+        {
+            std::cout << "-------------Mixer Status-------------" << std::endl;
+            testInstance1->PrintStatus();
+            testInstance2->PrintStatus();
+            std::cout << "Press enter to continue";
+            getchar();
+            std::cout << std::endl;
+            std::cout << std::endl;
+        }
+        else if(read == 8)
+        {
+            const WebRtc_Word32 amountOfParticipants = 4;
+            MixerParticipant::ParticipantType instance1Participants[] =
+                                                {MixerParticipant::VIP,
+                                                 MixerParticipant::REGULAR,
+                                                 MixerParticipant::REGULAR,
+                                                 MixerParticipant::REGULAR};
+            MixerParticipant::ParticipantType instance2Participants[] =
+                                               {MixerParticipant::MIXED_ANONYMOUS,
+                                                MixerParticipant::REGULAR,
+                                                MixerParticipant::REGULAR,
+                                                MixerParticipant::REGULAR};
+            for(WebRtc_Word32 i = 0; i < amountOfParticipants; i++)
+            {
+                WebRtc_Word32 startPosition = 0;
+                GenerateRandomPosition(startPosition);
+                testInstance1->CreateParticipant(instance1Participants[i],startPosition);
+                testInstance2->CreateParticipant(instance2Participants[i],startPosition);
+            }
+            bool success = true;
+            success = testInstance1->StartMixing();
+            assert(success);
+            success = testInstance2->StartMixing(2);
+            assert(success);
+        }
+    }
+
+    std::cout << "Press enter to stop" << std::endl;
+    getchar();
+    delete testInstance1;
+    delete testInstance2;
+    return 0;
+}
+
+FileWriter::FileWriter()
+    :
+    _file(NULL)
+{
+}
+
+FileWriter::~FileWriter()
+{
+    if(_file)
+    {
+        fclose(_file);
+    }
+}
+
+bool
+FileWriter::SetFileName(
+    const char* fileName)
+{
+    if(_file)
+    {
+        fclose(_file);
+    }
+    _file = fopen(fileName,"wb");
+    return _file != NULL;
+}
+
+bool
+FileWriter::WriteToFile(
+    const AudioFrame& audioFrame)
+{
+    WebRtc_Word32 written = (WebRtc_Word32)fwrite(audioFrame.data_,sizeof(WebRtc_Word16),audioFrame.samples_per_channel_,_file);
+    // Do not flush buffers since that will add (a lot of) delay
+    return written == audioFrame.samples_per_channel_;
+}
+
+FileReader::FileReader()
+    :
+    _frequency(kDefaultFrequency),
+    _sampleSize((_frequency*kProcessPeriodicityInMs)/1000),
+    _timeStamp(0),
+    _file(NULL),
+    _vadInstr(NULL),
+    _automaticVad(false),
+    _vad(false)
+{
+    if(WebRtcVad_Create(&_vadInstr) == 0)
+    {
+        if(WebRtcVad_Init(_vadInstr) != 0)
+        {
+            assert(false);
+            WebRtcVad_Free(_vadInstr);
+            _vadInstr = NULL;
+        }
+    }
+    else
+    {
+        assert(false);
+    }
+}
+
+FileReader::~FileReader()
+{
+    if(_file)
+    {
+        fclose(_file);
+    }
+    if(_vadInstr)
+    {
+        WebRtcVad_Free(_vadInstr);
+    }
+}
+
+bool
+FileReader::SetFileName(
+    const char* fileName)
+{
+    if(_file)
+    {
+        fclose(_file);
+    }
+    _file = fopen(fileName,"rb");
+    return _file != NULL;
+}
+
+bool
+FileReader::ReadFromFile(
+    AudioFrame& audioFrame)
+{
+
+    WebRtc_Word16 buffer[AudioFrame::kMaxDataSizeSamples];
+    LoopedFileRead(buffer,AudioFrame::kMaxDataSizeSamples,_sampleSize,_file);
+
+    bool vad = false;
+    GetVAD(buffer,_sampleSize,vad);
+    AudioFrame::VADActivity activity = vad ? AudioFrame::kVadActive :
+                                 AudioFrame::kVadPassive;
+
+    _volumeCalculator.ComputeLevel(buffer,_sampleSize);
+    const WebRtc_Word32 level = _volumeCalculator.GetLevel();
+    return audioFrame.UpdateFrame(  -1,
+                                    _timeStamp,
+                                    buffer,
+                                    _sampleSize,
+                                    _frequency,
+                                    AudioFrame::kNormalSpeech,
+                                    activity,
+                                    0,
+                                    level) == 0;
+
+}
+
+bool
+FileReader::FastForwardFile(
+    const WebRtc_Word32 samples)
+{
+    WebRtc_Word16* tempBuffer = new WebRtc_Word16[samples];
+    bool success = LoopedFileRead(tempBuffer,samples,samples,_file);
+    delete[] tempBuffer;
+    return success;
+}
+
+bool
+FileReader::EnableAutomaticVAD(
+    bool enable,
+    int mode)
+{
+    if(!_automaticVad &&
+       enable)
+    {
+        if(WebRtcVad_Init(_vadInstr) == -1)
+        {
+            return false;
+        }
+    }
+    WebRtcVad_set_mode(_vadInstr,mode);
+    _automaticVad = enable;
+    return true;
+}
+
+bool
+FileReader::SetVAD(
+    bool vad)
+{
+    if(_automaticVad)
+    {
+        return false;
+    }
+    _vad = vad;
+    return true;
+}
+
+bool
+FileReader::GetVAD(
+    WebRtc_Word16* buffer,
+    WebRtc_UWord8 bufferLengthInSamples,
+    bool& vad)
+{
+    if(_automaticVad)
+    {
+        WebRtc_Word16 result = WebRtcVad_Process(_vadInstr,_frequency,buffer,bufferLengthInSamples);
+        if(result == -1)
+        {
+            assert(false);
+            return false;
+        }
+        _vad = vad = (result == 1);
+    }
+    vad = _vad;
+    return true;
+}
+
+MixerParticipant*
+MixerParticipant::CreateParticipant(
+    const WebRtc_UWord32 id,
+    ParticipantType participantType,
+    const WebRtc_Word32 startPosition,
+    char* outputPath)
+{
+    if(participantType == RANDOM)
+    {
+        participantType = (ParticipantType)(rand() % 3);
+    }
+    MixerParticipant* participant = new MixerParticipant(id,participantType);
+    // Randomize the start position so we only need one input file
+    // assume file is smaller than 1 minute wideband = 60 * 16000
+    // Always start at a multiple of 10ms wideband
+    if(!participant->InitializeFileReader(startPosition) ||
+       !participant->InitializeFileWriter(outputPath))
+    {
+        delete participant;
+        return NULL;
+    }
+    return participant;
+}
+
+MixerParticipant::MixerParticipant(
+    const WebRtc_UWord32 id,
+    ParticipantType participantType)
+    :
+    _id(id),
+    _participantType(participantType),
+    _fileReader(),
+    _fileWriter()
+{
+}
+
+MixerParticipant::~MixerParticipant()
+{
+}
+
+WebRtc_Word32
+MixerParticipant::GetAudioFrame(
+    const WebRtc_Word32 /*id*/,
+    AudioFrame& audioFrame)
+{
+    if(!_fileReader.ReadFromFile(audioFrame))
+    {
+        return -1;
+    }
+    audioFrame._id = _id;
+    return 0;
+}
+
+WebRtc_Word32
+MixerParticipant::MixedAudioFrame(
+    const AudioFrame& audioFrame)
+{
+    return _fileWriter.WriteToFile(audioFrame);
+}
+
+WebRtc_Word32
+MixerParticipant::GetParticipantType(
+    ParticipantType& participantType)
+{
+    participantType = _participantType;
+    return 0;
+}
+
+bool
+MixerParticipant::InitializeFileReader(
+    const WebRtc_Word32 startPositionInSamples)
+{
+    char fileName[128] = "";
+    if(_participantType == REGULAR)
+    {
+        sprintf(fileName,"convFile.pcm");
+    }
+    else
+    {
+        sprintf(fileName,"musicFile.pcm");
+    }
+    if(!_fileReader.SetFileName(fileName))
+    {
+        return false;
+    }
+    if(!_fileReader.EnableAutomaticVAD(true,2))
+    {
+        assert(false);
+    }
+    return _fileReader.FastForwardFile(startPositionInSamples);
+}
+
+bool
+MixerParticipant::InitializeFileWriter(
+    char* outputPath)
+{
+    const WebRtc_Word32 stringsize = 128;
+    char fileName[stringsize] = "";
+    strncpy(fileName,outputPath,stringsize);
+    fileName[stringsize-1] = '\0';
+
+    char tempName[stringsize];
+    tempName[0] = '\0';
+    sprintf(tempName,"outputFile%d.pcm",(int)_id);
+    strncat(fileName,tempName,(stringsize - strlen(fileName)));
+    fileName[stringsize-1] = '\0';
+
+    return _fileWriter.SetFileName(fileName);
+}
+
+StatusReceiver::StatusReceiver(
+    const WebRtc_Word32 id)
+    :
+    _id(id),
+    _mixedParticipants(NULL),
+    _mixedParticipantsAmount(0),
+    _mixedParticipantsSize(0),
+    _vadPositiveParticipants(NULL),
+    _vadPositiveParticipantsAmount(0),
+    _vadPositiveParticipantsSize(0),
+    _mixedAudioLevel(0)
+{
+}
+
+StatusReceiver::~StatusReceiver()
+{
+    delete[] _mixedParticipants;
+    delete[] _vadPositiveParticipants;
+}
+
+void
+StatusReceiver::MixedParticipants(
+    const WebRtc_Word32 id,
+    const ParticipantStatistics* participantStatistics,
+    const WebRtc_UWord32 size)
+{
+    if(id != _id)
+    {
+        assert(false);
+    }
+    if(_mixedParticipantsSize < size)
+    {
+        delete[] _mixedParticipants;
+        _mixedParticipantsSize = size;
+        _mixedParticipants = new ParticipantStatistics[size];
+    }
+    _mixedParticipantsAmount = size;
+    memcpy(_mixedParticipants,participantStatistics,sizeof(ParticipantStatistics)*size);
+}
+
+void
+StatusReceiver::VADPositiveParticipants(
+    const WebRtc_Word32 id,
+    const ParticipantStatistics* participantStatistics,
+    const WebRtc_UWord32 size)
+{
+    if(id != _id)
+    {
+        assert(false);
+    }
+
+    if(_vadPositiveParticipantsSize < size)
+    {
+        delete[] _vadPositiveParticipants;
+        _vadPositiveParticipantsSize = size;
+        _vadPositiveParticipants = new ParticipantStatistics[size];
+    }
+    _vadPositiveParticipantsAmount = size;
+    memcpy(_vadPositiveParticipants,participantStatistics,sizeof(ParticipantStatistics)*size);
+}
+
+void
+StatusReceiver::MixedAudioLevel(
+    const WebRtc_Word32  id,
+    const WebRtc_UWord32 level)
+{
+    if(id != _id)
+    {
+        assert(false);
+    }
+    _mixedAudioLevel = level;
+}
+
+void
+StatusReceiver::PrintMixedParticipants()
+{
+    std::cout << "Mixed participants" << std::endl;
+    if(_mixedParticipantsAmount == 0)
+    {
+        std::cout << "N/A" << std::endl;
+    }
+    for(WebRtc_UWord16 i = 0; i < _mixedParticipantsAmount; i++)
+    {
+        std::cout << i + 1 << ". Participant " << _mixedParticipants[i].participant << ": level = " << _mixedParticipants[i].level << std::endl;
+    }
+}
+
+void
+StatusReceiver::PrintVadPositiveParticipants()
+{
+    std::cout << "VAD positive participants" << std::endl;
+    if(_mixedParticipantsAmount == 0)
+    {
+        std::cout << "N/A"  << std::endl;
+    }
+    for(WebRtc_UWord16 i = 0; i < _mixedParticipantsAmount; i++)
+    {
+        std::cout << i + 1 << ". Participant " << _mixedParticipants[i].participant << ": level = " << _mixedParticipants[i].level << std::endl;
+    }
+}
+
+void
+StatusReceiver::PrintMixedAudioLevel()
+{
+    std::cout << "Mixed audio level = " << _mixedAudioLevel << std::endl;
+}
+
+WebRtc_Word32 MixerWrapper::_mixerWrapperIdCounter = 0;
+
+MixerWrapper::MixerWrapper()
+    :
+    _processThread(NULL),
+    _threadId(0),
+    _firstProcessCall(true),
+    _previousTime(),
+    _periodicityInTicks(TickTime::MillisecondsToTicks(FileReader::kProcessPeriodicityInMs)),
+    _synchronizationEvent(EventWrapper::Create()),
+    _freeItemIds(),
+    _itemIdCounter(0),
+    _mixerParticipants(),
+    _mixerWrappererId(_mixerWrapperIdCounter++),
+    _instanceOutputPath(),
+    _trace(NULL),
+    _statusReceiver(_mixerWrappererId),
+    _generalAudioWriter()
+{
+    sprintf(_instanceOutputPath,"instance%d/",(int)_mixerWrappererId);
+    MKDIR(_instanceOutputPath);
+    _mixer = AudioConferenceMixer::CreateAudioConferenceMixer(
+                                                    _mixerWrappererId);
+    if(_mixer != NULL)
+    {
+        bool success = true;
+
+        success = _mixer->RegisterMixedStreamCallback(*this) == 0;
+        assert(success);
+        success = _mixer->RegisterMixedStreamCallback(*this) == -1;
+        assert(success);
+        success = _mixer->UnRegisterMixedStreamCallback() == 0;
+        assert(success);
+        success = _mixer->UnRegisterMixedStreamCallback() == -1;
+        assert(success);
+        success = _mixer->RegisterMixedStreamCallback(*this) == 0;
+        assert(success);
+
+        success = _mixer->RegisterMixerStatusCallback(_statusReceiver,2) == 0;
+        assert(success);
+        success = _mixer->RegisterMixerStatusCallback(_statusReceiver,1) == -1;
+        assert(success);
+        success = _mixer->UnRegisterMixerStatusCallback() == 0;
+        assert(success);
+        success = _mixer->UnRegisterMixerStatusCallback() == -1;
+        assert(success);
+        success = _mixer->RegisterMixerStatusCallback(_statusReceiver,1) == 0;
+        assert(success);
+    }
+    else
+    {
+        assert(false);
+        std::cout << "Failed to create mixer instance";
+    }
+}
+
+MixerWrapper*
+MixerWrapper::CreateMixerWrapper()
+{
+    MixerWrapper* mixerWrapper = new MixerWrapper();
+    if(!mixerWrapper->InitializeFileWriter())
+    {
+        delete mixerWrapper;
+        return NULL;
+    }
+    return mixerWrapper;
+}
+
+MixerWrapper::~MixerWrapper()
+{
+    StopMixing();
+    ClearAllItemIds();
+    _synchronizationEvent->StopTimer();
+    delete _synchronizationEvent;
+    delete _mixer;
+}
+
+bool
+MixerWrapper::CreateParticipant(
+    MixerParticipant::ParticipantType participantType)
+{
+    WebRtc_Word32 startPosition = 0;
+    GenerateRandomPosition(startPosition);
+    return CreateParticipant(participantType,startPosition);
+}
+
+bool
+MixerWrapper::CreateParticipant(
+    MixerParticipant::ParticipantType participantType,
+    const WebRtc_Word32 startPosition)
+{
+    WebRtc_UWord32 id;
+    if(!GetFreeItemIds(id))
+    {
+        return false;
+    }
+
+    MixerParticipant* participant = MixerParticipant::CreateParticipant(id,participantType,startPosition,_instanceOutputPath);
+    if(!participant)
+    {
+        return false;
+    }
+    if(_mixerParticipants.Insert(id,static_cast<void*>(participant)) != 0)
+    {
+        delete participant;
+        return false;
+    }
+    if(!StartMixingParticipant(id))
+    {
+        DeleteParticipant(id);
+        return false;
+    }
+    return true;
+}
+
+bool
+MixerWrapper::DeleteParticipant(
+    const WebRtc_UWord32 id)
+{
+    bool success = StopMixingParticipant(id);
+    if(!success)
+    {
+        assert(false);
+        return false;
+    }
+    MapItem* item = _mixerParticipants.Find(id);
+    if(item == NULL)
+    {
+        return false;
+    }
+    MixerParticipant* participant = static_cast<MixerParticipant*>(item->GetItem());
+    delete participant;
+    _mixerParticipants.Erase(item);
+    AddFreeItemIds(id);
+    return true;
+}
+
+bool
+MixerWrapper::StartMixing(
+    const WebRtc_UWord32 mixedParticipants)
+{
+    if(_processThread)
+    {
+        return false;
+    }
+    if(_mixer->SetAmountOfMixedParticipants(mixedParticipants) != 0)
+    {
+        assert(false);
+    }
+    WebRtc_UWord32 mixedParticipantsTest = 0;
+    _mixer->AmountOfMixedParticipants(mixedParticipantsTest);
+    assert(mixedParticipantsTest == mixedParticipants);
+
+    if(!_synchronizationEvent->StartTimer(true,10))
+    {
+        assert(false);
+        return false;
+    }
+    _processThread = ThreadWrapper::CreateThread(Process, this, kLowPriority);
+    if(!_processThread->Start(_threadId))
+    {
+        delete _processThread;
+        _processThread = NULL;
+        assert(false);
+        return false;
+    }
+
+    return true;
+}
+
+bool
+MixerWrapper::StopMixing()
+{
+    while(_processThread &&
+          !_processThread->Stop())
+    {}
+    _synchronizationEvent->StopTimer();
+
+    delete _processThread;
+    _processThread = NULL;
+    return true;
+}
+
+void
+MixerWrapper::NewMixedAudio(
+    const WebRtc_Word32 id,
+    const AudioFrame& generalAudioFrame,
+    const AudioFrame** uniqueAudioFrames,
+    const WebRtc_UWord32 size)
+{
+    if(id < 0)
+    {
+        assert(false);
+    }
+    // Store the general audio
+    _generalAudioWriter.WriteToFile(generalAudioFrame);
+
+    // Send the unique audio frames to its corresponding participants
+    ListWrapper uniqueAudioFrameList;
+    for(WebRtc_UWord32 i = 0; i < size; i++)
+    {
+        WebRtc_UWord32 id = (uniqueAudioFrames[i])->_id;
+        MapItem* resultItem = _mixerParticipants.Find(id);
+        if(resultItem == NULL)
+        {
+            assert(false);
+            continue;
+        }
+        MixerParticipant* participant = static_cast<MixerParticipant*>(resultItem->GetItem());
+        participant->MixedAudioFrame(*(uniqueAudioFrames[i]));
+        uniqueAudioFrameList.PushBack(resultItem->GetItem());
+    }
+
+    // Send the general audio frames to the remaining participants
+    MapItem* item = _mixerParticipants.First();
+    while(item)
+    {
+        bool isUnique = false;
+        ListItem* compareItem = uniqueAudioFrameList.First();
+        while(compareItem)
+        {
+            if(compareItem->GetItem() == item->GetItem())
+            {
+                isUnique = true;
+                break;
+            }
+            compareItem = uniqueAudioFrameList.Next(compareItem);
+        }
+        if(!isUnique)
+        {
+            MixerParticipant* participant = static_cast<MixerParticipant*>(item->GetItem());
+            participant->MixedAudioFrame(generalAudioFrame);
+        }
+        item = _mixerParticipants.Next(item);
+    }
+}
+
+bool
+MixerWrapper::GetParticipantList(
+    ListWrapper& participants)
+{
+    MapItem* item = _mixerParticipants.First();
+    while(item)
+    {
+        participants.PushBack(item->GetId());
+        item = _mixerParticipants.Next(item);
+    }
+    return true;
+}
+
+void
+MixerWrapper::PrintStatus()
+{
+    std::cout << "instance " << _mixerWrappererId << std::endl;
+    std::cout << std::endl;
+    _statusReceiver.PrintMixedParticipants();
+    std::cout << std::endl;
+    _statusReceiver.PrintVadPositiveParticipants();
+    std::cout << std::endl;
+    _statusReceiver.PrintMixedAudioLevel();
+    std::cout << "---------------------------------------" << std::endl;
+}
+
+bool
+MixerWrapper::InitializeFileWriter()
+{
+    const WebRtc_Word32 stringsize = 128;
+    char fileName[stringsize] = "";
+    strncpy(fileName,_instanceOutputPath,stringsize);
+    fileName[stringsize-1] = '\0';
+
+    strncat(fileName,"generalOutputFile.pcm",(stringsize - strlen(fileName)));
+    fileName[stringsize-1] = '\0';
+    return _generalAudioWriter.SetFileName(fileName);
+}
+
+bool
+MixerWrapper::Process(
+    void* instance)
+{
+    MixerWrapper* mixerWrapper = static_cast<MixerWrapper*>(instance);
+    return mixerWrapper->Process();
+}
+
+bool
+MixerWrapper::Process()
+{
+    switch(_synchronizationEvent->Wait(1000))
+    {
+    case kEventSignaled:
+         // Normal operation, ~10 ms has passed
+        break;
+    case kEventError:
+        // Error occured end the thread and throw an assertion
+        assert(false);
+        return false;
+    case kEventTimeout:
+        // One second has passed without a timeout something is wrong
+        // end the thread and throw an assertion
+        assert(false);
+        return false;
+    }
+    WebRtc_Word32 processOfset = 0;
+    const TickTime currentTime = TickTime::Now();
+    if(_firstProcessCall)
+    {
+        _previousTime = TickTime::Now();
+        _firstProcessCall = false;
+    }
+    else
+    {
+        TickInterval deltaTime = (currentTime - _previousTime);
+        _previousTime += _periodicityInTicks;
+        processOfset = (WebRtc_Word32) deltaTime.Milliseconds();
+        processOfset -= FileReader::kProcessPeriodicityInMs;
+    }
+
+    _mixer->Process();
+    WebRtc_Word32 timeUntilNextProcess = _mixer->TimeUntilNextProcess();
+    if(processOfset > FileReader::kProcessPeriodicityInMs)
+    {
+        std::cout << "Performance Warning: Process running " << processOfset << " too slow" << std::endl;
+        _previousTime = currentTime;
+        if(timeUntilNextProcess > 0)
+        {
+            std::cout << "Performance Warning: test performance and module performance missmatch" << std::endl;
+        }
+    }
+    else if(processOfset < -FileReader::kProcessPeriodicityInMs)
+    {
+        std::cout << "Performance Warning: Process running " << -processOfset << " too fast" << std::endl;
+        _previousTime = currentTime;
+        if(timeUntilNextProcess < FileReader::kProcessPeriodicityInMs)
+        {
+            std::cout << "Performance Warning: test performance and module performance missmatch" << std::endl;
+        }
+    }
+    return true;
+}
+
+
+bool
+MixerWrapper::StartMixingParticipant(
+    const WebRtc_UWord32 id)
+{
+    MapItem* item = _mixerParticipants.Find(id);
+    if(item == NULL)
+    {
+        return false;
+    }
+    MixerParticipant* participant = static_cast<MixerParticipant*>(item->GetItem());
+    MixerParticipant::ParticipantType participantType = MixerParticipant::REGULAR;
+    participant->GetParticipantType(participantType);
+    if(participantType == MixerParticipant::MIXED_ANONYMOUS)
+    {
+        bool anonymouslyMixed = false;
+        bool success = _mixer->SetAnonymousMixabilityStatus(*participant,true) == 0;
+        assert(success);
+        success = _mixer->AnonymousMixabilityStatus(*participant,anonymouslyMixed) == 0;
+        assert(success);
+        assert(anonymouslyMixed);
+        success = _mixer->SetAnonymousMixabilityStatus(*participant,true) == -1;
+        assert(success);
+        success = _mixer->SetAnonymousMixabilityStatus(*participant,false) == 0;
+        assert(success);
+        success = _mixer->AnonymousMixabilityStatus(*participant,anonymouslyMixed) == 0;
+        assert(success);
+        assert(!anonymouslyMixed);
+        success = _mixer->SetAnonymousMixabilityStatus(*participant,false) == -1;
+        assert(success);
+        success = _mixer->SetAnonymousMixabilityStatus(*participant,true) == 0;
+        assert(success);
+        success = _mixer->AnonymousMixabilityStatus(*participant,anonymouslyMixed) == 0;
+        assert(success);
+        assert(anonymouslyMixed);
+        return success;
+    }
+    WebRtc_UWord32 previousAmountOfMixableParticipants = 0;
+    bool success = _mixer->AmountOfMixables(previousAmountOfMixableParticipants) == 0;
+    assert(success);
+
+    success = _mixer->SetMixabilityStatus(*participant,true) == 0;
+    assert(success);
+    success = _mixer->SetMixabilityStatus(*participant,true) == -1;
+    assert(success);
+    success = _mixer->SetMixabilityStatus(*participant,false) == 0;
+    assert(success);
+    success = _mixer->SetMixabilityStatus(*participant,false) == -1;
+    assert(success);
+    success = _mixer->SetMixabilityStatus(*participant,true) == 0;
+    assert(success);
+    if(!success)
+    {
+        return false;
+    }
+
+    WebRtc_UWord32 currentAmountOfMixableParticipants = 0;
+    success = _mixer->AmountOfMixables(currentAmountOfMixableParticipants) == 0;
+    assert(currentAmountOfMixableParticipants == previousAmountOfMixableParticipants + 1);
+
+    bool mixable = true;
+    success = _mixer->MixabilityStatus(*participant,mixable) == 0;
+    assert(success);
+    assert(mixable);
+    if(participantType == MixerParticipant::REGULAR)
+    {
+        return true;
+    }
+    bool IsVIP = false;
+    success = _mixer->SetVIPStatus(*participant,true) == 0;
+    assert(success);
+    success = _mixer->VIPStatus(*participant,IsVIP) == 0;
+    assert(success);
+    assert(IsVIP);
+    success = _mixer->SetVIPStatus(*participant,true) == -1;
+    assert(success);
+    success = _mixer->SetVIPStatus(*participant,false) == 0;
+    assert(success);
+    success = _mixer->VIPStatus(*participant,IsVIP) == 0;
+    assert(success);
+    assert(!IsVIP);
+    success = _mixer->SetVIPStatus(*participant,false) == -1;
+    assert(success);
+    success = _mixer->SetVIPStatus(*participant,true) == 0;
+    assert(success);
+    success = _mixer->VIPStatus(*participant,IsVIP) == 0;
+    assert(success);
+    assert(IsVIP);
+    assert(success);
+    return success;
+}
+
+bool
+MixerWrapper::StopMixingParticipant(
+    const WebRtc_UWord32 id)
+{
+    MapItem* item = _mixerParticipants.Find(id);
+    if(item == NULL)
+    {
+        return false;
+    }
+    MixerParticipant* participant = static_cast<MixerParticipant*>(item->GetItem());
+    bool success = false;
+    WebRtc_UWord32 previousAmountOfMixableParticipants = 0;
+    success = _mixer->AmountOfMixables(previousAmountOfMixableParticipants) == 0;
+    assert(success);
+    success = _mixer->SetMixabilityStatus(*participant,false) == 0;
+    assert(success);
+    WebRtc_UWord32 currentAmountOfMixableParticipants = 0;
+    success = _mixer->AmountOfMixables(currentAmountOfMixableParticipants) == 0;
+    assert(success);
+    assert(success ? currentAmountOfMixableParticipants == previousAmountOfMixableParticipants -1 :
+                     currentAmountOfMixableParticipants == previousAmountOfMixableParticipants);
+    return success;
+}
+
+bool
+MixerWrapper::GetFreeItemIds(
+    WebRtc_UWord32& itemId)
+{
+    if(!_freeItemIds.Empty())
+    {
+        ListItem* item = _freeItemIds.First();
+        WebRtc_UWord32* id = static_cast<WebRtc_UWord32*>(item->GetItem());
+        itemId = *id;
+        delete id;
+        return true;
+    }
+    if(_itemIdCounter == (WebRtc_UWord32) -1)
+    {
+        return false;
+    }
+    itemId = _itemIdCounter++;
+    return true;
+}
+
+void
+MixerWrapper::AddFreeItemIds(
+    const WebRtc_UWord32 itemId)
+{
+    WebRtc_UWord32* id = new WebRtc_UWord32;
+    *id = itemId;
+    _freeItemIds.PushBack(static_cast<void*>(id));
+}
+
+void
+MixerWrapper::ClearAllItemIds()
+{
+    ListItem* item = _freeItemIds.First();
+    while(item != NULL)
+    {
+        WebRtc_UWord32* id = static_cast<WebRtc_UWord32*>(item->GetItem());
+        delete id;
+        _freeItemIds.Erase(item);
+        item = _freeItemIds.First();
+    }
+}
+
+bool
+LoopedFileRead(
+    WebRtc_Word16* buffer,
+    WebRtc_UWord32 bufferSizeInSamples,
+    WebRtc_UWord32 samplesToRead,
+    FILE* file)
+{
+    if(bufferSizeInSamples < samplesToRead)
+    {
+        return false;
+    }
+    WebRtc_UWord32 gottenSamples = (WebRtc_UWord32)fread(buffer,sizeof(WebRtc_Word16),samplesToRead,file);
+    if(gottenSamples != samplesToRead)
+    {
+        WebRtc_UWord32 missingSamples = samplesToRead - gottenSamples;
+        fseek(file,0,0);
+        gottenSamples += (WebRtc_UWord32)fread(&buffer[gottenSamples],sizeof(WebRtc_Word16),missingSamples,file);
+    }
+    if(gottenSamples != samplesToRead)
+    {
+        return false;
+    }
+    return true;
+}
+
+void
+GenerateRandomPosition(
+    WebRtc_Word32& startPosition)
+{
+    startPosition = (rand() % (60*16000/160)) * 160;
+}
diff --git a/src/modules/audio_conference_mixer/test/FunctionTest/functionTest.h b/src/modules/audio_conference_mixer/test/FunctionTest/functionTest.h
new file mode 100644
index 0000000..f25e5f1
--- /dev/null
+++ b/src/modules/audio_conference_mixer/test/FunctionTest/functionTest.h
@@ -0,0 +1,276 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_TEST_FUNCTIONTEST_FUNCTIONTEST_H_
+#define WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_TEST_FUNCTIONTEST_FUNCTIONTEST_H_
+
+#include "module_common_types.h"
+#include "level_indicator.h"
+#include "list_wrapper.h"
+#include "map_wrapper.h"
+#include "audio_conference_mixer.h"
+#include "audio_conference_mixer_defines.h"
+#include "tick_util.h"
+
+namespace webrtc {
+class EventWrapper;
+class ThreadWrapper;
+class Trace;
+}
+struct WebRtcVadInst;
+
+class FileWriter
+{
+public:
+    FileWriter();
+    ~FileWriter();
+
+    bool SetFileName(
+        const char* fileName);
+
+    bool WriteToFile(
+        const AudioFrame& audioFrame);
+private:
+    FILE* _file;
+};
+
+class FileReader
+{
+public:
+    enum {kProcessPeriodicityInMs = 10};
+    enum Frequency
+    {
+        kNbInHz          = 8000,
+        kWbInHz          = 16000,
+        kDefaultFrequency = kWbInHz
+    };
+
+    FileReader();
+    ~FileReader();
+
+    bool SetFileName(
+        const char* fileName);
+
+    bool ReadFromFile(
+        AudioFrame& audioFrame);
+
+    bool FastForwardFile(
+        const WebRtc_Word32 samples);
+
+    bool EnableAutomaticVAD(
+        bool enable,
+        int mode);
+
+    bool SetVAD(
+        bool vad);
+private:
+    bool GetVAD(
+        WebRtc_Word16* buffer,
+        WebRtc_UWord8 bufferLengthInSamples,
+        bool& vad);
+
+    Frequency       _frequency;
+    WebRtc_UWord8     _sampleSize;
+
+    WebRtc_UWord32 _timeStamp;
+
+    FILE* _file;
+
+    WebRtcVadInst* _vadInstr;
+    bool  _automaticVad;
+    bool  _vad;
+
+    LevelIndicator _volumeCalculator;
+};
+
+class MixerParticipant : public MixerParticipant
+{
+public:
+    enum ParticipantType
+    {
+        VIP             = 0,
+        REGULAR         = 1,
+        MIXED_ANONYMOUS = 2,
+        RANDOM          = 3
+    };
+
+    static MixerParticipant* CreateParticipant(
+        const WebRtc_UWord32 id,
+        ParticipantType participantType,
+        const WebRtc_Word32 startPosition,
+        char* outputPath);
+    ~MixerParticipant();
+
+    WebRtc_Word32 GetAudioFrame(
+        const WebRtc_Word32 id,
+        AudioFrame& audioFrame);
+
+    WebRtc_Word32 MixedAudioFrame(
+        const AudioFrame& audioFrame);
+
+    WebRtc_Word32 GetParticipantType(
+        ParticipantType& participantType);
+private:
+    MixerParticipant(
+        const WebRtc_UWord32 id,
+        ParticipantType participantType);
+
+    bool InitializeFileReader(
+        const WebRtc_Word32 startPositionInSamples);
+
+    bool InitializeFileWriter(
+        char* outputPath);
+
+    WebRtc_UWord32 _id;
+    ParticipantType _participantType;
+
+    FileReader _fileReader;
+    FileWriter _fileWriter;
+};
+
+class StatusReceiver : public AudioMixerStatusReceiver
+{
+public:
+    StatusReceiver(
+        const WebRtc_Word32 id);
+    ~StatusReceiver();
+
+    void MixedParticipants(
+        const WebRtc_Word32 id,
+        const ParticipantStatistics* participantStatistics,
+        const WebRtc_UWord32 size);
+
+    void VADPositiveParticipants(
+        const WebRtc_Word32 id,
+        const ParticipantStatistics* participantStatistics,
+        const WebRtc_UWord32 size);
+
+    void MixedAudioLevel(
+        const WebRtc_Word32 id,
+        const WebRtc_UWord32 level);
+
+    void PrintMixedParticipants();
+
+    void PrintVadPositiveParticipants();
+
+    void PrintMixedAudioLevel();
+private:
+    WebRtc_Word32 _id;
+
+    ParticipantStatistics*  _mixedParticipants;
+    WebRtc_UWord32                _mixedParticipantsAmount;
+    WebRtc_UWord32                _mixedParticipantsSize;
+
+    ParticipantStatistics*  _vadPositiveParticipants;
+    WebRtc_UWord32                _vadPositiveParticipantsAmount;
+    WebRtc_UWord32                _vadPositiveParticipantsSize;
+
+    WebRtc_UWord32 _mixedAudioLevel;
+};
+
+class MixerWrapper : public AudioMixerOutputReceiver
+{
+public:
+    static MixerWrapper* CreateMixerWrapper();
+    ~MixerWrapper();
+
+    bool SetMixFrequency(
+        const AudioConferenceMixer::Frequency frequency);
+
+    bool CreateParticipant(
+        MixerParticipant::ParticipantType participantType);
+
+    bool CreateParticipant(
+        MixerParticipant::ParticipantType participantType,
+        const WebRtc_Word32 startPosition);
+
+    bool DeleteParticipant(
+        const WebRtc_UWord32 id);
+
+    bool StartMixing(
+        const WebRtc_UWord32 mixedParticipants = AudioConferenceMixer::kDefaultAmountOfMixedParticipants);
+
+    bool StopMixing();
+
+    void NewMixedAudio(
+        const WebRtc_Word32 id,
+        const AudioFrame& generalAudioFrame,
+        const AudioFrame** uniqueAudioFrames,
+        const WebRtc_UWord32 size);
+
+    bool GetParticipantList(
+        ListWrapper& participants);
+
+    void PrintStatus();
+private:
+    MixerWrapper();
+
+    bool InitializeFileWriter();
+
+    static bool Process(
+        void* instance);
+
+    bool Process();
+
+    bool StartMixingParticipant(
+        const WebRtc_UWord32 id);
+
+    bool StopMixingParticipant(
+        const WebRtc_UWord32 id);
+
+    bool GetFreeItemIds(
+        WebRtc_UWord32& itemId);
+
+    void AddFreeItemIds(
+        const WebRtc_UWord32 itemId);
+
+    void ClearAllItemIds();
+
+    webrtc::ThreadWrapper*  _processThread;
+    unsigned int _threadId;
+
+    // Performance hooks
+    enum{WARNING_COUNTER = 100};
+
+    bool _firstProcessCall;
+    TickTime _previousTime;             // Tick time of previous process
+    const WebRtc_Word64  _periodicityInTicks; // Periodicity
+
+    webrtc::EventWrapper*  _synchronizationEvent;
+
+    ListWrapper        _freeItemIds;
+    WebRtc_UWord32    _itemIdCounter;
+
+    MapWrapper _mixerParticipants;
+
+    static WebRtc_Word32 _mixerWrapperIdCounter;
+    WebRtc_Word32 _mixerWrappererId;
+    char _instanceOutputPath[128];
+
+    webrtc::Trace* _trace;
+    AudioConferenceMixer* _mixer;
+
+    StatusReceiver _statusReceiver;
+
+    FileWriter _generalAudioWriter;
+};
+
+bool
+LoopedFileRead(
+    WebRtc_Word16* buffer,
+    WebRtc_UWord32 bufferSizeInSamples,
+    WebRtc_UWord32 samplesToRead,
+    FILE* file);
+
+void
+GenerateRandomPosition(
+    WebRtc_Word32& startPosition);
+
+#endif // WEBRTC_MODULES_AUDIO_CONFERENCE_MIXER_TEST_FUNCTIONTEST_FUNCTIONTEST_H_
diff --git a/src/modules/audio_device/OWNERS b/src/modules/audio_device/OWNERS
new file mode 100644
index 0000000..a07ced3
--- /dev/null
+++ b/src/modules/audio_device/OWNERS
@@ -0,0 +1,4 @@
+henrikg@webrtc.org
+henrika@webrtc.org
+niklas.enbom@webrtc.org
+xians@webrtc.org
diff --git a/src/modules/audio_device/main/interface/audio_device.h b/src/modules/audio_device/main/interface/audio_device.h
new file mode 100644
index 0000000..30b776d
--- /dev/null
+++ b/src/modules/audio_device/main/interface/audio_device.h
@@ -0,0 +1,207 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_AUDIO_DEVICE_MAIN_INTERFACE_AUDIO_DEVICE_H_
+#define MODULES_AUDIO_DEVICE_MAIN_INTERFACE_AUDIO_DEVICE_H_
+
+#include "modules/audio_device/main/interface/audio_device_defines.h"
+#include "modules/interface/module.h"
+
+namespace webrtc {
+
+class AudioDeviceModule : public RefCountedModule {
+ public:
+  enum ErrorCode {
+    kAdmErrNone = 0,
+    kAdmErrArgument = 1
+  };
+
+  enum AudioLayer {
+    kPlatformDefaultAudio = 0,
+    kWindowsWaveAudio = 1,
+    kWindowsCoreAudio = 2,
+    kLinuxAlsaAudio = 3,
+    kLinuxPulseAudio = 4,
+    kDummyAudio = 5
+  };
+
+  enum WindowsDeviceType {
+    kDefaultCommunicationDevice = -1,
+    kDefaultDevice = -2
+  };
+
+  enum BufferType {
+    kFixedBufferSize  = 0,
+    kAdaptiveBufferSize = 1
+  };
+
+  enum ChannelType {
+    kChannelLeft = 0,
+    kChannelRight = 1,
+    kChannelBoth = 2
+  };
+
+ public:
+  // Retrieve the currently utilized audio layer
+  virtual int32_t ActiveAudioLayer(AudioLayer* audioLayer) const = 0;
+
+  // Error handling
+  virtual ErrorCode LastError() const = 0;
+  virtual int32_t RegisterEventObserver(AudioDeviceObserver* eventCallback) = 0;
+
+  // Full-duplex transportation of PCM audio
+  virtual int32_t RegisterAudioCallback(AudioTransport* audioCallback) = 0;
+
+  // Main initialization and termination
+  virtual int32_t Init() = 0;
+  virtual int32_t Terminate() = 0;
+  virtual bool Initialized() const = 0;
+
+  // Device enumeration
+  virtual int16_t PlayoutDevices() = 0;
+  virtual int16_t RecordingDevices() = 0;
+  virtual int32_t PlayoutDeviceName(uint16_t index,
+                                    char name[kAdmMaxDeviceNameSize],
+                                    char guid[kAdmMaxGuidSize]) = 0;
+  virtual int32_t RecordingDeviceName(uint16_t index,
+                                      char name[kAdmMaxDeviceNameSize],
+                                      char guid[kAdmMaxGuidSize]) = 0;
+
+  // Device selection
+  virtual int32_t SetPlayoutDevice(uint16_t index) = 0;
+  virtual int32_t SetPlayoutDevice(WindowsDeviceType device) = 0;
+  virtual int32_t SetRecordingDevice(uint16_t index) = 0;
+  virtual int32_t SetRecordingDevice(WindowsDeviceType device) = 0;
+
+  // Audio transport initialization
+  virtual int32_t PlayoutIsAvailable(bool* available) = 0;
+  virtual int32_t InitPlayout() = 0;
+  virtual bool PlayoutIsInitialized() const = 0;
+  virtual int32_t RecordingIsAvailable(bool* available) = 0;
+  virtual int32_t InitRecording() = 0;
+  virtual bool RecordingIsInitialized() const = 0;
+
+  // Audio transport control
+  virtual int32_t StartPlayout() = 0;
+  virtual int32_t StopPlayout() = 0;
+  virtual bool Playing() const = 0;
+  virtual int32_t StartRecording() = 0;
+  virtual int32_t StopRecording() = 0;
+  virtual bool Recording() const = 0;
+
+  // Microphone Automatic Gain Control (AGC)
+  virtual int32_t SetAGC(bool enable) = 0;
+  virtual bool AGC() const = 0;
+
+  // Volume control based on the Windows Wave API (Windows only)
+  virtual int32_t SetWaveOutVolume(uint16_t volumeLeft,
+                                   uint16_t volumeRight) = 0;
+  virtual int32_t WaveOutVolume(uint16_t* volumeLeft,
+                                uint16_t* volumeRight) const = 0;
+
+  // Audio mixer initialization
+  virtual int32_t SpeakerIsAvailable(bool* available) = 0;
+  virtual int32_t InitSpeaker() = 0;
+  virtual bool SpeakerIsInitialized() const = 0;
+  virtual int32_t MicrophoneIsAvailable(bool* available) = 0;
+  virtual int32_t InitMicrophone() = 0;
+  virtual bool MicrophoneIsInitialized() const = 0;
+
+  // Speaker volume controls
+  virtual int32_t SpeakerVolumeIsAvailable(bool* available) = 0;
+  virtual int32_t SetSpeakerVolume(uint32_t volume) = 0;
+  virtual int32_t SpeakerVolume(uint32_t* volume) const = 0;
+  virtual int32_t MaxSpeakerVolume(uint32_t* maxVolume) const = 0;
+  virtual int32_t MinSpeakerVolume(uint32_t* minVolume) const = 0;
+  virtual int32_t SpeakerVolumeStepSize(uint16_t* stepSize) const = 0;
+
+  // Microphone volume controls
+  virtual int32_t MicrophoneVolumeIsAvailable(bool* available) = 0;
+  virtual int32_t SetMicrophoneVolume(uint32_t volume) = 0;
+  virtual int32_t MicrophoneVolume(uint32_t* volume) const = 0;
+  virtual int32_t MaxMicrophoneVolume(uint32_t* maxVolume) const = 0;
+  virtual int32_t MinMicrophoneVolume(uint32_t* minVolume) const = 0;
+  virtual int32_t MicrophoneVolumeStepSize(uint16_t* stepSize) const = 0;
+
+  // Speaker mute control
+  virtual int32_t SpeakerMuteIsAvailable(bool* available) = 0;
+  virtual int32_t SetSpeakerMute(bool enable) = 0;
+  virtual int32_t SpeakerMute(bool* enabled) const = 0;
+
+  // Microphone mute control
+  virtual int32_t MicrophoneMuteIsAvailable(bool* available) = 0;
+  virtual int32_t SetMicrophoneMute(bool enable) = 0;
+  virtual int32_t MicrophoneMute(bool* enabled) const = 0;
+
+  // Microphone boost control
+  virtual int32_t MicrophoneBoostIsAvailable(bool* available) = 0;
+  virtual int32_t SetMicrophoneBoost(bool enable) = 0;
+  virtual int32_t MicrophoneBoost(bool* enabled) const = 0;
+
+  // Stereo support
+  virtual int32_t StereoPlayoutIsAvailable(bool* available) const = 0;
+  virtual int32_t SetStereoPlayout(bool enable) = 0;
+  virtual int32_t StereoPlayout(bool* enabled) const = 0;
+  virtual int32_t StereoRecordingIsAvailable(bool* available) const = 0;
+  virtual int32_t SetStereoRecording(bool enable) = 0;
+  virtual int32_t StereoRecording(bool* enabled) const = 0;
+  virtual int32_t SetRecordingChannel(const ChannelType channel) = 0;
+  virtual int32_t RecordingChannel(ChannelType* channel) const = 0;
+
+  // Delay information and control
+  virtual int32_t SetPlayoutBuffer(const BufferType type,
+                                   uint16_t sizeMS = 0) = 0;
+  virtual int32_t PlayoutBuffer(BufferType* type, uint16_t* sizeMS) const = 0;
+  virtual int32_t PlayoutDelay(uint16_t* delayMS) const = 0;
+  virtual int32_t RecordingDelay(uint16_t* delayMS) const = 0;
+
+  // CPU load
+  virtual int32_t CPULoad(uint16_t* load) const = 0;
+
+  // Recording of raw PCM data
+  virtual int32_t StartRawOutputFileRecording(
+      const char pcmFileNameUTF8[kAdmMaxFileNameSize]) = 0;
+  virtual int32_t StopRawOutputFileRecording() = 0;
+  virtual int32_t StartRawInputFileRecording(
+      const char pcmFileNameUTF8[kAdmMaxFileNameSize]) = 0;
+  virtual int32_t StopRawInputFileRecording() = 0;
+
+  // Native sample rate controls (samples/sec)
+  virtual int32_t SetRecordingSampleRate(const uint32_t samplesPerSec) = 0;
+  virtual int32_t RecordingSampleRate(uint32_t* samplesPerSec) const = 0;
+  virtual int32_t SetPlayoutSampleRate(const uint32_t samplesPerSec) = 0;
+  virtual int32_t PlayoutSampleRate(uint32_t* samplesPerSec) const = 0;
+
+  // Mobile device specific functions
+  virtual int32_t ResetAudioDevice() = 0;
+  virtual int32_t SetLoudspeakerStatus(bool enable) = 0;
+  virtual int32_t GetLoudspeakerStatus(bool* enabled) const = 0;
+
+  // *Experimental - not recommended for use.*
+  // Enables the Windows Core Audio built-in AEC. Fails on other platforms.
+  //
+  // Must be called before InitRecording(). When enabled:
+  // 1. StartPlayout() must be called before StartRecording().
+  // 2. StopRecording() should be called before StopPlayout().
+  //    The reverse order may cause garbage audio to be rendered or the
+  //    capture side to halt until StopRecording() is called.
+  virtual int32_t EnableBuiltInAEC(bool enable) { return -1; }
+  virtual bool BuiltInAECIsEnabled() const { return false; }
+
+ protected:
+  virtual ~AudioDeviceModule() {};
+};
+
+AudioDeviceModule* CreateAudioDeviceModule(
+    WebRtc_Word32 id, AudioDeviceModule::AudioLayer audioLayer);
+
+}  // namespace webrtc
+
+#endif  // MODULES_AUDIO_DEVICE_MAIN_INTERFACE_AUDIO_DEVICE_H_
diff --git a/src/modules/audio_device/main/interface/audio_device_defines.h b/src/modules/audio_device/main/interface/audio_device_defines.h
new file mode 100644
index 0000000..ab7ed60
--- /dev/null
+++ b/src/modules/audio_device/main/interface/audio_device_defines.h
@@ -0,0 +1,80 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_DEFINES_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_DEFINES_H
+
+#include "typedefs.h"
+
+namespace webrtc {
+
+static const int kAdmMaxDeviceNameSize = 128;
+static const int kAdmMaxFileNameSize = 512;
+static const int kAdmMaxGuidSize = 128;
+
+static const int kAdmMinPlayoutBufferSizeMs = 10;
+static const int kAdmMaxPlayoutBufferSizeMs = 250;
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceObserver
+// ----------------------------------------------------------------------------
+
+class AudioDeviceObserver
+{
+public:
+    enum ErrorCode
+    {
+        kRecordingError = 0,
+        kPlayoutError = 1
+    };
+    enum WarningCode
+    {
+        kRecordingWarning = 0,
+        kPlayoutWarning = 1
+    };
+
+    virtual void OnErrorIsReported(const ErrorCode error) = 0;
+    virtual void OnWarningIsReported(const WarningCode warning) = 0;
+
+protected:
+    virtual ~AudioDeviceObserver() {}
+};
+
+// ----------------------------------------------------------------------------
+//  AudioTransport
+// ----------------------------------------------------------------------------
+
+class AudioTransport
+{
+public:
+    virtual int32_t RecordedDataIsAvailable(const void* audioSamples,
+                                            const uint32_t nSamples,
+                                            const uint8_t nBytesPerSample,
+                                            const uint8_t nChannels,
+                                            const uint32_t samplesPerSec,
+                                            const uint32_t totalDelayMS,
+                                            const int32_t clockDrift,
+                                            const uint32_t currentMicLevel,
+                                            uint32_t& newMicLevel) = 0;   
+
+    virtual int32_t NeedMorePlayData(const uint32_t nSamples,
+                                     const uint8_t nBytesPerSample,
+                                     const uint8_t nChannels,
+                                     const uint32_t samplesPerSec,
+                                     void* audioSamples,
+                                     uint32_t& nSamplesOut) = 0;
+
+protected:
+    virtual ~AudioTransport() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_DEFINES_H
diff --git a/src/modules/audio_device/main/source/android/audio_device_android_jni.cc b/src/modules/audio_device/main/source/android/audio_device_android_jni.cc
new file mode 100644
index 0000000..c6357f0
--- /dev/null
+++ b/src/modules/audio_device/main/source/android/audio_device_android_jni.cc
@@ -0,0 +1,2920 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  Android audio device implementation (JNI/AudioTrack/AudioRecord usage)
+ */
+
+// TODO(xians): Break out attach and detach current thread to JVM to
+// separate functions.
+
+#include <stdlib.h>
+#include "audio_device_utility.h"
+#include "audio_device_android_jni.h"
+#include "audio_device_config.h"
+
+#include "trace.h"
+#include "thread_wrapper.h"
+#include "event_wrapper.h"
+
+// Android logging, uncomment to print trace to logcat instead of
+// trace file/callback
+//#include <android/log.h>
+//#define WEBRTC_TRACE(a,b,c,...)  __android_log_print(ANDROID_LOG_DEBUG, \
+//    "WebRTC AD jni", __VA_ARGS__)
+
+namespace webrtc
+{
+
+JavaVM* globalJvm = NULL;
+JNIEnv* globalJNIEnv = NULL;
+jobject globalSndContext = NULL;
+jclass globalScClass = NULL;
+
+// ----------------------------------------------------------------------------
+//  SetAndroidAudioDeviceObjects
+//
+//  Global function for setting Java pointers and creating Java
+//  objects that are global to all instances of VoiceEngine used
+//  by the same Java application.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 SetAndroidAudioDeviceObjects(void* javaVM, void* env,
+                                           void* context)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, -1, "%s",
+                 __FUNCTION__);
+
+    globalJvm = (JavaVM*) javaVM;
+    globalSndContext = (jobject) context;
+
+    if (env)
+    {
+        globalJNIEnv = (JNIEnv *) env;
+
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, -1,
+                     "%s: will find class", __FUNCTION__);
+
+        // get java class type (note path to class packet)
+        jclass
+                javaScClassLocal =
+                        globalJNIEnv->FindClass(
+                                "org/webrtc/voiceengine/AudioDeviceAndroid");
+        if (!javaScClassLocal)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+                         "%s: could not find java class", __FUNCTION__);
+            return -1; /* exception thrown */
+        }
+
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, -1,
+                     "%s: will create global reference", __FUNCTION__);
+
+        // create a global reference to the class (to tell JNI that we are
+        // referencing it after this function has returned)
+        globalScClass
+                = reinterpret_cast<jclass> (globalJNIEnv->NewGlobalRef(
+                        javaScClassLocal));
+        if (!globalScClass)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+                         "%s: could not create reference", __FUNCTION__);
+            return -1;
+        }
+
+        // Delete local class ref, we only use the global ref
+        globalJNIEnv->DeleteLocalRef(javaScClassLocal);
+    }
+    else // User is resetting the env variable
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1,
+                     "%s: env is NULL, assuming deinit", __FUNCTION__);
+
+        if (!globalJNIEnv)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, -1,
+                         "%s: saved env already NULL", __FUNCTION__);
+            return 0;
+        }
+
+        globalJNIEnv->DeleteGlobalRef(globalScClass);
+        globalJNIEnv = (JNIEnv *) NULL;
+    }
+
+    return 0;
+}
+
+// ============================================================================
+//                            Construction & Destruction
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceAndroidJni - ctor
+// ----------------------------------------------------------------------------
+
+AudioDeviceAndroidJni::AudioDeviceAndroidJni(const WebRtc_Word32 id) :
+            _ptrAudioBuffer(NULL),
+            _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+            _id(id),
+            _timeEventRec(*EventWrapper::Create()),
+            _timeEventPlay(*EventWrapper::Create()),
+            _recStartStopEvent(*EventWrapper::Create()),
+            _playStartStopEvent(*EventWrapper::Create()),
+            _ptrThreadPlay(NULL),
+            _ptrThreadRec(NULL),
+            _recThreadID(0),
+            _playThreadID(0),
+            _playThreadIsInitialized(false),
+            _recThreadIsInitialized(false),
+            _shutdownPlayThread(false),
+            _shutdownRecThread(false),
+            //    _recBuffer[2*REC_BUF_SIZE_IN_SAMPLES]
+            _recordingDeviceIsSpecified(false),
+            _playoutDeviceIsSpecified(false), _initialized(false),
+            _recording(false), _playing(false), _recIsInitialized(false),
+            _playIsInitialized(false), _micIsInitialized(false),
+            _speakerIsInitialized(false), _startRec(false), _stopRec(false),
+            _startPlay(false), _stopPlay(false), _playWarning(0),
+            _playError(0), _recWarning(0), _recError(0), _delayPlayout(0),
+            _delayRecording(0),
+            _AGC(false),
+            _samplingFreqIn(0),
+            _samplingFreqOut(0),
+            _maxSpeakerVolume(0),
+            _loudSpeakerOn(false),
+            _recAudioSource(1), // 1 is AudioSource.MIC which is our default
+            _javaVM(NULL), _javaContext(NULL), _jniEnvPlay(NULL),
+            _jniEnvRec(NULL), _javaScClass(0), _javaScObj(0),
+            _javaPlayBuffer(0), _javaRecBuffer(0), _javaDirectPlayBuffer(NULL),
+            _javaDirectRecBuffer(NULL), _javaMidPlayAudio(0),
+            _javaMidRecAudio(0)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id,
+                 "%s created", __FUNCTION__);
+
+    memset(_recBuffer, 0, sizeof(_recBuffer));
+}
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceAndroidJni - dtor
+// ----------------------------------------------------------------------------
+
+AudioDeviceAndroidJni::~AudioDeviceAndroidJni()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s destroyed", __FUNCTION__);
+
+    Terminate();
+
+    delete &_recStartStopEvent;
+    delete &_playStartStopEvent;
+    delete &_timeEventRec;
+    delete &_timeEventPlay;
+    delete &_critSect;
+}
+
+// ============================================================================
+//                                     API
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  AttachAudioBuffer
+// ----------------------------------------------------------------------------
+
+void AudioDeviceAndroidJni::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer)
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    _ptrAudioBuffer = audioBuffer;
+
+    // inform the AudioBuffer about default settings for this implementation
+    _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
+    _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);
+    _ptrAudioBuffer->SetRecordingChannels(N_REC_CHANNELS);
+    _ptrAudioBuffer->SetPlayoutChannels(N_PLAY_CHANNELS);
+}
+
+// ----------------------------------------------------------------------------
+//  ActiveAudioLayer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::ActiveAudioLayer(
+        AudioDeviceModule::AudioLayer& audioLayer) const
+{
+
+    audioLayer = AudioDeviceModule::kPlatformDefaultAudio;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Init
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::Init()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_initialized)
+    {
+        return 0;
+    }
+
+    _playWarning = 0;
+    _playError = 0;
+    _recWarning = 0;
+    _recError = 0;
+
+    // Init Java member variables
+    // and set up JNI interface to
+    // AudioDeviceAndroid java class
+    if (InitJavaResources() != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: Failed to init Java resources", __FUNCTION__);
+        return -1;
+    }
+
+    // Check the sample rate to be used for playback and recording
+    // and the max playout volume
+    if (InitSampleRate() != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: Failed to init samplerate", __FUNCTION__);
+        return -1;
+    }
+
+    // RECORDING
+    const char* threadName = "webrtc_jni_audio_capture_thread";
+    _ptrThreadRec = ThreadWrapper::CreateThread(RecThreadFunc, this,
+                                                kRealtimePriority, threadName);
+    if (_ptrThreadRec == NULL)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "  failed to create the rec audio thread");
+        return -1;
+    }
+
+    unsigned int threadID(0);
+    if (!_ptrThreadRec->Start(threadID))
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "  failed to start the rec audio thread");
+        delete _ptrThreadRec;
+        _ptrThreadRec = NULL;
+        return -1;
+    }
+    _recThreadID = threadID;
+
+    // PLAYOUT
+    threadName = "webrtc_jni_audio_render_thread";
+    _ptrThreadPlay = ThreadWrapper::CreateThread(PlayThreadFunc, this,
+                                                 kRealtimePriority, threadName);
+    if (_ptrThreadPlay == NULL)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "  failed to create the play audio thread");
+        return -1;
+    }
+
+    threadID = 0;
+    if (!_ptrThreadPlay->Start(threadID))
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "  failed to start the play audio thread");
+        delete _ptrThreadPlay;
+        _ptrThreadPlay = NULL;
+        return -1;
+    }
+    _playThreadID = threadID;
+
+    _initialized = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Terminate
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::Terminate()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_initialized)
+    {
+        return 0;
+    }
+
+    // RECORDING
+    StopRecording();
+    _shutdownRecThread = true;
+    _timeEventRec.Set(); // Release rec thread from waiting state
+    if (_ptrThreadRec)
+    {
+        // First, the thread must detach itself from Java VM
+        _critSect.Leave();
+        if (kEventSignaled != _recStartStopEvent.Wait(5000))
+        {
+            WEBRTC_TRACE(
+                         kTraceError,
+                         kTraceAudioDevice,
+                         _id,
+                         "%s: Recording thread shutdown timed out, cannot "
+                         "terminate thread",
+                         __FUNCTION__);
+            // If we close thread anyway, the app will crash
+            return -1;
+        }
+        _recStartStopEvent.Reset();
+        _critSect.Enter();
+
+        // Close down rec thread
+        ThreadWrapper* tmpThread = _ptrThreadRec;
+        _ptrThreadRec = NULL;
+        _critSect.Leave();
+        tmpThread->SetNotAlive();
+        // Release again, we might have returned to waiting state
+        _timeEventRec.Set();
+        if (tmpThread->Stop())
+        {
+            delete tmpThread;
+            _jniEnvRec = NULL;
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  failed to close down the rec audio thread");
+        }
+        _critSect.Enter();
+
+        _recThreadIsInitialized = false;
+    }
+    _micIsInitialized = false;
+    _recordingDeviceIsSpecified = false;
+
+    // PLAYOUT
+    StopPlayout();
+    _shutdownPlayThread = true;
+    _timeEventPlay.Set(); // Release rec thread from waiting state
+    if (_ptrThreadPlay)
+    {
+        // First, the thread must detach itself from Java VM
+        _critSect.Leave();
+        if (kEventSignaled != _playStartStopEvent.Wait(5000))
+        {
+            WEBRTC_TRACE(
+                         kTraceError,
+                         kTraceAudioDevice,
+                         _id,
+                         "%s: Playout thread shutdown timed out, cannot "
+                         "terminate thread",
+                         __FUNCTION__);
+            // If we close thread anyway, the app will crash
+            return -1;
+        }
+        _playStartStopEvent.Reset();
+        _critSect.Enter();
+
+        // Close down play thread
+        ThreadWrapper* tmpThread = _ptrThreadPlay;
+        _ptrThreadPlay = NULL;
+        _critSect.Leave();
+        tmpThread->SetNotAlive();
+        _timeEventPlay.Set();
+        if (tmpThread->Stop())
+        {
+            delete tmpThread;
+            _jniEnvPlay = NULL;
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  failed to close down the play audio thread");
+        }
+        _critSect.Enter();
+
+        _playThreadIsInitialized = false;
+    }
+    _speakerIsInitialized = false;
+    _playoutDeviceIsSpecified = false;
+
+    // get the JNI env for this thread
+    JNIEnv *env;
+    bool isAttached = false;
+
+    // get the JNI env for this thread
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "%s: Could not attach thread to JVM (%d, %p)",
+                         __FUNCTION__, res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    // Make method IDs and buffer pointers unusable
+    _javaMidPlayAudio = 0;
+    _javaMidRecAudio = 0;
+    _javaDirectPlayBuffer = NULL;
+    _javaDirectRecBuffer = NULL;
+
+    // Delete the references to the java buffers, this allows the
+    // garbage collector to delete them
+    env->DeleteGlobalRef(_javaPlayBuffer);
+    _javaPlayBuffer = 0;
+    env->DeleteGlobalRef(_javaRecBuffer);
+    _javaRecBuffer = 0;
+
+    // Delete the references to the java object and class, this allows the
+    // garbage collector to delete them
+    env->DeleteGlobalRef(_javaScObj);
+    _javaScObj = 0;
+    _javaScClass = 0;
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "%s: Could not detach thread from JVM", __FUNCTION__);
+        }
+    }
+
+    _initialized = false;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Initialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::Initialized() const
+{
+
+    return (_initialized);
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SpeakerIsAvailable(bool& available)
+{
+
+    // We always assume it's available
+    available = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitSpeaker
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::InitSpeaker()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_playing)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  Playout already started");
+        return -1;
+    }
+
+    if (!_playoutDeviceIsSpecified)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Playout device is not specified");
+        return -1;
+    }
+
+    // Nothing needs to be done here, we use a flag to have consistent
+    // behavior with other platforms
+    _speakerIsInitialized = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::MicrophoneIsAvailable(bool& available)
+{
+
+    // We always assume it's available
+    available = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitMicrophone
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::InitMicrophone()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_recording)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  Recording already started");
+        return -1;
+    }
+
+    if (!_recordingDeviceIsSpecified)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Recording device is not specified");
+        return -1;
+    }
+
+    // Nothing needs to be done here, we use a flag to have consistent
+    // behavior with other platforms
+    _micIsInitialized = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::SpeakerIsInitialized() const
+{
+
+    return _speakerIsInitialized;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::MicrophoneIsInitialized() const
+{
+
+    return _micIsInitialized;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolumeIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SpeakerVolumeIsAvailable(bool& available)
+{
+
+    available = true; // We assume we are always be able to set/get volume
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetSpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetSpeakerVolume(WebRtc_UWord32 volume)
+{
+
+    if (!_speakerIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Speaker not initialized");
+        return -1;
+    }
+    if (!_javaContext)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Context is not set");
+        return -1;
+    }
+
+    // get the JNI env for this thread
+    JNIEnv *env;
+    bool isAttached = false;
+
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  Could not attach thread to JVM (%d, %p)", res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    // get the method ID
+    jmethodID setPlayoutVolumeID = env->GetMethodID(_javaScClass,
+                                                    "SetPlayoutVolume", "(I)I");
+
+    // call java sc object method
+    jint res = env->CallIntMethod(_javaScObj, setPlayoutVolumeID,
+                                  static_cast<int> (volume));
+    if (res < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "SetPlayoutVolume failed (%d)", res);
+        return -1;
+    }
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  Could not detach thread from JVM");
+        }
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SpeakerVolume(WebRtc_UWord32& volume) const
+{
+
+    if (!_speakerIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Speaker not initialized");
+        return -1;
+    }
+    if (!_javaContext)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Context is not set");
+        return -1;
+    }
+
+    // get the JNI env for this thread
+    JNIEnv *env;
+    bool isAttached = false;
+
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  Could not attach thread to JVM (%d, %p)", res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    // get the method ID
+    jmethodID getPlayoutVolumeID = env->GetMethodID(_javaScClass,
+                                                    "GetPlayoutVolume", "()I");
+
+    // call java sc object method
+    jint level = env->CallIntMethod(_javaScObj, getPlayoutVolumeID);
+    if (level < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "GetPlayoutVolume failed (%d)", level);
+        return -1;
+    }
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  Could not detach thread from JVM");
+        }
+    }
+
+    volume = static_cast<WebRtc_UWord32> (level);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetWaveOutVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetWaveOutVolume(
+    WebRtc_UWord16 /*volumeLeft*/,
+    WebRtc_UWord16 /*volumeRight*/)
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  WaveOutVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::WaveOutVolume(
+    WebRtc_UWord16& /*volumeLeft*/,
+    WebRtc_UWord16& /*volumeRight*/) const
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MaxSpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::MaxSpeakerVolume(
+        WebRtc_UWord32& maxVolume) const
+{
+
+    if (!_speakerIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Speaker not initialized");
+        return -1;
+    }
+
+    maxVolume = _maxSpeakerVolume;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MinSpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::MinSpeakerVolume(
+        WebRtc_UWord32& minVolume) const
+{
+
+    if (!_speakerIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Speaker not initialized");
+        return -1;
+    }
+
+    minVolume = 0;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolumeStepSize
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SpeakerVolumeStepSize(
+        WebRtc_UWord16& stepSize) const
+{
+
+    if (!_speakerIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Speaker not initialized");
+        return -1;
+    }
+
+    stepSize = 1;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SpeakerMuteIsAvailable(bool& available)
+{
+
+    available = false; // Speaker mute not supported on Android
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetSpeakerMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetSpeakerMute(bool /*enable*/)
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SpeakerMute(bool& /*enabled*/) const
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::MicrophoneMuteIsAvailable(bool& available)
+{
+
+    available = false; // Mic mute not supported on Android
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetMicrophoneMute(bool /*enable*/)
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::MicrophoneMute(bool& /*enabled*/) const
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneBoostIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::MicrophoneBoostIsAvailable(bool& available)
+{
+
+    available = false; // Mic boost not supported on Android
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneBoost
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetMicrophoneBoost(bool enable)
+{
+
+    if (!_micIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Microphone not initialized");
+        return -1;
+    }
+
+    if (enable)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Enabling not available");
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneBoost
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::MicrophoneBoost(bool& enabled) const
+{
+
+    if (!_micIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Microphone not initialized");
+        return -1;
+    }
+
+    enabled = false;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoRecordingIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::StereoRecordingIsAvailable(bool& available)
+{
+
+    available = false; // Stereo recording not supported on Android
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetStereoRecording
+//
+//  Specifies the number of input channels.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetStereoRecording(bool enable)
+{
+
+    if (enable)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Enabling not available");
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::StereoRecording(bool& enabled) const
+{
+
+    enabled = false;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoPlayoutIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::StereoPlayoutIsAvailable(bool& available)
+{
+
+    available = false; // Stereo playout not supported on Android
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetStereoPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetStereoPlayout(bool enable)
+{
+
+    if (enable)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Enabling not available");
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::StereoPlayout(bool& enabled) const
+{
+
+    enabled = false;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetAGC
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetAGC(bool enable)
+{
+
+    _AGC = enable;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  AGC
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::AGC() const
+{
+
+    return _AGC;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolumeIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::MicrophoneVolumeIsAvailable(
+        bool& available)
+{
+
+    available = false; // Mic volume not supported on Android
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetMicrophoneVolume(
+        WebRtc_UWord32 /*volume*/)
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::MicrophoneVolume(
+        WebRtc_UWord32& /*volume*/) const
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MaxMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::MaxMicrophoneVolume(
+        WebRtc_UWord32& /*maxVolume*/) const
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MinMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::MinMicrophoneVolume(
+        WebRtc_UWord32& /*minVolume*/) const
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolumeStepSize
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::MicrophoneVolumeStepSize(
+        WebRtc_UWord16& /*stepSize*/) const
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDevices
+// ----------------------------------------------------------------------------
+
+WebRtc_Word16 AudioDeviceAndroidJni::PlayoutDevices()
+{
+
+    // There is one device only
+    return 1;
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutDevice I (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetPlayoutDevice(WebRtc_UWord16 index)
+{
+
+    if (_playIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Playout already initialized");
+        return -1;
+    }
+
+    if (0 != index)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Device index is out of range [0,0]");
+        return -1;
+    }
+
+    // Do nothing but set a flag, this is to have consistent behavior
+    // with other platforms
+    _playoutDeviceIsSpecified = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutDevice II (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetPlayoutDevice(
+        AudioDeviceModule::WindowsDeviceType /*device*/)
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDeviceName
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::PlayoutDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize])
+{
+
+    if (0 != index)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Device index is out of range [0,0]");
+        return -1;
+    }
+
+    // Return empty string
+    memset(name, 0, kAdmMaxDeviceNameSize);
+
+    if (guid)
+    {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDeviceName
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::RecordingDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize])
+{
+
+    if (0 != index)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Device index is out of range [0,0]");
+        return -1;
+    }
+
+    // Return empty string
+    memset(name, 0, kAdmMaxDeviceNameSize);
+
+    if (guid)
+    {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDevices
+// ----------------------------------------------------------------------------
+
+WebRtc_Word16 AudioDeviceAndroidJni::RecordingDevices()
+{
+
+    // There is one device only
+    return 1;
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingDevice I (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetRecordingDevice(WebRtc_UWord16 index)
+{
+
+    if (_recIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Recording already initialized");
+        return -1;
+    }
+
+    // Recording device index will be used for specifying recording
+    // audio source, allow any value
+    _recAudioSource = index;
+    _recordingDeviceIsSpecified = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingDevice II (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetRecordingDevice(
+        AudioDeviceModule::WindowsDeviceType /*device*/)
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::PlayoutIsAvailable(bool& available)
+{
+
+    available = false;
+
+    // Try to initialize the playout side
+    WebRtc_Word32 res = InitPlayout();
+
+    // Cancel effect of initialization
+    StopPlayout();
+
+    if (res != -1)
+    {
+        available = true;
+    }
+
+    return res;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::RecordingIsAvailable(bool& available)
+{
+
+    available = false;
+
+    // Try to initialize the playout side
+    WebRtc_Word32 res = InitRecording();
+
+    // Cancel effect of initialization
+    StopRecording();
+
+    if (res != -1)
+    {
+        available = true;
+    }
+
+    return res;
+}
+
+// ----------------------------------------------------------------------------
+//  InitPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::InitPlayout()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_initialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Not initialized");
+        return -1;
+    }
+
+    if (_playing)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  Playout already started");
+        return -1;
+    }
+
+    if (!_playoutDeviceIsSpecified)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Playout device is not specified");
+        return -1;
+    }
+
+    if (_playIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Playout already initialized");
+        return 0;
+    }
+
+    // Initialize the speaker
+    if (InitSpeaker() == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  InitSpeaker() failed");
+    }
+
+    // get the JNI env for this thread
+    JNIEnv *env;
+    bool isAttached = false;
+
+    // get the JNI env for this thread
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "attaching");
+
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  Could not attach thread to JVM (%d, %p)", res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    // get the method ID
+    jmethodID initPlaybackID = env->GetMethodID(_javaScClass, "InitPlayback",
+                                                "(I)I");
+
+    int samplingFreq = 44100;
+    if (_samplingFreqOut != 44)
+    {
+        samplingFreq = _samplingFreqOut * 1000;
+    }
+
+    int retVal = -1;
+
+    // Call java sc object method
+    jint res = env->CallIntMethod(_javaScObj, initPlaybackID, samplingFreq);
+    if (res < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "InitPlayback failed (%d)", res);
+    }
+    else
+    {
+        // Set the audio device buffer sampling rate
+        _ptrAudioBuffer->SetPlayoutSampleRate(_samplingFreqOut * 1000);
+        _playIsInitialized = true;
+        retVal = 0;
+    }
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "detaching");
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  Could not detach thread from JVM");
+        }
+    }
+
+    return retVal;
+}
+
+// ----------------------------------------------------------------------------
+//  InitRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::InitRecording()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_initialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Not initialized");
+        return -1;
+    }
+
+    if (_recording)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  Recording already started");
+        return -1;
+    }
+
+    if (!_recordingDeviceIsSpecified)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Recording device is not specified");
+        return -1;
+    }
+
+    if (_recIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Recording already initialized");
+        return 0;
+    }
+
+    // Initialize the microphone
+    if (InitMicrophone() == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  InitMicrophone() failed");
+    }
+
+    // get the JNI env for this thread
+    JNIEnv *env;
+    bool isAttached = false;
+
+    // get the JNI env for this thread
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  Could not attach thread to JVM (%d, %p)", res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    // get the method ID
+    jmethodID initRecordingID = env->GetMethodID(_javaScClass, "InitRecording",
+                                                 "(II)I");
+
+    int samplingFreq = 44100;
+    if (_samplingFreqIn != 44)
+    {
+        samplingFreq = _samplingFreqIn * 1000;
+    }
+
+    int retVal = -1;
+
+    // call java sc object method
+    jint res = env->CallIntMethod(_javaScObj, initRecordingID, _recAudioSource,
+                                  samplingFreq);
+    if (res < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "InitRecording failed (%d)", res);
+    }
+    else
+    {
+        // Set the audio device buffer sampling rate
+        _ptrAudioBuffer->SetRecordingSampleRate(_samplingFreqIn * 1000);
+
+        // the init rec function returns a fixed delay
+        _delayRecording = res / _samplingFreqIn;
+
+        _recIsInitialized = true;
+        retVal = 0;
+    }
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  Could not detach thread from JVM");
+        }
+    }
+
+    return retVal;
+}
+
+// ----------------------------------------------------------------------------
+//  StartRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::StartRecording()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_recIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Recording not initialized");
+        return -1;
+    }
+
+    if (_recording)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Recording already started");
+        return 0;
+    }
+
+    // get the JNI env for this thread
+    JNIEnv *env;
+    bool isAttached = false;
+
+    // get the JNI env for this thread
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  Could not attach thread to JVM (%d, %p)", res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    // get the method ID
+    jmethodID startRecordingID = env->GetMethodID(_javaScClass,
+                                                  "StartRecording", "()I");
+
+    // Call java sc object method
+    jint res = env->CallIntMethod(_javaScObj, startRecordingID);
+    if (res < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "StartRecording failed (%d)", res);
+        return -1;
+    }
+
+    _recWarning = 0;
+    _recError = 0;
+
+    // Signal to recording thread that we want to start
+    _startRec = true;
+    _timeEventRec.Set(); // Release thread from waiting state
+    _critSect.Leave();
+    // Wait for thread to init
+    if (kEventSignaled != _recStartStopEvent.Wait(5000))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Timeout or error starting");
+    }
+    _recStartStopEvent.Reset();
+    _critSect.Enter();
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  Could not detach thread from JVM");
+        }
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StopRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::StopRecording()
+
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_recIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Recording is not initialized");
+        return 0;
+    }
+
+    // make sure we don't start recording (it's asynchronous),
+    // assuming that we are under lock
+    _startRec = false;
+
+    // get the JNI env for this thread
+    JNIEnv *env;
+    bool isAttached = false;
+
+    // get the JNI env for this thread
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  Could not attach thread to JVM (%d, %p)", res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    // get the method ID
+    jmethodID stopRecordingID = env->GetMethodID(_javaScClass, "StopRecording",
+                                                 "()I");
+
+    // Call java sc object method
+    jint res = env->CallIntMethod(_javaScObj, stopRecordingID);
+    if (res < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "StopRecording failed (%d)", res);
+    }
+
+    _recIsInitialized = false;
+    _recording = false;
+    _recWarning = 0;
+    _recError = 0;
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  Could not detach thread from JVM");
+        }
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::RecordingIsInitialized() const
+{
+
+    return _recIsInitialized;
+}
+
+// ----------------------------------------------------------------------------
+//  Recording
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::Recording() const
+{
+
+    return _recording;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::PlayoutIsInitialized() const
+{
+
+    return _playIsInitialized;
+}
+
+// ----------------------------------------------------------------------------
+//  StartPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::StartPlayout()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_playIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Playout not initialized");
+        return -1;
+    }
+
+    if (_playing)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Playout already started");
+        return 0;
+    }
+
+    // get the JNI env for this thread
+    JNIEnv *env;
+    bool isAttached = false;
+
+    // get the JNI env for this thread
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  Could not attach thread to JVM (%d, %p)", res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    // get the method ID
+    jmethodID startPlaybackID = env->GetMethodID(_javaScClass, "StartPlayback",
+                                                 "()I");
+
+    // Call java sc object method
+    jint res = env->CallIntMethod(_javaScObj, startPlaybackID);
+    if (res < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "StartPlayback failed (%d)", res);
+        return -1;
+    }
+
+    _playWarning = 0;
+    _playError = 0;
+
+    // Signal to playout thread that we want to start
+    _startPlay = true;
+    _timeEventPlay.Set(); // Release thread from waiting state
+    _critSect.Leave();
+    // Wait for thread to init
+    if (kEventSignaled != _playStartStopEvent.Wait(5000))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Timeout or error starting");
+    }
+    _playStartStopEvent.Reset();
+    _critSect.Enter();
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  Could not detach thread from JVM");
+        }
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StopPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::StopPlayout()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_playIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Playout is not initialized");
+        return 0;
+    }
+
+    // get the JNI env for this thread
+    JNIEnv *env;
+    bool isAttached = false;
+
+    // get the JNI env for this thread
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  Could not attach thread to JVM (%d, %p)", res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    // get the method ID
+    jmethodID stopPlaybackID = env->GetMethodID(_javaScClass, "StopPlayback",
+                                                "()I");
+
+    // Call java sc object method
+    jint res = env->CallIntMethod(_javaScObj, stopPlaybackID);
+    if (res < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "StopPlayback failed (%d)", res);
+    }
+
+    _playIsInitialized = false;
+    _playing = false;
+    _playWarning = 0;
+    _playError = 0;
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  Could not detach thread from JVM");
+        }
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDelay
+//
+//    Remaining amount of data still in the playout buffer.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::PlayoutDelay(WebRtc_UWord16& delayMS) const
+{
+    delayMS = _delayPlayout;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDelay
+//
+//    Remaining amount of data still in the recording buffer.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::RecordingDelay(
+        WebRtc_UWord16& delayMS) const
+{
+    delayMS = _delayRecording;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Playing
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::Playing() const
+{
+
+    return _playing;
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutBuffer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetPlayoutBuffer(
+        const AudioDeviceModule::BufferType /*type*/,
+        WebRtc_UWord16 /*sizeMS*/)
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutBuffer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::PlayoutBuffer(
+        AudioDeviceModule::BufferType& type,
+        WebRtc_UWord16& sizeMS) const
+{
+
+    type = AudioDeviceModule::kAdaptiveBufferSize;
+    sizeMS = _delayPlayout; // Set to current playout delay
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  CPULoad
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::CPULoad(WebRtc_UWord16& /*load*/) const
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutWarning
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::PlayoutWarning() const
+{
+    return (_playWarning > 0);
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutError
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::PlayoutError() const
+{
+    return (_playError > 0);
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingWarning
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::RecordingWarning() const
+{
+    return (_recWarning > 0);
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingError
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::RecordingError() const
+{
+    return (_recError > 0);
+}
+
+// ----------------------------------------------------------------------------
+//  ClearPlayoutWarning
+// ----------------------------------------------------------------------------
+
+void AudioDeviceAndroidJni::ClearPlayoutWarning()
+{
+    _playWarning = 0;
+}
+
+// ----------------------------------------------------------------------------
+//  ClearPlayoutError
+// ----------------------------------------------------------------------------
+
+void AudioDeviceAndroidJni::ClearPlayoutError()
+{
+    _playError = 0;
+}
+
+// ----------------------------------------------------------------------------
+//  ClearRecordingWarning
+// ----------------------------------------------------------------------------
+
+void AudioDeviceAndroidJni::ClearRecordingWarning()
+{
+    _recWarning = 0;
+}
+
+// ----------------------------------------------------------------------------
+//  ClearRecordingError
+// ----------------------------------------------------------------------------
+
+void AudioDeviceAndroidJni::ClearRecordingError()
+{
+    _recError = 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingSampleRate
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetRecordingSampleRate(
+        const WebRtc_UWord32 samplesPerSec)
+{
+
+    if (samplesPerSec > 48000 || samplesPerSec < 8000)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Invalid sample rate");
+        return -1;
+    }
+
+    // set the recording sample rate to use
+    if (samplesPerSec == 44100)
+    {
+        _samplingFreqIn = 44;
+    }
+    else
+    {
+        _samplingFreqIn = samplesPerSec / 1000;
+    }
+
+    // Update the AudioDeviceBuffer
+    _ptrAudioBuffer->SetRecordingSampleRate(samplesPerSec);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutSampleRate
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetPlayoutSampleRate(
+        const WebRtc_UWord32 samplesPerSec)
+{
+
+    if (samplesPerSec > 48000 || samplesPerSec < 8000)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Invalid sample rate");
+        return -1;
+    }
+
+    // set the playout sample rate to use
+    if (samplesPerSec == 44100)
+    {
+        _samplingFreqOut = 44;
+    }
+    else
+    {
+        _samplingFreqOut = samplesPerSec / 1000;
+    }
+
+    // Update the AudioDeviceBuffer
+    _ptrAudioBuffer->SetPlayoutSampleRate(samplesPerSec);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetLoudspeakerStatus
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::SetLoudspeakerStatus(bool enable)
+{
+
+    if (!_javaContext)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
+                     "  Context is not set");
+        return -1;
+    }
+
+    // get the JNI env for this thread
+    JNIEnv *env;
+    bool isAttached = false;
+
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+
+        // Get the JNI env for this thread
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
+                         "  Could not attach thread to JVM (%d, %p)", res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    // get the method ID
+    jmethodID setPlayoutSpeakerID = env->GetMethodID(_javaScClass,
+                                                     "SetPlayoutSpeaker",
+                                                     "(Z)I");
+
+    // call java sc object method
+    jint res = env->CallIntMethod(_javaScObj, setPlayoutSpeakerID, enable);
+    if (res < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
+                     "  SetPlayoutSpeaker failed (%d)", res);
+        return -1;
+    }
+
+    _loudSpeakerOn = enable;
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceUtility, -1,
+                         "  Could not detach thread from JVM");
+        }
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  GetLoudspeakerStatus
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::GetLoudspeakerStatus(bool& enabled) const
+{
+
+    enabled = _loudSpeakerOn;
+
+    return 0;
+}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+
+// ----------------------------------------------------------------------------
+//  InitJavaResources
+//
+//  Initializes needed Java resources like the JNI interface to
+//  AudioDeviceAndroid.java
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::InitJavaResources()
+{
+    // todo: Check if we already have created the java object
+    _javaVM = globalJvm;
+    _javaContext = globalSndContext;
+    _javaScClass = globalScClass;
+
+    // use the jvm that has been set
+    if (!_javaVM)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: Not a valid Java VM pointer", __FUNCTION__);
+        return -1;
+    }
+
+    // get the JNI env for this thread
+    JNIEnv *env;
+    bool isAttached = false;
+
+    // get the JNI env for this thread
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "%s: Could not attach thread to JVM (%d, %p)",
+                         __FUNCTION__, res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                 "get method id");
+
+    // get the method ID for the void(void) constructor
+    jmethodID cid = env->GetMethodID(_javaScClass, "<init>", "()V");
+    if (cid == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not get constructor ID", __FUNCTION__);
+        return -1; /* exception thrown */
+    }
+
+    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                 "construct object", __FUNCTION__);
+
+    // construct the object
+    jobject javaScObjLocal = env->NewObject(_javaScClass, cid);
+    if (!javaScObjLocal)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "%s: could not create Java sc object", __FUNCTION__);
+        return -1;
+    }
+
+    // create a reference to the object (to tell JNI that we are referencing it
+    // after this function has returned)
+    _javaScObj = env->NewGlobalRef(javaScObjLocal);
+    if (!_javaScObj)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not create Java sc object reference",
+                     __FUNCTION__);
+        return -1;
+    }
+
+    // Delete local object ref, we only use the global ref
+    env->DeleteLocalRef(javaScObjLocal);
+
+    //////////////////////
+    // AUDIO MANAGEMENT
+
+    // This is not mandatory functionality
+    if (_javaContext)
+    {
+        // Get Context field ID
+        jfieldID fidContext = env->GetFieldID(_javaScClass, "_context",
+                                              "Landroid/content/Context;");
+        if (!fidContext)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "%s: could not get Context fid", __FUNCTION__);
+            return -1;
+        }
+
+        // Set the Java application Context so we can use AudioManager
+        // Get Context object and check it
+        jobject javaContext = (jobject) _javaContext;
+        env->SetObjectField(_javaScObj, fidContext, javaContext);
+        javaContext = env->GetObjectField(_javaScObj, fidContext);
+        if (!javaContext)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "%s: could not set Context", __FUNCTION__);
+            return -1;
+        }
+
+        // Delete local object ref
+        env->DeleteLocalRef(javaContext);
+    }
+    else
+    {
+        WEBRTC_TRACE(
+                     kTraceWarning,
+                     kTraceAudioDevice,
+                     _id,
+                     "%s: did not set Context - some functionality is not "
+                     "supported",
+                     __FUNCTION__);
+    }
+
+    /////////////
+    // PLAYOUT
+
+    // Get play buffer field ID
+    jfieldID fidPlayBuffer = env->GetFieldID(_javaScClass, "_playBuffer",
+                                             "Ljava/nio/ByteBuffer;");
+    if (!fidPlayBuffer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not get play buffer fid", __FUNCTION__);
+        return -1;
+    }
+
+    // Get play buffer object
+    jobject javaPlayBufferLocal =
+            env->GetObjectField(_javaScObj, fidPlayBuffer);
+    if (!javaPlayBufferLocal)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not get play buffer", __FUNCTION__);
+        return -1;
+    }
+
+    // Create a global reference to the object (to tell JNI that we are
+    // referencing it after this function has returned)
+    // NOTE: we are referencing it only through the direct buffer (see below)
+    _javaPlayBuffer = env->NewGlobalRef(javaPlayBufferLocal);
+    if (!_javaPlayBuffer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not get play buffer reference", __FUNCTION__);
+        return -1;
+    }
+
+    // Delete local object ref, we only use the global ref
+    env->DeleteLocalRef(javaPlayBufferLocal);
+
+    // Get direct buffer
+    _javaDirectPlayBuffer = env->GetDirectBufferAddress(_javaPlayBuffer);
+    if (!_javaDirectPlayBuffer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not get direct play buffer", __FUNCTION__);
+        return -1;
+    }
+
+    // Get the play audio method ID
+    _javaMidPlayAudio = env->GetMethodID(_javaScClass, "PlayAudio", "(I)I");
+    if (!_javaMidPlayAudio)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not get play audio mid", __FUNCTION__);
+        return -1;
+    }
+
+    //////////////
+    // RECORDING
+
+    // Get rec buffer field ID
+    jfieldID fidRecBuffer = env->GetFieldID(_javaScClass, "_recBuffer",
+                                            "Ljava/nio/ByteBuffer;");
+    if (!fidRecBuffer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not get rec buffer fid", __FUNCTION__);
+        return -1;
+    }
+
+    // Get rec buffer object
+    jobject javaRecBufferLocal = env->GetObjectField(_javaScObj, fidRecBuffer);
+    if (!javaRecBufferLocal)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not get rec buffer", __FUNCTION__);
+        return -1;
+    }
+
+    // Create a global reference to the object (to tell JNI that we are
+    // referencing it after this function has returned)
+    // NOTE: we are referencing it only through the direct buffer (see below)
+    _javaRecBuffer = env->NewGlobalRef(javaRecBufferLocal);
+    if (!_javaRecBuffer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not get rec buffer reference", __FUNCTION__);
+        return -1;
+    }
+
+    // Delete local object ref, we only use the global ref
+    env->DeleteLocalRef(javaRecBufferLocal);
+
+    // Get direct buffer
+    _javaDirectRecBuffer = env->GetDirectBufferAddress(_javaRecBuffer);
+    if (!_javaDirectRecBuffer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not get direct rec buffer", __FUNCTION__);
+        return -1;
+    }
+
+    // Get the rec audio method ID
+    _javaMidRecAudio = env->GetMethodID(_javaScClass, "RecordAudio", "(I)I");
+    if (!_javaMidRecAudio)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: could not get rec audio mid", __FUNCTION__);
+        return -1;
+    }
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "%s: Could not detach thread from JVM", __FUNCTION__);
+        }
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitSampleRate
+//
+//  checks supported sample rates for playback 
+//  and recording and initializes the rates to be used
+//  Also stores the max playout volume returned from InitPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidJni::InitSampleRate()
+{
+    int samplingFreq = 44100;
+    jint res = 0;
+
+    // get the JNI env for this thread
+    JNIEnv *env;
+    bool isAttached = false;
+
+    // get the JNI env for this thread
+    if (_javaVM->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK)
+    {
+        // try to attach the thread and get the env
+        // Attach this thread to JVM
+        jint res = _javaVM->AttachCurrentThread(&env, NULL);
+        if ((res < 0) || !env)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "%s: Could not attach thread to JVM (%d, %p)",
+                         __FUNCTION__, res, env);
+            return -1;
+        }
+        isAttached = true;
+    }
+
+    if (_samplingFreqIn > 0)
+    {
+        // read the configured sampling rate
+        samplingFreq = 44100;
+        if (_samplingFreqIn != 44)
+        {
+            samplingFreq = _samplingFreqIn * 1000;
+        }
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
+                     "  Trying configured recording sampling rate %d",
+                     samplingFreq);
+    }
+
+    // get the method ID
+    jmethodID initRecordingID = env->GetMethodID(_javaScClass, "InitRecording",
+                                                 "(II)I");
+
+    bool keepTrying = true;
+    while (keepTrying)
+    {
+        // call java sc object method
+        res = env->CallIntMethod(_javaScObj, initRecordingID, _recAudioSource,
+                                 samplingFreq);
+        if (res < 0)
+        {
+            switch (samplingFreq)
+            {
+                case 44100:
+                    samplingFreq = 16000;
+                    break;
+                case 16000:
+                    samplingFreq = 8000;
+                    break;
+                default: // error
+                    WEBRTC_TRACE(kTraceError,
+                                 kTraceAudioDevice, _id,
+                                 "%s: InitRecording failed (%d)", __FUNCTION__,
+                                 res);
+                    return -1;
+            }
+        }
+        else
+        {
+            keepTrying = false;
+        }
+    }
+
+    // set the recording sample rate to use
+    if (samplingFreq == 44100)
+    {
+        _samplingFreqIn = 44;
+    }
+    else
+    {
+        _samplingFreqIn = samplingFreq / 1000;
+    }
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
+                 "Recording sample rate set to (%d)", _samplingFreqIn);
+
+    // get the method ID
+    jmethodID stopRecordingID = env->GetMethodID(_javaScClass, "StopRecording",
+                                                 "()I");
+
+    // Call java sc object method
+    res = env->CallIntMethod(_javaScObj, stopRecordingID);
+    if (res < 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "StopRecording failed (%d)", res);
+    }
+
+    // get the method ID
+    jmethodID initPlaybackID = env->GetMethodID(_javaScClass, "InitPlayback",
+                                                "(I)I");
+
+    if (_samplingFreqOut > 0)
+    {
+        // read the configured sampling rate
+        samplingFreq = 44100;
+        if (_samplingFreqOut != 44)
+        {
+            samplingFreq = _samplingFreqOut * 1000;
+        }
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
+                     "  Trying configured playback sampling rate %d",
+                     samplingFreq);
+    }
+    else
+    {
+        // set the preferred sampling frequency
+        if (samplingFreq == 8000)
+        {
+            // try 16000
+            samplingFreq = 16000;
+        }
+        // else use same as recording
+    }
+
+    keepTrying = true;
+    while (keepTrying)
+    {
+        // call java sc object method
+        res = env->CallIntMethod(_javaScObj, initPlaybackID, samplingFreq);
+        if (res < 0)
+        {
+            switch (samplingFreq)
+            {
+                case 44100:
+                    samplingFreq = 16000;
+                    break;
+                case 16000:
+                    samplingFreq = 8000;
+                    break;
+                default: // error
+                    WEBRTC_TRACE(kTraceError,
+                                 kTraceAudioDevice, _id,
+                                 "InitPlayback failed (%d)", res);
+                    return -1;
+            }
+        }
+        else
+        {
+            keepTrying = false;
+        }
+    }
+
+    // Store max playout volume
+    _maxSpeakerVolume = static_cast<WebRtc_UWord32> (res);
+    if (_maxSpeakerVolume < 1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  Did not get valid max speaker volume value (%d)",
+                     _maxSpeakerVolume);
+    }
+
+    // set the playback sample rate to use
+    if (samplingFreq == 44100)
+    {
+        _samplingFreqOut = 44;
+    }
+    else
+    {
+        _samplingFreqOut = samplingFreq / 1000;
+    }
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
+                 "Playback sample rate set to (%d)", _samplingFreqOut);
+
+    // get the method ID
+    jmethodID stopPlaybackID = env->GetMethodID(_javaScClass, "StopPlayback",
+                                                "()I");
+
+    // Call java sc object method
+    res = env->CallIntMethod(_javaScObj, stopPlaybackID);
+    if (res < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "StopPlayback failed (%d)", res);
+    }
+
+    // Detach this thread if it was attached
+    if (isAttached)
+    {
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "%s: Could not detach thread from JVM", __FUNCTION__);
+        }
+    }
+
+    return 0;
+}
+
+// ============================================================================
+//                                  Thread Methods
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  PlayThreadFunc
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::PlayThreadFunc(void* pThis)
+{
+    return (static_cast<AudioDeviceAndroidJni*> (pThis)->PlayThreadProcess());
+}
+
+// ----------------------------------------------------------------------------
+//  RecThreadFunc
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::RecThreadFunc(void* pThis)
+{
+    return (static_cast<AudioDeviceAndroidJni*> (pThis)->RecThreadProcess());
+}
+
+// ----------------------------------------------------------------------------
+//  PlayThreadProcess
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::PlayThreadProcess()
+{
+    if (!_playThreadIsInitialized)
+    {
+        // Do once when thread is started
+
+        // Attach this thread to JVM and get the JNI env for this thread
+        jint res = _javaVM->AttachCurrentThread(&_jniEnvPlay, NULL);
+        if ((res < 0) || !_jniEnvPlay)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,
+                         _id,
+                         "Could not attach playout thread to JVM (%d, %p)",
+                         res, _jniEnvPlay);
+            return false; // Close down thread
+        }
+
+        _playThreadIsInitialized = true;
+    }
+
+    if (!_playing)
+    {
+        switch (_timeEventPlay.Wait(1000))
+        {
+            case kEventSignaled:
+                WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice,
+                             _id, "Playout thread event signal");
+                _timeEventPlay.Reset();
+                break;
+            case kEventError:
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,
+                             _id, "Playout thread event error");
+                return true;
+            case kEventTimeout:
+                WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice,
+                             _id, "Playout thread event timeout");
+                return true;
+        }
+    }
+
+    Lock();
+
+    if (_startPlay)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "_startPlay true, performing initial actions");
+        _startPlay = false;
+        _playing = true;
+        _playWarning = 0;
+        _playError = 0;
+        _playStartStopEvent.Set();
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "Sent signal");
+    }
+
+    if (_playing)
+    {
+        WebRtc_Word8 playBuffer[2 * 480]; // Max 10 ms @ 48 kHz / 16 bit
+        WebRtc_UWord32 samplesToPlay = _samplingFreqOut * 10;
+
+        // ask for new PCM data to be played out using the AudioDeviceBuffer
+        // ensure that this callback is executed without taking the
+        // audio-thread lock
+        UnLock();
+        WebRtc_UWord32 nSamples =
+                _ptrAudioBuffer->RequestPlayoutData(samplesToPlay);
+        Lock();
+
+        // Check again since play may have stopped during unlocked period
+        if (!_playing)
+        {
+            UnLock();
+            return true;
+        }
+
+        nSamples = _ptrAudioBuffer->GetPlayoutData(playBuffer);
+        if (nSamples != samplesToPlay)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  invalid number of output samples(%d)", nSamples);
+            _playWarning = 1;
+        }
+
+        // Copy data to our direct buffer (held by java sc object)
+        // todo: Give _javaDirectPlayBuffer directly to VoE?
+        memcpy(_javaDirectPlayBuffer, playBuffer, nSamples * 2);
+
+        UnLock();
+
+        // Call java sc object method to process data in direct buffer
+        // Will block until data has been put in OS playout buffer
+        // (see java sc class)
+        jint res = _jniEnvPlay->CallIntMethod(_javaScObj, _javaMidPlayAudio,
+                                              2 * nSamples);
+        if (res < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "PlayAudio failed (%d)", res);
+            _playWarning = 1;
+        }
+        else if (res > 0)
+        {
+            // we are not recording and have got a delay value from playback
+            _delayPlayout = res / _samplingFreqOut;
+        }
+        // If 0 is returned we are recording and then play delay is updated
+        // in RecordProcess
+
+        Lock();
+
+    } // _playing
+
+    if (_shutdownPlayThread)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "Detaching thread from Java VM");
+
+        // Detach thread from Java VM
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,
+                         _id, "Could not detach playout thread from JVM");
+            _shutdownPlayThread = false;
+            // If we say OK (i.e. set event) and close thread anyway,
+            // app will crash
+        }
+        else
+        {
+            _jniEnvPlay = NULL;
+            _shutdownPlayThread = false;
+            _playStartStopEvent.Set(); // Signal to Terminate() that we are done
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "Sent signal");
+        }
+    }
+
+    UnLock();
+    return true;
+}
+
+// ----------------------------------------------------------------------------
+//  RecThreadProcess
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceAndroidJni::RecThreadProcess()
+{
+    if (!_recThreadIsInitialized)
+    {
+        // Do once when thread is started
+
+        // Attach this thread to JVM
+        jint res = _javaVM->AttachCurrentThread(&_jniEnvRec, NULL);
+
+        // Get the JNI env for this thread
+        if ((res < 0) || !_jniEnvRec)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,
+                         _id, "Could not attach rec thread to JVM (%d, %p)",
+                         res, _jniEnvRec);
+            return false; // Close down thread
+        }
+
+        _recThreadIsInitialized = true;
+    }
+
+    // just sleep if rec has not started
+    if (!_recording)
+    {
+        switch (_timeEventRec.Wait(1000))
+        {
+            case kEventSignaled:
+                WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice,
+                             _id, "Recording thread event signal");
+                _timeEventRec.Reset();
+                break;
+            case kEventError:
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,
+                             _id, "Recording thread event error");
+                return true;
+            case kEventTimeout:
+                WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice,
+                             _id, "Recording thread event timeout");
+                return true;
+        }
+    }
+
+    Lock();
+
+    if (_startRec)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "_startRec true, performing initial actions");
+        _startRec = false;
+        _recording = true;
+        _recWarning = 0;
+        _recError = 0;
+        _recStartStopEvent.Set();
+    }
+
+    if (_recording)
+    {
+        WebRtc_UWord32 samplesToRec = _samplingFreqIn * 10;
+
+        // Call java sc object method to record data to direct buffer
+        // Will block until data has been recorded (see java sc class),
+        // therefore we must release the lock
+        UnLock();
+        jint playDelayInSamples = _jniEnvRec->CallIntMethod(_javaScObj,
+                                                            _javaMidRecAudio,
+                                                            2 * samplesToRec);
+        if (playDelayInSamples < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "RecordAudio failed");
+            _recWarning = 1;
+        }
+        else
+        {
+            _delayPlayout = playDelayInSamples / _samplingFreqOut;
+        }
+        Lock();
+
+        // Check again since recording may have stopped during Java call
+        if (_recording)
+        {
+//            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+//                         "total delay is %d", msPlayDelay + _delayRecording);
+
+            // Copy data to our direct buffer (held by java sc object)
+            // todo: Give _javaDirectRecBuffer directly to VoE?
+            // todo: Check count <= 480 ?
+            memcpy(_recBuffer, _javaDirectRecBuffer, 2 * samplesToRec);
+
+            // store the recorded buffer (no action will be taken if the
+            // #recorded samples is not a full buffer)
+            _ptrAudioBuffer->SetRecordedBuffer(_recBuffer, samplesToRec);
+
+            // store vqe delay values
+            _ptrAudioBuffer->SetVQEData(_delayPlayout, _delayRecording, 0);
+
+            // deliver recorded samples at specified sample rate, mic level
+            // etc. to the observer using callback
+            UnLock();
+            _ptrAudioBuffer->DeliverRecordedData();
+            Lock();
+        }
+
+    } // _recording
+
+    if (_shutdownRecThread)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "Detaching rec thread from Java VM");
+
+        // Detach thread from Java VM
+        if (_javaVM->DetachCurrentThread() < 0)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,
+                         _id, "Could not detach recording thread from JVM");
+            _shutdownRecThread = false;
+            // If we say OK (i.e. set event) and close thread anyway,
+            // app will crash
+        }
+        else
+        {
+            _jniEnvRec = NULL;
+            _shutdownRecThread = false;
+            _recStartStopEvent.Set(); // Signal to Terminate() that we are done
+
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "Sent signal rec");
+        }
+    }
+
+    UnLock();
+    return true;
+}
+
+} // namespace webrtc
diff --git a/src/modules/audio_device/main/source/android/audio_device_android_jni.h b/src/modules/audio_device/main/source/android/audio_device_android_jni.h
new file mode 100644
index 0000000..e127e26
--- /dev/null
+++ b/src/modules/audio_device/main/source/android/audio_device_android_jni.h
@@ -0,0 +1,268 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  Android audio device interface (JNI/AudioTrack/AudioRecord usage)
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_ANDROID_JNI_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_ANDROID_JNI_H
+
+#include "audio_device_generic.h"
+#include "critical_section_wrapper.h"
+
+#include <jni.h> // For accessing AudioDeviceAndroid java class
+
+namespace webrtc
+{
+class EventWrapper;
+
+const WebRtc_UWord32 N_REC_SAMPLES_PER_SEC = 44000; // Default is 44.1 kHz
+const WebRtc_UWord32 N_PLAY_SAMPLES_PER_SEC = 44000; // Default is 44.1 kHz
+
+const WebRtc_UWord32 N_REC_CHANNELS = 1; // default is mono recording
+const WebRtc_UWord32 N_PLAY_CHANNELS = 1; // default is mono playout
+
+const WebRtc_UWord32 REC_BUF_SIZE_IN_SAMPLES = 480; // Handle max 10 ms @ 48 kHz
+
+
+WebRtc_Word32 SetAndroidAudioDeviceObjects(void* javaVM, void* env,
+                                           void* context);
+
+class ThreadWrapper;
+
+class AudioDeviceAndroidJni: public AudioDeviceGeneric
+{
+public:
+    AudioDeviceAndroidJni(const WebRtc_Word32 id);
+    ~AudioDeviceAndroidJni();
+
+    virtual WebRtc_Word32 ActiveAudioLayer(
+        AudioDeviceModule::AudioLayer& audioLayer) const;
+
+    virtual WebRtc_Word32 Init();
+    virtual WebRtc_Word32 Terminate();
+    virtual bool Initialized() const;
+
+    virtual WebRtc_Word16 PlayoutDevices();
+    virtual WebRtc_Word16 RecordingDevices();
+    virtual WebRtc_Word32 PlayoutDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize]);
+    virtual WebRtc_Word32 RecordingDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize]);
+
+    virtual WebRtc_Word32 SetPlayoutDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetPlayoutDevice(
+        AudioDeviceModule::WindowsDeviceType device);
+    virtual WebRtc_Word32 SetRecordingDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetRecordingDevice(
+        AudioDeviceModule::WindowsDeviceType device);
+
+    virtual WebRtc_Word32 PlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitPlayout();
+    virtual bool PlayoutIsInitialized() const;
+    virtual WebRtc_Word32 RecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitRecording();
+    virtual bool RecordingIsInitialized() const;
+
+    virtual WebRtc_Word32 StartPlayout();
+    virtual WebRtc_Word32 StopPlayout();
+    virtual bool Playing() const;
+    virtual WebRtc_Word32 StartRecording();
+    virtual WebRtc_Word32 StopRecording();
+    virtual bool Recording() const;
+
+    virtual WebRtc_Word32 SetAGC(bool enable);
+    virtual bool AGC() const;
+
+    virtual WebRtc_Word32 SetWaveOutVolume(WebRtc_UWord16 volumeLeft,
+                                           WebRtc_UWord16 volumeRight);
+    virtual WebRtc_Word32 WaveOutVolume(WebRtc_UWord16& volumeLeft,
+                                        WebRtc_UWord16& volumeRight) const;
+
+    virtual WebRtc_Word32 SpeakerIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitSpeaker();
+    virtual bool SpeakerIsInitialized() const;
+    virtual WebRtc_Word32 MicrophoneIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitMicrophone();
+    virtual bool MicrophoneIsInitialized() const;
+
+    virtual WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
+
+    virtual WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize)
+        const;
+
+    virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerMute(bool enable);
+    virtual WebRtc_Word32 SpeakerMute(bool& enabled) const;
+
+    virtual WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneMute(bool enable);
+    virtual WebRtc_Word32 MicrophoneMute(bool& enabled) const;
+
+    virtual WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    virtual WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
+
+    virtual WebRtc_Word32 StereoPlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoPlayout(bool enable);
+    virtual WebRtc_Word32 StereoPlayout(bool& enabled) const;
+    virtual WebRtc_Word32 StereoRecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoRecording(bool enable);
+    virtual WebRtc_Word32 StereoRecording(bool& enabled) const;
+
+    virtual WebRtc_Word32 SetPlayoutBuffer(
+        const AudioDeviceModule::BufferType type, WebRtc_UWord16 sizeMS);
+    virtual WebRtc_Word32 PlayoutBuffer(
+        AudioDeviceModule::BufferType& type, WebRtc_UWord16& sizeMS) const;
+    virtual WebRtc_Word32 PlayoutDelay(WebRtc_UWord16& delayMS) const;
+    virtual WebRtc_Word32 RecordingDelay(WebRtc_UWord16& delayMS) const;
+
+    virtual WebRtc_Word32 CPULoad(WebRtc_UWord16& load) const;
+
+    virtual bool PlayoutWarning() const;
+    virtual bool PlayoutError() const;
+    virtual bool RecordingWarning() const;
+    virtual bool RecordingError() const;
+    virtual void ClearPlayoutWarning();
+    virtual void ClearPlayoutError();
+    virtual void ClearRecordingWarning();
+    virtual void ClearRecordingError();
+
+    virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
+
+    virtual WebRtc_Word32 SetRecordingSampleRate(
+        const WebRtc_UWord32 samplesPerSec);
+    virtual WebRtc_Word32 SetPlayoutSampleRate(
+        const WebRtc_UWord32 samplesPerSec);
+
+    virtual WebRtc_Word32 SetLoudspeakerStatus(bool enable);
+    virtual WebRtc_Word32 GetLoudspeakerStatus(bool& enable) const;
+
+private:
+    // Lock
+    void Lock()
+    {
+        _critSect.Enter();
+    };
+    void UnLock()
+    {
+        _critSect.Leave();
+    };
+
+    // Init
+    WebRtc_Word32 InitJavaResources();
+    WebRtc_Word32 InitSampleRate();
+
+    // Threads
+    static bool RecThreadFunc(void*);
+    static bool PlayThreadFunc(void*);
+    bool RecThreadProcess();
+    bool PlayThreadProcess();
+
+    // Misc
+    AudioDeviceBuffer* _ptrAudioBuffer;
+    CriticalSectionWrapper& _critSect;
+    WebRtc_Word32 _id;
+
+    // Events
+    EventWrapper& _timeEventRec;
+    EventWrapper& _timeEventPlay;
+    EventWrapper& _recStartStopEvent;
+    EventWrapper& _playStartStopEvent;
+
+    // Threads
+    ThreadWrapper* _ptrThreadPlay;
+    ThreadWrapper* _ptrThreadRec;
+    WebRtc_UWord32 _recThreadID;
+    WebRtc_UWord32 _playThreadID;
+    bool _playThreadIsInitialized;
+    bool _recThreadIsInitialized;
+    bool _shutdownPlayThread;
+    bool _shutdownRecThread;
+
+    // Rec buffer
+    WebRtc_Word8 _recBuffer[2 * REC_BUF_SIZE_IN_SAMPLES];
+
+    // States
+    bool _recordingDeviceIsSpecified;
+    bool _playoutDeviceIsSpecified;
+    bool _initialized;
+    bool _recording;
+    bool _playing;
+    bool _recIsInitialized;
+    bool _playIsInitialized;
+    bool _micIsInitialized;
+    bool _speakerIsInitialized;
+
+    // Signal flags to threads
+    bool _startRec;
+    bool _stopRec;
+    bool _startPlay;
+    bool _stopPlay;
+
+    // Warnings and errors
+    WebRtc_UWord16 _playWarning;
+    WebRtc_UWord16 _playError;
+    WebRtc_UWord16 _recWarning;
+    WebRtc_UWord16 _recError;
+
+    // Delay
+    WebRtc_UWord16 _delayPlayout;
+    WebRtc_UWord16 _delayRecording;
+
+    // AGC state
+    bool _AGC;
+
+    // Stored device properties
+    WebRtc_UWord16 _samplingFreqIn; // Sampling frequency for Mic
+    WebRtc_UWord16 _samplingFreqOut; // Sampling frequency for Speaker
+    WebRtc_UWord32 _maxSpeakerVolume; // The maximum speaker volume value
+    bool _loudSpeakerOn;
+    // Stores the desired audio source to use, set in SetRecordingDevice
+    int _recAudioSource;
+
+    // JNI and Java
+    JavaVM* _javaVM; // denotes a Java VM
+    jobject _javaContext; // the application context
+
+    JNIEnv* _jniEnvPlay; // The JNI env for playout thread
+    JNIEnv* _jniEnvRec; // The JNI env for recording thread
+
+    jclass _javaScClass; // AudioDeviceAndroid class
+    jobject _javaScObj; // AudioDeviceAndroid object
+
+    // The play buffer field in AudioDeviceAndroid object (global ref)
+    jobject _javaPlayBuffer;
+    // The rec buffer field in AudioDeviceAndroid object (global ref)
+    jobject _javaRecBuffer;
+    void* _javaDirectPlayBuffer; // Direct buffer pointer to play buffer
+    void* _javaDirectRecBuffer; // Direct buffer pointer to rec buffer
+    jmethodID _javaMidPlayAudio; // Method ID of play in AudioDeviceAndroid
+    jmethodID _javaMidRecAudio; // Method ID of rec in AudioDeviceAndroid
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_ANDROID_JNI_H
diff --git a/src/modules/audio_device/main/source/android/audio_device_android_opensles.cc b/src/modules/audio_device/main/source/android/audio_device_android_opensles.cc
new file mode 100644
index 0000000..b0a6992
--- /dev/null
+++ b/src/modules/audio_device/main/source/android/audio_device_android_opensles.cc
@@ -0,0 +1,1723 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <time.h>
+#include <sys/time.h>
+
+#include "audio_device_utility.h"
+#include "audio_device_android_opensles.h"
+#include "audio_device_config.h"
+
+#include "trace.h"
+#include "thread_wrapper.h"
+#include "event_wrapper.h"
+
+#ifdef WEBRTC_ANDROID_DEBUG
+#include <android/log.h>
+#define WEBRTC_TRACE(a,b,c,...)  __android_log_print(                  \
+           ANDROID_LOG_DEBUG, "WebRTC ADM OpenSLES", __VA_ARGS__)
+#endif
+
+namespace webrtc {
+
+// ============================================================================
+//                            Construction & Destruction
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceAndroidOpenSLES - ctor
+// ----------------------------------------------------------------------------
+
+AudioDeviceAndroidOpenSLES::AudioDeviceAndroidOpenSLES(const WebRtc_Word32 id) :
+    _ptrAudioBuffer(NULL),
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _id(id),
+    _slEngineObject(NULL),
+    _slPlayer(NULL),
+    _slEngine(NULL),
+    _slPlayerPlay(NULL),
+    _slOutputMixObject(NULL),
+    _slSpeakerVolume(NULL),
+    _slRecorder(NULL),
+    _slRecorderRecord(NULL),
+    _slAudioIODeviceCapabilities(NULL),
+    _slRecorderSimpleBufferQueue(NULL),
+    _slMicVolume(NULL),
+    _micDeviceId(0),
+    _recQueueSeq(0),
+    _timeEventRec(*EventWrapper::Create()),
+    _ptrThreadRec(NULL),
+    _recThreadID(0),
+    _playQueueSeq(0),
+    _recCurrentSeq(0),
+    _recBufferTotalSize(0),
+    _recordingDeviceIsSpecified(false),
+    _playoutDeviceIsSpecified(false),
+    _initialized(false),
+    _recording(false),
+    _playing(false),
+    _recIsInitialized(false),
+    _playIsInitialized(false),
+    _micIsInitialized(false),
+    _speakerIsInitialized(false),
+    _playWarning(0),
+    _playError(0),
+    _recWarning(0),
+    _recError(0),
+    _playoutDelay(0),
+    _recordingDelay(0),
+    _AGC(false),
+    _adbSampleRate(0),
+    _samplingRateIn(SL_SAMPLINGRATE_16),
+    _samplingRateOut(SL_SAMPLINGRATE_16),
+    _maxSpeakerVolume(0),
+    _minSpeakerVolume(0),
+    _loudSpeakerOn(false) {
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id, "%s created",
+                 __FUNCTION__);
+    memset(_playQueueBuffer, 0, sizeof(_playQueueBuffer));
+    memset(_recQueueBuffer, 0, sizeof(_recQueueBuffer));
+    memset(_recBuffer, 0, sizeof(_recBuffer));
+    memset(_recLength, 0, sizeof(_recLength));
+    memset(_recSeqNumber, 0, sizeof(_recSeqNumber));
+}
+
+AudioDeviceAndroidOpenSLES::~AudioDeviceAndroidOpenSLES() {
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s destroyed",
+                 __FUNCTION__);
+
+    Terminate();
+
+    delete &_timeEventRec;
+    delete &_critSect;
+}
+
+// ============================================================================
+//                                     API
+// ============================================================================
+
+void AudioDeviceAndroidOpenSLES::AttachAudioBuffer(
+    AudioDeviceBuffer* audioBuffer) {
+
+    CriticalSectionScoped lock(&_critSect);
+
+    _ptrAudioBuffer = audioBuffer;
+
+    // inform the AudioBuffer about default settings for this implementation
+    _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
+    _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);
+    _ptrAudioBuffer->SetRecordingChannels(N_REC_CHANNELS);
+    _ptrAudioBuffer->SetPlayoutChannels(N_PLAY_CHANNELS);
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::ActiveAudioLayer(
+    AudioDeviceModule::AudioLayer& audioLayer) const {
+
+    audioLayer = AudioDeviceModule::kPlatformDefaultAudio;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::Init() {
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_initialized) {
+        return 0;
+    }
+
+    _playWarning = 0;
+    _playError = 0;
+    _recWarning = 0;
+    _recError = 0;
+
+    SLEngineOption EngineOption[] = {
+      { (SLuint32) SL_ENGINEOPTION_THREADSAFE, (SLuint32) SL_BOOLEAN_TRUE },
+    };
+    WebRtc_Word32 res = slCreateEngine(&_slEngineObject, 1, EngineOption, 0,
+                                       NULL, NULL);
+    //WebRtc_Word32 res = slCreateEngine( &_slEngineObject, 0, NULL, 0, NULL,
+    //    NULL);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to create SL Engine Object");
+        return -1;
+    }
+    /* Realizing the SL Engine in synchronous mode. */
+    if ((*_slEngineObject)->Realize(_slEngineObject, SL_BOOLEAN_FALSE)
+            != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to Realize SL Engine");
+        return -1;
+    }
+
+    if ((*_slEngineObject)->GetInterface(_slEngineObject, SL_IID_ENGINE,
+                                         (void*) &_slEngine)
+            != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to get SL Engine interface");
+        return -1;
+    }
+
+    // Check the sample rate to be used for playback and recording
+    if (InitSampleRate() != 0) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "%s: Failed to init samplerate", __FUNCTION__);
+        return -1;
+    }
+
+    // Set the audio device buffer sampling rate, we assume we get the same
+    // for play and record
+    if (_ptrAudioBuffer->SetRecordingSampleRate(_adbSampleRate) < 0) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Could not set audio device buffer recording "
+                         "sampling rate (%d)", _adbSampleRate);
+    }
+    if (_ptrAudioBuffer->SetPlayoutSampleRate(_adbSampleRate) < 0) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Could not set audio device buffer playout sampling "
+                         "rate (%d)", _adbSampleRate);
+    }
+
+    _initialized = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::Terminate() {
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_initialized) {
+        return 0;
+    }
+
+    // RECORDING
+    StopRecording();
+
+    _micIsInitialized = false;
+    _recordingDeviceIsSpecified = false;
+
+    // PLAYOUT
+    StopPlayout();
+
+    if (_slEngineObject != NULL) {
+        (*_slEngineObject)->Destroy(_slEngineObject);
+        _slEngineObject = NULL;
+        _slEngine = NULL;
+    }
+
+    _initialized = false;
+
+    return 0;
+}
+
+bool AudioDeviceAndroidOpenSLES::Initialized() const {
+
+    return (_initialized);
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SpeakerIsAvailable(bool& available) {
+
+    // We always assume it's available
+    available = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::InitSpeaker() {
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_playing) {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  Playout already started");
+        return -1;
+    }
+
+    if (!_playoutDeviceIsSpecified) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Playout device is not specified");
+        return -1;
+    }
+
+    // Nothing needs to be done here, we use a flag to have consistent
+    // behavior with other platforms
+    _speakerIsInitialized = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MicrophoneIsAvailable(
+    bool& available) {
+
+    // We always assume it's available
+    available = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::InitMicrophone() {
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_recording) {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  Recording already started");
+        return -1;
+    }
+
+    if (!_recordingDeviceIsSpecified) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Recording device is not specified");
+        return -1;
+    }
+
+    // Nothing needs to be done here, we use a flag to have consistent
+    // behavior with other platforms
+    _micIsInitialized = true;
+
+    return 0;
+}
+
+bool AudioDeviceAndroidOpenSLES::SpeakerIsInitialized() const {
+
+    return _speakerIsInitialized;
+}
+
+bool AudioDeviceAndroidOpenSLES::MicrophoneIsInitialized() const {
+
+    return _micIsInitialized;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SpeakerVolumeIsAvailable(
+    bool& available) {
+
+    available = true; // We assume we are always be able to set/get volume
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetSpeakerVolume(
+    WebRtc_UWord32 volume) {
+
+    if (!_speakerIsInitialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Speaker not initialized");
+        return -1;
+    }
+
+    if (_slEngineObject == NULL) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "SetSpeakerVolume, SL Engine object doesnt exist");
+        return -1;
+    }
+
+    if (_slEngine == NULL) {
+        // Get the SL Engine Interface which is implicit
+        if ((*_slEngineObject)->GetInterface(_slEngineObject, SL_IID_ENGINE,
+                                             (void*) &_slEngine)
+                != SL_RESULT_SUCCESS) {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to GetInterface SL Engine Interface");
+            return -1;
+        }
+    }
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SpeakerVolume(
+    WebRtc_UWord32& volume) const {
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetWaveOutVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetWaveOutVolume(
+    WebRtc_UWord16 /*volumeLeft*/,
+    WebRtc_UWord16 /*volumeRight*/) {
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  WaveOutVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::WaveOutVolume(
+    WebRtc_UWord16& /*volumeLeft*/,
+    WebRtc_UWord16& /*volumeRight*/) const {
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MaxSpeakerVolume(
+    WebRtc_UWord32& maxVolume) const {
+
+    if (!_speakerIsInitialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Speaker not initialized");
+        return -1;
+    }
+
+    maxVolume = _maxSpeakerVolume;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MinSpeakerVolume(
+    WebRtc_UWord32& minVolume) const {
+
+    if (!_speakerIsInitialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Speaker not initialized");
+        return -1;//
+    }
+
+    minVolume = _minSpeakerVolume;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SpeakerVolumeStepSize(
+    WebRtc_UWord16& stepSize) const {
+
+    if (!_speakerIsInitialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Speaker not initialized");
+        return -1;
+    }
+    stepSize = 1;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SpeakerMuteIsAvailable(
+    bool& available) {
+
+    available = false; // Speaker mute not supported on Android
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetSpeakerMute(bool /*enable*/) {
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SpeakerMute(bool& /*enabled*/) const {
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MicrophoneMuteIsAvailable(
+    bool& available) {
+
+    available = false; // Mic mute not supported on Android
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetMicrophoneMute(bool /*enable*/) {
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MicrophoneMute(
+    bool& /*enabled*/) const {
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MicrophoneBoostIsAvailable(
+    bool& available) {
+
+    available = false; // Mic boost not supported on Android
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetMicrophoneBoost(bool enable) {
+
+    if (!_micIsInitialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Microphone not initialized");
+        return -1;
+    }
+
+    if (enable) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Enabling not available");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MicrophoneBoost(bool& enabled) const {
+
+    if (!_micIsInitialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Microphone not initialized");
+        return -1;
+    }
+
+    enabled = false;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::StereoRecordingIsAvailable(
+    bool& available) {
+
+    available = false; // Stereo recording not supported on Android
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetStereoRecording(bool enable) {
+
+    if (enable) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Enabling not available");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::StereoRecording(bool& enabled) const {
+
+    enabled = false;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::StereoPlayoutIsAvailable(
+    bool& available) {
+
+    available = false; // Stereo playout not supported on Android
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetStereoPlayout(bool enable) {
+
+    if (enable) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Enabling not available");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::StereoPlayout(bool& enabled) const {
+
+    enabled = false;
+
+    return 0;
+}
+
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetAGC(bool enable) {
+
+    _AGC = enable;
+
+    return 0;
+}
+
+bool AudioDeviceAndroidOpenSLES::AGC() const {
+
+    return _AGC;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MicrophoneVolumeIsAvailable(
+    bool& available) {
+
+    available = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetMicrophoneVolume(
+    WebRtc_UWord32 volume) {
+
+    if (_slEngineObject == NULL) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "SetMicrophoneVolume, SL Engine Object doesnt exist");
+        return -1;
+    }
+
+    /* Get the optional DEVICE VOLUME interface from the engine */
+    if (_slMicVolume == NULL) {
+        // Get the optional DEVICE VOLUME interface from the engine
+        if ((*_slEngineObject)->GetInterface(_slEngineObject,
+                                             SL_IID_DEVICEVOLUME,
+                                             (void*) &_slMicVolume)
+                != SL_RESULT_SUCCESS) {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to create Output Mix object");
+        }
+    }
+
+    if (_slMicVolume != NULL) {
+        WebRtc_Word32 vol(0);
+        vol = ((volume * (_maxSpeakerVolume - _minSpeakerVolume) +
+                (int) (255 / 2)) / (255)) + _minSpeakerVolume;
+        if ((*_slMicVolume)->SetVolume(_slMicVolume, _micDeviceId, vol)
+                != SL_RESULT_SUCCESS) {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to create Output Mix object");
+        }
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MicrophoneVolume(
+    WebRtc_UWord32& /*volume*/) const {
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MaxMicrophoneVolume(
+    WebRtc_UWord32& /*maxVolume*/) const {
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MinMicrophoneVolume(
+    WebRtc_UWord32& minVolume) const {
+
+    minVolume = 0;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::MicrophoneVolumeStepSize(
+    WebRtc_UWord16& stepSize) const {
+
+    stepSize = 1;
+    return 0;
+}
+
+WebRtc_Word16 AudioDeviceAndroidOpenSLES::PlayoutDevices() {
+
+    return 1;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetPlayoutDevice(
+    WebRtc_UWord16 index) {
+
+    if (_playIsInitialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Playout already initialized");
+        return -1;
+    }
+
+    if (0 != index) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Device index is out of range [0,0]");
+        return -1;
+    }
+
+    // Do nothing but set a flag, this is to have consistent behaviour
+    // with other platforms
+    _playoutDeviceIsSpecified = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetPlayoutDevice(
+    AudioDeviceModule::WindowsDeviceType /*device*/) {
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::PlayoutDeviceName(
+    WebRtc_UWord16 index,
+    char name[kAdmMaxDeviceNameSize],
+    char guid[kAdmMaxGuidSize]) {
+
+    if (0 != index) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Device index is out of range [0,0]");
+        return -1;
+    }
+
+    // Return empty string
+    memset(name, 0, kAdmMaxDeviceNameSize);
+
+    if (guid) {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::RecordingDeviceName(
+    WebRtc_UWord16 index,
+    char name[kAdmMaxDeviceNameSize],
+    char guid[kAdmMaxGuidSize]) {
+
+    if (0 != index) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Device index is out of range [0,0]");
+        return -1;
+    }
+
+    // Return empty string
+    memset(name, 0, kAdmMaxDeviceNameSize);
+
+    if (guid) {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+
+    return 0;
+}
+
+WebRtc_Word16 AudioDeviceAndroidOpenSLES::RecordingDevices() {
+
+    return 1;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetRecordingDevice(
+    WebRtc_UWord16 index) {
+
+    if (_recIsInitialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Recording already initialized");
+        return -1;
+    }
+
+    if (0 != index) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Device index is out of range [0,0]");
+        return -1;
+    }
+
+    // Do nothing but set a flag, this is to have consistent behaviour with
+    // other platforms
+    _recordingDeviceIsSpecified = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetRecordingDevice(
+    AudioDeviceModule::WindowsDeviceType /*device*/) {
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::PlayoutIsAvailable(bool& available) {
+
+    available = false;
+
+    // Try to initialize the playout side
+    WebRtc_Word32 res = InitPlayout();
+
+    // Cancel effect of initialization
+    StopPlayout();
+
+    if (res != -1) {
+        available = true;
+    }
+
+    return res;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::RecordingIsAvailable(
+    bool& available) {
+
+    available = false;
+
+    // Try to initialize the playout side
+    WebRtc_Word32 res = InitRecording();
+
+    // Cancel effect of initialization
+    StopRecording();
+
+    if (res != -1) {
+        available = true;
+    }
+
+    return res;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::InitPlayout() {
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_initialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "  Not initialized");
+        return -1;
+    }
+
+    if (_playing) {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  Playout already started");
+        return -1;
+    }
+
+    if (!_playoutDeviceIsSpecified) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Playout device is not specified");
+        return -1;
+    }
+
+    if (_playIsInitialized) {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Playout already initialized");
+        return 0;
+    }
+
+    // Initialize the speaker
+    if (InitSpeaker() == -1) {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  InitSpeaker() failed");
+    }
+
+    if (_slEngineObject == NULL || _slEngine == NULL) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  SLObject or Engiine is NULL");
+        return -1;
+    }
+
+    WebRtc_Word32 res = -1;
+    SLDataFormat_PCM pcm;
+    SLDataSource audioSource;
+    SLDataLocator_AndroidSimpleBufferQueue simpleBufferQueue;
+    SLDataSink audioSink;
+    SLDataLocator_OutputMix locator_outputmix;
+
+    // Create Output Mix object to be used by player
+    SLInterfaceID ids[N_MAX_INTERFACES];
+    SLboolean req[N_MAX_INTERFACES];
+    for (unsigned int i = 0; i < N_MAX_INTERFACES; i++) {
+        ids[i] = SL_IID_NULL;
+        req[i] = SL_BOOLEAN_FALSE;
+    }
+    ids[0] = SL_IID_ENVIRONMENTALREVERB;
+    res = (*_slEngine)->CreateOutputMix(_slEngine, &_slOutputMixObject, 1, ids,
+                                        req);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to get SL Output Mix object");
+        return -1;
+    }
+    // Realizing the Output Mix object in synchronous mode.
+    res = (*_slOutputMixObject)->Realize(_slOutputMixObject, SL_BOOLEAN_FALSE);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to realize SL Output Mix object");
+        return -1;
+    }
+
+    // The code below can be moved to startplayout instead
+    /* Setup the data source structure for the buffer queue */
+    simpleBufferQueue.locatorType = SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE;
+    /* Two buffers in our buffer queue, to have low latency*/
+    simpleBufferQueue.numBuffers = N_PLAY_QUEUE_BUFFERS;
+    // TODO(xians), figure out if we should support stereo playout for android
+    /* Setup the format of the content in the buffer queue */
+    pcm.formatType = SL_DATAFORMAT_PCM;
+    pcm.numChannels = 1;
+    // _samplingRateOut is initilized in InitSampleRate()
+    pcm.samplesPerSec = SL_SAMPLINGRATE_16;
+    pcm.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16;
+    pcm.containerSize = SL_PCMSAMPLEFORMAT_FIXED_16;
+    pcm.channelMask = SL_SPEAKER_FRONT_CENTER;
+    pcm.endianness = SL_BYTEORDER_LITTLEENDIAN;
+    audioSource.pFormat = (void *) &pcm;
+    audioSource.pLocator = (void *) &simpleBufferQueue;
+    /* Setup the data sink structure */
+    locator_outputmix.locatorType = SL_DATALOCATOR_OUTPUTMIX;
+    locator_outputmix.outputMix = _slOutputMixObject;
+    audioSink.pLocator = (void *) &locator_outputmix;
+    audioSink.pFormat = NULL;
+
+    // Set arrays required[] and iidArray[] for SEEK interface
+    // (PlayItf is implicit)
+    ids[0] = SL_IID_BUFFERQUEUE;
+    ids[1] = SL_IID_EFFECTSEND;
+    req[0] = SL_BOOLEAN_TRUE;
+    req[1] = SL_BOOLEAN_TRUE;
+    // Create the music player
+    res = (*_slEngine)->CreateAudioPlayer(_slEngine, &_slPlayer, &audioSource,
+                                          &audioSink, 2, ids, req);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to create Audio Player");
+        return -1;
+    }
+
+    // Realizing the player in synchronous mode.
+    res = (*_slPlayer)->Realize(_slPlayer, SL_BOOLEAN_FALSE);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to realize the player");
+        return -1;
+    }
+    // Get seek and play interfaces
+    res = (*_slPlayer)->GetInterface(_slPlayer, SL_IID_PLAY,
+                                     (void*) &_slPlayerPlay);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to get Player interface");
+        return -1;
+    }
+    res = (*_slPlayer)->GetInterface(_slPlayer, SL_IID_BUFFERQUEUE,
+                                     (void*) &_slPlayerSimpleBufferQueue);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to get Player Simple Buffer Queue interface");
+        return -1;
+    }
+
+    // Setup to receive buffer queue event callbacks
+    res = (*_slPlayerSimpleBufferQueue)->RegisterCallback(
+        _slPlayerSimpleBufferQueue,
+        PlayerSimpleBufferQueueCallback,
+        this);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to register Player Callback");
+        return -1;
+    }
+    _playIsInitialized = true;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::InitRecording() {
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_initialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "  Not initialized");
+        return -1;
+    }
+
+    if (_recording) {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  Recording already started");
+        return -1;
+    }
+
+    if (!_recordingDeviceIsSpecified) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Recording device is not specified");
+        return -1;
+    }
+
+    if (_recIsInitialized) {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Recording already initialized");
+        return 0;
+    }
+
+    // Initialize the microphone
+    if (InitMicrophone() == -1) {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  InitMicrophone() failed");
+    }
+
+    if (_slEngineObject == NULL || _slEngine == NULL) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Recording object is NULL");
+        return -1;
+    }
+
+    WebRtc_Word32 res(-1);
+    SLDataSource audioSource;
+    SLDataLocator_IODevice micLocator;
+    SLDataSink audioSink;
+    SLDataFormat_PCM pcm;
+    SLDataLocator_AndroidSimpleBufferQueue simpleBufferQueue;
+
+    // Setup the data source structure
+    micLocator.locatorType = SL_DATALOCATOR_IODEVICE;
+    micLocator.deviceType = SL_IODEVICE_AUDIOINPUT;
+    micLocator.deviceID = SL_DEFAULTDEVICEID_AUDIOINPUT; //micDeviceID;
+    micLocator.device = NULL;
+    audioSource.pLocator = (void *) &micLocator;
+    audioSource.pFormat = NULL;
+
+    /* Setup the data source structure for the buffer queue */
+    simpleBufferQueue.locatorType = SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE;
+    simpleBufferQueue.numBuffers = N_REC_QUEUE_BUFFERS;
+    /* Setup the format of the content in the buffer queue */
+    pcm.formatType = SL_DATAFORMAT_PCM;
+    pcm.numChannels = 1;
+    // _samplingRateIn is initialized in initSampleRate()
+    pcm.samplesPerSec = SL_SAMPLINGRATE_16;
+    pcm.bitsPerSample = SL_PCMSAMPLEFORMAT_FIXED_16;
+    pcm.containerSize = 16;
+    pcm.channelMask = SL_SPEAKER_FRONT_CENTER;
+    pcm.endianness = SL_BYTEORDER_LITTLEENDIAN;
+    audioSink.pFormat = (void *) &pcm;
+    audioSink.pLocator = (void *) &simpleBufferQueue;
+
+    const SLInterfaceID id[1] = { SL_IID_ANDROIDSIMPLEBUFFERQUEUE };
+    const SLboolean req[1] = { SL_BOOLEAN_TRUE };
+    res = (*_slEngine)->CreateAudioRecorder(_slEngine, &_slRecorder,
+                                            &audioSource, &audioSink, 1, id,
+                                            req);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to create Recorder");
+        return -1;
+    }
+
+    // Realizing the recorder in synchronous mode.
+    res = (*_slRecorder)->Realize(_slRecorder, SL_BOOLEAN_FALSE);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to realize Recorder");
+        return -1;
+    }
+
+    // Get the RECORD interface - it is an implicit interface
+    res = (*_slRecorder)->GetInterface(_slRecorder, SL_IID_RECORD,
+                                       (void*) &_slRecorderRecord);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to get Recorder interface");
+        return -1;
+    }
+
+    // Get the simpleBufferQueue interface
+    res = (*_slRecorder)->GetInterface(_slRecorder,
+                                       SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
+                                       (void*) &_slRecorderSimpleBufferQueue);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to get Recorder Simple Buffer Queue");
+        return -1;
+    }
+
+    // Setup to receive buffer queue event callbacks
+    res = (*_slRecorderSimpleBufferQueue)->RegisterCallback(
+        _slRecorderSimpleBufferQueue,
+        RecorderSimpleBufferQueueCallback,
+        this);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to register Recorder Callback");
+        return -1;
+    }
+
+    _recIsInitialized = true;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::StartRecording() {
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_recIsInitialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Recording not initialized");
+        return -1;
+    }
+
+    if (_recording) {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Recording already started");
+        return 0;
+    }
+
+    if (_slRecorderRecord == NULL) {
+      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "  RecordITF is NULL");
+        return -1;
+    }
+
+    if (_slRecorderSimpleBufferQueue == NULL) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Recorder Simple Buffer Queue is NULL");
+        return -1;
+    }
+
+    // Reset recording buffer
+    memset(_recQueueBuffer, 0, sizeof(_recQueueBuffer)); // empty the queue
+    _recQueueSeq = 0;
+
+    const char* threadName = "webrtc_opensles_audio_capture_thread";
+    _ptrThreadRec = ThreadWrapper::CreateThread(RecThreadFunc, this,
+            kRealtimePriority, threadName);
+    if (_ptrThreadRec == NULL)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                "  failed to create the rec audio thread");
+        return -1;
+    }
+
+    unsigned int threadID(0);
+    if (!_ptrThreadRec->Start(threadID))
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                "  failed to start the rec audio thread");
+        delete _ptrThreadRec;
+        _ptrThreadRec = NULL;
+        return -1;
+    }
+    _recThreadID = threadID;
+    _recThreadIsInitialized = true;
+    memset(_recBuffer, 0, sizeof(_recBuffer));
+    memset(_recLength, 0, sizeof(_recLength));
+    memset(_recSeqNumber, 0, sizeof(_recSeqNumber));
+    _recCurrentSeq = 0;
+    _recBufferTotalSize = 0;
+    _recWarning = 0;
+    _recError = 0;
+
+    // Enqueue N_REC_QUEUE_BUFFERS -1 zero buffers to get the ball rolling
+    // find out how it behaves when the sample rate is 44100
+    WebRtc_Word32 res(-1);
+    WebRtc_UWord32 nSample10ms = _adbSampleRate / 100;
+    for (int i = 0; i < (N_REC_QUEUE_BUFFERS - 1); i++) {
+        // We assign 10ms buffer to each queue, size given in bytes.
+        res = (*_slRecorderSimpleBufferQueue)->Enqueue(
+            _slRecorderSimpleBufferQueue,
+            (void*) _recQueueBuffer[_recQueueSeq],
+            2 * nSample10ms);
+        if (res != SL_RESULT_SUCCESS) {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to Enqueue Empty Buffer to recorder");
+            return -1;
+        }
+        _recQueueSeq++;
+    }
+    // Record the audio
+    res = (*_slRecorderRecord)->SetRecordState(_slRecorderRecord,
+                                               SL_RECORDSTATE_RECORDING);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to start recording");
+        return -1;
+    }
+    _recording = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::StopRecording() {
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_recIsInitialized) {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Recording is not initialized");
+        return 0;
+    }
+
+    // Stop the recording thread
+    if (_ptrThreadRec != NULL)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                "Stopping capture thread");
+        bool res = _ptrThreadRec->Stop();
+        if (!res) {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                                    "Failed to stop Capture thread ");
+        } else {
+            delete _ptrThreadRec;
+            _ptrThreadRec = NULL;
+            _recThreadIsInitialized = false;
+        }
+    }
+
+    if ((_slRecorderRecord != NULL) && (_slRecorder != NULL)) {
+        // Record the audio
+        WebRtc_Word32 res = (*_slRecorderRecord)->SetRecordState(
+            _slRecorderRecord,
+            SL_RECORDSTATE_STOPPED);
+        if (res != SL_RESULT_SUCCESS) {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to stop recording");
+            return -1;
+        }
+        res = (*_slRecorderSimpleBufferQueue)->Clear(
+              _slRecorderSimpleBufferQueue);
+        if (res != SL_RESULT_SUCCESS) {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to clear recorder buffer queue");
+            return -1;
+        }
+
+        // Destroy the recorder object
+        (*_slRecorder)->Destroy(_slRecorder);
+        _slRecorder = NULL;
+        _slRecorderRecord = NULL;
+        _slRecorderRecord = NULL;
+    }
+
+    _recIsInitialized = false;
+    _recording = false;
+    _recWarning = 0;
+    _recError = 0;
+    _recQueueSeq = 0;
+    return 0;
+}
+
+bool AudioDeviceAndroidOpenSLES::RecordingIsInitialized() const {
+
+    return _recIsInitialized;
+}
+
+
+bool AudioDeviceAndroidOpenSLES::Recording() const {
+
+    return _recording;
+}
+
+bool AudioDeviceAndroidOpenSLES::PlayoutIsInitialized() const {
+
+    return _playIsInitialized;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::StartPlayout() {
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_playIsInitialized) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Playout not initialized");
+        return -1;
+    }
+
+    if (_playing) {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Playout already started");
+        return 0;
+    }
+
+    if (_slPlayerPlay == NULL) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "  PlayItf is NULL");
+        return -1;
+    }
+    if (_slPlayerSimpleBufferQueue == NULL) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  PlayerSimpleBufferQueue is NULL");
+        return -1;
+    }
+
+    _recQueueSeq = 0;
+
+    WebRtc_Word32 res(-1);
+    /* Enqueue a set of zero buffers to get the ball rolling */
+    WebRtc_UWord32 nSample10ms = _adbSampleRate / 100;
+    WebRtc_Word8 playBuffer[2 * nSample10ms];
+    WebRtc_UWord32 noSamplesOut(0);
+    {
+        noSamplesOut = _ptrAudioBuffer->RequestPlayoutData(nSample10ms);
+        //Lock();
+        // Get data from Audio Device Buffer
+        noSamplesOut = _ptrAudioBuffer->GetPlayoutData(playBuffer);
+        // Insert what we have in data buffer
+        memcpy(_playQueueBuffer[_playQueueSeq], playBuffer, 2 * noSamplesOut);
+        //UnLock();
+
+        //WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+        // "_playQueueSeq (%u)  noSamplesOut (%d)", _playQueueSeq,
+        //noSamplesOut);
+        // write the buffer data we got from VoE into the device
+        res = (*_slPlayerSimpleBufferQueue)->Enqueue(
+            _slPlayerSimpleBufferQueue,
+            (void*) _playQueueBuffer[_playQueueSeq],
+            2 * noSamplesOut);
+        if (res != SL_RESULT_SUCCESS) {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  player simpler buffer queue Enqueue failed, %d",
+                         noSamplesOut);
+            //return ; dong return
+        }
+        _playQueueSeq = (_playQueueSeq + 1) % N_PLAY_QUEUE_BUFFERS;
+    }
+
+    // Play the PCM samples using a buffer queue
+    res = (*_slPlayerPlay)->SetPlayState(_slPlayerPlay, SL_PLAYSTATE_PLAYING);
+    if (res != SL_RESULT_SUCCESS) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to start playout");
+        return -1;
+    }
+
+    _playWarning = 0;
+    _playError = 0;
+    _playing = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::StopPlayout() {
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_playIsInitialized) {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  Playout is not initialized");
+        return 0;
+    }
+
+    if ((_slPlayerPlay != NULL) && (_slOutputMixObject == NULL) && (_slPlayer
+            == NULL)) {
+        // Make sure player is stopped 
+        WebRtc_Word32 res =
+                (*_slPlayerPlay)->SetPlayState(_slPlayerPlay,
+                                               SL_PLAYSTATE_STOPPED);
+        if (res != SL_RESULT_SUCCESS) {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to stop playout");
+            return -1;
+        }
+        res = (*_slPlayerSimpleBufferQueue)->Clear(_slPlayerSimpleBufferQueue);
+        if (res != SL_RESULT_SUCCESS) {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to clear recorder buffer queue");
+            return -1;
+        }
+
+        // Destroy the player
+        (*_slPlayer)->Destroy(_slPlayer);
+        // Destroy Output Mix object 
+        (*_slOutputMixObject)->Destroy(_slOutputMixObject);
+        _slPlayer = NULL;
+        _slPlayerPlay = NULL;
+        _slPlayerSimpleBufferQueue = NULL;
+        _slOutputMixObject = NULL;
+    }
+
+    _playIsInitialized = false;
+    _playing = false;
+    _playWarning = 0;
+    _playError = 0;
+    _playQueueSeq = 0;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::PlayoutDelay(WebRtc_UWord16& delayMS) const {
+    delayMS = _playoutDelay;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::RecordingDelay(WebRtc_UWord16& delayMS) const {
+    delayMS = _recordingDelay;
+
+    return 0;
+}
+
+bool AudioDeviceAndroidOpenSLES::Playing() const {
+
+    return _playing;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetPlayoutBuffer(
+    const AudioDeviceModule::BufferType /*type*/,
+    WebRtc_UWord16 /*sizeMS*/) {
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::PlayoutBuffer(
+    AudioDeviceModule::BufferType& type,
+    WebRtc_UWord16& sizeMS) const {
+
+    type = AudioDeviceModule::kAdaptiveBufferSize;
+    sizeMS = _playoutDelay; // Set to current playout delay
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::CPULoad(WebRtc_UWord16& /*load*/) const {
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+bool AudioDeviceAndroidOpenSLES::PlayoutWarning() const {
+    return (_playWarning > 0);
+}
+
+bool AudioDeviceAndroidOpenSLES::PlayoutError() const {
+    return (_playError > 0);
+}
+
+bool AudioDeviceAndroidOpenSLES::RecordingWarning() const {
+    return (_recWarning > 0);
+}
+
+bool AudioDeviceAndroidOpenSLES::RecordingError() const {
+    return (_recError > 0);
+}
+
+void AudioDeviceAndroidOpenSLES::ClearPlayoutWarning() {
+    _playWarning = 0;
+}
+
+void AudioDeviceAndroidOpenSLES::ClearPlayoutError() {
+    _playError = 0;
+}
+
+void AudioDeviceAndroidOpenSLES::ClearRecordingWarning() {
+    _recWarning = 0;
+}
+
+void AudioDeviceAndroidOpenSLES::ClearRecordingError() {
+    _recError = 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::SetLoudspeakerStatus(bool enable) {
+    _loudSpeakerOn = enable;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::GetLoudspeakerStatus(
+    bool& enabled) const {
+
+    enabled = _loudSpeakerOn;
+    return 0;
+}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+void AudioDeviceAndroidOpenSLES::PlayerSimpleBufferQueueCallback(
+    SLAndroidSimpleBufferQueueItf queueItf,
+    void *pContext) {
+    AudioDeviceAndroidOpenSLES* ptrThis =
+            static_cast<AudioDeviceAndroidOpenSLES*> (pContext);
+    ptrThis->PlayerSimpleBufferQueueCallbackHandler(queueItf);
+}
+
+void AudioDeviceAndroidOpenSLES::PlayerSimpleBufferQueueCallbackHandler(
+    SLAndroidSimpleBufferQueueItf queueItf) {
+    WebRtc_Word32 res;
+    //Lock();
+    //WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+    //"_playQueueSeq (%u)", _playQueueSeq);
+    if (_playing && (_playQueueSeq < N_PLAY_QUEUE_BUFFERS)) {
+        //WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,
+        //_id, "playout callback ");
+        unsigned int noSamp10ms = _adbSampleRate / 100;
+        // Max 10 ms @ samplerate kHz / 16 bit
+        WebRtc_Word8 playBuffer[2 * noSamp10ms];
+        int noSamplesOut = 0;
+
+        // Assumption for implementation
+        // assert(PLAYBUFSIZESAMPLES == noSamp10ms);
+
+        // TODO(xians), update the playout delay
+        //UnLock();
+
+        noSamplesOut = _ptrAudioBuffer->RequestPlayoutData(noSamp10ms);
+        //Lock();
+        // Get data from Audio Device Buffer
+        noSamplesOut = _ptrAudioBuffer->GetPlayoutData(playBuffer);
+        // Cast OK since only equality comparison
+        if (noSamp10ms != (unsigned int) noSamplesOut) {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "noSamp10ms (%u) != noSamplesOut (%d)", noSamp10ms,
+                         noSamplesOut);
+
+            if (_playWarning > 0) {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                             "  Pending play warning exists");
+            }
+            _playWarning = 1;
+        }
+        // Insert what we have in data buffer
+        memcpy(_playQueueBuffer[_playQueueSeq], playBuffer, 2 * noSamplesOut);
+        //UnLock();
+
+        //WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+        //"_playQueueSeq (%u)  noSamplesOut (%d)", _playQueueSeq, noSamplesOut);
+        // write the buffer data we got from VoE into the device
+        res = (*_slPlayerSimpleBufferQueue)->Enqueue(
+            _slPlayerSimpleBufferQueue,
+            _playQueueBuffer[_playQueueSeq],
+            2 * noSamplesOut);
+        if (res != SL_RESULT_SUCCESS) {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  player simpler buffer queue Enqueue failed, %d",
+                         noSamplesOut);
+            return;
+        }
+        // update the playout delay
+        UpdatePlayoutDelay(noSamplesOut);
+        // update the play buffer sequency
+        _playQueueSeq = (_playQueueSeq + 1) % N_PLAY_QUEUE_BUFFERS;
+    }
+}
+
+void AudioDeviceAndroidOpenSLES::RecorderSimpleBufferQueueCallback(
+    SLAndroidSimpleBufferQueueItf queueItf,
+    void *pContext) {
+    AudioDeviceAndroidOpenSLES* ptrThis =
+            static_cast<AudioDeviceAndroidOpenSLES*> (pContext);
+    ptrThis->RecorderSimpleBufferQueueCallbackHandler(queueItf);
+}
+
+void AudioDeviceAndroidOpenSLES::RecorderSimpleBufferQueueCallbackHandler(
+    SLAndroidSimpleBufferQueueItf queueItf) {
+    WebRtc_Word32 res;
+    //WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+    //"  RecorderSimpleBufferQueueCallbackHandler");
+    if (_recording) {
+        // Insert all data in temp buffer into recording buffers
+        // There is zero or one buffer partially full at any given time,
+        // all others are full or empty
+        // Full means filled with noSamp10ms samples.
+
+        const unsigned int noSamp10ms = _adbSampleRate / 100;
+        //        WebRtc_UWord16 queuePos = 0;
+        //        WebRtc_UWord16 checkQueuePos = 0;
+        unsigned int dataPos = 0;
+        WebRtc_UWord16 bufPos = 0;
+        WebRtc_Word16 insertPos = -1;
+        unsigned int nCopy = 0; // Number of samples to copy
+        //        WebRtc_Word32 isData = 0;
+
+        while (dataPos < noSamp10ms)//REC_BUF_SIZE_IN_SAMPLES) //noSamp10ms)
+
+        {
+            // Loop over all recording buffers or until we find the partially
+            // full buffer
+            // First choice is to insert into partially full buffer,
+            // second choice is to insert into empty buffer
+            bufPos = 0;
+            insertPos = -1;
+            nCopy = 0;
+            while (bufPos < N_REC_BUFFERS)
+            {
+                if ((_recLength[bufPos] > 0) && (_recLength[bufPos]
+                                < noSamp10ms))
+                {
+                    // Found the partially full buffer
+                    insertPos = static_cast<WebRtc_Word16> (bufPos);
+                    bufPos = N_REC_BUFFERS; // Don't need to search more
+                }
+                else if ((-1 == insertPos) && (0 == _recLength[bufPos]))
+                {
+                    // Found an empty buffer
+                    insertPos = static_cast<WebRtc_Word16> (bufPos);
+                }
+                ++bufPos;
+            }
+
+            if (insertPos > -1)
+            {
+                // We found a non-full buffer, copy data from the buffer queue
+                // o recBuffer
+                unsigned int dataToCopy = noSamp10ms - dataPos;
+                unsigned int currentRecLen = _recLength[insertPos];
+                unsigned int roomInBuffer = noSamp10ms - currentRecLen;
+                nCopy = (dataToCopy < roomInBuffer ? dataToCopy : roomInBuffer);
+                memcpy(&_recBuffer[insertPos][currentRecLen],
+                        &_recQueueBuffer[_recQueueSeq][dataPos],
+                        nCopy * sizeof(short));
+                if (0 == currentRecLen)
+                {
+                    _recSeqNumber[insertPos] = _recCurrentSeq;
+                    ++_recCurrentSeq;
+                }
+                _recBufferTotalSize += nCopy;
+                // Has to be done last to avoid interrupt problems
+                // between threads
+                _recLength[insertPos] += nCopy;
+                dataPos += nCopy;
+            }
+            else
+            {
+                // Didn't find a non-full buffer
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,
+                        _id, "  Could not insert into recording buffer");
+                if (_recWarning > 0)
+                {
+                    WEBRTC_TRACE(kTraceWarning,
+                            kTraceAudioDevice, _id,
+                            "  Pending rec warning exists");
+                }
+                _recWarning = 1;
+                dataPos = noSamp10ms; // Don't try to insert more
+            }
+        }
+
+        // clean the queue buffer
+        // Start with empty buffer
+        memset(_recQueueBuffer[_recQueueSeq], 0, 2 * REC_BUF_SIZE_IN_SAMPLES);
+        // write the empty buffer to the queue
+        res = (*_slRecorderSimpleBufferQueue)->Enqueue(
+              _slRecorderSimpleBufferQueue,
+              (void*) _recQueueBuffer[_recQueueSeq],
+              2 * noSamp10ms);
+        if (res != SL_RESULT_SUCCESS) {
+            return;
+        }
+        // update the rec queue seq
+        _recQueueSeq = (_recQueueSeq + 1) % N_REC_QUEUE_BUFFERS;
+        // wake up the recording thread
+        _timeEventRec.Set();
+    }
+}
+
+void AudioDeviceAndroidOpenSLES::CheckErr(SLresult res) {
+    if (res != SL_RESULT_SUCCESS) {
+        // Debug printing to be placed here
+        exit(-1);
+    }
+}
+
+void AudioDeviceAndroidOpenSLES::UpdatePlayoutDelay(
+    WebRtc_UWord32 nSamplePlayed) {
+    // currently just do some simple calculation, should we setup a timer for
+    // the callback to have a more accurate delay
+    // Android CCD asks for 10ms as the maximum warm output latency, so we
+    // simply add (nPlayQueueBuffer -1 + 0.5)*10ms
+    // This playout delay should be seldom changed
+    _playoutDelay = (N_PLAY_QUEUE_BUFFERS - 0.5) * 10 + N_PLAY_QUEUE_BUFFERS
+            * nSamplePlayed / (_adbSampleRate / 1000);
+}
+
+void AudioDeviceAndroidOpenSLES::UpdateRecordingDelay() {
+    // // Android CCD asks for 10ms as the maximum warm input latency,
+    // so we simply add 10ms
+    _recordingDelay = 10;
+    const WebRtc_UWord32 noSamp10ms = _adbSampleRate / 100;
+    //    if (_recBufferTotalSize > noSamp10ms)
+    //    {
+    _recordingDelay += (N_REC_QUEUE_BUFFERS * noSamp10ms) / (_adbSampleRate
+            / 1000);
+    //    }
+}
+
+WebRtc_Word32 AudioDeviceAndroidOpenSLES::InitSampleRate() {
+    if (_slEngineObject == NULL) {
+      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "  SL Object is NULL");
+      return -1;
+    }
+
+    _samplingRateIn = SL_SAMPLINGRATE_16;
+    _samplingRateOut = SL_SAMPLINGRATE_16;
+    _adbSampleRate = 16000;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
+                 "  sample rate set to (%d)", _adbSampleRate);
+    return 0;
+
+}
+
+// ============================================================================
+//                                  Thread Methods
+// ============================================================================
+
+bool AudioDeviceAndroidOpenSLES::RecThreadFunc(void* pThis) {
+  return (static_cast<AudioDeviceAndroidOpenSLES*>(pThis)->RecThreadProcess());
+}
+
+bool AudioDeviceAndroidOpenSLES::RecThreadProcess() {
+
+    //    Lock();
+    // Wait for 100ms for the signal from device callback
+    // In case no callback comes in 100ms, we check the buffer anyway
+    _timeEventRec.Wait(100);
+
+    int bufPos = 0;
+    unsigned int lowestSeq = 0;
+    int lowestSeqBufPos = 0;
+    bool foundBuf = true;
+    const unsigned int noSamp10ms = _adbSampleRate / 100;
+
+    while (foundBuf)
+    {
+        // Check if we have any buffer with data to insert into the
+        // Audio Device Buffer,
+        // and find the one with the lowest seq number
+        foundBuf = false;
+
+        for (bufPos = 0; bufPos < N_REC_BUFFERS; ++bufPos)
+        {
+            if (noSamp10ms == _recLength[bufPos])
+            {
+                if (!foundBuf) {
+                    lowestSeq = _recSeqNumber[bufPos];
+                    lowestSeqBufPos = bufPos;
+                    foundBuf = true;
+                } else if (_recSeqNumber[bufPos] < lowestSeq)
+                {
+                    lowestSeq = _recSeqNumber[bufPos];
+                    lowestSeqBufPos = bufPos;
+                }
+            }
+        } // for
+
+        // Insert data into the Audio Device Buffer if found any
+        if (foundBuf)
+        {
+            UpdateRecordingDelay();
+            // Set the recorded buffer
+            _ptrAudioBuffer->SetRecordedBuffer(_recBuffer[lowestSeqBufPos],
+                                               noSamp10ms);
+
+            // Don't need to set the current mic level in ADB since we only
+            // support digital AGC,
+            // and besides we cannot get or set the iPhone mic level anyway.
+
+            // Set VQE info, use clockdrift == 0
+            _ptrAudioBuffer->SetVQEData(_playoutDelay, _recordingDelay, 0);
+
+            // Deliver recorded samples at specified sample rate, mic level
+            // etc. to the observer using callback
+            //UnLock();
+            _ptrAudioBuffer->DeliverRecordedData();
+            //Lock();
+
+            // Make buffer available
+            _recSeqNumber[lowestSeqBufPos] = 0;
+            _recBufferTotalSize -= _recLength[lowestSeqBufPos];
+            // Must be done last to avoid interrupt problems between threads
+            _recLength[lowestSeqBufPos] = 0;
+        }
+
+    } // while (foundBuf)
+    //UnLock();
+    return true;
+}
+
+} // namespace webrtc
diff --git a/src/modules/audio_device/main/source/android/audio_device_android_opensles.h b/src/modules/audio_device/main/source/android/audio_device_android_opensles.h
new file mode 100644
index 0000000..612fc36
--- /dev/null
+++ b/src/modules/audio_device/main/source/android/audio_device_android_opensles.h
@@ -0,0 +1,310 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_ANDROID_OPENSLES_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_ANDROID_OPENSLES_H
+
+#include "audio_device_generic.h"
+#include "critical_section_wrapper.h"
+
+#include <jni.h> // For accessing AudioDeviceAndroid.java
+#include <stdio.h>
+#include <stdlib.h>
+
+#include <SLES/OpenSLES.h>
+#include <SLES/OpenSLES_Android.h>
+#include <SLES/OpenSLES_AndroidConfiguration.h>
+
+namespace webrtc
+{
+class EventWrapper;
+
+const WebRtc_UWord32 N_MAX_INTERFACES = 3;
+const WebRtc_UWord32 N_MAX_OUTPUT_DEVICES = 6;
+const WebRtc_UWord32 N_MAX_INPUT_DEVICES = 3;
+
+const WebRtc_UWord32 N_REC_SAMPLES_PER_SEC = 16000;//44000;  // Default fs
+const WebRtc_UWord32 N_PLAY_SAMPLES_PER_SEC = 16000;//44000; // Default fs
+
+const WebRtc_UWord32 N_REC_CHANNELS = 1; // default is mono recording
+const WebRtc_UWord32 N_PLAY_CHANNELS = 1; // default is mono playout
+
+const WebRtc_UWord32 REC_BUF_SIZE_IN_SAMPLES = 480; // Handle max 10 ms @ 48 kHz
+const WebRtc_UWord32 PLAY_BUF_SIZE_IN_SAMPLES = 480;
+
+// Number of the buffers in playout queue
+const WebRtc_UWord16 N_PLAY_QUEUE_BUFFERS = 2;
+// Number of buffers in recording queue
+const WebRtc_UWord16 N_REC_QUEUE_BUFFERS = 2;
+// Number of 10 ms recording blocks in rec buffer
+const WebRtc_UWord16 N_REC_BUFFERS = 20;
+
+class ThreadWrapper;
+
+class AudioDeviceAndroidOpenSLES: public AudioDeviceGeneric
+{
+public:
+    AudioDeviceAndroidOpenSLES(const WebRtc_Word32 id);
+    ~AudioDeviceAndroidOpenSLES();
+
+    // Retrieve the currently utilized audio layer
+    virtual WebRtc_Word32
+            ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const;
+
+    // Main initializaton and termination
+    virtual WebRtc_Word32 Init();
+    virtual WebRtc_Word32 Terminate();
+    virtual bool Initialized() const;
+
+    // Device enumeration
+    virtual WebRtc_Word16 PlayoutDevices();
+    virtual WebRtc_Word16 RecordingDevices();
+    virtual WebRtc_Word32
+            PlayoutDeviceName(WebRtc_UWord16 index,
+                              char name[kAdmMaxDeviceNameSize],
+                              char guid[kAdmMaxGuidSize]);
+    virtual WebRtc_Word32
+            RecordingDeviceName(WebRtc_UWord16 index,
+                                char name[kAdmMaxDeviceNameSize],
+                                char guid[kAdmMaxGuidSize]);
+
+    // Device selection
+    virtual WebRtc_Word32 SetPlayoutDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32
+            SetPlayoutDevice(AudioDeviceModule::WindowsDeviceType device);
+    virtual WebRtc_Word32 SetRecordingDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32
+            SetRecordingDevice(AudioDeviceModule::WindowsDeviceType device);
+
+    // Audio transport initialization
+    virtual WebRtc_Word32 PlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitPlayout();
+    virtual bool PlayoutIsInitialized() const;
+    virtual WebRtc_Word32 RecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitRecording();
+    virtual bool RecordingIsInitialized() const;
+
+    // Audio transport control
+    virtual WebRtc_Word32 StartPlayout();
+    virtual WebRtc_Word32 StopPlayout();
+    virtual bool Playing() const;
+    virtual WebRtc_Word32 StartRecording();
+    virtual WebRtc_Word32 StopRecording();
+    virtual bool Recording() const;
+
+    // Microphone Automatic Gain Control (AGC)
+    virtual WebRtc_Word32 SetAGC(bool enable);
+    virtual bool AGC() const;
+
+    // Volume control based on the Windows Wave API (Windows only)
+    virtual WebRtc_Word32 SetWaveOutVolume(WebRtc_UWord16 volumeLeft,
+                                           WebRtc_UWord16 volumeRight);
+    virtual WebRtc_Word32 WaveOutVolume(WebRtc_UWord16& volumeLeft,
+                                        WebRtc_UWord16& volumeRight) const;
+
+    // Audio mixer initialization
+    virtual WebRtc_Word32 SpeakerIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitSpeaker();
+    virtual bool SpeakerIsInitialized() const;
+    SLPlayItf playItf;
+    virtual WebRtc_Word32 MicrophoneIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitMicrophone();
+    virtual bool MicrophoneIsInitialized() const;
+
+    // Speaker volume controls
+    virtual WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
+
+    // Microphone volume controls
+    virtual WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32
+            MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const;
+
+    // Speaker mute control
+    virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerMute(bool enable);
+    virtual WebRtc_Word32 SpeakerMute(bool& enabled) const;
+
+    // Microphone mute control
+    virtual WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneMute(bool enable);
+    virtual WebRtc_Word32 MicrophoneMute(bool& enabled) const;
+
+    // Microphone boost control
+    virtual WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    virtual WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
+
+    // Stereo support
+    virtual WebRtc_Word32 StereoPlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoPlayout(bool enable);
+    virtual WebRtc_Word32 StereoPlayout(bool& enabled) const;
+    virtual WebRtc_Word32 StereoRecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoRecording(bool enable);
+    virtual WebRtc_Word32 StereoRecording(bool& enabled) const;
+
+    // Delay information and control
+    virtual WebRtc_Word32
+            SetPlayoutBuffer(const AudioDeviceModule::BufferType type,
+                             WebRtc_UWord16 sizeMS);
+    virtual WebRtc_Word32 PlayoutBuffer(AudioDeviceModule::BufferType& type,
+                                        WebRtc_UWord16& sizeMS) const;
+    virtual WebRtc_Word32 PlayoutDelay(WebRtc_UWord16& delayMS) const;
+    virtual WebRtc_Word32 RecordingDelay(WebRtc_UWord16& delayMS) const;
+
+    // CPU load
+    virtual WebRtc_Word32 CPULoad(WebRtc_UWord16& load) const;
+
+    // Error and warning information
+    virtual bool PlayoutWarning() const;
+    virtual bool PlayoutError() const;
+    virtual bool RecordingWarning() const;
+    virtual bool RecordingError() const;
+    virtual void ClearPlayoutWarning();
+    virtual void ClearPlayoutError();
+    virtual void ClearRecordingWarning();
+    virtual void ClearRecordingError();
+
+    // Attach audio buffer
+    virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
+
+    // Speaker audio routing
+    virtual WebRtc_Word32 SetLoudspeakerStatus(bool enable);
+    virtual WebRtc_Word32 GetLoudspeakerStatus(bool& enable) const;
+
+private:
+    // Lock
+    void Lock()
+    {
+        _critSect.Enter();
+    };
+    void UnLock()
+    {
+        _critSect.Leave();
+    };
+
+    static void PlayerSimpleBufferQueueCallback(
+            SLAndroidSimpleBufferQueueItf queueItf,
+            void *pContext);
+    void PlayerSimpleBufferQueueCallbackHandler(
+            SLAndroidSimpleBufferQueueItf queueItf);
+    static void RecorderSimpleBufferQueueCallback(
+            SLAndroidSimpleBufferQueueItf queueItf,
+            void *pContext);
+    void RecorderSimpleBufferQueueCallbackHandler(
+            SLAndroidSimpleBufferQueueItf queueItf);
+    void CheckErr(SLresult res);
+
+    // Delay updates
+    void UpdateRecordingDelay();
+    void UpdatePlayoutDelay(WebRtc_UWord32 nSamplePlayed);
+
+    // Init
+    WebRtc_Word32 InitSampleRate();
+
+    // Threads
+    static bool RecThreadFunc(void*);
+    static bool PlayThreadFunc(void*);
+    bool RecThreadProcess();
+    bool PlayThreadProcess();
+
+    // Misc
+    AudioDeviceBuffer* _ptrAudioBuffer;
+    CriticalSectionWrapper& _critSect;
+    WebRtc_Word32 _id;
+
+    // audio unit
+    SLObjectItf _slEngineObject;
+
+    // playout device
+    SLObjectItf _slPlayer;
+    SLEngineItf _slEngine;
+    SLPlayItf _slPlayerPlay;
+    SLAndroidSimpleBufferQueueItf _slPlayerSimpleBufferQueue;
+    SLObjectItf _slOutputMixObject;
+    SLVolumeItf _slSpeakerVolume;
+
+    // recording device
+    SLObjectItf _slRecorder;
+    SLRecordItf _slRecorderRecord;
+    SLAudioIODeviceCapabilitiesItf _slAudioIODeviceCapabilities;
+    SLAndroidSimpleBufferQueueItf _slRecorderSimpleBufferQueue;
+    SLDeviceVolumeItf _slMicVolume;
+
+    WebRtc_UWord32 _micDeviceId;
+    WebRtc_UWord32 _recQueueSeq;
+
+    // Events
+    EventWrapper& _timeEventRec;
+    // Threads
+    ThreadWrapper* _ptrThreadRec;
+    WebRtc_UWord32 _recThreadID;
+    // TODO(xians), remove the following flag
+    bool _recThreadIsInitialized;
+
+    // Playout buffer
+    WebRtc_Word8 _playQueueBuffer[N_PLAY_QUEUE_BUFFERS][2
+            * PLAY_BUF_SIZE_IN_SAMPLES];
+    WebRtc_UWord32 _playQueueSeq;
+    // Recording buffer
+    WebRtc_Word8 _recQueueBuffer[N_REC_QUEUE_BUFFERS][2
+            * REC_BUF_SIZE_IN_SAMPLES];
+    WebRtc_Word8 _recBuffer[N_REC_BUFFERS][2*REC_BUF_SIZE_IN_SAMPLES];
+    WebRtc_UWord32 _recLength[N_REC_BUFFERS];
+    WebRtc_UWord32 _recSeqNumber[N_REC_BUFFERS];
+    WebRtc_UWord32 _recCurrentSeq;
+    // Current total size all data in buffers, used for delay estimate
+    WebRtc_UWord32 _recBufferTotalSize;
+
+    // States
+    bool _recordingDeviceIsSpecified;
+    bool _playoutDeviceIsSpecified;
+    bool _initialized;
+    bool _recording;
+    bool _playing;
+    bool _recIsInitialized;
+    bool _playIsInitialized;
+    bool _micIsInitialized;
+    bool _speakerIsInitialized;
+
+    // Warnings and errors
+    WebRtc_UWord16 _playWarning;
+    WebRtc_UWord16 _playError;
+    WebRtc_UWord16 _recWarning;
+    WebRtc_UWord16 _recError;
+
+    // Delay
+    WebRtc_UWord16 _playoutDelay;
+    WebRtc_UWord16 _recordingDelay;
+
+    // AGC state
+    bool _AGC;
+
+    // The sampling rate to use with Audio Device Buffer
+    WebRtc_UWord32 _adbSampleRate;
+    // Stored device properties
+    WebRtc_UWord32 _samplingRateIn; // Sampling frequency for Mic
+    WebRtc_UWord32 _samplingRateOut; // Sampling frequency for Speaker
+    WebRtc_UWord32 _maxSpeakerVolume; // The maximum speaker volume value
+    WebRtc_UWord32 _minSpeakerVolume; // The minimum speaker volume value
+    bool _loudSpeakerOn;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_ANDROID_OPENSLES_H
diff --git a/src/modules/audio_device/main/source/android/audio_device_utility_android.cc b/src/modules/audio_device/main/source/android/audio_device_utility_android.cc
new file mode 100644
index 0000000..ccb15d3
--- /dev/null
+++ b/src/modules/audio_device/main/source/android/audio_device_utility_android.cc
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  Android audio device utility implementation
+ */
+
+#include "audio_device_utility_android.h"
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+namespace webrtc
+{
+
+AudioDeviceUtilityAndroid::AudioDeviceUtilityAndroid(const WebRtc_Word32 id) :
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()), _id(id),
+    _lastError(AudioDeviceModule::kAdmErrNone)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id,
+                 "%s created", __FUNCTION__);
+}
+
+AudioDeviceUtilityAndroid::~AudioDeviceUtilityAndroid()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s destroyed", __FUNCTION__);
+    {
+        CriticalSectionScoped lock(&_critSect);
+    }
+
+    delete &_critSect;
+}
+
+WebRtc_Word32 AudioDeviceUtilityAndroid::Init()
+{
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
+                 "  OS info: %s", "Android");
+
+    return 0;
+}
+
+} // namespace webrtc
diff --git a/src/modules/audio_device/main/source/android/audio_device_utility_android.h b/src/modules/audio_device/main/source/android/audio_device_utility_android.h
new file mode 100644
index 0000000..81f685a
--- /dev/null
+++ b/src/modules/audio_device/main/source/android/audio_device_utility_android.h
@@ -0,0 +1,41 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  Android audio device utility interface
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_ANDROID_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_ANDROID_H
+
+#include "audio_device_utility.h"
+#include "audio_device.h"
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+
+class AudioDeviceUtilityAndroid: public AudioDeviceUtility
+{
+public:
+    AudioDeviceUtilityAndroid(const WebRtc_Word32 id);
+    ~AudioDeviceUtilityAndroid();
+
+    virtual WebRtc_Word32 Init();
+
+private:
+    CriticalSectionWrapper& _critSect;
+    WebRtc_Word32 _id;
+    AudioDeviceModule::ErrorCode _lastError;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_ANDROID_H
diff --git a/src/modules/audio_device/main/source/android/org/webrtc/voiceengine/AudioDeviceAndroid.java b/src/modules/audio_device/main/source/android/org/webrtc/voiceengine/AudioDeviceAndroid.java
new file mode 100644
index 0000000..b56085b
--- /dev/null
+++ b/src/modules/audio_device/main/source/android/org/webrtc/voiceengine/AudioDeviceAndroid.java
@@ -0,0 +1,509 @@
+/*

+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.

+ *

+ *  Use of this source code is governed by a BSD-style license

+ *  that can be found in the LICENSE file in the root of the source

+ *  tree. An additional intellectual property rights grant can be found

+ *  in the file PATENTS.  All contributing project authors may

+ *  be found in the AUTHORS file in the root of the source tree.

+ */

+

+/*

+ *  Android audio device test app

+ */

+

+package org.webrtc.voiceengine;

+

+import java.nio.ByteBuffer;

+import java.util.concurrent.locks.ReentrantLock;

+

+import android.content.Context;

+import android.media.AudioFormat;

+import android.media.AudioManager;

+import android.media.AudioRecord;

+import android.media.AudioTrack;

+import android.util.Log;

+

+

+class AudioDeviceAndroid {

+    private AudioTrack _audioTrack = null;

+    private AudioRecord _audioRecord = null;

+

+    private Context _context;

+    private AudioManager _audioManager;

+

+    private ByteBuffer _playBuffer;

+    private ByteBuffer _recBuffer;

+    private byte[] _tempBufPlay;

+    private byte[] _tempBufRec;

+

+    private final ReentrantLock _playLock = new ReentrantLock();

+    private final ReentrantLock _recLock = new ReentrantLock();

+

+    private boolean _doPlayInit = true;

+    private boolean _doRecInit = true;

+    private boolean _isRecording = false;

+    private boolean _isPlaying = false;

+

+    private int _bufferedRecSamples = 0;

+    private int _bufferedPlaySamples = 0;

+    private int _playPosition = 0;

+

+    AudioDeviceAndroid() {

+        try {

+            _playBuffer = ByteBuffer.allocateDirect(2 * 480); // Max 10 ms @ 48

+                                                              // kHz

+            _recBuffer = ByteBuffer.allocateDirect(2 * 480); // Max 10 ms @ 48

+                                                             // kHz

+        } catch (Exception e) {

+            DoLog(e.getMessage());

+        }

+

+        _tempBufPlay = new byte[2 * 480];

+        _tempBufRec = new byte[2 * 480];

+    }

+

+    @SuppressWarnings("unused")

+    private int InitRecording(int audioSource, int sampleRate) {

+        // get the minimum buffer size that can be used

+        int minRecBufSize =

+                        AudioRecord.getMinBufferSize(sampleRate,

+                                        AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                                        AudioFormat.ENCODING_PCM_16BIT);

+

+        // DoLog("min rec buf size is " + minRecBufSize);

+

+        // double size to be more safe

+        int recBufSize = minRecBufSize * 2;

+        _bufferedRecSamples = (5 * sampleRate) / 200;

+        // DoLog("rough rec delay set to " + _bufferedRecSamples);

+

+        // release the object

+        if (_audioRecord != null) {

+            _audioRecord.release();

+            _audioRecord = null;

+        }

+

+        try {

+            _audioRecord = new AudioRecord(

+                            audioSource,

+                            sampleRate,

+                            AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                            AudioFormat.ENCODING_PCM_16BIT,

+                            recBufSize);

+

+        } catch (Exception e) {

+            DoLog(e.getMessage());

+            return -1;

+        }

+

+        // check that the audioRecord is ready to be used

+        if (_audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {

+            // DoLog("rec not initialized " + sampleRate);

+            return -1;

+        }

+

+        // DoLog("rec sample rate set to " + sampleRate);

+

+        return _bufferedRecSamples;

+    }

+

+    @SuppressWarnings("unused")

+    private int StartRecording() {

+        if (_isPlaying == false) {

+            SetAudioMode(true);

+        }

+

+        // start recording

+        try {

+            _audioRecord.startRecording();

+

+        } catch (IllegalStateException e) {

+            e.printStackTrace();

+            return -1;

+        }

+

+        _isRecording = true;

+        return 0;

+    }

+

+    @SuppressWarnings("unused")

+    private int InitPlayback(int sampleRate) {

+        // get the minimum buffer size that can be used

+        int minPlayBufSize =

+                        AudioTrack.getMinBufferSize(sampleRate,

+                                        AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                                        AudioFormat.ENCODING_PCM_16BIT);

+

+        // DoLog("min play buf size is " + minPlayBufSize);

+

+        int playBufSize = minPlayBufSize;

+        if (playBufSize < 6000) {

+            playBufSize *= 2;

+        }

+        _bufferedPlaySamples = 0;

+        // DoLog("play buf size is " + playBufSize);

+

+        // release the object

+        if (_audioTrack != null) {

+            _audioTrack.release();

+            _audioTrack = null;

+        }

+

+        try {

+            _audioTrack = new AudioTrack(

+                            AudioManager.STREAM_VOICE_CALL,

+                            sampleRate,

+                            AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                            AudioFormat.ENCODING_PCM_16BIT,

+                            playBufSize, AudioTrack.MODE_STREAM);

+        } catch (Exception e) {

+            DoLog(e.getMessage());

+            return -1;

+        }

+

+        // check that the audioRecord is ready to be used

+        if (_audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {

+            // DoLog("play not initialized " + sampleRate);

+            return -1;

+        }

+

+        // DoLog("play sample rate set to " + sampleRate);

+

+        if (_audioManager == null && _context != null) {

+            _audioManager = (AudioManager)

+                _context.getSystemService(Context.AUDIO_SERVICE);

+        }

+

+        // Return max playout volume

+        if (_audioManager == null) {

+            // Don't know the max volume but still init is OK for playout,

+            // so we should not return error.

+            return 0;

+        }

+        return _audioManager.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL);

+    }

+

+    @SuppressWarnings("unused")

+    private int StartPlayback() {

+        if (_isRecording == false) {

+            SetAudioMode(true);

+        }

+

+        // start playout

+        try {

+            _audioTrack.play();

+

+        } catch (IllegalStateException e) {

+            e.printStackTrace();

+            return -1;

+        }

+

+        _isPlaying = true;

+        return 0;

+    }

+

+    @SuppressWarnings("unused")

+    private int StopRecording() {

+        _recLock.lock();

+        try {

+            // only stop if we are recording

+            if (_audioRecord.getRecordingState() ==

+              AudioRecord.RECORDSTATE_RECORDING) {

+                // stop recording

+                try {

+                    _audioRecord.stop();

+                } catch (IllegalStateException e) {

+                    e.printStackTrace();

+                    return -1;

+                }

+            }

+

+            // release the object

+            _audioRecord.release();

+            _audioRecord = null;

+

+        } finally {

+            // Ensure we always unlock, both for success, exception or error

+            // return.

+            _doRecInit = true;

+            _recLock.unlock();

+        }

+

+        if (_isPlaying == false) {

+            SetAudioMode(false);

+        }

+

+        _isRecording = false;

+        return 0;

+    }

+

+    @SuppressWarnings("unused")

+    private int StopPlayback() {

+        _playLock.lock();

+        try {

+            // only stop if we are playing

+            if (_audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING) {

+                // stop playout

+                try {

+                    _audioTrack.stop();

+                } catch (IllegalStateException e) {

+                    e.printStackTrace();

+                    return -1;

+                }

+

+                // flush the buffers

+                _audioTrack.flush();

+            }

+

+            // release the object

+            _audioTrack.release();

+            _audioTrack = null;

+

+        } finally {

+            // Ensure we always unlock, both for success, exception or error

+            // return.

+            _doPlayInit = true;

+            _playLock.unlock();

+        }

+

+        if (_isRecording == false) {

+            SetAudioMode(false);

+        }

+

+        _isPlaying = false;

+        return 0;

+    }

+

+    @SuppressWarnings("unused")

+    private int PlayAudio(int lengthInBytes) {

+

+        int bufferedSamples = 0;

+

+        _playLock.lock();

+        try {

+            if (_audioTrack == null) {

+                return -2; // We have probably closed down while waiting for

+                           // play lock

+            }

+

+            // Set priority, only do once

+            if (_doPlayInit == true) {

+                try {

+                    android.os.Process.setThreadPriority(

+                        android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

+                } catch (Exception e) {

+                    DoLog("Set play thread priority failed: " + e.getMessage());

+                }

+                _doPlayInit = false;

+            }

+

+            int written = 0;

+            _playBuffer.get(_tempBufPlay);

+            written = _audioTrack.write(_tempBufPlay, 0, lengthInBytes);

+            _playBuffer.rewind(); // Reset the position to start of buffer

+

+            // DoLog("Wrote data to sndCard");

+

+            // increase by number of written samples

+            _bufferedPlaySamples += (written >> 1);

+

+            // decrease by number of played samples

+            int pos = _audioTrack.getPlaybackHeadPosition();

+            if (pos < _playPosition) { // wrap or reset by driver

+                _playPosition = 0; // reset

+            }

+            _bufferedPlaySamples -= (pos - _playPosition);

+            _playPosition = pos;

+

+            if (!_isRecording) {

+                bufferedSamples = _bufferedPlaySamples;

+            }

+

+            if (written != lengthInBytes) {

+                // DoLog("Could not write all data to sc (written = " + written

+                // + ", length = " + lengthInBytes + ")");

+                return -1;

+            }

+

+        } finally {

+            // Ensure we always unlock, both for success, exception or error

+            // return.

+            _playLock.unlock();

+        }

+

+        return bufferedSamples;

+    }

+

+    @SuppressWarnings("unused")

+    private int RecordAudio(int lengthInBytes) {

+        _recLock.lock();

+

+        try {

+            if (_audioRecord == null) {

+                return -2; // We have probably closed down while waiting for rec

+                           // lock

+            }

+

+            // Set priority, only do once

+            if (_doRecInit == true) {

+                try {

+                    android.os.Process.setThreadPriority(

+                        android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

+                } catch (Exception e) {

+                    DoLog("Set rec thread priority failed: " + e.getMessage());

+                }

+                _doRecInit = false;

+            }

+

+            int readBytes = 0;

+            _recBuffer.rewind(); // Reset the position to start of buffer

+            readBytes = _audioRecord.read(_tempBufRec, 0, lengthInBytes);

+            // DoLog("read " + readBytes + "from SC");

+            _recBuffer.put(_tempBufRec);

+

+            if (readBytes != lengthInBytes) {

+                // DoLog("Could not read all data from sc (read = " + readBytes

+                // + ", length = " + lengthInBytes + ")");

+                return -1;

+            }

+

+        } catch (Exception e) {

+            DoLogErr("RecordAudio try failed: " + e.getMessage());

+

+        } finally {

+            // Ensure we always unlock, both for success, exception or error

+            // return.

+            _recLock.unlock();

+        }

+

+        return (_bufferedPlaySamples);

+    }

+

+    @SuppressWarnings("unused")

+    private int SetPlayoutSpeaker(boolean loudspeakerOn) {

+        // create audio manager if needed

+        if (_audioManager == null && _context != null) {

+            _audioManager = (AudioManager)

+                _context.getSystemService(Context.AUDIO_SERVICE);

+        }

+

+        if (_audioManager == null) {

+            DoLogErr("Could not change audio routing - no audio manager");

+            return -1;

+        }

+

+        int apiLevel = Integer.parseInt(android.os.Build.VERSION.SDK);

+

+        if ((3 == apiLevel) || (4 == apiLevel)) {

+            // 1.5 and 1.6 devices

+            if (loudspeakerOn) {

+                // route audio to back speaker

+                _audioManager.setMode(AudioManager.MODE_NORMAL);

+            } else {

+                // route audio to earpiece

+                _audioManager.setMode(AudioManager.MODE_IN_CALL);

+            }

+        } else {

+            // 2.x devices

+            if ((android.os.Build.BRAND.equals("Samsung") ||

+                            android.os.Build.BRAND.equals("samsung")) &&

+                            ((5 == apiLevel) || (6 == apiLevel) ||

+                            (7 == apiLevel))) {

+                // Samsung 2.0, 2.0.1 and 2.1 devices

+                if (loudspeakerOn) {

+                    // route audio to back speaker

+                    _audioManager.setMode(AudioManager.MODE_IN_CALL);

+                    _audioManager.setSpeakerphoneOn(loudspeakerOn);

+                } else {

+                    // route audio to earpiece

+                    _audioManager.setSpeakerphoneOn(loudspeakerOn);

+                    _audioManager.setMode(AudioManager.MODE_NORMAL);

+                }

+            } else {

+                // Non-Samsung and Samsung 2.2 and up devices

+                _audioManager.setSpeakerphoneOn(loudspeakerOn);

+            }

+        }

+

+        return 0;

+    }

+

+    @SuppressWarnings("unused")

+    private int SetPlayoutVolume(int level) {

+

+        // create audio manager if needed

+        if (_audioManager == null && _context != null) {

+            _audioManager = (AudioManager)

+                _context.getSystemService(Context.AUDIO_SERVICE);

+        }

+

+        int retVal = -1;

+

+        if (_audioManager != null) {

+            _audioManager.setStreamVolume(AudioManager.STREAM_VOICE_CALL,

+                            level, 0);

+            retVal = 0;

+        }

+

+        return retVal;

+    }

+

+    @SuppressWarnings("unused")

+    private int GetPlayoutVolume() {

+

+        // create audio manager if needed

+        if (_audioManager == null && _context != null) {

+            _audioManager = (AudioManager)

+                _context.getSystemService(Context.AUDIO_SERVICE);

+        }

+

+        int level = -1;

+

+        if (_audioManager != null) {

+            level = _audioManager.getStreamVolume(

+                AudioManager.STREAM_VOICE_CALL);

+        }

+

+        return level;

+    }

+

+    private void SetAudioMode(boolean startCall) {

+        int apiLevel = Integer.parseInt(android.os.Build.VERSION.SDK);

+

+        if (_audioManager == null && _context != null) {

+            _audioManager = (AudioManager)

+                _context.getSystemService(Context.AUDIO_SERVICE);

+        }

+

+        if (_audioManager == null) {

+            DoLogErr("Could not set audio mode - no audio manager");

+            return;

+        }

+

+        // ***IMPORTANT*** When the API level for honeycomb (H) has been

+        // decided,

+        // the condition should be changed to include API level 8 to H-1.

+        if ((android.os.Build.BRAND.equals("Samsung") || android.os.Build.BRAND

+                        .equals("samsung")) && (8 == apiLevel)) {

+            // Set Samsung specific VoIP mode for 2.2 devices

+            int mode =

+                            (startCall ? 4 /* VoIP mode */

+                                            : AudioManager.MODE_NORMAL);

+            _audioManager.setMode(mode);

+            if (_audioManager.getMode() != mode) {

+                DoLogErr("Could not set audio mode for Samsung device");

+            }

+        }

+    }

+

+    final String logTag = "WebRTC AD java";

+

+    private void DoLog(String msg) {

+        Log.d(logTag, msg);

+    }

+

+    private void DoLogErr(String msg) {

+        Log.e(logTag, msg);

+    }

+}

diff --git a/src/modules/audio_device/main/source/audio_device.gypi b/src/modules/audio_device/main/source/audio_device.gypi
new file mode 100644
index 0000000..8457737
--- /dev/null
+++ b/src/modules/audio_device/main/source/audio_device.gypi
@@ -0,0 +1,212 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'audio_device',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/common_audio/common_audio.gyp:resampler',
+        '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '.',
+        '../../../interface',
+        '../interface',
+        'dummy', # dummy audio device
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../../../../',
+          '../../../interface',
+          '../interface',
+        ],
+      },
+      # TODO(xians): Rename files to e.g. *_linux.{ext}, remove sources in conditions section
+      'sources': [
+        '../interface/audio_device.h',
+        '../interface/audio_device_defines.h',
+        'audio_device_buffer.cc',
+        'audio_device_buffer.h',
+        'audio_device_generic.cc',
+        'audio_device_generic.h',
+        'audio_device_utility.cc',
+        'audio_device_utility.h',
+        'audio_device_impl.cc',
+        'audio_device_impl.h',
+        'audio_device_config.h',
+        'dummy/audio_device_dummy.h',
+        'dummy/audio_device_utility_dummy.h',
+      ],
+      'conditions': [
+        ['OS=="linux"', {
+          'include_dirs': [
+            'linux',
+          ],
+        }], # OS==linux
+        ['OS=="mac"', {
+            'include_dirs': [
+              'mac',
+            ],
+        }], # OS==mac
+        ['OS=="win"', {
+            'include_dirs': [
+              'win',
+              '../../../../../..',
+            ],
+        }],
+        ['OS=="android"', {
+            'include_dirs': [
+              'android',
+            ],
+        }], # OS==android
+        ['include_internal_audio_device==0', {
+          'defines': [
+            'WEBRTC_DUMMY_AUDIO_BUILD',
+          ],
+        }],
+        ['include_internal_audio_device==1', {
+          'sources': [
+            'linux/alsasymboltable_linux.cc',
+            'linux/alsasymboltable_linux.h',
+            'linux/audio_device_alsa_linux.cc',
+            'linux/audio_device_alsa_linux.h',
+            'linux/audio_device_utility_linux.cc',
+            'linux/audio_device_utility_linux.h',
+            'linux/audio_mixer_manager_alsa_linux.cc',
+            'linux/audio_mixer_manager_alsa_linux.h',
+            'linux/latebindingsymboltable_linux.cc',
+            'linux/latebindingsymboltable_linux.h',
+            'mac/audio_device_mac.cc',
+            'mac/audio_device_mac.h',
+            'mac/audio_device_utility_mac.cc',
+            'mac/audio_device_utility_mac.h',
+            'mac/audio_mixer_manager_mac.cc',
+            'mac/audio_mixer_manager_mac.h',
+            'mac/portaudio/pa_memorybarrier.h',
+            'mac/portaudio/pa_ringbuffer.c',
+            'mac/portaudio/pa_ringbuffer.h',
+            'win/audio_device_core_win.cc',
+            'win/audio_device_core_win.h',
+            'win/audio_device_wave_win.cc',
+            'win/audio_device_wave_win.h',
+            'win/audio_device_utility_win.cc',
+            'win/audio_device_utility_win.h',
+            'win/audio_mixer_manager_win.cc',
+            'win/audio_mixer_manager_win.h',
+            'android/audio_device_android_opensles.cc',
+            'android/audio_device_android_opensles.h',
+            'android/audio_device_utility_android.cc',
+            'android/audio_device_utility_android.h',
+          ],
+          'conditions': [
+            ['OS=="android"', {
+              'link_settings': {
+                'libraries': [
+                  '-llog',
+                  '-lOpenSLES',
+                ],
+              },
+            }],
+            ['OS=="linux"', {
+              'defines': [
+                'LINUX_ALSA',
+              ],
+              'link_settings': {
+                'libraries': [
+                  '-ldl',
+                ],
+              },
+              'conditions': [
+                ['include_pulse_audio==1', {
+                  'defines': [
+                    'LINUX_PULSE',
+                  ],
+                  'sources': [
+                    'linux/audio_device_pulse_linux.cc',
+                    'linux/audio_device_pulse_linux.h',
+                    'linux/audio_mixer_manager_pulse_linux.cc',
+                    'linux/audio_mixer_manager_pulse_linux.h',
+                    'linux/pulseaudiosymboltable_linux.cc',
+                    'linux/pulseaudiosymboltable_linux.h',
+                  ],
+                }],
+              ],
+            }],
+            ['OS=="mac"', {
+              'link_settings': {
+                'libraries': [
+                  '$(SDKROOT)/System/Library/Frameworks/AudioToolbox.framework',
+                  '$(SDKROOT)/System/Library/Frameworks/CoreAudio.framework',
+                ],
+              },
+            }],
+            ['OS=="win"', {
+              'link_settings': {
+                'libraries': [
+                  # Required for the built-in WASAPI AEC.
+                  '-ldmoguids.lib',
+                  '-lwmcodecdspuuid.lib',
+                  '-lamstrmid.lib',
+                  '-lmsdmo.lib',
+                ],
+              },
+            }],
+          ], # conditions
+        }], # include_internal_audio_device==1
+      ], # conditions
+    },
+  ],
+  'conditions': [
+    ['include_tests==1', {
+      'targets': [
+        {
+          'target_name': 'audio_device_test_api',
+         'type': 'executable',
+         'dependencies': [
+            'audio_device',
+            'webrtc_utility',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+          ],
+          'sources': [
+            '../test/audio_device_test_api.cc',
+            '../test/audio_device_test_defines.h',
+          ],
+        },
+        {
+          'target_name': 'audio_device_test_func',
+          'type': 'executable',
+          'dependencies': [
+            'audio_device',
+            'webrtc_utility',
+            '<(webrtc_root)/common_audio/common_audio.gyp:resampler',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+            '<(webrtc_root)/test/test.gyp:test_support',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+          ],
+          'sources': [
+            '../test/audio_device_test_func.cc',
+            '../test/audio_device_test_defines.h',
+            '../test/func_test_manager.cc',
+            '../test/func_test_manager.h',
+          ],
+        },
+      ],
+    }],
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/modules/audio_device/main/source/audio_device_buffer.cc b/src/modules/audio_device/main/source/audio_device_buffer.cc
new file mode 100644
index 0000000..83bb450
--- /dev/null
+++ b/src/modules/audio_device/main/source/audio_device_buffer.cc
@@ -0,0 +1,654 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "trace.h"
+#include "critical_section_wrapper.h"
+#include "audio_device_buffer.h"
+#include "audio_device_utility.h"
+#include "audio_device_config.h"
+
+#include <stdlib.h>
+#include <string.h>
+#include <cassert>
+
+#include "signal_processing_library.h"
+
+namespace webrtc {
+
+// ----------------------------------------------------------------------------
+//  ctor
+// ----------------------------------------------------------------------------
+
+AudioDeviceBuffer::AudioDeviceBuffer() :
+    _id(-1),
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _critSectCb(*CriticalSectionWrapper::CreateCriticalSection()),
+    _ptrCbAudioTransport(NULL),
+    _recSampleRate(0),
+    _playSampleRate(0),
+    _recChannels(0),
+    _playChannels(0),
+    _recChannel(AudioDeviceModule::kChannelBoth),
+    _recBytesPerSample(0),
+    _playBytesPerSample(0),
+    _recSamples(0),
+    _recSize(0),
+    _playSamples(0),
+    _playSize(0),
+    _recFile(*FileWrapper::Create()),
+    _playFile(*FileWrapper::Create()),
+    _currentMicLevel(0),
+    _newMicLevel(0),
+    _playDelayMS(0),
+    _recDelayMS(0),
+    _clockDrift(0),
+    _measureDelay(false),    // should always be 'false' (EXPERIMENTAL)
+    _pulseList(),
+    _lastPulseTime(AudioDeviceUtility::GetTimeInMS())
+{
+    // valid ID will be set later by SetId, use -1 for now
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s created", __FUNCTION__);
+    memset(_recBuffer, 0, kMaxBufferSizeBytes);
+    memset(_playBuffer, 0, kMaxBufferSizeBytes);
+}
+
+// ----------------------------------------------------------------------------
+//  dtor
+// ----------------------------------------------------------------------------
+
+AudioDeviceBuffer::~AudioDeviceBuffer()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s destroyed", __FUNCTION__);
+    {
+        CriticalSectionScoped lock(&_critSect);
+
+        _recFile.Flush();
+        _recFile.CloseFile();
+        delete &_recFile;
+
+        _playFile.Flush();
+        _playFile.CloseFile();
+        delete &_playFile;
+
+        _EmptyList();
+    }
+
+    delete &_critSect;
+    delete &_critSectCb;
+}
+
+// ----------------------------------------------------------------------------
+//  SetId
+// ----------------------------------------------------------------------------
+
+void AudioDeviceBuffer::SetId(WebRtc_UWord32 id)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id, "AudioDeviceBuffer::SetId(id=%d)", id);
+    _id = id;
+}
+
+// ----------------------------------------------------------------------------
+//  RegisterAudioCallback
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::RegisterAudioCallback(AudioTransport* audioCallback)
+{
+    CriticalSectionScoped lock(&_critSectCb);
+    _ptrCbAudioTransport = audioCallback;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::InitPlayout()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_measureDelay)
+    {
+        _EmptyList();
+        _lastPulseTime = AudioDeviceUtility::GetTimeInMS();
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::InitRecording()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_measureDelay)
+    {
+        _EmptyList();
+        _lastPulseTime = AudioDeviceUtility::GetTimeInMS();
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingSampleRate
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::SetRecordingSampleRate(WebRtc_UWord32 fsHz)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "AudioDeviceBuffer::SetRecordingSampleRate(fsHz=%u)", fsHz);
+
+    CriticalSectionScoped lock(&_critSect);
+    _recSampleRate = fsHz;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutSampleRate
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::SetPlayoutSampleRate(WebRtc_UWord32 fsHz)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "AudioDeviceBuffer::SetPlayoutSampleRate(fsHz=%u)", fsHz);
+
+    CriticalSectionScoped lock(&_critSect);
+    _playSampleRate = fsHz;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingSampleRate
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::RecordingSampleRate() const
+{
+    return _recSampleRate;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutSampleRate
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::PlayoutSampleRate() const
+{
+    return _playSampleRate;
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingChannels
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::SetRecordingChannels(WebRtc_UWord8 channels)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "AudioDeviceBuffer::SetRecordingChannels(channels=%u)", channels);
+
+    CriticalSectionScoped lock(&_critSect);
+    _recChannels = channels;
+    _recBytesPerSample = 2*channels;  // 16 bits per sample in mono, 32 bits in stereo
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutChannels
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::SetPlayoutChannels(WebRtc_UWord8 channels)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "AudioDeviceBuffer::SetPlayoutChannels(channels=%u)", channels);
+
+    CriticalSectionScoped lock(&_critSect);
+    _playChannels = channels;
+    // 16 bits per sample in mono, 32 bits in stereo
+    _playBytesPerSample = 2*channels;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingChannel
+//
+//  Select which channel to use while recording.
+//  This API requires that stereo is enabled.
+//
+//  Note that, the nChannel parameter in RecordedDataIsAvailable will be
+//  set to 2 even for kChannelLeft and kChannelRight. However, nBytesPerSample
+//  will be 2 instead of 4 four these cases.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::SetRecordingChannel(const AudioDeviceModule::ChannelType channel)
+{
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_recChannels == 1)
+    {
+        return -1;
+    }
+
+    if (channel == AudioDeviceModule::kChannelBoth)
+    {
+        // two bytes per channel
+        _recBytesPerSample = 4;
+    }
+    else
+    {
+        // only utilize one out of two possible channels (left or right)
+        _recBytesPerSample = 2;
+    }
+    _recChannel = channel;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingChannel
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::RecordingChannel(AudioDeviceModule::ChannelType& channel) const
+{
+    channel = _recChannel;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingChannels
+// ----------------------------------------------------------------------------
+
+WebRtc_UWord8 AudioDeviceBuffer::RecordingChannels() const
+{
+    return _recChannels;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutChannels
+// ----------------------------------------------------------------------------
+
+WebRtc_UWord8 AudioDeviceBuffer::PlayoutChannels() const
+{
+    return _playChannels;
+}
+
+// ----------------------------------------------------------------------------
+//  SetCurrentMicLevel
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::SetCurrentMicLevel(WebRtc_UWord32 level)
+{
+    _currentMicLevel = level;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  NewMicLevel
+// ----------------------------------------------------------------------------
+
+WebRtc_UWord32 AudioDeviceBuffer::NewMicLevel() const
+{
+    return _newMicLevel;
+}
+
+// ----------------------------------------------------------------------------
+//  SetVQEData
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::SetVQEData(WebRtc_UWord32 playDelayMS, WebRtc_UWord32 recDelayMS, WebRtc_Word32 clockDrift)
+{
+    if ((playDelayMS + recDelayMS) > 300)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "too long delay (play:%i rec:%i)", playDelayMS, recDelayMS, clockDrift);
+    }
+
+    _playDelayMS = playDelayMS;
+    _recDelayMS = recDelayMS;
+    _clockDrift = clockDrift;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StartInputFileRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::StartInputFileRecording(
+    const char fileName[kAdmMaxFileNameSize])
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    _recFile.Flush();
+    _recFile.CloseFile();
+
+    return (_recFile.OpenFile(fileName, false, false, false));
+}
+
+// ----------------------------------------------------------------------------
+//  StopInputFileRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::StopInputFileRecording()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    _recFile.Flush();
+    _recFile.CloseFile();
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StartOutputFileRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::StartOutputFileRecording(
+    const char fileName[kAdmMaxFileNameSize])
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    _playFile.Flush();
+    _playFile.CloseFile();
+
+    return (_playFile.OpenFile(fileName, false, false, false));
+}
+
+// ----------------------------------------------------------------------------
+//  StopOutputFileRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::StopOutputFileRecording()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    _playFile.Flush();
+    _playFile.CloseFile();
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordedBuffer
+//
+//  Store recorded audio buffer in local memory ready for the actual
+//  "delivery" using a callback.
+//
+//  This method can also parse out left or right channel from a stereo
+//  input signal, i.e., emulate mono.
+//
+//  Examples:
+//
+//  16-bit,48kHz mono,  10ms => nSamples=480 => _recSize=2*480=960 bytes
+//  16-bit,48kHz stereo,10ms => nSamples=480 => _recSize=4*480=1920 bytes
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::SetRecordedBuffer(const void* audioBuffer,
+                                                   WebRtc_UWord32 nSamples)
+{
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_recBytesPerSample == 0)
+    {
+        assert(false);
+        return -1;
+    }
+
+    _recSamples = nSamples;
+    _recSize = _recBytesPerSample*nSamples; // {2,4}*nSamples
+    if (_recSize > kMaxBufferSizeBytes)
+    {
+        assert(false);
+        return -1;
+    }
+
+    if (nSamples != _recSamples)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "invalid number of recorded samples (%d)", nSamples);
+        return -1;
+    }
+
+    if (_recChannel == AudioDeviceModule::kChannelBoth)
+    {
+        // (default) copy the complete input buffer to the local buffer
+        memcpy(&_recBuffer[0], audioBuffer, _recSize);
+    }
+    else
+    {
+        WebRtc_Word16* ptr16In = (WebRtc_Word16*)audioBuffer;
+        WebRtc_Word16* ptr16Out = (WebRtc_Word16*)&_recBuffer[0];
+
+        if (AudioDeviceModule::kChannelRight == _recChannel)
+        {
+            ptr16In++;
+        }
+
+        // exctract left or right channel from input buffer to the local buffer
+        for (WebRtc_UWord32 i = 0; i < _recSamples; i++)
+        {
+            *ptr16Out = *ptr16In;
+            ptr16Out++;
+            ptr16In++;
+            ptr16In++;
+        }
+    }
+
+    if (_recFile.Open())
+    {
+        // write to binary file in mono or stereo (interleaved)
+        _recFile.Write(&_recBuffer[0], _recSize);
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  DeliverRecordedData
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::DeliverRecordedData()
+{
+    CriticalSectionScoped lock(&_critSectCb);
+
+    // Ensure that user has initialized all essential members
+    if ((_recSampleRate == 0)     ||
+        (_recSamples == 0)        ||
+        (_recBytesPerSample == 0) ||
+        (_recChannels == 0))
+    {
+        assert(false);
+        return -1;
+    }
+
+    if (_ptrCbAudioTransport == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "failed to deliver recorded data (AudioTransport does not exist)");
+        return 0;
+    }
+
+    WebRtc_Word32 res(0);
+    WebRtc_UWord32 newMicLevel(0);
+    WebRtc_UWord32 totalDelayMS = _playDelayMS +_recDelayMS;
+
+    if (_measureDelay)
+    {
+        CriticalSectionScoped lock(&_critSect);
+
+        memset(&_recBuffer[0], 0, _recSize);
+        WebRtc_UWord32 time = AudioDeviceUtility::GetTimeInMS();
+        if (time - _lastPulseTime > 500)
+        {
+            _pulseList.PushBack(time);
+            _lastPulseTime = time;
+
+            WebRtc_Word16* ptr16 = (WebRtc_Word16*)&_recBuffer[0];
+            *ptr16 = 30000;
+        }
+    }
+
+    res = _ptrCbAudioTransport->RecordedDataIsAvailable(&_recBuffer[0],
+                                                        _recSamples,
+                                                        _recBytesPerSample,
+                                                        _recChannels,
+                                                        _recSampleRate,
+                                                        totalDelayMS,
+                                                        _clockDrift,
+                                                        _currentMicLevel,
+                                                        newMicLevel);
+    if (res != -1)
+    {
+        _newMicLevel = newMicLevel;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RequestPlayoutData
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::RequestPlayoutData(WebRtc_UWord32 nSamples)
+{
+    {
+        CriticalSectionScoped lock(&_critSect);
+
+        // Ensure that user has initialized all essential members
+        if ((_playBytesPerSample == 0) ||
+            (_playChannels == 0)       ||
+            (_playSampleRate == 0))
+        {
+            assert(false);
+            return -1;
+        }
+
+        _playSamples = nSamples;
+        _playSize = _playBytesPerSample * nSamples;  // {2,4}*nSamples
+        if (_playSize > kMaxBufferSizeBytes)
+        {
+            assert(false);
+            return -1;
+        }
+
+        if (nSamples != _playSamples)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "invalid number of samples to be played out (%d)", nSamples);
+            return -1;
+        }
+    }
+
+    WebRtc_UWord32 nSamplesOut(0);
+
+    CriticalSectionScoped lock(&_critSectCb);
+
+    if (_ptrCbAudioTransport == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "failed to feed data to playout (AudioTransport does not exist)");
+        return 0;
+    }
+
+    if (_ptrCbAudioTransport)
+    {
+        WebRtc_UWord32 res(0);
+
+        res = _ptrCbAudioTransport->NeedMorePlayData(_playSamples,
+                                                     _playBytesPerSample,
+                                                     _playChannels,
+                                                     _playSampleRate,
+                                                     &_playBuffer[0],
+                                                     nSamplesOut);
+        if (res != 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "NeedMorePlayData() failed");
+        }
+
+        // --- Experimental delay-measurement implementation
+        // *** not be used in released code ***
+
+        if (_measureDelay)
+        {
+            CriticalSectionScoped lock(&_critSect);
+
+            WebRtc_Word16 maxAbs = WebRtcSpl_MaxAbsValueW16((const WebRtc_Word16*)&_playBuffer[0], (WebRtc_Word16)nSamplesOut*_playChannels);
+            if (maxAbs > 1000)
+            {
+                WebRtc_UWord32 nowTime = AudioDeviceUtility::GetTimeInMS();
+
+                if (!_pulseList.Empty())
+                {
+                    ListItem* item = _pulseList.First();
+                    if (item)
+                    {
+                        WebRtc_Word16 maxIndex = WebRtcSpl_MaxAbsIndexW16((const WebRtc_Word16*)&_playBuffer[0], (WebRtc_Word16)nSamplesOut*_playChannels);
+                        WebRtc_UWord32 pulseTime = item->GetUnsignedItem();
+                        WebRtc_UWord32 diff = nowTime - pulseTime + (10*maxIndex)/(nSamplesOut*_playChannels);
+                        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "diff time in playout delay (%d)", diff);
+                    }
+                    _pulseList.PopFront();
+                }
+            }
+        }
+    }
+
+    return nSamplesOut;
+}
+
+// ----------------------------------------------------------------------------
+//  GetPlayoutData
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceBuffer::GetPlayoutData(void* audioBuffer)
+{
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_playSize > kMaxBufferSizeBytes)
+    {
+       WEBRTC_TRACE(kTraceError, kTraceUtility, _id, "_playSize %i exceeds "
+       "kMaxBufferSizeBytes in AudioDeviceBuffer::GetPlayoutData", _playSize);
+       assert(false);
+       return -1;
+    }
+
+    memcpy(audioBuffer, &_playBuffer[0], _playSize);
+
+    if (_playFile.Open())
+    {
+        // write to binary file in mono or stereo (interleaved)
+        _playFile.Write(&_playBuffer[0], _playSize);
+    }
+
+    return _playSamples;
+}
+
+// ----------------------------------------------------------------------------
+//  _EmptyList
+// ----------------------------------------------------------------------------
+
+void AudioDeviceBuffer::_EmptyList()
+{
+    while (!_pulseList.Empty())
+    {
+        ListItem* item = _pulseList.First();
+        if (item)
+        {
+            // WebRtc_UWord32 ts = item->GetUnsignedItem();
+        }
+        _pulseList.PopFront();
+    }
+}
+
+}  // namespace webrtc
diff --git a/src/modules/audio_device/main/source/audio_device_buffer.h b/src/modules/audio_device/main/source/audio_device_buffer.h
new file mode 100644
index 0000000..4bc374a
--- /dev/null
+++ b/src/modules/audio_device/main/source/audio_device_buffer.h
@@ -0,0 +1,129 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_BUFFER_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_BUFFER_H
+
+#include "typedefs.h"
+#include "../../../../common_audio/resampler/include/resampler.h"
+#include "file_wrapper.h"
+#include "audio_device.h"
+#include "list_wrapper.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+const WebRtc_UWord32 kPulsePeriodMs = 1000;
+const WebRtc_UWord32 kMaxBufferSizeBytes = 3840; // 10ms in stereo @ 96kHz
+
+class AudioDeviceObserver;
+class MediaFile;
+
+class AudioDeviceBuffer
+{
+public:
+    void SetId(WebRtc_UWord32 id);
+    WebRtc_Word32 RegisterAudioCallback(AudioTransport* audioCallback);
+
+    WebRtc_Word32 InitPlayout();
+    WebRtc_Word32 InitRecording();
+
+    WebRtc_Word32 SetRecordingSampleRate(WebRtc_UWord32 fsHz);
+    WebRtc_Word32 SetPlayoutSampleRate(WebRtc_UWord32 fsHz);
+    WebRtc_Word32 RecordingSampleRate() const;
+    WebRtc_Word32 PlayoutSampleRate() const;
+
+    WebRtc_Word32 SetRecordingChannels(WebRtc_UWord8 channels);
+    WebRtc_Word32 SetPlayoutChannels(WebRtc_UWord8 channels);
+    WebRtc_UWord8 RecordingChannels() const;
+    WebRtc_UWord8 PlayoutChannels() const;
+    WebRtc_Word32 SetRecordingChannel(
+        const AudioDeviceModule::ChannelType channel);
+    WebRtc_Word32 RecordingChannel(
+        AudioDeviceModule::ChannelType& channel) const;
+
+    WebRtc_Word32 SetRecordedBuffer(const void* audioBuffer,
+                                    WebRtc_UWord32 nSamples);
+    WebRtc_Word32 SetCurrentMicLevel(WebRtc_UWord32 level);
+    WebRtc_Word32 SetVQEData(WebRtc_UWord32 playDelayMS,
+                             WebRtc_UWord32 recDelayMS,
+                             WebRtc_Word32 clockDrift);
+    WebRtc_Word32 DeliverRecordedData();
+    WebRtc_UWord32 NewMicLevel() const;
+
+    WebRtc_Word32 RequestPlayoutData(WebRtc_UWord32 nSamples);
+    WebRtc_Word32 GetPlayoutData(void* audioBuffer);
+
+    WebRtc_Word32 StartInputFileRecording(
+        const char fileName[kAdmMaxFileNameSize]);
+    WebRtc_Word32 StopInputFileRecording();
+    WebRtc_Word32 StartOutputFileRecording(
+        const char fileName[kAdmMaxFileNameSize]);
+    WebRtc_Word32 StopOutputFileRecording();
+
+    AudioDeviceBuffer();
+    ~AudioDeviceBuffer();
+
+private:
+    void _EmptyList();
+
+private:
+    WebRtc_Word32                   _id;
+    CriticalSectionWrapper&         _critSect;
+    CriticalSectionWrapper&         _critSectCb;
+
+    AudioTransport*                 _ptrCbAudioTransport;
+
+    WebRtc_UWord32                  _recSampleRate;
+    WebRtc_UWord32                  _playSampleRate;
+
+    WebRtc_UWord8                   _recChannels;
+    WebRtc_UWord8                   _playChannels;
+
+    // selected recording channel (left/right/both)
+    AudioDeviceModule::ChannelType _recChannel;
+
+    // 2 or 4 depending on mono or stereo
+    WebRtc_UWord8                   _recBytesPerSample;
+    WebRtc_UWord8                   _playBytesPerSample;
+
+    // 10ms in stereo @ 96kHz
+    int8_t                          _recBuffer[kMaxBufferSizeBytes];
+
+    // one sample <=> 2 or 4 bytes
+    WebRtc_UWord32                  _recSamples;
+    WebRtc_UWord32                  _recSize;           // in bytes
+
+    // 10ms in stereo @ 96kHz
+    int8_t                          _playBuffer[kMaxBufferSizeBytes];
+
+    // one sample <=> 2 or 4 bytes
+    WebRtc_UWord32                  _playSamples;
+    WebRtc_UWord32                  _playSize;          // in bytes
+
+    FileWrapper&                    _recFile;
+    FileWrapper&                    _playFile;
+
+    WebRtc_UWord32                  _currentMicLevel;
+    WebRtc_UWord32                  _newMicLevel;
+
+    WebRtc_UWord32                  _playDelayMS;
+    WebRtc_UWord32                  _recDelayMS;
+
+    WebRtc_Word32                   _clockDrift;
+
+    bool                            _measureDelay;
+    ListWrapper                     _pulseList;
+    WebRtc_UWord32                  _lastPulseTime;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_BUFFER_H
diff --git a/src/modules/audio_device/main/source/audio_device_config.h b/src/modules/audio_device/main/source/audio_device_config.h
new file mode 100644
index 0000000..23b9d55
--- /dev/null
+++ b/src/modules/audio_device/main/source/audio_device_config.h
@@ -0,0 +1,44 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_CONFIG_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_CONFIG_H
+
+// Enumerators
+//
+enum { kAdmMaxIdleTimeProcess = 1000 };
+enum { GET_MIC_VOLUME_INTERVAL_MS = 1000 };
+
+// Platform specifics
+//
+#if defined(_WIN32)
+#if (_MSC_VER >= 1400)
+// Windows Core Audio is the default audio layer in Windows.
+// Only supported for VS 2005 and higher.
+#define WEBRTC_WINDOWS_CORE_AUDIO_BUILD
+#endif
+#endif
+
+#if (defined(_DEBUG) && defined(_WIN32) && (_MSC_VER >= 1400))
+#include <windows.h>
+#include <tchar.h>
+#include <strsafe.h>
+#define DEBUG_PRINT(...)		            \
+{								            \
+	TCHAR msg[256];				            \
+	StringCchPrintf(msg, 256, __VA_ARGS__);	\
+	OutputDebugString(msg);		            \
+}
+#else
+#define DEBUG_PRINT(exp)		((void)0)
+#endif
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_CONFIG_H
+
diff --git a/src/modules/audio_device/main/source/audio_device_generic.cc b/src/modules/audio_device/main/source/audio_device_generic.cc
new file mode 100644
index 0000000..7093d80
--- /dev/null
+++ b/src/modules/audio_device/main/source/audio_device_generic.cc
@@ -0,0 +1,76 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_device_generic.h"
+#include "trace.h"
+
+namespace webrtc {
+
+WebRtc_Word32 AudioDeviceGeneric::SetRecordingSampleRate(
+    const WebRtc_UWord32 samplesPerSec)
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+        "Set recording sample rate not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceGeneric::SetPlayoutSampleRate(
+    const WebRtc_UWord32 samplesPerSec)
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+        "Set playout sample rate not supported on this platform");
+    return -1;
+}
+	
+WebRtc_Word32 AudioDeviceGeneric::SetLoudspeakerStatus(bool enable)
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+        "Set loudspeaker status not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceGeneric::GetLoudspeakerStatus(bool& enable) const
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+        "Get loudspeaker status not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceGeneric::ResetAudioDevice()
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+        "Reset audio device not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceGeneric::SoundDeviceControl(unsigned int par1,
+    unsigned int par2, unsigned int par3, unsigned int par4)
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+        "Sound device control not supported on this platform");
+    return -1;
+}
+
+int32_t AudioDeviceGeneric::EnableBuiltInAEC(bool enable)
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+        "Windows AEC not supported on this platform");
+    return -1;
+}
+
+bool AudioDeviceGeneric::BuiltInAECIsEnabled() const
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+        "Windows AEC not supported on this platform");
+    return false;
+}
+
+}  // namespace webrtc
+
diff --git a/src/modules/audio_device/main/source/audio_device_generic.h b/src/modules/audio_device/main/source/audio_device_generic.h
new file mode 100644
index 0000000..0c14448
--- /dev/null
+++ b/src/modules/audio_device/main/source/audio_device_generic.h
@@ -0,0 +1,183 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_GENERIC_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_GENERIC_H
+
+#include "audio_device.h"
+#include "audio_device_buffer.h"
+
+namespace webrtc {
+
+class AudioDeviceGeneric
+{
+ public:
+
+	// Retrieve the currently utilized audio layer
+	virtual WebRtc_Word32 ActiveAudioLayer(
+        AudioDeviceModule::AudioLayer& audioLayer) const = 0;
+
+	// Main initializaton and termination
+    virtual WebRtc_Word32 Init() = 0;
+    virtual WebRtc_Word32 Terminate() = 0;
+	virtual bool Initialized() const = 0;
+
+	// Device enumeration
+	virtual WebRtc_Word16 PlayoutDevices() = 0;
+	virtual WebRtc_Word16 RecordingDevices() = 0;
+	virtual WebRtc_Word32 PlayoutDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize]) = 0;
+    virtual WebRtc_Word32 RecordingDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize]) = 0;
+
+	// Device selection
+	virtual WebRtc_Word32 SetPlayoutDevice(WebRtc_UWord16 index) = 0;
+	virtual WebRtc_Word32 SetPlayoutDevice(
+        AudioDeviceModule::WindowsDeviceType device) = 0;
+    virtual WebRtc_Word32 SetRecordingDevice(WebRtc_UWord16 index) = 0;
+	virtual WebRtc_Word32 SetRecordingDevice(
+        AudioDeviceModule::WindowsDeviceType device) = 0;
+
+	// Audio transport initialization
+    virtual WebRtc_Word32 PlayoutIsAvailable(bool& available) = 0;
+    virtual WebRtc_Word32 InitPlayout() = 0;
+    virtual bool PlayoutIsInitialized() const = 0;
+    virtual WebRtc_Word32 RecordingIsAvailable(bool& available) = 0;
+    virtual WebRtc_Word32 InitRecording() = 0;
+    virtual bool RecordingIsInitialized() const = 0;
+
+	// Audio transport control
+    virtual WebRtc_Word32 StartPlayout() = 0;
+    virtual WebRtc_Word32 StopPlayout() = 0;
+    virtual bool Playing() const = 0;
+	virtual WebRtc_Word32 StartRecording() = 0;
+    virtual WebRtc_Word32 StopRecording() = 0;
+    virtual bool Recording() const = 0;
+
+    // Microphone Automatic Gain Control (AGC)
+    virtual WebRtc_Word32 SetAGC(bool enable) = 0;
+    virtual bool AGC() const = 0;
+
+    // Volume control based on the Windows Wave API (Windows only)
+    virtual WebRtc_Word32 SetWaveOutVolume(WebRtc_UWord16 volumeLeft,
+                                           WebRtc_UWord16 volumeRight) = 0;
+    virtual WebRtc_Word32 WaveOutVolume(WebRtc_UWord16& volumeLeft,
+                                        WebRtc_UWord16& volumeRight) const = 0;
+
+	// Audio mixer initialization
+	virtual WebRtc_Word32 SpeakerIsAvailable(bool& available) = 0;
+    virtual WebRtc_Word32 InitSpeaker() = 0;
+    virtual bool SpeakerIsInitialized() const = 0;
+	virtual WebRtc_Word32 MicrophoneIsAvailable(bool& available) = 0;
+    virtual WebRtc_Word32 InitMicrophone() = 0;
+    virtual bool MicrophoneIsInitialized() const = 0;
+
+    // Speaker volume controls
+	virtual WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available) = 0;
+    virtual WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume) = 0;
+    virtual WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const = 0;
+    virtual WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const = 0;
+    virtual WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const = 0;
+    virtual WebRtc_Word32 SpeakerVolumeStepSize(
+        WebRtc_UWord16& stepSize) const = 0;
+
+    // Microphone volume controls
+	virtual WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available) = 0;
+    virtual WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume) = 0;
+    virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const = 0;
+    virtual WebRtc_Word32 MaxMicrophoneVolume(
+        WebRtc_UWord32& maxVolume) const = 0;
+    virtual WebRtc_Word32 MinMicrophoneVolume(
+        WebRtc_UWord32& minVolume) const = 0;
+    virtual WebRtc_Word32 MicrophoneVolumeStepSize(
+        WebRtc_UWord16& stepSize) const = 0;
+
+    // Speaker mute control
+    virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool& available) = 0;
+    virtual WebRtc_Word32 SetSpeakerMute(bool enable) = 0;
+    virtual WebRtc_Word32 SpeakerMute(bool& enabled) const = 0;
+
+	// Microphone mute control
+    virtual WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available) = 0;
+    virtual WebRtc_Word32 SetMicrophoneMute(bool enable) = 0;
+    virtual WebRtc_Word32 MicrophoneMute(bool& enabled) const = 0;
+
+    // Microphone boost control
+    virtual WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available) = 0;
+	virtual WebRtc_Word32 SetMicrophoneBoost(bool enable) = 0;
+    virtual WebRtc_Word32 MicrophoneBoost(bool& enabled) const = 0;
+
+    // Stereo support
+    virtual WebRtc_Word32 StereoPlayoutIsAvailable(bool& available) = 0;
+	virtual WebRtc_Word32 SetStereoPlayout(bool enable) = 0;
+    virtual WebRtc_Word32 StereoPlayout(bool& enabled) const = 0;
+    virtual WebRtc_Word32 StereoRecordingIsAvailable(bool& available) = 0;
+    virtual WebRtc_Word32 SetStereoRecording(bool enable) = 0;
+    virtual WebRtc_Word32 StereoRecording(bool& enabled) const = 0;
+
+    // Delay information and control
+	virtual WebRtc_Word32 SetPlayoutBuffer(
+        const AudioDeviceModule::BufferType type,
+        WebRtc_UWord16 sizeMS = 0) = 0;
+    virtual WebRtc_Word32 PlayoutBuffer(
+        AudioDeviceModule::BufferType& type, WebRtc_UWord16& sizeMS) const = 0;
+    virtual WebRtc_Word32 PlayoutDelay(WebRtc_UWord16& delayMS) const = 0;
+	virtual WebRtc_Word32 RecordingDelay(WebRtc_UWord16& delayMS) const = 0;
+
+    // CPU load
+    virtual WebRtc_Word32 CPULoad(WebRtc_UWord16& load) const = 0;
+    
+    // Native sample rate controls (samples/sec)
+	virtual WebRtc_Word32 SetRecordingSampleRate(
+        const WebRtc_UWord32 samplesPerSec);
+	virtual WebRtc_Word32 SetPlayoutSampleRate(
+        const WebRtc_UWord32 samplesPerSec);
+
+    // Speaker audio routing (for mobile devices)
+    virtual WebRtc_Word32 SetLoudspeakerStatus(bool enable);
+    virtual WebRtc_Word32 GetLoudspeakerStatus(bool& enable) const;
+    
+    // Reset Audio Device (for mobile devices)
+    virtual WebRtc_Word32 ResetAudioDevice();
+
+    // Sound Audio Device control (for WinCE only)
+    virtual WebRtc_Word32 SoundDeviceControl(unsigned int par1 = 0,
+                                             unsigned int par2 = 0,
+                                             unsigned int par3 = 0,
+                                             unsigned int par4 = 0);
+
+    // Windows Core Audio only.
+    virtual int32_t EnableBuiltInAEC(bool enable);
+    virtual bool BuiltInAECIsEnabled() const;
+
+public:
+    virtual bool PlayoutWarning() const = 0;
+    virtual bool PlayoutError() const = 0;
+    virtual bool RecordingWarning() const = 0;
+    virtual bool RecordingError() const = 0;
+    virtual void ClearPlayoutWarning() = 0;
+    virtual void ClearPlayoutError() = 0;
+    virtual void ClearRecordingWarning() = 0;
+    virtual void ClearRecordingError() = 0;
+
+public:
+    virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0;
+
+    virtual ~AudioDeviceGeneric() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_GENERIC_H
+
diff --git a/src/modules/audio_device/main/source/audio_device_impl.cc b/src/modules/audio_device/main/source/audio_device_impl.cc
new file mode 100644
index 0000000..1d7b824
--- /dev/null
+++ b/src/modules/audio_device/main/source/audio_device_impl.cc
@@ -0,0 +1,2077 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_device_impl.h"
+#include "audio_device_config.h"
+#include "system_wrappers/interface/ref_count.h"
+
+#include <assert.h>
+#include <string.h>
+
+#if defined(_WIN32)
+    #include "audio_device_utility_win.h"
+    #include "audio_device_wave_win.h"
+ #if defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+    #include "audio_device_core_win.h"
+ #endif
+#elif defined(WEBRTC_ANDROID_OPENSLES)
+    #include <stdlib.h>
+    #include "audio_device_utility_android.h"
+    #include "audio_device_android_opensles.h"
+#elif defined(WEBRTC_ANDROID)
+    #include <stdlib.h>
+    #include "audio_device_utility_android.h"
+    #include "audio_device_android_jni.h"
+#elif defined(WEBRTC_LINUX)
+    #include "audio_device_utility_linux.h"
+ #if defined(LINUX_ALSA)
+    #include "audio_device_alsa_linux.h"
+ #endif
+ #if defined(LINUX_PULSE)
+    #include "audio_device_pulse_linux.h"
+ #endif
+#elif defined(MAC_IPHONE)
+    #include "audio_device_utility_iphone.h"
+    #include "audio_device_iphone.h"
+#elif (defined(WEBRTC_MAC_INTEL) || defined(WEBRTC_MAC))
+    #include "audio_device_utility_mac.h"
+    #include "audio_device_mac.h"
+#endif
+#include "audio_device_dummy.h"
+#include "audio_device_utility_dummy.h"
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+#define CHECK_INITIALIZED()         \
+{                                   \
+    if (!_initialized) {            \
+        return -1;                  \
+    };                              \
+}
+
+#define CHECK_INITIALIZED_BOOL()    \
+{                                   \
+    if (!_initialized) {            \
+        return false;               \
+    };                              \
+}
+
+namespace webrtc
+{
+
+AudioDeviceModule* CreateAudioDeviceModule(
+    WebRtc_Word32 id, AudioDeviceModule::AudioLayer audioLayer) {
+  return AudioDeviceModuleImpl::Create(id, audioLayer);
+}
+
+
+// ============================================================================
+//                                   Static methods
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceModule::Create()
+// ----------------------------------------------------------------------------
+
+AudioDeviceModule* AudioDeviceModuleImpl::Create(const WebRtc_Word32 id,
+                                                 const AudioLayer audioLayer)
+{
+
+    // Create the generic ref counted (platform independent) implementation.
+    RefCountImpl<AudioDeviceModuleImpl>* audioDevice =
+        new RefCountImpl<AudioDeviceModuleImpl>(id, audioLayer);
+
+    // Ensure that the current platform is supported.
+    if (audioDevice->CheckPlatform() == -1)
+    {
+        delete audioDevice;
+        return NULL;
+    }
+
+    // Create the platform-dependent implementation.
+    if (audioDevice->CreatePlatformSpecificObjects() == -1)
+    {
+        delete audioDevice;
+        return NULL;
+    }
+
+    // Ensure that the generic audio buffer can communicate with the
+    // platform-specific parts.
+    if (audioDevice->AttachAudioBuffer() == -1)
+    {
+        delete audioDevice;
+        return NULL;
+    }
+
+    return audioDevice;
+}
+
+// ============================================================================
+//                            Construction & Destruction
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceModuleImpl - ctor
+// ----------------------------------------------------------------------------
+
+AudioDeviceModuleImpl::AudioDeviceModuleImpl(const WebRtc_Word32 id, const AudioLayer audioLayer) :
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _critSectEventCb(*CriticalSectionWrapper::CreateCriticalSection()),
+    _critSectAudioCb(*CriticalSectionWrapper::CreateCriticalSection()),
+    _ptrCbAudioDeviceObserver(NULL),
+    _ptrAudioDeviceUtility(NULL),
+    _ptrAudioDevice(NULL),
+    _id(id),
+    _platformAudioLayer(audioLayer),
+    _lastProcessTime(AudioDeviceUtility::GetTimeInMS()),
+    _platformType(kPlatformNotSupported),
+    _initialized(false),
+    _lastError(kAdmErrNone)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id, "%s created", __FUNCTION__);
+}
+
+// ----------------------------------------------------------------------------
+//  CheckPlatform
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::CheckPlatform()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    // Ensure that the current platform is supported
+    //
+    PlatformType platform(kPlatformNotSupported);
+
+#if defined(_WIN32)
+    platform = kPlatformWin32;
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "current platform is WIN32");
+#elif defined(WEBRTC_ANDROID)
+    platform = kPlatformAndroid;
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "current platform is ANDROID");
+#elif defined(WEBRTC_LINUX)
+    platform = kPlatformLinux;
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "current platform is LINUX");
+#elif (defined(WEBRTC_MAC_INTEL) || defined(WEBRTC_MAC))
+    platform = kPlatformMac;
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "current platform is MAC");
+#endif
+
+    if (platform == kPlatformNotSupported)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, "current platform is not supported => this module will self destruct!");
+        return -1;
+    }
+
+    // Store valid output results
+    //
+    _platformType = platform;
+
+    return 0;
+}
+
+
+// ----------------------------------------------------------------------------
+//  CreatePlatformSpecificObjects
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::CreatePlatformSpecificObjects()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    AudioDeviceGeneric* ptrAudioDevice(NULL);
+    AudioDeviceUtility* ptrAudioDeviceUtility(NULL);
+
+#if defined(WEBRTC_DUMMY_AUDIO_BUILD)
+    ptrAudioDevice = new AudioDeviceDummy(Id());
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Dummy Audio APIs will be utilized");
+
+    if (ptrAudioDevice != NULL)
+    {
+        ptrAudioDeviceUtility = new AudioDeviceUtilityDummy(Id());
+    }
+#else
+    const AudioLayer audioLayer(PlatformAudioLayer());
+
+    // Create the *Windows* implementation of the Audio Device
+    //
+#if defined(_WIN32)
+    if ((audioLayer == kWindowsWaveAudio)
+#if !defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+        // Wave audio is default if Core audio is not supported in this build
+        || (audioLayer == kPlatformDefaultAudio)
+#endif
+        )
+    {
+        // create *Windows Wave Audio* implementation
+        ptrAudioDevice = new AudioDeviceWindowsWave(Id());
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Windows Wave APIs will be utilized");
+    }
+#if defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+    if ((audioLayer == kWindowsCoreAudio) ||
+        (audioLayer == kPlatformDefaultAudio)
+        )
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "attempting to use the Windows Core Audio APIs...");
+
+        if (AudioDeviceWindowsCore::CoreAudioIsSupported())
+        {
+            // create *Windows Core Audio* implementation
+            ptrAudioDevice = new AudioDeviceWindowsCore(Id());
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Windows Core Audio APIs will be utilized");
+        }
+        else
+        {
+            // create *Windows Wave Audio* implementation
+            ptrAudioDevice = new AudioDeviceWindowsWave(Id());
+            if (ptrAudioDevice != NULL)
+            {
+                // Core Audio was not supported => revert to Windows Wave instead
+                _platformAudioLayer = kWindowsWaveAudio;  // modify the state set at construction
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "Windows Core Audio is *not* supported => Wave APIs will be utilized instead");
+            }
+        }
+    }
+#endif // defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+    if (ptrAudioDevice != NULL)
+    {
+        // Create the Windows implementation of the Device Utility.
+        // This class is independent of the selected audio layer
+        // for Windows.
+        //
+        ptrAudioDeviceUtility = new AudioDeviceUtilityWindows(Id());
+    }
+#endif  // #if defined(_WIN32)
+
+    // Create the *Android OpenSLES* implementation of the Audio Device
+    //
+#if defined(WEBRTC_ANDROID_OPENSLES)
+    if (audioLayer == kPlatformDefaultAudio)
+    {
+        // Create *Android OpenELSE Audio* implementation
+        ptrAudioDevice = new AudioDeviceAndroidOpenSLES(Id());
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Android OpenSLES Audio APIs will be utilized");
+    }
+
+    if (ptrAudioDevice != NULL)
+    {
+        // Create the Android implementation of the Device Utility.
+        ptrAudioDeviceUtility = new AudioDeviceUtilityAndroid(Id());
+    }
+    // END #if defined(WEBRTC_ANDROID_OPENSLES)
+
+    // Create the *Android Java* implementation of the Audio Device
+    //
+#elif defined(WEBRTC_ANDROID)
+    if (audioLayer == kPlatformDefaultAudio)
+    {
+        // Create *Android JNI Audio* implementation
+        ptrAudioDevice = new AudioDeviceAndroidJni(Id());
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Android JNI Audio APIs will be utilized");
+    }
+
+    if (ptrAudioDevice != NULL)
+    {
+        // Create the Android implementation of the Device Utility.
+        ptrAudioDeviceUtility = new AudioDeviceUtilityAndroid(Id());
+    }
+    // END #if defined(WEBRTC_ANDROID)
+
+    // Create the *Linux* implementation of the Audio Device
+    //
+#elif defined(WEBRTC_LINUX)
+    if ((audioLayer == kLinuxPulseAudio) || (audioLayer == kPlatformDefaultAudio))
+    {
+#if defined(LINUX_PULSE)
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "attempting to use the Linux PulseAudio APIs...");
+
+        if (AudioDeviceLinuxPulse::PulseAudioIsSupported())
+        {
+            // create *Linux PulseAudio* implementation
+            ptrAudioDevice = new AudioDeviceLinuxPulse(Id());
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Linux PulseAudio APIs will be utilized");
+        }
+        else
+        {
+#endif
+#if defined(LINUX_ALSA)
+            // create *Linux ALSA Audio* implementation
+            ptrAudioDevice = new AudioDeviceLinuxALSA(Id());
+            if (ptrAudioDevice != NULL)
+            {
+                // Pulse Audio was not supported => revert to ALSA instead
+                _platformAudioLayer = kLinuxAlsaAudio;  // modify the state set at construction
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "Linux PulseAudio is *not* supported => ALSA APIs will be utilized instead");
+            }
+#endif
+#if defined(LINUX_PULSE)
+        }
+#endif
+    }
+    else if (audioLayer == kLinuxAlsaAudio)
+    {
+#if defined(LINUX_ALSA)
+        // create *Linux ALSA Audio* implementation
+        ptrAudioDevice = new AudioDeviceLinuxALSA(Id());
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Linux ALSA APIs will be utilized");
+#endif
+    }
+
+    if (ptrAudioDevice != NULL)
+    {
+        // Create the Linux implementation of the Device Utility.
+        // This class is independent of the selected audio layer
+        // for Linux.
+        //
+        ptrAudioDeviceUtility = new AudioDeviceUtilityLinux(Id());
+    }
+#endif  // #if defined(WEBRTC_LINUX)
+
+    // Create the *iPhone* implementation of the Audio Device
+    //
+#if defined(MAC_IPHONE)
+    if (audioLayer == kPlatformDefaultAudio)
+    {
+        // Create *iPhone Audio* implementation
+        ptrAudioDevice = new AudioDeviceIPhone(Id());
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "iPhone Audio APIs will be utilized");
+    }
+
+    if (ptrAudioDevice != NULL)
+    {
+        // Create the Mac implementation of the Device Utility.
+        ptrAudioDeviceUtility = new AudioDeviceUtilityIPhone(Id());
+    }
+    // END #if defined(MAC_IPHONE)
+
+    // Create the *Mac* implementation of the Audio Device
+    //
+#elif defined(WEBRTC_MAC_INTEL) || defined(WEBRTC_MAC)
+    if (audioLayer == kPlatformDefaultAudio)
+    {
+        // Create *Mac Audio* implementation
+        ptrAudioDevice = new AudioDeviceMac(Id());
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Mac OS X Audio APIs will be utilized");
+    }
+
+    if (ptrAudioDevice != NULL)
+    {
+        // Create the Mac implementation of the Device Utility.
+        ptrAudioDeviceUtility = new AudioDeviceUtilityMac(Id());
+    }
+#endif  // #if defined(WEBRTC_MAC_INTEL) || defined(WEBRTC_MAC)
+
+    // Create the *Dummy* implementation of the Audio Device
+    // Available for all platforms
+    //
+    if (audioLayer == kDummyAudio)
+    {
+        // Create *Dummy Audio* implementation
+        assert(!ptrAudioDevice);
+        ptrAudioDevice = new AudioDeviceDummy(Id());
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Dummy Audio APIs will be utilized");
+
+        if (ptrAudioDevice != NULL)
+        {
+            ptrAudioDeviceUtility = new AudioDeviceUtilityDummy(Id());
+        }
+    }
+#endif  // if defined(WEBRTC_DUMMY_AUDIO_BUILD)
+
+    if (ptrAudioDevice == NULL)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, "unable to create the platform specific audio device implementation");
+        return -1;
+    }
+
+    if (ptrAudioDeviceUtility == NULL)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, "unable to create the platform specific audio device utility");
+        return -1;
+    }
+
+    // Store valid output pointers
+    //
+    _ptrAudioDevice = ptrAudioDevice;
+    _ptrAudioDeviceUtility = ptrAudioDeviceUtility;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  AttachAudioBuffer
+//
+//  Install "bridge" between the platform implemetation and the generic
+//  implementation. The "child" shall set the native sampling rate and the
+//  number of channels in this function call.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::AttachAudioBuffer()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    _audioDeviceBuffer.SetId(_id);
+    _ptrAudioDevice->AttachAudioBuffer(&_audioDeviceBuffer);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  ~AudioDeviceModuleImpl - dtor
+// ----------------------------------------------------------------------------
+
+AudioDeviceModuleImpl::~AudioDeviceModuleImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s destroyed", __FUNCTION__);
+
+    if (_ptrAudioDevice)
+    {
+        delete _ptrAudioDevice;
+        _ptrAudioDevice = NULL;
+    }
+
+    if (_ptrAudioDeviceUtility)
+    {
+        delete _ptrAudioDeviceUtility;
+        _ptrAudioDeviceUtility = NULL;
+    }
+
+    delete &_critSect;
+    delete &_critSectEventCb;
+    delete &_critSectAudioCb;
+}
+
+// ============================================================================
+//                                  Module
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  Module::ChangeUniqueId
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Module::TimeUntilNextProcess
+//
+//  Returns the number of milliseconds until the module want a worker thread
+//  to call Process().
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::TimeUntilNextProcess()
+{
+    WebRtc_UWord32 now = AudioDeviceUtility::GetTimeInMS();
+    WebRtc_Word32 deltaProcess = kAdmMaxIdleTimeProcess - (now - _lastProcessTime);
+    return (deltaProcess);
+}
+
+// ----------------------------------------------------------------------------
+//  Module::Process
+//
+//  Check for posted error and warning reports. Generate callbacks if
+//  new reports exists.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::Process()
+{
+
+    _lastProcessTime = AudioDeviceUtility::GetTimeInMS();
+
+    // kPlayoutWarning
+    if (_ptrAudioDevice->PlayoutWarning())
+    {
+        CriticalSectionScoped lock(&_critSectEventCb);
+        if (_ptrCbAudioDeviceObserver)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "=> OnWarningIsReported(kPlayoutWarning)");
+            _ptrCbAudioDeviceObserver->OnWarningIsReported(AudioDeviceObserver::kPlayoutWarning);
+        }
+        _ptrAudioDevice->ClearPlayoutWarning();
+    }
+
+    // kPlayoutError
+    if (_ptrAudioDevice->PlayoutError())
+    {
+        CriticalSectionScoped lock(&_critSectEventCb);
+        if (_ptrCbAudioDeviceObserver)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "=> OnErrorIsReported(kPlayoutError)");
+            _ptrCbAudioDeviceObserver->OnErrorIsReported(AudioDeviceObserver::kPlayoutError);
+        }
+        _ptrAudioDevice->ClearPlayoutError();
+    }
+
+    // kRecordingWarning
+    if (_ptrAudioDevice->RecordingWarning())
+    {
+        CriticalSectionScoped lock(&_critSectEventCb);
+        if (_ptrCbAudioDeviceObserver)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "=> OnWarningIsReported(kRecordingWarning)");
+            _ptrCbAudioDeviceObserver->OnWarningIsReported(AudioDeviceObserver::kRecordingWarning);
+        }
+        _ptrAudioDevice->ClearRecordingWarning();
+    }
+
+    // kRecordingError
+    if (_ptrAudioDevice->RecordingError())
+    {
+        CriticalSectionScoped lock(&_critSectEventCb);
+        if (_ptrCbAudioDeviceObserver)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "=> OnErrorIsReported(kRecordingError)");
+            _ptrCbAudioDeviceObserver->OnErrorIsReported(AudioDeviceObserver::kRecordingError);
+        }
+        _ptrAudioDevice->ClearRecordingError();
+    }
+
+    return 0;
+}
+
+// ============================================================================
+//                                    Public API
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  ActiveAudioLayer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::ActiveAudioLayer(AudioLayer* audioLayer) const
+{
+
+    AudioLayer activeAudio;
+
+    if (_ptrAudioDevice->ActiveAudioLayer(activeAudio) == -1)
+    {
+        return -1;
+    }
+
+    *audioLayer = activeAudio;
+
+    if (*audioLayer == AudioDeviceModule::kWindowsWaveAudio)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: kWindowsWaveAudio");
+    }
+    else if (*audioLayer == AudioDeviceModule::kWindowsCoreAudio)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: kWindowsCoreAudio");
+    }
+    else if (*audioLayer == AudioDeviceModule::kLinuxAlsaAudio)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: kLinuxAlsaAudio");
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: NOT_SUPPORTED");
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  LastError
+// ----------------------------------------------------------------------------
+
+AudioDeviceModule::ErrorCode AudioDeviceModuleImpl::LastError() const
+{
+    return _lastError;
+}
+
+// ----------------------------------------------------------------------------
+//  Init
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::Init()
+{
+
+    if (_initialized)
+        return 0;
+
+    if (!_ptrAudioDeviceUtility)
+        return -1;
+
+    if (!_ptrAudioDevice)
+        return -1;
+
+    _ptrAudioDeviceUtility->Init();
+
+    if (_ptrAudioDevice->Init() == -1)
+    {
+        return -1;
+    }
+
+    _initialized = true;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Terminate
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::Terminate()
+{
+
+    if (!_initialized)
+        return 0;
+
+    if (_ptrAudioDevice->Terminate() == -1)
+    {
+        return -1;
+    }
+
+    _initialized = false;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Initialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceModuleImpl::Initialized() const
+{
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: %d", _initialized);
+    return (_initialized);
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SpeakerIsAvailable(bool* available)
+{
+    CHECK_INITIALIZED();
+
+    bool isAvailable(0);
+
+    if (_ptrAudioDevice->SpeakerIsAvailable(isAvailable) == -1)
+    {
+        return -1;
+    }
+
+    *available = isAvailable;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", available);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  InitSpeaker
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::InitSpeaker()
+{
+    CHECK_INITIALIZED();
+    return (_ptrAudioDevice->InitSpeaker());
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::MicrophoneIsAvailable(bool* available)
+{
+    CHECK_INITIALIZED();
+
+    bool isAvailable(0);
+
+    if (_ptrAudioDevice->MicrophoneIsAvailable(isAvailable) == -1)
+    {
+        return -1;
+    }
+
+    *available = isAvailable;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", *available);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  InitMicrophone
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::InitMicrophone()
+{
+    CHECK_INITIALIZED();
+    return (_ptrAudioDevice->InitMicrophone());
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolumeIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SpeakerVolumeIsAvailable(bool* available)
+{
+    CHECK_INITIALIZED();
+
+    bool isAvailable(0);
+
+    if (_ptrAudioDevice->SpeakerVolumeIsAvailable(isAvailable) == -1)
+    {
+        return -1;
+    }
+
+    *available = isAvailable;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", *available);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  SetSpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetSpeakerVolume(WebRtc_UWord32 volume)
+{
+    CHECK_INITIALIZED();
+    return (_ptrAudioDevice->SetSpeakerVolume(volume));
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SpeakerVolume(WebRtc_UWord32* volume) const
+{
+    CHECK_INITIALIZED();
+
+    WebRtc_UWord32 level(0);
+
+    if (_ptrAudioDevice->SpeakerVolume(level) == -1)
+    {
+        return -1;
+    }
+
+    *volume = level;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: volume=%u", *volume);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  SetWaveOutVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetWaveOutVolume(WebRtc_UWord16 volumeLeft, WebRtc_UWord16 volumeRight)
+{
+    CHECK_INITIALIZED();
+    return (_ptrAudioDevice->SetWaveOutVolume(volumeLeft, volumeRight));
+}
+
+// ----------------------------------------------------------------------------
+//  WaveOutVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::WaveOutVolume(WebRtc_UWord16* volumeLeft, WebRtc_UWord16* volumeRight) const
+{
+    CHECK_INITIALIZED();
+
+    WebRtc_UWord16 volLeft(0);
+    WebRtc_UWord16 volRight(0);
+
+    if (_ptrAudioDevice->WaveOutVolume(volLeft, volRight) == -1)
+    {
+        return -1;
+    }
+
+    *volumeLeft = volLeft;
+    *volumeRight = volRight;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "outputs: volumeLeft=%u, volumeRight=%u",
+        *volumeLeft, *volumeRight);
+
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceModuleImpl::SpeakerIsInitialized() const
+{
+    CHECK_INITIALIZED_BOOL();
+
+    bool isInitialized = _ptrAudioDevice->SpeakerIsInitialized();
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: %d", isInitialized);
+    return (isInitialized);
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceModuleImpl::MicrophoneIsInitialized() const
+{
+    CHECK_INITIALIZED_BOOL();
+
+    bool isInitialized = _ptrAudioDevice->MicrophoneIsInitialized();
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: %d", isInitialized);
+    return (isInitialized);
+}
+
+// ----------------------------------------------------------------------------
+//  MaxSpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::MaxSpeakerVolume(WebRtc_UWord32* maxVolume) const
+{
+    CHECK_INITIALIZED();
+
+    WebRtc_UWord32 maxVol(0);
+
+    if (_ptrAudioDevice->MaxSpeakerVolume(maxVol) == -1)
+    {
+        return -1;
+    }
+
+    *maxVolume = maxVol;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: maxVolume=%d", *maxVolume);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  MinSpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::MinSpeakerVolume(WebRtc_UWord32* minVolume) const
+{
+    CHECK_INITIALIZED();
+
+    WebRtc_UWord32 minVol(0);
+
+    if (_ptrAudioDevice->MinSpeakerVolume(minVol) == -1)
+    {
+        return -1;
+    }
+
+    *minVolume = minVol;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: minVolume=%u", *minVolume);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolumeStepSize
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SpeakerVolumeStepSize(WebRtc_UWord16* stepSize) const
+{
+    CHECK_INITIALIZED();
+
+    WebRtc_UWord16 delta(0);
+
+    if (_ptrAudioDevice->SpeakerVolumeStepSize(delta) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to retrieve the speaker-volume step size");
+        return -1;
+    }
+
+    *stepSize = delta;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: stepSize=%u", *stepSize);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SpeakerMuteIsAvailable(bool* available)
+{
+    CHECK_INITIALIZED();
+
+    bool isAvailable(0);
+
+    if (_ptrAudioDevice->SpeakerMuteIsAvailable(isAvailable) == -1)
+    {
+        return -1;
+    }
+
+    *available = isAvailable;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", *available);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  SetSpeakerMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetSpeakerMute(bool enable)
+{
+    CHECK_INITIALIZED();
+    return (_ptrAudioDevice->SetSpeakerMute(enable));
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SpeakerMute(bool* enabled) const
+{
+    CHECK_INITIALIZED();
+
+    bool muted(false);
+
+    if (_ptrAudioDevice->SpeakerMute(muted) == -1)
+    {
+        return -1;
+    }
+
+    *enabled = muted;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: enabled=%u", *enabled);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::MicrophoneMuteIsAvailable(bool* available)
+{
+    CHECK_INITIALIZED();
+
+    bool isAvailable(0);
+
+    if (_ptrAudioDevice->MicrophoneMuteIsAvailable(isAvailable) == -1)
+    {
+        return -1;
+    }
+
+    *available = isAvailable;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", *available);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetMicrophoneMute(bool enable)
+{
+    CHECK_INITIALIZED();
+    return (_ptrAudioDevice->SetMicrophoneMute(enable));
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::MicrophoneMute(bool* enabled) const
+{
+    CHECK_INITIALIZED();
+
+    bool muted(false);
+
+    if (_ptrAudioDevice->MicrophoneMute(muted) == -1)
+    {
+        return -1;
+    }
+
+    *enabled = muted;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: enabled=%u", *enabled);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneBoostIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::MicrophoneBoostIsAvailable(bool* available)
+{
+    CHECK_INITIALIZED();
+
+    bool isAvailable(0);
+
+    if (_ptrAudioDevice->MicrophoneBoostIsAvailable(isAvailable) == -1)
+    {
+        return -1;
+    }
+
+    *available = isAvailable;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", *available);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneBoost
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetMicrophoneBoost(bool enable)
+{
+    CHECK_INITIALIZED();
+    return (_ptrAudioDevice->SetMicrophoneBoost(enable));
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneBoost
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::MicrophoneBoost(bool* enabled) const
+{
+    CHECK_INITIALIZED();
+
+    bool onOff(false);
+
+    if (_ptrAudioDevice->MicrophoneBoost(onOff) == -1)
+    {
+        return -1;
+    }
+
+    *enabled = onOff;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: enabled=%u", *enabled);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolumeIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::MicrophoneVolumeIsAvailable(bool* available)
+{
+    CHECK_INITIALIZED();
+
+    bool isAvailable(0);
+
+    if (_ptrAudioDevice->MicrophoneVolumeIsAvailable(isAvailable) == -1)
+    {
+        return -1;
+    }
+
+    *available = isAvailable;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", *available);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetMicrophoneVolume(WebRtc_UWord32 volume)
+{
+    CHECK_INITIALIZED();
+    return (_ptrAudioDevice->SetMicrophoneVolume(volume));
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::MicrophoneVolume(WebRtc_UWord32* volume) const
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+    CHECK_INITIALIZED();
+
+    WebRtc_UWord32 level(0);
+
+    if (_ptrAudioDevice->MicrophoneVolume(level) == -1)
+    {
+        return -1;
+    }
+
+    *volume = level;
+
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "output: volume=%u", *volume);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  StereoRecordingIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StereoRecordingIsAvailable(bool* available) const
+{
+    CHECK_INITIALIZED();
+
+    bool isAvailable(0);
+
+    if (_ptrAudioDevice->StereoRecordingIsAvailable(isAvailable) == -1)
+    {
+        return -1;
+    }
+
+    *available = isAvailable;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", *available);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  SetStereoRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetStereoRecording(bool enable)
+{
+    CHECK_INITIALIZED();
+
+    if (_ptrAudioDevice->RecordingIsInitialized())
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "recording in stereo is not supported");
+        return -1;
+    }
+
+    if (_ptrAudioDevice->SetStereoRecording(enable) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to enable stereo recording");
+        return -1;
+    }
+
+    WebRtc_Word8 nChannels(1);
+    if (enable)
+    {
+        nChannels = 2;
+    }
+    _audioDeviceBuffer.SetRecordingChannels(nChannels);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StereoRecording(bool* enabled) const
+{
+    CHECK_INITIALIZED();
+
+    bool stereo(false);
+
+    if (_ptrAudioDevice->StereoRecording(stereo) == -1)
+    {
+        return -1;
+    }
+
+    *enabled = stereo;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: enabled=%u", *enabled);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingChannel
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetRecordingChannel(const ChannelType channel)
+{
+    if (channel == kChannelBoth)
+    {
+    }
+    else if (channel == kChannelLeft)
+    {
+    }
+    else
+    {
+    }
+    CHECK_INITIALIZED();
+
+    bool stereo(false);
+
+    if (_ptrAudioDevice->StereoRecording(stereo) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "recording in stereo is not supported");
+        return -1;
+    }
+
+    return (_audioDeviceBuffer.SetRecordingChannel(channel));
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingChannel
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::RecordingChannel(ChannelType* channel) const
+{
+    CHECK_INITIALIZED();
+
+    ChannelType chType;
+
+    if (_audioDeviceBuffer.RecordingChannel(chType) == -1)
+    {
+        return -1;
+    }
+
+    *channel = chType;
+
+    if (*channel == kChannelBoth)
+    {
+    }
+    else if (*channel == kChannelLeft)
+    {
+    }
+    else
+    {
+    }
+
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  StereoPlayoutIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StereoPlayoutIsAvailable(bool* available) const
+{
+    CHECK_INITIALIZED();
+
+    bool isAvailable(0);
+
+    if (_ptrAudioDevice->StereoPlayoutIsAvailable(isAvailable) == -1)
+    {
+        return -1;
+    }
+
+    *available = isAvailable;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", *available);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  SetStereoPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetStereoPlayout(bool enable)
+{
+    CHECK_INITIALIZED();
+
+    if (_ptrAudioDevice->PlayoutIsInitialized())
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "unable to set stereo mode while playing side is initialized");
+        return -1;
+    }
+
+    if (_ptrAudioDevice->SetStereoPlayout(enable))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "stereo playout is not supported");
+        return -1;
+    }
+
+    WebRtc_Word8 nChannels(1);
+    if (enable)
+    {
+        nChannels = 2;
+    }
+    _audioDeviceBuffer.SetPlayoutChannels(nChannels);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StereoPlayout(bool* enabled) const
+{
+    CHECK_INITIALIZED();
+
+    bool stereo(false);
+
+    if (_ptrAudioDevice->StereoPlayout(stereo) == -1)
+    {
+        return -1;
+    }
+
+   *enabled = stereo;
+
+   WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: enabled=%u", *enabled);
+   return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  SetAGC
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetAGC(bool enable)
+{
+    CHECK_INITIALIZED();
+    return (_ptrAudioDevice->SetAGC(enable));
+}
+
+// ----------------------------------------------------------------------------
+//  AGC
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceModuleImpl::AGC() const
+{
+    CHECK_INITIALIZED_BOOL();
+    return (_ptrAudioDevice->AGC());
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::PlayoutIsAvailable(bool* available)
+{
+    CHECK_INITIALIZED();
+
+    bool isAvailable(0);
+
+    if (_ptrAudioDevice->PlayoutIsAvailable(isAvailable) == -1)
+    {
+        return -1;
+    }
+
+    *available = isAvailable;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", *available);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::RecordingIsAvailable(bool* available)
+{
+    CHECK_INITIALIZED();
+
+    bool isAvailable(0);
+
+    if (_ptrAudioDevice->RecordingIsAvailable(isAvailable) == -1)
+    {
+        return -1;
+    }
+
+    *available = isAvailable;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: available=%d", *available);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  MaxMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::MaxMicrophoneVolume(WebRtc_UWord32* maxVolume) const
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+    CHECK_INITIALIZED();
+
+    WebRtc_UWord32 maxVol(0);
+
+    if (_ptrAudioDevice->MaxMicrophoneVolume(maxVol) == -1)
+    {
+        return -1;
+    }
+
+    *maxVolume = maxVol;
+
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "output: maxVolume=%d", *maxVolume);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  MinMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::MinMicrophoneVolume(WebRtc_UWord32* minVolume) const
+{
+    CHECK_INITIALIZED();
+
+    WebRtc_UWord32 minVol(0);
+
+    if (_ptrAudioDevice->MinMicrophoneVolume(minVol) == -1)
+    {
+        return -1;
+    }
+
+    *minVolume = minVol;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: minVolume=%u", *minVolume);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolumeStepSize
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::MicrophoneVolumeStepSize(WebRtc_UWord16* stepSize) const
+{
+    CHECK_INITIALIZED();
+
+    WebRtc_UWord16 delta(0);
+
+    if (_ptrAudioDevice->MicrophoneVolumeStepSize(delta) == -1)
+    {
+        return -1;
+    }
+
+    *stepSize = delta;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: stepSize=%u", *stepSize);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDevices
+// ----------------------------------------------------------------------------
+
+WebRtc_Word16 AudioDeviceModuleImpl::PlayoutDevices()
+{
+    CHECK_INITIALIZED();
+
+    WebRtc_UWord16 nPlayoutDevices = _ptrAudioDevice->PlayoutDevices();
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: #playout devices=%d", nPlayoutDevices);
+    return ((WebRtc_Word16)(nPlayoutDevices));
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutDevice I (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetPlayoutDevice(WebRtc_UWord16 index)
+{
+    CHECK_INITIALIZED();
+    return (_ptrAudioDevice->SetPlayoutDevice(index));
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutDevice II (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetPlayoutDevice(WindowsDeviceType device)
+{
+    if (device == kDefaultDevice)
+    {
+    }
+    else
+    {
+    }
+    CHECK_INITIALIZED();
+
+    return (_ptrAudioDevice->SetPlayoutDevice(device));
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDeviceName
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::PlayoutDeviceName(
+    WebRtc_UWord16 index,
+    char name[kAdmMaxDeviceNameSize],
+    char guid[kAdmMaxGuidSize])
+{
+    CHECK_INITIALIZED();
+
+    if (name == NULL)
+    {
+        _lastError = kAdmErrArgument;
+        return -1;
+    }
+
+    if (_ptrAudioDevice->PlayoutDeviceName(index, name, guid) == -1)
+    {
+        return -1;
+    }
+
+    if (name != NULL)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: name=%s", name);
+    }
+    if (guid != NULL)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: guid=%s", guid);
+    }
+
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDeviceName
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::RecordingDeviceName(
+    WebRtc_UWord16 index,
+    char name[kAdmMaxDeviceNameSize],
+    char guid[kAdmMaxGuidSize])
+{
+    CHECK_INITIALIZED();
+
+    if (name == NULL)
+    {
+        _lastError = kAdmErrArgument;
+        return -1;
+    }
+
+    if (_ptrAudioDevice->RecordingDeviceName(index, name, guid) == -1)
+    {
+        return -1;
+    }
+
+    if (name != NULL)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: name=%s", name);
+    }
+    if (guid != NULL)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: guid=%s", guid);
+    }
+
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDevices
+// ----------------------------------------------------------------------------
+
+WebRtc_Word16 AudioDeviceModuleImpl::RecordingDevices()
+{
+    CHECK_INITIALIZED();
+
+    WebRtc_UWord16 nRecordingDevices = _ptrAudioDevice->RecordingDevices();
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
+                 "output: #recording devices=%d", nRecordingDevices);
+    return ((WebRtc_Word16)nRecordingDevices);
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingDevice I (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetRecordingDevice(WebRtc_UWord16 index)
+{
+    CHECK_INITIALIZED();
+    return (_ptrAudioDevice->SetRecordingDevice(index));
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingDevice II (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetRecordingDevice(WindowsDeviceType device)
+{
+    if (device == kDefaultDevice)
+    {
+    }
+    else
+    {
+    }
+    CHECK_INITIALIZED();
+
+    return (_ptrAudioDevice->SetRecordingDevice(device));
+}
+
+// ----------------------------------------------------------------------------
+//  InitPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::InitPlayout()
+{
+    CHECK_INITIALIZED();
+    _audioDeviceBuffer.InitPlayout();
+    return (_ptrAudioDevice->InitPlayout());
+}
+
+// ----------------------------------------------------------------------------
+//  InitRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::InitRecording()
+{
+    CHECK_INITIALIZED();
+    _audioDeviceBuffer.InitRecording();
+    return (_ptrAudioDevice->InitRecording());
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceModuleImpl::PlayoutIsInitialized() const
+{
+    CHECK_INITIALIZED_BOOL();
+    return (_ptrAudioDevice->PlayoutIsInitialized());
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceModuleImpl::RecordingIsInitialized() const
+{
+    CHECK_INITIALIZED_BOOL();
+    return (_ptrAudioDevice->RecordingIsInitialized());
+}
+
+// ----------------------------------------------------------------------------
+//  StartPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StartPlayout()
+{
+    CHECK_INITIALIZED();
+    return (_ptrAudioDevice->StartPlayout());
+}
+
+// ----------------------------------------------------------------------------
+//  StopPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StopPlayout()
+{
+    CHECK_INITIALIZED();
+    return (_ptrAudioDevice->StopPlayout());
+}
+
+// ----------------------------------------------------------------------------
+//  Playing
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceModuleImpl::Playing() const
+{
+    CHECK_INITIALIZED_BOOL();
+    return (_ptrAudioDevice->Playing());
+}
+
+// ----------------------------------------------------------------------------
+//  StartRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StartRecording()
+{
+    CHECK_INITIALIZED();
+    return (_ptrAudioDevice->StartRecording());
+}
+// ----------------------------------------------------------------------------
+//  StopRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StopRecording()
+{
+    CHECK_INITIALIZED();
+    return (_ptrAudioDevice->StopRecording());
+}
+
+// ----------------------------------------------------------------------------
+//  Recording
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceModuleImpl::Recording() const
+{
+    CHECK_INITIALIZED_BOOL();
+    return (_ptrAudioDevice->Recording());
+}
+
+// ----------------------------------------------------------------------------
+//  RegisterEventObserver
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::RegisterEventObserver(AudioDeviceObserver* eventCallback)
+{
+
+    CriticalSectionScoped lock(&_critSectEventCb);
+    _ptrCbAudioDeviceObserver = eventCallback;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RegisterAudioCallback
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::RegisterAudioCallback(AudioTransport* audioCallback)
+{
+
+    CriticalSectionScoped lock(&_critSectAudioCb);
+    _audioDeviceBuffer.RegisterAudioCallback(audioCallback);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StartRawInputFileRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StartRawInputFileRecording(
+    const char pcmFileNameUTF8[kAdmMaxFileNameSize])
+{
+    CHECK_INITIALIZED();
+
+    if (NULL == pcmFileNameUTF8)
+    {
+        return -1;
+    }
+
+    return (_audioDeviceBuffer.StartInputFileRecording(pcmFileNameUTF8));
+}
+
+// ----------------------------------------------------------------------------
+//  StopRawInputFileRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StopRawInputFileRecording()
+{
+    CHECK_INITIALIZED();
+
+    return (_audioDeviceBuffer.StopInputFileRecording());
+}
+
+// ----------------------------------------------------------------------------
+//  StartRawOutputFileRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StartRawOutputFileRecording(
+    const char pcmFileNameUTF8[kAdmMaxFileNameSize])
+{
+    CHECK_INITIALIZED();
+
+    if (NULL == pcmFileNameUTF8)
+    {
+        return -1;
+    }
+
+    return (_audioDeviceBuffer.StartOutputFileRecording(pcmFileNameUTF8));
+}
+
+// ----------------------------------------------------------------------------
+//  StopRawOutputFileRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::StopRawOutputFileRecording()
+{
+    CHECK_INITIALIZED();
+
+    return (_audioDeviceBuffer.StopOutputFileRecording());
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutBuffer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetPlayoutBuffer(const BufferType type, WebRtc_UWord16 sizeMS)
+{
+    CHECK_INITIALIZED();
+
+    if (_ptrAudioDevice->PlayoutIsInitialized())
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "unable to modify the playout buffer while playing side is initialized");
+        return -1;
+    }
+
+    WebRtc_Word32 ret(0);
+
+    if (kFixedBufferSize == type)
+    {
+        if (sizeMS < kAdmMinPlayoutBufferSizeMs || sizeMS > kAdmMaxPlayoutBufferSizeMs)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "size parameter is out of range");
+            return -1;
+        }
+    }
+
+    if ((ret = _ptrAudioDevice->SetPlayoutBuffer(type, sizeMS)) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to set the playout buffer (error: %d)", LastError());
+    }
+
+    return ret;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutBuffer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::PlayoutBuffer(BufferType* type, WebRtc_UWord16* sizeMS) const
+{
+    CHECK_INITIALIZED();
+
+    BufferType bufType;
+    WebRtc_UWord16 size(0);
+
+    if (_ptrAudioDevice->PlayoutBuffer(bufType, size) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to retrieve the buffer type and size");
+        return -1;
+    }
+
+    *type = bufType;
+    *sizeMS = size;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: type=%u, sizeMS=%u", *type, *sizeMS);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDelay
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::PlayoutDelay(WebRtc_UWord16* delayMS) const
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+    CHECK_INITIALIZED();
+
+    WebRtc_UWord16 delay(0);
+
+    if (_ptrAudioDevice->PlayoutDelay(delay) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to retrieve the playout delay");
+        return -1;
+    }
+
+    *delayMS = delay;
+
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "output: delayMS=%u", *delayMS);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDelay
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::RecordingDelay(WebRtc_UWord16* delayMS) const
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+    CHECK_INITIALIZED();
+
+    WebRtc_UWord16 delay(0);
+
+    if (_ptrAudioDevice->RecordingDelay(delay) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to retrieve the recording delay");
+        return -1;
+    }
+
+    *delayMS = delay;
+
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "output: delayMS=%u", *delayMS);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  CPULoad
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::CPULoad(WebRtc_UWord16* load) const
+{
+    CHECK_INITIALIZED();
+
+    WebRtc_UWord16 cpuLoad(0);
+
+    if (_ptrAudioDevice->CPULoad(cpuLoad) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to retrieve the CPU load");
+        return -1;
+    }
+
+    *load = cpuLoad;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: load=%u", *load);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingSampleRate
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetRecordingSampleRate(const WebRtc_UWord32 samplesPerSec)
+{
+    CHECK_INITIALIZED();
+
+    if (_ptrAudioDevice->SetRecordingSampleRate(samplesPerSec) != 0)
+    {
+        return -1;
+    }
+
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingSampleRate
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::RecordingSampleRate(WebRtc_UWord32* samplesPerSec) const
+{
+    CHECK_INITIALIZED();
+
+    WebRtc_Word32 sampleRate = _audioDeviceBuffer.RecordingSampleRate();
+
+    if (sampleRate == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to retrieve the sample rate");
+        return -1;
+    }
+
+    *samplesPerSec = sampleRate;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: samplesPerSec=%u", *samplesPerSec);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutSampleRate
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetPlayoutSampleRate(const WebRtc_UWord32 samplesPerSec)
+{
+    CHECK_INITIALIZED();
+
+    if (_ptrAudioDevice->SetPlayoutSampleRate(samplesPerSec) != 0)
+    {
+        return -1;
+    }
+
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutSampleRate
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::PlayoutSampleRate(WebRtc_UWord32* samplesPerSec) const
+{
+    CHECK_INITIALIZED();
+
+    WebRtc_Word32 sampleRate = _audioDeviceBuffer.PlayoutSampleRate();
+
+    if (sampleRate == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to retrieve the sample rate");
+        return -1;
+    }
+
+    *samplesPerSec = sampleRate;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "output: samplesPerSec=%u", *samplesPerSec);
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  ResetAudioDevice
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::ResetAudioDevice()
+{
+    CHECK_INITIALIZED();
+
+
+    if (_ptrAudioDevice->ResetAudioDevice() == -1)
+    {
+        return -1;
+    }
+
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  SetLoudspeakerStatus
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::SetLoudspeakerStatus(bool enable)
+{
+    CHECK_INITIALIZED();
+
+    if (_ptrAudioDevice->SetLoudspeakerStatus(enable) != 0)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  GetLoudspeakerStatus
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceModuleImpl::GetLoudspeakerStatus(bool* enabled) const
+{
+    CHECK_INITIALIZED();
+
+    if (_ptrAudioDevice->GetLoudspeakerStatus(*enabled) != 0)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+int32_t AudioDeviceModuleImpl::EnableBuiltInAEC(bool enable)
+{
+    CHECK_INITIALIZED();
+
+    return _ptrAudioDevice->EnableBuiltInAEC(enable);
+}
+
+bool AudioDeviceModuleImpl::BuiltInAECIsEnabled() const
+{
+    CHECK_INITIALIZED_BOOL();
+
+    return _ptrAudioDevice->BuiltInAECIsEnabled();
+}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  Platform
+// ----------------------------------------------------------------------------
+
+AudioDeviceModuleImpl::PlatformType AudioDeviceModuleImpl::Platform() const
+{
+    return _platformType;
+}
+
+// ----------------------------------------------------------------------------
+//  PlatformAudioLayer
+// ----------------------------------------------------------------------------
+
+AudioDeviceModule::AudioLayer AudioDeviceModuleImpl::PlatformAudioLayer() const
+{
+
+    switch (_platformAudioLayer)
+    {
+    case kPlatformDefaultAudio:
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
+                     "output: kPlatformDefaultAudio");
+        break;
+    case kWindowsWaveAudio:
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
+                     "output: kWindowsWaveAudio");
+        break;
+    case kWindowsCoreAudio:
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
+                     "output: kWindowsCoreAudio");
+        break;
+    case kLinuxAlsaAudio:
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
+                     "output: kLinuxAlsaAudio");
+        break;
+    case kDummyAudio:
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
+                     "output: kDummyAudio");
+        break;
+    default:
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "output: INVALID");
+        break;
+    }
+
+    return _platformAudioLayer;
+}
+
+}  // namespace webrtc
diff --git a/src/modules/audio_device/main/source/audio_device_impl.h b/src/modules/audio_device/main/source/audio_device_impl.h
new file mode 100644
index 0000000..559f8aa
--- /dev/null
+++ b/src/modules/audio_device/main/source/audio_device_impl.h
@@ -0,0 +1,235 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_IMPL_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_IMPL_H
+
+#include "audio_device.h"
+#include "audio_device_buffer.h"
+
+namespace webrtc
+{
+
+class AudioDeviceGeneric;
+class AudioDeviceUtility;
+class CriticalSectionWrapper;
+
+class AudioDeviceModuleImpl : public AudioDeviceModule
+{
+public:
+    enum PlatformType
+    {
+        kPlatformNotSupported = 0,
+        kPlatformWin32 = 1,
+        kPlatformWinCe = 2,
+        kPlatformLinux = 3,
+        kPlatformMac = 4,
+        kPlatformAndroid = 5
+    };
+
+    WebRtc_Word32 CheckPlatform();
+    WebRtc_Word32 CreatePlatformSpecificObjects();
+    WebRtc_Word32 AttachAudioBuffer();
+
+    AudioDeviceModuleImpl(const WebRtc_Word32 id, const AudioLayer audioLayer);
+    virtual ~AudioDeviceModuleImpl();
+
+public: // RefCountedModule
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+    virtual WebRtc_Word32 TimeUntilNextProcess();
+    virtual WebRtc_Word32 Process();
+
+public:
+    // Factory methods (resource allocation/deallocation)
+    static AudioDeviceModule* Create(
+        const WebRtc_Word32 id,
+        const AudioLayer audioLayer = kPlatformDefaultAudio);
+
+    // Retrieve the currently utilized audio layer
+    virtual WebRtc_Word32 ActiveAudioLayer(AudioLayer* audioLayer) const;
+
+    // Error handling
+    virtual ErrorCode LastError() const;
+    virtual WebRtc_Word32 RegisterEventObserver(
+        AudioDeviceObserver* eventCallback);
+
+    // Full-duplex transportation of PCM audio
+    virtual WebRtc_Word32 RegisterAudioCallback(
+        AudioTransport* audioCallback);
+
+    // Main initializaton and termination
+    virtual WebRtc_Word32 Init();
+    virtual WebRtc_Word32 Terminate();
+    virtual bool Initialized() const;
+
+    // Device enumeration
+    virtual WebRtc_Word16 PlayoutDevices();
+    virtual WebRtc_Word16 RecordingDevices();
+    virtual WebRtc_Word32 PlayoutDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize]);
+    virtual WebRtc_Word32 RecordingDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize]);
+
+    // Device selection
+    virtual WebRtc_Word32 SetPlayoutDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetPlayoutDevice(WindowsDeviceType device);
+    virtual WebRtc_Word32 SetRecordingDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetRecordingDevice(WindowsDeviceType device);
+
+    // Audio transport initialization
+    virtual WebRtc_Word32 PlayoutIsAvailable(bool* available);
+    virtual WebRtc_Word32 InitPlayout();
+    virtual bool PlayoutIsInitialized() const;
+    virtual WebRtc_Word32 RecordingIsAvailable(bool* available);
+    virtual WebRtc_Word32 InitRecording();
+    virtual bool RecordingIsInitialized() const;
+
+    // Audio transport control
+    virtual WebRtc_Word32 StartPlayout();
+    virtual WebRtc_Word32 StopPlayout();
+    virtual bool Playing() const;
+    virtual WebRtc_Word32 StartRecording();
+    virtual WebRtc_Word32 StopRecording();
+    virtual bool Recording() const;
+
+    // Microphone Automatic Gain Control (AGC)
+    virtual WebRtc_Word32 SetAGC(bool enable);
+    virtual bool AGC() const;
+
+    // Volume control based on the Windows Wave API (Windows only)
+    virtual WebRtc_Word32 SetWaveOutVolume(WebRtc_UWord16 volumeLeft,
+                                           WebRtc_UWord16 volumeRight);
+    virtual WebRtc_Word32 WaveOutVolume(WebRtc_UWord16* volumeLeft,
+                                        WebRtc_UWord16* volumeRight) const;
+
+    // Audio mixer initialization
+    virtual WebRtc_Word32 SpeakerIsAvailable(bool* available);
+    virtual WebRtc_Word32 InitSpeaker();
+    virtual bool SpeakerIsInitialized() const;
+    virtual WebRtc_Word32 MicrophoneIsAvailable(bool* available);
+    virtual WebRtc_Word32 InitMicrophone();
+    virtual bool MicrophoneIsInitialized() const;
+
+    // Speaker volume controls
+    virtual WebRtc_Word32 SpeakerVolumeIsAvailable(bool* available);
+    virtual WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 SpeakerVolume(WebRtc_UWord32* volume) const;
+    virtual WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32* maxVolume) const;
+    virtual WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32* minVolume) const;
+    virtual WebRtc_Word32 SpeakerVolumeStepSize(
+        WebRtc_UWord16* stepSize) const;
+
+    // Microphone volume controls
+    virtual WebRtc_Word32 MicrophoneVolumeIsAvailable(bool* available);
+    virtual WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32* volume) const;
+    virtual WebRtc_Word32 MaxMicrophoneVolume(
+        WebRtc_UWord32* maxVolume) const;
+    virtual WebRtc_Word32 MinMicrophoneVolume(
+        WebRtc_UWord32* minVolume) const;
+    virtual WebRtc_Word32 MicrophoneVolumeStepSize(
+        WebRtc_UWord16* stepSize) const;
+
+    // Speaker mute control
+    virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool* available);
+    virtual WebRtc_Word32 SetSpeakerMute(bool enable);
+    virtual WebRtc_Word32 SpeakerMute(bool* enabled) const;
+
+    // Microphone mute control
+    virtual WebRtc_Word32 MicrophoneMuteIsAvailable(bool* available);
+    virtual WebRtc_Word32 SetMicrophoneMute(bool enable);
+    virtual WebRtc_Word32 MicrophoneMute(bool* enabled) const;
+
+    // Microphone boost control
+    virtual WebRtc_Word32 MicrophoneBoostIsAvailable(bool* available);
+    virtual WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    virtual WebRtc_Word32 MicrophoneBoost(bool* enabled) const;
+
+    // Stereo support
+    virtual WebRtc_Word32 StereoPlayoutIsAvailable(bool* available) const;
+    virtual WebRtc_Word32 SetStereoPlayout(bool enable);
+    virtual WebRtc_Word32 StereoPlayout(bool* enabled) const;
+    virtual WebRtc_Word32 StereoRecordingIsAvailable(bool* available) const;
+    virtual WebRtc_Word32 SetStereoRecording(bool enable);
+    virtual WebRtc_Word32 StereoRecording(bool* enabled) const;
+    virtual WebRtc_Word32 SetRecordingChannel(const ChannelType channel);
+    virtual WebRtc_Word32 RecordingChannel(ChannelType* channel) const;
+
+    // Delay information and control
+    virtual WebRtc_Word32 SetPlayoutBuffer(const BufferType type,
+                                           WebRtc_UWord16 sizeMS = 0);
+    virtual WebRtc_Word32 PlayoutBuffer(BufferType* type,
+                                        WebRtc_UWord16* sizeMS) const;
+    virtual WebRtc_Word32 PlayoutDelay(WebRtc_UWord16* delayMS) const;
+    virtual WebRtc_Word32 RecordingDelay(WebRtc_UWord16* delayMS) const;
+
+    // CPU load
+    virtual WebRtc_Word32 CPULoad(WebRtc_UWord16* load) const;
+
+    // Recording of raw PCM data
+    virtual WebRtc_Word32 StartRawOutputFileRecording(
+        const char pcmFileNameUTF8[kAdmMaxFileNameSize]);
+    virtual WebRtc_Word32 StopRawOutputFileRecording();
+    virtual WebRtc_Word32 StartRawInputFileRecording(
+        const char pcmFileNameUTF8[kAdmMaxFileNameSize]);
+    virtual WebRtc_Word32 StopRawInputFileRecording();
+
+    // Native sample rate controls (samples/sec)
+    virtual WebRtc_Word32 SetRecordingSampleRate(
+        const WebRtc_UWord32 samplesPerSec);
+    virtual WebRtc_Word32 RecordingSampleRate(
+        WebRtc_UWord32* samplesPerSec) const;
+    virtual WebRtc_Word32 SetPlayoutSampleRate(
+        const WebRtc_UWord32 samplesPerSec);
+    virtual WebRtc_Word32 PlayoutSampleRate(
+        WebRtc_UWord32* samplesPerSec) const;
+
+    // Mobile device specific functions
+    virtual WebRtc_Word32 ResetAudioDevice();
+    virtual WebRtc_Word32 SetLoudspeakerStatus(bool enable);
+    virtual WebRtc_Word32 GetLoudspeakerStatus(bool* enabled) const;
+
+    virtual int32_t EnableBuiltInAEC(bool enable);
+    virtual bool BuiltInAECIsEnabled() const;
+
+public:
+    WebRtc_Word32 Id() {return _id;}
+
+private:
+    PlatformType Platform() const;
+    AudioLayer PlatformAudioLayer() const;
+
+private:
+    CriticalSectionWrapper&     _critSect;
+    CriticalSectionWrapper&     _critSectEventCb;
+    CriticalSectionWrapper&     _critSectAudioCb;
+
+    AudioDeviceObserver*        _ptrCbAudioDeviceObserver;
+
+    AudioDeviceUtility*         _ptrAudioDeviceUtility;
+    AudioDeviceGeneric*         _ptrAudioDevice;
+
+    AudioDeviceBuffer           _audioDeviceBuffer;
+
+    WebRtc_Word32               _id;
+    AudioLayer                  _platformAudioLayer;
+    WebRtc_UWord32              _lastProcessTime;
+    PlatformType                _platformType;
+    bool                        _initialized;
+    mutable ErrorCode           _lastError;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_INTERFACE_AUDIO_DEVICE_IMPL_H_
diff --git a/src/modules/audio_device/main/source/audio_device_utility.cc b/src/modules/audio_device/main/source/audio_device_utility.cc
new file mode 100644
index 0000000..203f09a
--- /dev/null
+++ b/src/modules/audio_device/main/source/audio_device_utility.cc
@@ -0,0 +1,114 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+
+#include "audio_device_utility.h"
+
+#if defined(_WIN32)
+
+// ============================================================================
+//                                     Windows
+// ============================================================================
+
+#include <windows.h>
+#include <conio.h>
+#include <ctype.h>
+#include <stdio.h>
+#include <mmsystem.h>
+
+namespace webrtc
+{
+
+void AudioDeviceUtility::WaitForKey()
+{
+	_getch();
+}
+
+WebRtc_UWord32 AudioDeviceUtility::GetTimeInMS()
+{
+	return timeGetTime();
+}
+
+bool AudioDeviceUtility::StringCompare(
+    const char* str1 , const char* str2,
+    const WebRtc_UWord32 length)
+{
+	return ((_strnicmp(str1, str2, length) == 0) ? true : false);
+}
+
+}  // namespace webrtc
+
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+
+// ============================================================================
+//                                 Linux & Mac
+// ============================================================================
+
+#include <sys/time.h>   // gettimeofday
+#include <time.h>       // gettimeofday
+#include <string.h>     // strncasecmp
+#include <stdio.h>      // getchar
+#include <termios.h>    // tcgetattr
+
+#include <unistd.h>
+
+namespace webrtc
+{
+
+void AudioDeviceUtility::WaitForKey()
+{
+
+    struct termios oldt, newt;
+
+    tcgetattr( STDIN_FILENO, &oldt );
+
+    // we don't want getchar to echo!
+
+    newt = oldt;
+    newt.c_lflag &= ~( ICANON | ECHO );
+    tcsetattr( STDIN_FILENO, TCSANOW, &newt );
+
+    // catch any newline that's hanging around...
+
+    // you'll have to hit enter twice if you
+
+    // choose enter out of all available keys
+
+    if (getchar() == '\n')
+    {
+        getchar();
+    }
+
+    tcsetattr( STDIN_FILENO, TCSANOW, &oldt );
+}
+
+WebRtc_UWord32 AudioDeviceUtility::GetTimeInMS()
+{
+    struct timeval tv;
+    struct timezone tz;
+    WebRtc_UWord32 val;
+
+    gettimeofday(&tv, &tz);
+    val = (WebRtc_UWord32)(tv.tv_sec*1000 + tv.tv_usec/1000);
+    return val;
+}
+
+bool AudioDeviceUtility::StringCompare(
+    const char* str1 , const char* str2, const WebRtc_UWord32 length)
+{
+    return (strncasecmp(str1, str2, length) == 0)?true: false;
+}
+
+}  // namespace webrtc
+
+#endif  // defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+
+
diff --git a/src/modules/audio_device/main/source/audio_device_utility.h b/src/modules/audio_device/main/source/audio_device_utility.h
new file mode 100644
index 0000000..293557e
--- /dev/null
+++ b/src/modules/audio_device/main/source/audio_device_utility.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_H
+
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+class AudioDeviceUtility
+{
+public:
+    static WebRtc_UWord32 GetTimeInMS();
+	static void WaitForKey();
+    static bool StringCompare(const char* str1,
+                              const char* str2,
+                              const WebRtc_UWord32 length);
+	virtual WebRtc_Word32 Init() = 0;
+
+	virtual ~AudioDeviceUtility() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_H
+
diff --git a/src/modules/audio_device/main/source/dummy/audio_device_dummy.h b/src/modules/audio_device/main/source/dummy/audio_device_dummy.h
new file mode 100644
index 0000000..beef1f6
--- /dev/null
+++ b/src/modules/audio_device/main/source/dummy/audio_device_dummy.h
@@ -0,0 +1,190 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_DUMMY_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_DUMMY_H
+
+#include <stdio.h>
+
+#include "audio_device_generic.h"
+
+namespace webrtc {
+
+class AudioDeviceDummy : public AudioDeviceGeneric
+{
+public:
+    AudioDeviceDummy(const WebRtc_Word32 id) {}
+    ~AudioDeviceDummy() {}
+
+    // Retrieve the currently utilized audio layer
+    virtual WebRtc_Word32 ActiveAudioLayer(
+        AudioDeviceModule::AudioLayer& audioLayer) const { return -1; }
+
+    // Main initializaton and termination
+    virtual WebRtc_Word32 Init() { return 0; }
+    virtual WebRtc_Word32 Terminate() { return 0; }
+    virtual bool Initialized() const { return true; }
+
+    // Device enumeration
+    virtual WebRtc_Word16 PlayoutDevices() { return -1; }
+    virtual WebRtc_Word16 RecordingDevices() { return -1; }
+    virtual WebRtc_Word32 PlayoutDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize]) { return -1; }
+    virtual WebRtc_Word32 RecordingDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize]) { return -1; }
+
+    // Device selection
+    virtual WebRtc_Word32 SetPlayoutDevice(WebRtc_UWord16 index) { return -1; }
+    virtual WebRtc_Word32 SetPlayoutDevice(
+        AudioDeviceModule::WindowsDeviceType device) { return -1; }
+    virtual WebRtc_Word32 SetRecordingDevice(WebRtc_UWord16 index) {
+      return -1;
+    }
+    virtual WebRtc_Word32 SetRecordingDevice(
+        AudioDeviceModule::WindowsDeviceType device) { return -1; }
+
+    // Audio transport initialization
+    virtual WebRtc_Word32 PlayoutIsAvailable(bool& available) {
+      return -1; }
+    virtual WebRtc_Word32 InitPlayout() { return -1; };
+    virtual bool PlayoutIsInitialized() const { return false; }
+    virtual WebRtc_Word32 RecordingIsAvailable(bool& available) { return -1; }
+    virtual WebRtc_Word32 InitRecording() { return -1; }
+    virtual bool RecordingIsInitialized() const { return false; }
+
+    // Audio transport control
+    virtual WebRtc_Word32 StartPlayout() { return -1; }
+    virtual WebRtc_Word32 StopPlayout() { return -1; }
+    virtual bool Playing() const { return false; }
+    virtual WebRtc_Word32 StartRecording() { return -1; }
+    virtual WebRtc_Word32 StopRecording() { return -1; }
+    virtual bool Recording() const { return false; }
+
+    // Microphone Automatic Gain Control (AGC)
+    virtual WebRtc_Word32 SetAGC(bool enable) { return -1; }
+    virtual bool AGC() const { return false; }
+
+    // Volume control based on the Windows Wave API (Windows only)
+    virtual WebRtc_Word32 SetWaveOutVolume(
+        WebRtc_UWord16 volumeLeft, WebRtc_UWord16 volumeRight) { return -1; }
+    virtual WebRtc_Word32 WaveOutVolume(
+        WebRtc_UWord16& volumeLeft,
+        WebRtc_UWord16& volumeRight) const { return -1; }
+
+    // Audio mixer initialization
+    virtual WebRtc_Word32 SpeakerIsAvailable(bool& available) { return -1; }
+    virtual WebRtc_Word32 InitSpeaker() { return -1; }
+    virtual bool SpeakerIsInitialized() const { return false; }
+    virtual WebRtc_Word32 MicrophoneIsAvailable(bool& available) { return -1; }
+    virtual WebRtc_Word32 InitMicrophone() { return -1; }
+    virtual bool MicrophoneIsInitialized() const { return false; }
+
+    // Speaker volume controls
+    virtual WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available) {
+      return -1;
+    }
+    virtual WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume) { return -1; }
+    virtual WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const {
+      return -1;
+    }
+    virtual WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const {
+      return -1;
+    }
+    virtual WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const {
+      return -1;
+    }
+    virtual WebRtc_Word32 SpeakerVolumeStepSize(
+        WebRtc_UWord16& stepSize) const { return -1; }
+
+    // Microphone volume controls
+    virtual WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available) {
+      return -1;
+    }
+    virtual WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume) {
+      return -1;
+    }
+    virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const {
+      return -1;
+    }
+    virtual WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const {
+      return -1;
+    }
+    virtual WebRtc_Word32 MinMicrophoneVolume(
+        WebRtc_UWord32& minVolume) const { return -1; }
+    virtual WebRtc_Word32 MicrophoneVolumeStepSize(
+        WebRtc_UWord16& stepSize) const { return -1; }
+
+    // Speaker mute control
+    virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool& available) { return -1; }
+    virtual WebRtc_Word32 SetSpeakerMute(bool enable) { return -1; }
+    virtual WebRtc_Word32 SpeakerMute(bool& enabled) const { return -1; }
+
+    // Microphone mute control
+    virtual WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available) {
+      return -1;
+    }
+    virtual WebRtc_Word32 SetMicrophoneMute(bool enable) { return -1; }
+    virtual WebRtc_Word32 MicrophoneMute(bool& enabled) const { return -1; }
+
+    // Microphone boost control
+    virtual WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available) {
+      return -1;
+    }
+    virtual WebRtc_Word32 SetMicrophoneBoost(bool enable) { return -1; }
+    virtual WebRtc_Word32 MicrophoneBoost(bool& enabled) const { return -1; }
+
+    // Stereo support
+    virtual WebRtc_Word32 StereoPlayoutIsAvailable(bool& available) {
+      return -1;
+    }
+    virtual WebRtc_Word32 SetStereoPlayout(bool enable) { return -1; }
+    virtual WebRtc_Word32 StereoPlayout(bool& enabled) const { return -1; }
+    virtual WebRtc_Word32 StereoRecordingIsAvailable(bool& available) {
+      return -1;
+    }
+    virtual WebRtc_Word32 SetStereoRecording(bool enable) { return -1; }
+    virtual WebRtc_Word32 StereoRecording(bool& enabled) const { return -1; }
+
+    // Delay information and control
+    virtual WebRtc_Word32 SetPlayoutBuffer(
+        const AudioDeviceModule::BufferType type,
+        WebRtc_UWord16 sizeMS) { return -1; }
+    virtual WebRtc_Word32 PlayoutBuffer(
+        AudioDeviceModule::BufferType& type,
+        WebRtc_UWord16& sizeMS) const { return -1; }
+    virtual WebRtc_Word32 PlayoutDelay(WebRtc_UWord16& delayMS) const {
+      return -1;
+    }
+    virtual WebRtc_Word32 RecordingDelay(WebRtc_UWord16& delayMS) const {
+      return -1;
+    }
+
+    // CPU load
+    virtual WebRtc_Word32 CPULoad(WebRtc_UWord16& load) const { return -1; }
+
+    virtual bool PlayoutWarning() const { return false; }
+    virtual bool PlayoutError() const { return false; }
+    virtual bool RecordingWarning() const { return false; }
+    virtual bool RecordingError() const { return false; }
+    virtual void ClearPlayoutWarning() {}
+    virtual void ClearPlayoutError() {}
+    virtual void ClearRecordingWarning() {}
+    virtual void ClearRecordingError() {}
+
+    virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_DUMMY_H
diff --git a/src/modules/audio_device/main/source/dummy/audio_device_utility_dummy.h b/src/modules/audio_device/main/source/dummy/audio_device_utility_dummy.h
new file mode 100644
index 0000000..5bf7237
--- /dev/null
+++ b/src/modules/audio_device/main/source/dummy/audio_device_utility_dummy.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_DUMMY_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_DUMMY_H
+
+#include "audio_device_utility.h"
+#include "audio_device.h"
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+
+class AudioDeviceUtilityDummy: public AudioDeviceUtility
+{
+public:
+    AudioDeviceUtilityDummy(const WebRtc_Word32 id) {}
+    ~AudioDeviceUtilityDummy() {}
+
+    virtual WebRtc_Word32 Init() { return 0; }
+};
+
+} // namespace webrtc
+
+#endif  // MODULES_AUDIO_DEVICE_MAIN_SOURCE_LINUX_AUDIO_DEVICE_UTILITY_DUMMY_H_
diff --git a/src/modules/audio_device/main/source/linux/alsasymboltable_linux.cc b/src/modules/audio_device/main/source/linux/alsasymboltable_linux.cc
new file mode 100644
index 0000000..1b1707c
--- /dev/null
+++ b/src/modules/audio_device/main/source/linux/alsasymboltable_linux.cc
@@ -0,0 +1,39 @@
+/*
+ * libjingle
+ * Copyright 2004--2010, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "alsasymboltable_linux.h"
+
+namespace webrtc_adm_linux_alsa {
+
+LATE_BINDING_SYMBOL_TABLE_DEFINE_BEGIN(AlsaSymbolTable, "libasound.so.2")
+#define X(sym) \
+    LATE_BINDING_SYMBOL_TABLE_DEFINE_ENTRY(AlsaSymbolTable, sym)
+ALSA_SYMBOLS_LIST
+#undef X
+LATE_BINDING_SYMBOL_TABLE_DEFINE_END(AlsaSymbolTable)
+
+}  // namespace webrtc_adm_linux_alsa
diff --git a/src/modules/audio_device/main/source/linux/alsasymboltable_linux.h b/src/modules/audio_device/main/source/linux/alsasymboltable_linux.h
new file mode 100644
index 0000000..d25bbd7
--- /dev/null
+++ b/src/modules/audio_device/main/source/linux/alsasymboltable_linux.h
@@ -0,0 +1,147 @@
+/*
+ * libjingle
+ * Copyright 2004--2010, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_ALSASYMBOLTABLE_LINUX_H
+#define WEBRTC_AUDIO_DEVICE_ALSASYMBOLTABLE_LINUX_H
+
+#include "latebindingsymboltable_linux.h"
+
+namespace webrtc_adm_linux_alsa {
+
+// The ALSA symbols we need, as an X-Macro list.
+// This list must contain precisely every libasound function that is used in
+// alsasoundsystem.cc.
+#define ALSA_SYMBOLS_LIST \
+  X(snd_device_name_free_hint) \
+  X(snd_device_name_get_hint) \
+  X(snd_device_name_hint) \
+  X(snd_pcm_avail_update) \
+  X(snd_pcm_close) \
+  X(snd_pcm_delay) \
+  X(snd_pcm_drop) \
+  X(snd_pcm_open) \
+  X(snd_pcm_prepare) \
+  X(snd_pcm_readi) \
+  X(snd_pcm_recover) \
+  X(snd_pcm_resume) \
+  X(snd_pcm_reset) \
+  X(snd_pcm_state) \
+  X(snd_pcm_set_params) \
+  X(snd_pcm_get_params) \
+  X(snd_pcm_start) \
+  X(snd_pcm_stream) \
+  X(snd_pcm_frames_to_bytes) \
+  X(snd_pcm_bytes_to_frames) \
+  X(snd_pcm_wait) \
+  X(snd_pcm_writei) \
+  X(snd_pcm_info_get_class) \
+  X(snd_pcm_info_get_subdevices_avail) \
+  X(snd_pcm_info_get_subdevice_name) \
+  X(snd_pcm_info_set_subdevice) \
+  X(snd_pcm_info_get_id) \
+  X(snd_pcm_info_set_device) \
+  X(snd_pcm_info_set_stream) \
+  X(snd_pcm_info_get_name) \
+  X(snd_pcm_info_get_subdevices_count) \
+  X(snd_pcm_info_sizeof) \
+  X(snd_pcm_hw_params) \
+  X(snd_pcm_hw_params_malloc) \
+  X(snd_pcm_hw_params_free) \
+  X(snd_pcm_hw_params_any) \
+  X(snd_pcm_hw_params_set_access) \
+  X(snd_pcm_hw_params_set_format) \
+  X(snd_pcm_hw_params_set_channels) \
+  X(snd_pcm_hw_params_set_rate_near) \
+  X(snd_pcm_hw_params_set_buffer_size_near) \
+  X(snd_card_next) \
+  X(snd_card_get_name) \
+  X(snd_config_update) \
+  X(snd_config_copy) \
+  X(snd_config_get_id) \
+  X(snd_ctl_open) \
+  X(snd_ctl_close) \
+  X(snd_ctl_card_info) \
+  X(snd_ctl_card_info_sizeof) \
+  X(snd_ctl_card_info_get_id) \
+  X(snd_ctl_card_info_get_name) \
+  X(snd_ctl_pcm_next_device) \
+  X(snd_ctl_pcm_info) \
+  X(snd_mixer_load) \
+  X(snd_mixer_free) \
+  X(snd_mixer_detach) \
+  X(snd_mixer_close) \
+  X(snd_mixer_open) \
+  X(snd_mixer_attach) \
+  X(snd_mixer_first_elem) \
+  X(snd_mixer_elem_next) \
+  X(snd_mixer_selem_get_name) \
+  X(snd_mixer_selem_is_active) \
+  X(snd_mixer_selem_register) \
+  X(snd_mixer_selem_set_playback_volume_all) \
+  X(snd_mixer_selem_get_playback_volume) \
+  X(snd_mixer_selem_has_playback_volume) \
+  X(snd_mixer_selem_get_playback_volume_range) \
+  X(snd_mixer_selem_has_playback_switch) \
+  X(snd_mixer_selem_get_playback_switch) \
+  X(snd_mixer_selem_set_playback_switch_all) \
+  X(snd_mixer_selem_has_capture_switch) \
+  X(snd_mixer_selem_get_capture_switch) \
+  X(snd_mixer_selem_set_capture_switch_all) \
+  X(snd_mixer_selem_has_capture_volume) \
+  X(snd_mixer_selem_set_capture_volume_all) \
+  X(snd_mixer_selem_get_capture_volume) \
+  X(snd_mixer_selem_get_capture_volume_range) \
+  X(snd_dlopen) \
+  X(snd_dlclose) \
+  X(snd_config) \
+  X(snd_config_search) \
+  X(snd_config_get_string) \
+  X(snd_config_search_definition) \
+  X(snd_config_get_type) \
+  X(snd_config_delete) \
+  X(snd_config_iterator_entry) \
+  X(snd_config_iterator_first) \
+  X(snd_config_iterator_next) \
+  X(snd_config_iterator_end) \
+  X(snd_config_delete_compound_members) \
+  X(snd_config_get_integer) \
+  X(snd_config_get_bool) \
+  X(snd_dlsym) \
+  X(snd_strerror) \
+  X(snd_lib_error) \
+  X(snd_lib_error_set_handler)
+
+LATE_BINDING_SYMBOL_TABLE_DECLARE_BEGIN(AlsaSymbolTable)
+#define X(sym) \
+    LATE_BINDING_SYMBOL_TABLE_DECLARE_ENTRY(AlsaSymbolTable, sym)
+ALSA_SYMBOLS_LIST
+#undef X
+LATE_BINDING_SYMBOL_TABLE_DECLARE_END(AlsaSymbolTable)
+
+}  // namespace webrtc_adm_linux_alsa
+
+#endif  // WEBRTC_AUDIO_DEVICE_ALSASYMBOLTABLE_LINUX_H
diff --git a/src/modules/audio_device/main/source/linux/audio_device_alsa_linux.cc b/src/modules/audio_device/main/source/linux/audio_device_alsa_linux.cc
new file mode 100644
index 0000000..14e1667
--- /dev/null
+++ b/src/modules/audio_device/main/source/linux/audio_device_alsa_linux.cc
@@ -0,0 +1,2320 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+
+#include "audio_device_utility.h"
+#include "audio_device_alsa_linux.h"
+#include "audio_device_config.h"
+
+#include "event_wrapper.h"
+#include "system_wrappers/interface/sleep.h"
+#include "trace.h"
+#include "thread_wrapper.h"
+
+
+webrtc_adm_linux_alsa::AlsaSymbolTable AlsaSymbolTable;
+
+// Accesses ALSA functions through our late-binding symbol table instead of
+// directly. This way we don't have to link to libasound, which means our binary
+// will work on systems that don't have it.
+#define LATE(sym) \
+  LATESYM_GET(webrtc_adm_linux_alsa::AlsaSymbolTable, &AlsaSymbolTable, sym)
+
+// Redefine these here to be able to do late-binding
+#undef snd_ctl_card_info_alloca
+#define snd_ctl_card_info_alloca(ptr) \
+        do { *ptr = (snd_ctl_card_info_t *) \
+            __builtin_alloca (LATE(snd_ctl_card_info_sizeof)()); \
+            memset(*ptr, 0, LATE(snd_ctl_card_info_sizeof)()); } while (0)
+
+#undef snd_pcm_info_alloca
+#define snd_pcm_info_alloca(pInfo) \
+       do { *pInfo = (snd_pcm_info_t *) \
+       __builtin_alloca (LATE(snd_pcm_info_sizeof)()); \
+       memset(*pInfo, 0, LATE(snd_pcm_info_sizeof)()); } while (0)
+
+// snd_lib_error_handler_t
+void WebrtcAlsaErrorHandler(const char *file,
+                          int line,
+                          const char *function,
+                          int err,
+                          const char *fmt,...){};
+
+namespace webrtc
+{
+static const unsigned int ALSA_PLAYOUT_FREQ = 48000;
+static const unsigned int ALSA_PLAYOUT_CH = 2;
+static const unsigned int ALSA_PLAYOUT_LATENCY = 40*1000; // in us
+static const unsigned int ALSA_CAPTURE_FREQ = 48000;
+static const unsigned int ALSA_CAPTURE_CH = 2;
+static const unsigned int ALSA_CAPTURE_LATENCY = 40*1000; // in us
+static const unsigned int ALSA_PLAYOUT_WAIT_TIMEOUT = 5; // in ms
+static const unsigned int ALSA_CAPTURE_WAIT_TIMEOUT = 5; // in ms
+
+#define FUNC_GET_NUM_OF_DEVICE 0
+#define FUNC_GET_DEVICE_NAME 1
+#define FUNC_GET_DEVICE_NAME_FOR_AN_ENUM 2
+
+AudioDeviceLinuxALSA::AudioDeviceLinuxALSA(const WebRtc_Word32 id) :
+    _ptrAudioBuffer(NULL),
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _ptrThreadRec(NULL),
+    _ptrThreadPlay(NULL),
+    _recThreadID(0),
+    _playThreadID(0),
+    _id(id),
+    _mixerManager(id),
+    _inputDeviceIndex(0),
+    _outputDeviceIndex(0),
+    _inputDeviceIsSpecified(false),
+    _outputDeviceIsSpecified(false),
+    _handleRecord(NULL),
+    _handlePlayout(NULL),
+    _recordingBuffersizeInFrame(0),
+    _recordingPeriodSizeInFrame(0),
+    _playoutBufferSizeInFrame(0),
+    _playoutPeriodSizeInFrame(0),
+    _recordingBufferSizeIn10MS(0),
+    _playoutBufferSizeIn10MS(0),
+    _recordingFramesIn10MS(0),
+    _playoutFramesIn10MS(0),
+    _recordingFreq(ALSA_CAPTURE_FREQ),
+    _playoutFreq(ALSA_PLAYOUT_FREQ),
+    _recChannels(ALSA_CAPTURE_CH),
+    _playChannels(ALSA_PLAYOUT_CH),
+    _recordingBuffer(NULL),
+    _playoutBuffer(NULL),
+    _recordingFramesLeft(0),
+    _playoutFramesLeft(0),
+    _playBufType(AudioDeviceModule::kFixedBufferSize),
+    _initialized(false),
+    _recording(false),
+    _playing(false),
+    _recIsInitialized(false),
+    _playIsInitialized(false),
+    _AGC(false),
+    _recordingDelay(0),
+    _playoutDelay(0),
+    _playWarning(0),
+    _playError(0),
+    _recWarning(0),
+    _recError(0),
+    _playBufDelay(80),
+    _playBufDelayFixed(80)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id,
+                 "%s created", __FUNCTION__);
+}
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceLinuxALSA - dtor
+// ----------------------------------------------------------------------------
+
+AudioDeviceLinuxALSA::~AudioDeviceLinuxALSA()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s destroyed", __FUNCTION__);
+    
+    Terminate();
+
+    // Clean up the recording buffer and playout buffer.
+    if (_recordingBuffer)
+    {
+        delete [] _recordingBuffer;
+        _recordingBuffer = NULL;
+    }
+    if (_playoutBuffer)
+    {
+        delete [] _playoutBuffer;
+        _playoutBuffer = NULL;
+    }
+    delete &_critSect;
+}
+
+void AudioDeviceLinuxALSA::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer)
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    _ptrAudioBuffer = audioBuffer;
+
+    // Inform the AudioBuffer about default settings for this implementation.
+    // Set all values to zero here since the actual settings will be done by
+    // InitPlayout and InitRecording later.
+    _ptrAudioBuffer->SetRecordingSampleRate(0);
+    _ptrAudioBuffer->SetPlayoutSampleRate(0);
+    _ptrAudioBuffer->SetRecordingChannels(0);
+    _ptrAudioBuffer->SetPlayoutChannels(0);
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::ActiveAudioLayer(
+    AudioDeviceModule::AudioLayer& audioLayer) const
+{
+    audioLayer = AudioDeviceModule::kLinuxAlsaAudio;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::Init()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    // Load libasound
+    if (!AlsaSymbolTable.Load())
+    {
+        // Alsa is not installed on
+        // this system
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                   "  failed to load symbol table");
+        return -1;
+    }
+
+    if (_initialized)
+    {
+        return 0;
+    }
+
+    _playWarning = 0;
+    _playError = 0;
+    _recWarning = 0;
+    _recError = 0;
+
+    _initialized = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::Terminate()
+{
+
+    if (!_initialized)
+    {
+        return 0;
+    }
+
+    CriticalSectionScoped lock(&_critSect);
+
+    _mixerManager.Close();
+
+    // RECORDING
+    if (_ptrThreadRec)
+    {
+        ThreadWrapper* tmpThread = _ptrThreadRec;
+        _ptrThreadRec = NULL;
+        _critSect.Leave();
+
+        tmpThread->SetNotAlive();
+
+        if (tmpThread->Stop())
+        {
+            delete tmpThread;
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  failed to close down the rec audio thread");
+        }
+
+        _critSect.Enter();
+    }
+
+    // PLAYOUT
+    if (_ptrThreadPlay)
+    {
+        ThreadWrapper* tmpThread = _ptrThreadPlay;
+        _ptrThreadPlay = NULL;
+        _critSect.Leave();
+
+        tmpThread->SetNotAlive();
+
+        if (tmpThread->Stop())
+        {
+            delete tmpThread;
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  failed to close down the play audio thread");
+        }
+
+        _critSect.Enter();
+    }
+
+    _initialized = false;
+    _outputDeviceIsSpecified = false;
+    _inputDeviceIsSpecified = false;
+
+    return 0;
+}
+
+bool AudioDeviceLinuxALSA::Initialized() const
+{
+    return (_initialized);
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SpeakerIsAvailable(bool& available)
+{
+
+    bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+
+    // Make an attempt to open up the
+    // output mixer corresponding to the currently selected output device.
+    //
+    if (!wasInitialized && InitSpeaker() == -1)
+    {
+        available = false;
+        return 0;
+    }
+
+    // Given that InitSpeaker was successful, we know that a valid speaker
+    // exists
+    available = true;
+
+    // Close the initialized output mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseSpeaker();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::InitSpeaker()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_playing)
+    {
+        return -1;
+    }
+
+    char devName[kAdmMaxDeviceNameSize] = {0};
+    GetDevicesInfo(2, true, _outputDeviceIndex, devName, kAdmMaxDeviceNameSize);
+    return _mixerManager.OpenSpeaker(devName);
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MicrophoneIsAvailable(bool& available)
+{
+
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Make an attempt to open up the
+    // input mixer corresponding to the currently selected output device.
+    //
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        available = false;
+        return 0;
+    }
+
+    // Given that InitMicrophone was successful, we know that a valid
+    // microphone exists
+    available = true;
+
+    // Close the initialized input mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::InitMicrophone()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_recording)
+    {
+        return -1;
+    }
+
+    char devName[kAdmMaxDeviceNameSize] = {0};
+    GetDevicesInfo(2, false, _inputDeviceIndex, devName, kAdmMaxDeviceNameSize);
+    return _mixerManager.OpenMicrophone(devName);
+}
+
+bool AudioDeviceLinuxALSA::SpeakerIsInitialized() const
+{
+    return (_mixerManager.SpeakerIsInitialized());
+}
+
+bool AudioDeviceLinuxALSA::MicrophoneIsInitialized() const
+{
+    return (_mixerManager.MicrophoneIsInitialized());
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SpeakerVolumeIsAvailable(bool& available)
+{
+
+    bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+
+    // Make an attempt to open up the
+    // output mixer corresponding to the currently selected output device.
+    if (!wasInitialized && InitSpeaker() == -1)
+    {
+        // If we end up here it means that the selected speaker has no volume
+        // control.
+        available = false;
+        return 0;
+    }
+
+    // Given that InitSpeaker was successful, we know that a volume control
+    // exists
+    available = true;
+
+    // Close the initialized output mixer
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseSpeaker();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetSpeakerVolume(WebRtc_UWord32 volume)
+{
+
+    return (_mixerManager.SetSpeakerVolume(volume));
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SpeakerVolume(WebRtc_UWord32& volume) const
+{
+
+    WebRtc_UWord32 level(0);
+
+    if (_mixerManager.SpeakerVolume(level) == -1)
+    {
+        return -1;
+    }
+
+    volume = level;
+    
+    return 0;
+}
+
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetWaveOutVolume(WebRtc_UWord16 volumeLeft,
+                                                     WebRtc_UWord16 volumeRight)
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::WaveOutVolume(
+    WebRtc_UWord16& /*volumeLeft*/,
+    WebRtc_UWord16& /*volumeRight*/) const
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MaxSpeakerVolume(
+    WebRtc_UWord32& maxVolume) const
+{
+
+    WebRtc_UWord32 maxVol(0);
+
+    if (_mixerManager.MaxSpeakerVolume(maxVol) == -1)
+    {
+        return -1;
+    }
+
+    maxVolume = maxVol;
+    
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MinSpeakerVolume(
+    WebRtc_UWord32& minVolume) const
+{
+
+    WebRtc_UWord32 minVol(0);
+
+    if (_mixerManager.MinSpeakerVolume(minVol) == -1)
+    {
+        return -1;
+    }
+
+    minVolume = minVol;
+    
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SpeakerVolumeStepSize(
+    WebRtc_UWord16& stepSize) const
+{
+
+    WebRtc_UWord16 delta(0); 
+     
+    if (_mixerManager.SpeakerVolumeStepSize(delta) == -1)
+    {
+        return -1;
+    }
+
+    stepSize = delta;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SpeakerMuteIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+    bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+
+    // Make an attempt to open up the
+    // output mixer corresponding to the currently selected output device.
+    //
+    if (!wasInitialized && InitSpeaker() == -1)
+    {
+        // If we end up here it means that the selected speaker has no volume
+        // control, hence it is safe to state that there is no mute control
+        // already at this stage.
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected speaker has a mute control
+    _mixerManager.SpeakerMuteIsAvailable(isAvailable);
+
+    available = isAvailable;
+
+    // Close the initialized output mixer
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseSpeaker();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetSpeakerMute(bool enable)
+{
+    return (_mixerManager.SetSpeakerMute(enable));
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SpeakerMute(bool& enabled) const
+{
+
+    bool muted(0); 
+        
+    if (_mixerManager.SpeakerMute(muted) == -1)
+    {
+        return -1;
+    }
+
+    enabled = muted;
+    
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MicrophoneMuteIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Make an attempt to open up the
+    // input mixer corresponding to the currently selected input device.
+    //
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        // If we end up here it means that the selected microphone has no volume
+        // control, hence it is safe to state that there is no mute control
+        // already at this stage.
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected microphone has a mute control
+    //
+    _mixerManager.MicrophoneMuteIsAvailable(isAvailable);
+    available = isAvailable;
+
+    // Close the initialized input mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetMicrophoneMute(bool enable)
+{
+    return (_mixerManager.SetMicrophoneMute(enable));
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MicrophoneMute(bool& enabled) const
+{
+
+    bool muted(0); 
+        
+    if (_mixerManager.MicrophoneMute(muted) == -1)
+    {
+        return -1;
+    }
+
+    enabled = muted;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MicrophoneBoostIsAvailable(bool& available)
+{
+    
+    bool isAvailable(false);
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Enumerate all avaliable microphone and make an attempt to open up the
+    // input mixer corresponding to the currently selected input device.
+    //
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        // If we end up here it means that the selected microphone has no volume
+        // control, hence it is safe to state that there is no boost control
+        // already at this stage.
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected microphone has a boost control
+    _mixerManager.MicrophoneBoostIsAvailable(isAvailable);
+    available = isAvailable;
+
+    // Close the initialized input mixer
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetMicrophoneBoost(bool enable)
+{
+
+    return (_mixerManager.SetMicrophoneBoost(enable));
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MicrophoneBoost(bool& enabled) const
+{
+
+    bool onOff(0); 
+        
+    if (_mixerManager.MicrophoneBoost(onOff) == -1)
+    {
+        return -1;
+    }
+
+    enabled = onOff;
+    
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::StereoRecordingIsAvailable(bool& available)
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    // If we already have initialized in stereo it's obviously available
+    if (_recIsInitialized && (2 == _recChannels))
+    {
+        available = true;
+        return 0;
+    }
+
+    // Save rec states and the number of rec channels
+    bool recIsInitialized = _recIsInitialized;
+    bool recording = _recording;
+    int recChannels = _recChannels;
+
+    available = false;
+    
+    // Stop/uninitialize recording if initialized (and possibly started)
+    if (_recIsInitialized)
+    {
+        StopRecording();
+    }
+
+    // Try init in stereo;
+    _recChannels = 2;
+    if (InitRecording() == 0)
+    {
+        available = true;
+    }
+
+    // Stop/uninitialize recording
+    StopRecording();
+
+    // Recover previous states
+    _recChannels = recChannels;
+    if (recIsInitialized)
+    {
+        InitRecording();
+    }
+    if (recording)
+    {
+        StartRecording();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetStereoRecording(bool enable)
+{
+
+    if (enable)
+        _recChannels = 2;
+    else
+        _recChannels = 1;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::StereoRecording(bool& enabled) const
+{
+
+    if (_recChannels == 2)
+        enabled = true;
+    else
+        enabled = false;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::StereoPlayoutIsAvailable(bool& available)
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    // If we already have initialized in stereo it's obviously available
+    if (_playIsInitialized && (2 == _playChannels))
+    {
+        available = true;
+        return 0;
+    }
+
+    // Save rec states and the number of rec channels
+    bool playIsInitialized = _playIsInitialized;
+    bool playing = _playing;
+    int playChannels = _playChannels;
+
+    available = false;
+    
+    // Stop/uninitialize recording if initialized (and possibly started)
+    if (_playIsInitialized)
+    {
+        StopPlayout();
+    }
+
+    // Try init in stereo;
+    _playChannels = 2;
+    if (InitPlayout() == 0)
+    {
+        available = true;
+    }
+
+    // Stop/uninitialize recording
+    StopPlayout();
+
+    // Recover previous states
+    _playChannels = playChannels;
+    if (playIsInitialized)
+    {
+        InitPlayout();
+    }
+    if (playing)
+    {
+        StartPlayout();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetStereoPlayout(bool enable)
+{
+
+    if (enable)
+        _playChannels = 2;
+    else
+        _playChannels = 1;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::StereoPlayout(bool& enabled) const
+{
+
+    if (_playChannels == 2)
+        enabled = true;
+    else
+        enabled = false;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetAGC(bool enable)
+{
+
+    _AGC = enable;
+
+    return 0;
+}
+
+bool AudioDeviceLinuxALSA::AGC() const
+{
+
+    return _AGC;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MicrophoneVolumeIsAvailable(bool& available)
+{
+
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Make an attempt to open up the
+    // input mixer corresponding to the currently selected output device.
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        // If we end up here it means that the selected microphone has no volume
+        // control.
+        available = false;
+        return 0;
+    }
+
+    // Given that InitMicrophone was successful, we know that a volume control
+    // exists
+    available = true;
+
+    // Close the initialized input mixer
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetMicrophoneVolume(WebRtc_UWord32 volume)
+{
+
+    return (_mixerManager.SetMicrophoneVolume(volume));
+ 
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MicrophoneVolume(WebRtc_UWord32& volume) const
+{
+
+    WebRtc_UWord32 level(0);
+
+    if (_mixerManager.MicrophoneVolume(level) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  failed to retrive current microphone level");
+        return -1;
+    }
+
+    volume = level;
+    
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MaxMicrophoneVolume(
+    WebRtc_UWord32& maxVolume) const
+{
+
+    WebRtc_UWord32 maxVol(0);
+
+    if (_mixerManager.MaxMicrophoneVolume(maxVol) == -1)
+    {
+        return -1;
+    }
+
+    maxVolume = maxVol;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MinMicrophoneVolume(
+    WebRtc_UWord32& minVolume) const
+{
+
+    WebRtc_UWord32 minVol(0);
+
+    if (_mixerManager.MinMicrophoneVolume(minVol) == -1)
+    {
+        return -1;
+    }
+
+    minVolume = minVol;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::MicrophoneVolumeStepSize(
+    WebRtc_UWord16& stepSize) const
+{
+
+    WebRtc_UWord16 delta(0); 
+        
+    if (_mixerManager.MicrophoneVolumeStepSize(delta) == -1)
+    {
+        return -1;
+    }
+
+    stepSize = delta;
+
+    return 0;
+}
+
+WebRtc_Word16 AudioDeviceLinuxALSA::PlayoutDevices()
+{
+
+    return (WebRtc_Word16)GetDevicesInfo(0, true);
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetPlayoutDevice(WebRtc_UWord16 index)
+{
+
+    if (_playIsInitialized)
+    {
+        return -1;
+    }
+
+    WebRtc_UWord32 nDevices = GetDevicesInfo(0, true);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "  number of availiable audio output devices is %u", nDevices);
+
+    if (index > (nDevices-1))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  device index is out of range [0,%u]", (nDevices-1));
+        return -1;
+    }
+
+    _outputDeviceIndex = index;
+    _outputDeviceIsSpecified = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetPlayoutDevice(
+    AudioDeviceModule::WindowsDeviceType /*device*/)
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                 "WindowsDeviceType not supported");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::PlayoutDeviceName(
+    WebRtc_UWord16 index,
+    char name[kAdmMaxDeviceNameSize],
+    char guid[kAdmMaxGuidSize])
+{
+
+    const WebRtc_UWord16 nDevices(PlayoutDevices());
+
+    if ((index > (nDevices-1)) || (name == NULL))
+    {
+        return -1;
+    }
+
+    memset(name, 0, kAdmMaxDeviceNameSize);
+
+    if (guid != NULL)
+    {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+
+    return GetDevicesInfo(1, true, index, name, kAdmMaxDeviceNameSize);
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::RecordingDeviceName(
+    WebRtc_UWord16 index,
+    char name[kAdmMaxDeviceNameSize],
+    char guid[kAdmMaxGuidSize])
+{
+
+    const WebRtc_UWord16 nDevices(RecordingDevices());
+
+    if ((index > (nDevices-1)) || (name == NULL))
+    {
+        return -1;
+    }
+
+    memset(name, 0, kAdmMaxDeviceNameSize);
+
+    if (guid != NULL)
+    {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+    
+    return GetDevicesInfo(1, false, index, name, kAdmMaxDeviceNameSize);
+}
+
+WebRtc_Word16 AudioDeviceLinuxALSA::RecordingDevices()
+{
+
+    return (WebRtc_Word16)GetDevicesInfo(0, false);
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetRecordingDevice(WebRtc_UWord16 index)
+{
+
+    if (_recIsInitialized)
+    {
+        return -1;
+    }
+
+    WebRtc_UWord32 nDevices = GetDevicesInfo(0, false);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "  number of availiable audio input devices is %u", nDevices);
+
+    if (index > (nDevices-1))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  device index is out of range [0,%u]", (nDevices-1));
+        return -1;
+    }
+
+    _inputDeviceIndex = index;
+    _inputDeviceIsSpecified = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingDevice II (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetRecordingDevice(
+    AudioDeviceModule::WindowsDeviceType /*device*/)
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                 "WindowsDeviceType not supported");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::PlayoutIsAvailable(bool& available)
+{
+    
+    available = false;
+
+    // Try to initialize the playout side with mono
+    // Assumes that user set num channels after calling this function
+    _playChannels = 1;
+    WebRtc_Word32 res = InitPlayout();
+
+    // Cancel effect of initialization
+    StopPlayout();
+
+    if (res != -1)
+    {
+        available = true;
+    }
+    else
+    {
+        // It may be possible to play out in stereo
+        res = StereoPlayoutIsAvailable(available);
+        if (available)
+        {
+            // Then set channels to 2 so InitPlayout doesn't fail
+            _playChannels = 2;
+        }
+    }
+    
+    return res;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::RecordingIsAvailable(bool& available)
+{
+    
+    available = false;
+
+    // Try to initialize the recording side with mono
+    // Assumes that user set num channels after calling this function
+    _recChannels = 1;
+    WebRtc_Word32 res = InitRecording();
+
+    // Cancel effect of initialization
+    StopRecording();
+
+    if (res != -1)
+    {
+        available = true;
+    }
+    else
+    {
+        // It may be possible to record in stereo
+        res = StereoRecordingIsAvailable(available);
+        if (available)
+        {
+            // Then set channels to 2 so InitPlayout doesn't fail
+            _recChannels = 2;
+        }
+    }
+    
+    return res;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::InitPlayout()
+{
+
+    int errVal = 0;
+
+    CriticalSectionScoped lock(&_critSect);
+    if (_playing)
+    {
+        return -1;
+    }
+
+    if (!_outputDeviceIsSpecified)
+    {
+        return -1;
+    }
+
+    if (_playIsInitialized)
+    {
+        return 0;
+    }
+    // Initialize the speaker (devices might have been added or removed)
+    if (InitSpeaker() == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  InitSpeaker() failed");
+    }
+
+    // Start by closing any existing wave-output devices
+    //
+    if (_handlePlayout != NULL)
+    {
+        LATE(snd_pcm_close)(_handlePlayout);
+        _handlePlayout = NULL;
+        _playIsInitialized = false;
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  Error closing current playout sound device, error:"
+                         " %s", LATE(snd_strerror)(errVal));
+        }
+    }
+
+    // Open PCM device for playout
+    char deviceName[kAdmMaxDeviceNameSize] = {0};
+    GetDevicesInfo(2, true, _outputDeviceIndex, deviceName,
+                   kAdmMaxDeviceNameSize);
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "  InitPlayout open (%s)", deviceName);
+
+    errVal = LATE(snd_pcm_open)
+                 (&_handlePlayout,
+                  deviceName,
+                  SND_PCM_STREAM_PLAYBACK,
+                  SND_PCM_NONBLOCK);
+
+    if (errVal == -EBUSY) // Device busy - try some more!
+    {
+        for (int i=0; i < 5; i++)
+        {
+            SleepMs(1000);
+            errVal = LATE(snd_pcm_open)
+                         (&_handlePlayout,
+                          deviceName,
+                          SND_PCM_STREAM_PLAYBACK,
+                          SND_PCM_NONBLOCK);
+            if (errVal == 0)
+            {
+                break;
+            }
+        }
+    }
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     unable to open playback device: %s (%d)",
+                     LATE(snd_strerror)(errVal),
+                     errVal);
+        _handlePlayout = NULL;
+        return -1;
+    }
+
+    _playoutFramesIn10MS = _playoutFreq/100;
+    if ((errVal = LATE(snd_pcm_set_params)( _handlePlayout,
+#if defined(WEBRTC_BIG_ENDIAN)
+        SND_PCM_FORMAT_S16_BE,
+#else
+        SND_PCM_FORMAT_S16_LE, //format
+#endif
+        SND_PCM_ACCESS_RW_INTERLEAVED, //access
+        _playChannels, //channels
+        _playoutFreq, //rate
+        1, //soft_resample
+        ALSA_PLAYOUT_LATENCY //40*1000 //latency required overall latency in us
+    )) < 0)
+    {   /* 0.5sec */
+        _playoutFramesIn10MS = 0;
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     unable to set playback device: %s (%d)",
+                     LATE(snd_strerror)(errVal),
+                     errVal);
+        ErrorRecovery(errVal, _handlePlayout);
+        errVal = LATE(snd_pcm_close)(_handlePlayout);
+        _handlePlayout = NULL;
+        return -1;
+    }
+
+    errVal = LATE(snd_pcm_get_params)(_handlePlayout,
+        &_playoutBufferSizeInFrame, &_playoutPeriodSizeInFrame);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "    snd_pcm_get_params %s",
+                     LATE(snd_strerror)(errVal),
+                     errVal);
+        _playoutBufferSizeInFrame = 0;
+        _playoutPeriodSizeInFrame = 0;
+    }
+    else {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "    playout snd_pcm_get_params "
+                     "buffer_size:%d period_size :%d",
+                     _playoutBufferSizeInFrame, _playoutPeriodSizeInFrame);
+    }
+
+    if (_ptrAudioBuffer)
+    {
+        // Update webrtc audio buffer with the selected parameters
+        _ptrAudioBuffer->SetPlayoutSampleRate(_playoutFreq);
+        _ptrAudioBuffer->SetPlayoutChannels(_playChannels);
+    }
+
+    // Set play buffer size
+    _playoutBufferSizeIn10MS = LATE(snd_pcm_frames_to_bytes)(
+        _handlePlayout, _playoutFramesIn10MS);
+
+    // Init varaibles used for play
+    _playWarning = 0;
+    _playError = 0;
+
+    if (_handlePlayout != NULL)
+    {
+        _playIsInitialized = true;
+        return 0;
+    }
+    else
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::InitRecording()
+{
+
+    int errVal = 0;
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_recording)
+    {
+        return -1;
+    }
+
+    if (!_inputDeviceIsSpecified)
+    {
+        return -1;
+    }
+
+    if (_recIsInitialized)
+    {
+        return 0;
+    }
+
+    // Initialize the microphone (devices might have been added or removed)
+    if (InitMicrophone() == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                   "  InitMicrophone() failed");
+    }
+
+    // Start by closing any existing pcm-input devices
+    //
+    if (_handleRecord != NULL)
+    {
+        int errVal = LATE(snd_pcm_close)(_handleRecord);
+        _handleRecord = NULL;
+        _recIsInitialized = false;
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error closing current recording sound device,"
+                         " error: %s",
+                         LATE(snd_strerror)(errVal));
+        }
+    }
+
+    // Open PCM device for recording
+    // The corresponding settings for playout are made after the record settings
+    char deviceName[kAdmMaxDeviceNameSize] = {0};
+    GetDevicesInfo(2, false, _inputDeviceIndex, deviceName,
+                   kAdmMaxDeviceNameSize);
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "InitRecording open (%s)", deviceName);
+    errVal = LATE(snd_pcm_open)
+                 (&_handleRecord,
+                  deviceName,
+                  SND_PCM_STREAM_CAPTURE,
+                  SND_PCM_NONBLOCK);
+
+    // Available modes: 0 = blocking, SND_PCM_NONBLOCK, SND_PCM_ASYNC
+    if (errVal == -EBUSY) // Device busy - try some more!
+    {
+        for (int i=0; i < 5; i++)
+        {
+            SleepMs(1000);
+            errVal = LATE(snd_pcm_open)
+                         (&_handleRecord,
+                          deviceName,
+                          SND_PCM_STREAM_CAPTURE,
+                          SND_PCM_NONBLOCK);
+            if (errVal == 0)
+            {
+                break;
+            }
+        }
+    }
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "    unable to open record device: %s",
+                     LATE(snd_strerror)(errVal));
+        _handleRecord = NULL;
+        return -1;
+    }
+
+    _recordingFramesIn10MS = _recordingFreq/100;
+    if ((errVal = LATE(snd_pcm_set_params)(_handleRecord,
+#if defined(WEBRTC_BIG_ENDIAN)
+        SND_PCM_FORMAT_S16_BE, //format
+#else
+        SND_PCM_FORMAT_S16_LE, //format
+#endif
+        SND_PCM_ACCESS_RW_INTERLEAVED, //access
+        _recChannels, //channels
+        _recordingFreq, //rate
+        1, //soft_resample
+        ALSA_CAPTURE_LATENCY //latency in us
+    )) < 0)
+    {
+         // Fall back to another mode then.
+         if (_recChannels == 1)
+           _recChannels = 2;
+         else
+           _recChannels = 1;
+
+         if ((errVal = LATE(snd_pcm_set_params)(_handleRecord,
+#if defined(WEBRTC_BIG_ENDIAN)
+             SND_PCM_FORMAT_S16_BE, //format
+#else
+             SND_PCM_FORMAT_S16_LE, //format
+#endif
+             SND_PCM_ACCESS_RW_INTERLEAVED, //access
+             _recChannels, //channels
+             _recordingFreq, //rate
+             1, //soft_resample
+             ALSA_CAPTURE_LATENCY //latency in us
+         )) < 0)
+         {
+             _recordingFramesIn10MS = 0;
+             WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                          "    unable to set record settings: %s (%d)",
+                          LATE(snd_strerror)(errVal), errVal);
+             ErrorRecovery(errVal, _handleRecord);
+             errVal = LATE(snd_pcm_close)(_handleRecord);
+             _handleRecord = NULL;
+             return -1;
+         }
+    }
+
+    errVal = LATE(snd_pcm_get_params)(_handleRecord,
+        &_recordingBuffersizeInFrame, &_recordingPeriodSizeInFrame);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "    snd_pcm_get_params %s",
+                     LATE(snd_strerror)(errVal), errVal);
+        _recordingBuffersizeInFrame = 0;
+        _recordingPeriodSizeInFrame = 0;
+    }
+    else {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "    capture snd_pcm_get_params "
+                     "buffer_size:%d period_size:%d",
+                     _recordingBuffersizeInFrame, _recordingPeriodSizeInFrame);
+    }
+
+    if (_ptrAudioBuffer)
+    {
+        // Update webrtc audio buffer with the selected parameters
+        _ptrAudioBuffer->SetRecordingSampleRate(_recordingFreq);
+        _ptrAudioBuffer->SetRecordingChannels(_recChannels);
+    }
+
+    // Set rec buffer size and create buffer
+    _recordingBufferSizeIn10MS = LATE(snd_pcm_frames_to_bytes)(
+        _handleRecord, _recordingFramesIn10MS);
+
+    if (_handleRecord != NULL)
+    {
+        // Mark recording side as initialized
+        _recIsInitialized = true;
+        return 0;
+    }
+    else
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::StartRecording()
+{
+
+    if (!_recIsInitialized)
+    {
+        return -1;
+    }
+
+    if (_recording)
+    {
+        return 0;
+    }
+
+    _recording = true;
+
+    int errVal = 0;
+    _recordingFramesLeft = _recordingFramesIn10MS;
+
+    // Make sure we only create the buffer once.
+    if (!_recordingBuffer)
+        _recordingBuffer = new WebRtc_Word8[_recordingBufferSizeIn10MS];
+    if (!_recordingBuffer)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "   failed to alloc recording buffer");
+        _recording = false;
+        return -1;
+    }
+    // RECORDING
+    const char* threadName = "webrtc_audio_module_capture_thread";
+    _ptrThreadRec = ThreadWrapper::CreateThread(RecThreadFunc,
+                                                this,
+                                                kRealtimePriority,
+                                                threadName);
+    if (_ptrThreadRec == NULL)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "  failed to create the rec audio thread");
+        _recording = false;
+        delete [] _recordingBuffer;
+        _recordingBuffer = NULL;
+        return -1;
+    }
+
+    unsigned int threadID(0);
+    if (!_ptrThreadRec->Start(threadID))
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "  failed to start the rec audio thread");
+        _recording = false;
+        delete _ptrThreadRec;
+        _ptrThreadRec = NULL;
+        delete [] _recordingBuffer;
+        _recordingBuffer = NULL;
+        return -1;
+    }
+    _recThreadID = threadID;
+
+    errVal = LATE(snd_pcm_prepare)(_handleRecord);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     capture snd_pcm_prepare failed (%s)\n",
+                     LATE(snd_strerror)(errVal));
+        // just log error
+        // if snd_pcm_open fails will return -1
+    }
+
+    errVal = LATE(snd_pcm_start)(_handleRecord);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     capture snd_pcm_start err: %s",
+                     LATE(snd_strerror)(errVal));
+        errVal = LATE(snd_pcm_start)(_handleRecord);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     capture snd_pcm_start 2nd try err: %s",
+                         LATE(snd_strerror)(errVal));
+            StopRecording();
+            return -1;
+        }
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::StopRecording()
+{
+
+    {
+      CriticalSectionScoped lock(&_critSect);
+
+      if (!_recIsInitialized)
+      {
+          return 0;
+      }
+
+      if (_handleRecord == NULL)
+      {
+          return -1;
+      }
+
+      // Make sure we don't start recording (it's asynchronous).
+      _recIsInitialized = false;
+      _recording = false;
+    }
+
+    if (_ptrThreadRec && !_ptrThreadRec->Stop())
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "    failed to stop the rec audio thread");
+        return -1;
+    }
+    else {
+        delete _ptrThreadRec;
+        _ptrThreadRec = NULL;
+    }
+
+    CriticalSectionScoped lock(&_critSect);
+    _recordingFramesLeft = 0;
+    if (_recordingBuffer)
+    {
+        delete [] _recordingBuffer;
+        _recordingBuffer = NULL;
+    }
+
+    // Stop and close pcm recording device.
+    int errVal = LATE(snd_pcm_drop)(_handleRecord);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     Error stop recording: %s",
+                     LATE(snd_strerror)(errVal));
+        return -1;
+    }
+
+    errVal = LATE(snd_pcm_close)(_handleRecord);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     Error closing record sound device, error: %s",
+                     LATE(snd_strerror)(errVal));
+        return -1;
+    }
+
+    // Check if we have muted and unmute if so.
+    bool muteEnabled = false;
+    MicrophoneMute(muteEnabled);
+    if (muteEnabled)
+    {
+        SetMicrophoneMute(false);
+    }
+
+    // set the pcm input handle to NULL
+    _handleRecord = NULL;
+    return 0;
+}
+
+bool AudioDeviceLinuxALSA::RecordingIsInitialized() const
+{
+    return (_recIsInitialized);
+}
+
+bool AudioDeviceLinuxALSA::Recording() const
+{
+    return (_recording);
+}
+
+bool AudioDeviceLinuxALSA::PlayoutIsInitialized() const
+{
+    return (_playIsInitialized);
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::StartPlayout()
+{
+    if (!_playIsInitialized)
+    {
+        return -1;
+    }
+
+    if (_playing)
+    {
+        return 0;
+    }
+
+    _playing = true;
+
+    _playoutFramesLeft = 0;
+    if (!_playoutBuffer)
+        _playoutBuffer = new WebRtc_Word8[_playoutBufferSizeIn10MS];
+    if (!_playoutBuffer)
+    {
+      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                   "    failed to alloc playout buf");
+      _playing = false;
+      return -1;
+    }
+
+    // PLAYOUT
+    const char* threadName = "webrtc_audio_module_play_thread";
+    _ptrThreadPlay =  ThreadWrapper::CreateThread(PlayThreadFunc,
+                                                  this,
+                                                  kRealtimePriority,
+                                                  threadName);
+    if (_ptrThreadPlay == NULL)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "    failed to create the play audio thread");
+        _playing = false;
+        delete [] _playoutBuffer;
+        _playoutBuffer = NULL;
+        return -1;
+    }
+
+    unsigned int threadID(0);
+    if (!_ptrThreadPlay->Start(threadID))
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "  failed to start the play audio thread");
+        _playing = false;
+        delete _ptrThreadPlay;
+        _ptrThreadPlay = NULL;
+        delete [] _playoutBuffer;
+        _playoutBuffer = NULL;
+        return -1;
+    }
+    _playThreadID = threadID;
+
+    int errVal = LATE(snd_pcm_prepare)(_handlePlayout);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "     playout snd_pcm_prepare failed (%s)\n",
+                     LATE(snd_strerror)(errVal));
+        // just log error
+        // if snd_pcm_open fails will return -1
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::StopPlayout()
+{
+
+    {
+        CriticalSectionScoped lock(&_critSect);
+
+        if (!_playIsInitialized)
+        {
+            return 0;
+        }
+
+        if (_handlePlayout == NULL)
+        {
+            return -1;
+        }
+
+        _playing = false;
+    }
+
+    // stop playout thread first
+    if (_ptrThreadPlay && !_ptrThreadPlay->Stop())
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to stop the play audio thread");
+        return -1;
+    }
+    else {
+        delete _ptrThreadPlay;
+        _ptrThreadPlay = NULL;
+    }
+
+    CriticalSectionScoped lock(&_critSect);
+
+    _playoutFramesLeft = 0;
+    delete [] _playoutBuffer;
+    _playoutBuffer = NULL;
+
+    // stop and close pcm playout device
+    int errVal = LATE(snd_pcm_drop)(_handlePlayout);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "    Error stop playing: %s",
+                     LATE(snd_strerror)(errVal));
+    }
+
+    errVal = LATE(snd_pcm_close)(_handlePlayout);
+     if (errVal < 0)
+         WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                      "    Error closing playout sound device, error: %s",
+                      LATE(snd_strerror)(errVal));
+
+     // set the pcm input handle to NULL
+     _playIsInitialized = false;
+     _handlePlayout = NULL;
+     WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                  "  handle_playout is now set to NULL");
+
+     return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::PlayoutDelay(WebRtc_UWord16& delayMS) const
+{
+    delayMS = (WebRtc_UWord16)_playoutDelay * 1000 / _playoutFreq;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::RecordingDelay(WebRtc_UWord16& delayMS) const
+{
+    // Adding 10ms adjusted value to the record delay due to 10ms buffering.
+    delayMS = (WebRtc_UWord16)(10 + _recordingDelay * 1000 / _recordingFreq);
+    return 0;
+}
+
+bool AudioDeviceLinuxALSA::Playing() const
+{
+    return (_playing);
+}
+// ----------------------------------------------------------------------------
+//  SetPlayoutBuffer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceLinuxALSA::SetPlayoutBuffer(
+    const AudioDeviceModule::BufferType type,
+    WebRtc_UWord16 sizeMS)
+{
+    _playBufType = type;
+    if (type == AudioDeviceModule::kFixedBufferSize)
+    {
+        _playBufDelayFixed = sizeMS;
+    }
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::PlayoutBuffer(
+    AudioDeviceModule::BufferType& type,
+    WebRtc_UWord16& sizeMS) const
+{
+    type = _playBufType;
+    if (type == AudioDeviceModule::kFixedBufferSize)
+    {
+        sizeMS = _playBufDelayFixed; 
+    }
+    else
+    {
+        sizeMS = _playBufDelay; 
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::CPULoad(WebRtc_UWord16& load) const
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+               "  API call not supported on this platform");
+    return -1;
+}
+
+bool AudioDeviceLinuxALSA::PlayoutWarning() const
+{
+    return (_playWarning > 0);
+}
+
+bool AudioDeviceLinuxALSA::PlayoutError() const
+{
+    return (_playError > 0);
+}
+
+bool AudioDeviceLinuxALSA::RecordingWarning() const
+{
+    return (_recWarning > 0);
+}
+
+bool AudioDeviceLinuxALSA::RecordingError() const
+{
+    return (_recError > 0);
+}
+
+void AudioDeviceLinuxALSA::ClearPlayoutWarning()
+{
+    _playWarning = 0;
+}
+
+void AudioDeviceLinuxALSA::ClearPlayoutError()
+{
+    _playError = 0;
+}
+
+void AudioDeviceLinuxALSA::ClearRecordingWarning()
+{
+    _recWarning = 0;
+}
+
+void AudioDeviceLinuxALSA::ClearRecordingError()
+{
+    _recError = 0;
+}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+WebRtc_Word32 AudioDeviceLinuxALSA::GetDevicesInfo(
+    const WebRtc_Word32 function,
+    const bool playback,
+    const WebRtc_Word32 enumDeviceNo,
+    char* enumDeviceName,
+    const WebRtc_Word32 ednLen) const
+{
+    
+    // Device enumeration based on libjingle implementation
+    // by Tristan Schmelcher at Google Inc.
+
+    const char *type = playback ? "Output" : "Input";
+    // dmix and dsnoop are only for playback and capture, respectively, but ALSA
+    // stupidly includes them in both lists.
+    const char *ignorePrefix = playback ? "dsnoop:" : "dmix:" ;
+    // (ALSA lists many more "devices" of questionable interest, but we show them
+    // just in case the weird devices may actually be desirable for some
+    // users/systems.)
+
+    int err;
+    int enumCount(0);
+    bool keepSearching(true);
+
+    // From Chromium issue 95797
+    // Loop through the sound cards to get Alsa device hints.
+    // Don't use snd_device_name_hint(-1,..) since there is a access violation
+    // inside this ALSA API with libasound.so.2.0.0.
+    int card = -1;
+    while (!(LATE(snd_card_next)(&card)) && (card >= 0) && keepSearching) {
+        void **hints;
+        err = LATE(snd_device_name_hint)(card, "pcm", &hints);
+        if (err != 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "GetDevicesInfo - device name hint error: %s",
+                         LATE(snd_strerror)(err));
+            return -1;
+        }
+
+        enumCount++; // default is 0
+        if ((function == FUNC_GET_DEVICE_NAME ||
+            function == FUNC_GET_DEVICE_NAME_FOR_AN_ENUM) && enumDeviceNo == 0)
+        {
+            strcpy(enumDeviceName, "default");
+
+            err = LATE(snd_device_name_free_hint)(hints);
+            if (err != 0)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                             "GetDevicesInfo - device name free hint error: %s",
+                             LATE(snd_strerror)(err));
+            }
+
+            return 0;
+        }
+
+        for (void **list = hints; *list != NULL; ++list)
+        {
+            char *actualType = LATE(snd_device_name_get_hint)(*list, "IOID");
+            if (actualType)
+            {   // NULL means it's both.
+                bool wrongType = (strcmp(actualType, type) != 0);
+                free(actualType);
+                if (wrongType)
+                {
+                    // Wrong type of device (i.e., input vs. output).
+                    continue;
+                }
+            }
+
+            char *name = LATE(snd_device_name_get_hint)(*list, "NAME");
+            if (!name)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                             "Device has no name");
+                // Skip it.
+                continue;
+            }
+
+            // Now check if we actually want to show this device.
+            if (strcmp(name, "default") != 0 &&
+                strcmp(name, "null") != 0 &&
+                strcmp(name, "pulse") != 0 &&
+                strncmp(name, ignorePrefix, strlen(ignorePrefix)) != 0)
+            {
+                // Yes, we do.
+                char *desc = LATE(snd_device_name_get_hint)(*list, "DESC");
+                if (!desc)
+                {
+                    // Virtual devices don't necessarily have descriptions.
+                    // Use their names instead.
+                    desc = name;
+                }
+
+                if (FUNC_GET_NUM_OF_DEVICE == function)
+                {
+                    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                                 "    Enum device %d - %s", enumCount, name);
+
+                }
+                if ((FUNC_GET_DEVICE_NAME == function) &&
+                    (enumDeviceNo == enumCount))
+                {
+                    // We have found the enum device, copy the name to buffer.
+                    strncpy(enumDeviceName, desc, ednLen);
+                    enumDeviceName[ednLen-1] = '\0';
+                    keepSearching = false;
+                    // Replace '\n' with '-'.
+                    char * pret = strchr(enumDeviceName, '\n'/*0xa*/); //LF
+                    if (pret)
+                        *pret = '-';
+                }
+                if ((FUNC_GET_DEVICE_NAME_FOR_AN_ENUM == function) &&
+                    (enumDeviceNo == enumCount))
+                {
+                    // We have found the enum device, copy the name to buffer.
+                    strncpy(enumDeviceName, name, ednLen);
+                    enumDeviceName[ednLen-1] = '\0';
+                    keepSearching = false;
+                }
+
+                if (keepSearching)
+                    ++enumCount;
+
+                if (desc != name)
+                    free(desc);
+            }
+
+            free(name);
+
+            if (!keepSearching)
+                break;
+        }
+
+        err = LATE(snd_device_name_free_hint)(hints);
+        if (err != 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "GetDevicesInfo - device name free hint error: %s",
+                         LATE(snd_strerror)(err));
+            // Continue and return true anyway, since we did get the whole list.
+        }
+    }
+
+    if (FUNC_GET_NUM_OF_DEVICE == function)
+    {
+        if (enumCount == 1) // only default?
+            enumCount = 0;
+        return enumCount; // Normal return point for function 0
+    }
+
+    if (keepSearching)
+    {
+        // If we get here for function 1 and 2, we didn't find the specified
+        // enum device.
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "GetDevicesInfo - Could not find device name or numbers");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::InputSanityCheckAfterUnlockedPeriod() const
+{
+    if (_handleRecord == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  input state has been modified during unlocked period");
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::OutputSanityCheckAfterUnlockedPeriod() const
+{
+    if (_handlePlayout == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  output state has been modified during unlocked period");
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxALSA::ErrorRecovery(WebRtc_Word32 error,
+                                                  snd_pcm_t* deviceHandle)
+{
+    int st = LATE(snd_pcm_state)(deviceHandle);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+               "Trying to recover from error: %s (%d) (state %d)",
+               (LATE(snd_pcm_stream)(deviceHandle) == SND_PCM_STREAM_CAPTURE) ?
+                   "capture" : "playout", LATE(snd_strerror)(error), error, st);
+
+    // It is recommended to use snd_pcm_recover for all errors. If that function
+    // cannot handle the error, the input error code will be returned, otherwise
+    // 0 is returned. From snd_pcm_recover API doc: "This functions handles
+    // -EINTR (4) (interrupted system call), -EPIPE (32) (playout overrun or
+    // capture underrun) and -ESTRPIPE (86) (stream is suspended) error codes
+    // trying to prepare given stream for next I/O."
+
+    /** Open */
+    //    SND_PCM_STATE_OPEN = 0,
+    /** Setup installed */
+    //    SND_PCM_STATE_SETUP,
+    /** Ready to start */
+    //    SND_PCM_STATE_PREPARED,
+    /** Running */
+    //    SND_PCM_STATE_RUNNING,
+    /** Stopped: underrun (playback) or overrun (capture) detected */
+    //    SND_PCM_STATE_XRUN,= 4
+    /** Draining: running (playback) or stopped (capture) */
+    //    SND_PCM_STATE_DRAINING,
+    /** Paused */
+    //    SND_PCM_STATE_PAUSED,
+    /** Hardware is suspended */
+    //    SND_PCM_STATE_SUSPENDED,
+    //  ** Hardware is disconnected */
+    //    SND_PCM_STATE_DISCONNECTED,
+    //    SND_PCM_STATE_LAST = SND_PCM_STATE_DISCONNECTED
+
+    // snd_pcm_recover isn't available in older alsa, e.g. on the FC4 machine
+    // in Sthlm lab.
+
+    int res = LATE(snd_pcm_recover)(deviceHandle, error, 1);
+    if (0 == res)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                   "    Recovery - snd_pcm_recover OK");
+
+        if ((error == -EPIPE || error == -ESTRPIPE) && // Buf underrun/overrun.
+            _recording &&
+            LATE(snd_pcm_stream)(deviceHandle) == SND_PCM_STREAM_CAPTURE)
+        {
+            // For capture streams we also have to repeat the explicit start()
+            // to get data flowing again.
+            int err = LATE(snd_pcm_start)(deviceHandle);
+            if (err != 0)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                             "  Recovery - snd_pcm_start error: %u", err);
+                return -1;
+            }
+        }
+
+        if ((error == -EPIPE || error == -ESTRPIPE) &&  // Buf underrun/overrun.
+            _playing &&
+            LATE(snd_pcm_stream)(deviceHandle) == SND_PCM_STREAM_PLAYBACK)
+        {
+            // For capture streams we also have to repeat the explicit start() to get
+            // data flowing again.
+            int err = LATE(snd_pcm_start)(deviceHandle);
+            if (err != 0)
+            {
+              WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                       "    Recovery - snd_pcm_start error: %s",
+                       LATE(snd_strerror)(err));
+              return -1;
+            }
+        }
+
+        return -EPIPE == error ? 1 : 0;
+    }
+    else {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Unrecoverable alsa stream error: %d", res);
+    }
+
+    return res;
+}
+
+// ============================================================================
+//                                  Thread Methods
+// ============================================================================
+
+bool AudioDeviceLinuxALSA::PlayThreadFunc(void* pThis)
+{
+    return (static_cast<AudioDeviceLinuxALSA*>(pThis)->PlayThreadProcess());
+}
+
+bool AudioDeviceLinuxALSA::RecThreadFunc(void* pThis)
+{
+    return (static_cast<AudioDeviceLinuxALSA*>(pThis)->RecThreadProcess());
+}
+
+bool AudioDeviceLinuxALSA::PlayThreadProcess()
+{
+    if(!_playing)
+        return false;
+
+    int err;
+    snd_pcm_sframes_t frames;
+    snd_pcm_sframes_t avail_frames;
+
+    Lock();
+    //return a positive number of frames ready otherwise a negative error code
+    avail_frames = LATE(snd_pcm_avail_update)(_handlePlayout);
+    if (avail_frames < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                   "playout snd_pcm_avail_update error: %s",
+                   LATE(snd_strerror)(avail_frames));
+        ErrorRecovery(avail_frames, _handlePlayout);
+        UnLock();
+        return true;
+    }
+    else if (avail_frames == 0)
+    {
+        UnLock();
+
+        //maximum tixe in milliseconds to wait, a negative value means infinity
+        err = LATE(snd_pcm_wait)(_handlePlayout, 2);
+        if (err == 0)
+        { //timeout occured
+            WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id,
+                         "playout snd_pcm_wait timeout");
+        }
+
+        return true;
+    }
+
+    if (_playoutFramesLeft <= 0)
+    {
+        UnLock();
+        _ptrAudioBuffer->RequestPlayoutData(_playoutFramesIn10MS);
+        Lock();
+
+        _playoutFramesLeft = _ptrAudioBuffer->GetPlayoutData(_playoutBuffer);
+        assert(_playoutFramesLeft == _playoutFramesIn10MS);
+    }
+
+    if (static_cast<WebRtc_UWord32>(avail_frames) > _playoutFramesLeft)
+        avail_frames = _playoutFramesLeft;
+
+    int size = LATE(snd_pcm_frames_to_bytes)(_handlePlayout,
+        _playoutFramesLeft);
+    frames = LATE(snd_pcm_writei)(
+        _handlePlayout,
+        &_playoutBuffer[_playoutBufferSizeIn10MS - size],
+        avail_frames);
+
+    if (frames < 0)
+    {
+        WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id,
+                     "playout snd_pcm_writei error: %s",
+                     LATE(snd_strerror)(frames));
+        _playoutFramesLeft = 0;
+        ErrorRecovery(frames, _handlePlayout);
+        UnLock();
+        return true;
+    }
+    else {
+        assert(frames==avail_frames);
+        _playoutFramesLeft -= frames;
+    }
+
+    UnLock();
+    return true;
+}
+
+bool AudioDeviceLinuxALSA::RecThreadProcess()
+{
+    if (!_recording)
+        return false;
+
+    int err;
+    snd_pcm_sframes_t frames;
+    snd_pcm_sframes_t avail_frames;
+    WebRtc_Word8 buffer[_recordingBufferSizeIn10MS];
+
+    Lock();
+
+    //return a positive number of frames ready otherwise a negative error code
+    avail_frames = LATE(snd_pcm_avail_update)(_handleRecord);
+    if (avail_frames < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "capture snd_pcm_avail_update error: %s",
+                     LATE(snd_strerror)(avail_frames));
+        ErrorRecovery(avail_frames, _handleRecord);
+        UnLock();
+        return true;
+    }
+    else if (avail_frames == 0)
+    { // no frame is available now
+        UnLock();
+
+        //maximum time in milliseconds to wait, a negative value means infinity
+        err = LATE(snd_pcm_wait)(_handleRecord,
+            ALSA_CAPTURE_WAIT_TIMEOUT);
+        if (err == 0) //timeout occured
+            WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id,
+                         "capture snd_pcm_wait timeout");
+
+        return true;
+    }
+
+    if (static_cast<WebRtc_UWord32>(avail_frames) > _recordingFramesLeft)
+        avail_frames = _recordingFramesLeft;
+
+    frames = LATE(snd_pcm_readi)(_handleRecord,
+        buffer, avail_frames); // frames to be written
+    if (frames < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "capture snd_pcm_readi error: %s",
+                     LATE(snd_strerror)(frames));
+        ErrorRecovery(frames, _handleRecord);
+        UnLock();
+        return true;
+    }
+    else if (frames > 0)
+    {
+        assert(frames == avail_frames);
+
+        int left_size = LATE(snd_pcm_frames_to_bytes)(_handleRecord,
+            _recordingFramesLeft);
+        int size = LATE(snd_pcm_frames_to_bytes)(_handleRecord, frames);
+
+        memcpy(&_recordingBuffer[_recordingBufferSizeIn10MS - left_size],
+               buffer, size);
+        _recordingFramesLeft -= frames;
+
+        if (!_recordingFramesLeft)
+        { // buf is full
+            _recordingFramesLeft = _recordingFramesIn10MS;
+
+            // store the recorded buffer (no action will be taken if the
+            // #recorded samples is not a full buffer)
+            _ptrAudioBuffer->SetRecordedBuffer(_recordingBuffer,
+                                               _recordingFramesIn10MS);
+
+            WebRtc_UWord32 currentMicLevel = 0;
+            WebRtc_UWord32 newMicLevel = 0;
+
+            if (AGC())
+            {
+                // store current mic level in the audio buffer if AGC is enabled
+                if (MicrophoneVolume(currentMicLevel) == 0)
+                {
+                    if (currentMicLevel == 0xffffffff)
+                        currentMicLevel = 100;
+                    // this call does not affect the actual microphone volume
+                    _ptrAudioBuffer->SetCurrentMicLevel(currentMicLevel);
+                }
+            }
+
+            // calculate delay
+            _playoutDelay = 0;
+            _recordingDelay = 0;
+            if (_handlePlayout)
+            {
+                err = LATE(snd_pcm_delay)(_handlePlayout,
+                    &_playoutDelay); // returned delay in frames
+                if (err < 0)
+                {
+                    // TODO(xians): Shall we call ErrorRecovery() here?
+                    _playoutDelay = 0;
+                    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                                 "playout snd_pcm_delay: %s",
+                                 LATE(snd_strerror)(err));
+                }
+            }
+
+            err = LATE(snd_pcm_delay)(_handleRecord,
+                &_recordingDelay); // returned delay in frames
+            if (err < 0)
+            {
+                // TODO(xians): Shall we call ErrorRecovery() here?
+                _recordingDelay = 0;
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                             "capture snd_pcm_delay: %s",
+                             LATE(snd_strerror)(err));
+            }
+
+           // TODO(xians): Shall we add 10ms buffer delay to the record delay?
+            _ptrAudioBuffer->SetVQEData(
+                _playoutDelay * 1000 / _playoutFreq,
+                _recordingDelay * 1000 / _recordingFreq, 0);
+
+            // Deliver recorded samples at specified sample rate, mic level etc.
+            // to the observer using callback.
+            UnLock();
+            _ptrAudioBuffer->DeliverRecordedData();
+            Lock();
+
+            if (AGC())
+            {
+                newMicLevel = _ptrAudioBuffer->NewMicLevel();
+                if (newMicLevel != 0)
+                {
+                    // The VQE will only deliver non-zero microphone levels when a
+                    // change is needed. Set this new mic level (received from the
+                    // observer as return value in the callback).
+                    if (SetMicrophoneVolume(newMicLevel) == -1)
+                        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                                     "  the required modification of the "
+                                     "microphone volume failed");
+                }
+            }
+        }
+    }
+
+    UnLock();
+    return true;
+}
+
+}  // namespace webrtc
diff --git a/src/modules/audio_device/main/source/linux/audio_device_alsa_linux.h b/src/modules/audio_device/main/source/linux/audio_device_alsa_linux.h
new file mode 100644
index 0000000..20e555c
--- /dev/null
+++ b/src/modules/audio_device/main/source/linux/audio_device_alsa_linux.h
@@ -0,0 +1,250 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_ALSA_LINUX_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_ALSA_LINUX_H
+
+#include "audio_device_generic.h"
+#include "critical_section_wrapper.h"
+#include "audio_mixer_manager_alsa_linux.h"
+
+#include <sys/soundcard.h>
+#include <sys/ioctl.h>
+
+#include <alsa/asoundlib.h>
+
+namespace webrtc
+{
+class EventWrapper;
+class ThreadWrapper;
+
+class AudioDeviceLinuxALSA : public AudioDeviceGeneric
+{
+public:
+    AudioDeviceLinuxALSA(const WebRtc_Word32 id);
+    ~AudioDeviceLinuxALSA();
+
+    // Retrieve the currently utilized audio layer
+    virtual WebRtc_Word32 ActiveAudioLayer(
+        AudioDeviceModule::AudioLayer& audioLayer) const;
+
+    // Main initializaton and termination
+    virtual WebRtc_Word32 Init();
+    virtual WebRtc_Word32 Terminate();
+    virtual bool Initialized() const;
+
+    // Device enumeration
+    virtual WebRtc_Word16 PlayoutDevices();
+    virtual WebRtc_Word16 RecordingDevices();
+    virtual WebRtc_Word32 PlayoutDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize]);
+    virtual WebRtc_Word32 RecordingDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize]);
+
+    // Device selection
+    virtual WebRtc_Word32 SetPlayoutDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetPlayoutDevice(
+        AudioDeviceModule::WindowsDeviceType device);
+    virtual WebRtc_Word32 SetRecordingDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetRecordingDevice(
+        AudioDeviceModule::WindowsDeviceType device);
+
+    // Audio transport initialization
+    virtual WebRtc_Word32 PlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitPlayout();
+    virtual bool PlayoutIsInitialized() const;
+    virtual WebRtc_Word32 RecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitRecording();
+    virtual bool RecordingIsInitialized() const;
+
+    // Audio transport control
+    virtual WebRtc_Word32 StartPlayout();
+    virtual WebRtc_Word32 StopPlayout();
+    virtual bool Playing() const;
+    virtual WebRtc_Word32 StartRecording();
+    virtual WebRtc_Word32 StopRecording();
+    virtual bool Recording() const;
+
+    // Microphone Automatic Gain Control (AGC)
+    virtual WebRtc_Word32 SetAGC(bool enable);
+    virtual bool AGC() const;
+
+    // Volume control based on the Windows Wave API (Windows only)
+    virtual WebRtc_Word32 SetWaveOutVolume(WebRtc_UWord16 volumeLeft,
+                                           WebRtc_UWord16 volumeRight);
+    virtual WebRtc_Word32 WaveOutVolume(WebRtc_UWord16& volumeLeft,
+                                        WebRtc_UWord16& volumeRight) const;
+
+    // Audio mixer initialization
+    virtual WebRtc_Word32 SpeakerIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitSpeaker();
+    virtual bool SpeakerIsInitialized() const;
+    virtual WebRtc_Word32 MicrophoneIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitMicrophone();
+    virtual bool MicrophoneIsInitialized() const;
+
+    // Speaker volume controls
+    virtual WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
+
+    // Microphone volume controls
+    virtual WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 MicrophoneVolumeStepSize(
+        WebRtc_UWord16& stepSize) const;
+
+    // Speaker mute control
+    virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerMute(bool enable);
+    virtual WebRtc_Word32 SpeakerMute(bool& enabled) const;
+    
+    // Microphone mute control
+    virtual WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneMute(bool enable);
+    virtual WebRtc_Word32 MicrophoneMute(bool& enabled) const;
+
+    // Microphone boost control
+    virtual WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    virtual WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
+
+    // Stereo support
+    virtual WebRtc_Word32 StereoPlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoPlayout(bool enable);
+    virtual WebRtc_Word32 StereoPlayout(bool& enabled) const;
+    virtual WebRtc_Word32 StereoRecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoRecording(bool enable);
+    virtual WebRtc_Word32 StereoRecording(bool& enabled) const;
+   
+    // Delay information and control
+    virtual WebRtc_Word32 SetPlayoutBuffer(
+        const AudioDeviceModule::BufferType type,
+        WebRtc_UWord16 sizeMS);
+    virtual WebRtc_Word32 PlayoutBuffer(
+        AudioDeviceModule::BufferType& type,
+        WebRtc_UWord16& sizeMS) const;
+    virtual WebRtc_Word32 PlayoutDelay(WebRtc_UWord16& delayMS) const;
+    virtual WebRtc_Word32 RecordingDelay(WebRtc_UWord16& delayMS) const;
+
+    // CPU load
+    virtual WebRtc_Word32 CPULoad(WebRtc_UWord16& load) const;
+
+public:
+    virtual bool PlayoutWarning() const;
+    virtual bool PlayoutError() const;
+    virtual bool RecordingWarning() const;
+    virtual bool RecordingError() const;
+    virtual void ClearPlayoutWarning();
+    virtual void ClearPlayoutError();
+    virtual void ClearRecordingWarning();
+    virtual void ClearRecordingError();
+
+public:
+    virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
+
+private:
+    WebRtc_Word32 GetDevicesInfo(const WebRtc_Word32 function,
+                                 const bool playback,
+                                 const WebRtc_Word32 enumDeviceNo = 0,
+                                 char* enumDeviceName = NULL,
+                                 const WebRtc_Word32 ednLen = 0) const;
+    WebRtc_Word32 ErrorRecovery(WebRtc_Word32 error, snd_pcm_t* deviceHandle);
+
+private:
+    void Lock() { _critSect.Enter(); };
+    void UnLock() { _critSect.Leave(); };
+private:
+    inline WebRtc_Word32 InputSanityCheckAfterUnlockedPeriod() const;
+    inline WebRtc_Word32 OutputSanityCheckAfterUnlockedPeriod() const;
+
+private:
+    static bool RecThreadFunc(void*);
+    static bool PlayThreadFunc(void*);
+    bool RecThreadProcess();
+    bool PlayThreadProcess();
+
+private:
+    AudioDeviceBuffer* _ptrAudioBuffer;
+    
+    CriticalSectionWrapper& _critSect;
+
+    ThreadWrapper* _ptrThreadRec;
+    ThreadWrapper* _ptrThreadPlay;
+    WebRtc_UWord32 _recThreadID;
+    WebRtc_UWord32 _playThreadID;
+
+    WebRtc_Word32 _id;
+
+    AudioMixerManagerLinuxALSA _mixerManager;
+
+    WebRtc_UWord16 _inputDeviceIndex;
+    WebRtc_UWord16 _outputDeviceIndex;
+    bool _inputDeviceIsSpecified;
+    bool _outputDeviceIsSpecified;
+
+    snd_pcm_t* _handleRecord;
+    snd_pcm_t* _handlePlayout;
+
+    snd_pcm_uframes_t _recordingBuffersizeInFrame;
+    snd_pcm_uframes_t _recordingPeriodSizeInFrame;
+    snd_pcm_uframes_t _playoutBufferSizeInFrame;
+    snd_pcm_uframes_t _playoutPeriodSizeInFrame;
+
+    ssize_t _recordingBufferSizeIn10MS;
+    ssize_t _playoutBufferSizeIn10MS;
+    WebRtc_UWord32 _recordingFramesIn10MS;
+    WebRtc_UWord32 _playoutFramesIn10MS;
+
+    WebRtc_UWord32 _recordingFreq;
+    WebRtc_UWord32 _playoutFreq;
+    WebRtc_UWord8 _recChannels;
+    WebRtc_UWord8 _playChannels;
+
+    WebRtc_Word8* _recordingBuffer; // in byte
+    WebRtc_Word8* _playoutBuffer; // in byte
+    WebRtc_UWord32 _recordingFramesLeft;
+    WebRtc_UWord32 _playoutFramesLeft;
+
+    AudioDeviceModule::BufferType _playBufType;
+
+private:
+    bool _initialized;
+    bool _recording;
+    bool _playing;
+    bool _recIsInitialized;
+    bool _playIsInitialized;
+    bool _AGC;
+
+    snd_pcm_sframes_t _recordingDelay;
+    snd_pcm_sframes_t _playoutDelay;
+
+    WebRtc_UWord16 _playWarning;
+    WebRtc_UWord16 _playError;
+    WebRtc_UWord16 _recWarning;
+    WebRtc_UWord16 _recError;
+
+    WebRtc_UWord16 _playBufDelay;                 // playback delay
+    WebRtc_UWord16 _playBufDelayFixed;            // fixed playback delay
+};
+
+}
+
+#endif  // MODULES_AUDIO_DEVICE_MAIN_SOURCE_LINUX_AUDIO_DEVICE_ALSA_LINUX_H_
diff --git a/src/modules/audio_device/main/source/linux/audio_device_pulse_linux.cc b/src/modules/audio_device/main/source/linux/audio_device_pulse_linux.cc
new file mode 100644
index 0000000..f846243
--- /dev/null
+++ b/src/modules/audio_device/main/source/linux/audio_device_pulse_linux.cc
@@ -0,0 +1,3167 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+
+#include "audio_device_utility.h"
+#include "audio_device_pulse_linux.h"
+#include "audio_device_config.h"
+
+#include "event_wrapper.h"
+#include "trace.h"
+#include "thread_wrapper.h"
+
+webrtc_adm_linux_pulse::PulseAudioSymbolTable PaSymbolTable;
+
+// Accesses Pulse functions through our late-binding symbol table instead of
+// directly. This way we don't have to link to libpulse, which means our binary
+// will work on systems that don't have it.
+#define LATE(sym) \
+  LATESYM_GET(webrtc_adm_linux_pulse::PulseAudioSymbolTable, &PaSymbolTable, sym)
+
+namespace webrtc
+{
+
+// ============================================================================
+//                              Static Methods
+// ============================================================================
+
+bool AudioDeviceLinuxPulse::PulseAudioIsSupported()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, -1, "%s",
+                 __FUNCTION__);
+
+    bool pulseAudioIsSupported(true);
+
+    // Check that we can initialize
+    AudioDeviceLinuxPulse* admPulse = new AudioDeviceLinuxPulse(-1);
+    if (admPulse->InitPulseAudio() == -1)
+    {
+        pulseAudioIsSupported = false;
+    }
+    admPulse->TerminatePulseAudio();
+    delete admPulse;
+
+    if (pulseAudioIsSupported)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1,
+                     "*** Linux Pulse Audio is supported ***");
+    } else
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1,
+                     "*** Linux Pulse Audio is NOT supported => will revert to the ALSA API ***");
+    }
+
+    return (pulseAudioIsSupported);
+}
+
+AudioDeviceLinuxPulse::AudioDeviceLinuxPulse(const WebRtc_Word32 id) :
+    _ptrAudioBuffer(NULL),
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _timeEventRec(*EventWrapper::Create()),
+    _timeEventPlay(*EventWrapper::Create()),
+    _recStartEvent(*EventWrapper::Create()),
+    _playStartEvent(*EventWrapper::Create()),
+    _ptrThreadPlay(NULL),
+    _ptrThreadRec(NULL),
+    _recThreadID(0),
+    _playThreadID(0),
+    _id(id),
+    _mixerManager(id),
+    _inputDeviceIndex(0),
+    _outputDeviceIndex(0),
+    _inputDeviceIsSpecified(false),
+    _outputDeviceIsSpecified(false),
+    _samplingFreq(0),
+    _recChannels(1),
+    _playChannels(1),
+    _playBufType(AudioDeviceModule::kFixedBufferSize),
+    _initialized(false),
+    _recording(false),
+    _playing(false),
+    _recIsInitialized(false),
+    _playIsInitialized(false),
+    _startRec(false),
+    _stopRec(false),
+    _startPlay(false),
+    _stopPlay(false),
+    _AGC(false),
+    update_speaker_volume_at_startup_(false),
+    _playBufDelayFixed(20),
+    _sndCardPlayDelay(0),
+    _sndCardRecDelay(0),
+    _writeErrors(0),
+    _playWarning(0),
+    _playError(0),
+    _recWarning(0),
+    _recError(0),
+    _deviceIndex(-1),
+    _numPlayDevices(0),
+    _numRecDevices(0),
+    _playDeviceName(NULL),
+    _recDeviceName(NULL),
+    _playDisplayDeviceName(NULL),
+    _recDisplayDeviceName(NULL),
+    _playBuffer(NULL),
+    _playbackBufferSize(0),
+    _playbackBufferUnused(0),
+    _tempBufferSpace(0),
+    _recBuffer(NULL),
+    _recordBufferSize(0),
+    _recordBufferUsed(0),
+    _tempSampleData(NULL),
+    _tempSampleDataSize(0),
+    _configuredLatencyPlay(0),
+    _configuredLatencyRec(0),
+    _paDeviceIndex(-1),
+    _paStateChanged(false),
+    _paMainloop(NULL),
+    _paMainloopApi(NULL),
+    _paContext(NULL),
+    _recStream(NULL),
+    _playStream(NULL),
+    _recStreamFlags(0),
+    _playStreamFlags(0)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id,
+                 "%s created", __FUNCTION__);
+
+    memset(_paServerVersion, 0, sizeof(_paServerVersion));
+    memset(&_playBufferAttr, 0, sizeof(_playBufferAttr));
+    memset(&_recBufferAttr, 0, sizeof(_recBufferAttr));
+}
+
+AudioDeviceLinuxPulse::~AudioDeviceLinuxPulse()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s destroyed", __FUNCTION__);
+
+    Terminate();
+
+    if (_recBuffer)
+    {
+        delete [] _recBuffer;
+        _recBuffer = NULL;
+    }
+    if (_playBuffer)
+    {
+        delete [] _playBuffer;
+        _playBuffer = NULL;
+    }
+    if (_playDeviceName)
+    {
+        delete [] _playDeviceName;
+        _playDeviceName = NULL;
+    }
+    if (_recDeviceName)
+    {
+        delete [] _recDeviceName;
+        _recDeviceName = NULL;
+    }
+
+    delete &_recStartEvent;
+    delete &_playStartEvent;
+    delete &_timeEventRec;
+    delete &_timeEventPlay;
+    delete &_critSect;
+}
+
+void AudioDeviceLinuxPulse::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer)
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    _ptrAudioBuffer = audioBuffer;
+
+    // Inform the AudioBuffer about default settings for this implementation.
+    // Set all values to zero here since the actual settings will be done by
+    // InitPlayout and InitRecording later.
+    _ptrAudioBuffer->SetRecordingSampleRate(0);
+    _ptrAudioBuffer->SetPlayoutSampleRate(0);
+    _ptrAudioBuffer->SetRecordingChannels(0);
+    _ptrAudioBuffer->SetPlayoutChannels(0);
+}
+
+// ----------------------------------------------------------------------------
+//  ActiveAudioLayer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceLinuxPulse::ActiveAudioLayer(
+    AudioDeviceModule::AudioLayer& audioLayer) const
+{
+    audioLayer = AudioDeviceModule::kLinuxPulseAudio;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::Init()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_initialized)
+    {
+        return 0;
+    }
+
+    // Initialize PulseAudio
+    if (InitPulseAudio() < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to initialize PulseAudio");
+
+        if (TerminatePulseAudio() < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to terminate PulseAudio");
+        }
+
+        return -1;
+    }
+
+    _playWarning = 0;
+    _playError = 0;
+    _recWarning = 0;
+    _recError = 0;
+
+    // RECORDING
+    const char* threadName = "webrtc_audio_module_rec_thread";
+    _ptrThreadRec = ThreadWrapper::CreateThread(RecThreadFunc, this,
+                                                kRealtimePriority, threadName);
+    if (_ptrThreadRec == NULL)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "  failed to create the rec audio thread");
+        return -1;
+    }
+
+    unsigned int threadID(0);
+    if (!_ptrThreadRec->Start(threadID))
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "  failed to start the rec audio thread");
+
+        delete _ptrThreadRec;
+        _ptrThreadRec = NULL;
+        return -1;
+    }
+    _recThreadID = threadID;
+
+    // PLAYOUT
+    threadName = "webrtc_audio_module_play_thread";
+    _ptrThreadPlay = ThreadWrapper::CreateThread(PlayThreadFunc, this,
+                                                 kRealtimePriority, threadName);
+    if (_ptrThreadPlay == NULL)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "  failed to create the play audio thread");
+        return -1;
+    }
+
+    threadID = 0;
+    if (!_ptrThreadPlay->Start(threadID))
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "  failed to start the play audio thread");
+
+        delete _ptrThreadPlay;
+        _ptrThreadPlay = NULL;
+        return -1;
+    }
+    _playThreadID = threadID;
+
+    _initialized = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::Terminate()
+{
+
+    if (!_initialized)
+    {
+        return 0;
+    }
+
+    Lock();
+
+    _mixerManager.Close();
+
+    // RECORDING
+    if (_ptrThreadRec)
+    {
+        ThreadWrapper* tmpThread = _ptrThreadRec;
+        _ptrThreadRec = NULL;
+        UnLock();
+
+        tmpThread->SetNotAlive();
+        _timeEventRec.Set();
+        if (tmpThread->Stop())
+        {
+            delete tmpThread;
+        } else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  failed to close down the rec audio thread");
+        }
+        // Lock again since we need to protect _ptrThreadPlay.
+        Lock();
+    }
+
+    // PLAYOUT
+    if (_ptrThreadPlay)
+    {
+        ThreadWrapper* tmpThread = _ptrThreadPlay;
+        _ptrThreadPlay = NULL;
+        _critSect.Leave();
+
+        tmpThread->SetNotAlive();
+        _timeEventPlay.Set();
+        if (tmpThread->Stop())
+        {
+            delete tmpThread;
+        } else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  failed to close down the play audio thread");
+        }
+    } else {
+      UnLock();
+    }
+
+    // Terminate PulseAudio
+    if (TerminatePulseAudio() < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to terminate PulseAudio");
+        return -1;
+    }
+
+    _initialized = false;
+    _outputDeviceIsSpecified = false;
+    _inputDeviceIsSpecified = false;
+
+    return 0;
+}
+
+bool AudioDeviceLinuxPulse::Initialized() const
+{
+    return (_initialized);
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SpeakerIsAvailable(bool& available)
+{
+
+    bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+
+    // Make an attempt to open up the
+    // output mixer corresponding to the currently selected output device.
+    //
+    if (!wasInitialized && InitSpeaker() == -1)
+    {
+        available = false;
+        return 0;
+    }
+
+    // Given that InitSpeaker was successful, we know that a valid speaker exists
+    // 
+    available = true;
+
+    // Close the initialized output mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseSpeaker();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::InitSpeaker()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_playing)
+    {
+        return -1;
+    }
+
+    if (!_outputDeviceIsSpecified)
+    {
+        return -1;
+    }
+
+    // check if default device
+    if (_outputDeviceIndex == 0)
+    {
+        WebRtc_UWord16 deviceIndex = 0;
+        GetDefaultDeviceInfo(false, NULL, deviceIndex);
+        _paDeviceIndex = deviceIndex;
+    } else
+    {
+        // get the PA device index from
+        // the callback
+        _deviceIndex = _outputDeviceIndex;
+
+        // get playout devices
+        PlayoutDevices();
+    }
+
+    // the callback has now set the _paDeviceIndex to
+    // the PulseAudio index of the device
+    if (_mixerManager.OpenSpeaker(_paDeviceIndex) == -1)
+    {
+        return -1;
+    }
+
+    // clear _deviceIndex
+    _deviceIndex = -1;
+    _paDeviceIndex = -1;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MicrophoneIsAvailable(bool& available)
+{
+
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Make an attempt to open up the
+    // input mixer corresponding to the currently selected output device.
+    //
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        available = false;
+        return 0;
+    }
+
+    // Given that InitMicrophone was successful, we know that a valid microphone
+    // exists
+    available = true;
+
+    // Close the initialized input mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::InitMicrophone()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_recording)
+    {
+        return -1;
+    }
+
+    if (!_inputDeviceIsSpecified)
+    {
+        return -1;
+    }
+
+    // Check if default device
+    if (_inputDeviceIndex == 0)
+    {
+        WebRtc_UWord16 deviceIndex = 0;
+        GetDefaultDeviceInfo(true, NULL, deviceIndex);
+        _paDeviceIndex = deviceIndex;
+    } else
+    {
+        // Get the PA device index from
+        // the callback
+        _deviceIndex = _inputDeviceIndex;
+
+        // get recording devices
+        RecordingDevices();
+    }
+
+    // The callback has now set the _paDeviceIndex to
+    // the PulseAudio index of the device
+    if (_mixerManager.OpenMicrophone(_paDeviceIndex) == -1)
+    {
+        return -1;
+    }
+
+    // Clear _deviceIndex
+    _deviceIndex = -1;
+    _paDeviceIndex = -1;
+
+    return 0;
+}
+
+bool AudioDeviceLinuxPulse::SpeakerIsInitialized() const
+{
+    return (_mixerManager.SpeakerIsInitialized());
+}
+
+bool AudioDeviceLinuxPulse::MicrophoneIsInitialized() const
+{
+    return (_mixerManager.MicrophoneIsInitialized());
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SpeakerVolumeIsAvailable(bool& available)
+{
+
+    bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+
+    // Make an attempt to open up the
+    // output mixer corresponding to the currently selected output device.
+    if (!wasInitialized && InitSpeaker() == -1)
+    {
+        // If we end up here it means that the selected speaker has no volume
+        // control.
+        available = false;
+        return 0;
+    }
+
+    // Given that InitSpeaker was successful, we know that a volume control exists
+    available = true;
+
+    // Close the initialized output mixer
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseSpeaker();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetSpeakerVolume(WebRtc_UWord32 volume)
+{
+    if (!_playing) {
+      // Only update the volume if it's been set while we weren't playing.
+      update_speaker_volume_at_startup_ = true;
+    }
+    return (_mixerManager.SetSpeakerVolume(volume));
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SpeakerVolume(WebRtc_UWord32& volume) const
+{
+
+    WebRtc_UWord32 level(0);
+
+    if (_mixerManager.SpeakerVolume(level) == -1)
+    {
+        return -1;
+    }
+
+    volume = level;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetWaveOutVolume(
+    WebRtc_UWord16 volumeLeft,
+    WebRtc_UWord16 volumeRight)
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::WaveOutVolume(
+    WebRtc_UWord16& /*volumeLeft*/,
+    WebRtc_UWord16& /*volumeRight*/) const
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MaxSpeakerVolume(
+    WebRtc_UWord32& maxVolume) const
+{
+
+    WebRtc_UWord32 maxVol(0);
+
+    if (_mixerManager.MaxSpeakerVolume(maxVol) == -1)
+    {
+        return -1;
+    }
+
+    maxVolume = maxVol;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MinSpeakerVolume(
+    WebRtc_UWord32& minVolume) const
+{
+
+    WebRtc_UWord32 minVol(0);
+
+    if (_mixerManager.MinSpeakerVolume(minVol) == -1)
+    {
+        return -1;
+    }
+
+    minVolume = minVol;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SpeakerVolumeStepSize(
+    WebRtc_UWord16& stepSize) const
+{
+
+    WebRtc_UWord16 delta(0);
+
+    if (_mixerManager.SpeakerVolumeStepSize(delta) == -1)
+    {
+        return -1;
+    }
+
+    stepSize = delta;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SpeakerMuteIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+    bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+
+    // Make an attempt to open up the
+    // output mixer corresponding to the currently selected output device.
+    //
+    if (!wasInitialized && InitSpeaker() == -1)
+    {
+        // If we end up here it means that the selected speaker has no volume
+        // control, hence it is safe to state that there is no mute control
+        // already at this stage.
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected speaker has a mute control
+    _mixerManager.SpeakerMuteIsAvailable(isAvailable);
+
+    available = isAvailable;
+
+    // Close the initialized output mixer
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseSpeaker();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetSpeakerMute(bool enable)
+{
+
+    return (_mixerManager.SetSpeakerMute(enable));
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SpeakerMute(bool& enabled) const
+{
+
+    bool muted(0);
+    if (_mixerManager.SpeakerMute(muted) == -1)
+    {
+        return -1;
+    }
+
+    enabled = muted;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MicrophoneMuteIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Make an attempt to open up the
+    // input mixer corresponding to the currently selected input device.
+    //
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        // If we end up here it means that the selected microphone has no volume
+        // control, hence it is safe to state that there is no boost control
+        // already at this stage.
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected microphone has a mute control
+    //
+    _mixerManager.MicrophoneMuteIsAvailable(isAvailable);
+    available = isAvailable;
+
+    // Close the initialized input mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetMicrophoneMute(bool enable)
+{
+
+    return (_mixerManager.SetMicrophoneMute(enable));
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MicrophoneMute(bool& enabled) const
+{
+
+    bool muted(0);
+    if (_mixerManager.MicrophoneMute(muted) == -1)
+    {
+        return -1;
+    }
+
+    enabled = muted;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MicrophoneBoostIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Enumerate all avaliable microphone and make an attempt to open up the
+    // input mixer corresponding to the currently selected input device.
+    //
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        // If we end up here it means that the selected microphone has no volume
+        // control, hence it is safe to state that there is no boost control
+        // already at this stage.
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected microphone has a boost control
+    _mixerManager.MicrophoneBoostIsAvailable(isAvailable);
+    available = isAvailable;
+
+    // Close the initialized input mixer
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetMicrophoneBoost(bool enable)
+{
+
+    return (_mixerManager.SetMicrophoneBoost(enable));
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MicrophoneBoost(bool& enabled) const
+{
+
+    bool onOff(0);
+
+    if (_mixerManager.MicrophoneBoost(onOff) == -1)
+    {
+        return -1;
+    }
+
+    enabled = onOff;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::StereoRecordingIsAvailable(bool& available)
+{
+
+    if (_recChannels == 2 && _recording) {
+      available = true;
+      return 0;
+    }
+
+    available = false;
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+    int error = 0;
+
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        // Cannot open the specified device
+        available = false;
+        return 0;
+    }
+
+#ifndef WEBRTC_PA_GTALK
+    // Check if the selected microphone can record stereo.
+    bool isAvailable(false);
+    error = _mixerManager.StereoRecordingIsAvailable(isAvailable);
+    if (!error)
+      available = isAvailable;
+#endif
+
+    // Close the initialized input mixer
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return error;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetStereoRecording(bool enable)
+{
+
+#ifndef WEBRTC_PA_GTALK
+    if (enable)
+        _recChannels = 2;
+    else
+        _recChannels = 1;
+#endif
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::StereoRecording(bool& enabled) const
+{
+
+    if (_recChannels == 2)
+        enabled = true;
+    else
+        enabled = false;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::StereoPlayoutIsAvailable(bool& available)
+{
+
+    if (_playChannels == 2 && _playing) {
+      available = true;
+      return 0;
+    }
+
+    available = false;
+    bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+    int error = 0;
+
+    if (!wasInitialized && InitSpeaker() == -1)
+    {
+        // Cannot open the specified device.
+        return -1;
+    }
+
+#ifndef WEBRTC_PA_GTALK
+    // Check if the selected speaker can play stereo.
+    bool isAvailable(false);
+    error = _mixerManager.StereoPlayoutIsAvailable(isAvailable);
+    if (!error)
+      available = isAvailable;
+#endif
+
+    // Close the initialized input mixer
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseSpeaker();
+    }
+
+    return error;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetStereoPlayout(bool enable)
+{
+
+#ifndef WEBRTC_PA_GTALK
+    if (enable)
+        _playChannels = 2;
+    else
+        _playChannels = 1;
+#endif
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::StereoPlayout(bool& enabled) const
+{
+
+    if (_playChannels == 2)
+        enabled = true;
+    else
+        enabled = false;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetAGC(bool enable)
+{
+
+    _AGC = enable;
+
+    return 0;
+}
+
+bool AudioDeviceLinuxPulse::AGC() const
+{
+
+    return _AGC;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MicrophoneVolumeIsAvailable(
+    bool& available)
+{
+
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Make an attempt to open up the
+    // input mixer corresponding to the currently selected output device.
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        // If we end up here it means that the selected microphone has no volume
+        // control.
+        available = false;
+        return 0;
+    }
+
+    // Given that InitMicrophone was successful, we know that a volume control
+    // exists
+    available = true;
+
+    // Close the initialized input mixer
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetMicrophoneVolume(WebRtc_UWord32 volume)
+{
+
+    return (_mixerManager.SetMicrophoneVolume(volume));
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MicrophoneVolume(
+    WebRtc_UWord32& volume) const
+{
+
+    WebRtc_UWord32 level(0);
+
+    if (_mixerManager.MicrophoneVolume(level) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  failed to retrive current microphone level");
+        return -1;
+    }
+
+    volume = level;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MaxMicrophoneVolume(
+    WebRtc_UWord32& maxVolume) const
+{
+
+    WebRtc_UWord32 maxVol(0);
+
+    if (_mixerManager.MaxMicrophoneVolume(maxVol) == -1)
+    {
+        return -1;
+    }
+
+    maxVolume = maxVol;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MinMicrophoneVolume(
+    WebRtc_UWord32& minVolume) const
+{
+
+    WebRtc_UWord32 minVol(0);
+
+    if (_mixerManager.MinMicrophoneVolume(minVol) == -1)
+    {
+        return -1;
+    }
+
+    minVolume = minVol;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::MicrophoneVolumeStepSize(
+    WebRtc_UWord16& stepSize) const
+{
+
+    WebRtc_UWord16 delta(0);
+
+    if (_mixerManager.MicrophoneVolumeStepSize(delta) == -1)
+    {
+        return -1;
+    }
+
+    stepSize = delta;
+
+    return 0;
+}
+
+WebRtc_Word16 AudioDeviceLinuxPulse::PlayoutDevices()
+{
+
+    PaLock();
+
+    pa_operation* paOperation = NULL;
+    _numPlayDevices = 1; // init to 1 to account for "default"
+
+    // get the whole list of devices and update _numPlayDevices
+    paOperation = LATE(pa_context_get_sink_info_list)(_paContext,
+                                                      PaSinkInfoCallback,
+                                                      this);
+
+    WaitForOperationCompletion(paOperation);
+
+    PaUnLock();
+
+    return _numPlayDevices;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetPlayoutDevice(WebRtc_UWord16 index)
+{
+
+    if (_playIsInitialized)
+    {
+        return -1;
+    }
+
+    const WebRtc_UWord16 nDevices = PlayoutDevices();
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "  number of availiable output devices is %u", nDevices);
+
+    if (index > (nDevices - 1))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  device index is out of range [0,%u]", (nDevices - 1));
+        return -1;
+    }
+
+    _outputDeviceIndex = index;
+    _outputDeviceIsSpecified = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetPlayoutDevice(
+    AudioDeviceModule::WindowsDeviceType /*device*/)
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                 "WindowsDeviceType not supported");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::PlayoutDeviceName(
+    WebRtc_UWord16 index,
+    char name[kAdmMaxDeviceNameSize],
+    char guid[kAdmMaxGuidSize])
+{
+
+    const WebRtc_UWord16 nDevices = PlayoutDevices();
+
+    if ((index > (nDevices - 1)) || (name == NULL))
+    {
+        return -1;
+    }
+
+    memset(name, 0, kAdmMaxDeviceNameSize);
+
+    if (guid != NULL)
+    {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+
+    // Check if default device
+    if (index == 0)
+    {
+        WebRtc_UWord16 deviceIndex = 0;
+        return GetDefaultDeviceInfo(false, name, deviceIndex);
+    }
+
+    // Tell the callback that we want
+    // The name for this device
+    _playDisplayDeviceName = name;
+    _deviceIndex = index;
+
+    // get playout devices
+    PlayoutDevices();
+
+    // clear device name and index
+    _playDisplayDeviceName = NULL;
+    _deviceIndex = -1;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::RecordingDeviceName(
+    WebRtc_UWord16 index,
+    char name[kAdmMaxDeviceNameSize],
+    char guid[kAdmMaxGuidSize])
+{
+
+    const WebRtc_UWord16 nDevices(RecordingDevices());
+
+    if ((index > (nDevices - 1)) || (name == NULL))
+    {
+        return -1;
+    }
+
+    memset(name, 0, kAdmMaxDeviceNameSize);
+
+    if (guid != NULL)
+    {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+
+    // Check if default device
+    if (index == 0)
+    {
+        WebRtc_UWord16 deviceIndex = 0;
+        return GetDefaultDeviceInfo(true, name, deviceIndex);
+    }
+
+    // Tell the callback that we want
+    // the name for this device
+    _recDisplayDeviceName = name;
+    _deviceIndex = index;
+
+    // Get recording devices
+    RecordingDevices();
+
+    // Clear device name and index
+    _recDisplayDeviceName = NULL;
+    _deviceIndex = -1;
+
+    return 0;
+}
+
+WebRtc_Word16 AudioDeviceLinuxPulse::RecordingDevices()
+{
+
+    PaLock();
+
+    pa_operation* paOperation = NULL;
+    _numRecDevices = 1; // Init to 1 to account for "default"
+
+    // Get the whole list of devices and update _numRecDevices
+    paOperation = LATE(pa_context_get_source_info_list)(_paContext,
+                                                        PaSourceInfoCallback,
+                                                        this);
+
+    WaitForOperationCompletion(paOperation);
+
+    PaUnLock();
+
+    return _numRecDevices;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetRecordingDevice(WebRtc_UWord16 index)
+{
+
+    if (_recIsInitialized)
+    {
+        return -1;
+    }
+
+    const WebRtc_UWord16 nDevices(RecordingDevices());
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "  number of availiable input devices is %u", nDevices);
+
+    if (index > (nDevices - 1))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  device index is out of range [0,%u]", (nDevices - 1));
+        return -1;
+    }
+
+    _inputDeviceIndex = index;
+    _inputDeviceIsSpecified = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetRecordingDevice(
+    AudioDeviceModule::WindowsDeviceType /*device*/)
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                 "WindowsDeviceType not supported");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::PlayoutIsAvailable(bool& available)
+{
+
+    available = false;
+
+    // Try to initialize the playout side
+    WebRtc_Word32 res = InitPlayout();
+
+    // Cancel effect of initialization
+    StopPlayout();
+
+    if (res != -1)
+    {
+        available = true;
+    }
+
+    return res;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::RecordingIsAvailable(bool& available)
+{
+
+    available = false;
+
+    // Try to initialize the playout side
+    WebRtc_Word32 res = InitRecording();
+
+    // Cancel effect of initialization
+    StopRecording();
+
+    if (res != -1)
+    {
+        available = true;
+    }
+
+    return res;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::InitPlayout()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_playing)
+    {
+        return -1;
+    }
+
+    if (!_outputDeviceIsSpecified)
+    {
+        return -1;
+    }
+
+    if (_playIsInitialized)
+    {
+        return 0;
+    }
+
+    // Initialize the speaker (devices might have been added or removed)
+    if (InitSpeaker() == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  InitSpeaker() failed");
+    }
+
+    // Set sampling rate to use
+    WebRtc_UWord32 samplingRate = _samplingFreq * 1000;
+    if (samplingRate == 44000)
+    {
+        samplingRate = 44100;
+    }
+
+    // Set the play sample specification
+    pa_sample_spec playSampleSpec;
+    playSampleSpec.channels = _playChannels;
+    playSampleSpec.format = PA_SAMPLE_S16LE;
+    playSampleSpec.rate = samplingRate;
+
+    // Create a new play stream
+    _playStream = LATE(pa_stream_new)(_paContext, "playStream",
+                                      &playSampleSpec, NULL);
+
+    if (!_playStream)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to create play stream, err=%d",
+                     LATE(pa_context_errno)(_paContext));
+        return -1;
+    }
+
+    // Provide the playStream to the mixer
+    _mixerManager.SetPlayStream(_playStream);
+
+    if (_ptrAudioBuffer)
+    {
+        // Update audio buffer with the selected parameters
+        _ptrAudioBuffer->SetPlayoutSampleRate(_samplingFreq * 1000);
+        _ptrAudioBuffer->SetPlayoutChannels((WebRtc_UWord8) _playChannels);
+    }
+
+    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                 "  stream state %d\n", LATE(pa_stream_get_state)(_playStream));
+
+    // Set stream flags
+    _playStreamFlags = (pa_stream_flags_t) (PA_STREAM_AUTO_TIMING_UPDATE
+        | PA_STREAM_INTERPOLATE_TIMING);
+
+    if (_configuredLatencyPlay != WEBRTC_PA_NO_LATENCY_REQUIREMENTS)
+    {
+        // If configuring a specific latency then we want to specify
+        // PA_STREAM_ADJUST_LATENCY to make the server adjust parameters
+        // automatically to reach that target latency. However, that flag doesn't
+        // exist in Ubuntu 8.04 and many people still use that, so we have to check
+        // the protocol version of libpulse.
+        if (LATE(pa_context_get_protocol_version)(_paContext)
+            >= WEBRTC_PA_ADJUST_LATENCY_PROTOCOL_VERSION)
+        {
+            _playStreamFlags |= PA_STREAM_ADJUST_LATENCY;
+        }
+
+        const pa_sample_spec *spec =
+            LATE(pa_stream_get_sample_spec)(_playStream);
+        if (!spec)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  pa_stream_get_sample_spec()");
+            return -1;
+        }
+
+        size_t bytesPerSec = LATE(pa_bytes_per_second)(spec);
+        WebRtc_UWord32 latency = bytesPerSec
+            * WEBRTC_PA_PLAYBACK_LATENCY_MINIMUM_MSECS / WEBRTC_PA_MSECS_PER_SEC;
+
+        // Set the play buffer attributes
+        _playBufferAttr.maxlength = latency; // num bytes stored in the buffer
+        _playBufferAttr.tlength = latency; // target fill level of play buffer
+        // minimum free num bytes before server request more data
+        _playBufferAttr.minreq = latency / WEBRTC_PA_PLAYBACK_REQUEST_FACTOR;
+        _playBufferAttr.prebuf = _playBufferAttr.tlength
+            - _playBufferAttr.minreq; // prebuffer tlength before starting playout
+
+        _configuredLatencyPlay = latency;
+    }
+
+    // num samples in bytes * num channels
+    _playbackBufferSize = _samplingFreq * 10 * 2 * _playChannels;
+    _playbackBufferUnused = _playbackBufferSize;
+    _playBuffer = new WebRtc_Word8[_playbackBufferSize];
+
+    // Enable underflow callback
+    LATE(pa_stream_set_underflow_callback)(_playStream,
+                                           PaStreamUnderflowCallback, this);
+
+    // Set the state callback function for the stream
+    LATE(pa_stream_set_state_callback)(_playStream, PaStreamStateCallback, this);
+
+    // Mark playout side as initialized
+    _playIsInitialized = true;
+    _sndCardPlayDelay = 0;
+    _sndCardRecDelay = 0;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::InitRecording()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_recording)
+    {
+        return -1;
+    }
+
+    if (!_inputDeviceIsSpecified)
+    {
+        return -1;
+    }
+
+    if (_recIsInitialized)
+    {
+        return 0;
+    }
+
+    // Initialize the microphone (devices might have been added or removed)
+    if (InitMicrophone() == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  InitMicrophone() failed");
+    }
+
+    // Set sampling rate to use
+    WebRtc_UWord32 samplingRate = _samplingFreq * 1000;
+    if (samplingRate == 44000)
+    {
+        samplingRate = 44100;
+    }
+
+    // Set the rec sample specification
+    pa_sample_spec recSampleSpec;
+    recSampleSpec.channels = _recChannels;
+    recSampleSpec.format = PA_SAMPLE_S16LE;
+    recSampleSpec.rate = samplingRate;
+
+    // Create a new rec stream
+    _recStream = LATE(pa_stream_new)(_paContext, "recStream", &recSampleSpec,
+                                     NULL);
+    if (!_recStream)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to create rec stream, err=%d",
+                     LATE(pa_context_errno)(_paContext));
+        return -1;
+    }
+
+    // Provide the recStream to the mixer
+    _mixerManager.SetRecStream(_recStream);
+
+    if (_ptrAudioBuffer)
+    {
+        // Update audio buffer with the selected parameters
+        _ptrAudioBuffer->SetRecordingSampleRate(_samplingFreq * 1000);
+        _ptrAudioBuffer->SetRecordingChannels((WebRtc_UWord8) _recChannels);
+    }
+
+    if (_configuredLatencyRec != WEBRTC_PA_NO_LATENCY_REQUIREMENTS)
+    {
+        _recStreamFlags = (pa_stream_flags_t) (PA_STREAM_AUTO_TIMING_UPDATE
+            | PA_STREAM_INTERPOLATE_TIMING);
+
+        // If configuring a specific latency then we want to specify
+        // PA_STREAM_ADJUST_LATENCY to make the server adjust parameters
+        // automatically to reach that target latency. However, that flag doesn't
+        // exist in Ubuntu 8.04 and many people still use that, so we have to check
+        // the protocol version of libpulse.
+        if (LATE(pa_context_get_protocol_version)(_paContext)
+            >= WEBRTC_PA_ADJUST_LATENCY_PROTOCOL_VERSION)
+        {
+            _recStreamFlags |= PA_STREAM_ADJUST_LATENCY;
+        }
+
+        const pa_sample_spec *spec =
+            LATE(pa_stream_get_sample_spec)(_recStream);
+        if (!spec)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  pa_stream_get_sample_spec(rec)");
+            return -1;
+        }
+
+        size_t bytesPerSec = LATE(pa_bytes_per_second)(spec);
+        WebRtc_UWord32 latency = bytesPerSec
+            * WEBRTC_PA_LOW_CAPTURE_LATENCY_MSECS / WEBRTC_PA_MSECS_PER_SEC;
+
+        // Set the rec buffer attributes
+        // Note: fragsize specifies a maximum transfer size, not a minimum, so
+        // it is not possible to force a high latency setting, only a low one.
+        _recBufferAttr.fragsize = latency; // size of fragment
+        _recBufferAttr.maxlength = latency + bytesPerSec
+            * WEBRTC_PA_CAPTURE_BUFFER_EXTRA_MSECS / WEBRTC_PA_MSECS_PER_SEC;
+
+        _configuredLatencyRec = latency;
+    }
+
+    _recordBufferSize = _samplingFreq * 10 * 2 * _recChannels;
+    _recordBufferUsed = 0;
+    _recBuffer = new WebRtc_Word8[_recordBufferSize];
+
+    // Enable overflow callback
+    LATE(pa_stream_set_overflow_callback)(_recStream, PaStreamOverflowCallback,
+                                          this);
+
+    // Set the state callback function for the stream
+    LATE(pa_stream_set_state_callback)(_recStream, PaStreamStateCallback, this);
+
+    // Mark recording side as initialized
+    _recIsInitialized = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::StartRecording()
+{
+
+    if (!_recIsInitialized)
+    {
+        return -1;
+    }
+
+    if (_recording)
+    {
+        return 0;
+    }
+
+    // set state to ensure that the recording starts from the audio thread
+    _startRec = true;
+
+    // the audio thread will signal when recording has started
+    _timeEventRec.Set();
+    if (kEventTimeout == _recStartEvent.Wait(10000))
+    {
+        _startRec = false;
+        StopRecording();
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to activate recording");
+        return -1;
+    }
+
+    if (_recording)
+    {
+        // the recording state is set by the audio thread after recording has started
+    } else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to activate recording");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::StopRecording()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_recIsInitialized)
+    {
+        return 0;
+    }
+
+    if (_recStream == NULL)
+    {
+        return -1;
+    }
+
+    _recIsInitialized = false;
+    _recording = false;
+
+    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                 "  stopping recording");
+
+    // Stop Recording
+    PaLock();
+
+    DisableReadCallback();
+    LATE(pa_stream_set_overflow_callback)(_recStream, NULL, NULL);
+
+    // Unset this here so that we don't get a TERMINATED callback
+    LATE(pa_stream_set_state_callback)(_recStream, NULL, NULL);
+
+    if (LATE(pa_stream_get_state)(_recStream) != PA_STREAM_UNCONNECTED)
+    {
+        // Disconnect the stream
+        if (LATE(pa_stream_disconnect)(_recStream) != PA_OK)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to disconnect rec stream, err=%d\n",
+                         LATE(pa_context_errno)(_paContext));
+            PaUnLock();
+            return -1;
+        }
+
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "  disconnected recording");
+    }
+
+    LATE(pa_stream_unref)(_recStream);
+    _recStream = NULL;
+
+    PaUnLock();
+
+    // Provide the recStream to the mixer
+    _mixerManager.SetRecStream(_recStream);
+
+    if (_recBuffer)
+    {
+        delete [] _recBuffer;
+        _recBuffer = NULL;
+    }
+
+    return 0;
+}
+
+bool AudioDeviceLinuxPulse::RecordingIsInitialized() const
+{
+    return (_recIsInitialized);
+}
+
+bool AudioDeviceLinuxPulse::Recording() const
+{
+    return (_recording);
+}
+
+bool AudioDeviceLinuxPulse::PlayoutIsInitialized() const
+{
+    return (_playIsInitialized);
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::StartPlayout()
+{
+
+    if (!_playIsInitialized)
+    {
+        return -1;
+    }
+
+    if (_playing)
+    {
+        return 0;
+    }
+
+    // set state to ensure that playout starts from the audio thread
+    _startPlay = true;
+
+    // the audio thread will signal when playout has started
+    _timeEventPlay.Set();
+    if (kEventTimeout == _playStartEvent.Wait(10000))
+    {
+        _startPlay = false;
+        StopPlayout();
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to activate playout");
+        return -1;
+    }
+
+    if (_playing)
+    {
+        // the playing state is set by the audio thread after playout has started
+    } else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to activate playing");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::StopPlayout()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_playIsInitialized)
+    {
+        return 0;
+    }
+
+    if (_playStream == NULL)
+    {
+        return -1;
+    }
+
+    _playIsInitialized = false;
+    _playing = false;
+    _sndCardPlayDelay = 0;
+    _sndCardRecDelay = 0;
+
+    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                 "  stopping playback");
+
+    // Stop Playout
+    PaLock();
+
+    DisableWriteCallback();
+    LATE(pa_stream_set_underflow_callback)(_playStream, NULL, NULL);
+
+    // Unset this here so that we don't get a TERMINATED callback
+    LATE(pa_stream_set_state_callback)(_playStream, NULL, NULL);
+
+    if (LATE(pa_stream_get_state)(_playStream) != PA_STREAM_UNCONNECTED)
+    {
+        // Disconnect the stream
+        if (LATE(pa_stream_disconnect)(_playStream) != PA_OK)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to disconnect play stream, err=%d",
+                         LATE(pa_context_errno)(_paContext));
+            PaUnLock();
+            return -1;
+        }
+
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "  disconnected playback");
+    }
+
+    LATE(pa_stream_unref)(_playStream);
+    _playStream = NULL;
+
+    PaUnLock();
+
+    // Provide the playStream to the mixer
+    _mixerManager.SetPlayStream(_playStream);
+
+    if (_playBuffer)
+    {
+        delete [] _playBuffer;
+        _playBuffer = NULL;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::PlayoutDelay(WebRtc_UWord16& delayMS) const
+{
+    delayMS = (WebRtc_UWord16) _sndCardPlayDelay;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::RecordingDelay(WebRtc_UWord16& delayMS) const
+{
+    delayMS = (WebRtc_UWord16) _sndCardRecDelay;
+    return 0;
+}
+
+bool AudioDeviceLinuxPulse::Playing() const
+{
+    return (_playing);
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::SetPlayoutBuffer(
+    const AudioDeviceModule::BufferType type,
+    WebRtc_UWord16 sizeMS)
+{
+
+    if (type != AudioDeviceModule::kFixedBufferSize)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     " Adaptive buffer size not supported on this platform");
+        return -1;
+    }
+
+    _playBufType = type;
+    _playBufDelayFixed = sizeMS;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::PlayoutBuffer(
+    AudioDeviceModule::BufferType& type,
+    WebRtc_UWord16& sizeMS) const
+{
+
+    type = _playBufType;
+    sizeMS = _playBufDelayFixed;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::CPULoad(WebRtc_UWord16& /*load*/) const
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+bool AudioDeviceLinuxPulse::PlayoutWarning() const
+{
+  CriticalSectionScoped lock(&_critSect);
+  return (_playWarning > 0);
+}
+
+bool AudioDeviceLinuxPulse::PlayoutError() const
+{
+  CriticalSectionScoped lock(&_critSect);
+  return (_playError > 0);
+}
+
+bool AudioDeviceLinuxPulse::RecordingWarning() const
+{
+  CriticalSectionScoped lock(&_critSect);
+  return (_recWarning > 0);
+}
+
+bool AudioDeviceLinuxPulse::RecordingError() const
+{
+  CriticalSectionScoped lock(&_critSect);
+  return (_recError > 0);
+}
+
+void AudioDeviceLinuxPulse::ClearPlayoutWarning()
+{
+  CriticalSectionScoped lock(&_critSect);
+  _playWarning = 0;
+}
+
+void AudioDeviceLinuxPulse::ClearPlayoutError()
+{
+  CriticalSectionScoped lock(&_critSect);
+  _playError = 0;
+}
+
+void AudioDeviceLinuxPulse::ClearRecordingWarning()
+{
+  CriticalSectionScoped lock(&_critSect);
+  _recWarning = 0;
+}
+
+void AudioDeviceLinuxPulse::ClearRecordingError()
+{
+  CriticalSectionScoped lock(&_critSect);
+  _recError = 0;
+}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+void AudioDeviceLinuxPulse::PaContextStateCallback(pa_context *c, void *pThis)
+{
+    static_cast<AudioDeviceLinuxPulse*> (pThis)->PaContextStateCallbackHandler(
+        c);
+}
+
+// ----------------------------------------------------------------------------
+//  PaSinkInfoCallback
+// ----------------------------------------------------------------------------
+
+void AudioDeviceLinuxPulse::PaSinkInfoCallback(pa_context */*c*/,
+                                               const pa_sink_info *i, int eol,
+                                               void *pThis)
+{
+    static_cast<AudioDeviceLinuxPulse*> (pThis)->PaSinkInfoCallbackHandler(
+        i, eol);
+}
+
+void AudioDeviceLinuxPulse::PaSourceInfoCallback(pa_context */*c*/,
+                                                 const pa_source_info *i,
+                                                 int eol, void *pThis)
+{
+    static_cast<AudioDeviceLinuxPulse*> (pThis)->PaSourceInfoCallbackHandler(
+        i, eol);
+}
+
+void AudioDeviceLinuxPulse::PaServerInfoCallback(pa_context */*c*/,
+                                                 const pa_server_info *i,
+                                                 void *pThis)
+{
+    static_cast<AudioDeviceLinuxPulse*> (pThis)->PaServerInfoCallbackHandler(i);
+}
+
+void AudioDeviceLinuxPulse::PaStreamStateCallback(pa_stream *p, void *pThis)
+{
+    static_cast<AudioDeviceLinuxPulse*> (pThis)->PaStreamStateCallbackHandler(p);
+}
+
+void AudioDeviceLinuxPulse::PaContextStateCallbackHandler(pa_context *c)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                 "  context state cb");
+
+    pa_context_state_t state = LATE(pa_context_get_state)(c);
+    switch (state)
+    {
+        case PA_CONTEXT_UNCONNECTED:
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "  unconnected");
+            break;
+        case PA_CONTEXT_CONNECTING:
+        case PA_CONTEXT_AUTHORIZING:
+        case PA_CONTEXT_SETTING_NAME:
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "  no state");
+            break;
+        case PA_CONTEXT_FAILED:
+        case PA_CONTEXT_TERMINATED:
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "  failed");
+            _paStateChanged = true;
+            LATE(pa_threaded_mainloop_signal)(_paMainloop, 0);
+            break;
+        case PA_CONTEXT_READY:
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "  ready");
+            _paStateChanged = true;
+            LATE(pa_threaded_mainloop_signal)(_paMainloop, 0);
+            break;
+    }
+}
+
+void AudioDeviceLinuxPulse::PaSinkInfoCallbackHandler(const pa_sink_info *i,
+                                                      int eol)
+{
+    if (eol)
+    {
+        // Signal that we are done
+        LATE(pa_threaded_mainloop_signal)(_paMainloop, 0);
+        return;
+    }
+
+    if (_numPlayDevices == _deviceIndex)
+    {
+        // Convert the device index to the one of the sink
+        _paDeviceIndex = i->index;
+
+        if (_playDeviceName)
+        {
+            // Copy the sink name
+            strncpy(_playDeviceName, i->name, kAdmMaxDeviceNameSize);
+            _playDeviceName[kAdmMaxDeviceNameSize - 1] = '\0';
+        }
+        if (_playDisplayDeviceName)
+        {
+            // Copy the sink display name
+            strncpy(_playDisplayDeviceName, i->description,
+                    kAdmMaxDeviceNameSize);
+            _playDisplayDeviceName[kAdmMaxDeviceNameSize - 1] = '\0';
+        }
+    }
+
+    _numPlayDevices++;
+}
+
+void AudioDeviceLinuxPulse::PaSourceInfoCallbackHandler(
+    const pa_source_info *i,
+    int eol)
+{
+    if (eol)
+    {
+        // Signal that we are done
+        LATE(pa_threaded_mainloop_signal)(_paMainloop, 0);
+        return;
+    }
+
+    // We don't want to list output devices
+     if (i->monitor_of_sink == PA_INVALID_INDEX)
+    {
+        if (_numRecDevices == _deviceIndex)
+        {
+            // Convert the device index to the one of the source
+            _paDeviceIndex = i->index;
+
+            if (_recDeviceName)
+            {
+                // copy the source name
+                strncpy(_recDeviceName, i->name, kAdmMaxDeviceNameSize);
+                _recDeviceName[kAdmMaxDeviceNameSize - 1] = '\0';
+            }
+            if (_recDisplayDeviceName)
+            {
+                // Copy the source display name
+                strncpy(_recDisplayDeviceName, i->description,
+                        kAdmMaxDeviceNameSize);
+                _recDisplayDeviceName[kAdmMaxDeviceNameSize - 1] = '\0';
+            }
+        }
+
+        _numRecDevices++;
+    }
+}
+
+void AudioDeviceLinuxPulse::PaServerInfoCallbackHandler(const pa_server_info *i)
+{
+    // Use PA native sampling rate
+    WebRtc_UWord32 paSampleRate = i->sample_spec.rate;
+    if (paSampleRate == 44100)
+    {
+#ifdef WEBRTC_PA_GTALK
+        paSampleRate = 48000;
+#else
+        paSampleRate = 44000;
+#endif
+    }
+
+    _samplingFreq = paSampleRate / 1000;
+
+    // Copy the PA server version
+    strncpy(_paServerVersion, i->server_version, 31);
+    _paServerVersion[31] = '\0';
+
+    if (_recDisplayDeviceName)
+    {
+        // Copy the source name
+        strncpy(_recDisplayDeviceName, i->default_source_name,
+                kAdmMaxDeviceNameSize);
+        _recDisplayDeviceName[kAdmMaxDeviceNameSize - 1] = '\0';
+    }
+
+    if (_playDisplayDeviceName)
+    {
+        // Copy the sink name
+        strncpy(_playDisplayDeviceName, i->default_sink_name,
+                kAdmMaxDeviceNameSize);
+        _playDisplayDeviceName[kAdmMaxDeviceNameSize - 1] = '\0';
+    }
+
+    LATE(pa_threaded_mainloop_signal)(_paMainloop, 0);
+}
+
+void AudioDeviceLinuxPulse::PaStreamStateCallbackHandler(pa_stream *p)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                 "  stream state cb");
+
+    pa_stream_state_t state = LATE(pa_stream_get_state)(p);
+    switch (state)
+    {
+        case PA_STREAM_UNCONNECTED:
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "  unconnected");
+            break;
+        case PA_STREAM_CREATING:
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "  creating");
+            break;
+        case PA_STREAM_FAILED:
+        case PA_STREAM_TERMINATED:
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "  failed");
+            break;
+        case PA_STREAM_READY:
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "  ready");
+            break;
+    }
+
+    LATE(pa_threaded_mainloop_signal)(_paMainloop, 0);
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::CheckPulseAudioVersion()
+{
+    /*WebRtc_Word32 index = 0;
+     WebRtc_Word32 partIndex = 0;
+     WebRtc_Word32 partNum = 1;
+     WebRtc_Word32 minVersion[3] = {0, 9, 15};
+     bool versionOk = false;
+     char str[8] = {0};*/
+
+    PaLock();
+
+    pa_operation* paOperation = NULL;
+
+    // get the server info and update deviceName
+    paOperation = LATE(pa_context_get_server_info)(_paContext,
+                                                   PaServerInfoCallback, this);
+
+    WaitForOperationCompletion(paOperation);
+
+    PaUnLock();
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1,
+                 "  checking PulseAudio version: %s", _paServerVersion);
+
+    /* Saved because it may turn out that we need to check the version in the future
+     while (true)
+     {
+     if (_paServerVersion[index] == '.')
+     {
+     index++;
+     str[partIndex] = '\0';
+     partIndex = 0;
+
+     if(partNum == 2)
+     {
+     if (atoi(str) < minVersion[1])
+     {
+     break;
+     }
+     partNum = 3;
+     }
+     else
+     {
+     if (atoi(str) > minVersion[0])
+     {
+     versionOk = true;
+     break;
+     }
+     partNum = 2;
+     }
+     }
+     else if (_paServerVersion[index] == '\0' || _paServerVersion[index] == '-')
+     {
+     str[partIndex] = '\0';
+     if (atoi(str) >= minVersion[2])
+     {
+     versionOk = true;
+     }
+     break;
+     }
+
+     str[partIndex] = _paServerVersion[index];
+     index++;
+     partIndex++;
+     }
+
+     if (!versionOk)
+     {
+     return -1;
+     }
+     */
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::InitSamplingFrequency()
+{
+    PaLock();
+
+    pa_operation* paOperation = NULL;
+
+    // Get the server info and update _samplingFreq
+    paOperation = LATE(pa_context_get_server_info)(_paContext,
+                                                   PaServerInfoCallback, this);
+
+    WaitForOperationCompletion(paOperation);
+
+    PaUnLock();
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::GetDefaultDeviceInfo(bool recDevice,
+                                                          char* name,
+                                                          WebRtc_UWord16& index)
+{
+    char tmpName[kAdmMaxDeviceNameSize] = {0};
+    // subtract length of "default: "
+    WebRtc_UWord16 nameLen = kAdmMaxDeviceNameSize - 9;
+    char* pName = NULL;
+
+    if (name)
+    {
+        // Add "default: "
+        strcpy(name, "default: ");
+        pName = &name[9];
+    }
+
+    // Tell the callback that we want
+    // the name for this device
+    if (recDevice)
+    {
+        _recDisplayDeviceName = tmpName;
+    } else
+    {
+        _playDisplayDeviceName = tmpName;
+    }
+
+    // Set members
+    _paDeviceIndex = -1;
+    _deviceIndex = 0;
+    _numPlayDevices = 0;
+    _numRecDevices = 0;
+
+    PaLock();
+
+    pa_operation* paOperation = NULL;
+
+    // Get the server info and update deviceName
+    paOperation = LATE(pa_context_get_server_info)(_paContext,
+                                                   PaServerInfoCallback, this);
+
+    WaitForOperationCompletion(paOperation);
+
+    // Get the device index
+    if (recDevice)
+    {
+        paOperation
+            = LATE(pa_context_get_source_info_by_name)(_paContext,
+                                                       (char *) tmpName,
+                                                       PaSourceInfoCallback,
+                                                       this);
+    } else
+    {
+        paOperation
+            = LATE(pa_context_get_sink_info_by_name)(_paContext,
+                                                     (char *) tmpName,
+                                                     PaSinkInfoCallback, this);
+    }
+
+    WaitForOperationCompletion(paOperation);
+
+    PaUnLock();
+
+    // Set the index
+    index = _paDeviceIndex;
+
+    if (name)
+    {
+        // Copy to name string
+        strncpy(pName, tmpName, nameLen);
+    }
+
+    // Clear members
+    _playDisplayDeviceName = NULL;
+    _recDisplayDeviceName = NULL;
+    _paDeviceIndex = -1;
+    _deviceIndex = -1;
+    _numPlayDevices = 0;
+    _numRecDevices = 0;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::InitPulseAudio()
+{
+    int retVal = 0;
+
+    // Load libpulse
+    if (!PaSymbolTable.Load())
+    {
+        // Most likely the Pulse library and sound server are not installed on
+        // this system
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to load symbol table");
+        return -1;
+    }
+
+    // Create a mainloop API and connection to the default server
+    // the mainloop is the internal asynchronous API event loop
+    if (_paMainloop) {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  PA mainloop has already existed");
+        return -1;
+    }
+    _paMainloop = LATE(pa_threaded_mainloop_new)();
+    if (!_paMainloop)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  could not create mainloop");
+        return -1;
+    }
+
+    // Start the threaded main loop
+    retVal = LATE(pa_threaded_mainloop_start)(_paMainloop);
+    if (retVal != PA_OK)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to start main loop, error=%d", retVal);
+        return -1;
+    }
+
+    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                 "  mainloop running!");
+
+    PaLock();
+
+    _paMainloopApi = LATE(pa_threaded_mainloop_get_api)(_paMainloop);
+    if (!_paMainloopApi)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  could not create mainloop API");
+        PaUnLock();
+        return -1;
+    }
+
+    // Create a new PulseAudio context
+    if (_paContext){
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  PA context has already existed");
+        PaUnLock();
+        return -1;
+    }
+    _paContext = LATE(pa_context_new)(_paMainloopApi, "WEBRTC VoiceEngine");
+
+    if (!_paContext)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  could not create context");
+        PaUnLock();
+        return -1;
+    }
+
+    // Set state callback function
+    LATE(pa_context_set_state_callback)(_paContext, PaContextStateCallback,
+                                        this);
+
+    // Connect the context to a server (default)
+    _paStateChanged = false;
+    retVal = LATE(pa_context_connect)(_paContext, NULL, PA_CONTEXT_NOAUTOSPAWN,
+                                      NULL);
+
+    if (retVal != PA_OK)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to connect context, error=%d", retVal);
+        PaUnLock();
+        return -1;
+    }
+
+    // Wait for state change
+    while (!_paStateChanged)
+    {
+        LATE(pa_threaded_mainloop_wait)(_paMainloop);
+    }
+
+    // Now check to see what final state we reached.
+    pa_context_state_t state = LATE(pa_context_get_state)(_paContext);
+
+    if (state != PA_CONTEXT_READY)
+    {
+        if (state == PA_CONTEXT_FAILED)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to connect to PulseAudio sound server");
+        } else if (state == PA_CONTEXT_TERMINATED)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  PulseAudio connection terminated early");
+        } else
+        {
+            // Shouldn't happen, because we only signal on one of those three
+            // states
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  unknown problem connecting to PulseAudio");
+        }
+        PaUnLock();
+        return -1;
+    }
+
+    PaUnLock();
+
+    // Give the objects to the mixer manager
+    _mixerManager.SetPulseAudioObjects(_paMainloop, _paContext);
+
+    // Check the version
+    if (CheckPulseAudioVersion() < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  PulseAudio version %s not supported", _paServerVersion);
+        return -1;
+    }
+
+    // Initialize sampling frequency
+    if (InitSamplingFrequency() < 0 || _samplingFreq == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to initialize sampling frequency, set to %d",
+                     _samplingFreq);
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::TerminatePulseAudio()
+{
+    // Do nothing if the instance doesn't exist
+    // likely PaSymbolTable.Load() fails
+    if (!_paMainloop) {
+        return 0;
+    }
+
+    PaLock();
+
+    // Disconnect the context
+    if (_paContext)
+    {
+        LATE(pa_context_disconnect)(_paContext);
+    }
+
+    // Unreference the context
+    if (_paContext)
+    {
+        LATE(pa_context_unref)(_paContext);
+    }
+
+    PaUnLock();
+    _paContext = NULL;
+
+    // Stop the threaded main loop
+    if (_paMainloop)
+    {
+        LATE(pa_threaded_mainloop_stop)(_paMainloop);
+    }
+
+    // Free the mainloop
+    if (_paMainloop)
+    {
+        LATE(pa_threaded_mainloop_free)(_paMainloop);
+    }
+
+    _paMainloop = NULL;
+
+    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                 "  PulseAudio terminated");
+
+    return 0;
+}
+
+void AudioDeviceLinuxPulse::PaLock()
+{
+    LATE(pa_threaded_mainloop_lock)(_paMainloop);
+}
+
+void AudioDeviceLinuxPulse::PaUnLock()
+{
+    LATE(pa_threaded_mainloop_unlock)(_paMainloop);
+}
+
+void AudioDeviceLinuxPulse::WaitForOperationCompletion(
+    pa_operation* paOperation) const
+{
+    if (!paOperation)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "paOperation NULL in WaitForOperationCompletion");
+        return;
+    }
+
+    while (LATE(pa_operation_get_state)(paOperation) == PA_OPERATION_RUNNING)
+    {
+        LATE(pa_threaded_mainloop_wait)(_paMainloop);
+    }
+
+    LATE(pa_operation_unref)(paOperation);
+}
+
+// ============================================================================
+//                                  Thread Methods
+// ============================================================================
+
+void AudioDeviceLinuxPulse::EnableWriteCallback()
+{
+    if (LATE(pa_stream_get_state)(_playStream) == PA_STREAM_READY)
+    {
+        // May already have available space. Must check.
+        _tempBufferSpace = LATE(pa_stream_writable_size)(_playStream);
+        if (_tempBufferSpace > 0)
+        {
+            // Yup, there is already space available, so if we register a write
+            // callback then it will not receive any event. So dispatch one ourself
+            // instead
+            _timeEventPlay.Set();
+            return;
+        }
+    }
+
+    LATE(pa_stream_set_write_callback)(_playStream, &PaStreamWriteCallback,
+                                       this);
+}
+
+void AudioDeviceLinuxPulse::DisableWriteCallback()
+{
+    LATE(pa_stream_set_write_callback)(_playStream, NULL, NULL);
+}
+
+void AudioDeviceLinuxPulse::PaStreamWriteCallback(pa_stream */*unused*/,
+                                                  size_t buffer_space,
+                                                  void *pThis)
+{
+    static_cast<AudioDeviceLinuxPulse*> (pThis)->PaStreamWriteCallbackHandler(
+        buffer_space);
+}
+
+void AudioDeviceLinuxPulse::PaStreamWriteCallbackHandler(size_t bufferSpace)
+{
+    _tempBufferSpace = bufferSpace;
+
+    // Since we write the data asynchronously on a different thread, we have
+    // to temporarily disable the write callback or else Pulse will call it
+    // continuously until we write the data. We re-enable it below.
+    DisableWriteCallback();
+    _timeEventPlay.Set();
+}
+
+void AudioDeviceLinuxPulse::PaStreamUnderflowCallback(pa_stream */*unused*/,
+                                                      void *pThis)
+{
+    static_cast<AudioDeviceLinuxPulse*> (pThis)->PaStreamUnderflowCallbackHandler();
+}
+
+void AudioDeviceLinuxPulse::PaStreamUnderflowCallbackHandler()
+{
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  Playout underflow");
+
+    if (_configuredLatencyPlay == WEBRTC_PA_NO_LATENCY_REQUIREMENTS)
+    {
+        // We didn't configure a pa_buffer_attr before, so switching to one now
+        // would be questionable.
+        return;
+    }
+
+    // Otherwise reconfigure the stream with a higher target latency.
+
+    const pa_sample_spec *spec = LATE(pa_stream_get_sample_spec)(_playStream);
+    if (!spec)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  pa_stream_get_sample_spec()");
+        return;
+    }
+
+    size_t bytesPerSec = LATE(pa_bytes_per_second)(spec);
+    WebRtc_UWord32 newLatency = _configuredLatencyPlay + bytesPerSec
+        * WEBRTC_PA_PLAYBACK_LATENCY_INCREMENT_MSECS / WEBRTC_PA_MSECS_PER_SEC;
+
+    // Set the play buffer attributes
+    _playBufferAttr.maxlength = newLatency;
+    _playBufferAttr.tlength = newLatency;
+    _playBufferAttr.minreq = newLatency / WEBRTC_PA_PLAYBACK_REQUEST_FACTOR;
+    _playBufferAttr.prebuf = _playBufferAttr.tlength - _playBufferAttr.minreq;
+
+    pa_operation *op = LATE(pa_stream_set_buffer_attr)(_playStream,
+                                                       &_playBufferAttr, NULL,
+                                                       NULL);
+    if (!op)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  pa_stream_set_buffer_attr()");
+        return;
+    }
+
+    // Don't need to wait for this to complete.
+    LATE(pa_operation_unref)(op);
+
+    // Save the new latency in case we underflow again.
+    _configuredLatencyPlay = newLatency;
+}
+
+void AudioDeviceLinuxPulse::EnableReadCallback()
+{
+    LATE(pa_stream_set_read_callback)(_recStream, &PaStreamReadCallback, this);
+}
+
+void AudioDeviceLinuxPulse::DisableReadCallback()
+{
+    LATE(pa_stream_set_read_callback)(_recStream, NULL, NULL);
+}
+
+void AudioDeviceLinuxPulse::PaStreamReadCallback(pa_stream */*unused1*/,
+                                                 size_t /*unused2*/,
+                                                 void *pThis)
+{
+    static_cast<AudioDeviceLinuxPulse*> (pThis)->PaStreamReadCallbackHandler();
+}
+
+void AudioDeviceLinuxPulse::PaStreamReadCallbackHandler()
+{
+    // We get the data pointer and size now in order to save one Lock/Unlock
+    // in the worker thread
+    if (LATE(pa_stream_peek)(_recStream, &_tempSampleData, &_tempSampleDataSize)
+        != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Can't read data!");
+        return;
+    }
+
+    // Since we consume the data asynchronously on a different thread, we have
+    // to temporarily disable the read callback or else Pulse will call it
+    // continuously until we consume the data. We re-enable it below
+    DisableReadCallback();
+    _timeEventRec.Set();
+}
+
+void AudioDeviceLinuxPulse::PaStreamOverflowCallback(pa_stream */*unused*/,
+                                                     void *pThis)
+{
+    static_cast<AudioDeviceLinuxPulse*> (pThis)->PaStreamOverflowCallbackHandler();
+}
+
+void AudioDeviceLinuxPulse::PaStreamOverflowCallbackHandler()
+{
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  Recording overflow");
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::LatencyUsecs(pa_stream *stream)
+{
+    if (!WEBRTC_PA_REPORT_LATENCY)
+    {
+        return 0;
+    }
+
+    if (!stream)
+    {
+        return 0;
+    }
+
+    pa_usec_t latency;
+    int negative;
+    if (LATE(pa_stream_get_latency)(stream, &latency, &negative) != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  Can't query latency");
+        // We'd rather continue playout/capture with an incorrect delay than stop
+        // it altogether, so return a valid value.
+        return 0;
+    }
+
+    if (negative)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "  warning: pa_stream_get_latency reported negative delay");
+
+        // The delay can be negative for monitoring streams if the captured
+        // samples haven't been played yet. In such a case, "latency" contains the
+        // magnitude, so we must negate it to get the real value.
+        WebRtc_Word32 tmpLatency = (WebRtc_Word32) -latency;
+        if (tmpLatency < 0)
+        {
+            // Make sure that we don't use a negative delay
+            tmpLatency = 0;
+        }
+
+        return tmpLatency;
+    } else
+    {
+        return (WebRtc_Word32) latency;
+    }
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::ReadRecordedData(const void* bufferData,
+                                                      size_t bufferSize)
+{
+    size_t size = bufferSize;
+    WebRtc_UWord32 numRecSamples = _recordBufferSize / (2 * _recChannels);
+
+    // Account for the peeked data and the used data
+    WebRtc_UWord32 recDelay = (WebRtc_UWord32) ((LatencyUsecs(_recStream)
+        / 1000) + 10 * ((size + _recordBufferUsed) / _recordBufferSize));
+
+    _sndCardRecDelay = recDelay;
+
+    if (_playStream)
+    {
+        // Get the playout delay
+        _sndCardPlayDelay = (WebRtc_UWord32) (LatencyUsecs(_playStream) / 1000);
+    }
+
+    if (_recordBufferUsed > 0)
+    {
+        // Have to copy to the buffer until it is full
+        size_t copy = _recordBufferSize - _recordBufferUsed;
+        if (size < copy)
+        {
+            copy = size;
+        }
+
+        memcpy(&_recBuffer[_recordBufferUsed], bufferData, copy);
+        _recordBufferUsed += copy;
+        bufferData = static_cast<const char *> (bufferData) + copy;
+        size -= copy;
+
+        if (_recordBufferUsed != _recordBufferSize)
+        {
+            // Not enough data yet to pass to VoE
+            return 0;
+        }
+
+        // Provide data to VoiceEngine
+        if (ProcessRecordedData(_recBuffer, numRecSamples, recDelay) == -1)
+        {
+            // We have stopped recording
+            return -1;
+        }
+
+        _recordBufferUsed = 0;
+    }
+
+    // Now process full 10ms sample sets directly from the input
+    while (size >= _recordBufferSize)
+    {
+        // Provide data to VoiceEngine
+        if (ProcessRecordedData(
+            static_cast<WebRtc_Word8 *> (const_cast<void *> (bufferData)),
+            numRecSamples, recDelay) == -1)
+        {
+            // We have stopped recording
+            return -1;
+        }
+
+        bufferData = static_cast<const char *> (bufferData) + _recordBufferSize;
+        size -= _recordBufferSize;
+
+        // We have consumed 10ms of data
+        recDelay -= 10;
+    }
+
+    // Now save any leftovers for later.
+    if (size > 0)
+    {
+        memcpy(_recBuffer, bufferData, size);
+        _recordBufferUsed = size;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceLinuxPulse::ProcessRecordedData(
+    WebRtc_Word8 *bufferData,
+    WebRtc_UWord32 bufferSizeInSamples,
+    WebRtc_UWord32 recDelay)
+{
+    WebRtc_UWord32 currentMicLevel(0);
+    WebRtc_UWord32 newMicLevel(0);
+
+    _ptrAudioBuffer->SetRecordedBuffer(bufferData, bufferSizeInSamples);
+
+    if (AGC())
+    {
+        // Store current mic level in the audio buffer if AGC is enabled
+        if (MicrophoneVolume(currentMicLevel) == 0)
+        {
+            // This call does not affect the actual microphone volume
+            _ptrAudioBuffer->SetCurrentMicLevel(currentMicLevel);
+        }
+    }
+
+    const WebRtc_UWord32 clockDrift(0);
+    // TODO(andrew): this is a temporary hack, to avoid non-causal far- and
+    // near-end signals at the AEC for PulseAudio. I think the system delay is
+    // being correctly calculated here, but for legacy reasons we add +10 ms to
+    // the value in the AEC. The real fix will be part of a larger investigation
+    // into managing system delay in the AEC.
+    if (recDelay > 10)
+        recDelay -= 10;
+    else
+        recDelay = 0;
+    _ptrAudioBuffer->SetVQEData(_sndCardPlayDelay, recDelay, clockDrift);
+
+    // Deliver recorded samples at specified sample rate,
+    // mic level etc. to the observer using callback
+    UnLock();
+    _ptrAudioBuffer->DeliverRecordedData();
+    Lock();
+
+    // We have been unlocked - check the flag again
+    if (!_recording)
+    {
+        return -1;
+    }
+
+    if (AGC())
+    {
+        newMicLevel = _ptrAudioBuffer->NewMicLevel();
+        if (newMicLevel != 0)
+        {
+            // The VQE will only deliver non-zero microphone levels when a
+            // change is needed.
+            // Set this new mic level (received from the observer as return
+            // value in the callback).
+            WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id,
+                         "  AGC change of volume: old=%u => new=%u",
+                         currentMicLevel, newMicLevel);
+            if (SetMicrophoneVolume(newMicLevel) == -1)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,
+                             _id,
+                             "  the required modification of the microphone "
+                             "volume failed");
+            }
+        }
+    }
+
+    return 0;
+}
+
+bool AudioDeviceLinuxPulse::PlayThreadFunc(void* pThis)
+{
+    return (static_cast<AudioDeviceLinuxPulse*> (pThis)->PlayThreadProcess());
+}
+
+bool AudioDeviceLinuxPulse::RecThreadFunc(void* pThis)
+{
+    return (static_cast<AudioDeviceLinuxPulse*> (pThis)->RecThreadProcess());
+}
+
+bool AudioDeviceLinuxPulse::PlayThreadProcess()
+{
+    switch (_timeEventPlay.Wait(1000))
+    {
+        case kEventSignaled:
+            _timeEventPlay.Reset();
+            break;
+        case kEventError:
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "EventWrapper::Wait() failed");
+            return true;
+        case kEventTimeout:
+            return true;
+    }
+
+    Lock();
+
+    if (_startPlay)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "_startPlay true, performing initial actions");
+
+        _startPlay = false;
+        _playDeviceName = NULL;
+
+        // Set if not default device
+        if (_outputDeviceIndex > 0)
+        {
+            // Get the playout device name
+            _playDeviceName = new char[kAdmMaxDeviceNameSize];
+            _deviceIndex = _outputDeviceIndex;
+            PlayoutDevices();
+        }
+
+        // Start muted only supported on 0.9.11 and up
+        if (LATE(pa_context_get_protocol_version)(_paContext)
+            >= WEBRTC_PA_ADJUST_LATENCY_PROTOCOL_VERSION)
+        {
+            // Get the currently saved speaker mute status
+            // and set the initial mute status accordingly
+            bool enabled(false);
+            _mixerManager.SpeakerMute(enabled);
+            if (enabled)
+            {
+                _playStreamFlags |= PA_STREAM_START_MUTED;
+            }
+        }
+
+        // Get the currently saved speaker volume
+        WebRtc_UWord32 volume = 0;
+        if (update_speaker_volume_at_startup_)
+          _mixerManager.SpeakerVolume(volume);
+
+        PaLock();
+
+        // NULL gives PA the choice of startup volume.
+        pa_cvolume* ptr_cvolume = NULL;
+        if (update_speaker_volume_at_startup_) {
+          pa_cvolume cVolumes;
+          ptr_cvolume = &cVolumes;
+
+          // Set the same volume for all channels
+          const pa_sample_spec *spec =
+              LATE(pa_stream_get_sample_spec)(_playStream);
+          LATE(pa_cvolume_set)(&cVolumes, spec->channels, volume);
+          update_speaker_volume_at_startup_ = false;
+        }
+
+        // Connect the stream to a sink
+        if (LATE(pa_stream_connect_playback)(
+            _playStream,
+            _playDeviceName,
+            &_playBufferAttr,
+            (pa_stream_flags_t) _playStreamFlags,
+            ptr_cvolume, NULL) != PA_OK)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to connect play stream, err=%d",
+                         LATE(pa_context_errno)(_paContext));
+        }
+
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "  play stream connected");
+
+        // Wait for state change
+        while (LATE(pa_stream_get_state)(_playStream) != PA_STREAM_READY)
+        {
+            LATE(pa_threaded_mainloop_wait)(_paMainloop);
+        }
+
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "  play stream ready");
+
+        // We can now handle write callbacks
+        EnableWriteCallback();
+
+        PaUnLock();
+
+        // Clear device name
+        if (_playDeviceName)
+        {
+            delete [] _playDeviceName;
+            _playDeviceName = NULL;
+        }
+
+        _playing = true;
+        _playStartEvent.Set();
+
+        UnLock();
+        return true;
+    }
+
+    if (_playing)
+    {
+        if (!_recording)
+        {
+            // Update the playout delay
+            _sndCardPlayDelay = (WebRtc_UWord32) (LatencyUsecs(_playStream)
+                / 1000);
+        }
+
+        if (_playbackBufferUnused < _playbackBufferSize)
+        {
+
+            size_t write = _playbackBufferSize - _playbackBufferUnused;
+            if (_tempBufferSpace < write)
+            {
+                write = _tempBufferSpace;
+            }
+
+            PaLock();
+            if (LATE(pa_stream_write)(
+                                      _playStream,
+                                      (void *) &_playBuffer[_playbackBufferUnused],
+                                      write, NULL, (int64_t) 0,
+                                      PA_SEEK_RELATIVE) != PA_OK)
+            {
+                _writeErrors++;
+                if (_writeErrors > 10)
+                {
+                    if (_playError == 1)
+                    {
+                        WEBRTC_TRACE(kTraceWarning,
+                                     kTraceUtility, _id,
+                                     "  pending playout error exists");
+                    }
+                    _playError = 1; // Triggers callback from module process thread
+                    WEBRTC_TRACE(
+                                 kTraceError,
+                                 kTraceUtility,
+                                 _id,
+                                 "  kPlayoutError message posted: "
+                                 "_writeErrors=%u, error=%d",
+                                 _writeErrors,
+                                 LATE(pa_context_errno)(_paContext));
+                    _writeErrors = 0;
+                }
+            }
+            PaUnLock();
+
+            _playbackBufferUnused += write;
+            _tempBufferSpace -= write;
+        }
+
+        WebRtc_UWord32 numPlaySamples = _playbackBufferSize / (2
+            * _playChannels);
+        if (_tempBufferSpace > 0) // Might have been reduced to zero by the above
+        {
+            // Ask for new PCM data to be played out using the AudioDeviceBuffer
+            // ensure that this callback is executed without taking the
+            // audio-thread lock
+            UnLock();
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "  requesting data");
+            WebRtc_UWord32 nSamples =
+                _ptrAudioBuffer->RequestPlayoutData(numPlaySamples);
+            Lock();
+
+            // We have been unlocked - check the flag again
+            if (!_playing)
+            {
+                UnLock();
+                return true;
+            }
+
+            nSamples = _ptrAudioBuffer->GetPlayoutData(_playBuffer);
+            if (nSamples != numPlaySamples)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice,
+                             _id, "  invalid number of output samples(%d)",
+                             nSamples);
+            }
+
+            size_t write = _playbackBufferSize;
+            if (_tempBufferSpace < write)
+            {
+                write = _tempBufferSpace;
+            }
+
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         "  will write");
+            PaLock();
+            if (LATE(pa_stream_write)(_playStream, (void *) &_playBuffer[0],
+                                      write, NULL, (int64_t) 0,
+                                      PA_SEEK_RELATIVE) != PA_OK)
+            {
+                _writeErrors++;
+                if (_writeErrors > 10)
+                {
+                    if (_playError == 1)
+                    {
+                        WEBRTC_TRACE(kTraceWarning,
+                                     kTraceUtility, _id,
+                                     "  pending playout error exists");
+                    }
+                    _playError = 1; // triggers callback from module process thread
+                    WEBRTC_TRACE(
+                                 kTraceError,
+                                 kTraceUtility,
+                                 _id,
+                                 "  kPlayoutError message posted: "
+                                 "_writeErrors=%u, error=%d",
+                                 _writeErrors,
+                                 LATE(pa_context_errno)(_paContext));
+                    _writeErrors = 0;
+                }
+            }
+            PaUnLock();
+
+            _playbackBufferUnused = write;
+        }
+
+        _tempBufferSpace = 0;
+        PaLock();
+        EnableWriteCallback();
+        PaUnLock();
+
+    } // _playing
+
+    UnLock();
+    return true;
+}
+
+bool AudioDeviceLinuxPulse::RecThreadProcess()
+{
+    switch (_timeEventRec.Wait(1000))
+    {
+        case kEventSignaled:
+            _timeEventRec.Reset();
+            break;
+        case kEventError:
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "EventWrapper::Wait() failed");
+            return true;
+        case kEventTimeout:
+            return true;
+    }
+
+    Lock();
+
+    if (_startRec)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "_startRec true, performing initial actions");
+
+        _recDeviceName = NULL;
+
+        // Set if not default device
+        if (_inputDeviceIndex > 0)
+        {
+            // Get the recording device name
+            _recDeviceName = new char[kAdmMaxDeviceNameSize];
+            _deviceIndex = _inputDeviceIndex;
+            RecordingDevices();
+        }
+
+        PaLock();
+
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "  connecting stream");
+
+        // Connect the stream to a source
+        if (LATE(pa_stream_connect_record)(_recStream, _recDeviceName,
+                                           &_recBufferAttr,
+                                           (pa_stream_flags_t) _recStreamFlags)
+            != PA_OK)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to connect rec stream, err=%d",
+                         LATE(pa_context_errno)(_paContext));
+        }
+
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "  connected");
+
+        // Wait for state change
+        while (LATE(pa_stream_get_state)(_recStream) != PA_STREAM_READY)
+        {
+            LATE(pa_threaded_mainloop_wait)(_paMainloop);
+        }
+
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "  done");
+
+        // We can now handle read callbacks
+        EnableReadCallback();
+
+        PaUnLock();
+
+        // Clear device name
+        if (_recDeviceName)
+        {
+            delete [] _recDeviceName;
+            _recDeviceName = NULL;
+        }
+
+        _startRec = false;
+        _recording = true;
+        _recStartEvent.Set();
+
+        UnLock();
+        return true;
+    }
+
+    if (_recording)
+    {
+        // Read data and provide it to VoiceEngine
+        if (ReadRecordedData(_tempSampleData, _tempSampleDataSize) == -1)
+        {
+            UnLock();
+            return true;
+        }
+
+        _tempSampleData = NULL;
+        _tempSampleDataSize = 0;
+
+        PaLock();
+        while (true)
+        {
+            // Ack the last thing we read
+            if (LATE(pa_stream_drop)(_recStream) != 0)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,
+                             _id, "  failed to drop, err=%d\n",
+                             LATE(pa_context_errno)(_paContext));
+            }
+
+            if (LATE(pa_stream_readable_size)(_recStream) <= 0)
+            {
+                // Then that was all the data
+                break;
+            }
+
+            // Else more data.
+            const void *sampleData;
+            size_t sampleDataSize;
+
+            if (LATE(pa_stream_peek)(_recStream, &sampleData, &sampleDataSize)
+                != 0)
+            {
+                _recError = 1; // triggers callback from module process thread
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice,
+                             _id, "  RECORD_ERROR message posted, error = %d",
+                             LATE(pa_context_errno)(_paContext));
+                break;
+            }
+
+            _sndCardRecDelay = (WebRtc_UWord32) (LatencyUsecs(_recStream)
+                / 1000);
+
+            // Drop lock for sigslot dispatch, which could take a while.
+            PaUnLock();
+            // Read data and provide it to VoiceEngine
+            if (ReadRecordedData(sampleData, sampleDataSize) == -1)
+            {
+                UnLock();
+                return true;
+            }
+            PaLock();
+
+            // Return to top of loop for the ack and the check for more data.
+        }
+
+        EnableReadCallback();
+        PaUnLock();
+
+    } // _recording
+
+    UnLock();
+    return true;
+}
+
+}
diff --git a/src/modules/audio_device/main/source/linux/audio_device_pulse_linux.h b/src/modules/audio_device/main/source/linux/audio_device_pulse_linux.h
new file mode 100644
index 0000000..1a71fe5
--- /dev/null
+++ b/src/modules/audio_device/main/source/linux/audio_device_pulse_linux.h
@@ -0,0 +1,386 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_PULSE_LINUX_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_PULSE_LINUX_H
+
+#include "audio_device_generic.h"
+#include "audio_mixer_manager_pulse_linux.h"
+#include "critical_section_wrapper.h"
+
+#include <pulse/pulseaudio.h>
+
+// Set this define to make the code behave like in GTalk/libjingle
+//#define WEBRTC_PA_GTALK
+
+// We define this flag if it's missing from our headers, because we want to be
+// able to compile against old headers but still use PA_STREAM_ADJUST_LATENCY
+// if run against a recent version of the library.
+#ifndef PA_STREAM_ADJUST_LATENCY
+#define PA_STREAM_ADJUST_LATENCY 0x2000U
+#endif
+#ifndef PA_STREAM_START_MUTED
+#define PA_STREAM_START_MUTED 0x1000U
+#endif
+
+// Set this constant to 0 to disable latency reading
+const WebRtc_UWord32 WEBRTC_PA_REPORT_LATENCY = 1;
+
+// Constants from implementation by Tristan Schmelcher [tschmelcher@google.com]
+
+// First PulseAudio protocol version that supports PA_STREAM_ADJUST_LATENCY.
+const WebRtc_UWord32 WEBRTC_PA_ADJUST_LATENCY_PROTOCOL_VERSION = 13;
+
+// Some timing constants for optimal operation. See
+// https://tango.0pointer.de/pipermail/pulseaudio-discuss/2008-January/001170.html
+// for a good explanation of some of the factors that go into this.
+
+// Playback.
+
+// For playback, there is a round-trip delay to fill the server-side playback
+// buffer, so setting too low of a latency is a buffer underflow risk. We will
+// automatically increase the latency if a buffer underflow does occur, but we
+// also enforce a sane minimum at start-up time. Anything lower would be
+// virtually guaranteed to underflow at least once, so there's no point in
+// allowing lower latencies.
+const WebRtc_UWord32 WEBRTC_PA_PLAYBACK_LATENCY_MINIMUM_MSECS = 20;
+
+// Every time a playback stream underflows, we will reconfigure it with target
+// latency that is greater by this amount.
+const WebRtc_UWord32 WEBRTC_PA_PLAYBACK_LATENCY_INCREMENT_MSECS = 20;
+
+// We also need to configure a suitable request size. Too small and we'd burn
+// CPU from the overhead of transfering small amounts of data at once. Too large
+// and the amount of data remaining in the buffer right before refilling it
+// would be a buffer underflow risk. We set it to half of the buffer size.
+const WebRtc_UWord32 WEBRTC_PA_PLAYBACK_REQUEST_FACTOR = 2;
+
+// Capture.
+
+// For capture, low latency is not a buffer overflow risk, but it makes us burn
+// CPU from the overhead of transfering small amounts of data at once, so we set
+// a recommended value that we use for the kLowLatency constant (but if the user
+// explicitly requests something lower then we will honour it).
+// 1ms takes about 6-7% CPU. 5ms takes about 5%. 10ms takes about 4.x%.
+const WebRtc_UWord32 WEBRTC_PA_LOW_CAPTURE_LATENCY_MSECS = 10;
+
+// There is a round-trip delay to ack the data to the server, so the
+// server-side buffer needs extra space to prevent buffer overflow. 20ms is
+// sufficient, but there is no penalty to making it bigger, so we make it huge.
+// (750ms is libpulse's default value for the _total_ buffer size in the
+// kNoLatencyRequirements case.)
+const WebRtc_UWord32 WEBRTC_PA_CAPTURE_BUFFER_EXTRA_MSECS = 750;
+
+const WebRtc_UWord32 WEBRTC_PA_MSECS_PER_SEC = 1000;
+
+// Init _configuredLatencyRec/Play to this value to disable latency requirements
+const WebRtc_Word32 WEBRTC_PA_NO_LATENCY_REQUIREMENTS = -1;
+
+// Set this const to 1 to account for peeked and used data in latency calculation
+const WebRtc_UWord32 WEBRTC_PA_CAPTURE_BUFFER_LATENCY_ADJUSTMENT = 0;
+
+namespace webrtc
+{
+class EventWrapper;
+class ThreadWrapper;
+
+class AudioDeviceLinuxPulse: public AudioDeviceGeneric
+{
+public:
+    AudioDeviceLinuxPulse(const WebRtc_Word32 id);
+    ~AudioDeviceLinuxPulse();
+
+    static bool PulseAudioIsSupported();
+
+    // Retrieve the currently utilized audio layer
+    virtual WebRtc_Word32
+        ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const;
+
+    // Main initializaton and termination
+    virtual WebRtc_Word32 Init();
+    virtual WebRtc_Word32 Terminate();
+    virtual bool Initialized() const;
+
+    // Device enumeration
+    virtual WebRtc_Word16 PlayoutDevices();
+    virtual WebRtc_Word16 RecordingDevices();
+    virtual WebRtc_Word32 PlayoutDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize]);
+    virtual WebRtc_Word32 RecordingDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize]);
+
+    // Device selection
+    virtual WebRtc_Word32 SetPlayoutDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetPlayoutDevice(
+        AudioDeviceModule::WindowsDeviceType device);
+    virtual WebRtc_Word32 SetRecordingDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetRecordingDevice(
+        AudioDeviceModule::WindowsDeviceType device);
+
+    // Audio transport initialization
+    virtual WebRtc_Word32 PlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitPlayout();
+    virtual bool PlayoutIsInitialized() const;
+    virtual WebRtc_Word32 RecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitRecording();
+    virtual bool RecordingIsInitialized() const;
+
+    // Audio transport control
+    virtual WebRtc_Word32 StartPlayout();
+    virtual WebRtc_Word32 StopPlayout();
+    virtual bool Playing() const;
+    virtual WebRtc_Word32 StartRecording();
+    virtual WebRtc_Word32 StopRecording();
+    virtual bool Recording() const;
+
+    // Microphone Automatic Gain Control (AGC)
+    virtual WebRtc_Word32 SetAGC(bool enable);
+    virtual bool AGC() const;
+
+    // Volume control based on the Windows Wave API (Windows only)
+    virtual WebRtc_Word32 SetWaveOutVolume(WebRtc_UWord16 volumeLeft,
+                                           WebRtc_UWord16 volumeRight);
+    virtual WebRtc_Word32 WaveOutVolume(WebRtc_UWord16& volumeLeft,
+                                        WebRtc_UWord16& volumeRight) const;
+
+    // Audio mixer initialization
+    virtual WebRtc_Word32 SpeakerIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitSpeaker();
+    virtual bool SpeakerIsInitialized() const;
+    virtual WebRtc_Word32 MicrophoneIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitMicrophone();
+    virtual bool MicrophoneIsInitialized() const;
+
+    // Speaker volume controls
+    virtual WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
+
+    // Microphone volume controls
+    virtual WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 MicrophoneVolumeStepSize(
+        WebRtc_UWord16& stepSize) const;
+
+    // Speaker mute control
+    virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerMute(bool enable);
+    virtual WebRtc_Word32 SpeakerMute(bool& enabled) const;
+
+    // Microphone mute control
+    virtual WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneMute(bool enable);
+    virtual WebRtc_Word32 MicrophoneMute(bool& enabled) const;
+
+    // Microphone boost control
+    virtual WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    virtual WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
+
+    // Stereo support
+    virtual WebRtc_Word32 StereoPlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoPlayout(bool enable);
+    virtual WebRtc_Word32 StereoPlayout(bool& enabled) const;
+    virtual WebRtc_Word32 StereoRecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoRecording(bool enable);
+    virtual WebRtc_Word32 StereoRecording(bool& enabled) const;
+
+    // Delay information and control
+    virtual WebRtc_Word32
+        SetPlayoutBuffer(const AudioDeviceModule::BufferType type,
+                         WebRtc_UWord16 sizeMS);
+    virtual WebRtc_Word32 PlayoutBuffer(AudioDeviceModule::BufferType& type,
+                                        WebRtc_UWord16& sizeMS) const;
+    virtual WebRtc_Word32 PlayoutDelay(WebRtc_UWord16& delayMS) const;
+    virtual WebRtc_Word32 RecordingDelay(WebRtc_UWord16& delayMS) const;
+
+    // CPU load
+    virtual WebRtc_Word32 CPULoad(WebRtc_UWord16& load) const;
+
+public:
+    virtual bool PlayoutWarning() const;
+    virtual bool PlayoutError() const;
+    virtual bool RecordingWarning() const;
+    virtual bool RecordingError() const;
+    virtual void ClearPlayoutWarning();
+    virtual void ClearPlayoutError();
+    virtual void ClearRecordingWarning();
+    virtual void ClearRecordingError();
+
+public:
+    virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
+
+private:
+    void Lock()
+    {
+        _critSect.Enter();
+    }
+    ;
+    void UnLock()
+    {
+        _critSect.Leave();
+    }
+    ;
+    void WaitForOperationCompletion(pa_operation* paOperation) const;
+    void WaitForSuccess(pa_operation* paOperation) const;
+
+private:
+    static void PaContextStateCallback(pa_context *c, void *pThis);
+    static void PaSinkInfoCallback(pa_context *c, const pa_sink_info *i,
+                                   int eol, void *pThis);
+    static void PaSourceInfoCallback(pa_context *c, const pa_source_info *i,
+                                     int eol, void *pThis);
+    static void PaServerInfoCallback(pa_context *c, const pa_server_info *i,
+                                     void *pThis);
+    static void PaStreamStateCallback(pa_stream *p, void *pThis);
+    void PaContextStateCallbackHandler(pa_context *c);
+    void PaSinkInfoCallbackHandler(const pa_sink_info *i, int eol);
+    void PaSourceInfoCallbackHandler(const pa_source_info *i, int eol);
+    void PaServerInfoCallbackHandler(const pa_server_info *i);
+    void PaStreamStateCallbackHandler(pa_stream *p);
+
+    void EnableWriteCallback();
+    void DisableWriteCallback();
+    static void PaStreamWriteCallback(pa_stream *unused, size_t buffer_space,
+                                      void *pThis);
+    void PaStreamWriteCallbackHandler(size_t buffer_space);
+    static void PaStreamUnderflowCallback(pa_stream *unused, void *pThis);
+    void PaStreamUnderflowCallbackHandler();
+    void EnableReadCallback();
+    void DisableReadCallback();
+    static void PaStreamReadCallback(pa_stream *unused1, size_t unused2,
+                                     void *pThis);
+    void PaStreamReadCallbackHandler();
+    static void PaStreamOverflowCallback(pa_stream *unused, void *pThis);
+    void PaStreamOverflowCallbackHandler();
+    WebRtc_Word32 LatencyUsecs(pa_stream *stream);
+    WebRtc_Word32 ReadRecordedData(const void* bufferData, size_t bufferSize);
+    WebRtc_Word32 ProcessRecordedData(WebRtc_Word8 *bufferData,
+                                      WebRtc_UWord32 bufferSizeInSamples,
+                                      WebRtc_UWord32 recDelay);
+
+    WebRtc_Word32 CheckPulseAudioVersion();
+    WebRtc_Word32 InitSamplingFrequency();
+    WebRtc_Word32 GetDefaultDeviceInfo(bool recDevice, char* name,
+                                       WebRtc_UWord16& index);
+    WebRtc_Word32 InitPulseAudio();
+    WebRtc_Word32 TerminatePulseAudio();
+
+    void PaLock();
+    void PaUnLock();
+
+    static bool RecThreadFunc(void*);
+    static bool PlayThreadFunc(void*);
+    bool RecThreadProcess();
+    bool PlayThreadProcess();
+
+private:
+    AudioDeviceBuffer* _ptrAudioBuffer;
+
+    CriticalSectionWrapper& _critSect;
+    EventWrapper& _timeEventRec;
+    EventWrapper& _timeEventPlay;
+    EventWrapper& _recStartEvent;
+    EventWrapper& _playStartEvent;
+
+    ThreadWrapper* _ptrThreadPlay;
+    ThreadWrapper* _ptrThreadRec;
+    WebRtc_UWord32 _recThreadID;
+    WebRtc_UWord32 _playThreadID;
+    WebRtc_Word32 _id;
+
+    AudioMixerManagerLinuxPulse _mixerManager;
+
+    WebRtc_UWord16 _inputDeviceIndex;
+    WebRtc_UWord16 _outputDeviceIndex;
+    bool _inputDeviceIsSpecified;
+    bool _outputDeviceIsSpecified;
+
+    WebRtc_UWord32 _samplingFreq;
+    WebRtc_UWord8 _recChannels;
+    WebRtc_UWord8 _playChannels;
+
+    AudioDeviceModule::BufferType _playBufType;
+
+private:
+    bool _initialized;
+    bool _recording;
+    bool _playing;
+    bool _recIsInitialized;
+    bool _playIsInitialized;
+    bool _startRec;
+    bool _stopRec;
+    bool _startPlay;
+    bool _stopPlay;
+    bool _AGC;
+    bool update_speaker_volume_at_startup_;
+
+private:
+    WebRtc_UWord16 _playBufDelayFixed; // fixed playback delay
+
+    WebRtc_UWord32 _sndCardPlayDelay;
+    WebRtc_UWord32 _sndCardRecDelay;
+
+    WebRtc_Word32 _writeErrors;
+    WebRtc_UWord16 _playWarning;
+    WebRtc_UWord16 _playError;
+    WebRtc_UWord16 _recWarning;
+    WebRtc_UWord16 _recError;
+
+    WebRtc_UWord16 _deviceIndex;
+    WebRtc_Word16 _numPlayDevices;
+    WebRtc_Word16 _numRecDevices;
+    char* _playDeviceName;
+    char* _recDeviceName;
+    char* _playDisplayDeviceName;
+    char* _recDisplayDeviceName;
+    char _paServerVersion[32];
+
+    WebRtc_Word8* _playBuffer;
+    size_t _playbackBufferSize;
+    size_t _playbackBufferUnused;
+    size_t _tempBufferSpace;
+    WebRtc_Word8* _recBuffer;
+    size_t _recordBufferSize;
+    size_t _recordBufferUsed;
+    const void* _tempSampleData;
+    size_t _tempSampleDataSize;
+    WebRtc_Word32 _configuredLatencyPlay;
+    WebRtc_Word32 _configuredLatencyRec;
+
+    // PulseAudio
+    WebRtc_UWord16 _paDeviceIndex;
+    bool _paStateChanged;
+
+    pa_threaded_mainloop* _paMainloop;
+    pa_mainloop_api* _paMainloopApi;
+    pa_context* _paContext;
+
+    pa_stream* _recStream;
+    pa_stream* _playStream;
+    WebRtc_UWord32 _recStreamFlags;
+    WebRtc_UWord32 _playStreamFlags;
+    pa_buffer_attr _playBufferAttr;
+    pa_buffer_attr _recBufferAttr;
+};
+
+}
+
+#endif  // MODULES_AUDIO_DEVICE_MAIN_SOURCE_LINUX_AUDIO_DEVICE_PULSE_LINUX_H_
diff --git a/src/modules/audio_device/main/source/linux/audio_device_utility_linux.cc b/src/modules/audio_device/main/source/linux/audio_device_utility_linux.cc
new file mode 100644
index 0000000..25abcc9
--- /dev/null
+++ b/src/modules/audio_device/main/source/linux/audio_device_utility_linux.cc
@@ -0,0 +1,54 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_device_utility_linux.h"
+#include "audio_device_config.h"	// DEBUG_PRINT()
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+namespace webrtc
+{
+
+AudioDeviceUtilityLinux::AudioDeviceUtilityLinux(const WebRtc_Word32 id) :
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()), _id(id)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id,
+                 "%s created", __FUNCTION__);
+}
+
+AudioDeviceUtilityLinux::~AudioDeviceUtilityLinux()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s destroyed", __FUNCTION__);
+    {
+        CriticalSectionScoped lock(&_critSect);
+
+        // free stuff here...
+    }
+
+    delete &_critSect;
+}
+
+// ============================================================================
+//                                     API
+// ============================================================================
+
+
+WebRtc_Word32 AudioDeviceUtilityLinux::Init()
+{
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
+                 "  OS info: %s", "Linux");
+
+    return 0;
+}
+
+
+} // namespace webrtc
diff --git a/src/modules/audio_device/main/source/linux/audio_device_utility_linux.h b/src/modules/audio_device/main/source/linux/audio_device_utility_linux.h
new file mode 100644
index 0000000..0e3c410
--- /dev/null
+++ b/src/modules/audio_device/main/source/linux/audio_device_utility_linux.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_LINUX_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_LINUX_H
+
+#include "audio_device_utility.h"
+#include "audio_device.h"
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+
+class AudioDeviceUtilityLinux: public AudioDeviceUtility
+{
+public:
+    AudioDeviceUtilityLinux(const WebRtc_Word32 id);
+    ~AudioDeviceUtilityLinux();
+
+    virtual WebRtc_Word32 Init();
+
+private:
+    CriticalSectionWrapper& _critSect;
+    WebRtc_Word32 _id;
+};
+
+} // namespace webrtc
+
+#endif  // MODULES_AUDIO_DEVICE_MAIN_SOURCE_LINUX_AUDIO_DEVICE_UTILITY_LINUX_H_
diff --git a/src/modules/audio_device/main/source/linux/audio_mixer_manager_alsa_linux.cc b/src/modules/audio_device/main/source/linux/audio_mixer_manager_alsa_linux.cc
new file mode 100644
index 0000000..2e12f0a
--- /dev/null
+++ b/src/modules/audio_device/main/source/linux/audio_mixer_manager_alsa_linux.cc
@@ -0,0 +1,1317 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+
+#include "audio_mixer_manager_alsa_linux.h"
+#include "trace.h"
+
+extern webrtc_adm_linux_alsa::AlsaSymbolTable AlsaSymbolTable;
+
+// Accesses ALSA functions through our late-binding symbol table instead of
+// directly. This way we don't have to link to libalsa, which means our binary
+// will work on systems that don't have it.
+#define LATE(sym) \
+  LATESYM_GET(webrtc_adm_linux_alsa::AlsaSymbolTable, &AlsaSymbolTable, sym)
+
+namespace webrtc
+{
+
+AudioMixerManagerLinuxALSA::AudioMixerManagerLinuxALSA(const WebRtc_Word32 id) :
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _id(id),
+    _outputMixerHandle(NULL),
+    _inputMixerHandle(NULL),
+    _outputMixerElement(NULL),
+    _inputMixerElement(NULL)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s constructed", __FUNCTION__);
+
+    memset(_outputMixerStr, 0, kAdmMaxDeviceNameSize);
+    memset(_inputMixerStr, 0, kAdmMaxDeviceNameSize);
+}
+
+AudioMixerManagerLinuxALSA::~AudioMixerManagerLinuxALSA()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s destructed", __FUNCTION__);
+
+    Close();
+
+    delete &_critSect;
+}
+
+// ============================================================================
+//                                    PUBLIC METHODS
+// ============================================================================
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::Close()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s",
+                 __FUNCTION__);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    CloseSpeaker();
+    CloseMicrophone();
+
+    return 0;
+
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::CloseSpeaker()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s",
+                 __FUNCTION__);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    int errVal = 0;
+
+    if (_outputMixerHandle != NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Closing playout mixer");
+        LATE(snd_mixer_free)(_outputMixerHandle);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error freeing playout mixer: %s",
+                         LATE(snd_strerror)(errVal));
+        }
+        errVal = LATE(snd_mixer_detach)(_outputMixerHandle, _outputMixerStr);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error detachinging playout mixer: %s",
+                         LATE(snd_strerror)(errVal));
+        }
+        errVal = LATE(snd_mixer_close)(_outputMixerHandle);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error snd_mixer_close(handleMixer) errVal=%d",
+                         errVal);
+        }
+        _outputMixerHandle = NULL;
+        _outputMixerElement = NULL;
+    }
+    memset(_outputMixerStr, 0, kAdmMaxDeviceNameSize);
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::CloseMicrophone()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    int errVal = 0;
+
+    if (_inputMixerHandle != NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Closing record mixer");
+
+        LATE(snd_mixer_free)(_inputMixerHandle);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error freeing record mixer: %s",
+                         LATE(snd_strerror)(errVal));
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Closing record mixer 2");
+
+        errVal = LATE(snd_mixer_detach)(_inputMixerHandle, _inputMixerStr);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error detachinging record mixer: %s",
+                         LATE(snd_strerror)(errVal));
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Closing record mixer 3");
+
+        errVal = LATE(snd_mixer_close)(_inputMixerHandle);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error snd_mixer_close(handleMixer) errVal=%d",
+                         errVal);
+        }
+
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Closing record mixer 4");
+        _inputMixerHandle = NULL;
+        _inputMixerElement = NULL;
+    }
+    memset(_inputMixerStr, 0, kAdmMaxDeviceNameSize);
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::OpenSpeaker(char* deviceName)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxALSA::OpenSpeaker(name=%s)", deviceName);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    int errVal = 0;
+
+    // Close any existing output mixer handle
+    //
+    if (_outputMixerHandle != NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Closing playout mixer");
+
+        LATE(snd_mixer_free)(_outputMixerHandle);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error freeing playout mixer: %s",
+                         LATE(snd_strerror)(errVal));
+        }
+        errVal = LATE(snd_mixer_detach)(_outputMixerHandle, _outputMixerStr);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error detachinging playout mixer: %s",
+                         LATE(snd_strerror)(errVal));
+        }
+        errVal = LATE(snd_mixer_close)(_outputMixerHandle);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error snd_mixer_close(handleMixer) errVal=%d",
+                         errVal);
+        }
+    }
+    _outputMixerHandle = NULL;
+    _outputMixerElement = NULL;
+
+    errVal = LATE(snd_mixer_open)(&_outputMixerHandle, 0);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "snd_mixer_open(&_outputMixerHandle, 0) - error");
+        return -1;
+    }
+
+    char controlName[kAdmMaxDeviceNameSize] = { 0 };
+    GetControlName(controlName, deviceName);
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     snd_mixer_attach(_outputMixerHandle, %s)", controlName);
+
+    errVal = LATE(snd_mixer_attach)(_outputMixerHandle, controlName);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     snd_mixer_attach(_outputMixerHandle, %s) error: %s",
+                     controlName, LATE(snd_strerror)(errVal));
+        _outputMixerHandle = NULL;
+        return -1;
+    }
+    strcpy(_outputMixerStr, controlName);
+
+    errVal = LATE(snd_mixer_selem_register)(_outputMixerHandle, NULL, NULL);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     snd_mixer_selem_register(_outputMixerHandle,"
+                     " NULL, NULL), error: %s",
+                     LATE(snd_strerror)(errVal));
+        _outputMixerHandle = NULL;
+        return -1;
+    }
+
+    // Load and find the proper mixer element
+    if (LoadSpeakerMixerElement() < 0)
+    {
+        return -1;
+    }
+
+    if (_outputMixerHandle != NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  the output mixer device is now open (0x%x)",
+                     _outputMixerHandle);
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::OpenMicrophone(char *deviceName)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxALSA::OpenMicrophone(name=%s)",
+                 deviceName);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    int errVal = 0;
+
+    // Close any existing input mixer handle
+    //
+    if (_inputMixerHandle != NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Closing record mixer");
+
+        LATE(snd_mixer_free)(_inputMixerHandle);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error freeing record mixer: %s",
+                         LATE(snd_strerror)(errVal));
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Closing record mixer");
+
+        errVal = LATE(snd_mixer_detach)(_inputMixerHandle, _inputMixerStr);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error detachinging record mixer: %s",
+                         LATE(snd_strerror)(errVal));
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Closing record mixer");
+
+        errVal = LATE(snd_mixer_close)(_inputMixerHandle);
+        if (errVal < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "     Error snd_mixer_close(handleMixer) errVal=%d",
+                         errVal);
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Closing record mixer");
+    }
+    _inputMixerHandle = NULL;
+    _inputMixerElement = NULL;
+
+    errVal = LATE(snd_mixer_open)(&_inputMixerHandle, 0);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     snd_mixer_open(&_inputMixerHandle, 0) - error");
+        return -1;
+    }
+
+    char controlName[kAdmMaxDeviceNameSize] = { 0 };
+    GetControlName(controlName, deviceName);
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     snd_mixer_attach(_inputMixerHandle, %s)", controlName);
+
+    errVal = LATE(snd_mixer_attach)(_inputMixerHandle, controlName);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     snd_mixer_attach(_inputMixerHandle, %s) error: %s",
+                     controlName, LATE(snd_strerror)(errVal));
+
+        _inputMixerHandle = NULL;
+        return -1;
+    }
+    strcpy(_inputMixerStr, controlName);
+
+    errVal = LATE(snd_mixer_selem_register)(_inputMixerHandle, NULL, NULL);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     snd_mixer_selem_register(_inputMixerHandle,"
+                     " NULL, NULL), error: %s",
+                     LATE(snd_strerror)(errVal));
+
+        _inputMixerHandle = NULL;
+        return -1;
+    }
+    // Load and find the proper mixer element
+    if (LoadMicMixerElement() < 0)
+    {
+        return -1;
+    }
+
+    if (_inputMixerHandle != NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "  the input mixer device is now open (0x%x)",
+                     _inputMixerHandle);
+    }
+
+    return 0;
+}
+
+bool AudioMixerManagerLinuxALSA::SpeakerIsInitialized() const
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    return (_outputMixerHandle != NULL);
+}
+
+bool AudioMixerManagerLinuxALSA::MicrophoneIsInitialized() const
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s",
+                 __FUNCTION__);
+
+    return (_inputMixerHandle != NULL);
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::SetSpeakerVolume(
+    WebRtc_UWord32 volume)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxALSA::SetSpeakerVolume(volume=%u)",
+                 volume);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_outputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avaliable output mixer element exists");
+        return -1;
+    }
+
+    int errVal =
+        LATE(snd_mixer_selem_set_playback_volume_all)(_outputMixerElement,
+                                                      volume);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     Error changing master volume: %s",
+                     LATE(snd_strerror)(errVal));
+        return -1;
+    }
+
+    return (0);
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::SpeakerVolume(
+    WebRtc_UWord32& volume) const
+{
+
+    if (_outputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avaliable output mixer element exists");
+        return -1;
+    }
+
+    long int vol(0);
+
+    int
+        errVal = LATE(snd_mixer_selem_get_playback_volume)(
+            _outputMixerElement,
+            (snd_mixer_selem_channel_id_t) 0,
+            &vol);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "Error getting outputvolume: %s",
+                     LATE(snd_strerror)(errVal));
+        return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerLinuxALSA::SpeakerVolume() => vol=%i",
+                 vol);
+
+    volume = static_cast<WebRtc_UWord32> (vol);
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::MaxSpeakerVolume(
+    WebRtc_UWord32& maxVolume) const
+{
+
+    if (_outputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avilable output mixer element exists");
+        return -1;
+    }
+
+    long int minVol(0);
+    long int maxVol(0);
+
+    int errVal =
+        LATE(snd_mixer_selem_get_playback_volume_range)(_outputMixerElement,
+                                                        &minVol, &maxVol);
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     Playout hardware volume range, min: %d, max: %d",
+                 minVol, maxVol);
+
+    if (maxVol <= minVol)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     Error getting get_playback_volume_range: %s",
+                     LATE(snd_strerror)(errVal));
+    }
+
+    maxVolume = static_cast<WebRtc_UWord32> (maxVol);
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::MinSpeakerVolume(
+    WebRtc_UWord32& minVolume) const
+{
+
+    if (_outputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avaliable output mixer element exists");
+        return -1;
+    }
+
+    long int minVol(0);
+    long int maxVol(0);
+
+    int errVal =
+        LATE(snd_mixer_selem_get_playback_volume_range)(_outputMixerElement,
+                                                        &minVol, &maxVol);
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     Playout hardware volume range, min: %d, max: %d",
+                 minVol, maxVol);
+
+    if (maxVol <= minVol)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     Error getting get_playback_volume_range: %s",
+                     LATE(snd_strerror)(errVal));
+    }
+
+    minVolume = static_cast<WebRtc_UWord32> (minVol);
+
+    return 0;
+}
+
+// TL: Have done testnig with these but they don't seem reliable and
+// they were therefore not added
+/*
+ // ----------------------------------------------------------------------------
+ //    SetMaxSpeakerVolume
+ // ----------------------------------------------------------------------------
+
+ WebRtc_Word32 AudioMixerManagerLinuxALSA::SetMaxSpeakerVolume(
+     WebRtc_UWord32 maxVolume)
+ {
+
+ if (_outputMixerElement == NULL)
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ "  no avaliable output mixer element exists");
+ return -1;
+ }
+
+ long int minVol(0);
+ long int maxVol(0);
+
+ int errVal = snd_mixer_selem_get_playback_volume_range(
+ _outputMixerElement, &minVol, &maxVol);
+ if ((maxVol <= minVol) || (errVal != 0))
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+  "     Error getting playback volume range: %s", snd_strerror(errVal));
+ }
+
+ maxVol = maxVolume;
+ errVal = snd_mixer_selem_set_playback_volume_range(
+ _outputMixerElement, minVol, maxVol);
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+  "     Playout hardware volume range, min: %d, max: %d", minVol, maxVol);
+ if (errVal != 0)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+  "     Error setting playback volume range: %s", snd_strerror(errVal));
+ return -1;
+ }
+
+ return 0;
+ }
+
+ // ----------------------------------------------------------------------------
+ //    SetMinSpeakerVolume
+ // ----------------------------------------------------------------------------
+
+ WebRtc_Word32 AudioMixerManagerLinuxALSA::SetMinSpeakerVolume(
+     WebRtc_UWord32 minVolume)
+ {
+
+ if (_outputMixerElement == NULL)
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+ "  no avaliable output mixer element exists");
+ return -1;
+ }
+
+ long int minVol(0);
+ long int maxVol(0);
+
+ int errVal = snd_mixer_selem_get_playback_volume_range(
+ _outputMixerElement, &minVol, &maxVol);
+ if ((maxVol <= minVol) || (errVal != 0))
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+  "     Error getting playback volume range: %s", snd_strerror(errVal));
+ }
+
+ minVol = minVolume;
+ errVal = snd_mixer_selem_set_playback_volume_range(
+ _outputMixerElement, minVol, maxVol);
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "     Playout hardware volume range, min: %d, max: %d", minVol, maxVol);
+ if (errVal != 0)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+ "     Error setting playback volume range: %s", snd_strerror(errVal));
+ return -1;
+ }
+
+ return 0;
+ }
+ */
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::SpeakerVolumeStepSize(
+    WebRtc_UWord16& stepSize) const
+{
+
+    if (_outputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avaliable output mixer exists");
+        return -1;
+    }
+
+    // The step size is always 1 for ALSA
+    stepSize = 1;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::SpeakerVolumeIsAvailable(
+    bool& available)
+{
+    if (_outputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avaliable output mixer element exists");
+        return -1;
+    }
+
+    available = LATE(snd_mixer_selem_has_playback_volume)(_outputMixerElement);
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::SpeakerMuteIsAvailable(
+    bool& available)
+{
+    if (_outputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avaliable output mixer element exists");
+        return -1;
+    }
+
+    available = LATE(snd_mixer_selem_has_playback_switch)(_outputMixerElement);
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::SetSpeakerMute(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxALSA::SetSpeakerMute(enable=%u)",
+                 enable);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_outputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avaliable output mixer element exists");
+        return -1;
+    }
+
+    // Ensure that the selected speaker destination has a valid mute control.
+    bool available(false);
+    SpeakerMuteIsAvailable(available);
+    if (!available)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  it is not possible to mute the speaker");
+        return -1;
+    }
+
+    // Note value = 0 (off) means muted
+    int errVal =
+        LATE(snd_mixer_selem_set_playback_switch_all)(_outputMixerElement,
+                                                      !enable);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     Error setting playback switch: %s",
+                     LATE(snd_strerror)(errVal));
+        return -1;
+    }
+
+    return (0);
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::SpeakerMute(bool& enabled) const
+{
+
+    if (_outputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avaliable output mixer exists");
+        return -1;
+    }
+
+    // Ensure that the selected speaker destination has a valid mute control.
+    bool available =
+        LATE(snd_mixer_selem_has_playback_switch)(_outputMixerElement);
+    if (!available)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  it is not possible to mute the speaker");
+        return -1;
+    }
+
+    int value(false);
+
+    // Retrieve one boolean control value for a specified mute-control
+    //
+    int
+        errVal = LATE(snd_mixer_selem_get_playback_switch)(
+            _outputMixerElement,
+            (snd_mixer_selem_channel_id_t) 0,
+            &value);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     Error getting playback switch: %s",
+                     LATE(snd_strerror)(errVal));
+        return -1;
+    }
+
+    // Note value = 0 (off) means muted
+    enabled = (bool) !value;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::MicrophoneMuteIsAvailable(
+    bool& available)
+{
+    if (_inputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avaliable input mixer element exists");
+        return -1;
+    }
+
+    available = LATE(snd_mixer_selem_has_capture_switch)(_inputMixerElement);
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::SetMicrophoneMute(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxALSA::SetMicrophoneMute(enable=%u)",
+                 enable);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_inputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avaliable input mixer element exists");
+        return -1;
+    }
+
+    // Ensure that the selected microphone destination has a valid mute control.
+    bool available(false);
+    MicrophoneMuteIsAvailable(available);
+    if (!available)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  it is not possible to mute the microphone");
+        return -1;
+    }
+
+    // Note value = 0 (off) means muted
+    int errVal =
+        LATE(snd_mixer_selem_set_capture_switch_all)(_inputMixerElement,
+                                                     !enable);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     Error setting capture switch: %s",
+                     LATE(snd_strerror)(errVal));
+        return -1;
+    }
+
+    return (0);
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::MicrophoneMute(bool& enabled) const
+{
+
+    if (_inputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avaliable input mixer exists");
+        return -1;
+    }
+
+    // Ensure that the selected microphone destination has a valid mute control.
+    bool available =
+        LATE(snd_mixer_selem_has_capture_switch)(_inputMixerElement);
+    if (!available)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  it is not possible to mute the microphone");
+        return -1;
+    }
+
+    int value(false);
+
+    // Retrieve one boolean control value for a specified mute-control
+    //
+    int
+        errVal = LATE(snd_mixer_selem_get_capture_switch)(
+            _inputMixerElement,
+            (snd_mixer_selem_channel_id_t) 0,
+            &value);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     Error getting capture switch: %s",
+                     LATE(snd_strerror)(errVal));
+        return -1;
+    }
+
+    // Note value = 0 (off) means muted
+    enabled = (bool) !value;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::MicrophoneBoostIsAvailable(
+    bool& available)
+{
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avaliable input mixer exists");
+        return -1;
+    }
+
+    // Microphone boost cannot be enabled through ALSA Simple Mixer Interface
+    available = false;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::SetMicrophoneBoost(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxALSA::SetMicrophoneBoost(enable=%u)",
+                 enable);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avaliable input mixer exists");
+        return -1;
+    }
+
+    // Ensure that the selected microphone destination has a valid mute control.
+    bool available(false);
+    MicrophoneMuteIsAvailable(available);
+    if (!available)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  it is not possible to enable microphone boost");
+        return -1;
+    }
+
+    // It is assumed that the call above fails!
+
+    return (0);
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::MicrophoneBoost(bool& enabled) const
+{
+
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avaliable input mixer exists");
+        return -1;
+    }
+
+    // Microphone boost cannot be enabled on this platform!
+    enabled = false;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::MicrophoneVolumeIsAvailable(
+    bool& available)
+{
+    if (_inputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avaliable input mixer element exists");
+        return -1;
+    }
+
+    available = LATE(snd_mixer_selem_has_capture_volume)(_inputMixerElement);
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::SetMicrophoneVolume(
+    WebRtc_UWord32 volume)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxALSA::SetMicrophoneVolume(volume=%u)",
+                 volume);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_inputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avaliable input mixer element exists");
+        return -1;
+    }
+
+    int
+        errVal =
+            LATE(snd_mixer_selem_set_capture_volume_all)(_inputMixerElement,
+                                                         volume);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     Error changing microphone volume: %s",
+                     LATE(snd_strerror)(errVal));
+        return -1;
+    }
+
+    return (0);
+}
+
+// TL: Have done testnig with these but they don't seem reliable and
+// they were therefore not added
+/*
+ // ----------------------------------------------------------------------------
+ //    SetMaxMicrophoneVolume
+ // ----------------------------------------------------------------------------
+
+ WebRtc_Word32 AudioMixerManagerLinuxALSA::SetMaxMicrophoneVolume(
+     WebRtc_UWord32 maxVolume)
+ {
+
+ if (_inputMixerElement == NULL)
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+  "  no avaliable output mixer element exists");
+ return -1;
+ }
+
+ long int minVol(0);
+ long int maxVol(0);
+
+ int errVal = snd_mixer_selem_get_capture_volume_range(_inputMixerElement,
+  &minVol, &maxVol);
+ if ((maxVol <= minVol) || (errVal != 0))
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+  "     Error getting capture volume range: %s", snd_strerror(errVal));
+ }
+
+ maxVol = (long int)maxVolume;
+ printf("min %d max %d", minVol, maxVol);
+ errVal = snd_mixer_selem_set_capture_volume_range(_inputMixerElement, minVol, maxVol);
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+ "     Capture hardware volume range, min: %d, max: %d", minVol, maxVol);
+ if (errVal != 0)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+  "     Error setting capture volume range: %s", snd_strerror(errVal));
+ return -1;
+ }
+
+ return 0;
+ }
+
+ // ----------------------------------------------------------------------------
+ //    SetMinMicrophoneVolume
+ // ----------------------------------------------------------------------------
+
+ WebRtc_Word32 AudioMixerManagerLinuxALSA::SetMinMicrophoneVolume(
+ WebRtc_UWord32 minVolume)
+ {
+
+ if (_inputMixerElement == NULL)
+ {
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+  "  no avaliable output mixer element exists");
+ return -1;
+ }
+
+ long int minVol(0);
+ long int maxVol(0);
+
+ int errVal = snd_mixer_selem_get_capture_volume_range(
+ _inputMixerElement, &minVol, &maxVol);
+ if (maxVol <= minVol)
+ {
+ //maxVol = 255;
+ WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+  "     Error getting capture volume range: %s", snd_strerror(errVal));
+ }
+
+ printf("min %d max %d", minVol, maxVol);
+ minVol = (long int)minVolume;
+ errVal = snd_mixer_selem_set_capture_volume_range(
+ _inputMixerElement, minVol, maxVol);
+ WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+  "     Capture hardware volume range, min: %d, max: %d", minVol, maxVol);
+ if (errVal != 0)
+ {
+ WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+  "     Error setting capture volume range: %s", snd_strerror(errVal));
+ return -1;
+ }
+
+ return 0;
+ }
+ */
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::MicrophoneVolume(
+    WebRtc_UWord32& volume) const
+{
+
+    if (_inputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avaliable input mixer element exists");
+        return -1;
+    }
+
+    long int vol(0);
+
+    int
+        errVal =
+            LATE(snd_mixer_selem_get_capture_volume)(
+                _inputMixerElement,
+                (snd_mixer_selem_channel_id_t) 0,
+                &vol);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "Error getting inputvolume: %s",
+                     LATE(snd_strerror)(errVal));
+        return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerLinuxALSA::MicrophoneVolume() => vol=%i",
+                 vol);
+
+    volume = static_cast<WebRtc_UWord32> (vol);
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::MaxMicrophoneVolume(
+    WebRtc_UWord32& maxVolume) const
+{
+
+    if (_inputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avaliable input mixer element exists");
+        return -1;
+    }
+
+    long int minVol(0);
+    long int maxVol(0);
+
+    // check if we have mic volume at all
+    if (!LATE(snd_mixer_selem_has_capture_volume)(_inputMixerElement))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     No microphone volume available");
+        return -1;
+    }
+
+    int errVal =
+        LATE(snd_mixer_selem_get_capture_volume_range)(_inputMixerElement,
+                                                       &minVol, &maxVol);
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     Microphone hardware volume range, min: %d, max: %d",
+                 minVol, maxVol);
+    if (maxVol <= minVol)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     Error getting microphone volume range: %s",
+                     LATE(snd_strerror)(errVal));
+    }
+
+    maxVolume = static_cast<WebRtc_UWord32> (maxVol);
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::MinMicrophoneVolume(
+    WebRtc_UWord32& minVolume) const
+{
+
+    if (_inputMixerElement == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avaliable input mixer element exists");
+        return -1;
+    }
+
+    long int minVol(0);
+    long int maxVol(0);
+
+    int errVal =
+        LATE(snd_mixer_selem_get_capture_volume_range)(_inputMixerElement,
+                                                       &minVol, &maxVol);
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     Microphone hardware volume range, min: %d, max: %d",
+                 minVol, maxVol);
+    if (maxVol <= minVol)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     Error getting microphone volume range: %s",
+                     LATE(snd_strerror)(errVal));
+    }
+
+    minVolume = static_cast<WebRtc_UWord32> (minVol);
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::MicrophoneVolumeStepSize(
+    WebRtc_UWord16& stepSize) const
+{
+
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  no avaliable input mixer exists");
+        return -1;
+    }
+
+    // The step size is always 1 for ALSA
+    stepSize = 1;
+
+    return 0;
+}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::LoadMicMixerElement() const
+{
+    int errVal = LATE(snd_mixer_load)(_inputMixerHandle);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "snd_mixer_load(_inputMixerHandle), error: %s",
+                     LATE(snd_strerror)(errVal));
+        _inputMixerHandle = NULL;
+        return -1;
+    }
+
+    snd_mixer_elem_t *elem = NULL;
+    snd_mixer_elem_t *micElem = NULL;
+    unsigned mixerIdx = 0;
+    const char *selemName = NULL;
+
+    // Find and store handles to the right mixer elements
+    for (elem = LATE(snd_mixer_first_elem)(_inputMixerHandle); elem; elem
+        = LATE(snd_mixer_elem_next)(elem), mixerIdx++)
+    {
+        if (LATE(snd_mixer_selem_is_active)(elem))
+        {
+            selemName = LATE(snd_mixer_selem_get_name)(elem);
+            if (strcmp(selemName, "Capture") == 0) // "Capture", "Mic"
+            {
+                _inputMixerElement = elem;
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice,
+                             _id, "     Capture element set");
+            } else if (strcmp(selemName, "Mic") == 0)
+            {
+                micElem = elem;
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice,
+                             _id, "     Mic element found");
+            }
+        }
+
+        if (_inputMixerElement)
+        {
+            // Use the first Capture element that is found
+            // The second one may not work
+            break;
+        }
+    }
+
+    if (_inputMixerElement == NULL)
+    {
+        // We didn't find a Capture handle, use Mic.
+        if (micElem != NULL)
+        {
+            _inputMixerElement = micElem;
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                         "     Using Mic as capture volume.");
+        } else
+        {
+            _inputMixerElement = NULL;
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "Could not find capture volume on the mixer.");
+
+            return -1;
+        }
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxALSA::LoadSpeakerMixerElement() const
+{
+    int errVal = LATE(snd_mixer_load)(_outputMixerHandle);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "     snd_mixer_load(_outputMixerHandle), error: %s",
+                     LATE(snd_strerror)(errVal));
+        _outputMixerHandle = NULL;
+        return -1;
+    }
+
+    snd_mixer_elem_t *elem = NULL;
+    snd_mixer_elem_t *masterElem = NULL;
+    snd_mixer_elem_t *speakerElem = NULL;
+    unsigned mixerIdx = 0;
+    const char *selemName = NULL;
+
+    // Find and store handles to the right mixer elements
+    for (elem = LATE(snd_mixer_first_elem)(_outputMixerHandle); elem; elem
+        = LATE(snd_mixer_elem_next)(elem), mixerIdx++)
+    {
+        if (LATE(snd_mixer_selem_is_active)(elem))
+        {
+            selemName = LATE(snd_mixer_selem_get_name)(elem);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                         "snd_mixer_selem_get_name %d: %s =%x", mixerIdx,
+                         selemName, elem);
+
+            // "Master", "PCM", "Wave", "Master Mono", "PC Speaker", "PCM", "Wave"
+            if (strcmp(selemName, "PCM") == 0)
+            {
+                _outputMixerElement = elem;
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice,
+                             _id, "     PCM element set");
+            } else if (strcmp(selemName, "Master") == 0)
+            {
+                masterElem = elem;
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice,
+                             _id, "     Master element found");
+            } else if (strcmp(selemName, "Speaker") == 0)
+            {
+                speakerElem = elem;
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice,
+                             _id, "     Speaker element found");
+            }
+        }
+
+        if (_outputMixerElement)
+        {
+            // We have found the element we want
+            break;
+        }
+    }
+
+    // If we didn't find a PCM Handle, use Master or Speaker
+    if (_outputMixerElement == NULL)
+    {
+        if (masterElem != NULL)
+        {
+            _outputMixerElement = masterElem;
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                         "     Using Master as output volume.");
+        } else if (speakerElem != NULL)
+        {
+            _outputMixerElement = speakerElem;
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                         "     Using Speaker as output volume.");
+        } else
+        {
+            _outputMixerElement = NULL;
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "Could not find output volume in the mixer.");
+            return -1;
+        }
+    }
+
+    return 0;
+}
+
+void AudioMixerManagerLinuxALSA::GetControlName(char* controlName,
+                                                char* deviceName) const
+{
+    // Example
+    // deviceName: "front:CARD=Intel,DEV=0"
+    // controlName: "hw:CARD=Intel"
+    char* pos1 = strchr(deviceName, ':');
+    char* pos2 = strchr(deviceName, ',');
+    if (!pos2)
+    {
+        // Can also be default:CARD=Intel
+        pos2 = &deviceName[strlen(deviceName)];
+    }
+    if (pos1 && pos2)
+    {
+        strcpy(controlName, "hw");
+        int nChar = (int) (pos2 - pos1);
+        strncpy(&controlName[2], pos1, nChar);
+        controlName[2 + nChar] = '\0';
+    } else
+    {
+        strcpy(controlName, deviceName);
+    }
+
+}
+
+}
diff --git a/src/modules/audio_device/main/source/linux/audio_mixer_manager_alsa_linux.h b/src/modules/audio_device/main/source/linux/audio_mixer_manager_alsa_linux.h
new file mode 100644
index 0000000..94ea982
--- /dev/null
+++ b/src/modules/audio_device/main/source/linux/audio_mixer_manager_alsa_linux.h
@@ -0,0 +1,78 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_MIXER_MANAGER_ALSA_LINUX_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_MIXER_MANAGER_ALSA_LINUX_H
+
+#include "typedefs.h"
+#include "audio_device.h"
+#include "critical_section_wrapper.h"
+#include "alsasymboltable_linux.h"
+
+#include <alsa/asoundlib.h>
+
+namespace webrtc
+{
+
+class AudioMixerManagerLinuxALSA
+{
+public:
+    WebRtc_Word32 OpenSpeaker(char* deviceName);
+    WebRtc_Word32 OpenMicrophone(char* deviceName);
+    WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
+    WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
+    WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
+    WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
+    WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
+    WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
+    WebRtc_Word32 SetSpeakerMute(bool enable);
+    WebRtc_Word32 SpeakerMute(bool& enabled) const;
+    WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneMute(bool enable);
+    WebRtc_Word32 MicrophoneMute(bool& enabled) const;
+    WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
+    WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
+    WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
+    WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
+    WebRtc_Word32 MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const;
+    WebRtc_Word32 Close();
+    WebRtc_Word32 CloseSpeaker();
+    WebRtc_Word32 CloseMicrophone();
+    bool SpeakerIsInitialized() const;
+    bool MicrophoneIsInitialized() const;
+
+public:
+    AudioMixerManagerLinuxALSA(const WebRtc_Word32 id);
+    ~AudioMixerManagerLinuxALSA();
+
+private:
+    WebRtc_Word32 LoadMicMixerElement() const;
+    WebRtc_Word32 LoadSpeakerMixerElement() const;
+    void GetControlName(char *controlName, char* deviceName) const;
+
+private:
+    CriticalSectionWrapper& _critSect;
+    WebRtc_Word32 _id;
+    mutable snd_mixer_t* _outputMixerHandle;
+    char _outputMixerStr[kAdmMaxDeviceNameSize];
+    mutable snd_mixer_t* _inputMixerHandle;
+    char _inputMixerStr[kAdmMaxDeviceNameSize];
+    mutable snd_mixer_elem_t* _outputMixerElement;
+    mutable snd_mixer_elem_t* _inputMixerElement;
+};
+
+}
+
+#endif  // MODULES_AUDIO_DEVICE_MAIN_SOURCE_LINUX_AUDIO_MIXER_MANAGER_ALSA_LINUX_H_
diff --git a/src/modules/audio_device/main/source/linux/audio_mixer_manager_pulse_linux.cc b/src/modules/audio_device/main/source/linux/audio_mixer_manager_pulse_linux.cc
new file mode 100644
index 0000000..a0b6852
--- /dev/null
+++ b/src/modules/audio_device/main/source/linux/audio_mixer_manager_pulse_linux.cc
@@ -0,0 +1,1268 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+
+#include "audio_mixer_manager_pulse_linux.h"
+#include "trace.h"
+
+extern webrtc_adm_linux_pulse::PulseAudioSymbolTable PaSymbolTable;
+
+// Accesses Pulse functions through our late-binding symbol table instead of
+// directly. This way we don't have to link to libpulse, which means our binary
+// will work on systems that don't have it.
+#define LATE(sym) \
+  LATESYM_GET(webrtc_adm_linux_pulse::PulseAudioSymbolTable, &PaSymbolTable, sym)
+
+namespace webrtc
+{
+
+enum { kMaxRetryOnFailure = 2 };
+
+AudioMixerManagerLinuxPulse::AudioMixerManagerLinuxPulse(const WebRtc_Word32 id) :
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _id(id),
+    _paOutputDeviceIndex(-1),
+    _paInputDeviceIndex(-1),
+    _paPlayStream(NULL),
+    _paRecStream(NULL),
+    _paMainloop(NULL),
+    _paContext(NULL),
+    _paVolume(0),
+    _paMute(0),
+    _paVolSteps(0),
+    _paSpeakerMute(false),
+    _paSpeakerVolume(PA_VOLUME_NORM),
+    _paChannels(0),
+    _paObjectsSet(false),
+    _callbackValues(false)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s constructed", __FUNCTION__);
+}
+
+AudioMixerManagerLinuxPulse::~AudioMixerManagerLinuxPulse()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s destructed", __FUNCTION__);
+
+    Close();
+
+    delete &_critSect;
+}
+
+// ============================================================================
+//                                    PUBLIC METHODS
+// ============================================================================
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::SetPulseAudioObjects(
+    pa_threaded_mainloop* mainloop,
+    pa_context* context)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s",
+                 __FUNCTION__);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!mainloop || !context)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  could not set PulseAudio objects for mixer");
+        return -1;
+    }
+
+    _paMainloop = mainloop;
+    _paContext = context;
+    _paObjectsSet = true;
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "  the PulseAudio objects for the mixer has been set");
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::Close()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s",
+                 __FUNCTION__);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    CloseSpeaker();
+    CloseMicrophone();
+
+    _paMainloop = NULL;
+    _paContext = NULL;
+    _paObjectsSet = false;
+
+    return 0;
+
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::CloseSpeaker()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s",
+                 __FUNCTION__);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    // Reset the index to -1
+    _paOutputDeviceIndex = -1;
+    _paPlayStream = NULL;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::CloseMicrophone()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s",
+                 __FUNCTION__);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    // Reset the index to -1
+    _paInputDeviceIndex = -1;
+    _paRecStream = NULL;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::SetPlayStream(pa_stream* playStream)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxPulse::SetPlayStream(playStream)");
+
+    CriticalSectionScoped lock(&_critSect);
+    _paPlayStream = playStream;
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::SetRecStream(pa_stream* recStream)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxPulse::SetRecStream(recStream)");
+
+    CriticalSectionScoped lock(&_critSect);
+    _paRecStream = recStream;
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::OpenSpeaker(
+    WebRtc_UWord16 deviceIndex)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxPulse::OpenSpeaker(deviceIndex=%d)",
+                 deviceIndex);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    // No point in opening the speaker
+    // if PA objects have not been set
+    if (!_paObjectsSet)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  PulseAudio objects has not been set");
+        return -1;
+    }
+
+    // Set the index for the PulseAudio
+    // output device to control
+    _paOutputDeviceIndex = deviceIndex;
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "  the output mixer device is now open");
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::OpenMicrophone(
+    WebRtc_UWord16 deviceIndex)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxPulse::OpenMicrophone(deviceIndex=%d)",
+                 deviceIndex);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    // No point in opening the microphone
+    // if PA objects have not been set
+    if (!_paObjectsSet)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  PulseAudio objects have not been set");
+        return -1;
+    }
+
+    // Set the index for the PulseAudio
+    // input device to control
+    _paInputDeviceIndex = deviceIndex;
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "  the input mixer device is now open");
+
+    return 0;
+}
+
+bool AudioMixerManagerLinuxPulse::SpeakerIsInitialized() const
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s",
+                 __FUNCTION__);
+
+    return (_paOutputDeviceIndex != -1);
+}
+
+bool AudioMixerManagerLinuxPulse::MicrophoneIsInitialized() const
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s",
+                 __FUNCTION__);
+
+    return (_paInputDeviceIndex != -1);
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::SetSpeakerVolume(
+    WebRtc_UWord32 volume)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxPulse::SetSpeakerVolume(volume=%u)",
+                 volume);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_paOutputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  output device index has not been set");
+        return -1;
+    }
+
+    bool setFailed(false);
+
+    if (_paPlayStream && (LATE(pa_stream_get_state)(_paPlayStream)
+        != PA_STREAM_UNCONNECTED))
+    {
+        // We can only really set the volume if we have a connected stream
+        PaLock();
+
+        // Get the number of channels from the sample specification
+        const pa_sample_spec *spec =
+            LATE(pa_stream_get_sample_spec)(_paPlayStream);
+        if (!spec)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  could not get sample specification");
+            PaUnLock();
+            return -1;
+        }
+
+        // Set the same volume for all channels
+        pa_cvolume cVolumes;
+        LATE(pa_cvolume_set)(&cVolumes, spec->channels, volume);
+
+        pa_operation* paOperation = NULL;
+        paOperation = LATE(pa_context_set_sink_input_volume)(
+            _paContext,
+            LATE(pa_stream_get_index)(_paPlayStream),
+            &cVolumes,
+            PaSetVolumeCallback, NULL);
+        if (!paOperation)
+        {
+            setFailed = true;
+        }
+
+        // Don't need to wait for the completion
+        LATE(pa_operation_unref)(paOperation);
+
+        PaUnLock();
+    } else
+    {
+        // We have not created a stream or it's not connected to the sink
+        // Save the volume to be set at connection
+        _paSpeakerVolume = volume;
+    }
+
+    if (setFailed)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     " could not set speaker volume, error%d",
+                     LATE(pa_context_errno)(_paContext));
+
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::SpeakerVolume(WebRtc_UWord32& volume) const
+{
+
+    if (_paOutputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  output device index has not been set");
+        return -1;
+    }
+
+    if (_paPlayStream && (LATE(pa_stream_get_state)(_paPlayStream)
+        != PA_STREAM_UNCONNECTED))
+    {
+        // We can only get the volume if we have a connected stream
+        if (!GetSinkInputInfo())
+          return -1;
+
+        volume = static_cast<WebRtc_UWord32> (_paVolume);
+        ResetCallbackVariables();
+    } else
+    {
+        volume = _paSpeakerVolume;
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerLinuxPulse::SpeakerVolume() => vol=%i",
+                 volume);
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const
+{
+
+    if (_paOutputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  output device index has not been set");
+        return -1;
+    }
+
+    // PA_VOLUME_NORM corresponds to 100% (0db)
+    // but PA allows up to 150 db amplification
+    maxVolume = static_cast<WebRtc_UWord32> (PA_VOLUME_NORM);
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::MinSpeakerVolume(WebRtc_UWord32& minVolume) const
+{
+
+    if (_paOutputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  output device index has not been set");
+        return -1;
+    }
+
+    minVolume = static_cast<WebRtc_UWord32> (PA_VOLUME_MUTED);
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+
+    if (_paOutputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  output device index has not been set");
+        return -1;
+    }
+
+    // The sink input (stream) will always have step size = 1
+    // There are PA_VOLUME_NORM+1 steps
+    stepSize = 1;
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerLinuxPulse::SpeakerVolumeStepSize() => "
+                 "size=%i, stepSize");
+
+    // Reset members modified by callback
+    ResetCallbackVariables();
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::SpeakerVolumeIsAvailable(bool& available)
+{
+    if (_paOutputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  output device index has not been set");
+        return -1;
+    }
+
+    // Always available in Pulse Audio
+    available = true;
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::SpeakerMuteIsAvailable(bool& available)
+{
+    if (_paOutputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  output device index has not been set");
+        return -1;
+    }
+
+    // Always available in Pulse Audio
+    available = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::SetSpeakerMute(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxPulse::SetSpeakerMute(enable=%u)",
+                 enable);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_paOutputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  output device index has not been set");
+        return -1;
+    }
+
+    bool setFailed(false);
+
+    if (_paPlayStream && (LATE(pa_stream_get_state)(_paPlayStream)
+        != PA_STREAM_UNCONNECTED))
+    {
+        // We can only really mute if we have a connected stream
+        PaLock();
+
+        pa_operation* paOperation = NULL;
+        paOperation = LATE(pa_context_set_sink_input_mute)(
+            _paContext,
+            LATE(pa_stream_get_index)(_paPlayStream),
+            (int) enable,
+            PaSetVolumeCallback,
+            NULL);
+        if (!paOperation)
+        {
+            setFailed = true;
+        }
+
+        // Don't need to wait for the completion
+        LATE(pa_operation_unref)(paOperation);
+
+        PaUnLock();
+    } else
+    {
+        // We have not created a stream or it's not connected to the sink
+        // Save the mute status to be set at connection
+        _paSpeakerMute = enable;
+    }
+
+    if (setFailed)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     " could not mute speaker, error%d",
+                     LATE(pa_context_errno)(_paContext));
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::SpeakerMute(bool& enabled) const
+{
+
+    if (_paOutputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  output device index has not been set");
+        return -1;
+    }
+
+    if (_paPlayStream && (LATE(pa_stream_get_state)(_paPlayStream)
+        != PA_STREAM_UNCONNECTED))
+    {
+        // We can only get the mute status if we have a connected stream
+        if (!GetSinkInputInfo())
+          return -1;
+
+        enabled = static_cast<bool> (_paMute);
+        ResetCallbackVariables();
+    } else
+    {
+        enabled = _paSpeakerMute;
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerLinuxPulse::SpeakerMute() => "
+                 "enabled=%i, enabled");
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::StereoPlayoutIsAvailable(bool& available)
+{
+    if (_paOutputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  output device index has not been set");
+        return -1;
+    }
+
+    uint32_t deviceIndex = (uint32_t) _paOutputDeviceIndex;
+
+    PaLock();
+
+    // Get the actual stream device index if we have a connected stream
+    // The device used by the stream can be changed
+    // during the call
+    if (_paPlayStream && (LATE(pa_stream_get_state)(_paPlayStream)
+        != PA_STREAM_UNCONNECTED))
+    {
+        deviceIndex = LATE(pa_stream_get_device_index)(_paPlayStream);
+    }
+
+    PaUnLock();
+
+    if (!GetSinkInfoByIndex(deviceIndex))
+      return -1;
+
+    available = static_cast<bool> (_paChannels == 2);
+
+    // Reset members modified by callback
+    ResetCallbackVariables();
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::StereoRecordingIsAvailable(bool& available)
+{
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    uint32_t deviceIndex = (uint32_t) _paInputDeviceIndex;
+
+    PaLock();
+
+    // Get the actual stream device index if we have a connected stream
+    // The device used by the stream can be changed
+    // during the call
+    if (_paRecStream && (LATE(pa_stream_get_state)(_paRecStream)
+        != PA_STREAM_UNCONNECTED))
+    {
+        deviceIndex = LATE(pa_stream_get_device_index)(_paRecStream);
+    }
+
+    pa_operation* paOperation = NULL;
+    ResetCallbackVariables();
+
+    // Get info for this source
+    // We want to know if the actual device can record in stereo
+    paOperation = LATE(pa_context_get_source_info_by_index)(
+        _paContext, deviceIndex,
+        PaSourceInfoCallback,
+        (void*) this);
+
+    WaitForOperationCompletion(paOperation);
+    PaUnLock();
+
+    if (!_callbackValues)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "Error getting number of input channels: %d",
+                     LATE(pa_context_errno)(_paContext));
+        return -1;
+    }
+
+    available = static_cast<bool> (_paChannels == 2);
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerLinuxPulse::StereoRecordingIsAvailable()"
+                 " => available=%i, available");
+
+    // Reset members modified by callback
+    ResetCallbackVariables();
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::MicrophoneMuteIsAvailable(
+    bool& available)
+{
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    // Always available in Pulse Audio
+    available = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::SetMicrophoneMute(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxPulse::SetMicrophoneMute(enable=%u)",
+                 enable);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    bool setFailed(false);
+    pa_operation* paOperation = NULL;
+    ResetCallbackVariables();
+
+    uint32_t deviceIndex = (uint32_t) _paInputDeviceIndex;
+
+    PaLock();
+
+    // Get the actual stream device index if we have a connected stream
+    // The device used by the stream can be changed
+    // during the call
+    if (_paRecStream && (LATE(pa_stream_get_state)(_paRecStream)
+        != PA_STREAM_UNCONNECTED))
+    {
+        deviceIndex = LATE(pa_stream_get_device_index)(_paRecStream);
+    }
+
+    // Set mute switch for the source
+    paOperation = LATE(pa_context_set_source_mute_by_index)(
+        _paContext, deviceIndex,
+        enable,
+        PaSetVolumeCallback, NULL);
+
+    if (!paOperation)
+    {
+        setFailed = true;
+    }
+
+    // Don't need to wait for this to complete.
+    LATE(pa_operation_unref)(paOperation);
+
+    PaUnLock();
+
+    // Reset variables altered by callback
+    ResetCallbackVariables();
+
+    if (setFailed)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     " could not mute microphone, error%d",
+                     LATE(pa_context_errno)(_paContext));
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::MicrophoneMute(bool& enabled) const
+{
+
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    uint32_t deviceIndex = (uint32_t) _paInputDeviceIndex;
+
+    PaLock();
+
+    // Get the actual stream device index if we have a connected stream
+    // The device used by the stream can be changed
+    // during the call
+    if (_paRecStream && (LATE(pa_stream_get_state)(_paRecStream)
+        != PA_STREAM_UNCONNECTED))
+    {
+        deviceIndex = LATE(pa_stream_get_device_index)(_paRecStream);
+    }
+
+    PaUnLock();
+
+    if (!GetSourceInfoByIndex(deviceIndex))
+      return -1;
+
+    enabled = static_cast<bool> (_paMute);
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerLinuxPulse::MicrophoneMute() =>"
+                 " enabled=%i, enabled");
+
+    // Reset members modified by callback
+    ResetCallbackVariables();
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::MicrophoneBoostIsAvailable(bool& available)
+{
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    // Always unavailable in Pulse Audio
+    // Could make it possible to use PA_VOLUME_MAX
+    // but that gives bad audio with some sound cards
+    available = false;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::SetMicrophoneBoost(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxPulse::SetMicrophoneBoost(enable=%u)",
+                 enable);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    // Ensure that the selected microphone destination has a valid boost control
+    bool available(false);
+    MicrophoneBoostIsAvailable(available);
+    if (!available)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  it is not possible to enable microphone boost");
+        return -1;
+    }
+
+    // It is assumed that the call above fails!
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::MicrophoneBoost(bool& enabled) const
+{
+
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    // Microphone boost cannot be enabled on this platform!
+    enabled = false;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::MicrophoneVolumeIsAvailable(
+    bool& available)
+{
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    // Always available in Pulse Audio
+    available = true;
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::SetMicrophoneVolume(WebRtc_UWord32 volume)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerLinuxPulse::SetMicrophoneVolume(volume=%u)",
+                 volume);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    // Unlike output streams, input streams have no concept of a stream volume,
+    // only a device volume. So we have to change the volume of the device
+    // itself.
+
+    // The device may have a different number of channels than the stream and
+    // their mapping may be different, so we don't want to use the channel count
+    // from our sample spec. We could use PA_CHANNELS_MAX to cover our bases,
+    // and the server allows that even if the device's channel count is lower,
+    // but some buggy PA clients don't like that (the pavucontrol on Hardy dies
+    // in an assert if the channel count is different). So instead we look up
+    // the actual number of channels that the device has.
+
+    uint32_t deviceIndex = (uint32_t) _paInputDeviceIndex;
+
+    PaLock();
+
+    // Get the actual stream device index if we have a connected stream
+    // The device used by the stream can be changed
+    // during the call
+    if (_paRecStream && (LATE(pa_stream_get_state)(_paRecStream)
+        != PA_STREAM_UNCONNECTED))
+    {
+        deviceIndex = LATE(pa_stream_get_device_index)(_paRecStream);
+    }
+
+    bool setFailed(false);
+    pa_operation* paOperation = NULL;
+    ResetCallbackVariables();
+
+    // Get the number of channels for this source
+    paOperation
+        = LATE(pa_context_get_source_info_by_index)(_paContext, deviceIndex,
+                                                    PaSourceInfoCallback,
+                                                    (void*) this);
+
+    WaitForOperationCompletion(paOperation);
+
+    if (!_callbackValues)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "Error getting input channels: %d",
+                     LATE(pa_context_errno)(_paContext));
+        PaUnLock();
+        return -1;
+    }
+
+    WebRtc_UWord8 channels = _paChannels;
+    ResetCallbackVariables();
+
+    pa_cvolume cVolumes;
+    LATE(pa_cvolume_set)(&cVolumes, channels, volume);
+
+    // Set the volume for the source
+    paOperation
+        = LATE(pa_context_set_source_volume_by_index)(_paContext, deviceIndex,
+                                                      &cVolumes,
+                                                      PaSetVolumeCallback, NULL);
+
+    if (!paOperation)
+    {
+        setFailed = true;
+    }
+
+    // Don't need to wait for this to complete.
+    LATE(pa_operation_unref)(paOperation);
+
+    PaUnLock();
+
+    // Reset variables altered by callback
+    ResetCallbackVariables();
+
+    if (setFailed)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     " could not set microphone volume, error%d",
+                     LATE(pa_context_errno)(_paContext));
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::MicrophoneVolume(WebRtc_UWord32& volume) const
+{
+
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    uint32_t deviceIndex = (uint32_t) _paInputDeviceIndex;
+
+    PaLock();
+
+    // Get the actual stream device index if we have a connected stream
+    // The device used by the stream can be changed
+    // during the call
+    if (_paRecStream && (LATE(pa_stream_get_state)(_paRecStream)
+        != PA_STREAM_UNCONNECTED))
+    {
+        deviceIndex = LATE(pa_stream_get_device_index)(_paRecStream);
+    }
+
+    PaUnLock();
+
+    if (!GetSourceInfoByIndex(deviceIndex))
+      return -1;
+
+    volume = static_cast<WebRtc_UWord32> (_paVolume);
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerLinuxPulse::MicrophoneVolume() => vol=%i, volume");
+
+    // Reset members modified by callback
+    ResetCallbackVariables();
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const
+{
+
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    // PA_VOLUME_NORM corresponds to 100% (0db)
+    // PA allows up to 150 db amplification (PA_VOLUME_MAX)
+    // but that doesn't work well for all sound cards
+    maxVolume = static_cast<WebRtc_UWord32> (PA_VOLUME_NORM);
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerLinuxPulse::MinMicrophoneVolume(WebRtc_UWord32& minVolume) const
+{
+
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    minVolume = static_cast<WebRtc_UWord32> (PA_VOLUME_MUTED);
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerLinuxPulse::MicrophoneVolumeStepSize(
+    WebRtc_UWord16& stepSize) const
+{
+
+    if (_paInputDeviceIndex == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  input device index has not been set");
+        return -1;
+    }
+
+    uint32_t deviceIndex = (uint32_t) _paInputDeviceIndex;
+
+    PaLock();
+
+    // Get the actual stream device index if we have a connected stream
+    // The device used by the stream can be changed
+    // during the call
+    if (_paRecStream && (LATE(pa_stream_get_state)(_paRecStream)
+        != PA_STREAM_UNCONNECTED))
+    {
+        deviceIndex = LATE(pa_stream_get_device_index)(_paRecStream);
+    }
+
+    pa_operation* paOperation = NULL;
+    ResetCallbackVariables();
+
+    // Get info for this source
+    paOperation
+        = LATE(pa_context_get_source_info_by_index)(_paContext, deviceIndex,
+                                                    PaSourceInfoCallback,
+                                                    (void*) this);
+
+    WaitForOperationCompletion(paOperation);
+
+    PaUnLock();
+
+    if (!_callbackValues)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "Error getting step size: %d",
+                     LATE(pa_context_errno)(_paContext));
+        return -1;
+    }
+
+    stepSize = static_cast<WebRtc_UWord16> ((PA_VOLUME_NORM + 1) / _paVolSteps);
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerLinuxPulse::MicrophoneVolumeStepSize()"
+                 " => size=%i, stepSize");
+
+    // Reset members modified by callback
+    ResetCallbackVariables();
+
+    return 0;
+}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+void AudioMixerManagerLinuxPulse::PaSinkInfoCallback(pa_context */*c*/,
+                                                     const pa_sink_info *i,
+                                                     int eol, void *pThis)
+{
+    static_cast<AudioMixerManagerLinuxPulse*> (pThis)-> PaSinkInfoCallbackHandler(
+        i, eol);
+}
+
+void AudioMixerManagerLinuxPulse::PaSinkInputInfoCallback(
+    pa_context */*c*/,
+    const pa_sink_input_info *i,
+    int eol, void *pThis)
+{
+    static_cast<AudioMixerManagerLinuxPulse*> (pThis)->
+        PaSinkInputInfoCallbackHandler(i, eol);
+}
+
+
+void AudioMixerManagerLinuxPulse::PaSourceInfoCallback(pa_context */*c*/,
+                                                       const pa_source_info *i,
+                                                       int eol, void *pThis)
+{
+    static_cast<AudioMixerManagerLinuxPulse*> (pThis)->
+        PaSourceInfoCallbackHandler(i, eol);
+}
+
+void AudioMixerManagerLinuxPulse::PaSetVolumeCallback(pa_context * c,
+                                                      int success, void */*pThis*/)
+{
+    if (!success)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+                     " failed to set volume");
+    }
+}
+
+void AudioMixerManagerLinuxPulse::PaSinkInfoCallbackHandler(
+    const pa_sink_info *i,
+    int eol)
+{
+    if (eol)
+    {
+        // Signal that we are done
+        LATE(pa_threaded_mainloop_signal)(_paMainloop, 0);
+        return;
+    }
+
+    _callbackValues = true;
+    _paChannels = i->channel_map.channels; // Get number of channels
+    pa_volume_t paVolume = PA_VOLUME_MUTED; // Minimum possible value.
+    for (int j = 0; j < _paChannels; ++j)
+    {
+        if (paVolume < i->volume.values[j])
+        {
+            paVolume = i->volume.values[j];
+        }
+    }
+    _paVolume = paVolume; // get the max volume for any channel
+    _paMute = i->mute; // get mute status
+
+    // supported since PA 0.9.15
+    //_paVolSteps = i->n_volume_steps; // get the number of volume steps
+    // default value is PA_VOLUME_NORM+1
+    _paVolSteps = PA_VOLUME_NORM + 1;
+}
+
+void AudioMixerManagerLinuxPulse::PaSinkInputInfoCallbackHandler(
+    const pa_sink_input_info *i,
+    int eol)
+{
+    if (eol)
+    {
+        // Signal that we are done
+        LATE(pa_threaded_mainloop_signal)(_paMainloop, 0);
+        return;
+    }
+
+    _callbackValues = true;
+    _paChannels = i->channel_map.channels; // Get number of channels
+    pa_volume_t paVolume = PA_VOLUME_MUTED; // Minimum possible value.
+    for (int j = 0; j < _paChannels; ++j)
+    {
+        if (paVolume < i->volume.values[j])
+        {
+            paVolume = i->volume.values[j];
+        }
+    }
+    _paVolume = paVolume; // Get the max volume for any channel
+    _paMute = i->mute; // Get mute status
+}
+
+void AudioMixerManagerLinuxPulse::PaSourceInfoCallbackHandler(
+    const pa_source_info *i,
+    int eol)
+{
+    if (eol)
+    {
+        // Signal that we are done
+        LATE(pa_threaded_mainloop_signal)(_paMainloop, 0);
+        return;
+    }
+
+    _callbackValues = true;
+    _paChannels = i->channel_map.channels; // Get number of channels
+    pa_volume_t paVolume = PA_VOLUME_MUTED; // Minimum possible value.
+    for (int j = 0; j < _paChannels; ++j)
+    {
+        if (paVolume < i->volume.values[j])
+        {
+            paVolume = i->volume.values[j];
+        }
+    }
+    _paVolume = paVolume; // Get the max volume for any channel
+    _paMute = i->mute; // Get mute status
+
+    // supported since PA 0.9.15
+    //_paVolSteps = i->n_volume_steps; // Get the number of volume steps
+    // default value is PA_VOLUME_NORM+1
+    _paVolSteps = PA_VOLUME_NORM + 1;
+}
+
+void AudioMixerManagerLinuxPulse::ResetCallbackVariables() const
+{
+    _paVolume = 0;
+    _paMute = 0;
+    _paVolSteps = 0;
+    _paChannels = 0;
+    _callbackValues = false;
+}
+
+void AudioMixerManagerLinuxPulse::WaitForOperationCompletion(
+    pa_operation* paOperation) const
+{
+    while (LATE(pa_operation_get_state)(paOperation) == PA_OPERATION_RUNNING)
+    {
+        LATE(pa_threaded_mainloop_wait)(_paMainloop);
+    }
+
+    LATE(pa_operation_unref)(paOperation);
+}
+
+void AudioMixerManagerLinuxPulse::PaLock() const
+{
+    LATE(pa_threaded_mainloop_lock)(_paMainloop);
+}
+
+void AudioMixerManagerLinuxPulse::PaUnLock() const
+{
+    LATE(pa_threaded_mainloop_unlock)(_paMainloop);
+}
+
+bool AudioMixerManagerLinuxPulse::GetSinkInputInfo() const {
+  pa_operation* paOperation = NULL;
+  ResetCallbackVariables();
+
+  PaLock();
+  for (int retries = 0; retries < kMaxRetryOnFailure && !_callbackValues;
+       retries ++) {
+    // Get info for this stream (sink input).
+    paOperation = LATE(pa_context_get_sink_input_info)(
+        _paContext,
+        LATE(pa_stream_get_index)(_paPlayStream),
+        PaSinkInputInfoCallback,
+        (void*) this);
+
+    WaitForOperationCompletion(paOperation);
+  }
+  PaUnLock();
+
+  if (!_callbackValues) {
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                 "GetSinkInputInfo failed to get volume info : %d",
+                 LATE(pa_context_errno)(_paContext));
+    return false;
+  }
+
+  return true;
+}
+
+bool AudioMixerManagerLinuxPulse::GetSinkInfoByIndex(
+    int device_index) const {
+  pa_operation* paOperation = NULL;
+  ResetCallbackVariables();
+
+  PaLock();
+  for (int retries = 0; retries < kMaxRetryOnFailure && !_callbackValues;
+       retries ++) {
+    paOperation = LATE(pa_context_get_sink_info_by_index)(_paContext,
+        device_index, PaSinkInfoCallback, (void*) this);
+
+    WaitForOperationCompletion(paOperation);
+  }
+  PaUnLock();
+
+  if (!_callbackValues) {
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                 "GetSinkInfoByIndex failed to get volume info: %d",
+                 LATE(pa_context_errno)(_paContext));
+    return false;
+  }
+
+  return true;
+}
+
+bool AudioMixerManagerLinuxPulse::GetSourceInfoByIndex(
+    int device_index) const {
+  pa_operation* paOperation = NULL;
+  ResetCallbackVariables();
+
+  PaLock();
+  for (int retries = 0; retries < kMaxRetryOnFailure && !_callbackValues;
+       retries ++) {
+  paOperation  = LATE(pa_context_get_source_info_by_index)(
+      _paContext, device_index, PaSourceInfoCallback, (void*) this);
+
+  WaitForOperationCompletion(paOperation);
+  }
+
+  PaUnLock();
+
+  if (!_callbackValues) {
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                 "GetSourceInfoByIndex error: %d",
+                 LATE(pa_context_errno)(_paContext));
+    return false;
+  }
+
+  return true;
+}
+
+}
+
diff --git a/src/modules/audio_device/main/source/linux/audio_mixer_manager_pulse_linux.h b/src/modules/audio_device/main/source/linux/audio_mixer_manager_pulse_linux.h
new file mode 100644
index 0000000..db0a559
--- /dev/null
+++ b/src/modules/audio_device/main/source/linux/audio_mixer_manager_pulse_linux.h
@@ -0,0 +1,118 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_MIXER_MANAGER_PULSE_LINUX_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_MIXER_MANAGER_PULSE_LINUX_H
+
+#include "typedefs.h"
+#include "audio_device.h"
+#include "critical_section_wrapper.h"
+#include "pulseaudiosymboltable_linux.h"
+
+#include <stdint.h>
+#include <pulse/pulseaudio.h>
+
+#ifndef UINT32_MAX
+#define UINT32_MAX  ((uint32_t)-1)
+#endif
+
+namespace webrtc
+{
+
+class AudioMixerManagerLinuxPulse
+{
+public:
+    WebRtc_Word32 SetPlayStream(pa_stream* playStream);
+    WebRtc_Word32 SetRecStream(pa_stream* recStream);
+    WebRtc_Word32 OpenSpeaker(WebRtc_UWord16 deviceIndex);
+    WebRtc_Word32 OpenMicrophone(WebRtc_UWord16 deviceIndex);
+    WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
+    WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
+    WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
+    WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
+    WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
+    WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
+    WebRtc_Word32 SetSpeakerMute(bool enable);
+    WebRtc_Word32 StereoPlayoutIsAvailable(bool& available);
+    WebRtc_Word32 StereoRecordingIsAvailable(bool& available);
+    WebRtc_Word32 SpeakerMute(bool& enabled) const;
+    WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneMute(bool enable);
+    WebRtc_Word32 MicrophoneMute(bool& enabled) const;
+    WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
+    WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
+    WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
+    WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
+    WebRtc_Word32 MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const;
+    WebRtc_Word32 SetPulseAudioObjects(pa_threaded_mainloop* mainloop,
+                                       pa_context* context);
+    WebRtc_Word32 Close();
+    WebRtc_Word32 CloseSpeaker();
+    WebRtc_Word32 CloseMicrophone();
+    bool SpeakerIsInitialized() const;
+    bool MicrophoneIsInitialized() const;
+
+public:
+    AudioMixerManagerLinuxPulse(const WebRtc_Word32 id);
+    ~AudioMixerManagerLinuxPulse();
+
+private:
+    static void PaSinkInfoCallback(pa_context *c, const pa_sink_info *i,
+                                   int eol, void *pThis);
+    static void PaSinkInputInfoCallback(pa_context *c,
+                                        const pa_sink_input_info *i, int eol,
+                                        void *pThis);
+    static void PaSourceInfoCallback(pa_context *c, const pa_source_info *i,
+                                     int eol, void *pThis);
+    static void
+        PaSetVolumeCallback(pa_context* /*c*/, int success, void* /*pThis*/);
+    void PaSinkInfoCallbackHandler(const pa_sink_info *i, int eol);
+    void PaSinkInputInfoCallbackHandler(const pa_sink_input_info *i, int eol);
+    void PaSourceInfoCallbackHandler(const pa_source_info *i, int eol);
+
+    void ResetCallbackVariables() const;
+    void WaitForOperationCompletion(pa_operation* paOperation) const;
+    void PaLock() const;
+    void PaUnLock() const;
+
+    bool GetSinkInputInfo() const;
+    bool GetSinkInfoByIndex(int device_index)const ;
+    bool GetSourceInfoByIndex(int device_index) const;
+
+private:
+    CriticalSectionWrapper& _critSect;
+    WebRtc_Word32 _id;
+    WebRtc_Word16 _paOutputDeviceIndex;
+    WebRtc_Word16 _paInputDeviceIndex;
+
+    pa_stream* _paPlayStream;
+    pa_stream* _paRecStream;
+
+    pa_threaded_mainloop* _paMainloop;
+    pa_context* _paContext;
+
+    mutable WebRtc_UWord32 _paVolume;
+    mutable WebRtc_UWord32 _paMute;
+    mutable WebRtc_UWord32 _paVolSteps;
+    bool _paSpeakerMute;
+    mutable WebRtc_UWord32 _paSpeakerVolume;
+    mutable WebRtc_UWord8 _paChannels;
+    bool _paObjectsSet;
+    mutable bool _callbackValues;
+};
+
+}
+
+#endif  // MODULES_AUDIO_DEVICE_MAIN_SOURCE_LINUX_AUDIO_MIXER_MANAGER_PULSE_LINUX_H_
diff --git a/src/modules/audio_device/main/source/linux/latebindingsymboltable_linux.cc b/src/modules/audio_device/main/source/linux/latebindingsymboltable_linux.cc
new file mode 100644
index 0000000..8f3c7c8
--- /dev/null
+++ b/src/modules/audio_device/main/source/linux/latebindingsymboltable_linux.cc
@@ -0,0 +1,116 @@
+/*
+ * libjingle
+ * Copyright 2004--2010, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "latebindingsymboltable_linux.h"
+
+#ifdef WEBRTC_LINUX
+#include <dlfcn.h>
+#endif
+
+// TODO(grunell): Either put inside webrtc namespace or use webrtc:: instead.
+using namespace webrtc;
+
+namespace webrtc_adm_linux {
+
+inline static const char *GetDllError() {
+#ifdef WEBRTC_LINUX
+  char *err = dlerror();
+  if (err) {
+    return err;
+  } else {
+    return "No error";
+  }
+#else
+#error Not implemented
+#endif
+}
+
+DllHandle InternalLoadDll(const char dll_name[]) {
+#ifdef WEBRTC_LINUX
+  DllHandle handle = dlopen(dll_name, RTLD_NOW);
+#else
+#error Not implemented
+#endif
+  if (handle == kInvalidDllHandle) {
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, -1,
+               "Can't load %s : %d", dll_name, GetDllError());
+  }
+  return handle;
+}
+
+void InternalUnloadDll(DllHandle handle) {
+#ifdef WEBRTC_LINUX
+  if (dlclose(handle) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+               "%d", GetDllError());
+  }
+#else
+#error Not implemented
+#endif
+}
+
+static bool LoadSymbol(DllHandle handle,
+                       const char *symbol_name,
+                       void **symbol) {
+#ifdef WEBRTC_LINUX
+  *symbol = dlsym(handle, symbol_name);
+  char *err = dlerror();
+  if (err) {
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+               "Error loading symbol %s : %d", symbol_name, err);
+    return false;
+  } else if (!*symbol) {
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+               "Symbol %s is NULL", symbol_name);
+    return false;
+  }
+  return true;
+#else
+#error Not implemented
+#endif
+}
+
+// This routine MUST assign SOME value for every symbol, even if that value is
+// NULL, or else some symbols may be left with uninitialized data that the
+// caller may later interpret as a valid address.
+bool InternalLoadSymbols(DllHandle handle,
+                         int num_symbols,
+                         const char *const symbol_names[],
+                         void *symbols[]) {
+#ifdef WEBRTC_LINUX
+  // Clear any old errors.
+  dlerror();
+#endif
+  for (int i = 0; i < num_symbols; ++i) {
+    if (!LoadSymbol(handle, symbol_names[i], &symbols[i])) {
+      return false;
+    }
+  }
+  return true;
+}
+
+}  // namespace webrtc_adm_linux
diff --git a/src/modules/audio_device/main/source/linux/latebindingsymboltable_linux.h b/src/modules/audio_device/main/source/linux/latebindingsymboltable_linux.h
new file mode 100644
index 0000000..91d25aa
--- /dev/null
+++ b/src/modules/audio_device/main/source/linux/latebindingsymboltable_linux.h
@@ -0,0 +1,195 @@
+/*
+ * libjingle
+ * Copyright 2004--2010, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_LATEBINDINGSYMBOLTABLE_LINUX_H
+#define WEBRTC_AUDIO_DEVICE_LATEBINDINGSYMBOLTABLE_LINUX_H
+
+#include <assert.h>
+#include <stddef.h>  // for NULL
+#include <string.h>
+
+#include "constructor_magic.h"
+#include "trace.h"
+
+// This file provides macros for creating "symbol table" classes to simplify the
+// dynamic loading of symbols from DLLs. Currently the implementation only
+// supports Linux and pure C symbols.
+// See talk/sound/pulseaudiosymboltable.(h|cc) for an example.
+
+namespace webrtc_adm_linux {
+
+#ifdef WEBRTC_LINUX
+typedef void *DllHandle;
+
+const DllHandle kInvalidDllHandle = NULL;
+#else
+#error Not implemented
+#endif
+
+// These are helpers for use only by the class below.
+DllHandle InternalLoadDll(const char dll_name[]);
+
+void InternalUnloadDll(DllHandle handle);
+
+bool InternalLoadSymbols(DllHandle handle,
+                         int num_symbols,
+                         const char *const symbol_names[],
+                         void *symbols[]);
+
+template <int SYMBOL_TABLE_SIZE,
+          const char kDllName[],
+          const char *const kSymbolNames[]>
+class LateBindingSymbolTable {
+ public:
+  LateBindingSymbolTable()
+      : handle_(kInvalidDllHandle),
+        undefined_symbols_(false) {
+    memset(symbols_, 0, sizeof(symbols_));
+  }
+
+  ~LateBindingSymbolTable() {
+    Unload();
+  }
+
+  static int NumSymbols() {
+    return SYMBOL_TABLE_SIZE;
+  }
+
+  // We do not use this, but we offer it for theoretical convenience.
+  static const char *GetSymbolName(int index) {
+    assert(index < NumSymbols());
+    return kSymbolNames[index];
+  }
+
+  bool IsLoaded() const {
+    return handle_ != kInvalidDllHandle;
+  }
+
+  // Loads the DLL and the symbol table. Returns true iff the DLL and symbol
+  // table loaded successfully.
+  bool Load() {
+    if (IsLoaded()) {
+      return true;
+    }
+    if (undefined_symbols_) {
+      // We do not attempt to load again because repeated attempts are not
+      // likely to succeed and DLL loading is costly.
+      //WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+      //           "We know there are undefined symbols");
+      return false;
+    }
+    handle_ = InternalLoadDll(kDllName);
+    if (!IsLoaded()) {
+      return false;
+    }
+    if (!InternalLoadSymbols(handle_, NumSymbols(), kSymbolNames, symbols_)) {
+      undefined_symbols_ = true;
+      Unload();
+      return false;
+    }
+    return true;
+  }
+
+  void Unload() {
+    if (!IsLoaded()) {
+      return;
+    }
+    InternalUnloadDll(handle_);
+    handle_ = kInvalidDllHandle;
+    memset(symbols_, 0, sizeof(symbols_));
+  }
+
+  // Retrieves the given symbol. NOTE: Recommended to use LATESYM_GET below
+  // instead of this.
+  void *GetSymbol(int index) const {
+    assert(IsLoaded());
+    assert(index < NumSymbols());
+    return symbols_[index];
+  }
+
+ private:
+  DllHandle handle_;
+  bool undefined_symbols_;
+  void *symbols_[SYMBOL_TABLE_SIZE];
+
+  DISALLOW_COPY_AND_ASSIGN(LateBindingSymbolTable);
+};
+
+// This macro must be invoked in a header to declare a symbol table class.
+#define LATE_BINDING_SYMBOL_TABLE_DECLARE_BEGIN(ClassName) \
+enum {
+
+// This macro must be invoked in the header declaration once for each symbol
+// (recommended to use an X-Macro to avoid duplication).
+// This macro defines an enum with names built from the symbols, which
+// essentially creates a hash table in the compiler from symbol names to their
+// indices in the symbol table class.
+#define LATE_BINDING_SYMBOL_TABLE_DECLARE_ENTRY(ClassName, sym) \
+  ClassName##_SYMBOL_TABLE_INDEX_##sym,
+
+// This macro completes the header declaration.
+#define LATE_BINDING_SYMBOL_TABLE_DECLARE_END(ClassName) \
+  ClassName##_SYMBOL_TABLE_SIZE \
+}; \
+\
+extern const char ClassName##_kDllName[]; \
+extern const char *const \
+    ClassName##_kSymbolNames[ClassName##_SYMBOL_TABLE_SIZE]; \
+\
+typedef ::webrtc_adm_linux::LateBindingSymbolTable<ClassName##_SYMBOL_TABLE_SIZE, \
+                                            ClassName##_kDllName, \
+                                            ClassName##_kSymbolNames> \
+    ClassName;
+
+// This macro must be invoked in a .cc file to define a previously-declared
+// symbol table class.
+#define LATE_BINDING_SYMBOL_TABLE_DEFINE_BEGIN(ClassName, dllName) \
+const char ClassName##_kDllName[] = dllName; \
+const char *const ClassName##_kSymbolNames[ClassName##_SYMBOL_TABLE_SIZE] = {
+
+// This macro must be invoked in the .cc definition once for each symbol
+// (recommended to use an X-Macro to avoid duplication).
+// This would have to use the mangled name if we were to ever support C++
+// symbols.
+#define LATE_BINDING_SYMBOL_TABLE_DEFINE_ENTRY(ClassName, sym) \
+  #sym,
+
+#define LATE_BINDING_SYMBOL_TABLE_DEFINE_END(ClassName) \
+};
+
+// Index of a given symbol in the given symbol table class.
+#define LATESYM_INDEXOF(ClassName, sym) \
+  (ClassName##_SYMBOL_TABLE_INDEX_##sym)
+
+// Returns a reference to the given late-binded symbol, with the correct type.
+#define LATESYM_GET(ClassName, inst, sym) \
+  (*reinterpret_cast<typeof(&sym)>( \
+      (inst)->GetSymbol(LATESYM_INDEXOF(ClassName, sym))))
+
+}  // namespace webrtc_adm_linux
+
+#endif  // WEBRTC_ADM_LATEBINDINGSYMBOLTABLE_LINUX_H
diff --git a/src/modules/audio_device/main/source/linux/pulseaudiosymboltable_linux.cc b/src/modules/audio_device/main/source/linux/pulseaudiosymboltable_linux.cc
new file mode 100644
index 0000000..ae663f7
--- /dev/null
+++ b/src/modules/audio_device/main/source/linux/pulseaudiosymboltable_linux.cc
@@ -0,0 +1,39 @@
+/*
+ * libjingle
+ * Copyright 2004--2010, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "pulseaudiosymboltable_linux.h"
+
+namespace webrtc_adm_linux_pulse {
+
+LATE_BINDING_SYMBOL_TABLE_DEFINE_BEGIN(PulseAudioSymbolTable, "libpulse.so.0")
+#define X(sym) \
+    LATE_BINDING_SYMBOL_TABLE_DEFINE_ENTRY(PulseAudioSymbolTable, sym)
+PULSE_AUDIO_SYMBOLS_LIST
+#undef X
+LATE_BINDING_SYMBOL_TABLE_DEFINE_END(PulseAudioSymbolTable)
+
+}  // namespace webrtc_adm_linux_pulse
diff --git a/src/modules/audio_device/main/source/linux/pulseaudiosymboltable_linux.h b/src/modules/audio_device/main/source/linux/pulseaudiosymboltable_linux.h
new file mode 100644
index 0000000..049509b
--- /dev/null
+++ b/src/modules/audio_device/main/source/linux/pulseaudiosymboltable_linux.h
@@ -0,0 +1,104 @@
+/*
+ * libjingle
+ * Copyright 2004--2010, Google Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are met:
+ *
+ *  1. Redistributions of source code must retain the above copyright notice,
+ *     this list of conditions and the following disclaimer.
+ *  2. Redistributions in binary form must reproduce the above copyright notice,
+ *     this list of conditions and the following disclaimer in the documentation
+ *     and/or other materials provided with the distribution.
+ *  3. The name of the author may not be used to endorse or promote products
+ *     derived from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+ * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
+ * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+ * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+ * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_PULSEAUDIOSYMBOLTABLE_LINUX_H
+#define WEBRTC_AUDIO_DEVICE_PULSEAUDIOSYMBOLTABLE_LINUX_H
+
+#include "latebindingsymboltable_linux.h"
+
+namespace webrtc_adm_linux_pulse {
+
+// The PulseAudio symbols we need, as an X-Macro list.
+// This list must contain precisely every libpulse function that is used in
+// the ADM LINUX PULSE Device and Mixer classes
+#define PULSE_AUDIO_SYMBOLS_LIST \
+  X(pa_bytes_per_second) \
+  X(pa_context_connect) \
+  X(pa_context_disconnect) \
+  X(pa_context_errno) \
+  X(pa_context_get_protocol_version) \
+  X(pa_context_get_server_info) \
+  X(pa_context_get_sink_info_list) \
+  X(pa_context_get_sink_info_by_index) \
+  X(pa_context_get_sink_info_by_name) \
+  X(pa_context_get_sink_input_info) \
+  X(pa_context_get_source_info_by_index) \
+  X(pa_context_get_source_info_by_name) \
+  X(pa_context_get_source_info_list) \
+  X(pa_context_get_state) \
+  X(pa_context_new) \
+  X(pa_context_set_sink_input_volume) \
+  X(pa_context_set_sink_input_mute) \
+  X(pa_context_set_source_volume_by_index) \
+  X(pa_context_set_source_mute_by_index) \
+  X(pa_context_set_state_callback) \
+  X(pa_context_unref) \
+  X(pa_cvolume_set) \
+  X(pa_operation_get_state) \
+  X(pa_operation_unref) \
+  X(pa_stream_connect_playback) \
+  X(pa_stream_connect_record) \
+  X(pa_stream_disconnect) \
+  X(pa_stream_drop) \
+  X(pa_stream_get_device_index) \
+  X(pa_stream_get_index) \
+  X(pa_stream_get_latency) \
+  X(pa_stream_get_sample_spec) \
+  X(pa_stream_get_state) \
+  X(pa_stream_new) \
+  X(pa_stream_peek) \
+  X(pa_stream_readable_size) \
+  X(pa_stream_set_buffer_attr) \
+  X(pa_stream_set_overflow_callback) \
+  X(pa_stream_set_read_callback) \
+  X(pa_stream_set_state_callback) \
+  X(pa_stream_set_underflow_callback) \
+  X(pa_stream_set_write_callback) \
+  X(pa_stream_unref) \
+  X(pa_stream_writable_size) \
+  X(pa_stream_write) \
+  X(pa_strerror) \
+  X(pa_threaded_mainloop_free) \
+  X(pa_threaded_mainloop_get_api) \
+  X(pa_threaded_mainloop_lock) \
+  X(pa_threaded_mainloop_new) \
+  X(pa_threaded_mainloop_signal) \
+  X(pa_threaded_mainloop_start) \
+  X(pa_threaded_mainloop_stop) \
+  X(pa_threaded_mainloop_unlock) \
+  X(pa_threaded_mainloop_wait)
+
+LATE_BINDING_SYMBOL_TABLE_DECLARE_BEGIN(PulseAudioSymbolTable)
+#define X(sym) \
+    LATE_BINDING_SYMBOL_TABLE_DECLARE_ENTRY(PulseAudioSymbolTable, sym)
+PULSE_AUDIO_SYMBOLS_LIST
+#undef X
+LATE_BINDING_SYMBOL_TABLE_DECLARE_END(PulseAudioSymbolTable)
+
+}  // namespace webrtc_adm_linux_pulse
+
+#endif  // WEBRTC_AUDIO_DEVICE_PULSEAUDIOSYMBOLTABLE_LINUX_H
diff --git a/src/modules/audio_device/main/source/mac/audio_device_mac.cc b/src/modules/audio_device/main/source/mac/audio_device_mac.cc
new file mode 100644
index 0000000..ecc95d7
--- /dev/null
+++ b/src/modules/audio_device/main/source/mac/audio_device_mac.cc
@@ -0,0 +1,3238 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_device_utility.h"
+#include "audio_device_mac.h"
+#include "audio_device_config.h"
+
+#include "event_wrapper.h"
+#include "trace.h"
+#include "thread_wrapper.h"
+
+#include <cassert>
+
+#include <sys/sysctl.h>         // sysctlbyname()
+#include <mach/mach.h>          // mach_task_self()
+#include <libkern/OSAtomic.h>   // OSAtomicCompareAndSwap()
+#include "portaudio/pa_ringbuffer.h"
+
+namespace webrtc
+{
+
+#define WEBRTC_CA_RETURN_ON_ERR(expr)                                   \
+    do {                                                                \
+        err = expr;                                                     \
+        if (err != noErr) {                                             \
+            logCAMsg(kTraceError, kTraceAudioDevice, _id,               \
+                "Error in " #expr, (const char *)&err);                 \
+            return -1;                                                  \
+        }                                                               \
+    } while(0)
+
+#define WEBRTC_CA_LOG_ERR(expr)                                         \
+    do {                                                                \
+        err = expr;                                                     \
+        if (err != noErr) {                                             \
+            logCAMsg(kTraceError, kTraceAudioDevice, _id,               \
+                "Error in " #expr, (const char *)&err);                 \
+        }                                                               \
+    } while(0)
+
+#define WEBRTC_CA_LOG_WARN(expr)                                        \
+    do {                                                                \
+        err = expr;                                                     \
+        if (err != noErr) {                                             \
+            logCAMsg(kTraceWarning, kTraceAudioDevice, _id,             \
+                "Error in " #expr, (const char *)&err);                 \
+        }                                                               \
+    } while(0)
+
+enum
+{
+    MaxNumberDevices = 64
+};
+
+void AudioDeviceMac::AtomicSet32(int32_t* theValue, int32_t newValue)
+{
+    while (1)
+    {
+        int32_t oldValue = *theValue;
+        if (OSAtomicCompareAndSwap32Barrier(oldValue, newValue, theValue)
+            == true)
+        {
+            return;
+        }
+    }
+}
+
+int32_t AudioDeviceMac::AtomicGet32(int32_t* theValue)
+{
+    while (1)
+    {
+        WebRtc_Word32 value = *theValue;
+        if (OSAtomicCompareAndSwap32Barrier(value, value, theValue) == true)
+        {
+            return value;
+        }
+    }
+}
+
+// CoreAudio errors are best interpreted as four character strings.
+void AudioDeviceMac::logCAMsg(const TraceLevel level,
+                              const TraceModule module,
+                              const WebRtc_Word32 id, const char *msg,
+                              const char *err)
+{
+    assert(msg != NULL);
+    assert(err != NULL);
+
+#ifdef WEBRTC_BIG_ENDIAN
+    WEBRTC_TRACE(level, module, id, "%s: %.4s", msg, err);
+#else
+    // We need to flip the characters in this case.
+    WEBRTC_TRACE(level, module, id, "%s: %.1s%.1s%.1s%.1s", msg, err + 3, err
+        + 2, err + 1, err);
+#endif
+}
+
+AudioDeviceMac::AudioDeviceMac(const WebRtc_Word32 id) :
+    _ptrAudioBuffer(NULL),
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _stopEventRec(*EventWrapper::Create()),
+    _stopEvent(*EventWrapper::Create()),
+    _captureWorkerThread(NULL),
+    _renderWorkerThread(NULL),
+    _captureWorkerThreadId(0),
+    _renderWorkerThreadId(0),
+    _id(id),
+    _mixerManager(id),
+    _inputDeviceIndex(0),
+    _outputDeviceIndex(0),
+    _inputDeviceID(kAudioObjectUnknown),
+    _outputDeviceID(kAudioObjectUnknown),
+    _inputDeviceIsSpecified(false),
+    _outputDeviceIsSpecified(false),
+    _recChannels(N_REC_CHANNELS),
+    _playChannels(N_PLAY_CHANNELS),
+    _captureBufData(NULL),
+    _renderBufData(NULL),
+    _playBufType(AudioDeviceModule::kFixedBufferSize),
+    _initialized(false),
+    _isShutDown(false),
+    _recording(false),
+    _playing(false),
+    _recIsInitialized(false),
+    _playIsInitialized(false),
+    _AGC(false),
+    _renderDeviceIsAlive(1),
+    _captureDeviceIsAlive(1),
+    _twoDevices(true),
+    _doStop(false),
+    _doStopRec(false),
+    _macBookPro(false),
+    _macBookProPanRight(false),
+    _captureLatencyUs(0),
+    _renderLatencyUs(0),
+    _captureDelayUs(0),
+    _renderDelayUs(0),
+    _renderDelayOffsetSamples(0),
+    _playBufDelayFixed(20),
+    _playWarning(0),
+    _playError(0),
+    _recWarning(0),
+    _recError(0),
+    _paCaptureBuffer(NULL),
+    _paRenderBuffer(NULL),
+    _captureBufSizeSamples(0),
+    _renderBufSizeSamples(0)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id,
+                 "%s created", __FUNCTION__);
+
+    assert(&_stopEvent != NULL);
+    assert(&_stopEventRec != NULL);
+
+    memset(_renderConvertData, 0, sizeof(_renderConvertData));
+    memset(&_outStreamFormat, 0, sizeof(AudioStreamBasicDescription));
+    memset(&_outDesiredFormat, 0, sizeof(AudioStreamBasicDescription));
+    memset(&_inStreamFormat, 0, sizeof(AudioStreamBasicDescription));
+    memset(&_inDesiredFormat, 0, sizeof(AudioStreamBasicDescription));
+}
+
+
+AudioDeviceMac::~AudioDeviceMac()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s destroyed", __FUNCTION__);
+
+    if (!_isShutDown)
+    {
+        Terminate();
+    }
+
+    if (_captureWorkerThread)
+    {
+        delete _captureWorkerThread;
+        _captureWorkerThread = NULL;
+    }
+
+    if (_renderWorkerThread)
+    {
+        delete _renderWorkerThread;
+        _renderWorkerThread = NULL;
+    }
+
+    if (_paRenderBuffer)
+    {
+        delete _paRenderBuffer;
+        _paRenderBuffer = NULL;
+    }
+
+    if (_paCaptureBuffer)
+    {
+        delete _paCaptureBuffer;
+        _paCaptureBuffer = NULL;
+    }
+
+    if (_renderBufData)
+    {
+        delete[] _renderBufData;
+        _renderBufData = NULL;
+    }
+
+    if (_captureBufData)
+    {
+        delete[] _captureBufData;
+        _captureBufData = NULL;
+    }
+
+    kern_return_t kernErr = KERN_SUCCESS;
+    kernErr = semaphore_destroy(mach_task_self(), _renderSemaphore);
+    if (kernErr != KERN_SUCCESS)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     " semaphore_destroy() error: %d", kernErr);
+    }
+
+    kernErr = semaphore_destroy(mach_task_self(), _captureSemaphore);
+    if (kernErr != KERN_SUCCESS)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     " semaphore_destroy() error: %d", kernErr);
+    }
+
+    delete &_stopEvent;
+    delete &_stopEventRec;
+    delete &_critSect;
+}
+
+// ============================================================================
+//                                     API
+// ============================================================================
+
+void AudioDeviceMac::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer)
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    _ptrAudioBuffer = audioBuffer;
+
+    // inform the AudioBuffer about default settings for this implementation
+    _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
+    _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);
+    _ptrAudioBuffer->SetRecordingChannels(N_REC_CHANNELS);
+    _ptrAudioBuffer->SetPlayoutChannels(N_PLAY_CHANNELS);
+}
+
+WebRtc_Word32 AudioDeviceMac::ActiveAudioLayer(
+    AudioDeviceModule::AudioLayer& audioLayer) const
+{
+    audioLayer = AudioDeviceModule::kPlatformDefaultAudio;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::Init()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_initialized)
+    {
+        return 0;
+    }
+
+    OSStatus err = noErr;
+
+    _isShutDown = false;
+
+    // PortAudio ring buffers require an elementCount which is a power of two.
+    if (_renderBufData == NULL)
+    {
+        UInt32 powerOfTwo = 1;
+        while (powerOfTwo < PLAY_BUF_SIZE_IN_SAMPLES)
+        {
+            powerOfTwo <<= 1;
+        }
+        _renderBufSizeSamples = powerOfTwo;
+        _renderBufData = new SInt16[_renderBufSizeSamples];
+    }
+
+    if (_paRenderBuffer == NULL)
+    {
+        _paRenderBuffer = new PaUtilRingBuffer;
+        ring_buffer_size_t bufSize = -1;
+        bufSize = PaUtil_InitializeRingBuffer(_paRenderBuffer, sizeof(SInt16),
+                                              _renderBufSizeSamples,
+                                              _renderBufData);
+        if (bufSize == -1)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,
+                         _id, " PaUtil_InitializeRingBuffer() error");
+            return -1;
+        }
+    }
+
+    if (_captureBufData == NULL)
+    {
+        UInt32 powerOfTwo = 1;
+        while (powerOfTwo < REC_BUF_SIZE_IN_SAMPLES)
+        {
+            powerOfTwo <<= 1;
+        }
+        _captureBufSizeSamples = powerOfTwo;
+        _captureBufData = new Float32[_captureBufSizeSamples];
+    }
+
+    if (_paCaptureBuffer == NULL)
+    {
+        _paCaptureBuffer = new PaUtilRingBuffer;
+        ring_buffer_size_t bufSize = -1;
+        bufSize = PaUtil_InitializeRingBuffer(_paCaptureBuffer,
+                                              sizeof(Float32),
+                                              _captureBufSizeSamples,
+                                              _captureBufData);
+        if (bufSize == -1)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,
+                         _id, " PaUtil_InitializeRingBuffer() error");
+            return -1;
+        }
+    }
+
+    if (_renderWorkerThread == NULL)
+    {
+        _renderWorkerThread
+            = ThreadWrapper::CreateThread(RunRender, this, kRealtimePriority,
+                                          "RenderWorkerThread");
+        if (_renderWorkerThread == NULL)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,
+                         _id, " Render CreateThread() error");
+            return -1;
+        }
+    }
+
+    if (_captureWorkerThread == NULL)
+    {
+        _captureWorkerThread
+            = ThreadWrapper::CreateThread(RunCapture, this, kRealtimePriority,
+                                          "CaptureWorkerThread");
+        if (_captureWorkerThread == NULL)
+        {
+            WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice,
+                         _id, " Capture CreateThread() error");
+            return -1;
+        }
+    }
+
+    kern_return_t kernErr = KERN_SUCCESS;
+    kernErr = semaphore_create(mach_task_self(), &_renderSemaphore,
+                               SYNC_POLICY_FIFO, 0);
+    if (kernErr != KERN_SUCCESS)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     " semaphore_create() error: %d", kernErr);
+        return -1;
+    }
+
+    kernErr = semaphore_create(mach_task_self(), &_captureSemaphore,
+                               SYNC_POLICY_FIFO, 0);
+    if (kernErr != KERN_SUCCESS)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     " semaphore_create() error: %d", kernErr);
+        return -1;
+    }
+
+    // Setting RunLoop to NULL here instructs HAL to manage its own thread for 
+    // notifications. This was the default behaviour on OS X 10.5 and earlier, but now 
+    // must be explicitly specified. HAL would otherwise try to use the main thread to
+    // issue notifications.
+    AudioObjectPropertyAddress propertyAddress = {
+            kAudioHardwarePropertyRunLoop,
+            kAudioObjectPropertyScopeGlobal,
+            kAudioObjectPropertyElementMaster };
+    CFRunLoopRef runLoop = NULL;
+    UInt32 size = sizeof(CFRunLoopRef);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(kAudioObjectSystemObject,
+            &propertyAddress, 0, NULL, size, &runLoop));
+
+    // Listen for any device changes.
+    propertyAddress.mSelector = kAudioHardwarePropertyDevices;
+    WEBRTC_CA_LOG_ERR(AudioObjectAddPropertyListener(kAudioObjectSystemObject,
+            &propertyAddress, &objectListenerProc, this));
+
+    // Determine if this is a MacBook Pro
+    _macBookPro = false;
+    _macBookProPanRight = false;
+    char buf[128];
+    size_t length = sizeof(buf);
+    memset(buf, 0, length);
+
+    int intErr = sysctlbyname("hw.model", buf, &length, NULL, 0);
+    if (intErr != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     " Error in sysctlbyname(): %d", err);
+    } else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     " Hardware model: %s", buf);
+        if (strncmp(buf, "MacBookPro", 10) == 0)
+        {
+            _macBookPro = true;
+        }
+    }
+
+    _playWarning = 0;
+    _playError = 0;
+    _recWarning = 0;
+    _recError = 0;
+
+    _initialized = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::Terminate()
+{
+
+    if (!_initialized)
+    {
+        return 0;
+    }
+
+    if (_recording)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     " Recording must be stopped");
+        return -1;
+    }
+
+    if (_playing)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     " Playback must be stopped");
+        return -1;
+    }
+
+    _critSect.Enter();
+
+    _mixerManager.Close();
+
+    OSStatus err = noErr;
+    int retVal = 0;
+
+    AudioObjectPropertyAddress propertyAddress = {
+            kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal,
+            kAudioObjectPropertyElementMaster };
+    WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(kAudioObjectSystemObject,
+            &propertyAddress, &objectListenerProc, this));
+
+    err = AudioHardwareUnload();
+    if (err != noErr)
+    {
+        logCAMsg(kTraceError, kTraceAudioDevice, _id,
+                 "Error in AudioHardwareUnload()", (const char*) &err);
+        retVal = -1;
+    }
+
+    _critSect.Leave();
+
+    _isShutDown = true;
+    _initialized = false;
+    _outputDeviceIsSpecified = false;
+    _inputDeviceIsSpecified = false;
+
+    return retVal;
+}
+
+bool AudioDeviceMac::Initialized() const
+{
+    return (_initialized);
+}
+
+WebRtc_Word32 AudioDeviceMac::SpeakerIsAvailable(bool& available)
+{
+
+    bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+
+    // Make an attempt to open up the
+    // output mixer corresponding to the currently selected output device.
+    //
+    if (!wasInitialized && InitSpeaker() == -1)
+    {
+        available = false;
+        return 0;
+    }
+
+    // Given that InitSpeaker was successful, we know that a valid speaker exists
+    // 
+    available = true;
+
+    // Close the initialized output mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseSpeaker();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::InitSpeaker()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_playing)
+    {
+        return -1;
+    }
+
+    if (InitDevice(_outputDeviceIndex, _outputDeviceID, false) == -1)
+    {
+        return -1;
+    }
+
+    if (_inputDeviceID == _outputDeviceID)
+    {
+        _twoDevices = false;
+    } else
+    {
+        _twoDevices = true;
+    }
+
+    if (_mixerManager.OpenSpeaker(_outputDeviceID) == -1)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::MicrophoneIsAvailable(bool& available)
+{
+
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Make an attempt to open up the
+    // input mixer corresponding to the currently selected output device.
+    //
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        available = false;
+        return 0;
+    }
+
+    // Given that InitMicrophone was successful, we know that a valid microphone exists
+    // 
+    available = true;
+
+    // Close the initialized input mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+
+WebRtc_Word32 AudioDeviceMac::InitMicrophone()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_recording)
+    {
+        return -1;
+    }
+
+    if (InitDevice(_inputDeviceIndex, _inputDeviceID, true) == -1)
+    {
+        return -1;
+    }
+
+    if (_inputDeviceID == _outputDeviceID)
+    {
+        _twoDevices = false;
+    } else
+    {
+        _twoDevices = true;
+    }
+
+    if (_mixerManager.OpenMicrophone(_inputDeviceID) == -1)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+bool AudioDeviceMac::SpeakerIsInitialized() const
+{
+    return (_mixerManager.SpeakerIsInitialized());
+}
+
+bool AudioDeviceMac::MicrophoneIsInitialized() const
+{
+    return (_mixerManager.MicrophoneIsInitialized());
+}
+
+WebRtc_Word32 AudioDeviceMac::SpeakerVolumeIsAvailable(bool& available)
+{
+
+    bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+
+    // Make an attempt to open up the
+    // output mixer corresponding to the currently selected output device.
+    //
+    if (!wasInitialized && InitSpeaker() == -1)
+    {
+        // If we end up here it means that the selected speaker has no volume
+        // control.
+        available = false;
+        return 0;
+    }
+
+    // Given that InitSpeaker was successful, we know that a volume control exists
+    //
+    available = true;
+
+    // Close the initialized output mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseSpeaker();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::SetSpeakerVolume(WebRtc_UWord32 volume)
+{
+
+    return (_mixerManager.SetSpeakerVolume(volume));
+}
+
+WebRtc_Word32 AudioDeviceMac::SpeakerVolume(WebRtc_UWord32& volume) const
+{
+
+    WebRtc_UWord32 level(0);
+
+    if (_mixerManager.SpeakerVolume(level) == -1)
+    {
+        return -1;
+    }
+
+    volume = level;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::SetWaveOutVolume(WebRtc_UWord16 volumeLeft,
+                                               WebRtc_UWord16 volumeRight)
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32
+AudioDeviceMac::WaveOutVolume(WebRtc_UWord16& /*volumeLeft*/,
+                              WebRtc_UWord16& /*volumeRight*/) const
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceMac::MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const
+{
+
+    WebRtc_UWord32 maxVol(0);
+
+    if (_mixerManager.MaxSpeakerVolume(maxVol) == -1)
+    {
+        return -1;
+    }
+
+    maxVolume = maxVol;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::MinSpeakerVolume(WebRtc_UWord32& minVolume) const
+{
+
+    WebRtc_UWord32 minVol(0);
+
+    if (_mixerManager.MinSpeakerVolume(minVol) == -1)
+    {
+        return -1;
+    }
+
+    minVolume = minVol;
+    return 0;
+}
+
+WebRtc_Word32
+AudioDeviceMac::SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+
+    WebRtc_UWord16 delta(0);
+
+    if (_mixerManager.SpeakerVolumeStepSize(delta) == -1)
+    {
+        return -1;
+    }
+
+    stepSize = delta;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::SpeakerMuteIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+    bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+
+    // Make an attempt to open up the
+    // output mixer corresponding to the currently selected output device.
+    //
+    if (!wasInitialized && InitSpeaker() == -1)
+    {
+        // If we end up here it means that the selected speaker has no volume
+        // control, hence it is safe to state that there is no mute control
+        // already at this stage.
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected speaker has a mute control
+    //
+    _mixerManager.SpeakerMuteIsAvailable(isAvailable);
+
+    available = isAvailable;
+
+    // Close the initialized output mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseSpeaker();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::SetSpeakerMute(bool enable)
+{
+    return (_mixerManager.SetSpeakerMute(enable));
+}
+
+WebRtc_Word32 AudioDeviceMac::SpeakerMute(bool& enabled) const
+{
+
+    bool muted(0);
+
+    if (_mixerManager.SpeakerMute(muted) == -1)
+    {
+        return -1;
+    }
+
+    enabled = muted;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::MicrophoneMuteIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Make an attempt to open up the
+    // input mixer corresponding to the currently selected input device.
+    //
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        // If we end up here it means that the selected microphone has no volume
+        // control, hence it is safe to state that there is no boost control
+        // already at this stage.
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected microphone has a mute control
+    //
+    _mixerManager.MicrophoneMuteIsAvailable(isAvailable);
+    available = isAvailable;
+
+    // Close the initialized input mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::SetMicrophoneMute(bool enable)
+{
+    return (_mixerManager.SetMicrophoneMute(enable));
+}
+
+WebRtc_Word32 AudioDeviceMac::MicrophoneMute(bool& enabled) const
+{
+
+    bool muted(0);
+
+    if (_mixerManager.MicrophoneMute(muted) == -1)
+    {
+        return -1;
+    }
+
+    enabled = muted;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::MicrophoneBoostIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Enumerate all avaliable microphone and make an attempt to open up the
+    // input mixer corresponding to the currently selected input device.
+    //
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        // If we end up here it means that the selected microphone has no volume
+        // control, hence it is safe to state that there is no boost control
+        // already at this stage.
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected microphone has a boost control
+    //
+    _mixerManager.MicrophoneBoostIsAvailable(isAvailable);
+    available = isAvailable;
+
+    // Close the initialized input mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::SetMicrophoneBoost(bool enable)
+{
+
+    return (_mixerManager.SetMicrophoneBoost(enable));
+}
+
+WebRtc_Word32 AudioDeviceMac::MicrophoneBoost(bool& enabled) const
+{
+
+    bool onOff(0);
+
+    if (_mixerManager.MicrophoneBoost(onOff) == -1)
+    {
+        return -1;
+    }
+
+    enabled = onOff;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::StereoRecordingIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        // Cannot open the specified device
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected microphone can record stereo
+    //
+    _mixerManager.StereoRecordingIsAvailable(isAvailable);
+    available = isAvailable;
+
+    // Close the initialized input mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::SetStereoRecording(bool enable)
+{
+
+    if (enable)
+        _recChannels = 2;
+    else
+        _recChannels = 1;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::StereoRecording(bool& enabled) const
+{
+
+    if (_recChannels == 2)
+        enabled = true;
+    else
+        enabled = false;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::StereoPlayoutIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+    bool wasInitialized = _mixerManager.SpeakerIsInitialized();
+
+    if (!wasInitialized && InitSpeaker() == -1)
+    {
+        // Cannot open the specified device
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected microphone can record stereo
+    //
+    _mixerManager.StereoPlayoutIsAvailable(isAvailable);
+    available = isAvailable;
+
+    // Close the initialized input mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseSpeaker();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::SetStereoPlayout(bool enable)
+{
+
+    if (enable)
+        _playChannels = 2;
+    else
+        _playChannels = 1;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::StereoPlayout(bool& enabled) const
+{
+
+    if (_playChannels == 2)
+        enabled = true;
+    else
+        enabled = false;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::SetAGC(bool enable)
+{
+
+    _AGC = enable;
+
+    return 0;
+}
+
+bool AudioDeviceMac::AGC() const
+{
+
+    return _AGC;
+}
+
+WebRtc_Word32 AudioDeviceMac::MicrophoneVolumeIsAvailable(bool& available)
+{
+
+    bool wasInitialized = _mixerManager.MicrophoneIsInitialized();
+
+    // Make an attempt to open up the
+    // input mixer corresponding to the currently selected output device.
+    //
+    if (!wasInitialized && InitMicrophone() == -1)
+    {
+        // If we end up here it means that the selected microphone has no volume
+        // control.
+        available = false;
+        return 0;
+    }
+
+    // Given that InitMicrophone was successful, we know that a volume control
+    // exists
+    //
+    available = true;
+
+    // Close the initialized input mixer
+    //
+    if (!wasInitialized)
+    {
+        _mixerManager.CloseMicrophone();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::SetMicrophoneVolume(WebRtc_UWord32 volume)
+{
+
+    return (_mixerManager.SetMicrophoneVolume(volume));
+}
+
+WebRtc_Word32 AudioDeviceMac::MicrophoneVolume(WebRtc_UWord32& volume) const
+{
+
+    WebRtc_UWord32 level(0);
+
+    if (_mixerManager.MicrophoneVolume(level) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  failed to retrive current microphone level");
+        return -1;
+    }
+
+    volume = level;
+    return 0;
+}
+
+WebRtc_Word32
+AudioDeviceMac::MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const
+{
+
+    WebRtc_UWord32 maxVol(0);
+
+    if (_mixerManager.MaxMicrophoneVolume(maxVol) == -1)
+    {
+        return -1;
+    }
+
+    maxVolume = maxVol;
+    return 0;
+}
+
+WebRtc_Word32
+AudioDeviceMac::MinMicrophoneVolume(WebRtc_UWord32& minVolume) const
+{
+
+    WebRtc_UWord32 minVol(0);
+
+    if (_mixerManager.MinMicrophoneVolume(minVol) == -1)
+    {
+        return -1;
+    }
+
+    minVolume = minVol;
+    return 0;
+}
+
+WebRtc_Word32
+AudioDeviceMac::MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+
+    WebRtc_UWord16 delta(0);
+
+    if (_mixerManager.MicrophoneVolumeStepSize(delta) == -1)
+    {
+        return -1;
+    }
+
+    stepSize = delta;
+    return 0;
+}
+
+WebRtc_Word16 AudioDeviceMac::PlayoutDevices()
+{
+
+    AudioDeviceID playDevices[MaxNumberDevices];
+    return GetNumberDevices(kAudioDevicePropertyScopeOutput, playDevices,
+                            MaxNumberDevices);
+}
+
+WebRtc_Word32 AudioDeviceMac::SetPlayoutDevice(WebRtc_UWord16 index)
+{
+
+    if (_playIsInitialized)
+    {
+        return -1;
+    }
+
+    AudioDeviceID playDevices[MaxNumberDevices];
+    WebRtc_UWord32 nDevices = GetNumberDevices(kAudioDevicePropertyScopeOutput,
+                                               playDevices, MaxNumberDevices);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "  number of availiable waveform-audio output devices is %u",
+                 nDevices);
+
+    if (index > (nDevices - 1))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  device index is out of range [0,%u]", (nDevices - 1));
+        return -1;
+    }
+
+    _outputDeviceIndex = index;
+    _outputDeviceIsSpecified = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::SetPlayoutDevice(
+    AudioDeviceModule::WindowsDeviceType /*device*/)
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                 "WindowsDeviceType not supported");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceMac::PlayoutDeviceName(
+    WebRtc_UWord16 index,
+    char name[kAdmMaxDeviceNameSize],
+    char guid[kAdmMaxGuidSize])
+{
+
+    const WebRtc_UWord16 nDevices(PlayoutDevices());
+
+    if ((index > (nDevices - 1)) || (name == NULL))
+    {
+        return -1;
+    }
+
+    memset(name, 0, kAdmMaxDeviceNameSize);
+
+    if (guid != NULL)
+    {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+
+    return GetDeviceName(kAudioDevicePropertyScopeOutput, index, name);
+}
+
+WebRtc_Word32 AudioDeviceMac::RecordingDeviceName(
+    WebRtc_UWord16 index,
+    char name[kAdmMaxDeviceNameSize],
+    char guid[kAdmMaxGuidSize])
+{
+
+    const WebRtc_UWord16 nDevices(RecordingDevices());
+
+    if ((index > (nDevices - 1)) || (name == NULL))
+    {
+        return -1;
+    }
+
+    memset(name, 0, kAdmMaxDeviceNameSize);
+
+    if (guid != NULL)
+    {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+
+    return GetDeviceName(kAudioDevicePropertyScopeInput, index, name);
+}
+
+WebRtc_Word16 AudioDeviceMac::RecordingDevices()
+{
+
+    AudioDeviceID recDevices[MaxNumberDevices];
+    return GetNumberDevices(kAudioDevicePropertyScopeInput, recDevices,
+                            MaxNumberDevices);
+}
+
+WebRtc_Word32 AudioDeviceMac::SetRecordingDevice(WebRtc_UWord16 index)
+{
+
+    if (_recIsInitialized)
+    {
+        return -1;
+    }
+
+    AudioDeviceID recDevices[MaxNumberDevices];
+    WebRtc_UWord32 nDevices = GetNumberDevices(kAudioDevicePropertyScopeInput,
+                                               recDevices, MaxNumberDevices);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "  number of availiable waveform-audio input devices is %u",
+                 nDevices);
+
+    if (index > (nDevices - 1))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  device index is out of range [0,%u]", (nDevices - 1));
+        return -1;
+    }
+
+    _inputDeviceIndex = index;
+    _inputDeviceIsSpecified = true;
+
+    return 0;
+}
+
+
+WebRtc_Word32
+AudioDeviceMac::SetRecordingDevice(AudioDeviceModule::WindowsDeviceType /*device*/)
+{
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                 "WindowsDeviceType not supported");
+    return -1;
+}
+
+WebRtc_Word32 AudioDeviceMac::PlayoutIsAvailable(bool& available)
+{
+
+    available = true;
+
+    // Try to initialize the playout side
+    if (InitPlayout() == -1)
+    {
+        available = false;
+    }
+
+    // We destroy the IOProc created by InitPlayout() in implDeviceIOProc().
+    // We must actually start playout here in order to have the IOProc
+    // deleted by calling StopPlayout().
+    if (StartPlayout() == -1)
+    {
+        available = false;
+    }
+
+    // Cancel effect of initialization
+    if (StopPlayout() == -1)
+    {
+        available = false;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::RecordingIsAvailable(bool& available)
+{
+
+    available = true;
+
+    // Try to initialize the recording side
+    if (InitRecording() == -1)
+    {
+        available = false;
+    }
+
+    // We destroy the IOProc created by InitRecording() in implInDeviceIOProc().
+    // We must actually start recording here in order to have the IOProc
+    // deleted by calling StopRecording().
+    if (StartRecording() == -1)
+    {
+        available = false;
+    }
+
+    // Cancel effect of initialization
+    if (StopRecording() == -1)
+    {
+        available = false;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::InitPlayout()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_playing)
+    {
+        return -1;
+    }
+
+    if (!_outputDeviceIsSpecified)
+    {
+        return -1;
+    }
+
+    if (_playIsInitialized)
+    {
+        return 0;
+    }
+
+    // Initialize the speaker (devices might have been added or removed)
+    if (InitSpeaker() == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  InitSpeaker() failed");
+    }
+
+    if (!MicrophoneIsInitialized())
+    {
+        // Make this call to check if we are using
+        // one or two devices (_twoDevices)
+        bool available = false;
+        if (MicrophoneIsAvailable(available) == -1)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  MicrophoneIsAvailable() failed");
+        }
+    }
+
+    PaUtil_FlushRingBuffer(_paRenderBuffer);
+
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    _renderDelayOffsetSamples = 0;
+    _renderDelayUs = 0;
+    _renderLatencyUs = 0;
+    _renderDeviceIsAlive = 1;
+    _doStop = false;
+
+    // The internal microphone of a MacBook Pro is located under the left speaker
+    // grille. When the internal speakers are in use, we want to fully stereo
+    // pan to the right.
+    AudioObjectPropertyAddress
+        propertyAddress = { kAudioDevicePropertyDataSource,
+                kAudioDevicePropertyScopeOutput, 0 };
+    if (_macBookPro)
+    {
+        _macBookProPanRight = false;
+        Boolean hasProperty = AudioObjectHasProperty(_outputDeviceID,
+                                                     &propertyAddress);
+        if (hasProperty)
+        {
+            UInt32 dataSource = 0;
+            size = sizeof(dataSource);
+            WEBRTC_CA_LOG_WARN(AudioObjectGetPropertyData(_outputDeviceID,
+                    &propertyAddress, 0, NULL, &size, &dataSource));
+
+            if (dataSource == 'ispk')
+            {
+                _macBookProPanRight = true;
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice,
+                             _id,
+                             "MacBook Pro using internal speakers; stereo"
+                             " panning right");
+            } else
+            {
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice,
+                             _id, "MacBook Pro not using internal speakers");
+            }
+
+            // Add a listener to determine if the status changes. 
+            WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(_outputDeviceID,
+                    &propertyAddress, &objectListenerProc, this));
+        }
+    }
+
+    // Get current stream description  
+    propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
+    memset(&_outStreamFormat, 0, sizeof(_outStreamFormat));
+    size = sizeof(_outStreamFormat);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
+            &propertyAddress, 0, NULL, &size, &_outStreamFormat));
+
+    if (_outStreamFormat.mFormatID != kAudioFormatLinearPCM)
+    {
+        logCAMsg(kTraceError, kTraceAudioDevice, _id,
+                 "Unacceptable output stream format -> mFormatID",
+                 (const char *) &_outStreamFormat.mFormatID);
+        return -1;
+    }
+
+    if (_outStreamFormat.mChannelsPerFrame > N_DEVICE_CHANNELS)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "Too many channels on device -> mChannelsPerFrame = %d",
+                     _outStreamFormat.mChannelsPerFrame);
+        return -1;
+    }
+
+    if (_outStreamFormat.mFormatFlags & kAudioFormatFlagIsNonInterleaved)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "Non-interleaved audio data is not supported.",
+                     "AudioHardware streams should not have this format.");
+        return -1;
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "Ouput stream format:");
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "mSampleRate = %f, mChannelsPerFrame = %u",
+                 _outStreamFormat.mSampleRate,
+                 _outStreamFormat.mChannelsPerFrame);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "mBytesPerPacket = %u, mFramesPerPacket = %u",
+                 _outStreamFormat.mBytesPerPacket,
+                 _outStreamFormat.mFramesPerPacket);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "mBytesPerFrame = %u, mBitsPerChannel = %u",
+                 _outStreamFormat.mBytesPerFrame,
+                 _outStreamFormat.mBitsPerChannel);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "mFormatFlags = %u, mChannelsPerFrame = %u",
+                 _outStreamFormat.mFormatFlags,
+                 _outStreamFormat.mChannelsPerFrame);
+    logCAMsg(kTraceInfo, kTraceAudioDevice, _id, "mFormatID",
+             (const char *) &_outStreamFormat.mFormatID);
+
+    // Our preferred format to work with 
+    _outDesiredFormat.mSampleRate = N_PLAY_SAMPLES_PER_SEC;
+    if (_outStreamFormat.mChannelsPerFrame >= 2 && (_playChannels == 2))
+    {
+        _outDesiredFormat.mChannelsPerFrame = 2;
+    } else
+    {
+        // Disable stereo playout when we only have one channel on the device.
+        _outDesiredFormat.mChannelsPerFrame = 1;
+        _playChannels = 1;
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Stereo playout unavailable on this device");
+    }
+
+    if (_ptrAudioBuffer)
+    {
+        // Update audio buffer with the selected parameters
+        _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);
+        _ptrAudioBuffer->SetPlayoutChannels((WebRtc_UWord8) _playChannels);
+    }
+
+    _renderDelayOffsetSamples = _renderBufSizeSamples - N_BUFFERS_OUT
+        * ENGINE_PLAY_BUF_SIZE_IN_SAMPLES * _outDesiredFormat.mChannelsPerFrame;
+
+    _outDesiredFormat.mBytesPerPacket = _outDesiredFormat.mChannelsPerFrame
+        * sizeof(SInt16);
+    _outDesiredFormat.mFramesPerPacket = 1; // In uncompressed audio, 
+    // a packet is one frame.
+    _outDesiredFormat.mBytesPerFrame = _outDesiredFormat.mChannelsPerFrame
+        * sizeof(SInt16);
+    _outDesiredFormat.mBitsPerChannel = sizeof(SInt16) * 8;
+
+    _outDesiredFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger
+        | kLinearPCMFormatFlagIsPacked;
+#ifdef WEBRTC_BIG_ENDIAN
+    _outDesiredFormat.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
+#endif
+    _outDesiredFormat.mFormatID = kAudioFormatLinearPCM;
+
+    WEBRTC_CA_RETURN_ON_ERR(AudioConverterNew(&_outDesiredFormat, &_outStreamFormat,
+            &_renderConverter));
+
+    // First try to set buffer size to desired value (_playBufDelayFixed)
+    UInt32 bufByteCount = (UInt32)((_outStreamFormat.mSampleRate / 1000.0)
+        * _playBufDelayFixed * _outStreamFormat.mChannelsPerFrame
+        * sizeof(Float32));
+    if (_outStreamFormat.mFramesPerPacket != 0)
+    {
+        if (bufByteCount % _outStreamFormat.mFramesPerPacket != 0)
+        {
+            bufByteCount = ((UInt32)(bufByteCount
+                / _outStreamFormat.mFramesPerPacket) + 1)
+                * _outStreamFormat.mFramesPerPacket;
+        }
+    }
+
+    // Ensure the buffer size is within the acceptable range provided by the device.
+    propertyAddress.mSelector = kAudioDevicePropertyBufferSizeRange;
+    AudioValueRange range;
+    size = sizeof(range);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
+            &propertyAddress, 0, NULL, &size, &range));
+    if (range.mMinimum > bufByteCount)
+    {
+        bufByteCount = range.mMinimum;
+    } else if (range.mMaximum < bufByteCount)
+    {
+        bufByteCount = range.mMaximum;
+    }
+
+    propertyAddress.mSelector = kAudioDevicePropertyBufferSize;
+    size = sizeof(bufByteCount);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_outputDeviceID,
+            &propertyAddress, 0, NULL, size, &bufByteCount));
+
+    // Get render device latency
+    propertyAddress.mSelector = kAudioDevicePropertyLatency;
+    UInt32 latency = 0;
+    size = sizeof(UInt32);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
+            &propertyAddress, 0, NULL, &size, &latency));
+    _renderLatencyUs = (WebRtc_UWord32) ((1.0e6 * latency)
+        / _outStreamFormat.mSampleRate);
+
+    // Get render stream latency
+    propertyAddress.mSelector = kAudioDevicePropertyStreams;
+    AudioStreamID stream = 0;
+    size = sizeof(AudioStreamID);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
+            &propertyAddress, 0, NULL, &size, &stream));
+    propertyAddress.mSelector = kAudioStreamPropertyLatency;
+    size = sizeof(UInt32);
+    latency = 0;
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
+            &propertyAddress, 0, NULL, &size, &latency));
+    _renderLatencyUs += (WebRtc_UWord32) ((1.0e6 * latency)
+        / _outStreamFormat.mSampleRate);
+
+    // Listen for format changes
+    propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectAddPropertyListener(_outputDeviceID,
+            &propertyAddress, &objectListenerProc, this));
+
+    // Listen for processor overloads
+    propertyAddress.mSelector = kAudioDeviceProcessorOverload;
+    WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(_outputDeviceID,
+            &propertyAddress, &objectListenerProc, this));
+
+    if (_twoDevices || !_recIsInitialized)
+    {
+        WEBRTC_CA_RETURN_ON_ERR(AudioDeviceCreateIOProcID(_outputDeviceID,
+                deviceIOProc, this, &_deviceIOProcID));
+    }
+
+    // Mark playout side as initialized
+    _playIsInitialized = true;
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "  initial playout status: _renderDelayOffsetSamples=%d,"
+                 " _renderDelayUs=%d, _renderLatencyUs=%d",
+                 _renderDelayOffsetSamples, _renderDelayUs, _renderLatencyUs);
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::InitRecording()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_recording)
+    {
+        return -1;
+    }
+
+    if (!_inputDeviceIsSpecified)
+    {
+        return -1;
+    }
+
+    if (_recIsInitialized)
+    {
+        return 0;
+    }
+
+    // Initialize the microphone (devices might have been added or removed)
+    if (InitMicrophone() == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  InitMicrophone() failed");
+    }
+
+    if (!SpeakerIsInitialized())
+    {
+        // Make this call to check if we are using
+        // one or two devices (_twoDevices)
+        bool available = false;
+        if (SpeakerIsAvailable(available) == -1)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "  SpeakerIsAvailable() failed");
+        }
+    }
+
+    OSStatus err = noErr;
+    UInt32 size = 0;
+
+    PaUtil_FlushRingBuffer(_paCaptureBuffer);
+
+    _captureDelayUs = 0;
+    _captureLatencyUs = 0;
+    _captureDeviceIsAlive = 1;
+    _doStopRec = false;
+
+    // Get current stream description  
+    AudioObjectPropertyAddress
+        propertyAddress = { kAudioDevicePropertyStreamFormat,
+                kAudioDevicePropertyScopeInput, 0 };
+    memset(&_inStreamFormat, 0, sizeof(_inStreamFormat));
+    size = sizeof(_inStreamFormat);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
+            &propertyAddress, 0, NULL, &size, &_inStreamFormat));
+
+    if (_inStreamFormat.mFormatID != kAudioFormatLinearPCM)
+    {
+        logCAMsg(kTraceError, kTraceAudioDevice, _id,
+                 "Unacceptable input stream format -> mFormatID",
+                 (const char *) &_inStreamFormat.mFormatID);
+        return -1;
+    }
+
+    if (_inStreamFormat.mChannelsPerFrame > N_DEVICE_CHANNELS)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     ", Too many channels on device (mChannelsPerFrame = %d)",
+                     _inStreamFormat.mChannelsPerFrame);
+        return -1;
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 " Input stream format:");
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 " mSampleRate = %f, mChannelsPerFrame = %u",
+                 _inStreamFormat.mSampleRate, _inStreamFormat.mChannelsPerFrame);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 " mBytesPerPacket = %u, mFramesPerPacket = %u",
+                 _inStreamFormat.mBytesPerPacket,
+                 _inStreamFormat.mFramesPerPacket);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 " mBytesPerFrame = %u, mBitsPerChannel = %u",
+                 _inStreamFormat.mBytesPerFrame,
+                 _inStreamFormat.mBitsPerChannel);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 " mFormatFlags = %u, mChannelsPerFrame = %u",
+                 _inStreamFormat.mFormatFlags,
+                 _inStreamFormat.mChannelsPerFrame);
+    logCAMsg(kTraceInfo, kTraceAudioDevice, _id, "mFormatID",
+             (const char *) &_inStreamFormat.mFormatID);
+
+    // Our preferred format to work with
+    if (_inStreamFormat.mChannelsPerFrame >= 2 && (_recChannels == 2))
+    {
+        _inDesiredFormat.mChannelsPerFrame = 2;
+    } else
+    {
+        // Disable stereo recording when we only have one channel on the device.
+        _inDesiredFormat.mChannelsPerFrame = 1;
+        _recChannels = 1;
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     "Stereo recording unavailable on this device");
+    }
+
+    if (_ptrAudioBuffer)
+    {
+        // Update audio buffer with the selected parameters
+        _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
+        _ptrAudioBuffer->SetRecordingChannels((WebRtc_UWord8) _recChannels);
+    }
+
+    _inDesiredFormat.mSampleRate = N_REC_SAMPLES_PER_SEC;
+    _inDesiredFormat.mBytesPerPacket = _inDesiredFormat.mChannelsPerFrame
+        * sizeof(SInt16);
+    _inDesiredFormat.mFramesPerPacket = 1;
+    _inDesiredFormat.mBytesPerFrame = _inDesiredFormat.mChannelsPerFrame
+        * sizeof(SInt16);
+    _inDesiredFormat.mBitsPerChannel = sizeof(SInt16) * 8;
+
+    _inDesiredFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger
+        | kLinearPCMFormatFlagIsPacked;
+#ifdef WEBRTC_BIG_ENDIAN
+    _inDesiredFormat.mFormatFlags |= kLinearPCMFormatFlagIsBigEndian;
+#endif
+    _inDesiredFormat.mFormatID = kAudioFormatLinearPCM;
+
+    WEBRTC_CA_RETURN_ON_ERR(AudioConverterNew(&_inStreamFormat, &_inDesiredFormat,
+            &_captureConverter));
+
+    // First try to set buffer size to desired value (10 ms * N_BLOCKS_IO)
+    // TODO(xians): investigate this block.
+    UInt32 bufByteCount = (UInt32)((_inStreamFormat.mSampleRate / 1000.0)
+        * 10.0 * N_BLOCKS_IO * _inStreamFormat.mChannelsPerFrame
+        * sizeof(Float32));
+    if (_inStreamFormat.mFramesPerPacket != 0)
+    {
+        if (bufByteCount % _inStreamFormat.mFramesPerPacket != 0)
+        {
+            bufByteCount = ((UInt32)(bufByteCount
+                / _inStreamFormat.mFramesPerPacket) + 1)
+                * _inStreamFormat.mFramesPerPacket;
+        }
+    }
+
+    // Ensure the buffer size is within the acceptable range provided by the device.
+    propertyAddress.mSelector = kAudioDevicePropertyBufferSizeRange;
+    AudioValueRange range;
+    size = sizeof(range);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
+            &propertyAddress, 0, NULL, &size, &range));
+    if (range.mMinimum > bufByteCount)
+    {
+        bufByteCount = range.mMinimum;
+    } else if (range.mMaximum < bufByteCount)
+    {
+        bufByteCount = range.mMaximum;
+    }
+
+    propertyAddress.mSelector = kAudioDevicePropertyBufferSize;
+    size = sizeof(bufByteCount);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_inputDeviceID,
+            &propertyAddress, 0, NULL, size, &bufByteCount));
+
+    // Get capture device latency
+    propertyAddress.mSelector = kAudioDevicePropertyLatency;
+    UInt32 latency = 0;
+    size = sizeof(UInt32);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
+            &propertyAddress, 0, NULL, &size, &latency));
+    _captureLatencyUs = (UInt32)((1.0e6 * latency)
+        / _inStreamFormat.mSampleRate);
+
+    // Get capture stream latency
+    propertyAddress.mSelector = kAudioDevicePropertyStreams;
+    AudioStreamID stream = 0;
+    size = sizeof(AudioStreamID);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
+            &propertyAddress, 0, NULL, &size, &stream));
+    propertyAddress.mSelector = kAudioStreamPropertyLatency;
+    size = sizeof(UInt32);
+    latency = 0;
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
+            &propertyAddress, 0, NULL, &size, &latency));
+    _captureLatencyUs += (UInt32)((1.0e6 * latency)
+        / _inStreamFormat.mSampleRate);
+
+    // Listen for format changes
+    // TODO(xians): should we be using kAudioDevicePropertyDeviceHasChanged?
+    propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectAddPropertyListener(_inputDeviceID,
+            &propertyAddress, &objectListenerProc, this));
+
+    // Listen for processor overloads
+    propertyAddress.mSelector = kAudioDeviceProcessorOverload;
+    WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener(_inputDeviceID,
+            &propertyAddress, &objectListenerProc, this));
+
+    if (_twoDevices)
+    {
+        WEBRTC_CA_RETURN_ON_ERR(AudioDeviceCreateIOProcID(_inputDeviceID,
+                inDeviceIOProc, this, &_inDeviceIOProcID));
+    } else if (!_playIsInitialized)
+    {
+        WEBRTC_CA_RETURN_ON_ERR(AudioDeviceCreateIOProcID(_inputDeviceID,
+                deviceIOProc, this, &_deviceIOProcID));
+    }
+
+    // Mark recording side as initialized
+    _recIsInitialized = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::StartRecording()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_recIsInitialized)
+    {
+        return -1;
+    }
+
+    if (_recording)
+    {
+        return 0;
+    }
+
+    if (!_initialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     " Recording worker thread has not been started");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+
+    unsigned int threadID(0);
+    if (_captureWorkerThread != NULL)
+    {
+        _captureWorkerThread->Start(threadID);
+    }
+    _captureWorkerThreadId = threadID;
+
+    if (_twoDevices)
+    {
+        WEBRTC_CA_RETURN_ON_ERR(AudioDeviceStart(_inputDeviceID, _inDeviceIOProcID));
+    } else if (!_playing)
+    {
+        WEBRTC_CA_RETURN_ON_ERR(AudioDeviceStart(_inputDeviceID, _deviceIOProcID));
+    }
+
+    _recording = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::StopRecording()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_recIsInitialized)
+    {
+        return 0;
+    }
+
+    OSStatus err = noErr;
+
+    // Stop device
+    int32_t captureDeviceIsAlive = AtomicGet32(&_captureDeviceIsAlive);
+    if (_twoDevices)
+    {
+        if (_recording && captureDeviceIsAlive == 1)
+        {
+            _recording = false;
+            _doStopRec = true; // Signal to io proc to stop audio device
+            _critSect.Leave(); // Cannot be under lock, risk of deadlock
+            if (kEventTimeout == _stopEventRec.Wait(2000))
+            {
+                CriticalSectionScoped critScoped(&_critSect);
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                             " Timed out stopping the capture IOProc. "
+                             "We may have failed to detect a device removal.");
+
+                WEBRTC_CA_LOG_WARN(AudioDeviceStop(_inputDeviceID,
+                                                   _inDeviceIOProcID));
+                WEBRTC_CA_LOG_WARN(
+                    AudioDeviceDestroyIOProcID(_inputDeviceID,
+                                               _inDeviceIOProcID));
+            }
+            _critSect.Enter();
+            _doStopRec = false;
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         " Recording stopped");
+        }
+    }
+    else
+    {
+        // We signal a stop for a shared device even when rendering has
+        // not yet ended. This is to ensure the IOProc will return early as
+        // intended (by checking |_recording|) before accessing
+        // resources we free below (e.g. the capture converter).
+        //
+        // In the case of a shared devcie, the IOProc will verify
+        // rendering has ended before stopping itself.
+        if (_recording && captureDeviceIsAlive == 1)
+        {
+            _recording = false;
+            _doStop = true; // Signal to io proc to stop audio device
+            _critSect.Leave(); // Cannot be under lock, risk of deadlock
+            if (kEventTimeout == _stopEvent.Wait(2000))
+            {
+                CriticalSectionScoped critScoped(&_critSect);
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                             " Timed out stopping the shared IOProc. "
+                             "We may have failed to detect a device removal.");
+
+                // We assume rendering on a shared device has stopped as well if
+                // the IOProc times out.
+                WEBRTC_CA_LOG_WARN(AudioDeviceStop(_outputDeviceID,
+                                                   _deviceIOProcID));
+                WEBRTC_CA_LOG_WARN(AudioDeviceDestroyIOProcID(_outputDeviceID,
+                                                              _deviceIOProcID));
+            }
+            _critSect.Enter();
+            _doStop = false;
+            WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                         " Recording stopped (shared)");
+        }
+    }
+
+    // Setting this signal will allow the worker thread to be stopped.
+    AtomicSet32(&_captureDeviceIsAlive, 0);
+    _critSect.Leave();
+    if (_captureWorkerThread != NULL)
+    {
+        if (!_captureWorkerThread->Stop())
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         " Timed out waiting for the render worker thread to "
+                             "stop.");
+        }
+    }
+    _critSect.Enter();
+
+    WEBRTC_CA_LOG_WARN(AudioConverterDispose(_captureConverter));
+
+    // Remove listeners.
+    AudioObjectPropertyAddress
+        propertyAddress = { kAudioDevicePropertyStreamFormat,
+                kAudioDevicePropertyScopeInput, 0 };
+    WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_inputDeviceID,
+            &propertyAddress, &objectListenerProc, this));
+
+    propertyAddress.mSelector = kAudioDeviceProcessorOverload;
+    WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_inputDeviceID,
+            &propertyAddress, &objectListenerProc, this));
+
+    _recIsInitialized = false;
+    _recording = false;
+
+    return 0;
+}
+
+bool AudioDeviceMac::RecordingIsInitialized() const
+{
+    return (_recIsInitialized);
+}
+
+bool AudioDeviceMac::Recording() const
+{
+    return (_recording);
+}
+
+bool AudioDeviceMac::PlayoutIsInitialized() const
+{
+    return (_playIsInitialized);
+}
+
+WebRtc_Word32 AudioDeviceMac::StartPlayout()
+{
+    
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_playIsInitialized)
+    {
+        return -1;
+    }
+
+    if (_playing)
+    {
+        return 0;
+    }
+
+    OSStatus err = noErr;
+
+    unsigned int threadID(0);
+    if (_renderWorkerThread != NULL)
+    {
+        _renderWorkerThread->Start(threadID);
+    }
+    _renderWorkerThreadId = threadID;
+
+    if (_twoDevices || !_recording)
+    {
+        WEBRTC_CA_RETURN_ON_ERR(AudioDeviceStart(_outputDeviceID, _deviceIOProcID));
+    }
+    _playing = true;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::StopPlayout()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_playIsInitialized)
+    {
+        return 0;
+    }
+
+    OSStatus err = noErr;
+
+    int32_t renderDeviceIsAlive = AtomicGet32(&_renderDeviceIsAlive);
+    if (_playing && renderDeviceIsAlive == 1)
+    {
+        // We signal a stop for a shared device even when capturing has not
+        // yet ended. This is to ensure the IOProc will return early as
+        // intended (by checking |_playing|) before accessing resources we
+        // free below (e.g. the render converter).
+        //
+        // In the case of a shared device, the IOProc will verify capturing
+        // has ended before stopping itself.
+        _playing = false;
+        _doStop = true; // Signal to io proc to stop audio device
+        _critSect.Leave(); // Cannot be under lock, risk of deadlock
+        if (kEventTimeout == _stopEvent.Wait(2000))
+        {
+            CriticalSectionScoped critScoped(&_critSect);
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         " Timed out stopping the render IOProc. "
+                         "We may have failed to detect a device removal.");
+
+            // We assume capturing on a shared device has stopped as well if the
+            // IOProc times out.
+            WEBRTC_CA_LOG_WARN(AudioDeviceStop(_outputDeviceID,
+                                               _deviceIOProcID));
+            WEBRTC_CA_LOG_WARN(AudioDeviceDestroyIOProcID(_outputDeviceID,
+                                                          _deviceIOProcID));
+        }
+        _critSect.Enter();
+        _doStop = false;
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "Playout stopped");
+    }
+
+    // Setting this signal will allow the worker thread to be stopped.
+    AtomicSet32(&_renderDeviceIsAlive, 0);
+    _critSect.Leave();
+    if (_renderWorkerThread != NULL)
+    {
+        if (!_renderWorkerThread->Stop())
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         " Timed out waiting for the render worker thread to "
+                         "stop.");
+        }
+    }
+    _critSect.Enter();
+
+    WEBRTC_CA_LOG_WARN(AudioConverterDispose(_renderConverter));
+
+    // Remove listeners.
+    AudioObjectPropertyAddress propertyAddress = {
+            kAudioDevicePropertyStreamFormat, kAudioDevicePropertyScopeOutput,
+            0 };
+    WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_outputDeviceID,
+            &propertyAddress, &objectListenerProc, this));
+
+    propertyAddress.mSelector = kAudioDeviceProcessorOverload;
+    WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_outputDeviceID,
+            &propertyAddress, &objectListenerProc, this));
+
+    if (_macBookPro)
+    {
+        Boolean hasProperty = AudioObjectHasProperty(_outputDeviceID,
+                                                     &propertyAddress);
+        if (hasProperty)
+        {
+            propertyAddress.mSelector = kAudioDevicePropertyDataSource;
+            WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener(_outputDeviceID,
+                    &propertyAddress, &objectListenerProc, this));
+        }
+    }
+
+    _playIsInitialized = false;
+    _playing = false;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::PlayoutDelay(WebRtc_UWord16& delayMS) const
+{
+    int32_t renderDelayUs = AtomicGet32(&_renderDelayUs);
+    delayMS = static_cast<WebRtc_UWord16> (1e-3 * (renderDelayUs
+        + _renderLatencyUs) + 0.5);
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::RecordingDelay(WebRtc_UWord16& delayMS) const
+{
+    int32_t captureDelayUs = AtomicGet32(&_captureDelayUs);
+    delayMS = static_cast<WebRtc_UWord16> (1e-3 * (captureDelayUs
+        + _captureLatencyUs) + 0.5);
+    return 0;
+}
+
+bool AudioDeviceMac::Playing() const
+{
+    return (_playing);
+}
+
+WebRtc_Word32 AudioDeviceMac::SetPlayoutBuffer(
+    const AudioDeviceModule::BufferType type,
+    WebRtc_UWord16 sizeMS)
+{
+
+    if (type != AudioDeviceModule::kFixedBufferSize)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     " Adaptive buffer size not supported on this platform");
+        return -1;
+    }
+
+    _playBufType = type;
+    _playBufDelayFixed = sizeMS;
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::PlayoutBuffer(
+    AudioDeviceModule::BufferType& type,
+    WebRtc_UWord16& sizeMS) const
+{
+
+    type = _playBufType;
+    sizeMS = _playBufDelayFixed;
+
+    return 0;
+}
+
+// Not implemented for Mac.
+WebRtc_Word32 AudioDeviceMac::CPULoad(WebRtc_UWord16& /*load*/) const
+{
+
+    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                 "  API call not supported on this platform");
+
+    return -1;
+}
+
+bool AudioDeviceMac::PlayoutWarning() const
+{
+    return (_playWarning > 0);
+}
+
+bool AudioDeviceMac::PlayoutError() const
+{
+    return (_playError > 0);
+}
+
+bool AudioDeviceMac::RecordingWarning() const
+{
+    return (_recWarning > 0);
+}
+
+bool AudioDeviceMac::RecordingError() const
+{
+    return (_recError > 0);
+}
+
+void AudioDeviceMac::ClearPlayoutWarning()
+{
+    _playWarning = 0;
+}
+
+void AudioDeviceMac::ClearPlayoutError()
+{
+    _playError = 0;
+}
+
+void AudioDeviceMac::ClearRecordingWarning()
+{
+    _recWarning = 0;
+}
+
+void AudioDeviceMac::ClearRecordingError()
+{
+    _recError = 0;
+}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+WebRtc_Word32
+AudioDeviceMac::GetNumberDevices(const AudioObjectPropertyScope scope,
+                                 AudioDeviceID scopedDeviceIds[],
+                                 const WebRtc_UWord32 deviceListLength)
+{
+    OSStatus err = noErr;
+
+    AudioObjectPropertyAddress propertyAddress = {
+            kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal,
+            kAudioObjectPropertyElementMaster };
+    UInt32 size = 0;
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyDataSize(kAudioObjectSystemObject,
+            &propertyAddress, 0, NULL, &size));
+    if (size == 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "No devices");
+        return 0;
+    }
+
+    AudioDeviceID* deviceIds = (AudioDeviceID*) malloc(size);
+    UInt32 numberDevices = size / sizeof(AudioDeviceID);
+    AudioBufferList* bufferList = NULL;
+    UInt32 numberScopedDevices = 0;
+
+    // First check if there is a default device and list it
+    UInt32 hardwareProperty = 0;
+    if (scope == kAudioDevicePropertyScopeOutput)
+    {
+        hardwareProperty = kAudioHardwarePropertyDefaultOutputDevice;
+    } else
+    {
+        hardwareProperty = kAudioHardwarePropertyDefaultInputDevice;
+    }
+
+    AudioObjectPropertyAddress
+        propertyAddressDefault = { hardwareProperty,
+                kAudioObjectPropertyScopeGlobal,
+                kAudioObjectPropertyElementMaster };
+
+    AudioDeviceID usedID;
+    UInt32 uintSize = sizeof(UInt32);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(kAudioObjectSystemObject,
+            &propertyAddressDefault, 0, NULL, &uintSize, &usedID));
+    if (usedID != kAudioDeviceUnknown)
+    {
+        scopedDeviceIds[numberScopedDevices] = usedID;
+        numberScopedDevices++;
+    } else
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "GetNumberDevices(): Default device unknown");
+    }
+
+    // Then list the rest of the devices
+    bool listOK = true;
+
+    WEBRTC_CA_LOG_ERR(AudioObjectGetPropertyData(kAudioObjectSystemObject,
+            &propertyAddress, 0, NULL, &size, deviceIds));
+    if (err != noErr)
+    {
+        listOK = false;
+    } else
+    {
+        propertyAddress.mSelector = kAudioDevicePropertyStreamConfiguration;
+        propertyAddress.mScope = scope;
+        propertyAddress.mElement = 0;
+        for (UInt32 i = 0; i < numberDevices; i++)
+        {
+            // Check for input channels
+            WEBRTC_CA_LOG_ERR(AudioObjectGetPropertyDataSize(deviceIds[i],
+                    &propertyAddress, 0, NULL, &size));
+            if (err == kAudioHardwareBadDeviceError)
+            {
+                // This device doesn't actually exist; continue iterating.
+                continue;
+            } else if (err != noErr)
+            {
+                listOK = false;
+                break;
+            }
+
+            bufferList = (AudioBufferList*) malloc(size);
+            WEBRTC_CA_LOG_ERR(AudioObjectGetPropertyData(deviceIds[i],
+                    &propertyAddress, 0, NULL, &size, bufferList));
+            if (err != noErr)
+            {
+                listOK = false;
+                break;
+            }
+
+            if (bufferList->mNumberBuffers > 0)
+            {
+                if (numberScopedDevices >= deviceListLength)
+                {
+                    WEBRTC_TRACE(kTraceError,
+                                 kTraceAudioDevice, _id,
+                                 "Device list is not long enough");
+                    listOK = false;
+                    break;
+                }
+
+                scopedDeviceIds[numberScopedDevices] = deviceIds[i];
+                numberScopedDevices++;
+            }
+
+            free(bufferList);
+            bufferList = NULL;
+        } // for
+    }
+
+    if (!listOK)
+    {
+        if (deviceIds)
+        {
+            free(deviceIds);
+            deviceIds = NULL;
+        }
+
+        if (bufferList)
+        {
+            free(bufferList);
+            bufferList = NULL;
+        }
+
+        return -1;
+    }
+
+    // Happy ending   
+    if (deviceIds)
+    {
+        free(deviceIds);
+        deviceIds = NULL;
+    }
+
+    return numberScopedDevices;
+}
+
+WebRtc_Word32
+AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope,
+                              const WebRtc_UWord16 index,
+                              char* name)
+{
+    OSStatus err = noErr;
+    UInt32 len = kAdmMaxDeviceNameSize;
+    AudioDeviceID deviceIds[MaxNumberDevices];
+
+    int numberDevices = GetNumberDevices(scope, deviceIds, MaxNumberDevices);
+    if (numberDevices < 0)
+    {
+        return -1;
+    } else if (numberDevices == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "No devices");
+        return -1;
+    }
+
+    // If the number is below the number of devices, assume it's "WEBRTC ID"
+    // otherwise assume it's a CoreAudio ID
+    AudioDeviceID usedID;
+
+    // Check if there is a default device
+    bool isDefaultDevice = false;
+    if (index == 0)
+    {
+        UInt32 hardwareProperty = 0;
+        if (scope == kAudioDevicePropertyScopeOutput)
+        {
+            hardwareProperty = kAudioHardwarePropertyDefaultOutputDevice;
+        } else
+        {
+            hardwareProperty = kAudioHardwarePropertyDefaultInputDevice;
+        }
+        AudioObjectPropertyAddress propertyAddress = { hardwareProperty,
+                kAudioObjectPropertyScopeGlobal,
+                kAudioObjectPropertyElementMaster };
+        UInt32 size = sizeof(UInt32);
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(kAudioObjectSystemObject,
+                &propertyAddress, 0, NULL, &size, &usedID));
+        if (usedID == kAudioDeviceUnknown)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "GetDeviceName(): Default device unknown");
+        } else
+        {
+            isDefaultDevice = true;
+        }
+    }
+
+    AudioObjectPropertyAddress propertyAddress = {
+            kAudioDevicePropertyDeviceName, scope, 0 };
+
+    if (isDefaultDevice)
+    {
+        char devName[len];
+
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(usedID,
+                &propertyAddress, 0, NULL, &len, devName));
+
+        sprintf(name, "default (%s)", devName);
+    } else
+    {
+        if (index < numberDevices)
+        {
+            usedID = deviceIds[index];
+        } else
+        {
+            usedID = index;
+        }
+
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(usedID,
+                &propertyAddress, 0, NULL, &len, name));
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::InitDevice(const WebRtc_UWord16 userDeviceIndex,
+                                         AudioDeviceID& deviceId,
+                                         const bool isInput)
+{
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    AudioObjectPropertyScope deviceScope;
+    AudioObjectPropertySelector defaultDeviceSelector;
+    AudioDeviceID deviceIds[MaxNumberDevices];
+
+    if (isInput)
+    {
+        deviceScope = kAudioDevicePropertyScopeInput;
+        defaultDeviceSelector = kAudioHardwarePropertyDefaultInputDevice;
+    } else
+    {
+        deviceScope = kAudioDevicePropertyScopeOutput;
+        defaultDeviceSelector = kAudioHardwarePropertyDefaultOutputDevice;
+    }
+
+    AudioObjectPropertyAddress
+        propertyAddress = { defaultDeviceSelector,
+                kAudioObjectPropertyScopeGlobal,
+                kAudioObjectPropertyElementMaster };
+
+    // Get the actual device IDs
+    int numberDevices = GetNumberDevices(deviceScope, deviceIds,
+                                         MaxNumberDevices);
+    if (numberDevices < 0)
+    {
+        return -1;
+    } else if (numberDevices == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "InitDevice(): No devices");
+        return -1;
+    }
+
+    bool isDefaultDevice = false;
+    deviceId = kAudioDeviceUnknown;
+    if (userDeviceIndex == 0)
+    {
+        // Try to use default system device
+        size = sizeof(AudioDeviceID);
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(kAudioObjectSystemObject,
+                &propertyAddress, 0, NULL, &size, &deviceId));
+        if (deviceId == kAudioDeviceUnknown)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         " No default device exists");
+        } else
+        {
+            isDefaultDevice = true;
+        }
+    }
+
+    if (!isDefaultDevice)
+    {
+        deviceId = deviceIds[userDeviceIndex];
+    }
+
+    // Obtain device name and manufacturer for logging.
+    // Also use this as a test to ensure a user-set device ID is valid. 
+    char devName[128];
+    char devManf[128];
+    memset(devName, 0, sizeof(devName));
+    memset(devManf, 0, sizeof(devManf));
+
+    propertyAddress.mSelector = kAudioDevicePropertyDeviceName;
+    propertyAddress.mScope = deviceScope;
+    propertyAddress.mElement = 0;
+    size = sizeof(devName);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(deviceId,
+            &propertyAddress, 0, NULL, &size, devName));
+
+    propertyAddress.mSelector = kAudioDevicePropertyDeviceManufacturer;
+    size = sizeof(devManf);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(deviceId,
+            &propertyAddress, 0, NULL, &size, devManf));
+
+    if (isInput)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     " Input device: %s %s", devManf, devName);
+    } else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     " Output device: %s %s", devManf, devName);
+    }
+
+    return 0;
+}
+
+OSStatus AudioDeviceMac::objectListenerProc(
+    AudioObjectID objectId,
+    UInt32 numberAddresses,
+    const AudioObjectPropertyAddress addresses[],
+    void* clientData)
+{
+    AudioDeviceMac *ptrThis = (AudioDeviceMac *) clientData;
+    assert(ptrThis != NULL);
+
+    ptrThis->implObjectListenerProc(objectId, numberAddresses, addresses);
+
+    // AudioObjectPropertyListenerProc functions are supposed to return 0
+    return 0;
+}
+
+OSStatus AudioDeviceMac::implObjectListenerProc(
+    const AudioObjectID objectId,
+    const UInt32 numberAddresses,
+    const AudioObjectPropertyAddress addresses[])
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                 "AudioDeviceMac::implObjectListenerProc()");
+    
+    for (UInt32 i = 0; i < numberAddresses; i++)
+    {
+        if (addresses[i].mSelector == kAudioHardwarePropertyDevices)
+        {
+            HandleDeviceChange();
+        } else if (addresses[i].mSelector == kAudioDevicePropertyStreamFormat)
+        {
+            HandleStreamFormatChange(objectId, addresses[i]);
+        } else if (addresses[i].mSelector == kAudioDevicePropertyDataSource)
+        {
+            HandleDataSourceChange(objectId, addresses[i]);
+        } else if (addresses[i].mSelector == kAudioDeviceProcessorOverload)
+        {
+            HandleProcessorOverload(addresses[i]);
+        }
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::HandleDeviceChange()
+{
+    OSStatus err = noErr;
+
+    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                 "kAudioHardwarePropertyDevices");
+
+    // A device has changed. Check if our registered devices have been removed.
+    // Ensure the devices have been initialized, meaning the IDs are valid.
+    if (MicrophoneIsInitialized())
+    {
+        AudioObjectPropertyAddress propertyAddress = {
+                kAudioDevicePropertyDeviceIsAlive,
+                kAudioDevicePropertyScopeInput, 0 };
+        UInt32 deviceIsAlive = 1;
+        UInt32 size = sizeof(UInt32);
+        err = AudioObjectGetPropertyData(_inputDeviceID, &propertyAddress, 0,
+                                         NULL, &size, &deviceIsAlive);
+
+        if (err == kAudioHardwareBadDeviceError || deviceIsAlive == 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "Capture device is not alive (probably removed)");
+            AtomicSet32(&_captureDeviceIsAlive, 0);
+            _mixerManager.CloseMicrophone();
+            if (_recError == 1)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,
+                             _id, "  pending recording error exists");
+            }
+            _recError = 1; // triggers callback from module process thread
+        } else if (err != noErr)
+        {
+            logCAMsg(kTraceError, kTraceAudioDevice, _id,
+                     "Error in AudioDeviceGetProperty()", (const char*) &err);
+            return -1;
+        }      
+    }
+
+    if (SpeakerIsInitialized())
+    {
+        AudioObjectPropertyAddress propertyAddress = {
+                kAudioDevicePropertyDeviceIsAlive,
+                kAudioDevicePropertyScopeOutput, 0 };
+        UInt32 deviceIsAlive = 1;
+        UInt32 size = sizeof(UInt32);
+        err = AudioObjectGetPropertyData(_outputDeviceID, &propertyAddress, 0,
+                                         NULL, &size, &deviceIsAlive);
+
+        if (err == kAudioHardwareBadDeviceError || deviceIsAlive == 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "Render device is not alive (probably removed)");
+            AtomicSet32(&_renderDeviceIsAlive, 0);
+            _mixerManager.CloseSpeaker();
+            if (_playError == 1)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice,
+                             _id, "  pending playout error exists");
+            }
+            _playError = 1; // triggers callback from module process thread
+        } else if (err != noErr)
+        {
+            logCAMsg(kTraceError, kTraceAudioDevice, _id,
+                     "Error in AudioDeviceGetProperty()", (const char*) &err);
+            return -1;
+        }
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::HandleStreamFormatChange(
+    const AudioObjectID objectId,
+    const AudioObjectPropertyAddress propertyAddress)
+{
+    OSStatus err = noErr;
+
+    WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                 "Stream format changed");
+
+    if (objectId != _inputDeviceID && objectId != _outputDeviceID)
+    {
+        return 0;
+    }
+
+    // Get the new device format
+    AudioStreamBasicDescription streamFormat;
+    UInt32 size = sizeof(streamFormat);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(objectId,
+            &propertyAddress, 0, NULL, &size, &streamFormat));
+
+    if (streamFormat.mFormatID != kAudioFormatLinearPCM)
+    {
+        logCAMsg(kTraceError, kTraceAudioDevice, _id,
+                 "Unacceptable input stream format -> mFormatID",
+                 (const char *) &streamFormat.mFormatID);
+        return -1;
+    }
+
+    if (streamFormat.mChannelsPerFrame > N_DEVICE_CHANNELS)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "Too many channels on device (mChannelsPerFrame = %d)",
+                     streamFormat.mChannelsPerFrame);
+        return -1;
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "Stream format:");
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "mSampleRate = %f, mChannelsPerFrame = %u",
+                 streamFormat.mSampleRate, streamFormat.mChannelsPerFrame);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "mBytesPerPacket = %u, mFramesPerPacket = %u",
+                 streamFormat.mBytesPerPacket, streamFormat.mFramesPerPacket);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "mBytesPerFrame = %u, mBitsPerChannel = %u",
+                 streamFormat.mBytesPerFrame, streamFormat.mBitsPerChannel);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "mFormatFlags = %u, mChannelsPerFrame = %u",
+                 streamFormat.mFormatFlags, streamFormat.mChannelsPerFrame);
+    logCAMsg(kTraceInfo, kTraceAudioDevice, _id, "mFormatID",
+             (const char *) &streamFormat.mFormatID);
+
+    if (propertyAddress.mScope == kAudioDevicePropertyScopeInput)
+    {
+        memcpy(&_inStreamFormat, &streamFormat, sizeof(streamFormat));
+
+        if (_inStreamFormat.mChannelsPerFrame >= 2 && (_recChannels == 2))
+        {
+            _inDesiredFormat.mChannelsPerFrame = 2;
+        } else
+        {
+            // Disable stereo recording when we only have one channel on the device.
+            _inDesiredFormat.mChannelsPerFrame = 1;
+            _recChannels = 1;
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                         "Stereo recording unavailable on this device");
+        }
+
+        if (_ptrAudioBuffer)
+        {
+            // Update audio buffer with the selected parameters
+            _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
+            _ptrAudioBuffer->SetRecordingChannels((WebRtc_UWord8) _recChannels);
+        }
+
+        // Recreate the converter with the new format
+        // TODO(xians): make this thread safe
+        WEBRTC_CA_RETURN_ON_ERR(AudioConverterDispose(_captureConverter));
+
+        WEBRTC_CA_RETURN_ON_ERR(AudioConverterNew(&streamFormat, &_inDesiredFormat,
+                &_captureConverter));
+    } else
+    {
+        memcpy(&_outStreamFormat, &streamFormat, sizeof(streamFormat));
+
+        if (_outStreamFormat.mChannelsPerFrame >= 2 && (_playChannels == 2))
+        {
+            _outDesiredFormat.mChannelsPerFrame = 2;
+        } else
+        {
+            // Disable stereo playout when we only have one channel on the device.
+            _outDesiredFormat.mChannelsPerFrame = 1;
+            _playChannels = 1;
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                         "Stereo playout unavailable on this device");
+        }
+
+        if (_ptrAudioBuffer)
+        {
+            // Update audio buffer with the selected parameters
+            _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);
+            _ptrAudioBuffer->SetPlayoutChannels((WebRtc_UWord8) _playChannels);
+        }
+
+        _renderDelayOffsetSamples = _renderBufSizeSamples - N_BUFFERS_OUT
+            * ENGINE_PLAY_BUF_SIZE_IN_SAMPLES
+            * _outDesiredFormat.mChannelsPerFrame;
+
+        // Recreate the converter with the new format
+        // TODO(xians): make this thread safe
+        WEBRTC_CA_RETURN_ON_ERR(AudioConverterDispose(_renderConverter));
+
+        WEBRTC_CA_RETURN_ON_ERR(AudioConverterNew(&_outDesiredFormat, &streamFormat,
+                &_renderConverter));
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioDeviceMac::HandleDataSourceChange(
+    const AudioObjectID objectId,
+    const AudioObjectPropertyAddress propertyAddress)
+{
+    OSStatus err = noErr;
+
+    if (_macBookPro && propertyAddress.mScope
+        == kAudioDevicePropertyScopeOutput)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
+                     "Data source changed");
+
+        _macBookProPanRight = false;
+        UInt32 dataSource = 0;
+        UInt32 size = sizeof(UInt32);
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(objectId,
+                &propertyAddress, 0, NULL, &size, &dataSource));
+        if (dataSource == 'ispk')
+        {
+            _macBookProPanRight = true;
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                         "MacBook Pro using internal speakers; stereo panning right");
+        } else
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                         "MacBook Pro not using internal speakers");
+        }
+    }
+
+    return 0;
+}
+WebRtc_Word32 AudioDeviceMac::HandleProcessorOverload(
+    const AudioObjectPropertyAddress propertyAddress)
+{
+    // TODO(xians): we probably want to notify the user in some way of the
+    // overload. However, the Windows interpretations of these errors seem to
+    // be more severe than what ProcessorOverload is thrown for.
+    //
+    // We don't log the notification, as it's sent from the HAL's IO thread. We
+    // don't want to slow it down even further.
+    if (propertyAddress.mScope == kAudioDevicePropertyScopeInput)
+    {
+        //WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "Capture processor
+        // overload");
+        //_callback->ProblemIsReported(
+        // SndCardStreamObserver::ERecordingProblem);
+    } else
+    {
+        //WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+        // "Render processor overload");
+        //_callback->ProblemIsReported(
+        // SndCardStreamObserver::EPlaybackProblem);
+    }
+
+    return 0;
+}
+
+// ============================================================================
+//                                  Thread Methods
+// ============================================================================
+
+OSStatus AudioDeviceMac::deviceIOProc(AudioDeviceID, const AudioTimeStamp*,
+                                      const AudioBufferList* inputData,
+                                      const AudioTimeStamp* inputTime,
+                                      AudioBufferList* outputData,
+                                      const AudioTimeStamp* outputTime,
+                                      void *clientData)
+{
+    AudioDeviceMac *ptrThis = (AudioDeviceMac *) clientData;
+    assert(ptrThis != NULL);
+
+    ptrThis->implDeviceIOProc(inputData, inputTime, outputData, outputTime);
+
+    // AudioDeviceIOProc functions are supposed to return 0
+    return 0;
+}
+
+OSStatus AudioDeviceMac::outConverterProc(AudioConverterRef,
+                                          UInt32 *numberDataPackets,
+                                          AudioBufferList *data,
+                                          AudioStreamPacketDescription **,
+                                          void *userData)
+{
+    AudioDeviceMac *ptrThis = (AudioDeviceMac *) userData;
+    assert(ptrThis != NULL);
+
+    return ptrThis->implOutConverterProc(numberDataPackets, data);
+}
+
+OSStatus AudioDeviceMac::inDeviceIOProc(AudioDeviceID, const AudioTimeStamp*,
+                                        const AudioBufferList* inputData,
+                                        const AudioTimeStamp* inputTime,
+                                        AudioBufferList*,
+                                        const AudioTimeStamp*, void* clientData)
+{
+    AudioDeviceMac *ptrThis = (AudioDeviceMac *) clientData;
+    assert(ptrThis != NULL);
+
+    ptrThis->implInDeviceIOProc(inputData, inputTime);
+
+    // AudioDeviceIOProc functions are supposed to return 0
+    return 0;
+}
+
+OSStatus AudioDeviceMac::inConverterProc(
+    AudioConverterRef,
+    UInt32 *numberDataPackets,
+    AudioBufferList *data,
+    AudioStreamPacketDescription ** /*dataPacketDescription*/,
+    void *userData)
+{
+    AudioDeviceMac *ptrThis = static_cast<AudioDeviceMac*> (userData);
+    assert(ptrThis != NULL);
+
+    return ptrThis->implInConverterProc(numberDataPackets, data);
+}
+
+OSStatus AudioDeviceMac::implDeviceIOProc(const AudioBufferList *inputData,
+                                          const AudioTimeStamp *inputTime,
+                                          AudioBufferList *outputData,
+                                          const AudioTimeStamp *outputTime)
+{
+    OSStatus err = noErr;
+    UInt64 outputTimeNs = AudioConvertHostTimeToNanos(outputTime->mHostTime);
+    UInt64 nowNs = AudioConvertHostTimeToNanos(AudioGetCurrentHostTime());
+
+    if (!_twoDevices && _recording)
+    {
+        implInDeviceIOProc(inputData, inputTime);
+    }
+
+    // Check if we should close down audio device
+    // Double-checked locking optimization to remove locking overhead
+    if (_doStop)
+    {
+        _critSect.Enter();
+        if (_doStop)
+        {
+            if (_twoDevices || (!_recording && !_playing))
+            {
+               // In the case of a shared device, the single driving ioProc
+               // is stopped here
+               WEBRTC_CA_LOG_ERR(AudioDeviceStop(_outputDeviceID,
+                                                 _deviceIOProcID));
+               WEBRTC_CA_LOG_WARN(AudioDeviceDestroyIOProcID(_outputDeviceID,
+                                                             _deviceIOProcID));
+               if (err == noErr)
+               {
+                  WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice,
+                               _id, " Playout or shared device stopped");
+               }
+            }
+
+            _doStop = false;
+            _stopEvent.Set();
+            _critSect.Leave();
+            return 0;
+        }
+        _critSect.Leave();
+    }
+
+    if (!_playing)
+    {
+        // This can be the case when a shared device is capturing but not
+        // rendering. We allow the checks above before returning to avoid a
+        // timeout when capturing is stopped.
+        return 0;
+    }
+
+    assert(_outStreamFormat.mBytesPerFrame != 0);
+    UInt32 size = outputData->mBuffers->mDataByteSize
+        / _outStreamFormat.mBytesPerFrame;
+
+    // TODO(xians): signal an error somehow?
+    err = AudioConverterFillComplexBuffer(_renderConverter, outConverterProc,
+                                          this, &size, outputData, NULL);
+    if (err != noErr)
+    {
+        if (err == 1)
+        {
+            // This is our own error.
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         " Error in AudioConverterFillComplexBuffer()");
+            return 1;
+        } else
+        {
+            logCAMsg(kTraceError, kTraceAudioDevice, _id,
+                     "Error in AudioConverterFillComplexBuffer()",
+                     (const char *) &err);
+            return 1;
+        }
+    }
+
+    ring_buffer_size_t bufSizeSamples =
+        PaUtil_GetRingBufferReadAvailable(_paRenderBuffer);
+
+    int32_t renderDelayUs = static_cast<int32_t> (1e-3 * (outputTimeNs - nowNs)
+        + 0.5);
+    renderDelayUs += static_cast<int32_t> ((1.0e6 * bufSizeSamples)
+        / _outDesiredFormat.mChannelsPerFrame / _outDesiredFormat.mSampleRate
+        + 0.5);
+
+    AtomicSet32(&_renderDelayUs, renderDelayUs);
+
+    return 0;
+}
+
+OSStatus AudioDeviceMac::implOutConverterProc(UInt32 *numberDataPackets,
+                                              AudioBufferList *data)
+{
+    assert(data->mNumberBuffers == 1);
+    ring_buffer_size_t numSamples = *numberDataPackets
+        * _outDesiredFormat.mChannelsPerFrame;
+
+    data->mBuffers->mNumberChannels = _outDesiredFormat.mChannelsPerFrame;
+    // Always give the converter as much as it wants, zero padding as required.
+    data->mBuffers->mDataByteSize = *numberDataPackets
+        * _outDesiredFormat.mBytesPerPacket;
+    data->mBuffers->mData = _renderConvertData;
+    memset(_renderConvertData, 0, sizeof(_renderConvertData));
+
+    PaUtil_ReadRingBuffer(_paRenderBuffer, _renderConvertData, numSamples);
+
+    kern_return_t kernErr = semaphore_signal_all(_renderSemaphore);
+    if (kernErr != KERN_SUCCESS)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     " semaphore_signal_all() error: %d", kernErr);
+        return 1;
+    }
+
+    return 0;
+}
+
+OSStatus AudioDeviceMac::implInDeviceIOProc(const AudioBufferList *inputData,
+                                            const AudioTimeStamp *inputTime)
+{
+    OSStatus err = noErr;
+    UInt64 inputTimeNs = AudioConvertHostTimeToNanos(inputTime->mHostTime);
+    UInt64 nowNs = AudioConvertHostTimeToNanos(AudioGetCurrentHostTime());
+
+    // Check if we should close down audio device
+    // Double-checked locking optimization to remove locking overhead
+    if (_doStopRec)
+    {
+        _critSect.Enter();
+        if (_doStopRec)
+        {
+            // This will be signalled only when a shared device is not in use.
+            WEBRTC_CA_LOG_ERR(AudioDeviceStop(_inputDeviceID, _inDeviceIOProcID));
+            WEBRTC_CA_LOG_WARN(AudioDeviceDestroyIOProcID(_inputDeviceID,
+                                                          _inDeviceIOProcID));
+            if (err == noErr)
+            {
+                WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice,
+                             _id, " Recording device stopped");
+            }
+
+            _doStopRec = false;
+            _stopEventRec.Set();
+            _critSect.Leave();
+            return 0;
+        }
+        _critSect.Leave();
+    }
+
+    if (!_recording)
+    {
+        // Allow above checks to avoid a timeout on stopping capture.
+        return 0;
+    }
+
+    ring_buffer_size_t bufSizeSamples =
+        PaUtil_GetRingBufferReadAvailable(_paCaptureBuffer);
+
+    int32_t captureDelayUs = static_cast<int32_t> (1e-3 * (nowNs - inputTimeNs)
+        + 0.5);
+    captureDelayUs
+        += static_cast<int32_t> ((1.0e6 * bufSizeSamples)
+            / _inStreamFormat.mChannelsPerFrame / _inStreamFormat.mSampleRate
+            + 0.5);
+
+    AtomicSet32(&_captureDelayUs, captureDelayUs);
+
+    assert(inputData->mNumberBuffers == 1);
+    ring_buffer_size_t numSamples = inputData->mBuffers->mDataByteSize
+        * _inStreamFormat.mChannelsPerFrame / _inStreamFormat.mBytesPerPacket;
+    PaUtil_WriteRingBuffer(_paCaptureBuffer, inputData->mBuffers->mData,
+                           numSamples);
+
+    kern_return_t kernErr = semaphore_signal_all(_captureSemaphore);
+    if (kernErr != KERN_SUCCESS)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     " semaphore_signal_all() error: %d", kernErr);
+    }
+
+    return err;
+}
+
+OSStatus AudioDeviceMac::implInConverterProc(UInt32 *numberDataPackets,
+                                             AudioBufferList *data)
+{
+    assert(data->mNumberBuffers == 1);
+    ring_buffer_size_t numSamples = *numberDataPackets
+        * _inStreamFormat.mChannelsPerFrame;
+
+    while (PaUtil_GetRingBufferReadAvailable(_paCaptureBuffer) < numSamples)
+    {
+        mach_timespec_t timeout;
+        timeout.tv_sec = 0;
+        timeout.tv_nsec = TIMER_PERIOD_MS;
+
+        kern_return_t kernErr = semaphore_timedwait(_captureSemaphore, timeout);
+        if (kernErr == KERN_OPERATION_TIMED_OUT)
+        {
+            int32_t signal = AtomicGet32(&_captureDeviceIsAlive);
+            if (signal == 0)
+            {
+                // The capture device is no longer alive; stop the worker thread.
+                *numberDataPackets = 0;
+                return 1;
+            }
+        } else if (kernErr != KERN_SUCCESS)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         " semaphore_wait() error: %d", kernErr);
+        }
+    }
+
+    // Pass the read pointer directly to the converter to avoid a memcpy.
+    void* dummyPtr;
+    ring_buffer_size_t dummySize;
+    PaUtil_GetRingBufferReadRegions(_paCaptureBuffer, numSamples,
+                                    &data->mBuffers->mData, &numSamples,
+                                    &dummyPtr, &dummySize);
+    PaUtil_AdvanceRingBufferReadIndex(_paCaptureBuffer, numSamples);
+
+    data->mBuffers->mNumberChannels = _inStreamFormat.mChannelsPerFrame;
+    *numberDataPackets = numSamples / _inStreamFormat.mChannelsPerFrame;
+    data->mBuffers->mDataByteSize = *numberDataPackets
+        * _inStreamFormat.mBytesPerPacket;
+
+    return 0;
+}
+
+bool AudioDeviceMac::RunRender(void* ptrThis)
+{
+    return static_cast<AudioDeviceMac*> (ptrThis)->RenderWorkerThread();
+}
+
+bool AudioDeviceMac::RenderWorkerThread()
+{
+    ring_buffer_size_t numSamples = ENGINE_PLAY_BUF_SIZE_IN_SAMPLES
+        * _outDesiredFormat.mChannelsPerFrame;
+    while (PaUtil_GetRingBufferWriteAvailable(_paRenderBuffer)
+        - _renderDelayOffsetSamples < numSamples)
+    {
+        mach_timespec_t timeout;
+        timeout.tv_sec = 0;
+        timeout.tv_nsec = TIMER_PERIOD_MS;
+
+        kern_return_t kernErr = semaphore_timedwait(_renderSemaphore, timeout);
+        if (kernErr == KERN_OPERATION_TIMED_OUT)
+        {
+            int32_t signal = AtomicGet32(&_renderDeviceIsAlive);
+            if (signal == 0)
+            {
+                // The render device is no longer alive; stop the worker thread.
+                return false;
+            }
+        } else if (kernErr != KERN_SUCCESS)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         " semaphore_timedwait() error: %d", kernErr);
+        }
+    }
+
+    WebRtc_Word8 playBuffer[4 * ENGINE_PLAY_BUF_SIZE_IN_SAMPLES];
+
+    if (!_ptrAudioBuffer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  capture AudioBuffer is invalid");
+        return false;
+    }
+
+    // Ask for new PCM data to be played out using the AudioDeviceBuffer.
+    WebRtc_UWord32 nSamples =
+        _ptrAudioBuffer->RequestPlayoutData(ENGINE_PLAY_BUF_SIZE_IN_SAMPLES);
+
+    nSamples = _ptrAudioBuffer->GetPlayoutData(playBuffer);
+    if (nSamples != ENGINE_PLAY_BUF_SIZE_IN_SAMPLES)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  invalid number of output samples(%d)", nSamples);
+    }
+
+    WebRtc_UWord32 nOutSamples = nSamples * _outDesiredFormat.mChannelsPerFrame;
+
+    SInt16 *pPlayBuffer = (SInt16 *) &playBuffer;
+    if (_macBookProPanRight && (_playChannels == 2))
+    {
+        // Mix entirely into the right channel and zero the left channel.
+        SInt32 sampleInt32 = 0;
+        for (WebRtc_UWord32 sampleIdx = 0; sampleIdx < nOutSamples; sampleIdx
+            += 2)
+        {
+            sampleInt32 = pPlayBuffer[sampleIdx];
+            sampleInt32 += pPlayBuffer[sampleIdx + 1];
+            sampleInt32 /= 2;
+
+            if (sampleInt32 > 32767)
+            {
+                sampleInt32 = 32767;
+            } else if (sampleInt32 < -32768)
+            {
+                sampleInt32 = -32768;
+            }
+
+            pPlayBuffer[sampleIdx] = 0;
+            pPlayBuffer[sampleIdx + 1] = static_cast<SInt16> (sampleInt32);
+        }
+    }
+
+    PaUtil_WriteRingBuffer(_paRenderBuffer, pPlayBuffer, nOutSamples);
+
+    return true;
+}
+
+bool AudioDeviceMac::RunCapture(void* ptrThis)
+{
+    return static_cast<AudioDeviceMac*> (ptrThis)->CaptureWorkerThread();
+}
+
+bool AudioDeviceMac::CaptureWorkerThread()
+{
+    OSStatus err = noErr;
+    UInt32 noRecSamples = ENGINE_REC_BUF_SIZE_IN_SAMPLES
+        * _inDesiredFormat.mChannelsPerFrame;
+    SInt16 recordBuffer[noRecSamples];
+    UInt32 size = ENGINE_REC_BUF_SIZE_IN_SAMPLES;
+
+    AudioBufferList engineBuffer;
+    engineBuffer.mNumberBuffers = 1; // Interleaved channels.
+    engineBuffer.mBuffers->mNumberChannels = _inDesiredFormat.mChannelsPerFrame;
+    engineBuffer.mBuffers->mDataByteSize = _inDesiredFormat.mBytesPerPacket
+        * noRecSamples;
+    engineBuffer.mBuffers->mData = recordBuffer;
+
+    err = AudioConverterFillComplexBuffer(_captureConverter, inConverterProc,
+                                          this, &size, &engineBuffer, NULL);
+    if (err != noErr)
+    {
+        if (err == 1)
+        {
+            // This is our own error.
+            return false;
+        } else
+        {
+            logCAMsg(kTraceError, kTraceAudioDevice, _id,
+                     "Error in AudioConverterFillComplexBuffer()",
+                     (const char *) &err);
+            return false;
+        }
+    }
+
+    // TODO(xians): what if the returned size is incorrect?
+    if (size == ENGINE_REC_BUF_SIZE_IN_SAMPLES)
+    {
+        WebRtc_UWord32 currentMicLevel(0);
+        WebRtc_UWord32 newMicLevel(0);
+        WebRtc_Word32 msecOnPlaySide;
+        WebRtc_Word32 msecOnRecordSide;
+
+        int32_t captureDelayUs = AtomicGet32(&_captureDelayUs);
+        int32_t renderDelayUs = AtomicGet32(&_renderDelayUs);
+
+        msecOnPlaySide = static_cast<WebRtc_Word32> (1e-3 * (renderDelayUs
+            + _renderLatencyUs) + 0.5);
+        msecOnRecordSide = static_cast<WebRtc_Word32> (1e-3 * (captureDelayUs
+            + _captureLatencyUs) + 0.5);
+
+        if (!_ptrAudioBuffer)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  capture AudioBuffer is invalid");
+            return false;
+        }
+
+        // store the recorded buffer (no action will be taken if the
+        // #recorded samples is not a full buffer)
+        _ptrAudioBuffer->SetRecordedBuffer((WebRtc_Word8*) &recordBuffer,
+                                           (WebRtc_UWord32) size);
+
+        if (AGC())
+        {
+            // store current mic level in the audio buffer if AGC is enabled
+            if (MicrophoneVolume(currentMicLevel) == 0)
+            {
+                // this call does not affect the actual microphone volume
+                _ptrAudioBuffer->SetCurrentMicLevel(currentMicLevel);
+            }
+        }
+
+        _ptrAudioBuffer->SetVQEData(msecOnPlaySide, msecOnRecordSide, 0);
+
+        // deliver recorded samples at specified sample rate, mic level etc.
+        // to the observer using callback
+        _ptrAudioBuffer->DeliverRecordedData();
+
+        if (AGC())
+        {
+            newMicLevel = _ptrAudioBuffer->NewMicLevel();
+            if (newMicLevel != 0)
+            {
+                // The VQE will only deliver non-zero microphone levels when
+                // a change is needed.
+                // Set this new mic level (received from the observer as return
+                // value in the callback).
+                WEBRTC_TRACE(kTraceStream, kTraceAudioDevice,
+                             _id, "  AGC change of volume: old=%u => new=%u",
+                             currentMicLevel, newMicLevel);
+                if (SetMicrophoneVolume(newMicLevel) == -1)
+                {
+                    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                                 "  the required modification of the microphone "
+                                 "volume failed");
+                }
+            }
+        }
+    }
+
+    return true;
+}
+
+} //  namespace webrtc
diff --git a/src/modules/audio_device/main/source/mac/audio_device_mac.h b/src/modules/audio_device/main/source/mac/audio_device_mac.h
new file mode 100644
index 0000000..5106153
--- /dev/null
+++ b/src/modules/audio_device/main/source/mac/audio_device_mac.h
@@ -0,0 +1,393 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_MAC_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_MAC_H
+
+#include "audio_device_generic.h"
+#include "critical_section_wrapper.h"
+#include "audio_mixer_manager_mac.h"
+
+#include <CoreAudio/CoreAudio.h>
+#include <AudioToolbox/AudioConverter.h>
+#include <mach/semaphore.h>
+
+struct PaUtilRingBuffer;
+
+namespace webrtc
+{
+class EventWrapper;
+class ThreadWrapper;
+
+const WebRtc_UWord32 N_REC_SAMPLES_PER_SEC = 48000;
+const WebRtc_UWord32 N_PLAY_SAMPLES_PER_SEC = 48000;
+
+const WebRtc_UWord32 N_REC_CHANNELS = 1; // default is mono recording
+const WebRtc_UWord32 N_PLAY_CHANNELS = 2; // default is stereo playout
+const WebRtc_UWord32 N_DEVICE_CHANNELS = 8;
+
+const WebRtc_UWord32 ENGINE_REC_BUF_SIZE_IN_SAMPLES = (N_REC_SAMPLES_PER_SEC
+    / 100);
+const WebRtc_UWord32 ENGINE_PLAY_BUF_SIZE_IN_SAMPLES = (N_PLAY_SAMPLES_PER_SEC
+    / 100);
+
+enum
+{
+    N_BLOCKS_IO = 2
+};
+enum
+{
+    N_BUFFERS_IN = 10
+};
+enum
+{
+    N_BUFFERS_OUT = 3
+}; // Must be at least N_BLOCKS_IO
+
+const WebRtc_UWord32 TIMER_PERIOD_MS = (2 * 10 * N_BLOCKS_IO * 1000000);
+
+const WebRtc_UWord32 REC_BUF_SIZE_IN_SAMPLES = (ENGINE_REC_BUF_SIZE_IN_SAMPLES
+    * N_DEVICE_CHANNELS * N_BUFFERS_IN);
+const WebRtc_UWord32 PLAY_BUF_SIZE_IN_SAMPLES =
+    (ENGINE_PLAY_BUF_SIZE_IN_SAMPLES * N_PLAY_CHANNELS * N_BUFFERS_OUT);
+
+class AudioDeviceMac: public AudioDeviceGeneric
+{
+public:
+    AudioDeviceMac(const WebRtc_Word32 id);
+    ~AudioDeviceMac();
+
+    // Retrieve the currently utilized audio layer
+    virtual WebRtc_Word32
+        ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const;
+
+    // Main initializaton and termination
+    virtual WebRtc_Word32 Init();
+    virtual WebRtc_Word32 Terminate();
+    virtual bool Initialized() const;
+
+    // Device enumeration
+    virtual WebRtc_Word16 PlayoutDevices();
+    virtual WebRtc_Word16 RecordingDevices();
+    virtual WebRtc_Word32 PlayoutDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize]);
+    virtual WebRtc_Word32 RecordingDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize]);
+
+    // Device selection
+    virtual WebRtc_Word32 SetPlayoutDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetPlayoutDevice(
+        AudioDeviceModule::WindowsDeviceType device);
+    virtual WebRtc_Word32 SetRecordingDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetRecordingDevice(
+        AudioDeviceModule::WindowsDeviceType device);
+
+    // Audio transport initialization
+    virtual WebRtc_Word32 PlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitPlayout();
+    virtual bool PlayoutIsInitialized() const;
+    virtual WebRtc_Word32 RecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitRecording();
+    virtual bool RecordingIsInitialized() const;
+
+    // Audio transport control
+    virtual WebRtc_Word32 StartPlayout();
+    virtual WebRtc_Word32 StopPlayout();
+    virtual bool Playing() const;
+    virtual WebRtc_Word32 StartRecording();
+    virtual WebRtc_Word32 StopRecording();
+    virtual bool Recording() const;
+
+    // Microphone Automatic Gain Control (AGC)
+    virtual WebRtc_Word32 SetAGC(bool enable);
+    virtual bool AGC() const;
+
+    // Volume control based on the Windows Wave API (Windows only)
+    virtual WebRtc_Word32 SetWaveOutVolume(WebRtc_UWord16 volumeLeft,
+                                           WebRtc_UWord16 volumeRight);
+    virtual WebRtc_Word32 WaveOutVolume(WebRtc_UWord16& volumeLeft,
+                                        WebRtc_UWord16& volumeRight) const;
+
+    // Audio mixer initialization
+    virtual WebRtc_Word32 SpeakerIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitSpeaker();
+    virtual bool SpeakerIsInitialized() const;
+    virtual WebRtc_Word32 MicrophoneIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitMicrophone();
+    virtual bool MicrophoneIsInitialized() const;
+
+    // Speaker volume controls
+    virtual WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
+
+    // Microphone volume controls
+    virtual WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32
+        MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const;
+
+    // Microphone mute control
+    virtual WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneMute(bool enable);
+    virtual WebRtc_Word32 MicrophoneMute(bool& enabled) const;
+
+    // Speaker mute control
+    virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerMute(bool enable);
+    virtual WebRtc_Word32 SpeakerMute(bool& enabled) const;
+
+    // Microphone boost control
+    virtual WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    virtual WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
+
+    // Stereo support
+    virtual WebRtc_Word32 StereoPlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoPlayout(bool enable);
+    virtual WebRtc_Word32 StereoPlayout(bool& enabled) const;
+    virtual WebRtc_Word32 StereoRecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoRecording(bool enable);
+    virtual WebRtc_Word32 StereoRecording(bool& enabled) const;
+
+    // Delay information and control
+    virtual WebRtc_Word32
+        SetPlayoutBuffer(const AudioDeviceModule::BufferType type,
+                         WebRtc_UWord16 sizeMS);
+    virtual WebRtc_Word32 PlayoutBuffer(AudioDeviceModule::BufferType& type,
+                                        WebRtc_UWord16& sizeMS) const;
+    virtual WebRtc_Word32 PlayoutDelay(WebRtc_UWord16& delayMS) const;
+    virtual WebRtc_Word32 RecordingDelay(WebRtc_UWord16& delayMS) const;
+
+    // CPU load
+    virtual WebRtc_Word32 CPULoad(WebRtc_UWord16& load) const;
+
+public:
+    virtual bool PlayoutWarning() const;
+    virtual bool PlayoutError() const;
+    virtual bool RecordingWarning() const;
+    virtual bool RecordingError() const;
+    virtual void ClearPlayoutWarning();
+    virtual void ClearPlayoutError();
+    virtual void ClearRecordingWarning();
+    virtual void ClearRecordingError();
+
+public:
+    virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
+
+private:
+    void Lock()
+    {
+        _critSect.Enter();
+    }
+    ;
+    void UnLock()
+    {
+        _critSect.Leave();
+    }
+    ;
+    WebRtc_Word32 Id()
+    {
+        return _id;
+    }
+
+    static void AtomicSet32(int32_t* theValue, int32_t newValue);
+    static int32_t AtomicGet32(int32_t* theValue);
+
+    static void logCAMsg(const TraceLevel level,
+                         const TraceModule module,
+                         const WebRtc_Word32 id, const char *msg,
+                         const char *err);
+
+    WebRtc_Word32 GetNumberDevices(const AudioObjectPropertyScope scope,
+                                   AudioDeviceID scopedDeviceIds[],
+                                   const WebRtc_UWord32 deviceListLength);
+
+    WebRtc_Word32 GetDeviceName(const AudioObjectPropertyScope scope,
+                                const WebRtc_UWord16 index, char* name);
+
+    WebRtc_Word32 InitDevice(WebRtc_UWord16 userDeviceIndex,
+                             AudioDeviceID& deviceId, bool isInput);
+
+    static OSStatus
+        objectListenerProc(AudioObjectID objectId, UInt32 numberAddresses,
+                           const AudioObjectPropertyAddress addresses[],
+                           void* clientData);
+
+    OSStatus
+        implObjectListenerProc(AudioObjectID objectId, UInt32 numberAddresses,
+                               const AudioObjectPropertyAddress addresses[]);
+
+    WebRtc_Word32 HandleDeviceChange();
+
+    WebRtc_Word32
+        HandleStreamFormatChange(AudioObjectID objectId,
+                                 AudioObjectPropertyAddress propertyAddress);
+
+    WebRtc_Word32
+        HandleDataSourceChange(AudioObjectID objectId,
+                               AudioObjectPropertyAddress propertyAddress);
+
+    WebRtc_Word32
+        HandleProcessorOverload(AudioObjectPropertyAddress propertyAddress);
+
+private:
+    static OSStatus deviceIOProc(AudioDeviceID device,
+                                 const AudioTimeStamp *now,
+                                 const AudioBufferList *inputData,
+                                 const AudioTimeStamp *inputTime,
+                                 AudioBufferList *outputData,
+                                 const AudioTimeStamp* outputTime,
+                                 void *clientData);
+
+    static OSStatus
+        outConverterProc(AudioConverterRef audioConverter,
+                         UInt32 *numberDataPackets, AudioBufferList *data,
+                         AudioStreamPacketDescription **dataPacketDescription,
+                         void *userData);
+
+    static OSStatus inDeviceIOProc(AudioDeviceID device,
+                                   const AudioTimeStamp *now,
+                                   const AudioBufferList *inputData,
+                                   const AudioTimeStamp *inputTime,
+                                   AudioBufferList *outputData,
+                                   const AudioTimeStamp *outputTime,
+                                   void *clientData);
+
+    static OSStatus
+        inConverterProc(AudioConverterRef audioConverter,
+                        UInt32 *numberDataPackets, AudioBufferList *data,
+                        AudioStreamPacketDescription **dataPacketDescription,
+                        void *inUserData);
+
+    OSStatus implDeviceIOProc(const AudioBufferList *inputData,
+                              const AudioTimeStamp *inputTime,
+                              AudioBufferList *outputData,
+                              const AudioTimeStamp *outputTime);
+
+    OSStatus implOutConverterProc(UInt32 *numberDataPackets,
+                                  AudioBufferList *data);
+
+    OSStatus implInDeviceIOProc(const AudioBufferList *inputData,
+                                const AudioTimeStamp *inputTime);
+
+    OSStatus implInConverterProc(UInt32 *numberDataPackets,
+                                 AudioBufferList *data);
+
+    static bool RunCapture(void*);
+    static bool RunRender(void*);
+    bool CaptureWorkerThread();
+    bool RenderWorkerThread();
+
+private:
+    AudioDeviceBuffer* _ptrAudioBuffer;
+
+    CriticalSectionWrapper& _critSect;
+
+    EventWrapper& _stopEventRec;
+    EventWrapper& _stopEvent;
+
+    ThreadWrapper* _captureWorkerThread;
+    ThreadWrapper* _renderWorkerThread;
+    WebRtc_UWord32 _captureWorkerThreadId;
+    WebRtc_UWord32 _renderWorkerThreadId;
+
+    WebRtc_Word32 _id;
+
+    AudioMixerManagerMac _mixerManager;
+
+    WebRtc_UWord16 _inputDeviceIndex;
+    WebRtc_UWord16 _outputDeviceIndex;
+    AudioDeviceID _inputDeviceID;
+    AudioDeviceID _outputDeviceID;
+#if __MAC_OS_X_VERSION_MAX_ALLOWED >= 1050
+    AudioDeviceIOProcID _inDeviceIOProcID;
+    AudioDeviceIOProcID _deviceIOProcID;
+#endif
+    bool _inputDeviceIsSpecified;
+    bool _outputDeviceIsSpecified;
+
+    WebRtc_UWord8 _recChannels;
+    WebRtc_UWord8 _playChannels;
+
+    Float32* _captureBufData;
+    SInt16* _renderBufData;
+
+    SInt16 _renderConvertData[PLAY_BUF_SIZE_IN_SAMPLES];
+
+    AudioDeviceModule::BufferType _playBufType;
+
+private:
+    bool _initialized;
+    bool _isShutDown;
+    bool _recording;
+    bool _playing;
+    bool _recIsInitialized;
+    bool _playIsInitialized;
+    bool _AGC;
+
+    // Atomically set varaibles
+    int32_t _renderDeviceIsAlive;
+    int32_t _captureDeviceIsAlive;
+
+    bool _twoDevices;
+    bool _doStop; // For play if not shared device or play+rec if shared device
+    bool _doStopRec; // For rec if not shared device
+    bool _macBookPro;
+    bool _macBookProPanRight;
+
+    AudioConverterRef _captureConverter;
+    AudioConverterRef _renderConverter;
+
+    AudioStreamBasicDescription _outStreamFormat;
+    AudioStreamBasicDescription _outDesiredFormat;
+    AudioStreamBasicDescription _inStreamFormat;
+    AudioStreamBasicDescription _inDesiredFormat;
+
+    WebRtc_UWord32 _captureLatencyUs;
+    WebRtc_UWord32 _renderLatencyUs;
+
+    // Atomically set variables
+    mutable int32_t _captureDelayUs;
+    mutable int32_t _renderDelayUs;
+
+    WebRtc_Word32 _renderDelayOffsetSamples;
+
+private:
+    WebRtc_UWord16 _playBufDelayFixed; // fixed playback delay
+
+    WebRtc_UWord16 _playWarning;
+    WebRtc_UWord16 _playError;
+    WebRtc_UWord16 _recWarning;
+    WebRtc_UWord16 _recError;
+
+    PaUtilRingBuffer* _paCaptureBuffer;
+    PaUtilRingBuffer* _paRenderBuffer;
+
+    semaphore_t _renderSemaphore;
+    semaphore_t _captureSemaphore;
+
+    WebRtc_UWord32 _captureBufSizeSamples;
+    WebRtc_UWord32 _renderBufSizeSamples;
+};
+
+} //  namespace webrtc
+
+#endif  // MODULES_AUDIO_DEVICE_MAIN_SOURCE_MAC_AUDIO_DEVICE_MAC_H_
diff --git a/src/modules/audio_device/main/source/mac/audio_device_utility_mac.cc b/src/modules/audio_device/main/source/mac/audio_device_utility_mac.cc
new file mode 100644
index 0000000..f59fd5b
--- /dev/null
+++ b/src/modules/audio_device/main/source/mac/audio_device_utility_mac.cc
@@ -0,0 +1,53 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_device_utility_mac.h"
+#include "audio_device_config.h"    // DEBUG_PRINT()
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+namespace webrtc
+{
+
+AudioDeviceUtilityMac::AudioDeviceUtilityMac(const WebRtc_Word32 id) :
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _id(id)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id,
+                 "%s created", __FUNCTION__);
+}
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceUtilityMac() - dtor
+// ----------------------------------------------------------------------------
+
+AudioDeviceUtilityMac::~AudioDeviceUtilityMac()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s destroyed", __FUNCTION__);
+    {
+        CriticalSectionScoped lock(&_critSect);
+
+        // free stuff here...
+    }
+
+    delete &_critSect;
+}
+
+WebRtc_Word32 AudioDeviceUtilityMac::Init()
+{
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id,
+                 "  OS info: %s", "OS X");
+
+    return 0;
+}
+
+} //  namespace webrtc
diff --git a/src/modules/audio_device/main/source/mac/audio_device_utility_mac.h b/src/modules/audio_device/main/source/mac/audio_device_utility_mac.h
new file mode 100644
index 0000000..4743e22
--- /dev/null
+++ b/src/modules/audio_device/main/source/mac/audio_device_utility_mac.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_MAC_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_MAC_H
+
+#include "audio_device_utility.h"
+#include "audio_device.h"
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+
+class AudioDeviceUtilityMac: public AudioDeviceUtility
+{
+public:
+    AudioDeviceUtilityMac(const WebRtc_Word32 id);
+    ~AudioDeviceUtilityMac();
+
+    virtual WebRtc_Word32 Init();
+
+private:
+    CriticalSectionWrapper& _critSect;
+    WebRtc_Word32 _id;
+};
+
+} //  namespace webrtc
+
+#endif  // MODULES_AUDIO_DEVICE_MAIN_SOURCE_MAC_AUDIO_DEVICE_UTILITY_MAC_H_
diff --git a/src/modules/audio_device/main/source/mac/audio_mixer_manager_mac.cc b/src/modules/audio_device/main/source/mac/audio_mixer_manager_mac.cc
new file mode 100644
index 0000000..bd53c57
--- /dev/null
+++ b/src/modules/audio_device/main/source/mac/audio_mixer_manager_mac.cc
@@ -0,0 +1,1167 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_mixer_manager_mac.h"
+#include "trace.h"
+
+#include <unistd.h>             // getpid()
+
+namespace webrtc {
+	
+#define WEBRTC_CA_RETURN_ON_ERR(expr)                                     \
+    do {                                                                \
+        err = expr;                                                     \
+        if (err != noErr) {                                             \
+            logCAMsg(kTraceError, kTraceAudioDevice, _id,    \
+                "Error in " #expr, (const char *)&err);                 \
+            return -1;                                                  \
+        }                                                               \
+    } while(0)
+
+#define WEBRTC_CA_LOG_ERR(expr)                                           \
+    do {                                                                \
+        err = expr;                                                     \
+        if (err != noErr) {                                             \
+            logCAMsg(kTraceError, kTraceAudioDevice, _id,    \
+                "Error in " #expr, (const char *)&err);                 \
+        }                                                               \
+    } while(0)
+
+#define WEBRTC_CA_LOG_WARN(expr)                                           \
+    do {                                                                 \
+        err = expr;                                                      \
+        if (err != noErr) {                                              \
+            logCAMsg(kTraceWarning, kTraceAudioDevice, _id,  \
+                "Error in " #expr, (const char *)&err);                  \
+        }                                                                \
+    } while(0)
+
+AudioMixerManagerMac::AudioMixerManagerMac(const WebRtc_Word32 id) :
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _id(id),
+    _inputDeviceID(kAudioObjectUnknown),
+    _outputDeviceID(kAudioObjectUnknown),
+    _noInputChannels(0),
+    _noOutputChannels(0)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s constructed", __FUNCTION__);
+}
+
+AudioMixerManagerMac::~AudioMixerManagerMac()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id,
+                 "%s destructed", __FUNCTION__);
+
+    Close();
+
+    delete &_critSect;
+}
+
+// ============================================================================
+//	                                PUBLIC METHODS
+// ============================================================================
+
+WebRtc_Word32 AudioMixerManagerMac::Close()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s",
+                 __FUNCTION__);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    CloseSpeaker();
+    CloseMicrophone();
+
+    return 0;
+
+}
+
+WebRtc_Word32 AudioMixerManagerMac::CloseSpeaker()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s",
+                 __FUNCTION__);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    _outputDeviceID = kAudioObjectUnknown;
+    _noOutputChannels = 0;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerMac::CloseMicrophone()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s",
+                 __FUNCTION__);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    _inputDeviceID = kAudioObjectUnknown;
+    _noInputChannels = 0;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerMac::OpenSpeaker(AudioDeviceID deviceID)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerMac::OpenSpeaker(id=%d)", deviceID);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    pid_t hogPid = -1;
+
+    _outputDeviceID = deviceID;
+
+    // Check which process, if any, has hogged the device. 
+    AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyHogMode,
+            kAudioDevicePropertyScopeOutput, 0 };
+
+    size = sizeof(hogPid);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
+            &propertyAddress, 0, NULL, &size, &hogPid));
+
+    if (hogPid == -1)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     " No process has hogged the input device");
+    }
+    // getpid() is apparently "always successful"
+    else if (hogPid == getpid())
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     " Our process has hogged the input device");
+    } else
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     " Another process (pid = %d) has hogged the input device",
+                     static_cast<int> (hogPid));
+
+        return -1;
+    }
+
+    // get number of channels from stream format
+    propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
+
+    // Get the stream format, to be able to read the number of channels.
+    AudioStreamBasicDescription streamFormat;
+    size = sizeof(AudioStreamBasicDescription);
+    memset(&streamFormat, 0, size);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
+            &propertyAddress, 0, NULL, &size, &streamFormat));
+
+    _noOutputChannels = streamFormat.mChannelsPerFrame;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerMac::OpenMicrophone(AudioDeviceID deviceID)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerMac::OpenMicrophone(id=%d)", deviceID);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    pid_t hogPid = -1;
+
+    _inputDeviceID = deviceID;
+
+    // Check which process, if any, has hogged the device. 
+    AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyHogMode,
+            kAudioDevicePropertyScopeInput, 0 };
+    size = sizeof(hogPid);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
+            &propertyAddress, 0, NULL, &size, &hogPid));
+    if (hogPid == -1)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     " No process has hogged the input device");
+    }
+    // getpid() is apparently "always successful"
+    else if (hogPid == getpid())
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                     " Our process has hogged the input device");
+    } else
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     " Another process (pid = %d) has hogged the input device",
+                     static_cast<int> (hogPid));
+
+        return -1;
+    }
+
+    // get number of channels from stream format
+    propertyAddress.mSelector = kAudioDevicePropertyStreamFormat;
+
+    // Get the stream format, to be able to read the number of channels.
+    AudioStreamBasicDescription streamFormat;
+    size = sizeof(AudioStreamBasicDescription);
+    memset(&streamFormat, 0, size);
+    WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
+            &propertyAddress, 0, NULL, &size, &streamFormat));
+
+    _noInputChannels = streamFormat.mChannelsPerFrame;
+
+    return 0;
+}
+
+bool AudioMixerManagerMac::SpeakerIsInitialized() const
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s",
+                 __FUNCTION__);
+
+    return (_outputDeviceID != kAudioObjectUnknown);
+}
+
+bool AudioMixerManagerMac::MicrophoneIsInitialized() const
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s",
+                 __FUNCTION__);
+
+    return (_inputDeviceID != kAudioObjectUnknown);
+}
+
+WebRtc_Word32 AudioMixerManagerMac::SetSpeakerVolume(WebRtc_UWord32 volume)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerMac::SetSpeakerVolume(volume=%u)", volume);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_outputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    bool success = false;
+
+    // volume range is 0.0 - 1.0, convert from 0 -255
+    const Float32 vol = (Float32)(volume / 255.0);
+
+    assert(vol <= 1.0 && vol >= 0.0);
+
+    // Does the capture device have a master volume control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress propertyAddress = {
+            kAudioDevicePropertyVolumeScalar, kAudioDevicePropertyScopeOutput,
+            0 };
+    Boolean isSettable = false;
+    err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
+                                        &isSettable);
+    if (err == noErr && isSettable)
+    {
+        size = sizeof(vol);
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_outputDeviceID,
+                &propertyAddress, 0, NULL, size, &vol));
+
+        return 0;
+    }
+
+    // Otherwise try to set each channel.
+    for (UInt32 i = 1; i <= _noOutputChannels; i++)
+    {
+        propertyAddress.mElement = i;
+        isSettable = false;
+        err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
+                                            &isSettable);
+        if (err == noErr && isSettable)
+        {
+            size = sizeof(vol);
+            WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_outputDeviceID,
+                    &propertyAddress, 0, NULL, size, &vol));
+        }
+        success = true;
+    }
+
+    if (!success)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     " Unable to set a volume on any output channel");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerMac::SpeakerVolume(WebRtc_UWord32& volume) const
+{
+
+    if (_outputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    unsigned int channels = 0;
+    Float32 channelVol = 0;
+    Float32 vol = 0;
+
+    // Does the device have a master volume control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress propertyAddress = {
+            kAudioDevicePropertyVolumeScalar, kAudioDevicePropertyScopeOutput,
+            0 };
+    Boolean hasProperty = AudioObjectHasProperty(_outputDeviceID,
+                                                 &propertyAddress);
+    if (hasProperty)
+    {
+        size = sizeof(vol);
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
+                &propertyAddress, 0, NULL, &size, &vol));
+
+        // vol 0.0 to 1.0 -> convert to 0 - 255
+        volume = static_cast<WebRtc_UWord32> (vol * 255 + 0.5);
+    } else
+    {
+        // Otherwise get the average volume across channels.
+        vol = 0;
+        for (UInt32 i = 1; i <= _noOutputChannels; i++)
+        {
+            channelVol = 0;
+            propertyAddress.mElement = i;
+            hasProperty = AudioObjectHasProperty(_outputDeviceID,
+                                                 &propertyAddress);
+            if (hasProperty)
+            {
+                size = sizeof(channelVol);
+                WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
+                        &propertyAddress, 0, NULL, &size, &channelVol));
+
+                vol += channelVol;
+                channels++;
+            }
+        }
+
+        if (channels == 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         " Unable to get a volume on any channel");
+            return -1;
+        }
+
+        assert(channels > 0);
+        // vol 0.0 to 1.0 -> convert to 0 - 255
+        volume = static_cast<WebRtc_UWord32> (255 * vol / channels + 0.5);
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerMac::SpeakerVolume() => vol=%i", vol);
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerMac::MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const
+{
+
+    if (_outputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    // volume range is 0.0 to 1.0
+    // we convert that to 0 - 255
+    maxVolume = 255;
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerMac::MinSpeakerVolume(WebRtc_UWord32& minVolume) const
+{
+
+    if (_outputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    // volume range is 0.0 to 1.0
+    // we convert that to 0 - 255
+    minVolume = 0;
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerMac::SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+
+    if (_outputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    // volume range is 0.0 to 1.0
+    // we convert that to 0 - 255
+    stepSize = 1;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerMac::SpeakerVolumeIsAvailable(bool& available)
+{
+    if (_outputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+
+    // Does the capture device have a master volume control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress propertyAddress = {
+            kAudioDevicePropertyVolumeScalar, kAudioDevicePropertyScopeOutput,
+            0 };
+    Boolean isSettable = false;
+    err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
+                                        &isSettable);
+    if (err == noErr && isSettable)
+    {
+        available = true;
+        return 0;
+    }
+
+    // Otherwise try to set each channel.
+    for (UInt32 i = 1; i <= _noOutputChannels; i++)
+    {
+        propertyAddress.mElement = i;
+        isSettable = false;
+        err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
+                                            &isSettable);
+        if (err != noErr || !isSettable)
+        {
+            available = false;
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         " Volume cannot be set for output channel %d, err=%d",
+                         i, err);
+            return -1;
+        }
+    }
+
+    available = true;
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerMac::SpeakerMuteIsAvailable(bool& available)
+{
+    if (_outputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+
+    // Does the capture device have a master mute control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyMute,
+            kAudioDevicePropertyScopeOutput, 0 };
+    Boolean isSettable = false;
+    err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
+                                        &isSettable);
+    if (err == noErr && isSettable)
+    {
+        available = true;
+        return 0;
+    }
+
+    // Otherwise try to set each channel.
+    for (UInt32 i = 1; i <= _noOutputChannels; i++)
+    {
+        propertyAddress.mElement = i;
+        isSettable = false;
+        err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
+                                            &isSettable);
+        if (err != noErr || !isSettable)
+        {
+            available = false;
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         " Mute cannot be set for output channel %d, err=%d",
+                         i, err);
+            return -1;
+        }
+    }
+
+    available = true;
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerMac::SetSpeakerMute(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerMac::SetSpeakerMute(enable=%u)", enable);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_outputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    UInt32 mute = enable ? 1 : 0;
+    bool success = false;
+
+    // Does the render device have a master mute control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyMute,
+            kAudioDevicePropertyScopeOutput, 0 };
+    Boolean isSettable = false;
+    err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
+                                        &isSettable);
+    if (err == noErr && isSettable)
+    {
+        size = sizeof(mute);
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_outputDeviceID,
+                &propertyAddress, 0, NULL, size, &mute));
+
+        return 0;
+    }
+
+    // Otherwise try to set each channel.
+    for (UInt32 i = 1; i <= _noOutputChannels; i++)
+    {
+        propertyAddress.mElement = i;
+        isSettable = false;
+        err = AudioObjectIsPropertySettable(_outputDeviceID, &propertyAddress,
+                                            &isSettable);
+        if (err == noErr && isSettable)
+        {
+            size = sizeof(mute);
+            WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_outputDeviceID,
+                    &propertyAddress, 0, NULL, size, &mute));
+        }
+        success = true;
+    }
+
+    if (!success)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     " Unable to set mute on any input channel");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerMac::SpeakerMute(bool& enabled) const
+{
+
+    if (_outputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    unsigned int channels = 0;
+    UInt32 channelMuted = 0;
+    UInt32 muted = 0;
+
+    // Does the device have a master volume control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyMute,
+            kAudioDevicePropertyScopeOutput, 0 };
+    Boolean hasProperty = AudioObjectHasProperty(_outputDeviceID,
+                                                 &propertyAddress);
+    if (hasProperty)
+    {
+        size = sizeof(muted);
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
+                &propertyAddress, 0, NULL, &size, &muted));
+
+        // 1 means muted
+        enabled = static_cast<bool> (muted);
+    } else
+    {
+        // Otherwise check if all channels are muted.
+        for (UInt32 i = 1; i <= _noOutputChannels; i++)
+        {
+            muted = 0;
+            propertyAddress.mElement = i;
+            hasProperty = AudioObjectHasProperty(_outputDeviceID,
+                                                 &propertyAddress);
+            if (hasProperty)
+            {
+                size = sizeof(channelMuted);
+                WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_outputDeviceID,
+                        &propertyAddress, 0, NULL, &size, &channelMuted));
+
+                muted = (muted && channelMuted);
+                channels++;
+            }
+        }
+
+        if (channels == 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         " Unable to get mute for any channel");
+            return -1;
+        }
+
+        assert(channels > 0);
+        // 1 means muted
+        enabled = static_cast<bool> (muted);
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerMac::SpeakerMute() => enabled=%d, enabled");
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerMac::StereoPlayoutIsAvailable(bool& available)
+{
+    if (_outputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    available = (_noOutputChannels == 2);
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerMac::StereoRecordingIsAvailable(bool& available)
+{
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    available = (_noInputChannels == 2);
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerMac::MicrophoneMuteIsAvailable(bool& available)
+{
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+
+    // Does the capture device have a master mute control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyMute,
+            kAudioDevicePropertyScopeInput, 0 };
+    Boolean isSettable = false;
+    err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
+                                        &isSettable);
+    if (err == noErr && isSettable)
+    {
+        available = true;
+        return 0;
+    }
+
+    // Otherwise try to set each channel.
+    for (UInt32 i = 1; i <= _noInputChannels; i++)
+    {
+        propertyAddress.mElement = i;
+        isSettable = false;
+        err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
+                                            &isSettable);
+        if (err != noErr || !isSettable)
+        {
+            available = false;
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         " Mute cannot be set for output channel %d, err=%d",
+                         i, err);
+            return -1;
+        }
+    }
+
+    available = true;
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerMac::SetMicrophoneMute(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerMac::SetMicrophoneMute(enable=%u)", enable);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    UInt32 mute = enable ? 1 : 0;
+    bool success = false;
+
+    // Does the capture device have a master mute control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyMute,
+            kAudioDevicePropertyScopeInput, 0 };
+    Boolean isSettable = false;
+    err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
+                                        &isSettable);
+    if (err == noErr && isSettable)
+    {
+        size = sizeof(mute);
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_inputDeviceID,
+                &propertyAddress, 0, NULL, size, &mute));
+
+        return 0;
+    }
+
+    // Otherwise try to set each channel.
+    for (UInt32 i = 1; i <= _noInputChannels; i++)
+    {
+        propertyAddress.mElement = i;
+        isSettable = false;
+        err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
+                                            &isSettable);
+        if (err == noErr && isSettable)
+        {
+            size = sizeof(mute);
+            WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_inputDeviceID,
+                    &propertyAddress, 0, NULL, size, &mute));
+        }
+        success = true;
+    }
+
+    if (!success)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     " Unable to set mute on any input channel");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerMac::MicrophoneMute(bool& enabled) const
+{
+
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    unsigned int channels = 0;
+    UInt32 channelMuted = 0;
+    UInt32 muted = 0;
+
+    // Does the device have a master volume control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyMute,
+            kAudioDevicePropertyScopeInput, 0 };
+    Boolean hasProperty = AudioObjectHasProperty(_inputDeviceID,
+                                                 &propertyAddress);
+    if (hasProperty)
+    {
+        size = sizeof(muted);
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
+                &propertyAddress, 0, NULL, &size, &muted));
+
+        // 1 means muted
+        enabled = static_cast<bool> (muted);
+    } else
+    {
+        // Otherwise check if all channels are muted.
+        for (UInt32 i = 1; i <= _noInputChannels; i++)
+        {
+            muted = 0;
+            propertyAddress.mElement = i;
+            hasProperty = AudioObjectHasProperty(_inputDeviceID,
+                                                 &propertyAddress);
+            if (hasProperty)
+            {
+                size = sizeof(channelMuted);
+                WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
+                        &propertyAddress, 0, NULL, &size, &channelMuted));
+
+                muted = (muted && channelMuted);
+                channels++;
+            }
+        }
+
+        if (channels == 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         " Unable to get mute for any channel");
+            return -1;
+        }
+
+        assert(channels > 0);
+        // 1 means muted
+        enabled = static_cast<bool> (muted);
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerMac::MicrophoneMute() => enabled=%d",
+                 enabled);
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerMac::MicrophoneBoostIsAvailable(bool& available)
+{
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    available = false; // No AudioObjectPropertySelector value for Mic Boost
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerMac::SetMicrophoneBoost(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerMac::SetMicrophoneBoost(enable=%u)", enable);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    // Ensure that the selected microphone has a valid boost control.
+    bool available(false);
+    MicrophoneBoostIsAvailable(available);
+    if (!available)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  it is not possible to enable microphone boost");
+        return -1;
+    }
+
+    // It is assumed that the call above fails!
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerMac::MicrophoneBoost(bool& enabled) const
+{
+
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    // Microphone boost cannot be enabled on this platform!
+    enabled = false;
+
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerMac::MicrophoneVolumeIsAvailable(bool& available)
+{
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+
+    // Does the capture device have a master volume control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress
+        propertyAddress = { kAudioDevicePropertyVolumeScalar,
+                kAudioDevicePropertyScopeInput, 0 };
+    Boolean isSettable = false;
+    err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
+                                        &isSettable);
+    if (err == noErr && isSettable)
+    {
+        available = true;
+        return 0;
+    }
+
+    // Otherwise try to set each channel.
+    for (UInt32 i = 1; i <= _noInputChannels; i++)
+    {
+        propertyAddress.mElement = i;
+        isSettable = false;
+        err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
+                                            &isSettable);
+        if (err != noErr || !isSettable)
+        {
+            available = false;
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         " Volume cannot be set for input channel %d, err=%d",
+                         i, err);
+            return -1;
+        }
+    }
+
+    available = true;
+    return 0;
+}
+
+WebRtc_Word32 AudioMixerManagerMac::SetMicrophoneVolume(WebRtc_UWord32 volume)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "AudioMixerManagerMac::SetMicrophoneVolume(volume=%u)", volume);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    bool success = false;
+
+    // volume range is 0.0 - 1.0, convert from 0 - 255
+    const Float32 vol = (Float32)(volume / 255.0);
+
+    assert(vol <= 1.0 && vol >= 0.0);
+
+    // Does the capture device have a master volume control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress
+        propertyAddress = { kAudioDevicePropertyVolumeScalar,
+                kAudioDevicePropertyScopeInput, 0 };
+    Boolean isSettable = false;
+    err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
+                                        &isSettable);
+    if (err == noErr && isSettable)
+    {
+        size = sizeof(vol);
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_inputDeviceID,
+                &propertyAddress, 0, NULL, size, &vol));
+
+        return 0;
+    }
+
+    // Otherwise try to set each channel.
+    for (UInt32 i = 1; i <= _noInputChannels; i++)
+    {
+        propertyAddress.mElement = i;
+        isSettable = false;
+        err = AudioObjectIsPropertySettable(_inputDeviceID, &propertyAddress,
+                                            &isSettable);
+        if (err == noErr && isSettable)
+        {
+            size = sizeof(vol);
+            WEBRTC_CA_RETURN_ON_ERR(AudioObjectSetPropertyData(_inputDeviceID,
+                    &propertyAddress, 0, NULL, size, &vol));
+        }
+        success = true;
+    }
+
+    if (!success)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     " Unable to set a level on any input channel");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerMac::MicrophoneVolume(WebRtc_UWord32& volume) const
+{
+
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    OSStatus err = noErr;
+    UInt32 size = 0;
+    unsigned int channels = 0;
+    Float32 channelVol = 0;
+    Float32 volFloat32 = 0;
+
+    // Does the device have a master volume control?
+    // If so, use it exclusively.
+    AudioObjectPropertyAddress
+        propertyAddress = { kAudioDevicePropertyVolumeScalar,
+                kAudioDevicePropertyScopeInput, 0 };
+    Boolean hasProperty = AudioObjectHasProperty(_inputDeviceID,
+                                                 &propertyAddress);
+    if (hasProperty)
+    {
+        size = sizeof(volFloat32);
+        WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
+                &propertyAddress, 0, NULL, &size, &volFloat32));
+
+        // vol 0.0 to 1.0 -> convert to 0 - 255
+        volume = static_cast<WebRtc_UWord32> (volFloat32 * 255 + 0.5);
+    } else
+    {
+        // Otherwise get the average volume across channels.
+        volFloat32 = 0;
+        for (UInt32 i = 1; i <= _noInputChannels; i++)
+        {
+            channelVol = 0;
+            propertyAddress.mElement = i;
+            hasProperty = AudioObjectHasProperty(_inputDeviceID,
+                                                 &propertyAddress);
+            if (hasProperty)
+            {
+                size = sizeof(channelVol);
+                WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData(_inputDeviceID,
+                        &propertyAddress, 0, NULL, &size, &channelVol));
+
+                volFloat32 += channelVol;
+                channels++;
+            }
+        }
+
+        if (channels == 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         " Unable to get a level on any channel");
+            return -1;
+        }
+
+        assert(channels > 0);
+        // vol 0.0 to 1.0 -> convert to 0 - 255
+        volume = static_cast<WebRtc_UWord32> 
+            (255 * volFloat32 / channels + 0.5);
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "     AudioMixerManagerMac::MicrophoneVolume() => vol=%u",
+                 volume);
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerMac::MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const
+{
+
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    // volume range is 0.0 to 1.0
+    // we convert that to 0 - 255
+    maxVolume = 255;
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerMac::MinMicrophoneVolume(WebRtc_UWord32& minVolume) const
+{
+
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    // volume range is 0.0 to 1.0
+    // we convert that to 0 - 10
+    minVolume = 0;
+
+    return 0;
+}
+
+WebRtc_Word32
+AudioMixerManagerMac::MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+
+    if (_inputDeviceID == kAudioObjectUnknown)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                     "  device ID has not been set");
+        return -1;
+    }
+
+    // volume range is 0.0 to 1.0
+    // we convert that to 0 - 10
+    stepSize = 1;
+
+    return 0;
+}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+// CoreAudio errors are best interpreted as four character strings.
+void AudioMixerManagerMac::logCAMsg(const TraceLevel level,
+                                    const TraceModule module,
+                                    const WebRtc_Word32 id, const char *msg,
+                                    const char *err)
+{
+    assert(msg != NULL);
+    assert(err != NULL);
+
+#ifdef WEBRTC_BIG_ENDIAN
+    WEBRTC_TRACE(level, module, id, "%s: %.4s", msg, err);
+#else
+    // We need to flip the characters in this case.
+    WEBRTC_TRACE(level, module, id, "%s: %.1s%.1s%.1s%.1s", msg, err + 3, err
+        + 2, err + 1, err);
+#endif
+}
+
+} // namespace webrtc
+// EOF
diff --git a/src/modules/audio_device/main/source/mac/audio_mixer_manager_mac.h b/src/modules/audio_device/main/source/mac/audio_mixer_manager_mac.h
new file mode 100644
index 0000000..7209f91
--- /dev/null
+++ b/src/modules/audio_device/main/source/mac/audio_mixer_manager_mac.h
@@ -0,0 +1,80 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_MIXER_MANAGER_MAC_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_MIXER_MANAGER_MAC_H
+
+#include "typedefs.h"
+#include "audio_device.h"
+#include "critical_section_wrapper.h"
+
+#include <CoreAudio/CoreAudio.h>
+
+namespace webrtc {
+	
+class AudioMixerManagerMac
+{
+public:
+    WebRtc_Word32 OpenSpeaker(AudioDeviceID deviceID);
+    WebRtc_Word32 OpenMicrophone(AudioDeviceID deviceID);
+    WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
+    WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
+    WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
+    WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
+    WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
+    WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
+    WebRtc_Word32 SetSpeakerMute(bool enable);
+    WebRtc_Word32 SpeakerMute(bool& enabled) const;
+    WebRtc_Word32 StereoPlayoutIsAvailable(bool& available);
+    WebRtc_Word32 StereoRecordingIsAvailable(bool& available);
+    WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneMute(bool enable);
+    WebRtc_Word32 MicrophoneMute(bool& enabled) const;
+    WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
+    WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
+    WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
+    WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
+    WebRtc_Word32 MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const;
+    WebRtc_Word32 Close();
+    WebRtc_Word32 CloseSpeaker();
+    WebRtc_Word32 CloseMicrophone();
+    bool SpeakerIsInitialized() const;
+    bool MicrophoneIsInitialized() const;
+
+public:
+    AudioMixerManagerMac(const WebRtc_Word32 id);
+    ~AudioMixerManagerMac();
+
+private:
+    static void logCAMsg(const TraceLevel level,
+                         const TraceModule module,
+                         const WebRtc_Word32 id, const char *msg,
+                         const char *err);
+
+private:
+    CriticalSectionWrapper& _critSect;
+    WebRtc_Word32 _id;
+
+    AudioDeviceID _inputDeviceID;
+    AudioDeviceID _outputDeviceID;
+
+    WebRtc_UWord16 _noInputChannels;
+    WebRtc_UWord16 _noOutputChannels;
+
+};
+	
+} //namespace webrtc
+
+#endif  // AUDIO_MIXER_MAC_H
diff --git a/src/modules/audio_device/main/source/mac/portaudio/pa_memorybarrier.h b/src/modules/audio_device/main/source/mac/portaudio/pa_memorybarrier.h
new file mode 100644
index 0000000..f689622
--- /dev/null
+++ b/src/modules/audio_device/main/source/mac/portaudio/pa_memorybarrier.h
@@ -0,0 +1,127 @@
+/*
+ * $Id: pa_memorybarrier.h 1240 2007-07-17 13:05:07Z bjornroche $
+ * Portable Audio I/O Library
+ * Memory barrier utilities
+ *
+ * Author: Bjorn Roche, XO Audio, LLC
+ *
+ * This program uses the PortAudio Portable Audio Library.
+ * For more information see: http://www.portaudio.com
+ * Copyright (c) 1999-2000 Ross Bencina and Phil Burk
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining
+ * a copy of this software and associated documentation files
+ * (the "Software"), to deal in the Software without restriction,
+ * including without limitation the rights to use, copy, modify, merge,
+ * publish, distribute, sublicense, and/or sell copies of the Software,
+ * and to permit persons to whom the Software is furnished to do so,
+ * subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
+ * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
+ * CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/*
+ * The text above constitutes the entire PortAudio license; however, 
+ * the PortAudio community also makes the following non-binding requests:
+ *
+ * Any person wishing to distribute modifications to the Software is
+ * requested to send the modifications to the original developer so that
+ * they can be incorporated into the canonical version. It is also 
+ * requested that these non-binding requests be included along with the 
+ * license above.
+ */
+
+/**
+ @file pa_memorybarrier.h
+ @ingroup common_src
+*/
+
+/****************
+ * Some memory barrier primitives based on the system.
+ * right now only OS X, FreeBSD, and Linux are supported. In addition to providing
+ * memory barriers, these functions should ensure that data cached in registers
+ * is written out to cache where it can be snooped by other CPUs. (ie, the volatile
+ * keyword should not be required)
+ *
+ * the primitives that must be defined are:
+ *
+ * PaUtil_FullMemoryBarrier()
+ * PaUtil_ReadMemoryBarrier()
+ * PaUtil_WriteMemoryBarrier()
+ *
+ ****************/
+
+#if defined(__APPLE__)
+#   include <libkern/OSAtomic.h>
+    /* Here are the memory barrier functions. Mac OS X only provides
+       full memory barriers, so the three types of barriers are the same,
+       however, these barriers are superior to compiler-based ones. */
+#   define PaUtil_FullMemoryBarrier()  OSMemoryBarrier()
+#   define PaUtil_ReadMemoryBarrier()  OSMemoryBarrier()
+#   define PaUtil_WriteMemoryBarrier() OSMemoryBarrier()
+#elif defined(__GNUC__)
+    /* GCC >= 4.1 has built-in intrinsics. We'll use those */
+#   if (__GNUC__ > 4) || (__GNUC__ == 4 && __GNUC_MINOR__ >= 1)
+#      define PaUtil_FullMemoryBarrier()  __sync_synchronize()
+#      define PaUtil_ReadMemoryBarrier()  __sync_synchronize()
+#      define PaUtil_WriteMemoryBarrier() __sync_synchronize()
+    /* as a fallback, GCC understands volatile asm and "memory" to mean it
+     * should not reorder memory read/writes */
+    /* Note that it is not clear that any compiler actually defines __PPC__,
+     * it can probably removed safely. */
+#   elif defined( __ppc__ ) || defined( __powerpc__) || defined( __PPC__ )
+#      define PaUtil_FullMemoryBarrier()  asm volatile("sync":::"memory")
+#      define PaUtil_ReadMemoryBarrier()  asm volatile("sync":::"memory")
+#      define PaUtil_WriteMemoryBarrier() asm volatile("sync":::"memory")
+#   elif defined( __i386__ ) || defined( __i486__ ) || defined( __i586__ ) || \
+         defined( __i686__ ) || defined( __x86_64__ )
+#      define PaUtil_FullMemoryBarrier()  asm volatile("mfence":::"memory")
+#      define PaUtil_ReadMemoryBarrier()  asm volatile("lfence":::"memory")
+#      define PaUtil_WriteMemoryBarrier() asm volatile("sfence":::"memory")
+#   else
+#      ifdef ALLOW_SMP_DANGERS
+#         warning Memory barriers not defined on this system or system unknown
+#         warning For SMP safety, you should fix this.
+#         define PaUtil_FullMemoryBarrier()
+#         define PaUtil_ReadMemoryBarrier()
+#         define PaUtil_WriteMemoryBarrier()
+#      else
+#         error Memory barriers are not defined on this system. You can still compile by defining ALLOW_SMP_DANGERS, but SMP safety will not be guaranteed.
+#      endif
+#   endif
+#elif (_MSC_VER >= 1400) && !defined(_WIN32_WCE)
+#   include <intrin.h>
+#   pragma intrinsic(_ReadWriteBarrier)
+#   pragma intrinsic(_ReadBarrier)
+#   pragma intrinsic(_WriteBarrier)
+#   define PaUtil_FullMemoryBarrier()  _ReadWriteBarrier()
+#   define PaUtil_ReadMemoryBarrier()  _ReadBarrier()
+#   define PaUtil_WriteMemoryBarrier() _WriteBarrier()
+#elif defined(_WIN32_WCE)
+#   define PaUtil_FullMemoryBarrier()
+#   define PaUtil_ReadMemoryBarrier()
+#   define PaUtil_WriteMemoryBarrier()
+#elif defined(_MSC_VER) || defined(__BORLANDC__)
+#   define PaUtil_FullMemoryBarrier()  _asm { lock add    [esp], 0 }
+#   define PaUtil_ReadMemoryBarrier()  _asm { lock add    [esp], 0 }
+#   define PaUtil_WriteMemoryBarrier() _asm { lock add    [esp], 0 }
+#else
+#   ifdef ALLOW_SMP_DANGERS
+#      warning Memory barriers not defined on this system or system unknown
+#      warning For SMP safety, you should fix this.
+#      define PaUtil_FullMemoryBarrier()
+#      define PaUtil_ReadMemoryBarrier()
+#      define PaUtil_WriteMemoryBarrier()
+#   else
+#      error Memory barriers are not defined on this system. You can still compile by defining ALLOW_SMP_DANGERS, but SMP safety will not be guaranteed.
+#   endif
+#endif
diff --git a/src/modules/audio_device/main/source/mac/portaudio/pa_ringbuffer.c b/src/modules/audio_device/main/source/mac/portaudio/pa_ringbuffer.c
new file mode 100644
index 0000000..310d719
--- /dev/null
+++ b/src/modules/audio_device/main/source/mac/portaudio/pa_ringbuffer.c
@@ -0,0 +1,227 @@
+/*
+ * $Id: pa_ringbuffer.c 1421 2009-11-18 16:09:05Z bjornroche $
+ * Portable Audio I/O Library
+ * Ring Buffer utility.
+ *
+ * Author: Phil Burk, http://www.softsynth.com
+ * modified for SMP safety on Mac OS X by Bjorn Roche
+ * modified for SMP safety on Linux by Leland Lucius
+ * also, allowed for const where possible
+ * modified for multiple-byte-sized data elements by Sven Fischer 
+ *
+ * Note that this is safe only for a single-thread reader and a
+ * single-thread writer.
+ *
+ * This program uses the PortAudio Portable Audio Library.
+ * For more information see: http://www.portaudio.com
+ * Copyright (c) 1999-2000 Ross Bencina and Phil Burk
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining
+ * a copy of this software and associated documentation files
+ * (the "Software"), to deal in the Software without restriction,
+ * including without limitation the rights to use, copy, modify, merge,
+ * publish, distribute, sublicense, and/or sell copies of the Software,
+ * and to permit persons to whom the Software is furnished to do so,
+ * subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
+ * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
+ * CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/*
+ * The text above constitutes the entire PortAudio license; however, 
+ * the PortAudio community also makes the following non-binding requests:
+ *
+ * Any person wishing to distribute modifications to the Software is
+ * requested to send the modifications to the original developer so that
+ * they can be incorporated into the canonical version. It is also 
+ * requested that these non-binding requests be included along with the 
+ * license above.
+ */
+
+/**
+ @file
+ @ingroup common_src
+*/
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <math.h>
+#include "pa_ringbuffer.h"
+#include <string.h>
+#include "pa_memorybarrier.h"
+
+/***************************************************************************
+ * Initialize FIFO.
+ * elementCount must be power of 2, returns -1 if not.
+ */
+ring_buffer_size_t PaUtil_InitializeRingBuffer( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementSizeBytes, ring_buffer_size_t elementCount, void *dataPtr )
+{
+    if( ((elementCount-1) & elementCount) != 0) return -1; /* Not Power of two. */
+    rbuf->bufferSize = elementCount;
+    rbuf->buffer = (char *)dataPtr;
+    PaUtil_FlushRingBuffer( rbuf );
+    rbuf->bigMask = (elementCount*2)-1;
+    rbuf->smallMask = (elementCount)-1;
+    rbuf->elementSizeBytes = elementSizeBytes;
+    return 0;
+}
+
+/***************************************************************************
+** Return number of elements available for reading. */
+ring_buffer_size_t PaUtil_GetRingBufferReadAvailable( PaUtilRingBuffer *rbuf )
+{
+    PaUtil_ReadMemoryBarrier();
+    return ( (rbuf->writeIndex - rbuf->readIndex) & rbuf->bigMask );
+}
+/***************************************************************************
+** Return number of elements available for writing. */
+ring_buffer_size_t PaUtil_GetRingBufferWriteAvailable( PaUtilRingBuffer *rbuf )
+{
+    /* Since we are calling PaUtil_GetRingBufferReadAvailable, we don't need an aditional MB */
+    return ( rbuf->bufferSize - PaUtil_GetRingBufferReadAvailable(rbuf));
+}
+
+/***************************************************************************
+** Clear buffer. Should only be called when buffer is NOT being read. */
+void PaUtil_FlushRingBuffer( PaUtilRingBuffer *rbuf )
+{
+    rbuf->writeIndex = rbuf->readIndex = 0;
+}
+
+/***************************************************************************
+** Get address of region(s) to which we can write data.
+** If the region is contiguous, size2 will be zero.
+** If non-contiguous, size2 will be the size of second region.
+** Returns room available to be written or elementCount, whichever is smaller.
+*/
+ring_buffer_size_t PaUtil_GetRingBufferWriteRegions( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementCount,
+                                       void **dataPtr1, ring_buffer_size_t *sizePtr1,
+                                       void **dataPtr2, ring_buffer_size_t *sizePtr2 )
+{
+    ring_buffer_size_t   index;
+    ring_buffer_size_t   available = PaUtil_GetRingBufferWriteAvailable( rbuf );
+    if( elementCount > available ) elementCount = available;
+    /* Check to see if write is not contiguous. */
+    index = rbuf->writeIndex & rbuf->smallMask;
+    if( (index + elementCount) > rbuf->bufferSize )
+    {
+        /* Write data in two blocks that wrap the buffer. */
+        ring_buffer_size_t   firstHalf = rbuf->bufferSize - index;
+        *dataPtr1 = &rbuf->buffer[index*rbuf->elementSizeBytes];
+        *sizePtr1 = firstHalf;
+        *dataPtr2 = &rbuf->buffer[0];
+        *sizePtr2 = elementCount - firstHalf;
+    }
+    else
+    {
+        *dataPtr1 = &rbuf->buffer[index*rbuf->elementSizeBytes];
+        *sizePtr1 = elementCount;
+        *dataPtr2 = NULL;
+        *sizePtr2 = 0;
+    }
+    return elementCount;
+}
+
+
+/***************************************************************************
+*/
+ring_buffer_size_t PaUtil_AdvanceRingBufferWriteIndex( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementCount )
+{
+    /* we need to ensure that previous writes are seen before we update the write index */
+    PaUtil_WriteMemoryBarrier();
+    return rbuf->writeIndex = (rbuf->writeIndex + elementCount) & rbuf->bigMask;
+}
+
+/***************************************************************************
+** Get address of region(s) from which we can read data.
+** If the region is contiguous, size2 will be zero.
+** If non-contiguous, size2 will be the size of second region.
+** Returns room available to be written or elementCount, whichever is smaller.
+*/
+ring_buffer_size_t PaUtil_GetRingBufferReadRegions( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementCount,
+                                void **dataPtr1, ring_buffer_size_t *sizePtr1,
+                                void **dataPtr2, ring_buffer_size_t *sizePtr2 )
+{
+    ring_buffer_size_t   index;
+    ring_buffer_size_t   available = PaUtil_GetRingBufferReadAvailable( rbuf );
+    if( elementCount > available ) elementCount = available;
+    /* Check to see if read is not contiguous. */
+    index = rbuf->readIndex & rbuf->smallMask;
+    if( (index + elementCount) > rbuf->bufferSize )
+    {
+        /* Write data in two blocks that wrap the buffer. */
+        ring_buffer_size_t firstHalf = rbuf->bufferSize - index;
+        *dataPtr1 = &rbuf->buffer[index*rbuf->elementSizeBytes];
+        *sizePtr1 = firstHalf;
+        *dataPtr2 = &rbuf->buffer[0];
+        *sizePtr2 = elementCount - firstHalf;
+    }
+    else
+    {
+        *dataPtr1 = &rbuf->buffer[index*rbuf->elementSizeBytes];
+        *sizePtr1 = elementCount;
+        *dataPtr2 = NULL;
+        *sizePtr2 = 0;
+    }
+    return elementCount;
+}
+/***************************************************************************
+*/
+ring_buffer_size_t PaUtil_AdvanceRingBufferReadIndex( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementCount )
+{
+    /* we need to ensure that previous writes are always seen before updating the index. */
+    PaUtil_WriteMemoryBarrier();
+    return rbuf->readIndex = (rbuf->readIndex + elementCount) & rbuf->bigMask;
+}
+
+/***************************************************************************
+** Return elements written. */
+ring_buffer_size_t PaUtil_WriteRingBuffer( PaUtilRingBuffer *rbuf, const void *data, ring_buffer_size_t elementCount )
+{
+    ring_buffer_size_t size1, size2, numWritten;
+    void *data1, *data2;
+    numWritten = PaUtil_GetRingBufferWriteRegions( rbuf, elementCount, &data1, &size1, &data2, &size2 );
+    if( size2 > 0 )
+    {
+
+        memcpy( data1, data, size1*rbuf->elementSizeBytes );
+        data = ((char *)data) + size1*rbuf->elementSizeBytes;
+        memcpy( data2, data, size2*rbuf->elementSizeBytes );
+    }
+    else
+    {
+        memcpy( data1, data, size1*rbuf->elementSizeBytes );
+    }
+    PaUtil_AdvanceRingBufferWriteIndex( rbuf, numWritten );
+    return numWritten;
+}
+
+/***************************************************************************
+** Return elements read. */
+ring_buffer_size_t PaUtil_ReadRingBuffer( PaUtilRingBuffer *rbuf, void *data, ring_buffer_size_t elementCount )
+{
+    ring_buffer_size_t size1, size2, numRead;
+    void *data1, *data2;
+    numRead = PaUtil_GetRingBufferReadRegions( rbuf, elementCount, &data1, &size1, &data2, &size2 );
+    if( size2 > 0 )
+    {
+        memcpy( data, data1, size1*rbuf->elementSizeBytes );
+        data = ((char *)data) + size1*rbuf->elementSizeBytes;
+        memcpy( data, data2, size2*rbuf->elementSizeBytes );
+    }
+    else
+    {
+        memcpy( data, data1, size1*rbuf->elementSizeBytes );
+    }
+    PaUtil_AdvanceRingBufferReadIndex( rbuf, numRead );
+    return numRead;
+}
diff --git a/src/modules/audio_device/main/source/mac/portaudio/pa_ringbuffer.h b/src/modules/audio_device/main/source/mac/portaudio/pa_ringbuffer.h
new file mode 100644
index 0000000..393f6f8
--- /dev/null
+++ b/src/modules/audio_device/main/source/mac/portaudio/pa_ringbuffer.h
@@ -0,0 +1,233 @@
+#ifndef WEBRTC_AUDIO_DEVICE_PA_RINGBUFFER_H
+#define WEBRTC_AUDIO_DEVICE_PA_RINGBUFFER_H
+/*
+ * $Id: pa_ringbuffer.h 1421 2009-11-18 16:09:05Z bjornroche $
+ * Portable Audio I/O Library
+ * Ring Buffer utility.
+ *
+ * Author: Phil Burk, http://www.softsynth.com
+ * modified for SMP safety on OS X by Bjorn Roche.
+ * also allowed for const where possible.
+ * modified for multiple-byte-sized data elements by Sven Fischer 
+ *
+ * Note that this is safe only for a single-thread reader
+ * and a single-thread writer.
+ *
+ * This program is distributed with the PortAudio Portable Audio Library.
+ * For more information see: http://www.portaudio.com
+ * Copyright (c) 1999-2000 Ross Bencina and Phil Burk
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining
+ * a copy of this software and associated documentation files
+ * (the "Software"), to deal in the Software without restriction,
+ * including without limitation the rights to use, copy, modify, merge,
+ * publish, distribute, sublicense, and/or sell copies of the Software,
+ * and to permit persons to whom the Software is furnished to do so,
+ * subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be
+ * included in all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
+ * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
+ * CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ */
+
+/*
+ * The text above constitutes the entire PortAudio license; however, 
+ * the PortAudio community also makes the following non-binding requests:
+ *
+ * Any person wishing to distribute modifications to the Software is
+ * requested to send the modifications to the original developer so that
+ * they can be incorporated into the canonical version. It is also 
+ * requested that these non-binding requests be included along with the 
+ * license above.
+ */
+
+/** @file
+ @ingroup common_src
+ @brief Single-reader single-writer lock-free ring buffer
+
+ PaUtilRingBuffer is a ring buffer used to transport samples between
+ different execution contexts (threads, OS callbacks, interrupt handlers)
+ without requiring the use of any locks. This only works when there is
+ a single reader and a single writer (ie. one thread or callback writes
+ to the ring buffer, another thread or callback reads from it).
+
+ The PaUtilRingBuffer structure manages a ring buffer containing N 
+ elements, where N must be a power of two. An element may be any size 
+ (specified in bytes).
+
+ The memory area used to store the buffer elements must be allocated by 
+ the client prior to calling PaUtil_InitializeRingBuffer() and must outlive
+ the use of the ring buffer.
+*/
+
+#if defined(__APPLE__)
+#include <sys/types.h>
+typedef int32_t ring_buffer_size_t;
+#elif defined( __GNUC__ )
+typedef long ring_buffer_size_t;
+#elif (_MSC_VER >= 1400)
+typedef long ring_buffer_size_t;
+#elif defined(_MSC_VER) || defined(__BORLANDC__)
+typedef long ring_buffer_size_t;
+#else
+typedef long ring_buffer_size_t;
+#endif
+
+
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif /* __cplusplus */
+
+typedef struct PaUtilRingBuffer
+{
+    ring_buffer_size_t  bufferSize; /**< Number of elements in FIFO. Power of 2. Set by PaUtil_InitRingBuffer. */
+    ring_buffer_size_t  writeIndex; /**< Index of next writable element. Set by PaUtil_AdvanceRingBufferWriteIndex. */
+    ring_buffer_size_t  readIndex;  /**< Index of next readable element. Set by PaUtil_AdvanceRingBufferReadIndex. */
+    ring_buffer_size_t  bigMask;    /**< Used for wrapping indices with extra bit to distinguish full/empty. */
+    ring_buffer_size_t  smallMask;  /**< Used for fitting indices to buffer. */
+    ring_buffer_size_t  elementSizeBytes; /**< Number of bytes per element. */
+    char  *buffer;    /**< Pointer to the buffer containing the actual data. */
+}PaUtilRingBuffer;
+
+/** Initialize Ring Buffer.
+
+ @param rbuf The ring buffer.
+
+ @param elementSizeBytes The size of a single data element in bytes.
+
+ @param elementCount The number of elements in the buffer (must be power of 2).
+
+ @param dataPtr A pointer to a previously allocated area where the data
+ will be maintained.  It must be elementCount*elementSizeBytes long.
+
+ @return -1 if elementCount is not a power of 2, otherwise 0.
+*/
+ring_buffer_size_t PaUtil_InitializeRingBuffer( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementSizeBytes, ring_buffer_size_t elementCount, void *dataPtr );
+
+/** Clear buffer. Should only be called when buffer is NOT being read.
+
+ @param rbuf The ring buffer.
+*/
+void PaUtil_FlushRingBuffer( PaUtilRingBuffer *rbuf );
+
+/** Retrieve the number of elements available in the ring buffer for writing.
+
+ @param rbuf The ring buffer.
+
+ @return The number of elements available for writing.
+*/
+ring_buffer_size_t PaUtil_GetRingBufferWriteAvailable( PaUtilRingBuffer *rbuf );
+
+/** Retrieve the number of elements available in the ring buffer for reading.
+
+ @param rbuf The ring buffer.
+
+ @return The number of elements available for reading.
+*/
+ring_buffer_size_t PaUtil_GetRingBufferReadAvailable( PaUtilRingBuffer *rbuf );
+
+/** Write data to the ring buffer.
+
+ @param rbuf The ring buffer.
+
+ @param data The address of new data to write to the buffer.
+
+ @param elementCount The number of elements to be written.
+
+ @return The number of elements written.
+*/
+ring_buffer_size_t PaUtil_WriteRingBuffer( PaUtilRingBuffer *rbuf, const void *data, ring_buffer_size_t elementCount );
+
+/** Read data from the ring buffer.
+
+ @param rbuf The ring buffer.
+
+ @param data The address where the data should be stored.
+
+ @param elementCount The number of elements to be read.
+
+ @return The number of elements read.
+*/
+ring_buffer_size_t PaUtil_ReadRingBuffer( PaUtilRingBuffer *rbuf, void *data, ring_buffer_size_t elementCount );
+
+/** Get address of region(s) to which we can write data.
+
+ @param rbuf The ring buffer.
+
+ @param elementCount The number of elements desired.
+
+ @param dataPtr1 The address where the first (or only) region pointer will be
+ stored.
+
+ @param sizePtr1 The address where the first (or only) region length will be
+ stored.
+
+ @param dataPtr2 The address where the second region pointer will be stored if
+ the first region is too small to satisfy elementCount.
+
+ @param sizePtr2 The address where the second region length will be stored if
+ the first region is too small to satisfy elementCount.
+
+ @return The room available to be written or elementCount, whichever is smaller.
+*/
+ring_buffer_size_t PaUtil_GetRingBufferWriteRegions( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementCount,
+                                       void **dataPtr1, ring_buffer_size_t *sizePtr1,
+                                       void **dataPtr2, ring_buffer_size_t *sizePtr2 );
+
+/** Advance the write index to the next location to be written.
+
+ @param rbuf The ring buffer.
+
+ @param elementCount The number of elements to advance.
+
+ @return The new position.
+*/
+ring_buffer_size_t PaUtil_AdvanceRingBufferWriteIndex( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementCount );
+
+/** Get address of region(s) from which we can write data.
+
+ @param rbuf The ring buffer.
+
+ @param elementCount The number of elements desired.
+
+ @param dataPtr1 The address where the first (or only) region pointer will be
+ stored.
+
+ @param sizePtr1 The address where the first (or only) region length will be
+ stored.
+
+ @param dataPtr2 The address where the second region pointer will be stored if
+ the first region is too small to satisfy elementCount.
+
+ @param sizePtr2 The address where the second region length will be stored if
+ the first region is too small to satisfy elementCount.
+
+ @return The number of elements available for reading.
+*/
+ring_buffer_size_t PaUtil_GetRingBufferReadRegions( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementCount,
+                                      void **dataPtr1, ring_buffer_size_t *sizePtr1,
+                                      void **dataPtr2, ring_buffer_size_t *sizePtr2 );
+
+/** Advance the read index to the next location to be read.
+
+ @param rbuf The ring buffer.
+
+ @param elementCount The number of elements to advance.
+
+ @return The new position.
+*/
+ring_buffer_size_t PaUtil_AdvanceRingBufferReadIndex( PaUtilRingBuffer *rbuf, ring_buffer_size_t elementCount );
+
+#ifdef __cplusplus
+}
+#endif /* __cplusplus */
+#endif /* MODULES_AUDIO_DEVICE_MAIN_SOURCE_MAC_PORTAUDIO_PA_RINGBUFFER_H_ */
diff --git a/src/modules/audio_device/main/source/win/audio_device_core_win.cc b/src/modules/audio_device/main/source/win/audio_device_core_win.cc
new file mode 100644
index 0000000..7428f0c
--- /dev/null
+++ b/src/modules/audio_device/main/source/win/audio_device_core_win.cc
@@ -0,0 +1,5184 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#pragma warning(disable: 4995)  //  name was marked as #pragma deprecated
+
+#if (_MSC_VER >= 1310) && (_MSC_VER < 1400)
+// Reports the major and minor versions of the compiler.
+// For example, 1310 for Microsoft Visual C++ .NET 2003. 1310 represents version 13 and a 1.0 point release.
+// The Visual C++ 2005 compiler version is 1400.
+// Type cl /? at the command line to see the major and minor versions of your compiler along with the build number.
+#pragma message(">> INFO: Windows Core Audio is not supported in VS 2003")
+#endif
+
+#include "audio_device_config.h"
+
+#if defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+#pragma message(">> INFO: WEBRTC_WINDOWS_CORE_AUDIO_BUILD is defined")
+#else
+#pragma message(">> INFO: WEBRTC_WINDOWS_CORE_AUDIO_BUILD is *not* defined")
+#endif
+
+#ifdef WEBRTC_WINDOWS_CORE_AUDIO_BUILD
+
+#include "audio_device_core_win.h"
+
+#include <assert.h>
+#include <string.h>
+
+#include <windows.h>
+#include <comdef.h>
+#include <dmo.h>
+#include "Functiondiscoverykeys_devpkey.h"
+#include <mmsystem.h>
+#include <strsafe.h>
+#include <uuids.h>
+
+#include "audio_device_utility.h"
+#include "system_wrappers/interface/sleep.h"
+#include "trace.h"
+
+// Macro that calls a COM method returning HRESULT value.
+#define EXIT_ON_ERROR(hres)    do { if (FAILED(hres)) goto Exit; } while(0)
+
+// Macro that releases a COM object if not NULL.
+#define SAFE_RELEASE(p)     do { if ((p)) { (p)->Release(); (p) = NULL; } } while(0)
+
+#define ROUND(x) ((x) >=0 ? (int)((x) + 0.5) : (int)((x) - 0.5))
+
+// REFERENCE_TIME time units per millisecond
+#define REFTIMES_PER_MILLISEC  10000
+
+typedef struct tagTHREADNAME_INFO
+{
+   DWORD dwType;        // must be 0x1000
+   LPCSTR szName;       // pointer to name (in user addr space)
+   DWORD dwThreadID;    // thread ID (-1=caller thread)
+   DWORD dwFlags;       // reserved for future use, must be zero
+} THREADNAME_INFO;
+
+namespace webrtc {
+namespace {
+
+enum { COM_THREADING_MODEL = COINIT_MULTITHREADED };
+
+enum
+{
+    kAecCaptureStreamIndex = 0,
+    kAecRenderStreamIndex = 1
+};
+
+// An implementation of IMediaBuffer, as required for
+// IMediaObject::ProcessOutput(). After consuming data provided by
+// ProcessOutput(), call SetLength() to update the buffer availability.
+//
+// Example implementation:
+// http://msdn.microsoft.com/en-us/library/dd376684(v=vs.85).aspx
+class MediaBufferImpl : public IMediaBuffer
+{
+public:
+    explicit MediaBufferImpl(DWORD maxLength)
+        : _data(new BYTE[maxLength]),
+          _length(0),
+          _maxLength(maxLength),
+          _refCount(0)
+    {}
+
+    // IMediaBuffer methods.
+    STDMETHOD(GetBufferAndLength(BYTE** ppBuffer, DWORD* pcbLength))
+    {
+        if (!ppBuffer || !pcbLength)
+        {
+            return E_POINTER;
+        }
+
+        *ppBuffer = _data;
+        *pcbLength = _length;
+
+        return S_OK;
+    }
+
+    STDMETHOD(GetMaxLength(DWORD* pcbMaxLength))
+    {
+        if (!pcbMaxLength)
+        {
+            return E_POINTER;
+        }
+
+        *pcbMaxLength = _maxLength;
+        return S_OK;
+    }
+
+    STDMETHOD(SetLength(DWORD cbLength))
+    {
+        if (cbLength > _maxLength)
+        {
+            return E_INVALIDARG;
+        }
+
+        _length = cbLength;
+        return S_OK;
+    }
+
+    // IUnknown methods.
+    STDMETHOD_(ULONG, AddRef())
+    {
+        return InterlockedIncrement(&_refCount);
+    }
+
+    STDMETHOD(QueryInterface(REFIID riid, void** ppv))
+    {
+        if (!ppv)
+        {
+            return E_POINTER;
+        }
+        else if (riid != IID_IMediaBuffer && riid != IID_IUnknown)
+        {
+            return E_NOINTERFACE;
+        }
+
+        *ppv = static_cast<IMediaBuffer*>(this);
+        AddRef();
+        return S_OK;
+    }
+
+    STDMETHOD_(ULONG, Release())
+    {
+        LONG refCount = InterlockedDecrement(&_refCount);
+        if (refCount == 0)
+        {
+            delete this;
+        }
+
+        return refCount;
+    }
+
+private:
+    ~MediaBufferImpl()
+    {
+        delete [] _data;
+    }
+
+    BYTE* _data;
+    DWORD _length;
+    const DWORD _maxLength;
+    LONG _refCount;
+};
+}  // namespace
+
+// ============================================================================
+//                              Static Methods
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  CoreAudioIsSupported
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsCore::CoreAudioIsSupported()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, -1, "%s", __FUNCTION__);
+
+    bool MMDeviceIsAvailable(false);
+    bool coreAudioIsSupported(false);
+
+    HRESULT hr(S_OK);
+    TCHAR buf[MAXERRORLENGTH];
+    TCHAR errorText[MAXERRORLENGTH];
+
+    // 1) Check if Windows version is Vista SP1 or later.
+    //
+    // CoreAudio is only available on Vista SP1 and later.
+    //
+    OSVERSIONINFOEX osvi;
+    DWORDLONG dwlConditionMask = 0;
+    int op = VER_LESS_EQUAL;
+
+    // Initialize the OSVERSIONINFOEX structure.
+    ZeroMemory(&osvi, sizeof(OSVERSIONINFOEX));
+    osvi.dwOSVersionInfoSize = sizeof(OSVERSIONINFOEX);
+    osvi.dwMajorVersion = 6;
+    osvi.dwMinorVersion = 0;
+    osvi.wServicePackMajor = 0;
+    osvi.wServicePackMinor = 0;
+    osvi.wProductType = VER_NT_WORKSTATION;
+
+    // Initialize the condition mask.
+    VER_SET_CONDITION(dwlConditionMask, VER_MAJORVERSION, op);
+    VER_SET_CONDITION(dwlConditionMask, VER_MINORVERSION, op);
+    VER_SET_CONDITION(dwlConditionMask, VER_SERVICEPACKMAJOR, op);
+    VER_SET_CONDITION(dwlConditionMask, VER_SERVICEPACKMINOR, op);
+    VER_SET_CONDITION(dwlConditionMask, VER_PRODUCT_TYPE, VER_EQUAL);
+
+    DWORD dwTypeMask = VER_MAJORVERSION | VER_MINORVERSION |
+                       VER_SERVICEPACKMAJOR | VER_SERVICEPACKMINOR |
+                       VER_PRODUCT_TYPE;
+
+    // Perform the test.
+    BOOL isVistaRTMorXP = VerifyVersionInfo(&osvi, dwTypeMask,
+                                            dwlConditionMask);
+    if (isVistaRTMorXP != 0)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1,
+            "*** Windows Core Audio is only supported on Vista SP1 or later "
+            "=> will revert to the Wave API ***");
+        return false;
+    }
+
+    // 2) Initializes the COM library for use by the calling thread.
+
+    // The COM init wrapper sets the thread's concurrency model to MTA,
+    // and creates a new apartment for the thread if one is required. The
+    // wrapper also ensures that each call to CoInitializeEx is balanced
+    // by a corresponding call to CoUninitialize.
+    //
+    ScopedCOMInitializer comInit(ScopedCOMInitializer::kMTA);
+    if (!comInit.succeeded()) {
+      // Things will work even if an STA thread is calling this method but we
+      // want to ensure that MTA is used and therefore return false here.
+      return false;
+    }
+ 
+    // 3) Check if the MMDevice API is available.
+    //
+    // The Windows Multimedia Device (MMDevice) API enables audio clients to
+    // discover audio endpoint devices, determine their capabilities, and create
+    // driver instances for those devices.
+    // Header file Mmdeviceapi.h defines the interfaces in the MMDevice API.
+    // The MMDevice API consists of several interfaces. The first of these is the
+    // IMMDeviceEnumerator interface. To access the interfaces in the MMDevice API,
+    // a client obtains a reference to the IMMDeviceEnumerator interface of a
+    // device-enumerator object by calling the CoCreateInstance function.
+    //
+    // Through the IMMDeviceEnumerator interface, the client can obtain references
+    // to the other interfaces in the MMDevice API. The MMDevice API implements
+    // the following interfaces:
+    //
+    // IMMDevice            Represents an audio device.
+    // IMMDeviceCollection  Represents a collection of audio devices.
+    // IMMDeviceEnumerator  Provides methods for enumerating audio devices.
+    // IMMEndpoint          Represents an audio endpoint device.
+    //
+    IMMDeviceEnumerator* pIMMD(NULL);
+    const CLSID CLSID_MMDeviceEnumerator = __uuidof(MMDeviceEnumerator);
+    const IID IID_IMMDeviceEnumerator = __uuidof(IMMDeviceEnumerator);
+
+    hr = CoCreateInstance(
+            CLSID_MMDeviceEnumerator,   // GUID value of MMDeviceEnumerator coclass
+            NULL,
+            CLSCTX_ALL,
+            IID_IMMDeviceEnumerator,    // GUID value of the IMMDeviceEnumerator interface
+            (void**)&pIMMD );
+
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
+            "AudioDeviceWindowsCore::CoreAudioIsSupported() Failed to create the required COM object", hr);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, -1,
+            "AudioDeviceWindowsCore::CoreAudioIsSupported() CoCreateInstance(MMDeviceEnumerator) failed (hr=0x%x)", hr);
+
+        const DWORD dwFlags = FORMAT_MESSAGE_FROM_SYSTEM |
+                              FORMAT_MESSAGE_IGNORE_INSERTS;
+        const DWORD dwLangID = MAKELANGID(LANG_ENGLISH, SUBLANG_ENGLISH_US);
+    
+        // Gets the system's human readable message string for this HRESULT.
+        // All error message in English by default.
+        DWORD messageLength = ::FormatMessageW(dwFlags, 
+                                               0,
+                                               hr,
+                                               dwLangID,
+                                               errorText,  
+                                               MAXERRORLENGTH,  
+                                               NULL);
+        
+        assert(messageLength <= MAXERRORLENGTH);
+
+        // Trims tailing white space (FormatMessage() leaves a trailing cr-lf.).
+        for (; messageLength && ::isspace(errorText[messageLength - 1]);
+             --messageLength)
+        {
+            errorText[messageLength - 1] = '\0';
+        }
+
+        StringCchPrintf(buf, MAXERRORLENGTH, TEXT("Error details: "));
+        StringCchCat(buf, MAXERRORLENGTH, errorText);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, -1, "%S", buf);
+    }
+    else
+    {
+        MMDeviceIsAvailable = true;
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, -1,
+            "AudioDeviceWindowsCore::CoreAudioIsSupported() CoCreateInstance(MMDeviceEnumerator) succeeded", hr);
+        SAFE_RELEASE(pIMMD);
+    }
+
+    // 4) Verify that we can create and initialize our Core Audio class.
+    //
+    // Also, perform a limited "API test" to ensure that Core Audio is supported for all devices.
+    //
+    if (MMDeviceIsAvailable)
+    {
+        coreAudioIsSupported = false;
+
+        AudioDeviceWindowsCore* p = new AudioDeviceWindowsCore(-1);
+        if (p == NULL)
+        {
+            return false;
+        }
+
+        int ok(0);
+        int temp_ok(0);
+        bool available(false);
+
+        ok |= p->Init();
+
+        WebRtc_Word16 numDevsRec = p->RecordingDevices();
+        for (WebRtc_UWord16 i = 0; i < numDevsRec; i++)
+        {
+            ok |= p->SetRecordingDevice(i);
+            temp_ok = p->RecordingIsAvailable(available);
+            ok |= temp_ok;
+            ok |= (available == false);
+            if (available)
+            {
+                ok |= p->InitMicrophone();
+            }
+            if (ok)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, -1,
+                    "AudioDeviceWindowsCore::CoreAudioIsSupported() Failed to use Core Audio Recording for device id=%i", i);
+            }
+        }
+
+        WebRtc_Word16 numDevsPlay = p->PlayoutDevices();
+        for (WebRtc_UWord16 i = 0; i < numDevsPlay; i++)
+        {
+            ok |= p->SetPlayoutDevice(i);
+            temp_ok = p->PlayoutIsAvailable(available);
+            ok |= temp_ok;
+            ok |= (available == false);
+            if (available)
+            {
+                ok |= p->InitSpeaker();
+            }
+            if (ok)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, -1 ,
+                    "AudioDeviceWindowsCore::CoreAudioIsSupported() Failed to use Core Audio Playout for device id=%i", i);
+            }
+        }
+
+        ok |= p->Terminate();
+
+        if (ok == 0)
+        {
+            coreAudioIsSupported = true;
+        }
+
+        delete p;
+    }
+
+    if (coreAudioIsSupported)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1, "*** Windows Core Audio is supported ***");
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1, "*** Windows Core Audio is NOT supported => will revert to the Wave API ***");
+    }
+
+    return (coreAudioIsSupported);
+}
+
+// ============================================================================
+//                            Construction & Destruction
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceWindowsCore() - ctor
+// ----------------------------------------------------------------------------
+
+AudioDeviceWindowsCore::AudioDeviceWindowsCore(const WebRtc_Word32 id) :
+    _comInit(ScopedCOMInitializer::kMTA),
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _volumeMutex(*CriticalSectionWrapper::CreateCriticalSection()),
+    _id(id),
+    _ptrAudioBuffer(NULL),
+    _ptrEnumerator(NULL),
+    _ptrRenderCollection(NULL),
+    _ptrCaptureCollection(NULL),
+    _ptrDeviceOut(NULL),
+    _ptrDeviceIn(NULL),
+    _ptrClientOut(NULL),
+    _ptrClientIn(NULL),
+    _ptrRenderClient(NULL),
+    _ptrCaptureClient(NULL),
+    _ptrCaptureVolume(NULL),
+    _ptrRenderSimpleVolume(NULL),
+    _dmo(NULL),
+    _mediaBuffer(NULL),
+    _builtInAecEnabled(false),
+    _playAudioFrameSize(0),
+    _playSampleRate(0),
+    _playBlockSize(0),
+    _playChannels(2),
+    _sndCardPlayDelay(0),
+    _sndCardRecDelay(0),
+    _sampleDriftAt48kHz(0),
+    _driftAccumulator(0),
+    _writtenSamples(0),
+    _readSamples(0),
+    _playAcc(0),
+    _recAudioFrameSize(0),
+    _recSampleRate(0),
+    _recBlockSize(0),
+    _recChannels(2),
+    _avrtLibrary(NULL),
+    _winSupportAvrt(false),
+    _hRenderSamplesReadyEvent(NULL),
+    _hPlayThread(NULL),
+    _hCaptureSamplesReadyEvent(NULL),
+    _hRecThread(NULL),
+    _hShutdownRenderEvent(NULL),
+    _hShutdownCaptureEvent(NULL),
+    _hRenderStartedEvent(NULL),
+    _hCaptureStartedEvent(NULL),
+    _hGetCaptureVolumeThread(NULL),
+    _hSetCaptureVolumeThread(NULL),
+    _hSetCaptureVolumeEvent(NULL),
+    _hMmTask(NULL),
+    _initialized(false),
+    _recording(false),
+    _playing(false),
+    _recIsInitialized(false),
+    _playIsInitialized(false),
+    _speakerIsInitialized(false),
+    _microphoneIsInitialized(false),
+    _AGC(false),
+    _playWarning(0),
+    _playError(0),
+    _recWarning(0),
+    _recError(0),
+    _playBufType(AudioDeviceModule::kAdaptiveBufferSize),
+    _playBufDelay(80),
+    _playBufDelayFixed(80),
+    _usingInputDeviceIndex(false),
+    _usingOutputDeviceIndex(false),
+    _inputDevice(AudioDeviceModule::kDefaultCommunicationDevice),
+    _outputDevice(AudioDeviceModule::kDefaultCommunicationDevice),
+    _inputDeviceIndex(0),
+    _outputDeviceIndex(0),
+    _newMicLevel(0)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id, "%s created", __FUNCTION__);
+    assert(_comInit.succeeded());
+
+    // Try to load the Avrt DLL
+    if (!_avrtLibrary)
+    {
+        // Get handle to the Avrt DLL module.
+        _avrtLibrary = LoadLibrary(TEXT("Avrt.dll"));
+        if (_avrtLibrary)
+        {
+            // Handle is valid (should only happen if OS larger than vista & win7).
+            // Try to get the function addresses.
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioDeviceWindowsCore::AudioDeviceWindowsCore() The Avrt DLL module is now loaded");
+
+            _PAvRevertMmThreadCharacteristics = (PAvRevertMmThreadCharacteristics)GetProcAddress(_avrtLibrary, "AvRevertMmThreadCharacteristics");
+            _PAvSetMmThreadCharacteristicsA = (PAvSetMmThreadCharacteristicsA)GetProcAddress(_avrtLibrary, "AvSetMmThreadCharacteristicsA");
+            _PAvSetMmThreadPriority = (PAvSetMmThreadPriority)GetProcAddress(_avrtLibrary, "AvSetMmThreadPriority");
+
+            if ( _PAvRevertMmThreadCharacteristics &&
+                 _PAvSetMmThreadCharacteristicsA &&
+                 _PAvSetMmThreadPriority)
+            {
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioDeviceWindowsCore::AudioDeviceWindowsCore() AvRevertMmThreadCharacteristics() is OK");
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioDeviceWindowsCore::AudioDeviceWindowsCore() AvSetMmThreadCharacteristicsA() is OK");
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioDeviceWindowsCore::AudioDeviceWindowsCore() AvSetMmThreadPriority() is OK");
+                _winSupportAvrt = true;
+            }
+        }
+    }
+
+    // Create our samples ready events - we want auto reset events that start in the not-signaled state.
+    // The state of an auto-reset event object remains signaled until a single waiting thread is released,
+    // at which time the system automatically sets the state to nonsignaled. If no threads are waiting,
+    // the event object's state remains signaled.
+    // (Except for _hShutdownCaptureEvent, which is used to shutdown multiple threads).
+    _hRenderSamplesReadyEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
+    _hCaptureSamplesReadyEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
+    _hShutdownRenderEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
+    _hShutdownCaptureEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
+    _hRenderStartedEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
+    _hCaptureStartedEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
+    _hSetCaptureVolumeEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
+
+    _perfCounterFreq.QuadPart = 1;
+    _perfCounterFactor = 0.0;
+    _avgCPULoad = 0.0;
+
+    // list of number of channels to use on recording side
+    _recChannelsPrioList[0] = 2;    // stereo is prio 1
+    _recChannelsPrioList[1] = 1;    // mono is prio 2
+
+    // list of number of channels to use on playout side
+    _playChannelsPrioList[0] = 2;    // stereo is prio 1
+    _playChannelsPrioList[1] = 1;    // mono is prio 2
+
+    HRESULT hr;
+
+    // We know that this API will work since it has already been verified in
+    // CoreAudioIsSupported, hence no need to check for errors here as well.
+
+    // Retrive the IMMDeviceEnumerator API (should load the MMDevAPI.dll)
+    // TODO(henrika): we should probably move this allocation to Init() instead
+    // and deallocate in Terminate() to make the implementation more symmetric.
+    CoCreateInstance(
+      __uuidof(MMDeviceEnumerator),
+      NULL,
+      CLSCTX_ALL,
+      __uuidof(IMMDeviceEnumerator),
+      reinterpret_cast<void**>(&_ptrEnumerator));
+    assert(NULL != _ptrEnumerator);
+
+    // DMO initialization for built-in WASAPI AEC.
+    {
+        IMediaObject* ptrDMO = NULL;
+        hr = CoCreateInstance(CLSID_CWMAudioAEC,
+                              NULL,
+                              CLSCTX_INPROC_SERVER,
+                              IID_IMediaObject,
+                              reinterpret_cast<void**>(&ptrDMO));
+        if (FAILED(hr) || ptrDMO == NULL)
+        {
+            // Since we check that _dmo is non-NULL in EnableBuiltInAEC(), the
+            // feature is prevented from being enabled.
+            _builtInAecEnabled = false;
+            _TraceCOMError(hr);
+        }
+        _dmo = ptrDMO;
+        SAFE_RELEASE(ptrDMO);
+    }
+}
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceWindowsCore() - dtor
+// ----------------------------------------------------------------------------
+
+AudioDeviceWindowsCore::~AudioDeviceWindowsCore()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s destroyed", __FUNCTION__);
+
+    Terminate();
+
+    // The IMMDeviceEnumerator is created during construction. Must release
+    // it here and not in Terminate() since we don't recreate it in Init().
+    SAFE_RELEASE(_ptrEnumerator);
+
+    _ptrAudioBuffer = NULL;
+
+    if (NULL != _hRenderSamplesReadyEvent)
+    {
+        CloseHandle(_hRenderSamplesReadyEvent);
+        _hRenderSamplesReadyEvent = NULL;
+    }
+
+    if (NULL != _hCaptureSamplesReadyEvent)
+    {
+        CloseHandle(_hCaptureSamplesReadyEvent);
+        _hCaptureSamplesReadyEvent = NULL;
+    }
+
+    if (NULL != _hRenderStartedEvent)
+    {
+        CloseHandle(_hRenderStartedEvent);
+        _hRenderStartedEvent = NULL;
+    }
+
+    if (NULL != _hCaptureStartedEvent)
+    {
+        CloseHandle(_hCaptureStartedEvent);
+        _hCaptureStartedEvent = NULL;
+    }
+
+    if (NULL != _hShutdownRenderEvent)
+    {
+        CloseHandle(_hShutdownRenderEvent);
+        _hShutdownRenderEvent = NULL;
+    }
+
+    if (NULL != _hShutdownCaptureEvent)
+    {
+        CloseHandle(_hShutdownCaptureEvent);
+        _hShutdownCaptureEvent = NULL;
+    }
+
+    if (NULL != _hSetCaptureVolumeEvent)
+    {
+        CloseHandle(_hSetCaptureVolumeEvent);
+        _hSetCaptureVolumeEvent = NULL;
+    }
+
+    if (_avrtLibrary)
+    {
+        BOOL freeOK = FreeLibrary(_avrtLibrary);
+        if (!freeOK)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                "AudioDeviceWindowsCore::~AudioDeviceWindowsCore() failed to free the loaded Avrt DLL module correctly");
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                "AudioDeviceWindowsCore::~AudioDeviceWindowsCore() the Avrt DLL module is now unloaded");
+        }
+    }
+
+    delete &_critSect;
+    delete &_volumeMutex;
+}
+
+// ============================================================================
+//                                     API
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  AttachAudioBuffer
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsCore::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer)
+{
+
+    _ptrAudioBuffer = audioBuffer;
+
+    // Inform the AudioBuffer about default settings for this implementation.
+    // Set all values to zero here since the actual settings will be done by
+    // InitPlayout and InitRecording later.
+    _ptrAudioBuffer->SetRecordingSampleRate(0);
+    _ptrAudioBuffer->SetPlayoutSampleRate(0);
+    _ptrAudioBuffer->SetRecordingChannels(0);
+    _ptrAudioBuffer->SetPlayoutChannels(0);
+}
+
+// ----------------------------------------------------------------------------
+//  ActiveAudioLayer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const
+{
+    audioLayer = AudioDeviceModule::kWindowsCoreAudio;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Init
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::Init()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_initialized)
+    {
+        return 0;
+    }
+
+    _playWarning = 0;
+    _playError = 0;
+    _recWarning = 0;
+    _recError = 0;
+
+    // Enumerate all audio rendering and capturing endpoint devices.
+    // Note that, some of these will not be able to select by the user.
+    // The complete collection is for internal use only.
+    //
+    _EnumerateEndpointDevicesAll(eRender);
+    _EnumerateEndpointDevicesAll(eCapture);
+
+    _initialized = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Terminate
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::Terminate()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_initialized) {
+        return 0;
+    }
+
+    _initialized = false;
+    _speakerIsInitialized = false;
+    _microphoneIsInitialized = false;
+    _playing = false;
+    _recording = false;
+
+    SAFE_RELEASE(_ptrRenderCollection);
+    SAFE_RELEASE(_ptrCaptureCollection);
+    SAFE_RELEASE(_ptrDeviceOut);
+    SAFE_RELEASE(_ptrDeviceIn);
+    SAFE_RELEASE(_ptrClientOut);
+    SAFE_RELEASE(_ptrClientIn);
+    SAFE_RELEASE(_ptrRenderClient);
+    SAFE_RELEASE(_ptrCaptureClient);
+    SAFE_RELEASE(_ptrCaptureVolume);
+    SAFE_RELEASE(_ptrRenderSimpleVolume);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Initialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsCore::Initialized() const
+{
+    return (_initialized);
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SpeakerIsAvailable(bool& available)
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_ptrDeviceOut == NULL)
+    {
+        return -1;
+    }
+
+    available = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitSpeaker
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::InitSpeaker()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_playing)
+    {
+        return -1;
+    }
+
+    if (_ptrDeviceOut == NULL)
+    {
+        return -1;
+    }
+
+    if (_usingOutputDeviceIndex)
+    {
+        WebRtc_Word16 nDevices = PlayoutDevices();
+        if (_outputDeviceIndex > (nDevices - 1))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "current device selection is invalid => unable to initialize");
+            return -1;
+        }
+    }
+
+    WebRtc_Word32 ret(0);
+
+    SAFE_RELEASE(_ptrDeviceOut);
+    if (_usingOutputDeviceIndex)
+    {
+        // Refresh the selected rendering endpoint device using current index
+        ret = _GetListDevice(eRender, _outputDeviceIndex, &_ptrDeviceOut);
+    }
+    else
+    {
+        ERole role;
+        (_outputDevice == AudioDeviceModule::kDefaultDevice) ? role = eConsole : role = eCommunications;
+        // Refresh the selected rendering endpoint device using role
+        ret = _GetDefaultDevice(eRender, role, &_ptrDeviceOut);
+    }
+
+    if (ret != 0 || (_ptrDeviceOut == NULL))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to initialize the rendering enpoint device");
+        SAFE_RELEASE(_ptrDeviceOut);
+        return -1;
+    }
+
+    IAudioSessionManager* pManager = NULL;
+    ret = _ptrDeviceOut->Activate(__uuidof(IAudioSessionManager),
+                                  CLSCTX_ALL,
+                                  NULL,
+                                  (void**)&pManager);
+    if (ret != 0 || pManager == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                    "  failed to initialize the render manager");
+        SAFE_RELEASE(pManager);
+        return -1;
+    }
+
+    SAFE_RELEASE(_ptrRenderSimpleVolume);
+    ret = pManager->GetSimpleAudioVolume(NULL, FALSE, &_ptrRenderSimpleVolume);
+    if (ret != 0 || _ptrRenderSimpleVolume == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                    "  failed to initialize the render simple volume");
+        SAFE_RELEASE(pManager);
+        SAFE_RELEASE(_ptrRenderSimpleVolume);
+        return -1;
+    }
+    SAFE_RELEASE(pManager);
+
+    _speakerIsInitialized = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::MicrophoneIsAvailable(bool& available)
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_ptrDeviceIn == NULL)
+    {
+        return -1;
+    }
+
+    available = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitMicrophone
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::InitMicrophone()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_recording)
+    {
+        return -1;
+    }
+
+    if (_ptrDeviceIn == NULL)
+    {
+        return -1;
+    }
+
+    if (_usingInputDeviceIndex)
+    {
+        WebRtc_Word16 nDevices = RecordingDevices();
+        if (_inputDeviceIndex > (nDevices - 1))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "current device selection is invalid => unable to initialize");
+            return -1;
+        }
+    }
+
+    WebRtc_Word32 ret(0);
+
+    SAFE_RELEASE(_ptrDeviceIn);
+    if (_usingInputDeviceIndex)
+    {
+        // Refresh the selected capture endpoint device using current index
+        ret = _GetListDevice(eCapture, _inputDeviceIndex, &_ptrDeviceIn);
+    }
+    else
+    {
+        ERole role;
+        (_inputDevice == AudioDeviceModule::kDefaultDevice) ? role = eConsole : role = eCommunications;
+        // Refresh the selected capture endpoint device using role
+        ret = _GetDefaultDevice(eCapture, role, &_ptrDeviceIn);
+    }
+
+    if (ret != 0 || (_ptrDeviceIn == NULL))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to initialize the capturing enpoint device");
+        SAFE_RELEASE(_ptrDeviceIn);
+        return -1;
+    }
+
+    ret = _ptrDeviceIn->Activate(__uuidof(IAudioEndpointVolume),
+                                 CLSCTX_ALL,
+                                 NULL,
+                                 reinterpret_cast<void **>(&_ptrCaptureVolume));
+    if (ret != 0 || _ptrCaptureVolume == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                    "  failed to initialize the capture volume");
+        SAFE_RELEASE(_ptrCaptureVolume);
+        return -1;
+    }
+
+    _microphoneIsInitialized = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsCore::SpeakerIsInitialized() const
+{
+
+    return (_speakerIsInitialized);
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsCore::MicrophoneIsInitialized() const
+{
+
+    return (_microphoneIsInitialized);
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolumeIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SpeakerVolumeIsAvailable(bool& available)
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_ptrDeviceOut == NULL)
+    {
+        return -1;
+    }
+
+    HRESULT hr = S_OK;
+    IAudioSessionManager* pManager = NULL;
+    ISimpleAudioVolume* pVolume = NULL;
+
+    hr = _ptrDeviceOut->Activate(__uuidof(IAudioSessionManager), CLSCTX_ALL, NULL, (void**)&pManager);
+    EXIT_ON_ERROR(hr);
+
+    hr = pManager->GetSimpleAudioVolume(NULL, FALSE, &pVolume);
+    EXIT_ON_ERROR(hr);
+
+    float volume(0.0f);
+    hr = pVolume->GetMasterVolume(&volume);
+    if (FAILED(hr))
+    {
+        available = false;
+    }
+    available = true;
+
+    SAFE_RELEASE(pManager);
+    SAFE_RELEASE(pVolume);
+
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    SAFE_RELEASE(pManager);
+    SAFE_RELEASE(pVolume);
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SetSpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SetSpeakerVolume(WebRtc_UWord32 volume)
+{
+
+    {
+        CriticalSectionScoped lock(&_critSect);
+
+        if (!_speakerIsInitialized)
+        {
+        return -1;
+        }
+
+        if (_ptrDeviceOut == NULL)
+        {
+            return -1;
+        }
+    }
+
+    if (volume < (WebRtc_UWord32)MIN_CORE_SPEAKER_VOLUME ||
+        volume > (WebRtc_UWord32)MAX_CORE_SPEAKER_VOLUME)
+    {
+        return -1;
+    }
+
+    HRESULT hr = S_OK;
+
+    // scale input volume to valid range (0.0 to 1.0)
+    const float fLevel = (float)volume/MAX_CORE_SPEAKER_VOLUME;
+    _volumeMutex.Enter();
+    hr = _ptrRenderSimpleVolume->SetMasterVolume(fLevel,NULL);
+    _volumeMutex.Leave();
+    EXIT_ON_ERROR(hr);
+
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SpeakerVolume(WebRtc_UWord32& volume) const
+{
+
+    {
+        CriticalSectionScoped lock(&_critSect);
+
+        if (!_speakerIsInitialized)
+        {
+            return -1;
+        }
+
+        if (_ptrDeviceOut == NULL)
+        {
+            return -1;
+        }
+    }
+
+    HRESULT hr = S_OK;
+    float fLevel(0.0f);
+
+    _volumeMutex.Enter();
+    hr = _ptrRenderSimpleVolume->GetMasterVolume(&fLevel);
+    _volumeMutex.Leave();
+    EXIT_ON_ERROR(hr);
+
+    // scale input volume range [0.0,1.0] to valid output range
+    volume = static_cast<WebRtc_UWord32> (fLevel*MAX_CORE_SPEAKER_VOLUME);
+
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SetWaveOutVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SetWaveOutVolume(WebRtc_UWord16 volumeLeft, WebRtc_UWord16 volumeRight)
+{
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  WaveOutVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::WaveOutVolume(WebRtc_UWord16& volumeLeft, WebRtc_UWord16& volumeRight) const
+{
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MaxSpeakerVolume
+//
+//  The internal range for Core Audio is 0.0 to 1.0, where 0.0 indicates
+//  silence and 1.0 indicates full volume (no attenuation).
+//  We add our (webrtc-internal) own max level to match the Wave API and
+//  how it is used today in VoE.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const
+{
+
+    if (!_speakerIsInitialized)
+    {
+        return -1;
+    }
+
+    maxVolume = static_cast<WebRtc_UWord32> (MAX_CORE_SPEAKER_VOLUME);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MinSpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::MinSpeakerVolume(WebRtc_UWord32& minVolume) const
+{
+
+    if (!_speakerIsInitialized)
+    {
+        return -1;
+    }
+
+    minVolume = static_cast<WebRtc_UWord32> (MIN_CORE_SPEAKER_VOLUME);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolumeStepSize
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+
+    if (!_speakerIsInitialized)
+    {
+        return -1;
+    }
+
+    stepSize = CORE_SPEAKER_VOLUME_STEP_SIZE;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SpeakerMuteIsAvailable(bool& available)
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_ptrDeviceOut == NULL)
+    {
+        return -1;
+    }
+
+    HRESULT hr = S_OK;
+    IAudioEndpointVolume* pVolume = NULL;
+
+    // Query the speaker system mute state.
+    hr = _ptrDeviceOut->Activate(__uuidof(IAudioEndpointVolume),
+        CLSCTX_ALL, NULL,  reinterpret_cast<void**>(&pVolume));
+    EXIT_ON_ERROR(hr);
+
+    BOOL mute;
+    hr = pVolume->GetMute(&mute);
+    if (FAILED(hr))
+        available = false;
+    else
+        available = true;
+
+    SAFE_RELEASE(pVolume);
+
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    SAFE_RELEASE(pVolume);
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SetSpeakerMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SetSpeakerMute(bool enable)
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_speakerIsInitialized)
+    {
+        return -1;
+    }
+
+    if (_ptrDeviceOut == NULL)
+    {
+        return -1;
+    }
+
+    HRESULT hr = S_OK;
+    IAudioEndpointVolume* pVolume = NULL;
+
+    // Set the speaker system mute state.
+    hr = _ptrDeviceOut->Activate(__uuidof(IAudioEndpointVolume), CLSCTX_ALL, NULL,  reinterpret_cast<void**>(&pVolume));
+    EXIT_ON_ERROR(hr);
+
+    const BOOL mute(enable);
+    hr = pVolume->SetMute(mute, NULL);
+    EXIT_ON_ERROR(hr);
+
+    SAFE_RELEASE(pVolume);
+
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    SAFE_RELEASE(pVolume);
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SpeakerMute(bool& enabled) const
+{
+
+    if (!_speakerIsInitialized)
+    {
+        return -1;
+    }
+
+    if (_ptrDeviceOut == NULL)
+    {
+        return -1;
+    }
+
+    HRESULT hr = S_OK;
+    IAudioEndpointVolume* pVolume = NULL;
+
+    // Query the speaker system mute state.
+    hr = _ptrDeviceOut->Activate(__uuidof(IAudioEndpointVolume), CLSCTX_ALL, NULL,  reinterpret_cast<void**>(&pVolume));
+    EXIT_ON_ERROR(hr);
+
+    BOOL mute;
+    hr = pVolume->GetMute(&mute);
+    EXIT_ON_ERROR(hr);
+
+    enabled = (mute == TRUE) ? true : false;
+
+    SAFE_RELEASE(pVolume);
+
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    SAFE_RELEASE(pVolume);
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::MicrophoneMuteIsAvailable(bool& available)
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_ptrDeviceIn == NULL)
+    {
+        return -1;
+    }
+
+    HRESULT hr = S_OK;
+    IAudioEndpointVolume* pVolume = NULL;
+
+    // Query the microphone system mute state.
+    hr = _ptrDeviceIn->Activate(__uuidof(IAudioEndpointVolume), CLSCTX_ALL, NULL,  reinterpret_cast<void**>(&pVolume));
+    EXIT_ON_ERROR(hr);
+
+    BOOL mute;
+    hr = pVolume->GetMute(&mute);
+    if (FAILED(hr))
+        available = false;
+    else
+        available = true;
+
+    SAFE_RELEASE(pVolume);
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    SAFE_RELEASE(pVolume);
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SetMicrophoneMute(bool enable)
+{
+
+    if (!_microphoneIsInitialized)
+    {
+        return -1;
+    }
+
+    if (_ptrDeviceIn == NULL)
+    {
+        return -1;
+    }
+
+    HRESULT hr = S_OK;
+    IAudioEndpointVolume* pVolume = NULL;
+
+    // Set the microphone system mute state.
+    hr = _ptrDeviceIn->Activate(__uuidof(IAudioEndpointVolume), CLSCTX_ALL, NULL,  reinterpret_cast<void**>(&pVolume));
+    EXIT_ON_ERROR(hr);
+
+    const BOOL mute(enable);
+    hr = pVolume->SetMute(mute, NULL);
+    EXIT_ON_ERROR(hr);
+
+    SAFE_RELEASE(pVolume);
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    SAFE_RELEASE(pVolume);
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::MicrophoneMute(bool& enabled) const
+{
+
+    if (!_microphoneIsInitialized)
+    {
+        return -1;
+    }
+
+    HRESULT hr = S_OK;
+    IAudioEndpointVolume* pVolume = NULL;
+
+    // Query the microphone system mute state.
+    hr = _ptrDeviceIn->Activate(__uuidof(IAudioEndpointVolume), CLSCTX_ALL, NULL,  reinterpret_cast<void**>(&pVolume));
+    EXIT_ON_ERROR(hr);
+
+    BOOL mute;
+    hr = pVolume->GetMute(&mute);
+    EXIT_ON_ERROR(hr);
+
+    enabled = (mute == TRUE) ? true : false;
+
+    SAFE_RELEASE(pVolume);
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    SAFE_RELEASE(pVolume);
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneBoostIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::MicrophoneBoostIsAvailable(bool& available)
+{
+
+    available = false;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneBoost
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SetMicrophoneBoost(bool enable)
+{
+
+    if (!_microphoneIsInitialized)
+    {
+        return -1;
+    }
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneBoost
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::MicrophoneBoost(bool& enabled) const
+{
+
+    if (!_microphoneIsInitialized)
+    {
+        return -1;
+    }
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoRecordingIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::StereoRecordingIsAvailable(bool& available)
+{
+
+    available = true;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetStereoRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SetStereoRecording(bool enable)
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (enable)
+    {
+        _recChannelsPrioList[0] = 2;    // try stereo first
+        _recChannelsPrioList[1] = 1;
+        _recChannels = 2;
+    }
+    else
+    {
+        _recChannelsPrioList[0] = 1;    // try mono first
+        _recChannelsPrioList[1] = 2;
+        _recChannels = 1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::StereoRecording(bool& enabled) const
+{
+
+    if (_recChannels == 2)
+        enabled = true;
+    else
+        enabled = false;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoPlayoutIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::StereoPlayoutIsAvailable(bool& available)
+{
+
+    available = true;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetStereoPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SetStereoPlayout(bool enable)
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (enable)
+    {
+        _playChannelsPrioList[0] = 2;    // try stereo first
+        _playChannelsPrioList[1] = 1;
+        _playChannels = 2;
+    }
+    else
+    {
+        _playChannelsPrioList[0] = 1;    // try mono first
+        _playChannelsPrioList[1] = 2;
+        _playChannels = 1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::StereoPlayout(bool& enabled) const
+{
+
+    if (_playChannels == 2)
+        enabled = true;
+    else
+        enabled = false;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetAGC
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SetAGC(bool enable)
+{
+    CriticalSectionScoped lock(&_critSect);
+    _AGC = enable;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  AGC
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsCore::AGC() const
+{
+    CriticalSectionScoped lock(&_critSect);
+    return _AGC;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolumeIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::MicrophoneVolumeIsAvailable(bool& available)
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_ptrDeviceIn == NULL)
+    {
+        return -1;
+    }
+
+    HRESULT hr = S_OK;
+    IAudioEndpointVolume* pVolume = NULL;
+
+    hr = _ptrDeviceIn->Activate(__uuidof(IAudioEndpointVolume), CLSCTX_ALL, NULL, reinterpret_cast<void**>(&pVolume));
+    EXIT_ON_ERROR(hr);
+
+    float volume(0.0f);
+    hr = pVolume->GetMasterVolumeLevelScalar(&volume);
+    if (FAILED(hr))
+    {
+        available = false;
+    }
+    available = true;
+
+    SAFE_RELEASE(pVolume);
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    SAFE_RELEASE(pVolume);
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SetMicrophoneVolume(WebRtc_UWord32 volume)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "AudioDeviceWindowsCore::SetMicrophoneVolume(volume=%u)", volume);
+
+    {
+        CriticalSectionScoped lock(&_critSect);
+
+        if (!_microphoneIsInitialized)
+        {
+            return -1;
+        }
+
+        if (_ptrDeviceIn == NULL)
+        {
+            return -1;
+        }
+    }
+
+    if (volume < static_cast<WebRtc_UWord32>(MIN_CORE_MICROPHONE_VOLUME) ||
+        volume > static_cast<WebRtc_UWord32>(MAX_CORE_MICROPHONE_VOLUME))
+    {
+        return -1;
+    }
+
+    HRESULT hr = S_OK;
+    // scale input volume to valid range (0.0 to 1.0)
+    const float fLevel = static_cast<float>(volume)/MAX_CORE_MICROPHONE_VOLUME;
+    _volumeMutex.Enter();
+    _ptrCaptureVolume->SetMasterVolumeLevelScalar(fLevel, NULL);
+    _volumeMutex.Leave();
+    EXIT_ON_ERROR(hr);
+
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::MicrophoneVolume(WebRtc_UWord32& volume) const
+{
+    {
+        CriticalSectionScoped lock(&_critSect);
+
+        if (!_microphoneIsInitialized)
+        {
+            return -1;
+        }
+
+        if (_ptrDeviceIn == NULL)
+        {
+            return -1;
+        }
+    }
+
+    HRESULT hr = S_OK;
+    float fLevel(0.0f);
+    volume = 0;
+    _volumeMutex.Enter();
+    hr = _ptrCaptureVolume->GetMasterVolumeLevelScalar(&fLevel);
+    _volumeMutex.Leave();
+    EXIT_ON_ERROR(hr);
+
+    // scale input volume range [0.0,1.0] to valid output range
+    volume = static_cast<WebRtc_UWord32> (fLevel*MAX_CORE_MICROPHONE_VOLUME);
+
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  MaxMicrophoneVolume
+//
+//  The internal range for Core Audio is 0.0 to 1.0, where 0.0 indicates
+//  silence and 1.0 indicates full volume (no attenuation).
+//  We add our (webrtc-internal) own max level to match the Wave API and
+//  how it is used today in VoE.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    if (!_microphoneIsInitialized)
+    {
+        return -1;
+    }
+
+    maxVolume = static_cast<WebRtc_UWord32> (MAX_CORE_MICROPHONE_VOLUME);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MinMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::MinMicrophoneVolume(WebRtc_UWord32& minVolume) const
+{
+
+    if (!_microphoneIsInitialized)
+    {
+        return -1;
+    }
+
+    minVolume = static_cast<WebRtc_UWord32> (MIN_CORE_MICROPHONE_VOLUME);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolumeStepSize
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+
+    if (!_microphoneIsInitialized)
+    {
+        return -1;
+    }
+
+    stepSize = CORE_MICROPHONE_VOLUME_STEP_SIZE;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDevices
+// ----------------------------------------------------------------------------
+
+WebRtc_Word16 AudioDeviceWindowsCore::PlayoutDevices()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_RefreshDeviceList(eRender) != -1)
+    {
+        return (_DeviceListCount(eRender));
+    }
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutDevice I (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SetPlayoutDevice(WebRtc_UWord16 index)
+{
+
+    if (_playIsInitialized)
+    {
+        return -1;
+    }
+
+    // Get current number of available rendering endpoint devices and refresh the rendering collection.
+    UINT nDevices = PlayoutDevices();
+
+    if (index < 0 || index > (nDevices-1))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "device index is out of range [0,%u]", (nDevices-1));
+        return -1;
+    }
+
+    CriticalSectionScoped lock(&_critSect);
+
+    HRESULT hr(S_OK);
+
+    assert(_ptrRenderCollection != NULL);
+
+    //  Select an endpoint rendering device given the specified index
+    SAFE_RELEASE(_ptrDeviceOut);
+    hr = _ptrRenderCollection->Item(
+                                 index,
+                                 &_ptrDeviceOut);
+    if (FAILED(hr))
+    {
+        _TraceCOMError(hr);
+        SAFE_RELEASE(_ptrDeviceOut);
+        return -1;
+    }
+
+    WCHAR szDeviceName[MAX_PATH];
+    const int bufferLen = sizeof(szDeviceName)/sizeof(szDeviceName)[0];
+
+    // Get the endpoint device's friendly-name
+    if (_GetDeviceName(_ptrDeviceOut, szDeviceName, bufferLen) == 0)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "friendly name: \"%S\"", szDeviceName);
+    }
+
+    _usingOutputDeviceIndex = true;
+    _outputDeviceIndex = index;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutDevice II (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SetPlayoutDevice(AudioDeviceModule::WindowsDeviceType device)
+{
+    if (_playIsInitialized)
+    {
+        return -1;
+    }
+
+    ERole role(eCommunications);
+
+    if (device == AudioDeviceModule::kDefaultDevice)
+    {
+        role = eConsole;
+    }
+    else if (device == AudioDeviceModule::kDefaultCommunicationDevice)
+    {
+        role = eCommunications;
+    }
+
+    CriticalSectionScoped lock(&_critSect);
+
+    // Refresh the list of rendering endpoint devices
+    _RefreshDeviceList(eRender);
+
+    HRESULT hr(S_OK);
+
+    assert(_ptrEnumerator != NULL);
+
+    //  Select an endpoint rendering device given the specified role
+    SAFE_RELEASE(_ptrDeviceOut);
+    hr = _ptrEnumerator->GetDefaultAudioEndpoint(
+                           eRender,
+                           role,
+                           &_ptrDeviceOut);
+    if (FAILED(hr))
+    {
+        _TraceCOMError(hr);
+        SAFE_RELEASE(_ptrDeviceOut);
+        return -1;
+    }
+
+    WCHAR szDeviceName[MAX_PATH];
+    const int bufferLen = sizeof(szDeviceName)/sizeof(szDeviceName)[0];
+
+    // Get the endpoint device's friendly-name
+    if (_GetDeviceName(_ptrDeviceOut, szDeviceName, bufferLen) == 0)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "friendly name: \"%S\"", szDeviceName);
+    }
+
+    _usingOutputDeviceIndex = false;
+    _outputDevice = device;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDeviceName
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::PlayoutDeviceName(
+    WebRtc_UWord16 index,
+    char name[kAdmMaxDeviceNameSize],
+    char guid[kAdmMaxGuidSize])
+{
+
+    bool defaultCommunicationDevice(false);
+    const WebRtc_Word16 nDevices(PlayoutDevices());  // also updates the list of devices
+
+    // Special fix for the case when the user selects '-1' as index (<=> Default Communication Device)
+    if (index == (WebRtc_UWord16)(-1))
+    {
+        defaultCommunicationDevice = true;
+        index = 0;
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Default Communication endpoint device will be used");
+    }
+
+    if ((index > (nDevices-1)) || (name == NULL))
+    {
+        return -1;
+    }
+
+    memset(name, 0, kAdmMaxDeviceNameSize);
+
+    if (guid != NULL)
+    {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+
+    CriticalSectionScoped lock(&_critSect);
+
+    WebRtc_Word32 ret(-1);
+    WCHAR szDeviceName[MAX_PATH];
+    const int bufferLen = sizeof(szDeviceName)/sizeof(szDeviceName)[0];
+
+    // Get the endpoint device's friendly-name
+    if (defaultCommunicationDevice)
+    {
+        ret = _GetDefaultDeviceName(eRender, eCommunications, szDeviceName, bufferLen);
+    }
+    else
+    {
+        ret = _GetListDeviceName(eRender, index, szDeviceName, bufferLen);
+    }
+
+    if (ret == 0)
+    {
+        // Convert the endpoint device's friendly-name to UTF-8
+        if (WideCharToMultiByte(CP_UTF8, 0, szDeviceName, -1, name, kAdmMaxDeviceNameSize, NULL, NULL) == 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d", GetLastError());
+        }
+    }
+
+    // Get the endpoint ID string (uniquely identifies the device among all audio endpoint devices)
+    if (defaultCommunicationDevice)
+    {
+        ret = _GetDefaultDeviceID(eRender, eCommunications, szDeviceName, bufferLen);
+    }
+    else
+    {
+        ret = _GetListDeviceID(eRender, index, szDeviceName, bufferLen);
+    }
+
+    if (guid != NULL && ret == 0)
+    {
+        // Convert the endpoint device's ID string to UTF-8
+        if (WideCharToMultiByte(CP_UTF8, 0, szDeviceName, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d", GetLastError());
+        }
+    }
+
+    return ret;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDeviceName
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::RecordingDeviceName(
+    WebRtc_UWord16 index,
+    char name[kAdmMaxDeviceNameSize],
+    char guid[kAdmMaxGuidSize])
+{
+
+    bool defaultCommunicationDevice(false);
+    const WebRtc_Word16 nDevices(RecordingDevices());  // also updates the list of devices
+
+    // Special fix for the case when the user selects '-1' as index (<=> Default Communication Device)
+    if (index == (WebRtc_UWord16)(-1))
+    {
+        defaultCommunicationDevice = true;
+        index = 0;
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Default Communication endpoint device will be used");
+    }
+
+    if ((index > (nDevices-1)) || (name == NULL))
+    {
+        return -1;
+    }
+
+    memset(name, 0, kAdmMaxDeviceNameSize);
+
+    if (guid != NULL)
+    {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+
+    CriticalSectionScoped lock(&_critSect);
+
+    WebRtc_Word32 ret(-1);
+    WCHAR szDeviceName[MAX_PATH];
+    const int bufferLen = sizeof(szDeviceName)/sizeof(szDeviceName)[0];
+
+    // Get the endpoint device's friendly-name
+    if (defaultCommunicationDevice)
+    {
+        ret = _GetDefaultDeviceName(eCapture, eCommunications, szDeviceName, bufferLen);
+    }
+    else
+    {
+        ret = _GetListDeviceName(eCapture, index, szDeviceName, bufferLen);
+    }
+
+    if (ret == 0)
+    {
+        // Convert the endpoint device's friendly-name to UTF-8
+        if (WideCharToMultiByte(CP_UTF8, 0, szDeviceName, -1, name, kAdmMaxDeviceNameSize, NULL, NULL) == 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d", GetLastError());
+        }
+    }
+
+    // Get the endpoint ID string (uniquely identifies the device among all audio endpoint devices)
+    if (defaultCommunicationDevice)
+    {
+        ret = _GetDefaultDeviceID(eCapture, eCommunications, szDeviceName, bufferLen);
+    }
+    else
+    {
+        ret = _GetListDeviceID(eCapture, index, szDeviceName, bufferLen);
+    }
+
+    if (guid != NULL && ret == 0)
+    {
+        // Convert the endpoint device's ID string to UTF-8
+        if (WideCharToMultiByte(CP_UTF8, 0, szDeviceName, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d", GetLastError());
+        }
+    }
+
+    return ret;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDevices
+// ----------------------------------------------------------------------------
+
+WebRtc_Word16 AudioDeviceWindowsCore::RecordingDevices()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_RefreshDeviceList(eCapture) != -1)
+    {
+        return (_DeviceListCount(eCapture));
+    }
+
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingDevice I (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SetRecordingDevice(WebRtc_UWord16 index)
+{
+
+    if (_recIsInitialized)
+    {
+        return -1;
+    }
+
+    // Get current number of available capture endpoint devices and refresh the capture collection.
+    UINT nDevices = RecordingDevices();
+
+    if (index < 0 || index > (nDevices-1))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "device index is out of range [0,%u]", (nDevices-1));
+        return -1;
+    }
+
+    CriticalSectionScoped lock(&_critSect);
+
+    HRESULT hr(S_OK);
+
+    assert(_ptrCaptureCollection != NULL);
+
+    // Select an endpoint capture device given the specified index
+    SAFE_RELEASE(_ptrDeviceIn);
+    hr = _ptrCaptureCollection->Item(
+                                 index,
+                                 &_ptrDeviceIn);
+    if (FAILED(hr))
+    {
+        _TraceCOMError(hr);
+        SAFE_RELEASE(_ptrDeviceIn);
+        return -1;
+    }
+
+    WCHAR szDeviceName[MAX_PATH];
+    const int bufferLen = sizeof(szDeviceName)/sizeof(szDeviceName)[0];
+
+    // Get the endpoint device's friendly-name
+    if (_GetDeviceName(_ptrDeviceIn, szDeviceName, bufferLen) == 0)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "friendly name: \"%S\"", szDeviceName);
+    }
+
+    _usingInputDeviceIndex = true;
+    _inputDeviceIndex = index;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingDevice II (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SetRecordingDevice(AudioDeviceModule::WindowsDeviceType device)
+{
+    if (_recIsInitialized)
+    {
+        return -1;
+    }
+
+    ERole role(eCommunications);
+
+    if (device == AudioDeviceModule::kDefaultDevice)
+    {
+        role = eConsole;
+    }
+    else if (device == AudioDeviceModule::kDefaultCommunicationDevice)
+    {
+        role = eCommunications;
+    }
+
+    CriticalSectionScoped lock(&_critSect);
+
+    // Refresh the list of capture endpoint devices
+    _RefreshDeviceList(eCapture);
+
+    HRESULT hr(S_OK);
+
+    assert(_ptrEnumerator != NULL);
+
+    //  Select an endpoint capture device given the specified role
+    SAFE_RELEASE(_ptrDeviceIn);
+    hr = _ptrEnumerator->GetDefaultAudioEndpoint(
+                           eCapture,
+                           role,
+                           &_ptrDeviceIn);
+    if (FAILED(hr))
+    {
+        _TraceCOMError(hr);
+        SAFE_RELEASE(_ptrDeviceIn);
+        return -1;
+    }
+
+    WCHAR szDeviceName[MAX_PATH];
+    const int bufferLen = sizeof(szDeviceName)/sizeof(szDeviceName)[0];
+
+    // Get the endpoint device's friendly-name
+    if (_GetDeviceName(_ptrDeviceIn, szDeviceName, bufferLen) == 0)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "friendly name: \"%S\"", szDeviceName);
+    }
+
+    _usingInputDeviceIndex = false;
+    _inputDevice = device;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::PlayoutIsAvailable(bool& available)
+{
+
+    available = false;
+
+    // Try to initialize the playout side
+    WebRtc_Word32 res = InitPlayout();
+
+    // Cancel effect of initialization
+    StopPlayout();
+
+    if (res != -1)
+    {
+        available = true;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::RecordingIsAvailable(bool& available)
+{
+
+    available = false;
+
+    // Try to initialize the recording side
+    WebRtc_Word32 res = InitRecording();
+
+    // Cancel effect of initialization
+    StopRecording();
+
+    if (res != -1)
+    {
+        available = true;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::InitPlayout()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_playing)
+    {
+        return -1;
+    }
+
+    if (_playIsInitialized)
+    {
+        return 0;
+    }
+
+    if (_ptrDeviceOut == NULL)
+    {
+        return -1;
+    }
+
+    // Initialize the speaker (devices might have been added or removed)
+    if (InitSpeaker() == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "InitSpeaker() failed");
+    }
+
+    // Ensure that the updated rendering endpoint device is valid
+    if (_ptrDeviceOut == NULL)
+    {
+        return -1;
+    }
+
+    if (_builtInAecEnabled && _recIsInitialized)
+    {
+        // Ensure the correct render device is configured in case
+        // InitRecording() was called before InitPlayout().
+        if (SetDMOProperties() == -1)
+        {
+            return -1;
+        }
+    }
+
+    HRESULT hr = S_OK;
+    WAVEFORMATEX* pWfxOut = NULL;
+    WAVEFORMATEX Wfx;
+    WAVEFORMATEX* pWfxClosestMatch = NULL;
+
+    // Create COM object with IAudioClient interface.
+    SAFE_RELEASE(_ptrClientOut);
+    hr = _ptrDeviceOut->Activate(
+                          __uuidof(IAudioClient),
+                          CLSCTX_ALL,
+                          NULL,
+                          (void**)&_ptrClientOut);
+    EXIT_ON_ERROR(hr);
+
+    // Retrieve the stream format that the audio engine uses for its internal
+    // processing (mixing) of shared-mode streams.
+    hr = _ptrClientOut->GetMixFormat(&pWfxOut);
+    if (SUCCEEDED(hr))
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Audio Engine's current rendering mix format:");
+        // format type
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wFormatTag     : 0x%X (%u)", pWfxOut->wFormatTag, pWfxOut->wFormatTag);
+        // number of channels (i.e. mono, stereo...)
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nChannels      : %d", pWfxOut->nChannels);
+        // sample rate
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nSamplesPerSec : %d", pWfxOut->nSamplesPerSec);
+        // for buffer estimation
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nAvgBytesPerSec: %d", pWfxOut->nAvgBytesPerSec);
+        // block size of data
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nBlockAlign    : %d", pWfxOut->nBlockAlign);
+        // number of bits per sample of mono data
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wBitsPerSample : %d", pWfxOut->wBitsPerSample);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "cbSize         : %d", pWfxOut->cbSize);
+    }
+
+    // Set wave format
+    Wfx.wFormatTag = WAVE_FORMAT_PCM;
+    Wfx.wBitsPerSample = 16;
+    Wfx.cbSize = 0;
+
+    const int freqs[] = {48000, 44100, 16000, 96000, 32000, 8000};
+    hr = S_FALSE;
+
+    // Iterate over frequencies and channels, in order of priority
+    for (int freq = 0; freq < sizeof(freqs)/sizeof(freqs[0]); freq++)
+    {
+        for (int chan = 0; chan < sizeof(_playChannelsPrioList)/sizeof(_playChannelsPrioList[0]); chan++)
+        {
+            Wfx.nChannels = _playChannelsPrioList[chan];
+            Wfx.nSamplesPerSec = freqs[freq];
+            Wfx.nBlockAlign = Wfx.nChannels * Wfx.wBitsPerSample / 8;
+            Wfx.nAvgBytesPerSec = Wfx.nSamplesPerSec * Wfx.nBlockAlign;
+            // If the method succeeds and the audio endpoint device supports the specified stream format,
+            // it returns S_OK. If the method succeeds and provides a closest match to the specified format,
+            // it returns S_FALSE.
+            hr = _ptrClientOut->IsFormatSupported(
+                                  AUDCLNT_SHAREMODE_SHARED,
+                                  &Wfx,
+                                  &pWfxClosestMatch);
+            if (hr == S_OK)
+            {
+                break;
+            }
+            else
+            {
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nChannels=%d, nSamplesPerSec=%d is not supported",
+                    Wfx.nChannels, Wfx.nSamplesPerSec);
+            }
+        }
+        if (hr == S_OK)
+            break;
+    }
+
+    // TODO(andrew): what happens in the event of failure in the above loop?
+    //   Is _ptrClientOut->Initialize expected to fail?
+    //   Same in InitRecording().
+    if (hr == S_OK)
+    {
+        _playAudioFrameSize = Wfx.nBlockAlign;
+        _playBlockSize = Wfx.nSamplesPerSec/100;
+        _playSampleRate = Wfx.nSamplesPerSec;
+        _devicePlaySampleRate = Wfx.nSamplesPerSec; // The device itself continues to run at 44.1 kHz.
+        _devicePlayBlockSize = Wfx.nSamplesPerSec/100;
+        if (_playBlockSize == 441)
+        {
+            _playSampleRate = 44000;    // we are actually running at 44000 Hz and *not* 44100 Hz
+            _playBlockSize = 440;       // adjust to size we can handle
+        }
+        _playChannels = Wfx.nChannels;
+
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "VoE selected this rendering format:");
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wFormatTag         : 0x%X (%u)", Wfx.wFormatTag, Wfx.wFormatTag);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nChannels          : %d", Wfx.nChannels);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nSamplesPerSec     : %d", Wfx.nSamplesPerSec);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nAvgBytesPerSec    : %d", Wfx.nAvgBytesPerSec);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nBlockAlign        : %d", Wfx.nBlockAlign);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wBitsPerSample     : %d", Wfx.wBitsPerSample);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "cbSize             : %d", Wfx.cbSize);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Additional settings:");
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_playAudioFrameSize: %d", _playAudioFrameSize);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_playBlockSize     : %d", _playBlockSize);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_playChannels      : %d", _playChannels);
+    }
+
+    _Get44kHzDrift();
+
+    // Create a rendering stream.
+    //
+    // ****************************************************************************
+    // For a shared-mode stream that uses event-driven buffering, the caller must
+    // set both hnsPeriodicity and hnsBufferDuration to 0. The Initialize method
+    // determines how large a buffer to allocate based on the scheduling period
+    // of the audio engine. Although the client's buffer processing thread is
+    // event driven, the basic buffer management process, as described previously,
+    // is unaltered.
+    // Each time the thread awakens, it should call IAudioClient::GetCurrentPadding
+    // to determine how much data to write to a rendering buffer or read from a capture
+    // buffer. In contrast to the two buffers that the Initialize method allocates
+    // for an exclusive-mode stream that uses event-driven buffering, a shared-mode
+    // stream requires a single buffer.
+    // ****************************************************************************
+    //
+    REFERENCE_TIME hnsBufferDuration = 0;  // ask for minimum buffer size (default)
+    if (_devicePlaySampleRate == 44100)
+    {
+        // Ask for a larger buffer size (30ms) when using 44.1kHz as render rate.
+        // There seems to be a larger risk of underruns for 44.1 compared
+        // with the default rate (48kHz). When using default, we set the requested
+        // buffer duration to 0, which sets the buffer to the minimum size
+        // required by the engine thread. The actual buffer size can then be
+        // read by GetBufferSize() and it is 20ms on most machines.
+        hnsBufferDuration = 30*10000;
+    }
+    hr = _ptrClientOut->Initialize(
+                          AUDCLNT_SHAREMODE_SHARED,             // share Audio Engine with other applications
+                          AUDCLNT_STREAMFLAGS_EVENTCALLBACK,    // processing of the audio buffer by the client will be event driven
+                          hnsBufferDuration,                    // requested buffer capacity as a time value (in 100-nanosecond units)
+                          0,                                    // periodicity
+                          &Wfx,                                 // selected wave format
+                          NULL);                                // session GUID
+
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "IAudioClient::Initialize() failed:");
+        if (pWfxClosestMatch != NULL)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "closest mix format: #channels=%d, samples/sec=%d, bits/sample=%d",
+                pWfxClosestMatch->nChannels, pWfxClosestMatch->nSamplesPerSec, pWfxClosestMatch->wBitsPerSample);
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "no format suggested");
+        }
+    }
+    EXIT_ON_ERROR(hr);
+
+    if (_ptrAudioBuffer)
+    {
+        // Update the audio buffer with the selected parameters
+        _ptrAudioBuffer->SetPlayoutSampleRate(_playSampleRate);
+        _ptrAudioBuffer->SetPlayoutChannels((WebRtc_UWord8)_playChannels);
+    }
+    else
+    {
+        // We can enter this state during CoreAudioIsSupported() when no AudioDeviceImplementation
+        // has been created, hence the AudioDeviceBuffer does not exist.
+        // It is OK to end up here since we don't initiate any media in CoreAudioIsSupported().
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioDeviceBuffer must be attached before streaming can start");
+    }
+
+    // Get the actual size of the shared (endpoint buffer).
+    // Typical value is 960 audio frames <=> 20ms @ 48kHz sample rate.
+    UINT bufferFrameCount(0);
+    hr = _ptrClientOut->GetBufferSize(
+                          &bufferFrameCount);
+    if (SUCCEEDED(hr))
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "IAudioClient::GetBufferSize() => %u (<=> %u bytes)",
+            bufferFrameCount, bufferFrameCount*_playAudioFrameSize);
+    }
+
+    // Set the event handle that the system signals when an audio buffer is ready
+    // to be processed by the client.
+    hr = _ptrClientOut->SetEventHandle(
+                          _hRenderSamplesReadyEvent);
+    EXIT_ON_ERROR(hr);
+
+    // Get an IAudioRenderClient interface.
+    SAFE_RELEASE(_ptrRenderClient);
+    hr = _ptrClientOut->GetService(
+                          __uuidof(IAudioRenderClient),
+                          (void**)&_ptrRenderClient);
+    EXIT_ON_ERROR(hr);
+
+    // Mark playout side as initialized
+    _playIsInitialized = true;
+
+    CoTaskMemFree(pWfxOut);
+    CoTaskMemFree(pWfxClosestMatch);
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "render side is now initialized");
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    CoTaskMemFree(pWfxOut);
+    CoTaskMemFree(pWfxClosestMatch);
+    SAFE_RELEASE(_ptrClientOut);
+    SAFE_RELEASE(_ptrRenderClient);
+    return -1;
+}
+
+// Capture initialization when the built-in AEC DirectX Media Object (DMO) is
+// used. Called from InitRecording(), most of which is skipped over. The DMO
+// handles device initialization itself.
+// Reference: http://msdn.microsoft.com/en-us/library/ff819492(v=vs.85).aspx
+WebRtc_Word32 AudioDeviceWindowsCore::InitRecordingDMO()
+{
+    assert(_builtInAecEnabled);
+    assert(_dmo != NULL);
+
+    if (SetDMOProperties() == -1)
+    {
+        return -1;
+    }
+
+    DMO_MEDIA_TYPE mt = {0};
+    HRESULT hr = MoInitMediaType(&mt, sizeof(WAVEFORMATEX));
+    if (FAILED(hr))
+    {
+        MoFreeMediaType(&mt);
+        _TraceCOMError(hr);
+        return -1;
+    }
+    mt.majortype = MEDIATYPE_Audio;
+    mt.subtype = MEDIASUBTYPE_PCM;
+    mt.formattype = FORMAT_WaveFormatEx;
+
+    // Supported formats
+    // nChannels: 1 (in AEC-only mode)
+    // nSamplesPerSec: 8000, 11025, 16000, 22050
+    // wBitsPerSample: 16
+    WAVEFORMATEX* ptrWav = reinterpret_cast<WAVEFORMATEX*>(mt.pbFormat);
+    ptrWav->wFormatTag = WAVE_FORMAT_PCM;
+    ptrWav->nChannels = 1;
+    // 16000 is the highest we can support with our resampler.
+    ptrWav->nSamplesPerSec = 16000;
+    ptrWav->nAvgBytesPerSec = 32000;
+    ptrWav->nBlockAlign = 2;
+    ptrWav->wBitsPerSample = 16;
+    ptrWav->cbSize = 0;
+
+    // Set the VoE format equal to the AEC output format.
+    _recAudioFrameSize = ptrWav->nBlockAlign;
+    _recSampleRate = ptrWav->nSamplesPerSec;
+    _recBlockSize = ptrWav->nSamplesPerSec / 100;
+    _recChannels = ptrWav->nChannels;
+
+    // Set the DMO output format parameters.
+    hr = _dmo->SetOutputType(kAecCaptureStreamIndex, &mt, 0);
+    MoFreeMediaType(&mt);
+    if (FAILED(hr))
+    {
+        _TraceCOMError(hr);
+        return -1;
+    }
+
+    if (_ptrAudioBuffer)
+    {
+        _ptrAudioBuffer->SetRecordingSampleRate(_recSampleRate);
+        _ptrAudioBuffer->SetRecordingChannels(_recChannels);
+    }
+    else
+    {
+        // Refer to InitRecording() for comments.
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+            "AudioDeviceBuffer must be attached before streaming can start");
+    }
+
+    _mediaBuffer = new MediaBufferImpl(_recBlockSize * _recAudioFrameSize);
+
+    // Optional, but if called, must be after media types are set.
+    hr = _dmo->AllocateStreamingResources();
+    if (FAILED(hr))
+    {
+         _TraceCOMError(hr);
+        return -1;
+    }
+
+    _recIsInitialized = true;
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+        "Capture side is now initialized");
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::InitRecording()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_recording)
+    {
+        return -1;
+    }
+
+    if (_recIsInitialized)
+    {
+        return 0;
+    }
+
+    if (QueryPerformanceFrequency(&_perfCounterFreq) == 0)
+    {
+        return -1;
+    }
+    _perfCounterFactor = 10000000.0 / (double)_perfCounterFreq.QuadPart;
+
+    if (_ptrDeviceIn == NULL)
+    {
+        return -1;
+    }
+
+    // Initialize the microphone (devices might have been added or removed)
+    if (InitMicrophone() == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "InitMicrophone() failed");
+    }
+
+    // Ensure that the updated capturing endpoint device is valid
+    if (_ptrDeviceIn == NULL)
+    {
+        return -1;
+    }
+
+    if (_builtInAecEnabled)
+    {
+        // The DMO will configure the capture device.
+        return InitRecordingDMO();
+    }
+
+    HRESULT hr = S_OK;
+    WAVEFORMATEX* pWfxIn = NULL;
+    WAVEFORMATEX Wfx;
+    WAVEFORMATEX* pWfxClosestMatch = NULL;
+
+    // Create COM object with IAudioClient interface.
+    SAFE_RELEASE(_ptrClientIn);
+    hr = _ptrDeviceIn->Activate(
+                          __uuidof(IAudioClient),
+                          CLSCTX_ALL,
+                          NULL,
+                          (void**)&_ptrClientIn);
+    EXIT_ON_ERROR(hr);
+
+    // Retrieve the stream format that the audio engine uses for its internal
+    // processing (mixing) of shared-mode streams.
+    hr = _ptrClientIn->GetMixFormat(&pWfxIn);
+    if (SUCCEEDED(hr))
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Audio Engine's current capturing mix format:");
+        // format type
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wFormatTag     : 0x%X (%u)", pWfxIn->wFormatTag, pWfxIn->wFormatTag);
+        // number of channels (i.e. mono, stereo...)
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nChannels      : %d", pWfxIn->nChannels);
+        // sample rate
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nSamplesPerSec : %d", pWfxIn->nSamplesPerSec);
+        // for buffer estimation
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nAvgBytesPerSec: %d", pWfxIn->nAvgBytesPerSec);
+        // block size of data
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nBlockAlign    : %d", pWfxIn->nBlockAlign);
+        // number of bits per sample of mono data
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wBitsPerSample : %d", pWfxIn->wBitsPerSample);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "cbSize         : %d", pWfxIn->cbSize);
+    }
+
+    // Set wave format
+    Wfx.wFormatTag = WAVE_FORMAT_PCM;
+    Wfx.wBitsPerSample = 16;
+    Wfx.cbSize = 0;
+
+    const int freqs[6] = {48000, 44100, 16000, 96000, 32000, 8000};
+    hr = S_FALSE;
+
+    // Iterate over frequencies and channels, in order of priority
+    for (int freq = 0; freq < sizeof(freqs)/sizeof(freqs[0]); freq++)
+    {
+        for (int chan = 0; chan < sizeof(_recChannelsPrioList)/sizeof(_recChannelsPrioList[0]); chan++)
+        {
+            Wfx.nChannels = _recChannelsPrioList[chan];
+            Wfx.nSamplesPerSec = freqs[freq];
+            Wfx.nBlockAlign = Wfx.nChannels * Wfx.wBitsPerSample / 8;
+            Wfx.nAvgBytesPerSec = Wfx.nSamplesPerSec * Wfx.nBlockAlign;
+            // If the method succeeds and the audio endpoint device supports the specified stream format,
+            // it returns S_OK. If the method succeeds and provides a closest match to the specified format,
+            // it returns S_FALSE.
+            hr = _ptrClientIn->IsFormatSupported(
+                                  AUDCLNT_SHAREMODE_SHARED,
+                                  &Wfx,
+                                  &pWfxClosestMatch);
+            if (hr == S_OK)
+            {
+                break;
+            }
+            else
+            {
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nChannels=%d, nSamplesPerSec=%d is not supported",
+                    Wfx.nChannels, Wfx.nSamplesPerSec);
+            }
+        }
+        if (hr == S_OK)
+            break;
+    }
+
+    if (hr == S_OK)
+    {
+        _recAudioFrameSize = Wfx.nBlockAlign;
+        _recSampleRate = Wfx.nSamplesPerSec;
+        _recBlockSize = Wfx.nSamplesPerSec/100;
+        _recChannels = Wfx.nChannels;
+        if (_recBlockSize == 441)
+        {
+            _recSampleRate = 44000; // we are actually using 44000 Hz and *not* 44100 Hz
+            _recBlockSize = 440;    // adjust to size we can handle
+        }
+
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "VoE selected this capturing format:");
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wFormatTag        : 0x%X (%u)", Wfx.wFormatTag, Wfx.wFormatTag);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nChannels         : %d", Wfx.nChannels);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nSamplesPerSec    : %d", Wfx.nSamplesPerSec);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nAvgBytesPerSec   : %d", Wfx.nAvgBytesPerSec);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "nBlockAlign       : %d", Wfx.nBlockAlign);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wBitsPerSample    : %d", Wfx.wBitsPerSample);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "cbSize            : %d", Wfx.cbSize);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Additional settings:");
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_recAudioFrameSize: %d", _recAudioFrameSize);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_recBlockSize     : %d", _recBlockSize);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_recChannels      : %d", _recChannels);
+    }
+
+    _Get44kHzDrift();
+
+    // Create a capturing stream.
+    hr = _ptrClientIn->Initialize(
+                          AUDCLNT_SHAREMODE_SHARED,             // share Audio Engine with other applications
+                          AUDCLNT_STREAMFLAGS_EVENTCALLBACK |   // processing of the audio buffer by the client will be event driven
+                          AUDCLNT_STREAMFLAGS_NOPERSIST,        // volume and mute settings for an audio session will not persist across system restarts
+                          0,                                    // required for event-driven shared mode
+                          0,                                    // periodicity
+                          &Wfx,                                 // selected wave format
+                          NULL);                                // session GUID
+
+
+    if (hr != S_OK)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "IAudioClient::Initialize() failed:");
+        if (pWfxClosestMatch != NULL)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "closest mix format: #channels=%d, samples/sec=%d, bits/sample=%d",
+                pWfxClosestMatch->nChannels, pWfxClosestMatch->nSamplesPerSec, pWfxClosestMatch->wBitsPerSample);
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "no format suggested");
+        }
+    }
+    EXIT_ON_ERROR(hr);
+
+    if (_ptrAudioBuffer)
+    {
+        // Update the audio buffer with the selected parameters
+        _ptrAudioBuffer->SetRecordingSampleRate(_recSampleRate);
+        _ptrAudioBuffer->SetRecordingChannels((WebRtc_UWord8)_recChannels);
+    }
+    else
+    {
+        // We can enter this state during CoreAudioIsSupported() when no AudioDeviceImplementation
+        // has been created, hence the AudioDeviceBuffer does not exist.
+        // It is OK to end up here since we don't initiate any media in CoreAudioIsSupported().
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioDeviceBuffer must be attached before streaming can start");
+    }
+
+    // Get the actual size of the shared (endpoint buffer).
+    // Typical value is 960 audio frames <=> 20ms @ 48kHz sample rate.
+    UINT bufferFrameCount(0);
+    hr = _ptrClientIn->GetBufferSize(
+                          &bufferFrameCount);
+    if (SUCCEEDED(hr))
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "IAudioClient::GetBufferSize() => %u (<=> %u bytes)",
+            bufferFrameCount, bufferFrameCount*_recAudioFrameSize);
+    }
+
+    // Set the event handle that the system signals when an audio buffer is ready
+    // to be processed by the client.
+    hr = _ptrClientIn->SetEventHandle(
+                          _hCaptureSamplesReadyEvent);
+    EXIT_ON_ERROR(hr);
+
+    // Get an IAudioCaptureClient interface.
+    SAFE_RELEASE(_ptrCaptureClient);
+    hr = _ptrClientIn->GetService(
+                          __uuidof(IAudioCaptureClient),
+                          (void**)&_ptrCaptureClient);
+    EXIT_ON_ERROR(hr);
+
+    // Mark capture side as initialized
+    _recIsInitialized = true;
+
+    CoTaskMemFree(pWfxIn);
+    CoTaskMemFree(pWfxClosestMatch);
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "capture side is now initialized");
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    CoTaskMemFree(pWfxIn);
+    CoTaskMemFree(pWfxClosestMatch);
+    SAFE_RELEASE(_ptrClientIn);
+    SAFE_RELEASE(_ptrCaptureClient);
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  StartRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::StartRecording()
+{
+
+    if (!_recIsInitialized)
+    {
+        return -1;
+    }
+
+    if (_hRecThread != NULL)
+    {
+        return 0;
+    }
+
+    if (_recording)
+    {
+        return 0;
+    }
+
+    {
+        CriticalSectionScoped critScoped(&_critSect);
+
+        // Create thread which will drive the capturing
+        LPTHREAD_START_ROUTINE lpStartAddress = WSAPICaptureThread;
+        if (_builtInAecEnabled)
+        {
+            // Redirect to the DMO polling method.
+            lpStartAddress = WSAPICaptureThreadPollDMO;
+
+            if (!_playing)
+            {
+                // The DMO won't provide us captured output data unless we
+                // give it render data to process.
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                    "Playout must be started before recording when using the "
+                    "built-in AEC");
+                return -1;
+            }
+        }
+
+        assert(_hRecThread == NULL);
+        _hRecThread = CreateThread(NULL,
+                                   0,
+                                   lpStartAddress,
+                                   this,
+                                   0,
+                                   NULL);
+        if (_hRecThread == NULL)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "failed to create the recording thread");
+            return -1;
+        }
+
+        // Set thread priority to highest possible
+        SetThreadPriority(_hRecThread, THREAD_PRIORITY_TIME_CRITICAL);
+
+        assert(_hGetCaptureVolumeThread == NULL);
+        _hGetCaptureVolumeThread = CreateThread(NULL,
+                                                0,
+                                                GetCaptureVolumeThread,
+                                                this,
+                                                0,
+                                                NULL);
+        if (_hGetCaptureVolumeThread == NULL)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to create the volume getter thread");
+            return -1;
+        }
+
+        assert(_hSetCaptureVolumeThread == NULL);
+        _hSetCaptureVolumeThread = CreateThread(NULL,
+                                                0,
+                                                SetCaptureVolumeThread,
+                                                this,
+                                                0,
+                                                NULL);
+        if (_hSetCaptureVolumeThread == NULL)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                         "  failed to create the volume setter thread");
+            return -1;
+        }
+    }  // critScoped
+
+    DWORD ret = WaitForSingleObject(_hCaptureStartedEvent, 1000);
+    if (ret != WAIT_OBJECT_0)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+            "capturing did not start up properly");
+        return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+        "capture audio stream has now started...");
+
+    _avgCPULoad = 0.0f;
+    _playAcc = 0;
+    _recording = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StopRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::StopRecording()
+{
+    WebRtc_Word32 err = 0;
+
+    if (!_recIsInitialized)
+    {
+        return 0;
+    }
+
+    _Lock();
+
+    if (_hRecThread == NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+            "no capturing stream is active => close down WASAPI only");
+        SAFE_RELEASE(_ptrClientIn);
+        SAFE_RELEASE(_ptrCaptureClient);
+        _recIsInitialized = false;
+        _recording = false;
+        _UnLock();
+        return 0;
+    }
+
+    // Stop the driving thread...
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+        "closing down the webrtc_core_audio_capture_thread...");
+    // Manual-reset event; it will remain signalled to stop all capture threads.
+    SetEvent(_hShutdownCaptureEvent);
+
+    _UnLock();
+    DWORD ret = WaitForSingleObject(_hRecThread, 2000);
+    if (ret != WAIT_OBJECT_0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "failed to close down webrtc_core_audio_capture_thread");
+        err = -1;
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+            "webrtc_core_audio_capture_thread is now closed");
+    }
+
+    ret = WaitForSingleObject(_hGetCaptureVolumeThread, 2000);
+    if (ret != WAIT_OBJECT_0)
+    {
+        // the thread did not stop as it should
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to close down volume getter thread");
+        err = -1;
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+            "  volume getter thread is now closed");
+    }
+
+    ret = WaitForSingleObject(_hSetCaptureVolumeThread, 2000);
+    if (ret != WAIT_OBJECT_0)
+    {
+        // the thread did not stop as it should
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                     "  failed to close down volume setter thread");
+        err = -1;
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+            "  volume setter thread is now closed");
+    }
+    _Lock();
+
+    ResetEvent(_hShutdownCaptureEvent); // Must be manually reset.
+    // Ensure that the thread has released these interfaces properly.
+    assert(err == -1 || _ptrClientIn == NULL);
+    assert(err == -1 || _ptrCaptureClient == NULL);
+
+    _recIsInitialized = false;
+    _recording = false;
+
+    // These will create thread leaks in the result of an error,
+    // but we can at least resume the call.
+    CloseHandle(_hRecThread);
+    _hRecThread = NULL;
+
+    CloseHandle(_hGetCaptureVolumeThread);
+    _hGetCaptureVolumeThread = NULL;
+
+    CloseHandle(_hSetCaptureVolumeThread);
+    _hSetCaptureVolumeThread = NULL;
+
+    if (_builtInAecEnabled)
+    {
+        assert(_dmo != NULL);
+        // This is necessary. Otherwise the DMO can generate garbage render
+        // audio even after rendering has stopped.
+        HRESULT hr = _dmo->FreeStreamingResources();
+        if (FAILED(hr))
+        {
+            _TraceCOMError(hr);
+            err = -1;
+        }
+    }
+
+    // Reset the recording delay value.
+    _sndCardRecDelay = 0;
+
+    _UnLock();
+
+    return err;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsCore::RecordingIsInitialized() const
+{
+    return (_recIsInitialized);
+}
+
+// ----------------------------------------------------------------------------
+//  Recording
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsCore::Recording() const
+{
+    return (_recording);
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsCore::PlayoutIsInitialized() const
+{
+
+    return (_playIsInitialized);
+}
+
+// ----------------------------------------------------------------------------
+//  StartPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::StartPlayout()
+{
+
+    if (!_playIsInitialized)
+    {
+        return -1;
+    }
+
+    if (_hPlayThread != NULL)
+    {
+        return 0;
+    }
+
+    if (_playing)
+    {
+        return 0;
+    }
+
+    {
+        CriticalSectionScoped critScoped(&_critSect);
+
+        // Create thread which will drive the rendering.
+        assert(_hPlayThread == NULL);
+        _hPlayThread = CreateThread(
+                         NULL,
+                         0,
+                         WSAPIRenderThread,
+                         this,
+                         0,
+                         NULL);
+        if (_hPlayThread == NULL)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                "failed to create the playout thread");
+            return -1;
+        }
+
+        // Set thread priority to highest possible.
+        SetThreadPriority(_hPlayThread, THREAD_PRIORITY_TIME_CRITICAL);
+    }  // critScoped
+
+    DWORD ret = WaitForSingleObject(_hRenderStartedEvent, 1000);
+    if (ret != WAIT_OBJECT_0)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+            "rendering did not start up properly");
+        return -1;
+    }
+
+    _playing = true;
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+        "rendering audio stream has now started...");
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StopPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::StopPlayout()
+{
+
+    if (!_playIsInitialized)
+    {
+        return 0;
+    }
+
+    {
+        CriticalSectionScoped critScoped(&_critSect) ;
+
+        if (_hPlayThread == NULL)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                "no rendering stream is active => close down WASAPI only");
+            SAFE_RELEASE(_ptrClientOut);
+            SAFE_RELEASE(_ptrRenderClient);
+            _playIsInitialized = false;
+            _playing = false;
+            return 0;
+        }
+
+        // stop the driving thread...
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+            "closing down the webrtc_core_audio_render_thread...");
+        SetEvent(_hShutdownRenderEvent);
+    }  // critScoped
+
+    DWORD ret = WaitForSingleObject(_hPlayThread, 2000);
+    if (ret != WAIT_OBJECT_0)
+    {
+        // the thread did not stop as it should
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "failed to close down webrtc_core_audio_render_thread");
+        CloseHandle(_hPlayThread);
+        _hPlayThread = NULL;
+        _playIsInitialized = false;
+        _playing = false;
+        return -1;
+    }
+
+    {
+        CriticalSectionScoped critScoped(&_critSect);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+            "webrtc_core_audio_render_thread is now closed");
+        
+        // to reset this event manually at each time we finish with it, 
+        // in case that the render thread has exited before StopPlayout(),
+        // this event might be caught by the new render thread within same VoE instance.
+        ResetEvent(_hShutdownRenderEvent); 
+
+        SAFE_RELEASE(_ptrClientOut);
+        SAFE_RELEASE(_ptrRenderClient);
+       
+        _playIsInitialized = false;
+        _playing = false;
+
+        CloseHandle(_hPlayThread);
+        _hPlayThread = NULL;
+
+        if (_builtInAecEnabled && _recording)
+        {
+            // The DMO won't provide us captured output data unless we
+            // give it render data to process.
+            //
+            // We still permit the playout to shutdown, and trace a warning.
+            // Otherwise, VoE can get into a state which will never permit
+            // playout to stop properly.
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                "Recording should be stopped before playout when using the "
+                "built-in AEC");
+        }
+
+        // Reset the playout delay value.
+        _sndCardPlayDelay = 0;
+    }  // critScoped
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDelay
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::PlayoutDelay(WebRtc_UWord16& delayMS) const
+{
+    CriticalSectionScoped critScoped(&_critSect);
+    delayMS = static_cast<WebRtc_UWord16>(_sndCardPlayDelay);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDelay
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::RecordingDelay(WebRtc_UWord16& delayMS) const
+{
+    CriticalSectionScoped critScoped(&_critSect);
+    delayMS = static_cast<WebRtc_UWord16>(_sndCardRecDelay);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Playing
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsCore::Playing() const
+{
+    return (_playing);
+}
+// ----------------------------------------------------------------------------
+//  SetPlayoutBuffer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::SetPlayoutBuffer(const AudioDeviceModule::BufferType type, WebRtc_UWord16 sizeMS)
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    _playBufType = type;
+
+    if (type == AudioDeviceModule::kFixedBufferSize)
+    {
+        _playBufDelayFixed = sizeMS;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutBuffer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::PlayoutBuffer(AudioDeviceModule::BufferType& type, WebRtc_UWord16& sizeMS) const
+{
+    CriticalSectionScoped lock(&_critSect);
+    type = _playBufType;
+
+    if (type == AudioDeviceModule::kFixedBufferSize)
+    {
+        sizeMS = _playBufDelayFixed;
+    }
+    else
+    {
+        // Use same value as for PlayoutDelay
+        sizeMS = static_cast<WebRtc_UWord16>(_sndCardPlayDelay);
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  CPULoad
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::CPULoad(WebRtc_UWord16& load) const
+{
+
+    load = static_cast<WebRtc_UWord16> (100*_avgCPULoad);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutWarning
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsCore::PlayoutWarning() const
+{
+    return ( _playWarning > 0);
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutError
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsCore::PlayoutError() const
+{
+    return ( _playError > 0);
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingWarning
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsCore::RecordingWarning() const
+{
+    return ( _recWarning > 0);
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingError
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsCore::RecordingError() const
+{
+    return ( _recError > 0);
+}
+
+// ----------------------------------------------------------------------------
+//  ClearPlayoutWarning
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsCore::ClearPlayoutWarning()
+{
+    _playWarning = 0;
+}
+
+// ----------------------------------------------------------------------------
+//  ClearPlayoutError
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsCore::ClearPlayoutError()
+{
+    _playError = 0;
+}
+
+// ----------------------------------------------------------------------------
+//  ClearRecordingWarning
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsCore::ClearRecordingWarning()
+{
+    _recWarning = 0;
+}
+
+// ----------------------------------------------------------------------------
+//  ClearRecordingError
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsCore::ClearRecordingError()
+{
+    _recError = 0;
+}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  [static] WSAPIRenderThread
+// ----------------------------------------------------------------------------
+
+DWORD WINAPI AudioDeviceWindowsCore::WSAPIRenderThread(LPVOID context)
+{
+    return reinterpret_cast<AudioDeviceWindowsCore*>(context)->
+        DoRenderThread();
+}
+
+// ----------------------------------------------------------------------------
+//  [static] WSAPICaptureThread
+// ----------------------------------------------------------------------------
+
+DWORD WINAPI AudioDeviceWindowsCore::WSAPICaptureThread(LPVOID context)
+{
+    return reinterpret_cast<AudioDeviceWindowsCore*>(context)->
+        DoCaptureThread();
+}
+
+DWORD WINAPI AudioDeviceWindowsCore::WSAPICaptureThreadPollDMO(LPVOID context)
+{
+    return reinterpret_cast<AudioDeviceWindowsCore*>(context)->
+        DoCaptureThreadPollDMO();
+}
+
+DWORD WINAPI AudioDeviceWindowsCore::GetCaptureVolumeThread(LPVOID context)
+{
+    return reinterpret_cast<AudioDeviceWindowsCore*>(context)->
+        DoGetCaptureVolumeThread();
+}
+
+DWORD WINAPI AudioDeviceWindowsCore::SetCaptureVolumeThread(LPVOID context)
+{
+    return reinterpret_cast<AudioDeviceWindowsCore*>(context)->
+        DoSetCaptureVolumeThread();
+}
+
+DWORD AudioDeviceWindowsCore::DoGetCaptureVolumeThread()
+{
+    HANDLE waitObject = _hShutdownCaptureEvent;
+
+    while (1)
+    {
+        if (AGC())
+        {
+            WebRtc_UWord32 currentMicLevel = 0;
+            if (MicrophoneVolume(currentMicLevel) == 0)
+            {
+                // This doesn't set the system volume, just stores it.
+                _Lock();
+                if (_ptrAudioBuffer)
+                {
+                    _ptrAudioBuffer->SetCurrentMicLevel(currentMicLevel);
+                }
+                _UnLock();
+            }
+        }
+
+        DWORD waitResult = WaitForSingleObject(waitObject,
+                                               GET_MIC_VOLUME_INTERVAL_MS);
+        switch (waitResult)
+        {
+            case WAIT_OBJECT_0: // _hShutdownCaptureEvent
+                return 0;
+            case WAIT_TIMEOUT:  // timeout notification
+                break;
+            default:            // unexpected error
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                    "  unknown wait termination on get volume thread");
+                return -1;
+        }
+    }
+}
+
+DWORD AudioDeviceWindowsCore::DoSetCaptureVolumeThread()
+{
+    HANDLE waitArray[2] = {_hShutdownCaptureEvent, _hSetCaptureVolumeEvent};
+
+    while (1)
+    {
+        DWORD waitResult = WaitForMultipleObjects(2, waitArray, FALSE, INFINITE);
+        switch (waitResult)
+        {
+            case WAIT_OBJECT_0:      // _hShutdownCaptureEvent
+                return 0;
+            case WAIT_OBJECT_0 + 1:  // _hSetCaptureVolumeEvent
+                break;
+            default:                 // unexpected error
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                    "  unknown wait termination on set volume thread");
+                    return -1;
+        }
+
+        _Lock();
+        WebRtc_UWord32 newMicLevel = _newMicLevel;
+        _UnLock();
+
+        if (SetMicrophoneVolume(newMicLevel) == -1)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                "  the required modification of the microphone volume failed");
+        }
+    }
+}
+
+// ----------------------------------------------------------------------------
+//  DoRenderThread
+// ----------------------------------------------------------------------------
+
+DWORD AudioDeviceWindowsCore::DoRenderThread()
+{
+
+    bool keepPlaying = true;
+    HANDLE waitArray[2] = {_hShutdownRenderEvent, _hRenderSamplesReadyEvent};
+    HRESULT hr = S_OK;
+    HANDLE hMmTask = NULL;
+
+    LARGE_INTEGER t1;
+    LARGE_INTEGER t2;
+    WebRtc_Word32 time(0);
+
+    // Initialize COM as MTA in this thread.
+    ScopedCOMInitializer comInit(ScopedCOMInitializer::kMTA);
+    if (!comInit.succeeded()) {
+      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+          "failed to initialize COM in render thread");
+      return -1;
+    }
+
+    _SetThreadName(-1, "webrtc_core_audio_render_thread");
+
+    // Use Multimedia Class Scheduler Service (MMCSS) to boost the thread priority.
+    //
+    if (_winSupportAvrt)
+    {
+        DWORD taskIndex(0);
+        hMmTask = _PAvSetMmThreadCharacteristicsA("Pro Audio", &taskIndex);
+        if (hMmTask)
+        {
+            if (FALSE == _PAvSetMmThreadPriority(hMmTask, AVRT_PRIORITY_CRITICAL))
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "failed to boost play-thread using MMCSS");
+            }
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "render thread is now registered with MMCSS (taskIndex=%d)", taskIndex);
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "failed to enable MMCSS on render thread (err=%d)", GetLastError());
+            _TraceCOMError(GetLastError());
+        }
+    }
+
+    _Lock();
+
+    // Get size of rendering buffer (length is expressed as the number of audio frames the buffer can hold).
+    // This value is fixed during the rendering session.
+    //
+    UINT32 bufferLength = 0;
+    hr = _ptrClientOut->GetBufferSize(&bufferLength);
+    EXIT_ON_ERROR(hr);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[REND] size of buffer       : %u", bufferLength);
+
+    // Get maximum latency for the current stream (will not change for the lifetime  of the IAudioClient object).
+    //
+    REFERENCE_TIME latency;
+    _ptrClientOut->GetStreamLatency(&latency);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[REND] max stream latency   : %u (%3.2f ms)",
+        (DWORD)latency, (double)(latency/10000.0));
+
+    // Get the length of the periodic interval separating successive processing passes by
+    // the audio engine on the data in the endpoint buffer.
+    //
+    // The period between processing passes by the audio engine is fixed for a particular
+    // audio endpoint device and represents the smallest processing quantum for the audio engine.
+    // This period plus the stream latency between the buffer and endpoint device represents
+    // the minimum possible latency that an audio application can achieve.
+    // Typical value: 100000 <=> 0.01 sec = 10ms.
+    //
+    REFERENCE_TIME devPeriod = 0;
+    REFERENCE_TIME devPeriodMin = 0;
+    _ptrClientOut->GetDevicePeriod(&devPeriod, &devPeriodMin);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[REND] device period        : %u (%3.2f ms)",
+        (DWORD)devPeriod, (double)(devPeriod/10000.0));
+
+    // Derive initial rendering delay.
+    // Example: 10*(960/480) + 15 = 20 + 15 = 35ms
+    //
+    int playout_delay = 10 * (bufferLength / _playBlockSize) +
+        (int)((latency + devPeriod) / 10000);
+    _sndCardPlayDelay = playout_delay;
+    _writtenSamples = 0;
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "[REND] initial delay        : %u", playout_delay);
+
+    double endpointBufferSizeMS = 10.0 * ((double)bufferLength / (double)_devicePlayBlockSize);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[REND] endpointBufferSizeMS : %3.2f", endpointBufferSizeMS);
+
+    // Before starting the stream, fill the rendering buffer with silence.
+    //
+    BYTE *pData = NULL;
+    hr = _ptrRenderClient->GetBuffer(bufferLength, &pData);
+    EXIT_ON_ERROR(hr);
+
+    hr = _ptrRenderClient->ReleaseBuffer(bufferLength, AUDCLNT_BUFFERFLAGS_SILENT);
+    EXIT_ON_ERROR(hr);
+
+    _writtenSamples += bufferLength;
+
+    IAudioClock* clock = NULL;
+    hr = _ptrClientOut->GetService(__uuidof(IAudioClock), (void**)&clock);
+    if (FAILED(hr)) {
+      WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                   "failed to get IAudioClock interface from the IAudioClient");
+    }
+
+    // Start up the rendering audio stream.
+    hr = _ptrClientOut->Start();
+    EXIT_ON_ERROR(hr);
+
+    _UnLock();
+
+    // Set event which will ensure that the calling thread modifies the playing state to true.
+    //
+    SetEvent(_hRenderStartedEvent);
+
+    // >> ------------------ THREAD LOOP ------------------
+
+    while (keepPlaying)
+    {
+        // Wait for a render notification event or a shutdown event
+        DWORD waitResult = WaitForMultipleObjects(2, waitArray, FALSE, 500);
+        switch (waitResult)
+        {
+        case WAIT_OBJECT_0 + 0:     // _hShutdownRenderEvent
+            keepPlaying = false;
+            break;
+        case WAIT_OBJECT_0 + 1:     // _hRenderSamplesReadyEvent
+            break;
+        case WAIT_TIMEOUT:          // timeout notification
+            _ptrClientOut->Stop();
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "render event timed out after 0.5 seconds");
+            goto Exit;
+        default:                    // unexpected error
+            _ptrClientOut->Stop();
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "unknown wait termination on render side");
+            goto Exit;
+        }
+
+        while (keepPlaying)
+        {
+            _Lock();
+
+            // Sanity check to ensure that essential states are not modified
+            // during the unlocked period.
+            if (_ptrRenderClient == NULL || _ptrClientOut == NULL)
+            {
+                _UnLock();
+                WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                    "output state has been modified during unlocked period");
+                goto Exit;
+            }
+
+            // Get the number of frames of padding (queued up to play) in the endpoint buffer.
+            UINT32 padding = 0;
+            hr = _ptrClientOut->GetCurrentPadding(&padding);
+            EXIT_ON_ERROR(hr);
+
+            // Derive the amount of available space in the output buffer
+            WebRtc_UWord32 framesAvailable = bufferLength - padding;
+            // WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "#avaliable audio frames = %u", framesAvailable);
+
+            // Do we have 10 ms available in the render buffer?
+            if (framesAvailable < _playBlockSize)
+            {
+                // Not enough space in render buffer to store next render packet.
+                _UnLock();
+                break;
+            }
+
+            // Write n*10ms buffers to the render buffer
+            const WebRtc_UWord32 n10msBuffers = (framesAvailable / _playBlockSize);
+            for (WebRtc_UWord32 n = 0; n < n10msBuffers; n++)
+            {
+                // Get pointer (i.e., grab the buffer) to next space in the shared render buffer.
+                hr = _ptrRenderClient->GetBuffer(_playBlockSize, &pData);
+                EXIT_ON_ERROR(hr);
+
+                QueryPerformanceCounter(&t1);    // measure time: START
+
+                if (_ptrAudioBuffer)
+                {
+                    // Request data to be played out (#bytes = _playBlockSize*_audioFrameSize)
+                    _UnLock();
+                    WebRtc_Word32 nSamples =
+                    _ptrAudioBuffer->RequestPlayoutData(_playBlockSize);
+                    _Lock();
+
+                    if (nSamples == -1) 
+                    {
+                        _UnLock();
+                        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                                     "failed to read data from render client");
+                        goto Exit;
+                    }
+
+                    // Sanity check to ensure that essential states are not modified during the unlocked period
+                    if (_ptrRenderClient == NULL || _ptrClientOut == NULL)
+                    {
+                        _UnLock();
+                        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, "output state has been modified during unlocked period");
+                        goto Exit;
+                    }
+                    if (nSamples != _playBlockSize)
+                    {
+                        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "nSamples(%d) != _playBlockSize(%d)", nSamples, _playBlockSize);
+                    }
+
+                    // Get the actual (stored) data
+                    nSamples = _ptrAudioBuffer->GetPlayoutData((WebRtc_Word8*)pData);
+                }
+
+                QueryPerformanceCounter(&t2);    // measure time: STOP
+                time = (int)(t2.QuadPart-t1.QuadPart);
+                _playAcc += time;
+
+                DWORD dwFlags(0);
+                hr = _ptrRenderClient->ReleaseBuffer(_playBlockSize, dwFlags);
+                // See http://msdn.microsoft.com/en-us/library/dd316605(VS.85).aspx
+                // for more details regarding AUDCLNT_E_DEVICE_INVALIDATED.
+                EXIT_ON_ERROR(hr);
+
+                _writtenSamples += _playBlockSize;
+            }
+
+            // Check the current delay on the playout side.
+            if (clock) {
+              UINT64 pos = 0;
+              UINT64 freq = 1;
+              clock->GetPosition(&pos, NULL);
+              clock->GetFrequency(&freq);
+              playout_delay = ROUND((double(_writtenSamples) /
+                  _devicePlaySampleRate - double(pos) / freq) * 1000.0);
+              _sndCardPlayDelay = playout_delay;
+            }
+
+            _UnLock();
+        }
+    }
+
+    // ------------------ THREAD LOOP ------------------ <<
+
+    SleepMs(static_cast<DWORD>(endpointBufferSizeMS+0.5));
+    hr = _ptrClientOut->Stop();
+
+Exit:
+    SAFE_RELEASE(clock);
+
+    if (FAILED(hr))
+    {
+        _UnLock();
+        _ptrClientOut->Stop();
+        _TraceCOMError(hr);
+    }
+
+    if (_winSupportAvrt)
+    {
+        if (NULL != hMmTask)
+        {
+            _PAvRevertMmThreadCharacteristics(hMmTask);
+        }
+    }
+
+    if (keepPlaying)
+    {
+        hr = _ptrClientOut->Stop();
+        if (FAILED(hr))
+        {
+            _TraceCOMError(hr);
+        }
+        hr = _ptrClientOut->Reset();
+        if (FAILED(hr))
+        {
+            _TraceCOMError(hr);
+        }
+
+        // Trigger callback from module process thread
+        _playError = 1;
+        WEBRTC_TRACE(kTraceError, kTraceUtility, _id, "kPlayoutError message posted: rendering thread has ended pre-maturely");
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_Rendering thread is now terminated properly");
+    }
+
+    return (DWORD)hr;
+}
+
+DWORD AudioDeviceWindowsCore::InitCaptureThreadPriority()
+{
+    _hMmTask = NULL;
+
+    _SetThreadName(-1, "webrtc_core_audio_capture_thread");
+
+    // Use Multimedia Class Scheduler Service (MMCSS) to boost the thread
+    // priority.
+    if (_winSupportAvrt)
+    {
+        DWORD taskIndex(0);
+        _hMmTask = _PAvSetMmThreadCharacteristicsA("Pro Audio", &taskIndex);
+        if (_hMmTask)
+        {
+            if (!_PAvSetMmThreadPriority(_hMmTask, AVRT_PRIORITY_CRITICAL))
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                    "failed to boost rec-thread using MMCSS");
+            }
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                "capture thread is now registered with MMCSS (taskIndex=%d)",
+                taskIndex);
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                "failed to enable MMCSS on capture thread (err=%d)",
+                GetLastError());
+            _TraceCOMError(GetLastError());
+        }
+    }
+
+    return S_OK;
+}
+
+void AudioDeviceWindowsCore::RevertCaptureThreadPriority()
+{
+    if (_winSupportAvrt)
+    {
+        if (NULL != _hMmTask)
+        {
+            _PAvRevertMmThreadCharacteristics(_hMmTask);
+        }
+    }
+
+    _hMmTask = NULL;
+}
+
+DWORD AudioDeviceWindowsCore::DoCaptureThreadPollDMO()
+{
+    assert(_mediaBuffer != NULL);
+    bool keepRecording = true;
+
+    // Initialize COM as MTA in this thread.
+    ScopedCOMInitializer comInit(ScopedCOMInitializer::kMTA);
+    if (!comInit.succeeded()) {
+      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+        "failed to initialize COM in polling DMO thread");
+      return -1;
+    }
+
+    HRESULT hr = InitCaptureThreadPriority();
+    if (FAILED(hr))
+    {
+        return hr;
+    }
+
+    // Set event which will ensure that the calling thread modifies the
+    // recording state to true.
+    SetEvent(_hCaptureStartedEvent);
+
+    // >> ---------------------------- THREAD LOOP ----------------------------
+    while (keepRecording)
+    {
+        // Poll the DMO every 5 ms.
+        // (The same interval used in the Wave implementation.)
+        DWORD waitResult = WaitForSingleObject(_hShutdownCaptureEvent, 5);
+        switch (waitResult)
+        {
+        case WAIT_OBJECT_0:         // _hShutdownCaptureEvent
+            keepRecording = false;
+            break;
+        case WAIT_TIMEOUT:          // timeout notification
+            break;
+        default:                    // unexpected error
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                "Unknown wait termination on capture side");
+            hr = -1; // To signal an error callback.
+            keepRecording = false;
+            break;
+        }
+
+        while (keepRecording)
+        {
+            CriticalSectionScoped critScoped(&_critSect);
+
+            DWORD dwStatus = 0;
+            {
+                DMO_OUTPUT_DATA_BUFFER dmoBuffer = {0};
+                dmoBuffer.pBuffer = _mediaBuffer;
+                dmoBuffer.pBuffer->AddRef();
+
+                // Poll the DMO for AEC processed capture data. The DMO will
+                // copy available data to |dmoBuffer|, and should only return
+                // 10 ms frames. The value of |dwStatus| should be ignored.
+                hr = _dmo->ProcessOutput(0, 1, &dmoBuffer, &dwStatus);
+                SAFE_RELEASE(dmoBuffer.pBuffer);
+                dwStatus = dmoBuffer.dwStatus;
+            }
+            if (FAILED(hr))
+            {
+                _TraceCOMError(hr);
+                keepRecording = false;
+                assert(false);
+                break;
+            }
+
+            ULONG bytesProduced = 0;
+            BYTE* data;
+            // Get a pointer to the data buffer. This should be valid until
+            // the next call to ProcessOutput.
+            hr = _mediaBuffer->GetBufferAndLength(&data, &bytesProduced);
+            if (FAILED(hr))
+            {
+                _TraceCOMError(hr);
+                keepRecording = false;
+                assert(false);
+                break;
+            }
+
+            // TODO(andrew): handle AGC.
+
+            if (bytesProduced > 0)
+            {
+                const int kSamplesProduced = bytesProduced / _recAudioFrameSize;
+                // TODO(andrew): verify that this is always satisfied. It might
+                // be that ProcessOutput will try to return more than 10 ms if
+                // we fail to call it frequently enough.
+                assert(kSamplesProduced == _recBlockSize);
+                assert(sizeof(BYTE) == sizeof(WebRtc_Word8));
+                _ptrAudioBuffer->SetRecordedBuffer(
+                    reinterpret_cast<WebRtc_Word8*>(data),
+                    kSamplesProduced);
+                _ptrAudioBuffer->SetVQEData(0, 0, 0);
+
+                _UnLock();  // Release lock while making the callback.
+                _ptrAudioBuffer->DeliverRecordedData();
+                _Lock();
+            }
+
+            // Reset length to indicate buffer availability.
+            hr = _mediaBuffer->SetLength(0);
+            if (FAILED(hr))
+            {
+                _TraceCOMError(hr);
+                keepRecording = false;
+                assert(false);
+                break;
+            }
+
+            if (!(dwStatus & DMO_OUTPUT_DATA_BUFFERF_INCOMPLETE))
+            {
+                // The DMO cannot currently produce more data. This is the
+                // normal case; otherwise it means the DMO had more than 10 ms
+                // of data available and ProcessOutput should be called again.
+                break;
+            }
+        }
+    }
+    // ---------------------------- THREAD LOOP ---------------------------- <<
+
+    RevertCaptureThreadPriority();
+
+    if (FAILED(hr))
+    {
+        // Trigger callback from module process thread
+        _recError = 1;
+        WEBRTC_TRACE(kTraceError, kTraceUtility, _id,
+            "kRecordingError message posted: capturing thread has ended "
+            "prematurely");
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+            "Capturing thread is now terminated properly");
+    }
+
+    return hr;
+}
+
+
+// ----------------------------------------------------------------------------
+//  DoCaptureThread
+// ----------------------------------------------------------------------------
+
+DWORD AudioDeviceWindowsCore::DoCaptureThread()
+{
+
+    bool keepRecording = true;
+    HANDLE waitArray[2] = {_hShutdownCaptureEvent, _hCaptureSamplesReadyEvent};
+    HRESULT hr = S_OK;
+
+    LARGE_INTEGER t1;
+    LARGE_INTEGER t2;
+    WebRtc_Word32 time(0);
+
+    BYTE* syncBuffer = NULL;
+    UINT32 syncBufIndex = 0;
+
+    _readSamples = 0;
+
+    // Initialize COM as MTA in this thread.
+    ScopedCOMInitializer comInit(ScopedCOMInitializer::kMTA);
+    if (!comInit.succeeded()) {
+      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+        "failed to initialize COM in capture thread");
+      return -1;
+    }
+
+    hr = InitCaptureThreadPriority();
+    if (FAILED(hr))
+    {
+        return hr;
+    }
+
+    _Lock();
+
+    // Get size of capturing buffer (length is expressed as the number of audio frames the buffer can hold).
+    // This value is fixed during the capturing session.
+    //
+    UINT32 bufferLength = 0;
+    hr = _ptrClientIn->GetBufferSize(&bufferLength);
+    EXIT_ON_ERROR(hr);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[CAPT] size of buffer       : %u", bufferLength);
+
+    // Allocate memory for sync buffer.
+    // It is used for compensation between native 44.1 and internal 44.0 and
+    // for cases when the capture buffer is larger than 10ms.
+    //
+    const UINT32 syncBufferSize = 2*(bufferLength * _recAudioFrameSize);
+    syncBuffer = new BYTE[syncBufferSize];
+    if (syncBuffer == NULL)
+    {
+        return E_POINTER;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[CAPT] size of sync buffer  : %u [bytes]", syncBufferSize);
+
+    // Get maximum latency for the current stream (will not change for the lifetime of the IAudioClient object).
+    //
+    REFERENCE_TIME latency;
+    _ptrClientIn->GetStreamLatency(&latency);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[CAPT] max stream latency   : %u (%3.2f ms)",
+        (DWORD)latency, (double)(latency / 10000.0));
+
+    // Get the length of the periodic interval separating successive processing passes by
+    // the audio engine on the data in the endpoint buffer.
+    //
+    REFERENCE_TIME devPeriod = 0;
+    REFERENCE_TIME devPeriodMin = 0;
+    _ptrClientIn->GetDevicePeriod(&devPeriod, &devPeriodMin);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[CAPT] device period        : %u (%3.2f ms)",
+        (DWORD)devPeriod, (double)(devPeriod / 10000.0));
+
+    double extraDelayMS = (double)((latency + devPeriod) / 10000.0);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[CAPT] extraDelayMS         : %3.2f", extraDelayMS);
+
+    double endpointBufferSizeMS = 10.0 * ((double)bufferLength / (double)_recBlockSize);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[CAPT] endpointBufferSizeMS : %3.2f", endpointBufferSizeMS);
+
+    // Start up the capturing stream.
+    //
+    hr = _ptrClientIn->Start();
+    EXIT_ON_ERROR(hr);
+
+    _UnLock();
+
+    // Set event which will ensure that the calling thread modifies the recording state to true.
+    //
+    SetEvent(_hCaptureStartedEvent);
+
+    // >> ---------------------------- THREAD LOOP ----------------------------
+
+    while (keepRecording)
+    {
+        // Wait for a capture notification event or a shutdown event
+        DWORD waitResult = WaitForMultipleObjects(2, waitArray, FALSE, 500);
+        switch (waitResult)
+        {
+        case WAIT_OBJECT_0 + 0:        // _hShutdownCaptureEvent
+            keepRecording = false;
+            break;
+        case WAIT_OBJECT_0 + 1:        // _hCaptureSamplesReadyEvent
+            break;
+        case WAIT_TIMEOUT:            // timeout notification
+            _ptrClientIn->Stop();
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "capture event timed out after 0.5 seconds");
+            goto Exit;
+        default:                    // unexpected error
+            _ptrClientIn->Stop();
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "unknown wait termination on capture side");
+            goto Exit;
+        }
+
+        while (keepRecording)
+        {
+            BYTE *pData = 0;
+            UINT32 framesAvailable = 0;
+            DWORD flags = 0;
+            UINT64 recTime = 0;
+            UINT64 recPos = 0;
+
+            _Lock();
+
+            // Sanity check to ensure that essential states are not modified
+            // during the unlocked period.
+            if (_ptrCaptureClient == NULL || _ptrClientIn == NULL)
+            {
+                _UnLock();
+                WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                    "input state has been modified during unlocked period");
+                goto Exit;
+            }
+
+            //  Find out how much capture data is available
+            //
+            hr = _ptrCaptureClient->GetBuffer(&pData,           // packet which is ready to be read by used
+                                              &framesAvailable, // #frames in the captured packet (can be zero)
+                                              &flags,           // support flags (check)
+                                              &recPos,          // device position of first audio frame in data packet
+                                              &recTime);        // value of performance counter at the time of recording the first audio frame
+
+            if (SUCCEEDED(hr))
+            {
+                if (AUDCLNT_S_BUFFER_EMPTY == hr)
+                {
+                    // Buffer was empty => start waiting for a new capture notification event
+                    _UnLock();
+                    break;
+                }
+
+                if (flags & AUDCLNT_BUFFERFLAGS_SILENT)
+                {
+                    // Treat all of the data in the packet as silence and ignore the actual data values.
+                    WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "AUDCLNT_BUFFERFLAGS_SILENT");
+                    pData = NULL;
+                }
+
+                assert(framesAvailable != 0);
+
+                if (pData)
+                {
+                    CopyMemory(&syncBuffer[syncBufIndex*_recAudioFrameSize], pData, framesAvailable*_recAudioFrameSize);
+                }
+                else
+                {
+                    ZeroMemory(&syncBuffer[syncBufIndex*_recAudioFrameSize], framesAvailable*_recAudioFrameSize);
+                }
+                assert(syncBufferSize >= (syncBufIndex*_recAudioFrameSize)+framesAvailable*_recAudioFrameSize);
+
+                // Release the capture buffer
+                //
+                hr = _ptrCaptureClient->ReleaseBuffer(framesAvailable);
+                EXIT_ON_ERROR(hr);
+
+                _readSamples += framesAvailable;
+                syncBufIndex += framesAvailable;
+
+                QueryPerformanceCounter(&t1);
+
+                // Get the current recording and playout delay.
+                WebRtc_UWord32 sndCardRecDelay = (WebRtc_UWord32)
+                    (((((UINT64)t1.QuadPart * _perfCounterFactor) - recTime)
+                        / 10000) + (10*syncBufIndex) / _recBlockSize - 10);
+                WebRtc_UWord32 sndCardPlayDelay =
+                    static_cast<WebRtc_UWord32>(_sndCardPlayDelay);
+
+                _sndCardRecDelay = sndCardRecDelay;
+
+                while (syncBufIndex >= _recBlockSize)
+                {
+                    if (_ptrAudioBuffer)
+                    {
+                        _ptrAudioBuffer->SetRecordedBuffer((const WebRtc_Word8*)syncBuffer, _recBlockSize);
+
+                        _driftAccumulator += _sampleDriftAt48kHz;
+                        const WebRtc_Word32 clockDrift =
+                            static_cast<WebRtc_Word32>(_driftAccumulator);
+                        _driftAccumulator -= clockDrift;
+
+                        _ptrAudioBuffer->SetVQEData(sndCardPlayDelay,
+                                                    sndCardRecDelay,
+                                                    clockDrift);
+
+                        QueryPerformanceCounter(&t1);    // measure time: START
+
+                        _UnLock();  // release lock while making the callback
+                        _ptrAudioBuffer->DeliverRecordedData();
+                        _Lock();    // restore the lock
+
+                        QueryPerformanceCounter(&t2);    // measure time: STOP
+
+                        // Measure "average CPU load".
+                        // Basically what we do here is to measure how many percent of our 10ms period
+                        // is used for encoding and decoding. This value shuld be used as a warning indicator
+                        // only and not seen as an absolute value. Running at ~100% will lead to bad QoS.
+                        time = (int)(t2.QuadPart - t1.QuadPart);
+                        _avgCPULoad = (float)(_avgCPULoad*.99 + (time + _playAcc) / (double)(_perfCounterFreq.QuadPart));
+                        _playAcc = 0;
+
+                        // Sanity check to ensure that essential states are not modified during the unlocked period
+                        if (_ptrCaptureClient == NULL || _ptrClientIn == NULL)
+                        {
+                            _UnLock();
+                            WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id, "input state has been modified during unlocked period");
+                            goto Exit;
+                        }
+                    }
+
+                    // store remaining data which was not able to deliver as 10ms segment
+                    MoveMemory(&syncBuffer[0], &syncBuffer[_recBlockSize*_recAudioFrameSize], (syncBufIndex-_recBlockSize)*_recAudioFrameSize);
+                    syncBufIndex -= _recBlockSize;
+                    sndCardRecDelay -= 10;
+                }
+
+                if (_AGC)
+                {
+                    WebRtc_UWord32 newMicLevel = _ptrAudioBuffer->NewMicLevel();
+                    if (newMicLevel != 0)
+                    {
+                        // The VQE will only deliver non-zero microphone levels when a change is needed.
+                        // Set this new mic level (received from the observer as return value in the callback).
+                        WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "AGC change of volume: new=%u",  newMicLevel);
+                        // We store this outside of the audio buffer to avoid
+                        // having it overwritten by the getter thread.
+                        _newMicLevel = newMicLevel;
+                        SetEvent(_hSetCaptureVolumeEvent);
+                    }
+                }
+            }
+            else
+            {
+                // If GetBuffer returns AUDCLNT_E_BUFFER_ERROR, the thread consuming the audio samples
+                // must wait for the next processing pass. The client might benefit from keeping a count
+                // of the failed GetBuffer calls. If GetBuffer returns this error repeatedly, the client
+                // can start a new processing loop after shutting down the current client by calling
+                // IAudioClient::Stop, IAudioClient::Reset, and releasing the audio client.
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                    "IAudioCaptureClient::GetBuffer returned AUDCLNT_E_BUFFER_ERROR, hr = 0x%08X",  hr);
+                goto Exit;
+            }
+
+            _UnLock();
+        }
+    }
+
+    // ---------------------------- THREAD LOOP ---------------------------- <<
+
+    hr = _ptrClientIn->Stop();
+
+Exit:
+    if (FAILED(hr))
+    {
+        _UnLock();
+        _ptrClientIn->Stop();
+        _TraceCOMError(hr);
+    }
+
+    RevertCaptureThreadPriority();
+
+    if (keepRecording)
+    {
+        if (_ptrClientIn != NULL)
+        {
+            hr = _ptrClientIn->Stop();
+            if (FAILED(hr))
+            {
+                _TraceCOMError(hr);
+            }
+            hr = _ptrClientIn->Reset();
+            if (FAILED(hr))
+            {
+                _TraceCOMError(hr);
+            }
+        }
+
+        // Trigger callback from module process thread
+        _recError = 1;
+        WEBRTC_TRACE(kTraceError, kTraceUtility, _id, "kRecordingError message posted: capturing thread has ended pre-maturely");
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_Capturing thread is now terminated properly");
+    }
+
+    SAFE_RELEASE(_ptrClientIn);
+    SAFE_RELEASE(_ptrCaptureClient);
+
+    if (syncBuffer)
+    {
+        delete [] syncBuffer;
+    }
+
+    return (DWORD)hr;
+}
+
+int32_t AudioDeviceWindowsCore::EnableBuiltInAEC(bool enable)
+{
+
+    if (_recIsInitialized)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "Attempt to set Windows AEC with recording already initialized");
+        return -1;
+    }
+
+    if (_dmo == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "Built-in AEC DMO was not initialized properly at create time");
+        return -1;
+    }
+
+    _builtInAecEnabled = enable;
+    return 0;
+}
+
+bool AudioDeviceWindowsCore::BuiltInAECIsEnabled() const
+{
+    return _builtInAecEnabled;
+}
+
+int AudioDeviceWindowsCore::SetDMOProperties()
+{
+    HRESULT hr = S_OK;
+    assert(_dmo != NULL);
+
+    scoped_refptr<IPropertyStore> ps;
+    {
+        IPropertyStore* ptrPS = NULL;
+        hr = _dmo->QueryInterface(IID_IPropertyStore,
+                                  reinterpret_cast<void**>(&ptrPS));
+        if (FAILED(hr) || ptrPS == NULL)
+        {
+            _TraceCOMError(hr);
+            return -1;
+        }
+        ps = ptrPS;
+        SAFE_RELEASE(ptrPS);
+    }
+
+    // Set the AEC system mode.
+    // SINGLE_CHANNEL_AEC - AEC processing only.
+    if (SetVtI4Property(ps,
+                        MFPKEY_WMAAECMA_SYSTEM_MODE,
+                        SINGLE_CHANNEL_AEC))
+    {
+        return -1;
+    }
+
+    // Set the AEC source mode.
+    // VARIANT_TRUE - Source mode (we poll the AEC for captured data).
+    if (SetBoolProperty(ps,
+                        MFPKEY_WMAAECMA_DMO_SOURCE_MODE,
+                        VARIANT_TRUE) == -1)
+    {
+        return -1;
+    }
+
+    // Enable the feature mode.
+    // This lets us override all the default processing settings below.
+    if (SetBoolProperty(ps,
+                        MFPKEY_WMAAECMA_FEATURE_MODE,
+                        VARIANT_TRUE) == -1)
+    {
+        return -1;
+    }
+
+    // Disable analog AGC (default enabled).
+    if (SetBoolProperty(ps,
+                        MFPKEY_WMAAECMA_MIC_GAIN_BOUNDER,
+                        VARIANT_FALSE) == -1)
+    {
+        return -1;
+    }
+
+    // Disable noise suppression (default enabled).
+    // 0 - Disabled, 1 - Enabled
+    if (SetVtI4Property(ps,
+                        MFPKEY_WMAAECMA_FEATR_NS,
+                        0) == -1)
+    {
+        return -1;
+    }
+
+    // Relevant parameters to leave at default settings:
+    // MFPKEY_WMAAECMA_FEATR_AGC - Digital AGC (disabled).
+    // MFPKEY_WMAAECMA_FEATR_CENTER_CLIP - AEC center clipping (enabled).
+    // MFPKEY_WMAAECMA_FEATR_ECHO_LENGTH - Filter length (256 ms).
+    //   TODO(andrew): investigate decresing the length to 128 ms.
+    // MFPKEY_WMAAECMA_FEATR_FRAME_SIZE - Frame size (0).
+    //   0 is automatic; defaults to 160 samples (or 10 ms frames at the
+    //   selected 16 kHz) as long as mic array processing is disabled.
+    // MFPKEY_WMAAECMA_FEATR_NOISE_FILL - Comfort noise (enabled).
+    // MFPKEY_WMAAECMA_FEATR_VAD - VAD (disabled).
+
+    // Set the devices selected by VoE. If using a default device, we need to
+    // search for the device index.
+    int inDevIndex = _inputDeviceIndex;
+    int outDevIndex = _outputDeviceIndex;
+    if (!_usingInputDeviceIndex)
+    {
+        ERole role = eCommunications;
+        if (_inputDevice == AudioDeviceModule::kDefaultDevice)
+        {
+            role = eConsole;
+        }
+
+        if (_GetDefaultDeviceIndex(eCapture, role, &inDevIndex) == -1)
+        {
+            return -1;
+        }
+    }
+
+    if (!_usingOutputDeviceIndex)
+    {
+        ERole role = eCommunications;
+        if (_outputDevice == AudioDeviceModule::kDefaultDevice)
+        {
+            role = eConsole;
+        }
+
+        if (_GetDefaultDeviceIndex(eRender, role, &outDevIndex) == -1)
+        {
+            return -1;
+        }
+    }
+
+    DWORD devIndex = static_cast<uint32_t>(outDevIndex << 16) +
+                     static_cast<uint32_t>(0x0000ffff & inDevIndex);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+        "Capture device index: %d, render device index: %d",
+        inDevIndex, outDevIndex);
+    if (SetVtI4Property(ps,
+                        MFPKEY_WMAAECMA_DEVICE_INDEXES,
+                        devIndex) == -1)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+int AudioDeviceWindowsCore::SetBoolProperty(IPropertyStore* ptrPS,
+                                            REFPROPERTYKEY key,
+                                            VARIANT_BOOL value)
+{
+    PROPVARIANT pv;
+    PropVariantInit(&pv);
+    pv.vt = VT_BOOL;
+    pv.boolVal = value;
+    HRESULT hr = ptrPS->SetValue(key, pv);
+    PropVariantClear(&pv);
+    if (FAILED(hr))
+    {
+        _TraceCOMError(hr);
+        return -1;
+    }
+    return 0;
+}
+
+int AudioDeviceWindowsCore::SetVtI4Property(IPropertyStore* ptrPS,
+                                            REFPROPERTYKEY key,
+                                            LONG value)
+{
+    PROPVARIANT pv;
+    PropVariantInit(&pv);
+    pv.vt = VT_I4;
+    pv.lVal = value;
+    HRESULT hr = ptrPS->SetValue(key, pv);
+    PropVariantClear(&pv);
+    if (FAILED(hr))
+    {
+        _TraceCOMError(hr);
+        return -1;
+    }
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  _RefreshDeviceList
+//
+//  Creates a new list of endpoint rendering or capture devices after
+//  deleting any previously created (and possibly out-of-date) list of
+//  such devices.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::_RefreshDeviceList(EDataFlow dir)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    HRESULT hr = S_OK;
+    IMMDeviceCollection *pCollection = NULL;
+
+    assert(dir == eRender || dir == eCapture);
+    assert(_ptrEnumerator != NULL);
+
+    // Create a fresh list of devices using the specified direction
+    hr = _ptrEnumerator->EnumAudioEndpoints(
+                           dir,
+                           DEVICE_STATE_ACTIVE,
+                           &pCollection);
+    if (FAILED(hr))
+    {
+        _TraceCOMError(hr);
+        SAFE_RELEASE(pCollection);
+        return -1;
+    }
+
+    if (dir == eRender)
+    {
+        SAFE_RELEASE(_ptrRenderCollection);
+        _ptrRenderCollection = pCollection;
+    }
+    else
+    {
+        SAFE_RELEASE(_ptrCaptureCollection);
+        _ptrCaptureCollection = pCollection;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  _DeviceListCount
+//
+//  Gets a count of the endpoint rendering or capture devices in the
+//  current list of such devices.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word16 AudioDeviceWindowsCore::_DeviceListCount(EDataFlow dir)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    HRESULT hr = S_OK;
+    UINT count = 0;
+
+    assert(eRender == dir || eCapture == dir);
+
+    if (eRender == dir && NULL != _ptrRenderCollection)
+    {
+        hr = _ptrRenderCollection->GetCount(&count);
+    }
+    else if (NULL != _ptrCaptureCollection)
+    {
+        hr = _ptrCaptureCollection->GetCount(&count);
+    }
+
+    if (FAILED(hr))
+    {
+        _TraceCOMError(hr);
+        return -1;
+    }
+
+    return static_cast<WebRtc_Word16> (count);
+}
+
+// ----------------------------------------------------------------------------
+//  _GetListDeviceName
+//
+//  Gets the friendly name of an endpoint rendering or capture device
+//  from the current list of such devices. The caller uses an index
+//  into the list to identify the device.
+//
+//  Uses: _ptrRenderCollection or _ptrCaptureCollection which is updated
+//  in _RefreshDeviceList().
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::_GetListDeviceName(EDataFlow dir, int index, LPWSTR szBuffer, int bufferLen)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    HRESULT hr = S_OK;
+    IMMDevice *pDevice = NULL;
+
+    assert(dir == eRender || dir == eCapture);
+
+    if (eRender == dir && NULL != _ptrRenderCollection)
+    {
+        hr = _ptrRenderCollection->Item(index, &pDevice);
+    }
+    else if (NULL != _ptrCaptureCollection)
+    {
+        hr = _ptrCaptureCollection->Item(index, &pDevice);
+    }
+
+    if (FAILED(hr))
+    {
+        _TraceCOMError(hr);
+        SAFE_RELEASE(pDevice);
+        return -1;
+    }
+
+    WebRtc_Word32 res = _GetDeviceName(pDevice, szBuffer, bufferLen);
+    SAFE_RELEASE(pDevice);
+    return res;
+}
+
+// ----------------------------------------------------------------------------
+//  _GetDefaultDeviceName
+//
+//  Gets the friendly name of an endpoint rendering or capture device
+//  given a specified device role.
+//
+//  Uses: _ptrEnumerator
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::_GetDefaultDeviceName(EDataFlow dir, ERole role, LPWSTR szBuffer, int bufferLen)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    HRESULT hr = S_OK;
+    IMMDevice *pDevice = NULL;
+
+    assert(dir == eRender || dir == eCapture);
+    assert(role == eConsole || role == eCommunications);
+    assert(_ptrEnumerator != NULL);
+
+    hr = _ptrEnumerator->GetDefaultAudioEndpoint(
+                           dir,
+                           role,
+                           &pDevice);
+
+    if (FAILED(hr))
+    {
+        _TraceCOMError(hr);
+        SAFE_RELEASE(pDevice);
+        return -1;
+    }
+
+    WebRtc_Word32 res = _GetDeviceName(pDevice, szBuffer, bufferLen);
+    SAFE_RELEASE(pDevice);
+    return res;
+}
+
+// ----------------------------------------------------------------------------
+//  _GetListDeviceID
+//
+//  Gets the unique ID string of an endpoint rendering or capture device
+//  from the current list of such devices. The caller uses an index
+//  into the list to identify the device.
+//
+//  Uses: _ptrRenderCollection or _ptrCaptureCollection which is updated
+//  in _RefreshDeviceList().
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::_GetListDeviceID(EDataFlow dir, int index, LPWSTR szBuffer, int bufferLen)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    HRESULT hr = S_OK;
+    IMMDevice *pDevice = NULL;
+
+    assert(dir == eRender || dir == eCapture);
+
+    if (eRender == dir && NULL != _ptrRenderCollection)
+    {
+        hr = _ptrRenderCollection->Item(index, &pDevice);
+    }
+    else if (NULL != _ptrCaptureCollection)
+    {
+        hr = _ptrCaptureCollection->Item(index, &pDevice);
+    }
+
+    if (FAILED(hr))
+    {
+        _TraceCOMError(hr);
+        SAFE_RELEASE(pDevice);
+        return -1;
+    }
+
+    WebRtc_Word32 res = _GetDeviceID(pDevice, szBuffer, bufferLen);
+    SAFE_RELEASE(pDevice);
+    return res;
+}
+
+// ----------------------------------------------------------------------------
+//  _GetDefaultDeviceID
+//
+//  Gets the uniqe device ID of an endpoint rendering or capture device
+//  given a specified device role.
+//
+//  Uses: _ptrEnumerator
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::_GetDefaultDeviceID(EDataFlow dir, ERole role, LPWSTR szBuffer, int bufferLen)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    HRESULT hr = S_OK;
+    IMMDevice *pDevice = NULL;
+
+    assert(dir == eRender || dir == eCapture);
+    assert(role == eConsole || role == eCommunications);
+    assert(_ptrEnumerator != NULL);
+
+    hr = _ptrEnumerator->GetDefaultAudioEndpoint(
+                           dir,
+                           role,
+                           &pDevice);
+
+    if (FAILED(hr))
+    {
+        _TraceCOMError(hr);
+        SAFE_RELEASE(pDevice);
+        return -1;
+    }
+
+    WebRtc_Word32 res = _GetDeviceID(pDevice, szBuffer, bufferLen);
+    SAFE_RELEASE(pDevice);
+    return res;
+}
+
+WebRtc_Word32 AudioDeviceWindowsCore::_GetDefaultDeviceIndex(EDataFlow dir,
+                                                             ERole role,
+                                                             int* index)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    HRESULT hr = S_OK;
+    WCHAR szDefaultDeviceID[MAX_PATH] = {0};
+    WCHAR szDeviceID[MAX_PATH] = {0};
+
+    const size_t kDeviceIDLength = sizeof(szDeviceID)/sizeof(szDeviceID[0]);
+    assert(kDeviceIDLength ==
+        sizeof(szDefaultDeviceID) / sizeof(szDefaultDeviceID[0]));
+
+    if (_GetDefaultDeviceID(dir,
+                            role,
+                            szDefaultDeviceID,
+                            kDeviceIDLength) == -1)
+    {
+        return -1;
+    }
+
+    IMMDeviceCollection* collection = _ptrCaptureCollection;
+    if (dir == eRender)
+    {
+        collection = _ptrRenderCollection;
+    }
+
+    if (!collection)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "Device collection not valid");
+        return -1;
+    }
+
+    UINT count = 0;
+    hr = collection->GetCount(&count);
+    if (FAILED(hr))
+    {
+        _TraceCOMError(hr);
+        return -1;
+    }
+
+    *index = -1;
+    for (UINT i = 0; i < count; i++)
+    {
+        memset(szDeviceID, 0, sizeof(szDeviceID));
+        scoped_refptr<IMMDevice> device;
+        {
+            IMMDevice* ptrDevice = NULL;
+            hr = collection->Item(i, &ptrDevice);
+            if (FAILED(hr) || ptrDevice == NULL)
+            {
+                _TraceCOMError(hr);
+                return -1;
+            }
+            device = ptrDevice;
+            SAFE_RELEASE(ptrDevice);
+        }
+
+        if (_GetDeviceID(device, szDeviceID, kDeviceIDLength) == -1)
+        {
+           return -1;
+        }
+
+        if (wcsncmp(szDefaultDeviceID, szDeviceID, kDeviceIDLength) == 0)
+        {
+            // Found a match.
+            *index = i;
+            break;
+        }
+
+    }
+
+    if (*index == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "Unable to find collection index for default device");
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  _GetDeviceName
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::_GetDeviceName(IMMDevice* pDevice,
+                                                     LPWSTR pszBuffer,
+                                                     int bufferLen)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    static const WCHAR szDefault[] = L"<Device not available>";
+
+    HRESULT hr = E_FAIL;
+    IPropertyStore *pProps = NULL;
+    PROPVARIANT varName;
+
+    assert(pszBuffer != NULL);
+    assert(bufferLen > 0);
+
+    if (pDevice != NULL)
+    {
+        hr = pDevice->OpenPropertyStore(STGM_READ, &pProps);
+        if (FAILED(hr))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                "IMMDevice::OpenPropertyStore failed, hr = 0x%08X", hr);
+        }
+    }
+
+    // Initialize container for property value.
+    PropVariantInit(&varName);
+
+    if (SUCCEEDED(hr))
+    {
+        // Get the endpoint device's friendly-name property.
+        hr = pProps->GetValue(PKEY_Device_FriendlyName, &varName);
+        if (FAILED(hr))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+                "IPropertyStore::GetValue failed, hr = 0x%08X", hr);
+        }
+    }
+
+    if ((SUCCEEDED(hr)) && (VT_EMPTY == varName.vt))
+    {
+        hr = E_FAIL;
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "IPropertyStore::GetValue returned no value, hr = 0x%08X", hr);
+    }
+
+    if ((SUCCEEDED(hr)) && (VT_LPWSTR != varName.vt))
+    {
+        // The returned value is not a wide null terminated string.
+        hr = E_UNEXPECTED;
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "IPropertyStore::GetValue returned unexpected type, hr = 0x%08X", hr);
+    }
+
+    if (SUCCEEDED(hr) && (varName.pwszVal != NULL))
+    {
+        // Copy the valid device name to the provided ouput buffer.
+        wcsncpy_s(pszBuffer, bufferLen, varName.pwszVal, _TRUNCATE);
+    }
+    else
+    {
+        // Failed to find the device name.
+        wcsncpy_s(pszBuffer, bufferLen, szDefault, _TRUNCATE);
+    }
+
+    PropVariantClear(&varName);
+    SAFE_RELEASE(pProps);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  _GetDeviceID
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::_GetDeviceID(IMMDevice* pDevice, LPWSTR pszBuffer, int bufferLen)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    static const WCHAR szDefault[] = L"<Device not available>";
+
+    HRESULT hr = E_FAIL;
+    LPWSTR pwszID = NULL;
+
+    assert(pszBuffer != NULL);
+    assert(bufferLen > 0);
+
+    if (pDevice != NULL)
+    {
+        hr = pDevice->GetId(&pwszID);
+    }
+
+    if (hr == S_OK)
+    {
+        // Found the device ID.
+        wcsncpy_s(pszBuffer, bufferLen, pwszID, _TRUNCATE);
+    }
+    else
+    {
+        // Failed to find the device ID.
+        wcsncpy_s(pszBuffer, bufferLen, szDefault, _TRUNCATE);
+    }
+
+    CoTaskMemFree(pwszID);
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  _GetDefaultDevice
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::_GetDefaultDevice(EDataFlow dir, ERole role, IMMDevice** ppDevice)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    HRESULT hr(S_OK);
+
+    assert(_ptrEnumerator != NULL);
+
+    hr = _ptrEnumerator->GetDefaultAudioEndpoint(
+                                   dir,
+                                   role,
+                                   ppDevice);
+    if (FAILED(hr))
+    {
+        _TraceCOMError(hr);
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  _GetListDevice
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::_GetListDevice(EDataFlow dir, int index, IMMDevice** ppDevice)
+{
+    HRESULT hr(S_OK);
+
+    assert(_ptrEnumerator != NULL);
+
+    IMMDeviceCollection *pCollection = NULL;
+
+    hr = _ptrEnumerator->EnumAudioEndpoints(
+                               dir,
+                               DEVICE_STATE_ACTIVE,        // only active endpoints are OK
+                               &pCollection);
+    if (FAILED(hr))
+    {
+        _TraceCOMError(hr);
+        SAFE_RELEASE(pCollection);
+        return -1;
+    }
+
+    hr = pCollection->Item(
+                        index,
+                        ppDevice);
+    if (FAILED(hr))
+    {
+        _TraceCOMError(hr);
+        SAFE_RELEASE(pCollection);
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  _EnumerateEndpointDevicesAll
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsCore::_EnumerateEndpointDevicesAll(EDataFlow dataFlow) const
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    assert(_ptrEnumerator != NULL);
+
+    HRESULT hr = S_OK;
+    IMMDeviceCollection *pCollection = NULL;
+
+    // Generate a collection of audio endpoint devices in the system.
+    // Get states for *all* endpoint devices.
+    // Output: IMMDeviceCollection interface.
+    hr = _ptrEnumerator->EnumAudioEndpoints(
+                                 dataFlow,            // data-flow direction (input parameter)
+                                 DEVICE_STATE_ACTIVE | DEVICE_STATE_DISABLED | DEVICE_STATE_NOTPRESENT | DEVICE_STATE_UNPLUGGED,
+                                 &pCollection);        // release interface when done
+
+    EXIT_ON_ERROR(hr);
+
+    // use the IMMDeviceCollection interface...
+
+    UINT count;
+    IMMDevice *pEndpoint = NULL;
+    IPropertyStore *pProps = NULL;
+    IAudioEndpointVolume* pEndpointVolume = NULL;
+    LPWSTR pwszID = NULL;
+
+    // Retrieve a count of the devices in the device collection.
+    hr = pCollection->GetCount(&count);
+    EXIT_ON_ERROR(hr);
+    if (dataFlow == eRender)
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "#rendering endpoint devices (counting all): %u", count);
+    else if (dataFlow == eCapture)
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "#capturing endpoint devices (counting all): %u", count);
+
+    if (count == 0)
+    {
+        return 0;
+    }
+
+    // Each loop prints the name of an endpoint device.
+    for (ULONG i = 0; i < count; i++)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Endpoint %d:", i);
+
+        // Get pointer to endpoint number i.
+        // Output: IMMDevice interface.
+        hr = pCollection->Item(
+                            i,
+                            &pEndpoint);
+        EXIT_ON_ERROR(hr);
+
+        // use the IMMDevice interface of the specified endpoint device...
+
+        // Get the endpoint ID string (uniquely identifies the device among all audio endpoint devices)
+        hr = pEndpoint->GetId(&pwszID);
+        EXIT_ON_ERROR(hr);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "ID string    : %S", pwszID);
+
+        // Retrieve an interface to the device's property store.
+        // Output: IPropertyStore interface.
+        hr = pEndpoint->OpenPropertyStore(
+                          STGM_READ,
+                          &pProps);
+        EXIT_ON_ERROR(hr);
+
+        // use the IPropertyStore interface...
+
+        PROPVARIANT varName;
+        // Initialize container for property value.
+        PropVariantInit(&varName);
+
+        // Get the endpoint's friendly-name property.
+        // Example: "Speakers (Realtek High Definition Audio)"
+        hr = pProps->GetValue(
+                       PKEY_Device_FriendlyName,
+                       &varName);
+        EXIT_ON_ERROR(hr);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "friendly name: \"%S\"", varName.pwszVal);
+
+        // Get the endpoint's current device state
+        DWORD dwState;
+        hr = pEndpoint->GetState(&dwState);
+        EXIT_ON_ERROR(hr);
+        if (dwState & DEVICE_STATE_ACTIVE)
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "state (0x%x)  : *ACTIVE*", dwState);
+        if (dwState & DEVICE_STATE_DISABLED)
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "state (0x%x)  : DISABLED", dwState);
+        if (dwState & DEVICE_STATE_NOTPRESENT)
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "state (0x%x)  : NOTPRESENT", dwState);
+        if (dwState & DEVICE_STATE_UNPLUGGED)
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "state (0x%x)  : UNPLUGGED", dwState);
+
+        // Check the hardware volume capabilities.
+        DWORD dwHwSupportMask = 0;
+        hr = pEndpoint->Activate(__uuidof(IAudioEndpointVolume), CLSCTX_ALL,
+                               NULL, (void**)&pEndpointVolume);
+        EXIT_ON_ERROR(hr);
+        hr = pEndpointVolume->QueryHardwareSupport(&dwHwSupportMask);
+        EXIT_ON_ERROR(hr);
+        if (dwHwSupportMask & ENDPOINT_HARDWARE_SUPPORT_VOLUME)
+            // The audio endpoint device supports a hardware volume control
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "hwmask (0x%x) : HARDWARE_SUPPORT_VOLUME", dwHwSupportMask);
+        if (dwHwSupportMask & ENDPOINT_HARDWARE_SUPPORT_MUTE)
+            // The audio endpoint device supports a hardware mute control
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "hwmask (0x%x) : HARDWARE_SUPPORT_MUTE", dwHwSupportMask);
+        if (dwHwSupportMask & ENDPOINT_HARDWARE_SUPPORT_METER)
+            // The audio endpoint device supports a hardware peak meter
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "hwmask (0x%x) : HARDWARE_SUPPORT_METER", dwHwSupportMask);
+
+        // Check the channel count (#channels in the audio stream that enters or leaves the audio endpoint device)
+        UINT nChannelCount(0);
+        hr = pEndpointVolume->GetChannelCount(
+                                &nChannelCount);
+        EXIT_ON_ERROR(hr);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "#channels    : %u", nChannelCount);
+
+        if (dwHwSupportMask & ENDPOINT_HARDWARE_SUPPORT_VOLUME)
+        {
+            // Get the volume range.
+            float fLevelMinDB(0.0);
+            float fLevelMaxDB(0.0);
+            float fVolumeIncrementDB(0.0);
+            hr = pEndpointVolume->GetVolumeRange(
+                                    &fLevelMinDB,
+                                    &fLevelMaxDB,
+                                    &fVolumeIncrementDB);
+            EXIT_ON_ERROR(hr);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "volume range : %4.2f (min), %4.2f (max), %4.2f (inc) [dB]",
+                fLevelMinDB, fLevelMaxDB, fVolumeIncrementDB);
+
+            // The volume range from vmin = fLevelMinDB to vmax = fLevelMaxDB is divided
+            // into n uniform intervals of size vinc = fVolumeIncrementDB, where
+            // n = (vmax ?vmin) / vinc.
+            // The values vmin, vmax, and vinc are measured in decibels. The client can set
+            // the volume level to one of n + 1 discrete values in the range from vmin to vmax.
+            int n = (int)((fLevelMaxDB-fLevelMinDB)/fVolumeIncrementDB);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "#intervals   : %d", n);
+
+            // Get information about the current step in the volume range.
+            // This method represents the volume level of the audio stream that enters or leaves
+            // the audio endpoint device as an index or "step" in a range of discrete volume levels.
+            // Output value nStepCount is the number of steps in the range. Output value nStep
+            // is the step index of the current volume level. If the number of steps is n = nStepCount,
+            // then step index nStep can assume values from 0 (minimum volume) to n ?1 (maximum volume).
+            UINT nStep(0);
+            UINT nStepCount(0);
+            hr = pEndpointVolume->GetVolumeStepInfo(
+                                    &nStep,
+                                    &nStepCount);
+            EXIT_ON_ERROR(hr);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "volume steps : %d (nStep), %d (nStepCount)", nStep, nStepCount);
+        }
+
+        CoTaskMemFree(pwszID);
+        pwszID = NULL;
+        PropVariantClear(&varName);
+        SAFE_RELEASE(pProps);
+        SAFE_RELEASE(pEndpoint);
+        SAFE_RELEASE(pEndpointVolume);
+    }
+    SAFE_RELEASE(pCollection);
+    return 0;
+
+Exit:
+    _TraceCOMError(hr);
+    CoTaskMemFree(pwszID);
+    pwszID = NULL;
+    SAFE_RELEASE(pCollection);
+    SAFE_RELEASE(pEndpoint);
+    SAFE_RELEASE(pEndpointVolume);
+    SAFE_RELEASE(pProps);
+    return -1;
+}
+
+// ----------------------------------------------------------------------------
+//  _TraceCOMError
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsCore::_TraceCOMError(HRESULT hr) const
+{
+    TCHAR buf[MAXERRORLENGTH];
+    TCHAR errorText[MAXERRORLENGTH];
+
+    const DWORD dwFlags = FORMAT_MESSAGE_FROM_SYSTEM |
+                          FORMAT_MESSAGE_IGNORE_INSERTS;
+    const DWORD dwLangID = MAKELANGID(LANG_ENGLISH, SUBLANG_ENGLISH_US);
+    
+    // Gets the system's human readable message string for this HRESULT.
+    // All error message in English by default.
+    DWORD messageLength = ::FormatMessageW(dwFlags, 
+                                           0,
+                                           hr,
+                                           dwLangID,
+                                           errorText,  
+                                           MAXERRORLENGTH,  
+                                           NULL);
+
+    assert(messageLength <= MAXERRORLENGTH);
+
+    // Trims tailing white space (FormatMessage() leaves a trailing cr-lf.).
+    for (; messageLength && ::isspace(errorText[messageLength - 1]);
+         --messageLength)
+    {
+        errorText[messageLength - 1] = '\0';
+    }
+
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+        "Core Audio method failed (hr=0x%x)", hr);
+    StringCchPrintf(buf, MAXERRORLENGTH, TEXT("Error details: "));
+    StringCchCat(buf, MAXERRORLENGTH, errorText);
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "%s", WideToUTF8(buf));
+}
+
+// ----------------------------------------------------------------------------
+//  _SetThreadName
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsCore::_SetThreadName(DWORD dwThreadID, LPCSTR szThreadName)
+{
+    // See http://msdn.microsoft.com/en-us/library/xcb2z8hs(VS.71).aspx for details on the code
+    // in this function. Name of article is "Setting a Thread Name (Unmanaged)".
+
+    THREADNAME_INFO info;
+    info.dwType = 0x1000;
+    info.szName = szThreadName;
+    info.dwThreadID = dwThreadID;
+    info.dwFlags = 0;
+
+    __try
+    {
+        RaiseException( 0x406D1388, 0, sizeof(info)/sizeof(DWORD), (ULONG_PTR *)&info );
+    }
+    __except (EXCEPTION_CONTINUE_EXECUTION)
+    {
+    }
+}
+
+// ----------------------------------------------------------------------------
+//  _Get44kHzDrift
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsCore::_Get44kHzDrift()
+{
+    // We aren't able to resample at 44.1 kHz. Instead we run at 44 kHz and push/pull
+    // from the engine faster to compensate. If only one direction is set to 44.1 kHz
+    // the result is indistinguishable from clock drift to the AEC. We can compensate
+    // internally if we inform the AEC about the drift.
+    _sampleDriftAt48kHz = 0;
+    _driftAccumulator = 0;
+
+    if (_playSampleRate == 44000 && _recSampleRate != 44000)
+    {
+        _sampleDriftAt48kHz = 480.0f/440;
+    }
+    else if(_playSampleRate != 44000 && _recSampleRate == 44000)
+    {
+        _sampleDriftAt48kHz = -480.0f/441;
+    }
+}
+
+// ----------------------------------------------------------------------------
+//  WideToUTF8
+// ----------------------------------------------------------------------------
+
+char* AudioDeviceWindowsCore::WideToUTF8(const TCHAR* src) const {
+#ifdef UNICODE
+    const size_t kStrLen = sizeof(_str);
+    memset(_str, 0, kStrLen);
+    // Get required size (in bytes) to be able to complete the conversion.
+    int required_size = WideCharToMultiByte(CP_UTF8, 0, src, -1, _str, 0, 0, 0);
+    if (required_size <= kStrLen)
+    {
+        // Process the entire input string, including the terminating null char.
+        if (WideCharToMultiByte(CP_UTF8, 0, src, -1, _str, kStrLen, 0, 0) == 0)
+            memset(_str, 0, kStrLen);
+    }
+    return _str;
+#else
+    return const_cast<char*>(src);
+#endif
+}
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_WINDOWS_CORE_AUDIO_BUILD
diff --git a/src/modules/audio_device/main/source/win/audio_device_core_win.h b/src/modules/audio_device/main/source/win/audio_device_core_win.h
new file mode 100644
index 0000000..1c1c6c5
--- /dev/null
+++ b/src/modules/audio_device/main/source/win/audio_device_core_win.h
@@ -0,0 +1,386 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_CORE_WIN_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_CORE_WIN_H
+
+#if (_MSC_VER >= 1400)  // only include for VS 2005 and higher
+
+#include "audio_device_generic.h"
+
+#pragma once
+#include <wmcodecdsp.h>      // CLSID_CWMAudioAEC
+                             // (must be before audioclient.h)
+#include <Audioclient.h>     // WASAPI
+#include <Audiopolicy.h>
+#include <avrt.h>            // Avrt
+#include <endpointvolume.h>
+#include <mediaobj.h>        // IMediaObject
+#include <Mmdeviceapi.h>     // MMDevice
+
+#include "critical_section_wrapper.h"
+#include "scoped_refptr.h"
+
+// Use Multimedia Class Scheduler Service (MMCSS) to boost the thread priority
+#pragma comment( lib, "avrt.lib" )
+// AVRT function pointers
+typedef BOOL (WINAPI *PAvRevertMmThreadCharacteristics)(HANDLE);
+typedef HANDLE (WINAPI *PAvSetMmThreadCharacteristicsA)(LPCSTR, LPDWORD);
+typedef BOOL (WINAPI *PAvSetMmThreadPriority)(HANDLE, AVRT_PRIORITY);
+
+namespace webrtc {
+
+const float MAX_CORE_SPEAKER_VOLUME = 255.0f;
+const float MIN_CORE_SPEAKER_VOLUME = 0.0f;
+const float MAX_CORE_MICROPHONE_VOLUME = 255.0f;
+const float MIN_CORE_MICROPHONE_VOLUME = 0.0f;
+const WebRtc_UWord16 CORE_SPEAKER_VOLUME_STEP_SIZE = 1;
+const WebRtc_UWord16 CORE_MICROPHONE_VOLUME_STEP_SIZE = 1;
+
+// Utility class which initializes COM in the constructor (STA or MTA),
+// and uninitializes COM in the destructor.
+class ScopedCOMInitializer {
+ public:
+  // Enum value provided to initialize the thread as an MTA instead of STA.
+  enum SelectMTA { kMTA };
+
+  // Constructor for STA initialization.
+  ScopedCOMInitializer() {
+    Initialize(COINIT_APARTMENTTHREADED);
+  }
+
+  // Constructor for MTA initialization.
+  explicit ScopedCOMInitializer(SelectMTA mta) {
+    Initialize(COINIT_MULTITHREADED);
+  }
+
+  ScopedCOMInitializer::~ScopedCOMInitializer() {
+    if (SUCCEEDED(hr_))
+      CoUninitialize();
+  }
+
+  bool succeeded() const { return SUCCEEDED(hr_); }
+ 
+ private:
+  void Initialize(COINIT init) {
+    hr_ = CoInitializeEx(NULL, init);
+  }
+
+  HRESULT hr_;
+
+  ScopedCOMInitializer(const ScopedCOMInitializer&);
+  void operator=(const ScopedCOMInitializer&);
+};
+
+
+class AudioDeviceWindowsCore : public AudioDeviceGeneric
+{
+public:
+    AudioDeviceWindowsCore(const WebRtc_Word32 id);
+    ~AudioDeviceWindowsCore();
+
+    static bool CoreAudioIsSupported();
+
+    // Retrieve the currently utilized audio layer
+    virtual WebRtc_Word32 ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const;
+
+    // Main initializaton and termination
+    virtual WebRtc_Word32 Init();
+    virtual WebRtc_Word32 Terminate();
+    virtual bool Initialized() const;
+
+    // Device enumeration
+    virtual WebRtc_Word16 PlayoutDevices();
+    virtual WebRtc_Word16 RecordingDevices();
+    virtual WebRtc_Word32 PlayoutDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize]);
+    virtual WebRtc_Word32 RecordingDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize]);
+
+    // Device selection
+    virtual WebRtc_Word32 SetPlayoutDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetPlayoutDevice(AudioDeviceModule::WindowsDeviceType device);
+    virtual WebRtc_Word32 SetRecordingDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetRecordingDevice(AudioDeviceModule::WindowsDeviceType device);
+
+    // Audio transport initialization
+    virtual WebRtc_Word32 PlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitPlayout();
+    virtual bool PlayoutIsInitialized() const;
+    virtual WebRtc_Word32 RecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitRecording();
+    virtual bool RecordingIsInitialized() const;
+
+    // Audio transport control
+    virtual WebRtc_Word32 StartPlayout();
+    virtual WebRtc_Word32 StopPlayout();
+    virtual bool Playing() const;
+    virtual WebRtc_Word32 StartRecording();
+    virtual WebRtc_Word32 StopRecording();
+    virtual bool Recording() const;
+
+    // Microphone Automatic Gain Control (AGC)
+    virtual WebRtc_Word32 SetAGC(bool enable);
+    virtual bool AGC() const;
+
+    // Volume control based on the Windows Wave API (Windows only)
+    virtual WebRtc_Word32 SetWaveOutVolume(WebRtc_UWord16 volumeLeft, WebRtc_UWord16 volumeRight);
+    virtual WebRtc_Word32 WaveOutVolume(WebRtc_UWord16& volumeLeft, WebRtc_UWord16& volumeRight) const;
+
+    // Audio mixer initialization
+    virtual WebRtc_Word32 SpeakerIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitSpeaker();
+    virtual bool SpeakerIsInitialized() const;
+    virtual WebRtc_Word32 MicrophoneIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitMicrophone();
+    virtual bool MicrophoneIsInitialized() const;
+
+    // Speaker volume controls
+    virtual WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
+
+    // Microphone volume controls
+    virtual WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const;
+
+    // Speaker mute control
+    virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerMute(bool enable);
+    virtual WebRtc_Word32 SpeakerMute(bool& enabled) const;
+
+    // Microphone mute control
+    virtual WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneMute(bool enable);
+    virtual WebRtc_Word32 MicrophoneMute(bool& enabled) const;
+
+    // Microphone boost control
+    virtual WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    virtual WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
+
+    // Stereo support
+    virtual WebRtc_Word32 StereoPlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoPlayout(bool enable);
+    virtual WebRtc_Word32 StereoPlayout(bool& enabled) const;
+    virtual WebRtc_Word32 StereoRecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoRecording(bool enable);
+    virtual WebRtc_Word32 StereoRecording(bool& enabled) const;
+
+    // Delay information and control
+    virtual WebRtc_Word32 SetPlayoutBuffer(const AudioDeviceModule::BufferType type, WebRtc_UWord16 sizeMS);
+    virtual WebRtc_Word32 PlayoutBuffer(AudioDeviceModule::BufferType& type, WebRtc_UWord16& sizeMS) const;
+    virtual WebRtc_Word32 PlayoutDelay(WebRtc_UWord16& delayMS) const;
+    virtual WebRtc_Word32 RecordingDelay(WebRtc_UWord16& delayMS) const;
+
+    // CPU load
+    virtual WebRtc_Word32 CPULoad(WebRtc_UWord16& load) const;
+
+    virtual int32_t EnableBuiltInAEC(bool enable);
+    virtual bool BuiltInAECIsEnabled() const;
+
+public:
+    virtual bool PlayoutWarning() const;
+    virtual bool PlayoutError() const;
+    virtual bool RecordingWarning() const;
+    virtual bool RecordingError() const;
+    virtual void ClearPlayoutWarning();
+    virtual void ClearPlayoutError();
+    virtual void ClearRecordingWarning();
+    virtual void ClearRecordingError();
+
+public:
+    virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
+
+private:    // avrt function pointers
+    PAvRevertMmThreadCharacteristics    _PAvRevertMmThreadCharacteristics;
+    PAvSetMmThreadCharacteristicsA      _PAvSetMmThreadCharacteristicsA;
+    PAvSetMmThreadPriority              _PAvSetMmThreadPriority;
+    HMODULE                             _avrtLibrary;
+    bool                                _winSupportAvrt;
+
+private:    // thread functions
+    DWORD InitCaptureThreadPriority();
+    void RevertCaptureThreadPriority();
+    static DWORD WINAPI WSAPICaptureThread(LPVOID context);
+    DWORD DoCaptureThread();
+
+    static DWORD WINAPI WSAPICaptureThreadPollDMO(LPVOID context);
+    DWORD DoCaptureThreadPollDMO();
+
+    static DWORD WINAPI WSAPIRenderThread(LPVOID context);
+    DWORD DoRenderThread();
+
+    static DWORD WINAPI GetCaptureVolumeThread(LPVOID context);
+    DWORD DoGetCaptureVolumeThread();
+
+    static DWORD WINAPI SetCaptureVolumeThread(LPVOID context);
+    DWORD DoSetCaptureVolumeThread();
+
+    void _SetThreadName(DWORD dwThreadID, LPCSTR szThreadName);
+    void _Lock() { _critSect.Enter(); };
+    void _UnLock() { _critSect.Leave(); };
+
+private:
+    WebRtc_Word32 Id() {return _id;}
+
+private:
+    int SetDMOProperties();
+
+    int SetBoolProperty(IPropertyStore* ptrPS,
+                        REFPROPERTYKEY key,
+                        VARIANT_BOOL value);
+
+    int SetVtI4Property(IPropertyStore* ptrPS,
+                        REFPROPERTYKEY key,
+                        LONG value);
+
+    WebRtc_Word32 _EnumerateEndpointDevicesAll(EDataFlow dataFlow) const;
+    void _TraceCOMError(HRESULT hr) const;
+
+    WebRtc_Word32 _RefreshDeviceList(EDataFlow dir);
+    WebRtc_Word16 _DeviceListCount(EDataFlow dir);
+    WebRtc_Word32 _GetDefaultDeviceName(EDataFlow dir, ERole role, LPWSTR szBuffer, int bufferLen);
+    WebRtc_Word32 _GetListDeviceName(EDataFlow dir, int index, LPWSTR szBuffer, int bufferLen);
+    WebRtc_Word32 _GetDeviceName(IMMDevice* pDevice, LPWSTR pszBuffer, int bufferLen);
+    WebRtc_Word32 _GetListDeviceID(EDataFlow dir, int index, LPWSTR szBuffer, int bufferLen);
+    WebRtc_Word32 _GetDefaultDeviceID(EDataFlow dir, ERole role, LPWSTR szBuffer, int bufferLen);
+    WebRtc_Word32 _GetDefaultDeviceIndex(EDataFlow dir, ERole role, int* index);
+    WebRtc_Word32 _GetDeviceID(IMMDevice* pDevice, LPWSTR pszBuffer, int bufferLen);
+    WebRtc_Word32 _GetDefaultDevice(EDataFlow dir, ERole role, IMMDevice** ppDevice);
+    WebRtc_Word32 _GetListDevice(EDataFlow dir, int index, IMMDevice** ppDevice);
+
+    void _Get44kHzDrift();
+
+    // Converts from wide-char to UTF-8 if UNICODE is defined.
+    // Does nothing if UNICODE is undefined.
+    char* WideToUTF8(const TCHAR* src) const;
+
+    WebRtc_Word32 InitRecordingDMO();
+
+private:
+    ScopedCOMInitializer                    _comInit;
+    AudioDeviceBuffer*                      _ptrAudioBuffer;
+    CriticalSectionWrapper&                 _critSect;
+    CriticalSectionWrapper&                 _volumeMutex;
+    WebRtc_Word32                           _id;
+
+private:  // MMDevice
+    IMMDeviceEnumerator*                    _ptrEnumerator;
+    IMMDeviceCollection*                    _ptrRenderCollection;
+    IMMDeviceCollection*                    _ptrCaptureCollection;
+    IMMDevice*                              _ptrDeviceOut;
+    IMMDevice*                              _ptrDeviceIn;
+
+private:  // WASAPI
+    IAudioClient*                           _ptrClientOut;
+    IAudioClient*                           _ptrClientIn;
+    IAudioRenderClient*                     _ptrRenderClient;
+    IAudioCaptureClient*                    _ptrCaptureClient;
+    IAudioEndpointVolume*                   _ptrCaptureVolume;
+    ISimpleAudioVolume*                     _ptrRenderSimpleVolume;
+
+    // DirectX Media Object (DMO) for the built-in AEC.
+    scoped_refptr<IMediaObject>             _dmo;
+    scoped_refptr<IMediaBuffer>             _mediaBuffer;
+    bool                                    _builtInAecEnabled;
+
+    HANDLE                                  _hRenderSamplesReadyEvent;
+    HANDLE                                  _hPlayThread;
+    HANDLE                                  _hRenderStartedEvent;
+    HANDLE                                  _hShutdownRenderEvent;
+
+    HANDLE                                  _hCaptureSamplesReadyEvent;
+    HANDLE                                  _hRecThread;
+    HANDLE                                  _hCaptureStartedEvent;
+    HANDLE                                  _hShutdownCaptureEvent;
+
+    HANDLE                                  _hGetCaptureVolumeThread;
+    HANDLE                                  _hSetCaptureVolumeThread;
+    HANDLE                                  _hSetCaptureVolumeEvent;
+
+    HANDLE                                  _hMmTask;
+
+    UINT                                    _playAudioFrameSize;
+    WebRtc_UWord32                          _playSampleRate;
+    WebRtc_UWord32                          _devicePlaySampleRate;
+    WebRtc_UWord32                          _playBlockSize;
+    WebRtc_UWord32                          _devicePlayBlockSize;
+    WebRtc_UWord32                          _playChannels;
+    WebRtc_UWord32                          _sndCardPlayDelay;
+    UINT64                                  _writtenSamples;
+    LONGLONG                                _playAcc;
+
+    UINT                                    _recAudioFrameSize;
+    WebRtc_UWord32                          _recSampleRate;
+    WebRtc_UWord32                          _recBlockSize;
+    WebRtc_UWord32                          _recChannels;
+    UINT64                                  _readSamples;
+    WebRtc_UWord32                          _sndCardRecDelay;
+
+    float                                   _sampleDriftAt48kHz;
+    float                                   _driftAccumulator;
+
+    WebRtc_UWord16                          _recChannelsPrioList[2];
+    WebRtc_UWord16                          _playChannelsPrioList[2];
+
+    LARGE_INTEGER                           _perfCounterFreq;
+    double                                  _perfCounterFactor;
+    float                                   _avgCPULoad;
+
+private:
+    bool                                    _initialized;
+    bool                                    _recording;
+    bool                                    _playing;
+    bool                                    _recIsInitialized;
+    bool                                    _playIsInitialized;
+    bool                                    _speakerIsInitialized;
+    bool                                    _microphoneIsInitialized;
+
+    bool                                    _usingInputDeviceIndex;
+    bool                                    _usingOutputDeviceIndex;
+    AudioDeviceModule::WindowsDeviceType    _inputDevice;
+    AudioDeviceModule::WindowsDeviceType    _outputDevice;
+    WebRtc_UWord16                          _inputDeviceIndex;
+    WebRtc_UWord16                          _outputDeviceIndex;
+
+    bool                                    _AGC;
+
+    WebRtc_UWord16                          _playWarning;
+    WebRtc_UWord16                          _playError;
+    WebRtc_UWord16                          _recWarning;
+    WebRtc_UWord16                          _recError;
+
+    AudioDeviceModule::BufferType           _playBufType;
+    WebRtc_UWord16                          _playBufDelay;
+    WebRtc_UWord16                          _playBufDelayFixed;
+
+    WebRtc_UWord16                          _newMicLevel;
+
+    mutable char                            _str[512];
+};
+
+#endif    // #if (_MSC_VER >= 1400)
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_CORE_WIN_H
+
diff --git a/src/modules/audio_device/main/source/win/audio_device_utility_win.cc b/src/modules/audio_device/main/source/win/audio_device_utility_win.cc
new file mode 100644
index 0000000..49fb522
--- /dev/null
+++ b/src/modules/audio_device/main/source/win/audio_device_utility_win.cc
@@ -0,0 +1,231 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_device_utility_win.h"
+#include "audio_device_config.h"
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+#include <windows.h>
+#include <tchar.h>
+#include <strsafe.h>
+
+#define STRING_MAX_SIZE 256
+
+typedef void (WINAPI *PGNSI)(LPSYSTEM_INFO);
+typedef BOOL (WINAPI *PGPI)(DWORD, DWORD, DWORD, DWORD, PDWORD);
+
+namespace webrtc
+{
+
+// ============================================================================
+//                            Construction & Destruction
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceUtilityWindows() - ctor
+// ----------------------------------------------------------------------------
+
+AudioDeviceUtilityWindows::AudioDeviceUtilityWindows(const WebRtc_Word32 id) :
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _id(id),
+    _lastError(AudioDeviceModule::kAdmErrNone)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id, "%s created", __FUNCTION__);
+}
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceUtilityWindows() - dtor
+// ----------------------------------------------------------------------------
+
+AudioDeviceUtilityWindows::~AudioDeviceUtilityWindows()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s destroyed", __FUNCTION__);
+    {
+        CriticalSectionScoped lock(&_critSect);
+
+        // free stuff here...
+    }
+
+    delete &_critSect;
+}
+
+// ============================================================================
+//                                     API
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  Init()
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceUtilityWindows::Init()
+{
+
+    TCHAR szOS[STRING_MAX_SIZE];
+
+    if (GetOSDisplayString(szOS))
+    {
+#ifdef _UNICODE
+        char os[STRING_MAX_SIZE];
+        if (WideCharToMultiByte(CP_UTF8, 0, szOS, -1, os, STRING_MAX_SIZE, NULL, NULL) == 0)
+        {
+            strncpy(os, "Could not get OS info", STRING_MAX_SIZE);
+        }
+        // DEBUG_PRINTP("OS info: %s\n", os);
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "  OS info: %s", os);
+#else
+        // DEBUG_PRINTP("OS info: %s\n", szOS);
+        WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, _id, "  OS info: %s", szOS);
+#endif
+    }
+
+    return 0;
+}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+BOOL AudioDeviceUtilityWindows::GetOSDisplayString(LPTSTR pszOS)
+{
+    OSVERSIONINFOEX osvi;
+    SYSTEM_INFO si;
+    PGNSI pGNSI;
+    BOOL bOsVersionInfoEx;
+
+    ZeroMemory(&si, sizeof(SYSTEM_INFO));
+    ZeroMemory(&osvi, sizeof(OSVERSIONINFOEX));
+
+    osvi.dwOSVersionInfoSize = sizeof(OSVERSIONINFOEX);
+
+    // Retrieve information about the current operating system
+    //
+    if (!(bOsVersionInfoEx = GetVersionEx((OSVERSIONINFO *) &osvi)))
+        return FALSE;
+
+    // Parse our OS version string
+    //
+    if (VER_PLATFORM_WIN32_NT == osvi.dwPlatformId && osvi.dwMajorVersion > 4)
+    {
+        StringCchCopy(pszOS, STRING_MAX_SIZE, TEXT("Microsoft "));
+
+        // Test for the specific product
+        //
+        //  Operating system	    Version number
+        //  --------------------------------------
+        //  Windows 7	            6.1
+        //  Windows Server 2008 R2	6.1
+        //  Windows Server 2008	    6.0
+        //  Windows Vista	        6.0
+        //  - - - - - - - - - - - - - - - - - - - 
+        //  Windows Server 2003 R2	5.2
+        //  Windows Server 2003	    5.2
+        //  Windows XP	            5.1
+        //  Windows 2000	        5.0
+        //
+        //  see http://msdn.microsoft.com/en-us/library/ms724832(VS.85).aspx for details
+        //
+        if (osvi.dwMajorVersion == 6)
+        {
+            if (osvi.dwMinorVersion == 0)
+            {
+                // Windows Vista or Server 2008
+                if (osvi.wProductType == VER_NT_WORKSTATION)
+                    StringCchCat(pszOS, STRING_MAX_SIZE, TEXT("Windows Vista "));
+                else 
+                    StringCchCat(pszOS, STRING_MAX_SIZE, TEXT("Windows Server 2008 " ));
+            }
+
+            if (osvi.dwMinorVersion == 1)
+            {
+                // Windows 7 or Server 2008 R2
+                if (osvi.wProductType == VER_NT_WORKSTATION)
+                    StringCchCat(pszOS, STRING_MAX_SIZE, TEXT("Windows 7 "));
+                else 
+                    StringCchCat(pszOS, STRING_MAX_SIZE, TEXT("Windows Server 2008 R2 " ));
+            }
+        }
+
+        if (osvi.dwMajorVersion == 5 && osvi.dwMinorVersion == 2)
+        {
+            StringCchCat(pszOS, STRING_MAX_SIZE, TEXT("Windows Server 2003"));
+        }
+
+        if (osvi.dwMajorVersion == 5 && osvi.dwMinorVersion == 1)
+        {
+            StringCchCat(pszOS, STRING_MAX_SIZE, TEXT("Windows XP "));
+            if (osvi.wSuiteMask & VER_SUITE_PERSONAL)
+                StringCchCat(pszOS, STRING_MAX_SIZE, TEXT( "Home Edition" ));
+            else 
+                StringCchCat(pszOS, STRING_MAX_SIZE, TEXT( "Professional" ));
+        }
+
+        if (osvi.dwMajorVersion == 5 && osvi.dwMinorVersion == 0)
+        {
+            StringCchCat(pszOS, STRING_MAX_SIZE, TEXT("Windows 2000 "));
+
+            if (osvi.wProductType == VER_NT_WORKSTATION )
+            {
+                StringCchCat(pszOS, STRING_MAX_SIZE, TEXT( "Professional" ));
+            }
+            else 
+            {
+                if (osvi.wSuiteMask & VER_SUITE_DATACENTER)
+                    StringCchCat(pszOS, STRING_MAX_SIZE, TEXT( "Datacenter Server" ));
+                else if (osvi.wSuiteMask & VER_SUITE_ENTERPRISE)
+                    StringCchCat(pszOS, STRING_MAX_SIZE, TEXT( "Advanced Server" ));
+                else StringCchCat(pszOS, STRING_MAX_SIZE, TEXT( "Server" ));
+            }
+        }
+
+        // Include service pack (if any)
+        //
+        if (_tcslen(osvi.szCSDVersion) > 0)
+        {
+            StringCchCat(pszOS, STRING_MAX_SIZE, TEXT(" "));
+            StringCchCat(pszOS, STRING_MAX_SIZE, osvi.szCSDVersion);
+        }
+
+        TCHAR buf[80];
+
+        // Include build number
+        //
+        StringCchPrintf( buf, 80, TEXT(" (build %d)"), osvi.dwBuildNumber);
+        StringCchCat(pszOS, STRING_MAX_SIZE, buf);
+
+        // Call GetNativeSystemInfo if supported or GetSystemInfo otherwise
+        //
+        pGNSI = (PGNSI) GetProcAddress(GetModuleHandle(TEXT("kernel32.dll")), "GetNativeSystemInfo");
+        if (NULL != pGNSI)
+            pGNSI(&si);
+        else 
+            GetSystemInfo(&si);
+
+        // Add 64-bit or 32-bit for OS versions "later than" Vista
+        //
+        if (osvi.dwMajorVersion >= 6)
+        {
+            if ((si.wProcessorArchitecture == PROCESSOR_ARCHITECTURE_AMD64) || 
+                (si.wProcessorArchitecture == PROCESSOR_ARCHITECTURE_IA64))
+                StringCchCat(pszOS, STRING_MAX_SIZE, TEXT( ", 64-bit" ));
+            else if (si.wProcessorArchitecture == PROCESSOR_ARCHITECTURE_INTEL )
+                StringCchCat(pszOS, STRING_MAX_SIZE, TEXT(", 32-bit"));
+        }
+      
+        return TRUE; 
+    }
+    else
+    {  
+        return FALSE;
+   }
+}
+
+}  // namespace webrtc
diff --git a/src/modules/audio_device/main/source/win/audio_device_utility_win.h b/src/modules/audio_device/main/source/win/audio_device_utility_win.h
new file mode 100644
index 0000000..77b4c22
--- /dev/null
+++ b/src/modules/audio_device/main/source/win/audio_device_utility_win.h
@@ -0,0 +1,41 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_WIN_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_WIN_H
+
+#include "audio_device_utility.h"
+#include "audio_device.h"
+#include <windows.h>
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+
+class AudioDeviceUtilityWindows : public AudioDeviceUtility
+{
+public:
+    AudioDeviceUtilityWindows(const WebRtc_Word32 id);
+    ~AudioDeviceUtilityWindows();
+
+    virtual WebRtc_Word32 Init();
+
+private:
+    BOOL GetOSDisplayString(LPTSTR pszOS);
+
+private:
+    CriticalSectionWrapper&         _critSect;
+    WebRtc_Word32                   _id;
+    AudioDeviceModule::ErrorCode    _lastError;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_UTILITY_WIN_H
diff --git a/src/modules/audio_device/main/source/win/audio_device_wave_win.cc b/src/modules/audio_device/main/source/win/audio_device_wave_win.cc
new file mode 100644
index 0000000..17edc7b
--- /dev/null
+++ b/src/modules/audio_device/main/source/win/audio_device_wave_win.cc
@@ -0,0 +1,3824 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_device_utility.h"
+#include "audio_device_wave_win.h"
+#include "audio_device_config.h"
+
+#include "trace.h"
+#include "thread_wrapper.h"
+#include "event_wrapper.h"
+
+#include <windows.h>
+#include <objbase.h>    // CoTaskMemAlloc, CoTaskMemFree
+#include <strsafe.h>    // StringCchCopy(), StringCchCat(), StringCchPrintf()
+#include <cassert>
+
+// Avoids the need of Windows 7 SDK
+#ifndef WAVE_MAPPED_DEFAULT_COMMUNICATION_DEVICE
+#define WAVE_MAPPED_DEFAULT_COMMUNICATION_DEVICE   0x0010
+#endif
+
+// Supported in Windows Vista and Windows 7.
+// http://msdn.microsoft.com/en-us/library/dd370819(v=VS.85).aspx
+// Taken from Mmddk.h.
+#define DRV_RESERVED                      0x0800
+#define DRV_QUERYFUNCTIONINSTANCEID       (DRV_RESERVED + 17)
+#define DRV_QUERYFUNCTIONINSTANCEIDSIZE   (DRV_RESERVED + 18)
+
+#define POW2(A) (2 << ((A) - 1))
+
+namespace webrtc {
+
+// ============================================================================
+//                            Construction & Destruction
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceWindowsWave - ctor
+// ----------------------------------------------------------------------------
+
+AudioDeviceWindowsWave::AudioDeviceWindowsWave(const WebRtc_Word32 id) :
+    _ptrAudioBuffer(NULL),
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _timeEvent(*EventWrapper::Create()),
+    _recStartEvent(*EventWrapper::Create()),
+    _playStartEvent(*EventWrapper::Create()),
+    _hGetCaptureVolumeThread(NULL),
+    _hShutdownGetVolumeEvent(NULL),
+    _hSetCaptureVolumeThread(NULL),
+    _hShutdownSetVolumeEvent(NULL),
+    _hSetCaptureVolumeEvent(NULL),
+    _ptrThread(NULL),
+    _threadID(0),
+    _critSectCb(*CriticalSectionWrapper::CreateCriticalSection()),
+    _id(id),
+    _mixerManager(id),
+    _usingInputDeviceIndex(false),
+    _usingOutputDeviceIndex(false),
+    _inputDevice(AudioDeviceModule::kDefaultDevice),
+    _outputDevice(AudioDeviceModule::kDefaultDevice),
+    _inputDeviceIndex(0),
+    _outputDeviceIndex(0),
+    _inputDeviceIsSpecified(false),
+    _outputDeviceIsSpecified(false),
+    _initialized(false),
+    _recIsInitialized(false),
+    _playIsInitialized(false),
+    _recording(false),
+    _playing(false),
+    _startRec(false),
+    _stopRec(false),
+    _startPlay(false),
+    _stopPlay(false),
+    _AGC(false),
+    _hWaveIn(NULL),
+    _hWaveOut(NULL),
+    _recChannels(N_REC_CHANNELS),
+    _playChannels(N_PLAY_CHANNELS),
+    _recBufCount(0),
+    _recPutBackDelay(0),
+    _recDelayCount(0),
+    _playBufCount(0),
+    _prevPlayTime(0),
+    _prevRecTime(0),
+    _prevTimerCheckTime(0),
+    _timesdwBytes(0),
+    _timerFaults(0),
+    _timerRestartAttempts(0),
+    _no_of_msecleft_warnings(0),
+    _MAX_minBuffer(65),
+    _useHeader(0),
+    _dTcheckPlayBufDelay(10),
+    _playBufDelay(80),
+    _playBufDelayFixed(80),
+    _minPlayBufDelay(20),
+    _avgCPULoad(0),
+    _sndCardPlayDelay(0),
+    _sndCardRecDelay(0),
+    _plSampOld(0),
+    _rcSampOld(0),
+    _playBufType(AudioDeviceModule::kAdaptiveBufferSize),
+    _recordedBytes(0),
+    _playWarning(0),
+    _playError(0),
+    _recWarning(0),
+    _recError(0),
+    _newMicLevel(0),
+    _minMicVolume(0),
+    _maxMicVolume(0)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, id, "%s created", __FUNCTION__);
+
+    // Initialize value, set to 0 if it fails
+    if (!QueryPerformanceFrequency(&_perfFreq))
+    {
+        _perfFreq.QuadPart = 0;
+    }
+
+    _hShutdownGetVolumeEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
+    _hShutdownSetVolumeEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
+    _hSetCaptureVolumeEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
+}
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceWindowsWave - dtor
+// ----------------------------------------------------------------------------
+
+AudioDeviceWindowsWave::~AudioDeviceWindowsWave()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s destroyed", __FUNCTION__);
+
+    Terminate();
+
+    delete &_recStartEvent;
+    delete &_playStartEvent;
+    delete &_timeEvent;
+    delete &_critSect;
+    delete &_critSectCb;
+
+    if (NULL != _hShutdownGetVolumeEvent)
+    {
+        CloseHandle(_hShutdownGetVolumeEvent);
+        _hShutdownGetVolumeEvent = NULL;
+    }
+
+    if (NULL != _hShutdownSetVolumeEvent)
+    {
+        CloseHandle(_hShutdownSetVolumeEvent);
+        _hShutdownSetVolumeEvent = NULL;
+    }
+
+    if (NULL != _hSetCaptureVolumeEvent)
+    {
+        CloseHandle(_hSetCaptureVolumeEvent);
+        _hSetCaptureVolumeEvent = NULL;
+    }
+}
+
+// ============================================================================
+//                                     API
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  AttachAudioBuffer
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsWave::AttachAudioBuffer(AudioDeviceBuffer* audioBuffer)
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    _ptrAudioBuffer = audioBuffer;
+
+    // inform the AudioBuffer about default settings for this implementation
+    _ptrAudioBuffer->SetRecordingSampleRate(N_REC_SAMPLES_PER_SEC);
+    _ptrAudioBuffer->SetPlayoutSampleRate(N_PLAY_SAMPLES_PER_SEC);
+    _ptrAudioBuffer->SetRecordingChannels(N_REC_CHANNELS);
+    _ptrAudioBuffer->SetPlayoutChannels(N_PLAY_CHANNELS);
+}
+
+// ----------------------------------------------------------------------------
+//  ActiveAudioLayer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const
+{
+    audioLayer = AudioDeviceModule::kWindowsWaveAudio;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Init
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::Init()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_initialized)
+    {
+        return 0;
+    }
+
+    const WebRtc_UWord32 nowTime(AudioDeviceUtility::GetTimeInMS());
+
+    _recordedBytes = 0;
+    _prevRecByteCheckTime = nowTime;
+    _prevRecTime = nowTime;
+    _prevPlayTime = nowTime;
+    _prevTimerCheckTime = nowTime;
+
+    _playWarning = 0;
+    _playError = 0;
+    _recWarning = 0;
+    _recError = 0;
+
+    _mixerManager.EnumerateAll();
+
+    if (_ptrThread)
+    {
+        // thread is already created and active
+        return 0;
+    }
+
+    const char* threadName = "webrtc_audio_module_thread";
+    _ptrThread = ThreadWrapper::CreateThread(ThreadFunc, 
+                                             this, 
+                                             kRealtimePriority,
+                                             threadName);
+    if (_ptrThread == NULL)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "failed to create the audio thread");
+        return -1;
+    }
+
+    unsigned int threadID(0);
+    if (!_ptrThread->Start(threadID))
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "failed to start the audio thread");
+        delete _ptrThread;
+        _ptrThread = NULL;
+        return -1;
+    }
+    _threadID = threadID;
+
+    const bool periodic(true);
+    if (!_timeEvent.StartTimer(periodic, TIMER_PERIOD_MS))
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "failed to start the timer event");
+        if (_ptrThread->Stop())
+        {
+            delete _ptrThread;
+            _ptrThread = NULL;
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "unable to stop the activated thread");
+        }
+        return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                 "periodic timer (dT=%d) is now active", TIMER_PERIOD_MS);
+
+    _hGetCaptureVolumeThread = CreateThread(NULL,
+                                            0,
+                                            GetCaptureVolumeThread,
+                                            this,
+                                            0,
+                                            NULL);
+    if (_hGetCaptureVolumeThread == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "  failed to create the volume getter thread");
+        return -1;
+    }
+
+    SetThreadPriority(_hGetCaptureVolumeThread, THREAD_PRIORITY_NORMAL);
+
+    _hSetCaptureVolumeThread = CreateThread(NULL,
+                                            0,
+                                            SetCaptureVolumeThread,
+                                            this,
+                                            0,
+                                            NULL);
+    if (_hSetCaptureVolumeThread == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "  failed to create the volume setter thread");
+        return -1;
+    }
+
+    SetThreadPriority(_hSetCaptureVolumeThread, THREAD_PRIORITY_NORMAL);
+
+    _initialized = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Terminate
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::Terminate()
+{
+
+    if (!_initialized)
+    {
+        return 0;
+    }
+
+    _critSect.Enter();
+
+    _mixerManager.Close();
+
+    if (_ptrThread)
+    {
+        ThreadWrapper* tmpThread = _ptrThread;
+        _ptrThread = NULL;
+        _critSect.Leave();
+
+        tmpThread->SetNotAlive();
+        _timeEvent.Set();
+
+        if (tmpThread->Stop())
+        {
+            delete tmpThread;
+        }
+        else
+        {
+            _critSect.Leave();
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                         "failed to close down the audio thread");
+            return -1;
+        }
+    }
+    else
+    {
+        _critSect.Leave();
+    }
+
+    _critSect.Enter();
+    SetEvent(_hShutdownGetVolumeEvent);
+    _critSect.Leave();
+    WebRtc_Word32 ret = WaitForSingleObject(_hGetCaptureVolumeThread, 2000);
+    if (ret != WAIT_OBJECT_0)
+    {
+        // the thread did not stop as it should
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "  failed to close down volume getter thread");
+        CloseHandle(_hGetCaptureVolumeThread);
+        _hGetCaptureVolumeThread = NULL;
+        return -1;
+    }
+    _critSect.Enter();
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, 
+        "  volume getter thread is now closed");
+
+    SetEvent(_hShutdownSetVolumeEvent);
+    _critSect.Leave();
+    ret = WaitForSingleObject(_hSetCaptureVolumeThread, 2000);
+    if (ret != WAIT_OBJECT_0)
+    {
+        // the thread did not stop as it should
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
+            "  failed to close down volume setter thread");
+        CloseHandle(_hSetCaptureVolumeThread);
+        _hSetCaptureVolumeThread = NULL;
+        return -1;
+    }
+    _critSect.Enter();
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+        "  volume setter thread is now closed");
+
+    CloseHandle(_hGetCaptureVolumeThread);
+    _hGetCaptureVolumeThread = NULL;
+
+    CloseHandle(_hSetCaptureVolumeThread);
+    _hSetCaptureVolumeThread = NULL;
+
+    _critSect.Leave();
+
+    _timeEvent.StopTimer();
+
+    _initialized = false;
+    _outputDeviceIsSpecified = false;
+    _inputDeviceIsSpecified = false;
+
+    return 0;
+}
+
+
+DWORD WINAPI AudioDeviceWindowsWave::GetCaptureVolumeThread(LPVOID context)
+{
+    return(((AudioDeviceWindowsWave*)context)->DoGetCaptureVolumeThread());
+}
+
+DWORD WINAPI AudioDeviceWindowsWave::SetCaptureVolumeThread(LPVOID context)
+{
+    return(((AudioDeviceWindowsWave*)context)->DoSetCaptureVolumeThread());
+}
+
+DWORD AudioDeviceWindowsWave::DoGetCaptureVolumeThread()
+{
+    HANDLE waitObject = _hShutdownGetVolumeEvent;
+
+    while (1)
+    {
+        DWORD waitResult = WaitForSingleObject(waitObject, 
+                                               GET_MIC_VOLUME_INTERVAL_MS);
+        switch (waitResult)
+        {
+            case WAIT_OBJECT_0: // _hShutdownGetVolumeEvent
+                return 0;
+            case WAIT_TIMEOUT:	// timeout notification
+                break;
+            default:            // unexpected error
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                    "  unknown wait termination on get volume thread");
+                return -1;
+        }
+
+        if (AGC())
+        {
+            WebRtc_UWord32 currentMicLevel = 0;
+            if (MicrophoneVolume(currentMicLevel) == 0)
+            {
+                // This doesn't set the system volume, just stores it.
+                _critSect.Enter();
+                if (_ptrAudioBuffer)
+                {
+                    _ptrAudioBuffer->SetCurrentMicLevel(currentMicLevel);				
+                }
+                _critSect.Leave();
+            }
+        }
+    }
+}
+
+DWORD AudioDeviceWindowsWave::DoSetCaptureVolumeThread()
+{
+    HANDLE waitArray[2] = {_hShutdownSetVolumeEvent, _hSetCaptureVolumeEvent};
+
+    while (1)
+    {
+        DWORD waitResult = WaitForMultipleObjects(2, waitArray, FALSE, INFINITE);
+        switch (waitResult)
+        {
+            case WAIT_OBJECT_0:     // _hShutdownSetVolumeEvent
+                return 0;
+            case WAIT_OBJECT_0 + 1: // _hSetCaptureVolumeEvent
+                break;
+            default:                // unexpected error
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                    "  unknown wait termination on set volume thread");
+                return -1;
+        }
+
+        _critSect.Enter();
+        WebRtc_UWord32 newMicLevel = _newMicLevel;
+        _critSect.Leave();
+
+        if (SetMicrophoneVolume(newMicLevel) == -1)
+        {   
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+                "  the required modification of the microphone volume failed");
+        }
+    }      
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Initialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsWave::Initialized() const
+{
+    return (_initialized);
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SpeakerIsAvailable(bool& available)
+{
+
+    // Enumerate all avaliable speakers and make an attempt to open up the
+    // output mixer corresponding to the currently selected output device.
+    //
+    if (InitSpeaker() == -1)
+    {
+        available = false;
+        return 0;
+    }
+
+    // Given that InitSpeaker was successful, we know that a valid speaker exists
+    //
+    available = true;
+
+    // Close the initialized output mixer
+    //
+    _mixerManager.CloseSpeaker();
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitSpeaker
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::InitSpeaker()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_playing)
+    {
+        return -1;
+    }
+
+    if (_mixerManager.EnumerateSpeakers() == -1)
+    {
+        // failed to locate any valid/controllable speaker
+        return -1;
+    }
+
+    if (IsUsingOutputDeviceIndex())
+    {
+        if (_mixerManager.OpenSpeaker(OutputDeviceIndex()) == -1)
+        {
+            return -1;
+        }
+    }
+    else
+    {
+        if (_mixerManager.OpenSpeaker(OutputDevice()) == -1)
+        {
+            return -1;
+        }
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MicrophoneIsAvailable(bool& available)
+{
+
+    // Enumerate all avaliable microphones and make an attempt to open up the
+    // input mixer corresponding to the currently selected output device.
+    //
+    if (InitMicrophone() == -1)
+    {
+        available = false;
+        return 0;
+    }
+
+    // Given that InitMicrophone was successful, we know that a valid microphone exists
+    //
+    available = true;
+
+    // Close the initialized input mixer
+    //
+    _mixerManager.CloseMicrophone();
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitMicrophone
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::InitMicrophone()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_recording)
+    {
+        return -1;
+    }
+
+    if (_mixerManager.EnumerateMicrophones() == -1)
+    {
+        // failed to locate any valid/controllable microphone
+        return -1;
+    }
+
+    if (IsUsingInputDeviceIndex())
+    {
+        if (_mixerManager.OpenMicrophone(InputDeviceIndex()) == -1)
+        {
+            return -1;
+        }
+    }
+    else
+    {
+        if (_mixerManager.OpenMicrophone(InputDevice()) == -1)
+        {
+            return -1;
+        }
+    }
+
+    WebRtc_UWord32 maxVol = 0;
+    if (_mixerManager.MaxMicrophoneVolume(maxVol) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+            "  unable to retrieve max microphone volume");
+    }
+    _maxMicVolume = maxVol;
+
+    WebRtc_UWord32 minVol = 0;
+    if (_mixerManager.MinMicrophoneVolume(minVol) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
+            "  unable to retrieve min microphone volume");
+    }
+    _minMicVolume = minVol;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsWave::SpeakerIsInitialized() const
+{
+    return (_mixerManager.SpeakerIsInitialized());
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsWave::MicrophoneIsInitialized() const
+{
+    return (_mixerManager.MicrophoneIsInitialized());
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolumeIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SpeakerVolumeIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+
+    // Enumerate all avaliable speakers and make an attempt to open up the
+    // output mixer corresponding to the currently selected output device.
+    //
+    if (InitSpeaker() == -1)
+    {
+        // failed to find a valid speaker
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected speaker has a volume control
+    //
+    _mixerManager.SpeakerVolumeIsAvailable(isAvailable);
+    available = isAvailable;
+
+    // Close the initialized output mixer
+    //
+    _mixerManager.CloseSpeaker();
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetSpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetSpeakerVolume(WebRtc_UWord32 volume)
+{
+
+    return (_mixerManager.SetSpeakerVolume(volume));
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SpeakerVolume(WebRtc_UWord32& volume) const
+{
+
+    WebRtc_UWord32 level(0);
+
+    if (_mixerManager.SpeakerVolume(level) == -1)
+    {
+        return -1;
+    }
+
+    volume = level;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetWaveOutVolume
+//
+//    The low-order word contains the left-channel volume setting, and the
+//    high-order word contains the right-channel setting.
+//    A value of 0xFFFF represents full volume, and a value of 0x0000 is silence.
+//
+//    If a device does not support both left and right volume control,
+//    the low-order word of dwVolume specifies the volume level,
+//    and the high-order word is ignored.
+//
+//    Most devices do not support the full 16 bits of volume-level control
+//    and will not use the least-significant bits of the requested volume setting.
+//    For example, if a device supports 4 bits of volume control, the values
+//    0x4000, 0x4FFF, and 0x43BE will all be truncated to 0x4000.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetWaveOutVolume(WebRtc_UWord16 volumeLeft, WebRtc_UWord16 volumeRight)
+{
+
+    MMRESULT res(0);
+    WAVEOUTCAPS caps;
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_hWaveOut == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no open playout device exists => using default");
+    }
+
+    // To determine whether the device supports volume control on both
+    // the left and right channels, use the WAVECAPS_LRVOLUME flag.
+    //
+    res = waveOutGetDevCaps((UINT_PTR)_hWaveOut, &caps, sizeof(WAVEOUTCAPS));
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutGetDevCaps() failed (err=%d)", res);
+        TraceWaveOutError(res);
+    }
+    if (!(caps.dwSupport & WAVECAPS_VOLUME))
+    {
+        // this device does not support volume control using the waveOutSetVolume API
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "device does not support volume control using the Wave API");
+        return -1;
+    }
+    if (!(caps.dwSupport & WAVECAPS_LRVOLUME))
+    {
+        // high-order word (right channel) is ignored
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "device does not support volume control on both channels");
+    }
+
+    DWORD dwVolume(0x00000000);
+    dwVolume = (DWORD)(((volumeRight & 0xFFFF) << 16) | (volumeLeft & 0xFFFF));
+
+    res = waveOutSetVolume(_hWaveOut, dwVolume);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "waveOutSetVolume() failed (err=%d)", res);
+        TraceWaveOutError(res);
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  WaveOutVolume
+//
+//    The low-order word of this location contains the left-channel volume setting,
+//    and the high-order word contains the right-channel setting.
+//    A value of 0xFFFF (65535) represents full volume, and a value of 0x0000
+//    is silence.
+//
+//    If a device does not support both left and right volume control,
+//    the low-order word of the specified location contains the mono volume level.
+//
+//    The full 16-bit setting(s) set with the waveOutSetVolume function is returned,
+//    regardless of whether the device supports the full 16 bits of volume-level
+//    control.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::WaveOutVolume(WebRtc_UWord16& volumeLeft, WebRtc_UWord16& volumeRight) const
+{
+
+    MMRESULT res(0);
+    WAVEOUTCAPS caps;
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_hWaveOut == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no open playout device exists => using default");
+    }
+
+    // To determine whether the device supports volume control on both
+    // the left and right channels, use the WAVECAPS_LRVOLUME flag.
+    //
+    res = waveOutGetDevCaps((UINT_PTR)_hWaveOut, &caps, sizeof(WAVEOUTCAPS));
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutGetDevCaps() failed (err=%d)", res);
+        TraceWaveOutError(res);
+    }
+    if (!(caps.dwSupport & WAVECAPS_VOLUME))
+    {
+        // this device does not support volume control using the waveOutSetVolume API
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "device does not support volume control using the Wave API");
+        return -1;
+    }
+    if (!(caps.dwSupport & WAVECAPS_LRVOLUME))
+    {
+        // high-order word (right channel) is ignored
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "device does not support volume control on both channels");
+    }
+
+    DWORD dwVolume(0x00000000);
+
+    res = waveOutGetVolume(_hWaveOut, &dwVolume);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "waveOutGetVolume() failed (err=%d)", res);
+        TraceWaveOutError(res);
+        return -1;
+    }
+
+    WORD wVolumeLeft = LOWORD(dwVolume);
+    WORD wVolumeRight = HIWORD(dwVolume);
+
+    volumeLeft = static_cast<WebRtc_UWord16> (wVolumeLeft);
+    volumeRight = static_cast<WebRtc_UWord16> (wVolumeRight);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MaxSpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const
+{
+
+    WebRtc_UWord32 maxVol(0);
+
+    if (_mixerManager.MaxSpeakerVolume(maxVol) == -1)
+    {
+        return -1;
+    }
+
+    maxVolume = maxVol;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MinSpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MinSpeakerVolume(WebRtc_UWord32& minVolume) const
+{
+
+    WebRtc_UWord32 minVol(0);
+
+    if (_mixerManager.MinSpeakerVolume(minVol) == -1)
+    {
+        return -1;
+    }
+
+    minVolume = minVol;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolumeStepSize
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+
+    WebRtc_UWord16 delta(0);
+
+    if (_mixerManager.SpeakerVolumeStepSize(delta) == -1)
+    {
+        return -1;
+    }
+
+    stepSize = delta;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SpeakerMuteIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+
+    // Enumerate all avaliable speakers and make an attempt to open up the
+    // output mixer corresponding to the currently selected output device.
+    //
+    if (InitSpeaker() == -1)
+    {
+        // If we end up here it means that the selected speaker has no volume
+        // control, hence it is safe to state that there is no mute control
+        // already at this stage.
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected speaker has a mute control
+    //
+    _mixerManager.SpeakerMuteIsAvailable(isAvailable);
+    available = isAvailable;
+
+    // Close the initialized output mixer
+    //
+    _mixerManager.CloseSpeaker();
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetSpeakerMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetSpeakerMute(bool enable)
+{
+    return (_mixerManager.SetSpeakerMute(enable));
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SpeakerMute(bool& enabled) const
+{
+
+    bool muted(0);
+
+    if (_mixerManager.SpeakerMute(muted) == -1)
+    {
+        return -1;
+    }
+
+    enabled = muted;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MicrophoneMuteIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+
+    // Enumerate all avaliable microphones and make an attempt to open up the
+    // input mixer corresponding to the currently selected input device.
+    //
+    if (InitMicrophone() == -1)
+    {
+        // If we end up here it means that the selected microphone has no volume
+        // control, hence it is safe to state that there is no boost control
+        // already at this stage.
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected microphone has a mute control
+    //
+    _mixerManager.MicrophoneMuteIsAvailable(isAvailable);
+    available = isAvailable;
+
+    // Close the initialized input mixer
+    //
+    _mixerManager.CloseMicrophone();
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetMicrophoneMute(bool enable)
+{
+    return (_mixerManager.SetMicrophoneMute(enable));
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MicrophoneMute(bool& enabled) const
+{
+
+    bool muted(0);
+
+    if (_mixerManager.MicrophoneMute(muted) == -1)
+    {
+        return -1;
+    }
+
+    enabled = muted;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneBoostIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MicrophoneBoostIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+
+    // Enumerate all avaliable microphones and make an attempt to open up the
+    // input mixer corresponding to the currently selected input device.
+    //
+    if (InitMicrophone() == -1)
+    {
+        // If we end up here it means that the selected microphone has no volume
+        // control, hence it is safe to state that there is no boost control
+        // already at this stage.
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected microphone has a boost control
+    //
+    _mixerManager.MicrophoneBoostIsAvailable(isAvailable);
+    available = isAvailable;
+
+    // Close the initialized input mixer
+    //
+    _mixerManager.CloseMicrophone();
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneBoost
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetMicrophoneBoost(bool enable)
+{
+
+    return (_mixerManager.SetMicrophoneBoost(enable));
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneBoost
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MicrophoneBoost(bool& enabled) const
+{
+
+    bool onOff(0);
+
+    if (_mixerManager.MicrophoneBoost(onOff) == -1)
+    {
+        return -1;
+    }
+
+    enabled = onOff;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoRecordingIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::StereoRecordingIsAvailable(bool& available)
+{
+    available = true;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetStereoRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetStereoRecording(bool enable)
+{
+
+    if (enable)
+        _recChannels = 2;
+    else
+        _recChannels = 1;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::StereoRecording(bool& enabled) const
+{
+
+    if (_recChannels == 2)
+        enabled = true;
+    else
+        enabled = false;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoPlayoutIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::StereoPlayoutIsAvailable(bool& available)
+{
+    available = true;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetStereoPlayout
+//
+//  Specifies the number of output channels.
+//
+//  NOTE - the setting will only have an effect after InitPlayout has
+//  been called.
+//
+//  16-bit mono:
+//
+//  Each sample is 2 bytes. Sample 1 is followed by samples 2, 3, 4, and so on.
+//  For each sample, the first byte is the low-order byte of channel 0 and the
+//  second byte is the high-order byte of channel 0.
+//
+//  16-bit stereo:
+//
+//  Each sample is 4 bytes. Sample 1 is followed by samples 2, 3, 4, and so on.
+//  For each sample, the first byte is the low-order byte of channel 0 (left channel);
+//  the second byte is the high-order byte of channel 0; the third byte is the
+//  low-order byte of channel 1 (right channel); and the fourth byte is the
+//  high-order byte of channel 1.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetStereoPlayout(bool enable)
+{
+
+    if (enable)
+        _playChannels = 2;
+    else
+        _playChannels = 1;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StereoPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::StereoPlayout(bool& enabled) const
+{
+
+    if (_playChannels == 2)
+        enabled = true;
+    else
+        enabled = false;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetAGC
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetAGC(bool enable)
+{
+
+    _AGC = enable;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  AGC
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsWave::AGC() const
+{
+    return _AGC;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolumeIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MicrophoneVolumeIsAvailable(bool& available)
+{
+
+    bool isAvailable(false);
+
+    // Enumerate all avaliable microphones and make an attempt to open up the
+    // input mixer corresponding to the currently selected output device.
+    //
+    if (InitMicrophone() == -1)
+    {
+        // Failed to find valid microphone
+        available = false;
+        return 0;
+    }
+
+    // Check if the selected microphone has a volume control
+    //
+    _mixerManager.MicrophoneVolumeIsAvailable(isAvailable);
+    available = isAvailable;
+
+    // Close the initialized input mixer
+    //
+    _mixerManager.CloseMicrophone();
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetMicrophoneVolume(WebRtc_UWord32 volume)
+{
+    return (_mixerManager.SetMicrophoneVolume(volume));
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MicrophoneVolume(WebRtc_UWord32& volume) const
+{
+    WebRtc_UWord32 level(0);
+
+    if (_mixerManager.MicrophoneVolume(level) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "failed to retrive current microphone level");
+        return -1;
+    }
+
+    volume = level;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MaxMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const
+{
+    // _maxMicVolume can be zero in AudioMixerManager::MaxMicrophoneVolume():
+    // (1) API GetLineControl() returns failure at querying the max Mic level.
+    // (2) API GetLineControl() returns maxVolume as zero in rare cases.
+    // Both cases show we don't have access to the mixer controls.
+    // We return -1 here to indicate that.    
+    if (_maxMicVolume == 0)
+    {
+        return -1;
+    }
+
+    maxVolume = _maxMicVolume;;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MinMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MinMicrophoneVolume(WebRtc_UWord32& minVolume) const
+{
+    minVolume = _minMicVolume;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolumeStepSize
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+
+    WebRtc_UWord16 delta(0);
+
+    if (_mixerManager.MicrophoneVolumeStepSize(delta) == -1)
+    {
+        return -1;
+    }
+
+    stepSize = delta;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDevices
+// ----------------------------------------------------------------------------
+
+WebRtc_Word16 AudioDeviceWindowsWave::PlayoutDevices()
+{
+
+    return (waveOutGetNumDevs());
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutDevice I (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetPlayoutDevice(WebRtc_UWord16 index)
+{
+
+    if (_playIsInitialized)
+    {
+        return -1;
+    }
+
+    UINT nDevices = waveOutGetNumDevs();
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "number of availiable waveform-audio output devices is %u", nDevices);
+
+    if (index < 0 || index > (nDevices-1))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "device index is out of range [0,%u]", (nDevices-1));
+        return -1;
+    }
+
+    _usingOutputDeviceIndex = true;
+    _outputDeviceIndex = index;
+    _outputDeviceIsSpecified = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetPlayoutDevice II (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetPlayoutDevice(AudioDeviceModule::WindowsDeviceType device)
+{
+    if (_playIsInitialized)
+    {
+        return -1;
+    }
+
+    if (device == AudioDeviceModule::kDefaultDevice)
+    {
+    }
+    else if (device == AudioDeviceModule::kDefaultCommunicationDevice)
+    {
+    }
+
+    _usingOutputDeviceIndex = false;
+    _outputDevice = device;
+    _outputDeviceIsSpecified = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDeviceName
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::PlayoutDeviceName(
+    WebRtc_UWord16 index,
+    char name[kAdmMaxDeviceNameSize],
+    char guid[kAdmMaxGuidSize])
+{
+
+    WebRtc_UWord16 nDevices(PlayoutDevices());
+
+    // Special fix for the case when the user asks for the name of the default device.
+    //
+    if (index == (WebRtc_UWord16)(-1))
+    {
+        index = 0;
+    }
+
+    if ((index > (nDevices-1)) || (name == NULL))
+    {
+        return -1;
+    }
+
+    memset(name, 0, kAdmMaxDeviceNameSize);
+
+    if (guid != NULL)
+    {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+
+    WAVEOUTCAPSW caps;    // szPname member (product name (NULL terminated) is a WCHAR
+    MMRESULT res;
+
+    res = waveOutGetDevCapsW(index, &caps, sizeof(WAVEOUTCAPSW));
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutGetDevCapsW() failed (err=%d)", res);
+        return -1;
+    }
+    if (WideCharToMultiByte(CP_UTF8, 0, caps.szPname, -1, name, kAdmMaxDeviceNameSize, NULL, NULL) == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 1", GetLastError());
+    }
+
+    if (guid == NULL)
+    {
+        return 0;
+    }
+
+    // It is possible to get the unique endpoint ID string using the Wave API.
+    // However, it is only supported on Windows Vista and Windows 7.
+
+    size_t cbEndpointId(0);
+
+    // Get the size (including the terminating null) of the endpoint ID string of the waveOut device.
+    // Windows Vista supports the DRV_QUERYFUNCTIONINSTANCEIDSIZE and DRV_QUERYFUNCTIONINSTANCEID messages.
+    res = waveOutMessage((HWAVEOUT)IntToPtr(index),
+                          DRV_QUERYFUNCTIONINSTANCEIDSIZE,
+                         (DWORD_PTR)&cbEndpointId, NULL);
+    if (res != MMSYSERR_NOERROR)
+    {
+        // DRV_QUERYFUNCTIONINSTANCEIDSIZE is not supported <=> earlier version of Windows than Vista
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "waveOutMessage(DRV_QUERYFUNCTIONINSTANCEIDSIZE) failed (err=%d)", res);
+        TraceWaveOutError(res);
+        // Best we can do is to copy the friendly name and use it as guid
+        if (WideCharToMultiByte(CP_UTF8, 0, caps.szPname, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 2", GetLastError());
+        }
+        return 0;
+    }
+
+    // waveOutMessage(DRV_QUERYFUNCTIONINSTANCEIDSIZE) worked => we are on a Vista or Windows 7 device
+
+    WCHAR *pstrEndpointId = NULL;
+    pstrEndpointId = (WCHAR*)CoTaskMemAlloc(cbEndpointId);
+
+    // Get the endpoint ID string for this waveOut device.
+    res = waveOutMessage((HWAVEOUT)IntToPtr(index),
+                          DRV_QUERYFUNCTIONINSTANCEID,
+                         (DWORD_PTR)pstrEndpointId,
+                          cbEndpointId);
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "waveOutMessage(DRV_QUERYFUNCTIONINSTANCEID) failed (err=%d)", res);
+        TraceWaveOutError(res);
+        // Best we can do is to copy the friendly name and use it as guid
+        if (WideCharToMultiByte(CP_UTF8, 0, caps.szPname, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 3", GetLastError());
+        }
+        CoTaskMemFree(pstrEndpointId);
+        return 0;
+    }
+
+    if (WideCharToMultiByte(CP_UTF8, 0, pstrEndpointId, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 4", GetLastError());
+    }
+    CoTaskMemFree(pstrEndpointId);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDeviceName
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::RecordingDeviceName(
+    WebRtc_UWord16 index,
+    char name[kAdmMaxDeviceNameSize],
+    char guid[kAdmMaxGuidSize])
+{
+
+    WebRtc_UWord16 nDevices(RecordingDevices());
+
+    // Special fix for the case when the user asks for the name of the default device.
+    //
+    if (index == (WebRtc_UWord16)(-1))
+    {
+        index = 0;
+    }
+
+    if ((index > (nDevices-1)) || (name == NULL))
+    {
+        return -1;
+    }
+
+    memset(name, 0, kAdmMaxDeviceNameSize);
+
+    if (guid != NULL)
+    {
+        memset(guid, 0, kAdmMaxGuidSize);
+    }
+
+    WAVEINCAPSW caps;    // szPname member (product name (NULL terminated) is a WCHAR
+    MMRESULT res;
+
+    res = waveInGetDevCapsW(index, &caps, sizeof(WAVEINCAPSW));
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInGetDevCapsW() failed (err=%d)", res);
+        return -1;
+    }
+    if (WideCharToMultiByte(CP_UTF8, 0, caps.szPname, -1, name, kAdmMaxDeviceNameSize, NULL, NULL) == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 1", GetLastError());
+    }
+
+    if (guid == NULL)
+    {
+        return 0;
+    }
+
+    // It is possible to get the unique endpoint ID string using the Wave API.
+    // However, it is only supported on Windows Vista and Windows 7.
+
+    size_t cbEndpointId(0);
+
+    // Get the size (including the terminating null) of the endpoint ID string of the waveOut device.
+    // Windows Vista supports the DRV_QUERYFUNCTIONINSTANCEIDSIZE and DRV_QUERYFUNCTIONINSTANCEID messages.
+    res = waveInMessage((HWAVEIN)IntToPtr(index),
+                         DRV_QUERYFUNCTIONINSTANCEIDSIZE,
+                        (DWORD_PTR)&cbEndpointId, NULL);
+    if (res != MMSYSERR_NOERROR)
+    {
+        // DRV_QUERYFUNCTIONINSTANCEIDSIZE is not supported <=> earlier version of Windows than Vista
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "waveInMessage(DRV_QUERYFUNCTIONINSTANCEIDSIZE) failed (err=%d)", res);
+        TraceWaveInError(res);
+        // Best we can do is to copy the friendly name and use it as guid
+        if (WideCharToMultiByte(CP_UTF8, 0, caps.szPname, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 2", GetLastError());
+        }
+        return 0;
+    }
+
+    // waveOutMessage(DRV_QUERYFUNCTIONINSTANCEIDSIZE) worked => we are on a Vista or Windows 7 device
+
+    WCHAR *pstrEndpointId = NULL;
+    pstrEndpointId = (WCHAR*)CoTaskMemAlloc(cbEndpointId);
+
+    // Get the endpoint ID string for this waveOut device.
+    res = waveInMessage((HWAVEIN)IntToPtr(index),
+                          DRV_QUERYFUNCTIONINSTANCEID,
+                         (DWORD_PTR)pstrEndpointId,
+                          cbEndpointId);
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "waveInMessage(DRV_QUERYFUNCTIONINSTANCEID) failed (err=%d)", res);
+        TraceWaveInError(res);
+        // Best we can do is to copy the friendly name and use it as guid
+        if (WideCharToMultiByte(CP_UTF8, 0, caps.szPname, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 3", GetLastError());
+        }
+        CoTaskMemFree(pstrEndpointId);
+        return 0;
+    }
+
+    if (WideCharToMultiByte(CP_UTF8, 0, pstrEndpointId, -1, guid, kAdmMaxGuidSize, NULL, NULL) == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "WideCharToMultiByte(CP_UTF8) failed with error code %d - 4", GetLastError());
+    }
+    CoTaskMemFree(pstrEndpointId);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDevices
+// ----------------------------------------------------------------------------
+
+WebRtc_Word16 AudioDeviceWindowsWave::RecordingDevices()
+{
+
+    return (waveInGetNumDevs());
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingDevice I (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetRecordingDevice(WebRtc_UWord16 index)
+{
+
+    if (_recIsInitialized)
+    {
+        return -1;
+    }
+
+    UINT nDevices = waveInGetNumDevs();
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "number of availiable waveform-audio input devices is %u", nDevices);
+
+    if (index < 0 || index > (nDevices-1))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "device index is out of range [0,%u]", (nDevices-1));
+        return -1;
+    }
+
+    _usingInputDeviceIndex = true;
+    _inputDeviceIndex = index;
+    _inputDeviceIsSpecified = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetRecordingDevice II (II)
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetRecordingDevice(AudioDeviceModule::WindowsDeviceType device)
+{
+    if (device == AudioDeviceModule::kDefaultDevice)
+    {
+    }
+    else if (device == AudioDeviceModule::kDefaultCommunicationDevice)
+    {
+    }
+
+    if (_recIsInitialized)
+    {
+        return -1;
+    }
+
+    _usingInputDeviceIndex = false;
+    _inputDevice = device;
+    _inputDeviceIsSpecified = true;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::PlayoutIsAvailable(bool& available)
+{
+
+    available = false;
+
+    // Try to initialize the playout side
+    WebRtc_Word32 res = InitPlayout();
+
+    // Cancel effect of initialization
+    StopPlayout();
+
+    if (res != -1)
+    {
+        available = true;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::RecordingIsAvailable(bool& available)
+{
+
+    available = false;
+
+    // Try to initialize the recording side
+    WebRtc_Word32 res = InitRecording();
+
+    // Cancel effect of initialization
+    StopRecording();
+
+    if (res != -1)
+    {
+        available = true;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::InitPlayout()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_playing)
+    {
+        return -1;
+    }
+
+    if (!_outputDeviceIsSpecified)
+    {
+        return -1;
+    }
+
+    if (_playIsInitialized)
+    {
+        return 0;
+    }
+
+    // Initialize the speaker (devices might have been added or removed)
+    if (InitSpeaker() == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "InitSpeaker() failed");
+    }
+
+    // Enumerate all availiable output devices
+    EnumeratePlayoutDevices();
+
+    // Start by closing any existing wave-output devices
+    //
+    MMRESULT res(MMSYSERR_ERROR);
+
+    if (_hWaveOut != NULL)
+    {
+        res = waveOutClose(_hWaveOut);
+        if (MMSYSERR_NOERROR != res)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutClose() failed (err=%d)", res);
+            TraceWaveOutError(res);
+        }
+    }
+
+    // Set the output wave format
+    //
+    WAVEFORMATEX waveFormat;
+
+    waveFormat.wFormatTag      = WAVE_FORMAT_PCM;
+    waveFormat.nChannels       = _playChannels;  // mono <=> 1, stereo <=> 2
+    waveFormat.nSamplesPerSec  = N_PLAY_SAMPLES_PER_SEC;
+    waveFormat.wBitsPerSample  = 16;
+    waveFormat.nBlockAlign     = waveFormat.nChannels * (waveFormat.wBitsPerSample/8);
+    waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
+    waveFormat.cbSize          = 0;
+
+    // Open the given waveform-audio output device for playout
+    //
+    HWAVEOUT hWaveOut(NULL);
+
+    if (IsUsingOutputDeviceIndex())
+    {
+        // verify settings first
+        res = waveOutOpen(NULL, _outputDeviceIndex, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_FORMAT_QUERY);
+        if (MMSYSERR_NOERROR == res)
+        {
+            // open the given waveform-audio output device for recording
+            res = waveOutOpen(&hWaveOut, _outputDeviceIndex, &waveFormat, 0, 0, CALLBACK_NULL);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening output device corresponding to device ID %u", _outputDeviceIndex);
+        }
+    }
+    else
+    {
+        if (_outputDevice == AudioDeviceModule::kDefaultCommunicationDevice)
+        {
+            // check if it is possible to open the default communication device (supported on Windows 7)
+            res = waveOutOpen(NULL, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_MAPPED_DEFAULT_COMMUNICATION_DEVICE | WAVE_FORMAT_QUERY);
+            if (MMSYSERR_NOERROR == res)
+            {
+                // if so, open the default communication device for real
+                res = waveOutOpen(&hWaveOut, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL |  WAVE_MAPPED_DEFAULT_COMMUNICATION_DEVICE);
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening default communication device");
+            }
+            else
+            {
+                // use default device since default communication device was not avaliable
+                res = waveOutOpen(&hWaveOut, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL);
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "unable to open default communication device => using default instead");
+            }
+        }
+        else if (_outputDevice == AudioDeviceModule::kDefaultDevice)
+        {
+            // open default device since it has been requested
+            res = waveOutOpen(NULL, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_FORMAT_QUERY);
+            if (MMSYSERR_NOERROR == res)
+            {
+                res = waveOutOpen(&hWaveOut, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL);
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening default output device");
+            }
+        }
+    }
+
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "waveOutOpen() failed (err=%d)", res);
+        TraceWaveOutError(res);
+        return -1;
+    }
+
+    // Log information about the aquired output device
+    //
+    WAVEOUTCAPS caps;
+
+    res = waveOutGetDevCaps((UINT_PTR)hWaveOut, &caps, sizeof(WAVEOUTCAPS));
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutGetDevCaps() failed (err=%d)", res);
+        TraceWaveOutError(res);
+    }
+
+    UINT deviceID(0);
+    res = waveOutGetID(hWaveOut, &deviceID);
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutGetID() failed (err=%d)", res);
+        TraceWaveOutError(res);
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "utilized device ID : %u", deviceID);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product name       : %s", caps.szPname);
+
+    // Store valid handle for the open waveform-audio output device
+    _hWaveOut = hWaveOut;
+
+    // Store the input wave header as well
+    _waveFormatOut = waveFormat;
+
+    // Prepare wave-out headers
+    //
+    const WebRtc_UWord8 bytesPerSample = 2*_playChannels;
+
+    for (int n = 0; n < N_BUFFERS_OUT; n++)
+    {
+        // set up the output wave header
+        _waveHeaderOut[n].lpData          = reinterpret_cast<LPSTR>(&_playBuffer[n]);
+        _waveHeaderOut[n].dwBufferLength  = bytesPerSample*PLAY_BUF_SIZE_IN_SAMPLES;
+        _waveHeaderOut[n].dwFlags         = 0;
+        _waveHeaderOut[n].dwLoops         = 0;
+
+        memset(_playBuffer[n], 0, bytesPerSample*PLAY_BUF_SIZE_IN_SAMPLES);
+
+        // The waveOutPrepareHeader function prepares a waveform-audio data block for playback.
+        // The lpData, dwBufferLength, and dwFlags members of the WAVEHDR structure must be set
+        // before calling this function.
+        //
+        res = waveOutPrepareHeader(_hWaveOut, &_waveHeaderOut[n], sizeof(WAVEHDR));
+        if (MMSYSERR_NOERROR != res)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutPrepareHeader(%d) failed (err=%d)", n, res);
+            TraceWaveOutError(res);
+        }
+
+        // perform extra check to ensure that the header is prepared
+        if (_waveHeaderOut[n].dwFlags != WHDR_PREPARED)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutPrepareHeader(%d) failed (dwFlags != WHDR_PREPARED)", n);
+        }
+    }
+
+    // Mark playout side as initialized
+    _playIsInitialized = true;
+
+    _dTcheckPlayBufDelay = 10;  // check playback buffer delay every 10 ms
+    _playBufCount = 0;          // index of active output wave header (<=> output buffer index)
+    _playBufDelay = 80;         // buffer delay/size is initialized to 80 ms and slowly decreased until er < 25
+    _minPlayBufDelay = 25;      // minimum playout buffer delay
+    _MAX_minBuffer = 65;        // adaptive minimum playout buffer delay cannot be larger than this value
+    _intro = 1;                 // Used to make sure that adaption starts after (2000-1700)/100 seconds
+    _waitCounter = 1700;        // Counter for start of adaption of playback buffer
+    _erZeroCounter = 0;         // Log how many times er = 0 in consequtive calls to RecTimeProc
+    _useHeader = 0;             // Counts number of "useHeader" detections. Stops at 2.
+
+    _writtenSamples = 0;
+    _writtenSamplesOld = 0;
+    _playedSamplesOld = 0;
+    _sndCardPlayDelay = 0;
+    _sndCardRecDelay = 0;
+
+    WEBRTC_TRACE(kTraceInfo, kTraceUtility, _id,"initial playout status: _playBufDelay=%d, _minPlayBufDelay=%d",
+        _playBufDelay, _minPlayBufDelay);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  InitRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::InitRecording()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_recording)
+    {
+        return -1;
+    }
+
+    if (!_inputDeviceIsSpecified)
+    {
+        return -1;
+    }
+
+    if (_recIsInitialized)
+    {
+        return 0;
+    }
+
+    _avgCPULoad = 0;
+    _playAcc  = 0;
+
+    // Initialize the microphone (devices might have been added or removed)
+    if (InitMicrophone() == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "InitMicrophone() failed");
+    }
+
+    // Enumerate all availiable input devices
+    EnumerateRecordingDevices();
+
+    // Start by closing any existing wave-input devices
+    //
+    MMRESULT res(MMSYSERR_ERROR);
+
+    if (_hWaveIn != NULL)
+    {
+        res = waveInClose(_hWaveIn);
+        if (MMSYSERR_NOERROR != res)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInClose() failed (err=%d)", res);
+            TraceWaveInError(res);
+        }
+    }
+
+    // Set the input wave format
+    //
+    WAVEFORMATEX waveFormat;
+
+    waveFormat.wFormatTag      = WAVE_FORMAT_PCM;
+    waveFormat.nChannels       = _recChannels;  // mono <=> 1, stereo <=> 2
+    waveFormat.nSamplesPerSec  = N_REC_SAMPLES_PER_SEC;
+    waveFormat.wBitsPerSample  = 16;
+    waveFormat.nBlockAlign     = waveFormat.nChannels * (waveFormat.wBitsPerSample/8);
+    waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
+    waveFormat.cbSize          = 0;
+
+    // Open the given waveform-audio input device for recording
+    //
+    HWAVEIN hWaveIn(NULL);
+
+    if (IsUsingInputDeviceIndex())
+    {
+        // verify settings first
+        res = waveInOpen(NULL, _inputDeviceIndex, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_FORMAT_QUERY);
+        if (MMSYSERR_NOERROR == res)
+        {
+            // open the given waveform-audio input device for recording
+            res = waveInOpen(&hWaveIn, _inputDeviceIndex, &waveFormat, 0, 0, CALLBACK_NULL);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening input device corresponding to device ID %u", _inputDeviceIndex);
+        }
+    }
+    else
+    {
+        if (_inputDevice == AudioDeviceModule::kDefaultCommunicationDevice)
+        {
+            // check if it is possible to open the default communication device (supported on Windows 7)
+            res = waveInOpen(NULL, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_MAPPED_DEFAULT_COMMUNICATION_DEVICE | WAVE_FORMAT_QUERY);
+            if (MMSYSERR_NOERROR == res)
+            {
+                // if so, open the default communication device for real
+                res = waveInOpen(&hWaveIn, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_MAPPED_DEFAULT_COMMUNICATION_DEVICE);
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening default communication device");
+            }
+            else
+            {
+                // use default device since default communication device was not avaliable
+                res = waveInOpen(&hWaveIn, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL);
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "unable to open default communication device => using default instead");
+            }
+        }
+        else if (_inputDevice == AudioDeviceModule::kDefaultDevice)
+        {
+            // open default device since it has been requested
+            res = waveInOpen(NULL, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_FORMAT_QUERY);
+            if (MMSYSERR_NOERROR == res)
+            {
+                res = waveInOpen(&hWaveIn, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL);
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening default input device");
+            }
+        }
+    }
+
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "waveInOpen() failed (err=%d)", res);
+        TraceWaveInError(res);
+        return -1;
+    }
+
+    // Log information about the aquired input device
+    //
+    WAVEINCAPS caps;
+
+    res = waveInGetDevCaps((UINT_PTR)hWaveIn, &caps, sizeof(WAVEINCAPS));
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInGetDevCaps() failed (err=%d)", res);
+        TraceWaveInError(res);
+    }
+
+    UINT deviceID(0);
+    res = waveInGetID(hWaveIn, &deviceID);
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInGetID() failed (err=%d)", res);
+        TraceWaveInError(res);
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "utilized device ID : %u", deviceID);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product name       : %s", caps.szPname);
+
+    // Store valid handle for the open waveform-audio input device
+    _hWaveIn = hWaveIn;
+
+    // Store the input wave header as well
+    _waveFormatIn = waveFormat;
+
+    // Mark recording side as initialized
+    _recIsInitialized = true;
+
+    _recBufCount = 0;     // index of active input wave header (<=> input buffer index)
+    _recDelayCount = 0;   // ensures that input buffers are returned with certain delay
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StartRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::StartRecording()
+{
+
+    if (!_recIsInitialized)
+    {
+        return -1;
+    }
+
+    if (_recording)
+    {
+        return 0;
+    }
+
+    // set state to ensure that the recording starts from the audio thread
+    _startRec = true;
+
+    // the audio thread will signal when recording has stopped
+    if (kEventTimeout == _recStartEvent.Wait(10000))
+    {
+        _startRec = false;
+        StopRecording();
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to activate recording");
+        return -1;
+    }
+
+    if (_recording)
+    {
+        // the recording state is set by the audio thread after recording has started
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to activate recording");
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StopRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::StopRecording()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_recIsInitialized)
+    {
+        return 0;
+    }
+
+    if (_hWaveIn == NULL)
+    {
+        return -1;
+    }
+
+    bool wasRecording = _recording;
+    _recIsInitialized = false;
+    _recording = false;
+
+    MMRESULT res;
+
+    // Stop waveform-adio input. If there are any buffers in the queue, the
+    // current buffer will be marked as done (the dwBytesRecorded member in
+    // the header will contain the length of data), but any empty buffers in
+    // the queue will remain there.
+    //
+    res = waveInStop(_hWaveIn);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInStop() failed (err=%d)", res);
+        TraceWaveInError(res);
+    }
+
+    // Stop input on the given waveform-audio input device and resets the current
+    // position to zero. All pending buffers are marked as done and returned to
+    // the application.
+    //
+    res = waveInReset(_hWaveIn);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInReset() failed (err=%d)", res);
+        TraceWaveInError(res);
+    }
+
+    // Clean up the preparation performed by the waveInPrepareHeader function.
+    // Only unprepare header if recording was ever started (and headers are prepared).
+    //
+    if (wasRecording)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "waveInUnprepareHeader() will be performed");
+        for (int n = 0; n < N_BUFFERS_IN; n++)
+        {
+            res = waveInUnprepareHeader(_hWaveIn, &_waveHeaderIn[n], sizeof(WAVEHDR));
+            if (MMSYSERR_NOERROR != res)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInUnprepareHeader() failed (err=%d)", res);
+                TraceWaveInError(res);
+            }
+        }
+    }
+
+    // Close the given waveform-audio input device.
+    //
+    res = waveInClose(_hWaveIn);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInClose() failed (err=%d)", res);
+        TraceWaveInError(res);
+    }
+
+    // Set the wave input handle to NULL
+    //
+    _hWaveIn = NULL;
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_hWaveIn is now set to NULL");
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsWave::RecordingIsInitialized() const
+{
+    return (_recIsInitialized);
+}
+
+// ----------------------------------------------------------------------------
+//  Recording
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsWave::Recording() const
+{
+    return (_recording);
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsWave::PlayoutIsInitialized() const
+{
+    return (_playIsInitialized);
+}
+
+// ----------------------------------------------------------------------------
+//  StartPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::StartPlayout()
+{
+
+    if (!_playIsInitialized)
+    {
+        return -1;
+    }
+
+    if (_playing)
+    {
+        return 0;
+    }
+
+    // set state to ensure that playout starts from the audio thread
+    _startPlay = true;
+
+    // the audio thread will signal when recording has started
+    if (kEventTimeout == _playStartEvent.Wait(10000))
+    {
+        _startPlay = false;
+        StopPlayout();
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to activate playout");
+        return -1;
+    }
+
+    if (_playing)
+    {
+        // the playing state is set by the audio thread after playout has started
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to activate playing");
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StopPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::StopPlayout()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_playIsInitialized)
+    {
+        return 0;
+    }
+
+    if (_hWaveOut == NULL)
+    {
+        return -1;
+    }
+
+    _playIsInitialized = false;
+    _playing = false;
+    _sndCardPlayDelay = 0;
+    _sndCardRecDelay = 0;
+
+    MMRESULT res;
+
+    // The waveOutReset function stops playback on the given waveform-audio
+    // output device and resets the current position to zero. All pending
+    // playback buffers are marked as done (WHDR_DONE) and returned to the application.
+    // After this function returns, the application can send new playback buffers
+    // to the device by calling waveOutWrite, or close the device by calling waveOutClose.
+    //
+    res = waveOutReset(_hWaveOut);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutReset() failed (err=%d)", res);
+        TraceWaveOutError(res);
+    }
+
+    // The waveOutUnprepareHeader function cleans up the preparation performed
+    // by the waveOutPrepareHeader function. This function must be called after
+    // the device driver is finished with a data block.
+    // You must call this function before freeing the buffer.
+    //
+    for (int n = 0; n < N_BUFFERS_OUT; n++)
+    {
+        res = waveOutUnprepareHeader(_hWaveOut, &_waveHeaderOut[n], sizeof(WAVEHDR));
+        if (MMSYSERR_NOERROR != res)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutUnprepareHeader() failed (err=%d)", res);
+            TraceWaveOutError(res);
+        }
+    }
+
+    // The waveOutClose function closes the given waveform-audio output device.
+    // The close operation fails if the device is still playing a waveform-audio
+    // buffer that was previously sent by calling waveOutWrite. Before calling
+    // waveOutClose, the application must wait for all buffers to finish playing
+    // or call the waveOutReset function to terminate playback.
+    //
+    res = waveOutClose(_hWaveOut);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutClose() failed (err=%d)", res);
+        TraceWaveOutError(res);
+    }
+
+    _hWaveOut = NULL;
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "_hWaveOut is now set to NULL");
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutDelay
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::PlayoutDelay(WebRtc_UWord16& delayMS) const
+{
+    CriticalSectionScoped lock(&_critSect);
+    delayMS = (WebRtc_UWord16)_sndCardPlayDelay;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingDelay
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::RecordingDelay(WebRtc_UWord16& delayMS) const
+{
+    CriticalSectionScoped lock(&_critSect);
+    delayMS = (WebRtc_UWord16)_sndCardRecDelay;
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  Playing
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsWave::Playing() const
+{
+    return (_playing);
+}
+// ----------------------------------------------------------------------------
+//  SetPlayoutBuffer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::SetPlayoutBuffer(const AudioDeviceModule::BufferType type, WebRtc_UWord16 sizeMS)
+{
+    CriticalSectionScoped lock(&_critSect);
+    _playBufType = type;
+    if (type == AudioDeviceModule::kFixedBufferSize)
+    {
+        _playBufDelayFixed = sizeMS;
+    }
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutBuffer
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::PlayoutBuffer(AudioDeviceModule::BufferType& type, WebRtc_UWord16& sizeMS) const
+{
+    CriticalSectionScoped lock(&_critSect);
+    type = _playBufType;
+    if (type == AudioDeviceModule::kFixedBufferSize)
+    {
+        sizeMS = _playBufDelayFixed;
+    }
+    else
+    {
+        sizeMS = _playBufDelay;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  CPULoad
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::CPULoad(WebRtc_UWord16& load) const
+{
+
+    load = static_cast<WebRtc_UWord16>(100*_avgCPULoad);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutWarning
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsWave::PlayoutWarning() const
+{
+    return ( _playWarning > 0);
+}
+
+// ----------------------------------------------------------------------------
+//  PlayoutError
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsWave::PlayoutError() const
+{
+    return ( _playError > 0);
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingWarning
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsWave::RecordingWarning() const
+{
+    return ( _recWarning > 0);
+}
+
+// ----------------------------------------------------------------------------
+//  RecordingError
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsWave::RecordingError() const
+{
+    return ( _recError > 0);
+}
+
+// ----------------------------------------------------------------------------
+//  ClearPlayoutWarning
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsWave::ClearPlayoutWarning()
+{
+    _playWarning = 0;
+}
+
+// ----------------------------------------------------------------------------
+//  ClearPlayoutError
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsWave::ClearPlayoutError()
+{
+    _playError = 0;
+}
+
+// ----------------------------------------------------------------------------
+//  ClearRecordingWarning
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsWave::ClearRecordingWarning()
+{
+    _recWarning = 0;
+}
+
+// ----------------------------------------------------------------------------
+//  ClearRecordingError
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsWave::ClearRecordingError()
+{
+    _recError = 0;
+}
+
+// ============================================================================
+//                                 Private Methods
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  InputSanityCheckAfterUnlockedPeriod
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::InputSanityCheckAfterUnlockedPeriod() const
+{
+    if (_hWaveIn == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "input state has been modified during unlocked period");
+        return -1;
+    }
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  OutputSanityCheckAfterUnlockedPeriod
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::OutputSanityCheckAfterUnlockedPeriod() const
+{
+    if (_hWaveOut == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "output state has been modified during unlocked period");
+        return -1;
+    }
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  EnumeratePlayoutDevices
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::EnumeratePlayoutDevices()
+{
+
+    WebRtc_UWord16 nDevices(PlayoutDevices());
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "===============================================================");
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "#output devices: %u", nDevices);
+
+    WAVEOUTCAPS caps;
+    MMRESULT res;
+
+    for (UINT deviceID = 0; deviceID < nDevices; deviceID++)
+    {
+        res = waveOutGetDevCaps(deviceID, &caps, sizeof(WAVEOUTCAPS));
+        if (res != MMSYSERR_NOERROR)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutGetDevCaps() failed (err=%d)", res);
+        }
+
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "===============================================================");
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Device ID %u:", deviceID);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "manufacturer ID      : %u", caps.wMid);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product ID           : %u",caps.wPid);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "version of driver    : %u.%u", HIBYTE(caps.vDriverVersion), LOBYTE(caps.vDriverVersion));
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product name         : %s", caps.szPname);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "dwFormats            : 0x%x", caps.dwFormats);
+        if (caps.dwFormats & WAVE_FORMAT_48S16)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "  48kHz,stereo,16bit : SUPPORTED");
+        }
+        else
+        {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, " 48kHz,stereo,16bit  : *NOT* SUPPORTED");
+        }
+        if (caps.dwFormats & WAVE_FORMAT_48M16)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "  48kHz,mono,16bit   : SUPPORTED");
+        }
+        else
+        {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, " 48kHz,mono,16bit    : *NOT* SUPPORTED");
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wChannels            : %u", caps.wChannels);
+        TraceSupportFlags(caps.dwSupport);
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  EnumerateRecordingDevices
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::EnumerateRecordingDevices()
+{
+
+    WebRtc_UWord16 nDevices(RecordingDevices());
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "===============================================================");
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "#input devices: %u", nDevices);
+
+    WAVEINCAPS caps;
+    MMRESULT res;
+
+    for (UINT deviceID = 0; deviceID < nDevices; deviceID++)
+    {
+        res = waveInGetDevCaps(deviceID, &caps, sizeof(WAVEINCAPS));
+        if (res != MMSYSERR_NOERROR)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInGetDevCaps() failed (err=%d)", res);
+        }
+
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "===============================================================");
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Device ID %u:", deviceID);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "manufacturer ID      : %u", caps.wMid);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product ID           : %u",caps.wPid);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "version of driver    : %u.%u", HIBYTE(caps.vDriverVersion), LOBYTE(caps.vDriverVersion));
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product name         : %s", caps.szPname);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "dwFormats            : 0x%x", caps.dwFormats);
+        if (caps.dwFormats & WAVE_FORMAT_48S16)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "  48kHz,stereo,16bit : SUPPORTED");
+        }
+        else
+        {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, " 48kHz,stereo,16bit  : *NOT* SUPPORTED");
+        }
+        if (caps.dwFormats & WAVE_FORMAT_48M16)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "  48kHz,mono,16bit   : SUPPORTED");
+        }
+        else
+        {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, " 48kHz,mono,16bit    : *NOT* SUPPORTED");
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "wChannels            : %u", caps.wChannels);
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  TraceSupportFlags
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsWave::TraceSupportFlags(DWORD dwSupport) const
+{
+    TCHAR buf[256];
+
+    StringCchPrintf(buf, 128, TEXT("support flags        : 0x%x "), dwSupport);
+
+    if (dwSupport & WAVECAPS_PITCH)
+    {
+        // supports pitch control
+        StringCchCat(buf, 256, TEXT("(PITCH)"));
+    }
+    if (dwSupport & WAVECAPS_PLAYBACKRATE)
+    {
+        // supports playback rate control
+        StringCchCat(buf, 256, TEXT("(PLAYBACKRATE)"));
+    }
+    if (dwSupport & WAVECAPS_VOLUME)
+    {
+        // supports volume control
+        StringCchCat(buf, 256, TEXT("(VOLUME)"));
+    }
+    if (dwSupport & WAVECAPS_LRVOLUME)
+    {
+        // supports separate left and right volume control
+        StringCchCat(buf, 256, TEXT("(LRVOLUME)"));
+    }
+    if (dwSupport & WAVECAPS_SYNC)
+    {
+        // the driver is synchronous and will block while playing a buffer
+        StringCchCat(buf, 256, TEXT("(SYNC)"));
+    }
+    if (dwSupport & WAVECAPS_SAMPLEACCURATE)
+    {
+        // returns sample-accurate position information
+        StringCchCat(buf, 256, TEXT("(SAMPLEACCURATE)"));
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%S", buf);
+}
+
+// ----------------------------------------------------------------------------
+//  TraceWaveInError
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsWave::TraceWaveInError(MMRESULT error) const
+{
+    TCHAR buf[MAXERRORLENGTH];
+    TCHAR msg[MAXERRORLENGTH];
+
+    StringCchPrintf(buf, MAXERRORLENGTH, TEXT("Error details: "));
+    waveInGetErrorText(error, msg, MAXERRORLENGTH);
+    StringCchCat(buf, MAXERRORLENGTH, msg);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%S", buf);
+}
+
+// ----------------------------------------------------------------------------
+//  TraceWaveOutError
+// ----------------------------------------------------------------------------
+
+void AudioDeviceWindowsWave::TraceWaveOutError(MMRESULT error) const
+{
+    TCHAR buf[MAXERRORLENGTH];
+    TCHAR msg[MAXERRORLENGTH];
+
+    StringCchPrintf(buf, MAXERRORLENGTH, TEXT("Error details: "));
+    waveOutGetErrorText(error, msg, MAXERRORLENGTH);
+    StringCchCat(buf, MAXERRORLENGTH, msg);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%S", buf);
+}
+
+// ----------------------------------------------------------------------------
+//  PrepareStartPlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::PrepareStartPlayout()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_hWaveOut == NULL)
+    {
+        return -1;
+    }
+
+    // A total of 30ms of data is immediately placed in the SC buffer
+    //
+    int8_t zeroVec[4*PLAY_BUF_SIZE_IN_SAMPLES];  // max allocation
+    memset(zeroVec, 0, 4*PLAY_BUF_SIZE_IN_SAMPLES);
+
+    {
+        Write(zeroVec, PLAY_BUF_SIZE_IN_SAMPLES);
+        Write(zeroVec, PLAY_BUF_SIZE_IN_SAMPLES);
+        Write(zeroVec, PLAY_BUF_SIZE_IN_SAMPLES);
+    }
+
+    _playAcc = 0;
+    _playWarning = 0;
+    _playError = 0;
+    _dc_diff_mean = 0;
+    _dc_y_prev = 0;
+    _dc_penalty_counter = 20;
+    _dc_prevtime = 0;
+    _dc_prevplay = 0;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  PrepareStartRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::PrepareStartRecording()
+{
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_hWaveIn == NULL)
+    {
+        return -1;
+    }
+
+    _playAcc = 0;
+    _recordedBytes = 0;
+    _recPutBackDelay = REC_PUT_BACK_DELAY;
+
+    MMRESULT res;
+    MMTIME mmtime;
+    mmtime.wType = TIME_SAMPLES;
+
+    res = waveInGetPosition(_hWaveIn, &mmtime, sizeof(mmtime));
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInGetPosition(TIME_SAMPLES) failed (err=%d)", res);
+        TraceWaveInError(res);
+    }
+
+    _read_samples = mmtime.u.sample;
+    _read_samples_old = _read_samples;
+    _rec_samples_old = mmtime.u.sample;
+    _wrapCounter = 0;
+
+    for (int n = 0; n < N_BUFFERS_IN; n++)
+    {
+        const WebRtc_UWord8 nBytesPerSample = 2*_recChannels;
+
+        // set up the input wave header
+        _waveHeaderIn[n].lpData          = reinterpret_cast<LPSTR>(&_recBuffer[n]);
+        _waveHeaderIn[n].dwBufferLength  = nBytesPerSample * REC_BUF_SIZE_IN_SAMPLES;
+        _waveHeaderIn[n].dwFlags         = 0;
+        _waveHeaderIn[n].dwBytesRecorded = 0;
+        _waveHeaderIn[n].dwUser          = 0;
+
+        memset(_recBuffer[n], 0, nBytesPerSample * REC_BUF_SIZE_IN_SAMPLES);
+
+        // prepare a buffer for waveform-audio input
+        res = waveInPrepareHeader(_hWaveIn, &_waveHeaderIn[n], sizeof(WAVEHDR));
+        if (MMSYSERR_NOERROR != res)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInPrepareHeader(%d) failed (err=%d)", n, res);
+            TraceWaveInError(res);
+        }
+
+        // send an input buffer to the given waveform-audio input device
+        res = waveInAddBuffer(_hWaveIn, &_waveHeaderIn[n], sizeof(WAVEHDR));
+        if (MMSYSERR_NOERROR != res)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInAddBuffer(%d) failed (err=%d)", n, res);
+            TraceWaveInError(res);
+        }
+    }
+
+    // start input on the given waveform-audio input device
+    res = waveInStart(_hWaveIn);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInStart() failed (err=%d)", res);
+        TraceWaveInError(res);
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  GetPlayoutBufferDelay
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::GetPlayoutBufferDelay(WebRtc_UWord32& writtenSamples, WebRtc_UWord32& playedSamples)
+{
+    int i;
+    int ms_Header;
+    long playedDifference;
+    int msecInPlayoutBuffer(0);   // #milliseconds of audio in the playout buffer
+
+    const WebRtc_UWord16 nSamplesPerMs = (WebRtc_UWord16)(N_PLAY_SAMPLES_PER_SEC/1000);  // default is 48000/1000 = 48
+
+    MMRESULT res;
+    MMTIME mmtime;
+
+    if (!_playing)
+    {
+        playedSamples = 0;
+        return (0);
+    }
+
+    // Retrieve the current playback position.
+    //
+    mmtime.wType = TIME_SAMPLES;  // number of waveform-audio samples
+    res = waveOutGetPosition(_hWaveOut, &mmtime, sizeof(mmtime));
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutGetPosition() failed (err=%d)", res);
+        TraceWaveOutError(res);
+    }
+
+    writtenSamples = _writtenSamples;   // #samples written to the playout buffer
+    playedSamples = mmtime.u.sample;    // current playout position in the playout buffer
+
+    // derive remaining amount (in ms) of data in the playout buffer
+    msecInPlayoutBuffer = ((writtenSamples - playedSamples)/nSamplesPerMs);
+    // DEBUG_PRINTP("msecInPlayoutBuffer=%u\n", msecInPlayoutBuffer);
+
+    playedDifference = (long) (_playedSamplesOld - playedSamples);
+
+    if (playedDifference > 64000)
+    {
+        // If the sound cards number-of-played-out-samples variable wraps around before
+        // written_sampels wraps around this needs to be adjusted. This can happen on
+        // sound cards that uses less than 32 bits to keep track of number of played out
+        // sampels. To avoid being fooled by sound cards that sometimes produces false
+        // output we compare old value minus the new value with a large value. This is
+        // neccessary because some SC:s produce an output like 153, 198, 175, 230 which
+        // would trigger the wrap-around function if we didn't compare with a large value.
+        // The value 64000 is chosen because 2^16=65536 so we allow wrap around at 16 bits.
+
+        i = 31;
+        while((_playedSamplesOld <= (unsigned long)POW2(i)) && (i > 14)) {
+            i--;
+        }
+
+        if((i < 31) && (i > 14)) {
+            // Avoid adjusting when there is 32-bit wrap-around since that is
+            // something neccessary.
+            //
+            WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "msecleft() => wrap around occured: %d bits used by sound card)", (i+1));
+
+            _writtenSamples = _writtenSamples - POW2(i + 1);
+            writtenSamples = _writtenSamples;
+            msecInPlayoutBuffer = ((writtenSamples - playedSamples)/nSamplesPerMs);
+        }
+    }
+    else if ((_writtenSamplesOld > POW2(31)) && (writtenSamples < 96000))
+    {
+        // Wrap around as expected after having used all 32 bits. (But we still
+        // test if the wrap around happened earlier which it should not)
+
+        i = 31;
+        while (_writtenSamplesOld <= (unsigned long)POW2(i)) {
+            i--;
+        }
+
+        WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "  msecleft() (wrap around occured after having used all 32 bits)");
+
+        _writtenSamplesOld = writtenSamples;
+        _playedSamplesOld = playedSamples;
+        msecInPlayoutBuffer = (int)((writtenSamples + POW2(i + 1) - playedSamples)/nSamplesPerMs);
+
+    }
+    else if ((writtenSamples < 96000) && (playedSamples > POW2(31)))
+    {
+        // Wrap around has, as expected, happened for written_sampels before
+        // playedSampels so we have to adjust for this until also playedSampels
+        // has had wrap around.
+
+        WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "  msecleft() (wrap around occured: correction of output is done)");
+
+        _writtenSamplesOld = writtenSamples;
+        _playedSamplesOld = playedSamples;
+        msecInPlayoutBuffer = (int)((writtenSamples + POW2(32) - playedSamples)/nSamplesPerMs);
+    }
+
+    _writtenSamplesOld = writtenSamples;
+    _playedSamplesOld = playedSamples;
+
+
+    // We use the following formaula to track that playout works as it should
+    // y=playedSamples/48 - timeGetTime();
+    // y represent the clock drift between system clock and sound card clock - should be fairly stable
+    // When the exponential mean value of diff(y) goes away from zero something is wrong
+    // The exponential formula will accept 1% clock drift but not more
+    // The driver error means that we will play to little audio and have a high negative clock drift
+    // We kick in our alternative method when the clock drift reaches 20%
+
+    int diff,y;
+    int unsigned time =0;
+
+    // If we have other problems that causes playout glitches
+    // we don't want to switch playout method.
+    // Check if playout buffer is extremely low, or if we haven't been able to
+    // exectue our code in more than 40 ms
+
+    time = timeGetTime();
+
+    if ((msecInPlayoutBuffer < 20) || (time - _dc_prevtime > 40))
+    {
+        _dc_penalty_counter = 100;
+    }
+
+    if ((playedSamples != 0))
+    {
+        y = playedSamples/48 - time;
+        if ((_dc_y_prev != 0) && (_dc_penalty_counter == 0))
+        {
+            diff = y - _dc_y_prev;
+            _dc_diff_mean = (990*_dc_diff_mean)/1000 + 10*diff;
+        }
+        _dc_y_prev = y;
+    }
+
+    if (_dc_penalty_counter)
+    {
+        _dc_penalty_counter--;
+    }
+
+    if (_dc_diff_mean < -200)
+    {
+        // Always reset the filter
+        _dc_diff_mean = 0;
+
+        // Problem is detected. Switch delay method and set min buffer to 80.
+        // Reset the filter and keep monitoring the filter output.
+        // If issue is detected a second time, increase min buffer to 100.
+        // If that does not help, we must modify this scheme further.
+
+        _useHeader++;
+        if (_useHeader == 1)
+        {
+            _minPlayBufDelay = 80;
+            _playWarning = 1;   // only warn first time
+            WEBRTC_TRACE(kTraceInfo, kTraceUtility, -1, "Modification #1: _useHeader = %d, _minPlayBufDelay = %d", _useHeader, _minPlayBufDelay);
+        }
+        else if (_useHeader == 2)
+        {
+            _minPlayBufDelay = 100;   // add some more safety
+            WEBRTC_TRACE(kTraceInfo, kTraceUtility, -1, "Modification #2: _useHeader = %d, _minPlayBufDelay = %d", _useHeader, _minPlayBufDelay);
+        }
+        else
+        {
+            // This state should not be entered... (HA)
+            WEBRTC_TRACE (kTraceWarning, kTraceUtility, -1, "further actions are required!");
+        }
+        if (_playWarning == 1)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "pending playout warning exists");
+        }
+        _playWarning = 1;  // triggers callback from module process thread
+        WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "kPlayoutWarning message posted: switching to alternative playout delay method");
+    }
+    _dc_prevtime = time;
+    _dc_prevplay = playedSamples;
+
+    // Try a very rough method of looking at how many buffers are still playing
+    ms_Header = 0;
+    for (i = 0; i < N_BUFFERS_OUT; i++) {
+        if ((_waveHeaderOut[i].dwFlags & WHDR_INQUEUE)!=0) {
+            ms_Header += 10;
+        }
+    }
+
+    if ((ms_Header-50) > msecInPlayoutBuffer) {
+        // Test for cases when GetPosition appears to be screwed up (currently just log....)
+        TCHAR infoStr[300];
+        if (_no_of_msecleft_warnings%20==0)
+        {
+            StringCchPrintf(infoStr, 300, TEXT("writtenSamples=%i, playedSamples=%i, msecInPlayoutBuffer=%i, ms_Header=%i"), writtenSamples, playedSamples, msecInPlayoutBuffer, ms_Header);
+            WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "%S", infoStr);
+        }
+        _no_of_msecleft_warnings++;
+    }
+
+    // If this is true we have had a problem with the playout
+    if (_useHeader > 0)
+    {
+        return (ms_Header);
+    }
+
+
+    if (ms_Header < msecInPlayoutBuffer)
+    {
+        if (_no_of_msecleft_warnings % 100 == 0)
+        {
+            TCHAR str[300];
+            StringCchPrintf(str, 300, TEXT("_no_of_msecleft_warnings=%i, msecInPlayoutBuffer=%i ms_Header=%i (minBuffer=%i buffersize=%i writtenSamples=%i playedSamples=%i)"),
+                _no_of_msecleft_warnings, msecInPlayoutBuffer, ms_Header, _minPlayBufDelay, _playBufDelay, writtenSamples, playedSamples);
+            WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "%S", str);
+        }
+        _no_of_msecleft_warnings++;
+        ms_Header -= 6; // Round off as we only have 10ms resolution + Header info is usually slightly delayed compared to GetPosition
+
+        if (ms_Header < 0)
+            ms_Header = 0;
+
+        return (ms_Header);
+    }
+    else
+    {
+        return (msecInPlayoutBuffer);
+    }
+}
+
+// ----------------------------------------------------------------------------
+//  GetRecordingBufferDelay
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::GetRecordingBufferDelay(WebRtc_UWord32& readSamples, WebRtc_UWord32& recSamples)
+{
+    long recDifference;
+    MMTIME mmtime;
+    MMRESULT mmr;
+
+    const WebRtc_UWord16 nSamplesPerMs = (WebRtc_UWord16)(N_REC_SAMPLES_PER_SEC/1000);  // default is 48000/1000 = 48
+
+    // Retrieve the current input position of the given waveform-audio input device
+    //
+    mmtime.wType = TIME_SAMPLES;
+    mmr = waveInGetPosition(_hWaveIn, &mmtime, sizeof(mmtime));
+    if (MMSYSERR_NOERROR != mmr)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInGetPosition() failed (err=%d)", mmr);
+        TraceWaveInError(mmr);
+    }
+
+    readSamples = _read_samples;    // updated for each full fram in RecProc()
+    recSamples = mmtime.u.sample;   // remaining time in input queue (recorded but not read yet)
+
+    recDifference = (long) (_rec_samples_old - recSamples);
+
+    if( recDifference > 64000) {
+        WEBRTC_TRACE (kTraceDebug, kTraceUtility, -1,"WRAP 1 (recDifference =%d)", recDifference);
+        // If the sound cards number-of-recorded-samples variable wraps around before
+        // read_sampels wraps around this needs to be adjusted. This can happen on
+        // sound cards that uses less than 32 bits to keep track of number of played out
+        // sampels. To avoid being fooled by sound cards that sometimes produces false
+        // output we compare old value minus the new value with a large value. This is
+        // neccessary because some SC:s produce an output like 153, 198, 175, 230 which
+        // would trigger the wrap-around function if we didn't compare with a large value.
+        // The value 64000 is chosen because 2^16=65536 so we allow wrap around at 16 bits.
+        //
+        int i = 31;
+        while((_rec_samples_old <= (unsigned long)POW2(i)) && (i > 14))
+            i--;
+
+        if((i < 31) && (i > 14)) {
+            // Avoid adjusting when there is 32-bit wrap-around since that is
+            // somethying neccessary.
+            //
+            _read_samples = _read_samples - POW2(i + 1);
+            readSamples = _read_samples;
+            _wrapCounter++;
+        } else {
+            WEBRTC_TRACE (kTraceWarning, kTraceUtility, -1,"AEC (_rec_samples_old %d recSamples %d)",_rec_samples_old, recSamples);
+        }
+    }
+
+    if((_wrapCounter>200)){
+        // Do nothing, handled later
+    }
+    else if((_rec_samples_old > POW2(31)) && (recSamples < 96000)) {
+        WEBRTC_TRACE (kTraceDebug, kTraceUtility, -1,"WRAP 2 (_rec_samples_old %d recSamples %d)",_rec_samples_old, recSamples);
+        // Wrap around as expected after having used all 32 bits.
+        _read_samples_old = readSamples;
+        _rec_samples_old = recSamples;
+        _wrapCounter++;
+        return (int)((recSamples + POW2(32) - readSamples)/nSamplesPerMs);
+
+
+    } else if((recSamples < 96000) && (readSamples > POW2(31))) {
+        WEBRTC_TRACE (kTraceDebug, kTraceUtility, -1,"WRAP 3 (readSamples %d recSamples %d)",readSamples, recSamples);
+        // Wrap around has, as expected, happened for rec_sampels before
+        // readSampels so we have to adjust for this until also readSampels
+        // has had wrap around.
+        _read_samples_old = readSamples;
+        _rec_samples_old = recSamples;
+        _wrapCounter++;
+        return (int)((recSamples + POW2(32) - readSamples)/nSamplesPerMs);
+    }
+
+    _read_samples_old = _read_samples;
+    _rec_samples_old = recSamples;
+    int res=(((int)_rec_samples_old - (int)_read_samples_old)/nSamplesPerMs);
+
+    if((res > 2000)||(res < 0)||(_wrapCounter>200)){
+        // Reset everything
+        WEBRTC_TRACE (kTraceWarning, kTraceUtility, -1,"msec_read error (res %d wrapCounter %d)",res, _wrapCounter);
+        MMTIME mmtime;
+        mmtime.wType = TIME_SAMPLES;
+
+        mmr=waveInGetPosition(_hWaveIn, &mmtime, sizeof(mmtime));
+        if (mmr != MMSYSERR_NOERROR) {
+            WEBRTC_TRACE (kTraceWarning, kTraceUtility, -1, "waveInGetPosition failed (mmr=%d)", mmr);
+        }
+        _read_samples=mmtime.u.sample;
+        _read_samples_old=_read_samples;
+        _rec_samples_old=mmtime.u.sample;
+
+        // Guess a decent value
+        res = 20;
+    }
+
+    _wrapCounter = 0;
+    return res;
+}
+
+// ============================================================================
+//                                  Thread Methods
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  ThreadFunc
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsWave::ThreadFunc(void* pThis)
+{
+    return (static_cast<AudioDeviceWindowsWave*>(pThis)->ThreadProcess());
+}
+
+// ----------------------------------------------------------------------------
+//  ThreadProcess
+// ----------------------------------------------------------------------------
+
+bool AudioDeviceWindowsWave::ThreadProcess()
+{
+    WebRtc_UWord32 time(0);
+    WebRtc_UWord32 playDiff(0);
+    WebRtc_UWord32 recDiff(0);
+
+    LONGLONG playTime(0);
+    LONGLONG recTime(0);
+
+    switch (_timeEvent.Wait(1000))
+    {
+    case kEventSignaled:
+        break;
+    case kEventError:
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "EventWrapper::Wait() failed => restarting timer");
+        _timeEvent.StopTimer();
+        _timeEvent.StartTimer(true, TIMER_PERIOD_MS);
+        return true;
+    case kEventTimeout:
+        return true;
+    }
+
+    time = AudioDeviceUtility::GetTimeInMS();
+
+    if (_startPlay)
+    {
+        if (PrepareStartPlayout() == 0)
+        {
+            _prevTimerCheckTime = time;
+            _prevPlayTime = time;
+            _startPlay = false;
+            _playing = true;
+            _playStartEvent.Set();
+        }
+    }
+
+    if (_startRec)
+    {
+        if (PrepareStartRecording() == 0)
+        {
+            _prevTimerCheckTime = time;
+            _prevRecTime = time;
+            _prevRecByteCheckTime = time;
+            _startRec = false;
+            _recording = true;
+            _recStartEvent.Set();
+        }
+    }
+
+    if (_playing)
+    {
+        playDiff = time - _prevPlayTime;
+    }
+
+    if (_recording)
+    {
+        recDiff = time - _prevRecTime;
+    }
+
+    if (_playing || _recording)
+    {
+        RestartTimerIfNeeded(time);
+    }
+
+    if (_playing &&
+        (playDiff > (WebRtc_UWord32)(_dTcheckPlayBufDelay - 1)) ||
+        (playDiff < 0))
+    {
+        Lock();
+        if (_playing)
+        {
+            if (PlayProc(playTime) == -1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "PlayProc() failed");
+            }
+            _prevPlayTime = time;
+            if (playTime != 0)
+                _playAcc += playTime;
+        }
+        UnLock();
+    }
+
+    if (_playing && (playDiff > 12))
+    {
+        // It has been a long time since we were able to play out, try to
+        // compensate by calling PlayProc again.
+        //
+        Lock();
+        if (_playing)
+        {
+            if (PlayProc(playTime))
+            {
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "PlayProc() failed");
+            }
+            _prevPlayTime = time;
+            if (playTime != 0)
+                _playAcc += playTime;
+        }
+        UnLock();
+    }
+
+    if (_recording &&
+       (recDiff > REC_CHECK_TIME_PERIOD_MS) ||
+       (recDiff < 0))
+    {
+        Lock();
+        if (_recording)
+        {
+            WebRtc_Word32 nRecordedBytes(0);
+            WebRtc_UWord16 maxIter(10);
+
+            // Deliver all availiable recorded buffers and update the CPU load measurement.
+            // We use a while loop here to compensate for the fact that the multi-media timer
+            // can sometimed enter a "bad state" after hibernation where the resolution is
+            // reduced from ~1ms to ~10-15 ms.
+            //
+            while ((nRecordedBytes = RecProc(recTime)) > 0)
+            {
+                maxIter--;
+                _recordedBytes += nRecordedBytes;
+                if (recTime && _perfFreq.QuadPart)
+                {
+                    // Measure the average CPU load:
+                    // This is a simplified expression where an exponential filter is used:
+                    //   _avgCPULoad = 0.99 * _avgCPULoad + 0.01 * newCPU,
+                    //   newCPU = (recTime+playAcc)/f is time in seconds
+                    //   newCPU / 0.01 is the fraction of a 10 ms period
+                    // The two 0.01 cancels each other.
+                    // NOTE - assumes 10ms audio buffers.
+                    //
+                    _avgCPULoad = (float)(_avgCPULoad*.99 + (recTime+_playAcc)/(double)(_perfFreq.QuadPart));
+                    _playAcc = 0;
+                }
+                if (maxIter == 0)
+                {
+                    // If we get this message ofte, our compensation scheme is not sufficient.
+                    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "failed to compensate for reduced MM-timer resolution");
+                }
+            }
+
+            if (nRecordedBytes == -1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "RecProc() failed");
+            }
+
+            _prevRecTime = time;
+
+            // Monitor the recording process and generate error/warning callbacks if needed
+            MonitorRecording(time);
+        }
+        UnLock();
+    }
+
+    if (!_recording)
+    {
+        _prevRecByteCheckTime = time;
+        _avgCPULoad = 0;
+    }
+
+    return true;
+}
+
+// ----------------------------------------------------------------------------
+//  RecProc
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::RecProc(LONGLONG& consumedTime)
+{
+    MMRESULT res;
+    WebRtc_UWord32 bufCount(0);
+    WebRtc_UWord32 nBytesRecorded(0);
+
+    consumedTime = 0;
+
+    // count modulo N_BUFFERS_IN (0,1,2,...,(N_BUFFERS_IN-1),0,1,2,..)
+    if (_recBufCount == N_BUFFERS_IN)
+    {
+        _recBufCount = 0;
+    }
+
+    bufCount = _recBufCount;
+
+    // take mono/stereo mode into account when deriving size of a full buffer
+    const WebRtc_UWord16 bytesPerSample = 2*_recChannels;
+    const WebRtc_UWord32 fullBufferSizeInBytes = bytesPerSample * REC_BUF_SIZE_IN_SAMPLES;
+
+    // read number of recorded bytes for the given input-buffer
+    nBytesRecorded = _waveHeaderIn[bufCount].dwBytesRecorded;
+
+    if (nBytesRecorded == fullBufferSizeInBytes ||
+       (nBytesRecorded > 0))
+    {
+        WebRtc_Word32 msecOnPlaySide;
+        WebRtc_Word32 msecOnRecordSide;
+        WebRtc_UWord32 writtenSamples;
+        WebRtc_UWord32 playedSamples;
+        WebRtc_UWord32 readSamples, recSamples;
+        bool send = true;
+
+        WebRtc_UWord32 nSamplesRecorded = (nBytesRecorded/bytesPerSample);  // divide by 2 or 4 depending on mono or stereo
+
+        if (nBytesRecorded == fullBufferSizeInBytes)
+        {
+            _timesdwBytes = 0;
+        }
+        else
+        {
+            // Test if it is stuck on this buffer
+            _timesdwBytes++;
+            if (_timesdwBytes < 5)
+            {
+                // keep trying
+                return (0);
+            }
+            else
+            {
+                WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id,"nBytesRecorded=%d => don't use", nBytesRecorded);
+                _timesdwBytes = 0;
+                send = false;
+            }
+        }
+
+        // store the recorded buffer (no action will be taken if the #recorded samples is not a full buffer)
+        _ptrAudioBuffer->SetRecordedBuffer(_waveHeaderIn[bufCount].lpData, nSamplesRecorded);
+
+        // update #samples read
+        _read_samples += nSamplesRecorded;
+
+        // Check how large the playout and recording buffers are on the sound card.
+        // This info is needed by the AEC.
+        //
+        msecOnPlaySide = GetPlayoutBufferDelay(writtenSamples, playedSamples);
+        msecOnRecordSide = GetRecordingBufferDelay(readSamples, recSamples);
+
+        // If we use the alternative playout delay method, skip the clock drift compensation
+        // since it will be an unreliable estimate and might degrade AEC performance.
+        WebRtc_Word32 drift = (_useHeader > 0) ? 0 : GetClockDrift(playedSamples, recSamples);
+
+        _ptrAudioBuffer->SetVQEData(msecOnPlaySide, msecOnRecordSide, drift);
+
+        // Store the play and rec delay values for video synchronization
+        _sndCardPlayDelay = msecOnPlaySide;
+        _sndCardRecDelay = msecOnRecordSide;
+
+        LARGE_INTEGER t1,t2;
+
+        if (send)
+        {
+            QueryPerformanceCounter(&t1);
+
+            // deliver recorded samples at specified sample rate, mic level etc. to the observer using callback
+            UnLock();
+            _ptrAudioBuffer->DeliverRecordedData();
+            Lock();
+
+            QueryPerformanceCounter(&t2);
+
+            if (InputSanityCheckAfterUnlockedPeriod() == -1)
+            {
+                // assert(false);
+                return -1;
+            }
+        }
+
+        if (_AGC)
+        {
+            WebRtc_UWord32  newMicLevel = _ptrAudioBuffer->NewMicLevel();
+            if (newMicLevel != 0)
+            {
+                // The VQE will only deliver non-zero microphone levels when a change is needed.
+                WEBRTC_TRACE(kTraceStream, kTraceUtility, _id,"AGC change of volume: => new=%u", newMicLevel);
+
+                // We store this outside of the audio buffer to avoid 
+                // having it overwritten by the getter thread.
+                _newMicLevel = newMicLevel;
+                SetEvent(_hSetCaptureVolumeEvent);
+            }
+        }
+
+        // return utilized buffer to queue after specified delay (default is 4)
+        if (_recDelayCount > (_recPutBackDelay-1))
+        {
+            // deley buffer counter to compensate for "put-back-delay"
+            bufCount = (bufCount + N_BUFFERS_IN - _recPutBackDelay) % N_BUFFERS_IN;
+
+            // reset counter so we can make new detection
+            _waveHeaderIn[bufCount].dwBytesRecorded = 0;
+
+            // return the utilized wave-header after certain delay (given by _recPutBackDelay)
+            res = waveInUnprepareHeader(_hWaveIn, &(_waveHeaderIn[bufCount]), sizeof(WAVEHDR));
+            if (MMSYSERR_NOERROR != res)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInUnprepareHeader(%d) failed (err=%d)", bufCount, res);
+                TraceWaveInError(res);
+            }
+
+            // ensure that the utilized header can be used again
+            res = waveInPrepareHeader(_hWaveIn, &(_waveHeaderIn[bufCount]), sizeof(WAVEHDR));
+            if (res != MMSYSERR_NOERROR)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "waveInPrepareHeader(%d) failed (err=%d)", bufCount, res);
+                TraceWaveInError(res);
+                return -1;
+            }
+
+            // add the utilized buffer to the queue again
+            res = waveInAddBuffer(_hWaveIn, &(_waveHeaderIn[bufCount]), sizeof(WAVEHDR));
+            if (res != MMSYSERR_NOERROR)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "waveInAddBuffer(%d) failed (err=%d)", bufCount, res);
+                TraceWaveInError(res);
+                if (_recPutBackDelay < 50)
+                {
+                    _recPutBackDelay++;
+                    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "_recPutBackDelay increased to %d", _recPutBackDelay);
+                }
+                else
+                {
+                    if (_recError == 1)
+                    {
+                        WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "pending recording error exists");
+                    }
+                    _recError = 1;  // triggers callback from module process thread
+                    WEBRTC_TRACE(kTraceError, kTraceUtility, _id, "kRecordingError message posted: _recPutBackDelay=%u", _recPutBackDelay);
+                }
+            }
+        }  // if (_recDelayCount > (_recPutBackDelay-1))
+
+        if (_recDelayCount < (_recPutBackDelay+1))
+        {
+            _recDelayCount++;
+        }
+
+        // increase main buffer count since one complete buffer has now been delivered
+        _recBufCount++;
+
+        if (send) {
+            // Calculate processing time
+            consumedTime = (int)(t2.QuadPart-t1.QuadPart);
+            // handle wraps, time should not be higher than a second
+            if ((consumedTime > _perfFreq.QuadPart) || (consumedTime < 0))
+                consumedTime = 0;
+        }
+
+    }  // if ((nBytesRecorded == fullBufferSizeInBytes))
+
+    return nBytesRecorded;
+}
+
+// ----------------------------------------------------------------------------
+//  PlayProc
+// ----------------------------------------------------------------------------
+
+int AudioDeviceWindowsWave::PlayProc(LONGLONG& consumedTime)
+{
+    WebRtc_Word32 remTimeMS(0);
+    int8_t playBuffer[4*PLAY_BUF_SIZE_IN_SAMPLES];
+    WebRtc_UWord32 writtenSamples(0);
+    WebRtc_UWord32 playedSamples(0);
+
+    LARGE_INTEGER t1;
+    LARGE_INTEGER t2;
+
+    consumedTime = 0;
+    _waitCounter++;
+
+    // Get number of ms of sound that remains in the sound card buffer for playback.
+    //
+    remTimeMS = GetPlayoutBufferDelay(writtenSamples, playedSamples);
+
+    // The threshold can be adaptive or fixed. The adaptive scheme is updated
+    // also for fixed mode but the updated threshold is not utilized.
+    //
+    const WebRtc_UWord16 thresholdMS =
+        (_playBufType == AudioDeviceModule::kAdaptiveBufferSize) ? _playBufDelay : _playBufDelayFixed;
+
+    if (remTimeMS < thresholdMS + 9)
+    {
+        _dTcheckPlayBufDelay = 5;
+
+        if (remTimeMS == 0)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceUtility, _id, "playout buffer is empty => we must adapt...");
+            if (_waitCounter > 30)
+            {
+                _erZeroCounter++;
+                if (_erZeroCounter == 2)
+                {
+                    _playBufDelay += 15;
+                    _minPlayBufDelay += 20;
+                    _waitCounter = 50;
+                    WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "New playout states (er=0,erZero=2): minPlayBufDelay=%u, playBufDelay=%u", _minPlayBufDelay, _playBufDelay);
+                }
+                else if (_erZeroCounter == 3)
+                {
+                    _erZeroCounter = 0;
+                    _playBufDelay += 30;
+                    _minPlayBufDelay += 25;
+                    _waitCounter = 0;
+                    WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "New playout states (er=0, erZero=3): minPlayBufDelay=%u, playBufDelay=%u", _minPlayBufDelay, _playBufDelay);
+                }
+                else
+                {
+                    _minPlayBufDelay += 10;
+                    _playBufDelay += 15;
+                    _waitCounter = 50;
+                    WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "New playout states (er=0, erZero=1): minPlayBufDelay=%u, playBufDelay=%u", _minPlayBufDelay, _playBufDelay);
+                }
+            }
+        }
+        else if (remTimeMS < _minPlayBufDelay)
+        {
+            // If there is less than 25 ms of audio in the play out buffer
+            // increase the buffersize limit value. _waitCounter prevents
+            // _playBufDelay to be increased every time this function is called.
+
+            if (_waitCounter > 30)
+            {
+                _playBufDelay += 10;
+                if (_intro == 0)
+                    _waitCounter = 0;
+                WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "Playout threshold is increased: playBufDelay=%u", _playBufDelay);
+            }
+        }
+        else if (remTimeMS < thresholdMS - 9)
+        {
+            _erZeroCounter = 0;
+        }
+        else
+        {
+            _erZeroCounter = 0;
+            _dTcheckPlayBufDelay = 10;
+        }
+
+        QueryPerformanceCounter(&t1);   // measure time: START
+
+        // Ask for new PCM data to be played out using the AudioDeviceBuffer.
+        // Ensure that this callback is executed without taking the audio-thread lock.
+        //
+        UnLock();
+        WebRtc_UWord32 nSamples = _ptrAudioBuffer->RequestPlayoutData(PLAY_BUF_SIZE_IN_SAMPLES);
+        Lock();
+
+        if (OutputSanityCheckAfterUnlockedPeriod() == -1)
+        {
+            // assert(false);
+            return -1;
+        }
+
+        nSamples = _ptrAudioBuffer->GetPlayoutData(playBuffer);
+        if (nSamples != PLAY_BUF_SIZE_IN_SAMPLES)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceUtility, _id, "invalid number of output samples(%d)", nSamples);
+        }
+
+        QueryPerformanceCounter(&t2);   // measure time: STOP
+        consumedTime = (int)(t2.QuadPart - t1.QuadPart);
+
+        Write(playBuffer, PLAY_BUF_SIZE_IN_SAMPLES);
+
+    }  // if (er < thresholdMS + 9)
+    else if (thresholdMS + 9 < remTimeMS )
+    {
+        _erZeroCounter = 0;
+        _dTcheckPlayBufDelay = 2;    // check buffer more often
+        WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "Need to check playout buffer more often (dT=%u, remTimeMS=%u)", _dTcheckPlayBufDelay, remTimeMS);
+    }
+
+    // If the buffersize has been stable for 20 seconds try to decrease the buffer size
+    if (_waitCounter > 2000)
+    {
+        _intro = 0;
+        _playBufDelay--;
+        _waitCounter = 1990;
+        WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "Playout threshold is decreased: playBufDelay=%u", _playBufDelay);
+    }
+
+    // Limit the minimum sound card (playback) delay to adaptive minimum delay
+    if (_playBufDelay < _minPlayBufDelay)
+    {
+        _playBufDelay = _minPlayBufDelay;
+        WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "Playout threshold is limited to %u", _minPlayBufDelay);
+    }
+
+    // Limit the maximum sound card (playback) delay to 150 ms
+    if (_playBufDelay > 150)
+    {
+        _playBufDelay = 150;
+        WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "Playout threshold is limited to %d", _playBufDelay);
+    }
+
+    // Upper limit of the minimum sound card (playback) delay to 65 ms.
+    // Deactivated during "useHeader mode" (_useHeader > 0).
+    if (_minPlayBufDelay > _MAX_minBuffer &&
+       (_useHeader == 0))
+    {
+        _minPlayBufDelay = _MAX_minBuffer;
+        WEBRTC_TRACE(kTraceDebug, kTraceUtility, _id, "Minimum playout threshold is limited to %d", _MAX_minBuffer);
+    }
+
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  Write
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::Write(int8_t* data, WebRtc_UWord16 nSamples)
+{
+    if (_hWaveOut == NULL)
+    {
+        return -1;
+    }
+
+    if (_playIsInitialized)
+    {
+        MMRESULT res;
+
+        const WebRtc_UWord16 bufCount(_playBufCount);
+
+        // Place data in the memory associated with _waveHeaderOut[bufCount]
+        //
+        const WebRtc_Word16 nBytes = (2*_playChannels)*nSamples;
+        memcpy(&_playBuffer[bufCount][0], &data[0], nBytes);
+
+        // Send a data block to the given waveform-audio output device.
+        //
+        // When the buffer is finished, the WHDR_DONE bit is set in the dwFlags
+        // member of the WAVEHDR structure. The buffer must be prepared with the
+        // waveOutPrepareHeader function before it is passed to waveOutWrite.
+        // Unless the device is paused by calling the waveOutPause function,
+        // playback begins when the first data block is sent to the device.
+        //
+        res = waveOutWrite(_hWaveOut, &_waveHeaderOut[bufCount], sizeof(_waveHeaderOut[bufCount]));
+        if (MMSYSERR_NOERROR != res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "waveOutWrite(%d) failed (err=%d)", bufCount, res);
+            TraceWaveOutError(res);
+
+            _writeErrors++;
+            if (_writeErrors > 10)
+            {
+                if (_playError == 1)
+                {
+                    WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "pending playout error exists");
+                }
+                _playError = 1;  // triggers callback from module process thread
+                WEBRTC_TRACE(kTraceError, kTraceUtility, _id, "kPlayoutError message posted: _writeErrors=%u", _writeErrors);
+            }
+
+            return -1;
+        }
+
+        _playBufCount = (_playBufCount+1) % N_BUFFERS_OUT;  // increase buffer counter modulo size of total buffer
+        _writtenSamples += nSamples;                        // each sample is 2 or 4 bytes
+        _writeErrors = 0;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//    GetClockDrift
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::GetClockDrift(const WebRtc_UWord32 plSamp, const WebRtc_UWord32 rcSamp)
+{
+    int drift = 0;
+    unsigned int plSampDiff = 0, rcSampDiff = 0;
+
+    if (plSamp >= _plSampOld)
+    {
+        plSampDiff = plSamp - _plSampOld;
+    }
+    else
+    {
+        // Wrap
+        int i = 31;
+        while(_plSampOld <= (unsigned int)POW2(i))
+        {
+            i--;
+        }
+
+        // Add the amount remaining prior to wrapping
+        plSampDiff = plSamp +  POW2(i + 1) - _plSampOld;
+    }
+
+    if (rcSamp >= _rcSampOld)
+    {
+        rcSampDiff = rcSamp - _rcSampOld;
+    }
+    else
+    {   // Wrap
+        int i = 31;
+        while(_rcSampOld <= (unsigned int)POW2(i))
+        {
+            i--;
+        }
+
+        rcSampDiff = rcSamp +  POW2(i + 1) - _rcSampOld;
+    }
+
+    drift = plSampDiff - rcSampDiff;
+
+    _plSampOld = plSamp;
+    _rcSampOld = rcSamp;
+
+    return drift;
+}
+
+// ----------------------------------------------------------------------------
+//  MonitorRecording
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::MonitorRecording(const WebRtc_UWord32 time)
+{
+    const WebRtc_UWord16 bytesPerSample = 2*_recChannels;
+    const WebRtc_UWord32 nRecordedSamples = _recordedBytes/bytesPerSample;
+
+    if (nRecordedSamples > 5*N_REC_SAMPLES_PER_SEC)
+    {
+        // 5 seconds of audio has been recorded...
+        if ((time - _prevRecByteCheckTime) > 5700)
+        {
+            // ...and it was more than 5.7 seconds since we last did this check <=>
+            // we have not been able to record 5 seconds of audio in 5.7 seconds,
+            // hence a problem should be reported.
+            // This problem can be related to USB overload.
+            //
+            if (_recWarning == 1)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "pending recording warning exists");
+            }
+            _recWarning = 1;  // triggers callback from module process thread
+            WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "kRecordingWarning message posted: time-_prevRecByteCheckTime=%d", time - _prevRecByteCheckTime);
+        }
+
+        _recordedBytes = 0;            // restart "check again when 5 seconds are recorded"
+        _prevRecByteCheckTime = time;  // reset timer to measure time for recording of 5 seconds
+    }
+
+    if ((time - _prevRecByteCheckTime) > 8000)
+    {
+        // It has been more than 8 seconds since we able to confirm that 5 seconds of
+        // audio was recorded, hence we have not been able to record 5 seconds in
+        // 8 seconds => the complete recording process is most likely dead.
+        //
+        if (_recError == 1)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, "pending recording error exists");
+        }
+        _recError = 1;  // triggers callback from module process thread
+        WEBRTC_TRACE(kTraceError, kTraceUtility, _id, "kRecordingError message posted: time-_prevRecByteCheckTime=%d", time - _prevRecByteCheckTime);
+
+        _prevRecByteCheckTime = time;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MonitorRecording
+//
+//  Restart timer if needed (they seem to be messed up after a hibernate).
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioDeviceWindowsWave::RestartTimerIfNeeded(const WebRtc_UWord32 time)
+{
+    const WebRtc_UWord32 diffMS = time - _prevTimerCheckTime;
+    _prevTimerCheckTime = time;
+
+    if (diffMS > 7)
+    {
+        // one timer-issue detected...
+        _timerFaults++;
+        if (_timerFaults > 5 && _timerRestartAttempts < 2)
+        {
+            // Reinitialize timer event if event fails to execute at least every 5ms.
+            // On some machines it helps and the timer starts working as it should again;
+            // however, not all machines (we have seen issues on e.g. IBM T60).
+            // Therefore, the scheme below ensures that we do max 2 attempts to restart the timer.
+            // For the cases where restart does not do the trick, we compensate for the reduced
+            // resolution on both the recording and playout sides.
+            WEBRTC_TRACE(kTraceWarning, kTraceUtility, _id, " timer issue detected => timer is restarted");
+            _timeEvent.StopTimer();
+            _timeEvent.StartTimer(true, TIMER_PERIOD_MS);
+            // make sure timer gets time to start up and we don't kill/start timer serveral times over and over again
+            _timerFaults = -20;
+            _timerRestartAttempts++;
+        }
+    }
+    else
+    {
+        // restart timer-check scheme since we are OK
+        _timerFaults = 0;
+        _timerRestartAttempts = 0;
+    }
+
+    return 0;
+}
+
+}  // namespace webrtc
+
diff --git a/src/modules/audio_device/main/source/win/audio_device_wave_win.h b/src/modules/audio_device/main/source/win/audio_device_wave_win.h
new file mode 100644
index 0000000..7837bc6
--- /dev/null
+++ b/src/modules/audio_device/main/source/win/audio_device_wave_win.h
@@ -0,0 +1,339 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_WAVE_WIN_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_WAVE_WIN_H
+
+#include "audio_device_generic.h"
+#include "audio_mixer_manager_win.h"
+
+#pragma comment( lib, "winmm.lib" )
+
+namespace webrtc {
+class EventWrapper;
+class ThreadWrapper;
+
+const WebRtc_UWord32 TIMER_PERIOD_MS = 2;
+const WebRtc_UWord32 REC_CHECK_TIME_PERIOD_MS = 4;
+const WebRtc_UWord16 REC_PUT_BACK_DELAY = 4;
+
+const WebRtc_UWord32 N_REC_SAMPLES_PER_SEC = 48000;
+const WebRtc_UWord32 N_PLAY_SAMPLES_PER_SEC = 48000;
+
+const WebRtc_UWord32 N_REC_CHANNELS = 1;  // default is mono recording
+const WebRtc_UWord32 N_PLAY_CHANNELS = 2; // default is stereo playout
+
+// NOTE - CPU load will not be correct for other sizes than 10ms
+const WebRtc_UWord32 REC_BUF_SIZE_IN_SAMPLES = (N_REC_SAMPLES_PER_SEC/100);
+const WebRtc_UWord32 PLAY_BUF_SIZE_IN_SAMPLES = (N_PLAY_SAMPLES_PER_SEC/100);
+
+enum { N_BUFFERS_IN = 200 };
+enum { N_BUFFERS_OUT = 200 };
+
+class AudioDeviceWindowsWave : public AudioDeviceGeneric
+{
+public:
+    AudioDeviceWindowsWave(const WebRtc_Word32 id);
+    ~AudioDeviceWindowsWave();
+
+    // Retrieve the currently utilized audio layer
+    virtual WebRtc_Word32 ActiveAudioLayer(AudioDeviceModule::AudioLayer& audioLayer) const;
+
+    // Main initializaton and termination
+    virtual WebRtc_Word32 Init();
+    virtual WebRtc_Word32 Terminate();
+    virtual bool Initialized() const;
+
+    // Device enumeration
+    virtual WebRtc_Word16 PlayoutDevices();
+    virtual WebRtc_Word16 RecordingDevices();
+    virtual WebRtc_Word32 PlayoutDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize]);
+    virtual WebRtc_Word32 RecordingDeviceName(
+        WebRtc_UWord16 index,
+        char name[kAdmMaxDeviceNameSize],
+        char guid[kAdmMaxGuidSize]);
+
+    // Device selection
+    virtual WebRtc_Word32 SetPlayoutDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetPlayoutDevice(AudioDeviceModule::WindowsDeviceType device);
+    virtual WebRtc_Word32 SetRecordingDevice(WebRtc_UWord16 index);
+    virtual WebRtc_Word32 SetRecordingDevice(AudioDeviceModule::WindowsDeviceType device);
+
+    // Audio transport initialization
+    virtual WebRtc_Word32 PlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitPlayout();
+    virtual bool PlayoutIsInitialized() const;
+    virtual WebRtc_Word32 RecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitRecording();
+    virtual bool RecordingIsInitialized() const;
+
+    // Audio transport control
+    virtual WebRtc_Word32 StartPlayout();
+    virtual WebRtc_Word32 StopPlayout();
+    virtual bool Playing() const;
+    virtual WebRtc_Word32 StartRecording();
+    virtual WebRtc_Word32 StopRecording();
+    virtual bool Recording() const;
+
+    // Microphone Automatic Gain Control (AGC)
+    virtual WebRtc_Word32 SetAGC(bool enable);
+    virtual bool AGC() const;
+
+    // Volume control based on the Windows Wave API (Windows only)
+    virtual WebRtc_Word32 SetWaveOutVolume(WebRtc_UWord16 volumeLeft, WebRtc_UWord16 volumeRight);
+    virtual WebRtc_Word32 WaveOutVolume(WebRtc_UWord16& volumeLeft, WebRtc_UWord16& volumeRight) const;
+
+    // Audio mixer initialization
+    virtual WebRtc_Word32 SpeakerIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitSpeaker();
+    virtual bool SpeakerIsInitialized() const;
+    virtual WebRtc_Word32 MicrophoneIsAvailable(bool& available);
+    virtual WebRtc_Word32 InitMicrophone();
+    virtual bool MicrophoneIsInitialized() const;
+
+    // Speaker volume controls
+    virtual WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
+
+    // Microphone volume controls
+    virtual WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    virtual WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
+    virtual WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
+    virtual WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
+    virtual WebRtc_Word32 MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const;
+
+    // Speaker mute control
+    virtual WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetSpeakerMute(bool enable);
+    virtual WebRtc_Word32 SpeakerMute(bool& enabled) const;
+
+    // Microphone mute control
+    virtual WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneMute(bool enable);
+    virtual WebRtc_Word32 MicrophoneMute(bool& enabled) const;
+
+    // Microphone boost control
+    virtual WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    virtual WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
+
+    // Stereo support
+    virtual WebRtc_Word32 StereoPlayoutIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoPlayout(bool enable);
+    virtual WebRtc_Word32 StereoPlayout(bool& enabled) const;
+    virtual WebRtc_Word32 StereoRecordingIsAvailable(bool& available);
+    virtual WebRtc_Word32 SetStereoRecording(bool enable);
+    virtual WebRtc_Word32 StereoRecording(bool& enabled) const;
+
+    // Delay information and control
+    virtual WebRtc_Word32 SetPlayoutBuffer(const AudioDeviceModule::BufferType type, WebRtc_UWord16 sizeMS);
+    virtual WebRtc_Word32 PlayoutBuffer(AudioDeviceModule::BufferType& type, WebRtc_UWord16& sizeMS) const;
+    virtual WebRtc_Word32 PlayoutDelay(WebRtc_UWord16& delayMS) const;
+    virtual WebRtc_Word32 RecordingDelay(WebRtc_UWord16& delayMS) const;
+
+    // CPU load
+    virtual WebRtc_Word32 CPULoad(WebRtc_UWord16& load) const;
+
+public:
+    virtual bool PlayoutWarning() const;
+    virtual bool PlayoutError() const;
+    virtual bool RecordingWarning() const;
+    virtual bool RecordingError() const;
+    virtual void ClearPlayoutWarning();
+    virtual void ClearPlayoutError();
+    virtual void ClearRecordingWarning();
+    virtual void ClearRecordingError();
+
+public:
+    virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
+
+private:
+    void Lock() { _critSect.Enter(); };
+    void UnLock() { _critSect.Leave(); };
+    WebRtc_Word32 Id() {return _id;}
+    bool IsUsingOutputDeviceIndex() const {return _usingOutputDeviceIndex;}
+    AudioDeviceModule::WindowsDeviceType OutputDevice() const {return _outputDevice;}
+    WebRtc_UWord16 OutputDeviceIndex() const {return _outputDeviceIndex;}
+    bool IsUsingInputDeviceIndex() const {return _usingInputDeviceIndex;}
+    AudioDeviceModule::WindowsDeviceType InputDevice() const {return _inputDevice;}
+    WebRtc_UWord16 InputDeviceIndex() const {return _inputDeviceIndex;}
+
+private:
+    inline WebRtc_Word32 InputSanityCheckAfterUnlockedPeriod() const;
+    inline WebRtc_Word32 OutputSanityCheckAfterUnlockedPeriod() const;
+
+private:
+    WebRtc_Word32 EnumeratePlayoutDevices();
+    WebRtc_Word32 EnumerateRecordingDevices();
+    void TraceSupportFlags(DWORD dwSupport) const;
+    void TraceWaveInError(MMRESULT error) const;
+    void TraceWaveOutError(MMRESULT error) const;
+    WebRtc_Word32 PrepareStartRecording();
+    WebRtc_Word32 PrepareStartPlayout();
+
+    WebRtc_Word32 RecProc(LONGLONG& consumedTime);
+    int PlayProc(LONGLONG& consumedTime);
+
+    WebRtc_Word32 GetPlayoutBufferDelay(WebRtc_UWord32& writtenSamples, WebRtc_UWord32& playedSamples);
+    WebRtc_Word32 GetRecordingBufferDelay(WebRtc_UWord32& readSamples, WebRtc_UWord32& recSamples);
+    WebRtc_Word32 Write(int8_t* data, WebRtc_UWord16 nSamples);
+    WebRtc_Word32 GetClockDrift(const WebRtc_UWord32 plSamp, const WebRtc_UWord32 rcSamp);
+    WebRtc_Word32 MonitorRecording(const WebRtc_UWord32 time);
+    WebRtc_Word32 RestartTimerIfNeeded(const WebRtc_UWord32 time);
+
+private:
+    static bool ThreadFunc(void*);
+    bool ThreadProcess();
+
+    static DWORD WINAPI GetCaptureVolumeThread(LPVOID context);
+    DWORD DoGetCaptureVolumeThread();
+
+    static DWORD WINAPI SetCaptureVolumeThread(LPVOID context);
+    DWORD DoSetCaptureVolumeThread();
+
+private:
+    AudioDeviceBuffer*                      _ptrAudioBuffer;
+
+    CriticalSectionWrapper&                 _critSect;
+    EventWrapper&                           _timeEvent;
+    EventWrapper&                           _recStartEvent;
+    EventWrapper&                           _playStartEvent;
+
+    HANDLE                                  _hGetCaptureVolumeThread;
+    HANDLE                                  _hShutdownGetVolumeEvent;
+    HANDLE                                  _hSetCaptureVolumeThread;
+    HANDLE                                  _hShutdownSetVolumeEvent;
+    HANDLE                                  _hSetCaptureVolumeEvent;
+
+    ThreadWrapper*                          _ptrThread;
+    WebRtc_UWord32                          _threadID;
+
+    CriticalSectionWrapper&                 _critSectCb;
+
+    WebRtc_Word32                           _id;
+
+    AudioMixerManager                       _mixerManager;
+
+    bool                                    _usingInputDeviceIndex;
+    bool                                    _usingOutputDeviceIndex;
+    AudioDeviceModule::WindowsDeviceType    _inputDevice;
+    AudioDeviceModule::WindowsDeviceType    _outputDevice;
+    WebRtc_UWord16                          _inputDeviceIndex;
+    WebRtc_UWord16                          _outputDeviceIndex;
+    bool                                    _inputDeviceIsSpecified;
+    bool                                    _outputDeviceIsSpecified;
+
+    WAVEFORMATEX                            _waveFormatIn;
+    WAVEFORMATEX                            _waveFormatOut;
+
+    HWAVEIN                                 _hWaveIn;
+    HWAVEOUT                                _hWaveOut;
+
+    WAVEHDR                                 _waveHeaderIn[N_BUFFERS_IN];
+    WAVEHDR                                 _waveHeaderOut[N_BUFFERS_OUT];
+
+    WebRtc_UWord8                           _recChannels;
+    WebRtc_UWord8                           _playChannels;
+    WebRtc_UWord16                          _recBufCount;
+    WebRtc_UWord16                          _recDelayCount;
+    WebRtc_UWord16                          _recPutBackDelay;
+
+    int8_t                                  _recBuffer[N_BUFFERS_IN][4*REC_BUF_SIZE_IN_SAMPLES];
+    int8_t                                  _playBuffer[N_BUFFERS_OUT][4*PLAY_BUF_SIZE_IN_SAMPLES];
+
+    AudioDeviceModule::BufferType           _playBufType;
+
+private:
+    bool                                    _initialized;
+    bool                                    _recording;
+    bool                                    _playing;
+    bool                                    _recIsInitialized;
+    bool                                    _playIsInitialized;
+    bool                                    _startRec;
+    bool                                    _stopRec;
+    bool                                    _startPlay;
+    bool                                    _stopPlay;
+    bool                                    _AGC;
+
+private:
+    WebRtc_UWord32                          _prevPlayTime;
+    WebRtc_UWord32                          _prevRecTime;
+    WebRtc_UWord32                          _prevTimerCheckTime;
+
+    WebRtc_UWord16                          _playBufCount;          // playout buffer index
+    WebRtc_UWord16                          _dTcheckPlayBufDelay;   // dT for check of play buffer, {2,5,10} [ms]
+    WebRtc_UWord16                          _playBufDelay;          // playback delay
+    WebRtc_UWord16                          _playBufDelayFixed;     // fixed playback delay
+    WebRtc_UWord16                          _minPlayBufDelay;       // minimum playback delay
+    WebRtc_UWord16                          _MAX_minBuffer;         // level of (adaptive) min threshold must be < _MAX_minBuffer
+
+    WebRtc_Word32                           _erZeroCounter;         // counts "buffer-is-empty" events
+    WebRtc_Word32                           _intro;
+    WebRtc_Word32                           _waitCounter;
+
+    WebRtc_UWord32                          _writtenSamples;
+    WebRtc_UWord32                          _writtenSamplesOld;
+    WebRtc_UWord32                          _playedSamplesOld;
+
+    WebRtc_UWord32                          _sndCardPlayDelay;
+    WebRtc_UWord32                          _sndCardRecDelay;
+
+    WebRtc_UWord32                          _plSampOld;
+    WebRtc_UWord32                          _rcSampOld;
+
+    WebRtc_UWord32                          _read_samples;
+    WebRtc_UWord32                          _read_samples_old;
+    WebRtc_UWord32                          _rec_samples_old;
+
+    // State that detects driver problems:
+    WebRtc_Word32                           _dc_diff_mean;
+    WebRtc_Word32                           _dc_y_prev;
+    WebRtc_Word32                           _dc_penalty_counter;
+    WebRtc_Word32                           _dc_prevtime;
+    WebRtc_UWord32                          _dc_prevplay;
+
+    WebRtc_UWord32                          _recordedBytes;         // accumulated #recorded bytes (reset periodically)
+    WebRtc_UWord32                          _prevRecByteCheckTime;  // time when we last checked the recording process
+
+    // CPU load measurements
+    LARGE_INTEGER                           _perfFreq;
+    LONGLONG                                _playAcc;               // accumulated time for playout callback
+    float                                   _avgCPULoad;            // average total (rec+play) CPU load
+
+    WebRtc_Word32                           _wrapCounter;
+
+    WebRtc_Word32                           _useHeader;
+    WebRtc_Word16                           _timesdwBytes;
+    WebRtc_Word32                           _no_of_msecleft_warnings;
+    WebRtc_Word32                           _writeErrors;
+    WebRtc_Word32                           _timerFaults;
+    WebRtc_Word32                           _timerRestartAttempts;
+
+    WebRtc_UWord16                          _playWarning;
+    WebRtc_UWord16                          _playError;
+    WebRtc_UWord16                          _recWarning;
+    WebRtc_UWord16                          _recError;
+
+    WebRtc_UWord32                          _newMicLevel;
+    WebRtc_UWord32                          _minMicVolume;
+    WebRtc_UWord32                          _maxMicVolume;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_WAVE_WIN_H
diff --git a/src/modules/audio_device/main/source/win/audio_mixer_manager_win.cc b/src/modules/audio_device/main/source/win/audio_mixer_manager_win.cc
new file mode 100644
index 0000000..a1dbcb5
--- /dev/null
+++ b/src/modules/audio_device/main/source/win/audio_mixer_manager_win.cc
@@ -0,0 +1,2721 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_mixer_manager_win.h"
+#include "trace.h"
+
+#include <strsafe.h>    // StringCchCopy(), StringCchCat(), StringCchPrintf()
+#include <cassert>      // assert()
+
+#ifdef _WIN32
+// removes warning: "reinterpret_cast: conversion from 'UINT' to 'HMIXEROBJ'
+//                of greater size"
+#pragma warning(disable:4312)
+#endif
+
+// Avoids the need of Windows 7 SDK
+#ifndef WAVE_MAPPED_kDefaultCommunicationDevice
+#define  WAVE_MAPPED_kDefaultCommunicationDevice   0x0010
+#endif
+
+namespace webrtc {
+
+// ============================================================================
+//                             CONSTRUCTION/DESTRUCTION
+// ============================================================================
+
+AudioMixerManager::AudioMixerManager(const WebRtc_Word32 id) :
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _id(id),
+    _inputMixerHandle(NULL),
+    _outputMixerHandle(NULL)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s constructed", __FUNCTION__);
+    ClearSpeakerState();
+    ClearMicrophoneState();
+}
+
+AudioMixerManager::~AudioMixerManager()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s destructed", __FUNCTION__);
+
+    Close();
+
+    delete &_critSect;
+}
+
+// ============================================================================
+//                                 PUBLIC METHODS
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  Close
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::Close()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_outputMixerHandle != NULL)
+    {
+        mixerClose(_outputMixerHandle);
+        _outputMixerHandle = NULL;
+    }
+    if (_inputMixerHandle != NULL)
+    {
+        mixerClose(_inputMixerHandle);
+        _inputMixerHandle = NULL;
+    }
+    return 0;
+
+}
+
+// ----------------------------------------------------------------------------
+//  CloseSpeaker
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::CloseSpeaker()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_outputMixerHandle == NULL)
+    {
+        return -1;
+    }
+
+    ClearSpeakerState(_outputMixerID);
+
+    mixerClose(_outputMixerHandle);
+    _outputMixerHandle = NULL;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  CloseMicrophone
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::CloseMicrophone()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_inputMixerHandle == NULL)
+    {
+        return -1;
+    }
+
+    ClearMicrophoneState(_inputMixerID);
+
+    mixerClose(_inputMixerHandle);
+    _inputMixerHandle = NULL;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  EnumerateAll
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::EnumerateAll()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    UINT nDevices = mixerGetNumDevs();
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "#mixer devices: %u", nDevices);
+
+    MIXERCAPS    caps;
+    MIXERLINE    destLine;
+    MIXERLINE    sourceLine;
+    MIXERCONTROL controlArray[MAX_NUMBER_OF_LINE_CONTROLS];
+
+    UINT mixId(0);
+    UINT destId(0);
+    UINT sourceId(0);
+
+    for (mixId = 0; mixId < nDevices; mixId++)
+    {
+        if (!GetCapabilities(mixId, caps, true))
+            continue;
+
+        for (destId = 0; destId < caps.cDestinations; destId++)
+        {
+            GetDestinationLineInfo(mixId, destId, destLine, true);
+            GetAllLineControls(mixId, destLine, controlArray, true);
+
+            for (sourceId = 0; sourceId < destLine.cConnections; sourceId++)
+            {
+                GetSourceLineInfo(mixId, destId, sourceId, sourceLine, true);
+                GetAllLineControls(mixId, sourceLine, controlArray, true);
+            }
+        }
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  EnumerateSpeakers
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::EnumerateSpeakers()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    UINT nDevices = mixerGetNumDevs();
+    if (nDevices > MAX_NUMBER_MIXER_DEVICES)
+    {
+        assert(false);
+        return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "#mixer devices: %u", nDevices);
+
+    MIXERCAPS    caps;
+    MIXERLINE    destLine;
+    MIXERCONTROL controlArray[MAX_NUMBER_OF_LINE_CONTROLS];
+
+    UINT mixId(0);
+    UINT destId(0);
+
+    ClearSpeakerState();
+
+    // scan all avaliable mixer devices
+    for (mixId = 0; mixId < nDevices; mixId++)
+    {
+        // get capabilities for the specified mixer ID
+        GetCapabilities(mixId, caps);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[mixerID=%d] %s: ", mixId, WideToUTF8(caps.szPname));
+        // scan all avaliable destinations for this mixer
+        for (destId = 0; destId < caps.cDestinations; destId++)
+        {
+            GetDestinationLineInfo(mixId, destId, destLine);
+            if ((destLine.cControls == 0)                         ||    // no controls or
+                (destLine.cConnections == 0)                      ||    // no source lines or
+                (destLine.fdwLine & MIXERLINE_LINEF_DISCONNECTED) ||    // disconnected or
+                !(destLine.fdwLine & MIXERLINE_LINEF_ACTIVE))           // inactive
+            {
+                // don't store this line ID since it will not be possible to control
+                continue;
+            }
+            if ((destLine.dwComponentType == MIXERLINE_COMPONENTTYPE_DST_SPEAKERS) ||
+                (destLine.dwComponentType == MIXERLINE_COMPONENTTYPE_DST_HEADPHONES))
+            {
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "found valid speaker/headphone (name: %s, ID: %u)", WideToUTF8(destLine.szName), destLine.dwLineID);
+                _speakerState[mixId].dwLineID = destLine.dwLineID;
+                _speakerState[mixId].speakerIsValid = true;
+                // retrieve all controls for the speaker component
+                GetAllLineControls(mixId, destLine, controlArray);
+                for (UINT c = 0; c < destLine.cControls; c++)
+                {
+                    if (controlArray[c].dwControlType == MIXERCONTROL_CONTROLTYPE_VOLUME)
+                    {
+                        _speakerState[mixId].dwVolumeControlID = controlArray[c].dwControlID;
+                        _speakerState[mixId].volumeControlIsValid = true;
+                        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "found volume control (name: %s, ID: %u)", WideToUTF8(controlArray[c].szName), controlArray[c].dwControlID);
+                    }
+                    else if (controlArray[c].dwControlType == MIXERCONTROL_CONTROLTYPE_MUTE)
+                    {
+                        _speakerState[mixId].dwMuteControlID = controlArray[c].dwControlID;
+                        _speakerState[mixId].muteControlIsValid = true;
+                        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "found mute control (name: %s, ID: %u)", WideToUTF8(controlArray[c].szName), controlArray[c].dwControlID);
+                    }
+                }
+                break;
+            }
+        }
+        if (!SpeakerIsValid(mixId))
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "unable to find a valid speaker destination line", mixId);
+        }
+    }
+
+    if (ValidSpeakers() == 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "failed to locate any valid speaker line");
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  EnumerateMicrophones
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::EnumerateMicrophones()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    UINT nDevices = mixerGetNumDevs();
+    if (nDevices > MAX_NUMBER_MIXER_DEVICES)
+    {
+        assert(false);
+        return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "#mixer devices: %u", nDevices);
+
+    MIXERCAPS    caps;
+    MIXERLINE    destLine;
+    MIXERLINE    sourceLine;
+    MIXERCONTROL controlArray[MAX_NUMBER_OF_LINE_CONTROLS];
+
+    UINT mixId(0);
+    UINT destId(0);
+
+    ClearMicrophoneState();
+
+    // scan all avaliable mixer devices
+    for (mixId = 0; mixId < nDevices; mixId++)
+    {
+        // get capabilities for the specified mixer ID
+        GetCapabilities(mixId, caps);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "[mixerID=%d] %s: ", mixId, WideToUTF8(caps.szPname));
+        // scan all avaliable destinations for this mixer
+        for (destId = 0; destId < caps.cDestinations; destId++)
+        {
+            GetDestinationLineInfo(mixId, destId, destLine);
+
+            if ((destLine.cConnections == 0)                      ||    // no source lines or
+                (destLine.fdwLine & MIXERLINE_LINEF_DISCONNECTED) ||    // disconnected or
+               !(destLine.fdwLine & MIXERLINE_LINEF_ACTIVE))            // inactive
+            {
+                // Don't store this line ID since there are no sources connected to this destination.
+                // Compare with the speaker side where we also exclude lines with no controls.
+                continue;
+            }
+
+            if (destLine.dwComponentType == MIXERLINE_COMPONENTTYPE_DST_WAVEIN)
+            {
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "found valid Wave In destination (name: %s, ID: %u)", WideToUTF8(destLine.szName), destLine.dwLineID);
+                _microphoneState[mixId].dwLineID = destLine.dwLineID;
+                _microphoneState[mixId].microphoneIsValid = true;
+
+                // retrieve all controls for the identified wave-in destination
+                if (!GetAllLineControls(mixId, destLine, controlArray))
+                {
+                    // This destination has no controls. We must try to control
+                    // one of its sources instead. 
+                    // This is a rare state but has been found for some
+                    // Logitech USB headsets.
+
+                    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, 
+                    "this destination has no controls => must control source");
+                    for (DWORD sourceId = 0; sourceId < destLine.cConnections; sourceId++)
+                    {
+                        GetSourceLineInfo(mixId, destId, sourceId, sourceLine, false); 
+                        if (sourceLine.dwComponentType == 
+                            MIXERLINE_COMPONENTTYPE_SRC_MICROPHONE)
+                        {
+                            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, 
+                            "found microphone source ( name: %s, ID: %u)", 
+                            WideToUTF8(sourceLine.szName), sourceId);
+                            GetAllLineControls(mixId, sourceLine, controlArray, false);
+                            // scan the controls for this source and search for volume, 
+                            // mute and on/off (<=> boost) controls
+                            for (UINT sc = 0; sc < sourceLine.cControls; sc++)
+                            {
+                                if (controlArray[sc].dwControlType == 
+                                    MIXERCONTROL_CONTROLTYPE_VOLUME)
+                                {
+                                    // store this volume control
+                                    _microphoneState[mixId].dwVolumeControlID = 
+                                    controlArray[sc].dwControlID;
+                                    _microphoneState[mixId].volumeControlIsValid = true;
+                                    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, 
+                                    "found volume control (name: %s, ID: %u)", 
+                                    WideToUTF8(controlArray[sc].szName), 
+                                    controlArray[sc].dwControlID);
+                                }
+                                else if (controlArray[sc].dwControlType == 
+                                         MIXERCONTROL_CONTROLTYPE_MUTE)
+                                {
+                                    // store this mute control
+                                    _microphoneState[mixId].dwMuteControlID =
+                                    controlArray[sc].dwControlID;
+                                    _microphoneState[mixId].muteControlIsValid = true;
+                                    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, 
+                                    "found mute control (name: %s, ID: %u)", 
+                                    WideToUTF8(controlArray[sc].szName), 
+                                    controlArray[sc].dwControlID);
+                                }
+                                else if (controlArray[sc].dwControlType == 
+                                         MIXERCONTROL_CONTROLTYPE_ONOFF ||
+                                         controlArray[sc].dwControlType == 
+                                         MIXERCONTROL_CONTROLTYPE_LOUDNESS)
+                                {
+                                    // store this on/off control (most likely a Boost control)
+                                    _microphoneState[mixId].dwOnOffControlID = 
+                                    controlArray[sc].dwControlID;
+                                    _microphoneState[mixId].onOffControlIsValid = true;
+                                    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, 
+                                    "found on/off control (name: %s, ID: %u)", 
+                                    WideToUTF8(controlArray[sc].szName), 
+                                    controlArray[sc].dwControlID);
+                                 }
+                             }
+                         }
+                    }
+
+                    break;
+                }
+
+                // It seems like there are three different configurations we can find in this state:
+                //
+                // (1) The Wave-in destination contains one MUX control only
+                // (2) The Wave-in destination contains one or more controls where one is a volume control
+                // (3) On Vista and Win 7, it seems like case 2 above is extended.
+                //     It is common that a Wave-in destination has two master controls (volume and mute),
+                //     AND a microphone source as well with its own volume and mute controls with unique
+                //     identifiers. Initial tests have shown that it is sufficient to modify the master
+                //     controls only. The source controls will "follow" the master settings, hence the
+                //     source controls seem to be redundant.
+                //
+                // For case 1, we should locate the selected source and its controls. The MUX setting will
+                // give us the selected source. NOTE - the selecion might not be a microphone.
+                //
+                // For case 2, the volume control works as a master level control and we should use that one.
+                //
+                // For case 3, we use the master controls only and assume that the source control will "follow".
+                //
+                // Examples of case 1: - SigmaTel Audio (built-in)
+                //                     - add more..
+                //
+                // Examples of case 2: - Plantronics USB Headset
+                //                      - Eutectics IPP 200 USB phone
+                //                      - add more...
+                //
+                // Examples of case 3: - Realtek High Definition on Vista (TL)
+                //                     - add more...
+
+                if ((destLine.cControls == 1) &&
+                    (controlArray[0].dwControlType == MIXERCONTROL_CONTROLTYPE_MUX))
+                {
+                    // Case 1: MUX control detected  => locate the selected source and its volume control
+                    //         Note that, the selecion might not be a microphone. A warning is given for
+                    //         this case only, i.e., it is OK to control a selected Line In source as long
+                    //         as it is connected to the wave-in destination.
+
+                    UINT selection(0);
+                    const DWORD nItemsInMux(controlArray[0].cMultipleItems);
+
+                    // decide which source line that is selected in the mux
+                    if (GetSelectedMuxSource(mixId, controlArray[0].dwControlID, nItemsInMux, selection))
+                    {
+                        // selection now contains the index of the selected source =>
+                        // read the line information for this source
+                        // if conditions listed below
+                        // condition 1: invalid source
+                        // condition 2: no controls
+                        // condition 3: disconnected
+                        // condition 4: inactive
+                        if (!GetSourceLineInfo(mixId, destId, selection, sourceLine)  ||
+                           (sourceLine.cControls == 0)                                ||
+                           (sourceLine.fdwLine & MIXERLINE_LINEF_DISCONNECTED)        ||
+                          !(sourceLine.fdwLine & MIXERLINE_LINEF_ACTIVE))               
+                        {
+                            continue;
+                        }
+
+                        if (sourceLine.dwComponentType != MIXERLINE_COMPONENTTYPE_SRC_MICROPHONE)
+                        {
+                            // add more details about the selected source (not a microphone)
+                            TraceComponentType(sourceLine.dwComponentType);
+                            // send a warning just to inform about the fact that a non-microphone source will be controlled
+                            WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "the selected (to be controlled) source is not a microphone type");
+                        }
+
+                        // retrieve all controls for the selected source
+                        GetAllLineControls(mixId, sourceLine, controlArray);
+                        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "MUX selection is %u [0,%u]", selection, nItemsInMux-1);
+
+                        // scan the controls for this source and search for volume, mute and on/off (<=> boost) controls
+                        for (UINT sc = 0; sc < sourceLine.cControls; sc++)
+                        {
+                            if (controlArray[sc].dwControlType == MIXERCONTROL_CONTROLTYPE_VOLUME)
+                            {
+                                // store this volume control
+                                _microphoneState[mixId].dwVolumeControlID = controlArray[sc].dwControlID;
+                                _microphoneState[mixId].volumeControlIsValid = true;
+                                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "found volume control (name: %s, ID: %u)", WideToUTF8(controlArray[sc].szName), controlArray[sc].dwControlID);
+                            }
+                            else if (controlArray[sc].dwControlType == MIXERCONTROL_CONTROLTYPE_MUTE)
+                            {
+                                // store this mute control
+                                _microphoneState[mixId].dwMuteControlID = controlArray[sc].dwControlID;
+                                _microphoneState[mixId].muteControlIsValid = true;
+                                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "found mute control (name: %s, ID: %u)", WideToUTF8(controlArray[sc].szName), controlArray[sc].dwControlID);
+                            }
+                            else if (controlArray[sc].dwControlType == MIXERCONTROL_CONTROLTYPE_ONOFF ||
+                                     controlArray[sc].dwControlType == MIXERCONTROL_CONTROLTYPE_LOUDNESS)
+                            {
+                                // store this on/off control (most likely a Boost control)
+                                _microphoneState[mixId].dwOnOffControlID = controlArray[sc].dwControlID;
+                                _microphoneState[mixId].onOffControlIsValid = true;
+                                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "found on/off control (name: %s, ID: %u)", WideToUTF8(controlArray[sc].szName), controlArray[sc].dwControlID);
+                            }
+                        }
+                    }
+                    else
+                    {
+                        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to detect which source to control");
+                    }
+
+                }
+                else if (destLine.cConnections == 1)
+                {
+                    // Case 2 or Case 3:
+
+                    GetSourceLineInfo(mixId, destId, 0, sourceLine);
+                    if ((sourceLine.dwComponentType == MIXERLINE_COMPONENTTYPE_SRC_MICROPHONE) &&
+                        (sourceLine.cControls > 0))
+                    {
+                        // Case 3: same as Case 2 below but we have also detected a Microphone source
+                        //         with its own controls. So far, I have not been able to find any device
+                        //         where it is required to modify these controls. Until I have found such
+                        //         a device, this case will be handled as a Case 2 (see below).
+
+                        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "microphone source controls will not be controlled");
+                    }
+                    else if ((sourceLine.dwComponentType == MIXERLINE_COMPONENTTYPE_SRC_MICROPHONE) &&
+                             (sourceLine.cControls == 0))
+                    {
+                        // default state on non Vista/Win 7 machines
+                        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "microphone source has no controls => use master controls instead");
+                    }
+                    else
+                    {
+                        // add more details about the selected source (not a microphone)
+                        TraceComponentType(sourceLine.dwComponentType);
+                        // send a warning just to inform about the fact that a non-microphone source will be controlled
+                        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "the connected (to be controlled) source is not a microphone type");
+                    }
+
+                    // Case 2 : one source only and no MUX control detected =>
+                    //          locate the master volume control (and mute + boost controls if possible)
+
+                    // scan the controls for this wave-in destination and search for volume, mute and on/off (<=> boost) controls
+                    for (UINT dc = 0; dc < destLine.cControls; dc++)
+                    {
+                        if (controlArray[dc].dwControlType == MIXERCONTROL_CONTROLTYPE_VOLUME)
+                        {
+                            // store this volume control
+                            _microphoneState[mixId].dwVolumeControlID = controlArray[dc].dwControlID;
+                            _microphoneState[mixId].volumeControlIsValid = true;
+                            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "found volume control (name: %s, ID: %u)", WideToUTF8(controlArray[dc].szName), controlArray[dc].dwControlID);
+                        }
+                        else if (controlArray[dc].dwControlType == MIXERCONTROL_CONTROLTYPE_MUTE)
+                        {
+                            // store this mute control
+                            _microphoneState[mixId].dwMuteControlID = controlArray[dc].dwControlID;
+                            _microphoneState[mixId].muteControlIsValid = true;
+                            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "found mute control (name: %s, ID: %u)", WideToUTF8(controlArray[dc].szName), controlArray[dc].dwControlID);
+                        }
+                        else if (controlArray[dc].dwControlType == MIXERCONTROL_CONTROLTYPE_ONOFF ||
+                                 controlArray[dc].dwControlType == MIXERCONTROL_CONTROLTYPE_LOUDNESS ||
+                                 controlArray[dc].dwControlType == MIXERCONTROL_CONTROLTYPE_BOOLEAN)
+                        {
+                            // store this on/off control
+                            _microphoneState[mixId].dwOnOffControlID = controlArray[dc].dwControlID;
+                            _microphoneState[mixId].onOffControlIsValid = true;
+                            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "found on/off control (name: %s, ID: %u)", WideToUTF8(controlArray[dc].szName), controlArray[dc].dwControlID);
+                        }
+                    }
+                }
+                else
+                {
+                    // We are in a state where more than one source is connected to the wave-in destination.
+                    // I am bailing out here for now until I understand this case better.
+                    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "failed to locate valid microphone controls for this mixer");
+                }
+                break;
+            }
+        }  // for (destId = 0; destId < caps.cDestinations; destId++)
+
+        if (!MicrophoneIsValid(mixId))
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "unable to find a valid microphone destination line", mixId);
+        }
+    }  // for (mixId = 0; mixId < nDevices; mixId++)
+
+    if (ValidMicrophones() == 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "failed to locate any valid microphone line");
+        return -1;
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  OpenSpeaker I(II)
+//
+//  Verifies that the mixer contains a valid speaker destination line.
+//  Avoids opening the mixer if valid control has not been found.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::OpenSpeaker(AudioDeviceModule::WindowsDeviceType device)
+{
+    if (device == AudioDeviceModule::kDefaultDevice)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioMixerManager::OpenSpeaker(kDefaultDevice)");
+    }
+    else if (device == AudioDeviceModule::kDefaultCommunicationDevice)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioMixerManager::OpenSpeaker(kDefaultCommunicationDevice)");
+    }
+
+    CriticalSectionScoped lock(&_critSect);
+
+    // Close any existing output mixer handle
+    //
+    if (_outputMixerHandle != NULL)
+    {
+        mixerClose(_outputMixerHandle);
+        _outputMixerHandle = NULL;
+    }
+
+    MMRESULT     res;
+    WAVEFORMATEX waveFormat;
+    HWAVEOUT     hWaveOut(NULL);
+
+    waveFormat.wFormatTag      = WAVE_FORMAT_PCM ;
+    waveFormat.nChannels       = 2;
+    waveFormat.nSamplesPerSec  = 48000;
+    waveFormat.wBitsPerSample  = 16;
+    waveFormat.nBlockAlign     = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
+    waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
+    waveFormat.cbSize          = 0;
+
+    // We need a waveform-audio output handle for the currently selected output device.
+    // This handle will then give us the corresponding mixer identifier. Once the mixer
+    // ID is known, it is possible to open the output mixer.
+    //
+    if (device == AudioDeviceModule::kDefaultCommunicationDevice)
+    {
+        // check if it is possible to open the default communication device (supported on Windows 7)
+        res = waveOutOpen(NULL, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL |
+            WAVE_MAPPED_kDefaultCommunicationDevice | WAVE_FORMAT_QUERY);
+        if (MMSYSERR_NOERROR == res)
+        {
+            // if so, open the default communication device for real
+            res = waveOutOpen(&hWaveOut, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_MAPPED_kDefaultCommunicationDevice);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening default communication device");
+        }
+        else
+        {
+            // use default device since default communication device was not avaliable
+            res = waveOutOpen(&hWaveOut, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                "unable to open default communication device => using default instead");
+        }
+    }
+    else if (device == AudioDeviceModule::kDefaultDevice)
+    {
+        // open default device since it has been requested
+        res = waveOutOpen(&hWaveOut, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening default output device");
+    }
+
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutOpen() failed (err=%d)", res);
+        TraceWaveOutError(res);
+    }
+
+    UINT   mixerId(0);
+    HMIXER hMixer(NULL);
+
+    // Retrieve the device identifier for a mixer device associated with the
+    // aquired waveform-audio output handle.
+    //
+    res = mixerGetID((HMIXEROBJ)hWaveOut, &mixerId, MIXER_OBJECTF_HWAVEOUT);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetID(MIXER_OBJECTF_HWAVEOUT) failed (err=%d)", res);
+        // identification failed => use default mixer identifier (=0)
+        mixerId = 0;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "specified output device <=> mixer ID %u", mixerId);
+
+    // The waveform-audio output handle is no longer needed.
+    //
+    waveOutClose(hWaveOut);
+
+    // Verify that the mixer contains a valid speaker destination line.
+    // Avoid opening the mixer if valid control has not been found.
+    //
+    if (!SpeakerIsValid(mixerId))
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "it is not possible to control the speaker volume for this mixer device");
+        return -1;
+    }
+
+    // Open the specified mixer device and ensure that the device will not
+    // be removed until the application closes the handle.
+    //
+    res = mixerOpen(&hMixer, mixerId, 0, 0, MIXER_OBJECTF_MIXER);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerOpen() failed (err=%d)", res);
+    }
+
+    // Store the output mixer handle and active mixer identifier
+    //
+    _outputMixerHandle = hMixer;
+    _outputMixerID = mixerId;
+
+    if (_outputMixerHandle != NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "the output mixer device is now open (0x%x)", _outputMixerHandle);
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  OpenSpeaker II(II)
+//
+//  Verifies that the mixer contains a valid speaker destination line.
+//  Avoids opening the mixer if valid control has not been found.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::OpenSpeaker(WebRtc_UWord16 index)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioMixerManager::OpenSpeaker(index=%d)", index);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    // Close any existing output mixer handle
+    //
+    if (_outputMixerHandle != NULL)
+    {
+        mixerClose(_outputMixerHandle);
+        _outputMixerHandle = NULL;
+    }
+
+    MMRESULT     res;
+    WAVEFORMATEX waveFormat;
+    HWAVEOUT     hWaveOut(NULL);
+
+    const UINT   deviceID(index);  // use index parameter as device identifier
+
+    waveFormat.wFormatTag      = WAVE_FORMAT_PCM ;
+    waveFormat.nChannels       = 2;
+    waveFormat.nSamplesPerSec  = 48000;
+    waveFormat.wBitsPerSample  = 16;
+    waveFormat.nBlockAlign     = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
+    waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
+    waveFormat.cbSize          = 0;
+
+    // We need a waveform-audio output handle for the currently selected output device.
+    // This handle will then give us the corresponding mixer identifier. Once the mixer
+    // ID is known, it is possible to open the output mixer.
+    //
+    res = waveOutOpen(&hWaveOut, deviceID, &waveFormat, 0, 0, CALLBACK_NULL);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveOutOpen(deviceID=%u) failed (err=%d)", index, res);
+        TraceWaveOutError(res);
+    }
+
+    UINT   mixerId(0);
+    HMIXER hMixer(NULL);
+
+    // Retrieve the device identifier for a mixer device associated with the
+    // aquired waveform-audio output handle.
+    //
+    res = mixerGetID((HMIXEROBJ)hWaveOut, &mixerId, MIXER_OBJECTF_HWAVEOUT);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetID(MIXER_OBJECTF_HWAVEOUT) failed (err=%d)", res);
+        // identification failed => use default mixer identifier (=0)
+        mixerId = 0;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "specified output device <=> mixer ID %u", mixerId);
+
+    // The waveform-audio output handle is no longer needed.
+    //
+    waveOutClose(hWaveOut);
+
+    // Verify that the mixer contains a valid speaker destination line.
+    // Avoid opening the mixer if valid control has not been found.
+    //
+    if (!SpeakerIsValid(mixerId))
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "it is not possible to control the speaker volume for this mixer device");
+        return -1;
+    }
+
+    // Open the specified mixer device and ensure that the device will not
+    // be removed until the application closes the handle.
+    //
+    res = mixerOpen(&hMixer, mixerId, 0, 0, MIXER_OBJECTF_MIXER);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerOpen() failed (err=%d)", res);
+    }
+
+    // Store the output mixer handle and active mixer identifier
+    //
+    _outputMixerHandle = hMixer;
+    _outputMixerID = mixerId;
+
+    if (_outputMixerHandle != NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "the output mixer device is now open (0x%x)", _outputMixerHandle);
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  OpenMicrophone I(II)
+//
+//  Verifies that the mixer contains a valid wave-in destination line.
+//  Avoids opening the mixer if valid control has not been found.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::OpenMicrophone(AudioDeviceModule::WindowsDeviceType device)
+{
+    if (device == AudioDeviceModule::kDefaultDevice)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioMixerManager::OpenMicrophone(kDefaultDevice)");
+    }
+    else if (device == AudioDeviceModule::kDefaultCommunicationDevice)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioMixerManager::OpenMicrophone(kDefaultCommunicationDevice)");
+    }
+
+    CriticalSectionScoped lock(&_critSect);
+
+    // Close any existing output mixer handle
+    //
+    if (_inputMixerHandle != NULL)
+    {
+        mixerClose(_inputMixerHandle);
+        _inputMixerHandle = NULL;
+    }
+
+    MMRESULT     res;
+    WAVEFORMATEX waveFormat;
+    HWAVEIN         hWaveIn(NULL);
+
+    waveFormat.wFormatTag      = WAVE_FORMAT_PCM ;
+    waveFormat.nChannels       = 1;
+    waveFormat.nSamplesPerSec  = 48000;
+    waveFormat.wBitsPerSample  = 16;
+    waveFormat.nBlockAlign     = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
+    waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
+    waveFormat.cbSize          = 0 ;
+
+    // We need a waveform-audio input handle for the currently selected input device.
+    // This handle will then give us the corresponding mixer identifier. Once the mixer
+    // ID is known, it is possible to open the input mixer.
+    //
+    if (device == AudioDeviceModule::kDefaultCommunicationDevice)
+    {
+        // check if it is possible to open the default communication device (supported on Windows 7)
+        res = waveInOpen(NULL, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL |
+            WAVE_MAPPED_kDefaultCommunicationDevice | WAVE_FORMAT_QUERY);
+        if (MMSYSERR_NOERROR == res)
+        {
+            // if so, open the default communication device for real
+            res = waveInOpen(&hWaveIn, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL | WAVE_MAPPED_kDefaultCommunicationDevice);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening default communication device");
+        }
+        else
+        {
+            // use default device since default communication device was not avaliable
+            res = waveInOpen(&hWaveIn, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                "unable to open default communication device => using default instead");
+        }
+    }
+    else if (device == AudioDeviceModule::kDefaultDevice)
+    {
+        // open default device since it has been requested
+        res = waveInOpen(&hWaveIn, WAVE_MAPPER, &waveFormat, 0, 0, CALLBACK_NULL);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "opening default input device");
+    }
+
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInOpen() failed (err=%d)", res);
+        TraceWaveInError(res);
+    }
+
+    UINT   mixerId(0);
+    HMIXER hMixer(NULL);
+
+    // Retrieve the device identifier for a mixer device associated with the
+    // aquired waveform-audio input handle.
+    //
+    res = mixerGetID((HMIXEROBJ)hWaveIn, &mixerId, MIXER_OBJECTF_HWAVEIN);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetID(MIXER_OBJECTF_HWAVEIN) failed (err=%d)", res);
+        // identification failed => use default mixer identifier (=0)
+        mixerId = 0;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "specified input device <=> mixer ID %u", mixerId);
+
+    // The waveform-audio input handle is no longer needed.
+    //
+    waveInClose(hWaveIn);
+
+    // Verify that the mixer contains a valid wave-in destination line and a volume control.
+    // Avoid opening the mixer if valid control has not been found.
+    //
+    if (!MicrophoneIsValid(mixerId))
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "it is not possible to control the microphone volume for this mixer device");
+        return -1;
+    }
+
+    // Open the specified mixer device and ensure that the device will not
+    // be removed until the application closes the handle.
+    //
+    res = mixerOpen(&hMixer, mixerId, 0, 0, MIXER_OBJECTF_MIXER);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerOpen() failed (err=%d)", res);
+    }
+
+    // Store the input mixer handle and active mixer identifier
+    //
+    _inputMixerHandle = hMixer;
+    _inputMixerID = mixerId;
+
+    if (_inputMixerHandle != NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "the input mixer device is now open (0x%x)", _inputMixerHandle);
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  OpenMicrophone II(II)
+//
+//  Verifies that the mixer contains a valid wave-in destination line.
+//  Avoids opening the mixer if valid control has not been found.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::OpenMicrophone(WebRtc_UWord16 index)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioMixerManager::OpenMicrophone(index=%d)", index);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    // Close any existing input mixer handle
+    //
+    if (_inputMixerHandle != NULL)
+    {
+        mixerClose(_inputMixerHandle);
+        _inputMixerHandle = NULL;
+    }
+
+    MMRESULT     res;
+    WAVEFORMATEX waveFormat;
+    HWAVEIN         hWaveIn(NULL);
+
+    const UINT   deviceID(index);  // use index parameter as device identifier
+
+    waveFormat.wFormatTag      = WAVE_FORMAT_PCM ;
+    waveFormat.nChannels       = 1;
+    waveFormat.nSamplesPerSec  = 48000;
+    waveFormat.wBitsPerSample  = 16;
+    waveFormat.nBlockAlign     = waveFormat.nChannels * waveFormat.wBitsPerSample / 8;
+    waveFormat.nAvgBytesPerSec = waveFormat.nSamplesPerSec * waveFormat.nBlockAlign;
+    waveFormat.cbSize          = 0;
+
+    // We need a waveform-audio input handle for the currently selected input device.
+    // This handle will then give us the corresponding mixer identifier. Once the mixer
+    // ID is known, it is possible to open the input mixer.
+    //
+    res = waveInOpen(&hWaveIn, deviceID, &waveFormat, 0, 0, CALLBACK_NULL);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "waveInOpen(deviceID=%u) failed (err=%d)", index, res);
+        TraceWaveInError(res);
+    }
+
+    UINT   mixerId(0);
+    HMIXER hMixer(NULL);
+
+    // Retrieve the device identifier for a mixer device associated with the
+    // aquired waveform-audio input handle.
+    //
+    res = mixerGetID((HMIXEROBJ)hWaveIn, &mixerId, MIXER_OBJECTF_HWAVEIN);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetID(MIXER_OBJECTF_HWAVEIN) failed (err=%d)", res);
+        // identification failed => use default mixer identifier (=0)
+        mixerId = 0;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "specified input device <=> mixer ID %u", mixerId);
+
+    // The waveform-audio input handle is no longer needed.
+    //
+    waveInClose(hWaveIn);
+
+    // Verify that the mixer contains a valid wave-in destination line.
+    // Avoid opening the mixer if valid control has not been found.
+    //
+    if (!MicrophoneIsValid(mixerId))
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "it is not possible to control the microphone volume for this mixer device");
+        return -1;
+    }
+
+    // Open the specified mixer device and ensure that the device will not
+    // be removed until the application closes the handle.
+    //
+    res = mixerOpen(&hMixer, mixerId, 0, 0, MIXER_OBJECTF_MIXER);
+    if (MMSYSERR_NOERROR != res)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerOpen() failed (err=%d)", res);
+    }
+
+    // Store the input mixer handle and active mixer identifier
+    //
+    _inputMixerHandle = hMixer;
+    _inputMixerID = mixerId;
+
+    if (_inputMixerHandle != NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "the input mixer device is now open (0x%x)", _inputMixerHandle);
+    }
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+// SpeakerIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioMixerManager::SpeakerIsInitialized() const
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    return (_outputMixerHandle != NULL);
+}
+
+// ----------------------------------------------------------------------------
+// MicrophoneIsInitialized
+// ----------------------------------------------------------------------------
+
+bool AudioMixerManager::MicrophoneIsInitialized() const
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    return (_inputMixerHandle != NULL);
+}
+
+// ----------------------------------------------------------------------------
+// SetSpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::SetSpeakerVolume(WebRtc_UWord32 volume)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioMixerManager::SetSpeakerVolume(volume=%u)", volume);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_outputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable output mixer exists");
+        return -1;
+    }
+
+    const UINT mixerID(_outputMixerID);
+    const DWORD dwControlID(_speakerState[_outputMixerID].dwVolumeControlID);
+    DWORD dwValue(volume);
+
+    // Set one unsigned control value for a specified volume-control identifier
+    //
+    if (!SetUnsignedControlValue(mixerID, dwControlID, dwValue))
+    {
+        return -1;
+    }
+
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerVolume
+//
+//  Note that (MIXERCONTROL_CONTROLTYPE_VOLUME & MIXERCONTROL_CT_UNITS_MASK)
+//  always equals MIXERCONTROL_CT_UNITS_UNSIGNED;
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::SpeakerVolume(WebRtc_UWord32& volume) const
+{
+
+    if (_outputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable output mixer exists");
+        return -1;
+    }
+
+    const UINT mixerID(_outputMixerID);
+    const DWORD dwControlID(_speakerState[_outputMixerID].dwVolumeControlID);
+    DWORD dwValue(0);
+
+    // Retrieve one unsigned control value for a specified volume-control identifier
+    //
+    if (!GetUnsignedControlValue(mixerID, dwControlID, dwValue))
+    {
+        return -1;
+    }
+
+    volume = dwValue;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MaxSpeakerVolume
+//
+//  Note that (MIXERCONTROL_CONTROLTYPE_VOLUME & MIXERCONTROL_CT_UNITS_MASK)
+//  always equals MIXERCONTROL_CT_UNITS_UNSIGNED
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const
+{
+
+    if (_outputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable output mixer exists");
+        return -1;
+    }
+
+    const UINT mixerID(_outputMixerID);
+    const DWORD dwControlID(_speakerState[_outputMixerID].dwVolumeControlID);
+    MIXERCONTROL mixerControl;
+
+    // Retrieve one control line for a specified volume-control identifier
+    //
+    if (!GetLineControl(mixerID, dwControlID, mixerControl))
+    {
+        return -1;
+    }
+
+    maxVolume = mixerControl.Bounds.dwMaximum;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+// MinSpeakerVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::MinSpeakerVolume(WebRtc_UWord32& minVolume) const
+{
+
+    if (_outputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable output mixer exists");
+        return -1;
+    }
+
+    const UINT mixerID(_outputMixerID);
+    const DWORD dwControlID(_speakerState[_outputMixerID].dwVolumeControlID);
+    MIXERCONTROL mixerControl;
+
+    // Retrieve one control line for a specified volume-control identifier
+    //
+    if (!GetLineControl(mixerID, dwControlID, mixerControl))
+    {
+        return -1;
+    }
+
+    minVolume = mixerControl.Bounds.dwMinimum;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+// SpeakerVolumeStepSize
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+
+    if (_outputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable output mixer exists");
+        return -1;
+    }
+
+    const UINT mixerID(_outputMixerID);
+    MIXERCONTROL mixerControl;
+
+    // Retrieve one control line for a specified volume-control identifier
+    //
+    if (!GetLineControl(mixerID, _speakerState[mixerID].dwVolumeControlID, mixerControl))
+    {
+        return -1;
+    }
+
+    stepSize = static_cast<WebRtc_UWord16> (mixerControl.Metrics.cSteps);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+// SpeakerVolumeIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::SpeakerVolumeIsAvailable(bool& available)
+{
+    if (_outputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable output mixer exists");
+        return -1;
+    }
+
+    available = _speakerState[_outputMixerID].volumeControlIsValid;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+// SpeakerMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::SpeakerMuteIsAvailable(bool& available)
+{
+    if (_outputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable output mixer exists");
+        return -1;
+    }
+
+    available = _speakerState[_outputMixerID].muteControlIsValid;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetSpeakerMute
+//
+//  This mute function works a master mute for the output speaker.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::SetSpeakerMute(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioMixerManager::SetSpeakerMute(enable=%u)", enable);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_outputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable output mixer exists");
+        return -1;
+    }
+
+    // Ensure that the selected speaker destination has a valid mute control.
+    // If so, its identifier was stored during the enumeration phase which must
+    // have taken place since the output mixer handle exists.
+    //
+    if (!_speakerState[_outputMixerID].muteControlIsValid)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "it is not possible to mute this speaker line");
+        return -1;
+    }
+
+    const DWORD dwControlID(_speakerState[_outputMixerID].dwMuteControlID);
+
+    // Set one boolean control value for the specified mute-control
+    //
+    if (!SetBooleanControlValue(_outputMixerID, dwControlID, enable))
+    {
+        return -1;
+    }
+
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::SpeakerMute(bool& enabled) const
+{
+
+    if (_outputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable output mixer exists");
+        return -1;
+    }
+
+    // Ensure that the selected speaker destination has a valid mute control.
+    // If so, its identifier was stored during the enumeration phase which must
+    // have taken place since the output mixer handle exists.
+    //
+    if (!_speakerState[_outputMixerID].muteControlIsValid)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "it is not possible to mute this speaker line");
+        return -1;
+    }
+
+    const DWORD dwControlID(_speakerState[_outputMixerID].dwMuteControlID);
+    bool value(false);
+
+    // Retrieve one boolean control value for a specified mute-control identifier
+    //
+    if (!GetBooleanControlValue(_outputMixerID, dwControlID, value))
+    {
+        return -1;
+    }
+
+    enabled = value;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMuteIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::MicrophoneMuteIsAvailable(bool& available)
+{
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    available = _microphoneState[_inputMixerID].muteControlIsValid;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+// SetMicrophoneMute
+//
+//  This mute function works a master mute for the input microphone.
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::SetMicrophoneMute(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioMixerManager::SetMicrophoneMute(enable=%u)", enable);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    // Ensure that the selected wave-in destinationhas a valid mute control.
+    // If so, its identifier was stored during the enumeration phase which must
+    // have taken place since the input mixer handle exists.
+    //
+    if (!_microphoneState[_inputMixerID].muteControlIsValid)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "it is not possible to mute this microphone line");
+        return -1;
+    }
+
+    const DWORD dwControlID(_microphoneState[_inputMixerID].dwMuteControlID);
+
+    // Set one boolean control value for the specified mute-control
+    //
+    if (!SetBooleanControlValue(_inputMixerID, dwControlID, enable))
+    {
+        return -1;
+    }
+
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneMute
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::MicrophoneMute(bool& enabled) const
+{
+
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    // Ensure that the selected wave-in destinationhas a valid mute control.
+    // If so, its identifier was stored during the enumeration phase which must
+    // have taken place since the input mixer handle exists.
+    //
+    if (!_microphoneState[_inputMixerID].muteControlIsValid)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "it is not possible to mute this microphone line");
+        return -1;
+    }
+
+    const DWORD dwControlID(_microphoneState[_inputMixerID].dwMuteControlID);
+    bool value(false);
+
+    // Retrieve one boolean control value for a specified mute-control identifier
+    //
+    if (!GetBooleanControlValue(_inputMixerID, dwControlID, value))
+    {
+        return -1;
+    }
+
+    enabled = value;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneBoostIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::MicrophoneBoostIsAvailable(bool& available)
+{
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    available = _microphoneState[_inputMixerID].onOffControlIsValid;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneBoost
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::SetMicrophoneBoost(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioMixerManager::SetMicrophoneBoost(enable=%u)", enable);
+
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    // Ensure that the selected wave-in destination has a valid boost (on/off) control.
+    // If so, its identifier was stored during the enumeration phase which must
+    // have taken place since the input mixer handle exists.
+    //
+    if (!_microphoneState[_inputMixerID].onOffControlIsValid)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no boost control exists for this wave-in line");
+        return -1;
+    }
+
+    const DWORD dwControlID(_microphoneState[_inputMixerID].dwOnOffControlID);
+
+    // Set one boolean control value for the specified boost (on/off) control
+    //
+    if (!SetBooleanControlValue(_inputMixerID, dwControlID, enable))
+    {
+        return -1;
+    }
+
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneBoost
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::MicrophoneBoost(bool& enabled) const
+{
+
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    // Ensure that the selected wave-in destination has a valid boost (on/off) control.
+    // If so, its identifier was stored during the enumeration phase which must
+    // have taken place since the input mixer handle exists.
+    //
+    if (!_microphoneState[_inputMixerID].onOffControlIsValid)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no boost control exists for this wave-in line");
+        return -1;
+    }
+
+    const DWORD dwControlID(_microphoneState[_inputMixerID].dwOnOffControlID);
+    bool value(false);
+
+    // Retrieve one boolean control value for a specified boost-control identifier
+    //
+    if (!GetBooleanControlValue(_inputMixerID, dwControlID, value))
+    {
+        return -1;
+    }
+
+    enabled = value;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolumeIsAvailable
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::MicrophoneVolumeIsAvailable(bool& available)
+{
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    available = _microphoneState[_inputMixerID].volumeControlIsValid;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  SetMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::SetMicrophoneVolume(WebRtc_UWord32 volume)
+{
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    const UINT mixerID(_inputMixerID);
+    const DWORD dwControlID(_microphoneState[_inputMixerID].dwVolumeControlID);
+    DWORD dwValue(volume);
+
+    // Set one unsigned control value for a specified volume-control identifier
+    //
+    if (!SetUnsignedControlValue(mixerID, dwControlID, dwValue))
+    {
+        return -1;
+    }
+
+    return (0);
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::MicrophoneVolume(WebRtc_UWord32& volume) const
+{
+    CriticalSectionScoped lock(&_critSect);
+
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    const UINT mixerID(_inputMixerID);
+    const DWORD dwControlID(_microphoneState[_inputMixerID].dwVolumeControlID);
+    DWORD dwValue(0);
+
+    // Retrieve one unsigned control value for a specified volume-control identifier
+    //
+    if (!GetUnsignedControlValue(mixerID, dwControlID, dwValue))
+    {
+        return -1;
+    }
+
+    volume = dwValue;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MaxMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "%s", __FUNCTION__);
+
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    const UINT mixerID(_inputMixerID);
+    const DWORD dwControlID(_microphoneState[_inputMixerID].dwVolumeControlID);
+    MIXERCONTROL mixerControl;
+
+    // Retrieve one control line for a specified volume-control identifier
+    //
+    if (!GetLineControl(mixerID, dwControlID, mixerControl))
+    {
+        return -1;
+    }
+
+    maxVolume = mixerControl.Bounds.dwMaximum;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+// MinMicrophoneVolume
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::MinMicrophoneVolume(WebRtc_UWord32& minVolume) const
+{
+
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    const UINT mixerID(_inputMixerID);
+    const DWORD dwControlID(_microphoneState[_inputMixerID].dwVolumeControlID);
+    MIXERCONTROL mixerControl;
+
+    // Retrieve one control line for a specified volume-control identifier
+    //
+    if (!GetLineControl(mixerID, dwControlID, mixerControl))
+    {
+        return -1;
+    }
+
+    minVolume = mixerControl.Bounds.dwMinimum;
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneVolumeStepSize
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioMixerManager::MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const
+{
+
+    if (_inputMixerHandle == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "no avaliable input mixer exists");
+        return -1;
+    }
+
+    const UINT mixerID(_inputMixerID);
+    const DWORD dwControlID(_microphoneState[_inputMixerID].dwVolumeControlID);
+    MIXERCONTROL mixerControl;
+
+    // Retrieve one control line for a specified volume-control identifier
+    //
+    if (!GetLineControl(mixerID, dwControlID, mixerControl))
+    {
+        return -1;
+    }
+
+    stepSize = static_cast<WebRtc_UWord16> (mixerControl.Metrics.cSteps);
+
+    return 0;
+}
+
+// ============================================================================
+//                              PRIVATE METHODS
+// ============================================================================
+
+// ----------------------------------------------------------------------------
+//  Devices
+//
+//  A given audio card has one Mixer device associated with it. All of the
+//  various components on that card are controlled through that card's one
+//  Mixer device.
+// ----------------------------------------------------------------------------
+
+UINT AudioMixerManager::Devices() const
+{
+    UINT nDevs = mixerGetNumDevs();
+    return nDevs;
+}
+
+// ----------------------------------------------------------------------------
+//  DestinationLines
+//
+//  # destination lines given mixer ID.
+// ----------------------------------------------------------------------------
+
+UINT AudioMixerManager::DestinationLines(UINT mixId) const
+{
+    MIXERCAPS caps;
+    if (!GetCapabilities(mixId, caps))
+    {
+        return 0;
+    }
+    return (caps.cDestinations);
+}
+// ----------------------------------------------------------------------------
+//  DestinationLines
+//
+//  # source lines given mixer ID and destination ID.
+// ----------------------------------------------------------------------------
+
+UINT AudioMixerManager::SourceLines(UINT mixId, DWORD destId) const
+{
+    MIXERLINE dline;
+    if (!GetDestinationLineInfo(mixId, destId, dline))
+    {
+        return 0;
+    }
+    return (dline.cConnections);
+}
+
+// ----------------------------------------------------------------------------
+//  GetCapabilities
+//
+//  Queries a specified mixer device to determine its capabilities.
+// ----------------------------------------------------------------------------
+
+bool AudioMixerManager::GetCapabilities(UINT mixId, MIXERCAPS& caps, bool trace) const
+{
+    MMRESULT res;
+    MIXERCAPS mcaps;
+
+    res = mixerGetDevCaps(mixId, &mcaps, sizeof(MIXERCAPS));
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetDevCaps() failed (err=%d)", res);
+        return false;
+    }
+
+    memcpy(&caps, &mcaps, sizeof(MIXERCAPS));
+
+    if (trace)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "===============================================================");
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Mixer ID %u:", mixId);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "manufacturer ID      : %u", caps.wMid);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product ID           : %u", caps.wPid);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "version of driver    : %u", caps.vDriverVersion);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product name         : %s", WideToUTF8(caps.szPname));
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "misc. support bits   : %u", caps.fdwSupport);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "count of destinations: %u (+)", caps.cDestinations);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "===============================================================");
+    }
+
+    if (caps.cDestinations == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id, "invalid number of mixer destinations");
+        return false;
+    }
+
+    return true;
+}
+
+// ----------------------------------------------------------------------------
+//  GetDestinationLineInfo
+// ----------------------------------------------------------------------------
+
+bool AudioMixerManager::GetDestinationLineInfo(UINT mixId, DWORD destId, MIXERLINE& line, bool trace) const
+{
+    MMRESULT  res;
+    MIXERLINE mline;
+
+    mline.cbStruct = sizeof(MIXERLINE);
+    mline.dwDestination = destId;   // max destination index is cDestinations-1
+    mline.dwSource = 0;             // not set for MIXER_GETLINEINFOF_DESTINATION
+
+    // Retrieve information about the specified destination line of a mixer device.
+    // Note that we use the mixer ID here and not a handle to an opened mixer.
+    // It is not required to open the mixer for enumeration purposes only.
+    //
+    res = mixerGetLineInfo(reinterpret_cast<HMIXEROBJ>(mixId), &mline, MIXER_OBJECTF_MIXER | MIXER_GETLINEINFOF_DESTINATION);
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetLineInfo(MIXER_GETLINEINFOF_DESTINATION) failed (err=%d)", res);
+        return false;
+    }
+
+    memcpy(&line, &mline, sizeof(MIXERLINE));
+
+    if (trace)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "> Destination Line ID %u:", destId);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -");
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "destination line index : %u", mline.dwDestination);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "dwLineID               : %lu (unique)", mline.dwLineID);
+        TraceStatusAndSupportFlags(mline.fdwLine);
+        TraceComponentType(mline.dwComponentType);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "count of channels      : %u", mline.cChannels);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "# audio source lines   : %u (+)", mline.cConnections);    // valid only for destinations
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "# controls             : %u (*)", mline.cControls);       // can be zero
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "short name             : %s", WideToUTF8(mline.szShortName));
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "full name              : %s", WideToUTF8(mline.szName));
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -");
+        TraceTargetType(mline.Target.dwType);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "target device ID       : %lu", mline.Target.dwDeviceID);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "manufacturer ID        : %u", mline.Target.wMid);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product ID             : %u", mline.Target.wPid);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "driver version         : %u", mline.Target.vDriverVersion);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "product name           : %s", WideToUTF8(mline.Target.szPname));
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "---------------------------------------------------------------");
+    }
+
+    return true;
+}
+
+// ----------------------------------------------------------------------------
+//  GetSourceLineInfo
+// ----------------------------------------------------------------------------
+
+bool AudioMixerManager::GetSourceLineInfo(UINT mixId, DWORD destId, DWORD srcId, MIXERLINE& line, bool trace) const
+{
+    MMRESULT  res;
+    MIXERLINE mline;
+
+    mline.cbStruct = sizeof(MIXERLINE);
+    mline.dwDestination = destId;   // we want the source info for this destination
+    mline.dwSource = srcId;         // source index (enumerate over these)
+
+    // Retrieve information about the specified source line of a mixer device.
+    // Note that we use the mixer ID here and not a handle to an opened mixer.
+    // It is not required to open the mixer for enumeration purposes only.
+    //
+    res = mixerGetLineInfo(reinterpret_cast<HMIXEROBJ>(mixId), &mline, MIXER_OBJECTF_MIXER | MIXER_GETLINEINFOF_SOURCE);
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetLineInfo(MIXER_GETLINEINFOF_SOURCE) failed (err=%d)", res);
+        return false;
+    }
+
+    memcpy(&line, &mline, sizeof(MIXERLINE));
+
+    if (trace)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, " >> Source Line ID %u:", srcId);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "destination line index : %u", mline.dwDestination);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "dwSource               : %u", mline.dwSource);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "dwLineID               : %lu (unique)", mline.dwLineID);
+        TraceStatusAndSupportFlags(mline.fdwLine);
+        TraceComponentType(mline.dwComponentType);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "# controls             : %u (*)", mline.cControls);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "full name              : %s", WideToUTF8(mline.szName));
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -");
+        TraceTargetType(mline.Target.dwType);
+        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "---------------------------------------------------------------");
+    }
+
+    return true;
+}
+
+// ----------------------------------------------------------------------------
+// GetAllLineControls
+// ----------------------------------------------------------------------------
+
+bool AudioMixerManager::GetAllLineControls(UINT mixId, const MIXERLINE& line, MIXERCONTROL* controlArray, bool trace) const
+{
+    // Ensure that we don't try to aquire information if there are no controls for this line
+    //
+    if (line.cControls == 0)
+        return false;
+
+    MMRESULT          res;
+    MIXERLINECONTROLS mlineControls;            // contains information about the controls of an audio line
+
+    mlineControls.dwLineID  = line.dwLineID;    // unique audio line identifier
+    mlineControls.cControls = line.cControls;   // number of controls associated with the line
+    mlineControls.pamxctrl  = controlArray;     // points to the first MIXERCONTROL structure to be filled
+    mlineControls.cbStruct  = sizeof(MIXERLINECONTROLS);
+    mlineControls.cbmxctrl  = sizeof(MIXERCONTROL);
+
+    // Get information on ALL controls associated with the specified audio line
+    //
+    res = mixerGetLineControls(reinterpret_cast<HMIXEROBJ>(mixId), &mlineControls, MIXER_OBJECTF_MIXER | MIXER_GETLINECONTROLSF_ALL);
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetLineControls(MIXER_GETLINECONTROLSF_ALL) failed  (err=%d)", res);
+        return false;
+    }
+
+    if (trace)
+    {
+        for (UINT c = 0; c < line.cControls; c++)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, " >> Control ID %u:", c);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "dwControlID            : %u (unique)", controlArray[c].dwControlID);
+            TraceControlType(controlArray[c].dwControlType);
+            TraceControlStatusAndSupportFlags(controlArray[c].fdwControl);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "cMultipleItems         : %u", controlArray[c].cMultipleItems);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "short name             : %s", WideToUTF8(controlArray[c].szShortName));
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "full name              : %s", WideToUTF8(controlArray[c].szName));
+            if ((controlArray[c].dwControlType & MIXERCONTROL_CT_UNITS_MASK) == MIXERCONTROL_CT_UNITS_SIGNED)
+            {
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "min signed value       : %d", controlArray[c].Bounds.lMinimum);
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "max signed value       : %d", controlArray[c].Bounds.lMaximum);
+            }
+            else if ((controlArray[c].dwControlType & MIXERCONTROL_CT_UNITS_MASK) == MIXERCONTROL_CT_UNITS_UNSIGNED ||
+                     (controlArray[c].dwControlType & MIXERCONTROL_CT_UNITS_MASK) == MIXERCONTROL_CT_UNITS_BOOLEAN)
+            {
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "min unsigned value     : %u",  controlArray[c].Bounds.dwMinimum);
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "max unsigned value     : %u", controlArray[c].Bounds.dwMaximum);
+            }
+            if (controlArray[c].dwControlType  != MIXERCONTROL_CONTROLTYPE_CUSTOM)
+            {
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "cSteps                 : %u",  controlArray[c].Metrics.cSteps);
+            }
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "...............................................................");
+            GetControlDetails(mixId, controlArray[c], true);
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "...............................................................");
+
+        }
+    }
+
+    return true;
+}
+
+// ----------------------------------------------------------------------------
+//  GetLineControls
+// ----------------------------------------------------------------------------
+
+bool AudioMixerManager::GetLineControl(UINT mixId, DWORD dwControlID, MIXERCONTROL& control) const
+{
+    MMRESULT          res;
+    MIXERLINECONTROLS mlineControl;
+
+    mlineControl.dwControlID = dwControlID;
+    mlineControl.cControls   = 1;
+    mlineControl.pamxctrl    = &control;
+    mlineControl.cbStruct    = sizeof(MIXERLINECONTROLS);
+    mlineControl.cbmxctrl    = sizeof(MIXERCONTROL);
+
+    // Get information on one controls associated with the specified conrol identifier
+    //
+    res = mixerGetLineControls(reinterpret_cast<HMIXEROBJ>(mixId), &mlineControl, MIXER_OBJECTF_MIXER | MIXER_GETLINECONTROLSF_ONEBYID);
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetLineControls(MIXER_GETLINECONTROLSF_ONEBYID) failed (err=%d)", res);
+        return false;
+    }
+
+    return true;
+}
+
+// ----------------------------------------------------------------------------
+//  GetControlDetails
+// ----------------------------------------------------------------------------
+
+bool AudioMixerManager::GetControlDetails(UINT mixId, MIXERCONTROL& controlArray, bool trace) const
+{
+    assert(controlArray.cMultipleItems <= MAX_NUMBER_OF_MULTIPLE_ITEMS);
+
+    MMRESULT                     res;
+    MIXERCONTROLDETAILS          controlDetails;
+
+    MIXERCONTROLDETAILS_UNSIGNED valueUnsigned[MAX_NUMBER_OF_MULTIPLE_ITEMS];
+    MIXERCONTROLDETAILS_SIGNED   valueSigned[MAX_NUMBER_OF_MULTIPLE_ITEMS];
+    MIXERCONTROLDETAILS_BOOLEAN  valueBoolean[MAX_NUMBER_OF_MULTIPLE_ITEMS];
+
+    enum ControlType
+    {
+        CT_UNITS_UNSIGNED,
+        CT_UNITS_SIGNED,
+        CT_UNITS_BOOLEAN
+    };
+
+    ControlType ctype(CT_UNITS_UNSIGNED);
+
+    controlDetails.cbStruct       = sizeof(MIXERCONTROLDETAILS);
+    controlDetails.dwControlID    = controlArray.dwControlID;       // control identifier
+    controlDetails.cChannels      = 1;                              // we need to set values as if they were uniform
+    controlDetails.cMultipleItems = controlArray.cMultipleItems;    // only nonzero for CONTROLF_MULTIPLE controls
+                                                                    // can e.g. happen for CONTROLTYPE_MUX
+    if (controlDetails.cMultipleItems > MAX_NUMBER_OF_MULTIPLE_ITEMS)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "cMultipleItems > %d", MAX_NUMBER_OF_MULTIPLE_ITEMS);
+        controlDetails.cMultipleItems = MAX_NUMBER_OF_MULTIPLE_ITEMS;
+    }
+
+    if ((controlArray.dwControlType & MIXERCONTROL_CT_UNITS_MASK) == MIXERCONTROL_CT_UNITS_SIGNED)
+    {
+        ctype = CT_UNITS_SIGNED;
+        controlDetails.cbDetails = sizeof(MIXERCONTROLDETAILS_SIGNED);
+        controlDetails.paDetails = &valueSigned[0];
+    }
+    else if ((controlArray.dwControlType & MIXERCONTROL_CT_UNITS_MASK) == MIXERCONTROL_CT_UNITS_UNSIGNED)
+    {
+        ctype = CT_UNITS_UNSIGNED;
+        controlDetails.cbDetails = sizeof(MIXERCONTROLDETAILS_UNSIGNED);
+        controlDetails.paDetails = &valueUnsigned[0];
+    }
+    else if ((controlArray.dwControlType & MIXERCONTROL_CT_UNITS_MASK) == MIXERCONTROL_CT_UNITS_BOOLEAN)
+    {
+        ctype = CT_UNITS_BOOLEAN;
+        controlDetails.cbDetails = sizeof(MIXERCONTROLDETAILS_BOOLEAN);
+        controlDetails.paDetails = &valueBoolean[0];
+    }
+
+    // Retrieve a control's value
+    //
+    res = mixerGetControlDetails(reinterpret_cast<HMIXEROBJ>(mixId), &controlDetails, MIXER_OBJECTF_MIXER | MIXER_GETCONTROLDETAILSF_VALUE);
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetControlDetails(MIXER_GETCONTROLDETAILSF_VALUE) failed (err=%d)", res);
+        return false;
+    }
+
+    if (trace)
+    {
+        UINT nItems(1);
+        nItems = (controlDetails.cMultipleItems > 0 ? controlDetails.cMultipleItems : 1);
+        for (UINT i = 0; i < nItems; i++)
+        {
+            if (ctype == CT_UNITS_SIGNED)
+            {
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "signed value           : %d", valueSigned[i].lValue);
+            }
+            else if (ctype == CT_UNITS_UNSIGNED)
+            {
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "unsigned value         : %u", valueUnsigned[i].dwValue);
+            }
+            else if (ctype == CT_UNITS_BOOLEAN)
+            {
+                WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "boolean value          : %u", valueBoolean[i].fValue);
+            }
+        }
+    }
+
+    return true;
+}
+
+// ----------------------------------------------------------------------------
+//  GetUnsignedControlValue
+// ----------------------------------------------------------------------------
+
+bool AudioMixerManager::GetUnsignedControlValue(UINT mixId, DWORD dwControlID, DWORD& dwValue) const
+{
+    MMRESULT                     res;
+    MIXERCONTROLDETAILS          controlDetails;
+    MIXERCONTROLDETAILS_UNSIGNED valueUnsigned;
+
+    controlDetails.dwControlID    = dwControlID;
+    controlDetails.cbStruct       = sizeof(MIXERCONTROLDETAILS);
+    controlDetails.cChannels      = 1;
+    controlDetails.cMultipleItems = 0;
+    controlDetails.cbDetails      = sizeof(MIXERCONTROLDETAILS_UNSIGNED);
+    controlDetails.paDetails      = &valueUnsigned;
+
+    // Retrieve the unsigned value
+    //
+    res = mixerGetControlDetails(reinterpret_cast<HMIXEROBJ>(mixId), &controlDetails, MIXER_OBJECTF_MIXER | MIXER_GETCONTROLDETAILSF_VALUE);
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetControlDetails(MIXER_GETCONTROLDETAILSF_VALUE) failed (err=%d)", res);
+        return false;
+    }
+
+    // Deliver the retrieved value
+    //
+    dwValue = valueUnsigned.dwValue;
+
+    return true;
+}
+
+// ----------------------------------------------------------------------------
+//  SetUnsignedControlValue
+// ----------------------------------------------------------------------------
+
+bool AudioMixerManager::SetUnsignedControlValue(UINT mixId, DWORD dwControlID, DWORD dwValue) const
+{
+    WEBRTC_TRACE(kTraceStream, kTraceAudioDevice, _id, "AudioMixerManager::SetUnsignedControlValue(mixId=%u, dwControlID=%d, dwValue=%d)", mixId, dwControlID, dwValue);
+
+    MMRESULT                     res;
+    MIXERCONTROLDETAILS          controlDetails;
+    MIXERCONTROLDETAILS_UNSIGNED valueUnsigned;
+
+    controlDetails.dwControlID    = dwControlID;
+    controlDetails.cbStruct       = sizeof(MIXERCONTROLDETAILS);
+    controlDetails.cChannels      = 1;
+    controlDetails.cMultipleItems = 0;
+    controlDetails.cbDetails      = sizeof(MIXERCONTROLDETAILS_UNSIGNED);
+    controlDetails.paDetails      = &valueUnsigned;
+
+    valueUnsigned.dwValue         = dwValue;
+
+    // Set the unsigned value
+    //
+    res = mixerSetControlDetails(reinterpret_cast<HMIXEROBJ>(mixId), &controlDetails, MIXER_OBJECTF_MIXER | MIXER_GETCONTROLDETAILSF_VALUE);
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerSetControlDetails(MIXER_GETCONTROLDETAILSF_VALUE) failed (err=%d)", res);
+        return false;
+    }
+
+    return true;
+}
+
+// ----------------------------------------------------------------------------
+//  SetBooleanControlValue
+// ----------------------------------------------------------------------------
+
+bool AudioMixerManager::SetBooleanControlValue(UINT mixId, DWORD dwControlID, bool value) const
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "AudioMixerManager::SetBooleanControlValue(mixId=%u, dwControlID=%d, value=%d)", mixId, dwControlID, value);
+
+    MMRESULT                    res;
+    MIXERCONTROLDETAILS         controlDetails;
+    MIXERCONTROLDETAILS_BOOLEAN valueBoolean;
+
+    controlDetails.dwControlID    = dwControlID;
+    controlDetails.cbStruct       = sizeof(MIXERCONTROLDETAILS);
+    controlDetails.cChannels      = 1;
+    controlDetails.cMultipleItems = 0;
+    controlDetails.cbDetails      = sizeof(MIXERCONTROLDETAILS_BOOLEAN);
+    controlDetails.paDetails      = &valueBoolean;
+
+    if (value == true)
+        valueBoolean.fValue = TRUE;
+    else
+        valueBoolean.fValue = FALSE;
+
+    // Set the boolean value
+    //
+    res = mixerSetControlDetails(reinterpret_cast<HMIXEROBJ>(mixId), &controlDetails, MIXER_OBJECTF_MIXER | MIXER_GETCONTROLDETAILSF_VALUE);
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerSetControlDetails(MIXER_GETCONTROLDETAILSF_VALUE) failed (err=%d)", res);
+        return false;
+    }
+
+    return true;
+}
+
+// ----------------------------------------------------------------------------
+//  GetBooleanControlValue
+// ----------------------------------------------------------------------------
+
+bool AudioMixerManager::GetBooleanControlValue(UINT mixId, DWORD dwControlID, bool& value) const
+{
+    MMRESULT                    res;
+    MIXERCONTROLDETAILS         controlDetails;
+    MIXERCONTROLDETAILS_BOOLEAN valueBoolean;
+
+    controlDetails.dwControlID    = dwControlID;
+    controlDetails.cbStruct       = sizeof(MIXERCONTROLDETAILS);
+    controlDetails.cChannels      = 1;
+    controlDetails.cMultipleItems = 0;
+    controlDetails.cbDetails      = sizeof(MIXERCONTROLDETAILS_BOOLEAN);
+    controlDetails.paDetails      = &valueBoolean;
+
+    // Retrieve the boolean value
+    //
+    res = mixerGetControlDetails(reinterpret_cast<HMIXEROBJ>(mixId), &controlDetails, MIXER_OBJECTF_MIXER | MIXER_GETCONTROLDETAILSF_VALUE);
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetControlDetails(MIXER_GETCONTROLDETAILSF_VALUE) failed (err=%d)", res);
+        return false;
+    }
+
+    // Deliver the retrieved value
+    //
+    if (valueBoolean.fValue == 0)
+        value = false;
+    else
+        value = true;
+
+    return true;
+}
+
+// ----------------------------------------------------------------------------
+//  GetSelectedMuxSource
+// ----------------------------------------------------------------------------
+
+bool AudioMixerManager::GetSelectedMuxSource(UINT mixId, DWORD dwControlID, DWORD cMultipleItems, UINT& index) const
+{
+    assert(cMultipleItems <= MAX_NUMBER_OF_MULTIPLE_ITEMS);
+
+    MMRESULT                    res;
+    MIXERCONTROLDETAILS         controlDetails;
+    MIXERCONTROLDETAILS_BOOLEAN valueBoolean[MAX_NUMBER_OF_MULTIPLE_ITEMS];
+    memset(&valueBoolean, 0, sizeof(valueBoolean));
+
+    controlDetails.dwControlID    = dwControlID;
+    controlDetails.cbStruct       = sizeof(MIXERCONTROLDETAILS);
+    controlDetails.cChannels      = 1;
+    controlDetails.cMultipleItems = cMultipleItems;
+    controlDetails.cbDetails      = sizeof(MIXERCONTROLDETAILS_BOOLEAN);
+    controlDetails.paDetails      = &valueBoolean;
+
+    // Retrieve the boolean values
+    //
+    res = mixerGetControlDetails(reinterpret_cast<HMIXEROBJ>(mixId), &controlDetails, MIXER_OBJECTF_MIXER | MIXER_GETCONTROLDETAILSF_VALUE);
+    if (res != MMSYSERR_NOERROR)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id, "mixerGetControlDetails(MIXER_GETCONTROLDETAILSF_VALUE) failed (err=%d)", res);
+        return false;
+    }
+
+    // Map the current MUX setting to an index corresponding to a source index.
+    // e.g. with cMultipleItems = 3,
+    //  valueBoolean[] = {1,0,0} => index = 2
+    //  valueBoolean[] = {0,1,0} => index = 1
+    //  valueBoolean[] = {0,0,1} => index = 0
+    //
+    // If there is no "1" in the array, we assume index should be 0.
+    index = 0;
+    for (DWORD i = 0; i < cMultipleItems; i++)
+    {
+        if (valueBoolean[i].fValue > 0)
+        {
+            index = (cMultipleItems - 1) - i;
+            break;
+        }
+    }
+
+    return true;
+}
+
+// ----------------------------------------------------------------------------
+//  TraceStatusAndSupportFlags
+// ----------------------------------------------------------------------------
+
+void AudioMixerManager::TraceStatusAndSupportFlags(DWORD fdwLine) const
+{
+    TCHAR buf[128];
+
+    StringCchPrintf(buf, 128, TEXT("status & support flags : 0x%x "), fdwLine);
+
+    switch (fdwLine)
+    {
+    case MIXERLINE_LINEF_ACTIVE:
+        StringCchCat(buf, 128, TEXT("(ACTIVE DESTINATION)"));
+        break;
+    case MIXERLINE_LINEF_DISCONNECTED:
+        StringCchCat(buf, 128, TEXT("(DISCONNECTED)"));
+        break;
+    case MIXERLINE_LINEF_SOURCE:
+        StringCchCat(buf, 128, TEXT("(INACTIVE SOURCE)"));
+        break;
+    case MIXERLINE_LINEF_SOURCE | MIXERLINE_LINEF_ACTIVE:
+        StringCchCat(buf, 128, TEXT("(ACTIVE SOURCE)"));
+        break;
+    default:
+        StringCchCat(buf, 128, TEXT("(INVALID)"));
+        break;
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", WideToUTF8(buf));
+}
+
+// ----------------------------------------------------------------------------
+//  TraceComponentType
+// ----------------------------------------------------------------------------
+
+void AudioMixerManager::TraceComponentType(DWORD dwComponentType) const
+{
+    TCHAR buf[128];
+
+    StringCchPrintf(buf, 128, TEXT("component type         : 0x%x "), dwComponentType);
+
+    switch (dwComponentType)
+    {
+    // Destination
+    case MIXERLINE_COMPONENTTYPE_DST_UNDEFINED:
+        StringCchCat(buf, 128, TEXT("(DST_UNDEFINED)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_DST_DIGITAL:
+        StringCchCat(buf, 128, TEXT("(DST_DIGITAL)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_DST_LINE:
+        StringCchCat(buf, 128, TEXT("(DST_LINE)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_DST_MONITOR:
+        StringCchCat(buf, 128, TEXT("(DST_MONITOR)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_DST_SPEAKERS:
+        StringCchCat(buf, 128, TEXT("(DST_SPEAKERS)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_DST_HEADPHONES:
+        StringCchCat(buf, 128, TEXT("(DST_HEADPHONES)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_DST_TELEPHONE:
+        StringCchCat(buf, 128, TEXT("(DST_TELEPHONE)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_DST_WAVEIN:
+        StringCchCat(buf, 128, TEXT("(DST_WAVEIN)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_DST_VOICEIN:
+        StringCchCat(buf, 128, TEXT("(DST_VOICEIN)"));
+        break;
+    // Source
+    case MIXERLINE_COMPONENTTYPE_SRC_UNDEFINED:
+        StringCchCat(buf, 128, TEXT("(SRC_UNDEFINED)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_SRC_DIGITAL:
+        StringCchCat(buf, 128, TEXT("(SRC_DIGITAL)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_SRC_LINE:
+        StringCchCat(buf, 128, TEXT("(SRC_LINE)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_SRC_MICROPHONE:
+        StringCchCat(buf, 128, TEXT("(SRC_MICROPHONE)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_SRC_SYNTHESIZER:
+        StringCchCat(buf, 128, TEXT("(SRC_SYNTHESIZER)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_SRC_COMPACTDISC:
+        StringCchCat(buf, 128, TEXT("(SRC_COMPACTDISC)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_SRC_TELEPHONE:
+        StringCchCat(buf, 128, TEXT("(SRC_TELEPHONE)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_SRC_PCSPEAKER:
+        StringCchCat(buf, 128, TEXT("(SRC_PCSPEAKER)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_SRC_WAVEOUT:
+        StringCchCat(buf, 128, TEXT("(SRC_WAVEOUT)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_SRC_AUXILIARY:
+        StringCchCat(buf, 128, TEXT("(SRC_AUXILIARY)"));
+        break;
+    case MIXERLINE_COMPONENTTYPE_SRC_ANALOG:
+        StringCchCat(buf, 128, TEXT("(SRC_ANALOG)"));
+        break;
+    default:
+        StringCchCat(buf, 128, TEXT("(INVALID)"));
+        break;
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", WideToUTF8(buf));
+}
+
+// ----------------------------------------------------------------------------
+//  TraceTargetType
+// ----------------------------------------------------------------------------
+
+void AudioMixerManager::TraceTargetType(DWORD dwType) const
+{
+    TCHAR buf[128];
+
+    StringCchPrintf(buf, 128, TEXT("media device type      : 0x%x "), dwType);
+
+    switch (dwType)
+    {
+    case MIXERLINE_TARGETTYPE_UNDEFINED:
+        StringCchCat(buf, 128, TEXT("(UNDEFINED)"));
+        break;
+    case MIXERLINE_TARGETTYPE_WAVEOUT:
+        StringCchCat(buf, 128, TEXT("(WAVEOUT)"));
+        break;
+    case MIXERLINE_TARGETTYPE_WAVEIN:
+        StringCchCat(buf, 128, TEXT("(WAVEIN)"));
+        break;
+    case MIXERLINE_TARGETTYPE_MIDIOUT:
+        StringCchCat(buf, 128, TEXT("(MIDIOUT)"));
+        break;
+    case MIXERLINE_TARGETTYPE_MIDIIN:
+        StringCchCat(buf, 128, TEXT("(MIDIIN)"));
+        break;
+    default:
+        StringCchCat(buf, 128, TEXT("(INVALID)"));
+        break;
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", WideToUTF8(buf));
+}
+
+// ----------------------------------------------------------------------------
+//  TraceControlType
+// ----------------------------------------------------------------------------
+
+void AudioMixerManager::TraceControlType(DWORD dwControlType) const
+{
+    TCHAR buf[128];
+
+    // Class type classification
+    //
+    StringCchPrintf(buf, 128, TEXT("class type             : 0x%x "), dwControlType);
+
+    switch (dwControlType & MIXERCONTROL_CT_CLASS_MASK)
+    {
+    case MIXERCONTROL_CT_CLASS_CUSTOM:
+        StringCchCat(buf, 128, TEXT("(CT_CLASS_CUSTOM)"));
+        break;
+    case MIXERCONTROL_CT_CLASS_METER:
+        StringCchCat(buf, 128, TEXT("(CT_CLASS_METER)"));
+        break;
+    case MIXERCONTROL_CT_CLASS_SWITCH:
+        StringCchCat(buf, 128, TEXT("(CT_CLASS_SWITCH)"));
+        break;
+    case MIXERCONTROL_CT_CLASS_NUMBER:
+        StringCchCat(buf, 128, TEXT("(CT_CLASS_NUMBER)"));
+        break;
+    case MIXERCONTROL_CT_CLASS_SLIDER:
+        StringCchCat(buf, 128, TEXT("(CT_CLASS_SLIDER)"));
+        break;
+    case MIXERCONTROL_CT_CLASS_FADER:
+        StringCchCat(buf, 128, TEXT("(CT_CLASS_FADER)"));
+        break;
+    case MIXERCONTROL_CT_CLASS_TIME:
+        StringCchCat(buf, 128, TEXT("(CT_CLASS_TIME)"));
+        break;
+    case MIXERCONTROL_CT_CLASS_LIST:
+        StringCchCat(buf, 128, TEXT("(CT_CLASS_LIST)"));
+        break;
+    default:
+        StringCchCat(buf, 128, TEXT("(INVALID)"));
+        break;
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", WideToUTF8(buf));
+
+    // Control type (for each class)
+    //
+    StringCchPrintf(buf, 128, TEXT("control type           : 0x%x "), dwControlType);
+
+    switch (dwControlType)
+    {
+    case MIXERCONTROL_CONTROLTYPE_CUSTOM:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_CUSTOM)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_BOOLEANMETER:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_BOOLEANMETER)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_SIGNEDMETER:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_SIGNEDMETER)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_PEAKMETER:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_PEAKMETER)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_UNSIGNEDMETER:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_UNSIGNEDMETER)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_BOOLEAN:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_BOOLEAN)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_ONOFF:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_ONOFF)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_MUTE:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_MUTE)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_MONO:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_MONO)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_LOUDNESS:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_LOUDNESS)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_STEREOENH:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_STEREOENH)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_BASS_BOOST:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_BASS_BOOST)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_BUTTON:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_BUTTON)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_DECIBELS:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_DECIBELS)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_SIGNED:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_SIGNED)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_UNSIGNED:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_UNSIGNED)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_PERCENT:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_PERCENT)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_SLIDER:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_SLIDER)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_PAN:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_PAN)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_QSOUNDPAN:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_QSOUNDPAN)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_FADER:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_FADER)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_VOLUME:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_VOLUME)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_BASS:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_BASS)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_TREBLE:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_TREBLE)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_EQUALIZER:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_EQUALIZER)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_SINGLESELECT:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_SINGLESELECT)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_MUX:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_MUX)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_MULTIPLESELECT:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_MULTIPLESELECT)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_MIXER:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_MIXER)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_MICROTIME:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_MICROTIME)"));
+        break;
+    case MIXERCONTROL_CONTROLTYPE_MILLITIME:
+        StringCchCat(buf, 128, TEXT("(CONTROLTYPE_MILLITIME)"));
+        break;
+    default:
+        StringCchCat(buf, 128, TEXT("(INVALID)"));
+        break;
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", WideToUTF8(buf));
+}
+
+// ----------------------------------------------------------------------------
+//  TraceControlStatusAndSupportFlags
+//
+//  fdwControl
+//
+//  Status and support flags for the audio line control. The following values
+//  are defined:
+//
+//  MIXERCONTROL_CONTROLF_DISABLED
+//
+//  The control is disabled, perhaps due to other settings for the mixer hardware,
+//  and cannot be used. An application can read current settings from a
+//  disabled control, but it cannot apply settings.
+//
+//  MIXERCONTROL_CONTROLF_MULTIPLE
+//
+//  The control has two or more settings per channel. An equalizer, for example,
+//  requires this flag because each frequency band can be set to a different value.
+//  An equalizer that affects both channels of a stereo line in a uniform fashion
+//  will also specify the MIXERCONTROL_CONTROLF_UNIFORM flag.
+//
+//  MIXERCONTROL_CONTROLF_UNIFORM
+//
+//  The control acts on all channels of a multichannel line in a uniform fashion.
+//  For example, a control that mutes both channels of a stereo line would set
+//  this flag. Most MIXERCONTROL_CONTROLTYPE_MUX and
+//  MIXERCONTROL_CONTROLTYPE_MIXER controls also specify the
+//  MIXERCONTROL_CONTROLF_UNIFORM flag.
+// ----------------------------------------------------------------------------
+
+void AudioMixerManager::TraceControlStatusAndSupportFlags(DWORD fdwControl) const
+{
+    TCHAR buf[128];
+
+    StringCchPrintf(buf, 128, TEXT("control support flags  : 0x%x "), fdwControl);
+
+    if (fdwControl & MIXERCONTROL_CONTROLF_DISABLED)
+    {
+        // The control is disabled, perhaps due to other settings for the mixer hardware,
+        // and cannot be used. An application can read current settings from a disabled
+        // control, but it cannot apply settings.
+        StringCchCat(buf, 128, TEXT("(CONTROLF_DISABLED)"));
+    }
+
+    if (fdwControl & MIXERCONTROL_CONTROLF_MULTIPLE)
+    {
+        // The control has two or more settings per channel. An equalizer, for example,
+        // requires this flag because each frequency band can be set to a different
+        // value. An equalizer that affects both channels of a stereo line in a
+        // uniform fashion will also specify the MIXERCONTROL_CONTROLF_UNIFORM flag.
+        StringCchCat(buf, 128, TEXT("(CONTROLF_MULTIPLE)"));
+    }
+
+    if (fdwControl & MIXERCONTROL_CONTROLF_UNIFORM)
+    {
+        // The control acts on all channels of a multichannel line in a uniform
+        // fashion. For example, a control that mutes both channels of a stereo
+        // line would set this flag. Most MIXERCONTROL_CONTROLTYPE_MUX and
+        // MIXERCONTROL_CONTROLTYPE_MIXER controls also specify the
+        // MIXERCONTROL_CONTROLF_UNIFORM flag.
+        StringCchCat(buf, 128, TEXT("(CONTROLF_UNIFORM)"));
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", WideToUTF8(buf));
+}
+
+// ----------------------------------------------------------------------------
+//  ClearSpeakerState I (II)
+// ----------------------------------------------------------------------------
+
+void AudioMixerManager::ClearSpeakerState(UINT idx)
+{
+    _speakerState[idx].dwLineID = 0L;
+    _speakerState[idx].dwVolumeControlID = 0L;
+    _speakerState[idx].dwMuteControlID = 0L;
+    _speakerState[idx].speakerIsValid = false;
+    _speakerState[idx].muteControlIsValid = false;
+    _speakerState[idx].volumeControlIsValid = false;
+}
+
+// ----------------------------------------------------------------------------
+//  ClearSpeakerState II (II)
+// ----------------------------------------------------------------------------
+
+void AudioMixerManager::ClearSpeakerState()
+{
+    for (int i = 0; i < MAX_NUMBER_MIXER_DEVICES; i++)
+    {
+        ClearSpeakerState(i);
+    }
+}
+
+// ----------------------------------------------------------------------------
+//  SpeakerIsValid
+// ----------------------------------------------------------------------------
+
+bool AudioMixerManager::SpeakerIsValid(UINT idx) const
+{
+    return (_speakerState[idx].speakerIsValid);
+}
+
+// ----------------------------------------------------------------------------
+//  ValidSpeakers
+//
+//  Counts number of valid speaker destinations for all mixer devices.
+// ----------------------------------------------------------------------------
+
+UINT AudioMixerManager::ValidSpeakers() const
+{
+    UINT nSpeakers(0);
+    for (int i = 0; i < MAX_NUMBER_MIXER_DEVICES; i++)
+    {
+        if (SpeakerIsValid(i))
+            nSpeakers++;
+    }
+    return nSpeakers;
+}
+
+// ----------------------------------------------------------------------------
+//  ClearMicrophoneState I (II)
+// ----------------------------------------------------------------------------
+
+void AudioMixerManager::ClearMicrophoneState(UINT idx)
+{
+    _microphoneState[idx].dwLineID = 0L;
+    _microphoneState[idx].dwVolumeControlID = 0L;
+    _microphoneState[idx].dwMuteControlID = 0L;
+    _microphoneState[idx].dwOnOffControlID = 0L;
+    _microphoneState[idx].microphoneIsValid = false;
+    _microphoneState[idx].muteControlIsValid = false;
+    _microphoneState[idx].volumeControlIsValid = false;
+    _microphoneState[idx].onOffControlIsValid = false;
+}
+
+// ----------------------------------------------------------------------------
+//  ClearMicrophoneState II (II)
+// ----------------------------------------------------------------------------
+
+void AudioMixerManager::ClearMicrophoneState()
+{
+    for (int i = 0; i < MAX_NUMBER_MIXER_DEVICES; i++)
+    {
+        ClearMicrophoneState(i);
+    }
+}
+
+// ----------------------------------------------------------------------------
+//  MicrophoneIsValid
+// ----------------------------------------------------------------------------
+
+bool AudioMixerManager::MicrophoneIsValid(UINT idx) const
+{
+    return (_microphoneState[idx].microphoneIsValid);
+
+}
+
+// ----------------------------------------------------------------------------
+//  ValidMicrophones
+//
+//  Counts number of valid speaker destinations for all mixer devices.
+//  To be valid, a speaker destination line must exist.
+// ----------------------------------------------------------------------------
+
+UINT AudioMixerManager::ValidMicrophones() const
+{
+    UINT nMicrophones(0);
+    for (int i = 0; i < MAX_NUMBER_MIXER_DEVICES; i++)
+    {
+        if (MicrophoneIsValid(i))
+            nMicrophones++;
+    }
+    return nMicrophones;
+}
+
+// ----------------------------------------------------------------------------
+//  TraceWaveInError
+// ----------------------------------------------------------------------------
+
+void AudioMixerManager::TraceWaveInError(MMRESULT error) const
+{
+    TCHAR buf[MAXERRORLENGTH];
+    TCHAR msg[MAXERRORLENGTH];
+
+    StringCchPrintf(buf, MAXERRORLENGTH, TEXT("Error details: "));
+    waveInGetErrorText(error, msg, MAXERRORLENGTH);
+    StringCchCat(buf, MAXERRORLENGTH, msg);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", WideToUTF8(buf));
+}
+
+// ----------------------------------------------------------------------------
+//  TraceWaveOutError
+// ----------------------------------------------------------------------------
+
+void AudioMixerManager::TraceWaveOutError(MMRESULT error) const
+{
+    TCHAR buf[MAXERRORLENGTH];
+    TCHAR msg[MAXERRORLENGTH];
+
+    StringCchPrintf(buf, MAXERRORLENGTH, TEXT("Error details: "));
+    waveOutGetErrorText(error, msg, MAXERRORLENGTH);
+    StringCchCat(buf, MAXERRORLENGTH, msg);
+    WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "%s", WideToUTF8(buf));
+}
+
+// ----------------------------------------------------------------------------
+//  WideToUTF8
+// ----------------------------------------------------------------------------
+
+char* AudioMixerManager::WideToUTF8(const TCHAR* src) const {
+#ifdef UNICODE
+    const size_t kStrLen = sizeof(_str);
+    memset(_str, 0, kStrLen);
+    // Get required size (in bytes) to be able to complete the conversion.
+    int required_size = WideCharToMultiByte(CP_UTF8, 0, src, -1, _str, 0, 0, 0);
+    if (required_size <= kStrLen)
+    {
+        // Process the entire input string, including the terminating null char.
+        if (WideCharToMultiByte(CP_UTF8, 0, src, -1, _str, kStrLen, 0, 0) == 0)
+            memset(_str, 0, kStrLen);
+    }
+    return _str;
+#else
+    return const_cast<char*>(src);
+#endif
+}
+
+}  // namespace webrtc
diff --git a/src/modules/audio_device/main/source/win/audio_mixer_manager_win.h b/src/modules/audio_device/main/source/win/audio_mixer_manager_win.h
new file mode 100644
index 0000000..da9de47
--- /dev/null
+++ b/src/modules/audio_device/main/source/win/audio_mixer_manager_win.h
@@ -0,0 +1,139 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_MIXER_MANAGER_WIN_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_MIXER_MANAGER_WIN_H
+
+#include "typedefs.h"
+#include "audio_device.h"
+#include "critical_section_wrapper.h"
+#include <Windows.h>
+#include <mmsystem.h>
+
+namespace webrtc {
+
+class AudioMixerManager
+{
+public:
+    enum { MAX_NUMBER_MIXER_DEVICES = 40 };
+    enum { MAX_NUMBER_OF_LINE_CONTROLS = 20 };
+    enum { MAX_NUMBER_OF_MULTIPLE_ITEMS = 20 };
+    struct SpeakerLineInfo
+    {
+        DWORD dwLineID;
+        bool  speakerIsValid;
+        DWORD dwVolumeControlID;
+        bool  volumeControlIsValid;
+        DWORD dwMuteControlID;
+        bool  muteControlIsValid;
+    };
+    struct MicrophoneLineInfo
+    {
+        DWORD dwLineID;
+        bool  microphoneIsValid;
+        DWORD dwVolumeControlID;
+        bool  volumeControlIsValid;
+        DWORD dwMuteControlID;
+        bool  muteControlIsValid;
+        DWORD dwOnOffControlID;
+        bool  onOffControlIsValid;
+    };
+public:
+    WebRtc_Word32 EnumerateAll();
+    WebRtc_Word32 EnumerateSpeakers();
+    WebRtc_Word32 EnumerateMicrophones();
+    WebRtc_Word32 OpenSpeaker(AudioDeviceModule::WindowsDeviceType device);
+    WebRtc_Word32 OpenSpeaker(WebRtc_UWord16 index);
+    WebRtc_Word32 OpenMicrophone(AudioDeviceModule::WindowsDeviceType device);
+    WebRtc_Word32 OpenMicrophone(WebRtc_UWord16 index);
+    WebRtc_Word32 SetSpeakerVolume(WebRtc_UWord32 volume);
+    WebRtc_Word32 SpeakerVolume(WebRtc_UWord32& volume) const;
+    WebRtc_Word32 MaxSpeakerVolume(WebRtc_UWord32& maxVolume) const;
+    WebRtc_Word32 MinSpeakerVolume(WebRtc_UWord32& minVolume) const;
+    WebRtc_Word32 SpeakerVolumeStepSize(WebRtc_UWord16& stepSize) const;
+    WebRtc_Word32 SpeakerVolumeIsAvailable(bool& available);
+    WebRtc_Word32 SpeakerMuteIsAvailable(bool& available);
+    WebRtc_Word32 SetSpeakerMute(bool enable);
+    WebRtc_Word32 SpeakerMute(bool& enabled) const;
+    WebRtc_Word32 MicrophoneMuteIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneMute(bool enable);
+    WebRtc_Word32 MicrophoneMute(bool& enabled) const;
+    WebRtc_Word32 MicrophoneBoostIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneBoost(bool enable);
+    WebRtc_Word32 MicrophoneBoost(bool& enabled) const;
+    WebRtc_Word32 MicrophoneVolumeIsAvailable(bool& available);
+    WebRtc_Word32 SetMicrophoneVolume(WebRtc_UWord32 volume);
+    WebRtc_Word32 MicrophoneVolume(WebRtc_UWord32& volume) const;
+    WebRtc_Word32 MaxMicrophoneVolume(WebRtc_UWord32& maxVolume) const;
+    WebRtc_Word32 MinMicrophoneVolume(WebRtc_UWord32& minVolume) const;
+    WebRtc_Word32 MicrophoneVolumeStepSize(WebRtc_UWord16& stepSize) const;
+    WebRtc_Word32 Close();
+    WebRtc_Word32 CloseSpeaker();
+    WebRtc_Word32 CloseMicrophone();
+    bool SpeakerIsInitialized() const;
+    bool MicrophoneIsInitialized() const;
+    UINT Devices() const;
+
+private:
+    UINT DestinationLines(UINT mixId) const;
+    UINT SourceLines(UINT mixId, DWORD destId) const;
+    bool GetCapabilities(UINT mixId, MIXERCAPS& caps, bool trace = false) const;
+    bool GetDestinationLineInfo(UINT mixId, DWORD destId, MIXERLINE& line, bool trace = false) const;
+    bool GetSourceLineInfo(UINT mixId, DWORD destId, DWORD srcId, MIXERLINE& line, bool trace = false) const;
+
+    bool GetAllLineControls(UINT mixId, const MIXERLINE& line, MIXERCONTROL* controlArray, bool trace = false) const;
+    bool GetLineControl(UINT mixId, DWORD dwControlID, MIXERCONTROL& control) const;
+    bool GetControlDetails(UINT mixId, MIXERCONTROL& controlArray, bool trace = false) const;
+    bool GetUnsignedControlValue(UINT mixId, DWORD dwControlID, DWORD& dwValue) const;
+    bool SetUnsignedControlValue(UINT mixId, DWORD dwControlID, DWORD dwValue) const;
+    bool SetBooleanControlValue(UINT mixId, DWORD dwControlID, bool value) const;
+    bool GetBooleanControlValue(UINT mixId, DWORD dwControlID, bool& value) const;
+    bool GetSelectedMuxSource(UINT mixId, DWORD dwControlID, DWORD cMultipleItems, UINT& index) const;
+
+private:
+    void ClearSpeakerState();
+    void ClearSpeakerState(UINT idx);
+    void ClearMicrophoneState();
+    void ClearMicrophoneState(UINT idx);
+    bool SpeakerIsValid(UINT idx) const;
+    UINT ValidSpeakers() const;
+    bool MicrophoneIsValid(UINT idx) const;
+    UINT ValidMicrophones() const;
+
+    void TraceStatusAndSupportFlags(DWORD fdwLine) const;
+    void TraceTargetType(DWORD dwType) const;
+    void TraceComponentType(DWORD dwComponentType) const;
+    void TraceControlType(DWORD dwControlType) const;
+    void TraceControlStatusAndSupportFlags(DWORD fdwControl) const;
+    void TraceWaveInError(MMRESULT error) const;
+    void TraceWaveOutError(MMRESULT error) const;
+    // Converts from wide-char to UTF-8 if UNICODE is defined.
+    // Does nothing if UNICODE is undefined.
+    char* WideToUTF8(const TCHAR* src) const;
+
+public:
+    AudioMixerManager(const WebRtc_Word32 id);
+    ~AudioMixerManager();
+
+private:
+    CriticalSectionWrapper& _critSect;
+    WebRtc_Word32           _id;
+    HMIXER                  _outputMixerHandle;
+    UINT                    _outputMixerID;
+    HMIXER                  _inputMixerHandle;
+    UINT                    _inputMixerID;
+    SpeakerLineInfo         _speakerState[MAX_NUMBER_MIXER_DEVICES];
+    MicrophoneLineInfo      _microphoneState[MAX_NUMBER_MIXER_DEVICES];
+    mutable char            _str[MAXERRORLENGTH];
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_MIXER_MANAGER_H
diff --git a/src/modules/audio_device/main/test/README.txt b/src/modules/audio_device/main/test/README.txt
new file mode 100644
index 0000000..7435ac5
--- /dev/null
+++ b/src/modules/audio_device/main/test/README.txt
@@ -0,0 +1,23 @@
+INSTRUCTIONS:
+
+- Start with test #3 (Device enumeration) to get an overview of the available
+  audio devices.
+- Next, proceed with test #4 (Device selection) to get more details about 
+  the supported functions for each audio device.
+- Verify two-way audio in test #5. 
+  Repeat this test for different selections of playout and recording devices.
+- More detailed tests (volume, mute etc.) can also be performed using #6-#11.
+
+NOTE:
+
+- Some tests requires that the user opens up the audio mixer dialog and 
+  verifies that a certain action (e.g. Mute ON/OFF) is executed correctly.
+- Files can be recorded during some tests to enable off-line analysis.
+- Full support of 'Default Communication' devices requires Windows 7.
+- If a test consists of several sub tests, press any key to start a new sub test.
+
+KNOWN ISSUES:
+
+- Microphone Boost control is not supported on Windows Vista or Windows 7.
+- Speaker and microphone volume controls will not work as intended on Windows
+  Vista if a 'Default Communication' device is selected in any direction.
diff --git a/src/modules/audio_device/main/test/android/audio_device_android_test/.classpath b/src/modules/audio_device/main/test/android/audio_device_android_test/.classpath
new file mode 100644
index 0000000..6e9239f
--- /dev/null
+++ b/src/modules/audio_device/main/test/android/audio_device_android_test/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>

+<classpath>

+	<classpathentry kind="src" path="src"/>

+	<classpathentry kind="src" path="gen"/>

+	<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>

+	<classpathentry kind="output" path="bin"/>

+</classpath>

diff --git a/src/modules/audio_device/main/test/android/audio_device_android_test/.project b/src/modules/audio_device/main/test/android/audio_device_android_test/.project
new file mode 100644
index 0000000..38a6307
--- /dev/null
+++ b/src/modules/audio_device/main/test/android/audio_device_android_test/.project
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>

+<projectDescription>

+	<name>AudioDeviceAndroidTest</name>

+	<comment></comment>

+	<projects>

+	</projects>

+	<buildSpec>

+		<buildCommand>

+			<name>com.android.ide.eclipse.adt.ResourceManagerBuilder</name>

+			<arguments>

+			</arguments>

+		</buildCommand>

+		<buildCommand>

+			<name>com.android.ide.eclipse.adt.PreCompilerBuilder</name>

+			<arguments>

+			</arguments>

+		</buildCommand>

+		<buildCommand>

+			<name>org.eclipse.jdt.core.javabuilder</name>

+			<arguments>

+			</arguments>

+		</buildCommand>

+		<buildCommand>

+			<name>com.android.ide.eclipse.adt.ApkBuilder</name>

+			<arguments>

+			</arguments>

+		</buildCommand>

+	</buildSpec>

+	<natures>

+		<nature>com.android.ide.eclipse.adt.AndroidNature</nature>

+		<nature>org.eclipse.jdt.core.javanature</nature>

+	</natures>

+</projectDescription>

diff --git a/src/modules/audio_device/main/test/android/audio_device_android_test/AndroidManifest.xml b/src/modules/audio_device/main/test/android/audio_device_android_test/AndroidManifest.xml
new file mode 100644
index 0000000..d8117f5
--- /dev/null
+++ b/src/modules/audio_device/main/test/android/audio_device_android_test/AndroidManifest.xml
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+	  android:versionCode="1"
+	  android:versionName="1.0" package="org.webrtc.voiceengine.test">
+  <application android:icon="@drawable/icon"
+	       android:label="@string/app_name"
+	       android:debuggable="true">
+    <activity android:label="@string/app_name"
+	      android:name="AudioDeviceAndroidTest">
+      <intent-filter>
+        <action android:name="android.intent.action.MAIN" />
+        <category android:name="android.intent.category.LAUNCHER" />
+      </intent-filter>
+    </activity>
+  </application>
+
+  <uses-sdk android:minSdkVersion="3"></uses-sdk>
+  <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS">
+  </uses-permission>
+  <uses-permission android:name="android.permission.RECORD_AUDIO">
+  </uses-permission>
+</manifest> 
diff --git a/src/modules/audio_device/main/test/android/audio_device_android_test/default.properties b/src/modules/audio_device/main/test/android/audio_device_android_test/default.properties
new file mode 100644
index 0000000..19ddebd
--- /dev/null
+++ b/src/modules/audio_device/main/test/android/audio_device_android_test/default.properties
@@ -0,0 +1,11 @@
+# This file is automatically generated by Android Tools.

+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!

+# 

+# This file must be checked in Version Control Systems.

+# 

+# To customize properties used by the Ant build system use,

+# "build.properties", and override values to adapt the script to your

+# project structure.

+

+# Project target.

+target=android-3

diff --git a/src/modules/audio_device/main/test/android/audio_device_android_test/gen/org/webrtc/voiceengine/test/R.java b/src/modules/audio_device/main/test/android/audio_device_android_test/gen/org/webrtc/voiceengine/test/R.java
new file mode 100644
index 0000000..a295780
--- /dev/null
+++ b/src/modules/audio_device/main/test/android/audio_device_android_test/gen/org/webrtc/voiceengine/test/R.java
@@ -0,0 +1,26 @@
+/* AUTO-GENERATED FILE.  DO NOT MODIFY.

+ *

+ * This class was automatically generated by the

+ * aapt tool from the resource data it found.  It

+ * should not be modified by hand.

+ */

+

+package org.webrtc.voiceengine.test;

+

+public final class R {

+    public static final class attr {

+    }

+    public static final class drawable {

+        public static final int icon=0x7f020000;

+    }

+    public static final class id {

+        public static final int Button01=0x7f050000;

+    }

+    public static final class layout {

+        public static final int main=0x7f030000;

+    }

+    public static final class string {

+        public static final int app_name=0x7f040000;

+        public static final int run_button=0x7f040001;

+    }

+}

diff --git a/src/modules/audio_device/main/test/android/audio_device_android_test/jni/audio_device_android_test.cc b/src/modules/audio_device/main/test/android/audio_device_android_test/jni/audio_device_android_test.cc
new file mode 100644
index 0000000..f46c6b2
--- /dev/null
+++ b/src/modules/audio_device/main/test/android/audio_device_android_test/jni/audio_device_android_test.cc
@@ -0,0 +1,108 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h> // memset
+#include <android/log.h>
+
+#include "org_webrtc_voiceengine_test_AudioDeviceAndroidTest.h"
+
+#include "../../../../interface/audio_device.h"
+
+#define LOG_TAG "WebRTC ADM Native"
+
+void api_test();
+void func_test(int);
+
+typedef struct
+{
+    // Other
+    JavaVM* jvm;
+} AdmData;
+
+static AdmData admData;
+
+jint JNI_OnLoad(JavaVM* vm, void* /*reserved*/)
+{
+    if (!vm)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, LOG_TAG,
+                            "JNI_OnLoad did not receive a valid VM pointer");
+        return -1;
+    }
+
+    // Get JNI
+    JNIEnv* env;
+    if (JNI_OK != vm->GetEnv(reinterpret_cast<void**> (&env),
+                             JNI_VERSION_1_4))
+    {
+        __android_log_write(ANDROID_LOG_ERROR, LOG_TAG,
+                            "JNI_OnLoad could not get JNI env");
+        return -1;
+    }
+
+    // Get class to register the native functions with
+    // jclass regClass =
+    // env->FindClass("org/webrtc/voiceengine/test/AudioDeviceAndroidTest");
+    // if (!regClass) {
+    // return -1; // Exception thrown
+    // }
+
+    // Register native functions
+    // JNINativeMethod methods[1];
+    // methods[0].name = NULL;
+    // methods[0].signature = NULL;
+    // methods[0].fnPtr = NULL;
+    // if (JNI_OK != env->RegisterNatives(regClass, methods, 1))
+    // {
+    // return -1;
+    // }
+
+    // Init VoiceEngine data
+    memset(&admData, 0, sizeof(admData));
+
+    // Store the JVM
+    admData.jvm = vm;
+
+    return JNI_VERSION_1_4;
+}
+
+JNIEXPORT jboolean JNICALL
+Java_org_webrtc_voiceengine_test_AudioDeviceAndroidTest_NativeInit(JNIEnv * env,
+                                                                   jclass)
+{
+    // Look up and cache any interesting class, field and method IDs for
+    // any used java class here
+
+    return true;
+}
+
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AudioDeviceAndroidTest_RunTest(JNIEnv *env,
+                                                                jobject context,
+                                                                jint test)
+{
+    // Set instance independent Java objects
+    webrtc::AudioDeviceModule::SetAndroidObjects(admData.jvm, env, context);
+
+    // Start test
+    if (0 == test)
+    {
+        api_test();
+    }
+    else
+    {
+        func_test(test);
+    }
+
+    // Clear instance independent Java objects
+    webrtc::AudioDeviceModule::SetAndroidObjects(NULL, NULL, NULL);
+
+    return 0;
+}
diff --git a/src/modules/audio_device/main/test/android/audio_device_android_test/jni/org_webrtc_voiceengine_test_AudioDeviceAndroidTest.h b/src/modules/audio_device/main/test/android/audio_device_android_test/jni/org_webrtc_voiceengine_test_AudioDeviceAndroidTest.h
new file mode 100644
index 0000000..5cbc56f
--- /dev/null
+++ b/src/modules/audio_device/main/test/android/audio_device_android_test/jni/org_webrtc_voiceengine_test_AudioDeviceAndroidTest.h
@@ -0,0 +1,29 @@
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class org_webrtc_voiceengine_test_AudioDeviceAndroidTest */
+
+#ifndef _Included_org_webrtc_voiceengine_test_AudioDeviceAndroidTest
+#define _Included_org_webrtc_voiceengine_test_AudioDeviceAndroidTest
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     org_webrtc_voiceengine_test_AudioDeviceAndroidTest
+ * Method:    NativeInit
+ * Signature: ()Z
+ */
+JNIEXPORT jboolean JNICALL Java_org_webrtc_voiceengine_test_AudioDeviceAndroidTest_NativeInit
+  (JNIEnv *, jclass);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AudioDeviceAndroidTest
+ * Method:    RunTest
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AudioDeviceAndroidTest_RunTest
+  (JNIEnv *, jobject, jint);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/src/modules/audio_device/main/test/android/audio_device_android_test/res/drawable/icon.png b/src/modules/audio_device/main/test/android/audio_device_android_test/res/drawable/icon.png
new file mode 100644
index 0000000..a07c69f
--- /dev/null
+++ b/src/modules/audio_device/main/test/android/audio_device_android_test/res/drawable/icon.png
Binary files differ
diff --git a/src/modules/audio_device/main/test/android/audio_device_android_test/res/layout/main.xml b/src/modules/audio_device/main/test/android/audio_device_android_test/res/layout/main.xml
new file mode 100644
index 0000000..6161f1d
--- /dev/null
+++ b/src/modules/audio_device/main/test/android/audio_device_android_test/res/layout/main.xml
@@ -0,0 +1,12 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+	      android:orientation="vertical"
+	      android:layout_width="fill_parent"
+	      android:layout_height="fill_parent">
+
+  <Button android:text="@string/run_button"
+	  android:id="@+id/Button01"
+	  android:layout_width="wrap_content"
+	  android:layout_height="wrap_content">
+  </Button>
+</LinearLayout>
diff --git a/src/modules/audio_device/main/test/android/audio_device_android_test/res/values/strings.xml b/src/modules/audio_device/main/test/android/audio_device_android_test/res/values/strings.xml
new file mode 100644
index 0000000..bbb6f51
--- /dev/null
+++ b/src/modules/audio_device/main/test/android/audio_device_android_test/res/values/strings.xml
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    
+    <string name="app_name">WebRTC Audio Device Android Test</string>
+<string name="run_button">Run Test</string>
+</resources>
diff --git a/src/modules/audio_device/main/test/android/audio_device_android_test/src/org/webrtc/voiceengine/AudioDeviceAndroid.java b/src/modules/audio_device/main/test/android/audio_device_android_test/src/org/webrtc/voiceengine/AudioDeviceAndroid.java
new file mode 100644
index 0000000..4863168
--- /dev/null
+++ b/src/modules/audio_device/main/test/android/audio_device_android_test/src/org/webrtc/voiceengine/AudioDeviceAndroid.java
@@ -0,0 +1 @@
+../../../../../../../source/android/org/webrtc/voiceengine/AudioDeviceAndroid.java
diff --git a/src/modules/audio_device/main/test/android/audio_device_android_test/src/org/webrtc/voiceengine/test/AudioDeviceAndroidTest.java b/src/modules/audio_device/main/test/android/audio_device_android_test/src/org/webrtc/voiceengine/test/AudioDeviceAndroidTest.java
new file mode 100644
index 0000000..b87af46
--- /dev/null
+++ b/src/modules/audio_device/main/test/android/audio_device_android_test/src/org/webrtc/voiceengine/test/AudioDeviceAndroidTest.java
@@ -0,0 +1,69 @@
+package org.webrtc.voiceengine.test;

+

+import android.app.Activity;

+import android.media.AudioManager;

+import android.os.Bundle;

+import android.util.Log;

+import android.view.View;

+import android.widget.Button;

+

+public class AudioDeviceAndroidTest extends Activity {

+    private Thread _testThread;

+

+    /** Called when the activity is first created. */

+    @Override

+    public void onCreate(Bundle savedInstanceState) {

+        super.onCreate(savedInstanceState);

+        setContentView(R.layout.main);

+

+        final Button buttonStart = (Button) findViewById(R.id.Button01);

+        // buttonStart.setWidth(200);

+        // button.layout(50, 50, 100, 40);

+        buttonStart.setOnClickListener(new View.OnClickListener() {

+            public void onClick(View v) {

+                _testThread = new Thread(_testProc);

+                _testThread.start();

+            }

+        });

+

+        // Suggest to use the voice call audio stream for hardware volume

+        // controls

+        setVolumeControlStream(AudioManager.STREAM_VOICE_CALL);

+

+        DoLog("Started WebRTC Android ADM Test");

+    }

+

+    private Runnable _testProc = new Runnable() {

+        public void run() {

+            // TODO(xians), choose test from GUI

+            // Select test here, 0 for API test, 1-> for Func tests

+            RunTest(5);

+        }

+    };

+

+    private void DoLog(String msg) {

+        Log.d("*WebRTC ADM*", msg);

+    }

+

+    // //////////////// Native function prototypes ////////////////////

+

+    // Init wrapper

+    private native static boolean NativeInit();

+

+    // Function used to call test

+    private native int RunTest(int testType);

+

+    // Load native library

+    static {

+        Log.d("*WebRTC ADM*", "Loading audio_device_android_test...");

+        System.loadLibrary("audio_device_android_test");

+

+        Log.d("*WebRTC ADM*", "Calling native init...");

+        if (!NativeInit()) {

+            Log.e("*WebRTC ADM*", "Native init failed");

+            throw new RuntimeException("Native init failed");

+        } else {

+            Log.d("*WebRTC ADM*", "Native init successful");

+        }

+    }

+}

diff --git a/src/modules/audio_device/main/test/audio_device_test_api.cc b/src/modules/audio_device/main/test/audio_device_test_api.cc
new file mode 100644
index 0000000..a7b1f71
--- /dev/null
+++ b/src/modules/audio_device/main/test/audio_device_test_api.cc
@@ -0,0 +1,1877 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cmath>
+#include <stdio.h>
+#include <string.h>
+
+#include "audio_device_test_defines.h"
+
+#include "gtest/gtest.h"
+#include "testsupport/fileutils.h"
+
+#include "../source/audio_device_config.h"
+#include "../source/audio_device_impl.h"
+#include "../source/audio_device_utility.h"
+#include "system_wrappers/interface/sleep.h"
+
+// Helper functions
+#if defined(ANDROID)
+char filenameStr[2][256] =
+{ {0},
+  {0},
+}; // Allow two buffers for those API calls taking two filenames
+int currentStr = 0;
+
+const char* GetFilename(const char* filename)
+{
+  currentStr = !currentStr;
+  sprintf(filenameStr[currentStr], "/sdcard/admtest/%s", filename);
+  return filenameStr[currentStr];
+}
+#elif !defined(MAC_IPHONE)
+const char* GetFilename(const char* filename) {
+  std::string full_path_filename = webrtc::test::OutputPath() + filename;
+  return full_path_filename.c_str();
+}
+#endif
+
+using namespace webrtc;
+
+class AudioEventObserverAPI: public AudioDeviceObserver {
+ public:
+  AudioEventObserverAPI(AudioDeviceModule* audioDevice)
+      : error_(kRecordingError),
+        warning_(kRecordingWarning),
+        audio_device_(audioDevice) {
+  }
+
+  ~AudioEventObserverAPI() {}
+
+  virtual void OnErrorIsReported(const ErrorCode error) {
+    TEST_LOG("\n[*** ERROR ***] => OnErrorIsReported(%d)\n\n", error);
+    error_ = error;
+  }
+
+  virtual void OnWarningIsReported(const WarningCode warning) {
+    TEST_LOG("\n[*** WARNING ***] => OnWarningIsReported(%d)\n\n", warning);
+    warning_ = warning;
+    EXPECT_EQ(0, audio_device_->StopRecording());
+    EXPECT_EQ(0, audio_device_->StopPlayout());
+  }
+
+ public:
+  ErrorCode error_;
+  WarningCode warning_;
+ private:
+  AudioDeviceModule* audio_device_;
+};
+
+class AudioTransportAPI: public AudioTransport {
+ public:
+  AudioTransportAPI(AudioDeviceModule* audioDevice)
+      : rec_count_(0),
+        play_count_(0) {
+  }
+
+  ~AudioTransportAPI() {}
+
+  virtual WebRtc_Word32 RecordedDataIsAvailable(
+      const void* audioSamples,
+      const WebRtc_UWord32 nSamples,
+      const WebRtc_UWord8 nBytesPerSample,
+      const WebRtc_UWord8 nChannels,
+      const WebRtc_UWord32 sampleRate,
+      const WebRtc_UWord32 totalDelay,
+      const WebRtc_Word32 clockSkew,
+      const WebRtc_UWord32 currentMicLevel,
+      WebRtc_UWord32& newMicLevel) {
+    rec_count_++;
+    if (rec_count_ % 100 == 0) {
+      if (nChannels == 1) {
+        // mono
+        TEST_LOG("-");
+      } else if ((nChannels == 2) && (nBytesPerSample == 2)) {
+        // stereo but only using one channel
+        TEST_LOG("-|");
+      } else {
+        // stereo
+        TEST_LOG("--");
+      }
+    }
+    return 0;
+  }
+
+  virtual WebRtc_Word32 NeedMorePlayData(
+      const WebRtc_UWord32 nSamples,
+      const WebRtc_UWord8 nBytesPerSample,
+      const WebRtc_UWord8 nChannels,
+      const WebRtc_UWord32 sampleRate,
+      void* audioSamples,
+      WebRtc_UWord32& nSamplesOut) {
+    play_count_++;
+    if (play_count_ % 100 == 0) {
+      if (nChannels == 1) {
+        TEST_LOG("+");
+      } else {
+        TEST_LOG("++");
+      }
+    }
+    nSamplesOut = 480;
+    return 0;
+  }
+
+ private:
+  WebRtc_UWord32 rec_count_;
+  WebRtc_UWord32 play_count_;
+};
+
+class AudioDeviceAPITest: public testing::Test {
+ protected:
+  AudioDeviceAPITest() {}
+
+  virtual ~AudioDeviceAPITest() {}
+
+  static void SetUpTestCase() {
+    process_thread_ = ProcessThread::CreateProcessThread();
+    process_thread_->Start();
+
+    // Windows:
+    //      if (WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+    //          user can select between default (Core) or Wave
+    //      else
+    //          user can select between default (Wave) or Wave
+    const WebRtc_Word32 kId = 444;
+
+#if defined(_WIN32)
+    EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+                kId, AudioDeviceModule::kLinuxAlsaAudio)) == NULL);
+#if defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+    TEST_LOG("WEBRTC_WINDOWS_CORE_AUDIO_BUILD is defined!\n\n");
+    // create default implementation (=Core Audio) instance
+    EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+                kId, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
+    audio_device_->AddRef();
+    EXPECT_EQ(0, audio_device_->Release());
+    // create non-default (=Wave Audio) instance
+    EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+                kId, AudioDeviceModule::kWindowsWaveAudio)) != NULL);
+    audio_device_->AddRef();
+    EXPECT_EQ(0, audio_device_->Release());
+    // explicitly specify usage of Core Audio (same as default)
+    EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+                kId, AudioDeviceModule::kWindowsCoreAudio)) != NULL);
+#else
+    TEST_LOG("WEBRTC_WINDOWS_CORE_AUDIO_BUILD is *not* defined!\n");
+    EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+                kId, AudioDeviceModule::kWindowsCoreAudio)) == NULL);
+    // create default implementation (=Wave Audio) instance
+    EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+                kId, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
+    audio_device_->AddRef();
+    EXPECT_EQ(0, audio_device_->Release());
+    // explicitly specify usage of Wave Audio (same as default)
+    EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+                kId, AudioDeviceModule::kWindowsWaveAudio)) != NULL);
+#endif
+#endif
+
+#if defined(ANDROID)
+    // Fails tests
+    EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+                kId, AudioDeviceModule::kWindowsWaveAudio)) == NULL);
+    EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+                kId, AudioDeviceModule::kWindowsCoreAudio)) == NULL);
+    EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+                kId, AudioDeviceModule::kLinuxAlsaAudio)) == NULL);
+    EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+                kId, AudioDeviceModule::kLinuxPulseAudio)) == NULL);
+    // Create default implementation instance
+    EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+                kId, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
+#elif defined(WEBRTC_LINUX)
+    EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+                kId, AudioDeviceModule::kWindowsWaveAudio)) == NULL);
+    EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+                kId, AudioDeviceModule::kWindowsCoreAudio)) == NULL);
+    // create default implementation instance
+    EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+                kId, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
+    audio_device_->AddRef();
+    EXPECT_EQ(0, audio_device_->Terminate());
+    EXPECT_EQ(0, audio_device_->Release());
+    // explicitly specify usage of Pulse Audio (same as default)
+    EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+                kId, AudioDeviceModule::kLinuxPulseAudio)) != NULL);
+#endif
+
+#if defined(WEBRTC_MAC)
+    // Fails tests
+    EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+                kId, AudioDeviceModule::kWindowsWaveAudio)) == NULL);
+    EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+                kId, AudioDeviceModule::kWindowsCoreAudio)) == NULL);
+    EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+                kId, AudioDeviceModule::kLinuxAlsaAudio)) == NULL);
+    EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+                kId, AudioDeviceModule::kLinuxPulseAudio)) == NULL);
+    // Create default implementation instance
+    EXPECT_TRUE((audio_device_ = AudioDeviceModuleImpl::Create(
+                kId, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
+#endif
+
+    if (audio_device_ == NULL) {
+      FAIL() << "Failed creating audio device object!";
+    }
+
+    // The ADM is reference counted.
+    audio_device_->AddRef();
+
+    process_thread_->RegisterModule(audio_device_);
+
+    AudioDeviceModule::AudioLayer audio_layer =
+        AudioDeviceModule::kPlatformDefaultAudio;
+    EXPECT_EQ(0, audio_device_->ActiveAudioLayer(&audio_layer));
+    if (audio_layer == AudioDeviceModule::kLinuxAlsaAudio) {
+      linux_alsa_ = true;
+    }
+  }
+
+  static void TearDownTestCase() {
+    if (process_thread_) {
+      process_thread_->DeRegisterModule(audio_device_);
+      process_thread_->Stop();
+      ProcessThread::DestroyProcessThread(process_thread_);
+    }
+    if (event_observer_) {
+      delete event_observer_;
+      event_observer_ = NULL;
+    }
+    if (audio_transport_) {
+      delete audio_transport_;
+      audio_transport_ = NULL;
+    }
+    if (audio_device_) {
+      EXPECT_EQ(0, audio_device_->Release());
+    }
+    PRINT_TEST_RESULTS;
+  }
+
+  void SetUp() {
+    if (linux_alsa_) {
+      FAIL() << "API Test is not available on ALSA on Linux!";
+    }
+    EXPECT_EQ(0, audio_device_->Init());
+    EXPECT_TRUE(audio_device_->Initialized());
+  }
+
+  void TearDown() {
+    EXPECT_EQ(0, audio_device_->Terminate());
+  }
+
+  void CheckVolume(WebRtc_UWord32 expected, WebRtc_UWord32 actual) {
+    // Mac and Windows have lower resolution on the volume settings.
+#if defined(WEBRTC_MAC) || defined(_WIN32)
+    int diff = abs(static_cast<int>(expected - actual));
+    EXPECT_LE(diff, 5);
+#else
+    EXPECT_TRUE((actual == expected) || (actual == expected-1));
+#endif
+  }
+
+  void CheckInitialPlayoutStates() {
+    EXPECT_FALSE(audio_device_->PlayoutIsInitialized());
+    EXPECT_FALSE(audio_device_->Playing());
+    EXPECT_FALSE(audio_device_->SpeakerIsInitialized());
+  }
+
+  void CheckInitialRecordingStates() {
+    EXPECT_FALSE(audio_device_->RecordingIsInitialized());
+    EXPECT_FALSE(audio_device_->Recording());
+    EXPECT_FALSE(audio_device_->MicrophoneIsInitialized());
+  }
+
+  static bool linux_alsa_;
+  static ProcessThread* process_thread_;
+  static AudioDeviceModule* audio_device_;
+  static AudioTransportAPI* audio_transport_;
+  static AudioEventObserverAPI* event_observer_;
+};
+
+// Must be initialized like this to handle static SetUpTestCase() above.
+bool AudioDeviceAPITest::linux_alsa_ = false;
+ProcessThread* AudioDeviceAPITest::process_thread_ = NULL;
+AudioDeviceModule* AudioDeviceAPITest::audio_device_ = NULL;
+AudioTransportAPI* AudioDeviceAPITest::audio_transport_ = NULL;
+AudioEventObserverAPI* AudioDeviceAPITest::event_observer_ = NULL;
+
+TEST_F(AudioDeviceAPITest, RegisterEventObserver) {
+  event_observer_ = new AudioEventObserverAPI(audio_device_);
+  EXPECT_EQ(0, audio_device_->RegisterEventObserver(NULL));
+  EXPECT_EQ(0, audio_device_->RegisterEventObserver(event_observer_));
+  EXPECT_EQ(0, audio_device_->RegisterEventObserver(NULL));
+}
+
+TEST_F(AudioDeviceAPITest, RegisterAudioCallback) {
+  audio_transport_ = new AudioTransportAPI(audio_device_);
+  EXPECT_EQ(0, audio_device_->RegisterAudioCallback(NULL));
+  EXPECT_EQ(0, audio_device_->RegisterAudioCallback(audio_transport_));
+  EXPECT_EQ(0, audio_device_->RegisterAudioCallback(NULL));
+}
+
+TEST_F(AudioDeviceAPITest, Init) {
+  EXPECT_TRUE(audio_device_->Initialized());
+  EXPECT_EQ(0, audio_device_->Init());
+  EXPECT_TRUE(audio_device_->Initialized());
+  EXPECT_EQ(0, audio_device_->Terminate());
+  EXPECT_FALSE(audio_device_->Initialized());
+  EXPECT_EQ(0, audio_device_->Init());
+  EXPECT_TRUE(audio_device_->Initialized());
+  EXPECT_EQ(0, audio_device_->Terminate());
+  EXPECT_FALSE(audio_device_->Initialized());
+}
+
+TEST_F(AudioDeviceAPITest, Terminate) {
+  EXPECT_TRUE(audio_device_->Initialized());
+  EXPECT_EQ(0, audio_device_->Terminate());
+  EXPECT_FALSE(audio_device_->Initialized());
+  EXPECT_EQ(0, audio_device_->Terminate());
+  EXPECT_FALSE(audio_device_->Initialized());
+  EXPECT_EQ(0, audio_device_->Init());
+  EXPECT_TRUE(audio_device_->Initialized());
+  EXPECT_EQ(0, audio_device_->Terminate());
+  EXPECT_FALSE(audio_device_->Initialized());
+}
+
+TEST_F(AudioDeviceAPITest, PlayoutDevices) {
+  EXPECT_GT(audio_device_->PlayoutDevices(), 0);
+  EXPECT_GT(audio_device_->PlayoutDevices(), 0);
+}
+
+TEST_F(AudioDeviceAPITest, RecordingDevices) {
+  EXPECT_GT(audio_device_->RecordingDevices(), 0);
+  EXPECT_GT(audio_device_->RecordingDevices(), 0);
+}
+
+TEST_F(AudioDeviceAPITest, PlayoutDeviceName) {
+  char name[kAdmMaxDeviceNameSize];
+  char guid[kAdmMaxGuidSize];
+  WebRtc_Word16 no_devices = audio_device_->PlayoutDevices();
+
+  // fail tests
+  EXPECT_EQ(-1, audio_device_->PlayoutDeviceName(-2, name, guid));
+  EXPECT_EQ(-1, audio_device_->PlayoutDeviceName(no_devices, name, guid));
+  EXPECT_EQ(-1, audio_device_->PlayoutDeviceName(0, NULL, guid));
+
+  // bulk tests
+  EXPECT_EQ(0, audio_device_->PlayoutDeviceName(0, name, NULL));
+#ifdef _WIN32
+  // shall be mapped to 0.
+  EXPECT_EQ(0, audio_device_->PlayoutDeviceName(-1, name, NULL));
+#else
+  EXPECT_EQ(-1, audio_device_->PlayoutDeviceName(-1, name, NULL));
+#endif
+  for (int i = 0; i < no_devices; i++) {
+    EXPECT_EQ(0, audio_device_->PlayoutDeviceName(i, name, guid));
+    EXPECT_EQ(0, audio_device_->PlayoutDeviceName(i, name, NULL));
+  }
+}
+
+TEST_F(AudioDeviceAPITest, RecordingDeviceName) {
+  char name[kAdmMaxDeviceNameSize];
+  char guid[kAdmMaxGuidSize];
+  WebRtc_Word16 no_devices = audio_device_->RecordingDevices();
+
+  // fail tests
+  EXPECT_EQ(-1, audio_device_->RecordingDeviceName(-2, name, guid));
+  EXPECT_EQ(-1, audio_device_->RecordingDeviceName(no_devices, name, guid));
+  EXPECT_EQ(-1, audio_device_->RecordingDeviceName(0, NULL, guid));
+
+  // bulk tests
+  EXPECT_EQ(0, audio_device_->RecordingDeviceName(0, name, NULL));
+#ifdef _WIN32
+  // shall me mapped to 0
+  EXPECT_EQ(0, audio_device_->RecordingDeviceName(-1, name, NULL));
+#else
+  EXPECT_EQ(-1, audio_device_->RecordingDeviceName(-1, name, NULL));
+#endif
+  for (int i = 0; i < no_devices; i++) {
+    EXPECT_EQ(0, audio_device_->RecordingDeviceName(i, name, guid));
+    EXPECT_EQ(0, audio_device_->RecordingDeviceName(i, name, NULL));
+  }
+}
+
+TEST_F(AudioDeviceAPITest, SetPlayoutDevice) {
+  WebRtc_Word16 no_devices = audio_device_->PlayoutDevices();
+
+  // fail tests
+  EXPECT_EQ(-1, audio_device_->SetPlayoutDevice(-1));
+  EXPECT_EQ(-1, audio_device_->SetPlayoutDevice(no_devices));
+
+  // bulk tests
+#ifdef _WIN32
+  EXPECT_EQ(0, audio_device_->SetPlayoutDevice(
+      AudioDeviceModule::kDefaultCommunicationDevice));
+  EXPECT_EQ(0, audio_device_->SetPlayoutDevice(
+      AudioDeviceModule::kDefaultDevice));
+#else
+  EXPECT_EQ(-1, audio_device_->SetPlayoutDevice(
+      AudioDeviceModule::kDefaultCommunicationDevice));
+  EXPECT_EQ(-1, audio_device_->SetPlayoutDevice(
+      AudioDeviceModule::kDefaultDevice));
+#endif
+  for (int i = 0; i < no_devices; i++) {
+    EXPECT_EQ(0, audio_device_->SetPlayoutDevice(i));
+  }
+}
+
+TEST_F(AudioDeviceAPITest, SetRecordingDevice) {
+  EXPECT_EQ(0, audio_device_->Init());
+  WebRtc_Word16 no_devices = audio_device_->RecordingDevices();
+
+  // fail tests
+  EXPECT_EQ(-1, audio_device_->SetRecordingDevice(-1));
+  EXPECT_EQ(-1, audio_device_->SetRecordingDevice(no_devices));
+
+  // bulk tests
+#ifdef _WIN32
+  EXPECT_TRUE(audio_device_->SetRecordingDevice(
+          AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(
+      AudioDeviceModule::kDefaultDevice));
+#else
+  EXPECT_TRUE(audio_device_->SetRecordingDevice(
+      AudioDeviceModule::kDefaultCommunicationDevice) == -1);
+  EXPECT_TRUE(audio_device_->SetRecordingDevice(
+      AudioDeviceModule::kDefaultDevice) == -1);
+#endif
+  for (int i = 0; i < no_devices; i++) {
+    EXPECT_EQ(0, audio_device_->SetRecordingDevice(i));
+  }
+}
+
+TEST_F(AudioDeviceAPITest, PlayoutIsAvailable) {
+  bool available;
+#ifdef _WIN32
+  EXPECT_TRUE(audio_device_->SetPlayoutDevice(
+          AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+  EXPECT_EQ(0, audio_device_->PlayoutIsAvailable(&available));
+  // Availability check should not initialize.
+  EXPECT_FALSE(audio_device_->PlayoutIsInitialized());
+
+  EXPECT_EQ(0,
+            audio_device_->SetPlayoutDevice(AudioDeviceModule::kDefaultDevice));
+  EXPECT_EQ(0, audio_device_->PlayoutIsAvailable(&available));
+  EXPECT_FALSE(audio_device_->PlayoutIsInitialized());
+#endif
+
+  WebRtc_Word16 no_devices = audio_device_->PlayoutDevices();
+  for (int i = 0; i < no_devices; i++) {
+    EXPECT_EQ(0, audio_device_->SetPlayoutDevice(i));
+    EXPECT_EQ(0, audio_device_->PlayoutIsAvailable(&available));
+    EXPECT_FALSE(audio_device_->PlayoutIsInitialized());
+  }
+}
+
+TEST_F(AudioDeviceAPITest, RecordingIsAvailable) {
+  bool available;
+#ifdef _WIN32
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(
+      AudioDeviceModule::kDefaultCommunicationDevice));
+  EXPECT_EQ(0, audio_device_->RecordingIsAvailable(&available));
+  EXPECT_FALSE(audio_device_->RecordingIsInitialized());
+
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(
+      AudioDeviceModule::kDefaultDevice));
+  EXPECT_EQ(0, audio_device_->RecordingIsAvailable(&available));
+  EXPECT_FALSE(audio_device_->RecordingIsInitialized());
+#endif
+
+  WebRtc_Word16 no_devices = audio_device_->RecordingDevices();
+  for (int i = 0; i < no_devices; i++) {
+    EXPECT_EQ(0, audio_device_->SetRecordingDevice(i));
+    EXPECT_EQ(0, audio_device_->RecordingIsAvailable(&available));
+    EXPECT_FALSE(audio_device_->RecordingIsInitialized());
+  }
+}
+
+TEST_F(AudioDeviceAPITest, InitPlayout) {
+  // check initial state
+  EXPECT_FALSE(audio_device_->PlayoutIsInitialized());
+
+  // ensure that device must be set before we can initialize
+  EXPECT_EQ(-1, audio_device_->InitPlayout());
+  EXPECT_EQ(0, audio_device_->SetPlayoutDevice(MACRO_DEFAULT_DEVICE));
+  EXPECT_EQ(0, audio_device_->InitPlayout());
+  EXPECT_TRUE(audio_device_->PlayoutIsInitialized());
+
+  // bulk tests
+  bool available;
+  EXPECT_EQ(0, audio_device_->PlayoutIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->InitPlayout());
+    EXPECT_TRUE(audio_device_->PlayoutIsInitialized());
+    EXPECT_EQ(0, audio_device_->InitPlayout());
+    EXPECT_EQ(-1, audio_device_->SetPlayoutDevice(
+        MACRO_DEFAULT_COMMUNICATION_DEVICE));
+    EXPECT_EQ(0, audio_device_->StopPlayout());
+    EXPECT_FALSE(audio_device_->PlayoutIsInitialized());
+  }
+
+  EXPECT_EQ(0, audio_device_->SetPlayoutDevice(
+      MACRO_DEFAULT_COMMUNICATION_DEVICE));
+  EXPECT_EQ(0, audio_device_->PlayoutIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->InitPlayout());
+    // Sleep is needed for e.g. iPhone since we after stopping then starting may
+    // have a hangover time of a couple of ms before initialized.
+    SleepMs(50);
+    EXPECT_TRUE(audio_device_->PlayoutIsInitialized());
+  }
+
+  WebRtc_Word16 no_devices = audio_device_->PlayoutDevices();
+  for (int i = 0; i < no_devices; i++) {
+    EXPECT_EQ(0, audio_device_->PlayoutIsAvailable(&available));
+    if (available) {
+      EXPECT_EQ(0, audio_device_->StopPlayout());
+      EXPECT_FALSE(audio_device_->PlayoutIsInitialized());
+      EXPECT_EQ(0, audio_device_->SetPlayoutDevice(i));
+      EXPECT_EQ(0, audio_device_->PlayoutIsAvailable(&available));
+      if (available) {
+        EXPECT_EQ(0, audio_device_->InitPlayout());
+        EXPECT_TRUE(audio_device_->PlayoutIsInitialized());
+      }
+    }
+  }
+  EXPECT_EQ(0, audio_device_->StopPlayout());
+}
+
+TEST_F(AudioDeviceAPITest, InitRecording) {
+  // check initial state
+  EXPECT_FALSE(audio_device_->RecordingIsInitialized());
+
+  // ensure that device must be set before we can initialize
+  EXPECT_EQ(-1, audio_device_->InitRecording());
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(MACRO_DEFAULT_DEVICE));
+  EXPECT_EQ(0, audio_device_->InitRecording());
+  EXPECT_TRUE(audio_device_->RecordingIsInitialized());
+
+  // bulk tests
+  bool available;
+  EXPECT_EQ(0, audio_device_->RecordingIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->InitRecording());
+    EXPECT_TRUE(audio_device_->RecordingIsInitialized());
+    EXPECT_EQ(0, audio_device_->InitRecording());
+    EXPECT_EQ(-1,
+        audio_device_->SetRecordingDevice(MACRO_DEFAULT_COMMUNICATION_DEVICE));
+    EXPECT_EQ(0, audio_device_->StopRecording());
+    EXPECT_FALSE(audio_device_->RecordingIsInitialized());
+  }
+
+  EXPECT_EQ(0,
+      audio_device_->SetRecordingDevice(MACRO_DEFAULT_COMMUNICATION_DEVICE));
+  EXPECT_EQ(0, audio_device_->RecordingIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->InitRecording());
+    SleepMs(50);
+    EXPECT_TRUE(audio_device_->RecordingIsInitialized());
+  }
+
+  WebRtc_Word16 no_devices = audio_device_->RecordingDevices();
+  for (int i = 0; i < no_devices; i++) {
+    EXPECT_EQ(0, audio_device_->RecordingIsAvailable(&available));
+    if (available) {
+      EXPECT_EQ(0, audio_device_->StopRecording());
+      EXPECT_FALSE(audio_device_->RecordingIsInitialized());
+      EXPECT_EQ(0, audio_device_->SetRecordingDevice(i));
+      EXPECT_EQ(0, audio_device_->RecordingIsAvailable(&available));
+      if (available) {
+        EXPECT_EQ(0, audio_device_->InitRecording());
+        EXPECT_TRUE(audio_device_->RecordingIsInitialized());
+      }
+    }
+  }
+  EXPECT_EQ(0, audio_device_->StopRecording());
+}
+
+TEST_F(AudioDeviceAPITest, StartAndStopPlayout) {
+  bool available;
+  EXPECT_EQ(0, audio_device_->RegisterAudioCallback(NULL));
+
+  CheckInitialPlayoutStates();
+
+  EXPECT_EQ(-1, audio_device_->StartPlayout());
+  EXPECT_EQ(0, audio_device_->StopPlayout());
+
+#ifdef _WIN32
+  // kDefaultCommunicationDevice
+  EXPECT_TRUE(audio_device_->SetPlayoutDevice(
+          AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+  EXPECT_EQ(0, audio_device_->PlayoutIsAvailable(&available));
+  if (available)
+  {
+    EXPECT_FALSE(audio_device_->PlayoutIsInitialized());
+    EXPECT_EQ(0, audio_device_->InitPlayout());
+    EXPECT_EQ(0, audio_device_->StartPlayout());
+    EXPECT_TRUE(audio_device_->Playing());
+    EXPECT_EQ(0, audio_device_->RegisterAudioCallback(audio_transport_));
+    EXPECT_EQ(0, audio_device_->StopPlayout());
+    EXPECT_FALSE(audio_device_->Playing());
+    EXPECT_EQ(0, audio_device_->RegisterAudioCallback(NULL));
+  }
+#endif
+
+  // repeat test but for kDefaultDevice
+  EXPECT_EQ(0, audio_device_->SetPlayoutDevice(MACRO_DEFAULT_DEVICE));
+  EXPECT_EQ(0, audio_device_->PlayoutIsAvailable(&available));
+  if (available) {
+    EXPECT_FALSE(audio_device_->PlayoutIsInitialized());
+    EXPECT_EQ(0, audio_device_->InitPlayout());
+    EXPECT_EQ(0, audio_device_->StartPlayout());
+    EXPECT_TRUE(audio_device_->Playing());
+    EXPECT_EQ(0, audio_device_->RegisterAudioCallback(audio_transport_));
+    EXPECT_EQ(0, audio_device_->StopPlayout());
+    EXPECT_FALSE(audio_device_->Playing());
+  }
+
+  // repeat test for all devices
+  WebRtc_Word16 no_devices = audio_device_->PlayoutDevices();
+  for (int i = 0; i < no_devices; i++) {
+    EXPECT_EQ(0, audio_device_->SetPlayoutDevice(i));
+    EXPECT_EQ(0, audio_device_->PlayoutIsAvailable(&available));
+    if (available) {
+      EXPECT_FALSE(audio_device_->PlayoutIsInitialized());
+      EXPECT_EQ(0, audio_device_->InitPlayout());
+      EXPECT_EQ(0, audio_device_->StartPlayout());
+      EXPECT_TRUE(audio_device_->Playing());
+      EXPECT_EQ(0, audio_device_->RegisterAudioCallback(audio_transport_));
+      EXPECT_EQ(0, audio_device_->StopPlayout());
+      EXPECT_FALSE(audio_device_->Playing());
+    }
+  }
+}
+
+TEST_F(AudioDeviceAPITest, StartAndStopRecording) {
+  bool available;
+  EXPECT_EQ(0, audio_device_->RegisterAudioCallback(NULL));
+
+  CheckInitialRecordingStates();
+
+  EXPECT_EQ(-1, audio_device_->StartRecording());
+  EXPECT_EQ(0, audio_device_->StopRecording());
+
+#ifdef _WIN32
+  // kDefaultCommunicationDevice
+  EXPECT_TRUE(audio_device_->SetRecordingDevice(
+          AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+  EXPECT_EQ(0, audio_device_->RecordingIsAvailable(&available));
+  if (available)
+  {
+    EXPECT_FALSE(audio_device_->RecordingIsInitialized());
+    EXPECT_EQ(0, audio_device_->InitRecording());
+    EXPECT_EQ(0, audio_device_->StartRecording());
+    EXPECT_TRUE(audio_device_->Recording());
+    EXPECT_EQ(0, audio_device_->RegisterAudioCallback(audio_transport_));
+    EXPECT_EQ(0, audio_device_->StopRecording());
+    EXPECT_FALSE(audio_device_->Recording());
+    EXPECT_EQ(0, audio_device_->RegisterAudioCallback(NULL));
+  }
+#endif
+
+  // repeat test but for kDefaultDevice
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(MACRO_DEFAULT_DEVICE));
+  EXPECT_EQ(0, audio_device_->RecordingIsAvailable(&available));
+  if (available) {
+    EXPECT_FALSE(audio_device_->RecordingIsInitialized());
+    EXPECT_EQ(0, audio_device_->InitRecording());
+    EXPECT_EQ(0, audio_device_->StartRecording());
+    EXPECT_TRUE(audio_device_->Recording());
+    EXPECT_EQ(0, audio_device_->RegisterAudioCallback(audio_transport_));
+    EXPECT_EQ(0, audio_device_->StopRecording());
+    EXPECT_FALSE(audio_device_->Recording());
+  }
+
+  // repeat test for all devices
+  WebRtc_Word16 no_devices = audio_device_->RecordingDevices();
+  for (int i = 0; i < no_devices; i++) {
+    EXPECT_EQ(0, audio_device_->SetRecordingDevice(i));
+    EXPECT_EQ(0, audio_device_->RecordingIsAvailable(&available));
+    if (available) {
+      EXPECT_FALSE(audio_device_->RecordingIsInitialized());
+      EXPECT_EQ(0, audio_device_->InitRecording());
+      EXPECT_EQ(0, audio_device_->StartRecording());
+      EXPECT_TRUE(audio_device_->Recording());
+      EXPECT_EQ(0, audio_device_->RegisterAudioCallback(audio_transport_));
+      EXPECT_EQ(0, audio_device_->StopRecording());
+      EXPECT_FALSE(audio_device_->Recording());
+    }
+  }
+}
+
+#if defined(_WIN32) && !defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+TEST_F(AudioDeviceAPITest, SetAndGetWaveOutVolume) {
+  WebRtc_UWord32 vol(0);
+  // NOTE 1: Windows Wave only!
+  // NOTE 2: It seems like the waveOutSetVolume API returns
+  // MMSYSERR_NOTSUPPORTED on some Vista machines!
+  const WebRtc_UWord16 maxVol(0xFFFF);
+  WebRtc_UWord16 volL, volR;
+
+  CheckInitialPlayoutStates();
+
+  // make dummy test to see if this API is supported
+  WebRtc_Word32 works = audio_device_->SetWaveOutVolume(vol, vol);
+  WARNING(works == 0);
+
+  if (works == 0)
+  {
+    // set volume without open playout device
+    for (vol = 0; vol <= maxVol; vol += (maxVol/5))
+    {
+      EXPECT_EQ(0, audio_device_->SetWaveOutVolume(vol, vol));
+      EXPECT_EQ(0, audio_device_->WaveOutVolume(volL, volR));
+      EXPECT_TRUE((volL == vol) && (volR == vol));
+    }
+
+    // repeat test but this time with an open (default) output device
+    EXPECT_EQ(0, audio_device_->SetPlayoutDevice(
+        AudioDeviceModule::kDefaultDevice));
+    EXPECT_EQ(0, audio_device_->InitPlayout());
+    EXPECT_TRUE(audio_device_->PlayoutIsInitialized());
+    for (vol = 0; vol <= maxVol; vol += (maxVol/5))
+    {
+      EXPECT_EQ(0, audio_device_->SetWaveOutVolume(vol, vol));
+      EXPECT_EQ(0, audio_device_->WaveOutVolume(volL, volR));
+      EXPECT_TRUE((volL == vol) && (volR == vol));
+    }
+
+    // as above but while playout is active
+    EXPECT_EQ(0, audio_device_->StartPlayout());
+    EXPECT_TRUE(audio_device_->Playing());
+    for (vol = 0; vol <= maxVol; vol += (maxVol/5))
+    {
+      EXPECT_EQ(0, audio_device_->SetWaveOutVolume(vol, vol));
+      EXPECT_EQ(0, audio_device_->WaveOutVolume(volL, volR));
+      EXPECT_TRUE((volL == vol) && (volR == vol));
+    }
+  }
+
+  EXPECT_EQ(0, audio_device_->StopPlayout());
+  EXPECT_FALSE(audio_device_->Playing());
+}
+#endif  // defined(_WIN32) && !defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+
+TEST_F(AudioDeviceAPITest, SpeakerIsAvailable) {
+  bool available;
+  CheckInitialPlayoutStates();
+
+#ifdef _WIN32
+  // check the kDefaultCommunicationDevice
+  EXPECT_TRUE(audio_device_->SetPlayoutDevice(
+          AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+  EXPECT_EQ(0, audio_device_->SpeakerIsAvailable(&available));
+  // check for availability should not lead to initialization
+  EXPECT_FALSE(audio_device_->SpeakerIsInitialized());
+#endif
+
+  // check the kDefaultDevice
+  EXPECT_EQ(0, audio_device_->SetPlayoutDevice(MACRO_DEFAULT_DEVICE));
+  EXPECT_EQ(0, audio_device_->SpeakerIsAvailable(&available));
+  EXPECT_FALSE(audio_device_->SpeakerIsInitialized());
+
+  // check all availiable devices
+  WebRtc_Word16 no_devices = audio_device_->PlayoutDevices();
+  for (int i = 0; i < no_devices; i++) {
+    EXPECT_EQ(0, audio_device_->SetPlayoutDevice(i));
+    EXPECT_EQ(0, audio_device_->SpeakerIsAvailable(&available));
+    EXPECT_FALSE(audio_device_->SpeakerIsInitialized());
+  }
+}
+
+TEST_F(AudioDeviceAPITest, InitSpeaker) {
+  // NOTE: By calling Terminate (in TearDown) followed by Init (in SetUp) we
+  // ensure that any existing output mixer handle is set to NULL.
+  // The mixer handle is closed and reopened again for each call to
+  // SetPlayoutDevice.
+  CheckInitialPlayoutStates();
+
+  // kDefaultCommunicationDevice
+  EXPECT_EQ(0, audio_device_->SetPlayoutDevice(
+      MACRO_DEFAULT_COMMUNICATION_DEVICE));
+  bool available;
+  EXPECT_EQ(0, audio_device_->SpeakerIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->InitSpeaker());
+  }
+
+  // fail tests
+  EXPECT_EQ(0, audio_device_->PlayoutIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->InitPlayout());
+    EXPECT_EQ(0, audio_device_->StartPlayout());
+    EXPECT_EQ(-1, audio_device_->InitSpeaker());
+    EXPECT_EQ(0, audio_device_->StopPlayout());
+  }
+
+  // kDefaultDevice
+  EXPECT_EQ(0, audio_device_->SetPlayoutDevice(MACRO_DEFAULT_DEVICE));
+  EXPECT_EQ(0, audio_device_->SpeakerIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->InitSpeaker());
+  }
+
+  // repeat test for all devices
+  WebRtc_Word16 no_devices = audio_device_->PlayoutDevices();
+  for (int i = 0; i < no_devices; i++) {
+    EXPECT_EQ(0, audio_device_->SetPlayoutDevice(i));
+    EXPECT_EQ(0, audio_device_->SpeakerIsAvailable(&available));
+    if (available) {
+      EXPECT_EQ(0, audio_device_->InitSpeaker());
+    }
+  }
+}
+
+TEST_F(AudioDeviceAPITest, MicrophoneIsAvailable) {
+  CheckInitialRecordingStates();
+  bool available;
+#ifdef _WIN32
+  // check the kDefaultCommunicationDevice
+  EXPECT_TRUE(audio_device_->SetRecordingDevice(
+          AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+  EXPECT_EQ(0, audio_device_->MicrophoneIsAvailable(&available));
+  // check for availability should not lead to initialization
+  EXPECT_FALSE(audio_device_->MicrophoneIsInitialized());
+#endif
+
+  // check the kDefaultDevice
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(MACRO_DEFAULT_DEVICE));
+  EXPECT_EQ(0, audio_device_->MicrophoneIsAvailable(&available));
+  EXPECT_FALSE(audio_device_->MicrophoneIsInitialized());
+
+  // check all availiable devices
+  WebRtc_Word16 no_devices = audio_device_->RecordingDevices();
+  for (int i = 0; i < no_devices; i++) {
+    EXPECT_EQ(0, audio_device_->SetRecordingDevice(i));
+    EXPECT_EQ(0, audio_device_->MicrophoneIsAvailable(&available));
+    EXPECT_FALSE(audio_device_->MicrophoneIsInitialized());
+  }
+}
+
+TEST_F(AudioDeviceAPITest, InitMicrophone) {
+  // NOTE: By calling Terminate (in TearDown) followed by Init (in SetUp) we
+  // ensure that any existing output mixer handle is set to NULL.
+  // The mixer handle is closed and reopened again for each call to
+  // SetRecordingDevice.
+  CheckInitialRecordingStates();
+
+  // kDefaultCommunicationDevice
+  EXPECT_EQ(0,
+      audio_device_->SetRecordingDevice(MACRO_DEFAULT_COMMUNICATION_DEVICE));
+  bool available;
+  EXPECT_EQ(0, audio_device_->MicrophoneIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->InitMicrophone());
+  }
+
+  // fail tests
+  EXPECT_EQ(0, audio_device_->RecordingIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->InitRecording());
+    EXPECT_EQ(0, audio_device_->StartRecording());
+    EXPECT_EQ(-1, audio_device_->InitMicrophone());
+    EXPECT_EQ(0, audio_device_->StopRecording());
+  }
+
+  // kDefaultDevice
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(MACRO_DEFAULT_DEVICE));
+  EXPECT_EQ(0, audio_device_->MicrophoneIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->InitMicrophone());
+  }
+
+  // repeat test for all devices
+  WebRtc_Word16 no_devices = audio_device_->RecordingDevices();
+  for (int i = 0; i < no_devices; i++) {
+    EXPECT_EQ(0, audio_device_->SetRecordingDevice(i));
+    EXPECT_EQ(0, audio_device_->MicrophoneIsAvailable(&available));
+    if (available) {
+      EXPECT_EQ(0, audio_device_->InitMicrophone());
+    }
+  }
+}
+
+TEST_F(AudioDeviceAPITest, SpeakerVolumeIsAvailable) {
+  CheckInitialPlayoutStates();
+  bool available;
+
+#ifdef _WIN32
+  // check the kDefaultCommunicationDevice
+  EXPECT_TRUE(audio_device_->SetPlayoutDevice(
+          AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+  EXPECT_EQ(0, audio_device_->SpeakerVolumeIsAvailable(&available));
+  // check for availability should not lead to initialization
+  EXPECT_FALSE(audio_device_->SpeakerIsInitialized());
+#endif
+
+  // check the kDefaultDevice
+  EXPECT_EQ(0, audio_device_->SetPlayoutDevice(MACRO_DEFAULT_DEVICE));
+  EXPECT_EQ(0, audio_device_->SpeakerVolumeIsAvailable(&available));
+  EXPECT_FALSE(audio_device_->SpeakerIsInitialized());
+
+  // check all availiable devices
+  WebRtc_Word16 no_devices = audio_device_->PlayoutDevices();
+  for (int i = 0; i < no_devices; i++) {
+    EXPECT_EQ(0, audio_device_->SetPlayoutDevice(i));
+    EXPECT_EQ(0, audio_device_->SpeakerVolumeIsAvailable(&available));
+    EXPECT_FALSE(audio_device_->SpeakerIsInitialized());
+  }
+}
+
+// Tests the following methods:
+// SetSpeakerVolume
+// SpeakerVolume
+// MaxSpeakerVolume
+// MinSpeakerVolume
+// NOTE: Disabled on mac due to issue 257.
+#ifndef WEBRTC_MAC
+TEST_F(AudioDeviceAPITest, SpeakerVolumeTests) {
+  WebRtc_UWord32 vol(0);
+  WebRtc_UWord32 volume(0);
+  WebRtc_UWord32 maxVolume(0);
+  WebRtc_UWord32 minVolume(0);
+  WebRtc_UWord16 stepSize(0);
+  bool available;
+  CheckInitialPlayoutStates();
+
+  // fail tests
+  EXPECT_EQ(-1, audio_device_->SetSpeakerVolume(0));
+  // speaker must be initialized first
+  EXPECT_EQ(-1, audio_device_->SpeakerVolume(&volume));
+  EXPECT_EQ(-1, audio_device_->MaxSpeakerVolume(&maxVolume));
+  EXPECT_EQ(-1, audio_device_->MinSpeakerVolume(&minVolume));
+  EXPECT_EQ(-1, audio_device_->SpeakerVolumeStepSize(&stepSize));
+
+#if defined(_WIN32) && !defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+  // test for warning (can e.g. happen on Vista with Wave API)
+  EXPECT_EQ(0,
+            audio_device_->SetPlayoutDevice(AudioDeviceModule::kDefaultDevice));
+  EXPECT_EQ(0, audio_device_->SpeakerVolumeIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->InitSpeaker());
+    EXPECT_EQ(0, audio_device_->SetSpeakerVolume(19001));
+    EXPECT_EQ(0, audio_device_->SpeakerVolume(&volume));
+    WARNING(volume == 19001);
+  }
+#endif
+
+#ifdef _WIN32
+  // use kDefaultCommunicationDevice and modify/retrieve the volume
+  EXPECT_TRUE(audio_device_->SetPlayoutDevice(
+          AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+  EXPECT_EQ(0, audio_device_->SpeakerVolumeIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->InitSpeaker());
+    EXPECT_EQ(0, audio_device_->MaxSpeakerVolume(&maxVolume));
+    EXPECT_EQ(0, audio_device_->MinSpeakerVolume(&minVolume));
+    EXPECT_EQ(0, audio_device_->SpeakerVolumeStepSize(&stepSize));
+    for (vol = minVolume; vol < (int)maxVolume; vol += 20*stepSize) {
+      EXPECT_EQ(0, audio_device_->SetSpeakerVolume(vol));
+      EXPECT_EQ(0, audio_device_->SpeakerVolume(&volume));
+      CheckVolume(volume, vol);
+    }
+  }
+#endif
+
+  // use kDefaultDevice and modify/retrieve the volume
+  EXPECT_EQ(0, audio_device_->SetPlayoutDevice(MACRO_DEFAULT_DEVICE));
+  EXPECT_EQ(0, audio_device_->SpeakerVolumeIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->InitSpeaker());
+    EXPECT_EQ(0, audio_device_->MaxSpeakerVolume(&maxVolume));
+    EXPECT_EQ(0, audio_device_->MinSpeakerVolume(&minVolume));
+    EXPECT_EQ(0, audio_device_->SpeakerVolumeStepSize(&stepSize));
+    WebRtc_UWord32 step = (maxVolume - minVolume) / 10;
+    step = (step < stepSize ? stepSize : step);
+    for (vol = minVolume; vol <= maxVolume; vol += step) {
+      EXPECT_EQ(0, audio_device_->SetSpeakerVolume(vol));
+      EXPECT_EQ(0, audio_device_->SpeakerVolume(&volume));
+      CheckVolume(volume, vol);
+    }
+  }
+
+  // use all (indexed) devices and modify/retrieve the volume
+  WebRtc_Word16 no_devices = audio_device_->PlayoutDevices();
+  for (int i = 0; i < no_devices; i++) {
+    EXPECT_EQ(0, audio_device_->SetPlayoutDevice(i));
+    EXPECT_EQ(0, audio_device_->SpeakerVolumeIsAvailable(&available));
+    if (available) {
+      EXPECT_EQ(0, audio_device_->InitSpeaker());
+      EXPECT_EQ(0, audio_device_->MaxSpeakerVolume(&maxVolume));
+      EXPECT_EQ(0, audio_device_->MinSpeakerVolume(&minVolume));
+      EXPECT_EQ(0, audio_device_->SpeakerVolumeStepSize(&stepSize));
+      WebRtc_UWord32 step = (maxVolume - minVolume) / 10;
+      step = (step < stepSize ? stepSize : step);
+      for (vol = minVolume; vol <= maxVolume; vol += step) {
+        EXPECT_EQ(0, audio_device_->SetSpeakerVolume(vol));
+        EXPECT_EQ(0, audio_device_->SpeakerVolume(&volume));
+        CheckVolume(volume, vol);
+      }
+    }
+  }
+
+  // restore reasonable level
+  EXPECT_EQ(0, audio_device_->SetPlayoutDevice(MACRO_DEFAULT_DEVICE));
+  EXPECT_EQ(0, audio_device_->SpeakerVolumeIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->InitSpeaker());
+    EXPECT_EQ(0, audio_device_->MaxSpeakerVolume(&maxVolume));
+    EXPECT_TRUE(audio_device_->SetSpeakerVolume(maxVolume < 10 ?
+        maxVolume/3 : maxVolume/10) == 0);
+  }
+}
+#endif  // !WEBRTC_MAC
+
+TEST_F(AudioDeviceAPITest, AGC) {
+  // NOTE: The AGC API only enables/disables the AGC. To ensure that it will
+  // have an effect, use it in combination with MicrophoneVolumeIsAvailable.
+  CheckInitialRecordingStates();
+  EXPECT_FALSE(audio_device_->AGC());
+
+  // set/get tests
+  EXPECT_EQ(0, audio_device_->SetAGC(true));
+  EXPECT_TRUE(audio_device_->AGC());
+  EXPECT_EQ(0, audio_device_->SetAGC(false));
+  EXPECT_FALSE(audio_device_->AGC());
+}
+
+TEST_F(AudioDeviceAPITest, MicrophoneVolumeIsAvailable) {
+  CheckInitialRecordingStates();
+  bool available;
+
+#ifdef _WIN32
+  // check the kDefaultCommunicationDevice
+  EXPECT_TRUE(audio_device_->SetRecordingDevice(
+          AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+  EXPECT_EQ(0, audio_device_->MicrophoneVolumeIsAvailable(&available));
+  // check for availability should not lead to initialization
+  EXPECT_FALSE(audio_device_->MicrophoneIsInitialized());
+#endif
+
+  // check the kDefaultDevice
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(MACRO_DEFAULT_DEVICE));
+  EXPECT_EQ(0, audio_device_->MicrophoneVolumeIsAvailable(&available));
+  EXPECT_FALSE(audio_device_->MicrophoneIsInitialized());
+
+  // check all availiable devices
+  WebRtc_Word16 no_devices = audio_device_->RecordingDevices();
+  for (int i = 0; i < no_devices; i++) {
+    EXPECT_EQ(0, audio_device_->SetRecordingDevice(i));
+    EXPECT_EQ(0, audio_device_->MicrophoneVolumeIsAvailable(&available));
+    EXPECT_FALSE(audio_device_->MicrophoneIsInitialized());
+  }
+}
+
+// Tests the methods:
+// SetMicrophoneVolume
+// MicrophoneVolume
+// MaxMicrophoneVolume
+// MinMicrophoneVolume
+// NOTE: Disabled on mac due to issue 257.
+#ifndef WEBRTC_MAC
+TEST_F(AudioDeviceAPITest, MicrophoneVolumeTests) {
+  WebRtc_UWord32 vol(0);
+  WebRtc_UWord32 volume(0);
+  WebRtc_UWord32 maxVolume(0);
+  WebRtc_UWord32 minVolume(0);
+  WebRtc_UWord16 stepSize(0);
+  bool available;
+  CheckInitialRecordingStates();
+
+  // fail tests
+  EXPECT_EQ(-1, audio_device_->SetMicrophoneVolume(0));
+  // must be initialized first
+  EXPECT_EQ(-1, audio_device_->MicrophoneVolume(&volume));
+  EXPECT_EQ(-1, audio_device_->MaxMicrophoneVolume(&maxVolume));
+  EXPECT_EQ(-1, audio_device_->MinMicrophoneVolume(&minVolume));
+  EXPECT_EQ(-1, audio_device_->MicrophoneVolumeStepSize(&stepSize));
+
+#if defined(_WIN32) && !defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+  // test for warning (can e.g. happen on Vista with Wave API)
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(
+      AudioDeviceModule::kDefaultDevice));
+  EXPECT_EQ(0, audio_device_->MicrophoneVolumeIsAvailable(&available));
+  if (available)
+  {
+    EXPECT_EQ(0, audio_device_->InitMicrophone());
+    EXPECT_EQ(0, audio_device_->SetMicrophoneVolume(19001));
+    EXPECT_EQ(0, audio_device_->MicrophoneVolume(&volume));
+    WARNING(volume == 19001);
+  }
+#endif
+
+#ifdef _WIN32
+  // initialize kDefaultCommunicationDevice and modify/retrieve the volume
+  EXPECT_TRUE(audio_device_->SetRecordingDevice(
+          AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+  EXPECT_EQ(0, audio_device_->MicrophoneVolumeIsAvailable(&available));
+  if (available)
+  {
+    EXPECT_EQ(0, audio_device_->InitMicrophone());
+    EXPECT_EQ(0, audio_device_->MaxMicrophoneVolume(&maxVolume));
+    EXPECT_EQ(0, audio_device_->MinMicrophoneVolume(&minVolume));
+    EXPECT_EQ(0, audio_device_->MicrophoneVolumeStepSize(&stepSize));
+    for (vol = minVolume; vol < (int)maxVolume; vol += 10*stepSize)
+    {
+      EXPECT_EQ(0, audio_device_->SetMicrophoneVolume(vol));
+      EXPECT_EQ(0, audio_device_->MicrophoneVolume(&volume));
+      CheckVolume(volume, vol);
+    }
+  }
+#endif
+
+  // reinitialize kDefaultDevice and modify/retrieve the volume
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(MACRO_DEFAULT_DEVICE));
+  EXPECT_EQ(0, audio_device_->MicrophoneVolumeIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->InitMicrophone());
+    EXPECT_EQ(0, audio_device_->MaxMicrophoneVolume(&maxVolume));
+    EXPECT_EQ(0, audio_device_->MinMicrophoneVolume(&minVolume));
+    EXPECT_EQ(0, audio_device_->MicrophoneVolumeStepSize(&stepSize));
+    for (vol = minVolume; vol < maxVolume; vol += 10 * stepSize) {
+      EXPECT_EQ(0, audio_device_->SetMicrophoneVolume(vol));
+      EXPECT_EQ(0, audio_device_->MicrophoneVolume(&volume));
+      CheckVolume(volume, vol);
+    }
+  }
+
+  // use all (indexed) devices and modify/retrieve the volume
+  WebRtc_Word16 no_devices = audio_device_->RecordingDevices();
+  for (int i = 0; i < no_devices; i++) {
+    EXPECT_EQ(0, audio_device_->SetRecordingDevice(i));
+    EXPECT_EQ(0, audio_device_->MicrophoneVolumeIsAvailable(&available));
+    if (available) {
+      EXPECT_EQ(0, audio_device_->InitMicrophone());
+      EXPECT_EQ(0, audio_device_->MaxMicrophoneVolume(&maxVolume));
+      EXPECT_EQ(0, audio_device_->MinMicrophoneVolume(&minVolume));
+      EXPECT_EQ(0, audio_device_->MicrophoneVolumeStepSize(&stepSize));
+      for (vol = minVolume; vol < maxVolume; vol += 20 * stepSize) {
+        EXPECT_EQ(0, audio_device_->SetMicrophoneVolume(vol));
+        EXPECT_EQ(0, audio_device_->MicrophoneVolume(&volume));
+        CheckVolume(volume, vol);
+      }
+    }
+  }
+
+  // restore reasonable level
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(MACRO_DEFAULT_DEVICE));
+  EXPECT_EQ(0, audio_device_->MicrophoneVolumeIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->InitMicrophone());
+    EXPECT_EQ(0, audio_device_->MaxMicrophoneVolume(&maxVolume));
+    EXPECT_EQ(0, audio_device_->SetMicrophoneVolume(maxVolume/10));
+  }
+}
+#endif  // !WEBRTC_MAC
+
+TEST_F(AudioDeviceAPITest, SpeakerMuteIsAvailable) {
+  bool available;
+  CheckInitialPlayoutStates();
+#ifdef _WIN32
+  // check the kDefaultCommunicationDevice
+  EXPECT_TRUE(audio_device_->SetPlayoutDevice(
+          AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+  EXPECT_EQ(0, audio_device_->SpeakerMuteIsAvailable(&available));
+  // check for availability should not lead to initialization
+  EXPECT_FALSE(audio_device_->SpeakerIsInitialized());
+#endif
+
+  // check the kDefaultDevice
+  EXPECT_EQ(0, audio_device_->SetPlayoutDevice(MACRO_DEFAULT_DEVICE));
+  EXPECT_EQ(0, audio_device_->SpeakerMuteIsAvailable(&available));
+  EXPECT_FALSE(audio_device_->SpeakerIsInitialized());
+
+  // check all availiable devices
+  WebRtc_Word16 no_devices = audio_device_->PlayoutDevices();
+  for (int i = 0; i < no_devices; i++) {
+    EXPECT_EQ(0, audio_device_->SetPlayoutDevice(i));
+    EXPECT_EQ(0, audio_device_->SpeakerMuteIsAvailable(&available));
+    EXPECT_FALSE(audio_device_->SpeakerIsInitialized());
+  }
+}
+
+TEST_F(AudioDeviceAPITest, MicrophoneMuteIsAvailable) {
+  bool available;
+  CheckInitialRecordingStates();
+#ifdef _WIN32
+  // check the kDefaultCommunicationDevice
+  EXPECT_TRUE(audio_device_->SetRecordingDevice(
+          AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+  EXPECT_EQ(0, audio_device_->MicrophoneMuteIsAvailable(&available));
+  // check for availability should not lead to initialization
+#endif
+  EXPECT_FALSE(audio_device_->MicrophoneIsInitialized());
+
+  // check the kDefaultDevice
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(MACRO_DEFAULT_DEVICE));
+  EXPECT_EQ(0, audio_device_->MicrophoneMuteIsAvailable(&available));
+  EXPECT_FALSE(audio_device_->MicrophoneIsInitialized());
+
+  // check all availiable devices
+  WebRtc_Word16 no_devices = audio_device_->RecordingDevices();
+  for (int i = 0; i < no_devices; i++) {
+    EXPECT_EQ(0, audio_device_->SetRecordingDevice(i));
+    EXPECT_EQ(0, audio_device_->MicrophoneMuteIsAvailable(&available));
+    EXPECT_FALSE(audio_device_->MicrophoneIsInitialized());
+  }
+}
+
+TEST_F(AudioDeviceAPITest, MicrophoneBoostIsAvailable) {
+  bool available;
+  CheckInitialRecordingStates();
+#ifdef _WIN32
+  // check the kDefaultCommunicationDevice
+  EXPECT_TRUE(audio_device_->SetRecordingDevice(
+          AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+  EXPECT_EQ(0, audio_device_->MicrophoneBoostIsAvailable(&available));
+  // check for availability should not lead to initialization
+  EXPECT_FALSE(audio_device_->MicrophoneIsInitialized());
+#endif
+
+  // check the kDefaultDevice
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(MACRO_DEFAULT_DEVICE));
+  EXPECT_EQ(0, audio_device_->MicrophoneBoostIsAvailable(&available));
+  EXPECT_FALSE(audio_device_->MicrophoneIsInitialized());
+
+  // check all availiable devices
+  WebRtc_Word16 no_devices = audio_device_->RecordingDevices();
+  for (int i = 0; i < no_devices; i++) {
+    EXPECT_EQ(0, audio_device_->SetRecordingDevice(i));
+    EXPECT_EQ(0, audio_device_->MicrophoneBoostIsAvailable(&available));
+    EXPECT_FALSE(audio_device_->MicrophoneIsInitialized());
+  }
+}
+
+TEST_F(AudioDeviceAPITest, SpeakerMuteTests) {
+  bool available;
+  bool enabled;
+  CheckInitialPlayoutStates();
+  // fail tests
+  EXPECT_EQ(-1, audio_device_->SetSpeakerMute(true));
+  // requires initialization
+  EXPECT_EQ(-1, audio_device_->SpeakerMute(&enabled));
+
+#ifdef _WIN32
+  // initialize kDefaultCommunicationDevice and modify/retrieve the mute state
+  EXPECT_EQ(0, audio_device_->SetPlayoutDevice(
+      AudioDeviceModule::kDefaultCommunicationDevice));
+  EXPECT_EQ(0, audio_device_->SpeakerMuteIsAvailable(&available));
+  if (available)
+  {
+    EXPECT_EQ(0, audio_device_->InitSpeaker());
+    EXPECT_EQ(0, audio_device_->SetSpeakerMute(true));
+    EXPECT_EQ(0, audio_device_->SpeakerMute(&enabled));
+    EXPECT_TRUE(enabled);
+    EXPECT_EQ(0, audio_device_->SetSpeakerMute(false));
+    EXPECT_EQ(0, audio_device_->SpeakerMute(&enabled));
+    EXPECT_FALSE(enabled);
+  }
+#endif
+
+  // reinitialize kDefaultDevice and modify/retrieve the mute state
+  EXPECT_EQ(0, audio_device_->SetPlayoutDevice(MACRO_DEFAULT_DEVICE));
+  EXPECT_EQ(0, audio_device_->SpeakerMuteIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->InitSpeaker());
+    EXPECT_EQ(0, audio_device_->SetSpeakerMute(true));
+    EXPECT_EQ(0, audio_device_->SpeakerMute(&enabled));
+    EXPECT_TRUE(enabled);
+    EXPECT_EQ(0, audio_device_->SetSpeakerMute(false));
+    EXPECT_EQ(0, audio_device_->SpeakerMute(&enabled));
+    EXPECT_FALSE(enabled);
+  }
+
+  // reinitialize the default device (0) and modify/retrieve the mute state
+  EXPECT_EQ(0, audio_device_->SetPlayoutDevice(0));
+  EXPECT_EQ(0, audio_device_->SpeakerMuteIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->InitSpeaker());
+    EXPECT_EQ(0, audio_device_->SetSpeakerMute(true));
+    EXPECT_EQ(0, audio_device_->SpeakerMute(&enabled));
+    EXPECT_TRUE(enabled);
+    EXPECT_EQ(0, audio_device_->SetSpeakerMute(false));
+    EXPECT_EQ(0, audio_device_->SpeakerMute(&enabled));
+    EXPECT_FALSE(enabled);
+  }
+}
+
+TEST_F(AudioDeviceAPITest, MicrophoneMuteTests) {
+  CheckInitialRecordingStates();
+
+  // fail tests
+  EXPECT_EQ(-1, audio_device_->SetMicrophoneMute(true));
+  // requires initialization
+  bool available;
+  bool enabled;
+  EXPECT_EQ(-1, audio_device_->MicrophoneMute(&enabled));
+
+#ifdef _WIN32
+  // initialize kDefaultCommunicationDevice and modify/retrieve the mute
+  EXPECT_TRUE(audio_device_->SetRecordingDevice(
+          AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+  EXPECT_EQ(0, audio_device_->MicrophoneMuteIsAvailable(&available));
+  if (available)
+  {
+    EXPECT_EQ(0, audio_device_->InitMicrophone());
+    EXPECT_EQ(0, audio_device_->SetMicrophoneMute(true));
+    EXPECT_EQ(0, audio_device_->MicrophoneMute(&enabled));
+    EXPECT_TRUE(enabled);
+    EXPECT_EQ(0, audio_device_->SetMicrophoneMute(false));
+    EXPECT_EQ(0, audio_device_->MicrophoneMute(&enabled));
+    EXPECT_FALSE(enabled);
+  }
+#endif
+
+  // reinitialize kDefaultDevice and modify/retrieve the mute
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(MACRO_DEFAULT_DEVICE));
+  EXPECT_EQ(0, audio_device_->MicrophoneMuteIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->InitMicrophone());
+    EXPECT_EQ(0, audio_device_->SetMicrophoneMute(true));
+    EXPECT_EQ(0, audio_device_->MicrophoneMute(&enabled));
+    EXPECT_TRUE(enabled);
+    EXPECT_EQ(0, audio_device_->SetMicrophoneMute(false));
+    EXPECT_EQ(0, audio_device_->MicrophoneMute(&enabled));
+    EXPECT_FALSE(enabled);
+  }
+
+  // reinitialize the default device (0) and modify/retrieve the Mute
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(0));
+  EXPECT_EQ(0, audio_device_->MicrophoneMuteIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->InitMicrophone());
+    EXPECT_EQ(0, audio_device_->SetMicrophoneMute(true));
+    EXPECT_EQ(0, audio_device_->MicrophoneMute(&enabled));
+    EXPECT_TRUE(enabled);
+    EXPECT_EQ(0, audio_device_->SetMicrophoneMute(false));
+    EXPECT_EQ(0, audio_device_->MicrophoneMute(&enabled));
+    EXPECT_FALSE(enabled);
+  }
+}
+
+TEST_F(AudioDeviceAPITest, MicrophoneBoostTests) {
+  bool available;
+  bool enabled;
+  CheckInitialRecordingStates();
+
+  // fail tests
+  EXPECT_EQ(-1, audio_device_->SetMicrophoneBoost(true));
+  // requires initialization
+  EXPECT_EQ(-1, audio_device_->MicrophoneBoost(&enabled));
+
+#ifdef _WIN32
+  // initialize kDefaultCommunicationDevice and modify/retrieve the boost
+  EXPECT_TRUE(audio_device_->SetRecordingDevice(
+          AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+  EXPECT_EQ(0, audio_device_->MicrophoneBoostIsAvailable(&available));
+  if (available)
+  {
+    EXPECT_EQ(0, audio_device_->InitMicrophone());
+    EXPECT_EQ(0, audio_device_->SetMicrophoneBoost(true));
+    EXPECT_EQ(0, audio_device_->MicrophoneBoost(&enabled));
+    EXPECT_TRUE(enabled);
+    EXPECT_EQ(0, audio_device_->SetMicrophoneBoost(false));
+    EXPECT_EQ(0, audio_device_->MicrophoneBoost(&enabled));
+    EXPECT_FALSE(enabled);
+  }
+#endif
+
+  // reinitialize kDefaultDevice and modify/retrieve the boost
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(MACRO_DEFAULT_DEVICE));
+  EXPECT_EQ(0, audio_device_->MicrophoneBoostIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->InitMicrophone());
+    EXPECT_EQ(0, audio_device_->SetMicrophoneBoost(true));
+    EXPECT_EQ(0, audio_device_->MicrophoneBoost(&enabled));
+    EXPECT_TRUE(enabled);
+    EXPECT_EQ(0, audio_device_->SetMicrophoneBoost(false));
+    EXPECT_EQ(0, audio_device_->MicrophoneBoost(&enabled));
+    EXPECT_FALSE(enabled);
+  }
+
+  // reinitialize the default device (0) and modify/retrieve the boost
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(0));
+  EXPECT_EQ(0, audio_device_->MicrophoneBoostIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->InitMicrophone());
+    EXPECT_EQ(0, audio_device_->SetMicrophoneBoost(true));
+    EXPECT_EQ(0, audio_device_->MicrophoneBoost(&enabled));
+    EXPECT_TRUE(enabled);
+    EXPECT_EQ(0, audio_device_->SetMicrophoneBoost(false));
+    EXPECT_EQ(0, audio_device_->MicrophoneBoost(&enabled));
+    EXPECT_FALSE(enabled);
+  }
+}
+
+TEST_F(AudioDeviceAPITest, StereoPlayoutTests) {
+  CheckInitialPlayoutStates();
+
+  // fail tests
+  EXPECT_EQ(-1, audio_device_->InitPlayout());
+  EXPECT_EQ(0, audio_device_->SetPlayoutDevice(
+      MACRO_DEFAULT_COMMUNICATION_DEVICE));
+
+  // TODO(kjellander): Fix so these tests pass on Mac.
+#if !defined(WEBRTC_MAC)
+  EXPECT_EQ(0, audio_device_->InitPlayout());
+  EXPECT_TRUE(audio_device_->PlayoutIsInitialized());
+  // must be performed before initialization
+  EXPECT_EQ(-1, audio_device_->SetStereoPlayout(true));
+#endif
+
+  // ensure that we can set the stereo mode for playout
+  EXPECT_EQ(0, audio_device_->StopPlayout());
+  EXPECT_FALSE(audio_device_->PlayoutIsInitialized());
+
+  // initialize kDefaultCommunicationDevice and modify/retrieve stereo support
+  EXPECT_EQ(0, audio_device_->SetPlayoutDevice(
+      MACRO_DEFAULT_COMMUNICATION_DEVICE));
+  bool available;
+  bool enabled;
+  EXPECT_EQ(0, audio_device_->StereoPlayoutIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->SetStereoPlayout(true));
+    EXPECT_EQ(0, audio_device_->StereoPlayout(&enabled));
+    EXPECT_TRUE(enabled);
+    EXPECT_EQ(0, audio_device_->SetStereoPlayout(false));
+    EXPECT_EQ(0, audio_device_->StereoPlayout(&enabled));
+    EXPECT_FALSE(enabled);
+    EXPECT_EQ(0, audio_device_->SetStereoPlayout(true));
+    EXPECT_EQ(0, audio_device_->StereoPlayout(&enabled));
+    EXPECT_TRUE(enabled);
+  }
+
+  // initialize kDefaultDevice and modify/retrieve stereo support
+  EXPECT_EQ(0, audio_device_->SetPlayoutDevice(MACRO_DEFAULT_DEVICE));
+  EXPECT_EQ(0, audio_device_->StereoPlayoutIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->SetStereoPlayout(true));
+    EXPECT_EQ(0, audio_device_->StereoPlayout(&enabled));
+    EXPECT_TRUE(enabled);
+    EXPECT_EQ(0, audio_device_->SetStereoPlayout(false));
+    EXPECT_EQ(0, audio_device_->StereoPlayout(&enabled));
+    EXPECT_FALSE(enabled);
+    EXPECT_EQ(0, audio_device_->SetStereoPlayout(true));
+    EXPECT_EQ(0, audio_device_->StereoPlayout(&enabled));
+    EXPECT_TRUE(enabled);
+  }
+
+  // initialize default device (0) and modify/retrieve stereo support
+  EXPECT_EQ(0, audio_device_->SetPlayoutDevice(0));
+  EXPECT_EQ(0, audio_device_->StereoPlayoutIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->SetStereoPlayout(true));
+    EXPECT_EQ(0, audio_device_->StereoPlayout(&enabled));
+    EXPECT_TRUE(enabled);
+    EXPECT_EQ(0, audio_device_->SetStereoPlayout(false));
+    EXPECT_EQ(0, audio_device_->StereoPlayout(&enabled));
+    EXPECT_FALSE(enabled);
+    EXPECT_EQ(0, audio_device_->SetStereoPlayout(true));
+    EXPECT_EQ(0, audio_device_->StereoPlayout(&enabled));
+    EXPECT_TRUE(enabled);
+  }
+}
+
+TEST_F(AudioDeviceAPITest, StereoRecordingTests) {
+  CheckInitialRecordingStates();
+  EXPECT_FALSE(audio_device_->Playing());
+
+  // fail tests
+  EXPECT_EQ(-1, audio_device_->InitRecording());
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(
+      MACRO_DEFAULT_COMMUNICATION_DEVICE));
+
+  // TODO(kjellander): Fix so these tests pass on Mac.
+#if !defined(WEBRTC_MAC)
+  EXPECT_EQ(0, audio_device_->InitRecording());
+  EXPECT_TRUE(audio_device_->RecordingIsInitialized());
+  // must be performed before initialization
+  EXPECT_EQ(-1, audio_device_->SetStereoRecording(true));
+#endif
+  // ensures that we can set the stereo mode for recording
+  EXPECT_EQ(0, audio_device_->StopRecording());
+  EXPECT_FALSE(audio_device_->RecordingIsInitialized());
+
+  // initialize kDefaultCommunicationDevice and modify/retrieve stereo support
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(
+      MACRO_DEFAULT_COMMUNICATION_DEVICE));
+  bool available;
+  bool enabled;
+  EXPECT_EQ(0, audio_device_->StereoRecordingIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->SetStereoRecording(true));
+    EXPECT_EQ(0, audio_device_->StereoRecording(&enabled));
+    EXPECT_TRUE(enabled);
+    EXPECT_EQ(0, audio_device_->SetStereoRecording(false));
+    EXPECT_EQ(0, audio_device_->StereoRecording(&enabled));
+    EXPECT_FALSE(enabled);
+  }
+
+  // initialize kDefaultDevice and modify/retrieve stereo support
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(MACRO_DEFAULT_DEVICE));
+  EXPECT_EQ(0, audio_device_->StereoRecordingIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->SetStereoRecording(true));
+    EXPECT_EQ(0, audio_device_->StereoRecording(&enabled));
+    EXPECT_TRUE(enabled);
+    EXPECT_EQ(0, audio_device_->SetStereoRecording(false));
+    EXPECT_EQ(0, audio_device_->StereoRecording(&enabled));
+    EXPECT_FALSE(enabled);
+  }
+
+  // initialize default device (0) and modify/retrieve stereo support
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(0));
+  EXPECT_EQ(0, audio_device_->StereoRecordingIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->SetStereoRecording(true));
+    EXPECT_EQ(0, audio_device_->StereoRecording(&enabled));
+    EXPECT_TRUE(enabled);
+    EXPECT_EQ(0, audio_device_->SetStereoRecording(false));
+    EXPECT_EQ(0, audio_device_->StereoRecording(&enabled));
+    EXPECT_FALSE(enabled);
+  }
+}
+
+TEST_F(AudioDeviceAPITest, RecordingChannelTests) {
+  // the user in Win Core Audio
+  AudioDeviceModule::ChannelType channelType(AudioDeviceModule::kChannelBoth);
+  CheckInitialRecordingStates();
+  EXPECT_FALSE(audio_device_->Playing());
+
+  // fail tests
+  EXPECT_EQ(0, audio_device_->SetStereoRecording(false));
+  EXPECT_EQ(-1, audio_device_->SetRecordingChannel(
+      AudioDeviceModule::kChannelBoth));
+
+  // initialize kDefaultCommunicationDevice and modify/retrieve stereo support
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(
+      MACRO_DEFAULT_COMMUNICATION_DEVICE));
+  bool available;
+  EXPECT_EQ(0, audio_device_->StereoRecordingIsAvailable(&available));
+  if (available) {
+    EXPECT_EQ(0, audio_device_->SetStereoRecording(true));
+    EXPECT_EQ(0, audio_device_->SetRecordingChannel(
+        AudioDeviceModule::kChannelBoth));
+    EXPECT_EQ(0, audio_device_->RecordingChannel(&channelType));
+    EXPECT_EQ(AudioDeviceModule::kChannelBoth, channelType);
+    EXPECT_EQ(0, audio_device_->SetRecordingChannel(
+        AudioDeviceModule::kChannelLeft));
+    EXPECT_EQ(0, audio_device_->RecordingChannel(&channelType));
+    EXPECT_EQ(AudioDeviceModule::kChannelLeft, channelType);
+    EXPECT_EQ(0, audio_device_->SetRecordingChannel(
+        AudioDeviceModule::kChannelRight));
+    EXPECT_EQ(0, audio_device_->RecordingChannel(&channelType));
+    EXPECT_EQ(AudioDeviceModule::kChannelRight, channelType);
+  }
+}
+
+TEST_F(AudioDeviceAPITest, PlayoutBufferTests) {
+  AudioDeviceModule::BufferType bufferType;
+  WebRtc_UWord16 sizeMS(0);
+
+  CheckInitialPlayoutStates();
+  EXPECT_EQ(0, audio_device_->PlayoutBuffer(&bufferType, &sizeMS));
+#if defined(_WIN32) || defined(ANDROID) || defined(MAC_IPHONE)
+  EXPECT_EQ(AudioDeviceModule::kAdaptiveBufferSize, bufferType);
+#else
+  EXPECT_EQ(AudioDeviceModule::kFixedBufferSize, bufferType);
+#endif
+
+  // fail tests
+  EXPECT_EQ(-1, audio_device_->InitPlayout());
+  // must set device first
+  EXPECT_EQ(0, audio_device_->SetPlayoutDevice(
+      MACRO_DEFAULT_COMMUNICATION_DEVICE));
+
+  // TODO(kjellander): Fix so these tests pass on Mac.
+#if !defined(WEBRTC_MAC)
+  EXPECT_EQ(0, audio_device_->InitPlayout());
+  EXPECT_TRUE(audio_device_->PlayoutIsInitialized());
+#endif
+  EXPECT_TRUE(audio_device_->SetPlayoutBuffer(
+      AudioDeviceModule::kAdaptiveBufferSize, 100) == -1);
+  EXPECT_EQ(0, audio_device_->StopPlayout());
+  EXPECT_TRUE(audio_device_->SetPlayoutBuffer(
+      AudioDeviceModule::kFixedBufferSize, kAdmMinPlayoutBufferSizeMs-1) == -1);
+  EXPECT_TRUE(audio_device_->SetPlayoutBuffer(
+      AudioDeviceModule::kFixedBufferSize, kAdmMaxPlayoutBufferSizeMs+1) == -1);
+
+  // bulk tests (all should be successful)
+  EXPECT_FALSE(audio_device_->PlayoutIsInitialized());
+#ifdef _WIN32
+  EXPECT_EQ(0, audio_device_->SetPlayoutBuffer(
+      AudioDeviceModule::kAdaptiveBufferSize, 0));
+  EXPECT_EQ(0, audio_device_->PlayoutBuffer(&bufferType, &sizeMS));
+  EXPECT_EQ(AudioDeviceModule::kAdaptiveBufferSize, bufferType);
+  EXPECT_EQ(0, audio_device_->SetPlayoutBuffer(
+      AudioDeviceModule::kAdaptiveBufferSize, 10000));
+  EXPECT_EQ(0, audio_device_->PlayoutBuffer(&bufferType, &sizeMS));
+  EXPECT_EQ(AudioDeviceModule::kAdaptiveBufferSize, bufferType);
+#endif
+#if defined(ANDROID) || defined(MAC_IPHONE)
+  EXPECT_EQ(-1,
+            audio_device_->SetPlayoutBuffer(AudioDeviceModule::kFixedBufferSize,
+                                          kAdmMinPlayoutBufferSizeMs));
+#else
+  EXPECT_EQ(0, audio_device_->SetPlayoutBuffer(
+      AudioDeviceModule::kFixedBufferSize, kAdmMinPlayoutBufferSizeMs));
+  EXPECT_EQ(0, audio_device_->PlayoutBuffer(&bufferType, &sizeMS));
+  EXPECT_EQ(AudioDeviceModule::kFixedBufferSize, bufferType);
+  EXPECT_EQ(kAdmMinPlayoutBufferSizeMs, sizeMS);
+  EXPECT_EQ(0, audio_device_->SetPlayoutBuffer(
+      AudioDeviceModule::kFixedBufferSize, kAdmMaxPlayoutBufferSizeMs));
+  EXPECT_EQ(0, audio_device_->PlayoutBuffer(&bufferType, &sizeMS));
+  EXPECT_EQ(AudioDeviceModule::kFixedBufferSize, bufferType);
+  EXPECT_EQ(kAdmMaxPlayoutBufferSizeMs, sizeMS);
+  EXPECT_EQ(0, audio_device_->SetPlayoutBuffer(
+      AudioDeviceModule::kFixedBufferSize, 100));
+  EXPECT_EQ(0, audio_device_->PlayoutBuffer(&bufferType, &sizeMS));
+  EXPECT_EQ(AudioDeviceModule::kFixedBufferSize, bufferType);
+  EXPECT_EQ(100, sizeMS);
+#endif
+
+#ifdef _WIN32
+  // restore default
+  EXPECT_EQ(0, audio_device_->SetPlayoutBuffer(
+      AudioDeviceModule::kAdaptiveBufferSize, 0));
+  EXPECT_EQ(0, audio_device_->PlayoutBuffer(&bufferType, &sizeMS));
+#endif
+}
+
+TEST_F(AudioDeviceAPITest, PlayoutDelay) {
+  // NOTE: this API is better tested in a functional test
+  WebRtc_UWord16 sizeMS(0);
+  CheckInitialPlayoutStates();
+  // bulk tests
+  EXPECT_EQ(0, audio_device_->PlayoutDelay(&sizeMS));
+  EXPECT_EQ(0, audio_device_->PlayoutDelay(&sizeMS));
+}
+
+TEST_F(AudioDeviceAPITest, RecordingDelay) {
+  // NOTE: this API is better tested in a functional test
+  WebRtc_UWord16 sizeMS(0);
+  CheckInitialRecordingStates();
+
+  // bulk tests
+  EXPECT_EQ(0, audio_device_->RecordingDelay(&sizeMS));
+  EXPECT_EQ(0, audio_device_->RecordingDelay(&sizeMS));
+}
+
+TEST_F(AudioDeviceAPITest, CPULoad) {
+  // NOTE: this API is better tested in a functional test
+  WebRtc_UWord16 load(0);
+
+  // bulk tests
+#ifdef _WIN32
+  EXPECT_EQ(0, audio_device_->CPULoad(&load));
+  EXPECT_EQ(0, load);
+#else
+  EXPECT_EQ(-1, audio_device_->CPULoad(&load));
+#endif
+}
+
+// TODO(kjellander): Fix flakiness causing failures on Windows.
+#if !defined(_WIN32)
+TEST_F(AudioDeviceAPITest, StartAndStopRawOutputFileRecording) {
+  // NOTE: this API is better tested in a functional test
+  CheckInitialPlayoutStates();
+
+  // fail tests
+  EXPECT_EQ(-1, audio_device_->StartRawOutputFileRecording(NULL));
+
+  // bulk tests
+  EXPECT_EQ(0, audio_device_->StartRawOutputFileRecording(
+      GetFilename("raw_output_not_playing.pcm")));
+  EXPECT_EQ(0, audio_device_->StopRawOutputFileRecording());
+  EXPECT_EQ(0, audio_device_->SetPlayoutDevice(
+      MACRO_DEFAULT_COMMUNICATION_DEVICE));
+
+  // TODO(kjellander): Fix so these tests pass on Mac.
+#if !defined(WEBRTC_MAC)
+  EXPECT_EQ(0, audio_device_->InitPlayout());
+  EXPECT_EQ(0, audio_device_->StartPlayout());
+#endif
+
+  EXPECT_EQ(0, audio_device_->StartRawOutputFileRecording(
+      GetFilename("raw_output_playing.pcm")));
+  SleepMs(100);
+  EXPECT_EQ(0, audio_device_->StopRawOutputFileRecording());
+  EXPECT_EQ(0, audio_device_->StopPlayout());
+  EXPECT_EQ(0, audio_device_->StartRawOutputFileRecording(
+      GetFilename("raw_output_not_playing.pcm")));
+  EXPECT_EQ(0, audio_device_->StopRawOutputFileRecording());
+
+  // results after this test:
+  //
+  // - size of raw_output_not_playing.pcm shall be 0
+  // - size of raw_output_playing.pcm shall be > 0
+}
+
+// TODO(phoglund): The following test is flaky on Linux.
+#if !defined(WEBRTC_LINUX)
+TEST_F(AudioDeviceAPITest, StartAndStopRawInputFileRecording) {
+  // NOTE: this API is better tested in a functional test
+  CheckInitialRecordingStates();
+  EXPECT_FALSE(audio_device_->Playing());
+
+  // fail tests
+  EXPECT_EQ(-1, audio_device_->StartRawInputFileRecording(NULL));
+
+  // bulk tests
+  EXPECT_EQ(0, audio_device_->StartRawInputFileRecording(
+      GetFilename("raw_input_not_recording.pcm")));
+  EXPECT_EQ(0, audio_device_->StopRawInputFileRecording());
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(MACRO_DEFAULT_DEVICE));
+
+  // TODO(kjellander): Fix so these tests pass on Mac.
+#if !defined(WEBRTC_MAC)
+  EXPECT_EQ(0, audio_device_->InitRecording());
+  EXPECT_EQ(0, audio_device_->StartRecording());
+#endif
+  EXPECT_EQ(0, audio_device_->StartRawInputFileRecording(
+      GetFilename("raw_input_recording.pcm")));
+  SleepMs(100);
+  EXPECT_EQ(0, audio_device_->StopRawInputFileRecording());
+  EXPECT_EQ(0, audio_device_->StopRecording());
+  EXPECT_EQ(0, audio_device_->StartRawInputFileRecording(
+      GetFilename("raw_input_not_recording.pcm")));
+  EXPECT_EQ(0, audio_device_->StopRawInputFileRecording());
+
+  // results after this test:
+  //
+  // - size of raw_input_not_recording.pcm shall be 0
+  // - size of raw_input_not_recording.pcm shall be > 0
+}
+#endif  // !WEBRTC_LINUX
+#endif  // !WIN32
+
+TEST_F(AudioDeviceAPITest, RecordingSampleRate) {
+  WebRtc_UWord32 sampleRate(0);
+
+  // bulk tests
+  EXPECT_EQ(0, audio_device_->RecordingSampleRate(&sampleRate));
+#if defined(_WIN32) && !defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+  EXPECT_EQ(48000, sampleRate);
+#elif defined(ANDROID)
+  TEST_LOG("Recording sample rate is %u\n\n", sampleRate);
+  EXPECT_TRUE((sampleRate == 44000) || (sampleRate == 16000));
+#elif defined(MAC_IPHONE)
+  TEST_LOG("Recording sample rate is %u\n\n", sampleRate);
+  EXPECT_TRUE((sampleRate == 44000) || (sampleRate == 16000) ||
+              (sampleRate == 8000));
+#endif
+
+  // @TODO(xians) - add tests for all platforms here...
+}
+
+TEST_F(AudioDeviceAPITest, PlayoutSampleRate) {
+  WebRtc_UWord32 sampleRate(0);
+
+  // bulk tests
+  EXPECT_EQ(0, audio_device_->PlayoutSampleRate(&sampleRate));
+#if defined(_WIN32) && !defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
+  EXPECT_EQ(48000, sampleRate);
+#elif defined(ANDROID)
+  TEST_LOG("Playout sample rate is %u\n\n", sampleRate);
+  EXPECT_TRUE((sampleRate == 44000) || (sampleRate == 16000));
+#elif defined(MAC_IPHONE)
+  TEST_LOG("Playout sample rate is %u\n\n", sampleRate);
+  EXPECT_TRUE((sampleRate == 44000) || (sampleRate == 16000) ||
+              (sampleRate == 8000));
+#endif
+}
+
+TEST_F(AudioDeviceAPITest, ResetAudioDevice) {
+  CheckInitialPlayoutStates();
+  CheckInitialRecordingStates();
+  EXPECT_EQ(0, audio_device_->SetPlayoutDevice(MACRO_DEFAULT_DEVICE));
+  EXPECT_EQ(0, audio_device_->SetRecordingDevice(MACRO_DEFAULT_DEVICE));
+
+#if defined(MAC_IPHONE)
+  // Not playing or recording, should just return 0
+  EXPECT_EQ(0, audio_device_->ResetAudioDevice());
+
+  EXPECT_EQ(0, audio_device_->InitRecording());
+  EXPECT_EQ(0, audio_device_->StartRecording());
+  EXPECT_EQ(0, audio_device_->InitPlayout());
+  EXPECT_EQ(0, audio_device_->StartPlayout());
+  for (int l=0; l<20; ++l)
+  {
+    TEST_LOG("Resetting sound device several time with pause %d ms\n", l);
+    EXPECT_EQ(0, audio_device_->ResetAudioDevice());
+    SleepMs(l);
+  }
+#else
+  // Fail tests
+  EXPECT_EQ(-1, audio_device_->ResetAudioDevice());
+
+  // TODO(kjellander): Fix so these tests pass on Mac.
+#if !defined(WEBRTC_MAC)
+  EXPECT_EQ(0, audio_device_->InitRecording());
+  EXPECT_EQ(0, audio_device_->StartRecording());
+  EXPECT_EQ(0, audio_device_->InitPlayout());
+  EXPECT_EQ(0, audio_device_->StartPlayout());
+#endif
+  EXPECT_EQ(-1, audio_device_->ResetAudioDevice());
+#endif
+  EXPECT_EQ(0, audio_device_->StopRecording());
+  EXPECT_EQ(0, audio_device_->StopPlayout());
+}
+
+TEST_F(AudioDeviceAPITest, SetPlayoutSpeaker) {
+  CheckInitialPlayoutStates();
+  EXPECT_EQ(0, audio_device_->SetPlayoutDevice(MACRO_DEFAULT_DEVICE));
+
+  bool loudspeakerOn(false);
+#if defined(MAC_IPHONE)
+  // Not playing or recording, should just return a success
+  EXPECT_EQ(0, audio_device_->SetLoudspeakerStatus(true));
+  EXPECT_EQ(0, audio_device_->GetLoudspeakerStatus(loudspeakerOn));
+  EXPECT_TRUE(loudspeakerOn);
+  EXPECT_EQ(0, audio_device_->SetLoudspeakerStatus(false));
+  EXPECT_EQ(0, audio_device_->GetLoudspeakerStatus(loudspeakerOn));
+  EXPECT_FALSE(loudspeakerOn);
+
+  EXPECT_EQ(0, audio_device_->InitPlayout());
+  EXPECT_EQ(0, audio_device_->StartPlayout());
+  EXPECT_EQ(0, audio_device_->SetLoudspeakerStatus(true));
+  EXPECT_EQ(0, audio_device_->GetLoudspeakerStatus(loudspeakerOn));
+  EXPECT_TRUE(loudspeakerOn);
+  EXPECT_EQ(0, audio_device_->SetLoudspeakerStatus(false));
+  EXPECT_EQ(0, audio_device_->GetLoudspeakerStatus(loudspeakerOn));
+  EXPECT_FALSE(loudspeakerOn);
+
+#else
+  // Fail tests
+  EXPECT_EQ(-1, audio_device_->SetLoudspeakerStatus(true));
+  EXPECT_EQ(-1, audio_device_->SetLoudspeakerStatus(false));
+  EXPECT_EQ(-1, audio_device_->SetLoudspeakerStatus(true));
+  EXPECT_EQ(-1, audio_device_->SetLoudspeakerStatus(false));
+
+  // TODO(kjellander): Fix so these tests pass on Mac.
+#if !defined(WEBRTC_MAC)
+  EXPECT_EQ(0, audio_device_->InitPlayout());
+  EXPECT_EQ(0, audio_device_->StartPlayout());
+#endif
+
+  EXPECT_EQ(-1, audio_device_->GetLoudspeakerStatus(&loudspeakerOn));
+#endif
+  EXPECT_EQ(0, audio_device_->StopPlayout());
+}
diff --git a/src/modules/audio_device/main/test/audio_device_test_defines.h b/src/modules/audio_device/main/test/audio_device_test_defines.h
new file mode 100644
index 0000000..4ac2a41
--- /dev/null
+++ b/src/modules/audio_device/main/test/audio_device_test_defines.h
@@ -0,0 +1,77 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_TEST_DEFINES_H
+#define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_TEST_DEFINES_H
+
+#include "audio_device.h"
+#include "common_types.h"
+#include "process_thread.h"
+#include "trace.h"
+
+#ifdef _WIN32
+#define MACRO_DEFAULT_DEVICE AudioDeviceModule::kDefaultDevice
+#define MACRO_DEFAULT_COMMUNICATION_DEVICE AudioDeviceModule::kDefaultCommunicationDevice
+#else
+#define MACRO_DEFAULT_DEVICE 0
+#define MACRO_DEFAULT_COMMUNICATION_DEVICE 0
+#endif
+
+#ifdef ANDROID
+#include <android/log.h>
+#define LOG_TAG "WebRtc ADM TEST"
+#define TEST_LOG(...) __android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)
+#define TEST_LOG_ERROR(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
+#else
+#define TEST_LOG printf
+#define TEST_LOG_ERROR(...) fprintf(stderr, __VA_ARGS__)
+#endif
+
+static int warningCount = 0;
+
+#define RESET_TEST                                              \
+    do {                                                        \
+        warningCount = 0;                                       \
+    } while(0)                                                  \
+
+#define PRINT_ERR_MSG(msg)                                      \
+    do {                                                        \
+        TEST_LOG_ERROR("Error at line %i of %s\n%s",            \
+            __LINE__, __FILE__, msg);                           \
+    } while(0)
+
+#define WARNING(expr)                                           \
+    do {                                                        \
+        if (!(expr)) {                                          \
+            TEST_LOG_ERROR("WARNING #%d: at line %i\n\n",       \
+                           warningCount+1, __LINE__);           \
+            warningCount++;                                     \
+        }                                                       \
+    } while(0)
+
+#define PRINT_TEST_RESULTS                                      \
+    do {                                                        \
+        if (warningCount > 0)                                   \
+        {                                                       \
+            TEST_LOG(">> %d warnings <<\n\n",                   \
+                     warningCount);                             \
+        }                                                       \
+    } while(0)
+
+// Helper functions
+// For iPhone, they are defined in iPhone specific test code.
+// For Android, they are defined in API test only (since both
+//   API and Func tests are built into the same lib).
+// For other, they are defined in both API test and Func test.
+const char* GetFilename(const char* filename);
+const char* GetResource(const char* resource);
+
+#endif  // WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_TEST_DEFINES_H
+
diff --git a/src/modules/audio_device/main/test/audio_device_test_func.cc b/src/modules/audio_device/main/test/audio_device_test_func.cc
new file mode 100644
index 0000000..c549eea
--- /dev/null
+++ b/src/modules/audio_device/main/test/audio_device_test_func.cc
@@ -0,0 +1,162 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include "audio_device_test_defines.h"
+#include "func_test_manager.h"
+
+#ifndef __GNUC__
+// Disable warning message 4996 ('scanf': This function or variable may be unsafe)
+#pragma warning( disable : 4996 )
+#endif
+
+using namespace webrtc;
+
+int func_test(int);
+
+// ----------------------------------------------------------------------------
+//  main()
+// ----------------------------------------------------------------------------
+
+#if !defined(MAC_IPHONE)
+int main(int /*argc*/, char* /*argv*/[])
+{
+    func_test(0);
+}
+#endif
+
+// ----------------------------------------------------------------------------
+//  func_test()
+// ----------------------------------------------------------------------------
+
+int func_test(int sel)
+{
+    TEST_LOG("=========================================\n");
+    TEST_LOG("Func Test of the WebRtcAudioDevice Module\n");
+    TEST_LOG("=========================================\n\n");
+
+    // Initialize the counters here to get rid of "unused variables" warnings.
+    warningCount = 0;
+
+    FuncTestManager funcMgr;
+
+    funcMgr.Init();
+
+    bool quit(false);
+
+    while (!quit)
+    {
+        TEST_LOG("---------------------------------------\n");
+        TEST_LOG("Select type of test\n\n");
+        TEST_LOG("  (0) Quit\n");
+        TEST_LOG("  (1) All\n");
+        TEST_LOG("- - - - - - - - - - - - - - - - - - - -\n");
+        TEST_LOG("  (2) Audio-layer selection\n");
+        TEST_LOG("  (3) Device enumeration\n");
+        TEST_LOG("  (4) Device selection\n");
+        TEST_LOG("  (5) Audio transport\n");
+        TEST_LOG("  (6) Speaker volume\n");
+        TEST_LOG("  (7) Microphone volume\n");
+        TEST_LOG("  (8) Speaker mute\n");
+        TEST_LOG("  (9) Microphone mute\n");
+        TEST_LOG(" (10) Microphone boost\n");
+        TEST_LOG(" (11) Microphone AGC\n");
+        TEST_LOG(" (12) Loopback measurements\n");
+        TEST_LOG(" (13) Device removal\n");
+        TEST_LOG(" (14) Advanced mobile device API\n");
+        TEST_LOG(" (66) XTEST\n");
+        TEST_LOG("- - - - - - - - - - - - - - - - - - - -\n");
+        TEST_LOG("\n: ");
+
+        int selection(0);
+        enum TestType testType(TTInvalid);
+
+SHOW_MENU:
+
+        if (sel > 0)
+        {
+            selection = sel;
+        }
+        else
+        {
+            if (scanf("%d", &selection) < 0) {
+              perror("Failed to get selection.");
+            }
+        }
+
+        switch (selection)
+        {
+            case 0:
+                quit = true;
+                break;
+            case 1:
+                testType = TTAll;
+                break;
+            case 2:
+                testType = TTAudioLayerSelection;
+                break;
+            case 3:
+                testType = TTDeviceEnumeration;
+                break;
+            case 4:
+                testType = TTDeviceSelection;
+                break;
+            case 5:
+                testType = TTAudioTransport;
+                break;
+            case 6:
+                testType = TTSpeakerVolume;
+                break;
+            case 7:
+                testType = TTMicrophoneVolume;
+                break;
+            case 8:
+                testType = TTSpeakerMute;
+                break;
+            case 9:
+                testType = TTMicrophoneMute;
+                break;
+            case 10:
+                testType = TTMicrophoneBoost;
+                break;
+            case 11:
+                testType = TTMicrophoneAGC;
+                break;
+            case 12:
+                testType = TTLoopback;
+                break;
+            case 13:
+                testType = TTDeviceRemoval;
+                break;
+            case 14:
+                testType = TTMobileAPI;
+                break;
+            case 66:
+                testType = TTTest;
+                break;
+            default:
+                testType = TTInvalid;
+                TEST_LOG(": ");
+                goto SHOW_MENU;
+                break;
+           }
+
+        funcMgr.DoTest(testType);
+
+        if (sel > 0)
+        {
+            quit = true;
+        }
+    }
+
+    funcMgr.Close();
+
+    return 0;
+}
diff --git a/src/modules/audio_device/main/test/func_test_manager.cc b/src/modules/audio_device/main/test/func_test_manager.cc
new file mode 100644
index 0000000..9cf87bf
--- /dev/null
+++ b/src/modules/audio_device/main/test/func_test_manager.cc
@@ -0,0 +1,2734 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <ctype.h>
+#include <cassert>
+#include <string.h>
+
+#include "func_test_manager.h"
+#include "gtest/gtest.h"
+#include "system_wrappers/interface/sleep.h"
+#include "testsupport/fileutils.h"
+
+#include "../source/audio_device_config.h"
+#include "../source/audio_device_impl.h"
+
+#ifndef __GNUC__
+// Disable warning message ('sprintf': name was marked as #pragma deprecated)
+#pragma warning( disable : 4995 )
+// Disable warning message 4996 ('scanf': This function or variable may be unsafe)
+#pragma warning( disable : 4996 )
+#endif
+
+const char* RecordedMicrophoneFile = "recorded_microphone_mono_48.pcm";
+const char* RecordedMicrophoneVolumeFile =
+"recorded_microphone_volume_mono_48.pcm";
+const char* RecordedMicrophoneMuteFile = "recorded_microphone_mute_mono_48.pcm";
+const char* RecordedMicrophoneBoostFile =
+"recorded_microphone_boost_mono_48.pcm";
+const char* RecordedMicrophoneAGCFile = "recorded_microphone_AGC_mono_48.pcm";
+const char* RecordedSpeakerFile = "recorded_speaker_48.pcm";
+
+struct AudioPacket
+{
+    WebRtc_UWord8 dataBuffer[4 * 960];
+    WebRtc_UWord16 nSamples;
+    WebRtc_UWord16 nBytesPerSample;
+    WebRtc_UWord8 nChannels;
+    WebRtc_UWord32 samplesPerSec;
+};
+
+// Helper functions
+#if !defined(MAC_IPHONE)
+char* GetFilename(char* filename)
+{
+    return filename;
+}
+const char* GetFilename(const char* filename)
+{
+    return filename;
+}
+char* GetResource(char* resource)
+{
+    return resource;
+}
+const char* GetResource(const char* resource)
+{
+    return resource;
+}
+#endif
+
+namespace webrtc
+{
+
+AudioEventObserver::AudioEventObserver(AudioDeviceModule* audioDevice)
+{
+}
+
+AudioEventObserver::~AudioEventObserver()
+{
+}
+
+void AudioEventObserver::OnErrorIsReported(const ErrorCode error)
+{
+    TEST_LOG("\n[*** ERROR ***] => OnErrorIsReported(%d)\n \n", error);
+    _error = error;
+}
+
+
+void AudioEventObserver::OnWarningIsReported(const WarningCode warning)
+{
+    TEST_LOG("\n[*** WARNING ***] => OnWarningIsReported(%d)\n \n", warning);
+    _warning = warning;
+}
+
+AudioTransportImpl::AudioTransportImpl(AudioDeviceModule* audioDevice) :
+    _audioDevice(audioDevice),
+    _playFromFile(false),
+    _fullDuplex(false),
+    _speakerVolume(false),
+    _speakerMute(false),
+    _microphoneVolume(false),
+    _microphoneMute(false),
+    _microphoneBoost(false),
+    _microphoneAGC(false),
+    _loopBackMeasurements(false),
+    _playFile(*FileWrapper::Create()),
+    _recCount(0),
+    _playCount(0),
+    _audioList()
+{
+    _resampler.Reset(48000, 48000, kResamplerSynchronousStereo);
+}
+
+AudioTransportImpl::~AudioTransportImpl()
+{
+    _playFile.Flush();
+    _playFile.CloseFile();
+    delete &_playFile;
+
+    while (!_audioList.Empty())
+    {
+        ListItem* item = _audioList.First();
+        if (item)
+        {
+            AudioPacket* packet = static_cast<AudioPacket*> (item->GetItem());
+            if (packet)
+            {
+                delete packet;
+            }
+        }
+        _audioList.PopFront();
+    }
+}
+
+// ----------------------------------------------------------------------------
+//	AudioTransportImpl::SetFilePlayout
+// ----------------------------------------------------------------------------
+
+WebRtc_Word32 AudioTransportImpl::SetFilePlayout(bool enable,
+                                                 const char* fileName)
+{
+    _playFromFile = enable;
+    if (enable)
+    {
+        return (_playFile.OpenFile(fileName, true, true, false));
+    } else
+    {
+        _playFile.Flush();
+        return (_playFile.CloseFile());
+    }
+}
+;
+
+void AudioTransportImpl::SetFullDuplex(bool enable)
+{
+    _fullDuplex = enable;
+
+    while (!_audioList.Empty())
+    {
+        ListItem* item = _audioList.First();
+        if (item)
+        {
+            AudioPacket* packet = static_cast<AudioPacket*> (item->GetItem());
+            if (packet)
+            {
+                delete packet;
+            }
+        }
+        _audioList.PopFront();
+    }
+}
+
+WebRtc_Word32 AudioTransportImpl::RecordedDataIsAvailable(
+    const void* audioSamples,
+    const WebRtc_UWord32 nSamples,
+    const WebRtc_UWord8 nBytesPerSample,
+    const WebRtc_UWord8 nChannels,
+    const WebRtc_UWord32 samplesPerSec,
+    const WebRtc_UWord32 totalDelayMS,
+    const WebRtc_Word32 clockDrift,
+    const WebRtc_UWord32 currentMicLevel,
+    WebRtc_UWord32& newMicLevel)
+{
+    if (_fullDuplex && _audioList.GetSize() < 15)
+    {
+        AudioPacket* packet = new AudioPacket();
+        memcpy(packet->dataBuffer, audioSamples, nSamples * nBytesPerSample);
+        packet->nSamples = (WebRtc_UWord16) nSamples;
+        packet->nBytesPerSample = nBytesPerSample;
+        packet->nChannels = nChannels;
+        packet->samplesPerSec = samplesPerSec;
+        _audioList.PushBack(packet);
+    }
+
+    _recCount++;
+    if (_recCount % 100 == 0)
+    {
+        bool addMarker(true);
+
+        if (_loopBackMeasurements)
+        {
+            addMarker = false;
+        }
+
+        if (_microphoneVolume)
+        {
+            WebRtc_UWord32 maxVolume(0);
+            WebRtc_UWord32 minVolume(0);
+            WebRtc_UWord32 volume(0);
+            WebRtc_UWord16 stepSize(0);
+            EXPECT_EQ(0, _audioDevice->MaxMicrophoneVolume(&maxVolume));
+            EXPECT_EQ(0, _audioDevice->MinMicrophoneVolume(&minVolume));
+            EXPECT_EQ(0, _audioDevice->MicrophoneVolumeStepSize(&stepSize));
+            EXPECT_EQ(0, _audioDevice->MicrophoneVolume(&volume));
+            if (volume == 0)
+            {
+                TEST_LOG("[0]");
+                addMarker = false;
+            }
+            int stepScale = (int) ((maxVolume - minVolume) / (stepSize * 10));
+            volume += (stepScale * stepSize);
+            if (volume > maxVolume)
+            {
+                TEST_LOG("[MAX]");
+                volume = 0;
+                addMarker = false;
+            }
+            EXPECT_EQ(0, _audioDevice->SetMicrophoneVolume(volume));
+        }
+
+        if (_microphoneAGC)
+        {
+            WebRtc_UWord32 maxVolume(0);
+            WebRtc_UWord32 minVolume(0);
+            WebRtc_UWord16 stepSize(0);
+            EXPECT_EQ(0, _audioDevice->MaxMicrophoneVolume(&maxVolume));
+            EXPECT_EQ(0, _audioDevice->MinMicrophoneVolume(&minVolume));
+            EXPECT_EQ(0, _audioDevice->MicrophoneVolumeStepSize(&stepSize));
+            // emulate real AGC (min->max->min->max etc.)
+            if (currentMicLevel <= 1)
+            {
+                TEST_LOG("[MIN]");
+                addMarker = false;
+            }
+            int stepScale = (int) ((maxVolume - minVolume) / (stepSize * 10));
+            newMicLevel = currentMicLevel + (stepScale * stepSize);
+            if (newMicLevel > maxVolume)
+            {
+                TEST_LOG("[MAX]");
+                newMicLevel = 1; // set lowest (non-zero) AGC level
+                addMarker = false;
+            }
+        }
+
+        if (_microphoneMute && (_recCount % 500 == 0))
+        {
+            bool muted(false);
+            EXPECT_EQ(0, _audioDevice->MicrophoneMute(&muted));
+            muted = !muted;
+            EXPECT_EQ(0, _audioDevice->SetMicrophoneMute(muted));
+            if (muted)
+            {
+                TEST_LOG("[MUTE ON]");
+                addMarker = false;
+            } else
+            {
+                TEST_LOG("[MUTE OFF]");
+                addMarker = false;
+            }
+        }
+
+        if (_microphoneBoost && (_recCount % 500 == 0))
+        {
+            bool boosted(false);
+            EXPECT_EQ(0, _audioDevice->MicrophoneBoost(&boosted));
+            boosted = !boosted;
+            EXPECT_EQ(0, _audioDevice->SetMicrophoneBoost(boosted));
+            if (boosted)
+            {
+                TEST_LOG("[BOOST ON]");
+                addMarker = false;
+            } else
+            {
+                TEST_LOG("[BOOST OFF]");
+                addMarker = false;
+            }
+        }
+
+        if ((nChannels == 1) && addMarker)
+        {
+            // mono
+            TEST_LOG("-");
+        } else if ((nChannels == 2) && (nBytesPerSample == 2) && addMarker)
+        {
+            AudioDeviceModule::ChannelType
+                chType(AudioDeviceModule::kChannelLeft);
+            EXPECT_EQ(0, _audioDevice->RecordingChannel(&chType));
+            if (chType == AudioDeviceModule::kChannelLeft)
+                TEST_LOG("-|");
+            else
+                TEST_LOG("|-");
+        } else if (addMarker)
+        {
+            // stereo
+            TEST_LOG("--");
+        }
+
+        if (nChannels == 2 && nBytesPerSample == 2)
+        {
+            // TEST_LOG("=> emulated mono (one channel exctracted from stereo input)\n");
+        }
+    }
+
+    return 0;
+}
+
+
+WebRtc_Word32 AudioTransportImpl::NeedMorePlayData(
+    const WebRtc_UWord32 nSamples,
+    const WebRtc_UWord8 nBytesPerSample,
+    const WebRtc_UWord8 nChannels,
+    const WebRtc_UWord32 samplesPerSec,
+    void* audioSamples,
+    WebRtc_UWord32& nSamplesOut)
+{
+    if (_fullDuplex)
+    {
+        if (_audioList.Empty())
+        {
+            // use zero stuffing when not enough data
+            memset(audioSamples, 0, nBytesPerSample * nSamples);
+        } else
+        {
+            ListItem* item = _audioList.First();
+            AudioPacket* packet = static_cast<AudioPacket*> (item->GetItem());
+            if (packet)
+            {
+                int ret(0);
+                int lenOut(0);
+                WebRtc_Word16 tmpBuf_96kHz[80 * 12];
+                WebRtc_Word16* ptr16In = NULL;
+                WebRtc_Word16* ptr16Out = NULL;
+
+                const WebRtc_UWord16 nSamplesIn = packet->nSamples;
+                const WebRtc_UWord8 nChannelsIn = packet->nChannels;
+                const WebRtc_UWord32 samplesPerSecIn = packet->samplesPerSec;
+                const WebRtc_UWord16 nBytesPerSampleIn =
+                    packet->nBytesPerSample;
+
+                WebRtc_Word32 fsInHz(samplesPerSecIn);
+                WebRtc_Word32 fsOutHz(samplesPerSec);
+
+                if (fsInHz == 44100)
+                    fsInHz = 44000;
+
+                if (fsOutHz == 44100)
+                    fsOutHz = 44000;
+
+                if (nChannelsIn == 2 && nBytesPerSampleIn == 4)
+                {
+                    // input is stereo => we will resample in stereo
+                    ret = _resampler.ResetIfNeeded(fsInHz, fsOutHz,
+                                                   kResamplerSynchronousStereo);
+                    if (ret == 0)
+                    {
+                        if (nChannels == 2)
+                        {
+                            _resampler.Push(
+                                (const WebRtc_Word16*) packet->dataBuffer,
+                                2 * nSamplesIn,
+                                (WebRtc_Word16*) audioSamples, 2
+                                * nSamples, lenOut);
+                        } else
+                        {
+                            _resampler.Push(
+                                (const WebRtc_Word16*) packet->dataBuffer,
+                                2 * nSamplesIn, tmpBuf_96kHz, 2
+                                * nSamples, lenOut);
+
+                            ptr16In = &tmpBuf_96kHz[0];
+                            ptr16Out = (WebRtc_Word16*) audioSamples;
+
+                            // do stereo -> mono
+                            for (unsigned int i = 0; i < nSamples; i++)
+                            {
+                                *ptr16Out = *ptr16In; // use left channel
+                                ptr16Out++;
+                                ptr16In++;
+                                ptr16In++;
+                            }
+                        }
+                        assert(2*nSamples == (WebRtc_UWord32)lenOut);
+                    } else
+                    {
+                        if (_playCount % 100 == 0)
+                            TEST_LOG(
+                                     "ERROR: unable to resample from %d to %d\n",
+                                     samplesPerSecIn, samplesPerSec);
+                    }
+                } else
+                {
+                    // input is mono (can be "reduced from stereo" as well) =>
+                    // we will resample in mono
+                    ret = _resampler.ResetIfNeeded(fsInHz, fsOutHz,
+                                                   kResamplerSynchronous);
+                    if (ret == 0)
+                    {
+                        if (nChannels == 1)
+                        {
+                            _resampler.Push(
+                                (const WebRtc_Word16*) packet->dataBuffer,
+                                nSamplesIn,
+                                (WebRtc_Word16*) audioSamples,
+                                nSamples, lenOut);
+                        } else
+                        {
+                            _resampler.Push(
+                                (const WebRtc_Word16*) packet->dataBuffer,
+                                nSamplesIn, tmpBuf_96kHz, nSamples,
+                                lenOut);
+
+                            ptr16In = &tmpBuf_96kHz[0];
+                            ptr16Out = (WebRtc_Word16*) audioSamples;
+
+                            // do mono -> stereo
+                            for (unsigned int i = 0; i < nSamples; i++)
+                            {
+                                *ptr16Out = *ptr16In; // left
+                                ptr16Out++;
+                                *ptr16Out = *ptr16In; // right (same as left sample)
+                                ptr16Out++;
+                                ptr16In++;
+                            }
+                        }
+                        assert(nSamples == (WebRtc_UWord32)lenOut);
+                    } else
+                    {
+                        if (_playCount % 100 == 0)
+                            TEST_LOG("ERROR: unable to resample from %d to %d\n",
+                                     samplesPerSecIn, samplesPerSec);
+                    }
+                }
+                nSamplesOut = nSamples;
+                delete packet;
+            }
+            _audioList.PopFront();
+        }
+    } // if (_fullDuplex)
+
+    if (_playFromFile && _playFile.Open())
+    {
+        WebRtc_Word16 fileBuf[480];
+
+        // read mono-file
+        WebRtc_Word32 len = _playFile.Read((WebRtc_Word8*) fileBuf, 2
+            * nSamples);
+        if (len != 2 * (WebRtc_Word32) nSamples)
+        {
+            _playFile.Rewind();
+            _playFile.Read((WebRtc_Word8*) fileBuf, 2 * nSamples);
+        }
+
+        // convert to stero if required
+        if (nChannels == 1)
+        {
+            memcpy(audioSamples, fileBuf, 2 * nSamples);
+        } else
+        {
+            // mono sample from file is duplicated and sent to left and right
+            // channels
+            WebRtc_Word16* audio16 = (WebRtc_Word16*) audioSamples;
+            for (unsigned int i = 0; i < nSamples; i++)
+            {
+                (*audio16) = fileBuf[i]; // left
+                audio16++;
+                (*audio16) = fileBuf[i]; // right
+                audio16++;
+            }
+        }
+    } // if (_playFromFile && _playFile.Open())
+
+    _playCount++;
+
+    if (_playCount % 100 == 0)
+    {
+        bool addMarker(true);
+
+        if (_speakerVolume)
+        {
+            WebRtc_UWord32 maxVolume(0);
+            WebRtc_UWord32 minVolume(0);
+            WebRtc_UWord32 volume(0);
+            WebRtc_UWord16 stepSize(0);
+            EXPECT_EQ(0, _audioDevice->MaxSpeakerVolume(&maxVolume));
+            EXPECT_EQ(0, _audioDevice->MinSpeakerVolume(&minVolume));
+            EXPECT_EQ(0, _audioDevice->SpeakerVolumeStepSize(&stepSize));
+            EXPECT_EQ(0, _audioDevice->SpeakerVolume(&volume));
+            if (volume == 0)
+            {
+                TEST_LOG("[0]");
+                addMarker = false;
+            }
+            WebRtc_UWord32 step = (maxVolume - minVolume) / 10;
+            step = (step < stepSize ? stepSize : step);
+            volume += step;
+            if (volume > maxVolume)
+            {
+                TEST_LOG("[MAX]");
+                volume = 0;
+                addMarker = false;
+            }
+            EXPECT_EQ(0, _audioDevice->SetSpeakerVolume(volume));
+        }
+
+        if (_speakerMute && (_playCount % 500 == 0))
+        {
+            bool muted(false);
+            EXPECT_EQ(0, _audioDevice->SpeakerMute(&muted));
+            muted = !muted;
+            EXPECT_EQ(0, _audioDevice->SetSpeakerMute(muted));
+            if (muted)
+            {
+                TEST_LOG("[MUTE ON]");
+                addMarker = false;
+            } else
+            {
+                TEST_LOG("[MUTE OFF]");
+                addMarker = false;
+            }
+        }
+
+        if (_loopBackMeasurements)
+        {
+            WebRtc_UWord16 recDelayMS(0);
+            WebRtc_UWord16 playDelayMS(0);
+            WebRtc_UWord32 nItemsInList(0);
+
+            nItemsInList = _audioList.GetSize();
+            EXPECT_EQ(0, _audioDevice->RecordingDelay(&recDelayMS));
+            EXPECT_EQ(0, _audioDevice->PlayoutDelay(&playDelayMS));
+            TEST_LOG("Delay (rec+play)+buf: %3u (%3u+%3u)+%3u [ms]\n",
+                     recDelayMS + playDelayMS + 10 * (nItemsInList + 1),
+                     recDelayMS, playDelayMS, 10 * (nItemsInList + 1));
+
+            addMarker = false;
+        }
+
+        if ((nChannels == 1) && addMarker)
+        {
+            TEST_LOG("+");
+        } else if ((nChannels == 2) && addMarker)
+        {
+            TEST_LOG("++");
+        }
+    } // if (_playCount % 100 == 0)
+
+    nSamplesOut = nSamples;
+
+    return 0;
+}
+
+FuncTestManager::FuncTestManager() :
+    _processThread(NULL),
+    _audioDevice(NULL),
+    _audioEventObserver(NULL),
+    _audioTransport(NULL)
+{
+  _playoutFile48 = webrtc::test::ResourcePath("audio_device\\audio_short48",
+                                              "pcm");
+  _playoutFile44 = webrtc::test::ResourcePath("audio_device\\audio_short44",
+                                              "pcm");
+  _playoutFile16 = webrtc::test::ResourcePath("audio_device\\audio_short16",
+                                              "pcm");
+  _playoutFile8 = webrtc::test::ResourcePath("audio_device\\audio_short8",
+                                             "pcm");
+}
+
+FuncTestManager::~FuncTestManager()
+{
+}
+
+WebRtc_Word32 FuncTestManager::Init()
+{
+    EXPECT_TRUE((_processThread = ProcessThread::CreateProcessThread()) != NULL);
+    if (_processThread == NULL)
+    {
+        return -1;
+    }
+    _processThread->Start();
+
+    // create the Audio Device module
+    EXPECT_TRUE((_audioDevice = AudioDeviceModuleImpl::Create(
+        555, ADM_AUDIO_LAYER)) != NULL);
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+    EXPECT_EQ(1, _audioDevice->AddRef());
+
+    // register the Audio Device module
+    _processThread->RegisterModule(_audioDevice);
+
+    // register event observer
+    _audioEventObserver = new AudioEventObserver(_audioDevice);
+    EXPECT_EQ(0, _audioDevice->RegisterEventObserver(_audioEventObserver));
+
+    // register audio transport
+    _audioTransport = new AudioTransportImpl(_audioDevice);
+    EXPECT_EQ(0, _audioDevice->RegisterAudioCallback(_audioTransport));
+
+    return 0;
+}
+
+WebRtc_Word32 FuncTestManager::Close()
+{
+    EXPECT_EQ(0, _audioDevice->RegisterEventObserver(NULL));
+    EXPECT_EQ(0, _audioDevice->RegisterAudioCallback(NULL));
+    EXPECT_EQ(0, _audioDevice->Terminate());
+
+    // release the ProcessThread object
+    if (_processThread)
+    {
+        _processThread->DeRegisterModule(_audioDevice);
+        _processThread->Stop();
+        ProcessThread::DestroyProcessThread(_processThread);
+    }
+
+    // delete the audio observer
+    if (_audioEventObserver)
+    {
+        delete _audioEventObserver;
+        _audioEventObserver = NULL;
+    }
+
+    // delete the audio transport
+    if (_audioTransport)
+    {
+        delete _audioTransport;
+        _audioTransport = NULL;
+    }
+
+    // release the AudioDeviceModule object
+    if (_audioDevice)
+    {
+        EXPECT_EQ(0, _audioDevice->Release());
+        _audioDevice = NULL;
+    }
+
+    // return the ThreadWrapper (singleton)
+    Trace::ReturnTrace();
+
+    // PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+WebRtc_Word32 FuncTestManager::DoTest(const TestType testType)
+{
+    switch (testType)
+    {
+        case TTAll:
+            TestAudioLayerSelection();
+            TestDeviceEnumeration();
+            TestDeviceSelection();
+            TestAudioTransport();
+            TestSpeakerVolume();
+            TestMicrophoneVolume();
+            TestLoopback();
+        case TTAudioLayerSelection:
+            TestAudioLayerSelection();
+            break;
+        case TTDeviceEnumeration:
+            TestDeviceEnumeration();
+            break;
+        case TTDeviceSelection:
+            TestDeviceSelection();
+            break;
+        case TTAudioTransport:
+            TestAudioTransport();
+            break;
+        case TTSpeakerVolume:
+            TestSpeakerVolume();
+            break;
+        case TTMicrophoneVolume:
+            TestMicrophoneVolume();
+            break;
+        case TTSpeakerMute:
+            TestSpeakerMute();
+            break;
+        case TTMicrophoneMute:
+            TestMicrophoneMute();
+            break;
+        case TTMicrophoneBoost:
+            TestMicrophoneBoost();
+            break;
+        case TTMicrophoneAGC:
+            TestMicrophoneAGC();
+            break;
+        case TTLoopback:
+            TestLoopback();
+            break;
+        case TTDeviceRemoval:
+            TestDeviceRemoval();
+            break;
+        case TTMobileAPI:
+            TestAdvancedMBAPI();
+        case TTTest:
+            TestExtra();
+            break;
+        default:
+            break;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 FuncTestManager::TestAudioLayerSelection()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Audio Layer test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    AudioDeviceModule::AudioLayer audioLayer;
+    EXPECT_EQ(0, audioDevice->ActiveAudioLayer(&audioLayer));
+
+    if (audioLayer == AudioDeviceModule::kWindowsWaveAudio)
+    {
+        TEST_LOG("\nActiveAudioLayer: kWindowsWaveAudio\n \n");
+    } else if (audioLayer == AudioDeviceModule::kWindowsCoreAudio)
+    {
+        TEST_LOG("\nActiveAudioLayer: kWindowsCoreAudio\n \n");
+    } else if (audioLayer == AudioDeviceModule::kLinuxAlsaAudio)
+    {
+        TEST_LOG("\nActiveAudioLayer: kLinuxAlsaAudio\n \n");
+    } else if (audioLayer == AudioDeviceModule::kLinuxPulseAudio)
+    {
+        TEST_LOG("\nActiveAudioLayer: kLinuxPulseAudio\n \n");
+    } else
+    {
+        TEST_LOG("\nActiveAudioLayer: INVALID\n \n");
+    }
+
+    char ch;
+    bool tryWinWave(false);
+    bool tryWinCore(false);
+
+    if (audioLayer == AudioDeviceModule::kWindowsWaveAudio)
+    {
+        TEST_LOG("Would you like to try kWindowsCoreAudio instead "
+            "[requires Win Vista or Win 7] (Y/N)?\n: ");
+        EXPECT_TRUE(scanf(" %c", &ch) > 0);
+        ch = toupper(ch);
+        if (ch == 'Y')
+        {
+            tryWinCore = true;
+        }
+    } else if (audioLayer == AudioDeviceModule::kWindowsCoreAudio)
+    {
+        TEST_LOG("Would you like to try kWindowsWaveAudio instead (Y/N)?\n: ");
+        EXPECT_TRUE(scanf(" %c", &ch) > 0);
+        ch = toupper(ch);
+        if (ch == 'Y')
+        {
+            tryWinWave = true;
+        }
+    }
+
+    if (tryWinWave || tryWinCore)
+    {
+        // =======================================
+        // First, close down what we have started
+
+        // terminate
+        EXPECT_EQ(0, _audioDevice->RegisterEventObserver(NULL));
+        EXPECT_EQ(0, _audioDevice->RegisterAudioCallback(NULL));
+        EXPECT_EQ(0, _audioDevice->Terminate());
+
+        // release the ProcessThread object
+        if (_processThread)
+        {
+            _processThread->DeRegisterModule(_audioDevice);
+            _processThread->Stop();
+            ProcessThread::DestroyProcessThread(_processThread);
+        }
+
+        // delete the audio observer
+        if (_audioEventObserver)
+        {
+            delete _audioEventObserver;
+            _audioEventObserver = NULL;
+        }
+
+        // delete the audio transport
+        if (_audioTransport)
+        {
+            delete _audioTransport;
+            _audioTransport = NULL;
+        }
+
+        // release the AudioDeviceModule object
+        if (_audioDevice)
+        {
+            EXPECT_EQ(0, _audioDevice->Release());
+            _audioDevice = NULL;
+        }
+
+        // ==================================================
+        // Next, try to make fresh start with new audio layer
+
+        EXPECT_TRUE((_processThread = ProcessThread::CreateProcessThread()) != NULL);
+        if (_processThread == NULL)
+        {
+            return -1;
+        }
+        _processThread->Start();
+
+        // create the Audio Device module based on selected audio layer
+        if (tryWinWave)
+        {
+            _audioDevice = AudioDeviceModuleImpl::Create(
+                555,
+                AudioDeviceModule::kWindowsWaveAudio);
+        } else if (tryWinCore)
+        {
+            _audioDevice = AudioDeviceModuleImpl::Create(
+                555,
+                AudioDeviceModule::kWindowsCoreAudio);
+        }
+
+        if (_audioDevice == NULL)
+        {
+            TEST_LOG("\nERROR: Switch of audio layer failed!\n");
+            // restore default audio layer instead
+            EXPECT_TRUE((_audioDevice = AudioDeviceModuleImpl::Create(
+                555, AudioDeviceModule::kPlatformDefaultAudio)) != NULL);
+        }
+
+        if (_audioDevice == NULL)
+        {
+            TEST_LOG("\nERROR: Failed to revert back to default audio layer!\n");
+            return -1;
+        }
+
+        EXPECT_EQ(1, _audioDevice->AddRef());
+
+        // register the Audio Device module
+        _processThread->RegisterModule(_audioDevice);
+
+        // register event observer
+        _audioEventObserver = new AudioEventObserver(_audioDevice);
+        EXPECT_EQ(0, _audioDevice->RegisterEventObserver(_audioEventObserver));
+
+        // register audio transport
+        _audioTransport = new AudioTransportImpl(_audioDevice);
+        EXPECT_EQ(0, _audioDevice->RegisterAudioCallback(_audioTransport));
+
+        EXPECT_EQ(0, _audioDevice->ActiveAudioLayer(&audioLayer));
+
+        if (audioLayer == AudioDeviceModule::kWindowsWaveAudio)
+        {
+            if (tryWinCore)
+                TEST_LOG("\nActiveAudioLayer: kWindowsWaveAudio <=> "
+                    "switch was *not* possible\n \n");
+            else
+                TEST_LOG("\nActiveAudioLayer: kWindowsWaveAudio <=> "
+                    "switch was possible\n \n");
+        } else if (audioLayer == AudioDeviceModule::kWindowsCoreAudio)
+        {
+            if (tryWinWave)
+                TEST_LOG("\nActiveAudioLayer: kWindowsCoreAudio <=> "
+                    "switch was *not* possible\n \n");
+            else
+                TEST_LOG("\nActiveAudioLayer: kWindowsCoreAudio <=> "
+                    "switch was possible\n \n");
+        }
+    } // if (tryWinWave || tryWinCore)
+
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+WebRtc_Word32 FuncTestManager::TestDeviceEnumeration()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Device Enumeration test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    EXPECT_EQ(0, audioDevice->Init());
+    EXPECT_TRUE(audioDevice->Initialized());
+
+    char name[kAdmMaxDeviceNameSize];
+    char guid[kAdmMaxGuidSize];
+
+    const WebRtc_Word16 nPlayoutDevices(audioDevice->PlayoutDevices());
+    EXPECT_TRUE(nPlayoutDevices >= 0);
+    TEST_LOG("\nPlayoutDevices: %u\n \n", nPlayoutDevices);
+    for (int n = 0; n < nPlayoutDevices; n++)
+    {
+        EXPECT_EQ(0, audioDevice->PlayoutDeviceName(n, name, guid));
+        TEST_LOG(
+                 "PlayoutDeviceName(%d) :   name=%s \n \
+	                 guid=%s\n",
+                 n, name, guid);
+    }
+
+#ifdef _WIN32
+    // default (-1)
+    EXPECT_EQ(0, audioDevice->PlayoutDeviceName(-1, name, guid));
+    TEST_LOG("PlayoutDeviceName(%d):   default name=%s \n \
+	                 default guid=%s\n", -1, name, guid);
+#else
+    // should fail
+    EXPECT_EQ(-1, audioDevice->PlayoutDeviceName(-1, name, guid));
+#endif
+
+    const WebRtc_Word16 nRecordingDevices(audioDevice->RecordingDevices());
+    EXPECT_TRUE(nRecordingDevices >= 0);
+    TEST_LOG("\nRecordingDevices: %u\n \n", nRecordingDevices);
+    for (int n = 0; n < nRecordingDevices; n++)
+    {
+        EXPECT_EQ(0, audioDevice->RecordingDeviceName(n, name, guid));
+        TEST_LOG(
+                 "RecordingDeviceName(%d) : name=%s \n \
+	                 guid=%s\n",
+                 n, name, guid);
+    }
+
+#ifdef _WIN32
+    // default (-1)
+    EXPECT_EQ(0, audioDevice->RecordingDeviceName(-1, name, guid));
+    TEST_LOG("RecordingDeviceName(%d): default name=%s \n \
+	                 default guid=%s\n", -1, name, guid);
+#else
+    // should fail
+    EXPECT_EQ(-1, audioDevice->PlayoutDeviceName(-1, name, guid));
+#endif
+
+    EXPECT_EQ(0, audioDevice->Terminate());
+    EXPECT_FALSE(audioDevice->Initialized());
+
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+WebRtc_Word32 FuncTestManager::TestDeviceSelection()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Device Selection test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+#define PRINT_HEADING(a, b) \
+	{ \
+		TEST_LOG("Set" #a "Device(" #b ") => \n"); \
+	} \
+
+#define PRINT_HEADING_IDX(a, b,c ) \
+	{ \
+		TEST_LOG("Set" #a "Device(%d) (%s) => \n", b, c); \
+	} \
+
+#define PRINT_STR(a, b) \
+	{ \
+                char str[128]; \
+                (b == true) ? (sprintf(str, "  %-17s: available\n", #a)) : (sprintf(str, "  %-17s: NA\n", #a)); \
+                TEST_LOG("%s", str); \
+	} \
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    EXPECT_EQ(0, audioDevice->Init());
+    EXPECT_TRUE(audioDevice->Initialized());
+
+    bool available(false);
+    WebRtc_Word16 nDevices(-1);
+    char name[kAdmMaxDeviceNameSize];
+    char guid[kAdmMaxGuidSize];
+
+    // =======
+    // Playout
+
+    nDevices = audioDevice->PlayoutDevices();
+    EXPECT_TRUE(nDevices >= 0);
+
+    TEST_LOG("\n");
+#ifdef _WIN32
+    EXPECT_TRUE(audioDevice->SetPlayoutDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+    PRINT_HEADING(Playout, kDefaultCommunicationDevice);
+    EXPECT_EQ(0, audioDevice->PlayoutIsAvailable(&available));
+    PRINT_STR(Playout, available);
+    if (available)
+    {
+        EXPECT_EQ(0, audioDevice->StereoPlayoutIsAvailable(&available));
+        PRINT_STR(Stereo Playout, available);
+    }
+    else
+    {
+        PRINT_STR(Stereo Playout, false);
+    }
+    EXPECT_EQ(0, audioDevice->SpeakerIsAvailable(&available));
+    PRINT_STR(Speaker, available);
+    EXPECT_EQ(0, audioDevice->SpeakerVolumeIsAvailable(&available));
+    PRINT_STR(Speaker Volume, available);
+    EXPECT_EQ(0, audioDevice->SpeakerMuteIsAvailable(&available));
+    PRINT_STR(Speaker Mute, available);
+
+    EXPECT_EQ(0, audioDevice->SetPlayoutDevice(AudioDeviceModule::kDefaultDevice));
+    PRINT_HEADING(Playout, kDefaultDevice);
+    EXPECT_EQ(0, audioDevice->PlayoutIsAvailable(&available));
+    PRINT_STR(Playout, available);
+    if (available)
+    {
+        EXPECT_EQ(0, audioDevice->StereoPlayoutIsAvailable(&available));
+        PRINT_STR(Stereo Playout, available);
+    }
+    else
+    {
+        PRINT_STR(Stereo Playout, false);
+    }
+    EXPECT_EQ(0, audioDevice->SpeakerIsAvailable(&available));
+    PRINT_STR(Speaker, available);
+    EXPECT_EQ(0, audioDevice->SpeakerVolumeIsAvailable(&available));
+    PRINT_STR(Speaker Volume, available);
+    EXPECT_EQ(0, audioDevice->SpeakerMuteIsAvailable(&available));
+    PRINT_STR(Speaker Mute, available);
+#else
+    EXPECT_TRUE(audioDevice->SetPlayoutDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == -1);
+    EXPECT_EQ(-1, audioDevice->SetPlayoutDevice(AudioDeviceModule::kDefaultDevice));
+#endif
+
+    for (int i = 0; i < nDevices; i++)
+    {
+        EXPECT_EQ(0, audioDevice->SetPlayoutDevice(i));
+        EXPECT_EQ(0, audioDevice->PlayoutDeviceName(i, name, guid));
+        PRINT_HEADING_IDX(Playout, i, name);
+        EXPECT_EQ(0, audioDevice->PlayoutIsAvailable(&available));
+        PRINT_STR(Playout, available);
+        if (available)
+        {
+            EXPECT_EQ(0, audioDevice->StereoPlayoutIsAvailable(&available));
+            PRINT_STR(Stereo Playout, available);
+        } else
+        {
+            PRINT_STR(Stereo Playout, false);
+        }
+        EXPECT_EQ(0, audioDevice->SpeakerIsAvailable(&available));
+        PRINT_STR(Speaker, available);
+        EXPECT_EQ(0, audioDevice->SpeakerVolumeIsAvailable(&available));
+        PRINT_STR(Speaker Volume, available);
+        EXPECT_EQ(0, audioDevice->SpeakerMuteIsAvailable(&available));
+        PRINT_STR(Speaker Mute, available);
+    }
+
+    // =========
+    // Recording
+
+    nDevices = audioDevice->RecordingDevices();
+    EXPECT_TRUE(nDevices >= 0);
+
+    TEST_LOG("\n");
+#ifdef _WIN32
+    EXPECT_TRUE(audioDevice->SetRecordingDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == 0);
+    PRINT_HEADING(Recording, kDefaultCommunicationDevice);
+    EXPECT_EQ(0, audioDevice->RecordingIsAvailable(&available));
+    PRINT_STR(Recording, available);
+    if (available)
+    {
+        EXPECT_EQ(0, audioDevice->StereoRecordingIsAvailable(&available));
+        PRINT_STR(Stereo Recording, available);
+    }
+    else
+    {
+        // special fix to ensure that we don't log 'available' when recording is not OK
+        PRINT_STR(Stereo Recording, false);
+    }
+    EXPECT_EQ(0, audioDevice->MicrophoneIsAvailable(&available));
+    PRINT_STR(Microphone, available);
+    EXPECT_EQ(0, audioDevice->MicrophoneVolumeIsAvailable(&available));
+    PRINT_STR(Microphone Volume, available);
+    EXPECT_EQ(0, audioDevice->MicrophoneMuteIsAvailable(&available));
+    PRINT_STR(Microphone Mute, available);
+    EXPECT_EQ(0, audioDevice->MicrophoneBoostIsAvailable(&available));
+    PRINT_STR(Microphone Boost, available);
+
+    EXPECT_EQ(0, audioDevice->SetRecordingDevice(AudioDeviceModule::kDefaultDevice));
+    PRINT_HEADING(Recording, kDefaultDevice);
+    EXPECT_EQ(0, audioDevice->RecordingIsAvailable(&available));
+    PRINT_STR(Recording, available);
+    if (available)
+    {
+        EXPECT_EQ(0, audioDevice->StereoRecordingIsAvailable(&available));
+        PRINT_STR(Stereo Recording, available);
+    }
+    else
+    {
+        // special fix to ensure that we don't log 'available' when recording is not OK
+        PRINT_STR(Stereo Recording, false);
+    }
+    EXPECT_EQ(0, audioDevice->MicrophoneIsAvailable(&available));
+    PRINT_STR(Microphone, available);
+    EXPECT_EQ(0, audioDevice->MicrophoneVolumeIsAvailable(&available));
+    PRINT_STR(Microphone Volume, available);
+    EXPECT_EQ(0, audioDevice->MicrophoneMuteIsAvailable(&available));
+    PRINT_STR(Microphone Mute, available);
+    EXPECT_EQ(0, audioDevice->MicrophoneBoostIsAvailable(&available));
+    PRINT_STR(Microphone Boost, available);
+#else
+    EXPECT_TRUE(audioDevice->SetRecordingDevice(
+        AudioDeviceModule::kDefaultCommunicationDevice) == -1);
+    EXPECT_EQ(-1, audioDevice->SetRecordingDevice(AudioDeviceModule::kDefaultDevice));
+#endif
+
+    for (int i = 0; i < nDevices; i++)
+    {
+        EXPECT_EQ(0, audioDevice->SetRecordingDevice(i));
+        EXPECT_EQ(0, audioDevice->RecordingDeviceName(i, name, guid));
+        PRINT_HEADING_IDX(Recording, i, name);
+        EXPECT_EQ(0, audioDevice->RecordingIsAvailable(&available));
+        PRINT_STR(Recording, available);
+        if (available)
+        {
+            EXPECT_EQ(0, audioDevice->StereoRecordingIsAvailable(&available));
+            PRINT_STR(Stereo Recording, available);
+        } else
+        {
+            // special fix to ensure that we don't log 'available' when recording
+            // is not OK
+            PRINT_STR(Stereo Recording, false);
+        }
+        EXPECT_EQ(0, audioDevice->MicrophoneIsAvailable(&available));
+        PRINT_STR(Microphone, available);
+        EXPECT_EQ(0, audioDevice->MicrophoneVolumeIsAvailable(&available));
+        PRINT_STR(Microphone Volume, available);
+        EXPECT_EQ(0, audioDevice->MicrophoneMuteIsAvailable(&available));
+        PRINT_STR(Microphone Mute, available);
+        EXPECT_EQ(0, audioDevice->MicrophoneBoostIsAvailable(&available));
+        PRINT_STR(Microphone Boost, available);
+    }
+
+    EXPECT_EQ(0, audioDevice->Terminate());
+    EXPECT_FALSE(audioDevice->Initialized());
+
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+WebRtc_Word32 FuncTestManager::TestAudioTransport()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Audio Transport test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    EXPECT_EQ(0, audioDevice->Init());
+    EXPECT_TRUE(audioDevice->Initialized());
+
+    bool recIsAvailable(false);
+    bool playIsAvailable(false);
+
+    if (SelectRecordingDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    EXPECT_EQ(0, audioDevice->RecordingIsAvailable(&recIsAvailable));
+    if (!recIsAvailable)
+    {
+        TEST_LOG(
+                 "\nWARNING: Recording is not available for the selected device!\n \n");
+    }
+
+    if (SelectPlayoutDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    EXPECT_EQ(0, audioDevice->PlayoutIsAvailable(&playIsAvailable));
+    if (recIsAvailable && playIsAvailable)
+    {
+        _audioTransport->SetFullDuplex(true);
+    } else if (!playIsAvailable)
+    {
+        TEST_LOG(
+                 "\nWARNING: Playout is not available for the selected device!\n \n");
+    }
+
+    bool available(false);
+    WebRtc_UWord32 samplesPerSec(0);
+
+    if (playIsAvailable)
+    {
+        // =========================================
+        // Start by playing out an existing PCM file
+
+        EXPECT_EQ(0, audioDevice->SpeakerVolumeIsAvailable(&available));
+        if (available)
+        {
+            WebRtc_UWord32 maxVolume(0);
+            EXPECT_EQ(0, audioDevice->MaxSpeakerVolume(&maxVolume));
+            EXPECT_EQ(0, audioDevice->SetSpeakerVolume(maxVolume/2));
+        }
+
+        EXPECT_EQ(0, audioDevice->RegisterAudioCallback(_audioTransport));
+
+        EXPECT_EQ(0, audioDevice->InitPlayout());
+        EXPECT_EQ(0, audioDevice->PlayoutSampleRate(&samplesPerSec));
+        if (samplesPerSec == 48000) {
+            _audioTransport->SetFilePlayout(
+                true, GetResource(_playoutFile48.c_str()));
+        } else if (samplesPerSec == 44100 || samplesPerSec == 44000) {
+            _audioTransport->SetFilePlayout(
+                true, GetResource(_playoutFile44.c_str()));
+        } else if (samplesPerSec == 16000) {
+            _audioTransport->SetFilePlayout(
+                true, GetResource(_playoutFile16.c_str()));
+        } else if (samplesPerSec == 8000) {
+            _audioTransport->SetFilePlayout(
+                true, GetResource(_playoutFile8.c_str()));
+        } else {
+            TEST_LOG("\nERROR: Sample rate (%u) is not supported!\n \n",
+                     samplesPerSec);
+            return -1;
+        }
+        EXPECT_EQ(0, audioDevice->StartPlayout());
+
+        if (audioDevice->Playing())
+        {
+            TEST_LOG("\n> Listen to the file being played (fs=%d) out "
+                "and verify that the audio quality is OK.\n"
+                "> Press any key to stop playing...\n \n",
+                samplesPerSec);
+            PAUSE(DEFAULT_PAUSE_TIME);
+        }
+
+        EXPECT_EQ(0, audioDevice->StopPlayout());
+        EXPECT_EQ(0, audioDevice->RegisterAudioCallback(NULL));
+
+        _audioTransport->SetFilePlayout(false);
+    }
+
+    bool enabled(false);
+    if (recIsAvailable)
+    {
+        // ====================================
+        // Next, record from microphone to file
+
+        EXPECT_EQ(0, audioDevice->MicrophoneVolumeIsAvailable(&available));
+        if (available)
+        {
+            WebRtc_UWord32 maxVolume(0);
+            EXPECT_EQ(0, audioDevice->MaxMicrophoneVolume(&maxVolume));
+            EXPECT_EQ(0, audioDevice->SetMicrophoneVolume(maxVolume));
+        }
+
+        EXPECT_TRUE(audioDevice->StartRawInputFileRecording(
+            GetFilename(RecordedMicrophoneFile)) == 0);
+        EXPECT_EQ(0, audioDevice->RegisterAudioCallback(_audioTransport));
+
+        EXPECT_EQ(0, audioDevice->InitRecording());
+        EXPECT_EQ(0, audioDevice->StereoRecording(&enabled));
+        if (enabled)
+        {
+            // ensure file recording in mono
+            EXPECT_EQ(0, audioDevice->SetRecordingChannel(AudioDeviceModule::kChannelLeft));
+        }
+        EXPECT_EQ(0, audioDevice->StartRecording());
+        SleepMs(100);
+
+        EXPECT_TRUE(audioDevice->Recording());
+        if (audioDevice->Recording())
+        {
+            TEST_LOG("\n \n> The microphone input signal is now being recorded "
+                "to a PCM file.\n"
+                "> Speak into the microphone to ensure that your voice is"
+                " recorded.\n> Press any key to stop recording...\n \n");
+            PAUSE(DEFAULT_PAUSE_TIME);
+        }
+
+        EXPECT_EQ(0, audioDevice->StereoRecording(&enabled));
+        if (enabled)
+        {
+            EXPECT_EQ(0, audioDevice->SetRecordingChannel(AudioDeviceModule::kChannelBoth));
+        }
+        EXPECT_EQ(0, audioDevice->StopRecording());
+        EXPECT_EQ(0, audioDevice->RegisterAudioCallback(NULL));
+        EXPECT_EQ(0, audioDevice->StopRawInputFileRecording());
+    }
+
+    if (recIsAvailable && playIsAvailable)
+    {
+        // ==========================
+        // Play out the recorded file
+
+        _audioTransport->SetFilePlayout(true,
+                                        GetFilename(RecordedMicrophoneFile));
+
+        EXPECT_EQ(0, audioDevice->RegisterAudioCallback(_audioTransport));
+        EXPECT_EQ(0, audioDevice->PlayoutIsAvailable(&available));
+        if (available)
+        {
+            EXPECT_EQ(0, audioDevice->InitPlayout());
+            EXPECT_EQ(0, audioDevice->StartPlayout());
+            SleepMs(100);
+        }
+
+        EXPECT_TRUE(audioDevice->Playing());
+        if (audioDevice->Playing())
+        {
+            TEST_LOG("\n \n> Listen to the recorded file and verify that the "
+                "audio quality is OK.\n"
+                "> Press any key to stop listening...\n \n");
+            PAUSE(DEFAULT_PAUSE_TIME);
+        }
+
+        EXPECT_EQ(0, audioDevice->StopPlayout());
+        EXPECT_EQ(0, audioDevice->RegisterAudioCallback(NULL));
+
+        _audioTransport->SetFilePlayout(false);
+    }
+
+    if (recIsAvailable && playIsAvailable)
+    {
+        // ==============================
+        // Finally, make full duplex test
+
+        WebRtc_UWord32 playSamplesPerSec(0);
+        WebRtc_UWord32 recSamplesPerSecRec(0);
+
+        EXPECT_EQ(0, audioDevice->RegisterAudioCallback(_audioTransport));
+
+        _audioTransport->SetFullDuplex(true);
+
+        EXPECT_EQ(0, audioDevice->MicrophoneVolumeIsAvailable(&available));
+        if (available)
+        {
+            WebRtc_UWord32 maxVolume(0);
+            EXPECT_EQ(0, audioDevice->MaxMicrophoneVolume(&maxVolume));
+            EXPECT_EQ(0, audioDevice->SetMicrophoneVolume(maxVolume));
+        }
+
+        EXPECT_EQ(0, audioDevice->InitRecording());
+        EXPECT_EQ(0, audioDevice->InitPlayout());
+        EXPECT_EQ(0, audioDevice->PlayoutSampleRate(&playSamplesPerSec));
+        EXPECT_EQ(0, audioDevice->RecordingSampleRate(&recSamplesPerSecRec));
+        if (playSamplesPerSec != recSamplesPerSecRec)
+        {
+            TEST_LOG("\nERROR: sample rates does not match (fs_play=%u, fs_rec=%u)",
+                     playSamplesPerSec, recSamplesPerSecRec);
+            EXPECT_EQ(0, audioDevice->StopRecording());
+            EXPECT_EQ(0, audioDevice->StopPlayout());
+            EXPECT_EQ(0, audioDevice->RegisterAudioCallback(NULL));
+            _audioTransport->SetFullDuplex(false);
+            return -1;
+        }
+
+        EXPECT_EQ(0, audioDevice->StartRecording());
+        EXPECT_EQ(0, audioDevice->StartPlayout());
+        SleepMs(100);
+
+        if (audioDevice->Playing() && audioDevice->Recording())
+        {
+            TEST_LOG("\n \n> Full duplex audio (fs=%u) is now active.\n"
+                "> Speak into the microphone and verify that your voice is "
+                "played out in loopback.\n> Press any key to stop...\n \n",
+                     playSamplesPerSec);
+            PAUSE(DEFAULT_PAUSE_TIME);
+        }
+
+        EXPECT_EQ(0, audioDevice->StopRecording());
+        EXPECT_EQ(0, audioDevice->StopPlayout());
+        EXPECT_EQ(0, audioDevice->RegisterAudioCallback(NULL));
+
+        _audioTransport->SetFullDuplex(false);
+    }
+
+    EXPECT_EQ(0, audioDevice->Terminate());
+    EXPECT_FALSE(audioDevice->Initialized());
+
+    TEST_LOG("\n");
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+WebRtc_Word32 FuncTestManager::TestSpeakerVolume()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Speaker Volume test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    EXPECT_EQ(0, audioDevice->Init());
+    EXPECT_TRUE(audioDevice->Initialized());
+
+    if (SelectPlayoutDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    bool available(false);
+    WebRtc_UWord32 startVolume(0);
+    WebRtc_UWord32 samplesPerSec(0);
+
+    EXPECT_EQ(0, audioDevice->SpeakerVolumeIsAvailable(&available));
+    if (available)
+    {
+        _audioTransport->SetSpeakerVolume(true);
+    } else
+    {
+        TEST_LOG("\nERROR: Volume control is not available for the selected "
+            "device!\n \n");
+        return -1;
+    }
+
+    // store initial volume setting
+    EXPECT_EQ(0, audioDevice->InitSpeaker());
+    EXPECT_EQ(0, audioDevice->SpeakerVolume(&startVolume));
+
+    // start at volume 0
+    EXPECT_EQ(0, audioDevice->SetSpeakerVolume(0));
+
+    // ======================================
+    // Start playing out an existing PCM file
+
+    EXPECT_EQ(0, audioDevice->RegisterAudioCallback(_audioTransport));
+    EXPECT_EQ(0, audioDevice->PlayoutIsAvailable(&available));
+    if (available)
+    {
+        EXPECT_EQ(0, audioDevice->InitPlayout());
+        EXPECT_EQ(0, audioDevice->PlayoutSampleRate(&samplesPerSec));
+        if (48000 == samplesPerSec) {
+            _audioTransport->SetFilePlayout(
+                true, GetResource(_playoutFile48.c_str()));
+        } else if (44100 == samplesPerSec || samplesPerSec == 44000) {
+            _audioTransport->SetFilePlayout(
+                true, GetResource(_playoutFile44.c_str()));
+        } else if (samplesPerSec == 16000) {
+            _audioTransport->SetFilePlayout(
+                true, GetResource(_playoutFile16.c_str()));
+        } else if (samplesPerSec == 8000) {
+            _audioTransport->SetFilePlayout(
+                true, GetResource(_playoutFile8.c_str()));
+        } else {
+            TEST_LOG("\nERROR: Sample rate (%d) is not supported!\n \n",
+                     samplesPerSec);
+            return -1;
+        }
+        EXPECT_EQ(0, audioDevice->StartPlayout());
+    }
+
+    EXPECT_TRUE(audioDevice->Playing());
+    if (audioDevice->Playing())
+    {
+        TEST_LOG("\n> Listen to the file being played out and verify that the "
+            "selected speaker volume is varied between [~0] and [~MAX].\n"
+            "> The file shall be played out with an increasing volume level "
+            "correlated to the speaker volume.\n"
+            "> Press any key to stop playing...\n \n");
+        PAUSE(10000);
+    }
+
+    EXPECT_EQ(0, audioDevice->StopPlayout());
+    EXPECT_EQ(0, audioDevice->RegisterAudioCallback(NULL));
+
+    _audioTransport->SetSpeakerVolume(false);
+    _audioTransport->SetFilePlayout(false);
+
+    // restore volume setting
+    EXPECT_EQ(0, audioDevice->SetSpeakerVolume(startVolume));
+
+    TEST_LOG("\n");
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+WebRtc_Word32 FuncTestManager::TestSpeakerMute()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Speaker Mute test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    EXPECT_EQ(0, audioDevice->Init());
+    EXPECT_TRUE(audioDevice->Initialized());
+
+    if (SelectPlayoutDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    bool available(false);
+    bool startMute(false);
+    WebRtc_UWord32 samplesPerSec(0);
+
+    EXPECT_EQ(0, audioDevice->SpeakerMuteIsAvailable(&available));
+    if (available)
+    {
+        _audioTransport->SetSpeakerMute(true);
+    } else
+    {
+        TEST_LOG(
+                 "\nERROR: Mute control is not available for the selected"
+                 " device!\n \n");
+        return -1;
+    }
+
+    // store initial mute setting
+    EXPECT_EQ(0, audioDevice->InitSpeaker());
+    EXPECT_EQ(0, audioDevice->SpeakerMute(&startMute));
+
+    // start with no mute
+    EXPECT_EQ(0, audioDevice->SetSpeakerMute(false));
+
+    // ======================================
+    // Start playing out an existing PCM file
+
+    EXPECT_EQ(0, audioDevice->RegisterAudioCallback(_audioTransport));
+    EXPECT_EQ(0, audioDevice->PlayoutIsAvailable(&available));
+    if (available)
+    {
+        EXPECT_EQ(0, audioDevice->InitPlayout());
+        EXPECT_EQ(0, audioDevice->PlayoutSampleRate(&samplesPerSec));
+        if (48000 == samplesPerSec)
+            _audioTransport->SetFilePlayout(true, _playoutFile48.c_str());
+        else if (44100 == samplesPerSec || 44000 == samplesPerSec)
+            _audioTransport->SetFilePlayout(true, _playoutFile44.c_str());
+        else
+        {
+            TEST_LOG("\nERROR: Sample rate (%d) is not supported!\n \n",
+                     samplesPerSec);
+            return -1;
+        }
+        EXPECT_EQ(0, audioDevice->StartPlayout());
+    }
+
+    EXPECT_TRUE(audioDevice->Playing());
+    if (audioDevice->Playing())
+    {
+        TEST_LOG("\n> Listen to the file being played out and verify that the"
+            " selected speaker mute control is toggled between [MUTE ON] and"
+            " [MUTE OFF].\n> You should only hear the file during the"
+            " 'MUTE OFF' periods.\n"
+            "> Press any key to stop playing...\n \n");
+        PAUSE(DEFAULT_PAUSE_TIME);
+    }
+
+    EXPECT_EQ(0, audioDevice->StopPlayout());
+    EXPECT_EQ(0, audioDevice->RegisterAudioCallback(NULL));
+
+    _audioTransport->SetSpeakerMute(false);
+    _audioTransport->SetFilePlayout(false);
+
+    // restore mute setting
+    EXPECT_EQ(0, audioDevice->SetSpeakerMute(startMute));
+
+    TEST_LOG("\n");
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+WebRtc_Word32 FuncTestManager::TestMicrophoneVolume()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Microphone Volume test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    EXPECT_EQ(0, audioDevice->Init());
+    EXPECT_TRUE(audioDevice->Initialized());
+
+    if (SelectRecordingDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    bool available(false);
+    EXPECT_EQ(0, audioDevice->MicrophoneVolumeIsAvailable(&available));
+    if (available)
+    {
+        _audioTransport->SetMicrophoneVolume(true);
+    } else
+    {
+        TEST_LOG("\nERROR: Volume control is not available for the selected "
+            "device!\n \n");
+        return -1;
+    }
+
+    if (SelectPlayoutDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    EXPECT_EQ(0, audioDevice->PlayoutIsAvailable(&available));
+    if (available)
+    {
+        _audioTransport->SetFullDuplex(true);
+    } else
+    {
+        TEST_LOG("\nERROR: Playout is not available for the selected "
+            "device!\n \n");
+        return -1;
+    }
+
+    TEST_LOG("\nEnable recording of microphone input to file (%s) during this"
+        " test (Y/N)?\n: ",
+             RecordedMicrophoneVolumeFile);
+    char ch;
+    bool fileRecording(false);
+    EXPECT_TRUE(scanf(" %c", &ch) > 0);
+    ch = toupper(ch);
+    if (ch == 'Y')
+    {
+        fileRecording = true;
+    }
+
+    WebRtc_UWord32 startVolume(0);
+    bool enabled(false);
+
+    // store initial volume setting
+    EXPECT_EQ(0, audioDevice->InitMicrophone());
+    EXPECT_EQ(0, audioDevice->MicrophoneVolume(&startVolume));
+
+    // start at volume 0
+    EXPECT_EQ(0, audioDevice->SetMicrophoneVolume(0));
+
+    // ======================================================================
+    // Start recording from the microphone while the mic volume is changed
+    // continuously.
+    // Also, start playing out the input to enable real-time verification.
+
+    if (fileRecording)
+    {
+        EXPECT_EQ(0, audioDevice->StartRawInputFileRecording(RecordedMicrophoneVolumeFile));
+    }
+    EXPECT_EQ(0, audioDevice->RegisterAudioCallback(_audioTransport));
+    EXPECT_EQ(0, audioDevice->RecordingIsAvailable(&available));
+    if (available)
+    {
+        EXPECT_EQ(0, audioDevice->InitRecording());
+        EXPECT_EQ(0, audioDevice->StereoRecording(&enabled));
+        if (enabled)
+        {
+            // ensures a mono file
+            EXPECT_EQ(0, audioDevice->SetRecordingChannel(AudioDeviceModule::kChannelRight));
+        }
+        EXPECT_EQ(0, audioDevice->StartRecording());
+    }
+    EXPECT_EQ(0, audioDevice->PlayoutIsAvailable(&available));
+    if (available)
+    {
+        EXPECT_EQ(0, audioDevice->InitPlayout());
+        EXPECT_EQ(0, audioDevice->StartPlayout());
+    }
+
+    EXPECT_TRUE(audioDevice->Recording());
+    EXPECT_TRUE(audioDevice->Playing());
+    if (audioDevice->Recording() && audioDevice->Playing())
+    {
+        TEST_LOG("\n> Speak into the microphone and verify that the selected "
+            "microphone volume is varied between [~0] and [~MAX].\n"
+            "> You should hear your own voice with an increasing volume level"
+            " correlated to the microphone volume.\n"
+            "> After a finalized test (and if file recording was enabled) "
+            "verify the recorded result off line.\n"
+            "> Press any key to stop...\n \n");
+        PAUSE(DEFAULT_PAUSE_TIME);
+    }
+
+    if (fileRecording)
+    {
+        EXPECT_EQ(0, audioDevice->StopRawInputFileRecording());
+    }
+    EXPECT_EQ(0, audioDevice->StopRecording());
+    EXPECT_EQ(0, audioDevice->StopPlayout());
+    EXPECT_EQ(0, audioDevice->RegisterAudioCallback(NULL));
+    EXPECT_EQ(0, audioDevice->StereoRecordingIsAvailable(&available));
+
+    _audioTransport->SetMicrophoneVolume(false);
+    _audioTransport->SetFullDuplex(false);
+
+    // restore volume setting
+    EXPECT_EQ(0, audioDevice->SetMicrophoneVolume(startVolume));
+
+    TEST_LOG("\n");
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+WebRtc_Word32 FuncTestManager::TestMicrophoneMute()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Microphone Mute test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    EXPECT_EQ(0, audioDevice->Init());
+    EXPECT_TRUE(audioDevice->Initialized());
+
+    if (SelectRecordingDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    bool available(false);
+    EXPECT_EQ(0, audioDevice->MicrophoneMuteIsAvailable(&available));
+    if (available)
+    {
+        _audioTransport->SetMicrophoneMute(true);
+    } else
+    {
+        TEST_LOG("\nERROR: Mute control is not available for the selected"
+            " device!\n \n");
+        return -1;
+    }
+
+    if (SelectPlayoutDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    EXPECT_EQ(0, audioDevice->PlayoutIsAvailable(&available));
+    if (available)
+    {
+        _audioTransport->SetFullDuplex(true);
+    } else
+    {
+        TEST_LOG("\nERROR: Playout is not available for the selected "
+            "device!\n \n");
+        return -1;
+    }
+
+    TEST_LOG("\nEnable recording of microphone input to file (%s) during this "
+        "test (Y/N)?\n: ",
+        RecordedMicrophoneMuteFile);
+    char ch;
+    bool fileRecording(false);
+    EXPECT_TRUE(scanf(" %c", &ch) > 0);
+    ch = toupper(ch);
+    if (ch == 'Y')
+    {
+        fileRecording = true;
+    }
+
+    bool startMute(false);
+    bool enabled(false);
+
+    // store initial volume setting
+    EXPECT_EQ(0, audioDevice->InitMicrophone());
+    EXPECT_EQ(0, audioDevice->MicrophoneMute(&startMute));
+
+    // start at no mute
+    EXPECT_EQ(0, audioDevice->SetMicrophoneMute(false));
+
+    // ==================================================================
+    // Start recording from the microphone while the mic mute is toggled
+    // continuously.
+    // Also, start playing out the input to enable real-time verification.
+
+    if (fileRecording)
+    {
+        EXPECT_EQ(0, audioDevice->StartRawInputFileRecording(RecordedMicrophoneMuteFile));
+    }
+    EXPECT_EQ(0, audioDevice->RegisterAudioCallback(_audioTransport));
+    EXPECT_EQ(0, audioDevice->RecordingIsAvailable(&available));
+    if (available)
+    {
+        EXPECT_EQ(0, audioDevice->InitRecording());
+        EXPECT_EQ(0, audioDevice->StereoRecording(&enabled));
+        if (enabled)
+        {
+            // ensure file recording in mono
+            EXPECT_EQ(0, audioDevice->SetRecordingChannel(AudioDeviceModule::kChannelLeft));
+        }
+        EXPECT_EQ(0, audioDevice->StartRecording());
+    }
+    EXPECT_EQ(0, audioDevice->PlayoutIsAvailable(&available));
+    if (available)
+    {
+        EXPECT_EQ(0, audioDevice->InitPlayout());
+        EXPECT_EQ(0, audioDevice->StartPlayout());
+    }
+
+    EXPECT_TRUE(audioDevice->Recording());
+    EXPECT_TRUE(audioDevice->Playing());
+    if (audioDevice->Recording() && audioDevice->Playing())
+    {
+        TEST_LOG("\n> Speak into the microphone and verify that the selected "
+            "microphone mute control is toggled between [MUTE ON] and [MUTE OFF]."
+            "\n> You should only hear your own voice in loopback during the"
+            " 'MUTE OFF' periods.\n> After a finalized test (and if file "
+            "recording was enabled) verify the recorded result off line.\n"
+            "> Press any key to stop...\n \n");
+        PAUSE(DEFAULT_PAUSE_TIME);
+    }
+
+    if (fileRecording)
+    {
+        EXPECT_EQ(0, audioDevice->StopRawInputFileRecording());
+    }
+    EXPECT_EQ(0, audioDevice->StopRecording());
+    EXPECT_EQ(0, audioDevice->StopPlayout());
+    EXPECT_EQ(0, audioDevice->RegisterAudioCallback(NULL));
+
+    _audioTransport->SetMicrophoneMute(false);
+    _audioTransport->SetFullDuplex(false);
+
+    // restore volume setting
+    EXPECT_EQ(0, audioDevice->SetMicrophoneMute(startMute));
+
+    TEST_LOG("\n");
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+WebRtc_Word32 FuncTestManager::TestMicrophoneBoost()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Microphone Boost test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    EXPECT_EQ(0, audioDevice->Init());
+    EXPECT_TRUE(audioDevice->Initialized());
+
+    if (SelectRecordingDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    bool available(false);
+    EXPECT_EQ(0, audioDevice->MicrophoneBoostIsAvailable(&available));
+    if (available)
+    {
+        _audioTransport->SetMicrophoneBoost(true);
+    } else
+    {
+        TEST_LOG(
+                 "\nERROR: Boost control is not available for the selected device!\n \n");
+        return -1;
+    }
+
+    if (SelectPlayoutDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    EXPECT_EQ(0, audioDevice->PlayoutIsAvailable(&available));
+    if (available)
+    {
+        _audioTransport->SetFullDuplex(true);
+    } else
+    {
+        TEST_LOG("\nERROR: Playout is not available for the selected device!\n \n");
+        return -1;
+    }
+
+    TEST_LOG("\nEnable recording of microphone input to file (%s) during this "
+        "test (Y/N)?\n: ",
+        RecordedMicrophoneBoostFile);
+    char ch;
+    bool fileRecording(false);
+    EXPECT_TRUE(scanf(" %c", &ch) > 0);
+    ch = toupper(ch);
+    if (ch == 'Y')
+    {
+        fileRecording = true;
+    }
+
+    bool startBoost(false);
+    bool enabled(false);
+
+    // store initial volume setting
+    EXPECT_EQ(0, audioDevice->InitMicrophone());
+    EXPECT_EQ(0, audioDevice->MicrophoneBoost(&startBoost));
+
+    // start at no boost
+    EXPECT_EQ(0, audioDevice->SetMicrophoneBoost(false));
+
+    // ==================================================================
+    // Start recording from the microphone while the mic boost is toggled
+    // continuously.
+    // Also, start playing out the input to enable real-time verification.
+
+    if (fileRecording)
+    {
+        EXPECT_EQ(0, audioDevice->StartRawInputFileRecording(RecordedMicrophoneBoostFile));
+    }
+    EXPECT_EQ(0, audioDevice->RegisterAudioCallback(_audioTransport));
+    EXPECT_EQ(0, audioDevice->RecordingIsAvailable(&available));
+    if (available)
+    {
+        EXPECT_EQ(0, audioDevice->InitRecording());
+        EXPECT_EQ(0, audioDevice->StereoRecording(&enabled));
+        if (enabled)
+        {
+            // ensure file recording in mono
+            EXPECT_EQ(0, audioDevice->SetRecordingChannel(AudioDeviceModule::kChannelLeft));
+        }
+        EXPECT_EQ(0, audioDevice->StartRecording());
+    }
+    EXPECT_EQ(0, audioDevice->PlayoutIsAvailable(&available));
+    if (available)
+    {
+        EXPECT_EQ(0, audioDevice->InitPlayout());
+        EXPECT_EQ(0, audioDevice->StartPlayout());
+    }
+
+    EXPECT_TRUE(audioDevice->Recording());
+    EXPECT_TRUE(audioDevice->Playing());
+    if (audioDevice->Recording() && audioDevice->Playing())
+    {
+        TEST_LOG("\n> Speak into the microphone and verify that the selected "
+            "microphone boost control is toggled between [BOOST ON] and [BOOST OFF].\n"
+            "> You should hear your own voice with an increased volume level "
+            "during the 'BOOST ON' periods.\n \n"
+            "> After a finalized test (and if file recording was enabled) verify"
+            " the recorded result off line.\n"
+        "> Press any key to stop...\n \n");
+        PAUSE(DEFAULT_PAUSE_TIME);
+    }
+
+    if (fileRecording)
+    {
+        EXPECT_EQ(0, audioDevice->StopRawInputFileRecording());
+    }
+    EXPECT_EQ(0, audioDevice->StopRecording());
+    EXPECT_EQ(0, audioDevice->StopPlayout());
+    EXPECT_EQ(0, audioDevice->RegisterAudioCallback(NULL));
+
+    _audioTransport->SetMicrophoneBoost(false);
+    _audioTransport->SetFullDuplex(false);
+
+    // restore boost setting
+    EXPECT_EQ(0, audioDevice->SetMicrophoneBoost(startBoost));
+
+    TEST_LOG("\n");
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+WebRtc_Word32 FuncTestManager::TestMicrophoneAGC()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Microphone AGC test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    EXPECT_EQ(0, audioDevice->Init());
+    EXPECT_TRUE(audioDevice->Initialized());
+
+    if (SelectRecordingDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    bool available(false);
+    EXPECT_EQ(0, audioDevice->MicrophoneVolumeIsAvailable(&available));
+    if (available)
+    {
+        _audioTransport->SetMicrophoneAGC(true);
+    } else
+    {
+        TEST_LOG("\nERROR: It is not possible to control the microphone volume"
+            " for the selected device!\n \n");
+        return -1;
+    }
+
+    if (SelectPlayoutDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    EXPECT_EQ(0, audioDevice->PlayoutIsAvailable(&available));
+    if (available)
+    {
+        _audioTransport->SetFullDuplex(true);
+    } else
+    {
+        TEST_LOG("\nERROR: Playout is not available for the selected device!\n \n");
+        return -1;
+    }
+
+    TEST_LOG("\nEnable recording of microphone input to file (%s) during "
+        "this test (Y/N)?\n: ",
+        RecordedMicrophoneAGCFile);
+    char ch;
+    bool fileRecording(false);
+    EXPECT_TRUE(scanf(" %c", &ch) > 0);
+    ch = toupper(ch);
+    if (ch == 'Y')
+    {
+        fileRecording = true;
+    }
+
+    WebRtc_UWord32 startVolume(0);
+    bool enabled(false);
+
+    // store initial volume setting
+    EXPECT_EQ(0, audioDevice->InitMicrophone());
+    EXPECT_EQ(0, audioDevice->MicrophoneVolume(&startVolume));
+
+    // ====================================================================
+    // Start recording from the microphone while the mic volume is changed
+    // continuously
+    // by the emulated AGC (implemented by our audio transport).
+    // Also, start playing out the input to enable real-time verification.
+
+    if (fileRecording)
+    {
+        EXPECT_EQ(0, audioDevice->StartRawInputFileRecording(RecordedMicrophoneAGCFile));
+    }
+    EXPECT_EQ(0, audioDevice->RegisterAudioCallback(_audioTransport));
+    EXPECT_EQ(0, audioDevice->RecordingIsAvailable(&available));
+    if (available)
+    {
+        EXPECT_EQ(0, audioDevice->SetAGC(true));
+        EXPECT_EQ(0, audioDevice->InitRecording());
+        EXPECT_EQ(0, audioDevice->StereoRecording(&enabled));
+        if (enabled)
+        {
+            // ensures a mono file
+            EXPECT_EQ(0, audioDevice->SetRecordingChannel(AudioDeviceModule::kChannelRight));
+        }
+        EXPECT_EQ(0, audioDevice->StartRecording());
+    }
+    EXPECT_EQ(0, audioDevice->PlayoutIsAvailable(&available));
+    if (available)
+    {
+        EXPECT_EQ(0, audioDevice->InitPlayout());
+        EXPECT_EQ(0, audioDevice->StartPlayout());
+    }
+
+    EXPECT_TRUE(audioDevice->AGC());
+    EXPECT_TRUE(audioDevice->Recording());
+    EXPECT_TRUE(audioDevice->Playing());
+    if (audioDevice->Recording() && audioDevice->Playing())
+    {
+        TEST_LOG("\n> Speak into the microphone and verify that the volume of"
+            " the selected microphone is varied between [~0] and [~MAX].\n"
+            "> You should hear your own voice with an increasing volume level"
+            " correlated to an emulated AGC setting.\n"
+            "> After a finalized test (and if file recording was enabled) verify"
+            " the recorded result off line.\n"
+            "> Press any key to stop...\n \n");
+        PAUSE(DEFAULT_PAUSE_TIME);
+    }
+
+    if (fileRecording)
+    {
+        EXPECT_EQ(0, audioDevice->StopRawInputFileRecording());
+    }
+    EXPECT_EQ(0, audioDevice->SetAGC(false));
+    EXPECT_EQ(0, audioDevice->StopRecording());
+    EXPECT_EQ(0, audioDevice->StopPlayout());
+    EXPECT_EQ(0, audioDevice->RegisterAudioCallback(NULL));
+    EXPECT_EQ(0, audioDevice->StereoRecordingIsAvailable(&available));
+
+    _audioTransport->SetMicrophoneAGC(false);
+    _audioTransport->SetFullDuplex(false);
+
+    // restore volume setting
+    EXPECT_EQ(0, audioDevice->SetMicrophoneVolume(startVolume));
+
+    TEST_LOG("\n");
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+WebRtc_Word32 FuncTestManager::TestLoopback()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Loopback measurement test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    EXPECT_EQ(0, audioDevice->Init());
+    EXPECT_TRUE(audioDevice->Initialized());
+
+    bool recIsAvailable(false);
+    bool playIsAvailable(false);
+    WebRtc_UWord8 nPlayChannels(0);
+    WebRtc_UWord8 nRecChannels(0);
+
+    if (SelectRecordingDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    EXPECT_EQ(0, audioDevice->RecordingIsAvailable(&recIsAvailable));
+    if (!recIsAvailable)
+    {
+        TEST_LOG("\nERROR: Recording is not available for the selected device!\n \n");
+        return -1;
+    }
+
+    if (SelectPlayoutDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+
+    EXPECT_EQ(0, audioDevice->PlayoutIsAvailable(&playIsAvailable));
+    if (recIsAvailable && playIsAvailable)
+    {
+        _audioTransport->SetFullDuplex(true);
+        _audioTransport->SetLoopbackMeasurements(true);
+    } else if (!playIsAvailable)
+    {
+        TEST_LOG("\nERROR: Playout is not available for the selected device!\n \n");
+        return -1;
+    }
+
+    bool enabled(false);
+    bool available(false);
+
+    if (recIsAvailable && playIsAvailable)
+    {
+        WebRtc_UWord32 playSamplesPerSec(0);
+        WebRtc_UWord32 recSamplesPerSecRec(0);
+
+        EXPECT_EQ(0, audioDevice->RegisterAudioCallback(_audioTransport));
+
+        _audioTransport->SetFullDuplex(true);
+
+        EXPECT_EQ(0, audioDevice->StereoRecordingIsAvailable(&available));
+        if (available)
+        {
+            EXPECT_EQ(0, audioDevice->SetStereoRecording(true));
+        }
+
+        EXPECT_EQ(0, audioDevice->StereoPlayoutIsAvailable(&available));
+        if (available)
+        {
+            EXPECT_EQ(0, audioDevice->SetStereoPlayout(true));
+        }
+
+        EXPECT_EQ(0, audioDevice->MicrophoneVolumeIsAvailable(&available));
+        if (available)
+        {
+            WebRtc_UWord32 maxVolume(0);
+            EXPECT_EQ(0, audioDevice->MaxMicrophoneVolume(&maxVolume));
+            EXPECT_EQ(0, audioDevice->SetMicrophoneVolume(maxVolume));
+        }
+
+        EXPECT_EQ(0, audioDevice->InitRecording());
+        EXPECT_EQ(0, audioDevice->InitPlayout());
+        EXPECT_EQ(0, audioDevice->PlayoutSampleRate(&playSamplesPerSec));
+        EXPECT_EQ(0, audioDevice->RecordingSampleRate(&recSamplesPerSecRec));
+        EXPECT_EQ(0, audioDevice->StereoPlayout(&enabled));
+        enabled ? nPlayChannels = 2 : nPlayChannels = 1;
+        EXPECT_EQ(0, audioDevice->StereoRecording(&enabled));
+        enabled ? nRecChannels = 2 : nRecChannels = 1;
+        EXPECT_EQ(0, audioDevice->StartRecording());
+        EXPECT_EQ(0, audioDevice->StartPlayout());
+
+        if (audioDevice->Playing() && audioDevice->Recording())
+        {
+            TEST_LOG("\n \n> Loopback audio is now active.\n"
+               "> Rec : fs=%u, #channels=%u.\n"
+                "> Play: fs=%u, #channels=%u.\n"
+                "> Speak into the microphone and verify that your voice is"
+                "  played out in loopback.\n"
+                "> Press any key to stop...\n \n",
+                recSamplesPerSecRec, nRecChannels, playSamplesPerSec,
+                nPlayChannels);
+            PAUSE(30000);
+        }
+
+        EXPECT_EQ(0, audioDevice->StopRecording());
+        EXPECT_EQ(0, audioDevice->StopPlayout());
+        EXPECT_EQ(0, audioDevice->RegisterAudioCallback(NULL));
+
+        _audioTransport->SetFullDuplex(false);
+        _audioTransport->SetLoopbackMeasurements(false);
+    }
+
+    EXPECT_EQ(0, audioDevice->Terminate());
+    EXPECT_FALSE(audioDevice->Initialized());
+
+    TEST_LOG("\n");
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+WebRtc_Word32 FuncTestManager::TestDeviceRemoval()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Device removal test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    EXPECT_EQ(0, audioDevice->Init());
+    EXPECT_TRUE(audioDevice->Initialized());
+
+    bool recIsAvailable(false);
+    bool playIsAvailable(false);
+    WebRtc_UWord8 nPlayChannels(0);
+    WebRtc_UWord8 nRecChannels(0);
+    WebRtc_UWord8 loopCount(0);
+
+    while (loopCount < 2)
+    {
+        if (SelectRecordingDevice() == -1)
+        {
+            TEST_LOG("\nERROR: Device selection failed!\n \n");
+            return -1;
+        }
+
+        EXPECT_EQ(0, audioDevice->RecordingIsAvailable(&recIsAvailable));
+        if (!recIsAvailable)
+        {
+            TEST_LOG("\nERROR: Recording is not available for the selected device!\n \n");
+            return -1;
+        }
+
+        if (SelectPlayoutDevice() == -1)
+        {
+            TEST_LOG("\nERROR: Device selection failed!\n \n");
+            return -1;
+        }
+
+        EXPECT_EQ(0, audioDevice->PlayoutIsAvailable(&playIsAvailable));
+        if (recIsAvailable && playIsAvailable)
+        {
+            _audioTransport->SetFullDuplex(true);
+        } else if (!playIsAvailable)
+        {
+            TEST_LOG("\nERROR: Playout is not available for the selected device!\n \n");
+            return -1;
+        }
+
+        bool available(false);
+        bool enabled(false);
+
+        if (recIsAvailable && playIsAvailable)
+        {
+            WebRtc_UWord32 playSamplesPerSec(0);
+            WebRtc_UWord32 recSamplesPerSecRec(0);
+
+            EXPECT_EQ(0, audioDevice->RegisterAudioCallback(_audioTransport));
+
+            _audioTransport->SetFullDuplex(true);
+
+            EXPECT_EQ(0, audioDevice->StereoRecordingIsAvailable(&available));
+            if (available)
+            {
+                EXPECT_EQ(0, audioDevice->SetStereoRecording(true));
+            }
+
+            EXPECT_EQ(0, audioDevice->StereoPlayoutIsAvailable(&available));
+            if (available)
+            {
+                EXPECT_EQ(0, audioDevice->SetStereoPlayout(true));
+            }
+
+            EXPECT_EQ(0, audioDevice->MicrophoneVolumeIsAvailable(&available));
+            if (available)
+            {
+                WebRtc_UWord32 maxVolume(0);
+                EXPECT_EQ(0, audioDevice->MaxMicrophoneVolume(&maxVolume));
+                EXPECT_EQ(0, audioDevice->SetMicrophoneVolume(maxVolume));
+            }
+
+            EXPECT_EQ(0, audioDevice->InitRecording());
+            EXPECT_EQ(0, audioDevice->InitPlayout());
+            EXPECT_EQ(0, audioDevice->PlayoutSampleRate(&playSamplesPerSec));
+            EXPECT_EQ(0, audioDevice->RecordingSampleRate(&recSamplesPerSecRec));
+            EXPECT_EQ(0, audioDevice->StereoPlayout(&enabled));
+            enabled ? nPlayChannels = 2 : nPlayChannels = 1;
+            EXPECT_EQ(0, audioDevice->StereoRecording(&enabled));
+            enabled ? nRecChannels = 2 : nRecChannels = 1;
+            EXPECT_EQ(0, audioDevice->StartRecording());
+            EXPECT_EQ(0, audioDevice->StartPlayout());
+
+            AudioDeviceModule::AudioLayer audioLayer;
+            EXPECT_EQ(0, audioDevice->ActiveAudioLayer(&audioLayer));
+
+            if (audioLayer == AudioDeviceModule::kLinuxPulseAudio)
+            {
+                TEST_LOG("\n \n> PulseAudio loopback audio is now active.\n"
+                    "> Rec : fs=%u, #channels=%u.\n"
+                    "> Play: fs=%u, #channels=%u.\n"
+                    "> Speak into the microphone and verify that your voice is"
+                    " played out in loopback.\n"
+                    "> Unplug the device and make sure that your voice is played"
+                    " out in loop back on the built-in soundcard.\n"
+                    "> Then press any key...\n",
+                         recSamplesPerSecRec, nRecChannels, playSamplesPerSec,
+                         nPlayChannels);
+
+                PAUSE(DEFAULT_PAUSE_TIME);
+            } else if (audioDevice->Playing() && audioDevice->Recording())
+            {
+                if (loopCount < 1)
+                {
+                    TEST_LOG("\n \n> Loopback audio is now active.\n"
+                        "> Rec : fs=%u, #channels=%u.\n"
+                        "> Play: fs=%u, #channels=%u.\n"
+                        "> Speak into the microphone and verify that your voice"
+                        " is played out in loopback.\n"
+                        "> Unplug the device and wait for the error message...\n",
+                        recSamplesPerSecRec, nRecChannels,
+                        playSamplesPerSec, nPlayChannels);
+
+                    _audioEventObserver->_error
+                        = (AudioDeviceObserver::ErrorCode) (-1);
+                    while (_audioEventObserver->_error
+                        == (AudioDeviceObserver::ErrorCode) (-1))
+                    {
+                        SleepMs(500);
+                    }
+                } else
+                {
+                    TEST_LOG("\n \n> Loopback audio is now active.\n"
+                        "> Rec : fs=%u, #channels=%u.\n"
+                        "> Play: fs=%u, #channels=%u.\n"
+                        "> Speak into the microphone and verify that your voice"
+                        " is played out in loopback.\n"
+                        "> Press any key to stop...\n",
+                             recSamplesPerSecRec, nRecChannels,
+                             playSamplesPerSec, nPlayChannels);
+
+                    PAUSE(DEFAULT_PAUSE_TIME);
+                }
+            }
+
+            EXPECT_EQ(0, audioDevice->StopRecording());
+            EXPECT_EQ(0, audioDevice->StopPlayout());
+            EXPECT_EQ(0, audioDevice->RegisterAudioCallback(NULL));
+
+            _audioTransport->SetFullDuplex(false);
+
+            if (loopCount < 1)
+            {
+                TEST_LOG("\n \n> Stopped!\n");
+                TEST_LOG("> Now reinsert device if you want to enumerate it.\n");
+                TEST_LOG("> Press any key when done.\n");
+                PAUSE(DEFAULT_PAUSE_TIME);
+            }
+
+            loopCount++;
+        }
+    } // loopCount
+
+    EXPECT_EQ(0, audioDevice->Terminate());
+    EXPECT_FALSE(audioDevice->Initialized());
+
+    TEST_LOG("\n");
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+WebRtc_Word32 FuncTestManager::TestExtra()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Extra test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    EXPECT_EQ(0, audioDevice->Init());
+    EXPECT_TRUE(audioDevice->Initialized());
+
+    EXPECT_EQ(0, audioDevice->Terminate());
+    EXPECT_FALSE(audioDevice->Initialized());
+
+    TEST_LOG("\n");
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+WebRtc_Word32 FuncTestManager::SelectRecordingDevice()
+{
+    WebRtc_Word16 nDevices = _audioDevice->RecordingDevices();
+    char name[kAdmMaxDeviceNameSize];
+    char guid[kAdmMaxGuidSize];
+    WebRtc_Word32 ret(-1);
+
+#ifdef _WIN32
+    TEST_LOG("\nSelect Recording Device\n \n");
+    TEST_LOG("  (%d) Default\n", 0);
+    TEST_LOG("  (%d) Default Communication [Win 7]\n", 1);
+    TEST_LOG("- - - - - - - - - - - - - - - - - - - -\n");
+    for (int i = 0; i < nDevices; i++)
+    {
+        EXPECT_EQ(0, _audioDevice->RecordingDeviceName(i, name, guid));
+        TEST_LOG(" (%d) Device %d (%s)\n", i+10, i, name);
+    }
+    TEST_LOG("\n: ");
+
+    int sel(0);
+
+    scanf("%u", &sel);
+
+    if (sel == 0)
+    {
+        EXPECT_EQ(0, (ret = _audioDevice->SetRecordingDevice(AudioDeviceModule::kDefaultDevice)));
+    }
+    else if (sel == 1)
+    {
+        EXPECT_TRUE((ret = _audioDevice->SetRecordingDevice(
+            AudioDeviceModule::kDefaultCommunicationDevice)) == 0);
+    }
+    else if (sel < (nDevices+10))
+    {
+        EXPECT_EQ(0, (ret = _audioDevice->SetRecordingDevice(sel-10)));
+    }
+    else
+    {
+        return -1;
+    }
+#else
+    TEST_LOG("\nSelect Recording Device\n \n");
+    for (int i = 0; i < nDevices; i++)
+    {
+        EXPECT_EQ(0, _audioDevice->RecordingDeviceName(i, name, guid));
+        TEST_LOG(" (%d) Device %d (%s)\n", i, i, name);
+    }
+    TEST_LOG("\n: ");
+    int sel(0);
+    EXPECT_TRUE(scanf("%u", &sel) > 0);
+    if (sel < (nDevices))
+    {
+        EXPECT_EQ(0, (ret = _audioDevice->SetRecordingDevice(sel)));
+    } else
+    {
+        return -1;
+    }
+#endif
+
+    return ret;
+}
+
+WebRtc_Word32 FuncTestManager::SelectPlayoutDevice()
+{
+    WebRtc_Word16 nDevices = _audioDevice->PlayoutDevices();
+    char name[kAdmMaxDeviceNameSize];
+    char guid[kAdmMaxGuidSize];
+
+#ifdef _WIN32
+    TEST_LOG("\nSelect Playout Device\n \n");
+    TEST_LOG("  (%d) Default\n", 0);
+    TEST_LOG("  (%d) Default Communication [Win 7]\n", 1);
+    TEST_LOG("- - - - - - - - - - - - - - - - - - - -\n");
+    for (int i = 0; i < nDevices; i++)
+    {
+        EXPECT_EQ(0, _audioDevice->PlayoutDeviceName(i, name, guid));
+        TEST_LOG(" (%d) Device %d (%s)\n", i+10, i, name);
+    }
+    TEST_LOG("\n: ");
+
+    int sel(0);
+
+    scanf("%u", &sel);
+
+    WebRtc_Word32 ret(0);
+
+    if (sel == 0)
+    {
+        EXPECT_TRUE((ret = _audioDevice->SetPlayoutDevice(
+            AudioDeviceModule::kDefaultDevice)) == 0);
+    }
+    else if (sel == 1)
+    {
+        EXPECT_TRUE((ret = _audioDevice->SetPlayoutDevice(
+            AudioDeviceModule::kDefaultCommunicationDevice)) == 0);
+    }
+    else if (sel < (nDevices+10))
+    {
+        EXPECT_EQ(0, (ret = _audioDevice->SetPlayoutDevice(sel-10)));
+    }
+    else
+    {
+        return -1;
+    }
+#else
+    TEST_LOG("\nSelect Playout Device\n \n");
+    for (int i = 0; i < nDevices; i++)
+    {
+        EXPECT_EQ(0, _audioDevice->PlayoutDeviceName(i, name, guid));
+        TEST_LOG(" (%d) Device %d (%s)\n", i, i, name);
+    }
+    TEST_LOG("\n: ");
+    int sel(0);
+    EXPECT_TRUE(scanf("%u", &sel) > 0);
+    WebRtc_Word32 ret(0);
+    if (sel < (nDevices))
+    {
+        EXPECT_EQ(0, (ret = _audioDevice->SetPlayoutDevice(sel)));
+    } else
+    {
+        return -1;
+    }
+#endif
+
+    return ret;
+}
+
+WebRtc_Word32 FuncTestManager::TestAdvancedMBAPI()
+{
+    TEST_LOG("\n=======================================\n");
+    TEST_LOG(" Advanced mobile device API test:\n");
+    TEST_LOG("=======================================\n");
+
+    if (_audioDevice == NULL)
+    {
+        return -1;
+    }
+
+    RESET_TEST;
+
+    AudioDeviceModule* audioDevice = _audioDevice;
+
+    EXPECT_EQ(0, audioDevice->Init());
+    EXPECT_TRUE(audioDevice->Initialized());
+
+    if (SelectRecordingDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+    if (SelectPlayoutDevice() == -1)
+    {
+        TEST_LOG("\nERROR: Device selection failed!\n \n");
+        return -1;
+    }
+    _audioTransport->SetFullDuplex(true);
+    _audioTransport->SetLoopbackMeasurements(true);
+
+    EXPECT_EQ(0, audioDevice->RegisterAudioCallback(_audioTransport));
+    // Start recording
+    EXPECT_EQ(0, audioDevice->InitRecording());
+    EXPECT_EQ(0, audioDevice->StartRecording());
+    // Start playout
+    EXPECT_EQ(0, audioDevice->InitPlayout());
+    EXPECT_EQ(0, audioDevice->StartPlayout());
+
+    EXPECT_TRUE(audioDevice->Recording());
+    EXPECT_TRUE(audioDevice->Playing());
+
+#if defined(_WIN32_WCE) || defined(MAC_IPHONE)
+    TEST_LOG("\nResetAudioDevice\n \n");
+    if (audioDevice->Recording() && audioDevice->Playing())
+    {
+        TEST_LOG("\n> Speak into the microphone and verify that the audio is good.\n\
+> Press any key to stop...\n \n");
+        PAUSE(DEFAULT_PAUSE_TIME);
+    }
+    for (int p=0; p<=60; p+=20)
+    {
+        TEST_LOG("Resetting sound device several time with pause %d ms\n", p);
+        for (int l=0; l<20; ++l)
+        {
+            EXPECT_EQ(0, audioDevice->ResetAudioDevice());
+            SleepMs(p);
+        }
+        TEST_LOG("\n> Speak into the microphone and verify that the audio is good.\n");
+        SleepMs(2000);
+    }
+#endif
+
+#if defined(MAC_IPHONE)
+    bool loudspeakerOn(false);
+    TEST_LOG("\nSet playout spaker\n \n");
+    if (audioDevice->Recording() && audioDevice->Playing())
+    {
+        TEST_LOG("\n> Speak into the microphone and verify that the audio is good.\n\
+> Press any key to stop...\n \n");
+        PAUSE(DEFAULT_PAUSE_TIME);
+    }
+
+    TEST_LOG("Set to use speaker\n");
+    EXPECT_EQ(0, audioDevice->SetLoudspeakerStatus(true));
+    TEST_LOG("\n> Speak into the microphone and verify that the audio is"
+        " from the loudspeaker.\n\
+> Press any key to stop...\n \n");
+    PAUSE(DEFAULT_PAUSE_TIME);
+    EXPECT_EQ(0, audioDevice->GetLoudspeakerStatus(loudspeakerOn));
+    EXPECT_TRUE(loudspeakerOn);
+
+    TEST_LOG("Set to not use speaker\n");
+    EXPECT_EQ(0, audioDevice->SetLoudspeakerStatus(false));
+    TEST_LOG("\n> Speak into the microphone and verify that the audio is not"
+        " from the loudspeaker.\n\
+> Press any key to stop...\n \n");
+    PAUSE(DEFAULT_PAUSE_TIME);
+    EXPECT_EQ(0, audioDevice->GetLoudspeakerStatus(loudspeakerOn));
+    EXPECT_FALSE(loudspeakerOn);
+#endif
+
+    EXPECT_EQ(0, audioDevice->StopRecording());
+    EXPECT_EQ(0, audioDevice->StopPlayout());
+    EXPECT_EQ(0, audioDevice->RegisterAudioCallback(NULL));
+
+    _audioTransport->SetFullDuplex(false);
+
+    TEST_LOG("\n");
+    PRINT_TEST_RESULTS;
+
+    return 0;
+}
+
+} // namespace webrtc
+
+// EOF
diff --git a/src/modules/audio_device/main/test/func_test_manager.h b/src/modules/audio_device/main/test/func_test_manager.h
new file mode 100644
index 0000000..8c01f78
--- /dev/null
+++ b/src/modules/audio_device/main/test/func_test_manager.h
@@ -0,0 +1,222 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_AUDIO_DEVICE_FUNC_TEST_MANAGER_H
+#define WEBRTC_AUDIO_DEVICE_FUNC_TEST_MANAGER_H
+
+#include "../source/audio_device_utility.h"
+
+#include <string>
+
+#include "typedefs.h"
+#include "audio_device.h"
+#include "audio_device_test_defines.h"
+#include "file_wrapper.h"
+#include "list_wrapper.h"
+#include "resampler.h"
+
+#if defined(MAC_IPHONE) || defined(ANDROID)
+#define USE_SLEEP_AS_PAUSE
+#else
+//#define USE_SLEEP_AS_PAUSE
+#endif
+
+// Sets the default pause time if using sleep as pause
+#define DEFAULT_PAUSE_TIME 5000
+
+#if defined(USE_SLEEP_AS_PAUSE)
+#define PAUSE(a) SleepMs(a);
+#else
+#define PAUSE(a) AudioDeviceUtility::WaitForKey();
+#endif
+
+#define ADM_AUDIO_LAYER AudioDeviceModule::kPlatformDefaultAudio
+//#define ADM_AUDIO_LAYER AudioDeviceModule::kLinuxPulseAudio
+
+enum TestType
+{
+    TTInvalid = -1,
+    TTAll = 0,
+    TTAudioLayerSelection = 1,
+    TTDeviceEnumeration = 2,
+    TTDeviceSelection = 3,
+    TTAudioTransport = 4,
+    TTSpeakerVolume = 5,
+    TTMicrophoneVolume = 6,
+    TTSpeakerMute = 7,
+    TTMicrophoneMute = 8,
+    TTMicrophoneBoost = 9,
+    TTMicrophoneAGC = 10,
+    TTLoopback = 11,
+    TTDeviceRemoval = 13,
+    TTMobileAPI = 14,
+    TTTest = 66,
+};
+
+class ProcessThread;
+
+namespace webrtc
+{
+
+class AudioDeviceModule;
+class AudioEventObserver;
+class AudioTransport;
+
+// ----------------------------------------------------------------------------
+//  AudioEventObserver
+// ----------------------------------------------------------------------------
+
+class AudioEventObserver: public AudioDeviceObserver
+{
+public:
+    virtual void OnErrorIsReported(const ErrorCode error);
+    virtual void OnWarningIsReported(const WarningCode warning);
+    AudioEventObserver(AudioDeviceModule* audioDevice);
+    ~AudioEventObserver();
+public:
+    ErrorCode _error;
+    WarningCode _warning;
+};
+
+// ----------------------------------------------------------------------------
+//  AudioTransport
+// ----------------------------------------------------------------------------
+
+class AudioTransportImpl: public AudioTransport
+{
+public:
+    virtual WebRtc_Word32
+        RecordedDataIsAvailable(const void* audioSamples,
+                                const WebRtc_UWord32 nSamples,
+                                const WebRtc_UWord8 nBytesPerSample,
+                                const WebRtc_UWord8 nChannels,
+                                const WebRtc_UWord32 samplesPerSec,
+                                const WebRtc_UWord32 totalDelayMS,
+                                const WebRtc_Word32 clockDrift,
+                                const WebRtc_UWord32 currentMicLevel,
+                                WebRtc_UWord32& newMicLevel);
+
+    virtual WebRtc_Word32 NeedMorePlayData(const WebRtc_UWord32 nSamples,
+                                           const WebRtc_UWord8 nBytesPerSample,
+                                           const WebRtc_UWord8 nChannels,
+                                           const WebRtc_UWord32 samplesPerSec,
+                                           void* audioSamples,
+                                           WebRtc_UWord32& nSamplesOut);
+
+    AudioTransportImpl(AudioDeviceModule* audioDevice);
+    ~AudioTransportImpl();
+
+public:
+    WebRtc_Word32 SetFilePlayout(bool enable, const char* fileName = NULL);
+    void SetFullDuplex(bool enable);
+    void SetSpeakerVolume(bool enable)
+    {
+        _speakerVolume = enable;
+    }
+    ;
+    void SetSpeakerMute(bool enable)
+    {
+        _speakerMute = enable;
+    }
+    ;
+    void SetMicrophoneMute(bool enable)
+    {
+        _microphoneMute = enable;
+    }
+    ;
+    void SetMicrophoneVolume(bool enable)
+    {
+        _microphoneVolume = enable;
+    }
+    ;
+    void SetMicrophoneBoost(bool enable)
+    {
+        _microphoneBoost = enable;
+    }
+    ;
+    void SetLoopbackMeasurements(bool enable)
+    {
+        _loopBackMeasurements = enable;
+    }
+    ;
+    void SetMicrophoneAGC(bool enable)
+    {
+        _microphoneAGC = enable;
+    }
+    ;
+
+private:
+    AudioDeviceModule* _audioDevice;
+
+    bool _playFromFile;
+    bool _fullDuplex;
+    bool _speakerVolume;
+    bool _speakerMute;
+    bool _microphoneVolume;
+    bool _microphoneMute;
+    bool _microphoneBoost;
+    bool _microphoneAGC;
+    bool _loopBackMeasurements;
+
+    FileWrapper& _playFile;
+
+    WebRtc_UWord32 _recCount;
+    WebRtc_UWord32 _playCount;
+
+    ListWrapper _audioList;
+
+    Resampler _resampler;
+};
+
+// ----------------------------------------------------------------------------
+//  FuncTestManager
+// ----------------------------------------------------------------------------
+
+class FuncTestManager
+{
+public:
+    FuncTestManager();
+    ~FuncTestManager();
+    WebRtc_Word32 Init();
+    WebRtc_Word32 Close();
+    WebRtc_Word32 DoTest(const TestType testType);
+private:
+    WebRtc_Word32 TestAudioLayerSelection();
+    WebRtc_Word32 TestDeviceEnumeration();
+    WebRtc_Word32 TestDeviceSelection();
+    WebRtc_Word32 TestAudioTransport();
+    WebRtc_Word32 TestSpeakerVolume();
+    WebRtc_Word32 TestMicrophoneVolume();
+    WebRtc_Word32 TestSpeakerMute();
+    WebRtc_Word32 TestMicrophoneMute();
+    WebRtc_Word32 TestMicrophoneBoost();
+    WebRtc_Word32 TestLoopback();
+    WebRtc_Word32 TestDeviceRemoval();
+    WebRtc_Word32 TestExtra();
+    WebRtc_Word32 TestMicrophoneAGC();
+    WebRtc_Word32 SelectPlayoutDevice();
+    WebRtc_Word32 SelectRecordingDevice();
+    WebRtc_Word32 TestAdvancedMBAPI();
+private:
+    // Paths to where the resource files to be used for this test are located.
+    std::string _playoutFile48;
+    std::string _playoutFile44;
+    std::string _playoutFile16;
+    std::string _playoutFile8;
+
+    ProcessThread* _processThread;
+    AudioDeviceModule* _audioDevice;
+    AudioEventObserver* _audioEventObserver;
+    AudioTransportImpl* _audioTransport;
+};
+
+} // namespace webrtc
+
+#endif  // #ifndef WEBRTC_AUDIO_DEVICE_FUNC_TEST_MANAGER_H
diff --git a/src/modules/audio_processing/Android.mk b/src/modules/audio_processing/Android.mk
index 3883671..e5bda61 100644
--- a/src/modules/audio_processing/Android.mk
+++ b/src/modules/audio_processing/Android.mk
@@ -33,7 +33,9 @@
 # Flags passed to both C and C++ files.
 LOCAL_CFLAGS := \
     $(MY_WEBRTC_COMMON_DEFS) \
-    '-DWEBRTC_NS_FIXED'
+    '-DWEBRTC_NS_FIXED' \
+    '-DWEBRTC_ANDROID_PLATFORM_BUILD' \
+    '-DWEBRTC_AUDIOPROC_DEBUG_DUMP'
 #   floating point
 #   -DWEBRTC_NS_FLOAT'
 
@@ -45,11 +47,11 @@
 LOCAL_CFLAGS_mips64 := $(MY_WEBRTC_COMMON_DEFS_mips64)
 
 LOCAL_C_INCLUDES := \
-    $(LOCAL_PATH)/interface \
-    $(LOCAL_PATH)/aec/interface \
-    $(LOCAL_PATH)/aecm/interface \
-    $(LOCAL_PATH)/agc/interface \
-    $(LOCAL_PATH)/ns/interface \
+    $(LOCAL_PATH)/include \
+    $(LOCAL_PATH)/aec/include \
+    $(LOCAL_PATH)/aecm/include \
+    $(LOCAL_PATH)/agc/include \
+    $(LOCAL_PATH)/ns/include \
     $(LOCAL_PATH)/../interface \
     $(LOCAL_PATH)/../.. \
     $(LOCAL_PATH)/../../common_audio/signal_processing/include \
@@ -81,7 +83,9 @@
 
 # Flags passed to both C and C++ files.
 LOCAL_CFLAGS := \
-    $(MY_WEBRTC_COMMON_DEFS)
+    $(MY_WEBRTC_COMMON_DEFS) \
+    '-DWEBRTC_ANDROID_PLATFORM_BUILD' \
+    '-DWEBRTC_AUDIOPROC_DEBUG_DUMP'
 
 LOCAL_CFLAGS_arm := $(MY_WEBRTC_COMMON_DEFS_arm)
 LOCAL_CFLAGS_x86 := $(MY_WEBRTC_COMMON_DEFS_x86)
@@ -91,7 +95,7 @@
 LOCAL_CFLAGS_mips64 := $(MY_WEBRTC_COMMON_DEFS_mips64)
 
 LOCAL_C_INCLUDES := \
-    $(LOCAL_PATH)/interface \
+    $(LOCAL_PATH)/include \
     $(LOCAL_PATH)/../interface \
     $(LOCAL_PATH)/../.. \
     $(LOCAL_PATH)/../../system_wrappers/interface \
@@ -127,12 +131,14 @@
 LOCAL_SRC_FILES:= \
     $(call all-proto-files-under, test) \
     test/unit_test.cc \
-    test/testsupport/fileutils.cc
+    ../../test/testsupport/fileutils.cc
 
 # Flags passed to both C and C++ files.
 LOCAL_CFLAGS := \
     $(MY_WEBRTC_COMMON_DEFS) \
-    '-DWEBRTC_APM_UNIT_TEST_FIXED_PROFILE'
+    '-DWEBRTC_AUDIOPROC_FIXED_PROFILE' \
+    '-DWEBRTC_ANDROID_PLATFORM_BUILD' \
+    '-DWEBRTC_AUDIOPROC_DEBUG_DUMP'
 
 LOCAL_CFLAGS_arm := $(MY_WEBRTC_COMMON_DEFS_arm)
 LOCAL_CFLAGS_x86 := $(MY_WEBRTC_COMMON_DEFS_x86)
@@ -142,10 +148,10 @@
 LOCAL_CFLAGS_mips64 := $(MY_WEBRTC_COMMON_DEFS_mips64)
 
 LOCAL_C_INCLUDES := \
-    $(LOCAL_PATH)/interface \
+    $(LOCAL_PATH)/include \
     $(LOCAL_PATH)/../interface \
     $(LOCAL_PATH)/../.. \
-    $(LOCAL_PATH)/test \
+    $(LOCAL_PATH)/../../../test \
     $(LOCAL_PATH)/../../system_wrappers/interface \
     $(LOCAL_PATH)/../../common_audio/signal_processing/include \
     external/protobuf/src
diff --git a/src/modules/audio_processing/aec/Android.mk b/src/modules/audio_processing/aec/Android.mk
index a8f0e91..068b7fb 100644
--- a/src/modules/audio_processing/aec/Android.mk
+++ b/src/modules/audio_processing/aec/Android.mk
@@ -1,4 +1,4 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 #
 # Use of this source code is governed by a BSD-style license
 # that can be found in the LICENSE file in the root of the source
@@ -20,9 +20,12 @@
     aec_resampler.c \
     aec_core.c \
     aec_rdft.c \
+
+ifeq ($(TARGET_ARCH),$(filter $(TARGET_ARCH),x86 x86_64))
+LOCAL_SRC_FILES += \
     aec_core_sse2.c \
     aec_rdft_sse2.c
-
+endif
 
 # Flags passed to both C and C++ files.
 LOCAL_CFLAGS := \
@@ -36,7 +39,7 @@
 LOCAL_CFLAGS_mips64 := $(MY_WEBRTC_COMMON_DEFS_mips64)
 
 LOCAL_C_INCLUDES := \
-    $(LOCAL_PATH)/interface \
+    $(LOCAL_PATH)/include \
     $(LOCAL_PATH)/../utility \
     $(LOCAL_PATH)/../../.. \
     $(LOCAL_PATH)/../../../common_audio/signal_processing/include
diff --git a/src/modules/audio_processing/aec/aec.gypi b/src/modules/audio_processing/aec/aec.gypi
index 7e86a90..1506342 100644
--- a/src/modules/audio_processing/aec/aec.gypi
+++ b/src/modules/audio_processing/aec/aec.gypi
@@ -1,4 +1,4 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 #
 # Use of this source code is governed by a BSD-style license
 # that can be found in the LICENSE file in the root of the source
@@ -16,34 +16,60 @@
         'aec_debug_dump%': 0,
       },
       'dependencies': [
+        'apm_util',
         '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
-        'apm_util'
       ],
       'include_dirs': [
-        'interface',
+        'include',
       ],
       'direct_dependent_settings': {
         'include_dirs': [
-          'interface',
+          'include',
         ],
       },
       'sources': [
-        'interface/echo_cancellation.h',
+        'include/echo_cancellation.h',
         'echo_cancellation.c',
+        'echo_cancellation_internal.h',
         'aec_core.h',
         'aec_core.c',
-        'aec_core_sse2.c',
         'aec_rdft.h',
         'aec_rdft.c',
-        'aec_rdft_sse2.c',
         'aec_resampler.h',
         'aec_resampler.c',
       ],
       'conditions': [
+        ['target_arch=="ia32" or target_arch=="x64"', {
+          'dependencies': [ 'aec_sse2', ],
+        }],
         ['aec_debug_dump==1', {
           'defines': [ 'WEBRTC_AEC_DEBUG_DUMP', ],
         }],
       ],
     },
   ],
+  'conditions': [
+    ['target_arch=="ia32" or target_arch=="x64"', {
+      'targets': [
+        {
+          'target_name': 'aec_sse2',
+          'type': '<(library)',
+          'sources': [
+            'aec_core_sse2.c',
+            'aec_rdft_sse2.c',
+          ],
+          'conditions': [
+            ['os_posix==1 and OS!="mac"', {
+              'cflags': [ '-msse2', ],
+            }],
+            ['OS=="mac"', {
+              'xcode_settings': {
+                'OTHER_CFLAGS': [ '-msse2', ],
+              },
+            }],
+          ],
+        },
+      ],
+    }],
+  ],
 }
diff --git a/src/modules/audio_processing/aec/aec_core.c b/src/modules/audio_processing/aec/aec_core.c
index 6718dec..5e1ef00 100644
--- a/src/modules/audio_processing/aec/aec_core.c
+++ b/src/modules/audio_processing/aec/aec_core.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -21,6 +21,7 @@
 #include <string.h>
 
 #include "aec_rdft.h"
+#include "common_audio/signal_processing/include/signal_processing_library.h"
 #include "delay_estimator_wrapper.h"
 #include "ring_buffer.h"
 #include "system_wrappers/interface/cpu_features_wrapper.h"
@@ -205,10 +206,10 @@
         return -1;
     }
 #endif
-    if (WebRtc_CreateDelayEstimator(&aec->delay_estimator,
-                                    PART_LEN1,
-                                    kMaxDelayBlocks,
-                                    kLookaheadBlocks) == -1) {
+    aec->delay_estimator = WebRtc_CreateDelayEstimator(PART_LEN1,
+                                                       kMaxDelayBlocks,
+                                                       kLookaheadBlocks);
+    if (aec->delay_estimator == NULL) {
       WebRtcAec_FreeAec(aec);
       aec = NULL;
       return -1;
@@ -516,11 +517,13 @@
     WebRtcAec_ScaleErrorSignal = ScaleErrorSignal;
     WebRtcAec_FilterAdaptation = FilterAdaptation;
     WebRtcAec_OverdriveAndSuppress = OverdriveAndSuppress;
+
+#if defined(WEBRTC_ARCH_X86_FAMILY)
     if (WebRtc_GetCPUInfo(kSSE2)) {
-#if defined(WEBRTC_USE_SSE2)
       WebRtcAec_InitAec_SSE2();
-#endif
     }
+#endif
+
     aec_rdft_init();
 
     return 0;
@@ -540,19 +543,13 @@
     WebRtcAec_InitStats(&aec->rerl);
 }
 
-
 void WebRtcAec_BufferFarendPartition(aec_t *aec, const float* farend) {
   float fft[PART_LEN2];
   float xf[2][PART_LEN1];
 
   // Check if the buffer is full, and in that case flush the oldest data.
   if (WebRtc_available_write(aec->far_buf) < 1) {
-    WebRtc_MoveReadPtr(aec->far_buf, 1);
-    WebRtc_MoveReadPtr(aec->far_buf_windowed, 1);
-    aec->system_delay -= PART_LEN;
-#ifdef WEBRTC_AEC_DEBUG_DUMP
-    WebRtc_MoveReadPtr(aec->far_time_buf, 1);
-#endif
+    WebRtcAec_MoveFarReadPtr(aec, 1);
   }
   // Convert far-end partition to the frequency domain without windowing.
   memcpy(fft, farend, sizeof(float) * PART_LEN2);
@@ -565,6 +562,16 @@
   WebRtc_WriteBuffer(aec->far_buf_windowed, &xf[0][0], 1);
 }
 
+int WebRtcAec_MoveFarReadPtr(aec_t *aec, int elements) {
+  int elements_moved = WebRtc_MoveReadPtr(aec->far_buf_windowed, elements);
+  WebRtc_MoveReadPtr(aec->far_buf, elements);
+#ifdef WEBRTC_AEC_DEBUG_DUMP
+  WebRtc_MoveReadPtr(aec->far_time_buf, elements);
+#endif
+  aec->system_delay -= elements_moved * PART_LEN;
+  return elements_moved;
+}
+
 void WebRtcAec_ProcessFrame(aec_t *aec,
                             const short *nearend,
                             const short *nearendH,
@@ -605,16 +612,10 @@
     // |system_delay| indicates others.
     if (aec->system_delay < FRAME_LEN) {
       // We don't have enough data so we rewind 10 ms.
-      WebRtc_MoveReadPtr(aec->far_buf_windowed, -(aec->mult + 1));
-      aec->system_delay -= WebRtc_MoveReadPtr(aec->far_buf, -(aec->mult + 1)) *
-          PART_LEN;
-#ifdef WEBRTC_AEC_DEBUG_DUMP
-      WebRtc_MoveReadPtr(aec->far_time_buf, -(aec->mult + 1));
-#endif
+      WebRtcAec_MoveFarReadPtr(aec, -(aec->mult + 1));
     }
 
     // 2) Compensate for a possible change in the system delay.
-
     WebRtc_MoveReadPtr(aec->far_buf_windowed, move_elements);
     moved_elements = WebRtc_MoveReadPtr(aec->far_buf, move_elements);
     aec->knownDelay -= moved_elements * PART_LEN;
@@ -685,8 +686,8 @@
         int16_t farend[PART_LEN];
         int16_t* farend_ptr = NULL;
         WebRtc_ReadBuffer(aec->far_time_buf, (void**) &farend_ptr, farend, 1);
-        fwrite(farend_ptr, sizeof(int16_t), PART_LEN, aec->farFile);
-        fwrite(nearend_ptr, sizeof(int16_t), PART_LEN, aec->nearFile);
+        (void)fwrite(farend_ptr, sizeof(int16_t), PART_LEN, aec->farFile);
+        (void)fwrite(nearend_ptr, sizeof(int16_t), PART_LEN, aec->nearFile);
     }
 #endif
 
@@ -770,7 +771,7 @@
     memcpy(aec->xfBuf[1] + aec->xfBufBlockPos * PART_LEN1, &xf_ptr[PART_LEN1],
            sizeof(float) * PART_LEN1);
 
-    memset(yf[0], 0, sizeof(float) * (PART_LEN1 * 2));
+    memset(yf, 0, sizeof(yf));
 
     // Filter far
     WebRtcAec_FilterFar(aec, yf);
@@ -843,8 +844,8 @@
                 WEBRTC_SPL_WORD16_MIN);
         }
 
-        fwrite(eInt16, sizeof(int16_t), PART_LEN, aec->outLinearFile);
-        fwrite(output, sizeof(int16_t), PART_LEN, aec->outFile);
+        (void)fwrite(eInt16, sizeof(int16_t), PART_LEN, aec->outLinearFile);
+        (void)fwrite(output, sizeof(int16_t), PART_LEN, aec->outFile);
     }
 #endif
 }
@@ -1226,8 +1227,8 @@
         tmp = pi2 * rand[i - 1];
 
         noise = sqrtf(noisePow[i]);
-        u[i][0] = noise * (float)cos(tmp);
-        u[i][1] = -noise * (float)sin(tmp);
+        u[i][0] = noise * cosf(tmp);
+        u[i][1] = -noise * sinf(tmp);
     }
     u[PART_LEN][1] = 0;
 
diff --git a/src/modules/audio_processing/aec/aec_core.h b/src/modules/audio_processing/aec/aec_core.h
index 1b9828a..c07528d 100644
--- a/src/modules/audio_processing/aec/aec_core.h
+++ b/src/modules/audio_processing/aec/aec_core.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -15,9 +15,10 @@
 #ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AEC_MAIN_SOURCE_AEC_CORE_H_
 #define WEBRTC_MODULES_AUDIO_PROCESSING_AEC_MAIN_SOURCE_AEC_CORE_H_
 
+#ifdef WEBRTC_AEC_DEBUG_DUMP
 #include <stdio.h>
+#endif
 
-#include "signal_processing_library.h"
 #include "typedefs.h"
 
 #define FRAME_LEN 80
@@ -176,4 +177,9 @@
                             const short *nearendH,
                             int knownDelay);
 
+// A helper function to call WebRtc_MoveReadPtr() for all far-end buffers.
+// Returns the number of elements moved, and adjusts |system_delay| by the
+// corresponding amount in ms.
+int WebRtcAec_MoveFarReadPtr(aec_t* aec, int elements);
+
 #endif  // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_MAIN_SOURCE_AEC_CORE_H_
diff --git a/src/modules/audio_processing/aec/aec_core_sse2.c b/src/modules/audio_processing/aec/aec_core_sse2.c
index 8894f28..74a1c48 100644
--- a/src/modules/audio_processing/aec/aec_core_sse2.c
+++ b/src/modules/audio_processing/aec/aec_core_sse2.c
@@ -12,13 +12,12 @@
  * The core AEC algorithm, SSE2 version of speed-critical functions.
  */
 
-#include "typedefs.h"
+#include "aec_core.h"
 
-#if defined(WEBRTC_USE_SSE2)
 #include <emmintrin.h>
 #include <math.h>
+#include <string.h>  // memset
 
-#include "aec_core.h"
 #include "aec_rdft.h"
 
 __inline static float MulRe(float aRe, float aIm, float bRe, float bIm)
@@ -414,4 +413,3 @@
   WebRtcAec_OverdriveAndSuppress = OverdriveAndSuppressSSE2;
 }
 
-#endif   // WEBRTC_USE_SSE2
diff --git a/src/modules/audio_processing/aec/aec_rdft.c b/src/modules/audio_processing/aec/aec_rdft.c
index 9222334..d4254dd 100644
--- a/src/modules/audio_processing/aec/aec_rdft.c
+++ b/src/modules/audio_processing/aec/aec_rdft.c
@@ -42,8 +42,8 @@
 
 static int ip[16];
 
-static void bitrv2_32or128(int n, int *ip, float *a) {
-  // n is 32 or 128
+static void bitrv2_32(int *ip, float *a) {
+  const int n = 32;
   int j, j1, k, k1, m, m2;
   float xr, xi, yr, yi;
 
@@ -116,6 +116,80 @@
   }
 }
 
+static void bitrv2_128(float *a) {
+  /*
+      Following things have been attempted but are no faster:
+      (a) Storing the swap indexes in a LUT (index calculations are done
+          for 'free' while waiting on memory/L1).
+      (b) Consolidate the load/store of two consecutive floats by a 64 bit
+          integer (execution is memory/L1 bound).
+      (c) Do a mix of floats and 64 bit integer to maximize register
+          utilization (execution is memory/L1 bound).
+      (d) Replacing ip[i] by ((k<<31)>>25) + ((k >> 1)<<5).
+      (e) Hard-coding of the offsets to completely eliminates index
+          calculations.
+  */
+
+  unsigned int j, j1, k, k1;
+  float xr, xi, yr, yi;
+
+  static const int ip[4] = {0, 64, 32, 96};
+  for (k = 0; k < 4; k++) {
+    for (j = 0; j < k; j++) {
+      j1 = 2 * j + ip[k];
+      k1 = 2 * k + ip[j];
+      xr = a[j1 + 0];
+      xi = a[j1 + 1];
+      yr = a[k1 + 0];
+      yi = a[k1 + 1];
+      a[j1 + 0] = yr;
+      a[j1 + 1] = yi;
+      a[k1 + 0] = xr;
+      a[k1 + 1] = xi;
+      j1 +=  8;
+      k1 += 16;
+      xr = a[j1 + 0];
+      xi = a[j1 + 1];
+      yr = a[k1 + 0];
+      yi = a[k1 + 1];
+      a[j1 + 0] = yr;
+      a[j1 + 1] = yi;
+      a[k1 + 0] = xr;
+      a[k1 + 1] = xi;
+      j1 += 8;
+      k1 -= 8;
+      xr = a[j1 + 0];
+      xi = a[j1 + 1];
+      yr = a[k1 + 0];
+      yi = a[k1 + 1];
+      a[j1 + 0] = yr;
+      a[j1 + 1] = yi;
+      a[k1 + 0] = xr;
+      a[k1 + 1] = xi;
+      j1 +=  8;
+      k1 += 16;
+      xr = a[j1 + 0];
+      xi = a[j1 + 1];
+      yr = a[k1 + 0];
+      yi = a[k1 + 1];
+      a[j1 + 0] = yr;
+      a[j1 + 1] = yi;
+      a[k1 + 0] = xr;
+      a[k1 + 1] = xi;
+    }
+    j1 = 2 * k + 8 + ip[k];
+    k1 = j1 + 8;
+    xr = a[j1 + 0];
+    xi = a[j1 + 1];
+    yr = a[k1 + 0];
+    yi = a[k1 + 1];
+    a[j1 + 0] = yr;
+    a[j1 + 1] = yi;
+    a[k1 + 0] = xr;
+    a[k1 + 1] = xi;
+  }
+}
+
 static void makewt_32(void) {
   const int nw = 32;
   int j, nwh;
@@ -137,7 +211,7 @@
     rdft_w[nw - j] = y;
     rdft_w[nw - j + 1] = x;
   }
-  bitrv2_32or128(nw, ip + 2, rdft_w);
+  bitrv2_32(ip + 2, rdft_w);
 
   // pre-calculate constants used by cft1st_128 and cftmdl_128...
   cftmdl_wk1r[0] = rdft_w[2];
@@ -544,10 +618,8 @@
 }
 
 void aec_rdft_forward_128(float *a) {
-  const int n = 128;
   float xi;
-
-  bitrv2_32or128(n, ip + 2, a);
+  bitrv2_128(a);
   cftfsub_128(a);
   rftfsub_128(a);
   xi = a[0] - a[1];
@@ -556,12 +628,10 @@
 }
 
 void aec_rdft_inverse_128(float *a) {
-  const int n = 128;
-
   a[1] = 0.5f * (a[0] - a[1]);
   a[0] -= a[1];
   rftbsub_128(a);
-  bitrv2_32or128(n, ip + 2, a);
+  bitrv2_128(a);
   cftbsub_128(a);
 }
 
@@ -576,11 +646,11 @@
   cftmdl_128 = cftmdl_128_C;
   rftfsub_128 = rftfsub_128_C;
   rftbsub_128 = rftbsub_128_C;
+#if defined(WEBRTC_ARCH_X86_FAMILY)
   if (WebRtc_GetCPUInfo(kSSE2)) {
-#if defined(WEBRTC_USE_SSE2)
     aec_rdft_init_sse2();
-#endif
   }
+#endif
   // init library constants.
   makewt_32();
   makect_32();
diff --git a/src/modules/audio_processing/aec/aec_rdft_sse2.c b/src/modules/audio_processing/aec/aec_rdft_sse2.c
index f936e2a..eeb3152 100644
--- a/src/modules/audio_processing/aec/aec_rdft_sse2.c
+++ b/src/modules/audio_processing/aec/aec_rdft_sse2.c
@@ -8,13 +8,10 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#include "typedefs.h"
-
-#if defined(WEBRTC_USE_SSE2)
-#include <emmintrin.h>
-
 #include "aec_rdft.h"
 
+#include <emmintrin.h>
+
 static const ALIGN16_BEG float ALIGN16_END k_swap_sign[4] =
   {-1.f, 1.f, -1.f, 1.f};
 
@@ -428,4 +425,3 @@
   rftbsub_128 = rftbsub_128_SSE2;
 }
 
-#endif  // WEBRTC_USE_SS2
diff --git a/src/modules/audio_processing/aec/aec_resampler.c b/src/modules/audio_processing/aec/aec_resampler.c
index ea980cd..126a209 100644
--- a/src/modules/audio_processing/aec/aec_resampler.c
+++ b/src/modules/audio_processing/aec/aec_resampler.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -71,21 +71,24 @@
     return 0;
 }
 
-int WebRtcAec_ResampleLinear(void *resampInst,
-                             const short *inspeech,
-                             int size,
-                             float skew,
-                             short *outspeech)
+void WebRtcAec_ResampleLinear(void *resampInst,
+                              const short *inspeech,
+                              int size,
+                              float skew,
+                              short *outspeech,
+                              int *size_out)
 {
     resampler_t *obj = (resampler_t*) resampInst;
 
     short *y;
     float be, tnew, interp;
-    int tn, outsize, mm;
+    int tn, mm;
 
-    if (size < 0 || size > 2 * FRAME_LEN) {
-        return -1;
-    }
+    assert(!(size < 0 || size > 2 * FRAME_LEN));
+    assert(resampInst != NULL);
+    assert(inspeech != NULL);
+    assert(outspeech != NULL);
+    assert(size_out != NULL);
 
     // Add new frame data in lookahead
     memcpy(&obj->buffer[FRAME_LEN + kResamplingDelay],
@@ -121,15 +124,13 @@
         tn = (int) tnew;
     }
 
-    outsize = mm;
-    obj->position += outsize * be - size;
+    *size_out = mm;
+    obj->position += (*size_out) * be - size;
 
     // Shift buffer
     memmove(obj->buffer,
             &obj->buffer[size],
             (kResamplerBufferSize - size) * sizeof(short));
-
-    return outsize;
 }
 
 int WebRtcAec_GetSkew(void *resampInst, int rawSkew, float *skewEst)
diff --git a/src/modules/audio_processing/aec/aec_resampler.h b/src/modules/audio_processing/aec/aec_resampler.h
index ab4cc6e..acf8cce 100644
--- a/src/modules/audio_processing/aec/aec_resampler.h
+++ b/src/modules/audio_processing/aec/aec_resampler.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -25,11 +25,11 @@
 int WebRtcAec_GetSkew(void *resampInst, int rawSkew, float *skewEst);
 
 // Resamples input using linear interpolation.
-// Returns size of resampled array.
-int WebRtcAec_ResampleLinear(void *resampInst,
-                             const short *inspeech,
-                             int size,
-                             float skew,
-                             short *outspeech);
+void WebRtcAec_ResampleLinear(void *resampInst,
+                              const short *inspeech,
+                              int size,
+                              float skew,
+                              short *outspeech,
+                              int *size_out);
 
 #endif  // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_AEC_RESAMPLER_H_
diff --git a/src/modules/audio_processing/aec/echo_cancellation.c b/src/modules/audio_processing/aec/echo_cancellation.c
index 66c9b97..b5728b8 100644
--- a/src/modules/audio_processing/aec/echo_cancellation.c
+++ b/src/modules/audio_processing/aec/echo_cancellation.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -22,6 +22,8 @@
 
 #include "aec_core.h"
 #include "aec_resampler.h"
+#include "common_audio/signal_processing/include/signal_processing_library.h"
+#include "modules/audio_processing/aec/echo_cancellation_internal.h"
 #include "ring_buffer.h"
 #include "typedefs.h"
 
@@ -43,58 +45,6 @@
 static int instance_count = 0;
 #endif
 
-typedef struct {
-    int delayCtr;
-    int sampFreq;
-    int splitSampFreq;
-    int scSampFreq;
-    float sampFactor; // scSampRate / sampFreq
-    short nlpMode;
-    short autoOnOff;
-    short activity;
-    short skewMode;
-    int bufSizeStart;
-    //short bufResetCtr;  // counts number of noncausal frames
-    int knownDelay;
-
-    short initFlag; // indicates if AEC has been initialized
-
-    // Variables used for averaging far end buffer size
-    short counter;
-    int sum;
-    short firstVal;
-    short checkBufSizeCtr;
-
-    // Variables used for delay shifts
-    short msInSndCardBuf;
-    short filtDelay;  // Filtered delay estimate.
-    int timeForDelayChange;
-    int ECstartup;
-    int checkBuffSize;
-    short lastDelayDiff;
-
-#ifdef WEBRTC_AEC_DEBUG_DUMP
-    void* far_pre_buf_s16;  // Time domain far-end pre-buffer in int16_t.
-    FILE *bufFile;
-    FILE *delayFile;
-    FILE *skewFile;
-#endif
-
-    // Structures
-    void *resampler;
-
-    int skewFrCtr;
-    int resample; // if the skew is small enough we don't resample
-    int highSkewCtr;
-    float skew;
-
-    void* far_pre_buf;  // Time domain far-end pre-buffer.
-
-    int lastError;
-
-    aec_t *aec;
-} aecpc_t;
-
 // Estimates delay to set the position of the far-end buffer read pointer
 // (controlled by knownDelay)
 static int EstBufDelay(aecpc_t *aecInst);
@@ -326,11 +276,8 @@
 
     if (aecpc->skewMode == kAecTrue && aecpc->resample == kAecTrue) {
         // Resample and get a new number of samples
-        newNrOfSamples = WebRtcAec_ResampleLinear(aecpc->resampler,
-                                                  farend,
-                                                  nrOfSamples,
-                                                  skew,
-                                                  newFarend);
+        WebRtcAec_ResampleLinear(aecpc->resampler, farend, nrOfSamples, skew,
+                                 newFarend, &newNrOfSamples);
         farend_ptr = (const int16_t*) newFarend;
     }
 
@@ -454,7 +401,7 @@
             }
 
 #ifdef WEBRTC_AEC_DEBUG_DUMP
-            fwrite(&aecpc->skew, sizeof(aecpc->skew), 1, aecpc->skewFile);
+            (void)fwrite(&aecpc->skew, sizeof(aecpc->skew), 1, aecpc->skewFile);
 #endif
         }
     }
@@ -525,18 +472,12 @@
                 // Enable the AEC
                 aecpc->ECstartup = 0;
             } else if (overhead_elements > 0) {
-                WebRtc_MoveReadPtr(aecpc->aec->far_buf_windowed,
-                                   overhead_elements);
-                WebRtc_MoveReadPtr(aecpc->aec->far_buf, overhead_elements);
-#ifdef WEBRTC_AEC_DEBUG_DUMP
-                WebRtc_MoveReadPtr(aecpc->aec->far_time_buf, overhead_elements);
-#endif
                 // TODO(bjornv): Do we need a check on how much we actually
                 // moved the read pointer? It should always be possible to move
                 // the pointer |overhead_elements| since we have only added data
                 // to the buffer and no delay compensation nor AEC processing
                 // has been done.
-                aecpc->aec->system_delay -= overhead_elements * PART_LEN;
+                WebRtcAec_MoveFarReadPtr(aecpc->aec, overhead_elements);
 
                 // Enable the AEC
                 aecpc->ECstartup = 0;
@@ -591,10 +532,11 @@
 
 #ifdef WEBRTC_AEC_DEBUG_DUMP
     {
-        int16_t far_buf_size_ms = (int16_t) (aecpc->aec->system_delay /
+        int16_t far_buf_size_ms = (int16_t)(aecpc->aec->system_delay /
             (sampMsNb * aecpc->aec->mult));
-        fwrite(&far_buf_size_ms, 2, 1, aecpc->bufFile);
-        fwrite(&(aecpc->knownDelay), sizeof(aecpc->knownDelay), 1, aecpc->delayFile);
+        (void)fwrite(&far_buf_size_ms, 2, 1, aecpc->bufFile);
+        (void)fwrite(&aecpc->knownDelay, sizeof(aecpc->knownDelay), 1,
+                     aecpc->delayFile);
     }
 #endif
 
@@ -808,7 +750,7 @@
   const int kMsPerBlock = (PART_LEN * 1000) / self->splitSampFreq;
   float l1_norm = 0;
 
-  if (self == NULL) {
+  if (handle == NULL) {
     return -1;
   }
   if (median == NULL) {
@@ -866,23 +808,6 @@
   return 0;
 }
 
-WebRtc_Word32 WebRtcAec_get_version(WebRtc_Word8 *versionStr, WebRtc_Word16 len)
-{
-    const char version[] = "AEC 2.5.0";
-    const short versionLen = (short)strlen(version) + 1; // +1 for null-termination
-
-    if (versionStr == NULL) {
-        return -1;
-    }
-
-    if (versionLen > len) {
-        return -1;
-    }
-
-    strncpy(versionStr, version, versionLen);
-    return 0;
-}
-
 WebRtc_Word32 WebRtcAec_get_error_code(void *aecInst)
 {
     aecpc_t *aecpc = aecInst;
@@ -902,6 +827,8 @@
   // Before we proceed with the delay estimate filtering we:
   // 1) Compensate for the frame that will be read.
   // 2) Compensate for drift resampling.
+  // 3) Compensate for non-causality if needed, since the estimated delay can't
+  //    be negative.
 
   // 1) Compensating for the frame(s) that will be read/processed.
   current_delay += FRAME_LEN * aecpc->aec->mult;
@@ -911,6 +838,11 @@
     current_delay -= kResamplingDelay;
   }
 
+  // 3) Compensate for non-causality, if needed, by flushing one block.
+  if (current_delay < PART_LEN) {
+    current_delay += WebRtcAec_MoveFarReadPtr(aecpc->aec, 1) * PART_LEN;
+  }
+
   aecpc->filtDelay = WEBRTC_SPL_MAX(0, (short) (0.8 * aecpc->filtDelay +
           0.2 * current_delay));
 
diff --git a/src/modules/audio_processing/aec/echo_cancellation_internal.h b/src/modules/audio_processing/aec/echo_cancellation_internal.h
new file mode 100644
index 0000000..b218fce
--- /dev/null
+++ b/src/modules/audio_processing/aec/echo_cancellation_internal.h
@@ -0,0 +1,67 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AEC_ECHO_CANCELLATION_INTERNAL_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_AEC_ECHO_CANCELLATION_INTERNAL_H_
+
+#include "modules/audio_processing/aec/aec_core.h"
+
+typedef struct {
+  int delayCtr;
+  int sampFreq;
+  int splitSampFreq;
+  int scSampFreq;
+  float sampFactor;  // scSampRate / sampFreq
+  short nlpMode;
+  short autoOnOff;
+  short activity;
+  short skewMode;
+  int bufSizeStart;
+  int knownDelay;
+
+  short initFlag;  // indicates if AEC has been initialized
+
+  // Variables used for averaging far end buffer size
+  short counter;
+  int sum;
+  short firstVal;
+  short checkBufSizeCtr;
+
+  // Variables used for delay shifts
+  short msInSndCardBuf;
+  short filtDelay;  // Filtered delay estimate.
+  int timeForDelayChange;
+  int ECstartup;
+  int checkBuffSize;
+  short lastDelayDiff;
+
+#ifdef WEBRTC_AEC_DEBUG_DUMP
+  void* far_pre_buf_s16;  // Time domain far-end pre-buffer in int16_t.
+  FILE* bufFile;
+  FILE* delayFile;
+  FILE* skewFile;
+#endif
+
+  // Structures
+  void* resampler;
+
+  int skewFrCtr;
+  int resample;  // if the skew is small enough we don't resample
+  int highSkewCtr;
+  float skew;
+
+  void* far_pre_buf;  // Time domain far-end pre-buffer.
+
+  int lastError;
+
+  aec_t* aec;
+} aecpc_t;
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_ECHO_CANCELLATION_INTERNAL_H_
diff --git a/src/modules/audio_processing/aec/include/echo_cancellation.h b/src/modules/audio_processing/aec/include/echo_cancellation.h
new file mode 100644
index 0000000..a266e84
--- /dev/null
+++ b/src/modules/audio_processing/aec/include/echo_cancellation.h
@@ -0,0 +1,262 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AEC_INCLUDE_ECHO_CANCELLATION_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_AEC_INCLUDE_ECHO_CANCELLATION_H_
+
+#include "typedefs.h"
+
+// Errors
+#define AEC_UNSPECIFIED_ERROR           12000
+#define AEC_UNSUPPORTED_FUNCTION_ERROR  12001
+#define AEC_UNINITIALIZED_ERROR         12002
+#define AEC_NULL_POINTER_ERROR          12003
+#define AEC_BAD_PARAMETER_ERROR         12004
+
+// Warnings
+#define AEC_BAD_PARAMETER_WARNING       12050
+
+enum {
+    kAecNlpConservative = 0,
+    kAecNlpModerate,
+    kAecNlpAggressive
+};
+
+enum {
+    kAecFalse = 0,
+    kAecTrue
+};
+
+typedef struct {
+    WebRtc_Word16 nlpMode;        // default kAecNlpModerate
+    WebRtc_Word16 skewMode;       // default kAecFalse
+    WebRtc_Word16 metricsMode;    // default kAecFalse
+    int delay_logging;            // default kAecFalse
+    //float realSkew;
+} AecConfig;
+
+typedef struct {
+    WebRtc_Word16 instant;
+    WebRtc_Word16 average;
+    WebRtc_Word16 max;
+    WebRtc_Word16 min;
+} AecLevel;
+
+typedef struct {
+    AecLevel rerl;
+    AecLevel erl;
+    AecLevel erle;
+    AecLevel aNlp;
+} AecMetrics;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+ * Allocates the memory needed by the AEC. The memory needs to be initialized
+ * separately using the WebRtcAec_Init() function.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void **aecInst               Pointer to the AEC instance to be created
+ *                              and initialized
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32 return          0: OK
+ *                              -1: error
+ */
+WebRtc_Word32 WebRtcAec_Create(void **aecInst);
+
+/*
+ * This function releases the memory allocated by WebRtcAec_Create().
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void         *aecInst        Pointer to the AEC instance
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32  return         0: OK
+ *                              -1: error
+ */
+WebRtc_Word32 WebRtcAec_Free(void *aecInst);
+
+/*
+ * Initializes an AEC instance.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void           *aecInst      Pointer to the AEC instance
+ * WebRtc_Word32  sampFreq      Sampling frequency of data
+ * WebRtc_Word32  scSampFreq    Soundcard sampling frequency
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32 return          0: OK
+ *                              -1: error
+ */
+WebRtc_Word32 WebRtcAec_Init(void *aecInst,
+                             WebRtc_Word32 sampFreq,
+                             WebRtc_Word32 scSampFreq);
+
+/*
+ * Inserts an 80 or 160 sample block of data into the farend buffer.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void           *aecInst      Pointer to the AEC instance
+ * WebRtc_Word16  *farend       In buffer containing one frame of
+ *                              farend signal for L band
+ * WebRtc_Word16  nrOfSamples   Number of samples in farend buffer
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32  return         0: OK
+ *                              -1: error
+ */
+WebRtc_Word32 WebRtcAec_BufferFarend(void *aecInst,
+                                     const WebRtc_Word16 *farend,
+                                     WebRtc_Word16 nrOfSamples);
+
+/*
+ * Runs the echo canceller on an 80 or 160 sample blocks of data.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void          *aecInst       Pointer to the AEC instance
+ * WebRtc_Word16 *nearend       In buffer containing one frame of
+ *                              nearend+echo signal for L band
+ * WebRtc_Word16 *nearendH      In buffer containing one frame of
+ *                              nearend+echo signal for H band
+ * WebRtc_Word16 nrOfSamples    Number of samples in nearend buffer
+ * WebRtc_Word16 msInSndCardBuf Delay estimate for sound card and
+ *                              system buffers
+ * WebRtc_Word16 skew           Difference between number of samples played
+ *                              and recorded at the soundcard (for clock skew
+ *                              compensation)
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word16  *out          Out buffer, one frame of processed nearend
+ *                              for L band
+ * WebRtc_Word16  *outH         Out buffer, one frame of processed nearend
+ *                              for H band
+ * WebRtc_Word32  return         0: OK
+ *                              -1: error
+ */
+WebRtc_Word32 WebRtcAec_Process(void *aecInst,
+                                const WebRtc_Word16 *nearend,
+                                const WebRtc_Word16 *nearendH,
+                                WebRtc_Word16 *out,
+                                WebRtc_Word16 *outH,
+                                WebRtc_Word16 nrOfSamples,
+                                WebRtc_Word16 msInSndCardBuf,
+                                WebRtc_Word32 skew);
+
+/*
+ * This function enables the user to set certain parameters on-the-fly.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void           *aecInst      Pointer to the AEC instance
+ * AecConfig      config        Config instance that contains all
+ *                              properties to be set
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32  return         0: OK
+ *                              -1: error
+ */
+WebRtc_Word32 WebRtcAec_set_config(void *aecInst, AecConfig config);
+
+/*
+ * Gets the on-the-fly paramters.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void           *aecInst      Pointer to the AEC instance
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * AecConfig      *config       Pointer to the config instance that
+ *                              all properties will be written to
+ * WebRtc_Word32  return         0: OK
+ *                              -1: error
+ */
+WebRtc_Word32 WebRtcAec_get_config(void *aecInst, AecConfig *config);
+
+/*
+ * Gets the current echo status of the nearend signal.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void           *aecInst      Pointer to the AEC instance
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word16  *status       0: Almost certainly nearend single-talk
+ *                              1: Might not be neared single-talk
+ * WebRtc_Word32  return         0: OK
+ *                              -1: error
+ */
+WebRtc_Word32 WebRtcAec_get_echo_status(void *aecInst, WebRtc_Word16 *status);
+
+/*
+ * Gets the current echo metrics for the session.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void           *aecInst      Pointer to the AEC instance
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * AecMetrics     *metrics      Struct which will be filled out with the
+ *                              current echo metrics.
+ * WebRtc_Word32  return         0: OK
+ *                              -1: error
+ */
+WebRtc_Word32 WebRtcAec_GetMetrics(void *aecInst, AecMetrics *metrics);
+
+/*
+ * Gets the current delay metrics for the session.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void*      handle            Pointer to the AEC instance
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * int*       median            Delay median value.
+ * int*       std               Delay standard deviation.
+ *
+ * int        return             0: OK
+ *                              -1: error
+ */
+int WebRtcAec_GetDelayMetrics(void* handle, int* median, int* std);
+
+/*
+ * Gets the last error code.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void           *aecInst      Pointer to the AEC instance
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32  return        11000-11100: error code
+ */
+WebRtc_Word32 WebRtcAec_get_error_code(void *aecInst);
+
+#ifdef __cplusplus
+}
+#endif
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_AEC_INCLUDE_ECHO_CANCELLATION_H_
diff --git a/src/modules/audio_processing/aec/interface/echo_cancellation.h b/src/modules/audio_processing/aec/interface/echo_cancellation.h
deleted file mode 100644
index 4da6e73..0000000
--- a/src/modules/audio_processing/aec/interface/echo_cancellation.h
+++ /dev/null
@@ -1,278 +0,0 @@
-/*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AEC_MAIN_INTERFACE_ECHO_CANCELLATION_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_AEC_MAIN_INTERFACE_ECHO_CANCELLATION_H_
-
-#include "typedefs.h"
-
-// Errors
-#define AEC_UNSPECIFIED_ERROR           12000
-#define AEC_UNSUPPORTED_FUNCTION_ERROR  12001
-#define AEC_UNINITIALIZED_ERROR         12002
-#define AEC_NULL_POINTER_ERROR          12003
-#define AEC_BAD_PARAMETER_ERROR         12004
-
-// Warnings
-#define AEC_BAD_PARAMETER_WARNING       12050
-
-enum {
-    kAecNlpConservative = 0,
-    kAecNlpModerate,
-    kAecNlpAggressive
-};
-
-enum {
-    kAecFalse = 0,
-    kAecTrue
-};
-
-typedef struct {
-    WebRtc_Word16 nlpMode;        // default kAecNlpModerate
-    WebRtc_Word16 skewMode;       // default kAecFalse
-    WebRtc_Word16 metricsMode;    // default kAecFalse
-    int delay_logging;            // default kAecFalse
-    //float realSkew;
-} AecConfig;
-
-typedef struct {
-    WebRtc_Word16 instant;
-    WebRtc_Word16 average;
-    WebRtc_Word16 max;
-    WebRtc_Word16 min;
-} AecLevel;
-
-typedef struct {
-    AecLevel rerl;
-    AecLevel erl;
-    AecLevel erle;
-    AecLevel aNlp;
-} AecMetrics;
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-/*
- * Allocates the memory needed by the AEC. The memory needs to be initialized
- * separately using the WebRtcAec_Init() function.
- *
- * Inputs                       Description
- * -------------------------------------------------------------------
- * void **aecInst               Pointer to the AEC instance to be created
- *                              and initialized
- *
- * Outputs                      Description
- * -------------------------------------------------------------------
- * WebRtc_Word32 return          0: OK
- *                              -1: error
- */
-WebRtc_Word32 WebRtcAec_Create(void **aecInst);
-
-/*
- * This function releases the memory allocated by WebRtcAec_Create().
- *
- * Inputs                       Description
- * -------------------------------------------------------------------
- * void         *aecInst        Pointer to the AEC instance
- *
- * Outputs                      Description
- * -------------------------------------------------------------------
- * WebRtc_Word32  return         0: OK
- *                              -1: error
- */
-WebRtc_Word32 WebRtcAec_Free(void *aecInst);
-
-/*
- * Initializes an AEC instance.
- *
- * Inputs                       Description
- * -------------------------------------------------------------------
- * void           *aecInst      Pointer to the AEC instance
- * WebRtc_Word32  sampFreq      Sampling frequency of data
- * WebRtc_Word32  scSampFreq    Soundcard sampling frequency
- *
- * Outputs                      Description
- * -------------------------------------------------------------------
- * WebRtc_Word32 return          0: OK
- *                              -1: error
- */
-WebRtc_Word32 WebRtcAec_Init(void *aecInst,
-                             WebRtc_Word32 sampFreq,
-                             WebRtc_Word32 scSampFreq);
-
-/*
- * Inserts an 80 or 160 sample block of data into the farend buffer.
- *
- * Inputs                       Description
- * -------------------------------------------------------------------
- * void           *aecInst      Pointer to the AEC instance
- * WebRtc_Word16  *farend       In buffer containing one frame of
- *                              farend signal for L band
- * WebRtc_Word16  nrOfSamples   Number of samples in farend buffer
- *
- * Outputs                      Description
- * -------------------------------------------------------------------
- * WebRtc_Word32  return         0: OK
- *                              -1: error
- */
-WebRtc_Word32 WebRtcAec_BufferFarend(void *aecInst,
-                                     const WebRtc_Word16 *farend,
-                                     WebRtc_Word16 nrOfSamples);
-
-/*
- * Runs the echo canceller on an 80 or 160 sample blocks of data.
- *
- * Inputs                       Description
- * -------------------------------------------------------------------
- * void          *aecInst       Pointer to the AEC instance
- * WebRtc_Word16 *nearend       In buffer containing one frame of
- *                              nearend+echo signal for L band
- * WebRtc_Word16 *nearendH      In buffer containing one frame of
- *                              nearend+echo signal for H band
- * WebRtc_Word16 nrOfSamples    Number of samples in nearend buffer
- * WebRtc_Word16 msInSndCardBuf Delay estimate for sound card and
- *                              system buffers
- * WebRtc_Word16 skew           Difference between number of samples played
- *                              and recorded at the soundcard (for clock skew
- *                              compensation)
- *
- * Outputs                      Description
- * -------------------------------------------------------------------
- * WebRtc_Word16  *out          Out buffer, one frame of processed nearend
- *                              for L band
- * WebRtc_Word16  *outH         Out buffer, one frame of processed nearend
- *                              for H band
- * WebRtc_Word32  return         0: OK
- *                              -1: error
- */
-WebRtc_Word32 WebRtcAec_Process(void *aecInst,
-                                const WebRtc_Word16 *nearend,
-                                const WebRtc_Word16 *nearendH,
-                                WebRtc_Word16 *out,
-                                WebRtc_Word16 *outH,
-                                WebRtc_Word16 nrOfSamples,
-                                WebRtc_Word16 msInSndCardBuf,
-                                WebRtc_Word32 skew);
-
-/*
- * This function enables the user to set certain parameters on-the-fly.
- *
- * Inputs                       Description
- * -------------------------------------------------------------------
- * void           *aecInst      Pointer to the AEC instance
- * AecConfig      config        Config instance that contains all
- *                              properties to be set
- *
- * Outputs                      Description
- * -------------------------------------------------------------------
- * WebRtc_Word32  return         0: OK
- *                              -1: error
- */
-WebRtc_Word32 WebRtcAec_set_config(void *aecInst, AecConfig config);
-
-/*
- * Gets the on-the-fly paramters.
- *
- * Inputs                       Description
- * -------------------------------------------------------------------
- * void           *aecInst      Pointer to the AEC instance
- *
- * Outputs                      Description
- * -------------------------------------------------------------------
- * AecConfig      *config       Pointer to the config instance that
- *                              all properties will be written to
- * WebRtc_Word32  return         0: OK
- *                              -1: error
- */
-WebRtc_Word32 WebRtcAec_get_config(void *aecInst, AecConfig *config);
-
-/*
- * Gets the current echo status of the nearend signal.
- *
- * Inputs                       Description
- * -------------------------------------------------------------------
- * void           *aecInst      Pointer to the AEC instance
- *
- * Outputs                      Description
- * -------------------------------------------------------------------
- * WebRtc_Word16  *status       0: Almost certainly nearend single-talk
- *                              1: Might not be neared single-talk
- * WebRtc_Word32  return         0: OK
- *                              -1: error
- */
-WebRtc_Word32 WebRtcAec_get_echo_status(void *aecInst, WebRtc_Word16 *status);
-
-/*
- * Gets the current echo metrics for the session.
- *
- * Inputs                       Description
- * -------------------------------------------------------------------
- * void           *aecInst      Pointer to the AEC instance
- *
- * Outputs                      Description
- * -------------------------------------------------------------------
- * AecMetrics     *metrics      Struct which will be filled out with the
- *                              current echo metrics.
- * WebRtc_Word32  return         0: OK
- *                              -1: error
- */
-WebRtc_Word32 WebRtcAec_GetMetrics(void *aecInst, AecMetrics *metrics);
-
-/*
- * Gets the current delay metrics for the session.
- *
- * Inputs                       Description
- * -------------------------------------------------------------------
- * void*      handle            Pointer to the AEC instance
- *
- * Outputs                      Description
- * -------------------------------------------------------------------
- * int*       median            Delay median value.
- * int*       std               Delay standard deviation.
- *
- * int        return             0: OK
- *                              -1: error
- */
-int WebRtcAec_GetDelayMetrics(void* handle, int* median, int* std);
-
-/*
- * Gets the last error code.
- *
- * Inputs                       Description
- * -------------------------------------------------------------------
- * void           *aecInst      Pointer to the AEC instance
- *
- * Outputs                      Description
- * -------------------------------------------------------------------
- * WebRtc_Word32  return        11000-11100: error code
- */
-WebRtc_Word32 WebRtcAec_get_error_code(void *aecInst);
-
-/*
- * Gets a version string.
- *
- * Inputs                       Description
- * -------------------------------------------------------------------
- * char           *versionStr   Pointer to a string array
- * WebRtc_Word16  len           The maximum length of the string
- *
- * Outputs                      Description
- * -------------------------------------------------------------------
- * WebRtc_Word8   *versionStr   Pointer to a string array
- * WebRtc_Word32  return         0: OK
- *                              -1: error
- */
-WebRtc_Word32 WebRtcAec_get_version(WebRtc_Word8 *versionStr, WebRtc_Word16 len);
-
-#ifdef __cplusplus
-}
-#endif
-#endif  /* WEBRTC_MODULES_AUDIO_PROCESSING_AEC_MAIN_INTERFACE_ECHO_CANCELLATION_H_ */
diff --git a/src/modules/audio_processing/aec/system_delay_unittest.cc b/src/modules/audio_processing/aec/system_delay_unittest.cc
new file mode 100644
index 0000000..272cb8a
--- /dev/null
+++ b/src/modules/audio_processing/aec/system_delay_unittest.cc
@@ -0,0 +1,459 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gtest/gtest.h"
+
+#include "modules/audio_processing/aec/include/echo_cancellation.h"
+#include "modules/audio_processing/aec/echo_cancellation_internal.h"
+#include "typedefs.h"
+
+namespace {
+
+class SystemDelayTest : public ::testing::Test {
+ protected:
+  SystemDelayTest();
+  virtual void SetUp();
+  virtual void TearDown();
+
+  // Initialization of AEC handle with respect to |sample_rate_hz|. Since the
+  // device sample rate is unimportant we set that value to 48000 Hz.
+  void Init(int sample_rate_hz);
+
+  // Makes one render call and one capture call in that specific order.
+  void RenderAndCapture(int device_buffer_ms);
+
+  // Fills up the far-end buffer with respect to the default device buffer size.
+  int BufferFillUp();
+
+  // Runs and verifies the behavior in a stable startup procedure.
+  void RunStableStartup();
+
+  // Maps buffer size in ms into samples, taking the unprocessed frame into
+  // account.
+  int MapBufferSizeToSamples(int size_in_ms);
+
+  void* handle_;
+  aecpc_t* self_;
+  int samples_per_frame_;
+  // Dummy input/output speech data.
+  int16_t far_[160];
+  int16_t near_[160];
+  int16_t out_[160];
+};
+
+SystemDelayTest::SystemDelayTest()
+    : handle_(NULL),
+      self_(NULL),
+      samples_per_frame_(0) {
+  // Dummy input data are set with more or less arbitrary non-zero values.
+  memset(far_, 1, sizeof(far_));
+  memset(near_, 2, sizeof(near_));
+  memset(out_, 0, sizeof(out_));
+}
+
+void SystemDelayTest::SetUp() {
+  ASSERT_EQ(0, WebRtcAec_Create(&handle_));
+  self_ = reinterpret_cast<aecpc_t*>(handle_);
+}
+
+void SystemDelayTest::TearDown() {
+  // Free AEC
+  ASSERT_EQ(0, WebRtcAec_Free(handle_));
+  handle_ = NULL;
+}
+
+// In SWB mode nothing is added to the buffer handling with respect to
+// functionality compared to WB. We therefore only verify behavior in NB and WB.
+static const int kSampleRateHz[] = { 8000, 16000 };
+static const size_t kNumSampleRates =
+    sizeof(kSampleRateHz) / sizeof(*kSampleRateHz);
+
+// Default audio device buffer size used.
+static const int kDeviceBufMs = 100;
+
+// Requirement for a stable device convergence time in ms. Should converge in
+// less than |kStableConvergenceMs|.
+static const int kStableConvergenceMs = 100;
+
+// Maximum convergence time in ms. This means that we should leave the startup
+// phase after |kMaxConvergenceMs| independent of device buffer stability
+// conditions.
+static const int kMaxConvergenceMs = 500;
+
+void SystemDelayTest::Init(int sample_rate_hz) {
+  // Initialize AEC
+  EXPECT_EQ(0, WebRtcAec_Init(handle_, sample_rate_hz, 48000));
+
+  // One frame equals 10 ms of data.
+  samples_per_frame_ = sample_rate_hz / 100;
+}
+
+void SystemDelayTest::RenderAndCapture(int device_buffer_ms) {
+  EXPECT_EQ(0, WebRtcAec_BufferFarend(handle_, far_, samples_per_frame_));
+  EXPECT_EQ(0, WebRtcAec_Process(handle_, near_, NULL, out_, NULL,
+                                 samples_per_frame_, device_buffer_ms, 0));
+}
+
+int SystemDelayTest::BufferFillUp() {
+  // To make sure we have a full buffer when we verify stability we first fill
+  // up the far-end buffer with the same amount as we will report in through
+  // Process().
+  int buffer_size = 0;
+  for (int i = 0; i < kDeviceBufMs / 10; i++) {
+    EXPECT_EQ(0, WebRtcAec_BufferFarend(handle_, far_, samples_per_frame_));
+    buffer_size += samples_per_frame_;
+    EXPECT_EQ(buffer_size, self_->aec->system_delay);
+  }
+  return buffer_size;
+}
+
+void SystemDelayTest::RunStableStartup() {
+  // To make sure we have a full buffer when we verify stability we first fill
+  // up the far-end buffer with the same amount as we will report in through
+  // Process().
+  int buffer_size = BufferFillUp();
+  // A stable device should be accepted and put in a regular process mode within
+  // |kStableConvergenceMs|.
+  int process_time_ms = 0;
+  for (; process_time_ms < kStableConvergenceMs; process_time_ms += 10) {
+    RenderAndCapture(kDeviceBufMs);
+    buffer_size += samples_per_frame_;
+    if (self_->ECstartup == 0) {
+      // We have left the startup phase.
+      break;
+    }
+  }
+  // Verify convergence time.
+  EXPECT_GT(kStableConvergenceMs, process_time_ms);
+  // Verify that the buffer has been flushed.
+  EXPECT_GE(buffer_size, self_->aec->system_delay);
+}
+
+int SystemDelayTest::MapBufferSizeToSamples(int size_in_ms) {
+  // The extra 10 ms corresponds to the unprocessed frame.
+  return (size_in_ms + 10) * samples_per_frame_ / 10;
+}
+
+// The tests should meet basic requirements and not be adjusted to what is
+// actually implemented. If we don't get good code coverage this way we either
+// lack in tests or have unnecessary code.
+// General requirements:
+// 1) If we add far-end data the system delay should be increased with the same
+//    amount we add.
+// 2) If the far-end buffer is full we should flush the oldest data to make room
+//    for the new. In this case the system delay is unaffected.
+// 3) There should exist a startup phase in which the buffer size is to be
+//    determined. In this phase no cancellation should be performed.
+// 4) Under stable conditions (small variations in device buffer sizes) the AEC
+//    should determine an appropriate local buffer size within
+//    |kStableConvergenceMs| ms.
+// 5) Under unstable conditions the AEC should make a decision within
+//    |kMaxConvergenceMs| ms.
+// 6) If the local buffer runs out of data we should stuff the buffer with older
+//    frames.
+// 7) The system delay should within |kMaxConvergenceMs| ms heal from
+//    disturbances like drift, data glitches, toggling events and outliers.
+// 8) The system delay should never become negative.
+
+TEST_F(SystemDelayTest, CorrectIncreaseWhenBufferFarend) {
+  // When we add data to the AEC buffer the internal system delay should be
+  // incremented with the same amount as the size of data.
+  for (size_t i = 0; i < kNumSampleRates; i++) {
+    Init(kSampleRateHz[i]);
+
+    // Loop through a couple of calls to make sure the system delay increments
+    // correctly.
+    for (int j = 1; j <= 5; j++) {
+      EXPECT_EQ(0, WebRtcAec_BufferFarend(handle_, far_, samples_per_frame_));
+      EXPECT_EQ(j * samples_per_frame_, self_->aec->system_delay);
+    }
+  }
+}
+
+// TODO(bjornv): Add a test to verify behavior if the far-end buffer is full
+// when adding new data.
+
+TEST_F(SystemDelayTest, CorrectDelayAfterStableStartup) {
+  // We run the system in a stable startup. After that we verify that the system
+  // delay meets the requirements.
+  for (size_t i = 0; i < kNumSampleRates; i++) {
+    Init(kSampleRateHz[i]);
+    RunStableStartup();
+
+    // Verify system delay with respect to requirements, i.e., the
+    // |system_delay| is in the interval [75%, 100%] of what's reported on the
+    // average.
+    int average_reported_delay = kDeviceBufMs * samples_per_frame_ / 10;
+    EXPECT_GE(average_reported_delay, self_->aec->system_delay);
+    EXPECT_LE(average_reported_delay * 3 / 4, self_->aec->system_delay);
+  }
+}
+
+TEST_F(SystemDelayTest, CorrectDelayAfterUnstableStartup) {
+  // In an unstable system we would start processing after |kMaxConvergenceMs|.
+  // On the last frame the AEC buffer is adjusted to 60% of the last reported
+  // device buffer size.
+  // We construct an unstable system by altering the device buffer size between
+  // two values |kDeviceBufMs| +- 25 ms.
+  for (size_t i = 0; i < kNumSampleRates; i++) {
+    Init(kSampleRateHz[i]);
+
+    // To make sure we have a full buffer when we verify stability we first fill
+    // up the far-end buffer with the same amount as we will report in on the
+    // average through Process().
+    int buffer_size = BufferFillUp();
+
+    int buffer_offset_ms = 25;
+    int reported_delay_ms = 0;
+    int process_time_ms = 0;
+    for (; process_time_ms <= kMaxConvergenceMs; process_time_ms += 10) {
+      reported_delay_ms = kDeviceBufMs + buffer_offset_ms;
+      RenderAndCapture(reported_delay_ms);
+      buffer_size += samples_per_frame_;
+      buffer_offset_ms = -buffer_offset_ms;
+      if (self_->ECstartup == 0) {
+        // We have left the startup phase.
+        break;
+      }
+    }
+    // Verify convergence time.
+    EXPECT_GE(kMaxConvergenceMs, process_time_ms);
+    // Verify that the buffer has been flushed.
+    EXPECT_GE(buffer_size, self_->aec->system_delay);
+
+    // Verify system delay with respect to requirements, i.e., the
+    // |system_delay| is in the interval [60%, 100%] of what's last reported.
+    EXPECT_GE(reported_delay_ms * samples_per_frame_ / 10,
+              self_->aec->system_delay);
+    EXPECT_LE(reported_delay_ms * samples_per_frame_ / 10 * 3 / 5,
+              self_->aec->system_delay);
+  }
+}
+
+TEST_F(SystemDelayTest, CorrectDelayAfterStableBufferBuildUp) {
+  // In this test we start by establishing the device buffer size during stable
+  // conditions, but with an empty internal far-end buffer. Once that is done we
+  // verify that the system delay is increased correctly until we have reach an
+  // internal buffer size of 75% of what's been reported.
+  for (size_t i = 0; i < kNumSampleRates; i++) {
+    Init(kSampleRateHz[i]);
+
+    // We assume that running |kStableConvergenceMs| calls will put the
+    // algorithm in a state where the device buffer size has been determined. We
+    // can make that assumption since we have a separate stability test.
+    int process_time_ms = 0;
+    for (; process_time_ms < kStableConvergenceMs; process_time_ms += 10) {
+      EXPECT_EQ(0, WebRtcAec_Process(handle_, near_, NULL, out_, NULL,
+                                     samples_per_frame_, kDeviceBufMs, 0));
+    }
+    // Verify that a buffer size has been established.
+    EXPECT_EQ(0, self_->checkBuffSize);
+
+    // We now have established the required buffer size. Let us verify that we
+    // fill up before leaving the startup phase for normal processing.
+    int buffer_size = 0;
+    int target_buffer_size = kDeviceBufMs * samples_per_frame_ / 10 * 3 / 4;
+    process_time_ms = 0;
+    for (; process_time_ms <= kMaxConvergenceMs; process_time_ms += 10) {
+      RenderAndCapture(kDeviceBufMs);
+      buffer_size += samples_per_frame_;
+      if (self_->ECstartup == 0) {
+        // We have left the startup phase.
+        break;
+      }
+    }
+    // Verify convergence time.
+    EXPECT_GT(kMaxConvergenceMs, process_time_ms);
+    // Verify that the buffer has reached the desired size.
+    EXPECT_LE(target_buffer_size, self_->aec->system_delay);
+
+    // Verify normal behavior (system delay is kept constant) after startup by
+    // running a couple of calls to BufferFarend() and Process().
+    for (int j = 0; j < 6; j++) {
+      int system_delay_before_calls = self_->aec->system_delay;
+      RenderAndCapture(kDeviceBufMs);
+      EXPECT_EQ(system_delay_before_calls, self_->aec->system_delay);
+    }
+  }
+}
+
+TEST_F(SystemDelayTest, CorrectDelayWhenBufferUnderrun) {
+  // Here we test a buffer under run scenario. If we keep on calling
+  // WebRtcAec_Process() we will finally run out of data, but should
+  // automatically stuff the buffer. We verify this behavior by checking if the
+  // system delay goes negative.
+  for (size_t i = 0; i < kNumSampleRates; i++) {
+    Init(kSampleRateHz[i]);
+    RunStableStartup();
+
+    // The AEC has now left the Startup phase. We now have at most
+    // |kStableConvergenceMs| in the buffer. Keep on calling Process() until
+    // we run out of data and verify that the system delay is non-negative.
+    for (int j = 0; j <= kStableConvergenceMs; j += 10) {
+      EXPECT_EQ(0, WebRtcAec_Process(handle_, near_, NULL, out_, NULL,
+                                     samples_per_frame_, kDeviceBufMs, 0));
+      EXPECT_LE(0, self_->aec->system_delay);
+    }
+  }
+}
+
+TEST_F(SystemDelayTest, CorrectDelayDuringDrift) {
+  // This drift test should verify that the system delay is never exceeding the
+  // device buffer. The drift is simulated by decreasing the reported device
+  // buffer size by 1 ms every 100 ms. If the device buffer size goes below 30
+  // ms we jump (add) 10 ms to give a repeated pattern.
+  for (size_t i = 0; i < kNumSampleRates; i++) {
+    Init(kSampleRateHz[i]);
+    RunStableStartup();
+
+    // We have now left the startup phase and proceed with normal processing.
+    int jump = 0;
+    for (int j = 0; j < 1000; j++) {
+      // Drift = -1 ms per 100 ms of data.
+      int device_buf_ms = kDeviceBufMs - (j / 10) + jump;
+      int device_buf = MapBufferSizeToSamples(device_buf_ms);
+
+      if (device_buf_ms < 30) {
+        // Add 10 ms data, taking affect next frame.
+        jump += 10;
+      }
+      RenderAndCapture(device_buf_ms);
+
+      // Verify that the system delay does not exceed the device buffer.
+      EXPECT_GE(device_buf, self_->aec->system_delay);
+
+      // Verify that the system delay is non-negative.
+      EXPECT_LE(0, self_->aec->system_delay);
+    }
+  }
+}
+
+TEST_F(SystemDelayTest, ShouldRecoverAfterGlitch) {
+  // This glitch test should verify that the system delay recovers if there is
+  // a glitch in data. The data glitch is constructed as 200 ms of buffering
+  // after which the stable procedure continues. The glitch is never reported by
+  // the device.
+  // The system is said to be in a non-causal state if the difference between
+  // the device buffer and system delay is less than a block (64 samples).
+  for (size_t i = 0; i < kNumSampleRates; i++) {
+    Init(kSampleRateHz[i]);
+    RunStableStartup();
+    int device_buf = MapBufferSizeToSamples(kDeviceBufMs);
+    // Glitch state.
+    for (int j = 0; j < 20; j++) {
+      EXPECT_EQ(0, WebRtcAec_BufferFarend(handle_, far_, samples_per_frame_));
+      // No need to verify system delay, since that is done in a separate test.
+    }
+    // Verify that we are in a non-causal state, i.e.,
+    // |system_delay| > |device_buf|.
+    EXPECT_LT(device_buf, self_->aec->system_delay);
+
+    // Recover state. Should recover at least 4 ms of data per 10 ms, hence a
+    // glitch of 200 ms will take at most 200 * 10 / 4 = 500 ms to recover from.
+    bool non_causal = true;  // We are currently in a non-causal state.
+    for (int j = 0; j < 50; j++) {
+      int system_delay_before = self_->aec->system_delay;
+      RenderAndCapture(kDeviceBufMs);
+      int system_delay_after = self_->aec->system_delay;
+
+      // We have recovered if |device_buf| - |system_delay_after| >= 64 (one
+      // block). During recovery |system_delay_after| < |system_delay_before|,
+      // otherwise they are equal.
+      if (non_causal) {
+        EXPECT_LT(system_delay_after, system_delay_before);
+        if (device_buf - system_delay_after >= 64) {
+          non_causal = false;
+        }
+      } else {
+        EXPECT_EQ(system_delay_before, system_delay_after);
+      }
+      // Verify that the system delay is non-negative.
+      EXPECT_LE(0, self_->aec->system_delay);
+    }
+    // Check that we have recovered.
+    EXPECT_FALSE(non_causal);
+  }
+}
+
+TEST_F(SystemDelayTest, UnaffectedWhenSpuriousDeviceBufferValues) {
+  // This spurious device buffer data test aims at verifying that the system
+  // delay is unaffected by large outliers.
+  // The system is said to be in a non-causal state if the difference between
+  // the device buffer and system delay is less than a block (64 samples).
+  for (size_t i = 0; i < kNumSampleRates; i++) {
+    Init(kSampleRateHz[i]);
+    RunStableStartup();
+    int device_buf = MapBufferSizeToSamples(kDeviceBufMs);
+
+    // Normal state. We are currently not in a non-causal state.
+    bool non_causal = false;
+
+    // Run 1 s and replace device buffer size with 500 ms every 100 ms.
+    for (int j = 0; j < 100; j++) {
+      int system_delay_before_calls = self_->aec->system_delay;
+      int device_buf_ms = kDeviceBufMs;
+      if (j % 10 == 0) {
+        device_buf_ms = 500;
+      }
+      RenderAndCapture(device_buf_ms);
+
+      // Check for non-causality.
+      if (device_buf - self_->aec->system_delay < 64) {
+        non_causal = true;
+      }
+      EXPECT_FALSE(non_causal);
+      EXPECT_EQ(system_delay_before_calls, self_->aec->system_delay);
+
+      // Verify that the system delay is non-negative.
+      EXPECT_LE(0, self_->aec->system_delay);
+    }
+  }
+}
+
+TEST_F(SystemDelayTest, CorrectImpactWhenTogglingDeviceBufferValues) {
+  // This test aims at verifying that the system delay is "unaffected" by
+  // toggling values reported by the device.
+  // The test is constructed such that every other device buffer value is zero
+  // and then 2 * |kDeviceBufMs|, hence the size is constant on the average. The
+  // zero values will force us into a non-causal state and thereby lowering the
+  // system delay until we basically runs out of data. Once that happens the
+  // buffer will be stuffed.
+  // TODO(bjornv): This test will have a better impact if we verified that the
+  // delay estimate goes up when the system delay goes done to meet the average
+  // device buffer size.
+  for (size_t i = 0; i < kNumSampleRates; i++) {
+    Init(kSampleRateHz[i]);
+    RunStableStartup();
+    int device_buf = MapBufferSizeToSamples(kDeviceBufMs);
+
+    // Normal state. We are currently not in a non-causal state.
+    bool non_causal = false;
+
+    // Loop through 100 frames (both render and capture), which equals 1 s of
+    // data. Every odd frame we set the device buffer size to 2 * |kDeviceBufMs|
+    // and even frames we set the device buffer size to zero.
+    for (int j = 0; j < 100; j++) {
+      int system_delay_before_calls = self_->aec->system_delay;
+      int device_buf_ms = 2 * (j % 2) * kDeviceBufMs;
+      RenderAndCapture(device_buf_ms);
+
+      // Check for non-causality, compared with the average device buffer size.
+      non_causal |= (device_buf - self_->aec->system_delay < 64);
+      EXPECT_GE(system_delay_before_calls, self_->aec->system_delay);
+
+      // Verify that the system delay is non-negative.
+      EXPECT_LE(0, self_->aec->system_delay);
+    }
+    // Verify we are not in a non-causal state.
+    EXPECT_FALSE(non_causal);
+  }
+}
+
+}  // namespace
diff --git a/src/modules/audio_processing/aecm/Android.mk b/src/modules/audio_processing/aecm/Android.mk
index 59c1b20..c3a9c00 100644
--- a/src/modules/audio_processing/aecm/Android.mk
+++ b/src/modules/audio_processing/aecm/Android.mk
@@ -34,7 +34,7 @@
 LOCAL_CFLAGS_mips64 := $(MY_WEBRTC_COMMON_DEFS_mips64)
 
 LOCAL_C_INCLUDES := \
-    $(LOCAL_PATH)/interface \
+    $(LOCAL_PATH)/include \
     $(LOCAL_PATH)/../utility \
     $(LOCAL_PATH)/../../.. \
     $(LOCAL_PATH)/../../../common_audio/signal_processing/include \
@@ -79,7 +79,7 @@
 LOCAL_CFLAGS_arm := $(MY_WEBRTC_COMMON_DEFS_arm)
 
 LOCAL_C_INCLUDES := \
-    $(LOCAL_PATH)/interface \
+    $(LOCAL_PATH)/include \
     $(LOCAL_PATH)/../../.. \
     $(LOCAL_PATH)/../../../common_audio/signal_processing/include
 
diff --git a/src/modules/audio_processing/aecm/aecm.gypi b/src/modules/audio_processing/aecm/aecm.gypi
index bf520bf..4dfab51 100644
--- a/src/modules/audio_processing/aecm/aecm.gypi
+++ b/src/modules/audio_processing/aecm/aecm.gypi
@@ -1,4 +1,4 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 #
 # Use of this source code is governed by a BSD-style license
 # that can be found in the LICENSE file in the root of the source
@@ -17,19 +17,41 @@
         'apm_util'
       ],
       'include_dirs': [
-        'interface',
+        'include',
       ],
       'direct_dependent_settings': {
         'include_dirs': [
-          'interface',
+          'include',
         ],
       },
       'sources': [
-        'interface/echo_control_mobile.h',
+        'include/echo_control_mobile.h',
         'echo_control_mobile.c',
         'aecm_core.c',
         'aecm_core.h',
       ],
+      'conditions': [
+        ['target_arch=="arm" and armv7==1', {
+          'dependencies': [ 'aecm_neon', ],
+        }],
+      ],
     },
   ],
+  'conditions': [
+    ['target_arch=="arm" and armv7==1', {
+      'targets': [
+        {
+          'target_name': 'aecm_neon',
+          'type': '<(library)',
+          'includes': [ '../../../build/arm_neon.gypi', ],
+          'dependencies': [
+            '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+          ],
+          'sources': [
+            'aecm_core_neon.c',
+          ],
+        },
+      ],
+    }],
+  ],
 }
diff --git a/src/modules/audio_processing/aecm/aecm_core.c b/src/modules/audio_processing/aecm/aecm_core.c
index 9bf5c4a..2db0f2a 100644
--- a/src/modules/audio_processing/aecm/aecm_core.c
+++ b/src/modules/audio_processing/aecm/aecm_core.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -11,12 +11,14 @@
 #include "aecm_core.h"
 
 #include <assert.h>
+#include <stddef.h>
 #include <stdlib.h>
 
 #include "cpu_features_wrapper.h"
 #include "delay_estimator_wrapper.h"
 #include "echo_control_mobile.h"
 #include "ring_buffer.h"
+#include "system_wrappers/interface/compile_assert.h"
 #include "typedefs.h"
 
 #ifdef ARM_WINM_LOG
@@ -312,10 +314,9 @@
         return -1;
     }
 
-    if (WebRtc_CreateDelayEstimator(&aecm->delay_estimator,
-                                    PART_LEN1,
-                                    MAX_DELAY,
-                                    0) == -1) {
+    aecm->delay_estimator = WebRtc_CreateDelayEstimator(PART_LEN1, MAX_DELAY,
+                                                        0);
+    if (aecm->delay_estimator == NULL) {
       WebRtcAecm_FreeCore(aecm);
       aecm = NULL;
       return -1;
@@ -396,6 +397,18 @@
     }
 }
 
+// Initialize function pointers for ARM Neon platform.
+#if (defined WEBRTC_DETECT_ARM_NEON || defined WEBRTC_ARCH_ARM_NEON)
+static void WebRtcAecm_InitNeon(void)
+{
+  WebRtcAecm_WindowAndFFT = WebRtcAecm_WindowAndFFTNeon;
+  WebRtcAecm_InverseFFTAndWindow = WebRtcAecm_InverseFFTAndWindowNeon;
+  WebRtcAecm_CalcLinearEnergies = WebRtcAecm_CalcLinearEnergiesNeon;
+  WebRtcAecm_StoreAdaptiveChannel = WebRtcAecm_StoreAdaptiveChannelNeon;
+  WebRtcAecm_ResetAdaptiveChannel = WebRtcAecm_ResetAdaptiveChannelNeon;
+}
+#endif
+
 static void InverseFFTAndWindowC(AecmCore_t* aecm,
                                  WebRtc_Word16* fft,
                                  complex16_t* efw,
@@ -661,7 +674,9 @@
     aecm->supGainErrParamDiffAB = SUPGAIN_ERROR_PARAM_A - SUPGAIN_ERROR_PARAM_B;
     aecm->supGainErrParamDiffBD = SUPGAIN_ERROR_PARAM_B - SUPGAIN_ERROR_PARAM_D;
 
-    assert(PART_LEN % 16 == 0);
+    // Assert a preprocessor definition at compile-time. It's an assumption
+    // used in assembly code, so check the assembly files before any change.
+    COMPILE_ASSERT(PART_LEN % 16 == 0);
 
     // Initialize function pointers.
     WebRtcAecm_WindowAndFFT = WindowAndFFTC;
@@ -674,7 +689,7 @@
     uint64_t features = WebRtc_GetCPUFeaturesARM();
     if ((features & kCPUFeatureNEON) != 0)
     {
-        WebRtcAecm_InitNeon();
+      WebRtcAecm_InitNeon();
     }
 #elif defined(WEBRTC_ARCH_ARM_NEON)
     WebRtcAecm_InitNeon();
@@ -1394,7 +1409,9 @@
     WebRtc_Word16 *fft = (WebRtc_Word16 *) (((uintptr_t) fft_buf + 31) & ~31);
 
     WebRtc_Word16 tmp16no1;
+#ifndef WEBRTC_ARCH_ARM_V7
     WebRtc_Word16 tmp16no2;
+#endif
 #ifdef AECM_WITH_ABS_APPROX
     WebRtc_Word16 max_value = 0;
     WebRtc_Word16 min_value = 0;
@@ -1477,18 +1494,22 @@
             freq_signal_abs[i] = (WebRtc_UWord16)tmp16no1 +
                 (WebRtc_UWord16)tmp16no2;
 #else
-#ifdef WEBRTC_ARCH_ARM_V7A
-           __asm__("smulbb %0, %1, %2" : "=r"(tmp32no1) : "r"(freq_signal[i].real),
-                                                "r"(freq_signal[i].real));
-           __asm__("smlabb %0, %1, %2, %3" :: "r"(tmp32no2), "r"(freq_signal[i].imag), 
-                                                "r"(freq_signal[i].imag), "r"(tmp32no1));
+#ifdef WEBRTC_ARCH_ARM_V7
+            __asm __volatile(
+              "smulbb %[tmp32no1], %[real], %[real]\n\t"
+              "smlabb %[tmp32no2], %[imag], %[imag], %[tmp32no1]\n\t"
+              :[tmp32no1]"=r"(tmp32no1),
+               [tmp32no2]"=r"(tmp32no2)
+              :[real]"r"(freq_signal[i].real),
+               [imag]"r"(freq_signal[i].imag)
+            );
 #else
             tmp16no1 = WEBRTC_SPL_ABS_W16(freq_signal[i].real);
             tmp16no2 = WEBRTC_SPL_ABS_W16(freq_signal[i].imag);
             tmp32no1 = WEBRTC_SPL_MUL_16_16(tmp16no1, tmp16no1);
             tmp32no2 = WEBRTC_SPL_MUL_16_16(tmp16no2, tmp16no2);
             tmp32no2 = WEBRTC_SPL_ADD_SAT_W32(tmp32no1, tmp32no2);
-#endif // WEBRTC_ARCH_ARM_V7A
+#endif // WEBRTC_ARCH_ARM_V7
             tmp32no1 = WebRtcSpl_SqrtFloor(tmp32no2);
 
             freq_signal_abs[i] = (WebRtc_UWord16)tmp32no1;
@@ -1847,7 +1868,7 @@
             {
                 hnl[i] = 0;
             }
-    
+
             // Remove outliers
             if (numPosCoef < 3)
             {
@@ -2124,3 +2145,4 @@
     aecm->farBufReadPos += readLen;
 }
 
+
diff --git a/src/modules/audio_processing/aecm/aecm_core.h b/src/modules/audio_processing/aecm/aecm_core.h
index 0ec62ec..8161a8c 100644
--- a/src/modules/audio_processing/aecm/aecm_core.h
+++ b/src/modules/audio_processing/aecm/aecm_core.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -10,92 +10,13 @@
 
 // Performs echo control (suppression) with fft routines in fixed-point
 
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AECM_MAIN_SOURCE_AECM_CORE_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_AECM_MAIN_SOURCE_AECM_CORE_H_
-
-#define AECM_DYNAMIC_Q // turn on/off dynamic Q-domain
-//#define AECM_WITH_ABS_APPROX
-//#define AECM_SHORT                // for 32 sample partition length (otherwise 64)
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AECM_AECM_CORE_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_AECM_AECM_CORE_H_
 
 #include "typedefs.h"
 #include "signal_processing_library.h"
 
-// Algorithm parameters
-
-#define FRAME_LEN       80              // Total frame length, 10 ms
-#ifdef AECM_SHORT
-
-#define PART_LEN        32              // Length of partition
-#define PART_LEN_SHIFT  6               // Length of (PART_LEN * 2) in base 2
-
-#else
-
-#define PART_LEN        64              // Length of partition
-#define PART_LEN_SHIFT  7               // Length of (PART_LEN * 2) in base 2
-
-#endif
-
-#define PART_LEN1       (PART_LEN + 1)  // Unique fft coefficients
-#define PART_LEN2       (PART_LEN << 1) // Length of partition * 2
-#define PART_LEN4       (PART_LEN << 2) // Length of partition * 4
-#define FAR_BUF_LEN     PART_LEN4       // Length of buffers
-#define MAX_DELAY 100
-
-// Counter parameters
-#ifdef AECM_SHORT
-
-#define CONV_LEN        1024            // Convergence length used at startup
-#else
-
-#define CONV_LEN        512             // Convergence length used at startup
-#endif
-
-#define CONV_LEN2       (CONV_LEN << 1) // Convergence length * 2 used at startup
-// Energy parameters
-#define MAX_BUF_LEN     64              // History length of energy signals
-
-#define FAR_ENERGY_MIN  1025            // Lowest Far energy level: At least 2 in energy
-#define FAR_ENERGY_DIFF 929             // Allowed difference between max and min
-
-#define ENERGY_DEV_OFFSET       0       // The energy error offset in Q8
-#define ENERGY_DEV_TOL  400             // The energy estimation tolerance in Q8
-#define FAR_ENERGY_VAD_REGION   230     // Far VAD tolerance region
-// Stepsize parameters
-#define MU_MIN          10              // Min stepsize 2^-MU_MIN (far end energy dependent)
-#define MU_MAX          1               // Max stepsize 2^-MU_MAX (far end energy dependent)
-#define MU_DIFF         9               // MU_MIN - MU_MAX
-// Channel parameters
-#define MIN_MSE_COUNT   20              // Min number of consecutive blocks with enough far end
-                                        // energy to compare channel estimates
-#define MIN_MSE_DIFF    29              // The ratio between adapted and stored channel to
-                                        // accept a new storage (0.8 in Q-MSE_RESOLUTION)
-#define MSE_RESOLUTION  5               // MSE parameter resolution
-#define RESOLUTION_CHANNEL16    12      // W16 Channel in Q-RESOLUTION_CHANNEL16
-#define RESOLUTION_CHANNEL32    28      // W32 Channel in Q-RESOLUTION_CHANNEL
-#define CHANNEL_VAD     16              // Minimum energy in frequency band to update channel
-// Suppression gain parameters: SUPGAIN_ parameters in Q-(RESOLUTION_SUPGAIN)
-#define RESOLUTION_SUPGAIN      8       // Channel in Q-(RESOLUTION_SUPGAIN)
-#define SUPGAIN_DEFAULT (1 << RESOLUTION_SUPGAIN)   // Default suppression gain
-#define SUPGAIN_ERROR_PARAM_A   3072    // Estimation error parameter (Maximum gain) (8 in Q8)
-#define SUPGAIN_ERROR_PARAM_B   1536    // Estimation error parameter (Gain before going down)
-#define SUPGAIN_ERROR_PARAM_D   SUPGAIN_DEFAULT // Estimation error parameter
-                                                // (Should be the same as Default) (1 in Q8)
-#define SUPGAIN_EPC_DT  200             // = SUPGAIN_ERROR_PARAM_C * ENERGY_DEV_TOL
-// Defines for "check delay estimation"
-#define CORR_WIDTH      31              // Number of samples to correlate over.
-#define CORR_MAX        16              // Maximum correlation offset
-#define CORR_MAX_BUF    63
-#define CORR_DEV        4
-#define CORR_MAX_LEVEL  20
-#define CORR_MAX_LOW    4
-#define CORR_BUF_LEN    (CORR_MAX << 1) + 1
-// Note that CORR_WIDTH + 2*CORR_MAX <= MAX_BUF_LEN
-
-#define ONE_Q14         (1 << 14)
-
-// NLP defines
-#define NLP_COMP_LOW    3277            // 0.2 in Q14
-#define NLP_COMP_HIGH   ONE_Q14         // 1 in Q14
+#include "aecm_defines.h"
 
 extern const WebRtc_Word16 WebRtcAecm_kSqrtHanning[];
 
@@ -368,8 +289,33 @@
     const WebRtc_Word16* nearendClean);
 extern InverseFFTAndWindow WebRtcAecm_InverseFFTAndWindow;
 
-// Initialization of the above function pointers for ARM Neon.
-void WebRtcAecm_InitNeon(void);
+// For the above function pointers, functions for generic platforms are declared
+// and defined as static in file aecm_core.c, while those for ARM Neon platforms
+// are declared below and defined in file aecm_core_neon.s.
+#if (defined WEBRTC_DETECT_ARM_NEON) || defined (WEBRTC_ARCH_ARM_NEON)
+void WebRtcAecm_WindowAndFFTNeon(WebRtc_Word16* fft,
+                                 const WebRtc_Word16* time_signal,
+                                 complex16_t* freq_signal,
+                                 int time_signal_scaling);
 
+void WebRtcAecm_InverseFFTAndWindowNeon(AecmCore_t* aecm,
+                                        WebRtc_Word16* fft,
+                                        complex16_t* efw,
+                                        WebRtc_Word16* output,
+                                        const WebRtc_Word16* nearendClean);
+
+void WebRtcAecm_CalcLinearEnergiesNeon(AecmCore_t* aecm,
+                                       const WebRtc_UWord16* far_spectrum,
+                                       WebRtc_Word32* echo_est,
+                                       WebRtc_UWord32* far_energy,
+                                       WebRtc_UWord32* echo_energy_adapt,
+                                       WebRtc_UWord32* echo_energy_stored);
+
+void WebRtcAecm_StoreAdaptiveChannelNeon(AecmCore_t* aecm,
+                                         const WebRtc_UWord16* far_spectrum,
+                                         WebRtc_Word32* echo_est);
+
+void WebRtcAecm_ResetAdaptiveChannelNeon(AecmCore_t* aecm);
+#endif
 
 #endif
diff --git a/src/modules/audio_processing/aecm/aecm_core_neon.S b/src/modules/audio_processing/aecm/aecm_core_neon.S
new file mode 100644
index 0000000..0708c5f
--- /dev/null
+++ b/src/modules/audio_processing/aecm/aecm_core_neon.S
@@ -0,0 +1,361 @@
+@
+@ Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+@
+@ Use of this source code is governed by a BSD-style license
+@ that can be found in the LICENSE file in the root of the source
+@ tree. An additional intellectual property rights grant can be found
+@ in the file PATENTS.  All contributing project authors may
+@ be found in the AUTHORS file in the root of the source tree.
+@
+
+@ aecm_core_neon.s
+@ This file contains some functions in AECM, optimized for ARM Neon
+@ platforms. Reference C code is in file aecm_core.c. Bit-exact.
+
+.arch armv7-a
+.fpu neon
+
+#include "aecm_defines.h"
+#include "aecm_core_neon_offsets.h"
+
+.extern WebRtcAecm_kSqrtHanning
+
+.global WebRtcAecm_WindowAndFFTNeon
+.global WebRtcAecm_InverseFFTAndWindowNeon
+.global WebRtcAecm_CalcLinearEnergiesNeon
+.global WebRtcAecm_StoreAdaptiveChannelNeon
+.global WebRtcAecm_ResetAdaptiveChannelNeon
+
+@ void WebRtcAecm_WindowAndFFTNeon(WebRtc_Word16* fft,
+@                                  const WebRtc_Word16* time_signal,
+@                                  complex16_t* freq_signal,
+@                                  int time_signal_scaling);
+.align  2
+WebRtcAecm_WindowAndFFTNeon:
+.fnstart
+.save {r4, r5, lr}
+  push {r4, r5, lr}
+
+  vdup.16 d16, r3
+  mov r5, r2                                 @ WebRtcSpl_ComplexIFFT changes r2.
+
+  vmov.i16 d21, #0                           @ For imaginary parts of |fft|.
+  vmov.i16 d27, #0                           @ For imaginary parts of |fft|.
+  ldr r2, =WebRtcAecm_kSqrtHanning
+  adr lr, kSqrtHanningReversed
+  add r4, r0, #(PART_LEN2 * 2)               @ &fft[PART_LEN2]
+  add r12, r1, #(PART_LEN * 2)               @ time_signal[PART_LEN]
+  mov r3, #(PART_LEN / 4)                    @ Loop counter, unrolled by 4
+
+LOOP_PART_LEN:
+  vld1.16 d0, [r1, :64]!                     @ time_signal[i]
+  vld1.16 d22, [r12, :64]!                   @ time_signal[i + PART_LEN]
+  vld1.16 d17, [r2, :64]!                    @ WebRtcAecm_kSqrtHanning[i]
+  vld1.16 d23, [lr, :64]!                    @ kSqrtHanningReversed[i]
+  vshl.s16  d18, d0, d16
+  vshl.s16  d22, d22, d16
+  vmull.s16 q9, d18, d17
+  vmull.s16 q12, d22, d23
+  subs r3, #1
+  vshrn.i32 d20, q9, #14
+  vshrn.i32 d26, q12, #14
+  vst2.16 {d20, d21}, [r0, :128]!            @ fft[j]
+  vst2.16 {d26, d27}, [r4, :128]!            @ fft[PART_LEN2 + j]
+  bgt LOOP_PART_LEN
+
+  sub r4, r0, #(PART_LEN2 * 2)               @ r4 points to fft[0]
+  mov r0, r4
+  mov r1, #7
+  bl  WebRtcSpl_ComplexBitReverse
+
+  mov r0, r4
+  mov r1, #7
+  mov r2, #1
+  bl  WebRtcSpl_ComplexFFT
+
+  mov r3, #(PART_LEN * 2 / 16)               @ Loop counter, unrolled by 16.
+
+LOOP_PART_LEN2:
+  @ freq_signal[i].real = fft[j];
+  @ freq_signal[i].imag = - fft[j+1];
+  vld2.16 {d20, d21, d22, d23}, [r4, :256]!
+  subs r3, #1
+  vneg.s16 d22, d22
+  vneg.s16 d23, d23
+  vst2.16 {d20, d21, d22, d23}, [r5, :256]!
+  bgt LOOP_PART_LEN2
+
+  pop {r4, r5, pc}
+.fnend
+
+@ void WebRtcAecm_InverseFFTAndWindowNeon(AecmCore_t* aecm,
+@                                         WebRtc_Word16* fft,
+@                                         complex16_t* efw,
+@                                         WebRtc_Word16* output,
+@                                         const WebRtc_Word16* nearendClean);
+.align  2
+WebRtcAecm_InverseFFTAndWindowNeon:
+.fnstart
+.save {r4-r8, lr}
+  push {r4-r8, lr}
+
+  @ Values of r0, r1, and r3 will change in WebRtcSpl_ComplexIFFT
+  @ and WebRtcSpl_ComplexBitReverse.
+  mov r4, r1
+  mov r5, r0
+  mov r7, r3
+
+  add r3, r1, #((PART_LEN4 - 6) * 2)         @ &fft[PART_LEN4 - 6]
+  mov r6, #(PART_LEN / 4)                    @ Loop counter, unrolled by 4
+  add r12, r2, #(PART_LEN * 4)               @ &efw[PART_LEN]
+  mov r8, #-16
+
+LOOP_PRE_IFFT:
+  vld2.16 {q10}, [r2, :128]!
+  vmov q11, q10
+  vneg.s16 d23, d23
+  vst2.16 {d22, d23}, [r1, :128]!
+  vrev64.16 q10, q10
+  subs r6, #1
+  vst2.16 {q10}, [r3], r8
+  bgt LOOP_PRE_IFFT
+
+  @  fft[PART_LEN2] = efw[PART_LEN].real;
+  @  fft[PART_LEN2 + 1] = -efw[PART_LEN].imag;
+  ldr r8, [r12]
+  ssub16 r2, r6, r8
+  mov r1, #(PART_LEN2 * 2)
+  pkhbt r8, r8, r2
+  str r8, [r4, r1]
+
+  mov r0, r4
+  mov r1, #7
+  bl  WebRtcSpl_ComplexBitReverse
+
+  mov r0, r4
+  mov r1, #7
+  mov r2, #1
+  bl  WebRtcSpl_ComplexIFFT
+
+  mov r1, r4
+  mov r2, r4
+  mov r3, #(PART_LEN * 2 / 8)                @ Loop counter, unrolled by 8.
+
+LOOP_GET_REAL_VALUES:
+  vld2.16 {q10, q11}, [r2, :256]!
+  subs r3, #1
+  vst1.16 {q10}, [r1, :128]!
+  bgt LOOP_GET_REAL_VALUES
+
+  ldr r6, =offset_aecm_outBuf
+  ldr r12, =offset_aecm_dfaCleanQDomain
+  ldr r8, [r5, r6]                           @ &aecm->outBuf[0]
+  ldrsh r2, [r5, r12]                        @ &aecm->dfaCleanQDomain[0]
+
+  adr r12, kSqrtHanningReversed
+  ldr r6, =WebRtcAecm_kSqrtHanning
+  rsb r0, r2, r0                             @ outCFFT - aecm->dfaCleanQDomain
+  vdup.32 q9, r0
+  add r0, r4, #(PART_LEN * 2)                @ &fft[PART_LEN]
+  mov r3, #(PART_LEN / 4)                    @ Loop counter, unrolled by 4.
+
+LOOP_POST_IFFT:
+  vld1.16 d16, [r4, :64]                     @ fft[i];
+  vld1.16 d17, [r6, :64]!                    @ WebRtcAecm_kSqrtHanning[i]
+  vld1.16 d20, [r8, :64]                     @ aecm->outBuf[i]
+  vmull.s16 q8, d16, d17
+  vmovl.s16 q10, d20
+  vrshr.s32 q8, q8, #14
+  vld1.16 d0, [r0, :64]!                     @ &fft[PART_LEN + i]
+  vshl.s32 q8, q8, q9
+  vld1.16 d1, [r12, :64]!                    @ kSqrtHanningReversed[i]
+  vadd.i32 q8, q10
+  vmull.s16 q0, d0, d1
+  vqshrn.s32 d16, q8, #0
+  vshr.s32 q0, q0, #14
+  vst1.16 d16, [r4, :64]!                    @ fft[i];
+  vshl.s32 q0, q0, q9
+  vst1.16 d16, [r7, :64]!                    @ output[i]
+  vqshrn.s32 d0, q0, #0
+  subs r3, #1
+  vst1.16 d0, [r8, :64]!                     @ aecm->outBuf[i]
+  bgt LOOP_POST_IFFT
+
+  ldr r3, =offset_aecm_xBuf
+  ldr r12, =offset_aecm_dBufNoisy
+  ldr r3, [r5, r3]                           @ &aecm->xBuf[0]
+  ldr r1, [r5, r12]                          @ &aecm->dBufNoisy[0]
+  add r2, r3, #(PART_LEN * 2)                @ &aecm->xBuf[PART_LEN]
+  add r0, r1, #(PART_LEN * 2)                @ &aecm->dBufNoisy[PART_LEN]
+  mov r4, #(PART_LEN / 16)                   @ Loop counter, unrolled by 16.
+
+LOOP_COPY:
+  vld1.16 {q10, q11}, [r2, :256]!
+  vld1.16 {q12, q13}, [r0, :256]!
+  subs r4, #1
+  vst1.16 {q10, q11}, [r3, :256]!
+  vst1.16 {q12, q13}, [r1, :256]!
+  bgt LOOP_COPY
+
+  ldr r2, [sp, #24]
+  cmp r2, #0                                  @ Check if (nearendClean != NULL).
+  beq END
+
+  ldr r4, =offset_aecm_dBufClean
+  ldr r1, [r5, r4]                            @ &aecm->dBufClean[0]
+  add r0, r1, #(PART_LEN * 2)                 @ &aecm->dBufClean[PART_LEN]
+
+  vld1.16 {q10, q11}, [r0, :256]!
+  vld1.16 {q12, q13}, [r0, :256]!
+  vst1.16 {q10, q11}, [r1, :256]!
+  vst1.16 {q12, q13}, [r1, :256]!
+  vld1.16 {q10, q11}, [r0, :256]!
+  vld1.16 {q12, q13}, [r0, :256]!
+  vst1.16 {q10, q11}, [r1, :256]!
+  vst1.16 {q12, q13}, [r1, :256]!
+
+END:
+  pop {r4-r8, pc}
+.fnend
+
+@ void WebRtcAecm_CalcLinearEnergiesNeon(AecmCore_t* aecm,
+@                                        const WebRtc_UWord16* far_spectrum,
+@                                        WebRtc_Word32* echo_est,
+@                                        WebRtc_UWord32* far_energy,
+@                                        WebRtc_UWord32* echo_energy_adapt,
+@                                        WebRtc_UWord32* echo_energy_stored);
+.align  2
+WebRtcAecm_CalcLinearEnergiesNeon:
+.fnstart
+.save {r4-r7}
+  push {r4-r7}
+
+  vmov.i32 q14, #0
+  vmov.i32 q8,  #0
+  vmov.i32 q9,  #0
+
+  ldr r7, =offset_aecm_channelStored
+  ldr r5, =offset_aecm_channelAdapt16
+
+  mov r4, r2
+  mov r12, #(PART_LEN / 8)                   @  Loop counter, unrolled by 8.
+  ldr r6, [r0, r7]
+  ldr r7, [r0, r5]
+
+LOOP_CALC_LINEAR_ENERGIES:
+  vld1.16 {d26, d27}, [r1]!                  @ far_spectrum[i]
+  vld1.16 {d24, d25}, [r6, :128]!            @ &aecm->channelStored[i]
+  vld1.16 {d0, d1}, [r7, :128]!              @ &aecm->channelAdapt16[i]
+  vaddw.u16 q14, q14, d26
+  vmull.u16 q10, d26, d24
+  vmull.u16 q11, d27, d25
+  vaddw.u16 q14, q14, d27
+  vmull.u16 q1, d26, d0
+  vst1.32 {q10, q11}, [r4, :256]!            @ &echo_est[i]
+  vadd.u32 q8, q10
+  vmull.u16 q2, d27, d1
+  vadd.u32 q8, q11
+  vadd.u32 q9, q1
+  subs r12, #1
+  vadd.u32 q9, q2
+  bgt LOOP_CALC_LINEAR_ENERGIES
+
+  vadd.u32 d28, d29
+  vpadd.u32 d28, d28
+  vmov.32 r12, d28[0]
+  vadd.u32 d18, d19
+  vpadd.u32 d18, d18
+  vmov.32 r5, d18[0]                         @ echo_energy_adapt_r
+  vadd.u32 d16, d17
+  vpadd.u32 d16, d16
+
+  ldrh  r1, [r1]                             @ far_spectrum[i]
+  add r12, r12, r1
+  str r12, [r3]                              @ far_energy
+  vmov.32 r2, d16[0]
+
+  ldrsh r12, [r6]                            @ aecm->channelStored[i]
+  ldrh  r6, [r7]                             @ aecm->channelAdapt16[i]
+  mul r0, r12, r1
+  mla r1, r6, r1, r5
+  add r2, r2, r0
+  str r0, [r4]                               @ echo_est[i]
+  ldr r4, [sp, #20]                          @ &echo_energy_stored
+  str r2, [r4]
+  ldr r3, [sp, #16]                          @ &echo_energy_adapt
+  str r1, [r3]
+
+  pop {r4-r7}
+  bx  lr
+.fnend
+
+@ void WebRtcAecm_StoreAdaptiveChannelNeon(AecmCore_t* aecm,
+@                                          const uint16_t* far_spectrum,
+@                                          int32_t* echo_est);
+.align  2
+WebRtcAecm_StoreAdaptiveChannelNeon:
+.fnstart
+  ldr r3, =offset_aecm_channelAdapt16
+  ldr r12, =offset_aecm_channelStored
+  ldr r3, [r0, r3]
+  ldr r0, [r0, r12]
+  mov r12, #(PART_LEN / 8)                   @ Loop counter, unrolled by 8.
+
+LOOP_STORE_ADAPTIVE_CHANNEL:
+  vld1.16 {d24, d25}, [r3, :128]!            @ &aecm->channelAdapt16[i]
+  vld1.16 {d26, d27}, [r1]!                  @ &far_spectrum[i]
+  vst1.16 {d24, d25}, [r0, :128]!            @ &aecm->channelStored[i]
+  vmull.u16 q10, d26, d24
+  vmull.u16 q11, d27, d25
+  vst1.16 {q10, q11}, [r2, :256]!            @ echo_est[i]
+  subs r12, #1
+  bgt LOOP_STORE_ADAPTIVE_CHANNEL
+
+  ldrsh  r12, [r3]
+  strh  r12, [r0]
+  ldrh  r1, [r1]
+  mul r3, r1, r12
+  str r3, [r2]
+
+  bx  lr
+.fnend
+
+@ void WebRtcAecm_ResetAdaptiveChannelNeon(AecmCore_t* aecm);
+.align  2
+WebRtcAecm_ResetAdaptiveChannelNeon:
+.fnstart
+  ldr r1, =offset_aecm_channelAdapt16
+  ldr r2, =offset_aecm_channelAdapt32
+  movw r3, #offset_aecm_channelStored
+  ldr r1, [r0, r1]                           @ &aecm->channelAdapt16[0]
+  ldr r2, [r0, r2]                           @ &aecm->channelAdapt32[0]
+  ldr r0, [r0, r3]                           @ &aecm->channelStored[0]
+  mov r3, #(PART_LEN / 8)                    @ Loop counter, unrolled by 8.
+
+LOOP_RESET_ADAPTIVE_CHANNEL:
+  vld1.16 {d24, d25}, [r0, :128]!
+  subs r3, #1
+  vst1.16 {d24, d25}, [r1, :128]!
+  vshll.s16 q10, d24, #16
+  vshll.s16 q11, d25, #16
+  vst1.16 {q10, q11}, [r2, :256]!
+  bgt LOOP_RESET_ADAPTIVE_CHANNEL
+
+  ldrh  r0, [r0]
+  strh  r0, [r1]
+  mov r0, r0, asl #16
+  str r0, [r2]
+
+  bx  lr
+.fnend
+
+  @ Square root of Hanning window in Q14. Compared to WebRtcAecm_kSqrtHanning,
+  @ the order was reversed and one useless element (0) was removed.
+.align  3
+kSqrtHanningReversed:
+  .hword 16384, 16373, 16354, 16325, 16286, 16237, 16179, 16111, 16034, 15947
+  .hword 15851, 15746, 15631, 15506, 15373, 15231, 15079, 14918, 14749, 14571
+  .hword 14384, 14189, 13985, 13773, 13553, 13325, 13089, 12845, 12594, 12335
+  .hword 12068, 11795, 11514, 11227, 10933, 10633, 10326, 10013, 9695, 9370
+  .hword 9040, 8705, 8364, 8019, 7668, 7313, 6954, 6591, 6224, 5853, 5478, 5101
+  .hword 4720, 4337, 3951, 3562, 3172, 2780, 2386, 1990, 1594, 1196, 798, 399
diff --git a/src/modules/audio_processing/aecm/aecm_core_neon.c b/src/modules/audio_processing/aecm/aecm_core_neon.c
index ab448b4..c06a678 100644
--- a/src/modules/audio_processing/aecm/aecm_core_neon.c
+++ b/src/modules/audio_processing/aecm/aecm_core_neon.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -16,7 +16,7 @@
 
 // Square root of Hanning window in Q14.
 static const WebRtc_Word16 kSqrtHanningReversed[] __attribute__((aligned(8))) = {
-  16384, 16373, 16354, 16325, 
+  16384, 16373, 16354, 16325,
   16286, 16237, 16179, 16111,
   16034, 15947, 15851, 15746,
   15631, 15506, 15373, 15231,
@@ -34,10 +34,10 @@
   1594,  1196,  798,   399
 };
 
-static void WindowAndFFTNeon(WebRtc_Word16* fft,
-                             const WebRtc_Word16* time_signal,
-                             complex16_t* freq_signal,
-                             int time_signal_scaling) {
+void WebRtcAecm_WindowAndFFTNeon(WebRtc_Word16* fft,
+                                 const WebRtc_Word16* time_signal,
+                                 complex16_t* freq_signal,
+                                 int time_signal_scaling) {
   int i, j;
 
   int16x4_t tmp16x4_scaling = vdup_n_s16(time_signal_scaling);
@@ -86,13 +86,12 @@
   }
 }
 
-static void InverseFFTAndWindowNeon(AecmCore_t* aecm,
-                                    WebRtc_Word16* fft,
-                                    complex16_t* efw,
-                                    WebRtc_Word16* output,
-                                    const WebRtc_Word16* nearendClean) {
+void WebRtcAecm_InverseFFTAndWindowNeon(AecmCore_t* aecm,
+                                        WebRtc_Word16* fft,
+                                        complex16_t* efw,
+                                        WebRtc_Word16* output,
+                                        const WebRtc_Word16* nearendClean) {
   int i, j, outCFFT;
-  WebRtc_Word32 tmp32no1;
 
   // Synthesis
   for (i = 0, j = 0; i < PART_LEN; i += 4, j += 8) {
@@ -187,18 +186,17 @@
   }
 }
 
-static void CalcLinearEnergiesNeon(AecmCore_t* aecm,
-                                   const WebRtc_UWord16* far_spectrum,
-                                   WebRtc_Word32* echo_est,
-                                   WebRtc_UWord32* far_energy,
-                                   WebRtc_UWord32* echo_energy_adapt,
-                                   WebRtc_UWord32* echo_energy_stored) {
+void WebRtcAecm_CalcLinearEnergiesNeon(AecmCore_t* aecm,
+                                       const WebRtc_UWord16* far_spectrum,
+                                       WebRtc_Word32* echo_est,
+                                       WebRtc_UWord32* far_energy,
+                                       WebRtc_UWord32* echo_energy_adapt,
+                                       WebRtc_UWord32* echo_energy_stored) {
   int i;
 
   register WebRtc_UWord32 far_energy_r;
   register WebRtc_UWord32 echo_energy_stored_r;
   register WebRtc_UWord32 echo_energy_adapt_r;
-  uint32x4_t tmp32x4_0;
 
   __asm__("vmov.i32 q14, #0" : : : "q14"); // far_energy
   __asm__("vmov.i32 q8,  #0" : : : "q8"); // echo_energy_stored
@@ -251,9 +249,9 @@
       aecm->channelAdapt16[i], far_spectrum[i]);
 }
 
-static void StoreAdaptiveChannelNeon(AecmCore_t* aecm,
-                                     const WebRtc_UWord16* far_spectrum,
-                                     WebRtc_Word32* echo_est) {
+void WebRtcAecm_StoreAdaptiveChannelNeon(AecmCore_t* aecm,
+                                         const WebRtc_UWord16* far_spectrum,
+                                         WebRtc_Word32* echo_est) {
   int i;
 
   // During startup we store the channel every block.
@@ -273,7 +271,7 @@
   echo_est[i] = WEBRTC_SPL_MUL_16_U16(aecm->channelStored[i], far_spectrum[i]);
 }
 
-static void ResetAdaptiveChannelNeon(AecmCore_t* aecm) {
+void WebRtcAecm_ResetAdaptiveChannelNeon(AecmCore_t* aecm) {
   int i;
 
   for (i = 0; i < PART_LEN - 7; i += 8) {
@@ -294,10 +292,3 @@
       (WebRtc_Word32)aecm->channelStored[i], 16);
 }
 
-void WebRtcAecm_InitNeon(void) {
-  WebRtcAecm_WindowAndFFT = WindowAndFFTNeon;
-  WebRtcAecm_InverseFFTAndWindow = InverseFFTAndWindowNeon;
-  WebRtcAecm_CalcLinearEnergies = CalcLinearEnergiesNeon;
-  WebRtcAecm_StoreAdaptiveChannel = StoreAdaptiveChannelNeon;
-  WebRtcAecm_ResetAdaptiveChannel = ResetAdaptiveChannelNeon;
-}
diff --git a/src/modules/audio_processing/aecm/aecm_core_neon_offsets.c b/src/modules/audio_processing/aecm/aecm_core_neon_offsets.c
new file mode 100644
index 0000000..b614977
--- /dev/null
+++ b/src/modules/audio_processing/aecm/aecm_core_neon_offsets.c
@@ -0,0 +1,26 @@
+
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "aecm_core.h"
+
+#include <stddef.h>
+
+// Define offset variables that will be compiled and abstracted to constant
+// defines, which will then only be used in ARM assembly code.
+int offset_aecm_dfaCleanQDomain = offsetof(AecmCore_t, dfaCleanQDomain);
+int offset_aecm_outBuf = offsetof(AecmCore_t, outBuf);
+int offset_aecm_xBuf = offsetof(AecmCore_t, xBuf);
+int offset_aecm_dBufNoisy = offsetof(AecmCore_t, dBufNoisy);
+int offset_aecm_dBufClean = offsetof(AecmCore_t, dBufClean);
+int offset_aecm_channelStored = offsetof(AecmCore_t, channelStored);
+int offset_aecm_channelAdapt16 = offsetof(AecmCore_t, channelAdapt16);
+int offset_aecm_channelAdapt32 = offsetof(AecmCore_t, channelAdapt32);
+
diff --git a/src/modules/audio_processing/aecm/aecm_defines.h b/src/modules/audio_processing/aecm/aecm_defines.h
new file mode 100644
index 0000000..437cbf2
--- /dev/null
+++ b/src/modules/audio_processing/aecm/aecm_defines.h
@@ -0,0 +1,98 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AECM_AECM_DEFINES_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_AECM_AECM_DEFINES_H_
+
+#define AECM_DYNAMIC_Q                 /* Turn on/off dynamic Q-domain. */
+
+/* #define AECM_SHORT                   For 32 sample partition length. */
+
+/* Algorithm parameters */
+#define FRAME_LEN       80             /* Total frame length, 10 ms. */
+
+#ifdef AECM_SHORT
+#define PART_LEN        32             /* Length of partition. */
+#define PART_LEN_SHIFT  6              /* Length of (PART_LEN * 2) in base 2. */
+#else
+#define PART_LEN        64             /* Length of partition. */
+#define PART_LEN_SHIFT  7              /* Length of (PART_LEN * 2) in base 2. */
+#endif
+
+#define PART_LEN1       (PART_LEN + 1)  /* Unique fft coefficients. */
+#define PART_LEN2       (PART_LEN << 1) /* Length of partition * 2. */
+#define PART_LEN4       (PART_LEN << 2) /* Length of partition * 4. */
+#define FAR_BUF_LEN     PART_LEN4       /* Length of buffers. */
+#define MAX_DELAY       100
+
+/* Counter parameters */
+#ifdef AECM_SHORT
+#define CONV_LEN        1024         /* Convergence length used at startup. */
+#else
+#define CONV_LEN        512          /* Convergence length used at startup. */
+#endif
+#define CONV_LEN2       (CONV_LEN << 1) /* Used at startup. */
+
+/* Energy parameters */
+#define MAX_BUF_LEN     64           /* History length of energy signals. */
+#define FAR_ENERGY_MIN  1025         /* Lowest Far energy level: At least 2 */
+                                     /* in energy. */
+#define FAR_ENERGY_DIFF 929          /* Allowed difference between max */
+                                     /* and min. */
+#define ENERGY_DEV_OFFSET       0    /* The energy error offset in Q8. */
+#define ENERGY_DEV_TOL  400          /* The energy estimation tolerance (Q8). */
+#define FAR_ENERGY_VAD_REGION   230  /* Far VAD tolerance region. */
+
+/* Stepsize parameters */
+#define MU_MIN          10          /* Min stepsize 2^-MU_MIN (far end energy */
+                                    /* dependent). */
+#define MU_MAX          1           /* Max stepsize 2^-MU_MAX (far end energy */
+                                    /* dependent). */
+#define MU_DIFF         9           /* MU_MIN - MU_MAX */
+
+/* Channel parameters */
+#define MIN_MSE_COUNT   20 /* Min number of consecutive blocks with enough */
+                           /* far end energy to compare channel estimates. */
+#define MIN_MSE_DIFF    29 /* The ratio between adapted and stored channel to */
+                           /* accept a new storage (0.8 in Q-MSE_RESOLUTION). */
+#define MSE_RESOLUTION  5           /* MSE parameter resolution. */
+#define RESOLUTION_CHANNEL16    12  /* W16 Channel in Q-RESOLUTION_CHANNEL16. */
+#define RESOLUTION_CHANNEL32    28  /* W32 Channel in Q-RESOLUTION_CHANNEL. */
+#define CHANNEL_VAD     16          /* Minimum energy in frequency band */
+                                    /* to update channel. */
+
+/* Suppression gain parameters: SUPGAIN parameters in Q-(RESOLUTION_SUPGAIN). */
+#define RESOLUTION_SUPGAIN      8     /* Channel in Q-(RESOLUTION_SUPGAIN). */
+#define SUPGAIN_DEFAULT (1 << RESOLUTION_SUPGAIN)  /* Default. */
+#define SUPGAIN_ERROR_PARAM_A   3072  /* Estimation error parameter */
+                                      /* (Maximum gain) (8 in Q8). */
+#define SUPGAIN_ERROR_PARAM_B   1536  /* Estimation error parameter */
+                                      /* (Gain before going down). */
+#define SUPGAIN_ERROR_PARAM_D   SUPGAIN_DEFAULT /* Estimation error parameter */
+                                /* (Should be the same as Default) (1 in Q8). */
+#define SUPGAIN_EPC_DT  200     /* SUPGAIN_ERROR_PARAM_C * ENERGY_DEV_TOL */
+
+/* Defines for "check delay estimation" */
+#define CORR_WIDTH      31      /* Number of samples to correlate over. */
+#define CORR_MAX        16      /* Maximum correlation offset. */
+#define CORR_MAX_BUF    63
+#define CORR_DEV        4
+#define CORR_MAX_LEVEL  20
+#define CORR_MAX_LOW    4
+#define CORR_BUF_LEN    (CORR_MAX << 1) + 1
+/* Note that CORR_WIDTH + 2*CORR_MAX <= MAX_BUF_LEN. */
+
+#define ONE_Q14         (1 << 14)
+
+/* NLP defines */
+#define NLP_COMP_LOW    3277    /* 0.2 in Q14 */
+#define NLP_COMP_HIGH   ONE_Q14 /* 1 in Q14 */
+
+#endif
diff --git a/src/modules/audio_processing/aecm/echo_control_mobile.c b/src/modules/audio_processing/aecm/echo_control_mobile.c
index 49798b7..e9e9838 100644
--- a/src/modules/audio_processing/aecm/echo_control_mobile.c
+++ b/src/modules/audio_processing/aecm/echo_control_mobile.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -339,8 +339,11 @@
     {
         if (nearendClean == NULL)
         {
-            memcpy(out, nearendNoisy, sizeof(short) * nrOfSamples);
-        } else
+            if (out != nearendNoisy)
+            {
+                memcpy(out, nearendNoisy, sizeof(short) * nrOfSamples);
+            }
+        } else if (out != nearendClean)
         {
             memcpy(out, nearendClean, sizeof(short) * nrOfSamples);
         }
@@ -647,10 +650,12 @@
     aecmob_t *aecm = aecmInst;
     const WebRtc_Word16* echo_path_ptr = echo_path;
 
-    if ((aecm == NULL) || (echo_path == NULL))
-    {
-        aecm->lastError = AECM_NULL_POINTER_ERROR;
-        return -1;
+    if (aecmInst == NULL) {
+      return -1;
+    }
+    if (echo_path == NULL) {
+      aecm->lastError = AECM_NULL_POINTER_ERROR;
+      return -1;
     }
     if (size_bytes != WebRtcAecm_echo_path_size_bytes())
     {
@@ -676,10 +681,12 @@
     aecmob_t *aecm = aecmInst;
     WebRtc_Word16* echo_path_ptr = echo_path;
 
-    if ((aecm == NULL) || (echo_path == NULL))
-    {
-        aecm->lastError = AECM_NULL_POINTER_ERROR;
-        return -1;
+    if (aecmInst == NULL) {
+      return -1;
+    }
+    if (echo_path == NULL) {
+      aecm->lastError = AECM_NULL_POINTER_ERROR;
+      return -1;
     }
     if (size_bytes != WebRtcAecm_echo_path_size_bytes())
     {
@@ -702,25 +709,6 @@
     return (PART_LEN1 * sizeof(WebRtc_Word16));
 }
 
-WebRtc_Word32 WebRtcAecm_get_version(WebRtc_Word8 *versionStr, WebRtc_Word16 len)
-{
-    const char version[] = "AECM 1.2.0";
-    const short versionLen = (short)strlen(version) + 1; // +1 for null-termination
-
-    if (versionStr == NULL)
-    {
-        return -1;
-    }
-
-    if (versionLen > len)
-    {
-        return -1;
-    }
-
-    strncpy(versionStr, version, versionLen);
-    return 0;
-}
-
 WebRtc_Word32 WebRtcAecm_get_error_code(void *aecmInst)
 {
     aecmob_t *aecm = aecmInst;
diff --git a/src/modules/audio_processing/aecm/include/echo_control_mobile.h b/src/modules/audio_processing/aecm/include/echo_control_mobile.h
new file mode 100644
index 0000000..da0ad86
--- /dev/null
+++ b/src/modules/audio_processing/aecm/include/echo_control_mobile.h
@@ -0,0 +1,233 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AECM_INCLUDE_ECHO_CONTROL_MOBILE_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_AECM_INCLUDE_ECHO_CONTROL_MOBILE_H_
+
+#include "typedefs.h"
+
+enum {
+    AecmFalse = 0,
+    AecmTrue
+};
+
+// Errors
+#define AECM_UNSPECIFIED_ERROR           12000
+#define AECM_UNSUPPORTED_FUNCTION_ERROR  12001
+#define AECM_UNINITIALIZED_ERROR         12002
+#define AECM_NULL_POINTER_ERROR          12003
+#define AECM_BAD_PARAMETER_ERROR         12004
+
+// Warnings
+#define AECM_BAD_PARAMETER_WARNING       12100
+
+typedef struct {
+    WebRtc_Word16 cngMode;            // AECM_FALSE, AECM_TRUE (default)
+    WebRtc_Word16 echoMode;           // 0, 1, 2, 3 (default), 4
+} AecmConfig;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+ * Allocates the memory needed by the AECM. The memory needs to be
+ * initialized separately using the WebRtcAecm_Init() function.
+ *
+ * Inputs                           Description
+ * -------------------------------------------------------------------
+ * void **aecmInst                  Pointer to the AECM instance to be
+ *                                  created and initialized
+ *
+ * Outputs                          Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32 return             0: OK
+ *                                 -1: error
+ */
+WebRtc_Word32 WebRtcAecm_Create(void **aecmInst);
+
+/*
+ * This function releases the memory allocated by WebRtcAecm_Create()
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void *aecmInst               Pointer to the AECM instance
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32  return        0: OK
+ *                             -1: error
+ */
+WebRtc_Word32 WebRtcAecm_Free(void *aecmInst);
+
+/*
+ * Initializes an AECM instance.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void           *aecmInst     Pointer to the AECM instance
+ * WebRtc_Word32  sampFreq      Sampling frequency of data
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32  return        0: OK
+ *                             -1: error
+ */
+WebRtc_Word32 WebRtcAecm_Init(void* aecmInst,
+                              WebRtc_Word32 sampFreq);
+
+/*
+ * Inserts an 80 or 160 sample block of data into the farend buffer.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void           *aecmInst     Pointer to the AECM instance
+ * WebRtc_Word16  *farend       In buffer containing one frame of
+ *                              farend signal
+ * WebRtc_Word16  nrOfSamples   Number of samples in farend buffer
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32  return        0: OK
+ *                             -1: error
+ */
+WebRtc_Word32 WebRtcAecm_BufferFarend(void* aecmInst,
+                                      const WebRtc_Word16* farend,
+                                      WebRtc_Word16 nrOfSamples);
+
+/*
+ * Runs the AECM on an 80 or 160 sample blocks of data.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void           *aecmInst      Pointer to the AECM instance
+ * WebRtc_Word16  *nearendNoisy  In buffer containing one frame of
+ *                               reference nearend+echo signal. If
+ *                               noise reduction is active, provide
+ *                               the noisy signal here.
+ * WebRtc_Word16  *nearendClean  In buffer containing one frame of
+ *                               nearend+echo signal. If noise
+ *                               reduction is active, provide the
+ *                               clean signal here. Otherwise pass a
+ *                               NULL pointer.
+ * WebRtc_Word16  nrOfSamples    Number of samples in nearend buffer
+ * WebRtc_Word16  msInSndCardBuf Delay estimate for sound card and
+ *                               system buffers
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word16  *out          Out buffer, one frame of processed nearend
+ * WebRtc_Word32  return        0: OK
+ *                             -1: error
+ */
+WebRtc_Word32 WebRtcAecm_Process(void* aecmInst,
+                                 const WebRtc_Word16* nearendNoisy,
+                                 const WebRtc_Word16* nearendClean,
+                                 WebRtc_Word16* out,
+                                 WebRtc_Word16 nrOfSamples,
+                                 WebRtc_Word16 msInSndCardBuf);
+
+/*
+ * This function enables the user to set certain parameters on-the-fly
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void     *aecmInst           Pointer to the AECM instance
+ * AecmConfig config            Config instance that contains all
+ *                              properties to be set
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32  return        0: OK
+ *                             -1: error
+ */
+WebRtc_Word32 WebRtcAecm_set_config(void* aecmInst,
+                                    AecmConfig config);
+
+/*
+ * This function enables the user to set certain parameters on-the-fly
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void *aecmInst               Pointer to the AECM instance
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * AecmConfig  *config          Pointer to the config instance that
+ *                              all properties will be written to
+ * WebRtc_Word32  return        0: OK
+ *                             -1: error
+ */
+WebRtc_Word32 WebRtcAecm_get_config(void *aecmInst,
+                                    AecmConfig *config);
+
+/*
+ * This function enables the user to set the echo path on-the-fly.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void*        aecmInst        Pointer to the AECM instance
+ * void*        echo_path       Pointer to the echo path to be set
+ * size_t       size_bytes      Size in bytes of the echo path
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32  return        0: OK
+ *                             -1: error
+ */
+WebRtc_Word32 WebRtcAecm_InitEchoPath(void* aecmInst,
+                                      const void* echo_path,
+                                      size_t size_bytes);
+
+/*
+ * This function enables the user to get the currently used echo path
+ * on-the-fly
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void*        aecmInst        Pointer to the AECM instance
+ * void*        echo_path       Pointer to echo path
+ * size_t       size_bytes      Size in bytes of the echo path
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32  return        0: OK
+ *                             -1: error
+ */
+WebRtc_Word32 WebRtcAecm_GetEchoPath(void* aecmInst,
+                                     void* echo_path,
+                                     size_t size_bytes);
+
+/*
+ * This function enables the user to get the echo path size in bytes
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * size_t       return           : size in bytes
+ */
+size_t WebRtcAecm_echo_path_size_bytes();
+
+/*
+ * Gets the last error code.
+ *
+ * Inputs                       Description
+ * -------------------------------------------------------------------
+ * void         *aecmInst       Pointer to the AECM instance
+ *
+ * Outputs                      Description
+ * -------------------------------------------------------------------
+ * WebRtc_Word32  return        11000-11100: error code
+ */
+WebRtc_Word32 WebRtcAecm_get_error_code(void *aecmInst);
+
+#ifdef __cplusplus
+}
+#endif
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_AECM_INCLUDE_ECHO_CONTROL_MOBILE_H_
diff --git a/src/modules/audio_processing/aecm/interface/echo_control_mobile.h b/src/modules/audio_processing/aecm/interface/echo_control_mobile.h
deleted file mode 100644
index 30bea7a..0000000
--- a/src/modules/audio_processing/aecm/interface/echo_control_mobile.h
+++ /dev/null
@@ -1,250 +0,0 @@
-/*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AECM_MAIN_INTERFACE_ECHO_CONTROL_MOBILE_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_AECM_MAIN_INTERFACE_ECHO_CONTROL_MOBILE_H_
-
-#include "typedefs.h"
-
-enum {
-    AecmFalse = 0,
-    AecmTrue
-};
-
-// Errors
-#define AECM_UNSPECIFIED_ERROR           12000
-#define AECM_UNSUPPORTED_FUNCTION_ERROR  12001
-#define AECM_UNINITIALIZED_ERROR         12002
-#define AECM_NULL_POINTER_ERROR          12003
-#define AECM_BAD_PARAMETER_ERROR         12004
-
-// Warnings
-#define AECM_BAD_PARAMETER_WARNING       12100
-
-typedef struct {
-    WebRtc_Word16 cngMode;            // AECM_FALSE, AECM_TRUE (default)
-    WebRtc_Word16 echoMode;           // 0, 1, 2, 3 (default), 4
-} AecmConfig;
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-/*
- * Allocates the memory needed by the AECM. The memory needs to be
- * initialized separately using the WebRtcAecm_Init() function.
- *
- * Inputs                           Description
- * -------------------------------------------------------------------
- * void **aecmInst                  Pointer to the AECM instance to be
- *                                  created and initialized
- *
- * Outputs                          Description
- * -------------------------------------------------------------------
- * WebRtc_Word32 return             0: OK
- *                                 -1: error
- */
-WebRtc_Word32 WebRtcAecm_Create(void **aecmInst);
-
-/*
- * This function releases the memory allocated by WebRtcAecm_Create()
- *
- * Inputs                       Description
- * -------------------------------------------------------------------
- * void *aecmInst               Pointer to the AECM instance
- *
- * Outputs                      Description
- * -------------------------------------------------------------------
- * WebRtc_Word32  return        0: OK
- *                             -1: error
- */
-WebRtc_Word32 WebRtcAecm_Free(void *aecmInst);
-
-/*
- * Initializes an AECM instance.
- *
- * Inputs                       Description
- * -------------------------------------------------------------------
- * void           *aecmInst     Pointer to the AECM instance
- * WebRtc_Word32  sampFreq      Sampling frequency of data
- *
- * Outputs                      Description
- * -------------------------------------------------------------------
- * WebRtc_Word32  return        0: OK
- *                             -1: error
- */
-WebRtc_Word32 WebRtcAecm_Init(void* aecmInst,
-                              WebRtc_Word32 sampFreq);
-
-/*
- * Inserts an 80 or 160 sample block of data into the farend buffer.
- *
- * Inputs                       Description
- * -------------------------------------------------------------------
- * void           *aecmInst     Pointer to the AECM instance
- * WebRtc_Word16  *farend       In buffer containing one frame of
- *                              farend signal
- * WebRtc_Word16  nrOfSamples   Number of samples in farend buffer
- *
- * Outputs                      Description
- * -------------------------------------------------------------------
- * WebRtc_Word32  return        0: OK
- *                             -1: error
- */
-WebRtc_Word32 WebRtcAecm_BufferFarend(void* aecmInst,
-                                      const WebRtc_Word16* farend,
-                                      WebRtc_Word16 nrOfSamples);
-
-/*
- * Runs the AECM on an 80 or 160 sample blocks of data.
- *
- * Inputs                       Description
- * -------------------------------------------------------------------
- * void           *aecmInst      Pointer to the AECM instance
- * WebRtc_Word16  *nearendNoisy  In buffer containing one frame of
- *                               reference nearend+echo signal. If
- *                               noise reduction is active, provide
- *                               the noisy signal here.
- * WebRtc_Word16  *nearendClean  In buffer containing one frame of
- *                               nearend+echo signal. If noise
- *                               reduction is active, provide the
- *                               clean signal here. Otherwise pass a
- *                               NULL pointer.
- * WebRtc_Word16  nrOfSamples    Number of samples in nearend buffer
- * WebRtc_Word16  msInSndCardBuf Delay estimate for sound card and
- *                               system buffers
- *
- * Outputs                      Description
- * -------------------------------------------------------------------
- * WebRtc_Word16  *out          Out buffer, one frame of processed nearend
- * WebRtc_Word32  return        0: OK
- *                             -1: error
- */
-WebRtc_Word32 WebRtcAecm_Process(void* aecmInst,
-                                 const WebRtc_Word16* nearendNoisy,
-                                 const WebRtc_Word16* nearendClean,
-                                 WebRtc_Word16* out,
-                                 WebRtc_Word16 nrOfSamples,
-                                 WebRtc_Word16 msInSndCardBuf);
-
-/*
- * This function enables the user to set certain parameters on-the-fly
- *
- * Inputs                       Description
- * -------------------------------------------------------------------
- * void     *aecmInst           Pointer to the AECM instance
- * AecmConfig config            Config instance that contains all
- *                              properties to be set
- *
- * Outputs                      Description
- * -------------------------------------------------------------------
- * WebRtc_Word32  return        0: OK
- *                             -1: error
- */
-WebRtc_Word32 WebRtcAecm_set_config(void* aecmInst,
-                                    AecmConfig config);
-
-/*
- * This function enables the user to set certain parameters on-the-fly
- *
- * Inputs                       Description
- * -------------------------------------------------------------------
- * void *aecmInst               Pointer to the AECM instance
- *
- * Outputs                      Description
- * -------------------------------------------------------------------
- * AecmConfig  *config          Pointer to the config instance that
- *                              all properties will be written to
- * WebRtc_Word32  return        0: OK
- *                             -1: error
- */
-WebRtc_Word32 WebRtcAecm_get_config(void *aecmInst,
-                                    AecmConfig *config);
-
-/*
- * This function enables the user to set the echo path on-the-fly.
- *
- * Inputs                       Description
- * -------------------------------------------------------------------
- * void*        aecmInst        Pointer to the AECM instance
- * void*        echo_path       Pointer to the echo path to be set
- * size_t       size_bytes      Size in bytes of the echo path
- *
- * Outputs                      Description
- * -------------------------------------------------------------------
- * WebRtc_Word32  return        0: OK
- *                             -1: error
- */
-WebRtc_Word32 WebRtcAecm_InitEchoPath(void* aecmInst,
-                                      const void* echo_path,
-                                      size_t size_bytes);
-
-/*
- * This function enables the user to get the currently used echo path
- * on-the-fly
- *
- * Inputs                       Description
- * -------------------------------------------------------------------
- * void*        aecmInst        Pointer to the AECM instance
- * void*        echo_path       Pointer to echo path
- * size_t       size_bytes      Size in bytes of the echo path
- *
- * Outputs                      Description
- * -------------------------------------------------------------------
- * WebRtc_Word32  return        0: OK
- *                             -1: error
- */
-WebRtc_Word32 WebRtcAecm_GetEchoPath(void* aecmInst,
-                                     void* echo_path,
-                                     size_t size_bytes);
-
-/*
- * This function enables the user to get the echo path size in bytes
- *
- * Outputs                      Description
- * -------------------------------------------------------------------
- * size_t       return           : size in bytes
- */
-size_t WebRtcAecm_echo_path_size_bytes();
-
-/*
- * Gets the last error code.
- *
- * Inputs                       Description
- * -------------------------------------------------------------------
- * void         *aecmInst       Pointer to the AECM instance
- *
- * Outputs                      Description
- * -------------------------------------------------------------------
- * WebRtc_Word32  return        11000-11100: error code
- */
-WebRtc_Word32 WebRtcAecm_get_error_code(void *aecmInst);
-
-/*
- * Gets a version string
- *
- * Inputs                       Description
- * -------------------------------------------------------------------
- * char           *versionStr   Pointer to a string array
- * WebRtc_Word16  len           The maximum length of the string
- *
- * Outputs                      Description
- * -------------------------------------------------------------------
- * WebRtc_Word8   *versionStr   Pointer to a string array
- * WebRtc_Word32  return        0: OK
- *                             -1: error
- */
-WebRtc_Word32 WebRtcAecm_get_version(WebRtc_Word8 *versionStr,
-                                     WebRtc_Word16 len);
-
-#ifdef __cplusplus
-}
-#endif
-#endif /* WEBRTC_MODULES_AUDIO_PROCESSING_AECM_MAIN_INTERFACE_ECHO_CONTROL_MOBILE_H_ */
diff --git a/src/modules/audio_processing/agc/Android.mk b/src/modules/audio_processing/agc/Android.mk
index 97447cb..9ce0e5c 100644
--- a/src/modules/audio_processing/agc/Android.mk
+++ b/src/modules/audio_processing/agc/Android.mk
@@ -24,6 +24,7 @@
 LOCAL_CFLAGS := \
     $(MY_WEBRTC_COMMON_DEFS)
 
+# digital_agc.c: error: comparison of address of 'stt->vadFarend' equal to a null pointer
 LOCAL_CLANG_CFLAGS := \
     -Wno-tautological-pointer-compare
 
@@ -35,7 +36,7 @@
 LOCAL_CFLAGS_mips64 := $(MY_WEBRTC_COMMON_DEFS_mips64)
 
 LOCAL_C_INCLUDES := \
-    $(LOCAL_PATH)/interface \
+    $(LOCAL_PATH)/include \
     $(LOCAL_PATH)/../../.. \
     $(LOCAL_PATH)/../../../common_audio/signal_processing/include
 
diff --git a/src/modules/audio_processing/agc/agc.gypi b/src/modules/audio_processing/agc/agc.gypi
index 78288b7..f96f237 100644
--- a/src/modules/audio_processing/agc/agc.gypi
+++ b/src/modules/audio_processing/agc/agc.gypi
@@ -1,4 +1,4 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 #
 # Use of this source code is governed by a BSD-style license
 # that can be found in the LICENSE file in the root of the source
@@ -15,15 +15,15 @@
         '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
       ],
       'include_dirs': [
-        'interface',
+        'include',
       ],
       'direct_dependent_settings': {
         'include_dirs': [
-          'interface',
+          'include',
         ],
       },
       'sources': [
-        'interface/gain_control.h',
+        'include/gain_control.h',
         'analog_agc.c',
         'analog_agc.h',
         'digital_agc.c',
diff --git a/src/modules/audio_processing/agc/analog_agc.c b/src/modules/audio_processing/agc/analog_agc.c
index 40c5566..d60b4b9 100644
--- a/src/modules/audio_processing/agc/analog_agc.c
+++ b/src/modules/audio_processing/agc/analog_agc.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -1650,9 +1650,10 @@
     {
         stt->Rxx16w32_array[0][i] = 0;
     }
-    for (i = 0; i < 20; i++)
+    for (i = 0; i < 10; i++)
     {
         stt->env[0][i] = 0;
+        stt->env[1][i] = 0;
     }
     stt->inQueue = 0;
 
@@ -1692,22 +1693,3 @@
         return 0;
     }
 }
-
-int WebRtcAgc_Version(WebRtc_Word8 *versionStr, WebRtc_Word16 length)
-{
-    const WebRtc_Word8 version[] = "AGC 1.7.0";
-    const WebRtc_Word16 versionLen = (WebRtc_Word16)strlen(version) + 1;
-
-    if (versionStr == NULL)
-    {
-        return -1;
-    }
-
-    if (versionLen > length)
-    {
-        return -1;
-    }
-
-    strncpy(versionStr, version, versionLen);
-    return 0;
-}
diff --git a/src/modules/audio_processing/agc/include/gain_control.h b/src/modules/audio_processing/agc/include/gain_control.h
new file mode 100644
index 0000000..8af5c71
--- /dev/null
+++ b/src/modules/audio_processing/agc/include/gain_control.h
@@ -0,0 +1,259 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AGC_INCLUDE_GAIN_CONTROL_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_AGC_INCLUDE_GAIN_CONTROL_H_
+
+#include "typedefs.h"
+
+// Errors
+#define AGC_UNSPECIFIED_ERROR           18000
+#define AGC_UNSUPPORTED_FUNCTION_ERROR  18001
+#define AGC_UNINITIALIZED_ERROR         18002
+#define AGC_NULL_POINTER_ERROR          18003
+#define AGC_BAD_PARAMETER_ERROR         18004
+
+// Warnings
+#define AGC_BAD_PARAMETER_WARNING       18050
+
+enum
+{
+    kAgcModeUnchanged,
+    kAgcModeAdaptiveAnalog,
+    kAgcModeAdaptiveDigital,
+    kAgcModeFixedDigital
+};
+
+enum
+{
+    kAgcFalse = 0,
+    kAgcTrue
+};
+
+typedef struct
+{
+    WebRtc_Word16 targetLevelDbfs;   // default 3 (-3 dBOv)
+    WebRtc_Word16 compressionGaindB; // default 9 dB
+    WebRtc_UWord8 limiterEnable;     // default kAgcTrue (on)
+} WebRtcAgc_config_t;
+
+#if defined(__cplusplus)
+extern "C"
+{
+#endif
+
+/*
+ * This function processes a 10/20ms frame of far-end speech to determine
+ * if there is active speech. Far-end speech length can be either 10ms or
+ * 20ms. The length of the input speech vector must be given in samples
+ * (80/160 when FS=8000, and 160/320 when FS=16000 or FS=32000).
+ *
+ * Input:
+ *      - agcInst           : AGC instance.
+ *      - inFar             : Far-end input speech vector (10 or 20ms)
+ *      - samples           : Number of samples in input vector
+ *
+ * Return value:
+ *                          :  0 - Normal operation.
+ *                          : -1 - Error
+ */
+int WebRtcAgc_AddFarend(void* agcInst,
+                        const WebRtc_Word16* inFar,
+                        WebRtc_Word16 samples);
+
+/*
+ * This function processes a 10/20ms frame of microphone speech to determine
+ * if there is active speech. Microphone speech length can be either 10ms or
+ * 20ms. The length of the input speech vector must be given in samples
+ * (80/160 when FS=8000, and 160/320 when FS=16000 or FS=32000). For very low
+ * input levels, the input signal is increased in level by multiplying and
+ * overwriting the samples in inMic[].
+ *
+ * This function should be called before any further processing of the
+ * near-end microphone signal.
+ *
+ * Input:
+ *      - agcInst           : AGC instance.
+ *      - inMic             : Microphone input speech vector (10 or 20 ms) for
+ *                            L band
+ *      - inMic_H           : Microphone input speech vector (10 or 20 ms) for
+ *                            H band
+ *      - samples           : Number of samples in input vector
+ *
+ * Return value:
+ *                          :  0 - Normal operation.
+ *                          : -1 - Error
+ */
+int WebRtcAgc_AddMic(void* agcInst,
+                     WebRtc_Word16* inMic,
+                     WebRtc_Word16* inMic_H,
+                     WebRtc_Word16 samples);
+
+/*
+ * This function replaces the analog microphone with a virtual one.
+ * It is a digital gain applied to the input signal and is used in the
+ * agcAdaptiveDigital mode where no microphone level is adjustable.
+ * Microphone speech length can be either 10ms or 20ms. The length of the
+ * input speech vector must be given in samples (80/160 when FS=8000, and
+ * 160/320 when FS=16000 or FS=32000).
+ *
+ * Input:
+ *      - agcInst           : AGC instance.
+ *      - inMic             : Microphone input speech vector for (10 or 20 ms)
+ *                            L band
+ *      - inMic_H           : Microphone input speech vector for (10 or 20 ms)
+ *                            H band
+ *      - samples           : Number of samples in input vector
+ *      - micLevelIn        : Input level of microphone (static)
+ *
+ * Output:
+ *      - inMic             : Microphone output after processing (L band)
+ *      - inMic_H           : Microphone output after processing (H band)
+ *      - micLevelOut       : Adjusted microphone level after processing
+ *
+ * Return value:
+ *                          :  0 - Normal operation.
+ *                          : -1 - Error
+ */
+int WebRtcAgc_VirtualMic(void* agcInst,
+                         WebRtc_Word16* inMic,
+                         WebRtc_Word16* inMic_H,
+                         WebRtc_Word16 samples,
+                         WebRtc_Word32 micLevelIn,
+                         WebRtc_Word32* micLevelOut);
+
+/*
+ * This function processes a 10/20ms frame and adjusts (normalizes) the gain
+ * both analog and digitally. The gain adjustments are done only during
+ * active periods of speech. The input speech length can be either 10ms or
+ * 20ms and the output is of the same length. The length of the speech
+ * vectors must be given in samples (80/160 when FS=8000, and 160/320 when
+ * FS=16000 or FS=32000). The echo parameter can be used to ensure the AGC will
+ * not adjust upward in the presence of echo.
+ *
+ * This function should be called after processing the near-end microphone
+ * signal, in any case after any echo cancellation.
+ *
+ * Input:
+ *      - agcInst           : AGC instance
+ *      - inNear            : Near-end input speech vector (10 or 20 ms) for
+ *                            L band
+ *      - inNear_H          : Near-end input speech vector (10 or 20 ms) for
+ *                            H band
+ *      - samples           : Number of samples in input/output vector
+ *      - inMicLevel        : Current microphone volume level
+ *      - echo              : Set to 0 if the signal passed to add_mic is
+ *                            almost certainly free of echo; otherwise set
+ *                            to 1. If you have no information regarding echo
+ *                            set to 0.
+ *
+ * Output:
+ *      - outMicLevel       : Adjusted microphone volume level
+ *      - out               : Gain-adjusted near-end speech vector (L band)
+ *                          : May be the same vector as the input.
+ *      - out_H             : Gain-adjusted near-end speech vector (H band)
+ *      - saturationWarning : A returned value of 1 indicates a saturation event
+ *                            has occurred and the volume cannot be further
+ *                            reduced. Otherwise will be set to 0.
+ *
+ * Return value:
+ *                          :  0 - Normal operation.
+ *                          : -1 - Error
+ */
+int WebRtcAgc_Process(void* agcInst,
+                      const WebRtc_Word16* inNear,
+                      const WebRtc_Word16* inNear_H,
+                      WebRtc_Word16 samples,
+                      WebRtc_Word16* out,
+                      WebRtc_Word16* out_H,
+                      WebRtc_Word32 inMicLevel,
+                      WebRtc_Word32* outMicLevel,
+                      WebRtc_Word16 echo,
+                      WebRtc_UWord8* saturationWarning);
+
+/*
+ * This function sets the config parameters (targetLevelDbfs,
+ * compressionGaindB and limiterEnable).
+ *
+ * Input:
+ *      - agcInst           : AGC instance
+ *      - config            : config struct
+ *
+ * Output:
+ *
+ * Return value:
+ *                          :  0 - Normal operation.
+ *                          : -1 - Error
+ */
+int WebRtcAgc_set_config(void* agcInst, WebRtcAgc_config_t config);
+
+/*
+ * This function returns the config parameters (targetLevelDbfs,
+ * compressionGaindB and limiterEnable).
+ *
+ * Input:
+ *      - agcInst           : AGC instance
+ *
+ * Output:
+ *      - config            : config struct
+ *
+ * Return value:
+ *                          :  0 - Normal operation.
+ *                          : -1 - Error
+ */
+int WebRtcAgc_get_config(void* agcInst, WebRtcAgc_config_t* config);
+
+/*
+ * This function creates an AGC instance, which will contain the state
+ * information for one (duplex) channel.
+ *
+ * Return value             : AGC instance if successful
+ *                          : 0 (i.e., a NULL pointer) if unsuccessful
+ */
+int WebRtcAgc_Create(void **agcInst);
+
+/*
+ * This function frees the AGC instance created at the beginning.
+ *
+ * Input:
+ *      - agcInst           : AGC instance.
+ *
+ * Return value             :  0 - Ok
+ *                            -1 - Error
+ */
+int WebRtcAgc_Free(void *agcInst);
+
+/*
+ * This function initializes an AGC instance.
+ *
+ * Input:
+ *      - agcInst           : AGC instance.
+ *      - minLevel          : Minimum possible mic level
+ *      - maxLevel          : Maximum possible mic level
+ *      - agcMode           : 0 - Unchanged
+ *                          : 1 - Adaptive Analog Automatic Gain Control -3dBOv
+ *                          : 2 - Adaptive Digital Automatic Gain Control -3dBOv
+ *                          : 3 - Fixed Digital Gain 0dB
+ *      - fs                : Sampling frequency
+ *
+ * Return value             :  0 - Ok
+ *                            -1 - Error
+ */
+int WebRtcAgc_Init(void *agcInst,
+                   WebRtc_Word32 minLevel,
+                   WebRtc_Word32 maxLevel,
+                   WebRtc_Word16 agcMode,
+                   WebRtc_UWord32 fs);
+
+#if defined(__cplusplus)
+}
+#endif
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_AGC_INCLUDE_GAIN_CONTROL_H_
diff --git a/src/modules/audio_processing/agc/interface/gain_control.h b/src/modules/audio_processing/agc/interface/gain_control.h
deleted file mode 100644
index 2893331..0000000
--- a/src/modules/audio_processing/agc/interface/gain_control.h
+++ /dev/null
@@ -1,273 +0,0 @@
-/*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_AGC_MAIN_INTERFACE_GAIN_CONTROL_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_AGC_MAIN_INTERFACE_GAIN_CONTROL_H_
-
-#include "typedefs.h"
-
-// Errors
-#define AGC_UNSPECIFIED_ERROR           18000
-#define AGC_UNSUPPORTED_FUNCTION_ERROR  18001
-#define AGC_UNINITIALIZED_ERROR         18002
-#define AGC_NULL_POINTER_ERROR          18003
-#define AGC_BAD_PARAMETER_ERROR         18004
-
-// Warnings
-#define AGC_BAD_PARAMETER_WARNING       18050
-
-enum
-{
-    kAgcModeUnchanged,
-    kAgcModeAdaptiveAnalog,
-    kAgcModeAdaptiveDigital,
-    kAgcModeFixedDigital
-};
-
-enum
-{
-    kAgcFalse = 0,
-    kAgcTrue
-};
-
-typedef struct
-{
-    WebRtc_Word16 targetLevelDbfs;   // default 3 (-3 dBOv)
-    WebRtc_Word16 compressionGaindB; // default 9 dB
-    WebRtc_UWord8 limiterEnable;     // default kAgcTrue (on)
-} WebRtcAgc_config_t;
-
-#if defined(__cplusplus)
-extern "C"
-{
-#endif
-
-/*
- * This function processes a 10/20ms frame of far-end speech to determine
- * if there is active speech. Far-end speech length can be either 10ms or
- * 20ms. The length of the input speech vector must be given in samples
- * (80/160 when FS=8000, and 160/320 when FS=16000 or FS=32000).
- *
- * Input:
- *      - agcInst           : AGC instance.
- *      - inFar             : Far-end input speech vector (10 or 20ms)
- *      - samples           : Number of samples in input vector
- *
- * Return value:
- *                          :  0 - Normal operation.
- *                          : -1 - Error
- */
-int WebRtcAgc_AddFarend(void* agcInst,
-                        const WebRtc_Word16* inFar,
-                        WebRtc_Word16 samples);
-
-/*
- * This function processes a 10/20ms frame of microphone speech to determine
- * if there is active speech. Microphone speech length can be either 10ms or
- * 20ms. The length of the input speech vector must be given in samples
- * (80/160 when FS=8000, and 160/320 when FS=16000 or FS=32000). For very low
- * input levels, the input signal is increased in level by multiplying and
- * overwriting the samples in inMic[].
- *
- * This function should be called before any further processing of the
- * near-end microphone signal.
- *
- * Input:
- *      - agcInst           : AGC instance.
- *      - inMic             : Microphone input speech vector (10 or 20 ms) for
- *                            L band
- *      - inMic_H           : Microphone input speech vector (10 or 20 ms) for
- *                            H band
- *      - samples           : Number of samples in input vector
- *
- * Return value:
- *                          :  0 - Normal operation.
- *                          : -1 - Error
- */
-int WebRtcAgc_AddMic(void* agcInst,
-                     WebRtc_Word16* inMic,
-                     WebRtc_Word16* inMic_H,
-                     WebRtc_Word16 samples);
-
-/*
- * This function replaces the analog microphone with a virtual one.
- * It is a digital gain applied to the input signal and is used in the
- * agcAdaptiveDigital mode where no microphone level is adjustable.
- * Microphone speech length can be either 10ms or 20ms. The length of the
- * input speech vector must be given in samples (80/160 when FS=8000, and
- * 160/320 when FS=16000 or FS=32000).
- *
- * Input:
- *      - agcInst           : AGC instance.
- *      - inMic             : Microphone input speech vector for (10 or 20 ms)
- *                            L band
- *      - inMic_H           : Microphone input speech vector for (10 or 20 ms)
- *                            H band
- *      - samples           : Number of samples in input vector
- *      - micLevelIn        : Input level of microphone (static)
- *
- * Output:
- *      - inMic             : Microphone output after processing (L band)
- *      - inMic_H           : Microphone output after processing (H band)
- *      - micLevelOut       : Adjusted microphone level after processing
- *
- * Return value:
- *                          :  0 - Normal operation.
- *                          : -1 - Error
- */
-int WebRtcAgc_VirtualMic(void* agcInst,
-                         WebRtc_Word16* inMic,
-                         WebRtc_Word16* inMic_H,
-                         WebRtc_Word16 samples,
-                         WebRtc_Word32 micLevelIn,
-                         WebRtc_Word32* micLevelOut);
-
-/*
- * This function processes a 10/20ms frame and adjusts (normalizes) the gain
- * both analog and digitally. The gain adjustments are done only during
- * active periods of speech. The input speech length can be either 10ms or
- * 20ms and the output is of the same length. The length of the speech
- * vectors must be given in samples (80/160 when FS=8000, and 160/320 when
- * FS=16000 or FS=32000). The echo parameter can be used to ensure the AGC will
- * not adjust upward in the presence of echo.
- *
- * This function should be called after processing the near-end microphone
- * signal, in any case after any echo cancellation.
- *
- * Input:
- *      - agcInst           : AGC instance
- *      - inNear            : Near-end input speech vector (10 or 20 ms) for
- *                            L band
- *      - inNear_H          : Near-end input speech vector (10 or 20 ms) for
- *                            H band
- *      - samples           : Number of samples in input/output vector
- *      - inMicLevel        : Current microphone volume level
- *      - echo              : Set to 0 if the signal passed to add_mic is
- *                            almost certainly free of echo; otherwise set
- *                            to 1. If you have no information regarding echo
- *                            set to 0.
- *
- * Output:
- *      - outMicLevel       : Adjusted microphone volume level
- *      - out               : Gain-adjusted near-end speech vector (L band)
- *                          : May be the same vector as the input.
- *      - out_H             : Gain-adjusted near-end speech vector (H band)
- *      - saturationWarning : A returned value of 1 indicates a saturation event
- *                            has occurred and the volume cannot be further
- *                            reduced. Otherwise will be set to 0.
- *
- * Return value:
- *                          :  0 - Normal operation.
- *                          : -1 - Error
- */
-int WebRtcAgc_Process(void* agcInst,
-                      const WebRtc_Word16* inNear,
-                      const WebRtc_Word16* inNear_H,
-                      WebRtc_Word16 samples,
-                      WebRtc_Word16* out,
-                      WebRtc_Word16* out_H,
-                      WebRtc_Word32 inMicLevel,
-                      WebRtc_Word32* outMicLevel,
-                      WebRtc_Word16 echo,
-                      WebRtc_UWord8* saturationWarning);
-
-/*
- * This function sets the config parameters (targetLevelDbfs,
- * compressionGaindB and limiterEnable).
- *
- * Input:
- *      - agcInst           : AGC instance
- *      - config            : config struct
- *
- * Output:
- *
- * Return value:
- *                          :  0 - Normal operation.
- *                          : -1 - Error
- */
-int WebRtcAgc_set_config(void* agcInst, WebRtcAgc_config_t config);
-
-/*
- * This function returns the config parameters (targetLevelDbfs,
- * compressionGaindB and limiterEnable).
- *
- * Input:
- *      - agcInst           : AGC instance
- *
- * Output:
- *      - config            : config struct
- *
- * Return value:
- *                          :  0 - Normal operation.
- *                          : -1 - Error
- */
-int WebRtcAgc_get_config(void* agcInst, WebRtcAgc_config_t* config);
-
-/*
- * This function creates an AGC instance, which will contain the state
- * information for one (duplex) channel.
- *
- * Return value             : AGC instance if successful
- *                          : 0 (i.e., a NULL pointer) if unsuccessful
- */
-int WebRtcAgc_Create(void **agcInst);
-
-/*
- * This function frees the AGC instance created at the beginning.
- *
- * Input:
- *      - agcInst           : AGC instance.
- *
- * Return value             :  0 - Ok
- *                            -1 - Error
- */
-int WebRtcAgc_Free(void *agcInst);
-
-/*
- * This function initializes an AGC instance.
- *
- * Input:
- *      - agcInst           : AGC instance.
- *      - minLevel          : Minimum possible mic level
- *      - maxLevel          : Maximum possible mic level
- *      - agcMode           : 0 - Unchanged
- *                          : 1 - Adaptive Analog Automatic Gain Control -3dBOv
- *                          : 2 - Adaptive Digital Automatic Gain Control -3dBOv
- *                          : 3 - Fixed Digital Gain 0dB
- *      - fs                : Sampling frequency
- *
- * Return value             :  0 - Ok
- *                            -1 - Error
- */
-int WebRtcAgc_Init(void *agcInst,
-                   WebRtc_Word32 minLevel,
-                   WebRtc_Word32 maxLevel,
-                   WebRtc_Word16 agcMode,
-                   WebRtc_UWord32 fs);
-
-/*
- * This function returns a text string containing the version.
- *
- * Input:
- *      - length            : Length of the char array pointed to by version
- * Output:
- *      - version           : Pointer to a char array of to which the version
- *                          : string will be copied.
- *
- * Return value             :  0 - OK
- *                            -1 - Error
- */
-int WebRtcAgc_Version(WebRtc_Word8 *versionStr, WebRtc_Word16 length);
-
-#if defined(__cplusplus)
-}
-#endif
-
-#endif // WEBRTC_MODULES_AUDIO_PROCESSING_AGC_MAIN_INTERFACE_GAIN_CONTROL_H_
diff --git a/src/modules/audio_processing/apm_tests.gypi b/src/modules/audio_processing/apm_tests.gypi
index f9b21d2..0d5bfac 100644
--- a/src/modules/audio_processing/apm_tests.gypi
+++ b/src/modules/audio_processing/apm_tests.gypi
@@ -1,4 +1,4 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 #
 # Use of this source code is governed by a BSD-style license
 # that can be found in the LICENSE file in the root of the source
@@ -13,9 +13,9 @@
       'type': 'executable',
       'conditions': [
         ['prefer_fixed_point==1', {
-          'defines': [ 'WEBRTC_APM_UNIT_TEST_FIXED_PROFILE' ],
+          'defines': [ 'WEBRTC_AUDIOPROC_FIXED_PROFILE' ],
         }, {
-          'defines': [ 'WEBRTC_APM_UNIT_TEST_FLOAT_PROFILE' ],
+          'defines': [ 'WEBRTC_AUDIOPROC_FLOAT_PROFILE' ],
         }],
         ['enable_protobuf==1', {
           'defines': [ 'WEBRTC_AUDIOPROC_DEBUG_DUMP' ],
@@ -26,10 +26,14 @@
         'audioproc_unittest_proto',
         '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
         '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
-        '<(webrtc_root)/../test/test.gyp:test_support',
-        '<(webrtc_root)/../testing/gtest.gyp:gtest',
+        '<(webrtc_root)/test/test.gyp:test_support',
+        '<(DEPTH)/testing/gtest.gyp:gtest',
       ],
-      'sources': [ 'test/unit_test.cc', ],
+      'sources': [
+        'aec/system_delay_unittest.cc',
+        'test/unit_test.cc',
+        'utility/delay_estimator_unittest.cc',
+      ],
     },
     {
       'target_name': 'audioproc_unittest_proto',
@@ -55,7 +59,7 @@
             'audio_processing',
             'audioproc_debug_proto',
             '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
-            '<(webrtc_root)/../testing/gtest.gyp:gtest',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
           ],
           'sources': [ 'test/process_test.cc', ],
         },
@@ -65,7 +69,7 @@
           'dependencies': [
             'audioproc_debug_proto',
             '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
-            '<(webrtc_root)/../third_party/google-gflags/google-gflags.gyp:google-gflags',
+            '<(DEPTH)/third_party/google-gflags/google-gflags.gyp:google-gflags',
           ],
           'sources': [ 'test/unpack.cc', ],
         },
diff --git a/src/modules/audio_processing/audio_buffer.cc b/src/modules/audio_processing/audio_buffer.cc
index a7fb04d..aee9f68 100644
--- a/src/modules/audio_processing/audio_buffer.cc
+++ b/src/modules/audio_processing/audio_buffer.cc
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -188,27 +188,27 @@
 
 // TODO(andrew): Do deinterleaving and mixing in one step?
 void AudioBuffer::DeinterleaveFrom(AudioFrame* frame) {
-  assert(frame->_audioChannel <= max_num_channels_);
-  assert(frame->_payloadDataLengthInSamples ==  samples_per_channel_);
+  assert(frame->num_channels_ <= max_num_channels_);
+  assert(frame->samples_per_channel_ ==  samples_per_channel_);
 
-  num_channels_ = frame->_audioChannel;
+  num_channels_ = frame->num_channels_;
   data_was_mixed_ = false;
   num_mixed_channels_ = 0;
   num_mixed_low_pass_channels_ = 0;
   reference_copied_ = false;
-  activity_ = frame->_vadActivity;
+  activity_ = frame->vad_activity_;
   is_muted_ = false;
-  if (frame->_energy == 0) {
+  if (frame->energy_ == 0) {
     is_muted_ = true;
   }
 
   if (num_channels_ == 1) {
     // We can get away with a pointer assignment in this case.
-    data_ = frame->_payloadData;
+    data_ = frame->data_;
     return;
   }
 
-  int16_t* interleaved = frame->_payloadData;
+  int16_t* interleaved = frame->data_;
   for (int i = 0; i < num_channels_; i++) {
     int16_t* deinterleaved = channels_[i].data;
     int interleaved_idx = i;
@@ -220,9 +220,9 @@
 }
 
 void AudioBuffer::InterleaveTo(AudioFrame* frame, bool data_changed) const {
-  assert(frame->_audioChannel == num_channels_);
-  assert(frame->_payloadDataLengthInSamples == samples_per_channel_);
-  frame->_vadActivity = activity_;
+  assert(frame->num_channels_ == num_channels_);
+  assert(frame->samples_per_channel_ == samples_per_channel_);
+  frame->vad_activity_ = activity_;
 
   if (!data_changed) {
     return;
@@ -230,18 +230,18 @@
 
   if (num_channels_ == 1) {
     if (data_was_mixed_) {
-      memcpy(frame->_payloadData,
+      memcpy(frame->data_,
              channels_[0].data,
              sizeof(int16_t) * samples_per_channel_);
     } else {
       // These should point to the same buffer in this case.
-      assert(data_ == frame->_payloadData);
+      assert(data_ == frame->data_);
     }
 
     return;
   }
 
-  int16_t* interleaved = frame->_payloadData;
+  int16_t* interleaved = frame->data_;
   for (int i = 0; i < num_channels_; i++) {
     int16_t* deinterleaved = channels_[i].data;
     int interleaved_idx = i;
diff --git a/src/modules/audio_processing/audio_processing.gypi b/src/modules/audio_processing/audio_processing.gypi
index 2a22a79..00078c4 100644
--- a/src/modules/audio_processing/audio_processing.gypi
+++ b/src/modules/audio_processing/audio_processing.gypi
@@ -1,4 +1,4 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 #
 # Use of this source code is governed by a BSD-style license
 # that can be found in the LICENSE file in the root of the source
@@ -33,17 +33,17 @@
         '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
       ],
       'include_dirs': [
-        'interface',
+        'include',
         '../interface',
       ],
       'direct_dependent_settings': {
         'include_dirs': [
-          'interface',
+          'include',
           '../interface',
         ],
       },
       'sources': [
-        'interface/audio_processing.h',
+        'include/audio_processing.h',
         'audio_buffer.cc',
         'audio_buffer.h',
         'audio_processing_impl.cc',
diff --git a/src/modules/audio_processing/audio_processing_impl.cc b/src/modules/audio_processing/audio_processing_impl.cc
index 9702e9e..ab5607a 100644
--- a/src/modules/audio_processing/audio_processing_impl.cc
+++ b/src/modules/audio_processing/audio_processing_impl.cc
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -28,7 +28,7 @@
 
 #ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
 // Files generated at build-time by the protobuf compiler.
-#ifdef WEBRTC_ANDROID
+#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
 #include "external/webrtc/src/modules/audio_processing/debug.pb.h"
 #else
 #include "webrtc/audio_processing/debug.pb.h"
@@ -37,10 +37,6 @@
 
 namespace webrtc {
 AudioProcessing* AudioProcessing::Create(int id) {
-  /*WEBRTC_TRACE(webrtc::kTraceModuleCall,
-             webrtc::kTraceAudioProcessing,
-             id,
-             "AudioProcessing::Create()");*/
 
   AudioProcessingImpl* apm = new AudioProcessingImpl(id);
   if (apm->Initialize() != kNoError) {
@@ -75,6 +71,7 @@
       split_sample_rate_hz_(kSampleRate16kHz),
       samples_per_channel_(sample_rate_hz_ / 100),
       stream_delay_ms_(0),
+      delay_offset_ms_(0),
       was_stream_delay_set_(false),
       num_reverse_channels_(1),
       num_input_channels_(1),
@@ -103,6 +100,7 @@
 }
 
 AudioProcessingImpl::~AudioProcessingImpl() {
+  crit_->Enter();
   while (!component_list_.empty()) {
     ProcessingComponent* component = component_list_.front();
     component->Destroy();
@@ -116,9 +114,6 @@
   }
 #endif
 
-  delete crit_;
-  crit_ = NULL;
-
   if (render_audio_) {
     delete render_audio_;
     render_audio_ = NULL;
@@ -128,6 +123,10 @@
     delete capture_audio_;
     capture_audio_ = NULL;
   }
+
+  crit_->Leave();
+  delete crit_;
+  crit_ = NULL;
 }
 
 CriticalSectionWrapper* AudioProcessingImpl::crit() const {
@@ -139,7 +138,7 @@
 }
 
 int AudioProcessingImpl::Initialize() {
-  CriticalSectionScoped crit_scoped(*crit_);
+  CriticalSectionScoped crit_scoped(crit_);
   return InitializeLocked();
 }
 
@@ -183,7 +182,7 @@
 }
 
 int AudioProcessingImpl::set_sample_rate_hz(int rate) {
-  CriticalSectionScoped crit_scoped(*crit_);
+  CriticalSectionScoped crit_scoped(crit_);
   if (rate != kSampleRate8kHz &&
       rate != kSampleRate16kHz &&
       rate != kSampleRate32kHz) {
@@ -207,7 +206,7 @@
 }
 
 int AudioProcessingImpl::set_num_reverse_channels(int channels) {
-  CriticalSectionScoped crit_scoped(*crit_);
+  CriticalSectionScoped crit_scoped(crit_);
   // Only stereo supported currently.
   if (channels > 2 || channels < 1) {
     return kBadParameterError;
@@ -225,7 +224,7 @@
 int AudioProcessingImpl::set_num_channels(
     int input_channels,
     int output_channels) {
-  CriticalSectionScoped crit_scoped(*crit_);
+  CriticalSectionScoped crit_scoped(crit_);
   if (output_channels > input_channels) {
     return kBadParameterError;
   }
@@ -254,22 +253,22 @@
 }
 
 int AudioProcessingImpl::ProcessStream(AudioFrame* frame) {
-  CriticalSectionScoped crit_scoped(*crit_);
+  CriticalSectionScoped crit_scoped(crit_);
   int err = kNoError;
 
   if (frame == NULL) {
     return kNullPointerError;
   }
 
-  if (frame->_frequencyInHz != sample_rate_hz_) {
+  if (frame->sample_rate_hz_ != sample_rate_hz_) {
     return kBadSampleRateError;
   }
 
-  if (frame->_audioChannel != num_input_channels_) {
+  if (frame->num_channels_ != num_input_channels_) {
     return kBadNumberChannelsError;
   }
 
-  if (frame->_payloadDataLengthInSamples != samples_per_channel_) {
+  if (frame->samples_per_channel_ != samples_per_channel_) {
     return kBadDataLengthError;
   }
 
@@ -278,9 +277,9 @@
     event_msg_->set_type(audioproc::Event::STREAM);
     audioproc::Stream* msg = event_msg_->mutable_stream();
     const size_t data_size = sizeof(int16_t) *
-                             frame->_payloadDataLengthInSamples *
-                             frame->_audioChannel;
-    msg->set_input_data(frame->_payloadData, data_size);
+                             frame->samples_per_channel_ *
+                             frame->num_channels_;
+    msg->set_input_data(frame->data_, data_size);
     msg->set_delay(stream_delay_ms_);
     msg->set_drift(echo_cancellation_->stream_drift_samples());
     msg->set_level(gain_control_->stream_analog_level());
@@ -292,11 +291,11 @@
   // TODO(ajm): experiment with mixing and AEC placement.
   if (num_output_channels_ < num_input_channels_) {
     capture_audio_->Mix(num_output_channels_);
-    frame->_audioChannel = num_output_channels_;
+    frame->num_channels_ = num_output_channels_;
   }
 
-  bool data_changed = stream_data_changed();
-  if (analysis_needed(data_changed)) {
+  bool data_processed = is_data_processed();
+  if (analysis_needed(data_processed)) {
     for (int i = 0; i < num_output_channels_; i++) {
       // Split into a low and high band.
       SplittingFilterAnalysis(capture_audio_->data(i),
@@ -347,7 +346,7 @@
     return err;
   }
 
-  if (synthesis_needed(data_changed)) {
+  if (synthesis_needed(data_processed)) {
     for (int i = 0; i < num_output_channels_; i++) {
       // Recombine low and high bands.
       SplittingFilterSynthesis(capture_audio_->low_pass_split_data(i),
@@ -364,15 +363,15 @@
     return err;
   }
 
-  capture_audio_->InterleaveTo(frame, data_changed);
+  capture_audio_->InterleaveTo(frame, interleave_needed(data_processed));
 
 #ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
   if (debug_file_->Open()) {
     audioproc::Stream* msg = event_msg_->mutable_stream();
     const size_t data_size = sizeof(int16_t) *
-                             frame->_payloadDataLengthInSamples *
-                             frame->_audioChannel;
-    msg->set_output_data(frame->_payloadData, data_size);
+                             frame->samples_per_channel_ *
+                             frame->num_channels_;
+    msg->set_output_data(frame->data_, data_size);
     err = WriteMessageToDebugFile();
     if (err != kNoError) {
       return err;
@@ -385,22 +384,22 @@
 }
 
 int AudioProcessingImpl::AnalyzeReverseStream(AudioFrame* frame) {
-  CriticalSectionScoped crit_scoped(*crit_);
+  CriticalSectionScoped crit_scoped(crit_);
   int err = kNoError;
 
   if (frame == NULL) {
     return kNullPointerError;
   }
 
-  if (frame->_frequencyInHz != sample_rate_hz_) {
+  if (frame->sample_rate_hz_ != sample_rate_hz_) {
     return kBadSampleRateError;
   }
 
-  if (frame->_audioChannel != num_reverse_channels_) {
+  if (frame->num_channels_ != num_reverse_channels_) {
     return kBadNumberChannelsError;
   }
 
-  if (frame->_payloadDataLengthInSamples != samples_per_channel_) {
+  if (frame->samples_per_channel_ != samples_per_channel_) {
     return kBadDataLengthError;
   }
 
@@ -409,9 +408,9 @@
     event_msg_->set_type(audioproc::Event::REVERSE_STREAM);
     audioproc::ReverseStream* msg = event_msg_->mutable_reverse_stream();
     const size_t data_size = sizeof(int16_t) *
-                             frame->_payloadDataLengthInSamples *
-                             frame->_audioChannel;
-    msg->set_data(frame->_payloadData, data_size);
+                             frame->samples_per_channel_ *
+                             frame->num_channels_;
+    msg->set_data(frame->data_, data_size);
     err = WriteMessageToDebugFile();
     if (err != kNoError) {
       return err;
@@ -453,19 +452,23 @@
 }
 
 int AudioProcessingImpl::set_stream_delay_ms(int delay) {
+  Error retval = kNoError;
   was_stream_delay_set_ = true;
+  delay += delay_offset_ms_;
+
   if (delay < 0) {
-    return kBadParameterError;
+    delay = 0;
+    retval = kBadStreamParameterWarning;
   }
 
   // TODO(ajm): the max is rather arbitrarily chosen; investigate.
   if (delay > 500) {
-    stream_delay_ms_ = 500;
-    return kBadStreamParameterWarning;
+    delay = 500;
+    retval = kBadStreamParameterWarning;
   }
 
   stream_delay_ms_ = delay;
-  return kNoError;
+  return retval;
 }
 
 int AudioProcessingImpl::stream_delay_ms() const {
@@ -476,9 +479,18 @@
   return was_stream_delay_set_;
 }
 
+void AudioProcessingImpl::set_delay_offset_ms(int offset) {
+  CriticalSectionScoped crit_scoped(crit_);
+  delay_offset_ms_ = offset;
+}
+
+int AudioProcessingImpl::delay_offset_ms() const {
+  return delay_offset_ms_;
+}
+
 int AudioProcessingImpl::StartDebugRecording(
     const char filename[AudioProcessing::kMaxFilenameSize]) {
-  CriticalSectionScoped crit_scoped(*crit_);
+  CriticalSectionScoped crit_scoped(crit_);
   assert(kMaxFilenameSize == FileWrapper::kMaxFileNameSize);
 
   if (filename == NULL) {
@@ -509,7 +521,7 @@
 }
 
 int AudioProcessingImpl::StopDebugRecording() {
-  CriticalSectionScoped crit_scoped(*crit_);
+  CriticalSectionScoped crit_scoped(crit_);
 
 #ifdef WEBRTC_AUDIOPROC_DEBUG_DUMP
   // We just return if recording hasn't started.
@@ -553,18 +565,13 @@
 }
 
 WebRtc_Word32 AudioProcessingImpl::ChangeUniqueId(const WebRtc_Word32 id) {
-  CriticalSectionScoped crit_scoped(*crit_);
-  /*WEBRTC_TRACE(webrtc::kTraceModuleCall,
-             webrtc::kTraceAudioProcessing,
-             id_,
-             "ChangeUniqueId(new id = %d)",
-             id);*/
+  CriticalSectionScoped crit_scoped(crit_);
   id_ = id;
 
   return kNoError;
 }
 
-bool AudioProcessingImpl::stream_data_changed() const {
+bool AudioProcessingImpl::is_data_processed() const {
   int enabled_count = 0;
   std::list<ProcessingComponent*>::const_iterator it;
   for (it = component_list_.begin(); it != component_list_.end(); it++) {
@@ -589,12 +596,17 @@
   return true;
 }
 
-bool AudioProcessingImpl::synthesis_needed(bool stream_data_changed) const {
-  return (stream_data_changed && sample_rate_hz_ == kSampleRate32kHz);
+bool AudioProcessingImpl::interleave_needed(bool is_data_processed) const {
+  // Check if we've upmixed or downmixed the audio.
+  return (num_output_channels_ != num_input_channels_ || is_data_processed);
 }
 
-bool AudioProcessingImpl::analysis_needed(bool stream_data_changed) const {
-  if (!stream_data_changed && !voice_detection_->is_enabled()) {
+bool AudioProcessingImpl::synthesis_needed(bool is_data_processed) const {
+  return (is_data_processed && sample_rate_hz_ == kSampleRate32kHz);
+}
+
+bool AudioProcessingImpl::analysis_needed(bool is_data_processed) const {
+  if (!is_data_processed && !voice_detection_->is_enabled()) {
     // Only level_estimator_ is enabled.
     return false;
   } else if (sample_rate_hz_ == kSampleRate32kHz) {
diff --git a/src/modules/audio_processing/audio_processing_impl.h b/src/modules/audio_processing/audio_processing_impl.h
index c1ab476..81e5ccb 100644
--- a/src/modules/audio_processing/audio_processing_impl.h
+++ b/src/modules/audio_processing/audio_processing_impl.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -69,6 +69,8 @@
   virtual int AnalyzeReverseStream(AudioFrame* frame);
   virtual int set_stream_delay_ms(int delay);
   virtual int stream_delay_ms() const;
+  virtual void set_delay_offset_ms(int offset);
+  virtual int delay_offset_ms() const;
   virtual int StartDebugRecording(const char filename[kMaxFilenameSize]);
   virtual int StopDebugRecording();
   virtual EchoCancellation* echo_cancellation() const;
@@ -83,9 +85,10 @@
   virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
 
  private:
-  bool stream_data_changed() const;
-  bool synthesis_needed(bool stream_data_changed) const;
-  bool analysis_needed(bool stream_data_changed) const;
+  bool is_data_processed() const;
+  bool interleave_needed(bool is_data_processed) const;
+  bool synthesis_needed(bool is_data_processed) const;
+  bool analysis_needed(bool is_data_processed) const;
 
   int id_;
 
@@ -115,6 +118,7 @@
   int split_sample_rate_hz_;
   int samples_per_channel_;
   int stream_delay_ms_;
+  int delay_offset_ms_;
   bool was_stream_delay_set_;
 
   int num_reverse_channels_;
diff --git a/src/modules/audio_processing/echo_cancellation_impl.cc b/src/modules/audio_processing/echo_cancellation_impl.cc
index 61940b1..d4c5523 100644
--- a/src/modules/audio_processing/echo_cancellation_impl.cc
+++ b/src/modules/audio_processing/echo_cancellation_impl.cc
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -32,22 +32,19 @@
       return kAecNlpModerate;
     case EchoCancellation::kHighSuppression:
       return kAecNlpAggressive;
-    default:
-      return -1;
   }
+  assert(false);
+  return -1;
 }
 
-int MapError(int err) {
+AudioProcessing::Error MapError(int err) {
   switch (err) {
     case AEC_UNSUPPORTED_FUNCTION_ERROR:
       return AudioProcessing::kUnsupportedFunctionError;
-      break;
     case AEC_BAD_PARAMETER_ERROR:
       return AudioProcessing::kBadParameterError;
-      break;
     case AEC_BAD_PARAMETER_WARNING:
       return AudioProcessing::kBadStreamParameterWarning;
-      break;
     default:
       // AEC_UNSPECIFIED_ERROR
       // AEC_UNINITIALIZED_ERROR
@@ -163,7 +160,7 @@
 }
 
 int EchoCancellationImpl::Enable(bool enable) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   // Ensure AEC and AECM are not both enabled.
   if (enable && apm_->echo_control_mobile()->is_enabled()) {
     return apm_->kBadParameterError;
@@ -177,7 +174,7 @@
 }
 
 int EchoCancellationImpl::set_suppression_level(SuppressionLevel level) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   if (MapSetting(level) == -1) {
     return apm_->kBadParameterError;
   }
@@ -192,7 +189,7 @@
 }
 
 int EchoCancellationImpl::enable_drift_compensation(bool enable) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   drift_compensation_enabled_ = enable;
   return Configure();
 }
@@ -202,7 +199,7 @@
 }
 
 int EchoCancellationImpl::set_device_sample_rate_hz(int rate) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   if (rate < 8000 || rate > 96000) {
     return apm_->kBadParameterError;
   }
@@ -226,7 +223,7 @@
 }
 
 int EchoCancellationImpl::enable_metrics(bool enable) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   metrics_enabled_ = enable;
   return Configure();
 }
@@ -238,7 +235,7 @@
 // TODO(ajm): we currently just use the metrics from the first AEC. Think more
 //            aboue the best way to extend this to multi-channel.
 int EchoCancellationImpl::GetMetrics(Metrics* metrics) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   if (metrics == NULL) {
     return apm_->kNullPointerError;
   }
@@ -285,7 +282,7 @@
 }
 
 int EchoCancellationImpl::enable_delay_logging(bool enable) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   delay_logging_enabled_ = enable;
   return Configure();
 }
@@ -296,7 +293,7 @@
 
 // TODO(bjornv): How should we handle the multi-channel case?
 int EchoCancellationImpl::GetDelayMetrics(int* median, int* std) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   if (median == NULL) {
     return apm_->kNullPointerError;
   }
@@ -328,15 +325,6 @@
   return apm_->kNoError;
 }
 
-int EchoCancellationImpl::get_version(char* version,
-                                      int version_len_bytes) const {
-  if (WebRtcAec_get_version(version, version_len_bytes) != 0) {
-      return apm_->kBadParameterError;
-  }
-
-  return apm_->kNoError;
-}
-
 void* EchoCancellationImpl::CreateHandle() const {
   Handle* handle = NULL;
   if (WebRtcAec_Create(&handle) != apm_->kNoError) {
diff --git a/src/modules/audio_processing/echo_cancellation_impl.h b/src/modules/audio_processing/echo_cancellation_impl.h
index a483a3a..3c2198c 100644
--- a/src/modules/audio_processing/echo_cancellation_impl.h
+++ b/src/modules/audio_processing/echo_cancellation_impl.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -8,8 +8,8 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_ECHO_CANCELLATION_IMPL_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_ECHO_CANCELLATION_IMPL_H_
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CANCELLATION_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CANCELLATION_IMPL_H_
 
 #include "audio_processing.h"
 #include "processing_component.h"
@@ -34,7 +34,6 @@
 
   // ProcessingComponent implementation.
   virtual int Initialize();
-  virtual int get_version(char* version, int version_len_bytes) const;
 
  private:
   // EchoCancellation implementation.
@@ -73,4 +72,4 @@
 };
 }  // namespace webrtc
 
-#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_ECHO_CANCELLATION_IMPL_H_
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CANCELLATION_IMPL_H_
diff --git a/src/modules/audio_processing/echo_control_mobile_impl.cc b/src/modules/audio_processing/echo_control_mobile_impl.cc
index ff15255..9427789 100644
--- a/src/modules/audio_processing/echo_control_mobile_impl.cc
+++ b/src/modules/audio_processing/echo_control_mobile_impl.cc
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -36,12 +36,12 @@
       return 3;
     case EchoControlMobile::kLoudSpeakerphone:
       return 4;
-    default:
-      return -1;
   }
+  assert(false);
+  return -1;
 }
 
-int MapError(int err) {
+AudioProcessing::Error MapError(int err) {
   switch (err) {
     case AECM_UNSUPPORTED_FUNCTION_ERROR:
       return AudioProcessing::kUnsupportedFunctionError;
@@ -155,7 +155,7 @@
 }
 
 int EchoControlMobileImpl::Enable(bool enable) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   // Ensure AEC and AECM are not both enabled.
   if (enable && apm_->echo_cancellation()->is_enabled()) {
     return apm_->kBadParameterError;
@@ -169,7 +169,7 @@
 }
 
 int EchoControlMobileImpl::set_routing_mode(RoutingMode mode) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   if (MapSetting(mode) == -1) {
     return apm_->kBadParameterError;
   }
@@ -184,7 +184,7 @@
 }
 
 int EchoControlMobileImpl::enable_comfort_noise(bool enable) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   comfort_noise_enabled_ = enable;
   return Configure();
 }
@@ -195,7 +195,7 @@
 
 int EchoControlMobileImpl::SetEchoPath(const void* echo_path,
                                        size_t size_bytes) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   if (echo_path == NULL) {
     return apm_->kNullPointerError;
   }
@@ -214,7 +214,7 @@
 
 int EchoControlMobileImpl::GetEchoPath(void* echo_path,
                                        size_t size_bytes) const {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   if (echo_path == NULL) {
     return apm_->kNullPointerError;
   }
@@ -248,15 +248,6 @@
   return ProcessingComponent::Initialize();
 }
 
-int EchoControlMobileImpl::get_version(char* version,
-                                       int version_len_bytes) const {
-  if (WebRtcAecm_get_version(version, version_len_bytes) != 0) {
-    return apm_->kBadParameterError;
-  }
-
-  return apm_->kNoError;
-}
-
 void* EchoControlMobileImpl::CreateHandle() const {
   Handle* handle = NULL;
   if (WebRtcAecm_Create(&handle) != apm_->kNoError) {
diff --git a/src/modules/audio_processing/echo_control_mobile_impl.h b/src/modules/audio_processing/echo_control_mobile_impl.h
index 6314e66..6d9e369 100644
--- a/src/modules/audio_processing/echo_control_mobile_impl.h
+++ b/src/modules/audio_processing/echo_control_mobile_impl.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -8,8 +8,8 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_ECHO_CONTROL_MOBILE_IMPL_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_ECHO_CONTROL_MOBILE_IMPL_H_
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CONTROL_MOBILE_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CONTROL_MOBILE_IMPL_H_
 
 #include "audio_processing.h"
 #include "processing_component.h"
@@ -32,7 +32,6 @@
 
   // ProcessingComponent implementation.
   virtual int Initialize();
-  virtual int get_version(char* version, int version_len_bytes) const;
 
  private:
   // EchoControlMobile implementation.
@@ -59,4 +58,4 @@
 };
 }  // namespace webrtc
 
-#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_ECHO_CONTROL_MOBILE_IMPL_H_
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_ECHO_CONTROL_MOBILE_IMPL_H_
diff --git a/src/modules/audio_processing/gain_control_impl.cc b/src/modules/audio_processing/gain_control_impl.cc
index dc3e565..a518ab5 100644
--- a/src/modules/audio_processing/gain_control_impl.cc
+++ b/src/modules/audio_processing/gain_control_impl.cc
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -22,34 +22,18 @@
 
 typedef void Handle;
 
-/*template <class T>
-class GainControlHandle : public ComponentHandle<T> {
-  public:
-    GainControlHandle();
-    virtual ~GainControlHandle();
-
-    virtual int Create();
-    virtual T* ptr() const;
-
-  private:
-    T* handle;
-};*/
-
 namespace {
 WebRtc_Word16 MapSetting(GainControl::Mode mode) {
   switch (mode) {
     case GainControl::kAdaptiveAnalog:
       return kAgcModeAdaptiveAnalog;
-      break;
     case GainControl::kAdaptiveDigital:
       return kAgcModeAdaptiveDigital;
-      break;
     case GainControl::kFixedDigital:
       return kAgcModeFixedDigital;
-      break;
-    default:
-      return -1;
   }
+  assert(false);
+  return -1;
 }
 }  // namespace
 
@@ -226,7 +210,7 @@
 }
 
 int GainControlImpl::Enable(bool enable) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   return EnableComponent(enable);
 }
 
@@ -235,7 +219,7 @@
 }
 
 int GainControlImpl::set_mode(Mode mode) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   if (MapSetting(mode) == -1) {
     return apm_->kBadParameterError;
   }
@@ -250,7 +234,7 @@
 
 int GainControlImpl::set_analog_level_limits(int minimum,
                                              int maximum) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   if (minimum < 0) {
     return apm_->kBadParameterError;
   }
@@ -282,7 +266,7 @@
 }
 
 int GainControlImpl::set_target_level_dbfs(int level) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   if (level > 31 || level < 0) {
     return apm_->kBadParameterError;
   }
@@ -296,7 +280,7 @@
 }
 
 int GainControlImpl::set_compression_gain_db(int gain) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   if (gain < 0 || gain > 90) {
     return apm_->kBadParameterError;
   }
@@ -310,7 +294,7 @@
 }
 
 int GainControlImpl::enable_limiter(bool enable) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   limiter_enabled_ = enable;
   return Configure();
 }
@@ -333,14 +317,6 @@
   return apm_->kNoError;
 }
 
-int GainControlImpl::get_version(char* version, int version_len_bytes) const {
-  if (WebRtcAgc_Version(version, version_len_bytes) != 0) {
-      return apm_->kBadParameterError;
-  }
-
-  return apm_->kNoError;
-}
-
 void* GainControlImpl::CreateHandle() const {
   Handle* handle = NULL;
   if (WebRtcAgc_Create(&handle) != apm_->kNoError) {
diff --git a/src/modules/audio_processing/gain_control_impl.h b/src/modules/audio_processing/gain_control_impl.h
index 7b6987e..5915eeb 100644
--- a/src/modules/audio_processing/gain_control_impl.h
+++ b/src/modules/audio_processing/gain_control_impl.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -8,8 +8,8 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_GAIN_CONTROL_IMPL_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_GAIN_CONTROL_IMPL_H_
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_GAIN_CONTROL_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_GAIN_CONTROL_IMPL_H_
 
 #include <vector>
 
@@ -32,7 +32,6 @@
 
   // ProcessingComponent implementation.
   virtual int Initialize();
-  virtual int get_version(char* version, int version_len_bytes) const;
 
   // GainControl implementation.
   virtual bool is_enabled() const;
@@ -77,4 +76,4 @@
 };
 }  // namespace webrtc
 
-#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_GAIN_CONTROL_IMPL_H_
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_GAIN_CONTROL_IMPL_H_
diff --git a/src/modules/audio_processing/high_pass_filter_impl.cc b/src/modules/audio_processing/high_pass_filter_impl.cc
index fa6d5d5..b20fed8 100644
--- a/src/modules/audio_processing/high_pass_filter_impl.cc
+++ b/src/modules/audio_processing/high_pass_filter_impl.cc
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -135,7 +135,7 @@
 }
 
 int HighPassFilterImpl::Enable(bool enable) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   return EnableComponent(enable);
 }
 
@@ -143,13 +143,6 @@
   return is_component_enabled();
 }
 
-int HighPassFilterImpl::get_version(char* version,
-                                    int version_len_bytes) const {
-  // An empty string is used to indicate no version information.
-  memset(version, 0, version_len_bytes);
-  return apm_->kNoError;
-}
-
 void* HighPassFilterImpl::CreateHandle() const {
   return new FilterState;
 }
diff --git a/src/modules/audio_processing/high_pass_filter_impl.h b/src/modules/audio_processing/high_pass_filter_impl.h
index 4c23754..94a9c89 100644
--- a/src/modules/audio_processing/high_pass_filter_impl.h
+++ b/src/modules/audio_processing/high_pass_filter_impl.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -8,8 +8,8 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_HIGH_PASS_FILTER_IMPL_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_HIGH_PASS_FILTER_IMPL_H_
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_HIGH_PASS_FILTER_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_HIGH_PASS_FILTER_IMPL_H_
 
 #include "audio_processing.h"
 #include "processing_component.h"
@@ -29,9 +29,6 @@
   // HighPassFilter implementation.
   virtual bool is_enabled() const;
 
-  // ProcessingComponent implementation.
-  virtual int get_version(char* version, int version_len_bytes) const;
-
  private:
   // HighPassFilter implementation.
   virtual int Enable(bool enable);
@@ -48,4 +45,4 @@
 };
 }  // namespace webrtc
 
-#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_HIGH_PASS_FILTER_IMPL_H_
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_HIGH_PASS_FILTER_IMPL_H_
diff --git a/src/modules/audio_processing/include/audio_processing.h b/src/modules/audio_processing/include/audio_processing.h
new file mode 100644
index 0000000..75b3e20
--- /dev/null
+++ b/src/modules/audio_processing/include/audio_processing.h
@@ -0,0 +1,608 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_INTERFACE_AUDIO_PROCESSING_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_INTERFACE_AUDIO_PROCESSING_H_
+
+#include <stddef.h> // size_t
+
+#include "typedefs.h"
+#include "module.h"
+
+namespace webrtc {
+
+class AudioFrame;
+class EchoCancellation;
+class EchoControlMobile;
+class GainControl;
+class HighPassFilter;
+class LevelEstimator;
+class NoiseSuppression;
+class VoiceDetection;
+
+// The Audio Processing Module (APM) provides a collection of voice processing
+// components designed for real-time communications software.
+//
+// APM operates on two audio streams on a frame-by-frame basis. Frames of the
+// primary stream, on which all processing is applied, are passed to
+// |ProcessStream()|. Frames of the reverse direction stream, which are used for
+// analysis by some components, are passed to |AnalyzeReverseStream()|. On the
+// client-side, this will typically be the near-end (capture) and far-end
+// (render) streams, respectively. APM should be placed in the signal chain as
+// close to the audio hardware abstraction layer (HAL) as possible.
+//
+// On the server-side, the reverse stream will normally not be used, with
+// processing occurring on each incoming stream.
+//
+// Component interfaces follow a similar pattern and are accessed through
+// corresponding getters in APM. All components are disabled at create-time,
+// with default settings that are recommended for most situations. New settings
+// can be applied without enabling a component. Enabling a component triggers
+// memory allocation and initialization to allow it to start processing the
+// streams.
+//
+// Thread safety is provided with the following assumptions to reduce locking
+// overhead:
+//   1. The stream getters and setters are called from the same thread as
+//      ProcessStream(). More precisely, stream functions are never called
+//      concurrently with ProcessStream().
+//   2. Parameter getters are never called concurrently with the corresponding
+//      setter.
+//
+// APM accepts only 16-bit linear PCM audio data in frames of 10 ms. Multiple
+// channels should be interleaved.
+//
+// Usage example, omitting error checking:
+// AudioProcessing* apm = AudioProcessing::Create(0);
+// apm->set_sample_rate_hz(32000); // Super-wideband processing.
+//
+// // Mono capture and stereo render.
+// apm->set_num_channels(1, 1);
+// apm->set_num_reverse_channels(2);
+//
+// apm->high_pass_filter()->Enable(true);
+//
+// apm->echo_cancellation()->enable_drift_compensation(false);
+// apm->echo_cancellation()->Enable(true);
+//
+// apm->noise_reduction()->set_level(kHighSuppression);
+// apm->noise_reduction()->Enable(true);
+//
+// apm->gain_control()->set_analog_level_limits(0, 255);
+// apm->gain_control()->set_mode(kAdaptiveAnalog);
+// apm->gain_control()->Enable(true);
+//
+// apm->voice_detection()->Enable(true);
+//
+// // Start a voice call...
+//
+// // ... Render frame arrives bound for the audio HAL ...
+// apm->AnalyzeReverseStream(render_frame);
+//
+// // ... Capture frame arrives from the audio HAL ...
+// // Call required set_stream_ functions.
+// apm->set_stream_delay_ms(delay_ms);
+// apm->gain_control()->set_stream_analog_level(analog_level);
+//
+// apm->ProcessStream(capture_frame);
+//
+// // Call required stream_ functions.
+// analog_level = apm->gain_control()->stream_analog_level();
+// has_voice = apm->stream_has_voice();
+//
+// // Repeate render and capture processing for the duration of the call...
+// // Start a new call...
+// apm->Initialize();
+//
+// // Close the application...
+// AudioProcessing::Destroy(apm);
+// apm = NULL;
+//
+class AudioProcessing : public Module {
+ public:
+  // Creates a APM instance, with identifier |id|. Use one instance for every
+  // primary audio stream requiring processing. On the client-side, this would
+  // typically be one instance for the near-end stream, and additional instances
+  // for each far-end stream which requires processing. On the server-side,
+  // this would typically be one instance for every incoming stream.
+  static AudioProcessing* Create(int id);
+  virtual ~AudioProcessing() {};
+
+  // TODO(andrew): remove this method. We now allow users to delete instances
+  // directly, useful for scoped_ptr.
+  // Destroys a |apm| instance.
+  static void Destroy(AudioProcessing* apm);
+
+  // Initializes internal states, while retaining all user settings. This
+  // should be called before beginning to process a new audio stream. However,
+  // it is not necessary to call before processing the first stream after
+  // creation.
+  virtual int Initialize() = 0;
+
+  // Sets the sample |rate| in Hz for both the primary and reverse audio
+  // streams. 8000, 16000 or 32000 Hz are permitted.
+  virtual int set_sample_rate_hz(int rate) = 0;
+  virtual int sample_rate_hz() const = 0;
+
+  // Sets the number of channels for the primary audio stream. Input frames must
+  // contain a number of channels given by |input_channels|, while output frames
+  // will be returned with number of channels given by |output_channels|.
+  virtual int set_num_channels(int input_channels, int output_channels) = 0;
+  virtual int num_input_channels() const = 0;
+  virtual int num_output_channels() const = 0;
+
+  // Sets the number of channels for the reverse audio stream. Input frames must
+  // contain a number of channels given by |channels|.
+  virtual int set_num_reverse_channels(int channels) = 0;
+  virtual int num_reverse_channels() const = 0;
+
+  // Processes a 10 ms |frame| of the primary audio stream. On the client-side,
+  // this is the near-end (or captured) audio.
+  //
+  // If needed for enabled functionality, any function with the set_stream_ tag
+  // must be called prior to processing the current frame. Any getter function
+  // with the stream_ tag which is needed should be called after processing.
+  //
+  // The |sample_rate_hz_|, |num_channels_|, and |samples_per_channel_|
+  // members of |frame| must be valid, and correspond to settings supplied
+  // to APM.
+  virtual int ProcessStream(AudioFrame* frame) = 0;
+
+  // Analyzes a 10 ms |frame| of the reverse direction audio stream. The frame
+  // will not be modified. On the client-side, this is the far-end (or to be
+  // rendered) audio.
+  //
+  // It is only necessary to provide this if echo processing is enabled, as the
+  // reverse stream forms the echo reference signal. It is recommended, but not
+  // necessary, to provide if gain control is enabled. On the server-side this
+  // typically will not be used. If you're not sure what to pass in here,
+  // chances are you don't need to use it.
+  //
+  // The |sample_rate_hz_|, |num_channels_|, and |samples_per_channel_|
+  // members of |frame| must be valid.
+  //
+  // TODO(ajm): add const to input; requires an implementation fix.
+  virtual int AnalyzeReverseStream(AudioFrame* frame) = 0;
+
+  // This must be called if and only if echo processing is enabled.
+  //
+  // Sets the |delay| in ms between AnalyzeReverseStream() receiving a far-end
+  // frame and ProcessStream() receiving a near-end frame containing the
+  // corresponding echo. On the client-side this can be expressed as
+  //   delay = (t_render - t_analyze) + (t_process - t_capture)
+  // where,
+  //   - t_analyze is the time a frame is passed to AnalyzeReverseStream() and
+  //     t_render is the time the first sample of the same frame is rendered by
+  //     the audio hardware.
+  //   - t_capture is the time the first sample of a frame is captured by the
+  //     audio hardware and t_pull is the time the same frame is passed to
+  //     ProcessStream().
+  virtual int set_stream_delay_ms(int delay) = 0;
+  virtual int stream_delay_ms() const = 0;
+
+  // Sets a delay |offset| in ms to add to the values passed in through
+  // set_stream_delay_ms(). May be positive or negative.
+  //
+  // Note that this could cause an otherwise valid value passed to
+  // set_stream_delay_ms() to return an error.
+  virtual void set_delay_offset_ms(int offset) = 0;
+  virtual int delay_offset_ms() const = 0;
+
+  // Starts recording debugging information to a file specified by |filename|,
+  // a NULL-terminated string. If there is an ongoing recording, the old file
+  // will be closed, and recording will continue in the newly specified file.
+  // An already existing file will be overwritten without warning.
+  static const size_t kMaxFilenameSize = 1024;
+  virtual int StartDebugRecording(const char filename[kMaxFilenameSize]) = 0;
+
+  // Stops recording debugging information, and closes the file. Recording
+  // cannot be resumed in the same file (without overwriting it).
+  virtual int StopDebugRecording() = 0;
+
+  // These provide access to the component interfaces and should never return
+  // NULL. The pointers will be valid for the lifetime of the APM instance.
+  // The memory for these objects is entirely managed internally.
+  virtual EchoCancellation* echo_cancellation() const = 0;
+  virtual EchoControlMobile* echo_control_mobile() const = 0;
+  virtual GainControl* gain_control() const = 0;
+  virtual HighPassFilter* high_pass_filter() const = 0;
+  virtual LevelEstimator* level_estimator() const = 0;
+  virtual NoiseSuppression* noise_suppression() const = 0;
+  virtual VoiceDetection* voice_detection() const = 0;
+
+  struct Statistic {
+    int instant;  // Instantaneous value.
+    int average;  // Long-term average.
+    int maximum;  // Long-term maximum.
+    int minimum;  // Long-term minimum.
+  };
+
+  enum Error {
+    // Fatal errors.
+    kNoError = 0,
+    kUnspecifiedError = -1,
+    kCreationFailedError = -2,
+    kUnsupportedComponentError = -3,
+    kUnsupportedFunctionError = -4,
+    kNullPointerError = -5,
+    kBadParameterError = -6,
+    kBadSampleRateError = -7,
+    kBadDataLengthError = -8,
+    kBadNumberChannelsError = -9,
+    kFileError = -10,
+    kStreamParameterNotSetError = -11,
+    kNotEnabledError = -12,
+
+    // Warnings are non-fatal.
+    // This results when a set_stream_ parameter is out of range. Processing
+    // will continue, but the parameter may have been truncated.
+    kBadStreamParameterWarning = -13
+  };
+
+  // Inherited from Module.
+  virtual WebRtc_Word32 TimeUntilNextProcess() { return -1; };
+  virtual WebRtc_Word32 Process() { return -1; };
+};
+
+// The acoustic echo cancellation (AEC) component provides better performance
+// than AECM but also requires more processing power and is dependent on delay
+// stability and reporting accuracy. As such it is well-suited and recommended
+// for PC and IP phone applications.
+//
+// Not recommended to be enabled on the server-side.
+class EchoCancellation {
+ public:
+  // EchoCancellation and EchoControlMobile may not be enabled simultaneously.
+  // Enabling one will disable the other.
+  virtual int Enable(bool enable) = 0;
+  virtual bool is_enabled() const = 0;
+
+  // Differences in clock speed on the primary and reverse streams can impact
+  // the AEC performance. On the client-side, this could be seen when different
+  // render and capture devices are used, particularly with webcams.
+  //
+  // This enables a compensation mechanism, and requires that
+  // |set_device_sample_rate_hz()| and |set_stream_drift_samples()| be called.
+  virtual int enable_drift_compensation(bool enable) = 0;
+  virtual bool is_drift_compensation_enabled() const = 0;
+
+  // Provides the sampling rate of the audio devices. It is assumed the render
+  // and capture devices use the same nominal sample rate. Required if and only
+  // if drift compensation is enabled.
+  virtual int set_device_sample_rate_hz(int rate) = 0;
+  virtual int device_sample_rate_hz() const = 0;
+
+  // Sets the difference between the number of samples rendered and captured by
+  // the audio devices since the last call to |ProcessStream()|. Must be called
+  // if and only if drift compensation is enabled, prior to |ProcessStream()|.
+  virtual int set_stream_drift_samples(int drift) = 0;
+  virtual int stream_drift_samples() const = 0;
+
+  enum SuppressionLevel {
+    kLowSuppression,
+    kModerateSuppression,
+    kHighSuppression
+  };
+
+  // Sets the aggressiveness of the suppressor. A higher level trades off
+  // double-talk performance for increased echo suppression.
+  virtual int set_suppression_level(SuppressionLevel level) = 0;
+  virtual SuppressionLevel suppression_level() const = 0;
+
+  // Returns false if the current frame almost certainly contains no echo
+  // and true if it _might_ contain echo.
+  virtual bool stream_has_echo() const = 0;
+
+  // Enables the computation of various echo metrics. These are obtained
+  // through |GetMetrics()|.
+  virtual int enable_metrics(bool enable) = 0;
+  virtual bool are_metrics_enabled() const = 0;
+
+  // Each statistic is reported in dB.
+  // P_far:  Far-end (render) signal power.
+  // P_echo: Near-end (capture) echo signal power.
+  // P_out:  Signal power at the output of the AEC.
+  // P_a:    Internal signal power at the point before the AEC's non-linear
+  //         processor.
+  struct Metrics {
+    // RERL = ERL + ERLE
+    AudioProcessing::Statistic residual_echo_return_loss;
+
+    // ERL = 10log_10(P_far / P_echo)
+    AudioProcessing::Statistic echo_return_loss;
+
+    // ERLE = 10log_10(P_echo / P_out)
+    AudioProcessing::Statistic echo_return_loss_enhancement;
+
+    // (Pre non-linear processing suppression) A_NLP = 10log_10(P_echo / P_a)
+    AudioProcessing::Statistic a_nlp;
+  };
+
+  // TODO(ajm): discuss the metrics update period.
+  virtual int GetMetrics(Metrics* metrics) = 0;
+
+  // Enables computation and logging of delay values. Statistics are obtained
+  // through |GetDelayMetrics()|.
+  virtual int enable_delay_logging(bool enable) = 0;
+  virtual bool is_delay_logging_enabled() const = 0;
+
+  // The delay metrics consists of the delay |median| and the delay standard
+  // deviation |std|. The values are averaged over the time period since the
+  // last call to |GetDelayMetrics()|.
+  virtual int GetDelayMetrics(int* median, int* std) = 0;
+
+ protected:
+  virtual ~EchoCancellation() {};
+};
+
+// The acoustic echo control for mobile (AECM) component is a low complexity
+// robust option intended for use on mobile devices.
+//
+// Not recommended to be enabled on the server-side.
+class EchoControlMobile {
+ public:
+  // EchoCancellation and EchoControlMobile may not be enabled simultaneously.
+  // Enabling one will disable the other.
+  virtual int Enable(bool enable) = 0;
+  virtual bool is_enabled() const = 0;
+
+  // Recommended settings for particular audio routes. In general, the louder
+  // the echo is expected to be, the higher this value should be set. The
+  // preferred setting may vary from device to device.
+  enum RoutingMode {
+    kQuietEarpieceOrHeadset,
+    kEarpiece,
+    kLoudEarpiece,
+    kSpeakerphone,
+    kLoudSpeakerphone
+  };
+
+  // Sets echo control appropriate for the audio routing |mode| on the device.
+  // It can and should be updated during a call if the audio routing changes.
+  virtual int set_routing_mode(RoutingMode mode) = 0;
+  virtual RoutingMode routing_mode() const = 0;
+
+  // Comfort noise replaces suppressed background noise to maintain a
+  // consistent signal level.
+  virtual int enable_comfort_noise(bool enable) = 0;
+  virtual bool is_comfort_noise_enabled() const = 0;
+
+  // A typical use case is to initialize the component with an echo path from a
+  // previous call. The echo path is retrieved using |GetEchoPath()|, typically
+  // at the end of a call. The data can then be stored for later use as an
+  // initializer before the next call, using |SetEchoPath()|.
+  //
+  // Controlling the echo path this way requires the data |size_bytes| to match
+  // the internal echo path size. This size can be acquired using
+  // |echo_path_size_bytes()|. |SetEchoPath()| causes an entire reset, worth
+  // noting if it is to be called during an ongoing call.
+  //
+  // It is possible that version incompatibilities may result in a stored echo
+  // path of the incorrect size. In this case, the stored path should be
+  // discarded.
+  virtual int SetEchoPath(const void* echo_path, size_t size_bytes) = 0;
+  virtual int GetEchoPath(void* echo_path, size_t size_bytes) const = 0;
+
+  // The returned path size is guaranteed not to change for the lifetime of
+  // the application.
+  static size_t echo_path_size_bytes();
+
+ protected:
+  virtual ~EchoControlMobile() {};
+};
+
+// The automatic gain control (AGC) component brings the signal to an
+// appropriate range. This is done by applying a digital gain directly and, in
+// the analog mode, prescribing an analog gain to be applied at the audio HAL.
+//
+// Recommended to be enabled on the client-side.
+class GainControl {
+ public:
+  virtual int Enable(bool enable) = 0;
+  virtual bool is_enabled() const = 0;
+
+  // When an analog mode is set, this must be called prior to |ProcessStream()|
+  // to pass the current analog level from the audio HAL. Must be within the
+  // range provided to |set_analog_level_limits()|.
+  virtual int set_stream_analog_level(int level) = 0;
+
+  // When an analog mode is set, this should be called after |ProcessStream()|
+  // to obtain the recommended new analog level for the audio HAL. It is the
+  // users responsibility to apply this level.
+  virtual int stream_analog_level() = 0;
+
+  enum Mode {
+    // Adaptive mode intended for use if an analog volume control is available
+    // on the capture device. It will require the user to provide coupling
+    // between the OS mixer controls and AGC through the |stream_analog_level()|
+    // functions.
+    //
+    // It consists of an analog gain prescription for the audio device and a
+    // digital compression stage.
+    kAdaptiveAnalog,
+
+    // Adaptive mode intended for situations in which an analog volume control
+    // is unavailable. It operates in a similar fashion to the adaptive analog
+    // mode, but with scaling instead applied in the digital domain. As with
+    // the analog mode, it additionally uses a digital compression stage.
+    kAdaptiveDigital,
+
+    // Fixed mode which enables only the digital compression stage also used by
+    // the two adaptive modes.
+    //
+    // It is distinguished from the adaptive modes by considering only a
+    // short time-window of the input signal. It applies a fixed gain through
+    // most of the input level range, and compresses (gradually reduces gain
+    // with increasing level) the input signal at higher levels. This mode is
+    // preferred on embedded devices where the capture signal level is
+    // predictable, so that a known gain can be applied.
+    kFixedDigital
+  };
+
+  virtual int set_mode(Mode mode) = 0;
+  virtual Mode mode() const = 0;
+
+  // Sets the target peak |level| (or envelope) of the AGC in dBFs (decibels
+  // from digital full-scale). The convention is to use positive values. For
+  // instance, passing in a value of 3 corresponds to -3 dBFs, or a target
+  // level 3 dB below full-scale. Limited to [0, 31].
+  //
+  // TODO(ajm): use a negative value here instead, if/when VoE will similarly
+  //            update its interface.
+  virtual int set_target_level_dbfs(int level) = 0;
+  virtual int target_level_dbfs() const = 0;
+
+  // Sets the maximum |gain| the digital compression stage may apply, in dB. A
+  // higher number corresponds to greater compression, while a value of 0 will
+  // leave the signal uncompressed. Limited to [0, 90].
+  virtual int set_compression_gain_db(int gain) = 0;
+  virtual int compression_gain_db() const = 0;
+
+  // When enabled, the compression stage will hard limit the signal to the
+  // target level. Otherwise, the signal will be compressed but not limited
+  // above the target level.
+  virtual int enable_limiter(bool enable) = 0;
+  virtual bool is_limiter_enabled() const = 0;
+
+  // Sets the |minimum| and |maximum| analog levels of the audio capture device.
+  // Must be set if and only if an analog mode is used. Limited to [0, 65535].
+  virtual int set_analog_level_limits(int minimum,
+                                      int maximum) = 0;
+  virtual int analog_level_minimum() const = 0;
+  virtual int analog_level_maximum() const = 0;
+
+  // Returns true if the AGC has detected a saturation event (period where the
+  // signal reaches digital full-scale) in the current frame and the analog
+  // level cannot be reduced.
+  //
+  // This could be used as an indicator to reduce or disable analog mic gain at
+  // the audio HAL.
+  virtual bool stream_is_saturated() const = 0;
+
+ protected:
+  virtual ~GainControl() {};
+};
+
+// A filtering component which removes DC offset and low-frequency noise.
+// Recommended to be enabled on the client-side.
+class HighPassFilter {
+ public:
+  virtual int Enable(bool enable) = 0;
+  virtual bool is_enabled() const = 0;
+
+ protected:
+  virtual ~HighPassFilter() {};
+};
+
+// An estimation component used to retrieve level metrics.
+class LevelEstimator {
+ public:
+  virtual int Enable(bool enable) = 0;
+  virtual bool is_enabled() const = 0;
+
+  // Returns the root mean square (RMS) level in dBFs (decibels from digital
+  // full-scale), or alternately dBov. It is computed over all primary stream
+  // frames since the last call to RMS(). The returned value is positive but
+  // should be interpreted as negative. It is constrained to [0, 127].
+  //
+  // The computation follows:
+  // http://tools.ietf.org/html/draft-ietf-avtext-client-to-mixer-audio-level-05
+  // with the intent that it can provide the RTP audio level indication.
+  //
+  // Frames passed to ProcessStream() with an |_energy| of zero are considered
+  // to have been muted. The RMS of the frame will be interpreted as -127.
+  virtual int RMS() = 0;
+
+ protected:
+  virtual ~LevelEstimator() {};
+};
+
+// The noise suppression (NS) component attempts to remove noise while
+// retaining speech. Recommended to be enabled on the client-side.
+//
+// Recommended to be enabled on the client-side.
+class NoiseSuppression {
+ public:
+  virtual int Enable(bool enable) = 0;
+  virtual bool is_enabled() const = 0;
+
+  // Determines the aggressiveness of the suppression. Increasing the level
+  // will reduce the noise level at the expense of a higher speech distortion.
+  enum Level {
+    kLow,
+    kModerate,
+    kHigh,
+    kVeryHigh
+  };
+
+  virtual int set_level(Level level) = 0;
+  virtual Level level() const = 0;
+
+  // Returns the internally computed prior speech probability of current frame
+  // averaged over output channels. This is not supported in fixed point, for
+  // which |kUnsupportedFunctionError| is returned.
+  virtual float speech_probability() const = 0;
+
+ protected:
+  virtual ~NoiseSuppression() {};
+};
+
+// The voice activity detection (VAD) component analyzes the stream to
+// determine if voice is present. A facility is also provided to pass in an
+// external VAD decision.
+//
+// In addition to |stream_has_voice()| the VAD decision is provided through the
+// |AudioFrame| passed to |ProcessStream()|. The |vad_activity_| member will be
+// modified to reflect the current decision.
+class VoiceDetection {
+ public:
+  virtual int Enable(bool enable) = 0;
+  virtual bool is_enabled() const = 0;
+
+  // Returns true if voice is detected in the current frame. Should be called
+  // after |ProcessStream()|.
+  virtual bool stream_has_voice() const = 0;
+
+  // Some of the APM functionality requires a VAD decision. In the case that
+  // a decision is externally available for the current frame, it can be passed
+  // in here, before |ProcessStream()| is called.
+  //
+  // VoiceDetection does _not_ need to be enabled to use this. If it happens to
+  // be enabled, detection will be skipped for any frame in which an external
+  // VAD decision is provided.
+  virtual int set_stream_has_voice(bool has_voice) = 0;
+
+  // Specifies the likelihood that a frame will be declared to contain voice.
+  // A higher value makes it more likely that speech will not be clipped, at
+  // the expense of more noise being detected as voice.
+  enum Likelihood {
+    kVeryLowLikelihood,
+    kLowLikelihood,
+    kModerateLikelihood,
+    kHighLikelihood
+  };
+
+  virtual int set_likelihood(Likelihood likelihood) = 0;
+  virtual Likelihood likelihood() const = 0;
+
+  // Sets the |size| of the frames in ms on which the VAD will operate. Larger
+  // frames will improve detection accuracy, but reduce the frequency of
+  // updates.
+  //
+  // This does not impact the size of frames passed to |ProcessStream()|.
+  virtual int set_frame_size_ms(int size) = 0;
+  virtual int frame_size_ms() const = 0;
+
+ protected:
+  virtual ~VoiceDetection() {};
+};
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_INTERFACE_AUDIO_PROCESSING_H_
diff --git a/src/modules/audio_processing/interface/audio_processing.h b/src/modules/audio_processing/interface/audio_processing.h
deleted file mode 100644
index ee4d06f..0000000
--- a/src/modules/audio_processing/interface/audio_processing.h
+++ /dev/null
@@ -1,597 +0,0 @@
-/*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_INTERFACE_AUDIO_PROCESSING_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_INTERFACE_AUDIO_PROCESSING_H_
-
-#include <stddef.h> // size_t
-
-#include "typedefs.h"
-#include "module.h"
-
-namespace webrtc {
-
-class AudioFrame;
-class EchoCancellation;
-class EchoControlMobile;
-class GainControl;
-class HighPassFilter;
-class LevelEstimator;
-class NoiseSuppression;
-class VoiceDetection;
-
-// The Audio Processing Module (APM) provides a collection of voice processing
-// components designed for real-time communications software.
-//
-// APM operates on two audio streams on a frame-by-frame basis. Frames of the
-// primary stream, on which all processing is applied, are passed to
-// |ProcessStream()|. Frames of the reverse direction stream, which are used for
-// analysis by some components, are passed to |AnalyzeReverseStream()|. On the
-// client-side, this will typically be the near-end (capture) and far-end
-// (render) streams, respectively. APM should be placed in the signal chain as
-// close to the audio hardware abstraction layer (HAL) as possible.
-//
-// On the server-side, the reverse stream will normally not be used, with
-// processing occurring on each incoming stream.
-//
-// Component interfaces follow a similar pattern and are accessed through
-// corresponding getters in APM. All components are disabled at create-time,
-// with default settings that are recommended for most situations. New settings
-// can be applied without enabling a component. Enabling a component triggers
-// memory allocation and initialization to allow it to start processing the
-// streams.
-//
-// Thread safety is provided with the following assumptions to reduce locking
-// overhead:
-//   1. The stream getters and setters are called from the same thread as
-//      ProcessStream(). More precisely, stream functions are never called
-//      concurrently with ProcessStream().
-//   2. Parameter getters are never called concurrently with the corresponding
-//      setter.
-//
-// APM accepts only 16-bit linear PCM audio data in frames of 10 ms. Multiple
-// channels should be interleaved.
-//
-// Usage example, omitting error checking:
-// AudioProcessing* apm = AudioProcessing::Create(0);
-// apm->set_sample_rate_hz(32000); // Super-wideband processing.
-//
-// // Mono capture and stereo render.
-// apm->set_num_channels(1, 1);
-// apm->set_num_reverse_channels(2);
-//
-// apm->high_pass_filter()->Enable(true);
-//
-// apm->echo_cancellation()->enable_drift_compensation(false);
-// apm->echo_cancellation()->Enable(true);
-//
-// apm->noise_reduction()->set_level(kHighSuppression);
-// apm->noise_reduction()->Enable(true);
-//
-// apm->gain_control()->set_analog_level_limits(0, 255);
-// apm->gain_control()->set_mode(kAdaptiveAnalog);
-// apm->gain_control()->Enable(true);
-//
-// apm->voice_detection()->Enable(true);
-//
-// // Start a voice call...
-//
-// // ... Render frame arrives bound for the audio HAL ...
-// apm->AnalyzeReverseStream(render_frame);
-//
-// // ... Capture frame arrives from the audio HAL ...
-// // Call required set_stream_ functions.
-// apm->set_stream_delay_ms(delay_ms);
-// apm->gain_control()->set_stream_analog_level(analog_level);
-//
-// apm->ProcessStream(capture_frame);
-//
-// // Call required stream_ functions.
-// analog_level = apm->gain_control()->stream_analog_level();
-// has_voice = apm->stream_has_voice();
-//
-// // Repeate render and capture processing for the duration of the call...
-// // Start a new call...
-// apm->Initialize();
-//
-// // Close the application...
-// AudioProcessing::Destroy(apm);
-// apm = NULL;
-//
-class AudioProcessing : public Module {
- public:
-  // Creates a APM instance, with identifier |id|. Use one instance for every
-  // primary audio stream requiring processing. On the client-side, this would
-  // typically be one instance for the near-end stream, and additional instances
-  // for each far-end stream which requires processing. On the server-side,
-  // this would typically be one instance for every incoming stream.
-  static AudioProcessing* Create(int id);
-  virtual ~AudioProcessing() {};
-
-  // TODO(andrew): remove this method. We now allow users to delete instances
-  // directly, useful for scoped_ptr.
-  // Destroys a |apm| instance.
-  static void Destroy(AudioProcessing* apm);
-
-  // Initializes internal states, while retaining all user settings. This
-  // should be called before beginning to process a new audio stream. However,
-  // it is not necessary to call before processing the first stream after
-  // creation.
-  virtual int Initialize() = 0;
-
-  // Sets the sample |rate| in Hz for both the primary and reverse audio
-  // streams. 8000, 16000 or 32000 Hz are permitted.
-  virtual int set_sample_rate_hz(int rate) = 0;
-  virtual int sample_rate_hz() const = 0;
-
-  // Sets the number of channels for the primary audio stream. Input frames must
-  // contain a number of channels given by |input_channels|, while output frames
-  // will be returned with number of channels given by |output_channels|.
-  virtual int set_num_channels(int input_channels, int output_channels) = 0;
-  virtual int num_input_channels() const = 0;
-  virtual int num_output_channels() const = 0;
-
-  // Sets the number of channels for the reverse audio stream. Input frames must
-  // contain a number of channels given by |channels|.
-  virtual int set_num_reverse_channels(int channels) = 0;
-  virtual int num_reverse_channels() const = 0;
-
-  // Processes a 10 ms |frame| of the primary audio stream. On the client-side,
-  // this is the near-end (or captured) audio.
-  //
-  // If needed for enabled functionality, any function with the set_stream_ tag
-  // must be called prior to processing the current frame. Any getter function
-  // with the stream_ tag which is needed should be called after processing.
-  //
-  // The |_frequencyInHz|, |_audioChannel|, and |_payloadDataLengthInSamples|
-  // members of |frame| must be valid, and correspond to settings supplied
-  // to APM.
-  virtual int ProcessStream(AudioFrame* frame) = 0;
-
-  // Analyzes a 10 ms |frame| of the reverse direction audio stream. The frame
-  // will not be modified. On the client-side, this is the far-end (or to be
-  // rendered) audio.
-  //
-  // It is only necessary to provide this if echo processing is enabled, as the
-  // reverse stream forms the echo reference signal. It is recommended, but not
-  // necessary, to provide if gain control is enabled. On the server-side this
-  // typically will not be used. If you're not sure what to pass in here,
-  // chances are you don't need to use it.
-  //
-  // The |_frequencyInHz|, |_audioChannel|, and |_payloadDataLengthInSamples|
-  // members of |frame| must be valid.
-  //
-  // TODO(ajm): add const to input; requires an implementation fix.
-  virtual int AnalyzeReverseStream(AudioFrame* frame) = 0;
-
-  // This must be called if and only if echo processing is enabled.
-  //
-  // Sets the |delay| in ms between AnalyzeReverseStream() receiving a far-end
-  // frame and ProcessStream() receiving a near-end frame containing the
-  // corresponding echo. On the client-side this can be expressed as
-  //   delay = (t_render - t_analyze) + (t_process - t_capture)
-  // where,
-  //   - t_analyze is the time a frame is passed to AnalyzeReverseStream() and
-  //     t_render is the time the first sample of the same frame is rendered by
-  //     the audio hardware.
-  //   - t_capture is the time the first sample of a frame is captured by the
-  //     audio hardware and t_pull is the time the same frame is passed to
-  //     ProcessStream().
-  virtual int set_stream_delay_ms(int delay) = 0;
-  virtual int stream_delay_ms() const = 0;
-
-  // Starts recording debugging information to a file specified by |filename|,
-  // a NULL-terminated string. If there is an ongoing recording, the old file
-  // will be closed, and recording will continue in the newly specified file.
-  // An already existing file will be overwritten without warning.
-  static const size_t kMaxFilenameSize = 1024;
-  virtual int StartDebugRecording(const char filename[kMaxFilenameSize]) = 0;
-
-  // Stops recording debugging information, and closes the file. Recording
-  // cannot be resumed in the same file (without overwriting it).
-  virtual int StopDebugRecording() = 0;
-
-  // These provide access to the component interfaces and should never return
-  // NULL. The pointers will be valid for the lifetime of the APM instance.
-  // The memory for these objects is entirely managed internally.
-  virtual EchoCancellation* echo_cancellation() const = 0;
-  virtual EchoControlMobile* echo_control_mobile() const = 0;
-  virtual GainControl* gain_control() const = 0;
-  virtual HighPassFilter* high_pass_filter() const = 0;
-  virtual LevelEstimator* level_estimator() const = 0;
-  virtual NoiseSuppression* noise_suppression() const = 0;
-  virtual VoiceDetection* voice_detection() const = 0;
-
-  struct Statistic {
-    int instant;  // Instantaneous value.
-    int average;  // Long-term average.
-    int maximum;  // Long-term maximum.
-    int minimum;  // Long-term minimum.
-  };
-
-  // Fatal errors.
-  enum Errors {
-    kNoError = 0,
-    kUnspecifiedError = -1,
-    kCreationFailedError = -2,
-    kUnsupportedComponentError = -3,
-    kUnsupportedFunctionError = -4,
-    kNullPointerError = -5,
-    kBadParameterError = -6,
-    kBadSampleRateError = -7,
-    kBadDataLengthError = -8,
-    kBadNumberChannelsError = -9,
-    kFileError = -10,
-    kStreamParameterNotSetError = -11,
-    kNotEnabledError = -12
-  };
-
-  // Warnings are non-fatal.
-  enum Warnings {
-    // This results when a set_stream_ parameter is out of range. Processing
-    // will continue, but the parameter may have been truncated.
-    kBadStreamParameterWarning = -13,
-  };
-
-  // Inherited from Module.
-  virtual WebRtc_Word32 TimeUntilNextProcess() { return -1; };
-  virtual WebRtc_Word32 Process() { return -1; };
-};
-
-// The acoustic echo cancellation (AEC) component provides better performance
-// than AECM but also requires more processing power and is dependent on delay
-// stability and reporting accuracy. As such it is well-suited and recommended
-// for PC and IP phone applications.
-//
-// Not recommended to be enabled on the server-side.
-class EchoCancellation {
- public:
-  // EchoCancellation and EchoControlMobile may not be enabled simultaneously.
-  // Enabling one will disable the other.
-  virtual int Enable(bool enable) = 0;
-  virtual bool is_enabled() const = 0;
-
-  // Differences in clock speed on the primary and reverse streams can impact
-  // the AEC performance. On the client-side, this could be seen when different
-  // render and capture devices are used, particularly with webcams.
-  //
-  // This enables a compensation mechanism, and requires that
-  // |set_device_sample_rate_hz()| and |set_stream_drift_samples()| be called.
-  virtual int enable_drift_compensation(bool enable) = 0;
-  virtual bool is_drift_compensation_enabled() const = 0;
-
-  // Provides the sampling rate of the audio devices. It is assumed the render
-  // and capture devices use the same nominal sample rate. Required if and only
-  // if drift compensation is enabled.
-  virtual int set_device_sample_rate_hz(int rate) = 0;
-  virtual int device_sample_rate_hz() const = 0;
-
-  // Sets the difference between the number of samples rendered and captured by
-  // the audio devices since the last call to |ProcessStream()|. Must be called
-  // if and only if drift compensation is enabled, prior to |ProcessStream()|.
-  virtual int set_stream_drift_samples(int drift) = 0;
-  virtual int stream_drift_samples() const = 0;
-
-  enum SuppressionLevel {
-    kLowSuppression,
-    kModerateSuppression,
-    kHighSuppression
-  };
-
-  // Sets the aggressiveness of the suppressor. A higher level trades off
-  // double-talk performance for increased echo suppression.
-  virtual int set_suppression_level(SuppressionLevel level) = 0;
-  virtual SuppressionLevel suppression_level() const = 0;
-
-  // Returns false if the current frame almost certainly contains no echo
-  // and true if it _might_ contain echo.
-  virtual bool stream_has_echo() const = 0;
-
-  // Enables the computation of various echo metrics. These are obtained
-  // through |GetMetrics()|.
-  virtual int enable_metrics(bool enable) = 0;
-  virtual bool are_metrics_enabled() const = 0;
-
-  // Each statistic is reported in dB.
-  // P_far:  Far-end (render) signal power.
-  // P_echo: Near-end (capture) echo signal power.
-  // P_out:  Signal power at the output of the AEC.
-  // P_a:    Internal signal power at the point before the AEC's non-linear
-  //         processor.
-  struct Metrics {
-    // RERL = ERL + ERLE
-    AudioProcessing::Statistic residual_echo_return_loss;
-
-    // ERL = 10log_10(P_far / P_echo)
-    AudioProcessing::Statistic echo_return_loss;
-
-    // ERLE = 10log_10(P_echo / P_out)
-    AudioProcessing::Statistic echo_return_loss_enhancement;
-
-    // (Pre non-linear processing suppression) A_NLP = 10log_10(P_echo / P_a)
-    AudioProcessing::Statistic a_nlp;
-  };
-
-  // TODO(ajm): discuss the metrics update period.
-  virtual int GetMetrics(Metrics* metrics) = 0;
-
-  // Enables computation and logging of delay values. Statistics are obtained
-  // through |GetDelayMetrics()|.
-  virtual int enable_delay_logging(bool enable) = 0;
-  virtual bool is_delay_logging_enabled() const = 0;
-
-  // The delay metrics consists of the delay |median| and the delay standard
-  // deviation |std|. The values are averaged over the time period since the
-  // last call to |GetDelayMetrics()|.
-  virtual int GetDelayMetrics(int* median, int* std) = 0;
-
- protected:
-  virtual ~EchoCancellation() {};
-};
-
-// The acoustic echo control for mobile (AECM) component is a low complexity
-// robust option intended for use on mobile devices.
-//
-// Not recommended to be enabled on the server-side.
-class EchoControlMobile {
- public:
-  // EchoCancellation and EchoControlMobile may not be enabled simultaneously.
-  // Enabling one will disable the other.
-  virtual int Enable(bool enable) = 0;
-  virtual bool is_enabled() const = 0;
-
-  // Recommended settings for particular audio routes. In general, the louder
-  // the echo is expected to be, the higher this value should be set. The
-  // preferred setting may vary from device to device.
-  enum RoutingMode {
-    kQuietEarpieceOrHeadset,
-    kEarpiece,
-    kLoudEarpiece,
-    kSpeakerphone,
-    kLoudSpeakerphone
-  };
-
-  // Sets echo control appropriate for the audio routing |mode| on the device.
-  // It can and should be updated during a call if the audio routing changes.
-  virtual int set_routing_mode(RoutingMode mode) = 0;
-  virtual RoutingMode routing_mode() const = 0;
-
-  // Comfort noise replaces suppressed background noise to maintain a
-  // consistent signal level.
-  virtual int enable_comfort_noise(bool enable) = 0;
-  virtual bool is_comfort_noise_enabled() const = 0;
-
-  // A typical use case is to initialize the component with an echo path from a
-  // previous call. The echo path is retrieved using |GetEchoPath()|, typically
-  // at the end of a call. The data can then be stored for later use as an
-  // initializer before the next call, using |SetEchoPath()|.
-  //
-  // Controlling the echo path this way requires the data |size_bytes| to match
-  // the internal echo path size. This size can be acquired using
-  // |echo_path_size_bytes()|. |SetEchoPath()| causes an entire reset, worth
-  // noting if it is to be called during an ongoing call.
-  //
-  // It is possible that version incompatibilities may result in a stored echo
-  // path of the incorrect size. In this case, the stored path should be
-  // discarded.
-  virtual int SetEchoPath(const void* echo_path, size_t size_bytes) = 0;
-  virtual int GetEchoPath(void* echo_path, size_t size_bytes) const = 0;
-
-  // The returned path size is guaranteed not to change for the lifetime of
-  // the application.
-  static size_t echo_path_size_bytes();
-
- protected:
-  virtual ~EchoControlMobile() {};
-};
-
-// The automatic gain control (AGC) component brings the signal to an
-// appropriate range. This is done by applying a digital gain directly and, in
-// the analog mode, prescribing an analog gain to be applied at the audio HAL.
-//
-// Recommended to be enabled on the client-side.
-class GainControl {
- public:
-  virtual int Enable(bool enable) = 0;
-  virtual bool is_enabled() const = 0;
-
-  // When an analog mode is set, this must be called prior to |ProcessStream()|
-  // to pass the current analog level from the audio HAL. Must be within the
-  // range provided to |set_analog_level_limits()|.
-  virtual int set_stream_analog_level(int level) = 0;
-
-  // When an analog mode is set, this should be called after |ProcessStream()|
-  // to obtain the recommended new analog level for the audio HAL. It is the
-  // users responsibility to apply this level.
-  virtual int stream_analog_level() = 0;
-
-  enum Mode {
-    // Adaptive mode intended for use if an analog volume control is available
-    // on the capture device. It will require the user to provide coupling
-    // between the OS mixer controls and AGC through the |stream_analog_level()|
-    // functions.
-    //
-    // It consists of an analog gain prescription for the audio device and a
-    // digital compression stage.
-    kAdaptiveAnalog,
-
-    // Adaptive mode intended for situations in which an analog volume control
-    // is unavailable. It operates in a similar fashion to the adaptive analog
-    // mode, but with scaling instead applied in the digital domain. As with
-    // the analog mode, it additionally uses a digital compression stage.
-    kAdaptiveDigital,
-
-    // Fixed mode which enables only the digital compression stage also used by
-    // the two adaptive modes.
-    //
-    // It is distinguished from the adaptive modes by considering only a
-    // short time-window of the input signal. It applies a fixed gain through
-    // most of the input level range, and compresses (gradually reduces gain
-    // with increasing level) the input signal at higher levels. This mode is
-    // preferred on embedded devices where the capture signal level is
-    // predictable, so that a known gain can be applied.
-    kFixedDigital
-  };
-
-  virtual int set_mode(Mode mode) = 0;
-  virtual Mode mode() const = 0;
-
-  // Sets the target peak |level| (or envelope) of the AGC in dBFs (decibels
-  // from digital full-scale). The convention is to use positive values. For
-  // instance, passing in a value of 3 corresponds to -3 dBFs, or a target
-  // level 3 dB below full-scale. Limited to [0, 31].
-  //
-  // TODO(ajm): use a negative value here instead, if/when VoE will similarly
-  //            update its interface.
-  virtual int set_target_level_dbfs(int level) = 0;
-  virtual int target_level_dbfs() const = 0;
-
-  // Sets the maximum |gain| the digital compression stage may apply, in dB. A
-  // higher number corresponds to greater compression, while a value of 0 will
-  // leave the signal uncompressed. Limited to [0, 90].
-  virtual int set_compression_gain_db(int gain) = 0;
-  virtual int compression_gain_db() const = 0;
-
-  // When enabled, the compression stage will hard limit the signal to the
-  // target level. Otherwise, the signal will be compressed but not limited
-  // above the target level.
-  virtual int enable_limiter(bool enable) = 0;
-  virtual bool is_limiter_enabled() const = 0;
-
-  // Sets the |minimum| and |maximum| analog levels of the audio capture device.
-  // Must be set if and only if an analog mode is used. Limited to [0, 65535].
-  virtual int set_analog_level_limits(int minimum,
-                                      int maximum) = 0;
-  virtual int analog_level_minimum() const = 0;
-  virtual int analog_level_maximum() const = 0;
-
-  // Returns true if the AGC has detected a saturation event (period where the
-  // signal reaches digital full-scale) in the current frame and the analog
-  // level cannot be reduced.
-  //
-  // This could be used as an indicator to reduce or disable analog mic gain at
-  // the audio HAL.
-  virtual bool stream_is_saturated() const = 0;
-
- protected:
-  virtual ~GainControl() {};
-};
-
-// A filtering component which removes DC offset and low-frequency noise.
-// Recommended to be enabled on the client-side.
-class HighPassFilter {
- public:
-  virtual int Enable(bool enable) = 0;
-  virtual bool is_enabled() const = 0;
-
- protected:
-  virtual ~HighPassFilter() {};
-};
-
-// An estimation component used to retrieve level metrics.
-class LevelEstimator {
- public:
-  virtual int Enable(bool enable) = 0;
-  virtual bool is_enabled() const = 0;
-
-  // Returns the root mean square (RMS) level in dBFs (decibels from digital
-  // full-scale), or alternately dBov. It is computed over all primary stream
-  // frames since the last call to RMS(). The returned value is positive but
-  // should be interpreted as negative. It is constrained to [0, 127].
-  //
-  // The computation follows:
-  // http://tools.ietf.org/html/draft-ietf-avtext-client-to-mixer-audio-level-05
-  // with the intent that it can provide the RTP audio level indication.
-  //
-  // Frames passed to ProcessStream() with an |_energy| of zero are considered
-  // to have been muted. The RMS of the frame will be interpreted as -127.
-  virtual int RMS() = 0;
-
- protected:
-  virtual ~LevelEstimator() {};
-};
-
-// The noise suppression (NS) component attempts to remove noise while
-// retaining speech. Recommended to be enabled on the client-side.
-//
-// Recommended to be enabled on the client-side.
-class NoiseSuppression {
- public:
-  virtual int Enable(bool enable) = 0;
-  virtual bool is_enabled() const = 0;
-
-  // Determines the aggressiveness of the suppression. Increasing the level
-  // will reduce the noise level at the expense of a higher speech distortion.
-  enum Level {
-    kLow,
-    kModerate,
-    kHigh,
-    kVeryHigh
-  };
-
-  virtual int set_level(Level level) = 0;
-  virtual Level level() const = 0;
-
- protected:
-  virtual ~NoiseSuppression() {};
-};
-
-// The voice activity detection (VAD) component analyzes the stream to
-// determine if voice is present. A facility is also provided to pass in an
-// external VAD decision.
-//
-// In addition to |stream_has_voice()| the VAD decision is provided through the
-// |AudioFrame| passed to |ProcessStream()|. The |_vadActivity| member will be
-// modified to reflect the current decision.
-class VoiceDetection {
- public:
-  virtual int Enable(bool enable) = 0;
-  virtual bool is_enabled() const = 0;
-
-  // Returns true if voice is detected in the current frame. Should be called
-  // after |ProcessStream()|.
-  virtual bool stream_has_voice() const = 0;
-
-  // Some of the APM functionality requires a VAD decision. In the case that
-  // a decision is externally available for the current frame, it can be passed
-  // in here, before |ProcessStream()| is called.
-  //
-  // VoiceDetection does _not_ need to be enabled to use this. If it happens to
-  // be enabled, detection will be skipped for any frame in which an external
-  // VAD decision is provided.
-  virtual int set_stream_has_voice(bool has_voice) = 0;
-
-  // Specifies the likelihood that a frame will be declared to contain voice.
-  // A higher value makes it more likely that speech will not be clipped, at
-  // the expense of more noise being detected as voice.
-  enum Likelihood {
-    kVeryLowLikelihood,
-    kLowLikelihood,
-    kModerateLikelihood,
-    kHighLikelihood
-  };
-
-  virtual int set_likelihood(Likelihood likelihood) = 0;
-  virtual Likelihood likelihood() const = 0;
-
-  // Sets the |size| of the frames in ms on which the VAD will operate. Larger
-  // frames will improve detection accuracy, but reduce the frequency of
-  // updates.
-  //
-  // This does not impact the size of frames passed to |ProcessStream()|.
-  virtual int set_frame_size_ms(int size) = 0;
-  virtual int frame_size_ms() const = 0;
-
- protected:
-  virtual ~VoiceDetection() {};
-};
-}  // namespace webrtc
-
-#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_INTERFACE_AUDIO_PROCESSING_H_
diff --git a/src/modules/audio_processing/level_estimator_impl.cc b/src/modules/audio_processing/level_estimator_impl.cc
index f127d4a..42cac99 100644
--- a/src/modules/audio_processing/level_estimator_impl.cc
+++ b/src/modules/audio_processing/level_estimator_impl.cc
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -113,7 +113,7 @@
 }
 
 int LevelEstimatorImpl::Enable(bool enable) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   return EnableComponent(enable);
 }
 
@@ -130,13 +130,6 @@
   return level->RMS();
 }
 
-int LevelEstimatorImpl::get_version(char* version,
-                                    int version_len_bytes) const {
-  // An empty string is used to indicate no version information.
-  memset(version, 0, version_len_bytes);
-  return apm_->kNoError;
-}
-
 void* LevelEstimatorImpl::CreateHandle() const {
   return new Level;
 }
diff --git a/src/modules/audio_processing/level_estimator_impl.h b/src/modules/audio_processing/level_estimator_impl.h
index c9b7e02..1a06343 100644
--- a/src/modules/audio_processing/level_estimator_impl.h
+++ b/src/modules/audio_processing/level_estimator_impl.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -8,8 +8,8 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_LEVEL_ESTIMATOR_IMPL_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_LEVEL_ESTIMATOR_IMPL_H_
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_LEVEL_ESTIMATOR_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_LEVEL_ESTIMATOR_IMPL_H_
 
 #include "audio_processing.h"
 #include "processing_component.h"
@@ -29,9 +29,6 @@
   // LevelEstimator implementation.
   virtual bool is_enabled() const;
 
-  // ProcessingComponent implementation.
-  virtual int get_version(char* version, int version_len_bytes) const;
-
  private:
   // LevelEstimator implementation.
   virtual int Enable(bool enable);
@@ -49,4 +46,4 @@
 };
 }  // namespace webrtc
 
-#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_LEVEL_ESTIMATOR_IMPL_H_
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_LEVEL_ESTIMATOR_IMPL_H_
diff --git a/src/modules/audio_processing/noise_suppression_impl.cc b/src/modules/audio_processing/noise_suppression_impl.cc
index f899f35..d6162e6 100644
--- a/src/modules/audio_processing/noise_suppression_impl.cc
+++ b/src/modules/audio_processing/noise_suppression_impl.cc
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -41,9 +41,9 @@
       return 2;
     case NoiseSuppression::kVeryHigh:
       return 3;
-    default:
-      return -1;
   }
+  assert(false);
+  return -1;
 }
 }  // namespace
 
@@ -88,7 +88,7 @@
 }
 
 int NoiseSuppressionImpl::Enable(bool enable) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   return EnableComponent(enable);
 }
 
@@ -97,7 +97,7 @@
 }
 
 int NoiseSuppressionImpl::set_level(Level level) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   if (MapSetting(level) == -1) {
     return apm_->kBadParameterError;
   }
@@ -110,18 +110,18 @@
   return level_;
 }
 
-int NoiseSuppressionImpl::get_version(char* version,
-                                      int version_len_bytes) const {
+float NoiseSuppressionImpl::speech_probability() const {
 #if defined(WEBRTC_NS_FLOAT)
-  if (WebRtcNs_get_version(version, version_len_bytes) != 0)
-#elif defined(WEBRTC_NS_FIXED)
-  if (WebRtcNsx_get_version(version, version_len_bytes) != 0)
-#endif
-  {
-      return apm_->kBadParameterError;
+  float probability_average = 0.0f;
+  for (int i = 0; i < num_handles(); i++) {
+    Handle* my_handle = static_cast<Handle*>(handle(i));
+    probability_average += WebRtcNs_prior_speech_probability(my_handle);
   }
-
-  return apm_->kNoError;
+  return probability_average / num_handles();
+#elif defined(WEBRTC_NS_FIXED)
+  // Currently not available for the fixed point implementation.
+  return apm_->kUnsupportedFunctionError;
+#endif
 }
 
 void* NoiseSuppressionImpl::CreateHandle() const {
diff --git a/src/modules/audio_processing/noise_suppression_impl.h b/src/modules/audio_processing/noise_suppression_impl.h
index c9ff9b3..73a2322 100644
--- a/src/modules/audio_processing/noise_suppression_impl.h
+++ b/src/modules/audio_processing/noise_suppression_impl.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -8,8 +8,8 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_NOISE_SUPPRESSION_IMPL_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_NOISE_SUPPRESSION_IMPL_H_
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_NOISE_SUPPRESSION_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_NOISE_SUPPRESSION_IMPL_H_
 
 #include "audio_processing.h"
 #include "processing_component.h"
@@ -28,9 +28,7 @@
 
   // NoiseSuppression implementation.
   virtual bool is_enabled() const;
-
-  // ProcessingComponent implementation.
-  virtual int get_version(char* version, int version_len_bytes) const;
+  float speech_probability() const;
 
  private:
   // NoiseSuppression implementation.
@@ -51,4 +49,4 @@
 };
 }  // namespace webrtc
 
-#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_NOISE_SUPPRESSION_IMPL_H_
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_NOISE_SUPPRESSION_IMPL_H_
diff --git a/src/modules/audio_processing/ns/Android.mk b/src/modules/audio_processing/ns/Android.mk
index 966bcaf..7f06586 100644
--- a/src/modules/audio_processing/ns/Android.mk
+++ b/src/modules/audio_processing/ns/Android.mk
@@ -1,4 +1,4 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 #
 # Use of this source code is governed by a BSD-style license
 # that can be found in the LICENSE file in the root of the source
@@ -36,7 +36,7 @@
 LOCAL_CFLAGS_mips64 := $(MY_WEBRTC_COMMON_DEFS_mips64)
 
 LOCAL_C_INCLUDES := \
-    $(LOCAL_PATH)/interface \
+    $(LOCAL_PATH)/include \
     $(LOCAL_PATH)/../utility \
     $(LOCAL_PATH)/../../.. \
     $(LOCAL_PATH)/../../../common_audio/signal_processing/include \
@@ -67,8 +67,27 @@
 LOCAL_MODULE := libwebrtc_ns_neon
 LOCAL_MODULE_TAGS := optional
 
+ifeq ($(TARGET_ARCH), arm64)
+# new nsx_core_neon.S does not compile with clang or gas.
 LOCAL_SRC_FILES := nsx_core_neon.c
 
+else
+GEN := $(LOCAL_PATH)/nsx_core_neon_offsets.h
+
+# Generate a header file nsx_core_neon_offsets.h which will be included in
+# assembly file nsx_core_neon.S, from file nsx_core_neon_offsets.c.
+$(GEN): $(LOCAL_PATH)/../../../../src/build/generate_asm_header.py \
+            $(intermediates)/nsx_core_neon_offsets.S
+	@python $^ $@ offset_nsx_
+
+$(intermediates)/nsx_core_neon_offsets.S: $(LOCAL_PATH)/nsx_core_neon_offsets.c
+	@$(TARGET_CC) $(addprefix -I, $(LOCAL_INCLUDES)) $(addprefix -isystem ,\
+            $(TARGET_C_INCLUDES)) -S -o $@ $^
+
+LOCAL_GENERATED_SOURCES := $(GEN)
+LOCAL_SRC_FILES := nsx_core_neon.S
+endif
+
 # Flags passed to both C and C++ files.
 LOCAL_CFLAGS := \
     $(MY_WEBRTC_COMMON_DEFS) \
@@ -78,10 +97,12 @@
 LOCAL_CFLAGS_arm := $(MY_WEBRTC_COMMON_DEFS_arm)
 
 LOCAL_C_INCLUDES := \
-    $(LOCAL_PATH)/interface \
+    $(LOCAL_PATH)/include \
     $(LOCAL_PATH)/../../.. \
     $(LOCAL_PATH)/../../../common_audio/signal_processing/include
 
+LOCAL_INCLUDES := $(LOCAL_C_INCLUDES)
+
 ifdef WEBRTC_STL
 LOCAL_NDK_STL_VARIANT := $(WEBRTC_STL)
 LOCAL_SDK_VERSION := 14
diff --git a/src/modules/audio_processing/ns/include/noise_suppression.h b/src/modules/audio_processing/ns/include/noise_suppression.h
new file mode 100644
index 0000000..c9a8e32
--- /dev/null
+++ b/src/modules/audio_processing/ns/include/noise_suppression.h
@@ -0,0 +1,123 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_NS_INCLUDE_NOISE_SUPPRESSION_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_NS_INCLUDE_NOISE_SUPPRESSION_H_
+
+#include "typedefs.h"
+
+typedef struct NsHandleT NsHandle;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+ * This function creates an instance to the noise suppression structure
+ *
+ * Input:
+ *      - NS_inst       : Pointer to noise suppression instance that should be
+ *                        created
+ *
+ * Output:
+ *      - NS_inst       : Pointer to created noise suppression instance
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+int WebRtcNs_Create(NsHandle** NS_inst);
+
+
+/*
+ * This function frees the dynamic memory of a specified noise suppression
+ * instance.
+ *
+ * Input:
+ *      - NS_inst       : Pointer to NS instance that should be freed
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+int WebRtcNs_Free(NsHandle* NS_inst);
+
+
+/*
+ * This function initializes a NS instance and has to be called before any other
+ * processing is made.
+ *
+ * Input:
+ *      - NS_inst       : Instance that should be initialized
+ *      - fs            : sampling frequency
+ *
+ * Output:
+ *      - NS_inst       : Initialized instance
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+int WebRtcNs_Init(NsHandle* NS_inst, WebRtc_UWord32 fs);
+
+/*
+ * This changes the aggressiveness of the noise suppression method.
+ *
+ * Input:
+ *      - NS_inst       : Noise suppression instance.
+ *      - mode          : 0: Mild, 1: Medium , 2: Aggressive
+ *
+ * Output:
+ *      - NS_inst       : Updated instance.
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+int WebRtcNs_set_policy(NsHandle* NS_inst, int mode);
+
+
+/*
+ * This functions does Noise Suppression for the inserted speech frame. The
+ * input and output signals should always be 10ms (80 or 160 samples).
+ *
+ * Input
+ *      - NS_inst       : Noise suppression instance.
+ *      - spframe       : Pointer to speech frame buffer for L band
+ *      - spframe_H     : Pointer to speech frame buffer for H band
+ *      - fs            : sampling frequency
+ *
+ * Output:
+ *      - NS_inst       : Updated NS instance
+ *      - outframe      : Pointer to output frame for L band
+ *      - outframe_H    : Pointer to output frame for H band
+ *
+ * Return value         :  0 - OK
+ *                        -1 - Error
+ */
+int WebRtcNs_Process(NsHandle* NS_inst,
+                     short* spframe,
+                     short* spframe_H,
+                     short* outframe,
+                     short* outframe_H);
+
+/* Returns the internally used prior speech probability of the current frame.
+ * There is a frequency bin based one as well, with which this should not be
+ * confused.
+ *
+ * Input
+ *      - handle        : Noise suppression instance.
+ *
+ * Return value         : Prior speech probability in interval [0.0, 1.0].
+ *                        -1 - NULL pointer or uninitialized instance.
+ */
+float WebRtcNs_prior_speech_probability(NsHandle* handle);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_NS_INCLUDE_NOISE_SUPPRESSION_H_
diff --git a/src/modules/audio_processing/ns/include/noise_suppression_x.h b/src/modules/audio_processing/ns/include/noise_suppression_x.h
new file mode 100644
index 0000000..b6eef90
--- /dev/null
+++ b/src/modules/audio_processing/ns/include/noise_suppression_x.h
@@ -0,0 +1,109 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_NS_INCLUDE_NOISE_SUPPRESSION_X_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_NS_INCLUDE_NOISE_SUPPRESSION_X_H_
+
+#include "typedefs.h"
+
+typedef struct NsxHandleT NsxHandle;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+ * This function creates an instance to the noise reduction structure
+ *
+ * Input:
+ *      - nsxInst       : Pointer to noise reduction instance that should be
+ *                       created
+ *
+ * Output:
+ *      - nsxInst       : Pointer to created noise reduction instance
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+int WebRtcNsx_Create(NsxHandle** nsxInst);
+
+
+/*
+ * This function frees the dynamic memory of a specified Noise Suppression
+ * instance.
+ *
+ * Input:
+ *      - nsxInst       : Pointer to NS instance that should be freed
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+int WebRtcNsx_Free(NsxHandle* nsxInst);
+
+
+/*
+ * This function initializes a NS instance
+ *
+ * Input:
+ *      - nsxInst       : Instance that should be initialized
+ *      - fs            : sampling frequency
+ *
+ * Output:
+ *      - nsxInst       : Initialized instance
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+int WebRtcNsx_Init(NsxHandle* nsxInst, WebRtc_UWord32 fs);
+
+/*
+ * This changes the aggressiveness of the noise suppression method.
+ *
+ * Input:
+ *      - nsxInst       : Instance that should be initialized
+ *      - mode          : 0: Mild, 1: Medium , 2: Aggressive
+ *
+ * Output:
+ *      - nsxInst       : Initialized instance
+ *
+ * Return value         :  0 - Ok
+ *                        -1 - Error
+ */
+int WebRtcNsx_set_policy(NsxHandle* nsxInst, int mode);
+
+/*
+ * This functions does noise suppression for the inserted speech frame. The
+ * input and output signals should always be 10ms (80 or 160 samples).
+ *
+ * Input
+ *      - nsxInst       : NSx instance. Needs to be initiated before call.
+ *      - speechFrame   : Pointer to speech frame buffer for L band
+ *      - speechFrameHB : Pointer to speech frame buffer for H band
+ *      - fs            : sampling frequency
+ *
+ * Output:
+ *      - nsxInst       : Updated NSx instance
+ *      - outFrame      : Pointer to output frame for L band
+ *      - outFrameHB    : Pointer to output frame for H band
+ *
+ * Return value         :  0 - OK
+ *                        -1 - Error
+ */
+int WebRtcNsx_Process(NsxHandle* nsxInst,
+                      short* speechFrame,
+                      short* speechFrameHB,
+                      short* outFrame,
+                      short* outFrameHB);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_NS_INCLUDE_NOISE_SUPPRESSION_X_H_
diff --git a/src/modules/audio_processing/ns/interface/noise_suppression.h b/src/modules/audio_processing/ns/interface/noise_suppression.h
deleted file mode 100644
index 907faf4..0000000
--- a/src/modules/audio_processing/ns/interface/noise_suppression.h
+++ /dev/null
@@ -1,124 +0,0 @@
-/*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_INTERFACE_NOISE_SUPPRESSION_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_INTERFACE_NOISE_SUPPRESSION_H_
-
-#include "typedefs.h"
-
-typedef struct NsHandleT NsHandle;
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-/*
- * This function returns the version number of the code.
- *
- * Input:
- *      - version       : Pointer to a character array where the version
- *                        info is stored.
- *      - length        : Length of version.
- *
- * Return value         :  0 - Ok
- *                        -1 - Error (probably length is not sufficient)
- */
-int WebRtcNs_get_version(char* version, short length);
-
-
-/*
- * This function creates an instance to the noise reduction structure
- *
- * Input:
- *      - NS_inst       : Pointer to noise reduction instance that should be
- *                        created
- *
- * Output:
- *      - NS_inst       : Pointer to created noise reduction instance
- *
- * Return value         :  0 - Ok
- *                        -1 - Error
- */
-int WebRtcNs_Create(NsHandle** NS_inst);
-
-
-/*
- * This function frees the dynamic memory of a specified Noise Reduction
- * instance.
- *
- * Input:
- *      - NS_inst       : Pointer to NS instance that should be freed
- *
- * Return value         :  0 - Ok
- *                        -1 - Error
- */
-int WebRtcNs_Free(NsHandle* NS_inst);
-
-
-/*
- * This function initializes a NS instance
- *
- * Input:
- *      - NS_inst       : Instance that should be initialized
- *      - fs            : sampling frequency
- *
- * Output:
- *      - NS_inst       : Initialized instance
- *
- * Return value         :  0 - Ok
- *                        -1 - Error
- */
-int WebRtcNs_Init(NsHandle* NS_inst, WebRtc_UWord32 fs);
-
-/*
- * This changes the aggressiveness of the noise suppression method.
- *
- * Input:
- *      - NS_inst       : Instance that should be initialized
- *      - mode          : 0: Mild, 1: Medium , 2: Aggressive
- *
- * Output:
- *      - NS_inst       : Initialized instance
- *
- * Return value         :  0 - Ok
- *                        -1 - Error
- */
-int WebRtcNs_set_policy(NsHandle* NS_inst, int mode);
-
-
-/*
- * This functions does Noise Suppression for the inserted speech frame. The
- * input and output signals should always be 10ms (80 or 160 samples).
- *
- * Input
- *      - NS_inst       : NS Instance. Needs to be initiated before call.
- *      - spframe       : Pointer to speech frame buffer for L band
- *      - spframe_H     : Pointer to speech frame buffer for H band
- *      - fs            : sampling frequency
- *
- * Output:
- *      - NS_inst       : Updated NS instance
- *      - outframe      : Pointer to output frame for L band
- *      - outframe_H    : Pointer to output frame for H band
- *
- * Return value         :  0 - OK
- *                        -1 - Error
- */
-int WebRtcNs_Process(NsHandle* NS_inst,
-                     short* spframe,
-                     short* spframe_H,
-                     short* outframe,
-                     short* outframe_H);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif // WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_INTERFACE_NOISE_SUPPRESSION_H_
diff --git a/src/modules/audio_processing/ns/interface/noise_suppression_x.h b/src/modules/audio_processing/ns/interface/noise_suppression_x.h
deleted file mode 100644
index 14443fa..0000000
--- a/src/modules/audio_processing/ns/interface/noise_suppression_x.h
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_INTERFACE_NOISE_SUPPRESSION_X_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_INTERFACE_NOISE_SUPPRESSION_X_H_
-
-#include "typedefs.h"
-
-typedef struct NsxHandleT NsxHandle;
-
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-/*
- * This function returns the version number of the code.
- *
- * Input:
- *      - version       : Pointer to a character array where the version
- *                        info is stored.
- *      - length        : Length of version.
- *
- * Return value         :  0 - Ok
- *                        -1 - Error (probably length is not sufficient)
- */
-int WebRtcNsx_get_version(char* version, short length);
-
-
-/*
- * This function creates an instance to the noise reduction structure
- *
- * Input:
- *      - nsxInst       : Pointer to noise reduction instance that should be
- *                       created
- *
- * Output:
- *      - nsxInst       : Pointer to created noise reduction instance
- *
- * Return value         :  0 - Ok
- *                        -1 - Error
- */
-int WebRtcNsx_Create(NsxHandle** nsxInst);
-
-
-/*
- * This function frees the dynamic memory of a specified Noise Suppression
- * instance.
- *
- * Input:
- *      - nsxInst       : Pointer to NS instance that should be freed
- *
- * Return value         :  0 - Ok
- *                        -1 - Error
- */
-int WebRtcNsx_Free(NsxHandle* nsxInst);
-
-
-/*
- * This function initializes a NS instance
- *
- * Input:
- *      - nsxInst       : Instance that should be initialized
- *      - fs            : sampling frequency
- *
- * Output:
- *      - nsxInst       : Initialized instance
- *
- * Return value         :  0 - Ok
- *                        -1 - Error
- */
-int WebRtcNsx_Init(NsxHandle* nsxInst, WebRtc_UWord32 fs);
-
-/*
- * This changes the aggressiveness of the noise suppression method.
- *
- * Input:
- *      - nsxInst       : Instance that should be initialized
- *      - mode          : 0: Mild, 1: Medium , 2: Aggressive
- *
- * Output:
- *      - nsxInst       : Initialized instance
- *
- * Return value         :  0 - Ok
- *                        -1 - Error
- */
-int WebRtcNsx_set_policy(NsxHandle* nsxInst, int mode);
-
-/*
- * This functions does noise suppression for the inserted speech frame. The
- * input and output signals should always be 10ms (80 or 160 samples).
- *
- * Input
- *      - nsxInst       : NSx instance. Needs to be initiated before call.
- *      - speechFrame   : Pointer to speech frame buffer for L band
- *      - speechFrameHB : Pointer to speech frame buffer for H band
- *      - fs            : sampling frequency
- *
- * Output:
- *      - nsxInst       : Updated NSx instance
- *      - outFrame      : Pointer to output frame for L band
- *      - outFrameHB    : Pointer to output frame for H band
- *
- * Return value         :  0 - OK
- *                        -1 - Error
- */
-int WebRtcNsx_Process(NsxHandle* nsxInst,
-                      short* speechFrame,
-                      short* speechFrameHB,
-                      short* outFrame,
-                      short* outFrameHB);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif // WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_INTERFACE_NOISE_SUPPRESSION_X_H_
diff --git a/src/modules/audio_processing/ns/noise_suppression.c b/src/modules/audio_processing/ns/noise_suppression.c
index d33caa9..6684b82 100644
--- a/src/modules/audio_processing/ns/noise_suppression.c
+++ b/src/modules/audio_processing/ns/noise_suppression.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -15,23 +15,6 @@
 #include "ns_core.h"
 #include "defines.h"
 
-int WebRtcNs_get_version(char* versionStr, short length) {
-  const char version[] = "NS 2.2.0";
-  const short versionLen = (short)strlen(version) + 1; // +1: null-termination
-
-  if (versionStr == NULL) {
-    return -1;
-  }
-
-  if (versionLen > length) {
-    return -1;
-  }
-
-  strncpy(versionStr, version, versionLen);
-
-  return 0;
-}
-
 int WebRtcNs_Create(NsHandle** NS_inst) {
   *NS_inst = (NsHandle*) malloc(sizeof(NSinst_t));
   if (*NS_inst != NULL) {
@@ -63,3 +46,14 @@
   return WebRtcNs_ProcessCore(
       (NSinst_t*) NS_inst, spframe, spframe_H, outframe, outframe_H);
 }
+
+float WebRtcNs_prior_speech_probability(NsHandle* handle) {
+  NSinst_t* self = (NSinst_t*) handle;
+  if (handle == NULL) {
+    return -1;
+  }
+  if (self->initFlag == 0) {
+    return -1;
+  }
+  return self->priorSpeechProb;
+}
diff --git a/src/modules/audio_processing/ns/noise_suppression_x.c b/src/modules/audio_processing/ns/noise_suppression_x.c
index afdea7b..6d27d0e 100644
--- a/src/modules/audio_processing/ns/noise_suppression_x.c
+++ b/src/modules/audio_processing/ns/noise_suppression_x.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -15,23 +15,6 @@
 #include "nsx_core.h"
 #include "nsx_defines.h"
 
-int WebRtcNsx_get_version(char* versionStr, short length) {
-  const char version[] = "NS\t3.1.0";
-  const short versionLen = (short)strlen(version) + 1; // +1: null-termination
-
-  if (versionStr == NULL) {
-    return -1;
-  }
-
-  if (versionLen > length) {
-    return -1;
-  }
-
-  strncpy(versionStr, version, versionLen);
-
-  return 0;
-}
-
 int WebRtcNsx_Create(NsxHandle** nsxInst) {
   *nsxInst = (NsxHandle*)malloc(sizeof(NsxInst_t));
   if (*nsxInst != NULL) {
diff --git a/src/modules/audio_processing/ns/ns.gypi b/src/modules/audio_processing/ns/ns.gypi
index 3e3d2e1..940e330 100644
--- a/src/modules/audio_processing/ns/ns.gypi
+++ b/src/modules/audio_processing/ns/ns.gypi
@@ -1,4 +1,4 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 #
 # Use of this source code is governed by a BSD-style license
 # that can be found in the LICENSE file in the root of the source
@@ -16,15 +16,15 @@
         'apm_util'
       ],
       'include_dirs': [
-        'interface',
+        'include',
       ],
       'direct_dependent_settings': {
         'include_dirs': [
-          'interface',
+          'include',
         ],
       },
       'sources': [
-        'interface/noise_suppression.h',
+        'include/noise_suppression.h',
         'noise_suppression.c',
         'windows_private.h',
         'defines.h',
@@ -40,20 +40,44 @@
         '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
       ],
       'include_dirs': [
-        'interface',
+        'include',
       ],
       'direct_dependent_settings': {
         'include_dirs': [
-          'interface',
+          'include',
         ],
       },
       'sources': [
-        'interface/noise_suppression_x.h',
+        'include/noise_suppression_x.h',
         'noise_suppression_x.c',
         'nsx_defines.h',
         'nsx_core.c',
         'nsx_core.h',
       ],
+      'conditions': [
+        ['target_arch=="arm" and armv7==1', {
+          'dependencies': [ 'ns_neon', ],
+        }],
+      ],
     },
   ],
+  'conditions': [
+    ['target_arch=="arm" and armv7==1', {
+      'targets': [
+        {
+          'target_name': 'ns_neon',
+          'type': '<(library)',
+          'includes': [ '../../../build/arm_neon.gypi', ],
+          'dependencies': [
+            '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+          ],
+          'sources': [
+            'nsx_core_neon.c',
+          ],
+        },
+      ],
+    }],
+  ],
 }
+
+
diff --git a/src/modules/audio_processing/ns/ns_core.c b/src/modules/audio_processing/ns/ns_core.c
index e80f699..2e8cedd 100644
--- a/src/modules/audio_processing/ns/ns_core.c
+++ b/src/modules/audio_processing/ns/ns_core.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -737,7 +737,8 @@
   float   magn[HALF_ANAL_BLOCKL], noise[HALF_ANAL_BLOCKL];
   float   theFilter[HALF_ANAL_BLOCKL], theFilterTmp[HALF_ANAL_BLOCKL];
   float   snrLocPost[HALF_ANAL_BLOCKL], snrLocPrior[HALF_ANAL_BLOCKL];
-  float   probSpeechFinal[HALF_ANAL_BLOCKL], previousEstimateStsa[HALF_ANAL_BLOCKL];
+  float   probSpeechFinal[HALF_ANAL_BLOCKL] = { 0 };
+  float   previousEstimateStsa[HALF_ANAL_BLOCKL];
   float   real[ANAL_BLOCKL_MAX], imag[HALF_ANAL_BLOCKL];
   // Variables during startup
   float   sum_log_i = 0.0;
@@ -1254,31 +1255,29 @@
     for (i = 0; i < inst->magnLen; i++) {
       inst->speechProbHB[i] = probSpeechFinal[i];
     }
-    if (inst->blockInd > END_STARTUP_LONG) {
-      // average speech prob from low band
-      // avg over second half (i.e., 4->8kHz) of freq. spectrum
-      avgProbSpeechHB = 0.0;
-      for (i = inst->magnLen - deltaBweHB - 1; i < inst->magnLen - 1; i++) {
-        avgProbSpeechHB += inst->speechProbHB[i];
-      }
-      avgProbSpeechHB = avgProbSpeechHB / ((float)deltaBweHB);
-      // average filter gain from low band
-      // average over second half (i.e., 4->8kHz) of freq. spectrum
-      avgFilterGainHB = 0.0;
-      for (i = inst->magnLen - deltaGainHB - 1; i < inst->magnLen - 1; i++) {
-        avgFilterGainHB += inst->smooth[i];
-      }
-      avgFilterGainHB = avgFilterGainHB / ((float)(deltaGainHB));
-      avgProbSpeechHBTmp = (float)2.0 * avgProbSpeechHB - (float)1.0;
-      // gain based on speech prob:
-      gainModHB = (float)0.5 * ((float)1.0 + (float)tanh(gainMapParHB * avgProbSpeechHBTmp));
-      //combine gain with low band gain
-      gainTimeDomainHB = (float)0.5 * gainModHB + (float)0.5 * avgFilterGainHB;
-      if (avgProbSpeechHB >= (float)0.5) {
-        gainTimeDomainHB = (float)0.25 * gainModHB + (float)0.75 * avgFilterGainHB;
-      }
-      gainTimeDomainHB = gainTimeDomainHB * decayBweHB;
-    } // end of converged
+    // average speech prob from low band
+    // avg over second half (i.e., 4->8kHz) of freq. spectrum
+    avgProbSpeechHB = 0.0;
+    for (i = inst->magnLen - deltaBweHB - 1; i < inst->magnLen - 1; i++) {
+      avgProbSpeechHB += inst->speechProbHB[i];
+    }
+    avgProbSpeechHB = avgProbSpeechHB / ((float)deltaBweHB);
+    // average filter gain from low band
+    // average over second half (i.e., 4->8kHz) of freq. spectrum
+    avgFilterGainHB = 0.0;
+    for (i = inst->magnLen - deltaGainHB - 1; i < inst->magnLen - 1; i++) {
+      avgFilterGainHB += inst->smooth[i];
+    }
+    avgFilterGainHB = avgFilterGainHB / ((float)(deltaGainHB));
+    avgProbSpeechHBTmp = (float)2.0 * avgProbSpeechHB - (float)1.0;
+    // gain based on speech prob:
+    gainModHB = (float)0.5 * ((float)1.0 + (float)tanh(gainMapParHB * avgProbSpeechHBTmp));
+    //combine gain with low band gain
+    gainTimeDomainHB = (float)0.5 * gainModHB + (float)0.5 * avgFilterGainHB;
+    if (avgProbSpeechHB >= (float)0.5) {
+      gainTimeDomainHB = (float)0.25 * gainModHB + (float)0.75 * avgFilterGainHB;
+    }
+    gainTimeDomainHB = gainTimeDomainHB * decayBweHB;
     //make sure gain is within flooring range
     // flooring bottom
     if (gainTimeDomainHB < inst->denoiseBound) {
diff --git a/src/modules/audio_processing/ns/nsx_core.c b/src/modules/audio_processing/ns/nsx_core.c
index 51bde0c..214b807 100644
--- a/src/modules/audio_processing/ns/nsx_core.c
+++ b/src/modules/audio_processing/ns/nsx_core.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -435,6 +435,18 @@
 Denormalize WebRtcNsx_Denormalize;
 CreateComplexBuffer WebRtcNsx_CreateComplexBuffer;
 
+#if (defined WEBRTC_DETECT_ARM_NEON || defined WEBRTC_ARCH_ARM_NEON)
+// Initialize function pointers for ARM Neon platform.
+static void WebRtcNsx_InitNeon(void) {
+  WebRtcNsx_NoiseEstimation = WebRtcNsx_NoiseEstimationNeon;
+  WebRtcNsx_PrepareSpectrum = WebRtcNsx_PrepareSpectrumNeon;
+  WebRtcNsx_SynthesisUpdate = WebRtcNsx_SynthesisUpdateNeon;
+  WebRtcNsx_AnalysisUpdate = WebRtcNsx_AnalysisUpdateNeon;
+  WebRtcNsx_Denormalize = WebRtcNsx_DenormalizeNeon;
+  WebRtcNsx_CreateComplexBuffer = WebRtcNsx_CreateComplexBufferNeon;
+}
+#endif
+
 // Update the noise estimation information.
 static void UpdateNoiseEstimate(NsxInst_t* inst, int offset) {
   WebRtc_Word32 tmp32no1 = 0;
@@ -1881,13 +1893,19 @@
   int q_domain_to_use = 0;
 
   // Code for ARMv7-Neon platform assumes the following:
+  assert(inst->anaLen > 0);
+  assert(inst->anaLen2 > 0);
   assert(inst->anaLen % 16 == 0);
   assert(inst->anaLen2 % 8 == 0);
+  assert(inst->blockLen10ms > 0);
   assert(inst->blockLen10ms % 16 == 0);
   assert(inst->magnLen == inst->anaLen2 + 1);
 
 #ifdef NS_FILEDEBUG
-  fwrite(spframe, sizeof(short), inst->blockLen10ms, inst->infile);
+  if (fwrite(spframe, sizeof(short),
+             inst->blockLen10ms, inst->infile) != inst->blockLen10ms) {
+    return -1;
+  }
 #endif
 
   // Check that initialization has been done
@@ -2364,7 +2382,10 @@
 
   WebRtcNsx_DataSynthesis(inst, outFrame);
 #ifdef NS_FILEDEBUG
-  fwrite(outframe, sizeof(short), inst->blockLen10ms, inst->outfile);
+  if (fwrite(outframe, sizeof(short),
+             inst->blockLen10ms, inst->outfile) != inst->blockLen10ms) {
+    return -1;
+  }
 #endif
 
   //for H band:
@@ -2440,5 +2461,3 @@
 
   return 0;
 }
-
-
diff --git a/src/modules/audio_processing/ns/nsx_core.h b/src/modules/audio_processing/ns/nsx_core.h
index 0a0faf9..4740c06 100644
--- a/src/modules/audio_processing/ns/nsx_core.h
+++ b/src/modules/audio_processing/ns/nsx_core.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -206,10 +206,26 @@
                                     int16_t* out);
 extern CreateComplexBuffer WebRtcNsx_CreateComplexBuffer;
 
-/****************************************************************************
- * Initialization of the above function pointers for ARM Neon.
- */
-void WebRtcNsx_InitNeon(void);
+#if (defined WEBRTC_DETECT_ARM_NEON) || defined (WEBRTC_ARCH_ARM_NEON)
+// For the above function pointers, functions for generic platforms are declared
+// and defined as static in file nsx_core.c, while those for ARM Neon platforms
+// are declared below and defined in file nsx_core_neon.S.
+void WebRtcNsx_NoiseEstimationNeon(NsxInst_t* inst,
+                                   uint16_t* magn,
+                                   uint32_t* noise,
+                                   int16_t* q_noise);
+void WebRtcNsx_CreateComplexBufferNeon(NsxInst_t* inst,
+                                       int16_t* in,
+                                       int16_t* out);
+void WebRtcNsx_SynthesisUpdateNeon(NsxInst_t* inst,
+                                   int16_t* out_frame,
+                                   int16_t gain_factor);
+void WebRtcNsx_AnalysisUpdateNeon(NsxInst_t* inst,
+                                  int16_t* out,
+                                  int16_t* new_speech);
+void WebRtcNsx_DenormalizeNeon(NsxInst_t* inst, int16_t* in, int factor);
+void WebRtcNsx_PrepareSpectrumNeon(NsxInst_t* inst, int16_t* freq_buff);
+#endif
 
 extern const WebRtc_Word16 WebRtcNsx_kLogTable[9];
 extern const WebRtc_Word16 WebRtcNsx_kLogTableFrac[256];
diff --git a/src/modules/audio_processing/ns/nsx_core_neon.S b/src/modules/audio_processing/ns/nsx_core_neon.S
new file mode 100644
index 0000000..31eea06
--- /dev/null
+++ b/src/modules/audio_processing/ns/nsx_core_neon.S
@@ -0,0 +1,682 @@
+@
+@ Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+@
+@ Use of this source code is governed by a BSD-style license
+@ that can be found in the LICENSE file in the root of the source
+@ tree. An additional intellectual property rights grant can be found
+@ in the file PATENTS.  All contributing project authors may
+@ be found in the AUTHORS file in the root of the source tree.
+@
+
+@ nsx_core_neon.s
+@ This file contains some functions in NS, optimized for ARM Neon
+@ platforms. Reference C code is in file nsx_core.c. Bit-exact.
+
+.arch armv7-a
+.fpu neon
+
+#include "nsx_defines.h"
+#include "nsx_core_neon_offsets.h"
+
+.global WebRtcNsx_NoiseEstimationNeon
+.global WebRtcNsx_PrepareSpectrumNeon
+.global WebRtcNsx_SynthesisUpdateNeon
+.global WebRtcNsx_AnalysisUpdateNeon
+.global WebRtcNsx_DenormalizeNeon
+.global WebRtcNsx_CreateComplexBufferNeon
+
+@ void NoiseEstimationNeon(NsxInst_t* inst,
+@                          uint16_t* magn,
+@                          uint32_t* noise,
+@                          int16_t* q_noise);
+
+@ Register usage (across major loops of NoiseEstimationNeon()):
+@ r0-r3: function arguments, and scratch registers.
+@ r4: &inst
+@ r5: &noiseEstLogQuantile[]
+@ r6: inst->magnLen
+@ r7: offset
+@ r8: s, the loop counter for the LOOP_SIMULT
+@ r9: &inst->noiseEstDensity[]
+@ r10: &inst->noiseEstCounter[]
+@ r11: countDiv
+@ r12: i, the loop counter for LOOP_NOISEESTIMATION_MAGNLEN_INNER
+
+WebRtcNsx_NoiseEstimationNeon:
+.fnstart
+.save {r4-r11, r14}
+.vsave {d8-d15}
+.pad #(16 + (HALF_ANAL_BLOCKL + 3) / 4 * 8)
+
+  push {r4-r11, r14}
+  vpush {d8-d15}
+  sub sp, #(16 + (HALF_ANAL_BLOCKL + 3) / 4 * 8)
+
+@ [sp, #0]: logval
+@ [sp, #4]: noise
+@ [sp, #8]: q_noise
+@ [sp, #12]: factor
+@ [sp, #16 ~ #(16 + (HALF_ANAL_BLOCKL + 3) / 4 * 8)]: lmagn[HALF_ANAL_BLOCKL]
+
+  str r2, [sp, #4]            @ noise
+  str r3, [sp, #8]            @ q_noise
+  movw r4, #offset_nsx_normData
+  ldr r2, [r0, #offset_nsx_stages]            @ inst->stages
+  ldr r4, [r0, r4]            @ inst->normData
+  ldr r12, =WebRtcNsx_kLogTable
+  subs r3, r2, r4             @ tabind = inst->stages - inst->normData;
+  ldr r5, [r0, #offset_nsx_magnLen]            @ magnLen
+  rsblt r3, #0
+  lsl r3, #1
+  ldrh r3, [r12, r3]          @ logval = WebRtcNsx_kLogTable[tabind];
+  add r12, sp, #16            @ lmagn[]
+  rsblt r3, #0                @ logval = -WebRtcNsx_kLogTable[-tabind];
+  str r3, [sp]
+  vdup.16 q15, r3
+
+  ldr r9, =WebRtcNsx_kLogTableFrac
+
+LOOP_SET_LMAGN:
+  ldrh r2, [r1], #2           @ magn[i]
+  cmp r2, #0
+  streqh r3, [r12], #2        @ lmagn[i] = logval;
+  beq CHECK_LMAGN_COUNTER
+
+  clz r6, r2
+  mov r4, r6                  @ zeros
+  rsb r6, #31
+  lsl r2, r4
+  ubfx r4, r2, #23, #8
+  mov r2, r4, lsl #1
+  ldrh r4, [r9, r2]           @ WebRtcNsx_kLogTableFrac[frac]
+  add r7, r4, r6, lsl #8      @ log2
+  movw r2, #22713             @ log2_const
+  smulbb r2, r7, r2
+  add r2, r3, r2, lsr #15
+  strh r2, [r12], #2          @ lmagn[i]
+
+CHECK_LMAGN_COUNTER:
+  subs r5, #1
+  bgt LOOP_SET_LMAGN
+
+  movw r3, #21845             @ width_factor
+  vdup.16 q5, r3
+  vmov.s16 q14, #WIDTH_Q8
+
+  movw r5, #offset_nsx_noiseEstLogQuantile
+  movw r7, #offset_nsx_blockIndex
+  movw r9, #offset_nsx_noiseEstDensity
+  add r5, r0
+  ldr r6, [r0, #offset_nsx_magnLen]
+  ldr r7, [r0, r7]
+  add r9, r0
+  cmp r7, #END_STARTUP_LONG
+  add r10, r0, #offset_nsx_noiseEstCounter
+  movge r7, #FACTOR_Q7
+  movlt r7, #FACTOR_Q7_STARTUP
+  mov r4, r0
+  str r7, [sp, #12]           @ factor
+  mov r8, #SIMULT
+  mov r7, #0
+
+LOOP_SIMULT:
+  ldrsh r1, [r10]             @ inst->noiseEstCounter[s]
+  ldr r3, =WebRtcNsx_kCounterDiv
+  mov r11, r1, lsl #1         @ counter
+  ldrh r11, [r3, r11]         @ countDiv = WebRtcNsx_kCounterDiv[counter];
+  sub r12, r6, #1             @ Loop counter.
+  smulbb r3, r1, r11          @ countProd
+  vdup.16 q11, r11
+
+  vqrdmulh.s16 q11, q5, q11   @ WEBRTC_SPL_MUL_16_16_RSFT_WITH_ROUND(
+                              @   width_factor, countDiv, 15);
+  vdup.16 d24, r11
+  vdup.16 d25, r3
+
+  ldr r3, [sp, #12]           @ factor
+  add r1, sp, #16             @ &lmagn[0]
+  vdup.16 q9, r3
+  vmov.i16 q13, #512
+  vmov.i16 q7, #15
+  vmov.i32 q6, #FACTOR_Q16
+
+LOOP_NOISEESTIMATION_MAGNLEN_INNER:
+  vld1.16 {q0}, [r9]          @ noiseEstDensity[offset + i]
+
+  @ Compute delta in the next two blocks.
+  vclz.i16 q4, q0
+  vsub.i16 q4, q4, q7         @ Value of the shift factors; likely negative.
+  vmovl.s16 q3, d8
+  vmovl.s16 q2, d9
+
+  vshl.s32 q1, q6, q3
+  vmovn.i32 d8, q1            @ d8 holds shifted FACTOR_Q16.
+  vshl.s32 q1, q6, q2
+  vcgt.s16 q3, q0, q13        @ Compare noiseEstDensity to 512.
+  vmovn.i32 d9, q1            @ d9 holds shifted FACTOR_Q16.
+  vmov.i16 q1, q9
+  vbit.s16 q1, q4, q3         @ If bigger than 512, delta = shifted FACTOR_Q16.
+
+  vmull.s16 q8, d3, d24
+  vmull.s16 q4, d2, d24
+  vshrn.i32 d2, q4, #14
+  vshrn.i32 d3, q8, #14
+
+  vrshr.s16 q3, q1, #1
+  vrshr.s16 q8, q1, #2
+  vmull.s16 q4, d7, d28
+  vmull.s16 q3, d6, d28
+  vld1.16 {q10}, [r5]         @ inst->noiseEstLogQuantile[offset + i]
+  vshrn.i32 d4, q3, #1
+  vshrn.i32 d5, q4, #1
+
+  vld1.16 {q3}, [r1]!         @ lmagn[i]
+  vsub.i16 q4, q10, q2
+  vadd.i16 q8, q10, q8
+  vsub.i16 q2, q3, q10
+  vmax.s16 q4, q4, q15
+  vcgt.s16 q1, q2, #0
+  vbit q10, q8, q1
+  vbif q10, q4, q1
+
+  vsub.i16 q1, q3, q10
+  vst1.16 {q10}, [r5]!        @ inst->noiseEstLogQuantile[offset + i]
+  vabs.s16 q4, q1
+  vqrdmulh.s16 d2, d0, d25
+  vqrdmulh.s16 d3, d1, d25
+  vcgt.s16 q4, q14, q4
+  vadd.i16 q1, q1, q11
+  vbit q0, q1, q4
+  subs r12, #8
+  vst1.16 {q0}, [r9]!         @ noiseEstDensity[offset + i]
+  bgt LOOP_NOISEESTIMATION_MAGNLEN_INNER
+
+@
+@ Last iteration over magnitude spectrum.
+@
+
+COMPUTE_DELTA:
+  ldrsh r2, [r9]              @ inst->noiseEstDensity[offset + i]
+  cmp r2, #512
+  bgt COMPUTE_DELTA_BIGGER_DENSITY
+
+  movw r2, #offset_nsx_blockIndex
+  ldr r0, [r4, r2]
+  cmp r0, #END_STARTUP_LONG
+  movge r0, #FACTOR_Q7          @ delta
+  movlt r0, #FACTOR_Q7_STARTUP  @ delta
+  b UPDATE_LOG_QUANTILE_ESTIMATE
+
+COMPUTE_DELTA_BIGGER_DENSITY:
+  clz r2, r2
+  rsb r0, r2, #31             @ 14 - factor
+  mov r2, #FACTOR_Q16
+  mov r0, r2, lsr r0          @ FACTOR_Q16 >> (14 - factor)
+
+UPDATE_LOG_QUANTILE_ESTIMATE:
+  smulbb r12, r0, r11
+  ldrsh r1, [r1]              @ lmagn[i]
+  ubfx r12, r12, #14, #16     @ tmp16
+  ldrsh r2, [r5]              @ inst->noiseEstLogQuantile[offset + i]
+  cmp r1, r2
+  bgt UPDATE_LOG_QUANTILE_ESTIMATE_BIGGER_LMAGN
+
+  add r12, #1
+  ldr r3, [sp]                @ logval
+  mov r0, r12, lsr #1         @ tmp16no1
+  mov r12, #3
+  smulbb r12, r0, r12         @ tmp16no2
+  sub r2, r12, lsr #1
+  cmp r3, r2
+  ldrgt r2, [sp]
+  ldrgt r3, [sp]
+  b UPDATE_LOG_QUANTILE_ESTIMATE_STORE
+
+UPDATE_LOG_QUANTILE_ESTIMATE_BIGGER_LMAGN:
+  add r3, r12, #2
+  add r2, r3, lsr #2
+
+UPDATE_LOG_QUANTILE_ESTIMATE_STORE:
+  vmov.s16 r0, d25[0]         @ countProd
+  strh r2, [r5]
+  add r5, #2                  @ increment &noiseEstLogQuantile[offset + i]
+
+UPDATE_DENSITY_ESTIMATE:
+  subs r12, r1, r2
+  rsblt r12, #0
+  cmp r12, #WIDTH_Q8
+  bge UPDATE_DENSITY_ESTIMATE_CHECK_COUNTER
+
+  movw r3, #21845             @ width_factor
+  ldrh r12, [r9]              @ inst->noiseEstDensity[offset + i]
+  smulbb r2, r3, r11
+  smulbb r1, r12, r0
+  add r0, r2, #1 << 14        @ Rounding
+  add r12, r1, #1 << 14
+  mov r1, r12, lsr #15
+  add r3, r1, r0, lsr #15
+  strh r3, [r9]               @ inst->noiseEstDensity[offset + i]
+
+UPDATE_DENSITY_ESTIMATE_CHECK_COUNTER:
+  add r9, #2                  @ updata &noiseEstDensity[offset + i]
+  ldrsh r3, [r10]             @ inst->noiseEstCounter[s]
+  cmp r3, #END_STARTUP_LONG
+  blt POST_UPDATE_DENSITY_ESTIMATE
+
+  movw r2, #offset_nsx_blockIndex
+  mov r12, #0
+  ldr r2, [r4, r2]
+  strh r12, [r10]
+  cmp r2, #END_STARTUP_LONG
+  blt POST_UPDATE_DENSITY_ESTIMATE
+
+  mov r0, r4
+  mov r1, r7
+  bl UpdateNoiseEstimateNeon
+
+POST_UPDATE_DENSITY_ESTIMATE:
+  ldrh r3, [r10]
+  add r3, #1
+  strh r3, [r10], #2
+  subs r8, #1
+  add r7, r6                  @ offset += inst->magnLen;
+  bgt LOOP_SIMULT
+
+  movw r2, #offset_nsx_blockIndex
+  ldr r2, [r4, r2]
+  cmp r2, #END_STARTUP_LONG
+  bge UPDATE_NOISE
+
+  sub r1, r7, r6
+  mov r0, r4
+  bl UpdateNoiseEstimateNeon
+
+UPDATE_NOISE:
+  movw r1, #offset_nsx_noiseEstQuantile
+  add r1, r4
+  ldr r2, [sp, #4]
+
+@ Initial value of loop counter r6 = inst->magnLen.
+LOOP_UPDATE_NOISE:
+  ldrsh r0, [r1], #2
+  subs r6, #1
+  str r0, [r2], #4
+  bgt LOOP_UPDATE_NOISE
+
+UPDATE_Q_NOISE:
+  movw r2, #offset_nsx_qNoise
+  ldr r1, [sp, #8]
+  ldrh r2, [r4, r2]
+  strh r2, [r1]
+
+  add sp, #(16 + (HALF_ANAL_BLOCKL + 3) / 4 * 8)
+  vpop {d8-d15}
+  pop {r4-r11, pc}
+.fnend
+
+@ static void UpdateNoiseEstimateNeon(NsxInst_t* inst, int offset);
+@ Neon registers touched: q0-q3, q8-q13.
+UpdateNoiseEstimateNeon:
+.fnstart
+.save {r4, r5, r6, r14}
+
+  push {r4, r5, r6, r14}
+  mov r5, r0
+
+  vmov.i32 q10, #21
+  vmov.i32 q11, #0x1FFFFF
+  vmov.i32 q9, #0x200000
+
+  movw r0, #offset_nsx_noiseEstLogQuantile
+  movw r6, #offset_nsx_magnLen
+  add r0, r5                  @ &inst->noiseEstLogQuantile
+  add r4, r0, r1, lsl #1      @ &inst->noiseEstLogQuantile[offset]
+  ldrsh r6, [r5, r6]          @ &inst->magnLen
+
+  mov r0, r4
+  mov r1, r6
+  bl WebRtcSpl_MaxValueW16
+
+  sub r12, r6, #1             @ Loop counter: inst->magnLen - 1.
+
+  movw r6, #11819             @ kExp2Const in Q13
+  movw r2, #offset_nsx_noiseEstQuantile
+  vdup.16 d16, r6
+  smulbb r3, r6, r0
+  add r0, r3, #1 << 20        @ Round
+  movw r1, #offset_nsx_qNoise
+  mov r0, r0, lsr #21
+  rsb r0, r0, #14             @ 14 - (round(kExp2Const * tmp16) >> 21)
+  add r2, r5                  @ &inst->noiseEstQuantile
+  vdup.32 q13, r0
+  str r0, [r5, r1]
+
+
+LOOP_UPDATE:
+  vld1.16 {d0, d1}, [r4]!     @ &inst->noiseEstLogQuantile[offset + i]
+  vmull.s16 q1, d0, d16
+  vmull.s16 q0, d1, d16
+  vshr.s32 q3, q1, #21
+  vshr.s32 q2, q0, #21
+  vand q1, q1, q11
+  vand q0, q0, q11
+  vsub.i32 q3, q3, q10
+  vsub.i32 q2, q2, q10
+  vorr q1, q1, q9
+  vorr q0, q0, q9
+  vadd.i32 q3, q3, q13
+  vadd.i32 q2, q2, q13
+  vshl.s32 q1, q1, q3
+  vshl.s32 q0, q0, q2
+  vqmovn.s32 d1, q0
+  vqmovn.s32 d0, q1
+  subs r12, #8
+  vst1.16 {d0, d1}, [r2]!
+  bgt LOOP_UPDATE
+
+POST_LOOP_MAGNLEN:
+  ldrh r1, [r4]
+  smulbb r3, r6, r1           @ kExp2Const * ptr_noiseEstLogQuantile[offset + i]
+  mov r12, #0x00200000
+  bfi r12, r3, #0, #21        @ tmp32no1 = 0x00200000 | (tmp32no2 & 0x001FFFFF);
+  rsb r0, #21                 @ 21 - &inst->qNoise
+  sub r14, r0, r3, lsr #21    @ -tmp16
+  mov r0, r12, lsr r14
+  ssat r3, #16, r0
+  strh r3, [r2]
+
+  pop {r4, r5, r6, pc}
+.fnend
+
+@ void PrepareSpectrumNeon(NsxInst_t* inst, int16_t* freq_buf);
+WebRtcNsx_PrepareSpectrumNeon:
+.fnstart
+.save {r4-r8}
+
+  push {r4-r8}
+
+  movw r2, #offset_nsx_real
+  movw r12, #offset_nsx_noiseSupFilter
+  movw r4, #offset_nsx_imag
+  movw r5, #offset_nsx_magnLen
+
+  add r2, r0                  @ &inst->real[0]
+  add r4, r0                  @ &inst->image[0]
+  mov r9, r4                  @ &inst->image[0]
+  mov r3, r2                  @ &inst->real[0]
+  ldr r5, [r0, r5]            @ inst->magnLen
+  add r6, r4, #2              @ &inst->image[1]
+  sub r5, #1
+  add r12, r0                 @ &inst->noiseSupFilter[0]
+  add r5, r2, r5, lsl #1      @ &inst->real[inst->magnLen - 1]
+
+LOOP_MAGNLEN:
+  @ Filter the elements.
+  vld1.16 {d20, d21}, [r2]    @ inst->real[]
+  vld1.16 {d24, d25}, [r12]!  @ inst->noiseSupFilter[]
+  vld1.16 {d22, d23}, [r4]    @ inst->imag[]
+  vmull.s16 q0, d20, d24
+  vmull.s16 q1, d21, d25
+  vmull.s16 q2, d22, d24
+  vmull.s16 q3, d23, d25
+  vshrn.s32 d0, q0, #14
+  vshrn.s32 d1, q1, #14
+  vshrn.s32 d2, q2, #14
+  vshrn.s32 d3, q3, #14
+  vst1.16 {d0, d1}, [r2]!
+  vst1.16 {d2, d3}, [r4]!
+  cmp r2, r5
+  bcc LOOP_MAGNLEN
+
+  @ Last two elements to filter:
+  ldrh r7, [r2]
+  ldrh r8, [r12]
+  ldrh r5, [r4]
+  smulbb r7, r7, r8
+  smulbb r5, r5, r8
+  mov r7, r7, lsr #14
+  mov r8, r5, lsr #14
+  strh r7, [r2]
+  strh r8, [r4]
+
+  ldr r5, [r0, #offset_nsx_anaLen2]            @ inst->anaLen2
+  ldr r7, [r0, #offset_nsx_anaLen]            @ inst->anaLen
+  add r5, r3, r5, lsl #1      @ &inst->real[inst->anaLen2]
+
+  ldrh r2, [r3], #2           @ inst->real[0]
+  ldrh r0, [r9]               @ inst->imag[0]
+  strh r2, [r1], #2           @ Store to freq_buf[0]
+  rsb r0, r0, #0
+  strh r0, [r1], #2           @ Store to freq_buf[1]. Now r1 -> &freq_buf[2]
+
+  add r2, r1, r7, lsl #2
+  sub r2, #36                 @ &freq_buf[-16]
+
+  mvn r12, #0x1F              @ -32
+
+@ At the last iteration, &freq_buf[inst->anaLen + 1] will be written to by both
+@ the vst1 instructions. Only the 2nd vst1 instruction has the correct value
+@ (-inst->imag[inst->anaLen2]), so the order of the two vst1's is important.
+LOOP_ANALEN2:
+  vld1.16 {d0, d1}, [r3]!     @ inst->real[], starting from inst->real[1]
+  vld1.16 {d2, d3}, [r6]!     @ inst->imag[], starting from inst->imag[1]
+  vmov.s16 d4, d0
+  vmov.s16 d6, d1
+  vneg.s16 d5, d2
+  vneg.s16 d7, d3
+  vzip.16 d0, d2
+  vzip.16 d1, d3
+  vzip.16 d4, d5
+  vzip.16 d6, d7
+  vrev64.32 d16, d3
+  vrev64.32 d17, d1
+  vrev64.32 d18, d2
+  vrev64.32 d19, d0
+  cmp r3, r5
+  vst1.16 {d16, d17, d18, d19}, [r2], r12
+  vst1.16 {d4, d5, d6, d7}, [r1]!
+  bls LOOP_ANALEN2
+
+  pop {r4-r8}
+  bx r14
+.fnend
+
+@ void WebRtcNsx_DenormalizeNeon(NsxInst_t* inst, int16_t* in, int factor);
+WebRtcNsx_DenormalizeNeon:
+.fnstart
+  movw r12, #offset_nsx_normData
+  movw r3, #offset_nsx_real
+  ldr r12, [r0, r12]          @ inst->normData
+  add r3, r0                  @ &inst->real[0]
+  sub r2, r12
+  vdup.32 q10, r2
+
+  movw r2, #offset_nsx_anaLen
+  ldrsh r2, [r0, r2]          @ inst->anaLen
+  add r0, r3, r2, lsl #1      @ &inst->real[inst->anaLen]
+
+LOOP_ANALEN:
+  vld2.16 {d0, d1}, [r1]!     @ &in[]
+  vld2.16 {d2, d3}, [r1]!     @ &in[]
+  vmovl.s16 q2, d0
+  vmovl.s16 q3, d2
+  vshl.s32 q2, q10
+  vshl.s32 q3, q10
+  vqmovn.s32 d0, q2
+  vqmovn.s32 d1, q3
+  vst1.16 {d0, d1}, [r3]!     @ inst->real[]
+  cmp r3, r0
+  blt LOOP_ANALEN
+
+  bx r14
+.fnend
+
+@ void SynthesisUpdateNeon(NsxInst_t* inst,
+@                          int16_t* out_frame,
+@                          int16_t gain_factor);
+WebRtcNsx_SynthesisUpdateNeon:
+.fnstart
+.save {r4, r5}
+  push {r4, r5}
+
+  vdup.16 d31, r2
+
+  movw r2, #offset_nsx_anaLen
+  movw r4, #offset_nsx_real
+  movw r12, #offset_nsx_synthesisBuffer
+
+  ldrsh r5, [r0, r2]          @ inst->anaLen
+  add r12, r0                 @ &inst->synthesisBuffer[0];
+  ldr r3, [r0, #offset_nsx_window]            @ &inst->window[0]
+  add r4, r0                  @ &inst->real[0]
+  add r5, r12, r5, lsl #1     @ &inst->synthesisBuffer[inst->anaLen]
+
+  mov r2, r12                 @ &inst->synthesisBuffer[0];
+
+LOOP_SYNTHESIS:
+  vld1.16 {d0, d1}, [r4]!     @ inst->real[]
+  vld1.16 {d2, d3}, [r3]!     @ inst->window[]
+  vld1.16 {d4, d5}, [r2]      @ inst->synthesisBuffer[];
+  vmull.s16 q3, d0, d2
+  vmull.s16 q8, d1, d3
+  vrshrn.i32 d0, q3, #14
+  vrshrn.i32 d1, q8, #14
+  vmull.s16 q3, d31, d0
+  vmull.s16 q8, d31, d1
+  vqrshrn.s32 d0, q3, #13
+  vqrshrn.s32 d1, q8, #13
+  vqadd.s16 d4, d0
+  vqadd.s16 d5, d1
+  vst1.16 {d4, d5}, [r2]!
+  cmp r2, r5
+  blt LOOP_SYNTHESIS
+
+POST_LOOP_SYNTHESIS:
+  movw r3, #offset_nsx_blockLen10ms
+  ldr r2, [r0, r3]
+  mov r3, r12                 @ &inst->synthesisBuffer[0];
+  add r0, r12, r2, lsl #1     @ &inst->synthesisBuffer[inst->blockLen10ms]
+
+LOOP_BLOCKLEN10MS:
+  vld1.16 {q0, q1}, [r3]!     @ inst->synthesisBuffer[];
+  cmp r3, r0
+  vst1.16 {q0, q1}, [r1]!     @ out_frame[]
+  blt LOOP_BLOCKLEN10MS
+
+  cmp r0, r5
+  bge POST_LOOP_MEMCPY
+
+LOOP_MEMCPY:
+  vld1.16 {q0, q1}, [r0]!     @ inst->synthesisBuffer[i + inst->blockLen10ms]
+  cmp r0, r5
+  vst1.16 {q0, q1}, [r12]!    @ inst->synthesisBuffer[i]
+  blt LOOP_MEMCPY
+
+POST_LOOP_MEMCPY:
+  cmp r12, r5
+  vmov.i16 q10, #0
+  vmov.i16 q11, #0
+  bge EXIT_SYNTHESISUPDATE
+
+LOOP_ZEROSARRAY:
+  vst1.16 {q10, q11}, [r12]!  @ inst->synthesisBuffer[i + inst->anaLen]
+  cmp r12, r5
+  blt LOOP_ZEROSARRAY
+
+EXIT_SYNTHESISUPDATE:
+  pop {r4, r5}
+  bx r14
+
+.fnend
+
+@ void AnalysisUpdateNeon(NsxInst_t* inst, int16_t* out, int16_t* new_speech);
+WebRtcNsx_AnalysisUpdateNeon:
+.fnstart
+.save {r4-r6}
+  push {r4-r6}
+
+  movw r3, #offset_nsx_analysisBuffer
+  movw r4, #offset_nsx_anaLen
+  movw r12, #offset_nsx_blockLen10ms
+  add r3, r0                  @ &inst->analysisBuffer[0]
+  ldrsh r4, [r0, r4]          @ inst->anaLen
+  ldr r12, [r0, r12]          @ inst->blockLen10ms
+  sub r6, r4, r12
+  add r6, r3, r6, lsl #1      @ &inst->analysisBuffer[inst->anaLen
+                              @     - inst->blockLen10ms]
+  cmp r3, r6
+  mov r5, r3
+  bge POST_LOOP_MEMCPY_1
+
+  add r12, r3, r12, lsl #1    @ &inst->analysisBuffer[inst->blockLen10ms]
+
+LOOP_MEMCPY_1:
+  vld1.16 {q10, q11}, [r12]!  @ inst->analysisBuffer[i + inst->blockLen10ms]
+  vst1.16 {q10, q11}, [r5]!   @ inst->analysisBuffer[i]
+  cmp r5, r6
+  blt LOOP_MEMCPY_1
+
+POST_LOOP_MEMCPY_1:
+  add r12, r3, r4, lsl #1     @ &inst->analysisBuffer[inst->anaLen]
+  cmp r5, r12
+  bge POST_LOOP_MEMCPY_2
+
+LOOP_MEMCPY_2:
+  vld1.16 {q10, q11}, [r2]!   @ new_speech[i]
+  vst1.16 {q10, q11}, [r5]!   @ inst->analysisBuffer[
+                              @     i + inst->anaLen - inst->blockLen10ms]
+  cmp r5, r12
+  blt LOOP_MEMCPY_2
+
+POST_LOOP_MEMCPY_2:
+  add r4, r1, r4, lsl #1      @ &out[inst->anaLen]
+  cmp r1, r4
+  ldr r2, [r0, #offset_nsx_window]            @ &inst->window[0]
+  bge POST_LOOP_WINDOW_DATA
+
+LOOP_WINDOW_DATA:
+  vld1.16 {d4, d5}, [r3]!     @ inst->analysisBuffer[]
+  vld1.16 {d6, d7}, [r2]!     @ inst->window[]
+  vmull.s16 q0, d4, d6
+  vmull.s16 q1, d5, d7
+  vrshrn.i32 d4, q0, #14
+  vrshrn.i32 d5, q1, #14
+  vst1.16 {d4, d5}, [r1]!     @ out[]
+  cmp r1, r4
+  blt LOOP_WINDOW_DATA
+
+POST_LOOP_WINDOW_DATA:
+  pop {r4-r6}
+  bx r14
+.fnend
+
+@ void CreateComplexBufferNeon(NsxInst_t* inst, int16_t* in, int16_t* out);
+WebRtcNsx_CreateComplexBufferNeon:
+.fnstart
+  movw r3, #offset_nsx_anaLen
+  movw r12, #offset_nsx_normData
+  ldrsh r3, [r0, r3]                  @ inst->anaLen
+  ldr r12, [r0, r12]                  @ inst->normData
+  add r3, r1, r3, lsl #1              @ &in[inst->anaLen]
+
+  vmov.i16 d7, #0                     @ For writing to imaginary parts.
+  vmov.i16 d5, #0                     @ For writing to imaginary parts.
+  vdup.i16 q10, r12
+
+LOOP_CREATE_COMPLEX_BUFFER:           @ Unrolled by 16.
+  vld1.16 {d0, d1, d2, d3}, [r1]!     @ in[]
+  cmp r1, r3
+  vshl.s16 q0, q10
+  vshl.s16 q1, q10
+  vmov d4, d1
+  vmov d1, d5
+  vmov d6, d3
+  vmov d3, d7
+  vst2.16 {d0, d1}, [r2]!
+  vst2.16 {d4, d5}, [r2]!
+  vst2.16 {d2, d3}, [r2]!
+  vst2.16 {d6, d7}, [r2]!
+  blt LOOP_CREATE_COMPLEX_BUFFER
+
+  bx r14
+.fnend
diff --git a/src/modules/audio_processing/ns/nsx_core_neon.c b/src/modules/audio_processing/ns/nsx_core_neon.c
index 2f85abd..8fc74e7 100644
--- a/src/modules/audio_processing/ns/nsx_core_neon.c
+++ b/src/modules/audio_processing/ns/nsx_core_neon.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -15,7 +15,6 @@
 
 // Update the noise estimation information.
 static void UpdateNoiseEstimateNeon(NsxInst_t* inst, int offset) {
-  int i = 0;
   const int16_t kExp2Const = 11819; // Q13
   int16_t* ptr_noiseEstLogQuantile = NULL;
   int16_t* ptr_noiseEstQuantile = NULL;
@@ -92,10 +91,10 @@
 }
 
 // Noise Estimation
-static void NoiseEstimationNeon(NsxInst_t* inst,
-                                uint16_t* magn,
-                                uint32_t* noise,
-                                int16_t* q_noise) {
+void WebRtcNsx_NoiseEstimationNeon(NsxInst_t* inst,
+                                   uint16_t* magn,
+                                   uint32_t* noise,
+                                   int16_t* q_noise) {
   int16_t lmagn[HALF_ANAL_BLOCKL], counter, countDiv;
   int16_t countProd, delta, zeros, frac;
   int16_t log2, tabind, logval, tmp16, tmp16no1, tmp16no2;
@@ -166,7 +165,8 @@
     int16x8_t tmp16x8_1;
     int16x8_t tmp16x8_2;
     int16x8_t tmp16x8_3;
-    int16x8_t tmp16x8_4;
+    // Initialize tmp16x8_4 to zero to avoid compilaton error.
+    int16x8_t tmp16x8_4 = vdupq_n_s16(0);
     int16x8_t tmp16x8_5;
     int32x4_t tmp32x4;
 
@@ -320,7 +320,7 @@
 }
 
 // Filter the data in the frequency domain, and create spectrum.
-static void PrepareSpectrumNeon(NsxInst_t* inst, int16_t* freq_buf) {
+void WebRtcNsx_PrepareSpectrumNeon(NsxInst_t* inst, int16_t* freq_buf) {
 
   // (1) Filtering.
 
@@ -455,7 +455,7 @@
 }
 
 // Denormalize the input buffer.
-static __inline void DenormalizeNeon(NsxInst_t* inst, int16_t* in, int factor) {
+void WebRtcNsx_DenormalizeNeon(NsxInst_t* inst, int16_t* in, int factor) {
   int16_t* ptr_real = &inst->real[0];
   int16_t* ptr_in = &in[0];
 
@@ -494,12 +494,12 @@
 
 // For the noise supress process, synthesis, read out fully processed segment,
 // and update synthesis buffer.
-static void SynthesisUpdateNeon(NsxInst_t* inst,
-                                int16_t* out_frame,
-                                int16_t gain_factor) {
+void WebRtcNsx_SynthesisUpdateNeon(NsxInst_t* inst,
+                                   int16_t* out_frame,
+                                   int16_t gain_factor) {
   int16_t* ptr_real = &inst->real[0];
   int16_t* ptr_syn = &inst->synthesisBuffer[0];
-  int16_t* ptr_window = &inst->window[0];
+  const int16_t* ptr_window = &inst->window[0];
 
   // synthesis
   __asm__ __volatile__("vdup.16 d24, %0" : : "r"(gain_factor) : "d24");
@@ -605,9 +605,9 @@
 }
 
 // Update analysis buffer for lower band, and window data before FFT.
-static void AnalysisUpdateNeon(NsxInst_t* inst,
-                               int16_t* out,
-                               int16_t* new_speech) {
+void WebRtcNsx_AnalysisUpdateNeon(NsxInst_t* inst,
+                                  int16_t* out,
+                                  int16_t* new_speech) {
 
   int16_t* ptr_ana = &inst->analysisBuffer[inst->blockLen10ms];
   int16_t* ptr_out = &inst->analysisBuffer[0];
@@ -647,7 +647,7 @@
   }
 
   // Window data before FFT
-  int16_t* ptr_window = &inst->window[0];
+  const int16_t* ptr_window = &inst->window[0];
   ptr_out = &out[0];
   ptr_ana = &inst->analysisBuffer[0];
   for (; ptr_out < &out[inst->anaLen];) {
@@ -682,9 +682,9 @@
 
 // Create a complex number buffer (out[]) as the intput (in[]) interleaved with
 // zeros, and normalize it.
-static __inline void CreateComplexBufferNeon(NsxInst_t* inst,
-                                             int16_t* in,
-                                             int16_t* out) {
+void WebRtcNsx_CreateComplexBufferNeon(NsxInst_t* inst,
+                                       int16_t* in,
+                                       int16_t* out) {
   int16_t* ptr_out = &out[0];
   int16_t* ptr_in = &in[0];
 
@@ -723,12 +723,3 @@
     );
   }
 }
-
-void WebRtcNsx_InitNeon(void) {
-  WebRtcNsx_NoiseEstimation = NoiseEstimationNeon;
-  WebRtcNsx_PrepareSpectrum = PrepareSpectrumNeon;
-  WebRtcNsx_SynthesisUpdate = SynthesisUpdateNeon;
-  WebRtcNsx_AnalysisUpdate = AnalysisUpdateNeon;
-  WebRtcNsx_Denormalize = DenormalizeNeon;
-  WebRtcNsx_CreateComplexBuffer = CreateComplexBufferNeon;
-}
diff --git a/src/modules/audio_processing/ns/nsx_core_neon_offsets.c b/src/modules/audio_processing/ns/nsx_core_neon_offsets.c
new file mode 100644
index 0000000..ee64a59
--- /dev/null
+++ b/src/modules/audio_processing/ns/nsx_core_neon_offsets.c
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "nsx_core.h"
+
+#include <stddef.h>
+
+// Define offset variables that will be compiled and abstracted to constant
+// defines, which will then only be used in ARM assembly code.
+int offset_nsx_anaLen = offsetof(NsxInst_t, anaLen);
+int offset_nsx_anaLen2 = offsetof(NsxInst_t, anaLen2);
+int offset_nsx_normData = offsetof(NsxInst_t, normData);
+int offset_nsx_analysisBuffer = offsetof(NsxInst_t, analysisBuffer);
+int offset_nsx_synthesisBuffer = offsetof(NsxInst_t, synthesisBuffer);
+int offset_nsx_blockLen10ms = offsetof(NsxInst_t, blockLen10ms);
+int offset_nsx_window = offsetof(NsxInst_t, window);
+int offset_nsx_real = offsetof(NsxInst_t, real);
+int offset_nsx_imag = offsetof(NsxInst_t, imag);
+int offset_nsx_noiseSupFilter = offsetof(NsxInst_t, noiseSupFilter);
+int offset_nsx_magnLen = offsetof(NsxInst_t, magnLen);
+int offset_nsx_noiseEstLogQuantile = offsetof(NsxInst_t, noiseEstLogQuantile);
+int offset_nsx_noiseEstQuantile = offsetof(NsxInst_t, noiseEstQuantile);
+int offset_nsx_qNoise = offsetof(NsxInst_t, qNoise);
+int offset_nsx_stages = offsetof(NsxInst_t, stages);
+int offset_nsx_blockIndex = offsetof(NsxInst_t, blockIndex);
+int offset_nsx_noiseEstCounter = offsetof(NsxInst_t, noiseEstCounter);
+int offset_nsx_noiseEstDensity = offsetof(NsxInst_t, noiseEstDensity);
diff --git a/src/modules/audio_processing/ns/nsx_defines.h b/src/modules/audio_processing/ns/nsx_defines.h
index cd1e3bf..ef4d297 100644
--- a/src/modules/audio_processing/ns/nsx_defines.h
+++ b/src/modules/audio_processing/ns/nsx_defines.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -11,49 +11,53 @@
 #ifndef WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_SOURCE_NSX_DEFINES_H_
 #define WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_SOURCE_NSX_DEFINES_H_
 
-#define ANAL_BLOCKL_MAX         256 // max analysis block length
-#define HALF_ANAL_BLOCKL        129 // half max analysis block length + 1
+#define ANAL_BLOCKL_MAX         256 /* Max analysis block length */
+#define HALF_ANAL_BLOCKL        129 /* Half max analysis block length + 1 */
 #define SIMULT                  3
 #define END_STARTUP_LONG        200
 #define END_STARTUP_SHORT       50
-#define FACTOR_Q16              (WebRtc_Word32)2621440 // 40 in Q16
-#define FACTOR_Q7               (WebRtc_Word16)5120 // 40 in Q7
-#define FACTOR_Q7_STARTUP       (WebRtc_Word16)1024 // 8 in Q7
-#define WIDTH_Q8                3 // 0.01 in Q8 (or 25 )
-//PARAMETERS FOR NEW METHOD
-#define DD_PR_SNR_Q11           2007 // ~= Q11(0.98) DD update of prior SNR
-#define ONE_MINUS_DD_PR_SNR_Q11 41 // DD update of prior SNR
-#define SPECT_FLAT_TAVG_Q14     4915 // (0.30) tavg parameter for spectral flatness measure
-#define SPECT_DIFF_TAVG_Q8      77 // (0.30) tavg parameter for spectral flatness measure
-#define PRIOR_UPDATE_Q14        1638 // Q14(0.1) update parameter of prior model
-#define NOISE_UPDATE_Q8         26 // 26 ~= Q8(0.1) update parameter for noise
-// probability threshold for noise state in speech/noise likelihood
-#define ONE_MINUS_PROB_RANGE_Q8 205 // 205 ~= Q8(0.8)
-#define HIST_PAR_EST            1000 // histogram size for estimation of parameters
-//FEATURE EXTRACTION CONFIG
-//bin size of histogram
+#define FACTOR_Q16              2621440 /* 40 in Q16 */
+#define FACTOR_Q7               5120 /* 40 in Q7 */
+#define FACTOR_Q7_STARTUP       1024 /* 8 in Q7 */
+#define WIDTH_Q8                3 /* 0.01 in Q8 (or 25 ) */
+
+/* PARAMETERS FOR NEW METHOD */
+#define DD_PR_SNR_Q11           2007 /* ~= Q11(0.98) DD update of prior SNR */
+#define ONE_MINUS_DD_PR_SNR_Q11 41 /* DD update of prior SNR */
+#define SPECT_FLAT_TAVG_Q14     4915 /* (0.30) tavg parameter for spectral flatness measure */
+#define SPECT_DIFF_TAVG_Q8      77 /* (0.30) tavg parameter for spectral flatness measure */
+#define PRIOR_UPDATE_Q14        1638 /* Q14(0.1) Update parameter of prior model */
+#define NOISE_UPDATE_Q8         26 /* 26 ~= Q8(0.1) Update parameter for noise */
+
+/* Probability threshold for noise state in speech/noise likelihood. */
+#define ONE_MINUS_PROB_RANGE_Q8 205 /* 205 ~= Q8(0.8) */
+#define HIST_PAR_EST            1000 /* Histogram size for estimation of parameters */
+
+/* FEATURE EXTRACTION CONFIG  */
+/* Bin size of histogram */
 #define BIN_SIZE_LRT            10
-//scale parameters: multiply dominant peaks of the histograms by scale factor to obtain
-// thresholds for prior model
-#define FACTOR_1_LRT_DIFF       6 //for LRT and spectral difference (5 times bigger)
-//for spectral_flatness: used when noise is flatter than speech (10 times bigger)
+/* Scale parameters: multiply dominant peaks of the histograms by scale factor to obtain. */
+/* Thresholds for prior model */
+#define FACTOR_1_LRT_DIFF       6 /* For LRT and spectral difference (5 times bigger) */
+/* For spectral_flatness: used when noise is flatter than speech (10 times bigger). */
 #define FACTOR_2_FLAT_Q10       922
-//peak limit for spectral flatness (varies between 0 and 1)
-#define THRES_PEAK_FLAT         24 // * 2 * BIN_SIZE_FLAT_FX
-//limit on spacing of two highest peaks in histogram: spacing determined by bin size
-#define LIM_PEAK_SPACE_FLAT_DIFF    4 // * 2 * BIN_SIZE_DIFF_FX
-//limit on relevance of second peak:
+/* Peak limit for spectral flatness (varies between 0 and 1) */
+#define THRES_PEAK_FLAT         24 /* * 2 * BIN_SIZE_FLAT_FX */
+/* Limit on spacing of two highest peaks in histogram: spacing determined by bin size. */
+#define LIM_PEAK_SPACE_FLAT_DIFF    4 /* * 2 * BIN_SIZE_DIFF_FX */
+/* Limit on relevance of second peak */
 #define LIM_PEAK_WEIGHT_FLAT_DIFF   2
-#define THRES_FLUCT_LRT         10240 //=20 * inst->modelUpdate; fluctuation limit of LRT feat.
-//limit on the max and min values for the feature thresholds
-#define MAX_FLAT_Q10            38912 //  * 2 * BIN_SIZE_FLAT_FX
-#define MIN_FLAT_Q10            4096 //  * 2 * BIN_SIZE_FLAT_FX
-#define MAX_DIFF                100 // * 2 * BIN_SIZE_DIFF_FX
-#define MIN_DIFF                16 // * 2 * BIN_SIZE_DIFF_FX
-//criteria of weight of histogram peak  to accept/reject feature
-#define THRES_WEIGHT_FLAT_DIFF  154//(int)(0.3*(inst->modelUpdate)) for flatness and difference
-//
-#define STAT_UPDATES            9 // Update every 512 = 1 << 9 block
-#define ONE_MINUS_GAMMA_PAUSE_Q8    13 // ~= Q8(0.05) update for conservative noise estimate
-#define GAMMA_NOISE_TRANS_AND_SPEECH_Q8 3 // ~= Q8(0.01) update for transition and noise region
-#endif // WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_SOURCE_NSX_DEFINES_H_
+#define THRES_FLUCT_LRT         10240 /* = 20 * inst->modelUpdate; fluctuation limit of LRT feat. */
+/* Limit on the max and min values for the feature thresholds */
+#define MAX_FLAT_Q10            38912 /*  * 2 * BIN_SIZE_FLAT_FX */
+#define MIN_FLAT_Q10            4096 /*  * 2 * BIN_SIZE_FLAT_FX */
+#define MAX_DIFF                100 /* * 2 * BIN_SIZE_DIFF_FX */
+#define MIN_DIFF                16 /* * 2 * BIN_SIZE_DIFF_FX */
+/* Criteria of weight of histogram peak  to accept/reject feature */
+#define THRES_WEIGHT_FLAT_DIFF  154 /*(int)(0.3*(inst->modelUpdate)) for flatness and difference */
+
+#define STAT_UPDATES            9 /* Update every 512 = 1 << 9 block */
+#define ONE_MINUS_GAMMA_PAUSE_Q8    13 /* ~= Q8(0.05) Update for conservative noise estimate */
+#define GAMMA_NOISE_TRANS_AND_SPEECH_Q8 3 /* ~= Q8(0.01) Update for transition and noise region */
+
+#endif /* WEBRTC_MODULES_AUDIO_PROCESSING_NS_MAIN_SOURCE_NSX_DEFINES_H_ */
diff --git a/src/modules/audio_processing/processing_component.h b/src/modules/audio_processing/processing_component.h
index 3af0c4d..b3457b5 100644
--- a/src/modules/audio_processing/processing_component.h
+++ b/src/modules/audio_processing/processing_component.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -8,8 +8,8 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_PROCESSING_COMPONENT_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_PROCESSING_COMPONENT_H_
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_PROCESSING_COMPONENT_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_PROCESSING_COMPONENT_H_
 
 #include <vector>
 
@@ -25,7 +25,6 @@
 
   virtual int Initialize();
   virtual int Destroy();
-  virtual int get_version(char* version, int version_len_bytes) const = 0;
 
   bool is_component_enabled() const;
 
@@ -51,4 +50,4 @@
 };
 }  // namespace webrtc
 
-#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_PROCESSING_COMPONENT_H__
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_PROCESSING_COMPONENT_H__
diff --git a/src/modules/audio_processing/test/process_test.cc b/src/modules/audio_processing/test/process_test.cc
index 2023ddb..aa432ff 100644
--- a/src/modules/audio_processing/test/process_test.cc
+++ b/src/modules/audio_processing/test/process_test.cc
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -21,7 +21,7 @@
 #include "module_common_types.h"
 #include "scoped_ptr.h"
 #include "tick_util.h"
-#ifdef WEBRTC_ANDROID
+#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
 #include "external/webrtc/src/modules/audio_processing/debug.pb.h"
 #else
 #include "webrtc/audio_processing/debug.pb.h"
@@ -115,6 +115,7 @@
   printf("  --ns_moderate\n");
   printf("  --ns_high\n");
   printf("  --ns_very_high\n");
+  printf("  --ns_prob_file FILE\n");
   printf("\n  -vad     Voice activity detection\n");
   printf("  --vad_out_file FILE\n");
   printf("\n Level metrics (enabled by default)\n");
@@ -149,6 +150,7 @@
   const char* near_filename = NULL;
   const char* out_filename = NULL;
   const char* vad_out_filename = NULL;
+  const char* ns_prob_filename = NULL;
   const char* aecm_echo_path_in_filename = NULL;
   const char* aecm_echo_path_out_filename = NULL;
 
@@ -336,6 +338,11 @@
       ASSERT_EQ(apm->kNoError,
           apm->noise_suppression()->set_level(NoiseSuppression::kVeryHigh));
 
+    } else if (strcmp(argv[i], "--ns_prob_file") == 0) {
+      i++;
+      ASSERT_LT(i, argc) << "Specify filename after --ns_prob_file";
+      ns_prob_filename = argv[i];
+
     } else if (strcmp(argv[i], "-vad") == 0) {
       ASSERT_EQ(apm->kNoError, apm->voice_detection()->Enable(true));
 
@@ -390,6 +397,7 @@
   const char delay_filename[] = "apm_delay.dat";
   const char drift_filename[] = "apm_drift.dat";
   const char vad_file_default[] = "vad_out.dat";
+  const char ns_prob_file_default[] = "ns_prob.dat";
 
   if (!simulating) {
     far_filename = far_file_default;
@@ -404,6 +412,10 @@
     vad_out_filename = vad_file_default;
   }
 
+  if (!ns_prob_filename) {
+    ns_prob_filename = ns_prob_file_default;
+  }
+
   FILE* pb_file = NULL;
   FILE* far_file = NULL;
   FILE* near_file = NULL;
@@ -412,6 +424,7 @@
   FILE* delay_file = NULL;
   FILE* drift_file = NULL;
   FILE* vad_out_file = NULL;
+  FILE* ns_prob_file = NULL;
   FILE* aecm_echo_path_in_file = NULL;
   FILE* aecm_echo_path_out_file = NULL;
 
@@ -466,6 +479,12 @@
                                       << vad_out_file;
   }
 
+  if (apm->noise_suppression()->is_enabled()) {
+    ns_prob_file = fopen(ns_prob_filename, "wb");
+    ASSERT_TRUE(NULL != ns_prob_file) << "Unable to open NS output file "
+                                      << ns_prob_file;
+  }
+
   if (aecm_echo_path_in_filename != NULL) {
     aecm_echo_path_in_file = fopen(aecm_echo_path_in_filename, "rb");
     ASSERT_TRUE(NULL != aecm_echo_path_in_file) << "Unable to open file "
@@ -504,6 +523,7 @@
   int drift_samples = 0;
   int capture_level = 127;
   int8_t stream_has_voice = 0;
+  float ns_speech_prob = 0.0f;
 
   TickTime t0 = TickTime::Now();
   TickTime t1 = t0;
@@ -546,11 +566,11 @@
             apm->set_num_reverse_channels(msg.num_reverse_channels()));
 
         samples_per_channel = msg.sample_rate() / 100;
-        far_frame._frequencyInHz = msg.sample_rate();
-        far_frame._payloadDataLengthInSamples = samples_per_channel;
-        far_frame._audioChannel = msg.num_reverse_channels();
-        near_frame._frequencyInHz = msg.sample_rate();
-        near_frame._payloadDataLengthInSamples = samples_per_channel;
+        far_frame.sample_rate_hz_ = msg.sample_rate();
+        far_frame.samples_per_channel_ = samples_per_channel;
+        far_frame.num_channels_ = msg.num_reverse_channels();
+        near_frame.sample_rate_hz_ = msg.sample_rate();
+        near_frame.samples_per_channel_ = samples_per_channel;
 
         if (verbose) {
           printf("Init at frame: %d (primary), %d (reverse)\n",
@@ -569,8 +589,8 @@
 
         ASSERT_TRUE(msg.has_data());
         ASSERT_EQ(sizeof(int16_t) * samples_per_channel *
-            far_frame._audioChannel, msg.data().size());
-        memcpy(far_frame._payloadData, msg.data().data(), msg.data().size());
+            far_frame.num_channels_, msg.data().size());
+        memcpy(far_frame.data_, msg.data().data(), msg.data().size());
 
         if (perf_testing) {
           t0 = TickTime::Now();
@@ -597,12 +617,12 @@
         primary_count++;
 
         // ProcessStream could have changed this for the output frame.
-        near_frame._audioChannel = apm->num_input_channels();
+        near_frame.num_channels_ = apm->num_input_channels();
 
         ASSERT_TRUE(msg.has_input_data());
         ASSERT_EQ(sizeof(int16_t) * samples_per_channel *
-            near_frame._audioChannel, msg.input_data().size());
-        memcpy(near_frame._payloadData,
+            near_frame.num_channels_, msg.input_data().size());
+        memcpy(near_frame.data_,
                msg.input_data().data(),
                msg.input_data().size());
 
@@ -630,7 +650,7 @@
         }
         ASSERT_TRUE(err == apm->kNoError ||
                     err == apm->kBadStreamParameterWarning);
-        ASSERT_TRUE(near_frame._audioChannel == apm->num_output_channels());
+        ASSERT_TRUE(near_frame.num_channels_ == apm->num_output_channels());
 
         capture_level = apm->gain_control()->stream_analog_level();
 
@@ -643,6 +663,14 @@
                                vad_out_file));
         }
 
+        if (ns_prob_file != NULL) {
+          ns_speech_prob = apm->noise_suppression()->speech_probability();
+          ASSERT_EQ(1u, fwrite(&ns_speech_prob,
+                               sizeof(ns_speech_prob),
+                               1,
+                               ns_prob_file));
+        }
+
         if (apm->gain_control()->mode() != GainControl::kAdaptiveAnalog) {
           ASSERT_EQ(msg.level(), capture_level);
         }
@@ -659,8 +687,8 @@
           }
         }
 
-        size_t size = samples_per_channel * near_frame._audioChannel;
-        ASSERT_EQ(size, fwrite(near_frame._payloadData,
+        size_t size = samples_per_channel * near_frame.num_channels_;
+        ASSERT_EQ(size, fwrite(near_frame.data_,
                                sizeof(int16_t),
                                size,
                                out_file));
@@ -700,11 +728,11 @@
         }
       }
 
-      far_frame._frequencyInHz = sample_rate_hz;
-      far_frame._payloadDataLengthInSamples = samples_per_channel;
-      far_frame._audioChannel = num_render_channels;
-      near_frame._frequencyInHz = sample_rate_hz;
-      near_frame._payloadDataLengthInSamples = samples_per_channel;
+      far_frame.sample_rate_hz_ = sample_rate_hz;
+      far_frame.samples_per_channel_ = samples_per_channel;
+      far_frame.num_channels_ = num_render_channels;
+      near_frame.sample_rate_hz_ = sample_rate_hz;
+      near_frame.samples_per_channel_ = samples_per_channel;
 
       if (event == kInitializeEvent || event == kResetEventDeprecated) {
         ASSERT_EQ(1u,
@@ -724,11 +752,11 @@
                   apm->echo_cancellation()->set_device_sample_rate_hz(
                       device_sample_rate_hz));
 
-        far_frame._frequencyInHz = sample_rate_hz;
-        far_frame._payloadDataLengthInSamples = samples_per_channel;
-        far_frame._audioChannel = num_render_channels;
-        near_frame._frequencyInHz = sample_rate_hz;
-        near_frame._payloadDataLengthInSamples = samples_per_channel;
+        far_frame.sample_rate_hz_ = sample_rate_hz;
+        far_frame.samples_per_channel_ = samples_per_channel;
+        far_frame.num_channels_ = num_render_channels;
+        near_frame.sample_rate_hz_ = sample_rate_hz;
+        near_frame.samples_per_channel_ = samples_per_channel;
 
         if (verbose) {
           printf("Init at frame: %d (primary), %d (reverse)\n",
@@ -740,7 +768,7 @@
         reverse_count++;
 
         size_t size = samples_per_channel * num_render_channels;
-        read_count = fread(far_frame._payloadData,
+        read_count = fread(far_frame.data_,
                            sizeof(int16_t),
                            size,
                            far_file);
@@ -778,10 +806,10 @@
 
       } else if (event == kCaptureEvent) {
         primary_count++;
-        near_frame._audioChannel = num_capture_input_channels;
+        near_frame.num_channels_ = num_capture_input_channels;
 
         size_t size = samples_per_channel * num_capture_input_channels;
-        read_count = fread(near_frame._payloadData,
+        read_count = fread(near_frame.data_,
                            sizeof(int16_t),
                            size,
                            near_file);
@@ -829,7 +857,7 @@
         }
         ASSERT_TRUE(err == apm->kNoError ||
                     err == apm->kBadStreamParameterWarning);
-        ASSERT_TRUE(near_frame._audioChannel == apm->num_output_channels());
+        ASSERT_TRUE(near_frame.num_channels_ == apm->num_output_channels());
 
         capture_level = apm->gain_control()->stream_analog_level();
 
@@ -842,6 +870,14 @@
                                vad_out_file));
         }
 
+        if (ns_prob_file != NULL) {
+          ns_speech_prob = apm->noise_suppression()->speech_probability();
+          ASSERT_EQ(1u, fwrite(&ns_speech_prob,
+                               sizeof(ns_speech_prob),
+                               1,
+                               ns_prob_file));
+        }
+
         if (apm->gain_control()->mode() != GainControl::kAdaptiveAnalog) {
           ASSERT_EQ(capture_level_in, capture_level);
         }
@@ -858,8 +894,8 @@
           }
         }
 
-        size = samples_per_channel * near_frame._audioChannel;
-        ASSERT_EQ(size, fwrite(near_frame._payloadData,
+        size = samples_per_channel * near_frame.num_channels_;
+        ASSERT_EQ(size, fwrite(near_frame.data_,
                                sizeof(int16_t),
                                size,
                                out_file));
diff --git a/src/modules/audio_processing/test/unit_test.cc b/src/modules/audio_processing/test/unit_test.cc
index 6fe5905..3e28fb3 100644
--- a/src/modules/audio_processing/test/unit_test.cc
+++ b/src/modules/audio_processing/test/unit_test.cc
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -10,6 +10,8 @@
 
 #include <stdio.h>
 
+#include <algorithm>
+
 #include "gtest/gtest.h"
 
 #include "audio_processing.h"
@@ -17,15 +19,20 @@
 #include "module_common_types.h"
 #include "scoped_ptr.h"
 #include "signal_processing_library.h"
-#include "testsupport/fileutils.h"
+#include "test/testsupport/fileutils.h"
 #include "thread_wrapper.h"
 #include "trace.h"
-#ifdef WEBRTC_ANDROID
+#ifdef WEBRTC_ANDROID_PLATFORM_BUILD
 #include "external/webrtc/src/modules/audio_processing/test/unittest.pb.h"
 #else
 #include "webrtc/audio_processing/unittest.pb.h"
 #endif
 
+#if (defined(WEBRTC_AUDIOPROC_FIXED_PROFILE)) || \
+    (defined(WEBRTC_LINUX) && defined(WEBRTC_ARCH_X86_64) && !defined(NDEBUG))
+#  define WEBRTC_AUDIOPROC_BIT_EXACT
+#endif
+
 using webrtc::AudioProcessing;
 using webrtc::AudioFrame;
 using webrtc::GainControl;
@@ -42,8 +49,22 @@
 namespace {
 // When false, this will compare the output data with the results stored to
 // file. This is the typical case. When the file should be updated, it can
-// be set to true with the command-line switch --write_output_data.
-bool write_output_data = false;
+// be set to true with the command-line switch --write_ref_data.
+bool write_ref_data = false;
+
+const int kSampleRates[] = {8000, 16000, 32000};
+const size_t kSampleRatesSize = sizeof(kSampleRates) / sizeof(*kSampleRates);
+const int kChannels[] = {1, 2};
+const size_t kChannelsSize = sizeof(kChannels) / sizeof(*kChannels);
+
+#if defined(WEBRTC_AUDIOPROC_FIXED_PROFILE)
+// AECM doesn't support super-wb.
+const int kProcessSampleRates[] = {8000, 16000};
+#elif defined(WEBRTC_AUDIOPROC_FLOAT_PROFILE)
+const int kProcessSampleRates[] = {8000, 16000, 32000};
+#endif
+const size_t kProcessSampleRatesSize = sizeof(kProcessSampleRates) /
+    sizeof(*kProcessSampleRates);
 
 class ApmTest : public ::testing::Test {
  protected:
@@ -61,29 +82,45 @@
   static void TearDownTestCase() {
     Trace::ReturnTrace();
   }
-  // Path to where the resource files to be used for this test are located.
-  const std::string resource_path;
-  const std::string output_filename;
+
+  void Init(int sample_rate_hz, int num_reverse_channels,
+            int num_input_channels, int num_output_channels,
+            bool open_output_file);
+  std::string ResourceFilePath(std::string name, int sample_rate_hz);
+  std::string OutputFilePath(std::string name,
+                             int sample_rate_hz,
+                             int num_reverse_channels,
+                             int num_input_channels,
+                             int num_output_channels);
+  void EnableAllComponents();
+  bool ReadFrame(FILE* file, AudioFrame* frame);
+
+  const std::string output_path_;
+  const std::string ref_path_;
+  const std::string ref_filename_;
   webrtc::AudioProcessing* apm_;
   webrtc::AudioFrame* frame_;
   webrtc::AudioFrame* revframe_;
   FILE* far_file_;
   FILE* near_file_;
+  FILE* out_file_;
 };
 
 ApmTest::ApmTest()
-    : resource_path(webrtc::test::ProjectRootPath() +
-                    "test/data/audio_processing/"),
-#if defined(WEBRTC_APM_UNIT_TEST_FIXED_PROFILE)
-      output_filename(resource_path + "output_data_fixed.pb"),
-#elif defined(WEBRTC_APM_UNIT_TEST_FLOAT_PROFILE)
-      output_filename(resource_path + "output_data_float.pb"),
+    : output_path_(webrtc::test::OutputPath()),
+      ref_path_(webrtc::test::ProjectRootPath() +
+                "data/audio_processing/"),
+#if defined(WEBRTC_AUDIOPROC_FIXED_PROFILE)
+      ref_filename_(ref_path_ + "output_data_fixed.pb"),
+#elif defined(WEBRTC_AUDIOPROC_FLOAT_PROFILE)
+      ref_filename_(ref_path_ + "output_data_float.pb"),
 #endif
       apm_(NULL),
       frame_(NULL),
       revframe_(NULL),
       far_file_(NULL),
-      near_file_(NULL) {}
+      near_file_(NULL),
+      out_file_(NULL) {}
 
 void ApmTest::SetUp() {
   apm_ = AudioProcessing::Create(0);
@@ -92,25 +129,7 @@
   frame_ = new AudioFrame();
   revframe_ = new AudioFrame();
 
-  ASSERT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(32000));
-  ASSERT_EQ(apm_->kNoError, apm_->set_num_channels(2, 2));
-  ASSERT_EQ(apm_->kNoError, apm_->set_num_reverse_channels(2));
-
-  frame_->_payloadDataLengthInSamples = 320;
-  frame_->_audioChannel = 2;
-  frame_->_frequencyInHz = 32000;
-  revframe_->_payloadDataLengthInSamples = 320;
-  revframe_->_audioChannel = 2;
-  revframe_->_frequencyInHz = 32000;
-
-  std::string input_filename = resource_path + "aec_far.pcm";
-  far_file_ = fopen(input_filename.c_str(), "rb");
-  ASSERT_TRUE(far_file_ != NULL) << "Could not open input file " <<
-      input_filename << "\n";
-  input_filename = resource_path + "aec_near.pcm";
-  near_file_ = fopen(input_filename.c_str(), "rb");
-  ASSERT_TRUE(near_file_ != NULL) << "Could not open input file " <<
-        input_filename << "\n";
+  Init(32000, 2, 2, 2, false);
 }
 
 void ApmTest::TearDown() {
@@ -134,12 +153,94 @@
   }
   near_file_ = NULL;
 
+  if (out_file_) {
+    ASSERT_EQ(0, fclose(out_file_));
+  }
+  out_file_ = NULL;
+
   if (apm_ != NULL) {
     AudioProcessing::Destroy(apm_);
   }
   apm_ = NULL;
 }
 
+std::string ApmTest::ResourceFilePath(std::string name, int sample_rate_hz) {
+  std::ostringstream ss;
+  // Resource files are all stereo.
+  ss << name << sample_rate_hz / 1000 << "_stereo";
+  return webrtc::test::ResourcePath(ss.str(), "pcm");
+}
+
+std::string ApmTest::OutputFilePath(std::string name,
+                                    int sample_rate_hz,
+                                    int num_reverse_channels,
+                                    int num_input_channels,
+                                    int num_output_channels) {
+  std::ostringstream ss;
+  ss << name << sample_rate_hz / 1000 << "_" << num_reverse_channels << "r" <<
+      num_input_channels << "i" << "_";
+  if (num_output_channels == 1) {
+    ss << "mono";
+  } else if (num_output_channels == 2) {
+    ss << "stereo";
+  } else {
+    assert(false);
+    return "";
+  }
+  ss << ".pcm";
+
+  return output_path_ + ss.str();
+}
+
+void ApmTest::Init(int sample_rate_hz, int num_reverse_channels,
+                   int num_input_channels, int num_output_channels,
+                   bool open_output_file) {
+  ASSERT_EQ(apm_->kNoError, apm_->Initialize());
+
+  // Handles error checking of the parameters as well. No need to repeat it.
+  ASSERT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(sample_rate_hz));
+  ASSERT_EQ(apm_->kNoError, apm_->set_num_channels(num_input_channels,
+                                                   num_output_channels));
+  ASSERT_EQ(apm_->kNoError,
+            apm_->set_num_reverse_channels(num_reverse_channels));
+
+  // We always use 10 ms frames.
+  const int samples_per_channel = sample_rate_hz / 100;
+  frame_->samples_per_channel_ = samples_per_channel;
+  frame_->num_channels_ = num_input_channels;
+  frame_->sample_rate_hz_ = sample_rate_hz;
+  revframe_->samples_per_channel_ = samples_per_channel;
+  revframe_->num_channels_ = num_reverse_channels;
+  revframe_->sample_rate_hz_ = sample_rate_hz;
+
+  if (far_file_) {
+    ASSERT_EQ(0, fclose(far_file_));
+  }
+  std::string filename = ResourceFilePath("far", sample_rate_hz);
+  far_file_ = fopen(filename.c_str(), "rb");
+  ASSERT_TRUE(far_file_ != NULL) << "Could not open file " <<
+      filename << "\n";
+
+  if (near_file_) {
+    ASSERT_EQ(0, fclose(near_file_));
+  }
+  filename = ResourceFilePath("near", sample_rate_hz);
+  near_file_ = fopen(filename.c_str(), "rb");
+  ASSERT_TRUE(near_file_ != NULL) << "Could not open file " <<
+        filename << "\n";
+
+  if (open_output_file) {
+    if (out_file_) {
+      ASSERT_EQ(0, fclose(out_file_));
+    }
+    filename = OutputFilePath("out", sample_rate_hz, num_reverse_channels,
+                              num_input_channels, num_output_channels);
+    out_file_ = fopen(filename.c_str(), "wb");
+    ASSERT_TRUE(out_file_ != NULL) << "Could not open file " <<
+          filename << "\n";
+  }
+}
+
 void MixStereoToMono(const int16_t* stereo,
                      int16_t* mono,
                      int samples_per_channel) {
@@ -150,44 +251,117 @@
   }
 }
 
-template <class T>
-T MaxValue(T a, T b) {
-  return a > b ? a : b;
+void CopyLeftToRightChannel(int16_t* stereo, int samples_per_channel) {
+  for (int i = 0; i < samples_per_channel; i++) {
+    stereo[i * 2 + 1] = stereo[i * 2];
+  }
+}
+
+void VerifyChannelsAreEqual(int16_t* stereo, int samples_per_channel) {
+  for (int i = 0; i < samples_per_channel; i++) {
+    EXPECT_EQ(stereo[i * 2 + 1], stereo[i * 2]);
+  }
+}
+
+void ApmTest::EnableAllComponents() {
+#if defined(WEBRTC_AUDIOPROC_FIXED_PROFILE)
+  EXPECT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(16000));
+  EXPECT_EQ(apm_->kNoError, apm_->echo_control_mobile()->Enable(true));
+
+  EXPECT_EQ(apm_->kNoError,
+            apm_->gain_control()->set_mode(GainControl::kAdaptiveDigital));
+  EXPECT_EQ(apm_->kNoError, apm_->gain_control()->Enable(true));
+#elif defined(WEBRTC_AUDIOPROC_FLOAT_PROFILE)
+  EXPECT_EQ(apm_->kNoError,
+            apm_->echo_cancellation()->enable_drift_compensation(true));
+  EXPECT_EQ(apm_->kNoError,
+            apm_->echo_cancellation()->enable_metrics(true));
+  EXPECT_EQ(apm_->kNoError,
+            apm_->echo_cancellation()->enable_delay_logging(true));
+  EXPECT_EQ(apm_->kNoError, apm_->echo_cancellation()->Enable(true));
+
+  EXPECT_EQ(apm_->kNoError,
+            apm_->gain_control()->set_mode(GainControl::kAdaptiveAnalog));
+  EXPECT_EQ(apm_->kNoError,
+            apm_->gain_control()->set_analog_level_limits(0, 255));
+  EXPECT_EQ(apm_->kNoError, apm_->gain_control()->Enable(true));
+#endif
+
+  EXPECT_EQ(apm_->kNoError,
+            apm_->high_pass_filter()->Enable(true));
+
+  EXPECT_EQ(apm_->kNoError,
+            apm_->level_estimator()->Enable(true));
+
+  EXPECT_EQ(apm_->kNoError,
+            apm_->noise_suppression()->Enable(true));
+
+  EXPECT_EQ(apm_->kNoError,
+            apm_->voice_detection()->Enable(true));
+}
+
+bool ApmTest::ReadFrame(FILE* file, AudioFrame* frame) {
+  // The files always contain stereo audio.
+  size_t frame_size = frame->samples_per_channel_ * 2;
+  size_t read_count = fread(frame->data_,
+                            sizeof(int16_t),
+                            frame_size,
+                            file);
+  if (read_count != frame_size) {
+    // Check that the file really ended.
+    EXPECT_NE(0, feof(file));
+    return false;  // This is expected.
+  }
+
+  if (frame->num_channels_ == 1) {
+    MixStereoToMono(frame->data_, frame->data_,
+                    frame->samples_per_channel_);
+  }
+
+  return true;
+}
+
+void SetFrameTo(AudioFrame* frame, int16_t value) {
+  for (int i = 0; i < frame->samples_per_channel_ * frame->num_channels_;
+      ++i) {
+    frame->data_[i] = value;
+  }
+}
+
+void SetFrameTo(AudioFrame* frame, int16_t left, int16_t right) {
+  ASSERT_EQ(2, frame->num_channels_);
+  for (int i = 0; i < frame->samples_per_channel_ * 2; i += 2) {
+    frame->data_[i] = left;
+    frame->data_[i + 1] = right;
+  }
 }
 
 template <class T>
 T AbsValue(T a) {
-  return a > 0 ? a : -a;
-}
-
-void SetFrameTo(AudioFrame* frame, int16_t value) {
-  for (int i = 0; i < frame->_payloadDataLengthInSamples * frame->_audioChannel;
-      ++i) {
-    frame->_payloadData[i] = value;
-  }
+  return a > 0 ? a: -a;
 }
 
 int16_t MaxAudioFrame(const AudioFrame& frame) {
-  const int length = frame._payloadDataLengthInSamples * frame._audioChannel;
-  int16_t max = AbsValue(frame._payloadData[0]);
+  const int length = frame.samples_per_channel_ * frame.num_channels_;
+  int16_t max_data = AbsValue(frame.data_[0]);
   for (int i = 1; i < length; i++) {
-    max = MaxValue(max, AbsValue(frame._payloadData[i]));
+    max_data = std::max(max_data, AbsValue(frame.data_[i]));
   }
 
-  return max;
+  return max_data;
 }
 
 bool FrameDataAreEqual(const AudioFrame& frame1, const AudioFrame& frame2) {
-  if (frame1._payloadDataLengthInSamples !=
-      frame2._payloadDataLengthInSamples) {
+  if (frame1.samples_per_channel_ !=
+      frame2.samples_per_channel_) {
     return false;
   }
-  if (frame1._audioChannel !=
-      frame2._audioChannel) {
+  if (frame1.num_channels_ !=
+      frame2.num_channels_) {
     return false;
   }
-  if (memcmp(frame1._payloadData, frame2._payloadData,
-             frame1._payloadDataLengthInSamples * frame1._audioChannel *
+  if (memcmp(frame1.data_, frame2.data_,
+             frame1.samples_per_channel_ * frame1.num_channels_ *
                sizeof(int16_t))) {
     return false;
   }
@@ -264,12 +438,12 @@
 
   AudioFrame primary_frame;
   AudioFrame reverse_frame;
-  primary_frame._payloadDataLengthInSamples = 320;
-  primary_frame._audioChannel = 2;
-  primary_frame._frequencyInHz = 32000;
-  reverse_frame._payloadDataLengthInSamples = 320;
-  reverse_frame._audioChannel = 2;
-  reverse_frame._frequencyInHz = 32000;
+  primary_frame.samples_per_channel_ = 320;
+  primary_frame.num_channels_ = 2;
+  primary_frame.sample_rate_hz_ = 32000;
+  reverse_frame.samples_per_channel_ = 320;
+  reverse_frame.num_channels_ = 2;
+  reverse_frame.sample_rate_hz_ = 32000;
 
   ap->echo_cancellation()->Enable(true);
   ap->gain_control()->Enable(true);
@@ -432,6 +606,30 @@
   EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
 }
 
+TEST_F(ApmTest, DefaultDelayOffsetIsZero) {
+  EXPECT_EQ(0, apm_->delay_offset_ms());
+  EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(50));
+  EXPECT_EQ(50, apm_->stream_delay_ms());
+}
+
+TEST_F(ApmTest, DelayOffsetWithLimitsIsSetProperly) {
+  // High limit of 500 ms.
+  apm_->set_delay_offset_ms(100);
+  EXPECT_EQ(100, apm_->delay_offset_ms());
+  EXPECT_EQ(apm_->kBadStreamParameterWarning, apm_->set_stream_delay_ms(450));
+  EXPECT_EQ(500, apm_->stream_delay_ms());
+  EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(100));
+  EXPECT_EQ(200, apm_->stream_delay_ms());
+
+  // Low limit of 0 ms.
+  apm_->set_delay_offset_ms(-50);
+  EXPECT_EQ(-50, apm_->delay_offset_ms());
+  EXPECT_EQ(apm_->kBadStreamParameterWarning, apm_->set_stream_delay_ms(20));
+  EXPECT_EQ(0, apm_->stream_delay_ms());
+  EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(100));
+  EXPECT_EQ(50, apm_->stream_delay_ms());
+}
+
 TEST_F(ApmTest, Channels) {
   // Testing number of invalid channels
   EXPECT_EQ(apm_->kBadParameterError, apm_->set_num_channels(0, 1));
@@ -489,14 +687,6 @@
         apm_->echo_cancellation()->device_sample_rate_hz());
   }
 
-  EXPECT_EQ(apm_->kBadParameterError,
-      apm_->echo_cancellation()->set_suppression_level(
-          static_cast<EchoCancellation::SuppressionLevel>(-1)));
-
-  EXPECT_EQ(apm_->kBadParameterError,
-      apm_->echo_cancellation()->set_suppression_level(
-          static_cast<EchoCancellation::SuppressionLevel>(4)));
-
   EchoCancellation::SuppressionLevel level[] = {
     EchoCancellation::kLowSuppression,
     EchoCancellation::kModerateSuppression,
@@ -542,18 +732,11 @@
   // AECM won't use super-wideband.
   EXPECT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(32000));
   EXPECT_EQ(apm_->kBadSampleRateError, apm_->echo_control_mobile()->Enable(true));
-  EXPECT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(16000));
   // Turn AECM on (and AEC off)
+  Init(16000, 2, 2, 2, false);
   EXPECT_EQ(apm_->kNoError, apm_->echo_control_mobile()->Enable(true));
   EXPECT_TRUE(apm_->echo_control_mobile()->is_enabled());
 
-  EXPECT_EQ(apm_->kBadParameterError,
-      apm_->echo_control_mobile()->set_routing_mode(
-      static_cast<EchoControlMobile::RoutingMode>(-1)));
-  EXPECT_EQ(apm_->kBadParameterError,
-      apm_->echo_control_mobile()->set_routing_mode(
-      static_cast<EchoControlMobile::RoutingMode>(5)));
-
   // Toggle routing modes
   EchoControlMobile::RoutingMode mode[] = {
       EchoControlMobile::kQuietEarpieceOrHeadset,
@@ -603,6 +786,14 @@
   for (size_t i = 0; i < echo_path_size; i++) {
     EXPECT_EQ(echo_path_in[i], echo_path_out[i]);
   }
+
+  // Process a few frames with NS in the default disabled state. This exercises
+  // a different codepath than with it enabled.
+  EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(0));
+  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
+  EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(0));
+  EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
+
   // Turn AECM off
   EXPECT_EQ(apm_->kNoError, apm_->echo_control_mobile()->Enable(false));
   EXPECT_FALSE(apm_->echo_control_mobile()->is_enabled());
@@ -610,12 +801,6 @@
 
 TEST_F(ApmTest, GainControl) {
   // Testing gain modes
-  EXPECT_EQ(apm_->kBadParameterError,
-      apm_->gain_control()->set_mode(static_cast<GainControl::Mode>(-1)));
-
-  EXPECT_EQ(apm_->kBadParameterError,
-      apm_->gain_control()->set_mode(static_cast<GainControl::Mode>(3)));
-
   EXPECT_EQ(apm_->kNoError,
       apm_->gain_control()->set_mode(
       apm_->gain_control()->mode()));
@@ -711,16 +896,7 @@
 }
 
 TEST_F(ApmTest, NoiseSuppression) {
-  // Tesing invalid suppression levels
-  EXPECT_EQ(apm_->kBadParameterError,
-      apm_->noise_suppression()->set_level(
-          static_cast<NoiseSuppression::Level>(-1)));
-
-  EXPECT_EQ(apm_->kBadParameterError,
-      apm_->noise_suppression()->set_level(
-          static_cast<NoiseSuppression::Level>(5)));
-
-  // Tesing valid suppression levels
+  // Test valid suppression levels.
   NoiseSuppression::Level level[] = {
     NoiseSuppression::kLow,
     NoiseSuppression::kModerate,
@@ -733,7 +909,7 @@
     EXPECT_EQ(level[i], apm_->noise_suppression()->level());
   }
 
-  // Turing NS on/off
+  // Turn NS on/off
   EXPECT_EQ(apm_->kNoError, apm_->noise_suppression()->Enable(true));
   EXPECT_TRUE(apm_->noise_suppression()->is_enabled());
   EXPECT_EQ(apm_->kNoError, apm_->noise_suppression()->Enable(false));
@@ -741,7 +917,7 @@
 }
 
 TEST_F(ApmTest, HighPassFilter) {
-  // Turing HP filter on/off
+  // Turn HP filter on/off
   EXPECT_EQ(apm_->kNoError, apm_->high_pass_filter()->Enable(true));
   EXPECT_TRUE(apm_->high_pass_filter()->is_enabled());
   EXPECT_EQ(apm_->kNoError, apm_->high_pass_filter()->Enable(false));
@@ -749,7 +925,7 @@
 }
 
 TEST_F(ApmTest, LevelEstimator) {
-  // Turning level estimator on/off
+  // Turn level estimator on/off
   EXPECT_EQ(apm_->kNoError, apm_->level_estimator()->Enable(false));
   EXPECT_FALSE(apm_->level_estimator()->is_enabled());
 
@@ -761,9 +937,9 @@
   // Run this test in wideband; in super-wb, the splitting filter distorts the
   // audio enough to cause deviation from the expectation for small values.
   EXPECT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(16000));
-  frame_->_payloadDataLengthInSamples = 160;
-  frame_->_audioChannel = 2;
-  frame_->_frequencyInHz = 16000;
+  frame_->samples_per_channel_ = 160;
+  frame_->num_channels_ = 2;
+  frame_->sample_rate_hz_ = 16000;
 
   // Min value if no frames have been processed.
   EXPECT_EQ(127, apm_->level_estimator()->RMS());
@@ -796,14 +972,14 @@
   EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
   EXPECT_EQ(70, apm_->level_estimator()->RMS());
 
-  // Min value if _energy == 0.
+  // Min value if energy_ == 0.
   SetFrameTo(frame_, 10000);
-  uint32_t energy = frame_->_energy; // Save default to restore below.
-  frame_->_energy = 0;
+  uint32_t energy = frame_->energy_; // Save default to restore below.
+  frame_->energy_ = 0;
   EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
   EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
   EXPECT_EQ(127, apm_->level_estimator()->RMS());
-  frame_->_energy = energy;
+  frame_->energy_ = energy;
 
   // Verify reset after enable/disable.
   SetFrameTo(frame_, 32767);
@@ -832,16 +1008,7 @@
             apm_->voice_detection()->set_stream_has_voice(false));
   EXPECT_FALSE(apm_->voice_detection()->stream_has_voice());
 
-  // Tesing invalid likelihoods
-  EXPECT_EQ(apm_->kBadParameterError,
-      apm_->voice_detection()->set_likelihood(
-          static_cast<VoiceDetection::Likelihood>(-1)));
-
-  EXPECT_EQ(apm_->kBadParameterError,
-      apm_->voice_detection()->set_likelihood(
-          static_cast<VoiceDetection::Likelihood>(5)));
-
-  // Tesing valid likelihoods
+  // Test valid likelihoods
   VoiceDetection::Likelihood likelihood[] = {
       VoiceDetection::kVeryLowLikelihood,
       VoiceDetection::kLowLikelihood,
@@ -855,11 +1022,11 @@
   }
 
   /* TODO(bjornv): Enable once VAD supports other frame lengths than 10 ms
-  // Tesing invalid frame sizes
+  // Test invalid frame sizes
   EXPECT_EQ(apm_->kBadParameterError,
       apm_->voice_detection()->set_frame_size_ms(12));
 
-  // Tesing valid frame sizes
+  // Test valid frame sizes
   for (int i = 10; i <= 30; i += 10) {
     EXPECT_EQ(apm_->kNoError,
         apm_->voice_detection()->set_frame_size_ms(i));
@@ -867,7 +1034,7 @@
   }
   */
 
-  // Turing VAD on/off
+  // Turn VAD on/off
   EXPECT_EQ(apm_->kNoError, apm_->voice_detection()->Enable(true));
   EXPECT_TRUE(apm_->voice_detection()->is_enabled());
   EXPECT_EQ(apm_->kNoError, apm_->voice_detection()->Enable(false));
@@ -881,20 +1048,84 @@
       AudioFrame::kVadUnknown
   };
   for (size_t i = 0; i < sizeof(activity)/sizeof(*activity); i++) {
-    frame_->_vadActivity = activity[i];
+    frame_->vad_activity_ = activity[i];
     EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
-    EXPECT_EQ(activity[i], frame_->_vadActivity);
+    EXPECT_EQ(activity[i], frame_->vad_activity_);
   }
 
   // Test that AudioFrame activity is set when VAD is enabled.
   EXPECT_EQ(apm_->kNoError, apm_->voice_detection()->Enable(true));
-  frame_->_vadActivity = AudioFrame::kVadUnknown;
+  frame_->vad_activity_ = AudioFrame::kVadUnknown;
   EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
-  EXPECT_NE(AudioFrame::kVadUnknown, frame_->_vadActivity);
+  EXPECT_NE(AudioFrame::kVadUnknown, frame_->vad_activity_);
 
   // TODO(bjornv): Add tests for streamed voice; stream_has_voice()
 }
 
+TEST_F(ApmTest, VerifyDownMixing) {
+  for (size_t i = 0; i < kSampleRatesSize; i++) {
+    Init(kSampleRates[i], 2, 2, 1, false);
+    SetFrameTo(frame_, 1000, 2000);
+    AudioFrame mono_frame;
+    mono_frame.samples_per_channel_ = frame_->samples_per_channel_;
+    mono_frame.num_channels_ = 1;
+    SetFrameTo(&mono_frame, 1500);
+    EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
+    EXPECT_TRUE(FrameDataAreEqual(*frame_, mono_frame));
+  }
+}
+
+TEST_F(ApmTest, AllProcessingDisabledByDefault) {
+  EXPECT_FALSE(apm_->echo_cancellation()->is_enabled());
+  EXPECT_FALSE(apm_->echo_control_mobile()->is_enabled());
+  EXPECT_FALSE(apm_->gain_control()->is_enabled());
+  EXPECT_FALSE(apm_->high_pass_filter()->is_enabled());
+  EXPECT_FALSE(apm_->level_estimator()->is_enabled());
+  EXPECT_FALSE(apm_->noise_suppression()->is_enabled());
+  EXPECT_FALSE(apm_->voice_detection()->is_enabled());
+}
+
+TEST_F(ApmTest, NoProcessingWhenAllComponentsDisabled) {
+  for (size_t i = 0; i < kSampleRatesSize; i++) {
+    Init(kSampleRates[i], 2, 2, 2, false);
+    SetFrameTo(frame_, 1000, 2000);
+    AudioFrame frame_copy = *frame_;
+    for (int j = 0; j < 1000; j++) {
+      EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
+      EXPECT_TRUE(FrameDataAreEqual(*frame_, frame_copy));
+    }
+  }
+}
+
+TEST_F(ApmTest, IdenticalInputChannelsResultInIdenticalOutputChannels) {
+  EnableAllComponents();
+
+  for (size_t i = 0; i < kProcessSampleRatesSize; i++) {
+    Init(kProcessSampleRates[i], 2, 2, 2, false);
+    int analog_level = 127;
+    while (1) {
+      if (!ReadFrame(far_file_, revframe_)) break;
+      CopyLeftToRightChannel(revframe_->data_, revframe_->samples_per_channel_);
+
+      EXPECT_EQ(apm_->kNoError, apm_->AnalyzeReverseStream(revframe_));
+
+      if (!ReadFrame(near_file_, frame_)) break;
+      CopyLeftToRightChannel(frame_->data_, frame_->samples_per_channel_);
+      frame_->vad_activity_ = AudioFrame::kVadUnknown;
+
+      EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(0));
+      EXPECT_EQ(apm_->kNoError,
+          apm_->echo_cancellation()->set_stream_drift_samples(0));
+      EXPECT_EQ(apm_->kNoError,
+          apm_->gain_control()->set_stream_analog_level(analog_level));
+      EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
+      analog_level = apm_->gain_control()->stream_analog_level();
+
+      VerifyChannelsAreEqual(frame_->data_, frame_->samples_per_channel_);
+    }
+  }
+}
+
 TEST_F(ApmTest, SplittingFilter) {
   // Verify the filter is not active through undistorted audio when:
   // 1. No components are enabled...
@@ -935,9 +1166,9 @@
 
   // 5. Not using super-wb.
   EXPECT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(16000));
-  frame_->_payloadDataLengthInSamples = 160;
-  frame_->_audioChannel = 2;
-  frame_->_frequencyInHz = 16000;
+  frame_->samples_per_channel_ = 160;
+  frame_->num_channels_ = 2;
+  frame_->sample_rate_hz_ = 16000;
   // Enable AEC, which would require the filter in super-wb. We rely on the
   // first few frames of data being unaffected by the AEC.
   // TODO(andrew): This test, and the one below, rely rather tenuously on the
@@ -958,9 +1189,9 @@
   // Check the test is valid. We should have distortion from the filter
   // when AEC is enabled (which won't affect the audio).
   EXPECT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(32000));
-  frame_->_payloadDataLengthInSamples = 320;
-  frame_->_audioChannel = 2;
-  frame_->_frequencyInHz = 32000;
+  frame_->samples_per_channel_ = 320;
+  frame_->num_channels_ = 2;
+  frame_->sample_rate_hz_ = 32000;
   SetFrameTo(frame_, 1000);
   frame_copy = *frame_;
   EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(0));
@@ -985,8 +1216,11 @@
   EXPECT_EQ(apm_->kNoError, apm_->StopDebugRecording());
 
   // Verify the file has been written.
-  ASSERT_TRUE(fopen(filename.c_str(), "r") != NULL);
+  FILE* fid = fopen(filename.c_str(), "r");
+  ASSERT_TRUE(fid != NULL);
+
   // Clean it up.
+  ASSERT_EQ(0, fclose(fid));
   ASSERT_EQ(0, remove(filename.c_str()));
 #else
   EXPECT_EQ(apm_->kUnsupportedFunctionError,
@@ -998,90 +1232,44 @@
 #endif  // WEBRTC_AUDIOPROC_DEBUG_DUMP
 }
 
+// TODO(andrew): Add a test to process a few frames with different combinations
+// of enabled components.
+
+// TODO(andrew): Make this test more robust such that it can be run on multiple
+// platforms. It currently requires bit-exactness.
+#ifdef WEBRTC_AUDIOPROC_BIT_EXACT
 TEST_F(ApmTest, Process) {
   GOOGLE_PROTOBUF_VERIFY_VERSION;
-  webrtc::audioproc::OutputData output_data;
+  webrtc::audioproc::OutputData ref_data;
 
-  if (!write_output_data) {
-    ReadMessageLiteFromFile(output_filename, &output_data);
+  if (!write_ref_data) {
+    ReadMessageLiteFromFile(ref_filename_, &ref_data);
   } else {
-    // We don't have a file; add the required tests to the protobuf.
-    // TODO(ajm): vary the output channels as well?
-    const int channels[] = {1, 2};
-    const size_t channels_size = sizeof(channels) / sizeof(*channels);
-#if defined(WEBRTC_APM_UNIT_TEST_FIXED_PROFILE)
-    // AECM doesn't support super-wb.
-    const int sample_rates[] = {8000, 16000};
-#elif defined(WEBRTC_APM_UNIT_TEST_FLOAT_PROFILE)
-    const int sample_rates[] = {8000, 16000, 32000};
-#endif
-    const size_t sample_rates_size = sizeof(sample_rates) / sizeof(*sample_rates);
-    for (size_t i = 0; i < channels_size; i++) {
-      for (size_t j = 0; j < channels_size; j++) {
-        for (size_t k = 0; k < sample_rates_size; k++) {
-          webrtc::audioproc::Test* test = output_data.add_test();
-          test->set_num_reverse_channels(channels[i]);
-          test->set_num_input_channels(channels[j]);
-          test->set_num_output_channels(channels[j]);
-          test->set_sample_rate(sample_rates[k]);
+    // Write the desired tests to the protobuf reference file.
+    for (size_t i = 0; i < kChannelsSize; i++) {
+      for (size_t j = 0; j < kChannelsSize; j++) {
+        // We can't have more output than input channels.
+        for (size_t k = 0; k <= j; k++) {
+          for (size_t l = 0; l < kProcessSampleRatesSize; l++) {
+            webrtc::audioproc::Test* test = ref_data.add_test();
+            test->set_num_reverse_channels(kChannels[i]);
+            test->set_num_input_channels(kChannels[j]);
+            test->set_num_output_channels(kChannels[k]);
+            test->set_sample_rate(kProcessSampleRates[l]);
+          }
         }
       }
     }
   }
 
-#if defined(WEBRTC_APM_UNIT_TEST_FIXED_PROFILE)
-  EXPECT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(16000));
-  EXPECT_EQ(apm_->kNoError, apm_->echo_control_mobile()->Enable(true));
+  EnableAllComponents();
 
-  EXPECT_EQ(apm_->kNoError,
-            apm_->gain_control()->set_mode(GainControl::kAdaptiveDigital));
-  EXPECT_EQ(apm_->kNoError, apm_->gain_control()->Enable(true));
-#elif defined(WEBRTC_APM_UNIT_TEST_FLOAT_PROFILE)
-  EXPECT_EQ(apm_->kNoError,
-            apm_->echo_cancellation()->enable_drift_compensation(true));
-  EXPECT_EQ(apm_->kNoError,
-            apm_->echo_cancellation()->enable_metrics(true));
-  EXPECT_EQ(apm_->kNoError,
-            apm_->echo_cancellation()->enable_delay_logging(true));
-  EXPECT_EQ(apm_->kNoError, apm_->echo_cancellation()->Enable(true));
+  for (int i = 0; i < ref_data.test_size(); i++) {
+    printf("Running test %d of %d...\n", i + 1, ref_data.test_size());
 
-  EXPECT_EQ(apm_->kNoError,
-            apm_->gain_control()->set_mode(GainControl::kAdaptiveAnalog));
-  EXPECT_EQ(apm_->kNoError,
-            apm_->gain_control()->set_analog_level_limits(0, 255));
-  EXPECT_EQ(apm_->kNoError, apm_->gain_control()->Enable(true));
-#endif
-
-  EXPECT_EQ(apm_->kNoError,
-            apm_->high_pass_filter()->Enable(true));
-
-  EXPECT_EQ(apm_->kNoError,
-            apm_->level_estimator()->Enable(true));
-
-  EXPECT_EQ(apm_->kNoError,
-            apm_->noise_suppression()->Enable(true));
-
-  EXPECT_EQ(apm_->kNoError,
-            apm_->voice_detection()->Enable(true));
-
-  for (int i = 0; i < output_data.test_size(); i++) {
-    printf("Running test %d of %d...\n", i + 1, output_data.test_size());
-
-    webrtc::audioproc::Test* test = output_data.mutable_test(i);
-    const int samples_per_channel = test->sample_rate() / 100;
-    revframe_->_payloadDataLengthInSamples = samples_per_channel;
-    revframe_->_audioChannel = test->num_reverse_channels();
-    revframe_->_frequencyInHz = test->sample_rate();
-    frame_->_payloadDataLengthInSamples = samples_per_channel;
-    frame_->_audioChannel = test->num_input_channels();
-    frame_->_frequencyInHz = test->sample_rate();
-
-    EXPECT_EQ(apm_->kNoError, apm_->Initialize());
-    ASSERT_EQ(apm_->kNoError, apm_->set_sample_rate_hz(test->sample_rate()));
-    ASSERT_EQ(apm_->kNoError, apm_->set_num_channels(frame_->_audioChannel,
-                                                     frame_->_audioChannel));
-    ASSERT_EQ(apm_->kNoError,
-        apm_->set_num_reverse_channels(revframe_->_audioChannel));
+    webrtc::audioproc::Test* test = ref_data.mutable_test(i);
+    Init(test->sample_rate(), test->num_reverse_channels(),
+         test->num_input_channels(), test->num_output_channels(), true);
 
     int frame_count = 0;
     int has_echo_count = 0;
@@ -1090,51 +1278,24 @@
     int analog_level = 127;
     int analog_level_average = 0;
     int max_output_average = 0;
+    float ns_speech_prob_average = 0.0f;
 
     while (1) {
-      // Read far-end frame
-      const size_t frame_size = samples_per_channel * 2;
-      size_t read_count = fread(revframe_->_payloadData,
-                                sizeof(int16_t),
-                                frame_size,
-                                far_file_);
-      if (read_count != frame_size) {
-        // Check that the file really ended.
-        ASSERT_NE(0, feof(far_file_));
-        break; // This is expected.
-      }
-
-      if (revframe_->_audioChannel == 1) {
-        MixStereoToMono(revframe_->_payloadData, revframe_->_payloadData,
-                        samples_per_channel);
-      }
-
+      if (!ReadFrame(far_file_, revframe_)) break;
       EXPECT_EQ(apm_->kNoError, apm_->AnalyzeReverseStream(revframe_));
 
+      if (!ReadFrame(near_file_, frame_)) break;
+      frame_->vad_activity_ = AudioFrame::kVadUnknown;
+
       EXPECT_EQ(apm_->kNoError, apm_->set_stream_delay_ms(0));
       EXPECT_EQ(apm_->kNoError,
           apm_->echo_cancellation()->set_stream_drift_samples(0));
       EXPECT_EQ(apm_->kNoError,
           apm_->gain_control()->set_stream_analog_level(analog_level));
 
-      // Read near-end frame
-      read_count = fread(frame_->_payloadData,
-                         sizeof(int16_t),
-                         frame_size,
-                         near_file_);
-      if (read_count != frame_size) {
-        // Check that the file really ended.
-        ASSERT_NE(0, feof(near_file_));
-        break; // This is expected.
-      }
-
-      if (frame_->_audioChannel == 1) {
-        MixStereoToMono(frame_->_payloadData, frame_->_payloadData,
-                        samples_per_channel);
-      }
-      frame_->_vadActivity = AudioFrame::kVadUnknown;
-
       EXPECT_EQ(apm_->kNoError, apm_->ProcessStream(frame_));
+      // Ensure the frame was downmixed properly.
+      EXPECT_EQ(test->num_output_channels(), frame_->num_channels_);
 
       max_output_average += MaxAudioFrame(*frame_);
 
@@ -1149,17 +1310,29 @@
       }
       if (apm_->voice_detection()->stream_has_voice()) {
         has_voice_count++;
-        EXPECT_EQ(AudioFrame::kVadActive, frame_->_vadActivity);
+        EXPECT_EQ(AudioFrame::kVadActive, frame_->vad_activity_);
       } else {
-        EXPECT_EQ(AudioFrame::kVadPassive, frame_->_vadActivity);
+        EXPECT_EQ(AudioFrame::kVadPassive, frame_->vad_activity_);
       }
 
+      ns_speech_prob_average += apm_->noise_suppression()->speech_probability();
+
+      size_t frame_size = frame_->samples_per_channel_ * frame_->num_channels_;
+      size_t write_count = fwrite(frame_->data_,
+                                  sizeof(int16_t),
+                                  frame_size,
+                                  out_file_);
+      ASSERT_EQ(frame_size, write_count);
+
+      // Reset in case of downmixing.
+      frame_->num_channels_ = test->num_input_channels();
       frame_count++;
     }
     max_output_average /= frame_count;
     analog_level_average /= frame_count;
+    ns_speech_prob_average /= frame_count;
 
-#if defined(WEBRTC_APM_UNIT_TEST_FLOAT_PROFILE)
+#if defined(WEBRTC_AUDIOPROC_FLOAT_PROFILE)
     EchoCancellation::Metrics echo_metrics;
     EXPECT_EQ(apm_->kNoError,
               apm_->echo_cancellation()->GetMetrics(&echo_metrics));
@@ -1173,7 +1346,7 @@
     EXPECT_GE(127, rms_level);
 #endif
 
-    if (!write_output_data) {
+    if (!write_ref_data) {
       EXPECT_EQ(test->has_echo_count(), has_echo_count);
       EXPECT_EQ(test->has_voice_count(), has_voice_count);
       EXPECT_EQ(test->is_saturated_count(), is_saturated_count);
@@ -1181,7 +1354,7 @@
       EXPECT_EQ(test->analog_level_average(), analog_level_average);
       EXPECT_EQ(test->max_output_average(), max_output_average);
 
-#if defined(WEBRTC_APM_UNIT_TEST_FLOAT_PROFILE)
+#if defined(WEBRTC_AUDIOPROC_FLOAT_PROFILE)
       webrtc::audioproc::Test::EchoMetrics reference =
           test->echo_metrics();
       TestStats(echo_metrics.residual_echo_return_loss,
@@ -1199,6 +1372,9 @@
       EXPECT_EQ(reference_delay.std(), std);
 
       EXPECT_EQ(test->rms_level(), rms_level);
+
+      EXPECT_FLOAT_EQ(test->ns_speech_probability_average(),
+                      ns_speech_prob_average);
 #endif
     } else {
       test->set_has_echo_count(has_echo_count);
@@ -1208,7 +1384,7 @@
       test->set_analog_level_average(analog_level_average);
       test->set_max_output_average(max_output_average);
 
-#if defined(WEBRTC_APM_UNIT_TEST_FLOAT_PROFILE)
+#if defined(WEBRTC_AUDIOPROC_FLOAT_PROFILE)
       webrtc::audioproc::Test::EchoMetrics* message =
           test->mutable_echo_metrics();
       WriteStatsMessage(echo_metrics.residual_echo_return_loss,
@@ -1226,6 +1402,10 @@
       message_delay->set_std(std);
 
       test->set_rms_level(rms_level);
+
+      EXPECT_LE(0.0f, ns_speech_prob_average);
+      EXPECT_GE(1.0f, ns_speech_prob_average);
+      test->set_ns_speech_probability_average(ns_speech_prob_average);
 #endif
     }
 
@@ -1233,18 +1413,20 @@
     rewind(near_file_);
   }
 
-  if (write_output_data) {
-    WriteMessageLiteToFile(output_filename, output_data);
+  if (write_ref_data) {
+    WriteMessageLiteToFile(ref_filename_, ref_data);
   }
 }
+#endif  // WEBRTC_AUDIOPROC_BIT_EXACT
+
 }  // namespace
 
 int main(int argc, char** argv) {
   ::testing::InitGoogleTest(&argc, argv);
 
   for (int i = 1; i < argc; i++) {
-    if (strcmp(argv[i], "--write_output_data") == 0) {
-      write_output_data = true;
+    if (strcmp(argv[i], "--write_ref_data") == 0) {
+      write_ref_data = true;
     }
   }
 
diff --git a/src/modules/audio_processing/test/unittest.proto b/src/modules/audio_processing/test/unittest.proto
index 67ba722..09ec942 100644
--- a/src/modules/audio_processing/test/unittest.proto
+++ b/src/modules/audio_processing/test/unittest.proto
@@ -44,6 +44,9 @@
   optional DelayMetrics delay_metrics = 12;
 
   optional int32 rms_level = 13;
+
+  optional float ns_speech_probability_average = 14;
+
 }
 
 message OutputData {
diff --git a/src/modules/audio_processing/utility/delay_estimator.c b/src/modules/audio_processing/utility/delay_estimator.c
index 24ee74d..3d1d7c4 100644
--- a/src/modules/audio_processing/utility/delay_estimator.c
+++ b/src/modules/audio_processing/utility/delay_estimator.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -59,107 +59,79 @@
   }
 }
 
-int WebRtc_FreeBinaryDelayEstimator(BinaryDelayEstimator* handle) {
-  assert(handle != NULL);
-
-  if (handle->mean_bit_counts != NULL) {
-    free(handle->mean_bit_counts);
-    handle->mean_bit_counts = NULL;
-  }
-  if (handle->bit_counts != NULL) {
-    free(handle->bit_counts);
-    handle->bit_counts = NULL;
-  }
-  if (handle->binary_far_history != NULL) {
-    free(handle->binary_far_history);
-    handle->binary_far_history = NULL;
-  }
-  if (handle->binary_near_history != NULL) {
-    free(handle->binary_near_history);
-    handle->binary_near_history = NULL;
-  }
-  if (handle->far_bit_counts != NULL) {
-    free(handle->far_bit_counts);
-    handle->far_bit_counts = NULL;
-  }
-
-  free(handle);
-
-  return 0;
-}
-
-int WebRtc_CreateBinaryDelayEstimator(BinaryDelayEstimator** handle,
-                                      int max_delay,
-                                      int lookahead) {
-  BinaryDelayEstimator* self = NULL;
-  int history_size = max_delay + lookahead;
+void WebRtc_FreeBinaryDelayEstimator(BinaryDelayEstimator* handle) {
 
   if (handle == NULL) {
-    return -1;
-  }
-  if (max_delay < 0) {
-    return -1;
-  }
-  if (lookahead < 0) {
-    return -1;
-  }
-  if (history_size < 2) {
-    // Must be this large for buffer shifting.
-    return -1;
+    return;
   }
 
-  self = malloc(sizeof(BinaryDelayEstimator));
-  *handle = self;
-  if (self == NULL) {
-    return -1;
-  }
+  free(handle->mean_bit_counts);
+  handle->mean_bit_counts = NULL;
 
-  self->mean_bit_counts = NULL;
-  self->bit_counts = NULL;
-  self->binary_far_history = NULL;
-  self->far_bit_counts = NULL;
+  free(handle->bit_counts);
+  handle->bit_counts = NULL;
 
-  self->history_size = history_size;
-  self->near_history_size = lookahead + 1;
+  free(handle->binary_far_history);
+  handle->binary_far_history = NULL;
 
-  // Allocate memory for spectrum buffers.
-  self->mean_bit_counts = malloc(history_size * sizeof(int32_t));
-  if (self->mean_bit_counts == NULL) {
-    WebRtc_FreeBinaryDelayEstimator(self);
-    self = NULL;
-    return -1;
-  }
-  self->bit_counts = malloc(history_size * sizeof(int32_t));
-  if (self->bit_counts == NULL) {
-    WebRtc_FreeBinaryDelayEstimator(self);
-    self = NULL;
-    return -1;
-  }
-  // Allocate memory for history buffers.
-  self->binary_far_history = malloc(history_size * sizeof(uint32_t));
-  if (self->binary_far_history == NULL) {
-    WebRtc_FreeBinaryDelayEstimator(self);
-    self = NULL;
-    return -1;
-  }
-  self->binary_near_history = malloc(self->near_history_size *
-      sizeof(uint32_t));
-  if (self->binary_near_history == NULL) {
-    WebRtc_FreeBinaryDelayEstimator(self);
-    self = NULL;
-    return -1;
-  }
-  self->far_bit_counts = malloc(history_size * sizeof(int));
-  if (self->far_bit_counts == NULL) {
-    WebRtc_FreeBinaryDelayEstimator(self);
-    self = NULL;
-    return -1;
-  }
+  free(handle->binary_near_history);
+  handle->binary_near_history = NULL;
 
-  return 0;
+  free(handle->far_bit_counts);
+  handle->far_bit_counts = NULL;
+
+  free(handle);
 }
 
-int WebRtc_InitBinaryDelayEstimator(BinaryDelayEstimator* handle) {
+BinaryDelayEstimator* WebRtc_CreateBinaryDelayEstimator(int max_delay,
+                                                        int lookahead) {
+  BinaryDelayEstimator* self = NULL;
+  int history_size = max_delay + lookahead;  // Must be > 1 for buffer shifting.
+
+  if ((max_delay >= 0) && (lookahead >= 0) && (history_size > 1)) {
+    // Sanity conditions fulfilled.
+    self = malloc(sizeof(BinaryDelayEstimator));
+  }
+
+  if (self != NULL) {
+    int malloc_fail = 0;
+
+    self->mean_bit_counts = NULL;
+    self->bit_counts = NULL;
+    self->binary_far_history = NULL;
+    self->far_bit_counts = NULL;
+    self->binary_near_history = NULL;
+
+    self->history_size = history_size;
+    self->near_history_size = lookahead + 1;
+
+    // Allocate memory for spectrum buffers.
+    self->mean_bit_counts = malloc(history_size * sizeof(int32_t));
+    malloc_fail |= (self->mean_bit_counts == NULL);
+
+    self->bit_counts = malloc(history_size * sizeof(int32_t));
+    malloc_fail |= (self->bit_counts == NULL);
+
+    // Allocate memory for history buffers.
+    self->binary_far_history = malloc(history_size * sizeof(uint32_t));
+    malloc_fail |= (self->binary_far_history == NULL);
+
+    self->binary_near_history = malloc((lookahead + 1) * sizeof(uint32_t));
+    malloc_fail |= (self->binary_near_history == NULL);
+
+    self->far_bit_counts = malloc(history_size * sizeof(int));
+    malloc_fail |= (self->far_bit_counts == NULL);
+
+    if (malloc_fail) {
+      WebRtc_FreeBinaryDelayEstimator(self);
+      self = NULL;
+    }
+  }
+
+  return self;
+}
+
+void WebRtc_InitBinaryDelayEstimator(BinaryDelayEstimator* handle) {
   int i = 0;
   assert(handle != NULL);
 
@@ -177,8 +149,6 @@
 
   // Default return value if we're unable to estimate. -1 is used for errors.
   handle->last_delay = -2;
-
-  return 0;
 }
 
 int WebRtc_ProcessBinarySpectrum(BinaryDelayEstimator* handle,
@@ -299,11 +269,6 @@
   return handle->last_delay;
 }
 
-int WebRtc_history_size(BinaryDelayEstimator* handle) {
-  assert(handle != NULL);
-  return handle->history_size;
-}
-
 void WebRtc_MeanEstimatorFix(int32_t new_value,
                              int factor,
                              int32_t* mean_value) {
diff --git a/src/modules/audio_processing/utility/delay_estimator.h b/src/modules/audio_processing/utility/delay_estimator.h
index a376dfe..93c4b4e 100644
--- a/src/modules/audio_processing/utility/delay_estimator.h
+++ b/src/modules/audio_processing/utility/delay_estimator.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -45,14 +45,15 @@
 
 // Releases the memory allocated by WebRtc_CreateBinaryDelayEstimator(...).
 // Input:
-//    - handle            : Pointer to the delay estimation instance.
+//    - handle            : Pointer to the binary delay estimation instance
+//                          which is the return value of
+//                          WebRtc_CreateBinaryDelayEstimator().
 //
-int WebRtc_FreeBinaryDelayEstimator(BinaryDelayEstimator* handle);
+void WebRtc_FreeBinaryDelayEstimator(BinaryDelayEstimator* handle);
 
 // Refer to WebRtc_CreateDelayEstimator() in delay_estimator_wrapper.h.
-int WebRtc_CreateBinaryDelayEstimator(BinaryDelayEstimator** handle,
-                                      int max_delay,
-                                      int lookahead);
+BinaryDelayEstimator* WebRtc_CreateBinaryDelayEstimator(int max_delay,
+                                                        int lookahead);
 
 // Initializes the delay estimation instance created with
 // WebRtc_CreateBinaryDelayEstimator(...).
@@ -62,7 +63,7 @@
 // Output:
 //    - handle            : Initialized instance.
 //
-int WebRtc_InitBinaryDelayEstimator(BinaryDelayEstimator* handle);
+void WebRtc_InitBinaryDelayEstimator(BinaryDelayEstimator* handle);
 
 // Estimates and returns the delay between the binary far-end and binary near-
 // end spectra. The value will be offset by the lookahead (i.e. the lookahead
@@ -97,18 +98,6 @@
 //
 int WebRtc_binary_last_delay(BinaryDelayEstimator* handle);
 
-// Returns the history size used in the far-end buffers to calculate the delay
-// over.
-//
-// Input:
-//    - handle                : Pointer to the delay estimation instance.
-//
-// Return value:
-//    - history_size          :  > 0  - Far-end history size.
-//                              -1    - Error.
-//
-int WebRtc_history_size(BinaryDelayEstimator* handle);
-
 // Updates the |mean_value| recursively with a step size of 2^-|factor|. This
 // function is used internally in the Binary Delay Estimator as well as the
 // Fixed point wrapper.
diff --git a/src/modules/audio_processing/utility/delay_estimator_internal.h b/src/modules/audio_processing/utility/delay_estimator_internal.h
new file mode 100644
index 0000000..46b19ca
--- /dev/null
+++ b/src/modules/audio_processing/utility/delay_estimator_internal.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Header file including the delay estimator handle used for testing.
+
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_INTERNAL_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_INTERNAL_H_
+
+#include "modules/audio_processing/utility/delay_estimator.h"
+#include "typedefs.h"
+
+typedef union {
+  float float_;
+  int32_t int32_;
+} SpectrumType;
+
+typedef struct {
+  // Pointers to mean values of spectrum.
+  SpectrumType* mean_far_spectrum;
+  SpectrumType* mean_near_spectrum;
+  // |mean_*_spectrum| initialization indicator.
+  int far_spectrum_initialized;
+  int near_spectrum_initialized;
+
+  int spectrum_size;
+
+  // Binary spectrum based delay estimator
+  BinaryDelayEstimator* binary_handle;
+} DelayEstimator;
+
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_UTILITY_DELAY_ESTIMATOR_INTERNAL_H_
diff --git a/src/modules/audio_processing/utility/delay_estimator_unittest.cc b/src/modules/audio_processing/utility/delay_estimator_unittest.cc
new file mode 100644
index 0000000..aaaeff4
--- /dev/null
+++ b/src/modules/audio_processing/utility/delay_estimator_unittest.cc
@@ -0,0 +1,302 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gtest/gtest.h"
+
+extern "C" {
+#include "modules/audio_processing/utility/delay_estimator.h"
+#include "modules/audio_processing/utility/delay_estimator_internal.h"
+#include "modules/audio_processing/utility/delay_estimator_wrapper.h"
+}
+#include "typedefs.h"
+
+namespace {
+
+enum { kSpectrumSize = 65 };
+// Delay history sizes.
+enum { kMaxDelay = 100 };
+enum { kLookahead = 10 };
+
+class DelayEstimatorTest : public ::testing::Test {
+ protected:
+  DelayEstimatorTest();
+  virtual void SetUp();
+  virtual void TearDown();
+
+  void Init();
+
+  void InitBinary();
+
+  void* handle_;
+  DelayEstimator* self_;
+  BinaryDelayEstimator* binary_handle_;
+  int spectrum_size_;
+  // Dummy input spectra.
+  float far_f_[kSpectrumSize];
+  float near_f_[kSpectrumSize];
+  uint16_t far_u16_[kSpectrumSize];
+  uint16_t near_u16_[kSpectrumSize];
+};
+
+DelayEstimatorTest::DelayEstimatorTest()
+    : handle_(NULL),
+      self_(NULL),
+      binary_handle_(NULL),
+      spectrum_size_(kSpectrumSize) {
+  // Dummy input data are set with more or less arbitrary non-zero values.
+  memset(far_f_, 1, sizeof(far_f_));
+  memset(near_f_, 2, sizeof(near_f_));
+  memset(far_u16_, 1, sizeof(far_u16_));
+  memset(near_u16_, 2, sizeof(near_u16_));
+}
+
+void DelayEstimatorTest::SetUp() {
+  handle_ = WebRtc_CreateDelayEstimator(kSpectrumSize, kMaxDelay, kLookahead);
+  ASSERT_TRUE(handle_ != NULL);
+  self_ = reinterpret_cast<DelayEstimator*>(handle_);
+  binary_handle_ = self_->binary_handle;
+}
+
+void DelayEstimatorTest::TearDown() {
+  WebRtc_FreeDelayEstimator(handle_);
+  handle_ = NULL;
+  self_ = NULL;
+  binary_handle_ = NULL;
+}
+
+void DelayEstimatorTest::Init() {
+  // Initialize Delay Estimator
+  EXPECT_EQ(0, WebRtc_InitDelayEstimator(handle_));
+  // Verify initialization.
+  EXPECT_EQ(0, self_->far_spectrum_initialized);
+  EXPECT_EQ(0, self_->near_spectrum_initialized);
+}
+
+void DelayEstimatorTest::InitBinary() {
+  // Initialize Binary Delay Estimator
+  WebRtc_InitBinaryDelayEstimator(binary_handle_);
+  // Verify initialization. This does not guarantee a complete check, since
+  // |last_delay| may be equal to -2 before initialization if done on the fly.
+  EXPECT_EQ(-2, binary_handle_->last_delay);
+}
+
+TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfWrapper) {
+  // In this test we verify correct error returns on invalid API calls.
+
+  // WebRtc_CreateDelayEstimator() should return a NULL pointer on invalid input
+  // values.
+  // Make sure we have a non-NULL value at start, so we can detect NULL after
+  // create failure.
+  void* handle = handle_;
+  handle = WebRtc_CreateDelayEstimator(33, kMaxDelay, kLookahead);
+  EXPECT_TRUE(handle == NULL);
+  handle = handle_;
+  handle = WebRtc_CreateDelayEstimator(kSpectrumSize, -1, kLookahead);
+  EXPECT_TRUE(handle == NULL);
+  handle = handle_;
+  handle = WebRtc_CreateDelayEstimator(kSpectrumSize, kMaxDelay, -1);
+  EXPECT_TRUE(handle == NULL);
+  handle = handle_;
+  handle = WebRtc_CreateDelayEstimator(kSpectrumSize, 0, 0);
+  EXPECT_TRUE(handle == NULL);
+
+  // WebRtc_InitDelayEstimator() should return -1 if we have a NULL pointer as
+  // |handle|.
+  EXPECT_EQ(-1, WebRtc_InitDelayEstimator(NULL));
+
+  // WebRtc_DelayEstimatorProcessFloat() should return -1 if we have:
+  // 1) NULL pointer as |handle|.
+  // 2) NULL pointer as far-end spectrum.
+  // 3) NULL pointer as near-end spectrum.
+  // 4) Incorrect spectrum size.
+  EXPECT_EQ(-1, WebRtc_DelayEstimatorProcessFloat(NULL, far_f_, near_f_,
+                                                  spectrum_size_));
+  // Use |handle_| which is properly created at SetUp().
+  EXPECT_EQ(-1, WebRtc_DelayEstimatorProcessFloat(handle_, NULL, near_f_,
+                                                  spectrum_size_));
+  EXPECT_EQ(-1, WebRtc_DelayEstimatorProcessFloat(handle_, far_f_, NULL,
+                                                  spectrum_size_));
+  EXPECT_EQ(-1, WebRtc_DelayEstimatorProcessFloat(handle_, far_f_, near_f_,
+                                                  spectrum_size_ + 1));
+
+  // WebRtc_DelayEstimatorProcessFix() should return -1 if we have:
+  // 1) NULL pointer as |handle|.
+  // 2) NULL pointer as far-end spectrum.
+  // 3) NULL pointer as near-end spectrum.
+  // 4) Incorrect spectrum size.
+  // 5) Too high precision in far-end spectrum (Q-domain > 15).
+  // 6) Too high precision in near-end spectrum (Q-domain > 15).
+  EXPECT_EQ(-1, WebRtc_DelayEstimatorProcessFix(NULL, far_u16_, near_u16_,
+                                                spectrum_size_, 0, 0));
+  // Use |handle_| which is properly created at SetUp().
+  EXPECT_EQ(-1, WebRtc_DelayEstimatorProcessFix(handle_, NULL, near_u16_,
+                                                spectrum_size_, 0, 0));
+  EXPECT_EQ(-1, WebRtc_DelayEstimatorProcessFix(handle_, far_u16_, NULL,
+                                                spectrum_size_, 0, 0));
+  EXPECT_EQ(-1, WebRtc_DelayEstimatorProcessFix(handle_, far_u16_, near_u16_,
+                                                spectrum_size_ + 1, 0, 0));
+  EXPECT_EQ(-1, WebRtc_DelayEstimatorProcessFix(handle_, far_u16_, near_u16_,
+                                                spectrum_size_, 16, 0));
+  EXPECT_EQ(-1, WebRtc_DelayEstimatorProcessFix(handle_, far_u16_, near_u16_,
+                                                spectrum_size_, 0, 16));
+
+  // WebRtc_last_delay() should return -1 if we have a NULL pointer as |handle|.
+  EXPECT_EQ(-1, WebRtc_last_delay(NULL));
+
+  // Free any local memory if needed.
+  WebRtc_FreeDelayEstimator(handle);
+}
+
+TEST_F(DelayEstimatorTest, InitializedSpectrumAfterProcess) {
+  // In this test we verify that the mean spectra are initialized after first
+  // time we call Process().
+
+  // For floating point operations, process one frame and verify initialization
+  // flag.
+  Init();
+  EXPECT_EQ(-2, WebRtc_DelayEstimatorProcessFloat(handle_, far_f_, near_f_,
+                                                  spectrum_size_));
+  EXPECT_EQ(1, self_->far_spectrum_initialized);
+  EXPECT_EQ(1, self_->near_spectrum_initialized);
+
+  // For fixed point operations, process one frame and verify initialization
+  // flag.
+  Init();
+  EXPECT_EQ(-2, WebRtc_DelayEstimatorProcessFix(handle_, far_u16_, near_u16_,
+                                                spectrum_size_, 0, 0));
+  EXPECT_EQ(1, self_->far_spectrum_initialized);
+  EXPECT_EQ(1, self_->near_spectrum_initialized);
+}
+
+TEST_F(DelayEstimatorTest, CorrectLastDelay) {
+  // In this test we verify that we get the correct last delay upon valid call.
+  // We simply process the same data until we leave the initialized state
+  // (|last_delay| = -2). Then we compare the Process() output with the
+  // last_delay() call.
+
+  int last_delay = 0;
+  // Floating point operations.
+  Init();
+  for (int i = 0; i < 200; i++) {
+    last_delay = WebRtc_DelayEstimatorProcessFloat(handle_, far_f_, near_f_,
+                                                   spectrum_size_);
+    if (last_delay != -2) {
+      EXPECT_EQ(last_delay, WebRtc_last_delay(handle_));
+      break;
+    }
+  }
+  // Verify that we have left the initialized state.
+  EXPECT_NE(-2, WebRtc_last_delay(handle_));
+
+  // Fixed point operations.
+  Init();
+  for (int i = 0; i < 200; i++) {
+    last_delay = WebRtc_DelayEstimatorProcessFix(handle_, far_u16_, near_u16_,
+                                                 spectrum_size_, 0, 0);
+    if (last_delay != -2) {
+      EXPECT_EQ(last_delay, WebRtc_last_delay(handle_));
+      break;
+    }
+  }
+  // Verify that we have left the initialized state.
+  EXPECT_NE(-2, WebRtc_last_delay(handle_));
+}
+
+TEST_F(DelayEstimatorTest, CorrectErrorReturnsOfBinaryEstimator) {
+  // In this test we verify correct output on invalid API calls to the Binary
+  // Delay Estimator.
+
+  BinaryDelayEstimator* binary_handle = binary_handle_;
+  // WebRtc_CreateBinaryDelayEstimator() should return -1 if we have a NULL
+  // pointer as |binary_handle| or invalid input values. Upon failure, the
+  // |binary_handle| should be NULL.
+  // Make sure we have a non-NULL value at start, so we can detect NULL after
+  // create failure.
+  binary_handle = WebRtc_CreateBinaryDelayEstimator(-1, kLookahead);
+  EXPECT_TRUE(binary_handle == NULL);
+  binary_handle = binary_handle_;
+  binary_handle = WebRtc_CreateBinaryDelayEstimator(kMaxDelay, -1);
+  EXPECT_TRUE(binary_handle == NULL);
+  binary_handle = binary_handle_;
+  binary_handle = WebRtc_CreateBinaryDelayEstimator(0, 0);
+  EXPECT_TRUE(binary_handle == NULL);
+
+  // TODO(bjornv): It is not feasible to force an error of
+  // WebRtc_ProcessBinarySpectrum(). This can only happen if we have more than
+  // 32 bits in our binary spectrum comparison, which by definition can't
+  // happen.
+  // We should therefore remove that option from the code.
+
+  // WebRtc_binary_last_delay() can't return -1 either.
+}
+
+TEST_F(DelayEstimatorTest, MeanEstimatorFix) {
+  // In this test we verify that we update the mean value in correct direction
+  // only. With "direction" we mean increase or decrease.
+
+  InitBinary();
+
+  int32_t mean_value = 4000;
+  int32_t mean_value_before = mean_value;
+  int32_t new_mean_value = mean_value * 2;
+
+  // Increasing |mean_value|.
+  WebRtc_MeanEstimatorFix(new_mean_value, 10, &mean_value);
+  EXPECT_LT(mean_value_before, mean_value);
+  EXPECT_GT(new_mean_value, mean_value);
+
+  // Decreasing |mean_value|.
+  new_mean_value = mean_value / 2;
+  mean_value_before = mean_value;
+  WebRtc_MeanEstimatorFix(new_mean_value, 10, &mean_value);
+  EXPECT_GT(mean_value_before, mean_value);
+  EXPECT_LT(new_mean_value, mean_value);
+}
+
+TEST_F(DelayEstimatorTest, ExactDelayEstimate) {
+  // In this test we verify that we get the correct delay estimate if we shift
+  // the signal accordingly. We verify both causal and non-causal delays.
+
+  // Construct a sequence of binary spectra used to verify delay estimate. The
+  // |sequence_length| has to be long enough for the delay estimation to leave
+  // the initialized state.
+  const int sequence_length = 400;
+  uint32_t binary_spectrum[sequence_length + kMaxDelay + kLookahead];
+  binary_spectrum[0] = 1;
+  for (int i = 1; i < (sequence_length + kMaxDelay + kLookahead); i++) {
+    binary_spectrum[i] = 3 * binary_spectrum[i - 1];
+  }
+
+  // Verify the delay for both causal and non-causal systems. For causal systems
+  // the delay is equivalent with a positive |offset| of the far-end sequence.
+  // For non-causal systems the delay is equivalent with a negative |offset| of
+  // the far-end sequence.
+  for (int offset = -kLookahead; offset < kMaxDelay; offset++) {
+    InitBinary();
+    for (int i = kLookahead; i < (sequence_length + kLookahead); i++) {
+      int delay = WebRtc_ProcessBinarySpectrum(binary_handle_,
+                                               binary_spectrum[i + offset],
+                                               binary_spectrum[i]);
+
+      // Verify that we WebRtc_binary_last_delay() returns correct delay.
+      EXPECT_EQ(delay, WebRtc_binary_last_delay(binary_handle_));
+
+      if (delay != -2) {
+        // Verify correct delay estimate. In the non-causal case the true delay
+        // is equivalent with the |offset|.
+        EXPECT_EQ(offset, delay - kLookahead);
+      }
+    }
+    // Verify that we have left the initialized state.
+    EXPECT_NE(-2, WebRtc_binary_last_delay(binary_handle_));
+  }
+}
+
+}  // namespace
diff --git a/src/modules/audio_processing/utility/delay_estimator_wrapper.c b/src/modules/audio_processing/utility/delay_estimator_wrapper.c
index 438c95f..7491807 100644
--- a/src/modules/audio_processing/utility/delay_estimator_wrapper.c
+++ b/src/modules/audio_processing/utility/delay_estimator_wrapper.c
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -15,25 +15,7 @@
 #include <string.h>
 
 #include "delay_estimator.h"
-
-typedef union {
-  float float_;
-  int32_t int32_;
-} SpectrumType;
-
-typedef struct {
-  // Pointers to mean values of spectrum.
-  SpectrumType* mean_far_spectrum;
-  SpectrumType* mean_near_spectrum;
-  // |mean_*_spectrum| initialization indicator.
-  int far_spectrum_initialized;
-  int near_spectrum_initialized;
-
-  int spectrum_size;
-
-  // Binary spectrum based delay estimator
-  BinaryDelayEstimator* binary_handle;
-} DelayEstimator;
+#include "modules/audio_processing/utility/delay_estimator_internal.h"
 
 // Only bit |kBandFirst| through bit |kBandLast| are processed and
 // |kBandFirst| - |kBandLast| must be < 32.
@@ -140,80 +122,64 @@
   return out;
 }
 
-int WebRtc_FreeDelayEstimator(void* handle) {
+void WebRtc_FreeDelayEstimator(void* handle) {
   DelayEstimator* self = (DelayEstimator*) handle;
 
-  if (self == NULL) {
-    return -1;
+  if (handle == NULL) {
+    return;
   }
 
-  if (self->mean_far_spectrum != NULL) {
-    free(self->mean_far_spectrum);
-    self->mean_far_spectrum = NULL;
-  }
-  if (self->mean_near_spectrum != NULL) {
-    free(self->mean_near_spectrum);
-    self->mean_near_spectrum = NULL;
-  }
+  free(self->mean_far_spectrum);
+  self->mean_far_spectrum = NULL;
+
+  free(self->mean_near_spectrum);
+  self->mean_near_spectrum = NULL;
 
   WebRtc_FreeBinaryDelayEstimator(self->binary_handle);
+  self->binary_handle = NULL;
 
   free(self);
-
-  return 0;
 }
 
-int WebRtc_CreateDelayEstimator(void** handle,
-                                int spectrum_size,
-                                int max_delay,
-                                int lookahead) {
+void* WebRtc_CreateDelayEstimator(int spectrum_size, int max_delay,
+                                  int lookahead) {
   DelayEstimator* self = NULL;
 
+  // TODO(bjornv): Make this a static assert.
   // Check if the sub band used in the delay estimation is small enough to fit
   // the binary spectra in a uint32_t.
   assert(kBandLast - kBandFirst < 32);
 
-  if (handle == NULL) {
-    return -1;
-  }
-  if (spectrum_size < kBandLast) {
-    return -1;
+  if (spectrum_size >= kBandLast) {
+    self = malloc(sizeof(DelayEstimator));
   }
 
-  self = malloc(sizeof(DelayEstimator));
-  *handle = self;
-  if (self == NULL) {
-    return -1;
+  if (self != NULL) {
+    int memory_fail = 0;
+
+    self->mean_far_spectrum = NULL;
+    self->mean_near_spectrum = NULL;
+
+    self->binary_handle = WebRtc_CreateBinaryDelayEstimator(max_delay,
+                                                            lookahead);
+    memory_fail |= (self->binary_handle == NULL);
+
+    // Allocate memory for spectrum buffers.
+    self->mean_far_spectrum = malloc(spectrum_size * sizeof(SpectrumType));
+    memory_fail |= (self->mean_far_spectrum == NULL);
+
+    self->mean_near_spectrum = malloc(spectrum_size * sizeof(SpectrumType));
+    memory_fail |= (self->mean_near_spectrum == NULL);
+
+    self->spectrum_size = spectrum_size;
+
+    if (memory_fail) {
+      WebRtc_FreeDelayEstimator(self);
+      self = NULL;
+    }
   }
 
-  self->mean_far_spectrum = NULL;
-  self->mean_near_spectrum = NULL;
-
-  // Create binary delay estimator.
-  if (WebRtc_CreateBinaryDelayEstimator(&self->binary_handle,
-                                        max_delay,
-                                        lookahead) != 0) {
-    WebRtc_FreeDelayEstimator(self);
-    self = NULL;
-    return -1;
-  }
-  // Allocate memory for spectrum buffers.
-  self->mean_far_spectrum = malloc(spectrum_size * sizeof(SpectrumType));
-  if (self->mean_far_spectrum == NULL) {
-    WebRtc_FreeDelayEstimator(self);
-    self = NULL;
-    return -1;
-  }
-  self->mean_near_spectrum = malloc(spectrum_size * sizeof(SpectrumType));
-  if (self->mean_near_spectrum == NULL) {
-    WebRtc_FreeDelayEstimator(self);
-    self = NULL;
-    return -1;
-  }
-
-  self->spectrum_size = spectrum_size;
-
-  return 0;
+  return self;
 }
 
 int WebRtc_InitDelayEstimator(void* handle) {
@@ -224,9 +190,8 @@
   }
 
   // Initialize binary delay estimator.
-  if (WebRtc_InitBinaryDelayEstimator(self->binary_handle) != 0) {
-    return -1;
-  }
+  WebRtc_InitBinaryDelayEstimator(self->binary_handle);
+
   // Set averaged far and near end spectra to zero.
   memset(self->mean_far_spectrum, 0,
          sizeof(SpectrumType) * self->spectrum_size);
diff --git a/src/modules/audio_processing/utility/delay_estimator_wrapper.h b/src/modules/audio_processing/utility/delay_estimator_wrapper.h
index 2a47b5d..4591e4b 100644
--- a/src/modules/audio_processing/utility/delay_estimator_wrapper.h
+++ b/src/modules/audio_processing/utility/delay_estimator_wrapper.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -20,13 +20,12 @@
 // Input:
 //      - handle        : Pointer to the delay estimation instance.
 //
-int WebRtc_FreeDelayEstimator(void* handle);
+void WebRtc_FreeDelayEstimator(void* handle);
 
 // Allocates the memory needed by the delay estimation. The memory needs to be
 // initialized separately through WebRtc_InitDelayEstimator(...).
 //
 // Inputs:
-//      - handle        : Instance that should be created.
 //      - spectrum_size : Size of the spectrum used both in far-end and
 //                        near-end. Used to allocate memory for spectrum
 //                        specific buffers.
@@ -42,15 +41,15 @@
 //                        This also represents the minimum delay which can be
 //                        estimated.
 //
-// Output:
-//      - handle        : Created instance.
+// Return value:
+//      - void*         : Created |handle|. If the memory can't be allocated or
+//                        if any of the input parameters are invalid NULL is
+//                        returned.
 //
-int WebRtc_CreateDelayEstimator(void** handle,
-                                int spectrum_size,
-                                int max_delay,
-                                int lookahead);
+void* WebRtc_CreateDelayEstimator(int spectrum_size, int max_delay,
+                                  int lookahead);
 
-// Initializes the delay estimation instance created with
+// Initializes the delay estimation instance returned by
 // WebRtc_CreateDelayEstimator(...)
 // Input:
 //      - handle        : Pointer to the delay estimation instance.
diff --git a/src/modules/audio_processing/utility/util.gypi b/src/modules/audio_processing/utility/util.gypi
index 3c3024a..7551322 100644
--- a/src/modules/audio_processing/utility/util.gypi
+++ b/src/modules/audio_processing/utility/util.gypi
@@ -22,6 +22,7 @@
       'sources': [
         'delay_estimator.c',
         'delay_estimator.h',
+        'delay_estimator_internal.h',
         'delay_estimator_wrapper.c',
         'delay_estimator_wrapper.h',
         'fft4g.c',
diff --git a/src/modules/audio_processing/voice_detection_impl.cc b/src/modules/audio_processing/voice_detection_impl.cc
index 49aac2e..50b99a0 100644
--- a/src/modules/audio_processing/voice_detection_impl.cc
+++ b/src/modules/audio_processing/voice_detection_impl.cc
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -23,27 +23,22 @@
 typedef VadInst Handle;
 
 namespace {
-WebRtc_Word16 MapSetting(VoiceDetection::Likelihood likelihood) {
+int MapSetting(VoiceDetection::Likelihood likelihood) {
   switch (likelihood) {
     case VoiceDetection::kVeryLowLikelihood:
       return 3;
-      break;
     case VoiceDetection::kLowLikelihood:
       return 2;
-      break;
     case VoiceDetection::kModerateLikelihood:
       return 1;
-      break;
     case VoiceDetection::kHighLikelihood:
       return 0;
-      break;
-    default:
-      return -1;
   }
+  assert(false);
+  return -1;
 }
 }  // namespace
 
-
 VoiceDetectionImpl::VoiceDetectionImpl(const AudioProcessingImpl* apm)
   : ProcessingComponent(apm),
     apm_(apm),
@@ -92,7 +87,7 @@
 }
 
 int VoiceDetectionImpl::Enable(bool enable) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   return EnableComponent(enable);
 }
 
@@ -113,7 +108,7 @@
 }
 
 int VoiceDetectionImpl::set_likelihood(VoiceDetection::Likelihood likelihood) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   if (MapSetting(likelihood) == -1) {
     return apm_->kBadParameterError;
   }
@@ -127,7 +122,7 @@
 }
 
 int VoiceDetectionImpl::set_frame_size_ms(int size) {
-  CriticalSectionScoped crit_scoped(*apm_->crit());
+  CriticalSectionScoped crit_scoped(apm_->crit());
   assert(size == 10); // TODO(ajm): remove when supported.
   if (size != 10 &&
       size != 20 &&
@@ -157,15 +152,6 @@
   return apm_->kNoError;
 }
 
-int VoiceDetectionImpl::get_version(char* version,
-                                    int version_len_bytes) const {
-  if (WebRtcVad_get_version(version, version_len_bytes) != 0) {
-    return apm_->kBadParameterError;
-  }
-
-  return apm_->kNoError;
-}
-
 void* VoiceDetectionImpl::CreateHandle() const {
   Handle* handle = NULL;
   if (WebRtcVad_Create(&handle) != apm_->kNoError) {
diff --git a/src/modules/audio_processing/voice_detection_impl.h b/src/modules/audio_processing/voice_detection_impl.h
index ef212d1..52d92e0 100644
--- a/src/modules/audio_processing/voice_detection_impl.h
+++ b/src/modules/audio_processing/voice_detection_impl.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -8,8 +8,8 @@
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_VOICE_DETECTION_IMPL_H_
-#define WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_VOICE_DETECTION_IMPL_H_
+#ifndef WEBRTC_MODULES_AUDIO_PROCESSING_VOICE_DETECTION_IMPL_H_
+#define WEBRTC_MODULES_AUDIO_PROCESSING_VOICE_DETECTION_IMPL_H_
 
 #include "audio_processing.h"
 #include "processing_component.h"
@@ -31,7 +31,6 @@
 
   // ProcessingComponent implementation.
   virtual int Initialize();
-  virtual int get_version(char* version, int version_len_bytes) const;
 
  private:
   // VoiceDetection implementation.
@@ -60,4 +59,4 @@
 };
 }  // namespace webrtc
 
-#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_MAIN_SOURCE_VOICE_DETECTION_IMPL_H_
+#endif  // WEBRTC_MODULES_AUDIO_PROCESSING_VOICE_DETECTION_IMPL_H_
diff --git a/src/modules/bitrate_controller/OWNERS b/src/modules/bitrate_controller/OWNERS
new file mode 100644
index 0000000..6c70285
--- /dev/null
+++ b/src/modules/bitrate_controller/OWNERS
@@ -0,0 +1,5 @@
+pwestin@webrtc.org

+stefan@webrtc.org

+henrik.lundin@webrtc.org

+mflodman@webrtc.org

+asapersson@webrtc.org

diff --git a/src/modules/bitrate_controller/bitrate_controller.gypi b/src/modules/bitrate_controller/bitrate_controller.gypi
new file mode 100644
index 0000000..2db5fcb
--- /dev/null
+++ b/src/modules/bitrate_controller/bitrate_controller.gypi
@@ -0,0 +1,61 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'bitrate_controller',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../interface',
+          '<(webrtc_root)/modules/rtp_rtcp/interface',
+        ],
+      },
+      'sources': [
+        'bitrate_controller_impl.cc',
+        'bitrate_controller_impl.h',
+        'include/bitrate_controller.h',
+        'send_side_bandwidth_estimation.cc',
+        'send_side_bandwidth_estimation.h',
+      ],
+    },
+  ], # targets
+
+  'conditions': [
+    ['include_tests==1', {
+      'targets' : [
+        {
+          'target_name': 'bitrate_controller_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'bitrate_controller',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+          ],
+          'sources': [
+            'bitrate_controller_unittest.cc',
+           ],
+         },
+       ], # targets
+    }], # include_tests
+  ], # conditions
+
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2
diff --git a/src/modules/bitrate_controller/bitrate_controller_impl.cc b/src/modules/bitrate_controller/bitrate_controller_impl.cc
new file mode 100644
index 0000000..e7a7127
--- /dev/null
+++ b/src/modules/bitrate_controller/bitrate_controller_impl.cc
@@ -0,0 +1,231 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ *
+ */
+
+#include "modules/bitrate_controller/bitrate_controller_impl.h"
+
+#include <utility>
+
+#include "modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+
+namespace webrtc {
+
+class RtcpBandwidthObserverImpl : public RtcpBandwidthObserver {
+ public:
+  explicit RtcpBandwidthObserverImpl(BitrateControllerImpl* owner)
+      : owner_(owner) {
+  }
+  virtual ~RtcpBandwidthObserverImpl() {
+  }
+  // Received RTCP REMB or TMMBR.
+  virtual void OnReceivedEstimatedBitrate(const uint32_t bitrate) {
+    owner_->OnReceivedEstimatedBitrate(bitrate);
+  }
+  // Received RTCP receiver block.
+  virtual void OnReceivedRtcpReceiverReport(
+      const uint32_t ssrc,
+      const uint8_t fraction_loss,
+      const uint32_t rtt,
+      const uint32_t last_received_extended_high_seq_num,
+      const uint32_t now_ms) {
+    uint32_t number_of_packets = 0;
+    std::map<uint32_t, uint32_t>::iterator it =
+        ssrc_to_last_received_extended_high_seq_num_.find(ssrc);
+
+    if (it != ssrc_to_last_received_extended_high_seq_num_.end()) {
+      number_of_packets = last_received_extended_high_seq_num - it->second;
+    }
+    // Update last received for this SSRC.
+    ssrc_to_last_received_extended_high_seq_num_[ssrc] =
+        last_received_extended_high_seq_num;
+    owner_->OnReceivedRtcpReceiverReport(fraction_loss, rtt, number_of_packets,
+                                         now_ms);
+  }
+ private:
+  std::map<uint32_t, uint32_t> ssrc_to_last_received_extended_high_seq_num_;
+  BitrateControllerImpl* owner_;
+};
+
+BitrateController* BitrateController::CreateBitrateController() {
+  return new BitrateControllerImpl();
+}
+
+BitrateControllerImpl::BitrateControllerImpl()
+    : critsect_(CriticalSectionWrapper::CreateCriticalSection()) {
+}
+
+BitrateControllerImpl::~BitrateControllerImpl() {
+  BitrateObserverConfList::iterator it =
+      bitrate_observers_.begin();
+  while (it != bitrate_observers_.end()) {
+    delete it->second;
+    bitrate_observers_.erase(it);
+    it = bitrate_observers_.begin();
+  }
+  delete critsect_;
+}
+
+RtcpBandwidthObserver* BitrateControllerImpl::CreateRtcpBandwidthObserver() {
+  return new RtcpBandwidthObserverImpl(this);
+}
+
+BitrateControllerImpl::BitrateObserverConfList::iterator
+BitrateControllerImpl::FindObserverConfigurationPair(const BitrateObserver*
+                                                     observer) {
+  BitrateObserverConfList::iterator it = bitrate_observers_.begin();
+  for (; it != bitrate_observers_.end(); ++it) {
+    if (it->first == observer) {
+      return it;
+    }
+  }
+  return bitrate_observers_.end();
+}
+
+void BitrateControllerImpl::SetBitrateObserver(
+    BitrateObserver* observer,
+    const uint32_t start_bitrate,
+    const uint32_t min_bitrate,
+    const uint32_t max_bitrate) {
+  CriticalSectionScoped cs(critsect_);
+
+  BitrateObserverConfList::iterator it = FindObserverConfigurationPair(
+      observer);
+
+  if (it != bitrate_observers_.end()) {
+    // Update current configuration.
+    it->second->start_bitrate_ = start_bitrate;
+    it->second->min_bitrate_ = min_bitrate;
+    it->second->max_bitrate_ = max_bitrate;
+  } else {
+    // Add new settings.
+    bitrate_observers_.push_back(BitrateObserverConfiguration(observer,
+        new BitrateConfiguration(start_bitrate, min_bitrate, max_bitrate)));
+  }
+  uint32_t sum_start_bitrate = 0;
+  uint32_t sum_min_bitrate = 0;
+  uint32_t sum_max_bitrate = 0;
+
+  // Summarize all configurations.
+  for (it = bitrate_observers_.begin(); it != bitrate_observers_.end(); ++it) {
+    sum_start_bitrate += it->second->start_bitrate_;
+    sum_min_bitrate += it->second->min_bitrate_;
+    sum_max_bitrate += it->second->max_bitrate_;
+  }
+  // Only change start bitrate if we have exactly one observer. By definition
+  // you can only have one start bitrate, once we have our first estimate we
+  // will adapt from there.
+  if (bitrate_observers_.size() == 1) {
+    bandwidth_estimation_.SetSendBitrate(sum_start_bitrate);
+  }
+  bandwidth_estimation_.SetMinMaxBitrate(sum_min_bitrate,
+                                         sum_max_bitrate);
+}
+
+void BitrateControllerImpl::RemoveBitrateObserver(BitrateObserver* observer) {
+  CriticalSectionScoped cs(critsect_);
+  BitrateObserverConfList::iterator it = FindObserverConfigurationPair(
+      observer);
+  if (it != bitrate_observers_.end()) {
+    delete it->second;
+    bitrate_observers_.erase(it);
+  }
+}
+
+void BitrateControllerImpl::OnReceivedEstimatedBitrate(const uint32_t bitrate) {
+  uint32_t new_bitrate = 0;
+  uint8_t fraction_lost = 0;
+  uint16_t rtt = 0;
+  CriticalSectionScoped cs(critsect_);
+  if (bandwidth_estimation_.UpdateBandwidthEstimate(bitrate,
+                                                    &new_bitrate,
+                                                    &fraction_lost,
+                                                    &rtt)) {
+    OnNetworkChanged(new_bitrate, fraction_lost, rtt);
+  }
+}
+
+void BitrateControllerImpl::OnReceivedRtcpReceiverReport(
+    const uint8_t fraction_loss,
+    const uint32_t rtt,
+    const int number_of_packets,
+    const uint32_t now_ms) {
+  uint32_t new_bitrate = 0;
+  uint8_t loss = fraction_loss;
+  CriticalSectionScoped cs(critsect_);
+  if (bandwidth_estimation_.UpdatePacketLoss(number_of_packets, rtt, now_ms,
+                                             &loss, &new_bitrate)) {
+    OnNetworkChanged(new_bitrate, loss, rtt);
+  }
+}
+
+// We have the lock here.
+void BitrateControllerImpl::OnNetworkChanged(const uint32_t bitrate,
+                                             const uint8_t fraction_loss,
+                                             const uint32_t rtt) {
+  // Sanity check.
+  uint32_t number_of_observers = bitrate_observers_.size();
+  if (number_of_observers == 0) {
+    return;
+  }
+  uint32_t sum_min_bitrates = 0;
+  BitrateObserverConfList::iterator it;
+  for (it = bitrate_observers_.begin(); it != bitrate_observers_.end(); ++it) {
+    sum_min_bitrates += it->second->min_bitrate_;
+  }
+  if (bitrate <= sum_min_bitrates) {
+    // Min bitrate to all observers.
+    for (it = bitrate_observers_.begin(); it != bitrate_observers_.end();
+        ++it) {
+      it->first->OnNetworkChanged(it->second->min_bitrate_, fraction_loss,
+                                  rtt);
+    }
+    // Set sum of min to current send bitrate.
+    bandwidth_estimation_.SetSendBitrate(sum_min_bitrates);
+    return;
+  }
+  uint32_t bitrate_per_observer = (bitrate - sum_min_bitrates) /
+      number_of_observers;
+  // Use map to sort list based on max bitrate.
+  ObserverSortingMap list_max_bitrates;
+  for (it = bitrate_observers_.begin(); it != bitrate_observers_.end(); ++it) {
+    list_max_bitrates.insert(std::pair<uint32_t, ObserverConfiguration*>(
+        it->second->max_bitrate_,
+        new ObserverConfiguration(it->first, it->second->min_bitrate_)));
+  }
+  ObserverSortingMap::iterator max_it = list_max_bitrates.begin();
+  while (max_it != list_max_bitrates.end()) {
+    number_of_observers--;
+    uint32_t observer_allowance = max_it->second->min_bitrate_ +
+        bitrate_per_observer;
+    if (max_it->first < observer_allowance) {
+      // We have more than enough for this observer.
+      // Carry the remainder forward.
+      uint32_t remainder = observer_allowance - max_it->first;
+      if (number_of_observers != 0) {
+        bitrate_per_observer += remainder / number_of_observers;
+      }
+      max_it->second->observer_->OnNetworkChanged(max_it->first, fraction_loss,
+                                                  rtt);
+    } else {
+      max_it->second->observer_->OnNetworkChanged(observer_allowance,
+                                                  fraction_loss, rtt);
+    }
+    delete max_it->second;
+    list_max_bitrates.erase(max_it);
+    // Prepare next iteration.
+    max_it = list_max_bitrates.begin();
+  }
+}
+
+bool BitrateControllerImpl::AvailableBandwidth(uint32_t* bandwidth) const {
+  return bandwidth_estimation_.AvailableBandwidth(bandwidth);
+}
+}  // namespace webrtc
+
diff --git a/src/modules/bitrate_controller/bitrate_controller_impl.h b/src/modules/bitrate_controller/bitrate_controller_impl.h
new file mode 100644
index 0000000..58122b6
--- /dev/null
+++ b/src/modules/bitrate_controller/bitrate_controller_impl.h
@@ -0,0 +1,96 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ *
+ *  Usage: this class will register multiple RtcpBitrateObserver's one at each
+ *  RTCP module. It will aggregate the results and run one bandwidth estimation
+ *  and push the result to the encoder via VideoEncoderCallback.
+ */
+
+#ifndef WEBRTC_MODULES_BITRATE_CONTROLLER_BITRATE_CONTROLLER_IMPL_H_
+#define WEBRTC_MODULES_BITRATE_CONTROLLER_BITRATE_CONTROLLER_IMPL_H_
+
+#include "modules/bitrate_controller/include/bitrate_controller.h"
+
+#include <list>
+#include <map>
+
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "modules/bitrate_controller/send_side_bandwidth_estimation.h"
+
+namespace webrtc {
+
+class RtcpBandwidthObserverImpl;
+
+class BitrateControllerImpl : public BitrateController {
+ public:
+  friend class RtcpBandwidthObserverImpl;
+
+  explicit BitrateControllerImpl();
+  virtual ~BitrateControllerImpl();
+
+  virtual bool AvailableBandwidth(uint32_t* bandwidth) const;
+
+  virtual RtcpBandwidthObserver* CreateRtcpBandwidthObserver();
+
+  virtual void SetBitrateObserver(BitrateObserver* observer,
+                                  const uint32_t start_bitrate,
+                                  const uint32_t min_bitrate,
+                                  const uint32_t max_bitrate);
+
+  virtual void RemoveBitrateObserver(BitrateObserver* observer);
+
+ protected:
+  struct BitrateConfiguration {
+    BitrateConfiguration(uint32_t start_bitrate,
+                         uint32_t min_bitrate,
+                         uint32_t max_bitrate)
+        : start_bitrate_(start_bitrate),
+          min_bitrate_(min_bitrate),
+          max_bitrate_(max_bitrate) {
+    }
+    uint32_t start_bitrate_;
+    uint32_t min_bitrate_;
+    uint32_t max_bitrate_;
+  };
+  struct ObserverConfiguration {
+    ObserverConfiguration(BitrateObserver* observer,
+                          uint32_t bitrate)
+        : observer_(observer),
+          min_bitrate_(bitrate) {
+    }
+    BitrateObserver* observer_;
+    uint32_t min_bitrate_;
+  };
+
+  // Called by BitrateObserver's direct from the RTCP module.
+  void OnReceivedEstimatedBitrate(const uint32_t bitrate);
+
+  void OnReceivedRtcpReceiverReport(const uint8_t fraction_loss,
+                                    const uint32_t rtt,
+                                    const int number_of_packets,
+                                    const uint32_t now_ms);
+
+ private:
+  typedef std::multimap<uint32_t, ObserverConfiguration*> ObserverSortingMap;
+  typedef std::pair<BitrateObserver*, BitrateConfiguration*>
+      BitrateObserverConfiguration;
+  typedef std::list<BitrateObserverConfiguration> BitrateObserverConfList;
+
+  BitrateObserverConfList::iterator
+      FindObserverConfigurationPair(const BitrateObserver* observer);
+  void OnNetworkChanged(const uint32_t bitrate,
+                        const uint8_t fraction_loss,  // 0 - 255.
+                        const uint32_t rtt);
+
+  CriticalSectionWrapper* critsect_;
+  SendSideBandwidthEstimation bandwidth_estimation_;
+  BitrateObserverConfList bitrate_observers_;
+};
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_BITRATE_CONTROLLER_BITRATE_CONTROLLER_IMPL_H_
diff --git a/src/modules/bitrate_controller/bitrate_controller_unittest.cc b/src/modules/bitrate_controller/bitrate_controller_unittest.cc
new file mode 100644
index 0000000..1aafb0f
--- /dev/null
+++ b/src/modules/bitrate_controller/bitrate_controller_unittest.cc
@@ -0,0 +1,260 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <gtest/gtest.h>
+
+#include <algorithm>
+#include <vector>
+
+#include "modules/bitrate_controller/include/bitrate_controller.h"
+#include "modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+
+using webrtc::RtcpBandwidthObserver;
+using webrtc::BitrateObserver;
+using webrtc::BitrateController;
+
+class TestBitrateObserver: public BitrateObserver {
+ public:
+  TestBitrateObserver()
+      : last_bitrate_(0),
+        last_fraction_loss_(0),
+        last_rtt_(0) {
+  }
+
+  virtual void OnNetworkChanged(const uint32_t bitrate,
+                                const uint8_t fraction_loss,
+                                const uint32_t rtt) {
+    last_bitrate_ = bitrate;
+    last_fraction_loss_ = fraction_loss;
+    last_rtt_ = rtt;
+  }
+  uint32_t last_bitrate_;
+  uint8_t last_fraction_loss_;
+  uint32_t last_rtt_;
+};
+
+class BitrateControllerTest : public ::testing::Test {
+ protected:
+  BitrateControllerTest() {
+  }
+  ~BitrateControllerTest() {}
+
+  virtual void SetUp() {
+    controller_ = BitrateController::CreateBitrateController();
+    bandwidth_observer_ = controller_->CreateRtcpBandwidthObserver();
+  }
+
+  virtual void TearDown() {
+    delete bandwidth_observer_;
+    delete controller_;
+  }
+  BitrateController* controller_;
+  RtcpBandwidthObserver* bandwidth_observer_;
+};
+
+TEST_F(BitrateControllerTest, Basic) {
+  TestBitrateObserver bitrate_observer;
+  controller_->SetBitrateObserver(&bitrate_observer, 200000, 100000, 300000);
+  controller_->RemoveBitrateObserver(&bitrate_observer);
+}
+
+TEST_F(BitrateControllerTest, OneBitrateObserverOneRtcpObserver) {
+  TestBitrateObserver bitrate_observer;
+  controller_->SetBitrateObserver(&bitrate_observer, 200000, 100000, 300000);
+
+  // Receive a high remb, test bitrate inc.
+  bandwidth_observer_->OnReceivedEstimatedBitrate(400000);
+
+  // Test start bitrate.
+  bandwidth_observer_->OnReceivedRtcpReceiverReport(1, 0, 50, 1, 1);
+  EXPECT_EQ(0u, bitrate_observer.last_bitrate_);
+  EXPECT_EQ(0, bitrate_observer.last_fraction_loss_);
+  EXPECT_EQ(0u, bitrate_observer.last_rtt_);
+
+  // Test bitrate increase 8% per second.
+  bandwidth_observer_->OnReceivedRtcpReceiverReport(1, 0, 50, 21, 1001);
+  EXPECT_EQ(217000u, bitrate_observer.last_bitrate_);
+  EXPECT_EQ(0, bitrate_observer.last_fraction_loss_);
+  EXPECT_EQ(50u, bitrate_observer.last_rtt_);
+
+  bandwidth_observer_->OnReceivedRtcpReceiverReport(1, 0, 50, 41, 2001);
+  EXPECT_EQ(235360u, bitrate_observer.last_bitrate_);
+
+  bandwidth_observer_->OnReceivedRtcpReceiverReport(1, 0, 50, 61, 3001);
+  EXPECT_EQ(255189u, bitrate_observer.last_bitrate_);
+
+  bandwidth_observer_->OnReceivedRtcpReceiverReport(1, 0, 50, 801, 4001);
+  EXPECT_EQ(276604u, bitrate_observer.last_bitrate_);
+
+  bandwidth_observer_->OnReceivedRtcpReceiverReport(1, 0, 50, 101, 5001);
+  EXPECT_EQ(299732u, bitrate_observer.last_bitrate_);
+
+  bandwidth_observer_->OnReceivedRtcpReceiverReport(1, 0, 50, 121, 6001);
+  EXPECT_EQ(300000u, bitrate_observer.last_bitrate_);  // Max cap.
+
+  bandwidth_observer_->OnReceivedRtcpReceiverReport(1, 0, 50, 141, 7001);
+  EXPECT_EQ(300000u, bitrate_observer.last_bitrate_);  // Max cap.
+
+  // Test that a low REMB trigger immediately.
+  bandwidth_observer_->OnReceivedEstimatedBitrate(250000);
+  EXPECT_EQ(250000u, bitrate_observer.last_bitrate_);
+  EXPECT_EQ(0, bitrate_observer.last_fraction_loss_);
+  EXPECT_EQ(50u, bitrate_observer.last_rtt_);
+
+  bandwidth_observer_->OnReceivedEstimatedBitrate(1000);
+  EXPECT_EQ(100000u, bitrate_observer.last_bitrate_);  // Min cap.
+  controller_->RemoveBitrateObserver(&bitrate_observer);
+}
+
+TEST_F(BitrateControllerTest, OneBitrateObserverTwoRtcpObservers) {
+  TestBitrateObserver bitrate_observer;
+  controller_->SetBitrateObserver(&bitrate_observer, 200000, 100000, 300000);
+
+  RtcpBandwidthObserver* second_bandwidth_observer =
+      controller_->CreateRtcpBandwidthObserver();
+
+  // Receive a high remb, test bitrate inc.
+  bandwidth_observer_->OnReceivedEstimatedBitrate(400000);
+
+  // Test start bitrate.
+  bandwidth_observer_->OnReceivedRtcpReceiverReport(1, 0, 50, 1, 1);
+  second_bandwidth_observer->OnReceivedRtcpReceiverReport(1, 0, 100, 1, 1);
+  EXPECT_EQ(0u, bitrate_observer.last_bitrate_);
+  EXPECT_EQ(0, bitrate_observer.last_fraction_loss_);
+  EXPECT_EQ(0u, bitrate_observer.last_rtt_);
+
+  // Test bitrate increase 8% per second.
+  bandwidth_observer_->OnReceivedRtcpReceiverReport(1, 0, 50, 21, 501);
+  second_bandwidth_observer->OnReceivedRtcpReceiverReport(1, 0, 100, 21, 1001);
+  EXPECT_EQ(217000u, bitrate_observer.last_bitrate_);
+  EXPECT_EQ(0, bitrate_observer.last_fraction_loss_);
+  EXPECT_EQ(100u, bitrate_observer.last_rtt_);
+
+  // Extra report should not change estimate.
+  second_bandwidth_observer->OnReceivedRtcpReceiverReport(1, 0, 100, 31, 1501);
+  EXPECT_EQ(217000u, bitrate_observer.last_bitrate_);
+
+  bandwidth_observer_->OnReceivedRtcpReceiverReport(1, 0, 50, 41, 2001);
+  EXPECT_EQ(235360u, bitrate_observer.last_bitrate_);
+
+  // Second report should not change estimate.
+  second_bandwidth_observer->OnReceivedRtcpReceiverReport(1, 0, 100, 41, 2001);
+  EXPECT_EQ(235360u, bitrate_observer.last_bitrate_);
+
+  // Reports from only one bandwidth observer is ok.
+  second_bandwidth_observer->OnReceivedRtcpReceiverReport(1, 0, 50, 61, 3001);
+  EXPECT_EQ(255189u, bitrate_observer.last_bitrate_);
+
+  second_bandwidth_observer->OnReceivedRtcpReceiverReport(1, 0, 50, 81, 4001);
+  EXPECT_EQ(276604u, bitrate_observer.last_bitrate_);
+
+  second_bandwidth_observer->OnReceivedRtcpReceiverReport(1, 0, 50, 101, 5001);
+  EXPECT_EQ(299732u, bitrate_observer.last_bitrate_);
+
+  second_bandwidth_observer->OnReceivedRtcpReceiverReport(1, 0, 50, 121, 6001);
+  EXPECT_EQ(300000u, bitrate_observer.last_bitrate_);  // Max cap.
+
+  second_bandwidth_observer->OnReceivedRtcpReceiverReport(1, 0, 50, 141, 7001);
+  EXPECT_EQ(300000u, bitrate_observer.last_bitrate_);  // Max cap.
+
+  // Test that a low REMB trigger immediately.
+  // We don't care which bandwidth observer that delivers the REMB.
+  second_bandwidth_observer->OnReceivedEstimatedBitrate(250000);
+  EXPECT_EQ(250000u, bitrate_observer.last_bitrate_);
+  EXPECT_EQ(0, bitrate_observer.last_fraction_loss_);
+  EXPECT_EQ(50u, bitrate_observer.last_rtt_);
+
+  bandwidth_observer_->OnReceivedEstimatedBitrate(1000);
+  EXPECT_EQ(100000u, bitrate_observer.last_bitrate_);  // Min cap.
+  controller_->RemoveBitrateObserver(&bitrate_observer);
+  delete second_bandwidth_observer;
+}
+
+TEST_F(BitrateControllerTest, TwoBitrateObserversOneRtcpObserver) {
+  TestBitrateObserver bitrate_observer_1;
+  TestBitrateObserver bitrate_observer_2;
+  controller_->SetBitrateObserver(&bitrate_observer_2, 200000, 200000, 300000);
+  controller_->SetBitrateObserver(&bitrate_observer_1, 200000, 100000, 300000);
+
+  // Receive a high remb, test bitrate inc.
+  bandwidth_observer_->OnReceivedEstimatedBitrate(400000);
+
+  // Test too low start bitrate, hence lower than sum of min.
+  bandwidth_observer_->OnReceivedRtcpReceiverReport(1, 0, 50, 1, 1);
+
+  // Test bitrate increase 8% per second, distributed equally.
+  bandwidth_observer_->OnReceivedRtcpReceiverReport(1, 0, 50, 21, 1001);
+  EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_);
+  EXPECT_EQ(0, bitrate_observer_1.last_fraction_loss_);
+  EXPECT_EQ(50u, bitrate_observer_1.last_rtt_);
+
+  EXPECT_EQ(200000u, bitrate_observer_2.last_bitrate_);
+  EXPECT_EQ(0, bitrate_observer_2.last_fraction_loss_);
+  EXPECT_EQ(50u, bitrate_observer_2.last_rtt_);
+
+  bandwidth_observer_->OnReceivedRtcpReceiverReport(1, 0, 50, 41, 2001);
+  EXPECT_EQ(112500u, bitrate_observer_1.last_bitrate_);
+  EXPECT_EQ(212500u, bitrate_observer_2.last_bitrate_);
+
+  bandwidth_observer_->OnReceivedRtcpReceiverReport(1, 0, 50, 61, 3001);
+  EXPECT_EQ(126000u, bitrate_observer_1.last_bitrate_);
+  EXPECT_EQ(226000u, bitrate_observer_2.last_bitrate_);
+
+  bandwidth_observer_->OnReceivedRtcpReceiverReport(1, 0, 50, 81, 4001);
+  EXPECT_EQ(140580u, bitrate_observer_1.last_bitrate_);
+  EXPECT_EQ(240580u, bitrate_observer_2.last_bitrate_);
+
+  // Check that the bitrate sum honor our REMB.
+  bandwidth_observer_->OnReceivedRtcpReceiverReport(1, 0, 50, 101, 5001);
+  EXPECT_EQ(150000u, bitrate_observer_1.last_bitrate_);
+  EXPECT_EQ(250000u, bitrate_observer_2.last_bitrate_);
+
+  // Remove REMB cap, higher than sum of max.
+  bandwidth_observer_->OnReceivedEstimatedBitrate(700000);
+
+  bandwidth_observer_->OnReceivedRtcpReceiverReport(1, 0, 50, 121, 6001);
+  EXPECT_EQ(166500u, bitrate_observer_1.last_bitrate_);
+  EXPECT_EQ(266500u, bitrate_observer_2.last_bitrate_);
+
+  bandwidth_observer_->OnReceivedRtcpReceiverReport(1, 0, 50, 141, 7001);
+  EXPECT_EQ(184320u, bitrate_observer_1.last_bitrate_);
+  EXPECT_EQ(284320u, bitrate_observer_2.last_bitrate_);
+
+  bandwidth_observer_->OnReceivedRtcpReceiverReport(1, 0, 50, 161, 8001);
+  EXPECT_EQ(207130u, bitrate_observer_1.last_bitrate_);
+  EXPECT_EQ(300000u, bitrate_observer_2.last_bitrate_);  // Max cap.
+
+  bandwidth_observer_->OnReceivedRtcpReceiverReport(1, 0, 50, 181, 9001);
+  EXPECT_EQ(248700u, bitrate_observer_1.last_bitrate_);
+  EXPECT_EQ(300000u, bitrate_observer_2.last_bitrate_);
+
+  bandwidth_observer_->OnReceivedRtcpReceiverReport(1, 0, 50, 201, 10001);
+  EXPECT_EQ(293596u, bitrate_observer_1.last_bitrate_);
+  EXPECT_EQ(300000u, bitrate_observer_2.last_bitrate_);
+
+  bandwidth_observer_->OnReceivedRtcpReceiverReport(1, 0, 50, 221, 11001);
+  EXPECT_EQ(300000u, bitrate_observer_1.last_bitrate_);  // Max cap.
+  EXPECT_EQ(300000u, bitrate_observer_2.last_bitrate_);
+
+  // Test that a low REMB trigger immediately.
+  bandwidth_observer_->OnReceivedEstimatedBitrate(350000);
+  EXPECT_EQ(125000u, bitrate_observer_1.last_bitrate_);
+  EXPECT_EQ(0, bitrate_observer_1.last_fraction_loss_);
+  EXPECT_EQ(50u, bitrate_observer_1.last_rtt_);
+  EXPECT_EQ(225000u, bitrate_observer_2.last_bitrate_);
+  EXPECT_EQ(0, bitrate_observer_2.last_fraction_loss_);
+  EXPECT_EQ(50u, bitrate_observer_2.last_rtt_);
+
+  bandwidth_observer_->OnReceivedEstimatedBitrate(1000);
+  EXPECT_EQ(100000u, bitrate_observer_1.last_bitrate_);  // Min cap.
+  EXPECT_EQ(200000u, bitrate_observer_2.last_bitrate_);  // Min cap.
+  controller_->RemoveBitrateObserver(&bitrate_observer_1);
+  controller_->RemoveBitrateObserver(&bitrate_observer_2);
+}
diff --git a/src/modules/bitrate_controller/include/bitrate_controller.h b/src/modules/bitrate_controller/include/bitrate_controller.h
new file mode 100644
index 0000000..002ab8f
--- /dev/null
+++ b/src/modules/bitrate_controller/include/bitrate_controller.h
@@ -0,0 +1,66 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ *
+ *  Usage: this class will register multiple RtcpBitrateObserver's one at each
+ *  RTCP module. It will aggregate the results and run one bandwidth estimation
+ *  and push the result to the encoders via BitrateObserver(s).
+ */
+
+#ifndef WEBRTC_MODULES_BITRATE_CONTROLLER_INCLUDE_BITRATE_CONTROLLER_H_
+#define WEBRTC_MODULES_BITRATE_CONTROLLER_INCLUDE_BITRATE_CONTROLLER_H_
+
+#include "modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+
+namespace webrtc {
+
+class BitrateObserver {
+ /*
+  * Observer class for the encoders, each encoder should implement this class
+  * to get the target bitrate. It also get the fraction loss and rtt to
+  * optimize its settings for this type of network.
+  */
+ public:
+  virtual void OnNetworkChanged(const uint32_t targer_bitrate,
+                                const uint8_t fraction_loss,  // 0 - 255.
+                                const uint32_t rtt) = 0;
+
+  virtual ~BitrateObserver() {}
+};
+
+class BitrateController {
+/*
+ * This class collects feedback from all streams sent to a peer (via
+ * RTCPBandwidthObservers). It does one  aggregated send side bandwidth
+ * estimation and divide the available bitrate between all its registered
+ * BitrateObservers.
+ */
+ public:
+  static BitrateController* CreateBitrateController();
+  virtual ~BitrateController() {}
+
+  virtual RtcpBandwidthObserver* CreateRtcpBandwidthObserver() = 0;
+
+  virtual bool AvailableBandwidth(uint32_t* bandwidth) const = 0;
+
+  /*
+  *  Set the start and max send bitrate used by the bandwidth management.
+  *
+  *  observer, updates bitrates if already in use.
+  *  min_bitrate_kbit = 0 equals no min bitrate.
+  *  max_bitrate_kit = 0 equals no max bitrate.
+  */
+  virtual void SetBitrateObserver(BitrateObserver* observer,
+                                  const uint32_t start_bitrate,
+                                  const uint32_t min_bitrate,
+                                  const uint32_t max_bitrate) = 0;
+
+  virtual void RemoveBitrateObserver(BitrateObserver* observer) = 0;
+};
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_BITRATE_CONTROLLER_INCLUDE_BITRATE_CONTROLLER_H_
diff --git a/src/modules/bitrate_controller/send_side_bandwidth_estimation.cc b/src/modules/bitrate_controller/send_side_bandwidth_estimation.cc
new file mode 100644
index 0000000..1fc08c5
--- /dev/null
+++ b/src/modules/bitrate_controller/send_side_bandwidth_estimation.cc
@@ -0,0 +1,225 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/bitrate_controller/send_side_bandwidth_estimation.h"
+
+#include <math.h>  // sqrt()
+
+#include "system_wrappers/interface/trace.h"
+
+namespace webrtc {
+
+SendSideBandwidthEstimation::SendSideBandwidthEstimation()
+    : critsect_(CriticalSectionWrapper::CreateCriticalSection()),
+      accumulate_lost_packets_Q8_(0),
+      accumulate_expected_packets_(0),
+      bitrate_(0),
+      min_bitrate_configured_(0),
+      max_bitrate_configured_(0),
+      last_fraction_loss_(0),
+      last_round_trip_time_(0),
+      bwe_incoming_(0),
+      time_last_increase_(0),
+      time_last_decrease_(0) {
+}
+
+SendSideBandwidthEstimation::~SendSideBandwidthEstimation() {
+    delete critsect_;
+}
+
+void SendSideBandwidthEstimation::SetSendBitrate(const uint32_t bitrate) {
+  CriticalSectionScoped cs(critsect_);
+  bitrate_ = bitrate;
+}
+
+void SendSideBandwidthEstimation::SetMinMaxBitrate(const uint32_t min_bitrate,
+                                                   const uint32_t max_bitrate) {
+  CriticalSectionScoped cs(critsect_);
+  min_bitrate_configured_ = min_bitrate;
+  if (max_bitrate == 0) {
+    // no max configured use 1Gbit/s
+    max_bitrate_configured_ = 1000000000;
+  } else {
+    max_bitrate_configured_ = max_bitrate;
+  }
+}
+
+bool SendSideBandwidthEstimation::UpdateBandwidthEstimate(
+    const uint32_t bandwidth,
+    uint32_t* new_bitrate,
+    uint8_t* fraction_lost,
+    uint16_t* rtt) {
+  *new_bitrate = 0;
+  CriticalSectionScoped cs(critsect_);
+
+  bwe_incoming_ = bandwidth;
+
+  if (bitrate_ == 0) {
+    // SendSideBandwidthEstimation off
+    return false;
+  }
+  if (bwe_incoming_ > 0 && bitrate_ > bwe_incoming_) {
+    bitrate_ = bwe_incoming_;
+    *new_bitrate = bitrate_;
+    *fraction_lost = last_fraction_loss_;
+    *rtt = last_round_trip_time_;
+    return true;
+  }
+  return false;
+}
+
+bool SendSideBandwidthEstimation::UpdatePacketLoss(
+    const int number_of_packets,
+    const uint32_t rtt,
+    const uint32_t now_ms,
+    uint8_t* loss,
+    uint32_t* new_bitrate) {
+  CriticalSectionScoped cs(critsect_);
+
+  if (bitrate_ == 0) {
+    // SendSideBandwidthEstimation off
+    return false;
+  }
+  // Update RTT.
+  last_round_trip_time_ = rtt;
+
+  // Check sequence number diff and weight loss report
+  if (number_of_packets > 0) {
+    // Calculate number of lost packets.
+    const int num_lost_packets_Q8 = *loss * number_of_packets;
+    // Accumulate reports.
+    accumulate_lost_packets_Q8_ += num_lost_packets_Q8;
+    accumulate_expected_packets_ += number_of_packets;
+
+    // Report loss if the total report is based on sufficiently many packets.
+    if (accumulate_expected_packets_ >= kLimitNumPackets) {
+      *loss = accumulate_lost_packets_Q8_ / accumulate_expected_packets_;
+
+      // Reset accumulators
+      accumulate_lost_packets_Q8_ = 0;
+      accumulate_expected_packets_ = 0;
+    } else {
+      // Report zero loss until we have enough data to estimate
+      // the loss rate.
+      return false;
+    }
+  }
+  // Keep for next time.
+  last_fraction_loss_ = *loss;
+  uint32_t bitrate = 0;
+  if (!ShapeSimple(*loss, rtt, now_ms, &bitrate)) {
+    // No change.
+    return false;
+  }
+  bitrate_ = bitrate;
+  *new_bitrate = bitrate;
+  return true;
+}
+
+bool SendSideBandwidthEstimation::AvailableBandwidth(
+    uint32_t* bandwidth) const {
+  CriticalSectionScoped cs(critsect_);
+  if (bitrate_ == 0) {
+    return false;
+  }
+  *bandwidth = bitrate_;
+  return true;
+}
+
+/*
+ * Calculate the rate that TCP-Friendly Rate Control (TFRC) would apply.
+ * The formula in RFC 3448, Section 3.1, is used.
+ */
+uint32_t SendSideBandwidthEstimation::CalcTFRCbps(uint16_t rtt, uint8_t loss) {
+  if (rtt == 0 || loss == 0) {
+    // input variables out of range
+    return 0;
+  }
+  double R = static_cast<double>(rtt) / 1000;  // RTT in seconds
+  int b = 1;  // number of packets acknowledged by a single TCP acknowledgement;
+              // recommended = 1
+  double t_RTO = 4.0 * R;  // TCP retransmission timeout value in seconds
+                           // recommended = 4*R
+  double p = static_cast<double>(loss) / 255;  // packet loss rate in [0, 1)
+  double s = static_cast<double>(kAvgPacketSizeBytes);
+
+  // calculate send rate in bytes/second
+  double X = s / (R * sqrt(2 * b * p / 3) +
+      (t_RTO * (3 * sqrt(3 * b * p / 8) * p * (1 + 32 * p * p))));
+
+  return (static_cast<uint32_t>(X * 8));  // bits/second
+}
+
+bool SendSideBandwidthEstimation::ShapeSimple(const uint8_t loss,
+                                              const uint32_t rtt,
+                                              const uint32_t now_ms,
+                                              uint32_t* bitrate) {
+  uint32_t new_bitrate = 0;
+  bool reducing = false;
+
+  // Limit the rate increases to once a kBWEIncreaseIntervalMs.
+  if (loss <= 5) {
+    if ((now_ms - time_last_increase_) < kBWEIncreaseIntervalMs) {
+      return false;
+    }
+    time_last_increase_ = now_ms;
+  }
+  // Limit the rate decreases to once a kBWEDecreaseIntervalMs + rtt.
+  if (loss > 26) {
+    if ((now_ms - time_last_decrease_) < kBWEDecreaseIntervalMs + rtt) {
+      return false;
+    }
+    time_last_decrease_ = now_ms;
+  }
+
+  if (loss > 5 && loss <= 26) {
+    // 2% - 10%
+    new_bitrate = bitrate_;
+  } else if (loss > 26) {
+    // 26/256 ~= 10%
+    // reduce rate: newRate = rate * (1 - 0.5*lossRate)
+    // packetLoss = 256*lossRate
+    new_bitrate = static_cast<uint32_t>((bitrate_ *
+        static_cast<double>(512 - loss)) / 512.0);
+    reducing = true;
+  } else {
+    // increase rate by 8%
+    new_bitrate = static_cast<uint32_t>(bitrate_ * 1.08 + 0.5);
+
+    // add 1 kbps extra, just to make sure that we do not get stuck
+    // (gives a little extra increase at low rates, negligible at higher rates)
+    new_bitrate += 1000;
+  }
+  if (reducing) {
+    // Calculate what rate TFRC would apply in this situation
+    // scale loss to Q0 (back to [0, 255])
+    uint32_t tfrc_bitrate = CalcTFRCbps(rtt, loss);
+    if (tfrc_bitrate > new_bitrate) {
+      // do not reduce further if rate is below TFRC rate
+      new_bitrate = tfrc_bitrate;
+    }
+  }
+  if (bwe_incoming_ > 0 && new_bitrate > bwe_incoming_) {
+    new_bitrate = bwe_incoming_;
+  }
+  if (new_bitrate > max_bitrate_configured_) {
+    new_bitrate = max_bitrate_configured_;
+  }
+  if (new_bitrate < min_bitrate_configured_) {
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
+                 "The configured min bitrate (%u kbps) is greater than the "
+                 "estimated available bandwidth (%u kbps).\n",
+                 min_bitrate_configured_ / 1000, new_bitrate / 1000);
+    new_bitrate = min_bitrate_configured_;
+  }
+  *bitrate = new_bitrate;
+  return true;
+}
+}  // namespace webrtc
diff --git a/src/modules/bitrate_controller/send_side_bandwidth_estimation.h b/src/modules/bitrate_controller/send_side_bandwidth_estimation.h
new file mode 100644
index 0000000..9df9ccd
--- /dev/null
+++ b/src/modules/bitrate_controller/send_side_bandwidth_estimation.h
@@ -0,0 +1,74 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ *
+ *  FEC and NACK added bitrate is handled outside class
+ */
+
+#ifndef WEBRTC_MODULES_BITRATE_CONTROLLER_SEND_SIDE_BANDWIDTH_ESTIMATION_H_
+#define WEBRTC_MODULES_BITRATE_CONTROLLER_SEND_SIDE_BANDWIDTH_ESTIMATION_H_
+
+#include "modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+
+namespace webrtc {
+class SendSideBandwidthEstimation {
+ public:
+  SendSideBandwidthEstimation();
+  virtual ~SendSideBandwidthEstimation();
+
+  // Call when we receive a RTCP message with TMMBR or REMB
+  // Return true if new_bitrate is valid.
+  bool UpdateBandwidthEstimate(const uint32_t bandwidth,
+                               uint32_t* new_bitrate,
+                               uint8_t* fraction_lost,
+                               uint16_t* rtt);
+
+  // Call when we receive a RTCP message with a ReceiveBlock
+  // Return true if new_bitrate is valid.
+  bool UpdatePacketLoss(const int number_of_packets,
+                        const uint32_t rtt,
+                        const uint32_t now_ms,
+                        uint8_t* loss,
+                        uint32_t* new_bitrate);
+
+  // Return false if no bandwidth estimate is available
+  bool AvailableBandwidth(uint32_t* bandwidth) const;
+  void SetSendBitrate(const uint32_t bitrate);
+  void SetMinMaxBitrate(const uint32_t min_bitrate, const uint32_t max_bitrate);
+
+ private:
+  bool ShapeSimple(const uint8_t loss, const uint32_t rtt,
+                   const uint32_t now_ms, uint32_t* bitrate);
+
+  uint32_t CalcTFRCbps(uint16_t rtt, uint8_t loss);
+
+  enum { kBWEIncreaseIntervalMs = 1000 };
+  enum { kBWEDecreaseIntervalMs = 300 };
+  enum { kLimitNumPackets = 20 };
+  enum { kAvgPacketSizeBytes = 1000 };
+
+  CriticalSectionWrapper* critsect_;
+
+  // incoming filters
+  int accumulate_lost_packets_Q8_;
+  int accumulate_expected_packets_;
+
+  uint32_t bitrate_;
+  uint32_t min_bitrate_configured_;
+  uint32_t max_bitrate_configured_;
+
+  uint8_t last_fraction_loss_;
+  uint16_t last_round_trip_time_;
+
+  uint32_t bwe_incoming_;
+  uint32_t time_last_increase_;
+  uint32_t time_last_decrease_;
+};
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_BITRATE_CONTROLLER_SEND_SIDE_BANDWIDTH_ESTIMATION_H_
diff --git a/src/modules/interface/module.h b/src/modules/interface/module.h
index a274d95..d06bef2 100644
--- a/src/modules/interface/module.h
+++ b/src/modules/interface/module.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -19,8 +19,9 @@
 
 class Module {
  public:
-  // Change the unique identifier of this object.
-  virtual int32_t ChangeUniqueId(const int32_t id) = 0;
+  // TODO(henrika): Remove this when chrome is updated.
+  // DEPRICATED Change the unique identifier of this object.
+  virtual int32_t ChangeUniqueId(const int32_t) { return 0; }
 
   // Returns the number of milliseconds until the module want a worker
   // thread to call Process.
@@ -41,7 +42,7 @@
   // TODO(perkj): Make this pure virtual when Chromium have implemented  
   // reference counting ADM and Video capture module.
   virtual int32_t AddRef() {
-    assert(!"Not implemented.");
+    assert(false && "Not implemented.");
     return 1;
   }
 
@@ -52,7 +53,7 @@
   // TODO(perkj): Make this pure virtual when Chromium have implemented  
   // reference counting ADM and Video capture module.
   virtual int32_t Release() {
-    assert(!"Not implemented.");
+    assert(false && "Not implemented.");
     return 1;
   }
 
diff --git a/src/modules/interface/module_common_types.h b/src/modules/interface/module_common_types.h
index 0319dfe..2c3216f 100644
--- a/src/modules/interface/module_common_types.h
+++ b/src/modules/interface/module_common_types.h
@@ -1,3 +1,13 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
 #ifndef MODULE_COMMON_TYPES_H
 #define MODULE_COMMON_TYPES_H
 
@@ -40,14 +50,6 @@
     WebRtc_UWord8  channel;                           // number of channels 2 = stereo
 };
 
-struct RTPVideoHeaderH263
-{
-    void InitRTPVideoHeaderH263() {};
-    bool independentlyDecodable;  // H.263-1998 if no P bit it's not independently decodable
-    bool bits;                    // H.263 mode B, Xor the lasy byte of previus packet with the
-                                  // first byte of this packet
-};
-
 enum {kNoPictureId = -1};
 enum {kNoTl0PicIdx = -1};
 enum {kNoTemporalIdx = -1};
@@ -87,15 +89,12 @@
 };
 union RTPVideoTypeHeader
 {
-    RTPVideoHeaderH263      H263;
     RTPVideoHeaderVP8       VP8;
 };
 
 enum RTPVideoCodecTypes
 {
     kRTPVideoGeneric  = 0,
-    kRTPVideoH263     = 1,
-    kRTPVideoMPEG4    = 5,
     kRTPVideoVP8      = 8,
     kRTPVideoNoVideo  = 10,
     kRTPVideoFEC      = 11,
@@ -298,16 +297,41 @@
     WebRtc_UWord16    JBabsMax;
 };
 
+// Types for the FEC packet masks. The type |kFecMaskRandom| is based on a
+// random loss model. The type |kFecMaskBursty| is based on a bursty/consecutive
+// loss model. The packet masks are defined in
+// modules/rtp_rtcp/fec_private_tables_random(bursty).h
+enum FecMaskType {
+  kFecMaskRandom,
+  kFecMaskBursty,
+};
+
+// Struct containing forward error correction settings.
+struct FecProtectionParams {
+  int fec_rate;
+  bool use_uep_protection;
+  int max_fec_frames;
+  FecMaskType fec_mask_type;
+};
+
 // class describing a complete, or parts of an encoded frame.
 class EncodedVideoData
 {
 public:
     EncodedVideoData() :
+        payloadType(0),
+        timeStamp(0),
+        renderTimeMs(0),
+        encodedWidth(0),
+        encodedHeight(0),
         completeFrame(false),
         missingFrame(false),
         payloadData(NULL),
         payloadSize(0),
-        bufferSize(0)
+        bufferSize(0),
+        fragmentationHeader(),
+        frameType(kVideoFrameDelta),
+        codec(kVideoCodecUnknown)
     {};
 
     EncodedVideoData(const EncodedVideoData& data)
@@ -394,30 +418,24 @@
     VideoCodecType              codec;
 };
 
-// Video Content Metrics
-struct VideoContentMetrics
-{
-    VideoContentMetrics(): motionMagnitudeNZ(0), sizeZeroMotion(0), spatialPredErr(0),
-            spatialPredErrH(0), spatialPredErrV(0), motionPredErr(0),
-            motionHorizontalness(0), motionClusterDistortion(0),
-            nativeWidth(0), nativeHeight(0), contentChange(false) {   }
-    void Reset(){ motionMagnitudeNZ = 0; sizeZeroMotion = 0; spatialPredErr = 0;
-            spatialPredErrH = 0; spatialPredErrV = 0; motionPredErr = 0;
-            motionHorizontalness = 0; motionClusterDistortion = 0;
-            nativeWidth = 0; nativeHeight = 0; contentChange = false; }
+struct VideoContentMetrics {
+  VideoContentMetrics()
+      : motion_magnitude(0.0f),
+        spatial_pred_err(0.0f),
+        spatial_pred_err_h(0.0f),
+        spatial_pred_err_v(0.0f) {
+  }
 
-    float            motionMagnitudeNZ;
-    float            sizeZeroMotion;
-    float            spatialPredErr;
-    float            spatialPredErrH;
-    float            spatialPredErrV;
-    float            motionPredErr;
-    float            motionHorizontalness;
-    float            motionClusterDistortion;
-    WebRtc_UWord32   nativeWidth;
-    WebRtc_UWord32   nativeHeight;
-    WebRtc_UWord32   nativeFrameRate;
-    bool             contentChange;
+  void Reset() {
+    motion_magnitude = 0.0f;
+    spatial_pred_err = 0.0f;
+    spatial_pred_err_h = 0.0f;
+    spatial_pred_err_v = 0.0f;
+  }
+  float motion_magnitude;
+  float spatial_pred_err;
+  float spatial_pred_err_h;
+  float spatial_pred_err_v;
 };
 
 /*************************************************
@@ -575,10 +593,14 @@
             memcpy(newBufferBuffer, _buffer, _bufferSize);
             delete [] _buffer;
         }
+        else
+        {
+            memset(newBufferBuffer, 0, minimumSize * sizeof(WebRtc_UWord8));
+        }
         _buffer = newBufferBuffer;
         _bufferSize = minimumSize;
     }
-     return 0;
+    return 0;
 }
 
 inline
@@ -682,35 +704,23 @@
 }
 
 
-/*************************************************
+/* This class holds up to 60 ms of super-wideband (32 kHz) stereo audio. It
+ * allows for adding and subtracting frames while keeping track of the resulting
+ * states.
  *
- * AudioFrame class
+ * Notes
+ * - The total number of samples in |data_| is
+ *   samples_per_channel_ * num_channels_
  *
- * The AudioFrame class holds up to 60 ms wideband
- * audio. It allows for adding and subtracting frames
- * while keeping track of the resulting states.
+ * - Stereo data is interleaved starting with the left channel.
  *
- * Note
- * - The +operator assume that you would never add
- *   exact opposite frames when deciding the resulting
- *   state. To do this use the -operator.
- *
- * - _audioChannel of 1 indicated mono, and 2
- *   indicates stereo.
- *
- * - _payloadDataLengthInSamples is the number of
- *   samples per channel. Therefore, the total
- *   number of samples in _payloadData is
- *   (_payloadDataLengthInSamples * _audioChannel).
- *
- * - Stereo data is stored in interleaved fashion
- *   starting with the left channel.
- *
- *************************************************/
+ * - The +operator assume that you would never add exactly opposite frames when
+ *   deciding the resulting state. To do this use the -operator.
+ */
 class AudioFrame
 {
 public:
-    enum{kMaxAudioFrameSizeSamples = 3840}; // stereo 32KHz 60ms 2*32*60
+    enum { kMaxDataSizeSamples = 3840 };  // stereo, 32 kHz, 60ms (2*32*60)
 
     enum VADActivity
     {
@@ -730,55 +740,49 @@
     AudioFrame();
     virtual ~AudioFrame();
 
-    WebRtc_Word32 UpdateFrame(
-        const WebRtc_Word32  id,
-        const WebRtc_UWord32 timeStamp,
-        const WebRtc_Word16* payloadData,
-        const WebRtc_UWord16 payloadDataLengthInSamples,
-        const int frequencyInHz,
-        const SpeechType     speechType,
-        const VADActivity    vadActivity,
-        const WebRtc_UWord8  audioChannel = 1,
-        const WebRtc_Word32  volume = -1,
-        const WebRtc_Word32  energy = -1);
+    int UpdateFrame(
+        int id,
+        uint32_t timestamp,
+        const int16_t* data,
+        int samples_per_channel,
+        int sample_rate_hz,
+        SpeechType speech_type,
+        VADActivity vad_activity,
+        int num_channels = 1,
+        uint32_t energy = -1);
 
     AudioFrame& Append(const AudioFrame& rhs);
 
-    void Mute() const;
+    void Mute();
 
     AudioFrame& operator=(const AudioFrame& rhs);
-    AudioFrame& operator>>=(const WebRtc_Word32 rhs);
+    AudioFrame& operator>>=(const int rhs);
     AudioFrame& operator+=(const AudioFrame& rhs);
     AudioFrame& operator-=(const AudioFrame& rhs);
 
-    WebRtc_Word32  _id;
-    WebRtc_UWord32 _timeStamp;
-
-    // Supporting Stereo, stereo samples are interleaved
-    mutable WebRtc_Word16 _payloadData[kMaxAudioFrameSizeSamples];
-    WebRtc_UWord16 _payloadDataLengthInSamples;
-    int _frequencyInHz;
-    WebRtc_UWord8  _audioChannel;
-    SpeechType   _speechType;
-    VADActivity  _vadActivity;
-
-    WebRtc_UWord32 _energy;
-    WebRtc_Word32  _volume;
+    int id_;
+    uint32_t timestamp_;
+    int16_t data_[kMaxDataSizeSamples];
+    int samples_per_channel_;
+    int sample_rate_hz_;
+    int num_channels_;
+    SpeechType speech_type_;
+    VADActivity vad_activity_;
+    uint32_t energy_;
 };
 
 inline
 AudioFrame::AudioFrame()
     :
-    _id(-1),
-    _timeStamp(0),
-    _payloadData(),
-    _payloadDataLengthInSamples(0),
-    _frequencyInHz(0),
-    _audioChannel(1),
-    _speechType(kUndefined),
-    _vadActivity(kVadUnknown),
-    _energy(0xffffffff),
-    _volume(0xffffffff)
+    id_(-1),
+    timestamp_(0),
+    data_(),
+    samples_per_channel_(0),
+    sample_rate_hz_(0),
+    num_channels_(1),
+    speech_type_(kUndefined),
+    vad_activity_(kVadUnknown),
+    energy_(0xffffffff)
 {
 }
 
@@ -788,53 +792,51 @@
 }
 
 inline
-WebRtc_Word32
+int
 AudioFrame::UpdateFrame(
-    const WebRtc_Word32  id,
-    const WebRtc_UWord32 timeStamp,
-    const WebRtc_Word16* payloadData,
-    const WebRtc_UWord16 payloadDataLengthInSamples,
-    const int frequencyInHz,
-    const SpeechType     speechType,
-    const VADActivity    vadActivity,
-    const WebRtc_UWord8  audioChannel,
-    const WebRtc_Word32  volume,
-    const WebRtc_Word32  energy)
+    int id,
+    uint32_t timestamp,
+    const int16_t* data,
+    int samples_per_channel,
+    int sample_rate_hz,
+    SpeechType speech_type,
+    VADActivity vad_activity,
+    int num_channels,
+    uint32_t energy)
 {
-    _id            = id;
-    _timeStamp     = timeStamp;
-    _frequencyInHz = frequencyInHz;
-    _speechType    = speechType;
-    _vadActivity   = vadActivity;
-    _volume        = volume;
-    _audioChannel  = audioChannel;
-    _energy        = energy;
+    id_            = id;
+    timestamp_     = timestamp;
+    sample_rate_hz_ = sample_rate_hz;
+    speech_type_    = speech_type;
+    vad_activity_   = vad_activity;
+    num_channels_  = num_channels;
+    energy_        = energy;
 
-    if((payloadDataLengthInSamples > kMaxAudioFrameSizeSamples) ||
-        (audioChannel > 2) || (audioChannel < 1))
+    if((samples_per_channel > kMaxDataSizeSamples) ||
+        (num_channels > 2) || (num_channels < 1))
     {
-        _payloadDataLengthInSamples = 0;
+        samples_per_channel_ = 0;
         return -1;
     }
-    _payloadDataLengthInSamples = payloadDataLengthInSamples;
-    if(payloadData != NULL)
+    samples_per_channel_ = samples_per_channel;
+    if(data != NULL)
     {
-        memcpy(_payloadData, payloadData, sizeof(WebRtc_Word16) *
-            payloadDataLengthInSamples * _audioChannel);
+        memcpy(data_, data, sizeof(int16_t) *
+            samples_per_channel * num_channels_);
     }
     else
     {
-        memset(_payloadData,0,sizeof(WebRtc_Word16) *
-            payloadDataLengthInSamples * _audioChannel);
+        memset(data_,0,sizeof(int16_t) *
+            samples_per_channel * num_channels_);
     }
     return 0;
 }
 
 inline
 void
-AudioFrame::Mute() const
+AudioFrame::Mute()
 {
-  memset(_payloadData, 0, _payloadDataLengthInSamples * sizeof(WebRtc_Word16));
+  memset(data_, 0, samples_per_channel_ * num_channels_ * sizeof(int16_t));
 }
 
 inline
@@ -842,9 +844,9 @@
 AudioFrame::operator=(const AudioFrame& rhs)
 {
     // Sanity Check
-    if((rhs._payloadDataLengthInSamples > kMaxAudioFrameSizeSamples) ||
-        (rhs._audioChannel > 2) ||
-        (rhs._audioChannel < 1))
+    if((rhs.samples_per_channel_ > kMaxDataSizeSamples) ||
+        (rhs.num_channels_ > 2) ||
+        (rhs.num_channels_ < 1))
     {
         return *this;
     }
@@ -852,35 +854,34 @@
     {
         return *this;
     }
-    _id               = rhs._id;
-    _timeStamp        = rhs._timeStamp;
-    _frequencyInHz    = rhs._frequencyInHz;
-    _speechType       = rhs._speechType;
-    _vadActivity      = rhs._vadActivity;
-    _volume           = rhs._volume;
-    _audioChannel     = rhs._audioChannel;
-    _energy           = rhs._energy;
+    id_               = rhs.id_;
+    timestamp_        = rhs.timestamp_;
+    sample_rate_hz_    = rhs.sample_rate_hz_;
+    speech_type_       = rhs.speech_type_;
+    vad_activity_      = rhs.vad_activity_;
+    num_channels_     = rhs.num_channels_;
+    energy_           = rhs.energy_;
 
-    _payloadDataLengthInSamples = rhs._payloadDataLengthInSamples;
-    memcpy(_payloadData, rhs._payloadData,
-        sizeof(WebRtc_Word16) * rhs._payloadDataLengthInSamples * _audioChannel);
+    samples_per_channel_ = rhs.samples_per_channel_;
+    memcpy(data_, rhs.data_,
+        sizeof(int16_t) * rhs.samples_per_channel_ * num_channels_);
 
     return *this;
 }
 
 inline
 AudioFrame&
-AudioFrame::operator>>=(const WebRtc_Word32 rhs)
+AudioFrame::operator>>=(const int rhs)
 {
-    assert((_audioChannel > 0) && (_audioChannel < 3));
-    if((_audioChannel > 2) ||
-        (_audioChannel < 1))
+    assert((num_channels_ > 0) && (num_channels_ < 3));
+    if((num_channels_ > 2) ||
+        (num_channels_ < 1))
     {
         return *this;
     }
-    for(WebRtc_UWord16 i = 0; i < _payloadDataLengthInSamples * _audioChannel; i++)
+    for(int i = 0; i < samples_per_channel_ * num_channels_; i++)
     {
-        _payloadData[i] = WebRtc_Word16(_payloadData[i] >> rhs);
+        data_[i] = static_cast<int16_t>(data_[i] >> rhs);
     }
     return *this;
 }
@@ -890,39 +891,39 @@
 AudioFrame::Append(const AudioFrame& rhs)
 {
     // Sanity check
-    assert((_audioChannel > 0) && (_audioChannel < 3));
-    if((_audioChannel > 2) ||
-        (_audioChannel < 1))
+    assert((num_channels_ > 0) && (num_channels_ < 3));
+    if((num_channels_ > 2) ||
+        (num_channels_ < 1))
     {
         return *this;
     }
-    if(_audioChannel != rhs._audioChannel)
+    if(num_channels_ != rhs.num_channels_)
     {
         return *this;
     }
-    if((_vadActivity == kVadActive) ||
-        rhs._vadActivity == kVadActive)
+    if((vad_activity_ == kVadActive) ||
+        rhs.vad_activity_ == kVadActive)
     {
-        _vadActivity = kVadActive;
+        vad_activity_ = kVadActive;
     }
-    else if((_vadActivity == kVadUnknown) ||
-        rhs._vadActivity == kVadUnknown)
+    else if((vad_activity_ == kVadUnknown) ||
+        rhs.vad_activity_ == kVadUnknown)
     {
-        _vadActivity = kVadUnknown;
+        vad_activity_ = kVadUnknown;
     }
-    if(_speechType != rhs._speechType)
+    if(speech_type_ != rhs.speech_type_)
     {
-        _speechType = kUndefined;
+        speech_type_ = kUndefined;
     }
 
-    WebRtc_UWord16 offset = _payloadDataLengthInSamples * _audioChannel;
-    for(WebRtc_UWord16 i = 0;
-        i < rhs._payloadDataLengthInSamples * rhs._audioChannel;
+    int offset = samples_per_channel_ * num_channels_;
+    for(int i = 0;
+        i < rhs.samples_per_channel_ * rhs.num_channels_;
         i++)
     {
-        _payloadData[offset+i] = rhs._payloadData[i];
+        data_[offset+i] = rhs.data_[i];
     }
-    _payloadDataLengthInSamples += rhs._payloadDataLengthInSamples;
+    samples_per_channel_ += rhs.samples_per_channel_;
     return *this;
 }
 
@@ -932,23 +933,23 @@
 AudioFrame::operator+=(const AudioFrame& rhs)
 {
     // Sanity check
-    assert((_audioChannel > 0) && (_audioChannel < 3));
-    if((_audioChannel > 2) ||
-        (_audioChannel < 1))
+    assert((num_channels_ > 0) && (num_channels_ < 3));
+    if((num_channels_ > 2) ||
+        (num_channels_ < 1))
     {
         return *this;
     }
-    if(_audioChannel != rhs._audioChannel)
+    if(num_channels_ != rhs.num_channels_)
     {
         return *this;
     }
     bool noPrevData = false;
-    if(_payloadDataLengthInSamples != rhs._payloadDataLengthInSamples)
+    if(samples_per_channel_ != rhs.samples_per_channel_)
     {
-        if(_payloadDataLengthInSamples == 0)
+        if(samples_per_channel_ == 0)
         {
             // special case we have no data to start with
-            _payloadDataLengthInSamples = rhs._payloadDataLengthInSamples;
+            samples_per_channel_ = rhs.samples_per_channel_;
             noPrevData = true;
         } else
         {
@@ -956,47 +957,46 @@
         }
     }
 
-    if((_vadActivity == kVadActive) ||
-        rhs._vadActivity == kVadActive)
+    if((vad_activity_ == kVadActive) ||
+        rhs.vad_activity_ == kVadActive)
     {
-        _vadActivity = kVadActive;
+        vad_activity_ = kVadActive;
     }
-    else if((_vadActivity == kVadUnknown) ||
-        rhs._vadActivity == kVadUnknown)
+    else if((vad_activity_ == kVadUnknown) ||
+        rhs.vad_activity_ == kVadUnknown)
     {
-        _vadActivity = kVadUnknown;
+        vad_activity_ = kVadUnknown;
     }
 
-    if(_speechType != rhs._speechType)
+    if(speech_type_ != rhs.speech_type_)
     {
-        _speechType = kUndefined;
+        speech_type_ = kUndefined;
     }
 
     if(noPrevData)
     {
-        memcpy(_payloadData, rhs._payloadData,
-          sizeof(WebRtc_Word16) * rhs._payloadDataLengthInSamples * _audioChannel);
+        memcpy(data_, rhs.data_,
+          sizeof(int16_t) * rhs.samples_per_channel_ * num_channels_);
     } else
     {
       // IMPROVEMENT this can be done very fast in assembly
-      for(WebRtc_UWord16 i = 0; i < _payloadDataLengthInSamples * _audioChannel; i++)
+      for(int i = 0; i < samples_per_channel_ * num_channels_; i++)
       {
-          WebRtc_Word32 wrapGuard = (WebRtc_Word32)_payloadData[i] +
-                  (WebRtc_Word32)rhs._payloadData[i];
+          int32_t wrapGuard = static_cast<int32_t>(data_[i]) +
+              static_cast<int32_t>(rhs.data_[i]);
           if(wrapGuard < -32768)
           {
-              _payloadData[i] = -32768;
+              data_[i] = -32768;
           }else if(wrapGuard > 32767)
           {
-              _payloadData[i] = 32767;
+              data_[i] = 32767;
           }else
           {
-              _payloadData[i] = (WebRtc_Word16)wrapGuard;
+              data_[i] = (int16_t)wrapGuard;
           }
       }
     }
-    _energy = 0xffffffff;
-    _volume = 0xffffffff;
+    energy_ = 0xffffffff;
     return *this;
 }
 
@@ -1005,43 +1005,42 @@
 AudioFrame::operator-=(const AudioFrame& rhs)
 {
     // Sanity check
-    assert((_audioChannel > 0) && (_audioChannel < 3));
-    if((_audioChannel > 2)||
-        (_audioChannel < 1))
+    assert((num_channels_ > 0) && (num_channels_ < 3));
+    if((num_channels_ > 2)||
+        (num_channels_ < 1))
     {
         return *this;
     }
-    if((_payloadDataLengthInSamples != rhs._payloadDataLengthInSamples) ||
-        (_audioChannel != rhs._audioChannel))
+    if((samples_per_channel_ != rhs.samples_per_channel_) ||
+        (num_channels_ != rhs.num_channels_))
     {
         return *this;
     }
-    if((_vadActivity != kVadPassive) ||
-        rhs._vadActivity != kVadPassive)
+    if((vad_activity_ != kVadPassive) ||
+        rhs.vad_activity_ != kVadPassive)
     {
-        _vadActivity = kVadUnknown;
+        vad_activity_ = kVadUnknown;
     }
-    _speechType = kUndefined;
+    speech_type_ = kUndefined;
 
-    for(WebRtc_UWord16 i = 0; i < _payloadDataLengthInSamples * _audioChannel; i++)
+    for(int i = 0; i < samples_per_channel_ * num_channels_; i++)
     {
-        WebRtc_Word32 wrapGuard = (WebRtc_Word32)_payloadData[i] -
-                (WebRtc_Word32)rhs._payloadData[i];
+        int32_t wrapGuard = static_cast<int32_t>(data_[i]) -
+            static_cast<int32_t>(rhs.data_[i]);
         if(wrapGuard < -32768)
         {
-            _payloadData[i] = -32768;
+            data_[i] = -32768;
         }
         else if(wrapGuard > 32767)
         {
-            _payloadData[i] = 32767;
+            data_[i] = 32767;
         }
         else
         {
-            _payloadData[i] = (WebRtc_Word16)wrapGuard;
+            data_[i] = (int16_t)wrapGuard;
         }
     }
-    _energy = 0xffffffff;
-    _volume = 0xffffffff;
+    energy_ = 0xffffffff;
     return *this;
 }
 
diff --git a/src/modules/media_file/OWNERS b/src/modules/media_file/OWNERS
new file mode 100644
index 0000000..2cc47e4
--- /dev/null
+++ b/src/modules/media_file/OWNERS
@@ -0,0 +1,4 @@
+pwestin@webrtc.org

+mflodman@webrtc.org

+perkj@webrtc.org

+niklas.enbom@webrtc.org
\ No newline at end of file
diff --git a/src/modules/media_file/interface/media_file.h b/src/modules/media_file/interface/media_file.h
new file mode 100644
index 0000000..ea10d82
--- /dev/null
+++ b/src/modules/media_file/interface/media_file.h
@@ -0,0 +1,227 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_MEDIA_FILE_INTERFACE_MEDIA_FILE_H_
+#define WEBRTC_MODULES_MEDIA_FILE_INTERFACE_MEDIA_FILE_H_
+
+#include "common_types.h"
+#include "typedefs.h"
+#include "module.h"
+#include "module_common_types.h"
+#include "media_file_defines.h"
+
+namespace webrtc {
+class MediaFile : public Module
+{
+public:
+    // Factory method. Constructor disabled. id is the identifier for the
+    // MediaFile instance.
+    static MediaFile* CreateMediaFile(const WebRtc_Word32 id);
+    static void DestroyMediaFile(MediaFile* module);
+
+    // Set the MediaFile instance identifier.
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id) = 0;
+
+    // Put 10-60ms of audio data from file into the audioBuffer depending on
+    // codec frame size. dataLengthInBytes is both an input and output
+    // parameter. As input parameter it indicates the size of audioBuffer.
+    // As output parameter it indicates the number of bytes written to
+    // audioBuffer.
+    // Note: This API only play mono audio but can be used on file containing
+    // audio with more channels (in which case the audio will be converted to
+    // mono).
+    virtual WebRtc_Word32 PlayoutAudioData(
+        WebRtc_Word8* audioBuffer,
+        WebRtc_UWord32& dataLengthInBytes) = 0;
+
+    // Put one video frame into videoBuffer. dataLengthInBytes is both an input
+    // and output parameter. As input parameter it indicates the size of
+    // videoBuffer. As output parameter it indicates the number of bytes written
+    // to videoBuffer.
+    virtual WebRtc_Word32 PlayoutAVIVideoData(
+        WebRtc_Word8* videoBuffer,
+        WebRtc_UWord32& dataLengthInBytes) = 0;
+
+    // Put 10-60ms, depending on codec frame size, of audio data from file into
+    // audioBufferLeft and audioBufferRight. The buffers contain the left and
+    // right channel of played out stereo audio.
+    // dataLengthInBytes is both an input and output parameter. As input
+    // parameter it indicates the size of both audioBufferLeft and
+    // audioBufferRight. As output parameter it indicates the number of bytes
+    // written to both audio buffers.
+    // Note: This API can only be successfully called for WAV files with stereo
+    // audio.
+    virtual WebRtc_Word32 PlayoutStereoData(
+        WebRtc_Word8* audioBufferLeft,
+        WebRtc_Word8* audioBufferRight,
+        WebRtc_UWord32& dataLengthInBytes) = 0;
+
+    // Open the file specified by fileName (relative path is allowed) for
+    // reading. FileCallback::PlayNotification(..) will be called after
+    // notificationTimeMs of the file has been played if notificationTimeMs is
+    // greater than zero. If loop is true the file will be played until
+    // StopPlaying() is called. When end of file is reached the file is read
+    // from the start. format specifies the type of file fileName refers to.
+    // codecInst specifies the encoding of the audio data. Note that
+    // file formats that contain this information (like WAV files) don't need to
+    // provide a non-NULL codecInst. startPointMs and stopPointMs, unless zero,
+    // specify what part of the file should be read. From startPointMs ms to
+    // stopPointMs ms.
+    // Note: codecInst.channels should be set to 2 for stereo (and 1 for
+    // mono). Stereo audio is only supported for WAV files.
+    virtual WebRtc_Word32 StartPlayingAudioFile(
+        const char* fileName,
+        const WebRtc_UWord32 notificationTimeMs = 0,
+        const bool loop                         = false,
+        const FileFormats format                = kFileFormatPcm16kHzFile,
+        const CodecInst* codecInst              = NULL,
+        const WebRtc_UWord32 startPointMs       = 0,
+        const WebRtc_UWord32 stopPointMs        = 0) = 0;
+
+    // Open the file specified by fileName for reading (relative path is
+    // allowed). If loop is true the file will be played until StopPlaying() is
+    // called. When end of file is reached the file is read from the start.
+    // format specifies the type of file fileName refers to. Only video will be
+    // read if videoOnly is true.
+    virtual WebRtc_Word32 StartPlayingVideoFile(const char* fileName,
+                                                const bool loop,
+                                                bool videoOnly,
+                                                const FileFormats format) = 0;
+
+    // Prepare for playing audio from stream.
+    // FileCallback::PlayNotification(..) will be called after
+    // notificationTimeMs of the file has been played if notificationTimeMs is
+    // greater than zero. format specifies the type of file fileName refers to.
+    // codecInst specifies the encoding of the audio data. Note that
+    // file formats that contain this information (like WAV files) don't need to
+    // provide a non-NULL codecInst. startPointMs and stopPointMs, unless zero,
+    // specify what part of the file should be read. From startPointMs ms to
+    // stopPointMs ms.
+    // Note: codecInst.channels should be set to 2 for stereo (and 1 for
+    // mono). Stereo audio is only supported for WAV files.
+    virtual WebRtc_Word32 StartPlayingAudioStream(
+        InStream& stream,
+        const WebRtc_UWord32 notificationTimeMs = 0,
+        const FileFormats    format             = kFileFormatPcm16kHzFile,
+        const CodecInst*     codecInst          = NULL,
+        const WebRtc_UWord32 startPointMs       = 0,
+        const WebRtc_UWord32 stopPointMs        = 0) = 0;
+
+    // Stop playing from file or stream.
+    virtual WebRtc_Word32 StopPlaying() = 0;
+
+    // Return true if playing.
+    virtual bool IsPlaying() = 0;
+
+
+    // Set durationMs to the number of ms that has been played from file.
+    virtual WebRtc_Word32 PlayoutPositionMs(
+        WebRtc_UWord32& durationMs) const = 0;
+
+    // Write one audio frame, i.e. the bufferLength first bytes of audioBuffer,
+    // to file. The audio frame size is determined by the codecInst.pacsize
+    // parameter of the last sucessfull StartRecordingAudioFile(..) call.
+    // Note: bufferLength must be exactly one frame.
+    virtual WebRtc_Word32 IncomingAudioData(
+        const WebRtc_Word8*  audioBuffer,
+        const WebRtc_UWord32 bufferLength) = 0;
+
+    // Write one video frame, i.e. the bufferLength first bytes of videoBuffer,
+    // to file.
+    // Note: videoBuffer can contain encoded data. The codec used must be the
+    // same as what was specified by videoCodecInst for the last successfull
+    // StartRecordingVideoFile(..) call. The videoBuffer must contain exactly
+    // one video frame.
+    virtual WebRtc_Word32 IncomingAVIVideoData(
+        const WebRtc_Word8*  videoBuffer,
+        const WebRtc_UWord32 bufferLength) = 0;
+
+    // Open/creates file specified by fileName for writing (relative path is
+    // allowed). FileCallback::RecordNotification(..) will be called after
+    // notificationTimeMs of audio data has been recorded if
+    // notificationTimeMs is greater than zero.
+    // format specifies the type of file that should be created/opened.
+    // codecInst specifies the encoding of the audio data. maxSizeBytes
+    // specifies the number of bytes allowed to be written to file if it is
+    // greater than zero.
+    // Note: codecInst.channels should be set to 2 for stereo (and 1 for
+    // mono). Stereo is only supported for WAV files.
+    virtual WebRtc_Word32 StartRecordingAudioFile(
+        const char*  fileName,
+        const FileFormats    format,
+        const CodecInst&     codecInst,
+        const WebRtc_UWord32 notificationTimeMs = 0,
+        const WebRtc_UWord32 maxSizeBytes       = 0) = 0;
+
+    // Open/create the file specified by fileName for writing audio/video data
+    // (relative path is allowed). format specifies the type of file fileName
+    // should be. codecInst specifies the encoding of the audio data.
+    // videoCodecInst specifies the encoding of the video data. Only video data
+    // will be recorded if videoOnly is true.
+    virtual WebRtc_Word32 StartRecordingVideoFile(
+        const char* fileName,
+        const FileFormats   format,
+        const CodecInst&    codecInst,
+        const VideoCodec&   videoCodecInst,
+        bool videoOnly = false) = 0;
+
+    // Prepare for recording audio to stream.
+    // FileCallback::RecordNotification(..) will be called after
+    // notificationTimeMs of audio data has been recorded if
+    // notificationTimeMs is greater than zero.
+    // format specifies the type of file that stream should correspond to.
+    // codecInst specifies the encoding of the audio data.
+    // Note: codecInst.channels should be set to 2 for stereo (and 1 for
+    // mono). Stereo is only supported for WAV files.
+    virtual WebRtc_Word32 StartRecordingAudioStream(
+        OutStream&           stream,
+        const FileFormats    format,
+        const CodecInst&     codecInst,
+        const WebRtc_UWord32 notificationTimeMs = 0) = 0;
+
+    // Stop recording to file or stream.
+    virtual WebRtc_Word32 StopRecording() = 0;
+
+    // Return true if recording.
+    virtual bool IsRecording() = 0;
+
+    // Set durationMs to the number of ms that has been recorded to file.
+    virtual WebRtc_Word32 RecordDurationMs(WebRtc_UWord32& durationMs) = 0;
+
+    // Return true if recording or playing is stereo.
+    virtual bool IsStereo() = 0;
+
+    // Register callback to receive media file related notifications. Disables
+    // callbacks if callback is NULL.
+    virtual WebRtc_Word32 SetModuleFileCallback(FileCallback* callback) = 0;
+
+    // Set durationMs to the size of the file (in ms) specified by fileName.
+    // format specifies the type of file fileName refers to. freqInHz specifies
+    // the sampling frequency of the file.
+    virtual WebRtc_Word32 FileDurationMs(
+        const char*  fileName,
+        WebRtc_UWord32&      durationMs,
+        const FileFormats    format,
+        const WebRtc_UWord32 freqInHz = 16000) = 0;
+
+    // Update codecInst according to the current audio codec being used for
+    // reading or writing.
+    virtual WebRtc_Word32 codec_info(CodecInst& codecInst) const = 0;
+
+    // Update videoCodecInst according to the current video codec being used for
+    // reading or writing.
+    virtual WebRtc_Word32 VideoCodecInst(VideoCodec& videoCodecInst) const = 0;
+
+protected:
+    MediaFile() {}
+    virtual ~MediaFile() {}
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_MEDIA_FILE_INTERFACE_MEDIA_FILE_H_
diff --git a/src/modules/media_file/interface/media_file_defines.h b/src/modules/media_file/interface/media_file_defines.h
new file mode 100644
index 0000000..38af562
--- /dev/null
+++ b/src/modules/media_file/interface/media_file_defines.h
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_MEDIA_FILE_INTERFACE_MEDIA_FILE_DEFINES_H_
+#define WEBRTC_MODULES_MEDIA_FILE_INTERFACE_MEDIA_FILE_DEFINES_H_
+
+#include "engine_configurations.h"
+#include "module_common_types.h"
+#include "typedefs.h"
+
+namespace webrtc {
+// Callback class for the MediaFile class.
+class FileCallback
+{
+public:
+    virtual ~FileCallback(){}
+
+    // This function is called by MediaFile when a file has been playing for
+    // durationMs ms. id is the identifier for the MediaFile instance calling
+    // the callback.
+    virtual void PlayNotification(const WebRtc_Word32 id,
+                                  const WebRtc_UWord32 durationMs) = 0;
+
+    // This function is called by MediaFile when a file has been recording for
+    // durationMs ms. id is the identifier for the MediaFile instance calling
+    // the callback.
+    virtual void RecordNotification(const WebRtc_Word32 id,
+                                    const WebRtc_UWord32 durationMs) = 0;
+
+    // This function is called by MediaFile when a file has been stopped
+    // playing. id is the identifier for the MediaFile instance calling the
+    // callback.
+    virtual void PlayFileEnded(const WebRtc_Word32 id) = 0;
+
+    // This function is called by MediaFile when a file has been stopped
+    // recording. id is the identifier for the MediaFile instance calling the
+    // callback.
+    virtual void RecordFileEnded(const WebRtc_Word32 id) = 0;
+
+protected:
+    FileCallback() {}
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_MEDIA_FILE_INTERFACE_MEDIA_FILE_DEFINES_H_
diff --git a/src/modules/media_file/source/avi_file.cc b/src/modules/media_file/source/avi_file.cc
new file mode 100644
index 0000000..3b6d778
--- /dev/null
+++ b/src/modules/media_file/source/avi_file.cc
@@ -0,0 +1,1774 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// TODO(henrike): reassess the error handling in this class. Currently failure
+// is detected by asserts in many places. Also a refactoring of this class would
+// be beneficial.
+
+#include "avi_file.h"
+
+#include <assert.h>
+#include <string.h>
+
+#ifdef _WIN32
+#include <windows.h>
+#endif
+
+#include "critical_section_wrapper.h"
+#include "file_wrapper.h"
+#include "list_wrapper.h"
+#include "trace.h"
+
+// http://msdn2.microsoft.com/en-us/library/ms779636.aspx
+// A chunk has the following form:
+// ckID ckSize ckData
+// where ckID is a FOURCC that identifies the data contained in the
+// chunk, ckData is a 4-byte value giving the size of the data in
+// ckData, and ckData is zero or more bytes of data. The data is always
+// padded to nearest WORD boundary. ckSize gives the size of the valid
+// data in the chunk; it does not include the padding, the size of
+// ckID, or the size of ckSize.
+//http://msdn2.microsoft.com/en-us/library/ms779632.aspx
+//NOTE: Workaround to make MPEG4 files play on WMP. MPEG files
+//      place the config parameters efter the BITMAPINFOHEADER and
+//      *NOT* in the 'strd'!
+// http://msdn.microsoft.com/en-us/library/dd183375.aspx
+// http://msdn.microsoft.com/en-us/library/dd183376.aspx
+
+namespace webrtc {
+namespace {
+static const WebRtc_UWord32 kAvifHasindex       = 0x00000010;
+static const WebRtc_UWord32 kAvifMustuseindex   = 0x00000020;
+static const WebRtc_UWord32 kAvifIsinterleaved  = 0x00000100;
+static const WebRtc_UWord32 kAvifTrustcktype    = 0x00000800;
+static const WebRtc_UWord32 kAvifWascapturefile = 0x00010000;
+
+template <class T>
+T MinValue(T a, T b)
+{
+    return a < b ? a : b;
+}
+}  // namespace
+
+AviFile::AVIMAINHEADER::AVIMAINHEADER()
+    : fcc(                  0),
+      cb(                   0),
+      dwMicroSecPerFrame(   0),
+      dwMaxBytesPerSec(     0),
+      dwPaddingGranularity( 0),
+      dwFlags(              0),
+      dwTotalFrames(        0),
+      dwInitialFrames(      0),
+      dwStreams(            0),
+      dwSuggestedBufferSize(0),
+      dwWidth(              0),
+      dwHeight(             0)
+{
+    dwReserved[0] = 0;
+    dwReserved[1] = 0;
+    dwReserved[2] = 0;
+    dwReserved[3] = 0;
+}
+
+AVISTREAMHEADER::AVISTREAMHEADER()
+    : fcc(                  0),
+      cb(                   0),
+      fccType(              0),
+      fccHandler(           0),
+      dwFlags(              0),
+      wPriority(            0),
+      wLanguage(            0),
+      dwInitialFrames(      0),
+      dwScale(              0),
+      dwRate(               0),
+      dwStart(              0),
+      dwLength(             0),
+      dwSuggestedBufferSize(0),
+      dwQuality(            0),
+      dwSampleSize(         0)
+{
+    rcFrame.left   = 0;
+    rcFrame.top    = 0;
+    rcFrame.right  = 0;
+    rcFrame.bottom = 0;
+}
+
+BITMAPINFOHEADER::BITMAPINFOHEADER()
+    : biSize(         0),
+      biWidth(        0),
+      biHeight(       0),
+      biPlanes(       0),
+      biBitCount(     0),
+      biCompression(  0),
+      biSizeImage(    0),
+      biXPelsPerMeter(0),
+      biYPelsPerMeter(0),
+      biClrUsed(      0),
+      biClrImportant( 0)
+{
+}
+
+WAVEFORMATEX::WAVEFORMATEX()
+    : wFormatTag(     0),
+      nChannels(      0),
+      nSamplesPerSec( 0),
+      nAvgBytesPerSec(0),
+      nBlockAlign(    0),
+      wBitsPerSample( 0),
+      cbSize(         0)
+{
+}
+
+AviFile::AVIINDEXENTRY::AVIINDEXENTRY(WebRtc_UWord32 inckid,
+                                      WebRtc_UWord32 indwFlags,
+                                      WebRtc_UWord32 indwChunkOffset,
+                                      WebRtc_UWord32 indwChunkLength)
+    : ckid(inckid),
+      dwFlags(indwFlags),
+      dwChunkOffset(indwChunkOffset),
+      dwChunkLength(indwChunkLength)
+{
+}
+
+AviFile::AviFile()
+    : _crit(CriticalSectionWrapper::CreateCriticalSection()),
+      _aviFile(NULL),
+      _aviHeader(),
+      _videoStreamHeader(),
+      _audioStreamHeader(),
+      _videoFormatHeader(),
+      _audioFormatHeader(),
+      _videoConfigParameters(),
+      _videoConfigLength(0),
+      _videoStreamName(),
+      _audioConfigParameters(),
+      _audioStreamName(),
+      _videoStream(),
+      _audioStream(),
+      _nrStreams(0),
+      _aviLength(0),
+      _dataLength(0),
+      _bytesRead(0),
+      _dataStartByte(0),
+      _framesRead(0),
+      _videoFrames(0),
+      _audioFrames(0),
+      _reading(false),
+      _openedAs(AVI_AUDIO),
+      _loop(false),
+      _writing(false),
+      _bytesWritten(0),
+      _riffSizeMark(0),
+      _moviSizeMark(0),
+      _totNumFramesMark(0),
+      _videoStreamLengthMark(0),
+      _audioStreamLengthMark(0),
+      _moviListOffset(0),
+      _writeAudioStream(false),
+      _writeVideoStream(false),
+      _aviMode(NotSet),
+      _videoCodecConfigParams(NULL),
+      _videoCodecConfigParamsLength(0),
+      _videoStreamDataChunkPrefix(0),
+      _audioStreamDataChunkPrefix(0),
+      _created(false),
+      _indexList(new ListWrapper())
+{
+  ResetComplexMembers();
+}
+
+AviFile::~AviFile()
+{
+    Close();
+
+    delete _indexList;
+    delete[] _videoCodecConfigParams;
+    delete _crit;
+}
+
+WebRtc_Word32 AviFile::Open(AVIStreamType streamType, const char* fileName,
+                            bool loop)
+{
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,  "OpenAVIFile(%s)",
+                 fileName);
+    _crit->Enter();
+
+    if (_aviMode != NotSet)
+    {
+        _crit->Leave();
+        return -1;
+    }
+
+    _aviMode = Read;
+
+    if (!fileName)
+    {
+        _crit->Leave();
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,  "\tfileName not valid!");
+        return -1;
+    }
+
+#ifdef _WIN32
+    // fopen does not support wide characters on Windows, ergo _wfopen.
+    wchar_t wideFileName[FileWrapper::kMaxFileNameSize];
+    wideFileName[0] = 0;
+    MultiByteToWideChar(CP_UTF8,0,fileName, -1, // convert the whole string
+                        wideFileName, FileWrapper::kMaxFileNameSize);
+
+    _aviFile = _wfopen(wideFileName, L"rb");
+#else
+    _aviFile = fopen(fileName, "rb");
+#endif
+
+    if (!_aviFile)
+    {
+        _crit->Leave();
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,  "Could not open file!");
+        return -1;
+    }
+
+    // ReadRIFF verifies that the file is AVI and figures out the file length.
+    WebRtc_Word32 err = ReadRIFF();
+    if (err)
+    {
+        if (_aviFile)
+        {
+            fclose(_aviFile);
+            _aviFile = NULL;
+        }
+        _crit->Leave();
+        return -1;
+    }
+
+   err = ReadHeaders();
+    if (err)
+    {
+        if (_aviFile)
+        {
+            fclose(_aviFile);
+            _aviFile = NULL;
+        }
+        _crit->Leave();
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Unsupported or corrupt AVI format");
+        return -1;
+    }
+
+    _dataStartByte = _bytesRead;
+    _reading = true;
+    _openedAs = streamType;
+    _loop = loop;
+    _crit->Leave();
+    return 0;
+}
+
+WebRtc_Word32 AviFile::Close()
+{
+    _crit->Enter();
+    switch (_aviMode)
+    {
+    case Read:
+        CloseRead();
+        break;
+    case Write:
+        CloseWrite();
+        break;
+    default:
+        break;
+    }
+
+    if (_videoCodecConfigParams)
+    {
+        delete [] _videoCodecConfigParams;
+        _videoCodecConfigParams = 0;
+    }
+    ResetMembers();
+    _crit->Leave();
+    return 0;
+}
+
+WebRtc_UWord32 AviFile::MakeFourCc(WebRtc_UWord8 ch0, WebRtc_UWord8 ch1,
+                                   WebRtc_UWord8 ch2, WebRtc_UWord8 ch3)
+{
+    return ((WebRtc_UWord32)(WebRtc_UWord8)(ch0)         |
+            ((WebRtc_UWord32)(WebRtc_UWord8)(ch1) << 8)  |
+            ((WebRtc_UWord32)(WebRtc_UWord8)(ch2) << 16) |
+            ((WebRtc_UWord32)(WebRtc_UWord8)(ch3) << 24 ));
+}
+
+WebRtc_Word32 AviFile::GetVideoStreamInfo(AVISTREAMHEADER& videoStreamHeader,
+                                          BITMAPINFOHEADER& bitmapInfo,
+                                          char* codecConfigParameters,
+                                          WebRtc_Word32& configLength)
+{
+    _crit->Enter();
+    if (!_reading && !_created)
+    {
+        _crit->Leave();
+        return -1;
+    }
+
+    memcpy(&videoStreamHeader, &_videoStreamHeader, sizeof(_videoStreamHeader));
+    memcpy(&bitmapInfo, &_videoFormatHeader, sizeof(_videoFormatHeader));
+
+    if (configLength <= _videoConfigLength)
+    {
+        memcpy(codecConfigParameters, _videoConfigParameters,
+               _videoConfigLength);
+        configLength = _videoConfigLength;
+    }
+    else
+    {
+        configLength = 0;
+    }
+    _crit->Leave();
+    return 0;
+}
+
+WebRtc_Word32 AviFile::GetDuration(WebRtc_Word32& durationMs)
+{
+    _crit->Enter();
+    if (_videoStreamHeader.dwRate==0 || _videoStreamHeader.dwScale==0)
+    {
+        _crit->Leave();
+        return -1;
+    }
+
+    durationMs = _videoStreamHeader.dwLength * 1000 /
+        (_videoStreamHeader.dwRate/_videoStreamHeader.dwScale);
+    _crit->Leave();
+    return 0;
+}
+
+WebRtc_Word32 AviFile::GetAudioStreamInfo(WAVEFORMATEX& waveHeader)
+{
+    _crit->Enter();
+    if (_aviMode != Read)
+    {
+        _crit->Leave();
+        return -1;
+    }
+    if (!_reading && !_created)
+    {
+        _crit->Leave();
+        return -1;
+    }
+    memcpy(&waveHeader, &_audioFormatHeader, sizeof(_audioFormatHeader));
+    _crit->Leave();
+    return 0;
+}
+
+WebRtc_Word32 AviFile::WriteAudio(const WebRtc_UWord8* data,
+                                  WebRtc_Word32 length)
+{
+    _crit->Enter();
+    size_t newBytesWritten = _bytesWritten;
+
+    if (_aviMode != Write)
+    {
+        _crit->Leave();
+        return -1;
+    }
+    if (!_created)
+    {
+        _crit->Leave();
+        return -1;
+    }
+    if (!_writeAudioStream)
+    {
+        _crit->Leave();
+        return -1;
+    }
+
+    // Start of chunk.
+    const WebRtc_UWord32 chunkOffset = ftell(_aviFile) - _moviListOffset;
+    _bytesWritten += PutLE32(_audioStreamDataChunkPrefix);
+    // Size is unknown at this point. Update later.
+    _bytesWritten += PutLE32(0);
+    const size_t chunkSizeMark = _bytesWritten;
+
+    _bytesWritten += PutBuffer(data, length);
+
+    const long chunkSize = PutLE32LengthFromCurrent(
+        static_cast<long>(chunkSizeMark));
+
+    // Make sure that the chunk is aligned on 2 bytes (= 1 sample).
+    if (chunkSize % 2)
+    {
+        _bytesWritten += PutByte(0);
+    }
+    // End of chunk
+
+    // Save chunk information for use when closing file.
+    AddChunkToIndexList(_audioStreamDataChunkPrefix, 0, // No flags.
+                        chunkOffset, chunkSize);
+
+    ++_audioFrames;
+    newBytesWritten = _bytesWritten - newBytesWritten;
+    _crit->Leave();
+    return static_cast<WebRtc_Word32>(newBytesWritten);
+}
+
+WebRtc_Word32 AviFile::WriteVideo(const WebRtc_UWord8* data,
+                                  WebRtc_Word32 length)
+{
+    _crit->Enter();
+    size_t newBytesWritten = _bytesWritten;
+    if (_aviMode != Write)
+    {
+        _crit->Leave();
+        return -1;
+    }
+    if (!_created)
+    {
+        _crit->Leave();
+        return -1;
+    }
+    if (!_writeVideoStream)
+    {
+        _crit->Leave();
+        return -1;
+    }
+
+    // Start of chunk.
+    const WebRtc_UWord32 chunkOffset = ftell(_aviFile) - _moviListOffset;
+    _bytesWritten += PutLE32(_videoStreamDataChunkPrefix);
+    // Size is unknown at this point. Update later.
+    _bytesWritten += PutLE32(0);
+    const size_t chunkSizeMark = _bytesWritten;
+
+    _bytesWritten += PutBuffer(data, length);
+
+    const long chunkSize = PutLE32LengthFromCurrent(
+        static_cast<long>(chunkSizeMark));
+
+    // Make sure that the chunk is aligned on 2 bytes (= 1 sample).
+    if (chunkSize % 2)
+    {
+        //Pad one byte, to WORD align.
+        _bytesWritten += PutByte(0);
+    }
+     //End chunk!
+    AddChunkToIndexList(_videoStreamDataChunkPrefix, 0, // No flags.
+                        chunkOffset, static_cast<WebRtc_UWord32>(chunkSize));
+
+    ++_videoFrames;
+    newBytesWritten = _bytesWritten - newBytesWritten;
+    _crit->Leave();
+    return static_cast<WebRtc_Word32>(newBytesWritten);
+}
+
+WebRtc_Word32 AviFile::PrepareDataChunkHeaders()
+{
+    // 00 video stream, 01 audio stream.
+    // db uncompresses video,  dc compressed video, wb WAV audio
+    if (_writeVideoStream)
+    {
+        if (strncmp((const char*) &_videoStreamHeader.fccHandler, "I420", 4) ==
+            0)
+        {
+            _videoStreamDataChunkPrefix = MakeFourCc('0', '0', 'd', 'b');
+        }
+        else
+        {
+            _videoStreamDataChunkPrefix = MakeFourCc('0', '0', 'd', 'c');
+        }
+        _audioStreamDataChunkPrefix = MakeFourCc('0', '1', 'w', 'b');
+    }
+    else
+    {
+        _audioStreamDataChunkPrefix = MakeFourCc('0', '0', 'w', 'b');
+    }
+    return 0;
+}
+
+WebRtc_Word32 AviFile::ReadMoviSubChunk(WebRtc_UWord8* data,
+                                        WebRtc_Word32& length,
+                                        WebRtc_UWord32 tag1,
+                                        WebRtc_UWord32 tag2)
+{
+    if (!_reading)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1,
+                     "AviFile::ReadMoviSubChunk(): File not open!");
+        length = 0;
+        return -1;
+    }
+
+    WebRtc_UWord32 size;
+    bool isEOFReached = false;
+    // Try to read one data chunk header
+    while (true)
+    {
+        // TODO (hellner): what happens if an empty AVI file is opened with
+        // _loop set to true? Seems like this while-loop would never exit!
+
+        // tag = db uncompresses video,  dc compressed video or wb WAV audio.
+        WebRtc_UWord32 tag;
+        _bytesRead += GetLE32(tag);
+        _bytesRead += GetLE32(size);
+
+        const WebRtc_Word32 eof = feof(_aviFile);
+        if (!eof)
+        {
+            if (tag == tag1)
+            {
+                // Supported tag found.
+                break;
+            }
+            else if ((tag == tag2) && (tag2 != 0))
+            {
+                // Supported tag found.
+                break;
+            }
+
+            // Jump to next chunk. The size is in bytes but chunks are aligned
+            // on 2 byte boundaries.
+            const WebRtc_UWord32 seekSize = (size % 2) ? size + 1 : size;
+            const WebRtc_Word32 err = fseek(_aviFile, seekSize, SEEK_CUR);
+
+            if (err)
+            {
+                isEOFReached = true;
+            }
+        }
+        else
+        {
+            isEOFReached = true;
+        }
+
+        if (isEOFReached)
+        {
+            clearerr(_aviFile);
+
+            if (_loop)
+            {
+                WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1,
+                             "AviFile::ReadMoviSubChunk(): Reached end of AVI\
+                              data file, starting from the beginning.");
+
+                fseek(_aviFile, static_cast<long>(_dataStartByte), SEEK_SET);
+
+                _bytesRead = _dataStartByte;
+                _framesRead = 0;
+                isEOFReached = false;
+            }
+            else
+            {
+                WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1,
+                             "AviFile::ReadMoviSubChunk(): Reached end of AVI\
+                             file!");
+                length = 0;
+                return -1;
+            }
+        }
+        _bytesRead += size;
+    }
+
+    if (static_cast<WebRtc_Word32>(size) > length)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1,
+                     "AviFile::ReadMoviSubChunk(): AVI read buffer too small!");
+
+        // Jump to next chunk. The size is in bytes but chunks are aligned
+        // on 2 byte boundaries.
+        const WebRtc_UWord32 seekSize = (size % 2) ? size + 1 : size;
+        fseek(_aviFile, seekSize, SEEK_CUR);
+        _bytesRead += seekSize;
+        length = 0;
+        return -1;
+    }
+    _bytesRead += GetBuffer(data, size);
+
+    // The size is in bytes but chunks are aligned on 2 byte boundaries.
+    if (size % 2)
+    {
+        WebRtc_UWord8 dummy_byte;
+        _bytesRead += GetByte(dummy_byte);
+    }
+    length = size;
+    ++_framesRead;
+    return 0;
+}
+
+WebRtc_Word32 AviFile::ReadAudio(WebRtc_UWord8* data, WebRtc_Word32& length)
+{
+    _crit->Enter();
+    WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1,  "AviFile::ReadAudio()");
+
+    if (_aviMode != Read)
+    {
+        _crit->Leave();
+        return -1;
+    }
+    if (_openedAs != AVI_AUDIO)
+    {
+        length = 0;
+        _crit->Leave();
+        WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1,  "File not open as audio!");
+        return -1;
+    }
+
+    const WebRtc_Word32 ret = ReadMoviSubChunk(
+        data,
+        length,
+        StreamAndTwoCharCodeToTag(_audioStream.streamNumber, "wb"));
+
+    _crit->Leave();
+    return ret;
+}
+
+WebRtc_Word32 AviFile::ReadVideo(WebRtc_UWord8* data, WebRtc_Word32& length)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1, "AviFile::ReadVideo()");
+
+    _crit->Enter();
+    if (_aviMode != Read)
+    {
+        //Has to be Read!
+        _crit->Leave();
+        return -1;
+    }
+    if (_openedAs != AVI_VIDEO)
+    {
+        length = 0;
+        _crit->Leave();
+        WEBRTC_TRACE(kTraceDebug, kTraceVideo, -1, "File not open as video!");
+        return -1;
+    }
+
+    const WebRtc_Word32 ret = ReadMoviSubChunk(
+        data,
+        length,
+        StreamAndTwoCharCodeToTag(_videoStream.streamNumber, "dc"),
+        StreamAndTwoCharCodeToTag(_videoStream.streamNumber, "db"));
+    _crit->Leave();
+    return ret;
+}
+
+WebRtc_Word32 AviFile::Create(const char* fileName)
+{
+    _crit->Enter();
+    if (_aviMode != Write)
+    {
+        _crit->Leave();
+        return -1;
+    }
+
+    if (!_writeVideoStream && !_writeAudioStream)
+    {
+        _crit->Leave();
+        return -1;
+    }
+    if (_created)
+    {
+        _crit->Leave();
+        return -1;
+    }
+
+#ifdef _WIN32
+    // fopen does not support wide characters on Windows, ergo _wfopen.
+    wchar_t wideFileName[FileWrapper::kMaxFileNameSize];
+    wideFileName[0] = 0;
+
+    MultiByteToWideChar(CP_UTF8,0,fileName, -1, // convert the whole string
+                        wideFileName, FileWrapper::kMaxFileNameSize);
+
+    _aviFile = _wfopen(wideFileName, L"w+b");
+    if (!_aviFile)
+    {
+        _crit->Leave();
+        return -1;
+    }
+#else
+    _aviFile = fopen(fileName, "w+b");
+    if (!_aviFile)
+    {
+        _crit->Leave();
+        return -1;
+    }
+#endif
+
+    WriteRIFF();
+    WriteHeaders();
+
+    _created = true;
+
+    PrepareDataChunkHeaders();
+    ClearIndexList();
+    WriteMoviStart();
+    _aviMode = Write;
+    _crit->Leave();
+    return 0;
+}
+
+WebRtc_Word32 AviFile::CreateVideoStream(
+    const AVISTREAMHEADER& videoStreamHeader,
+    const BITMAPINFOHEADER& bitMapInfoHeader,
+    const WebRtc_UWord8* codecConfigParams,
+    WebRtc_Word32 codecConfigParamsLength)
+{
+    _crit->Enter();
+    if (_aviMode == Read)
+    {
+        _crit->Leave();
+        return -1;
+    }
+
+    if (_created)
+    {
+        _crit->Leave();
+        return -1;
+    }
+
+    _aviMode = Write;
+    _writeVideoStream = true;
+
+    _videoStreamHeader = videoStreamHeader;
+    _videoFormatHeader = bitMapInfoHeader;
+
+    if (codecConfigParams && codecConfigParamsLength > 0)
+    {
+        if (_videoCodecConfigParams)
+        {
+            delete [] _videoCodecConfigParams;
+            _videoCodecConfigParams = 0;
+        }
+
+        _videoCodecConfigParams = new WebRtc_UWord8[codecConfigParamsLength];
+        _videoCodecConfigParamsLength = codecConfigParamsLength;
+
+        memcpy(_videoCodecConfigParams, codecConfigParams,
+               _videoCodecConfigParamsLength);
+    }
+    _crit->Leave();
+    return 0;
+}
+
+WebRtc_Word32 AviFile::CreateAudioStream(
+    const AVISTREAMHEADER& audioStreamHeader,
+    const WAVEFORMATEX& waveFormatHeader)
+{
+    _crit->Enter();
+
+    if (_aviMode == Read)
+    {
+        _crit->Leave();
+        return -1;
+    }
+
+    if (_created)
+    {
+        _crit->Leave();
+        return -1;
+    }
+
+    _aviMode = Write;
+    _writeAudioStream = true;
+    _audioStreamHeader = audioStreamHeader;
+    _audioFormatHeader = waveFormatHeader;
+    _crit->Leave();
+    return 0;
+}
+
+WebRtc_Word32 AviFile::WriteRIFF()
+{
+    const WebRtc_UWord32 riffTag = MakeFourCc('R', 'I', 'F', 'F');
+    _bytesWritten += PutLE32(riffTag);
+
+    // Size is unknown at this point. Update later.
+    _bytesWritten += PutLE32(0);
+    _riffSizeMark = _bytesWritten;
+
+    const WebRtc_UWord32 aviTag = MakeFourCc('A', 'V', 'I', ' ');
+    _bytesWritten += PutLE32(aviTag);
+
+    return 0;
+}
+
+
+WebRtc_Word32 AviFile::WriteHeaders()
+{
+    // Main AVI header list.
+    const WebRtc_UWord32 listTag = MakeFourCc('L', 'I', 'S', 'T');
+    _bytesWritten += PutLE32(listTag);
+
+    // Size is unknown at this point. Update later.
+    _bytesWritten += PutLE32(0);
+    const size_t listhdrlSizeMark = _bytesWritten;
+
+    const WebRtc_UWord32 hdrlTag = MakeFourCc('h', 'd', 'r', 'l');
+    _bytesWritten += PutLE32(hdrlTag);
+
+    WriteAVIMainHeader();
+    WriteAVIStreamHeaders();
+
+    const long hdrlLen = PutLE32LengthFromCurrent(
+        static_cast<long>(listhdrlSizeMark));
+
+    // Junk chunk to align on 2048 boundry (CD-ROM sector boundary).
+    const WebRtc_UWord32 junkTag = MakeFourCc('J', 'U', 'N', 'K');
+    _bytesWritten += PutLE32(junkTag);
+    // Size is unknown at this point. Update later.
+    _bytesWritten += PutLE32(0);
+    const size_t junkSizeMark = _bytesWritten;
+
+    const WebRtc_UWord32 junkBufferSize =
+        0x800     // 2048 byte alignment
+        - 12      // RIFF SIZE 'AVI '
+        - 8       // LIST SIZE
+        - hdrlLen //
+        - 8       // JUNK SIZE
+        - 12;     // LIST SIZE 'MOVI'
+
+    // TODO (hellner): why not just fseek here?
+    WebRtc_UWord8* junkBuffer = new WebRtc_UWord8[junkBufferSize];
+    memset(junkBuffer, 0, junkBufferSize);
+    _bytesWritten += PutBuffer(junkBuffer, junkBufferSize);
+    delete [] junkBuffer;
+
+    PutLE32LengthFromCurrent(static_cast<long>(junkSizeMark));
+    // End of JUNK chunk.
+    // End of main AVI header list.
+    return 0;
+}
+
+WebRtc_Word32 AviFile::WriteAVIMainHeader()
+{
+    const WebRtc_UWord32 avihTag = MakeFourCc('a', 'v', 'i', 'h');
+    _bytesWritten += PutLE32(avihTag);
+    _bytesWritten += PutLE32(14 * sizeof(WebRtc_UWord32));
+
+    const WebRtc_UWord32 scale = _videoStreamHeader.dwScale ?
+        _videoStreamHeader.dwScale : 1;
+    const WebRtc_UWord32 microSecPerFrame = 1000000 /
+        (_videoStreamHeader.dwRate / scale);
+    _bytesWritten += PutLE32(microSecPerFrame);
+    _bytesWritten += PutLE32(0);
+    _bytesWritten += PutLE32(0);
+
+    WebRtc_UWord32 numStreams = 0;
+    if (_writeVideoStream)
+    {
+        ++numStreams;
+    }
+    if (_writeAudioStream)
+    {
+        ++numStreams;
+    }
+
+    if (numStreams == 1)
+    {
+        _bytesWritten += PutLE32(
+            kAvifTrustcktype
+            | kAvifHasindex
+            | kAvifWascapturefile);
+    }
+    else
+    {
+        _bytesWritten += PutLE32(
+            kAvifTrustcktype
+            | kAvifHasindex
+            | kAvifWascapturefile
+            | kAvifIsinterleaved);
+    }
+
+    _totNumFramesMark = _bytesWritten;
+    _bytesWritten += PutLE32(0);
+    _bytesWritten += PutLE32(0);
+    _bytesWritten += PutLE32(numStreams);
+
+    if (_writeVideoStream)
+    {
+        _bytesWritten += PutLE32(
+            _videoStreamHeader.dwSuggestedBufferSize);
+        _bytesWritten += PutLE32(
+            _videoStreamHeader.rcFrame.right-_videoStreamHeader.rcFrame.left);
+        _bytesWritten += PutLE32(
+            _videoStreamHeader.rcFrame.bottom-_videoStreamHeader.rcFrame.top);
+    } else {
+        _bytesWritten += PutLE32(0);
+        _bytesWritten += PutLE32(0);
+        _bytesWritten += PutLE32(0);
+    }
+    _bytesWritten += PutLE32(0);
+    _bytesWritten += PutLE32(0);
+    _bytesWritten += PutLE32(0);
+    _bytesWritten += PutLE32(0);
+    return 0;
+}
+
+WebRtc_Word32 AviFile::WriteAVIStreamHeaders()
+{
+    if (_writeVideoStream)
+    {
+        WriteAVIVideoStreamHeaders();
+    }
+    if (_writeAudioStream)
+    {
+        WriteAVIAudioStreamHeaders();
+    }
+    return 0;
+}
+
+WebRtc_Word32 AviFile::WriteAVIVideoStreamHeaders()
+{
+    const WebRtc_UWord32 listTag = MakeFourCc('L', 'I', 'S', 'T');
+    _bytesWritten += PutLE32(listTag);
+
+    // Size is unknown at this point. Update later.
+    _bytesWritten += PutLE32(0);
+    const size_t liststrlSizeMark = _bytesWritten;
+
+    const WebRtc_UWord32 hdrlTag = MakeFourCc('s', 't', 'r', 'l');
+    _bytesWritten += PutLE32(hdrlTag);
+
+    WriteAVIVideoStreamHeaderChunks();
+
+    PutLE32LengthFromCurrent(static_cast<long>(liststrlSizeMark));
+
+    return 0;
+}
+
+WebRtc_Word32 AviFile::WriteAVIVideoStreamHeaderChunks()
+{
+    // Start of strh
+    const WebRtc_UWord32 strhTag = MakeFourCc('s', 't', 'r', 'h');
+    _bytesWritten += PutLE32(strhTag);
+
+    // Size is unknown at this point. Update later.
+    _bytesWritten += PutLE32(0);
+    const size_t strhSizeMark = _bytesWritten;
+
+    _bytesWritten += PutLE32(_videoStreamHeader.fccType);
+    _bytesWritten += PutLE32(_videoStreamHeader.fccHandler);
+    _bytesWritten += PutLE32(_videoStreamHeader.dwFlags);
+    _bytesWritten += PutLE16(_videoStreamHeader.wPriority);
+    _bytesWritten += PutLE16(_videoStreamHeader.wLanguage);
+    _bytesWritten += PutLE32(_videoStreamHeader.dwInitialFrames);
+    _bytesWritten += PutLE32(_videoStreamHeader.dwScale);
+    _bytesWritten += PutLE32(_videoStreamHeader.dwRate);
+    _bytesWritten += PutLE32(_videoStreamHeader.dwStart);
+
+    _videoStreamLengthMark = _bytesWritten;
+    _bytesWritten += PutLE32(_videoStreamHeader.dwLength);
+
+    _bytesWritten += PutLE32(_videoStreamHeader.dwSuggestedBufferSize);
+    _bytesWritten += PutLE32(_videoStreamHeader.dwQuality);
+    _bytesWritten += PutLE32(_videoStreamHeader.dwSampleSize);
+    _bytesWritten += PutLE16(_videoStreamHeader.rcFrame.left);
+    _bytesWritten += PutLE16(_videoStreamHeader.rcFrame.top);
+    _bytesWritten += PutLE16(_videoStreamHeader.rcFrame.right);
+    _bytesWritten += PutLE16(_videoStreamHeader.rcFrame.bottom);
+
+    PutLE32LengthFromCurrent(static_cast<long>(strhSizeMark));
+    // End of strh
+
+    // Start of strf
+    const WebRtc_UWord32 strfTag = MakeFourCc('s', 't', 'r', 'f');
+    _bytesWritten += PutLE32(strfTag);
+
+    // Size is unknown at this point. Update later.
+    _bytesWritten += PutLE32(0);
+    const size_t strfSizeMark = _bytesWritten;
+
+    _bytesWritten += PutLE32(_videoFormatHeader.biSize);
+    _bytesWritten += PutLE32(_videoFormatHeader.biWidth);
+    _bytesWritten += PutLE32(_videoFormatHeader.biHeight);
+    _bytesWritten += PutLE16(_videoFormatHeader.biPlanes);
+    _bytesWritten += PutLE16(_videoFormatHeader.biBitCount);
+    _bytesWritten += PutLE32(_videoFormatHeader.biCompression);
+    _bytesWritten += PutLE32(_videoFormatHeader.biSizeImage);
+    _bytesWritten += PutLE32(_videoFormatHeader.biXPelsPerMeter);
+    _bytesWritten += PutLE32(_videoFormatHeader.biYPelsPerMeter);
+    _bytesWritten += PutLE32(_videoFormatHeader.biClrUsed);
+    _bytesWritten += PutLE32(_videoFormatHeader.biClrImportant);
+
+    const bool isMpegFile = _videoStreamHeader.fccHandler ==
+        AviFile::MakeFourCc('M','4','S','2');
+    if (isMpegFile)
+    {
+        if (_videoCodecConfigParams && _videoCodecConfigParamsLength > 0)
+        {
+            _bytesWritten += PutBuffer(_videoCodecConfigParams,
+                                       _videoCodecConfigParamsLength);
+        }
+    }
+
+    PutLE32LengthFromCurrent(static_cast<long>(strfSizeMark));
+    // End of strf
+
+    if ( _videoCodecConfigParams
+         && (_videoCodecConfigParamsLength > 0)
+         && !isMpegFile)
+    {
+        // Write strd, unless it's an MPEG file
+        const WebRtc_UWord32 strdTag = MakeFourCc('s', 't', 'r', 'd');
+        _bytesWritten += PutLE32(strdTag);
+
+        // Size is unknown at this point. Update later.
+        _bytesWritten += PutLE32(0);
+        const size_t strdSizeMark = _bytesWritten;
+
+        _bytesWritten += PutBuffer(_videoCodecConfigParams,
+                                   _videoCodecConfigParamsLength);
+
+        PutLE32LengthFromCurrent(static_cast<long>(strdSizeMark));
+        // End of strd
+    }
+
+    // Start of strn
+    const WebRtc_UWord32 strnTag = MakeFourCc('s', 't', 'r', 'n');
+    _bytesWritten += PutLE32(strnTag);
+
+    // Size is unknown at this point. Update later.
+    _bytesWritten += PutLE32(0);
+    const size_t strnSizeMark = _bytesWritten;
+
+    _bytesWritten += PutBufferZ("WebRtc.avi ");
+
+    PutLE32LengthFromCurrent(static_cast<long>(strnSizeMark));
+    // End of strd
+
+    return 0;
+}
+
+WebRtc_Word32 AviFile::WriteAVIAudioStreamHeaders()
+{
+    // Start of LIST
+    WebRtc_UWord32 listTag = MakeFourCc('L', 'I', 'S', 'T');
+    _bytesWritten += PutLE32(listTag);
+
+    // Size is unknown at this point. Update later.
+    _bytesWritten += PutLE32(0);
+    const size_t liststrlSizeMark = _bytesWritten;
+
+    WebRtc_UWord32 hdrlTag = MakeFourCc('s', 't', 'r', 'l');
+    _bytesWritten += PutLE32(hdrlTag);
+
+    WriteAVIAudioStreamHeaderChunks();
+
+    PutLE32LengthFromCurrent(static_cast<long>(liststrlSizeMark));
+    //End of LIST
+    return 0;
+}
+
+WebRtc_Word32 AviFile::WriteAVIAudioStreamHeaderChunks()
+{
+    // Start of strh
+    const WebRtc_UWord32 strhTag = MakeFourCc('s', 't', 'r', 'h');
+    _bytesWritten += PutLE32(strhTag);
+
+    // Size is unknown at this point. Update later.
+    _bytesWritten += PutLE32(0);
+    const size_t strhSizeMark = _bytesWritten;
+
+    _bytesWritten += PutLE32(_audioStreamHeader.fccType);
+    _bytesWritten += PutLE32(_audioStreamHeader.fccHandler);
+    _bytesWritten += PutLE32(_audioStreamHeader.dwFlags);
+    _bytesWritten += PutLE16(_audioStreamHeader.wPriority);
+    _bytesWritten += PutLE16(_audioStreamHeader.wLanguage);
+    _bytesWritten += PutLE32(_audioStreamHeader.dwInitialFrames);
+    _bytesWritten += PutLE32(_audioStreamHeader.dwScale);
+    _bytesWritten += PutLE32(_audioStreamHeader.dwRate);
+    _bytesWritten += PutLE32(_audioStreamHeader.dwStart);
+
+    _audioStreamLengthMark = _bytesWritten;
+    _bytesWritten += PutLE32(_audioStreamHeader.dwLength);
+
+    _bytesWritten += PutLE32(_audioStreamHeader.dwSuggestedBufferSize);
+    _bytesWritten += PutLE32(_audioStreamHeader.dwQuality);
+    _bytesWritten += PutLE32(_audioStreamHeader.dwSampleSize);
+    _bytesWritten += PutLE16(_audioStreamHeader.rcFrame.left);
+    _bytesWritten += PutLE16(_audioStreamHeader.rcFrame.top);
+    _bytesWritten += PutLE16(_audioStreamHeader.rcFrame.right);
+    _bytesWritten += PutLE16(_audioStreamHeader.rcFrame.bottom);
+
+    PutLE32LengthFromCurrent(static_cast<long>(strhSizeMark));
+    // End of strh
+
+    // Start of strf
+    const WebRtc_UWord32 strfTag = MakeFourCc('s', 't', 'r', 'f');
+    _bytesWritten += PutLE32(strfTag);
+
+    // Size is unknown at this point. Update later.
+    _bytesWritten += PutLE32(0);
+    const size_t strfSizeMark = _bytesWritten;
+
+    _bytesWritten += PutLE16(_audioFormatHeader.wFormatTag);
+    _bytesWritten += PutLE16(_audioFormatHeader.nChannels);
+    _bytesWritten += PutLE32(_audioFormatHeader.nSamplesPerSec);
+    _bytesWritten += PutLE32(_audioFormatHeader.nAvgBytesPerSec);
+    _bytesWritten += PutLE16(_audioFormatHeader.nBlockAlign);
+    _bytesWritten += PutLE16(_audioFormatHeader.wBitsPerSample);
+    _bytesWritten += PutLE16(_audioFormatHeader.cbSize);
+
+    PutLE32LengthFromCurrent(static_cast<long>(strfSizeMark));
+    // End end of strf.
+
+    // Audio doesn't have strd.
+
+    // Start of strn
+    const WebRtc_UWord32 strnTag = MakeFourCc('s', 't', 'r', 'n');
+    _bytesWritten += PutLE32(strnTag);
+
+    // Size is unknown at this point. Update later.
+    _bytesWritten += PutLE32(0);
+    const size_t strnSizeMark = _bytesWritten;
+
+    _bytesWritten += PutBufferZ("WebRtc.avi ");
+
+    PutLE32LengthFromCurrent(static_cast<long>(strnSizeMark));
+    // End of strd.
+
+    return 0;
+}
+
+WebRtc_Word32 AviFile::WriteMoviStart()
+{
+    // Create template movi list. Fill out size when known (i.e. when closing
+    // file).
+    const WebRtc_UWord32 listTag = MakeFourCc('L', 'I', 'S', 'T');
+    _bytesWritten += PutLE32(listTag);
+
+    _bytesWritten += PutLE32(0); //Size! Change later!
+    _moviSizeMark = _bytesWritten;
+    _moviListOffset = ftell(_aviFile);
+
+    const WebRtc_UWord32 moviTag = MakeFourCc('m', 'o', 'v', 'i');
+    _bytesWritten += PutLE32(moviTag);
+
+    return 0;
+}
+
+size_t AviFile::PutByte(WebRtc_UWord8 byte)
+{
+    return fwrite(&byte, sizeof(WebRtc_UWord8), sizeof(WebRtc_UWord8),
+                  _aviFile);
+}
+
+size_t AviFile::PutLE16(WebRtc_UWord16 word)
+{
+    return fwrite(&word, sizeof(WebRtc_UWord8), sizeof(WebRtc_UWord16),
+                  _aviFile);
+}
+
+size_t AviFile::PutLE32(WebRtc_UWord32 word)
+{
+    return fwrite(&word, sizeof(WebRtc_UWord8), sizeof(WebRtc_UWord32),
+                  _aviFile);
+}
+
+size_t AviFile::PutBuffer(const WebRtc_UWord8* str, size_t size)
+{
+    return fwrite(str, sizeof(WebRtc_UWord8), size,
+                  _aviFile);
+}
+
+size_t AviFile::PutBufferZ(const char* str)
+{
+    // Include NULL charachter, hence the + 1
+    return PutBuffer(reinterpret_cast<const WebRtc_UWord8*>(str),
+                     strlen(str) + 1);
+}
+
+long AviFile::PutLE32LengthFromCurrent(long startPos)
+{
+    const long endPos = ftell(_aviFile);
+    if (endPos < 0) {
+        return 0;
+    }
+    bool success = (0 == fseek(_aviFile, startPos - 4, SEEK_SET));
+    if (!success) {
+        assert(false);
+        return 0;
+    }
+    const long len = endPos - startPos;
+    if (endPos > startPos) {
+        PutLE32(len);
+    }
+    else {
+        assert(false);
+    }
+    success = (0 == fseek(_aviFile, endPos, SEEK_SET));
+    assert(success);
+    return len;
+}
+
+void AviFile::PutLE32AtPos(long pos, WebRtc_UWord32 word)
+{
+    const long currPos = ftell(_aviFile);
+    if (currPos < 0) {
+        assert(false);
+        return;
+    }
+    bool success = (0 == fseek(_aviFile, pos, SEEK_SET));
+    if (!success) {
+      assert(false);
+      return;
+    }
+    PutLE32(word);
+    success = (0 == fseek(_aviFile, currPos, SEEK_SET));
+    assert(success);
+}
+
+void AviFile::CloseRead()
+{
+    if (_aviFile)
+    {
+        fclose(_aviFile);
+        _aviFile = NULL;
+    }
+}
+
+void AviFile::CloseWrite()
+{
+    if (_created)
+    {
+        // Update everything that isn't known until the file is closed. The
+        // marks indicate where in the headers this update should be.
+        PutLE32LengthFromCurrent(static_cast<long>(_moviSizeMark));
+
+        PutLE32AtPos(static_cast<long>(_totNumFramesMark), _videoFrames);
+
+        if (_writeVideoStream)
+        {
+            PutLE32AtPos(static_cast<long>(_videoStreamLengthMark),
+                         _videoFrames);
+        }
+
+        if (_writeAudioStream)
+        {
+            PutLE32AtPos(static_cast<long>(_audioStreamLengthMark),
+                         _audioFrames);
+        }
+
+        WriteIndex();
+        PutLE32LengthFromCurrent(static_cast<long>(_riffSizeMark));
+        ClearIndexList();
+
+        if (_aviFile)
+        {
+            fclose(_aviFile);
+            _aviFile = NULL;
+        }
+    }
+}
+
+void AviFile::ResetMembers()
+{
+    ResetComplexMembers();
+
+    _aviFile = NULL;
+
+    _nrStreams     = 0;
+    _aviLength     = 0;
+    _dataLength    = 0;
+    _bytesRead     = 0;
+    _dataStartByte = 0;
+    _framesRead    = 0;
+    _videoFrames   = 0;
+    _audioFrames   = 0;
+
+    _reading = false;
+    _openedAs = AVI_AUDIO;
+    _loop = false;
+    _writing = false;
+
+    _bytesWritten          = 0;
+
+    _riffSizeMark          = 0;
+    _moviSizeMark          = 0;
+    _totNumFramesMark      = 0;
+    _videoStreamLengthMark = 0;
+    _audioStreamLengthMark = 0;
+
+    _writeAudioStream = false;
+    _writeVideoStream = false;
+
+    _aviMode                      = NotSet;
+    _videoCodecConfigParams       = 0;
+    _videoCodecConfigParamsLength = 0;
+
+    _videoStreamDataChunkPrefix = 0;
+    _audioStreamDataChunkPrefix = 0;
+
+    _created = false;
+
+    _moviListOffset = 0;
+
+    _videoConfigLength = 0;
+}
+
+void AviFile::ResetComplexMembers()
+{
+    memset(&_aviHeader, 0, sizeof(AVIMAINHEADER));
+    memset(&_videoStreamHeader, 0, sizeof(AVISTREAMHEADER));
+    memset(&_audioStreamHeader, 0, sizeof(AVISTREAMHEADER));
+    memset(&_videoFormatHeader, 0, sizeof(BITMAPINFOHEADER));
+    memset(&_audioFormatHeader, 0, sizeof(WAVEFORMATEX));
+    memset(_videoConfigParameters, 0, CODEC_CONFIG_LENGTH);
+    memset(_videoStreamName, 0, STREAM_NAME_LENGTH);
+    memset(_audioStreamName, 0, STREAM_NAME_LENGTH);
+    memset(&_videoStream, 0, sizeof(AVIStream));
+    memset(&_audioStream, 0, sizeof(AVIStream));
+}
+
+size_t AviFile::GetByte(WebRtc_UWord8& word)
+{
+    return fread(&word, sizeof(WebRtc_UWord8), sizeof(WebRtc_UWord8), _aviFile);
+}
+
+size_t AviFile::GetLE16(WebRtc_UWord16& word)
+{
+    return fread(&word, sizeof(WebRtc_UWord8), sizeof(WebRtc_UWord16),
+                 _aviFile);
+}
+
+size_t AviFile::GetLE32(WebRtc_UWord32& word)
+{
+    return fread(&word, sizeof(WebRtc_UWord8), sizeof(WebRtc_UWord32),
+                 _aviFile);
+}
+
+size_t AviFile::GetBuffer(WebRtc_UWord8* str, size_t size)
+{
+    return fread(str, sizeof(WebRtc_UWord8), size, _aviFile);
+}
+
+WebRtc_Word32 AviFile::ReadRIFF()
+{
+    WebRtc_UWord32 tag;
+    _bytesRead = GetLE32(tag);
+    if (tag != MakeFourCc('R', 'I', 'F', 'F'))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,  "Not a RIFF file!");
+        return -1;
+    }
+
+    WebRtc_UWord32 size;
+    _bytesRead += GetLE32(size);
+    _aviLength = size;
+
+    _bytesRead += GetLE32(tag);
+    if (tag != MakeFourCc('A', 'V', 'I', ' '))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,  "Not an AVI file!");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AviFile::ReadHeaders()
+{
+    WebRtc_UWord32 tag;
+    _bytesRead += GetLE32(tag);
+    WebRtc_UWord32 size;
+    _bytesRead += GetLE32(size);
+
+    if (tag != MakeFourCc('L', 'I', 'S', 'T'))
+    {
+        return -1;
+    }
+
+    WebRtc_UWord32 listTag;
+    _bytesRead += GetLE32(listTag);
+    if (listTag != MakeFourCc('h', 'd', 'r', 'l'))
+    {
+        return -1;
+    }
+
+    WebRtc_Word32 err = ReadAVIMainHeader();
+    if (err)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 AviFile::ReadAVIMainHeader()
+{
+    _bytesRead += GetLE32(_aviHeader.fcc);
+    _bytesRead += GetLE32(_aviHeader.cb);
+    _bytesRead += GetLE32(_aviHeader.dwMicroSecPerFrame);
+    _bytesRead += GetLE32(_aviHeader.dwMaxBytesPerSec);
+    _bytesRead += GetLE32(_aviHeader.dwPaddingGranularity);
+    _bytesRead += GetLE32(_aviHeader.dwFlags);
+    _bytesRead += GetLE32(_aviHeader.dwTotalFrames);
+    _bytesRead += GetLE32(_aviHeader.dwInitialFrames);
+    _bytesRead += GetLE32(_aviHeader.dwStreams);
+    _bytesRead += GetLE32(_aviHeader.dwSuggestedBufferSize);
+    _bytesRead += GetLE32(_aviHeader.dwWidth);
+    _bytesRead += GetLE32(_aviHeader.dwHeight);
+    _bytesRead += GetLE32(_aviHeader.dwReserved[0]);
+    _bytesRead += GetLE32(_aviHeader.dwReserved[1]);
+    _bytesRead += GetLE32(_aviHeader.dwReserved[2]);
+    _bytesRead += GetLE32(_aviHeader.dwReserved[3]);
+
+    if (_aviHeader.fcc != MakeFourCc('a', 'v', 'i', 'h'))
+    {
+        return -1;
+    }
+
+    if (_aviHeader.dwFlags & kAvifMustuseindex)
+    {
+        return -1;
+    }
+
+    bool readVideoStreamHeader = false;
+    bool readAudioStreamHeader = false;
+    unsigned int streamsRead = 0;
+    while (_aviHeader.dwStreams > streamsRead)
+    {
+        WebRtc_UWord32 strltag;
+        _bytesRead += GetLE32(strltag);
+        WebRtc_UWord32 strlsize;
+        _bytesRead += GetLE32(strlsize);
+        const long endSeekPos = ftell(_aviFile) +
+            static_cast<WebRtc_Word32>(strlsize);
+
+        if (strltag != MakeFourCc('L', 'I', 'S', 'T'))
+        {
+            return -1;
+        }
+
+        WebRtc_UWord32 listTag;
+        _bytesRead += GetLE32(listTag);
+        if (listTag != MakeFourCc('s', 't', 'r', 'l'))
+        {
+            return -1;
+        }
+
+        WebRtc_UWord32 chunktag;
+        _bytesRead += GetLE32(chunktag);
+        WebRtc_UWord32 chunksize;
+        _bytesRead += GetLE32(chunksize);
+
+        if (chunktag != MakeFourCc('s', 't', 'r', 'h'))
+        {
+            return -1;
+        }
+
+        AVISTREAMHEADER tmpStreamHeader;
+        tmpStreamHeader.fcc = chunktag;
+        tmpStreamHeader.cb  = chunksize;
+
+        _bytesRead += GetLE32(tmpStreamHeader.fccType);
+        _bytesRead += GetLE32(tmpStreamHeader.fccHandler);
+        _bytesRead += GetLE32(tmpStreamHeader.dwFlags);
+        _bytesRead += GetLE16(tmpStreamHeader.wPriority);
+        _bytesRead += GetLE16(tmpStreamHeader.wLanguage);
+        _bytesRead += GetLE32(tmpStreamHeader.dwInitialFrames);
+        _bytesRead += GetLE32(tmpStreamHeader.dwScale);
+        _bytesRead += GetLE32(tmpStreamHeader.dwRate);
+        _bytesRead += GetLE32(tmpStreamHeader.dwStart);
+        _bytesRead += GetLE32(tmpStreamHeader.dwLength);
+        _bytesRead += GetLE32(tmpStreamHeader.dwSuggestedBufferSize);
+        _bytesRead += GetLE32(tmpStreamHeader.dwQuality);
+        _bytesRead += GetLE32(tmpStreamHeader.dwSampleSize);
+
+        WebRtc_UWord16 left;
+        _bytesRead += GetLE16(left);
+        tmpStreamHeader.rcFrame.left = left;
+        WebRtc_UWord16 top;
+        _bytesRead += GetLE16(top);
+        tmpStreamHeader.rcFrame.top = top;
+        WebRtc_UWord16 right;
+        _bytesRead += GetLE16(right);
+        tmpStreamHeader.rcFrame.right = right;
+        WebRtc_UWord16 bottom;
+        _bytesRead += GetLE16(bottom);
+        tmpStreamHeader.rcFrame.bottom = bottom;
+
+        if (!readVideoStreamHeader
+            && (tmpStreamHeader.fccType == MakeFourCc('v', 'i', 'd', 's')))
+        {
+            _videoStreamHeader = tmpStreamHeader; //Bitwise copy is OK!
+            const WebRtc_Word32 err = ReadAVIVideoStreamHeader(endSeekPos);
+            if (err)
+            {
+                return -1;
+            }
+            // Make sure there actually is video data in the file...
+            if (_videoStreamHeader.dwLength == 0)
+            {
+                return -1;
+            }
+            readVideoStreamHeader = true;
+        } else if(!readAudioStreamHeader &&
+                  (tmpStreamHeader.fccType == MakeFourCc('a', 'u', 'd', 's'))) {
+            _audioStreamHeader = tmpStreamHeader;
+            const WebRtc_Word32 err = ReadAVIAudioStreamHeader(endSeekPos);
+            if (err)
+            {
+                return -1;
+            }
+            readAudioStreamHeader = true;
+        }
+        else
+        {
+            fseek(_aviFile, endSeekPos, SEEK_SET);
+            _bytesRead += endSeekPos;
+        }
+
+        ++streamsRead;
+    }
+
+    if (!readVideoStreamHeader && !readAudioStreamHeader)
+    {
+        return -1;
+    }
+
+    WebRtc_UWord32 tag;
+    _bytesRead += GetLE32(tag);
+    WebRtc_UWord32 size;
+    _bytesRead += GetLE32(size);
+
+    if (tag == MakeFourCc('J', 'U', 'N', 'K'))
+    {
+        fseek(_aviFile, size, SEEK_CUR);
+        _bytesRead += size;
+        _bytesRead += GetLE32(tag);
+        _bytesRead += GetLE32(size);
+    }
+    if (tag != MakeFourCc('L', 'I', 'S', 'T'))
+    {
+        return -1;
+    }
+    WebRtc_UWord32 listTag;
+    _bytesRead += GetLE32(listTag);
+    if (listTag != MakeFourCc('m', 'o', 'v', 'i'))
+    {
+        return -1;
+    }
+    _dataLength = size;
+    return 0;
+}
+
+WebRtc_Word32 AviFile::ReadAVIVideoStreamHeader(WebRtc_Word32 endpos)
+{
+    WebRtc_UWord32 chunktag;
+    _bytesRead += GetLE32(chunktag);
+    WebRtc_UWord32 chunksize;
+    _bytesRead += GetLE32(chunksize);
+
+    if (chunktag != MakeFourCc('s', 't', 'r', 'f'))
+    {
+        return -1;
+    }
+
+    _bytesRead += GetLE32(_videoFormatHeader.biSize);
+    _bytesRead += GetLE32(_videoFormatHeader.biWidth);
+    _bytesRead += GetLE32(_videoFormatHeader.biHeight);
+    _bytesRead += GetLE16(_videoFormatHeader.biPlanes);
+    _bytesRead += GetLE16(_videoFormatHeader.biBitCount);
+    _bytesRead += GetLE32(_videoFormatHeader.biCompression);
+    _bytesRead += GetLE32(_videoFormatHeader.biSizeImage);
+    _bytesRead += GetLE32(_videoFormatHeader.biXPelsPerMeter);
+    _bytesRead += GetLE32(_videoFormatHeader.biYPelsPerMeter);
+    _bytesRead += GetLE32(_videoFormatHeader.biClrUsed);
+    _bytesRead += GetLE32(_videoFormatHeader.biClrImportant);
+
+    if (chunksize >  _videoFormatHeader.biSize)
+    {
+        const WebRtc_UWord32 size = chunksize - _videoFormatHeader.biSize;
+        const WebRtc_UWord32 readSize = MinValue(size, CODEC_CONFIG_LENGTH);
+        _bytesRead += GetBuffer(
+            reinterpret_cast<WebRtc_UWord8*>(_videoConfigParameters), readSize);
+        _videoConfigLength = readSize;
+        WebRtc_Word32 skipSize = chunksize - _videoFormatHeader.biSize -
+            readSize;
+        if (skipSize > 0)
+        {
+            fseek(_aviFile, skipSize, SEEK_CUR);
+            _bytesRead += skipSize;
+        }
+    }
+
+    while (static_cast<long>(_bytesRead) < endpos)
+    {
+        WebRtc_UWord32 chunktag;
+        _bytesRead += GetLE32(chunktag);
+        WebRtc_UWord32 chunksize;
+        _bytesRead += GetLE32(chunksize);
+
+        if (chunktag == MakeFourCc('s', 't', 'r', 'n'))
+        {
+            const WebRtc_UWord32 size = MinValue(chunksize, STREAM_NAME_LENGTH);
+            _bytesRead += GetBuffer(
+                reinterpret_cast<WebRtc_UWord8*>(_videoStreamName), size);
+        }
+        else if (chunktag == MakeFourCc('s', 't', 'r', 'd'))
+        {
+            const WebRtc_UWord32 size = MinValue(chunksize,
+                                                 CODEC_CONFIG_LENGTH);
+            _bytesRead += GetBuffer(
+                reinterpret_cast<WebRtc_UWord8*>(_videoConfigParameters), size);
+            _videoConfigLength = size;
+        }
+        else
+        {
+            fseek(_aviFile, chunksize, SEEK_CUR);
+            _bytesRead += chunksize;
+        }
+
+        if (feof(_aviFile))
+        {
+            return -1;
+        }
+    }
+    _videoStream.streamType = AviFile::AVI_VIDEO;
+    _videoStream.streamNumber = _nrStreams++;
+
+    return 0;
+}
+
+WebRtc_Word32 AviFile::ReadAVIAudioStreamHeader(WebRtc_Word32 endpos)
+{
+    WebRtc_UWord32 chunktag;
+    _bytesRead += GetLE32(chunktag);
+    WebRtc_UWord32 chunksize;
+    _bytesRead += GetLE32(chunksize);
+
+    if (chunktag != MakeFourCc('s', 't', 'r', 'f'))
+    {
+        return -1;
+    }
+
+    const size_t startRead = _bytesRead;
+    _bytesRead += GetLE16(_audioFormatHeader.wFormatTag);
+    _bytesRead += GetLE16(_audioFormatHeader.nChannels);
+    _bytesRead += GetLE32(_audioFormatHeader.nSamplesPerSec);
+    _bytesRead += GetLE32(_audioFormatHeader.nAvgBytesPerSec);
+    _bytesRead += GetLE16(_audioFormatHeader.nBlockAlign);
+    _bytesRead += GetLE16(_audioFormatHeader.wBitsPerSample);
+    if (chunksize > 0x10) {
+        _bytesRead += GetLE16(_audioFormatHeader.cbSize);
+    }
+
+    const WebRtc_UWord32 diffRead = chunksize - (_bytesRead - startRead);
+    if (diffRead > 0)
+    {
+        const WebRtc_UWord32 size = MinValue(diffRead, CODEC_CONFIG_LENGTH);
+        _bytesRead += GetBuffer(
+            reinterpret_cast<WebRtc_UWord8*>(_audioConfigParameters), size);
+    }
+
+    while (static_cast<long>(_bytesRead) < endpos)
+    {
+        WebRtc_UWord32 chunktag;
+        _bytesRead += GetLE32(chunktag);
+        WebRtc_UWord32 chunksize;
+        _bytesRead += GetLE32(chunksize);
+
+        if (chunktag == MakeFourCc('s', 't', 'r', 'n'))
+        {
+            const WebRtc_UWord32 size = MinValue(chunksize, STREAM_NAME_LENGTH);
+            _bytesRead += GetBuffer(
+                reinterpret_cast<WebRtc_UWord8*>(_audioStreamName), size);
+        }
+        else if (chunktag == MakeFourCc('s', 't', 'r', 'd'))
+        {
+            const WebRtc_UWord32 size = MinValue(chunksize,
+                                                 CODEC_CONFIG_LENGTH);
+            _bytesRead += GetBuffer(
+                reinterpret_cast<WebRtc_UWord8*>(_audioConfigParameters), size);
+        }
+        else
+        {
+            fseek(_aviFile, chunksize, SEEK_CUR);
+            _bytesRead += chunksize;
+        }
+
+        if (feof(_aviFile))
+        {
+            return -1;
+        }
+    }
+    _audioStream.streamType = AviFile::AVI_AUDIO;
+    _audioStream.streamNumber = _nrStreams++;
+    return 0;
+}
+
+WebRtc_UWord32 AviFile::StreamAndTwoCharCodeToTag(WebRtc_Word32 streamNum,
+                                                  const char* twoCharCode)
+{
+    WebRtc_UWord8 a = '0';
+    WebRtc_UWord8 b;
+    switch (streamNum)
+    {
+    case 1:
+        b = '1';
+        break;
+    case 2:
+        b = '2';
+        break;
+    default:
+        b = '0';
+    }
+    return MakeFourCc(a, b, twoCharCode[0], twoCharCode[1]);
+}
+
+void AviFile::ClearIndexList()
+{
+    while (!_indexList->Empty())
+    {
+        ListItem* listItem = _indexList->First();
+        if (listItem == 0)
+        {
+            break;
+        }
+
+        AVIINDEXENTRY* item = static_cast<AVIINDEXENTRY*>(listItem->GetItem());
+        if (item != NULL)
+        {
+            delete item;
+        }
+        _indexList->PopFront();
+    }
+}
+
+void AviFile::AddChunkToIndexList(WebRtc_UWord32 inChunkId,
+                                  WebRtc_UWord32 inFlags,
+                                  WebRtc_UWord32 inOffset,
+                                  WebRtc_UWord32 inSize)
+{
+    _indexList->PushBack(new AVIINDEXENTRY(inChunkId, inFlags, inOffset,
+                                           inSize));
+}
+
+void AviFile::WriteIndex()
+{
+    const WebRtc_UWord32 idxTag = MakeFourCc('i', 'd', 'x', '1');
+    _bytesWritten += PutLE32(idxTag);
+
+    // Size is unknown at this point. Update later.
+    _bytesWritten += PutLE32(0);
+    const size_t idxChunkSize = _bytesWritten;
+
+    for (ListItem* listItem = _indexList->First();
+         listItem != NULL;
+         listItem = _indexList->Next(listItem))
+    {
+        const AVIINDEXENTRY* item =
+            static_cast<AVIINDEXENTRY*>(listItem->GetItem());
+        if (item != NULL)
+        {
+            _bytesWritten += PutLE32(item->ckid);
+            _bytesWritten += PutLE32(item->dwFlags);
+            _bytesWritten += PutLE32(item->dwChunkOffset);
+            _bytesWritten += PutLE32(item->dwChunkLength);
+        }
+    }
+    PutLE32LengthFromCurrent(static_cast<long>(idxChunkSize));
+}
+} // namespace webrtc
diff --git a/src/modules/media_file/source/avi_file.h b/src/modules/media_file/source/avi_file.h
new file mode 100644
index 0000000..fe70692
--- /dev/null
+++ b/src/modules/media_file/source/avi_file.h
@@ -0,0 +1,277 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Class for reading (x)or writing to an AVI file.
+// Note: the class cannot be used for reading and writing at the same time.
+#ifndef WEBRTC_MODULES_MEDIA_FILE_SOURCE_AVI_FILE_H_
+#define WEBRTC_MODULES_MEDIA_FILE_SOURCE_AVI_FILE_H_
+
+#include <stdio.h>
+
+#include "typedefs.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class ListWrapper;
+
+struct AVISTREAMHEADER
+{
+    AVISTREAMHEADER();
+    WebRtc_UWord32 fcc;
+    WebRtc_UWord32 cb;
+    WebRtc_UWord32 fccType;
+    WebRtc_UWord32 fccHandler;
+    WebRtc_UWord32 dwFlags;
+    WebRtc_UWord16 wPriority;
+    WebRtc_UWord16 wLanguage;
+    WebRtc_UWord32 dwInitialFrames;
+    WebRtc_UWord32 dwScale;
+    WebRtc_UWord32 dwRate;
+    WebRtc_UWord32 dwStart;
+    WebRtc_UWord32 dwLength;
+    WebRtc_UWord32 dwSuggestedBufferSize;
+    WebRtc_UWord32 dwQuality;
+    WebRtc_UWord32 dwSampleSize;
+    struct
+    {
+        WebRtc_Word16 left;
+        WebRtc_Word16 top;
+        WebRtc_Word16 right;
+        WebRtc_Word16 bottom;
+    } rcFrame;
+};
+
+struct BITMAPINFOHEADER
+{
+    BITMAPINFOHEADER();
+    WebRtc_UWord32 biSize;
+    WebRtc_UWord32 biWidth;
+    WebRtc_UWord32 biHeight;
+    WebRtc_UWord16 biPlanes;
+    WebRtc_UWord16 biBitCount;
+    WebRtc_UWord32 biCompression;
+    WebRtc_UWord32 biSizeImage;
+    WebRtc_UWord32 biXPelsPerMeter;
+    WebRtc_UWord32 biYPelsPerMeter;
+    WebRtc_UWord32 biClrUsed;
+    WebRtc_UWord32 biClrImportant;
+};
+
+struct WAVEFORMATEX
+{
+    WAVEFORMATEX();
+    WebRtc_UWord16 wFormatTag;
+    WebRtc_UWord16 nChannels;
+    WebRtc_UWord32 nSamplesPerSec;
+    WebRtc_UWord32 nAvgBytesPerSec;
+    WebRtc_UWord16 nBlockAlign;
+    WebRtc_UWord16 wBitsPerSample;
+    WebRtc_UWord16 cbSize;
+};
+
+class AviFile
+{
+public:
+    enum AVIStreamType
+    {
+        AVI_AUDIO = 0,
+        AVI_VIDEO = 1
+    };
+
+    // Unsigned, for comparison with must-be-unsigned types.
+    static const unsigned int CODEC_CONFIG_LENGTH = 64;
+    static const unsigned int STREAM_NAME_LENGTH  = 32;
+
+    AviFile();
+    ~AviFile();
+
+    WebRtc_Word32 Open(AVIStreamType streamType, const char* fileName,
+                       bool loop = false);
+
+    WebRtc_Word32 CreateVideoStream(const AVISTREAMHEADER& videoStreamHeader,
+                                    const BITMAPINFOHEADER& bitMapInfoHeader,
+                                    const WebRtc_UWord8* codecConfigParams,
+                                    WebRtc_Word32 codecConfigParamsLength);
+
+    WebRtc_Word32 CreateAudioStream(const AVISTREAMHEADER& audioStreamHeader,
+                                    const WAVEFORMATEX& waveFormatHeader);
+    WebRtc_Word32 Create(const char* fileName);
+
+    WebRtc_Word32 WriteAudio(const WebRtc_UWord8* data, WebRtc_Word32 length);
+    WebRtc_Word32 WriteVideo(const WebRtc_UWord8* data, WebRtc_Word32 length);
+
+    WebRtc_Word32 GetVideoStreamInfo(AVISTREAMHEADER& videoStreamHeader,
+                                     BITMAPINFOHEADER& bitmapInfo,
+                                     char* codecConfigParameters,
+                                     WebRtc_Word32& configLength);
+
+    WebRtc_Word32 GetDuration(WebRtc_Word32& durationMs);
+
+    WebRtc_Word32 GetAudioStreamInfo(WAVEFORMATEX& waveHeader);
+
+    WebRtc_Word32 ReadAudio(WebRtc_UWord8* data, WebRtc_Word32& length);
+    WebRtc_Word32 ReadVideo(WebRtc_UWord8* data, WebRtc_Word32& length);
+
+    WebRtc_Word32 Close();
+
+    static WebRtc_UWord32 MakeFourCc(WebRtc_UWord8 ch0, WebRtc_UWord8 ch1,
+                                     WebRtc_UWord8 ch2, WebRtc_UWord8 ch3);
+
+private:
+    enum AVIFileMode
+    {
+        NotSet,
+        Read,
+        Write
+    };
+
+    struct AVIINDEXENTRY
+    {
+        AVIINDEXENTRY(WebRtc_UWord32 inckid, WebRtc_UWord32 indwFlags,
+                      WebRtc_UWord32 indwChunkOffset,
+                      WebRtc_UWord32 indwChunkLength);
+        WebRtc_UWord32 ckid;
+        WebRtc_UWord32 dwFlags;
+        WebRtc_UWord32 dwChunkOffset;
+        WebRtc_UWord32 dwChunkLength;
+    };
+
+    WebRtc_Word32 PrepareDataChunkHeaders();
+
+    WebRtc_Word32 ReadMoviSubChunk(WebRtc_UWord8* data, WebRtc_Word32& length,
+                                   WebRtc_UWord32 tag1,
+                                   WebRtc_UWord32 tag2 = 0);
+
+    WebRtc_Word32 WriteRIFF();
+    WebRtc_Word32 WriteHeaders();
+    WebRtc_Word32 WriteAVIMainHeader();
+    WebRtc_Word32 WriteAVIStreamHeaders();
+    WebRtc_Word32 WriteAVIVideoStreamHeaders();
+    WebRtc_Word32 WriteAVIVideoStreamHeaderChunks();
+    WebRtc_Word32 WriteAVIAudioStreamHeaders();
+    WebRtc_Word32 WriteAVIAudioStreamHeaderChunks();
+
+    WebRtc_Word32 WriteMoviStart();
+
+    size_t PutByte(WebRtc_UWord8 byte);
+    size_t PutLE16(WebRtc_UWord16 word);
+    size_t PutLE32(WebRtc_UWord32 word);
+    size_t PutBuffer(const WebRtc_UWord8* str, size_t size);
+    size_t PutBufferZ(const char* str);
+    long PutLE32LengthFromCurrent(long startPos);
+    void PutLE32AtPos(long pos, WebRtc_UWord32 word);
+
+    size_t GetByte(WebRtc_UWord8& word);
+    size_t GetLE16(WebRtc_UWord16& word);
+    size_t GetLE32(WebRtc_UWord32& word);
+    size_t GetBuffer(WebRtc_UWord8* str, size_t size);
+
+    void CloseRead();
+    void CloseWrite();
+
+    void ResetMembers();
+    void ResetComplexMembers();
+
+    WebRtc_Word32 ReadRIFF();
+    WebRtc_Word32 ReadHeaders();
+    WebRtc_Word32 ReadAVIMainHeader();
+    WebRtc_Word32 ReadAVIVideoStreamHeader(WebRtc_Word32 endpos);
+    WebRtc_Word32 ReadAVIAudioStreamHeader(WebRtc_Word32 endpos);
+
+    WebRtc_UWord32 StreamAndTwoCharCodeToTag(WebRtc_Word32 streamNum,
+                                             const char* twoCharCode);
+
+    void ClearIndexList();
+    void AddChunkToIndexList(WebRtc_UWord32 inChunkId, WebRtc_UWord32 inFlags,
+                             WebRtc_UWord32 inOffset,  WebRtc_UWord32 inSize);
+
+    void WriteIndex();
+
+private:
+    struct AVIMAINHEADER
+    {
+        AVIMAINHEADER();
+        WebRtc_UWord32 fcc;
+        WebRtc_UWord32 cb;
+        WebRtc_UWord32 dwMicroSecPerFrame;
+        WebRtc_UWord32 dwMaxBytesPerSec;
+        WebRtc_UWord32 dwPaddingGranularity;
+        WebRtc_UWord32 dwFlags;
+        WebRtc_UWord32 dwTotalFrames;
+        WebRtc_UWord32 dwInitialFrames;
+        WebRtc_UWord32 dwStreams;
+        WebRtc_UWord32 dwSuggestedBufferSize;
+        WebRtc_UWord32 dwWidth;
+        WebRtc_UWord32 dwHeight;
+        WebRtc_UWord32 dwReserved[4];
+    };
+
+    struct AVIStream
+    {
+        AVIStreamType streamType;
+        int           streamNumber;
+    };
+
+    CriticalSectionWrapper* _crit;
+    FILE*            _aviFile;
+    AVIMAINHEADER    _aviHeader;
+    AVISTREAMHEADER  _videoStreamHeader;
+    AVISTREAMHEADER  _audioStreamHeader;
+    BITMAPINFOHEADER _videoFormatHeader;
+    WAVEFORMATEX     _audioFormatHeader;
+
+    WebRtc_Word8 _videoConfigParameters[CODEC_CONFIG_LENGTH];
+    WebRtc_Word32 _videoConfigLength;
+    WebRtc_Word8 _videoStreamName[STREAM_NAME_LENGTH];
+    WebRtc_Word8 _audioConfigParameters[CODEC_CONFIG_LENGTH];
+    WebRtc_Word8 _audioStreamName[STREAM_NAME_LENGTH];
+
+    AVIStream _videoStream;
+    AVIStream _audioStream;
+
+    WebRtc_Word32 _nrStreams;
+    WebRtc_Word32 _aviLength;
+    WebRtc_Word32 _dataLength;
+    size_t        _bytesRead;
+    size_t        _dataStartByte;
+    WebRtc_Word32 _framesRead;
+    WebRtc_Word32 _videoFrames;
+    WebRtc_Word32 _audioFrames;
+
+    bool _reading;
+    AVIStreamType _openedAs;
+    bool _loop;
+    bool _writing;
+
+    size_t _bytesWritten;
+
+    size_t _riffSizeMark;
+    size_t _moviSizeMark;
+    size_t _totNumFramesMark;
+    size_t _videoStreamLengthMark;
+    size_t _audioStreamLengthMark;
+    WebRtc_Word32 _moviListOffset;
+
+    bool _writeAudioStream;
+    bool _writeVideoStream;
+
+    AVIFileMode _aviMode;
+    WebRtc_UWord8* _videoCodecConfigParams;
+    WebRtc_Word32 _videoCodecConfigParamsLength;
+
+    WebRtc_UWord32 _videoStreamDataChunkPrefix;
+    WebRtc_UWord32 _audioStreamDataChunkPrefix;
+    bool _created;
+
+    ListWrapper* _indexList; // Elements are of type AVIINDEXENTRY.
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_MEDIA_FILE_SOURCE_AVI_FILE_H_
diff --git a/src/modules/media_file/source/media_file.gypi b/src/modules/media_file/source/media_file.gypi
new file mode 100644
index 0000000..f94a618
--- /dev/null
+++ b/src/modules/media_file/source/media_file.gypi
@@ -0,0 +1,67 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'media_file',
+      'type': '<(library)',
+      'dependencies': [
+        'webrtc_utility',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'defines': [
+        'WEBRTC_MODULE_UTILITY_VIDEO', # for compiling support for video recording
+      ],
+      'include_dirs': [
+        '../interface',
+        '../../interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../interface',
+          '../../interface',
+        ],
+      },
+      'sources': [
+        '../interface/media_file.h',
+        '../interface/media_file_defines.h',
+        'avi_file.cc',
+        'avi_file.h',
+        'media_file_impl.cc',
+        'media_file_impl.h',
+        'media_file_utility.cc',
+        'media_file_utility.h',
+      ], # source
+    },
+  ], # targets
+  'conditions': [
+    ['include_tests==1', {
+      'targets': [
+        {
+          'target_name': 'media_file_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'media_file',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+          ],
+          'sources': [
+            'media_file_unittest.cc',
+          ],
+        }, # media_file_unittests
+      ], # targets
+    }], # include_tests
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/modules/media_file/source/media_file_impl.cc b/src/modules/media_file/source/media_file_impl.cc
new file mode 100644
index 0000000..206c8d8
--- /dev/null
+++ b/src/modules/media_file/source/media_file_impl.cc
@@ -0,0 +1,1372 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+
+#include "critical_section_wrapper.h"
+#include "file_wrapper.h"
+#include "media_file_impl.h"
+#include "tick_util.h"
+#include "trace.h"
+
+#if (defined(WIN32) || defined(WINCE))
+    #define STR_CASE_CMP _stricmp
+    #define STR_NCASE_CMP _strnicmp
+#else
+    #define STR_CASE_CMP strcasecmp
+    #define STR_NCASE_CMP strncasecmp
+#endif
+
+namespace webrtc {
+MediaFile* MediaFile::CreateMediaFile(const WebRtc_Word32 id)
+{
+    return new MediaFileImpl(id);
+}
+
+void MediaFile::DestroyMediaFile(MediaFile* module)
+{
+    delete static_cast<MediaFileImpl*>(module);
+}
+
+MediaFileImpl::MediaFileImpl(const WebRtc_Word32 id)
+    : _id(id),
+      _crit(CriticalSectionWrapper::CreateCriticalSection()),
+      _callbackCrit(CriticalSectionWrapper::CreateCriticalSection()),
+      _ptrFileUtilityObj(NULL),
+      codec_info_(),
+      _ptrInStream(NULL),
+      _ptrOutStream(NULL),
+      _fileFormat((FileFormats)-1),
+      _recordDurationMs(0),
+      _playoutPositionMs(0),
+      _notificationMs(0),
+      _playingActive(false),
+      _recordingActive(false),
+      _isStereo(false),
+      _openFile(false),
+      _fileName(),
+      _ptrCallback(NULL)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceFile, id, "Created");
+
+    codec_info_.plname[0] = '\0';
+    _fileName[0] = '\0';
+}
+
+
+MediaFileImpl::~MediaFileImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceFile, _id, "~MediaFileImpl()");
+    {
+        CriticalSectionScoped lock(_crit);
+
+        if(_playingActive)
+        {
+            StopPlaying();
+        }
+
+        if(_recordingActive)
+        {
+            StopRecording();
+        }
+
+        delete _ptrFileUtilityObj;
+
+        if(_openFile)
+        {
+            delete _ptrInStream;
+            _ptrInStream = NULL;
+            delete _ptrOutStream;
+            _ptrOutStream = NULL;
+        }
+    }
+
+    delete _crit;
+    delete _callbackCrit;
+}
+
+WebRtc_Word32 MediaFileImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    return 0;
+}
+
+WebRtc_Word32 MediaFileImpl::TimeUntilNextProcess()
+{
+    WEBRTC_TRACE(
+        kTraceWarning,
+        kTraceFile,
+        _id,
+        "TimeUntilNextProcess: This method is not used by MediaFile class.");
+    return -1;
+}
+
+WebRtc_Word32 MediaFileImpl::Process()
+{
+    WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+                 "Process: This method is not used by MediaFile class.");
+    return -1;
+}
+
+WebRtc_Word32 MediaFileImpl::PlayoutAVIVideoData(
+    WebRtc_Word8* buffer,
+    WebRtc_UWord32& dataLengthInBytes)
+{
+    return PlayoutData( buffer, dataLengthInBytes, true);
+}
+
+WebRtc_Word32 MediaFileImpl::PlayoutAudioData(WebRtc_Word8* buffer,
+                                WebRtc_UWord32& dataLengthInBytes)
+{
+    return PlayoutData( buffer, dataLengthInBytes, false);
+}
+
+WebRtc_Word32 MediaFileImpl::PlayoutData(WebRtc_Word8* buffer,
+                                         WebRtc_UWord32& dataLengthInBytes,
+                                         bool video)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+               "MediaFileImpl::PlayoutData(buffer= 0x%x, bufLen= %ld)",
+                 buffer, dataLengthInBytes);
+
+    const WebRtc_UWord32 bufferLengthInBytes = dataLengthInBytes;
+    dataLengthInBytes = 0;
+
+    if(buffer == NULL || bufferLengthInBytes == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "Buffer pointer or length is NULL!");
+        return -1;
+    }
+
+    WebRtc_Word32 bytesRead = 0;
+    {
+        CriticalSectionScoped lock(_crit);
+
+        if(!_playingActive)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+                         "Not currently playing!");
+            return -1;
+        }
+
+        if(!_ptrFileUtilityObj)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                         "Playing, but no FileUtility object!");
+            StopPlaying();
+            return -1;
+        }
+
+        switch(_fileFormat)
+        {
+            case kFileFormatPcm32kHzFile:
+            case kFileFormatPcm16kHzFile:
+            case kFileFormatPcm8kHzFile:
+                bytesRead = _ptrFileUtilityObj->ReadPCMData(
+                    *_ptrInStream,
+                    buffer,
+                    bufferLengthInBytes);
+                break;
+            case kFileFormatCompressedFile:
+                bytesRead = _ptrFileUtilityObj->ReadCompressedData(
+                    *_ptrInStream,
+                    buffer,
+                    bufferLengthInBytes);
+                break;
+            case kFileFormatWavFile:
+                bytesRead = _ptrFileUtilityObj->ReadWavDataAsMono(
+                    *_ptrInStream,
+                    buffer,
+                    bufferLengthInBytes);
+                break;
+            case kFileFormatPreencodedFile:
+                bytesRead = _ptrFileUtilityObj->ReadPreEncodedData(
+                    *_ptrInStream,
+                    buffer,
+                    bufferLengthInBytes);
+                if(bytesRead > 0)
+                {
+                    dataLengthInBytes = bytesRead;
+                    return 0;
+                }
+                break;
+            case kFileFormatAviFile:
+            {
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+                if(video)
+                {
+                    bytesRead = _ptrFileUtilityObj->ReadAviVideoData(
+                        buffer,
+                        bufferLengthInBytes);
+                }
+                else
+                {
+                    bytesRead = _ptrFileUtilityObj->ReadAviAudioData(
+                        buffer,
+                        bufferLengthInBytes);
+                }
+                break;
+#else
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Invalid file format: %d", kFileFormatAviFile);
+                assert(false);
+                break;
+#endif
+            }
+        }
+
+        if( bytesRead > 0)
+        {
+            dataLengthInBytes =(WebRtc_UWord32) bytesRead;
+        }
+    }
+    HandlePlayCallbacks(bytesRead);
+    return 0;
+}
+
+void MediaFileImpl::HandlePlayCallbacks(WebRtc_Word32 bytesRead)
+{
+    bool playEnded = false;
+    WebRtc_UWord32 callbackNotifyMs = 0;
+
+    if(bytesRead > 0)
+    {
+        // Check if it's time for PlayNotification(..).
+        _playoutPositionMs = _ptrFileUtilityObj->PlayoutPositionMs();
+        if(_notificationMs)
+        {
+            if(_playoutPositionMs >= _notificationMs)
+            {
+                _notificationMs = 0;
+                callbackNotifyMs = _playoutPositionMs;
+            }
+        }
+    }
+    else
+    {
+        // If no bytes were read assume end of file.
+        StopPlaying();
+        playEnded = true;
+    }
+
+    // Only _callbackCrit may and should be taken when making callbacks.
+    CriticalSectionScoped lock(_callbackCrit);
+    if(_ptrCallback)
+    {
+        if(callbackNotifyMs)
+        {
+            _ptrCallback->PlayNotification(_id, callbackNotifyMs);
+        }
+        if(playEnded)
+        {
+            _ptrCallback->PlayFileEnded(_id);
+        }
+    }
+}
+
+WebRtc_Word32 MediaFileImpl::PlayoutStereoData(
+    WebRtc_Word8* bufferLeft,
+    WebRtc_Word8* bufferRight,
+    WebRtc_UWord32& dataLengthInBytes)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+                 "MediaFileImpl::PlayoutStereoData(Left = 0x%x, Right = 0x%x,\
+ Len= %ld)",
+                 bufferLeft,
+                 bufferRight,
+                 dataLengthInBytes);
+
+    const WebRtc_UWord32 bufferLengthInBytes = dataLengthInBytes;
+    dataLengthInBytes = 0;
+
+    if(bufferLeft == NULL || bufferRight == NULL || bufferLengthInBytes == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "A buffer pointer or the length is NULL!");
+        return -1;
+    }
+
+    bool playEnded = false;
+    WebRtc_UWord32 callbackNotifyMs = 0;
+    {
+        CriticalSectionScoped lock(_crit);
+
+        if(!_playingActive || !_isStereo)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+                         "Not currently playing stereo!");
+            return -1;
+        }
+
+        if(!_ptrFileUtilityObj)
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceFile,
+                _id,
+                "Playing stereo, but the FileUtility objects is NULL!");
+            StopPlaying();
+            return -1;
+        }
+
+        // Stereo playout only supported for WAV files.
+        WebRtc_Word32 bytesRead = 0;
+        switch(_fileFormat)
+        {
+            case kFileFormatWavFile:
+                    bytesRead = _ptrFileUtilityObj->ReadWavDataAsStereo(
+                        *_ptrInStream,
+                        bufferLeft,
+                        bufferRight,
+                        bufferLengthInBytes);
+                    break;
+            default:
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Trying to read non-WAV as stereo audio\
+ (not supported)");
+                break;
+        }
+
+        if(bytesRead > 0)
+        {
+            dataLengthInBytes = bytesRead;
+
+            // Check if it's time for PlayNotification(..).
+            _playoutPositionMs = _ptrFileUtilityObj->PlayoutPositionMs();
+            if(_notificationMs)
+            {
+                if(_playoutPositionMs >= _notificationMs)
+                {
+                    _notificationMs = 0;
+                    callbackNotifyMs = _playoutPositionMs;
+                }
+            }
+        }
+        else
+        {
+            // If no bytes were read assume end of file.
+            StopPlaying();
+            playEnded = true;
+        }
+    }
+
+    CriticalSectionScoped lock(_callbackCrit);
+    if(_ptrCallback)
+    {
+        if(callbackNotifyMs)
+        {
+            _ptrCallback->PlayNotification(_id, callbackNotifyMs);
+        }
+        if(playEnded)
+        {
+            _ptrCallback->PlayFileEnded(_id);
+        }
+    }
+    return 0;
+}
+
+WebRtc_Word32 MediaFileImpl::StartPlayingAudioFile(
+    const char* fileName,
+    const WebRtc_UWord32 notificationTimeMs,
+    const bool loop,
+    const FileFormats format,
+    const CodecInst* codecInst,
+    const WebRtc_UWord32 startPointMs,
+    const WebRtc_UWord32 stopPointMs)
+{
+    const bool videoOnly = false;
+    return StartPlayingFile(fileName, notificationTimeMs, loop, videoOnly,
+                            format, codecInst, startPointMs, stopPointMs);
+}
+
+
+WebRtc_Word32 MediaFileImpl::StartPlayingVideoFile(const char* fileName,
+                                                   const bool loop,
+                                                   bool videoOnly,
+                                                   const FileFormats format)
+{
+
+    const WebRtc_UWord32 notificationTimeMs = 0;
+    const WebRtc_UWord32 startPointMs       = 0;
+    const WebRtc_UWord32 stopPointMs        = 0;
+    return StartPlayingFile(fileName, notificationTimeMs, loop, videoOnly,
+                            format, 0, startPointMs, stopPointMs);
+}
+
+WebRtc_Word32 MediaFileImpl::StartPlayingFile(
+    const char* fileName,
+    const WebRtc_UWord32 notificationTimeMs,
+    const bool loop,
+    bool videoOnly,
+    const FileFormats format,
+    const CodecInst* codecInst,
+    const WebRtc_UWord32 startPointMs,
+    const WebRtc_UWord32 stopPointMs)
+{
+
+    if(!ValidFileName(fileName))
+    {
+        return -1;
+    }
+    if(!ValidFileFormat(format,codecInst))
+    {
+        return -1;
+    }
+    if(!ValidFilePositions(startPointMs,stopPointMs))
+    {
+        return -1;
+    }
+
+    // Check that the file will play longer than notificationTimeMs ms.
+    if((startPointMs && stopPointMs && !loop) &&
+       (notificationTimeMs > (stopPointMs - startPointMs)))
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceFile,
+            _id,
+            "specified notification time is longer than amount of ms that will\
+ be played");
+        return -1;
+    }
+
+    FileWrapper* inputStream = FileWrapper::Create();
+    if(inputStream == NULL)
+    {
+       WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
+                    "Failed to allocate input stream for file %s", fileName);
+        return -1;
+    }
+
+    // TODO (hellner): make all formats support reading from stream.
+    bool useStream = (format != kFileFormatAviFile);
+    if( useStream)
+    {
+        if(inputStream->OpenFile(fileName, true, loop) != 0)
+        {
+            delete inputStream;
+            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                         "Could not open input file %s", fileName);
+            return -1;
+        }
+    }
+
+    if(StartPlayingStream(*inputStream, fileName, loop, notificationTimeMs,
+                          format, codecInst, startPointMs, stopPointMs,
+                          videoOnly) == -1)
+    {
+        if( useStream)
+        {
+            inputStream->CloseFile();
+        }
+        delete inputStream;
+        return -1;
+    }
+
+    CriticalSectionScoped lock(_crit);
+    _openFile = true;
+    strncpy(_fileName, fileName, sizeof(_fileName));
+    _fileName[sizeof(_fileName) - 1] = '\0';
+    return 0;
+}
+
+WebRtc_Word32 MediaFileImpl::StartPlayingAudioStream(
+    InStream& stream,
+    const WebRtc_UWord32 notificationTimeMs,
+    const FileFormats format,
+    const CodecInst* codecInst,
+    const WebRtc_UWord32 startPointMs,
+    const WebRtc_UWord32 stopPointMs)
+{
+    return StartPlayingStream(stream, 0, false, notificationTimeMs, format,
+                              codecInst, startPointMs, stopPointMs);
+}
+
+WebRtc_Word32 MediaFileImpl::StartPlayingStream(
+    InStream& stream,
+    const char* filename,
+    bool loop,
+    const WebRtc_UWord32 notificationTimeMs,
+    const FileFormats format,
+    const CodecInst*  codecInst,
+    const WebRtc_UWord32 startPointMs,
+    const WebRtc_UWord32 stopPointMs,
+    bool videoOnly)
+{
+    if(!ValidFileFormat(format,codecInst))
+    {
+        return -1;
+    }
+
+    if(!ValidFilePositions(startPointMs,stopPointMs))
+    {
+        return -1;
+    }
+
+    CriticalSectionScoped lock(_crit);
+    if(_playingActive || _recordingActive)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceFile,
+            _id,
+            "StartPlaying called, but already playing or recording file %s",
+            (_fileName[0] == '\0') ? "(name not set)" : _fileName);
+        return -1;
+    }
+
+    if(_ptrFileUtilityObj != NULL)
+    {
+        WEBRTC_TRACE(kTraceError,
+                     kTraceFile,
+                     _id,
+                     "StartPlaying called, but FileUtilityObj already exists!");
+        StopPlaying();
+        return -1;
+    }
+
+    _ptrFileUtilityObj = new ModuleFileUtility(_id);
+    if(_ptrFileUtilityObj == NULL)
+    {
+        WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
+                     "Failed to create FileUtilityObj!");
+        return -1;
+    }
+
+    switch(format)
+    {
+        case kFileFormatWavFile:
+        {
+            if(_ptrFileUtilityObj->InitWavReading(stream, startPointMs,
+                                                  stopPointMs) == -1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Not a valid WAV file!");
+                StopPlaying();
+                return -1;
+            }
+            _fileFormat = kFileFormatWavFile;
+            break;
+        }
+        case kFileFormatCompressedFile:
+        {
+            if(_ptrFileUtilityObj->InitCompressedReading(stream, startPointMs,
+                                                         stopPointMs) == -1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Not a valid Compressed file!");
+                StopPlaying();
+                return -1;
+            }
+            _fileFormat = kFileFormatCompressedFile;
+            break;
+        }
+        case kFileFormatPcm8kHzFile:
+        case kFileFormatPcm16kHzFile:
+        case kFileFormatPcm32kHzFile:
+        {
+            // ValidFileFormat() called in the beginneing of this function
+            // prevents codecInst from being NULL here.
+            assert(codecInst != NULL);
+            if(!ValidFrequency(codecInst->plfreq) ||
+               _ptrFileUtilityObj->InitPCMReading(stream, startPointMs,
+                                                  stopPointMs,
+                                                  codecInst->plfreq) == -1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Not a valid raw 8 or 16 KHz PCM file!");
+                StopPlaying();
+                return -1;
+            }
+
+            _fileFormat = format;
+            break;
+        }
+        case kFileFormatPreencodedFile:
+        {
+            // ValidFileFormat() called in the beginneing of this function
+            // prevents codecInst from being NULL here.
+            assert(codecInst != NULL);
+            if(_ptrFileUtilityObj->InitPreEncodedReading(stream, *codecInst) ==
+               -1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Not a valid PreEncoded file!");
+                StopPlaying();
+                return -1;
+            }
+
+            _fileFormat = kFileFormatPreencodedFile;
+            break;
+        }
+        case kFileFormatAviFile:
+        {
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+            if(_ptrFileUtilityObj->InitAviReading( filename, videoOnly, loop))
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Not a valid AVI file!");
+                StopPlaying();
+
+                return -1;
+            }
+
+            _ptrFileUtilityObj->codec_info(codec_info_);
+
+            _fileFormat = kFileFormatAviFile;
+            break;
+#else
+            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                         "Invalid file format: %d", kFileFormatAviFile);
+            assert(false);
+            break;
+#endif
+        }
+    }
+    if(_ptrFileUtilityObj->codec_info(codec_info_) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "Failed to retrieve codec info!");
+        StopPlaying();
+        return -1;
+    }
+
+    _isStereo = (codec_info_.channels == 2);
+    if(_isStereo && (_fileFormat != kFileFormatWavFile))
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+                     "Stereo is only allowed for WAV files");
+        StopPlaying();
+        return -1;
+    }
+    _playingActive = true;
+    _playoutPositionMs = _ptrFileUtilityObj->PlayoutPositionMs();
+    _ptrInStream = &stream;
+    _notificationMs = notificationTimeMs;
+
+    return 0;
+}
+
+WebRtc_Word32 MediaFileImpl::StopPlaying()
+{
+
+    CriticalSectionScoped lock(_crit);
+    _isStereo = false;
+    if(_ptrFileUtilityObj)
+    {
+        delete _ptrFileUtilityObj;
+        _ptrFileUtilityObj = NULL;
+    }
+    if(_ptrInStream)
+    {
+        // If MediaFileImpl opened the InStream it must be reclaimed here.
+        if(_openFile)
+        {
+            delete _ptrInStream;
+            _openFile = false;
+        }
+        _ptrInStream = NULL;
+    }
+
+    codec_info_.pltype = 0;
+    codec_info_.plname[0] = '\0';
+
+    if(!_playingActive)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+                     "playing is not active!");
+        return -1;
+    }
+
+    _playingActive = false;
+    return 0;
+}
+
+bool MediaFileImpl::IsPlaying()
+{
+    WEBRTC_TRACE(kTraceStream, kTraceFile, _id, "MediaFileImpl::IsPlaying()");
+    CriticalSectionScoped lock(_crit);
+    return _playingActive;
+}
+
+WebRtc_Word32 MediaFileImpl::IncomingAudioData(
+    const WebRtc_Word8*  buffer,
+    const WebRtc_UWord32 bufferLengthInBytes)
+{
+    return IncomingAudioVideoData( buffer, bufferLengthInBytes, false);
+}
+
+WebRtc_Word32 MediaFileImpl::IncomingAVIVideoData(
+    const WebRtc_Word8*  buffer,
+    const WebRtc_UWord32 bufferLengthInBytes)
+{
+    return IncomingAudioVideoData( buffer, bufferLengthInBytes, true);
+}
+
+WebRtc_Word32 MediaFileImpl::IncomingAudioVideoData(
+    const WebRtc_Word8*  buffer,
+    const WebRtc_UWord32 bufferLengthInBytes,
+    const bool video)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+                 "MediaFile::IncomingData(buffer= 0x%x, bufLen= %hd",
+                 buffer, bufferLengthInBytes);
+
+    if(buffer == NULL || bufferLengthInBytes == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "Buffer pointer or length is NULL!");
+        return -1;
+    }
+
+    bool recordingEnded = false;
+    WebRtc_UWord32 callbackNotifyMs = 0;
+    {
+        CriticalSectionScoped lock(_crit);
+
+        if(!_recordingActive)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+                         "Not currently recording!");
+            return -1;
+        }
+        if(_ptrOutStream == NULL)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                         "Recording is active, but output stream is NULL!");
+            assert(false);
+            return -1;
+        }
+
+        WebRtc_Word32 bytesWritten = 0;
+        WebRtc_UWord32 samplesWritten = codec_info_.pacsize;
+        if(_ptrFileUtilityObj)
+        {
+            switch(_fileFormat)
+            {
+                case kFileFormatPcm8kHzFile:
+                case kFileFormatPcm16kHzFile:
+                case kFileFormatPcm32kHzFile:
+                    bytesWritten = _ptrFileUtilityObj->WritePCMData(
+                        *_ptrOutStream,
+                        buffer,
+                        bufferLengthInBytes);
+
+                    // Sample size is 2 bytes.
+                    if(bytesWritten > 0)
+                    {
+                        samplesWritten = bytesWritten/sizeof(WebRtc_Word16);
+                    }
+                    break;
+                case kFileFormatCompressedFile:
+                    bytesWritten = _ptrFileUtilityObj->WriteCompressedData(
+                        *_ptrOutStream, buffer, bufferLengthInBytes);
+                    break;
+                case kFileFormatWavFile:
+                    bytesWritten = _ptrFileUtilityObj->WriteWavData(
+                        *_ptrOutStream,
+                        buffer,
+                        bufferLengthInBytes);
+                    if(bytesWritten > 0 && STR_NCASE_CMP(codec_info_.plname,
+                                                         "L16", 4) == 0)
+                    {
+                        // Sample size is 2 bytes.
+                        samplesWritten = bytesWritten/sizeof(WebRtc_Word16);
+                    }
+                    break;
+                case kFileFormatPreencodedFile:
+                    bytesWritten = _ptrFileUtilityObj->WritePreEncodedData(
+                        *_ptrOutStream, buffer, bufferLengthInBytes);
+                    break;
+                case kFileFormatAviFile:
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+                    if(video)
+                    {
+                        bytesWritten = _ptrFileUtilityObj->WriteAviVideoData(
+                            buffer, bufferLengthInBytes);
+                    }else
+                    {
+                        bytesWritten = _ptrFileUtilityObj->WriteAviAudioData(
+                            buffer, bufferLengthInBytes);
+                    }
+                    break;
+#else
+                    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                                 "Invalid file format: %d", kFileFormatAviFile);
+                    assert(false);
+                    break;
+#endif
+            }
+        } else {
+            // TODO (hellner): quick look at the code makes me think that this
+            //                 code is never executed. Remove?
+            if(_ptrOutStream)
+            {
+                if(_ptrOutStream->Write(buffer, bufferLengthInBytes))
+                {
+                    bytesWritten = bufferLengthInBytes;
+                }
+            }
+        }
+
+        if(!video)
+        {
+            _recordDurationMs += samplesWritten / (codec_info_.plfreq / 1000);
+        }
+
+        // Check if it's time for RecordNotification(..).
+        if(_notificationMs)
+        {
+            if(_recordDurationMs  >= _notificationMs)
+            {
+                _notificationMs = 0;
+                callbackNotifyMs = _recordDurationMs;
+            }
+        }
+        if(bytesWritten < (WebRtc_Word32)bufferLengthInBytes)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+                         "Failed to write all requested bytes!");
+            StopRecording();
+            recordingEnded = true;
+        }
+    }
+
+    // Only _callbackCrit may and should be taken when making callbacks.
+    CriticalSectionScoped lock(_callbackCrit);
+    if(_ptrCallback)
+    {
+        if(callbackNotifyMs)
+        {
+            _ptrCallback->RecordNotification(_id, callbackNotifyMs);
+        }
+        if(recordingEnded)
+        {
+            _ptrCallback->RecordFileEnded(_id);
+            return -1;
+        }
+    }
+    return 0;
+}
+
+WebRtc_Word32 MediaFileImpl::StartRecordingAudioFile(
+    const char* fileName,
+    const FileFormats format,
+    const CodecInst& codecInst,
+    const WebRtc_UWord32 notificationTimeMs,
+    const WebRtc_UWord32 maxSizeBytes)
+{
+    VideoCodec dummyCodecInst;
+    return StartRecordingFile(fileName, format, codecInst, dummyCodecInst,
+                              notificationTimeMs, maxSizeBytes);
+}
+
+
+WebRtc_Word32 MediaFileImpl::StartRecordingVideoFile(
+    const char* fileName,
+    const FileFormats format,
+    const CodecInst& codecInst,
+    const VideoCodec& videoCodecInst,
+    bool videoOnly)
+{
+    const WebRtc_UWord32 notificationTimeMs = 0;
+    const WebRtc_UWord32 maxSizeBytes       = 0;
+
+    return StartRecordingFile(fileName, format, codecInst, videoCodecInst,
+                              notificationTimeMs, maxSizeBytes, videoOnly);
+}
+
+WebRtc_Word32 MediaFileImpl::StartRecordingFile(
+    const char* fileName,
+    const FileFormats format,
+    const CodecInst& codecInst,
+    const VideoCodec& videoCodecInst,
+    const WebRtc_UWord32 notificationTimeMs,
+    const WebRtc_UWord32 maxSizeBytes,
+    bool videoOnly)
+{
+
+    if(!ValidFileName(fileName))
+    {
+        return -1;
+    }
+    if(!ValidFileFormat(format,&codecInst))
+    {
+        return -1;
+    }
+
+    FileWrapper* outputStream = FileWrapper::Create();
+    if(outputStream == NULL)
+    {
+        WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
+                     "Failed to allocate memory for output stream");
+        return -1;
+    }
+
+    // TODO (hellner): make all formats support writing to stream.
+    const bool useStream = ( format != kFileFormatAviFile);
+    if( useStream)
+    {
+        if(outputStream->OpenFile(fileName, false) != 0)
+        {
+            delete outputStream;
+            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                         "Could not open output file '%s' for writing!",
+                         fileName);
+            return -1;
+        }
+    }
+    if(maxSizeBytes)
+    {
+        outputStream->SetMaxFileSize(maxSizeBytes);
+    }
+
+    if(StartRecordingStream(*outputStream, fileName, format, codecInst,
+                            videoCodecInst, notificationTimeMs,
+                            videoOnly) == -1)
+    {
+        if( useStream)
+        {
+            outputStream->CloseFile();
+        }
+        delete outputStream;
+        return -1;
+    }
+
+    CriticalSectionScoped lock(_crit);
+    _openFile = true;
+    strncpy(_fileName, fileName, sizeof(_fileName));
+    _fileName[sizeof(_fileName) - 1] = '\0';
+    return 0;
+}
+
+WebRtc_Word32 MediaFileImpl::StartRecordingAudioStream(
+    OutStream& stream,
+    const FileFormats format,
+    const CodecInst& codecInst,
+    const WebRtc_UWord32 notificationTimeMs)
+{
+    VideoCodec dummyCodecInst;
+    return StartRecordingStream(stream, 0, format, codecInst, dummyCodecInst,
+                                notificationTimeMs);
+}
+
+WebRtc_Word32 MediaFileImpl::StartRecordingStream(
+    OutStream& stream,
+    const char* fileName,
+    const FileFormats format,
+    const CodecInst& codecInst,
+    const VideoCodec& videoCodecInst,
+    const WebRtc_UWord32 notificationTimeMs,
+    bool videoOnly)
+{
+
+    // Check codec info
+    if(!ValidFileFormat(format,&codecInst))
+    {
+        return -1;
+    }
+
+    CriticalSectionScoped lock(_crit);
+    if(_recordingActive || _playingActive)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceFile,
+            _id,
+            "StartRecording called, but already recording or playing file %s!",
+                   _fileName);
+        return -1;
+    }
+
+    if(_ptrFileUtilityObj != NULL)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceFile,
+            _id,
+            "StartRecording called, but fileUtilityObj already exists!");
+        StopRecording();
+        return -1;
+    }
+
+    _ptrFileUtilityObj = new ModuleFileUtility(_id);
+    if(_ptrFileUtilityObj == NULL)
+    {
+        WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
+                     "Cannot allocate fileUtilityObj!");
+        return -1;
+    }
+
+    CodecInst tmpAudioCodec;
+    memcpy(&tmpAudioCodec, &codecInst, sizeof(CodecInst));
+    switch(format)
+    {
+        case kFileFormatWavFile:
+        {
+            if(_ptrFileUtilityObj->InitWavWriting(stream, codecInst) == -1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Failed to initialize WAV file!");
+                delete _ptrFileUtilityObj;
+                _ptrFileUtilityObj = NULL;
+                return -1;
+            }
+            _fileFormat = kFileFormatWavFile;
+            break;
+        }
+        case kFileFormatCompressedFile:
+        {
+            // Write compression codec name at beginning of file
+            if(_ptrFileUtilityObj->InitCompressedWriting(stream, codecInst) ==
+               -1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Failed to initialize Compressed file!");
+                delete _ptrFileUtilityObj;
+                _ptrFileUtilityObj = NULL;
+                return -1;
+            }
+            _fileFormat = kFileFormatCompressedFile;
+            break;
+        }
+        case kFileFormatPcm8kHzFile:
+        case kFileFormatPcm16kHzFile:
+        {
+            if(!ValidFrequency(codecInst.plfreq) ||
+               _ptrFileUtilityObj->InitPCMWriting(stream, codecInst.plfreq) ==
+               -1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Failed to initialize 8 or 16KHz PCM file!");
+                delete _ptrFileUtilityObj;
+                _ptrFileUtilityObj = NULL;
+                return -1;
+            }
+            _fileFormat = format;
+            break;
+        }
+        case kFileFormatPreencodedFile:
+        {
+            if(_ptrFileUtilityObj->InitPreEncodedWriting(stream, codecInst) ==
+               -1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Failed to initialize Pre-Encoded file!");
+                delete _ptrFileUtilityObj;
+                _ptrFileUtilityObj = NULL;
+                return -1;
+            }
+
+            _fileFormat = kFileFormatPreencodedFile;
+            break;
+        }
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+        case kFileFormatAviFile:
+        {
+            if( (_ptrFileUtilityObj->InitAviWriting(
+                    fileName,
+                    codecInst,
+                    videoCodecInst,videoOnly) == -1) ||
+                    (_ptrFileUtilityObj->codec_info(tmpAudioCodec) != 0))
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "Failed to initialize AVI file!");
+                delete _ptrFileUtilityObj;
+                _ptrFileUtilityObj = NULL;
+                return -1;
+            }
+            _fileFormat = kFileFormatAviFile;
+            break;
+        }
+#endif
+        default:
+        {
+            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                         "Invalid file format %d specified!", format);
+            delete _ptrFileUtilityObj;
+            _ptrFileUtilityObj = NULL;
+            return -1;
+        }
+    }
+    _isStereo = (tmpAudioCodec.channels == 2);
+    if(_isStereo)
+    {
+        if(_fileFormat != kFileFormatWavFile)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+                         "Stereo is only allowed for WAV files");
+            StopRecording();
+            return -1;
+        }
+        if((STR_NCASE_CMP(tmpAudioCodec.plname, "L16", 4) != 0) &&
+           (STR_NCASE_CMP(tmpAudioCodec.plname, "PCMU", 5) != 0) &&
+           (STR_NCASE_CMP(tmpAudioCodec.plname, "PCMA", 5) != 0))
+        {
+            WEBRTC_TRACE(
+                kTraceWarning,
+                kTraceFile,
+                _id,
+                "Stereo is only allowed for codec PCMU, PCMA and L16 ");
+            StopRecording();
+            return -1;
+        }
+    }
+    memcpy(&codec_info_, &tmpAudioCodec, sizeof(CodecInst));
+    _recordingActive = true;
+    _ptrOutStream = &stream;
+    _notificationMs = notificationTimeMs;
+    _recordDurationMs = 0;
+    return 0;
+}
+
+WebRtc_Word32 MediaFileImpl::StopRecording()
+{
+
+    CriticalSectionScoped lock(_crit);
+    if(!_recordingActive)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceFile, _id,
+                     "recording is not active!");
+        return -1;
+    }
+
+    _isStereo = false;
+
+    if(_ptrFileUtilityObj != NULL)
+    {
+        // Both AVI and WAV header has to be updated before closing the stream
+        // because they contain size information.
+        if((_fileFormat == kFileFormatWavFile) &&
+            (_ptrOutStream != NULL))
+        {
+            _ptrFileUtilityObj->UpdateWavHeader(*_ptrOutStream);
+        }
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+        else if( _fileFormat == kFileFormatAviFile)
+        {
+            _ptrFileUtilityObj->CloseAviFile( );
+        }
+#endif
+        delete _ptrFileUtilityObj;
+        _ptrFileUtilityObj = NULL;
+    }
+
+    if(_ptrOutStream != NULL)
+    {
+        // If MediaFileImpl opened the OutStream it must be reclaimed here.
+        if(_openFile)
+        {
+            delete _ptrOutStream;
+            _openFile = false;
+        }
+        _ptrOutStream = NULL;
+    }
+
+    _recordingActive = false;
+    codec_info_.pltype = 0;
+    codec_info_.plname[0] = '\0';
+
+    return 0;
+}
+
+bool MediaFileImpl::IsRecording()
+{
+    WEBRTC_TRACE(kTraceStream, kTraceFile, _id, "MediaFileImpl::IsRecording()");
+    CriticalSectionScoped lock(_crit);
+    return _recordingActive;
+}
+
+WebRtc_Word32 MediaFileImpl::RecordDurationMs(WebRtc_UWord32& durationMs)
+{
+
+    CriticalSectionScoped lock(_crit);
+    if(!_recordingActive)
+    {
+        durationMs = 0;
+        return -1;
+    }
+    durationMs = _recordDurationMs;
+    return 0;
+}
+
+bool MediaFileImpl::IsStereo()
+{
+    WEBRTC_TRACE(kTraceStream, kTraceFile, _id, "MediaFileImpl::IsStereo()");
+    CriticalSectionScoped lock(_crit);
+    return _isStereo;
+}
+
+WebRtc_Word32 MediaFileImpl::SetModuleFileCallback(FileCallback* callback)
+{
+
+    CriticalSectionScoped lock(_callbackCrit);
+
+    _ptrCallback = callback;
+    return 0;
+}
+
+WebRtc_Word32 MediaFileImpl::FileDurationMs(const char* fileName,
+                                            WebRtc_UWord32& durationMs,
+                                            const FileFormats format,
+                                            const WebRtc_UWord32 freqInHz)
+{
+
+    if(!ValidFileName(fileName))
+    {
+        return -1;
+    }
+    if(!ValidFrequency(freqInHz))
+    {
+        return -1;
+    }
+
+    ModuleFileUtility* utilityObj = new ModuleFileUtility(_id);
+    if(utilityObj == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "failed to allocate utility object!");
+        return -1;
+    }
+
+    const WebRtc_Word32 duration = utilityObj->FileDurationMs(fileName, format,
+                                                              freqInHz);
+    delete utilityObj;
+    if(duration == -1)
+    {
+        durationMs = 0;
+        return -1;
+    }
+
+    durationMs = duration;
+    return 0;
+}
+
+WebRtc_Word32 MediaFileImpl::PlayoutPositionMs(WebRtc_UWord32& positionMs) const
+{
+    CriticalSectionScoped lock(_crit);
+    if(!_playingActive)
+    {
+        positionMs = 0;
+        return -1;
+    }
+    positionMs = _playoutPositionMs;
+    return 0;
+}
+
+WebRtc_Word32 MediaFileImpl::codec_info(CodecInst& codecInst) const
+{
+    CriticalSectionScoped lock(_crit);
+    if(!_playingActive && !_recordingActive)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "Neither playout nor recording has been initialized!");
+        return -1;
+    }
+    if (codec_info_.pltype == 0 && codec_info_.plname[0] == '\0')
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "The CodecInst for %s is unknown!",
+            _playingActive ? "Playback" : "Recording");
+        return -1;
+    }
+    memcpy(&codecInst,&codec_info_,sizeof(CodecInst));
+    return 0;
+}
+
+WebRtc_Word32 MediaFileImpl::VideoCodecInst(VideoCodec& codecInst) const
+{
+    CriticalSectionScoped lock(_crit);
+    if(!_playingActive && !_recordingActive)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "Neither playout nor recording has been initialized!");
+        return -1;
+    }
+    if( _ptrFileUtilityObj == NULL)
+    {
+        return -1;
+    }
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+    VideoCodec videoCodec;
+    if( _ptrFileUtilityObj->VideoCodecInst( videoCodec) != 0)
+    {
+        return -1;
+    }
+    memcpy(&codecInst,&videoCodec,sizeof(VideoCodec));
+    return 0;
+#else
+    return -1;
+#endif
+}
+
+bool MediaFileImpl::ValidFileFormat(const FileFormats format,
+                                    const CodecInst*  codecInst)
+{
+    if(codecInst == NULL)
+    {
+        if(format == kFileFormatPreencodedFile ||
+           format == kFileFormatPcm8kHzFile    ||
+           format == kFileFormatPcm16kHzFile   ||
+           format == kFileFormatPcm32kHzFile)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceFile, -1,
+                         "Codec info required for file format specified!");
+            return false;
+        }
+    }
+    return true;
+}
+
+bool MediaFileImpl::ValidFileName(const char* fileName)
+{
+    if((fileName == NULL) ||(fileName[0] == '\0'))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, -1, "FileName not specified!");
+        return false;
+    }
+    return true;
+}
+
+
+bool MediaFileImpl::ValidFilePositions(const WebRtc_UWord32 startPointMs,
+                                       const WebRtc_UWord32 stopPointMs)
+{
+    if(startPointMs == 0 && stopPointMs == 0) // Default values
+    {
+        return true;
+    }
+    if(stopPointMs &&(startPointMs >= stopPointMs))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, -1,
+                     "startPointMs must be less than stopPointMs!");
+        return false;
+    }
+    if(stopPointMs &&((stopPointMs - startPointMs) < 20))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, -1,
+                     "minimum play duration for files is 20 ms!");
+        return false;
+    }
+    return true;
+}
+
+bool MediaFileImpl::ValidFrequency(const WebRtc_UWord32 frequency)
+{
+    if((frequency == 8000) || (frequency == 16000)|| (frequency == 32000))
+    {
+        return true;
+    }
+    WEBRTC_TRACE(kTraceError, kTraceFile, -1,
+                 "Frequency should be 8000, 16000 or 32000 (Hz)");
+    return false;
+}
+} // namespace webrtc
diff --git a/src/modules/media_file/source/media_file_impl.h b/src/modules/media_file/source/media_file_impl.h
new file mode 100644
index 0000000..1823678
--- /dev/null
+++ b/src/modules/media_file/source/media_file_impl.h
@@ -0,0 +1,246 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_MEDIA_FILE_SOURCE_MEDIA_FILE_IMPL_H_
+#define WEBRTC_MODULES_MEDIA_FILE_SOURCE_MEDIA_FILE_IMPL_H_
+
+#include "common_types.h"
+#include "media_file.h"
+#include "media_file_defines.h"
+#include "media_file_utility.h"
+#include "module_common_types.h"
+
+namespace webrtc {
+class MediaFileImpl : public MediaFile
+{
+
+public:
+    MediaFileImpl(const WebRtc_Word32 id);
+    ~MediaFileImpl();
+
+    WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+    WebRtc_Word32 Process();
+    WebRtc_Word32 TimeUntilNextProcess();
+
+    // MediaFile functions
+    WebRtc_Word32 PlayoutAudioData(WebRtc_Word8*   audioBuffer,
+                                   WebRtc_UWord32& dataLengthInBytes);
+    WebRtc_Word32 PlayoutAVIVideoData(WebRtc_Word8* videoBuffer,
+                                      WebRtc_UWord32& dataLengthInBytes);
+    WebRtc_Word32 PlayoutStereoData(WebRtc_Word8* audioBufferLeft,
+                                    WebRtc_Word8* audioBufferRight,
+                                    WebRtc_UWord32& dataLengthInBytes);
+    virtual WebRtc_Word32 StartPlayingAudioFile(
+        const char*  fileName,
+        const WebRtc_UWord32 notificationTimeMs = 0,
+        const bool           loop = false,
+        const FileFormats    format = kFileFormatPcm16kHzFile,
+        const CodecInst*     codecInst = NULL,
+        const WebRtc_UWord32 startPointMs = 0,
+        const WebRtc_UWord32 stopPointMs = 0);
+    WebRtc_Word32 StartPlayingVideoFile(const char* fileName,
+                                        const bool          loop,
+                                        bool                videoOnly,
+                                        const FileFormats   format);
+    WebRtc_Word32 StartPlayingAudioStream(
+        InStream&            stream,
+        const WebRtc_UWord32 notificationTimeMs = 0,
+        const FileFormats    format = kFileFormatPcm16kHzFile,
+        const CodecInst*     codecInst = NULL,
+        const WebRtc_UWord32 startPointMs = 0,
+        const WebRtc_UWord32 stopPointMs = 0);
+    WebRtc_Word32 StopPlaying();
+    bool IsPlaying();
+    WebRtc_Word32 PlayoutPositionMs(WebRtc_UWord32& positionMs) const;
+    WebRtc_Word32 IncomingAudioData(const WebRtc_Word8*  audioBuffer,
+                                    const WebRtc_UWord32 bufferLength);
+    WebRtc_Word32 IncomingAVIVideoData(const WebRtc_Word8*  audioBuffer,
+                                       const WebRtc_UWord32 bufferLength);
+    WebRtc_Word32 StartRecordingAudioFile(
+        const char*  fileName,
+        const FileFormats    format,
+        const CodecInst&     codecInst,
+        const WebRtc_UWord32 notificationTimeMs = 0,
+        const WebRtc_UWord32 maxSizeBytes = 0);
+    WebRtc_Word32 StartRecordingVideoFile(
+        const char* fileName,
+        const FileFormats   format,
+        const CodecInst&    codecInst,
+        const VideoCodec&   videoCodecInst,
+        bool                videoOnly = false);
+    WebRtc_Word32 StartRecordingAudioStream(
+        OutStream&           stream,
+        const FileFormats    format,
+        const CodecInst&     codecInst,
+        const WebRtc_UWord32 notificationTimeMs = 0);
+    WebRtc_Word32 StopRecording();
+    bool IsRecording();
+    WebRtc_Word32 RecordDurationMs(WebRtc_UWord32& durationMs);
+    bool IsStereo();
+    WebRtc_Word32 SetModuleFileCallback(FileCallback* callback);
+    WebRtc_Word32 FileDurationMs(
+        const char*  fileName,
+        WebRtc_UWord32&      durationMs,
+        const FileFormats    format,
+        const WebRtc_UWord32 freqInHz = 16000);
+    WebRtc_Word32 codec_info(CodecInst& codecInst) const;
+    WebRtc_Word32 VideoCodecInst(VideoCodec& codecInst) const;
+
+private:
+    // Returns true if the combination of format and codecInst is valid.
+    static bool ValidFileFormat(const FileFormats format,
+                                const CodecInst*  codecInst);
+
+
+    // Returns true if the filename is valid
+    static bool ValidFileName(const char* fileName);
+
+  // Returns true if the combination of startPointMs and stopPointMs is valid.
+    static bool ValidFilePositions(const WebRtc_UWord32 startPointMs,
+                                   const WebRtc_UWord32 stopPointMs);
+
+    // Open the file specified by fileName for reading (relative path is
+    // allowed). FileCallback::PlayNotification(..) will be called after
+    // notificationTimeMs of the file has been played if notificationTimeMs is
+    // greater than zero. If loop is true the file will be played until
+    // StopPlaying() is called. When end of file is reached the file is read
+    // from the start. format specifies the type of file fileName refers to.
+    // codecInst specifies the encoding of the audio data. Note that
+    // file formats that contain this information (like WAV files) don't need to
+    // provide a non-NULL codecInst. Only video will be read if videoOnly is
+    // true. startPointMs and stopPointMs, unless zero,
+    // specify what part of the file should be read. From startPointMs ms to
+    // stopPointMs ms.
+    WebRtc_Word32 StartPlayingFile(
+        const char*  fileName,
+        const WebRtc_UWord32 notificationTimeMs = 0,
+        const bool           loop               = false,
+        bool                 videoOnly          = false,
+        const FileFormats    format             = kFileFormatPcm16kHzFile,
+        const CodecInst*     codecInst          = NULL,
+        const WebRtc_UWord32 startPointMs       = 0,
+        const WebRtc_UWord32 stopPointMs        = 0);
+
+    // Opens the file specified by fileName for reading (relative path is
+    // allowed) if format is kFileFormatAviFile otherwise use stream for
+    // reading. FileCallback::PlayNotification(..) will be called after
+    // notificationTimeMs of the file has been played if notificationTimeMs is
+    // greater than zero. If loop is true the file will be played until
+    // StopPlaying() is called. When end of file is reached the file is read
+    // from the start. format specifies the type of file fileName refers to.
+    // codecInst specifies the encoding of the audio data. Note that
+    // file formats that contain this information (like WAV files) don't need to
+    // provide a non-NULL codecInst. Only video will be read if videoOnly is
+    // true. startPointMs and stopPointMs, unless zero,
+    // specify what part of the file should be read. From startPointMs ms to
+    // stopPointMs ms.
+    // TODO (hellner): there is no reason why fileName should be needed here.
+    WebRtc_Word32 StartPlayingStream(
+        InStream&            stream,
+        const char*          fileName,
+        bool                 loop,
+        const WebRtc_UWord32 notificationTimeMs = 0,
+        const FileFormats    format             = kFileFormatPcm16kHzFile,
+        const CodecInst*     codecInst          = NULL,
+        const WebRtc_UWord32 startPointMs       = 0,
+        const WebRtc_UWord32 stopPointMs        = 0,
+        bool                 videoOnly          = true);
+
+    // Writes one frame into dataBuffer. dataLengthInBytes is both an input and
+    // output parameter. As input parameter it indicates the size of
+    // audioBuffer. As output parameter it indicates the number of bytes
+    // written to audioBuffer. If video is true the data written is a video
+    // frame otherwise it is an audio frame.
+    WebRtc_Word32 PlayoutData(WebRtc_Word8* dataBuffer,
+                              WebRtc_UWord32& dataLengthInBytes, bool video);
+
+    // Write one frame, i.e. the bufferLength first bytes of audioBuffer,
+    // to file. The frame is an audio frame if video is true otherwise it is an
+    // audio frame.
+    WebRtc_Word32 IncomingAudioVideoData(const WebRtc_Word8*  buffer,
+                                         const WebRtc_UWord32 bufferLength,
+                                         const bool video);
+
+    // Open/creates file specified by fileName for writing (relative path is
+    // allowed) if format is kFileFormatAviFile otherwise use stream for
+    // writing. FileCallback::RecordNotification(..) will be called after
+    // notificationTimeMs of audio data has been recorded if
+    // notificationTimeMs is greater than zero.
+    // format specifies the type of file that should be created/opened.
+    // codecInst specifies the encoding of the audio data. videoCodecInst
+    // specifies the encoding of the video data. maxSizeBytes specifies the
+    // number of bytes allowed to be written to file if it is greater than zero.
+    // If format is kFileFormatAviFile and videoOnly is true the AVI file will
+    // only contain video frames.
+    // Note: codecInst.channels should be set to 2 for stereo (and 1 for
+    // mono). Stereo is only supported for WAV files.
+    WebRtc_Word32 StartRecordingFile(
+        const char*  fileName,
+        const FileFormats    format,
+        const CodecInst&     codecInst,
+        const VideoCodec&    videoCodecInst,
+        const WebRtc_UWord32 notificationTimeMs = 0,
+        const WebRtc_UWord32 maxSizeBytes = 0,
+        bool                 videoOnly = false);
+
+    // Open/creates file specified by fileName for writing (relative path is
+    // allowed). FileCallback::RecordNotification(..) will be called after
+    // notificationTimeMs of audio data has been recorded if
+    // notificationTimeMs is greater than zero.
+    // format specifies the type of file that should be created/opened.
+    // codecInst specifies the encoding of the audio data. videoCodecInst
+    // specifies the encoding of the video data. maxSizeBytes specifies the
+    // number of bytes allowed to be written to file if it is greater than zero.
+    // If format is kFileFormatAviFile and videoOnly is true the AVI file will
+    // only contain video frames.
+    // Note: codecInst.channels should be set to 2 for stereo (and 1 for
+    // mono). Stereo is only supported for WAV files.
+    // TODO (hellner): there is no reason why fileName should be needed here.
+    WebRtc_Word32 StartRecordingStream(
+        OutStream&           stream,
+        const char*  fileName,
+        const FileFormats    format,
+        const CodecInst&     codecInst,
+        const VideoCodec&    videoCodecInst,
+        const WebRtc_UWord32 notificationTimeMs = 0,
+        const bool           videoOnly = false);
+
+    // Returns true if frequencyInHz is a supported frequency.
+    static bool ValidFrequency(const WebRtc_UWord32 frequencyInHz);
+
+    void HandlePlayCallbacks(WebRtc_Word32 bytesRead);
+
+    WebRtc_Word32 _id;
+    CriticalSectionWrapper* _crit;
+    CriticalSectionWrapper* _callbackCrit;
+
+    ModuleFileUtility* _ptrFileUtilityObj;
+    CodecInst codec_info_;
+
+    InStream*  _ptrInStream;
+    OutStream* _ptrOutStream;
+
+    FileFormats _fileFormat;
+    WebRtc_UWord32 _recordDurationMs;
+    WebRtc_UWord32 _playoutPositionMs;
+    WebRtc_UWord32 _notificationMs;
+
+    bool _playingActive;
+    bool _recordingActive;
+    bool _isStereo;
+    bool _openFile;
+
+    char _fileName[512];
+
+    FileCallback* _ptrCallback;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_MEDIA_FILE_SOURCE_MEDIA_FILE_IMPL_H_
diff --git a/src/modules/media_file/source/media_file_unittest.cc b/src/modules/media_file/source/media_file_unittest.cc
new file mode 100644
index 0000000..b9d003c
--- /dev/null
+++ b/src/modules/media_file/source/media_file_unittest.cc
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gtest/gtest.h"
+#include "modules/media_file/interface/media_file.h"
+#include "testsupport/fileutils.h"
+#include "voice_engine/voice_engine_defines.h" // defines SLEEP
+
+class MediaFileTest : public testing::Test {
+ protected:
+  void SetUp() {
+    // Use number 0 as the the identifier and pass to CreateMediaFile.
+    media_file_ = webrtc::MediaFile::CreateMediaFile(0);
+    ASSERT_TRUE(media_file_ != NULL);
+  }
+  void TearDown() {
+    webrtc::MediaFile::DestroyMediaFile(media_file_);
+    media_file_ = NULL;
+  }
+  webrtc::MediaFile* media_file_;
+};
+
+TEST_F(MediaFileTest, StartPlayingAudioFileWithoutError) {
+  // TODO(leozwang): Use hard coded filename here, we want to
+  // loop through all audio files in future
+  const std::string audio_file = webrtc::test::ProjectRootPath() +
+      "data/voice_engine/audio_tiny48.wav";
+  ASSERT_EQ(0, media_file_->StartPlayingAudioFile(
+      audio_file.c_str(),
+      0,
+      false,
+      webrtc::kFileFormatWavFile));
+
+  ASSERT_EQ(true, media_file_->IsPlaying());
+
+  SLEEP(1);
+
+  ASSERT_EQ(0, media_file_->StopPlaying());
+}
diff --git a/src/modules/media_file/source/media_file_utility.cc b/src/modules/media_file/source/media_file_utility.cc
new file mode 100644
index 0000000..8d06a88
--- /dev/null
+++ b/src/modules/media_file/source/media_file_utility.cc
@@ -0,0 +1,2563 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "file_wrapper.h"
+#include "media_file_utility.h"
+#include "module_common_types.h"
+#include "trace.h"
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+    #include "avi_file.h"
+#endif
+
+#if (defined(WIN32) || defined(WINCE))
+    #define STR_CASE_CMP _stricmp
+    #define STR_NCASE_CMP _strnicmp
+#else
+    #define STR_CASE_CMP strcasecmp
+    #define STR_NCASE_CMP strncasecmp
+#endif
+
+namespace {
+enum WaveFormats
+{
+    kWaveFormatPcm   = 0x0001,
+    kWaveFormatALaw  = 0x0006,
+    kWaveFormatMuLaw = 0x0007
+};
+
+// First 16 bytes the WAVE header. ckID should be "RIFF", wave_ckID should be
+// "WAVE" and ckSize is the chunk size (4 + n)
+struct WAVE_RIFF_header
+{
+    WebRtc_Word8  ckID[4];
+    WebRtc_Word32 ckSize;
+    WebRtc_Word8  wave_ckID[4];
+};
+
+// First 8 byte of the format chunk. fmt_ckID should be "fmt ". fmt_ckSize is
+// the chunk size (16, 18 or 40 byte)
+struct WAVE_CHUNK_header
+{
+   WebRtc_Word8  fmt_ckID[4];
+   WebRtc_Word32 fmt_ckSize;
+};
+} // unnamed namespace
+
+namespace webrtc {
+ModuleFileUtility::ModuleFileUtility(const WebRtc_Word32 id)
+    : _wavFormatObj(),
+      _dataSize(0),
+      _readSizeBytes(0),
+      _id(id),
+      _stopPointInMs(0),
+      _startPointInMs(0),
+      _playoutPositionMs(0),
+      _bytesWritten(0),
+      codec_info_(),
+      _codecId(kCodecNoCodec),
+      _bytesPerSample(0),
+      _readPos(0),
+      _reading(false),
+      _writing(false),
+      _tempData()
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+      ,
+      _aviAudioInFile(0),
+      _aviVideoInFile(0),
+      _aviOutFile(0)
+#endif
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
+                 "ModuleFileUtility::ModuleFileUtility()");
+    memset(&codec_info_,0,sizeof(CodecInst));
+    codec_info_.pltype = -1;
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+    memset(&_videoCodec,0,sizeof(_videoCodec));
+#endif
+}
+
+ModuleFileUtility::~ModuleFileUtility()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
+                 "ModuleFileUtility::~ModuleFileUtility()");
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+    delete _aviAudioInFile;
+    delete _aviVideoInFile;
+#endif
+}
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+WebRtc_Word32 ModuleFileUtility::InitAviWriting(
+    const char* filename,
+    const CodecInst& audioCodecInst,
+    const VideoCodec& videoCodecInst,
+    const bool videoOnly /*= false*/)
+{
+    _writing = false;
+
+    delete _aviOutFile;
+    _aviOutFile = new AviFile( );
+
+    AVISTREAMHEADER videoStreamHeader;
+    videoStreamHeader.fccType = AviFile::MakeFourCc('v', 'i', 'd', 's');
+
+#ifdef VIDEOCODEC_I420
+    if (strncmp(videoCodecInst.plName, "I420", 7) == 0)
+    {
+        videoStreamHeader.fccHandler = AviFile::MakeFourCc('I','4','2','0');
+    }
+#endif
+#ifdef VIDEOCODEC_VP8
+    if (strncmp(videoCodecInst.plName, "VP8", 7) == 0)
+    {
+        videoStreamHeader.fccHandler = AviFile::MakeFourCc('V','P','8','0');
+    }
+#endif
+    if (videoStreamHeader.fccHandler == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "InitAviWriting() Codec not supported");
+
+        return -1;
+    }
+    videoStreamHeader.dwScale                = 1;
+    videoStreamHeader.dwRate                 = videoCodecInst.maxFramerate;
+    videoStreamHeader.dwSuggestedBufferSize  = videoCodecInst.height *
+        (videoCodecInst.width >> 1) * 3;
+    videoStreamHeader.dwQuality              = (WebRtc_UWord32)-1;
+    videoStreamHeader.dwSampleSize           = 0;
+    videoStreamHeader.rcFrame.top            = 0;
+    videoStreamHeader.rcFrame.bottom         = videoCodecInst.height;
+    videoStreamHeader.rcFrame.left           = 0;
+    videoStreamHeader.rcFrame.right          = videoCodecInst.width;
+
+    BITMAPINFOHEADER bitMapInfoHeader;
+    bitMapInfoHeader.biSize         = sizeof(BITMAPINFOHEADER);
+    bitMapInfoHeader.biHeight       = videoCodecInst.height;
+    bitMapInfoHeader.biWidth        = videoCodecInst.width;
+    bitMapInfoHeader.biPlanes       = 1;
+    bitMapInfoHeader.biBitCount     = 12;
+    bitMapInfoHeader.biClrImportant = 0;
+    bitMapInfoHeader.biClrUsed      = 0;
+    bitMapInfoHeader.biCompression  = videoStreamHeader.fccHandler;
+    bitMapInfoHeader.biSizeImage    = bitMapInfoHeader.biWidth *
+        bitMapInfoHeader.biHeight * bitMapInfoHeader.biBitCount / 8;
+
+    if (_aviOutFile->CreateVideoStream(
+        videoStreamHeader,
+        bitMapInfoHeader,
+        NULL,
+        0) != 0)
+    {
+        return -1;
+    }
+
+    if(!videoOnly)
+    {
+        AVISTREAMHEADER audioStreamHeader;
+        audioStreamHeader.fccType = AviFile::MakeFourCc('a', 'u', 'd', 's');
+        // fccHandler is the FOURCC of the codec for decoding the stream.
+        // It's an optional parameter that is not used by audio streams.
+        audioStreamHeader.fccHandler   = 0;
+        audioStreamHeader.dwScale      = 1;
+
+        WAVEFORMATEX waveFormatHeader;
+        waveFormatHeader.cbSize          = 0;
+        waveFormatHeader.nChannels       = 1;
+
+        if (strncmp(audioCodecInst.plname, "PCMU", 4) == 0)
+        {
+            audioStreamHeader.dwSampleSize = 1;
+            audioStreamHeader.dwRate       = 8000;
+            audioStreamHeader.dwQuality    = (WebRtc_UWord32)-1;
+            audioStreamHeader.dwSuggestedBufferSize = 80;
+
+            waveFormatHeader.nAvgBytesPerSec = 8000;
+            waveFormatHeader.nSamplesPerSec  = 8000;
+            waveFormatHeader.wBitsPerSample  = 8;
+            waveFormatHeader.nBlockAlign     = 1;
+            waveFormatHeader.wFormatTag      = kWaveFormatMuLaw;
+
+        } else if (strncmp(audioCodecInst.plname, "PCMA", 4) == 0)
+        {
+            audioStreamHeader.dwSampleSize = 1;
+            audioStreamHeader.dwRate       = 8000;
+            audioStreamHeader.dwQuality    = (WebRtc_UWord32)-1;
+            audioStreamHeader.dwSuggestedBufferSize = 80;
+
+            waveFormatHeader.nAvgBytesPerSec = 8000;
+            waveFormatHeader.nSamplesPerSec  = 8000;
+            waveFormatHeader.wBitsPerSample  = 8;
+            waveFormatHeader.nBlockAlign     = 1;
+            waveFormatHeader.wFormatTag      = kWaveFormatALaw;
+
+        } else if (strncmp(audioCodecInst.plname, "L16", 3) == 0)
+        {
+            audioStreamHeader.dwSampleSize = 2;
+            audioStreamHeader.dwRate       = audioCodecInst.plfreq;
+            audioStreamHeader.dwQuality    = (WebRtc_UWord32)-1;
+            audioStreamHeader.dwSuggestedBufferSize =
+                (audioCodecInst.plfreq/100) * 2;
+
+            waveFormatHeader.nAvgBytesPerSec = audioCodecInst.plfreq * 2;
+            waveFormatHeader.nSamplesPerSec  = audioCodecInst.plfreq;
+            waveFormatHeader.wBitsPerSample  = 16;
+            waveFormatHeader.nBlockAlign     = 2;
+            waveFormatHeader.wFormatTag      = kWaveFormatPcm;
+        } else
+        {
+            return -1;
+        }
+
+        if(_aviOutFile->CreateAudioStream(
+            audioStreamHeader,
+            waveFormatHeader) != 0)
+        {
+            return -1;
+        }
+
+
+        if( InitWavCodec(waveFormatHeader.nSamplesPerSec,
+            waveFormatHeader.nChannels,
+            waveFormatHeader.wBitsPerSample,
+            waveFormatHeader.wFormatTag) != 0)
+        {
+            return -1;
+        }
+    }
+    _aviOutFile->Create(filename);
+    _writing = true;
+    return 0;
+}
+
+WebRtc_Word32 ModuleFileUtility::WriteAviAudioData(
+    const WebRtc_Word8* buffer,
+    WebRtc_UWord32 bufferLengthInBytes)
+{
+    if( _aviOutFile != 0)
+    {
+        return _aviOutFile->WriteAudio(
+            reinterpret_cast<const WebRtc_UWord8*>(buffer),
+            bufferLengthInBytes);
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "AVI file not initialized");
+        return -1;
+    }
+}
+
+WebRtc_Word32 ModuleFileUtility::WriteAviVideoData(
+        const WebRtc_Word8* buffer,
+        WebRtc_UWord32 bufferLengthInBytes)
+{
+    if( _aviOutFile != 0)
+    {
+        return _aviOutFile->WriteVideo(
+            reinterpret_cast<const WebRtc_UWord8*>(buffer),
+            bufferLengthInBytes);
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "AVI file not initialized");
+        return -1;
+    }
+}
+
+
+WebRtc_Word32 ModuleFileUtility::CloseAviFile( )
+{
+    if( _reading && _aviAudioInFile)
+    {
+        delete _aviAudioInFile;
+        _aviAudioInFile = 0;
+    }
+
+    if( _reading && _aviVideoInFile)
+    {
+        delete _aviVideoInFile;
+        _aviVideoInFile = 0;
+    }
+
+    if( _writing && _aviOutFile)
+    {
+        delete _aviOutFile;
+        _aviOutFile = 0;
+    }
+    return 0;
+}
+
+
+WebRtc_Word32 ModuleFileUtility::InitAviReading(const char* filename,
+                                                bool videoOnly, bool loop)
+{
+    _reading = false;
+    delete _aviVideoInFile;
+    _aviVideoInFile = new AviFile( );
+
+    if ((_aviVideoInFile != 0) && _aviVideoInFile->Open(AviFile::AVI_VIDEO,
+                                                        filename, loop) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Unable to open AVI file (video)");
+        return -1;
+    }
+
+
+    AVISTREAMHEADER videoInStreamHeader;
+    BITMAPINFOHEADER bitmapInfo;
+    char codecConfigParameters[AviFile::CODEC_CONFIG_LENGTH] = {};
+    WebRtc_Word32 configLength = 0;
+    if( _aviVideoInFile->GetVideoStreamInfo(videoInStreamHeader, bitmapInfo,
+                                            codecConfigParameters,
+                                            configLength) != 0)
+    {
+        return -1;
+    }
+    _videoCodec.width = static_cast<WebRtc_UWord16>(
+        videoInStreamHeader.rcFrame.right);
+    _videoCodec.height = static_cast<WebRtc_UWord16>(
+        videoInStreamHeader.rcFrame.bottom);
+    _videoCodec.maxFramerate = static_cast<WebRtc_UWord8>(
+        videoInStreamHeader.dwRate);
+
+    const size_t plnameLen = sizeof(_videoCodec.plName) / sizeof(char);
+    if (bitmapInfo.biCompression == AviFile::MakeFourCc('I','4','2','0'))
+    {
+        strncpy(_videoCodec.plName, "I420", plnameLen);
+       _videoCodec.codecType = kVideoCodecI420;
+    }
+    else if (bitmapInfo.biCompression ==
+             AviFile::MakeFourCc('V', 'P', '8', '0'))
+    {
+        strncpy(_videoCodec.plName, "VP8", plnameLen);
+        _videoCodec.codecType = kVideoCodecVP8;
+    }
+    else
+    {
+        return -1;
+    }
+
+    if(!videoOnly)
+    {
+        delete _aviAudioInFile;
+        _aviAudioInFile = new AviFile();
+
+        if ( (_aviAudioInFile != 0) &&
+            _aviAudioInFile->Open(AviFile::AVI_AUDIO, filename, loop) == -1)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                         "Unable to open AVI file (audio)");
+            return -1;
+        }
+
+        WAVEFORMATEX waveHeader;
+        if(_aviAudioInFile->GetAudioStreamInfo(waveHeader) != 0)
+        {
+            return -1;
+        }
+        if(InitWavCodec(waveHeader.nSamplesPerSec, waveHeader.nChannels,
+                        waveHeader.wBitsPerSample, waveHeader.wFormatTag) != 0)
+        {
+            return -1;
+        }
+    }
+    _reading = true;
+    return 0;
+}
+
+WebRtc_Word32 ModuleFileUtility::ReadAviAudioData(
+    WebRtc_Word8*  outBuffer,
+    const WebRtc_UWord32 bufferLengthInBytes)
+{
+    if(_aviAudioInFile == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "AVI file not opened.");
+        return -1;
+    }
+
+    WebRtc_Word32 length = bufferLengthInBytes;
+    if(_aviAudioInFile->ReadAudio(
+        reinterpret_cast<WebRtc_UWord8*>(outBuffer),
+        length) != 0)
+    {
+        return -1;
+    }
+    else
+    {
+        return length;
+    }
+}
+
+WebRtc_Word32 ModuleFileUtility::ReadAviVideoData(
+    WebRtc_Word8* outBuffer,
+    const WebRtc_UWord32 bufferLengthInBytes)
+{
+    if(_aviVideoInFile == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "AVI file not opened.");
+        return -1;
+    }
+
+    WebRtc_Word32 length = bufferLengthInBytes;
+    if( _aviVideoInFile->ReadVideo(
+        reinterpret_cast<WebRtc_UWord8*>(outBuffer),
+        length) != 0)
+    {
+        return -1;
+    } else {
+        return length;
+    }
+}
+
+WebRtc_Word32 ModuleFileUtility::VideoCodecInst(VideoCodec& codecInst)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+               "ModuleFileUtility::CodecInst(codecInst= 0x%x)", &codecInst);
+
+   if(!_reading)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "CodecInst: not currently reading audio file!");
+        return -1;
+    }
+    memcpy(&codecInst,&_videoCodec,sizeof(VideoCodec));
+    return 0;
+}
+#endif
+
+WebRtc_Word32 ModuleFileUtility::ReadWavHeader(InStream& wav)
+{
+    WAVE_RIFF_header RIFFheaderObj;
+    WAVE_CHUNK_header CHUNKheaderObj;
+    // TODO (hellner): tmpStr and tmpStr2 seems unnecessary here.
+    char tmpStr[6] = "FOUR";
+    unsigned char tmpStr2[4];
+    WebRtc_Word32 i, len;
+    bool dataFound = false;
+    bool fmtFound = false;
+    WebRtc_Word8 dummyRead;
+
+
+    _dataSize = 0;
+    len = wav.Read(&RIFFheaderObj, sizeof(WAVE_RIFF_header));
+    if(len != sizeof(WAVE_RIFF_header))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "Not a wave file (too short)");
+        return -1;
+    }
+
+    for (i = 0; i < 4; i++)
+    {
+        tmpStr[i] = RIFFheaderObj.ckID[i];
+    }
+    if(strcmp(tmpStr, "RIFF") != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "Not a wave file (does not have RIFF)");
+        return -1;
+    }
+    for (i = 0; i < 4; i++)
+    {
+        tmpStr[i] = RIFFheaderObj.wave_ckID[i];
+    }
+    if(strcmp(tmpStr, "WAVE") != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "Not a wave file (does not have WAVE)");
+        return -1;
+    }
+
+    len = wav.Read(&CHUNKheaderObj, sizeof(WAVE_CHUNK_header));
+
+    // WAVE files are stored in little endian byte order. Make sure that the
+    // data can be read on big endian as well.
+    // TODO (hellner): little endian to system byte order should be done in
+    //                 in a subroutine.
+    memcpy(tmpStr2, &CHUNKheaderObj.fmt_ckSize, 4);
+    CHUNKheaderObj.fmt_ckSize =
+        (WebRtc_Word32) ((WebRtc_UWord32) tmpStr2[0] +
+                         (((WebRtc_UWord32)tmpStr2[1])<<8) +
+                         (((WebRtc_UWord32)tmpStr2[2])<<16) +
+                         (((WebRtc_UWord32)tmpStr2[3])<<24));
+
+    memcpy(tmpStr, CHUNKheaderObj.fmt_ckID, 4);
+
+    while ((len == sizeof(WAVE_CHUNK_header)) && (!fmtFound || !dataFound))
+    {
+        if(strcmp(tmpStr, "fmt ") == 0)
+        {
+            len = wav.Read(&_wavFormatObj, sizeof(WAVE_FMTINFO_header));
+
+            memcpy(tmpStr2, &_wavFormatObj.formatTag, 2);
+            _wavFormatObj.formatTag =
+                (WaveFormats) ((WebRtc_UWord32)tmpStr2[0] +
+                               (((WebRtc_UWord32)tmpStr2[1])<<8));
+            memcpy(tmpStr2, &_wavFormatObj.nChannels, 2);
+            _wavFormatObj.nChannels =
+                (WebRtc_Word16) ((WebRtc_UWord32)tmpStr2[0] +
+                                 (((WebRtc_UWord32)tmpStr2[1])<<8));
+            memcpy(tmpStr2, &_wavFormatObj.nSamplesPerSec, 4);
+            _wavFormatObj.nSamplesPerSec =
+                (WebRtc_Word32) ((WebRtc_UWord32)tmpStr2[0] +
+                                 (((WebRtc_UWord32)tmpStr2[1])<<8) +
+                                 (((WebRtc_UWord32)tmpStr2[2])<<16) +
+                                 (((WebRtc_UWord32)tmpStr2[3])<<24));
+            memcpy(tmpStr2, &_wavFormatObj.nAvgBytesPerSec, 4);
+            _wavFormatObj.nAvgBytesPerSec =
+                (WebRtc_Word32) ((WebRtc_UWord32)tmpStr2[0] +
+                                 (((WebRtc_UWord32)tmpStr2[1])<<8) +
+                                 (((WebRtc_UWord32)tmpStr2[2])<<16) +
+                                 (((WebRtc_UWord32)tmpStr2[3])<<24));
+            memcpy(tmpStr2, &_wavFormatObj.nBlockAlign, 2);
+            _wavFormatObj.nBlockAlign =
+                (WebRtc_Word16) ((WebRtc_UWord32)tmpStr2[0] +
+                                 (((WebRtc_UWord32)tmpStr2[1])<<8));
+            memcpy(tmpStr2, &_wavFormatObj.nBitsPerSample, 2);
+            _wavFormatObj.nBitsPerSample =
+                (WebRtc_Word16) ((WebRtc_UWord32)tmpStr2[0] +
+                                 (((WebRtc_UWord32)tmpStr2[1])<<8));
+
+            for (i = 0;
+                 i < (CHUNKheaderObj.fmt_ckSize -
+                      (WebRtc_Word32)sizeof(WAVE_FMTINFO_header));
+                 i++)
+            {
+                len = wav.Read(&dummyRead, 1);
+                if(len != 1)
+                {
+                    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                                 "File corrupted, reached EOF (reading fmt)");
+                    return -1;
+                }
+            }
+            fmtFound = true;
+        }
+        else if(strcmp(tmpStr, "data") == 0)
+        {
+            _dataSize = CHUNKheaderObj.fmt_ckSize;
+            dataFound = true;
+            break;
+        }
+        else
+        {
+            for (i = 0; i < (CHUNKheaderObj.fmt_ckSize); i++)
+            {
+                len = wav.Read(&dummyRead, 1);
+                if(len != 1)
+                {
+                    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                                 "File corrupted, reached EOF (reading other)");
+                    return -1;
+                }
+            }
+        }
+
+        len = wav.Read(&CHUNKheaderObj, sizeof(WAVE_CHUNK_header));
+
+        memcpy(tmpStr2, &CHUNKheaderObj.fmt_ckSize, 4);
+        CHUNKheaderObj.fmt_ckSize =
+            (WebRtc_Word32) ((WebRtc_UWord32)tmpStr2[0] +
+                             (((WebRtc_UWord32)tmpStr2[1])<<8) +
+                             (((WebRtc_UWord32)tmpStr2[2])<<16) +
+                             (((WebRtc_UWord32)tmpStr2[3])<<24));
+
+        memcpy(tmpStr, CHUNKheaderObj.fmt_ckID, 4);
+    }
+
+    // Either a proper format chunk has been read or a data chunk was come
+    // across.
+    if( (_wavFormatObj.formatTag != kWaveFormatPcm) &&
+        (_wavFormatObj.formatTag != kWaveFormatALaw) &&
+        (_wavFormatObj.formatTag != kWaveFormatMuLaw))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "Coding formatTag value=%d not supported!",
+                     _wavFormatObj.formatTag);
+        return -1;
+    }
+    if((_wavFormatObj.nChannels < 1) ||
+        (_wavFormatObj.nChannels > 2))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "nChannels value=%d not supported!",
+                     _wavFormatObj.nChannels);
+        return -1;
+    }
+
+    if((_wavFormatObj.nBitsPerSample != 8) &&
+        (_wavFormatObj.nBitsPerSample != 16))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "nBitsPerSample value=%d not supported!",
+                     _wavFormatObj.nBitsPerSample);
+        return -1;
+    }
+
+    // Calculate the number of bytes that 10 ms of audio data correspond to.
+    if(_wavFormatObj.formatTag == kWaveFormatPcm)
+    {
+        // TODO (hellner): integer division for 22050 and 11025 would yield
+        //                 the same result as the else statement. Remove those
+        //                 special cases?
+        if(_wavFormatObj.nSamplesPerSec == 44100)
+        {
+            _readSizeBytes = 440 * _wavFormatObj.nChannels *
+                (_wavFormatObj.nBitsPerSample / 8);
+        } else if(_wavFormatObj.nSamplesPerSec == 22050) {
+            _readSizeBytes = 220 * _wavFormatObj.nChannels *
+                (_wavFormatObj.nBitsPerSample / 8);
+        } else if(_wavFormatObj.nSamplesPerSec == 11025) {
+            _readSizeBytes = 110 * _wavFormatObj.nChannels *
+                (_wavFormatObj.nBitsPerSample / 8);
+        } else {
+            _readSizeBytes = (_wavFormatObj.nSamplesPerSec/100) *
+              _wavFormatObj.nChannels * (_wavFormatObj.nBitsPerSample / 8);
+        }
+
+    } else {
+        _readSizeBytes = (_wavFormatObj.nSamplesPerSec/100) *
+            _wavFormatObj.nChannels * (_wavFormatObj.nBitsPerSample / 8);
+    }
+    return 0;
+}
+
+WebRtc_Word32 ModuleFileUtility::InitWavCodec(WebRtc_UWord32 samplesPerSec,
+                                              WebRtc_UWord32 channels,
+                                              WebRtc_UWord32 bitsPerSample,
+                                              WebRtc_UWord32 formatTag)
+{
+    codec_info_.pltype   = -1;
+    codec_info_.plfreq   = samplesPerSec;
+    codec_info_.channels = channels;
+    codec_info_.rate     = bitsPerSample * samplesPerSec;
+
+    // Calculate the packet size for 10ms frames
+    switch(formatTag)
+    {
+    case kWaveFormatALaw:
+        strcpy(codec_info_.plname, "PCMA");
+        _codecId = kCodecPcma;
+        codec_info_.pltype = 8;
+        codec_info_.pacsize  = codec_info_.plfreq / 100;
+        break;
+    case kWaveFormatMuLaw:
+        strcpy(codec_info_.plname, "PCMU");
+        _codecId = kCodecPcmu;
+        codec_info_.pltype = 0;
+        codec_info_.pacsize  = codec_info_.plfreq / 100;
+         break;
+    case kWaveFormatPcm:
+        codec_info_.pacsize  = (bitsPerSample * (codec_info_.plfreq / 100)) / 8;
+        if(samplesPerSec == 8000)
+        {
+            strcpy(codec_info_.plname, "L16");
+            _codecId = kCodecL16_8Khz;
+        }
+        else if(samplesPerSec == 16000)
+        {
+            strcpy(codec_info_.plname, "L16");
+            _codecId = kCodecL16_16kHz;
+        }
+        else if(samplesPerSec == 32000)
+        {
+            strcpy(codec_info_.plname, "L16");
+            _codecId = kCodecL16_32Khz;
+        }
+        // Set the packet size for "odd" sampling frequencies so that it
+        // properly corresponds to _readSizeBytes.
+        else if(samplesPerSec == 11025)
+        {
+            strcpy(codec_info_.plname, "L16");
+            _codecId = kCodecL16_16kHz;
+            codec_info_.pacsize = 110;
+            codec_info_.plfreq = 11000;
+        }
+        else if(samplesPerSec == 22050)
+        {
+            strcpy(codec_info_.plname, "L16");
+            _codecId = kCodecL16_16kHz;
+            codec_info_.pacsize = 220;
+            codec_info_.plfreq = 22000;
+        }
+        else if(samplesPerSec == 44100)
+        {
+            strcpy(codec_info_.plname, "L16");
+            _codecId = kCodecL16_16kHz;
+            codec_info_.pacsize = 440;
+            codec_info_.plfreq = 44000;
+        }
+        else if(samplesPerSec == 48000)
+        {
+            strcpy(codec_info_.plname, "L16");
+            _codecId = kCodecL16_16kHz;
+            codec_info_.pacsize = 480;
+            codec_info_.plfreq = 48000;
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                         "Unsupported PCM frequency!");
+            return -1;
+        }
+        break;
+        default:
+            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                         "unknown WAV format TAG!");
+            return -1;
+            break;
+    }
+    return 0;
+}
+
+WebRtc_Word32 ModuleFileUtility::InitWavReading(InStream& wav,
+                                                const WebRtc_UWord32 start,
+                                                const WebRtc_UWord32 stop)
+{
+
+    _reading = false;
+
+    if(ReadWavHeader(wav) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "failed to read WAV header!");
+        return -1;
+    }
+
+    _playoutPositionMs = 0;
+    _readPos = 0;
+
+    if(start > 0)
+    {
+        WebRtc_UWord8 dummy[WAV_MAX_BUFFER_SIZE];
+        WebRtc_Word32 readLength;
+        if(_readSizeBytes <= WAV_MAX_BUFFER_SIZE)
+        {
+            while (_playoutPositionMs < start)
+            {
+                readLength = wav.Read(dummy, _readSizeBytes);
+                if(readLength == _readSizeBytes)
+                {
+                    _readPos += readLength;
+                    _playoutPositionMs += 10;
+                }
+                else // Must have reached EOF before start position!
+                {
+                    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                       "InitWavReading(), EOF before start position");
+                    return -1;
+                }
+            }
+        }
+        else
+        {
+            return -1;
+        }
+    }
+    if( InitWavCodec(_wavFormatObj.nSamplesPerSec, _wavFormatObj.nChannels,
+                     _wavFormatObj.nBitsPerSample,
+                     _wavFormatObj.formatTag) != 0)
+    {
+        return -1;
+    }
+    _bytesPerSample = _wavFormatObj.nBitsPerSample / 8;
+
+
+    _startPointInMs = start;
+    _stopPointInMs = stop;
+    _reading = true;
+    return 0;
+}
+
+WebRtc_Word32 ModuleFileUtility::ReadWavDataAsMono(
+    InStream& wav,
+    WebRtc_Word8* outData,
+    const WebRtc_UWord32 bufferSize)
+{
+    WEBRTC_TRACE(
+        kTraceStream,
+        kTraceFile,
+        _id,
+        "ModuleFileUtility::ReadWavDataAsMono(wav= 0x%x, outData= 0x%d,\
+ bufSize= %ld)",
+        &wav,
+        outData,
+        bufferSize);
+
+    // The number of bytes that should be read from file.
+    const WebRtc_UWord32 totalBytesNeeded = _readSizeBytes;
+    // The number of bytes that will be written to outData.
+    const WebRtc_UWord32 bytesRequested = (codec_info_.channels == 2) ?
+        totalBytesNeeded >> 1 : totalBytesNeeded;
+    if(bufferSize < bytesRequested)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "ReadWavDataAsMono: output buffer is too short!");
+        return -1;
+    }
+    if(outData == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "ReadWavDataAsMono: output buffer NULL!");
+        return -1;
+    }
+
+    if(!_reading)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "ReadWavDataAsMono: no longer reading file.");
+        return -1;
+    }
+
+    WebRtc_Word32 bytesRead = ReadWavData(
+        wav,
+        (codec_info_.channels == 2) ? _tempData : (WebRtc_UWord8*)outData,
+        totalBytesNeeded);
+    if(bytesRead == 0)
+    {
+        return 0;
+    }
+    if(bytesRead < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "ReadWavDataAsMono: failed to read data from WAV file.");
+        return -1;
+    }
+    // Output data is should be mono.
+    if(codec_info_.channels == 2)
+    {
+        for (WebRtc_UWord32 i = 0; i < bytesRequested / _bytesPerSample; i++)
+        {
+            // Sample value is the average of left and right buffer rounded to
+            // closest integer value. Note samples can be either 1 or 2 byte.
+            if(_bytesPerSample == 1)
+            {
+                _tempData[i] = ((_tempData[2 * i] + _tempData[(2 * i) + 1] +
+                                 1) >> 1);
+            }
+            else
+            {
+                WebRtc_Word16* sampleData = (WebRtc_Word16*) _tempData;
+                sampleData[i] = ((sampleData[2 * i] + sampleData[(2 * i) + 1] +
+                                  1) >> 1);
+            }
+        }
+        memcpy(outData, _tempData, bytesRequested);
+    }
+    return bytesRequested;
+}
+
+WebRtc_Word32 ModuleFileUtility::ReadWavDataAsStereo(
+    InStream& wav,
+    WebRtc_Word8* outDataLeft,
+    WebRtc_Word8* outDataRight,
+    const WebRtc_UWord32 bufferSize)
+{
+    WEBRTC_TRACE(
+        kTraceStream,
+        kTraceFile,
+        _id,
+        "ModuleFileUtility::ReadWavDataAsStereo(wav= 0x%x, outLeft= 0x%x,\
+ outRight= 0x%x, bufSize= %ld)",
+        &wav,
+        outDataLeft,
+        outDataRight,
+        bufferSize);
+
+    if((outDataLeft == NULL) ||
+       (outDataRight == NULL))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "ReadWavDataAsMono: an input buffer is NULL!");
+        return -1;
+    }
+    if(codec_info_.channels != 2)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceFile,
+            _id,
+            "ReadWavDataAsStereo: WAV file does not contain stereo data!");
+        return -1;
+    }
+    if(! _reading)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "ReadWavDataAsStereo: no longer reading file.");
+        return -1;
+    }
+
+    // The number of bytes that should be read from file.
+    const WebRtc_UWord32 totalBytesNeeded = _readSizeBytes;
+    // The number of bytes that will be written to the left and the right
+    // buffers.
+    const WebRtc_UWord32 bytesRequested = totalBytesNeeded >> 1;
+    if(bufferSize < bytesRequested)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "ReadWavData: Output buffers are too short!");
+        assert(false);
+        return -1;
+    }
+
+    WebRtc_Word32 bytesRead = ReadWavData(wav, _tempData, totalBytesNeeded);
+    if(bytesRead <= 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "ReadWavDataAsStereo: failed to read data from WAV file.");
+        return -1;
+    }
+
+    // Turn interleaved audio to left and right buffer. Note samples can be
+    // either 1 or 2 bytes
+    if(_bytesPerSample == 1)
+    {
+        for (WebRtc_UWord32 i = 0; i < bytesRequested; i++)
+        {
+            outDataLeft[i]  = _tempData[2 * i];
+            outDataRight[i] = _tempData[(2 * i) + 1];
+        }
+    }
+    else if(_bytesPerSample == 2)
+    {
+        WebRtc_Word16* sampleData = reinterpret_cast<WebRtc_Word16*>(_tempData);
+        WebRtc_Word16* outLeft = reinterpret_cast<WebRtc_Word16*>(outDataLeft);
+        WebRtc_Word16* outRight = reinterpret_cast<WebRtc_Word16*>(
+            outDataRight);
+
+        // Bytes requested to samples requested.
+        WebRtc_UWord32 sampleCount = bytesRequested >> 1;
+        for (WebRtc_UWord32 i = 0; i < sampleCount; i++)
+        {
+            outLeft[i] = sampleData[2 * i];
+            outRight[i] = sampleData[(2 * i) + 1];
+        }
+    } else {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                   "ReadWavStereoData: unsupported sample size %d!",
+                   _bytesPerSample);
+        assert(false);
+        return -1;
+    }
+    return bytesRequested;
+}
+
+WebRtc_Word32 ModuleFileUtility::ReadWavData(
+    InStream& wav,
+    WebRtc_UWord8* buffer,
+    const WebRtc_UWord32 dataLengthInBytes)
+{
+    WEBRTC_TRACE(
+        kTraceStream,
+        kTraceFile,
+        _id,
+        "ModuleFileUtility::ReadWavData(wav= 0x%x, buffer= 0x%x, dataLen= %ld)",
+        &wav,
+        buffer,
+        dataLengthInBytes);
+
+
+    if(buffer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "ReadWavDataAsMono: output buffer NULL!");
+        return -1;
+    }
+
+    // Make sure that a read won't return too few samples.
+    // TODO (hellner): why not read the remaining bytes needed from the start
+    //                 of the file?
+    if((_dataSize - _readPos) < (WebRtc_Word32)dataLengthInBytes)
+    {
+        // Rewind() being -1 may be due to the file not supposed to be looped.
+        if(wav.Rewind() == -1)
+        {
+            _reading = false;
+            return 0;
+        }
+        if(InitWavReading(wav, _startPointInMs, _stopPointInMs) == -1)
+        {
+            _reading = false;
+            return -1;
+        }
+    }
+
+    WebRtc_Word32 bytesRead = wav.Read(buffer, dataLengthInBytes);
+    if(bytesRead < 0)
+    {
+        _reading = false;
+        return -1;
+    }
+
+    // This should never happen due to earlier sanity checks.
+    // TODO (hellner): change to an assert and fail here since this should
+    //                 never happen...
+    if(bytesRead < (WebRtc_Word32)dataLengthInBytes)
+    {
+        if((wav.Rewind() == -1) ||
+            (InitWavReading(wav, _startPointInMs, _stopPointInMs) == -1))
+        {
+            _reading = false;
+            return -1;
+        }
+        else
+        {
+            bytesRead = wav.Read(buffer, dataLengthInBytes);
+            if(bytesRead < (WebRtc_Word32)dataLengthInBytes)
+            {
+                _reading = false;
+                return -1;
+            }
+        }
+    }
+
+    _readPos += bytesRead;
+
+    // TODO (hellner): Why is dataLengthInBytes let dictate the number of bytes
+    //                 to read when exactly 10ms should be read?!
+    _playoutPositionMs += 10;
+    if((_stopPointInMs > 0) &&
+        (_playoutPositionMs >= _stopPointInMs))
+    {
+        if((wav.Rewind() == -1) ||
+            (InitWavReading(wav, _startPointInMs, _stopPointInMs) == -1))
+        {
+            _reading = false;
+        }
+    }
+    return bytesRead;
+}
+
+WebRtc_Word32 ModuleFileUtility::InitWavWriting(OutStream& wav,
+                                                const CodecInst& codecInst)
+{
+
+    if(set_codec_info(codecInst) != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "codecInst identifies unsupported codec!");
+        return -1;
+    }
+    _writing = false;
+    WebRtc_UWord32 channels = (codecInst.channels == 0) ?
+        1 : codecInst.channels;
+
+    if(STR_CASE_CMP(codecInst.plname, "PCMU") == 0)
+    {
+        _bytesPerSample = 1;
+        if(WriteWavHeader(wav, 8000, _bytesPerSample, channels,
+                          kWaveFormatMuLaw, 0) == -1)
+        {
+            return -1;
+        }
+    }else if(STR_CASE_CMP(codecInst.plname, "PCMA") == 0)
+    {
+        _bytesPerSample = 1;
+        if(WriteWavHeader(wav, 8000, _bytesPerSample, channels, kWaveFormatALaw,
+                          0) == -1)
+        {
+            return -1;
+        }
+    }
+    else if(STR_CASE_CMP(codecInst.plname, "L16") == 0)
+    {
+        _bytesPerSample = 2;
+        if(WriteWavHeader(wav, codecInst.plfreq, _bytesPerSample, channels,
+                          kWaveFormatPcm, 0) == -1)
+        {
+            return -1;
+        }
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                   "codecInst identifies unsupported codec for WAV file!");
+        return -1;
+    }
+    _writing = true;
+    _bytesWritten = 0;
+    return 0;
+}
+
+WebRtc_Word32 ModuleFileUtility::WriteWavData(OutStream& out,
+                                              const WebRtc_Word8*  buffer,
+                                              const WebRtc_UWord32 dataLength)
+{
+    WEBRTC_TRACE(
+        kTraceStream,
+        kTraceFile,
+        _id,
+        "ModuleFileUtility::WriteWavData(out= 0x%x, buf= 0x%x, dataLen= %d)",
+        &out,
+        buffer,
+        dataLength);
+
+    if(buffer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "WriteWavData: input buffer NULL!");
+        return -1;
+    }
+
+    if(!out.Write(buffer, dataLength))
+    {
+        return -1;
+    }
+    _bytesWritten += dataLength;
+    return dataLength;
+}
+
+
+WebRtc_Word32 ModuleFileUtility::WriteWavHeader(
+    OutStream& wav,
+    const WebRtc_UWord32 freq,
+    const WebRtc_UWord32 bytesPerSample,
+    const WebRtc_UWord32 channels,
+    const WebRtc_UWord32 format,
+    const WebRtc_UWord32 lengthInBytes)
+{
+
+    // Frame size in bytes for 10 ms of audio.
+    // TODO (hellner): 44.1 kHz has 440 samples frame size. Doesn't seem to
+    //                 be taken into consideration here!
+    WebRtc_Word32 frameSize = (freq / 100) * bytesPerSample * channels;
+
+    // Calculate the number of full frames that the wave file contain.
+    const WebRtc_Word32 dataLengthInBytes = frameSize *
+        (lengthInBytes / frameSize);
+
+    WebRtc_Word8 tmpStr[4];
+    WebRtc_Word8 tmpChar;
+    WebRtc_UWord32 tmpLong;
+
+    memcpy(tmpStr, "RIFF", 4);
+    wav.Write(tmpStr, 4);
+
+    tmpLong = dataLengthInBytes + 36;
+    tmpChar = (WebRtc_Word8)(tmpLong);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 8);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 16);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 24);
+    wav.Write(&tmpChar, 1);
+
+    memcpy(tmpStr, "WAVE", 4);
+    wav.Write(tmpStr, 4);
+
+    memcpy(tmpStr, "fmt ", 4);
+    wav.Write(tmpStr, 4);
+
+    tmpChar = 16;
+    wav.Write(&tmpChar, 1);
+    tmpChar = 0;
+    wav.Write(&tmpChar, 1);
+    tmpChar = 0;
+    wav.Write(&tmpChar, 1);
+    tmpChar = 0;
+    wav.Write(&tmpChar, 1);
+
+    tmpChar = (WebRtc_Word8)(format);
+    wav.Write(&tmpChar, 1);
+    tmpChar = 0;
+    wav.Write(&tmpChar, 1);
+
+    tmpChar = (WebRtc_Word8)(channels);
+    wav.Write(&tmpChar, 1);
+    tmpChar = 0;
+    wav.Write(&tmpChar, 1);
+
+    tmpLong = freq;
+    tmpChar = (WebRtc_Word8)(tmpLong);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 8);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 16);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 24);
+    wav.Write(&tmpChar, 1);
+
+    // nAverageBytesPerSec = Sample rate * Bytes per sample * Channels
+    tmpLong = bytesPerSample * freq * channels;
+    tmpChar = (WebRtc_Word8)(tmpLong);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 8);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 16);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 24);
+    wav.Write(&tmpChar, 1);
+
+    // nBlockAlign = Bytes per sample * Channels
+    tmpChar = (WebRtc_Word8)(bytesPerSample * channels);
+    wav.Write(&tmpChar, 1);
+    tmpChar = 0;
+    wav.Write(&tmpChar, 1);
+
+    tmpChar = (WebRtc_Word8)(bytesPerSample*8);
+    wav.Write(&tmpChar, 1);
+    tmpChar = 0;
+    wav.Write(&tmpChar, 1);
+
+    memcpy(tmpStr, "data", 4);
+    wav.Write(tmpStr, 4);
+
+    tmpLong = dataLengthInBytes;
+    tmpChar = (WebRtc_Word8)(tmpLong);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 8);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 16);
+    wav.Write(&tmpChar, 1);
+    tmpChar = (WebRtc_Word8)(tmpLong >> 24);
+    wav.Write(&tmpChar, 1);
+
+    return 0;
+}
+
+WebRtc_Word32 ModuleFileUtility::UpdateWavHeader(OutStream& wav)
+{
+    WebRtc_Word32 res = -1;
+    if(wav.Rewind() == -1)
+    {
+        return -1;
+    }
+    WebRtc_UWord32 channels = (codec_info_.channels == 0) ?
+        1 : codec_info_.channels;
+
+    if(STR_CASE_CMP(codec_info_.plname, "L16") == 0)
+    {
+        res = WriteWavHeader(wav, codec_info_.plfreq, 2, channels,
+                             kWaveFormatPcm, _bytesWritten);
+    } else if(STR_CASE_CMP(codec_info_.plname, "PCMU") == 0) {
+            res = WriteWavHeader(wav, 8000, 1, channels, kWaveFormatMuLaw,
+                                 _bytesWritten);
+    } else if(STR_CASE_CMP(codec_info_.plname, "PCMA") == 0) {
+            res = WriteWavHeader(wav, 8000, 1, channels, kWaveFormatALaw,
+                                 _bytesWritten);
+    } else {
+        // Allow calling this API even if not writing to a WAVE file.
+        // TODO (hellner): why?!
+        return 0;
+    }
+    return res;
+}
+
+
+WebRtc_Word32 ModuleFileUtility::InitPreEncodedReading(InStream& in,
+                                                       const CodecInst& cinst)
+{
+
+    WebRtc_UWord8 preEncodedID;
+    in.Read(&preEncodedID, 1);
+
+    MediaFileUtility_CodecType codecType =
+        (MediaFileUtility_CodecType)preEncodedID;
+
+    if(set_codec_info(cinst) != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "Pre-encoded file send codec mismatch!");
+        return -1;
+    }
+    if(codecType != _codecId)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "Pre-encoded file format codec mismatch!");
+        return -1;
+    }
+    memcpy(&codec_info_,&cinst,sizeof(CodecInst));
+    _reading = true;
+    return 0;
+}
+
+WebRtc_Word32 ModuleFileUtility::ReadPreEncodedData(
+    InStream& in,
+    WebRtc_Word8* outData,
+    const WebRtc_UWord32 bufferSize)
+{
+    WEBRTC_TRACE(
+        kTraceStream,
+        kTraceFile,
+        _id,
+        "ModuleFileUtility::ReadPreEncodedData(in= 0x%x, outData= 0x%x,\
+ bufferSize= %d)",
+        &in,
+        outData,
+        bufferSize);
+
+    if(outData == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "output buffer NULL");
+    }
+
+    WebRtc_UWord32 frameLen;
+    WebRtc_UWord8 buf[64];
+    // Each frame has a two byte header containing the frame length.
+    WebRtc_Word32 res = in.Read(buf, 2);
+    if(res != 2)
+    {
+        if(!in.Rewind())
+        {
+            // The first byte is the codec identifier.
+            in.Read(buf, 1);
+            res = in.Read(buf, 2);
+        }
+        else
+        {
+            return -1;
+        }
+    }
+    frameLen = buf[0] + buf[1] * 256;
+    if(bufferSize < frameLen)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceFile,
+            _id,
+            "buffer not large enough to read %d bytes of pre-encoded data!",
+            frameLen);
+        return -1;
+    }
+    return in.Read(outData, frameLen);
+}
+
+WebRtc_Word32 ModuleFileUtility::InitPreEncodedWriting(
+    OutStream& out,
+    const CodecInst& codecInst)
+{
+
+    if(set_codec_info(codecInst) != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "CodecInst not recognized!");
+        return -1;
+    }
+    _writing = true;
+    _bytesWritten = 1;
+     out.Write(&_codecId, 1);
+     return 0;
+}
+
+WebRtc_Word32 ModuleFileUtility::WritePreEncodedData(
+    OutStream& out,
+    const WebRtc_Word8*  buffer,
+    const WebRtc_UWord32 dataLength)
+{
+    WEBRTC_TRACE(
+        kTraceStream,
+        kTraceFile,
+        _id,
+        "ModuleFileUtility::WritePreEncodedData(out= 0x%x, inData= 0x%x,\
+ dataLen= %d)",
+        &out,
+        buffer,
+        dataLength);
+
+    if(buffer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,"buffer NULL");
+    }
+
+    WebRtc_Word32 bytesWritten = 0;
+    // The first two bytes is the size of the frame.
+    WebRtc_Word16 lengthBuf;
+    lengthBuf = (WebRtc_Word16)dataLength;
+    if(!out.Write(&lengthBuf, 2))
+    {
+       return -1;
+    }
+    bytesWritten = 2;
+
+    if(!out.Write(buffer, dataLength))
+    {
+        return -1;
+    }
+    bytesWritten += dataLength;
+    return bytesWritten;
+}
+
+WebRtc_Word32 ModuleFileUtility::InitCompressedReading(
+    InStream& in,
+    const WebRtc_UWord32 start,
+    const WebRtc_UWord32 stop)
+{
+    WEBRTC_TRACE(
+        kTraceDebug,
+        kTraceFile,
+        _id,
+        "ModuleFileUtility::InitCompressedReading(in= 0x%x, start= %d,\
+ stop= %d)",
+        &in,
+        start,
+        stop);
+
+#if defined(WEBRTC_CODEC_GSMAMR) || defined(WEBRTC_CODEC_GSMAMRWB) || \
+    defined(WEBRTC_CODEC_ILBC)
+    WebRtc_Word16 read_len = 0;
+#endif
+    _codecId = kCodecNoCodec;
+    _playoutPositionMs = 0;
+    _reading = false;
+
+    _startPointInMs = start;
+    _stopPointInMs = stop;
+
+#ifdef WEBRTC_CODEC_GSMAMR
+    WebRtc_Word32 AMRmode2bytes[9]={12,13,15,17,19,20,26,31,5};
+#endif
+#ifdef WEBRTC_CODEC_GSMAMRWB
+    WebRtc_Word32 AMRWBmode2bytes[10]={17,23,32,36,40,46,50,58,60,6};
+#endif
+
+    // Read the codec name
+    WebRtc_Word32 cnt = 0;
+    char buf[64];
+    do
+    {
+        in.Read(&buf[cnt++], 1);
+    } while ((buf[cnt-1] != '\n') && (64 > cnt));
+
+    if(cnt==64)
+    {
+        return -1;
+    } else {
+        buf[cnt]=0;
+    }
+
+#ifdef WEBRTC_CODEC_GSMAMR
+    if(!strcmp("#!AMR\n", buf))
+    {
+        strcpy(codec_info_.plname, "amr");
+        codec_info_.pacsize = 160;
+        _codecId = kCodecAmr;
+        codec_info_.pltype = 112;
+        codec_info_.rate = 12200;
+        codec_info_.plfreq = 8000;
+        codec_info_.channels = 1;
+
+        WebRtc_Word16 mode = 0;
+        if(_startPointInMs > 0)
+        {
+            while (_playoutPositionMs <= _startPointInMs)
+            {
+                // First read byte contain the AMR mode.
+                read_len = in.Read(buf, 1);
+                if(read_len != 1)
+                {
+                    return -1;
+                }
+
+                mode = (buf[0]>>3)&0xF;
+                if((mode < 0) || (mode > 8))
+                {
+                    if(mode != 15)
+                    {
+                        return -1;
+                    }
+                }
+                if(mode != 15)
+                {
+                    read_len = in.Read(&buf[1], AMRmode2bytes[mode]);
+                    if(read_len != AMRmode2bytes[mode])
+                    {
+                        return -1;
+                    }
+                }
+                _playoutPositionMs += 20;
+            }
+        }
+    }
+#endif
+#ifdef WEBRTC_CODEC_GSMAMRWB
+    if(!strcmp("#!AMRWB\n", buf))
+    {
+        strcpy(codec_info_.plname, "amr-wb");
+        codec_info_.pacsize = 320;
+        _codecId = kCodecAmrWb;
+        codec_info_.pltype = 120;
+        codec_info_.rate = 20000;
+        codec_info_.plfreq = 16000;
+        codec_info_.channels = 1;
+
+        WebRtc_Word16 mode = 0;
+        if(_startPointInMs > 0)
+        {
+            while (_playoutPositionMs <= _startPointInMs)
+            {
+                // First read byte contain the AMR mode.
+                read_len = in.Read(buf, 1);
+                if(read_len != 1)
+                {
+                    return -1;
+                }
+
+                mode = (buf[0]>>3)&0xF;
+                if((mode < 0) || (mode > 9))
+                {
+                    if(mode != 15)
+                    {
+                        return -1;
+                    }
+                }
+                if(mode != 15)
+                {
+                    read_len = in.Read(&buf[1], AMRWBmode2bytes[mode]);
+                    if(read_len != AMRWBmode2bytes[mode])
+                    {
+                        return -1;
+                    }
+                }
+                _playoutPositionMs += 20;
+            }
+        }
+    }
+#endif
+#ifdef WEBRTC_CODEC_ILBC
+    if(!strcmp("#!iLBC20\n", buf))
+    {
+        codec_info_.pltype = 102;
+        strcpy(codec_info_.plname, "ilbc");
+        codec_info_.plfreq   = 8000;
+        codec_info_.pacsize  = 160;
+        codec_info_.channels = 1;
+        codec_info_.rate     = 13300;
+        _codecId = kCodecIlbc20Ms;
+
+        if(_startPointInMs > 0)
+        {
+            while (_playoutPositionMs <= _startPointInMs)
+            {
+                read_len = in.Read(buf, 38);
+                if(read_len == 38)
+                {
+                    _playoutPositionMs += 20;
+                }
+                else
+                {
+                    return -1;
+                }
+            }
+        }
+    }
+
+    if(!strcmp("#!iLBC30\n", buf))
+    {
+        codec_info_.pltype = 102;
+        strcpy(codec_info_.plname, "ilbc");
+        codec_info_.plfreq   = 8000;
+        codec_info_.pacsize  = 240;
+        codec_info_.channels = 1;
+        codec_info_.rate     = 13300;
+        _codecId = kCodecIlbc30Ms;
+
+        if(_startPointInMs > 0)
+        {
+            while (_playoutPositionMs <= _startPointInMs)
+            {
+                read_len = in.Read(buf, 50);
+                if(read_len == 50)
+                {
+                    _playoutPositionMs += 20;
+                }
+                else
+                {
+                    return -1;
+                }
+            }
+        }
+    }
+#endif
+    if(_codecId == kCodecNoCodec)
+    {
+        return -1;
+    }
+    _reading = true;
+    return 0;
+}
+
+WebRtc_Word32 ModuleFileUtility::ReadCompressedData(InStream& in,
+                                                    WebRtc_Word8* outData,
+                                                    WebRtc_UWord32 bufferSize)
+{
+    WEBRTC_TRACE(
+        kTraceStream,
+        kTraceFile,
+        _id,
+        "ModuleFileUtility::ReadCompressedData(in=0x%x, outData=0x%x,\
+ bytes=%ld)",
+        &in,
+        outData,
+        bufferSize);
+
+#ifdef WEBRTC_CODEC_GSMAMR
+    WebRtc_UWord32 AMRmode2bytes[9]={12,13,15,17,19,20,26,31,5};
+#endif
+#ifdef WEBRTC_CODEC_GSMAMRWB
+    WebRtc_UWord32 AMRWBmode2bytes[10]={17,23,32,36,40,46,50,58,60,6};
+#endif
+    WebRtc_UWord32 bytesRead = 0;
+
+    if(! _reading)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "not currently reading!");
+        return -1;
+    }
+
+#ifdef WEBRTC_CODEC_GSMAMR
+    if(_codecId == kCodecAmr)
+    {
+        WebRtc_Word32 res = in.Read(outData, 1);
+        if(res != 1)
+        {
+            if(!in.Rewind())
+            {
+                InitCompressedReading(in, _startPointInMs, _stopPointInMs);
+                res = in.Read(outData, 1);
+                if(res != 1)
+                {
+                    _reading = false;
+                    return -1;
+                }
+            }
+            else
+            {
+                _reading = false;
+                return -1;
+            }
+        }
+         const WebRtc_Word16 mode = (outData[0]>>3)&0xF;
+        if((mode < 0) ||
+           (mode > 8))
+        {
+            if(mode != 15)
+            {
+                return -1;
+            }
+        }
+        if(mode != 15)
+        {
+            if(bufferSize < AMRmode2bytes[mode] + 1)
+            {
+                WEBRTC_TRACE(
+                    kTraceError,
+                    kTraceFile,
+                    _id,
+                    "output buffer is too short to read AMR compressed data.");
+                assert(false);
+                return -1;
+            }
+            bytesRead = in.Read(&outData[1], AMRmode2bytes[mode]);
+            if(bytesRead != AMRmode2bytes[mode])
+            {
+                _reading = false;
+                return -1;
+            }
+            // Count the mode byte to bytes read.
+            bytesRead++;
+        }
+        else
+        {
+            bytesRead = 1;
+        }
+    }
+#endif
+#ifdef WEBRTC_CODEC_GSMAMRWB
+    if(_codecId == kCodecAmrWb)
+    {
+        WebRtc_Word32 res = in.Read(outData, 1);
+        if(res != 1)
+        {
+            if(!in.Rewind())
+            {
+                InitCompressedReading(in, _startPointInMs, _stopPointInMs);
+                res = in.Read(outData, 1);
+                if(res != 1)
+                {
+                    _reading = false;
+                    return -1;
+                }
+            }
+            else
+            {
+                _reading = false;
+                return -1;
+            }
+        }
+         WebRtc_Word16 mode = (outData[0]>>3)&0xF;
+        if((mode < 0) ||
+           (mode > 8))
+        {
+            if(mode != 15)
+            {
+                return -1;
+            }
+        }
+        if(mode != 15)
+        {
+            if(bufferSize < AMRWBmode2bytes[mode] + 1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                           "output buffer is too short to read AMRWB\
+ compressed.");
+                assert(false);
+                return -1;
+            }
+             bytesRead = in.Read(&outData[1], AMRWBmode2bytes[mode]);
+            if(bytesRead != AMRWBmode2bytes[mode])
+            {
+                _reading = false;
+                return -1;
+            }
+            bytesRead++;
+        }
+        else
+        {
+            bytesRead = 1;
+        }
+    }
+#endif
+#ifdef WEBRTC_CODEC_ILBC
+    if((_codecId == kCodecIlbc20Ms) ||
+        (_codecId == kCodecIlbc30Ms))
+    {
+        WebRtc_UWord32 byteSize = 0;
+         if(_codecId == kCodecIlbc30Ms)
+        {
+            byteSize = 50;
+        }
+        if(_codecId == kCodecIlbc20Ms)
+        {
+            byteSize = 38;
+        }
+        if(bufferSize < byteSize)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                           "output buffer is too short to read ILBC compressed\
+ data.");
+            assert(false);
+            return -1;
+        }
+
+        bytesRead = in.Read(outData, byteSize);
+        if(bytesRead != byteSize)
+        {
+            if(!in.Rewind())
+            {
+                InitCompressedReading(in, _startPointInMs, _stopPointInMs);
+                bytesRead = in.Read(outData, byteSize);
+                if(bytesRead != byteSize)
+                {
+                    _reading = false;
+                    return -1;
+                }
+            }
+            else
+            {
+                _reading = false;
+                return -1;
+            }
+        }
+    }
+#endif
+    if(bytesRead == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "ReadCompressedData() no bytes read, codec not supported");
+        return -1;
+    }
+
+    _playoutPositionMs += 20;
+    if((_stopPointInMs > 0) &&
+        (_playoutPositionMs >= _stopPointInMs))
+    {
+        if(!in.Rewind())
+        {
+            InitCompressedReading(in, _startPointInMs, _stopPointInMs);
+        }
+        else
+        {
+            _reading = false;
+        }
+    }
+    return bytesRead;
+}
+
+WebRtc_Word32 ModuleFileUtility::InitCompressedWriting(
+    OutStream& out,
+    const CodecInst& codecInst)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceFile, _id,
+               "ModuleFileUtility::InitCompressedWriting(out= 0x%x,\
+ codecName= %s)",
+               &out, codecInst.plname);
+
+    _writing = false;
+
+#ifdef WEBRTC_CODEC_GSMAMR
+    if(STR_CASE_CMP(codecInst.plname, "amr") == 0)
+    {
+        if(codecInst.pacsize == 160)
+        {
+            memcpy(&codec_info_,&codecInst,sizeof(CodecInst));
+            _codecId = kCodecAmr;
+            out.Write("#!AMR\n",6);
+            _writing = true;
+            return 0;
+        }
+    }
+#endif
+#ifdef WEBRTC_CODEC_GSMAMRWB
+    if(STR_CASE_CMP(codecInst.plname, "amr-wb") == 0)
+    {
+        if(codecInst.pacsize == 320)
+        {
+            memcpy(&codec_info_,&codecInst,sizeof(CodecInst));
+            _codecId = kCodecAmrWb;
+            out.Write("#!AMRWB\n",8);
+            _writing = true;
+            return 0;
+        }
+    }
+#endif
+#ifdef WEBRTC_CODEC_ILBC
+    if(STR_CASE_CMP(codecInst.plname, "ilbc") == 0)
+    {
+        if(codecInst.pacsize == 160)
+        {
+            _codecId = kCodecIlbc20Ms;
+            out.Write("#!iLBC20\n",9);
+        }
+        else if(codecInst.pacsize == 240)
+        {
+            _codecId = kCodecIlbc30Ms;
+            out.Write("#!iLBC30\n",9);
+        }
+        else
+        {
+          WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                       "codecInst defines unsupported compression codec!");
+            return -1;
+        }
+        memcpy(&codec_info_,&codecInst,sizeof(CodecInst));
+        _writing = true;
+        return 0;
+    }
+#endif
+
+    WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                 "codecInst defines unsupported compression codec!");
+    return -1;
+}
+
+WebRtc_Word32 ModuleFileUtility::WriteCompressedData(
+    OutStream& out,
+    const WebRtc_Word8* buffer,
+    const WebRtc_UWord32 dataLength)
+{
+    WEBRTC_TRACE(
+        kTraceStream,
+        kTraceFile,
+        _id,
+        "ModuleFileUtility::WriteCompressedData(out= 0x%x, buf= 0x%x,\
+ dataLen= %d)",
+        &out,
+        buffer,
+        dataLength);
+
+    if(buffer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,"buffer NULL");
+    }
+
+    if(!out.Write(buffer, dataLength))
+    {
+        return -1;
+    }
+    return dataLength;
+}
+
+WebRtc_Word32 ModuleFileUtility::InitPCMReading(InStream& pcm,
+                                                const WebRtc_UWord32 start,
+                                                const WebRtc_UWord32 stop,
+                                                WebRtc_UWord32 freq)
+{
+    WEBRTC_TRACE(
+        kTraceInfo,
+        kTraceFile,
+        _id,
+        "ModuleFileUtility::InitPCMReading(pcm= 0x%x, start=%d, stop=%d,\
+ freq=%d)",
+        &pcm,
+        start,
+        stop,
+        freq);
+
+    WebRtc_Word8 dummy[320];
+    WebRtc_Word32 read_len;
+
+    _playoutPositionMs = 0;
+    _startPointInMs = start;
+    _stopPointInMs = stop;
+    _reading = false;
+
+    if(freq == 8000)
+    {
+        strcpy(codec_info_.plname, "L16");
+        codec_info_.pltype   = -1;
+        codec_info_.plfreq   = 8000;
+        codec_info_.pacsize  = 160;
+        codec_info_.channels = 1;
+        codec_info_.rate     = 128000;
+        _codecId = kCodecL16_8Khz;
+    }
+    else if(freq == 16000)
+    {
+        strcpy(codec_info_.plname, "L16");
+        codec_info_.pltype   = -1;
+        codec_info_.plfreq   = 16000;
+        codec_info_.pacsize  = 320;
+        codec_info_.channels = 1;
+        codec_info_.rate     = 256000;
+        _codecId = kCodecL16_16kHz;
+    }
+    else if(freq == 32000)
+    {
+        strcpy(codec_info_.plname, "L16");
+        codec_info_.pltype   = -1;
+        codec_info_.plfreq   = 32000;
+        codec_info_.pacsize  = 320;
+        codec_info_.channels = 1;
+        codec_info_.rate     = 512000;
+        _codecId = kCodecL16_32Khz;
+    }
+
+    // Readsize for 10ms of audio data (2 bytes per sample).
+    _readSizeBytes = 2 * codec_info_. plfreq / 100;
+    if(_startPointInMs > 0)
+    {
+        while (_playoutPositionMs < _startPointInMs)
+        {
+            read_len = pcm.Read(dummy, _readSizeBytes);
+            if(read_len == _readSizeBytes)
+            {
+                _playoutPositionMs += 10;
+            }
+            else // Must have reached EOF before start position!
+            {
+                return -1;
+            }
+        }
+    }
+    _reading = true;
+    return 0;
+}
+
+WebRtc_Word32 ModuleFileUtility::ReadPCMData(InStream& pcm,
+                                             WebRtc_Word8* outData,
+                                             WebRtc_UWord32 bufferSize)
+{
+    WEBRTC_TRACE(
+        kTraceStream,
+        kTraceFile,
+        _id,
+        "ModuleFileUtility::ReadPCMData(pcm= 0x%x, outData= 0x%x, bufSize= %d)",
+        &pcm,
+        outData,
+        bufferSize);
+
+    if(outData == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,"buffer NULL");
+    }
+
+    // Readsize for 10ms of audio data (2 bytes per sample).
+    WebRtc_UWord32 bytesRequested = 2 * codec_info_.plfreq / 100;
+    if(bufferSize <  bytesRequested)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                   "ReadPCMData: buffer not long enough for a 10ms frame.");
+        assert(false);
+        return -1;
+    }
+
+    WebRtc_UWord32 bytesRead = pcm.Read(outData, bytesRequested);
+    if(bytesRead < bytesRequested)
+    {
+        if(pcm.Rewind() == -1)
+        {
+            _reading = false;
+        }
+        else
+        {
+            if(InitPCMReading(pcm, _startPointInMs, _stopPointInMs,
+                              codec_info_.plfreq) == -1)
+            {
+                _reading = false;
+            }
+            else
+            {
+                WebRtc_Word32 rest = bytesRequested - bytesRead;
+                WebRtc_Word32 len = pcm.Read(&(outData[bytesRead]), rest);
+                if(len == rest)
+                {
+                    bytesRead += len;
+                }
+                else
+                {
+                    _reading = false;
+                }
+            }
+            if(bytesRead <= 0)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                        "ReadPCMData: Failed to rewind audio file.");
+                return -1;
+            }
+        }
+    }
+
+    if(bytesRead <= 0)
+    {
+        WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+                   "ReadPCMData: end of file");
+        return -1;
+    }
+    _playoutPositionMs += 10;
+    if(_stopPointInMs && _playoutPositionMs >= _stopPointInMs)
+    {
+        if(!pcm.Rewind())
+        {
+            if(InitPCMReading(pcm, _startPointInMs, _stopPointInMs,
+                              codec_info_.plfreq) == -1)
+            {
+                _reading = false;
+            }
+        }
+    }
+    return bytesRead;
+}
+
+WebRtc_Word32 ModuleFileUtility::InitPCMWriting(OutStream& out,
+                                                WebRtc_UWord32 freq)
+{
+
+    if(freq == 8000)
+    {
+        strcpy(codec_info_.plname, "L16");
+        codec_info_.pltype   = -1;
+        codec_info_.plfreq   = 8000;
+        codec_info_.pacsize  = 160;
+        codec_info_.channels = 1;
+        codec_info_.rate     = 128000;
+
+        _codecId = kCodecL16_8Khz;
+    }
+    else if(freq == 16000)
+    {
+        strcpy(codec_info_.plname, "L16");
+        codec_info_.pltype   = -1;
+        codec_info_.plfreq   = 16000;
+        codec_info_.pacsize  = 320;
+        codec_info_.channels = 1;
+        codec_info_.rate     = 256000;
+
+        _codecId = kCodecL16_16kHz;
+    }
+    else if(freq == 32000)
+    {
+        strcpy(codec_info_.plname, "L16");
+        codec_info_.pltype   = -1;
+        codec_info_.plfreq   = 32000;
+        codec_info_.pacsize  = 320;
+        codec_info_.channels = 1;
+        codec_info_.rate     = 512000;
+
+        _codecId = kCodecL16_32Khz;
+    }
+    if((_codecId != kCodecL16_8Khz) &&
+       (_codecId != kCodecL16_16kHz) &&
+       (_codecId != kCodecL16_32Khz))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "CodecInst is not 8KHz PCM or 16KHz PCM!");
+        return -1;
+    }
+    _writing = true;
+    _bytesWritten = 0;
+    return 0;
+}
+
+WebRtc_Word32 ModuleFileUtility::WritePCMData(OutStream& out,
+                                              const WebRtc_Word8*  buffer,
+                                              const WebRtc_UWord32 dataLength)
+{
+    WEBRTC_TRACE(
+        kTraceStream,
+        kTraceFile,
+        _id,
+        "ModuleFileUtility::WritePCMData(out= 0x%x, buf= 0x%x, dataLen= %d)",
+        &out,
+        buffer,
+        dataLength);
+
+    if(buffer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "buffer NULL");
+    }
+
+    if(!out.Write(buffer, dataLength))
+    {
+        return -1;
+    }
+
+    _bytesWritten += dataLength;
+    return dataLength;
+}
+
+WebRtc_Word32 ModuleFileUtility::codec_info(CodecInst& codecInst)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+                 "ModuleFileUtility::codec_info(codecInst= 0x%x)", &codecInst);
+
+    if(!_reading && !_writing)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "CodecInst: not currently reading audio file!");
+        return -1;
+    }
+    memcpy(&codecInst,&codec_info_,sizeof(CodecInst));
+    return 0;
+}
+
+WebRtc_Word32 ModuleFileUtility::set_codec_info(const CodecInst& codecInst)
+{
+
+    _codecId = kCodecNoCodec;
+    if(STR_CASE_CMP(codecInst.plname, "PCMU") == 0)
+    {
+        _codecId = kCodecPcmu;
+    }
+    else if(STR_CASE_CMP(codecInst.plname, "PCMA") == 0)
+    {
+        _codecId = kCodecPcma;
+    }
+    else if(STR_CASE_CMP(codecInst.plname, "L16") == 0)
+    {
+        if(codecInst.plfreq == 8000)
+        {
+            _codecId = kCodecL16_8Khz;
+        }
+        else if(codecInst.plfreq == 16000)
+        {
+            _codecId = kCodecL16_16kHz;
+        }
+        else if(codecInst.plfreq == 32000)
+        {
+            _codecId = kCodecL16_32Khz;
+        }
+    }
+#ifdef WEBRTC_CODEC_GSMAMR
+    else if(STR_CASE_CMP(codecInst.plname, "amr") == 0)
+    {
+        _codecId = kCodecAmr;
+    }
+#endif
+#ifdef WEBRTC_CODEC_GSMAMRWB
+    else if(STR_CASE_CMP(codecInst.plname, "amr-wb") == 0)
+    {
+        _codecId = kCodecAmrWb;
+    }
+#endif
+#ifdef WEBRTC_CODEC_ILBC
+    else if(STR_CASE_CMP(codecInst.plname, "ilbc") == 0)
+    {
+        if(codecInst.pacsize == 160)
+        {
+            _codecId = kCodecIlbc20Ms;
+        }
+        else if(codecInst.pacsize == 240)
+        {
+            _codecId = kCodecIlbc30Ms;
+        }
+    }
+#endif
+#if(defined(WEBRTC_CODEC_ISAC) || defined(WEBRTC_CODEC_ISACFX))
+    else if(STR_CASE_CMP(codecInst.plname, "isac") == 0)
+    {
+        if(codecInst.plfreq == 16000)
+        {
+            _codecId = kCodecIsac;
+        }
+        else if(codecInst.plfreq == 32000)
+        {
+            _codecId = kCodecIsacSwb;
+        }
+    }
+#endif
+#ifdef WEBRTC_CODEC_ISACLC
+    else if(STR_CASE_CMP(codecInst.plname, "isaclc") == 0)
+    {
+        _codecId = kCodecIsacLc;
+    }
+#endif
+#ifdef WEBRTC_CODEC_G722
+    else if(STR_CASE_CMP(codecInst.plname, "G722") == 0)
+    {
+        _codecId = kCodecG722;
+    }
+#endif
+    else if(STR_CASE_CMP(codecInst.plname, "G7221") == 0)
+    {
+#ifdef WEBRTC_CODEC_G722_1
+        if(codecInst.plfreq == 16000)
+        {
+            if(codecInst.rate == 16000)
+            {
+                _codecId = kCodecG722_1_16Kbps;
+            }
+            else if(codecInst.rate == 24000)
+            {
+                _codecId = kCodecG722_1_24Kbps;
+            }
+            else if(codecInst.rate == 32000)
+            {
+                _codecId = kCodecG722_1_32Kbps;
+            }
+        }
+#endif
+#ifdef WEBRTC_CODEC_G722_1C
+        if(codecInst.plfreq == 32000)
+        {
+            if(codecInst.rate == 48000)
+            {
+                _codecId = kCodecG722_1c_48;
+            }
+            else if(codecInst.rate == 32000)
+            {
+                _codecId = kCodecG722_1c_32;
+            }
+            else if(codecInst.rate == 24000)
+            {
+                _codecId = kCodecG722_1c_24;
+            }
+        }
+#endif
+    }
+#ifdef WEBRTC_CODEC_G726
+    else if(STR_CASE_CMP(codecInst.plname, "G726-40") == 0)
+    {
+        _codecId = kCodecG726_40;
+    }
+    else if(STR_CASE_CMP(codecInst.plname, "G726-32") == 0)
+    {
+        _codecId = kCodecG726_24;
+    }
+    else if(STR_CASE_CMP(codecInst.plname, "G726-24") == 0)
+    {
+        _codecId = kCodecG726_32;
+    }
+    else if(STR_CASE_CMP(codecInst.plname, "G726-16") == 0)
+    {
+        _codecId = kCodecG726_16;
+    }
+#endif
+#ifdef WEBRTC_CODEC_G729
+    else if(STR_CASE_CMP(codecInst.plname, "G729") == 0)
+    {
+        _codecId = kCodecG729;
+    }
+#endif
+#ifdef WEBRTC_CODEC_G729_1
+    else if(STR_CASE_CMP(codecInst.plname, "G7291") == 0)
+    {
+        _codecId = kCodecG729_1;
+    }
+#endif
+#ifdef WEBRTC_CODEC_SPEEX
+    else if(STR_CASE_CMP(codecInst.plname, "speex") == 0)
+    {
+        if(codecInst.plfreq == 8000)
+        {
+            _codecId = kCodecSpeex8Khz;
+        }
+        else if(codecInst.plfreq == 16000)
+        {
+            _codecId = kCodecSpeex16Khz;
+        }
+    }
+#endif
+    if(_codecId == kCodecNoCodec)
+    {
+        return -1;
+    }
+    memcpy(&codec_info_, &codecInst, sizeof(CodecInst));
+    return 0;
+}
+
+WebRtc_Word32 ModuleFileUtility::FileDurationMs(const char* fileName,
+                                                const FileFormats  fileFormat,
+                                                const WebRtc_UWord32 freqInHz)
+{
+
+    if(fileName == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id, "filename NULL");
+        return -1;
+    }
+
+    WebRtc_Word32 time_in_ms = -1;
+    struct stat file_size;
+    if(stat(fileName,&file_size) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "failed to retrieve file size with stat!");
+        return -1;
+    }
+    FileWrapper* inStreamObj = FileWrapper::Create();
+    if(inStreamObj == NULL)
+    {
+        WEBRTC_TRACE(kTraceMemory, kTraceFile, _id,
+                     "failed to create InStream object!");
+        return -1;
+    }
+    if(inStreamObj->OpenFile(fileName, true) == -1)
+    {
+        delete inStreamObj;
+        WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                     "failed to open file %s!", fileName);
+        return -1;
+    }
+
+    switch (fileFormat)
+    {
+        case kFileFormatWavFile:
+        {
+            if(ReadWavHeader(*inStreamObj) == -1)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                             "failed to read WAV file header!");
+                return -1;
+            }
+            time_in_ms = ((file_size.st_size - 44) /
+                          (_wavFormatObj.nAvgBytesPerSec/1000));
+            break;
+        }
+        case kFileFormatPcm16kHzFile:
+        {
+            // 16 samples per ms. 2 bytes per sample.
+            WebRtc_Word32 denominator = 16*2;
+            time_in_ms = (file_size.st_size)/denominator;
+            break;
+        }
+        case kFileFormatPcm8kHzFile:
+        {
+            // 8 samples per ms. 2 bytes per sample.
+            WebRtc_Word32 denominator = 8*2;
+            time_in_ms = (file_size.st_size)/denominator;
+            break;
+        }
+        case kFileFormatCompressedFile:
+        {
+            WebRtc_Word32 cnt = 0;
+            WebRtc_Word32 read_len = 0;
+            char buf[64];
+            do
+            {
+                read_len = inStreamObj->Read(&buf[cnt++], 1);
+                if(read_len != 1)
+                {
+                    return -1;
+                }
+            } while ((buf[cnt-1] != '\n') && (64 > cnt));
+
+            if(cnt == 64)
+            {
+                return -1;
+            }
+            else
+            {
+                buf[cnt] = 0;
+            }
+#ifdef WEBRTC_CODEC_GSMAMR
+            if(!strcmp("#!AMR\n", buf))
+            {
+                WebRtc_UWord8 dummy;
+                read_len = inStreamObj->Read(&dummy, 1);
+                if(read_len != 1)
+                {
+                    return -1;
+                }
+
+                WebRtc_Word16 AMRMode = (dummy>>3)&0xF;
+
+                // TODO (hellner): use tables instead of hardcoding like this!
+                //                 Additionally, this calculation does not
+                //                 take octet alignment into consideration.
+                switch (AMRMode)
+                {
+                        // Mode 0: 4.75 kbit/sec -> 95 bits per 20 ms frame.
+                        // 20 ms = 95 bits ->
+                        // file size in bytes * 8 / 95 is the number of
+                        // 20 ms frames in the file ->
+                        // time_in_ms = file size * 8 / 95 * 20
+                    case 0:
+                        time_in_ms = ((file_size.st_size)*160)/95;
+                        break;
+                        // Mode 1: 5.15 kbit/sec -> 103 bits per 20 ms frame.
+                    case 1:
+                        time_in_ms = ((file_size.st_size)*160)/103;
+                        break;
+                        // Mode 2: 5.90 kbit/sec -> 118 bits per 20 ms frame.
+                    case 2:
+                        time_in_ms = ((file_size.st_size)*160)/118;
+                        break;
+                        // Mode 3: 6.70 kbit/sec -> 134 bits per 20 ms frame.
+                    case 3:
+                        time_in_ms = ((file_size.st_size)*160)/134;
+                        break;
+                        // Mode 4: 7.40 kbit/sec -> 148 bits per 20 ms frame.
+                    case 4:
+                        time_in_ms = ((file_size.st_size)*160)/148;
+                        break;
+                        // Mode 5: 7.95 bit/sec -> 159 bits per 20 ms frame.
+                    case 5:
+                        time_in_ms = ((file_size.st_size)*160)/159;
+                        break;
+                        // Mode 6: 10.2 bit/sec -> 204 bits per 20 ms frame.
+                    case 6:
+                        time_in_ms = ((file_size.st_size)*160)/204;
+                        break;
+                        // Mode 7: 12.2 bit/sec -> 244 bits per 20 ms frame.
+                    case 7:
+                        time_in_ms = ((file_size.st_size)*160)/244;
+                        break;
+                        // Mode 8: SID Mode -> 39 bits per 20 ms frame.
+                    case 8:
+                        time_in_ms = ((file_size.st_size)*160)/39;
+                        break;
+                    default:
+                        break;
+                }
+            }
+#endif
+#ifdef WEBRTC_CODEC_GSMAMRWB
+            if(!strcmp("#!AMRWB\n", buf))
+            {
+                WebRtc_UWord8 dummy;
+                read_len = inStreamObj->Read(&dummy, 1);
+                if(read_len != 1)
+                {
+                    return -1;
+                }
+
+                // TODO (hellner): use tables instead of hardcoding like this!
+                WebRtc_Word16 AMRWBMode = (dummy>>3)&0xF;
+                switch(AMRWBMode)
+                {
+                        // Mode 0: 6.6 kbit/sec -> 132 bits per 20 ms frame.
+                    case 0:
+                        time_in_ms = ((file_size.st_size)*160)/132;
+                        break;
+                        // Mode 1: 8.85 kbit/sec -> 177 bits per 20 ms frame.
+                    case 1:
+                        time_in_ms = ((file_size.st_size)*160)/177;
+                        break;
+                        // Mode 2: 12.65 kbit/sec -> 253 bits per 20 ms frame.
+                    case 2:
+                        time_in_ms = ((file_size.st_size)*160)/253;
+                        break;
+                        // Mode 3: 14.25 kbit/sec -> 285 bits per 20 ms frame.
+                    case 3:
+                        time_in_ms = ((file_size.st_size)*160)/285;
+                        break;
+                        // Mode 4: 15.85 kbit/sec -> 317 bits per 20 ms frame.
+                    case 4:
+                        time_in_ms = ((file_size.st_size)*160)/317;
+                        break;
+                        // Mode 5: 18.25 kbit/sec -> 365 bits per 20 ms frame.
+                    case 5:
+                        time_in_ms = ((file_size.st_size)*160)/365;
+                        break;
+                        // Mode 6: 19.85 kbit/sec -> 397 bits per 20 ms frame.
+                    case 6:
+                        time_in_ms = ((file_size.st_size)*160)/397;
+                        break;
+                        // Mode 7: 23.05 kbit/sec -> 461 bits per 20 ms frame.
+                    case 7:
+                        time_in_ms = ((file_size.st_size)*160)/461;
+                        break;
+                        // Mode 8: 23.85 kbit/sec -> 477 bits per 20 ms frame.
+                    case 8:
+                        time_in_ms = ((file_size.st_size)*160)/477;
+                        break;
+                    default:
+                        delete inStreamObj;
+                        return -1;
+                }
+            }
+#endif
+#ifdef WEBRTC_CODEC_ILBC
+            if(!strcmp("#!iLBC20\n", buf))
+            {
+                // 20 ms is 304 bits
+                time_in_ms = ((file_size.st_size)*160)/304;
+                break;
+            }
+            if(!strcmp("#!iLBC30\n", buf))
+            {
+                // 30 ms takes 400 bits.
+                // file size in bytes * 8 / 400 is the number of
+                // 30 ms frames in the file ->
+                // time_in_ms = file size * 8 / 400 * 30
+                time_in_ms = ((file_size.st_size)*240)/400;
+                break;
+            }
+#endif
+        }
+        case kFileFormatPreencodedFile:
+        {
+            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                         "cannot determine duration of Pre-Encoded file!");
+            break;
+        }
+        default:
+            WEBRTC_TRACE(kTraceError, kTraceFile, _id,
+                         "unsupported file format %d!", fileFormat);
+            break;
+    }
+    inStreamObj->CloseFile();
+    delete inStreamObj;
+    return time_in_ms;
+}
+
+WebRtc_UWord32 ModuleFileUtility::PlayoutPositionMs()
+{
+    WEBRTC_TRACE(kTraceStream, kTraceFile, _id,
+               "ModuleFileUtility::PlayoutPosition()");
+
+    if(_reading)
+    {
+        return _playoutPositionMs;
+    }
+    else
+    {
+        return 0;
+    }
+}
+} // namespace webrtc
diff --git a/src/modules/media_file/source/media_file_utility.h b/src/modules/media_file/source/media_file_utility.h
new file mode 100644
index 0000000..d3eaef1
--- /dev/null
+++ b/src/modules/media_file/source/media_file_utility.h
@@ -0,0 +1,349 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Note: the class cannot be used for reading and writing at the same time.
+#ifndef WEBRTC_MODULES_MEDIA_FILE_SOURCE_MEDIA_FILE_UTILITY_H_
+#define WEBRTC_MODULES_MEDIA_FILE_SOURCE_MEDIA_FILE_UTILITY_H_
+
+#include <stdio.h>
+
+#include "common_types.h"
+#include "media_file_defines.h"
+
+namespace webrtc {
+class AviFile;
+class InStream;
+class OutStream;
+
+class ModuleFileUtility
+{
+public:
+
+    ModuleFileUtility(const WebRtc_Word32 id);
+    ~ModuleFileUtility();
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+    // Open the file specified by fileName for reading (relative path is
+    // allowed). If loop is true the file will be played until StopPlaying() is
+    // called. When end of file is reached the file is read from the start.
+    // Only video will be read if videoOnly is true.
+    WebRtc_Word32 InitAviReading(const char* fileName, bool videoOnly,
+                                 bool loop);
+
+    // Put 10-60ms of audio data from file into the outBuffer depending on
+    // codec frame size. bufferLengthInBytes indicates the size of outBuffer.
+    // The return value is the number of bytes written to audioBuffer.
+    // Note: This API only play mono audio but can be used on file containing
+    // audio with more channels (in which case the audio will be coverted to
+    // mono).
+    WebRtc_Word32 ReadAviAudioData(WebRtc_Word8* outBuffer,
+                                   const WebRtc_UWord32 bufferLengthInBytes);
+
+    // Put one video frame into outBuffer. bufferLengthInBytes indicates the
+    // size of outBuffer.
+    // The return value is the number of bytes written to videoBuffer.
+    WebRtc_Word32 ReadAviVideoData(WebRtc_Word8* videoBuffer,
+                                   const WebRtc_UWord32 bufferLengthInBytes);
+
+    // Open/create the file specified by fileName for writing audio/video data
+    // (relative path is allowed). codecInst specifies the encoding of the audio
+    // data. videoCodecInst specifies the encoding of the video data. Only video
+    // data will be recorded if videoOnly is true.
+    WebRtc_Word32 InitAviWriting(const char* filename,
+                                 const CodecInst& codecInst,
+                                 const VideoCodec& videoCodecInst,
+                                 const bool videoOnly);
+
+    // Write one audio frame, i.e. the bufferLengthinBytes first bytes of
+    // audioBuffer, to file. The audio frame size is determined by the
+    // codecInst.pacsize parameter of the last sucessfull
+    // InitAviWriting(..) call.
+    // Note: bufferLength must be exactly one frame.
+    WebRtc_Word32 WriteAviAudioData(const WebRtc_Word8* audioBuffer,
+                                    WebRtc_UWord32 bufferLengthInBytes);
+
+
+    // Write one video frame, i.e. the bufferLength first bytes of videoBuffer,
+    // to file.
+    // Note: videoBuffer can contain encoded data. The codec used must be the
+    // same as what was specified by videoCodecInst for the last successfull
+    // InitAviWriting(..) call. The videoBuffer must contain exactly
+    // one video frame.
+    WebRtc_Word32 WriteAviVideoData(const WebRtc_Word8* videoBuffer,
+                                    WebRtc_UWord32 bufferLengthInBytes);
+
+    // Stop recording to file or stream.
+    WebRtc_Word32 CloseAviFile();
+
+    WebRtc_Word32 VideoCodecInst(VideoCodec& codecInst);
+#endif // #ifdef WEBRTC_MODULE_UTILITY_VIDEO
+
+    // Prepare for playing audio from stream.
+    // startPointMs and stopPointMs, unless zero, specify what part of the file
+    // should be read. From startPointMs ms to stopPointMs ms.
+    WebRtc_Word32 InitWavReading(InStream& stream,
+                                 const WebRtc_UWord32 startPointMs = 0,
+                                 const WebRtc_UWord32 stopPointMs = 0);
+
+    // Put 10-60ms of audio data from stream into the audioBuffer depending on
+    // codec frame size. dataLengthInBytes indicates the size of audioBuffer.
+    // The return value is the number of bytes written to audioBuffer.
+    // Note: This API only play mono audio but can be used on file containing
+    // audio with more channels (in which case the audio will be converted to
+    // mono).
+    WebRtc_Word32 ReadWavDataAsMono(InStream& stream, WebRtc_Word8* audioBuffer,
+                                    const WebRtc_UWord32 dataLengthInBytes);
+
+    // Put 10-60ms, depending on codec frame size, of audio data from file into
+    // audioBufferLeft and audioBufferRight. The buffers contain the left and
+    // right channel of played out stereo audio.
+    // dataLengthInBytes  indicates the size of both audioBufferLeft and
+    // audioBufferRight.
+    // The return value is the number of bytes read for each buffer.
+    // Note: This API can only be successfully called for WAV files with stereo
+    // audio.
+    WebRtc_Word32 ReadWavDataAsStereo(InStream& wav,
+                                      WebRtc_Word8* audioBufferLeft,
+                                      WebRtc_Word8* audioBufferRight,
+                                      const WebRtc_UWord32 bufferLength);
+
+    // Prepare for recording audio to stream.
+    // codecInst specifies the encoding of the audio data.
+    // Note: codecInst.channels should be set to 2 for stereo (and 1 for
+    // mono). Stereo is only supported for WAV files.
+    WebRtc_Word32 InitWavWriting(OutStream& stream, const CodecInst& codecInst);
+
+    // Write one audio frame, i.e. the bufferLength first bytes of audioBuffer,
+    // to file. The audio frame size is determined by the codecInst.pacsize
+    // parameter of the last sucessfull StartRecordingAudioFile(..) call.
+    // The return value is the number of bytes written to audioBuffer.
+    WebRtc_Word32 WriteWavData(OutStream& stream,
+                               const WebRtc_Word8* audioBuffer,
+                               const WebRtc_UWord32 bufferLength);
+
+    // Finalizes the WAV header so that it is correct if nothing more will be
+    // written to stream.
+    // Note: this API must be called before closing stream to ensure that the
+    //       WAVE header is updated with the file size. Don't call this API
+    //       if more samples are to be written to stream.
+    WebRtc_Word32 UpdateWavHeader(OutStream& stream);
+
+    // Prepare for playing audio from stream.
+    // startPointMs and stopPointMs, unless zero, specify what part of the file
+    // should be read. From startPointMs ms to stopPointMs ms.
+    // freqInHz is the PCM sampling frequency.
+    // NOTE, allowed frequencies are 8000, 16000 and 32000 (Hz)
+    WebRtc_Word32 InitPCMReading(InStream& stream,
+                                 const WebRtc_UWord32 startPointMs = 0,
+                                 const WebRtc_UWord32 stopPointMs = 0,
+                                 const WebRtc_UWord32 freqInHz = 16000);
+
+    // Put 10-60ms of audio data from stream into the audioBuffer depending on
+    // codec frame size. dataLengthInBytes indicates the size of audioBuffer.
+    // The return value is the number of bytes written to audioBuffer.
+    WebRtc_Word32 ReadPCMData(InStream& stream, WebRtc_Word8* audioBuffer,
+                              const WebRtc_UWord32 dataLengthInBytes);
+
+    // Prepare for recording audio to stream.
+    // freqInHz is the PCM sampling frequency.
+    // NOTE, allowed frequencies are 8000, 16000 and 32000 (Hz)
+    WebRtc_Word32 InitPCMWriting(OutStream& stream,
+                                 const WebRtc_UWord32 freqInHz = 16000);
+
+    // Write one 10ms audio frame, i.e. the bufferLength first bytes of
+    // audioBuffer, to file. The audio frame size is determined by the freqInHz
+    // parameter of the last sucessfull InitPCMWriting(..) call.
+    // The return value is the number of bytes written to audioBuffer.
+    WebRtc_Word32 WritePCMData(OutStream& stream,
+                               const WebRtc_Word8* audioBuffer,
+                               WebRtc_UWord32 bufferLength);
+
+    // Prepare for playing audio from stream.
+    // startPointMs and stopPointMs, unless zero, specify what part of the file
+    // should be read. From startPointMs ms to stopPointMs ms.
+    WebRtc_Word32 InitCompressedReading(InStream& stream,
+                                        const WebRtc_UWord32 startPointMs = 0,
+                                        const WebRtc_UWord32 stopPointMs = 0);
+
+    // Put 10-60ms of audio data from stream into the audioBuffer depending on
+    // codec frame size. dataLengthInBytes indicates the size of audioBuffer.
+    // The return value is the number of bytes written to audioBuffer.
+    WebRtc_Word32 ReadCompressedData(InStream& stream,
+                                     WebRtc_Word8* audioBuffer,
+                                     const WebRtc_UWord32 dataLengthInBytes);
+
+    // Prepare for recording audio to stream.
+    // codecInst specifies the encoding of the audio data.
+    WebRtc_Word32 InitCompressedWriting(OutStream& stream,
+                                        const CodecInst& codecInst);
+
+    // Write one audio frame, i.e. the bufferLength first bytes of audioBuffer,
+    // to file. The audio frame size is determined by the codecInst.pacsize
+    // parameter of the last sucessfull InitCompressedWriting(..) call.
+    // The return value is the number of bytes written to stream.
+    // Note: bufferLength must be exactly one frame.
+    WebRtc_Word32 WriteCompressedData(OutStream& stream,
+                                      const WebRtc_Word8* audioBuffer,
+                                      const WebRtc_UWord32 bufferLength);
+
+    // Prepare for playing audio from stream.
+    // codecInst specifies the encoding of the audio data.
+    WebRtc_Word32 InitPreEncodedReading(InStream& stream,
+                                        const CodecInst& codecInst);
+
+    // Put 10-60ms of audio data from stream into the audioBuffer depending on
+    // codec frame size. dataLengthInBytes indicates the size of audioBuffer.
+    // The return value is the number of bytes written to audioBuffer.
+    WebRtc_Word32 ReadPreEncodedData(InStream& stream,
+                                     WebRtc_Word8* audioBuffer,
+                                     const WebRtc_UWord32 dataLengthInBytes);
+
+    // Prepare for recording audio to stream.
+    // codecInst specifies the encoding of the audio data.
+    WebRtc_Word32 InitPreEncodedWriting(OutStream& stream,
+                                        const CodecInst& codecInst);
+
+    // Write one audio frame, i.e. the bufferLength first bytes of audioBuffer,
+    // to stream. The audio frame size is determined by the codecInst.pacsize
+    // parameter of the last sucessfull InitPreEncodedWriting(..) call.
+   // The return value is the number of bytes written to stream.
+    // Note: bufferLength must be exactly one frame.
+    WebRtc_Word32 WritePreEncodedData(OutStream& stream,
+                                      const WebRtc_Word8* inData,
+                                      const WebRtc_UWord32 dataLengthInBytes);
+
+    // Set durationMs to the size of the file (in ms) specified by fileName.
+    // freqInHz specifies the sampling frequency of the file.
+    WebRtc_Word32 FileDurationMs(const char* fileName,
+                                 const FileFormats fileFormat,
+                                 const WebRtc_UWord32 freqInHz = 16000);
+
+    // Return the number of ms that have been played so far.
+    WebRtc_UWord32 PlayoutPositionMs();
+
+    // Update codecInst according to the current audio codec being used for
+    // reading or writing.
+    WebRtc_Word32 codec_info(CodecInst& codecInst);
+
+private:
+    // Biggest WAV frame supported is 10 ms at 48kHz of 2 channel, 16 bit audio.
+    enum{WAV_MAX_BUFFER_SIZE = 480*2*2};
+
+
+    WebRtc_Word32 InitWavCodec(WebRtc_UWord32 samplesPerSec,
+                               WebRtc_UWord32 channels,
+                               WebRtc_UWord32 bitsPerSample,
+                               WebRtc_UWord32 formatTag);
+
+    // Parse the WAV header in stream.
+    WebRtc_Word32 ReadWavHeader(InStream& stream);
+
+    // Update the WAV header. freqInHz, bytesPerSample, channels, format,
+    // lengthInBytes specify characterists of the audio data.
+    // freqInHz is the sampling frequency. bytesPerSample is the sample size in
+    // bytes. channels is the number of channels, e.g. 1 is mono and 2 is
+    // stereo. format is the encode format (e.g. PCMU, PCMA, PCM etc).
+    // lengthInBytes is the number of bytes the audio samples are using up.
+    WebRtc_Word32 WriteWavHeader(OutStream& stream,
+                                 const WebRtc_UWord32 freqInHz,
+                                 const WebRtc_UWord32 bytesPerSample,
+                                 const WebRtc_UWord32 channels,
+                                 const WebRtc_UWord32 format,
+                                 const WebRtc_UWord32 lengthInBytes);
+
+    // Put dataLengthInBytes of audio data from stream into the audioBuffer.
+    // The return value is the number of bytes written to audioBuffer.
+    WebRtc_Word32 ReadWavData(InStream& stream, WebRtc_UWord8* audioBuffer,
+                              const WebRtc_UWord32 dataLengthInBytes);
+
+    // Update the current audio codec being used for reading or writing
+    // according to codecInst.
+    WebRtc_Word32 set_codec_info(const CodecInst& codecInst);
+
+    struct WAVE_FMTINFO_header
+    {
+        WebRtc_Word16 formatTag;
+        WebRtc_Word16 nChannels;
+        WebRtc_Word32 nSamplesPerSec;
+        WebRtc_Word32 nAvgBytesPerSec;
+        WebRtc_Word16 nBlockAlign;
+        WebRtc_Word16 nBitsPerSample;
+    };
+    // Identifiers for preencoded files.
+    enum MediaFileUtility_CodecType
+    {
+        kCodecNoCodec  = 0,
+        kCodecIsac,
+        kCodecIsacSwb,
+        kCodecIsacLc,
+        kCodecL16_8Khz,
+        kCodecL16_16kHz,
+        kCodecL16_32Khz,
+        kCodecPcmu,
+        kCodecPcma,
+        kCodecIlbc20Ms,
+        kCodecIlbc30Ms,
+        kCodecG722,
+        kCodecG722_1_32Kbps,
+        kCodecG722_1_24Kbps,
+        kCodecG722_1_16Kbps,
+        kCodecG722_1c_48,
+        kCodecG722_1c_32,
+        kCodecG722_1c_24,
+        kCodecAmr,
+        kCodecAmrWb,
+        kCodecG729,
+        kCodecG729_1,
+        kCodecG726_40,
+        kCodecG726_32,
+        kCodecG726_24,
+        kCodecG726_16,
+        kCodecSpeex8Khz,
+        kCodecSpeex16Khz
+    };
+
+    // TODO (hellner): why store multiple formats. Just store either codec_info_
+    //                 or _wavFormatObj and supply conversion functions.
+    WAVE_FMTINFO_header _wavFormatObj;
+    WebRtc_Word32 _dataSize;      // Chunk size if reading a WAV file
+    // Number of bytes to read. I.e. frame size in bytes. May be multiple
+    // chunks if reading WAV.
+    WebRtc_Word32 _readSizeBytes;
+
+    WebRtc_Word32 _id;
+
+    WebRtc_UWord32 _stopPointInMs;
+    WebRtc_UWord32 _startPointInMs;
+    WebRtc_UWord32 _playoutPositionMs;
+    WebRtc_UWord32 _bytesWritten;
+
+    CodecInst codec_info_;
+    MediaFileUtility_CodecType _codecId;
+
+    // The amount of bytes, on average, used for one audio sample.
+    WebRtc_Word32  _bytesPerSample;
+    WebRtc_Word32  _readPos;
+
+    // Only reading or writing can be enabled, not both.
+    bool _reading;
+    bool _writing;
+
+    // Scratch buffer used for turning stereo audio to mono.
+    WebRtc_UWord8 _tempData[WAV_MAX_BUFFER_SIZE];
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+    AviFile* _aviAudioInFile;
+    AviFile* _aviVideoInFile;
+    AviFile* _aviOutFile;
+    VideoCodec _videoCodec;
+#endif
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_MEDIA_FILE_SOURCE_MEDIA_FILE_UTILITY_H_
diff --git a/src/modules/modules.gyp b/src/modules/modules.gyp
new file mode 100644
index 0000000..3209cf7
--- /dev/null
+++ b/src/modules/modules.gyp
@@ -0,0 +1,58 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'includes': [
+    '../build/common.gypi',
+    'audio_coding/codecs/cng/cng.gypi',
+    'audio_coding/codecs/g711/g711.gypi',
+    'audio_coding/codecs/g722/g722.gypi',
+    'audio_coding/codecs/ilbc/ilbc.gypi',
+    'audio_coding/codecs/isac/main/source/isac.gypi',
+    'audio_coding/codecs/isac/fix/source/isacfix.gypi',
+    'audio_coding/codecs/pcm16b/pcm16b.gypi',
+    'audio_coding/main/source/audio_coding_module.gypi',
+    'audio_coding/neteq/neteq.gypi',
+    'audio_conference_mixer/source/audio_conference_mixer.gypi',
+    'audio_device/main/source/audio_device.gypi',
+    'audio_processing/audio_processing.gypi',
+    'audio_processing/aec/aec.gypi',
+    'audio_processing/aecm/aecm.gypi',
+    'audio_processing/agc/agc.gypi',
+    'audio_processing/ns/ns.gypi',
+    'audio_processing/utility/util.gypi',
+    'bitrate_controller/bitrate_controller.gypi',
+    'media_file/source/media_file.gypi',
+    'remote_bitrate_estimator/remote_bitrate_estimator.gypi',
+    'udp_transport/source/udp_transport.gypi',
+    'utility/source/utility.gypi',
+    'video_coding/codecs/i420/main/source/i420.gypi',
+    'video_coding/main/source/video_coding.gypi',
+    'video_capture/main/source/video_capture.gypi',
+    'video_processing/main/source/video_processing.gypi',
+    'video_render/main/source/video_render.gypi',
+    'rtp_rtcp/source/rtp_rtcp.gypi',
+  ],
+
+  'conditions': [
+    ['include_tests==1', {
+      'includes': [
+        'audio_coding/codecs/isac/isac_test.gypi',
+        'audio_coding/codecs/isac/isacfix_test.gypi',
+        'audio_processing/apm_tests.gypi',
+        'rtp_rtcp/source/rtp_rtcp_tests.gypi',
+        'rtp_rtcp/test/testFec/test_fec.gypi',
+        'rtp_rtcp/test/testAPI/test_api.gypi',
+        'video_coding/main/source/video_coding_test.gypi',
+        'video_coding/codecs/test/video_codecs_test_framework.gypi',
+        'video_coding/codecs/tools/video_codecs_tools.gypi',
+        'video_processing/main/test/vpm_tests.gypi',
+      ], # includes
+    }], # include_tests
+  ], # conditions
+}
diff --git a/src/modules/remote_bitrate_estimator/OWNERS b/src/modules/remote_bitrate_estimator/OWNERS
new file mode 100644
index 0000000..b705ede
--- /dev/null
+++ b/src/modules/remote_bitrate_estimator/OWNERS
@@ -0,0 +1,5 @@
+pwestin@webrtc.org
+stefan@webrtc.org
+henrik.lundin@webrtc.org
+mflodman@webrtc.org
+asapersson@webrtc.org
\ No newline at end of file
diff --git a/src/modules/remote_bitrate_estimator/bitrate_estimator.cc b/src/modules/remote_bitrate_estimator/bitrate_estimator.cc
new file mode 100644
index 0000000..84c287c
--- /dev/null
+++ b/src/modules/remote_bitrate_estimator/bitrate_estimator.cc
@@ -0,0 +1,91 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "bitrate_estimator.h"
+
+namespace webrtc {
+
+enum { kBitrateAverageWindow = 2000 };
+
+BitRateStats::BitRateStats()
+    :_dataSamples(), _accumulatedBytes(0)
+{
+}
+
+BitRateStats::~BitRateStats()
+{
+    while (_dataSamples.size() > 0)
+    {
+        delete _dataSamples.front();
+        _dataSamples.pop_front();
+    }
+}
+
+void BitRateStats::Init()
+{
+    _accumulatedBytes = 0;
+    while (_dataSamples.size() > 0)
+    {
+        delete _dataSamples.front();
+        _dataSamples.pop_front();
+    }
+}
+
+void BitRateStats::Update(WebRtc_UWord32 packetSizeBytes, WebRtc_Word64 nowMs)
+{
+    // Find an empty slot for storing the new sample and at the same time
+    // accumulate the history.
+    _dataSamples.push_back(new DataTimeSizeTuple(packetSizeBytes, nowMs));
+    _accumulatedBytes += packetSizeBytes;
+    EraseOld(nowMs);
+}
+
+void BitRateStats::EraseOld(WebRtc_Word64 nowMs)
+{
+    while (_dataSamples.size() > 0)
+    {
+        if (nowMs - _dataSamples.front()->_timeCompleteMs >
+            kBitrateAverageWindow)
+        {
+            // Delete old sample
+            _accumulatedBytes -= _dataSamples.front()->_sizeBytes;
+            delete _dataSamples.front();
+            _dataSamples.pop_front();
+        }
+        else
+        {
+            break;
+        }
+    }
+}
+
+WebRtc_UWord32 BitRateStats::BitRate(WebRtc_Word64 nowMs)
+{
+    // Calculate the average bit rate the past BITRATE_AVERAGE_WINDOW ms.
+    // Removes any old samples from the list.
+    EraseOld(nowMs);
+    WebRtc_Word64 timeOldest = nowMs;
+    if (_dataSamples.size() > 0)
+    {
+        timeOldest = _dataSamples.front()->_timeCompleteMs;
+    }
+    // Update average bit rate
+    float denom = static_cast<float>(nowMs - timeOldest);
+    if (nowMs == timeOldest)
+    {
+        // Calculate with a one second window when we haven't
+        // received more than one packet.
+        denom = 1000.0;
+    }
+    return static_cast<WebRtc_UWord32>(_accumulatedBytes * 8.0f * 1000.0f /
+                                       denom + 0.5f);
+}
+
+}  // namespace webrtc
diff --git a/src/modules/remote_bitrate_estimator/bitrate_estimator.h b/src/modules/remote_bitrate_estimator/bitrate_estimator.h
new file mode 100644
index 0000000..a3622a7
--- /dev/null
+++ b/src/modules/remote_bitrate_estimator/bitrate_estimator.h
@@ -0,0 +1,50 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_BITRATE_ESTIMATOR_H_
+#define WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_BITRATE_ESTIMATOR_H_
+
+#include <list>
+
+#include "typedefs.h"
+
+namespace webrtc {
+
+class BitRateStats
+{
+public:
+    BitRateStats();
+    ~BitRateStats();
+
+    void Init();
+    void Update(WebRtc_UWord32 packetSizeBytes, WebRtc_Word64 nowMs);
+    WebRtc_UWord32 BitRate(WebRtc_Word64 nowMs);
+
+private:
+    struct DataTimeSizeTuple
+    {
+        DataTimeSizeTuple(uint32_t sizeBytes, int64_t timeCompleteMs)
+            :
+              _sizeBytes(sizeBytes),
+              _timeCompleteMs(timeCompleteMs) {}
+
+        WebRtc_UWord32    _sizeBytes;
+        WebRtc_Word64     _timeCompleteMs;
+    };
+
+    void EraseOld(WebRtc_Word64 nowMs);
+
+    std::list<DataTimeSizeTuple*> _dataSamples;
+    WebRtc_UWord32 _accumulatedBytes;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_BITRATE_ESTIMATOR_H_
diff --git a/src/modules/remote_bitrate_estimator/bitrate_estimator_unittest.cc b/src/modules/remote_bitrate_estimator/bitrate_estimator_unittest.cc
new file mode 100644
index 0000000..b42798a
--- /dev/null
+++ b/src/modules/remote_bitrate_estimator/bitrate_estimator_unittest.cc
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file includes unit tests for the bitrate estimator.
+ */
+
+#include <gtest/gtest.h>
+
+#include "typedefs.h"
+#include "bitrate_estimator.h"
+
+namespace {
+
+using webrtc::BitRateStats;
+
+class BitRateStatsTest : public ::testing::Test
+{
+protected:
+    BitRateStatsTest() {};
+    BitRateStats bitRate;
+};
+
+TEST_F(BitRateStatsTest, TestStrictMode)
+{
+    WebRtc_Word64 nowMs = 0;
+    // Should be initialized to 0.
+    EXPECT_EQ(0u, bitRate.BitRate(nowMs));
+    bitRate.Update(1500, nowMs);
+    // Expecting 12 kbps given a 1000 window with one 1500 bytes packet.
+    EXPECT_EQ(12000u, bitRate.BitRate(nowMs));
+    bitRate.Init();
+    // Expecting 0 after init.
+    EXPECT_EQ(0u, bitRate.BitRate(nowMs));
+    for (int i = 0; i < 100000; ++i)
+    {
+        if (nowMs % 10 == 0)
+            bitRate.Update(1500, nowMs);
+        // Approximately 1200 kbps expected. Not exact since when packets
+        // are removed we will jump 10 ms to the next packet.
+        if (nowMs > 0 && nowMs % 2000 == 0)
+            EXPECT_NEAR(1200000u, bitRate.BitRate(nowMs), 6000u);
+        nowMs += 1;
+    }
+    nowMs += 2000;
+    // The window is 2 seconds. If nothing has been received for that time
+    // the estimate should be 0.
+    EXPECT_EQ(0u, bitRate.BitRate(nowMs));
+}
+
+}
diff --git a/src/modules/remote_bitrate_estimator/include/bwe_defines.h b/src/modules/remote_bitrate_estimator/include/bwe_defines.h
new file mode 100644
index 0000000..173a284
--- /dev/null
+++ b/src/modules/remote_bitrate_estimator/include/bwe_defines.h
@@ -0,0 +1,57 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_BWE_DEFINES_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_BWE_DEFINES_H_
+
+#include "typedefs.h"
+
+#define BWE_MAX(a,b) ((a)>(b)?(a):(b))
+#define BWE_MIN(a,b) ((a)<(b)?(a):(b))
+
+namespace webrtc {
+enum BandwidthUsage
+{
+    kBwNormal,
+    kBwOverusing,
+    kBwUnderUsing
+};
+
+enum RateControlState
+{
+    kRcHold,
+    kRcIncrease,
+    kRcDecrease
+};
+
+enum RateControlRegion
+{
+    kRcNearMax,
+    kRcAboveMax,
+    kRcMaxUnknown
+};
+
+class RateControlInput
+{
+public:
+    RateControlInput(BandwidthUsage bwState,
+                     WebRtc_UWord32 incomingBitRate,
+                     double noiseVar)
+        : _bwState(bwState),
+          _incomingBitRate(incomingBitRate),
+          _noiseVar(noiseVar) {}
+
+    BandwidthUsage  _bwState;
+    WebRtc_UWord32      _incomingBitRate;
+    double              _noiseVar;
+};
+} //namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_BWE_DEFINES_H_
diff --git a/src/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_observer.h b/src/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_observer.h
new file mode 100644
index 0000000..76a9583
--- /dev/null
+++ b/src/modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_observer.h
@@ -0,0 +1,28 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_MOCK_MOCK_REMOTE_BITRATE_ESTIMATOR_H_
+#define WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_MOCK_MOCK_REMOTE_BITRATE_ESTIMATOR_H_
+
+#include <gmock/gmock.h>
+
+#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+
+namespace webrtc {
+
+class MockRemoteBitrateObserver : public RemoteBitrateObserver {
+ public:
+  MOCK_METHOD2(OnReceiveBitrateChanged,
+      void(unsigned int ssrc, unsigned int bitrate));
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_MOCK_MOCK_REMOTE_BITRATE_ESTIMATOR_H_
diff --git a/src/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h b/src/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h
new file mode 100644
index 0000000..d5678e4
--- /dev/null
+++ b/src/modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h
@@ -0,0 +1,94 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// RemoteBitrateEstimator
+// This class estimates the incoming bitrate capacity.
+
+#ifndef WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_REMOTE_BITRATE_ESTIMATOR_H_
+#define WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_REMOTE_BITRATE_ESTIMATOR_H_
+
+#include <map>
+
+#include "modules/remote_bitrate_estimator/bitrate_estimator.h"
+#include "modules/remote_bitrate_estimator/overuse_detector.h"
+#include "modules/remote_bitrate_estimator/remote_rate_control.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+// RemoteBitrateObserver is used to signal changes in bitrate estimates for
+// the incoming stream.
+class RemoteBitrateObserver {
+ public:
+  // Called when a receive channel has a new bitrate estimate for the incoming
+  // stream.
+  virtual void OnReceiveBitrateChanged(unsigned int ssrc,
+                                       unsigned int bitrate) = 0;
+
+  virtual ~RemoteBitrateObserver() {}
+};
+
+class RemoteBitrateEstimator {
+ public:
+  RemoteBitrateEstimator(RemoteBitrateObserver* observer,
+                         const OverUseDetectorOptions& options);
+
+  // Called for each incoming packet. If this is a new SSRC, a new
+  // BitrateControl will be created.
+  void IncomingPacket(unsigned int ssrc,
+                      int packet_size,
+                      int64_t arrival_time,
+                      uint32_t rtp_timestamp,
+                      int64_t packet_send_time);
+
+  // Triggers a new estimate calculation for the stream identified by |ssrc|.
+  void UpdateEstimate(unsigned int ssrc, int64_t time_now);
+
+  // Set the current round-trip time experienced by the stream identified by
+  // |ssrc|.
+  void SetRtt(unsigned int ssrc);
+
+  // Removes all data for |ssrc|.
+  void RemoveStream(unsigned int ssrc);
+
+  // Returns true if a valid estimate exists for a stream identified by |ssrc|
+  // and sets |bitrate_bps| to the estimated bitrate in bits per second.
+  bool LatestEstimate(unsigned int ssrc, unsigned int* bitrate_bps) const;
+
+ private:
+  struct BitrateControls {
+    explicit BitrateControls(const OverUseDetectorOptions& options)
+        : remote_rate(),
+          overuse_detector(options),
+          incoming_bitrate() {
+    }
+    BitrateControls(const BitrateControls& other)
+        : remote_rate(other.remote_rate),
+          overuse_detector(other.overuse_detector),
+          incoming_bitrate(other.incoming_bitrate) {
+    }
+    RemoteRateControl remote_rate;
+    OverUseDetector overuse_detector;
+    BitRateStats incoming_bitrate;
+  };
+
+  typedef std::map<unsigned int, BitrateControls> SsrcBitrateControlsMap;
+
+  const OverUseDetectorOptions& options_;
+  SsrcBitrateControlsMap bitrate_controls_;
+  RemoteBitrateObserver* observer_;
+  scoped_ptr<CriticalSectionWrapper> crit_sect_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_REMOTE_BITRATE_ESTIMATOR_INCLUDE_REMOTE_BITRATE_ESTIMATOR_H_
diff --git a/src/modules/remote_bitrate_estimator/overuse_detector.cc b/src/modules/remote_bitrate_estimator/overuse_detector.cc
new file mode 100644
index 0000000..1c5bf9d
--- /dev/null
+++ b/src/modules/remote_bitrate_estimator/overuse_detector.cc
@@ -0,0 +1,405 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <math.h>
+#include <stdlib.h>  // fabsf
+#if _WIN32
+#include <windows.h>
+#endif
+
+#include "modules/remote_bitrate_estimator/overuse_detector.h"
+#include "modules/remote_bitrate_estimator/remote_rate_control.h"
+#include "modules/rtp_rtcp/source/rtp_utility.h"
+#include "system_wrappers/interface/trace.h"
+
+#ifdef WEBRTC_BWE_MATLAB
+extern MatlabEngine eng;  // global variable defined elsewhere
+#endif
+
+#define OVER_USING_TIME_THRESHOLD 100
+#define MIN_FRAME_PERIOD_HISTORY_LEN 60
+
+namespace webrtc {
+OverUseDetector::OverUseDetector(const OverUseDetectorOptions& options)
+    : options_(options),
+      current_frame_(),
+      prev_frame_(),
+      num_of_deltas_(0),
+      slope_(options_.initial_slope),
+      offset_(options_.initial_offset),
+      E_(),
+      process_noise_(),
+      avg_noise_(options_.initial_avg_noise),
+      var_noise_(options_.initial_var_noise),
+      threshold_(options_.initial_threshold),
+      ts_delta_hist_(),
+      prev_offset_(0.0),
+      time_over_using_(-1),
+      over_use_counter_(0),
+      hypothesis_(kBwNormal)
+#ifdef WEBRTC_BWE_MATLAB
+      , plots_()
+#endif
+      {
+  memcpy(E_, options_.initial_e, sizeof(E_));
+  memcpy(process_noise_, options_.initial_process_noise,
+         sizeof(process_noise_));
+}
+
+OverUseDetector::~OverUseDetector() {
+#ifdef WEBRTC_BWE_MATLAB
+  if (plots_.plot1_) {
+    eng.DeletePlot(plots_.plot1_);
+    plots_.plot1_ = NULL;
+  }
+  if (plots_.plot2_) {
+    eng.DeletePlot(plots_.plot2_);
+    plots_.plot2_ = NULL;
+  }
+  if (plots_.plot3_) {
+    eng.DeletePlot(plots_.plot3_);
+    plots_.plot3_ = NULL;
+  }
+  if (plots_.plot4_) {
+    eng.DeletePlot(plots_.plot4_);
+    plots_.plot4_ = NULL;
+  }
+#endif
+
+  ts_delta_hist_.clear();
+}
+
+void OverUseDetector::Update(uint16_t packet_size,
+                             uint32_t timestamp,
+                             const int64_t now_ms) {
+#ifdef WEBRTC_BWE_MATLAB
+  // Create plots
+  const int64_t startTimeMs = nowMS;
+  if (plots_.plot1_ == NULL) {
+    plots_.plot1_ = eng.NewPlot(new MatlabPlot());
+    plots_.plot1_->AddLine(1000, "b.", "scatter");
+  }
+  if (plots_.plot2_ == NULL) {
+    plots_.plot2_ = eng.NewPlot(new MatlabPlot());
+    plots_.plot2_->AddTimeLine(30, "b", "offset", startTimeMs);
+    plots_.plot2_->AddTimeLine(30, "r--", "limitPos", startTimeMs);
+    plots_.plot2_->AddTimeLine(30, "k.", "trigger", startTimeMs);
+    plots_.plot2_->AddTimeLine(30, "ko", "detection", startTimeMs);
+    //  plots_.plot2_->AddTimeLine(30, "g", "slowMean", startTimeMs);
+  }
+  if (plots_.plot3_ == NULL) {
+    plots_.plot3_ = eng.NewPlot(new MatlabPlot());
+    plots_.plot3_->AddTimeLine(30, "b", "noiseVar", startTimeMs);
+  }
+  if (plots_.plot4_ == NULL) {
+    plots_.plot4_ = eng.NewPlot(new MatlabPlot());
+    //  plots_.plot4_->AddTimeLine(60, "b", "p11", startTimeMs);
+    //  plots_.plot4_->AddTimeLine(60, "r", "p12", startTimeMs);
+    plots_.plot4_->AddTimeLine(60, "g", "p22", startTimeMs);
+    //  plots_.plot4_->AddTimeLine(60, "g--", "p22_hat", startTimeMs);
+    //  plots_.plot4_->AddTimeLine(30, "b.-", "deltaFs", startTimeMs);
+  }
+
+#endif
+
+  bool wrapped = false;
+  if (current_frame_.timestamp_ == -1) {
+    current_frame_.timestamp_ = timestamp;
+  } else if (OldTimestamp(
+      timestamp,
+      static_cast<uint32_t>(current_frame_.timestamp_),
+      &wrapped)) {
+    // Don't update with old data
+    return;
+  } else if (timestamp != current_frame_.timestamp_) {
+    // First packet of a later frame, the previous frame sample is ready
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
+                 "Frame complete at %I64i", current_frame_.completeTimeMs_);
+    if (prev_frame_.completeTimeMs_ >= 0) {  // This is our second frame
+      int64_t t_delta = 0;
+      double ts_delta = 0;
+      // Check for wrap
+      OldTimestamp(
+          static_cast<uint32_t>(prev_frame_.timestamp_),
+          static_cast<uint32_t>(current_frame_.timestamp_),
+          &wrapped);
+      CompensatedTimeDelta(current_frame_, prev_frame_, t_delta, ts_delta,
+                           wrapped);
+      UpdateKalman(t_delta, ts_delta, current_frame_.size_,
+                   prev_frame_.size_);
+    }
+    // The new timestamp is now the current frame,
+    // and the old timestamp becomes the previous frame.
+    prev_frame_ = current_frame_;
+    current_frame_.timestamp_ = timestamp;
+    current_frame_.size_ = 0;
+    current_frame_.completeTimeMs_ = -1;
+  }
+  // Accumulate the frame size
+  current_frame_.size_ += packet_size;
+  current_frame_.completeTimeMs_ = now_ms;
+}
+
+BandwidthUsage OverUseDetector::State() const {
+  return hypothesis_;
+}
+
+double OverUseDetector::NoiseVar() const {
+  return var_noise_;
+}
+
+void OverUseDetector::SetRateControlRegion(RateControlRegion region) {
+  switch (region) {
+    case kRcMaxUnknown: {
+      threshold_ = options_.initial_threshold;
+      break;
+    }
+    case kRcAboveMax:
+    case kRcNearMax: {
+      threshold_ = options_.initial_threshold / 2;
+      break;
+    }
+  }
+}
+
+void OverUseDetector::CompensatedTimeDelta(const FrameSample& currentFrame,
+                                           const FrameSample& prevFrame,
+                                           int64_t& t_delta,
+                                           double& ts_delta,
+                                           bool wrapped) {
+  num_of_deltas_++;
+  if (num_of_deltas_ > 1000) {
+    num_of_deltas_ = 1000;
+  }
+  // Add wrap-around compensation
+  int64_t wrapCompensation = 0;
+  if (wrapped) {
+    wrapCompensation = static_cast<int64_t>(1)<<32;
+  }
+  ts_delta = (currentFrame.timestamp_
+             + wrapCompensation
+             - prevFrame.timestamp_) / 90.0;
+  t_delta = currentFrame.completeTimeMs_ - prevFrame.completeTimeMs_;
+  assert(ts_delta > 0);
+}
+
+double OverUseDetector::CurrentDrift() {
+  return 1.0;
+}
+
+void OverUseDetector::UpdateKalman(int64_t t_delta,
+                                   double ts_delta,
+                                   uint32_t frame_size,
+                                   uint32_t prev_frame_size) {
+  const double minFramePeriod = UpdateMinFramePeriod(ts_delta);
+  const double drift = CurrentDrift();
+  // Compensate for drift
+  const double tTsDelta = t_delta - ts_delta / drift;
+  double fsDelta = static_cast<double>(frame_size) - prev_frame_size;
+
+  // Update the Kalman filter
+  const double scaleFactor =  minFramePeriod / (1000.0 / 30.0);
+  E_[0][0] += process_noise_[0] * scaleFactor;
+  E_[1][1] += process_noise_[1] * scaleFactor;
+
+  if ((hypothesis_ == kBwOverusing && offset_ < prev_offset_) ||
+      (hypothesis_ == kBwUnderUsing && offset_ > prev_offset_)) {
+    E_[1][1] += 10 * process_noise_[1] * scaleFactor;
+  }
+
+  const double h[2] = {fsDelta, 1.0};
+  const double Eh[2] = {E_[0][0]*h[0] + E_[0][1]*h[1],
+                        E_[1][0]*h[0] + E_[1][1]*h[1]};
+
+  const double residual = tTsDelta - slope_*h[0] - offset_;
+
+  const bool stable_state =
+      (BWE_MIN(num_of_deltas_, 60) * fabsf(offset_) < threshold_);
+  // We try to filter out very late frames. For instance periodic key
+  // frames doesn't fit the Gaussian model well.
+  if (fabsf(residual) < 3 * sqrt(var_noise_)) {
+    UpdateNoiseEstimate(residual, minFramePeriod, stable_state);
+  } else {
+    UpdateNoiseEstimate(3 * sqrt(var_noise_), minFramePeriod, stable_state);
+  }
+
+  const double denom = var_noise_ + h[0]*Eh[0] + h[1]*Eh[1];
+
+  const double K[2] = {Eh[0] / denom,
+                       Eh[1] / denom};
+
+  const double IKh[2][2] = {{1.0 - K[0]*h[0], -K[0]*h[1]},
+                            {-K[1]*h[0], 1.0 - K[1]*h[1]}};
+  const double e00 = E_[0][0];
+  const double e01 = E_[0][1];
+
+  // Update state
+  E_[0][0] = e00 * IKh[0][0] + E_[1][0] * IKh[0][1];
+  E_[0][1] = e01 * IKh[0][0] + E_[1][1] * IKh[0][1];
+  E_[1][0] = e00 * IKh[1][0] + E_[1][0] * IKh[1][1];
+  E_[1][1] = e01 * IKh[1][0] + E_[1][1] * IKh[1][1];
+
+  // Covariance matrix, must be positive semi-definite
+  assert(E_[0][0] + E_[1][1] >= 0 &&
+         E_[0][0] * E_[1][1] - E_[0][1] * E_[1][0] >= 0 &&
+         E_[0][0] >= 0);
+
+#ifdef WEBRTC_BWE_MATLAB
+  // plots_.plot4_->Append("p11",E_[0][0]);
+  // plots_.plot4_->Append("p12",E_[0][1]);
+  plots_.plot4_->Append("p22", E_[1][1]);
+  // plots_.plot4_->Append("p22_hat", 0.5*(process_noise_[1] +
+  //    sqrt(process_noise_[1]*(process_noise_[1] + 4*var_noise_))));
+  // plots_.plot4_->Append("deltaFs", fsDelta);
+  plots_.plot4_->Plot();
+#endif
+  slope_ = slope_ + K[0] * residual;
+  prev_offset_ = offset_;
+  offset_ = offset_ + K[1] * residual;
+
+  Detect(ts_delta);
+
+#ifdef WEBRTC_BWE_MATLAB
+  plots_.plot1_->Append("scatter",
+                 static_cast<double>(current_frame_.size_) - prev_frame_.size_,
+                 static_cast<double>(t_delta - ts_delta));
+  plots_.plot1_->MakeTrend("scatter", "slope", slope_, offset_, "k-");
+  plots_.plot1_->MakeTrend("scatter", "thresholdPos",
+                    slope_, offset_ + 2 * sqrt(var_noise_), "r-");
+  plots_.plot1_->MakeTrend("scatter", "thresholdNeg",
+                    slope_, offset_ - 2 * sqrt(var_noise_), "r-");
+  plots_.plot1_->Plot();
+
+  plots_.plot2_->Append("offset", offset_);
+  plots_.plot2_->Append("limitPos", threshold_/BWE_MIN(num_of_deltas_, 60));
+  plots_.plot2_->Plot();
+
+  plots_.plot3_->Append("noiseVar", var_noise_);
+  plots_.plot3_->Plot();
+#endif
+}
+
+double OverUseDetector::UpdateMinFramePeriod(double ts_delta) {
+  double minFramePeriod = ts_delta;
+  if (ts_delta_hist_.size() >= MIN_FRAME_PERIOD_HISTORY_LEN) {
+    std::list<double>::iterator firstItem = ts_delta_hist_.begin();
+    ts_delta_hist_.erase(firstItem);
+  }
+  std::list<double>::iterator it = ts_delta_hist_.begin();
+  for (; it != ts_delta_hist_.end(); it++) {
+    minFramePeriod = BWE_MIN(*it, minFramePeriod);
+  }
+  ts_delta_hist_.push_back(ts_delta);
+  return minFramePeriod;
+}
+
+void OverUseDetector::UpdateNoiseEstimate(double residual,
+                                          double ts_delta,
+                                          bool stable_state) {
+  if (!stable_state) {
+    return;
+  }
+  // Faster filter during startup to faster adapt to the jitter level
+  // of the network alpha is tuned for 30 frames per second, but
+  double alpha = 0.01;
+  if (num_of_deltas_ > 10*30) {
+    alpha = 0.002;
+  }
+  // Only update the noise estimate if we're not over-using
+  // beta is a function of alpha and the time delta since
+  // the previous update.
+  const double beta = pow(1 - alpha, ts_delta * 30.0 / 1000.0);
+  avg_noise_ = beta * avg_noise_
+              + (1 - beta) * residual;
+  var_noise_ = beta * var_noise_
+              + (1 - beta) * (avg_noise_ - residual) * (avg_noise_ - residual);
+  if (var_noise_ < 1e-7) {
+    var_noise_ = 1e-7;
+  }
+}
+
+BandwidthUsage OverUseDetector::Detect(double ts_delta) {
+  if (num_of_deltas_ < 2) {
+    return kBwNormal;
+  }
+  const double T = BWE_MIN(num_of_deltas_, 60) * offset_;
+  if (fabsf(T) > threshold_) {
+    if (offset_ > 0) {
+      if (time_over_using_ == -1) {
+        // Initialize the timer. Assume that we've been
+        // over-using half of the time since the previous
+        // sample.
+        time_over_using_ = ts_delta / 2;
+      } else {
+        // Increment timer
+        time_over_using_ += ts_delta;
+      }
+      over_use_counter_++;
+      if (time_over_using_ > OVER_USING_TIME_THRESHOLD
+          && over_use_counter_ > 1) {
+        if (offset_ >= prev_offset_) {
+#ifdef _DEBUG
+          if (hypothesis_ != kBwOverusing) {
+            WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "BWE: kBwOverusing");
+          }
+#endif
+          time_over_using_ = 0;
+          over_use_counter_ = 0;
+          hypothesis_ = kBwOverusing;
+#ifdef WEBRTC_BWE_MATLAB
+          plots_.plot2_->Append("detection", offset_);  // plot it later
+#endif
+        }
+      }
+#ifdef WEBRTC_BWE_MATLAB
+      plots_.plot2_->Append("trigger", offset_);  // plot it later
+#endif
+    } else {
+#ifdef _DEBUG
+      if (hypothesis_ != kBwUnderUsing) {
+        WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "BWE: kBwUnderUsing");
+      }
+#endif
+      time_over_using_ = -1;
+      over_use_counter_ = 0;
+      hypothesis_ = kBwUnderUsing;
+    }
+  } else {
+#ifdef _DEBUG
+    if (hypothesis_ != kBwNormal) {
+      WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "BWE: kBwNormal");
+    }
+#endif
+    time_over_using_ = -1;
+    over_use_counter_ = 0;
+    hypothesis_ = kBwNormal;
+  }
+  return hypothesis_;
+}
+
+bool OverUseDetector::OldTimestamp(uint32_t new_timestamp,
+                                   uint32_t existing_timestamp,
+                                   bool* wrapped) {
+  bool tmpWrapped =
+      (new_timestamp < 0x0000ffff && existing_timestamp > 0xffff0000) ||
+      (new_timestamp > 0xffff0000 && existing_timestamp < 0x0000ffff);
+  *wrapped = tmpWrapped;
+  if (existing_timestamp > new_timestamp && !tmpWrapped) {
+    return true;
+  } else if (existing_timestamp <= new_timestamp && !tmpWrapped) {
+    return false;
+  } else if (existing_timestamp < new_timestamp && tmpWrapped) {
+    return true;
+  } else {
+    return false;
+  }
+}
+
+}  // namespace webrtc
diff --git a/src/modules/remote_bitrate_estimator/overuse_detector.h b/src/modules/remote_bitrate_estimator/overuse_detector.h
new file mode 100644
index 0000000..3fd3933
--- /dev/null
+++ b/src/modules/remote_bitrate_estimator/overuse_detector.h
@@ -0,0 +1,98 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_OVERUSE_DETECTOR_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_OVERUSE_DETECTOR_H_
+
+#include <list>
+
+#include "modules/interface/module_common_types.h"
+#include "modules/remote_bitrate_estimator/include/bwe_defines.h"
+#include "typedefs.h"  // NOLINT(build/include)
+
+#ifdef WEBRTC_BWE_MATLAB
+#include "../test/BWEStandAlone/MatlabPlot.h"
+#endif
+
+namespace webrtc {
+enum RateControlRegion;
+
+class OverUseDetector {
+ public:
+  explicit OverUseDetector(const OverUseDetectorOptions& options);
+  ~OverUseDetector();
+  void Update(const WebRtc_UWord16 packet_size,
+              const uint32_t timestamp,
+              const int64_t now_ms);
+  BandwidthUsage State() const;
+  double NoiseVar() const;
+  void SetRateControlRegion(RateControlRegion region);
+
+ private:
+  struct FrameSample {
+    FrameSample() : size_(0), completeTimeMs_(-1), timestamp_(-1) {}
+
+    uint32_t size_;
+    int64_t  completeTimeMs_;
+    int64_t  timestamp_;
+  };
+
+  struct DebugPlots {
+#ifdef WEBRTC_BWE_MATLAB
+    DebugPlots() : plot1(NULL), plot2(NULL), plot3(NULL), plot4(NULL) {}
+    MatlabPlot* plot1;
+    MatlabPlot* plot2;
+    MatlabPlot* plot3;
+    MatlabPlot* plot4;
+#endif
+  };
+
+  static bool OldTimestamp(uint32_t new_timestamp,
+                           uint32_t existing_timestamp,
+                           bool* wrapped);
+
+  void CompensatedTimeDelta(const FrameSample& current_frame,
+                            const FrameSample& prev_frame,
+                            int64_t& t_delta,
+                            double& ts_delta,
+                            bool wrapped);
+  void UpdateKalman(int64_t t_delta,
+                    double ts_elta,
+                    uint32_t frame_size,
+                    uint32_t prev_frame_size);
+  double UpdateMinFramePeriod(double ts_delta);
+  void UpdateNoiseEstimate(double residual, double ts_delta, bool stable_state);
+  BandwidthUsage Detect(double ts_delta);
+  double CurrentDrift();
+
+  OverUseDetectorOptions options_;  // Must be first member
+                                    // variable. Cannot be const
+                                    // because we need to be copyable.
+  FrameSample current_frame_;
+  FrameSample prev_frame_;
+  uint16_t num_of_deltas_;
+  double slope_;
+  double offset_;
+  double E_[2][2];
+  double process_noise_[2];
+  double avg_noise_;
+  double var_noise_;
+  double threshold_;
+  std::list<double> ts_delta_hist_;
+  double prev_offset_;
+  double time_over_using_;
+  uint16_t over_use_counter_;
+  BandwidthUsage hypothesis_;
+#ifdef WEBRTC_BWE_MATLAB
+  DebugPlots plots_;
+#endif
+};
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_RTP_RTCP_SOURCE_OVERUSE_DETECTOR_H_
diff --git a/src/modules/remote_bitrate_estimator/remote_bitrate_estimator.cc b/src/modules/remote_bitrate_estimator/remote_bitrate_estimator.cc
new file mode 100644
index 0000000..70713d5
--- /dev/null
+++ b/src/modules/remote_bitrate_estimator/remote_bitrate_estimator.cc
@@ -0,0 +1,103 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+
+#include "system_wrappers/interface/tick_util.h"
+
+namespace webrtc {
+
+RemoteBitrateEstimator::RemoteBitrateEstimator(
+    RemoteBitrateObserver* observer,
+    const OverUseDetectorOptions& options)
+    : options_(options),
+      observer_(observer),
+      crit_sect_(CriticalSectionWrapper::CreateCriticalSection()) {
+  assert(observer_);
+}
+
+void RemoteBitrateEstimator::IncomingPacket(unsigned int ssrc,
+                                            int packet_size,
+                                            int64_t arrival_time,
+                                            uint32_t rtp_timestamp,
+                                            int64_t packet_send_time) {
+  CriticalSectionScoped cs(crit_sect_.get());
+  SsrcBitrateControlsMap::iterator it = bitrate_controls_.find(ssrc);
+  if (it == bitrate_controls_.end()) {
+    // This is a new SSRC. Adding to map.
+    // TODO(holmer): If the channel changes SSRC the old SSRC will still be
+    // around in this map until the channel is deleted. This is OK since the
+    // callback will no longer be called for the old SSRC. This will be
+    // automatically cleaned up when we have one RemoteBitrateEstimator per REMB
+    // group.
+    bitrate_controls_.insert(std::make_pair(ssrc, BitrateControls(options_)));
+    it = bitrate_controls_.find(ssrc);
+  }
+  OverUseDetector* overuse_detector = &it->second.overuse_detector;
+  it->second.incoming_bitrate.Update(packet_size, arrival_time);
+  const BandwidthUsage prior_state = overuse_detector->State();
+  overuse_detector->Update(packet_size, rtp_timestamp, arrival_time);
+  if (prior_state != overuse_detector->State() &&
+      overuse_detector->State() == kBwOverusing) {
+    // The first overuse should immediately trigger a new estimate.
+    UpdateEstimate(ssrc, arrival_time);
+  }
+}
+
+void RemoteBitrateEstimator::UpdateEstimate(unsigned int ssrc,
+                                            int64_t time_now) {
+  CriticalSectionScoped cs(crit_sect_.get());
+  SsrcBitrateControlsMap::iterator it = bitrate_controls_.find(ssrc);
+  if (it == bitrate_controls_.end()) {
+    return;
+  }
+  OverUseDetector* overuse_detector = &it->second.overuse_detector;
+  RemoteRateControl* remote_rate = &it->second.remote_rate;
+  const RateControlInput input(overuse_detector->State(),
+                               it->second.incoming_bitrate.BitRate(time_now),
+                               overuse_detector->NoiseVar());
+  const RateControlRegion region = remote_rate->Update(&input, time_now);
+  unsigned int target_bitrate = remote_rate->UpdateBandwidthEstimate(time_now);
+  if (remote_rate->ValidEstimate()) {
+    observer_->OnReceiveBitrateChanged(ssrc, target_bitrate);
+  }
+  overuse_detector->SetRateControlRegion(region);
+}
+
+void RemoteBitrateEstimator::SetRtt(unsigned int rtt) {
+  CriticalSectionScoped cs(crit_sect_.get());
+  for (SsrcBitrateControlsMap::iterator it = bitrate_controls_.begin();
+      it != bitrate_controls_.end(); ++it) {
+    it->second.remote_rate.SetRtt(rtt);
+  }
+}
+
+void RemoteBitrateEstimator::RemoveStream(unsigned int ssrc) {
+  CriticalSectionScoped cs(crit_sect_.get());
+  // Ignoring the return value which is the number of elements erased.
+  bitrate_controls_.erase(ssrc);
+}
+
+bool RemoteBitrateEstimator::LatestEstimate(unsigned int ssrc,
+                                            unsigned int* bitrate_bps) const {
+  CriticalSectionScoped cs(crit_sect_.get());
+  assert(bitrate_bps != NULL);
+  SsrcBitrateControlsMap::const_iterator it = bitrate_controls_.find(ssrc);
+  if (it == bitrate_controls_.end()) {
+    return false;
+  }
+  if (!it->second.remote_rate.ValidEstimate()) {
+    return false;
+  }
+  *bitrate_bps = it->second.remote_rate.LatestEstimate();
+  return true;
+}
+
+}  // namespace webrtc
diff --git a/src/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi b/src/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi
new file mode 100644
index 0000000..21ec7a9
--- /dev/null
+++ b/src/modules/remote_bitrate_estimator/remote_bitrate_estimator.gypi
@@ -0,0 +1,71 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'remote_bitrate_estimator',
+      'type': '<(library)',
+      'dependencies': [
+        # system_wrappers
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        'include',
+        '../rtp_rtcp/interface',
+        '../interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include',
+        ],
+      },
+      'sources': [
+        # interface
+        'include/bwe_defines.h',
+        'include/remote_bitrate_estimator.h',
+
+        # source
+        'bitrate_estimator.cc',
+        'bitrate_estimator.h',
+        'overuse_detector.cc',
+        'overuse_detector.h',
+        'remote_bitrate_estimator.cc',
+        'remote_rate_control.cc',
+        'remote_rate_control.h',
+      ], # source
+    },
+  ], # targets
+  'conditions': [
+    ['include_tests==1', {
+      'targets': [
+        {
+          'target_name': 'remote_bitrate_estimator_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'remote_bitrate_estimator',
+            '<(DEPTH)/testing/gmock.gyp:gmock',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+          ],
+          'sources': [
+            'include/mock/mock_remote_bitrate_observer.h',
+            'bitrate_estimator_unittest.cc',
+            'remote_bitrate_estimator_unittest.cc',
+          ],
+        },
+      ], # targets
+    }], # build_with_chromium
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest.cc b/src/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest.cc
new file mode 100644
index 0000000..2f16ab8
--- /dev/null
+++ b/src/modules/remote_bitrate_estimator/remote_bitrate_estimator_unittest.cc
@@ -0,0 +1,322 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+// This file includes unit tests for RemoteBitrateEstimator.
+
+#include <gtest/gtest.h>
+#include <list>
+
+#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+enum { kMtu = 1200 };
+
+class TestBitrateObserver : public RemoteBitrateObserver {
+ public:
+  TestBitrateObserver() : updated_(false), latest_bitrate_(0) {}
+
+  void OnReceiveBitrateChanged(unsigned int ssrc, unsigned int bitrate) {
+    latest_bitrate_ = bitrate;
+    updated_ = true;
+  }
+
+  void Reset() {
+    updated_ = false;
+  }
+
+  bool updated() const {
+    return updated_;
+  }
+
+  unsigned int latest_bitrate() const {
+    return latest_bitrate_;
+  }
+
+ private:
+  bool updated_;
+  unsigned int latest_bitrate_;
+};
+
+class StreamGenerator {
+ public:
+  struct Packet {
+    int64_t send_time;
+    int64_t arrival_time;
+    uint32_t rtp_timestamp;
+    unsigned int size;
+  };
+
+  typedef std::list<Packet*> PacketList;
+
+  StreamGenerator(int fps, int bitrate_bps, int capacity, int64_t time_now)
+      : fps_(fps),
+        bitrate_bps_(bitrate_bps),
+        capacity_(capacity),
+        time_now_(time_now),
+        prev_arrival_time_(time_now),
+        rtp_timestamp_offset_(0xFFFFF000) {}
+
+  void SetCapacity(int capacity_bps) {
+    ASSERT_GT(capacity_bps, 0);
+    capacity_ = capacity_bps;
+  }
+
+  void SetBitrate(int bitrate_bps) {
+    ASSERT_GE(bitrate_bps, 0);
+    bitrate_bps_ = bitrate_bps;
+  }
+
+  void SetRtpTimestampOffset(uint32_t offset) {
+    rtp_timestamp_offset_ = offset;
+  }
+
+  void GenerateFrame(PacketList* packets) {
+    ASSERT_FALSE(packets == NULL);
+    ASSERT_TRUE(packets->empty());
+    ASSERT_GT(fps_, 0);
+    int bits_per_frame = bitrate_bps_ / fps_;
+    int n_packets = std::max(bits_per_frame / (8 * kMtu), 1);
+    int packet_size = bits_per_frame / (8 * n_packets);
+    ASSERT_GE(n_packets, 0);
+    for (int i = 0; i < n_packets; ++i) {
+      Packet* packet = new Packet;
+      packet->send_time = time_now_ + kSendSideOffsetMs;
+      ASSERT_GT(capacity_, 0);
+      packet->arrival_time = std::max(
+          prev_arrival_time_ + 8 * 1000 * packet_size / capacity_,
+          time_now_);
+      packet->size = packet_size;
+      packet->rtp_timestamp = rtp_timestamp_offset_ + 90 * packet->send_time;
+      prev_arrival_time_ = packet->arrival_time;
+      packets->push_back(packet);
+    }
+    time_now_ = time_now_ + 1000 / fps_;
+  }
+
+  int64_t TimeNow() const {
+    return time_now_;
+  }
+
+ private:
+  enum { kSendSideOffsetMs = 1000 };
+
+  int fps_;
+  int bitrate_bps_;
+  int capacity_;
+  int64_t time_now_;
+  int64_t prev_arrival_time_;
+  uint32_t rtp_timestamp_offset_;
+};
+
+class RemoteBitrateEstimatorTest : public ::testing::Test {
+ protected:
+  virtual void SetUp() {
+    bitrate_observer_.reset(new TestBitrateObserver);
+    bitrate_estimator_.reset(new RemoteBitrateEstimator(
+        bitrate_observer_.get(), over_use_detector_options_));
+    // Framerate: 30 fps; Start bitrate: 300 kbps; Link capacity: 1000 kbps,
+    // Start time: 0.
+    stream_generator_.reset(new StreamGenerator(30, 3e5, 1e6, 0));
+  }
+
+  // Generates a frame of packets belonging to a stream at a given bitrate and
+  // with a given ssrc. The stream is pushed through a very simple simulated
+  // network, and is then given to the receive-side bandwidth estimator.
+  // Returns true if an over-use was seen, false otherwise.
+  // The StreamGenerator::updated() should be used to check for any changes in
+  // target bitrate after the call to this function.
+  bool GenerateAndProcessFrame(unsigned int ssrc, unsigned int bitrate_bps) {
+    stream_generator_->SetBitrate(bitrate_bps);
+    StreamGenerator::PacketList packets;
+    stream_generator_->GenerateFrame(&packets);
+    int64_t last_arrival_time = -1;
+    bool prev_was_decrease = false;
+    bool overuse = false;
+    while (!packets.empty()) {
+      StreamGenerator::Packet* packet = packets.front();
+      bitrate_estimator_->IncomingPacket(ssrc,
+                                         packet->size,
+                                         packet->arrival_time,
+                                         packet->rtp_timestamp,
+                                         -1);
+      if (bitrate_observer_->updated()) {
+        // Verify that new estimates only are triggered by an overuse and a
+        // rate decrease.
+        overuse = true;
+        EXPECT_LE(bitrate_observer_->latest_bitrate(), bitrate_bps);
+        EXPECT_FALSE(prev_was_decrease);
+        prev_was_decrease = true;
+      } else {
+        prev_was_decrease = false;
+      }
+      bitrate_observer_->Reset();
+      last_arrival_time = packet->arrival_time;
+      delete packet;
+      packets.pop_front();
+    }
+    EXPECT_GT(last_arrival_time, -1);
+    bitrate_estimator_->UpdateEstimate(ssrc, last_arrival_time);
+    return overuse;
+  }
+
+  // Run the bandwidth estimator with a stream of |number_of_frames| frames.
+  // Can for instance be used to run the estimator for some time to get it
+  // into a steady state.
+  unsigned int SteadyStateRun(unsigned int ssrc,
+                              int number_of_frames,
+                              unsigned int start_bitrate,
+                              unsigned int min_bitrate,
+                              unsigned int max_bitrate) {
+    unsigned int bitrate_bps = start_bitrate;
+    bool bitrate_update_seen = false;
+    // Produce |number_of_frames| frames and give them to the estimator.
+    for (int i = 0; i < number_of_frames; ++i) {
+      bool overuse = GenerateAndProcessFrame(ssrc, bitrate_bps);
+      if (overuse) {
+        EXPECT_LT(bitrate_observer_->latest_bitrate(), max_bitrate);
+        EXPECT_GT(bitrate_observer_->latest_bitrate(), min_bitrate);
+        bitrate_bps = bitrate_observer_->latest_bitrate();
+        bitrate_update_seen = true;
+      } else if (bitrate_observer_->updated()) {
+        bitrate_bps = bitrate_observer_->latest_bitrate();
+        bitrate_observer_->Reset();
+      }
+    }
+    EXPECT_TRUE(bitrate_update_seen);
+    return bitrate_bps;
+  }
+
+  OverUseDetectorOptions over_use_detector_options_;
+  scoped_ptr<RemoteBitrateEstimator> bitrate_estimator_;
+  scoped_ptr<TestBitrateObserver> bitrate_observer_;
+  scoped_ptr<StreamGenerator> stream_generator_;
+};
+
+TEST_F(RemoteBitrateEstimatorTest, TestInitialBehavior) {
+  unsigned int bitrate_bps = 0;
+  unsigned int ssrc = 0;
+  int64_t time_now = 0;
+  uint32_t timestamp = 0;
+  EXPECT_FALSE(bitrate_estimator_->LatestEstimate(ssrc, &bitrate_bps));
+  bitrate_estimator_->UpdateEstimate(ssrc, time_now);
+  EXPECT_FALSE(bitrate_estimator_->LatestEstimate(ssrc, &bitrate_bps));
+  EXPECT_FALSE(bitrate_observer_->updated());
+  bitrate_observer_->Reset();
+  // Inserting a packet. Still no valid estimate. We need to wait 1 second.
+  bitrate_estimator_->IncomingPacket(ssrc, kMtu, time_now,
+                                     timestamp, -1);
+  bitrate_estimator_->UpdateEstimate(ssrc, time_now);
+  EXPECT_FALSE(bitrate_estimator_->LatestEstimate(ssrc, &bitrate_bps));
+  EXPECT_FALSE(bitrate_observer_->updated());
+  bitrate_observer_->Reset();
+  // Waiting more than one second gives us a valid estimate.
+  time_now += 1001;
+  bitrate_estimator_->UpdateEstimate(ssrc, time_now);
+  EXPECT_TRUE(bitrate_estimator_->LatestEstimate(ssrc, &bitrate_bps));
+  EXPECT_EQ(bitrate_bps, 10734u);
+  EXPECT_TRUE(bitrate_observer_->updated());
+  bitrate_observer_->Reset();
+  EXPECT_EQ(bitrate_observer_->latest_bitrate(), bitrate_bps);
+}
+
+// Make sure we initially increase the bitrate as expected.
+TEST_F(RemoteBitrateEstimatorTest, TestRateIncreaseRtpTimestamps) {
+  const int kExpectedIterations = 323;
+  unsigned int bitrate_bps = 30000;
+  unsigned int ssrc = 0;
+  int iterations = 0;
+  // Feed the estimator with a stream of packets and verify that it reaches
+  // 500 kbps at the expected time.
+  while (bitrate_bps < 5e5) {
+    bool overuse = GenerateAndProcessFrame(ssrc, bitrate_bps);
+    if (overuse) {
+      EXPECT_GT(bitrate_observer_->latest_bitrate(), bitrate_bps);
+      bitrate_bps = bitrate_observer_->latest_bitrate();
+      bitrate_observer_->Reset();
+    } else if (bitrate_observer_->updated()) {
+      bitrate_bps = bitrate_observer_->latest_bitrate();
+      bitrate_observer_->Reset();
+    }
+    ++iterations;
+    ASSERT_LE(iterations, kExpectedIterations);
+  }
+  ASSERT_EQ(iterations, kExpectedIterations);
+}
+
+// Verify that the time it takes for the estimator to reduce the bitrate when
+// the capacity is tightened stays the same.
+TEST_F(RemoteBitrateEstimatorTest, TestCapacityDropRtpTimestamps) {
+  const unsigned int kSsrc = 0;
+  const int kNumberOfFrames= 300;
+  const int kStartBitrate = 900e3;
+  const int kMinExpectedBitrate = 800e3;
+  const int kMaxExpectedBitrate = 1500e3;
+  // Run in steady state to make the estimator converge.
+  stream_generator_->SetCapacity(1000e3);
+  unsigned int bitrate_bps = SteadyStateRun(kSsrc, kNumberOfFrames,
+                                            kStartBitrate, kMinExpectedBitrate,
+                                            kMaxExpectedBitrate);
+  // Reduce the capacity and verify the decrease time.
+  stream_generator_->SetCapacity(500e3);
+  int64_t bitrate_drop_time = 0;
+  for (int i = 0; i < 1000; ++i) {
+    GenerateAndProcessFrame(kSsrc, bitrate_bps);
+    // Check for either increase or decrease.
+    if (bitrate_observer_->updated()) {
+      if (bitrate_observer_->latest_bitrate() <= 500e3) {
+        bitrate_drop_time = stream_generator_->TimeNow();
+      }
+      bitrate_bps = bitrate_observer_->latest_bitrate();
+      bitrate_observer_->Reset();
+    }
+  }
+  EXPECT_EQ(42900, bitrate_drop_time);
+}
+
+// Verify that the time it takes for the estimator to reduce the bitrate when
+// the capacity is tightened stays the same. This test also verifies that we
+// handle wrap-arounds in this scenario.
+TEST_F(RemoteBitrateEstimatorTest, TestCapacityDropRtpTimestampsWrap) {
+  const unsigned int kSsrc = 0;
+  const int kFramerate= 30;
+  const int kStartBitrate = 900e3;
+  const int kMinExpectedBitrate = 800e3;
+  const int kMaxExpectedBitrate = 1500e3;
+  const int kSteadyStateTime = 10;  // Seconds.
+  // Trigger wrap right after the steady state run.
+  stream_generator_->SetRtpTimestampOffset(
+      std::numeric_limits<uint32_t>::max() - kSteadyStateTime * 90000);
+  // Run in steady state to make the estimator converge.
+  unsigned int bitrate_bps = SteadyStateRun(kSsrc,
+                                            kSteadyStateTime * kFramerate,
+                                            kStartBitrate,
+                                            kMinExpectedBitrate,
+                                            kMaxExpectedBitrate);
+  // Reduce the capacity and verify the decrease time.
+  stream_generator_->SetCapacity(500e3);
+  int64_t bitrate_drop_time = 0;
+  for (int i = 0; i < 1000; ++i) {
+    GenerateAndProcessFrame(kSsrc, bitrate_bps);
+    // Check for either increase or decrease.
+    if (bitrate_observer_->updated()) {
+      if (bitrate_observer_->latest_bitrate() <= 500e3) {
+        bitrate_drop_time = stream_generator_->TimeNow();
+      }
+      bitrate_bps = bitrate_observer_->latest_bitrate();
+      bitrate_observer_->Reset();
+    }
+  }
+  EXPECT_EQ(42900, bitrate_drop_time);
+}
+
+}  // namespace webrtc
diff --git a/src/modules/remote_bitrate_estimator/remote_rate_control.cc b/src/modules/remote_bitrate_estimator/remote_rate_control.cc
new file mode 100644
index 0000000..cb542a4
--- /dev/null
+++ b/src/modules/remote_bitrate_estimator/remote_rate_control.cc
@@ -0,0 +1,489 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/remote_bitrate_estimator/remote_rate_control.h"
+
+#include <assert.h>
+#include <math.h>
+#include <string.h>
+#if _WIN32
+#include <windows.h>
+#endif
+
+#include "system_wrappers/interface/trace.h"
+
+#ifdef MATLAB
+extern MatlabEngine eng; // global variable defined elsewhere
+#endif
+
+namespace webrtc {
+RemoteRateControl::RemoteRateControl()
+:
+_minConfiguredBitRate(30000),
+_maxConfiguredBitRate(30000000),
+_currentBitRate(_maxConfiguredBitRate),
+_maxHoldRate(0),
+_avgMaxBitRate(-1.0f),
+_varMaxBitRate(0.4f),
+_rcState(kRcHold),
+_cameFromState(kRcDecrease),
+_rcRegion(kRcMaxUnknown),
+_lastBitRateChange(-1),
+_currentInput(kBwNormal, 0, 1.0),
+_updated(false),
+_timeFirstIncomingEstimate(-1),
+_initializedBitRate(false),
+_avgChangePeriod(1000.0f),
+_lastChangeMs(-1),
+_beta(0.9f),
+_rtt(0)
+#ifdef MATLAB
+,_plot1(NULL),
+_plot2(NULL)
+#endif
+{
+}
+
+RemoteRateControl::~RemoteRateControl()
+{
+#ifdef MATLAB
+    eng.DeletePlot(_plot1);
+    eng.DeletePlot(_plot2);
+#endif
+}
+
+void RemoteRateControl::Reset()
+{
+    _minConfiguredBitRate = 30000;
+    _maxConfiguredBitRate = 30000000;
+    _currentBitRate = _maxConfiguredBitRate;
+    _maxHoldRate = 0;
+    _avgMaxBitRate = -1.0f;
+    _varMaxBitRate = 0.4f;
+    _rcState = kRcHold;
+    _cameFromState = kRcHold;
+    _rcRegion = kRcMaxUnknown;
+    _lastBitRateChange = -1;
+    _avgChangePeriod = 1000.0f;
+    _lastChangeMs = -1;
+    _beta = 0.9f;
+    _currentInput._bwState = kBwNormal;
+    _currentInput._incomingBitRate = 0;
+    _currentInput._noiseVar = 1.0;
+    _updated = false;
+    _timeFirstIncomingEstimate = -1;
+    _initializedBitRate = false;
+}
+
+bool RemoteRateControl::ValidEstimate() const {
+  return _initializedBitRate;
+}
+
+WebRtc_Word32 RemoteRateControl::SetConfiguredBitRates(
+    WebRtc_UWord32 minBitRateBps, WebRtc_UWord32 maxBitRateBps)
+{
+    if (minBitRateBps > maxBitRateBps)
+    {
+        return -1;
+    }
+    _minConfiguredBitRate = minBitRateBps;
+    _maxConfiguredBitRate = maxBitRateBps;
+    _currentBitRate = BWE_MIN(BWE_MAX(minBitRateBps, _currentBitRate),
+                              maxBitRateBps);
+    return 0;
+}
+
+WebRtc_UWord32 RemoteRateControl::LatestEstimate() const {
+  return _currentBitRate;
+}
+
+WebRtc_UWord32 RemoteRateControl::UpdateBandwidthEstimate(WebRtc_Word64 nowMS)
+{
+    _currentBitRate = ChangeBitRate(_currentBitRate,
+                                    _currentInput._incomingBitRate,
+                                    _currentInput._noiseVar,
+                                    nowMS);
+    return _currentBitRate;
+}
+
+void RemoteRateControl::SetRtt(unsigned int rtt) {
+  _rtt = rtt;
+}
+
+RateControlRegion RemoteRateControl::Update(const RateControlInput* input,
+                                            WebRtc_Word64 nowMS)
+{
+    assert(input);
+#ifdef MATLAB
+    // Create plots
+    if (_plot1 == NULL)
+    {
+        _plot1 = eng.NewPlot(new MatlabPlot());
+
+        _plot1->AddTimeLine(30, "b", "current");
+        _plot1->AddTimeLine(30, "r-", "avgMax");
+        _plot1->AddTimeLine(30, "r--", "pStdMax");
+        _plot1->AddTimeLine(30, "r--", "nStdMax");
+        _plot1->AddTimeLine(30, "r+", "max");
+        _plot1->AddTimeLine(30, "g", "incoming");
+        _plot1->AddTimeLine(30, "b+", "recovery");
+    }
+    if (_plot2 == NULL)
+    {
+        _plot2 = eng.NewPlot(new MatlabPlot());
+
+        _plot2->AddTimeLine(30, "b", "alpha");
+    }
+#endif
+
+    // Set the initial bit rate value to what we're receiving the first second
+    if (!_initializedBitRate)
+    {
+        if (_timeFirstIncomingEstimate < 0)
+        {
+            if (input->_incomingBitRate > 0)
+            {
+                _timeFirstIncomingEstimate = nowMS;
+            }
+        }
+        else if (nowMS - _timeFirstIncomingEstimate > 1000 &&
+            input->_incomingBitRate > 0)
+        {
+            _currentBitRate = input->_incomingBitRate;
+            _initializedBitRate = true;
+        }
+    }
+
+    if (_updated && _currentInput._bwState == kBwOverusing)
+    {
+        // Only update delay factor and incoming bit rate. We always want to react on an over-use.
+        _currentInput._noiseVar = input->_noiseVar;
+        _currentInput._incomingBitRate = input->_incomingBitRate;
+        return _rcRegion;
+    }
+    _updated = true;
+    _currentInput = *input;
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "BWE: Incoming rate = %u kbps", input->_incomingBitRate/1000);
+    return _rcRegion;
+}
+
+WebRtc_UWord32 RemoteRateControl::ChangeBitRate(WebRtc_UWord32 currentBitRate,
+                                                WebRtc_UWord32 incomingBitRate,
+                                                double noiseVar,
+                                                WebRtc_Word64 nowMS)
+{
+    if (!_updated)
+    {
+        return _currentBitRate;
+    }
+    _updated = false;
+    UpdateChangePeriod(nowMS);
+    ChangeState(_currentInput, nowMS);
+    // calculated here because it's used in multiple places
+    const float incomingBitRateKbps = incomingBitRate / 1000.0f;
+    // Calculate the max bit rate std dev given the normalized
+    // variance and the current incoming bit rate.
+    const float stdMaxBitRate = sqrt(_varMaxBitRate * _avgMaxBitRate);
+    bool recovery = false;
+    switch (_rcState)
+    {
+    case kRcHold:
+        {
+            _maxHoldRate = BWE_MAX(_maxHoldRate, incomingBitRate);
+            break;
+        }
+    case kRcIncrease:
+        {
+            if (_avgMaxBitRate >= 0)
+            {
+                if (incomingBitRateKbps > _avgMaxBitRate + 3 * stdMaxBitRate)
+                {
+                    ChangeRegion(kRcMaxUnknown);
+                    _avgMaxBitRate = -1.0;
+                }
+                else if (incomingBitRateKbps > _avgMaxBitRate + 2.5 * stdMaxBitRate)
+                {
+                    ChangeRegion(kRcAboveMax);
+                }
+            }
+            WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
+                         "BWE: Response time: %f + %i + 10*33\n",
+                         _avgChangePeriod, _rtt);
+            const WebRtc_UWord32 responseTime = static_cast<WebRtc_UWord32>(_avgChangePeriod + 0.5f) + _rtt + 300;
+            double alpha = RateIncreaseFactor(nowMS, _lastBitRateChange,
+                                              responseTime, noiseVar);
+
+            WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
+                "BWE: _avgChangePeriod = %f ms; RTT = %u ms", _avgChangePeriod, _rtt);
+
+            currentBitRate = static_cast<WebRtc_UWord32>(currentBitRate * alpha) + 1000;
+            if (_maxHoldRate > 0 && _beta * _maxHoldRate > currentBitRate)
+            {
+                currentBitRate = static_cast<WebRtc_UWord32>(_beta * _maxHoldRate);
+                _avgMaxBitRate = _beta * _maxHoldRate / 1000.0f;
+                ChangeRegion(kRcNearMax);
+                recovery = true;
+#ifdef MATLAB
+                _plot1->Append("recovery", _maxHoldRate/1000);
+#endif
+            }
+            _maxHoldRate = 0;
+            WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
+                "BWE: Increase rate to currentBitRate = %u kbps", currentBitRate/1000);
+            _lastBitRateChange = nowMS;
+            break;
+        }
+    case kRcDecrease:
+        {
+            if (incomingBitRate < _minConfiguredBitRate)
+            {
+                currentBitRate = _minConfiguredBitRate;
+            }
+            else
+            {
+                // Set bit rate to something slightly lower than max
+                // to get rid of any self-induced delay.
+                currentBitRate = static_cast<WebRtc_UWord32>(_beta * incomingBitRate + 0.5);
+                if (currentBitRate > _currentBitRate)
+                {
+                    // Avoid increasing the rate when over-using.
+                    if (_rcRegion != kRcMaxUnknown)
+                    {
+                        currentBitRate = static_cast<WebRtc_UWord32>(_beta * _avgMaxBitRate * 1000 + 0.5f);
+                    }
+                    currentBitRate = BWE_MIN(currentBitRate, _currentBitRate);
+                }
+                ChangeRegion(kRcNearMax);
+
+                if (incomingBitRateKbps < _avgMaxBitRate - 3 * stdMaxBitRate)
+                {
+                    _avgMaxBitRate = -1.0f;
+                }
+
+                UpdateMaxBitRateEstimate(incomingBitRateKbps);
+
+#ifdef MATLAB
+                _plot1->Append("max", incomingBitRateKbps);
+#endif
+
+                WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, "BWE: Decrease rate to currentBitRate = %u kbps", currentBitRate/1000);
+            }
+            // Stay on hold until the pipes are cleared.
+            ChangeState(kRcHold);
+            _lastBitRateChange = nowMS;
+            break;
+        }
+    }
+    if (!recovery && (incomingBitRate > 100000 || currentBitRate > 150000) &&
+        currentBitRate > 1.5 * incomingBitRate)
+    {
+        // Allow changing the bit rate if we are operating at very low rates
+        // Don't change the bit rate if the send side is too far off
+        currentBitRate = _currentBitRate;
+        _lastBitRateChange = nowMS;
+    }
+#ifdef MATLAB
+    if (_avgMaxBitRate >= 0.0f)
+    {
+        _plot1->Append("avgMax", _avgMaxBitRate);
+        _plot1->Append("pStdMax", _avgMaxBitRate + 3*stdMaxBitRate);
+        _plot1->Append("nStdMax", _avgMaxBitRate - 3*stdMaxBitRate);
+    }
+    _plot1->Append("incoming", incomingBitRate/1000);
+    _plot1->Append("current", currentBitRate/1000);
+    _plot1->Plot();
+#endif
+    return currentBitRate;
+}
+
+double RemoteRateControl::RateIncreaseFactor(WebRtc_Word64 nowMs, WebRtc_Word64 lastMs, WebRtc_UWord32 reactionTimeMs, double noiseVar) const
+{
+    // alpha = 1.02 + B ./ (1 + exp(b*(tr - (c1*s2 + c2))))
+    // Parameters
+    const double B = 0.0407;
+    const double b = 0.0025;
+    const double c1 = -6700.0 / (33 * 33);
+    const double c2 = 800.0;
+    const double d = 0.85;
+
+    double alpha = 1.005 + B / (1 + exp( b * (d * reactionTimeMs - (c1 * noiseVar + c2))));
+
+    if (alpha < 1.005)
+    {
+        alpha = 1.005;
+    }
+    else if (alpha > 1.3)
+    {
+        alpha = 1.3;
+    }
+
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
+        "BWE: alpha = %f", alpha);
+#ifdef MATLAB
+            _plot2->Append("alpha", alpha);
+            _plot2->Plot();
+#endif
+
+    if (lastMs > -1)
+    {
+        alpha = pow(alpha, (nowMs - lastMs) / 1000.0);
+    }
+
+    if (_rcRegion == kRcNearMax)
+    {
+        // We're close to our previous maximum. Try to stabilize the
+        // bit rate in this region, by increasing in smaller steps.
+        alpha = alpha - (alpha - 1.0) / 2.0;
+    }
+    else if (_rcRegion == kRcMaxUnknown)
+    {
+        alpha = alpha + (alpha - 1.0) * 2.0;
+    }
+
+    return alpha;
+}
+
+void RemoteRateControl::UpdateChangePeriod(WebRtc_Word64 nowMs)
+{
+    WebRtc_Word64 changePeriod = 0;
+    if (_lastChangeMs > -1)
+    {
+        changePeriod = nowMs - _lastChangeMs;
+    }
+    _lastChangeMs = nowMs;
+    _avgChangePeriod = 0.9f * _avgChangePeriod + 0.1f * changePeriod;
+}
+
+void RemoteRateControl::UpdateMaxBitRateEstimate(float incomingBitRateKbps)
+{
+    const float alpha = 0.05f;
+    if (_avgMaxBitRate == -1.0f)
+    {
+        _avgMaxBitRate = incomingBitRateKbps;
+    }
+    else
+    {
+        _avgMaxBitRate = (1 - alpha) * _avgMaxBitRate +
+                            alpha * incomingBitRateKbps;
+    }
+    // Estimate the max bit rate variance and normalize the variance
+    // with the average max bit rate.
+    const float norm = BWE_MAX(_avgMaxBitRate, 1.0f);
+    _varMaxBitRate = (1 - alpha) * _varMaxBitRate +
+               alpha * (_avgMaxBitRate - incomingBitRateKbps) *
+                       (_avgMaxBitRate - incomingBitRateKbps) /
+                       norm;
+    // 0.4 ~= 14 kbit/s at 500 kbit/s
+    if (_varMaxBitRate < 0.4f)
+    {
+        _varMaxBitRate = 0.4f;
+    }
+    // 2.5f ~= 35 kbit/s at 500 kbit/s
+    if (_varMaxBitRate > 2.5f)
+    {
+        _varMaxBitRate = 2.5f;
+    }
+}
+
+void RemoteRateControl::ChangeState(const RateControlInput& input, WebRtc_Word64 nowMs)
+{
+    switch (_currentInput._bwState)
+    {
+    case kBwNormal:
+        {
+            if (_rcState == kRcHold)
+            {
+                _lastBitRateChange = nowMs;
+                ChangeState(kRcIncrease);
+            }
+            break;
+        }
+    case kBwOverusing:
+        {
+            if (_rcState != kRcDecrease)
+            {
+                ChangeState(kRcDecrease);
+            }
+            break;
+        }
+    case kBwUnderUsing:
+        {
+            ChangeState(kRcHold);
+            break;
+        }
+    }
+}
+
+void RemoteRateControl::ChangeRegion(RateControlRegion region)
+{
+    _rcRegion = region;
+    switch (_rcRegion)
+    {
+    case kRcAboveMax:
+    case kRcMaxUnknown:
+        {
+            _beta = 0.9f;
+            break;
+        }
+    case kRcNearMax:
+        {
+            _beta = 0.95f;
+            break;
+        }
+    }
+}
+
+void RemoteRateControl::ChangeState(RateControlState newState)
+{
+    _cameFromState = _rcState;
+    _rcState = newState;
+    char state1[15];
+    char state2[15];
+    char state3[15];
+    StateStr(_cameFromState, state1);
+    StateStr(_rcState, state2);
+    StateStr(_currentInput._bwState, state3);
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
+                 "\t%s => %s due to %s\n", state1, state2, state3);
+}
+
+void RemoteRateControl::StateStr(RateControlState state, char* str)
+{
+    switch (state)
+    {
+    case kRcDecrease:
+        strncpy(str, "DECREASE", 9);
+        break;
+    case kRcHold:
+        strncpy(str, "HOLD", 5);
+        break;
+    case kRcIncrease:
+        strncpy(str, "INCREASE", 9);
+        break;
+    }
+}
+
+void RemoteRateControl::StateStr(BandwidthUsage state, char* str)
+{
+    switch (state)
+    {
+    case kBwNormal:
+        strncpy(str, "NORMAL", 7);
+        break;
+    case kBwOverusing:
+        strncpy(str, "OVER USING", 11);
+        break;
+    case kBwUnderUsing:
+        strncpy(str, "UNDER USING", 12);
+        break;
+    }
+}
+
+} // namespace webrtc
diff --git a/src/modules/remote_bitrate_estimator/remote_rate_control.h b/src/modules/remote_bitrate_estimator/remote_rate_control.h
new file mode 100644
index 0000000..6c9d116
--- /dev/null
+++ b/src/modules/remote_bitrate_estimator/remote_rate_control.h
@@ -0,0 +1,83 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_REMOTE_RATE_CONTROL_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_REMOTE_RATE_CONTROL_H_
+
+#include "modules/remote_bitrate_estimator/include/bwe_defines.h"
+#include "typedefs.h"
+
+#ifdef MATLAB
+#include "../test/BWEStandAlone/MatlabPlot.h"
+#endif
+
+namespace webrtc {
+class RemoteRateControl
+{
+public:
+    RemoteRateControl();
+    ~RemoteRateControl();
+    WebRtc_Word32 SetConfiguredBitRates(WebRtc_UWord32 minBitRate,
+                                        WebRtc_UWord32 maxBitRate);
+    WebRtc_UWord32 LatestEstimate() const;
+    WebRtc_UWord32 UpdateBandwidthEstimate(WebRtc_Word64 nowMS);
+    void SetRtt(unsigned int rtt);
+    RateControlRegion Update(const RateControlInput* input,
+                             WebRtc_Word64 nowMS);
+    void Reset();
+
+    // Returns true if there is a valid estimate of the incoming bitrate, false
+    // otherwise.
+    bool ValidEstimate() const;
+
+private:
+    WebRtc_UWord32 ChangeBitRate(WebRtc_UWord32 currentBitRate,
+                                 WebRtc_UWord32 incomingBitRate,
+                                 double delayFactor,
+                                 WebRtc_Word64 nowMS);
+    double RateIncreaseFactor(WebRtc_Word64 nowMs,
+                              WebRtc_Word64 lastMs,
+                              WebRtc_UWord32 reactionTimeMs,
+                              double noiseVar) const;
+    void UpdateChangePeriod(WebRtc_Word64 nowMs);
+    void UpdateMaxBitRateEstimate(float incomingBitRateKbps);
+    void ChangeState(const RateControlInput& input, WebRtc_Word64 nowMs);
+    void ChangeState(RateControlState newState);
+    void ChangeRegion(RateControlRegion region);
+    static void StateStr(RateControlState state, char* str);
+    static void StateStr(BandwidthUsage state, char* str);
+
+    WebRtc_UWord32        _minConfiguredBitRate;
+    WebRtc_UWord32        _maxConfiguredBitRate;
+    WebRtc_UWord32        _currentBitRate;
+    WebRtc_UWord32        _maxHoldRate;
+    float               _avgMaxBitRate;
+    float               _varMaxBitRate;
+    RateControlState    _rcState;
+    RateControlState    _cameFromState;
+    RateControlRegion   _rcRegion;
+    WebRtc_Word64         _lastBitRateChange;
+    RateControlInput    _currentInput;
+    bool                _updated;
+    WebRtc_Word64         _timeFirstIncomingEstimate;
+    bool                _initializedBitRate;
+
+    float               _avgChangePeriod;
+    WebRtc_Word64         _lastChangeMs;
+    float               _beta;
+    unsigned int _rtt;
+#ifdef MATLAB
+    MatlabPlot          *_plot1;
+    MatlabPlot          *_plot2;
+#endif
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_REMOTE_RATE_CONTROL_H_
diff --git a/src/modules/rtp_rtcp/OWNERS b/src/modules/rtp_rtcp/OWNERS
new file mode 100644
index 0000000..c968564
--- /dev/null
+++ b/src/modules/rtp_rtcp/OWNERS
@@ -0,0 +1,5 @@
+pwestin@webrtc.org

+stefan@webrtc.org

+henrik.lundin@webrtc.org

+mflodman@webrtc.org

+asapersson@webrtc.org
\ No newline at end of file
diff --git a/src/modules/rtp_rtcp/interface/rtp_rtcp.h b/src/modules/rtp_rtcp/interface/rtp_rtcp.h
new file mode 100644
index 0000000..ab3a4bb
--- /dev/null
+++ b/src/modules/rtp_rtcp/interface/rtp_rtcp.h
@@ -0,0 +1,940 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RTCP_H_
+#define WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RTCP_H_
+
+#include <vector>
+
+#include "modules/interface/module.h"
+#include "modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+
+namespace webrtc {
+// forward declaration
+class RemoteBitrateEstimator;
+class RemoteBitrateObserver;
+class Transport;
+
+class RtpRtcp : public Module {
+ public:
+  struct Configuration {
+    Configuration()
+        : id(-1),
+          audio(false),
+          clock(NULL),
+          default_module(NULL),
+          incoming_data(NULL),
+          incoming_messages(NULL),
+          outgoing_transport(NULL),
+          rtcp_feedback(NULL),
+          intra_frame_callback(NULL),
+          bandwidth_callback(NULL),
+          audio_messages(NULL),
+          remote_bitrate_estimator(NULL) {
+    }
+   /*  id                   - Unique identifier of this RTP/RTCP module object
+    *  audio                - True for a audio version of the RTP/RTCP module
+    *                         object false will create a video version
+    *  clock                - The clock to use to read time. If NULL object
+    *                         will be using the system clock.
+    *  incoming_data        - Callback object that will receive the incoming
+    *                         data
+    *  incoming_messages    - Callback object that will receive the incoming
+    *                         RTP messages.
+    *  outgoing_transport   - Transport object that will be called when packets
+    *                         are ready to be sent out on the network
+    *  rtcp_feedback        - Callback object that will receive the incoming
+    *                         RTP messages.
+    *  intra_frame_callback - Called when the receiver request a intra frame.
+    *  bandwidth_callback   - Called when we receive a changed estimate from
+    *                         the receiver of out stream.
+    *  audio_messages       - Telehone events.
+    *  remote_bitrate_estimator - Estimates the bandwidth available for a set of
+    *                             streams from the same client.
+    */
+    int32_t id;
+    bool audio;
+    RtpRtcpClock* clock;
+    RtpRtcp* default_module;
+    RtpData* incoming_data;
+    RtpFeedback* incoming_messages;
+    Transport* outgoing_transport;
+    RtcpFeedback* rtcp_feedback;
+    RtcpIntraFrameObserver* intra_frame_callback;
+    RtcpBandwidthObserver* bandwidth_callback;
+    RtpAudioFeedback* audio_messages;
+    RemoteBitrateEstimator* remote_bitrate_estimator;
+  };
+  /*
+   *   Create a RTP/RTCP module object using the system clock.
+   *
+   *   configuration  - Configuration of the RTP/RTCP module.
+   */
+  static RtpRtcp* CreateRtpRtcp(const RtpRtcp::Configuration& configuration);
+
+  /**************************************************************************
+   *
+   *   Receiver functions
+   *
+   ***************************************************************************/
+
+    /*
+    *   configure a RTP packet timeout value
+    *
+    *   RTPtimeoutMS   - time in milliseconds after last received RTP packet
+    *   RTCPtimeoutMS  - time in milliseconds after last received RTCP packet
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetPacketTimeout(
+        const WebRtc_UWord32 RTPtimeoutMS,
+        const WebRtc_UWord32 RTCPtimeoutMS) = 0;
+
+    /*
+    *   Set periodic dead or alive notification
+    *
+    *   enable              - turn periodic dead or alive notification on/off
+    *   sampleTimeSeconds   - sample interval in seconds for dead or alive
+    *                         notifications
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetPeriodicDeadOrAliveStatus(
+        const bool enable,
+        const WebRtc_UWord8 sampleTimeSeconds) = 0;
+
+    /*
+    *   Get periodic dead or alive notification status
+    *
+    *   enable              - periodic dead or alive notification on/off
+    *   sampleTimeSeconds   - sample interval in seconds for dead or alive
+    *                         notifications
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 PeriodicDeadOrAliveStatus(
+        bool& enable,
+        WebRtc_UWord8& sampleTimeSeconds) = 0;
+
+    /*
+    *   set voice codec name and payload type
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RegisterReceivePayload(
+        const CodecInst& voiceCodec) = 0;
+
+    /*
+    *   set video codec name and payload type
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RegisterReceivePayload(
+        const VideoCodec& videoCodec) = 0;
+
+    /*
+    *   get payload type for a voice codec
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 ReceivePayloadType(
+        const CodecInst& voiceCodec,
+        WebRtc_Word8* plType) = 0;
+
+    /*
+    *   get payload type for a video codec
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 ReceivePayloadType(
+        const VideoCodec& videoCodec,
+        WebRtc_Word8* plType) = 0;
+
+    /*
+    *   Remove a registered payload type from list of accepted payloads
+    *
+    *   payloadType - payload type of codec
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 DeRegisterReceivePayload(
+        const WebRtc_Word8 payloadType) = 0;
+
+   /*
+    *   (De)register RTP header extension type and id.
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RegisterReceiveRtpHeaderExtension(
+        const RTPExtensionType type,
+        const WebRtc_UWord8 id) = 0;
+
+    virtual WebRtc_Word32 DeregisterReceiveRtpHeaderExtension(
+        const RTPExtensionType type) = 0;
+
+    /*
+    *   Get last received remote timestamp
+    */
+    virtual WebRtc_UWord32 RemoteTimestamp() const = 0;
+
+    /*
+    *   Get the current estimated remote timestamp
+    *
+    *   timestamp   - estimated timestamp
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 EstimatedRemoteTimeStamp(
+        WebRtc_UWord32& timestamp) const = 0;
+
+    /*
+    *   Get incoming SSRC
+    */
+    virtual WebRtc_UWord32 RemoteSSRC() const = 0;
+
+    /*
+    *   Get remote CSRC
+    *
+    *   arrOfCSRC   - array that will receive the CSRCs
+    *
+    *   return -1 on failure else the number of valid entries in the list
+    */
+    virtual WebRtc_Word32 RemoteCSRCs(
+        WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]) const  = 0;
+
+    /*
+    *   get the currently configured SSRC filter
+    *
+    *   allowedSSRC - SSRC that will be allowed through
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SSRCFilter(WebRtc_UWord32& allowedSSRC) const = 0;
+
+    /*
+    *   set a SSRC to be used as a filter for incoming RTP streams
+    *
+    *   allowedSSRC - SSRC that will be allowed through
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetSSRCFilter(const bool enable,
+                                        const WebRtc_UWord32 allowedSSRC) = 0;
+
+    /*
+    * Turn on/off receiving RTX (RFC 4588) on a specific SSRC.
+    */
+    virtual WebRtc_Word32 SetRTXReceiveStatus(const bool enable,
+                                              const WebRtc_UWord32 SSRC) = 0;
+
+    /*
+    * Get status of receiving RTX (RFC 4588) on a specific SSRC.
+    */
+    virtual WebRtc_Word32 RTXReceiveStatus(bool* enable,
+                                           WebRtc_UWord32* SSRC) const = 0;
+
+    /*
+    *   called by the network module when we receive a packet
+    *
+    *   incomingPacket - incoming packet buffer
+    *   packetLength   - length of incoming buffer
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 IncomingPacket(const WebRtc_UWord8* incomingPacket,
+                                         const WebRtc_UWord16 packetLength) = 0;
+
+    /**************************************************************************
+    *
+    *   Sender
+    *
+    ***************************************************************************/
+
+    /*
+    *   set MTU
+    *
+    *   size    -  Max transfer unit in bytes, default is 1500
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetMaxTransferUnit(const WebRtc_UWord16 size) = 0;
+
+    /*
+    *   set transtport overhead
+    *   default is IPv4 and UDP with no encryption
+    *
+    *   TCP                     - true for TCP false UDP
+    *   IPv6                    - true for IP version 6 false for version 4
+    *   authenticationOverhead  - number of bytes to leave for an
+    *                             authentication header
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetTransportOverhead(
+        const bool TCP,
+        const bool IPV6,
+        const WebRtc_UWord8 authenticationOverhead = 0) = 0;
+
+    /*
+    *   Get max payload length
+    *
+    *   A combination of the configuration MaxTransferUnit and
+    *   TransportOverhead.
+    *   Does not account FEC/ULP/RED overhead if FEC is enabled.
+    *   Does not account for RTP headers
+    */
+    virtual WebRtc_UWord16 MaxPayloadLength() const = 0;
+
+    /*
+    *   Get max data payload length
+    *
+    *   A combination of the configuration MaxTransferUnit, headers and
+    *   TransportOverhead.
+    *   Takes into account FEC/ULP/RED overhead if FEC is enabled.
+    *   Takes into account RTP headers
+    */
+    virtual WebRtc_UWord16 MaxDataPayloadLength() const = 0;
+
+    /*
+    *   set codec name and payload type
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RegisterSendPayload(
+        const CodecInst& voiceCodec) = 0;
+
+    /*
+    *   set codec name and payload type
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RegisterSendPayload(
+        const VideoCodec& videoCodec) = 0;
+
+    /*
+    *   Unregister a send payload
+    *
+    *   payloadType - payload type of codec
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 DeRegisterSendPayload(
+        const WebRtc_Word8 payloadType) = 0;
+
+   /*
+    *   (De)register RTP header extension type and id.
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RegisterSendRtpHeaderExtension(
+        const RTPExtensionType type,
+        const WebRtc_UWord8 id) = 0;
+
+    virtual WebRtc_Word32 DeregisterSendRtpHeaderExtension(
+        const RTPExtensionType type) = 0;
+
+   /*
+    *   Enable/disable traffic smoothing of sending stream.
+    */
+    virtual void SetTransmissionSmoothingStatus(const bool enable) = 0;
+
+    virtual bool TransmissionSmoothingStatus() const = 0;
+
+    /*
+    *   get start timestamp
+    */
+    virtual WebRtc_UWord32 StartTimestamp() const = 0;
+
+    /*
+    *   configure start timestamp, default is a random number
+    *
+    *   timestamp   - start timestamp
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetStartTimestamp(
+        const WebRtc_UWord32 timestamp) = 0;
+
+    /*
+    *   Get SequenceNumber
+    */
+    virtual WebRtc_UWord16 SequenceNumber() const = 0;
+
+    /*
+    *   Set SequenceNumber, default is a random number
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetSequenceNumber(const WebRtc_UWord16 seq) = 0;
+
+    /*
+    *   Get SSRC
+    */
+    virtual WebRtc_UWord32 SSRC() const = 0;
+
+    /*
+    *   configure SSRC, default is a random number
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetSSRC(const WebRtc_UWord32 ssrc) = 0;
+
+    /*
+    *   Get CSRC
+    *
+    *   arrOfCSRC   - array of CSRCs
+    *
+    *   return -1 on failure else number of valid entries in the array
+    */
+    virtual WebRtc_Word32 CSRCs(
+        WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]) const = 0;
+
+    /*
+    *   Set CSRC
+    *
+    *   arrOfCSRC   - array of CSRCs
+    *   arrLength   - number of valid entries in the array
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetCSRCs(
+        const WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize],
+        const WebRtc_UWord8 arrLength) = 0;
+
+    /*
+    *   includes CSRCs in RTP header if enabled
+    *
+    *   include CSRC - on/off
+    *
+    *    default:on
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetCSRCStatus(const bool include) = 0;
+
+    /*
+    * Turn on/off sending RTX (RFC 4588) on a specific SSRC.
+    */
+    virtual WebRtc_Word32 SetRTXSendStatus(const bool enable,
+                                           const bool setSSRC,
+                                           const WebRtc_UWord32 SSRC) = 0;
+
+    /*
+    * Get status of sending RTX (RFC 4588) on a specific SSRC.
+    */
+    virtual WebRtc_Word32 RTXSendStatus(bool* enable,
+                                        WebRtc_UWord32* SSRC) const = 0;
+
+    /*
+    *   sends kRtcpByeCode when going from true to false
+    *
+    *   sending - on/off
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetSendingStatus(const bool sending) = 0;
+
+    /*
+    *   get send status
+    */
+    virtual bool Sending() const = 0;
+
+    /*
+    *   Starts/Stops media packets, on by default
+    *
+    *   sending - on/off
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetSendingMediaStatus(const bool sending) = 0;
+
+    /*
+    *   get send status
+    */
+    virtual bool SendingMedia() const = 0;
+
+    /*
+    *   get sent bitrate in Kbit/s
+    */
+    virtual void BitrateSent(WebRtc_UWord32* totalRate,
+                             WebRtc_UWord32* videoRate,
+                             WebRtc_UWord32* fecRate,
+                             WebRtc_UWord32* nackRate) const = 0;
+
+    /*
+     *  Get the receive-side estimate of the available bandwidth.
+     */
+    virtual int EstimatedReceiveBandwidth(
+        WebRtc_UWord32* available_bandwidth) const = 0;
+
+    /*
+    *   Used by the codec module to deliver a video or audio frame for
+    *   packetization.
+    *
+    *   frameType       - type of frame to send
+    *   payloadType     - payload type of frame to send
+    *   timestamp       - timestamp of frame to send
+    *   payloadData     - payload buffer of frame to send
+    *   payloadSize     - size of payload buffer to send
+    *   fragmentation   - fragmentation offset data for fragmented frames such
+    *                     as layers or RED
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SendOutgoingData(
+        const FrameType frameType,
+        const WebRtc_Word8 payloadType,
+        const WebRtc_UWord32 timeStamp,
+        int64_t capture_time_ms,
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord32 payloadSize,
+        const RTPFragmentationHeader* fragmentation = NULL,
+        const RTPVideoHeader* rtpVideoHdr = NULL) = 0;
+
+    /**************************************************************************
+    *
+    *   RTCP
+    *
+    ***************************************************************************/
+
+    /*
+    *    Get RTCP status
+    */
+    virtual RTCPMethod RTCP() const = 0;
+
+    /*
+    *   configure RTCP status i.e on(compound or non- compound)/off
+    *
+    *   method  - RTCP method to use
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetRTCPStatus(const RTCPMethod method) = 0;
+
+    /*
+    *   Set RTCP CName (i.e unique identifier)
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetCNAME(const char cName[RTCP_CNAME_SIZE]) = 0;
+
+    /*
+    *   Get RTCP CName (i.e unique identifier)
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 CNAME(char cName[RTCP_CNAME_SIZE]) = 0;
+
+    /*
+    *   Get remote CName
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RemoteCNAME(
+        const WebRtc_UWord32 remoteSSRC,
+        char cName[RTCP_CNAME_SIZE]) const = 0;
+
+    /*
+    *   Get remote NTP
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RemoteNTP(
+        WebRtc_UWord32 *ReceivedNTPsecs,
+        WebRtc_UWord32 *ReceivedNTPfrac,
+        WebRtc_UWord32 *RTCPArrivalTimeSecs,
+        WebRtc_UWord32 *RTCPArrivalTimeFrac) const  = 0;
+
+    /*
+    *   AddMixedCNAME
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 AddMixedCNAME(
+        const WebRtc_UWord32 SSRC,
+        const char cName[RTCP_CNAME_SIZE]) = 0;
+
+    /*
+    *   RemoveMixedCNAME
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RemoveMixedCNAME(const WebRtc_UWord32 SSRC) = 0;
+
+    /*
+    *   Get RoundTripTime
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RTT(const WebRtc_UWord32 remoteSSRC,
+                              WebRtc_UWord16* RTT,
+                              WebRtc_UWord16* avgRTT,
+                              WebRtc_UWord16* minRTT,
+                              WebRtc_UWord16* maxRTT) const = 0 ;
+
+    /*
+    *   Reset RoundTripTime statistics
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 ResetRTT(const WebRtc_UWord32 remoteSSRC)= 0 ;
+
+    /*
+    *   Force a send of a RTCP packet
+    *   normal SR and RR are triggered via the process function
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SendRTCP(
+        WebRtc_UWord32 rtcpPacketType = kRtcpReport) = 0;
+
+    /*
+    *    Good state of RTP receiver inform sender
+    */
+    virtual WebRtc_Word32 SendRTCPReferencePictureSelection(
+        const WebRtc_UWord64 pictureID) = 0;
+
+    /*
+    *    Send a RTCP Slice Loss Indication (SLI)
+    *    6 least significant bits of pictureID
+    */
+    virtual WebRtc_Word32 SendRTCPSliceLossIndication(
+        const WebRtc_UWord8 pictureID) = 0;
+
+    /*
+    *   Reset RTP statistics
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 ResetStatisticsRTP() = 0;
+
+    /*
+    *   statistics of our localy created statistics of the received RTP stream
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 StatisticsRTP(
+        WebRtc_UWord8* fraction_lost,  // scale 0 to 255
+        WebRtc_UWord32* cum_lost,      // number of lost packets
+        WebRtc_UWord32* ext_max,       // highest sequence number received
+        WebRtc_UWord32* jitter,
+        WebRtc_UWord32* max_jitter = NULL) const = 0;
+
+    /*
+    *   Reset RTP data counters for the receiving side
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 ResetReceiveDataCountersRTP() = 0;
+
+    /*
+    *   Reset RTP data counters for the sending side
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 ResetSendDataCountersRTP() = 0;
+
+    /*
+    *   statistics of the amount of data sent and received
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 DataCountersRTP(
+        WebRtc_UWord32* bytesSent,
+        WebRtc_UWord32* packetsSent,
+        WebRtc_UWord32* bytesReceived,
+        WebRtc_UWord32* packetsReceived) const = 0;
+    /*
+    *   Get received RTCP sender info
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RemoteRTCPStat(RTCPSenderInfo* senderInfo) = 0;
+
+    /*
+    *   Get received RTCP report block
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RemoteRTCPStat(
+        std::vector<RTCPReportBlock>* receiveBlocks) const = 0;
+    /*
+    *   Set received RTCP report block
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 AddRTCPReportBlock(
+        const WebRtc_UWord32 SSRC,
+        const RTCPReportBlock* receiveBlock) = 0;
+
+    /*
+    *   RemoveRTCPReportBlock
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RemoveRTCPReportBlock(const WebRtc_UWord32 SSRC) = 0;
+
+    /*
+    *   (APP) Application specific data
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetRTCPApplicationSpecificData(
+        const WebRtc_UWord8 subType,
+        const WebRtc_UWord32 name,
+        const WebRtc_UWord8* data,
+        const WebRtc_UWord16 length) = 0;
+    /*
+    *   (XR) VOIP metric
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetRTCPVoIPMetrics(
+        const RTCPVoIPMetric* VoIPMetric) = 0;
+
+    /*
+    *  (REMB) Receiver Estimated Max Bitrate
+    */
+    virtual bool REMB() const = 0;
+
+    virtual WebRtc_Word32 SetREMBStatus(const bool enable) = 0;
+
+    virtual WebRtc_Word32 SetREMBData(const WebRtc_UWord32 bitrate,
+                                      const WebRtc_UWord8 numberOfSSRC,
+                                      const WebRtc_UWord32* SSRC) = 0;
+
+    /*
+    *   (IJ) Extended jitter report.
+    */
+    virtual bool IJ() const = 0;
+
+    virtual WebRtc_Word32 SetIJStatus(const bool enable) = 0;
+
+    /*
+    *   (TMMBR) Temporary Max Media Bit Rate
+    */
+    virtual bool TMMBR() const = 0;
+
+    /*
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetTMMBRStatus(const bool enable) = 0;
+
+    /*
+    *   (NACK)
+    */
+    virtual NACKMethod NACK() const  = 0;
+
+    /*
+    *   Turn negative acknowledgement requests on/off
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetNACKStatus(const NACKMethod method) = 0;
+
+    /*
+     *  TODO(holmer): Propagate this API to VideoEngine.
+     *  Returns the currently configured selective retransmission settings.
+     */
+    virtual int SelectiveRetransmissions() const = 0;
+
+    /*
+     *  TODO(holmer): Propagate this API to VideoEngine.
+     *  Sets the selective retransmission settings, which will decide which
+     *  packets will be retransmitted if NACKed. Settings are constructed by
+     *  combining the constants in enum RetransmissionMode with bitwise OR.
+     *  All packets are retransmitted if kRetransmitAllPackets is set, while no
+     *  packets are retransmitted if kRetransmitOff is set.
+     *  By default all packets except FEC packets are retransmitted. For VP8
+     *  with temporal scalability only base layer packets are retransmitted.
+     *
+     *  Returns -1 on failure, otherwise 0.
+     */
+    virtual int SetSelectiveRetransmissions(uint8_t settings) = 0;
+
+    /*
+    *   Send a Negative acknowledgement packet
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SendNACK(const WebRtc_UWord16* nackList,
+                                   const WebRtc_UWord16 size) = 0;
+
+    /*
+    *   Store the sent packets, needed to answer to a Negative acknowledgement
+    *   requests
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetStorePacketsStatus(
+        const bool enable,
+        const WebRtc_UWord16 numberToStore = 200) = 0;
+
+    /**************************************************************************
+    *
+    *   Audio
+    *
+    ***************************************************************************/
+
+    /*
+    *   set audio packet size, used to determine when it's time to send a DTMF
+    *   packet in silence (CNG)
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetAudioPacketSize(
+        const WebRtc_UWord16 packetSizeSamples) = 0;
+
+    /*
+    *   Outband TelephoneEvent(DTMF) detection
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetTelephoneEventStatus(
+        const bool enable,
+        const bool forwardToDecoder,
+        const bool detectEndOfTone = false) = 0;
+
+    /*
+    *   Is outband TelephoneEvent(DTMF) turned on/off?
+    */
+    virtual bool TelephoneEvent() const = 0;
+
+    /*
+    *   Returns true if received DTMF events are forwarded to the decoder using
+    *    the OnPlayTelephoneEvent callback.
+    */
+    virtual bool TelephoneEventForwardToDecoder() const = 0;
+
+    /*
+    *   SendTelephoneEventActive
+    *
+    *   return true if we currently send a telephone event and 100 ms after an
+    *   event is sent used to prevent the telephone event tone to be recorded
+    *   by the microphone and send inband just after the tone has ended.
+    */
+    virtual bool SendTelephoneEventActive(
+        WebRtc_Word8& telephoneEvent) const = 0;
+
+    /*
+    *   Send a TelephoneEvent tone using RFC 2833 (4733)
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SendTelephoneEventOutband(
+        const WebRtc_UWord8 key,
+        const WebRtc_UWord16 time_ms,
+        const WebRtc_UWord8 level) = 0;
+
+    /*
+    *   Set payload type for Redundant Audio Data RFC 2198
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetSendREDPayloadType(
+        const WebRtc_Word8 payloadType) = 0;
+
+    /*
+    *   Get payload type for Redundant Audio Data RFC 2198
+    *
+    *   return -1 on failure else 0
+    */
+     virtual WebRtc_Word32 SendREDPayloadType(
+         WebRtc_Word8& payloadType) const = 0;
+
+     /*
+     * Set status and ID for header-extension-for-audio-level-indication.
+     * See http://tools.ietf.org/html/rfc6464 for more details.
+     *
+     * return -1 on failure else 0
+     */
+     virtual WebRtc_Word32 SetRTPAudioLevelIndicationStatus(
+         const bool enable,
+         const WebRtc_UWord8 ID) = 0;
+
+     /*
+     * Get status and ID for header-extension-for-audio-level-indication.
+     *
+     * return -1 on failure else 0
+     */
+     virtual WebRtc_Word32 GetRTPAudioLevelIndicationStatus(
+         bool& enable,
+         WebRtc_UWord8& ID) const = 0;
+
+     /*
+     * Store the audio level in dBov for header-extension-for-audio-level-
+     * indication.
+     * This API shall be called before transmision of an RTP packet to ensure
+     * that the |level| part of the extended RTP header is updated.
+     *
+     * return -1 on failure else 0.
+     */
+     virtual WebRtc_Word32 SetAudioLevel(const WebRtc_UWord8 level_dBov) = 0;
+
+    /**************************************************************************
+    *
+    *   Video
+    *
+    ***************************************************************************/
+
+    /*
+    *   Set the estimated camera delay in MS
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetCameraDelay(const WebRtc_Word32 delayMS) = 0;
+
+    /*
+    *   Set the target send bitrate
+    */
+    virtual void SetTargetSendBitrate(const WebRtc_UWord32 bitrate) = 0;
+
+    /*
+    *   Turn on/off generic FEC
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetGenericFECStatus(
+        const bool enable,
+        const WebRtc_UWord8 payloadTypeRED,
+        const WebRtc_UWord8 payloadTypeFEC) = 0;
+
+    /*
+    *   Get generic FEC setting
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 GenericFECStatus(bool& enable,
+                                           WebRtc_UWord8& payloadTypeRED,
+                                           WebRtc_UWord8& payloadTypeFEC) = 0;
+
+
+    virtual WebRtc_Word32 SetFecParameters(
+        const FecProtectionParams* delta_params,
+        const FecProtectionParams* key_params) = 0;
+
+    /*
+    *   Set method for requestion a new key frame
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 SetKeyFrameRequestMethod(
+        const KeyFrameRequestMethod method) = 0;
+
+    /*
+    *   send a request for a keyframe
+    *
+    *   return -1 on failure else 0
+    */
+    virtual WebRtc_Word32 RequestKeyFrame() = 0;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RTCP_H_
diff --git a/src/modules/rtp_rtcp/interface/rtp_rtcp_defines.h b/src/modules/rtp_rtcp/interface/rtp_rtcp_defines.h
new file mode 100644
index 0000000..5adb0e8
--- /dev/null
+++ b/src/modules/rtp_rtcp/interface/rtp_rtcp_defines.h
@@ -0,0 +1,255 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RTCP_DEFINES_H_
+#define WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RTCP_DEFINES_H_
+
+#include "typedefs.h"
+#include "module_common_types.h"
+
+#ifndef NULL
+    #define NULL    0
+#endif
+
+#define RTCP_CNAME_SIZE 256    // RFC 3550 page 44, including null termination
+#define IP_PACKET_SIZE 1500    // we assume ethernet
+#define MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS 10
+#define TIMEOUT_SEI_MESSAGES_MS 30000   // in milliseconds
+
+namespace webrtc{
+enum RTCPMethod
+{
+    kRtcpOff          = 0,
+    kRtcpCompound     = 1,
+    kRtcpNonCompound = 2
+};
+
+enum RTPAliveType
+{
+    kRtpDead   = 0,
+    kRtpNoRtp = 1,
+    kRtpAlive  = 2
+};
+
+enum StorageType {
+  kDontStore,
+  kDontRetransmit,
+  kAllowRetransmission
+};
+
+enum RTPExtensionType
+{
+   kRtpExtensionNone,
+   kRtpExtensionTransmissionTimeOffset,
+   kRtpExtensionAudioLevel,
+};
+
+enum RTCPAppSubTypes
+{
+    kAppSubtypeBwe     = 0x00
+};
+
+enum RTCPPacketType
+{
+    kRtcpReport         = 0x0001,
+    kRtcpSr             = 0x0002,
+    kRtcpRr             = 0x0004,
+    kRtcpBye            = 0x0008,
+    kRtcpPli            = 0x0010,
+    kRtcpNack           = 0x0020,
+    kRtcpFir            = 0x0040,
+    kRtcpTmmbr          = 0x0080,
+    kRtcpTmmbn          = 0x0100,
+    kRtcpSrReq          = 0x0200,
+    kRtcpXrVoipMetric   = 0x0400,
+    kRtcpApp            = 0x0800,
+    kRtcpSli            = 0x4000,
+    kRtcpRpsi           = 0x8000,
+    kRtcpRemb           = 0x10000,
+    kRtcpTransmissionTimeOffset = 0x20000
+};
+
+enum KeyFrameRequestMethod
+{
+    kKeyFrameReqFirRtp    = 1,
+    kKeyFrameReqPliRtcp   = 2,
+    kKeyFrameReqFirRtcp   = 3
+};
+
+enum RtpRtcpPacketType
+{
+    kPacketRtp        = 0,
+    kPacketKeepAlive = 1
+};
+
+enum NACKMethod
+{
+    kNackOff      = 0,
+    kNackRtcp     = 2
+};
+
+enum RetransmissionMode {
+  kRetransmitOff          = 0x0,
+  kRetransmitFECPackets   = 0x1,
+  kRetransmitBaseLayer    = 0x2,
+  kRetransmitHigherLayers = 0x4,
+  kRetransmitAllPackets   = 0xFF
+};
+
+struct RTCPSenderInfo
+{
+    WebRtc_UWord32 NTPseconds;
+    WebRtc_UWord32 NTPfraction;
+    WebRtc_UWord32 RTPtimeStamp;
+    WebRtc_UWord32 sendPacketCount;
+    WebRtc_UWord32 sendOctetCount;
+};
+
+struct RTCPReportBlock
+{
+  // Fields as described by RFC 3550 6.4.2.
+    WebRtc_UWord32 remoteSSRC;  // SSRC of sender of this report.
+    WebRtc_UWord32 sourceSSRC;  // SSRC of the RTP packet sender.
+    WebRtc_UWord8 fractionLost;
+    WebRtc_UWord32 cumulativeLost;  // 24 bits valid
+    WebRtc_UWord32 extendedHighSeqNum;
+    WebRtc_UWord32 jitter;
+    WebRtc_UWord32 lastSR;
+    WebRtc_UWord32 delaySinceLastSR;
+};
+
+class RtpData
+{
+public:
+    virtual WebRtc_Word32 OnReceivedPayloadData(
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord16 payloadSize,
+        const WebRtcRTPHeader* rtpHeader) = 0;
+protected:
+    virtual ~RtpData() {}
+};
+
+class RtcpFeedback
+{
+public:
+    virtual void OnApplicationDataReceived(const WebRtc_Word32 /*id*/,
+                                           const WebRtc_UWord8 /*subType*/,
+                                           const WebRtc_UWord32 /*name*/,
+                                           const WebRtc_UWord16 /*length*/,
+                                           const WebRtc_UWord8* /*data*/)  {};
+
+    virtual void OnXRVoIPMetricReceived(
+        const WebRtc_Word32 /*id*/,
+        const RTCPVoIPMetric* /*metric*/)  {};
+
+    virtual void OnRTCPPacketTimeout(const WebRtc_Word32 /*id*/)  {};
+
+    virtual void OnSendReportReceived(const WebRtc_Word32 id,
+                                      const WebRtc_UWord32 senderSSRC)  {};
+
+    virtual void OnReceiveReportReceived(const WebRtc_Word32 id,
+                                         const WebRtc_UWord32 senderSSRC)  {};
+
+protected:
+    virtual ~RtcpFeedback() {}
+};
+
+class RtpFeedback
+{
+public:
+    // Receiving payload change or SSRC change. (return success!)
+    /*
+    *   channels    - number of channels in codec (1 = mono, 2 = stereo)
+    */
+    virtual WebRtc_Word32 OnInitializeDecoder(
+        const WebRtc_Word32 id,
+        const WebRtc_Word8 payloadType,
+        const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+        const int frequency,
+        const WebRtc_UWord8 channels,
+        const WebRtc_UWord32 rate) = 0;
+
+    virtual void OnPacketTimeout(const WebRtc_Word32 id) = 0;
+
+    virtual void OnReceivedPacket(const WebRtc_Word32 id,
+                                  const RtpRtcpPacketType packetType) = 0;
+
+    virtual void OnPeriodicDeadOrAlive(const WebRtc_Word32 id,
+                                       const RTPAliveType alive) = 0;
+
+    virtual void OnIncomingSSRCChanged( const WebRtc_Word32 id,
+                                        const WebRtc_UWord32 SSRC) = 0;
+
+    virtual void OnIncomingCSRCChanged( const WebRtc_Word32 id,
+                                        const WebRtc_UWord32 CSRC,
+                                        const bool added) = 0;
+
+protected:
+    virtual ~RtpFeedback() {}
+};
+
+class RtpAudioFeedback {
+ public:
+  virtual void OnReceivedTelephoneEvent(const WebRtc_Word32 id,
+                                        const WebRtc_UWord8 event,
+                                        const bool endOfEvent) = 0;
+
+  virtual void OnPlayTelephoneEvent(const WebRtc_Word32 id,
+                                    const WebRtc_UWord8 event,
+                                    const WebRtc_UWord16 lengthMs,
+                                    const WebRtc_UWord8 volume) = 0;
+ protected:
+  virtual ~RtpAudioFeedback() {}
+};
+
+class RtcpIntraFrameObserver {
+ public:
+  virtual void OnReceivedIntraFrameRequest(const uint32_t ssrc) = 0;
+
+  virtual void OnReceivedSLI(const uint32_t ssrc,
+                             const uint8_t picture_id) = 0;
+
+  virtual void OnReceivedRPSI(const uint32_t ssrc,
+                              const uint64_t picture_id) = 0;
+
+  virtual ~RtcpIntraFrameObserver() {}
+};
+
+class RtcpBandwidthObserver {
+ public:
+  // REMB or TMMBR
+  virtual void OnReceivedEstimatedBitrate(const uint32_t bitrate) = 0;
+
+  virtual void OnReceivedRtcpReceiverReport(
+      const uint32_t ssrc,
+      const uint8_t fraction_loss,
+      const uint32_t rtt,
+      const uint32_t last_received_extended_high_seqNum,
+      const uint32_t now_ms) = 0;
+
+  virtual ~RtcpBandwidthObserver() {}
+};
+
+// A clock interface that allows reading of absolute and relative
+// timestamps in an RTP/RTCP module.
+class RtpRtcpClock {
+ public:
+  virtual ~RtpRtcpClock() {}
+
+  // Return a timestamp in milliseconds relative to some arbitrary
+  // source; the source is fixed for this clock.
+  virtual WebRtc_Word64 GetTimeInMS() = 0;
+
+  // Retrieve an NTP absolute timestamp.
+  virtual void CurrentNTP(WebRtc_UWord32& secs, WebRtc_UWord32& frac) = 0;
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_INTERFACE_RTP_RTCP_DEFINES_H_
diff --git a/src/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h b/src/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
new file mode 100644
index 0000000..a6849a9
--- /dev/null
+++ b/src/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h
@@ -0,0 +1,293 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_MOCKS_MOCK_RTP_RTCP_H_
+#define WEBRTC_MODULES_RTP_RTCP_MOCKS_MOCK_RTP_RTCP_H_
+
+#include <gmock/gmock.h>
+
+#include "modules/interface/module.h"
+#include "modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+
+namespace webrtc {
+
+class MockRtpRtcp : public RtpRtcp {
+ public:
+  MOCK_METHOD1(ChangeUniqueId,
+      WebRtc_Word32(const WebRtc_Word32 id));
+  MOCK_METHOD1(RegisterDefaultModule,
+      WebRtc_Word32(RtpRtcp* module));
+  MOCK_METHOD0(DeRegisterDefaultModule,
+      WebRtc_Word32());
+  MOCK_METHOD0(DefaultModuleRegistered,
+      bool());
+  MOCK_METHOD0(NumberChildModules,
+      WebRtc_UWord32());
+  MOCK_METHOD1(RegisterSyncModule,
+      WebRtc_Word32(RtpRtcp* module));
+  MOCK_METHOD0(DeRegisterSyncModule,
+      WebRtc_Word32());
+  MOCK_METHOD0(InitReceiver,
+      WebRtc_Word32());
+  MOCK_METHOD1(RegisterIncomingDataCallback,
+      WebRtc_Word32(RtpData* incomingDataCallback));
+  MOCK_METHOD1(RegisterIncomingRTPCallback,
+      WebRtc_Word32(RtpFeedback* incomingMessagesCallback));
+  MOCK_METHOD2(SetPacketTimeout,
+      WebRtc_Word32(const WebRtc_UWord32 RTPtimeoutMS, const WebRtc_UWord32 RTCPtimeoutMS));
+  MOCK_METHOD2(SetPeriodicDeadOrAliveStatus,
+      WebRtc_Word32(const bool enable, const WebRtc_UWord8 sampleTimeSeconds));
+  MOCK_METHOD2(PeriodicDeadOrAliveStatus,
+      WebRtc_Word32(bool &enable, WebRtc_UWord8 &sampleTimeSeconds));
+  MOCK_METHOD1(RegisterReceivePayload,
+      WebRtc_Word32(const CodecInst& voiceCodec));
+  MOCK_METHOD1(RegisterReceivePayload,
+      WebRtc_Word32(const VideoCodec& videoCodec));
+  MOCK_METHOD2(ReceivePayloadType,
+      WebRtc_Word32(const CodecInst& voiceCodec, WebRtc_Word8* plType));
+  MOCK_METHOD2(ReceivePayloadType,
+      WebRtc_Word32(const VideoCodec& videoCodec, WebRtc_Word8* plType));
+  MOCK_METHOD1(DeRegisterReceivePayload,
+      WebRtc_Word32(const WebRtc_Word8 payloadType));
+  MOCK_METHOD2(RegisterReceiveRtpHeaderExtension,
+      WebRtc_Word32(const RTPExtensionType type, const WebRtc_UWord8 id));
+  MOCK_METHOD1(DeregisterReceiveRtpHeaderExtension,
+               WebRtc_Word32(const RTPExtensionType type));
+  MOCK_CONST_METHOD0(RemoteTimestamp,
+      WebRtc_UWord32());
+  MOCK_CONST_METHOD1(EstimatedRemoteTimeStamp,
+      WebRtc_Word32(WebRtc_UWord32& timestamp));
+  MOCK_CONST_METHOD0(RemoteSSRC,
+      WebRtc_UWord32());
+  MOCK_CONST_METHOD1(RemoteCSRCs,
+      WebRtc_Word32(WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]));
+  MOCK_CONST_METHOD1(SSRCFilter,
+      WebRtc_Word32(WebRtc_UWord32& allowedSSRC));
+  MOCK_METHOD2(SetSSRCFilter,
+      WebRtc_Word32(const bool enable, const WebRtc_UWord32 allowedSSRC));
+  MOCK_METHOD2(SetRTXReceiveStatus,
+      WebRtc_Word32(const bool enable, const WebRtc_UWord32 SSRC));
+  MOCK_CONST_METHOD2(RTXReceiveStatus,
+      WebRtc_Word32(bool* enable, WebRtc_UWord32* SSRC));
+  MOCK_METHOD2(IncomingPacket,
+      WebRtc_Word32(const WebRtc_UWord8* incomingPacket, const WebRtc_UWord16 packetLength));
+  MOCK_METHOD4(IncomingAudioNTP,
+      WebRtc_Word32(const WebRtc_UWord32 audioReceivedNTPsecs,
+                    const WebRtc_UWord32 audioReceivedNTPfrac,
+                    const WebRtc_UWord32 audioRTCPArrivalTimeSecs,
+                    const WebRtc_UWord32 audioRTCPArrivalTimeFrac));
+  MOCK_METHOD0(InitSender,
+      WebRtc_Word32());
+  MOCK_METHOD1(RegisterSendTransport,
+      WebRtc_Word32(Transport* outgoingTransport));
+  MOCK_METHOD1(SetMaxTransferUnit,
+      WebRtc_Word32(const WebRtc_UWord16 size));
+  MOCK_METHOD3(SetTransportOverhead,
+      WebRtc_Word32(const bool TCP, const bool IPV6,
+                    const WebRtc_UWord8 authenticationOverhead));
+  MOCK_CONST_METHOD0(MaxPayloadLength,
+      WebRtc_UWord16());
+  MOCK_CONST_METHOD0(MaxDataPayloadLength,
+      WebRtc_UWord16());
+  MOCK_METHOD1(RegisterSendPayload,
+      WebRtc_Word32(const CodecInst& voiceCodec));
+  MOCK_METHOD1(RegisterSendPayload,
+      WebRtc_Word32(const VideoCodec& videoCodec));
+  MOCK_METHOD1(DeRegisterSendPayload,
+      WebRtc_Word32(const WebRtc_Word8 payloadType));
+  MOCK_METHOD2(RegisterSendRtpHeaderExtension,
+      WebRtc_Word32(const RTPExtensionType type, const WebRtc_UWord8 id));
+  MOCK_METHOD1(DeregisterSendRtpHeaderExtension,
+      WebRtc_Word32(const RTPExtensionType type));
+  MOCK_METHOD1(SetTransmissionSmoothingStatus,
+      void(const bool enable));
+  MOCK_CONST_METHOD0(TransmissionSmoothingStatus,
+      bool());
+  MOCK_CONST_METHOD0(StartTimestamp,
+      WebRtc_UWord32());
+  MOCK_METHOD1(SetStartTimestamp,
+      WebRtc_Word32(const WebRtc_UWord32 timestamp));
+  MOCK_CONST_METHOD0(SequenceNumber,
+      WebRtc_UWord16());
+  MOCK_METHOD1(SetSequenceNumber,
+      WebRtc_Word32(const WebRtc_UWord16 seq));
+  MOCK_CONST_METHOD0(SSRC,
+      WebRtc_UWord32());
+  MOCK_METHOD1(SetSSRC,
+      WebRtc_Word32(const WebRtc_UWord32 ssrc));
+  MOCK_CONST_METHOD1(CSRCs,
+      WebRtc_Word32(WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]));
+  MOCK_METHOD2(SetCSRCs,
+      WebRtc_Word32(const WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize], const WebRtc_UWord8 arrLength));
+  MOCK_METHOD1(SetCSRCStatus,
+      WebRtc_Word32(const bool include));
+  MOCK_METHOD3(SetRTXSendStatus,
+      WebRtc_Word32(const bool enable, const bool setSSRC, const WebRtc_UWord32 SSRC));
+ MOCK_CONST_METHOD2(RTXSendStatus,
+      WebRtc_Word32(bool* enable, WebRtc_UWord32* SSRC));
+  MOCK_METHOD1(SetSendingStatus,
+      WebRtc_Word32(const bool sending));
+  MOCK_CONST_METHOD0(Sending,
+      bool());
+  MOCK_METHOD1(SetSendingMediaStatus,
+      WebRtc_Word32(const bool sending));
+  MOCK_CONST_METHOD0(SendingMedia,
+      bool());
+  MOCK_CONST_METHOD4(BitrateSent,
+      void(WebRtc_UWord32* totalRate, WebRtc_UWord32* videoRate, WebRtc_UWord32* fecRate, WebRtc_UWord32* nackRate));
+  MOCK_CONST_METHOD1(EstimatedReceiveBandwidth,
+      int(WebRtc_UWord32* available_bandwidth));
+  MOCK_METHOD8(SendOutgoingData,
+      WebRtc_Word32(const FrameType frameType,
+                    const WebRtc_Word8 payloadType,
+                    const WebRtc_UWord32 timeStamp,
+                    int64_t capture_time_ms,
+                    const WebRtc_UWord8* payloadData,
+                    const WebRtc_UWord32 payloadSize,
+                    const RTPFragmentationHeader* fragmentation,
+                    const RTPVideoHeader* rtpVideoHdr));
+  MOCK_METHOD3(RegisterRtcpObservers,
+      void(RtcpIntraFrameObserver* intraFrameCallback,
+           RtcpBandwidthObserver* bandwidthCallback,
+           RtcpFeedback* callback));
+  MOCK_CONST_METHOD0(RTCP,
+      RTCPMethod());
+  MOCK_METHOD1(SetRTCPStatus,
+      WebRtc_Word32(const RTCPMethod method));
+  MOCK_METHOD1(SetCNAME,
+      WebRtc_Word32(const char cName[RTCP_CNAME_SIZE]));
+  MOCK_METHOD1(CNAME,
+      WebRtc_Word32(char cName[RTCP_CNAME_SIZE]));
+  MOCK_CONST_METHOD2(RemoteCNAME,
+      WebRtc_Word32(const WebRtc_UWord32 remoteSSRC,
+                    char cName[RTCP_CNAME_SIZE]));
+  MOCK_CONST_METHOD4(RemoteNTP,
+      WebRtc_Word32(WebRtc_UWord32 *ReceivedNTPsecs, WebRtc_UWord32 *ReceivedNTPfrac, WebRtc_UWord32 *RTCPArrivalTimeSecs, WebRtc_UWord32 *RTCPArrivalTimeFrac));
+  MOCK_METHOD2(AddMixedCNAME,
+      WebRtc_Word32(const WebRtc_UWord32 SSRC,
+                    const char cName[RTCP_CNAME_SIZE]));
+  MOCK_METHOD1(RemoveMixedCNAME,
+      WebRtc_Word32(const WebRtc_UWord32 SSRC));
+  MOCK_CONST_METHOD5(RTT,
+      WebRtc_Word32(const WebRtc_UWord32 remoteSSRC, WebRtc_UWord16* RTT, WebRtc_UWord16* avgRTT, WebRtc_UWord16* minRTT, WebRtc_UWord16* maxRTT));
+  MOCK_METHOD1(ResetRTT,
+      WebRtc_Word32(const WebRtc_UWord32 remoteSSRC));
+  MOCK_METHOD1(SendRTCP,
+      WebRtc_Word32(WebRtc_UWord32 rtcpPacketType));
+  MOCK_METHOD1(SendRTCPReferencePictureSelection,
+      WebRtc_Word32(const WebRtc_UWord64 pictureID));
+  MOCK_METHOD1(SendRTCPSliceLossIndication,
+      WebRtc_Word32(const WebRtc_UWord8 pictureID));
+  MOCK_METHOD0(ResetStatisticsRTP,
+      WebRtc_Word32());
+  MOCK_CONST_METHOD5(StatisticsRTP,
+      WebRtc_Word32(WebRtc_UWord8 *fraction_lost, WebRtc_UWord32 *cum_lost, WebRtc_UWord32 *ext_max, WebRtc_UWord32 *jitter, WebRtc_UWord32 *max_jitter));
+  MOCK_METHOD0(ResetReceiveDataCountersRTP,
+      WebRtc_Word32());
+  MOCK_METHOD0(ResetSendDataCountersRTP,
+      WebRtc_Word32());
+  MOCK_CONST_METHOD4(DataCountersRTP,
+      WebRtc_Word32(WebRtc_UWord32 *bytesSent, WebRtc_UWord32 *packetsSent, WebRtc_UWord32 *bytesReceived, WebRtc_UWord32 *packetsReceived));
+  MOCK_METHOD1(RemoteRTCPStat,
+      WebRtc_Word32(RTCPSenderInfo* senderInfo));
+  MOCK_CONST_METHOD1(RemoteRTCPStat,
+      WebRtc_Word32(std::vector<RTCPReportBlock>* receiveBlocks));
+  MOCK_METHOD2(AddRTCPReportBlock,
+      WebRtc_Word32(const WebRtc_UWord32 SSRC, const RTCPReportBlock* receiveBlock));
+  MOCK_METHOD1(RemoveRTCPReportBlock,
+      WebRtc_Word32(const WebRtc_UWord32 SSRC));
+  MOCK_METHOD4(SetRTCPApplicationSpecificData,
+      WebRtc_Word32(const WebRtc_UWord8 subType, const WebRtc_UWord32 name, const WebRtc_UWord8* data, const WebRtc_UWord16 length));
+  MOCK_METHOD1(SetRTCPVoIPMetrics,
+      WebRtc_Word32(const RTCPVoIPMetric* VoIPMetric));
+  MOCK_CONST_METHOD0(REMB,
+      bool());
+  MOCK_METHOD1(SetREMBStatus,
+      WebRtc_Word32(const bool enable));
+  MOCK_METHOD3(SetREMBData,
+      WebRtc_Word32(const WebRtc_UWord32 bitrate, const WebRtc_UWord8 numberOfSSRC, const WebRtc_UWord32* SSRC));
+  MOCK_METHOD1(SetRemoteBitrateObserver,
+      bool(RemoteBitrateObserver*));
+  MOCK_CONST_METHOD0(IJ,
+      bool());
+  MOCK_METHOD1(SetIJStatus,
+      WebRtc_Word32(const bool));
+  MOCK_CONST_METHOD0(TMMBR,
+      bool());
+  MOCK_METHOD1(SetTMMBRStatus,
+      WebRtc_Word32(const bool enable));
+  MOCK_METHOD1(OnBandwidthEstimateUpdate,
+      void(WebRtc_UWord16 bandWidthKbit));
+  MOCK_CONST_METHOD0(NACK,
+      NACKMethod());
+  MOCK_METHOD1(SetNACKStatus,
+      WebRtc_Word32(const NACKMethod method));
+  MOCK_CONST_METHOD0(SelectiveRetransmissions,
+      int());
+  MOCK_METHOD1(SetSelectiveRetransmissions,
+      int(uint8_t settings));
+  MOCK_METHOD2(SendNACK,
+      WebRtc_Word32(const WebRtc_UWord16* nackList, const WebRtc_UWord16 size));
+  MOCK_METHOD2(SetStorePacketsStatus,
+      WebRtc_Word32(const bool enable, const WebRtc_UWord16 numberToStore));
+  MOCK_METHOD1(RegisterAudioCallback,
+      WebRtc_Word32(RtpAudioFeedback* messagesCallback));
+  MOCK_METHOD1(SetAudioPacketSize,
+      WebRtc_Word32(const WebRtc_UWord16 packetSizeSamples));
+  MOCK_METHOD3(SetTelephoneEventStatus,
+      WebRtc_Word32(const bool enable, const bool forwardToDecoder, const bool detectEndOfTone));
+  MOCK_CONST_METHOD0(TelephoneEvent,
+      bool());
+  MOCK_CONST_METHOD0(TelephoneEventForwardToDecoder,
+      bool());
+  MOCK_CONST_METHOD1(SendTelephoneEventActive,
+      bool(WebRtc_Word8& telephoneEvent));
+  MOCK_METHOD3(SendTelephoneEventOutband,
+      WebRtc_Word32(const WebRtc_UWord8 key, const WebRtc_UWord16 time_ms, const WebRtc_UWord8 level));
+  MOCK_METHOD1(SetSendREDPayloadType,
+      WebRtc_Word32(const WebRtc_Word8 payloadType));
+  MOCK_CONST_METHOD1(SendREDPayloadType,
+      WebRtc_Word32(WebRtc_Word8& payloadType));
+  MOCK_METHOD2(SetRTPAudioLevelIndicationStatus,
+      WebRtc_Word32(const bool enable, const WebRtc_UWord8 ID));
+  MOCK_CONST_METHOD2(GetRTPAudioLevelIndicationStatus,
+      WebRtc_Word32(bool& enable, WebRtc_UWord8& ID));
+  MOCK_METHOD1(SetAudioLevel,
+      WebRtc_Word32(const WebRtc_UWord8 level_dBov));
+  MOCK_METHOD1(SetCameraDelay,
+      WebRtc_Word32(const WebRtc_Word32 delayMS));
+  MOCK_METHOD1(SetTargetSendBitrate,
+      void(const WebRtc_UWord32 bitrate));
+  MOCK_METHOD3(SetGenericFECStatus,
+      WebRtc_Word32(const bool enable, const WebRtc_UWord8 payloadTypeRED, const WebRtc_UWord8 payloadTypeFEC));
+  MOCK_METHOD3(GenericFECStatus,
+      WebRtc_Word32(bool& enable, WebRtc_UWord8& payloadTypeRED, WebRtc_UWord8& payloadTypeFEC));
+  MOCK_METHOD2(SetFecParameters,
+      WebRtc_Word32(const FecProtectionParams* delta_params,
+                    const FecProtectionParams* key_params));
+  MOCK_METHOD1(SetKeyFrameRequestMethod,
+      WebRtc_Word32(const KeyFrameRequestMethod method));
+  MOCK_METHOD0(RequestKeyFrame,
+      WebRtc_Word32());
+  MOCK_CONST_METHOD3(Version,
+      int32_t(char* version, uint32_t& remaining_buffer_in_bytes, uint32_t& position));
+  MOCK_METHOD0(TimeUntilNextProcess,
+        int32_t());
+  MOCK_METHOD0(Process,
+        int32_t());
+
+  // Members.
+  unsigned int remote_ssrc_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_RTP_RTCP_MOCKS_MOCK_RTP_RTCP_H_
diff --git a/src/modules/rtp_rtcp/source/Bitrate.h b/src/modules/rtp_rtcp/source/Bitrate.h
new file mode 100644
index 0000000..be45343
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/Bitrate.h
@@ -0,0 +1,60 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_BITRATE_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_BITRATE_H_
+
+#include "typedefs.h"
+#include "rtp_rtcp_config.h"     // misc. defines (e.g. MAX_PACKET_LENGTH)
+#include "common_types.h"            // Transport
+#include <stdio.h>
+#include <list>
+
+namespace webrtc {
+class RtpRtcpClock;
+
+class Bitrate
+{
+public:
+    Bitrate(RtpRtcpClock* clock);
+
+    // calculate rates
+    void Process();
+
+    // update with a packet
+    void Update(const WebRtc_Word32 bytes);
+
+    // packet rate last second, updated roughly every 100 ms
+    WebRtc_UWord32 PacketRate() const;
+
+    // bitrate last second, updated roughly every 100 ms
+    WebRtc_UWord32 BitrateLast() const;
+
+    // bitrate last second, updated now
+    WebRtc_UWord32 BitrateNow() const;
+
+protected:
+  RtpRtcpClock& _clock;
+
+private:
+  WebRtc_UWord32 _packetRate;
+  WebRtc_UWord32 _bitrate;
+  WebRtc_UWord8 _bitrateNextIdx;
+  WebRtc_Word64 _packetRateArray[10];
+  WebRtc_Word64 _bitrateArray[10];
+  WebRtc_Word64 _bitrateDiffMS[10];
+  WebRtc_Word64 _timeLastRateUpdate;
+  WebRtc_UWord32 _bytesCount;
+  WebRtc_UWord32 _packetCount;
+};
+
+}  // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_BITRATE_H_
diff --git a/src/modules/rtp_rtcp/source/H264/bitstream_builder.cc b/src/modules/rtp_rtcp/source/H264/bitstream_builder.cc
new file mode 100644
index 0000000..05b7e2f
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/H264/bitstream_builder.cc
@@ -0,0 +1,580 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "bitstream_builder.h"
+
+#include <string.h>
+
+namespace webrtc {
+BitstreamBuilder::BitstreamBuilder(WebRtc_UWord8* data, const WebRtc_UWord32 dataSize) :
+    _data(data),
+    _dataSize(dataSize),
+    _byteOffset(0),
+    _bitOffset(0)
+{
+    memset(data, 0, dataSize);
+}
+
+WebRtc_UWord32
+BitstreamBuilder::Length() const
+{
+    return _byteOffset+ (_bitOffset?1:0);
+}
+
+WebRtc_Word32
+BitstreamBuilder::Add1Bit(const WebRtc_UWord8 bit)
+{
+    // sanity
+    if(_bitOffset + 1 > 8)
+    {
+        if(_dataSize < Length()+1)
+        {
+            // not enough space in buffer
+            return -1;
+        }
+    }
+    Add1BitWithoutSanity(bit);
+    return 0;
+}
+
+void
+BitstreamBuilder::Add1BitWithoutSanity(const WebRtc_UWord8 bit)
+{
+    if(bit & 0x1)
+    {
+        _data[_byteOffset] += (1 << (7-_bitOffset));
+    }
+
+    if(_bitOffset == 7)
+    {
+        // last bit in byte
+        _bitOffset = 0;
+        _byteOffset++;
+    } else
+    {
+        _bitOffset++;
+    }
+}
+
+WebRtc_Word32
+BitstreamBuilder::Add2Bits(const WebRtc_UWord8 bits)
+{
+    // sanity
+    if(_bitOffset + 2 > 8)
+    {
+        if(_dataSize < Length()+1)
+        {
+            // not enough space in buffer
+            return -1;
+        }
+    }
+    Add1BitWithoutSanity(bits >> 1);
+    Add1BitWithoutSanity(bits);
+    return 0;
+}
+
+WebRtc_Word32
+BitstreamBuilder::Add3Bits(const WebRtc_UWord8 bits)
+{
+    // sanity
+    if(_bitOffset + 3 > 8)
+    {
+        if(_dataSize < Length()+1)
+        {
+            // not enough space in buffer
+            return -1;
+        }
+    }
+    Add1BitWithoutSanity(bits >> 2);
+    Add1BitWithoutSanity(bits >> 1);
+    Add1BitWithoutSanity(bits);
+    return 0;
+}
+
+WebRtc_Word32
+BitstreamBuilder::Add4Bits(const WebRtc_UWord8 bits)
+{
+    // sanity
+    if(_bitOffset + 4 > 8)
+    {
+        if(_dataSize < Length()+1)
+        {
+            // not enough space in buffer
+            return -1;
+        }
+    }
+    Add1BitWithoutSanity(bits >> 3);
+    Add1BitWithoutSanity(bits >> 2);
+    Add1BitWithoutSanity(bits >> 1);
+    Add1BitWithoutSanity(bits);
+    return 0;
+}
+
+WebRtc_Word32
+BitstreamBuilder::Add5Bits(const WebRtc_UWord8 bits)
+{
+    // sanity
+    if(_bitOffset + 5 > 8)
+    {
+        if(_dataSize < Length()+1)
+        {
+            // not enough space in buffer
+            return -1;
+        }
+    }
+    Add1BitWithoutSanity(bits >> 4);
+    Add1BitWithoutSanity(bits >> 3);
+    Add1BitWithoutSanity(bits >> 2);
+    Add1BitWithoutSanity(bits >> 1);
+    Add1BitWithoutSanity(bits);
+    return 0;
+}
+
+WebRtc_Word32
+BitstreamBuilder::Add6Bits(const WebRtc_UWord8 bits)
+{
+    // sanity
+    if(_bitOffset + 6 > 8)
+    {
+        if(_dataSize < Length()+1)
+        {
+            // not enough space in buffer
+            return -1;
+        }
+    }
+    Add1BitWithoutSanity(bits >> 5);
+    Add1BitWithoutSanity(bits >> 4);
+    Add1BitWithoutSanity(bits >> 3);
+    Add1BitWithoutSanity(bits >> 2);
+    Add1BitWithoutSanity(bits >> 1);
+    Add1BitWithoutSanity(bits);
+    return 0;
+}
+
+WebRtc_Word32
+BitstreamBuilder::Add7Bits(const WebRtc_UWord8 bits)
+{
+    // sanity
+    if(_bitOffset + 7 > 8)
+    {
+        if(_dataSize < Length()+1)
+        {
+            // not enough space in buffer
+            return -1;
+        }
+    }
+    Add1BitWithoutSanity(bits >> 6);
+    Add1BitWithoutSanity(bits >> 5);
+    Add1BitWithoutSanity(bits >> 4);
+    Add1BitWithoutSanity(bits >> 3);
+    Add1BitWithoutSanity(bits >> 2);
+    Add1BitWithoutSanity(bits >> 1);
+    Add1BitWithoutSanity(bits);
+    return 0;
+}
+
+WebRtc_Word32
+BitstreamBuilder::Add8Bits(const WebRtc_UWord8 bits)
+{
+    // sanity
+    if(_dataSize < Length()+1)
+    {
+        // not enough space in buffer
+        return -1;
+    }
+    if(_bitOffset == 0)
+    {
+        _data[_byteOffset] = bits;
+    } else
+    {
+        _data[_byteOffset] += (bits >> _bitOffset);
+        _data[_byteOffset+1] += (bits << (8-_bitOffset));
+    }
+    _byteOffset++;
+    return 0;
+}
+
+WebRtc_Word32
+BitstreamBuilder::Add16Bits(const WebRtc_UWord16 bits)
+{
+    // sanity
+    if(_dataSize < Length()+2)
+    {
+        // not enough space in buffer
+        return -1;
+    }
+    if(_bitOffset == 0)
+    {
+        _data[_byteOffset] = (WebRtc_UWord8)(bits >> 8);
+        _data[_byteOffset+1] = (WebRtc_UWord8)(bits);
+    } else
+    {
+        _data[_byteOffset] += (WebRtc_UWord8)(bits >> (_bitOffset + 8));
+        _data[_byteOffset+1] += (WebRtc_UWord8)(bits >> _bitOffset);
+        _data[_byteOffset+2] += (WebRtc_UWord8)(bits << (8-_bitOffset));
+    }
+    _byteOffset += 2;
+    return 0;
+}
+
+WebRtc_Word32
+BitstreamBuilder::Add24Bits(const WebRtc_UWord32 bits)
+{
+    // sanity
+    if(_dataSize < Length()+3)
+    {
+        // not enough space in buffer
+        return -1;
+    }
+    if(_bitOffset == 0)
+    {
+        _data[_byteOffset] = (WebRtc_UWord8)(bits >> 16);
+        _data[_byteOffset+1] = (WebRtc_UWord8)(bits >> 8);
+        _data[_byteOffset+2] = (WebRtc_UWord8)(bits);
+    } else
+    {
+        _data[_byteOffset]   += (WebRtc_UWord8)(bits >> (_bitOffset+16));
+        _data[_byteOffset+1] += (WebRtc_UWord8)(bits >> (_bitOffset+8));
+        _data[_byteOffset+2] += (WebRtc_UWord8)(bits >> (_bitOffset));
+        _data[_byteOffset+3] += (WebRtc_UWord8)(bits << (8-_bitOffset));
+    }
+    _byteOffset += 3;
+    return 0;
+}
+
+WebRtc_Word32
+BitstreamBuilder::Add32Bits(const WebRtc_UWord32 bits)
+{
+    // sanity
+    if(_dataSize < Length()+4)
+    {
+        // not enough space in buffer
+        return -1;
+    }
+    if(_bitOffset == 0)
+    {
+        _data[_byteOffset]   = (WebRtc_UWord8)(bits >> 24);
+        _data[_byteOffset+1] = (WebRtc_UWord8)(bits >> 16);
+        _data[_byteOffset+2] = (WebRtc_UWord8)(bits >> 8);
+        _data[_byteOffset+3] = (WebRtc_UWord8)(bits);
+    } else
+    {
+        _data[_byteOffset]   += (WebRtc_UWord8)(bits >> (_bitOffset+24));
+        _data[_byteOffset+1] += (WebRtc_UWord8)(bits >> (_bitOffset+16));
+        _data[_byteOffset+2] += (WebRtc_UWord8)(bits >> (_bitOffset+8));
+        _data[_byteOffset+3] += (WebRtc_UWord8)(bits >> (_bitOffset));
+        _data[_byteOffset+4] += (WebRtc_UWord8)(bits << (8-_bitOffset));
+    }
+    _byteOffset += 4;
+    return 0;
+}
+
+// Exp-Golomb codes
+/*
+    with "prefix" and "suffix" bits and assignment to codeNum ranges (informative)
+    Bit string form Range of codeNum
+              1                0
+            0 1 x0             1..2      2bits-1
+          0 0 1 x1 x0          3..6      3bits-1
+        0 0 0 1 x2 x1 x0       7..14     4bits-1
+      0 0 0 0 1 x3 x2 x1 x0    15..30
+    0 0 0 0 0 1 x4 x3 x2 x1 x0 31..62
+*/
+WebRtc_Word32
+BitstreamBuilder::AddUE(const WebRtc_UWord32 value)
+{
+    // un-rolled on 8 bit base to avoid too deep if else chain
+    if(value < 0x0000ffff)
+    {
+        if(value < 0x000000ff)
+        {
+            if(value == 0)
+            {
+                if(AddPrefix(0) != 0)
+                {
+                    return -1;
+                }
+            } else if(value < 3)
+            {
+                if(AddPrefix(1) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(1, value-1);
+            } else if(value < 7)
+            {
+                if(AddPrefix(2) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(2, value-3);
+            } else if(value < 15)
+            {
+                if(AddPrefix(3) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(3, value-7);
+            } else if(value < 31)
+            {
+                if(AddPrefix(4) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(4, value-15);
+            } else if(value < 63)
+            {
+                if(AddPrefix(5) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(5, value-31);
+            } else if(value < 127)
+            {
+                if(AddPrefix(6) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(6, value-63);
+            } else
+            {
+                if(AddPrefix(7) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(7, value-127);
+            }
+        }else
+        {
+            if(value < 0x000001ff)
+            {
+                if(AddPrefix(8) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(8, value-0x000000ff);
+            } else if(value < 0x000003ff)
+            {
+                if(AddPrefix(9) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(9, value-0x000001ff);
+            } else if(value < 0x000007ff)
+            {
+                if(AddPrefix(10) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(10, value-0x000003ff);
+            } else if(value < 0x00000fff)
+            {
+                if(AddPrefix(11) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(1, value-0x000007ff);
+            } else if(value < 0x00001fff)
+            {
+                if(AddPrefix(12) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(12, value-0x00000fff);
+            } else if(value < 0x00003fff)
+            {
+                if(AddPrefix(13) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(13, value-0x00001fff);
+            } else if(value < 0x00007fff)
+            {
+                if(AddPrefix(14) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(14, value-0x00003fff);
+            } else
+            {
+                if(AddPrefix(15) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(15, value-0x00007fff);
+            }
+        }
+    }else
+    {
+        if(value < 0x00ffffff)
+        {
+            if(value < 0x0001ffff)
+            {
+                if(AddPrefix(16) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(16, value-0x0000ffff);
+            } else if(value < 0x0003ffff)
+            {
+                if(AddPrefix(17) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(17, value-0x0001ffff);
+            } else if(value < 0x0007ffff)
+            {
+                if(AddPrefix(18) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(18, value-0x0003ffff);
+            } else if(value < 0x000fffff)
+            {
+                if(AddPrefix(19) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(19, value-0x0007ffff);
+            } else if(value < 0x001fffff)
+            {
+                if(AddPrefix(20) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(20, value-0x000fffff);
+            } else if(value < 0x003fffff)
+            {
+                if(AddPrefix(21) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(21, value-0x001fffff);
+            } else if(value < 0x007fffff)
+            {
+                if(AddPrefix(22) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(22, value-0x003fffff);
+            } else
+            {
+                if(AddPrefix(23) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(23, value-0x007fffff);
+            }
+        } else
+        {
+            if(value < 0x01ffffff)
+            {
+                if(AddPrefix(24) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(24, value-0x00ffffff);
+            } else if(value < 0x03ffffff)
+            {
+                if(AddPrefix(25) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(25, value-0x01ffffff);
+            } else if(value < 0x07ffffff)
+            {
+                if(AddPrefix(26) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(26, value-0x03ffffff);
+            } else if(value < 0x0fffffff)
+            {
+                if(AddPrefix(27) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(27, value-0x07ffffff);
+            } else if(value < 0x1fffffff)
+            {
+                if(AddPrefix(28) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(28, value-0x0fffffff);
+            } else if(value < 0x3fffffff)
+            {
+                if(AddPrefix(29) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(29, value-0x1fffffff);
+            } else if(value < 0x7fffffff)
+            {
+                if(AddPrefix(30) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(30, value-0x3fffffff);
+            } else if(value < 0xffffffff)
+            {
+                if(AddPrefix(31) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(31, value-0x7ffffff);
+            } else
+            {
+                if(AddPrefix(32) != 0)
+                {
+                    return -1;
+                }
+                AddSuffix(32, 0); // exactly 0xffffffff
+            }
+        }
+    }
+    return 0;
+}
+
+WebRtc_Word32
+BitstreamBuilder::AddPrefix(const WebRtc_UWord8 numZeros)
+{
+    // sanity for the sufix too
+    WebRtc_UWord32 numBitsToAdd = numZeros * 2 + 1;
+    if(((_dataSize - _byteOffset) *8 + 8-_bitOffset) < numBitsToAdd)
+    {
+        return -1;
+    }
+
+    // add numZeros
+    for (WebRtc_UWord32 i = 0; i < numZeros; i++)
+    {
+        Add1Bit(0);
+    }
+    Add1Bit(1);
+    return 0;
+}
+
+void
+BitstreamBuilder::AddSuffix(const WebRtc_UWord8 numBits, const WebRtc_UWord32 rest)
+{
+    // most significant bit first
+    for(WebRtc_Word32 i = numBits - 1; i >= 0; i--)
+    {
+        if(( rest >> i) & 0x1)
+        {
+            Add1Bit(1);
+        }else
+        {
+            Add1Bit(0);
+        }
+    }
+}
+} // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/H264/bitstream_builder.h b/src/modules/rtp_rtcp/source/H264/bitstream_builder.h
new file mode 100644
index 0000000..c88ef8f
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/H264/bitstream_builder.h
@@ -0,0 +1,52 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_BITSTREAM_BUILDER_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_BITSTREAM_BUILDER_H_
+
+#include "typedefs.h"
+
+namespace webrtc {
+class BitstreamBuilder
+{
+public:
+    BitstreamBuilder(WebRtc_UWord8* data, const WebRtc_UWord32 dataSize);
+
+    WebRtc_UWord32 Length() const;
+
+    WebRtc_Word32 Add1Bit(const WebRtc_UWord8 bit);
+    WebRtc_Word32 Add2Bits(const WebRtc_UWord8 bits);
+    WebRtc_Word32 Add3Bits(const WebRtc_UWord8 bits);
+    WebRtc_Word32 Add4Bits(const WebRtc_UWord8 bits);
+    WebRtc_Word32 Add5Bits(const WebRtc_UWord8 bits);
+    WebRtc_Word32 Add6Bits(const WebRtc_UWord8 bits);
+    WebRtc_Word32 Add7Bits(const WebRtc_UWord8 bits);
+    WebRtc_Word32 Add8Bits(const WebRtc_UWord8 bits);
+    WebRtc_Word32 Add16Bits(const WebRtc_UWord16 bits);
+    WebRtc_Word32 Add24Bits(const WebRtc_UWord32 bits);
+    WebRtc_Word32 Add32Bits(const WebRtc_UWord32 bits);
+
+    // Exp-Golomb codes
+    WebRtc_Word32 AddUE(const WebRtc_UWord32 value);
+
+private:
+    WebRtc_Word32 AddPrefix(const WebRtc_UWord8 numZeros);
+    void AddSuffix(const WebRtc_UWord8 numBits, const WebRtc_UWord32 rest);
+    void Add1BitWithoutSanity(const WebRtc_UWord8 bit);
+
+    WebRtc_UWord8* _data;
+    WebRtc_UWord32 _dataSize;
+
+    WebRtc_UWord32 _byteOffset;
+    WebRtc_UWord8  _bitOffset;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_BITSTREAM_BUILDER_H_
diff --git a/src/modules/rtp_rtcp/source/H264/bitstream_parser.cc b/src/modules/rtp_rtcp/source/H264/bitstream_parser.cc
new file mode 100644
index 0000000..79ec967
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/H264/bitstream_parser.cc
@@ -0,0 +1,217 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "bitstream_parser.h"
+
+namespace webrtc {
+BitstreamParser::BitstreamParser(const WebRtc_UWord8* data, const WebRtc_UWord32 dataLength) :
+    _data(data),
+    _dataLength(dataLength),
+    _byteOffset(0),
+    _bitOffset(0)
+{
+}
+    // todo should we have any error codes from this?
+
+WebRtc_UWord8
+BitstreamParser::Get1Bit()
+{
+    WebRtc_UWord8 retVal = 0x1 & (_data[_byteOffset] >> (7-_bitOffset++));
+
+    // prepare next byte
+    if(_bitOffset == 8)
+    {
+        _bitOffset = 0;
+        _byteOffset++;
+    }
+    return retVal;
+}
+
+WebRtc_UWord8
+BitstreamParser::Get2Bits()
+{
+    WebRtc_UWord8 retVal = (Get1Bit() << 1);
+    retVal += Get1Bit();
+    return retVal;
+}
+
+WebRtc_UWord8
+BitstreamParser::Get3Bits()
+{
+    WebRtc_UWord8 retVal = (Get1Bit() << 2);
+    retVal += (Get1Bit() << 1);
+    retVal += Get1Bit();
+    return retVal;
+}
+
+WebRtc_UWord8
+BitstreamParser::Get4Bits()
+{
+    WebRtc_UWord8 retVal = (Get1Bit() << 3);
+    retVal += (Get1Bit() << 2);
+    retVal += (Get1Bit() << 1);
+    retVal += Get1Bit();
+    return retVal;
+}
+
+WebRtc_UWord8
+BitstreamParser::Get5Bits()
+{
+    WebRtc_UWord8 retVal = (Get1Bit() << 4);
+    retVal += (Get1Bit() << 3);
+    retVal += (Get1Bit() << 2);
+    retVal += (Get1Bit() << 1);
+    retVal += Get1Bit();
+    return retVal;
+}
+
+WebRtc_UWord8
+BitstreamParser::Get6Bits()
+{
+    WebRtc_UWord8 retVal = (Get1Bit() << 5);
+    retVal += (Get1Bit() << 4);
+    retVal += (Get1Bit() << 3);
+    retVal += (Get1Bit() << 2);
+    retVal += (Get1Bit() << 1);
+    retVal += Get1Bit();
+    return retVal;
+}
+
+WebRtc_UWord8
+BitstreamParser::Get7Bits()
+{
+    WebRtc_UWord8 retVal = (Get1Bit() << 6);
+    retVal += (Get1Bit() << 5);
+    retVal += (Get1Bit() << 4);
+    retVal += (Get1Bit() << 3);
+    retVal += (Get1Bit() << 2);
+    retVal += (Get1Bit() << 1);
+    retVal += Get1Bit();
+    return retVal;
+}
+
+WebRtc_UWord8
+BitstreamParser::Get8Bits()
+{
+    WebRtc_UWord16 retVal;
+
+    if(_bitOffset != 0)
+    {
+        // read 16 bits
+        retVal = (_data[_byteOffset] << 8)+ (_data[_byteOffset+1]) ;
+        retVal = retVal >> (8-_bitOffset);
+    } else
+    {
+        retVal = _data[_byteOffset];
+    }
+    _byteOffset++;
+    return (WebRtc_UWord8)retVal;
+}
+
+WebRtc_UWord16
+BitstreamParser::Get16Bits()
+{
+    WebRtc_UWord32 retVal;
+
+    if(_bitOffset != 0)
+    {
+        // read 24 bits
+        retVal = (_data[_byteOffset] << 16) + (_data[_byteOffset+1] << 8) + (_data[_byteOffset+2]);
+        retVal = retVal >> (8-_bitOffset);
+    }else
+    {
+        // read 16 bits
+        retVal = (_data[_byteOffset] << 8) + (_data[_byteOffset+1]) ;
+    }
+    _byteOffset += 2;
+    return (WebRtc_UWord16)retVal;
+}
+
+WebRtc_UWord32
+BitstreamParser::Get24Bits()
+{
+    WebRtc_UWord32 retVal;
+
+    if(_bitOffset != 0)
+    {
+        // read 32 bits
+        retVal = (_data[_byteOffset] << 24) + (_data[_byteOffset+1] << 16) + (_data[_byteOffset+2] << 8) + (_data[_byteOffset+3]);
+        retVal = retVal >> (8-_bitOffset);
+    }else
+    {
+        // read 24 bits
+        retVal = (_data[_byteOffset] << 16) + (_data[_byteOffset+1] << 8) + (_data[_byteOffset+2]) ;
+    }
+    _byteOffset += 3;
+    return retVal & 0x00ffffff; // we need to clean up the high 8 bits
+}
+
+WebRtc_UWord32
+BitstreamParser::Get32Bits()
+{
+    WebRtc_UWord32 retVal;
+
+    if(_bitOffset != 0)
+    {
+        // read 40 bits
+        WebRtc_UWord64 tempVal = _data[_byteOffset];
+        tempVal <<= 8;
+        tempVal += _data[_byteOffset+1];
+        tempVal <<= 8;
+        tempVal += _data[_byteOffset+2];
+        tempVal <<= 8;
+        tempVal += _data[_byteOffset+3];
+        tempVal <<= 8;
+        tempVal += _data[_byteOffset+4];
+        tempVal >>= (8-_bitOffset);
+
+        retVal = WebRtc_UWord32(tempVal);
+    }else
+    {
+        // read 32  bits
+        retVal = (_data[_byteOffset]<< 24) + (_data[_byteOffset+1] << 16) + (_data[_byteOffset+2] << 8) + (_data[_byteOffset+3]) ;
+    }
+    _byteOffset += 4;
+    return retVal;
+}
+
+// Exp-Golomb codes
+/*
+    with "prefix" and "suffix" bits and assignment to codeNum ranges (informative)
+    Bit string form Range of codeNum
+              1                0
+            0 1 x0             1..2
+          0 0 1 x1 x0          3..6
+        0 0 0 1 x2 x1 x0       7..14
+      0 0 0 0 1 x3 x2 x1 x0    15..30
+    0 0 0 0 0 1 x4 x3 x2 x1 x0 31..62
+*/
+
+WebRtc_UWord32
+BitstreamParser::GetUE()
+{
+    WebRtc_UWord32 retVal = 0;
+    WebRtc_UWord8 numLeadingZeros = 0;
+
+    while (Get1Bit() != 1)
+    {
+        numLeadingZeros++;
+    }
+    // prefix
+    retVal = (1 << numLeadingZeros) - 1;
+
+    // suffix
+    while (numLeadingZeros)
+    {
+        retVal += (Get1Bit() << --numLeadingZeros);
+    }
+    return retVal;
+}
+} // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/H264/bitstream_parser.h b/src/modules/rtp_rtcp/source/H264/bitstream_parser.h
new file mode 100644
index 0000000..3d8f9ef
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/H264/bitstream_parser.h
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_BITSTREAM_PARSER_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_BITSTREAM_PARSER_H_
+
+#include "typedefs.h"
+
+namespace webrtc {
+class BitstreamParser
+{
+public:
+    BitstreamParser(const WebRtc_UWord8* data, const WebRtc_UWord32 dataLength);
+
+    WebRtc_UWord8 Get1Bit();
+    WebRtc_UWord8 Get2Bits();
+    WebRtc_UWord8 Get3Bits();
+    WebRtc_UWord8 Get4Bits();
+    WebRtc_UWord8 Get5Bits();
+    WebRtc_UWord8 Get6Bits();
+    WebRtc_UWord8 Get7Bits();
+    WebRtc_UWord8 Get8Bits();
+    WebRtc_UWord16 Get16Bits();
+    WebRtc_UWord32 Get24Bits();
+    WebRtc_UWord32 Get32Bits();
+
+    // Exp-Golomb codes
+    WebRtc_UWord32 GetUE();
+
+private:
+    const WebRtc_UWord8* _data;
+    const WebRtc_UWord32 _dataLength;
+
+    WebRtc_UWord32 _byteOffset;
+    WebRtc_UWord8  _bitOffset;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_BITSTREAM_PARSER_H_
diff --git a/src/modules/rtp_rtcp/source/H264/h264_information.cc b/src/modules/rtp_rtcp/source/H264/h264_information.cc
new file mode 100644
index 0000000..cf6b549
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/H264/h264_information.cc
@@ -0,0 +1,818 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h>
+#include "h264_information.h"
+
+//#define DEBUG_SEI_MESSAGE 1
+
+#ifdef DEBUG_SEI_MESSAGE
+    #include "bitstream_parser.h"
+    #include <stdio.h>
+    #include <math.h>
+
+    WebRtc_UWord32 BitRateBPS(WebRtc_UWord16 x )
+    {
+        return (x & 0x3fff) * WebRtc_UWord32(pow(10.0f,(2 + (x >> 14))));
+    }
+
+#endif
+
+namespace webrtc {
+H264Information::H264Information(const bool SVC)
+    : _SVC(SVC)
+
+{
+}
+
+H264Information::~H264Information()
+{
+
+}
+
+void
+H264Information::Reset()
+{
+    _parsedLength = 0;
+    _remLength = 0;
+    _length = 0;
+    _info.numNALUs = 0;
+    _info.numLayers = 0;
+
+    memset(_info.startCodeSize, 0, sizeof(_info.startCodeSize));
+    memset(_info.payloadSize, 0, sizeof(_info.payloadSize));
+    memset(_info.NRI, 0, sizeof(_info.NRI));
+    memset(_info.type, 0, sizeof(_info.type));
+    memset(_info.accLayerSize, 0, sizeof(_info.accLayerSize));
+
+    for (WebRtc_Word32 i = 0; i < KMaxNumberOfNALUs; i++)
+    {
+        _info.SVCheader[i].idr =            0;
+        _info.SVCheader[i].priorityID =     0;
+        _info.SVCheader[i].interLayerPred = 0;
+        _info.SVCheader[i].dependencyID =   0;
+        _info.SVCheader[i].qualityID =      0;
+        _info.SVCheader[i].temporalID =     0;
+        _info.SVCheader[i].useRefBasePic =  0;
+        _info.SVCheader[i].discardable =    0;
+        _info.SVCheader[i].output =         0;
+
+        _info.PACSI[i].X = 0;
+        _info.PACSI[i].Y = 0;
+//      _info.PACSI[i].T = 0;
+        _info.PACSI[i].A = 0;
+        _info.PACSI[i].P = 0;
+        _info.PACSI[i].C = 0;
+        _info.PACSI[i].S = 0;
+        _info.PACSI[i].E = 0;
+        _info.PACSI[i].TL0picIDx =   0;
+        _info.PACSI[i].IDRpicID =    0;
+        _info.PACSI[i].DONC =        0;
+        _info.PACSI[i].numSEINALUs = 0;
+        _info.PACSI[i].NALlength =   5;
+    }
+}
+
+/*******************************************************************************
+ * WebRtc_Word32 GetInfo(const WebRtc_UWord8* ptrEncodedBuffer,
+ *             const WebRtc_UWord32 length,
+ *             const H264Info*& ptrInfo);
+ *
+ * Gets information from an encoded stream.
+ *
+ * Input:
+ *          - ptrEncodedBuffer  : Pointer to encoded stream.
+ *          - length            : Length in bytes of encoded stream.
+ *
+ * Output:
+ *          - ptrInfo           : Pointer to struct with H.264 info.
+ *
+ * Return value:
+ *          - 0                 : ok
+ *          - (-1)              : Error
+ */
+WebRtc_Word32
+H264Information::GetInfo(const WebRtc_UWord8* ptrEncodedBuffer,
+                             const WebRtc_UWord32 length,
+                             const H264Info*& ptrInfo)
+{
+    if (!ptrEncodedBuffer || length < 4)
+    {
+        return -1;
+    }
+
+    if (!HasInfo(length))
+    {
+        if (-1 == FindInfo(ptrEncodedBuffer, length))
+        {
+            Reset();
+            return -1;
+        }
+    }
+    ptrInfo = &_info;
+    return 0;
+}
+
+RtpVideoCodecTypes
+H264Information::Type()
+{
+    if(_SVC)
+    {
+        return RTP_H264_SVCVideo;
+    }
+    return RTP_H264Video;
+}
+
+
+/*******************************************************************************
+ * bool HasInfo(const WebRtc_UWord32 length);
+ *
+ * Checks if information has already been stored for this encoded stream.
+ *
+ * Input:
+ *          - length            : Length in bytes of encoded stream.
+ *
+ * Return value:
+ *          - true (false)      : Information has (not) been stored.
+ */
+
+bool
+H264Information::HasInfo(const WebRtc_UWord32 length)
+{
+    if (!_info.numNALUs)
+    {
+        return false;
+    }
+
+    // has info, make sure current length matches info length
+    if (length != _length)
+    {
+        Reset();
+        return false;
+    }
+
+    return true;
+}
+
+/*******************************************************************************
+ * WebRtc_Word32 FindInfo(const WebRtc_UWord8* ptrEncodedBuffer,
+ *              const WebRtc_UWord32 length);
+ *
+ * Parses the encoded stream.
+ *
+ * Input:
+ *          - ptrEncodedBuffer  : Pointer to encoded stream.
+ *          - length            : Length in bytes of encoded stream.
+ *
+ * Return value:
+ *          - 0                 : ok
+ *          - (-1)              : Error
+ */
+WebRtc_Word32
+H264Information::FindInfo(const WebRtc_UWord8* ptrEncodedBuffer, const WebRtc_UWord32 length)
+{
+    _ptrData = ptrEncodedBuffer;
+    _length = length;
+    _parsedLength = 0;
+    _remLength = length;
+
+    do
+    {
+        // Get start code length
+        if (FindNALUStartCodeSize() == -1)
+        {
+            Reset();
+            return -1;
+        }
+
+        // Get NAL unit payload size
+        WebRtc_Word32 foundLast = FindNALU();
+        if (foundLast == -1)
+        {
+            Reset();
+            return -1;
+        }
+
+        // Validate parsed length
+        if (_parsedLength > _length)
+        {
+            Reset();
+            return -1;
+        }
+
+        // Get NRI
+        GetNRI();
+
+        // Get type
+        if (FindNALUType() == -1)
+        {
+            Reset();
+            return -1;
+        }
+
+        // Set layer start end bit
+        SetLayerSEBit(foundLast);
+
+
+        // Last NAL unit found?
+        if (foundLast == 1)
+        {
+            if (_parsedLength != _length)
+            {
+                Reset();
+                return -1;
+            }
+            _info.numNALUs++;
+            return SetLayerLengths();
+        }
+
+        // Next NAL unit
+        _ptrData   += (_info.startCodeSize[_info.numNALUs] + _info.payloadSize[_info.numNALUs]);
+        _remLength -= (_info.startCodeSize[_info.numNALUs] + _info.payloadSize[_info.numNALUs]);
+        _info.numNALUs++;
+
+        // Validate memory allocation
+        if (_info.numNALUs >= KMaxNumberOfNALUs)
+        {
+            Reset();
+            return -1;
+        }
+    }
+    while(true);
+
+    return 0;
+}
+
+/*******************************************************************************
+ * WebRtc_Word32 FindNALUStartCodeSize();
+ *
+ * Finds the start code length of the current NAL unit.
+ *
+ * Output:
+ *          - _info.startCodeSize[currentNALU]  : Start code length in bytes of NAL unit.
+ *
+ * Return value:
+ *          - 0                 : ok
+ *          - (-1)              : Error
+ */
+WebRtc_Word32
+H264Information::FindNALUStartCodeSize()
+{
+    // NAL unit start code. Ex. {0,0,1} or {0,0,0,1}
+    for (WebRtc_UWord32 i = 2; i < _remLength; i++)
+    {
+        if (_ptrData[i] == 1 && _ptrData[i - 1] == 0 && _ptrData[i - 2] == 0)
+        {
+            _info.startCodeSize[_info.numNALUs] = WebRtc_UWord8(i + 1);
+            return 0;
+        }
+    }
+    return -1;
+}
+
+/*******************************************************************************
+ * WebRtc_Word32 FindNALU();
+ *
+ * Finds the length of the current NAL unit.
+ *
+ * Output:
+ *          - _info.payloadSize[currentNALU]  : Payload length in bytes of NAL unit
+ *                                              (start code length not included).
+ *          - _parsedLength                   : Current parsed length in bytes.
+ *
+ * Return value:
+ *          - 1                 : ok. Last NAL unit found.
+ *          - 0                 : ok
+ *          - (-1)              : Error
+ */
+WebRtc_Word32
+H264Information::FindNALU()
+{
+    for (WebRtc_UWord32 i = _info.startCodeSize[_info.numNALUs]; i < _remLength - 2; i += 2)
+    {
+        if (_ptrData[i] == 0)
+        {
+            WebRtc_Word32 size = 0;
+            if ((_ptrData[i + 1] == 1 && _ptrData[i - 1] == 0) ||
+                (_ptrData[i + 2] == 1 && _ptrData[i + 1] == 0))
+            {
+                // Found a header
+                // Reduce size by preceding zeroes
+                while (_ptrData[i - 1] == 0)
+                {
+                    i--;
+                }
+                size = i;
+            }
+            if (size > 0)
+            {
+                _info.payloadSize[_info.numNALUs] = size - _info.startCodeSize[_info.numNALUs];
+                _parsedLength += _info.startCodeSize[_info.numNALUs] + _info.payloadSize[_info.numNALUs];
+                return 0;
+            }
+        }
+    }
+    // Last NAL unit
+    _info.payloadSize[_info.numNALUs] = _remLength - _info.startCodeSize[_info.numNALUs];
+    if (_info.payloadSize[_info.numNALUs] > 0)
+    {
+        _parsedLength += _info.startCodeSize[_info.numNALUs] + _info.payloadSize[_info.numNALUs];
+        return 1;
+    }
+    return -1;
+}
+
+/*******************************************************************************
+ * void GetNRI();
+ *
+ * Finds the NRI of the current NAL unit.
+ *
+ * Output:
+ *          - _info.NRI[currentNALU]   : NRI of NAL unit.
+ *
+ * Return value:
+ *          - 0                        : ok
+ *          - (-1)                     : Error
+ */
+void
+H264Information::GetNRI()
+{
+    //  NAL unit header (1 byte)
+    //  ---------------------------------
+    // |   start code    |F|NRI|  Type   |
+    //  ---------------------------------
+
+    // NRI (2 bits) - nal_ref_idc. '00' - the NAL unit is not used to reconstruct reference pictures.
+    //                             >00  - the NAL unit is required to reconstruct reference pictures
+    //                                    in the same layer, or contains a parameter set.
+
+
+    const WebRtc_UWord8 type = _ptrData[_info.startCodeSize[_info.numNALUs]] & 0x1f;
+
+    // NALU type of 5, 7 and 8 shoud have NRI to b011
+    if( type == 5 ||
+        type == 7 ||
+        type == 8)
+    {
+        _info.NRI[_info.numNALUs] = 0x60;
+    }else
+    {
+        _info.NRI[_info.numNALUs] = _ptrData[_info.startCodeSize[_info.numNALUs]] & 0x60;
+    }
+}
+
+
+/*******************************************************************************
+ * WebRtc_Word32 FindNALUType();
+ *
+ * Finds the type of the current NAL unit.
+ *
+ * Output:
+ *          - _info.type[currentNALU]  : Type of NAL unit
+ *
+ * Return value:
+ *          - 0                        : ok
+ *          - (-1)                     : Error
+ */
+WebRtc_Word32
+H264Information::FindNALUType()
+{
+    //  NAL unit header (1 byte)
+    //  ---------------------------------
+    // |   start code    |F|NRI|  Type   |
+    //  ---------------------------------
+
+    _info.type[_info.numNALUs] = _ptrData[_info.startCodeSize[_info.numNALUs]] & 0x1f;
+
+    if (_info.type[_info.numNALUs] == 0)
+    {
+        return -1;
+    }
+
+    // SVC NAL units, extended header
+    if (ParseSVCNALUHeader() == -1)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+/*******************************************************************************
+ * WebRtc_Word32 ParseSVCNALUHeader();
+ *
+ * Finds the extended header of the current NAL unit. Included for NAL unit types 14 and 20.
+ *
+ * Output:
+ *          - _info.SVCheader[currentNALU]  : SVC header of NAL unit.
+ *
+ * Return value:
+ *          - 0                             : ok
+ *          - (-1)                          : Error
+ */
+WebRtc_Word32
+H264Information::ParseSVCNALUHeader()
+{
+    if (_info.type[_info.numNALUs] == 5)
+    {
+        _info.SVCheader[_info.numNALUs].idr = 1;
+    }
+    if (_info.type[_info.numNALUs] == 6)
+    {
+        WebRtc_UWord32 seiPayloadSize;
+        do
+        {
+            // SEI message
+            seiPayloadSize = 0;
+
+            WebRtc_UWord32 curByte = _info.startCodeSize[_info.numNALUs] + 1;
+            const WebRtc_UWord32 seiStartOffset = curByte;
+
+            WebRtc_UWord32 seiPayloadType = 0;
+            while(_ptrData[curByte] == 0xff)
+            {
+                seiPayloadType += 255;
+                curByte++;
+            }
+            seiPayloadType += _ptrData[curByte++];
+
+            while(_ptrData[curByte] == 0xff)
+            {
+                seiPayloadSize += 255;
+                curByte++;
+            }
+            seiPayloadSize += _ptrData[curByte++];
+
+            if(_info.payloadSize[_info.numNALUs] < _info.startCodeSize[_info.numNALUs] + seiPayloadSize)
+            {
+                // sanity of remaining buffer
+                // return 0 since no one "need" SEI messages
+                assert(false);
+               return 0;
+            }
+
+            if(seiPayloadType == 24)
+            {
+                // we add this to NALU 0 to be signaled in the first PACSI packet
+                _info.PACSI[0].numSEINALUs = 1; // we allways add this to NALU 0 to send it in the first packet
+                if(_info.PACSI[0].seiMessageLength[0] != seiPayloadSize)
+                {
+                    _info.PACSI[0].seiMessageLength[0] = seiPayloadSize;
+                    delete [] _info.PACSI[0].seiMessageData[0];
+                    _info.PACSI[0].seiMessageData[0] = new WebRtc_UWord8[seiPayloadSize];
+                }
+                memcpy(_info.PACSI[0].seiMessageData[0], _ptrData+seiStartOffset, seiPayloadSize);
+
+                _info.PACSI[0].NALlength += seiPayloadSize + 2; // additional 2 is the length
+
+#ifdef DEBUG_SEI_MESSAGE
+                const WebRtc_UWord8 numberOfLayers = 10;
+                WebRtc_UWord16 avgBitrate[numberOfLayers]= {0,0,0,0,0,0,0,0,0,0};
+                WebRtc_UWord16 maxBitrateLayer[numberOfLayers]= {0,0,0,0,0,0,0,0,0,0};
+                WebRtc_UWord16 maxBitrateLayerRepresentation[numberOfLayers] = {0,0,0,0,0,0,0,0,0,0};
+                WebRtc_UWord16 maxBitrareCalcWindow[numberOfLayers] = {0,0,0,0,0,0,0,0,0,0};
+
+                BitstreamParser parserScalabilityInfo(_ptrData+curByte, seiPayloadSize);
+
+                parserScalabilityInfo.Get1Bit(); // not used in futher parsing
+                const WebRtc_UWord8 priority_layer_info_present = parserScalabilityInfo.Get1Bit();
+                const WebRtc_UWord8 priority_id_setting_flag = parserScalabilityInfo.Get1Bit();
+
+                WebRtc_UWord32 numberOfLayersMinusOne = parserScalabilityInfo.GetUE();
+                for(WebRtc_UWord32 j = 0; j<= numberOfLayersMinusOne; j++)
+                {
+                    printf("\nLayer ID:%d \n",parserScalabilityInfo.GetUE());
+                    printf("Priority ID:%d \n", parserScalabilityInfo.Get6Bits());
+                    printf("Discardable:%d \n", parserScalabilityInfo.Get1Bit());
+
+                    printf("Dependency ID:%d \n", parserScalabilityInfo.Get3Bits());
+                    printf("Quality ID:%d \n", parserScalabilityInfo.Get4Bits());
+                    printf("Temporal ID:%d \n", parserScalabilityInfo.Get3Bits());
+
+                    const WebRtc_UWord8 sub_pic_layer_flag = parserScalabilityInfo.Get1Bit();
+                    const WebRtc_UWord8 sub_region_layer_flag = parserScalabilityInfo.Get1Bit();
+                    const WebRtc_UWord8 iroi_division_info_present_flag = parserScalabilityInfo.Get1Bit();
+                    const WebRtc_UWord8 profile_level_info_present_flag = parserScalabilityInfo.Get1Bit();
+                    const WebRtc_UWord8 bitrate_info_present_flag = parserScalabilityInfo.Get1Bit();
+                    const WebRtc_UWord8 frm_rate_info_present_flag = parserScalabilityInfo.Get1Bit();
+                    const WebRtc_UWord8 frm_size_info_present_flag = parserScalabilityInfo.Get1Bit();
+                    const WebRtc_UWord8 layer_dependency_info_present_flag = parserScalabilityInfo.Get1Bit();
+                    const WebRtc_UWord8 parameter_sets_info_present_flag = parserScalabilityInfo.Get1Bit();
+                    const WebRtc_UWord8 bitstream_restriction_info_present_flag = parserScalabilityInfo.Get1Bit();
+                    const WebRtc_UWord8 exact_inter_layer_pred_flag = parserScalabilityInfo.Get1Bit();  // not used in futher parsing
+
+                    if(sub_pic_layer_flag || iroi_division_info_present_flag)
+                    {
+                        parserScalabilityInfo.Get1Bit();
+                    }
+                    const WebRtc_UWord8 layer_conversion_flag = parserScalabilityInfo.Get1Bit();
+                    const WebRtc_UWord8 layer_output_flag = parserScalabilityInfo.Get1Bit();  // not used in futher parsing
+
+                    if(profile_level_info_present_flag)
+                    {
+                        parserScalabilityInfo.Get24Bits();
+                    }
+                    if(bitrate_info_present_flag)
+                    {
+                        // this is what we want
+                        avgBitrate[j] = parserScalabilityInfo.Get16Bits();
+                        maxBitrateLayer[j] = parserScalabilityInfo.Get16Bits();
+                        maxBitrateLayerRepresentation[j] = parserScalabilityInfo.Get16Bits();
+                        maxBitrareCalcWindow[j] = parserScalabilityInfo.Get16Bits();
+
+                        printf("\tAvg:%d\n", BitRateBPS(avgBitrate[j]));
+                        printf("\tmaxBitrate:%d\n", BitRateBPS(maxBitrateLayer[j]));
+                        printf("\tmaxBitrate rep:%d\n", BitRateBPS(maxBitrateLayerRepresentation[j]));
+                        printf("\tCalcWindow:%d\n", maxBitrareCalcWindow[j]);
+                    }
+                    if(frm_rate_info_present_flag)
+                    {
+                        printf("\tFrame rate constant:%d\n", parserScalabilityInfo.Get2Bits()); // 0 = not constant, 1 = constant, 2 = maybe...
+                        printf("\tFrame rate avg:%d\n", parserScalabilityInfo.Get16Bits()/256);
+                    }
+                    if(frm_size_info_present_flag || iroi_division_info_present_flag)
+                    {
+                        printf("\tFrame Width:%d\n",(parserScalabilityInfo.GetUE()+1)*16);
+                        printf("\tFrame Height:%d\n",(parserScalabilityInfo.GetUE()+1)*16);
+                    }
+                    if(sub_region_layer_flag)
+                    {
+                        parserScalabilityInfo.GetUE();
+                        if(parserScalabilityInfo.Get1Bit())
+                        {
+                            parserScalabilityInfo.Get16Bits();
+                            parserScalabilityInfo.Get16Bits();
+                            parserScalabilityInfo.Get16Bits();
+                            parserScalabilityInfo.Get16Bits();
+                        }
+                    }
+                    if(sub_pic_layer_flag)
+                    {
+                        parserScalabilityInfo.GetUE();
+                    }
+                    if(iroi_division_info_present_flag)
+                    {
+                        if(parserScalabilityInfo.Get1Bit())
+                        {
+                            parserScalabilityInfo.GetUE();
+                            parserScalabilityInfo.GetUE();
+                        }else
+                        {
+                            const WebRtc_UWord32 numRoisMinusOne = parserScalabilityInfo.GetUE();
+                            for(WebRtc_UWord32 k = 0; k <= numRoisMinusOne; k++)
+                            {
+                                parserScalabilityInfo.GetUE();
+                                parserScalabilityInfo.GetUE();
+                                parserScalabilityInfo.GetUE();
+                            }
+                        }
+                    }
+                    if(layer_dependency_info_present_flag)
+                    {
+                        const WebRtc_UWord32 numDirectlyDependentLayers = parserScalabilityInfo.GetUE();
+                        for(WebRtc_UWord32 k = 0; k < numDirectlyDependentLayers; k++)
+                        {
+                            parserScalabilityInfo.GetUE();
+                        }
+                    } else
+                    {
+                        parserScalabilityInfo.GetUE();
+                    }
+                    if(parameter_sets_info_present_flag)
+                    {
+                        const WebRtc_UWord32 numSeqParameterSetMinusOne = parserScalabilityInfo.GetUE();
+                        for(WebRtc_UWord32 k = 0; k <= numSeqParameterSetMinusOne; k++)
+                        {
+                            parserScalabilityInfo.GetUE();
+                        }
+                        const WebRtc_UWord32 numSubsetSeqParameterSetMinusOne = parserScalabilityInfo.GetUE();
+                        for(WebRtc_UWord32 l = 0; l <= numSubsetSeqParameterSetMinusOne; l++)
+                        {
+                            parserScalabilityInfo.GetUE();
+                        }
+                        const WebRtc_UWord32 numPicParameterSetMinusOne = parserScalabilityInfo.GetUE();
+                        for(WebRtc_UWord32 m = 0; m <= numPicParameterSetMinusOne; m++)
+                        {
+                            parserScalabilityInfo.GetUE();
+                        }
+                    }else
+                    {
+                        parserScalabilityInfo.GetUE();
+                    }
+                    if(bitstream_restriction_info_present_flag)
+                    {
+                        parserScalabilityInfo.Get1Bit();
+                        parserScalabilityInfo.GetUE();
+                        parserScalabilityInfo.GetUE();
+                        parserScalabilityInfo.GetUE();
+                        parserScalabilityInfo.GetUE();
+                        parserScalabilityInfo.GetUE();
+                        parserScalabilityInfo.GetUE();
+                    }
+                    if(layer_conversion_flag)
+                    {
+                        parserScalabilityInfo.GetUE();
+                        for(WebRtc_UWord32 k = 0; k <2;k++)
+                        {
+                            if(parserScalabilityInfo.Get1Bit())
+                            {
+                                parserScalabilityInfo.Get24Bits();
+                                parserScalabilityInfo.Get16Bits();
+                                parserScalabilityInfo.Get16Bits();
+                            }
+                        }
+                    }
+                }
+                if(priority_layer_info_present)
+                {
+                    const WebRtc_UWord32 prNumDidMinusOne = parserScalabilityInfo.GetUE();
+                    for(WebRtc_UWord32 k = 0; k <= prNumDidMinusOne;k++)
+                    {
+                        parserScalabilityInfo.Get3Bits();
+                        const WebRtc_UWord32 prNumMinusOne = parserScalabilityInfo.GetUE();
+                        for(WebRtc_UWord32 l = 0; l <= prNumMinusOne; l++)
+                        {
+                            parserScalabilityInfo.GetUE();
+                            parserScalabilityInfo.Get24Bits();
+                            parserScalabilityInfo.Get16Bits();
+                            parserScalabilityInfo.Get16Bits();
+                        }
+                    }
+                }
+                if(priority_id_setting_flag)
+                {
+                    WebRtc_UWord8 priorityIdSettingUri;
+                    WebRtc_UWord32 priorityIdSettingUriIdx = 0;
+                    do
+                    {
+                        priorityIdSettingUri = parserScalabilityInfo.Get8Bits();
+                    } while (priorityIdSettingUri != 0);
+                }
+#endif
+            } else
+            {
+                // not seiPayloadType 24 ignore
+            }
+            //check if we have more SEI in NALU
+        } while (_info.payloadSize[_info.numNALUs] > _info.startCodeSize[_info.numNALUs] + seiPayloadSize);
+    }
+
+   // Extended NAL unit header (3 bytes).
+   // +---------------+---------------+---------------+
+   // |0|1|2|3|4|5|6|7|0|1|2|3|4|5|6|7|0|1|2|3|4|5|6|7|
+   // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   // |R|I|   PRID    |N| DID |  QID  | TID |U|D|O| RR|
+   // +---------------+---------------+---------------+
+
+   // R    - Reserved for future extensions (MUST be 1). Receivers SHOULD ignore the value of R.
+   // I    - Is layer representation an IDR layer (1) or not (0).
+   // PRID - Priority identifier for the NAL unit.
+   // N    - Specifies whether inter-layer prediction may be used for decoding the coded slice (1) or not (0).
+   // DID  - Indicates the inter-layer coding dependency level of a layer representation.
+   // QID  - Indicates the quality level of an MGS layer representation.
+   // TID  - Indicates the temporal level of a layer representation.
+   // U    - Use only reference base pictures during the inter prediction process (1) or not (0).
+   // D    - Discardable flag.
+   // O    - Output_flag. Affects the decoded picture output process as defined in Annex C of [H.264].
+   // RR   - Reserved_three_2bits (MUST be '11'). Receivers SHOULD ignore the value of RR.
+
+    if (_info.type[_info.numNALUs] == 14 ||
+        _info.type[_info.numNALUs] == 20)
+    {
+        WebRtc_UWord32 curByte = _info.startCodeSize[_info.numNALUs] + 1;
+
+        if (_remLength < curByte + 3)
+        {
+                return -1;
+        }
+
+        _info.SVCheader[_info.numNALUs].idr        = (_ptrData[curByte] >> 6) & 0x01;
+        _info.SVCheader[_info.numNALUs].priorityID = (_ptrData[curByte++] & 0x3F);
+
+        _info.SVCheader[_info.numNALUs].interLayerPred = (_ptrData[curByte] >> 7) & 0x01;
+        _info.SVCheader[_info.numNALUs].dependencyID   = (_ptrData[curByte] >> 4) & 0x07;
+        _info.SVCheader[_info.numNALUs].qualityID      = (_ptrData[curByte++] & 0x0F);
+
+        _info.SVCheader[_info.numNALUs].temporalID     = (_ptrData[curByte] >> 5) & 0x07;
+        _info.SVCheader[_info.numNALUs].useRefBasePic  = (_ptrData[curByte] >> 4) & 0x01;
+        _info.SVCheader[_info.numNALUs].discardable    = (_ptrData[curByte] >> 3) & 0x01;
+        _info.SVCheader[_info.numNALUs].output         = (_ptrData[curByte] >> 2) & 0x01;
+
+        if (_info.type[_info.numNALUs] == 14)
+        {
+            // inform the next NALU
+            memcpy(&(_info.SVCheader[_info.numNALUs+1]), &(_info.SVCheader[_info.numNALUs]), sizeof(_H264_SVC_NALUHeader));
+        }
+    }
+   return 0;
+}
+
+
+/*******************************************************************************
+ * void SetLayerSEBit();
+ *
+ * Sets start and end bits for the current NAL unit.
+ *
+ * Output:
+ *          - _info.PACSI[currentNALU].S    : First NAL unit in a layer (S = 1).
+ *          - _info.PACSI[currentNALU].E    : Last NAL unit in a layer (E = 1).
+ *
+ */
+void
+H264Information::SetLayerSEBit(WebRtc_Word32 foundLast)
+{
+    if (_info.numNALUs == 0)
+    {
+        // First NAL unit
+        _info.PACSI[_info.numNALUs].S = 1;
+    }
+
+    if (_info.numNALUs > 0)
+    {
+        if (_info.type[_info.numNALUs] != _info.type[_info.numNALUs-1] &&
+           (_info.type[_info.numNALUs] == 20))
+        {
+            // First layer in scalable extension
+            _info.PACSI[_info.numNALUs].S   = 1;
+            _info.PACSI[_info.numNALUs-1].E = 1;
+        }
+
+        if (_info.type[_info.numNALUs] == 20 && _info.type[_info.numNALUs-1] == 20)
+        {
+            if (_info.SVCheader[_info.numNALUs].temporalID   != _info.SVCheader[_info.numNALUs-1].temporalID ||
+                _info.SVCheader[_info.numNALUs].dependencyID != _info.SVCheader[_info.numNALUs-1].dependencyID ||
+                _info.SVCheader[_info.numNALUs].qualityID    != _info.SVCheader[_info.numNALUs-1].qualityID)
+            {
+                // New layer in scalable extension
+                _info.PACSI[_info.numNALUs].S   = 1;
+                _info.PACSI[_info.numNALUs-1].E = 1;
+            }
+        }
+    }
+
+    if (foundLast)
+    {
+        // Last NAL unit
+        _info.PACSI[_info.numNALUs].E = 1;
+    }
+
+}
+
+/*******************************************************************************
+ * WebRtc_Word32 SetLayerLengths();
+ *
+ * Sets the accumulated layer length.
+ *
+ * Output:
+ *          - _info.accLayerSize[currentLayer]   : Size in bytes of layer: 0 - currentLayer.
+ *
+ * Return value:
+ *          - 0                        : ok
+ *          - (-1)                     : Error
+ *
+ */
+WebRtc_Word32
+H264Information::SetLayerLengths()
+{
+    for (WebRtc_UWord32 curNALU = 0; curNALU < _info.numNALUs; curNALU++)
+    {
+        _info.accLayerSize[_info.numLayers] += _info.startCodeSize[curNALU] + _info.payloadSize[curNALU];
+
+        if (_info.PACSI[curNALU].E == 1)
+        {
+            _info.numLayers++;
+            if (curNALU == WebRtc_UWord32(_info.numNALUs - 1))
+            {
+                break;
+            }
+            if (_info.numLayers >= KMaxNumberOfLayers)
+            {
+                Reset();
+                return -1;
+            }
+            _info.accLayerSize[_info.numLayers] += _info.accLayerSize[_info.numLayers - 1];
+        }
+    }
+
+    if (_info.numLayers < 1 && _info.numLayers > KMaxNumberOfLayers)
+    {
+        Reset();
+        return -1;
+    }
+
+    if (_info.accLayerSize[_info.numLayers - 1] != WebRtc_Word32(_length))
+    {
+        Reset();
+        return -1;
+    }
+
+    return 0;
+}
+} // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/H264/h264_information.h b/src/modules/rtp_rtcp/source/H264/h264_information.h
new file mode 100644
index 0000000..c7f5214
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/H264/h264_information.h
@@ -0,0 +1,170 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_H264_INFORMATION_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_H264_INFORMATION_H_
+
+#include "VideoCodecInformation.h"
+#include "typedefs.h"
+
+namespace webrtc {
+enum
+{
+    KMaxNumberOfNALUs = 128,
+    KMaxNumberOfSEINALUs = 2,
+    KMaxNumberOfLayers = 16
+};
+
+struct H264_SVC_NALUHeader
+{
+    H264_SVC_NALUHeader()
+    :
+    r(1),
+    idr(0),
+    priorityID(0),
+    interLayerPred(0),
+    dependencyID(0),
+    qualityID(0),
+    temporalID(0),
+    useRefBasePic(0),
+    discardable(0),
+    output(0),
+    rr(3),
+    length(3)
+    {
+    }
+    const WebRtc_UWord8 r;
+    WebRtc_UWord8       idr;
+    WebRtc_UWord8       priorityID;
+    WebRtc_UWord8       interLayerPred;
+    WebRtc_UWord8       dependencyID;
+    WebRtc_UWord8       qualityID;
+    WebRtc_UWord8       temporalID;
+    WebRtc_UWord8       useRefBasePic;
+    WebRtc_UWord8       discardable;
+    WebRtc_UWord8       output;
+    const WebRtc_UWord8 rr;
+    const WebRtc_UWord8 length;
+};
+
+class H264_PACSI_NALU
+{
+public:
+    H264_PACSI_NALU() :
+        NALlength(5),
+        type(30),
+        X(0),
+        Y(0),
+//        T(0),
+        A(0),
+        P(0),
+        C(0),
+        S(0),
+        E(0),
+        TL0picIDx(0),
+        IDRpicID(0),
+        DONC(0),
+        numSEINALUs(0)
+    {
+        memset(seiMessageLength, 0, sizeof(seiMessageLength));
+        memset(seiMessageData, 0, sizeof(seiMessageData));
+    }
+    ~H264_PACSI_NALU()
+    {
+        for(int i = 0; i<KMaxNumberOfSEINALUs; i++)
+        {
+            if(seiMessageData[i])
+            {
+                delete [] seiMessageData[i];
+            }
+        }
+    }
+
+    WebRtc_UWord32        NALlength;
+    const WebRtc_UWord8   type;
+    WebRtc_UWord8         X;
+    WebRtc_UWord8         Y;
+//  WebRtc_UWord8         T;
+    WebRtc_UWord8         A;
+    WebRtc_UWord8         P;
+    WebRtc_UWord8         C;
+    WebRtc_UWord8         S;
+    WebRtc_UWord8         E;
+    WebRtc_UWord8         TL0picIDx;
+    WebRtc_UWord16        IDRpicID;
+    WebRtc_UWord16        DONC;
+    WebRtc_UWord32        numSEINALUs;
+    WebRtc_UWord32        seiMessageLength[KMaxNumberOfSEINALUs]; // we allow KMaxNumberOfSEINALUs SEI messages
+    WebRtc_UWord8*        seiMessageData[KMaxNumberOfSEINALUs];
+};
+
+struct H264Info
+{
+    H264Info()
+        :
+        numNALUs(0),
+        numLayers(0)
+        {
+            memset(startCodeSize, 0, sizeof(startCodeSize));
+            memset(payloadSize, 0, sizeof(payloadSize));
+            memset(NRI, 0, sizeof(NRI));
+            memset(type, 0, sizeof(type));
+            memset(accLayerSize, 0, sizeof(accLayerSize));
+        }
+    WebRtc_UWord16             numNALUs;
+    WebRtc_UWord8              numLayers;
+    WebRtc_UWord8              startCodeSize[KMaxNumberOfNALUs];
+    WebRtc_UWord32             payloadSize[KMaxNumberOfNALUs];
+    WebRtc_UWord8              NRI[KMaxNumberOfNALUs];
+    WebRtc_UWord8              type[KMaxNumberOfNALUs];
+    H264_SVC_NALUHeader SVCheader[KMaxNumberOfNALUs];
+    H264_PACSI_NALU     PACSI[KMaxNumberOfNALUs];
+    WebRtc_Word32              accLayerSize[KMaxNumberOfLayers];
+};
+
+
+class H264Information : public VideoCodecInformation
+{
+public:
+    H264Information(const bool SVC);
+    ~H264Information();
+
+    virtual void Reset();
+
+    virtual RtpVideoCodecTypes Type();
+
+    virtual WebRtc_Word32 GetInfo(const WebRtc_UWord8* ptrEncodedBuffer, const WebRtc_UWord32 length, const H264Info*& ptrInfo);
+
+
+protected:
+    bool HasInfo(const WebRtc_UWord32 length);
+    WebRtc_Word32  FindInfo(const WebRtc_UWord8* ptrEncodedBuffer, const WebRtc_UWord32 length);
+
+    void GetNRI();
+    WebRtc_Word32 FindNALU();
+    WebRtc_Word32 FindNALUStartCodeSize();
+    WebRtc_Word32 FindNALUType();
+
+    WebRtc_Word32 ParseSVCNALUHeader();
+
+    void SetLayerSEBit(WebRtc_Word32 foundLast);
+    WebRtc_Word32 SetLayerLengths();
+
+private:
+    const bool            _SVC;
+    const WebRtc_UWord8*    _ptrData;
+    WebRtc_UWord32          _length;
+    WebRtc_UWord32          _parsedLength;
+    WebRtc_UWord32          _remLength;
+    H264Info          _info;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_H264_INFORMATION_H_
diff --git a/src/modules/rtp_rtcp/source/H264/rtp_sender_h264.cc b/src/modules/rtp_rtcp/source/H264/rtp_sender_h264.cc
new file mode 100644
index 0000000..1f35526
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/H264/rtp_sender_h264.cc
@@ -0,0 +1,1280 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtp_sender_h264.h"
+
+#include "rtp_utility.h"
+
+namespace webrtc {
+RTPSenderH264::RTPSenderH264(RTPSenderInterface* rtpSender) :
+    // H264
+    _rtpSender(*rtpSender),
+    _h264Mode(H264_SINGLE_NAL_MODE),
+    _h264SendPPS_SPS(true),
+    _h264SVCPayloadType(-1),
+    _h264SVCRelaySequenceNumber(0),
+    _h264SVCRelayTimeStamp(0),
+    _h264SVCRelayLayerComplete(false),
+
+    _useHighestSendLayer(false),
+    _highestDependencyLayerOld(MAX_NUMBER_OF_TEMPORAL_ID-1),
+    _highestDependencyQualityIDOld(MAX_NUMBER_OF_DEPENDENCY_QUALITY_ID-1),
+    _highestDependencyLayer(0),
+    _highestDependencyQualityID(0),
+    _highestTemporalLayer(0)
+{
+}
+
+RTPSenderH264::~RTPSenderH264()
+{
+}
+
+WebRtc_Word32
+RTPSenderH264::Init()
+{
+    _h264SendPPS_SPS = true;
+    _h264Mode = H264_SINGLE_NAL_MODE;
+    return 0;
+}
+
+/*
+    multi-session
+    3 modes supported
+    NI-T        timestamps
+    NI-TC        timestamps/CS-DON
+    NI-C        CS-DON
+
+    Non-interleaved timestamp based mode (NI-T)
+    Non-interleaved cross-session decoding order number (CS-DON) based mode (NI-C)
+    Non-interleaved combined timestamp and CS-DON mode (NI-TC)
+
+    NOT supported  Interleaved CS-DON (I-C) mode.
+
+    NI-T and NI-TC modes both use timestamps to recover the decoding
+    order.  In order to be able to do so, it is necessary for the RTP
+    packet stream to contain data for all sampling instances of a given
+    RTP session in all enhancement RTP sessions that depend on the given
+    RTP session.  The NI-C and I-C modes do not have this limitation,
+    and use the CS-DON values as a means to explicitly indicate decoding
+    order, either directly coded in PACSI NAL units, or inferred from
+    them using the packetization rules.  It is noted that the NI-TC mode
+    offers both alternatives and it is up to the receiver to select
+    which one to use.
+*/
+
+bool
+RTPSenderH264::AddH264SVCNALUHeader(const H264_SVC_NALUHeader& svc,
+                                    WebRtc_UWord8* databuffer,
+                                    WebRtc_Word32& curByte) const
+{
+   // +---------------+---------------+---------------+
+   // |0|1|2|3|4|5|6|7|0|1|2|3|4|5|6|7|0|1|2|3|4|5|6|7|
+   // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   // |R|I|   PRID    |N| DID |  QID  | TID |U|D|O| RR|
+   // +---------------+---------------+---------------+
+
+   // R    - Reserved for future extensions (MUST be 1). Receivers SHOULD ignore the value of R.
+   // I    - Is layer representation an IDR layer (1) or not (0).
+   // PRID - Priority identifier for the NAL unit.
+   // N    - Specifies whether inter-layer prediction may be used for decoding the coded slice (1) or not (0).
+   // DID  - Indicates the WebRtc_Word32er-layer coding dependency level of a layer representation.
+   // QID  - Indicates the quality level of an MGS layer representation.
+   // TID  - Indicates the temporal level of a layer representation.
+   // U    - Use only reference base pictures during the WebRtc_Word32er prediction process (1) or not (0).
+   // D    - Discardable flag.
+   // O    - Output_flag. Affects the decoded picture output process as defined in Annex C of [H.264].
+   // RR   - Reserved_three_2bits (MUST be '11'). Receivers SHOULD ignore the value of RR.
+
+   // Add header data
+   databuffer[curByte++] = (svc.r << 7)              + (svc.idr << 6)           + (svc.priorityID & 0x3F);
+   databuffer[curByte++] = (svc.interLayerPred << 7) + (svc.dependencyID << 4)  + (svc.qualityID & 0x0F);
+   databuffer[curByte++] = (svc.temporalID << 5)     + (svc.useRefBasePic << 4) + (svc.discardable << 3) +
+                           (svc.output << 2)         + (svc.rr & 0x03);
+   return true;
+}
+
+WebRtc_Word32
+RTPSenderH264::AddH264PACSINALU(const bool firstPacketInNALU,
+                                const bool lastPacketInNALU,
+                                const H264_PACSI_NALU& pacsi,
+                                const H264_SVC_NALUHeader& svc,
+                                const WebRtc_UWord16 DONC,
+                                WebRtc_UWord8* databuffer,
+                                WebRtc_Word32& curByte) const
+{
+    //  0                   1                   2                   3
+    //  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+    // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    // |F|NRI|Type(30) |              SVC NAL unit header              |
+    // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    // |X|Y|T|A|P|C|S|E| TL0PICIDX (o.)|        IDRPICID (o.)          |
+    // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    // |          DONC (o.)            |        NAL unit size 1        |
+    // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    // |                                                               |
+    // |                 SEI NAL unit 1                                |
+    // |                                                               |
+    // |                         +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    // |                         |        NAL unit size 2        |     |
+    // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+     |
+    // |                                                               |
+    // |            SEI NAL unit 2                                     |
+    // |                                           +-+-+-+-+-+-+-+-+-+-+
+    // |                                           |
+    // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+
+    // If present, MUST be first NAL unit in aggregation packet + there MUST be at least
+    // one additional unit in the same packet! The RTPHeader and payload header are set as if the 2nd NAL unit
+    // (first non-PACSI NAL unit) is encapsulated in the same packet.
+    // contains scalability info common for all remaining NAL units.
+
+    // todo add API to configure this required for multisession
+    const bool addDONC = false;
+
+    if (svc.length == 0 || pacsi.NALlength == 0)
+    {
+      return 0;
+    }
+
+    WebRtc_Word32 startByte = curByte;
+
+    // NAL unit header
+    databuffer[curByte++] = 30; // NRI will be added later
+
+    // Extended SVC header
+    AddH264SVCNALUHeader(svc, databuffer, curByte);
+
+    // Flags
+    databuffer[curByte++] = (pacsi.X << 7) +
+                            (pacsi.Y << 6) +
+                            (addDONC << 5) +
+                            (pacsi.A << 4) +
+                            (pacsi.P << 3) +
+                            (pacsi.C << 2) +
+                            firstPacketInNALU?(pacsi.S << 1):0 +
+                            lastPacketInNALU?(pacsi.E):0;
+
+    // Optional fields
+    if (pacsi.Y)
+    {
+        databuffer[curByte++] = pacsi.TL0picIDx;
+        databuffer[curByte++] = (WebRtc_UWord8)(pacsi.IDRpicID >> 8);
+        databuffer[curByte++] = (WebRtc_UWord8)(pacsi.IDRpicID);
+    }
+    // Decoding order number
+    if (addDONC) // pacsi.T
+    {
+        databuffer[curByte++] = (WebRtc_UWord8)(DONC >> 8);
+        databuffer[curByte++] = (WebRtc_UWord8)(DONC);
+    }
+
+    // SEI NALU
+    if(firstPacketInNALU) // IMPROVEMENT duplicate it to make sure it arrives...
+    {
+        // we only set this for NALU 0 to make sure we send it only once per frame
+        for (WebRtc_UWord32 i = 0; i < pacsi.numSEINALUs; i++)
+        {
+            // NALU size
+            databuffer[curByte++] = (WebRtc_UWord8)(pacsi.seiMessageLength[i] >> 8);
+            databuffer[curByte++] = (WebRtc_UWord8)(pacsi.seiMessageLength[i]);
+
+            // NALU data
+            memcpy(databuffer + curByte, pacsi.seiMessageData[i], pacsi.seiMessageLength[i]);
+            curByte += pacsi.seiMessageLength[i];
+        }
+    }
+    return curByte - startByte;
+}
+
+WebRtc_Word32
+RTPSenderH264::SetH264RelaySequenceNumber(const WebRtc_UWord16 seqNum)
+{
+    _h264SVCRelaySequenceNumber = seqNum;
+    return 0;
+}
+
+WebRtc_Word32
+RTPSenderH264::SetH264RelayCompleteLayer(const bool complete)
+{
+    _h264SVCRelayLayerComplete = complete;
+    return 0;
+}
+
+/*
+    12  Filler data
+
+        The only restriction of filler data NAL units within an
+        access unit is that they shall not precede the first VCL
+        NAL unit with the same access unit.
+*/
+WebRtc_Word32
+RTPSenderH264::SendH264FillerData(const WebRtcRTPHeader* rtpHeader,
+                                  const WebRtc_UWord16 bytesToSend,
+                                  const WebRtc_UWord32 ssrc)
+{
+    WebRtc_UWord16 fillerLength = bytesToSend - 12 - 1;
+
+    if (fillerLength > WEBRTC_IP_PACKET_SIZE - 12 - 1)
+    {
+        return 0;
+    }
+
+    if (fillerLength == 0)
+    {
+        // do not send an empty packet, will not reach JB
+        fillerLength = 1;
+    }
+
+    // send codec valid data, H.264 has defined data which is binary 1111111
+    WebRtc_UWord8 dataBuffer[WEBRTC_IP_PACKET_SIZE];
+
+    dataBuffer[0] = static_cast<WebRtc_UWord8>(0x80);            // version 2
+    dataBuffer[1] = rtpHeader->header.payloadType;
+    ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+2, _rtpSender.IncrementSequenceNumber()); // get the current SequenceNumber and add by 1 after returning
+    ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer+4, rtpHeader->header.timestamp);
+    ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer+8, rtpHeader->header.ssrc);
+
+    // set filler NALU type
+    dataBuffer[12] = 12;        // NRI field = 0, type 12
+
+    // fill with 0xff
+    memset(dataBuffer + 12 + 1, 0xff, fillerLength);
+
+    return _rtpSender.SendToNetwork(dataBuffer,
+                        fillerLength,
+                        12 + 1);
+}
+
+WebRtc_Word32
+RTPSenderH264::SendH264FillerData(const WebRtc_UWord32 captureTimestamp,
+                                  const WebRtc_UWord8 payloadType,
+                                  const WebRtc_UWord32 bytes
+                                  )
+{
+
+    const WebRtc_UWord16 rtpHeaderLength = _rtpSender.RTPHeaderLength();
+    WebRtc_UWord16 maxLength = _rtpSender.MaxPayloadLength() - FECPacketOverhead() - _rtpSender.RTPHeaderLength();
+
+    WebRtc_Word32 bytesToSend=bytes;
+    WebRtc_UWord16 fillerLength=0;
+
+    WebRtc_UWord8 dataBuffer[WEBRTC_IP_PACKET_SIZE];
+
+    while(bytesToSend>0)
+    {
+        fillerLength=maxLength;
+        if(fillerLength<maxLength)
+        {
+            fillerLength = (WebRtc_UWord16) bytesToSend;
+        }
+
+        bytesToSend-=fillerLength;
+
+        if (fillerLength > WEBRTC_IP_PACKET_SIZE - 12 - 1)
+        {
+            return 0;
+        }
+
+        if (fillerLength == 0)
+        {
+            // do not send an empty packet, will not reach JB
+            fillerLength = 1;
+        }
+
+        // send paded data
+        // correct seq num, time stamp and payloadtype
+        _rtpSender.BuildRTPheader(dataBuffer, payloadType, false,captureTimestamp, true, true);
+
+        // set filler NALU type
+        dataBuffer[12] = 12;        // NRI field = 0, type 12
+
+        // send codec valid data, H.264 has defined data which is binary 1111111
+        // fill with 0xff
+        memset(dataBuffer + 12 + 1, 0xff, fillerLength-1);
+
+        if( _rtpSender.SendToNetwork(dataBuffer,
+                            fillerLength,
+                            12)<0)
+        {
+
+            return -1;;
+        }
+    }
+    return 0;
+}
+
+WebRtc_Word32
+RTPSenderH264::SendH264SVCRelayPacket(const WebRtcRTPHeader* rtpHeader,
+                                      const WebRtc_UWord8* incomingRTPPacket,
+                                      const WebRtc_UWord16 incomingRTPPacketSize,
+                                      const WebRtc_UWord32 ssrc,
+                                      const bool higestLayer)
+{
+    if (rtpHeader->header.sequenceNumber != (WebRtc_UWord16)(_h264SVCRelaySequenceNumber + 1))
+    {
+         // not continous, signal loss
+         _rtpSender.IncrementSequenceNumber();
+    }
+    _h264SVCRelaySequenceNumber = rtpHeader->header.sequenceNumber;
+
+
+    if (rtpHeader->header.timestamp != _h264SVCRelayTimeStamp)
+    {
+        // new frame
+        _h264SVCRelayLayerComplete = false;
+    }
+
+    if (rtpHeader->header.timestamp == _h264SVCRelayTimeStamp &&
+        _h264SVCRelayLayerComplete)
+    {
+        // sanity, end of layer already sent
+        // Could happened for fragmented packet with missing PACSI info (PACSI packet reorded and received after packet it belongs to)
+        // fragmented packet has no layer info set (default info 0)
+        return 0;
+    }
+    _h264SVCRelayTimeStamp = rtpHeader->header.timestamp;
+
+    // re-packetize H.264-SVC packets
+    // we keep the timestap unchanged
+    // make a copy and only change the SSRC and seqNum
+
+    WebRtc_UWord8 dataBuffer[WEBRTC_IP_PACKET_SIZE];
+    memcpy(dataBuffer, incomingRTPPacket, incomingRTPPacketSize);
+
+    // _sequenceNumber initiated in Init()
+    // _ssrc initiated in constructor
+
+    // re-write payload type
+    if(_h264SVCPayloadType != -1)
+    {
+        dataBuffer[1] &= kRtpMarkerBitMask;
+        dataBuffer[1] += _h264SVCPayloadType;
+    }
+
+    // _sequenceNumber will not work for re-ordering by NACK from original sender
+    // engine responsible for this
+    ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+2, _rtpSender.IncrementSequenceNumber()); // get the current SequenceNumber and add by 1 after returning
+    //ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer+8, ssrc);
+
+    // how do we know it's the last relayed packet in a frame?
+    // 1) packets arrive in order, the engine manages that
+    // 2) highest layer that we relay
+    // 3) the end bit is set for the highest layer
+
+    if(higestLayer && rtpHeader->type.Video.codecHeader.H264.relayE)
+    {
+        // set marker bit
+        dataBuffer[1] |= kRtpMarkerBitMask;
+
+        // set relayed layer as complete
+        _h264SVCRelayLayerComplete = true;
+    }
+    return _rtpSender.SendToNetwork(dataBuffer,
+                         incomingRTPPacketSize - rtpHeader->header.headerLength,
+                         rtpHeader->header.headerLength);
+}
+
+WebRtc_Word32
+RTPSenderH264::SendH264_STAP_A(const FrameType frameType,
+                                const H264Info* ptrH264Info,
+                                WebRtc_UWord16 &idxNALU,
+                                const WebRtc_Word8 payloadType,
+                                const WebRtc_UWord32 captureTimeStamp,
+                                bool& switchToFUA,
+                                WebRtc_Word32 &payloadBytesToSend,
+                                const WebRtc_UWord8*& data,
+                                const WebRtc_UWord16 rtpHeaderLength)
+{
+    const WebRtc_Word32 H264_NALU_LENGTH = 2;
+
+    WebRtc_UWord16 h264HeaderLength = 1; // normal header length
+    WebRtc_UWord16 maxPayloadLengthSTAP_A = _rtpSender.MaxPayloadLength() -
+                                          FECPacketOverhead() - rtpHeaderLength -
+                                          h264HeaderLength - H264_NALU_LENGTH;
+
+    WebRtc_Word32 dataOffset = rtpHeaderLength + h264HeaderLength;
+    WebRtc_UWord8 NRI = 0;
+    WebRtc_UWord16 payloadBytesInPacket = 0;
+    WebRtc_UWord8 dataBuffer[WEBRTC_IP_PACKET_SIZE];
+
+    if (ptrH264Info->payloadSize[idxNALU] > maxPayloadLengthSTAP_A)
+    {
+        // we need to fragment NAL switch to mode FU-A
+        switchToFUA = true;
+    } else
+    {
+        // combine as many NAL units in every IP packet
+        do
+        {
+            if(!_h264SendPPS_SPS)
+            {
+                // don't send NALU of type 7 and 8 SPS and PPS
+                if(ptrH264Info->type[idxNALU] == 7 || ptrH264Info->type[idxNALU] == 8)
+                {
+                    payloadBytesToSend -= ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+                    data += ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+                    idxNALU++;
+                    continue;
+                }
+            }
+            if(ptrH264Info->payloadSize[idxNALU] + payloadBytesInPacket <= maxPayloadLengthSTAP_A)
+            {
+                if(ptrH264Info->NRI[idxNALU] > NRI)
+                {
+                    NRI = ptrH264Info->NRI[idxNALU];
+                }
+                // put NAL size into packet
+                dataBuffer[dataOffset] = (WebRtc_UWord8)(ptrH264Info->payloadSize[idxNALU] >> 8);
+                dataOffset++;
+                dataBuffer[dataOffset] = (WebRtc_UWord8)(ptrH264Info->payloadSize[idxNALU] & 0xff);
+                dataOffset++;
+                // Put payload in packet
+                memcpy(&dataBuffer[dataOffset], &data[ptrH264Info->startCodeSize[idxNALU]], ptrH264Info->payloadSize[idxNALU]);
+                dataOffset += ptrH264Info->payloadSize[idxNALU];
+                data += ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+                payloadBytesInPacket += (WebRtc_UWord16)(ptrH264Info->payloadSize[idxNALU] + H264_NALU_LENGTH);
+                payloadBytesToSend -= ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+            } else
+            {
+                // we don't fitt the next NALU in this packet
+                break;
+            }
+            idxNALU++;
+        }while(payloadBytesToSend);
+    }
+
+    // sanity
+    // don't send empty packets
+    if (payloadBytesInPacket)
+    {
+        // add RTP header
+        _rtpSender.BuildRTPheader(dataBuffer, payloadType, (payloadBytesToSend==0)?true:false, captureTimeStamp);
+        dataBuffer[rtpHeaderLength] = 24 + NRI; // STAP-A == 24
+        WebRtc_UWord16 payloadLength = payloadBytesInPacket + h264HeaderLength;
+
+        if(-1 == SendVideoPacket(frameType, dataBuffer, payloadLength, rtpHeaderLength))
+        {
+            return -1;
+        }
+    }
+    return 0;
+} // end STAP-A
+
+// STAP-A for H.264 SVC
+WebRtc_Word32
+RTPSenderH264::SendH264_STAP_A_PACSI(const FrameType frameType,
+                                      const H264Info* ptrH264Info,
+                                      WebRtc_UWord16 &idxNALU,
+                                      const WebRtc_Word8 payloadType,
+                                      const WebRtc_UWord32 captureTimeStamp,
+                                      bool& switchToFUA,
+                                      WebRtc_Word32 &payloadBytesToSend,
+                                      const WebRtc_UWord8*& data,
+                                      const WebRtc_UWord16 rtpHeaderLength,
+                                      WebRtc_UWord16& decodingOrderNumber)
+{
+    const WebRtc_Word32 H264_NALU_LENGTH = 2;
+
+    WebRtc_UWord16 h264HeaderLength = 1; // normal header length
+    WebRtc_UWord16 maxPayloadLengthSTAP_A = _rtpSender.MaxPayloadLength() - FECPacketOverhead() - rtpHeaderLength - h264HeaderLength - H264_NALU_LENGTH;
+    WebRtc_Word32 dataOffset = rtpHeaderLength + h264HeaderLength;
+    WebRtc_UWord8 NRI = 0;
+    WebRtc_UWord16 payloadBytesInPacket = 0;
+    WebRtc_UWord8 dataBuffer[WEBRTC_IP_PACKET_SIZE];
+    bool firstNALUNotIDR = true; //delta
+
+    // Put PACSI NAL unit into packet
+    WebRtc_Word32 lengthPACSI = 0;
+    WebRtc_UWord32 PACSI_NALlength = ptrH264Info->PACSI[idxNALU].NALlength;
+    if (PACSI_NALlength > maxPayloadLengthSTAP_A)
+    {
+        return -1;
+    }
+    dataBuffer[dataOffset++] = (WebRtc_UWord8)(PACSI_NALlength >> 8);
+    dataBuffer[dataOffset++] = (WebRtc_UWord8)(PACSI_NALlength & 0xff);
+
+    // end bit will be updated later, since another NALU in this packet might be the last
+    WebRtc_Word32 lengthPASCINALU = AddH264PACSINALU(true,
+                                                   false,
+                                                   ptrH264Info->PACSI[idxNALU],
+                                                   ptrH264Info->SVCheader[idxNALU],
+                           decodingOrderNumber,
+                           dataBuffer,
+                                                   dataOffset);
+    if (lengthPASCINALU <= 0)
+    {
+        return -1;
+    }
+    decodingOrderNumber++;
+
+    lengthPACSI = H264_NALU_LENGTH + lengthPASCINALU;
+    maxPayloadLengthSTAP_A -= (WebRtc_UWord16)lengthPACSI;
+    if (ptrH264Info->payloadSize[idxNALU] > maxPayloadLengthSTAP_A)
+    {
+        // we need to fragment NAL switch to mode FU-A
+        switchToFUA = true;
+        return 0;
+    }
+    if(!ptrH264Info->SVCheader[idxNALU].idr)
+    {
+        firstNALUNotIDR = true;
+    }
+
+    WebRtc_UWord32 layer = (ptrH264Info->SVCheader[idxNALU].dependencyID << 16)+
+                         (ptrH264Info->SVCheader[idxNALU].qualityID << 8) +
+                          ptrH264Info->SVCheader[idxNALU].temporalID;
+
+    {
+        // combine as many NAL units in every IP packet, with the same priorityID
+        // Improvement we could allow several very small MGS NALU from different layers to be sent in one packet
+
+        do
+        {
+            if(!_h264SendPPS_SPS)
+            {
+                // Don't send NALU of type 7 and 8 SPS and PPS,
+                // they could be signaled outofband
+                if(ptrH264Info->type[idxNALU] == 7 || ptrH264Info->type[idxNALU] == 8)
+                {
+                    payloadBytesToSend -= ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+                    data += ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+                    idxNALU++;
+                    continue;
+                }
+            }
+            //    don't send NALU type 6 (SEI message) not allowed when we send it in PACSI
+            if(ptrH264Info->type[idxNALU] == 6)
+            {
+                // SEI NALU Don't send, not allowed when we send it in PACSI
+                payloadBytesToSend -= ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+                data += ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+                idxNALU++;
+                continue;
+            }
+
+            const WebRtc_UWord32 layerNALU = (ptrH264Info->SVCheader[idxNALU].dependencyID << 16)+
+                                           (ptrH264Info->SVCheader[idxNALU].qualityID << 8) +
+                                            ptrH264Info->SVCheader[idxNALU].temporalID;
+
+            // we need to break on a new layer
+            if( ptrH264Info->payloadSize[idxNALU] + payloadBytesInPacket <= maxPayloadLengthSTAP_A &&
+                layerNALU == layer)
+            {
+                if(ptrH264Info->NRI[idxNALU] > NRI)
+                {
+                    NRI = ptrH264Info->NRI[idxNALU];
+                }
+                // put NAL size into packet
+                dataBuffer[dataOffset] = (WebRtc_UWord8)(ptrH264Info->payloadSize[idxNALU] >> 8);
+                dataOffset++;
+                dataBuffer[dataOffset] = (WebRtc_UWord8)(ptrH264Info->payloadSize[idxNALU] & 0xff);
+                dataOffset++;
+                // Put payload in packet
+                memcpy(&dataBuffer[dataOffset], &data[ptrH264Info->startCodeSize[idxNALU]], ptrH264Info->payloadSize[idxNALU]);
+                dataOffset += ptrH264Info->payloadSize[idxNALU];
+                data += ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+                payloadBytesInPacket += (WebRtc_UWord16)(ptrH264Info->payloadSize[idxNALU] + H264_NALU_LENGTH);
+                payloadBytesToSend -= ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+            } else
+            {
+                // we don't fitt the next NALU in this packet or,
+                // it's the next layer
+
+                // check if we should send this NALU
+                // based on the layer
+
+                if(_useHighestSendLayer && layerNALU != layer)
+                {
+                    // we don't send this NALU due to it's a new layer
+                    // check if we should send the next or if this is the last
+                    const WebRtc_UWord8 dependencyQualityID = (ptrH264Info->SVCheader[idxNALU].dependencyID << 4) + ptrH264Info->SVCheader[idxNALU].qualityID;
+
+                    bool highestLayer;
+                    if(SendH264SVCLayer(frameType,
+                                        ptrH264Info->SVCheader[idxNALU].temporalID,
+                                        dependencyQualityID,
+                                        highestLayer) == false)
+                    {
+                        // will trigger markerbit and stop sending this frame
+                        payloadBytesToSend = 0;
+                    }
+                }
+                break;
+            }
+            idxNALU++;
+
+        }while(payloadBytesToSend);
+    }
+
+    // sanity, don't send empty packets
+    if (payloadBytesInPacket)
+    {
+        // add RTP header
+        _rtpSender.BuildRTPheader(dataBuffer, payloadType, (payloadBytesToSend==0)?true:false, captureTimeStamp);
+
+        dataBuffer[rtpHeaderLength] = 24 + NRI; // STAP-A == 24
+
+        // NRI for PACSI
+        dataBuffer[rtpHeaderLength + H264_NALU_LENGTH + 1] &= 0x1f;   // zero out NRI field
+        dataBuffer[rtpHeaderLength + H264_NALU_LENGTH + 1] |= NRI;
+
+        if(ptrH264Info->PACSI[idxNALU-1].E)
+        {
+            // update end bit
+            dataBuffer[rtpHeaderLength + H264_NALU_LENGTH + 5] |= 0x01;
+        }
+        if(firstNALUNotIDR)
+        {
+            // we have to check if any of the NALU in this packet is an IDR NALU
+            bool setIBit = false;
+            for(int i = 0; i < idxNALU; i++)
+            {
+                if(ptrH264Info->SVCheader[i].idr)
+                {
+                    setIBit = true;
+                    break;
+                }
+            }
+            if(setIBit)
+            {
+                // update I bit
+                dataBuffer[rtpHeaderLength + H264_NALU_LENGTH + 2] |= 0x40;
+            }
+        }
+        const WebRtc_UWord16 payloadLength = payloadBytesInPacket + h264HeaderLength + (WebRtc_UWord16)lengthPACSI;
+        if(-1 == SendVideoPacket(frameType,
+                                 dataBuffer,
+                                 payloadLength,
+                                 rtpHeaderLength,
+                                 layer==0))
+        {
+            return -1;
+        }
+    }
+    return 0;
+} // end STAP-A
+
+WebRtc_Word32
+RTPSenderH264::SendH264_FU_A(const FrameType frameType,
+                              const H264Info* ptrH264Info,
+                              WebRtc_UWord16 &idxNALU,
+                              const WebRtc_Word8 payloadType,
+                              const WebRtc_UWord32 captureTimeStamp,
+                              WebRtc_Word32 &payloadBytesToSend,
+                              const WebRtc_UWord8*& data,
+                              const WebRtc_UWord16 rtpHeaderLength,
+                              WebRtc_UWord16& decodingOrderNumber,
+                              const bool sendSVCPACSI)
+{
+
+    // FUA for the rest of the frame
+    WebRtc_UWord16 maxPayloadLength = _rtpSender.MaxPayloadLength() - FECPacketOverhead() - rtpHeaderLength;
+    WebRtc_UWord8 dataBuffer[WEBRTC_IP_PACKET_SIZE];
+    WebRtc_UWord32 payloadBytesRemainingInNALU = ptrH264Info->payloadSize[idxNALU];
+
+    bool isBaseLayer=false;
+
+    if(payloadBytesRemainingInNALU > maxPayloadLength)
+    {
+        // we need to fragment NALU
+        const WebRtc_UWord16 H264_FUA_LENGTH = 2; // FU-a H.264 header is 2 bytes
+
+        if(sendSVCPACSI)
+        {
+            SendH264_SinglePACSI(frameType,
+                                 ptrH264Info,
+                                 idxNALU,
+                                 payloadType,
+                                 captureTimeStamp,
+                                 true,
+                                 false);
+
+            WebRtc_UWord32 layer = (ptrH264Info->SVCheader[idxNALU].dependencyID << 16)+
+                                 (ptrH264Info->SVCheader[idxNALU].qualityID << 8) +
+                                  ptrH264Info->SVCheader[idxNALU].temporalID;
+            isBaseLayer=(layer==0);
+        }
+
+        // First packet
+        _rtpSender.BuildRTPheader(dataBuffer,payloadType, false, captureTimeStamp);
+
+        WebRtc_UWord16 maxPayloadLengthFU_A = maxPayloadLength - H264_FUA_LENGTH ;
+        WebRtc_UWord8 fuaIndc = 28 + ptrH264Info->NRI[idxNALU];
+        dataBuffer[rtpHeaderLength] = fuaIndc;                                                     // FU-A indicator
+        dataBuffer[rtpHeaderLength+1] = (WebRtc_UWord8)(ptrH264Info->type[idxNALU] + 0x80)/*start*/; // FU-A header
+
+        memcpy(&dataBuffer[rtpHeaderLength + H264_FUA_LENGTH], &data[ptrH264Info->startCodeSize[idxNALU]+1], maxPayloadLengthFU_A);
+        WebRtc_UWord16 payloadLength = maxPayloadLengthFU_A + H264_FUA_LENGTH;
+        if(-1 == SendVideoPacket(frameType, dataBuffer, payloadLength, rtpHeaderLength, isBaseLayer))
+        {
+            return -1;
+        }
+
+        //+1 is from the type that is coded into the FU-a header
+        data += maxPayloadLengthFU_A + 1 + ptrH264Info->startCodeSize[idxNALU];             // inc data ptr
+        payloadBytesToSend -= maxPayloadLengthFU_A+1+ptrH264Info->startCodeSize[idxNALU];
+        payloadBytesRemainingInNALU -= maxPayloadLengthFU_A+1;
+
+        // all non first/last packets
+        while(payloadBytesRemainingInNALU  > maxPayloadLengthFU_A)
+        {
+            if(sendSVCPACSI)
+            {
+                SendH264_SinglePACSI(frameType,
+                                     ptrH264Info,
+                                     idxNALU,
+                                     payloadType,
+                                     captureTimeStamp,
+                                     false,
+                                     false);
+            }
+
+            // prepare next header
+            _rtpSender.BuildRTPheader(dataBuffer, payloadType, false, captureTimeStamp);
+
+            dataBuffer[rtpHeaderLength] = (WebRtc_UWord8)fuaIndc;           // FU-A indicator
+            dataBuffer[rtpHeaderLength+1] = ptrH264Info->type[idxNALU];   // FU-A header
+
+            memcpy(&dataBuffer[rtpHeaderLength+H264_FUA_LENGTH], data, maxPayloadLengthFU_A);
+            payloadLength = maxPayloadLengthFU_A + H264_FUA_LENGTH;
+
+            if(-1 == SendVideoPacket(frameType, dataBuffer, payloadLength, rtpHeaderLength,isBaseLayer))
+            {
+                return -1;
+            }
+            data += maxPayloadLengthFU_A; // inc data ptr
+            payloadBytesToSend -= maxPayloadLengthFU_A;
+            payloadBytesRemainingInNALU -= maxPayloadLengthFU_A;
+            dataBuffer[rtpHeaderLength] = fuaIndc;                         // FU-A indicator
+            dataBuffer[rtpHeaderLength+1] = ptrH264Info->type[idxNALU];    // FU-A header
+        }
+        if(sendSVCPACSI)
+        {
+            SendH264_SinglePACSI(frameType,
+                                 ptrH264Info,
+                                 idxNALU,
+                                 payloadType,
+                                 captureTimeStamp,
+                                 false,
+                                 true); // last packet in NALU
+
+            if(_useHighestSendLayer && idxNALU+1 < ptrH264Info->numNALUs)
+            {
+                // not last NALU in frame
+                // check if it's the the next layer should not be sent
+
+                // check if we should send the next or if this is the last
+                const WebRtc_UWord8 dependencyQualityID = (ptrH264Info->SVCheader[idxNALU+1].dependencyID << 4) +
+                                                         ptrH264Info->SVCheader[idxNALU+1].qualityID;
+
+                bool highestLayer;
+                if(SendH264SVCLayer(frameType,
+                                    ptrH264Info->SVCheader[idxNALU+1].temporalID,
+                                    dependencyQualityID,
+                                    highestLayer) == false)
+                {
+                    // will trigger markerbit and stop sending this frame
+                    payloadBytesToSend = payloadBytesRemainingInNALU;
+                }
+            }
+        }
+        // last packet in NALU
+        _rtpSender.BuildRTPheader(dataBuffer, payloadType,(payloadBytesToSend == (WebRtc_Word32)payloadBytesRemainingInNALU)?true:false, captureTimeStamp);
+        dataBuffer[rtpHeaderLength+1] = ptrH264Info->type[idxNALU] + 0x40/*stop*/; // FU-A header
+
+        memcpy(&dataBuffer[rtpHeaderLength+H264_FUA_LENGTH], data, payloadBytesRemainingInNALU);
+        payloadLength = (WebRtc_UWord16)payloadBytesRemainingInNALU + H264_FUA_LENGTH;
+        payloadBytesToSend -= payloadBytesRemainingInNALU;
+        if(payloadBytesToSend != 0)
+        {
+            data += payloadBytesRemainingInNALU; // inc data ptr
+        }
+        idxNALU++;
+        if(-1 == SendVideoPacket(frameType, dataBuffer, payloadLength, rtpHeaderLength,isBaseLayer))
+        {
+            return -1;
+        }
+    } else
+    {
+        // send NAL unit in singel mode
+        return SendH264_SingleMode(frameType,
+                                   ptrH264Info,
+                                   idxNALU,
+                                   payloadType,
+                                   captureTimeStamp,
+                                   payloadBytesToSend,
+                                   data,
+                                   rtpHeaderLength,
+                                   sendSVCPACSI);
+    }
+    // end FU-a
+    return 0;
+}
+
+WebRtc_Word32
+RTPSenderH264::SendH264_SingleMode(const FrameType frameType,
+                                    const H264Info* ptrH264Info,
+                                    WebRtc_UWord16 &idxNALU,
+                                    const WebRtc_Word8 payloadType,
+                                    const WebRtc_UWord32 captureTimeStamp,
+                                    WebRtc_Word32 &payloadBytesToSend,
+                                    const WebRtc_UWord8*& data,
+                                    const WebRtc_UWord16 rtpHeaderLength,
+                                    WebRtc_UWord16& decodingOrderNumber,
+                                    const bool sendSVCPACSI)
+{
+    // no H.264 header lenght in single mode
+    // we use WEBRTC_IP_PACKET_SIZE instead of the configured MTU since it's better to send fragmented UDP than not to send
+    const WebRtc_UWord16 maxPayloadLength = WEBRTC_IP_PACKET_SIZE - _rtpSender.PacketOverHead() - FECPacketOverhead() - rtpHeaderLength;
+    WebRtc_UWord8 dataBuffer[WEBRTC_IP_PACKET_SIZE];
+    bool isBaseLayer=false;
+
+    if(ptrH264Info->payloadSize[idxNALU] > maxPayloadLength)
+    {
+        return -3;
+    }
+    if(!_h264SendPPS_SPS)
+    {
+        // don't send NALU of type 7 and 8 SPS and PPS
+        if(ptrH264Info->type[idxNALU] == 7 || ptrH264Info->type[idxNALU] == 8)
+        {
+            payloadBytesToSend -= ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+            data += ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+            idxNALU++;
+            return 0;
+        }
+    }
+    if(sendSVCPACSI)
+    {
+        SendH264_SinglePACSI(frameType,
+                             ptrH264Info,
+                             idxNALU,
+                             payloadType,
+                             captureTimeStamp,
+                             true,
+                             true);
+
+        WebRtc_UWord32 layer = (ptrH264Info->SVCheader[idxNALU].dependencyID << 16)+
+                             (ptrH264Info->SVCheader[idxNALU].qualityID << 8) +
+                              ptrH264Info->SVCheader[idxNALU].temporalID;
+        isBaseLayer=(layer==0);
+    }
+
+    // Put payload in packet
+    memcpy(&dataBuffer[rtpHeaderLength], &data[ptrH264Info->startCodeSize[idxNALU]], ptrH264Info->payloadSize[idxNALU]);
+
+    WebRtc_UWord16 payloadBytesInPacket = (WebRtc_UWord16)ptrH264Info->payloadSize[idxNALU];
+    payloadBytesToSend -= ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU]; // left to send
+
+    //
+    _rtpSender.BuildRTPheader(dataBuffer,payloadType,(payloadBytesToSend ==0)?true:false, captureTimeStamp);
+
+    dataBuffer[rtpHeaderLength] &= 0x1f; // zero out NRI field
+    dataBuffer[rtpHeaderLength] |= ptrH264Info->NRI[idxNALU]; // nri
+    if(payloadBytesToSend > 0)
+    {
+        data += ptrH264Info->payloadSize[idxNALU] + ptrH264Info->startCodeSize[idxNALU];
+    }
+    idxNALU++;
+    if(-1 == SendVideoPacket(frameType, dataBuffer, payloadBytesInPacket, rtpHeaderLength,isBaseLayer))
+    {
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32
+RTPSenderH264::SendH264_SinglePACSI(const FrameType frameType,
+                                    const H264Info* ptrH264Info,
+                                     const WebRtc_UWord16 idxNALU,
+                                     const WebRtc_Word8 payloadType,
+                                     const WebRtc_UWord32 captureTimeStamp,
+                                     const bool firstPacketInNALU,
+                                     const bool lastPacketInNALU);
+{
+    // Send PACSI in single mode
+    WebRtc_UWord8 dataBuffer[WEBRTC_IP_PACKET_SIZE];
+    WebRtc_UWord16 rtpHeaderLength = (WebRtc_UWord16)_rtpSender.BuildRTPheader(dataBuffer, payloadType,false, captureTimeStamp);
+    WebRtc_Word32 dataOffset = rtpHeaderLength;
+
+    WebRtc_Word32 lengthPASCINALU = AddH264PACSINALU(firstPacketInNALU,
+                                                   lastPacketInNALU,
+                                                   ptrH264Info->PACSI[idxNALU],
+                                                   ptrH264Info->SVCheader[idxNALU],
+                                                   decodingOrderNumber,
+                                                   dataBuffer,
+                                                   dataOffset);
+
+    if (lengthPASCINALU <= 0)
+    {
+        return -1;
+    }
+    decodingOrderNumber++;
+
+    WebRtc_UWord16 payloadBytesInPacket = (WebRtc_UWord16)lengthPASCINALU;
+
+    // Set payload header (first payload byte co-serves as the payload header)
+    dataBuffer[rtpHeaderLength] &= 0x1f;        // zero out NRI field
+    dataBuffer[rtpHeaderLength] |= ptrH264Info->NRI[idxNALU]; // nri
+
+    const WebRtc_UWord32 layer = (ptrH264Info->SVCheader[idxNALU].dependencyID << 16)+
+                               (ptrH264Info->SVCheader[idxNALU].qualityID << 8) +
+                                ptrH264Info->SVCheader[idxNALU].temporalID;
+
+    if (-1 == SendVideoPacket(frameType, dataBuffer, payloadBytesInPacket, rtpHeaderLength,layer==0))
+    {
+        return -1;
+    }
+    return 0;
+}
+
+
+
+
+WebRtc_Word32
+RTPSenderH264::SendH264SVC(const FrameType frameType,
+                            const WebRtc_Word8 payloadType,
+                            const WebRtc_UWord32 captureTimeStamp,
+                            const WebRtc_UWord8* payloadData,
+                            const WebRtc_UWord32 payloadSize,
+                            H264Information& h264Information,
+                            WebRtc_UWord16& decodingOrderNumber)
+{
+    WebRtc_Word32 payloadBytesToSend = payloadSize;
+    const WebRtc_UWord16 rtpHeaderLength = _rtpSender.RTPHeaderLength();
+
+    const H264Info* ptrH264Info = NULL;
+    if (h264Information.GetInfo(payloadData,payloadSize, ptrH264Info) == -1)
+    {
+        return -1;
+    }
+    if(_useHighestSendLayer)
+    {
+        // we need to check if we should drop the frame
+        // it could be a temporal layer (aka a temporal frame)
+        const WebRtc_UWord8 dependencyQualityID = (ptrH264Info->SVCheader[0].dependencyID << 4) + ptrH264Info->SVCheader[0].qualityID;
+
+        bool dummyHighestLayer;
+        if(SendH264SVCLayer(frameType,
+                            ptrH264Info->SVCheader[0].temporalID,
+                            dependencyQualityID,
+                            dummyHighestLayer) == false)
+        {
+            // skip send this frame
+            return 0;
+        }
+    }
+
+    WebRtc_UWord16 idxNALU = 0;
+    while (payloadBytesToSend > 0)
+    {
+        bool switchToFUA = false;
+        if (SendH264_STAP_A_PACSI(frameType,
+                                  ptrH264Info,
+                                  idxNALU,
+                                  payloadType,
+                                  captureTimeStamp,
+                                  switchToFUA,
+                                  payloadBytesToSend,
+                                  payloadData,
+                                  rtpHeaderLength,
+                                  decodingOrderNumber) != 0)
+        {
+            return -1;
+        }
+        if(switchToFUA)
+        {
+            // FU_A for this NALU
+            if (SendH264_FU_A(frameType,
+                              ptrH264Info,
+                              idxNALU,
+                              payloadType,
+                              captureTimeStamp,
+                              payloadBytesToSend,
+                              payloadData,
+                              rtpHeaderLength,
+                              true) != 0)
+            {
+                return -1;
+            }
+        }
+    }
+    return 0;
+}
+
+WebRtc_Word32
+RTPSenderH264::SetH264PacketizationMode(const H264PacketizationMode mode)
+{
+    _h264Mode = mode;
+    return 0;
+}
+
+WebRtc_Word32
+RTPSenderH264::SetH264SendModeNALU_PPS_SPS(const bool dontSend)
+{
+    _h264SendPPS_SPS = !dontSend;
+    return 0;
+}
+
+bool
+RTPSenderH264::SendH264SVCLayer(const FrameType frameType,
+                                  const WebRtc_UWord8 temporalID,
+                                  const WebRtc_UWord8 dependencyQualityID,
+                                  bool& higestLayer)
+{
+    WebRtc_UWord8 dependencyID  = dependencyQualityID >> 4;
+
+    // keyframe required to switch between dependency layers not quality and temporal
+    if( _highestDependencyLayer != _highestDependencyLayerOld)
+    {
+        // we want to switch dependency layer
+        if(frameType == kVideoFrameKey)
+        {
+            // key frame we can change layer if it's correct layer
+            if(_highestDependencyLayer > _highestDependencyLayerOld)
+            {
+                // we want to switch up
+                // does this packet belong to a new layer?
+
+                if( dependencyID > _highestDependencyLayerOld &&
+                    dependencyID <= _highestDependencyLayer)
+                {
+                    _highestDependencyLayerOld = dependencyID;
+                    _highestDependencyQualityIDOld = _highestDependencyQualityID;
+
+                    if( dependencyID == _highestDependencyLayer &&
+                        dependencyQualityID == _highestDependencyQualityID)
+                    {
+                        higestLayer = true;
+                    }
+                    // relay
+                    return true;
+                }
+            }
+            if(_highestDependencyLayer < _highestDependencyLayerOld)
+            {
+                // we want to switch down
+                // does this packet belong to a low layer?
+                if( dependencyID <= _highestDependencyLayer)
+                {
+                    _highestDependencyLayerOld = dependencyID;
+                    _highestDependencyQualityIDOld = _highestDependencyQualityID;
+                    if( dependencyID == _highestDependencyLayer &&
+                        dependencyQualityID == _highestDependencyQualityID)
+                    {
+                        higestLayer = true;
+                    }
+                    // relay
+                    return true;
+                }
+            }
+        } else
+        {
+            // Delta frame and we are waiting to switch dependency layer
+            if(_highestDependencyLayer > _highestDependencyLayerOld)
+            {
+                // we want to switch up to a higher dependency layer
+                // use old setting until we get a key-frame
+
+                // filter based on old dependency
+                // we could have allowed to add a MGS layer lower than the dependency ID
+                // but then we can't know the highest layer relayed we assume that the user
+                // will add one layer at a time
+                if( _highestTemporalLayer < temporalID ||
+                    _highestDependencyLayerOld < dependencyID ||
+                    _highestDependencyQualityIDOld < dependencyQualityID)
+                {
+                    // drop
+                    return false;
+                }
+                // highest layer based on old
+                if( dependencyID == _highestDependencyLayerOld &&
+                    dependencyQualityID == _highestDependencyQualityIDOld)
+                {
+                    higestLayer = true;
+                }
+            } else
+            {
+                // we want to switch down to a lower dependency layer,
+                // use old setting, done bellow
+                // drop all temporal layers while waiting for the key-frame
+                if(temporalID > 0)
+                {
+                    // drop
+                    return false;
+                }
+                // we can't drop a lower MGS layer since this might depend on it
+                // however we can drop MGS layers larger than dependecyQualityId
+                // with dependency from old and quality 0
+                if( _highestDependencyLayerOld < dependencyID ||
+                    (_highestDependencyQualityIDOld & 0xf0) < dependencyQualityID)
+                {
+                    // drop
+                    return false;
+                }
+                if( dependencyID == _highestDependencyLayerOld &&
+                    dependencyQualityID == (_highestDependencyQualityIDOld & 0xf0))
+                {
+                    higestLayer = true;
+                }
+            }
+        }
+    } else
+    {
+        // filter based on current state
+        if( _highestTemporalLayer < temporalID ||
+            _highestDependencyLayer < dependencyID ||
+            _highestDependencyQualityID < dependencyQualityID)
+        {
+            // drop
+            return false;
+        }
+        if( dependencyID == _highestDependencyLayer &&
+            dependencyQualityID == _highestDependencyQualityID)
+        {
+            higestLayer = true;
+        }
+    }
+    return true;
+}
+
+WebRtc_Word32
+RTPSenderH264::SetHighestSendLayer(const WebRtc_UWord8 dependencyQualityLayer,
+                                   const WebRtc_UWord8 temporalLayer)
+{
+    const WebRtc_UWord8 dependencyLayer = (dependencyQualityLayer >> 4);
+
+    if(_highestDependencyLayerOld != _highestDependencyLayer)
+    {
+        // we have not switched to the new dependency yet
+    } else
+    {
+        if(_highestDependencyLayer == dependencyLayer)
+        {
+            // no change of dependency
+            // switch now _highestDependencyQualityIDOld
+            _highestDependencyQualityIDOld = dependencyQualityLayer;
+        }else
+        {
+            // change of dependency, update _highestDependencyQualityIDOld store as old
+            _highestDependencyQualityIDOld = _highestDependencyQualityID;
+        }
+    }
+    _useHighestSendLayer = true;
+    _highestDependencyLayer = dependencyLayer;
+    _highestDependencyQualityID = dependencyQualityLayer;
+    _highestTemporalLayer = temporalLayer;
+    return 0;
+}
+
+WebRtc_Word32
+RTPSenderH264::HighestSendLayer(WebRtc_UWord8& dependencyQualityLayer,
+                                WebRtc_UWord8& temporalLayer)
+{
+    if (!_useHighestSendLayer)
+    {
+        // No information set
+        return -1;
+    }
+    dependencyQualityLayer = _highestDependencyQualityID;
+    temporalLayer = _highestTemporalLayer;
+    return 0;
+}
+/*
+*   H.264
+*/
+WebRtc_Word32
+RTPSenderH264::SendH264(const FrameType frameType,
+                        const WebRtc_Word8 payloadType,
+                        const WebRtc_UWord32 captureTimeStamp,
+                        const WebRtc_UWord8* payloadData,
+                        const WebRtc_UWord32 payloadSize,
+                        H264Information& h264Information)
+{
+    WebRtc_Word32 payloadBytesToSend = payloadSize;
+    const WebRtc_UWord8* data = payloadData;
+    bool switchToFUA = false;
+    const WebRtc_UWord16 rtpHeaderLength = _rtpSender.RTPHeaderLength();
+
+    const H264Info* ptrH264Info = NULL;
+    if (h264Information.GetInfo(payloadData,payloadSize, ptrH264Info) == -1)
+    {
+        return -1;
+    }
+    WebRtc_UWord16 idxNALU = 0;
+    WebRtc_UWord16 DONCdummy = 0;
+
+    while (payloadBytesToSend > 0)
+    {
+        switch(_h264Mode)
+        {
+        case H264_NON_INTERLEAVED_MODE:
+
+            if(!switchToFUA)
+            {
+                if(SendH264_STAP_A(frameType,
+                                   ptrH264Info,
+                                   idxNALU,
+                                   payloadType,
+                                   captureTimeStamp,
+                                   switchToFUA,
+                                   payloadBytesToSend,
+                                   data,
+                                   rtpHeaderLength) != 0)
+                {
+                    return -1;
+                }
+            }
+            else
+            {
+                // FUA for the rest of the frame
+                if(SendH264_FU_A(frameType,
+                                 ptrH264Info,
+                                 idxNALU,
+                                 payloadType,
+                                 captureTimeStamp,
+                                 payloadBytesToSend,
+                                 data,
+                                 rtpHeaderLength,
+                                 DONCdummy) != 0)
+                {
+                    return -1;
+                }
+                // try to go back to STAP_A
+                switchToFUA = false;
+            }
+            break;
+        case H264_SINGLE_NAL_MODE:
+            {
+                // modeSingleU
+                if(SendH264_SingleMode(frameType,
+                                       ptrH264Info,
+                                       idxNALU,
+                                       payloadType,
+                                       captureTimeStamp,
+                                       payloadBytesToSend,
+                                       data,
+                                       rtpHeaderLength,
+                                       DONCdummy) != 0)
+                {
+                    return -1;
+                }
+                break;
+            }
+        case H264_INTERLEAVED_MODE:
+            // not supported
+            assert(false);
+            return -1;
+        }
+    }
+    return 0;
+}
+} // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/H264/rtp_sender_h264.h b/src/modules/rtp_rtcp/source/H264/rtp_sender_h264.h
new file mode 100644
index 0000000..564b870
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/H264/rtp_sender_h264.h
@@ -0,0 +1,179 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_RTP_SENDER_H264_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_RTP_SENDER_H264_H_
+
+#include "typedefs.h"
+#include "ModuleRTPRTCPConfig.h"
+#include "rtp_rtcp_defines.h"
+#include "h264_information.h"
+
+#include "RTPSender.h"
+
+namespace webrtc {
+class RTPSenderH264
+{
+public:
+    WebRtc_Word32 SendH264(const FrameType frameType,
+                  const WebRtc_Word8 payloadType,
+                          const WebRtc_UWord32 captureTimeStamp,
+                          const WebRtc_UWord8* payloadData,
+                          const WebRtc_UWord32 payloadSize,
+                          H264Information& h264Information);
+
+    WebRtc_Word32 SendH264SVC(const FrameType frameType,
+                              const WebRtc_Word8 payloadType,
+                              const WebRtc_UWord32 captureTimeStamp,
+                              const WebRtc_UWord8* payloadData,
+                              const WebRtc_UWord32 payloadSize,
+                              H264Information& h264Information);
+
+    // H.264 AVC
+    WebRtc_Word32 SetH264PacketizationMode(const H264PacketizationMode mode);
+
+    WebRtc_Word32 SetH264SendModeNALU_PPS_SPS(const bool dontSend);
+
+    // H.264 SVC
+    WebRtc_Word32 SetHighestSendLayer(const WebRtc_UWord8 dependencyQualityLayer,
+                                    const WebRtc_UWord8 temporalLayer);
+
+    WebRtc_Word32 HighestSendLayer(WebRtc_UWord8& dependencyQualityLayer,
+                                 WebRtc_UWord8& temporalLayer);
+
+protected:
+    RTPSenderH264(RTPSenderInterface* rtpSender);
+    virtual ~RTPSenderH264();
+
+    WebRtc_Word32 Init();
+
+    virtual WebRtc_UWord16 FECPacketOverhead() const = 0;
+    virtual RtpVideoCodecTypes VideoCodecType() const = 0;
+
+    virtual WebRtc_Word32 SendVideoPacket(const FrameType frameType,
+                                        const WebRtc_UWord8* dataBuffer,
+                                        const WebRtc_UWord16 payloadLength,
+                                        const WebRtc_UWord16 rtpHeaderLength,
+                                        bool baseLayerVideoPacket=false) = 0;
+
+
+    bool SendH264SVCLayer(const FrameType frameType,
+                          const WebRtc_UWord8 temporalID,
+                          const WebRtc_UWord8 dependencyQualityID,
+                          bool& higestLayer);
+
+    // H.264 SVC
+    WebRtc_Word32 AddH264PACSINALU(const bool firstPacketInNALU,
+                                 const bool lastPacketInNALU,
+                                 const H264_PACSI_NALU& paci,
+                                 const H264_SVC_NALUHeader& svc,
+                                 const WebRtc_UWord16 DONC,
+                                 WebRtc_UWord8* databuffer,
+                                 WebRtc_Word32& curByte) const;
+
+    WebRtc_Word32 SendH264FillerData(const WebRtcRTPHeader* rtpHeader,
+                                   const WebRtc_UWord16 bytesToSend,
+                                   const WebRtc_UWord32 ssrc);
+
+    WebRtc_Word32 SendH264FillerData(const WebRtc_UWord32 captureTimestamp,
+                                   const WebRtc_UWord8 payloadType,
+                                   const WebRtc_UWord32 bytesToSend);
+
+    WebRtc_Word32 SendH264SVCRelayPacket(const WebRtcRTPHeader* rtpHeader,
+                                       const WebRtc_UWord8* incomingRTPPacket,
+                                       const WebRtc_UWord16 incomingRTPPacketSize,
+                                       const WebRtc_UWord32 ssrc,
+                                       const bool higestLayer);
+
+    WebRtc_Word32 SetH264RelaySequenceNumber(const WebRtc_UWord16 seqNum);
+
+    WebRtc_Word32 SetH264RelayCompleteLayer(const bool complete);
+
+    // H.264
+    H264PacketizationMode _h264Mode;
+    bool                      _h264SendPPS_SPS;
+
+    // H.264-SVC
+    WebRtc_Word8                _h264SVCPayloadType;
+    WebRtc_UWord16              _h264SVCRelaySequenceNumber;
+    WebRtc_UWord32              _h264SVCRelayTimeStamp;
+    bool                      _h264SVCRelayLayerComplete;
+
+
+private:
+    // H.264
+    WebRtc_Word32 SendH264_SingleMode(const FrameType frameType,
+                                const H264Info* ptrH264Info,
+                                    WebRtc_UWord16 &idxNALU,
+                                    const WebRtc_Word8 payloadType,
+                                    const WebRtc_UWord32 captureTimeStamp,
+                                    WebRtc_Word32 &payloadBytesToSend,
+                                    const WebRtc_UWord8*& data,
+                                    const WebRtc_UWord16 rtpHeaderLength,
+                                    const bool sendSVCPACSI=false);
+
+    WebRtc_Word32 SendH264_FU_A(const FrameType frameType,
+                              const H264Info* ptrH264Info,
+                              WebRtc_UWord16 &idxNALU,
+                              const WebRtc_Word8 payloadType,
+                              const WebRtc_UWord32 captureTimeStamp,
+                              WebRtc_Word32 &payloadBytesToSend,
+                              const WebRtc_UWord8*& data,
+                              const WebRtc_UWord16 rtpHeaderLength,
+                              const bool sendSVCPACSI = false);
+
+    WebRtc_Word32 SendH264_STAP_A(const FrameType frameType,
+                            const H264Info* ptrH264Info,
+                                WebRtc_UWord16 &idxNALU,
+                                const WebRtc_Word8 payloadType,
+                                const WebRtc_UWord32 captureTimeStamp,
+                                bool& switchToFUA,
+                                WebRtc_Word32 &payloadBytesToSend,
+                                const WebRtc_UWord8*& data,
+                                const WebRtc_UWord16 rtpHeaderLength);
+
+    WebRtc_Word32 SendH264_STAP_A_PACSI(const FrameType frameType,
+                                      const H264Info* ptrH264Info,
+                                      WebRtc_UWord16 &idxNALU,
+                                      const WebRtc_Word8 payloadType,
+                                      const WebRtc_UWord32 captureTimeStamp,
+                                      bool& switchToFUA,
+                                      WebRtc_Word32 &payloadBytesToSend,
+                                      const WebRtc_UWord8*& data,
+                                      const WebRtc_UWord16 rtpHeaderLengh)
+
+    WebRtc_Word32 SendH264_SinglePACSI(const FrameType frameType,
+                                 const H264Info* ptrH264Info,
+                                     const WebRtc_UWord16 idxNALU,
+                                     const WebRtc_Word8 payloadType,
+                                     const WebRtc_UWord32 captureTimeStamp,
+                                     const bool firstPacketInNALU,
+                                     const bool lastPacketInNALU);
+
+    bool AddH264SVCNALUHeader(const H264_SVC_NALUHeader& svc,
+                              WebRtc_UWord8* databuffer,
+                              WebRtc_Word32& curByte) const;
+
+    RTPSenderInterface&        _rtpSender;
+
+    // relay
+    bool                    _useHighestSendLayer;
+    WebRtc_UWord8             _highestDependencyLayerOld;
+    WebRtc_UWord8             _highestDependencyQualityIDOld;
+    WebRtc_UWord8             _highestDependencyLayer;
+    WebRtc_UWord8             _highestDependencyQualityID;
+    WebRtc_UWord8             _highestTemporalLayer;
+
+
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_H264_RTP_SENDER_H264_H_
diff --git a/src/modules/rtp_rtcp/source/bitrate.cc b/src/modules/rtp_rtcp/source/bitrate.cc
new file mode 100644
index 0000000..38cf537
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/bitrate.cc
@@ -0,0 +1,101 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "Bitrate.h"
+#include "rtp_utility.h"
+
+namespace webrtc {
+Bitrate::Bitrate(RtpRtcpClock* clock) :
+    _clock(*clock),
+    _packetRate(0),
+    _bitrate(0),
+    _bitrateNextIdx(0),
+    _timeLastRateUpdate(0),
+    _bytesCount(0),
+    _packetCount(0)
+{
+    memset(_packetRateArray, 0, sizeof(_packetRateArray));
+    memset(_bitrateDiffMS, 0, sizeof(_bitrateDiffMS));
+    memset(_bitrateArray, 0, sizeof(_bitrateArray));
+}
+
+void
+Bitrate::Update(const WebRtc_Word32 bytes)
+{
+    _bytesCount += bytes;
+    _packetCount++;
+}
+
+WebRtc_UWord32
+Bitrate::PacketRate() const
+{
+    return _packetRate;
+}
+
+WebRtc_UWord32 Bitrate::BitrateLast() const {
+  return _bitrate;
+}
+
+WebRtc_UWord32 Bitrate::BitrateNow() const {
+  WebRtc_Word64 now = _clock.GetTimeInMS();
+  WebRtc_Word64 diffMS = now -_timeLastRateUpdate;
+
+  if(diffMS > 10000) {  // 10 sec
+    // too high diff ignore
+    return _bitrate; // bits/s
+  }
+  WebRtc_Word64 bitsSinceLastRateUpdate = 8 * _bytesCount * 1000;
+
+  // have to consider the time when the measurement was done
+  // ((bits/sec * sec) + (bits)) / sec
+  WebRtc_Word64 bitrate = (static_cast<WebRtc_UWord64>(_bitrate) * 1000 +
+      bitsSinceLastRateUpdate) / (1000 + diffMS);
+  return static_cast<WebRtc_UWord32>(bitrate);
+}
+
+void Bitrate::Process() {
+  // Triggered by timer.
+  WebRtc_Word64 now = _clock.GetTimeInMS();
+  WebRtc_Word64 diffMS = now -_timeLastRateUpdate;
+
+  if (diffMS < 100) {
+    // Not enough data, wait...
+    return;
+  }
+  if (diffMS > 10000) {  // 10 sec
+    // too high diff ignore
+    _timeLastRateUpdate = now;
+    _bytesCount = 0;
+    _packetCount = 0;
+    return;
+  }
+  _packetRateArray[_bitrateNextIdx] = (_packetCount * 1000) / diffMS;
+  _bitrateArray[_bitrateNextIdx] = 8 * ((_bytesCount * 1000) / diffMS);
+  _bitrateDiffMS[_bitrateNextIdx] = diffMS;
+  _bitrateNextIdx++;
+  if (_bitrateNextIdx >= 10) {
+    _bitrateNextIdx = 0;
+  }
+  WebRtc_Word64 sumDiffMS = 0;
+  WebRtc_Word64 sumBitrateMS = 0;
+  WebRtc_Word64 sumPacketrateMS = 0;
+  for (int i = 0; i < 10; i++) {
+    sumDiffMS += _bitrateDiffMS[i];
+    sumBitrateMS += _bitrateArray[i] * _bitrateDiffMS[i];
+    sumPacketrateMS += _packetRateArray[i] * _bitrateDiffMS[i];
+  }
+  _timeLastRateUpdate = now;
+  _bytesCount = 0;
+  _packetCount = 0;
+  _packetRate = static_cast<WebRtc_UWord32>(sumPacketrateMS / sumDiffMS);
+  _bitrate = static_cast<WebRtc_UWord32>(sumBitrateMS / sumDiffMS);
+}
+
+} // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/dtmf_queue.cc b/src/modules/rtp_rtcp/source/dtmf_queue.cc
new file mode 100644
index 0000000..749309b
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/dtmf_queue.cc
@@ -0,0 +1,79 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "dtmf_queue.h"
+
+#include <string.h> //memset
+
+namespace webrtc {
+DTMFqueue::DTMFqueue():
+    _DTMFCritsect(CriticalSectionWrapper::CreateCriticalSection()),
+    _nextEmptyIndex(0)
+{
+    memset(_DTMFKey,0, sizeof(_DTMFKey));
+    memset(_DTMFLen,0, sizeof(_DTMFLen));
+    memset(_DTMFLevel,0, sizeof(_DTMFLevel));
+}
+
+DTMFqueue::~DTMFqueue()
+{
+    delete _DTMFCritsect;
+}
+
+WebRtc_Word32
+DTMFqueue::AddDTMF(WebRtc_UWord8 key, WebRtc_UWord16 len, WebRtc_UWord8 level)
+{
+    CriticalSectionScoped lock(_DTMFCritsect);
+
+    if(_nextEmptyIndex >= DTMF_OUTBAND_MAX)
+    {
+        return -1;
+    }
+    WebRtc_Word32 index = _nextEmptyIndex;
+    _DTMFKey[index] = key;
+    _DTMFLen[index] = len;
+    _DTMFLevel[index] = level;
+    _nextEmptyIndex++;
+    return 0;
+}
+
+WebRtc_Word8
+DTMFqueue::NextDTMF(WebRtc_UWord8* DTMFKey, WebRtc_UWord16* len, WebRtc_UWord8* level)
+{
+    CriticalSectionScoped lock(_DTMFCritsect);
+
+    if(!PendingDTMF())
+    {
+        return -1;
+    }
+    *DTMFKey=_DTMFKey[0];
+    *len=_DTMFLen[0];
+    *level=_DTMFLevel[0];
+
+    memmove(&(_DTMFKey[0]), &(_DTMFKey[1]), _nextEmptyIndex*sizeof(WebRtc_UWord8));
+    memmove(&(_DTMFLen[0]), &(_DTMFLen[1]), _nextEmptyIndex*sizeof(WebRtc_UWord16));
+    memmove(&(_DTMFLevel[0]), &(_DTMFLevel[1]), _nextEmptyIndex*sizeof(WebRtc_UWord8));
+
+    _nextEmptyIndex--;
+    return 0;
+}
+
+bool
+DTMFqueue::PendingDTMF()
+{
+    return(_nextEmptyIndex>0);
+}
+
+void
+DTMFqueue::ResetDTMF()
+{
+    _nextEmptyIndex = 0;
+}
+} // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/dtmf_queue.h b/src/modules/rtp_rtcp/source/dtmf_queue.h
new file mode 100644
index 0000000..8451a21
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/dtmf_queue.h
@@ -0,0 +1,40 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_DTMF_QUEUE_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_DTMF_QUEUE_H_
+
+#include "typedefs.h"
+#include "rtp_rtcp_config.h"
+
+#include "critical_section_wrapper.h"
+
+namespace webrtc {
+class DTMFqueue
+{
+public:
+    DTMFqueue();
+    virtual ~DTMFqueue();
+
+    WebRtc_Word32 AddDTMF(WebRtc_UWord8 DTMFKey, WebRtc_UWord16 len, WebRtc_UWord8 level);
+    WebRtc_Word8 NextDTMF(WebRtc_UWord8* DTMFKey, WebRtc_UWord16 * len, WebRtc_UWord8 * level);
+    bool PendingDTMF();
+    void ResetDTMF();
+
+private:
+    CriticalSectionWrapper* _DTMFCritsect;
+    WebRtc_UWord8        _nextEmptyIndex;
+    WebRtc_UWord8        _DTMFKey[DTMF_OUTBAND_MAX];
+    WebRtc_UWord16       _DTMFLen[DTMF_OUTBAND_MAX];
+    WebRtc_UWord8        _DTMFLevel[DTMF_OUTBAND_MAX];
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_DTMF_QUEUE_H_
diff --git a/src/modules/rtp_rtcp/source/fec_private_tables_bursty.h b/src/modules/rtp_rtcp/source/fec_private_tables_bursty.h
new file mode 100644
index 0000000..1de9325
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/fec_private_tables_bursty.h
@@ -0,0 +1,761 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_PRIVATE_TABLES_BURSTY_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_PRIVATE_TABLES_BURSTY_H_
+
+// This file contains a set of packets masks for the FEC code. The masks in
+// this table are specifically designed to favor recovery of bursty/consecutive
+// loss network conditions. The tradeoff is worse recovery for random losses.
+// These packet masks are currently defined to protect up to 12 media packets.
+// They have the following property: for any packet mask defined by the
+// parameters (k,m), where k = number of media packets, m = number of FEC
+// packets, all "consecutive" losses of size <= m are completely recoverable.
+// By consecutive losses we mean consecutive with respect to the sequence
+// number ordering of the list (media and FEC) of packets. The difference
+// between these masks (|kFecMaskBursty|) and |kFecMaskRandom| type, defined
+// in fec_private_tables.h, is more significant for longer codes
+// (i.e., more packets/symbols in the code, so larger (k,m), i.e.,  k > 4,
+// m > 3).
+
+#include "typedefs.h"
+
+namespace {
+
+const uint8_t kMaskBursty1_1[2] = {
+  0x80, 0x00
+};
+
+const uint8_t kMaskBursty2_1[2] = {
+  0xc0, 0x00
+};
+
+const uint8_t kMaskBursty2_2[4] = {
+  0x80, 0x00,
+  0xc0, 0x00
+};
+
+const uint8_t kMaskBursty3_1[2] = {
+  0xe0, 0x00
+};
+
+const uint8_t kMaskBursty3_2[4] = {
+  0xc0, 0x00,
+  0xa0, 0x00
+};
+
+const uint8_t kMaskBursty3_3[6] = {
+  0x80, 0x00,
+  0xc0, 0x00,
+  0x60, 0x00
+};
+
+const uint8_t kMaskBursty4_1[2] = {
+  0xf0, 0x00
+};
+
+const uint8_t kMaskBursty4_2[4] = {
+  0xa0, 0x00,
+  0xd0, 0x00
+};
+
+const uint8_t kMaskBursty4_3[6] = {
+  0xc0, 0x00,
+  0x60, 0x00,
+  0x90, 0x00
+};
+
+const uint8_t kMaskBursty4_4[8] = {
+  0x80, 0x00,
+  0xc0, 0x00,
+  0x60, 0x00,
+  0x30, 0x00
+};
+
+const uint8_t kMaskBursty5_1[2] = {
+  0xf8, 0x00
+};
+
+const uint8_t kMaskBursty5_2[4] = {
+  0xd0, 0x00,
+  0xa8, 0x00
+};
+
+const uint8_t kMaskBursty5_3[6] = {
+  0x70, 0x00,
+  0x90, 0x00,
+  0xc8, 0x00
+};
+
+const uint8_t kMaskBursty5_4[8] = {
+  0xc0, 0x00,
+  0x60, 0x00,
+  0x30, 0x00,
+  0x88, 0x00
+};
+
+const uint8_t kMaskBursty5_5[10] = {
+  0x80, 0x00,
+  0xc0, 0x00,
+  0x60, 0x00,
+  0x30, 0x00,
+  0x18, 0x00
+};
+
+const uint8_t kMaskBursty6_1[2] = {
+  0xfc, 0x00
+};
+
+const uint8_t kMaskBursty6_2[4] = {
+  0xa8, 0x00,
+  0xd4, 0x00
+};
+
+const uint8_t kMaskBursty6_3[6] = {
+  0x94, 0x00,
+  0xc8, 0x00,
+  0x64, 0x00
+};
+
+const uint8_t kMaskBursty6_4[8] = {
+  0x60, 0x00,
+  0x38, 0x00,
+  0x88, 0x00,
+  0xc4, 0x00
+};
+
+const uint8_t kMaskBursty6_5[10] = {
+  0xc0, 0x00,
+  0x60, 0x00,
+  0x30, 0x00,
+  0x18, 0x00,
+  0x84, 0x00
+};
+
+const uint8_t kMaskBursty6_6[12] = {
+  0x80, 0x00,
+  0xc0, 0x00,
+  0x60, 0x00,
+  0x30, 0x00,
+  0x18, 0x00,
+  0x0c, 0x00
+};
+
+const uint8_t kMaskBursty7_1[2] = {
+  0xfe, 0x00
+};
+
+const uint8_t kMaskBursty7_2[4] = {
+  0xd4, 0x00,
+  0xaa, 0x00
+};
+
+const uint8_t kMaskBursty7_3[6] = {
+  0xc8, 0x00,
+  0x74, 0x00,
+  0x92, 0x00
+};
+
+const uint8_t kMaskBursty7_4[8] = {
+  0x38, 0x00,
+  0x8a, 0x00,
+  0xc4, 0x00,
+  0x62, 0x00
+};
+
+const uint8_t kMaskBursty7_5[10] = {
+  0x60, 0x00,
+  0x30, 0x00,
+  0x1c, 0x00,
+  0x84, 0x00,
+  0xc2, 0x00
+};
+
+const uint8_t kMaskBursty7_6[12] = {
+  0xc0, 0x00,
+  0x60, 0x00,
+  0x30, 0x00,
+  0x18, 0x00,
+  0x0c, 0x00,
+  0x82, 0x00
+};
+
+const uint8_t kMaskBursty7_7[14] = {
+  0x80, 0x00,
+  0xc0, 0x00,
+  0x60, 0x00,
+  0x30, 0x00,
+  0x18, 0x00,
+  0x0c, 0x00,
+  0x06, 0x00
+};
+
+const uint8_t kMaskBursty8_1[2] = {
+  0xff, 0x00
+};
+
+const uint8_t kMaskBursty8_2[4] = {
+  0xaa, 0x00,
+  0xd5, 0x00
+};
+
+const uint8_t kMaskBursty8_3[6] = {
+  0x74, 0x00,
+  0x92, 0x00,
+  0xc9, 0x00
+};
+
+const uint8_t kMaskBursty8_4[8] = {
+  0x8a, 0x00,
+  0xc5, 0x00,
+  0x62, 0x00,
+  0x31, 0x00
+};
+
+const uint8_t kMaskBursty8_5[10] = {
+  0x30, 0x00,
+  0x1c, 0x00,
+  0x85, 0x00,
+  0xc2, 0x00,
+  0x61, 0x00
+};
+
+const uint8_t kMaskBursty8_6[12] = {
+  0x60, 0x00,
+  0x30, 0x00,
+  0x18, 0x00,
+  0x0e, 0x00,
+  0x82, 0x00,
+  0xc1, 0x00
+};
+
+const uint8_t kMaskBursty8_7[14] = {
+  0xc0, 0x00,
+  0x60, 0x00,
+  0x30, 0x00,
+  0x18, 0x00,
+  0x0c, 0x00,
+  0x06, 0x00,
+  0x81, 0x00
+};
+
+const uint8_t kMaskBursty8_8[16] = {
+  0x80, 0x00,
+  0xc0, 0x00,
+  0x60, 0x00,
+  0x30, 0x00,
+  0x18, 0x00,
+  0x0c, 0x00,
+  0x06, 0x00,
+  0x03, 0x00
+};
+
+const uint8_t kMaskBursty9_1[2] = {
+  0xff, 0x80
+};
+
+const uint8_t kMaskBursty9_2[4] = {
+  0xd5, 0x00,
+  0xaa, 0x80
+};
+
+const uint8_t kMaskBursty9_3[6] = {
+  0x92, 0x00,
+  0xc9, 0x00,
+  0x74, 0x80
+};
+
+const uint8_t kMaskBursty9_4[8] = {
+  0xc5, 0x00,
+  0x62, 0x00,
+  0x39, 0x00,
+  0x8a, 0x80
+};
+
+const uint8_t kMaskBursty9_5[10] = {
+  0x1c, 0x00,
+  0x85, 0x00,
+  0xc2, 0x80,
+  0x61, 0x00,
+  0x30, 0x80
+};
+
+const uint8_t kMaskBursty9_6[12] = {
+  0x30, 0x00,
+  0x18, 0x00,
+  0x0e, 0x00,
+  0x82, 0x80,
+  0xc1, 0x00,
+  0x60, 0x80
+};
+
+const uint8_t kMaskBursty9_7[14] = {
+  0x60, 0x00,
+  0x30, 0x00,
+  0x18, 0x00,
+  0x0c, 0x00,
+  0x07, 0x00,
+  0x81, 0x00,
+  0xc0, 0x80
+};
+
+const uint8_t kMaskBursty9_8[16] = {
+  0xc0, 0x00,
+  0x60, 0x00,
+  0x30, 0x00,
+  0x18, 0x00,
+  0x0c, 0x00,
+  0x06, 0x00,
+  0x03, 0x00,
+  0x80, 0x80
+};
+
+const uint8_t kMaskBursty9_9[18] = {
+  0x80, 0x00,
+  0xc0, 0x00,
+  0x60, 0x00,
+  0x30, 0x00,
+  0x18, 0x00,
+  0x0c, 0x00,
+  0x06, 0x00,
+  0x03, 0x00,
+  0x01, 0x80
+};
+
+const uint8_t kMaskBursty10_1[2] = {
+  0xff, 0xc0
+};
+
+const uint8_t kMaskBursty10_2[4] = {
+  0xaa, 0x80,
+  0xd5, 0x40
+};
+
+const uint8_t kMaskBursty10_3[6] = {
+  0xc9, 0x00,
+  0x74, 0x80,
+  0x92, 0x40
+};
+
+const uint8_t kMaskBursty10_4[8] = {
+  0x62, 0x00,
+  0x39, 0x00,
+  0x8a, 0x80,
+  0xc5, 0x40
+};
+
+const uint8_t kMaskBursty10_5[10] = {
+  0x85, 0x00,
+  0xc2, 0x80,
+  0x61, 0x40,
+  0x30, 0x80,
+  0x18, 0x40
+};
+
+const uint8_t kMaskBursty10_6[12] = {
+  0x18, 0x00,
+  0x0e, 0x00,
+  0x82, 0x80,
+  0xc1, 0x40,
+  0x60, 0x80,
+  0x30, 0x40
+};
+
+const uint8_t kMaskBursty10_7[14] = {
+  0x30, 0x00,
+  0x18, 0x00,
+  0x0c, 0x00,
+  0x07, 0x00,
+  0x81, 0x40,
+  0xc0, 0x80,
+  0x60, 0x40
+};
+
+const uint8_t kMaskBursty10_8[16] = {
+  0x60, 0x00,
+  0x30, 0x00,
+  0x18, 0x00,
+  0x0c, 0x00,
+  0x06, 0x00,
+  0x03, 0x00,
+  0x80, 0x80,
+  0xc0, 0x40
+};
+
+const uint8_t kMaskBursty10_9[18] = {
+  0xc0, 0x00,
+  0x60, 0x00,
+  0x30, 0x00,
+  0x18, 0x00,
+  0x0c, 0x00,
+  0x06, 0x00,
+  0x03, 0x00,
+  0x01, 0x80,
+  0x80, 0x40
+};
+
+const uint8_t kMaskBursty10_10[20] = {
+  0x80, 0x00,
+  0xc0, 0x00,
+  0x60, 0x00,
+  0x30, 0x00,
+  0x18, 0x00,
+  0x0c, 0x00,
+  0x06, 0x00,
+  0x03, 0x00,
+  0x01, 0x80,
+  0x00, 0xc0
+};
+
+const uint8_t kMaskBursty11_1[2] = {
+  0xff, 0xe0
+};
+
+const uint8_t kMaskBursty11_2[4] = {
+  0xd5, 0x40,
+  0xaa, 0xa0
+};
+
+const uint8_t kMaskBursty11_3[6] = {
+  0x74, 0x80,
+  0x92, 0x40,
+  0xc9, 0x20
+};
+
+const uint8_t kMaskBursty11_4[8] = {
+  0x39, 0x00,
+  0x8a, 0x80,
+  0xc5, 0x40,
+  0x62, 0x20
+};
+
+const uint8_t kMaskBursty11_5[10] = {
+  0xc2, 0xc0,
+  0x61, 0x00,
+  0x30, 0xa0,
+  0x1c, 0x40,
+  0x85, 0x20
+};
+
+const uint8_t kMaskBursty11_6[12] = {
+  0x0e, 0x00,
+  0x82, 0x80,
+  0xc1, 0x40,
+  0x60, 0xa0,
+  0x30, 0x40,
+  0x18, 0x20
+};
+
+const uint8_t kMaskBursty11_7[14] = {
+  0x18, 0x00,
+  0x0c, 0x00,
+  0x07, 0x00,
+  0x81, 0x40,
+  0xc0, 0xa0,
+  0x60, 0x40,
+  0x30, 0x20
+};
+
+const uint8_t kMaskBursty11_8[16] = {
+  0x30, 0x00,
+  0x18, 0x00,
+  0x0c, 0x00,
+  0x06, 0x00,
+  0x03, 0x40,
+  0x80, 0xa0,
+  0xc0, 0x40,
+  0x60, 0x20
+};
+
+const uint8_t kMaskBursty11_9[18] = {
+  0x60, 0x00,
+  0x30, 0x00,
+  0x18, 0x00,
+  0x0c, 0x00,
+  0x06, 0x00,
+  0x03, 0x00,
+  0x01, 0x80,
+  0x80, 0x40,
+  0xc0, 0x20
+};
+
+const uint8_t kMaskBursty11_10[20] = {
+  0xc0, 0x00,
+  0x60, 0x00,
+  0x30, 0x00,
+  0x18, 0x00,
+  0x0c, 0x00,
+  0x06, 0x00,
+  0x03, 0x00,
+  0x01, 0x80,
+  0x00, 0xc0,
+  0x80, 0x20
+};
+
+const uint8_t kMaskBursty11_11[22] = {
+  0x80, 0x00,
+  0xc0, 0x00,
+  0x60, 0x00,
+  0x30, 0x00,
+  0x18, 0x00,
+  0x0c, 0x00,
+  0x06, 0x00,
+  0x03, 0x00,
+  0x01, 0x80,
+  0x00, 0xc0,
+  0x00, 0x60
+};
+
+const uint8_t kMaskBursty12_1[2] = {
+  0xff, 0xf0
+};
+
+const uint8_t kMaskBursty12_2[4] = {
+  0xaa, 0xa0,
+  0xd5, 0x50
+};
+
+const uint8_t kMaskBursty12_3[6] = {
+  0x92, 0x40,
+  0xc9, 0x20,
+  0x74, 0x90
+};
+
+const uint8_t kMaskBursty12_4[8] = {
+  0x8a, 0x80,
+  0xc5, 0x40,
+  0x62, 0x20,
+  0x39, 0x10
+};
+
+const uint8_t kMaskBursty12_5[10] = {
+  0x61, 0x00,
+  0x30, 0xa0,
+  0x1c, 0x50,
+  0x85, 0x20,
+  0xc2, 0x90
+};
+
+const uint8_t kMaskBursty12_6[12] = {
+  0x82, 0x90,
+  0xc1, 0x40,
+  0x60, 0xa0,
+  0x30, 0x50,
+  0x18, 0x20,
+  0x0c, 0x10
+};
+
+const uint8_t kMaskBursty12_7[14] = {
+  0x0c, 0x00,
+  0x07, 0x00,
+  0x81, 0x40,
+  0xc0, 0xa0,
+  0x60, 0x50,
+  0x30, 0x20,
+  0x18, 0x10
+};
+
+const uint8_t kMaskBursty12_8[16] = {
+  0x18, 0x00,
+  0x0c, 0x00,
+  0x06, 0x00,
+  0x03, 0x00,
+  0x80, 0xa0,
+  0xc0, 0x50,
+  0x60, 0x20,
+  0x30, 0x10
+};
+
+const uint8_t kMaskBursty12_9[18] = {
+  0x30, 0x00,
+  0x18, 0x00,
+  0x0c, 0x00,
+  0x06, 0x00,
+  0x03, 0x00,
+  0x01, 0x80,
+  0x80, 0x50,
+  0xc0, 0x20,
+  0x60, 0x10
+};
+
+const uint8_t kMaskBursty12_10[20] = {
+  0x60, 0x00,
+  0x30, 0x00,
+  0x18, 0x00,
+  0x0c, 0x00,
+  0x06, 0x00,
+  0x03, 0x00,
+  0x01, 0x80,
+  0x00, 0xc0,
+  0x80, 0x20,
+  0xc0, 0x10
+};
+
+const uint8_t kMaskBursty12_11[22] = {
+  0xc0, 0x00,
+  0x60, 0x00,
+  0x30, 0x00,
+  0x18, 0x00,
+  0x0c, 0x00,
+  0x06, 0x00,
+  0x03, 0x00,
+  0x01, 0x80,
+  0x00, 0xc0,
+  0x00, 0x60,
+  0x80, 0x10
+};
+
+const uint8_t kMaskBursty12_12[24] = {
+  0x80, 0x00,
+  0xc0, 0x00,
+  0x60, 0x00,
+  0x30, 0x00,
+  0x18, 0x00,
+  0x0c, 0x00,
+  0x06, 0x00,
+  0x03, 0x00,
+  0x01, 0x80,
+  0x00, 0xc0,
+  0x00, 0x60,
+  0x00, 0x30
+};
+
+const uint8_t* kPacketMaskBursty1[1] = {
+  kMaskBursty1_1
+};
+
+const uint8_t* kPacketMaskBursty2[2] = {
+  kMaskBursty2_1,
+  kMaskBursty2_2
+};
+
+const uint8_t* kPacketMaskBursty3[3] = {
+  kMaskBursty3_1,
+  kMaskBursty3_2,
+  kMaskBursty3_3
+};
+
+const uint8_t* kPacketMaskBursty4[4] = {
+  kMaskBursty4_1,
+  kMaskBursty4_2,
+  kMaskBursty4_3,
+  kMaskBursty4_4
+};
+
+const uint8_t* kPacketMaskBursty5[5] = {
+  kMaskBursty5_1,
+  kMaskBursty5_2,
+  kMaskBursty5_3,
+  kMaskBursty5_4,
+  kMaskBursty5_5
+};
+
+const uint8_t* kPacketMaskBursty6[6] = {
+  kMaskBursty6_1,
+  kMaskBursty6_2,
+  kMaskBursty6_3,
+  kMaskBursty6_4,
+  kMaskBursty6_5,
+  kMaskBursty6_6
+};
+
+const uint8_t* kPacketMaskBursty7[7] = {
+  kMaskBursty7_1,
+  kMaskBursty7_2,
+  kMaskBursty7_3,
+  kMaskBursty7_4,
+  kMaskBursty7_5,
+  kMaskBursty7_6,
+  kMaskBursty7_7
+};
+
+const uint8_t* kPacketMaskBursty8[8] = {
+  kMaskBursty8_1,
+  kMaskBursty8_2,
+  kMaskBursty8_3,
+  kMaskBursty8_4,
+  kMaskBursty8_5,
+  kMaskBursty8_6,
+  kMaskBursty8_7,
+  kMaskBursty8_8
+};
+
+const uint8_t* kPacketMaskBursty9[9] = {
+  kMaskBursty9_1,
+  kMaskBursty9_2,
+  kMaskBursty9_3,
+  kMaskBursty9_4,
+  kMaskBursty9_5,
+  kMaskBursty9_6,
+  kMaskBursty9_7,
+  kMaskBursty9_8,
+  kMaskBursty9_9
+};
+
+const uint8_t* kPacketMaskBursty10[10] = {
+  kMaskBursty10_1,
+  kMaskBursty10_2,
+  kMaskBursty10_3,
+  kMaskBursty10_4,
+  kMaskBursty10_5,
+  kMaskBursty10_6,
+  kMaskBursty10_7,
+  kMaskBursty10_8,
+  kMaskBursty10_9,
+  kMaskBursty10_10
+};
+
+const uint8_t* kPacketMaskBursty11[11] = {
+  kMaskBursty11_1,
+  kMaskBursty11_2,
+  kMaskBursty11_3,
+  kMaskBursty11_4,
+  kMaskBursty11_5,
+  kMaskBursty11_6,
+  kMaskBursty11_7,
+  kMaskBursty11_8,
+  kMaskBursty11_9,
+  kMaskBursty11_10,
+  kMaskBursty11_11
+};
+
+const uint8_t* kPacketMaskBursty12[12] = {
+  kMaskBursty12_1,
+  kMaskBursty12_2,
+  kMaskBursty12_3,
+  kMaskBursty12_4,
+  kMaskBursty12_5,
+  kMaskBursty12_6,
+  kMaskBursty12_7,
+  kMaskBursty12_8,
+  kMaskBursty12_9,
+  kMaskBursty12_10,
+  kMaskBursty12_11,
+  kMaskBursty12_12
+};
+
+const uint8_t** kPacketMaskBurstyTbl[12] = {
+  kPacketMaskBursty1,
+  kPacketMaskBursty2,
+  kPacketMaskBursty3,
+  kPacketMaskBursty4,
+  kPacketMaskBursty5,
+  kPacketMaskBursty6,
+  kPacketMaskBursty7,
+  kPacketMaskBursty8,
+  kPacketMaskBursty9,
+  kPacketMaskBursty10,
+  kPacketMaskBursty11,
+  kPacketMaskBursty12
+};
+
+}  // namespace
+#endif  // WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_PRIVATE_TABLES_BURSTY_H_
+
diff --git a/src/modules/rtp_rtcp/source/fec_private_tables_random.h b/src/modules/rtp_rtcp/source/fec_private_tables_random.h
new file mode 100644
index 0000000..7c00786
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/fec_private_tables_random.h
@@ -0,0 +1,24523 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_PRIVATE_TABLES_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_PRIVATE_TABLES_H_
+
+// This file contains a set of packets masks for the FEC code. The masks in
+// this table are specifically designed to favor recovery to random loss.
+// These packet masks are defined to protect up to maximum of 48 media packets.
+
+#include "typedefs.h"
+
+namespace {
+
+const uint8_t kMaskRandom10_1[2] = {
+  0xff, 0xc0
+};
+
+const uint8_t kMaskRandom10_10[20] = {
+  0x4c, 0x00,
+  0x51, 0x00,
+  0xa0, 0x40,
+  0x04, 0xc0,
+  0x03, 0x80,
+  0x86, 0x00,
+  0x29, 0x00,
+  0x42, 0x40,
+  0x98, 0x00,
+  0x30, 0x80
+};
+
+const uint8_t kMaskRandom10_2[4] = {
+  0xaa, 0x80,
+  0xd5, 0x40
+};
+
+const uint8_t kMaskRandom10_3[6] = {
+  0xa4, 0x40,
+  0xc9, 0x00,
+  0x52, 0x80
+};
+
+const uint8_t kMaskRandom10_4[8] = {
+  0xca, 0x00,
+  0x32, 0x80,
+  0xa1, 0x40,
+  0x55, 0x00
+};
+
+const uint8_t kMaskRandom10_5[10] = {
+  0xca, 0x00,
+  0x32, 0x80,
+  0xa1, 0x40,
+  0x55, 0x00,
+  0x08, 0xc0
+};
+
+const uint8_t kMaskRandom10_6[12] = {
+  0x0e, 0x00,
+  0x33, 0x00,
+  0x10, 0xc0,
+  0x45, 0x40,
+  0x88, 0x80,
+  0xe0, 0x00
+};
+
+const uint8_t kMaskRandom10_7[14] = {
+  0x46, 0x00,
+  0x33, 0x00,
+  0x80, 0xc0,
+  0x0c, 0x40,
+  0x28, 0x80,
+  0x94, 0x00,
+  0xc1, 0x00
+};
+
+const uint8_t kMaskRandom10_8[16] = {
+  0x2c, 0x00,
+  0x81, 0x80,
+  0xa0, 0x40,
+  0x05, 0x40,
+  0x18, 0x80,
+  0xc2, 0x00,
+  0x22, 0x80,
+  0x50, 0x40
+};
+
+const uint8_t kMaskRandom10_9[18] = {
+  0x4c, 0x00,
+  0x23, 0x00,
+  0x88, 0xc0,
+  0x21, 0x40,
+  0x52, 0x80,
+  0x94, 0x00,
+  0x26, 0x00,
+  0x48, 0x40,
+  0x91, 0x80
+};
+
+const uint8_t kMaskRandom11_1[2] = {
+  0xff, 0xe0
+};
+
+const uint8_t kMaskRandom11_10[20] = {
+  0x64, 0x40,
+  0x51, 0x40,
+  0xa9, 0x00,
+  0x04, 0xc0,
+  0xd0, 0x00,
+  0x82, 0x40,
+  0x21, 0x20,
+  0x0c, 0x20,
+  0x4a, 0x00,
+  0x12, 0xa0
+};
+
+const uint8_t kMaskRandom11_11[22] = {
+  0x46, 0x40,
+  0x33, 0x20,
+  0x99, 0x00,
+  0x05, 0x80,
+  0x80, 0xa0,
+  0x84, 0x40,
+  0x40, 0x60,
+  0x0a, 0x80,
+  0x68, 0x00,
+  0x10, 0x20,
+  0x30, 0x40
+};
+
+const uint8_t kMaskRandom11_2[4] = {
+  0xec, 0xc0,
+  0x9b, 0xa0
+};
+
+const uint8_t kMaskRandom11_3[6] = {
+  0xca, 0xc0,
+  0xf1, 0x40,
+  0xb6, 0x20
+};
+
+const uint8_t kMaskRandom11_4[8] = {
+  0xc4, 0xc0,
+  0x31, 0x60,
+  0x4b, 0x20,
+  0x2c, 0xa0
+};
+
+const uint8_t kMaskRandom11_5[10] = {
+  0x86, 0x80,
+  0x23, 0x20,
+  0x16, 0x20,
+  0x4c, 0x20,
+  0x41, 0xc0
+};
+
+const uint8_t kMaskRandom11_6[12] = {
+  0x64, 0x40,
+  0x51, 0x40,
+  0x0c, 0xa0,
+  0xa1, 0x20,
+  0x12, 0xa0,
+  0x8a, 0x40
+};
+
+const uint8_t kMaskRandom11_7[14] = {
+  0x46, 0x40,
+  0x33, 0x20,
+  0x91, 0x80,
+  0xa4, 0x20,
+  0x50, 0xa0,
+  0x84, 0xc0,
+  0x09, 0x60
+};
+
+const uint8_t kMaskRandom11_8[16] = {
+  0x0c, 0x80,
+  0x80, 0x60,
+  0xa0, 0x80,
+  0x05, 0x40,
+  0x43, 0x00,
+  0x1a, 0x00,
+  0x60, 0x20,
+  0x14, 0x20
+};
+
+const uint8_t kMaskRandom11_9[18] = {
+  0x46, 0x40,
+  0x62, 0x60,
+  0x8c, 0x00,
+  0x01, 0x60,
+  0x07, 0x80,
+  0xa0, 0x80,
+  0x18, 0xa0,
+  0x91, 0x00,
+  0x78, 0x00
+};
+
+const uint8_t kMaskRandom12_1[2] = {
+  0xff, 0xf0
+};
+
+const uint8_t kMaskRandom12_10[20] = {
+  0x51, 0x40,
+  0x45, 0x10,
+  0x80, 0xd0,
+  0x24, 0x20,
+  0x0a, 0x20,
+  0x00, 0xe0,
+  0xb8, 0x00,
+  0x09, 0x10,
+  0x56, 0x00,
+  0xa2, 0x80
+};
+
+const uint8_t kMaskRandom12_11[22] = {
+  0x53, 0x60,
+  0x21, 0x30,
+  0x10, 0x90,
+  0x00, 0x70,
+  0x0c, 0x10,
+  0x40, 0xc0,
+  0x6a, 0x00,
+  0x86, 0x00,
+  0x24, 0x80,
+  0x89, 0x00,
+  0xc0, 0x20
+};
+
+const uint8_t kMaskRandom12_12[24] = {
+  0x10, 0x60,
+  0x02, 0x30,
+  0x40, 0x50,
+  0x21, 0x80,
+  0x81, 0x10,
+  0x14, 0x80,
+  0x98, 0x00,
+  0x08, 0x90,
+  0x62, 0x00,
+  0x24, 0x20,
+  0x8a, 0x00,
+  0x84, 0x40
+};
+
+const uint8_t kMaskRandom12_2[4] = {
+  0xec, 0xc0,
+  0x93, 0xb0
+};
+
+const uint8_t kMaskRandom12_3[6] = {
+  0x9b, 0x80,
+  0x4f, 0x10,
+  0x3c, 0x60
+};
+
+const uint8_t kMaskRandom12_4[8] = {
+  0x8b, 0x20,
+  0x14, 0xb0,
+  0x22, 0xd0,
+  0x45, 0x50
+};
+
+const uint8_t kMaskRandom12_5[10] = {
+  0x53, 0x60,
+  0x64, 0x20,
+  0x0c, 0xc0,
+  0x82, 0xa0,
+  0x09, 0x30
+};
+
+const uint8_t kMaskRandom12_6[12] = {
+  0x51, 0x40,
+  0xc5, 0x10,
+  0x21, 0x80,
+  0x12, 0x30,
+  0x08, 0xe0,
+  0x2e, 0x00
+};
+
+const uint8_t kMaskRandom12_7[14] = {
+  0x53, 0x60,
+  0x21, 0x30,
+  0x90, 0x90,
+  0x02, 0x50,
+  0x06, 0xa0,
+  0x2c, 0x00,
+  0x88, 0x60
+};
+
+const uint8_t kMaskRandom12_8[16] = {
+  0x20, 0x60,
+  0x80, 0x30,
+  0x42, 0x40,
+  0x01, 0x90,
+  0x14, 0x10,
+  0x0a, 0x80,
+  0x38, 0x00,
+  0xc5, 0x00
+};
+
+const uint8_t kMaskRandom12_9[18] = {
+  0x53, 0x60,
+  0xe4, 0x20,
+  0x24, 0x40,
+  0xa1, 0x10,
+  0x18, 0x30,
+  0x03, 0x90,
+  0x8a, 0x10,
+  0x04, 0x90,
+  0x00, 0xe0
+};
+
+const uint8_t kMaskRandom13_1[2] = {
+  0xff, 0xf8
+};
+
+const uint8_t kMaskRandom13_10[20] = {
+  0xd1, 0x00,
+  0x44, 0x50,
+  0x10, 0x98,
+  0xa0, 0x50,
+  0x4a, 0x08,
+  0x40, 0x30,
+  0x80, 0x28,
+  0x0c, 0x90,
+  0x05, 0x88,
+  0x62, 0x20
+};
+
+const uint8_t kMaskRandom13_11[22] = {
+  0x51, 0x20,
+  0x22, 0x10,
+  0x13, 0x40,
+  0x25, 0x00,
+  0x18, 0x18,
+  0x0a, 0x20,
+  0x88, 0x88,
+  0x06, 0x80,
+  0xe0, 0x20,
+  0x84, 0x40,
+  0x44, 0x18
+};
+
+const uint8_t kMaskRandom13_12[24] = {
+  0x28, 0x28,
+  0x84, 0x50,
+  0x60, 0x40,
+  0x05, 0x48,
+  0x02, 0x98,
+  0x01, 0x30,
+  0x48, 0x10,
+  0x24, 0x80,
+  0x94, 0x00,
+  0x8a, 0x00,
+  0x11, 0x80,
+  0x52, 0x20
+};
+
+const uint8_t kMaskRandom13_13[26] = {
+  0x51, 0x20,
+  0x66, 0x40,
+  0x05, 0x48,
+  0x81, 0x20,
+  0x94, 0x00,
+  0x30, 0x80,
+  0x21, 0x10,
+  0x03, 0xc0,
+  0xe8, 0x00,
+  0x0a, 0x10,
+  0x80, 0x18,
+  0x04, 0x90,
+  0x08, 0xa8
+};
+
+const uint8_t kMaskRandom13_2[4] = {
+  0xec, 0xc0,
+  0x1b, 0x38
+};
+
+const uint8_t kMaskRandom13_3[6] = {
+  0x99, 0xb0,
+  0x46, 0xd8,
+  0x37, 0x28
+};
+
+const uint8_t kMaskRandom13_4[8] = {
+  0x49, 0xb0,
+  0x26, 0xd0,
+  0x85, 0x68,
+  0x52, 0x58
+};
+
+const uint8_t kMaskRandom13_5[10] = {
+  0x51, 0x30,
+  0x66, 0x40,
+  0x0c, 0x68,
+  0xa1, 0xc0,
+  0x22, 0x98
+};
+
+const uint8_t kMaskRandom13_6[12] = {
+  0xd1, 0x20,
+  0x46, 0xd0,
+  0x15, 0x48,
+  0x21, 0x70,
+  0x28, 0xc8,
+  0xaa, 0x20
+};
+
+const uint8_t kMaskRandom13_7[14] = {
+  0x59, 0x20,
+  0x26, 0x50,
+  0xb1, 0x40,
+  0x2b, 0x08,
+  0x14, 0xc8,
+  0xc8, 0x88,
+  0x84, 0xb0
+};
+
+const uint8_t kMaskRandom13_8[16] = {
+  0x80, 0xa8,
+  0x30, 0x90,
+  0x16, 0x08,
+  0x03, 0x30,
+  0x44, 0x60,
+  0x08, 0x18,
+  0xd8, 0x00,
+  0xa1, 0x40
+};
+
+const uint8_t kMaskRandom13_9[18] = {
+  0x59, 0x20,
+  0x66, 0x40,
+  0x14, 0x40,
+  0x21, 0x48,
+  0x02, 0xc8,
+  0x94, 0x10,
+  0x80, 0xa8,
+  0x0a, 0x90,
+  0x40, 0x18
+};
+
+const uint8_t kMaskRandom14_1[2] = {
+  0xff, 0xfc
+};
+
+const uint8_t kMaskRandom14_10[20] = {
+  0xc0, 0xd4,
+  0x1d, 0x40,
+  0xd4, 0x08,
+  0x02, 0x60,
+  0x04, 0x28,
+  0x20, 0x98,
+  0x40, 0x44,
+  0x08, 0x84,
+  0x68, 0x00,
+  0x23, 0x10
+};
+
+const uint8_t kMaskRandom14_11[22] = {
+  0x62, 0xd0,
+  0x35, 0x20,
+  0x14, 0x14,
+  0xc5, 0x08,
+  0x22, 0x0c,
+  0x88, 0xb8,
+  0x42, 0x54,
+  0x28, 0xa4,
+  0x94, 0x20,
+  0x1b, 0x04,
+  0x22, 0xc0
+};
+
+const uint8_t kMaskRandom14_12[24] = {
+  0x81, 0x04,
+  0x40, 0x68,
+  0x90, 0x24,
+  0x28, 0x28,
+  0x52, 0x10,
+  0x41, 0x88,
+  0x09, 0x30,
+  0x48, 0x44,
+  0x04, 0x44,
+  0x0e, 0x80,
+  0xa5, 0x90,
+  0x12, 0x0c
+};
+
+const uint8_t kMaskRandom14_13[26] = {
+  0x62, 0x54,
+  0x34, 0x60,
+  0x48, 0x04,
+  0x00, 0xac,
+  0x28, 0x08,
+  0x81, 0x08,
+  0x23, 0x04,
+  0x06, 0x80,
+  0x80, 0x14,
+  0x30, 0x10,
+  0x8c, 0x20,
+  0x54, 0x00,
+  0x80, 0xc0
+};
+
+const uint8_t kMaskRandom14_14[28] = {
+  0x40, 0x54,
+  0x15, 0x40,
+  0xc0, 0x04,
+  0x28, 0x10,
+  0x05, 0x0c,
+  0x64, 0x80,
+  0x81, 0x80,
+  0x10, 0x98,
+  0x84, 0x20,
+  0x12, 0x30,
+  0x62, 0x00,
+  0x28, 0x60,
+  0x0e, 0x08,
+  0x10, 0x84
+};
+
+const uint8_t kMaskRandom14_2[4] = {
+  0xec, 0xe8,
+  0x3b, 0x9c
+};
+
+const uint8_t kMaskRandom14_3[6] = {
+  0xac, 0xd8,
+  0x55, 0x6c,
+  0x27, 0xb4
+};
+
+const uint8_t kMaskRandom14_4[8] = {
+  0x2c, 0xd8,
+  0x93, 0x68,
+  0x1a, 0xb4,
+  0x47, 0x2c
+};
+
+const uint8_t kMaskRandom14_5[10] = {
+  0x64, 0xd8,
+  0xa5, 0x68,
+  0x52, 0xb4,
+  0x1d, 0xa8,
+  0x9c, 0x54
+};
+
+const uint8_t kMaskRandom14_6[12] = {
+  0x4a, 0x54,
+  0x95, 0x48,
+  0x14, 0xb4,
+  0x51, 0xa8,
+  0x22, 0x6c,
+  0x88, 0x8c
+};
+
+const uint8_t kMaskRandom14_7[14] = {
+  0x62, 0x54,
+  0xb9, 0x20,
+  0x18, 0xb4,
+  0x54, 0x98,
+  0x06, 0x6c,
+  0x85, 0x54,
+  0xaa, 0x88
+};
+
+const uint8_t kMaskRandom14_8[16] = {
+  0xc0, 0x14,
+  0x41, 0x60,
+  0x88, 0x30,
+  0x20, 0xa4,
+  0x0a, 0x48,
+  0x04, 0x98,
+  0x94, 0x40,
+  0x72, 0x00
+};
+
+const uint8_t kMaskRandom14_9[18] = {
+  0xa2, 0x54,
+  0x34, 0x60,
+  0x4a, 0x24,
+  0x20, 0xa8,
+  0x11, 0x84,
+  0x49, 0x08,
+  0x86, 0x0c,
+  0x20, 0xd4,
+  0x88, 0x48
+};
+
+const uint8_t kMaskRandom15_1[2] = {
+  0xff, 0xfe
+};
+
+const uint8_t kMaskRandom15_10[20] = {
+  0xc0, 0xa0,
+  0x15, 0x56,
+  0x74, 0x40,
+  0x00, 0x9c,
+  0x01, 0x2c,
+  0x44, 0x92,
+  0x88, 0x50,
+  0x20, 0xa4,
+  0xaa, 0x04,
+  0x02, 0x62
+};
+
+const uint8_t kMaskRandom15_11[22] = {
+  0x62, 0x22,
+  0xf1, 0x10,
+  0x10, 0x0e,
+  0x10, 0xb0,
+  0x24, 0x24,
+  0x01, 0x12,
+  0x00, 0xc4,
+  0x04, 0xa2,
+  0x02, 0x58,
+  0x2b, 0x00,
+  0x98, 0x40
+};
+
+const uint8_t kMaskRandom15_12[24] = {
+  0x88, 0x90,
+  0x40, 0x54,
+  0x82, 0x62,
+  0x21, 0xa4,
+  0x10, 0x64,
+  0x44, 0x0a,
+  0x10, 0xc8,
+  0x4d, 0x2a,
+  0x38, 0x02,
+  0x17, 0x48,
+  0x90, 0x84,
+  0x72, 0x14
+};
+
+const uint8_t kMaskRandom15_13[26] = {
+  0x62, 0xa2,
+  0x34, 0x44,
+  0x40, 0x4a,
+  0xc4, 0x04,
+  0x08, 0x60,
+  0x94, 0x12,
+  0x88, 0xc0,
+  0x21, 0x32,
+  0xc1, 0x40,
+  0x10, 0x68,
+  0x06, 0x90,
+  0x59, 0x00,
+  0x0a, 0x0c
+};
+
+const uint8_t kMaskRandom15_14[28] = {
+  0x40, 0x82,
+  0x15, 0x54,
+  0x88, 0x12,
+  0xc0, 0x10,
+  0x80, 0xa0,
+  0x01, 0x22,
+  0x40, 0x2c,
+  0x22, 0x02,
+  0x90, 0x04,
+  0x12, 0x40,
+  0x5d, 0x00,
+  0x20, 0x54,
+  0x86, 0x08,
+  0x28, 0x88
+};
+
+const uint8_t kMaskRandom15_15[30] = {
+  0x62, 0x22,
+  0x31, 0x10,
+  0x58, 0x00,
+  0x01, 0x12,
+  0x88, 0x20,
+  0x44, 0x02,
+  0x29, 0x04,
+  0x82, 0xa0,
+  0x0a, 0x1a,
+  0x11, 0xe0,
+  0x84, 0x04,
+  0x86, 0x40,
+  0x00, 0x86,
+  0x44, 0x48,
+  0x10, 0x98
+};
+
+const uint8_t kMaskRandom15_2[4] = {
+  0xec, 0xea,
+  0xbb, 0x9c
+};
+
+const uint8_t kMaskRandom15_3[6] = {
+  0xac, 0x92,
+  0x55, 0x4a,
+  0x43, 0x36
+};
+
+const uint8_t kMaskRandom15_4[8] = {
+  0x25, 0xaa,
+  0x95, 0x54,
+  0x1a, 0x6a,
+  0x43, 0xd4
+};
+
+const uint8_t kMaskRandom15_5[10] = {
+  0x64, 0xa2,
+  0x25, 0x54,
+  0x49, 0x68,
+  0x53, 0x90,
+  0x8e, 0x30
+};
+
+const uint8_t kMaskRandom15_6[12] = {
+  0x62, 0x8a,
+  0x15, 0x54,
+  0x4c, 0x46,
+  0x52, 0x94,
+  0x23, 0x64,
+  0x8a, 0x58
+};
+
+const uint8_t kMaskRandom15_7[14] = {
+  0x62, 0xa2,
+  0xb1, 0x14,
+  0x18, 0x6a,
+  0x44, 0xd4,
+  0x13, 0x64,
+  0x49, 0x1a,
+  0x86, 0x8c
+};
+
+const uint8_t kMaskRandom15_8[16] = {
+  0x90, 0x22,
+  0x09, 0x50,
+  0x00, 0x6a,
+  0x20, 0x34,
+  0x14, 0x44,
+  0xc2, 0x10,
+  0x00, 0xc6,
+  0x65, 0x80
+};
+
+const uint8_t kMaskRandom15_9[18] = {
+  0x62, 0x22,
+  0x24, 0x44,
+  0xc0, 0x50,
+  0x03, 0x0c,
+  0x16, 0x28,
+  0x89, 0x00,
+  0x82, 0x90,
+  0x08, 0xa4,
+  0x90, 0x48
+};
+
+const uint8_t kMaskRandom16_1[2] = {
+  0xff, 0xff
+};
+
+const uint8_t kMaskRandom16_10[20] = {
+  0x45, 0x51,
+  0x10, 0xa2,
+  0x01, 0x25,
+  0x0b, 0x42,
+  0xd8, 0x20,
+  0x82, 0x8c,
+  0x24, 0x4a,
+  0x38, 0x18,
+  0x2a, 0x25,
+  0x84, 0x92
+};
+
+const uint8_t kMaskRandom16_11[22] = {
+  0x55, 0x55,
+  0x2a, 0x22,
+  0x31, 0x11,
+  0x83, 0x42,
+  0x06, 0x98,
+  0x40, 0xe1,
+  0x2c, 0x44,
+  0xd8, 0x28,
+  0x92, 0x81,
+  0x84, 0x32,
+  0x68, 0x0c
+};
+
+const uint8_t kMaskRandom16_12[24] = {
+  0x84, 0x31,
+  0x18, 0xa2,
+  0x4e, 0x01,
+  0x44, 0xc8,
+  0x0e, 0x90,
+  0x20, 0xcc,
+  0x93, 0x40,
+  0x2d, 0x10,
+  0x31, 0x44,
+  0xc0, 0x23,
+  0x11, 0x25,
+  0xe8, 0x80
+};
+
+const uint8_t kMaskRandom16_13[26] = {
+  0x45, 0x15,
+  0x22, 0x22,
+  0x96, 0x0c,
+  0x0c, 0x50,
+  0x62, 0x04,
+  0x49, 0x06,
+  0x11, 0x82,
+  0x12, 0x38,
+  0x40, 0x71,
+  0xa8, 0x8a,
+  0x08, 0xa1,
+  0xa0, 0xc0,
+  0xc5, 0x10
+};
+
+const uint8_t kMaskRandom16_14[28] = {
+  0x45, 0x51,
+  0x22, 0x0a,
+  0x84, 0xd0,
+  0x0c, 0x8a,
+  0x18, 0x06,
+  0x30, 0x03,
+  0x61, 0x08,
+  0x40, 0x11,
+  0x10, 0x2c,
+  0x09, 0x60,
+  0x00, 0x94,
+  0x52, 0x40,
+  0xa4, 0x24,
+  0x82, 0x88
+};
+
+const uint8_t kMaskRandom16_15[30] = {
+  0x55, 0x11,
+  0x22, 0x22,
+  0x11, 0x11,
+  0x80, 0x45,
+  0x20, 0x1a,
+  0x08, 0x68,
+  0x22, 0x84,
+  0x48, 0x09,
+  0x07, 0x01,
+  0x94, 0x20,
+  0x82, 0x06,
+  0x60, 0x48,
+  0x89, 0x80,
+  0x00, 0x8e,
+  0x18, 0x22
+};
+
+const uint8_t kMaskRandom16_16[32] = {
+  0xa4, 0x10,
+  0x01, 0x2a,
+  0x06, 0x42,
+  0x08, 0x68,
+  0x81, 0x90,
+  0x00, 0xf0,
+  0x50, 0x05,
+  0x20, 0x51,
+  0x43, 0x08,
+  0x68, 0x80,
+  0x80, 0x0b,
+  0x10, 0x4c,
+  0x12, 0x30,
+  0x40, 0x85,
+  0x0e, 0x04,
+  0x18, 0x12
+};
+
+const uint8_t kMaskRandom16_2[4] = {
+  0xae, 0xae,
+  0x79, 0x79
+};
+
+const uint8_t kMaskRandom16_3[6] = {
+  0xad, 0x2d,
+  0x76, 0x36,
+  0x26, 0xdb
+};
+
+const uint8_t kMaskRandom16_4[8] = {
+  0x55, 0x55,
+  0xaa, 0xaa,
+  0x35, 0x35,
+  0xca, 0xca
+};
+
+const uint8_t kMaskRandom16_5[10] = {
+  0x55, 0x55,
+  0x2a, 0x2a,
+  0x24, 0x25,
+  0x84, 0xc8,
+  0x10, 0xb6
+};
+
+const uint8_t kMaskRandom16_6[12] = {
+  0x51, 0x51,
+  0x0a, 0x2a,
+  0xa2, 0x15,
+  0x84, 0x4a,
+  0x30, 0x92,
+  0x04, 0xac
+};
+
+const uint8_t kMaskRandom16_7[14] = {
+  0x45, 0x51,
+  0x22, 0x2a,
+  0x91, 0x11,
+  0x2e, 0x08,
+  0x48, 0x34,
+  0x90, 0x29,
+  0x09, 0x86
+};
+
+const uint8_t kMaskRandom16_8[16] = {
+  0x20, 0x54,
+  0x18, 0x88,
+  0x84, 0x07,
+  0x60, 0x48,
+  0x12, 0x82,
+  0x81, 0x41,
+  0x40, 0x62,
+  0x16, 0x30
+};
+
+const uint8_t kMaskRandom16_9[18] = {
+  0x55, 0x51,
+  0x22, 0x2a,
+  0x05, 0x85,
+  0x09, 0x4a,
+  0x84, 0x32,
+  0xc0, 0x0d,
+  0x20, 0xa6,
+  0x1a, 0x09,
+  0x44, 0x64
+};
+
+const uint8_t kMaskRandom17_1[6] = {
+  0xff, 0xff, 0x80, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom17_10[60] = {
+  0x55, 0x8c, 0x80, 0x00, 0x00, 0x00,
+  0xaa, 0x27, 0x00, 0x00, 0x00, 0x00,
+  0xa5, 0x32, 0x80, 0x00, 0x00, 0x00,
+  0x62, 0x61, 0x80, 0x00, 0x00, 0x00,
+  0x3c, 0x5c, 0x00, 0x00, 0x00, 0x00,
+  0x8e, 0xcc, 0x00, 0x00, 0x00, 0x00,
+  0x6a, 0x2b, 0x00, 0x00, 0x00, 0x00,
+  0x36, 0x32, 0x80, 0x00, 0x00, 0x00,
+  0xd1, 0x25, 0x80, 0x00, 0x00, 0x00,
+  0xc8, 0x02, 0x80, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom17_11[66] = {
+  0x55, 0x8c, 0x80, 0x00, 0x00, 0x00,
+  0xaa, 0x27, 0x00, 0x00, 0x00, 0x00,
+  0xa5, 0x32, 0x80, 0x00, 0x00, 0x00,
+  0x62, 0x61, 0x80, 0x00, 0x00, 0x00,
+  0x3c, 0x5c, 0x00, 0x00, 0x00, 0x00,
+  0x51, 0x84, 0x80, 0x00, 0x00, 0x00,
+  0xa2, 0x27, 0x00, 0x00, 0x00, 0x00,
+  0x95, 0x51, 0x80, 0x00, 0x00, 0x00,
+  0x4a, 0x1a, 0x00, 0x00, 0x00, 0x00,
+  0x30, 0x68, 0x00, 0x00, 0x00, 0x00,
+  0x2c, 0x89, 0x00, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom17_12[72] = {
+  0x51, 0x84, 0x80, 0x00, 0x00, 0x00,
+  0xa2, 0x27, 0x00, 0x00, 0x00, 0x00,
+  0x95, 0x51, 0x80, 0x00, 0x00, 0x00,
+  0x4a, 0x1a, 0x00, 0x00, 0x00, 0x00,
+  0x30, 0x68, 0x00, 0x00, 0x00, 0x00,
+  0x2c, 0x89, 0x00, 0x00, 0x00, 0x00,
+  0x55, 0x8c, 0x80, 0x00, 0x00, 0x00,
+  0xaa, 0x27, 0x00, 0x00, 0x00, 0x00,
+  0xa5, 0x32, 0x80, 0x00, 0x00, 0x00,
+  0x62, 0x61, 0x80, 0x00, 0x00, 0x00,
+  0x3c, 0x5c, 0x00, 0x00, 0x00, 0x00,
+  0x51, 0x35, 0x00, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom17_13[78] = {
+  0x51, 0x84, 0x80, 0x00, 0x00, 0x00,
+  0xa2, 0x27, 0x00, 0x00, 0x00, 0x00,
+  0x95, 0x51, 0x80, 0x00, 0x00, 0x00,
+  0x4a, 0x1a, 0x00, 0x00, 0x00, 0x00,
+  0x30, 0x68, 0x00, 0x00, 0x00, 0x00,
+  0x2c, 0x89, 0x00, 0x00, 0x00, 0x00,
+  0x15, 0x8c, 0x00, 0x00, 0x00, 0x00,
+  0x8a, 0x47, 0x00, 0x00, 0x00, 0x00,
+  0x25, 0x81, 0x80, 0x00, 0x00, 0x00,
+  0x62, 0x12, 0x80, 0x00, 0x00, 0x00,
+  0x58, 0x58, 0x00, 0x00, 0x00, 0x00,
+  0x0e, 0x28, 0x80, 0x00, 0x00, 0x00,
+  0x83, 0x34, 0x00, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom17_14[84] = {
+  0x15, 0x8c, 0x00, 0x00, 0x00, 0x00,
+  0x8a, 0x47, 0x00, 0x00, 0x00, 0x00,
+  0x25, 0x81, 0x80, 0x00, 0x00, 0x00,
+  0x62, 0x12, 0x80, 0x00, 0x00, 0x00,
+  0x58, 0x58, 0x00, 0x00, 0x00, 0x00,
+  0x0e, 0x28, 0x80, 0x00, 0x00, 0x00,
+  0x83, 0x34, 0x00, 0x00, 0x00, 0x00,
+  0x51, 0x84, 0x80, 0x00, 0x00, 0x00,
+  0xa2, 0x27, 0x00, 0x00, 0x00, 0x00,
+  0x95, 0x51, 0x80, 0x00, 0x00, 0x00,
+  0x4a, 0x1a, 0x00, 0x00, 0x00, 0x00,
+  0x30, 0x68, 0x00, 0x00, 0x00, 0x00,
+  0x2c, 0x89, 0x00, 0x00, 0x00, 0x00,
+  0xb0, 0xde, 0x80, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom17_15[90] = {
+  0x15, 0x8c, 0x00, 0x00, 0x00, 0x00,
+  0x8a, 0x47, 0x00, 0x00, 0x00, 0x00,
+  0x25, 0x81, 0x80, 0x00, 0x00, 0x00,
+  0x62, 0x12, 0x80, 0x00, 0x00, 0x00,
+  0x58, 0x58, 0x00, 0x00, 0x00, 0x00,
+  0x0e, 0x28, 0x80, 0x00, 0x00, 0x00,
+  0x83, 0x34, 0x00, 0x00, 0x00, 0x00,
+  0x25, 0x2c, 0x00, 0x00, 0x00, 0x00,
+  0x8a, 0x91, 0x00, 0x00, 0x00, 0x00,
+  0x91, 0xc0, 0x80, 0x00, 0x00, 0x00,
+  0x68, 0x06, 0x80, 0x00, 0x00, 0x00,
+  0x32, 0xc8, 0x00, 0x00, 0x00, 0x00,
+  0x43, 0x45, 0x00, 0x00, 0x00, 0x00,
+  0xc4, 0x30, 0x80, 0x00, 0x00, 0x00,
+  0x1c, 0xa2, 0x00, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom17_16[96] = {
+  0x25, 0x2c, 0x00, 0x00, 0x00, 0x00,
+  0x8a, 0x91, 0x00, 0x00, 0x00, 0x00,
+  0x91, 0xc0, 0x80, 0x00, 0x00, 0x00,
+  0x68, 0x06, 0x80, 0x00, 0x00, 0x00,
+  0x32, 0xc8, 0x00, 0x00, 0x00, 0x00,
+  0x43, 0x45, 0x00, 0x00, 0x00, 0x00,
+  0xc4, 0x30, 0x80, 0x00, 0x00, 0x00,
+  0x1c, 0xa2, 0x00, 0x00, 0x00, 0x00,
+  0x15, 0x8c, 0x00, 0x00, 0x00, 0x00,
+  0x8a, 0x47, 0x00, 0x00, 0x00, 0x00,
+  0x25, 0x81, 0x80, 0x00, 0x00, 0x00,
+  0x62, 0x12, 0x80, 0x00, 0x00, 0x00,
+  0x58, 0x58, 0x00, 0x00, 0x00, 0x00,
+  0x0e, 0x28, 0x80, 0x00, 0x00, 0x00,
+  0x83, 0x34, 0x00, 0x00, 0x00, 0x00,
+  0x0a, 0x1c, 0x00, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom17_17[102] = {
+  0x25, 0x2c, 0x00, 0x00, 0x00, 0x00,
+  0x8a, 0x91, 0x00, 0x00, 0x00, 0x00,
+  0x91, 0xc0, 0x80, 0x00, 0x00, 0x00,
+  0x68, 0x06, 0x80, 0x00, 0x00, 0x00,
+  0x32, 0xc8, 0x00, 0x00, 0x00, 0x00,
+  0x43, 0x45, 0x00, 0x00, 0x00, 0x00,
+  0xc4, 0x30, 0x80, 0x00, 0x00, 0x00,
+  0x1c, 0xa2, 0x00, 0x00, 0x00, 0x00,
+  0x25, 0x4c, 0x00, 0x00, 0x00, 0x00,
+  0x8a, 0x66, 0x00, 0x00, 0x00, 0x00,
+  0x91, 0x91, 0x00, 0x00, 0x00, 0x00,
+  0x68, 0x42, 0x80, 0x00, 0x00, 0x00,
+  0x32, 0xa4, 0x00, 0x00, 0x00, 0x00,
+  0x43, 0x13, 0x00, 0x00, 0x00, 0x00,
+  0xc4, 0x30, 0x80, 0x00, 0x00, 0x00,
+  0x1c, 0x88, 0x80, 0x00, 0x00, 0x00,
+  0x3c, 0x09, 0x00, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom17_2[12] = {
+  0xce, 0xce, 0x00, 0x00, 0x00, 0x00,
+  0xb9, 0x39, 0x80, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom17_3[18] = {
+  0xcd, 0xcc, 0x00, 0x00, 0x00, 0x00,
+  0x97, 0x27, 0x00, 0x00, 0x00, 0x00,
+  0xb8, 0xd1, 0x80, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom17_4[24] = {
+  0xca, 0xec, 0x00, 0x00, 0x00, 0x00,
+  0xa9, 0x67, 0x00, 0x00, 0x00, 0x00,
+  0x3a, 0xb1, 0x80, 0x00, 0x00, 0x00,
+  0x55, 0x5a, 0x80, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom17_5[30] = {
+  0x55, 0x44, 0x80, 0x00, 0x00, 0x00,
+  0x2a, 0x66, 0x00, 0x00, 0x00, 0x00,
+  0x25, 0xa1, 0x80, 0x00, 0x00, 0x00,
+  0xe2, 0x12, 0x80, 0x00, 0x00, 0x00,
+  0x99, 0x98, 0x00, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom17_6[36] = {
+  0xd1, 0x4c, 0x00, 0x00, 0x00, 0x00,
+  0xa2, 0xc5, 0x00, 0x00, 0x00, 0x00,
+  0x95, 0x30, 0x80, 0x00, 0x00, 0x00,
+  0xca, 0x0a, 0x80, 0x00, 0x00, 0x00,
+  0xa4, 0xaa, 0x00, 0x00, 0x00, 0x00,
+  0x78, 0x15, 0x00, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom17_7[42] = {
+  0x15, 0x44, 0x80, 0x00, 0x00, 0x00,
+  0x8a, 0x23, 0x00, 0x00, 0x00, 0x00,
+  0x85, 0x91, 0x00, 0x00, 0x00, 0x00,
+  0x32, 0x0a, 0x80, 0x00, 0x00, 0x00,
+  0x58, 0x34, 0x00, 0x00, 0x00, 0x00,
+  0x2c, 0x0d, 0x00, 0x00, 0x00, 0x00,
+  0x43, 0xc8, 0x00, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom17_8[48] = {
+  0x64, 0x16, 0x00, 0x00, 0x00, 0x00,
+  0xa2, 0xc2, 0x00, 0x00, 0x00, 0x00,
+  0x51, 0x60, 0x80, 0x00, 0x00, 0x00,
+  0x4a, 0x85, 0x00, 0x00, 0x00, 0x00,
+  0x38, 0x4c, 0x00, 0x00, 0x00, 0x00,
+  0x89, 0x29, 0x00, 0x00, 0x00, 0x00,
+  0x07, 0x11, 0x80, 0x00, 0x00, 0x00,
+  0x94, 0xb0, 0x00, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom17_9[54] = {
+  0x8e, 0xcc, 0x00, 0x00, 0x00, 0x00,
+  0x6a, 0x2b, 0x00, 0x00, 0x00, 0x00,
+  0x36, 0x32, 0x80, 0x00, 0x00, 0x00,
+  0xd1, 0x25, 0x80, 0x00, 0x00, 0x00,
+  0x55, 0x8c, 0x80, 0x00, 0x00, 0x00,
+  0xaa, 0x27, 0x00, 0x00, 0x00, 0x00,
+  0xa5, 0x32, 0x80, 0x00, 0x00, 0x00,
+  0x62, 0x61, 0x80, 0x00, 0x00, 0x00,
+  0x3c, 0x5c, 0x00, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom18_1[6] = {
+  0xff, 0xff, 0xc0, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom18_10[60] = {
+  0x8c, 0xc6, 0x40, 0x00, 0x00, 0x00,
+  0x27, 0x13, 0x80, 0x00, 0x00, 0x00,
+  0x32, 0x99, 0x40, 0x00, 0x00, 0x00,
+  0x61, 0xb0, 0xc0, 0x00, 0x00, 0x00,
+  0x5c, 0x2e, 0x00, 0x00, 0x00, 0x00,
+  0xcc, 0x66, 0x00, 0x00, 0x00, 0x00,
+  0x2b, 0x15, 0x80, 0x00, 0x00, 0x00,
+  0x32, 0x99, 0x40, 0x00, 0x00, 0x00,
+  0x25, 0x92, 0xc0, 0x00, 0x00, 0x00,
+  0xfd, 0x9d, 0xc0, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom18_11[66] = {
+  0x8c, 0xc6, 0x40, 0x00, 0x00, 0x00,
+  0x27, 0x13, 0x80, 0x00, 0x00, 0x00,
+  0x32, 0x99, 0x40, 0x00, 0x00, 0x00,
+  0x61, 0xb0, 0xc0, 0x00, 0x00, 0x00,
+  0x5c, 0x2e, 0x00, 0x00, 0x00, 0x00,
+  0x84, 0xc2, 0x40, 0x00, 0x00, 0x00,
+  0x27, 0x13, 0x80, 0x00, 0x00, 0x00,
+  0x51, 0xa8, 0xc0, 0x00, 0x00, 0x00,
+  0x1a, 0x0d, 0x00, 0x00, 0x00, 0x00,
+  0x68, 0x34, 0x00, 0x00, 0x00, 0x00,
+  0x89, 0x44, 0x80, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom18_12[72] = {
+  0x84, 0xc2, 0x40, 0x00, 0x00, 0x00,
+  0x27, 0x13, 0x80, 0x00, 0x00, 0x00,
+  0x51, 0xa8, 0xc0, 0x00, 0x00, 0x00,
+  0x1a, 0x0d, 0x00, 0x00, 0x00, 0x00,
+  0x68, 0x34, 0x00, 0x00, 0x00, 0x00,
+  0x89, 0x44, 0x80, 0x00, 0x00, 0x00,
+  0x8c, 0xc6, 0x40, 0x00, 0x00, 0x00,
+  0x27, 0x13, 0x80, 0x00, 0x00, 0x00,
+  0x32, 0x99, 0x40, 0x00, 0x00, 0x00,
+  0x61, 0xb0, 0xc0, 0x00, 0x00, 0x00,
+  0x5c, 0x2e, 0x00, 0x00, 0x00, 0x00,
+  0x5b, 0x0c, 0x40, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom18_13[78] = {
+  0x84, 0xc2, 0x40, 0x00, 0x00, 0x00,
+  0x27, 0x13, 0x80, 0x00, 0x00, 0x00,
+  0x51, 0xa8, 0xc0, 0x00, 0x00, 0x00,
+  0x1a, 0x0d, 0x00, 0x00, 0x00, 0x00,
+  0x68, 0x34, 0x00, 0x00, 0x00, 0x00,
+  0x89, 0x44, 0x80, 0x00, 0x00, 0x00,
+  0x8c, 0x46, 0x00, 0x00, 0x00, 0x00,
+  0x47, 0x23, 0x80, 0x00, 0x00, 0x00,
+  0x81, 0xc0, 0xc0, 0x00, 0x00, 0x00,
+  0x12, 0x89, 0x40, 0x00, 0x00, 0x00,
+  0x58, 0x2c, 0x00, 0x00, 0x00, 0x00,
+  0x28, 0x94, 0x40, 0x00, 0x00, 0x00,
+  0x34, 0x1a, 0x00, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom18_14[84] = {
+  0x8c, 0x46, 0x00, 0x00, 0x00, 0x00,
+  0x47, 0x23, 0x80, 0x00, 0x00, 0x00,
+  0x81, 0xc0, 0xc0, 0x00, 0x00, 0x00,
+  0x12, 0x89, 0x40, 0x00, 0x00, 0x00,
+  0x58, 0x2c, 0x00, 0x00, 0x00, 0x00,
+  0x28, 0x94, 0x40, 0x00, 0x00, 0x00,
+  0x34, 0x1a, 0x00, 0x00, 0x00, 0x00,
+  0x84, 0xc2, 0x40, 0x00, 0x00, 0x00,
+  0x27, 0x13, 0x80, 0x00, 0x00, 0x00,
+  0x51, 0xa8, 0xc0, 0x00, 0x00, 0x00,
+  0x1a, 0x0d, 0x00, 0x00, 0x00, 0x00,
+  0x68, 0x34, 0x00, 0x00, 0x00, 0x00,
+  0x89, 0x44, 0x80, 0x00, 0x00, 0x00,
+  0x7f, 0x4f, 0xc0, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom18_15[90] = {
+  0x8c, 0x46, 0x00, 0x00, 0x00, 0x00,
+  0x47, 0x23, 0x80, 0x00, 0x00, 0x00,
+  0x81, 0xc0, 0xc0, 0x00, 0x00, 0x00,
+  0x12, 0x89, 0x40, 0x00, 0x00, 0x00,
+  0x58, 0x2c, 0x00, 0x00, 0x00, 0x00,
+  0x28, 0x94, 0x40, 0x00, 0x00, 0x00,
+  0x34, 0x1a, 0x00, 0x00, 0x00, 0x00,
+  0x2c, 0x16, 0x00, 0x00, 0x00, 0x00,
+  0x91, 0x48, 0x80, 0x00, 0x00, 0x00,
+  0xc0, 0xe0, 0x40, 0x00, 0x00, 0x00,
+  0x06, 0x83, 0x40, 0x00, 0x00, 0x00,
+  0xc8, 0x64, 0x00, 0x00, 0x00, 0x00,
+  0x45, 0x22, 0x80, 0x00, 0x00, 0x00,
+  0x30, 0x98, 0x40, 0x00, 0x00, 0x00,
+  0xa2, 0x51, 0x00, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom18_16[96] = {
+  0x2c, 0x16, 0x00, 0x00, 0x00, 0x00,
+  0x91, 0x48, 0x80, 0x00, 0x00, 0x00,
+  0xc0, 0xe0, 0x40, 0x00, 0x00, 0x00,
+  0x06, 0x83, 0x40, 0x00, 0x00, 0x00,
+  0xc8, 0x64, 0x00, 0x00, 0x00, 0x00,
+  0x45, 0x22, 0x80, 0x00, 0x00, 0x00,
+  0x30, 0x98, 0x40, 0x00, 0x00, 0x00,
+  0xa2, 0x51, 0x00, 0x00, 0x00, 0x00,
+  0x8c, 0x46, 0x00, 0x00, 0x00, 0x00,
+  0x47, 0x23, 0x80, 0x00, 0x00, 0x00,
+  0x81, 0xc0, 0xc0, 0x00, 0x00, 0x00,
+  0x12, 0x89, 0x40, 0x00, 0x00, 0x00,
+  0x58, 0x2c, 0x00, 0x00, 0x00, 0x00,
+  0x28, 0x94, 0x40, 0x00, 0x00, 0x00,
+  0x34, 0x1a, 0x00, 0x00, 0x00, 0x00,
+  0xef, 0xf2, 0x00, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom18_17[102] = {
+  0x2c, 0x16, 0x00, 0x00, 0x00, 0x00,
+  0x91, 0x48, 0x80, 0x00, 0x00, 0x00,
+  0xc0, 0xe0, 0x40, 0x00, 0x00, 0x00,
+  0x06, 0x83, 0x40, 0x00, 0x00, 0x00,
+  0xc8, 0x64, 0x00, 0x00, 0x00, 0x00,
+  0x45, 0x22, 0x80, 0x00, 0x00, 0x00,
+  0x30, 0x98, 0x40, 0x00, 0x00, 0x00,
+  0xa2, 0x51, 0x00, 0x00, 0x00, 0x00,
+  0x4c, 0x26, 0x00, 0x00, 0x00, 0x00,
+  0x66, 0x33, 0x00, 0x00, 0x00, 0x00,
+  0x91, 0x48, 0x80, 0x00, 0x00, 0x00,
+  0x42, 0xa1, 0x40, 0x00, 0x00, 0x00,
+  0xa4, 0x52, 0x00, 0x00, 0x00, 0x00,
+  0x13, 0x09, 0x80, 0x00, 0x00, 0x00,
+  0x30, 0x98, 0x40, 0x00, 0x00, 0x00,
+  0x88, 0xc4, 0x40, 0x00, 0x00, 0x00,
+  0x09, 0x04, 0x80, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom18_18[108] = {
+  0x4c, 0x26, 0x00, 0x00, 0x00, 0x00,
+  0x66, 0x33, 0x00, 0x00, 0x00, 0x00,
+  0x91, 0x48, 0x80, 0x00, 0x00, 0x00,
+  0x42, 0xa1, 0x40, 0x00, 0x00, 0x00,
+  0xa4, 0x52, 0x00, 0x00, 0x00, 0x00,
+  0x13, 0x09, 0x80, 0x00, 0x00, 0x00,
+  0x30, 0x98, 0x40, 0x00, 0x00, 0x00,
+  0x88, 0xc4, 0x40, 0x00, 0x00, 0x00,
+  0x09, 0x04, 0x80, 0x00, 0x00, 0x00,
+  0x2c, 0x16, 0x00, 0x00, 0x00, 0x00,
+  0x91, 0x48, 0x80, 0x00, 0x00, 0x00,
+  0xc0, 0xe0, 0x40, 0x00, 0x00, 0x00,
+  0x06, 0x83, 0x40, 0x00, 0x00, 0x00,
+  0xc8, 0x64, 0x00, 0x00, 0x00, 0x00,
+  0x45, 0x22, 0x80, 0x00, 0x00, 0x00,
+  0x30, 0x98, 0x40, 0x00, 0x00, 0x00,
+  0xa2, 0x51, 0x00, 0x00, 0x00, 0x00,
+  0xd0, 0x03, 0x40, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom18_2[12] = {
+  0xce, 0x67, 0x00, 0x00, 0x00, 0x00,
+  0x39, 0x9c, 0xc0, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom18_3[18] = {
+  0xcc, 0x66, 0x00, 0x00, 0x00, 0x00,
+  0x27, 0x15, 0x80, 0x00, 0x00, 0x00,
+  0x92, 0xc9, 0x40, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom18_4[24] = {
+  0xec, 0x76, 0x00, 0x00, 0x00, 0x00,
+  0x67, 0x33, 0x80, 0x00, 0x00, 0x00,
+  0xb1, 0xd8, 0xc0, 0x00, 0x00, 0x00,
+  0x5a, 0xad, 0x40, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom18_5[30] = {
+  0x4c, 0xa6, 0x40, 0x00, 0x00, 0x00,
+  0x66, 0x33, 0x00, 0x00, 0x00, 0x00,
+  0x19, 0xd0, 0xc0, 0x00, 0x00, 0x00,
+  0x9c, 0x89, 0x40, 0x00, 0x00, 0x00,
+  0xe3, 0x4c, 0x00, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom18_6[36] = {
+  0xcc, 0x26, 0x00, 0x00, 0x00, 0x00,
+  0x45, 0x62, 0x80, 0x00, 0x00, 0x00,
+  0xb0, 0x98, 0x40, 0x00, 0x00, 0x00,
+  0x8a, 0x85, 0x40, 0x00, 0x00, 0x00,
+  0x29, 0x53, 0x00, 0x00, 0x00, 0x00,
+  0xa6, 0x0a, 0x80, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom18_7[42] = {
+  0x44, 0xa2, 0x40, 0x00, 0x00, 0x00,
+  0x23, 0x11, 0x80, 0x00, 0x00, 0x00,
+  0x91, 0x48, 0x80, 0x00, 0x00, 0x00,
+  0x0a, 0x85, 0x40, 0x00, 0x00, 0x00,
+  0x34, 0x1a, 0x00, 0x00, 0x00, 0x00,
+  0x0b, 0x06, 0x80, 0x00, 0x00, 0x00,
+  0xe0, 0x64, 0x00, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom18_8[48] = {
+  0x16, 0x0b, 0x00, 0x00, 0x00, 0x00,
+  0xc2, 0x61, 0x00, 0x00, 0x00, 0x00,
+  0x60, 0xb0, 0x40, 0x00, 0x00, 0x00,
+  0x85, 0x42, 0x80, 0x00, 0x00, 0x00,
+  0x4c, 0x26, 0x00, 0x00, 0x00, 0x00,
+  0x29, 0x14, 0x80, 0x00, 0x00, 0x00,
+  0x11, 0x88, 0xc0, 0x00, 0x00, 0x00,
+  0xb0, 0x58, 0x00, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom18_9[54] = {
+  0x44, 0xa2, 0x40, 0x00, 0x00, 0x00,
+  0x66, 0x26, 0x00, 0x00, 0x00, 0x00,
+  0x90, 0x49, 0x40, 0x00, 0x00, 0x00,
+  0x01, 0xa5, 0x80, 0x00, 0x00, 0x00,
+  0x0e, 0x12, 0x80, 0x00, 0x00, 0x00,
+  0x13, 0x0b, 0x00, 0x00, 0x00, 0x00,
+  0x20, 0xd0, 0x40, 0x00, 0x00, 0x00,
+  0xc2, 0x51, 0x00, 0x00, 0x00, 0x00,
+  0x29, 0x0c, 0x80, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom19_1[6] = {
+  0xff, 0xff, 0xe0, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom19_10[60] = {
+  0x8c, 0xe3, 0x00, 0x00, 0x00, 0x00,
+  0x27, 0x11, 0xc0, 0x00, 0x00, 0x00,
+  0x32, 0x8d, 0x20, 0x00, 0x00, 0x00,
+  0x61, 0x92, 0x60, 0x00, 0x00, 0x00,
+  0x5c, 0x38, 0x80, 0x00, 0x00, 0x00,
+  0xcc, 0x75, 0x00, 0x00, 0x00, 0x00,
+  0x2b, 0x19, 0xc0, 0x00, 0x00, 0x00,
+  0x32, 0xd2, 0x60, 0x00, 0x00, 0x00,
+  0x25, 0x8e, 0xa0, 0x00, 0x00, 0x00,
+  0x50, 0x88, 0xc0, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom19_11[66] = {
+  0x8c, 0xe3, 0x00, 0x00, 0x00, 0x00,
+  0x27, 0x11, 0xc0, 0x00, 0x00, 0x00,
+  0x32, 0x8d, 0x20, 0x00, 0x00, 0x00,
+  0x61, 0x92, 0x60, 0x00, 0x00, 0x00,
+  0x5c, 0x38, 0x80, 0x00, 0x00, 0x00,
+  0x84, 0x87, 0x00, 0x00, 0x00, 0x00,
+  0x27, 0x19, 0x80, 0x00, 0x00, 0x00,
+  0x51, 0x88, 0x60, 0x00, 0x00, 0x00,
+  0x1a, 0x22, 0xa0, 0x00, 0x00, 0x00,
+  0x68, 0x44, 0x40, 0x00, 0x00, 0x00,
+  0x89, 0x70, 0x00, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom19_12[72] = {
+  0x84, 0x87, 0x00, 0x00, 0x00, 0x00,
+  0x27, 0x19, 0x80, 0x00, 0x00, 0x00,
+  0x51, 0x88, 0x60, 0x00, 0x00, 0x00,
+  0x1a, 0x22, 0xa0, 0x00, 0x00, 0x00,
+  0x68, 0x44, 0x40, 0x00, 0x00, 0x00,
+  0x89, 0x70, 0x00, 0x00, 0x00, 0x00,
+  0x8c, 0xe3, 0x00, 0x00, 0x00, 0x00,
+  0x27, 0x11, 0xc0, 0x00, 0x00, 0x00,
+  0x32, 0x8d, 0x20, 0x00, 0x00, 0x00,
+  0x61, 0x92, 0x60, 0x00, 0x00, 0x00,
+  0x5c, 0x38, 0x80, 0x00, 0x00, 0x00,
+  0x90, 0xc8, 0x80, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom19_13[78] = {
+  0x84, 0x87, 0x00, 0x00, 0x00, 0x00,
+  0x27, 0x19, 0x80, 0x00, 0x00, 0x00,
+  0x51, 0x88, 0x60, 0x00, 0x00, 0x00,
+  0x1a, 0x22, 0xa0, 0x00, 0x00, 0x00,
+  0x68, 0x44, 0x40, 0x00, 0x00, 0x00,
+  0x89, 0x70, 0x00, 0x00, 0x00, 0x00,
+  0x8c, 0x23, 0x00, 0x00, 0x00, 0x00,
+  0x47, 0x19, 0x80, 0x00, 0x00, 0x00,
+  0x81, 0x88, 0x60, 0x00, 0x00, 0x00,
+  0x12, 0x86, 0x20, 0x00, 0x00, 0x00,
+  0x58, 0x14, 0x40, 0x00, 0x00, 0x00,
+  0x28, 0xca, 0x00, 0x00, 0x00, 0x00,
+  0x34, 0x60, 0x80, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom19_14[84] = {
+  0x8c, 0x23, 0x00, 0x00, 0x00, 0x00,
+  0x47, 0x19, 0x80, 0x00, 0x00, 0x00,
+  0x81, 0x88, 0x60, 0x00, 0x00, 0x00,
+  0x12, 0x86, 0x20, 0x00, 0x00, 0x00,
+  0x58, 0x14, 0x40, 0x00, 0x00, 0x00,
+  0x28, 0xca, 0x00, 0x00, 0x00, 0x00,
+  0x34, 0x60, 0x80, 0x00, 0x00, 0x00,
+  0x84, 0x87, 0x00, 0x00, 0x00, 0x00,
+  0x27, 0x19, 0x80, 0x00, 0x00, 0x00,
+  0x51, 0x88, 0x60, 0x00, 0x00, 0x00,
+  0x1a, 0x22, 0xa0, 0x00, 0x00, 0x00,
+  0x68, 0x44, 0x40, 0x00, 0x00, 0x00,
+  0x89, 0x70, 0x00, 0x00, 0x00, 0x00,
+  0x6e, 0x27, 0x60, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom19_15[90] = {
+  0x8c, 0x23, 0x00, 0x00, 0x00, 0x00,
+  0x47, 0x19, 0x80, 0x00, 0x00, 0x00,
+  0x81, 0x88, 0x60, 0x00, 0x00, 0x00,
+  0x12, 0x86, 0x20, 0x00, 0x00, 0x00,
+  0x58, 0x14, 0x40, 0x00, 0x00, 0x00,
+  0x28, 0xca, 0x00, 0x00, 0x00, 0x00,
+  0x34, 0x60, 0x80, 0x00, 0x00, 0x00,
+  0x2c, 0x16, 0x00, 0x00, 0x00, 0x00,
+  0x91, 0x40, 0xc0, 0x00, 0x00, 0x00,
+  0xc0, 0xd0, 0x20, 0x00, 0x00, 0x00,
+  0x06, 0x82, 0xa0, 0x00, 0x00, 0x00,
+  0xc8, 0x0c, 0x40, 0x00, 0x00, 0x00,
+  0x45, 0x61, 0x00, 0x00, 0x00, 0x00,
+  0x30, 0x91, 0x40, 0x00, 0x00, 0x00,
+  0xa2, 0x28, 0x20, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom19_16[96] = {
+  0x2c, 0x16, 0x00, 0x00, 0x00, 0x00,
+  0x91, 0x40, 0xc0, 0x00, 0x00, 0x00,
+  0xc0, 0xd0, 0x20, 0x00, 0x00, 0x00,
+  0x06, 0x82, 0xa0, 0x00, 0x00, 0x00,
+  0xc8, 0x0c, 0x40, 0x00, 0x00, 0x00,
+  0x45, 0x61, 0x00, 0x00, 0x00, 0x00,
+  0x30, 0x91, 0x40, 0x00, 0x00, 0x00,
+  0xa2, 0x28, 0x20, 0x00, 0x00, 0x00,
+  0x8c, 0x23, 0x00, 0x00, 0x00, 0x00,
+  0x47, 0x19, 0x80, 0x00, 0x00, 0x00,
+  0x81, 0x88, 0x60, 0x00, 0x00, 0x00,
+  0x12, 0x86, 0x20, 0x00, 0x00, 0x00,
+  0x58, 0x14, 0x40, 0x00, 0x00, 0x00,
+  0x28, 0xca, 0x00, 0x00, 0x00, 0x00,
+  0x34, 0x60, 0x80, 0x00, 0x00, 0x00,
+  0x7e, 0x75, 0xe0, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom19_17[102] = {
+  0x2c, 0x16, 0x00, 0x00, 0x00, 0x00,
+  0x91, 0x40, 0xc0, 0x00, 0x00, 0x00,
+  0xc0, 0xd0, 0x20, 0x00, 0x00, 0x00,
+  0x06, 0x82, 0xa0, 0x00, 0x00, 0x00,
+  0xc8, 0x0c, 0x40, 0x00, 0x00, 0x00,
+  0x45, 0x61, 0x00, 0x00, 0x00, 0x00,
+  0x30, 0x91, 0x40, 0x00, 0x00, 0x00,
+  0xa2, 0x28, 0x20, 0x00, 0x00, 0x00,
+  0x4c, 0x27, 0x00, 0x00, 0x00, 0x00,
+  0x66, 0x71, 0x80, 0x00, 0x00, 0x00,
+  0x91, 0x40, 0xe0, 0x00, 0x00, 0x00,
+  0x42, 0x90, 0xa0, 0x00, 0x00, 0x00,
+  0xa4, 0x29, 0x40, 0x00, 0x00, 0x00,
+  0x13, 0x5a, 0x00, 0x00, 0x00, 0x00,
+  0x30, 0x93, 0x40, 0x00, 0x00, 0x00,
+  0x88, 0xac, 0x20, 0x00, 0x00, 0x00,
+  0x09, 0x0c, 0xc0, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom19_18[108] = {
+  0x4c, 0x27, 0x00, 0x00, 0x00, 0x00,
+  0x66, 0x71, 0x80, 0x00, 0x00, 0x00,
+  0x91, 0x40, 0xe0, 0x00, 0x00, 0x00,
+  0x42, 0x90, 0xa0, 0x00, 0x00, 0x00,
+  0xa4, 0x29, 0x40, 0x00, 0x00, 0x00,
+  0x13, 0x5a, 0x00, 0x00, 0x00, 0x00,
+  0x30, 0x93, 0x40, 0x00, 0x00, 0x00,
+  0x88, 0xac, 0x20, 0x00, 0x00, 0x00,
+  0x09, 0x0c, 0xc0, 0x00, 0x00, 0x00,
+  0x2c, 0x16, 0x00, 0x00, 0x00, 0x00,
+  0x91, 0x40, 0xc0, 0x00, 0x00, 0x00,
+  0xc0, 0xd0, 0x20, 0x00, 0x00, 0x00,
+  0x06, 0x82, 0xa0, 0x00, 0x00, 0x00,
+  0xc8, 0x0c, 0x40, 0x00, 0x00, 0x00,
+  0x45, 0x61, 0x00, 0x00, 0x00, 0x00,
+  0x30, 0x91, 0x40, 0x00, 0x00, 0x00,
+  0xa2, 0x28, 0x20, 0x00, 0x00, 0x00,
+  0x51, 0x97, 0x20, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom19_19[114] = {
+  0x4c, 0x27, 0x00, 0x00, 0x00, 0x00,
+  0x66, 0x71, 0x80, 0x00, 0x00, 0x00,
+  0x91, 0x40, 0xe0, 0x00, 0x00, 0x00,
+  0x42, 0x90, 0xa0, 0x00, 0x00, 0x00,
+  0xa4, 0x29, 0x40, 0x00, 0x00, 0x00,
+  0x13, 0x5a, 0x00, 0x00, 0x00, 0x00,
+  0x30, 0x93, 0x40, 0x00, 0x00, 0x00,
+  0x88, 0xac, 0x20, 0x00, 0x00, 0x00,
+  0x09, 0x0c, 0xc0, 0x00, 0x00, 0x00,
+  0x4c, 0x26, 0x00, 0x00, 0x00, 0x00,
+  0x66, 0x28, 0x80, 0x00, 0x00, 0x00,
+  0x91, 0x50, 0x20, 0x00, 0x00, 0x00,
+  0x42, 0x82, 0x60, 0x00, 0x00, 0x00,
+  0xa4, 0x01, 0xc0, 0x00, 0x00, 0x00,
+  0x13, 0x43, 0x00, 0x00, 0x00, 0x00,
+  0x30, 0x94, 0x80, 0x00, 0x00, 0x00,
+  0x88, 0xa1, 0x20, 0x00, 0x00, 0x00,
+  0x09, 0x4c, 0x00, 0x00, 0x00, 0x00,
+  0xcd, 0x98, 0x40, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom19_2[12] = {
+  0xce, 0x77, 0x00, 0x00, 0x00, 0x00,
+  0x39, 0xcc, 0xe0, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom19_3[18] = {
+  0xcc, 0x67, 0x00, 0x00, 0x00, 0x00,
+  0x27, 0x2c, 0xc0, 0x00, 0x00, 0x00,
+  0x92, 0xd2, 0x60, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom19_4[24] = {
+  0xec, 0x73, 0x00, 0x00, 0x00, 0x00,
+  0x67, 0x19, 0xc0, 0x00, 0x00, 0x00,
+  0xb1, 0xcc, 0x60, 0x00, 0x00, 0x00,
+  0x5a, 0x96, 0xa0, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom19_5[30] = {
+  0x4c, 0xe7, 0x00, 0x00, 0x00, 0x00,
+  0x66, 0x31, 0xc0, 0x00, 0x00, 0x00,
+  0xa1, 0xcc, 0x60, 0x00, 0x00, 0x00,
+  0x92, 0xa6, 0xa0, 0x00, 0x00, 0x00,
+  0xb8, 0x99, 0x80, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom19_6[36] = {
+  0x4c, 0x36, 0x00, 0x00, 0x00, 0x00,
+  0x45, 0x68, 0x80, 0x00, 0x00, 0x00,
+  0x30, 0xd0, 0x60, 0x00, 0x00, 0x00,
+  0x8a, 0x82, 0xa0, 0x00, 0x00, 0x00,
+  0x26, 0x0b, 0x40, 0x00, 0x00, 0x00,
+  0x95, 0x45, 0x00, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom19_7[42] = {
+  0xc4, 0xa3, 0x00, 0x00, 0x00, 0x00,
+  0x23, 0x19, 0x80, 0x00, 0x00, 0x00,
+  0x91, 0x1c, 0x20, 0x00, 0x00, 0x00,
+  0x4a, 0x82, 0xa0, 0x00, 0x00, 0x00,
+  0x34, 0x49, 0x40, 0x00, 0x00, 0x00,
+  0x8b, 0x4a, 0x00, 0x00, 0x00, 0x00,
+  0xc8, 0x24, 0xc0, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom19_8[48] = {
+  0x16, 0x13, 0x80, 0x00, 0x00, 0x00,
+  0xc2, 0x44, 0xc0, 0x00, 0x00, 0x00,
+  0x60, 0xe8, 0x20, 0x00, 0x00, 0x00,
+  0x85, 0x12, 0x60, 0x00, 0x00, 0x00,
+  0xcc, 0x21, 0x40, 0x00, 0x00, 0x00,
+  0x29, 0x63, 0x00, 0x00, 0x00, 0x00,
+  0x11, 0x98, 0xc0, 0x00, 0x00, 0x00,
+  0xb0, 0x0c, 0x60, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom19_9[54] = {
+  0x44, 0xa7, 0x00, 0x00, 0x00, 0x00,
+  0x66, 0x70, 0x80, 0x00, 0x00, 0x00,
+  0x12, 0xc0, 0xe0, 0x00, 0x00, 0x00,
+  0xc3, 0x10, 0xa0, 0x00, 0x00, 0x00,
+  0x8c, 0x29, 0x40, 0x00, 0x00, 0x00,
+  0x11, 0x5b, 0x00, 0x00, 0x00, 0x00,
+  0x21, 0x93, 0x40, 0x00, 0x00, 0x00,
+  0xa2, 0x2c, 0x00, 0x00, 0x00, 0x00,
+  0x18, 0x0c, 0xe0, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom1_1[2] = {
+  0x80, 0x00
+};
+
+const uint8_t kMaskRandom20_1[6] = {
+  0xff, 0xff, 0xf0, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom20_10[60] = {
+  0x4c, 0x13, 0x00, 0x00, 0x00, 0x00,
+  0x51, 0x14, 0x40, 0x00, 0x00, 0x00,
+  0xa0, 0x68, 0x10, 0x00, 0x00, 0x00,
+  0x04, 0xc1, 0x30, 0x00, 0x00, 0x00,
+  0x03, 0x80, 0xe0, 0x00, 0x00, 0x00,
+  0x86, 0x21, 0x80, 0x00, 0x00, 0x00,
+  0x29, 0x0a, 0x40, 0x00, 0x00, 0x00,
+  0x42, 0x50, 0x90, 0x00, 0x00, 0x00,
+  0x98, 0x26, 0x00, 0x00, 0x00, 0x00,
+  0x30, 0x8c, 0x20, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom20_11[66] = {
+  0xc6, 0x31, 0x80, 0x00, 0x00, 0x00,
+  0x23, 0x88, 0xe0, 0x00, 0x00, 0x00,
+  0x1a, 0x46, 0x90, 0x00, 0x00, 0x00,
+  0x24, 0xc9, 0x30, 0x00, 0x00, 0x00,
+  0x71, 0x1c, 0x40, 0x00, 0x00, 0x00,
+  0x0e, 0x03, 0x80, 0x00, 0x00, 0x00,
+  0x33, 0x0c, 0xc0, 0x00, 0x00, 0x00,
+  0x10, 0xc4, 0x30, 0x00, 0x00, 0x00,
+  0x45, 0x51, 0x50, 0x00, 0x00, 0x00,
+  0x88, 0xa2, 0x20, 0x00, 0x00, 0x00,
+  0xe0, 0x38, 0x00, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom20_12[72] = {
+  0x0e, 0x03, 0x80, 0x00, 0x00, 0x00,
+  0x33, 0x0c, 0xc0, 0x00, 0x00, 0x00,
+  0x10, 0xc4, 0x30, 0x00, 0x00, 0x00,
+  0x45, 0x51, 0x50, 0x00, 0x00, 0x00,
+  0x88, 0xa2, 0x20, 0x00, 0x00, 0x00,
+  0xe0, 0x38, 0x00, 0x00, 0x00, 0x00,
+  0xc6, 0x31, 0x80, 0x00, 0x00, 0x00,
+  0x23, 0x88, 0xe0, 0x00, 0x00, 0x00,
+  0x1a, 0x46, 0x90, 0x00, 0x00, 0x00,
+  0x24, 0xc9, 0x30, 0x00, 0x00, 0x00,
+  0x71, 0x1c, 0x40, 0x00, 0x00, 0x00,
+  0xf5, 0xdc, 0x40, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom20_13[78] = {
+  0x0e, 0x03, 0x80, 0x00, 0x00, 0x00,
+  0x33, 0x0c, 0xc0, 0x00, 0x00, 0x00,
+  0x10, 0xc4, 0x30, 0x00, 0x00, 0x00,
+  0x45, 0x51, 0x50, 0x00, 0x00, 0x00,
+  0x88, 0xa2, 0x20, 0x00, 0x00, 0x00,
+  0xe0, 0x38, 0x00, 0x00, 0x00, 0x00,
+  0x46, 0x11, 0x80, 0x00, 0x00, 0x00,
+  0x33, 0x0c, 0xc0, 0x00, 0x00, 0x00,
+  0x10, 0xc4, 0x30, 0x00, 0x00, 0x00,
+  0x0c, 0x43, 0x10, 0x00, 0x00, 0x00,
+  0x28, 0x8a, 0x20, 0x00, 0x00, 0x00,
+  0x94, 0x25, 0x00, 0x00, 0x00, 0x00,
+  0xc1, 0x30, 0x40, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom20_14[84] = {
+  0x46, 0x11, 0x80, 0x00, 0x00, 0x00,
+  0x33, 0x0c, 0xc0, 0x00, 0x00, 0x00,
+  0x10, 0xc4, 0x30, 0x00, 0x00, 0x00,
+  0x0c, 0x43, 0x10, 0x00, 0x00, 0x00,
+  0x28, 0x8a, 0x20, 0x00, 0x00, 0x00,
+  0x94, 0x25, 0x00, 0x00, 0x00, 0x00,
+  0xc1, 0x30, 0x40, 0x00, 0x00, 0x00,
+  0x0e, 0x03, 0x80, 0x00, 0x00, 0x00,
+  0x33, 0x0c, 0xc0, 0x00, 0x00, 0x00,
+  0x10, 0xc4, 0x30, 0x00, 0x00, 0x00,
+  0x45, 0x51, 0x50, 0x00, 0x00, 0x00,
+  0x88, 0xa2, 0x20, 0x00, 0x00, 0x00,
+  0xe0, 0x38, 0x00, 0x00, 0x00, 0x00,
+  0x56, 0x3e, 0x20, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom20_15[90] = {
+  0x46, 0x11, 0x80, 0x00, 0x00, 0x00,
+  0x33, 0x0c, 0xc0, 0x00, 0x00, 0x00,
+  0x10, 0xc4, 0x30, 0x00, 0x00, 0x00,
+  0x0c, 0x43, 0x10, 0x00, 0x00, 0x00,
+  0x28, 0x8a, 0x20, 0x00, 0x00, 0x00,
+  0x94, 0x25, 0x00, 0x00, 0x00, 0x00,
+  0xc1, 0x30, 0x40, 0x00, 0x00, 0x00,
+  0x2c, 0x0b, 0x00, 0x00, 0x00, 0x00,
+  0x81, 0xa0, 0x60, 0x00, 0x00, 0x00,
+  0xa0, 0x68, 0x10, 0x00, 0x00, 0x00,
+  0x05, 0x41, 0x50, 0x00, 0x00, 0x00,
+  0x18, 0x86, 0x20, 0x00, 0x00, 0x00,
+  0xc2, 0x30, 0x80, 0x00, 0x00, 0x00,
+  0x22, 0x88, 0xa0, 0x00, 0x00, 0x00,
+  0x50, 0x54, 0x10, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom20_16[96] = {
+  0x2c, 0x0b, 0x00, 0x00, 0x00, 0x00,
+  0x81, 0xa0, 0x60, 0x00, 0x00, 0x00,
+  0xa0, 0x68, 0x10, 0x00, 0x00, 0x00,
+  0x05, 0x41, 0x50, 0x00, 0x00, 0x00,
+  0x18, 0x86, 0x20, 0x00, 0x00, 0x00,
+  0xc2, 0x30, 0x80, 0x00, 0x00, 0x00,
+  0x22, 0x88, 0xa0, 0x00, 0x00, 0x00,
+  0x50, 0x54, 0x10, 0x00, 0x00, 0x00,
+  0x46, 0x11, 0x80, 0x00, 0x00, 0x00,
+  0x33, 0x0c, 0xc0, 0x00, 0x00, 0x00,
+  0x10, 0xc4, 0x30, 0x00, 0x00, 0x00,
+  0x0c, 0x43, 0x10, 0x00, 0x00, 0x00,
+  0x28, 0x8a, 0x20, 0x00, 0x00, 0x00,
+  0x94, 0x25, 0x00, 0x00, 0x00, 0x00,
+  0xc1, 0x30, 0x40, 0x00, 0x00, 0x00,
+  0x28, 0x1c, 0x60, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom20_17[102] = {
+  0x2c, 0x0b, 0x00, 0x00, 0x00, 0x00,
+  0x81, 0xa0, 0x60, 0x00, 0x00, 0x00,
+  0xa0, 0x68, 0x10, 0x00, 0x00, 0x00,
+  0x05, 0x41, 0x50, 0x00, 0x00, 0x00,
+  0x18, 0x86, 0x20, 0x00, 0x00, 0x00,
+  0xc2, 0x30, 0x80, 0x00, 0x00, 0x00,
+  0x22, 0x88, 0xa0, 0x00, 0x00, 0x00,
+  0x50, 0x54, 0x10, 0x00, 0x00, 0x00,
+  0x4e, 0x13, 0x80, 0x00, 0x00, 0x00,
+  0xe3, 0x38, 0xc0, 0x00, 0x00, 0x00,
+  0x81, 0xe0, 0x70, 0x00, 0x00, 0x00,
+  0x21, 0x48, 0x50, 0x00, 0x00, 0x00,
+  0x52, 0x94, 0xa0, 0x00, 0x00, 0x00,
+  0xb4, 0x2d, 0x00, 0x00, 0x00, 0x00,
+  0x26, 0x89, 0xa0, 0x00, 0x00, 0x00,
+  0x58, 0x56, 0x10, 0x00, 0x00, 0x00,
+  0x19, 0x86, 0x60, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom20_18[108] = {
+  0x4e, 0x13, 0x80, 0x00, 0x00, 0x00,
+  0xe3, 0x38, 0xc0, 0x00, 0x00, 0x00,
+  0x81, 0xe0, 0x70, 0x00, 0x00, 0x00,
+  0x21, 0x48, 0x50, 0x00, 0x00, 0x00,
+  0x52, 0x94, 0xa0, 0x00, 0x00, 0x00,
+  0xb4, 0x2d, 0x00, 0x00, 0x00, 0x00,
+  0x26, 0x89, 0xa0, 0x00, 0x00, 0x00,
+  0x58, 0x56, 0x10, 0x00, 0x00, 0x00,
+  0x19, 0x86, 0x60, 0x00, 0x00, 0x00,
+  0x2c, 0x0b, 0x00, 0x00, 0x00, 0x00,
+  0x81, 0xa0, 0x60, 0x00, 0x00, 0x00,
+  0xa0, 0x68, 0x10, 0x00, 0x00, 0x00,
+  0x05, 0x41, 0x50, 0x00, 0x00, 0x00,
+  0x18, 0x86, 0x20, 0x00, 0x00, 0x00,
+  0xc2, 0x30, 0x80, 0x00, 0x00, 0x00,
+  0x22, 0x88, 0xa0, 0x00, 0x00, 0x00,
+  0x50, 0x54, 0x10, 0x00, 0x00, 0x00,
+  0x21, 0x7b, 0xf0, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom20_19[114] = {
+  0x4e, 0x13, 0x80, 0x00, 0x00, 0x00,
+  0xe3, 0x38, 0xc0, 0x00, 0x00, 0x00,
+  0x81, 0xe0, 0x70, 0x00, 0x00, 0x00,
+  0x21, 0x48, 0x50, 0x00, 0x00, 0x00,
+  0x52, 0x94, 0xa0, 0x00, 0x00, 0x00,
+  0xb4, 0x2d, 0x00, 0x00, 0x00, 0x00,
+  0x26, 0x89, 0xa0, 0x00, 0x00, 0x00,
+  0x58, 0x56, 0x10, 0x00, 0x00, 0x00,
+  0x19, 0x86, 0x60, 0x00, 0x00, 0x00,
+  0x4c, 0x13, 0x00, 0x00, 0x00, 0x00,
+  0x51, 0x14, 0x40, 0x00, 0x00, 0x00,
+  0xa0, 0x68, 0x10, 0x00, 0x00, 0x00,
+  0x04, 0xc1, 0x30, 0x00, 0x00, 0x00,
+  0x03, 0x80, 0xe0, 0x00, 0x00, 0x00,
+  0x86, 0x21, 0x80, 0x00, 0x00, 0x00,
+  0x29, 0x0a, 0x40, 0x00, 0x00, 0x00,
+  0x42, 0x50, 0x90, 0x00, 0x00, 0x00,
+  0x98, 0x26, 0x00, 0x00, 0x00, 0x00,
+  0x30, 0x8c, 0x20, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom20_2[12] = {
+  0xee, 0x3b, 0x80, 0x00, 0x00, 0x00,
+  0x99, 0xe6, 0x70, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom20_20[120] = {
+  0x4c, 0x13, 0x00, 0x00, 0x00, 0x00,
+  0x51, 0x14, 0x40, 0x00, 0x00, 0x00,
+  0xa0, 0x68, 0x10, 0x00, 0x00, 0x00,
+  0x04, 0xc1, 0x30, 0x00, 0x00, 0x00,
+  0x03, 0x80, 0xe0, 0x00, 0x00, 0x00,
+  0x86, 0x21, 0x80, 0x00, 0x00, 0x00,
+  0x29, 0x0a, 0x40, 0x00, 0x00, 0x00,
+  0x42, 0x50, 0x90, 0x00, 0x00, 0x00,
+  0x98, 0x26, 0x00, 0x00, 0x00, 0x00,
+  0x30, 0x8c, 0x20, 0x00, 0x00, 0x00,
+  0x4e, 0x13, 0x80, 0x00, 0x00, 0x00,
+  0xe3, 0x38, 0xc0, 0x00, 0x00, 0x00,
+  0x81, 0xe0, 0x70, 0x00, 0x00, 0x00,
+  0x21, 0x48, 0x50, 0x00, 0x00, 0x00,
+  0x52, 0x94, 0xa0, 0x00, 0x00, 0x00,
+  0xb4, 0x2d, 0x00, 0x00, 0x00, 0x00,
+  0x26, 0x89, 0xa0, 0x00, 0x00, 0x00,
+  0x58, 0x56, 0x10, 0x00, 0x00, 0x00,
+  0x19, 0x86, 0x60, 0x00, 0x00, 0x00,
+  0xf7, 0x8d, 0xa0, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom20_3[18] = {
+  0xce, 0x33, 0x80, 0x00, 0x00, 0x00,
+  0x55, 0x95, 0x60, 0x00, 0x00, 0x00,
+  0xb1, 0x6a, 0x30, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom20_4[24] = {
+  0xe6, 0x39, 0x80, 0x00, 0x00, 0x00,
+  0x33, 0x8c, 0xe0, 0x00, 0x00, 0x00,
+  0x98, 0xe6, 0x30, 0x00, 0x00, 0x00,
+  0x2d, 0x4b, 0x50, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom20_5[30] = {
+  0xce, 0x33, 0x80, 0x00, 0x00, 0x00,
+  0x63, 0x98, 0xe0, 0x00, 0x00, 0x00,
+  0x98, 0xe5, 0x30, 0x00, 0x00, 0x00,
+  0x2b, 0x53, 0x50, 0x00, 0x00, 0x00,
+  0xb4, 0x5c, 0xa0, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom20_6[36] = {
+  0x4c, 0x1b, 0x00, 0x00, 0x00, 0x00,
+  0x51, 0x34, 0x40, 0x00, 0x00, 0x00,
+  0x20, 0xe8, 0x30, 0x00, 0x00, 0x00,
+  0x85, 0x41, 0x50, 0x00, 0x00, 0x00,
+  0x06, 0x86, 0xa0, 0x00, 0x00, 0x00,
+  0x9a, 0x21, 0x80, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom20_7[42] = {
+  0x4e, 0x11, 0x80, 0x00, 0x00, 0x00,
+  0x33, 0x2c, 0x00, 0x00, 0x00, 0x00,
+  0x10, 0x0e, 0xb0, 0x00, 0x00, 0x00,
+  0x81, 0x51, 0x50, 0x00, 0x00, 0x00,
+  0x24, 0xc4, 0xa0, 0x00, 0x00, 0x00,
+  0xd4, 0x23, 0x00, 0x00, 0x00, 0x00,
+  0x0c, 0xa2, 0x60, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom20_8[48] = {
+  0x27, 0x09, 0xc0, 0x00, 0x00, 0x00,
+  0x89, 0xa2, 0x60, 0x00, 0x00, 0x00,
+  0xd0, 0x74, 0x10, 0x00, 0x00, 0x00,
+  0x24, 0xc9, 0x30, 0x00, 0x00, 0x00,
+  0xe2, 0x90, 0xa0, 0x00, 0x00, 0x00,
+  0xc6, 0x31, 0x80, 0x00, 0x00, 0x00,
+  0x31, 0x8c, 0x60, 0x00, 0x00, 0x00,
+  0x18, 0xc6, 0x30, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom20_9[54] = {
+  0x4e, 0x13, 0x80, 0x00, 0x00, 0x00,
+  0x62, 0x38, 0xc0, 0x00, 0x00, 0x00,
+  0x81, 0xe0, 0x70, 0x00, 0x00, 0x00,
+  0xe1, 0x48, 0x50, 0x00, 0x00, 0x00,
+  0x13, 0x94, 0xa0, 0x00, 0x00, 0x00,
+  0xb4, 0x2d, 0x00, 0x00, 0x00, 0x00,
+  0x26, 0x89, 0xa0, 0x00, 0x00, 0x00,
+  0x58, 0x56, 0x10, 0x00, 0x00, 0x00,
+  0x49, 0x86, 0x50, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom21_1[6] = {
+  0xff, 0xff, 0xf8, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom21_10[60] = {
+  0x4c, 0x19, 0x10, 0x00, 0x00, 0x00,
+  0x51, 0x14, 0x50, 0x00, 0x00, 0x00,
+  0xa0, 0x6a, 0x40, 0x00, 0x00, 0x00,
+  0x04, 0xc1, 0x30, 0x00, 0x00, 0x00,
+  0x03, 0xb4, 0x00, 0x00, 0x00, 0x00,
+  0x86, 0x20, 0x90, 0x00, 0x00, 0x00,
+  0x29, 0x08, 0x48, 0x00, 0x00, 0x00,
+  0x42, 0x43, 0x08, 0x00, 0x00, 0x00,
+  0x98, 0x12, 0x80, 0x00, 0x00, 0x00,
+  0x30, 0x84, 0xa8, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom21_11[66] = {
+  0xc6, 0x21, 0xa0, 0x00, 0x00, 0x00,
+  0x23, 0x88, 0xc8, 0x00, 0x00, 0x00,
+  0x1a, 0x45, 0x88, 0x00, 0x00, 0x00,
+  0x24, 0xd3, 0x08, 0x00, 0x00, 0x00,
+  0x71, 0x10, 0x70, 0x00, 0x00, 0x00,
+  0x0e, 0x19, 0x10, 0x00, 0x00, 0x00,
+  0x33, 0x14, 0x50, 0x00, 0x00, 0x00,
+  0x10, 0xc3, 0x28, 0x00, 0x00, 0x00,
+  0x45, 0x68, 0x48, 0x00, 0x00, 0x00,
+  0x88, 0x84, 0xa8, 0x00, 0x00, 0x00,
+  0xe0, 0x22, 0x90, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom21_12[72] = {
+  0x0e, 0x19, 0x10, 0x00, 0x00, 0x00,
+  0x33, 0x14, 0x50, 0x00, 0x00, 0x00,
+  0x10, 0xc3, 0x28, 0x00, 0x00, 0x00,
+  0x45, 0x68, 0x48, 0x00, 0x00, 0x00,
+  0x88, 0x84, 0xa8, 0x00, 0x00, 0x00,
+  0xe0, 0x22, 0x90, 0x00, 0x00, 0x00,
+  0xc6, 0x21, 0xa0, 0x00, 0x00, 0x00,
+  0x23, 0x88, 0xc8, 0x00, 0x00, 0x00,
+  0x1a, 0x45, 0x88, 0x00, 0x00, 0x00,
+  0x24, 0xd3, 0x08, 0x00, 0x00, 0x00,
+  0x71, 0x10, 0x70, 0x00, 0x00, 0x00,
+  0xa0, 0x65, 0x18, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom21_13[78] = {
+  0x0e, 0x19, 0x10, 0x00, 0x00, 0x00,
+  0x33, 0x14, 0x50, 0x00, 0x00, 0x00,
+  0x10, 0xc3, 0x28, 0x00, 0x00, 0x00,
+  0x45, 0x68, 0x48, 0x00, 0x00, 0x00,
+  0x88, 0x84, 0xa8, 0x00, 0x00, 0x00,
+  0xe0, 0x22, 0x90, 0x00, 0x00, 0x00,
+  0x46, 0x11, 0x90, 0x00, 0x00, 0x00,
+  0x33, 0x0c, 0xc8, 0x00, 0x00, 0x00,
+  0x10, 0xe4, 0x60, 0x00, 0x00, 0x00,
+  0x0c, 0x69, 0x08, 0x00, 0x00, 0x00,
+  0x28, 0x94, 0x28, 0x00, 0x00, 0x00,
+  0x94, 0x21, 0x30, 0x00, 0x00, 0x00,
+  0xc1, 0x02, 0x58, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom21_14[84] = {
+  0x46, 0x11, 0x90, 0x00, 0x00, 0x00,
+  0x33, 0x0c, 0xc8, 0x00, 0x00, 0x00,
+  0x10, 0xe4, 0x60, 0x00, 0x00, 0x00,
+  0x0c, 0x69, 0x08, 0x00, 0x00, 0x00,
+  0x28, 0x94, 0x28, 0x00, 0x00, 0x00,
+  0x94, 0x21, 0x30, 0x00, 0x00, 0x00,
+  0xc1, 0x02, 0x58, 0x00, 0x00, 0x00,
+  0x0e, 0x19, 0x10, 0x00, 0x00, 0x00,
+  0x33, 0x14, 0x50, 0x00, 0x00, 0x00,
+  0x10, 0xc3, 0x28, 0x00, 0x00, 0x00,
+  0x45, 0x68, 0x48, 0x00, 0x00, 0x00,
+  0x88, 0x84, 0xa8, 0x00, 0x00, 0x00,
+  0xe0, 0x22, 0x90, 0x00, 0x00, 0x00,
+  0x4d, 0xd0, 0xc0, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom21_15[90] = {
+  0x46, 0x11, 0x90, 0x00, 0x00, 0x00,
+  0x33, 0x0c, 0xc8, 0x00, 0x00, 0x00,
+  0x10, 0xe4, 0x60, 0x00, 0x00, 0x00,
+  0x0c, 0x69, 0x08, 0x00, 0x00, 0x00,
+  0x28, 0x94, 0x28, 0x00, 0x00, 0x00,
+  0x94, 0x21, 0x30, 0x00, 0x00, 0x00,
+  0xc1, 0x02, 0x58, 0x00, 0x00, 0x00,
+  0x2c, 0x03, 0x20, 0x00, 0x00, 0x00,
+  0x81, 0xa0, 0x18, 0x00, 0x00, 0x00,
+  0xa0, 0x68, 0x20, 0x00, 0x00, 0x00,
+  0x05, 0x41, 0x50, 0x00, 0x00, 0x00,
+  0x18, 0x90, 0xc0, 0x00, 0x00, 0x00,
+  0xc2, 0x06, 0x80, 0x00, 0x00, 0x00,
+  0x22, 0x98, 0x08, 0x00, 0x00, 0x00,
+  0x50, 0x45, 0x08, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom21_16[96] = {
+  0x2c, 0x03, 0x20, 0x00, 0x00, 0x00,
+  0x81, 0xa0, 0x18, 0x00, 0x00, 0x00,
+  0xa0, 0x68, 0x20, 0x00, 0x00, 0x00,
+  0x05, 0x41, 0x50, 0x00, 0x00, 0x00,
+  0x18, 0x90, 0xc0, 0x00, 0x00, 0x00,
+  0xc2, 0x06, 0x80, 0x00, 0x00, 0x00,
+  0x22, 0x98, 0x08, 0x00, 0x00, 0x00,
+  0x50, 0x45, 0x08, 0x00, 0x00, 0x00,
+  0x46, 0x11, 0x90, 0x00, 0x00, 0x00,
+  0x33, 0x0c, 0xc8, 0x00, 0x00, 0x00,
+  0x10, 0xe4, 0x60, 0x00, 0x00, 0x00,
+  0x0c, 0x69, 0x08, 0x00, 0x00, 0x00,
+  0x28, 0x94, 0x28, 0x00, 0x00, 0x00,
+  0x94, 0x21, 0x30, 0x00, 0x00, 0x00,
+  0xc1, 0x02, 0x58, 0x00, 0x00, 0x00,
+  0x3b, 0xf5, 0x38, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom21_17[102] = {
+  0x2c, 0x03, 0x20, 0x00, 0x00, 0x00,
+  0x81, 0xa0, 0x18, 0x00, 0x00, 0x00,
+  0xa0, 0x68, 0x20, 0x00, 0x00, 0x00,
+  0x05, 0x41, 0x50, 0x00, 0x00, 0x00,
+  0x18, 0x90, 0xc0, 0x00, 0x00, 0x00,
+  0xc2, 0x06, 0x80, 0x00, 0x00, 0x00,
+  0x22, 0x98, 0x08, 0x00, 0x00, 0x00,
+  0x50, 0x45, 0x08, 0x00, 0x00, 0x00,
+  0x4e, 0x11, 0x90, 0x00, 0x00, 0x00,
+  0xe3, 0x18, 0x98, 0x00, 0x00, 0x00,
+  0x81, 0xe3, 0x00, 0x00, 0x00, 0x00,
+  0x21, 0x40, 0x58, 0x00, 0x00, 0x00,
+  0x52, 0x81, 0xe0, 0x00, 0x00, 0x00,
+  0xb4, 0x28, 0x20, 0x00, 0x00, 0x00,
+  0x26, 0x86, 0x28, 0x00, 0x00, 0x00,
+  0x58, 0x64, 0x40, 0x00, 0x00, 0x00,
+  0x19, 0x9e, 0x00, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom21_18[108] = {
+  0x4e, 0x11, 0x90, 0x00, 0x00, 0x00,
+  0xe3, 0x18, 0x98, 0x00, 0x00, 0x00,
+  0x81, 0xe3, 0x00, 0x00, 0x00, 0x00,
+  0x21, 0x40, 0x58, 0x00, 0x00, 0x00,
+  0x52, 0x81, 0xe0, 0x00, 0x00, 0x00,
+  0xb4, 0x28, 0x20, 0x00, 0x00, 0x00,
+  0x26, 0x86, 0x28, 0x00, 0x00, 0x00,
+  0x58, 0x64, 0x40, 0x00, 0x00, 0x00,
+  0x19, 0x9e, 0x00, 0x00, 0x00, 0x00,
+  0x2c, 0x03, 0x20, 0x00, 0x00, 0x00,
+  0x81, 0xa0, 0x18, 0x00, 0x00, 0x00,
+  0xa0, 0x68, 0x20, 0x00, 0x00, 0x00,
+  0x05, 0x41, 0x50, 0x00, 0x00, 0x00,
+  0x18, 0x90, 0xc0, 0x00, 0x00, 0x00,
+  0xc2, 0x06, 0x80, 0x00, 0x00, 0x00,
+  0x22, 0x98, 0x08, 0x00, 0x00, 0x00,
+  0x50, 0x45, 0x08, 0x00, 0x00, 0x00,
+  0x5a, 0x56, 0x58, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom21_19[114] = {
+  0x4e, 0x11, 0x90, 0x00, 0x00, 0x00,
+  0xe3, 0x18, 0x98, 0x00, 0x00, 0x00,
+  0x81, 0xe3, 0x00, 0x00, 0x00, 0x00,
+  0x21, 0x40, 0x58, 0x00, 0x00, 0x00,
+  0x52, 0x81, 0xe0, 0x00, 0x00, 0x00,
+  0xb4, 0x28, 0x20, 0x00, 0x00, 0x00,
+  0x26, 0x86, 0x28, 0x00, 0x00, 0x00,
+  0x58, 0x64, 0x40, 0x00, 0x00, 0x00,
+  0x19, 0x9e, 0x00, 0x00, 0x00, 0x00,
+  0x4c, 0x19, 0x10, 0x00, 0x00, 0x00,
+  0x51, 0x14, 0x50, 0x00, 0x00, 0x00,
+  0xa0, 0x6a, 0x40, 0x00, 0x00, 0x00,
+  0x04, 0xc1, 0x30, 0x00, 0x00, 0x00,
+  0x03, 0xb4, 0x00, 0x00, 0x00, 0x00,
+  0x86, 0x20, 0x90, 0x00, 0x00, 0x00,
+  0x29, 0x08, 0x48, 0x00, 0x00, 0x00,
+  0x42, 0x43, 0x08, 0x00, 0x00, 0x00,
+  0x98, 0x12, 0x80, 0x00, 0x00, 0x00,
+  0x30, 0x84, 0xa8, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom21_2[12] = {
+  0xee, 0x3b, 0x30, 0x00, 0x00, 0x00,
+  0x99, 0xe6, 0xe8, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom21_20[120] = {
+  0x4c, 0x19, 0x10, 0x00, 0x00, 0x00,
+  0x51, 0x14, 0x50, 0x00, 0x00, 0x00,
+  0xa0, 0x6a, 0x40, 0x00, 0x00, 0x00,
+  0x04, 0xc1, 0x30, 0x00, 0x00, 0x00,
+  0x03, 0xb4, 0x00, 0x00, 0x00, 0x00,
+  0x86, 0x20, 0x90, 0x00, 0x00, 0x00,
+  0x29, 0x08, 0x48, 0x00, 0x00, 0x00,
+  0x42, 0x43, 0x08, 0x00, 0x00, 0x00,
+  0x98, 0x12, 0x80, 0x00, 0x00, 0x00,
+  0x30, 0x84, 0xa8, 0x00, 0x00, 0x00,
+  0x4e, 0x11, 0x90, 0x00, 0x00, 0x00,
+  0xe3, 0x18, 0x98, 0x00, 0x00, 0x00,
+  0x81, 0xe3, 0x00, 0x00, 0x00, 0x00,
+  0x21, 0x40, 0x58, 0x00, 0x00, 0x00,
+  0x52, 0x81, 0xe0, 0x00, 0x00, 0x00,
+  0xb4, 0x28, 0x20, 0x00, 0x00, 0x00,
+  0x26, 0x86, 0x28, 0x00, 0x00, 0x00,
+  0x58, 0x64, 0x40, 0x00, 0x00, 0x00,
+  0x19, 0x9e, 0x00, 0x00, 0x00, 0x00,
+  0x2a, 0x03, 0x30, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom21_21[126] = {
+  0x4c, 0x19, 0x10, 0x00, 0x00, 0x00,
+  0x51, 0x14, 0x50, 0x00, 0x00, 0x00,
+  0xa0, 0x6a, 0x40, 0x00, 0x00, 0x00,
+  0x04, 0xc1, 0x30, 0x00, 0x00, 0x00,
+  0x03, 0xb4, 0x00, 0x00, 0x00, 0x00,
+  0x86, 0x20, 0x90, 0x00, 0x00, 0x00,
+  0x29, 0x08, 0x48, 0x00, 0x00, 0x00,
+  0x42, 0x43, 0x08, 0x00, 0x00, 0x00,
+  0x98, 0x12, 0x80, 0x00, 0x00, 0x00,
+  0x30, 0x84, 0xa8, 0x00, 0x00, 0x00,
+  0x4c, 0x11, 0x90, 0x00, 0x00, 0x00,
+  0x51, 0x0c, 0xc8, 0x00, 0x00, 0x00,
+  0xa0, 0x66, 0x40, 0x00, 0x00, 0x00,
+  0x04, 0xc1, 0x60, 0x00, 0x00, 0x00,
+  0x03, 0xa0, 0x28, 0x00, 0x00, 0x00,
+  0x86, 0x21, 0x10, 0x00, 0x00, 0x00,
+  0x29, 0x10, 0x18, 0x00, 0x00, 0x00,
+  0x42, 0x42, 0xa0, 0x00, 0x00, 0x00,
+  0x98, 0x1a, 0x00, 0x00, 0x00, 0x00,
+  0x30, 0x84, 0x08, 0x00, 0x00, 0x00,
+  0xdf, 0x4c, 0x10, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom21_3[18] = {
+  0xce, 0x32, 0xb0, 0x00, 0x00, 0x00,
+  0x55, 0xdc, 0x50, 0x00, 0x00, 0x00,
+  0xa8, 0xed, 0x88, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom21_4[24] = {
+  0xe6, 0x31, 0x30, 0x00, 0x00, 0x00,
+  0x33, 0x8c, 0x58, 0x00, 0x00, 0x00,
+  0x98, 0xd2, 0xc8, 0x00, 0x00, 0x00,
+  0x2d, 0x4b, 0x28, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom21_5[30] = {
+  0xce, 0x31, 0xb0, 0x00, 0x00, 0x00,
+  0x63, 0x98, 0xd8, 0x00, 0x00, 0x00,
+  0x98, 0xc7, 0x68, 0x00, 0x00, 0x00,
+  0x4d, 0x6b, 0x50, 0x00, 0x00, 0x00,
+  0xb2, 0x6c, 0xa8, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom21_6[36] = {
+  0x4c, 0x19, 0x10, 0x00, 0x00, 0x00,
+  0x51, 0x14, 0x50, 0x00, 0x00, 0x00,
+  0x20, 0xea, 0x08, 0x00, 0x00, 0x00,
+  0x85, 0x41, 0x28, 0x00, 0x00, 0x00,
+  0x06, 0x80, 0xd8, 0x00, 0x00, 0x00,
+  0x8a, 0x24, 0x30, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom21_7[42] = {
+  0xc6, 0x11, 0x90, 0x00, 0x00, 0x00,
+  0x33, 0x04, 0xc8, 0x00, 0x00, 0x00,
+  0x18, 0x67, 0x40, 0x00, 0x00, 0x00,
+  0x45, 0x42, 0xd0, 0x00, 0x00, 0x00,
+  0x12, 0xd4, 0x28, 0x00, 0x00, 0x00,
+  0xb4, 0x28, 0x30, 0x00, 0x00, 0x00,
+  0x29, 0x92, 0x18, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom21_8[48] = {
+  0x07, 0x0a, 0x70, 0x00, 0x00, 0x00,
+  0x49, 0xa8, 0x28, 0x00, 0x00, 0x00,
+  0xb0, 0x7a, 0x00, 0x00, 0x00, 0x00,
+  0x24, 0xc5, 0xc0, 0x00, 0x00, 0x00,
+  0x52, 0x80, 0xe8, 0x00, 0x00, 0x00,
+  0xc6, 0x31, 0x80, 0x00, 0x00, 0x00,
+  0x31, 0x94, 0x18, 0x00, 0x00, 0x00,
+  0x18, 0xc7, 0x08, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom21_9[54] = {
+  0x4e, 0x11, 0x10, 0x00, 0x00, 0x00,
+  0x62, 0x1a, 0x08, 0x00, 0x00, 0x00,
+  0x80, 0xe9, 0x40, 0x00, 0x00, 0x00,
+  0xa1, 0x50, 0x50, 0x00, 0x00, 0x00,
+  0x53, 0x00, 0x68, 0x00, 0x00, 0x00,
+  0xa4, 0x24, 0x30, 0x00, 0x00, 0x00,
+  0x16, 0xa0, 0x88, 0x00, 0x00, 0x00,
+  0x58, 0x45, 0x20, 0x00, 0x00, 0x00,
+  0x29, 0x86, 0x80, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom22_1[6] = {
+  0xff, 0xff, 0xfc, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom22_10[60] = {
+  0xc0, 0x38, 0x88, 0x00, 0x00, 0x00,
+  0x30, 0x0e, 0x28, 0x00, 0x00, 0x00,
+  0xe8, 0x07, 0x00, 0x00, 0x00, 0x00,
+  0x85, 0x08, 0xa8, 0x00, 0x00, 0x00,
+  0xd0, 0x92, 0x10, 0x00, 0x00, 0x00,
+  0x86, 0x50, 0x48, 0x00, 0x00, 0x00,
+  0x4a, 0x68, 0x0c, 0x00, 0x00, 0x00,
+  0x01, 0xa0, 0x74, 0x00, 0x00, 0x00,
+  0x4c, 0x81, 0x90, 0x00, 0x00, 0x00,
+  0x62, 0x24, 0x04, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom22_11[66] = {
+  0x46, 0x48, 0xc8, 0x00, 0x00, 0x00,
+  0x33, 0x26, 0x64, 0x00, 0x00, 0x00,
+  0x99, 0x13, 0x20, 0x00, 0x00, 0x00,
+  0x05, 0x80, 0xb0, 0x00, 0x00, 0x00,
+  0x80, 0xb0, 0x14, 0x00, 0x00, 0x00,
+  0x84, 0x50, 0x88, 0x00, 0x00, 0x00,
+  0x40, 0x68, 0x0c, 0x00, 0x00, 0x00,
+  0x0a, 0x81, 0x50, 0x00, 0x00, 0x00,
+  0x68, 0x0d, 0x00, 0x00, 0x00, 0x00,
+  0x10, 0x22, 0x04, 0x00, 0x00, 0x00,
+  0x30, 0x46, 0x08, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom22_12[72] = {
+  0x64, 0x4c, 0x88, 0x00, 0x00, 0x00,
+  0x51, 0x4a, 0x28, 0x00, 0x00, 0x00,
+  0x0c, 0xa1, 0x94, 0x00, 0x00, 0x00,
+  0xa1, 0x34, 0x24, 0x00, 0x00, 0x00,
+  0x12, 0xa2, 0x54, 0x00, 0x00, 0x00,
+  0x8a, 0x51, 0x48, 0x00, 0x00, 0x00,
+  0x86, 0x90, 0xd0, 0x00, 0x00, 0x00,
+  0x23, 0x24, 0x64, 0x00, 0x00, 0x00,
+  0x16, 0x22, 0xc4, 0x00, 0x00, 0x00,
+  0x4c, 0x29, 0x84, 0x00, 0x00, 0x00,
+  0x41, 0xc8, 0x38, 0x00, 0x00, 0x00,
+  0xf4, 0x18, 0x9c, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom22_13[78] = {
+  0x64, 0x4c, 0x88, 0x00, 0x00, 0x00,
+  0x51, 0x4a, 0x28, 0x00, 0x00, 0x00,
+  0x0c, 0xa1, 0x94, 0x00, 0x00, 0x00,
+  0xa1, 0x34, 0x24, 0x00, 0x00, 0x00,
+  0x12, 0xa2, 0x54, 0x00, 0x00, 0x00,
+  0x8a, 0x51, 0x48, 0x00, 0x00, 0x00,
+  0x46, 0x48, 0xc8, 0x00, 0x00, 0x00,
+  0x33, 0x26, 0x64, 0x00, 0x00, 0x00,
+  0x91, 0x92, 0x30, 0x00, 0x00, 0x00,
+  0xa4, 0x34, 0x84, 0x00, 0x00, 0x00,
+  0x50, 0xaa, 0x14, 0x00, 0x00, 0x00,
+  0x84, 0xd0, 0x98, 0x00, 0x00, 0x00,
+  0x09, 0x61, 0x2c, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom22_14[84] = {
+  0x46, 0x48, 0xc8, 0x00, 0x00, 0x00,
+  0x33, 0x26, 0x64, 0x00, 0x00, 0x00,
+  0x91, 0x92, 0x30, 0x00, 0x00, 0x00,
+  0xa4, 0x34, 0x84, 0x00, 0x00, 0x00,
+  0x50, 0xaa, 0x14, 0x00, 0x00, 0x00,
+  0x84, 0xd0, 0x98, 0x00, 0x00, 0x00,
+  0x09, 0x61, 0x2c, 0x00, 0x00, 0x00,
+  0x64, 0x4c, 0x88, 0x00, 0x00, 0x00,
+  0x51, 0x4a, 0x28, 0x00, 0x00, 0x00,
+  0x0c, 0xa1, 0x94, 0x00, 0x00, 0x00,
+  0xa1, 0x34, 0x24, 0x00, 0x00, 0x00,
+  0x12, 0xa2, 0x54, 0x00, 0x00, 0x00,
+  0x8a, 0x51, 0x48, 0x00, 0x00, 0x00,
+  0xc6, 0xca, 0xe8, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom22_15[90] = {
+  0x46, 0x48, 0xc8, 0x00, 0x00, 0x00,
+  0x33, 0x26, 0x64, 0x00, 0x00, 0x00,
+  0x91, 0x92, 0x30, 0x00, 0x00, 0x00,
+  0xa4, 0x34, 0x84, 0x00, 0x00, 0x00,
+  0x50, 0xaa, 0x14, 0x00, 0x00, 0x00,
+  0x84, 0xd0, 0x98, 0x00, 0x00, 0x00,
+  0x09, 0x61, 0x2c, 0x00, 0x00, 0x00,
+  0x0c, 0x81, 0x90, 0x00, 0x00, 0x00,
+  0x80, 0x70, 0x0c, 0x00, 0x00, 0x00,
+  0xa0, 0x94, 0x10, 0x00, 0x00, 0x00,
+  0x05, 0x40, 0xa8, 0x00, 0x00, 0x00,
+  0x43, 0x08, 0x60, 0x00, 0x00, 0x00,
+  0x1a, 0x03, 0x40, 0x00, 0x00, 0x00,
+  0x60, 0x2c, 0x04, 0x00, 0x00, 0x00,
+  0x14, 0x22, 0x84, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom22_16[96] = {
+  0x0c, 0x81, 0x90, 0x00, 0x00, 0x00,
+  0x80, 0x70, 0x0c, 0x00, 0x00, 0x00,
+  0xa0, 0x94, 0x10, 0x00, 0x00, 0x00,
+  0x05, 0x40, 0xa8, 0x00, 0x00, 0x00,
+  0x43, 0x08, 0x60, 0x00, 0x00, 0x00,
+  0x1a, 0x03, 0x40, 0x00, 0x00, 0x00,
+  0x60, 0x2c, 0x04, 0x00, 0x00, 0x00,
+  0x14, 0x22, 0x84, 0x00, 0x00, 0x00,
+  0x46, 0x48, 0xc8, 0x00, 0x00, 0x00,
+  0x33, 0x26, 0x64, 0x00, 0x00, 0x00,
+  0x91, 0x92, 0x30, 0x00, 0x00, 0x00,
+  0xa4, 0x34, 0x84, 0x00, 0x00, 0x00,
+  0x50, 0xaa, 0x14, 0x00, 0x00, 0x00,
+  0x84, 0xd0, 0x98, 0x00, 0x00, 0x00,
+  0x09, 0x61, 0x2c, 0x00, 0x00, 0x00,
+  0x86, 0xc1, 0x44, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom22_17[102] = {
+  0x0c, 0x81, 0x90, 0x00, 0x00, 0x00,
+  0x80, 0x70, 0x0c, 0x00, 0x00, 0x00,
+  0xa0, 0x94, 0x10, 0x00, 0x00, 0x00,
+  0x05, 0x40, 0xa8, 0x00, 0x00, 0x00,
+  0x43, 0x08, 0x60, 0x00, 0x00, 0x00,
+  0x1a, 0x03, 0x40, 0x00, 0x00, 0x00,
+  0x60, 0x2c, 0x04, 0x00, 0x00, 0x00,
+  0x14, 0x22, 0x84, 0x00, 0x00, 0x00,
+  0x46, 0x48, 0xc8, 0x00, 0x00, 0x00,
+  0x62, 0x6c, 0x4c, 0x00, 0x00, 0x00,
+  0x8c, 0x11, 0x80, 0x00, 0x00, 0x00,
+  0x01, 0x60, 0x2c, 0x00, 0x00, 0x00,
+  0x07, 0x80, 0xf0, 0x00, 0x00, 0x00,
+  0xa0, 0x94, 0x10, 0x00, 0x00, 0x00,
+  0x18, 0xa3, 0x14, 0x00, 0x00, 0x00,
+  0x91, 0x12, 0x20, 0x00, 0x00, 0x00,
+  0x78, 0x0f, 0x00, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom22_18[108] = {
+  0x46, 0x48, 0xc8, 0x00, 0x00, 0x00,
+  0x62, 0x6c, 0x4c, 0x00, 0x00, 0x00,
+  0x8c, 0x11, 0x80, 0x00, 0x00, 0x00,
+  0x01, 0x60, 0x2c, 0x00, 0x00, 0x00,
+  0x07, 0x80, 0xf0, 0x00, 0x00, 0x00,
+  0xa0, 0x94, 0x10, 0x00, 0x00, 0x00,
+  0x18, 0xa3, 0x14, 0x00, 0x00, 0x00,
+  0x91, 0x12, 0x20, 0x00, 0x00, 0x00,
+  0x78, 0x0f, 0x00, 0x00, 0x00, 0x00,
+  0x0c, 0x81, 0x90, 0x00, 0x00, 0x00,
+  0x80, 0x70, 0x0c, 0x00, 0x00, 0x00,
+  0xa0, 0x94, 0x10, 0x00, 0x00, 0x00,
+  0x05, 0x40, 0xa8, 0x00, 0x00, 0x00,
+  0x43, 0x08, 0x60, 0x00, 0x00, 0x00,
+  0x1a, 0x03, 0x40, 0x00, 0x00, 0x00,
+  0x60, 0x2c, 0x04, 0x00, 0x00, 0x00,
+  0x14, 0x22, 0x84, 0x00, 0x00, 0x00,
+  0xe4, 0xd4, 0x6c, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom22_19[114] = {
+  0x46, 0x48, 0xc8, 0x00, 0x00, 0x00,
+  0x62, 0x6c, 0x4c, 0x00, 0x00, 0x00,
+  0x8c, 0x11, 0x80, 0x00, 0x00, 0x00,
+  0x01, 0x60, 0x2c, 0x00, 0x00, 0x00,
+  0x07, 0x80, 0xf0, 0x00, 0x00, 0x00,
+  0xa0, 0x94, 0x10, 0x00, 0x00, 0x00,
+  0x18, 0xa3, 0x14, 0x00, 0x00, 0x00,
+  0x91, 0x12, 0x20, 0x00, 0x00, 0x00,
+  0x78, 0x0f, 0x00, 0x00, 0x00, 0x00,
+  0x64, 0x4c, 0x88, 0x00, 0x00, 0x00,
+  0x51, 0x4a, 0x28, 0x00, 0x00, 0x00,
+  0xa9, 0x15, 0x20, 0x00, 0x00, 0x00,
+  0x04, 0xc0, 0x98, 0x00, 0x00, 0x00,
+  0xd0, 0x1a, 0x00, 0x00, 0x00, 0x00,
+  0x82, 0x50, 0x48, 0x00, 0x00, 0x00,
+  0x21, 0x24, 0x24, 0x00, 0x00, 0x00,
+  0x0c, 0x21, 0x84, 0x00, 0x00, 0x00,
+  0x4a, 0x09, 0x40, 0x00, 0x00, 0x00,
+  0x12, 0xa2, 0x54, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom22_2[12] = {
+  0xec, 0xdd, 0x98, 0x00, 0x00, 0x00,
+  0x9b, 0xb3, 0x74, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom22_20[120] = {
+  0x64, 0x4c, 0x88, 0x00, 0x00, 0x00,
+  0x51, 0x4a, 0x28, 0x00, 0x00, 0x00,
+  0xa9, 0x15, 0x20, 0x00, 0x00, 0x00,
+  0x04, 0xc0, 0x98, 0x00, 0x00, 0x00,
+  0xd0, 0x1a, 0x00, 0x00, 0x00, 0x00,
+  0x82, 0x50, 0x48, 0x00, 0x00, 0x00,
+  0x21, 0x24, 0x24, 0x00, 0x00, 0x00,
+  0x0c, 0x21, 0x84, 0x00, 0x00, 0x00,
+  0x4a, 0x09, 0x40, 0x00, 0x00, 0x00,
+  0x12, 0xa2, 0x54, 0x00, 0x00, 0x00,
+  0x46, 0x48, 0xc8, 0x00, 0x00, 0x00,
+  0x62, 0x6c, 0x4c, 0x00, 0x00, 0x00,
+  0x8c, 0x11, 0x80, 0x00, 0x00, 0x00,
+  0x01, 0x60, 0x2c, 0x00, 0x00, 0x00,
+  0x07, 0x80, 0xf0, 0x00, 0x00, 0x00,
+  0xa0, 0x94, 0x10, 0x00, 0x00, 0x00,
+  0x18, 0xa3, 0x14, 0x00, 0x00, 0x00,
+  0x91, 0x12, 0x20, 0x00, 0x00, 0x00,
+  0x78, 0x0f, 0x00, 0x00, 0x00, 0x00,
+  0x3b, 0x48, 0xc4, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom22_21[126] = {
+  0x64, 0x4c, 0x88, 0x00, 0x00, 0x00,
+  0x51, 0x4a, 0x28, 0x00, 0x00, 0x00,
+  0xa9, 0x15, 0x20, 0x00, 0x00, 0x00,
+  0x04, 0xc0, 0x98, 0x00, 0x00, 0x00,
+  0xd0, 0x1a, 0x00, 0x00, 0x00, 0x00,
+  0x82, 0x50, 0x48, 0x00, 0x00, 0x00,
+  0x21, 0x24, 0x24, 0x00, 0x00, 0x00,
+  0x0c, 0x21, 0x84, 0x00, 0x00, 0x00,
+  0x4a, 0x09, 0x40, 0x00, 0x00, 0x00,
+  0x12, 0xa2, 0x54, 0x00, 0x00, 0x00,
+  0x46, 0x48, 0xc8, 0x00, 0x00, 0x00,
+  0x33, 0x26, 0x64, 0x00, 0x00, 0x00,
+  0x99, 0x13, 0x20, 0x00, 0x00, 0x00,
+  0x05, 0x80, 0xb0, 0x00, 0x00, 0x00,
+  0x80, 0xb0, 0x14, 0x00, 0x00, 0x00,
+  0x84, 0x50, 0x88, 0x00, 0x00, 0x00,
+  0x40, 0x68, 0x0c, 0x00, 0x00, 0x00,
+  0x0a, 0x81, 0x50, 0x00, 0x00, 0x00,
+  0x68, 0x0d, 0x00, 0x00, 0x00, 0x00,
+  0x10, 0x22, 0x04, 0x00, 0x00, 0x00,
+  0x30, 0x46, 0x08, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom22_22[132] = {
+  0x46, 0x48, 0xc8, 0x00, 0x00, 0x00,
+  0x33, 0x26, 0x64, 0x00, 0x00, 0x00,
+  0x99, 0x13, 0x20, 0x00, 0x00, 0x00,
+  0x05, 0x80, 0xb0, 0x00, 0x00, 0x00,
+  0x80, 0xb0, 0x14, 0x00, 0x00, 0x00,
+  0x84, 0x50, 0x88, 0x00, 0x00, 0x00,
+  0x40, 0x68, 0x0c, 0x00, 0x00, 0x00,
+  0x0a, 0x81, 0x50, 0x00, 0x00, 0x00,
+  0x68, 0x0d, 0x00, 0x00, 0x00, 0x00,
+  0x10, 0x22, 0x04, 0x00, 0x00, 0x00,
+  0x30, 0x46, 0x08, 0x00, 0x00, 0x00,
+  0x64, 0x4c, 0x88, 0x00, 0x00, 0x00,
+  0x51, 0x4a, 0x28, 0x00, 0x00, 0x00,
+  0xa9, 0x15, 0x20, 0x00, 0x00, 0x00,
+  0x04, 0xc0, 0x98, 0x00, 0x00, 0x00,
+  0xd0, 0x1a, 0x00, 0x00, 0x00, 0x00,
+  0x82, 0x50, 0x48, 0x00, 0x00, 0x00,
+  0x21, 0x24, 0x24, 0x00, 0x00, 0x00,
+  0x0c, 0x21, 0x84, 0x00, 0x00, 0x00,
+  0x4a, 0x09, 0x40, 0x00, 0x00, 0x00,
+  0x12, 0xa2, 0x54, 0x00, 0x00, 0x00,
+  0x9e, 0xce, 0x88, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom22_3[18] = {
+  0xca, 0xd9, 0x58, 0x00, 0x00, 0x00,
+  0xf1, 0x5e, 0x28, 0x00, 0x00, 0x00,
+  0xb6, 0x35, 0xc4, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom22_4[24] = {
+  0xc4, 0xd8, 0x98, 0x00, 0x00, 0x00,
+  0x31, 0x66, 0x2c, 0x00, 0x00, 0x00,
+  0x4b, 0x29, 0x64, 0x00, 0x00, 0x00,
+  0x2c, 0xa5, 0x94, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom22_5[30] = {
+  0xc6, 0xd8, 0xd8, 0x00, 0x00, 0x00,
+  0x63, 0x6c, 0x6c, 0x00, 0x00, 0x00,
+  0x1d, 0xa3, 0xb4, 0x00, 0x00, 0x00,
+  0xad, 0x55, 0xa8, 0x00, 0x00, 0x00,
+  0xb2, 0xb6, 0x54, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom22_6[36] = {
+  0x64, 0x4c, 0x88, 0x00, 0x00, 0x00,
+  0x51, 0x4a, 0x28, 0x00, 0x00, 0x00,
+  0xa8, 0x35, 0x04, 0x00, 0x00, 0x00,
+  0xc4, 0xa0, 0x94, 0x00, 0x00, 0x00,
+  0x03, 0x60, 0x6c, 0x00, 0x00, 0x00,
+  0x90, 0xd2, 0x18, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom22_7[42] = {
+  0xc6, 0x48, 0xc8, 0x00, 0x00, 0x00,
+  0x13, 0x26, 0x64, 0x00, 0x00, 0x00,
+  0x8d, 0x13, 0xa0, 0x00, 0x00, 0x00,
+  0x8b, 0x41, 0x68, 0x00, 0x00, 0x00,
+  0x52, 0xaa, 0x14, 0x00, 0x00, 0x00,
+  0xa2, 0xd4, 0x18, 0x00, 0x00, 0x00,
+  0x61, 0xa8, 0x2c, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom22_8[48] = {
+  0x28, 0x85, 0x38, 0x00, 0x00, 0x00,
+  0x21, 0xf4, 0x04, 0x00, 0x00, 0x00,
+  0xe9, 0x1d, 0x00, 0x00, 0x00, 0x00,
+  0x17, 0x02, 0xe0, 0x00, 0x00, 0x00,
+  0x83, 0xa0, 0x54, 0x00, 0x00, 0x00,
+  0x46, 0x18, 0xe8, 0x00, 0x00, 0x00,
+  0x50, 0x6a, 0x0c, 0x00, 0x00, 0x00,
+  0x1c, 0x23, 0x84, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom22_9[54] = {
+  0x44, 0x48, 0xc8, 0x00, 0x00, 0x00,
+  0x28, 0x2d, 0x0c, 0x00, 0x00, 0x00,
+  0x25, 0x14, 0xa0, 0x00, 0x00, 0x00,
+  0x59, 0x0a, 0x20, 0x00, 0x00, 0x00,
+  0x03, 0xa0, 0x34, 0x00, 0x00, 0x00,
+  0xc0, 0xd0, 0x18, 0x00, 0x00, 0x00,
+  0xa2, 0x30, 0x44, 0x00, 0x00, 0x00,
+  0x14, 0x82, 0xd0, 0x00, 0x00, 0x00,
+  0x9a, 0x03, 0x80, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom23_1[6] = {
+  0xff, 0xff, 0xfe, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom23_10[60] = {
+  0x64, 0x4a, 0x28, 0x00, 0x00, 0x00,
+  0x51, 0x48, 0xa2, 0x00, 0x00, 0x00,
+  0xa9, 0x10, 0x1a, 0x00, 0x00, 0x00,
+  0x04, 0xc4, 0x84, 0x00, 0x00, 0x00,
+  0xd0, 0x01, 0x44, 0x00, 0x00, 0x00,
+  0x82, 0x40, 0x1c, 0x00, 0x00, 0x00,
+  0x21, 0x37, 0x00, 0x00, 0x00, 0x00,
+  0x0c, 0x21, 0x22, 0x00, 0x00, 0x00,
+  0x4a, 0x0a, 0xc0, 0x00, 0x00, 0x00,
+  0x12, 0xb4, 0x50, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom23_11[66] = {
+  0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+  0x33, 0x24, 0x26, 0x00, 0x00, 0x00,
+  0x99, 0x02, 0x12, 0x00, 0x00, 0x00,
+  0x05, 0x80, 0x0e, 0x00, 0x00, 0x00,
+  0x80, 0xa1, 0x82, 0x00, 0x00, 0x00,
+  0x84, 0x48, 0x18, 0x00, 0x00, 0x00,
+  0x40, 0x6d, 0x40, 0x00, 0x00, 0x00,
+  0x0a, 0x90, 0xc0, 0x00, 0x00, 0x00,
+  0x68, 0x04, 0x90, 0x00, 0x00, 0x00,
+  0x10, 0x31, 0x20, 0x00, 0x00, 0x00,
+  0x30, 0x58, 0x04, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom23_12[72] = {
+  0x64, 0x4a, 0x28, 0x00, 0x00, 0x00,
+  0x51, 0x58, 0xa2, 0x00, 0x00, 0x00,
+  0x0c, 0xa4, 0x30, 0x00, 0x00, 0x00,
+  0xa1, 0x22, 0x46, 0x00, 0x00, 0x00,
+  0x12, 0xa1, 0x1c, 0x00, 0x00, 0x00,
+  0x8a, 0x45, 0xc0, 0x00, 0x00, 0x00,
+  0x86, 0x8a, 0x6c, 0x00, 0x00, 0x00,
+  0x23, 0x2c, 0x84, 0x00, 0x00, 0x00,
+  0x16, 0x21, 0x98, 0x00, 0x00, 0x00,
+  0x4c, 0x30, 0x54, 0x00, 0x00, 0x00,
+  0x41, 0xc1, 0x26, 0x00, 0x00, 0x00,
+  0x19, 0x56, 0xe4, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom23_13[78] = {
+  0x64, 0x4a, 0x28, 0x00, 0x00, 0x00,
+  0x51, 0x58, 0xa2, 0x00, 0x00, 0x00,
+  0x0c, 0xa4, 0x30, 0x00, 0x00, 0x00,
+  0xa1, 0x22, 0x46, 0x00, 0x00, 0x00,
+  0x12, 0xa1, 0x1c, 0x00, 0x00, 0x00,
+  0x8a, 0x45, 0xc0, 0x00, 0x00, 0x00,
+  0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+  0x33, 0x24, 0x26, 0x00, 0x00, 0x00,
+  0x91, 0x92, 0x12, 0x00, 0x00, 0x00,
+  0xa4, 0x20, 0x4a, 0x00, 0x00, 0x00,
+  0x50, 0xa0, 0xd4, 0x00, 0x00, 0x00,
+  0x84, 0xc5, 0x80, 0x00, 0x00, 0x00,
+  0x09, 0x71, 0x0c, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom23_14[84] = {
+  0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+  0x33, 0x24, 0x26, 0x00, 0x00, 0x00,
+  0x91, 0x92, 0x12, 0x00, 0x00, 0x00,
+  0xa4, 0x20, 0x4a, 0x00, 0x00, 0x00,
+  0x50, 0xa0, 0xd4, 0x00, 0x00, 0x00,
+  0x84, 0xc5, 0x80, 0x00, 0x00, 0x00,
+  0x09, 0x71, 0x0c, 0x00, 0x00, 0x00,
+  0x64, 0x4a, 0x28, 0x00, 0x00, 0x00,
+  0x51, 0x58, 0xa2, 0x00, 0x00, 0x00,
+  0x0c, 0xa4, 0x30, 0x00, 0x00, 0x00,
+  0xa1, 0x22, 0x46, 0x00, 0x00, 0x00,
+  0x12, 0xa1, 0x1c, 0x00, 0x00, 0x00,
+  0x8a, 0x45, 0xc0, 0x00, 0x00, 0x00,
+  0x9c, 0x3f, 0xb2, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom23_15[90] = {
+  0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+  0x33, 0x24, 0x26, 0x00, 0x00, 0x00,
+  0x91, 0x92, 0x12, 0x00, 0x00, 0x00,
+  0xa4, 0x20, 0x4a, 0x00, 0x00, 0x00,
+  0x50, 0xa0, 0xd4, 0x00, 0x00, 0x00,
+  0x84, 0xc5, 0x80, 0x00, 0x00, 0x00,
+  0x09, 0x71, 0x0c, 0x00, 0x00, 0x00,
+  0x0c, 0x84, 0x0c, 0x00, 0x00, 0x00,
+  0x80, 0x70, 0x06, 0x00, 0x00, 0x00,
+  0xa0, 0x88, 0x48, 0x00, 0x00, 0x00,
+  0x05, 0x40, 0x32, 0x00, 0x00, 0x00,
+  0x43, 0x02, 0x82, 0x00, 0x00, 0x00,
+  0x1a, 0x01, 0x50, 0x00, 0x00, 0x00,
+  0x60, 0x27, 0x00, 0x00, 0x00, 0x00,
+  0x14, 0x38, 0xa0, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom23_16[96] = {
+  0x0c, 0x84, 0x0c, 0x00, 0x00, 0x00,
+  0x80, 0x70, 0x06, 0x00, 0x00, 0x00,
+  0xa0, 0x88, 0x48, 0x00, 0x00, 0x00,
+  0x05, 0x40, 0x32, 0x00, 0x00, 0x00,
+  0x43, 0x02, 0x82, 0x00, 0x00, 0x00,
+  0x1a, 0x01, 0x50, 0x00, 0x00, 0x00,
+  0x60, 0x27, 0x00, 0x00, 0x00, 0x00,
+  0x14, 0x38, 0xa0, 0x00, 0x00, 0x00,
+  0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+  0x33, 0x24, 0x26, 0x00, 0x00, 0x00,
+  0x91, 0x92, 0x12, 0x00, 0x00, 0x00,
+  0xa4, 0x20, 0x4a, 0x00, 0x00, 0x00,
+  0x50, 0xa0, 0xd4, 0x00, 0x00, 0x00,
+  0x84, 0xc5, 0x80, 0x00, 0x00, 0x00,
+  0x09, 0x71, 0x0c, 0x00, 0x00, 0x00,
+  0xfa, 0xd9, 0xf4, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom23_17[102] = {
+  0x0c, 0x84, 0x0c, 0x00, 0x00, 0x00,
+  0x80, 0x70, 0x06, 0x00, 0x00, 0x00,
+  0xa0, 0x88, 0x48, 0x00, 0x00, 0x00,
+  0x05, 0x40, 0x32, 0x00, 0x00, 0x00,
+  0x43, 0x02, 0x82, 0x00, 0x00, 0x00,
+  0x1a, 0x01, 0x50, 0x00, 0x00, 0x00,
+  0x60, 0x27, 0x00, 0x00, 0x00, 0x00,
+  0x14, 0x38, 0xa0, 0x00, 0x00, 0x00,
+  0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+  0x62, 0x7c, 0x84, 0x00, 0x00, 0x00,
+  0x8c, 0x04, 0x88, 0x00, 0x00, 0x00,
+  0x01, 0x74, 0x22, 0x00, 0x00, 0x00,
+  0x07, 0x83, 0x06, 0x00, 0x00, 0x00,
+  0xa0, 0x80, 0x72, 0x00, 0x00, 0x00,
+  0x18, 0xb1, 0x42, 0x00, 0x00, 0x00,
+  0x91, 0x00, 0x92, 0x00, 0x00, 0x00,
+  0x78, 0x00, 0x1c, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom23_18[108] = {
+  0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+  0x62, 0x7c, 0x84, 0x00, 0x00, 0x00,
+  0x8c, 0x04, 0x88, 0x00, 0x00, 0x00,
+  0x01, 0x74, 0x22, 0x00, 0x00, 0x00,
+  0x07, 0x83, 0x06, 0x00, 0x00, 0x00,
+  0xa0, 0x80, 0x72, 0x00, 0x00, 0x00,
+  0x18, 0xb1, 0x42, 0x00, 0x00, 0x00,
+  0x91, 0x00, 0x92, 0x00, 0x00, 0x00,
+  0x78, 0x00, 0x1c, 0x00, 0x00, 0x00,
+  0x0c, 0x84, 0x0c, 0x00, 0x00, 0x00,
+  0x80, 0x70, 0x06, 0x00, 0x00, 0x00,
+  0xa0, 0x88, 0x48, 0x00, 0x00, 0x00,
+  0x05, 0x40, 0x32, 0x00, 0x00, 0x00,
+  0x43, 0x02, 0x82, 0x00, 0x00, 0x00,
+  0x1a, 0x01, 0x50, 0x00, 0x00, 0x00,
+  0x60, 0x27, 0x00, 0x00, 0x00, 0x00,
+  0x14, 0x38, 0xa0, 0x00, 0x00, 0x00,
+  0x82, 0x32, 0x56, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom23_19[114] = {
+  0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+  0x62, 0x7c, 0x84, 0x00, 0x00, 0x00,
+  0x8c, 0x04, 0x88, 0x00, 0x00, 0x00,
+  0x01, 0x74, 0x22, 0x00, 0x00, 0x00,
+  0x07, 0x83, 0x06, 0x00, 0x00, 0x00,
+  0xa0, 0x80, 0x72, 0x00, 0x00, 0x00,
+  0x18, 0xb1, 0x42, 0x00, 0x00, 0x00,
+  0x91, 0x00, 0x92, 0x00, 0x00, 0x00,
+  0x78, 0x00, 0x1c, 0x00, 0x00, 0x00,
+  0x64, 0x4a, 0x28, 0x00, 0x00, 0x00,
+  0x51, 0x48, 0xa2, 0x00, 0x00, 0x00,
+  0xa9, 0x10, 0x1a, 0x00, 0x00, 0x00,
+  0x04, 0xc4, 0x84, 0x00, 0x00, 0x00,
+  0xd0, 0x01, 0x44, 0x00, 0x00, 0x00,
+  0x82, 0x40, 0x1c, 0x00, 0x00, 0x00,
+  0x21, 0x37, 0x00, 0x00, 0x00, 0x00,
+  0x0c, 0x21, 0x22, 0x00, 0x00, 0x00,
+  0x4a, 0x0a, 0xc0, 0x00, 0x00, 0x00,
+  0x12, 0xb4, 0x50, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom23_2[12] = {
+  0xec, 0xdd, 0x98, 0x00, 0x00, 0x00,
+  0x9b, 0xb2, 0x76, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom23_20[120] = {
+  0x64, 0x4a, 0x28, 0x00, 0x00, 0x00,
+  0x51, 0x48, 0xa2, 0x00, 0x00, 0x00,
+  0xa9, 0x10, 0x1a, 0x00, 0x00, 0x00,
+  0x04, 0xc4, 0x84, 0x00, 0x00, 0x00,
+  0xd0, 0x01, 0x44, 0x00, 0x00, 0x00,
+  0x82, 0x40, 0x1c, 0x00, 0x00, 0x00,
+  0x21, 0x37, 0x00, 0x00, 0x00, 0x00,
+  0x0c, 0x21, 0x22, 0x00, 0x00, 0x00,
+  0x4a, 0x0a, 0xc0, 0x00, 0x00, 0x00,
+  0x12, 0xb4, 0x50, 0x00, 0x00, 0x00,
+  0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+  0x62, 0x7c, 0x84, 0x00, 0x00, 0x00,
+  0x8c, 0x04, 0x88, 0x00, 0x00, 0x00,
+  0x01, 0x74, 0x22, 0x00, 0x00, 0x00,
+  0x07, 0x83, 0x06, 0x00, 0x00, 0x00,
+  0xa0, 0x80, 0x72, 0x00, 0x00, 0x00,
+  0x18, 0xb1, 0x42, 0x00, 0x00, 0x00,
+  0x91, 0x00, 0x92, 0x00, 0x00, 0x00,
+  0x78, 0x00, 0x1c, 0x00, 0x00, 0x00,
+  0xdb, 0x4a, 0x7a, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom23_21[126] = {
+  0x64, 0x4a, 0x28, 0x00, 0x00, 0x00,
+  0x51, 0x48, 0xa2, 0x00, 0x00, 0x00,
+  0xa9, 0x10, 0x1a, 0x00, 0x00, 0x00,
+  0x04, 0xc4, 0x84, 0x00, 0x00, 0x00,
+  0xd0, 0x01, 0x44, 0x00, 0x00, 0x00,
+  0x82, 0x40, 0x1c, 0x00, 0x00, 0x00,
+  0x21, 0x37, 0x00, 0x00, 0x00, 0x00,
+  0x0c, 0x21, 0x22, 0x00, 0x00, 0x00,
+  0x4a, 0x0a, 0xc0, 0x00, 0x00, 0x00,
+  0x12, 0xb4, 0x50, 0x00, 0x00, 0x00,
+  0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+  0x33, 0x24, 0x26, 0x00, 0x00, 0x00,
+  0x99, 0x02, 0x12, 0x00, 0x00, 0x00,
+  0x05, 0x80, 0x0e, 0x00, 0x00, 0x00,
+  0x80, 0xa1, 0x82, 0x00, 0x00, 0x00,
+  0x84, 0x48, 0x18, 0x00, 0x00, 0x00,
+  0x40, 0x6d, 0x40, 0x00, 0x00, 0x00,
+  0x0a, 0x90, 0xc0, 0x00, 0x00, 0x00,
+  0x68, 0x04, 0x90, 0x00, 0x00, 0x00,
+  0x10, 0x31, 0x20, 0x00, 0x00, 0x00,
+  0x30, 0x58, 0x04, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom23_22[132] = {
+  0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+  0x33, 0x24, 0x26, 0x00, 0x00, 0x00,
+  0x99, 0x02, 0x12, 0x00, 0x00, 0x00,
+  0x05, 0x80, 0x0e, 0x00, 0x00, 0x00,
+  0x80, 0xa1, 0x82, 0x00, 0x00, 0x00,
+  0x84, 0x48, 0x18, 0x00, 0x00, 0x00,
+  0x40, 0x6d, 0x40, 0x00, 0x00, 0x00,
+  0x0a, 0x90, 0xc0, 0x00, 0x00, 0x00,
+  0x68, 0x04, 0x90, 0x00, 0x00, 0x00,
+  0x10, 0x31, 0x20, 0x00, 0x00, 0x00,
+  0x30, 0x58, 0x04, 0x00, 0x00, 0x00,
+  0x64, 0x4a, 0x28, 0x00, 0x00, 0x00,
+  0x51, 0x48, 0xa2, 0x00, 0x00, 0x00,
+  0xa9, 0x10, 0x1a, 0x00, 0x00, 0x00,
+  0x04, 0xc4, 0x84, 0x00, 0x00, 0x00,
+  0xd0, 0x01, 0x44, 0x00, 0x00, 0x00,
+  0x82, 0x40, 0x1c, 0x00, 0x00, 0x00,
+  0x21, 0x37, 0x00, 0x00, 0x00, 0x00,
+  0x0c, 0x21, 0x22, 0x00, 0x00, 0x00,
+  0x4a, 0x0a, 0xc0, 0x00, 0x00, 0x00,
+  0x12, 0xb4, 0x50, 0x00, 0x00, 0x00,
+  0xea, 0x8d, 0x1a, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom23_23[138] = {
+  0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+  0x33, 0x24, 0x26, 0x00, 0x00, 0x00,
+  0x99, 0x02, 0x12, 0x00, 0x00, 0x00,
+  0x05, 0x80, 0x0e, 0x00, 0x00, 0x00,
+  0x80, 0xa1, 0x82, 0x00, 0x00, 0x00,
+  0x84, 0x48, 0x18, 0x00, 0x00, 0x00,
+  0x40, 0x6d, 0x40, 0x00, 0x00, 0x00,
+  0x0a, 0x90, 0xc0, 0x00, 0x00, 0x00,
+  0x68, 0x04, 0x90, 0x00, 0x00, 0x00,
+  0x10, 0x31, 0x20, 0x00, 0x00, 0x00,
+  0x30, 0x58, 0x04, 0x00, 0x00, 0x00,
+  0x46, 0x42, 0x0c, 0x00, 0x00, 0x00,
+  0x33, 0x20, 0x46, 0x00, 0x00, 0x00,
+  0x99, 0x08, 0x0a, 0x00, 0x00, 0x00,
+  0x05, 0x84, 0x30, 0x00, 0x00, 0x00,
+  0x80, 0xb0, 0x22, 0x00, 0x00, 0x00,
+  0x84, 0x42, 0x90, 0x00, 0x00, 0x00,
+  0x40, 0x73, 0x00, 0x00, 0x00, 0x00,
+  0x0a, 0x81, 0x12, 0x00, 0x00, 0x00,
+  0x68, 0x0c, 0x40, 0x00, 0x00, 0x00,
+  0x10, 0x24, 0x84, 0x00, 0x00, 0x00,
+  0x30, 0x51, 0x40, 0x00, 0x00, 0x00,
+  0x5f, 0x50, 0x88, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom23_3[18] = {
+  0xca, 0xd3, 0x64, 0x00, 0x00, 0x00,
+  0xf1, 0x49, 0x3a, 0x00, 0x00, 0x00,
+  0x76, 0x27, 0xd0, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom23_4[24] = {
+  0xc4, 0xd1, 0x64, 0x00, 0x00, 0x00,
+  0x31, 0x62, 0x96, 0x00, 0x00, 0x00,
+  0x4b, 0x24, 0x5a, 0x00, 0x00, 0x00,
+  0x2c, 0xa8, 0xaa, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom23_5[30] = {
+  0xc6, 0xca, 0x6c, 0x00, 0x00, 0x00,
+  0x63, 0x6c, 0x96, 0x00, 0x00, 0x00,
+  0x1d, 0xa1, 0xdc, 0x00, 0x00, 0x00,
+  0xad, 0x55, 0x38, 0x00, 0x00, 0x00,
+  0xb2, 0xb7, 0x06, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom23_6[36] = {
+  0x64, 0x4a, 0x28, 0x00, 0x00, 0x00,
+  0x51, 0x58, 0xa2, 0x00, 0x00, 0x00,
+  0x0c, 0xa4, 0x30, 0x00, 0x00, 0x00,
+  0xa1, 0x22, 0x46, 0x00, 0x00, 0x00,
+  0x12, 0xa1, 0x1c, 0x00, 0x00, 0x00,
+  0x8a, 0x45, 0xc0, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom23_7[42] = {
+  0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+  0x33, 0x24, 0x26, 0x00, 0x00, 0x00,
+  0x91, 0x92, 0x12, 0x00, 0x00, 0x00,
+  0xa4, 0x20, 0x4a, 0x00, 0x00, 0x00,
+  0x50, 0xa0, 0xd4, 0x00, 0x00, 0x00,
+  0x84, 0xc5, 0x80, 0x00, 0x00, 0x00,
+  0x09, 0x71, 0x0c, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom23_8[48] = {
+  0x0c, 0x84, 0x0c, 0x00, 0x00, 0x00,
+  0x80, 0x70, 0x06, 0x00, 0x00, 0x00,
+  0xa0, 0x88, 0x48, 0x00, 0x00, 0x00,
+  0x05, 0x40, 0x32, 0x00, 0x00, 0x00,
+  0x43, 0x02, 0x82, 0x00, 0x00, 0x00,
+  0x1a, 0x01, 0x50, 0x00, 0x00, 0x00,
+  0x60, 0x27, 0x00, 0x00, 0x00, 0x00,
+  0x14, 0x38, 0xa0, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom23_9[54] = {
+  0x46, 0x4a, 0x6c, 0x00, 0x00, 0x00,
+  0x62, 0x7c, 0x84, 0x00, 0x00, 0x00,
+  0x8c, 0x04, 0x88, 0x00, 0x00, 0x00,
+  0x01, 0x74, 0x22, 0x00, 0x00, 0x00,
+  0x07, 0x83, 0x06, 0x00, 0x00, 0x00,
+  0xa0, 0x80, 0x72, 0x00, 0x00, 0x00,
+  0x18, 0xb1, 0x42, 0x00, 0x00, 0x00,
+  0x91, 0x00, 0x92, 0x00, 0x00, 0x00,
+  0x78, 0x00, 0x1c, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_1[6] = {
+  0xff, 0xff, 0xff, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_10[60] = {
+  0x11, 0x45, 0x14, 0x00, 0x00, 0x00,
+  0x45, 0x34, 0x53, 0x00, 0x00, 0x00,
+  0x00, 0x48, 0x05, 0x00, 0x00, 0x00,
+  0x10, 0x83, 0x09, 0x00, 0x00, 0x00,
+  0x4a, 0x14, 0xa1, 0x00, 0x00, 0x00,
+  0x40, 0xa4, 0x0a, 0x00, 0x00, 0x00,
+  0xa0, 0x6a, 0x02, 0x00, 0x00, 0x00,
+  0x88, 0x80, 0x8c, 0x00, 0x00, 0x00,
+  0x86, 0x08, 0x60, 0x00, 0x00, 0x00,
+  0x54, 0x0d, 0x40, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_11[66] = {
+  0x53, 0x65, 0x34, 0x00, 0x00, 0x00,
+  0xa0, 0x32, 0x11, 0x00, 0x00, 0x00,
+  0x15, 0x11, 0x41, 0x00, 0x00, 0x00,
+  0x03, 0x50, 0x15, 0x00, 0x00, 0x00,
+  0x8c, 0x88, 0xc8, 0x00, 0x00, 0x00,
+  0x28, 0x82, 0x88, 0x00, 0x00, 0x00,
+  0x08, 0x48, 0x84, 0x00, 0x00, 0x00,
+  0x99, 0x01, 0x90, 0x00, 0x00, 0x00,
+  0x22, 0x92, 0x29, 0x00, 0x00, 0x00,
+  0x46, 0x04, 0x60, 0x00, 0x00, 0x00,
+  0x8c, 0x2c, 0x02, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_12[72] = {
+  0x10, 0x61, 0x06, 0x00, 0x00, 0x00,
+  0x02, 0x30, 0x23, 0x00, 0x00, 0x00,
+  0x40, 0x54, 0x05, 0x00, 0x00, 0x00,
+  0x21, 0x82, 0x18, 0x00, 0x00, 0x00,
+  0x81, 0x18, 0x11, 0x00, 0x00, 0x00,
+  0x14, 0x81, 0x48, 0x00, 0x00, 0x00,
+  0x98, 0x09, 0x80, 0x00, 0x00, 0x00,
+  0x08, 0x90, 0x89, 0x00, 0x00, 0x00,
+  0x62, 0x06, 0x20, 0x00, 0x00, 0x00,
+  0x24, 0x22, 0x42, 0x00, 0x00, 0x00,
+  0x8a, 0x08, 0xa0, 0x00, 0x00, 0x00,
+  0x84, 0x48, 0x44, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_13[78] = {
+  0x51, 0x45, 0x14, 0x00, 0x00, 0x00,
+  0xc5, 0x1c, 0x51, 0x00, 0x00, 0x00,
+  0x21, 0x82, 0x18, 0x00, 0x00, 0x00,
+  0x12, 0x31, 0x23, 0x00, 0x00, 0x00,
+  0x08, 0xe0, 0x8e, 0x00, 0x00, 0x00,
+  0x2e, 0x02, 0xe0, 0x00, 0x00, 0x00,
+  0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+  0x21, 0x32, 0x13, 0x00, 0x00, 0x00,
+  0x90, 0x99, 0x09, 0x00, 0x00, 0x00,
+  0x02, 0x50, 0x25, 0x00, 0x00, 0x00,
+  0x06, 0xa0, 0x6a, 0x00, 0x00, 0x00,
+  0x2c, 0x02, 0xc0, 0x00, 0x00, 0x00,
+  0x88, 0x68, 0x86, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_14[84] = {
+  0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+  0x21, 0x32, 0x13, 0x00, 0x00, 0x00,
+  0x90, 0x99, 0x09, 0x00, 0x00, 0x00,
+  0x02, 0x50, 0x25, 0x00, 0x00, 0x00,
+  0x06, 0xa0, 0x6a, 0x00, 0x00, 0x00,
+  0x2c, 0x02, 0xc0, 0x00, 0x00, 0x00,
+  0x88, 0x68, 0x86, 0x00, 0x00, 0x00,
+  0x51, 0x45, 0x14, 0x00, 0x00, 0x00,
+  0xc5, 0x1c, 0x51, 0x00, 0x00, 0x00,
+  0x21, 0x82, 0x18, 0x00, 0x00, 0x00,
+  0x12, 0x31, 0x23, 0x00, 0x00, 0x00,
+  0x08, 0xe0, 0x8e, 0x00, 0x00, 0x00,
+  0x2e, 0x02, 0xe0, 0x00, 0x00, 0x00,
+  0xf2, 0xd6, 0x8e, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_15[90] = {
+  0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+  0x21, 0x32, 0x13, 0x00, 0x00, 0x00,
+  0x90, 0x99, 0x09, 0x00, 0x00, 0x00,
+  0x02, 0x50, 0x25, 0x00, 0x00, 0x00,
+  0x06, 0xa0, 0x6a, 0x00, 0x00, 0x00,
+  0x2c, 0x02, 0xc0, 0x00, 0x00, 0x00,
+  0x88, 0x68, 0x86, 0x00, 0x00, 0x00,
+  0x20, 0x62, 0x06, 0x00, 0x00, 0x00,
+  0x80, 0x38, 0x03, 0x00, 0x00, 0x00,
+  0x42, 0x44, 0x24, 0x00, 0x00, 0x00,
+  0x01, 0x90, 0x19, 0x00, 0x00, 0x00,
+  0x14, 0x11, 0x41, 0x00, 0x00, 0x00,
+  0x0a, 0x80, 0xa8, 0x00, 0x00, 0x00,
+  0x38, 0x03, 0x80, 0x00, 0x00, 0x00,
+  0xc5, 0x0c, 0x50, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_16[96] = {
+  0x20, 0x62, 0x06, 0x00, 0x00, 0x00,
+  0x80, 0x38, 0x03, 0x00, 0x00, 0x00,
+  0x42, 0x44, 0x24, 0x00, 0x00, 0x00,
+  0x01, 0x90, 0x19, 0x00, 0x00, 0x00,
+  0x14, 0x11, 0x41, 0x00, 0x00, 0x00,
+  0x0a, 0x80, 0xa8, 0x00, 0x00, 0x00,
+  0x38, 0x03, 0x80, 0x00, 0x00, 0x00,
+  0xc5, 0x0c, 0x50, 0x00, 0x00, 0x00,
+  0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+  0x21, 0x32, 0x13, 0x00, 0x00, 0x00,
+  0x90, 0x99, 0x09, 0x00, 0x00, 0x00,
+  0x02, 0x50, 0x25, 0x00, 0x00, 0x00,
+  0x06, 0xa0, 0x6a, 0x00, 0x00, 0x00,
+  0x2c, 0x02, 0xc0, 0x00, 0x00, 0x00,
+  0x88, 0x68, 0x86, 0x00, 0x00, 0x00,
+  0xff, 0x6e, 0x0a, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_17[102] = {
+  0x20, 0x62, 0x06, 0x00, 0x00, 0x00,
+  0x80, 0x38, 0x03, 0x00, 0x00, 0x00,
+  0x42, 0x44, 0x24, 0x00, 0x00, 0x00,
+  0x01, 0x90, 0x19, 0x00, 0x00, 0x00,
+  0x14, 0x11, 0x41, 0x00, 0x00, 0x00,
+  0x0a, 0x80, 0xa8, 0x00, 0x00, 0x00,
+  0x38, 0x03, 0x80, 0x00, 0x00, 0x00,
+  0xc5, 0x0c, 0x50, 0x00, 0x00, 0x00,
+  0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+  0xe4, 0x2e, 0x42, 0x00, 0x00, 0x00,
+  0x24, 0x42, 0x44, 0x00, 0x00, 0x00,
+  0xa1, 0x1a, 0x11, 0x00, 0x00, 0x00,
+  0x18, 0x31, 0x83, 0x00, 0x00, 0x00,
+  0x03, 0x90, 0x39, 0x00, 0x00, 0x00,
+  0x8a, 0x18, 0xa1, 0x00, 0x00, 0x00,
+  0x04, 0x90, 0x49, 0x00, 0x00, 0x00,
+  0x00, 0xe0, 0x0e, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_18[108] = {
+  0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+  0xe4, 0x2e, 0x42, 0x00, 0x00, 0x00,
+  0x24, 0x42, 0x44, 0x00, 0x00, 0x00,
+  0xa1, 0x1a, 0x11, 0x00, 0x00, 0x00,
+  0x18, 0x31, 0x83, 0x00, 0x00, 0x00,
+  0x03, 0x90, 0x39, 0x00, 0x00, 0x00,
+  0x8a, 0x18, 0xa1, 0x00, 0x00, 0x00,
+  0x04, 0x90, 0x49, 0x00, 0x00, 0x00,
+  0x00, 0xe0, 0x0e, 0x00, 0x00, 0x00,
+  0x20, 0x62, 0x06, 0x00, 0x00, 0x00,
+  0x80, 0x38, 0x03, 0x00, 0x00, 0x00,
+  0x42, 0x44, 0x24, 0x00, 0x00, 0x00,
+  0x01, 0x90, 0x19, 0x00, 0x00, 0x00,
+  0x14, 0x11, 0x41, 0x00, 0x00, 0x00,
+  0x0a, 0x80, 0xa8, 0x00, 0x00, 0x00,
+  0x38, 0x03, 0x80, 0x00, 0x00, 0x00,
+  0xc5, 0x0c, 0x50, 0x00, 0x00, 0x00,
+  0x34, 0x50, 0xae, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_19[114] = {
+  0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+  0xe4, 0x2e, 0x42, 0x00, 0x00, 0x00,
+  0x24, 0x42, 0x44, 0x00, 0x00, 0x00,
+  0xa1, 0x1a, 0x11, 0x00, 0x00, 0x00,
+  0x18, 0x31, 0x83, 0x00, 0x00, 0x00,
+  0x03, 0x90, 0x39, 0x00, 0x00, 0x00,
+  0x8a, 0x18, 0xa1, 0x00, 0x00, 0x00,
+  0x04, 0x90, 0x49, 0x00, 0x00, 0x00,
+  0x00, 0xe0, 0x0e, 0x00, 0x00, 0x00,
+  0x51, 0x45, 0x14, 0x00, 0x00, 0x00,
+  0x45, 0x14, 0x51, 0x00, 0x00, 0x00,
+  0x80, 0xd8, 0x0d, 0x00, 0x00, 0x00,
+  0x24, 0x22, 0x42, 0x00, 0x00, 0x00,
+  0x0a, 0x20, 0xa2, 0x00, 0x00, 0x00,
+  0x00, 0xe0, 0x0e, 0x00, 0x00, 0x00,
+  0xb8, 0x0b, 0x80, 0x00, 0x00, 0x00,
+  0x09, 0x10, 0x91, 0x00, 0x00, 0x00,
+  0x56, 0x05, 0x60, 0x00, 0x00, 0x00,
+  0xa2, 0x8a, 0x28, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_2[12] = {
+  0xec, 0xce, 0xcc, 0x00, 0x00, 0x00,
+  0x93, 0xb9, 0x3b, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_20[120] = {
+  0x51, 0x45, 0x14, 0x00, 0x00, 0x00,
+  0x45, 0x14, 0x51, 0x00, 0x00, 0x00,
+  0x80, 0xd8, 0x0d, 0x00, 0x00, 0x00,
+  0x24, 0x22, 0x42, 0x00, 0x00, 0x00,
+  0x0a, 0x20, 0xa2, 0x00, 0x00, 0x00,
+  0x00, 0xe0, 0x0e, 0x00, 0x00, 0x00,
+  0xb8, 0x0b, 0x80, 0x00, 0x00, 0x00,
+  0x09, 0x10, 0x91, 0x00, 0x00, 0x00,
+  0x56, 0x05, 0x60, 0x00, 0x00, 0x00,
+  0xa2, 0x8a, 0x28, 0x00, 0x00, 0x00,
+  0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+  0xe4, 0x2e, 0x42, 0x00, 0x00, 0x00,
+  0x24, 0x42, 0x44, 0x00, 0x00, 0x00,
+  0xa1, 0x1a, 0x11, 0x00, 0x00, 0x00,
+  0x18, 0x31, 0x83, 0x00, 0x00, 0x00,
+  0x03, 0x90, 0x39, 0x00, 0x00, 0x00,
+  0x8a, 0x18, 0xa1, 0x00, 0x00, 0x00,
+  0x04, 0x90, 0x49, 0x00, 0x00, 0x00,
+  0x00, 0xe0, 0x0e, 0x00, 0x00, 0x00,
+  0x98, 0xa2, 0x95, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_21[126] = {
+  0x51, 0x45, 0x14, 0x00, 0x00, 0x00,
+  0x45, 0x14, 0x51, 0x00, 0x00, 0x00,
+  0x80, 0xd8, 0x0d, 0x00, 0x00, 0x00,
+  0x24, 0x22, 0x42, 0x00, 0x00, 0x00,
+  0x0a, 0x20, 0xa2, 0x00, 0x00, 0x00,
+  0x00, 0xe0, 0x0e, 0x00, 0x00, 0x00,
+  0xb8, 0x0b, 0x80, 0x00, 0x00, 0x00,
+  0x09, 0x10, 0x91, 0x00, 0x00, 0x00,
+  0x56, 0x05, 0x60, 0x00, 0x00, 0x00,
+  0xa2, 0x8a, 0x28, 0x00, 0x00, 0x00,
+  0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+  0x21, 0x32, 0x13, 0x00, 0x00, 0x00,
+  0x10, 0x91, 0x09, 0x00, 0x00, 0x00,
+  0x00, 0x70, 0x07, 0x00, 0x00, 0x00,
+  0x0c, 0x10, 0xc1, 0x00, 0x00, 0x00,
+  0x40, 0xc4, 0x0c, 0x00, 0x00, 0x00,
+  0x6a, 0x06, 0xa0, 0x00, 0x00, 0x00,
+  0x86, 0x08, 0x60, 0x00, 0x00, 0x00,
+  0x24, 0x82, 0x48, 0x00, 0x00, 0x00,
+  0x89, 0x08, 0x90, 0x00, 0x00, 0x00,
+  0xc0, 0x2c, 0x02, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_22[132] = {
+  0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+  0x21, 0x32, 0x13, 0x00, 0x00, 0x00,
+  0x10, 0x91, 0x09, 0x00, 0x00, 0x00,
+  0x00, 0x70, 0x07, 0x00, 0x00, 0x00,
+  0x0c, 0x10, 0xc1, 0x00, 0x00, 0x00,
+  0x40, 0xc4, 0x0c, 0x00, 0x00, 0x00,
+  0x6a, 0x06, 0xa0, 0x00, 0x00, 0x00,
+  0x86, 0x08, 0x60, 0x00, 0x00, 0x00,
+  0x24, 0x82, 0x48, 0x00, 0x00, 0x00,
+  0x89, 0x08, 0x90, 0x00, 0x00, 0x00,
+  0xc0, 0x2c, 0x02, 0x00, 0x00, 0x00,
+  0x51, 0x45, 0x14, 0x00, 0x00, 0x00,
+  0x45, 0x14, 0x51, 0x00, 0x00, 0x00,
+  0x80, 0xd8, 0x0d, 0x00, 0x00, 0x00,
+  0x24, 0x22, 0x42, 0x00, 0x00, 0x00,
+  0x0a, 0x20, 0xa2, 0x00, 0x00, 0x00,
+  0x00, 0xe0, 0x0e, 0x00, 0x00, 0x00,
+  0xb8, 0x0b, 0x80, 0x00, 0x00, 0x00,
+  0x09, 0x10, 0x91, 0x00, 0x00, 0x00,
+  0x56, 0x05, 0x60, 0x00, 0x00, 0x00,
+  0xa2, 0x8a, 0x28, 0x00, 0x00, 0x00,
+  0x1a, 0xaa, 0xee, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_23[138] = {
+  0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+  0x21, 0x32, 0x13, 0x00, 0x00, 0x00,
+  0x10, 0x91, 0x09, 0x00, 0x00, 0x00,
+  0x00, 0x70, 0x07, 0x00, 0x00, 0x00,
+  0x0c, 0x10, 0xc1, 0x00, 0x00, 0x00,
+  0x40, 0xc4, 0x0c, 0x00, 0x00, 0x00,
+  0x6a, 0x06, 0xa0, 0x00, 0x00, 0x00,
+  0x86, 0x08, 0x60, 0x00, 0x00, 0x00,
+  0x24, 0x82, 0x48, 0x00, 0x00, 0x00,
+  0x89, 0x08, 0x90, 0x00, 0x00, 0x00,
+  0xc0, 0x2c, 0x02, 0x00, 0x00, 0x00,
+  0x10, 0x61, 0x06, 0x00, 0x00, 0x00,
+  0x02, 0x30, 0x23, 0x00, 0x00, 0x00,
+  0x40, 0x54, 0x05, 0x00, 0x00, 0x00,
+  0x21, 0x82, 0x18, 0x00, 0x00, 0x00,
+  0x81, 0x18, 0x11, 0x00, 0x00, 0x00,
+  0x14, 0x81, 0x48, 0x00, 0x00, 0x00,
+  0x98, 0x09, 0x80, 0x00, 0x00, 0x00,
+  0x08, 0x90, 0x89, 0x00, 0x00, 0x00,
+  0x62, 0x06, 0x20, 0x00, 0x00, 0x00,
+  0x24, 0x22, 0x42, 0x00, 0x00, 0x00,
+  0x8a, 0x08, 0xa0, 0x00, 0x00, 0x00,
+  0x84, 0x48, 0x44, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_24[144] = {
+  0x10, 0x61, 0x06, 0x00, 0x00, 0x00,
+  0x02, 0x30, 0x23, 0x00, 0x00, 0x00,
+  0x40, 0x54, 0x05, 0x00, 0x00, 0x00,
+  0x21, 0x82, 0x18, 0x00, 0x00, 0x00,
+  0x81, 0x18, 0x11, 0x00, 0x00, 0x00,
+  0x14, 0x81, 0x48, 0x00, 0x00, 0x00,
+  0x98, 0x09, 0x80, 0x00, 0x00, 0x00,
+  0x08, 0x90, 0x89, 0x00, 0x00, 0x00,
+  0x62, 0x06, 0x20, 0x00, 0x00, 0x00,
+  0x24, 0x22, 0x42, 0x00, 0x00, 0x00,
+  0x8a, 0x08, 0xa0, 0x00, 0x00, 0x00,
+  0x84, 0x48, 0x44, 0x00, 0x00, 0x00,
+  0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+  0x21, 0x32, 0x13, 0x00, 0x00, 0x00,
+  0x10, 0x91, 0x09, 0x00, 0x00, 0x00,
+  0x00, 0x70, 0x07, 0x00, 0x00, 0x00,
+  0x0c, 0x10, 0xc1, 0x00, 0x00, 0x00,
+  0x40, 0xc4, 0x0c, 0x00, 0x00, 0x00,
+  0x6a, 0x06, 0xa0, 0x00, 0x00, 0x00,
+  0x86, 0x08, 0x60, 0x00, 0x00, 0x00,
+  0x24, 0x82, 0x48, 0x00, 0x00, 0x00,
+  0x89, 0x08, 0x90, 0x00, 0x00, 0x00,
+  0xc0, 0x2c, 0x02, 0x00, 0x00, 0x00,
+  0x88, 0x32, 0x59, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_3[18] = {
+  0x9b, 0x29, 0xb2, 0x00, 0x00, 0x00,
+  0x49, 0xd4, 0x9d, 0x00, 0x00, 0x00,
+  0x3e, 0x83, 0xe8, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_4[24] = {
+  0x8b, 0x28, 0xb2, 0x00, 0x00, 0x00,
+  0x14, 0xb1, 0x4b, 0x00, 0x00, 0x00,
+  0x22, 0xd2, 0x2d, 0x00, 0x00, 0x00,
+  0x45, 0x54, 0x55, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_5[30] = {
+  0x53, 0x65, 0x36, 0x00, 0x00, 0x00,
+  0x64, 0xb6, 0x4b, 0x00, 0x00, 0x00,
+  0x0e, 0xe0, 0xee, 0x00, 0x00, 0x00,
+  0xa9, 0xca, 0x9c, 0x00, 0x00, 0x00,
+  0xb8, 0x3b, 0x83, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_6[36] = {
+  0xd1, 0x4d, 0x14, 0x00, 0x00, 0x00,
+  0x45, 0x34, 0x53, 0x00, 0x00, 0x00,
+  0x22, 0xd2, 0x2d, 0x00, 0x00, 0x00,
+  0x16, 0xc1, 0x6c, 0x00, 0x00, 0x00,
+  0x0b, 0xa0, 0xba, 0x00, 0x00, 0x00,
+  0xe8, 0x8e, 0x88, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_7[42] = {
+  0xd3, 0x65, 0x36, 0x00, 0x00, 0x00,
+  0x25, 0x32, 0x53, 0x00, 0x00, 0x00,
+  0x30, 0xd3, 0x05, 0x00, 0x00, 0x00,
+  0x06, 0x48, 0x6c, 0x00, 0x00, 0x00,
+  0xc0, 0xb8, 0x1b, 0x00, 0x00, 0x00,
+  0x2a, 0xa2, 0xaa, 0x00, 0x00, 0x00,
+  0xa8, 0x4e, 0x84, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_8[48] = {
+  0x81, 0x60, 0x16, 0x00, 0x00, 0x00,
+  0x40, 0x3c, 0x03, 0x00, 0x00, 0x00,
+  0x10, 0x91, 0x09, 0x00, 0x00, 0x00,
+  0x06, 0x50, 0x65, 0x00, 0x00, 0x00,
+  0x20, 0x4a, 0x84, 0x00, 0x00, 0x00,
+  0x8a, 0xa0, 0xaa, 0x00, 0x00, 0x00,
+  0x33, 0x03, 0x30, 0x00, 0x00, 0x00,
+  0x4c, 0x84, 0xc8, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom24_9[54] = {
+  0xd3, 0x65, 0x36, 0x00, 0x00, 0x00,
+  0x64, 0x26, 0x42, 0x00, 0x00, 0x00,
+  0x18, 0x41, 0xc4, 0x00, 0x00, 0x00,
+  0xa0, 0x4a, 0x04, 0x00, 0x00, 0x00,
+  0x81, 0x38, 0x13, 0x00, 0x00, 0x00,
+  0x22, 0xa2, 0x2a, 0x00, 0x00, 0x00,
+  0x08, 0x70, 0x87, 0x00, 0x00, 0x00,
+  0x04, 0x90, 0x49, 0x00, 0x00, 0x00,
+  0x01, 0xc0, 0x1c, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_1[6] = {
+  0xff, 0xff, 0xff, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_10[60] = {
+  0x51, 0x4d, 0x10, 0x00, 0x00, 0x00,
+  0x45, 0x14, 0x45, 0x00, 0x00, 0x00,
+  0x80, 0xd1, 0x09, 0x80, 0x00, 0x00,
+  0x24, 0x2a, 0x05, 0x00, 0x00, 0x00,
+  0x0a, 0x24, 0xa0, 0x80, 0x00, 0x00,
+  0x00, 0xe4, 0x03, 0x00, 0x00, 0x00,
+  0xb8, 0x08, 0x02, 0x80, 0x00, 0x00,
+  0x09, 0x10, 0xc9, 0x00, 0x00, 0x00,
+  0x56, 0x00, 0x58, 0x80, 0x00, 0x00,
+  0xa2, 0x86, 0x22, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_11[66] = {
+  0x53, 0x65, 0x12, 0x00, 0x00, 0x00,
+  0x21, 0x32, 0x21, 0x00, 0x00, 0x00,
+  0x10, 0x91, 0x34, 0x00, 0x00, 0x00,
+  0x00, 0x72, 0x50, 0x00, 0x00, 0x00,
+  0x0c, 0x11, 0x81, 0x80, 0x00, 0x00,
+  0x40, 0xc0, 0xa2, 0x00, 0x00, 0x00,
+  0x6a, 0x08, 0x88, 0x80, 0x00, 0x00,
+  0x86, 0x00, 0x68, 0x00, 0x00, 0x00,
+  0x24, 0x8e, 0x02, 0x00, 0x00, 0x00,
+  0x89, 0x08, 0x44, 0x00, 0x00, 0x00,
+  0xc0, 0x24, 0x41, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_12[72] = {
+  0x10, 0x62, 0x82, 0x80, 0x00, 0x00,
+  0x02, 0x38, 0x45, 0x00, 0x00, 0x00,
+  0x40, 0x56, 0x04, 0x00, 0x00, 0x00,
+  0x21, 0x80, 0x54, 0x80, 0x00, 0x00,
+  0x81, 0x10, 0x29, 0x80, 0x00, 0x00,
+  0x14, 0x80, 0x13, 0x00, 0x00, 0x00,
+  0x98, 0x04, 0x81, 0x00, 0x00, 0x00,
+  0x08, 0x92, 0x48, 0x00, 0x00, 0x00,
+  0x62, 0x09, 0x40, 0x00, 0x00, 0x00,
+  0x24, 0x28, 0xa0, 0x00, 0x00, 0x00,
+  0x8a, 0x01, 0x18, 0x00, 0x00, 0x00,
+  0x84, 0x45, 0x22, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_13[78] = {
+  0x51, 0x4d, 0x12, 0x00, 0x00, 0x00,
+  0xc5, 0x14, 0x6d, 0x00, 0x00, 0x00,
+  0x21, 0x81, 0x54, 0x80, 0x00, 0x00,
+  0x12, 0x32, 0x17, 0x00, 0x00, 0x00,
+  0x08, 0xe2, 0x8c, 0x80, 0x00, 0x00,
+  0x2e, 0x0a, 0xa2, 0x00, 0x00, 0x00,
+  0x53, 0x65, 0x92, 0x00, 0x00, 0x00,
+  0x21, 0x32, 0x65, 0x00, 0x00, 0x00,
+  0x90, 0x9b, 0x14, 0x00, 0x00, 0x00,
+  0x02, 0x52, 0xb0, 0x80, 0x00, 0x00,
+  0x06, 0xa1, 0x4c, 0x80, 0x00, 0x00,
+  0x2c, 0x0c, 0x88, 0x80, 0x00, 0x00,
+  0x88, 0x68, 0x4b, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_14[84] = {
+  0x53, 0x65, 0x92, 0x00, 0x00, 0x00,
+  0x21, 0x32, 0x65, 0x00, 0x00, 0x00,
+  0x90, 0x9b, 0x14, 0x00, 0x00, 0x00,
+  0x02, 0x52, 0xb0, 0x80, 0x00, 0x00,
+  0x06, 0xa1, 0x4c, 0x80, 0x00, 0x00,
+  0x2c, 0x0c, 0x88, 0x80, 0x00, 0x00,
+  0x88, 0x68, 0x4b, 0x00, 0x00, 0x00,
+  0x51, 0x4d, 0x12, 0x00, 0x00, 0x00,
+  0xc5, 0x14, 0x6d, 0x00, 0x00, 0x00,
+  0x21, 0x81, 0x54, 0x80, 0x00, 0x00,
+  0x12, 0x32, 0x17, 0x00, 0x00, 0x00,
+  0x08, 0xe2, 0x8c, 0x80, 0x00, 0x00,
+  0x2e, 0x0a, 0xa2, 0x00, 0x00, 0x00,
+  0x73, 0x76, 0x61, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_15[90] = {
+  0x53, 0x65, 0x92, 0x00, 0x00, 0x00,
+  0x21, 0x32, 0x65, 0x00, 0x00, 0x00,
+  0x90, 0x9b, 0x14, 0x00, 0x00, 0x00,
+  0x02, 0x52, 0xb0, 0x80, 0x00, 0x00,
+  0x06, 0xa1, 0x4c, 0x80, 0x00, 0x00,
+  0x2c, 0x0c, 0x88, 0x80, 0x00, 0x00,
+  0x88, 0x68, 0x4b, 0x00, 0x00, 0x00,
+  0x20, 0x68, 0x0a, 0x80, 0x00, 0x00,
+  0x80, 0x33, 0x09, 0x00, 0x00, 0x00,
+  0x42, 0x41, 0x60, 0x80, 0x00, 0x00,
+  0x01, 0x90, 0x33, 0x00, 0x00, 0x00,
+  0x14, 0x14, 0x46, 0x00, 0x00, 0x00,
+  0x0a, 0x80, 0x81, 0x80, 0x00, 0x00,
+  0x38, 0x0d, 0x80, 0x00, 0x00, 0x00,
+  0xc5, 0x0a, 0x14, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_16[96] = {
+  0x20, 0x68, 0x0a, 0x80, 0x00, 0x00,
+  0x80, 0x33, 0x09, 0x00, 0x00, 0x00,
+  0x42, 0x41, 0x60, 0x80, 0x00, 0x00,
+  0x01, 0x90, 0x33, 0x00, 0x00, 0x00,
+  0x14, 0x14, 0x46, 0x00, 0x00, 0x00,
+  0x0a, 0x80, 0x81, 0x80, 0x00, 0x00,
+  0x38, 0x0d, 0x80, 0x00, 0x00, 0x00,
+  0xc5, 0x0a, 0x14, 0x00, 0x00, 0x00,
+  0x53, 0x65, 0x92, 0x00, 0x00, 0x00,
+  0x21, 0x32, 0x65, 0x00, 0x00, 0x00,
+  0x90, 0x9b, 0x14, 0x00, 0x00, 0x00,
+  0x02, 0x52, 0xb0, 0x80, 0x00, 0x00,
+  0x06, 0xa1, 0x4c, 0x80, 0x00, 0x00,
+  0x2c, 0x0c, 0x88, 0x80, 0x00, 0x00,
+  0x88, 0x68, 0x4b, 0x00, 0x00, 0x00,
+  0x16, 0xe8, 0xdc, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_17[102] = {
+  0x20, 0x68, 0x0a, 0x80, 0x00, 0x00,
+  0x80, 0x33, 0x09, 0x00, 0x00, 0x00,
+  0x42, 0x41, 0x60, 0x80, 0x00, 0x00,
+  0x01, 0x90, 0x33, 0x00, 0x00, 0x00,
+  0x14, 0x14, 0x46, 0x00, 0x00, 0x00,
+  0x0a, 0x80, 0x81, 0x80, 0x00, 0x00,
+  0x38, 0x0d, 0x80, 0x00, 0x00, 0x00,
+  0xc5, 0x0a, 0x14, 0x00, 0x00, 0x00,
+  0x53, 0x65, 0x92, 0x00, 0x00, 0x00,
+  0xe4, 0x26, 0x64, 0x00, 0x00, 0x00,
+  0x24, 0x41, 0x44, 0x00, 0x00, 0x00,
+  0xa1, 0x12, 0x14, 0x80, 0x00, 0x00,
+  0x18, 0x30, 0x2c, 0x80, 0x00, 0x00,
+  0x03, 0x99, 0x41, 0x00, 0x00, 0x00,
+  0x8a, 0x18, 0x0a, 0x80, 0x00, 0x00,
+  0x04, 0x90, 0xa9, 0x00, 0x00, 0x00,
+  0x00, 0xe4, 0x01, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_18[108] = {
+  0x53, 0x65, 0x92, 0x00, 0x00, 0x00,
+  0xe4, 0x26, 0x64, 0x00, 0x00, 0x00,
+  0x24, 0x41, 0x44, 0x00, 0x00, 0x00,
+  0xa1, 0x12, 0x14, 0x80, 0x00, 0x00,
+  0x18, 0x30, 0x2c, 0x80, 0x00, 0x00,
+  0x03, 0x99, 0x41, 0x00, 0x00, 0x00,
+  0x8a, 0x18, 0x0a, 0x80, 0x00, 0x00,
+  0x04, 0x90, 0xa9, 0x00, 0x00, 0x00,
+  0x00, 0xe4, 0x01, 0x80, 0x00, 0x00,
+  0x20, 0x68, 0x0a, 0x80, 0x00, 0x00,
+  0x80, 0x33, 0x09, 0x00, 0x00, 0x00,
+  0x42, 0x41, 0x60, 0x80, 0x00, 0x00,
+  0x01, 0x90, 0x33, 0x00, 0x00, 0x00,
+  0x14, 0x14, 0x46, 0x00, 0x00, 0x00,
+  0x0a, 0x80, 0x81, 0x80, 0x00, 0x00,
+  0x38, 0x0d, 0x80, 0x00, 0x00, 0x00,
+  0xc5, 0x0a, 0x14, 0x00, 0x00, 0x00,
+  0xce, 0x9b, 0xe1, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_19[114] = {
+  0x53, 0x65, 0x92, 0x00, 0x00, 0x00,
+  0xe4, 0x26, 0x64, 0x00, 0x00, 0x00,
+  0x24, 0x41, 0x44, 0x00, 0x00, 0x00,
+  0xa1, 0x12, 0x14, 0x80, 0x00, 0x00,
+  0x18, 0x30, 0x2c, 0x80, 0x00, 0x00,
+  0x03, 0x99, 0x41, 0x00, 0x00, 0x00,
+  0x8a, 0x18, 0x0a, 0x80, 0x00, 0x00,
+  0x04, 0x90, 0xa9, 0x00, 0x00, 0x00,
+  0x00, 0xe4, 0x01, 0x80, 0x00, 0x00,
+  0x51, 0x4d, 0x10, 0x00, 0x00, 0x00,
+  0x45, 0x14, 0x45, 0x00, 0x00, 0x00,
+  0x80, 0xd1, 0x09, 0x80, 0x00, 0x00,
+  0x24, 0x2a, 0x05, 0x00, 0x00, 0x00,
+  0x0a, 0x24, 0xa0, 0x80, 0x00, 0x00,
+  0x00, 0xe4, 0x03, 0x00, 0x00, 0x00,
+  0xb8, 0x08, 0x02, 0x80, 0x00, 0x00,
+  0x09, 0x10, 0xc9, 0x00, 0x00, 0x00,
+  0x56, 0x00, 0x58, 0x80, 0x00, 0x00,
+  0xa2, 0x86, 0x22, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_2[12] = {
+  0xec, 0xce, 0xcc, 0x00, 0x00, 0x00,
+  0x93, 0xb1, 0xb3, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_20[120] = {
+  0x51, 0x4d, 0x10, 0x00, 0x00, 0x00,
+  0x45, 0x14, 0x45, 0x00, 0x00, 0x00,
+  0x80, 0xd1, 0x09, 0x80, 0x00, 0x00,
+  0x24, 0x2a, 0x05, 0x00, 0x00, 0x00,
+  0x0a, 0x24, 0xa0, 0x80, 0x00, 0x00,
+  0x00, 0xe4, 0x03, 0x00, 0x00, 0x00,
+  0xb8, 0x08, 0x02, 0x80, 0x00, 0x00,
+  0x09, 0x10, 0xc9, 0x00, 0x00, 0x00,
+  0x56, 0x00, 0x58, 0x80, 0x00, 0x00,
+  0xa2, 0x86, 0x22, 0x00, 0x00, 0x00,
+  0x53, 0x65, 0x92, 0x00, 0x00, 0x00,
+  0xe4, 0x26, 0x64, 0x00, 0x00, 0x00,
+  0x24, 0x41, 0x44, 0x00, 0x00, 0x00,
+  0xa1, 0x12, 0x14, 0x80, 0x00, 0x00,
+  0x18, 0x30, 0x2c, 0x80, 0x00, 0x00,
+  0x03, 0x99, 0x41, 0x00, 0x00, 0x00,
+  0x8a, 0x18, 0x0a, 0x80, 0x00, 0x00,
+  0x04, 0x90, 0xa9, 0x00, 0x00, 0x00,
+  0x00, 0xe4, 0x01, 0x80, 0x00, 0x00,
+  0x1b, 0x8a, 0xa0, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_21[126] = {
+  0x51, 0x4d, 0x10, 0x00, 0x00, 0x00,
+  0x45, 0x14, 0x45, 0x00, 0x00, 0x00,
+  0x80, 0xd1, 0x09, 0x80, 0x00, 0x00,
+  0x24, 0x2a, 0x05, 0x00, 0x00, 0x00,
+  0x0a, 0x24, 0xa0, 0x80, 0x00, 0x00,
+  0x00, 0xe4, 0x03, 0x00, 0x00, 0x00,
+  0xb8, 0x08, 0x02, 0x80, 0x00, 0x00,
+  0x09, 0x10, 0xc9, 0x00, 0x00, 0x00,
+  0x56, 0x00, 0x58, 0x80, 0x00, 0x00,
+  0xa2, 0x86, 0x22, 0x00, 0x00, 0x00,
+  0x53, 0x65, 0x12, 0x00, 0x00, 0x00,
+  0x21, 0x32, 0x21, 0x00, 0x00, 0x00,
+  0x10, 0x91, 0x34, 0x00, 0x00, 0x00,
+  0x00, 0x72, 0x50, 0x00, 0x00, 0x00,
+  0x0c, 0x11, 0x81, 0x80, 0x00, 0x00,
+  0x40, 0xc0, 0xa2, 0x00, 0x00, 0x00,
+  0x6a, 0x08, 0x88, 0x80, 0x00, 0x00,
+  0x86, 0x00, 0x68, 0x00, 0x00, 0x00,
+  0x24, 0x8e, 0x02, 0x00, 0x00, 0x00,
+  0x89, 0x08, 0x44, 0x00, 0x00, 0x00,
+  0xc0, 0x24, 0x41, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_22[132] = {
+  0x53, 0x65, 0x12, 0x00, 0x00, 0x00,
+  0x21, 0x32, 0x21, 0x00, 0x00, 0x00,
+  0x10, 0x91, 0x34, 0x00, 0x00, 0x00,
+  0x00, 0x72, 0x50, 0x00, 0x00, 0x00,
+  0x0c, 0x11, 0x81, 0x80, 0x00, 0x00,
+  0x40, 0xc0, 0xa2, 0x00, 0x00, 0x00,
+  0x6a, 0x08, 0x88, 0x80, 0x00, 0x00,
+  0x86, 0x00, 0x68, 0x00, 0x00, 0x00,
+  0x24, 0x8e, 0x02, 0x00, 0x00, 0x00,
+  0x89, 0x08, 0x44, 0x00, 0x00, 0x00,
+  0xc0, 0x24, 0x41, 0x80, 0x00, 0x00,
+  0x51, 0x4d, 0x10, 0x00, 0x00, 0x00,
+  0x45, 0x14, 0x45, 0x00, 0x00, 0x00,
+  0x80, 0xd1, 0x09, 0x80, 0x00, 0x00,
+  0x24, 0x2a, 0x05, 0x00, 0x00, 0x00,
+  0x0a, 0x24, 0xa0, 0x80, 0x00, 0x00,
+  0x00, 0xe4, 0x03, 0x00, 0x00, 0x00,
+  0xb8, 0x08, 0x02, 0x80, 0x00, 0x00,
+  0x09, 0x10, 0xc9, 0x00, 0x00, 0x00,
+  0x56, 0x00, 0x58, 0x80, 0x00, 0x00,
+  0xa2, 0x86, 0x22, 0x00, 0x00, 0x00,
+  0x15, 0xa2, 0x99, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_23[138] = {
+  0x53, 0x65, 0x12, 0x00, 0x00, 0x00,
+  0x21, 0x32, 0x21, 0x00, 0x00, 0x00,
+  0x10, 0x91, 0x34, 0x00, 0x00, 0x00,
+  0x00, 0x72, 0x50, 0x00, 0x00, 0x00,
+  0x0c, 0x11, 0x81, 0x80, 0x00, 0x00,
+  0x40, 0xc0, 0xa2, 0x00, 0x00, 0x00,
+  0x6a, 0x08, 0x88, 0x80, 0x00, 0x00,
+  0x86, 0x00, 0x68, 0x00, 0x00, 0x00,
+  0x24, 0x8e, 0x02, 0x00, 0x00, 0x00,
+  0x89, 0x08, 0x44, 0x00, 0x00, 0x00,
+  0xc0, 0x24, 0x41, 0x80, 0x00, 0x00,
+  0x10, 0x62, 0x82, 0x80, 0x00, 0x00,
+  0x02, 0x38, 0x45, 0x00, 0x00, 0x00,
+  0x40, 0x56, 0x04, 0x00, 0x00, 0x00,
+  0x21, 0x80, 0x54, 0x80, 0x00, 0x00,
+  0x81, 0x10, 0x29, 0x80, 0x00, 0x00,
+  0x14, 0x80, 0x13, 0x00, 0x00, 0x00,
+  0x98, 0x04, 0x81, 0x00, 0x00, 0x00,
+  0x08, 0x92, 0x48, 0x00, 0x00, 0x00,
+  0x62, 0x09, 0x40, 0x00, 0x00, 0x00,
+  0x24, 0x28, 0xa0, 0x00, 0x00, 0x00,
+  0x8a, 0x01, 0x18, 0x00, 0x00, 0x00,
+  0x84, 0x45, 0x22, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_24[144] = {
+  0x10, 0x62, 0x82, 0x80, 0x00, 0x00,
+  0x02, 0x38, 0x45, 0x00, 0x00, 0x00,
+  0x40, 0x56, 0x04, 0x00, 0x00, 0x00,
+  0x21, 0x80, 0x54, 0x80, 0x00, 0x00,
+  0x81, 0x10, 0x29, 0x80, 0x00, 0x00,
+  0x14, 0x80, 0x13, 0x00, 0x00, 0x00,
+  0x98, 0x04, 0x81, 0x00, 0x00, 0x00,
+  0x08, 0x92, 0x48, 0x00, 0x00, 0x00,
+  0x62, 0x09, 0x40, 0x00, 0x00, 0x00,
+  0x24, 0x28, 0xa0, 0x00, 0x00, 0x00,
+  0x8a, 0x01, 0x18, 0x00, 0x00, 0x00,
+  0x84, 0x45, 0x22, 0x00, 0x00, 0x00,
+  0x53, 0x65, 0x12, 0x00, 0x00, 0x00,
+  0x21, 0x32, 0x21, 0x00, 0x00, 0x00,
+  0x10, 0x91, 0x34, 0x00, 0x00, 0x00,
+  0x00, 0x72, 0x50, 0x00, 0x00, 0x00,
+  0x0c, 0x11, 0x81, 0x80, 0x00, 0x00,
+  0x40, 0xc0, 0xa2, 0x00, 0x00, 0x00,
+  0x6a, 0x08, 0x88, 0x80, 0x00, 0x00,
+  0x86, 0x00, 0x68, 0x00, 0x00, 0x00,
+  0x24, 0x8e, 0x02, 0x00, 0x00, 0x00,
+  0x89, 0x08, 0x44, 0x00, 0x00, 0x00,
+  0xc0, 0x24, 0x41, 0x80, 0x00, 0x00,
+  0xf9, 0x0c, 0x14, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_25[150] = {
+  0x10, 0x62, 0x82, 0x80, 0x00, 0x00,
+  0x02, 0x38, 0x45, 0x00, 0x00, 0x00,
+  0x40, 0x56, 0x04, 0x00, 0x00, 0x00,
+  0x21, 0x80, 0x54, 0x80, 0x00, 0x00,
+  0x81, 0x10, 0x29, 0x80, 0x00, 0x00,
+  0x14, 0x80, 0x13, 0x00, 0x00, 0x00,
+  0x98, 0x04, 0x81, 0x00, 0x00, 0x00,
+  0x08, 0x92, 0x48, 0x00, 0x00, 0x00,
+  0x62, 0x09, 0x40, 0x00, 0x00, 0x00,
+  0x24, 0x28, 0xa0, 0x00, 0x00, 0x00,
+  0x8a, 0x01, 0x18, 0x00, 0x00, 0x00,
+  0x84, 0x45, 0x22, 0x00, 0x00, 0x00,
+  0x10, 0x65, 0x12, 0x00, 0x00, 0x00,
+  0x02, 0x36, 0x64, 0x00, 0x00, 0x00,
+  0x40, 0x50, 0x54, 0x80, 0x00, 0x00,
+  0x21, 0x88, 0x12, 0x00, 0x00, 0x00,
+  0x81, 0x19, 0x40, 0x00, 0x00, 0x00,
+  0x14, 0x83, 0x08, 0x00, 0x00, 0x00,
+  0x98, 0x02, 0x11, 0x00, 0x00, 0x00,
+  0x08, 0x90, 0x3c, 0x00, 0x00, 0x00,
+  0x62, 0x0e, 0x80, 0x00, 0x00, 0x00,
+  0x24, 0x20, 0xa1, 0x00, 0x00, 0x00,
+  0x8a, 0x08, 0x01, 0x80, 0x00, 0x00,
+  0x84, 0x40, 0x49, 0x00, 0x00, 0x00,
+  0x1c, 0x20, 0x8a, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_3[18] = {
+  0x9b, 0x89, 0x9b, 0x00, 0x00, 0x00,
+  0x4f, 0x14, 0x6d, 0x80, 0x00, 0x00,
+  0x3c, 0x63, 0x72, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_4[24] = {
+  0x8b, 0x24, 0x9b, 0x00, 0x00, 0x00,
+  0x14, 0xb2, 0x6d, 0x00, 0x00, 0x00,
+  0x22, 0xd8, 0x56, 0x80, 0x00, 0x00,
+  0x45, 0x55, 0x25, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_5[30] = {
+  0x53, 0x65, 0x13, 0x00, 0x00, 0x00,
+  0x64, 0x26, 0x64, 0x00, 0x00, 0x00,
+  0x0c, 0xc0, 0xc6, 0x80, 0x00, 0x00,
+  0x82, 0xaa, 0x1c, 0x00, 0x00, 0x00,
+  0x09, 0x32, 0x29, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_6[36] = {
+  0x51, 0x4d, 0x12, 0x00, 0x00, 0x00,
+  0xc5, 0x14, 0x6d, 0x00, 0x00, 0x00,
+  0x21, 0x81, 0x54, 0x80, 0x00, 0x00,
+  0x12, 0x32, 0x17, 0x00, 0x00, 0x00,
+  0x08, 0xe2, 0x8c, 0x80, 0x00, 0x00,
+  0x2e, 0x0a, 0xa2, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_7[42] = {
+  0x53, 0x65, 0x92, 0x00, 0x00, 0x00,
+  0x21, 0x32, 0x65, 0x00, 0x00, 0x00,
+  0x90, 0x9b, 0x14, 0x00, 0x00, 0x00,
+  0x02, 0x52, 0xb0, 0x80, 0x00, 0x00,
+  0x06, 0xa1, 0x4c, 0x80, 0x00, 0x00,
+  0x2c, 0x0c, 0x88, 0x80, 0x00, 0x00,
+  0x88, 0x68, 0x4b, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_8[48] = {
+  0x20, 0x68, 0x0a, 0x80, 0x00, 0x00,
+  0x80, 0x33, 0x09, 0x00, 0x00, 0x00,
+  0x42, 0x41, 0x60, 0x80, 0x00, 0x00,
+  0x01, 0x90, 0x33, 0x00, 0x00, 0x00,
+  0x14, 0x14, 0x46, 0x00, 0x00, 0x00,
+  0x0a, 0x80, 0x81, 0x80, 0x00, 0x00,
+  0x38, 0x0d, 0x80, 0x00, 0x00, 0x00,
+  0xc5, 0x0a, 0x14, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom25_9[54] = {
+  0x53, 0x65, 0x92, 0x00, 0x00, 0x00,
+  0xe4, 0x26, 0x64, 0x00, 0x00, 0x00,
+  0x24, 0x41, 0x44, 0x00, 0x00, 0x00,
+  0xa1, 0x12, 0x14, 0x80, 0x00, 0x00,
+  0x18, 0x30, 0x2c, 0x80, 0x00, 0x00,
+  0x03, 0x99, 0x41, 0x00, 0x00, 0x00,
+  0x8a, 0x18, 0x0a, 0x80, 0x00, 0x00,
+  0x04, 0x90, 0xa9, 0x00, 0x00, 0x00,
+  0x00, 0xe4, 0x01, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_1[6] = {
+  0xff, 0xff, 0xff, 0xc0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_10[60] = {
+  0xd1, 0x06, 0x88, 0x00, 0x00, 0x00,
+  0x44, 0x52, 0x22, 0x80, 0x00, 0x00,
+  0x10, 0x98, 0x84, 0xc0, 0x00, 0x00,
+  0xa0, 0x55, 0x02, 0x80, 0x00, 0x00,
+  0x4a, 0x0a, 0x50, 0x40, 0x00, 0x00,
+  0x40, 0x32, 0x01, 0x80, 0x00, 0x00,
+  0x80, 0x2c, 0x01, 0x40, 0x00, 0x00,
+  0x0c, 0x90, 0x64, 0x80, 0x00, 0x00,
+  0x05, 0x88, 0x2c, 0x40, 0x00, 0x00,
+  0x62, 0x23, 0x11, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_11[66] = {
+  0x51, 0x22, 0x89, 0x00, 0x00, 0x00,
+  0x22, 0x11, 0x10, 0x80, 0x00, 0x00,
+  0x13, 0x40, 0x9a, 0x00, 0x00, 0x00,
+  0x25, 0x01, 0x28, 0x00, 0x00, 0x00,
+  0x18, 0x18, 0xc0, 0xc0, 0x00, 0x00,
+  0x0a, 0x20, 0x51, 0x00, 0x00, 0x00,
+  0x88, 0x8c, 0x44, 0x40, 0x00, 0x00,
+  0x06, 0x80, 0x34, 0x00, 0x00, 0x00,
+  0xe0, 0x27, 0x01, 0x00, 0x00, 0x00,
+  0x84, 0x44, 0x22, 0x00, 0x00, 0x00,
+  0x44, 0x1a, 0x20, 0xc0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_12[72] = {
+  0x28, 0x29, 0x41, 0x40, 0x00, 0x00,
+  0x84, 0x54, 0x22, 0x80, 0x00, 0x00,
+  0x60, 0x43, 0x02, 0x00, 0x00, 0x00,
+  0x05, 0x48, 0x2a, 0x40, 0x00, 0x00,
+  0x02, 0x98, 0x14, 0xc0, 0x00, 0x00,
+  0x01, 0x30, 0x09, 0x80, 0x00, 0x00,
+  0x48, 0x12, 0x40, 0x80, 0x00, 0x00,
+  0x24, 0x81, 0x24, 0x00, 0x00, 0x00,
+  0x94, 0x04, 0xa0, 0x00, 0x00, 0x00,
+  0x8a, 0x04, 0x50, 0x00, 0x00, 0x00,
+  0x11, 0x80, 0x8c, 0x00, 0x00, 0x00,
+  0x52, 0x22, 0x91, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_13[78] = {
+  0x51, 0x22, 0x89, 0x00, 0x00, 0x00,
+  0x66, 0x43, 0x32, 0x00, 0x00, 0x00,
+  0x05, 0x48, 0x2a, 0x40, 0x00, 0x00,
+  0x81, 0x24, 0x09, 0x00, 0x00, 0x00,
+  0x94, 0x04, 0xa0, 0x00, 0x00, 0x00,
+  0x30, 0x81, 0x84, 0x00, 0x00, 0x00,
+  0x21, 0x11, 0x08, 0x80, 0x00, 0x00,
+  0x03, 0xc0, 0x1e, 0x00, 0x00, 0x00,
+  0xe8, 0x07, 0x40, 0x00, 0x00, 0x00,
+  0x0a, 0x10, 0x50, 0x80, 0x00, 0x00,
+  0x80, 0x1c, 0x00, 0xc0, 0x00, 0x00,
+  0x04, 0x90, 0x24, 0x80, 0x00, 0x00,
+  0x08, 0xa8, 0x45, 0x40, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_14[84] = {
+  0x59, 0x22, 0xc9, 0x00, 0x00, 0x00,
+  0x26, 0x51, 0x32, 0x80, 0x00, 0x00,
+  0xb1, 0x45, 0x8a, 0x00, 0x00, 0x00,
+  0x2b, 0x09, 0x58, 0x40, 0x00, 0x00,
+  0x14, 0xc8, 0xa6, 0x40, 0x00, 0x00,
+  0xc8, 0x8e, 0x44, 0x40, 0x00, 0x00,
+  0x84, 0xb4, 0x25, 0x80, 0x00, 0x00,
+  0xd1, 0x26, 0x89, 0x00, 0x00, 0x00,
+  0x46, 0xd2, 0x36, 0x80, 0x00, 0x00,
+  0x15, 0x48, 0xaa, 0x40, 0x00, 0x00,
+  0x21, 0x71, 0x0b, 0x80, 0x00, 0x00,
+  0x28, 0xc9, 0x46, 0x40, 0x00, 0x00,
+  0xaa, 0x25, 0x51, 0x00, 0x00, 0x00,
+  0x5d, 0xa7, 0x78, 0x40, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_15[90] = {
+  0x59, 0x22, 0xc9, 0x00, 0x00, 0x00,
+  0x26, 0x51, 0x32, 0x80, 0x00, 0x00,
+  0xb1, 0x45, 0x8a, 0x00, 0x00, 0x00,
+  0x2b, 0x09, 0x58, 0x40, 0x00, 0x00,
+  0x14, 0xc8, 0xa6, 0x40, 0x00, 0x00,
+  0xc8, 0x8e, 0x44, 0x40, 0x00, 0x00,
+  0x84, 0xb4, 0x25, 0x80, 0x00, 0x00,
+  0x80, 0xac, 0x05, 0x40, 0x00, 0x00,
+  0x30, 0x91, 0x84, 0x80, 0x00, 0x00,
+  0x16, 0x08, 0xb0, 0x40, 0x00, 0x00,
+  0x03, 0x30, 0x19, 0x80, 0x00, 0x00,
+  0x44, 0x62, 0x23, 0x00, 0x00, 0x00,
+  0x08, 0x18, 0x40, 0xc0, 0x00, 0x00,
+  0xd8, 0x06, 0xc0, 0x00, 0x00, 0x00,
+  0xa1, 0x45, 0x0a, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_16[96] = {
+  0x80, 0xac, 0x05, 0x40, 0x00, 0x00,
+  0x30, 0x91, 0x84, 0x80, 0x00, 0x00,
+  0x16, 0x08, 0xb0, 0x40, 0x00, 0x00,
+  0x03, 0x30, 0x19, 0x80, 0x00, 0x00,
+  0x44, 0x62, 0x23, 0x00, 0x00, 0x00,
+  0x08, 0x18, 0x40, 0xc0, 0x00, 0x00,
+  0xd8, 0x06, 0xc0, 0x00, 0x00, 0x00,
+  0xa1, 0x45, 0x0a, 0x00, 0x00, 0x00,
+  0x59, 0x22, 0xc9, 0x00, 0x00, 0x00,
+  0x26, 0x51, 0x32, 0x80, 0x00, 0x00,
+  0xb1, 0x45, 0x8a, 0x00, 0x00, 0x00,
+  0x2b, 0x09, 0x58, 0x40, 0x00, 0x00,
+  0x14, 0xc8, 0xa6, 0x40, 0x00, 0x00,
+  0xc8, 0x8e, 0x44, 0x40, 0x00, 0x00,
+  0x84, 0xb4, 0x25, 0x80, 0x00, 0x00,
+  0x3c, 0xaf, 0x88, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_17[102] = {
+  0x80, 0xac, 0x05, 0x40, 0x00, 0x00,
+  0x30, 0x91, 0x84, 0x80, 0x00, 0x00,
+  0x16, 0x08, 0xb0, 0x40, 0x00, 0x00,
+  0x03, 0x30, 0x19, 0x80, 0x00, 0x00,
+  0x44, 0x62, 0x23, 0x00, 0x00, 0x00,
+  0x08, 0x18, 0x40, 0xc0, 0x00, 0x00,
+  0xd8, 0x06, 0xc0, 0x00, 0x00, 0x00,
+  0xa1, 0x45, 0x0a, 0x00, 0x00, 0x00,
+  0x59, 0x22, 0xc9, 0x00, 0x00, 0x00,
+  0x66, 0x43, 0x32, 0x00, 0x00, 0x00,
+  0x14, 0x40, 0xa2, 0x00, 0x00, 0x00,
+  0x21, 0x49, 0x0a, 0x40, 0x00, 0x00,
+  0x02, 0xc8, 0x16, 0x40, 0x00, 0x00,
+  0x94, 0x14, 0xa0, 0x80, 0x00, 0x00,
+  0x80, 0xac, 0x05, 0x40, 0x00, 0x00,
+  0x0a, 0x90, 0x54, 0x80, 0x00, 0x00,
+  0x40, 0x1a, 0x00, 0xc0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_18[108] = {
+  0x59, 0x22, 0xc9, 0x00, 0x00, 0x00,
+  0x66, 0x43, 0x32, 0x00, 0x00, 0x00,
+  0x14, 0x40, 0xa2, 0x00, 0x00, 0x00,
+  0x21, 0x49, 0x0a, 0x40, 0x00, 0x00,
+  0x02, 0xc8, 0x16, 0x40, 0x00, 0x00,
+  0x94, 0x14, 0xa0, 0x80, 0x00, 0x00,
+  0x80, 0xac, 0x05, 0x40, 0x00, 0x00,
+  0x0a, 0x90, 0x54, 0x80, 0x00, 0x00,
+  0x40, 0x1a, 0x00, 0xc0, 0x00, 0x00,
+  0x80, 0xac, 0x05, 0x40, 0x00, 0x00,
+  0x30, 0x91, 0x84, 0x80, 0x00, 0x00,
+  0x16, 0x08, 0xb0, 0x40, 0x00, 0x00,
+  0x03, 0x30, 0x19, 0x80, 0x00, 0x00,
+  0x44, 0x62, 0x23, 0x00, 0x00, 0x00,
+  0x08, 0x18, 0x40, 0xc0, 0x00, 0x00,
+  0xd8, 0x06, 0xc0, 0x00, 0x00, 0x00,
+  0xa1, 0x45, 0x0a, 0x00, 0x00, 0x00,
+  0xaa, 0x0c, 0x83, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_19[114] = {
+  0x59, 0x22, 0xc9, 0x00, 0x00, 0x00,
+  0x66, 0x43, 0x32, 0x00, 0x00, 0x00,
+  0x14, 0x40, 0xa2, 0x00, 0x00, 0x00,
+  0x21, 0x49, 0x0a, 0x40, 0x00, 0x00,
+  0x02, 0xc8, 0x16, 0x40, 0x00, 0x00,
+  0x94, 0x14, 0xa0, 0x80, 0x00, 0x00,
+  0x80, 0xac, 0x05, 0x40, 0x00, 0x00,
+  0x0a, 0x90, 0x54, 0x80, 0x00, 0x00,
+  0x40, 0x1a, 0x00, 0xc0, 0x00, 0x00,
+  0xd1, 0x06, 0x88, 0x00, 0x00, 0x00,
+  0x44, 0x52, 0x22, 0x80, 0x00, 0x00,
+  0x10, 0x98, 0x84, 0xc0, 0x00, 0x00,
+  0xa0, 0x55, 0x02, 0x80, 0x00, 0x00,
+  0x4a, 0x0a, 0x50, 0x40, 0x00, 0x00,
+  0x40, 0x32, 0x01, 0x80, 0x00, 0x00,
+  0x80, 0x2c, 0x01, 0x40, 0x00, 0x00,
+  0x0c, 0x90, 0x64, 0x80, 0x00, 0x00,
+  0x05, 0x88, 0x2c, 0x40, 0x00, 0x00,
+  0x62, 0x23, 0x11, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_2[12] = {
+  0xec, 0xc7, 0x66, 0x00, 0x00, 0x00,
+  0x1b, 0x38, 0xd9, 0xc0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_20[120] = {
+  0xd1, 0x06, 0x88, 0x00, 0x00, 0x00,
+  0x44, 0x52, 0x22, 0x80, 0x00, 0x00,
+  0x10, 0x98, 0x84, 0xc0, 0x00, 0x00,
+  0xa0, 0x55, 0x02, 0x80, 0x00, 0x00,
+  0x4a, 0x0a, 0x50, 0x40, 0x00, 0x00,
+  0x40, 0x32, 0x01, 0x80, 0x00, 0x00,
+  0x80, 0x2c, 0x01, 0x40, 0x00, 0x00,
+  0x0c, 0x90, 0x64, 0x80, 0x00, 0x00,
+  0x05, 0x88, 0x2c, 0x40, 0x00, 0x00,
+  0x62, 0x23, 0x11, 0x00, 0x00, 0x00,
+  0x59, 0x22, 0xc9, 0x00, 0x00, 0x00,
+  0x66, 0x43, 0x32, 0x00, 0x00, 0x00,
+  0x14, 0x40, 0xa2, 0x00, 0x00, 0x00,
+  0x21, 0x49, 0x0a, 0x40, 0x00, 0x00,
+  0x02, 0xc8, 0x16, 0x40, 0x00, 0x00,
+  0x94, 0x14, 0xa0, 0x80, 0x00, 0x00,
+  0x80, 0xac, 0x05, 0x40, 0x00, 0x00,
+  0x0a, 0x90, 0x54, 0x80, 0x00, 0x00,
+  0x40, 0x1a, 0x00, 0xc0, 0x00, 0x00,
+  0xf4, 0x08, 0xec, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_21[126] = {
+  0xd1, 0x06, 0x88, 0x00, 0x00, 0x00,
+  0x44, 0x52, 0x22, 0x80, 0x00, 0x00,
+  0x10, 0x98, 0x84, 0xc0, 0x00, 0x00,
+  0xa0, 0x55, 0x02, 0x80, 0x00, 0x00,
+  0x4a, 0x0a, 0x50, 0x40, 0x00, 0x00,
+  0x40, 0x32, 0x01, 0x80, 0x00, 0x00,
+  0x80, 0x2c, 0x01, 0x40, 0x00, 0x00,
+  0x0c, 0x90, 0x64, 0x80, 0x00, 0x00,
+  0x05, 0x88, 0x2c, 0x40, 0x00, 0x00,
+  0x62, 0x23, 0x11, 0x00, 0x00, 0x00,
+  0x51, 0x22, 0x89, 0x00, 0x00, 0x00,
+  0x22, 0x11, 0x10, 0x80, 0x00, 0x00,
+  0x13, 0x40, 0x9a, 0x00, 0x00, 0x00,
+  0x25, 0x01, 0x28, 0x00, 0x00, 0x00,
+  0x18, 0x18, 0xc0, 0xc0, 0x00, 0x00,
+  0x0a, 0x20, 0x51, 0x00, 0x00, 0x00,
+  0x88, 0x8c, 0x44, 0x40, 0x00, 0x00,
+  0x06, 0x80, 0x34, 0x00, 0x00, 0x00,
+  0xe0, 0x27, 0x01, 0x00, 0x00, 0x00,
+  0x84, 0x44, 0x22, 0x00, 0x00, 0x00,
+  0x44, 0x1a, 0x20, 0xc0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_22[132] = {
+  0x51, 0x22, 0x89, 0x00, 0x00, 0x00,
+  0x22, 0x11, 0x10, 0x80, 0x00, 0x00,
+  0x13, 0x40, 0x9a, 0x00, 0x00, 0x00,
+  0x25, 0x01, 0x28, 0x00, 0x00, 0x00,
+  0x18, 0x18, 0xc0, 0xc0, 0x00, 0x00,
+  0x0a, 0x20, 0x51, 0x00, 0x00, 0x00,
+  0x88, 0x8c, 0x44, 0x40, 0x00, 0x00,
+  0x06, 0x80, 0x34, 0x00, 0x00, 0x00,
+  0xe0, 0x27, 0x01, 0x00, 0x00, 0x00,
+  0x84, 0x44, 0x22, 0x00, 0x00, 0x00,
+  0x44, 0x1a, 0x20, 0xc0, 0x00, 0x00,
+  0xd1, 0x06, 0x88, 0x00, 0x00, 0x00,
+  0x44, 0x52, 0x22, 0x80, 0x00, 0x00,
+  0x10, 0x98, 0x84, 0xc0, 0x00, 0x00,
+  0xa0, 0x55, 0x02, 0x80, 0x00, 0x00,
+  0x4a, 0x0a, 0x50, 0x40, 0x00, 0x00,
+  0x40, 0x32, 0x01, 0x80, 0x00, 0x00,
+  0x80, 0x2c, 0x01, 0x40, 0x00, 0x00,
+  0x0c, 0x90, 0x64, 0x80, 0x00, 0x00,
+  0x05, 0x88, 0x2c, 0x40, 0x00, 0x00,
+  0x62, 0x23, 0x11, 0x00, 0x00, 0x00,
+  0x13, 0xc6, 0x6b, 0x40, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_23[138] = {
+  0x51, 0x22, 0x89, 0x00, 0x00, 0x00,
+  0x22, 0x11, 0x10, 0x80, 0x00, 0x00,
+  0x13, 0x40, 0x9a, 0x00, 0x00, 0x00,
+  0x25, 0x01, 0x28, 0x00, 0x00, 0x00,
+  0x18, 0x18, 0xc0, 0xc0, 0x00, 0x00,
+  0x0a, 0x20, 0x51, 0x00, 0x00, 0x00,
+  0x88, 0x8c, 0x44, 0x40, 0x00, 0x00,
+  0x06, 0x80, 0x34, 0x00, 0x00, 0x00,
+  0xe0, 0x27, 0x01, 0x00, 0x00, 0x00,
+  0x84, 0x44, 0x22, 0x00, 0x00, 0x00,
+  0x44, 0x1a, 0x20, 0xc0, 0x00, 0x00,
+  0x28, 0x29, 0x41, 0x40, 0x00, 0x00,
+  0x84, 0x54, 0x22, 0x80, 0x00, 0x00,
+  0x60, 0x43, 0x02, 0x00, 0x00, 0x00,
+  0x05, 0x48, 0x2a, 0x40, 0x00, 0x00,
+  0x02, 0x98, 0x14, 0xc0, 0x00, 0x00,
+  0x01, 0x30, 0x09, 0x80, 0x00, 0x00,
+  0x48, 0x12, 0x40, 0x80, 0x00, 0x00,
+  0x24, 0x81, 0x24, 0x00, 0x00, 0x00,
+  0x94, 0x04, 0xa0, 0x00, 0x00, 0x00,
+  0x8a, 0x04, 0x50, 0x00, 0x00, 0x00,
+  0x11, 0x80, 0x8c, 0x00, 0x00, 0x00,
+  0x52, 0x22, 0x91, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_24[144] = {
+  0x28, 0x29, 0x41, 0x40, 0x00, 0x00,
+  0x84, 0x54, 0x22, 0x80, 0x00, 0x00,
+  0x60, 0x43, 0x02, 0x00, 0x00, 0x00,
+  0x05, 0x48, 0x2a, 0x40, 0x00, 0x00,
+  0x02, 0x98, 0x14, 0xc0, 0x00, 0x00,
+  0x01, 0x30, 0x09, 0x80, 0x00, 0x00,
+  0x48, 0x12, 0x40, 0x80, 0x00, 0x00,
+  0x24, 0x81, 0x24, 0x00, 0x00, 0x00,
+  0x94, 0x04, 0xa0, 0x00, 0x00, 0x00,
+  0x8a, 0x04, 0x50, 0x00, 0x00, 0x00,
+  0x11, 0x80, 0x8c, 0x00, 0x00, 0x00,
+  0x52, 0x22, 0x91, 0x00, 0x00, 0x00,
+  0x51, 0x22, 0x89, 0x00, 0x00, 0x00,
+  0x22, 0x11, 0x10, 0x80, 0x00, 0x00,
+  0x13, 0x40, 0x9a, 0x00, 0x00, 0x00,
+  0x25, 0x01, 0x28, 0x00, 0x00, 0x00,
+  0x18, 0x18, 0xc0, 0xc0, 0x00, 0x00,
+  0x0a, 0x20, 0x51, 0x00, 0x00, 0x00,
+  0x88, 0x8c, 0x44, 0x40, 0x00, 0x00,
+  0x06, 0x80, 0x34, 0x00, 0x00, 0x00,
+  0xe0, 0x27, 0x01, 0x00, 0x00, 0x00,
+  0x84, 0x44, 0x22, 0x00, 0x00, 0x00,
+  0x44, 0x1a, 0x20, 0xc0, 0x00, 0x00,
+  0xdb, 0x4d, 0xd8, 0x40, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_25[150] = {
+  0x28, 0x29, 0x41, 0x40, 0x00, 0x00,
+  0x84, 0x54, 0x22, 0x80, 0x00, 0x00,
+  0x60, 0x43, 0x02, 0x00, 0x00, 0x00,
+  0x05, 0x48, 0x2a, 0x40, 0x00, 0x00,
+  0x02, 0x98, 0x14, 0xc0, 0x00, 0x00,
+  0x01, 0x30, 0x09, 0x80, 0x00, 0x00,
+  0x48, 0x12, 0x40, 0x80, 0x00, 0x00,
+  0x24, 0x81, 0x24, 0x00, 0x00, 0x00,
+  0x94, 0x04, 0xa0, 0x00, 0x00, 0x00,
+  0x8a, 0x04, 0x50, 0x00, 0x00, 0x00,
+  0x11, 0x80, 0x8c, 0x00, 0x00, 0x00,
+  0x52, 0x22, 0x91, 0x00, 0x00, 0x00,
+  0x51, 0x22, 0x89, 0x00, 0x00, 0x00,
+  0x66, 0x43, 0x32, 0x00, 0x00, 0x00,
+  0x05, 0x48, 0x2a, 0x40, 0x00, 0x00,
+  0x81, 0x24, 0x09, 0x00, 0x00, 0x00,
+  0x94, 0x04, 0xa0, 0x00, 0x00, 0x00,
+  0x30, 0x81, 0x84, 0x00, 0x00, 0x00,
+  0x21, 0x11, 0x08, 0x80, 0x00, 0x00,
+  0x03, 0xc0, 0x1e, 0x00, 0x00, 0x00,
+  0xe8, 0x07, 0x40, 0x00, 0x00, 0x00,
+  0x0a, 0x10, 0x50, 0x80, 0x00, 0x00,
+  0x80, 0x1c, 0x00, 0xc0, 0x00, 0x00,
+  0x04, 0x90, 0x24, 0x80, 0x00, 0x00,
+  0x08, 0xa8, 0x45, 0x40, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_26[156] = {
+  0x51, 0x22, 0x89, 0x00, 0x00, 0x00,
+  0x66, 0x43, 0x32, 0x00, 0x00, 0x00,
+  0x05, 0x48, 0x2a, 0x40, 0x00, 0x00,
+  0x81, 0x24, 0x09, 0x00, 0x00, 0x00,
+  0x94, 0x04, 0xa0, 0x00, 0x00, 0x00,
+  0x30, 0x81, 0x84, 0x00, 0x00, 0x00,
+  0x21, 0x11, 0x08, 0x80, 0x00, 0x00,
+  0x03, 0xc0, 0x1e, 0x00, 0x00, 0x00,
+  0xe8, 0x07, 0x40, 0x00, 0x00, 0x00,
+  0x0a, 0x10, 0x50, 0x80, 0x00, 0x00,
+  0x80, 0x1c, 0x00, 0xc0, 0x00, 0x00,
+  0x04, 0x90, 0x24, 0x80, 0x00, 0x00,
+  0x08, 0xa8, 0x45, 0x40, 0x00, 0x00,
+  0x28, 0x29, 0x41, 0x40, 0x00, 0x00,
+  0x84, 0x54, 0x22, 0x80, 0x00, 0x00,
+  0x60, 0x43, 0x02, 0x00, 0x00, 0x00,
+  0x05, 0x48, 0x2a, 0x40, 0x00, 0x00,
+  0x02, 0x98, 0x14, 0xc0, 0x00, 0x00,
+  0x01, 0x30, 0x09, 0x80, 0x00, 0x00,
+  0x48, 0x12, 0x40, 0x80, 0x00, 0x00,
+  0x24, 0x81, 0x24, 0x00, 0x00, 0x00,
+  0x94, 0x04, 0xa0, 0x00, 0x00, 0x00,
+  0x8a, 0x04, 0x50, 0x00, 0x00, 0x00,
+  0x11, 0x80, 0x8c, 0x00, 0x00, 0x00,
+  0x52, 0x22, 0x91, 0x00, 0x00, 0x00,
+  0xf9, 0x13, 0x51, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_3[18] = {
+  0x99, 0xb4, 0xcd, 0x80, 0x00, 0x00,
+  0x46, 0xda, 0x36, 0xc0, 0x00, 0x00,
+  0x37, 0x29, 0xb9, 0x40, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_4[24] = {
+  0x49, 0xb2, 0x4d, 0x80, 0x00, 0x00,
+  0x26, 0xd1, 0x36, 0x80, 0x00, 0x00,
+  0x85, 0x6c, 0x2b, 0x40, 0x00, 0x00,
+  0x52, 0x5a, 0x92, 0xc0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_5[30] = {
+  0x51, 0x32, 0x89, 0x80, 0x00, 0x00,
+  0x66, 0x43, 0x32, 0x00, 0x00, 0x00,
+  0x0c, 0x68, 0x63, 0x40, 0x00, 0x00,
+  0xa1, 0xc5, 0x0e, 0x00, 0x00, 0x00,
+  0x22, 0x99, 0x14, 0xc0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_6[36] = {
+  0xd1, 0x26, 0x89, 0x00, 0x00, 0x00,
+  0x46, 0xd2, 0x36, 0x80, 0x00, 0x00,
+  0x15, 0x48, 0xaa, 0x40, 0x00, 0x00,
+  0x21, 0x71, 0x0b, 0x80, 0x00, 0x00,
+  0x28, 0xc9, 0x46, 0x40, 0x00, 0x00,
+  0xaa, 0x25, 0x51, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_7[42] = {
+  0x59, 0x22, 0xc9, 0x00, 0x00, 0x00,
+  0x26, 0x51, 0x32, 0x80, 0x00, 0x00,
+  0xb1, 0x45, 0x8a, 0x00, 0x00, 0x00,
+  0x2b, 0x09, 0x58, 0x40, 0x00, 0x00,
+  0x14, 0xc8, 0xa6, 0x40, 0x00, 0x00,
+  0xc8, 0x8e, 0x44, 0x40, 0x00, 0x00,
+  0x84, 0xb4, 0x25, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_8[48] = {
+  0x80, 0xac, 0x05, 0x40, 0x00, 0x00,
+  0x30, 0x91, 0x84, 0x80, 0x00, 0x00,
+  0x16, 0x08, 0xb0, 0x40, 0x00, 0x00,
+  0x03, 0x30, 0x19, 0x80, 0x00, 0x00,
+  0x44, 0x62, 0x23, 0x00, 0x00, 0x00,
+  0x08, 0x18, 0x40, 0xc0, 0x00, 0x00,
+  0xd8, 0x06, 0xc0, 0x00, 0x00, 0x00,
+  0xa1, 0x45, 0x0a, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom26_9[54] = {
+  0x59, 0x22, 0xc9, 0x00, 0x00, 0x00,
+  0x66, 0x43, 0x32, 0x00, 0x00, 0x00,
+  0x14, 0x40, 0xa2, 0x00, 0x00, 0x00,
+  0x21, 0x49, 0x0a, 0x40, 0x00, 0x00,
+  0x02, 0xc8, 0x16, 0x40, 0x00, 0x00,
+  0x94, 0x14, 0xa0, 0x80, 0x00, 0x00,
+  0x80, 0xac, 0x05, 0x40, 0x00, 0x00,
+  0x0a, 0x90, 0x54, 0x80, 0x00, 0x00,
+  0x40, 0x1a, 0x00, 0xc0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_1[6] = {
+  0xff, 0xff, 0xff, 0xe0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_10[60] = {
+  0xd1, 0x06, 0x06, 0xa0, 0x00, 0x00,
+  0x44, 0x50, 0xea, 0x00, 0x00, 0x00,
+  0x10, 0x9e, 0xa0, 0x40, 0x00, 0x00,
+  0xa0, 0x50, 0x13, 0x00, 0x00, 0x00,
+  0x4a, 0x08, 0x21, 0x40, 0x00, 0x00,
+  0x40, 0x31, 0x04, 0xc0, 0x00, 0x00,
+  0x80, 0x2a, 0x02, 0x20, 0x00, 0x00,
+  0x0c, 0x90, 0x44, 0x20, 0x00, 0x00,
+  0x05, 0x8b, 0x40, 0x00, 0x00, 0x00,
+  0x62, 0x21, 0x18, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_11[66] = {
+  0x51, 0x23, 0x16, 0x80, 0x00, 0x00,
+  0x22, 0x11, 0xa9, 0x00, 0x00, 0x00,
+  0x13, 0x40, 0xa0, 0xa0, 0x00, 0x00,
+  0x25, 0x06, 0x28, 0x40, 0x00, 0x00,
+  0x18, 0x19, 0x10, 0x60, 0x00, 0x00,
+  0x0a, 0x24, 0x45, 0xc0, 0x00, 0x00,
+  0x88, 0x8a, 0x12, 0xa0, 0x00, 0x00,
+  0x06, 0x81, 0x45, 0x20, 0x00, 0x00,
+  0xe0, 0x24, 0xa1, 0x00, 0x00, 0x00,
+  0x84, 0x40, 0xd8, 0x20, 0x00, 0x00,
+  0x44, 0x19, 0x16, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_12[72] = {
+  0x28, 0x2c, 0x08, 0x20, 0x00, 0x00,
+  0x84, 0x52, 0x03, 0x40, 0x00, 0x00,
+  0x60, 0x44, 0x81, 0x20, 0x00, 0x00,
+  0x05, 0x49, 0x41, 0x40, 0x00, 0x00,
+  0x02, 0x9a, 0x90, 0x80, 0x00, 0x00,
+  0x01, 0x32, 0x0c, 0x40, 0x00, 0x00,
+  0x48, 0x10, 0x49, 0x80, 0x00, 0x00,
+  0x24, 0x82, 0x42, 0x20, 0x00, 0x00,
+  0x94, 0x00, 0x22, 0x20, 0x00, 0x00,
+  0x8a, 0x00, 0x74, 0x00, 0x00, 0x00,
+  0x11, 0x85, 0x2c, 0x80, 0x00, 0x00,
+  0x52, 0x20, 0x90, 0x60, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_13[78] = {
+  0x51, 0x23, 0x12, 0xa0, 0x00, 0x00,
+  0x66, 0x41, 0xa3, 0x00, 0x00, 0x00,
+  0x05, 0x4a, 0x40, 0x20, 0x00, 0x00,
+  0x81, 0x20, 0x05, 0x60, 0x00, 0x00,
+  0x94, 0x01, 0x40, 0x40, 0x00, 0x00,
+  0x30, 0x84, 0x08, 0x40, 0x00, 0x00,
+  0x21, 0x11, 0x18, 0x20, 0x00, 0x00,
+  0x03, 0xc0, 0x34, 0x00, 0x00, 0x00,
+  0xe8, 0x04, 0x00, 0xa0, 0x00, 0x00,
+  0x0a, 0x11, 0x80, 0x80, 0x00, 0x00,
+  0x80, 0x1c, 0x61, 0x00, 0x00, 0x00,
+  0x04, 0x92, 0xa0, 0x00, 0x00, 0x00,
+  0x08, 0xac, 0x06, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_14[84] = {
+  0x59, 0x23, 0x12, 0xa0, 0x00, 0x00,
+  0x26, 0x55, 0xc9, 0x00, 0x00, 0x00,
+  0xb1, 0x40, 0xc5, 0xa0, 0x00, 0x00,
+  0x2b, 0x0a, 0xa4, 0xc0, 0x00, 0x00,
+  0x14, 0xc8, 0x33, 0x60, 0x00, 0x00,
+  0xc8, 0x8c, 0x2a, 0xa0, 0x00, 0x00,
+  0x84, 0xb5, 0x54, 0x40, 0x00, 0x00,
+  0xd1, 0x22, 0x52, 0xa0, 0x00, 0x00,
+  0x46, 0xd4, 0xaa, 0x40, 0x00, 0x00,
+  0x15, 0x48, 0xa5, 0xa0, 0x00, 0x00,
+  0x21, 0x72, 0x8d, 0x40, 0x00, 0x00,
+  0x28, 0xc9, 0x13, 0x60, 0x00, 0x00,
+  0xaa, 0x24, 0x44, 0x60, 0x00, 0x00,
+  0x0a, 0xe7, 0x3b, 0x20, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_15[90] = {
+  0x59, 0x23, 0x12, 0xa0, 0x00, 0x00,
+  0x26, 0x55, 0xc9, 0x00, 0x00, 0x00,
+  0xb1, 0x40, 0xc5, 0xa0, 0x00, 0x00,
+  0x2b, 0x0a, 0xa4, 0xc0, 0x00, 0x00,
+  0x14, 0xc8, 0x33, 0x60, 0x00, 0x00,
+  0xc8, 0x8c, 0x2a, 0xa0, 0x00, 0x00,
+  0x84, 0xb5, 0x54, 0x40, 0x00, 0x00,
+  0x80, 0xae, 0x00, 0xa0, 0x00, 0x00,
+  0x30, 0x92, 0x0b, 0x00, 0x00, 0x00,
+  0x16, 0x0c, 0x41, 0x80, 0x00, 0x00,
+  0x03, 0x31, 0x05, 0x20, 0x00, 0x00,
+  0x44, 0x60, 0x52, 0x40, 0x00, 0x00,
+  0x08, 0x18, 0x24, 0xc0, 0x00, 0x00,
+  0xd8, 0x04, 0xa2, 0x00, 0x00, 0x00,
+  0xa1, 0x43, 0x90, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_16[96] = {
+  0x80, 0xae, 0x00, 0xa0, 0x00, 0x00,
+  0x30, 0x92, 0x0b, 0x00, 0x00, 0x00,
+  0x16, 0x0c, 0x41, 0x80, 0x00, 0x00,
+  0x03, 0x31, 0x05, 0x20, 0x00, 0x00,
+  0x44, 0x60, 0x52, 0x40, 0x00, 0x00,
+  0x08, 0x18, 0x24, 0xc0, 0x00, 0x00,
+  0xd8, 0x04, 0xa2, 0x00, 0x00, 0x00,
+  0xa1, 0x43, 0x90, 0x00, 0x00, 0x00,
+  0x59, 0x23, 0x12, 0xa0, 0x00, 0x00,
+  0x26, 0x55, 0xc9, 0x00, 0x00, 0x00,
+  0xb1, 0x40, 0xc5, 0xa0, 0x00, 0x00,
+  0x2b, 0x0a, 0xa4, 0xc0, 0x00, 0x00,
+  0x14, 0xc8, 0x33, 0x60, 0x00, 0x00,
+  0xc8, 0x8c, 0x2a, 0xa0, 0x00, 0x00,
+  0x84, 0xb5, 0x54, 0x40, 0x00, 0x00,
+  0x01, 0x50, 0xfb, 0xe0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_17[102] = {
+  0x80, 0xae, 0x00, 0xa0, 0x00, 0x00,
+  0x30, 0x92, 0x0b, 0x00, 0x00, 0x00,
+  0x16, 0x0c, 0x41, 0x80, 0x00, 0x00,
+  0x03, 0x31, 0x05, 0x20, 0x00, 0x00,
+  0x44, 0x60, 0x52, 0x40, 0x00, 0x00,
+  0x08, 0x18, 0x24, 0xc0, 0x00, 0x00,
+  0xd8, 0x04, 0xa2, 0x00, 0x00, 0x00,
+  0xa1, 0x43, 0x90, 0x00, 0x00, 0x00,
+  0x59, 0x25, 0x12, 0xa0, 0x00, 0x00,
+  0x66, 0x41, 0xa3, 0x00, 0x00, 0x00,
+  0x14, 0x42, 0x51, 0x20, 0x00, 0x00,
+  0x21, 0x49, 0x05, 0x40, 0x00, 0x00,
+  0x02, 0xc8, 0x8c, 0x20, 0x00, 0x00,
+  0x94, 0x12, 0x48, 0x40, 0x00, 0x00,
+  0x80, 0xac, 0x30, 0x60, 0x00, 0x00,
+  0x0a, 0x91, 0x06, 0xa0, 0x00, 0x00,
+  0x40, 0x1c, 0x42, 0x40, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_18[108] = {
+  0x59, 0x25, 0x12, 0xa0, 0x00, 0x00,
+  0x66, 0x41, 0xa3, 0x00, 0x00, 0x00,
+  0x14, 0x42, 0x51, 0x20, 0x00, 0x00,
+  0x21, 0x49, 0x05, 0x40, 0x00, 0x00,
+  0x02, 0xc8, 0x8c, 0x20, 0x00, 0x00,
+  0x94, 0x12, 0x48, 0x40, 0x00, 0x00,
+  0x80, 0xac, 0x30, 0x60, 0x00, 0x00,
+  0x0a, 0x91, 0x06, 0xa0, 0x00, 0x00,
+  0x40, 0x1c, 0x42, 0x40, 0x00, 0x00,
+  0x80, 0xae, 0x00, 0xa0, 0x00, 0x00,
+  0x30, 0x92, 0x0b, 0x00, 0x00, 0x00,
+  0x16, 0x0c, 0x41, 0x80, 0x00, 0x00,
+  0x03, 0x31, 0x05, 0x20, 0x00, 0x00,
+  0x44, 0x60, 0x52, 0x40, 0x00, 0x00,
+  0x08, 0x18, 0x24, 0xc0, 0x00, 0x00,
+  0xd8, 0x04, 0xa2, 0x00, 0x00, 0x00,
+  0xa1, 0x43, 0x90, 0x00, 0x00, 0x00,
+  0x53, 0xc3, 0x33, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_19[114] = {
+  0x59, 0x25, 0x12, 0xa0, 0x00, 0x00,
+  0x66, 0x41, 0xa3, 0x00, 0x00, 0x00,
+  0x14, 0x42, 0x51, 0x20, 0x00, 0x00,
+  0x21, 0x49, 0x05, 0x40, 0x00, 0x00,
+  0x02, 0xc8, 0x8c, 0x20, 0x00, 0x00,
+  0x94, 0x12, 0x48, 0x40, 0x00, 0x00,
+  0x80, 0xac, 0x30, 0x60, 0x00, 0x00,
+  0x0a, 0x91, 0x06, 0xa0, 0x00, 0x00,
+  0x40, 0x1c, 0x42, 0x40, 0x00, 0x00,
+  0xd1, 0x06, 0x06, 0xa0, 0x00, 0x00,
+  0x44, 0x50, 0xea, 0x00, 0x00, 0x00,
+  0x10, 0x9e, 0xa0, 0x40, 0x00, 0x00,
+  0xa0, 0x50, 0x13, 0x00, 0x00, 0x00,
+  0x4a, 0x08, 0x21, 0x40, 0x00, 0x00,
+  0x40, 0x31, 0x04, 0xc0, 0x00, 0x00,
+  0x80, 0x2a, 0x02, 0x20, 0x00, 0x00,
+  0x0c, 0x90, 0x44, 0x20, 0x00, 0x00,
+  0x05, 0x8b, 0x40, 0x00, 0x00, 0x00,
+  0x62, 0x21, 0x18, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_2[12] = {
+  0xec, 0xc7, 0x67, 0x40, 0x00, 0x00,
+  0x1b, 0x39, 0xdc, 0xe0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_20[120] = {
+  0xd1, 0x06, 0x06, 0xa0, 0x00, 0x00,
+  0x44, 0x50, 0xea, 0x00, 0x00, 0x00,
+  0x10, 0x9e, 0xa0, 0x40, 0x00, 0x00,
+  0xa0, 0x50, 0x13, 0x00, 0x00, 0x00,
+  0x4a, 0x08, 0x21, 0x40, 0x00, 0x00,
+  0x40, 0x31, 0x04, 0xc0, 0x00, 0x00,
+  0x80, 0x2a, 0x02, 0x20, 0x00, 0x00,
+  0x0c, 0x90, 0x44, 0x20, 0x00, 0x00,
+  0x05, 0x8b, 0x40, 0x00, 0x00, 0x00,
+  0x62, 0x21, 0x18, 0x80, 0x00, 0x00,
+  0x59, 0x25, 0x12, 0xa0, 0x00, 0x00,
+  0x66, 0x41, 0xa3, 0x00, 0x00, 0x00,
+  0x14, 0x42, 0x51, 0x20, 0x00, 0x00,
+  0x21, 0x49, 0x05, 0x40, 0x00, 0x00,
+  0x02, 0xc8, 0x8c, 0x20, 0x00, 0x00,
+  0x94, 0x12, 0x48, 0x40, 0x00, 0x00,
+  0x80, 0xac, 0x30, 0x60, 0x00, 0x00,
+  0x0a, 0x91, 0x06, 0xa0, 0x00, 0x00,
+  0x40, 0x1c, 0x42, 0x40, 0x00, 0x00,
+  0xcb, 0xff, 0x6f, 0xc0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_21[126] = {
+  0xd1, 0x06, 0x06, 0xa0, 0x00, 0x00,
+  0x44, 0x50, 0xea, 0x00, 0x00, 0x00,
+  0x10, 0x9e, 0xa0, 0x40, 0x00, 0x00,
+  0xa0, 0x50, 0x13, 0x00, 0x00, 0x00,
+  0x4a, 0x08, 0x21, 0x40, 0x00, 0x00,
+  0x40, 0x31, 0x04, 0xc0, 0x00, 0x00,
+  0x80, 0x2a, 0x02, 0x20, 0x00, 0x00,
+  0x0c, 0x90, 0x44, 0x20, 0x00, 0x00,
+  0x05, 0x8b, 0x40, 0x00, 0x00, 0x00,
+  0x62, 0x21, 0x18, 0x80, 0x00, 0x00,
+  0x51, 0x23, 0x16, 0x80, 0x00, 0x00,
+  0x22, 0x11, 0xa9, 0x00, 0x00, 0x00,
+  0x13, 0x40, 0xa0, 0xa0, 0x00, 0x00,
+  0x25, 0x06, 0x28, 0x40, 0x00, 0x00,
+  0x18, 0x19, 0x10, 0x60, 0x00, 0x00,
+  0x0a, 0x24, 0x45, 0xc0, 0x00, 0x00,
+  0x88, 0x8a, 0x12, 0xa0, 0x00, 0x00,
+  0x06, 0x81, 0x45, 0x20, 0x00, 0x00,
+  0xe0, 0x24, 0xa1, 0x00, 0x00, 0x00,
+  0x84, 0x40, 0xd8, 0x20, 0x00, 0x00,
+  0x44, 0x19, 0x16, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_22[132] = {
+  0x51, 0x23, 0x16, 0x80, 0x00, 0x00,
+  0x22, 0x11, 0xa9, 0x00, 0x00, 0x00,
+  0x13, 0x40, 0xa0, 0xa0, 0x00, 0x00,
+  0x25, 0x06, 0x28, 0x40, 0x00, 0x00,
+  0x18, 0x19, 0x10, 0x60, 0x00, 0x00,
+  0x0a, 0x24, 0x45, 0xc0, 0x00, 0x00,
+  0x88, 0x8a, 0x12, 0xa0, 0x00, 0x00,
+  0x06, 0x81, 0x45, 0x20, 0x00, 0x00,
+  0xe0, 0x24, 0xa1, 0x00, 0x00, 0x00,
+  0x84, 0x40, 0xd8, 0x20, 0x00, 0x00,
+  0x44, 0x19, 0x16, 0x00, 0x00, 0x00,
+  0xd1, 0x06, 0x06, 0xa0, 0x00, 0x00,
+  0x44, 0x50, 0xea, 0x00, 0x00, 0x00,
+  0x10, 0x9e, 0xa0, 0x40, 0x00, 0x00,
+  0xa0, 0x50, 0x13, 0x00, 0x00, 0x00,
+  0x4a, 0x08, 0x21, 0x40, 0x00, 0x00,
+  0x40, 0x31, 0x04, 0xc0, 0x00, 0x00,
+  0x80, 0x2a, 0x02, 0x20, 0x00, 0x00,
+  0x0c, 0x90, 0x44, 0x20, 0x00, 0x00,
+  0x05, 0x8b, 0x40, 0x00, 0x00, 0x00,
+  0x62, 0x21, 0x18, 0x80, 0x00, 0x00,
+  0xf5, 0x2d, 0x52, 0x40, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_23[138] = {
+  0x51, 0x23, 0x16, 0x80, 0x00, 0x00,
+  0x22, 0x11, 0xa9, 0x00, 0x00, 0x00,
+  0x13, 0x40, 0xa0, 0xa0, 0x00, 0x00,
+  0x25, 0x06, 0x28, 0x40, 0x00, 0x00,
+  0x18, 0x19, 0x10, 0x60, 0x00, 0x00,
+  0x0a, 0x24, 0x45, 0xc0, 0x00, 0x00,
+  0x88, 0x8a, 0x12, 0xa0, 0x00, 0x00,
+  0x06, 0x81, 0x45, 0x20, 0x00, 0x00,
+  0xe0, 0x24, 0xa1, 0x00, 0x00, 0x00,
+  0x84, 0x40, 0xd8, 0x20, 0x00, 0x00,
+  0x44, 0x19, 0x16, 0x00, 0x00, 0x00,
+  0x28, 0x2c, 0x08, 0x20, 0x00, 0x00,
+  0x84, 0x52, 0x03, 0x40, 0x00, 0x00,
+  0x60, 0x44, 0x81, 0x20, 0x00, 0x00,
+  0x05, 0x49, 0x41, 0x40, 0x00, 0x00,
+  0x02, 0x9a, 0x90, 0x80, 0x00, 0x00,
+  0x01, 0x32, 0x0c, 0x40, 0x00, 0x00,
+  0x48, 0x10, 0x49, 0x80, 0x00, 0x00,
+  0x24, 0x82, 0x42, 0x20, 0x00, 0x00,
+  0x94, 0x00, 0x22, 0x20, 0x00, 0x00,
+  0x8a, 0x00, 0x74, 0x00, 0x00, 0x00,
+  0x11, 0x85, 0x2c, 0x80, 0x00, 0x00,
+  0x52, 0x20, 0x90, 0x60, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_24[144] = {
+  0x28, 0x2c, 0x08, 0x20, 0x00, 0x00,
+  0x84, 0x52, 0x03, 0x40, 0x00, 0x00,
+  0x60, 0x44, 0x81, 0x20, 0x00, 0x00,
+  0x05, 0x49, 0x41, 0x40, 0x00, 0x00,
+  0x02, 0x9a, 0x90, 0x80, 0x00, 0x00,
+  0x01, 0x32, 0x0c, 0x40, 0x00, 0x00,
+  0x48, 0x10, 0x49, 0x80, 0x00, 0x00,
+  0x24, 0x82, 0x42, 0x20, 0x00, 0x00,
+  0x94, 0x00, 0x22, 0x20, 0x00, 0x00,
+  0x8a, 0x00, 0x74, 0x00, 0x00, 0x00,
+  0x11, 0x85, 0x2c, 0x80, 0x00, 0x00,
+  0x52, 0x20, 0x90, 0x60, 0x00, 0x00,
+  0x51, 0x23, 0x16, 0x80, 0x00, 0x00,
+  0x22, 0x11, 0xa9, 0x00, 0x00, 0x00,
+  0x13, 0x40, 0xa0, 0xa0, 0x00, 0x00,
+  0x25, 0x06, 0x28, 0x40, 0x00, 0x00,
+  0x18, 0x19, 0x10, 0x60, 0x00, 0x00,
+  0x0a, 0x24, 0x45, 0xc0, 0x00, 0x00,
+  0x88, 0x8a, 0x12, 0xa0, 0x00, 0x00,
+  0x06, 0x81, 0x45, 0x20, 0x00, 0x00,
+  0xe0, 0x24, 0xa1, 0x00, 0x00, 0x00,
+  0x84, 0x40, 0xd8, 0x20, 0x00, 0x00,
+  0x44, 0x19, 0x16, 0x00, 0x00, 0x00,
+  0xa2, 0x85, 0xdb, 0xa0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_25[150] = {
+  0x28, 0x2c, 0x08, 0x20, 0x00, 0x00,
+  0x84, 0x52, 0x03, 0x40, 0x00, 0x00,
+  0x60, 0x44, 0x81, 0x20, 0x00, 0x00,
+  0x05, 0x49, 0x41, 0x40, 0x00, 0x00,
+  0x02, 0x9a, 0x90, 0x80, 0x00, 0x00,
+  0x01, 0x32, 0x0c, 0x40, 0x00, 0x00,
+  0x48, 0x10, 0x49, 0x80, 0x00, 0x00,
+  0x24, 0x82, 0x42, 0x20, 0x00, 0x00,
+  0x94, 0x00, 0x22, 0x20, 0x00, 0x00,
+  0x8a, 0x00, 0x74, 0x00, 0x00, 0x00,
+  0x11, 0x85, 0x2c, 0x80, 0x00, 0x00,
+  0x52, 0x20, 0x90, 0x60, 0x00, 0x00,
+  0x51, 0x23, 0x12, 0xa0, 0x00, 0x00,
+  0x66, 0x41, 0xa3, 0x00, 0x00, 0x00,
+  0x05, 0x4a, 0x40, 0x20, 0x00, 0x00,
+  0x81, 0x20, 0x05, 0x60, 0x00, 0x00,
+  0x94, 0x01, 0x40, 0x40, 0x00, 0x00,
+  0x30, 0x84, 0x08, 0x40, 0x00, 0x00,
+  0x21, 0x11, 0x18, 0x20, 0x00, 0x00,
+  0x03, 0xc0, 0x34, 0x00, 0x00, 0x00,
+  0xe8, 0x04, 0x00, 0xa0, 0x00, 0x00,
+  0x0a, 0x11, 0x80, 0x80, 0x00, 0x00,
+  0x80, 0x1c, 0x61, 0x00, 0x00, 0x00,
+  0x04, 0x92, 0xa0, 0x00, 0x00, 0x00,
+  0x08, 0xac, 0x06, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_26[156] = {
+  0x51, 0x23, 0x12, 0xa0, 0x00, 0x00,
+  0x66, 0x41, 0xa3, 0x00, 0x00, 0x00,
+  0x05, 0x4a, 0x40, 0x20, 0x00, 0x00,
+  0x81, 0x20, 0x05, 0x60, 0x00, 0x00,
+  0x94, 0x01, 0x40, 0x40, 0x00, 0x00,
+  0x30, 0x84, 0x08, 0x40, 0x00, 0x00,
+  0x21, 0x11, 0x18, 0x20, 0x00, 0x00,
+  0x03, 0xc0, 0x34, 0x00, 0x00, 0x00,
+  0xe8, 0x04, 0x00, 0xa0, 0x00, 0x00,
+  0x0a, 0x11, 0x80, 0x80, 0x00, 0x00,
+  0x80, 0x1c, 0x61, 0x00, 0x00, 0x00,
+  0x04, 0x92, 0xa0, 0x00, 0x00, 0x00,
+  0x08, 0xac, 0x06, 0x00, 0x00, 0x00,
+  0x28, 0x2c, 0x08, 0x20, 0x00, 0x00,
+  0x84, 0x52, 0x03, 0x40, 0x00, 0x00,
+  0x60, 0x44, 0x81, 0x20, 0x00, 0x00,
+  0x05, 0x49, 0x41, 0x40, 0x00, 0x00,
+  0x02, 0x9a, 0x90, 0x80, 0x00, 0x00,
+  0x01, 0x32, 0x0c, 0x40, 0x00, 0x00,
+  0x48, 0x10, 0x49, 0x80, 0x00, 0x00,
+  0x24, 0x82, 0x42, 0x20, 0x00, 0x00,
+  0x94, 0x00, 0x22, 0x20, 0x00, 0x00,
+  0x8a, 0x00, 0x74, 0x00, 0x00, 0x00,
+  0x11, 0x85, 0x2c, 0x80, 0x00, 0x00,
+  0x52, 0x20, 0x90, 0x60, 0x00, 0x00,
+  0xcd, 0x41, 0xa2, 0x40, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_27[162] = {
+  0x51, 0x23, 0x12, 0xa0, 0x00, 0x00,
+  0x66, 0x41, 0xa3, 0x00, 0x00, 0x00,
+  0x05, 0x4a, 0x40, 0x20, 0x00, 0x00,
+  0x81, 0x20, 0x05, 0x60, 0x00, 0x00,
+  0x94, 0x01, 0x40, 0x40, 0x00, 0x00,
+  0x30, 0x84, 0x08, 0x40, 0x00, 0x00,
+  0x21, 0x11, 0x18, 0x20, 0x00, 0x00,
+  0x03, 0xc0, 0x34, 0x00, 0x00, 0x00,
+  0xe8, 0x04, 0x00, 0xa0, 0x00, 0x00,
+  0x0a, 0x11, 0x80, 0x80, 0x00, 0x00,
+  0x80, 0x1c, 0x61, 0x00, 0x00, 0x00,
+  0x04, 0x92, 0xa0, 0x00, 0x00, 0x00,
+  0x08, 0xac, 0x06, 0x00, 0x00, 0x00,
+  0x51, 0x22, 0x02, 0xa0, 0x00, 0x00,
+  0x66, 0x40, 0xaa, 0x00, 0x00, 0x00,
+  0x05, 0x4e, 0x00, 0x20, 0x00, 0x00,
+  0x81, 0x21, 0x40, 0x80, 0x00, 0x00,
+  0x94, 0x00, 0x28, 0x60, 0x00, 0x00,
+  0x30, 0x83, 0x24, 0x00, 0x00, 0x00,
+  0x21, 0x14, 0x0c, 0x00, 0x00, 0x00,
+  0x03, 0xc0, 0x84, 0xc0, 0x00, 0x00,
+  0xe8, 0x04, 0x21, 0x00, 0x00, 0x00,
+  0x0a, 0x10, 0x91, 0x80, 0x00, 0x00,
+  0x80, 0x1b, 0x10, 0x00, 0x00, 0x00,
+  0x04, 0x91, 0x43, 0x00, 0x00, 0x00,
+  0x08, 0xa8, 0x70, 0x40, 0x00, 0x00,
+  0x9c, 0xc0, 0x84, 0x20, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_3[18] = {
+  0x99, 0xb5, 0x66, 0xc0, 0x00, 0x00,
+  0x46, 0xda, 0xab, 0x60, 0x00, 0x00,
+  0x37, 0x29, 0x3d, 0xa0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_4[24] = {
+  0x49, 0xb1, 0x66, 0xc0, 0x00, 0x00,
+  0x26, 0xd4, 0x9b, 0x40, 0x00, 0x00,
+  0x85, 0x68, 0xd5, 0xa0, 0x00, 0x00,
+  0x52, 0x5a, 0x39, 0x60, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_5[30] = {
+  0x51, 0x33, 0x26, 0xc0, 0x00, 0x00,
+  0x66, 0x45, 0x2b, 0x40, 0x00, 0x00,
+  0x0c, 0x6a, 0x95, 0xa0, 0x00, 0x00,
+  0xa1, 0xc0, 0xed, 0x40, 0x00, 0x00,
+  0x22, 0x9c, 0xe2, 0xa0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_6[36] = {
+  0xd1, 0x22, 0x52, 0xa0, 0x00, 0x00,
+  0x46, 0xd4, 0xaa, 0x40, 0x00, 0x00,
+  0x15, 0x48, 0xa5, 0xa0, 0x00, 0x00,
+  0x21, 0x72, 0x8d, 0x40, 0x00, 0x00,
+  0x28, 0xc9, 0x13, 0x60, 0x00, 0x00,
+  0xaa, 0x24, 0x44, 0x60, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_7[42] = {
+  0x59, 0x23, 0x12, 0xa0, 0x00, 0x00,
+  0x26, 0x55, 0xc9, 0x00, 0x00, 0x00,
+  0xb1, 0x40, 0xc5, 0xa0, 0x00, 0x00,
+  0x2b, 0x0a, 0xa4, 0xc0, 0x00, 0x00,
+  0x14, 0xc8, 0x33, 0x60, 0x00, 0x00,
+  0xc8, 0x8c, 0x2a, 0xa0, 0x00, 0x00,
+  0x84, 0xb5, 0x54, 0x40, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_8[48] = {
+  0x80, 0xae, 0x00, 0xa0, 0x00, 0x00,
+  0x30, 0x92, 0x0b, 0x00, 0x00, 0x00,
+  0x16, 0x0c, 0x41, 0x80, 0x00, 0x00,
+  0x03, 0x31, 0x05, 0x20, 0x00, 0x00,
+  0x44, 0x60, 0x52, 0x40, 0x00, 0x00,
+  0x08, 0x18, 0x24, 0xc0, 0x00, 0x00,
+  0xd8, 0x04, 0xa2, 0x00, 0x00, 0x00,
+  0xa1, 0x43, 0x90, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom27_9[54] = {
+  0x59, 0x25, 0x12, 0xa0, 0x00, 0x00,
+  0x66, 0x41, 0xa3, 0x00, 0x00, 0x00,
+  0x14, 0x42, 0x51, 0x20, 0x00, 0x00,
+  0x21, 0x49, 0x05, 0x40, 0x00, 0x00,
+  0x02, 0xc8, 0x8c, 0x20, 0x00, 0x00,
+  0x94, 0x12, 0x48, 0x40, 0x00, 0x00,
+  0x80, 0xac, 0x30, 0x60, 0x00, 0x00,
+  0x0a, 0x91, 0x06, 0xa0, 0x00, 0x00,
+  0x40, 0x1c, 0x42, 0x40, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_1[6] = {
+  0xff, 0xff, 0xff, 0xf0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_10[60] = {
+  0xc0, 0xd7, 0x03, 0x50, 0x00, 0x00,
+  0x1d, 0x40, 0x75, 0x00, 0x00, 0x00,
+  0xd4, 0x0b, 0x50, 0x20, 0x00, 0x00,
+  0x02, 0x60, 0x09, 0x80, 0x00, 0x00,
+  0x04, 0x28, 0x10, 0xa0, 0x00, 0x00,
+  0x20, 0x98, 0x82, 0x60, 0x00, 0x00,
+  0x40, 0x45, 0x01, 0x10, 0x00, 0x00,
+  0x08, 0x84, 0x22, 0x10, 0x00, 0x00,
+  0x68, 0x01, 0xa0, 0x00, 0x00, 0x00,
+  0x23, 0x10, 0x8c, 0x40, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_11[66] = {
+  0x62, 0xd1, 0x8b, 0x40, 0x00, 0x00,
+  0x35, 0x20, 0xd4, 0x80, 0x00, 0x00,
+  0x14, 0x14, 0x50, 0x50, 0x00, 0x00,
+  0xc5, 0x0b, 0x14, 0x20, 0x00, 0x00,
+  0x22, 0x0c, 0x88, 0x30, 0x00, 0x00,
+  0x88, 0xba, 0x22, 0xe0, 0x00, 0x00,
+  0x42, 0x55, 0x09, 0x50, 0x00, 0x00,
+  0x28, 0xa4, 0xa2, 0x90, 0x00, 0x00,
+  0x94, 0x22, 0x50, 0x80, 0x00, 0x00,
+  0x1b, 0x04, 0x6c, 0x10, 0x00, 0x00,
+  0x22, 0xc0, 0x8b, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_12[72] = {
+  0x81, 0x06, 0x04, 0x10, 0x00, 0x00,
+  0x40, 0x69, 0x01, 0xa0, 0x00, 0x00,
+  0x90, 0x26, 0x40, 0x90, 0x00, 0x00,
+  0x28, 0x28, 0xa0, 0xa0, 0x00, 0x00,
+  0x52, 0x11, 0x48, 0x40, 0x00, 0x00,
+  0x41, 0x89, 0x06, 0x20, 0x00, 0x00,
+  0x09, 0x30, 0x24, 0xc0, 0x00, 0x00,
+  0x48, 0x45, 0x21, 0x10, 0x00, 0x00,
+  0x04, 0x44, 0x11, 0x10, 0x00, 0x00,
+  0x0e, 0x80, 0x3a, 0x00, 0x00, 0x00,
+  0xa5, 0x92, 0x96, 0x40, 0x00, 0x00,
+  0x12, 0x0c, 0x48, 0x30, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_13[78] = {
+  0x62, 0x55, 0x89, 0x50, 0x00, 0x00,
+  0x34, 0x60, 0xd1, 0x80, 0x00, 0x00,
+  0x48, 0x05, 0x20, 0x10, 0x00, 0x00,
+  0x00, 0xac, 0x02, 0xb0, 0x00, 0x00,
+  0x28, 0x08, 0xa0, 0x20, 0x00, 0x00,
+  0x81, 0x0a, 0x04, 0x20, 0x00, 0x00,
+  0x23, 0x04, 0x8c, 0x10, 0x00, 0x00,
+  0x06, 0x80, 0x1a, 0x00, 0x00, 0x00,
+  0x80, 0x16, 0x00, 0x50, 0x00, 0x00,
+  0x30, 0x10, 0xc0, 0x40, 0x00, 0x00,
+  0x8c, 0x22, 0x30, 0x80, 0x00, 0x00,
+  0x54, 0x01, 0x50, 0x00, 0x00, 0x00,
+  0x80, 0xc2, 0x03, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_14[84] = {
+  0x40, 0x55, 0x01, 0x50, 0x00, 0x00,
+  0x15, 0x40, 0x55, 0x00, 0x00, 0x00,
+  0xc0, 0x07, 0x00, 0x10, 0x00, 0x00,
+  0x28, 0x10, 0xa0, 0x40, 0x00, 0x00,
+  0x05, 0x0c, 0x14, 0x30, 0x00, 0x00,
+  0x64, 0x81, 0x92, 0x00, 0x00, 0x00,
+  0x81, 0x82, 0x06, 0x00, 0x00, 0x00,
+  0x10, 0x98, 0x42, 0x60, 0x00, 0x00,
+  0x84, 0x22, 0x10, 0x80, 0x00, 0x00,
+  0x12, 0x30, 0x48, 0xc0, 0x00, 0x00,
+  0x62, 0x01, 0x88, 0x00, 0x00, 0x00,
+  0x28, 0x60, 0xa1, 0x80, 0x00, 0x00,
+  0x0e, 0x08, 0x38, 0x20, 0x00, 0x00,
+  0x10, 0x84, 0x42, 0x10, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_15[90] = {
+  0x62, 0x55, 0x89, 0x50, 0x00, 0x00,
+  0xb9, 0x22, 0xe4, 0x80, 0x00, 0x00,
+  0x18, 0xb4, 0x62, 0xd0, 0x00, 0x00,
+  0x54, 0x99, 0x52, 0x60, 0x00, 0x00,
+  0x06, 0x6c, 0x19, 0xb0, 0x00, 0x00,
+  0x85, 0x56, 0x15, 0x50, 0x00, 0x00,
+  0xaa, 0x8a, 0xaa, 0x20, 0x00, 0x00,
+  0xc0, 0x17, 0x00, 0x50, 0x00, 0x00,
+  0x41, 0x61, 0x05, 0x80, 0x00, 0x00,
+  0x88, 0x32, 0x20, 0xc0, 0x00, 0x00,
+  0x20, 0xa4, 0x82, 0x90, 0x00, 0x00,
+  0x0a, 0x48, 0x29, 0x20, 0x00, 0x00,
+  0x04, 0x98, 0x12, 0x60, 0x00, 0x00,
+  0x94, 0x42, 0x51, 0x00, 0x00, 0x00,
+  0x72, 0x01, 0xc8, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_16[96] = {
+  0xc0, 0x17, 0x00, 0x50, 0x00, 0x00,
+  0x41, 0x61, 0x05, 0x80, 0x00, 0x00,
+  0x88, 0x32, 0x20, 0xc0, 0x00, 0x00,
+  0x20, 0xa4, 0x82, 0x90, 0x00, 0x00,
+  0x0a, 0x48, 0x29, 0x20, 0x00, 0x00,
+  0x04, 0x98, 0x12, 0x60, 0x00, 0x00,
+  0x94, 0x42, 0x51, 0x00, 0x00, 0x00,
+  0x72, 0x01, 0xc8, 0x00, 0x00, 0x00,
+  0x62, 0x55, 0x89, 0x50, 0x00, 0x00,
+  0xb9, 0x22, 0xe4, 0x80, 0x00, 0x00,
+  0x18, 0xb4, 0x62, 0xd0, 0x00, 0x00,
+  0x54, 0x99, 0x52, 0x60, 0x00, 0x00,
+  0x06, 0x6c, 0x19, 0xb0, 0x00, 0x00,
+  0x85, 0x56, 0x15, 0x50, 0x00, 0x00,
+  0xaa, 0x8a, 0xaa, 0x20, 0x00, 0x00,
+  0xed, 0x76, 0x36, 0x50, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_17[102] = {
+  0xc0, 0x17, 0x00, 0x50, 0x00, 0x00,
+  0x41, 0x61, 0x05, 0x80, 0x00, 0x00,
+  0x88, 0x32, 0x20, 0xc0, 0x00, 0x00,
+  0x20, 0xa4, 0x82, 0x90, 0x00, 0x00,
+  0x0a, 0x48, 0x29, 0x20, 0x00, 0x00,
+  0x04, 0x98, 0x12, 0x60, 0x00, 0x00,
+  0x94, 0x42, 0x51, 0x00, 0x00, 0x00,
+  0x72, 0x01, 0xc8, 0x00, 0x00, 0x00,
+  0xa2, 0x56, 0x89, 0x50, 0x00, 0x00,
+  0x34, 0x60, 0xd1, 0x80, 0x00, 0x00,
+  0x4a, 0x25, 0x28, 0x90, 0x00, 0x00,
+  0x20, 0xa8, 0x82, 0xa0, 0x00, 0x00,
+  0x11, 0x84, 0x46, 0x10, 0x00, 0x00,
+  0x49, 0x09, 0x24, 0x20, 0x00, 0x00,
+  0x86, 0x0e, 0x18, 0x30, 0x00, 0x00,
+  0x20, 0xd4, 0x83, 0x50, 0x00, 0x00,
+  0x88, 0x4a, 0x21, 0x20, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_18[108] = {
+  0xa2, 0x56, 0x89, 0x50, 0x00, 0x00,
+  0x34, 0x60, 0xd1, 0x80, 0x00, 0x00,
+  0x4a, 0x25, 0x28, 0x90, 0x00, 0x00,
+  0x20, 0xa8, 0x82, 0xa0, 0x00, 0x00,
+  0x11, 0x84, 0x46, 0x10, 0x00, 0x00,
+  0x49, 0x09, 0x24, 0x20, 0x00, 0x00,
+  0x86, 0x0e, 0x18, 0x30, 0x00, 0x00,
+  0x20, 0xd4, 0x83, 0x50, 0x00, 0x00,
+  0x88, 0x4a, 0x21, 0x20, 0x00, 0x00,
+  0xc0, 0x17, 0x00, 0x50, 0x00, 0x00,
+  0x41, 0x61, 0x05, 0x80, 0x00, 0x00,
+  0x88, 0x32, 0x20, 0xc0, 0x00, 0x00,
+  0x20, 0xa4, 0x82, 0x90, 0x00, 0x00,
+  0x0a, 0x48, 0x29, 0x20, 0x00, 0x00,
+  0x04, 0x98, 0x12, 0x60, 0x00, 0x00,
+  0x94, 0x42, 0x51, 0x00, 0x00, 0x00,
+  0x72, 0x01, 0xc8, 0x00, 0x00, 0x00,
+  0x6e, 0x9f, 0x98, 0x10, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_19[114] = {
+  0xa2, 0x56, 0x89, 0x50, 0x00, 0x00,
+  0x34, 0x60, 0xd1, 0x80, 0x00, 0x00,
+  0x4a, 0x25, 0x28, 0x90, 0x00, 0x00,
+  0x20, 0xa8, 0x82, 0xa0, 0x00, 0x00,
+  0x11, 0x84, 0x46, 0x10, 0x00, 0x00,
+  0x49, 0x09, 0x24, 0x20, 0x00, 0x00,
+  0x86, 0x0e, 0x18, 0x30, 0x00, 0x00,
+  0x20, 0xd4, 0x83, 0x50, 0x00, 0x00,
+  0x88, 0x4a, 0x21, 0x20, 0x00, 0x00,
+  0xc0, 0xd7, 0x03, 0x50, 0x00, 0x00,
+  0x1d, 0x40, 0x75, 0x00, 0x00, 0x00,
+  0xd4, 0x0b, 0x50, 0x20, 0x00, 0x00,
+  0x02, 0x60, 0x09, 0x80, 0x00, 0x00,
+  0x04, 0x28, 0x10, 0xa0, 0x00, 0x00,
+  0x20, 0x98, 0x82, 0x60, 0x00, 0x00,
+  0x40, 0x45, 0x01, 0x10, 0x00, 0x00,
+  0x08, 0x84, 0x22, 0x10, 0x00, 0x00,
+  0x68, 0x01, 0xa0, 0x00, 0x00, 0x00,
+  0x23, 0x10, 0x8c, 0x40, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_2[12] = {
+  0xec, 0xeb, 0xb3, 0xa0, 0x00, 0x00,
+  0x3b, 0x9c, 0xee, 0x70, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_20[120] = {
+  0xc0, 0xd7, 0x03, 0x50, 0x00, 0x00,
+  0x1d, 0x40, 0x75, 0x00, 0x00, 0x00,
+  0xd4, 0x0b, 0x50, 0x20, 0x00, 0x00,
+  0x02, 0x60, 0x09, 0x80, 0x00, 0x00,
+  0x04, 0x28, 0x10, 0xa0, 0x00, 0x00,
+  0x20, 0x98, 0x82, 0x60, 0x00, 0x00,
+  0x40, 0x45, 0x01, 0x10, 0x00, 0x00,
+  0x08, 0x84, 0x22, 0x10, 0x00, 0x00,
+  0x68, 0x01, 0xa0, 0x00, 0x00, 0x00,
+  0x23, 0x10, 0x8c, 0x40, 0x00, 0x00,
+  0xa2, 0x56, 0x89, 0x50, 0x00, 0x00,
+  0x34, 0x60, 0xd1, 0x80, 0x00, 0x00,
+  0x4a, 0x25, 0x28, 0x90, 0x00, 0x00,
+  0x20, 0xa8, 0x82, 0xa0, 0x00, 0x00,
+  0x11, 0x84, 0x46, 0x10, 0x00, 0x00,
+  0x49, 0x09, 0x24, 0x20, 0x00, 0x00,
+  0x86, 0x0e, 0x18, 0x30, 0x00, 0x00,
+  0x20, 0xd4, 0x83, 0x50, 0x00, 0x00,
+  0x88, 0x4a, 0x21, 0x20, 0x00, 0x00,
+  0xea, 0x1b, 0x3a, 0x10, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_21[126] = {
+  0xc0, 0xd7, 0x03, 0x50, 0x00, 0x00,
+  0x1d, 0x40, 0x75, 0x00, 0x00, 0x00,
+  0xd4, 0x0b, 0x50, 0x20, 0x00, 0x00,
+  0x02, 0x60, 0x09, 0x80, 0x00, 0x00,
+  0x04, 0x28, 0x10, 0xa0, 0x00, 0x00,
+  0x20, 0x98, 0x82, 0x60, 0x00, 0x00,
+  0x40, 0x45, 0x01, 0x10, 0x00, 0x00,
+  0x08, 0x84, 0x22, 0x10, 0x00, 0x00,
+  0x68, 0x01, 0xa0, 0x00, 0x00, 0x00,
+  0x23, 0x10, 0x8c, 0x40, 0x00, 0x00,
+  0x62, 0xd1, 0x8b, 0x40, 0x00, 0x00,
+  0x35, 0x20, 0xd4, 0x80, 0x00, 0x00,
+  0x14, 0x14, 0x50, 0x50, 0x00, 0x00,
+  0xc5, 0x0b, 0x14, 0x20, 0x00, 0x00,
+  0x22, 0x0c, 0x88, 0x30, 0x00, 0x00,
+  0x88, 0xba, 0x22, 0xe0, 0x00, 0x00,
+  0x42, 0x55, 0x09, 0x50, 0x00, 0x00,
+  0x28, 0xa4, 0xa2, 0x90, 0x00, 0x00,
+  0x94, 0x22, 0x50, 0x80, 0x00, 0x00,
+  0x1b, 0x04, 0x6c, 0x10, 0x00, 0x00,
+  0x22, 0xc0, 0x8b, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_22[132] = {
+  0x62, 0xd1, 0x8b, 0x40, 0x00, 0x00,
+  0x35, 0x20, 0xd4, 0x80, 0x00, 0x00,
+  0x14, 0x14, 0x50, 0x50, 0x00, 0x00,
+  0xc5, 0x0b, 0x14, 0x20, 0x00, 0x00,
+  0x22, 0x0c, 0x88, 0x30, 0x00, 0x00,
+  0x88, 0xba, 0x22, 0xe0, 0x00, 0x00,
+  0x42, 0x55, 0x09, 0x50, 0x00, 0x00,
+  0x28, 0xa4, 0xa2, 0x90, 0x00, 0x00,
+  0x94, 0x22, 0x50, 0x80, 0x00, 0x00,
+  0x1b, 0x04, 0x6c, 0x10, 0x00, 0x00,
+  0x22, 0xc0, 0x8b, 0x00, 0x00, 0x00,
+  0xc0, 0xd7, 0x03, 0x50, 0x00, 0x00,
+  0x1d, 0x40, 0x75, 0x00, 0x00, 0x00,
+  0xd4, 0x0b, 0x50, 0x20, 0x00, 0x00,
+  0x02, 0x60, 0x09, 0x80, 0x00, 0x00,
+  0x04, 0x28, 0x10, 0xa0, 0x00, 0x00,
+  0x20, 0x98, 0x82, 0x60, 0x00, 0x00,
+  0x40, 0x45, 0x01, 0x10, 0x00, 0x00,
+  0x08, 0x84, 0x22, 0x10, 0x00, 0x00,
+  0x68, 0x01, 0xa0, 0x00, 0x00, 0x00,
+  0x23, 0x10, 0x8c, 0x40, 0x00, 0x00,
+  0x45, 0x05, 0x10, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_23[138] = {
+  0x62, 0xd1, 0x8b, 0x40, 0x00, 0x00,
+  0x35, 0x20, 0xd4, 0x80, 0x00, 0x00,
+  0x14, 0x14, 0x50, 0x50, 0x00, 0x00,
+  0xc5, 0x0b, 0x14, 0x20, 0x00, 0x00,
+  0x22, 0x0c, 0x88, 0x30, 0x00, 0x00,
+  0x88, 0xba, 0x22, 0xe0, 0x00, 0x00,
+  0x42, 0x55, 0x09, 0x50, 0x00, 0x00,
+  0x28, 0xa4, 0xa2, 0x90, 0x00, 0x00,
+  0x94, 0x22, 0x50, 0x80, 0x00, 0x00,
+  0x1b, 0x04, 0x6c, 0x10, 0x00, 0x00,
+  0x22, 0xc0, 0x8b, 0x00, 0x00, 0x00,
+  0x81, 0x06, 0x04, 0x10, 0x00, 0x00,
+  0x40, 0x69, 0x01, 0xa0, 0x00, 0x00,
+  0x90, 0x26, 0x40, 0x90, 0x00, 0x00,
+  0x28, 0x28, 0xa0, 0xa0, 0x00, 0x00,
+  0x52, 0x11, 0x48, 0x40, 0x00, 0x00,
+  0x41, 0x89, 0x06, 0x20, 0x00, 0x00,
+  0x09, 0x30, 0x24, 0xc0, 0x00, 0x00,
+  0x48, 0x45, 0x21, 0x10, 0x00, 0x00,
+  0x04, 0x44, 0x11, 0x10, 0x00, 0x00,
+  0x0e, 0x80, 0x3a, 0x00, 0x00, 0x00,
+  0xa5, 0x92, 0x96, 0x40, 0x00, 0x00,
+  0x12, 0x0c, 0x48, 0x30, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_24[144] = {
+  0x81, 0x06, 0x04, 0x10, 0x00, 0x00,
+  0x40, 0x69, 0x01, 0xa0, 0x00, 0x00,
+  0x90, 0x26, 0x40, 0x90, 0x00, 0x00,
+  0x28, 0x28, 0xa0, 0xa0, 0x00, 0x00,
+  0x52, 0x11, 0x48, 0x40, 0x00, 0x00,
+  0x41, 0x89, 0x06, 0x20, 0x00, 0x00,
+  0x09, 0x30, 0x24, 0xc0, 0x00, 0x00,
+  0x48, 0x45, 0x21, 0x10, 0x00, 0x00,
+  0x04, 0x44, 0x11, 0x10, 0x00, 0x00,
+  0x0e, 0x80, 0x3a, 0x00, 0x00, 0x00,
+  0xa5, 0x92, 0x96, 0x40, 0x00, 0x00,
+  0x12, 0x0c, 0x48, 0x30, 0x00, 0x00,
+  0x62, 0xd1, 0x8b, 0x40, 0x00, 0x00,
+  0x35, 0x20, 0xd4, 0x80, 0x00, 0x00,
+  0x14, 0x14, 0x50, 0x50, 0x00, 0x00,
+  0xc5, 0x0b, 0x14, 0x20, 0x00, 0x00,
+  0x22, 0x0c, 0x88, 0x30, 0x00, 0x00,
+  0x88, 0xba, 0x22, 0xe0, 0x00, 0x00,
+  0x42, 0x55, 0x09, 0x50, 0x00, 0x00,
+  0x28, 0xa4, 0xa2, 0x90, 0x00, 0x00,
+  0x94, 0x22, 0x50, 0x80, 0x00, 0x00,
+  0x1b, 0x04, 0x6c, 0x10, 0x00, 0x00,
+  0x22, 0xc0, 0x8b, 0x00, 0x00, 0x00,
+  0x6f, 0xd8, 0xee, 0xa0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_25[150] = {
+  0x81, 0x06, 0x04, 0x10, 0x00, 0x00,
+  0x40, 0x69, 0x01, 0xa0, 0x00, 0x00,
+  0x90, 0x26, 0x40, 0x90, 0x00, 0x00,
+  0x28, 0x28, 0xa0, 0xa0, 0x00, 0x00,
+  0x52, 0x11, 0x48, 0x40, 0x00, 0x00,
+  0x41, 0x89, 0x06, 0x20, 0x00, 0x00,
+  0x09, 0x30, 0x24, 0xc0, 0x00, 0x00,
+  0x48, 0x45, 0x21, 0x10, 0x00, 0x00,
+  0x04, 0x44, 0x11, 0x10, 0x00, 0x00,
+  0x0e, 0x80, 0x3a, 0x00, 0x00, 0x00,
+  0xa5, 0x92, 0x96, 0x40, 0x00, 0x00,
+  0x12, 0x0c, 0x48, 0x30, 0x00, 0x00,
+  0x62, 0x55, 0x89, 0x50, 0x00, 0x00,
+  0x34, 0x60, 0xd1, 0x80, 0x00, 0x00,
+  0x48, 0x05, 0x20, 0x10, 0x00, 0x00,
+  0x00, 0xac, 0x02, 0xb0, 0x00, 0x00,
+  0x28, 0x08, 0xa0, 0x20, 0x00, 0x00,
+  0x81, 0x0a, 0x04, 0x20, 0x00, 0x00,
+  0x23, 0x04, 0x8c, 0x10, 0x00, 0x00,
+  0x06, 0x80, 0x1a, 0x00, 0x00, 0x00,
+  0x80, 0x16, 0x00, 0x50, 0x00, 0x00,
+  0x30, 0x10, 0xc0, 0x40, 0x00, 0x00,
+  0x8c, 0x22, 0x30, 0x80, 0x00, 0x00,
+  0x54, 0x01, 0x50, 0x00, 0x00, 0x00,
+  0x80, 0xc2, 0x03, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_26[156] = {
+  0x62, 0x55, 0x89, 0x50, 0x00, 0x00,
+  0x34, 0x60, 0xd1, 0x80, 0x00, 0x00,
+  0x48, 0x05, 0x20, 0x10, 0x00, 0x00,
+  0x00, 0xac, 0x02, 0xb0, 0x00, 0x00,
+  0x28, 0x08, 0xa0, 0x20, 0x00, 0x00,
+  0x81, 0x0a, 0x04, 0x20, 0x00, 0x00,
+  0x23, 0x04, 0x8c, 0x10, 0x00, 0x00,
+  0x06, 0x80, 0x1a, 0x00, 0x00, 0x00,
+  0x80, 0x16, 0x00, 0x50, 0x00, 0x00,
+  0x30, 0x10, 0xc0, 0x40, 0x00, 0x00,
+  0x8c, 0x22, 0x30, 0x80, 0x00, 0x00,
+  0x54, 0x01, 0x50, 0x00, 0x00, 0x00,
+  0x80, 0xc2, 0x03, 0x00, 0x00, 0x00,
+  0x81, 0x06, 0x04, 0x10, 0x00, 0x00,
+  0x40, 0x69, 0x01, 0xa0, 0x00, 0x00,
+  0x90, 0x26, 0x40, 0x90, 0x00, 0x00,
+  0x28, 0x28, 0xa0, 0xa0, 0x00, 0x00,
+  0x52, 0x11, 0x48, 0x40, 0x00, 0x00,
+  0x41, 0x89, 0x06, 0x20, 0x00, 0x00,
+  0x09, 0x30, 0x24, 0xc0, 0x00, 0x00,
+  0x48, 0x45, 0x21, 0x10, 0x00, 0x00,
+  0x04, 0x44, 0x11, 0x10, 0x00, 0x00,
+  0x0e, 0x80, 0x3a, 0x00, 0x00, 0x00,
+  0xa5, 0x92, 0x96, 0x40, 0x00, 0x00,
+  0x12, 0x0c, 0x48, 0x30, 0x00, 0x00,
+  0xf1, 0x64, 0xbe, 0x40, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_27[162] = {
+  0x62, 0x55, 0x89, 0x50, 0x00, 0x00,
+  0x34, 0x60, 0xd1, 0x80, 0x00, 0x00,
+  0x48, 0x05, 0x20, 0x10, 0x00, 0x00,
+  0x00, 0xac, 0x02, 0xb0, 0x00, 0x00,
+  0x28, 0x08, 0xa0, 0x20, 0x00, 0x00,
+  0x81, 0x0a, 0x04, 0x20, 0x00, 0x00,
+  0x23, 0x04, 0x8c, 0x10, 0x00, 0x00,
+  0x06, 0x80, 0x1a, 0x00, 0x00, 0x00,
+  0x80, 0x16, 0x00, 0x50, 0x00, 0x00,
+  0x30, 0x10, 0xc0, 0x40, 0x00, 0x00,
+  0x8c, 0x22, 0x30, 0x80, 0x00, 0x00,
+  0x54, 0x01, 0x50, 0x00, 0x00, 0x00,
+  0x80, 0xc2, 0x03, 0x00, 0x00, 0x00,
+  0x40, 0x55, 0x01, 0x50, 0x00, 0x00,
+  0x15, 0x40, 0x55, 0x00, 0x00, 0x00,
+  0xc0, 0x07, 0x00, 0x10, 0x00, 0x00,
+  0x28, 0x10, 0xa0, 0x40, 0x00, 0x00,
+  0x05, 0x0c, 0x14, 0x30, 0x00, 0x00,
+  0x64, 0x81, 0x92, 0x00, 0x00, 0x00,
+  0x81, 0x82, 0x06, 0x00, 0x00, 0x00,
+  0x10, 0x98, 0x42, 0x60, 0x00, 0x00,
+  0x84, 0x22, 0x10, 0x80, 0x00, 0x00,
+  0x12, 0x30, 0x48, 0xc0, 0x00, 0x00,
+  0x62, 0x01, 0x88, 0x00, 0x00, 0x00,
+  0x28, 0x60, 0xa1, 0x80, 0x00, 0x00,
+  0x0e, 0x08, 0x38, 0x20, 0x00, 0x00,
+  0x10, 0x84, 0x42, 0x10, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_28[168] = {
+  0x40, 0x55, 0x01, 0x50, 0x00, 0x00,
+  0x15, 0x40, 0x55, 0x00, 0x00, 0x00,
+  0xc0, 0x07, 0x00, 0x10, 0x00, 0x00,
+  0x28, 0x10, 0xa0, 0x40, 0x00, 0x00,
+  0x05, 0x0c, 0x14, 0x30, 0x00, 0x00,
+  0x64, 0x81, 0x92, 0x00, 0x00, 0x00,
+  0x81, 0x82, 0x06, 0x00, 0x00, 0x00,
+  0x10, 0x98, 0x42, 0x60, 0x00, 0x00,
+  0x84, 0x22, 0x10, 0x80, 0x00, 0x00,
+  0x12, 0x30, 0x48, 0xc0, 0x00, 0x00,
+  0x62, 0x01, 0x88, 0x00, 0x00, 0x00,
+  0x28, 0x60, 0xa1, 0x80, 0x00, 0x00,
+  0x0e, 0x08, 0x38, 0x20, 0x00, 0x00,
+  0x10, 0x84, 0x42, 0x10, 0x00, 0x00,
+  0x62, 0x55, 0x89, 0x50, 0x00, 0x00,
+  0x34, 0x60, 0xd1, 0x80, 0x00, 0x00,
+  0x48, 0x05, 0x20, 0x10, 0x00, 0x00,
+  0x00, 0xac, 0x02, 0xb0, 0x00, 0x00,
+  0x28, 0x08, 0xa0, 0x20, 0x00, 0x00,
+  0x81, 0x0a, 0x04, 0x20, 0x00, 0x00,
+  0x23, 0x04, 0x8c, 0x10, 0x00, 0x00,
+  0x06, 0x80, 0x1a, 0x00, 0x00, 0x00,
+  0x80, 0x16, 0x00, 0x50, 0x00, 0x00,
+  0x30, 0x10, 0xc0, 0x40, 0x00, 0x00,
+  0x8c, 0x22, 0x30, 0x80, 0x00, 0x00,
+  0x54, 0x01, 0x50, 0x00, 0x00, 0x00,
+  0x80, 0xc2, 0x03, 0x00, 0x00, 0x00,
+  0x36, 0x4f, 0x1f, 0xb0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_3[18] = {
+  0xac, 0xda, 0xb3, 0x60, 0x00, 0x00,
+  0x55, 0x6d, 0x55, 0xb0, 0x00, 0x00,
+  0x27, 0xb4, 0x9e, 0xd0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_4[24] = {
+  0x2c, 0xd8, 0xb3, 0x60, 0x00, 0x00,
+  0x93, 0x6a, 0x4d, 0xa0, 0x00, 0x00,
+  0x1a, 0xb4, 0x6a, 0xd0, 0x00, 0x00,
+  0x47, 0x2d, 0x1c, 0xb0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_5[30] = {
+  0x64, 0xd9, 0x93, 0x60, 0x00, 0x00,
+  0xa5, 0x6a, 0x95, 0xa0, 0x00, 0x00,
+  0x52, 0xb5, 0x4a, 0xd0, 0x00, 0x00,
+  0x1d, 0xa8, 0x76, 0xa0, 0x00, 0x00,
+  0x9c, 0x56, 0x71, 0x50, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_6[36] = {
+  0x4a, 0x55, 0x29, 0x50, 0x00, 0x00,
+  0x95, 0x4a, 0x55, 0x20, 0x00, 0x00,
+  0x14, 0xb4, 0x52, 0xd0, 0x00, 0x00,
+  0x51, 0xa9, 0x46, 0xa0, 0x00, 0x00,
+  0x22, 0x6c, 0x89, 0xb0, 0x00, 0x00,
+  0x88, 0x8e, 0x22, 0x30, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_7[42] = {
+  0x62, 0x55, 0x89, 0x50, 0x00, 0x00,
+  0xb9, 0x22, 0xe4, 0x80, 0x00, 0x00,
+  0x18, 0xb4, 0x62, 0xd0, 0x00, 0x00,
+  0x54, 0x99, 0x52, 0x60, 0x00, 0x00,
+  0x06, 0x6c, 0x19, 0xb0, 0x00, 0x00,
+  0x85, 0x56, 0x15, 0x50, 0x00, 0x00,
+  0xaa, 0x8a, 0xaa, 0x20, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_8[48] = {
+  0xc0, 0x17, 0x00, 0x50, 0x00, 0x00,
+  0x41, 0x61, 0x05, 0x80, 0x00, 0x00,
+  0x88, 0x32, 0x20, 0xc0, 0x00, 0x00,
+  0x20, 0xa4, 0x82, 0x90, 0x00, 0x00,
+  0x0a, 0x48, 0x29, 0x20, 0x00, 0x00,
+  0x04, 0x98, 0x12, 0x60, 0x00, 0x00,
+  0x94, 0x42, 0x51, 0x00, 0x00, 0x00,
+  0x72, 0x01, 0xc8, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom28_9[54] = {
+  0xa2, 0x56, 0x89, 0x50, 0x00, 0x00,
+  0x34, 0x60, 0xd1, 0x80, 0x00, 0x00,
+  0x4a, 0x25, 0x28, 0x90, 0x00, 0x00,
+  0x20, 0xa8, 0x82, 0xa0, 0x00, 0x00,
+  0x11, 0x84, 0x46, 0x10, 0x00, 0x00,
+  0x49, 0x09, 0x24, 0x20, 0x00, 0x00,
+  0x86, 0x0e, 0x18, 0x30, 0x00, 0x00,
+  0x20, 0xd4, 0x83, 0x50, 0x00, 0x00,
+  0x88, 0x4a, 0x21, 0x20, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_1[6] = {
+  0xff, 0xff, 0xff, 0xf8, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_10[60] = {
+  0xc0, 0xd7, 0x02, 0x80, 0x00, 0x00,
+  0x1d, 0x40, 0x55, 0x58, 0x00, 0x00,
+  0xd4, 0x09, 0xd1, 0x00, 0x00, 0x00,
+  0x02, 0x60, 0x02, 0x70, 0x00, 0x00,
+  0x04, 0x28, 0x04, 0xb0, 0x00, 0x00,
+  0x20, 0x99, 0x12, 0x48, 0x00, 0x00,
+  0x40, 0x46, 0x21, 0x40, 0x00, 0x00,
+  0x08, 0x84, 0x82, 0x90, 0x00, 0x00,
+  0x68, 0x02, 0xa8, 0x10, 0x00, 0x00,
+  0x23, 0x10, 0x09, 0x88, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_11[66] = {
+  0x62, 0xd1, 0x88, 0x88, 0x00, 0x00,
+  0x35, 0x23, 0xc4, 0x40, 0x00, 0x00,
+  0x14, 0x14, 0x40, 0x38, 0x00, 0x00,
+  0xc5, 0x08, 0x42, 0xc0, 0x00, 0x00,
+  0x22, 0x0c, 0x90, 0x90, 0x00, 0x00,
+  0x88, 0xb8, 0x04, 0x48, 0x00, 0x00,
+  0x42, 0x54, 0x03, 0x10, 0x00, 0x00,
+  0x28, 0xa4, 0x12, 0x88, 0x00, 0x00,
+  0x94, 0x20, 0x09, 0x60, 0x00, 0x00,
+  0x1b, 0x04, 0xac, 0x00, 0x00, 0x00,
+  0x22, 0xc2, 0x61, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_12[72] = {
+  0x81, 0x06, 0x22, 0x40, 0x00, 0x00,
+  0x40, 0x69, 0x01, 0x50, 0x00, 0x00,
+  0x90, 0x26, 0x09, 0x88, 0x00, 0x00,
+  0x28, 0x28, 0x86, 0x90, 0x00, 0x00,
+  0x52, 0x10, 0x41, 0x90, 0x00, 0x00,
+  0x41, 0x89, 0x10, 0x28, 0x00, 0x00,
+  0x09, 0x30, 0x43, 0x20, 0x00, 0x00,
+  0x48, 0x45, 0x34, 0xa8, 0x00, 0x00,
+  0x04, 0x44, 0xe0, 0x08, 0x00, 0x00,
+  0x0e, 0x80, 0x5d, 0x20, 0x00, 0x00,
+  0xa5, 0x92, 0x42, 0x10, 0x00, 0x00,
+  0x12, 0x0d, 0xc8, 0x50, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_13[78] = {
+  0x62, 0x55, 0x8a, 0x88, 0x00, 0x00,
+  0x34, 0x60, 0xd1, 0x10, 0x00, 0x00,
+  0x48, 0x05, 0x01, 0x28, 0x00, 0x00,
+  0x00, 0xaf, 0x10, 0x10, 0x00, 0x00,
+  0x28, 0x08, 0x21, 0x80, 0x00, 0x00,
+  0x81, 0x0a, 0x50, 0x48, 0x00, 0x00,
+  0x23, 0x06, 0x23, 0x00, 0x00, 0x00,
+  0x06, 0x80, 0x84, 0xc8, 0x00, 0x00,
+  0x80, 0x17, 0x05, 0x00, 0x00, 0x00,
+  0x30, 0x10, 0x41, 0xa0, 0x00, 0x00,
+  0x8c, 0x20, 0x1a, 0x40, 0x00, 0x00,
+  0x54, 0x01, 0x64, 0x00, 0x00, 0x00,
+  0x80, 0xc0, 0x28, 0x30, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_14[84] = {
+  0x40, 0x55, 0x02, 0x08, 0x00, 0x00,
+  0x15, 0x40, 0x55, 0x50, 0x00, 0x00,
+  0xc0, 0x06, 0x20, 0x48, 0x00, 0x00,
+  0x28, 0x13, 0x00, 0x40, 0x00, 0x00,
+  0x05, 0x0e, 0x02, 0x80, 0x00, 0x00,
+  0x64, 0x80, 0x04, 0x88, 0x00, 0x00,
+  0x81, 0x81, 0x00, 0xb0, 0x00, 0x00,
+  0x10, 0x98, 0x88, 0x08, 0x00, 0x00,
+  0x84, 0x22, 0x40, 0x10, 0x00, 0x00,
+  0x12, 0x30, 0x49, 0x00, 0x00, 0x00,
+  0x62, 0x01, 0x74, 0x00, 0x00, 0x00,
+  0x28, 0x60, 0x81, 0x50, 0x00, 0x00,
+  0x0e, 0x0a, 0x18, 0x20, 0x00, 0x00,
+  0x10, 0x84, 0xa2, 0x20, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_15[90] = {
+  0x62, 0x55, 0x8a, 0x88, 0x00, 0x00,
+  0xb9, 0x22, 0xc4, 0x50, 0x00, 0x00,
+  0x18, 0xb4, 0x61, 0xa8, 0x00, 0x00,
+  0x54, 0x99, 0x13, 0x50, 0x00, 0x00,
+  0x06, 0x6c, 0x4d, 0x90, 0x00, 0x00,
+  0x85, 0x55, 0x24, 0x68, 0x00, 0x00,
+  0xaa, 0x8a, 0x1a, 0x30, 0x00, 0x00,
+  0xc0, 0x16, 0x40, 0x88, 0x00, 0x00,
+  0x41, 0x60, 0x25, 0x40, 0x00, 0x00,
+  0x88, 0x30, 0x01, 0xa8, 0x00, 0x00,
+  0x20, 0xa4, 0x80, 0xd0, 0x00, 0x00,
+  0x0a, 0x48, 0x51, 0x10, 0x00, 0x00,
+  0x04, 0x9b, 0x08, 0x40, 0x00, 0x00,
+  0x94, 0x40, 0x03, 0x18, 0x00, 0x00,
+  0x72, 0x01, 0x96, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_16[96] = {
+  0xc0, 0x16, 0x40, 0x88, 0x00, 0x00,
+  0x41, 0x60, 0x25, 0x40, 0x00, 0x00,
+  0x88, 0x30, 0x01, 0xa8, 0x00, 0x00,
+  0x20, 0xa4, 0x80, 0xd0, 0x00, 0x00,
+  0x0a, 0x48, 0x51, 0x10, 0x00, 0x00,
+  0x04, 0x9b, 0x08, 0x40, 0x00, 0x00,
+  0x94, 0x40, 0x03, 0x18, 0x00, 0x00,
+  0x72, 0x01, 0x96, 0x00, 0x00, 0x00,
+  0x62, 0x55, 0x8a, 0x88, 0x00, 0x00,
+  0xb9, 0x22, 0xc4, 0x50, 0x00, 0x00,
+  0x18, 0xb4, 0x61, 0xa8, 0x00, 0x00,
+  0x54, 0x99, 0x13, 0x50, 0x00, 0x00,
+  0x06, 0x6c, 0x4d, 0x90, 0x00, 0x00,
+  0x85, 0x55, 0x24, 0x68, 0x00, 0x00,
+  0xaa, 0x8a, 0x1a, 0x30, 0x00, 0x00,
+  0x0d, 0x2c, 0xf2, 0x20, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_17[102] = {
+  0xc0, 0x16, 0x40, 0x88, 0x00, 0x00,
+  0x41, 0x60, 0x25, 0x40, 0x00, 0x00,
+  0x88, 0x30, 0x01, 0xa8, 0x00, 0x00,
+  0x20, 0xa4, 0x80, 0xd0, 0x00, 0x00,
+  0x0a, 0x48, 0x51, 0x10, 0x00, 0x00,
+  0x04, 0x9b, 0x08, 0x40, 0x00, 0x00,
+  0x94, 0x40, 0x03, 0x18, 0x00, 0x00,
+  0x72, 0x01, 0x96, 0x00, 0x00, 0x00,
+  0xa2, 0x55, 0x88, 0x88, 0x00, 0x00,
+  0x34, 0x60, 0x91, 0x10, 0x00, 0x00,
+  0x4a, 0x27, 0x01, 0x40, 0x00, 0x00,
+  0x20, 0xa8, 0x0c, 0x30, 0x00, 0x00,
+  0x11, 0x84, 0x58, 0xa0, 0x00, 0x00,
+  0x49, 0x0a, 0x24, 0x00, 0x00, 0x00,
+  0x86, 0x0e, 0x0a, 0x40, 0x00, 0x00,
+  0x20, 0xd4, 0x22, 0x90, 0x00, 0x00,
+  0x88, 0x4a, 0x41, 0x20, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_18[108] = {
+  0xa2, 0x55, 0x88, 0x88, 0x00, 0x00,
+  0x34, 0x60, 0x91, 0x10, 0x00, 0x00,
+  0x4a, 0x27, 0x01, 0x40, 0x00, 0x00,
+  0x20, 0xa8, 0x0c, 0x30, 0x00, 0x00,
+  0x11, 0x84, 0x58, 0xa0, 0x00, 0x00,
+  0x49, 0x0a, 0x24, 0x00, 0x00, 0x00,
+  0x86, 0x0e, 0x0a, 0x40, 0x00, 0x00,
+  0x20, 0xd4, 0x22, 0x90, 0x00, 0x00,
+  0x88, 0x4a, 0x41, 0x20, 0x00, 0x00,
+  0xc0, 0x16, 0x40, 0x88, 0x00, 0x00,
+  0x41, 0x60, 0x25, 0x40, 0x00, 0x00,
+  0x88, 0x30, 0x01, 0xa8, 0x00, 0x00,
+  0x20, 0xa4, 0x80, 0xd0, 0x00, 0x00,
+  0x0a, 0x48, 0x51, 0x10, 0x00, 0x00,
+  0x04, 0x9b, 0x08, 0x40, 0x00, 0x00,
+  0x94, 0x40, 0x03, 0x18, 0x00, 0x00,
+  0x72, 0x01, 0x96, 0x00, 0x00, 0x00,
+  0x71, 0x36, 0xf2, 0xb0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_19[114] = {
+  0xa2, 0x55, 0x88, 0x88, 0x00, 0x00,
+  0x34, 0x60, 0x91, 0x10, 0x00, 0x00,
+  0x4a, 0x27, 0x01, 0x40, 0x00, 0x00,
+  0x20, 0xa8, 0x0c, 0x30, 0x00, 0x00,
+  0x11, 0x84, 0x58, 0xa0, 0x00, 0x00,
+  0x49, 0x0a, 0x24, 0x00, 0x00, 0x00,
+  0x86, 0x0e, 0x0a, 0x40, 0x00, 0x00,
+  0x20, 0xd4, 0x22, 0x90, 0x00, 0x00,
+  0x88, 0x4a, 0x41, 0x20, 0x00, 0x00,
+  0xc0, 0xd7, 0x02, 0x80, 0x00, 0x00,
+  0x1d, 0x40, 0x55, 0x58, 0x00, 0x00,
+  0xd4, 0x09, 0xd1, 0x00, 0x00, 0x00,
+  0x02, 0x60, 0x02, 0x70, 0x00, 0x00,
+  0x04, 0x28, 0x04, 0xb0, 0x00, 0x00,
+  0x20, 0x99, 0x12, 0x48, 0x00, 0x00,
+  0x40, 0x46, 0x21, 0x40, 0x00, 0x00,
+  0x08, 0x84, 0x82, 0x90, 0x00, 0x00,
+  0x68, 0x02, 0xa8, 0x10, 0x00, 0x00,
+  0x23, 0x10, 0x09, 0x88, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_2[12] = {
+  0xec, 0xeb, 0xb3, 0xa8, 0x00, 0x00,
+  0x3b, 0x9e, 0xee, 0x70, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_20[120] = {
+  0xc0, 0xd7, 0x02, 0x80, 0x00, 0x00,
+  0x1d, 0x40, 0x55, 0x58, 0x00, 0x00,
+  0xd4, 0x09, 0xd1, 0x00, 0x00, 0x00,
+  0x02, 0x60, 0x02, 0x70, 0x00, 0x00,
+  0x04, 0x28, 0x04, 0xb0, 0x00, 0x00,
+  0x20, 0x99, 0x12, 0x48, 0x00, 0x00,
+  0x40, 0x46, 0x21, 0x40, 0x00, 0x00,
+  0x08, 0x84, 0x82, 0x90, 0x00, 0x00,
+  0x68, 0x02, 0xa8, 0x10, 0x00, 0x00,
+  0x23, 0x10, 0x09, 0x88, 0x00, 0x00,
+  0xa2, 0x55, 0x88, 0x88, 0x00, 0x00,
+  0x34, 0x60, 0x91, 0x10, 0x00, 0x00,
+  0x4a, 0x27, 0x01, 0x40, 0x00, 0x00,
+  0x20, 0xa8, 0x0c, 0x30, 0x00, 0x00,
+  0x11, 0x84, 0x58, 0xa0, 0x00, 0x00,
+  0x49, 0x0a, 0x24, 0x00, 0x00, 0x00,
+  0x86, 0x0e, 0x0a, 0x40, 0x00, 0x00,
+  0x20, 0xd4, 0x22, 0x90, 0x00, 0x00,
+  0x88, 0x4a, 0x41, 0x20, 0x00, 0x00,
+  0xe7, 0xec, 0xdc, 0xb0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_21[126] = {
+  0xc0, 0xd7, 0x02, 0x80, 0x00, 0x00,
+  0x1d, 0x40, 0x55, 0x58, 0x00, 0x00,
+  0xd4, 0x09, 0xd1, 0x00, 0x00, 0x00,
+  0x02, 0x60, 0x02, 0x70, 0x00, 0x00,
+  0x04, 0x28, 0x04, 0xb0, 0x00, 0x00,
+  0x20, 0x99, 0x12, 0x48, 0x00, 0x00,
+  0x40, 0x46, 0x21, 0x40, 0x00, 0x00,
+  0x08, 0x84, 0x82, 0x90, 0x00, 0x00,
+  0x68, 0x02, 0xa8, 0x10, 0x00, 0x00,
+  0x23, 0x10, 0x09, 0x88, 0x00, 0x00,
+  0x62, 0xd1, 0x88, 0x88, 0x00, 0x00,
+  0x35, 0x23, 0xc4, 0x40, 0x00, 0x00,
+  0x14, 0x14, 0x40, 0x38, 0x00, 0x00,
+  0xc5, 0x08, 0x42, 0xc0, 0x00, 0x00,
+  0x22, 0x0c, 0x90, 0x90, 0x00, 0x00,
+  0x88, 0xb8, 0x04, 0x48, 0x00, 0x00,
+  0x42, 0x54, 0x03, 0x10, 0x00, 0x00,
+  0x28, 0xa4, 0x12, 0x88, 0x00, 0x00,
+  0x94, 0x20, 0x09, 0x60, 0x00, 0x00,
+  0x1b, 0x04, 0xac, 0x00, 0x00, 0x00,
+  0x22, 0xc2, 0x61, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_22[132] = {
+  0x62, 0xd1, 0x88, 0x88, 0x00, 0x00,
+  0x35, 0x23, 0xc4, 0x40, 0x00, 0x00,
+  0x14, 0x14, 0x40, 0x38, 0x00, 0x00,
+  0xc5, 0x08, 0x42, 0xc0, 0x00, 0x00,
+  0x22, 0x0c, 0x90, 0x90, 0x00, 0x00,
+  0x88, 0xb8, 0x04, 0x48, 0x00, 0x00,
+  0x42, 0x54, 0x03, 0x10, 0x00, 0x00,
+  0x28, 0xa4, 0x12, 0x88, 0x00, 0x00,
+  0x94, 0x20, 0x09, 0x60, 0x00, 0x00,
+  0x1b, 0x04, 0xac, 0x00, 0x00, 0x00,
+  0x22, 0xc2, 0x61, 0x00, 0x00, 0x00,
+  0xc0, 0xd7, 0x02, 0x80, 0x00, 0x00,
+  0x1d, 0x40, 0x55, 0x58, 0x00, 0x00,
+  0xd4, 0x09, 0xd1, 0x00, 0x00, 0x00,
+  0x02, 0x60, 0x02, 0x70, 0x00, 0x00,
+  0x04, 0x28, 0x04, 0xb0, 0x00, 0x00,
+  0x20, 0x99, 0x12, 0x48, 0x00, 0x00,
+  0x40, 0x46, 0x21, 0x40, 0x00, 0x00,
+  0x08, 0x84, 0x82, 0x90, 0x00, 0x00,
+  0x68, 0x02, 0xa8, 0x10, 0x00, 0x00,
+  0x23, 0x10, 0x09, 0x88, 0x00, 0x00,
+  0x1c, 0x90, 0xa9, 0xa0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_23[138] = {
+  0x62, 0xd1, 0x88, 0x88, 0x00, 0x00,
+  0x35, 0x23, 0xc4, 0x40, 0x00, 0x00,
+  0x14, 0x14, 0x40, 0x38, 0x00, 0x00,
+  0xc5, 0x08, 0x42, 0xc0, 0x00, 0x00,
+  0x22, 0x0c, 0x90, 0x90, 0x00, 0x00,
+  0x88, 0xb8, 0x04, 0x48, 0x00, 0x00,
+  0x42, 0x54, 0x03, 0x10, 0x00, 0x00,
+  0x28, 0xa4, 0x12, 0x88, 0x00, 0x00,
+  0x94, 0x20, 0x09, 0x60, 0x00, 0x00,
+  0x1b, 0x04, 0xac, 0x00, 0x00, 0x00,
+  0x22, 0xc2, 0x61, 0x00, 0x00, 0x00,
+  0x81, 0x06, 0x22, 0x40, 0x00, 0x00,
+  0x40, 0x69, 0x01, 0x50, 0x00, 0x00,
+  0x90, 0x26, 0x09, 0x88, 0x00, 0x00,
+  0x28, 0x28, 0x86, 0x90, 0x00, 0x00,
+  0x52, 0x10, 0x41, 0x90, 0x00, 0x00,
+  0x41, 0x89, 0x10, 0x28, 0x00, 0x00,
+  0x09, 0x30, 0x43, 0x20, 0x00, 0x00,
+  0x48, 0x45, 0x34, 0xa8, 0x00, 0x00,
+  0x04, 0x44, 0xe0, 0x08, 0x00, 0x00,
+  0x0e, 0x80, 0x5d, 0x20, 0x00, 0x00,
+  0xa5, 0x92, 0x42, 0x10, 0x00, 0x00,
+  0x12, 0x0d, 0xc8, 0x50, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_24[144] = {
+  0x81, 0x06, 0x22, 0x40, 0x00, 0x00,
+  0x40, 0x69, 0x01, 0x50, 0x00, 0x00,
+  0x90, 0x26, 0x09, 0x88, 0x00, 0x00,
+  0x28, 0x28, 0x86, 0x90, 0x00, 0x00,
+  0x52, 0x10, 0x41, 0x90, 0x00, 0x00,
+  0x41, 0x89, 0x10, 0x28, 0x00, 0x00,
+  0x09, 0x30, 0x43, 0x20, 0x00, 0x00,
+  0x48, 0x45, 0x34, 0xa8, 0x00, 0x00,
+  0x04, 0x44, 0xe0, 0x08, 0x00, 0x00,
+  0x0e, 0x80, 0x5d, 0x20, 0x00, 0x00,
+  0xa5, 0x92, 0x42, 0x10, 0x00, 0x00,
+  0x12, 0x0d, 0xc8, 0x50, 0x00, 0x00,
+  0x62, 0xd1, 0x88, 0x88, 0x00, 0x00,
+  0x35, 0x23, 0xc4, 0x40, 0x00, 0x00,
+  0x14, 0x14, 0x40, 0x38, 0x00, 0x00,
+  0xc5, 0x08, 0x42, 0xc0, 0x00, 0x00,
+  0x22, 0x0c, 0x90, 0x90, 0x00, 0x00,
+  0x88, 0xb8, 0x04, 0x48, 0x00, 0x00,
+  0x42, 0x54, 0x03, 0x10, 0x00, 0x00,
+  0x28, 0xa4, 0x12, 0x88, 0x00, 0x00,
+  0x94, 0x20, 0x09, 0x60, 0x00, 0x00,
+  0x1b, 0x04, 0xac, 0x00, 0x00, 0x00,
+  0x22, 0xc2, 0x61, 0x00, 0x00, 0x00,
+  0xbd, 0x86, 0x97, 0xc0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_25[150] = {
+  0x81, 0x06, 0x22, 0x40, 0x00, 0x00,
+  0x40, 0x69, 0x01, 0x50, 0x00, 0x00,
+  0x90, 0x26, 0x09, 0x88, 0x00, 0x00,
+  0x28, 0x28, 0x86, 0x90, 0x00, 0x00,
+  0x52, 0x10, 0x41, 0x90, 0x00, 0x00,
+  0x41, 0x89, 0x10, 0x28, 0x00, 0x00,
+  0x09, 0x30, 0x43, 0x20, 0x00, 0x00,
+  0x48, 0x45, 0x34, 0xa8, 0x00, 0x00,
+  0x04, 0x44, 0xe0, 0x08, 0x00, 0x00,
+  0x0e, 0x80, 0x5d, 0x20, 0x00, 0x00,
+  0xa5, 0x92, 0x42, 0x10, 0x00, 0x00,
+  0x12, 0x0d, 0xc8, 0x50, 0x00, 0x00,
+  0x62, 0x55, 0x8a, 0x88, 0x00, 0x00,
+  0x34, 0x60, 0xd1, 0x10, 0x00, 0x00,
+  0x48, 0x05, 0x01, 0x28, 0x00, 0x00,
+  0x00, 0xaf, 0x10, 0x10, 0x00, 0x00,
+  0x28, 0x08, 0x21, 0x80, 0x00, 0x00,
+  0x81, 0x0a, 0x50, 0x48, 0x00, 0x00,
+  0x23, 0x06, 0x23, 0x00, 0x00, 0x00,
+  0x06, 0x80, 0x84, 0xc8, 0x00, 0x00,
+  0x80, 0x17, 0x05, 0x00, 0x00, 0x00,
+  0x30, 0x10, 0x41, 0xa0, 0x00, 0x00,
+  0x8c, 0x20, 0x1a, 0x40, 0x00, 0x00,
+  0x54, 0x01, 0x64, 0x00, 0x00, 0x00,
+  0x80, 0xc0, 0x28, 0x30, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_26[156] = {
+  0x62, 0x55, 0x8a, 0x88, 0x00, 0x00,
+  0x34, 0x60, 0xd1, 0x10, 0x00, 0x00,
+  0x48, 0x05, 0x01, 0x28, 0x00, 0x00,
+  0x00, 0xaf, 0x10, 0x10, 0x00, 0x00,
+  0x28, 0x08, 0x21, 0x80, 0x00, 0x00,
+  0x81, 0x0a, 0x50, 0x48, 0x00, 0x00,
+  0x23, 0x06, 0x23, 0x00, 0x00, 0x00,
+  0x06, 0x80, 0x84, 0xc8, 0x00, 0x00,
+  0x80, 0x17, 0x05, 0x00, 0x00, 0x00,
+  0x30, 0x10, 0x41, 0xa0, 0x00, 0x00,
+  0x8c, 0x20, 0x1a, 0x40, 0x00, 0x00,
+  0x54, 0x01, 0x64, 0x00, 0x00, 0x00,
+  0x80, 0xc0, 0x28, 0x30, 0x00, 0x00,
+  0x81, 0x06, 0x22, 0x40, 0x00, 0x00,
+  0x40, 0x69, 0x01, 0x50, 0x00, 0x00,
+  0x90, 0x26, 0x09, 0x88, 0x00, 0x00,
+  0x28, 0x28, 0x86, 0x90, 0x00, 0x00,
+  0x52, 0x10, 0x41, 0x90, 0x00, 0x00,
+  0x41, 0x89, 0x10, 0x28, 0x00, 0x00,
+  0x09, 0x30, 0x43, 0x20, 0x00, 0x00,
+  0x48, 0x45, 0x34, 0xa8, 0x00, 0x00,
+  0x04, 0x44, 0xe0, 0x08, 0x00, 0x00,
+  0x0e, 0x80, 0x5d, 0x20, 0x00, 0x00,
+  0xa5, 0x92, 0x42, 0x10, 0x00, 0x00,
+  0x12, 0x0d, 0xc8, 0x50, 0x00, 0x00,
+  0xb5, 0x4c, 0xa9, 0x70, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_27[162] = {
+  0x62, 0x55, 0x8a, 0x88, 0x00, 0x00,
+  0x34, 0x60, 0xd1, 0x10, 0x00, 0x00,
+  0x48, 0x05, 0x01, 0x28, 0x00, 0x00,
+  0x00, 0xaf, 0x10, 0x10, 0x00, 0x00,
+  0x28, 0x08, 0x21, 0x80, 0x00, 0x00,
+  0x81, 0x0a, 0x50, 0x48, 0x00, 0x00,
+  0x23, 0x06, 0x23, 0x00, 0x00, 0x00,
+  0x06, 0x80, 0x84, 0xc8, 0x00, 0x00,
+  0x80, 0x17, 0x05, 0x00, 0x00, 0x00,
+  0x30, 0x10, 0x41, 0xa0, 0x00, 0x00,
+  0x8c, 0x20, 0x1a, 0x40, 0x00, 0x00,
+  0x54, 0x01, 0x64, 0x00, 0x00, 0x00,
+  0x80, 0xc0, 0x28, 0x30, 0x00, 0x00,
+  0x40, 0x55, 0x02, 0x08, 0x00, 0x00,
+  0x15, 0x40, 0x55, 0x50, 0x00, 0x00,
+  0xc0, 0x06, 0x20, 0x48, 0x00, 0x00,
+  0x28, 0x13, 0x00, 0x40, 0x00, 0x00,
+  0x05, 0x0e, 0x02, 0x80, 0x00, 0x00,
+  0x64, 0x80, 0x04, 0x88, 0x00, 0x00,
+  0x81, 0x81, 0x00, 0xb0, 0x00, 0x00,
+  0x10, 0x98, 0x88, 0x08, 0x00, 0x00,
+  0x84, 0x22, 0x40, 0x10, 0x00, 0x00,
+  0x12, 0x30, 0x49, 0x00, 0x00, 0x00,
+  0x62, 0x01, 0x74, 0x00, 0x00, 0x00,
+  0x28, 0x60, 0x81, 0x50, 0x00, 0x00,
+  0x0e, 0x0a, 0x18, 0x20, 0x00, 0x00,
+  0x10, 0x84, 0xa2, 0x20, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_28[168] = {
+  0x40, 0x55, 0x02, 0x08, 0x00, 0x00,
+  0x15, 0x40, 0x55, 0x50, 0x00, 0x00,
+  0xc0, 0x06, 0x20, 0x48, 0x00, 0x00,
+  0x28, 0x13, 0x00, 0x40, 0x00, 0x00,
+  0x05, 0x0e, 0x02, 0x80, 0x00, 0x00,
+  0x64, 0x80, 0x04, 0x88, 0x00, 0x00,
+  0x81, 0x81, 0x00, 0xb0, 0x00, 0x00,
+  0x10, 0x98, 0x88, 0x08, 0x00, 0x00,
+  0x84, 0x22, 0x40, 0x10, 0x00, 0x00,
+  0x12, 0x30, 0x49, 0x00, 0x00, 0x00,
+  0x62, 0x01, 0x74, 0x00, 0x00, 0x00,
+  0x28, 0x60, 0x81, 0x50, 0x00, 0x00,
+  0x0e, 0x0a, 0x18, 0x20, 0x00, 0x00,
+  0x10, 0x84, 0xa2, 0x20, 0x00, 0x00,
+  0x62, 0x55, 0x8a, 0x88, 0x00, 0x00,
+  0x34, 0x60, 0xd1, 0x10, 0x00, 0x00,
+  0x48, 0x05, 0x01, 0x28, 0x00, 0x00,
+  0x00, 0xaf, 0x10, 0x10, 0x00, 0x00,
+  0x28, 0x08, 0x21, 0x80, 0x00, 0x00,
+  0x81, 0x0a, 0x50, 0x48, 0x00, 0x00,
+  0x23, 0x06, 0x23, 0x00, 0x00, 0x00,
+  0x06, 0x80, 0x84, 0xc8, 0x00, 0x00,
+  0x80, 0x17, 0x05, 0x00, 0x00, 0x00,
+  0x30, 0x10, 0x41, 0xa0, 0x00, 0x00,
+  0x8c, 0x20, 0x1a, 0x40, 0x00, 0x00,
+  0x54, 0x01, 0x64, 0x00, 0x00, 0x00,
+  0x80, 0xc0, 0x28, 0x30, 0x00, 0x00,
+  0xbe, 0x1f, 0x99, 0xb0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_29[174] = {
+  0x40, 0x55, 0x02, 0x08, 0x00, 0x00,
+  0x15, 0x40, 0x55, 0x50, 0x00, 0x00,
+  0xc0, 0x06, 0x20, 0x48, 0x00, 0x00,
+  0x28, 0x13, 0x00, 0x40, 0x00, 0x00,
+  0x05, 0x0e, 0x02, 0x80, 0x00, 0x00,
+  0x64, 0x80, 0x04, 0x88, 0x00, 0x00,
+  0x81, 0x81, 0x00, 0xb0, 0x00, 0x00,
+  0x10, 0x98, 0x88, 0x08, 0x00, 0x00,
+  0x84, 0x22, 0x40, 0x10, 0x00, 0x00,
+  0x12, 0x30, 0x49, 0x00, 0x00, 0x00,
+  0x62, 0x01, 0x74, 0x00, 0x00, 0x00,
+  0x28, 0x60, 0x81, 0x50, 0x00, 0x00,
+  0x0e, 0x0a, 0x18, 0x20, 0x00, 0x00,
+  0x10, 0x84, 0xa2, 0x20, 0x00, 0x00,
+  0x40, 0x55, 0x88, 0x88, 0x00, 0x00,
+  0x15, 0x40, 0xc4, 0x40, 0x00, 0x00,
+  0xc0, 0x05, 0x60, 0x00, 0x00, 0x00,
+  0x28, 0x10, 0x04, 0x48, 0x00, 0x00,
+  0x05, 0x0e, 0x20, 0x80, 0x00, 0x00,
+  0x64, 0x81, 0x10, 0x08, 0x00, 0x00,
+  0x81, 0x80, 0xa4, 0x10, 0x00, 0x00,
+  0x10, 0x9a, 0x0a, 0x80, 0x00, 0x00,
+  0x84, 0x20, 0x28, 0x68, 0x00, 0x00,
+  0x12, 0x30, 0x47, 0x80, 0x00, 0x00,
+  0x62, 0x02, 0x10, 0x10, 0x00, 0x00,
+  0x28, 0x62, 0x19, 0x00, 0x00, 0x00,
+  0x0e, 0x08, 0x02, 0x18, 0x00, 0x00,
+  0x10, 0x85, 0x11, 0x20, 0x00, 0x00,
+  0x29, 0x50, 0x42, 0x60, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_3[18] = {
+  0xac, 0xda, 0xb2, 0x48, 0x00, 0x00,
+  0x55, 0x6d, 0x55, 0x28, 0x00, 0x00,
+  0x27, 0xb5, 0x0c, 0xd8, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_4[24] = {
+  0x2c, 0xd8, 0x96, 0xa8, 0x00, 0x00,
+  0x93, 0x6a, 0x55, 0x50, 0x00, 0x00,
+  0x1a, 0xb4, 0x69, 0xa8, 0x00, 0x00,
+  0x47, 0x2d, 0x0f, 0x50, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_5[30] = {
+  0x64, 0xd9, 0x92, 0x88, 0x00, 0x00,
+  0xa5, 0x68, 0x95, 0x50, 0x00, 0x00,
+  0x52, 0xb5, 0x25, 0xa0, 0x00, 0x00,
+  0x1d, 0xa9, 0x4e, 0x40, 0x00, 0x00,
+  0x9c, 0x56, 0x38, 0xc0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_6[36] = {
+  0x4a, 0x55, 0x8a, 0x28, 0x00, 0x00,
+  0x95, 0x48, 0x55, 0x50, 0x00, 0x00,
+  0x14, 0xb5, 0x31, 0x18, 0x00, 0x00,
+  0x51, 0xa9, 0x4a, 0x50, 0x00, 0x00,
+  0x22, 0x6c, 0x8d, 0x90, 0x00, 0x00,
+  0x88, 0x8e, 0x29, 0x60, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_7[42] = {
+  0x62, 0x55, 0x8a, 0x88, 0x00, 0x00,
+  0xb9, 0x22, 0xc4, 0x50, 0x00, 0x00,
+  0x18, 0xb4, 0x61, 0xa8, 0x00, 0x00,
+  0x54, 0x99, 0x13, 0x50, 0x00, 0x00,
+  0x06, 0x6c, 0x4d, 0x90, 0x00, 0x00,
+  0x85, 0x55, 0x24, 0x68, 0x00, 0x00,
+  0xaa, 0x8a, 0x1a, 0x30, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_8[48] = {
+  0xc0, 0x16, 0x40, 0x88, 0x00, 0x00,
+  0x41, 0x60, 0x25, 0x40, 0x00, 0x00,
+  0x88, 0x30, 0x01, 0xa8, 0x00, 0x00,
+  0x20, 0xa4, 0x80, 0xd0, 0x00, 0x00,
+  0x0a, 0x48, 0x51, 0x10, 0x00, 0x00,
+  0x04, 0x9b, 0x08, 0x40, 0x00, 0x00,
+  0x94, 0x40, 0x03, 0x18, 0x00, 0x00,
+  0x72, 0x01, 0x96, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom29_9[54] = {
+  0xa2, 0x55, 0x88, 0x88, 0x00, 0x00,
+  0x34, 0x60, 0x91, 0x10, 0x00, 0x00,
+  0x4a, 0x27, 0x01, 0x40, 0x00, 0x00,
+  0x20, 0xa8, 0x0c, 0x30, 0x00, 0x00,
+  0x11, 0x84, 0x58, 0xa0, 0x00, 0x00,
+  0x49, 0x0a, 0x24, 0x00, 0x00, 0x00,
+  0x86, 0x0e, 0x0a, 0x40, 0x00, 0x00,
+  0x20, 0xd4, 0x22, 0x90, 0x00, 0x00,
+  0x88, 0x4a, 0x41, 0x20, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom2_1[2] = {
+  0xc0, 0x00
+};
+
+const uint8_t kMaskRandom2_2[4] = {
+  0xc0, 0x00,
+  0x80, 0x00
+};
+
+const uint8_t kMaskRandom30_1[6] = {
+  0xff, 0xff, 0xff, 0xfc, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_10[60] = {
+  0xc0, 0xa1, 0x81, 0x40, 0x00, 0x00,
+  0x15, 0x56, 0x2a, 0xac, 0x00, 0x00,
+  0x74, 0x40, 0xe8, 0x80, 0x00, 0x00,
+  0x00, 0x9c, 0x01, 0x38, 0x00, 0x00,
+  0x01, 0x2c, 0x02, 0x58, 0x00, 0x00,
+  0x44, 0x92, 0x89, 0x24, 0x00, 0x00,
+  0x88, 0x51, 0x10, 0xa0, 0x00, 0x00,
+  0x20, 0xa4, 0x41, 0x48, 0x00, 0x00,
+  0xaa, 0x05, 0x54, 0x08, 0x00, 0x00,
+  0x02, 0x62, 0x04, 0xc4, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_11[66] = {
+  0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+  0xf1, 0x11, 0xe2, 0x20, 0x00, 0x00,
+  0x10, 0x0e, 0x20, 0x1c, 0x00, 0x00,
+  0x10, 0xb0, 0x21, 0x60, 0x00, 0x00,
+  0x24, 0x24, 0x48, 0x48, 0x00, 0x00,
+  0x01, 0x12, 0x02, 0x24, 0x00, 0x00,
+  0x00, 0xc4, 0x01, 0x88, 0x00, 0x00,
+  0x04, 0xa2, 0x09, 0x44, 0x00, 0x00,
+  0x02, 0x58, 0x04, 0xb0, 0x00, 0x00,
+  0x2b, 0x00, 0x56, 0x00, 0x00, 0x00,
+  0x98, 0x41, 0x30, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_12[72] = {
+  0x88, 0x91, 0x11, 0x20, 0x00, 0x00,
+  0x40, 0x54, 0x80, 0xa8, 0x00, 0x00,
+  0x82, 0x63, 0x04, 0xc4, 0x00, 0x00,
+  0x21, 0xa4, 0x43, 0x48, 0x00, 0x00,
+  0x10, 0x64, 0x20, 0xc8, 0x00, 0x00,
+  0x44, 0x0a, 0x88, 0x14, 0x00, 0x00,
+  0x10, 0xc8, 0x21, 0x90, 0x00, 0x00,
+  0x4d, 0x2a, 0x9a, 0x54, 0x00, 0x00,
+  0x38, 0x02, 0x70, 0x04, 0x00, 0x00,
+  0x17, 0x48, 0x2e, 0x90, 0x00, 0x00,
+  0x90, 0x85, 0x21, 0x08, 0x00, 0x00,
+  0x72, 0x14, 0xe4, 0x28, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_13[78] = {
+  0x62, 0xa2, 0xc5, 0x44, 0x00, 0x00,
+  0x34, 0x44, 0x68, 0x88, 0x00, 0x00,
+  0x40, 0x4a, 0x80, 0x94, 0x00, 0x00,
+  0xc4, 0x05, 0x88, 0x08, 0x00, 0x00,
+  0x08, 0x60, 0x10, 0xc0, 0x00, 0x00,
+  0x94, 0x13, 0x28, 0x24, 0x00, 0x00,
+  0x88, 0xc1, 0x11, 0x80, 0x00, 0x00,
+  0x21, 0x32, 0x42, 0x64, 0x00, 0x00,
+  0xc1, 0x41, 0x82, 0x80, 0x00, 0x00,
+  0x10, 0x68, 0x20, 0xd0, 0x00, 0x00,
+  0x06, 0x90, 0x0d, 0x20, 0x00, 0x00,
+  0x59, 0x00, 0xb2, 0x00, 0x00, 0x00,
+  0x0a, 0x0c, 0x14, 0x18, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_14[84] = {
+  0x40, 0x82, 0x81, 0x04, 0x00, 0x00,
+  0x15, 0x54, 0x2a, 0xa8, 0x00, 0x00,
+  0x88, 0x13, 0x10, 0x24, 0x00, 0x00,
+  0xc0, 0x11, 0x80, 0x20, 0x00, 0x00,
+  0x80, 0xa1, 0x01, 0x40, 0x00, 0x00,
+  0x01, 0x22, 0x02, 0x44, 0x00, 0x00,
+  0x40, 0x2c, 0x80, 0x58, 0x00, 0x00,
+  0x22, 0x02, 0x44, 0x04, 0x00, 0x00,
+  0x90, 0x05, 0x20, 0x08, 0x00, 0x00,
+  0x12, 0x40, 0x24, 0x80, 0x00, 0x00,
+  0x5d, 0x00, 0xba, 0x00, 0x00, 0x00,
+  0x20, 0x54, 0x40, 0xa8, 0x00, 0x00,
+  0x86, 0x09, 0x0c, 0x10, 0x00, 0x00,
+  0x28, 0x88, 0x51, 0x10, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_15[90] = {
+  0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+  0x31, 0x10, 0x62, 0x20, 0x00, 0x00,
+  0x58, 0x00, 0xb0, 0x00, 0x00, 0x00,
+  0x01, 0x12, 0x02, 0x24, 0x00, 0x00,
+  0x88, 0x21, 0x10, 0x40, 0x00, 0x00,
+  0x44, 0x02, 0x88, 0x04, 0x00, 0x00,
+  0x29, 0x04, 0x52, 0x08, 0x00, 0x00,
+  0x82, 0xa1, 0x05, 0x40, 0x00, 0x00,
+  0x0a, 0x1a, 0x14, 0x34, 0x00, 0x00,
+  0x11, 0xe0, 0x23, 0xc0, 0x00, 0x00,
+  0x84, 0x05, 0x08, 0x08, 0x00, 0x00,
+  0x86, 0x41, 0x0c, 0x80, 0x00, 0x00,
+  0x00, 0x86, 0x01, 0x0c, 0x00, 0x00,
+  0x44, 0x48, 0x88, 0x90, 0x00, 0x00,
+  0x10, 0x98, 0x21, 0x30, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_16[96] = {
+  0x90, 0x23, 0x20, 0x44, 0x00, 0x00,
+  0x09, 0x50, 0x12, 0xa0, 0x00, 0x00,
+  0x00, 0x6a, 0x00, 0xd4, 0x00, 0x00,
+  0x20, 0x34, 0x40, 0x68, 0x00, 0x00,
+  0x14, 0x44, 0x28, 0x88, 0x00, 0x00,
+  0xc2, 0x11, 0x84, 0x20, 0x00, 0x00,
+  0x00, 0xc6, 0x01, 0x8c, 0x00, 0x00,
+  0x65, 0x80, 0xcb, 0x00, 0x00, 0x00,
+  0x62, 0xa2, 0xc5, 0x44, 0x00, 0x00,
+  0xb1, 0x15, 0x62, 0x28, 0x00, 0x00,
+  0x18, 0x6a, 0x30, 0xd4, 0x00, 0x00,
+  0x44, 0xd4, 0x89, 0xa8, 0x00, 0x00,
+  0x13, 0x64, 0x26, 0xc8, 0x00, 0x00,
+  0x49, 0x1a, 0x92, 0x34, 0x00, 0x00,
+  0x86, 0x8d, 0x0d, 0x18, 0x00, 0x00,
+  0xce, 0x58, 0xa0, 0x14, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_17[102] = {
+  0x90, 0x23, 0x20, 0x44, 0x00, 0x00,
+  0x09, 0x50, 0x12, 0xa0, 0x00, 0x00,
+  0x00, 0x6a, 0x00, 0xd4, 0x00, 0x00,
+  0x20, 0x34, 0x40, 0x68, 0x00, 0x00,
+  0x14, 0x44, 0x28, 0x88, 0x00, 0x00,
+  0xc2, 0x11, 0x84, 0x20, 0x00, 0x00,
+  0x00, 0xc6, 0x01, 0x8c, 0x00, 0x00,
+  0x65, 0x80, 0xcb, 0x00, 0x00, 0x00,
+  0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+  0x24, 0x44, 0x48, 0x88, 0x00, 0x00,
+  0xc0, 0x51, 0x80, 0xa0, 0x00, 0x00,
+  0x03, 0x0c, 0x06, 0x18, 0x00, 0x00,
+  0x16, 0x28, 0x2c, 0x50, 0x00, 0x00,
+  0x89, 0x01, 0x12, 0x00, 0x00, 0x00,
+  0x82, 0x91, 0x05, 0x20, 0x00, 0x00,
+  0x08, 0xa4, 0x11, 0x48, 0x00, 0x00,
+  0x90, 0x49, 0x20, 0x90, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_18[108] = {
+  0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+  0x24, 0x44, 0x48, 0x88, 0x00, 0x00,
+  0xc0, 0x51, 0x80, 0xa0, 0x00, 0x00,
+  0x03, 0x0c, 0x06, 0x18, 0x00, 0x00,
+  0x16, 0x28, 0x2c, 0x50, 0x00, 0x00,
+  0x89, 0x01, 0x12, 0x00, 0x00, 0x00,
+  0x82, 0x91, 0x05, 0x20, 0x00, 0x00,
+  0x08, 0xa4, 0x11, 0x48, 0x00, 0x00,
+  0x90, 0x49, 0x20, 0x90, 0x00, 0x00,
+  0x90, 0x23, 0x20, 0x44, 0x00, 0x00,
+  0x09, 0x50, 0x12, 0xa0, 0x00, 0x00,
+  0x00, 0x6a, 0x00, 0xd4, 0x00, 0x00,
+  0x20, 0x34, 0x40, 0x68, 0x00, 0x00,
+  0x14, 0x44, 0x28, 0x88, 0x00, 0x00,
+  0xc2, 0x11, 0x84, 0x20, 0x00, 0x00,
+  0x00, 0xc6, 0x01, 0x8c, 0x00, 0x00,
+  0x65, 0x80, 0xcb, 0x00, 0x00, 0x00,
+  0x00, 0xb2, 0x47, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_19[114] = {
+  0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+  0x24, 0x44, 0x48, 0x88, 0x00, 0x00,
+  0xc0, 0x51, 0x80, 0xa0, 0x00, 0x00,
+  0x03, 0x0c, 0x06, 0x18, 0x00, 0x00,
+  0x16, 0x28, 0x2c, 0x50, 0x00, 0x00,
+  0x89, 0x01, 0x12, 0x00, 0x00, 0x00,
+  0x82, 0x91, 0x05, 0x20, 0x00, 0x00,
+  0x08, 0xa4, 0x11, 0x48, 0x00, 0x00,
+  0x90, 0x49, 0x20, 0x90, 0x00, 0x00,
+  0xc0, 0xa1, 0x81, 0x40, 0x00, 0x00,
+  0x15, 0x56, 0x2a, 0xac, 0x00, 0x00,
+  0x74, 0x40, 0xe8, 0x80, 0x00, 0x00,
+  0x00, 0x9c, 0x01, 0x38, 0x00, 0x00,
+  0x01, 0x2c, 0x02, 0x58, 0x00, 0x00,
+  0x44, 0x92, 0x89, 0x24, 0x00, 0x00,
+  0x88, 0x51, 0x10, 0xa0, 0x00, 0x00,
+  0x20, 0xa4, 0x41, 0x48, 0x00, 0x00,
+  0xaa, 0x05, 0x54, 0x08, 0x00, 0x00,
+  0x02, 0x62, 0x04, 0xc4, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_2[12] = {
+  0xec, 0xeb, 0xd9, 0xd4, 0x00, 0x00,
+  0xbb, 0x9d, 0x77, 0x38, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_20[120] = {
+  0xc0, 0xa1, 0x81, 0x40, 0x00, 0x00,
+  0x15, 0x56, 0x2a, 0xac, 0x00, 0x00,
+  0x74, 0x40, 0xe8, 0x80, 0x00, 0x00,
+  0x00, 0x9c, 0x01, 0x38, 0x00, 0x00,
+  0x01, 0x2c, 0x02, 0x58, 0x00, 0x00,
+  0x44, 0x92, 0x89, 0x24, 0x00, 0x00,
+  0x88, 0x51, 0x10, 0xa0, 0x00, 0x00,
+  0x20, 0xa4, 0x41, 0x48, 0x00, 0x00,
+  0xaa, 0x05, 0x54, 0x08, 0x00, 0x00,
+  0x02, 0x62, 0x04, 0xc4, 0x00, 0x00,
+  0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+  0x24, 0x44, 0x48, 0x88, 0x00, 0x00,
+  0xc0, 0x51, 0x80, 0xa0, 0x00, 0x00,
+  0x03, 0x0c, 0x06, 0x18, 0x00, 0x00,
+  0x16, 0x28, 0x2c, 0x50, 0x00, 0x00,
+  0x89, 0x01, 0x12, 0x00, 0x00, 0x00,
+  0x82, 0x91, 0x05, 0x20, 0x00, 0x00,
+  0x08, 0xa4, 0x11, 0x48, 0x00, 0x00,
+  0x90, 0x49, 0x20, 0x90, 0x00, 0x00,
+  0x51, 0x88, 0xd1, 0x78, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_21[126] = {
+  0xc0, 0xa1, 0x81, 0x40, 0x00, 0x00,
+  0x15, 0x56, 0x2a, 0xac, 0x00, 0x00,
+  0x74, 0x40, 0xe8, 0x80, 0x00, 0x00,
+  0x00, 0x9c, 0x01, 0x38, 0x00, 0x00,
+  0x01, 0x2c, 0x02, 0x58, 0x00, 0x00,
+  0x44, 0x92, 0x89, 0x24, 0x00, 0x00,
+  0x88, 0x51, 0x10, 0xa0, 0x00, 0x00,
+  0x20, 0xa4, 0x41, 0x48, 0x00, 0x00,
+  0xaa, 0x05, 0x54, 0x08, 0x00, 0x00,
+  0x02, 0x62, 0x04, 0xc4, 0x00, 0x00,
+  0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+  0xf1, 0x11, 0xe2, 0x20, 0x00, 0x00,
+  0x10, 0x0e, 0x20, 0x1c, 0x00, 0x00,
+  0x10, 0xb0, 0x21, 0x60, 0x00, 0x00,
+  0x24, 0x24, 0x48, 0x48, 0x00, 0x00,
+  0x01, 0x12, 0x02, 0x24, 0x00, 0x00,
+  0x00, 0xc4, 0x01, 0x88, 0x00, 0x00,
+  0x04, 0xa2, 0x09, 0x44, 0x00, 0x00,
+  0x02, 0x58, 0x04, 0xb0, 0x00, 0x00,
+  0x2b, 0x00, 0x56, 0x00, 0x00, 0x00,
+  0x98, 0x41, 0x30, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_22[132] = {
+  0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+  0xf1, 0x11, 0xe2, 0x20, 0x00, 0x00,
+  0x10, 0x0e, 0x20, 0x1c, 0x00, 0x00,
+  0x10, 0xb0, 0x21, 0x60, 0x00, 0x00,
+  0x24, 0x24, 0x48, 0x48, 0x00, 0x00,
+  0x01, 0x12, 0x02, 0x24, 0x00, 0x00,
+  0x00, 0xc4, 0x01, 0x88, 0x00, 0x00,
+  0x04, 0xa2, 0x09, 0x44, 0x00, 0x00,
+  0x02, 0x58, 0x04, 0xb0, 0x00, 0x00,
+  0x2b, 0x00, 0x56, 0x00, 0x00, 0x00,
+  0x98, 0x41, 0x30, 0x80, 0x00, 0x00,
+  0xc0, 0xa1, 0x81, 0x40, 0x00, 0x00,
+  0x15, 0x56, 0x2a, 0xac, 0x00, 0x00,
+  0x74, 0x40, 0xe8, 0x80, 0x00, 0x00,
+  0x00, 0x9c, 0x01, 0x38, 0x00, 0x00,
+  0x01, 0x2c, 0x02, 0x58, 0x00, 0x00,
+  0x44, 0x92, 0x89, 0x24, 0x00, 0x00,
+  0x88, 0x51, 0x10, 0xa0, 0x00, 0x00,
+  0x20, 0xa4, 0x41, 0x48, 0x00, 0x00,
+  0xaa, 0x05, 0x54, 0x08, 0x00, 0x00,
+  0x02, 0x62, 0x04, 0xc4, 0x00, 0x00,
+  0x03, 0x10, 0x18, 0x74, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_23[138] = {
+  0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+  0xf1, 0x11, 0xe2, 0x20, 0x00, 0x00,
+  0x10, 0x0e, 0x20, 0x1c, 0x00, 0x00,
+  0x10, 0xb0, 0x21, 0x60, 0x00, 0x00,
+  0x24, 0x24, 0x48, 0x48, 0x00, 0x00,
+  0x01, 0x12, 0x02, 0x24, 0x00, 0x00,
+  0x00, 0xc4, 0x01, 0x88, 0x00, 0x00,
+  0x04, 0xa2, 0x09, 0x44, 0x00, 0x00,
+  0x02, 0x58, 0x04, 0xb0, 0x00, 0x00,
+  0x2b, 0x00, 0x56, 0x00, 0x00, 0x00,
+  0x98, 0x41, 0x30, 0x80, 0x00, 0x00,
+  0x88, 0x91, 0x11, 0x20, 0x00, 0x00,
+  0x40, 0x54, 0x80, 0xa8, 0x00, 0x00,
+  0x82, 0x63, 0x04, 0xc4, 0x00, 0x00,
+  0x21, 0xa4, 0x43, 0x48, 0x00, 0x00,
+  0x10, 0x64, 0x20, 0xc8, 0x00, 0x00,
+  0x44, 0x0a, 0x88, 0x14, 0x00, 0x00,
+  0x10, 0xc8, 0x21, 0x90, 0x00, 0x00,
+  0x4d, 0x2a, 0x9a, 0x54, 0x00, 0x00,
+  0x38, 0x02, 0x70, 0x04, 0x00, 0x00,
+  0x17, 0x48, 0x2e, 0x90, 0x00, 0x00,
+  0x90, 0x85, 0x21, 0x08, 0x00, 0x00,
+  0x72, 0x14, 0xe4, 0x28, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_24[144] = {
+  0x88, 0x91, 0x11, 0x20, 0x00, 0x00,
+  0x40, 0x54, 0x80, 0xa8, 0x00, 0x00,
+  0x82, 0x63, 0x04, 0xc4, 0x00, 0x00,
+  0x21, 0xa4, 0x43, 0x48, 0x00, 0x00,
+  0x10, 0x64, 0x20, 0xc8, 0x00, 0x00,
+  0x44, 0x0a, 0x88, 0x14, 0x00, 0x00,
+  0x10, 0xc8, 0x21, 0x90, 0x00, 0x00,
+  0x4d, 0x2a, 0x9a, 0x54, 0x00, 0x00,
+  0x38, 0x02, 0x70, 0x04, 0x00, 0x00,
+  0x17, 0x48, 0x2e, 0x90, 0x00, 0x00,
+  0x90, 0x85, 0x21, 0x08, 0x00, 0x00,
+  0x72, 0x14, 0xe4, 0x28, 0x00, 0x00,
+  0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+  0xf1, 0x11, 0xe2, 0x20, 0x00, 0x00,
+  0x10, 0x0e, 0x20, 0x1c, 0x00, 0x00,
+  0x10, 0xb0, 0x21, 0x60, 0x00, 0x00,
+  0x24, 0x24, 0x48, 0x48, 0x00, 0x00,
+  0x01, 0x12, 0x02, 0x24, 0x00, 0x00,
+  0x00, 0xc4, 0x01, 0x88, 0x00, 0x00,
+  0x04, 0xa2, 0x09, 0x44, 0x00, 0x00,
+  0x02, 0x58, 0x04, 0xb0, 0x00, 0x00,
+  0x2b, 0x00, 0x56, 0x00, 0x00, 0x00,
+  0x98, 0x41, 0x30, 0x80, 0x00, 0x00,
+  0xf3, 0x4d, 0x1c, 0x70, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_25[150] = {
+  0x88, 0x91, 0x11, 0x20, 0x00, 0x00,
+  0x40, 0x54, 0x80, 0xa8, 0x00, 0x00,
+  0x82, 0x63, 0x04, 0xc4, 0x00, 0x00,
+  0x21, 0xa4, 0x43, 0x48, 0x00, 0x00,
+  0x10, 0x64, 0x20, 0xc8, 0x00, 0x00,
+  0x44, 0x0a, 0x88, 0x14, 0x00, 0x00,
+  0x10, 0xc8, 0x21, 0x90, 0x00, 0x00,
+  0x4d, 0x2a, 0x9a, 0x54, 0x00, 0x00,
+  0x38, 0x02, 0x70, 0x04, 0x00, 0x00,
+  0x17, 0x48, 0x2e, 0x90, 0x00, 0x00,
+  0x90, 0x85, 0x21, 0x08, 0x00, 0x00,
+  0x72, 0x14, 0xe4, 0x28, 0x00, 0x00,
+  0x62, 0xa2, 0xc5, 0x44, 0x00, 0x00,
+  0x34, 0x44, 0x68, 0x88, 0x00, 0x00,
+  0x40, 0x4a, 0x80, 0x94, 0x00, 0x00,
+  0xc4, 0x05, 0x88, 0x08, 0x00, 0x00,
+  0x08, 0x60, 0x10, 0xc0, 0x00, 0x00,
+  0x94, 0x13, 0x28, 0x24, 0x00, 0x00,
+  0x88, 0xc1, 0x11, 0x80, 0x00, 0x00,
+  0x21, 0x32, 0x42, 0x64, 0x00, 0x00,
+  0xc1, 0x41, 0x82, 0x80, 0x00, 0x00,
+  0x10, 0x68, 0x20, 0xd0, 0x00, 0x00,
+  0x06, 0x90, 0x0d, 0x20, 0x00, 0x00,
+  0x59, 0x00, 0xb2, 0x00, 0x00, 0x00,
+  0x0a, 0x0c, 0x14, 0x18, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_26[156] = {
+  0x62, 0xa2, 0xc5, 0x44, 0x00, 0x00,
+  0x34, 0x44, 0x68, 0x88, 0x00, 0x00,
+  0x40, 0x4a, 0x80, 0x94, 0x00, 0x00,
+  0xc4, 0x05, 0x88, 0x08, 0x00, 0x00,
+  0x08, 0x60, 0x10, 0xc0, 0x00, 0x00,
+  0x94, 0x13, 0x28, 0x24, 0x00, 0x00,
+  0x88, 0xc1, 0x11, 0x80, 0x00, 0x00,
+  0x21, 0x32, 0x42, 0x64, 0x00, 0x00,
+  0xc1, 0x41, 0x82, 0x80, 0x00, 0x00,
+  0x10, 0x68, 0x20, 0xd0, 0x00, 0x00,
+  0x06, 0x90, 0x0d, 0x20, 0x00, 0x00,
+  0x59, 0x00, 0xb2, 0x00, 0x00, 0x00,
+  0x0a, 0x0c, 0x14, 0x18, 0x00, 0x00,
+  0x88, 0x91, 0x11, 0x20, 0x00, 0x00,
+  0x40, 0x54, 0x80, 0xa8, 0x00, 0x00,
+  0x82, 0x63, 0x04, 0xc4, 0x00, 0x00,
+  0x21, 0xa4, 0x43, 0x48, 0x00, 0x00,
+  0x10, 0x64, 0x20, 0xc8, 0x00, 0x00,
+  0x44, 0x0a, 0x88, 0x14, 0x00, 0x00,
+  0x10, 0xc8, 0x21, 0x90, 0x00, 0x00,
+  0x4d, 0x2a, 0x9a, 0x54, 0x00, 0x00,
+  0x38, 0x02, 0x70, 0x04, 0x00, 0x00,
+  0x17, 0x48, 0x2e, 0x90, 0x00, 0x00,
+  0x90, 0x85, 0x21, 0x08, 0x00, 0x00,
+  0x72, 0x14, 0xe4, 0x28, 0x00, 0x00,
+  0x83, 0x11, 0xad, 0xe8, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_27[162] = {
+  0x62, 0xa2, 0xc5, 0x44, 0x00, 0x00,
+  0x34, 0x44, 0x68, 0x88, 0x00, 0x00,
+  0x40, 0x4a, 0x80, 0x94, 0x00, 0x00,
+  0xc4, 0x05, 0x88, 0x08, 0x00, 0x00,
+  0x08, 0x60, 0x10, 0xc0, 0x00, 0x00,
+  0x94, 0x13, 0x28, 0x24, 0x00, 0x00,
+  0x88, 0xc1, 0x11, 0x80, 0x00, 0x00,
+  0x21, 0x32, 0x42, 0x64, 0x00, 0x00,
+  0xc1, 0x41, 0x82, 0x80, 0x00, 0x00,
+  0x10, 0x68, 0x20, 0xd0, 0x00, 0x00,
+  0x06, 0x90, 0x0d, 0x20, 0x00, 0x00,
+  0x59, 0x00, 0xb2, 0x00, 0x00, 0x00,
+  0x0a, 0x0c, 0x14, 0x18, 0x00, 0x00,
+  0x40, 0x82, 0x81, 0x04, 0x00, 0x00,
+  0x15, 0x54, 0x2a, 0xa8, 0x00, 0x00,
+  0x88, 0x13, 0x10, 0x24, 0x00, 0x00,
+  0xc0, 0x11, 0x80, 0x20, 0x00, 0x00,
+  0x80, 0xa1, 0x01, 0x40, 0x00, 0x00,
+  0x01, 0x22, 0x02, 0x44, 0x00, 0x00,
+  0x40, 0x2c, 0x80, 0x58, 0x00, 0x00,
+  0x22, 0x02, 0x44, 0x04, 0x00, 0x00,
+  0x90, 0x05, 0x20, 0x08, 0x00, 0x00,
+  0x12, 0x40, 0x24, 0x80, 0x00, 0x00,
+  0x5d, 0x00, 0xba, 0x00, 0x00, 0x00,
+  0x20, 0x54, 0x40, 0xa8, 0x00, 0x00,
+  0x86, 0x09, 0x0c, 0x10, 0x00, 0x00,
+  0x28, 0x88, 0x51, 0x10, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_28[168] = {
+  0x40, 0x82, 0x81, 0x04, 0x00, 0x00,
+  0x15, 0x54, 0x2a, 0xa8, 0x00, 0x00,
+  0x88, 0x13, 0x10, 0x24, 0x00, 0x00,
+  0xc0, 0x11, 0x80, 0x20, 0x00, 0x00,
+  0x80, 0xa1, 0x01, 0x40, 0x00, 0x00,
+  0x01, 0x22, 0x02, 0x44, 0x00, 0x00,
+  0x40, 0x2c, 0x80, 0x58, 0x00, 0x00,
+  0x22, 0x02, 0x44, 0x04, 0x00, 0x00,
+  0x90, 0x05, 0x20, 0x08, 0x00, 0x00,
+  0x12, 0x40, 0x24, 0x80, 0x00, 0x00,
+  0x5d, 0x00, 0xba, 0x00, 0x00, 0x00,
+  0x20, 0x54, 0x40, 0xa8, 0x00, 0x00,
+  0x86, 0x09, 0x0c, 0x10, 0x00, 0x00,
+  0x28, 0x88, 0x51, 0x10, 0x00, 0x00,
+  0x62, 0xa2, 0xc5, 0x44, 0x00, 0x00,
+  0x34, 0x44, 0x68, 0x88, 0x00, 0x00,
+  0x40, 0x4a, 0x80, 0x94, 0x00, 0x00,
+  0xc4, 0x05, 0x88, 0x08, 0x00, 0x00,
+  0x08, 0x60, 0x10, 0xc0, 0x00, 0x00,
+  0x94, 0x13, 0x28, 0x24, 0x00, 0x00,
+  0x88, 0xc1, 0x11, 0x80, 0x00, 0x00,
+  0x21, 0x32, 0x42, 0x64, 0x00, 0x00,
+  0xc1, 0x41, 0x82, 0x80, 0x00, 0x00,
+  0x10, 0x68, 0x20, 0xd0, 0x00, 0x00,
+  0x06, 0x90, 0x0d, 0x20, 0x00, 0x00,
+  0x59, 0x00, 0xb2, 0x00, 0x00, 0x00,
+  0x0a, 0x0c, 0x14, 0x18, 0x00, 0x00,
+  0x94, 0x59, 0x03, 0x18, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_29[174] = {
+  0x40, 0x82, 0x81, 0x04, 0x00, 0x00,
+  0x15, 0x54, 0x2a, 0xa8, 0x00, 0x00,
+  0x88, 0x13, 0x10, 0x24, 0x00, 0x00,
+  0xc0, 0x11, 0x80, 0x20, 0x00, 0x00,
+  0x80, 0xa1, 0x01, 0x40, 0x00, 0x00,
+  0x01, 0x22, 0x02, 0x44, 0x00, 0x00,
+  0x40, 0x2c, 0x80, 0x58, 0x00, 0x00,
+  0x22, 0x02, 0x44, 0x04, 0x00, 0x00,
+  0x90, 0x05, 0x20, 0x08, 0x00, 0x00,
+  0x12, 0x40, 0x24, 0x80, 0x00, 0x00,
+  0x5d, 0x00, 0xba, 0x00, 0x00, 0x00,
+  0x20, 0x54, 0x40, 0xa8, 0x00, 0x00,
+  0x86, 0x09, 0x0c, 0x10, 0x00, 0x00,
+  0x28, 0x88, 0x51, 0x10, 0x00, 0x00,
+  0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+  0x31, 0x10, 0x62, 0x20, 0x00, 0x00,
+  0x58, 0x00, 0xb0, 0x00, 0x00, 0x00,
+  0x01, 0x12, 0x02, 0x24, 0x00, 0x00,
+  0x88, 0x21, 0x10, 0x40, 0x00, 0x00,
+  0x44, 0x02, 0x88, 0x04, 0x00, 0x00,
+  0x29, 0x04, 0x52, 0x08, 0x00, 0x00,
+  0x82, 0xa1, 0x05, 0x40, 0x00, 0x00,
+  0x0a, 0x1a, 0x14, 0x34, 0x00, 0x00,
+  0x11, 0xe0, 0x23, 0xc0, 0x00, 0x00,
+  0x84, 0x05, 0x08, 0x08, 0x00, 0x00,
+  0x86, 0x41, 0x0c, 0x80, 0x00, 0x00,
+  0x00, 0x86, 0x01, 0x0c, 0x00, 0x00,
+  0x44, 0x48, 0x88, 0x90, 0x00, 0x00,
+  0x10, 0x98, 0x21, 0x30, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_3[18] = {
+  0xac, 0x93, 0x59, 0x24, 0x00, 0x00,
+  0x55, 0x4a, 0xaa, 0x94, 0x00, 0x00,
+  0x43, 0x36, 0x86, 0x6c, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_30[180] = {
+  0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+  0x31, 0x10, 0x62, 0x20, 0x00, 0x00,
+  0x58, 0x00, 0xb0, 0x00, 0x00, 0x00,
+  0x01, 0x12, 0x02, 0x24, 0x00, 0x00,
+  0x88, 0x21, 0x10, 0x40, 0x00, 0x00,
+  0x44, 0x02, 0x88, 0x04, 0x00, 0x00,
+  0x29, 0x04, 0x52, 0x08, 0x00, 0x00,
+  0x82, 0xa1, 0x05, 0x40, 0x00, 0x00,
+  0x0a, 0x1a, 0x14, 0x34, 0x00, 0x00,
+  0x11, 0xe0, 0x23, 0xc0, 0x00, 0x00,
+  0x84, 0x05, 0x08, 0x08, 0x00, 0x00,
+  0x86, 0x41, 0x0c, 0x80, 0x00, 0x00,
+  0x00, 0x86, 0x01, 0x0c, 0x00, 0x00,
+  0x44, 0x48, 0x88, 0x90, 0x00, 0x00,
+  0x10, 0x98, 0x21, 0x30, 0x00, 0x00,
+  0x40, 0x82, 0x81, 0x04, 0x00, 0x00,
+  0x15, 0x54, 0x2a, 0xa8, 0x00, 0x00,
+  0x88, 0x13, 0x10, 0x24, 0x00, 0x00,
+  0xc0, 0x11, 0x80, 0x20, 0x00, 0x00,
+  0x80, 0xa1, 0x01, 0x40, 0x00, 0x00,
+  0x01, 0x22, 0x02, 0x44, 0x00, 0x00,
+  0x40, 0x2c, 0x80, 0x58, 0x00, 0x00,
+  0x22, 0x02, 0x44, 0x04, 0x00, 0x00,
+  0x90, 0x05, 0x20, 0x08, 0x00, 0x00,
+  0x12, 0x40, 0x24, 0x80, 0x00, 0x00,
+  0x5d, 0x00, 0xba, 0x00, 0x00, 0x00,
+  0x20, 0x54, 0x40, 0xa8, 0x00, 0x00,
+  0x86, 0x09, 0x0c, 0x10, 0x00, 0x00,
+  0x28, 0x88, 0x51, 0x10, 0x00, 0x00,
+  0x46, 0xf1, 0xef, 0xec, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_4[24] = {
+  0x25, 0xaa, 0x4b, 0x54, 0x00, 0x00,
+  0x95, 0x55, 0x2a, 0xa8, 0x00, 0x00,
+  0x1a, 0x6a, 0x34, 0xd4, 0x00, 0x00,
+  0x43, 0xd4, 0x87, 0xa8, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_5[30] = {
+  0x64, 0xa2, 0xc9, 0x44, 0x00, 0x00,
+  0x25, 0x54, 0x4a, 0xa8, 0x00, 0x00,
+  0x49, 0x68, 0x92, 0xd0, 0x00, 0x00,
+  0x53, 0x90, 0xa7, 0x20, 0x00, 0x00,
+  0x8e, 0x31, 0x1c, 0x60, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_6[36] = {
+  0x62, 0x8a, 0xc5, 0x14, 0x00, 0x00,
+  0x15, 0x54, 0x2a, 0xa8, 0x00, 0x00,
+  0x4c, 0x46, 0x98, 0x8c, 0x00, 0x00,
+  0x52, 0x94, 0xa5, 0x28, 0x00, 0x00,
+  0x23, 0x64, 0x46, 0xc8, 0x00, 0x00,
+  0x8a, 0x59, 0x14, 0xb0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_7[42] = {
+  0x62, 0xa2, 0xc5, 0x44, 0x00, 0x00,
+  0xb1, 0x15, 0x62, 0x28, 0x00, 0x00,
+  0x18, 0x6a, 0x30, 0xd4, 0x00, 0x00,
+  0x44, 0xd4, 0x89, 0xa8, 0x00, 0x00,
+  0x13, 0x64, 0x26, 0xc8, 0x00, 0x00,
+  0x49, 0x1a, 0x92, 0x34, 0x00, 0x00,
+  0x86, 0x8d, 0x0d, 0x18, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_8[48] = {
+  0x90, 0x23, 0x20, 0x44, 0x00, 0x00,
+  0x09, 0x50, 0x12, 0xa0, 0x00, 0x00,
+  0x00, 0x6a, 0x00, 0xd4, 0x00, 0x00,
+  0x20, 0x34, 0x40, 0x68, 0x00, 0x00,
+  0x14, 0x44, 0x28, 0x88, 0x00, 0x00,
+  0xc2, 0x11, 0x84, 0x20, 0x00, 0x00,
+  0x00, 0xc6, 0x01, 0x8c, 0x00, 0x00,
+  0x65, 0x80, 0xcb, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom30_9[54] = {
+  0x62, 0x22, 0xc4, 0x44, 0x00, 0x00,
+  0x24, 0x44, 0x48, 0x88, 0x00, 0x00,
+  0xc0, 0x51, 0x80, 0xa0, 0x00, 0x00,
+  0x03, 0x0c, 0x06, 0x18, 0x00, 0x00,
+  0x16, 0x28, 0x2c, 0x50, 0x00, 0x00,
+  0x89, 0x01, 0x12, 0x00, 0x00, 0x00,
+  0x82, 0x91, 0x05, 0x20, 0x00, 0x00,
+  0x08, 0xa4, 0x11, 0x48, 0x00, 0x00,
+  0x90, 0x49, 0x20, 0x90, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_1[6] = {
+  0xff, 0xff, 0xff, 0xfe, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_10[60] = {
+  0xc0, 0xa0, 0x8a, 0xa2, 0x00, 0x00,
+  0x15, 0x56, 0x21, 0x44, 0x00, 0x00,
+  0x74, 0x40, 0x02, 0x4a, 0x00, 0x00,
+  0x00, 0x9c, 0x16, 0x84, 0x00, 0x00,
+  0x01, 0x2d, 0xb0, 0x40, 0x00, 0x00,
+  0x44, 0x93, 0x05, 0x18, 0x00, 0x00,
+  0x88, 0x50, 0x48, 0x94, 0x00, 0x00,
+  0x20, 0xa4, 0x70, 0x30, 0x00, 0x00,
+  0xaa, 0x04, 0x54, 0x4a, 0x00, 0x00,
+  0x02, 0x63, 0x09, 0x24, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_11[66] = {
+  0x62, 0x22, 0xaa, 0xaa, 0x00, 0x00,
+  0xf1, 0x10, 0x54, 0x44, 0x00, 0x00,
+  0x10, 0x0e, 0x62, 0x22, 0x00, 0x00,
+  0x10, 0xb1, 0x06, 0x84, 0x00, 0x00,
+  0x24, 0x24, 0x0d, 0x30, 0x00, 0x00,
+  0x01, 0x12, 0x81, 0xc2, 0x00, 0x00,
+  0x00, 0xc4, 0x58, 0x88, 0x00, 0x00,
+  0x04, 0xa3, 0xb0, 0x50, 0x00, 0x00,
+  0x02, 0x59, 0x25, 0x02, 0x00, 0x00,
+  0x2b, 0x01, 0x08, 0x64, 0x00, 0x00,
+  0x98, 0x40, 0xd0, 0x18, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_12[72] = {
+  0x88, 0x91, 0x08, 0x62, 0x00, 0x00,
+  0x40, 0x54, 0x31, 0x44, 0x00, 0x00,
+  0x82, 0x62, 0x9c, 0x02, 0x00, 0x00,
+  0x21, 0xa4, 0x89, 0x90, 0x00, 0x00,
+  0x10, 0x64, 0x1d, 0x20, 0x00, 0x00,
+  0x44, 0x0a, 0x41, 0x98, 0x00, 0x00,
+  0x10, 0xc9, 0x26, 0x80, 0x00, 0x00,
+  0x4d, 0x2a, 0x5a, 0x20, 0x00, 0x00,
+  0x38, 0x02, 0x62, 0x88, 0x00, 0x00,
+  0x17, 0x49, 0x80, 0x46, 0x00, 0x00,
+  0x90, 0x84, 0x22, 0x4a, 0x00, 0x00,
+  0x72, 0x15, 0xd1, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_13[78] = {
+  0x62, 0xa2, 0x8a, 0x2a, 0x00, 0x00,
+  0x34, 0x44, 0x44, 0x44, 0x00, 0x00,
+  0x40, 0x4b, 0x2c, 0x18, 0x00, 0x00,
+  0xc4, 0x04, 0x18, 0xa0, 0x00, 0x00,
+  0x08, 0x60, 0xc4, 0x08, 0x00, 0x00,
+  0x94, 0x12, 0x92, 0x0c, 0x00, 0x00,
+  0x88, 0xc0, 0x23, 0x04, 0x00, 0x00,
+  0x21, 0x32, 0x24, 0x70, 0x00, 0x00,
+  0xc1, 0x40, 0x80, 0xe2, 0x00, 0x00,
+  0x10, 0x69, 0x51, 0x14, 0x00, 0x00,
+  0x06, 0x90, 0x11, 0x42, 0x00, 0x00,
+  0x59, 0x01, 0x41, 0x80, 0x00, 0x00,
+  0x0a, 0x0d, 0x8a, 0x20, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_14[84] = {
+  0x40, 0x82, 0x8a, 0xa2, 0x00, 0x00,
+  0x15, 0x54, 0x44, 0x14, 0x00, 0x00,
+  0x88, 0x13, 0x09, 0xa0, 0x00, 0x00,
+  0xc0, 0x10, 0x19, 0x14, 0x00, 0x00,
+  0x80, 0xa0, 0x30, 0x0c, 0x00, 0x00,
+  0x01, 0x22, 0x60, 0x06, 0x00, 0x00,
+  0x40, 0x2c, 0xc2, 0x10, 0x00, 0x00,
+  0x22, 0x02, 0x80, 0x22, 0x00, 0x00,
+  0x90, 0x04, 0x20, 0x58, 0x00, 0x00,
+  0x12, 0x40, 0x12, 0xc0, 0x00, 0x00,
+  0x5d, 0x00, 0x01, 0x28, 0x00, 0x00,
+  0x20, 0x54, 0xa4, 0x80, 0x00, 0x00,
+  0x86, 0x09, 0x48, 0x48, 0x00, 0x00,
+  0x28, 0x89, 0x05, 0x10, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_15[90] = {
+  0x62, 0x22, 0xaa, 0x22, 0x00, 0x00,
+  0x31, 0x10, 0x44, 0x44, 0x00, 0x00,
+  0x58, 0x00, 0x22, 0x22, 0x00, 0x00,
+  0x01, 0x13, 0x00, 0x8a, 0x00, 0x00,
+  0x88, 0x20, 0x40, 0x34, 0x00, 0x00,
+  0x44, 0x02, 0x10, 0xd0, 0x00, 0x00,
+  0x29, 0x04, 0x45, 0x08, 0x00, 0x00,
+  0x82, 0xa0, 0x90, 0x12, 0x00, 0x00,
+  0x0a, 0x1a, 0x0e, 0x02, 0x00, 0x00,
+  0x11, 0xe1, 0x28, 0x40, 0x00, 0x00,
+  0x84, 0x05, 0x04, 0x0c, 0x00, 0x00,
+  0x86, 0x40, 0xc0, 0x90, 0x00, 0x00,
+  0x00, 0x87, 0x13, 0x00, 0x00, 0x00,
+  0x44, 0x48, 0x01, 0x1c, 0x00, 0x00,
+  0x10, 0x98, 0x30, 0x44, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_16[96] = {
+  0x90, 0x22, 0x40, 0xa8, 0x00, 0x00,
+  0x09, 0x50, 0x31, 0x10, 0x00, 0x00,
+  0x00, 0x6b, 0x08, 0x0e, 0x00, 0x00,
+  0x20, 0x34, 0xc0, 0x90, 0x00, 0x00,
+  0x14, 0x44, 0x25, 0x04, 0x00, 0x00,
+  0xc2, 0x11, 0x02, 0x82, 0x00, 0x00,
+  0x00, 0xc6, 0x80, 0xc4, 0x00, 0x00,
+  0x65, 0x80, 0x2c, 0x60, 0x00, 0x00,
+  0x62, 0xa2, 0x8a, 0xa2, 0x00, 0x00,
+  0xb1, 0x14, 0x44, 0x54, 0x00, 0x00,
+  0x18, 0x6b, 0x22, 0x22, 0x00, 0x00,
+  0x44, 0xd4, 0x5c, 0x10, 0x00, 0x00,
+  0x13, 0x64, 0x90, 0x68, 0x00, 0x00,
+  0x49, 0x1b, 0x20, 0x52, 0x00, 0x00,
+  0x86, 0x8c, 0x13, 0x0c, 0x00, 0x00,
+  0x8d, 0x94, 0xa9, 0xe0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_17[102] = {
+  0x90, 0x22, 0x40, 0xa8, 0x00, 0x00,
+  0x09, 0x50, 0x31, 0x10, 0x00, 0x00,
+  0x00, 0x6b, 0x08, 0x0e, 0x00, 0x00,
+  0x20, 0x34, 0xc0, 0x90, 0x00, 0x00,
+  0x14, 0x44, 0x25, 0x04, 0x00, 0x00,
+  0xc2, 0x11, 0x02, 0x82, 0x00, 0x00,
+  0x00, 0xc6, 0x80, 0xc4, 0x00, 0x00,
+  0x65, 0x80, 0x2c, 0x60, 0x00, 0x00,
+  0x62, 0x22, 0xaa, 0xa2, 0x00, 0x00,
+  0x24, 0x44, 0x44, 0x54, 0x00, 0x00,
+  0xc0, 0x50, 0x0b, 0x0a, 0x00, 0x00,
+  0x03, 0x0c, 0x12, 0x94, 0x00, 0x00,
+  0x16, 0x29, 0x08, 0x64, 0x00, 0x00,
+  0x89, 0x01, 0x80, 0x1a, 0x00, 0x00,
+  0x82, 0x90, 0x41, 0x4c, 0x00, 0x00,
+  0x08, 0xa4, 0x34, 0x12, 0x00, 0x00,
+  0x90, 0x48, 0x88, 0xc8, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_18[108] = {
+  0x62, 0x22, 0xaa, 0xa2, 0x00, 0x00,
+  0x24, 0x44, 0x44, 0x54, 0x00, 0x00,
+  0xc0, 0x50, 0x0b, 0x0a, 0x00, 0x00,
+  0x03, 0x0c, 0x12, 0x94, 0x00, 0x00,
+  0x16, 0x29, 0x08, 0x64, 0x00, 0x00,
+  0x89, 0x01, 0x80, 0x1a, 0x00, 0x00,
+  0x82, 0x90, 0x41, 0x4c, 0x00, 0x00,
+  0x08, 0xa4, 0x34, 0x12, 0x00, 0x00,
+  0x90, 0x48, 0x88, 0xc8, 0x00, 0x00,
+  0x90, 0x22, 0x40, 0xa8, 0x00, 0x00,
+  0x09, 0x50, 0x31, 0x10, 0x00, 0x00,
+  0x00, 0x6b, 0x08, 0x0e, 0x00, 0x00,
+  0x20, 0x34, 0xc0, 0x90, 0x00, 0x00,
+  0x14, 0x44, 0x25, 0x04, 0x00, 0x00,
+  0xc2, 0x11, 0x02, 0x82, 0x00, 0x00,
+  0x00, 0xc6, 0x80, 0xc4, 0x00, 0x00,
+  0x65, 0x80, 0x2c, 0x60, 0x00, 0x00,
+  0xe3, 0xd1, 0x2e, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_19[114] = {
+  0x62, 0x22, 0xaa, 0xa2, 0x00, 0x00,
+  0x24, 0x44, 0x44, 0x54, 0x00, 0x00,
+  0xc0, 0x50, 0x0b, 0x0a, 0x00, 0x00,
+  0x03, 0x0c, 0x12, 0x94, 0x00, 0x00,
+  0x16, 0x29, 0x08, 0x64, 0x00, 0x00,
+  0x89, 0x01, 0x80, 0x1a, 0x00, 0x00,
+  0x82, 0x90, 0x41, 0x4c, 0x00, 0x00,
+  0x08, 0xa4, 0x34, 0x12, 0x00, 0x00,
+  0x90, 0x48, 0x88, 0xc8, 0x00, 0x00,
+  0xc0, 0xa0, 0x8a, 0xa2, 0x00, 0x00,
+  0x15, 0x56, 0x21, 0x44, 0x00, 0x00,
+  0x74, 0x40, 0x02, 0x4a, 0x00, 0x00,
+  0x00, 0x9c, 0x16, 0x84, 0x00, 0x00,
+  0x01, 0x2d, 0xb0, 0x40, 0x00, 0x00,
+  0x44, 0x93, 0x05, 0x18, 0x00, 0x00,
+  0x88, 0x50, 0x48, 0x94, 0x00, 0x00,
+  0x20, 0xa4, 0x70, 0x30, 0x00, 0x00,
+  0xaa, 0x04, 0x54, 0x4a, 0x00, 0x00,
+  0x02, 0x63, 0x09, 0x24, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_2[12] = {
+  0xec, 0xeb, 0x5d, 0x5c, 0x00, 0x00,
+  0xbb, 0x9c, 0xf2, 0xf2, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_20[120] = {
+  0xc0, 0xa0, 0x8a, 0xa2, 0x00, 0x00,
+  0x15, 0x56, 0x21, 0x44, 0x00, 0x00,
+  0x74, 0x40, 0x02, 0x4a, 0x00, 0x00,
+  0x00, 0x9c, 0x16, 0x84, 0x00, 0x00,
+  0x01, 0x2d, 0xb0, 0x40, 0x00, 0x00,
+  0x44, 0x93, 0x05, 0x18, 0x00, 0x00,
+  0x88, 0x50, 0x48, 0x94, 0x00, 0x00,
+  0x20, 0xa4, 0x70, 0x30, 0x00, 0x00,
+  0xaa, 0x04, 0x54, 0x4a, 0x00, 0x00,
+  0x02, 0x63, 0x09, 0x24, 0x00, 0x00,
+  0x62, 0x22, 0xaa, 0xa2, 0x00, 0x00,
+  0x24, 0x44, 0x44, 0x54, 0x00, 0x00,
+  0xc0, 0x50, 0x0b, 0x0a, 0x00, 0x00,
+  0x03, 0x0c, 0x12, 0x94, 0x00, 0x00,
+  0x16, 0x29, 0x08, 0x64, 0x00, 0x00,
+  0x89, 0x01, 0x80, 0x1a, 0x00, 0x00,
+  0x82, 0x90, 0x41, 0x4c, 0x00, 0x00,
+  0x08, 0xa4, 0x34, 0x12, 0x00, 0x00,
+  0x90, 0x48, 0x88, 0xc8, 0x00, 0x00,
+  0x9a, 0xd4, 0x6a, 0x36, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_21[126] = {
+  0xc0, 0xa0, 0x8a, 0xa2, 0x00, 0x00,
+  0x15, 0x56, 0x21, 0x44, 0x00, 0x00,
+  0x74, 0x40, 0x02, 0x4a, 0x00, 0x00,
+  0x00, 0x9c, 0x16, 0x84, 0x00, 0x00,
+  0x01, 0x2d, 0xb0, 0x40, 0x00, 0x00,
+  0x44, 0x93, 0x05, 0x18, 0x00, 0x00,
+  0x88, 0x50, 0x48, 0x94, 0x00, 0x00,
+  0x20, 0xa4, 0x70, 0x30, 0x00, 0x00,
+  0xaa, 0x04, 0x54, 0x4a, 0x00, 0x00,
+  0x02, 0x63, 0x09, 0x24, 0x00, 0x00,
+  0x62, 0x22, 0xaa, 0xaa, 0x00, 0x00,
+  0xf1, 0x10, 0x54, 0x44, 0x00, 0x00,
+  0x10, 0x0e, 0x62, 0x22, 0x00, 0x00,
+  0x10, 0xb1, 0x06, 0x84, 0x00, 0x00,
+  0x24, 0x24, 0x0d, 0x30, 0x00, 0x00,
+  0x01, 0x12, 0x81, 0xc2, 0x00, 0x00,
+  0x00, 0xc4, 0x58, 0x88, 0x00, 0x00,
+  0x04, 0xa3, 0xb0, 0x50, 0x00, 0x00,
+  0x02, 0x59, 0x25, 0x02, 0x00, 0x00,
+  0x2b, 0x01, 0x08, 0x64, 0x00, 0x00,
+  0x98, 0x40, 0xd0, 0x18, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_22[132] = {
+  0x62, 0x22, 0xaa, 0xaa, 0x00, 0x00,
+  0xf1, 0x10, 0x54, 0x44, 0x00, 0x00,
+  0x10, 0x0e, 0x62, 0x22, 0x00, 0x00,
+  0x10, 0xb1, 0x06, 0x84, 0x00, 0x00,
+  0x24, 0x24, 0x0d, 0x30, 0x00, 0x00,
+  0x01, 0x12, 0x81, 0xc2, 0x00, 0x00,
+  0x00, 0xc4, 0x58, 0x88, 0x00, 0x00,
+  0x04, 0xa3, 0xb0, 0x50, 0x00, 0x00,
+  0x02, 0x59, 0x25, 0x02, 0x00, 0x00,
+  0x2b, 0x01, 0x08, 0x64, 0x00, 0x00,
+  0x98, 0x40, 0xd0, 0x18, 0x00, 0x00,
+  0xc0, 0xa0, 0x8a, 0xa2, 0x00, 0x00,
+  0x15, 0x56, 0x21, 0x44, 0x00, 0x00,
+  0x74, 0x40, 0x02, 0x4a, 0x00, 0x00,
+  0x00, 0x9c, 0x16, 0x84, 0x00, 0x00,
+  0x01, 0x2d, 0xb0, 0x40, 0x00, 0x00,
+  0x44, 0x93, 0x05, 0x18, 0x00, 0x00,
+  0x88, 0x50, 0x48, 0x94, 0x00, 0x00,
+  0x20, 0xa4, 0x70, 0x30, 0x00, 0x00,
+  0xaa, 0x04, 0x54, 0x4a, 0x00, 0x00,
+  0x02, 0x63, 0x09, 0x24, 0x00, 0x00,
+  0x32, 0x23, 0x73, 0x8e, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_23[138] = {
+  0x62, 0x22, 0xaa, 0xaa, 0x00, 0x00,
+  0xf1, 0x10, 0x54, 0x44, 0x00, 0x00,
+  0x10, 0x0e, 0x62, 0x22, 0x00, 0x00,
+  0x10, 0xb1, 0x06, 0x84, 0x00, 0x00,
+  0x24, 0x24, 0x0d, 0x30, 0x00, 0x00,
+  0x01, 0x12, 0x81, 0xc2, 0x00, 0x00,
+  0x00, 0xc4, 0x58, 0x88, 0x00, 0x00,
+  0x04, 0xa3, 0xb0, 0x50, 0x00, 0x00,
+  0x02, 0x59, 0x25, 0x02, 0x00, 0x00,
+  0x2b, 0x01, 0x08, 0x64, 0x00, 0x00,
+  0x98, 0x40, 0xd0, 0x18, 0x00, 0x00,
+  0x88, 0x91, 0x08, 0x62, 0x00, 0x00,
+  0x40, 0x54, 0x31, 0x44, 0x00, 0x00,
+  0x82, 0x62, 0x9c, 0x02, 0x00, 0x00,
+  0x21, 0xa4, 0x89, 0x90, 0x00, 0x00,
+  0x10, 0x64, 0x1d, 0x20, 0x00, 0x00,
+  0x44, 0x0a, 0x41, 0x98, 0x00, 0x00,
+  0x10, 0xc9, 0x26, 0x80, 0x00, 0x00,
+  0x4d, 0x2a, 0x5a, 0x20, 0x00, 0x00,
+  0x38, 0x02, 0x62, 0x88, 0x00, 0x00,
+  0x17, 0x49, 0x80, 0x46, 0x00, 0x00,
+  0x90, 0x84, 0x22, 0x4a, 0x00, 0x00,
+  0x72, 0x15, 0xd1, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_24[144] = {
+  0x88, 0x91, 0x08, 0x62, 0x00, 0x00,
+  0x40, 0x54, 0x31, 0x44, 0x00, 0x00,
+  0x82, 0x62, 0x9c, 0x02, 0x00, 0x00,
+  0x21, 0xa4, 0x89, 0x90, 0x00, 0x00,
+  0x10, 0x64, 0x1d, 0x20, 0x00, 0x00,
+  0x44, 0x0a, 0x41, 0x98, 0x00, 0x00,
+  0x10, 0xc9, 0x26, 0x80, 0x00, 0x00,
+  0x4d, 0x2a, 0x5a, 0x20, 0x00, 0x00,
+  0x38, 0x02, 0x62, 0x88, 0x00, 0x00,
+  0x17, 0x49, 0x80, 0x46, 0x00, 0x00,
+  0x90, 0x84, 0x22, 0x4a, 0x00, 0x00,
+  0x72, 0x15, 0xd1, 0x00, 0x00, 0x00,
+  0x62, 0x22, 0xaa, 0xaa, 0x00, 0x00,
+  0xf1, 0x10, 0x54, 0x44, 0x00, 0x00,
+  0x10, 0x0e, 0x62, 0x22, 0x00, 0x00,
+  0x10, 0xb1, 0x06, 0x84, 0x00, 0x00,
+  0x24, 0x24, 0x0d, 0x30, 0x00, 0x00,
+  0x01, 0x12, 0x81, 0xc2, 0x00, 0x00,
+  0x00, 0xc4, 0x58, 0x88, 0x00, 0x00,
+  0x04, 0xa3, 0xb0, 0x50, 0x00, 0x00,
+  0x02, 0x59, 0x25, 0x02, 0x00, 0x00,
+  0x2b, 0x01, 0x08, 0x64, 0x00, 0x00,
+  0x98, 0x40, 0xd0, 0x18, 0x00, 0x00,
+  0xf0, 0xdf, 0x91, 0xb6, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_25[150] = {
+  0x88, 0x91, 0x08, 0x62, 0x00, 0x00,
+  0x40, 0x54, 0x31, 0x44, 0x00, 0x00,
+  0x82, 0x62, 0x9c, 0x02, 0x00, 0x00,
+  0x21, 0xa4, 0x89, 0x90, 0x00, 0x00,
+  0x10, 0x64, 0x1d, 0x20, 0x00, 0x00,
+  0x44, 0x0a, 0x41, 0x98, 0x00, 0x00,
+  0x10, 0xc9, 0x26, 0x80, 0x00, 0x00,
+  0x4d, 0x2a, 0x5a, 0x20, 0x00, 0x00,
+  0x38, 0x02, 0x62, 0x88, 0x00, 0x00,
+  0x17, 0x49, 0x80, 0x46, 0x00, 0x00,
+  0x90, 0x84, 0x22, 0x4a, 0x00, 0x00,
+  0x72, 0x15, 0xd1, 0x00, 0x00, 0x00,
+  0x62, 0xa2, 0x8a, 0x2a, 0x00, 0x00,
+  0x34, 0x44, 0x44, 0x44, 0x00, 0x00,
+  0x40, 0x4b, 0x2c, 0x18, 0x00, 0x00,
+  0xc4, 0x04, 0x18, 0xa0, 0x00, 0x00,
+  0x08, 0x60, 0xc4, 0x08, 0x00, 0x00,
+  0x94, 0x12, 0x92, 0x0c, 0x00, 0x00,
+  0x88, 0xc0, 0x23, 0x04, 0x00, 0x00,
+  0x21, 0x32, 0x24, 0x70, 0x00, 0x00,
+  0xc1, 0x40, 0x80, 0xe2, 0x00, 0x00,
+  0x10, 0x69, 0x51, 0x14, 0x00, 0x00,
+  0x06, 0x90, 0x11, 0x42, 0x00, 0x00,
+  0x59, 0x01, 0x41, 0x80, 0x00, 0x00,
+  0x0a, 0x0d, 0x8a, 0x20, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_26[156] = {
+  0x62, 0xa2, 0x8a, 0x2a, 0x00, 0x00,
+  0x34, 0x44, 0x44, 0x44, 0x00, 0x00,
+  0x40, 0x4b, 0x2c, 0x18, 0x00, 0x00,
+  0xc4, 0x04, 0x18, 0xa0, 0x00, 0x00,
+  0x08, 0x60, 0xc4, 0x08, 0x00, 0x00,
+  0x94, 0x12, 0x92, 0x0c, 0x00, 0x00,
+  0x88, 0xc0, 0x23, 0x04, 0x00, 0x00,
+  0x21, 0x32, 0x24, 0x70, 0x00, 0x00,
+  0xc1, 0x40, 0x80, 0xe2, 0x00, 0x00,
+  0x10, 0x69, 0x51, 0x14, 0x00, 0x00,
+  0x06, 0x90, 0x11, 0x42, 0x00, 0x00,
+  0x59, 0x01, 0x41, 0x80, 0x00, 0x00,
+  0x0a, 0x0d, 0x8a, 0x20, 0x00, 0x00,
+  0x88, 0x91, 0x08, 0x62, 0x00, 0x00,
+  0x40, 0x54, 0x31, 0x44, 0x00, 0x00,
+  0x82, 0x62, 0x9c, 0x02, 0x00, 0x00,
+  0x21, 0xa4, 0x89, 0x90, 0x00, 0x00,
+  0x10, 0x64, 0x1d, 0x20, 0x00, 0x00,
+  0x44, 0x0a, 0x41, 0x98, 0x00, 0x00,
+  0x10, 0xc9, 0x26, 0x80, 0x00, 0x00,
+  0x4d, 0x2a, 0x5a, 0x20, 0x00, 0x00,
+  0x38, 0x02, 0x62, 0x88, 0x00, 0x00,
+  0x17, 0x49, 0x80, 0x46, 0x00, 0x00,
+  0x90, 0x84, 0x22, 0x4a, 0x00, 0x00,
+  0x72, 0x15, 0xd1, 0x00, 0x00, 0x00,
+  0xc5, 0x75, 0x48, 0xba, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_27[162] = {
+  0x62, 0xa2, 0x8a, 0x2a, 0x00, 0x00,
+  0x34, 0x44, 0x44, 0x44, 0x00, 0x00,
+  0x40, 0x4b, 0x2c, 0x18, 0x00, 0x00,
+  0xc4, 0x04, 0x18, 0xa0, 0x00, 0x00,
+  0x08, 0x60, 0xc4, 0x08, 0x00, 0x00,
+  0x94, 0x12, 0x92, 0x0c, 0x00, 0x00,
+  0x88, 0xc0, 0x23, 0x04, 0x00, 0x00,
+  0x21, 0x32, 0x24, 0x70, 0x00, 0x00,
+  0xc1, 0x40, 0x80, 0xe2, 0x00, 0x00,
+  0x10, 0x69, 0x51, 0x14, 0x00, 0x00,
+  0x06, 0x90, 0x11, 0x42, 0x00, 0x00,
+  0x59, 0x01, 0x41, 0x80, 0x00, 0x00,
+  0x0a, 0x0d, 0x8a, 0x20, 0x00, 0x00,
+  0x40, 0x82, 0x8a, 0xa2, 0x00, 0x00,
+  0x15, 0x54, 0x44, 0x14, 0x00, 0x00,
+  0x88, 0x13, 0x09, 0xa0, 0x00, 0x00,
+  0xc0, 0x10, 0x19, 0x14, 0x00, 0x00,
+  0x80, 0xa0, 0x30, 0x0c, 0x00, 0x00,
+  0x01, 0x22, 0x60, 0x06, 0x00, 0x00,
+  0x40, 0x2c, 0xc2, 0x10, 0x00, 0x00,
+  0x22, 0x02, 0x80, 0x22, 0x00, 0x00,
+  0x90, 0x04, 0x20, 0x58, 0x00, 0x00,
+  0x12, 0x40, 0x12, 0xc0, 0x00, 0x00,
+  0x5d, 0x00, 0x01, 0x28, 0x00, 0x00,
+  0x20, 0x54, 0xa4, 0x80, 0x00, 0x00,
+  0x86, 0x09, 0x48, 0x48, 0x00, 0x00,
+  0x28, 0x89, 0x05, 0x10, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_28[168] = {
+  0x40, 0x82, 0x8a, 0xa2, 0x00, 0x00,
+  0x15, 0x54, 0x44, 0x14, 0x00, 0x00,
+  0x88, 0x13, 0x09, 0xa0, 0x00, 0x00,
+  0xc0, 0x10, 0x19, 0x14, 0x00, 0x00,
+  0x80, 0xa0, 0x30, 0x0c, 0x00, 0x00,
+  0x01, 0x22, 0x60, 0x06, 0x00, 0x00,
+  0x40, 0x2c, 0xc2, 0x10, 0x00, 0x00,
+  0x22, 0x02, 0x80, 0x22, 0x00, 0x00,
+  0x90, 0x04, 0x20, 0x58, 0x00, 0x00,
+  0x12, 0x40, 0x12, 0xc0, 0x00, 0x00,
+  0x5d, 0x00, 0x01, 0x28, 0x00, 0x00,
+  0x20, 0x54, 0xa4, 0x80, 0x00, 0x00,
+  0x86, 0x09, 0x48, 0x48, 0x00, 0x00,
+  0x28, 0x89, 0x05, 0x10, 0x00, 0x00,
+  0x62, 0xa2, 0x8a, 0x2a, 0x00, 0x00,
+  0x34, 0x44, 0x44, 0x44, 0x00, 0x00,
+  0x40, 0x4b, 0x2c, 0x18, 0x00, 0x00,
+  0xc4, 0x04, 0x18, 0xa0, 0x00, 0x00,
+  0x08, 0x60, 0xc4, 0x08, 0x00, 0x00,
+  0x94, 0x12, 0x92, 0x0c, 0x00, 0x00,
+  0x88, 0xc0, 0x23, 0x04, 0x00, 0x00,
+  0x21, 0x32, 0x24, 0x70, 0x00, 0x00,
+  0xc1, 0x40, 0x80, 0xe2, 0x00, 0x00,
+  0x10, 0x69, 0x51, 0x14, 0x00, 0x00,
+  0x06, 0x90, 0x11, 0x42, 0x00, 0x00,
+  0x59, 0x01, 0x41, 0x80, 0x00, 0x00,
+  0x0a, 0x0d, 0x8a, 0x20, 0x00, 0x00,
+  0xbc, 0x0d, 0xca, 0x28, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_29[174] = {
+  0x40, 0x82, 0x8a, 0xa2, 0x00, 0x00,
+  0x15, 0x54, 0x44, 0x14, 0x00, 0x00,
+  0x88, 0x13, 0x09, 0xa0, 0x00, 0x00,
+  0xc0, 0x10, 0x19, 0x14, 0x00, 0x00,
+  0x80, 0xa0, 0x30, 0x0c, 0x00, 0x00,
+  0x01, 0x22, 0x60, 0x06, 0x00, 0x00,
+  0x40, 0x2c, 0xc2, 0x10, 0x00, 0x00,
+  0x22, 0x02, 0x80, 0x22, 0x00, 0x00,
+  0x90, 0x04, 0x20, 0x58, 0x00, 0x00,
+  0x12, 0x40, 0x12, 0xc0, 0x00, 0x00,
+  0x5d, 0x00, 0x01, 0x28, 0x00, 0x00,
+  0x20, 0x54, 0xa4, 0x80, 0x00, 0x00,
+  0x86, 0x09, 0x48, 0x48, 0x00, 0x00,
+  0x28, 0x89, 0x05, 0x10, 0x00, 0x00,
+  0x62, 0x22, 0xaa, 0x22, 0x00, 0x00,
+  0x31, 0x10, 0x44, 0x44, 0x00, 0x00,
+  0x58, 0x00, 0x22, 0x22, 0x00, 0x00,
+  0x01, 0x13, 0x00, 0x8a, 0x00, 0x00,
+  0x88, 0x20, 0x40, 0x34, 0x00, 0x00,
+  0x44, 0x02, 0x10, 0xd0, 0x00, 0x00,
+  0x29, 0x04, 0x45, 0x08, 0x00, 0x00,
+  0x82, 0xa0, 0x90, 0x12, 0x00, 0x00,
+  0x0a, 0x1a, 0x0e, 0x02, 0x00, 0x00,
+  0x11, 0xe1, 0x28, 0x40, 0x00, 0x00,
+  0x84, 0x05, 0x04, 0x0c, 0x00, 0x00,
+  0x86, 0x40, 0xc0, 0x90, 0x00, 0x00,
+  0x00, 0x87, 0x13, 0x00, 0x00, 0x00,
+  0x44, 0x48, 0x01, 0x1c, 0x00, 0x00,
+  0x10, 0x98, 0x30, 0x44, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_3[18] = {
+  0xac, 0x93, 0x5a, 0x5a, 0x00, 0x00,
+  0x55, 0x4a, 0xec, 0x6c, 0x00, 0x00,
+  0x43, 0x36, 0x4d, 0xb6, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_30[180] = {
+  0x62, 0x22, 0xaa, 0x22, 0x00, 0x00,
+  0x31, 0x10, 0x44, 0x44, 0x00, 0x00,
+  0x58, 0x00, 0x22, 0x22, 0x00, 0x00,
+  0x01, 0x13, 0x00, 0x8a, 0x00, 0x00,
+  0x88, 0x20, 0x40, 0x34, 0x00, 0x00,
+  0x44, 0x02, 0x10, 0xd0, 0x00, 0x00,
+  0x29, 0x04, 0x45, 0x08, 0x00, 0x00,
+  0x82, 0xa0, 0x90, 0x12, 0x00, 0x00,
+  0x0a, 0x1a, 0x0e, 0x02, 0x00, 0x00,
+  0x11, 0xe1, 0x28, 0x40, 0x00, 0x00,
+  0x84, 0x05, 0x04, 0x0c, 0x00, 0x00,
+  0x86, 0x40, 0xc0, 0x90, 0x00, 0x00,
+  0x00, 0x87, 0x13, 0x00, 0x00, 0x00,
+  0x44, 0x48, 0x01, 0x1c, 0x00, 0x00,
+  0x10, 0x98, 0x30, 0x44, 0x00, 0x00,
+  0x40, 0x82, 0x8a, 0xa2, 0x00, 0x00,
+  0x15, 0x54, 0x44, 0x14, 0x00, 0x00,
+  0x88, 0x13, 0x09, 0xa0, 0x00, 0x00,
+  0xc0, 0x10, 0x19, 0x14, 0x00, 0x00,
+  0x80, 0xa0, 0x30, 0x0c, 0x00, 0x00,
+  0x01, 0x22, 0x60, 0x06, 0x00, 0x00,
+  0x40, 0x2c, 0xc2, 0x10, 0x00, 0x00,
+  0x22, 0x02, 0x80, 0x22, 0x00, 0x00,
+  0x90, 0x04, 0x20, 0x58, 0x00, 0x00,
+  0x12, 0x40, 0x12, 0xc0, 0x00, 0x00,
+  0x5d, 0x00, 0x01, 0x28, 0x00, 0x00,
+  0x20, 0x54, 0xa4, 0x80, 0x00, 0x00,
+  0x86, 0x09, 0x48, 0x48, 0x00, 0x00,
+  0x28, 0x89, 0x05, 0x10, 0x00, 0x00,
+  0xe1, 0x4f, 0xe0, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_31[186] = {
+  0x62, 0x22, 0xaa, 0x22, 0x00, 0x00,
+  0x31, 0x10, 0x44, 0x44, 0x00, 0x00,
+  0x58, 0x00, 0x22, 0x22, 0x00, 0x00,
+  0x01, 0x13, 0x00, 0x8a, 0x00, 0x00,
+  0x88, 0x20, 0x40, 0x34, 0x00, 0x00,
+  0x44, 0x02, 0x10, 0xd0, 0x00, 0x00,
+  0x29, 0x04, 0x45, 0x08, 0x00, 0x00,
+  0x82, 0xa0, 0x90, 0x12, 0x00, 0x00,
+  0x0a, 0x1a, 0x0e, 0x02, 0x00, 0x00,
+  0x11, 0xe1, 0x28, 0x40, 0x00, 0x00,
+  0x84, 0x05, 0x04, 0x0c, 0x00, 0x00,
+  0x86, 0x40, 0xc0, 0x90, 0x00, 0x00,
+  0x00, 0x87, 0x13, 0x00, 0x00, 0x00,
+  0x44, 0x48, 0x01, 0x1c, 0x00, 0x00,
+  0x10, 0x98, 0x30, 0x44, 0x00, 0x00,
+  0x62, 0x23, 0x48, 0x20, 0x00, 0x00,
+  0x31, 0x10, 0x02, 0x54, 0x00, 0x00,
+  0x58, 0x00, 0x0c, 0x84, 0x00, 0x00,
+  0x01, 0x12, 0x10, 0xd0, 0x00, 0x00,
+  0x88, 0x21, 0x03, 0x20, 0x00, 0x00,
+  0x44, 0x02, 0x01, 0xe0, 0x00, 0x00,
+  0x29, 0x04, 0xa0, 0x0a, 0x00, 0x00,
+  0x82, 0xa0, 0x40, 0xa2, 0x00, 0x00,
+  0x0a, 0x1a, 0x86, 0x10, 0x00, 0x00,
+  0x11, 0xe0, 0xd1, 0x00, 0x00, 0x00,
+  0x84, 0x05, 0x00, 0x16, 0x00, 0x00,
+  0x86, 0x40, 0x20, 0x98, 0x00, 0x00,
+  0x00, 0x86, 0x24, 0x60, 0x00, 0x00,
+  0x44, 0x48, 0x81, 0x0a, 0x00, 0x00,
+  0x10, 0x98, 0x1c, 0x08, 0x00, 0x00,
+  0x87, 0x74, 0x30, 0x24, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_4[24] = {
+  0x25, 0xaa, 0xaa, 0xaa, 0x00, 0x00,
+  0x95, 0x55, 0x55, 0x54, 0x00, 0x00,
+  0x1a, 0x6a, 0x6a, 0x6a, 0x00, 0x00,
+  0x43, 0xd5, 0x95, 0x94, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_5[30] = {
+  0x64, 0xa2, 0xaa, 0xaa, 0x00, 0x00,
+  0x25, 0x54, 0x54, 0x54, 0x00, 0x00,
+  0x49, 0x68, 0x48, 0x4a, 0x00, 0x00,
+  0x53, 0x91, 0x09, 0x90, 0x00, 0x00,
+  0x8e, 0x30, 0x21, 0x6c, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_6[36] = {
+  0x62, 0x8a, 0xa2, 0xa2, 0x00, 0x00,
+  0x15, 0x54, 0x14, 0x54, 0x00, 0x00,
+  0x4c, 0x47, 0x44, 0x2a, 0x00, 0x00,
+  0x52, 0x95, 0x08, 0x94, 0x00, 0x00,
+  0x23, 0x64, 0x61, 0x24, 0x00, 0x00,
+  0x8a, 0x58, 0x09, 0x58, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_7[42] = {
+  0x62, 0xa2, 0x8a, 0xa2, 0x00, 0x00,
+  0xb1, 0x14, 0x44, 0x54, 0x00, 0x00,
+  0x18, 0x6b, 0x22, 0x22, 0x00, 0x00,
+  0x44, 0xd4, 0x5c, 0x10, 0x00, 0x00,
+  0x13, 0x64, 0x90, 0x68, 0x00, 0x00,
+  0x49, 0x1b, 0x20, 0x52, 0x00, 0x00,
+  0x86, 0x8c, 0x13, 0x0c, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_8[48] = {
+  0x90, 0x22, 0x40, 0xa8, 0x00, 0x00,
+  0x09, 0x50, 0x31, 0x10, 0x00, 0x00,
+  0x00, 0x6b, 0x08, 0x0e, 0x00, 0x00,
+  0x20, 0x34, 0xc0, 0x90, 0x00, 0x00,
+  0x14, 0x44, 0x25, 0x04, 0x00, 0x00,
+  0xc2, 0x11, 0x02, 0x82, 0x00, 0x00,
+  0x00, 0xc6, 0x80, 0xc4, 0x00, 0x00,
+  0x65, 0x80, 0x2c, 0x60, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom31_9[54] = {
+  0x62, 0x22, 0xaa, 0xa2, 0x00, 0x00,
+  0x24, 0x44, 0x44, 0x54, 0x00, 0x00,
+  0xc0, 0x50, 0x0b, 0x0a, 0x00, 0x00,
+  0x03, 0x0c, 0x12, 0x94, 0x00, 0x00,
+  0x16, 0x29, 0x08, 0x64, 0x00, 0x00,
+  0x89, 0x01, 0x80, 0x1a, 0x00, 0x00,
+  0x82, 0x90, 0x41, 0x4c, 0x00, 0x00,
+  0x08, 0xa4, 0x34, 0x12, 0x00, 0x00,
+  0x90, 0x48, 0x88, 0xc8, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_1[6] = {
+  0xff, 0xff, 0xff, 0xff, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_10[60] = {
+  0x45, 0x51, 0x45, 0x51, 0x00, 0x00,
+  0x10, 0xa2, 0x10, 0xa2, 0x00, 0x00,
+  0x01, 0x25, 0x01, 0x25, 0x00, 0x00,
+  0x0b, 0x42, 0x0b, 0x42, 0x00, 0x00,
+  0xd8, 0x20, 0xd8, 0x20, 0x00, 0x00,
+  0x82, 0x8c, 0x82, 0x8c, 0x00, 0x00,
+  0x24, 0x4a, 0x24, 0x4a, 0x00, 0x00,
+  0x38, 0x18, 0x38, 0x18, 0x00, 0x00,
+  0x2a, 0x25, 0x2a, 0x25, 0x00, 0x00,
+  0x84, 0x92, 0x84, 0x92, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_11[66] = {
+  0x55, 0x55, 0x55, 0x55, 0x00, 0x00,
+  0x2a, 0x22, 0x2a, 0x22, 0x00, 0x00,
+  0x31, 0x11, 0x31, 0x11, 0x00, 0x00,
+  0x83, 0x42, 0x83, 0x42, 0x00, 0x00,
+  0x06, 0x98, 0x06, 0x98, 0x00, 0x00,
+  0x40, 0xe1, 0x40, 0xe1, 0x00, 0x00,
+  0x2c, 0x44, 0x2c, 0x44, 0x00, 0x00,
+  0xd8, 0x28, 0xd8, 0x28, 0x00, 0x00,
+  0x92, 0x81, 0x92, 0x81, 0x00, 0x00,
+  0x84, 0x32, 0x84, 0x32, 0x00, 0x00,
+  0x68, 0x0c, 0x68, 0x0c, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_12[72] = {
+  0x84, 0x31, 0x84, 0x31, 0x00, 0x00,
+  0x18, 0xa2, 0x18, 0xa2, 0x00, 0x00,
+  0x4e, 0x01, 0x4e, 0x01, 0x00, 0x00,
+  0x44, 0xc8, 0x44, 0xc8, 0x00, 0x00,
+  0x0e, 0x90, 0x0e, 0x90, 0x00, 0x00,
+  0x20, 0xcc, 0x20, 0xcc, 0x00, 0x00,
+  0x93, 0x40, 0x93, 0x40, 0x00, 0x00,
+  0x2d, 0x10, 0x2d, 0x10, 0x00, 0x00,
+  0x31, 0x44, 0x31, 0x44, 0x00, 0x00,
+  0xc0, 0x23, 0xc0, 0x23, 0x00, 0x00,
+  0x11, 0x25, 0x11, 0x25, 0x00, 0x00,
+  0xe8, 0x80, 0xe8, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_13[78] = {
+  0x45, 0x15, 0x45, 0x15, 0x00, 0x00,
+  0x22, 0x22, 0x22, 0x22, 0x00, 0x00,
+  0x96, 0x0c, 0x96, 0x0c, 0x00, 0x00,
+  0x0c, 0x50, 0x0c, 0x50, 0x00, 0x00,
+  0x62, 0x04, 0x62, 0x04, 0x00, 0x00,
+  0x49, 0x06, 0x49, 0x06, 0x00, 0x00,
+  0x11, 0x82, 0x11, 0x82, 0x00, 0x00,
+  0x12, 0x38, 0x12, 0x38, 0x00, 0x00,
+  0x40, 0x71, 0x40, 0x71, 0x00, 0x00,
+  0xa8, 0x8a, 0xa8, 0x8a, 0x00, 0x00,
+  0x08, 0xa1, 0x08, 0xa1, 0x00, 0x00,
+  0xa0, 0xc0, 0xa0, 0xc0, 0x00, 0x00,
+  0xc5, 0x10, 0xc5, 0x10, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_14[84] = {
+  0x45, 0x51, 0x45, 0x51, 0x00, 0x00,
+  0x22, 0x0a, 0x22, 0x0a, 0x00, 0x00,
+  0x84, 0xd0, 0x84, 0xd0, 0x00, 0x00,
+  0x0c, 0x8a, 0x0c, 0x8a, 0x00, 0x00,
+  0x18, 0x06, 0x18, 0x06, 0x00, 0x00,
+  0x30, 0x03, 0x30, 0x03, 0x00, 0x00,
+  0x61, 0x08, 0x61, 0x08, 0x00, 0x00,
+  0x40, 0x11, 0x40, 0x11, 0x00, 0x00,
+  0x10, 0x2c, 0x10, 0x2c, 0x00, 0x00,
+  0x09, 0x60, 0x09, 0x60, 0x00, 0x00,
+  0x00, 0x94, 0x00, 0x94, 0x00, 0x00,
+  0x52, 0x40, 0x52, 0x40, 0x00, 0x00,
+  0xa4, 0x24, 0xa4, 0x24, 0x00, 0x00,
+  0x82, 0x88, 0x82, 0x88, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_15[90] = {
+  0x55, 0x11, 0x55, 0x11, 0x00, 0x00,
+  0x22, 0x22, 0x22, 0x22, 0x00, 0x00,
+  0x11, 0x11, 0x11, 0x11, 0x00, 0x00,
+  0x80, 0x45, 0x80, 0x45, 0x00, 0x00,
+  0x20, 0x1a, 0x20, 0x1a, 0x00, 0x00,
+  0x08, 0x68, 0x08, 0x68, 0x00, 0x00,
+  0x22, 0x84, 0x22, 0x84, 0x00, 0x00,
+  0x48, 0x09, 0x48, 0x09, 0x00, 0x00,
+  0x07, 0x01, 0x07, 0x01, 0x00, 0x00,
+  0x94, 0x20, 0x94, 0x20, 0x00, 0x00,
+  0x82, 0x06, 0x82, 0x06, 0x00, 0x00,
+  0x60, 0x48, 0x60, 0x48, 0x00, 0x00,
+  0x89, 0x80, 0x89, 0x80, 0x00, 0x00,
+  0x00, 0x8e, 0x00, 0x8e, 0x00, 0x00,
+  0x18, 0x22, 0x18, 0x22, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_16[96] = {
+  0xa4, 0x10, 0xa4, 0x10, 0x00, 0x00,
+  0x01, 0x2a, 0x01, 0x2a, 0x00, 0x00,
+  0x06, 0x42, 0x06, 0x42, 0x00, 0x00,
+  0x08, 0x68, 0x08, 0x68, 0x00, 0x00,
+  0x81, 0x90, 0x81, 0x90, 0x00, 0x00,
+  0x00, 0xf0, 0x00, 0xf0, 0x00, 0x00,
+  0x50, 0x05, 0x50, 0x05, 0x00, 0x00,
+  0x20, 0x51, 0x20, 0x51, 0x00, 0x00,
+  0x43, 0x08, 0x43, 0x08, 0x00, 0x00,
+  0x68, 0x80, 0x68, 0x80, 0x00, 0x00,
+  0x80, 0x0b, 0x80, 0x0b, 0x00, 0x00,
+  0x10, 0x4c, 0x10, 0x4c, 0x00, 0x00,
+  0x12, 0x30, 0x12, 0x30, 0x00, 0x00,
+  0x40, 0x85, 0x40, 0x85, 0x00, 0x00,
+  0x0e, 0x04, 0x0e, 0x04, 0x00, 0x00,
+  0x18, 0x12, 0x18, 0x12, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_17[102] = {
+  0x20, 0x54, 0x20, 0x54, 0x00, 0x00,
+  0x18, 0x88, 0x18, 0x88, 0x00, 0x00,
+  0x84, 0x07, 0x84, 0x07, 0x00, 0x00,
+  0x60, 0x48, 0x60, 0x48, 0x00, 0x00,
+  0x12, 0x82, 0x12, 0x82, 0x00, 0x00,
+  0x81, 0x41, 0x81, 0x41, 0x00, 0x00,
+  0x40, 0x62, 0x40, 0x62, 0x00, 0x00,
+  0x16, 0x30, 0x16, 0x30, 0x00, 0x00,
+  0x55, 0x51, 0x55, 0x51, 0x00, 0x00,
+  0x22, 0x2a, 0x22, 0x2a, 0x00, 0x00,
+  0x05, 0x85, 0x05, 0x85, 0x00, 0x00,
+  0x09, 0x4a, 0x09, 0x4a, 0x00, 0x00,
+  0x84, 0x32, 0x84, 0x32, 0x00, 0x00,
+  0xc0, 0x0d, 0xc0, 0x0d, 0x00, 0x00,
+  0x20, 0xa6, 0x20, 0xa6, 0x00, 0x00,
+  0x1a, 0x09, 0x1a, 0x09, 0x00, 0x00,
+  0x44, 0x64, 0x44, 0x64, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_18[108] = {
+  0x55, 0x51, 0x55, 0x51, 0x00, 0x00,
+  0x22, 0x2a, 0x22, 0x2a, 0x00, 0x00,
+  0x05, 0x85, 0x05, 0x85, 0x00, 0x00,
+  0x09, 0x4a, 0x09, 0x4a, 0x00, 0x00,
+  0x84, 0x32, 0x84, 0x32, 0x00, 0x00,
+  0xc0, 0x0d, 0xc0, 0x0d, 0x00, 0x00,
+  0x20, 0xa6, 0x20, 0xa6, 0x00, 0x00,
+  0x1a, 0x09, 0x1a, 0x09, 0x00, 0x00,
+  0x44, 0x64, 0x44, 0x64, 0x00, 0x00,
+  0x20, 0x54, 0x20, 0x54, 0x00, 0x00,
+  0x18, 0x88, 0x18, 0x88, 0x00, 0x00,
+  0x84, 0x07, 0x84, 0x07, 0x00, 0x00,
+  0x60, 0x48, 0x60, 0x48, 0x00, 0x00,
+  0x12, 0x82, 0x12, 0x82, 0x00, 0x00,
+  0x81, 0x41, 0x81, 0x41, 0x00, 0x00,
+  0x40, 0x62, 0x40, 0x62, 0x00, 0x00,
+  0x16, 0x30, 0x16, 0x30, 0x00, 0x00,
+  0x1e, 0xb2, 0xd8, 0x53, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_19[114] = {
+  0x55, 0x51, 0x55, 0x51, 0x00, 0x00,
+  0x22, 0x2a, 0x22, 0x2a, 0x00, 0x00,
+  0x05, 0x85, 0x05, 0x85, 0x00, 0x00,
+  0x09, 0x4a, 0x09, 0x4a, 0x00, 0x00,
+  0x84, 0x32, 0x84, 0x32, 0x00, 0x00,
+  0xc0, 0x0d, 0xc0, 0x0d, 0x00, 0x00,
+  0x20, 0xa6, 0x20, 0xa6, 0x00, 0x00,
+  0x1a, 0x09, 0x1a, 0x09, 0x00, 0x00,
+  0x44, 0x64, 0x44, 0x64, 0x00, 0x00,
+  0x45, 0x51, 0x45, 0x51, 0x00, 0x00,
+  0x10, 0xa2, 0x10, 0xa2, 0x00, 0x00,
+  0x01, 0x25, 0x01, 0x25, 0x00, 0x00,
+  0x0b, 0x42, 0x0b, 0x42, 0x00, 0x00,
+  0xd8, 0x20, 0xd8, 0x20, 0x00, 0x00,
+  0x82, 0x8c, 0x82, 0x8c, 0x00, 0x00,
+  0x24, 0x4a, 0x24, 0x4a, 0x00, 0x00,
+  0x38, 0x18, 0x38, 0x18, 0x00, 0x00,
+  0x2a, 0x25, 0x2a, 0x25, 0x00, 0x00,
+  0x84, 0x92, 0x84, 0x92, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_2[12] = {
+  0xae, 0xae, 0xae, 0xae, 0x00, 0x00,
+  0x79, 0x79, 0x79, 0x79, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_20[120] = {
+  0x45, 0x51, 0x45, 0x51, 0x00, 0x00,
+  0x10, 0xa2, 0x10, 0xa2, 0x00, 0x00,
+  0x01, 0x25, 0x01, 0x25, 0x00, 0x00,
+  0x0b, 0x42, 0x0b, 0x42, 0x00, 0x00,
+  0xd8, 0x20, 0xd8, 0x20, 0x00, 0x00,
+  0x82, 0x8c, 0x82, 0x8c, 0x00, 0x00,
+  0x24, 0x4a, 0x24, 0x4a, 0x00, 0x00,
+  0x38, 0x18, 0x38, 0x18, 0x00, 0x00,
+  0x2a, 0x25, 0x2a, 0x25, 0x00, 0x00,
+  0x84, 0x92, 0x84, 0x92, 0x00, 0x00,
+  0x55, 0x51, 0x55, 0x51, 0x00, 0x00,
+  0x22, 0x2a, 0x22, 0x2a, 0x00, 0x00,
+  0x05, 0x85, 0x05, 0x85, 0x00, 0x00,
+  0x09, 0x4a, 0x09, 0x4a, 0x00, 0x00,
+  0x84, 0x32, 0x84, 0x32, 0x00, 0x00,
+  0xc0, 0x0d, 0xc0, 0x0d, 0x00, 0x00,
+  0x20, 0xa6, 0x20, 0xa6, 0x00, 0x00,
+  0x1a, 0x09, 0x1a, 0x09, 0x00, 0x00,
+  0x44, 0x64, 0x44, 0x64, 0x00, 0x00,
+  0x96, 0xd3, 0xf6, 0xac, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_21[126] = {
+  0x45, 0x51, 0x45, 0x51, 0x00, 0x00,
+  0x10, 0xa2, 0x10, 0xa2, 0x00, 0x00,
+  0x01, 0x25, 0x01, 0x25, 0x00, 0x00,
+  0x0b, 0x42, 0x0b, 0x42, 0x00, 0x00,
+  0xd8, 0x20, 0xd8, 0x20, 0x00, 0x00,
+  0x82, 0x8c, 0x82, 0x8c, 0x00, 0x00,
+  0x24, 0x4a, 0x24, 0x4a, 0x00, 0x00,
+  0x38, 0x18, 0x38, 0x18, 0x00, 0x00,
+  0x2a, 0x25, 0x2a, 0x25, 0x00, 0x00,
+  0x84, 0x92, 0x84, 0x92, 0x00, 0x00,
+  0x55, 0x55, 0x55, 0x55, 0x00, 0x00,
+  0x2a, 0x22, 0x2a, 0x22, 0x00, 0x00,
+  0x31, 0x11, 0x31, 0x11, 0x00, 0x00,
+  0x83, 0x42, 0x83, 0x42, 0x00, 0x00,
+  0x06, 0x98, 0x06, 0x98, 0x00, 0x00,
+  0x40, 0xe1, 0x40, 0xe1, 0x00, 0x00,
+  0x2c, 0x44, 0x2c, 0x44, 0x00, 0x00,
+  0xd8, 0x28, 0xd8, 0x28, 0x00, 0x00,
+  0x92, 0x81, 0x92, 0x81, 0x00, 0x00,
+  0x84, 0x32, 0x84, 0x32, 0x00, 0x00,
+  0x68, 0x0c, 0x68, 0x0c, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_22[132] = {
+  0x55, 0x55, 0x55, 0x55, 0x00, 0x00,
+  0x2a, 0x22, 0x2a, 0x22, 0x00, 0x00,
+  0x31, 0x11, 0x31, 0x11, 0x00, 0x00,
+  0x83, 0x42, 0x83, 0x42, 0x00, 0x00,
+  0x06, 0x98, 0x06, 0x98, 0x00, 0x00,
+  0x40, 0xe1, 0x40, 0xe1, 0x00, 0x00,
+  0x2c, 0x44, 0x2c, 0x44, 0x00, 0x00,
+  0xd8, 0x28, 0xd8, 0x28, 0x00, 0x00,
+  0x92, 0x81, 0x92, 0x81, 0x00, 0x00,
+  0x84, 0x32, 0x84, 0x32, 0x00, 0x00,
+  0x68, 0x0c, 0x68, 0x0c, 0x00, 0x00,
+  0x45, 0x51, 0x45, 0x51, 0x00, 0x00,
+  0x10, 0xa2, 0x10, 0xa2, 0x00, 0x00,
+  0x01, 0x25, 0x01, 0x25, 0x00, 0x00,
+  0x0b, 0x42, 0x0b, 0x42, 0x00, 0x00,
+  0xd8, 0x20, 0xd8, 0x20, 0x00, 0x00,
+  0x82, 0x8c, 0x82, 0x8c, 0x00, 0x00,
+  0x24, 0x4a, 0x24, 0x4a, 0x00, 0x00,
+  0x38, 0x18, 0x38, 0x18, 0x00, 0x00,
+  0x2a, 0x25, 0x2a, 0x25, 0x00, 0x00,
+  0x84, 0x92, 0x84, 0x92, 0x00, 0x00,
+  0xeb, 0xb2, 0x22, 0x89, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_23[138] = {
+  0x55, 0x55, 0x55, 0x55, 0x00, 0x00,
+  0x2a, 0x22, 0x2a, 0x22, 0x00, 0x00,
+  0x31, 0x11, 0x31, 0x11, 0x00, 0x00,
+  0x83, 0x42, 0x83, 0x42, 0x00, 0x00,
+  0x06, 0x98, 0x06, 0x98, 0x00, 0x00,
+  0x40, 0xe1, 0x40, 0xe1, 0x00, 0x00,
+  0x2c, 0x44, 0x2c, 0x44, 0x00, 0x00,
+  0xd8, 0x28, 0xd8, 0x28, 0x00, 0x00,
+  0x92, 0x81, 0x92, 0x81, 0x00, 0x00,
+  0x84, 0x32, 0x84, 0x32, 0x00, 0x00,
+  0x68, 0x0c, 0x68, 0x0c, 0x00, 0x00,
+  0x84, 0x31, 0x84, 0x31, 0x00, 0x00,
+  0x18, 0xa2, 0x18, 0xa2, 0x00, 0x00,
+  0x4e, 0x01, 0x4e, 0x01, 0x00, 0x00,
+  0x44, 0xc8, 0x44, 0xc8, 0x00, 0x00,
+  0x0e, 0x90, 0x0e, 0x90, 0x00, 0x00,
+  0x20, 0xcc, 0x20, 0xcc, 0x00, 0x00,
+  0x93, 0x40, 0x93, 0x40, 0x00, 0x00,
+  0x2d, 0x10, 0x2d, 0x10, 0x00, 0x00,
+  0x31, 0x44, 0x31, 0x44, 0x00, 0x00,
+  0xc0, 0x23, 0xc0, 0x23, 0x00, 0x00,
+  0x11, 0x25, 0x11, 0x25, 0x00, 0x00,
+  0xe8, 0x80, 0xe8, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_24[144] = {
+  0x84, 0x31, 0x84, 0x31, 0x00, 0x00,
+  0x18, 0xa2, 0x18, 0xa2, 0x00, 0x00,
+  0x4e, 0x01, 0x4e, 0x01, 0x00, 0x00,
+  0x44, 0xc8, 0x44, 0xc8, 0x00, 0x00,
+  0x0e, 0x90, 0x0e, 0x90, 0x00, 0x00,
+  0x20, 0xcc, 0x20, 0xcc, 0x00, 0x00,
+  0x93, 0x40, 0x93, 0x40, 0x00, 0x00,
+  0x2d, 0x10, 0x2d, 0x10, 0x00, 0x00,
+  0x31, 0x44, 0x31, 0x44, 0x00, 0x00,
+  0xc0, 0x23, 0xc0, 0x23, 0x00, 0x00,
+  0x11, 0x25, 0x11, 0x25, 0x00, 0x00,
+  0xe8, 0x80, 0xe8, 0x80, 0x00, 0x00,
+  0x55, 0x55, 0x55, 0x55, 0x00, 0x00,
+  0x2a, 0x22, 0x2a, 0x22, 0x00, 0x00,
+  0x31, 0x11, 0x31, 0x11, 0x00, 0x00,
+  0x83, 0x42, 0x83, 0x42, 0x00, 0x00,
+  0x06, 0x98, 0x06, 0x98, 0x00, 0x00,
+  0x40, 0xe1, 0x40, 0xe1, 0x00, 0x00,
+  0x2c, 0x44, 0x2c, 0x44, 0x00, 0x00,
+  0xd8, 0x28, 0xd8, 0x28, 0x00, 0x00,
+  0x92, 0x81, 0x92, 0x81, 0x00, 0x00,
+  0x84, 0x32, 0x84, 0x32, 0x00, 0x00,
+  0x68, 0x0c, 0x68, 0x0c, 0x00, 0x00,
+  0xf3, 0x5a, 0x2f, 0x5d, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_25[150] = {
+  0x84, 0x31, 0x84, 0x31, 0x00, 0x00,
+  0x18, 0xa2, 0x18, 0xa2, 0x00, 0x00,
+  0x4e, 0x01, 0x4e, 0x01, 0x00, 0x00,
+  0x44, 0xc8, 0x44, 0xc8, 0x00, 0x00,
+  0x0e, 0x90, 0x0e, 0x90, 0x00, 0x00,
+  0x20, 0xcc, 0x20, 0xcc, 0x00, 0x00,
+  0x93, 0x40, 0x93, 0x40, 0x00, 0x00,
+  0x2d, 0x10, 0x2d, 0x10, 0x00, 0x00,
+  0x31, 0x44, 0x31, 0x44, 0x00, 0x00,
+  0xc0, 0x23, 0xc0, 0x23, 0x00, 0x00,
+  0x11, 0x25, 0x11, 0x25, 0x00, 0x00,
+  0xe8, 0x80, 0xe8, 0x80, 0x00, 0x00,
+  0x45, 0x15, 0x45, 0x15, 0x00, 0x00,
+  0x22, 0x22, 0x22, 0x22, 0x00, 0x00,
+  0x96, 0x0c, 0x96, 0x0c, 0x00, 0x00,
+  0x0c, 0x50, 0x0c, 0x50, 0x00, 0x00,
+  0x62, 0x04, 0x62, 0x04, 0x00, 0x00,
+  0x49, 0x06, 0x49, 0x06, 0x00, 0x00,
+  0x11, 0x82, 0x11, 0x82, 0x00, 0x00,
+  0x12, 0x38, 0x12, 0x38, 0x00, 0x00,
+  0x40, 0x71, 0x40, 0x71, 0x00, 0x00,
+  0xa8, 0x8a, 0xa8, 0x8a, 0x00, 0x00,
+  0x08, 0xa1, 0x08, 0xa1, 0x00, 0x00,
+  0xa0, 0xc0, 0xa0, 0xc0, 0x00, 0x00,
+  0xc5, 0x10, 0xc5, 0x10, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_26[156] = {
+  0x45, 0x15, 0x45, 0x15, 0x00, 0x00,
+  0x22, 0x22, 0x22, 0x22, 0x00, 0x00,
+  0x96, 0x0c, 0x96, 0x0c, 0x00, 0x00,
+  0x0c, 0x50, 0x0c, 0x50, 0x00, 0x00,
+  0x62, 0x04, 0x62, 0x04, 0x00, 0x00,
+  0x49, 0x06, 0x49, 0x06, 0x00, 0x00,
+  0x11, 0x82, 0x11, 0x82, 0x00, 0x00,
+  0x12, 0x38, 0x12, 0x38, 0x00, 0x00,
+  0x40, 0x71, 0x40, 0x71, 0x00, 0x00,
+  0xa8, 0x8a, 0xa8, 0x8a, 0x00, 0x00,
+  0x08, 0xa1, 0x08, 0xa1, 0x00, 0x00,
+  0xa0, 0xc0, 0xa0, 0xc0, 0x00, 0x00,
+  0xc5, 0x10, 0xc5, 0x10, 0x00, 0x00,
+  0x84, 0x31, 0x84, 0x31, 0x00, 0x00,
+  0x18, 0xa2, 0x18, 0xa2, 0x00, 0x00,
+  0x4e, 0x01, 0x4e, 0x01, 0x00, 0x00,
+  0x44, 0xc8, 0x44, 0xc8, 0x00, 0x00,
+  0x0e, 0x90, 0x0e, 0x90, 0x00, 0x00,
+  0x20, 0xcc, 0x20, 0xcc, 0x00, 0x00,
+  0x93, 0x40, 0x93, 0x40, 0x00, 0x00,
+  0x2d, 0x10, 0x2d, 0x10, 0x00, 0x00,
+  0x31, 0x44, 0x31, 0x44, 0x00, 0x00,
+  0xc0, 0x23, 0xc0, 0x23, 0x00, 0x00,
+  0x11, 0x25, 0x11, 0x25, 0x00, 0x00,
+  0xe8, 0x80, 0xe8, 0x80, 0x00, 0x00,
+  0x52, 0x15, 0x62, 0x0a, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_27[162] = {
+  0x45, 0x15, 0x45, 0x15, 0x00, 0x00,
+  0x22, 0x22, 0x22, 0x22, 0x00, 0x00,
+  0x96, 0x0c, 0x96, 0x0c, 0x00, 0x00,
+  0x0c, 0x50, 0x0c, 0x50, 0x00, 0x00,
+  0x62, 0x04, 0x62, 0x04, 0x00, 0x00,
+  0x49, 0x06, 0x49, 0x06, 0x00, 0x00,
+  0x11, 0x82, 0x11, 0x82, 0x00, 0x00,
+  0x12, 0x38, 0x12, 0x38, 0x00, 0x00,
+  0x40, 0x71, 0x40, 0x71, 0x00, 0x00,
+  0xa8, 0x8a, 0xa8, 0x8a, 0x00, 0x00,
+  0x08, 0xa1, 0x08, 0xa1, 0x00, 0x00,
+  0xa0, 0xc0, 0xa0, 0xc0, 0x00, 0x00,
+  0xc5, 0x10, 0xc5, 0x10, 0x00, 0x00,
+  0x45, 0x51, 0x45, 0x51, 0x00, 0x00,
+  0x22, 0x0a, 0x22, 0x0a, 0x00, 0x00,
+  0x84, 0xd0, 0x84, 0xd0, 0x00, 0x00,
+  0x0c, 0x8a, 0x0c, 0x8a, 0x00, 0x00,
+  0x18, 0x06, 0x18, 0x06, 0x00, 0x00,
+  0x30, 0x03, 0x30, 0x03, 0x00, 0x00,
+  0x61, 0x08, 0x61, 0x08, 0x00, 0x00,
+  0x40, 0x11, 0x40, 0x11, 0x00, 0x00,
+  0x10, 0x2c, 0x10, 0x2c, 0x00, 0x00,
+  0x09, 0x60, 0x09, 0x60, 0x00, 0x00,
+  0x00, 0x94, 0x00, 0x94, 0x00, 0x00,
+  0x52, 0x40, 0x52, 0x40, 0x00, 0x00,
+  0xa4, 0x24, 0xa4, 0x24, 0x00, 0x00,
+  0x82, 0x88, 0x82, 0x88, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_28[168] = {
+  0x45, 0x51, 0x45, 0x51, 0x00, 0x00,
+  0x22, 0x0a, 0x22, 0x0a, 0x00, 0x00,
+  0x84, 0xd0, 0x84, 0xd0, 0x00, 0x00,
+  0x0c, 0x8a, 0x0c, 0x8a, 0x00, 0x00,
+  0x18, 0x06, 0x18, 0x06, 0x00, 0x00,
+  0x30, 0x03, 0x30, 0x03, 0x00, 0x00,
+  0x61, 0x08, 0x61, 0x08, 0x00, 0x00,
+  0x40, 0x11, 0x40, 0x11, 0x00, 0x00,
+  0x10, 0x2c, 0x10, 0x2c, 0x00, 0x00,
+  0x09, 0x60, 0x09, 0x60, 0x00, 0x00,
+  0x00, 0x94, 0x00, 0x94, 0x00, 0x00,
+  0x52, 0x40, 0x52, 0x40, 0x00, 0x00,
+  0xa4, 0x24, 0xa4, 0x24, 0x00, 0x00,
+  0x82, 0x88, 0x82, 0x88, 0x00, 0x00,
+  0x45, 0x15, 0x45, 0x15, 0x00, 0x00,
+  0x22, 0x22, 0x22, 0x22, 0x00, 0x00,
+  0x96, 0x0c, 0x96, 0x0c, 0x00, 0x00,
+  0x0c, 0x50, 0x0c, 0x50, 0x00, 0x00,
+  0x62, 0x04, 0x62, 0x04, 0x00, 0x00,
+  0x49, 0x06, 0x49, 0x06, 0x00, 0x00,
+  0x11, 0x82, 0x11, 0x82, 0x00, 0x00,
+  0x12, 0x38, 0x12, 0x38, 0x00, 0x00,
+  0x40, 0x71, 0x40, 0x71, 0x00, 0x00,
+  0xa8, 0x8a, 0xa8, 0x8a, 0x00, 0x00,
+  0x08, 0xa1, 0x08, 0xa1, 0x00, 0x00,
+  0xa0, 0xc0, 0xa0, 0xc0, 0x00, 0x00,
+  0xc5, 0x10, 0xc5, 0x10, 0x00, 0x00,
+  0x7f, 0xe2, 0xbc, 0x01, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_29[174] = {
+  0x45, 0x51, 0x45, 0x51, 0x00, 0x00,
+  0x22, 0x0a, 0x22, 0x0a, 0x00, 0x00,
+  0x84, 0xd0, 0x84, 0xd0, 0x00, 0x00,
+  0x0c, 0x8a, 0x0c, 0x8a, 0x00, 0x00,
+  0x18, 0x06, 0x18, 0x06, 0x00, 0x00,
+  0x30, 0x03, 0x30, 0x03, 0x00, 0x00,
+  0x61, 0x08, 0x61, 0x08, 0x00, 0x00,
+  0x40, 0x11, 0x40, 0x11, 0x00, 0x00,
+  0x10, 0x2c, 0x10, 0x2c, 0x00, 0x00,
+  0x09, 0x60, 0x09, 0x60, 0x00, 0x00,
+  0x00, 0x94, 0x00, 0x94, 0x00, 0x00,
+  0x52, 0x40, 0x52, 0x40, 0x00, 0x00,
+  0xa4, 0x24, 0xa4, 0x24, 0x00, 0x00,
+  0x82, 0x88, 0x82, 0x88, 0x00, 0x00,
+  0x55, 0x11, 0x55, 0x11, 0x00, 0x00,
+  0x22, 0x22, 0x22, 0x22, 0x00, 0x00,
+  0x11, 0x11, 0x11, 0x11, 0x00, 0x00,
+  0x80, 0x45, 0x80, 0x45, 0x00, 0x00,
+  0x20, 0x1a, 0x20, 0x1a, 0x00, 0x00,
+  0x08, 0x68, 0x08, 0x68, 0x00, 0x00,
+  0x22, 0x84, 0x22, 0x84, 0x00, 0x00,
+  0x48, 0x09, 0x48, 0x09, 0x00, 0x00,
+  0x07, 0x01, 0x07, 0x01, 0x00, 0x00,
+  0x94, 0x20, 0x94, 0x20, 0x00, 0x00,
+  0x82, 0x06, 0x82, 0x06, 0x00, 0x00,
+  0x60, 0x48, 0x60, 0x48, 0x00, 0x00,
+  0x89, 0x80, 0x89, 0x80, 0x00, 0x00,
+  0x00, 0x8e, 0x00, 0x8e, 0x00, 0x00,
+  0x18, 0x22, 0x18, 0x22, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_3[18] = {
+  0xad, 0x2d, 0xad, 0x2d, 0x00, 0x00,
+  0x76, 0x36, 0x76, 0x36, 0x00, 0x00,
+  0x26, 0xdb, 0x26, 0xdb, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_30[180] = {
+  0x55, 0x11, 0x55, 0x11, 0x00, 0x00,
+  0x22, 0x22, 0x22, 0x22, 0x00, 0x00,
+  0x11, 0x11, 0x11, 0x11, 0x00, 0x00,
+  0x80, 0x45, 0x80, 0x45, 0x00, 0x00,
+  0x20, 0x1a, 0x20, 0x1a, 0x00, 0x00,
+  0x08, 0x68, 0x08, 0x68, 0x00, 0x00,
+  0x22, 0x84, 0x22, 0x84, 0x00, 0x00,
+  0x48, 0x09, 0x48, 0x09, 0x00, 0x00,
+  0x07, 0x01, 0x07, 0x01, 0x00, 0x00,
+  0x94, 0x20, 0x94, 0x20, 0x00, 0x00,
+  0x82, 0x06, 0x82, 0x06, 0x00, 0x00,
+  0x60, 0x48, 0x60, 0x48, 0x00, 0x00,
+  0x89, 0x80, 0x89, 0x80, 0x00, 0x00,
+  0x00, 0x8e, 0x00, 0x8e, 0x00, 0x00,
+  0x18, 0x22, 0x18, 0x22, 0x00, 0x00,
+  0x45, 0x51, 0x45, 0x51, 0x00, 0x00,
+  0x22, 0x0a, 0x22, 0x0a, 0x00, 0x00,
+  0x84, 0xd0, 0x84, 0xd0, 0x00, 0x00,
+  0x0c, 0x8a, 0x0c, 0x8a, 0x00, 0x00,
+  0x18, 0x06, 0x18, 0x06, 0x00, 0x00,
+  0x30, 0x03, 0x30, 0x03, 0x00, 0x00,
+  0x61, 0x08, 0x61, 0x08, 0x00, 0x00,
+  0x40, 0x11, 0x40, 0x11, 0x00, 0x00,
+  0x10, 0x2c, 0x10, 0x2c, 0x00, 0x00,
+  0x09, 0x60, 0x09, 0x60, 0x00, 0x00,
+  0x00, 0x94, 0x00, 0x94, 0x00, 0x00,
+  0x52, 0x40, 0x52, 0x40, 0x00, 0x00,
+  0xa4, 0x24, 0xa4, 0x24, 0x00, 0x00,
+  0x82, 0x88, 0x82, 0x88, 0x00, 0x00,
+  0x1e, 0x27, 0xe2, 0xd8, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_31[186] = {
+  0x55, 0x11, 0x55, 0x11, 0x00, 0x00,
+  0x22, 0x22, 0x22, 0x22, 0x00, 0x00,
+  0x11, 0x11, 0x11, 0x11, 0x00, 0x00,
+  0x80, 0x45, 0x80, 0x45, 0x00, 0x00,
+  0x20, 0x1a, 0x20, 0x1a, 0x00, 0x00,
+  0x08, 0x68, 0x08, 0x68, 0x00, 0x00,
+  0x22, 0x84, 0x22, 0x84, 0x00, 0x00,
+  0x48, 0x09, 0x48, 0x09, 0x00, 0x00,
+  0x07, 0x01, 0x07, 0x01, 0x00, 0x00,
+  0x94, 0x20, 0x94, 0x20, 0x00, 0x00,
+  0x82, 0x06, 0x82, 0x06, 0x00, 0x00,
+  0x60, 0x48, 0x60, 0x48, 0x00, 0x00,
+  0x89, 0x80, 0x89, 0x80, 0x00, 0x00,
+  0x00, 0x8e, 0x00, 0x8e, 0x00, 0x00,
+  0x18, 0x22, 0x18, 0x22, 0x00, 0x00,
+  0xa4, 0x10, 0xa4, 0x10, 0x00, 0x00,
+  0x01, 0x2a, 0x01, 0x2a, 0x00, 0x00,
+  0x06, 0x42, 0x06, 0x42, 0x00, 0x00,
+  0x08, 0x68, 0x08, 0x68, 0x00, 0x00,
+  0x81, 0x90, 0x81, 0x90, 0x00, 0x00,
+  0x00, 0xf0, 0x00, 0xf0, 0x00, 0x00,
+  0x50, 0x05, 0x50, 0x05, 0x00, 0x00,
+  0x20, 0x51, 0x20, 0x51, 0x00, 0x00,
+  0x43, 0x08, 0x43, 0x08, 0x00, 0x00,
+  0x68, 0x80, 0x68, 0x80, 0x00, 0x00,
+  0x80, 0x0b, 0x80, 0x0b, 0x00, 0x00,
+  0x10, 0x4c, 0x10, 0x4c, 0x00, 0x00,
+  0x12, 0x30, 0x12, 0x30, 0x00, 0x00,
+  0x40, 0x85, 0x40, 0x85, 0x00, 0x00,
+  0x0e, 0x04, 0x0e, 0x04, 0x00, 0x00,
+  0x18, 0x12, 0x18, 0x12, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_32[192] = {
+  0xa4, 0x10, 0xa4, 0x10, 0x00, 0x00,
+  0x01, 0x2a, 0x01, 0x2a, 0x00, 0x00,
+  0x06, 0x42, 0x06, 0x42, 0x00, 0x00,
+  0x08, 0x68, 0x08, 0x68, 0x00, 0x00,
+  0x81, 0x90, 0x81, 0x90, 0x00, 0x00,
+  0x00, 0xf0, 0x00, 0xf0, 0x00, 0x00,
+  0x50, 0x05, 0x50, 0x05, 0x00, 0x00,
+  0x20, 0x51, 0x20, 0x51, 0x00, 0x00,
+  0x43, 0x08, 0x43, 0x08, 0x00, 0x00,
+  0x68, 0x80, 0x68, 0x80, 0x00, 0x00,
+  0x80, 0x0b, 0x80, 0x0b, 0x00, 0x00,
+  0x10, 0x4c, 0x10, 0x4c, 0x00, 0x00,
+  0x12, 0x30, 0x12, 0x30, 0x00, 0x00,
+  0x40, 0x85, 0x40, 0x85, 0x00, 0x00,
+  0x0e, 0x04, 0x0e, 0x04, 0x00, 0x00,
+  0x18, 0x12, 0x18, 0x12, 0x00, 0x00,
+  0x55, 0x11, 0x55, 0x11, 0x00, 0x00,
+  0x22, 0x22, 0x22, 0x22, 0x00, 0x00,
+  0x11, 0x11, 0x11, 0x11, 0x00, 0x00,
+  0x80, 0x45, 0x80, 0x45, 0x00, 0x00,
+  0x20, 0x1a, 0x20, 0x1a, 0x00, 0x00,
+  0x08, 0x68, 0x08, 0x68, 0x00, 0x00,
+  0x22, 0x84, 0x22, 0x84, 0x00, 0x00,
+  0x48, 0x09, 0x48, 0x09, 0x00, 0x00,
+  0x07, 0x01, 0x07, 0x01, 0x00, 0x00,
+  0x94, 0x20, 0x94, 0x20, 0x00, 0x00,
+  0x82, 0x06, 0x82, 0x06, 0x00, 0x00,
+  0x60, 0x48, 0x60, 0x48, 0x00, 0x00,
+  0x89, 0x80, 0x89, 0x80, 0x00, 0x00,
+  0x00, 0x8e, 0x00, 0x8e, 0x00, 0x00,
+  0x18, 0x22, 0x18, 0x22, 0x00, 0x00,
+  0x60, 0xc4, 0x02, 0x02, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_4[24] = {
+  0x55, 0x55, 0x55, 0x55, 0x00, 0x00,
+  0xaa, 0xaa, 0xaa, 0xaa, 0x00, 0x00,
+  0x35, 0x35, 0x35, 0x35, 0x00, 0x00,
+  0xca, 0xca, 0xca, 0xca, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_5[30] = {
+  0x55, 0x55, 0x55, 0x55, 0x00, 0x00,
+  0x2a, 0x2a, 0x2a, 0x2a, 0x00, 0x00,
+  0x24, 0x25, 0x24, 0x25, 0x00, 0x00,
+  0x84, 0xc8, 0x84, 0xc8, 0x00, 0x00,
+  0x10, 0xb6, 0x10, 0xb6, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_6[36] = {
+  0x51, 0x51, 0x51, 0x51, 0x00, 0x00,
+  0x0a, 0x2a, 0x0a, 0x2a, 0x00, 0x00,
+  0xa2, 0x15, 0xa2, 0x15, 0x00, 0x00,
+  0x84, 0x4a, 0x84, 0x4a, 0x00, 0x00,
+  0x30, 0x92, 0x30, 0x92, 0x00, 0x00,
+  0x04, 0xac, 0x04, 0xac, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_7[42] = {
+  0x45, 0x51, 0x45, 0x51, 0x00, 0x00,
+  0x22, 0x2a, 0x22, 0x2a, 0x00, 0x00,
+  0x91, 0x11, 0x91, 0x11, 0x00, 0x00,
+  0x2e, 0x08, 0x2e, 0x08, 0x00, 0x00,
+  0x48, 0x34, 0x48, 0x34, 0x00, 0x00,
+  0x90, 0x29, 0x90, 0x29, 0x00, 0x00,
+  0x09, 0x86, 0x09, 0x86, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_8[48] = {
+  0x20, 0x54, 0x20, 0x54, 0x00, 0x00,
+  0x18, 0x88, 0x18, 0x88, 0x00, 0x00,
+  0x84, 0x07, 0x84, 0x07, 0x00, 0x00,
+  0x60, 0x48, 0x60, 0x48, 0x00, 0x00,
+  0x12, 0x82, 0x12, 0x82, 0x00, 0x00,
+  0x81, 0x41, 0x81, 0x41, 0x00, 0x00,
+  0x40, 0x62, 0x40, 0x62, 0x00, 0x00,
+  0x16, 0x30, 0x16, 0x30, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom32_9[54] = {
+  0x55, 0x51, 0x55, 0x51, 0x00, 0x00,
+  0x22, 0x2a, 0x22, 0x2a, 0x00, 0x00,
+  0x05, 0x85, 0x05, 0x85, 0x00, 0x00,
+  0x09, 0x4a, 0x09, 0x4a, 0x00, 0x00,
+  0x84, 0x32, 0x84, 0x32, 0x00, 0x00,
+  0xc0, 0x0d, 0xc0, 0x0d, 0x00, 0x00,
+  0x20, 0xa6, 0x20, 0xa6, 0x00, 0x00,
+  0x1a, 0x09, 0x1a, 0x09, 0x00, 0x00,
+  0x44, 0x64, 0x44, 0x64, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom33_1[6] = {
+  0xff, 0xff, 0xff, 0xff, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom33_10[60] = {
+  0x45, 0x51, 0x55, 0x8c, 0x80, 0x00,
+  0x10, 0xa2, 0xaa, 0x27, 0x00, 0x00,
+  0x01, 0x25, 0xa5, 0x32, 0x80, 0x00,
+  0x0b, 0x42, 0x62, 0x61, 0x80, 0x00,
+  0xd8, 0x20, 0x3c, 0x5c, 0x00, 0x00,
+  0x82, 0x8c, 0x8e, 0xcc, 0x00, 0x00,
+  0x24, 0x4a, 0x6a, 0x2b, 0x00, 0x00,
+  0x38, 0x18, 0x36, 0x32, 0x80, 0x00,
+  0x2a, 0x25, 0xd1, 0x25, 0x80, 0x00,
+  0x84, 0x92, 0xc8, 0x02, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom33_11[66] = {
+  0x55, 0x55, 0x55, 0x8c, 0x80, 0x00,
+  0x2a, 0x22, 0xaa, 0x27, 0x00, 0x00,
+  0x31, 0x11, 0xa5, 0x32, 0x80, 0x00,
+  0x83, 0x42, 0x62, 0x61, 0x80, 0x00,
+  0x06, 0x98, 0x3c, 0x5c, 0x00, 0x00,
+  0x40, 0xe1, 0x51, 0x84, 0x80, 0x00,
+  0x2c, 0x44, 0xa2, 0x27, 0x00, 0x00,
+  0xd8, 0x28, 0x95, 0x51, 0x80, 0x00,
+  0x92, 0x81, 0x4a, 0x1a, 0x00, 0x00,
+  0x84, 0x32, 0x30, 0x68, 0x00, 0x00,
+  0x68, 0x0c, 0x2c, 0x89, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom33_12[72] = {
+  0x84, 0x31, 0x51, 0x84, 0x80, 0x00,
+  0x18, 0xa2, 0xa2, 0x27, 0x00, 0x00,
+  0x4e, 0x01, 0x95, 0x51, 0x80, 0x00,
+  0x44, 0xc8, 0x4a, 0x1a, 0x00, 0x00,
+  0x0e, 0x90, 0x30, 0x68, 0x00, 0x00,
+  0x20, 0xcc, 0x2c, 0x89, 0x00, 0x00,
+  0x93, 0x40, 0x55, 0x8c, 0x80, 0x00,
+  0x2d, 0x10, 0xaa, 0x27, 0x00, 0x00,
+  0x31, 0x44, 0xa5, 0x32, 0x80, 0x00,
+  0xc0, 0x23, 0x62, 0x61, 0x80, 0x00,
+  0x11, 0x25, 0x3c, 0x5c, 0x00, 0x00,
+  0xe8, 0x80, 0x51, 0x35, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom33_13[78] = {
+  0x45, 0x15, 0x51, 0x84, 0x80, 0x00,
+  0x22, 0x22, 0xa2, 0x27, 0x00, 0x00,
+  0x96, 0x0c, 0x95, 0x51, 0x80, 0x00,
+  0x0c, 0x50, 0x4a, 0x1a, 0x00, 0x00,
+  0x62, 0x04, 0x30, 0x68, 0x00, 0x00,
+  0x49, 0x06, 0x2c, 0x89, 0x00, 0x00,
+  0x11, 0x82, 0x15, 0x8c, 0x00, 0x00,
+  0x12, 0x38, 0x8a, 0x47, 0x00, 0x00,
+  0x40, 0x71, 0x25, 0x81, 0x80, 0x00,
+  0xa8, 0x8a, 0x62, 0x12, 0x80, 0x00,
+  0x08, 0xa1, 0x58, 0x58, 0x00, 0x00,
+  0xa0, 0xc0, 0x0e, 0x28, 0x80, 0x00,
+  0xc5, 0x10, 0x83, 0x34, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom33_14[84] = {
+  0x45, 0x51, 0x15, 0x8c, 0x00, 0x00,
+  0x22, 0x0a, 0x8a, 0x47, 0x00, 0x00,
+  0x84, 0xd0, 0x25, 0x81, 0x80, 0x00,
+  0x0c, 0x8a, 0x62, 0x12, 0x80, 0x00,
+  0x18, 0x06, 0x58, 0x58, 0x00, 0x00,
+  0x30, 0x03, 0x0e, 0x28, 0x80, 0x00,
+  0x61, 0x08, 0x83, 0x34, 0x00, 0x00,
+  0x40, 0x11, 0x51, 0x84, 0x80, 0x00,
+  0x10, 0x2c, 0xa2, 0x27, 0x00, 0x00,
+  0x09, 0x60, 0x95, 0x51, 0x80, 0x00,
+  0x00, 0x94, 0x4a, 0x1a, 0x00, 0x00,
+  0x52, 0x40, 0x30, 0x68, 0x00, 0x00,
+  0xa4, 0x24, 0x2c, 0x89, 0x00, 0x00,
+  0x82, 0x88, 0xb0, 0xde, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom33_15[90] = {
+  0x55, 0x11, 0x15, 0x8c, 0x00, 0x00,
+  0x22, 0x22, 0x8a, 0x47, 0x00, 0x00,
+  0x11, 0x11, 0x25, 0x81, 0x80, 0x00,
+  0x80, 0x45, 0x62, 0x12, 0x80, 0x00,
+  0x20, 0x1a, 0x58, 0x58, 0x00, 0x00,
+  0x08, 0x68, 0x0e, 0x28, 0x80, 0x00,
+  0x22, 0x84, 0x83, 0x34, 0x00, 0x00,
+  0x48, 0x09, 0x25, 0x2c, 0x00, 0x00,
+  0x07, 0x01, 0x8a, 0x91, 0x00, 0x00,
+  0x94, 0x20, 0x91, 0xc0, 0x80, 0x00,
+  0x82, 0x06, 0x68, 0x06, 0x80, 0x00,
+  0x60, 0x48, 0x32, 0xc8, 0x00, 0x00,
+  0x89, 0x80, 0x43, 0x45, 0x00, 0x00,
+  0x00, 0x8e, 0xc4, 0x30, 0x80, 0x00,
+  0x18, 0x22, 0x1c, 0xa2, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom33_16[96] = {
+  0xa4, 0x10, 0x25, 0x2c, 0x00, 0x00,
+  0x01, 0x2a, 0x8a, 0x91, 0x00, 0x00,
+  0x06, 0x42, 0x91, 0xc0, 0x80, 0x00,
+  0x08, 0x68, 0x68, 0x06, 0x80, 0x00,
+  0x81, 0x90, 0x32, 0xc8, 0x00, 0x00,
+  0x00, 0xf0, 0x43, 0x45, 0x00, 0x00,
+  0x50, 0x05, 0xc4, 0x30, 0x80, 0x00,
+  0x20, 0x51, 0x1c, 0xa2, 0x00, 0x00,
+  0x43, 0x08, 0x15, 0x8c, 0x00, 0x00,
+  0x68, 0x80, 0x8a, 0x47, 0x00, 0x00,
+  0x80, 0x0b, 0x25, 0x81, 0x80, 0x00,
+  0x10, 0x4c, 0x62, 0x12, 0x80, 0x00,
+  0x12, 0x30, 0x58, 0x58, 0x00, 0x00,
+  0x40, 0x85, 0x0e, 0x28, 0x80, 0x00,
+  0x0e, 0x04, 0x83, 0x34, 0x00, 0x00,
+  0x18, 0x12, 0x0a, 0x1c, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom33_17[102] = {
+  0x20, 0x54, 0x64, 0x16, 0x00, 0x00,
+  0x18, 0x88, 0xa2, 0xc2, 0x00, 0x00,
+  0x84, 0x07, 0x51, 0x60, 0x80, 0x00,
+  0x60, 0x48, 0x4a, 0x85, 0x00, 0x00,
+  0x12, 0x82, 0x38, 0x4c, 0x00, 0x00,
+  0x81, 0x41, 0x89, 0x29, 0x00, 0x00,
+  0x40, 0x62, 0x07, 0x11, 0x80, 0x00,
+  0x16, 0x30, 0x94, 0xb0, 0x00, 0x00,
+  0x55, 0x51, 0x8e, 0xcc, 0x00, 0x00,
+  0x22, 0x2a, 0x6a, 0x2b, 0x00, 0x00,
+  0x05, 0x85, 0x36, 0x32, 0x80, 0x00,
+  0x09, 0x4a, 0xd1, 0x25, 0x80, 0x00,
+  0x84, 0x32, 0x55, 0x8c, 0x80, 0x00,
+  0xc0, 0x0d, 0xaa, 0x27, 0x00, 0x00,
+  0x20, 0xa6, 0xa5, 0x32, 0x80, 0x00,
+  0x1a, 0x09, 0x62, 0x61, 0x80, 0x00,
+  0x44, 0x64, 0x3c, 0x5c, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom33_18[108] = {
+  0x55, 0x51, 0x8e, 0xcc, 0x00, 0x00,
+  0x22, 0x2a, 0x6a, 0x2b, 0x00, 0x00,
+  0x05, 0x85, 0x36, 0x32, 0x80, 0x00,
+  0x09, 0x4a, 0xd1, 0x25, 0x80, 0x00,
+  0x84, 0x32, 0x55, 0x8c, 0x80, 0x00,
+  0xc0, 0x0d, 0xaa, 0x27, 0x00, 0x00,
+  0x20, 0xa6, 0xa5, 0x32, 0x80, 0x00,
+  0x1a, 0x09, 0x62, 0x61, 0x80, 0x00,
+  0x44, 0x64, 0x3c, 0x5c, 0x00, 0x00,
+  0x20, 0x54, 0x64, 0x16, 0x00, 0x00,
+  0x18, 0x88, 0xa2, 0xc2, 0x00, 0x00,
+  0x84, 0x07, 0x51, 0x60, 0x80, 0x00,
+  0x60, 0x48, 0x4a, 0x85, 0x00, 0x00,
+  0x12, 0x82, 0x38, 0x4c, 0x00, 0x00,
+  0x81, 0x41, 0x89, 0x29, 0x00, 0x00,
+  0x40, 0x62, 0x07, 0x11, 0x80, 0x00,
+  0x16, 0x30, 0x94, 0xb0, 0x00, 0x00,
+  0x89, 0x53, 0x03, 0xad, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom33_19[114] = {
+  0x55, 0x51, 0x8e, 0xcc, 0x00, 0x00,
+  0x22, 0x2a, 0x6a, 0x2b, 0x00, 0x00,
+  0x05, 0x85, 0x36, 0x32, 0x80, 0x00,
+  0x09, 0x4a, 0xd1, 0x25, 0x80, 0x00,
+  0x84, 0x32, 0x55, 0x8c, 0x80, 0x00,
+  0xc0, 0x0d, 0xaa, 0x27, 0x00, 0x00,
+  0x20, 0xa6, 0xa5, 0x32, 0x80, 0x00,
+  0x1a, 0x09, 0x62, 0x61, 0x80, 0x00,
+  0x44, 0x64, 0x3c, 0x5c, 0x00, 0x00,
+  0x45, 0x51, 0x55, 0x8c, 0x80, 0x00,
+  0x10, 0xa2, 0xaa, 0x27, 0x00, 0x00,
+  0x01, 0x25, 0xa5, 0x32, 0x80, 0x00,
+  0x0b, 0x42, 0x62, 0x61, 0x80, 0x00,
+  0xd8, 0x20, 0x3c, 0x5c, 0x00, 0x00,
+  0x82, 0x8c, 0x8e, 0xcc, 0x00, 0x00,
+  0x24, 0x4a, 0x6a, 0x2b, 0x00, 0x00,
+  0x38, 0x18, 0x36, 0x32, 0x80, 0x00,
+  0x2a, 0x25, 0xd1, 0x25, 0x80, 0x00,
+  0x84, 0x92, 0xc8, 0x02, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom33_2[12] = {
+  0xae, 0xae, 0xce, 0xce, 0x00, 0x00,
+  0x79, 0x79, 0xb9, 0x39, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom33_20[120] = {
+  0x45, 0x51, 0x55, 0x8c, 0x80, 0x00,
+  0x10, 0xa2, 0xaa, 0x27, 0x00, 0x00,
+  0x01, 0x25, 0xa5, 0x32, 0x80, 0x00,
+  0x0b, 0x42, 0x62, 0x61, 0x80, 0x00,
+  0xd8, 0x20, 0x3c, 0x5c, 0x00, 0x00,
+  0x82, 0x8c, 0x8e, 0xcc, 0x00, 0x00,
+  0x24, 0x4a, 0x6a, 0x2b, 0x00, 0x00,
+  0x38, 0x18, 0x36, 0x32, 0x80, 0x00,
+  0x2a, 0x25, 0xd1, 0x25, 0x80, 0x00,
+  0x84, 0x92, 0xc8, 0x02, 0x80, 0x00,
+  0x55, 0x51, 0x8e, 0xcc, 0x00, 0x00,
+  0x22, 0x2a, 0x6a, 0x2b, 0x00, 0x00,
+  0x05, 0x85, 0x36, 0x32, 0x80, 0x00,
+  0x09, 0x4a, 0xd1, 0x25, 0x80, 0x00,
+  0x84, 0x32, 0x55, 0x8c, 0x80, 0x00,
+  0xc0, 0x0d, 0xaa, 0x27, 0x00, 0x00,
+  0x20, 0xa6, 0xa5, 0x32, 0x80, 0x00,
+  0x1a, 0x09, 0x62, 0x61, 0x80, 0x00,
+  0x44, 0x64, 0x3c, 0x5c, 0x00, 0x00,
+  0x73, 0x5f, 0x5b, 0x0e, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom33_21[126] = {
+  0x45, 0x51, 0x55, 0x8c, 0x80, 0x00,
+  0x10, 0xa2, 0xaa, 0x27, 0x00, 0x00,
+  0x01, 0x25, 0xa5, 0x32, 0x80, 0x00,
+  0x0b, 0x42, 0x62, 0x61, 0x80, 0x00,
+  0xd8, 0x20, 0x3c, 0x5c, 0x00, 0x00,
+  0x82, 0x8c, 0x8e, 0xcc, 0x00, 0x00,
+  0x24, 0x4a, 0x6a, 0x2b, 0x00, 0x00,
+  0x38, 0x18, 0x36, 0x32, 0x80, 0x00,
+  0x2a, 0x25, 0xd1, 0x25, 0x80, 0x00,
+  0x84, 0x92, 0xc8, 0x02, 0x80, 0x00,
+  0x55, 0x55, 0x55, 0x8c, 0x80, 0x00,
+  0x2a, 0x22, 0xaa, 0x27, 0x00, 0x00,
+  0x31, 0x11, 0xa5, 0x32, 0x80, 0x00,
+  0x83, 0x42, 0x62, 0x61, 0x80, 0x00,
+  0x06, 0x98, 0x3c, 0x5c, 0x00, 0x00,
+  0x40, 0xe1, 0x51, 0x84, 0x80, 0x00,
+  0x2c, 0x44, 0xa2, 0x27, 0x00, 0x00,
+  0xd8, 0x28, 0x95, 0x51, 0x80, 0x00,
+  0x92, 0x81, 0x4a, 0x1a, 0x00, 0x00,
+  0x84, 0x32, 0x30, 0x68, 0x00, 0x00,
+  0x68, 0x0c, 0x2c, 0x89, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom33_22[132] = {
+  0x55, 0x55, 0x55, 0x8c, 0x80, 0x00,
+  0x2a, 0x22, 0xaa, 0x27, 0x00, 0x00,
+  0x31, 0x11, 0xa5, 0x32, 0x80, 0x00,
+  0x83, 0x42, 0x62, 0x61, 0x80, 0x00,
+  0x06, 0x98, 0x3c, 0x5c, 0x00, 0x00,
+  0x40, 0xe1, 0x51, 0x84, 0x80, 0x00,
+  0x2c, 0x44, 0xa2, 0x27, 0x00, 0x00,
+  0xd8, 0x28, 0x95, 0x51, 0x80, 0x00,
+  0x92, 0x81, 0x4a, 0x1a, 0x00, 0x00,
+  0x84, 0x32, 0x30, 0x68, 0x00, 0x00,
+  0x68, 0x0c, 0x2c, 0x89, 0x00, 0x00,
+  0x45, 0x51, 0x55, 0x8c, 0x80, 0x00,
+  0x10, 0xa2, 0xaa, 0x27, 0x00, 0x00,
+  0x01, 0x25, 0xa5, 0x32, 0x80, 0x00,
+  0x0b, 0x42, 0x62, 0x61, 0x80, 0x00,
+  0xd8, 0x20, 0x3c, 0x5c, 0x00, 0x00,
+  0x82, 0x8c, 0x8e, 0xcc, 0x00, 0x00,
+  0x24, 0x4a, 0x6a, 0x2b, 0x00, 0x00,
+  0x38, 0x18, 0x36, 0x32, 0x80, 0x00,
+  0x2a, 0x25, 0xd1, 0x25, 0x80, 0x00,
+  0x84, 0x92, 0xc8, 0x02, 0x80, 0x00,
+  0xcc, 0xe3, 0x42, 0x6b, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom33_23[138] = {
+  0x55, 0x55, 0x55, 0x8c, 0x80, 0x00,
+  0x2a, 0x22, 0xaa, 0x27, 0x00, 0x00,
+  0x31, 0x11, 0xa5, 0x32, 0x80, 0x00,
+  0x83, 0x42, 0x62, 0x61, 0x80, 0x00,
+  0x06, 0x98, 0x3c, 0x5c, 0x00, 0x00,
+  0x40, 0xe1, 0x51, 0x84, 0x80, 0x00,
+  0x2c, 0x44, 0xa2, 0x27, 0x00, 0x00,
+  0xd8, 0x28, 0x95, 0x51, 0x80, 0x00,
+  0x92, 0x81, 0x4a, 0x1a, 0x00, 0x00,
+  0x84, 0x32, 0x30, 0x68, 0x00, 0x00,
+  0x68, 0x0c, 0x2c, 0x89, 0x00, 0x00,
+  0x84, 0x31, 0x51, 0x84, 0x80, 0x00,
+  0x18, 0xa2, 0xa2, 0x27, 0x00, 0x00,
+  0x4e, 0x01, 0x95, 0x51, 0x80, 0x00,
+  0x44, 0xc8, 0x4a, 0x1a, 0x00, 0x00,
+  0x0e, 0x90, 0x30, 0x68, 0x00, 0x00,
+  0x20, 0xcc, 0x2c, 0x89, 0x00, 0x00,
+  0x93, 0x40, 0x55, 0x8c, 0x80, 0x00,
+  0x2d, 0x10, 0xaa, 0x27, 0x00, 0x00,
+  0x31, 0x44, 0xa5, 0x32, 0x80, 0x00,
+  0xc0, 0x23, 0x62, 0x61, 0x80, 0x00,
+  0x11, 0x25, 0x3c, 0x5c, 0x00, 0x00,
+  0xe8, 0x80, 0x51, 0x35, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom33_24[144] = {
+  0x84, 0x31, 0x51, 0x84, 0x80, 0x00,
+  0x18, 0xa2, 0xa2, 0x27, 0x00, 0x00,
+  0x4e, 0x01, 0x95, 0x51, 0x80, 0x00,
+  0x44, 0xc8, 0x4a, 0x1a, 0x00, 0x00,
+  0x0e, 0x90, 0x30, 0x68, 0x00, 0x00,
+  0x20, 0xcc, 0x2c, 0x89, 0x00, 0x00,
+  0x93, 0x40, 0x55, 0x8c, 0x80, 0x00,
+  0x2d, 0x10, 0xaa, 0x27, 0x00, 0x00,
+  0x31, 0x44, 0xa5, 0x32, 0x80, 0x00,
+  0xc0, 0x23, 0x62, 0x61, 0x80, 0x00,
+  0x11, 0x25, 0x3c, 0x5c, 0x00, 0x00,
+  0xe8, 0x80, 0x51, 0x35, 0x00, 0x00,
+  0x55, 0x55, 0x55, 0x8c, 0x80, 0x00,
+  0x2a, 0x22, 0xaa, 0x27, 0x00, 0x00,
+  0x31, 0x11, 0xa5, 0x32, 0x80, 0x00,
+  0x83, 0x42, 0x62, 0x61, 0x80, 0x00,
+  0x06, 0x98, 0x3c, 0x5c, 0x00, 0x00,
+  0x40, 0xe1, 0x51, 0x84, 0x80, 0x00,
+  0x2c, 0x44, 0xa2, 0x27, 0x00, 0x00,
+  0xd8, 0x28, 0x95, 0x51, 0x80, 0x00,
+  0x92, 0x81, 0x4a, 0x1a, 0x00, 0x00,
+  0x84, 0x32, 0x30, 0x68, 0x00, 0x00,
+  0x68, 0x0c, 0x2c, 0x89, 0x00, 0x00,
+  0xdc, 0x4e, 0xfc, 0x70, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom33_25[150] = {
+  0x84, 0x31, 0x51, 0x84, 0x80, 0x00,
+  0x18, 0xa2, 0xa2, 0x27, 0x00, 0x00,
+  0x4e, 0x01, 0x95, 0x51, 0x80, 0x00,
+  0x44, 0xc8, 0x4a, 0x1a, 0x00, 0x00,
+  0x0e, 0x90, 0x30, 0x68, 0x00, 0x00,
+  0x20, 0xcc, 0x2c, 0x89, 0x00, 0x00,
+  0x93, 0x40, 0x55, 0x8c, 0x80, 0x00,
+  0x2d, 0x10, 0xaa, 0x27, 0x00, 0x00,
+  0x31, 0x44, 0xa5, 0x32, 0x80, 0x00,
+  0xc0, 0x23, 0x62, 0x61, 0x80, 0x00,
+  0x11, 0x25, 0x3c, 0x5c, 0x00, 0x00,
+  0xe8, 0x80, 0x51, 0x35, 0x00, 0x00,
+  0x45, 0x15, 0x51, 0x84, 0x80, 0x00,
+  0x22, 0x22, 0xa2, 0x27, 0x00, 0x00,
+  0x96, 0x0c, 0x95, 0x51, 0x80, 0x00,
+  0x0c, 0x50, 0x4a, 0x1a, 0x00, 0x00,
+  0x62, 0x04, 0x30, 0x68, 0x00, 0x00,
+  0x49, 0x06, 0x2c, 0x89, 0x00, 0x00,
+  0x11, 0x82, 0x15, 0x8c, 0x00, 0x00,
+  0x12, 0x38, 0x8a, 0x47, 0x00, 0x00,
+  0x40, 0x71, 0x25, 0x81, 0x80, 0x00,
+  0xa8, 0x8a, 0x62, 0x12, 0x80, 0x00,
+  0x08, 0xa1, 0x58, 0x58, 0x00, 0x00,
+  0xa0, 0xc0, 0x0e, 0x28, 0x80, 0x00,
+  0xc5, 0x10, 0x83, 0x34, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom33_26[156] = {
+  0x45, 0x15, 0x51, 0x84, 0x80, 0x00,
+  0x22, 0x22, 0xa2, 0x27, 0x00, 0x00,
+  0x96, 0x0c, 0x95, 0x51, 0x80, 0x00,
+  0x0c, 0x50, 0x4a, 0x1a, 0x00, 0x00,
+  0x62, 0x04, 0x30, 0x68, 0x00, 0x00,
+  0x49, 0x06, 0x2c, 0x89, 0x00, 0x00,
+  0x11, 0x82, 0x15, 0x8c, 0x00, 0x00,
+  0x12, 0x38, 0x8a, 0x47, 0x00, 0x00,
+  0x40, 0x71, 0x25, 0x81, 0x80, 0x00,
+  0xa8, 0x8a, 0x62, 0x12, 0x80, 0x00,
+  0x08, 0xa1, 0x58, 0x58, 0x00, 0x00,
+  0xa0, 0xc0, 0x0e, 0x28, 0x80, 0x00,
+  0xc5, 0x10, 0x83, 0x34, 0x00, 0x00,
+  0x84, 0x31, 0x51, 0x84, 0x80, 0x00,
+  0x18, 0xa2, 0xa2, 0x27, 0x00, 0x00,
+  0x4e, 0x01, 0x95, 0x51, 0x80, 0x00,
+  0x44, 0xc8, 0x4a, 0x1a, 0x00, 0x00,
+  0x0e, 0x90, 0x30, 0x68, 0x00, 0x00,
+  0x20, 0xcc, 0x2c, 0x89, 0x00, 0x00,
+  0x93, 0x40, 0x55, 0x8c, 0x80, 0x00,
+  0x2d, 0x10, 0xaa, 0x27, 0x00, 0x00,
+  0x31, 0x44, 0xa5, 0x32, 0x80, 0x00,
+  0xc0, 0x23, 0x62, 0x61, 0x80, 0x00,
+  0x11, 0x25, 0x3c, 0x5c, 0x00, 0x00,
+  0xe8, 0x80, 0x51, 0x35, 0x00, 0x00,
+  0xa4, 0xa4, 0xfc, 0x91, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom33_27[162] = {
+  0x45, 0x15, 0x51, 0x84, 0x80, 0x00,
+  0x22, 0x22, 0xa2, 0x27, 0x00, 0x00,
+  0x96, 0x0c, 0x95, 0x51, 0x80, 0x00,
+  0x0c, 0x50, 0x4a, 0x1a, 0x00, 0x00,
+  0x62, 0x04, 0x30, 0x68, 0x00, 0x00,
+  0x49, 0x06, 0x2c, 0x89, 0x00, 0x00,
+  0x11, 0x82, 0x15, 0x8c, 0x00, 0x00,
+  0x12, 0x38, 0x8a, 0x47, 0x00, 0x00,
+  0x40, 0x71, 0x25, 0x81, 0x80, 0x00,
+  0xa8, 0x8a, 0x62, 0x12, 0x80, 0x00,
+  0x08, 0xa1, 0x58, 0x58, 0x00, 0x00,
+  0xa0, 0xc0, 0x0e, 0x28, 0x80, 0x00,
+  0xc5, 0x10, 0x83, 0x34, 0x00, 0x00,
+  0x45, 0x51, 0x15, 0x8c, 0x00, 0x00,
+  0x22, 0x0a, 0x8a, 0x47, 0x00, 0x00,
+  0x84, 0xd0, 0x25, 0x81, 0x80, 0x00,
+  0x0c, 0x8a, 0x62, 0x12, 0x80, 0x00,
+  0x18, 0x06, 0x58, 0x58, 0x00, 0x00,
+  0x30, 0x03, 0x0e, 0x28, 0x80, 0x00,
+  0x61, 0x08, 0x83, 0x34, 0x00, 0x00,
+  0x40, 0x11, 0x51, 0x84, 0x80, 0x00,
+  0x10, 0x2c, 0xa2, 0x27, 0x00, 0x00,
+  0x09, 0x60, 0x95, 0x51, 0x80, 0x00,
+  0x00, 0x94, 0x4a, 0x1a, 0x00, 0x00,
+  0x52, 0x40, 0x30, 0x68, 0x00, 0x00,
+  0xa4, 0x24, 0x2c, 0x89, 0x00, 0x00,
+  0x82, 0x88, 0xb0, 0xde, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom33_28[168] = {
+  0x45, 0x51, 0x15, 0x8c, 0x00, 0x00,
+  0x22, 0x0a, 0x8a, 0x47, 0x00, 0x00,
+  0x84, 0xd0, 0x25, 0x81, 0x80, 0x00,
+  0x0c, 0x8a, 0x62, 0x12, 0x80, 0x00,
+  0x18, 0x06, 0x58, 0x58, 0x00, 0x00,
+  0x30, 0x03, 0x0e, 0x28, 0x80, 0x00,
+  0x61, 0x08, 0x83, 0x34, 0x00, 0x00,
+  0x40, 0x11, 0x51, 0x84, 0x80, 0x00,
+  0x10, 0x2c, 0xa2, 0x27, 0x00, 0x00,
+  0x09, 0x60, 0x95, 0x51, 0x80, 0x00,
+  0x00, 0x94, 0x4a, 0x1a, 0x00, 0x00,
+  0x52, 0x40, 0x30, 0x68, 0x00, 0x00,
+  0xa4, 0x24, 0x2c, 0x89, 0x00, 0x00,
+  0x82, 0x88, 0xb0, 0xde, 0x80, 0x00,
+  0x45, 0x15, 0x51, 0x84, 0x80, 0x00,
+  0x22, 0x22, 0xa2, 0x27, 0x00, 0x00,
+  0x96, 0x0c, 0x95, 0x51, 0x80, 0x00,
+  0x0c, 0x50, 0x4a, 0x1a, 0x00, 0x00,
+  0x62, 0x04, 0x30, 0x68, 0x00, 0x00,
+  0x49, 0x06, 0x2c, 0x89, 0x00, 0x00,
+  0x11, 0x82, 0x15, 0x8c, 0x00, 0x00,
+  0x12, 0x38, 0x8a, 0x47, 0x00, 0x00,
+  0x40, 0x71, 0x25, 0x81, 0x80, 0x00,
+  0xa8, 0x8a, 0x62, 0x12, 0x80, 0x00,
+  0x08, 0xa1, 0x58, 0x58, 0x00, 0x00,
+  0xa0, 0xc0, 0x0e, 0x28, 0x80, 0x00,
+  0xc5, 0x10, 0x83, 0x34, 0x00, 0x00,
+  0x1b, 0xf4, 0xaa, 0xec, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom33_29[174] = {
+  0x45, 0x51, 0x15, 0x8c, 0x00, 0x00,
+  0x22, 0x0a, 0x8a, 0x47, 0x00, 0x00,
+  0x84, 0xd0, 0x25, 0x81, 0x80, 0x00,
+  0x0c, 0x8a, 0x62, 0x12, 0x80, 0x00,
+  0x18, 0x06, 0x58, 0x58, 0x00, 0x00,
+  0x30, 0x03, 0x0e, 0x28, 0x80, 0x00,
+  0x61, 0x08, 0x83, 0x34, 0x00, 0x00,
+  0x40, 0x11, 0x51, 0x84, 0x80, 0x00,
+  0x10, 0x2c, 0xa2, 0x27, 0x00, 0x00,
+  0x09, 0x60, 0x95, 0x51, 0x80, 0x00,
+  0x00, 0x94, 0x4a, 0x1a, 0x00, 0x00,
+  0x52, 0x40, 0x30, 0x68, 0x00, 0x00,
+  0xa4, 0x24, 0x2c, 0x89, 0x00, 0x00,
+  0x82, 0x88, 0xb0, 0xde, 0x80, 0x00,
+  0x55, 0x11, 0x15, 0x8c, 0x00, 0x00,
+  0x22, 0x22, 0x8a, 0x47, 0x00, 0x00,
+  0x11, 0x11, 0x25, 0x81, 0x80, 0x00,
+  0x80, 0x45, 0x62, 0x12, 0x80, 0x00,
+  0x20, 0x1a, 0x58, 0x58, 0x00, 0x00,
+  0x08, 0x68, 0x0e, 0x28, 0x80, 0x00,
+  0x22, 0x84, 0x83, 0x34, 0x00, 0x00,
+  0x48, 0x09, 0x25, 0x2c, 0x00, 0x00,
+  0x07, 0x01, 0x8a, 0x91, 0x00, 0x00,
+  0x94, 0x20, 0x91, 0xc0, 0x80, 0x00,
+  0x82, 0x06, 0x68, 0x06, 0x80, 0x00,
+  0x60, 0x48, 0x32, 0xc8, 0x00, 0x00,
+  0x89, 0x80, 0x43, 0x45, 0x00, 0x00,
+  0x00, 0x8e, 0xc4, 0x30, 0x80, 0x00,
+  0x18, 0x22, 0x1c, 0xa2, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom33_3[18] = {
+  0xad, 0x2d, 0xcd, 0xcc, 0x00, 0x00,
+  0x76, 0x36, 0x97, 0x27, 0x00, 0x00,
+  0x26, 0xdb, 0xb8, 0xd1, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom33_30[180] = {
+  0x55, 0x11, 0x15, 0x8c, 0x00, 0x00,
+  0x22, 0x22, 0x8a, 0x47, 0x00, 0x00,
+  0x11, 0x11, 0x25, 0x81, 0x80, 0x00,
+  0x80, 0x45, 0x62, 0x12, 0x80, 0x00,
+  0x20, 0x1a, 0x58, 0x58, 0x00, 0x00,
+  0x08, 0x68, 0x0e, 0x28, 0x80, 0x00,
+  0x22, 0x84, 0x83, 0x34, 0x00, 0x00,
+  0x48, 0x09, 0x25, 0x2c, 0x00, 0x00,
+  0x07, 0x01, 0x8a, 0x91, 0x00, 0x00,
+  0x94, 0x20, 0x91, 0xc0, 0x80, 0x00,
+  0x82, 0x06, 0x68, 0x06, 0x80, 0x00,
+  0x60, 0x48, 0x32, 0xc8, 0x00, 0x00,
+  0x89, 0x80, 0x43, 0x45, 0x00, 0x00,
+  0x00, 0x8e, 0xc4, 0x30, 0x80, 0x00,
+  0x18, 0x22, 0x1c, 0xa2, 0x00, 0x00,
+  0x45, 0x51, 0x15, 0x8c, 0x00, 0x00,
+  0x22, 0x0a, 0x8a, 0x47, 0x00, 0x00,
+  0x84, 0xd0, 0x25, 0x81, 0x80, 0x00,
+  0x0c, 0x8a, 0x62, 0x12, 0x80, 0x00,
+  0x18, 0x06, 0x58, 0x58, 0x00, 0x00,
+  0x30, 0x03, 0x0e, 0x28, 0x80, 0x00,
+  0x61, 0x08, 0x83, 0x34, 0x00, 0x00,
+  0x40, 0x11, 0x51, 0x84, 0x80, 0x00,
+  0x10, 0x2c, 0xa2, 0x27, 0x00, 0x00,
+  0x09, 0x60, 0x95, 0x51, 0x80, 0x00,
+  0x00, 0x94, 0x4a, 0x1a, 0x00, 0x00,
+  0x52, 0x40, 0x30, 0x68, 0x00, 0x00,
+  0xa4, 0x24, 0x2c, 0x89, 0x00, 0x00,
+  0x82, 0x88, 0xb0, 0xde, 0x80, 0x00,
+  0x6d, 0xd2, 0x8c, 0x00, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom33_31[186] = {
+  0x55, 0x11, 0x15, 0x8c, 0x00, 0x00,
+  0x22, 0x22, 0x8a, 0x47, 0x00, 0x00,
+  0x11, 0x11, 0x25, 0x81, 0x80, 0x00,
+  0x80, 0x45, 0x62, 0x12, 0x80, 0x00,
+  0x20, 0x1a, 0x58, 0x58, 0x00, 0x00,
+  0x08, 0x68, 0x0e, 0x28, 0x80, 0x00,
+  0x22, 0x84, 0x83, 0x34, 0x00, 0x00,
+  0x48, 0x09, 0x25, 0x2c, 0x00, 0x00,
+  0x07, 0x01, 0x8a, 0x91, 0x00, 0x00,
+  0x94, 0x20, 0x91, 0xc0, 0x80, 0x00,
+  0x82, 0x06, 0x68, 0x06, 0x80, 0x00,
+  0x60, 0x48, 0x32, 0xc8, 0x00, 0x00,
+  0x89, 0x80, 0x43, 0x45, 0x00, 0x00,
+  0x00, 0x8e, 0xc4, 0x30, 0x80, 0x00,
+  0x18, 0x22, 0x1c, 0xa2, 0x00, 0x00,
+  0xa4, 0x10, 0x25, 0x2c, 0x00, 0x00,
+  0x01, 0x2a, 0x8a, 0x91, 0x00, 0x00,
+  0x06, 0x42, 0x91, 0xc0, 0x80, 0x00,
+  0x08, 0x68, 0x68, 0x06, 0x80, 0x00,
+  0x81, 0x90, 0x32, 0xc8, 0x00, 0x00,
+  0x00, 0xf0, 0x43, 0x45, 0x00, 0x00,
+  0x50, 0x05, 0xc4, 0x30, 0x80, 0x00,
+  0x20, 0x51, 0x1c, 0xa2, 0x00, 0x00,
+  0x43, 0x08, 0x15, 0x8c, 0x00, 0x00,
+  0x68, 0x80, 0x8a, 0x47, 0x00, 0x00,
+  0x80, 0x0b, 0x25, 0x81, 0x80, 0x00,
+  0x10, 0x4c, 0x62, 0x12, 0x80, 0x00,
+  0x12, 0x30, 0x58, 0x58, 0x00, 0x00,
+  0x40, 0x85, 0x0e, 0x28, 0x80, 0x00,
+  0x0e, 0x04, 0x83, 0x34, 0x00, 0x00,
+  0x18, 0x12, 0x0a, 0x1c, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom33_32[192] = {
+  0xa4, 0x10, 0x25, 0x2c, 0x00, 0x00,
+  0x01, 0x2a, 0x8a, 0x91, 0x00, 0x00,
+  0x06, 0x42, 0x91, 0xc0, 0x80, 0x00,
+  0x08, 0x68, 0x68, 0x06, 0x80, 0x00,
+  0x81, 0x90, 0x32, 0xc8, 0x00, 0x00,
+  0x00, 0xf0, 0x43, 0x45, 0x00, 0x00,
+  0x50, 0x05, 0xc4, 0x30, 0x80, 0x00,
+  0x20, 0x51, 0x1c, 0xa2, 0x00, 0x00,
+  0x43, 0x08, 0x15, 0x8c, 0x00, 0x00,
+  0x68, 0x80, 0x8a, 0x47, 0x00, 0x00,
+  0x80, 0x0b, 0x25, 0x81, 0x80, 0x00,
+  0x10, 0x4c, 0x62, 0x12, 0x80, 0x00,
+  0x12, 0x30, 0x58, 0x58, 0x00, 0x00,
+  0x40, 0x85, 0x0e, 0x28, 0x80, 0x00,
+  0x0e, 0x04, 0x83, 0x34, 0x00, 0x00,
+  0x18, 0x12, 0x0a, 0x1c, 0x00, 0x00,
+  0x55, 0x11, 0x15, 0x8c, 0x00, 0x00,
+  0x22, 0x22, 0x8a, 0x47, 0x00, 0x00,
+  0x11, 0x11, 0x25, 0x81, 0x80, 0x00,
+  0x80, 0x45, 0x62, 0x12, 0x80, 0x00,
+  0x20, 0x1a, 0x58, 0x58, 0x00, 0x00,
+  0x08, 0x68, 0x0e, 0x28, 0x80, 0x00,
+  0x22, 0x84, 0x83, 0x34, 0x00, 0x00,
+  0x48, 0x09, 0x25, 0x2c, 0x00, 0x00,
+  0x07, 0x01, 0x8a, 0x91, 0x00, 0x00,
+  0x94, 0x20, 0x91, 0xc0, 0x80, 0x00,
+  0x82, 0x06, 0x68, 0x06, 0x80, 0x00,
+  0x60, 0x48, 0x32, 0xc8, 0x00, 0x00,
+  0x89, 0x80, 0x43, 0x45, 0x00, 0x00,
+  0x00, 0x8e, 0xc4, 0x30, 0x80, 0x00,
+  0x18, 0x22, 0x1c, 0xa2, 0x00, 0x00,
+  0x73, 0x8e, 0x12, 0xca, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom33_33[198] = {
+  0xa4, 0x10, 0x25, 0x2c, 0x00, 0x00,
+  0x01, 0x2a, 0x8a, 0x91, 0x00, 0x00,
+  0x06, 0x42, 0x91, 0xc0, 0x80, 0x00,
+  0x08, 0x68, 0x68, 0x06, 0x80, 0x00,
+  0x81, 0x90, 0x32, 0xc8, 0x00, 0x00,
+  0x00, 0xf0, 0x43, 0x45, 0x00, 0x00,
+  0x50, 0x05, 0xc4, 0x30, 0x80, 0x00,
+  0x20, 0x51, 0x1c, 0xa2, 0x00, 0x00,
+  0x43, 0x08, 0x15, 0x8c, 0x00, 0x00,
+  0x68, 0x80, 0x8a, 0x47, 0x00, 0x00,
+  0x80, 0x0b, 0x25, 0x81, 0x80, 0x00,
+  0x10, 0x4c, 0x62, 0x12, 0x80, 0x00,
+  0x12, 0x30, 0x58, 0x58, 0x00, 0x00,
+  0x40, 0x85, 0x0e, 0x28, 0x80, 0x00,
+  0x0e, 0x04, 0x83, 0x34, 0x00, 0x00,
+  0x18, 0x12, 0x0a, 0x1c, 0x00, 0x00,
+  0xa4, 0x10, 0x25, 0x2c, 0x00, 0x00,
+  0x01, 0x2a, 0x8a, 0x91, 0x00, 0x00,
+  0x06, 0x42, 0x91, 0xc0, 0x80, 0x00,
+  0x08, 0x68, 0x68, 0x06, 0x80, 0x00,
+  0x81, 0x90, 0x32, 0xc8, 0x00, 0x00,
+  0x00, 0xf0, 0x43, 0x45, 0x00, 0x00,
+  0x50, 0x05, 0xc4, 0x30, 0x80, 0x00,
+  0x20, 0x51, 0x1c, 0xa2, 0x00, 0x00,
+  0x43, 0x08, 0x25, 0x4c, 0x00, 0x00,
+  0x68, 0x80, 0x8a, 0x66, 0x00, 0x00,
+  0x80, 0x0b, 0x91, 0x91, 0x00, 0x00,
+  0x10, 0x4c, 0x68, 0x42, 0x80, 0x00,
+  0x12, 0x30, 0x32, 0xa4, 0x00, 0x00,
+  0x40, 0x85, 0x43, 0x13, 0x00, 0x00,
+  0x0e, 0x04, 0xc4, 0x30, 0x80, 0x00,
+  0x18, 0x12, 0x1c, 0x88, 0x80, 0x00,
+  0xdb, 0x10, 0x3c, 0x09, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom33_4[24] = {
+  0x55, 0x55, 0xca, 0xec, 0x00, 0x00,
+  0xaa, 0xaa, 0xa9, 0x67, 0x00, 0x00,
+  0x35, 0x35, 0x3a, 0xb1, 0x80, 0x00,
+  0xca, 0xca, 0x55, 0x5a, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom33_5[30] = {
+  0x55, 0x55, 0x55, 0x44, 0x80, 0x00,
+  0x2a, 0x2a, 0x2a, 0x66, 0x00, 0x00,
+  0x24, 0x25, 0x25, 0xa1, 0x80, 0x00,
+  0x84, 0xc8, 0xe2, 0x12, 0x80, 0x00,
+  0x10, 0xb6, 0x99, 0x98, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom33_6[36] = {
+  0x51, 0x51, 0xd1, 0x4c, 0x00, 0x00,
+  0x0a, 0x2a, 0xa2, 0xc5, 0x00, 0x00,
+  0xa2, 0x15, 0x95, 0x30, 0x80, 0x00,
+  0x84, 0x4a, 0xca, 0x0a, 0x80, 0x00,
+  0x30, 0x92, 0xa4, 0xaa, 0x00, 0x00,
+  0x04, 0xac, 0x78, 0x15, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom33_7[42] = {
+  0x45, 0x51, 0x15, 0x44, 0x80, 0x00,
+  0x22, 0x2a, 0x8a, 0x23, 0x00, 0x00,
+  0x91, 0x11, 0x85, 0x91, 0x00, 0x00,
+  0x2e, 0x08, 0x32, 0x0a, 0x80, 0x00,
+  0x48, 0x34, 0x58, 0x34, 0x00, 0x00,
+  0x90, 0x29, 0x2c, 0x0d, 0x00, 0x00,
+  0x09, 0x86, 0x43, 0xc8, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom33_8[48] = {
+  0x20, 0x54, 0x64, 0x16, 0x00, 0x00,
+  0x18, 0x88, 0xa2, 0xc2, 0x00, 0x00,
+  0x84, 0x07, 0x51, 0x60, 0x80, 0x00,
+  0x60, 0x48, 0x4a, 0x85, 0x00, 0x00,
+  0x12, 0x82, 0x38, 0x4c, 0x00, 0x00,
+  0x81, 0x41, 0x89, 0x29, 0x00, 0x00,
+  0x40, 0x62, 0x07, 0x11, 0x80, 0x00,
+  0x16, 0x30, 0x94, 0xb0, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom33_9[54] = {
+  0x55, 0x51, 0x8e, 0xcc, 0x00, 0x00,
+  0x22, 0x2a, 0x6a, 0x2b, 0x00, 0x00,
+  0x05, 0x85, 0x36, 0x32, 0x80, 0x00,
+  0x09, 0x4a, 0xd1, 0x25, 0x80, 0x00,
+  0x84, 0x32, 0x55, 0x8c, 0x80, 0x00,
+  0xc0, 0x0d, 0xaa, 0x27, 0x00, 0x00,
+  0x20, 0xa6, 0xa5, 0x32, 0x80, 0x00,
+  0x1a, 0x09, 0x62, 0x61, 0x80, 0x00,
+  0x44, 0x64, 0x3c, 0x5c, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom34_1[6] = {
+  0xff, 0xff, 0xff, 0xff, 0xc0, 0x00
+};
+
+const uint8_t kMaskRandom34_10[60] = {
+  0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+  0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+  0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+  0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+  0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+  0x8e, 0xcc, 0x47, 0x66, 0x00, 0x00,
+  0x6a, 0x2b, 0x35, 0x15, 0x80, 0x00,
+  0x36, 0x32, 0x9b, 0x19, 0x40, 0x00,
+  0xd1, 0x25, 0xe8, 0x92, 0xc0, 0x00,
+  0xc8, 0x02, 0xe4, 0x01, 0x40, 0x00
+};
+
+const uint8_t kMaskRandom34_11[66] = {
+  0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+  0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+  0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+  0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+  0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+  0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+  0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+  0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+  0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+  0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+  0x2c, 0x89, 0x16, 0x44, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom34_12[72] = {
+  0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+  0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+  0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+  0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+  0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+  0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+  0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+  0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+  0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+  0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+  0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+  0x51, 0x35, 0x28, 0x9a, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom34_13[78] = {
+  0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+  0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+  0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+  0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+  0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+  0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+  0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+  0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+  0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+  0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+  0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+  0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+  0x83, 0x34, 0x41, 0x9a, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom34_14[84] = {
+  0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+  0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+  0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+  0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+  0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+  0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+  0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+  0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+  0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+  0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+  0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+  0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+  0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+  0xb0, 0xde, 0xd8, 0x6f, 0x40, 0x00
+};
+
+const uint8_t kMaskRandom34_15[90] = {
+  0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+  0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+  0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+  0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+  0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+  0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+  0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+  0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+  0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+  0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+  0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+  0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+  0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+  0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+  0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom34_16[96] = {
+  0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+  0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+  0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+  0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+  0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+  0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+  0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+  0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00,
+  0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+  0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+  0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+  0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+  0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+  0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+  0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+  0x0a, 0x1c, 0x05, 0x0e, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom34_17[102] = {
+  0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+  0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+  0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+  0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+  0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+  0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+  0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+  0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00,
+  0x25, 0x4c, 0x12, 0xa6, 0x00, 0x00,
+  0x8a, 0x66, 0x45, 0x33, 0x00, 0x00,
+  0x91, 0x91, 0x48, 0xc8, 0x80, 0x00,
+  0x68, 0x42, 0xb4, 0x21, 0x40, 0x00,
+  0x32, 0xa4, 0x19, 0x52, 0x00, 0x00,
+  0x43, 0x13, 0x21, 0x89, 0x80, 0x00,
+  0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+  0x1c, 0x88, 0x8e, 0x44, 0x40, 0x00,
+  0x3c, 0x09, 0x1e, 0x04, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom34_18[108] = {
+  0x8e, 0xcc, 0x47, 0x66, 0x00, 0x00,
+  0x6a, 0x2b, 0x35, 0x15, 0x80, 0x00,
+  0x36, 0x32, 0x9b, 0x19, 0x40, 0x00,
+  0xd1, 0x25, 0xe8, 0x92, 0xc0, 0x00,
+  0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+  0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+  0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+  0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+  0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+  0x64, 0x16, 0x32, 0x0b, 0x00, 0x00,
+  0xa2, 0xc2, 0x51, 0x61, 0x00, 0x00,
+  0x51, 0x60, 0xa8, 0xb0, 0x40, 0x00,
+  0x4a, 0x85, 0x25, 0x42, 0x80, 0x00,
+  0x38, 0x4c, 0x1c, 0x26, 0x00, 0x00,
+  0x89, 0x29, 0x44, 0x94, 0x80, 0x00,
+  0x07, 0x11, 0x83, 0x88, 0xc0, 0x00,
+  0x94, 0xb0, 0x4a, 0x58, 0x00, 0x00,
+  0x89, 0x70, 0xf3, 0xf7, 0x40, 0x00
+};
+
+const uint8_t kMaskRandom34_19[114] = {
+  0x8e, 0xcc, 0x47, 0x66, 0x00, 0x00,
+  0x6a, 0x2b, 0x35, 0x15, 0x80, 0x00,
+  0x36, 0x32, 0x9b, 0x19, 0x40, 0x00,
+  0xd1, 0x25, 0xe8, 0x92, 0xc0, 0x00,
+  0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+  0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+  0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+  0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+  0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+  0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+  0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+  0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+  0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+  0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+  0x8e, 0xcc, 0x47, 0x66, 0x00, 0x00,
+  0x6a, 0x2b, 0x35, 0x15, 0x80, 0x00,
+  0x36, 0x32, 0x9b, 0x19, 0x40, 0x00,
+  0xd1, 0x25, 0xe8, 0x92, 0xc0, 0x00,
+  0xc8, 0x02, 0xe4, 0x01, 0x40, 0x00
+};
+
+const uint8_t kMaskRandom34_2[12] = {
+  0xce, 0xce, 0x67, 0x67, 0x00, 0x00,
+  0xb9, 0x39, 0xdc, 0x9c, 0xc0, 0x00
+};
+
+const uint8_t kMaskRandom34_20[120] = {
+  0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+  0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+  0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+  0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+  0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+  0x8e, 0xcc, 0x47, 0x66, 0x00, 0x00,
+  0x6a, 0x2b, 0x35, 0x15, 0x80, 0x00,
+  0x36, 0x32, 0x9b, 0x19, 0x40, 0x00,
+  0xd1, 0x25, 0xe8, 0x92, 0xc0, 0x00,
+  0xc8, 0x02, 0xe4, 0x01, 0x40, 0x00,
+  0x8e, 0xcc, 0x47, 0x66, 0x00, 0x00,
+  0x6a, 0x2b, 0x35, 0x15, 0x80, 0x00,
+  0x36, 0x32, 0x9b, 0x19, 0x40, 0x00,
+  0xd1, 0x25, 0xe8, 0x92, 0xc0, 0x00,
+  0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+  0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+  0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+  0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+  0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+  0x5d, 0xc5, 0xfe, 0xd8, 0x40, 0x00
+};
+
+const uint8_t kMaskRandom34_21[126] = {
+  0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+  0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+  0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+  0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+  0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+  0x8e, 0xcc, 0x47, 0x66, 0x00, 0x00,
+  0x6a, 0x2b, 0x35, 0x15, 0x80, 0x00,
+  0x36, 0x32, 0x9b, 0x19, 0x40, 0x00,
+  0xd1, 0x25, 0xe8, 0x92, 0xc0, 0x00,
+  0xc8, 0x02, 0xe4, 0x01, 0x40, 0x00,
+  0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+  0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+  0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+  0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+  0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+  0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+  0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+  0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+  0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+  0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+  0x2c, 0x89, 0x16, 0x44, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom34_22[132] = {
+  0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+  0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+  0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+  0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+  0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+  0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+  0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+  0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+  0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+  0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+  0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+  0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+  0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+  0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+  0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+  0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+  0x8e, 0xcc, 0x47, 0x66, 0x00, 0x00,
+  0x6a, 0x2b, 0x35, 0x15, 0x80, 0x00,
+  0x36, 0x32, 0x9b, 0x19, 0x40, 0x00,
+  0xd1, 0x25, 0xe8, 0x92, 0xc0, 0x00,
+  0xc8, 0x02, 0xe4, 0x01, 0x40, 0x00,
+  0x2a, 0xf7, 0x4f, 0xf5, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom34_23[138] = {
+  0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+  0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+  0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+  0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+  0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+  0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+  0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+  0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+  0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+  0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+  0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+  0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+  0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+  0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+  0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+  0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+  0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+  0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+  0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+  0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+  0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+  0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+  0x51, 0x35, 0x28, 0x9a, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom34_24[144] = {
+  0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+  0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+  0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+  0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+  0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+  0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+  0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+  0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+  0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+  0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+  0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+  0x51, 0x35, 0x28, 0x9a, 0x80, 0x00,
+  0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+  0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+  0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+  0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+  0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+  0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+  0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+  0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+  0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+  0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+  0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+  0x4c, 0xb8, 0x04, 0x74, 0xc0, 0x00
+};
+
+const uint8_t kMaskRandom34_25[150] = {
+  0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+  0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+  0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+  0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+  0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+  0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+  0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+  0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+  0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+  0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+  0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+  0x51, 0x35, 0x28, 0x9a, 0x80, 0x00,
+  0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+  0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+  0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+  0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+  0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+  0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+  0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+  0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+  0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+  0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+  0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+  0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+  0x83, 0x34, 0x41, 0x9a, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom34_26[156] = {
+  0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+  0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+  0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+  0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+  0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+  0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+  0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+  0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+  0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+  0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+  0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+  0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+  0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+  0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+  0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+  0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+  0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+  0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+  0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+  0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+  0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+  0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+  0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+  0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00,
+  0x51, 0x35, 0x28, 0x9a, 0x80, 0x00,
+  0x95, 0x20, 0xe9, 0xef, 0xc0, 0x00
+};
+
+const uint8_t kMaskRandom34_27[162] = {
+  0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+  0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+  0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+  0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+  0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+  0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+  0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+  0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+  0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+  0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+  0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+  0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+  0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+  0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+  0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+  0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+  0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+  0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+  0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+  0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+  0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+  0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+  0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+  0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+  0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+  0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+  0xb0, 0xde, 0xd8, 0x6f, 0x40, 0x00
+};
+
+const uint8_t kMaskRandom34_28[168] = {
+  0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+  0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+  0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+  0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+  0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+  0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+  0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+  0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+  0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+  0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+  0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+  0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+  0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+  0xb0, 0xde, 0xd8, 0x6f, 0x40, 0x00,
+  0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+  0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+  0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+  0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+  0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+  0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+  0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+  0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+  0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+  0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+  0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+  0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+  0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+  0x10, 0x6c, 0xff, 0x60, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom34_29[174] = {
+  0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+  0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+  0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+  0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+  0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+  0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+  0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+  0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+  0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+  0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+  0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+  0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+  0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+  0xb0, 0xde, 0xd8, 0x6f, 0x40, 0x00,
+  0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+  0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+  0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+  0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+  0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+  0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+  0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+  0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+  0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+  0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+  0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+  0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+  0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+  0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+  0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom34_3[18] = {
+  0xcd, 0xcc, 0x66, 0xe6, 0x00, 0x00,
+  0x97, 0x27, 0x4b, 0x93, 0x80, 0x00,
+  0xb8, 0xd1, 0xdc, 0x68, 0xc0, 0x00
+};
+
+const uint8_t kMaskRandom34_30[180] = {
+  0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+  0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+  0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+  0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+  0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+  0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+  0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+  0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+  0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+  0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+  0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+  0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+  0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+  0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+  0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00,
+  0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+  0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+  0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+  0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+  0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+  0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+  0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+  0x51, 0x84, 0xa8, 0xc2, 0x40, 0x00,
+  0xa2, 0x27, 0x51, 0x13, 0x80, 0x00,
+  0x95, 0x51, 0xca, 0xa8, 0xc0, 0x00,
+  0x4a, 0x1a, 0x25, 0x0d, 0x00, 0x00,
+  0x30, 0x68, 0x18, 0x34, 0x00, 0x00,
+  0x2c, 0x89, 0x16, 0x44, 0x80, 0x00,
+  0xb0, 0xde, 0xd8, 0x6f, 0x40, 0x00,
+  0x87, 0x93, 0x96, 0xc7, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom34_31[186] = {
+  0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+  0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+  0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+  0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+  0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+  0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+  0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+  0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+  0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+  0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+  0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+  0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+  0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+  0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+  0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00,
+  0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+  0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+  0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+  0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+  0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+  0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+  0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+  0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00,
+  0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+  0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+  0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+  0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+  0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+  0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+  0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+  0x0a, 0x1c, 0x05, 0x0e, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom34_32[192] = {
+  0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+  0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+  0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+  0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+  0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+  0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+  0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+  0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00,
+  0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+  0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+  0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+  0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+  0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+  0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+  0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+  0x0a, 0x1c, 0x05, 0x0e, 0x00, 0x00,
+  0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+  0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+  0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+  0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+  0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+  0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+  0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+  0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+  0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+  0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+  0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+  0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+  0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+  0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+  0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00,
+  0xa6, 0x27, 0xa9, 0x4a, 0x40, 0x00
+};
+
+const uint8_t kMaskRandom34_33[198] = {
+  0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+  0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+  0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+  0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+  0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+  0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+  0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+  0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00,
+  0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+  0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+  0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+  0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+  0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+  0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+  0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+  0x0a, 0x1c, 0x05, 0x0e, 0x00, 0x00,
+  0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+  0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+  0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+  0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+  0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+  0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+  0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+  0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00,
+  0x25, 0x4c, 0x12, 0xa6, 0x00, 0x00,
+  0x8a, 0x66, 0x45, 0x33, 0x00, 0x00,
+  0x91, 0x91, 0x48, 0xc8, 0x80, 0x00,
+  0x68, 0x42, 0xb4, 0x21, 0x40, 0x00,
+  0x32, 0xa4, 0x19, 0x52, 0x00, 0x00,
+  0x43, 0x13, 0x21, 0x89, 0x80, 0x00,
+  0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+  0x1c, 0x88, 0x8e, 0x44, 0x40, 0x00,
+  0x3c, 0x09, 0x1e, 0x04, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom34_34[204] = {
+  0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+  0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+  0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+  0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+  0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+  0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+  0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+  0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00,
+  0x25, 0x4c, 0x12, 0xa6, 0x00, 0x00,
+  0x8a, 0x66, 0x45, 0x33, 0x00, 0x00,
+  0x91, 0x91, 0x48, 0xc8, 0x80, 0x00,
+  0x68, 0x42, 0xb4, 0x21, 0x40, 0x00,
+  0x32, 0xa4, 0x19, 0x52, 0x00, 0x00,
+  0x43, 0x13, 0x21, 0x89, 0x80, 0x00,
+  0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+  0x1c, 0x88, 0x8e, 0x44, 0x40, 0x00,
+  0x3c, 0x09, 0x1e, 0x04, 0x80, 0x00,
+  0x25, 0x2c, 0x12, 0x96, 0x00, 0x00,
+  0x8a, 0x91, 0x45, 0x48, 0x80, 0x00,
+  0x91, 0xc0, 0xc8, 0xe0, 0x40, 0x00,
+  0x68, 0x06, 0xb4, 0x03, 0x40, 0x00,
+  0x32, 0xc8, 0x19, 0x64, 0x00, 0x00,
+  0x43, 0x45, 0x21, 0xa2, 0x80, 0x00,
+  0xc4, 0x30, 0xe2, 0x18, 0x40, 0x00,
+  0x1c, 0xa2, 0x0e, 0x51, 0x00, 0x00,
+  0x15, 0x8c, 0x0a, 0xc6, 0x00, 0x00,
+  0x8a, 0x47, 0x45, 0x23, 0x80, 0x00,
+  0x25, 0x81, 0x92, 0xc0, 0xc0, 0x00,
+  0x62, 0x12, 0xb1, 0x09, 0x40, 0x00,
+  0x58, 0x58, 0x2c, 0x2c, 0x00, 0x00,
+  0x0e, 0x28, 0x87, 0x14, 0x40, 0x00,
+  0x83, 0x34, 0x41, 0x9a, 0x00, 0x00,
+  0x0a, 0x1c, 0x05, 0x0e, 0x00, 0x00,
+  0x30, 0x3c, 0xb3, 0xe6, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom34_4[24] = {
+  0xca, 0xec, 0x65, 0x76, 0x00, 0x00,
+  0xa9, 0x67, 0x54, 0xb3, 0x80, 0x00,
+  0x3a, 0xb1, 0x9d, 0x58, 0xc0, 0x00,
+  0x55, 0x5a, 0xaa, 0xad, 0x40, 0x00
+};
+
+const uint8_t kMaskRandom34_5[30] = {
+  0x55, 0x44, 0xaa, 0xa2, 0x40, 0x00,
+  0x2a, 0x66, 0x15, 0x33, 0x00, 0x00,
+  0x25, 0xa1, 0x92, 0xd0, 0xc0, 0x00,
+  0xe2, 0x12, 0xf1, 0x09, 0x40, 0x00,
+  0x99, 0x98, 0x4c, 0xcc, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom34_6[36] = {
+  0xd1, 0x4c, 0x68, 0xa6, 0x00, 0x00,
+  0xa2, 0xc5, 0x51, 0x62, 0x80, 0x00,
+  0x95, 0x30, 0xca, 0x98, 0x40, 0x00,
+  0xca, 0x0a, 0xe5, 0x05, 0x40, 0x00,
+  0xa4, 0xaa, 0x52, 0x55, 0x00, 0x00,
+  0x78, 0x15, 0x3c, 0x0a, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom34_7[42] = {
+  0x15, 0x44, 0x8a, 0xa2, 0x40, 0x00,
+  0x8a, 0x23, 0x45, 0x11, 0x80, 0x00,
+  0x85, 0x91, 0x42, 0xc8, 0x80, 0x00,
+  0x32, 0x0a, 0x99, 0x05, 0x40, 0x00,
+  0x58, 0x34, 0x2c, 0x1a, 0x00, 0x00,
+  0x2c, 0x0d, 0x16, 0x06, 0x80, 0x00,
+  0x43, 0xc8, 0x21, 0xe4, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom34_8[48] = {
+  0x64, 0x16, 0x32, 0x0b, 0x00, 0x00,
+  0xa2, 0xc2, 0x51, 0x61, 0x00, 0x00,
+  0x51, 0x60, 0xa8, 0xb0, 0x40, 0x00,
+  0x4a, 0x85, 0x25, 0x42, 0x80, 0x00,
+  0x38, 0x4c, 0x1c, 0x26, 0x00, 0x00,
+  0x89, 0x29, 0x44, 0x94, 0x80, 0x00,
+  0x07, 0x11, 0x83, 0x88, 0xc0, 0x00,
+  0x94, 0xb0, 0x4a, 0x58, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom34_9[54] = {
+  0x8e, 0xcc, 0x47, 0x66, 0x00, 0x00,
+  0x6a, 0x2b, 0x35, 0x15, 0x80, 0x00,
+  0x36, 0x32, 0x9b, 0x19, 0x40, 0x00,
+  0xd1, 0x25, 0xe8, 0x92, 0xc0, 0x00,
+  0x55, 0x8c, 0xaa, 0xc6, 0x40, 0x00,
+  0xaa, 0x27, 0x55, 0x13, 0x80, 0x00,
+  0xa5, 0x32, 0xd2, 0x99, 0x40, 0x00,
+  0x62, 0x61, 0xb1, 0x30, 0xc0, 0x00,
+  0x3c, 0x5c, 0x1e, 0x2e, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom35_1[6] = {
+  0xff, 0xff, 0xff, 0xff, 0xe0, 0x00
+};
+
+const uint8_t kMaskRandom35_10[60] = {
+  0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+  0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+  0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+  0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+  0x8e, 0xcc, 0x66, 0x33, 0x00, 0x00,
+  0x6a, 0x2b, 0x15, 0x8a, 0xc0, 0x00,
+  0x36, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+  0xd1, 0x25, 0x92, 0xc9, 0x60, 0x00,
+  0xc8, 0x02, 0xfe, 0xce, 0xe0, 0x00
+};
+
+const uint8_t kMaskRandom35_11[66] = {
+  0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+  0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+  0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+  0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+  0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+  0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+  0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+  0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00
+};
+
+const uint8_t kMaskRandom35_12[72] = {
+  0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+  0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+  0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+  0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+  0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+  0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+  0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+  0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+  0x51, 0x35, 0x2d, 0x86, 0x20, 0x00
+};
+
+const uint8_t kMaskRandom35_13[78] = {
+  0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+  0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+  0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+  0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+  0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+  0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+  0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+  0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+  0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+  0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+  0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom35_14[84] = {
+  0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+  0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+  0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+  0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+  0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+  0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+  0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+  0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+  0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+  0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+  0xb0, 0xde, 0xbf, 0xa7, 0xe0, 0x00
+};
+
+const uint8_t kMaskRandom35_15[90] = {
+  0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+  0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+  0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+  0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+  0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+  0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+  0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+  0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+  0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+  0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+  0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+  0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+  0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom35_16[96] = {
+  0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+  0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+  0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+  0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+  0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+  0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+  0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+  0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+  0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+  0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+  0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+  0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+  0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+  0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x0a, 0x1c, 0x77, 0xf9, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom35_17[102] = {
+  0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+  0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+  0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+  0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+  0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+  0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+  0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+  0x25, 0x4c, 0x26, 0x13, 0x00, 0x00,
+  0x8a, 0x66, 0x33, 0x19, 0x80, 0x00,
+  0x91, 0x91, 0x48, 0xa4, 0x40, 0x00,
+  0x68, 0x42, 0xa1, 0x50, 0xa0, 0x00,
+  0x32, 0xa4, 0x52, 0x29, 0x00, 0x00,
+  0x43, 0x13, 0x09, 0x84, 0xc0, 0x00,
+  0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+  0x1c, 0x88, 0xc4, 0x62, 0x20, 0x00,
+  0x3c, 0x09, 0x04, 0x82, 0x40, 0x00
+};
+
+const uint8_t kMaskRandom35_18[108] = {
+  0x8e, 0xcc, 0x22, 0x51, 0x20, 0x00,
+  0x6a, 0x2b, 0x33, 0x13, 0x00, 0x00,
+  0x36, 0x32, 0xc8, 0x24, 0xa0, 0x00,
+  0xd1, 0x25, 0x80, 0xd2, 0xc0, 0x00,
+  0x55, 0x8c, 0x87, 0x09, 0x40, 0x00,
+  0xaa, 0x27, 0x09, 0x85, 0x80, 0x00,
+  0xa5, 0x32, 0x90, 0x68, 0x20, 0x00,
+  0x62, 0x61, 0xe1, 0x28, 0x80, 0x00,
+  0x3c, 0x5c, 0x14, 0x86, 0x40, 0x00,
+  0x64, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0xa2, 0xc2, 0x61, 0x30, 0x80, 0x00,
+  0x51, 0x60, 0xb0, 0x58, 0x20, 0x00,
+  0x4a, 0x85, 0x42, 0xa1, 0x40, 0x00,
+  0x38, 0x4c, 0x26, 0x13, 0x00, 0x00,
+  0x89, 0x29, 0x14, 0x8a, 0x40, 0x00,
+  0x07, 0x11, 0x88, 0xc4, 0x60, 0x00,
+  0x94, 0xb0, 0x58, 0x2c, 0x00, 0x00,
+  0x40, 0xc9, 0x65, 0xbe, 0xc0, 0x00
+};
+
+const uint8_t kMaskRandom35_19[114] = {
+  0x8e, 0xcc, 0x22, 0x51, 0x20, 0x00,
+  0x6a, 0x2b, 0x33, 0x13, 0x00, 0x00,
+  0x36, 0x32, 0xc8, 0x24, 0xa0, 0x00,
+  0xd1, 0x25, 0x80, 0xd2, 0xc0, 0x00,
+  0x55, 0x8c, 0x87, 0x09, 0x40, 0x00,
+  0xaa, 0x27, 0x09, 0x85, 0x80, 0x00,
+  0xa5, 0x32, 0x90, 0x68, 0x20, 0x00,
+  0x62, 0x61, 0xe1, 0x28, 0x80, 0x00,
+  0x3c, 0x5c, 0x14, 0x86, 0x40, 0x00,
+  0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+  0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+  0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+  0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+  0x8e, 0xcc, 0x66, 0x33, 0x00, 0x00,
+  0x6a, 0x2b, 0x15, 0x8a, 0xc0, 0x00,
+  0x36, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+  0xd1, 0x25, 0x92, 0xc9, 0x60, 0x00,
+  0xc8, 0x02, 0xfe, 0xce, 0xe0, 0x00
+};
+
+const uint8_t kMaskRandom35_2[12] = {
+  0xce, 0xce, 0x67, 0x33, 0x80, 0x00,
+  0xb9, 0x39, 0x9c, 0xce, 0x60, 0x00
+};
+
+const uint8_t kMaskRandom35_20[120] = {
+  0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+  0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+  0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+  0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+  0x8e, 0xcc, 0x66, 0x33, 0x00, 0x00,
+  0x6a, 0x2b, 0x15, 0x8a, 0xc0, 0x00,
+  0x36, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+  0xd1, 0x25, 0x92, 0xc9, 0x60, 0x00,
+  0xc8, 0x02, 0xfe, 0xce, 0xe0, 0x00,
+  0x8e, 0xcc, 0x22, 0x51, 0x20, 0x00,
+  0x6a, 0x2b, 0x33, 0x13, 0x00, 0x00,
+  0x36, 0x32, 0xc8, 0x24, 0xa0, 0x00,
+  0xd1, 0x25, 0x80, 0xd2, 0xc0, 0x00,
+  0x55, 0x8c, 0x87, 0x09, 0x40, 0x00,
+  0xaa, 0x27, 0x09, 0x85, 0x80, 0x00,
+  0xa5, 0x32, 0x90, 0x68, 0x20, 0x00,
+  0x62, 0x61, 0xe1, 0x28, 0x80, 0x00,
+  0x3c, 0x5c, 0x14, 0x86, 0x40, 0x00,
+  0x63, 0x36, 0x5c, 0xd3, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom35_21[126] = {
+  0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+  0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+  0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+  0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+  0x8e, 0xcc, 0x66, 0x33, 0x00, 0x00,
+  0x6a, 0x2b, 0x15, 0x8a, 0xc0, 0x00,
+  0x36, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+  0xd1, 0x25, 0x92, 0xc9, 0x60, 0x00,
+  0xc8, 0x02, 0xfe, 0xce, 0xe0, 0x00,
+  0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+  0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+  0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+  0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+  0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+  0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+  0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+  0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00
+};
+
+const uint8_t kMaskRandom35_22[132] = {
+  0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+  0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+  0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+  0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+  0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+  0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+  0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+  0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+  0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+  0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+  0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+  0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+  0x8e, 0xcc, 0x66, 0x33, 0x00, 0x00,
+  0x6a, 0x2b, 0x15, 0x8a, 0xc0, 0x00,
+  0x36, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+  0xd1, 0x25, 0x92, 0xc9, 0x60, 0x00,
+  0xc8, 0x02, 0xfe, 0xce, 0xe0, 0x00,
+  0x84, 0xc7, 0xbc, 0xcc, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom35_23[138] = {
+  0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+  0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+  0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+  0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+  0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+  0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+  0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+  0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+  0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+  0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+  0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+  0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+  0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+  0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+  0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+  0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+  0x51, 0x35, 0x2d, 0x86, 0x20, 0x00
+};
+
+const uint8_t kMaskRandom35_24[144] = {
+  0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+  0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+  0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+  0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+  0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+  0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+  0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+  0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+  0x51, 0x35, 0x2d, 0x86, 0x20, 0x00,
+  0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+  0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+  0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+  0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+  0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+  0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+  0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+  0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+  0x0d, 0xfb, 0x06, 0x89, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom35_25[150] = {
+  0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+  0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+  0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+  0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+  0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+  0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+  0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+  0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+  0x51, 0x35, 0x2d, 0x86, 0x20, 0x00,
+  0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+  0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+  0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+  0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+  0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+  0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+  0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+  0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+  0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+  0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+  0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom35_26[156] = {
+  0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+  0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+  0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+  0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+  0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+  0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+  0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+  0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+  0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+  0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+  0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+  0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+  0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+  0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+  0x55, 0x8c, 0xc6, 0x63, 0x20, 0x00,
+  0xaa, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0xa5, 0x32, 0x99, 0x4c, 0xa0, 0x00,
+  0x62, 0x61, 0xb0, 0xd8, 0x60, 0x00,
+  0x3c, 0x5c, 0x2e, 0x17, 0x00, 0x00,
+  0x51, 0x35, 0x2d, 0x86, 0x20, 0x00,
+  0xc4, 0x57, 0x70, 0x47, 0x40, 0x00
+};
+
+const uint8_t kMaskRandom35_27[162] = {
+  0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+  0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+  0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+  0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+  0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+  0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+  0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+  0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+  0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+  0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+  0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+  0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+  0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+  0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+  0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+  0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+  0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+  0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+  0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+  0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+  0xb0, 0xde, 0xbf, 0xa7, 0xe0, 0x00
+};
+
+const uint8_t kMaskRandom35_28[168] = {
+  0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+  0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+  0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+  0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+  0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+  0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+  0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+  0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+  0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+  0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+  0xb0, 0xde, 0xbf, 0xa7, 0xe0, 0x00,
+  0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+  0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+  0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+  0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+  0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+  0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+  0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+  0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+  0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+  0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+  0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x34, 0x4a, 0x80, 0x94, 0x40, 0x00
+};
+
+const uint8_t kMaskRandom35_29[174] = {
+  0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+  0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+  0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+  0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+  0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+  0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+  0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+  0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+  0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+  0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+  0xb0, 0xde, 0xbf, 0xa7, 0xe0, 0x00,
+  0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+  0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+  0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+  0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+  0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+  0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+  0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+  0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+  0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+  0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+  0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+  0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+  0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom35_3[18] = {
+  0xcd, 0xcc, 0x66, 0x33, 0x00, 0x00,
+  0x97, 0x27, 0x13, 0x8a, 0xc0, 0x00,
+  0xb8, 0xd1, 0xc9, 0x64, 0xa0, 0x00
+};
+
+const uint8_t kMaskRandom35_30[180] = {
+  0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+  0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+  0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+  0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+  0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+  0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+  0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+  0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+  0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+  0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+  0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+  0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+  0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+  0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+  0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+  0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+  0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+  0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+  0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+  0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x51, 0x84, 0xc2, 0x61, 0x20, 0x00,
+  0xa2, 0x27, 0x13, 0x89, 0xc0, 0x00,
+  0x95, 0x51, 0xa8, 0xd4, 0x60, 0x00,
+  0x4a, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x30, 0x68, 0x34, 0x1a, 0x00, 0x00,
+  0x2c, 0x89, 0x44, 0xa2, 0x40, 0x00,
+  0xb0, 0xde, 0xbf, 0xa7, 0xe0, 0x00,
+  0x32, 0x1b, 0x9f, 0x09, 0x20, 0x00
+};
+
+const uint8_t kMaskRandom35_31[186] = {
+  0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+  0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+  0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+  0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+  0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+  0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+  0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+  0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+  0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+  0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+  0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+  0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+  0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+  0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+  0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+  0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+  0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+  0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+  0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+  0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+  0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+  0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+  0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+  0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+  0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+  0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+  0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x0a, 0x1c, 0x77, 0xf9, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom35_32[192] = {
+  0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+  0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+  0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+  0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+  0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+  0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+  0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+  0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+  0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+  0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+  0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+  0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+  0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+  0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x0a, 0x1c, 0x77, 0xf9, 0x00, 0x00,
+  0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+  0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+  0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+  0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+  0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+  0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+  0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+  0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+  0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+  0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+  0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+  0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+  0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+  0xeb, 0x31, 0x7b, 0x80, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom35_33[198] = {
+  0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+  0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+  0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+  0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+  0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+  0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+  0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+  0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+  0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+  0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+  0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+  0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+  0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+  0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x0a, 0x1c, 0x77, 0xf9, 0x00, 0x00,
+  0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+  0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+  0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+  0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+  0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+  0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+  0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+  0x25, 0x4c, 0x26, 0x13, 0x00, 0x00,
+  0x8a, 0x66, 0x33, 0x19, 0x80, 0x00,
+  0x91, 0x91, 0x48, 0xa4, 0x40, 0x00,
+  0x68, 0x42, 0xa1, 0x50, 0xa0, 0x00,
+  0x32, 0xa4, 0x52, 0x29, 0x00, 0x00,
+  0x43, 0x13, 0x09, 0x84, 0xc0, 0x00,
+  0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+  0x1c, 0x88, 0xc4, 0x62, 0x20, 0x00,
+  0x3c, 0x09, 0x04, 0x82, 0x40, 0x00
+};
+
+const uint8_t kMaskRandom35_34[204] = {
+  0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+  0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+  0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+  0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+  0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+  0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+  0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+  0x25, 0x4c, 0x26, 0x13, 0x00, 0x00,
+  0x8a, 0x66, 0x33, 0x19, 0x80, 0x00,
+  0x91, 0x91, 0x48, 0xa4, 0x40, 0x00,
+  0x68, 0x42, 0xa1, 0x50, 0xa0, 0x00,
+  0x32, 0xa4, 0x52, 0x29, 0x00, 0x00,
+  0x43, 0x13, 0x09, 0x84, 0xc0, 0x00,
+  0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+  0x1c, 0x88, 0xc4, 0x62, 0x20, 0x00,
+  0x3c, 0x09, 0x04, 0x82, 0x40, 0x00,
+  0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+  0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+  0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+  0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+  0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+  0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+  0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+  0x15, 0x8c, 0x46, 0x23, 0x00, 0x00,
+  0x8a, 0x47, 0x23, 0x91, 0xc0, 0x00,
+  0x25, 0x81, 0xc0, 0xe0, 0x60, 0x00,
+  0x62, 0x12, 0x89, 0x44, 0xa0, 0x00,
+  0x58, 0x58, 0x2c, 0x16, 0x00, 0x00,
+  0x0e, 0x28, 0x94, 0x4a, 0x20, 0x00,
+  0x83, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x0a, 0x1c, 0x77, 0xf9, 0x00, 0x00,
+  0x70, 0x07, 0xcd, 0x8c, 0xc0, 0x00
+};
+
+const uint8_t kMaskRandom35_35[210] = {
+  0x25, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x8a, 0x91, 0x48, 0xa4, 0x40, 0x00,
+  0x91, 0xc0, 0xe0, 0x70, 0x20, 0x00,
+  0x68, 0x06, 0x83, 0x41, 0xa0, 0x00,
+  0x32, 0xc8, 0x64, 0x32, 0x00, 0x00,
+  0x43, 0x45, 0x22, 0x91, 0x40, 0x00,
+  0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+  0x1c, 0xa2, 0x51, 0x28, 0x80, 0x00,
+  0x25, 0x4c, 0x26, 0x13, 0x00, 0x00,
+  0x8a, 0x66, 0x33, 0x19, 0x80, 0x00,
+  0x91, 0x91, 0x48, 0xa4, 0x40, 0x00,
+  0x68, 0x42, 0xa1, 0x50, 0xa0, 0x00,
+  0x32, 0xa4, 0x52, 0x29, 0x00, 0x00,
+  0x43, 0x13, 0x09, 0x84, 0xc0, 0x00,
+  0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+  0x1c, 0x88, 0xc4, 0x62, 0x20, 0x00,
+  0x3c, 0x09, 0x04, 0x82, 0x40, 0x00,
+  0x25, 0x2c, 0x26, 0x13, 0x00, 0x00,
+  0x8a, 0x91, 0x33, 0x19, 0x80, 0x00,
+  0x91, 0xc0, 0xc8, 0xa4, 0x40, 0x00,
+  0x68, 0x06, 0xa1, 0x50, 0xa0, 0x00,
+  0x32, 0xc8, 0x52, 0x29, 0x00, 0x00,
+  0x43, 0x45, 0x09, 0x84, 0xc0, 0x00,
+  0xc4, 0x30, 0x98, 0x4c, 0x20, 0x00,
+  0x1c, 0xa2, 0x44, 0x62, 0x20, 0x00,
+  0x25, 0x4c, 0x04, 0x82, 0x40, 0x00,
+  0x8a, 0x66, 0x16, 0x0b, 0x00, 0x00,
+  0x91, 0x91, 0x48, 0xa4, 0x40, 0x00,
+  0x68, 0x42, 0xe0, 0x70, 0x20, 0x00,
+  0x32, 0xa4, 0x03, 0x41, 0xa0, 0x00,
+  0x43, 0x13, 0x64, 0x32, 0x00, 0x00,
+  0xc4, 0x30, 0xa2, 0x91, 0x40, 0x00,
+  0x1c, 0x88, 0x98, 0x4c, 0x20, 0x00,
+  0x3c, 0x09, 0x51, 0x28, 0x80, 0x00,
+  0xc2, 0x1c, 0x68, 0x01, 0xa0, 0x00
+};
+
+const uint8_t kMaskRandom35_4[24] = {
+  0xca, 0xec, 0x76, 0x3b, 0x00, 0x00,
+  0xa9, 0x67, 0x33, 0x99, 0xc0, 0x00,
+  0x3a, 0xb1, 0xd8, 0xec, 0x60, 0x00,
+  0x55, 0x5a, 0xad, 0x56, 0xa0, 0x00
+};
+
+const uint8_t kMaskRandom35_5[30] = {
+  0x55, 0x44, 0xa6, 0x53, 0x20, 0x00,
+  0x2a, 0x66, 0x33, 0x19, 0x80, 0x00,
+  0x25, 0xa1, 0x8c, 0xe8, 0x60, 0x00,
+  0xe2, 0x12, 0xce, 0x44, 0xa0, 0x00,
+  0x99, 0x98, 0x71, 0xa6, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom35_6[36] = {
+  0xd1, 0x4c, 0x66, 0x13, 0x00, 0x00,
+  0xa2, 0xc5, 0x22, 0xb1, 0x40, 0x00,
+  0x95, 0x30, 0xd8, 0x4c, 0x20, 0x00,
+  0xca, 0x0a, 0xc5, 0x42, 0xa0, 0x00,
+  0xa4, 0xaa, 0x14, 0xa9, 0x80, 0x00,
+  0x78, 0x15, 0x53, 0x05, 0x40, 0x00
+};
+
+const uint8_t kMaskRandom35_7[42] = {
+  0x15, 0x44, 0xa2, 0x51, 0x20, 0x00,
+  0x8a, 0x23, 0x11, 0x88, 0xc0, 0x00,
+  0x85, 0x91, 0x48, 0xa4, 0x40, 0x00,
+  0x32, 0x0a, 0x85, 0x42, 0xa0, 0x00,
+  0x58, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x2c, 0x0d, 0x05, 0x83, 0x40, 0x00,
+  0x43, 0xc8, 0x70, 0x32, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom35_8[48] = {
+  0x64, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0xa2, 0xc2, 0x61, 0x30, 0x80, 0x00,
+  0x51, 0x60, 0xb0, 0x58, 0x20, 0x00,
+  0x4a, 0x85, 0x42, 0xa1, 0x40, 0x00,
+  0x38, 0x4c, 0x26, 0x13, 0x00, 0x00,
+  0x89, 0x29, 0x14, 0x8a, 0x40, 0x00,
+  0x07, 0x11, 0x88, 0xc4, 0x60, 0x00,
+  0x94, 0xb0, 0x58, 0x2c, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom35_9[54] = {
+  0x8e, 0xcc, 0x22, 0x51, 0x20, 0x00,
+  0x6a, 0x2b, 0x33, 0x13, 0x00, 0x00,
+  0x36, 0x32, 0xc8, 0x24, 0xa0, 0x00,
+  0xd1, 0x25, 0x80, 0xd2, 0xc0, 0x00,
+  0x55, 0x8c, 0x87, 0x09, 0x40, 0x00,
+  0xaa, 0x27, 0x09, 0x85, 0x80, 0x00,
+  0xa5, 0x32, 0x90, 0x68, 0x20, 0x00,
+  0x62, 0x61, 0xe1, 0x28, 0x80, 0x00,
+  0x3c, 0x5c, 0x14, 0x86, 0x40, 0x00
+};
+
+const uint8_t kMaskRandom36_1[6] = {
+  0xff, 0xff, 0xff, 0xff, 0xf0, 0x00
+};
+
+const uint8_t kMaskRandom36_10[60] = {
+  0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+  0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+  0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+  0xcc, 0x66, 0x33, 0x19, 0x80, 0x00,
+  0x2b, 0x15, 0x8a, 0xc5, 0x60, 0x00,
+  0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+  0x25, 0x92, 0xc9, 0x64, 0xb0, 0x00,
+  0xfd, 0x9d, 0xff, 0x67, 0x70, 0x00
+};
+
+const uint8_t kMaskRandom36_11[66] = {
+  0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+  0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+  0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+  0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+  0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+  0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x89, 0x44, 0xa2, 0x51, 0x20, 0x00
+};
+
+const uint8_t kMaskRandom36_12[72] = {
+  0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+  0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+  0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+  0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+  0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+  0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+  0x5b, 0x0c, 0x56, 0xc3, 0x10, 0x00
+};
+
+const uint8_t kMaskRandom36_13[78] = {
+  0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+  0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+  0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+  0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+  0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+  0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+  0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+  0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+  0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom36_14[84] = {
+  0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+  0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+  0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+  0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+  0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+  0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+  0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+  0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+  0x7f, 0x4f, 0xdf, 0xd3, 0xf0, 0x00
+};
+
+const uint8_t kMaskRandom36_15[90] = {
+  0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+  0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+  0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+  0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+  0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+  0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+  0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+  0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0xa2, 0x51, 0x28, 0x94, 0x40, 0x00
+};
+
+const uint8_t kMaskRandom36_16[96] = {
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+  0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+  0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+  0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+  0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+  0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+  0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+  0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+  0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0xef, 0xf2, 0x3b, 0xfc, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom36_17[102] = {
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+  0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+  0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+  0x4c, 0x26, 0x13, 0x09, 0x80, 0x00,
+  0x66, 0x33, 0x19, 0x8c, 0xc0, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0x42, 0xa1, 0x50, 0xa8, 0x50, 0x00,
+  0xa4, 0x52, 0x29, 0x14, 0x80, 0x00,
+  0x13, 0x09, 0x84, 0xc2, 0x60, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0x88, 0xc4, 0x62, 0x31, 0x10, 0x00,
+  0x09, 0x04, 0x82, 0x41, 0x20, 0x00
+};
+
+const uint8_t kMaskRandom36_18[108] = {
+  0x4c, 0x26, 0x13, 0x09, 0x80, 0x00,
+  0x66, 0x33, 0x19, 0x8c, 0xc0, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0x42, 0xa1, 0x50, 0xa8, 0x50, 0x00,
+  0xa4, 0x52, 0x29, 0x14, 0x80, 0x00,
+  0x13, 0x09, 0x84, 0xc2, 0x60, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0x88, 0xc4, 0x62, 0x31, 0x10, 0x00,
+  0x09, 0x04, 0x82, 0x41, 0x20, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+  0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+  0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+  0xd0, 0x03, 0x74, 0x00, 0xd0, 0x00
+};
+
+const uint8_t kMaskRandom36_19[114] = {
+  0x44, 0xa2, 0x51, 0x28, 0x90, 0x00,
+  0x66, 0x26, 0x19, 0x89, 0x80, 0x00,
+  0x90, 0x49, 0x64, 0x12, 0x50, 0x00,
+  0x01, 0xa5, 0x80, 0x69, 0x60, 0x00,
+  0x0e, 0x12, 0x83, 0x84, 0xa0, 0x00,
+  0x13, 0x0b, 0x04, 0xc2, 0xc0, 0x00,
+  0x20, 0xd0, 0x48, 0x34, 0x10, 0x00,
+  0xc2, 0x51, 0x30, 0x94, 0x40, 0x00,
+  0x29, 0x0c, 0x8a, 0x43, 0x20, 0x00,
+  0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+  0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+  0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+  0xcc, 0x66, 0x33, 0x19, 0x80, 0x00,
+  0x2b, 0x15, 0x8a, 0xc5, 0x60, 0x00,
+  0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+  0x25, 0x92, 0xc9, 0x64, 0xb0, 0x00,
+  0xfd, 0x9d, 0xff, 0x67, 0x70, 0x00
+};
+
+const uint8_t kMaskRandom36_2[12] = {
+  0xce, 0x67, 0x33, 0x99, 0xc0, 0x00,
+  0x39, 0x9c, 0xce, 0x67, 0x30, 0x00
+};
+
+const uint8_t kMaskRandom36_20[120] = {
+  0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+  0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+  0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+  0xcc, 0x66, 0x33, 0x19, 0x80, 0x00,
+  0x2b, 0x15, 0x8a, 0xc5, 0x60, 0x00,
+  0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+  0x25, 0x92, 0xc9, 0x64, 0xb0, 0x00,
+  0xfd, 0x9d, 0xff, 0x67, 0x70, 0x00,
+  0x44, 0xa2, 0x51, 0x28, 0x90, 0x00,
+  0x66, 0x26, 0x19, 0x89, 0x80, 0x00,
+  0x90, 0x49, 0x64, 0x12, 0x50, 0x00,
+  0x01, 0xa5, 0x80, 0x69, 0x60, 0x00,
+  0x0e, 0x12, 0x83, 0x84, 0xa0, 0x00,
+  0x13, 0x0b, 0x04, 0xc2, 0xc0, 0x00,
+  0x20, 0xd0, 0x48, 0x34, 0x10, 0x00,
+  0xc2, 0x51, 0x30, 0x94, 0x40, 0x00,
+  0x29, 0x0c, 0x8a, 0x43, 0x20, 0x00,
+  0x45, 0xb9, 0x08, 0x16, 0x30, 0x00
+};
+
+const uint8_t kMaskRandom36_21[126] = {
+  0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+  0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+  0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+  0xcc, 0x66, 0x33, 0x19, 0x80, 0x00,
+  0x2b, 0x15, 0x8a, 0xc5, 0x60, 0x00,
+  0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+  0x25, 0x92, 0xc9, 0x64, 0xb0, 0x00,
+  0xfd, 0x9d, 0xff, 0x67, 0x70, 0x00,
+  0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+  0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+  0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+  0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+  0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+  0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x89, 0x44, 0xa2, 0x51, 0x20, 0x00
+};
+
+const uint8_t kMaskRandom36_22[132] = {
+  0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+  0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+  0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+  0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+  0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+  0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+  0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+  0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+  0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+  0xcc, 0x66, 0x33, 0x19, 0x80, 0x00,
+  0x2b, 0x15, 0x8a, 0xc5, 0x60, 0x00,
+  0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+  0x25, 0x92, 0xc9, 0x64, 0xb0, 0x00,
+  0xfd, 0x9d, 0xff, 0x67, 0x70, 0x00,
+  0x71, 0x04, 0xba, 0x7b, 0xe0, 0x00
+};
+
+const uint8_t kMaskRandom36_23[138] = {
+  0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+  0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+  0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+  0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+  0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+  0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+  0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+  0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+  0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+  0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+  0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+  0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+  0x5b, 0x0c, 0x56, 0xc3, 0x10, 0x00
+};
+
+const uint8_t kMaskRandom36_24[144] = {
+  0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+  0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+  0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+  0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+  0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+  0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+  0x5b, 0x0c, 0x56, 0xc3, 0x10, 0x00,
+  0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+  0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+  0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+  0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+  0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+  0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+  0x76, 0x3a, 0xeb, 0x17, 0xc0, 0x00
+};
+
+const uint8_t kMaskRandom36_25[150] = {
+  0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+  0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+  0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+  0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+  0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+  0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+  0x5b, 0x0c, 0x56, 0xc3, 0x10, 0x00,
+  0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+  0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+  0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+  0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+  0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+  0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+  0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+  0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+  0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom36_26[156] = {
+  0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+  0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+  0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+  0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+  0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+  0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+  0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+  0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+  0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+  0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+  0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+  0x8c, 0xc6, 0x63, 0x31, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x32, 0x99, 0x4c, 0xa6, 0x50, 0x00,
+  0x61, 0xb0, 0xd8, 0x6c, 0x30, 0x00,
+  0x5c, 0x2e, 0x17, 0x0b, 0x80, 0x00,
+  0x5b, 0x0c, 0x56, 0xc3, 0x10, 0x00,
+  0xec, 0x58, 0x0e, 0x6c, 0xe0, 0x00
+};
+
+const uint8_t kMaskRandom36_27[162] = {
+  0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+  0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+  0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+  0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+  0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+  0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+  0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+  0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+  0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+  0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+  0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+  0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+  0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+  0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+  0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+  0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+  0x7f, 0x4f, 0xdf, 0xd3, 0xf0, 0x00
+};
+
+const uint8_t kMaskRandom36_28[168] = {
+  0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+  0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+  0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+  0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+  0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+  0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+  0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+  0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+  0x7f, 0x4f, 0xdf, 0xd3, 0xf0, 0x00,
+  0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+  0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+  0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+  0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+  0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+  0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+  0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+  0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+  0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x29, 0xfd, 0x91, 0x6f, 0xd0, 0x00
+};
+
+const uint8_t kMaskRandom36_29[174] = {
+  0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+  0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+  0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+  0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+  0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+  0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+  0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+  0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+  0x7f, 0x4f, 0xdf, 0xd3, 0xf0, 0x00,
+  0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+  0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+  0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+  0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+  0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+  0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+  0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+  0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0xa2, 0x51, 0x28, 0x94, 0x40, 0x00
+};
+
+const uint8_t kMaskRandom36_3[18] = {
+  0xcc, 0x66, 0x33, 0x19, 0x80, 0x00,
+  0x27, 0x15, 0x89, 0xc5, 0x60, 0x00,
+  0x92, 0xc9, 0x64, 0xb2, 0x50, 0x00
+};
+
+const uint8_t kMaskRandom36_30[180] = {
+  0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+  0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+  0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+  0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+  0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+  0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+  0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+  0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+  0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+  0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+  0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+  0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+  0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+  0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x84, 0xc2, 0x61, 0x30, 0x90, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0xe0, 0x00,
+  0x51, 0xa8, 0xd4, 0x6a, 0x30, 0x00,
+  0x1a, 0x0d, 0x06, 0x83, 0x40, 0x00,
+  0x68, 0x34, 0x1a, 0x0d, 0x00, 0x00,
+  0x89, 0x44, 0xa2, 0x51, 0x20, 0x00,
+  0x7f, 0x4f, 0xdf, 0xd3, 0xf0, 0x00,
+  0xc5, 0x38, 0xbb, 0x98, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom36_31[186] = {
+  0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+  0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+  0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+  0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+  0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+  0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+  0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+  0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+  0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+  0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+  0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+  0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+  0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+  0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+  0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+  0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0xef, 0xf2, 0x3b, 0xfc, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom36_32[192] = {
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+  0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+  0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+  0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+  0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+  0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+  0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+  0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+  0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0xef, 0xf2, 0x3b, 0xfc, 0x80, 0x00,
+  0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+  0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+  0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+  0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+  0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+  0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+  0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+  0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+  0x3a, 0x28, 0x9c, 0x2f, 0xc0, 0x00
+};
+
+const uint8_t kMaskRandom36_33[198] = {
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+  0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+  0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+  0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+  0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+  0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+  0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+  0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+  0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0xef, 0xf2, 0x3b, 0xfc, 0x80, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+  0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+  0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+  0x4c, 0x26, 0x13, 0x09, 0x80, 0x00,
+  0x66, 0x33, 0x19, 0x8c, 0xc0, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0x42, 0xa1, 0x50, 0xa8, 0x50, 0x00,
+  0xa4, 0x52, 0x29, 0x14, 0x80, 0x00,
+  0x13, 0x09, 0x84, 0xc2, 0x60, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0x88, 0xc4, 0x62, 0x31, 0x10, 0x00,
+  0x09, 0x04, 0x82, 0x41, 0x20, 0x00
+};
+
+const uint8_t kMaskRandom36_34[204] = {
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+  0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+  0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+  0x4c, 0x26, 0x13, 0x09, 0x80, 0x00,
+  0x66, 0x33, 0x19, 0x8c, 0xc0, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0x42, 0xa1, 0x50, 0xa8, 0x50, 0x00,
+  0xa4, 0x52, 0x29, 0x14, 0x80, 0x00,
+  0x13, 0x09, 0x84, 0xc2, 0x60, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0x88, 0xc4, 0x62, 0x31, 0x10, 0x00,
+  0x09, 0x04, 0x82, 0x41, 0x20, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+  0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+  0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+  0x8c, 0x46, 0x23, 0x11, 0x80, 0x00,
+  0x47, 0x23, 0x91, 0xc8, 0xe0, 0x00,
+  0x81, 0xc0, 0xe0, 0x70, 0x30, 0x00,
+  0x12, 0x89, 0x44, 0xa2, 0x50, 0x00,
+  0x58, 0x2c, 0x16, 0x0b, 0x00, 0x00,
+  0x28, 0x94, 0x4a, 0x25, 0x10, 0x00,
+  0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0xef, 0xf2, 0x3b, 0xfc, 0x80, 0x00,
+  0xf7, 0x5e, 0x66, 0x5b, 0x60, 0x00
+};
+
+const uint8_t kMaskRandom36_35[210] = {
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+  0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+  0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+  0x4c, 0x26, 0x13, 0x09, 0x80, 0x00,
+  0x66, 0x33, 0x19, 0x8c, 0xc0, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0x42, 0xa1, 0x50, 0xa8, 0x50, 0x00,
+  0xa4, 0x52, 0x29, 0x14, 0x80, 0x00,
+  0x13, 0x09, 0x84, 0xc2, 0x60, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0x88, 0xc4, 0x62, 0x31, 0x10, 0x00,
+  0x09, 0x04, 0x82, 0x41, 0x20, 0x00,
+  0x4c, 0x26, 0x13, 0x09, 0x80, 0x00,
+  0x66, 0x33, 0x19, 0x8c, 0xc0, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0x42, 0xa1, 0x50, 0xa8, 0x50, 0x00,
+  0xa4, 0x52, 0x29, 0x14, 0x80, 0x00,
+  0x13, 0x09, 0x84, 0xc2, 0x60, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0x88, 0xc4, 0x62, 0x31, 0x10, 0x00,
+  0x09, 0x04, 0x82, 0x41, 0x20, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+  0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+  0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+  0xd0, 0x03, 0x74, 0x00, 0xd0, 0x00
+};
+
+const uint8_t kMaskRandom36_36[216] = {
+  0x4c, 0x26, 0x13, 0x09, 0x80, 0x00,
+  0x66, 0x33, 0x19, 0x8c, 0xc0, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0x42, 0xa1, 0x50, 0xa8, 0x50, 0x00,
+  0xa4, 0x52, 0x29, 0x14, 0x80, 0x00,
+  0x13, 0x09, 0x84, 0xc2, 0x60, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0x88, 0xc4, 0x62, 0x31, 0x10, 0x00,
+  0x09, 0x04, 0x82, 0x41, 0x20, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+  0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+  0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+  0xd0, 0x03, 0x74, 0x00, 0xd0, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0xc0, 0xe0, 0x70, 0x38, 0x10, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xd0, 0x00,
+  0xc8, 0x64, 0x32, 0x19, 0x00, 0x00,
+  0x45, 0x22, 0x91, 0x48, 0xa0, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0xa2, 0x51, 0x28, 0x94, 0x40, 0x00,
+  0x4c, 0x26, 0x13, 0x09, 0x80, 0x00,
+  0x66, 0x33, 0x19, 0x8c, 0xc0, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0x42, 0xa1, 0x50, 0xa8, 0x50, 0x00,
+  0xa4, 0x52, 0x29, 0x14, 0x80, 0x00,
+  0x13, 0x09, 0x84, 0xc2, 0x60, 0x00,
+  0x30, 0x98, 0x4c, 0x26, 0x10, 0x00,
+  0x88, 0xc4, 0x62, 0x31, 0x10, 0x00,
+  0x09, 0x04, 0x82, 0x41, 0x20, 0x00,
+  0xa4, 0x9c, 0x31, 0x13, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom36_4[24] = {
+  0xec, 0x76, 0x3b, 0x1d, 0x80, 0x00,
+  0x67, 0x33, 0x99, 0xcc, 0xe0, 0x00,
+  0xb1, 0xd8, 0xec, 0x76, 0x30, 0x00,
+  0x5a, 0xad, 0x56, 0xab, 0x50, 0x00
+};
+
+const uint8_t kMaskRandom36_5[30] = {
+  0x4c, 0xa6, 0x53, 0x29, 0x90, 0x00,
+  0x66, 0x33, 0x19, 0x8c, 0xc0, 0x00,
+  0x19, 0xd0, 0xc6, 0x74, 0x30, 0x00,
+  0x9c, 0x89, 0x67, 0x22, 0x50, 0x00,
+  0xe3, 0x4c, 0x38, 0xd3, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom36_6[36] = {
+  0xcc, 0x26, 0x33, 0x09, 0x80, 0x00,
+  0x45, 0x62, 0x91, 0x58, 0xa0, 0x00,
+  0xb0, 0x98, 0x6c, 0x26, 0x10, 0x00,
+  0x8a, 0x85, 0x62, 0xa1, 0x50, 0x00,
+  0x29, 0x53, 0x0a, 0x54, 0xc0, 0x00,
+  0xa6, 0x0a, 0xa9, 0x82, 0xa0, 0x00
+};
+
+const uint8_t kMaskRandom36_7[42] = {
+  0x44, 0xa2, 0x51, 0x28, 0x90, 0x00,
+  0x23, 0x11, 0x88, 0xc4, 0x60, 0x00,
+  0x91, 0x48, 0xa4, 0x52, 0x20, 0x00,
+  0x0a, 0x85, 0x42, 0xa1, 0x50, 0x00,
+  0x34, 0x1a, 0x0d, 0x06, 0x80, 0x00,
+  0x0b, 0x06, 0x82, 0xc1, 0xa0, 0x00,
+  0xe0, 0x64, 0x38, 0x19, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom36_8[48] = {
+  0x16, 0x0b, 0x05, 0x82, 0xc0, 0x00,
+  0xc2, 0x61, 0x30, 0x98, 0x40, 0x00,
+  0x60, 0xb0, 0x58, 0x2c, 0x10, 0x00,
+  0x85, 0x42, 0xa1, 0x50, 0xa0, 0x00,
+  0x4c, 0x26, 0x13, 0x09, 0x80, 0x00,
+  0x29, 0x14, 0x8a, 0x45, 0x20, 0x00,
+  0x11, 0x88, 0xc4, 0x62, 0x30, 0x00,
+  0xb0, 0x58, 0x2c, 0x16, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom36_9[54] = {
+  0x44, 0xa2, 0x51, 0x28, 0x90, 0x00,
+  0x66, 0x26, 0x19, 0x89, 0x80, 0x00,
+  0x90, 0x49, 0x64, 0x12, 0x50, 0x00,
+  0x01, 0xa5, 0x80, 0x69, 0x60, 0x00,
+  0x0e, 0x12, 0x83, 0x84, 0xa0, 0x00,
+  0x13, 0x0b, 0x04, 0xc2, 0xc0, 0x00,
+  0x20, 0xd0, 0x48, 0x34, 0x10, 0x00,
+  0xc2, 0x51, 0x30, 0x94, 0x40, 0x00,
+  0x29, 0x0c, 0x8a, 0x43, 0x20, 0x00
+};
+
+const uint8_t kMaskRandom37_1[6] = {
+  0xff, 0xff, 0xff, 0xff, 0xf8, 0x00
+};
+
+const uint8_t kMaskRandom37_10[60] = {
+  0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+  0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+  0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+  0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+  0xcc, 0x66, 0x33, 0x1d, 0x40, 0x00,
+  0x2b, 0x15, 0x8a, 0xc6, 0x70, 0x00,
+  0x32, 0x99, 0x4c, 0xb4, 0x98, 0x00,
+  0x25, 0x92, 0xc9, 0x63, 0xa8, 0x00,
+  0xfd, 0x9d, 0xd4, 0x22, 0x30, 0x00
+};
+
+const uint8_t kMaskRandom37_11[66] = {
+  0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+  0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+  0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+  0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+  0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+  0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+  0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+  0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+  0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom37_12[72] = {
+  0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+  0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+  0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+  0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+  0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+  0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+  0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+  0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+  0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+  0x5b, 0x0c, 0x64, 0x32, 0x20, 0x00
+};
+
+const uint8_t kMaskRandom37_13[78] = {
+  0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+  0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+  0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+  0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+  0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+  0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+  0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+  0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+  0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+  0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+  0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+  0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00
+};
+
+const uint8_t kMaskRandom37_14[84] = {
+  0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+  0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+  0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+  0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+  0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+  0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+  0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+  0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+  0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+  0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+  0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+  0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+  0x7f, 0x4f, 0xdb, 0x89, 0xd8, 0x00
+};
+
+const uint8_t kMaskRandom37_15[90] = {
+  0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+  0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+  0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+  0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+  0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+  0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+  0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+  0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+  0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+  0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+  0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00
+};
+
+const uint8_t kMaskRandom37_16[96] = {
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+  0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+  0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+  0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+  0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+  0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+  0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+  0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+  0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+  0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+  0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+  0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+  0xef, 0xf2, 0x1f, 0x9d, 0x78, 0x00
+};
+
+const uint8_t kMaskRandom37_17[102] = {
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+  0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+  0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+  0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+  0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+  0x4c, 0x26, 0x13, 0x09, 0xc0, 0x00,
+  0x66, 0x33, 0x19, 0x9c, 0x60, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x38, 0x00,
+  0x42, 0xa1, 0x50, 0xa4, 0x28, 0x00,
+  0xa4, 0x52, 0x29, 0x0a, 0x50, 0x00,
+  0x13, 0x09, 0x84, 0xd6, 0x80, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0xd0, 0x00,
+  0x88, 0xc4, 0x62, 0x2b, 0x08, 0x00,
+  0x09, 0x04, 0x82, 0x43, 0x30, 0x00
+};
+
+const uint8_t kMaskRandom37_18[108] = {
+  0x4c, 0x26, 0x13, 0x09, 0xc0, 0x00,
+  0x66, 0x33, 0x19, 0x9c, 0x60, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x38, 0x00,
+  0x42, 0xa1, 0x50, 0xa4, 0x28, 0x00,
+  0xa4, 0x52, 0x29, 0x0a, 0x50, 0x00,
+  0x13, 0x09, 0x84, 0xd6, 0x80, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0xd0, 0x00,
+  0x88, 0xc4, 0x62, 0x2b, 0x08, 0x00,
+  0x09, 0x04, 0x82, 0x43, 0x30, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+  0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+  0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+  0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+  0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+  0xd0, 0x03, 0x54, 0x65, 0xc8, 0x00
+};
+
+const uint8_t kMaskRandom37_19[114] = {
+  0x44, 0xa2, 0x51, 0x29, 0xc0, 0x00,
+  0x66, 0x26, 0x19, 0x9c, 0x20, 0x00,
+  0x90, 0x49, 0x44, 0xb0, 0x38, 0x00,
+  0x01, 0xa5, 0xb0, 0xc4, 0x28, 0x00,
+  0x0e, 0x12, 0xa3, 0x0a, 0x50, 0x00,
+  0x13, 0x0b, 0x04, 0x56, 0xc0, 0x00,
+  0x20, 0xd0, 0x48, 0x64, 0xd0, 0x00,
+  0xc2, 0x51, 0x28, 0x8b, 0x00, 0x00,
+  0x29, 0x0c, 0x86, 0x03, 0x38, 0x00,
+  0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+  0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+  0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+  0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+  0xcc, 0x66, 0x33, 0x1d, 0x40, 0x00,
+  0x2b, 0x15, 0x8a, 0xc6, 0x70, 0x00,
+  0x32, 0x99, 0x4c, 0xb4, 0x98, 0x00,
+  0x25, 0x92, 0xc9, 0x63, 0xa8, 0x00,
+  0xfd, 0x9d, 0xd4, 0x22, 0x30, 0x00
+};
+
+const uint8_t kMaskRandom37_2[12] = {
+  0xce, 0x67, 0x33, 0x9d, 0xc0, 0x00,
+  0x39, 0x9c, 0xce, 0x73, 0x38, 0x00
+};
+
+const uint8_t kMaskRandom37_20[120] = {
+  0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+  0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+  0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+  0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+  0xcc, 0x66, 0x33, 0x1d, 0x40, 0x00,
+  0x2b, 0x15, 0x8a, 0xc6, 0x70, 0x00,
+  0x32, 0x99, 0x4c, 0xb4, 0x98, 0x00,
+  0x25, 0x92, 0xc9, 0x63, 0xa8, 0x00,
+  0xfd, 0x9d, 0xd4, 0x22, 0x30, 0x00,
+  0x44, 0xa2, 0x51, 0x29, 0xc0, 0x00,
+  0x66, 0x26, 0x19, 0x9c, 0x20, 0x00,
+  0x90, 0x49, 0x44, 0xb0, 0x38, 0x00,
+  0x01, 0xa5, 0xb0, 0xc4, 0x28, 0x00,
+  0x0e, 0x12, 0xa3, 0x0a, 0x50, 0x00,
+  0x13, 0x0b, 0x04, 0x56, 0xc0, 0x00,
+  0x20, 0xd0, 0x48, 0x64, 0xd0, 0x00,
+  0xc2, 0x51, 0x28, 0x8b, 0x00, 0x00,
+  0x29, 0x0c, 0x86, 0x03, 0x38, 0x00,
+  0xe5, 0x44, 0xda, 0x3a, 0xc8, 0x00
+};
+
+const uint8_t kMaskRandom37_21[126] = {
+  0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+  0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+  0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+  0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+  0xcc, 0x66, 0x33, 0x1d, 0x40, 0x00,
+  0x2b, 0x15, 0x8a, 0xc6, 0x70, 0x00,
+  0x32, 0x99, 0x4c, 0xb4, 0x98, 0x00,
+  0x25, 0x92, 0xc9, 0x63, 0xa8, 0x00,
+  0xfd, 0x9d, 0xd4, 0x22, 0x30, 0x00,
+  0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+  0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+  0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+  0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+  0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+  0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+  0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+  0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+  0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom37_22[132] = {
+  0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+  0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+  0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+  0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+  0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+  0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+  0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+  0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+  0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+  0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+  0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+  0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+  0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+  0xcc, 0x66, 0x33, 0x1d, 0x40, 0x00,
+  0x2b, 0x15, 0x8a, 0xc6, 0x70, 0x00,
+  0x32, 0x99, 0x4c, 0xb4, 0x98, 0x00,
+  0x25, 0x92, 0xc9, 0x63, 0xa8, 0x00,
+  0xfd, 0x9d, 0xd4, 0x22, 0x30, 0x00,
+  0xe4, 0xd3, 0xff, 0x5a, 0x28, 0x00
+};
+
+const uint8_t kMaskRandom37_23[138] = {
+  0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+  0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+  0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+  0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+  0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+  0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+  0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+  0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+  0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+  0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+  0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+  0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+  0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+  0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+  0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+  0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+  0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+  0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+  0x5b, 0x0c, 0x64, 0x32, 0x20, 0x00
+};
+
+const uint8_t kMaskRandom37_24[144] = {
+  0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+  0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+  0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+  0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+  0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+  0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+  0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+  0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+  0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+  0x5b, 0x0c, 0x64, 0x32, 0x20, 0x00,
+  0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+  0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+  0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+  0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+  0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+  0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+  0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+  0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+  0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+  0xad, 0x58, 0xb2, 0x36, 0x68, 0x00
+};
+
+const uint8_t kMaskRandom37_25[150] = {
+  0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+  0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+  0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+  0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+  0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+  0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+  0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+  0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+  0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+  0x5b, 0x0c, 0x64, 0x32, 0x20, 0x00,
+  0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+  0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+  0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+  0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+  0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+  0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+  0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+  0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+  0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+  0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+  0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+  0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00
+};
+
+const uint8_t kMaskRandom37_26[156] = {
+  0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+  0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+  0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+  0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+  0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+  0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+  0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+  0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+  0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+  0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+  0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+  0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+  0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+  0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+  0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+  0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+  0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+  0x8c, 0xc6, 0x63, 0x38, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc4, 0x70, 0x00,
+  0x32, 0x99, 0x4c, 0xa3, 0x48, 0x00,
+  0x61, 0xb0, 0xd8, 0x64, 0x98, 0x00,
+  0x5c, 0x2e, 0x17, 0x0e, 0x20, 0x00,
+  0x5b, 0x0c, 0x64, 0x32, 0x20, 0x00,
+  0x7f, 0xb2, 0x5a, 0xaa, 0x20, 0x00
+};
+
+const uint8_t kMaskRandom37_27[162] = {
+  0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+  0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+  0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+  0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+  0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+  0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+  0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+  0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+  0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+  0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+  0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+  0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+  0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+  0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+  0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+  0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+  0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+  0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+  0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+  0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+  0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+  0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+  0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+  0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+  0x7f, 0x4f, 0xdb, 0x89, 0xd8, 0x00
+};
+
+const uint8_t kMaskRandom37_28[168] = {
+  0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+  0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+  0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+  0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+  0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+  0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+  0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+  0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+  0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+  0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+  0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+  0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+  0x7f, 0x4f, 0xdb, 0x89, 0xd8, 0x00,
+  0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+  0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+  0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+  0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+  0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+  0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+  0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+  0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+  0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+  0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+  0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+  0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+  0x7b, 0xc4, 0x24, 0xbf, 0x10, 0x00
+};
+
+const uint8_t kMaskRandom37_29[174] = {
+  0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+  0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+  0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+  0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+  0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+  0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+  0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+  0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+  0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+  0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+  0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+  0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+  0x7f, 0x4f, 0xdb, 0x89, 0xd8, 0x00,
+  0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+  0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+  0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+  0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+  0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+  0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+  0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+  0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+  0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+  0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+  0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00
+};
+
+const uint8_t kMaskRandom37_3[18] = {
+  0xcc, 0x66, 0x33, 0x19, 0xc0, 0x00,
+  0x27, 0x15, 0x89, 0xcb, 0x30, 0x00,
+  0x92, 0xc9, 0x64, 0xb4, 0x98, 0x00
+};
+
+const uint8_t kMaskRandom37_30[180] = {
+  0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+  0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+  0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+  0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+  0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+  0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+  0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+  0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+  0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+  0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+  0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+  0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+  0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+  0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+  0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+  0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+  0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+  0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+  0x84, 0xc2, 0x61, 0x21, 0xc0, 0x00,
+  0x27, 0x13, 0x89, 0xc6, 0x60, 0x00,
+  0x51, 0xa8, 0xd4, 0x62, 0x18, 0x00,
+  0x1a, 0x0d, 0x06, 0x88, 0xa8, 0x00,
+  0x68, 0x34, 0x1a, 0x11, 0x10, 0x00,
+  0x89, 0x44, 0xa2, 0x5c, 0x00, 0x00,
+  0x7f, 0x4f, 0xdb, 0x89, 0xd8, 0x00,
+  0x1d, 0x8e, 0x11, 0xb0, 0xe8, 0x00
+};
+
+const uint8_t kMaskRandom37_31[186] = {
+  0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+  0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+  0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+  0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+  0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+  0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+  0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+  0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+  0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+  0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+  0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+  0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+  0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+  0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+  0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+  0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+  0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+  0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+  0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+  0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+  0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+  0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+  0xef, 0xf2, 0x1f, 0x9d, 0x78, 0x00
+};
+
+const uint8_t kMaskRandom37_32[192] = {
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+  0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+  0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+  0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+  0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+  0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+  0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+  0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+  0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+  0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+  0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+  0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+  0xef, 0xf2, 0x1f, 0x9d, 0x78, 0x00,
+  0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+  0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+  0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+  0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+  0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+  0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+  0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+  0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+  0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+  0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+  0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+  0xf7, 0x95, 0x57, 0x8c, 0x40, 0x00
+};
+
+const uint8_t kMaskRandom37_33[198] = {
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+  0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+  0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+  0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+  0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+  0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+  0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+  0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+  0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+  0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+  0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+  0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+  0xef, 0xf2, 0x1f, 0x9d, 0x78, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+  0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+  0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+  0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+  0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+  0x4c, 0x26, 0x13, 0x09, 0xc0, 0x00,
+  0x66, 0x33, 0x19, 0x9c, 0x60, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x38, 0x00,
+  0x42, 0xa1, 0x50, 0xa4, 0x28, 0x00,
+  0xa4, 0x52, 0x29, 0x0a, 0x50, 0x00,
+  0x13, 0x09, 0x84, 0xd6, 0x80, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0xd0, 0x00,
+  0x88, 0xc4, 0x62, 0x2b, 0x08, 0x00,
+  0x09, 0x04, 0x82, 0x43, 0x30, 0x00
+};
+
+const uint8_t kMaskRandom37_34[204] = {
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+  0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+  0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+  0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+  0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+  0x4c, 0x26, 0x13, 0x09, 0xc0, 0x00,
+  0x66, 0x33, 0x19, 0x9c, 0x60, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x38, 0x00,
+  0x42, 0xa1, 0x50, 0xa4, 0x28, 0x00,
+  0xa4, 0x52, 0x29, 0x0a, 0x50, 0x00,
+  0x13, 0x09, 0x84, 0xd6, 0x80, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0xd0, 0x00,
+  0x88, 0xc4, 0x62, 0x2b, 0x08, 0x00,
+  0x09, 0x04, 0x82, 0x43, 0x30, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+  0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+  0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+  0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+  0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+  0x8c, 0x46, 0x23, 0x08, 0xc0, 0x00,
+  0x47, 0x23, 0x91, 0xc6, 0x60, 0x00,
+  0x81, 0xc0, 0xe0, 0x62, 0x18, 0x00,
+  0x12, 0x89, 0x44, 0xa1, 0x88, 0x00,
+  0x58, 0x2c, 0x16, 0x05, 0x10, 0x00,
+  0x28, 0x94, 0x4a, 0x32, 0x80, 0x00,
+  0x34, 0x1a, 0x0d, 0x18, 0x20, 0x00,
+  0xef, 0xf2, 0x1f, 0x9d, 0x78, 0x00,
+  0x31, 0x9c, 0xfb, 0x37, 0xc0, 0x00
+};
+
+const uint8_t kMaskRandom37_35[210] = {
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+  0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+  0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+  0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+  0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+  0x4c, 0x26, 0x13, 0x09, 0xc0, 0x00,
+  0x66, 0x33, 0x19, 0x9c, 0x60, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x38, 0x00,
+  0x42, 0xa1, 0x50, 0xa4, 0x28, 0x00,
+  0xa4, 0x52, 0x29, 0x0a, 0x50, 0x00,
+  0x13, 0x09, 0x84, 0xd6, 0x80, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0xd0, 0x00,
+  0x88, 0xc4, 0x62, 0x2b, 0x08, 0x00,
+  0x09, 0x04, 0x82, 0x43, 0x30, 0x00,
+  0x4c, 0x26, 0x13, 0x09, 0xc0, 0x00,
+  0x66, 0x33, 0x19, 0x9c, 0x60, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x38, 0x00,
+  0x42, 0xa1, 0x50, 0xa4, 0x28, 0x00,
+  0xa4, 0x52, 0x29, 0x0a, 0x50, 0x00,
+  0x13, 0x09, 0x84, 0xd6, 0x80, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0xd0, 0x00,
+  0x88, 0xc4, 0x62, 0x2b, 0x08, 0x00,
+  0x09, 0x04, 0x82, 0x43, 0x30, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+  0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+  0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+  0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+  0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+  0xd0, 0x03, 0x54, 0x65, 0xc8, 0x00
+};
+
+const uint8_t kMaskRandom37_36[216] = {
+  0x4c, 0x26, 0x13, 0x09, 0xc0, 0x00,
+  0x66, 0x33, 0x19, 0x9c, 0x60, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x38, 0x00,
+  0x42, 0xa1, 0x50, 0xa4, 0x28, 0x00,
+  0xa4, 0x52, 0x29, 0x0a, 0x50, 0x00,
+  0x13, 0x09, 0x84, 0xd6, 0x80, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0xd0, 0x00,
+  0x88, 0xc4, 0x62, 0x2b, 0x08, 0x00,
+  0x09, 0x04, 0x82, 0x43, 0x30, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+  0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+  0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+  0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+  0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+  0xd0, 0x03, 0x54, 0x65, 0xc8, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+  0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+  0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+  0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+  0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+  0x4c, 0x26, 0x13, 0x09, 0xc0, 0x00,
+  0x66, 0x33, 0x19, 0x9c, 0x60, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x38, 0x00,
+  0x42, 0xa1, 0x50, 0xa4, 0x28, 0x00,
+  0xa4, 0x52, 0x29, 0x0a, 0x50, 0x00,
+  0x13, 0x09, 0x84, 0xd6, 0x80, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0xd0, 0x00,
+  0x88, 0xc4, 0x62, 0x2b, 0x08, 0x00,
+  0x09, 0x04, 0x82, 0x43, 0x30, 0x00,
+  0xc3, 0xc7, 0xce, 0xd8, 0x50, 0x00
+};
+
+const uint8_t kMaskRandom37_37[222] = {
+  0x4c, 0x26, 0x13, 0x09, 0xc0, 0x00,
+  0x66, 0x33, 0x19, 0x9c, 0x60, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x38, 0x00,
+  0x42, 0xa1, 0x50, 0xa4, 0x28, 0x00,
+  0xa4, 0x52, 0x29, 0x0a, 0x50, 0x00,
+  0x13, 0x09, 0x84, 0xd6, 0x80, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0xd0, 0x00,
+  0x88, 0xc4, 0x62, 0x2b, 0x08, 0x00,
+  0x09, 0x04, 0x82, 0x43, 0x30, 0x00,
+  0x2c, 0x16, 0x0b, 0x05, 0x80, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x30, 0x00,
+  0xc0, 0xe0, 0x70, 0x34, 0x08, 0x00,
+  0x06, 0x83, 0x41, 0xa0, 0xa8, 0x00,
+  0xc8, 0x64, 0x32, 0x03, 0x10, 0x00,
+  0x45, 0x22, 0x91, 0x58, 0x40, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0x50, 0x00,
+  0xa2, 0x51, 0x28, 0x8a, 0x08, 0x00,
+  0xd0, 0x03, 0x54, 0x65, 0xc8, 0x00,
+  0x4c, 0x26, 0x13, 0x09, 0xc0, 0x00,
+  0x66, 0x33, 0x19, 0x9c, 0x60, 0x00,
+  0x91, 0x48, 0xa4, 0x50, 0x38, 0x00,
+  0x42, 0xa1, 0x50, 0xa4, 0x28, 0x00,
+  0xa4, 0x52, 0x29, 0x0a, 0x50, 0x00,
+  0x13, 0x09, 0x84, 0xd6, 0x80, 0x00,
+  0x30, 0x98, 0x4c, 0x24, 0xd0, 0x00,
+  0x88, 0xc4, 0x62, 0x2b, 0x08, 0x00,
+  0x09, 0x04, 0x82, 0x43, 0x30, 0x00,
+  0x2c, 0x16, 0x13, 0x09, 0x80, 0x00,
+  0x91, 0x48, 0x99, 0x8a, 0x20, 0x00,
+  0xc0, 0xe0, 0x64, 0x54, 0x08, 0x00,
+  0x06, 0x83, 0x50, 0xa0, 0x98, 0x00,
+  0xc8, 0x64, 0x29, 0x00, 0x70, 0x00,
+  0x45, 0x22, 0x84, 0xd0, 0xc0, 0x00,
+  0x30, 0x98, 0x4c, 0x25, 0x20, 0x00,
+  0xa2, 0x51, 0x22, 0x28, 0x48, 0x00,
+  0xd0, 0x03, 0x42, 0x53, 0x00, 0x00,
+  0xee, 0xf5, 0xb3, 0x66, 0x10, 0x00
+};
+
+const uint8_t kMaskRandom37_4[24] = {
+  0xec, 0x76, 0x3b, 0x1c, 0xc0, 0x00,
+  0x67, 0x33, 0x99, 0xc6, 0x70, 0x00,
+  0xb1, 0xd8, 0xec, 0x73, 0x18, 0x00,
+  0x5a, 0xad, 0x56, 0xa5, 0xa8, 0x00
+};
+
+const uint8_t kMaskRandom37_5[30] = {
+  0x4c, 0xa6, 0x53, 0x39, 0xc0, 0x00,
+  0x66, 0x33, 0x19, 0x8c, 0x70, 0x00,
+  0x19, 0xd0, 0xe8, 0x73, 0x18, 0x00,
+  0x9c, 0x89, 0x64, 0xa9, 0xa8, 0x00,
+  0xe3, 0x4c, 0x2e, 0x26, 0x60, 0x00
+};
+
+const uint8_t kMaskRandom37_6[36] = {
+  0xcc, 0x26, 0x13, 0x0d, 0x80, 0x00,
+  0x45, 0x62, 0x91, 0x5a, 0x20, 0x00,
+  0xb0, 0x98, 0x4c, 0x34, 0x18, 0x00,
+  0x8a, 0x85, 0x62, 0xa0, 0xa8, 0x00,
+  0x29, 0x53, 0x09, 0x82, 0xd0, 0x00,
+  0xa6, 0x0a, 0xa5, 0x51, 0x40, 0x00
+};
+
+const uint8_t kMaskRandom37_7[42] = {
+  0x44, 0xa2, 0x71, 0x28, 0xc0, 0x00,
+  0x23, 0x11, 0x88, 0xc6, 0x60, 0x00,
+  0x91, 0x48, 0xa4, 0x47, 0x08, 0x00,
+  0x0a, 0x85, 0x52, 0xa0, 0xa8, 0x00,
+  0x34, 0x1a, 0x0d, 0x12, 0x50, 0x00,
+  0x0b, 0x06, 0xa2, 0xd2, 0x80, 0x00,
+  0xe0, 0x64, 0x32, 0x09, 0x30, 0x00
+};
+
+const uint8_t kMaskRandom37_8[48] = {
+  0x16, 0x0b, 0x05, 0x84, 0xe0, 0x00,
+  0xc2, 0x61, 0x30, 0x91, 0x30, 0x00,
+  0x60, 0xb0, 0x58, 0x3a, 0x08, 0x00,
+  0x85, 0x42, 0xa1, 0x44, 0x98, 0x00,
+  0x4c, 0x26, 0x33, 0x08, 0x50, 0x00,
+  0x29, 0x14, 0x8a, 0x58, 0xc0, 0x00,
+  0x11, 0x88, 0xc4, 0x66, 0x30, 0x00,
+  0xb0, 0x58, 0x2c, 0x03, 0x18, 0x00
+};
+
+const uint8_t kMaskRandom37_9[54] = {
+  0x44, 0xa2, 0x51, 0x29, 0xc0, 0x00,
+  0x66, 0x26, 0x19, 0x9c, 0x20, 0x00,
+  0x90, 0x49, 0x44, 0xb0, 0x38, 0x00,
+  0x01, 0xa5, 0xb0, 0xc4, 0x28, 0x00,
+  0x0e, 0x12, 0xa3, 0x0a, 0x50, 0x00,
+  0x13, 0x0b, 0x04, 0x56, 0xc0, 0x00,
+  0x20, 0xd0, 0x48, 0x64, 0xd0, 0x00,
+  0xc2, 0x51, 0x28, 0x8b, 0x00, 0x00,
+  0x29, 0x0c, 0x86, 0x03, 0x38, 0x00
+};
+
+const uint8_t kMaskRandom38_1[6] = {
+  0xff, 0xff, 0xff, 0xff, 0xfc, 0x00
+};
+
+const uint8_t kMaskRandom38_10[60] = {
+  0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+  0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+  0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+  0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+  0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+  0xcc, 0x75, 0x19, 0x8e, 0xa0, 0x00,
+  0x2b, 0x19, 0xc5, 0x63, 0x38, 0x00,
+  0x32, 0xd2, 0x66, 0x5a, 0x4c, 0x00,
+  0x25, 0x8e, 0xa4, 0xb1, 0xd4, 0x00,
+  0x50, 0x88, 0xca, 0x11, 0x18, 0x00
+};
+
+const uint8_t kMaskRandom38_11[66] = {
+  0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+  0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+  0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+  0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+  0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+  0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+  0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+  0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+  0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+  0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+  0x89, 0x70, 0x11, 0x2e, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom38_12[72] = {
+  0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+  0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+  0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+  0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+  0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+  0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+  0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+  0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+  0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+  0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+  0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+  0x90, 0xc8, 0x92, 0x19, 0x10, 0x00
+};
+
+const uint8_t kMaskRandom38_13[78] = {
+  0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+  0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+  0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+  0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+  0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+  0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+  0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+  0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+  0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+  0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+  0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+  0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+  0x34, 0x60, 0x86, 0x8c, 0x10, 0x00
+};
+
+const uint8_t kMaskRandom38_14[84] = {
+  0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+  0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+  0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+  0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+  0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+  0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+  0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+  0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+  0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+  0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+  0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+  0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+  0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+  0x6e, 0x27, 0x6d, 0xc4, 0xec, 0x00
+};
+
+const uint8_t kMaskRandom38_15[90] = {
+  0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+  0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+  0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+  0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+  0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+  0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+  0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+  0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+  0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+  0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+  0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+  0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+  0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+  0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+  0xa2, 0x28, 0x34, 0x45, 0x04, 0x00
+};
+
+const uint8_t kMaskRandom38_16[96] = {
+  0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+  0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+  0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+  0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+  0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+  0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+  0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+  0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+  0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+  0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+  0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+  0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+  0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+  0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+  0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+  0x7e, 0x75, 0xef, 0xce, 0xbc, 0x00
+};
+
+const uint8_t kMaskRandom38_17[102] = {
+  0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+  0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+  0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+  0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+  0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+  0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+  0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+  0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+  0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+  0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+  0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+  0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+  0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+  0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+  0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+  0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+  0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00
+};
+
+const uint8_t kMaskRandom38_18[108] = {
+  0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+  0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+  0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+  0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+  0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+  0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+  0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+  0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+  0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00,
+  0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+  0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+  0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+  0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+  0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+  0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+  0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+  0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+  0x51, 0x97, 0x2a, 0x32, 0xe4, 0x00
+};
+
+const uint8_t kMaskRandom38_19[114] = {
+  0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+  0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+  0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+  0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+  0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+  0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+  0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+  0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+  0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00,
+  0x4c, 0x26, 0x09, 0x84, 0xc0, 0x00,
+  0x66, 0x28, 0x8c, 0xc5, 0x10, 0x00,
+  0x91, 0x50, 0x32, 0x2a, 0x04, 0x00,
+  0x42, 0x82, 0x68, 0x50, 0x4c, 0x00,
+  0xa4, 0x01, 0xd4, 0x80, 0x38, 0x00,
+  0x13, 0x43, 0x02, 0x68, 0x60, 0x00,
+  0x30, 0x94, 0x86, 0x12, 0x90, 0x00,
+  0x88, 0xa1, 0x31, 0x14, 0x24, 0x00,
+  0x09, 0x4c, 0x01, 0x29, 0x80, 0x00,
+  0xcd, 0x98, 0x59, 0xb3, 0x08, 0x00
+};
+
+const uint8_t kMaskRandom38_2[12] = {
+  0xce, 0x77, 0x19, 0xce, 0xe0, 0x00,
+  0x39, 0xcc, 0xe7, 0x39, 0x9c, 0x00
+};
+
+const uint8_t kMaskRandom38_20[120] = {
+  0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+  0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+  0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+  0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+  0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+  0xcc, 0x75, 0x19, 0x8e, 0xa0, 0x00,
+  0x2b, 0x19, 0xc5, 0x63, 0x38, 0x00,
+  0x32, 0xd2, 0x66, 0x5a, 0x4c, 0x00,
+  0x25, 0x8e, 0xa4, 0xb1, 0xd4, 0x00,
+  0x50, 0x88, 0xca, 0x11, 0x18, 0x00,
+  0x44, 0xa7, 0x08, 0x94, 0xe0, 0x00,
+  0x66, 0x70, 0x8c, 0xce, 0x10, 0x00,
+  0x12, 0xc0, 0xe2, 0x58, 0x1c, 0x00,
+  0xc3, 0x10, 0xb8, 0x62, 0x14, 0x00,
+  0x8c, 0x29, 0x51, 0x85, 0x28, 0x00,
+  0x11, 0x5b, 0x02, 0x2b, 0x60, 0x00,
+  0x21, 0x93, 0x44, 0x32, 0x68, 0x00,
+  0xa2, 0x2c, 0x14, 0x45, 0x80, 0x00,
+  0x18, 0x0c, 0xe3, 0x01, 0x9c, 0x00,
+  0xe6, 0xbc, 0x88, 0xe3, 0x78, 0x00
+};
+
+const uint8_t kMaskRandom38_21[126] = {
+  0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+  0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+  0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+  0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+  0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+  0xcc, 0x75, 0x19, 0x8e, 0xa0, 0x00,
+  0x2b, 0x19, 0xc5, 0x63, 0x38, 0x00,
+  0x32, 0xd2, 0x66, 0x5a, 0x4c, 0x00,
+  0x25, 0x8e, 0xa4, 0xb1, 0xd4, 0x00,
+  0x50, 0x88, 0xca, 0x11, 0x18, 0x00,
+  0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+  0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+  0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+  0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+  0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+  0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+  0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+  0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+  0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+  0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+  0x89, 0x70, 0x11, 0x2e, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom38_22[132] = {
+  0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+  0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+  0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+  0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+  0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+  0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+  0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+  0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+  0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+  0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+  0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+  0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+  0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+  0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+  0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+  0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+  0xcc, 0x75, 0x19, 0x8e, 0xa0, 0x00,
+  0x2b, 0x19, 0xc5, 0x63, 0x38, 0x00,
+  0x32, 0xd2, 0x66, 0x5a, 0x4c, 0x00,
+  0x25, 0x8e, 0xa4, 0xb1, 0xd4, 0x00,
+  0x50, 0x88, 0xca, 0x11, 0x18, 0x00,
+  0x0c, 0x3c, 0x48, 0x3d, 0x58, 0x00
+};
+
+const uint8_t kMaskRandom38_23[138] = {
+  0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+  0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+  0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+  0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+  0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+  0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+  0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+  0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+  0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+  0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+  0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+  0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+  0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+  0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+  0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+  0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+  0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+  0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+  0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+  0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+  0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+  0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+  0x90, 0xc8, 0x92, 0x19, 0x10, 0x00
+};
+
+const uint8_t kMaskRandom38_24[144] = {
+  0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+  0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+  0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+  0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+  0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+  0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+  0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+  0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+  0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+  0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+  0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+  0x90, 0xc8, 0x92, 0x19, 0x10, 0x00,
+  0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+  0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+  0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+  0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+  0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+  0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+  0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+  0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+  0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+  0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+  0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+  0x93, 0xc8, 0xb3, 0xbe, 0x5c, 0x00
+};
+
+const uint8_t kMaskRandom38_25[150] = {
+  0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+  0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+  0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+  0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+  0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+  0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+  0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+  0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+  0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+  0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+  0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+  0x90, 0xc8, 0x92, 0x19, 0x10, 0x00,
+  0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+  0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+  0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+  0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+  0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+  0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+  0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+  0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+  0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+  0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+  0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+  0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+  0x34, 0x60, 0x86, 0x8c, 0x10, 0x00
+};
+
+const uint8_t kMaskRandom38_26[156] = {
+  0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+  0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+  0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+  0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+  0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+  0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+  0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+  0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+  0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+  0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+  0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+  0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+  0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+  0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+  0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+  0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+  0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+  0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+  0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+  0x8c, 0xe3, 0x11, 0x9c, 0x60, 0x00,
+  0x27, 0x11, 0xc4, 0xe2, 0x38, 0x00,
+  0x32, 0x8d, 0x26, 0x51, 0xa4, 0x00,
+  0x61, 0x92, 0x6c, 0x32, 0x4c, 0x00,
+  0x5c, 0x38, 0x8b, 0x87, 0x10, 0x00,
+  0x90, 0xc8, 0x92, 0x19, 0x10, 0x00,
+  0x4b, 0xab, 0xfc, 0xe6, 0xe8, 0x00
+};
+
+const uint8_t kMaskRandom38_27[162] = {
+  0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+  0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+  0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+  0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+  0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+  0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+  0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+  0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+  0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+  0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+  0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+  0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+  0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+  0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+  0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+  0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+  0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+  0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+  0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+  0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+  0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+  0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+  0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+  0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+  0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+  0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+  0x6e, 0x27, 0x6d, 0xc4, 0xec, 0x00
+};
+
+const uint8_t kMaskRandom38_28[168] = {
+  0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+  0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+  0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+  0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+  0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+  0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+  0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+  0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+  0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+  0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+  0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+  0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+  0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+  0x6e, 0x27, 0x6d, 0xc4, 0xec, 0x00,
+  0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+  0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+  0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+  0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+  0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+  0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+  0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+  0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+  0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+  0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+  0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+  0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+  0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+  0x70, 0x1b, 0x5b, 0x2c, 0x0c, 0x00
+};
+
+const uint8_t kMaskRandom38_29[174] = {
+  0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+  0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+  0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+  0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+  0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+  0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+  0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+  0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+  0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+  0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+  0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+  0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+  0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+  0x6e, 0x27, 0x6d, 0xc4, 0xec, 0x00,
+  0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+  0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+  0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+  0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+  0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+  0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+  0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+  0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+  0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+  0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+  0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+  0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+  0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+  0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+  0xa2, 0x28, 0x34, 0x45, 0x04, 0x00
+};
+
+const uint8_t kMaskRandom38_3[18] = {
+  0xcc, 0x67, 0x19, 0x8c, 0xe0, 0x00,
+  0x27, 0x2c, 0xc4, 0xe5, 0x98, 0x00,
+  0x92, 0xd2, 0x72, 0x5a, 0x4c, 0x00
+};
+
+const uint8_t kMaskRandom38_30[180] = {
+  0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+  0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+  0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+  0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+  0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+  0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+  0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+  0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+  0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+  0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+  0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+  0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+  0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+  0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+  0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+  0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+  0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+  0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+  0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+  0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+  0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+  0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+  0x84, 0x87, 0x10, 0x90, 0xe0, 0x00,
+  0x27, 0x19, 0x84, 0xe3, 0x30, 0x00,
+  0x51, 0x88, 0x6a, 0x31, 0x0c, 0x00,
+  0x1a, 0x22, 0xa3, 0x44, 0x54, 0x00,
+  0x68, 0x44, 0x4d, 0x08, 0x88, 0x00,
+  0x89, 0x70, 0x11, 0x2e, 0x00, 0x00,
+  0x6e, 0x27, 0x6d, 0xc4, 0xec, 0x00,
+  0x5b, 0x16, 0xdf, 0xb8, 0xd0, 0x00
+};
+
+const uint8_t kMaskRandom38_31[186] = {
+  0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+  0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+  0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+  0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+  0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+  0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+  0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+  0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+  0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+  0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+  0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+  0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+  0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+  0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+  0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+  0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+  0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+  0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+  0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+  0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+  0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+  0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+  0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+  0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+  0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+  0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+  0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+  0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+  0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+  0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+  0x7e, 0x75, 0xef, 0xce, 0xbc, 0x00
+};
+
+const uint8_t kMaskRandom38_32[192] = {
+  0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+  0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+  0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+  0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+  0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+  0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+  0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+  0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+  0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+  0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+  0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+  0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+  0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+  0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+  0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+  0x7e, 0x75, 0xef, 0xce, 0xbc, 0x00,
+  0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+  0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+  0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+  0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+  0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+  0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+  0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+  0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+  0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+  0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+  0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+  0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+  0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+  0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+  0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+  0x33, 0x10, 0x02, 0x4e, 0x54, 0x00
+};
+
+const uint8_t kMaskRandom38_33[198] = {
+  0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+  0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+  0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+  0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+  0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+  0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+  0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+  0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+  0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+  0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+  0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+  0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+  0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+  0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+  0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+  0x7e, 0x75, 0xef, 0xce, 0xbc, 0x00,
+  0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+  0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+  0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+  0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+  0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+  0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+  0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+  0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+  0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+  0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+  0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+  0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+  0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+  0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+  0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+  0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+  0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00
+};
+
+const uint8_t kMaskRandom38_34[204] = {
+  0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+  0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+  0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+  0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+  0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+  0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+  0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+  0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+  0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+  0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+  0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+  0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+  0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+  0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+  0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+  0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+  0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00,
+  0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+  0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+  0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+  0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+  0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+  0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+  0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+  0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+  0x8c, 0x23, 0x11, 0x84, 0x60, 0x00,
+  0x47, 0x19, 0x88, 0xe3, 0x30, 0x00,
+  0x81, 0x88, 0x70, 0x31, 0x0c, 0x00,
+  0x12, 0x86, 0x22, 0x50, 0xc4, 0x00,
+  0x58, 0x14, 0x4b, 0x02, 0x88, 0x00,
+  0x28, 0xca, 0x05, 0x19, 0x40, 0x00,
+  0x34, 0x60, 0x86, 0x8c, 0x10, 0x00,
+  0x7e, 0x75, 0xef, 0xce, 0xbc, 0x00,
+  0x91, 0x48, 0xfa, 0xf0, 0xd8, 0x00
+};
+
+const uint8_t kMaskRandom38_35[210] = {
+  0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+  0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+  0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+  0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+  0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+  0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+  0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+  0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+  0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+  0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+  0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+  0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+  0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+  0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+  0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+  0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+  0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00,
+  0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+  0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+  0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+  0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+  0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+  0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+  0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+  0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+  0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00,
+  0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+  0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+  0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+  0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+  0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+  0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+  0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+  0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+  0x51, 0x97, 0x2a, 0x32, 0xe4, 0x00
+};
+
+const uint8_t kMaskRandom38_36[216] = {
+  0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+  0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+  0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+  0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+  0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+  0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+  0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+  0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+  0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00,
+  0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+  0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+  0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+  0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+  0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+  0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+  0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+  0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+  0x51, 0x97, 0x2a, 0x32, 0xe4, 0x00,
+  0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+  0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+  0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+  0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+  0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+  0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+  0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+  0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+  0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+  0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+  0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+  0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+  0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+  0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+  0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+  0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+  0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00,
+  0x80, 0x95, 0xc2, 0x68, 0x28, 0x00
+};
+
+const uint8_t kMaskRandom38_37[222] = {
+  0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+  0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+  0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+  0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+  0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+  0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+  0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+  0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+  0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00,
+  0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+  0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+  0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+  0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+  0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+  0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+  0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+  0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+  0x51, 0x97, 0x2a, 0x32, 0xe4, 0x00,
+  0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+  0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+  0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+  0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+  0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+  0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+  0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+  0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+  0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00,
+  0x4c, 0x26, 0x09, 0x84, 0xc0, 0x00,
+  0x66, 0x28, 0x8c, 0xc5, 0x10, 0x00,
+  0x91, 0x50, 0x32, 0x2a, 0x04, 0x00,
+  0x42, 0x82, 0x68, 0x50, 0x4c, 0x00,
+  0xa4, 0x01, 0xd4, 0x80, 0x38, 0x00,
+  0x13, 0x43, 0x02, 0x68, 0x60, 0x00,
+  0x30, 0x94, 0x86, 0x12, 0x90, 0x00,
+  0x88, 0xa1, 0x31, 0x14, 0x24, 0x00,
+  0x09, 0x4c, 0x01, 0x29, 0x80, 0x00,
+  0xcd, 0x98, 0x59, 0xb3, 0x08, 0x00
+};
+
+const uint8_t kMaskRandom38_38[228] = {
+  0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+  0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+  0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+  0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+  0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+  0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+  0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+  0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+  0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00,
+  0x4c, 0x26, 0x09, 0x84, 0xc0, 0x00,
+  0x66, 0x28, 0x8c, 0xc5, 0x10, 0x00,
+  0x91, 0x50, 0x32, 0x2a, 0x04, 0x00,
+  0x42, 0x82, 0x68, 0x50, 0x4c, 0x00,
+  0xa4, 0x01, 0xd4, 0x80, 0x38, 0x00,
+  0x13, 0x43, 0x02, 0x68, 0x60, 0x00,
+  0x30, 0x94, 0x86, 0x12, 0x90, 0x00,
+  0x88, 0xa1, 0x31, 0x14, 0x24, 0x00,
+  0x09, 0x4c, 0x01, 0x29, 0x80, 0x00,
+  0xcd, 0x98, 0x59, 0xb3, 0x08, 0x00,
+  0x4c, 0x27, 0x09, 0x84, 0xe0, 0x00,
+  0x66, 0x71, 0x8c, 0xce, 0x30, 0x00,
+  0x91, 0x40, 0xf2, 0x28, 0x1c, 0x00,
+  0x42, 0x90, 0xa8, 0x52, 0x14, 0x00,
+  0xa4, 0x29, 0x54, 0x85, 0x28, 0x00,
+  0x13, 0x5a, 0x02, 0x6b, 0x40, 0x00,
+  0x30, 0x93, 0x46, 0x12, 0x68, 0x00,
+  0x88, 0xac, 0x31, 0x15, 0x84, 0x00,
+  0x09, 0x0c, 0xc1, 0x21, 0x98, 0x00,
+  0x2c, 0x16, 0x05, 0x82, 0xc0, 0x00,
+  0x91, 0x40, 0xd2, 0x28, 0x18, 0x00,
+  0xc0, 0xd0, 0x38, 0x1a, 0x04, 0x00,
+  0x06, 0x82, 0xa0, 0xd0, 0x54, 0x00,
+  0xc8, 0x0c, 0x59, 0x01, 0x88, 0x00,
+  0x45, 0x61, 0x08, 0xac, 0x20, 0x00,
+  0x30, 0x91, 0x46, 0x12, 0x28, 0x00,
+  0xa2, 0x28, 0x34, 0x45, 0x04, 0x00,
+  0x51, 0x97, 0x2a, 0x32, 0xe4, 0x00,
+  0x8c, 0xed, 0x11, 0x5f, 0x24, 0x00
+};
+
+const uint8_t kMaskRandom38_4[24] = {
+  0xec, 0x73, 0x1d, 0x8e, 0x60, 0x00,
+  0x67, 0x19, 0xcc, 0xe3, 0x38, 0x00,
+  0xb1, 0xcc, 0x76, 0x39, 0x8c, 0x00,
+  0x5a, 0x96, 0xab, 0x52, 0xd4, 0x00
+};
+
+const uint8_t kMaskRandom38_5[30] = {
+  0x4c, 0xe7, 0x09, 0x9c, 0xe0, 0x00,
+  0x66, 0x31, 0xcc, 0xc6, 0x38, 0x00,
+  0xa1, 0xcc, 0x74, 0x39, 0x8c, 0x00,
+  0x92, 0xa6, 0xb2, 0x54, 0xd4, 0x00,
+  0xb8, 0x99, 0x97, 0x13, 0x30, 0x00
+};
+
+const uint8_t kMaskRandom38_6[36] = {
+  0x4c, 0x36, 0x09, 0x86, 0xc0, 0x00,
+  0x45, 0x68, 0x88, 0xad, 0x10, 0x00,
+  0x30, 0xd0, 0x66, 0x1a, 0x0c, 0x00,
+  0x8a, 0x82, 0xb1, 0x50, 0x54, 0x00,
+  0x26, 0x0b, 0x44, 0xc1, 0x68, 0x00,
+  0x95, 0x45, 0x12, 0xa8, 0xa0, 0x00
+};
+
+const uint8_t kMaskRandom38_7[42] = {
+  0xc4, 0xa3, 0x18, 0x94, 0x60, 0x00,
+  0x23, 0x19, 0x84, 0x63, 0x30, 0x00,
+  0x91, 0x1c, 0x32, 0x23, 0x84, 0x00,
+  0x4a, 0x82, 0xa9, 0x50, 0x54, 0x00,
+  0x34, 0x49, 0x46, 0x89, 0x28, 0x00,
+  0x8b, 0x4a, 0x11, 0x69, 0x40, 0x00,
+  0xc8, 0x24, 0xd9, 0x04, 0x98, 0x00
+};
+
+const uint8_t kMaskRandom38_8[48] = {
+  0x16, 0x13, 0x82, 0xc2, 0x70, 0x00,
+  0xc2, 0x44, 0xd8, 0x48, 0x98, 0x00,
+  0x60, 0xe8, 0x2c, 0x1d, 0x04, 0x00,
+  0x85, 0x12, 0x70, 0xa2, 0x4c, 0x00,
+  0xcc, 0x21, 0x59, 0x84, 0x28, 0x00,
+  0x29, 0x63, 0x05, 0x2c, 0x60, 0x00,
+  0x11, 0x98, 0xc2, 0x33, 0x18, 0x00,
+  0xb0, 0x0c, 0x76, 0x01, 0x8c, 0x00
+};
+
+const uint8_t kMaskRandom38_9[54] = {
+  0x44, 0xa7, 0x08, 0x94, 0xe0, 0x00,
+  0x66, 0x70, 0x8c, 0xce, 0x10, 0x00,
+  0x12, 0xc0, 0xe2, 0x58, 0x1c, 0x00,
+  0xc3, 0x10, 0xb8, 0x62, 0x14, 0x00,
+  0x8c, 0x29, 0x51, 0x85, 0x28, 0x00,
+  0x11, 0x5b, 0x02, 0x2b, 0x60, 0x00,
+  0x21, 0x93, 0x44, 0x32, 0x68, 0x00,
+  0xa2, 0x2c, 0x14, 0x45, 0x80, 0x00,
+  0x18, 0x0c, 0xe3, 0x01, 0x9c, 0x00
+};
+
+const uint8_t kMaskRandom39_1[6] = {
+  0xff, 0xff, 0xff, 0xff, 0xfe, 0x00
+};
+
+const uint8_t kMaskRandom39_10[60] = {
+  0x8c, 0xe3, 0x09, 0x82, 0x60, 0x00,
+  0x27, 0x11, 0xca, 0x22, 0x88, 0x00,
+  0x32, 0x8d, 0x34, 0x0d, 0x02, 0x00,
+  0x61, 0x92, 0x60, 0x98, 0x26, 0x00,
+  0x5c, 0x38, 0x80, 0x70, 0x1c, 0x00,
+  0xcc, 0x75, 0x10, 0xc4, 0x30, 0x00,
+  0x2b, 0x19, 0xc5, 0x21, 0x48, 0x00,
+  0x32, 0xd2, 0x68, 0x4a, 0x12, 0x00,
+  0x25, 0x8e, 0xb3, 0x04, 0xc0, 0x00,
+  0x50, 0x88, 0xc6, 0x11, 0x84, 0x00
+};
+
+const uint8_t kMaskRandom39_11[66] = {
+  0x8c, 0xe3, 0x18, 0xc6, 0x30, 0x00,
+  0x27, 0x11, 0xc4, 0x71, 0x1c, 0x00,
+  0x32, 0x8d, 0x23, 0x48, 0xd2, 0x00,
+  0x61, 0x92, 0x64, 0x99, 0x26, 0x00,
+  0x5c, 0x38, 0x8e, 0x23, 0x88, 0x00,
+  0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+  0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+  0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+  0x89, 0x70, 0x1c, 0x07, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom39_12[72] = {
+  0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+  0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+  0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+  0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+  0x8c, 0xe3, 0x18, 0xc6, 0x30, 0x00,
+  0x27, 0x11, 0xc4, 0x71, 0x1c, 0x00,
+  0x32, 0x8d, 0x23, 0x48, 0xd2, 0x00,
+  0x61, 0x92, 0x64, 0x99, 0x26, 0x00,
+  0x5c, 0x38, 0x8e, 0x23, 0x88, 0x00,
+  0x90, 0xc8, 0x9e, 0xbb, 0x88, 0x00
+};
+
+const uint8_t kMaskRandom39_13[78] = {
+  0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+  0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+  0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+  0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+  0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+  0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+  0x34, 0x60, 0x98, 0x26, 0x08, 0x00
+};
+
+const uint8_t kMaskRandom39_14[84] = {
+  0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+  0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+  0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+  0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+  0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+  0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+  0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+  0x6e, 0x27, 0x6a, 0xc7, 0xc4, 0x00
+};
+
+const uint8_t kMaskRandom39_15[90] = {
+  0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+  0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+  0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+  0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+  0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+  0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+  0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+  0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+  0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+  0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+  0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00
+};
+
+const uint8_t kMaskRandom39_16[96] = {
+  0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+  0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+  0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+  0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+  0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+  0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+  0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+  0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+  0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+  0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+  0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+  0x7e, 0x75, 0xe5, 0x03, 0x8c, 0x00
+};
+
+const uint8_t kMaskRandom39_17[102] = {
+  0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+  0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+  0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+  0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+  0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+  0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+  0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+  0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+  0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+  0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+  0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+  0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+  0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+  0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+  0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+  0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+  0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00
+};
+
+const uint8_t kMaskRandom39_18[108] = {
+  0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+  0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+  0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+  0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+  0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+  0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+  0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+  0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+  0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+  0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+  0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+  0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+  0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+  0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+  0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+  0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+  0x51, 0x97, 0x24, 0x2f, 0x7e, 0x00
+};
+
+const uint8_t kMaskRandom39_19[114] = {
+  0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+  0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+  0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+  0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+  0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+  0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+  0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+  0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+  0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x4c, 0x26, 0x09, 0x82, 0x60, 0x00,
+  0x66, 0x28, 0x8a, 0x22, 0x88, 0x00,
+  0x91, 0x50, 0x34, 0x0d, 0x02, 0x00,
+  0x42, 0x82, 0x60, 0x98, 0x26, 0x00,
+  0xa4, 0x01, 0xc0, 0x70, 0x1c, 0x00,
+  0x13, 0x43, 0x10, 0xc4, 0x30, 0x00,
+  0x30, 0x94, 0x85, 0x21, 0x48, 0x00,
+  0x88, 0xa1, 0x28, 0x4a, 0x12, 0x00,
+  0x09, 0x4c, 0x13, 0x04, 0xc0, 0x00,
+  0xcd, 0x98, 0x46, 0x11, 0x84, 0x00
+};
+
+const uint8_t kMaskRandom39_2[12] = {
+  0xce, 0x77, 0x1d, 0xc7, 0x70, 0x00,
+  0x39, 0xcc, 0xf3, 0x3c, 0xce, 0x00
+};
+
+const uint8_t kMaskRandom39_20[120] = {
+  0x8c, 0xe3, 0x09, 0x82, 0x60, 0x00,
+  0x27, 0x11, 0xca, 0x22, 0x88, 0x00,
+  0x32, 0x8d, 0x34, 0x0d, 0x02, 0x00,
+  0x61, 0x92, 0x60, 0x98, 0x26, 0x00,
+  0x5c, 0x38, 0x80, 0x70, 0x1c, 0x00,
+  0xcc, 0x75, 0x10, 0xc4, 0x30, 0x00,
+  0x2b, 0x19, 0xc5, 0x21, 0x48, 0x00,
+  0x32, 0xd2, 0x68, 0x4a, 0x12, 0x00,
+  0x25, 0x8e, 0xb3, 0x04, 0xc0, 0x00,
+  0x50, 0x88, 0xc6, 0x11, 0x84, 0x00,
+  0x44, 0xa7, 0x09, 0xc2, 0x70, 0x00,
+  0x66, 0x70, 0x8c, 0x47, 0x18, 0x00,
+  0x12, 0xc0, 0xf0, 0x3c, 0x0e, 0x00,
+  0xc3, 0x10, 0xbc, 0x29, 0x0a, 0x00,
+  0x8c, 0x29, 0x42, 0x72, 0x94, 0x00,
+  0x11, 0x5b, 0x16, 0x85, 0xa0, 0x00,
+  0x21, 0x93, 0x44, 0xd1, 0x34, 0x00,
+  0xa2, 0x2c, 0x0b, 0x0a, 0xc2, 0x00,
+  0x18, 0x0c, 0xe9, 0x30, 0xca, 0x00,
+  0x0d, 0xba, 0x52, 0x38, 0xbc, 0x00
+};
+
+const uint8_t kMaskRandom39_21[126] = {
+  0x8c, 0xe3, 0x09, 0x82, 0x60, 0x00,
+  0x27, 0x11, 0xca, 0x22, 0x88, 0x00,
+  0x32, 0x8d, 0x34, 0x0d, 0x02, 0x00,
+  0x61, 0x92, 0x60, 0x98, 0x26, 0x00,
+  0x5c, 0x38, 0x80, 0x70, 0x1c, 0x00,
+  0xcc, 0x75, 0x10, 0xc4, 0x30, 0x00,
+  0x2b, 0x19, 0xc5, 0x21, 0x48, 0x00,
+  0x32, 0xd2, 0x68, 0x4a, 0x12, 0x00,
+  0x25, 0x8e, 0xb3, 0x04, 0xc0, 0x00,
+  0x50, 0x88, 0xc6, 0x11, 0x84, 0x00,
+  0x8c, 0xe3, 0x18, 0xc6, 0x30, 0x00,
+  0x27, 0x11, 0xc4, 0x71, 0x1c, 0x00,
+  0x32, 0x8d, 0x23, 0x48, 0xd2, 0x00,
+  0x61, 0x92, 0x64, 0x99, 0x26, 0x00,
+  0x5c, 0x38, 0x8e, 0x23, 0x88, 0x00,
+  0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+  0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+  0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+  0x89, 0x70, 0x1c, 0x07, 0x00, 0x00
+};
+
+const uint8_t kMaskRandom39_22[132] = {
+  0x8c, 0xe3, 0x18, 0xc6, 0x30, 0x00,
+  0x27, 0x11, 0xc4, 0x71, 0x1c, 0x00,
+  0x32, 0x8d, 0x23, 0x48, 0xd2, 0x00,
+  0x61, 0x92, 0x64, 0x99, 0x26, 0x00,
+  0x5c, 0x38, 0x8e, 0x23, 0x88, 0x00,
+  0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+  0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+  0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+  0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+  0x8c, 0xe3, 0x09, 0x82, 0x60, 0x00,
+  0x27, 0x11, 0xca, 0x22, 0x88, 0x00,
+  0x32, 0x8d, 0x34, 0x0d, 0x02, 0x00,
+  0x61, 0x92, 0x60, 0x98, 0x26, 0x00,
+  0x5c, 0x38, 0x80, 0x70, 0x1c, 0x00,
+  0xcc, 0x75, 0x10, 0xc4, 0x30, 0x00,
+  0x2b, 0x19, 0xc5, 0x21, 0x48, 0x00,
+  0x32, 0xd2, 0x68, 0x4a, 0x12, 0x00,
+  0x25, 0x8e, 0xb3, 0x04, 0xc0, 0x00,
+  0x50, 0x88, 0xc6, 0x11, 0x84, 0x00,
+  0xfc, 0x5a, 0xb2, 0x13, 0x12, 0x00
+};
+
+const uint8_t kMaskRandom39_23[138] = {
+  0x8c, 0xe3, 0x18, 0xc6, 0x30, 0x00,
+  0x27, 0x11, 0xc4, 0x71, 0x1c, 0x00,
+  0x32, 0x8d, 0x23, 0x48, 0xd2, 0x00,
+  0x61, 0x92, 0x64, 0x99, 0x26, 0x00,
+  0x5c, 0x38, 0x8e, 0x23, 0x88, 0x00,
+  0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+  0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+  0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+  0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+  0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+  0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+  0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+  0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+  0x8c, 0xe3, 0x18, 0xc6, 0x30, 0x00,
+  0x27, 0x11, 0xc4, 0x71, 0x1c, 0x00,
+  0x32, 0x8d, 0x23, 0x48, 0xd2, 0x00,
+  0x61, 0x92, 0x64, 0x99, 0x26, 0x00,
+  0x5c, 0x38, 0x8e, 0x23, 0x88, 0x00,
+  0x90, 0xc8, 0x9e, 0xbb, 0x88, 0x00
+};
+
+const uint8_t kMaskRandom39_24[144] = {
+  0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+  0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+  0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+  0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+  0x8c, 0xe3, 0x18, 0xc6, 0x30, 0x00,
+  0x27, 0x11, 0xc4, 0x71, 0x1c, 0x00,
+  0x32, 0x8d, 0x23, 0x48, 0xd2, 0x00,
+  0x61, 0x92, 0x64, 0x99, 0x26, 0x00,
+  0x5c, 0x38, 0x8e, 0x23, 0x88, 0x00,
+  0x90, 0xc8, 0x9e, 0xbb, 0x88, 0x00,
+  0x8c, 0xe3, 0x18, 0xc6, 0x30, 0x00,
+  0x27, 0x11, 0xc4, 0x71, 0x1c, 0x00,
+  0x32, 0x8d, 0x23, 0x48, 0xd2, 0x00,
+  0x61, 0x92, 0x64, 0x99, 0x26, 0x00,
+  0x5c, 0x38, 0x8e, 0x23, 0x88, 0x00,
+  0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+  0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+  0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+  0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+  0xac, 0xbc, 0xf0, 0xff, 0x62, 0x00
+};
+
+const uint8_t kMaskRandom39_25[150] = {
+  0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+  0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+  0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+  0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+  0x8c, 0xe3, 0x18, 0xc6, 0x30, 0x00,
+  0x27, 0x11, 0xc4, 0x71, 0x1c, 0x00,
+  0x32, 0x8d, 0x23, 0x48, 0xd2, 0x00,
+  0x61, 0x92, 0x64, 0x99, 0x26, 0x00,
+  0x5c, 0x38, 0x8e, 0x23, 0x88, 0x00,
+  0x90, 0xc8, 0x9e, 0xbb, 0x88, 0x00,
+  0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+  0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+  0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+  0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+  0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+  0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+  0x34, 0x60, 0x98, 0x26, 0x08, 0x00
+};
+
+const uint8_t kMaskRandom39_26[156] = {
+  0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+  0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+  0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+  0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+  0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+  0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+  0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+  0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+  0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+  0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+  0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+  0x8c, 0xe3, 0x18, 0xc6, 0x30, 0x00,
+  0x27, 0x11, 0xc4, 0x71, 0x1c, 0x00,
+  0x32, 0x8d, 0x23, 0x48, 0xd2, 0x00,
+  0x61, 0x92, 0x64, 0x99, 0x26, 0x00,
+  0x5c, 0x38, 0x8e, 0x23, 0x88, 0x00,
+  0x90, 0xc8, 0x9e, 0xbb, 0x88, 0x00,
+  0x10, 0x17, 0x44, 0x72, 0xec, 0x00
+};
+
+const uint8_t kMaskRandom39_27[162] = {
+  0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+  0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+  0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+  0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+  0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+  0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+  0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+  0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+  0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+  0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+  0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+  0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+  0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+  0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+  0x6e, 0x27, 0x6a, 0xc7, 0xc4, 0x00
+};
+
+const uint8_t kMaskRandom39_28[168] = {
+  0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+  0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+  0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+  0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+  0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+  0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+  0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+  0x6e, 0x27, 0x6a, 0xc7, 0xc4, 0x00,
+  0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+  0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+  0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+  0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+  0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+  0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+  0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+  0x86, 0xb6, 0x04, 0xbc, 0x1e, 0x00
+};
+
+const uint8_t kMaskRandom39_29[174] = {
+  0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+  0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+  0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+  0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+  0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+  0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+  0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+  0x6e, 0x27, 0x6a, 0xc7, 0xc4, 0x00,
+  0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+  0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+  0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+  0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+  0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+  0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+  0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+  0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+  0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+  0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+  0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00
+};
+
+const uint8_t kMaskRandom39_3[18] = {
+  0xcc, 0x67, 0x19, 0xc6, 0x70, 0x00,
+  0x27, 0x2c, 0xca, 0xb2, 0xac, 0x00,
+  0x92, 0xd2, 0x76, 0x2d, 0x46, 0x00
+};
+
+const uint8_t kMaskRandom39_30[180] = {
+  0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+  0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+  0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+  0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+  0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+  0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+  0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+  0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+  0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+  0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+  0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+  0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+  0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+  0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+  0x84, 0x87, 0x01, 0xc0, 0x70, 0x00,
+  0x27, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x51, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x1a, 0x22, 0xa8, 0xaa, 0x2a, 0x00,
+  0x68, 0x44, 0x51, 0x14, 0x44, 0x00,
+  0x89, 0x70, 0x1c, 0x07, 0x00, 0x00,
+  0x6e, 0x27, 0x6a, 0xc7, 0xc4, 0x00,
+  0xb3, 0x1d, 0x13, 0x03, 0x5a, 0x00
+};
+
+const uint8_t kMaskRandom39_31[186] = {
+  0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+  0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+  0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+  0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+  0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+  0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+  0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+  0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+  0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+  0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+  0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+  0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+  0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+  0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+  0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+  0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+  0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+  0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+  0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+  0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+  0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+  0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+  0x7e, 0x75, 0xe5, 0x03, 0x8c, 0x00
+};
+
+const uint8_t kMaskRandom39_32[192] = {
+  0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+  0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+  0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+  0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+  0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+  0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+  0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+  0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+  0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+  0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+  0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+  0x7e, 0x75, 0xe5, 0x03, 0x8c, 0x00,
+  0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+  0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+  0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+  0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+  0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+  0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+  0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+  0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+  0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+  0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+  0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+  0x83, 0x1a, 0x3c, 0x2a, 0x7a, 0x00
+};
+
+const uint8_t kMaskRandom39_33[198] = {
+  0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+  0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+  0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+  0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+  0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+  0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+  0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+  0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+  0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+  0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+  0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+  0x7e, 0x75, 0xe5, 0x03, 0x8c, 0x00,
+  0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+  0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+  0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+  0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+  0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+  0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+  0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+  0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+  0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+  0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+  0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+  0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+  0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+  0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+  0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+  0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+  0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00
+};
+
+const uint8_t kMaskRandom39_34[204] = {
+  0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+  0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+  0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+  0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+  0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+  0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+  0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+  0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+  0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+  0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+  0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+  0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+  0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+  0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+  0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+  0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+  0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+  0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+  0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+  0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+  0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+  0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+  0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+  0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+  0x8c, 0x23, 0x08, 0xc2, 0x30, 0x00,
+  0x47, 0x19, 0x86, 0x61, 0x98, 0x00,
+  0x81, 0x88, 0x62, 0x18, 0x86, 0x00,
+  0x12, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0x58, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0x28, 0xca, 0x12, 0x84, 0xa0, 0x00,
+  0x34, 0x60, 0x98, 0x26, 0x08, 0x00,
+  0x7e, 0x75, 0xe5, 0x03, 0x8c, 0x00,
+  0xc6, 0xbb, 0x7e, 0xd9, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom39_35[210] = {
+  0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+  0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+  0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+  0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+  0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+  0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+  0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+  0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+  0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+  0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+  0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+  0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+  0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+  0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+  0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+  0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+  0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+  0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+  0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+  0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+  0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+  0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+  0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+  0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+  0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+  0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+  0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+  0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+  0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+  0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+  0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+  0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+  0x51, 0x97, 0x24, 0x2f, 0x7e, 0x00
+};
+
+const uint8_t kMaskRandom39_36[216] = {
+  0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+  0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+  0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+  0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+  0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+  0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+  0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+  0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+  0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+  0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+  0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+  0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+  0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+  0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+  0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+  0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+  0x51, 0x97, 0x24, 0x2f, 0x7e, 0x00,
+  0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+  0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+  0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+  0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+  0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+  0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+  0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+  0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+  0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+  0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+  0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+  0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+  0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+  0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+  0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+  0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+  0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x11, 0x78, 0xfe, 0x43, 0xd6, 0x00
+};
+
+const uint8_t kMaskRandom39_37[222] = {
+  0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+  0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+  0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+  0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+  0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+  0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+  0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+  0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+  0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+  0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+  0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+  0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+  0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+  0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+  0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+  0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+  0x51, 0x97, 0x24, 0x2f, 0x7e, 0x00,
+  0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+  0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+  0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+  0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+  0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+  0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+  0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+  0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+  0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x4c, 0x26, 0x09, 0x82, 0x60, 0x00,
+  0x66, 0x28, 0x8a, 0x22, 0x88, 0x00,
+  0x91, 0x50, 0x34, 0x0d, 0x02, 0x00,
+  0x42, 0x82, 0x60, 0x98, 0x26, 0x00,
+  0xa4, 0x01, 0xc0, 0x70, 0x1c, 0x00,
+  0x13, 0x43, 0x10, 0xc4, 0x30, 0x00,
+  0x30, 0x94, 0x85, 0x21, 0x48, 0x00,
+  0x88, 0xa1, 0x28, 0x4a, 0x12, 0x00,
+  0x09, 0x4c, 0x13, 0x04, 0xc0, 0x00,
+  0xcd, 0x98, 0x46, 0x11, 0x84, 0x00
+};
+
+const uint8_t kMaskRandom39_38[228] = {
+  0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+  0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+  0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+  0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+  0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+  0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+  0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+  0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+  0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x4c, 0x26, 0x09, 0x82, 0x60, 0x00,
+  0x66, 0x28, 0x8a, 0x22, 0x88, 0x00,
+  0x91, 0x50, 0x34, 0x0d, 0x02, 0x00,
+  0x42, 0x82, 0x60, 0x98, 0x26, 0x00,
+  0xa4, 0x01, 0xc0, 0x70, 0x1c, 0x00,
+  0x13, 0x43, 0x10, 0xc4, 0x30, 0x00,
+  0x30, 0x94, 0x85, 0x21, 0x48, 0x00,
+  0x88, 0xa1, 0x28, 0x4a, 0x12, 0x00,
+  0x09, 0x4c, 0x13, 0x04, 0xc0, 0x00,
+  0xcd, 0x98, 0x46, 0x11, 0x84, 0x00,
+  0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+  0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+  0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+  0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+  0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+  0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+  0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+  0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+  0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x2c, 0x16, 0x05, 0x81, 0x60, 0x00,
+  0x91, 0x40, 0xd0, 0x34, 0x0c, 0x00,
+  0xc0, 0xd0, 0x34, 0x0d, 0x02, 0x00,
+  0x06, 0x82, 0xa0, 0xa8, 0x2a, 0x00,
+  0xc8, 0x0c, 0x43, 0x10, 0xc4, 0x00,
+  0x45, 0x61, 0x18, 0x46, 0x10, 0x00,
+  0x30, 0x91, 0x44, 0x51, 0x14, 0x00,
+  0xa2, 0x28, 0x2a, 0x0a, 0x82, 0x00,
+  0x51, 0x97, 0x24, 0x2f, 0x7e, 0x00,
+  0x9e, 0xd8, 0x3c, 0x7e, 0x2e, 0x00
+};
+
+const uint8_t kMaskRandom39_39[234] = {
+  0x4c, 0x27, 0x09, 0xc2, 0x70, 0x00,
+  0x66, 0x71, 0x9c, 0x67, 0x18, 0x00,
+  0x91, 0x40, 0xf0, 0x3c, 0x0e, 0x00,
+  0x42, 0x90, 0xa4, 0x29, 0x0a, 0x00,
+  0xa4, 0x29, 0x4a, 0x52, 0x94, 0x00,
+  0x13, 0x5a, 0x16, 0x85, 0xa0, 0x00,
+  0x30, 0x93, 0x44, 0xd1, 0x34, 0x00,
+  0x88, 0xac, 0x2b, 0x0a, 0xc2, 0x00,
+  0x09, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x4c, 0x26, 0x09, 0x82, 0x60, 0x00,
+  0x66, 0x28, 0x8a, 0x22, 0x88, 0x00,
+  0x91, 0x50, 0x34, 0x0d, 0x02, 0x00,
+  0x42, 0x82, 0x60, 0x98, 0x26, 0x00,
+  0xa4, 0x01, 0xc0, 0x70, 0x1c, 0x00,
+  0x13, 0x43, 0x10, 0xc4, 0x30, 0x00,
+  0x30, 0x94, 0x85, 0x21, 0x48, 0x00,
+  0x88, 0xa1, 0x28, 0x4a, 0x12, 0x00,
+  0x09, 0x4c, 0x13, 0x04, 0xc0, 0x00,
+  0xcd, 0x98, 0x46, 0x11, 0x84, 0x00,
+  0x4c, 0x27, 0x09, 0x82, 0x60, 0x00,
+  0x66, 0x71, 0x8a, 0x22, 0x88, 0x00,
+  0x91, 0x40, 0xf4, 0x0d, 0x02, 0x00,
+  0x42, 0x90, 0xa0, 0x98, 0x26, 0x00,
+  0xa4, 0x29, 0x40, 0x70, 0x1c, 0x00,
+  0x13, 0x5a, 0x10, 0xc4, 0x30, 0x00,
+  0x30, 0x93, 0x45, 0x21, 0x48, 0x00,
+  0x88, 0xac, 0x28, 0x4a, 0x12, 0x00,
+  0x09, 0x0c, 0xd3, 0x04, 0xc0, 0x00,
+  0x4c, 0x26, 0x06, 0x11, 0x84, 0x00,
+  0x66, 0x28, 0x89, 0xc2, 0x70, 0x00,
+  0x91, 0x50, 0x3c, 0x67, 0x18, 0x00,
+  0x42, 0x82, 0x70, 0x3c, 0x0e, 0x00,
+  0xa4, 0x01, 0xc4, 0x29, 0x0a, 0x00,
+  0x13, 0x43, 0x0a, 0x52, 0x94, 0x00,
+  0x30, 0x94, 0x96, 0x85, 0xa0, 0x00,
+  0x88, 0xa1, 0x24, 0xd1, 0x34, 0x00,
+  0x09, 0x4c, 0x0b, 0x0a, 0xc2, 0x00,
+  0xcd, 0x98, 0x43, 0x30, 0xcc, 0x00,
+  0x1d, 0x04, 0x3e, 0xf1, 0xb4, 0x00
+};
+
+const uint8_t kMaskRandom39_4[24] = {
+  0xec, 0x73, 0x1c, 0xc7, 0x30, 0x00,
+  0x67, 0x19, 0xc6, 0x71, 0x9c, 0x00,
+  0xb1, 0xcc, 0x73, 0x1c, 0xc6, 0x00,
+  0x5a, 0x96, 0xa5, 0xa9, 0x6a, 0x00
+};
+
+const uint8_t kMaskRandom39_5[30] = {
+  0x4c, 0xe7, 0x19, 0xc6, 0x70, 0x00,
+  0x66, 0x31, 0xcc, 0x73, 0x1c, 0x00,
+  0xa1, 0xcc, 0x73, 0x1c, 0xa6, 0x00,
+  0x92, 0xa6, 0xa5, 0x6a, 0x6a, 0x00,
+  0xb8, 0x99, 0x96, 0x8b, 0x94, 0x00
+};
+
+const uint8_t kMaskRandom39_6[36] = {
+  0x4c, 0x36, 0x09, 0x83, 0x60, 0x00,
+  0x45, 0x68, 0x8a, 0x26, 0x88, 0x00,
+  0x30, 0xd0, 0x64, 0x1d, 0x06, 0x00,
+  0x8a, 0x82, 0xb0, 0xa8, 0x2a, 0x00,
+  0x26, 0x0b, 0x40, 0xd0, 0xd4, 0x00,
+  0x95, 0x45, 0x13, 0x44, 0x30, 0x00
+};
+
+const uint8_t kMaskRandom39_7[42] = {
+  0xc4, 0xa3, 0x09, 0xc2, 0x30, 0x00,
+  0x23, 0x19, 0x86, 0x65, 0x80, 0x00,
+  0x91, 0x1c, 0x22, 0x01, 0xd6, 0x00,
+  0x4a, 0x82, 0xb0, 0x2a, 0x2a, 0x00,
+  0x34, 0x49, 0x44, 0x98, 0x94, 0x00,
+  0x8b, 0x4a, 0x1a, 0x84, 0x60, 0x00,
+  0xc8, 0x24, 0xc1, 0x94, 0x4c, 0x00
+};
+
+const uint8_t kMaskRandom39_8[48] = {
+  0x16, 0x13, 0x84, 0xe1, 0x38, 0x00,
+  0xc2, 0x44, 0xd1, 0x34, 0x4c, 0x00,
+  0x60, 0xe8, 0x3a, 0x0e, 0x82, 0x00,
+  0x85, 0x12, 0x64, 0x99, 0x26, 0x00,
+  0xcc, 0x21, 0x5c, 0x52, 0x14, 0x00,
+  0x29, 0x63, 0x18, 0xc6, 0x30, 0x00,
+  0x11, 0x98, 0xc6, 0x31, 0x8c, 0x00,
+  0xb0, 0x0c, 0x63, 0x18, 0xc6, 0x00
+};
+
+const uint8_t kMaskRandom39_9[54] = {
+  0x44, 0xa7, 0x09, 0xc2, 0x70, 0x00,
+  0x66, 0x70, 0x8c, 0x47, 0x18, 0x00,
+  0x12, 0xc0, 0xf0, 0x3c, 0x0e, 0x00,
+  0xc3, 0x10, 0xbc, 0x29, 0x0a, 0x00,
+  0x8c, 0x29, 0x42, 0x72, 0x94, 0x00,
+  0x11, 0x5b, 0x16, 0x85, 0xa0, 0x00,
+  0x21, 0x93, 0x44, 0xd1, 0x34, 0x00,
+  0xa2, 0x2c, 0x0b, 0x0a, 0xc2, 0x00,
+  0x18, 0x0c, 0xe9, 0x30, 0xca, 0x00
+};
+
+const uint8_t kMaskRandom3_1[2] = {
+  0xe0, 0x00
+};
+
+const uint8_t kMaskRandom3_2[4] = {
+  0xc0, 0x00,
+  0xa0, 0x00
+};
+
+const uint8_t kMaskRandom3_3[6] = {
+  0xc0, 0x00,
+  0xa0, 0x00,
+  0x60, 0x00
+};
+
+const uint8_t kMaskRandom40_1[6] = {
+  0xff, 0xff, 0xff, 0xff, 0xff, 0x00
+};
+
+const uint8_t kMaskRandom40_10[60] = {
+  0x4c, 0x13, 0x04, 0xc1, 0x30, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x38, 0x0e, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x18, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0xa4, 0x00,
+  0x42, 0x50, 0x94, 0x25, 0x09, 0x00,
+  0x98, 0x26, 0x09, 0x82, 0x60, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0xc2, 0x00
+};
+
+const uint8_t kMaskRandom40_11[66] = {
+  0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+  0x23, 0x88, 0xe2, 0x38, 0x8e, 0x00,
+  0x1a, 0x46, 0x91, 0xa4, 0x69, 0x00,
+  0x24, 0xc9, 0x32, 0x4c, 0x93, 0x00,
+  0x71, 0x1c, 0x47, 0x11, 0xc4, 0x00,
+  0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+  0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+  0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom40_12[72] = {
+  0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+  0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+  0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+  0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+  0x23, 0x88, 0xe2, 0x38, 0x8e, 0x00,
+  0x1a, 0x46, 0x91, 0xa4, 0x69, 0x00,
+  0x24, 0xc9, 0x32, 0x4c, 0x93, 0x00,
+  0x71, 0x1c, 0x47, 0x11, 0xc4, 0x00,
+  0xf5, 0xdc, 0x4f, 0x5d, 0xc4, 0x00
+};
+
+const uint8_t kMaskRandom40_13[78] = {
+  0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+  0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+  0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+  0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+  0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+  0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+  0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00
+};
+
+const uint8_t kMaskRandom40_14[84] = {
+  0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+  0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+  0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+  0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+  0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+  0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+  0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+  0x56, 0x3e, 0x25, 0x63, 0xe2, 0x00
+};
+
+const uint8_t kMaskRandom40_15[90] = {
+  0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+  0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+  0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+  0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+  0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+  0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+  0x50, 0x54, 0x15, 0x05, 0x41, 0x00
+};
+
+const uint8_t kMaskRandom40_16[96] = {
+  0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+  0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+  0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+  0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+  0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+  0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+  0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+  0x28, 0x1c, 0x62, 0x81, 0xc6, 0x00
+};
+
+const uint8_t kMaskRandom40_17[102] = {
+  0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+  0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+  0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+  0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+  0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+  0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+  0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+  0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+  0x19, 0x86, 0x61, 0x98, 0x66, 0x00
+};
+
+const uint8_t kMaskRandom40_18[108] = {
+  0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+  0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+  0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+  0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+  0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+  0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+  0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+  0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+  0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+  0x21, 0x7b, 0xf2, 0x17, 0xbf, 0x00
+};
+
+const uint8_t kMaskRandom40_19[114] = {
+  0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+  0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+  0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+  0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+  0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+  0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+  0x4c, 0x13, 0x04, 0xc1, 0x30, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x38, 0x0e, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x18, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0xa4, 0x00,
+  0x42, 0x50, 0x94, 0x25, 0x09, 0x00,
+  0x98, 0x26, 0x09, 0x82, 0x60, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0xc2, 0x00
+};
+
+const uint8_t kMaskRandom40_2[12] = {
+  0xee, 0x3b, 0x8e, 0xe3, 0xb8, 0x00,
+  0x99, 0xe6, 0x79, 0x9e, 0x67, 0x00
+};
+
+const uint8_t kMaskRandom40_20[120] = {
+  0x4c, 0x13, 0x04, 0xc1, 0x30, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x38, 0x0e, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x18, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0xa4, 0x00,
+  0x42, 0x50, 0x94, 0x25, 0x09, 0x00,
+  0x98, 0x26, 0x09, 0x82, 0x60, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0xc2, 0x00,
+  0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+  0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+  0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+  0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+  0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+  0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+  0xf7, 0x8d, 0xaf, 0x78, 0xda, 0x00
+};
+
+const uint8_t kMaskRandom40_21[126] = {
+  0x4c, 0x13, 0x04, 0xc1, 0x30, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x38, 0x0e, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x18, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0xa4, 0x00,
+  0x42, 0x50, 0x94, 0x25, 0x09, 0x00,
+  0x98, 0x26, 0x09, 0x82, 0x60, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0xc2, 0x00,
+  0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+  0x23, 0x88, 0xe2, 0x38, 0x8e, 0x00,
+  0x1a, 0x46, 0x91, 0xa4, 0x69, 0x00,
+  0x24, 0xc9, 0x32, 0x4c, 0x93, 0x00,
+  0x71, 0x1c, 0x47, 0x11, 0xc4, 0x00,
+  0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+  0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+  0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00
+};
+
+const uint8_t kMaskRandom40_22[132] = {
+  0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+  0x23, 0x88, 0xe2, 0x38, 0x8e, 0x00,
+  0x1a, 0x46, 0x91, 0xa4, 0x69, 0x00,
+  0x24, 0xc9, 0x32, 0x4c, 0x93, 0x00,
+  0x71, 0x1c, 0x47, 0x11, 0xc4, 0x00,
+  0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+  0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+  0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+  0x4c, 0x13, 0x04, 0xc1, 0x30, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x38, 0x0e, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x18, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0xa4, 0x00,
+  0x42, 0x50, 0x94, 0x25, 0x09, 0x00,
+  0x98, 0x26, 0x09, 0x82, 0x60, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0xc2, 0x00,
+  0x89, 0xee, 0x1f, 0x38, 0xca, 0x00
+};
+
+const uint8_t kMaskRandom40_23[138] = {
+  0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+  0x23, 0x88, 0xe2, 0x38, 0x8e, 0x00,
+  0x1a, 0x46, 0x91, 0xa4, 0x69, 0x00,
+  0x24, 0xc9, 0x32, 0x4c, 0x93, 0x00,
+  0x71, 0x1c, 0x47, 0x11, 0xc4, 0x00,
+  0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+  0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+  0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+  0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+  0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+  0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+  0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+  0x23, 0x88, 0xe2, 0x38, 0x8e, 0x00,
+  0x1a, 0x46, 0x91, 0xa4, 0x69, 0x00,
+  0x24, 0xc9, 0x32, 0x4c, 0x93, 0x00,
+  0x71, 0x1c, 0x47, 0x11, 0xc4, 0x00,
+  0xf5, 0xdc, 0x4f, 0x5d, 0xc4, 0x00
+};
+
+const uint8_t kMaskRandom40_24[144] = {
+  0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+  0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+  0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+  0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+  0x23, 0x88, 0xe2, 0x38, 0x8e, 0x00,
+  0x1a, 0x46, 0x91, 0xa4, 0x69, 0x00,
+  0x24, 0xc9, 0x32, 0x4c, 0x93, 0x00,
+  0x71, 0x1c, 0x47, 0x11, 0xc4, 0x00,
+  0xf5, 0xdc, 0x4f, 0x5d, 0xc4, 0x00,
+  0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+  0x23, 0x88, 0xe2, 0x38, 0x8e, 0x00,
+  0x1a, 0x46, 0x91, 0xa4, 0x69, 0x00,
+  0x24, 0xc9, 0x32, 0x4c, 0x93, 0x00,
+  0x71, 0x1c, 0x47, 0x11, 0xc4, 0x00,
+  0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+  0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+  0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+  0x68, 0xde, 0x83, 0xa9, 0xcf, 0x00
+};
+
+const uint8_t kMaskRandom40_25[150] = {
+  0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+  0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+  0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+  0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+  0x23, 0x88, 0xe2, 0x38, 0x8e, 0x00,
+  0x1a, 0x46, 0x91, 0xa4, 0x69, 0x00,
+  0x24, 0xc9, 0x32, 0x4c, 0x93, 0x00,
+  0x71, 0x1c, 0x47, 0x11, 0xc4, 0x00,
+  0xf5, 0xdc, 0x4f, 0x5d, 0xc4, 0x00,
+  0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+  0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+  0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+  0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+  0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+  0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+  0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00
+};
+
+const uint8_t kMaskRandom40_26[156] = {
+  0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+  0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+  0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+  0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+  0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+  0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+  0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+  0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+  0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+  0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+  0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+  0x23, 0x88, 0xe2, 0x38, 0x8e, 0x00,
+  0x1a, 0x46, 0x91, 0xa4, 0x69, 0x00,
+  0x24, 0xc9, 0x32, 0x4c, 0x93, 0x00,
+  0x71, 0x1c, 0x47, 0x11, 0xc4, 0x00,
+  0xf5, 0xdc, 0x4f, 0x5d, 0xc4, 0x00,
+  0x06, 0x8e, 0x8c, 0x1a, 0xd2, 0x00
+};
+
+const uint8_t kMaskRandom40_27[162] = {
+  0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+  0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+  0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+  0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+  0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+  0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+  0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+  0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+  0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+  0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+  0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+  0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+  0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+  0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+  0x56, 0x3e, 0x25, 0x63, 0xe2, 0x00
+};
+
+const uint8_t kMaskRandom40_28[168] = {
+  0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+  0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+  0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+  0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+  0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+  0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+  0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+  0x56, 0x3e, 0x25, 0x63, 0xe2, 0x00,
+  0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+  0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+  0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+  0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+  0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+  0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+  0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+  0x68, 0x0e, 0x9b, 0x52, 0xb6, 0x00
+};
+
+const uint8_t kMaskRandom40_29[174] = {
+  0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+  0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+  0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+  0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+  0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+  0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+  0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+  0x56, 0x3e, 0x25, 0x63, 0xe2, 0x00,
+  0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+  0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+  0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+  0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+  0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+  0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+  0x50, 0x54, 0x15, 0x05, 0x41, 0x00
+};
+
+const uint8_t kMaskRandom40_3[18] = {
+  0xce, 0x33, 0x8c, 0xe3, 0x38, 0x00,
+  0x55, 0x95, 0x65, 0x59, 0x56, 0x00,
+  0xb1, 0x6a, 0x3b, 0x16, 0xa3, 0x00
+};
+
+const uint8_t kMaskRandom40_30[180] = {
+  0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+  0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+  0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+  0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+  0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+  0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+  0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+  0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+  0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+  0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+  0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+  0x0e, 0x03, 0x80, 0xe0, 0x38, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x45, 0x51, 0x54, 0x55, 0x15, 0x00,
+  0x88, 0xa2, 0x28, 0x8a, 0x22, 0x00,
+  0xe0, 0x38, 0x0e, 0x03, 0x80, 0x00,
+  0x56, 0x3e, 0x25, 0x63, 0xe2, 0x00,
+  0xe1, 0x47, 0x04, 0x05, 0x47, 0x00
+};
+
+const uint8_t kMaskRandom40_31[186] = {
+  0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+  0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+  0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+  0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+  0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+  0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+  0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+  0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+  0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+  0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+  0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+  0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+  0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+  0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+  0x28, 0x1c, 0x62, 0x81, 0xc6, 0x00
+};
+
+const uint8_t kMaskRandom40_32[192] = {
+  0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+  0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+  0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+  0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+  0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+  0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+  0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+  0x28, 0x1c, 0x62, 0x81, 0xc6, 0x00,
+  0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+  0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+  0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+  0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+  0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+  0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+  0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+  0x03, 0x0c, 0x46, 0x10, 0xc5, 0x00
+};
+
+const uint8_t kMaskRandom40_33[198] = {
+  0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+  0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+  0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+  0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+  0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+  0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+  0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+  0x28, 0x1c, 0x62, 0x81, 0xc6, 0x00,
+  0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+  0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+  0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+  0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+  0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+  0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+  0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+  0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+  0x19, 0x86, 0x61, 0x98, 0x66, 0x00
+};
+
+const uint8_t kMaskRandom40_34[204] = {
+  0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+  0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+  0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+  0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+  0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+  0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+  0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+  0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+  0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+  0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+  0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+  0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+  0x46, 0x11, 0x84, 0x61, 0x18, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x43, 0x00,
+  0x0c, 0x43, 0x10, 0xc4, 0x31, 0x00,
+  0x28, 0x8a, 0x22, 0x88, 0xa2, 0x00,
+  0x94, 0x25, 0x09, 0x42, 0x50, 0x00,
+  0xc1, 0x30, 0x4c, 0x13, 0x04, 0x00,
+  0x28, 0x1c, 0x62, 0x81, 0xc6, 0x00,
+  0x87, 0x3c, 0x08, 0x19, 0x31, 0x00
+};
+
+const uint8_t kMaskRandom40_35[210] = {
+  0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+  0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+  0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+  0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+  0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+  0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+  0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+  0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+  0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+  0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+  0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+  0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+  0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+  0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+  0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+  0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+  0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+  0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+  0x21, 0x7b, 0xf2, 0x17, 0xbf, 0x00
+};
+
+const uint8_t kMaskRandom40_36[216] = {
+  0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+  0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+  0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+  0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+  0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+  0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+  0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+  0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+  0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+  0x21, 0x7b, 0xf2, 0x17, 0xbf, 0x00,
+  0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+  0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+  0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+  0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+  0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+  0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+  0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+  0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+  0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+  0x1e, 0xb9, 0x3d, 0x25, 0xcc, 0x00
+};
+
+const uint8_t kMaskRandom40_37[222] = {
+  0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+  0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+  0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+  0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+  0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+  0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+  0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+  0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+  0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+  0x21, 0x7b, 0xf2, 0x17, 0xbf, 0x00,
+  0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+  0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+  0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+  0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+  0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+  0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+  0x4c, 0x13, 0x04, 0xc1, 0x30, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x38, 0x0e, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x18, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0xa4, 0x00,
+  0x42, 0x50, 0x94, 0x25, 0x09, 0x00,
+  0x98, 0x26, 0x09, 0x82, 0x60, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0xc2, 0x00
+};
+
+const uint8_t kMaskRandom40_38[228] = {
+  0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+  0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+  0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+  0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+  0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+  0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+  0x4c, 0x13, 0x04, 0xc1, 0x30, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x38, 0x0e, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x18, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0xa4, 0x00,
+  0x42, 0x50, 0x94, 0x25, 0x09, 0x00,
+  0x98, 0x26, 0x09, 0x82, 0x60, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0xc2, 0x00,
+  0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+  0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+  0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+  0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+  0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+  0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+  0x2c, 0x0b, 0x02, 0xc0, 0xb0, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x06, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x88, 0x62, 0x00,
+  0xc2, 0x30, 0x8c, 0x23, 0x08, 0x00,
+  0x22, 0x88, 0xa2, 0x28, 0x8a, 0x00,
+  0x50, 0x54, 0x15, 0x05, 0x41, 0x00,
+  0x21, 0x7b, 0xf2, 0x17, 0xbf, 0x00,
+  0xea, 0xaa, 0x20, 0xa2, 0x1b, 0x00
+};
+
+const uint8_t kMaskRandom40_39[234] = {
+  0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+  0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+  0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+  0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+  0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+  0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+  0x4c, 0x13, 0x04, 0xc1, 0x30, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x38, 0x0e, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x18, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0xa4, 0x00,
+  0x42, 0x50, 0x94, 0x25, 0x09, 0x00,
+  0x98, 0x26, 0x09, 0x82, 0x60, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0xc2, 0x00,
+  0x4c, 0x13, 0x04, 0xc1, 0x30, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x38, 0x0e, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x18, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0xa4, 0x00,
+  0x42, 0x50, 0x94, 0x25, 0x09, 0x00,
+  0x98, 0x26, 0x09, 0x82, 0x60, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0xc2, 0x00,
+  0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+  0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+  0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+  0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+  0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+  0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+  0xf7, 0x8d, 0xaf, 0x78, 0xda, 0x00
+};
+
+const uint8_t kMaskRandom40_4[24] = {
+  0xe6, 0x39, 0x8e, 0x63, 0x98, 0x00,
+  0x33, 0x8c, 0xe3, 0x38, 0xce, 0x00,
+  0x98, 0xe6, 0x39, 0x8e, 0x63, 0x00,
+  0x2d, 0x4b, 0x52, 0xd4, 0xb5, 0x00
+};
+
+const uint8_t kMaskRandom40_40[240] = {
+  0x4c, 0x13, 0x04, 0xc1, 0x30, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x38, 0x0e, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x18, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0xa4, 0x00,
+  0x42, 0x50, 0x94, 0x25, 0x09, 0x00,
+  0x98, 0x26, 0x09, 0x82, 0x60, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0xc2, 0x00,
+  0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+  0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+  0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+  0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+  0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+  0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+  0xf7, 0x8d, 0xaf, 0x78, 0xda, 0x00,
+  0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+  0xe3, 0x38, 0xce, 0x33, 0x8c, 0x00,
+  0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x85, 0x00,
+  0x52, 0x94, 0xa5, 0x29, 0x4a, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+  0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+  0x19, 0x86, 0x61, 0x98, 0x66, 0x00,
+  0x4c, 0x13, 0x04, 0xc1, 0x30, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x44, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0x81, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x38, 0x0e, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x18, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0xa4, 0x00,
+  0x42, 0x50, 0x94, 0x25, 0x09, 0x00,
+  0x98, 0x26, 0x09, 0x82, 0x60, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0xc2, 0x00,
+  0xa6, 0xf3, 0xab, 0x1b, 0x87, 0x00
+};
+
+const uint8_t kMaskRandom40_5[30] = {
+  0xce, 0x33, 0x8c, 0xe3, 0x38, 0x00,
+  0x63, 0x98, 0xe6, 0x39, 0x8e, 0x00,
+  0x98, 0xe5, 0x39, 0x8e, 0x53, 0x00,
+  0x2b, 0x53, 0x52, 0xb5, 0x35, 0x00,
+  0xb4, 0x5c, 0xab, 0x45, 0xca, 0x00
+};
+
+const uint8_t kMaskRandom40_6[36] = {
+  0x4c, 0x1b, 0x04, 0xc1, 0xb0, 0x00,
+  0x51, 0x34, 0x45, 0x13, 0x44, 0x00,
+  0x20, 0xe8, 0x32, 0x0e, 0x83, 0x00,
+  0x85, 0x41, 0x58, 0x54, 0x15, 0x00,
+  0x06, 0x86, 0xa0, 0x68, 0x6a, 0x00,
+  0x9a, 0x21, 0x89, 0xa2, 0x18, 0x00
+};
+
+const uint8_t kMaskRandom40_7[42] = {
+  0x4e, 0x11, 0x84, 0xe1, 0x18, 0x00,
+  0x33, 0x2c, 0x03, 0x32, 0xc0, 0x00,
+  0x10, 0x0e, 0xb1, 0x00, 0xeb, 0x00,
+  0x81, 0x51, 0x58, 0x15, 0x15, 0x00,
+  0x24, 0xc4, 0xa2, 0x4c, 0x4a, 0x00,
+  0xd4, 0x23, 0x0d, 0x42, 0x30, 0x00,
+  0x0c, 0xa2, 0x60, 0xca, 0x26, 0x00
+};
+
+const uint8_t kMaskRandom40_8[48] = {
+  0x27, 0x09, 0xc2, 0x70, 0x9c, 0x00,
+  0x89, 0xa2, 0x68, 0x9a, 0x26, 0x00,
+  0xd0, 0x74, 0x1d, 0x07, 0x41, 0x00,
+  0x24, 0xc9, 0x32, 0x4c, 0x93, 0x00,
+  0xe2, 0x90, 0xae, 0x29, 0x0a, 0x00,
+  0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+  0x31, 0x8c, 0x63, 0x18, 0xc6, 0x00,
+  0x18, 0xc6, 0x31, 0x8c, 0x63, 0x00
+};
+
+const uint8_t kMaskRandom40_9[54] = {
+  0x4e, 0x13, 0x84, 0xe1, 0x38, 0x00,
+  0x62, 0x38, 0xc6, 0x23, 0x8c, 0x00,
+  0x81, 0xe0, 0x78, 0x1e, 0x07, 0x00,
+  0xe1, 0x48, 0x5e, 0x14, 0x85, 0x00,
+  0x13, 0x94, 0xa1, 0x39, 0x4a, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0xd0, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x9a, 0x00,
+  0x58, 0x56, 0x15, 0x85, 0x61, 0x00,
+  0x49, 0x86, 0x54, 0x98, 0x65, 0x00
+};
+
+const uint8_t kMaskRandom41_1[6] = {
+  0xff, 0xff, 0xff, 0xff, 0xff, 0x80
+};
+
+const uint8_t kMaskRandom41_10[60] = {
+  0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+  0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+  0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80
+};
+
+const uint8_t kMaskRandom41_11[66] = {
+  0xc6, 0x31, 0x8c, 0x62, 0x1a, 0x00,
+  0x23, 0x88, 0xe2, 0x38, 0x8c, 0x80,
+  0x1a, 0x46, 0x91, 0xa4, 0x58, 0x80,
+  0x24, 0xc9, 0x32, 0x4d, 0x30, 0x80,
+  0x71, 0x1c, 0x47, 0x11, 0x07, 0x00,
+  0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+  0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+  0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+  0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+  0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00
+};
+
+const uint8_t kMaskRandom41_12[72] = {
+  0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+  0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+  0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+  0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+  0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+  0xc6, 0x31, 0x8c, 0x62, 0x1a, 0x00,
+  0x23, 0x88, 0xe2, 0x38, 0x8c, 0x80,
+  0x1a, 0x46, 0x91, 0xa4, 0x58, 0x80,
+  0x24, 0xc9, 0x32, 0x4d, 0x30, 0x80,
+  0x71, 0x1c, 0x47, 0x11, 0x07, 0x00,
+  0xf5, 0xdc, 0x4a, 0x06, 0x51, 0x80
+};
+
+const uint8_t kMaskRandom41_13[78] = {
+  0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+  0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+  0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+  0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+  0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+  0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+  0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+  0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+  0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+  0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+  0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80
+};
+
+const uint8_t kMaskRandom41_14[84] = {
+  0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+  0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+  0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+  0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+  0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+  0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+  0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+  0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+  0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+  0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+  0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+  0x56, 0x3e, 0x24, 0xdd, 0x0c, 0x00
+};
+
+const uint8_t kMaskRandom41_15[90] = {
+  0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+  0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+  0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+  0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+  0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+  0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+  0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+  0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+  0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+  0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+  0x50, 0x54, 0x15, 0x04, 0x50, 0x80
+};
+
+const uint8_t kMaskRandom41_16[96] = {
+  0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+  0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+  0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+  0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+  0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+  0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+  0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+  0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+  0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+  0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+  0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+  0x28, 0x1c, 0x63, 0xbf, 0x53, 0x80
+};
+
+const uint8_t kMaskRandom41_17[102] = {
+  0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+  0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+  0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+  0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+  0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+  0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+  0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+  0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+  0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+  0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+  0x19, 0x86, 0x61, 0x99, 0xe0, 0x00
+};
+
+const uint8_t kMaskRandom41_18[108] = {
+  0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+  0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+  0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+  0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+  0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+  0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+  0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+  0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+  0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+  0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+  0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+  0x21, 0x7b, 0xf5, 0xa5, 0x65, 0x80
+};
+
+const uint8_t kMaskRandom41_19[114] = {
+  0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+  0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+  0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+  0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+  0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+  0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+  0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+  0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+  0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80
+};
+
+const uint8_t kMaskRandom41_2[12] = {
+  0xee, 0x3b, 0x8e, 0xe3, 0xb3, 0x00,
+  0x99, 0xe6, 0x79, 0x9e, 0x6e, 0x80
+};
+
+const uint8_t kMaskRandom41_20[120] = {
+  0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+  0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+  0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80,
+  0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+  0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+  0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+  0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+  0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+  0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+  0xf7, 0x8d, 0xa2, 0xa0, 0x33, 0x00
+};
+
+const uint8_t kMaskRandom41_21[126] = {
+  0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+  0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+  0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80,
+  0xc6, 0x31, 0x8c, 0x62, 0x1a, 0x00,
+  0x23, 0x88, 0xe2, 0x38, 0x8c, 0x80,
+  0x1a, 0x46, 0x91, 0xa4, 0x58, 0x80,
+  0x24, 0xc9, 0x32, 0x4d, 0x30, 0x80,
+  0x71, 0x1c, 0x47, 0x11, 0x07, 0x00,
+  0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+  0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+  0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+  0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+  0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00
+};
+
+const uint8_t kMaskRandom41_22[132] = {
+  0xc6, 0x31, 0x8c, 0x62, 0x1a, 0x00,
+  0x23, 0x88, 0xe2, 0x38, 0x8c, 0x80,
+  0x1a, 0x46, 0x91, 0xa4, 0x58, 0x80,
+  0x24, 0xc9, 0x32, 0x4d, 0x30, 0x80,
+  0x71, 0x1c, 0x47, 0x11, 0x07, 0x00,
+  0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+  0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+  0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+  0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+  0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+  0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+  0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+  0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80,
+  0x33, 0x09, 0x6e, 0x49, 0x6b, 0x80
+};
+
+const uint8_t kMaskRandom41_23[138] = {
+  0xc6, 0x31, 0x8c, 0x62, 0x1a, 0x00,
+  0x23, 0x88, 0xe2, 0x38, 0x8c, 0x80,
+  0x1a, 0x46, 0x91, 0xa4, 0x58, 0x80,
+  0x24, 0xc9, 0x32, 0x4d, 0x30, 0x80,
+  0x71, 0x1c, 0x47, 0x11, 0x07, 0x00,
+  0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+  0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+  0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+  0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+  0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+  0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+  0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+  0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+  0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+  0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+  0xc6, 0x31, 0x8c, 0x62, 0x1a, 0x00,
+  0x23, 0x88, 0xe2, 0x38, 0x8c, 0x80,
+  0x1a, 0x46, 0x91, 0xa4, 0x58, 0x80,
+  0x24, 0xc9, 0x32, 0x4d, 0x30, 0x80,
+  0x71, 0x1c, 0x47, 0x11, 0x07, 0x00,
+  0xf5, 0xdc, 0x4a, 0x06, 0x51, 0x80
+};
+
+const uint8_t kMaskRandom41_24[144] = {
+  0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+  0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+  0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+  0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+  0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+  0xc6, 0x31, 0x8c, 0x62, 0x1a, 0x00,
+  0x23, 0x88, 0xe2, 0x38, 0x8c, 0x80,
+  0x1a, 0x46, 0x91, 0xa4, 0x58, 0x80,
+  0x24, 0xc9, 0x32, 0x4d, 0x30, 0x80,
+  0x71, 0x1c, 0x47, 0x11, 0x07, 0x00,
+  0xf5, 0xdc, 0x4a, 0x06, 0x51, 0x80,
+  0xc6, 0x31, 0x8c, 0x62, 0x1a, 0x00,
+  0x23, 0x88, 0xe2, 0x38, 0x8c, 0x80,
+  0x1a, 0x46, 0x91, 0xa4, 0x58, 0x80,
+  0x24, 0xc9, 0x32, 0x4d, 0x30, 0x80,
+  0x71, 0x1c, 0x47, 0x11, 0x07, 0x00,
+  0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+  0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+  0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+  0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+  0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+  0x45, 0xa6, 0xef, 0xc9, 0xc3, 0x00
+};
+
+const uint8_t kMaskRandom41_25[150] = {
+  0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+  0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+  0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+  0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+  0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+  0xc6, 0x31, 0x8c, 0x62, 0x1a, 0x00,
+  0x23, 0x88, 0xe2, 0x38, 0x8c, 0x80,
+  0x1a, 0x46, 0x91, 0xa4, 0x58, 0x80,
+  0x24, 0xc9, 0x32, 0x4d, 0x30, 0x80,
+  0x71, 0x1c, 0x47, 0x11, 0x07, 0x00,
+  0xf5, 0xdc, 0x4a, 0x06, 0x51, 0x80,
+  0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+  0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+  0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+  0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+  0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+  0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+  0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+  0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+  0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+  0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+  0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80
+};
+
+const uint8_t kMaskRandom41_26[156] = {
+  0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+  0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+  0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+  0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+  0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+  0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+  0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+  0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+  0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+  0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+  0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+  0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+  0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+  0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+  0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+  0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+  0xc6, 0x31, 0x8c, 0x62, 0x1a, 0x00,
+  0x23, 0x88, 0xe2, 0x38, 0x8c, 0x80,
+  0x1a, 0x46, 0x91, 0xa4, 0x58, 0x80,
+  0x24, 0xc9, 0x32, 0x4d, 0x30, 0x80,
+  0x71, 0x1c, 0x47, 0x11, 0x07, 0x00,
+  0xf5, 0xdc, 0x4a, 0x06, 0x51, 0x80,
+  0x6f, 0x72, 0xf1, 0xe7, 0x1a, 0x80
+};
+
+const uint8_t kMaskRandom41_27[162] = {
+  0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+  0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+  0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+  0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+  0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+  0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+  0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+  0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+  0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+  0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+  0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+  0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+  0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+  0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+  0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+  0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+  0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+  0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+  0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+  0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+  0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+  0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+  0x56, 0x3e, 0x24, 0xdd, 0x0c, 0x00
+};
+
+const uint8_t kMaskRandom41_28[168] = {
+  0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+  0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+  0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+  0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+  0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+  0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+  0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+  0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+  0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+  0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+  0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+  0x56, 0x3e, 0x24, 0xdd, 0x0c, 0x00,
+  0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+  0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+  0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+  0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+  0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+  0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+  0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+  0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+  0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+  0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+  0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+  0x61, 0x2c, 0xfa, 0x25, 0x38, 0x00
+};
+
+const uint8_t kMaskRandom41_29[174] = {
+  0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+  0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+  0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+  0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+  0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+  0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+  0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+  0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+  0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+  0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+  0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+  0x56, 0x3e, 0x24, 0xdd, 0x0c, 0x00,
+  0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+  0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+  0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+  0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+  0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+  0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+  0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+  0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+  0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+  0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+  0x50, 0x54, 0x15, 0x04, 0x50, 0x80
+};
+
+const uint8_t kMaskRandom41_3[18] = {
+  0xce, 0x33, 0x8c, 0xe3, 0x2b, 0x00,
+  0x55, 0x95, 0x65, 0x5d, 0xc5, 0x00,
+  0xb1, 0x6a, 0x3a, 0x8e, 0xd8, 0x80
+};
+
+const uint8_t kMaskRandom41_30[180] = {
+  0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+  0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+  0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+  0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+  0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+  0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+  0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+  0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+  0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+  0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+  0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+  0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+  0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+  0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+  0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+  0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+  0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+  0x0e, 0x03, 0x80, 0xe1, 0x91, 0x00,
+  0x33, 0x0c, 0xc3, 0x31, 0x45, 0x00,
+  0x10, 0xc4, 0x31, 0x0c, 0x32, 0x80,
+  0x45, 0x51, 0x54, 0x56, 0x84, 0x80,
+  0x88, 0xa2, 0x28, 0x88, 0x4a, 0x80,
+  0xe0, 0x38, 0x0e, 0x02, 0x29, 0x00,
+  0x56, 0x3e, 0x24, 0xdd, 0x0c, 0x00,
+  0x59, 0x53, 0x31, 0x62, 0x15, 0x00
+};
+
+const uint8_t kMaskRandom41_31[186] = {
+  0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+  0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+  0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+  0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+  0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+  0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+  0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+  0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+  0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+  0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+  0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+  0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+  0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+  0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+  0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+  0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+  0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+  0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+  0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+  0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+  0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+  0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+  0x28, 0x1c, 0x63, 0xbf, 0x53, 0x80
+};
+
+const uint8_t kMaskRandom41_32[192] = {
+  0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+  0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+  0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+  0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+  0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+  0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+  0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+  0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+  0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+  0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+  0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+  0x28, 0x1c, 0x63, 0xbf, 0x53, 0x80,
+  0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+  0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+  0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+  0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+  0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+  0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+  0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+  0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+  0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+  0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+  0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+  0xca, 0xbb, 0xcb, 0x6d, 0xaa, 0x00
+};
+
+const uint8_t kMaskRandom41_33[198] = {
+  0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+  0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+  0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+  0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+  0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+  0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+  0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+  0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+  0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+  0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+  0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+  0x28, 0x1c, 0x63, 0xbf, 0x53, 0x80,
+  0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+  0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+  0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+  0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+  0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+  0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+  0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+  0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+  0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+  0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+  0x19, 0x86, 0x61, 0x99, 0xe0, 0x00
+};
+
+const uint8_t kMaskRandom41_34[204] = {
+  0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+  0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+  0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+  0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+  0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+  0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+  0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+  0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+  0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+  0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+  0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+  0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+  0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+  0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+  0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+  0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+  0x46, 0x11, 0x84, 0x61, 0x19, 0x00,
+  0x33, 0x0c, 0xc3, 0x30, 0xcc, 0x80,
+  0x10, 0xc4, 0x31, 0x0e, 0x46, 0x00,
+  0x0c, 0x43, 0x10, 0xc6, 0x90, 0x80,
+  0x28, 0x8a, 0x22, 0x89, 0x42, 0x80,
+  0x94, 0x25, 0x09, 0x42, 0x13, 0x00,
+  0xc1, 0x30, 0x4c, 0x10, 0x25, 0x80,
+  0x28, 0x1c, 0x63, 0xbf, 0x53, 0x80,
+  0xbd, 0x37, 0x3f, 0x75, 0x36, 0x80
+};
+
+const uint8_t kMaskRandom41_35[210] = {
+  0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+  0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+  0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+  0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+  0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+  0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+  0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+  0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+  0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+  0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+  0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+  0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+  0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+  0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+  0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+  0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+  0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+  0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+  0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+  0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+  0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+  0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+  0x21, 0x7b, 0xf5, 0xa5, 0x65, 0x80
+};
+
+const uint8_t kMaskRandom41_36[216] = {
+  0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+  0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+  0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+  0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+  0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+  0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+  0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+  0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+  0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+  0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+  0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+  0x21, 0x7b, 0xf5, 0xa5, 0x65, 0x80,
+  0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+  0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+  0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+  0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+  0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+  0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+  0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+  0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+  0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+  0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+  0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+  0xc1, 0xb1, 0x80, 0xbe, 0x3e, 0x00
+};
+
+const uint8_t kMaskRandom41_37[222] = {
+  0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+  0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+  0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+  0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+  0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+  0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+  0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+  0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+  0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+  0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+  0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+  0x21, 0x7b, 0xf5, 0xa5, 0x65, 0x80,
+  0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+  0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+  0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+  0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+  0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+  0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+  0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+  0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+  0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80
+};
+
+const uint8_t kMaskRandom41_38[228] = {
+  0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+  0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+  0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+  0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+  0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+  0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+  0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+  0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+  0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80,
+  0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+  0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+  0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+  0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+  0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+  0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+  0x2c, 0x0b, 0x02, 0xc0, 0x32, 0x00,
+  0x81, 0xa0, 0x68, 0x1a, 0x01, 0x80,
+  0xa0, 0x68, 0x1a, 0x06, 0x82, 0x00,
+  0x05, 0x41, 0x50, 0x54, 0x15, 0x00,
+  0x18, 0x86, 0x21, 0x89, 0x0c, 0x00,
+  0xc2, 0x30, 0x8c, 0x20, 0x68, 0x00,
+  0x22, 0x88, 0xa2, 0x29, 0x80, 0x80,
+  0x50, 0x54, 0x15, 0x04, 0x50, 0x80,
+  0x21, 0x7b, 0xf5, 0xa5, 0x65, 0x80,
+  0xea, 0xc8, 0xbb, 0xd4, 0x5d, 0x00
+};
+
+const uint8_t kMaskRandom41_39[234] = {
+  0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+  0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+  0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+  0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+  0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+  0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+  0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+  0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+  0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80,
+  0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+  0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+  0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80,
+  0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+  0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+  0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+  0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+  0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+  0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+  0xf7, 0x8d, 0xa2, 0xa0, 0x33, 0x00
+};
+
+const uint8_t kMaskRandom41_4[24] = {
+  0xe6, 0x39, 0x8e, 0x63, 0x13, 0x00,
+  0x33, 0x8c, 0xe3, 0x38, 0xc5, 0x80,
+  0x98, 0xe6, 0x39, 0x8d, 0x2c, 0x80,
+  0x2d, 0x4b, 0x52, 0xd4, 0xb2, 0x80
+};
+
+const uint8_t kMaskRandom41_40[240] = {
+  0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+  0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+  0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80,
+  0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+  0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+  0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+  0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+  0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+  0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+  0xf7, 0x8d, 0xa2, 0xa0, 0x33, 0x00,
+  0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+  0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+  0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+  0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+  0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+  0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+  0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+  0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+  0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80,
+  0xe8, 0x07, 0x18, 0x9a, 0x02, 0x00
+};
+
+const uint8_t kMaskRandom41_41[246] = {
+  0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+  0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+  0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80,
+  0x4e, 0x13, 0x84, 0xe1, 0x19, 0x00,
+  0xe3, 0x38, 0xce, 0x31, 0x89, 0x80,
+  0x81, 0xe0, 0x78, 0x1e, 0x30, 0x00,
+  0x21, 0x48, 0x52, 0x14, 0x05, 0x80,
+  0x52, 0x94, 0xa5, 0x28, 0x1e, 0x00,
+  0xb4, 0x2d, 0x0b, 0x42, 0x82, 0x00,
+  0x26, 0x89, 0xa2, 0x68, 0x62, 0x80,
+  0x58, 0x56, 0x15, 0x86, 0x44, 0x00,
+  0x19, 0x86, 0x61, 0x99, 0xe0, 0x00,
+  0xf7, 0x8d, 0xa2, 0xa0, 0x33, 0x00,
+  0x4c, 0x13, 0x04, 0xc1, 0x91, 0x00,
+  0x51, 0x14, 0x45, 0x11, 0x45, 0x00,
+  0xa0, 0x68, 0x1a, 0x06, 0xa4, 0x00,
+  0x04, 0xc1, 0x30, 0x4c, 0x13, 0x00,
+  0x03, 0x80, 0xe0, 0x3b, 0x40, 0x00,
+  0x86, 0x21, 0x88, 0x62, 0x09, 0x00,
+  0x29, 0x0a, 0x42, 0x90, 0x84, 0x80,
+  0x42, 0x50, 0x94, 0x24, 0x30, 0x80,
+  0x98, 0x26, 0x09, 0x81, 0x28, 0x00,
+  0x30, 0x8c, 0x23, 0x08, 0x4a, 0x80,
+  0x4e, 0x13, 0x84, 0xc1, 0x19, 0x00,
+  0xe3, 0x38, 0xc5, 0x10, 0xcc, 0x80,
+  0x81, 0xe0, 0x7a, 0x06, 0x64, 0x00,
+  0x21, 0x48, 0x50, 0x4c, 0x16, 0x00,
+  0x52, 0x94, 0xa0, 0x3a, 0x02, 0x80,
+  0xb4, 0x2d, 0x08, 0x62, 0x11, 0x00,
+  0x26, 0x89, 0xa2, 0x91, 0x01, 0x80,
+  0x58, 0x56, 0x14, 0x24, 0x2a, 0x00,
+  0x19, 0x86, 0x69, 0x81, 0xa0, 0x00,
+  0xf7, 0x8d, 0xa3, 0x08, 0x40, 0x80,
+  0x2b, 0xea, 0x4d, 0xf4, 0xc1, 0x00
+};
+
+const uint8_t kMaskRandom41_5[30] = {
+  0xce, 0x33, 0x8c, 0xe3, 0x1b, 0x00,
+  0x63, 0x98, 0xe6, 0x39, 0x8d, 0x80,
+  0x98, 0xe5, 0x39, 0x8c, 0x76, 0x80,
+  0x2b, 0x53, 0x54, 0xd6, 0xb5, 0x00,
+  0xb4, 0x5c, 0xab, 0x26, 0xca, 0x80
+};
+
+const uint8_t kMaskRandom41_6[36] = {
+  0x4c, 0x1b, 0x04, 0xc1, 0x91, 0x00,
+  0x51, 0x34, 0x45, 0x11, 0x45, 0x00,
+  0x20, 0xe8, 0x32, 0x0e, 0xa0, 0x80,
+  0x85, 0x41, 0x58, 0x54, 0x12, 0x80,
+  0x06, 0x86, 0xa0, 0x68, 0x0d, 0x80,
+  0x9a, 0x21, 0x88, 0xa2, 0x43, 0x00
+};
+
+const uint8_t kMaskRandom41_7[42] = {
+  0x4e, 0x11, 0x8c, 0x61, 0x19, 0x00,
+  0x33, 0x2c, 0x03, 0x30, 0x4c, 0x80,
+  0x10, 0x0e, 0xb1, 0x86, 0x74, 0x00,
+  0x81, 0x51, 0x54, 0x54, 0x2d, 0x00,
+  0x24, 0xc4, 0xa1, 0x2d, 0x42, 0x80,
+  0xd4, 0x23, 0x0b, 0x42, 0x83, 0x00,
+  0x0c, 0xa2, 0x62, 0x99, 0x21, 0x80
+};
+
+const uint8_t kMaskRandom41_8[48] = {
+  0x27, 0x09, 0xc0, 0x70, 0xa7, 0x00,
+  0x89, 0xa2, 0x64, 0x9a, 0x82, 0x80,
+  0xd0, 0x74, 0x1b, 0x07, 0xa0, 0x00,
+  0x24, 0xc9, 0x32, 0x4c, 0x5c, 0x00,
+  0xe2, 0x90, 0xa5, 0x28, 0x0e, 0x80,
+  0xc6, 0x31, 0x8c, 0x63, 0x18, 0x00,
+  0x31, 0x8c, 0x63, 0x19, 0x41, 0x80,
+  0x18, 0xc6, 0x31, 0x8c, 0x70, 0x80
+};
+
+const uint8_t kMaskRandom41_9[54] = {
+  0x4e, 0x13, 0x84, 0xe1, 0x11, 0x00,
+  0x62, 0x38, 0xc6, 0x21, 0xa0, 0x80,
+  0x81, 0xe0, 0x78, 0x0e, 0x94, 0x00,
+  0xe1, 0x48, 0x5a, 0x15, 0x05, 0x00,
+  0x13, 0x94, 0xa5, 0x30, 0x06, 0x80,
+  0xb4, 0x2d, 0x0a, 0x42, 0x43, 0x00,
+  0x26, 0x89, 0xa1, 0x6a, 0x08, 0x80,
+  0x58, 0x56, 0x15, 0x84, 0x52, 0x00,
+  0x49, 0x86, 0x52, 0x98, 0x68, 0x00
+};
+
+const uint8_t kMaskRandom42_1[6] = {
+  0xff, 0xff, 0xff, 0xff, 0xff, 0xc0
+};
+
+const uint8_t kMaskRandom42_10[60] = {
+  0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+  0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+  0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+  0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+  0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+  0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+  0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+  0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+  0x30, 0x84, 0xa9, 0x84, 0x25, 0x40
+};
+
+const uint8_t kMaskRandom42_11[66] = {
+  0xc6, 0x21, 0xa6, 0x31, 0x0d, 0x00,
+  0x23, 0x88, 0xc9, 0x1c, 0x46, 0x40,
+  0x1a, 0x45, 0x88, 0xd2, 0x2c, 0x40,
+  0x24, 0xd3, 0x09, 0x26, 0x98, 0x40,
+  0x71, 0x10, 0x73, 0x88, 0x83, 0x80,
+  0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+  0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+  0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+  0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+  0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+  0xe0, 0x22, 0x97, 0x01, 0x14, 0x80
+};
+
+const uint8_t kMaskRandom42_12[72] = {
+  0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+  0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+  0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+  0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+  0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+  0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+  0xc6, 0x21, 0xa6, 0x31, 0x0d, 0x00,
+  0x23, 0x88, 0xc9, 0x1c, 0x46, 0x40,
+  0x1a, 0x45, 0x88, 0xd2, 0x2c, 0x40,
+  0x24, 0xd3, 0x09, 0x26, 0x98, 0x40,
+  0x71, 0x10, 0x73, 0x88, 0x83, 0x80,
+  0xa0, 0x65, 0x1d, 0x03, 0x28, 0xc0
+};
+
+const uint8_t kMaskRandom42_13[78] = {
+  0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+  0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+  0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+  0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+  0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+  0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+  0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+  0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+  0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+  0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+  0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+  0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+  0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0
+};
+
+const uint8_t kMaskRandom42_14[84] = {
+  0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+  0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+  0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+  0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+  0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+  0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+  0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+  0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+  0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+  0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+  0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+  0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+  0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+  0x4d, 0xd0, 0xc2, 0x6e, 0x86, 0x00
+};
+
+const uint8_t kMaskRandom42_15[90] = {
+  0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+  0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+  0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+  0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+  0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+  0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+  0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+  0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+  0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+  0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+  0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+  0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+  0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+  0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+  0x50, 0x45, 0x0a, 0x82, 0x28, 0x40
+};
+
+const uint8_t kMaskRandom42_16[96] = {
+  0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+  0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+  0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+  0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+  0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+  0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+  0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+  0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+  0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+  0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+  0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+  0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+  0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+  0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+  0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+  0x3b, 0xf5, 0x39, 0xdf, 0xa9, 0xc0
+};
+
+const uint8_t kMaskRandom42_17[102] = {
+  0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+  0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+  0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+  0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+  0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+  0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+  0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+  0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+  0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+  0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+  0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+  0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+  0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+  0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+  0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+  0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+  0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00
+};
+
+const uint8_t kMaskRandom42_18[108] = {
+  0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+  0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+  0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+  0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+  0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+  0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+  0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+  0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+  0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+  0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+  0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+  0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+  0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+  0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+  0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+  0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+  0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+  0x5a, 0x56, 0x5a, 0xd2, 0xb2, 0xc0
+};
+
+const uint8_t kMaskRandom42_19[114] = {
+  0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+  0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+  0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+  0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+  0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+  0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+  0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+  0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+  0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+  0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+  0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+  0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+  0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+  0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+  0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+  0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+  0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+  0x30, 0x84, 0xa9, 0x84, 0x25, 0x40
+};
+
+const uint8_t kMaskRandom42_2[12] = {
+  0xee, 0x3b, 0x37, 0x71, 0xd9, 0x80,
+  0x99, 0xe6, 0xec, 0xcf, 0x37, 0x40
+};
+
+const uint8_t kMaskRandom42_20[120] = {
+  0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+  0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+  0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+  0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+  0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+  0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+  0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+  0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+  0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+  0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+  0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+  0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+  0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+  0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+  0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+  0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+  0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+  0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+  0x2a, 0x03, 0x31, 0x50, 0x19, 0x80
+};
+
+const uint8_t kMaskRandom42_21[126] = {
+  0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+  0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+  0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+  0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+  0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+  0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+  0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+  0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+  0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+  0x4c, 0x11, 0x92, 0x60, 0x8c, 0x80,
+  0x51, 0x0c, 0xca, 0x88, 0x66, 0x40,
+  0xa0, 0x66, 0x45, 0x03, 0x32, 0x00,
+  0x04, 0xc1, 0x60, 0x26, 0x0b, 0x00,
+  0x03, 0xa0, 0x28, 0x1d, 0x01, 0x40,
+  0x86, 0x21, 0x14, 0x31, 0x08, 0x80,
+  0x29, 0x10, 0x19, 0x48, 0x80, 0xc0,
+  0x42, 0x42, 0xa2, 0x12, 0x15, 0x00,
+  0x98, 0x1a, 0x04, 0xc0, 0xd0, 0x00,
+  0x30, 0x84, 0x09, 0x84, 0x20, 0x40,
+  0xdf, 0x4c, 0x16, 0xfa, 0x60, 0x80
+};
+
+const uint8_t kMaskRandom42_22[132] = {
+  0xc6, 0x21, 0xa6, 0x31, 0x0d, 0x00,
+  0x23, 0x88, 0xc9, 0x1c, 0x46, 0x40,
+  0x1a, 0x45, 0x88, 0xd2, 0x2c, 0x40,
+  0x24, 0xd3, 0x09, 0x26, 0x98, 0x40,
+  0x71, 0x10, 0x73, 0x88, 0x83, 0x80,
+  0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+  0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+  0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+  0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+  0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+  0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+  0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+  0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+  0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+  0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+  0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+  0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+  0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+  0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+  0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+  0xdb, 0x36, 0xb0, 0x33, 0x14, 0x80
+};
+
+const uint8_t kMaskRandom42_23[138] = {
+  0xc6, 0x21, 0xa6, 0x31, 0x0d, 0x00,
+  0x23, 0x88, 0xc9, 0x1c, 0x46, 0x40,
+  0x1a, 0x45, 0x88, 0xd2, 0x2c, 0x40,
+  0x24, 0xd3, 0x09, 0x26, 0x98, 0x40,
+  0x71, 0x10, 0x73, 0x88, 0x83, 0x80,
+  0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+  0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+  0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+  0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+  0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+  0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+  0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+  0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+  0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+  0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+  0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+  0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+  0xc6, 0x21, 0xa6, 0x31, 0x0d, 0x00,
+  0x23, 0x88, 0xc9, 0x1c, 0x46, 0x40,
+  0x1a, 0x45, 0x88, 0xd2, 0x2c, 0x40,
+  0x24, 0xd3, 0x09, 0x26, 0x98, 0x40,
+  0x71, 0x10, 0x73, 0x88, 0x83, 0x80,
+  0xa0, 0x65, 0x1d, 0x03, 0x28, 0xc0
+};
+
+const uint8_t kMaskRandom42_24[144] = {
+  0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+  0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+  0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+  0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+  0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+  0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+  0xc6, 0x21, 0xa6, 0x31, 0x0d, 0x00,
+  0x23, 0x88, 0xc9, 0x1c, 0x46, 0x40,
+  0x1a, 0x45, 0x88, 0xd2, 0x2c, 0x40,
+  0x24, 0xd3, 0x09, 0x26, 0x98, 0x40,
+  0x71, 0x10, 0x73, 0x88, 0x83, 0x80,
+  0xa0, 0x65, 0x1d, 0x03, 0x28, 0xc0,
+  0xc6, 0x21, 0xa6, 0x31, 0x0d, 0x00,
+  0x23, 0x88, 0xc9, 0x1c, 0x46, 0x40,
+  0x1a, 0x45, 0x88, 0xd2, 0x2c, 0x40,
+  0x24, 0xd3, 0x09, 0x26, 0x98, 0x40,
+  0x71, 0x10, 0x73, 0x88, 0x83, 0x80,
+  0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+  0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+  0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+  0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+  0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+  0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+  0x2e, 0x1c, 0x92, 0xbb, 0x07, 0xc0
+};
+
+const uint8_t kMaskRandom42_25[150] = {
+  0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+  0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+  0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+  0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+  0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+  0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+  0xc6, 0x21, 0xa6, 0x31, 0x0d, 0x00,
+  0x23, 0x88, 0xc9, 0x1c, 0x46, 0x40,
+  0x1a, 0x45, 0x88, 0xd2, 0x2c, 0x40,
+  0x24, 0xd3, 0x09, 0x26, 0x98, 0x40,
+  0x71, 0x10, 0x73, 0x88, 0x83, 0x80,
+  0xa0, 0x65, 0x1d, 0x03, 0x28, 0xc0,
+  0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+  0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+  0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+  0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+  0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+  0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+  0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+  0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+  0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+  0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+  0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+  0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+  0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0
+};
+
+const uint8_t kMaskRandom42_26[156] = {
+  0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+  0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+  0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+  0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+  0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+  0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+  0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+  0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+  0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+  0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+  0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+  0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+  0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+  0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+  0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+  0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+  0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+  0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+  0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+  0xc6, 0x21, 0xa6, 0x31, 0x0d, 0x00,
+  0x23, 0x88, 0xc9, 0x1c, 0x46, 0x40,
+  0x1a, 0x45, 0x88, 0xd2, 0x2c, 0x40,
+  0x24, 0xd3, 0x09, 0x26, 0x98, 0x40,
+  0x71, 0x10, 0x73, 0x88, 0x83, 0x80,
+  0xa0, 0x65, 0x1d, 0x03, 0x28, 0xc0,
+  0xb8, 0x41, 0xed, 0xa3, 0x77, 0xc0
+};
+
+const uint8_t kMaskRandom42_27[162] = {
+  0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+  0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+  0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+  0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+  0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+  0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+  0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+  0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+  0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+  0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+  0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+  0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+  0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+  0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+  0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+  0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+  0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+  0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+  0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+  0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+  0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+  0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+  0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+  0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+  0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+  0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+  0x4d, 0xd0, 0xc2, 0x6e, 0x86, 0x00
+};
+
+const uint8_t kMaskRandom42_28[168] = {
+  0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+  0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+  0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+  0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+  0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+  0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+  0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+  0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+  0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+  0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+  0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+  0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+  0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+  0x4d, 0xd0, 0xc2, 0x6e, 0x86, 0x00,
+  0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+  0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+  0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+  0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+  0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+  0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+  0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+  0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+  0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+  0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+  0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+  0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+  0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+  0xc3, 0x3c, 0x56, 0xc2, 0x30, 0x40
+};
+
+const uint8_t kMaskRandom42_29[174] = {
+  0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+  0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+  0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+  0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+  0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+  0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+  0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+  0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+  0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+  0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+  0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+  0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+  0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+  0x4d, 0xd0, 0xc2, 0x6e, 0x86, 0x00,
+  0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+  0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+  0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+  0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+  0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+  0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+  0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+  0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+  0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+  0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+  0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+  0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+  0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+  0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+  0x50, 0x45, 0x0a, 0x82, 0x28, 0x40
+};
+
+const uint8_t kMaskRandom42_3[18] = {
+  0xce, 0x32, 0xb6, 0x71, 0x95, 0x80,
+  0x55, 0xdc, 0x52, 0xae, 0xe2, 0x80,
+  0xa8, 0xed, 0x8d, 0x47, 0x6c, 0x40
+};
+
+const uint8_t kMaskRandom42_30[180] = {
+  0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+  0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+  0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+  0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+  0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+  0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+  0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+  0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+  0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+  0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+  0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+  0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+  0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+  0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+  0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+  0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+  0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+  0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+  0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+  0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+  0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+  0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+  0x0e, 0x19, 0x10, 0x70, 0xc8, 0x80,
+  0x33, 0x14, 0x51, 0x98, 0xa2, 0x80,
+  0x10, 0xc3, 0x28, 0x86, 0x19, 0x40,
+  0x45, 0x68, 0x4a, 0x2b, 0x42, 0x40,
+  0x88, 0x84, 0xac, 0x44, 0x25, 0x40,
+  0xe0, 0x22, 0x97, 0x01, 0x14, 0x80,
+  0x4d, 0xd0, 0xc2, 0x6e, 0x86, 0x00,
+  0xf5, 0xdd, 0x0d, 0x58, 0xeb, 0x00
+};
+
+const uint8_t kMaskRandom42_31[186] = {
+  0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+  0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+  0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+  0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+  0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+  0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+  0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+  0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+  0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+  0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+  0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+  0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+  0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+  0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+  0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+  0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+  0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+  0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+  0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+  0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+  0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+  0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+  0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+  0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+  0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+  0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+  0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+  0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+  0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+  0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+  0x3b, 0xf5, 0x39, 0xdf, 0xa9, 0xc0
+};
+
+const uint8_t kMaskRandom42_32[192] = {
+  0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+  0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+  0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+  0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+  0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+  0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+  0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+  0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+  0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+  0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+  0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+  0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+  0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+  0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+  0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+  0x3b, 0xf5, 0x39, 0xdf, 0xa9, 0xc0,
+  0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+  0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+  0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+  0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+  0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+  0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+  0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+  0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+  0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+  0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+  0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+  0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+  0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+  0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+  0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+  0xf9, 0x1f, 0xb6, 0xe1, 0x09, 0xc0
+};
+
+const uint8_t kMaskRandom42_33[198] = {
+  0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+  0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+  0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+  0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+  0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+  0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+  0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+  0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+  0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+  0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+  0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+  0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+  0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+  0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+  0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+  0x3b, 0xf5, 0x39, 0xdf, 0xa9, 0xc0,
+  0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+  0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+  0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+  0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+  0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+  0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+  0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+  0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+  0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+  0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+  0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+  0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+  0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+  0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+  0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+  0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+  0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00
+};
+
+const uint8_t kMaskRandom42_34[204] = {
+  0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+  0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+  0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+  0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+  0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+  0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+  0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+  0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+  0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+  0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+  0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+  0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+  0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+  0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+  0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+  0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+  0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+  0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+  0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+  0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+  0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+  0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+  0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+  0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+  0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+  0x46, 0x11, 0x92, 0x30, 0x8c, 0x80,
+  0x33, 0x0c, 0xc9, 0x98, 0x66, 0x40,
+  0x10, 0xe4, 0x60, 0x87, 0x23, 0x00,
+  0x0c, 0x69, 0x08, 0x63, 0x48, 0x40,
+  0x28, 0x94, 0x29, 0x44, 0xa1, 0x40,
+  0x94, 0x21, 0x34, 0xa1, 0x09, 0x80,
+  0xc1, 0x02, 0x5e, 0x08, 0x12, 0xc0,
+  0x3b, 0xf5, 0x39, 0xdf, 0xa9, 0xc0,
+  0xf8, 0xbf, 0xf6, 0x76, 0x1b, 0x80
+};
+
+const uint8_t kMaskRandom42_35[210] = {
+  0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+  0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+  0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+  0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+  0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+  0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+  0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+  0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+  0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+  0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+  0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+  0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+  0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+  0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+  0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+  0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+  0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+  0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+  0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+  0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+  0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+  0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+  0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+  0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+  0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+  0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+  0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+  0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+  0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+  0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+  0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+  0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+  0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+  0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+  0x5a, 0x56, 0x5a, 0xd2, 0xb2, 0xc0
+};
+
+const uint8_t kMaskRandom42_36[216] = {
+  0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+  0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+  0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+  0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+  0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+  0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+  0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+  0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+  0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+  0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+  0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+  0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+  0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+  0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+  0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+  0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+  0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+  0x5a, 0x56, 0x5a, 0xd2, 0xb2, 0xc0,
+  0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+  0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+  0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+  0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+  0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+  0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+  0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+  0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+  0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+  0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+  0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+  0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+  0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+  0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+  0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+  0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+  0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+  0x57, 0xc7, 0x03, 0xf9, 0xc6, 0x00
+};
+
+const uint8_t kMaskRandom42_37[222] = {
+  0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+  0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+  0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+  0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+  0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+  0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+  0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+  0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+  0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+  0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+  0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+  0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+  0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+  0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+  0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+  0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+  0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+  0x5a, 0x56, 0x5a, 0xd2, 0xb2, 0xc0,
+  0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+  0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+  0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+  0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+  0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+  0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+  0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+  0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+  0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+  0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+  0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+  0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+  0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+  0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+  0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+  0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+  0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+  0x30, 0x84, 0xa9, 0x84, 0x25, 0x40
+};
+
+const uint8_t kMaskRandom42_38[228] = {
+  0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+  0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+  0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+  0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+  0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+  0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+  0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+  0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+  0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+  0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+  0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+  0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+  0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+  0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+  0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+  0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+  0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+  0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+  0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+  0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+  0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+  0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+  0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+  0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+  0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+  0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+  0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+  0x2c, 0x03, 0x21, 0x60, 0x19, 0x00,
+  0x81, 0xa0, 0x1c, 0x0d, 0x00, 0xc0,
+  0xa0, 0x68, 0x25, 0x03, 0x41, 0x00,
+  0x05, 0x41, 0x50, 0x2a, 0x0a, 0x80,
+  0x18, 0x90, 0xc0, 0xc4, 0x86, 0x00,
+  0xc2, 0x06, 0x86, 0x10, 0x34, 0x00,
+  0x22, 0x98, 0x09, 0x14, 0xc0, 0x40,
+  0x50, 0x45, 0x0a, 0x82, 0x28, 0x40,
+  0x5a, 0x56, 0x5a, 0xd2, 0xb2, 0xc0,
+  0x05, 0x19, 0x55, 0xee, 0xe2, 0xc0
+};
+
+const uint8_t kMaskRandom42_39[234] = {
+  0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+  0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+  0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+  0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+  0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+  0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+  0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+  0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+  0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+  0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+  0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+  0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+  0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+  0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+  0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+  0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+  0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+  0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+  0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+  0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+  0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+  0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+  0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+  0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+  0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+  0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+  0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+  0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+  0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+  0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+  0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+  0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+  0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+  0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+  0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+  0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+  0x2a, 0x03, 0x31, 0x50, 0x19, 0x80
+};
+
+const uint8_t kMaskRandom42_4[24] = {
+  0xe6, 0x31, 0x37, 0x31, 0x89, 0x80,
+  0x33, 0x8c, 0x59, 0x9c, 0x62, 0xc0,
+  0x98, 0xd2, 0xcc, 0xc6, 0x96, 0x40,
+  0x2d, 0x4b, 0x29, 0x6a, 0x59, 0x40
+};
+
+const uint8_t kMaskRandom42_40[240] = {
+  0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+  0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+  0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+  0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+  0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+  0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+  0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+  0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+  0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+  0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+  0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+  0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+  0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+  0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+  0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+  0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+  0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+  0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+  0x2a, 0x03, 0x31, 0x50, 0x19, 0x80,
+  0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+  0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+  0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+  0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+  0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+  0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+  0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+  0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+  0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+  0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+  0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+  0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+  0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+  0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+  0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+  0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+  0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+  0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+  0xf9, 0xdb, 0x5d, 0x7a, 0xd4, 0x40
+};
+
+const uint8_t kMaskRandom42_41[246] = {
+  0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+  0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+  0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+  0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+  0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+  0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+  0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+  0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+  0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+  0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+  0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+  0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+  0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+  0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+  0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+  0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+  0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+  0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+  0x2a, 0x03, 0x31, 0x50, 0x19, 0x80,
+  0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+  0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+  0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+  0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+  0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+  0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+  0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+  0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+  0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+  0x4c, 0x11, 0x92, 0x60, 0x8c, 0x80,
+  0x51, 0x0c, 0xca, 0x88, 0x66, 0x40,
+  0xa0, 0x66, 0x45, 0x03, 0x32, 0x00,
+  0x04, 0xc1, 0x60, 0x26, 0x0b, 0x00,
+  0x03, 0xa0, 0x28, 0x1d, 0x01, 0x40,
+  0x86, 0x21, 0x14, 0x31, 0x08, 0x80,
+  0x29, 0x10, 0x19, 0x48, 0x80, 0xc0,
+  0x42, 0x42, 0xa2, 0x12, 0x15, 0x00,
+  0x98, 0x1a, 0x04, 0xc0, 0xd0, 0x00,
+  0x30, 0x84, 0x09, 0x84, 0x20, 0x40,
+  0xdf, 0x4c, 0x16, 0xfa, 0x60, 0x80
+};
+
+const uint8_t kMaskRandom42_42[252] = {
+  0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+  0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+  0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+  0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+  0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+  0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+  0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+  0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+  0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+  0x4c, 0x11, 0x92, 0x60, 0x8c, 0x80,
+  0x51, 0x0c, 0xca, 0x88, 0x66, 0x40,
+  0xa0, 0x66, 0x45, 0x03, 0x32, 0x00,
+  0x04, 0xc1, 0x60, 0x26, 0x0b, 0x00,
+  0x03, 0xa0, 0x28, 0x1d, 0x01, 0x40,
+  0x86, 0x21, 0x14, 0x31, 0x08, 0x80,
+  0x29, 0x10, 0x19, 0x48, 0x80, 0xc0,
+  0x42, 0x42, 0xa2, 0x12, 0x15, 0x00,
+  0x98, 0x1a, 0x04, 0xc0, 0xd0, 0x00,
+  0x30, 0x84, 0x09, 0x84, 0x20, 0x40,
+  0xdf, 0x4c, 0x16, 0xfa, 0x60, 0x80,
+  0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+  0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+  0xa0, 0x6a, 0x45, 0x03, 0x52, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x09, 0x80,
+  0x03, 0xb4, 0x00, 0x1d, 0xa0, 0x00,
+  0x86, 0x20, 0x94, 0x31, 0x04, 0x80,
+  0x29, 0x08, 0x49, 0x48, 0x42, 0x40,
+  0x42, 0x43, 0x0a, 0x12, 0x18, 0x40,
+  0x98, 0x12, 0x84, 0xc0, 0x94, 0x00,
+  0x30, 0x84, 0xa9, 0x84, 0x25, 0x40,
+  0x4e, 0x11, 0x92, 0x70, 0x8c, 0x80,
+  0xe3, 0x18, 0x9f, 0x18, 0xc4, 0xc0,
+  0x81, 0xe3, 0x04, 0x0f, 0x18, 0x00,
+  0x21, 0x40, 0x59, 0x0a, 0x02, 0xc0,
+  0x52, 0x81, 0xe2, 0x94, 0x0f, 0x00,
+  0xb4, 0x28, 0x25, 0xa1, 0x41, 0x00,
+  0x26, 0x86, 0x29, 0x34, 0x31, 0x40,
+  0x58, 0x64, 0x42, 0xc3, 0x22, 0x00,
+  0x19, 0x9e, 0x00, 0xcc, 0xf0, 0x00,
+  0x2a, 0x03, 0x31, 0x50, 0x19, 0x80,
+  0xea, 0x9e, 0x23, 0xb3, 0x65, 0x00
+};
+
+const uint8_t kMaskRandom42_5[30] = {
+  0xce, 0x31, 0xb6, 0x71, 0x8d, 0x80,
+  0x63, 0x98, 0xdb, 0x1c, 0xc6, 0xc0,
+  0x98, 0xc7, 0x6c, 0xc6, 0x3b, 0x40,
+  0x4d, 0x6b, 0x52, 0x6b, 0x5a, 0x80,
+  0xb2, 0x6c, 0xad, 0x93, 0x65, 0x40
+};
+
+const uint8_t kMaskRandom42_6[36] = {
+  0x4c, 0x19, 0x12, 0x60, 0xc8, 0x80,
+  0x51, 0x14, 0x52, 0x88, 0xa2, 0x80,
+  0x20, 0xea, 0x09, 0x07, 0x50, 0x40,
+  0x85, 0x41, 0x2c, 0x2a, 0x09, 0x40,
+  0x06, 0x80, 0xd8, 0x34, 0x06, 0xc0,
+  0x8a, 0x24, 0x34, 0x51, 0x21, 0x80
+};
+
+const uint8_t kMaskRandom42_7[42] = {
+  0xc6, 0x11, 0x96, 0x30, 0x8c, 0x80,
+  0x33, 0x04, 0xc9, 0x98, 0x26, 0x40,
+  0x18, 0x67, 0x40, 0xc3, 0x3a, 0x00,
+  0x45, 0x42, 0xd2, 0x2a, 0x16, 0x80,
+  0x12, 0xd4, 0x28, 0x96, 0xa1, 0x40,
+  0xb4, 0x28, 0x35, 0xa1, 0x41, 0x80,
+  0x29, 0x92, 0x19, 0x4c, 0x90, 0xc0
+};
+
+const uint8_t kMaskRandom42_8[48] = {
+  0x07, 0x0a, 0x70, 0x38, 0x53, 0x80,
+  0x49, 0xa8, 0x2a, 0x4d, 0x41, 0x40,
+  0xb0, 0x7a, 0x05, 0x83, 0xd0, 0x00,
+  0x24, 0xc5, 0xc1, 0x26, 0x2e, 0x00,
+  0x52, 0x80, 0xea, 0x94, 0x07, 0x40,
+  0xc6, 0x31, 0x86, 0x31, 0x8c, 0x00,
+  0x31, 0x94, 0x19, 0x8c, 0xa0, 0xc0,
+  0x18, 0xc7, 0x08, 0xc6, 0x38, 0x40
+};
+
+const uint8_t kMaskRandom42_9[54] = {
+  0x4e, 0x11, 0x12, 0x70, 0x88, 0x80,
+  0x62, 0x1a, 0x0b, 0x10, 0xd0, 0x40,
+  0x80, 0xe9, 0x44, 0x07, 0x4a, 0x00,
+  0xa1, 0x50, 0x55, 0x0a, 0x82, 0x80,
+  0x53, 0x00, 0x6a, 0x98, 0x03, 0x40,
+  0xa4, 0x24, 0x35, 0x21, 0x21, 0x80,
+  0x16, 0xa0, 0x88, 0xb5, 0x04, 0x40,
+  0x58, 0x45, 0x22, 0xc2, 0x29, 0x00,
+  0x29, 0x86, 0x81, 0x4c, 0x34, 0x00
+};
+
+const uint8_t kMaskRandom43_1[6] = {
+  0xff, 0xff, 0xff, 0xff, 0xff, 0xe0
+};
+
+const uint8_t kMaskRandom43_10[60] = {
+  0x4c, 0x19, 0x16, 0x01, 0xc4, 0x40,
+  0x51, 0x14, 0x51, 0x80, 0x71, 0x40,
+  0xa0, 0x6a, 0x47, 0x40, 0x38, 0x00,
+  0x04, 0xc1, 0x34, 0x28, 0x45, 0x40,
+  0x03, 0xb4, 0x06, 0x84, 0x90, 0x80,
+  0x86, 0x20, 0x94, 0x32, 0x82, 0x40,
+  0x29, 0x08, 0x4a, 0x53, 0x40, 0x60,
+  0x42, 0x43, 0x08, 0x0d, 0x03, 0xa0,
+  0x98, 0x12, 0x82, 0x64, 0x0c, 0x80,
+  0x30, 0x84, 0xab, 0x11, 0x20, 0x20
+};
+
+const uint8_t kMaskRandom43_11[66] = {
+  0xc6, 0x21, 0xa2, 0x32, 0x46, 0x40,
+  0x23, 0x88, 0xc9, 0x99, 0x33, 0x20,
+  0x1a, 0x45, 0x8c, 0xc8, 0x99, 0x00,
+  0x24, 0xd3, 0x08, 0x2c, 0x05, 0x80,
+  0x71, 0x10, 0x74, 0x05, 0x80, 0xa0,
+  0x0e, 0x19, 0x14, 0x22, 0x84, 0x40,
+  0x33, 0x14, 0x52, 0x03, 0x40, 0x60,
+  0x10, 0xc3, 0x28, 0x54, 0x0a, 0x80,
+  0x45, 0x68, 0x4b, 0x40, 0x68, 0x00,
+  0x88, 0x84, 0xa8, 0x81, 0x10, 0x20,
+  0xe0, 0x22, 0x91, 0x82, 0x30, 0x40
+};
+
+const uint8_t kMaskRandom43_12[72] = {
+  0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+  0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+  0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+  0xc6, 0x21, 0xa4, 0x34, 0x86, 0x80,
+  0x23, 0x88, 0xc9, 0x19, 0x23, 0x20,
+  0x1a, 0x45, 0x88, 0xb1, 0x16, 0x20,
+  0x24, 0xd3, 0x0a, 0x61, 0x4c, 0x20,
+  0x71, 0x10, 0x72, 0x0e, 0x41, 0xc0,
+  0xa0, 0x65, 0x1f, 0xa0, 0xc4, 0xe0
+};
+
+const uint8_t kMaskRandom43_13[78] = {
+  0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+  0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+  0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+  0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+  0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+  0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+  0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+  0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60
+};
+
+const uint8_t kMaskRandom43_14[84] = {
+  0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+  0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+  0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+  0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+  0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+  0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+  0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+  0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+  0x4d, 0xd0, 0xc6, 0x36, 0x57, 0x40
+};
+
+const uint8_t kMaskRandom43_15[90] = {
+  0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+  0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+  0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+  0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+  0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+  0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+  0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+  0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+  0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+  0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+  0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+  0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+  0x50, 0x45, 0x08, 0xa1, 0x14, 0x20
+};
+
+const uint8_t kMaskRandom43_16[96] = {
+  0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+  0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+  0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+  0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+  0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+  0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+  0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+  0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+  0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+  0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+  0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+  0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+  0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+  0x3b, 0xf5, 0x3c, 0x36, 0x0a, 0x20
+};
+
+const uint8_t kMaskRandom43_17[102] = {
+  0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+  0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+  0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+  0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+  0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+  0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+  0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+  0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+  0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+  0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+  0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+  0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+  0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+  0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+  0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+  0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00
+};
+
+const uint8_t kMaskRandom43_18[108] = {
+  0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+  0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+  0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+  0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+  0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+  0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+  0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+  0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+  0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+  0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+  0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+  0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+  0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+  0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+  0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+  0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+  0x5a, 0x56, 0x5f, 0x26, 0xa3, 0x60
+};
+
+const uint8_t kMaskRandom43_19[114] = {
+  0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+  0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+  0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+  0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+  0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+  0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+  0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+  0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+  0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+  0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+  0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+  0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+  0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+  0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+  0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0
+};
+
+const uint8_t kMaskRandom43_2[12] = {
+  0xee, 0x3b, 0x37, 0x66, 0xec, 0xc0,
+  0x99, 0xe6, 0xec, 0xdd, 0x9b, 0xa0
+};
+
+const uint8_t kMaskRandom43_20[120] = {
+  0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+  0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+  0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+  0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+  0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+  0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+  0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+  0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+  0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+  0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+  0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+  0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+  0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+  0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+  0x2a, 0x03, 0x31, 0xda, 0x46, 0x20
+};
+
+const uint8_t kMaskRandom43_21[126] = {
+  0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+  0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+  0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+  0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+  0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+  0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+  0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0x4c, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x51, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0xa0, 0x66, 0x44, 0xc8, 0x99, 0x00,
+  0x04, 0xc1, 0x60, 0x2c, 0x05, 0x80,
+  0x03, 0xa0, 0x2c, 0x05, 0x80, 0xa0,
+  0x86, 0x21, 0x14, 0x22, 0x84, 0x40,
+  0x29, 0x10, 0x1a, 0x03, 0x40, 0x60,
+  0x42, 0x42, 0xa0, 0x54, 0x0a, 0x80,
+  0x98, 0x1a, 0x03, 0x40, 0x68, 0x00,
+  0x30, 0x84, 0x08, 0x81, 0x10, 0x20,
+  0xdf, 0x4c, 0x11, 0x82, 0x30, 0x40
+};
+
+const uint8_t kMaskRandom43_22[132] = {
+  0xc6, 0x21, 0xa2, 0x32, 0x46, 0x40,
+  0x23, 0x88, 0xc9, 0x99, 0x33, 0x20,
+  0x1a, 0x45, 0x8c, 0xc8, 0x99, 0x00,
+  0x24, 0xd3, 0x08, 0x2c, 0x05, 0x80,
+  0x71, 0x10, 0x74, 0x05, 0x80, 0xa0,
+  0x0e, 0x19, 0x14, 0x22, 0x84, 0x40,
+  0x33, 0x14, 0x52, 0x03, 0x40, 0x60,
+  0x10, 0xc3, 0x28, 0x54, 0x0a, 0x80,
+  0x45, 0x68, 0x4b, 0x40, 0x68, 0x00,
+  0x88, 0x84, 0xa8, 0x81, 0x10, 0x20,
+  0xe0, 0x22, 0x91, 0x82, 0x30, 0x40,
+  0x4c, 0x19, 0x16, 0x01, 0xc4, 0x40,
+  0x51, 0x14, 0x51, 0x80, 0x71, 0x40,
+  0xa0, 0x6a, 0x47, 0x40, 0x38, 0x00,
+  0x04, 0xc1, 0x34, 0x28, 0x45, 0x40,
+  0x03, 0xb4, 0x06, 0x84, 0x90, 0x80,
+  0x86, 0x20, 0x94, 0x32, 0x82, 0x40,
+  0x29, 0x08, 0x4a, 0x53, 0x40, 0x60,
+  0x42, 0x43, 0x08, 0x0d, 0x03, 0xa0,
+  0x98, 0x12, 0x82, 0x64, 0x0c, 0x80,
+  0x30, 0x84, 0xab, 0x11, 0x20, 0x20,
+  0xfe, 0x2c, 0x85, 0xcc, 0x24, 0x80
+};
+
+const uint8_t kMaskRandom43_23[138] = {
+  0xc6, 0x21, 0xa2, 0x32, 0x46, 0x40,
+  0x23, 0x88, 0xc9, 0x99, 0x33, 0x20,
+  0x1a, 0x45, 0x8c, 0xc8, 0x99, 0x00,
+  0x24, 0xd3, 0x08, 0x2c, 0x05, 0x80,
+  0x71, 0x10, 0x74, 0x05, 0x80, 0xa0,
+  0x0e, 0x19, 0x14, 0x22, 0x84, 0x40,
+  0x33, 0x14, 0x52, 0x03, 0x40, 0x60,
+  0x10, 0xc3, 0x28, 0x54, 0x0a, 0x80,
+  0x45, 0x68, 0x4b, 0x40, 0x68, 0x00,
+  0x88, 0x84, 0xa8, 0x81, 0x10, 0x20,
+  0xe0, 0x22, 0x91, 0x82, 0x30, 0x40,
+  0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+  0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+  0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+  0xc6, 0x21, 0xa4, 0x34, 0x86, 0x80,
+  0x23, 0x88, 0xc9, 0x19, 0x23, 0x20,
+  0x1a, 0x45, 0x88, 0xb1, 0x16, 0x20,
+  0x24, 0xd3, 0x0a, 0x61, 0x4c, 0x20,
+  0x71, 0x10, 0x72, 0x0e, 0x41, 0xc0,
+  0xa0, 0x65, 0x1f, 0xa0, 0xc4, 0xe0
+};
+
+const uint8_t kMaskRandom43_24[144] = {
+  0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+  0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+  0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+  0xc6, 0x21, 0xa4, 0x34, 0x86, 0x80,
+  0x23, 0x88, 0xc9, 0x19, 0x23, 0x20,
+  0x1a, 0x45, 0x88, 0xb1, 0x16, 0x20,
+  0x24, 0xd3, 0x0a, 0x61, 0x4c, 0x20,
+  0x71, 0x10, 0x72, 0x0e, 0x41, 0xc0,
+  0xa0, 0x65, 0x1f, 0xa0, 0xc4, 0xe0,
+  0xc6, 0x21, 0xa2, 0x32, 0x46, 0x40,
+  0x23, 0x88, 0xc9, 0x99, 0x33, 0x20,
+  0x1a, 0x45, 0x8c, 0xc8, 0x99, 0x00,
+  0x24, 0xd3, 0x08, 0x2c, 0x05, 0x80,
+  0x71, 0x10, 0x74, 0x05, 0x80, 0xa0,
+  0x0e, 0x19, 0x14, 0x22, 0x84, 0x40,
+  0x33, 0x14, 0x52, 0x03, 0x40, 0x60,
+  0x10, 0xc3, 0x28, 0x54, 0x0a, 0x80,
+  0x45, 0x68, 0x4b, 0x40, 0x68, 0x00,
+  0x88, 0x84, 0xa8, 0x81, 0x10, 0x20,
+  0xe0, 0x22, 0x91, 0x82, 0x30, 0x40,
+  0xf9, 0xb1, 0x26, 0x6c, 0x51, 0xe0
+};
+
+const uint8_t kMaskRandom43_25[150] = {
+  0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+  0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+  0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+  0xc6, 0x21, 0xa4, 0x34, 0x86, 0x80,
+  0x23, 0x88, 0xc9, 0x19, 0x23, 0x20,
+  0x1a, 0x45, 0x88, 0xb1, 0x16, 0x20,
+  0x24, 0xd3, 0x0a, 0x61, 0x4c, 0x20,
+  0x71, 0x10, 0x72, 0x0e, 0x41, 0xc0,
+  0xa0, 0x65, 0x1f, 0xa0, 0xc4, 0xe0,
+  0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+  0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+  0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+  0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+  0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+  0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+  0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+  0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60
+};
+
+const uint8_t kMaskRandom43_26[156] = {
+  0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+  0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+  0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+  0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+  0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+  0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+  0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+  0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+  0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+  0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+  0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+  0xc6, 0x21, 0xa4, 0x34, 0x86, 0x80,
+  0x23, 0x88, 0xc9, 0x19, 0x23, 0x20,
+  0x1a, 0x45, 0x88, 0xb1, 0x16, 0x20,
+  0x24, 0xd3, 0x0a, 0x61, 0x4c, 0x20,
+  0x71, 0x10, 0x72, 0x0e, 0x41, 0xc0,
+  0xa0, 0x65, 0x1f, 0xa0, 0xc4, 0xe0,
+  0xef, 0x84, 0x77, 0xca, 0x0d, 0x40
+};
+
+const uint8_t kMaskRandom43_27[162] = {
+  0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+  0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+  0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+  0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+  0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+  0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+  0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+  0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+  0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+  0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+  0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+  0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+  0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+  0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+  0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+  0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+  0x4d, 0xd0, 0xc6, 0x36, 0x57, 0x40
+};
+
+const uint8_t kMaskRandom43_28[168] = {
+  0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+  0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+  0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+  0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+  0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+  0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+  0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+  0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+  0x4d, 0xd0, 0xc6, 0x36, 0x57, 0x40,
+  0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+  0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+  0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+  0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+  0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+  0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+  0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+  0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+  0x16, 0xc9, 0x53, 0x1e, 0xc4, 0x00
+};
+
+const uint8_t kMaskRandom43_29[174] = {
+  0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+  0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+  0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+  0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+  0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+  0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+  0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+  0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+  0x4d, 0xd0, 0xc6, 0x36, 0x57, 0x40,
+  0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+  0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+  0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+  0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+  0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+  0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+  0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+  0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+  0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+  0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+  0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+  0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+  0x50, 0x45, 0x08, 0xa1, 0x14, 0x20
+};
+
+const uint8_t kMaskRandom43_3[18] = {
+  0xce, 0x32, 0xb6, 0x56, 0xca, 0xc0,
+  0x55, 0xdc, 0x57, 0x8a, 0xf1, 0x40,
+  0xa8, 0xed, 0x8d, 0xb1, 0xae, 0x20
+};
+
+const uint8_t kMaskRandom43_30[180] = {
+  0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+  0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+  0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+  0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+  0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+  0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+  0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+  0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+  0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+  0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+  0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+  0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+  0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+  0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+  0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+  0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+  0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+  0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+  0x0e, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x33, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0x10, 0xc3, 0x28, 0x65, 0x0c, 0xa0,
+  0x45, 0x68, 0x4d, 0x09, 0xa1, 0x20,
+  0x88, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0xe0, 0x22, 0x94, 0x52, 0x8a, 0x40,
+  0x4d, 0xd0, 0xc6, 0x36, 0x57, 0x40,
+  0x79, 0x4a, 0x8f, 0x42, 0x79, 0x40
+};
+
+const uint8_t kMaskRandom43_31[186] = {
+  0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+  0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+  0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+  0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+  0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+  0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+  0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+  0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+  0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+  0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+  0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+  0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+  0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+  0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+  0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+  0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+  0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+  0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+  0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+  0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+  0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+  0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+  0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+  0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+  0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+  0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+  0x3b, 0xf5, 0x3c, 0x36, 0x0a, 0x20
+};
+
+const uint8_t kMaskRandom43_32[192] = {
+  0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+  0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+  0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+  0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+  0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+  0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+  0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+  0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+  0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+  0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+  0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+  0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+  0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+  0x3b, 0xf5, 0x3c, 0x36, 0x0a, 0x20,
+  0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+  0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+  0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+  0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+  0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+  0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+  0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+  0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+  0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+  0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+  0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+  0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+  0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+  0xd1, 0xd1, 0x11, 0xa4, 0xed, 0xc0
+};
+
+const uint8_t kMaskRandom43_33[198] = {
+  0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+  0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+  0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+  0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+  0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+  0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+  0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+  0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+  0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+  0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+  0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+  0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+  0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+  0x3b, 0xf5, 0x3c, 0x36, 0x0a, 0x20,
+  0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+  0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+  0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+  0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+  0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+  0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+  0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+  0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+  0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+  0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+  0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+  0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+  0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+  0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+  0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+  0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00
+};
+
+const uint8_t kMaskRandom43_34[204] = {
+  0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+  0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+  0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+  0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+  0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+  0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+  0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+  0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+  0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+  0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+  0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+  0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+  0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+  0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+  0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+  0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+  0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+  0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+  0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+  0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+  0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+  0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+  0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+  0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+  0x46, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x33, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0x10, 0xe4, 0x64, 0x8c, 0x91, 0x80,
+  0x0c, 0x69, 0x0d, 0x21, 0xa4, 0x20,
+  0x28, 0x94, 0x2a, 0x85, 0x50, 0xa0,
+  0x94, 0x21, 0x34, 0x26, 0x84, 0xc0,
+  0xc1, 0x02, 0x58, 0x4b, 0x09, 0x60,
+  0x3b, 0xf5, 0x3c, 0x36, 0x0a, 0x20,
+  0x76, 0x81, 0x4d, 0x33, 0x66, 0x00
+};
+
+const uint8_t kMaskRandom43_35[210] = {
+  0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+  0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+  0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+  0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+  0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+  0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+  0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+  0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+  0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+  0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+  0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+  0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+  0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+  0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+  0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+  0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+  0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+  0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+  0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+  0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+  0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+  0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+  0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+  0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+  0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+  0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+  0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+  0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+  0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+  0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+  0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+  0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+  0x5a, 0x56, 0x5f, 0x26, 0xa3, 0x60
+};
+
+const uint8_t kMaskRandom43_36[216] = {
+  0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+  0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+  0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+  0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+  0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+  0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+  0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+  0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+  0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+  0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+  0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+  0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+  0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+  0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+  0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+  0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+  0x5a, 0x56, 0x5f, 0x26, 0xa3, 0x60,
+  0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+  0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+  0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+  0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+  0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+  0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+  0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+  0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+  0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+  0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+  0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+  0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+  0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+  0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+  0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+  0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+  0xa3, 0x85, 0x0a, 0xb5, 0x11, 0x60
+};
+
+const uint8_t kMaskRandom43_37[222] = {
+  0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+  0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+  0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+  0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+  0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+  0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+  0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+  0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+  0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+  0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+  0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+  0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+  0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+  0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+  0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+  0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+  0x5a, 0x56, 0x5f, 0x26, 0xa3, 0x60,
+  0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+  0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+  0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+  0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+  0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+  0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+  0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+  0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+  0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+  0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+  0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+  0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+  0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+  0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+  0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0
+};
+
+const uint8_t kMaskRandom43_38[228] = {
+  0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+  0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+  0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+  0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+  0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+  0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+  0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+  0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+  0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+  0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+  0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+  0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+  0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+  0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+  0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+  0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+  0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+  0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+  0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+  0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+  0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+  0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+  0x2c, 0x03, 0x20, 0x64, 0x0c, 0x80,
+  0x81, 0xa0, 0x1c, 0x03, 0x80, 0x60,
+  0xa0, 0x68, 0x25, 0x04, 0xa0, 0x80,
+  0x05, 0x41, 0x50, 0x2a, 0x05, 0x40,
+  0x18, 0x90, 0xc2, 0x18, 0x43, 0x00,
+  0xc2, 0x06, 0x80, 0xd0, 0x1a, 0x00,
+  0x22, 0x98, 0x0b, 0x01, 0x60, 0x20,
+  0x50, 0x45, 0x08, 0xa1, 0x14, 0x20,
+  0x5a, 0x56, 0x5f, 0x26, 0xa3, 0x60,
+  0x9a, 0x16, 0x97, 0x21, 0xb9, 0x80
+};
+
+const uint8_t kMaskRandom43_39[234] = {
+  0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+  0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+  0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+  0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+  0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+  0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+  0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+  0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+  0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+  0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+  0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+  0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+  0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+  0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+  0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+  0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+  0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+  0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+  0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+  0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+  0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+  0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+  0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+  0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+  0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+  0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+  0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+  0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+  0x2a, 0x03, 0x31, 0xda, 0x46, 0x20
+};
+
+const uint8_t kMaskRandom43_4[24] = {
+  0xe6, 0x31, 0x36, 0x26, 0xc4, 0xc0,
+  0x33, 0x8c, 0x59, 0x8b, 0x31, 0x60,
+  0x98, 0xd2, 0xca, 0x59, 0x4b, 0x20,
+  0x2d, 0x4b, 0x29, 0x65, 0x2c, 0xa0
+};
+
+const uint8_t kMaskRandom43_40[240] = {
+  0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+  0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+  0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+  0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+  0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+  0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+  0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+  0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+  0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+  0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+  0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+  0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+  0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+  0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+  0x2a, 0x03, 0x31, 0xda, 0x46, 0x20,
+  0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+  0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+  0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+  0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+  0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+  0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+  0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+  0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+  0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+  0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+  0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+  0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+  0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+  0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+  0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0x3a, 0xab, 0x77, 0x63, 0xef, 0x60
+};
+
+const uint8_t kMaskRandom43_41[246] = {
+  0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+  0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+  0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+  0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+  0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+  0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+  0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+  0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+  0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+  0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+  0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+  0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+  0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+  0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+  0x2a, 0x03, 0x31, 0xda, 0x46, 0x20,
+  0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+  0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+  0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+  0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+  0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+  0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+  0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0x4c, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x51, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0xa0, 0x66, 0x44, 0xc8, 0x99, 0x00,
+  0x04, 0xc1, 0x60, 0x2c, 0x05, 0x80,
+  0x03, 0xa0, 0x2c, 0x05, 0x80, 0xa0,
+  0x86, 0x21, 0x14, 0x22, 0x84, 0x40,
+  0x29, 0x10, 0x1a, 0x03, 0x40, 0x60,
+  0x42, 0x42, 0xa0, 0x54, 0x0a, 0x80,
+  0x98, 0x1a, 0x03, 0x40, 0x68, 0x00,
+  0x30, 0x84, 0x08, 0x81, 0x10, 0x20,
+  0xdf, 0x4c, 0x11, 0x82, 0x30, 0x40
+};
+
+const uint8_t kMaskRandom43_42[252] = {
+  0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+  0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+  0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+  0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+  0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+  0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+  0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0x4c, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x51, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0xa0, 0x66, 0x44, 0xc8, 0x99, 0x00,
+  0x04, 0xc1, 0x60, 0x2c, 0x05, 0x80,
+  0x03, 0xa0, 0x2c, 0x05, 0x80, 0xa0,
+  0x86, 0x21, 0x14, 0x22, 0x84, 0x40,
+  0x29, 0x10, 0x1a, 0x03, 0x40, 0x60,
+  0x42, 0x42, 0xa0, 0x54, 0x0a, 0x80,
+  0x98, 0x1a, 0x03, 0x40, 0x68, 0x00,
+  0x30, 0x84, 0x08, 0x81, 0x10, 0x20,
+  0xdf, 0x4c, 0x11, 0x82, 0x30, 0x40,
+  0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+  0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+  0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+  0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+  0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+  0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+  0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0x4e, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0xe3, 0x18, 0x9b, 0x13, 0x62, 0x60,
+  0x81, 0xe3, 0x04, 0x60, 0x8c, 0x00,
+  0x21, 0x40, 0x58, 0x0b, 0x01, 0x60,
+  0x52, 0x81, 0xe0, 0x3c, 0x07, 0x80,
+  0xb4, 0x28, 0x25, 0x04, 0xa0, 0x80,
+  0x26, 0x86, 0x28, 0xc5, 0x18, 0xa0,
+  0x58, 0x64, 0x44, 0x88, 0x91, 0x00,
+  0x19, 0x9e, 0x03, 0xc0, 0x78, 0x00,
+  0x2a, 0x03, 0x31, 0xda, 0x46, 0x20,
+  0x26, 0x84, 0x10, 0xcd, 0xf7, 0x60
+};
+
+const uint8_t kMaskRandom43_43[258] = {
+  0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0xa0, 0x6a, 0x45, 0x48, 0xa9, 0x00,
+  0x04, 0xc1, 0x30, 0x26, 0x04, 0xc0,
+  0x03, 0xb4, 0x06, 0x80, 0xd0, 0x00,
+  0x86, 0x20, 0x94, 0x12, 0x82, 0x40,
+  0x29, 0x08, 0x49, 0x09, 0x21, 0x20,
+  0x42, 0x43, 0x08, 0x61, 0x0c, 0x20,
+  0x98, 0x12, 0x82, 0x50, 0x4a, 0x00,
+  0x30, 0x84, 0xa8, 0x95, 0x12, 0xa0,
+  0x4c, 0x11, 0x92, 0x32, 0x46, 0x40,
+  0x51, 0x0c, 0xc9, 0x99, 0x33, 0x20,
+  0xa0, 0x66, 0x44, 0xc8, 0x99, 0x00,
+  0x04, 0xc1, 0x60, 0x2c, 0x05, 0x80,
+  0x03, 0xa0, 0x2c, 0x05, 0x80, 0xa0,
+  0x86, 0x21, 0x14, 0x22, 0x84, 0x40,
+  0x29, 0x10, 0x1a, 0x03, 0x40, 0x60,
+  0x42, 0x42, 0xa0, 0x54, 0x0a, 0x80,
+  0x98, 0x1a, 0x03, 0x40, 0x68, 0x00,
+  0x30, 0x84, 0x08, 0x81, 0x10, 0x20,
+  0xdf, 0x4c, 0x11, 0x82, 0x30, 0x40,
+  0x4c, 0x19, 0x12, 0x32, 0x46, 0x40,
+  0x51, 0x14, 0x51, 0x99, 0x33, 0x20,
+  0xa0, 0x6a, 0x44, 0xc8, 0x99, 0x00,
+  0x04, 0xc1, 0x30, 0x2c, 0x05, 0x80,
+  0x03, 0xb4, 0x04, 0x05, 0x80, 0xa0,
+  0x86, 0x20, 0x94, 0x22, 0x84, 0x40,
+  0x29, 0x08, 0x4a, 0x03, 0x40, 0x60,
+  0x42, 0x43, 0x08, 0x54, 0x0a, 0x80,
+  0x98, 0x12, 0x83, 0x40, 0x68, 0x00,
+  0x30, 0x84, 0xa8, 0x81, 0x10, 0x20,
+  0x4c, 0x11, 0x91, 0x82, 0x30, 0x40,
+  0x51, 0x0c, 0xcb, 0x22, 0x64, 0x40,
+  0xa0, 0x66, 0x42, 0x8a, 0x51, 0x40,
+  0x04, 0xc1, 0x65, 0x48, 0xa9, 0x00,
+  0x03, 0xa0, 0x28, 0x26, 0x04, 0xc0,
+  0x86, 0x21, 0x16, 0x80, 0xd0, 0x00,
+  0x29, 0x10, 0x1c, 0x12, 0x82, 0x40,
+  0x42, 0x42, 0xa1, 0x09, 0x21, 0x20,
+  0x98, 0x1a, 0x00, 0x61, 0x0c, 0x20,
+  0x30, 0x84, 0x0a, 0x50, 0x4a, 0x00,
+  0xdf, 0x4c, 0x10, 0x95, 0x12, 0xa0,
+  0x72, 0x06, 0x94, 0xf6, 0x74, 0x40
+};
+
+const uint8_t kMaskRandom43_5[30] = {
+  0xce, 0x31, 0xb6, 0x36, 0xc6, 0xc0,
+  0x63, 0x98, 0xdb, 0x1b, 0x63, 0x60,
+  0x98, 0xc7, 0x68, 0xed, 0x1d, 0xa0,
+  0x4d, 0x6b, 0x55, 0x6a, 0xad, 0x40,
+  0xb2, 0x6c, 0xad, 0x95, 0xb2, 0xa0
+};
+
+const uint8_t kMaskRandom43_6[36] = {
+  0x4c, 0x19, 0x13, 0x22, 0x64, 0x40,
+  0x51, 0x14, 0x52, 0x8a, 0x51, 0x40,
+  0x20, 0xea, 0x0d, 0x41, 0xa8, 0x20,
+  0x85, 0x41, 0x2e, 0x25, 0x04, 0xa0,
+  0x06, 0x80, 0xd8, 0x1b, 0x03, 0x60,
+  0x8a, 0x24, 0x34, 0x86, 0x90, 0xc0
+};
+
+const uint8_t kMaskRandom43_7[42] = {
+  0xc6, 0x11, 0x96, 0x32, 0x46, 0x40,
+  0x33, 0x04, 0xc8, 0x99, 0x33, 0x20,
+  0x18, 0x67, 0x44, 0x68, 0x9d, 0x00,
+  0x45, 0x42, 0xd4, 0x5a, 0x0b, 0x40,
+  0x12, 0xd4, 0x2a, 0x95, 0x50, 0xa0,
+  0xb4, 0x28, 0x35, 0x16, 0xa0, 0xc0,
+  0x29, 0x92, 0x1b, 0x0d, 0x41, 0x60
+};
+
+const uint8_t kMaskRandom43_8[48] = {
+  0x07, 0x0a, 0x71, 0x44, 0x29, 0xc0,
+  0x49, 0xa8, 0x29, 0x0f, 0xa0, 0x20,
+  0xb0, 0x7a, 0x07, 0x48, 0xe8, 0x00,
+  0x24, 0xc5, 0xc0, 0xb8, 0x17, 0x00,
+  0x52, 0x80, 0xec, 0x1d, 0x02, 0xa0,
+  0xc6, 0x31, 0x82, 0x30, 0xc7, 0x40,
+  0x31, 0x94, 0x1a, 0x83, 0x50, 0x60,
+  0x18, 0xc7, 0x08, 0xe1, 0x1c, 0x20
+};
+
+const uint8_t kMaskRandom43_9[54] = {
+  0x4e, 0x11, 0x12, 0x22, 0x46, 0x40,
+  0x62, 0x1a, 0x09, 0x41, 0x68, 0x60,
+  0x80, 0xe9, 0x41, 0x28, 0xa5, 0x00,
+  0xa1, 0x50, 0x52, 0xc8, 0x51, 0x00,
+  0x53, 0x00, 0x68, 0x1d, 0x01, 0xa0,
+  0xa4, 0x24, 0x36, 0x06, 0x80, 0xc0,
+  0x16, 0xa0, 0x8d, 0x11, 0x82, 0x20,
+  0x58, 0x45, 0x20, 0xa4, 0x16, 0x80,
+  0x29, 0x86, 0x84, 0xd0, 0x1c, 0x00
+};
+
+const uint8_t kMaskRandom44_1[6] = {
+  0xff, 0xff, 0xff, 0xff, 0xff, 0xf0
+};
+
+const uint8_t kMaskRandom44_10[60] = {
+  0xc0, 0x38, 0x8b, 0x00, 0xe2, 0x20,
+  0x30, 0x0e, 0x28, 0xc0, 0x38, 0xa0,
+  0xe8, 0x07, 0x03, 0xa0, 0x1c, 0x00,
+  0x85, 0x08, 0xaa, 0x14, 0x22, 0xa0,
+  0xd0, 0x92, 0x13, 0x42, 0x48, 0x40,
+  0x86, 0x50, 0x4a, 0x19, 0x41, 0x20,
+  0x4a, 0x68, 0x0d, 0x29, 0xa0, 0x30,
+  0x01, 0xa0, 0x74, 0x06, 0x81, 0xd0,
+  0x4c, 0x81, 0x91, 0x32, 0x06, 0x40,
+  0x62, 0x24, 0x05, 0x88, 0x90, 0x10
+};
+
+const uint8_t kMaskRandom44_11[66] = {
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x99, 0x13, 0x22, 0x64, 0x4c, 0x80,
+  0x05, 0x80, 0xb0, 0x16, 0x02, 0xc0,
+  0x80, 0xb0, 0x16, 0x02, 0xc0, 0x50,
+  0x84, 0x50, 0x8a, 0x11, 0x42, 0x20,
+  0x40, 0x68, 0x0d, 0x01, 0xa0, 0x30,
+  0x0a, 0x81, 0x50, 0x2a, 0x05, 0x40,
+  0x68, 0x0d, 0x01, 0xa0, 0x34, 0x00,
+  0x10, 0x22, 0x04, 0x40, 0x88, 0x10,
+  0x30, 0x46, 0x08, 0xc1, 0x18, 0x20
+};
+
+const uint8_t kMaskRandom44_12[72] = {
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+  0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+  0x86, 0x90, 0xd2, 0x1a, 0x43, 0x40,
+  0x23, 0x24, 0x64, 0x8c, 0x91, 0x90,
+  0x16, 0x22, 0xc4, 0x58, 0x8b, 0x10,
+  0x4c, 0x29, 0x85, 0x30, 0xa6, 0x10,
+  0x41, 0xc8, 0x39, 0x07, 0x20, 0xe0,
+  0xf4, 0x18, 0x9f, 0xd0, 0x62, 0x70
+};
+
+const uint8_t kMaskRandom44_13[78] = {
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+  0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+  0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+  0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+  0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0
+};
+
+const uint8_t kMaskRandom44_14[84] = {
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+  0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+  0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+  0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+  0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+  0xc6, 0xca, 0xeb, 0x1b, 0x2b, 0xa0
+};
+
+const uint8_t kMaskRandom44_15[90] = {
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+  0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+  0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+  0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+  0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+  0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+  0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+  0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+  0x14, 0x22, 0x84, 0x50, 0x8a, 0x10
+};
+
+const uint8_t kMaskRandom44_16[96] = {
+  0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+  0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+  0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+  0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+  0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+  0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+  0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+  0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+  0x86, 0xc1, 0x46, 0x1b, 0x05, 0x10
+};
+
+const uint8_t kMaskRandom44_17[102] = {
+  0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+  0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+  0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+  0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+  0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+  0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+  0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+  0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+  0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+  0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00
+};
+
+const uint8_t kMaskRandom44_18[108] = {
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+  0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+  0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+  0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+  0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+  0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+  0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+  0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+  0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+  0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+  0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+  0xe4, 0xd4, 0x6f, 0x93, 0x51, 0xb0
+};
+
+const uint8_t kMaskRandom44_19[114] = {
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+  0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+  0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+  0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+  0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+  0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+  0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+  0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+  0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+  0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+  0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+  0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50
+};
+
+const uint8_t kMaskRandom44_2[12] = {
+  0xec, 0xdd, 0x9b, 0xb3, 0x76, 0x60,
+  0x9b, 0xb3, 0x76, 0x6e, 0xcd, 0xd0
+};
+
+const uint8_t kMaskRandom44_20[120] = {
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+  0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+  0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+  0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+  0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+  0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+  0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+  0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+  0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+  0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+  0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+  0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+  0x3b, 0x48, 0xc4, 0xed, 0x23, 0x10
+};
+
+const uint8_t kMaskRandom44_21[126] = {
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+  0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+  0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+  0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+  0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+  0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+  0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x99, 0x13, 0x22, 0x64, 0x4c, 0x80,
+  0x05, 0x80, 0xb0, 0x16, 0x02, 0xc0,
+  0x80, 0xb0, 0x16, 0x02, 0xc0, 0x50,
+  0x84, 0x50, 0x8a, 0x11, 0x42, 0x20,
+  0x40, 0x68, 0x0d, 0x01, 0xa0, 0x30,
+  0x0a, 0x81, 0x50, 0x2a, 0x05, 0x40,
+  0x68, 0x0d, 0x01, 0xa0, 0x34, 0x00,
+  0x10, 0x22, 0x04, 0x40, 0x88, 0x10,
+  0x30, 0x46, 0x08, 0xc1, 0x18, 0x20
+};
+
+const uint8_t kMaskRandom44_22[132] = {
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x99, 0x13, 0x22, 0x64, 0x4c, 0x80,
+  0x05, 0x80, 0xb0, 0x16, 0x02, 0xc0,
+  0x80, 0xb0, 0x16, 0x02, 0xc0, 0x50,
+  0x84, 0x50, 0x8a, 0x11, 0x42, 0x20,
+  0x40, 0x68, 0x0d, 0x01, 0xa0, 0x30,
+  0x0a, 0x81, 0x50, 0x2a, 0x05, 0x40,
+  0x68, 0x0d, 0x01, 0xa0, 0x34, 0x00,
+  0x10, 0x22, 0x04, 0x40, 0x88, 0x10,
+  0x30, 0x46, 0x08, 0xc1, 0x18, 0x20,
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+  0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+  0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+  0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+  0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+  0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+  0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x9e, 0xce, 0x8a, 0x7b, 0x3a, 0x20
+};
+
+const uint8_t kMaskRandom44_23[138] = {
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x99, 0x13, 0x22, 0x64, 0x4c, 0x80,
+  0x05, 0x80, 0xb0, 0x16, 0x02, 0xc0,
+  0x80, 0xb0, 0x16, 0x02, 0xc0, 0x50,
+  0x84, 0x50, 0x8a, 0x11, 0x42, 0x20,
+  0x40, 0x68, 0x0d, 0x01, 0xa0, 0x30,
+  0x0a, 0x81, 0x50, 0x2a, 0x05, 0x40,
+  0x68, 0x0d, 0x01, 0xa0, 0x34, 0x00,
+  0x10, 0x22, 0x04, 0x40, 0x88, 0x10,
+  0x30, 0x46, 0x08, 0xc1, 0x18, 0x20,
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+  0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+  0x86, 0x90, 0xd2, 0x1a, 0x43, 0x40,
+  0x23, 0x24, 0x64, 0x8c, 0x91, 0x90,
+  0x16, 0x22, 0xc4, 0x58, 0x8b, 0x10,
+  0x4c, 0x29, 0x85, 0x30, 0xa6, 0x10,
+  0x41, 0xc8, 0x39, 0x07, 0x20, 0xe0,
+  0xf4, 0x18, 0x9f, 0xd0, 0x62, 0x70
+};
+
+const uint8_t kMaskRandom44_24[144] = {
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+  0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+  0x86, 0x90, 0xd2, 0x1a, 0x43, 0x40,
+  0x23, 0x24, 0x64, 0x8c, 0x91, 0x90,
+  0x16, 0x22, 0xc4, 0x58, 0x8b, 0x10,
+  0x4c, 0x29, 0x85, 0x30, 0xa6, 0x10,
+  0x41, 0xc8, 0x39, 0x07, 0x20, 0xe0,
+  0xf4, 0x18, 0x9f, 0xd0, 0x62, 0x70,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x99, 0x13, 0x22, 0x64, 0x4c, 0x80,
+  0x05, 0x80, 0xb0, 0x16, 0x02, 0xc0,
+  0x80, 0xb0, 0x16, 0x02, 0xc0, 0x50,
+  0x84, 0x50, 0x8a, 0x11, 0x42, 0x20,
+  0x40, 0x68, 0x0d, 0x01, 0xa0, 0x30,
+  0x0a, 0x81, 0x50, 0x2a, 0x05, 0x40,
+  0x68, 0x0d, 0x01, 0xa0, 0x34, 0x00,
+  0x10, 0x22, 0x04, 0x40, 0x88, 0x10,
+  0x30, 0x46, 0x08, 0xc1, 0x18, 0x20,
+  0x15, 0x0f, 0x44, 0x6d, 0x9d, 0xa0
+};
+
+const uint8_t kMaskRandom44_25[150] = {
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+  0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+  0x86, 0x90, 0xd2, 0x1a, 0x43, 0x40,
+  0x23, 0x24, 0x64, 0x8c, 0x91, 0x90,
+  0x16, 0x22, 0xc4, 0x58, 0x8b, 0x10,
+  0x4c, 0x29, 0x85, 0x30, 0xa6, 0x10,
+  0x41, 0xc8, 0x39, 0x07, 0x20, 0xe0,
+  0xf4, 0x18, 0x9f, 0xd0, 0x62, 0x70,
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+  0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+  0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+  0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+  0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0
+};
+
+const uint8_t kMaskRandom44_26[156] = {
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+  0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+  0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+  0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+  0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+  0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+  0x86, 0x90, 0xd2, 0x1a, 0x43, 0x40,
+  0x23, 0x24, 0x64, 0x8c, 0x91, 0x90,
+  0x16, 0x22, 0xc4, 0x58, 0x8b, 0x10,
+  0x4c, 0x29, 0x85, 0x30, 0xa6, 0x10,
+  0x41, 0xc8, 0x39, 0x07, 0x20, 0xe0,
+  0xf4, 0x18, 0x9f, 0xd0, 0x62, 0x70,
+  0x02, 0xcb, 0x64, 0xb8, 0x55, 0x80
+};
+
+const uint8_t kMaskRandom44_27[162] = {
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+  0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+  0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+  0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+  0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+  0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+  0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+  0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+  0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+  0xc6, 0xca, 0xeb, 0x1b, 0x2b, 0xa0
+};
+
+const uint8_t kMaskRandom44_28[168] = {
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+  0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+  0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+  0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+  0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+  0xc6, 0xca, 0xeb, 0x1b, 0x2b, 0xa0,
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+  0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+  0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+  0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+  0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+  0x66, 0x26, 0x6c, 0x91, 0xc7, 0x20
+};
+
+const uint8_t kMaskRandom44_29[174] = {
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+  0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+  0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+  0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+  0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+  0xc6, 0xca, 0xeb, 0x1b, 0x2b, 0xa0,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+  0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+  0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+  0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+  0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+  0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+  0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+  0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+  0x14, 0x22, 0x84, 0x50, 0x8a, 0x10
+};
+
+const uint8_t kMaskRandom44_3[18] = {
+  0xca, 0xd9, 0x5b, 0x2b, 0x65, 0x60,
+  0xf1, 0x5e, 0x2b, 0xc5, 0x78, 0xa0,
+  0xb6, 0x35, 0xc6, 0xd8, 0xd7, 0x10
+};
+
+const uint8_t kMaskRandom44_30[180] = {
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+  0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+  0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+  0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+  0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+  0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+  0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+  0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+  0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+  0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+  0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+  0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0x0c, 0xa1, 0x94, 0x32, 0x86, 0x50,
+  0xa1, 0x34, 0x26, 0x84, 0xd0, 0x90,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x8a, 0x51, 0x4a, 0x29, 0x45, 0x20,
+  0xc6, 0xca, 0xeb, 0x1b, 0x2b, 0xa0,
+  0x60, 0xf4, 0x75, 0x84, 0x90, 0xc0
+};
+
+const uint8_t kMaskRandom44_31[186] = {
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+  0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+  0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+  0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+  0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+  0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+  0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+  0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+  0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+  0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+  0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+  0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+  0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+  0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+  0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+  0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+  0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+  0x86, 0xc1, 0x46, 0x1b, 0x05, 0x10
+};
+
+const uint8_t kMaskRandom44_32[192] = {
+  0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+  0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+  0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+  0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+  0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+  0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+  0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+  0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+  0x86, 0xc1, 0x46, 0x1b, 0x05, 0x10,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+  0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+  0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+  0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+  0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+  0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+  0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+  0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+  0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+  0x3e, 0x39, 0x86, 0x5c, 0xd9, 0xd0
+};
+
+const uint8_t kMaskRandom44_33[198] = {
+  0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+  0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+  0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+  0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+  0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+  0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+  0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+  0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+  0x86, 0xc1, 0x46, 0x1b, 0x05, 0x10,
+  0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+  0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+  0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+  0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+  0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+  0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+  0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+  0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+  0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+  0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00
+};
+
+const uint8_t kMaskRandom44_34[204] = {
+  0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+  0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+  0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+  0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+  0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+  0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+  0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+  0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+  0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+  0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+  0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+  0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+  0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+  0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+  0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0xc0,
+  0xa4, 0x34, 0x86, 0x90, 0xd2, 0x10,
+  0x50, 0xaa, 0x15, 0x42, 0xa8, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x42, 0x60,
+  0x09, 0x61, 0x2c, 0x25, 0x84, 0xb0,
+  0x86, 0xc1, 0x46, 0x1b, 0x05, 0x10,
+  0xb5, 0xc7, 0xe8, 0x0c, 0xb9, 0x90
+};
+
+const uint8_t kMaskRandom44_35[210] = {
+  0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+  0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+  0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+  0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+  0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+  0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+  0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+  0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+  0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+  0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+  0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+  0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+  0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+  0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+  0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+  0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+  0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+  0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+  0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+  0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+  0xe4, 0xd4, 0x6f, 0x93, 0x51, 0xb0
+};
+
+const uint8_t kMaskRandom44_36[216] = {
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+  0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+  0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+  0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+  0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+  0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+  0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+  0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+  0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+  0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+  0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+  0xe4, 0xd4, 0x6f, 0x93, 0x51, 0xb0,
+  0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+  0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+  0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+  0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+  0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+  0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+  0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+  0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+  0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+  0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+  0xa6, 0x92, 0x01, 0x65, 0x91, 0x20
+};
+
+const uint8_t kMaskRandom44_37[222] = {
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+  0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+  0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+  0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+  0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+  0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+  0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+  0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+  0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+  0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+  0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+  0xe4, 0xd4, 0x6f, 0x93, 0x51, 0xb0,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+  0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+  0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+  0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+  0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+  0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+  0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+  0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+  0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+  0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+  0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+  0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50
+};
+
+const uint8_t kMaskRandom44_38[228] = {
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+  0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+  0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+  0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+  0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+  0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+  0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+  0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+  0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+  0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+  0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+  0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+  0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+  0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+  0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+  0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+  0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+  0x0c, 0x81, 0x90, 0x32, 0x06, 0x40,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x30,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x05, 0x40, 0xa8, 0x15, 0x02, 0xa0,
+  0x43, 0x08, 0x61, 0x0c, 0x21, 0x80,
+  0x1a, 0x03, 0x40, 0x68, 0x0d, 0x00,
+  0x60, 0x2c, 0x05, 0x80, 0xb0, 0x10,
+  0x14, 0x22, 0x84, 0x50, 0x8a, 0x10,
+  0xe4, 0xd4, 0x6f, 0x93, 0x51, 0xb0,
+  0x43, 0x64, 0xf2, 0xe5, 0x5d, 0x10
+};
+
+const uint8_t kMaskRandom44_39[234] = {
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+  0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+  0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+  0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+  0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+  0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+  0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+  0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+  0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+  0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+  0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+  0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+  0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+  0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+  0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+  0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+  0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+  0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+  0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+  0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+  0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+  0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+  0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+  0x3b, 0x48, 0xc4, 0xed, 0x23, 0x10
+};
+
+const uint8_t kMaskRandom44_4[24] = {
+  0xc4, 0xd8, 0x9b, 0x13, 0x62, 0x60,
+  0x31, 0x66, 0x2c, 0xc5, 0x98, 0xb0,
+  0x4b, 0x29, 0x65, 0x2c, 0xa5, 0x90,
+  0x2c, 0xa5, 0x94, 0xb2, 0x96, 0x50
+};
+
+const uint8_t kMaskRandom44_40[240] = {
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+  0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+  0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+  0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+  0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+  0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+  0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+  0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+  0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+  0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+  0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+  0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+  0x3b, 0x48, 0xc4, 0xed, 0x23, 0x10,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+  0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+  0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+  0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+  0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+  0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+  0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+  0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+  0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+  0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+  0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+  0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0xd8, 0x2a, 0x16, 0x26, 0x51, 0x40
+};
+
+const uint8_t kMaskRandom44_41[246] = {
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+  0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+  0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+  0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+  0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+  0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+  0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+  0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+  0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+  0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+  0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+  0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+  0x3b, 0x48, 0xc4, 0xed, 0x23, 0x10,
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+  0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+  0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+  0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+  0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+  0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+  0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x99, 0x13, 0x22, 0x64, 0x4c, 0x80,
+  0x05, 0x80, 0xb0, 0x16, 0x02, 0xc0,
+  0x80, 0xb0, 0x16, 0x02, 0xc0, 0x50,
+  0x84, 0x50, 0x8a, 0x11, 0x42, 0x20,
+  0x40, 0x68, 0x0d, 0x01, 0xa0, 0x30,
+  0x0a, 0x81, 0x50, 0x2a, 0x05, 0x40,
+  0x68, 0x0d, 0x01, 0xa0, 0x34, 0x00,
+  0x10, 0x22, 0x04, 0x40, 0x88, 0x10,
+  0x30, 0x46, 0x08, 0xc1, 0x18, 0x20
+};
+
+const uint8_t kMaskRandom44_42[252] = {
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+  0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+  0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+  0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+  0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+  0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+  0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x99, 0x13, 0x22, 0x64, 0x4c, 0x80,
+  0x05, 0x80, 0xb0, 0x16, 0x02, 0xc0,
+  0x80, 0xb0, 0x16, 0x02, 0xc0, 0x50,
+  0x84, 0x50, 0x8a, 0x11, 0x42, 0x20,
+  0x40, 0x68, 0x0d, 0x01, 0xa0, 0x30,
+  0x0a, 0x81, 0x50, 0x2a, 0x05, 0x40,
+  0x68, 0x0d, 0x01, 0xa0, 0x34, 0x00,
+  0x10, 0x22, 0x04, 0x40, 0x88, 0x10,
+  0x30, 0x46, 0x08, 0xc1, 0x18, 0x20,
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+  0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+  0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+  0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+  0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+  0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+  0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x62, 0x6c, 0x4d, 0x89, 0xb1, 0x30,
+  0x8c, 0x11, 0x82, 0x30, 0x46, 0x00,
+  0x01, 0x60, 0x2c, 0x05, 0x80, 0xb0,
+  0x07, 0x80, 0xf0, 0x1e, 0x03, 0xc0,
+  0xa0, 0x94, 0x12, 0x82, 0x50, 0x40,
+  0x18, 0xa3, 0x14, 0x62, 0x8c, 0x50,
+  0x91, 0x12, 0x22, 0x44, 0x48, 0x80,
+  0x78, 0x0f, 0x01, 0xe0, 0x3c, 0x00,
+  0x3b, 0x48, 0xc4, 0xed, 0x23, 0x10,
+  0xd9, 0xc1, 0x6f, 0xa8, 0x1c, 0x90
+};
+
+const uint8_t kMaskRandom44_43[258] = {
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+  0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+  0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+  0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+  0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+  0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+  0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x99, 0x13, 0x22, 0x64, 0x4c, 0x80,
+  0x05, 0x80, 0xb0, 0x16, 0x02, 0xc0,
+  0x80, 0xb0, 0x16, 0x02, 0xc0, 0x50,
+  0x84, 0x50, 0x8a, 0x11, 0x42, 0x20,
+  0x40, 0x68, 0x0d, 0x01, 0xa0, 0x30,
+  0x0a, 0x81, 0x50, 0x2a, 0x05, 0x40,
+  0x68, 0x0d, 0x01, 0xa0, 0x34, 0x00,
+  0x10, 0x22, 0x04, 0x40, 0x88, 0x10,
+  0x30, 0x46, 0x08, 0xc1, 0x18, 0x20,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x99, 0x13, 0x22, 0x64, 0x4c, 0x80,
+  0x05, 0x80, 0xb0, 0x16, 0x02, 0xc0,
+  0x80, 0xb0, 0x16, 0x02, 0xc0, 0x50,
+  0x84, 0x50, 0x8a, 0x11, 0x42, 0x20,
+  0x40, 0x68, 0x0d, 0x01, 0xa0, 0x30,
+  0x0a, 0x81, 0x50, 0x2a, 0x05, 0x40,
+  0x68, 0x0d, 0x01, 0xa0, 0x34, 0x00,
+  0x10, 0x22, 0x04, 0x40, 0x88, 0x10,
+  0x30, 0x46, 0x08, 0xc1, 0x18, 0x20,
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+  0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+  0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+  0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+  0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+  0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+  0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x9e, 0xce, 0x8a, 0x7b, 0x3a, 0x20
+};
+
+const uint8_t kMaskRandom44_44[264] = {
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x99, 0x13, 0x22, 0x64, 0x4c, 0x80,
+  0x05, 0x80, 0xb0, 0x16, 0x02, 0xc0,
+  0x80, 0xb0, 0x16, 0x02, 0xc0, 0x50,
+  0x84, 0x50, 0x8a, 0x11, 0x42, 0x20,
+  0x40, 0x68, 0x0d, 0x01, 0xa0, 0x30,
+  0x0a, 0x81, 0x50, 0x2a, 0x05, 0x40,
+  0x68, 0x0d, 0x01, 0xa0, 0x34, 0x00,
+  0x10, 0x22, 0x04, 0x40, 0x88, 0x10,
+  0x30, 0x46, 0x08, 0xc1, 0x18, 0x20,
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+  0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+  0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+  0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+  0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+  0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+  0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x9e, 0xce, 0x8a, 0x7b, 0x3a, 0x20,
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0xa9, 0x15, 0x22, 0xa4, 0x54, 0x80,
+  0x04, 0xc0, 0x98, 0x13, 0x02, 0x60,
+  0xd0, 0x1a, 0x03, 0x40, 0x68, 0x00,
+  0x82, 0x50, 0x4a, 0x09, 0x41, 0x20,
+  0x21, 0x24, 0x24, 0x84, 0x90, 0x90,
+  0x0c, 0x21, 0x84, 0x30, 0x86, 0x10,
+  0x4a, 0x09, 0x41, 0x28, 0x25, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0x89, 0x50,
+  0x46, 0x48, 0xc9, 0x19, 0x23, 0x20,
+  0x33, 0x26, 0x64, 0xcc, 0x99, 0x90,
+  0x99, 0x13, 0x22, 0x64, 0x4c, 0x80,
+  0x05, 0x80, 0xb0, 0x16, 0x02, 0xc0,
+  0x80, 0xb0, 0x16, 0x02, 0xc0, 0x50,
+  0x84, 0x50, 0x8a, 0x11, 0x42, 0x20,
+  0x40, 0x68, 0x0d, 0x01, 0xa0, 0x30,
+  0x0a, 0x81, 0x50, 0x2a, 0x05, 0x40,
+  0x68, 0x0d, 0x01, 0xa0, 0x34, 0x00,
+  0x10, 0x22, 0x04, 0x40, 0x88, 0x10,
+  0x30, 0x46, 0x08, 0xc1, 0x18, 0x20,
+  0xb5, 0x1c, 0x1c, 0x21, 0xac, 0xa0
+};
+
+const uint8_t kMaskRandom44_5[30] = {
+  0xc6, 0xd8, 0xdb, 0x1b, 0x63, 0x60,
+  0x63, 0x6c, 0x6d, 0x8d, 0xb1, 0xb0,
+  0x1d, 0xa3, 0xb4, 0x76, 0x8e, 0xd0,
+  0xad, 0x55, 0xaa, 0xb5, 0x56, 0xa0,
+  0xb2, 0xb6, 0x56, 0xca, 0xd9, 0x50
+};
+
+const uint8_t kMaskRandom44_6[36] = {
+  0x64, 0x4c, 0x89, 0x91, 0x32, 0x20,
+  0x51, 0x4a, 0x29, 0x45, 0x28, 0xa0,
+  0xa8, 0x35, 0x06, 0xa0, 0xd4, 0x10,
+  0xc4, 0xa0, 0x97, 0x12, 0x82, 0x50,
+  0x03, 0x60, 0x6c, 0x0d, 0x81, 0xb0,
+  0x90, 0xd2, 0x1a, 0x43, 0x48, 0x60
+};
+
+const uint8_t kMaskRandom44_7[42] = {
+  0xc6, 0x48, 0xcb, 0x19, 0x23, 0x20,
+  0x13, 0x26, 0x64, 0x4c, 0x99, 0x90,
+  0x8d, 0x13, 0xa2, 0x34, 0x4e, 0x80,
+  0x8b, 0x41, 0x6a, 0x2d, 0x05, 0xa0,
+  0x52, 0xaa, 0x15, 0x4a, 0xa8, 0x50,
+  0xa2, 0xd4, 0x1a, 0x8b, 0x50, 0x60,
+  0x61, 0xa8, 0x2d, 0x86, 0xa0, 0xb0
+};
+
+const uint8_t kMaskRandom44_8[48] = {
+  0x28, 0x85, 0x38, 0xa2, 0x14, 0xe0,
+  0x21, 0xf4, 0x04, 0x87, 0xd0, 0x10,
+  0xe9, 0x1d, 0x03, 0xa4, 0x74, 0x00,
+  0x17, 0x02, 0xe0, 0x5c, 0x0b, 0x80,
+  0x83, 0xa0, 0x56, 0x0e, 0x81, 0x50,
+  0x46, 0x18, 0xe9, 0x18, 0x63, 0xa0,
+  0x50, 0x6a, 0x0d, 0x41, 0xa8, 0x30,
+  0x1c, 0x23, 0x84, 0x70, 0x8e, 0x10
+};
+
+const uint8_t kMaskRandom44_9[54] = {
+  0x44, 0x48, 0xc9, 0x11, 0x23, 0x20,
+  0x28, 0x2d, 0x0c, 0xa0, 0xb4, 0x30,
+  0x25, 0x14, 0xa0, 0x94, 0x52, 0x80,
+  0x59, 0x0a, 0x21, 0x64, 0x28, 0x80,
+  0x03, 0xa0, 0x34, 0x0e, 0x80, 0xd0,
+  0xc0, 0xd0, 0x1b, 0x03, 0x40, 0x60,
+  0xa2, 0x30, 0x46, 0x88, 0xc1, 0x10,
+  0x14, 0x82, 0xd0, 0x52, 0x0b, 0x40,
+  0x9a, 0x03, 0x82, 0x68, 0x0e, 0x00
+};
+
+const uint8_t kMaskRandom45_1[6] = {
+  0xff, 0xff, 0xff, 0xff, 0xff, 0xf8
+};
+
+const uint8_t kMaskRandom45_10[60] = {
+  0xc0, 0x38, 0x89, 0x91, 0x28, 0xa0,
+  0x30, 0x0e, 0x29, 0x45, 0x22, 0x88,
+  0xe8, 0x07, 0x02, 0xa4, 0x40, 0x68,
+  0x85, 0x08, 0xa8, 0x13, 0x12, 0x10,
+  0xd0, 0x92, 0x13, 0x40, 0x05, 0x10,
+  0x86, 0x50, 0x4a, 0x09, 0x00, 0x70,
+  0x4a, 0x68, 0x0c, 0x84, 0xdc, 0x00,
+  0x01, 0xa0, 0x74, 0x30, 0x84, 0x88,
+  0x4c, 0x81, 0x91, 0x28, 0x2b, 0x00,
+  0x62, 0x24, 0x04, 0x4a, 0xd1, 0x40
+};
+
+const uint8_t kMaskRandom45_11[66] = {
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+  0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+  0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+  0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+  0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+  0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+  0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+  0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+  0x30, 0x46, 0x08, 0xc1, 0x60, 0x10
+};
+
+const uint8_t kMaskRandom45_12[72] = {
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+  0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+  0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+  0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+  0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+  0x86, 0x90, 0xd2, 0x1a, 0x29, 0xb0,
+  0x23, 0x24, 0x64, 0x8c, 0xb2, 0x10,
+  0x16, 0x22, 0xc4, 0x58, 0x86, 0x60,
+  0x4c, 0x29, 0x85, 0x30, 0xc1, 0x50,
+  0x41, 0xc8, 0x39, 0x07, 0x04, 0x98,
+  0xf4, 0x18, 0x9c, 0x65, 0x5b, 0x90
+};
+
+const uint8_t kMaskRandom45_13[78] = {
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+  0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+  0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+  0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+  0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+  0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+  0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+  0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30
+};
+
+const uint8_t kMaskRandom45_14[84] = {
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+  0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+  0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+  0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+  0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+  0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+  0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+  0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+  0xc6, 0xca, 0xea, 0x70, 0xfe, 0xc8
+};
+
+const uint8_t kMaskRandom45_15[90] = {
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+  0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+  0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+  0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+  0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+  0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+  0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+  0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+  0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+  0x14, 0x22, 0x84, 0x50, 0xe2, 0x80
+};
+
+const uint8_t kMaskRandom45_16[96] = {
+  0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+  0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+  0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+  0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+  0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+  0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+  0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+  0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+  0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+  0x86, 0xc1, 0x47, 0xeb, 0x67, 0xd0
+};
+
+const uint8_t kMaskRandom45_17[102] = {
+  0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+  0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+  0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+  0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+  0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+  0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+  0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+  0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+  0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+  0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+  0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+  0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70
+};
+
+const uint8_t kMaskRandom45_18[108] = {
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+  0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+  0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+  0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+  0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+  0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+  0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+  0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+  0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+  0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+  0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+  0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+  0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+  0xe4, 0xd4, 0x6e, 0x08, 0xc9, 0x58
+};
+
+const uint8_t kMaskRandom45_19[114] = {
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+  0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+  0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+  0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+  0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+  0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+  0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+  0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+  0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+  0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+  0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+  0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+  0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+  0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40
+};
+
+const uint8_t kMaskRandom45_2[12] = {
+  0xec, 0xdd, 0x9b, 0xb3, 0x76, 0x60,
+  0x9b, 0xb3, 0x76, 0x6e, 0xc9, 0xd8
+};
+
+const uint8_t kMaskRandom45_20[120] = {
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+  0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+  0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+  0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+  0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+  0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+  0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+  0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+  0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+  0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+  0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+  0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+  0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+  0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+  0x3b, 0x48, 0xc7, 0x6d, 0x29, 0xe8
+};
+
+const uint8_t kMaskRandom45_21[126] = {
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+  0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+  0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+  0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+  0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+  0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+  0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+  0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+  0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+  0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+  0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+  0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+  0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+  0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+  0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+  0x30, 0x46, 0x08, 0xc1, 0x60, 0x10
+};
+
+const uint8_t kMaskRandom45_22[132] = {
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+  0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+  0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+  0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+  0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+  0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+  0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+  0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+  0x30, 0x46, 0x08, 0xc1, 0x60, 0x10,
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+  0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+  0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+  0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+  0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+  0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+  0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+  0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+  0x9e, 0xce, 0x8b, 0xaa, 0x34, 0x68
+};
+
+const uint8_t kMaskRandom45_23[138] = {
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+  0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+  0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+  0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+  0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+  0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+  0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+  0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+  0x30, 0x46, 0x08, 0xc1, 0x60, 0x10,
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+  0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+  0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+  0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+  0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+  0x86, 0x90, 0xd2, 0x1a, 0x29, 0xb0,
+  0x23, 0x24, 0x64, 0x8c, 0xb2, 0x10,
+  0x16, 0x22, 0xc4, 0x58, 0x86, 0x60,
+  0x4c, 0x29, 0x85, 0x30, 0xc1, 0x50,
+  0x41, 0xc8, 0x39, 0x07, 0x04, 0x98,
+  0xf4, 0x18, 0x9c, 0x65, 0x5b, 0x90
+};
+
+const uint8_t kMaskRandom45_24[144] = {
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+  0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+  0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+  0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+  0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+  0x86, 0x90, 0xd2, 0x1a, 0x29, 0xb0,
+  0x23, 0x24, 0x64, 0x8c, 0xb2, 0x10,
+  0x16, 0x22, 0xc4, 0x58, 0x86, 0x60,
+  0x4c, 0x29, 0x85, 0x30, 0xc1, 0x50,
+  0x41, 0xc8, 0x39, 0x07, 0x04, 0x98,
+  0xf4, 0x18, 0x9c, 0x65, 0x5b, 0x90,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+  0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+  0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+  0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+  0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+  0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+  0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+  0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+  0x30, 0x46, 0x08, 0xc1, 0x60, 0x10,
+  0x95, 0x91, 0xad, 0xd9, 0x86, 0x98
+};
+
+const uint8_t kMaskRandom45_25[150] = {
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+  0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+  0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+  0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+  0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+  0x86, 0x90, 0xd2, 0x1a, 0x29, 0xb0,
+  0x23, 0x24, 0x64, 0x8c, 0xb2, 0x10,
+  0x16, 0x22, 0xc4, 0x58, 0x86, 0x60,
+  0x4c, 0x29, 0x85, 0x30, 0xc1, 0x50,
+  0x41, 0xc8, 0x39, 0x07, 0x04, 0x98,
+  0xf4, 0x18, 0x9c, 0x65, 0x5b, 0x90,
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+  0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+  0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+  0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+  0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+  0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+  0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+  0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30
+};
+
+const uint8_t kMaskRandom45_26[156] = {
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+  0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+  0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+  0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+  0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+  0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+  0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+  0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+  0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+  0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+  0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+  0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+  0x86, 0x90, 0xd2, 0x1a, 0x29, 0xb0,
+  0x23, 0x24, 0x64, 0x8c, 0xb2, 0x10,
+  0x16, 0x22, 0xc4, 0x58, 0x86, 0x60,
+  0x4c, 0x29, 0x85, 0x30, 0xc1, 0x50,
+  0x41, 0xc8, 0x39, 0x07, 0x04, 0x98,
+  0xf4, 0x18, 0x9c, 0x65, 0x5b, 0x90,
+  0xb0, 0xfd, 0xb2, 0xf3, 0x8a, 0xc0
+};
+
+const uint8_t kMaskRandom45_27[162] = {
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+  0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+  0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+  0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+  0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+  0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+  0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+  0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+  0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+  0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+  0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+  0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+  0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+  0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+  0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+  0xc6, 0xca, 0xea, 0x70, 0xfe, 0xc8
+};
+
+const uint8_t kMaskRandom45_28[168] = {
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+  0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+  0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+  0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+  0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+  0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+  0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+  0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+  0xc6, 0xca, 0xea, 0x70, 0xfe, 0xc8,
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+  0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+  0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+  0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+  0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+  0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+  0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+  0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+  0x44, 0x46, 0x28, 0xfb, 0x66, 0x80
+};
+
+const uint8_t kMaskRandom45_29[174] = {
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+  0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+  0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+  0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+  0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+  0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+  0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+  0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+  0xc6, 0xca, 0xea, 0x70, 0xfe, 0xc8,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+  0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+  0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+  0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+  0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+  0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+  0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+  0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+  0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+  0x14, 0x22, 0x84, 0x50, 0xe2, 0x80
+};
+
+const uint8_t kMaskRandom45_3[18] = {
+  0xca, 0xd9, 0x5b, 0x2b, 0x4d, 0x90,
+  0xf1, 0x5e, 0x2b, 0xc5, 0x24, 0xe8,
+  0xb6, 0x35, 0xc5, 0xd8, 0x9f, 0x40
+};
+
+const uint8_t kMaskRandom45_30[180] = {
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+  0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+  0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+  0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+  0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+  0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+  0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+  0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+  0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+  0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+  0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+  0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+  0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+  0x0c, 0xa1, 0x94, 0x32, 0x90, 0xc0,
+  0xa1, 0x34, 0x26, 0x84, 0x89, 0x18,
+  0x12, 0xa2, 0x54, 0x4a, 0x84, 0x70,
+  0x8a, 0x51, 0x4a, 0x29, 0x17, 0x00,
+  0xc6, 0xca, 0xea, 0x70, 0xfe, 0xc8,
+  0x1c, 0xc9, 0x43, 0x25, 0xa7, 0x00
+};
+
+const uint8_t kMaskRandom45_31[186] = {
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+  0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+  0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+  0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+  0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+  0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+  0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+  0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+  0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+  0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+  0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+  0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+  0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+  0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+  0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+  0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+  0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+  0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+  0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+  0x86, 0xc1, 0x47, 0xeb, 0x67, 0xd0
+};
+
+const uint8_t kMaskRandom45_32[192] = {
+  0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+  0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+  0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+  0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+  0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+  0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+  0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+  0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+  0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+  0x86, 0xc1, 0x47, 0xeb, 0x67, 0xd0,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+  0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+  0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+  0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+  0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+  0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+  0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+  0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+  0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+  0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+  0x40, 0x7e, 0xc1, 0x30, 0x29, 0x50
+};
+
+const uint8_t kMaskRandom45_33[198] = {
+  0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+  0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+  0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+  0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+  0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+  0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+  0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+  0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+  0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+  0x86, 0xc1, 0x47, 0xeb, 0x67, 0xd0,
+  0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+  0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+  0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+  0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+  0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+  0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+  0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+  0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+  0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+  0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+  0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+  0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70
+};
+
+const uint8_t kMaskRandom45_34[204] = {
+  0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+  0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+  0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+  0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+  0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+  0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+  0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+  0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+  0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+  0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+  0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+  0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+  0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+  0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+  0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+  0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+  0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+  0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x91, 0x92, 0x32, 0x46, 0x48, 0x48,
+  0xa4, 0x34, 0x86, 0x90, 0x81, 0x28,
+  0x50, 0xaa, 0x15, 0x42, 0x83, 0x50,
+  0x84, 0xd0, 0x9a, 0x13, 0x16, 0x00,
+  0x09, 0x61, 0x2c, 0x25, 0xc4, 0x30,
+  0x86, 0xc1, 0x47, 0xeb, 0x67, 0xd0,
+  0x1f, 0x78, 0x45, 0x5e, 0x46, 0x50
+};
+
+const uint8_t kMaskRandom45_35[210] = {
+  0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+  0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+  0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+  0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+  0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+  0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+  0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+  0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+  0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+  0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+  0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+  0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+  0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+  0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+  0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+  0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+  0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+  0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+  0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+  0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+  0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+  0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+  0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+  0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+  0xe4, 0xd4, 0x6e, 0x08, 0xc9, 0x58
+};
+
+const uint8_t kMaskRandom45_36[216] = {
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+  0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+  0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+  0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+  0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+  0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+  0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+  0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+  0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+  0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+  0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+  0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+  0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+  0xe4, 0xd4, 0x6e, 0x08, 0xc9, 0x58,
+  0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+  0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+  0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+  0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+  0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+  0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+  0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+  0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+  0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+  0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+  0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+  0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+  0xd0, 0x1a, 0xf0, 0x14, 0xf0, 0xe8
+};
+
+const uint8_t kMaskRandom45_37[222] = {
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+  0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+  0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+  0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+  0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+  0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+  0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+  0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+  0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+  0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+  0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+  0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+  0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+  0xe4, 0xd4, 0x6e, 0x08, 0xc9, 0x58,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+  0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+  0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+  0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+  0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+  0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+  0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+  0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+  0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+  0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+  0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+  0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+  0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+  0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40
+};
+
+const uint8_t kMaskRandom45_38[228] = {
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+  0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+  0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+  0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+  0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+  0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+  0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+  0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+  0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+  0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+  0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+  0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+  0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+  0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+  0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+  0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+  0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+  0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+  0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+  0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+  0x0c, 0x81, 0x90, 0x32, 0x10, 0x30,
+  0x80, 0x70, 0x0e, 0x01, 0xc0, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x21, 0x20,
+  0x05, 0x40, 0xa8, 0x15, 0x00, 0xc8,
+  0x43, 0x08, 0x61, 0x0c, 0x0a, 0x08,
+  0x1a, 0x03, 0x40, 0x68, 0x05, 0x40,
+  0x60, 0x2c, 0x05, 0x80, 0x9c, 0x00,
+  0x14, 0x22, 0x84, 0x50, 0xe2, 0x80,
+  0xe4, 0xd4, 0x6e, 0x08, 0xc9, 0x58,
+  0x04, 0x67, 0x1b, 0xba, 0x1d, 0xa0
+};
+
+const uint8_t kMaskRandom45_39[234] = {
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+  0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+  0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+  0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+  0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+  0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+  0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+  0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+  0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+  0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+  0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+  0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+  0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+  0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+  0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+  0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+  0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+  0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+  0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+  0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+  0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+  0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+  0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+  0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+  0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+  0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+  0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+  0x3b, 0x48, 0xc7, 0x6d, 0x29, 0xe8
+};
+
+const uint8_t kMaskRandom45_4[24] = {
+  0xc4, 0xd8, 0x9b, 0x13, 0x45, 0x90,
+  0x31, 0x66, 0x2c, 0xc5, 0x8a, 0x58,
+  0x4b, 0x29, 0x65, 0x2c, 0x91, 0x68,
+  0x2c, 0xa5, 0x94, 0xb2, 0xa2, 0xa8
+};
+
+const uint8_t kMaskRandom45_40[240] = {
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+  0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+  0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+  0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+  0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+  0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+  0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+  0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+  0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+  0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+  0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+  0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+  0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+  0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+  0x3b, 0x48, 0xc7, 0x6d, 0x29, 0xe8,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+  0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+  0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+  0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+  0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+  0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+  0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+  0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+  0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+  0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+  0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+  0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+  0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+  0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+  0xd9, 0x40, 0x46, 0xe6, 0x4f, 0xd8
+};
+
+const uint8_t kMaskRandom45_41[246] = {
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+  0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+  0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+  0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+  0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+  0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+  0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+  0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+  0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+  0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+  0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+  0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+  0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+  0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+  0x3b, 0x48, 0xc7, 0x6d, 0x29, 0xe8,
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+  0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+  0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+  0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+  0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+  0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+  0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+  0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+  0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+  0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+  0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+  0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+  0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+  0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+  0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+  0x30, 0x46, 0x08, 0xc1, 0x60, 0x10
+};
+
+const uint8_t kMaskRandom45_42[252] = {
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+  0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+  0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+  0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+  0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+  0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+  0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+  0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+  0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+  0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+  0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+  0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+  0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+  0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+  0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+  0x30, 0x46, 0x08, 0xc1, 0x60, 0x10,
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+  0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+  0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+  0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+  0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+  0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+  0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+  0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x62, 0x6c, 0x4d, 0x89, 0xf2, 0x10,
+  0x8c, 0x11, 0x82, 0x30, 0x12, 0x20,
+  0x01, 0x60, 0x2c, 0x05, 0xd0, 0x88,
+  0x07, 0x80, 0xf0, 0x1e, 0x0c, 0x18,
+  0xa0, 0x94, 0x12, 0x82, 0x01, 0xc8,
+  0x18, 0xa3, 0x14, 0x62, 0xc5, 0x08,
+  0x91, 0x12, 0x22, 0x44, 0x02, 0x48,
+  0x78, 0x0f, 0x01, 0xe0, 0x00, 0x70,
+  0x3b, 0x48, 0xc7, 0x6d, 0x29, 0xe8,
+  0xac, 0xcc, 0x04, 0x41, 0x97, 0x30
+};
+
+const uint8_t kMaskRandom45_43[258] = {
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+  0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+  0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+  0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+  0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+  0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+  0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+  0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+  0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+  0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+  0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+  0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+  0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+  0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+  0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+  0x30, 0x46, 0x08, 0xc1, 0x60, 0x10,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+  0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+  0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+  0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+  0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+  0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+  0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+  0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+  0x30, 0x46, 0x08, 0xc1, 0x60, 0x10,
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+  0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+  0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+  0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+  0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+  0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+  0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+  0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+  0x9e, 0xce, 0x8b, 0xaa, 0x34, 0x68
+};
+
+const uint8_t kMaskRandom45_44[264] = {
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+  0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+  0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+  0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+  0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+  0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+  0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+  0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+  0x30, 0x46, 0x08, 0xc1, 0x60, 0x10,
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+  0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+  0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+  0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+  0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+  0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+  0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+  0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+  0x9e, 0xce, 0x8b, 0xaa, 0x34, 0x68,
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+  0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+  0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+  0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+  0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+  0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+  0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+  0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+  0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+  0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+  0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+  0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+  0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+  0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+  0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+  0x30, 0x46, 0x08, 0xc1, 0x60, 0x10,
+  0xf8, 0x40, 0xe3, 0x2e, 0x16, 0x00
+};
+
+const uint8_t kMaskRandom45_45[270] = {
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+  0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+  0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+  0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+  0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+  0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+  0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+  0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+  0x30, 0x46, 0x08, 0xc1, 0x60, 0x10,
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x22, 0x88,
+  0xa9, 0x15, 0x22, 0xa4, 0x40, 0x68,
+  0x04, 0xc0, 0x98, 0x13, 0x12, 0x10,
+  0xd0, 0x1a, 0x03, 0x40, 0x05, 0x10,
+  0x82, 0x50, 0x4a, 0x09, 0x00, 0x70,
+  0x21, 0x24, 0x24, 0x84, 0xdc, 0x00,
+  0x0c, 0x21, 0x84, 0x30, 0x84, 0x88,
+  0x4a, 0x09, 0x41, 0x28, 0x2b, 0x00,
+  0x12, 0xa2, 0x54, 0x4a, 0xd1, 0x40,
+  0x9e, 0xce, 0x8b, 0xaa, 0x34, 0x68,
+  0x46, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x33, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x99, 0x13, 0x22, 0x64, 0x08, 0x48,
+  0x05, 0x80, 0xb0, 0x16, 0x00, 0x38,
+  0x80, 0xb0, 0x16, 0x02, 0x86, 0x08,
+  0x84, 0x50, 0x8a, 0x11, 0x20, 0x60,
+  0x40, 0x68, 0x0d, 0x01, 0xb5, 0x00,
+  0x0a, 0x81, 0x50, 0x2a, 0x43, 0x00,
+  0x68, 0x0d, 0x01, 0xa0, 0x12, 0x40,
+  0x10, 0x22, 0x04, 0x40, 0xc4, 0x80,
+  0x30, 0x46, 0x08, 0xc1, 0x60, 0x10,
+  0x64, 0x4c, 0x89, 0x19, 0x08, 0x30,
+  0x51, 0x4a, 0x28, 0xcc, 0x81, 0x18,
+  0xa9, 0x15, 0x22, 0x64, 0x20, 0x28,
+  0x04, 0xc0, 0x98, 0x16, 0x10, 0xc0,
+  0xd0, 0x1a, 0x02, 0x02, 0xc0, 0x88,
+  0x82, 0x50, 0x4a, 0x11, 0x0a, 0x40,
+  0x21, 0x24, 0x25, 0x01, 0xcc, 0x00,
+  0x0c, 0x21, 0x84, 0x2a, 0x04, 0x48,
+  0x4a, 0x09, 0x41, 0xa0, 0x31, 0x00,
+  0x12, 0xa2, 0x54, 0x40, 0x92, 0x10,
+  0x9e, 0xce, 0x88, 0xc1, 0x45, 0x00,
+  0xfb, 0x97, 0x5d, 0x7d, 0x42, 0x20
+};
+
+const uint8_t kMaskRandom45_5[30] = {
+  0xc6, 0xd8, 0xdb, 0x1b, 0x29, 0xb0,
+  0x63, 0x6c, 0x6d, 0x8d, 0xb2, 0x58,
+  0x1d, 0xa3, 0xb4, 0x76, 0x87, 0x70,
+  0xad, 0x55, 0xaa, 0xb5, 0x54, 0xe0,
+  0xb2, 0xb6, 0x56, 0xca, 0xdc, 0x18
+};
+
+const uint8_t kMaskRandom45_6[36] = {
+  0x64, 0x4c, 0x89, 0x91, 0x28, 0xa0,
+  0x51, 0x4a, 0x29, 0x45, 0x62, 0x88,
+  0xa8, 0x35, 0x04, 0x32, 0x90, 0xc0,
+  0xc4, 0xa0, 0x96, 0x84, 0x89, 0x18,
+  0x03, 0x60, 0x6c, 0x4a, 0x84, 0x70,
+  0x90, 0xd2, 0x1a, 0x29, 0x17, 0x00
+};
+
+const uint8_t kMaskRandom45_7[42] = {
+  0xc6, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x13, 0x26, 0x64, 0xcc, 0x90, 0x98,
+  0x8d, 0x13, 0xa2, 0x46, 0x48, 0x48,
+  0x8b, 0x41, 0x6a, 0x90, 0x81, 0x28,
+  0x52, 0xaa, 0x15, 0x42, 0x83, 0x50,
+  0xa2, 0xd4, 0x1a, 0x13, 0x16, 0x00,
+  0x61, 0xa8, 0x2c, 0x25, 0xc4, 0x30
+};
+
+const uint8_t kMaskRandom45_8[48] = {
+  0x28, 0x85, 0x38, 0x32, 0x10, 0x30,
+  0x21, 0xf4, 0x06, 0x01, 0xc0, 0x18,
+  0xe9, 0x1d, 0x02, 0x82, 0x21, 0x20,
+  0x17, 0x02, 0xe0, 0x15, 0x00, 0xc8,
+  0x83, 0xa0, 0x55, 0x0c, 0x0a, 0x08,
+  0x46, 0x18, 0xe8, 0x68, 0x05, 0x40,
+  0x50, 0x6a, 0x0d, 0x80, 0x9c, 0x00,
+  0x1c, 0x23, 0x84, 0x50, 0xe2, 0x80
+};
+
+const uint8_t kMaskRandom45_9[54] = {
+  0x44, 0x48, 0xc9, 0x19, 0x29, 0xb0,
+  0x28, 0x2d, 0x0d, 0x89, 0xf2, 0x10,
+  0x25, 0x14, 0xa2, 0x30, 0x12, 0x20,
+  0x59, 0x0a, 0x20, 0x05, 0xd0, 0x88,
+  0x03, 0xa0, 0x34, 0x1e, 0x0c, 0x18,
+  0xc0, 0xd0, 0x1a, 0x82, 0x01, 0xc8,
+  0xa2, 0x30, 0x44, 0x62, 0xc5, 0x08,
+  0x14, 0x82, 0xd2, 0x44, 0x02, 0x48,
+  0x9a, 0x03, 0x81, 0xe0, 0x00, 0x70
+};
+
+const uint8_t kMaskRandom46_1[6] = {
+  0xff, 0xff, 0xff, 0xff, 0xff, 0xfc
+};
+
+const uint8_t kMaskRandom46_10[60] = {
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+  0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+  0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+  0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+  0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+  0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+  0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+  0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+  0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0
+};
+
+const uint8_t kMaskRandom46_11[66] = {
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+  0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+  0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+  0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+  0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+  0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+  0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+  0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+  0x30, 0x58, 0x04, 0x60, 0xb0, 0x08
+};
+
+const uint8_t kMaskRandom46_12[72] = {
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+  0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+  0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+  0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+  0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+  0x86, 0x8a, 0x6d, 0x0d, 0x14, 0xd8,
+  0x23, 0x2c, 0x84, 0x46, 0x59, 0x08,
+  0x16, 0x21, 0x98, 0x2c, 0x43, 0x30,
+  0x4c, 0x30, 0x54, 0x98, 0x60, 0xa8,
+  0x41, 0xc1, 0x26, 0x83, 0x82, 0x4c,
+  0x19, 0x56, 0xe4, 0x32, 0xad, 0xc8
+};
+
+const uint8_t kMaskRandom46_13[78] = {
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+  0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+  0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+  0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+  0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+  0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+  0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+  0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+  0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18
+};
+
+const uint8_t kMaskRandom46_14[84] = {
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+  0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+  0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+  0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+  0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+  0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+  0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+  0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+  0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+  0x9c, 0x3f, 0xb3, 0x38, 0x7f, 0x64
+};
+
+const uint8_t kMaskRandom46_15[90] = {
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+  0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+  0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+  0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+  0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+  0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+  0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+  0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+  0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+  0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+  0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+  0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+  0x14, 0x38, 0xa0, 0x28, 0x71, 0x40
+};
+
+const uint8_t kMaskRandom46_16[96] = {
+  0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+  0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+  0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+  0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+  0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+  0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+  0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+  0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+  0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+  0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+  0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+  0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+  0xfa, 0xd9, 0xf5, 0xf5, 0xb3, 0xe8
+};
+
+const uint8_t kMaskRandom46_17[102] = {
+  0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+  0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+  0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+  0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+  0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+  0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+  0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+  0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+  0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+  0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+  0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+  0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+  0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+  0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+  0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38
+};
+
+const uint8_t kMaskRandom46_18[108] = {
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+  0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+  0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+  0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+  0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+  0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+  0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+  0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+  0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+  0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+  0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+  0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+  0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+  0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+  0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+  0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+  0x82, 0x32, 0x57, 0x04, 0x64, 0xac
+};
+
+const uint8_t kMaskRandom46_19[114] = {
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+  0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+  0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+  0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+  0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+  0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+  0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+  0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+  0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+  0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+  0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+  0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+  0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+  0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+  0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+  0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0
+};
+
+const uint8_t kMaskRandom46_2[12] = {
+  0xec, 0xdd, 0x99, 0xd9, 0xbb, 0x30,
+  0x9b, 0xb2, 0x77, 0x37, 0x64, 0xec
+};
+
+const uint8_t kMaskRandom46_20[120] = {
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+  0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+  0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+  0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+  0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+  0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+  0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+  0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+  0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+  0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+  0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+  0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+  0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+  0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+  0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+  0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+  0xdb, 0x4a, 0x7b, 0xb6, 0x94, 0xf4
+};
+
+const uint8_t kMaskRandom46_21[126] = {
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+  0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+  0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+  0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+  0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+  0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+  0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+  0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+  0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+  0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+  0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+  0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+  0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+  0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+  0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+  0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+  0x30, 0x58, 0x04, 0x60, 0xb0, 0x08
+};
+
+const uint8_t kMaskRandom46_22[132] = {
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+  0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+  0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+  0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+  0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+  0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+  0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+  0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+  0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+  0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+  0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+  0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+  0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+  0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+  0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+  0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+  0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+  0xea, 0x8d, 0x1b, 0xd5, 0x1a, 0x34
+};
+
+const uint8_t kMaskRandom46_23[138] = {
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+  0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+  0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+  0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+  0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+  0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+  0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+  0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+  0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+  0x46, 0x42, 0x0c, 0x8c, 0x84, 0x18,
+  0x33, 0x20, 0x46, 0x66, 0x40, 0x8c,
+  0x99, 0x08, 0x0b, 0x32, 0x10, 0x14,
+  0x05, 0x84, 0x30, 0x0b, 0x08, 0x60,
+  0x80, 0xb0, 0x23, 0x01, 0x60, 0x44,
+  0x84, 0x42, 0x91, 0x08, 0x85, 0x20,
+  0x40, 0x73, 0x00, 0x80, 0xe6, 0x00,
+  0x0a, 0x81, 0x12, 0x15, 0x02, 0x24,
+  0x68, 0x0c, 0x40, 0xd0, 0x18, 0x80,
+  0x10, 0x24, 0x84, 0x20, 0x49, 0x08,
+  0x30, 0x51, 0x40, 0x60, 0xa2, 0x80,
+  0x5f, 0x50, 0x88, 0xbe, 0xa1, 0x10
+};
+
+const uint8_t kMaskRandom46_24[144] = {
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+  0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+  0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+  0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+  0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+  0x86, 0x8a, 0x6d, 0x0d, 0x14, 0xd8,
+  0x23, 0x2c, 0x84, 0x46, 0x59, 0x08,
+  0x16, 0x21, 0x98, 0x2c, 0x43, 0x30,
+  0x4c, 0x30, 0x54, 0x98, 0x60, 0xa8,
+  0x41, 0xc1, 0x26, 0x83, 0x82, 0x4c,
+  0x19, 0x56, 0xe4, 0x32, 0xad, 0xc8,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+  0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+  0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+  0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+  0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+  0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+  0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+  0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+  0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+  0x18, 0x8b, 0x03, 0xb4, 0x3b, 0x10
+};
+
+const uint8_t kMaskRandom46_25[150] = {
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+  0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+  0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+  0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+  0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+  0x86, 0x8a, 0x6d, 0x0d, 0x14, 0xd8,
+  0x23, 0x2c, 0x84, 0x46, 0x59, 0x08,
+  0x16, 0x21, 0x98, 0x2c, 0x43, 0x30,
+  0x4c, 0x30, 0x54, 0x98, 0x60, 0xa8,
+  0x41, 0xc1, 0x26, 0x83, 0x82, 0x4c,
+  0x19, 0x56, 0xe4, 0x32, 0xad, 0xc8,
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+  0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+  0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+  0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+  0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+  0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+  0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+  0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+  0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18
+};
+
+const uint8_t kMaskRandom46_26[156] = {
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+  0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+  0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+  0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+  0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+  0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+  0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+  0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+  0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+  0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+  0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+  0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+  0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+  0x86, 0x8a, 0x6d, 0x0d, 0x14, 0xd8,
+  0x23, 0x2c, 0x84, 0x46, 0x59, 0x08,
+  0x16, 0x21, 0x98, 0x2c, 0x43, 0x30,
+  0x4c, 0x30, 0x54, 0x98, 0x60, 0xa8,
+  0x41, 0xc1, 0x26, 0x83, 0x82, 0x4c,
+  0x19, 0x56, 0xe4, 0x32, 0xad, 0xc8,
+  0x2d, 0x6d, 0xd2, 0x57, 0xd6, 0x2c
+};
+
+const uint8_t kMaskRandom46_27[162] = {
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+  0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+  0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+  0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+  0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+  0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+  0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+  0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+  0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+  0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+  0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+  0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+  0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+  0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+  0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+  0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+  0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+  0x9c, 0x3f, 0xb3, 0x38, 0x7f, 0x64
+};
+
+const uint8_t kMaskRandom46_28[168] = {
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+  0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+  0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+  0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+  0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+  0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+  0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+  0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+  0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+  0x9c, 0x3f, 0xb3, 0x38, 0x7f, 0x64,
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+  0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+  0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+  0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+  0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+  0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+  0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+  0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+  0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+  0xfa, 0x52, 0xf9, 0x72, 0xd9, 0x68
+};
+
+const uint8_t kMaskRandom46_29[174] = {
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+  0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+  0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+  0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+  0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+  0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+  0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+  0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+  0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+  0x9c, 0x3f, 0xb3, 0x38, 0x7f, 0x64,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+  0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+  0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+  0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+  0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+  0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+  0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+  0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+  0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+  0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+  0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+  0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+  0x14, 0x38, 0xa0, 0x28, 0x71, 0x40
+};
+
+const uint8_t kMaskRandom46_3[18] = {
+  0xca, 0xd3, 0x65, 0x95, 0xa6, 0xc8,
+  0xf1, 0x49, 0x3b, 0xe2, 0x92, 0x74,
+  0x76, 0x27, 0xd0, 0xec, 0x4f, 0xa0
+};
+
+const uint8_t kMaskRandom46_30[180] = {
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+  0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+  0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+  0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+  0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+  0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+  0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+  0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+  0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+  0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+  0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+  0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+  0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+  0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+  0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+  0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+  0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+  0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+  0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+  0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+  0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80,
+  0x9c, 0x3f, 0xb3, 0x38, 0x7f, 0x64,
+  0x99, 0xf6, 0x0a, 0xdd, 0x16, 0xb0
+};
+
+const uint8_t kMaskRandom46_31[186] = {
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+  0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+  0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+  0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+  0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+  0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+  0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+  0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+  0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+  0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+  0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+  0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+  0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+  0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+  0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+  0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+  0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+  0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+  0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+  0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+  0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+  0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+  0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+  0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+  0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+  0xfa, 0xd9, 0xf5, 0xf5, 0xb3, 0xe8
+};
+
+const uint8_t kMaskRandom46_32[192] = {
+  0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+  0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+  0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+  0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+  0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+  0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+  0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+  0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+  0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+  0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+  0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+  0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+  0xfa, 0xd9, 0xf5, 0xf5, 0xb3, 0xe8,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+  0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+  0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+  0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+  0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+  0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+  0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+  0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+  0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+  0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+  0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+  0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+  0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+  0x69, 0xcd, 0xeb, 0x51, 0xc9, 0xa8
+};
+
+const uint8_t kMaskRandom46_33[198] = {
+  0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+  0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+  0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+  0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+  0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+  0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+  0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+  0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+  0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+  0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+  0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+  0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+  0xfa, 0xd9, 0xf5, 0xf5, 0xb3, 0xe8,
+  0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+  0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+  0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+  0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+  0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+  0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+  0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+  0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+  0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+  0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+  0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+  0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+  0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+  0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+  0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38
+};
+
+const uint8_t kMaskRandom46_34[204] = {
+  0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+  0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+  0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+  0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+  0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+  0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+  0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+  0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+  0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+  0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+  0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+  0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+  0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+  0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+  0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+  0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+  0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+  0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+  0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+  0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+  0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+  0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+  0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+  0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+  0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+  0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+  0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18,
+  0xfa, 0xd9, 0xf5, 0xf5, 0xb3, 0xe8,
+  0x60, 0xf0, 0x13, 0xf0, 0x4d, 0xe0
+};
+
+const uint8_t kMaskRandom46_35[210] = {
+  0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+  0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+  0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+  0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+  0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+  0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+  0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+  0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+  0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+  0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+  0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+  0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+  0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+  0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+  0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+  0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+  0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+  0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+  0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+  0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+  0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+  0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+  0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+  0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+  0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+  0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+  0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+  0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+  0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+  0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+  0x82, 0x32, 0x57, 0x04, 0x64, 0xac
+};
+
+const uint8_t kMaskRandom46_36[216] = {
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+  0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+  0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+  0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+  0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+  0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+  0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+  0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+  0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+  0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+  0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+  0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+  0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+  0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+  0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+  0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+  0x82, 0x32, 0x57, 0x04, 0x64, 0xac,
+  0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+  0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+  0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+  0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+  0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+  0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+  0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+  0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+  0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+  0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+  0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+  0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+  0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+  0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+  0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+  0x6c, 0x3a, 0x45, 0x70, 0xd7, 0x00
+};
+
+const uint8_t kMaskRandom46_37[222] = {
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+  0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+  0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+  0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+  0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+  0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+  0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+  0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+  0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+  0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+  0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+  0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+  0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+  0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+  0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+  0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+  0x82, 0x32, 0x57, 0x04, 0x64, 0xac,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+  0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+  0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+  0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+  0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+  0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+  0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+  0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+  0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+  0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+  0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+  0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+  0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+  0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+  0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+  0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0
+};
+
+const uint8_t kMaskRandom46_38[228] = {
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+  0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+  0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+  0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+  0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+  0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+  0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+  0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+  0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+  0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+  0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+  0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+  0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+  0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+  0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+  0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+  0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+  0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+  0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+  0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+  0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+  0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+  0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+  0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+  0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+  0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+  0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+  0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+  0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+  0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+  0x14, 0x38, 0xa0, 0x28, 0x71, 0x40,
+  0x82, 0x32, 0x57, 0x04, 0x64, 0xac,
+  0x72, 0x2b, 0xa5, 0xd4, 0xb9, 0x30
+};
+
+const uint8_t kMaskRandom46_39[234] = {
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+  0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+  0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+  0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+  0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+  0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+  0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+  0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+  0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+  0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+  0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+  0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+  0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+  0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+  0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+  0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+  0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+  0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+  0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+  0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+  0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+  0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+  0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+  0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+  0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+  0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+  0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+  0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+  0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+  0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+  0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+  0xdb, 0x4a, 0x7b, 0xb6, 0x94, 0xf4
+};
+
+const uint8_t kMaskRandom46_4[24] = {
+  0xc4, 0xd1, 0x65, 0x89, 0xa2, 0xc8,
+  0x31, 0x62, 0x96, 0x62, 0xc5, 0x2c,
+  0x4b, 0x24, 0x5a, 0x96, 0x48, 0xb4,
+  0x2c, 0xa8, 0xaa, 0x59, 0x51, 0x54
+};
+
+const uint8_t kMaskRandom46_40[240] = {
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+  0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+  0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+  0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+  0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+  0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+  0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+  0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+  0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+  0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+  0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+  0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+  0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+  0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+  0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+  0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+  0xdb, 0x4a, 0x7b, 0xb6, 0x94, 0xf4,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+  0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+  0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+  0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+  0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+  0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+  0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+  0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+  0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+  0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+  0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+  0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+  0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+  0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+  0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+  0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+  0x7c, 0xc8, 0x93, 0x63, 0x3c, 0x80
+};
+
+const uint8_t kMaskRandom46_41[246] = {
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+  0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+  0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+  0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+  0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+  0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+  0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+  0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+  0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+  0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+  0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+  0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+  0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+  0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+  0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+  0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+  0xdb, 0x4a, 0x7b, 0xb6, 0x94, 0xf4,
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+  0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+  0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+  0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+  0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+  0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+  0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+  0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+  0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+  0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+  0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+  0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+  0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+  0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+  0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+  0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+  0x30, 0x58, 0x04, 0x60, 0xb0, 0x08
+};
+
+const uint8_t kMaskRandom46_42[252] = {
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+  0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+  0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+  0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+  0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+  0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+  0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+  0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+  0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+  0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+  0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+  0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+  0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+  0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+  0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+  0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+  0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+  0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+  0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+  0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+  0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+  0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+  0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+  0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+  0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+  0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+  0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+  0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+  0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+  0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+  0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+  0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38,
+  0xdb, 0x4a, 0x7b, 0xb6, 0x94, 0xf4,
+  0xfc, 0x6e, 0x89, 0x54, 0x4f, 0x00
+};
+
+const uint8_t kMaskRandom46_43[258] = {
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+  0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+  0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+  0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+  0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+  0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+  0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+  0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+  0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+  0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+  0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+  0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+  0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+  0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+  0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+  0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+  0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+  0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+  0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+  0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+  0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+  0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+  0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+  0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+  0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+  0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+  0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+  0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+  0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+  0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+  0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+  0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+  0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+  0xea, 0x8d, 0x1b, 0xd5, 0x1a, 0x34
+};
+
+const uint8_t kMaskRandom46_44[264] = {
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+  0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+  0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+  0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+  0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+  0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+  0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+  0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+  0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+  0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+  0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+  0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+  0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+  0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+  0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+  0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+  0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+  0xea, 0x8d, 0x1b, 0xd5, 0x1a, 0x34,
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+  0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+  0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+  0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+  0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+  0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+  0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+  0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+  0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+  0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+  0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+  0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+  0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+  0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+  0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+  0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+  0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+  0x1a, 0x8a, 0x00, 0x1c, 0x89, 0x54
+};
+
+const uint8_t kMaskRandom46_45[270] = {
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+  0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+  0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+  0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+  0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+  0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+  0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+  0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+  0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+  0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+  0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+  0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+  0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+  0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+  0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+  0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+  0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+  0xea, 0x8d, 0x1b, 0xd5, 0x1a, 0x34,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+  0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+  0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+  0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+  0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+  0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+  0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+  0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+  0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+  0x46, 0x42, 0x0c, 0x8c, 0x84, 0x18,
+  0x33, 0x20, 0x46, 0x66, 0x40, 0x8c,
+  0x99, 0x08, 0x0b, 0x32, 0x10, 0x14,
+  0x05, 0x84, 0x30, 0x0b, 0x08, 0x60,
+  0x80, 0xb0, 0x23, 0x01, 0x60, 0x44,
+  0x84, 0x42, 0x91, 0x08, 0x85, 0x20,
+  0x40, 0x73, 0x00, 0x80, 0xe6, 0x00,
+  0x0a, 0x81, 0x12, 0x15, 0x02, 0x24,
+  0x68, 0x0c, 0x40, 0xd0, 0x18, 0x80,
+  0x10, 0x24, 0x84, 0x20, 0x49, 0x08,
+  0x30, 0x51, 0x40, 0x60, 0xa2, 0x80,
+  0x5f, 0x50, 0x88, 0xbe, 0xa1, 0x10
+};
+
+const uint8_t kMaskRandom46_46[276] = {
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+  0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+  0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+  0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+  0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+  0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+  0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+  0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+  0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+  0x46, 0x42, 0x0c, 0x8c, 0x84, 0x18,
+  0x33, 0x20, 0x46, 0x66, 0x40, 0x8c,
+  0x99, 0x08, 0x0b, 0x32, 0x10, 0x14,
+  0x05, 0x84, 0x30, 0x0b, 0x08, 0x60,
+  0x80, 0xb0, 0x23, 0x01, 0x60, 0x44,
+  0x84, 0x42, 0x91, 0x08, 0x85, 0x20,
+  0x40, 0x73, 0x00, 0x80, 0xe6, 0x00,
+  0x0a, 0x81, 0x12, 0x15, 0x02, 0x24,
+  0x68, 0x0c, 0x40, 0xd0, 0x18, 0x80,
+  0x10, 0x24, 0x84, 0x20, 0x49, 0x08,
+  0x30, 0x51, 0x40, 0x60, 0xa2, 0x80,
+  0x5f, 0x50, 0x88, 0xbe, 0xa1, 0x10,
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x99, 0x02, 0x13, 0x32, 0x04, 0x24,
+  0x05, 0x80, 0x0e, 0x0b, 0x00, 0x1c,
+  0x80, 0xa1, 0x83, 0x01, 0x43, 0x04,
+  0x84, 0x48, 0x19, 0x08, 0x90, 0x30,
+  0x40, 0x6d, 0x40, 0x80, 0xda, 0x80,
+  0x0a, 0x90, 0xc0, 0x15, 0x21, 0x80,
+  0x68, 0x04, 0x90, 0xd0, 0x09, 0x20,
+  0x10, 0x31, 0x20, 0x20, 0x62, 0x40,
+  0x30, 0x58, 0x04, 0x60, 0xb0, 0x08,
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x48, 0xa2, 0xa2, 0x91, 0x44,
+  0xa9, 0x10, 0x1b, 0x52, 0x20, 0x34,
+  0x04, 0xc4, 0x84, 0x09, 0x89, 0x08,
+  0xd0, 0x01, 0x45, 0xa0, 0x02, 0x88,
+  0x82, 0x40, 0x1d, 0x04, 0x80, 0x38,
+  0x21, 0x37, 0x00, 0x42, 0x6e, 0x00,
+  0x0c, 0x21, 0x22, 0x18, 0x42, 0x44,
+  0x4a, 0x0a, 0xc0, 0x94, 0x15, 0x80,
+  0x12, 0xb4, 0x50, 0x25, 0x68, 0xa0,
+  0xea, 0x8d, 0x1b, 0xd5, 0x1a, 0x34,
+  0xd5, 0xdf, 0x59, 0xb9, 0xba, 0x10
+};
+
+const uint8_t kMaskRandom46_5[30] = {
+  0xc6, 0xca, 0x6d, 0x8d, 0x94, 0xd8,
+  0x63, 0x6c, 0x96, 0xc6, 0xd9, 0x2c,
+  0x1d, 0xa1, 0xdc, 0x3b, 0x43, 0xb8,
+  0xad, 0x55, 0x39, 0x5a, 0xaa, 0x70,
+  0xb2, 0xb7, 0x07, 0x65, 0x6e, 0x0c
+};
+
+const uint8_t kMaskRandom46_6[36] = {
+  0x64, 0x4a, 0x28, 0xc8, 0x94, 0x50,
+  0x51, 0x58, 0xa2, 0xa2, 0xb1, 0x44,
+  0x0c, 0xa4, 0x30, 0x19, 0x48, 0x60,
+  0xa1, 0x22, 0x47, 0x42, 0x44, 0x8c,
+  0x12, 0xa1, 0x1c, 0x25, 0x42, 0x38,
+  0x8a, 0x45, 0xc1, 0x14, 0x8b, 0x80
+};
+
+const uint8_t kMaskRandom46_7[42] = {
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x33, 0x24, 0x26, 0x66, 0x48, 0x4c,
+  0x91, 0x92, 0x13, 0x23, 0x24, 0x24,
+  0xa4, 0x20, 0x4b, 0x48, 0x40, 0x94,
+  0x50, 0xa0, 0xd4, 0xa1, 0x41, 0xa8,
+  0x84, 0xc5, 0x81, 0x09, 0x8b, 0x00,
+  0x09, 0x71, 0x0c, 0x12, 0xe2, 0x18
+};
+
+const uint8_t kMaskRandom46_8[48] = {
+  0x0c, 0x84, 0x0c, 0x19, 0x08, 0x18,
+  0x80, 0x70, 0x07, 0x00, 0xe0, 0x0c,
+  0xa0, 0x88, 0x49, 0x41, 0x10, 0x90,
+  0x05, 0x40, 0x32, 0x0a, 0x80, 0x64,
+  0x43, 0x02, 0x82, 0x86, 0x05, 0x04,
+  0x1a, 0x01, 0x50, 0x34, 0x02, 0xa0,
+  0x60, 0x27, 0x00, 0xc0, 0x4e, 0x00,
+  0x14, 0x38, 0xa0, 0x28, 0x71, 0x40
+};
+
+const uint8_t kMaskRandom46_9[54] = {
+  0x46, 0x4a, 0x6c, 0x8c, 0x94, 0xd8,
+  0x62, 0x7c, 0x84, 0xc4, 0xf9, 0x08,
+  0x8c, 0x04, 0x89, 0x18, 0x09, 0x10,
+  0x01, 0x74, 0x22, 0x02, 0xe8, 0x44,
+  0x07, 0x83, 0x06, 0x0f, 0x06, 0x0c,
+  0xa0, 0x80, 0x73, 0x41, 0x00, 0xe4,
+  0x18, 0xb1, 0x42, 0x31, 0x62, 0x84,
+  0x91, 0x00, 0x93, 0x22, 0x01, 0x24,
+  0x78, 0x00, 0x1c, 0xf0, 0x00, 0x38
+};
+
+const uint8_t kMaskRandom47_1[6] = {
+  0xff, 0xff, 0xff, 0xff, 0xff, 0xfe
+};
+
+const uint8_t kMaskRandom47_10[60] = {
+  0x64, 0x4a, 0x28, 0x22, 0x8a, 0x28,
+  0x51, 0x48, 0xa2, 0x8a, 0x68, 0xa6,
+  0xa9, 0x10, 0x1a, 0x00, 0x90, 0x0a,
+  0x04, 0xc4, 0x84, 0x21, 0x06, 0x12,
+  0xd0, 0x01, 0x44, 0x94, 0x29, 0x42,
+  0x82, 0x40, 0x1c, 0x81, 0x48, 0x14,
+  0x21, 0x37, 0x01, 0x40, 0xd4, 0x04,
+  0x0c, 0x21, 0x23, 0x11, 0x01, 0x18,
+  0x4a, 0x0a, 0xc1, 0x0c, 0x10, 0xc0,
+  0x12, 0xb4, 0x50, 0xa8, 0x1a, 0x80
+};
+
+const uint8_t kMaskRandom47_11[66] = {
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x68,
+  0x33, 0x24, 0x27, 0x40, 0x64, 0x22,
+  0x99, 0x02, 0x12, 0x2a, 0x22, 0x82,
+  0x05, 0x80, 0x0e, 0x06, 0xa0, 0x2a,
+  0x80, 0xa1, 0x83, 0x19, 0x11, 0x90,
+  0x84, 0x48, 0x18, 0x51, 0x05, 0x10,
+  0x40, 0x6d, 0x40, 0x10, 0x91, 0x08,
+  0x0a, 0x90, 0xc1, 0x32, 0x03, 0x20,
+  0x68, 0x04, 0x90, 0x45, 0x24, 0x52,
+  0x10, 0x31, 0x20, 0x8c, 0x08, 0xc0,
+  0x30, 0x58, 0x05, 0x18, 0x58, 0x04
+};
+
+const uint8_t kMaskRandom47_12[72] = {
+  0x64, 0x4a, 0x28, 0x20, 0xc2, 0x0c,
+  0x51, 0x58, 0xa2, 0x04, 0x60, 0x46,
+  0x0c, 0xa4, 0x30, 0x80, 0xa8, 0x0a,
+  0xa1, 0x22, 0x46, 0x43, 0x04, 0x30,
+  0x12, 0xa1, 0x1d, 0x02, 0x30, 0x22,
+  0x8a, 0x45, 0xc0, 0x29, 0x02, 0x90,
+  0x86, 0x8a, 0x6d, 0x30, 0x13, 0x00,
+  0x23, 0x2c, 0x84, 0x11, 0x21, 0x12,
+  0x16, 0x21, 0x98, 0xc4, 0x0c, 0x40,
+  0x4c, 0x30, 0x54, 0x48, 0x44, 0x84,
+  0x41, 0xc1, 0x27, 0x14, 0x11, 0x40,
+  0x19, 0x56, 0xe5, 0x08, 0x90, 0x88
+};
+
+const uint8_t kMaskRandom47_13[78] = {
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x58, 0xa3, 0x8a, 0x38, 0xa2,
+  0x0c, 0xa4, 0x30, 0x43, 0x04, 0x30,
+  0xa1, 0x22, 0x46, 0x24, 0x62, 0x46,
+  0x12, 0xa1, 0x1c, 0x11, 0xc1, 0x1c,
+  0x8a, 0x45, 0xc0, 0x5c, 0x05, 0xc0,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+  0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+  0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+  0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+  0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c
+};
+
+const uint8_t kMaskRandom47_14[84] = {
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+  0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+  0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+  0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+  0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x58, 0xa3, 0x8a, 0x38, 0xa2,
+  0x0c, 0xa4, 0x30, 0x43, 0x04, 0x30,
+  0xa1, 0x22, 0x46, 0x24, 0x62, 0x46,
+  0x12, 0xa1, 0x1c, 0x11, 0xc1, 0x1c,
+  0x8a, 0x45, 0xc0, 0x5c, 0x05, 0xc0,
+  0x9c, 0x3f, 0xb3, 0xe5, 0xad, 0x1c
+};
+
+const uint8_t kMaskRandom47_15[90] = {
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+  0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+  0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+  0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+  0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+  0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+  0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+  0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+  0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+  0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+  0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+  0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+  0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0
+};
+
+const uint8_t kMaskRandom47_16[96] = {
+  0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+  0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+  0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+  0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+  0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+  0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+  0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+  0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+  0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+  0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+  0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+  0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+  0xfa, 0xd9, 0xf5, 0xfe, 0xdc, 0x14
+};
+
+const uint8_t kMaskRandom47_17[102] = {
+  0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+  0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+  0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+  0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+  0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+  0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+  0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+  0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+  0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+  0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+  0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+  0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+  0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+  0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+  0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c
+};
+
+const uint8_t kMaskRandom47_18[108] = {
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+  0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+  0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+  0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+  0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+  0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+  0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+  0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+  0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+  0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+  0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+  0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+  0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+  0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+  0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+  0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+  0x82, 0x32, 0x56, 0x68, 0xa1, 0x5c
+};
+
+const uint8_t kMaskRandom47_19[114] = {
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+  0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+  0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+  0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+  0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+  0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+  0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+  0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+  0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+  0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+  0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+  0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+  0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+  0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+  0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+  0x12, 0xb4, 0x51, 0x45, 0x14, 0x50
+};
+
+const uint8_t kMaskRandom47_2[12] = {
+  0xec, 0xdd, 0x99, 0xd9, 0x9d, 0x98,
+  0x9b, 0xb2, 0x77, 0x27, 0x72, 0x76
+};
+
+const uint8_t kMaskRandom47_20[120] = {
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+  0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+  0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+  0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+  0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+  0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+  0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+  0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+  0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+  0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+  0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+  0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+  0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+  0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+  0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+  0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+  0xdb, 0x4a, 0x7b, 0x31, 0x45, 0x2a
+};
+
+const uint8_t kMaskRandom47_21[126] = {
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+  0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+  0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+  0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+  0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+  0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+  0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+  0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+  0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+  0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+  0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+  0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+  0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+  0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+  0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+  0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+  0x30, 0x58, 0x05, 0x80, 0x58, 0x04
+};
+
+const uint8_t kMaskRandom47_22[132] = {
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+  0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+  0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+  0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+  0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+  0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+  0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+  0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+  0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+  0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+  0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+  0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+  0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+  0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+  0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+  0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+  0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+  0xea, 0x8d, 0x1a, 0x35, 0x55, 0xdc
+};
+
+const uint8_t kMaskRandom47_23[138] = {
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+  0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+  0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+  0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+  0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+  0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+  0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+  0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+  0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+  0x46, 0x42, 0x0c, 0x20, 0xc2, 0x0c,
+  0x33, 0x20, 0x46, 0x04, 0x60, 0x46,
+  0x99, 0x08, 0x0a, 0x80, 0xa8, 0x0a,
+  0x05, 0x84, 0x30, 0x43, 0x04, 0x30,
+  0x80, 0xb0, 0x23, 0x02, 0x30, 0x22,
+  0x84, 0x42, 0x90, 0x29, 0x02, 0x90,
+  0x40, 0x73, 0x01, 0x30, 0x13, 0x00,
+  0x0a, 0x81, 0x12, 0x11, 0x21, 0x12,
+  0x68, 0x0c, 0x40, 0xc4, 0x0c, 0x40,
+  0x10, 0x24, 0x84, 0x48, 0x44, 0x84,
+  0x30, 0x51, 0x41, 0x14, 0x11, 0x40,
+  0x5f, 0x50, 0x89, 0x08, 0x90, 0x88
+};
+
+const uint8_t kMaskRandom47_24[144] = {
+  0x64, 0x4a, 0x28, 0x20, 0xc2, 0x0c,
+  0x51, 0x58, 0xa2, 0x04, 0x60, 0x46,
+  0x0c, 0xa4, 0x30, 0x80, 0xa8, 0x0a,
+  0xa1, 0x22, 0x46, 0x43, 0x04, 0x30,
+  0x12, 0xa1, 0x1d, 0x02, 0x30, 0x22,
+  0x8a, 0x45, 0xc0, 0x29, 0x02, 0x90,
+  0x86, 0x8a, 0x6d, 0x30, 0x13, 0x00,
+  0x23, 0x2c, 0x84, 0x11, 0x21, 0x12,
+  0x16, 0x21, 0x98, 0xc4, 0x0c, 0x40,
+  0x4c, 0x30, 0x54, 0x48, 0x44, 0x84,
+  0x41, 0xc1, 0x27, 0x14, 0x11, 0x40,
+  0x19, 0x56, 0xe5, 0x08, 0x90, 0x88,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x68,
+  0x33, 0x24, 0x27, 0x40, 0x64, 0x22,
+  0x99, 0x02, 0x12, 0x2a, 0x22, 0x82,
+  0x05, 0x80, 0x0e, 0x06, 0xa0, 0x2a,
+  0x80, 0xa1, 0x83, 0x19, 0x11, 0x90,
+  0x84, 0x48, 0x18, 0x51, 0x05, 0x10,
+  0x40, 0x6d, 0x40, 0x10, 0x91, 0x08,
+  0x0a, 0x90, 0xc1, 0x32, 0x03, 0x20,
+  0x68, 0x04, 0x90, 0x45, 0x24, 0x52,
+  0x10, 0x31, 0x20, 0x8c, 0x08, 0xc0,
+  0x30, 0x58, 0x05, 0x18, 0x58, 0x04,
+  0x27, 0x41, 0x35, 0x57, 0x7e, 0x6a
+};
+
+const uint8_t kMaskRandom47_25[150] = {
+  0x64, 0x4a, 0x28, 0x20, 0xc2, 0x0c,
+  0x51, 0x58, 0xa2, 0x04, 0x60, 0x46,
+  0x0c, 0xa4, 0x30, 0x80, 0xa8, 0x0a,
+  0xa1, 0x22, 0x46, 0x43, 0x04, 0x30,
+  0x12, 0xa1, 0x1d, 0x02, 0x30, 0x22,
+  0x8a, 0x45, 0xc0, 0x29, 0x02, 0x90,
+  0x86, 0x8a, 0x6d, 0x30, 0x13, 0x00,
+  0x23, 0x2c, 0x84, 0x11, 0x21, 0x12,
+  0x16, 0x21, 0x98, 0xc4, 0x0c, 0x40,
+  0x4c, 0x30, 0x54, 0x48, 0x44, 0x84,
+  0x41, 0xc1, 0x27, 0x14, 0x11, 0x40,
+  0x19, 0x56, 0xe5, 0x08, 0x90, 0x88,
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x58, 0xa3, 0x8a, 0x38, 0xa2,
+  0x0c, 0xa4, 0x30, 0x43, 0x04, 0x30,
+  0xa1, 0x22, 0x46, 0x24, 0x62, 0x46,
+  0x12, 0xa1, 0x1c, 0x11, 0xc1, 0x1c,
+  0x8a, 0x45, 0xc0, 0x5c, 0x05, 0xc0,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+  0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+  0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+  0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+  0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c
+};
+
+const uint8_t kMaskRandom47_26[156] = {
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x58, 0xa3, 0x8a, 0x38, 0xa2,
+  0x0c, 0xa4, 0x30, 0x43, 0x04, 0x30,
+  0xa1, 0x22, 0x46, 0x24, 0x62, 0x46,
+  0x12, 0xa1, 0x1c, 0x11, 0xc1, 0x1c,
+  0x8a, 0x45, 0xc0, 0x5c, 0x05, 0xc0,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+  0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+  0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+  0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+  0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+  0x64, 0x4a, 0x28, 0x20, 0xc2, 0x0c,
+  0x51, 0x58, 0xa2, 0x04, 0x60, 0x46,
+  0x0c, 0xa4, 0x30, 0x80, 0xa8, 0x0a,
+  0xa1, 0x22, 0x46, 0x43, 0x04, 0x30,
+  0x12, 0xa1, 0x1d, 0x02, 0x30, 0x22,
+  0x8a, 0x45, 0xc0, 0x29, 0x02, 0x90,
+  0x86, 0x8a, 0x6d, 0x30, 0x13, 0x00,
+  0x23, 0x2c, 0x84, 0x11, 0x21, 0x12,
+  0x16, 0x21, 0x98, 0xc4, 0x0c, 0x40,
+  0x4c, 0x30, 0x54, 0x48, 0x44, 0x84,
+  0x41, 0xc1, 0x27, 0x14, 0x11, 0x40,
+  0x19, 0x56, 0xe5, 0x08, 0x90, 0x88,
+  0x6c, 0xea, 0xc4, 0x42, 0x20, 0x9e
+};
+
+const uint8_t kMaskRandom47_27[162] = {
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x58, 0xa3, 0x8a, 0x38, 0xa2,
+  0x0c, 0xa4, 0x30, 0x43, 0x04, 0x30,
+  0xa1, 0x22, 0x46, 0x24, 0x62, 0x46,
+  0x12, 0xa1, 0x1c, 0x11, 0xc1, 0x1c,
+  0x8a, 0x45, 0xc0, 0x5c, 0x05, 0xc0,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+  0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+  0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+  0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+  0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+  0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+  0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+  0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+  0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x58, 0xa3, 0x8a, 0x38, 0xa2,
+  0x0c, 0xa4, 0x30, 0x43, 0x04, 0x30,
+  0xa1, 0x22, 0x46, 0x24, 0x62, 0x46,
+  0x12, 0xa1, 0x1c, 0x11, 0xc1, 0x1c,
+  0x8a, 0x45, 0xc0, 0x5c, 0x05, 0xc0,
+  0x9c, 0x3f, 0xb3, 0xe5, 0xad, 0x1c
+};
+
+const uint8_t kMaskRandom47_28[168] = {
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+  0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+  0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+  0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+  0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x58, 0xa3, 0x8a, 0x38, 0xa2,
+  0x0c, 0xa4, 0x30, 0x43, 0x04, 0x30,
+  0xa1, 0x22, 0x46, 0x24, 0x62, 0x46,
+  0x12, 0xa1, 0x1c, 0x11, 0xc1, 0x1c,
+  0x8a, 0x45, 0xc0, 0x5c, 0x05, 0xc0,
+  0x9c, 0x3f, 0xb3, 0xe5, 0xad, 0x1c,
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x58, 0xa3, 0x8a, 0x38, 0xa2,
+  0x0c, 0xa4, 0x30, 0x43, 0x04, 0x30,
+  0xa1, 0x22, 0x46, 0x24, 0x62, 0x46,
+  0x12, 0xa1, 0x1c, 0x11, 0xc1, 0x1c,
+  0x8a, 0x45, 0xc0, 0x5c, 0x05, 0xc0,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+  0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+  0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+  0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+  0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+  0x86, 0x1e, 0xa6, 0xaf, 0x3d, 0x04
+};
+
+const uint8_t kMaskRandom47_29[174] = {
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+  0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+  0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+  0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+  0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x58, 0xa3, 0x8a, 0x38, 0xa2,
+  0x0c, 0xa4, 0x30, 0x43, 0x04, 0x30,
+  0xa1, 0x22, 0x46, 0x24, 0x62, 0x46,
+  0x12, 0xa1, 0x1c, 0x11, 0xc1, 0x1c,
+  0x8a, 0x45, 0xc0, 0x5c, 0x05, 0xc0,
+  0x9c, 0x3f, 0xb3, 0xe5, 0xad, 0x1c,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+  0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+  0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+  0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+  0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+  0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+  0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+  0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+  0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+  0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+  0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+  0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+  0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0
+};
+
+const uint8_t kMaskRandom47_3[18] = {
+  0xca, 0xd3, 0x65, 0x36, 0x53, 0x64,
+  0xf1, 0x49, 0x3a, 0x93, 0xa9, 0x3a,
+  0x76, 0x27, 0xd0, 0x7d, 0x07, 0xd0
+};
+
+const uint8_t kMaskRandom47_30[180] = {
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+  0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+  0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+  0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+  0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+  0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+  0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+  0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+  0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+  0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+  0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+  0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+  0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+  0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+  0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+  0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+  0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x58, 0xa3, 0x8a, 0x38, 0xa2,
+  0x0c, 0xa4, 0x30, 0x43, 0x04, 0x30,
+  0xa1, 0x22, 0x46, 0x24, 0x62, 0x46,
+  0x12, 0xa1, 0x1c, 0x11, 0xc1, 0x1c,
+  0x8a, 0x45, 0xc0, 0x5c, 0x05, 0xc0,
+  0x9c, 0x3f, 0xb3, 0xe5, 0xad, 0x1c,
+  0x97, 0x43, 0x63, 0xc6, 0x09, 0x9c
+};
+
+const uint8_t kMaskRandom47_31[186] = {
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+  0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+  0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+  0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+  0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+  0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+  0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+  0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+  0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+  0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+  0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+  0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+  0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+  0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+  0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+  0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+  0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+  0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+  0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+  0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+  0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+  0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+  0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+  0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+  0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+  0xfa, 0xd9, 0xf5, 0xfe, 0xdc, 0x14
+};
+
+const uint8_t kMaskRandom47_32[192] = {
+  0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+  0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+  0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+  0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+  0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+  0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+  0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+  0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+  0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+  0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+  0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+  0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+  0xfa, 0xd9, 0xf5, 0xfe, 0xdc, 0x14,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+  0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+  0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+  0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+  0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+  0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+  0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+  0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+  0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+  0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+  0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+  0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+  0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+  0xe5, 0x50, 0x45, 0x63, 0xc2, 0xf4
+};
+
+const uint8_t kMaskRandom47_33[198] = {
+  0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+  0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+  0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+  0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+  0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+  0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+  0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+  0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+  0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+  0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+  0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+  0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+  0xfa, 0xd9, 0xf5, 0xfe, 0xdc, 0x14,
+  0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+  0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+  0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+  0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+  0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+  0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+  0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+  0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+  0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+  0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+  0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+  0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+  0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+  0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+  0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c
+};
+
+const uint8_t kMaskRandom47_34[204] = {
+  0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+  0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+  0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+  0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+  0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+  0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+  0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+  0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+  0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+  0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+  0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+  0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+  0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+  0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+  0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+  0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+  0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+  0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+  0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+  0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+  0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+  0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+  0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x91, 0x92, 0x13, 0x21, 0x32, 0x12,
+  0xa4, 0x20, 0x4a, 0x04, 0xa0, 0x4a,
+  0x50, 0xa0, 0xd4, 0x0d, 0x40, 0xd4,
+  0x84, 0xc5, 0x80, 0x58, 0x05, 0x80,
+  0x09, 0x71, 0x0d, 0x10, 0xd1, 0x0c,
+  0xfa, 0xd9, 0xf5, 0xfe, 0xdc, 0x14,
+  0xef, 0xbb, 0xa6, 0x23, 0x5c, 0xbe
+};
+
+const uint8_t kMaskRandom47_35[210] = {
+  0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+  0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+  0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+  0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+  0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+  0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+  0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+  0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+  0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+  0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+  0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+  0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+  0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+  0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+  0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+  0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+  0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+  0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+  0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+  0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+  0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+  0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+  0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+  0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+  0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+  0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+  0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+  0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+  0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+  0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+  0x82, 0x32, 0x56, 0x68, 0xa1, 0x5c
+};
+
+const uint8_t kMaskRandom47_36[216] = {
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+  0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+  0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+  0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+  0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+  0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+  0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+  0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+  0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+  0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+  0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+  0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+  0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+  0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+  0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+  0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+  0x82, 0x32, 0x56, 0x68, 0xa1, 0x5c,
+  0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+  0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+  0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+  0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+  0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+  0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+  0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+  0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+  0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+  0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+  0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+  0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+  0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+  0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+  0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+  0x0e, 0xd7, 0x38, 0x20, 0x87, 0x66
+};
+
+const uint8_t kMaskRandom47_37[222] = {
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+  0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+  0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+  0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+  0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+  0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+  0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+  0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+  0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+  0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+  0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+  0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+  0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+  0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+  0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+  0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+  0x82, 0x32, 0x56, 0x68, 0xa1, 0x5c,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+  0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+  0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+  0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+  0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+  0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+  0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+  0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+  0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+  0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+  0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+  0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+  0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+  0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+  0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+  0x12, 0xb4, 0x51, 0x45, 0x14, 0x50
+};
+
+const uint8_t kMaskRandom47_38[228] = {
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+  0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+  0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+  0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+  0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+  0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+  0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+  0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+  0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+  0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+  0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+  0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+  0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+  0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+  0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+  0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+  0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+  0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+  0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+  0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+  0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+  0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+  0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+  0x0c, 0x84, 0x0c, 0x40, 0xc4, 0x0c,
+  0x80, 0x70, 0x07, 0x00, 0x70, 0x06,
+  0xa0, 0x88, 0x48, 0x84, 0x88, 0x48,
+  0x05, 0x40, 0x32, 0x03, 0x20, 0x32,
+  0x43, 0x02, 0x82, 0x28, 0x22, 0x82,
+  0x1a, 0x01, 0x50, 0x15, 0x01, 0x50,
+  0x60, 0x27, 0x00, 0x70, 0x07, 0x00,
+  0x14, 0x38, 0xa1, 0x8a, 0x18, 0xa0,
+  0x82, 0x32, 0x56, 0x68, 0xa1, 0x5c,
+  0x7b, 0x47, 0xa5, 0xde, 0x9a, 0xd4
+};
+
+const uint8_t kMaskRandom47_39[234] = {
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+  0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+  0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+  0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+  0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+  0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+  0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+  0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+  0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+  0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+  0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+  0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+  0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+  0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+  0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+  0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+  0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+  0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+  0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+  0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+  0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+  0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+  0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+  0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+  0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+  0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+  0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+  0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+  0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+  0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+  0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+  0xdb, 0x4a, 0x7b, 0x31, 0x45, 0x2a
+};
+
+const uint8_t kMaskRandom47_4[24] = {
+  0xc4, 0xd1, 0x65, 0x16, 0x51, 0x64,
+  0x31, 0x62, 0x96, 0x29, 0x62, 0x96,
+  0x4b, 0x24, 0x5a, 0x45, 0xa4, 0x5a,
+  0x2c, 0xa8, 0xaa, 0x8a, 0xa8, 0xaa
+};
+
+const uint8_t kMaskRandom47_40[240] = {
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+  0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+  0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+  0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+  0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+  0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+  0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+  0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+  0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+  0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+  0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+  0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+  0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+  0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+  0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+  0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+  0xdb, 0x4a, 0x7b, 0x31, 0x45, 0x2a,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+  0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+  0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+  0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+  0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+  0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+  0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+  0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+  0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+  0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+  0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+  0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+  0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+  0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+  0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+  0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+  0xc4, 0xae, 0x5e, 0x33, 0xf5, 0x1a
+};
+
+const uint8_t kMaskRandom47_41[246] = {
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+  0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+  0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+  0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+  0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+  0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+  0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+  0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+  0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+  0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+  0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+  0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+  0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+  0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+  0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+  0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+  0xdb, 0x4a, 0x7b, 0x31, 0x45, 0x2a,
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+  0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+  0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+  0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+  0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+  0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+  0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+  0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+  0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+  0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+  0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+  0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+  0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+  0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+  0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+  0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+  0x30, 0x58, 0x05, 0x80, 0x58, 0x04
+};
+
+const uint8_t kMaskRandom47_42[252] = {
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+  0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+  0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+  0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+  0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+  0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+  0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+  0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+  0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+  0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+  0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+  0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+  0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+  0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+  0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+  0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+  0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+  0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+  0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+  0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+  0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+  0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+  0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+  0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+  0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x62, 0x7c, 0x85, 0xc8, 0x5c, 0x84,
+  0x8c, 0x04, 0x88, 0x48, 0x84, 0x88,
+  0x01, 0x74, 0x23, 0x42, 0x34, 0x22,
+  0x07, 0x83, 0x06, 0x30, 0x63, 0x06,
+  0xa0, 0x80, 0x72, 0x07, 0x20, 0x72,
+  0x18, 0xb1, 0x43, 0x14, 0x31, 0x42,
+  0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+  0x78, 0x00, 0x1c, 0x01, 0xc0, 0x1c,
+  0xdb, 0x4a, 0x7b, 0x31, 0x45, 0x2a,
+  0x3c, 0xb0, 0x36, 0x3b, 0x14, 0xa2
+};
+
+const uint8_t kMaskRandom47_43[258] = {
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+  0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+  0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+  0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+  0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+  0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+  0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+  0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+  0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+  0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+  0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+  0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+  0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+  0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+  0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+  0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+  0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+  0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+  0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+  0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+  0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+  0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+  0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+  0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+  0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+  0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+  0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+  0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+  0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+  0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+  0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+  0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+  0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+  0xea, 0x8d, 0x1a, 0x35, 0x55, 0xdc
+};
+
+const uint8_t kMaskRandom47_44[264] = {
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+  0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+  0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+  0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+  0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+  0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+  0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+  0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+  0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+  0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+  0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+  0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+  0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+  0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+  0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+  0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+  0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+  0xea, 0x8d, 0x1a, 0x35, 0x55, 0xdc,
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+  0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+  0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+  0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+  0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+  0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+  0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+  0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+  0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+  0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+  0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+  0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+  0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+  0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+  0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+  0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+  0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+  0xd4, 0x8a, 0xd4, 0xd3, 0x3f, 0xe6
+};
+
+const uint8_t kMaskRandom47_45[270] = {
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+  0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+  0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+  0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+  0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+  0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+  0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+  0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+  0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+  0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+  0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+  0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+  0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+  0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+  0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+  0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+  0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+  0xea, 0x8d, 0x1a, 0x35, 0x55, 0xdc,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+  0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+  0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+  0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+  0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+  0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+  0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+  0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+  0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+  0x46, 0x42, 0x0c, 0x20, 0xc2, 0x0c,
+  0x33, 0x20, 0x46, 0x04, 0x60, 0x46,
+  0x99, 0x08, 0x0a, 0x80, 0xa8, 0x0a,
+  0x05, 0x84, 0x30, 0x43, 0x04, 0x30,
+  0x80, 0xb0, 0x23, 0x02, 0x30, 0x22,
+  0x84, 0x42, 0x90, 0x29, 0x02, 0x90,
+  0x40, 0x73, 0x01, 0x30, 0x13, 0x00,
+  0x0a, 0x81, 0x12, 0x11, 0x21, 0x12,
+  0x68, 0x0c, 0x40, 0xc4, 0x0c, 0x40,
+  0x10, 0x24, 0x84, 0x48, 0x44, 0x84,
+  0x30, 0x51, 0x41, 0x14, 0x11, 0x40,
+  0x5f, 0x50, 0x89, 0x08, 0x90, 0x88
+};
+
+const uint8_t kMaskRandom47_46[276] = {
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+  0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+  0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+  0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+  0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+  0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+  0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+  0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+  0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+  0x46, 0x42, 0x0c, 0x20, 0xc2, 0x0c,
+  0x33, 0x20, 0x46, 0x04, 0x60, 0x46,
+  0x99, 0x08, 0x0a, 0x80, 0xa8, 0x0a,
+  0x05, 0x84, 0x30, 0x43, 0x04, 0x30,
+  0x80, 0xb0, 0x23, 0x02, 0x30, 0x22,
+  0x84, 0x42, 0x90, 0x29, 0x02, 0x90,
+  0x40, 0x73, 0x01, 0x30, 0x13, 0x00,
+  0x0a, 0x81, 0x12, 0x11, 0x21, 0x12,
+  0x68, 0x0c, 0x40, 0xc4, 0x0c, 0x40,
+  0x10, 0x24, 0x84, 0x48, 0x44, 0x84,
+  0x30, 0x51, 0x41, 0x14, 0x11, 0x40,
+  0x5f, 0x50, 0x89, 0x08, 0x90, 0x88,
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+  0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+  0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+  0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+  0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+  0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+  0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+  0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+  0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+  0x64, 0x4a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x48, 0xa2, 0x8a, 0x28, 0xa2,
+  0xa9, 0x10, 0x1b, 0x01, 0xb0, 0x1a,
+  0x04, 0xc4, 0x84, 0x48, 0x44, 0x84,
+  0xd0, 0x01, 0x44, 0x14, 0x41, 0x44,
+  0x82, 0x40, 0x1c, 0x01, 0xc0, 0x1c,
+  0x21, 0x37, 0x01, 0x70, 0x17, 0x00,
+  0x0c, 0x21, 0x22, 0x12, 0x21, 0x22,
+  0x4a, 0x0a, 0xc0, 0xac, 0x0a, 0xc0,
+  0x12, 0xb4, 0x51, 0x45, 0x14, 0x50,
+  0xea, 0x8d, 0x1a, 0x35, 0x55, 0xdc,
+  0x37, 0x9d, 0xcf, 0xe0, 0xe4, 0x20
+};
+
+const uint8_t kMaskRandom47_47[282] = {
+  0x46, 0x4a, 0x6c, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x42, 0x64, 0x26,
+  0x99, 0x02, 0x12, 0x21, 0x22, 0x12,
+  0x05, 0x80, 0x0e, 0x00, 0xe0, 0x0e,
+  0x80, 0xa1, 0x82, 0x18, 0x21, 0x82,
+  0x84, 0x48, 0x18, 0x81, 0x88, 0x18,
+  0x40, 0x6d, 0x40, 0xd4, 0x0d, 0x40,
+  0x0a, 0x90, 0xc1, 0x0c, 0x10, 0xc0,
+  0x68, 0x04, 0x90, 0x49, 0x04, 0x90,
+  0x10, 0x31, 0x21, 0x12, 0x11, 0x20,
+  0x30, 0x58, 0x05, 0x80, 0x58, 0x04,
+  0x46, 0x42, 0x0c, 0x20, 0xc2, 0x0c,
+  0x33, 0x20, 0x46, 0x04, 0x60, 0x46,
+  0x99, 0x08, 0x0a, 0x80, 0xa8, 0x0a,
+  0x05, 0x84, 0x30, 0x43, 0x04, 0x30,
+  0x80, 0xb0, 0x23, 0x02, 0x30, 0x22,
+  0x84, 0x42, 0x90, 0x29, 0x02, 0x90,
+  0x40, 0x73, 0x01, 0x30, 0x13, 0x00,
+  0x0a, 0x81, 0x12, 0x11, 0x21, 0x12,
+  0x68, 0x0c, 0x40, 0xc4, 0x0c, 0x40,
+  0x10, 0x24, 0x84, 0x48, 0x44, 0x84,
+  0x30, 0x51, 0x41, 0x14, 0x11, 0x40,
+  0x5f, 0x50, 0x89, 0x08, 0x90, 0x88,
+  0x46, 0x4a, 0x6c, 0x20, 0xc2, 0x0c,
+  0x33, 0x24, 0x26, 0x04, 0x60, 0x46,
+  0x99, 0x02, 0x12, 0x80, 0xa8, 0x0a,
+  0x05, 0x80, 0x0e, 0x43, 0x04, 0x30,
+  0x80, 0xa1, 0x83, 0x02, 0x30, 0x22,
+  0x84, 0x48, 0x18, 0x29, 0x02, 0x90,
+  0x40, 0x6d, 0x41, 0x30, 0x13, 0x00,
+  0x0a, 0x90, 0xc0, 0x11, 0x21, 0x12,
+  0x68, 0x04, 0x90, 0xc4, 0x0c, 0x40,
+  0x10, 0x31, 0x20, 0x48, 0x44, 0x84,
+  0x30, 0x58, 0x05, 0x14, 0x11, 0x40,
+  0x46, 0x42, 0x0d, 0x08, 0x90, 0x88,
+  0x33, 0x20, 0x46, 0xa6, 0xca, 0x6c,
+  0x99, 0x08, 0x0a, 0x42, 0x64, 0x26,
+  0x05, 0x84, 0x30, 0x21, 0x22, 0x12,
+  0x80, 0xb0, 0x22, 0x00, 0xe0, 0x0e,
+  0x84, 0x42, 0x90, 0x18, 0x21, 0x82,
+  0x40, 0x73, 0x00, 0x81, 0x88, 0x18,
+  0x0a, 0x81, 0x12, 0xd4, 0x0d, 0x40,
+  0x68, 0x0c, 0x41, 0x0c, 0x10, 0xc0,
+  0x10, 0x24, 0x84, 0x49, 0x04, 0x90,
+  0x30, 0x51, 0x41, 0x12, 0x11, 0x20,
+  0x5f, 0x50, 0x89, 0x80, 0x58, 0x04,
+  0x1f, 0x2f, 0x63, 0x10, 0x64, 0xb2
+};
+
+const uint8_t kMaskRandom47_5[30] = {
+  0xc6, 0xca, 0x6c, 0xa6, 0xca, 0x6c,
+  0x63, 0x6c, 0x96, 0xc9, 0x6c, 0x96,
+  0x1d, 0xa1, 0xdc, 0x1d, 0xc1, 0xdc,
+  0xad, 0x55, 0x39, 0x53, 0x95, 0x38,
+  0xb2, 0xb7, 0x07, 0x70, 0x77, 0x06
+};
+
+const uint8_t kMaskRandom47_6[36] = {
+  0x64, 0x4a, 0x29, 0xa2, 0x9a, 0x28,
+  0x51, 0x58, 0xa2, 0x8a, 0x68, 0xa6,
+  0x0c, 0xa4, 0x30, 0x45, 0xa4, 0x5a,
+  0xa1, 0x22, 0x46, 0x2d, 0x82, 0xd8,
+  0x12, 0xa1, 0x1c, 0x17, 0x41, 0x74,
+  0x8a, 0x45, 0xc1, 0xd1, 0x1d, 0x10
+};
+
+const uint8_t kMaskRandom47_7[42] = {
+  0x46, 0x4a, 0x6d, 0xa6, 0xca, 0x6c,
+  0x33, 0x24, 0x26, 0x4a, 0x64, 0xa6,
+  0x91, 0x92, 0x12, 0x61, 0xa6, 0x0a,
+  0xa4, 0x20, 0x4a, 0x0c, 0x90, 0xd8,
+  0x50, 0xa0, 0xd5, 0x81, 0x70, 0x36,
+  0x84, 0xc5, 0x80, 0x55, 0x45, 0x54,
+  0x09, 0x71, 0x0d, 0x50, 0x9d, 0x08
+};
+
+const uint8_t kMaskRandom47_8[48] = {
+  0x0c, 0x84, 0x0d, 0x02, 0xc0, 0x2c,
+  0x80, 0x70, 0x06, 0x80, 0x78, 0x06,
+  0xa0, 0x88, 0x48, 0x21, 0x22, 0x12,
+  0x05, 0x40, 0x32, 0x0c, 0xa0, 0xca,
+  0x43, 0x02, 0x82, 0x40, 0x95, 0x08,
+  0x1a, 0x01, 0x51, 0x15, 0x41, 0x54,
+  0x60, 0x27, 0x00, 0x66, 0x06, 0x60,
+  0x14, 0x38, 0xa0, 0x99, 0x09, 0x90
+};
+
+const uint8_t kMaskRandom47_9[54] = {
+  0x46, 0x4a, 0x6d, 0xa6, 0xca, 0x6c,
+  0x62, 0x7c, 0x84, 0xc8, 0x4c, 0x84,
+  0x8c, 0x04, 0x88, 0x30, 0x83, 0x88,
+  0x01, 0x74, 0x23, 0x40, 0x94, 0x08,
+  0x07, 0x83, 0x07, 0x02, 0x70, 0x26,
+  0xa0, 0x80, 0x72, 0x45, 0x44, 0x54,
+  0x18, 0xb1, 0x42, 0x10, 0xe1, 0x0e,
+  0x91, 0x00, 0x92, 0x09, 0x20, 0x92,
+  0x78, 0x00, 0x1c, 0x03, 0x80, 0x38
+};
+
+const uint8_t kMaskRandom48_1[6] = {
+  0xff, 0xff, 0xff, 0xff, 0xff, 0xff
+};
+
+const uint8_t kMaskRandom48_10[60] = {
+  0x11, 0x45, 0x14, 0x11, 0x45, 0x14,
+  0x45, 0x34, 0x53, 0x45, 0x34, 0x53,
+  0x00, 0x48, 0x05, 0x00, 0x48, 0x05,
+  0x10, 0x83, 0x09, 0x10, 0x83, 0x09,
+  0x4a, 0x14, 0xa1, 0x4a, 0x14, 0xa1,
+  0x40, 0xa4, 0x0a, 0x40, 0xa4, 0x0a,
+  0xa0, 0x6a, 0x02, 0xa0, 0x6a, 0x02,
+  0x88, 0x80, 0x8c, 0x88, 0x80, 0x8c,
+  0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+  0x54, 0x0d, 0x40, 0x54, 0x0d, 0x40
+};
+
+const uint8_t kMaskRandom48_11[66] = {
+  0x53, 0x65, 0x34, 0x53, 0x65, 0x34,
+  0xa0, 0x32, 0x11, 0xa0, 0x32, 0x11,
+  0x15, 0x11, 0x41, 0x15, 0x11, 0x41,
+  0x03, 0x50, 0x15, 0x03, 0x50, 0x15,
+  0x8c, 0x88, 0xc8, 0x8c, 0x88, 0xc8,
+  0x28, 0x82, 0x88, 0x28, 0x82, 0x88,
+  0x08, 0x48, 0x84, 0x08, 0x48, 0x84,
+  0x99, 0x01, 0x90, 0x99, 0x01, 0x90,
+  0x22, 0x92, 0x29, 0x22, 0x92, 0x29,
+  0x46, 0x04, 0x60, 0x46, 0x04, 0x60,
+  0x8c, 0x2c, 0x02, 0x8c, 0x2c, 0x02
+};
+
+const uint8_t kMaskRandom48_12[72] = {
+  0x10, 0x61, 0x06, 0x10, 0x61, 0x06,
+  0x02, 0x30, 0x23, 0x02, 0x30, 0x23,
+  0x40, 0x54, 0x05, 0x40, 0x54, 0x05,
+  0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+  0x81, 0x18, 0x11, 0x81, 0x18, 0x11,
+  0x14, 0x81, 0x48, 0x14, 0x81, 0x48,
+  0x98, 0x09, 0x80, 0x98, 0x09, 0x80,
+  0x08, 0x90, 0x89, 0x08, 0x90, 0x89,
+  0x62, 0x06, 0x20, 0x62, 0x06, 0x20,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x8a, 0x08, 0xa0, 0x8a, 0x08, 0xa0,
+  0x84, 0x48, 0x44, 0x84, 0x48, 0x44
+};
+
+const uint8_t kMaskRandom48_13[78] = {
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0xc5, 0x1c, 0x51, 0xc5, 0x1c, 0x51,
+  0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+  0x12, 0x31, 0x23, 0x12, 0x31, 0x23,
+  0x08, 0xe0, 0x8e, 0x08, 0xe0, 0x8e,
+  0x2e, 0x02, 0xe0, 0x2e, 0x02, 0xe0,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+  0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+  0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+  0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+  0x88, 0x68, 0x86, 0x88, 0x68, 0x86
+};
+
+const uint8_t kMaskRandom48_14[84] = {
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+  0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+  0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+  0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+  0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0xc5, 0x1c, 0x51, 0xc5, 0x1c, 0x51,
+  0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+  0x12, 0x31, 0x23, 0x12, 0x31, 0x23,
+  0x08, 0xe0, 0x8e, 0x08, 0xe0, 0x8e,
+  0x2e, 0x02, 0xe0, 0x2e, 0x02, 0xe0,
+  0xf2, 0xd6, 0x8e, 0xf2, 0xd6, 0x8e
+};
+
+const uint8_t kMaskRandom48_15[90] = {
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+  0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+  0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+  0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+  0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+  0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+  0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+  0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+  0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+  0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+  0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+  0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+  0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50
+};
+
+const uint8_t kMaskRandom48_16[96] = {
+  0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+  0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+  0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+  0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+  0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+  0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+  0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+  0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+  0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+  0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+  0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+  0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+  0xff, 0x6e, 0x0a, 0xff, 0x6e, 0x0a
+};
+
+const uint8_t kMaskRandom48_17[102] = {
+  0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+  0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+  0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+  0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+  0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+  0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+  0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+  0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+  0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+  0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+  0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+  0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+  0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+  0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e
+};
+
+const uint8_t kMaskRandom48_18[108] = {
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+  0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+  0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+  0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+  0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+  0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+  0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+  0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+  0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+  0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+  0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+  0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+  0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+  0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+  0x34, 0x50, 0xae, 0x34, 0x50, 0xae
+};
+
+const uint8_t kMaskRandom48_19[114] = {
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+  0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+  0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+  0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+  0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+  0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+  0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+  0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+  0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+  0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+  0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28
+};
+
+const uint8_t kMaskRandom48_2[12] = {
+  0xec, 0xce, 0xcc, 0xec, 0xce, 0xcc,
+  0x93, 0xb9, 0x3b, 0x93, 0xb9, 0x3b
+};
+
+const uint8_t kMaskRandom48_20[120] = {
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+  0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+  0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+  0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+  0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+  0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+  0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+  0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+  0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+  0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+  0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0x98, 0xa2, 0x95, 0x98, 0xa2, 0x95
+};
+
+const uint8_t kMaskRandom48_21[126] = {
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+  0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+  0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+  0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+  0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+  0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+  0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+  0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+  0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+  0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+  0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+  0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+  0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02
+};
+
+const uint8_t kMaskRandom48_22[132] = {
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+  0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+  0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+  0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+  0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+  0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+  0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+  0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+  0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+  0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+  0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+  0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+  0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+  0x1a, 0xaa, 0xee, 0x1a, 0xaa, 0xee
+};
+
+const uint8_t kMaskRandom48_23[138] = {
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+  0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+  0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+  0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+  0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+  0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+  0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+  0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+  0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+  0x10, 0x61, 0x06, 0x10, 0x61, 0x06,
+  0x02, 0x30, 0x23, 0x02, 0x30, 0x23,
+  0x40, 0x54, 0x05, 0x40, 0x54, 0x05,
+  0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+  0x81, 0x18, 0x11, 0x81, 0x18, 0x11,
+  0x14, 0x81, 0x48, 0x14, 0x81, 0x48,
+  0x98, 0x09, 0x80, 0x98, 0x09, 0x80,
+  0x08, 0x90, 0x89, 0x08, 0x90, 0x89,
+  0x62, 0x06, 0x20, 0x62, 0x06, 0x20,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x8a, 0x08, 0xa0, 0x8a, 0x08, 0xa0,
+  0x84, 0x48, 0x44, 0x84, 0x48, 0x44
+};
+
+const uint8_t kMaskRandom48_24[144] = {
+  0x10, 0x61, 0x06, 0x10, 0x61, 0x06,
+  0x02, 0x30, 0x23, 0x02, 0x30, 0x23,
+  0x40, 0x54, 0x05, 0x40, 0x54, 0x05,
+  0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+  0x81, 0x18, 0x11, 0x81, 0x18, 0x11,
+  0x14, 0x81, 0x48, 0x14, 0x81, 0x48,
+  0x98, 0x09, 0x80, 0x98, 0x09, 0x80,
+  0x08, 0x90, 0x89, 0x08, 0x90, 0x89,
+  0x62, 0x06, 0x20, 0x62, 0x06, 0x20,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x8a, 0x08, 0xa0, 0x8a, 0x08, 0xa0,
+  0x84, 0x48, 0x44, 0x84, 0x48, 0x44,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+  0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+  0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+  0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+  0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+  0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+  0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+  0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+  0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+  0x88, 0x32, 0x59, 0x88, 0x32, 0x59
+};
+
+const uint8_t kMaskRandom48_25[150] = {
+  0x10, 0x61, 0x06, 0x10, 0x61, 0x06,
+  0x02, 0x30, 0x23, 0x02, 0x30, 0x23,
+  0x40, 0x54, 0x05, 0x40, 0x54, 0x05,
+  0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+  0x81, 0x18, 0x11, 0x81, 0x18, 0x11,
+  0x14, 0x81, 0x48, 0x14, 0x81, 0x48,
+  0x98, 0x09, 0x80, 0x98, 0x09, 0x80,
+  0x08, 0x90, 0x89, 0x08, 0x90, 0x89,
+  0x62, 0x06, 0x20, 0x62, 0x06, 0x20,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x8a, 0x08, 0xa0, 0x8a, 0x08, 0xa0,
+  0x84, 0x48, 0x44, 0x84, 0x48, 0x44,
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0xc5, 0x1c, 0x51, 0xc5, 0x1c, 0x51,
+  0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+  0x12, 0x31, 0x23, 0x12, 0x31, 0x23,
+  0x08, 0xe0, 0x8e, 0x08, 0xe0, 0x8e,
+  0x2e, 0x02, 0xe0, 0x2e, 0x02, 0xe0,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+  0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+  0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+  0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+  0x88, 0x68, 0x86, 0x88, 0x68, 0x86
+};
+
+const uint8_t kMaskRandom48_26[156] = {
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0xc5, 0x1c, 0x51, 0xc5, 0x1c, 0x51,
+  0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+  0x12, 0x31, 0x23, 0x12, 0x31, 0x23,
+  0x08, 0xe0, 0x8e, 0x08, 0xe0, 0x8e,
+  0x2e, 0x02, 0xe0, 0x2e, 0x02, 0xe0,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+  0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+  0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+  0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+  0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+  0x10, 0x61, 0x06, 0x10, 0x61, 0x06,
+  0x02, 0x30, 0x23, 0x02, 0x30, 0x23,
+  0x40, 0x54, 0x05, 0x40, 0x54, 0x05,
+  0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+  0x81, 0x18, 0x11, 0x81, 0x18, 0x11,
+  0x14, 0x81, 0x48, 0x14, 0x81, 0x48,
+  0x98, 0x09, 0x80, 0x98, 0x09, 0x80,
+  0x08, 0x90, 0x89, 0x08, 0x90, 0x89,
+  0x62, 0x06, 0x20, 0x62, 0x06, 0x20,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x8a, 0x08, 0xa0, 0x8a, 0x08, 0xa0,
+  0x84, 0x48, 0x44, 0x84, 0x48, 0x44,
+  0x3e, 0x20, 0x79, 0xe5, 0x55, 0x70
+};
+
+const uint8_t kMaskRandom48_27[162] = {
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0xc5, 0x1c, 0x51, 0xc5, 0x1c, 0x51,
+  0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+  0x12, 0x31, 0x23, 0x12, 0x31, 0x23,
+  0x08, 0xe0, 0x8e, 0x08, 0xe0, 0x8e,
+  0x2e, 0x02, 0xe0, 0x2e, 0x02, 0xe0,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+  0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+  0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+  0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+  0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+  0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+  0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+  0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+  0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0xc5, 0x1c, 0x51, 0xc5, 0x1c, 0x51,
+  0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+  0x12, 0x31, 0x23, 0x12, 0x31, 0x23,
+  0x08, 0xe0, 0x8e, 0x08, 0xe0, 0x8e,
+  0x2e, 0x02, 0xe0, 0x2e, 0x02, 0xe0,
+  0xf2, 0xd6, 0x8e, 0xf2, 0xd6, 0x8e
+};
+
+const uint8_t kMaskRandom48_28[168] = {
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+  0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+  0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+  0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+  0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0xc5, 0x1c, 0x51, 0xc5, 0x1c, 0x51,
+  0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+  0x12, 0x31, 0x23, 0x12, 0x31, 0x23,
+  0x08, 0xe0, 0x8e, 0x08, 0xe0, 0x8e,
+  0x2e, 0x02, 0xe0, 0x2e, 0x02, 0xe0,
+  0xf2, 0xd6, 0x8e, 0xf2, 0xd6, 0x8e,
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0xc5, 0x1c, 0x51, 0xc5, 0x1c, 0x51,
+  0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+  0x12, 0x31, 0x23, 0x12, 0x31, 0x23,
+  0x08, 0xe0, 0x8e, 0x08, 0xe0, 0x8e,
+  0x2e, 0x02, 0xe0, 0x2e, 0x02, 0xe0,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+  0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+  0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+  0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+  0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+  0x32, 0xe3, 0xc0, 0x4a, 0xf2, 0x2a
+};
+
+const uint8_t kMaskRandom48_29[174] = {
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+  0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+  0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+  0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+  0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0xc5, 0x1c, 0x51, 0xc5, 0x1c, 0x51,
+  0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+  0x12, 0x31, 0x23, 0x12, 0x31, 0x23,
+  0x08, 0xe0, 0x8e, 0x08, 0xe0, 0x8e,
+  0x2e, 0x02, 0xe0, 0x2e, 0x02, 0xe0,
+  0xf2, 0xd6, 0x8e, 0xf2, 0xd6, 0x8e,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+  0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+  0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+  0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+  0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+  0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+  0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+  0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+  0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+  0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+  0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+  0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+  0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50
+};
+
+const uint8_t kMaskRandom48_3[18] = {
+  0x9b, 0x29, 0xb2, 0x9b, 0x29, 0xb2,
+  0x49, 0xd4, 0x9d, 0x49, 0xd4, 0x9d,
+  0x3e, 0x83, 0xe8, 0x3e, 0x83, 0xe8
+};
+
+const uint8_t kMaskRandom48_30[180] = {
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+  0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+  0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+  0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+  0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+  0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+  0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+  0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+  0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+  0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+  0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+  0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+  0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+  0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+  0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+  0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+  0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0xc5, 0x1c, 0x51, 0xc5, 0x1c, 0x51,
+  0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+  0x12, 0x31, 0x23, 0x12, 0x31, 0x23,
+  0x08, 0xe0, 0x8e, 0x08, 0xe0, 0x8e,
+  0x2e, 0x02, 0xe0, 0x2e, 0x02, 0xe0,
+  0xf2, 0xd6, 0x8e, 0xf2, 0xd6, 0x8e,
+  0x66, 0xf3, 0x9a, 0xdd, 0x68, 0x93
+};
+
+const uint8_t kMaskRandom48_31[186] = {
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+  0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+  0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+  0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+  0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+  0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+  0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+  0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+  0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+  0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+  0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+  0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+  0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+  0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+  0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+  0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+  0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+  0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+  0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+  0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+  0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+  0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+  0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+  0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+  0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+  0xff, 0x6e, 0x0a, 0xff, 0x6e, 0x0a
+};
+
+const uint8_t kMaskRandom48_32[192] = {
+  0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+  0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+  0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+  0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+  0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+  0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+  0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+  0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+  0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+  0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+  0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+  0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+  0xff, 0x6e, 0x0a, 0xff, 0x6e, 0x0a,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+  0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+  0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+  0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+  0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+  0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+  0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+  0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+  0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+  0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+  0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+  0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+  0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+  0xd5, 0x4a, 0x4f, 0x48, 0xb5, 0x31
+};
+
+const uint8_t kMaskRandom48_33[198] = {
+  0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+  0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+  0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+  0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+  0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+  0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+  0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+  0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+  0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+  0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+  0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+  0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+  0xff, 0x6e, 0x0a, 0xff, 0x6e, 0x0a,
+  0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+  0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+  0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+  0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+  0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+  0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+  0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+  0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+  0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+  0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+  0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+  0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+  0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+  0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e
+};
+
+const uint8_t kMaskRandom48_34[204] = {
+  0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+  0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+  0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+  0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+  0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+  0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+  0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+  0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+  0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+  0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+  0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+  0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+  0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+  0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+  0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+  0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+  0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+  0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+  0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+  0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+  0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x90, 0x99, 0x09, 0x90, 0x99, 0x09,
+  0x02, 0x50, 0x25, 0x02, 0x50, 0x25,
+  0x06, 0xa0, 0x6a, 0x06, 0xa0, 0x6a,
+  0x2c, 0x02, 0xc0, 0x2c, 0x02, 0xc0,
+  0x88, 0x68, 0x86, 0x88, 0x68, 0x86,
+  0xff, 0x6e, 0x0a, 0xff, 0x6e, 0x0a,
+  0x40, 0x72, 0x4c, 0xe8, 0xf2, 0x42
+};
+
+const uint8_t kMaskRandom48_35[210] = {
+  0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+  0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+  0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+  0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+  0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+  0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+  0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+  0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+  0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+  0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+  0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+  0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+  0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+  0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+  0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+  0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+  0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+  0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+  0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+  0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+  0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+  0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+  0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+  0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+  0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+  0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+  0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+  0x34, 0x50, 0xae, 0x34, 0x50, 0xae
+};
+
+const uint8_t kMaskRandom48_36[216] = {
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+  0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+  0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+  0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+  0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+  0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+  0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+  0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+  0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+  0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+  0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+  0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+  0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+  0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+  0x34, 0x50, 0xae, 0x34, 0x50, 0xae,
+  0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+  0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+  0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+  0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+  0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+  0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+  0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+  0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+  0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+  0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+  0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+  0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+  0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+  0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0x71, 0xba, 0x8b, 0xf3, 0xfa, 0x9d
+};
+
+const uint8_t kMaskRandom48_37[222] = {
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+  0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+  0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+  0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+  0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+  0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+  0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+  0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+  0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+  0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+  0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+  0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+  0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+  0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+  0x34, 0x50, 0xae, 0x34, 0x50, 0xae,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+  0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+  0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+  0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+  0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+  0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+  0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+  0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+  0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+  0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+  0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28
+};
+
+const uint8_t kMaskRandom48_38[228] = {
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+  0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+  0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+  0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+  0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+  0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+  0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+  0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+  0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+  0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+  0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+  0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+  0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+  0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+  0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+  0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+  0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0x20, 0x62, 0x06, 0x20, 0x62, 0x06,
+  0x80, 0x38, 0x03, 0x80, 0x38, 0x03,
+  0x42, 0x44, 0x24, 0x42, 0x44, 0x24,
+  0x01, 0x90, 0x19, 0x01, 0x90, 0x19,
+  0x14, 0x11, 0x41, 0x14, 0x11, 0x41,
+  0x0a, 0x80, 0xa8, 0x0a, 0x80, 0xa8,
+  0x38, 0x03, 0x80, 0x38, 0x03, 0x80,
+  0xc5, 0x0c, 0x50, 0xc5, 0x0c, 0x50,
+  0x34, 0x50, 0xae, 0x34, 0x50, 0xae,
+  0x2a, 0x7a, 0xf6, 0x8c, 0xde, 0x51
+};
+
+const uint8_t kMaskRandom48_39[234] = {
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+  0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+  0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+  0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+  0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+  0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+  0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+  0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+  0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+  0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+  0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+  0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+  0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+  0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+  0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+  0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+  0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+  0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+  0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+  0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+  0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0x98, 0xa2, 0x95, 0x98, 0xa2, 0x95
+};
+
+const uint8_t kMaskRandom48_4[24] = {
+  0x8b, 0x28, 0xb2, 0x8b, 0x28, 0xb2,
+  0x14, 0xb1, 0x4b, 0x14, 0xb1, 0x4b,
+  0x22, 0xd2, 0x2d, 0x22, 0xd2, 0x2d,
+  0x45, 0x54, 0x55, 0x45, 0x54, 0x55
+};
+
+const uint8_t kMaskRandom48_40[240] = {
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+  0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+  0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+  0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+  0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+  0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+  0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+  0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+  0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+  0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+  0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0x98, 0xa2, 0x95, 0x98, 0xa2, 0x95,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+  0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+  0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+  0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+  0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+  0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+  0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+  0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+  0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+  0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+  0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+  0x20, 0x5f, 0x68, 0xd5, 0xa2, 0x1b
+};
+
+const uint8_t kMaskRandom48_41[246] = {
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+  0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+  0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+  0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+  0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+  0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+  0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+  0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+  0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+  0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+  0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0x98, 0xa2, 0x95, 0x98, 0xa2, 0x95,
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+  0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+  0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+  0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+  0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+  0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+  0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+  0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+  0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+  0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+  0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+  0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+  0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02
+};
+
+const uint8_t kMaskRandom48_42[252] = {
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+  0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+  0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+  0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+  0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+  0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+  0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+  0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+  0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+  0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+  0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+  0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+  0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+  0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+  0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+  0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+  0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0xe4, 0x2e, 0x42, 0xe4, 0x2e, 0x42,
+  0x24, 0x42, 0x44, 0x24, 0x42, 0x44,
+  0xa1, 0x1a, 0x11, 0xa1, 0x1a, 0x11,
+  0x18, 0x31, 0x83, 0x18, 0x31, 0x83,
+  0x03, 0x90, 0x39, 0x03, 0x90, 0x39,
+  0x8a, 0x18, 0xa1, 0x8a, 0x18, 0xa1,
+  0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0x98, 0xa2, 0x95, 0x98, 0xa2, 0x95,
+  0x66, 0xcf, 0xa3, 0x47, 0x69, 0x00
+};
+
+const uint8_t kMaskRandom48_43[258] = {
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+  0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+  0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+  0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+  0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+  0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+  0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+  0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+  0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+  0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+  0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+  0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+  0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+  0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+  0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+  0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+  0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+  0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+  0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+  0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+  0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+  0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+  0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+  0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+  0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+  0x1a, 0xaa, 0xee, 0x1a, 0xaa, 0xee
+};
+
+const uint8_t kMaskRandom48_44[264] = {
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+  0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+  0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+  0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+  0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+  0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+  0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+  0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+  0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+  0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+  0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+  0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+  0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+  0x1a, 0xaa, 0xee, 0x1a, 0xaa, 0xee,
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+  0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+  0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+  0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+  0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+  0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+  0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+  0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+  0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+  0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+  0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+  0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+  0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+  0xc6, 0x40, 0x1f, 0x57, 0xc6, 0xe6
+};
+
+const uint8_t kMaskRandom48_45[270] = {
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+  0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+  0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+  0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+  0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+  0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+  0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+  0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+  0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+  0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+  0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+  0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+  0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+  0x1a, 0xaa, 0xee, 0x1a, 0xaa, 0xee,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+  0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+  0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+  0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+  0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+  0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+  0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+  0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+  0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+  0x10, 0x61, 0x06, 0x10, 0x61, 0x06,
+  0x02, 0x30, 0x23, 0x02, 0x30, 0x23,
+  0x40, 0x54, 0x05, 0x40, 0x54, 0x05,
+  0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+  0x81, 0x18, 0x11, 0x81, 0x18, 0x11,
+  0x14, 0x81, 0x48, 0x14, 0x81, 0x48,
+  0x98, 0x09, 0x80, 0x98, 0x09, 0x80,
+  0x08, 0x90, 0x89, 0x08, 0x90, 0x89,
+  0x62, 0x06, 0x20, 0x62, 0x06, 0x20,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x8a, 0x08, 0xa0, 0x8a, 0x08, 0xa0,
+  0x84, 0x48, 0x44, 0x84, 0x48, 0x44
+};
+
+const uint8_t kMaskRandom48_46[276] = {
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+  0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+  0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+  0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+  0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+  0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+  0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+  0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+  0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+  0x10, 0x61, 0x06, 0x10, 0x61, 0x06,
+  0x02, 0x30, 0x23, 0x02, 0x30, 0x23,
+  0x40, 0x54, 0x05, 0x40, 0x54, 0x05,
+  0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+  0x81, 0x18, 0x11, 0x81, 0x18, 0x11,
+  0x14, 0x81, 0x48, 0x14, 0x81, 0x48,
+  0x98, 0x09, 0x80, 0x98, 0x09, 0x80,
+  0x08, 0x90, 0x89, 0x08, 0x90, 0x89,
+  0x62, 0x06, 0x20, 0x62, 0x06, 0x20,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x8a, 0x08, 0xa0, 0x8a, 0x08, 0xa0,
+  0x84, 0x48, 0x44, 0x84, 0x48, 0x44,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+  0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+  0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+  0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+  0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+  0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+  0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+  0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+  0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+  0x51, 0x45, 0x14, 0x51, 0x45, 0x14,
+  0x45, 0x14, 0x51, 0x45, 0x14, 0x51,
+  0x80, 0xd8, 0x0d, 0x80, 0xd8, 0x0d,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x0a, 0x20, 0xa2, 0x0a, 0x20, 0xa2,
+  0x00, 0xe0, 0x0e, 0x00, 0xe0, 0x0e,
+  0xb8, 0x0b, 0x80, 0xb8, 0x0b, 0x80,
+  0x09, 0x10, 0x91, 0x09, 0x10, 0x91,
+  0x56, 0x05, 0x60, 0x56, 0x05, 0x60,
+  0xa2, 0x8a, 0x28, 0xa2, 0x8a, 0x28,
+  0x1a, 0xaa, 0xee, 0x1a, 0xaa, 0xee,
+  0x10, 0xf9, 0xab, 0x12, 0x14, 0xef
+};
+
+const uint8_t kMaskRandom48_47[282] = {
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+  0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+  0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+  0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+  0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+  0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+  0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+  0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+  0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+  0x10, 0x61, 0x06, 0x10, 0x61, 0x06,
+  0x02, 0x30, 0x23, 0x02, 0x30, 0x23,
+  0x40, 0x54, 0x05, 0x40, 0x54, 0x05,
+  0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+  0x81, 0x18, 0x11, 0x81, 0x18, 0x11,
+  0x14, 0x81, 0x48, 0x14, 0x81, 0x48,
+  0x98, 0x09, 0x80, 0x98, 0x09, 0x80,
+  0x08, 0x90, 0x89, 0x08, 0x90, 0x89,
+  0x62, 0x06, 0x20, 0x62, 0x06, 0x20,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x8a, 0x08, 0xa0, 0x8a, 0x08, 0xa0,
+  0x84, 0x48, 0x44, 0x84, 0x48, 0x44,
+  0x10, 0x61, 0x06, 0x10, 0x61, 0x06,
+  0x02, 0x30, 0x23, 0x02, 0x30, 0x23,
+  0x40, 0x54, 0x05, 0x40, 0x54, 0x05,
+  0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+  0x81, 0x18, 0x11, 0x81, 0x18, 0x11,
+  0x14, 0x81, 0x48, 0x14, 0x81, 0x48,
+  0x98, 0x09, 0x80, 0x98, 0x09, 0x80,
+  0x08, 0x90, 0x89, 0x08, 0x90, 0x89,
+  0x62, 0x06, 0x20, 0x62, 0x06, 0x20,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x8a, 0x08, 0xa0, 0x8a, 0x08, 0xa0,
+  0x84, 0x48, 0x44, 0x84, 0x48, 0x44,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+  0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+  0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+  0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+  0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+  0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+  0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+  0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+  0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+  0x88, 0x32, 0x59, 0x88, 0x32, 0x59
+};
+
+const uint8_t kMaskRandom48_48[288] = {
+  0x10, 0x61, 0x06, 0x10, 0x61, 0x06,
+  0x02, 0x30, 0x23, 0x02, 0x30, 0x23,
+  0x40, 0x54, 0x05, 0x40, 0x54, 0x05,
+  0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+  0x81, 0x18, 0x11, 0x81, 0x18, 0x11,
+  0x14, 0x81, 0x48, 0x14, 0x81, 0x48,
+  0x98, 0x09, 0x80, 0x98, 0x09, 0x80,
+  0x08, 0x90, 0x89, 0x08, 0x90, 0x89,
+  0x62, 0x06, 0x20, 0x62, 0x06, 0x20,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x8a, 0x08, 0xa0, 0x8a, 0x08, 0xa0,
+  0x84, 0x48, 0x44, 0x84, 0x48, 0x44,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+  0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+  0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+  0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+  0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+  0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+  0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+  0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+  0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+  0x88, 0x32, 0x59, 0x88, 0x32, 0x59,
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x21, 0x32, 0x13, 0x21, 0x32, 0x13,
+  0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+  0x00, 0x70, 0x07, 0x00, 0x70, 0x07,
+  0x0c, 0x10, 0xc1, 0x0c, 0x10, 0xc1,
+  0x40, 0xc4, 0x0c, 0x40, 0xc4, 0x0c,
+  0x6a, 0x06, 0xa0, 0x6a, 0x06, 0xa0,
+  0x86, 0x08, 0x60, 0x86, 0x08, 0x60,
+  0x24, 0x82, 0x48, 0x24, 0x82, 0x48,
+  0x89, 0x08, 0x90, 0x89, 0x08, 0x90,
+  0xc0, 0x2c, 0x02, 0xc0, 0x2c, 0x02,
+  0x10, 0x61, 0x06, 0x10, 0x61, 0x06,
+  0x02, 0x30, 0x23, 0x02, 0x30, 0x23,
+  0x40, 0x54, 0x05, 0x40, 0x54, 0x05,
+  0x21, 0x82, 0x18, 0x21, 0x82, 0x18,
+  0x81, 0x18, 0x11, 0x81, 0x18, 0x11,
+  0x14, 0x81, 0x48, 0x14, 0x81, 0x48,
+  0x98, 0x09, 0x80, 0x98, 0x09, 0x80,
+  0x08, 0x90, 0x89, 0x08, 0x90, 0x89,
+  0x62, 0x06, 0x20, 0x62, 0x06, 0x20,
+  0x24, 0x22, 0x42, 0x24, 0x22, 0x42,
+  0x8a, 0x08, 0xa0, 0x8a, 0x08, 0xa0,
+  0x84, 0x48, 0x44, 0x84, 0x48, 0x44,
+  0xff, 0x9b, 0xdf, 0xec, 0xae, 0x0e
+};
+
+const uint8_t kMaskRandom48_5[30] = {
+  0x53, 0x65, 0x36, 0x53, 0x65, 0x36,
+  0x64, 0xb6, 0x4b, 0x64, 0xb6, 0x4b,
+  0x0e, 0xe0, 0xee, 0x0e, 0xe0, 0xee,
+  0xa9, 0xca, 0x9c, 0xa9, 0xca, 0x9c,
+  0xb8, 0x3b, 0x83, 0xb8, 0x3b, 0x83
+};
+
+const uint8_t kMaskRandom48_6[36] = {
+  0xd1, 0x4d, 0x14, 0xd1, 0x4d, 0x14,
+  0x45, 0x34, 0x53, 0x45, 0x34, 0x53,
+  0x22, 0xd2, 0x2d, 0x22, 0xd2, 0x2d,
+  0x16, 0xc1, 0x6c, 0x16, 0xc1, 0x6c,
+  0x0b, 0xa0, 0xba, 0x0b, 0xa0, 0xba,
+  0xe8, 0x8e, 0x88, 0xe8, 0x8e, 0x88
+};
+
+const uint8_t kMaskRandom48_7[42] = {
+  0xd3, 0x65, 0x36, 0xd3, 0x65, 0x36,
+  0x25, 0x32, 0x53, 0x25, 0x32, 0x53,
+  0x30, 0xd3, 0x05, 0x30, 0xd3, 0x05,
+  0x06, 0x48, 0x6c, 0x06, 0x48, 0x6c,
+  0xc0, 0xb8, 0x1b, 0xc0, 0xb8, 0x1b,
+  0x2a, 0xa2, 0xaa, 0x2a, 0xa2, 0xaa,
+  0xa8, 0x4e, 0x84, 0xa8, 0x4e, 0x84
+};
+
+const uint8_t kMaskRandom48_8[48] = {
+  0x81, 0x60, 0x16, 0x81, 0x60, 0x16,
+  0x40, 0x3c, 0x03, 0x40, 0x3c, 0x03,
+  0x10, 0x91, 0x09, 0x10, 0x91, 0x09,
+  0x06, 0x50, 0x65, 0x06, 0x50, 0x65,
+  0x20, 0x4a, 0x84, 0x20, 0x4a, 0x84,
+  0x8a, 0xa0, 0xaa, 0x8a, 0xa0, 0xaa,
+  0x33, 0x03, 0x30, 0x33, 0x03, 0x30,
+  0x4c, 0x84, 0xc8, 0x4c, 0x84, 0xc8
+};
+
+const uint8_t kMaskRandom48_9[54] = {
+  0xd3, 0x65, 0x36, 0xd3, 0x65, 0x36,
+  0x64, 0x26, 0x42, 0x64, 0x26, 0x42,
+  0x18, 0x41, 0xc4, 0x18, 0x41, 0xc4,
+  0xa0, 0x4a, 0x04, 0xa0, 0x4a, 0x04,
+  0x81, 0x38, 0x13, 0x81, 0x38, 0x13,
+  0x22, 0xa2, 0x2a, 0x22, 0xa2, 0x2a,
+  0x08, 0x70, 0x87, 0x08, 0x70, 0x87,
+  0x04, 0x90, 0x49, 0x04, 0x90, 0x49,
+  0x01, 0xc0, 0x1c, 0x01, 0xc0, 0x1c
+};
+
+const uint8_t kMaskRandom4_1[2] = {
+  0xf0, 0x00
+};
+
+const uint8_t kMaskRandom4_2[4] = {
+  0xc0, 0x00,
+  0xb0, 0x00
+};
+
+const uint8_t kMaskRandom4_3[6] = {
+  0xc0, 0x00,
+  0xb0, 0x00,
+  0x60, 0x00
+};
+
+const uint8_t kMaskRandom4_4[8] = {
+  0xc0, 0x00,
+  0xa0, 0x00,
+  0x30, 0x00,
+  0x50, 0x00
+};
+
+const uint8_t kMaskRandom5_1[2] = {
+  0xf8, 0x00
+};
+
+const uint8_t kMaskRandom5_2[4] = {
+  0xa8, 0x00,
+  0xd0, 0x00
+};
+
+const uint8_t kMaskRandom5_3[6] = {
+  0xb0, 0x00,
+  0xc8, 0x00,
+  0x50, 0x00
+};
+
+const uint8_t kMaskRandom5_4[8] = {
+  0xc8, 0x00,
+  0xb0, 0x00,
+  0x50, 0x00,
+  0x28, 0x00
+};
+
+const uint8_t kMaskRandom5_5[10] = {
+  0xc0, 0x00,
+  0x30, 0x00,
+  0x18, 0x00,
+  0xa0, 0x00,
+  0x48, 0x00
+};
+
+const uint8_t kMaskRandom6_1[2] = {
+  0xfc, 0x00
+};
+
+const uint8_t kMaskRandom6_2[4] = {
+  0xa8, 0x00,
+  0xd4, 0x00
+};
+
+const uint8_t kMaskRandom6_3[6] = {
+  0xd0, 0x00,
+  0x68, 0x00,
+  0xa4, 0x00
+};
+
+const uint8_t kMaskRandom6_4[8] = {
+  0xa8, 0x00,
+  0x58, 0x00,
+  0x64, 0x00,
+  0x94, 0x00
+};
+
+const uint8_t kMaskRandom6_5[10] = {
+  0xa8, 0x00,
+  0x84, 0x00,
+  0x64, 0x00,
+  0x90, 0x00,
+  0x58, 0x00
+};
+
+const uint8_t kMaskRandom6_6[12] = {
+  0x98, 0x00,
+  0x64, 0x00,
+  0x50, 0x00,
+  0x14, 0x00,
+  0xa8, 0x00,
+  0xe0, 0x00
+};
+
+const uint8_t kMaskRandom7_1[2] = {
+  0xfe, 0x00
+};
+
+const uint8_t kMaskRandom7_2[4] = {
+  0xd4, 0x00,
+  0xaa, 0x00
+};
+
+const uint8_t kMaskRandom7_3[6] = {
+  0xd0, 0x00,
+  0xaa, 0x00,
+  0x64, 0x00
+};
+
+const uint8_t kMaskRandom7_4[8] = {
+  0xd0, 0x00,
+  0xaa, 0x00,
+  0x64, 0x00,
+  0x1c, 0x00
+};
+
+const uint8_t kMaskRandom7_5[10] = {
+  0x0c, 0x00,
+  0xb0, 0x00,
+  0x1a, 0x00,
+  0xc4, 0x00,
+  0x62, 0x00
+};
+
+const uint8_t kMaskRandom7_6[12] = {
+  0x8c, 0x00,
+  0x4a, 0x00,
+  0x64, 0x00,
+  0xd0, 0x00,
+  0xa0, 0x00,
+  0x32, 0x00
+};
+
+const uint8_t kMaskRandom7_7[14] = {
+  0x4a, 0x00,
+  0x94, 0x00,
+  0x1a, 0x00,
+  0xc4, 0x00,
+  0x28, 0x00,
+  0xc2, 0x00,
+  0x34, 0x00
+};
+
+const uint8_t kMaskRandom8_1[2] = {
+  0xff, 0x00
+};
+
+const uint8_t kMaskRandom8_2[4] = {
+  0xaa, 0x00,
+  0xd5, 0x00
+};
+
+const uint8_t kMaskRandom8_3[6] = {
+  0xc5, 0x00,
+  0x92, 0x00,
+  0x6a, 0x00
+};
+
+const uint8_t kMaskRandom8_4[8] = {
+  0x45, 0x00,
+  0xb4, 0x00,
+  0x6a, 0x00,
+  0x89, 0x00
+};
+
+const uint8_t kMaskRandom8_5[10] = {
+  0x8c, 0x00,
+  0x92, 0x00,
+  0x2b, 0x00,
+  0x51, 0x00,
+  0x64, 0x00
+};
+
+const uint8_t kMaskRandom8_6[12] = {
+  0xa1, 0x00,
+  0x52, 0x00,
+  0x91, 0x00,
+  0x2a, 0x00,
+  0xc4, 0x00,
+  0x4c, 0x00
+};
+
+const uint8_t kMaskRandom8_7[14] = {
+  0x15, 0x00,
+  0xc2, 0x00,
+  0x25, 0x00,
+  0x62, 0x00,
+  0x58, 0x00,
+  0x8c, 0x00,
+  0xa3, 0x00
+};
+
+const uint8_t kMaskRandom8_8[16] = {
+  0x25, 0x00,
+  0x8a, 0x00,
+  0x91, 0x00,
+  0x68, 0x00,
+  0x32, 0x00,
+  0x43, 0x00,
+  0xc4, 0x00,
+  0x1c, 0x00
+};
+
+const uint8_t kMaskRandom9_1[2] = {
+  0xff, 0x80
+};
+
+const uint8_t kMaskRandom9_2[4] = {
+  0xaa, 0x80,
+  0xd5, 0x00
+};
+
+const uint8_t kMaskRandom9_3[6] = {
+  0xa5, 0x00,
+  0xc8, 0x00,
+  0x52, 0x80
+};
+
+const uint8_t kMaskRandom9_4[8] = {
+  0xa2, 0x00,
+  0xc9, 0x00,
+  0x52, 0x80,
+  0x24, 0x80
+};
+
+const uint8_t kMaskRandom9_5[10] = {
+  0x8c, 0x00,
+  0x25, 0x00,
+  0x92, 0x80,
+  0x41, 0x80,
+  0x58, 0x00
+};
+
+const uint8_t kMaskRandom9_6[12] = {
+  0x84, 0x80,
+  0x27, 0x00,
+  0x51, 0x80,
+  0x1a, 0x00,
+  0x68, 0x00,
+  0x89, 0x00
+};
+
+const uint8_t kMaskRandom9_7[14] = {
+  0x8c, 0x00,
+  0x47, 0x00,
+  0x81, 0x80,
+  0x12, 0x80,
+  0x58, 0x00,
+  0x28, 0x80,
+  0xb4, 0x00
+};
+
+const uint8_t kMaskRandom9_8[16] = {
+  0x2c, 0x00,
+  0x91, 0x00,
+  0x40, 0x80,
+  0x06, 0x80,
+  0xc8, 0x00,
+  0x45, 0x00,
+  0x30, 0x80,
+  0xa2, 0x00
+};
+
+const uint8_t kMaskRandom9_9[18] = {
+  0x4c, 0x00,
+  0x62, 0x00,
+  0x91, 0x00,
+  0x42, 0x80,
+  0xa4, 0x00,
+  0x13, 0x00,
+  0x30, 0x80,
+  0x88, 0x80,
+  0x09, 0x00
+};
+
+const uint8_t* kPacketMaskRandom1[1] = {
+  kMaskRandom1_1
+};
+
+const uint8_t* kPacketMaskRandom2[2] = {
+  kMaskRandom2_1,
+  kMaskRandom2_2
+};
+
+const uint8_t* kPacketMaskRandom3[3] = {
+  kMaskRandom3_1,
+  kMaskRandom3_2,
+  kMaskRandom3_3
+};
+
+const uint8_t* kPacketMaskRandom4[4] = {
+  kMaskRandom4_1,
+  kMaskRandom4_2,
+  kMaskRandom4_3,
+  kMaskRandom4_4
+};
+
+const uint8_t* kPacketMaskRandom5[5] = {
+  kMaskRandom5_1,
+  kMaskRandom5_2,
+  kMaskRandom5_3,
+  kMaskRandom5_4,
+  kMaskRandom5_5
+};
+
+const uint8_t* kPacketMaskRandom6[6] = {
+  kMaskRandom6_1,
+  kMaskRandom6_2,
+  kMaskRandom6_3,
+  kMaskRandom6_4,
+  kMaskRandom6_5,
+  kMaskRandom6_6
+};
+
+const uint8_t* kPacketMaskRandom7[7] = {
+  kMaskRandom7_1,
+  kMaskRandom7_2,
+  kMaskRandom7_3,
+  kMaskRandom7_4,
+  kMaskRandom7_5,
+  kMaskRandom7_6,
+  kMaskRandom7_7
+};
+
+const uint8_t* kPacketMaskRandom8[8] = {
+  kMaskRandom8_1,
+  kMaskRandom8_2,
+  kMaskRandom8_3,
+  kMaskRandom8_4,
+  kMaskRandom8_5,
+  kMaskRandom8_6,
+  kMaskRandom8_7,
+  kMaskRandom8_8
+};
+
+const uint8_t* kPacketMaskRandom9[9] = {
+  kMaskRandom9_1,
+  kMaskRandom9_2,
+  kMaskRandom9_3,
+  kMaskRandom9_4,
+  kMaskRandom9_5,
+  kMaskRandom9_6,
+  kMaskRandom9_7,
+  kMaskRandom9_8,
+  kMaskRandom9_9
+};
+
+const uint8_t* kPacketMaskRandom10[10] = {
+  kMaskRandom10_1,
+  kMaskRandom10_2,
+  kMaskRandom10_3,
+  kMaskRandom10_4,
+  kMaskRandom10_5,
+  kMaskRandom10_6,
+  kMaskRandom10_7,
+  kMaskRandom10_8,
+  kMaskRandom10_9,
+  kMaskRandom10_10
+};
+
+const uint8_t* kPacketMaskRandom11[11] = {
+  kMaskRandom11_1,
+  kMaskRandom11_2,
+  kMaskRandom11_3,
+  kMaskRandom11_4,
+  kMaskRandom11_5,
+  kMaskRandom11_6,
+  kMaskRandom11_7,
+  kMaskRandom11_8,
+  kMaskRandom11_9,
+  kMaskRandom11_10,
+  kMaskRandom11_11
+};
+
+const uint8_t* kPacketMaskRandom12[12] = {
+  kMaskRandom12_1,
+  kMaskRandom12_2,
+  kMaskRandom12_3,
+  kMaskRandom12_4,
+  kMaskRandom12_5,
+  kMaskRandom12_6,
+  kMaskRandom12_7,
+  kMaskRandom12_8,
+  kMaskRandom12_9,
+  kMaskRandom12_10,
+  kMaskRandom12_11,
+  kMaskRandom12_12
+};
+
+const uint8_t* kPacketMaskRandom13[13] = {
+  kMaskRandom13_1,
+  kMaskRandom13_2,
+  kMaskRandom13_3,
+  kMaskRandom13_4,
+  kMaskRandom13_5,
+  kMaskRandom13_6,
+  kMaskRandom13_7,
+  kMaskRandom13_8,
+  kMaskRandom13_9,
+  kMaskRandom13_10,
+  kMaskRandom13_11,
+  kMaskRandom13_12,
+  kMaskRandom13_13
+};
+
+const uint8_t* kPacketMaskRandom14[14] = {
+  kMaskRandom14_1,
+  kMaskRandom14_2,
+  kMaskRandom14_3,
+  kMaskRandom14_4,
+  kMaskRandom14_5,
+  kMaskRandom14_6,
+  kMaskRandom14_7,
+  kMaskRandom14_8,
+  kMaskRandom14_9,
+  kMaskRandom14_10,
+  kMaskRandom14_11,
+  kMaskRandom14_12,
+  kMaskRandom14_13,
+  kMaskRandom14_14
+};
+
+const uint8_t* kPacketMaskRandom15[15] = {
+  kMaskRandom15_1,
+  kMaskRandom15_2,
+  kMaskRandom15_3,
+  kMaskRandom15_4,
+  kMaskRandom15_5,
+  kMaskRandom15_6,
+  kMaskRandom15_7,
+  kMaskRandom15_8,
+  kMaskRandom15_9,
+  kMaskRandom15_10,
+  kMaskRandom15_11,
+  kMaskRandom15_12,
+  kMaskRandom15_13,
+  kMaskRandom15_14,
+  kMaskRandom15_15
+};
+
+const uint8_t* kPacketMaskRandom16[16] = {
+  kMaskRandom16_1,
+  kMaskRandom16_2,
+  kMaskRandom16_3,
+  kMaskRandom16_4,
+  kMaskRandom16_5,
+  kMaskRandom16_6,
+  kMaskRandom16_7,
+  kMaskRandom16_8,
+  kMaskRandom16_9,
+  kMaskRandom16_10,
+  kMaskRandom16_11,
+  kMaskRandom16_12,
+  kMaskRandom16_13,
+  kMaskRandom16_14,
+  kMaskRandom16_15,
+  kMaskRandom16_16
+};
+
+const uint8_t* kPacketMaskRandom17[17] = {
+  kMaskRandom17_1,
+  kMaskRandom17_2,
+  kMaskRandom17_3,
+  kMaskRandom17_4,
+  kMaskRandom17_5,
+  kMaskRandom17_6,
+  kMaskRandom17_7,
+  kMaskRandom17_8,
+  kMaskRandom17_9,
+  kMaskRandom17_10,
+  kMaskRandom17_11,
+  kMaskRandom17_12,
+  kMaskRandom17_13,
+  kMaskRandom17_14,
+  kMaskRandom17_15,
+  kMaskRandom17_16,
+  kMaskRandom17_17
+};
+
+const uint8_t* kPacketMaskRandom18[18] = {
+  kMaskRandom18_1,
+  kMaskRandom18_2,
+  kMaskRandom18_3,
+  kMaskRandom18_4,
+  kMaskRandom18_5,
+  kMaskRandom18_6,
+  kMaskRandom18_7,
+  kMaskRandom18_8,
+  kMaskRandom18_9,
+  kMaskRandom18_10,
+  kMaskRandom18_11,
+  kMaskRandom18_12,
+  kMaskRandom18_13,
+  kMaskRandom18_14,
+  kMaskRandom18_15,
+  kMaskRandom18_16,
+  kMaskRandom18_17,
+  kMaskRandom18_18
+};
+
+const uint8_t* kPacketMaskRandom19[19] = {
+  kMaskRandom19_1,
+  kMaskRandom19_2,
+  kMaskRandom19_3,
+  kMaskRandom19_4,
+  kMaskRandom19_5,
+  kMaskRandom19_6,
+  kMaskRandom19_7,
+  kMaskRandom19_8,
+  kMaskRandom19_9,
+  kMaskRandom19_10,
+  kMaskRandom19_11,
+  kMaskRandom19_12,
+  kMaskRandom19_13,
+  kMaskRandom19_14,
+  kMaskRandom19_15,
+  kMaskRandom19_16,
+  kMaskRandom19_17,
+  kMaskRandom19_18,
+  kMaskRandom19_19
+};
+
+const uint8_t* kPacketMaskRandom20[20] = {
+  kMaskRandom20_1,
+  kMaskRandom20_2,
+  kMaskRandom20_3,
+  kMaskRandom20_4,
+  kMaskRandom20_5,
+  kMaskRandom20_6,
+  kMaskRandom20_7,
+  kMaskRandom20_8,
+  kMaskRandom20_9,
+  kMaskRandom20_10,
+  kMaskRandom20_11,
+  kMaskRandom20_12,
+  kMaskRandom20_13,
+  kMaskRandom20_14,
+  kMaskRandom20_15,
+  kMaskRandom20_16,
+  kMaskRandom20_17,
+  kMaskRandom20_18,
+  kMaskRandom20_19,
+  kMaskRandom20_20
+};
+
+const uint8_t* kPacketMaskRandom21[21] = {
+  kMaskRandom21_1,
+  kMaskRandom21_2,
+  kMaskRandom21_3,
+  kMaskRandom21_4,
+  kMaskRandom21_5,
+  kMaskRandom21_6,
+  kMaskRandom21_7,
+  kMaskRandom21_8,
+  kMaskRandom21_9,
+  kMaskRandom21_10,
+  kMaskRandom21_11,
+  kMaskRandom21_12,
+  kMaskRandom21_13,
+  kMaskRandom21_14,
+  kMaskRandom21_15,
+  kMaskRandom21_16,
+  kMaskRandom21_17,
+  kMaskRandom21_18,
+  kMaskRandom21_19,
+  kMaskRandom21_20,
+  kMaskRandom21_21
+};
+
+const uint8_t* kPacketMaskRandom22[22] = {
+  kMaskRandom22_1,
+  kMaskRandom22_2,
+  kMaskRandom22_3,
+  kMaskRandom22_4,
+  kMaskRandom22_5,
+  kMaskRandom22_6,
+  kMaskRandom22_7,
+  kMaskRandom22_8,
+  kMaskRandom22_9,
+  kMaskRandom22_10,
+  kMaskRandom22_11,
+  kMaskRandom22_12,
+  kMaskRandom22_13,
+  kMaskRandom22_14,
+  kMaskRandom22_15,
+  kMaskRandom22_16,
+  kMaskRandom22_17,
+  kMaskRandom22_18,
+  kMaskRandom22_19,
+  kMaskRandom22_20,
+  kMaskRandom22_21,
+  kMaskRandom22_22
+};
+
+const uint8_t* kPacketMaskRandom23[23] = {
+  kMaskRandom23_1,
+  kMaskRandom23_2,
+  kMaskRandom23_3,
+  kMaskRandom23_4,
+  kMaskRandom23_5,
+  kMaskRandom23_6,
+  kMaskRandom23_7,
+  kMaskRandom23_8,
+  kMaskRandom23_9,
+  kMaskRandom23_10,
+  kMaskRandom23_11,
+  kMaskRandom23_12,
+  kMaskRandom23_13,
+  kMaskRandom23_14,
+  kMaskRandom23_15,
+  kMaskRandom23_16,
+  kMaskRandom23_17,
+  kMaskRandom23_18,
+  kMaskRandom23_19,
+  kMaskRandom23_20,
+  kMaskRandom23_21,
+  kMaskRandom23_22,
+  kMaskRandom23_23
+};
+
+const uint8_t* kPacketMaskRandom24[24] = {
+  kMaskRandom24_1,
+  kMaskRandom24_2,
+  kMaskRandom24_3,
+  kMaskRandom24_4,
+  kMaskRandom24_5,
+  kMaskRandom24_6,
+  kMaskRandom24_7,
+  kMaskRandom24_8,
+  kMaskRandom24_9,
+  kMaskRandom24_10,
+  kMaskRandom24_11,
+  kMaskRandom24_12,
+  kMaskRandom24_13,
+  kMaskRandom24_14,
+  kMaskRandom24_15,
+  kMaskRandom24_16,
+  kMaskRandom24_17,
+  kMaskRandom24_18,
+  kMaskRandom24_19,
+  kMaskRandom24_20,
+  kMaskRandom24_21,
+  kMaskRandom24_22,
+  kMaskRandom24_23,
+  kMaskRandom24_24
+};
+
+const uint8_t* kPacketMaskRandom25[25] = {
+  kMaskRandom25_1,
+  kMaskRandom25_2,
+  kMaskRandom25_3,
+  kMaskRandom25_4,
+  kMaskRandom25_5,
+  kMaskRandom25_6,
+  kMaskRandom25_7,
+  kMaskRandom25_8,
+  kMaskRandom25_9,
+  kMaskRandom25_10,
+  kMaskRandom25_11,
+  kMaskRandom25_12,
+  kMaskRandom25_13,
+  kMaskRandom25_14,
+  kMaskRandom25_15,
+  kMaskRandom25_16,
+  kMaskRandom25_17,
+  kMaskRandom25_18,
+  kMaskRandom25_19,
+  kMaskRandom25_20,
+  kMaskRandom25_21,
+  kMaskRandom25_22,
+  kMaskRandom25_23,
+  kMaskRandom25_24,
+  kMaskRandom25_25
+};
+
+const uint8_t* kPacketMaskRandom26[26] = {
+  kMaskRandom26_1,
+  kMaskRandom26_2,
+  kMaskRandom26_3,
+  kMaskRandom26_4,
+  kMaskRandom26_5,
+  kMaskRandom26_6,
+  kMaskRandom26_7,
+  kMaskRandom26_8,
+  kMaskRandom26_9,
+  kMaskRandom26_10,
+  kMaskRandom26_11,
+  kMaskRandom26_12,
+  kMaskRandom26_13,
+  kMaskRandom26_14,
+  kMaskRandom26_15,
+  kMaskRandom26_16,
+  kMaskRandom26_17,
+  kMaskRandom26_18,
+  kMaskRandom26_19,
+  kMaskRandom26_20,
+  kMaskRandom26_21,
+  kMaskRandom26_22,
+  kMaskRandom26_23,
+  kMaskRandom26_24,
+  kMaskRandom26_25,
+  kMaskRandom26_26
+};
+
+const uint8_t* kPacketMaskRandom27[27] = {
+  kMaskRandom27_1,
+  kMaskRandom27_2,
+  kMaskRandom27_3,
+  kMaskRandom27_4,
+  kMaskRandom27_5,
+  kMaskRandom27_6,
+  kMaskRandom27_7,
+  kMaskRandom27_8,
+  kMaskRandom27_9,
+  kMaskRandom27_10,
+  kMaskRandom27_11,
+  kMaskRandom27_12,
+  kMaskRandom27_13,
+  kMaskRandom27_14,
+  kMaskRandom27_15,
+  kMaskRandom27_16,
+  kMaskRandom27_17,
+  kMaskRandom27_18,
+  kMaskRandom27_19,
+  kMaskRandom27_20,
+  kMaskRandom27_21,
+  kMaskRandom27_22,
+  kMaskRandom27_23,
+  kMaskRandom27_24,
+  kMaskRandom27_25,
+  kMaskRandom27_26,
+  kMaskRandom27_27
+};
+
+const uint8_t* kPacketMaskRandom28[28] = {
+  kMaskRandom28_1,
+  kMaskRandom28_2,
+  kMaskRandom28_3,
+  kMaskRandom28_4,
+  kMaskRandom28_5,
+  kMaskRandom28_6,
+  kMaskRandom28_7,
+  kMaskRandom28_8,
+  kMaskRandom28_9,
+  kMaskRandom28_10,
+  kMaskRandom28_11,
+  kMaskRandom28_12,
+  kMaskRandom28_13,
+  kMaskRandom28_14,
+  kMaskRandom28_15,
+  kMaskRandom28_16,
+  kMaskRandom28_17,
+  kMaskRandom28_18,
+  kMaskRandom28_19,
+  kMaskRandom28_20,
+  kMaskRandom28_21,
+  kMaskRandom28_22,
+  kMaskRandom28_23,
+  kMaskRandom28_24,
+  kMaskRandom28_25,
+  kMaskRandom28_26,
+  kMaskRandom28_27,
+  kMaskRandom28_28
+};
+
+const uint8_t* kPacketMaskRandom29[29] = {
+  kMaskRandom29_1,
+  kMaskRandom29_2,
+  kMaskRandom29_3,
+  kMaskRandom29_4,
+  kMaskRandom29_5,
+  kMaskRandom29_6,
+  kMaskRandom29_7,
+  kMaskRandom29_8,
+  kMaskRandom29_9,
+  kMaskRandom29_10,
+  kMaskRandom29_11,
+  kMaskRandom29_12,
+  kMaskRandom29_13,
+  kMaskRandom29_14,
+  kMaskRandom29_15,
+  kMaskRandom29_16,
+  kMaskRandom29_17,
+  kMaskRandom29_18,
+  kMaskRandom29_19,
+  kMaskRandom29_20,
+  kMaskRandom29_21,
+  kMaskRandom29_22,
+  kMaskRandom29_23,
+  kMaskRandom29_24,
+  kMaskRandom29_25,
+  kMaskRandom29_26,
+  kMaskRandom29_27,
+  kMaskRandom29_28,
+  kMaskRandom29_29
+};
+
+const uint8_t* kPacketMaskRandom30[30] = {
+  kMaskRandom30_1,
+  kMaskRandom30_2,
+  kMaskRandom30_3,
+  kMaskRandom30_4,
+  kMaskRandom30_5,
+  kMaskRandom30_6,
+  kMaskRandom30_7,
+  kMaskRandom30_8,
+  kMaskRandom30_9,
+  kMaskRandom30_10,
+  kMaskRandom30_11,
+  kMaskRandom30_12,
+  kMaskRandom30_13,
+  kMaskRandom30_14,
+  kMaskRandom30_15,
+  kMaskRandom30_16,
+  kMaskRandom30_17,
+  kMaskRandom30_18,
+  kMaskRandom30_19,
+  kMaskRandom30_20,
+  kMaskRandom30_21,
+  kMaskRandom30_22,
+  kMaskRandom30_23,
+  kMaskRandom30_24,
+  kMaskRandom30_25,
+  kMaskRandom30_26,
+  kMaskRandom30_27,
+  kMaskRandom30_28,
+  kMaskRandom30_29,
+  kMaskRandom30_30
+};
+
+const uint8_t* kPacketMaskRandom31[31] = {
+  kMaskRandom31_1,
+  kMaskRandom31_2,
+  kMaskRandom31_3,
+  kMaskRandom31_4,
+  kMaskRandom31_5,
+  kMaskRandom31_6,
+  kMaskRandom31_7,
+  kMaskRandom31_8,
+  kMaskRandom31_9,
+  kMaskRandom31_10,
+  kMaskRandom31_11,
+  kMaskRandom31_12,
+  kMaskRandom31_13,
+  kMaskRandom31_14,
+  kMaskRandom31_15,
+  kMaskRandom31_16,
+  kMaskRandom31_17,
+  kMaskRandom31_18,
+  kMaskRandom31_19,
+  kMaskRandom31_20,
+  kMaskRandom31_21,
+  kMaskRandom31_22,
+  kMaskRandom31_23,
+  kMaskRandom31_24,
+  kMaskRandom31_25,
+  kMaskRandom31_26,
+  kMaskRandom31_27,
+  kMaskRandom31_28,
+  kMaskRandom31_29,
+  kMaskRandom31_30,
+  kMaskRandom31_31
+};
+
+const uint8_t* kPacketMaskRandom32[32] = {
+  kMaskRandom32_1,
+  kMaskRandom32_2,
+  kMaskRandom32_3,
+  kMaskRandom32_4,
+  kMaskRandom32_5,
+  kMaskRandom32_6,
+  kMaskRandom32_7,
+  kMaskRandom32_8,
+  kMaskRandom32_9,
+  kMaskRandom32_10,
+  kMaskRandom32_11,
+  kMaskRandom32_12,
+  kMaskRandom32_13,
+  kMaskRandom32_14,
+  kMaskRandom32_15,
+  kMaskRandom32_16,
+  kMaskRandom32_17,
+  kMaskRandom32_18,
+  kMaskRandom32_19,
+  kMaskRandom32_20,
+  kMaskRandom32_21,
+  kMaskRandom32_22,
+  kMaskRandom32_23,
+  kMaskRandom32_24,
+  kMaskRandom32_25,
+  kMaskRandom32_26,
+  kMaskRandom32_27,
+  kMaskRandom32_28,
+  kMaskRandom32_29,
+  kMaskRandom32_30,
+  kMaskRandom32_31,
+  kMaskRandom32_32
+};
+
+const uint8_t* kPacketMaskRandom33[33] = {
+  kMaskRandom33_1,
+  kMaskRandom33_2,
+  kMaskRandom33_3,
+  kMaskRandom33_4,
+  kMaskRandom33_5,
+  kMaskRandom33_6,
+  kMaskRandom33_7,
+  kMaskRandom33_8,
+  kMaskRandom33_9,
+  kMaskRandom33_10,
+  kMaskRandom33_11,
+  kMaskRandom33_12,
+  kMaskRandom33_13,
+  kMaskRandom33_14,
+  kMaskRandom33_15,
+  kMaskRandom33_16,
+  kMaskRandom33_17,
+  kMaskRandom33_18,
+  kMaskRandom33_19,
+  kMaskRandom33_20,
+  kMaskRandom33_21,
+  kMaskRandom33_22,
+  kMaskRandom33_23,
+  kMaskRandom33_24,
+  kMaskRandom33_25,
+  kMaskRandom33_26,
+  kMaskRandom33_27,
+  kMaskRandom33_28,
+  kMaskRandom33_29,
+  kMaskRandom33_30,
+  kMaskRandom33_31,
+  kMaskRandom33_32,
+  kMaskRandom33_33
+};
+
+const uint8_t* kPacketMaskRandom34[34] = {
+  kMaskRandom34_1,
+  kMaskRandom34_2,
+  kMaskRandom34_3,
+  kMaskRandom34_4,
+  kMaskRandom34_5,
+  kMaskRandom34_6,
+  kMaskRandom34_7,
+  kMaskRandom34_8,
+  kMaskRandom34_9,
+  kMaskRandom34_10,
+  kMaskRandom34_11,
+  kMaskRandom34_12,
+  kMaskRandom34_13,
+  kMaskRandom34_14,
+  kMaskRandom34_15,
+  kMaskRandom34_16,
+  kMaskRandom34_17,
+  kMaskRandom34_18,
+  kMaskRandom34_19,
+  kMaskRandom34_20,
+  kMaskRandom34_21,
+  kMaskRandom34_22,
+  kMaskRandom34_23,
+  kMaskRandom34_24,
+  kMaskRandom34_25,
+  kMaskRandom34_26,
+  kMaskRandom34_27,
+  kMaskRandom34_28,
+  kMaskRandom34_29,
+  kMaskRandom34_30,
+  kMaskRandom34_31,
+  kMaskRandom34_32,
+  kMaskRandom34_33,
+  kMaskRandom34_34
+};
+
+const uint8_t* kPacketMaskRandom35[35] = {
+  kMaskRandom35_1,
+  kMaskRandom35_2,
+  kMaskRandom35_3,
+  kMaskRandom35_4,
+  kMaskRandom35_5,
+  kMaskRandom35_6,
+  kMaskRandom35_7,
+  kMaskRandom35_8,
+  kMaskRandom35_9,
+  kMaskRandom35_10,
+  kMaskRandom35_11,
+  kMaskRandom35_12,
+  kMaskRandom35_13,
+  kMaskRandom35_14,
+  kMaskRandom35_15,
+  kMaskRandom35_16,
+  kMaskRandom35_17,
+  kMaskRandom35_18,
+  kMaskRandom35_19,
+  kMaskRandom35_20,
+  kMaskRandom35_21,
+  kMaskRandom35_22,
+  kMaskRandom35_23,
+  kMaskRandom35_24,
+  kMaskRandom35_25,
+  kMaskRandom35_26,
+  kMaskRandom35_27,
+  kMaskRandom35_28,
+  kMaskRandom35_29,
+  kMaskRandom35_30,
+  kMaskRandom35_31,
+  kMaskRandom35_32,
+  kMaskRandom35_33,
+  kMaskRandom35_34,
+  kMaskRandom35_35
+};
+
+const uint8_t* kPacketMaskRandom36[36] = {
+  kMaskRandom36_1,
+  kMaskRandom36_2,
+  kMaskRandom36_3,
+  kMaskRandom36_4,
+  kMaskRandom36_5,
+  kMaskRandom36_6,
+  kMaskRandom36_7,
+  kMaskRandom36_8,
+  kMaskRandom36_9,
+  kMaskRandom36_10,
+  kMaskRandom36_11,
+  kMaskRandom36_12,
+  kMaskRandom36_13,
+  kMaskRandom36_14,
+  kMaskRandom36_15,
+  kMaskRandom36_16,
+  kMaskRandom36_17,
+  kMaskRandom36_18,
+  kMaskRandom36_19,
+  kMaskRandom36_20,
+  kMaskRandom36_21,
+  kMaskRandom36_22,
+  kMaskRandom36_23,
+  kMaskRandom36_24,
+  kMaskRandom36_25,
+  kMaskRandom36_26,
+  kMaskRandom36_27,
+  kMaskRandom36_28,
+  kMaskRandom36_29,
+  kMaskRandom36_30,
+  kMaskRandom36_31,
+  kMaskRandom36_32,
+  kMaskRandom36_33,
+  kMaskRandom36_34,
+  kMaskRandom36_35,
+  kMaskRandom36_36
+};
+
+const uint8_t* kPacketMaskRandom37[37] = {
+  kMaskRandom37_1,
+  kMaskRandom37_2,
+  kMaskRandom37_3,
+  kMaskRandom37_4,
+  kMaskRandom37_5,
+  kMaskRandom37_6,
+  kMaskRandom37_7,
+  kMaskRandom37_8,
+  kMaskRandom37_9,
+  kMaskRandom37_10,
+  kMaskRandom37_11,
+  kMaskRandom37_12,
+  kMaskRandom37_13,
+  kMaskRandom37_14,
+  kMaskRandom37_15,
+  kMaskRandom37_16,
+  kMaskRandom37_17,
+  kMaskRandom37_18,
+  kMaskRandom37_19,
+  kMaskRandom37_20,
+  kMaskRandom37_21,
+  kMaskRandom37_22,
+  kMaskRandom37_23,
+  kMaskRandom37_24,
+  kMaskRandom37_25,
+  kMaskRandom37_26,
+  kMaskRandom37_27,
+  kMaskRandom37_28,
+  kMaskRandom37_29,
+  kMaskRandom37_30,
+  kMaskRandom37_31,
+  kMaskRandom37_32,
+  kMaskRandom37_33,
+  kMaskRandom37_34,
+  kMaskRandom37_35,
+  kMaskRandom37_36,
+  kMaskRandom37_37
+};
+
+const uint8_t* kPacketMaskRandom38[38] = {
+  kMaskRandom38_1,
+  kMaskRandom38_2,
+  kMaskRandom38_3,
+  kMaskRandom38_4,
+  kMaskRandom38_5,
+  kMaskRandom38_6,
+  kMaskRandom38_7,
+  kMaskRandom38_8,
+  kMaskRandom38_9,
+  kMaskRandom38_10,
+  kMaskRandom38_11,
+  kMaskRandom38_12,
+  kMaskRandom38_13,
+  kMaskRandom38_14,
+  kMaskRandom38_15,
+  kMaskRandom38_16,
+  kMaskRandom38_17,
+  kMaskRandom38_18,
+  kMaskRandom38_19,
+  kMaskRandom38_20,
+  kMaskRandom38_21,
+  kMaskRandom38_22,
+  kMaskRandom38_23,
+  kMaskRandom38_24,
+  kMaskRandom38_25,
+  kMaskRandom38_26,
+  kMaskRandom38_27,
+  kMaskRandom38_28,
+  kMaskRandom38_29,
+  kMaskRandom38_30,
+  kMaskRandom38_31,
+  kMaskRandom38_32,
+  kMaskRandom38_33,
+  kMaskRandom38_34,
+  kMaskRandom38_35,
+  kMaskRandom38_36,
+  kMaskRandom38_37,
+  kMaskRandom38_38
+};
+
+const uint8_t* kPacketMaskRandom39[39] = {
+  kMaskRandom39_1,
+  kMaskRandom39_2,
+  kMaskRandom39_3,
+  kMaskRandom39_4,
+  kMaskRandom39_5,
+  kMaskRandom39_6,
+  kMaskRandom39_7,
+  kMaskRandom39_8,
+  kMaskRandom39_9,
+  kMaskRandom39_10,
+  kMaskRandom39_11,
+  kMaskRandom39_12,
+  kMaskRandom39_13,
+  kMaskRandom39_14,
+  kMaskRandom39_15,
+  kMaskRandom39_16,
+  kMaskRandom39_17,
+  kMaskRandom39_18,
+  kMaskRandom39_19,
+  kMaskRandom39_20,
+  kMaskRandom39_21,
+  kMaskRandom39_22,
+  kMaskRandom39_23,
+  kMaskRandom39_24,
+  kMaskRandom39_25,
+  kMaskRandom39_26,
+  kMaskRandom39_27,
+  kMaskRandom39_28,
+  kMaskRandom39_29,
+  kMaskRandom39_30,
+  kMaskRandom39_31,
+  kMaskRandom39_32,
+  kMaskRandom39_33,
+  kMaskRandom39_34,
+  kMaskRandom39_35,
+  kMaskRandom39_36,
+  kMaskRandom39_37,
+  kMaskRandom39_38,
+  kMaskRandom39_39
+};
+
+const uint8_t* kPacketMaskRandom40[40] = {
+  kMaskRandom40_1,
+  kMaskRandom40_2,
+  kMaskRandom40_3,
+  kMaskRandom40_4,
+  kMaskRandom40_5,
+  kMaskRandom40_6,
+  kMaskRandom40_7,
+  kMaskRandom40_8,
+  kMaskRandom40_9,
+  kMaskRandom40_10,
+  kMaskRandom40_11,
+  kMaskRandom40_12,
+  kMaskRandom40_13,
+  kMaskRandom40_14,
+  kMaskRandom40_15,
+  kMaskRandom40_16,
+  kMaskRandom40_17,
+  kMaskRandom40_18,
+  kMaskRandom40_19,
+  kMaskRandom40_20,
+  kMaskRandom40_21,
+  kMaskRandom40_22,
+  kMaskRandom40_23,
+  kMaskRandom40_24,
+  kMaskRandom40_25,
+  kMaskRandom40_26,
+  kMaskRandom40_27,
+  kMaskRandom40_28,
+  kMaskRandom40_29,
+  kMaskRandom40_30,
+  kMaskRandom40_31,
+  kMaskRandom40_32,
+  kMaskRandom40_33,
+  kMaskRandom40_34,
+  kMaskRandom40_35,
+  kMaskRandom40_36,
+  kMaskRandom40_37,
+  kMaskRandom40_38,
+  kMaskRandom40_39,
+  kMaskRandom40_40
+};
+
+const uint8_t* kPacketMaskRandom41[41] = {
+  kMaskRandom41_1,
+  kMaskRandom41_2,
+  kMaskRandom41_3,
+  kMaskRandom41_4,
+  kMaskRandom41_5,
+  kMaskRandom41_6,
+  kMaskRandom41_7,
+  kMaskRandom41_8,
+  kMaskRandom41_9,
+  kMaskRandom41_10,
+  kMaskRandom41_11,
+  kMaskRandom41_12,
+  kMaskRandom41_13,
+  kMaskRandom41_14,
+  kMaskRandom41_15,
+  kMaskRandom41_16,
+  kMaskRandom41_17,
+  kMaskRandom41_18,
+  kMaskRandom41_19,
+  kMaskRandom41_20,
+  kMaskRandom41_21,
+  kMaskRandom41_22,
+  kMaskRandom41_23,
+  kMaskRandom41_24,
+  kMaskRandom41_25,
+  kMaskRandom41_26,
+  kMaskRandom41_27,
+  kMaskRandom41_28,
+  kMaskRandom41_29,
+  kMaskRandom41_30,
+  kMaskRandom41_31,
+  kMaskRandom41_32,
+  kMaskRandom41_33,
+  kMaskRandom41_34,
+  kMaskRandom41_35,
+  kMaskRandom41_36,
+  kMaskRandom41_37,
+  kMaskRandom41_38,
+  kMaskRandom41_39,
+  kMaskRandom41_40,
+  kMaskRandom41_41
+};
+
+const uint8_t* kPacketMaskRandom42[42] = {
+  kMaskRandom42_1,
+  kMaskRandom42_2,
+  kMaskRandom42_3,
+  kMaskRandom42_4,
+  kMaskRandom42_5,
+  kMaskRandom42_6,
+  kMaskRandom42_7,
+  kMaskRandom42_8,
+  kMaskRandom42_9,
+  kMaskRandom42_10,
+  kMaskRandom42_11,
+  kMaskRandom42_12,
+  kMaskRandom42_13,
+  kMaskRandom42_14,
+  kMaskRandom42_15,
+  kMaskRandom42_16,
+  kMaskRandom42_17,
+  kMaskRandom42_18,
+  kMaskRandom42_19,
+  kMaskRandom42_20,
+  kMaskRandom42_21,
+  kMaskRandom42_22,
+  kMaskRandom42_23,
+  kMaskRandom42_24,
+  kMaskRandom42_25,
+  kMaskRandom42_26,
+  kMaskRandom42_27,
+  kMaskRandom42_28,
+  kMaskRandom42_29,
+  kMaskRandom42_30,
+  kMaskRandom42_31,
+  kMaskRandom42_32,
+  kMaskRandom42_33,
+  kMaskRandom42_34,
+  kMaskRandom42_35,
+  kMaskRandom42_36,
+  kMaskRandom42_37,
+  kMaskRandom42_38,
+  kMaskRandom42_39,
+  kMaskRandom42_40,
+  kMaskRandom42_41,
+  kMaskRandom42_42
+};
+
+const uint8_t* kPacketMaskRandom43[43] = {
+  kMaskRandom43_1,
+  kMaskRandom43_2,
+  kMaskRandom43_3,
+  kMaskRandom43_4,
+  kMaskRandom43_5,
+  kMaskRandom43_6,
+  kMaskRandom43_7,
+  kMaskRandom43_8,
+  kMaskRandom43_9,
+  kMaskRandom43_10,
+  kMaskRandom43_11,
+  kMaskRandom43_12,
+  kMaskRandom43_13,
+  kMaskRandom43_14,
+  kMaskRandom43_15,
+  kMaskRandom43_16,
+  kMaskRandom43_17,
+  kMaskRandom43_18,
+  kMaskRandom43_19,
+  kMaskRandom43_20,
+  kMaskRandom43_21,
+  kMaskRandom43_22,
+  kMaskRandom43_23,
+  kMaskRandom43_24,
+  kMaskRandom43_25,
+  kMaskRandom43_26,
+  kMaskRandom43_27,
+  kMaskRandom43_28,
+  kMaskRandom43_29,
+  kMaskRandom43_30,
+  kMaskRandom43_31,
+  kMaskRandom43_32,
+  kMaskRandom43_33,
+  kMaskRandom43_34,
+  kMaskRandom43_35,
+  kMaskRandom43_36,
+  kMaskRandom43_37,
+  kMaskRandom43_38,
+  kMaskRandom43_39,
+  kMaskRandom43_40,
+  kMaskRandom43_41,
+  kMaskRandom43_42,
+  kMaskRandom43_43
+};
+
+const uint8_t* kPacketMaskRandom44[44] = {
+  kMaskRandom44_1,
+  kMaskRandom44_2,
+  kMaskRandom44_3,
+  kMaskRandom44_4,
+  kMaskRandom44_5,
+  kMaskRandom44_6,
+  kMaskRandom44_7,
+  kMaskRandom44_8,
+  kMaskRandom44_9,
+  kMaskRandom44_10,
+  kMaskRandom44_11,
+  kMaskRandom44_12,
+  kMaskRandom44_13,
+  kMaskRandom44_14,
+  kMaskRandom44_15,
+  kMaskRandom44_16,
+  kMaskRandom44_17,
+  kMaskRandom44_18,
+  kMaskRandom44_19,
+  kMaskRandom44_20,
+  kMaskRandom44_21,
+  kMaskRandom44_22,
+  kMaskRandom44_23,
+  kMaskRandom44_24,
+  kMaskRandom44_25,
+  kMaskRandom44_26,
+  kMaskRandom44_27,
+  kMaskRandom44_28,
+  kMaskRandom44_29,
+  kMaskRandom44_30,
+  kMaskRandom44_31,
+  kMaskRandom44_32,
+  kMaskRandom44_33,
+  kMaskRandom44_34,
+  kMaskRandom44_35,
+  kMaskRandom44_36,
+  kMaskRandom44_37,
+  kMaskRandom44_38,
+  kMaskRandom44_39,
+  kMaskRandom44_40,
+  kMaskRandom44_41,
+  kMaskRandom44_42,
+  kMaskRandom44_43,
+  kMaskRandom44_44
+};
+
+const uint8_t* kPacketMaskRandom45[45] = {
+  kMaskRandom45_1,
+  kMaskRandom45_2,
+  kMaskRandom45_3,
+  kMaskRandom45_4,
+  kMaskRandom45_5,
+  kMaskRandom45_6,
+  kMaskRandom45_7,
+  kMaskRandom45_8,
+  kMaskRandom45_9,
+  kMaskRandom45_10,
+  kMaskRandom45_11,
+  kMaskRandom45_12,
+  kMaskRandom45_13,
+  kMaskRandom45_14,
+  kMaskRandom45_15,
+  kMaskRandom45_16,
+  kMaskRandom45_17,
+  kMaskRandom45_18,
+  kMaskRandom45_19,
+  kMaskRandom45_20,
+  kMaskRandom45_21,
+  kMaskRandom45_22,
+  kMaskRandom45_23,
+  kMaskRandom45_24,
+  kMaskRandom45_25,
+  kMaskRandom45_26,
+  kMaskRandom45_27,
+  kMaskRandom45_28,
+  kMaskRandom45_29,
+  kMaskRandom45_30,
+  kMaskRandom45_31,
+  kMaskRandom45_32,
+  kMaskRandom45_33,
+  kMaskRandom45_34,
+  kMaskRandom45_35,
+  kMaskRandom45_36,
+  kMaskRandom45_37,
+  kMaskRandom45_38,
+  kMaskRandom45_39,
+  kMaskRandom45_40,
+  kMaskRandom45_41,
+  kMaskRandom45_42,
+  kMaskRandom45_43,
+  kMaskRandom45_44,
+  kMaskRandom45_45
+};
+
+const uint8_t* kPacketMaskRandom46[46] = {
+  kMaskRandom46_1,
+  kMaskRandom46_2,
+  kMaskRandom46_3,
+  kMaskRandom46_4,
+  kMaskRandom46_5,
+  kMaskRandom46_6,
+  kMaskRandom46_7,
+  kMaskRandom46_8,
+  kMaskRandom46_9,
+  kMaskRandom46_10,
+  kMaskRandom46_11,
+  kMaskRandom46_12,
+  kMaskRandom46_13,
+  kMaskRandom46_14,
+  kMaskRandom46_15,
+  kMaskRandom46_16,
+  kMaskRandom46_17,
+  kMaskRandom46_18,
+  kMaskRandom46_19,
+  kMaskRandom46_20,
+  kMaskRandom46_21,
+  kMaskRandom46_22,
+  kMaskRandom46_23,
+  kMaskRandom46_24,
+  kMaskRandom46_25,
+  kMaskRandom46_26,
+  kMaskRandom46_27,
+  kMaskRandom46_28,
+  kMaskRandom46_29,
+  kMaskRandom46_30,
+  kMaskRandom46_31,
+  kMaskRandom46_32,
+  kMaskRandom46_33,
+  kMaskRandom46_34,
+  kMaskRandom46_35,
+  kMaskRandom46_36,
+  kMaskRandom46_37,
+  kMaskRandom46_38,
+  kMaskRandom46_39,
+  kMaskRandom46_40,
+  kMaskRandom46_41,
+  kMaskRandom46_42,
+  kMaskRandom46_43,
+  kMaskRandom46_44,
+  kMaskRandom46_45,
+  kMaskRandom46_46
+};
+
+const uint8_t* kPacketMaskRandom47[47] = {
+  kMaskRandom47_1,
+  kMaskRandom47_2,
+  kMaskRandom47_3,
+  kMaskRandom47_4,
+  kMaskRandom47_5,
+  kMaskRandom47_6,
+  kMaskRandom47_7,
+  kMaskRandom47_8,
+  kMaskRandom47_9,
+  kMaskRandom47_10,
+  kMaskRandom47_11,
+  kMaskRandom47_12,
+  kMaskRandom47_13,
+  kMaskRandom47_14,
+  kMaskRandom47_15,
+  kMaskRandom47_16,
+  kMaskRandom47_17,
+  kMaskRandom47_18,
+  kMaskRandom47_19,
+  kMaskRandom47_20,
+  kMaskRandom47_21,
+  kMaskRandom47_22,
+  kMaskRandom47_23,
+  kMaskRandom47_24,
+  kMaskRandom47_25,
+  kMaskRandom47_26,
+  kMaskRandom47_27,
+  kMaskRandom47_28,
+  kMaskRandom47_29,
+  kMaskRandom47_30,
+  kMaskRandom47_31,
+  kMaskRandom47_32,
+  kMaskRandom47_33,
+  kMaskRandom47_34,
+  kMaskRandom47_35,
+  kMaskRandom47_36,
+  kMaskRandom47_37,
+  kMaskRandom47_38,
+  kMaskRandom47_39,
+  kMaskRandom47_40,
+  kMaskRandom47_41,
+  kMaskRandom47_42,
+  kMaskRandom47_43,
+  kMaskRandom47_44,
+  kMaskRandom47_45,
+  kMaskRandom47_46,
+  kMaskRandom47_47
+};
+
+const uint8_t* kPacketMaskRandom48[48] = {
+  kMaskRandom48_1,
+  kMaskRandom48_2,
+  kMaskRandom48_3,
+  kMaskRandom48_4,
+  kMaskRandom48_5,
+  kMaskRandom48_6,
+  kMaskRandom48_7,
+  kMaskRandom48_8,
+  kMaskRandom48_9,
+  kMaskRandom48_10,
+  kMaskRandom48_11,
+  kMaskRandom48_12,
+  kMaskRandom48_13,
+  kMaskRandom48_14,
+  kMaskRandom48_15,
+  kMaskRandom48_16,
+  kMaskRandom48_17,
+  kMaskRandom48_18,
+  kMaskRandom48_19,
+  kMaskRandom48_20,
+  kMaskRandom48_21,
+  kMaskRandom48_22,
+  kMaskRandom48_23,
+  kMaskRandom48_24,
+  kMaskRandom48_25,
+  kMaskRandom48_26,
+  kMaskRandom48_27,
+  kMaskRandom48_28,
+  kMaskRandom48_29,
+  kMaskRandom48_30,
+  kMaskRandom48_31,
+  kMaskRandom48_32,
+  kMaskRandom48_33,
+  kMaskRandom48_34,
+  kMaskRandom48_35,
+  kMaskRandom48_36,
+  kMaskRandom48_37,
+  kMaskRandom48_38,
+  kMaskRandom48_39,
+  kMaskRandom48_40,
+  kMaskRandom48_41,
+  kMaskRandom48_42,
+  kMaskRandom48_43,
+  kMaskRandom48_44,
+  kMaskRandom48_45,
+  kMaskRandom48_46,
+  kMaskRandom48_47,
+  kMaskRandom48_48
+};
+
+const uint8_t** kPacketMaskRandomTbl[48] = {
+  kPacketMaskRandom1,
+  kPacketMaskRandom2,
+  kPacketMaskRandom3,
+  kPacketMaskRandom4,
+  kPacketMaskRandom5,
+  kPacketMaskRandom6,
+  kPacketMaskRandom7,
+  kPacketMaskRandom8,
+  kPacketMaskRandom9,
+  kPacketMaskRandom10,
+  kPacketMaskRandom11,
+  kPacketMaskRandom12,
+  kPacketMaskRandom13,
+  kPacketMaskRandom14,
+  kPacketMaskRandom15,
+  kPacketMaskRandom16,
+  kPacketMaskRandom17,
+  kPacketMaskRandom18,
+  kPacketMaskRandom19,
+  kPacketMaskRandom20,
+  kPacketMaskRandom21,
+  kPacketMaskRandom22,
+  kPacketMaskRandom23,
+  kPacketMaskRandom24,
+  kPacketMaskRandom25,
+  kPacketMaskRandom26,
+  kPacketMaskRandom27,
+  kPacketMaskRandom28,
+  kPacketMaskRandom29,
+  kPacketMaskRandom30,
+  kPacketMaskRandom31,
+  kPacketMaskRandom32,
+  kPacketMaskRandom33,
+  kPacketMaskRandom34,
+  kPacketMaskRandom35,
+  kPacketMaskRandom36,
+  kPacketMaskRandom37,
+  kPacketMaskRandom38,
+  kPacketMaskRandom39,
+  kPacketMaskRandom40,
+  kPacketMaskRandom41,
+  kPacketMaskRandom42,
+  kPacketMaskRandom43,
+  kPacketMaskRandom44,
+  kPacketMaskRandom45,
+  kPacketMaskRandom46,
+  kPacketMaskRandom47,
+  kPacketMaskRandom48
+};
+
+}  // namespace
+#endif  // WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_PRIVATE_TABLES_H_
+
diff --git a/src/modules/rtp_rtcp/source/fec_test_helper.cc b/src/modules/rtp_rtcp/source/fec_test_helper.cc
new file mode 100644
index 0000000..1fbadb8
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/fec_test_helper.cc
@@ -0,0 +1,99 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/fec_test_helper.h"
+
+#include "modules/rtp_rtcp/source/rtp_utility.h"
+
+namespace webrtc {
+
+FrameGenerator::FrameGenerator()
+    : num_packets_(0),
+      seq_num_(0),
+      timestamp_(0) {}
+
+void FrameGenerator::NewFrame(int num_packets) {
+  num_packets_ = num_packets;
+  timestamp_ += 3000;
+}
+
+uint16_t FrameGenerator::NextSeqNum() {
+  return ++seq_num_;
+}
+
+RtpPacket* FrameGenerator::NextPacket(int offset, size_t length) {
+  RtpPacket* rtp_packet = new RtpPacket;
+  for (size_t i = 0; i < length; ++i)
+    rtp_packet->data[i + kRtpHeaderSize] = offset + i;
+  rtp_packet->length = length + kRtpHeaderSize;
+  memset(&rtp_packet->header, 0, sizeof(WebRtcRTPHeader));
+  rtp_packet->header.frameType = kVideoFrameDelta;
+  rtp_packet->header.header.headerLength = kRtpHeaderSize;
+  rtp_packet->header.header.markerBit = (num_packets_ == 1);
+  rtp_packet->header.header.sequenceNumber = seq_num_;
+  rtp_packet->header.header.timestamp = timestamp_;
+  rtp_packet->header.header.payloadType = kVp8PayloadType;
+  BuildRtpHeader(rtp_packet->data, &rtp_packet->header.header);
+  ++seq_num_;
+  --num_packets_;
+  return rtp_packet;
+}
+
+// Creates a new RtpPacket with the RED header added to the packet.
+RtpPacket* FrameGenerator::BuildMediaRedPacket(const RtpPacket* packet) {
+  const int kHeaderLength = packet->header.header.headerLength;
+  RtpPacket* red_packet = new RtpPacket;
+  red_packet->header = packet->header;
+  red_packet->length = packet->length + 1;  // 1 byte RED header.
+  memset(red_packet->data, 0, red_packet->length);
+  // Copy RTP header.
+  memcpy(red_packet->data, packet->data, kHeaderLength);
+  SetRedHeader(red_packet, red_packet->data[1] & 0x7f, kHeaderLength);
+  memcpy(red_packet->data + kHeaderLength + 1, packet->data + kHeaderLength,
+         packet->length - kHeaderLength);
+  return red_packet;
+}
+
+// Creates a new RtpPacket with FEC payload and red header. Does this by
+// creating a new fake media RtpPacket, clears the marker bit and adds a RED
+// header. Finally replaces the payload with the content of |packet->data|.
+RtpPacket* FrameGenerator::BuildFecRedPacket(const Packet* packet) {
+  // Create a fake media packet to get a correct header. 1 byte RED header.
+  ++num_packets_;
+  RtpPacket* red_packet = NextPacket(0, packet->length + 1);
+  red_packet->data[1] &= ~0x80;  // Clear marker bit.
+  const int kHeaderLength = red_packet->header.header.headerLength;
+  SetRedHeader(red_packet, kFecPayloadType, kHeaderLength);
+  memcpy(red_packet->data + kHeaderLength + 1, packet->data,
+         packet->length);
+  red_packet->length = kHeaderLength + 1 + packet->length;
+  return red_packet;
+}
+
+void FrameGenerator::SetRedHeader(Packet* red_packet, uint8_t payload_type,
+                                  int header_length) const {
+  // Replace pltype.
+  red_packet->data[1] &= 0x80;  // Reset.
+  red_packet->data[1] += kRedPayloadType;  // Replace.
+
+  // Add RED header, f-bit always 0.
+  red_packet->data[header_length] = payload_type;
+}
+
+void FrameGenerator::BuildRtpHeader(uint8_t* data, const RTPHeader* header) {
+  data[0] = 0x80;  // Version 2.
+  data[1] = header->payloadType;
+  data[1] |= (header->markerBit ? kRtpMarkerBitMask : 0);
+  ModuleRTPUtility::AssignUWord16ToBuffer(data+2, header->sequenceNumber);
+  ModuleRTPUtility::AssignUWord32ToBuffer(data+4, header->timestamp);
+  ModuleRTPUtility::AssignUWord32ToBuffer(data+8, header->ssrc);
+}
+
+}  // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/fec_test_helper.h b/src/modules/rtp_rtcp/source/fec_test_helper.h
new file mode 100644
index 0000000..4a037c7
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/fec_test_helper.h
@@ -0,0 +1,60 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_TEST_HELPER_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_TEST_HELPER_H_
+
+#include "modules/interface/module_common_types.h"
+#include "modules/rtp_rtcp/source/forward_error_correction.h"
+
+namespace webrtc {
+
+enum { kRtpHeaderSize = 12 };
+enum { kFecPayloadType = 96 };
+enum { kRedPayloadType = 97 };
+enum { kVp8PayloadType = 120 };
+
+typedef ForwardErrorCorrection::Packet Packet;
+
+struct RtpPacket : public Packet {
+  WebRtcRTPHeader header;
+};
+
+class FrameGenerator {
+ public:
+  FrameGenerator();
+
+  void NewFrame(int num_packets);
+
+  uint16_t NextSeqNum();
+
+  RtpPacket* NextPacket(int offset, size_t length);
+
+  // Creates a new RtpPacket with the RED header added to the packet.
+  RtpPacket* BuildMediaRedPacket(const RtpPacket* packet);
+
+  // Creates a new RtpPacket with FEC payload and red header. Does this by
+  // creating a new fake media RtpPacket, clears the marker bit and adds a RED
+  // header. Finally replaces the payload with the content of |packet->data|.
+  RtpPacket* BuildFecRedPacket(const Packet* packet);
+
+  void SetRedHeader(Packet* red_packet, uint8_t payload_type,
+                    int header_length) const;
+
+ private:
+  static void BuildRtpHeader(uint8_t* data, const RTPHeader* header);
+
+  int num_packets_;
+  uint16_t seq_num_;
+  uint32_t timestamp_;
+};
+}
+
+#endif  // WEBRTC_MODULES_RTP_RTCP_SOURCE_FEC_TEST_HELPER_H_
diff --git a/src/modules/rtp_rtcp/source/forward_error_correction.cc b/src/modules/rtp_rtcp/source/forward_error_correction.cc
new file mode 100644
index 0000000..bdad224
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/forward_error_correction.cc
@@ -0,0 +1,844 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/forward_error_correction.h"
+
+#include <algorithm>
+#include <cassert>
+#include <cstring>
+#include <iterator>
+
+#include "modules/rtp_rtcp/source/forward_error_correction_internal.h"
+#include "modules/rtp_rtcp/source/rtp_utility.h"
+#include "system_wrappers/interface/trace.h"
+
+namespace webrtc {
+
+// Minimum RTP header size in bytes.
+const uint8_t kRtpHeaderSize = 12;
+
+// FEC header size in bytes.
+const uint8_t kFecHeaderSize = 10;
+
+// ULP header size in bytes (L bit is set).
+const uint8_t kUlpHeaderSizeLBitSet = (2 + kMaskSizeLBitSet);
+
+// ULP header size in bytes (L bit is cleared).
+const uint8_t kUlpHeaderSizeLBitClear = (2 + kMaskSizeLBitClear);
+
+// Transport header size in bytes. Assume UDP/IPv4 as a reasonable minimum.
+const uint8_t kTransportOverhead = 28;
+
+enum { kMaxFecPackets = ForwardErrorCorrection::kMaxMediaPackets };
+
+// Used to link media packets to their protecting FEC packets.
+//
+// TODO(holmer): Refactor into a proper class.
+class ProtectedPacket : public ForwardErrorCorrection::SortablePacket {
+ public:
+  scoped_refptr<ForwardErrorCorrection::Packet> pkt;
+};
+
+typedef std::list<ProtectedPacket*> ProtectedPacketList;
+
+//
+// Used for internal storage of FEC packets in a list.
+//
+// TODO(holmer): Refactor into a proper class.
+class FecPacket : public ForwardErrorCorrection::SortablePacket {
+ public:
+    ProtectedPacketList protectedPktList;
+    uint32_t ssrc;  // SSRC of the current frame.
+    scoped_refptr<ForwardErrorCorrection::Packet> pkt;
+};
+
+bool ForwardErrorCorrection::SortablePacket::LessThan(
+    const SortablePacket* first,
+    const SortablePacket* second) {
+  return (first->seqNum != second->seqNum &&
+      LatestSequenceNumber(first->seqNum, second->seqNum) == second->seqNum);
+}
+
+ForwardErrorCorrection::ForwardErrorCorrection(int32_t id)
+    : _id(id),
+      _generatedFecPackets(kMaxMediaPackets),
+      _fecPacketReceived(false) {
+}
+
+ForwardErrorCorrection::~ForwardErrorCorrection() {
+}
+
+// Input packet
+//   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//   |                    RTP Header (12 octets)                     |
+//   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//   |                         RTP Payload                           |
+//   |                                                               |
+//   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+// Output packet
+//   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//   |                    FEC Header (10 octets)                     |
+//   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//   |                      FEC Level 0 Header                       |
+//   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//   |                     FEC Level 0 Payload                       |
+//   |                                                               |
+//   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+int32_t ForwardErrorCorrection::GenerateFEC(
+    const PacketList& mediaPacketList,
+    uint8_t protectionFactor,
+    int numImportantPackets,
+    bool useUnequalProtection,
+    FecMaskType fec_mask_type,
+    PacketList* fecPacketList) {
+  if (mediaPacketList.empty()) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s media packet list is empty", __FUNCTION__);
+    return -1;
+  }
+  if (!fecPacketList->empty()) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s FEC packet list is not empty", __FUNCTION__);
+    return -1;
+  }
+  const uint16_t numMediaPackets = mediaPacketList.size();
+  bool lBit = (numMediaPackets > 8 * kMaskSizeLBitClear);
+  int numMaskBytes = lBit ? kMaskSizeLBitSet : kMaskSizeLBitClear;
+
+  if (numMediaPackets > kMaxMediaPackets) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s can only protect %d media packets per frame; %d requested",
+                 __FUNCTION__, kMaxMediaPackets, numMediaPackets);
+    return -1;
+  }
+
+  // Error checking on the number of important packets.
+  // Can't have more important packets than media packets.
+  if (numImportantPackets > numMediaPackets) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+        "Number of important packets (%d) greater than number of media "
+        "packets (%d)", numImportantPackets, numMediaPackets);
+    return -1;
+  }
+  if (numImportantPackets < 0) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "Number of important packets (%d) less than zero",
+                 numImportantPackets);
+    return -1;
+  }
+  // Do some error checking on the media packets.
+  PacketList::const_iterator mediaListIt = mediaPacketList.begin();
+  while (mediaListIt != mediaPacketList.end()) {
+    Packet* mediaPacket = *mediaListIt;
+    assert(mediaPacket);
+
+    if (mediaPacket->length < kRtpHeaderSize) {
+      WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                   "%s media packet (%d bytes) is smaller than RTP header",
+                   __FUNCTION__, mediaPacket->length);
+      return -1;
+    }
+
+    // Ensure our FEC packets will fit in a typical MTU.
+    if (mediaPacket->length + PacketOverhead() + kTransportOverhead >
+        IP_PACKET_SIZE) {
+      WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+          "%s media packet (%d bytes) with overhead is larger than MTU(%d)",
+          __FUNCTION__, mediaPacket->length, IP_PACKET_SIZE);
+      return -1;
+    }
+    mediaListIt++;
+  }
+
+  int numFecPackets = GetNumberOfFecPackets(numMediaPackets, protectionFactor);
+  if (numFecPackets == 0) {
+    return 0;
+  }
+
+  // Prepare FEC packets by setting them to 0.
+  for (int i = 0; i < numFecPackets; i++) {
+    memset(_generatedFecPackets[i].data, 0, IP_PACKET_SIZE);
+    _generatedFecPackets[i].length = 0;  // Use this as a marker for untouched
+    // packets.
+    fecPacketList->push_back(&_generatedFecPackets[i]);
+  }
+
+  const internal::PacketMaskTable mask_table(fec_mask_type, numMediaPackets);
+
+  // -- Generate packet masks --
+  // Always allocate space for a large mask.
+  uint8_t* packetMask = new uint8_t[numFecPackets * kMaskSizeLBitSet];
+  memset(packetMask, 0, numFecPackets * numMaskBytes);
+  internal::GeneratePacketMasks(numMediaPackets, numFecPackets,
+                                numImportantPackets, useUnequalProtection,
+                                mask_table, packetMask);
+
+  int numMaskBits = InsertZerosInBitMasks(mediaPacketList, packetMask,
+                                          numMaskBytes, numFecPackets);
+
+  lBit = (numMaskBits > 8 * kMaskSizeLBitClear);
+
+  if (numMaskBits < 0) {
+    delete [] packetMask;
+    return -1;
+  }
+  if (lBit) {
+    numMaskBytes = kMaskSizeLBitSet;
+  }
+
+  GenerateFecBitStrings(mediaPacketList, packetMask, numFecPackets, lBit);
+  GenerateFecUlpHeaders(mediaPacketList, packetMask, lBit, numFecPackets);
+
+  delete [] packetMask;
+  return 0;
+}
+
+int ForwardErrorCorrection::GetNumberOfFecPackets(int numMediaPackets,
+                                                  int protectionFactor) {
+  // Result in Q0 with an unsigned round.
+  int numFecPackets = (numMediaPackets * protectionFactor + (1 << 7)) >> 8;
+  // Generate at least one FEC packet if we need protection.
+  if (protectionFactor > 0 && numFecPackets == 0) {
+    numFecPackets = 1;
+  }
+  assert(numFecPackets <= numMediaPackets);
+  return numFecPackets;
+}
+
+void ForwardErrorCorrection::GenerateFecBitStrings(
+    const PacketList& mediaPacketList,
+    uint8_t* packetMask,
+    int numFecPackets,
+    bool lBit) {
+  if (mediaPacketList.empty()) {
+    return;
+  }
+  uint8_t mediaPayloadLength[2];
+  const int numMaskBytes = lBit ? kMaskSizeLBitSet : kMaskSizeLBitClear;
+  const uint16_t ulpHeaderSize = lBit ?
+      kUlpHeaderSizeLBitSet : kUlpHeaderSizeLBitClear;
+  const uint16_t fecRtpOffset = kFecHeaderSize + ulpHeaderSize - kRtpHeaderSize;
+
+  for (int i = 0; i < numFecPackets; i++) {
+    PacketList::const_iterator mediaListIt = mediaPacketList.begin();
+    uint32_t pktMaskIdx = i * numMaskBytes;
+    uint32_t mediaPktIdx = 0;
+    uint16_t fecPacketLength = 0;
+    uint16_t prevSeqNum = ParseSequenceNumber((*mediaListIt)->data);
+    while (mediaListIt != mediaPacketList.end()) {
+      // Each FEC packet has a multiple byte mask.
+      if (packetMask[pktMaskIdx] & (1 << (7 - mediaPktIdx))) {
+        Packet* mediaPacket = *mediaListIt;
+
+        // Assign network-ordered media payload length.
+        ModuleRTPUtility::AssignUWord16ToBuffer(
+            mediaPayloadLength,
+            mediaPacket->length - kRtpHeaderSize);
+
+        fecPacketLength = mediaPacket->length + fecRtpOffset;
+        // On the first protected packet, we don't need to XOR.
+        if (_generatedFecPackets[i].length == 0) {
+          // Copy the first 2 bytes of the RTP header.
+          memcpy(_generatedFecPackets[i].data, mediaPacket->data, 2);
+          // Copy the 5th to 8th bytes of the RTP header.
+          memcpy(&_generatedFecPackets[i].data[4], &mediaPacket->data[4], 4);
+          // Copy network-ordered payload size.
+          memcpy(&_generatedFecPackets[i].data[8], mediaPayloadLength, 2);
+
+          // Copy RTP payload, leaving room for the ULP header.
+          memcpy(&_generatedFecPackets[i].data[kFecHeaderSize + ulpHeaderSize],
+                 &mediaPacket->data[kRtpHeaderSize],
+                 mediaPacket->length - kRtpHeaderSize);
+        } else {
+          // XOR with the first 2 bytes of the RTP header.
+          _generatedFecPackets[i].data[0] ^= mediaPacket->data[0];
+          _generatedFecPackets[i].data[1] ^= mediaPacket->data[1];
+
+          // XOR with the 5th to 8th bytes of the RTP header.
+          for (uint32_t j = 4; j < 8; j++) {
+            _generatedFecPackets[i].data[j] ^= mediaPacket->data[j];
+          }
+
+          // XOR with the network-ordered payload size.
+          _generatedFecPackets[i].data[8] ^= mediaPayloadLength[0];
+          _generatedFecPackets[i].data[9] ^= mediaPayloadLength[1];
+
+          // XOR with RTP payload, leaving room for the ULP header.
+          for (int32_t j = kFecHeaderSize + ulpHeaderSize;
+              j < fecPacketLength; j++) {
+            _generatedFecPackets[i].data[j] ^=
+                mediaPacket->data[j - fecRtpOffset];
+          }
+        }
+        if (fecPacketLength > _generatedFecPackets[i].length) {
+          _generatedFecPackets[i].length = fecPacketLength;
+        }
+      }
+      mediaListIt++;
+      if (mediaListIt != mediaPacketList.end()) {
+        uint16_t seqNum = ParseSequenceNumber((*mediaListIt)->data);
+        mediaPktIdx += static_cast<uint16_t>(seqNum - prevSeqNum);
+        prevSeqNum = seqNum;
+      }
+      if (mediaPktIdx == 8) {
+        // Switch to the next mask byte.
+        mediaPktIdx = 0;
+        pktMaskIdx++;
+      }
+    }
+    assert(_generatedFecPackets[i].length);
+    //Note: This shouldn't happen: means packet mask is wrong or poorly designed
+  }
+}
+
+int ForwardErrorCorrection::InsertZerosInBitMasks(
+    const PacketList& media_packets,
+    uint8_t* packet_mask,
+    int num_mask_bytes,
+    int num_fec_packets) {
+  uint8_t* new_mask = NULL;
+  if (media_packets.size() <= 1) {
+    return media_packets.size();
+  }
+  int last_seq_num = ParseSequenceNumber(media_packets.back()->data);
+  int first_seq_num = ParseSequenceNumber(media_packets.front()->data);
+  int total_missing_seq_nums = static_cast<uint16_t>(last_seq_num -
+                                                     first_seq_num) -
+                                                     media_packets.size() + 1;
+  if (total_missing_seq_nums == 0) {
+    // All sequence numbers are covered by the packet mask. No zero insertion
+    // required.
+    return media_packets.size();
+  }
+  // Allocate the new mask.
+  int new_mask_bytes = kMaskSizeLBitClear;
+  if (media_packets.size() + total_missing_seq_nums > 8 * kMaskSizeLBitClear) {
+    new_mask_bytes = kMaskSizeLBitSet;
+  }
+  new_mask = new uint8_t[num_fec_packets * kMaskSizeLBitSet];
+  memset(new_mask, 0, num_fec_packets * kMaskSizeLBitSet);
+
+  PacketList::const_iterator it = media_packets.begin();
+  uint16_t prev_seq_num = first_seq_num;
+  ++it;
+
+  // Insert the first column.
+  CopyColumn(new_mask, new_mask_bytes, packet_mask, num_mask_bytes,
+             num_fec_packets, 0, 0);
+  int new_bit_index = 1;
+  int old_bit_index = 1;
+  // Insert zeros in the bit mask for every hole in the sequence.
+  for (; it != media_packets.end(); ++it) {
+    if (new_bit_index == 8 * kMaskSizeLBitSet) {
+      // We can only cover up to 48 packets.
+      break;
+    }
+    uint16_t seq_num = ParseSequenceNumber((*it)->data);
+    const int zeros_to_insert =
+        static_cast<uint16_t>(seq_num - prev_seq_num - 1);
+    if (zeros_to_insert > 0) {
+      InsertZeroColumns(zeros_to_insert, new_mask, new_mask_bytes,
+                        num_fec_packets, new_bit_index);
+    }
+    new_bit_index += zeros_to_insert;
+    CopyColumn(new_mask, new_mask_bytes, packet_mask, num_mask_bytes,
+               num_fec_packets, new_bit_index, old_bit_index);
+    ++new_bit_index;
+    ++old_bit_index;
+    prev_seq_num = seq_num;
+  }
+  if (new_bit_index % 8 != 0) {
+    // We didn't fill the last byte. Shift bits to correct position.
+    for (uint16_t row = 0; row < num_fec_packets; ++row) {
+      int new_byte_index = row * new_mask_bytes + new_bit_index / 8;
+      new_mask[new_byte_index] <<= (7 - (new_bit_index % 8));
+    }
+  }
+  // Replace the old mask with the new.
+  memcpy(packet_mask, new_mask, kMaskSizeLBitSet * num_fec_packets);
+  delete [] new_mask;
+  return new_bit_index;
+}
+
+void ForwardErrorCorrection::InsertZeroColumns(int num_zeros,
+                                               uint8_t* new_mask,
+                                               int new_mask_bytes,
+                                               int num_fec_packets,
+                                               int new_bit_index) {
+  for (uint16_t row = 0; row < num_fec_packets; ++row) {
+    const int new_byte_index = row * new_mask_bytes + new_bit_index / 8;
+    const int max_shifts = (7 - (new_bit_index % 8));
+    new_mask[new_byte_index] <<= std::min(num_zeros, max_shifts);
+  }
+}
+
+void ForwardErrorCorrection::CopyColumn(uint8_t* new_mask,
+                                        int new_mask_bytes,
+                                        uint8_t* old_mask,
+                                        int old_mask_bytes,
+                                        int num_fec_packets,
+                                        int new_bit_index,
+                                        int old_bit_index) {
+  // Copy column from the old mask to the beginning of the new mask and shift it
+  // out from the old mask.
+  for (uint16_t row = 0; row < num_fec_packets; ++row) {
+    int new_byte_index = row * new_mask_bytes + new_bit_index / 8;
+    int old_byte_index = row * old_mask_bytes + old_bit_index / 8;
+    new_mask[new_byte_index] |= ((old_mask[old_byte_index] & 0x80) >> 7);
+    if (new_bit_index % 8 != 7) {
+      new_mask[new_byte_index] <<= 1;
+    }
+    old_mask[old_byte_index] <<= 1;
+  }
+}
+
+void ForwardErrorCorrection::GenerateFecUlpHeaders(
+    const PacketList& mediaPacketList,
+    uint8_t* packetMask,
+    bool lBit,
+    int numFecPackets) {
+  // -- Generate FEC and ULP headers --
+  //
+  // FEC Header, 10 bytes
+  //    0                   1                   2                   3
+  //    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+  //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+  //   |E|L|P|X|  CC   |M| PT recovery |            SN base            |
+  //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+  //   |                          TS recovery                          |
+  //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+  //   |        length recovery        |
+  //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+  //
+  // ULP Header, 4 bytes (for L = 0)
+  //    0                   1                   2                   3
+  //    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+  //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+  //   |       Protection Length       |             mask              |
+  //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+  //   |              mask cont. (present only when L = 1)             |
+  //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+  PacketList::const_iterator mediaListIt = mediaPacketList.begin();
+  Packet* mediaPacket = *mediaListIt;
+  assert(mediaPacket != NULL);
+  int numMaskBytes = lBit ? kMaskSizeLBitSet : kMaskSizeLBitClear;
+  const uint16_t ulpHeaderSize = lBit ?
+      kUlpHeaderSizeLBitSet : kUlpHeaderSizeLBitClear;
+
+  for (int i = 0; i < numFecPackets; i++) {
+    // -- FEC header --
+    _generatedFecPackets[i].data[0] &= 0x7f; // Set E to zero.
+    if (lBit == 0) {
+      _generatedFecPackets[i].data[0] &= 0xbf; // Clear the L bit.
+    } else {
+      _generatedFecPackets[i].data[0] |= 0x40; // Set the L bit.
+    }
+    // Two byte sequence number from first RTP packet to SN base.
+    // We use the same sequence number base for every FEC packet,
+    // but that's not required in general.
+    memcpy(&_generatedFecPackets[i].data[2], &mediaPacket->data[2], 2);
+
+    // -- ULP header --
+    // Copy the payload size to the protection length field.
+    // (We protect the entire packet.)
+    ModuleRTPUtility::AssignUWord16ToBuffer(&_generatedFecPackets[i].data[10],
+        _generatedFecPackets[i].length - kFecHeaderSize - ulpHeaderSize);
+
+    // Copy the packet mask.
+    memcpy(&_generatedFecPackets[i].data[12], &packetMask[i * numMaskBytes],
+           numMaskBytes);
+  }
+}
+
+void ForwardErrorCorrection::ResetState(
+    RecoveredPacketList* recoveredPacketList) {
+  _fecPacketReceived = false;
+
+  // Free the memory for any existing recovered packets, if the user hasn't.
+  while (!recoveredPacketList->empty()) {
+    delete recoveredPacketList->front();
+    recoveredPacketList->pop_front();
+  }
+  assert(recoveredPacketList->empty());
+
+  // Free the FEC packet list.
+  while (!_fecPacketList.empty()) {
+    FecPacketList::iterator fecPacketListIt = _fecPacketList.begin();
+    FecPacket* fecPacket = *fecPacketListIt;
+    ProtectedPacketList::iterator protectedPacketListIt;
+    protectedPacketListIt = fecPacket->protectedPktList.begin();
+    while (protectedPacketListIt != fecPacket->protectedPktList.end()) {
+      delete *protectedPacketListIt;
+      protectedPacketListIt =
+          fecPacket->protectedPktList.erase(protectedPacketListIt);
+    }
+    assert(fecPacket->protectedPktList.empty());
+    delete fecPacket;
+    _fecPacketList.pop_front();
+  }
+  assert(_fecPacketList.empty());
+}
+
+void ForwardErrorCorrection::InsertMediaPacket(
+    ReceivedPacket* rxPacket,
+    RecoveredPacketList* recoveredPacketList) {
+  RecoveredPacketList::iterator recoveredPacketListIt =
+      recoveredPacketList->begin();
+
+  // Search for duplicate packets.
+  while (recoveredPacketListIt != recoveredPacketList->end()) {
+    if (rxPacket->seqNum == (*recoveredPacketListIt)->seqNum) {
+      // Duplicate packet, no need to add to list.
+      // Delete duplicate media packet data.
+      rxPacket->pkt = NULL;
+      return;
+    }
+    recoveredPacketListIt++;
+  }
+  RecoveredPacket* recoverdPacketToInsert = new RecoveredPacket;
+  recoverdPacketToInsert->wasRecovered = false;
+  // Inserted Media packet is already sent to VCM.
+  recoverdPacketToInsert->returned = true;
+  recoverdPacketToInsert->seqNum = rxPacket->seqNum;
+  recoverdPacketToInsert->pkt = rxPacket->pkt;
+  recoverdPacketToInsert->pkt->length = rxPacket->pkt->length;
+
+  // TODO(holmer): Consider replacing this with a binary search for the right
+  // position, and then just insert the new packet. Would get rid of the sort.
+  recoveredPacketList->push_back(recoverdPacketToInsert);
+  recoveredPacketList->sort(SortablePacket::LessThan);
+  UpdateCoveringFECPackets(recoverdPacketToInsert);
+}
+
+void ForwardErrorCorrection::UpdateCoveringFECPackets(RecoveredPacket* packet) {
+  for (FecPacketList::iterator it = _fecPacketList.begin();
+      it != _fecPacketList.end(); ++it) {
+    // Is this FEC packet protecting the media packet |packet|?
+    ProtectedPacketList::iterator protected_it = std::lower_bound(
+        (*it)->protectedPktList.begin(),
+        (*it)->protectedPktList.end(),
+        packet,
+        SortablePacket::LessThan);
+    if (protected_it != (*it)->protectedPktList.end() &&
+        (*protected_it)->seqNum == packet->seqNum) {
+      // Found an FEC packet which is protecting |packet|.
+      (*protected_it)->pkt = packet->pkt;
+    }
+  }
+}
+
+void ForwardErrorCorrection::InsertFECPacket(
+    ReceivedPacket* rxPacket,
+    const RecoveredPacketList* recoveredPacketList) {
+  _fecPacketReceived = true;
+
+  // Check for duplicate.
+  FecPacketList::iterator fecPacketListIt = _fecPacketList.begin();
+  while (fecPacketListIt != _fecPacketList.end()) {
+    if (rxPacket->seqNum == (*fecPacketListIt)->seqNum) {
+      // Delete duplicate FEC packet data.
+      rxPacket->pkt = NULL;
+      return;
+    }
+    fecPacketListIt++;
+  }
+  FecPacket* fecPacket = new FecPacket;
+  fecPacket->pkt = rxPacket->pkt;
+  fecPacket->seqNum = rxPacket->seqNum;
+  fecPacket->ssrc = rxPacket->ssrc;
+
+  const uint16_t seqNumBase = ModuleRTPUtility::BufferToUWord16(
+      &fecPacket->pkt->data[2]);
+  const uint16_t maskSizeBytes = (fecPacket->pkt->data[0] & 0x40) ?
+      kMaskSizeLBitSet : kMaskSizeLBitClear;  // L bit set?
+
+  for (uint16_t byteIdx = 0; byteIdx < maskSizeBytes; byteIdx++) {
+    uint8_t packetMask = fecPacket->pkt->data[12 + byteIdx];
+    for (uint16_t bitIdx = 0; bitIdx < 8; bitIdx++) {
+      if (packetMask & (1 << (7 - bitIdx))) {
+        ProtectedPacket* protectedPacket = new ProtectedPacket;
+        fecPacket->protectedPktList.push_back(protectedPacket);
+        // This wraps naturally with the sequence number.
+        protectedPacket->seqNum = static_cast<uint16_t>(seqNumBase +
+            (byteIdx << 3) + bitIdx);
+        protectedPacket->pkt = NULL;
+      }
+    }
+  }
+  if (fecPacket->protectedPktList.empty()) {
+    // All-zero packet mask; we can discard this FEC packet.
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
+                 "FEC packet %u has an all-zero packet mask.",
+                 fecPacket->seqNum, __FUNCTION__);
+    delete fecPacket;
+  } else {
+    AssignRecoveredPackets(fecPacket,
+                           recoveredPacketList);
+    // TODO(holmer): Consider replacing this with a binary search for the right
+    // position, and then just insert the new packet. Would get rid of the sort.
+    _fecPacketList.push_back(fecPacket);
+    _fecPacketList.sort(SortablePacket::LessThan);
+    if (_fecPacketList.size() > kMaxFecPackets) {
+      DiscardFECPacket(_fecPacketList.front());
+      _fecPacketList.pop_front();
+    }
+    assert(_fecPacketList.size() <= kMaxFecPackets);
+  }
+}
+
+void ForwardErrorCorrection::AssignRecoveredPackets(
+    FecPacket* fec_packet,
+    const RecoveredPacketList* recovered_packets) {
+  // Search for missing packets which have arrived or have been recovered by
+  // another FEC packet.
+  ProtectedPacketList* not_recovered = &fec_packet->protectedPktList;
+  RecoveredPacketList already_recovered;
+  std::set_intersection(
+      recovered_packets->begin(), recovered_packets->end(),
+      not_recovered->begin(), not_recovered->end(),
+      std::inserter(already_recovered, already_recovered.end()),
+      SortablePacket::LessThan);
+  // Set the FEC pointers to all recovered packets so that we don't have to
+  // search for them when we are doing recovery.
+  ProtectedPacketList::iterator not_recovered_it = not_recovered->begin();
+  for (RecoveredPacketList::iterator it = already_recovered.begin();
+      it != already_recovered.end(); ++it) {
+    // Search for the next recovered packet in |not_recovered|.
+    while ((*not_recovered_it)->seqNum != (*it)->seqNum)
+      ++not_recovered_it;
+    (*not_recovered_it)->pkt = (*it)->pkt;
+  }
+}
+
+void ForwardErrorCorrection::InsertPackets(
+    ReceivedPacketList* receivedPacketList,
+    RecoveredPacketList* recoveredPacketList) {
+
+  while (!receivedPacketList->empty()) {
+    ReceivedPacket* rxPacket = receivedPacketList->front();
+
+    if (rxPacket->isFec) {
+      InsertFECPacket(rxPacket, recoveredPacketList);
+    } else {
+      // Insert packet at the end of |recoveredPacketList|.
+      InsertMediaPacket(rxPacket, recoveredPacketList);
+    }
+    // Delete the received packet "wrapper", but not the packet data.
+    delete rxPacket;
+    receivedPacketList->pop_front();
+  }
+  assert(receivedPacketList->empty());
+  DiscardOldPackets(recoveredPacketList);
+}
+
+void ForwardErrorCorrection::InitRecovery(
+    const FecPacket* fec_packet,
+    RecoveredPacket* recovered) {
+  // This is the first packet which we try to recover with.
+  const uint16_t ulpHeaderSize = fec_packet->pkt->data[0] & 0x40 ?
+      kUlpHeaderSizeLBitSet : kUlpHeaderSizeLBitClear;  // L bit set?
+  recovered->pkt = new Packet;
+  memset(recovered->pkt->data, 0, IP_PACKET_SIZE);
+  recovered->returned = false;
+  recovered->wasRecovered = true;
+  uint8_t protectionLength[2];
+  // Copy the protection length from the ULP header.
+  memcpy(protectionLength, &fec_packet->pkt->data[10], 2);
+  // Copy FEC payload, skipping the ULP header.
+  memcpy(&recovered->pkt->data[kRtpHeaderSize],
+         &fec_packet->pkt->data[kFecHeaderSize + ulpHeaderSize],
+         ModuleRTPUtility::BufferToUWord16(protectionLength));
+  // Copy the length recovery field.
+  memcpy(recovered->length_recovery, &fec_packet->pkt->data[8], 2);
+  // Copy the first 2 bytes of the FEC header.
+  memcpy(recovered->pkt->data, fec_packet->pkt->data, 2);
+  // Copy the 5th to 8th bytes of the FEC header.
+  memcpy(&recovered->pkt->data[4], &fec_packet->pkt->data[4], 4);
+  // Set the SSRC field.
+  ModuleRTPUtility::AssignUWord32ToBuffer(&recovered->pkt->data[8],
+                                          fec_packet->ssrc);
+}
+
+void ForwardErrorCorrection::FinishRecovery(RecoveredPacket* recovered) {
+  // Set the RTP version to 2.
+  recovered->pkt->data[0] |= 0x80;  // Set the 1st bit.
+  recovered->pkt->data[0] &= 0xbf;  // Clear the 2nd bit.
+
+  // Set the SN field.
+  ModuleRTPUtility::AssignUWord16ToBuffer(&recovered->pkt->data[2],
+                                          recovered->seqNum);
+  // Recover the packet length.
+  recovered->pkt->length = ModuleRTPUtility::BufferToUWord16(
+      recovered->length_recovery) + kRtpHeaderSize;
+}
+
+void ForwardErrorCorrection::XorPackets(const Packet* src_packet,
+                                        RecoveredPacket* dst_packet) {
+  // XOR with the first 2 bytes of the RTP header.
+  for (uint32_t i = 0; i < 2; i++) {
+    dst_packet->pkt->data[i] ^= src_packet->data[i];
+  }
+  // XOR with the 5th to 8th bytes of the RTP header.
+  for (uint32_t i = 4; i < 8; i++) {
+    dst_packet->pkt->data[i] ^= src_packet->data[i];
+  }
+  // XOR with the network-ordered payload size.
+  uint8_t mediaPayloadLength[2];
+  ModuleRTPUtility::AssignUWord16ToBuffer(
+      mediaPayloadLength,
+      src_packet->length - kRtpHeaderSize);
+  dst_packet->length_recovery[0] ^= mediaPayloadLength[0];
+  dst_packet->length_recovery[1] ^= mediaPayloadLength[1];
+
+  // XOR with RTP payload.
+  // TODO(marpan/ajm): Are we doing more XORs than required here?
+  for (int32_t i = kRtpHeaderSize; i < src_packet->length; i++) {
+    dst_packet->pkt->data[i] ^= src_packet->data[i];
+  }
+}
+
+void ForwardErrorCorrection::RecoverPacket(
+    const FecPacket* fecPacket,
+    RecoveredPacket* recPacketToInsert) {
+  InitRecovery(fecPacket, recPacketToInsert);
+  ProtectedPacketList::const_iterator protected_it =
+      fecPacket->protectedPktList.begin();
+  while (protected_it != fecPacket->protectedPktList.end()) {
+    if ((*protected_it)->pkt == NULL) {
+      // This is the packet we're recovering.
+      recPacketToInsert->seqNum = (*protected_it)->seqNum;
+    } else {
+      XorPackets((*protected_it)->pkt, recPacketToInsert);
+    }
+    ++protected_it;
+  }
+  FinishRecovery(recPacketToInsert);
+}
+
+void ForwardErrorCorrection::AttemptRecover(
+    RecoveredPacketList* recoveredPacketList) {
+  FecPacketList::iterator fecPacketListIt = _fecPacketList.begin();
+  while (fecPacketListIt != _fecPacketList.end()) {
+    // Search for each FEC packet's protected media packets.
+    int packets_missing = NumCoveredPacketsMissing(*fecPacketListIt);
+
+    // We can only recover one packet with an FEC packet.
+   if (packets_missing == 1) {
+      // Recovery possible.
+      RecoveredPacket* packetToInsert = new RecoveredPacket;
+      packetToInsert->pkt = NULL;
+      RecoverPacket(*fecPacketListIt, packetToInsert);
+
+      // Add recovered packet to the list of recovered packets and update any
+      // FEC packets covering this packet with a pointer to the data.
+      // TODO(holmer): Consider replacing this with a binary search for the
+      // right position, and then just insert the new packet. Would get rid of
+      // the sort.
+      recoveredPacketList->push_back(packetToInsert);
+      recoveredPacketList->sort(SortablePacket::LessThan);
+      UpdateCoveringFECPackets(packetToInsert);
+      DiscardOldPackets(recoveredPacketList);
+      DiscardFECPacket(*fecPacketListIt);
+      fecPacketListIt = _fecPacketList.erase(fecPacketListIt);
+
+      // A packet has been recovered. We need to check the FEC list again, as
+      // this may allow additional packets to be recovered.
+      // Restart for first FEC packet.
+      fecPacketListIt = _fecPacketList.begin();
+    } else if (packets_missing == 0) {
+        // Either all protected packets arrived or have been recovered. We can
+        // discard this FEC packet.
+        DiscardFECPacket(*fecPacketListIt);
+        fecPacketListIt = _fecPacketList.erase(fecPacketListIt);
+    } else {
+      fecPacketListIt++;
+    }
+  }
+}
+
+int ForwardErrorCorrection::NumCoveredPacketsMissing(
+    const FecPacket* fec_packet) {
+  int packets_missing = 0;
+  ProtectedPacketList::const_iterator it = fec_packet->protectedPktList.begin();
+  for (; it != fec_packet->protectedPktList.end(); ++it) {
+    if ((*it)->pkt == NULL) {
+      ++packets_missing;
+      if (packets_missing > 1) {
+        break;  // We can't recover more than one packet.
+      }
+    }
+  }
+  return packets_missing;
+}
+
+void ForwardErrorCorrection::DiscardFECPacket(FecPacket* fec_packet) {
+  while (!fec_packet->protectedPktList.empty()) {
+    delete fec_packet->protectedPktList.front();
+    fec_packet->protectedPktList.pop_front();
+  }
+  assert(fec_packet->protectedPktList.empty());
+  delete fec_packet;
+}
+
+void ForwardErrorCorrection::DiscardOldPackets(
+    RecoveredPacketList* recoveredPacketList) {
+  while (recoveredPacketList->size() > kMaxMediaPackets) {
+    ForwardErrorCorrection::RecoveredPacket* packet =
+        recoveredPacketList->front();
+    delete packet;
+    recoveredPacketList->pop_front();
+  }
+  assert(recoveredPacketList->size() <= kMaxMediaPackets);
+}
+
+uint16_t ForwardErrorCorrection::ParseSequenceNumber(uint8_t* packet) {
+  return (packet[2] << 8) + packet[3];
+}
+
+int32_t ForwardErrorCorrection::DecodeFEC(
+    ReceivedPacketList* receivedPacketList,
+    RecoveredPacketList* recoveredPacketList) {
+  // TODO(marpan/ajm): can we check for multiple ULP headers, and return an
+  // error?
+  if (recoveredPacketList->size() == kMaxMediaPackets) {
+    const unsigned int seq_num_diff = abs(
+        static_cast<int>(receivedPacketList->front()->seqNum)  -
+        static_cast<int>(recoveredPacketList->back()->seqNum));
+    if (seq_num_diff > kMaxMediaPackets) {
+      // A big gap in sequence numbers. The old recovered packets
+      // are now useless, so it's safe to do a reset.
+      ResetState(recoveredPacketList);
+    }
+  }
+  InsertPackets(receivedPacketList, recoveredPacketList);
+  AttemptRecover(recoveredPacketList);
+  return 0;
+}
+
+uint16_t ForwardErrorCorrection::PacketOverhead() {
+  return kFecHeaderSize + kUlpHeaderSizeLBitSet;
+}
+
+uint16_t ForwardErrorCorrection::LatestSequenceNumber(uint16_t first,
+                                                      uint16_t second) {
+  bool wrap = (first < 0x00ff && second > 0xff00) ||
+          (first > 0xff00 && second < 0x00ff);
+  if (second > first && !wrap)
+    return second;
+  else if (second <= first && !wrap)
+    return first;
+  else if (second < first && wrap)
+    return second;
+  else
+    return first;
+}
+
+} // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/forward_error_correction.h b/src/modules/rtp_rtcp/source/forward_error_correction.h
new file mode 100644
index 0000000..7693331
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/forward_error_correction.h
@@ -0,0 +1,342 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_FORWARD_ERROR_CORRECTION_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_FORWARD_ERROR_CORRECTION_H_
+
+#include <list>
+#include <vector>
+
+#include "modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "system_wrappers/interface/ref_count.h"
+#include "system_wrappers/interface/scoped_refptr.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+// Forward declaration.
+class FecPacket;
+
+/**
+ * Performs codec-independent forward error correction (FEC), based on RFC 5109.
+ * Option exists to enable unequal protection (UEP) across packets.
+ * This is not to be confused with protection within packets
+ * (referred to as uneven level protection (ULP) in RFC 5109).
+ */
+class ForwardErrorCorrection {
+ public:
+  // Maximum number of media packets we can protect
+  static const unsigned int kMaxMediaPackets = 48u;
+
+  // TODO(holmer): As a next step all these struct-like packet classes should be
+  // refactored into proper classes, and their members should be made private.
+  // This will require parts of the functionality in forward_error_correction.cc
+  // and receiver_fec.cc to be refactored into the packet classes.
+  class Packet {
+   public:
+    Packet() : length(0), data(), ref_count_(0) {}
+    virtual ~Packet() {}
+
+    // Add a reference.
+    virtual int32_t AddRef() {
+      return ++ref_count_;
+    }
+
+    // Release a reference. Will delete the object if the reference count
+    // reaches zero.
+    virtual int32_t Release() {
+      int32_t ref_count;
+      ref_count = --ref_count_;
+      if (ref_count == 0)
+        delete this;
+      return ref_count;
+    }
+
+    uint16_t length;  // Length of packet in bytes.
+    uint8_t data[IP_PACKET_SIZE];  // Packet data.
+
+   private:
+    int32_t ref_count_;  // Counts the number of references to a packet.
+  };
+
+  // TODO(holmer): Refactor into a proper class.
+  class SortablePacket {
+   public:
+    // True if first is <= than second.
+    static bool LessThan(const SortablePacket* first,
+                         const SortablePacket* second);
+
+    uint16_t seqNum;
+  };
+
+  /**
+   * The received list parameter of #DecodeFEC() must reference structs of this
+   * type. The lastMediaPktInFrame is not required to be used for correct
+   * recovery, but will reduce delay by allowing #DecodeFEC() to pre-emptively
+   * determine frame completion. If set, we assume a FEC stream, and the
+   * following assumptions must hold:\n
+   *
+   * 1. The media packets in a frame have contiguous sequence numbers, i.e. the
+   *    frame's FEC packets have sequence numbers either lower than the first
+   *    media packet or higher than the last media packet.\n
+   * 2. All FEC packets have a sequence number base equal to the first media
+   *    packet in the corresponding frame.\n
+   *
+   * The ssrc member is needed to ensure we can restore the SSRC field of
+   * recovered packets. In most situations this could be retrieved from other
+   * media packets, but in the case of an FEC packet protecting a single
+   * missing media packet, we have no other means of obtaining it.
+   */
+  // TODO(holmer): Refactor into a proper class.
+  class ReceivedPacket : public SortablePacket {
+   public:
+    uint32_t ssrc;  // SSRC of the current frame. Must be set for FEC
+                    // packets, but not required for media packets.
+    bool isFec;  // Set to true if this is an FEC packet and false
+                 // otherwise.
+    scoped_refptr<Packet> pkt;  // Pointer to the packet storage.
+  };
+
+  /**
+   * The recovered list parameter of #DecodeFEC() will reference structs of
+   * this type.
+   */
+  // TODO(holmer): Refactor into a proper class.
+  class RecoveredPacket : public SortablePacket {
+   public:
+    bool wasRecovered;  // Will be true if this packet was recovered by
+                        // the FEC. Otherwise it was a media packet passed in
+                        // through the received packet list.
+    bool returned;  // True when the packet already has been returned to the
+                    // caller through the callback.
+    uint8_t length_recovery[2];  // Two bytes used for recovering the packet
+                                 // length with XOR operations.
+    scoped_refptr<Packet> pkt;  // Pointer to the packet storage.
+  };
+
+  typedef std::list<Packet*> PacketList;
+  typedef std::list<ReceivedPacket*> ReceivedPacketList;
+  typedef std::list<RecoveredPacket*> RecoveredPacketList;
+
+  /**
+   * \param[in] id Module ID
+   */
+  ForwardErrorCorrection(int32_t id);
+
+  virtual ~ForwardErrorCorrection();
+
+  /**
+   * Generates a list of FEC packets from supplied media packets.
+   *
+   * \param[in]  mediaPacketList     List of media packets to protect, of type
+   *                                 #Packet. All packets must belong to the
+   *                                 same frame and the list must not be empty.
+   * \param[in]  protectionFactor    FEC protection overhead in the [0, 255]
+   *                                 domain. To obtain 100% overhead, or an
+   *                                 equal number of FEC packets as media
+   *                                 packets, use 255.
+   * \param[in] numImportantPackets  The number of "important" packets in the
+   *                                 frame. These packets may receive greater
+   *                                 protection than the remaining packets. The
+   *                                 important packets must be located at the
+   *                                 start of the media packet list. For codecs
+   *                                 with data partitioning, the important
+   *                                 packets may correspond to first partition
+   *                                 packets.
+   * \param[in] useUnequalProtection Parameter to enable/disable unequal
+   *                                 protection  (UEP) across packets. Enabling
+   *                                 UEP will allocate more protection to the
+   *                                 numImportantPackets from the start of the
+   *                                 mediaPacketList.
+   * \param[in]  fec_mask_type       The type of packet mask used in the FEC.
+   *                                 Random or bursty type may be selected. The
+   *                                 bursty type is only defined up to 12 media
+   *                                 packets. If the number of media packets is
+   *                                 above 12, the packets masks from the
+   *                                 random table will be selected.
+   * \param[out] fecPacketList       List of FEC packets, of type #Packet. Must
+   *                                 be empty on entry. The memory available
+   *                                 through the list will be valid until the
+   *                                 next call to GenerateFEC().
+   *
+   * \return 0 on success, -1 on failure.
+   */
+  int32_t GenerateFEC(const PacketList& mediaPacketList,
+                      uint8_t protectionFactor,
+                      int numImportantPackets,
+                      bool useUnequalProtection,
+                      FecMaskType fec_mask_type,
+                      PacketList* fecPacketList);
+
+  /**
+   *  Decodes a list of media and FEC packets. It will parse the input received
+   *  packet list, storing FEC packets internally and inserting media packets to
+   *  the output recovered packet list. The recovered list will be sorted by
+   *  ascending sequence number and have duplicates removed. The function
+   *  should be called as new packets arrive, with the recovered list being
+   *  progressively assembled with each call. The received packet list will be
+   *  empty at output.\n
+   *
+   *  The user will allocate packets submitted through the received list. The
+   *  function will handle allocation of recovered packets and optionally
+   *  deleting of all packet memory. The user may delete the recovered list
+   *  packets, in which case they must remove deleted packets from the
+   *  recovered list.\n
+   *
+   * \param[in]  receivedPacketList  List of new received packets, of type
+   *                                 #ReceivedPacket, belonging to a single
+   *                                 frame. At output the list will be empty,
+   *                                 with packets  either stored internally,
+   *                                 or accessible through the recovered list.
+   * \param[out] recoveredPacketList List of recovered media packets, of type
+   *                                 #RecoveredPacket, belonging to a single
+   *                                 frame. The memory available through the
+   *                                 list will be valid until the next call to
+   *                                 DecodeFEC().
+   *
+   * \return 0 on success, -1 on failure.
+   */
+  int32_t DecodeFEC(ReceivedPacketList* receivedPacketList,
+                    RecoveredPacketList* recoveredPacketList);
+
+  // Get the number of FEC packets, given the number of media packets and the
+  // protection factor.
+  int GetNumberOfFecPackets(int numMediaPackets,
+                            int protectionFactor);
+
+  /**
+   * Gets the size in bytes of the FEC/ULP headers, which must be accounted for
+   * as packet overhead.
+   * \return Packet overhead in bytes.
+   */
+  static uint16_t PacketOverhead();
+
+  // Reset internal states from last frame and clear the recoveredPacketList.
+  // Frees all memory allocated by this class.
+  void ResetState(RecoveredPacketList* recoveredPacketList);
+
+ private:
+  typedef std::list<FecPacket*> FecPacketList;
+
+  void GenerateFecUlpHeaders(const PacketList& mediaPacketList,
+                             uint8_t* packetMask,
+                             bool lBit,
+                             int numFecPackets);
+
+  // Analyzes |media_packets| for holes in the sequence and inserts zero columns
+  // into the |packet_mask| where those holes are found. Zero columns means that
+  // those packets will have no protection.
+  // Returns the number of bits used for one row of the new packet mask.
+  // Requires that |packet_mask| has at least 6 * |num_fec_packets| bytes
+  // allocated.
+  int InsertZerosInBitMasks(const PacketList& media_packets,
+                            uint8_t* packet_mask,
+                            int num_mask_bytes,
+                            int num_fec_packets);
+
+  // Inserts |num_zeros| zero columns into |new_mask| at position
+  // |new_bit_index|. If the current byte of |new_mask| can't fit all zeros, the
+  // byte will be filled with zeros from |new_bit_index|, but the next byte will
+  // be untouched.
+  static void InsertZeroColumns(int num_zeros,
+                                uint8_t* new_mask,
+                                int new_mask_bytes,
+                                int num_fec_packets,
+                                int new_bit_index);
+
+  // Copies the left most bit column from the byte pointed to by
+  // |old_bit_index| in |old_mask| to the right most column of the byte pointed
+  // to by |new_bit_index| in |new_mask|. |old_mask_bytes| and |new_mask_bytes|
+  // represent the number of bytes used per row for each mask. |num_fec_packets|
+  // represent the number of rows of the masks.
+  // The copied bit is shifted out from |old_mask| and is shifted one step to
+  // the left in |new_mask|. |new_mask| will contain "xxxx xxn0" after this
+  // operation, where x are previously inserted bits and n is the new bit.
+  static void CopyColumn(uint8_t* new_mask,
+                         int new_mask_bytes,
+                         uint8_t* old_mask,
+                         int old_mask_bytes,
+                         int num_fec_packets,
+                         int new_bit_index,
+                         int old_bit_index);
+
+  void GenerateFecBitStrings(const PacketList& mediaPacketList,
+                             uint8_t* packetMask,
+                             int numFecPackets,
+                             bool lBit);
+
+  // Insert received packets into FEC or recovered list.
+  void InsertPackets(ReceivedPacketList* receivedPacketList,
+                     RecoveredPacketList* recoveredPacketList);
+
+  // Insert media packet into recovered packet list. We delete duplicates.
+  void InsertMediaPacket(ReceivedPacket* rxPacket,
+                         RecoveredPacketList* recoveredPacketList);
+
+  // Assigns pointers to the recovered packet from all FEC packets which cover
+  // it.
+  // Note: This reduces the complexity when we want to try to recover a packet
+  // since we don't have to find the intersection between recovered packets and
+  // packets covered by the FEC packet.
+  void UpdateCoveringFECPackets(RecoveredPacket* packet);
+
+  // Insert packet into FEC list. We delete duplicates.
+  void InsertFECPacket(ReceivedPacket* rxPacket,
+                       const RecoveredPacketList* recoveredPacketList);
+
+  // Assigns pointers to already recovered packets covered by this FEC packet.
+  static void AssignRecoveredPackets(
+      FecPacket* fec_packet,
+      const RecoveredPacketList* recovered_packets);
+
+  // Insert into recovered list in correct position.
+  void InsertRecoveredPacket(
+      RecoveredPacket* recPacketToInsert,
+      RecoveredPacketList* recoveredPacketList);
+
+  // Attempt to recover missing packets.
+  void AttemptRecover(RecoveredPacketList* recoveredPacketList);
+
+  // Initializes the packet recovery using the FEC packet.
+  static  void InitRecovery(const FecPacket* fec_packet,
+                            RecoveredPacket* recovered);
+
+  // Performs XOR between |src_packet| and |dst_packet| and stores the result
+  // in |dst_packet|.
+  static void XorPackets(const Packet* src_packet,
+                         RecoveredPacket* dst_packet);
+
+  // Finish up the recovery of a packet.
+  static  void FinishRecovery(RecoveredPacket* recovered);
+
+  // Recover a missing packet.
+  void RecoverPacket(const FecPacket* fecPacket,
+                     RecoveredPacket* recPacketToInsert);
+
+  // Get the number of missing media packets which are covered by this
+  // FEC packet. An FEC packet can recover at most one packet, and if zero
+  // packets are missing the FEC packet can be discarded.
+  // This function returns 2 when two or more packets are missing.
+  static int NumCoveredPacketsMissing(const FecPacket* fec_packet);
+
+  static uint16_t LatestSequenceNumber(uint16_t first,
+                                       uint16_t second);
+
+  static void DiscardFECPacket(FecPacket* fec_packet);
+  static void DiscardOldPackets(RecoveredPacketList* recoveredPacketList);
+  static uint16_t ParseSequenceNumber(uint8_t* packet);
+
+  int32_t _id;
+  std::vector<Packet> _generatedFecPackets;
+  FecPacketList _fecPacketList;
+  bool _fecPacketReceived;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_FORWARD_ERROR_CORRECTION_H_
diff --git a/src/modules/rtp_rtcp/source/forward_error_correction_internal.cc b/src/modules/rtp_rtcp/source/forward_error_correction_internal.cc
new file mode 100644
index 0000000..eb84fa7
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/forward_error_correction_internal.cc
@@ -0,0 +1,436 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/forward_error_correction_internal.h"
+
+#include <cassert>
+#include <cstring>
+
+#include "modules/rtp_rtcp/source/fec_private_tables_random.h"
+#include "modules/rtp_rtcp/source/fec_private_tables_bursty.h"
+
+namespace {
+
+// Allow for different modes of protection for packets in UEP case.
+enum ProtectionMode
+{
+    kModeNoOverlap,
+    kModeOverlap,
+    kModeBiasFirstPacket,
+};
+
+/**
+  * Fits an input mask (subMask) to an output mask.
+  * The mask is a matrix where the rows are the FEC packets,
+  * and the columns are the source packets the FEC is applied to.
+  * Each row of the mask is represented by a number of mask bytes.
+  *
+  * \param[in]  numMaskBytes    The number of mask bytes of output mask.
+  * \param[in]  numSubMaskBytes The number of mask bytes of input mask.
+  * \param[in]  numRows         The number of rows of the input mask.
+  * \param[in]  subMask         A pointer to hold the input mask, of size
+  *                             [0, numRows * numSubMaskBytes]
+  * \param[out] packetMask      A pointer to hold the output mask, of size
+  *                             [0, x * numMaskBytes], where x >= numRows.
+  */
+void FitSubMask(int numMaskBytes,
+                int numSubMaskBytes,
+                int numRows,
+                const uint8_t* subMask,
+                uint8_t* packetMask)
+{
+    if (numMaskBytes == numSubMaskBytes)
+    {
+        memcpy(packetMask, subMask, numRows * numSubMaskBytes);
+    }
+    else
+    {
+        for (int i = 0; i < numRows; i++)
+        {
+            int pktMaskIdx = i * numMaskBytes;
+            int pktMaskIdx2 = i * numSubMaskBytes;
+            for (int j = 0; j < numSubMaskBytes; j++)
+            {
+                packetMask[pktMaskIdx] = subMask[pktMaskIdx2];
+                pktMaskIdx++;
+                pktMaskIdx2++;
+            }
+        }
+    }
+}
+
+/**
+  * Shifts a mask by number of columns (bits), and fits it to an output mask.
+  * The mask is a matrix where the rows are the FEC packets,
+  * and the columns are the source packets the FEC is applied to.
+  * Each row of the mask is represented by a number of mask bytes.
+  *
+  * \param[in]  numMaskBytes     The number of mask bytes of output mask.
+  * \param[in]  numSubMaskBytes  The number of mask bytes of input mask.
+  * \param[in]  numColumnShift   The number columns to be shifted, and
+  *                              the starting row for the output mask.
+  * \param[in]  endRow           The ending row for the output mask.
+  * \param[in]  subMask          A pointer to hold the input mask, of size
+  *                              [0, (endRowFEC - startRowFec) * numSubMaskBytes]
+  * \param[out] packetMask       A pointer to hold the output mask, of size
+  *                              [0, x * numMaskBytes], where x >= endRowFEC.
+  */
+// TODO (marpan): This function is doing three things at the same time:
+// shift within a byte, byte shift and resizing.
+// Split up into subroutines.
+void ShiftFitSubMask(int numMaskBytes,
+                     int resMaskBytes,
+                     int numColumnShift,
+                     int endRow,
+                     const uint8_t* subMask,
+                     uint8_t* packetMask)
+{
+
+    // Number of bit shifts within a byte
+    const int numBitShifts = (numColumnShift % 8);
+    const int numByteShifts = numColumnShift >> 3;
+
+    // Modify new mask with sub-mask21.
+
+    // Loop over the remaining FEC packets.
+    for (int i = numColumnShift; i < endRow; i++)
+    {
+        // Byte index of new mask, for row i and column resMaskBytes,
+        // offset by the number of bytes shifts
+        int pktMaskIdx = i * numMaskBytes + resMaskBytes - 1 + numByteShifts;
+        // Byte index of subMask, for row i and column resMaskBytes
+        int pktMaskIdx2 =
+            (i - numColumnShift) * resMaskBytes + resMaskBytes - 1;
+
+        uint8_t shiftRightCurrByte = 0;
+        uint8_t shiftLeftPrevByte = 0;
+        uint8_t combNewByte = 0;
+
+        // Handle case of numMaskBytes > resMaskBytes:
+        // For a given row, copy the rightmost "numBitShifts" bits
+        // of the last byte of subMask into output mask.
+        if (numMaskBytes > resMaskBytes)
+        {
+            shiftLeftPrevByte =
+                (subMask[pktMaskIdx2] << (8 - numBitShifts));
+            packetMask[pktMaskIdx + 1] = shiftLeftPrevByte;
+        }
+
+        // For each row i (FEC packet), shift the bit-mask of the subMask.
+        // Each row of the mask contains "resMaskBytes" of bytes.
+        // We start from the last byte of the subMask and move to first one.
+        for (int j = resMaskBytes - 1; j > 0; j--)
+        {
+            // Shift current byte of sub21 to the right by "numBitShifts".
+            shiftRightCurrByte =
+                subMask[pktMaskIdx2] >> numBitShifts;
+
+            // Fill in shifted bits with bits from the previous (left) byte:
+            // First shift the previous byte to the left by "8-numBitShifts".
+            shiftLeftPrevByte =
+                (subMask[pktMaskIdx2 - 1] << (8 - numBitShifts));
+
+            // Then combine both shifted bytes into new mask byte.
+            combNewByte = shiftRightCurrByte | shiftLeftPrevByte;
+
+            // Assign to new mask.
+            packetMask[pktMaskIdx] = combNewByte;
+            pktMaskIdx--;
+            pktMaskIdx2--;
+        }
+        // For the first byte in the row (j=0 case).
+        shiftRightCurrByte = subMask[pktMaskIdx2] >> numBitShifts;
+        packetMask[pktMaskIdx] = shiftRightCurrByte;
+
+    }
+}
+}  // namespace
+
+namespace webrtc {
+namespace internal {
+
+PacketMaskTable::PacketMaskTable(FecMaskType fec_mask_type,
+                                 int num_media_packets)
+    : fec_mask_type_(InitMaskType(fec_mask_type, num_media_packets)),
+      fec_packet_mask_table_(InitMaskTable(fec_mask_type_)) {
+}
+
+// Sets |fec_mask_type_| to the type of packet mask selected. The type of
+// packet mask selected is based on |fec_mask_type| and |numMediaPackets|.
+// If |numMediaPackets| is larger than the maximum allowed by |fec_mask_type|
+// for the bursty type, then the random type is selected.
+FecMaskType PacketMaskTable::InitMaskType(FecMaskType fec_mask_type,
+                                          int num_media_packets) {
+  // The mask should not be bigger than |packetMaskTbl|.
+  assert(num_media_packets <= static_cast<int>(sizeof(kPacketMaskRandomTbl) /
+                                               sizeof(*kPacketMaskRandomTbl)));
+  switch (fec_mask_type) {
+    case kFecMaskRandom: {
+      return kFecMaskRandom;
+    }
+    case kFecMaskBursty: {
+      int max_media_packets = static_cast<int>(sizeof(kPacketMaskBurstyTbl) /
+                                               sizeof(*kPacketMaskBurstyTbl));
+      if (num_media_packets > max_media_packets) {
+        return kFecMaskRandom;
+      } else {
+        return kFecMaskBursty;
+      }
+    }
+  }
+  assert(false);
+  return kFecMaskRandom;
+}
+
+// Returns the pointer to the packet mask tables corresponding to type
+// |fec_mask_type|.
+const uint8_t*** PacketMaskTable::InitMaskTable(FecMaskType fec_mask_type) {
+  switch (fec_mask_type) {
+    case kFecMaskRandom: {
+      return kPacketMaskRandomTbl;
+    }
+    case kFecMaskBursty: {
+      return kPacketMaskBurstyTbl;
+    }
+  }
+  assert(false);
+  return kPacketMaskRandomTbl;
+}
+
+// Remaining protection after important (first partition) packet protection
+void RemainingPacketProtection(int numMediaPackets,
+                               int numFecRemaining,
+                               int numFecForImpPackets,
+                               int numMaskBytes,
+                               ProtectionMode mode,
+                               uint8_t* packetMask,
+                               const PacketMaskTable& mask_table)
+{
+    if (mode == kModeNoOverlap)
+    {
+        // subMask21
+
+        const int lBit =
+            (numMediaPackets - numFecForImpPackets) > 16 ? 1 : 0;
+
+        const int resMaskBytes =
+            (lBit == 1) ? kMaskSizeLBitSet : kMaskSizeLBitClear;
+
+        const uint8_t* packetMaskSub21 =
+            mask_table.fec_packet_mask_table()
+            [numMediaPackets - numFecForImpPackets - 1]
+            [numFecRemaining - 1];
+
+        ShiftFitSubMask(numMaskBytes, resMaskBytes, numFecForImpPackets,
+                        (numFecForImpPackets + numFecRemaining),
+                        packetMaskSub21, packetMask);
+
+    }
+    else if (mode == kModeOverlap || mode == kModeBiasFirstPacket)
+    {
+        // subMask22
+
+        const uint8_t* packetMaskSub22 =
+            mask_table.fec_packet_mask_table()
+            [numMediaPackets - 1][numFecRemaining - 1];
+
+        FitSubMask(numMaskBytes, numMaskBytes, numFecRemaining, packetMaskSub22,
+                   &packetMask[numFecForImpPackets * numMaskBytes]);
+
+        if (mode == kModeBiasFirstPacket)
+        {
+            for (int i = 0; i < numFecRemaining; i++)
+            {
+                int pktMaskIdx = i * numMaskBytes;
+                packetMask[pktMaskIdx] = packetMask[pktMaskIdx] | (1 << 7);
+            }
+        }
+    }
+    else
+    {
+        assert(false);
+    }
+
+}
+
+// Protection for important (first partition) packets
+void ImportantPacketProtection(int numFecForImpPackets,
+                               int numImpPackets,
+                               int numMaskBytes,
+                               uint8_t* packetMask,
+                               const PacketMaskTable& mask_table)
+{
+    const int lBit = numImpPackets > 16 ? 1 : 0;
+    const int numImpMaskBytes =
+        (lBit == 1) ? kMaskSizeLBitSet : kMaskSizeLBitClear;
+
+    // Get subMask1 from table
+    const uint8_t* packetMaskSub1 =
+        mask_table.fec_packet_mask_table()
+        [numImpPackets - 1][numFecForImpPackets - 1];
+
+    FitSubMask(numMaskBytes, numImpMaskBytes,
+               numFecForImpPackets, packetMaskSub1, packetMask);
+
+}
+
+// This function sets the protection allocation: i.e., how many FEC packets
+// to use for numImp (1st partition) packets, given the: number of media
+// packets, number of FEC packets, and number of 1st partition packets.
+int SetProtectionAllocation(int numMediaPackets,
+                            int numFecPackets,
+                            int numImpPackets)
+{
+
+    // TODO (marpan): test different cases for protection allocation:
+
+    // Use at most (allocPar * numFecPackets) for important packets.
+    float allocPar = 0.5;
+    int maxNumFecForImp = allocPar * numFecPackets;
+
+    int numFecForImpPackets = (numImpPackets < maxNumFecForImp) ?
+        numImpPackets : maxNumFecForImp;
+
+    // Fall back to equal protection in this case
+    if (numFecPackets == 1 && (numMediaPackets > 2 * numImpPackets))
+    {
+        numFecForImpPackets = 0;
+    }
+
+    return numFecForImpPackets;
+}
+
+// Modification for UEP: reuse the off-line tables for the packet masks.
+// Note: these masks were designed for equal packet protection case,
+// assuming random packet loss.
+
+// Current version has 3 modes (options) to build UEP mask from existing ones.
+// Various other combinations may be added in future versions.
+// Longer-term, we may add another set of tables specifically for UEP cases.
+// TODO (marpan): also consider modification of masks for bursty loss cases.
+
+// Mask is characterized as (#packets_to_protect, #fec_for_protection).
+// Protection factor defined as: (#fec_for_protection / #packets_to_protect).
+
+// Let k=numMediaPackets, n=total#packets, (n-k)=numFecPackets, m=numImpPackets.
+
+// For ProtectionMode 0 and 1:
+// one mask (subMask1) is used for 1st partition packets,
+// the other mask (subMask21/22, for 0/1) is for the remaining FEC packets.
+
+// In both mode 0 and 1, the packets of 1st partition (numImpPackets) are
+// treated equally important, and are afforded more protection than the
+// residual partition packets.
+
+// For numImpPackets:
+// subMask1 = (m, t): protection = t/(m), where t=F(k,n-k,m).
+// t=F(k,n-k,m) is the number of packets used to protect first partition in
+// subMask1. This is determined from the function SetProtectionAllocation().
+
+// For the left-over protection:
+// Mode 0: subMask21 = (k-m,n-k-t): protection = (n-k-t)/(k-m)
+// mode 0 has no protection overlap between the two partitions.
+// For mode 0, we would typically set t = min(m, n-k).
+
+
+// Mode 1: subMask22 = (k, n-k-t), with protection (n-k-t)/(k)
+// mode 1 has protection overlap between the two partitions (preferred).
+
+// For ProtectionMode 2:
+// This gives 1st packet of list (which is 1st packet of 1st partition) more
+// protection. In mode 2, the equal protection mask (which is obtained from
+// mode 1 for t=0) is modified (more "1s" added in 1st column of packet mask)
+// to bias higher protection for the 1st source packet.
+
+// Protection Mode 2 may be extended for a sort of sliding protection
+// (i.e., vary the number/density of "1s" across columns) across packets.
+
+void UnequalProtectionMask(int numMediaPackets,
+                           int numFecPackets,
+                           int numImpPackets,
+                           int numMaskBytes,
+                           uint8_t* packetMask,
+                           const PacketMaskTable& mask_table)
+{
+
+    // Set Protection type and allocation
+    // TODO (marpan): test/update for best mode and some combinations thereof.
+
+    ProtectionMode mode = kModeOverlap;
+    int numFecForImpPackets = 0;
+
+    if (mode != kModeBiasFirstPacket)
+    {
+        numFecForImpPackets = SetProtectionAllocation(numMediaPackets,
+                                                      numFecPackets,
+                                                      numImpPackets);
+    }
+
+    int numFecRemaining = numFecPackets - numFecForImpPackets;
+    // Done with setting protection type and allocation
+
+    //
+    // Generate subMask1
+    //
+    if (numFecForImpPackets > 0)
+    {
+        ImportantPacketProtection(numFecForImpPackets, numImpPackets,
+                                  numMaskBytes, packetMask,
+                                  mask_table);
+    }
+
+    //
+    // Generate subMask2
+    //
+    if (numFecRemaining > 0)
+    {
+        RemainingPacketProtection(numMediaPackets, numFecRemaining,
+                                  numFecForImpPackets, numMaskBytes,
+                                  mode, packetMask, mask_table);
+    }
+
+}
+
+void GeneratePacketMasks(int numMediaPackets,
+                         int numFecPackets,
+                         int numImpPackets,
+                         bool useUnequalProtection,
+                         const PacketMaskTable& mask_table,
+                         uint8_t* packetMask)
+{
+    assert(numMediaPackets > 0);
+    assert(numFecPackets <= numMediaPackets && numFecPackets > 0);
+    assert(numImpPackets <= numMediaPackets && numImpPackets >= 0);
+
+    int lBit = numMediaPackets > 16 ? 1 : 0;
+    const int numMaskBytes =
+        (lBit == 1) ? kMaskSizeLBitSet : kMaskSizeLBitClear;
+
+    // Equal-protection for these cases
+    if (!useUnequalProtection || numImpPackets == 0)
+    {
+        // Retrieve corresponding mask table directly:for equal-protection case.
+        // Mask = (k,n-k), with protection factor = (n-k)/k,
+        // where k = numMediaPackets, n=total#packets, (n-k)=numFecPackets.
+        memcpy(packetMask,
+               mask_table.fec_packet_mask_table()[numMediaPackets - 1]
+                                                 [numFecPackets - 1],
+               numFecPackets * numMaskBytes);
+    }
+    else  //UEP case
+    {
+        UnequalProtectionMask(numMediaPackets, numFecPackets, numImpPackets,
+                              numMaskBytes, packetMask, mask_table);
+
+    } // End of UEP modification
+} //End of GetPacketMasks
+
+}  // namespace internal
+}  // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/forward_error_correction_internal.h b/src/modules/rtp_rtcp/source/forward_error_correction_internal.h
new file mode 100644
index 0000000..799ce04
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/forward_error_correction_internal.h
@@ -0,0 +1,71 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/forward_error_correction.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+// Packet mask size in bytes (L bit is set).
+static const int kMaskSizeLBitSet = 6;
+// Packet mask size in bytes (L bit is cleared).
+static const int kMaskSizeLBitClear = 2;
+
+namespace internal {
+
+class PacketMaskTable {
+ public:
+  PacketMaskTable(FecMaskType fec_mask_type, int num_media_packets);
+  ~PacketMaskTable() {
+  }
+  FecMaskType fec_mask_type() const { return fec_mask_type_; }
+  const uint8_t*** fec_packet_mask_table() const {
+    return fec_packet_mask_table_;
+  }
+ private:
+  FecMaskType InitMaskType(FecMaskType fec_mask_type,
+                           int num_media_packets);
+  const uint8_t*** InitMaskTable(FecMaskType fec_mask_type_);
+  const FecMaskType fec_mask_type_;
+  const uint8_t*** fec_packet_mask_table_;
+};
+
+ /**
+  * Returns an array of packet masks. The mask of a single FEC packet
+  * corresponds to a number of mask bytes. The mask indicates which
+  * media packets should be protected by the FEC packet.
+
+  * \param[in]  numMediaPackets       The number of media packets to protect.
+  *                                    [1, maxMediaPackets].
+  * \param[in]  numFecPackets         The number of FEC packets which will
+  *                                    be generated. [1, numMediaPackets].
+  * \param[in]  numImpPackets         The number of important packets.
+  *                                    [0, numMediaPackets].
+  *                                   numImpPackets = 0 is the equal
+  *                                    protection scenario.
+  * \param[in]  useUnequalProtection  Enables unequal protection: allocates
+  *                                    more protection to the numImpPackets.
+  * \param[in]  mask_table            An instance of the |PacketMaskTable|
+  *                                    class, which contains the type of FEC
+  *                                    packet mask used, and a pointer to the
+  *                                    corresponding packet masks.
+  * \param[out] packetMask            A pointer to hold the packet mask array,
+  *                                    of size:
+  *                                    numFecPackets * "number of mask bytes".
+  */
+void GeneratePacketMasks(int numMediaPackets,
+                         int numFecPackets,
+                         int numImpPackets,
+                         bool useUnequalProtection,
+                         const PacketMaskTable& mask_table,
+                         uint8_t* packetMask);
+
+} // namespace internal
+} // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/mock/mock_rtp_receiver_video.h b/src/modules/rtp_rtcp/source/mock/mock_rtp_receiver_video.h
new file mode 100644
index 0000000..9498b74
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/mock/mock_rtp_receiver_video.h
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_MOCK_MOCK_RTP_RECEIVER_VIDEO_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_MOCK_MOCK_RTP_RECEIVER_VIDEO_H_
+
+#include "modules/rtp_rtcp/source/rtp_receiver_video.h"
+
+namespace webrtc {
+
+class MockRTPReceiverVideo : public RTPReceiverVideo {
+ public:
+  MockRTPReceiverVideo() : RTPReceiverVideo(0, NULL, NULL) {}
+  MOCK_METHOD1(ChangeUniqueId,
+      void(const WebRtc_Word32 id));
+  MOCK_METHOD3(ReceiveRecoveredPacketCallback,
+      WebRtc_Word32(WebRtcRTPHeader* rtpHeader,
+                    const WebRtc_UWord8* payloadData,
+                    const WebRtc_UWord16 payloadDataLength));
+  MOCK_METHOD3(CallbackOfReceivedPayloadData,
+      WebRtc_Word32(const WebRtc_UWord8* payloadData,
+                    const WebRtc_UWord16 payloadSize,
+                    const WebRtcRTPHeader* rtpHeader));
+  MOCK_CONST_METHOD0(TimeStamp,
+      WebRtc_UWord32());
+  MOCK_CONST_METHOD0(SequenceNumber,
+      WebRtc_UWord16());
+  MOCK_CONST_METHOD2(PayloadTypeToPayload,
+      WebRtc_UWord32(const WebRtc_UWord8 payloadType,
+                     ModuleRTPUtility::Payload*& payload));
+  MOCK_CONST_METHOD2(RetransmitOfOldPacket,
+      bool(const WebRtc_UWord16 sequenceNumber,
+           const WebRtc_UWord32 rtpTimeStamp));
+  MOCK_CONST_METHOD0(REDPayloadType,
+      WebRtc_Word8());
+};
+
+}  // namespace webrtc
+
+#endif  //WEBRTC_MODULES_RTP_RTCP_SOURCE_MOCK_MOCK_RTP_RECEIVER_VIDEO_H_
diff --git a/src/modules/rtp_rtcp/source/producer_fec.cc b/src/modules/rtp_rtcp/source/producer_fec.cc
new file mode 100644
index 0000000..52d5086
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/producer_fec.cc
@@ -0,0 +1,260 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/producer_fec.h"
+
+#include "modules/rtp_rtcp/source/forward_error_correction.h"
+#include "modules/rtp_rtcp/source/rtp_utility.h"
+
+namespace webrtc {
+
+enum { kREDForFECHeaderLength = 1 };
+// This controls the maximum amount of excess overhead (actual - target)
+// allowed in order to trigger GenerateFEC(), before |params_.max_fec_frames|
+// is reached. Overhead here is defined as relative to number of media packets.
+enum { kMaxExcessOverhead = 50 };  // Q8.
+// This is the minimum number of media packets required (above some protection
+// level) in order to trigger GenerateFEC(), before |params_.max_fec_frames| is
+// reached.
+enum { kMinimumMediaPackets = 4 };
+// Threshold on the received FEC protection level, above which we enforce at
+// least |kMinimumMediaPackets| packets for the FEC code. Below this
+// threshold |kMinimumMediaPackets| is set to default value of 1.
+enum { kHighProtectionThreshold = 80 };  // Corresponds to ~30 overhead, range
+// is 0 to 255, where 255 corresponds to 100% overhead (relative to number of
+// media packets).
+
+struct RtpPacket {
+  WebRtc_UWord16 rtpHeaderLength;
+  ForwardErrorCorrection::Packet* pkt;
+};
+
+RedPacket::RedPacket(int length)
+    : data_(new uint8_t[length]),
+      length_(length),
+      header_length_(0) {
+}
+
+RedPacket::~RedPacket() {
+  delete [] data_;
+}
+
+void RedPacket::CreateHeader(const uint8_t* rtp_header, int header_length,
+                             int red_pl_type, int pl_type) {
+  assert(header_length + kREDForFECHeaderLength <= length_);
+  memcpy(data_, rtp_header, header_length);
+  // Replace payload type.
+  data_[1] &= 0x80;
+  data_[1] += red_pl_type;
+  // Add RED header
+  // f-bit always 0
+  data_[header_length] = pl_type;
+  header_length_ = header_length + kREDForFECHeaderLength;
+}
+
+void RedPacket::SetSeqNum(int seq_num) {
+  assert(seq_num >= 0 && seq_num < (1<<16));
+  ModuleRTPUtility::AssignUWord16ToBuffer(&data_[2], seq_num);
+}
+
+void RedPacket::AssignPayload(const uint8_t* payload, int length) {
+  assert(header_length_ + length <= length_);
+  memcpy(data_ + header_length_, payload, length);
+}
+
+void RedPacket::ClearMarkerBit() {
+  data_[1] &= 0x7F;
+}
+
+uint8_t* RedPacket::data() const {
+  return data_;
+}
+
+int RedPacket::length() const {
+  return length_;
+}
+
+ProducerFec::ProducerFec(ForwardErrorCorrection* fec)
+    : fec_(fec),
+      media_packets_fec_(),
+      fec_packets_(),
+      num_frames_(0),
+      incomplete_frame_(false),
+      num_first_partition_(0),
+      minimum_media_packets_fec_(1),
+      params_(),
+      new_params_() {
+  memset(&params_, 0, sizeof(params_));
+  memset(&new_params_, 0, sizeof(new_params_));
+}
+
+ProducerFec::~ProducerFec() {
+  DeletePackets();
+}
+
+void ProducerFec::SetFecParameters(const FecProtectionParams* params,
+                                   int num_first_partition) {
+  // Number of first partition packets cannot exceed kMaxMediaPackets
+  assert(params->fec_rate >= 0 && params->fec_rate < 256);
+  if (num_first_partition >
+      static_cast<int>(ForwardErrorCorrection::kMaxMediaPackets)) {
+      num_first_partition =
+          ForwardErrorCorrection::kMaxMediaPackets;
+  }
+  // Store the new params and apply them for the next set of FEC packets being
+  // produced.
+  new_params_ = *params;
+  num_first_partition_ = num_first_partition;
+  if (params->fec_rate > kHighProtectionThreshold) {
+    minimum_media_packets_fec_ = kMinimumMediaPackets;
+  } else {
+    minimum_media_packets_fec_ = 1;
+  }
+}
+
+RedPacket* ProducerFec::BuildRedPacket(const uint8_t* data_buffer,
+                                       int payload_length,
+                                       int rtp_header_length,
+                                       int red_pl_type) {
+  RedPacket* red_packet = new RedPacket(payload_length +
+                                        kREDForFECHeaderLength +
+                                        rtp_header_length);
+  int pl_type = data_buffer[1] & 0x7f;
+  red_packet->CreateHeader(data_buffer, rtp_header_length,
+                           red_pl_type, pl_type);
+  red_packet->AssignPayload(data_buffer + rtp_header_length, payload_length);
+  return red_packet;
+}
+
+int ProducerFec::AddRtpPacketAndGenerateFec(const uint8_t* data_buffer,
+                                            int payload_length,
+                                            int rtp_header_length) {
+  assert(fec_packets_.empty());
+  if (media_packets_fec_.empty()) {
+    params_ = new_params_;
+  }
+  incomplete_frame_ = true;
+  const bool marker_bit = (data_buffer[1] & kRtpMarkerBitMask) ? true : false;
+  if (media_packets_fec_.size() < ForwardErrorCorrection::kMaxMediaPackets) {
+    // Generic FEC can only protect up to kMaxMediaPackets packets.
+    ForwardErrorCorrection::Packet* packet = new ForwardErrorCorrection::Packet;
+    packet->length = payload_length + rtp_header_length;
+    memcpy(packet->data, data_buffer, packet->length);
+    media_packets_fec_.push_back(packet);
+  }
+  if (marker_bit) {
+    ++num_frames_;
+    incomplete_frame_ = false;
+  }
+  // Produce FEC over at most |params_.max_fec_frames| frames, or as soon as:
+  // (1) the excess overhead (actual overhead - requested/target overhead) is
+  // less than |kMaxExcessOverhead|, and
+  // (2) at least |minimum_media_packets_fec_| media packets is reached.
+  if (!incomplete_frame_ &&
+      (num_frames_ == params_.max_fec_frames ||
+          (ExcessOverheadBelowMax() && MinimumMediaPacketsReached()))) {
+    assert(num_first_partition_ <=
+           static_cast<int>(ForwardErrorCorrection::kMaxMediaPackets));
+    int ret = fec_->GenerateFEC(media_packets_fec_,
+                                params_.fec_rate,
+                                num_first_partition_,
+                                params_.use_uep_protection,
+                                params_.fec_mask_type,
+                                &fec_packets_);
+    if (fec_packets_.empty()) {
+      num_frames_ = 0;
+      DeletePackets();
+    }
+    return ret;
+  }
+  return 0;
+}
+
+// Returns true if the excess overhead (actual - target) for the FEC is below
+// the amount |kMaxExcessOverhead|. This effects the lower protection level
+// cases and low number of media packets/frame. The target overhead is given by
+// |params_.fec_rate|, and is only achievable in the limit of large number of
+// media packets.
+bool ProducerFec::ExcessOverheadBelowMax() {
+  return ((Overhead() - params_.fec_rate) < kMaxExcessOverhead);
+}
+
+// Returns true if the media packet list for the FEC is at least
+// |minimum_media_packets_fec_|. This condition tries to capture the effect
+// that, for the same amount of protection/overhead, longer codes
+// (e.g. (2k,2m) vs (k,m)) are generally more effective at recovering losses.
+bool ProducerFec::MinimumMediaPacketsReached() {
+  float avg_num_packets_frame = static_cast<float>(media_packets_fec_.size()) /
+                                num_frames_;
+  if (avg_num_packets_frame < 2.0f) {
+  return (static_cast<int>(media_packets_fec_.size()) >=
+      minimum_media_packets_fec_);
+  } else {
+    // For larger rates (more packets/frame), increase the threshold.
+    return (static_cast<int>(media_packets_fec_.size()) >=
+        minimum_media_packets_fec_ + 1);
+  }
+}
+
+bool ProducerFec::FecAvailable() const {
+  return (fec_packets_.size() > 0);
+}
+
+RedPacket* ProducerFec::GetFecPacket(int red_pl_type,
+                                     int fec_pl_type,
+                                     uint16_t seq_num,
+                                     int rtp_header_length) {
+  if (fec_packets_.empty())
+    return NULL;
+  // Build FEC packet. The FEC packets in |fec_packets_| doesn't
+  // have RTP headers, so we're reusing the header from the last
+  // media packet.
+  ForwardErrorCorrection::Packet* packet_to_send = fec_packets_.front();
+  ForwardErrorCorrection::Packet* last_media_packet = media_packets_fec_.back();
+  RedPacket* return_packet = new RedPacket(packet_to_send->length +
+                                           kREDForFECHeaderLength +
+                                           rtp_header_length);
+  return_packet->CreateHeader(last_media_packet->data,
+                              rtp_header_length,
+                              red_pl_type,
+                              fec_pl_type);
+  return_packet->SetSeqNum(seq_num);
+  return_packet->ClearMarkerBit();
+  return_packet->AssignPayload(packet_to_send->data, packet_to_send->length);
+  fec_packets_.pop_front();
+  if (fec_packets_.empty()) {
+    // Done with all the FEC packets. Reset for next run.
+    DeletePackets();
+    num_frames_ = 0;
+  }
+  return return_packet;
+}
+
+int ProducerFec::Overhead() const {
+  // Overhead is defined as relative to the number of media packets, and not
+  // relative to total number of packets. This definition is inhereted from the
+  // protection factor produced by video_coding module and how the FEC
+  // generation is implemented.
+  assert(!media_packets_fec_.empty());
+  int num_fec_packets = fec_->GetNumberOfFecPackets(media_packets_fec_.size(),
+                                                    params_.fec_rate);
+  // Return the overhead in Q8.
+  return (num_fec_packets << 8) / media_packets_fec_.size();
+}
+
+void ProducerFec::DeletePackets() {
+  while (!media_packets_fec_.empty()) {
+    delete media_packets_fec_.front();
+    media_packets_fec_.pop_front();
+  }
+  assert(media_packets_fec_.empty());
+}
+
+}  // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/producer_fec.h b/src/modules/rtp_rtcp/source/producer_fec.h
new file mode 100644
index 0000000..180bd83
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/producer_fec.h
@@ -0,0 +1,84 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_PRODUCER_FEC_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_PRODUCER_FEC_H_
+
+#include <list>
+
+#include "modules/rtp_rtcp/source/forward_error_correction.h"
+
+namespace webrtc {
+
+struct RtpPacket;
+
+class RedPacket {
+ public:
+  explicit RedPacket(int length);
+  ~RedPacket();
+  void CreateHeader(const uint8_t* rtp_header, int header_length,
+                    int red_pl_type, int pl_type);
+  void SetSeqNum(int seq_num);
+  void AssignPayload(const uint8_t* payload, int length);
+  void ClearMarkerBit();
+  uint8_t* data() const;
+  int length() const;
+
+ private:
+  uint8_t* data_;
+  int length_;
+  int header_length_;
+};
+
+class ProducerFec {
+ public:
+  explicit ProducerFec(ForwardErrorCorrection* fec);
+  ~ProducerFec();
+
+  void SetFecParameters(const FecProtectionParams* params,
+                        int max_fec_frames);
+
+  RedPacket* BuildRedPacket(const uint8_t* data_buffer,
+                            int payload_length,
+                            int rtp_header_length,
+                            int red_pl_type);
+
+  int AddRtpPacketAndGenerateFec(const uint8_t* data_buffer,
+                                 int payload_length,
+                                 int rtp_header_length);
+
+  bool ExcessOverheadBelowMax();
+
+  bool MinimumMediaPacketsReached();
+
+  bool FecAvailable() const;
+
+  RedPacket* GetFecPacket(int red_pl_type,
+                          int fec_pl_type,
+                          uint16_t seq_num,
+                          int rtp_header_length);
+
+ private:
+  void DeletePackets();
+  int Overhead() const;
+  ForwardErrorCorrection* fec_;
+  std::list<ForwardErrorCorrection::Packet*> media_packets_fec_;
+  std::list<ForwardErrorCorrection::Packet*> fec_packets_;
+  int num_frames_;
+  bool incomplete_frame_;
+  int num_first_partition_;
+  int minimum_media_packets_fec_;
+  FecProtectionParams params_;
+  FecProtectionParams new_params_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_RTP_RTCP_SOURCE_PRODUCER_FEC_H_
diff --git a/src/modules/rtp_rtcp/source/producer_fec_unittest.cc b/src/modules/rtp_rtcp/source/producer_fec_unittest.cc
new file mode 100644
index 0000000..7639a81
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/producer_fec_unittest.cc
@@ -0,0 +1,159 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <list>
+
+#include "gtest/gtest.h"
+#include "modules/rtp_rtcp/source/fec_test_helper.h"
+#include "modules/rtp_rtcp/source/forward_error_correction.h"
+#include "modules/rtp_rtcp/source/producer_fec.h"
+
+namespace webrtc {
+
+void VerifyHeader(uint16_t seq_num,
+                  uint32_t timestamp,
+                  int red_pltype,
+                  int fec_pltype,
+                  RedPacket* packet,
+                  bool marker_bit) {
+  EXPECT_GT(packet->length(), static_cast<int>(kRtpHeaderSize));
+  EXPECT_TRUE(packet->data() != NULL);
+  uint8_t* data = packet->data();
+  // Marker bit not set.
+  EXPECT_EQ(marker_bit ? 0x80 : 0, data[1] & 0x80);
+  EXPECT_EQ(red_pltype, data[1] & 0x7F);
+  EXPECT_EQ(seq_num, (data[2] << 8) + data[3]);
+  uint32_t parsed_timestamp = (data[4] << 24) + (data[5] << 16) +
+      (data[6] << 8) + data[7];
+  EXPECT_EQ(timestamp, parsed_timestamp);
+  EXPECT_EQ(fec_pltype, data[kRtpHeaderSize]);
+}
+
+class ProducerFecTest : public ::testing::Test {
+ protected:
+  virtual void SetUp() {
+    fec_ = new ForwardErrorCorrection(0);
+    producer_ = new ProducerFec(fec_);
+    generator_ = new FrameGenerator;
+  }
+
+  virtual void TearDown() {
+    delete producer_;
+    delete fec_;
+    delete generator_;
+  }
+  ForwardErrorCorrection* fec_;
+  ProducerFec* producer_;
+  FrameGenerator* generator_;
+};
+
+TEST_F(ProducerFecTest, OneFrameFec) {
+  // The number of media packets (|kNumPackets|), number of frames (one for
+  // this test), and the protection factor (|params->fec_rate|) are set to make
+  // sure the conditions for generating FEC are satisfied. This means:
+  // (1) protection factor is high enough so that actual overhead over 1 frame
+  // of packets is within |kMaxExcessOverhead|, and (2) the total number of
+  // media packets for 1 frame is at least |minimum_media_packets_fec_|.
+  const int kNumPackets = 4;
+  FecProtectionParams params = {15, false, 3};
+  std::list<RtpPacket*> rtp_packets;
+  generator_->NewFrame(kNumPackets);
+  producer_->SetFecParameters(&params, 0);  // Expecting one FEC packet.
+  uint32_t last_timestamp = 0;
+  for (int i = 0; i < kNumPackets; ++i) {
+    RtpPacket* rtp_packet = generator_->NextPacket(i, 10);
+    rtp_packets.push_back(rtp_packet);
+    EXPECT_EQ(0, producer_->AddRtpPacketAndGenerateFec(rtp_packet->data,
+                                                       rtp_packet->length,
+                                                       kRtpHeaderSize));
+    last_timestamp = rtp_packet->header.header.timestamp;
+  }
+  EXPECT_TRUE(producer_->FecAvailable());
+  uint16_t seq_num = generator_->NextSeqNum();
+  RedPacket* packet = producer_->GetFecPacket(kRedPayloadType,
+                                              kFecPayloadType,
+                                              seq_num,
+                                              kRtpHeaderSize);
+  EXPECT_FALSE(producer_->FecAvailable());
+  ASSERT_TRUE(packet != NULL);
+  VerifyHeader(seq_num, last_timestamp,
+               kRedPayloadType, kFecPayloadType, packet, false);
+  while (!rtp_packets.empty()) {
+    delete rtp_packets.front();
+    rtp_packets.pop_front();
+  }
+  delete packet;
+}
+
+TEST_F(ProducerFecTest, TwoFrameFec) {
+  // The number of media packets/frame (|kNumPackets|), the number of frames
+  // (|kNumFrames|), and the protection factor (|params->fec_rate|) are set to
+  // make sure the conditions for generating FEC are satisfied. This means:
+  // (1) protection factor is high enough so that actual overhead over
+  // |kNumFrames| is within |kMaxExcessOverhead|, and (2) the total number of
+  // media packets for |kNumFrames| frames is at least
+  // |minimum_media_packets_fec_|.
+  const int kNumPackets = 2;
+  const int kNumFrames = 2;
+
+  FecProtectionParams params = {15, 0, 3};
+  std::list<RtpPacket*> rtp_packets;
+  producer_->SetFecParameters(&params, 0);  // Expecting one FEC packet.
+  uint32_t last_timestamp = 0;
+  for (int i = 0; i < kNumFrames; ++i) {
+    generator_->NewFrame(kNumPackets);
+    for (int j = 0; j < kNumPackets; ++j) {
+      RtpPacket* rtp_packet = generator_->NextPacket(i * kNumPackets + j, 10);
+      rtp_packets.push_back(rtp_packet);
+      EXPECT_EQ(0, producer_->AddRtpPacketAndGenerateFec(rtp_packet->data,
+                                           rtp_packet->length,
+                                           kRtpHeaderSize));
+      last_timestamp = rtp_packet->header.header.timestamp;
+    }
+  }
+  EXPECT_TRUE(producer_->FecAvailable());
+  uint16_t seq_num = generator_->NextSeqNum();
+  RedPacket* packet = producer_->GetFecPacket(kRedPayloadType,
+                                              kFecPayloadType,
+                                              seq_num,
+                                              kRtpHeaderSize);
+  EXPECT_FALSE(producer_->FecAvailable());
+  EXPECT_TRUE(packet != NULL);
+  VerifyHeader(seq_num, last_timestamp,
+               kRedPayloadType, kFecPayloadType, packet, false);
+  while (!rtp_packets.empty()) {
+    delete rtp_packets.front();
+    rtp_packets.pop_front();
+  }
+  delete packet;
+}
+
+TEST_F(ProducerFecTest, BuildRedPacket) {
+  generator_->NewFrame(1);
+  RtpPacket* packet = generator_->NextPacket(0, 10);
+  RedPacket* red_packet = producer_->BuildRedPacket(packet->data,
+                                                    packet->length -
+                                                    kRtpHeaderSize,
+                                                    kRtpHeaderSize,
+                                                    kRedPayloadType);
+  EXPECT_EQ(packet->length + 1, red_packet->length());
+  VerifyHeader(packet->header.header.sequenceNumber,
+               packet->header.header.timestamp,
+               kRedPayloadType,
+               packet->header.header.payloadType,
+               red_packet,
+               true);  // Marker bit set.
+  for (int i = 0; i < 10; ++i)
+    EXPECT_EQ(i, red_packet->data()[kRtpHeaderSize + 1 + i]);
+  delete red_packet;
+  delete packet;
+}
+
+}  // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/receiver_fec.cc b/src/modules/rtp_rtcp/source/receiver_fec.cc
new file mode 100644
index 0000000..e86f578
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/receiver_fec.cc
@@ -0,0 +1,269 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/receiver_fec.h"
+
+#include <cassert>
+
+#include "modules/rtp_rtcp/source/rtp_receiver_video.h"
+#include "modules/rtp_rtcp/source/rtp_utility.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "system_wrappers/interface/trace.h"
+
+// RFC 5109
+namespace webrtc {
+ReceiverFEC::ReceiverFEC(const WebRtc_Word32 id, RTPReceiverVideo* owner)
+    : _id(id),
+      _owner(owner),
+      _fec(new ForwardErrorCorrection(id)),
+      _payloadTypeFEC(-1) {
+}
+
+ReceiverFEC::~ReceiverFEC() {
+  // Clean up DecodeFEC()
+  while (!_receivedPacketList.empty()){
+    ForwardErrorCorrection::ReceivedPacket* receivedPacket =
+        _receivedPacketList.front();
+    delete receivedPacket;
+    _receivedPacketList.pop_front();
+  }
+  assert(_receivedPacketList.empty());
+
+  if (_fec != NULL) {
+    _fec->ResetState(&_recoveredPacketList);
+    delete _fec;
+  }
+}
+
+void ReceiverFEC::SetPayloadTypeFEC(const WebRtc_Word8 payloadType) {
+  _payloadTypeFEC = payloadType;
+}
+
+/*
+    0                   1                    2                   3
+    0 1 2 3 4 5 6 7 8 9 0 1 2 3  4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |F|   block PT  |  timestamp offset         |   block length    |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+
+RFC 2198          RTP Payload for Redundant Audio Data    September 1997
+
+   The bits in the header are specified as follows:
+
+   F: 1 bit First bit in header indicates whether another header block
+       follows.  If 1 further header blocks follow, if 0 this is the
+       last header block.
+       If 0 there is only 1 byte RED header
+
+   block PT: 7 bits RTP payload type for this block.
+
+   timestamp offset:  14 bits Unsigned offset of timestamp of this block
+       relative to timestamp given in RTP header.  The use of an unsigned
+       offset implies that redundant data must be sent after the primary
+       data, and is hence a time to be subtracted from the current
+       timestamp to determine the timestamp of the data for which this
+       block is the redundancy.
+
+   block length:  10 bits Length in bytes of the corresponding data
+       block excluding header.
+*/
+
+WebRtc_Word32 ReceiverFEC::AddReceivedFECPacket(
+    const WebRtcRTPHeader* rtpHeader,
+    const WebRtc_UWord8* incomingRtpPacket,
+    const WebRtc_UWord16 payloadDataLength,
+    bool& FECpacket) {
+  if (_payloadTypeFEC == -1) {
+    return -1;
+  }
+
+  WebRtc_UWord8 REDHeaderLength = 1;
+
+  // Add to list without RED header, aka a virtual RTP packet
+  // we remove the RED header
+
+  ForwardErrorCorrection::ReceivedPacket* receivedPacket =
+      new ForwardErrorCorrection::ReceivedPacket;
+  receivedPacket->pkt = new ForwardErrorCorrection::Packet;
+
+  // get payload type from RED header
+  WebRtc_UWord8 payloadType =
+      incomingRtpPacket[rtpHeader->header.headerLength] & 0x7f;
+
+  // use the payloadType to decide if it's FEC or coded data
+  if (_payloadTypeFEC == payloadType) {
+    receivedPacket->isFec = true;
+    FECpacket = true;
+  } else {
+    receivedPacket->isFec = false;
+    FECpacket = false;
+  }
+  receivedPacket->seqNum = rtpHeader->header.sequenceNumber;
+
+  WebRtc_UWord16 blockLength = 0;
+  if(incomingRtpPacket[rtpHeader->header.headerLength] & 0x80) {
+    // f bit set in RED header
+    REDHeaderLength = 4;
+    WebRtc_UWord16 timestampOffset =
+        (incomingRtpPacket[rtpHeader->header.headerLength + 1]) << 8;
+    timestampOffset += incomingRtpPacket[rtpHeader->header.headerLength+2];
+    timestampOffset = timestampOffset >> 2;
+    if(timestampOffset != 0) {
+      // |timestampOffset| should be 0. However, it's possible this is the first
+      // location a corrupt payload can be caught, so don't assert.
+      WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
+                   "Corrupt payload found in %s", __FUNCTION__);
+      delete receivedPacket;
+      return -1;
+    }
+
+    blockLength =
+        (0x03 & incomingRtpPacket[rtpHeader->header.headerLength + 2]) << 8;
+    blockLength += (incomingRtpPacket[rtpHeader->header.headerLength + 3]);
+
+    // check next RED header
+    if(incomingRtpPacket[rtpHeader->header.headerLength+4] & 0x80) {
+      // more than 2 blocks in packet not supported
+      delete receivedPacket;
+      assert(false);
+      return -1;
+    }
+    if(blockLength > payloadDataLength - REDHeaderLength) {
+      // block length longer than packet
+      delete receivedPacket;
+      assert(false);
+      return -1;
+    }
+  }
+
+  ForwardErrorCorrection::ReceivedPacket* secondReceivedPacket = NULL;
+  if (blockLength > 0) {
+    // handle block length, split into 2 packets
+    REDHeaderLength = 5;
+
+    // copy the RTP header
+    memcpy(receivedPacket->pkt->data,
+           incomingRtpPacket,
+           rtpHeader->header.headerLength);
+
+    // replace the RED payload type
+    receivedPacket->pkt->data[1] &= 0x80;         // reset the payload
+    receivedPacket->pkt->data[1] += payloadType;  // set the media payload type
+
+    // copy the payload data
+    memcpy(receivedPacket->pkt->data + rtpHeader->header.headerLength,
+           incomingRtpPacket + rtpHeader->header.headerLength + REDHeaderLength,
+           blockLength);
+
+    receivedPacket->pkt->length = blockLength;
+
+    secondReceivedPacket = new ForwardErrorCorrection::ReceivedPacket;
+    secondReceivedPacket->pkt = new ForwardErrorCorrection::Packet;
+
+    secondReceivedPacket->isFec = true;
+    secondReceivedPacket->seqNum = rtpHeader->header.sequenceNumber;
+
+    // copy the FEC payload data
+    memcpy(secondReceivedPacket->pkt->data,
+           incomingRtpPacket + rtpHeader->header.headerLength +
+               REDHeaderLength + blockLength,
+           payloadDataLength - REDHeaderLength - blockLength);
+
+    secondReceivedPacket->pkt->length = payloadDataLength - REDHeaderLength -
+        blockLength;
+
+  } else if(receivedPacket->isFec) {
+    // everything behind the RED header
+    memcpy(receivedPacket->pkt->data,
+           incomingRtpPacket + rtpHeader->header.headerLength + REDHeaderLength,
+           payloadDataLength - REDHeaderLength);
+    receivedPacket->pkt->length = payloadDataLength - REDHeaderLength;
+    receivedPacket->ssrc =
+        ModuleRTPUtility::BufferToUWord32(&incomingRtpPacket[8]);
+
+  } else {
+    // copy the RTP header
+    memcpy(receivedPacket->pkt->data,
+           incomingRtpPacket,
+           rtpHeader->header.headerLength);
+
+    // replace the RED payload type
+    receivedPacket->pkt->data[1] &= 0x80;         // reset the payload
+    receivedPacket->pkt->data[1] += payloadType;  // set the media payload type
+
+    // copy the media payload data
+    memcpy(receivedPacket->pkt->data + rtpHeader->header.headerLength,
+           incomingRtpPacket + rtpHeader->header.headerLength + REDHeaderLength,
+           payloadDataLength - REDHeaderLength);
+
+    receivedPacket->pkt->length = rtpHeader->header.headerLength +
+        payloadDataLength - REDHeaderLength;
+  }
+
+  if(receivedPacket->pkt->length == 0) {
+    delete secondReceivedPacket;
+    delete receivedPacket;
+    return 0;
+  }
+
+  _receivedPacketList.push_back(receivedPacket);
+  if (secondReceivedPacket) {
+    _receivedPacketList.push_back(secondReceivedPacket);
+  }
+  return 0;
+}
+
+WebRtc_Word32 ReceiverFEC::ProcessReceivedFEC() {
+  if (!_receivedPacketList.empty()) {
+    // Send received media packet to VCM.
+    if (!_receivedPacketList.front()->isFec) {
+      if (ParseAndReceivePacket(_receivedPacketList.front()->pkt) != 0) {
+        return -1;
+      }
+    }
+    if (_fec->DecodeFEC(&_receivedPacketList, &_recoveredPacketList) != 0) {
+      return -1;
+    }
+    assert(_receivedPacketList.empty());
+  }
+  // Send any recovered media packets to VCM.
+  ForwardErrorCorrection::RecoveredPacketList::iterator it =
+      _recoveredPacketList.begin();
+  for (; it != _recoveredPacketList.end(); ++it) {
+    if ((*it)->returned)  // Already sent to the VCM and the jitter buffer.
+      continue;
+    if (ParseAndReceivePacket((*it)->pkt) != 0) {
+      return -1;
+    }
+    (*it)->returned = true;
+  }
+  return 0;
+}
+
+int ReceiverFEC::ParseAndReceivePacket(
+    const ForwardErrorCorrection::Packet* packet) {
+  WebRtcRTPHeader header;
+  memset(&header, 0, sizeof(header));
+  ModuleRTPUtility::RTPHeaderParser parser(packet->data,
+                                           packet->length);
+  if (!parser.Parse(header)) {
+    return -1;
+  }
+  if (_owner->ReceiveRecoveredPacketCallback(
+      &header,
+      &packet->data[header.header.headerLength],
+      packet->length - header.header.headerLength) != 0) {
+    return -1;
+  }
+  return 0;
+}
+
+} // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/receiver_fec.h b/src/modules/rtp_rtcp/source/receiver_fec.h
new file mode 100644
index 0000000..63aaa72
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/receiver_fec.h
@@ -0,0 +1,53 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RECEIVER_FEC_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RECEIVER_FEC_H_
+
+#include "rtp_rtcp_defines.h"
+// This header is included to get the nested declaration of Packet structure.
+#include "forward_error_correction.h"
+
+#include "typedefs.h"
+
+namespace webrtc {
+class RTPReceiverVideo;
+
+class ReceiverFEC
+{
+public:
+    ReceiverFEC(const WebRtc_Word32 id, RTPReceiverVideo* owner);
+    virtual ~ReceiverFEC();
+
+    WebRtc_Word32 AddReceivedFECPacket(const WebRtcRTPHeader* rtpHeader,
+                                       const WebRtc_UWord8* incomingRtpPacket,
+                                       const WebRtc_UWord16 payloadDataLength,
+                                       bool& FECpacket);
+
+    WebRtc_Word32 ProcessReceivedFEC();
+
+    void SetPayloadTypeFEC(const WebRtc_Word8 payloadType);
+
+private:
+    int ParseAndReceivePacket(const ForwardErrorCorrection::Packet* packet);
+
+    int _id;
+    RTPReceiverVideo* _owner;
+    ForwardErrorCorrection* _fec;
+    // TODO(holmer): In the current version _receivedPacketList is never more
+    // than one packet, since we process FEC every time a new packet
+    // arrives. We should remove the list.
+    ForwardErrorCorrection::ReceivedPacketList _receivedPacketList;
+    ForwardErrorCorrection::RecoveredPacketList _recoveredPacketList;
+    WebRtc_Word8 _payloadTypeFEC;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RECEIVER_FEC_H_
diff --git a/src/modules/rtp_rtcp/source/receiver_fec_unittest.cc b/src/modules/rtp_rtcp/source/receiver_fec_unittest.cc
new file mode 100644
index 0000000..93b6e7a
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/receiver_fec_unittest.cc
@@ -0,0 +1,383 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h>
+#include <list>
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "modules/rtp_rtcp/source/fec_test_helper.h"
+#include "modules/rtp_rtcp/source/forward_error_correction.h"
+#include "modules/rtp_rtcp/source/mock/mock_rtp_receiver_video.h"
+#include "modules/rtp_rtcp/source/receiver_fec.h"
+
+using ::testing::_;
+using ::testing::Args;
+using ::testing::ElementsAreArray;
+
+namespace webrtc {
+
+class ReceiverFecTest : public ::testing::Test {
+ protected:
+  virtual void SetUp() {
+    fec_ = new ForwardErrorCorrection(0);
+    receiver_fec_ = new ReceiverFEC(0, &rtp_receiver_video_);
+    generator_ = new FrameGenerator();
+    receiver_fec_->SetPayloadTypeFEC(kFecPayloadType);
+  }
+
+  virtual void TearDown() {
+    delete fec_;
+    delete receiver_fec_;
+    delete generator_;
+  }
+
+  void GenerateFEC(std::list<Packet*>* media_packets,
+                   std::list<Packet*>* fec_packets,
+                   unsigned int num_fec_packets) {
+    EXPECT_EQ(0, fec_->GenerateFEC(
+        *media_packets,
+        num_fec_packets * 255 / media_packets->size(),
+        0,
+        false,
+        kFecMaskBursty,
+        fec_packets));
+    ASSERT_EQ(num_fec_packets, fec_packets->size());
+  }
+
+  void GenerateFrame(int num_media_packets,
+                     int frame_offset,
+                     std::list<RtpPacket*>* media_rtp_packets,
+                     std::list<Packet*>* media_packets) {
+    generator_->NewFrame(num_media_packets);
+    for (int i = 0; i < num_media_packets; ++i) {
+      media_rtp_packets->push_back(generator_->NextPacket(frame_offset + i,
+                                                          kRtpHeaderSize + 10));
+      media_packets->push_back(media_rtp_packets->back());
+    }
+  }
+
+  void VerifyReconstructedMediaPacket(const RtpPacket* packet, int times) {
+    // Verify that the content of the reconstructed packet is equal to the
+    // content of |packet|, and that the same content is received |times| number
+    // of times in a row.
+    EXPECT_CALL(rtp_receiver_video_,
+                ReceiveRecoveredPacketCallback(_, _,
+                                               packet->length - kRtpHeaderSize))
+        .With(Args<1, 2>(ElementsAreArray(packet->data + kRtpHeaderSize,
+                                          packet->length - kRtpHeaderSize)))
+        .Times(times);
+  }
+
+  void BuildAndAddRedMediaPacket(RtpPacket* packet) {
+    RtpPacket* red_packet = generator_->BuildMediaRedPacket(packet);
+    bool is_fec = false;
+    EXPECT_EQ(0, receiver_fec_->AddReceivedFECPacket(&red_packet->header,
+                                                     red_packet->data,
+                                                     red_packet->length -
+                                                     kRtpHeaderSize,
+                                                     is_fec));
+    delete red_packet;
+    EXPECT_FALSE(is_fec);
+  }
+
+  void BuildAndAddRedFecPacket(Packet* packet) {
+    RtpPacket* red_packet = generator_->BuildFecRedPacket(packet);
+    bool is_fec = false;
+    EXPECT_EQ(0, receiver_fec_->AddReceivedFECPacket(&red_packet->header,
+                                                     red_packet->data,
+                                                     red_packet->length -
+                                                     kRtpHeaderSize,
+                                                     is_fec));
+    delete red_packet;
+    EXPECT_TRUE(is_fec);
+  }
+
+  ForwardErrorCorrection* fec_;
+  MockRTPReceiverVideo rtp_receiver_video_;
+  ReceiverFEC* receiver_fec_;
+  FrameGenerator* generator_;
+};
+
+void DeletePackets(std::list<Packet*>* packets) {
+  while (!packets->empty()) {
+    delete packets->front();
+    packets->pop_front();
+  }
+}
+
+TEST_F(ReceiverFecTest, TwoMediaOneFec) {
+  const unsigned int kNumFecPackets = 1u;
+  std::list<RtpPacket*> media_rtp_packets;
+  std::list<Packet*> media_packets;
+  GenerateFrame(2, 0, &media_rtp_packets, &media_packets);
+  std::list<Packet*> fec_packets;
+  GenerateFEC(&media_packets, &fec_packets, kNumFecPackets);
+
+  // Recovery
+  std::list<RtpPacket*>::iterator it = media_rtp_packets.begin();
+  std::list<RtpPacket*>::iterator media_it = media_rtp_packets.begin();
+  BuildAndAddRedMediaPacket(*media_it);
+  VerifyReconstructedMediaPacket(*it, 1);
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+  // Drop one media packet.
+  std::list<Packet*>::iterator fec_it = fec_packets.begin();
+  BuildAndAddRedFecPacket(*fec_it);
+  ++it;
+  VerifyReconstructedMediaPacket(*it, 1);
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+
+  DeletePackets(&media_packets);
+}
+
+TEST_F(ReceiverFecTest, TwoMediaTwoFec) {
+  const unsigned int kNumFecPackets = 2u;
+  std::list<RtpPacket*> media_rtp_packets;
+  std::list<Packet*> media_packets;
+  GenerateFrame(2, 0, &media_rtp_packets, &media_packets);
+  std::list<Packet*> fec_packets;
+  GenerateFEC(&media_packets, &fec_packets, kNumFecPackets);
+
+  // Recovery
+  // Drop both media packets.
+  std::list<RtpPacket*>::iterator it = media_rtp_packets.begin();
+  std::list<Packet*>::iterator fec_it = fec_packets.begin();
+  BuildAndAddRedFecPacket(*fec_it);
+  VerifyReconstructedMediaPacket(*it, 1);
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+  ++fec_it;
+  BuildAndAddRedFecPacket(*fec_it);
+  ++it;
+  VerifyReconstructedMediaPacket(*it, 1);
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+
+  DeletePackets(&media_packets);
+}
+
+TEST_F(ReceiverFecTest, TwoFramesOneFec) {
+  const unsigned int kNumFecPackets = 1u;
+  std::list<RtpPacket*> media_rtp_packets;
+  std::list<Packet*> media_packets;
+  GenerateFrame(1, 0, &media_rtp_packets, &media_packets);
+  GenerateFrame(1, 1, &media_rtp_packets, &media_packets);
+  std::list<Packet*> fec_packets;
+  GenerateFEC(&media_packets, &fec_packets, kNumFecPackets);
+
+  // Recovery
+  std::list<RtpPacket*>::iterator it = media_rtp_packets.begin();
+  BuildAndAddRedMediaPacket(media_rtp_packets.front());
+  VerifyReconstructedMediaPacket(*it, 1);
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+  // Drop one media packet.
+  BuildAndAddRedFecPacket(fec_packets.front());
+  ++it;
+  VerifyReconstructedMediaPacket(*it, 1);
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+
+  DeletePackets(&media_packets);
+}
+
+TEST_F(ReceiverFecTest, OneCompleteOneUnrecoverableFrame) {
+  const unsigned int kNumFecPackets = 1u;
+  std::list<RtpPacket*> media_rtp_packets;
+  std::list<Packet*> media_packets;
+  GenerateFrame(1, 0, &media_rtp_packets, &media_packets);
+  GenerateFrame(2, 1, &media_rtp_packets, &media_packets);
+
+  std::list<Packet*> fec_packets;
+  GenerateFEC(&media_packets, &fec_packets, kNumFecPackets);
+
+  // Recovery
+  std::list<RtpPacket*>::iterator it = media_rtp_packets.begin();
+  BuildAndAddRedMediaPacket(*it);  // First frame: one packet.
+  VerifyReconstructedMediaPacket(*it, 1);
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+  ++it;
+  BuildAndAddRedMediaPacket(*it);  // First packet of second frame.
+  VerifyReconstructedMediaPacket(*it, 1);
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+
+  DeletePackets(&media_packets);
+}
+
+TEST_F(ReceiverFecTest, MaxFramesOneFec) {
+  const unsigned int kNumFecPackets = 1u;
+  const unsigned int kNumMediaPackets = 48u;
+  std::list<RtpPacket*> media_rtp_packets;
+  std::list<Packet*> media_packets;
+  for (unsigned int i = 0; i < kNumMediaPackets; ++i) {
+    GenerateFrame(1, i, &media_rtp_packets, &media_packets);
+  }
+  std::list<Packet*> fec_packets;
+  GenerateFEC(&media_packets, &fec_packets, kNumFecPackets);
+
+  // Recovery
+  std::list<RtpPacket*>::iterator it = media_rtp_packets.begin();
+  ++it;  // Drop first packet.
+  for (; it != media_rtp_packets.end(); ++it) {
+    BuildAndAddRedMediaPacket(*it);
+    VerifyReconstructedMediaPacket(*it, 1);
+    EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+  }
+  BuildAndAddRedFecPacket(fec_packets.front());
+  it = media_rtp_packets.begin();
+  VerifyReconstructedMediaPacket(*it, 1);
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+
+  DeletePackets(&media_packets);
+}
+
+TEST_F(ReceiverFecTest, TooManyFrames) {
+  const unsigned int kNumFecPackets = 1u;
+  const unsigned int kNumMediaPackets = 49u;
+  std::list<RtpPacket*> media_rtp_packets;
+  std::list<Packet*> media_packets;
+  for (unsigned int i = 0; i < kNumMediaPackets; ++i) {
+    GenerateFrame(1, i, &media_rtp_packets, &media_packets);
+  }
+  std::list<Packet*> fec_packets;
+  EXPECT_EQ(-1, fec_->GenerateFEC(media_packets,
+                                  kNumFecPackets * 255 / kNumMediaPackets,
+                                  0,
+                                  false,
+                                  kFecMaskBursty,
+                                  &fec_packets));
+
+  DeletePackets(&media_packets);
+}
+
+TEST_F(ReceiverFecTest, PacketNotDroppedTooEarly) {
+  // 1 frame with 2 media packets and one FEC packet. One media packet missing.
+  // Delay the FEC packet.
+  Packet* delayed_fec = NULL;
+  const unsigned int kNumFecPacketsBatch1 = 1u;
+  const unsigned int kNumMediaPacketsBatch1 = 2u;
+  std::list<RtpPacket*> media_rtp_packets_batch1;
+  std::list<Packet*> media_packets_batch1;
+  GenerateFrame(kNumMediaPacketsBatch1, 0, &media_rtp_packets_batch1,
+                &media_packets_batch1);
+  std::list<Packet*> fec_packets;
+  GenerateFEC(&media_packets_batch1, &fec_packets, kNumFecPacketsBatch1);
+
+  BuildAndAddRedMediaPacket(media_rtp_packets_batch1.front());
+  EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
+      .Times(1);
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+  delayed_fec = fec_packets.front();
+
+  // Fill the FEC decoder. No packets should be dropped.
+  const unsigned int kNumMediaPacketsBatch2 = 46u;
+  std::list<RtpPacket*> media_rtp_packets_batch2;
+  std::list<Packet*> media_packets_batch2;
+  for (unsigned int i = 0; i < kNumMediaPacketsBatch2; ++i) {
+    GenerateFrame(1, i, &media_rtp_packets_batch2, &media_packets_batch2);
+  }
+  for (std::list<RtpPacket*>::iterator it = media_rtp_packets_batch2.begin();
+      it != media_rtp_packets_batch2.end(); ++it) {
+    BuildAndAddRedMediaPacket(*it);
+    EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
+        .Times(1);
+    EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+  }
+
+  // Add the delayed FEC packet. One packet should be reconstructed.
+  BuildAndAddRedFecPacket(delayed_fec);
+  EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
+      .Times(1);
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+
+  DeletePackets(&media_packets_batch1);
+  DeletePackets(&media_packets_batch2);
+}
+
+TEST_F(ReceiverFecTest, PacketDroppedWhenTooOld) {
+  // 1 frame with 2 media packets and one FEC packet. One media packet missing.
+  // Delay the FEC packet.
+  Packet* delayed_fec = NULL;
+  const unsigned int kNumFecPacketsBatch1 = 1u;
+  const unsigned int kNumMediaPacketsBatch1 = 2u;
+  std::list<RtpPacket*> media_rtp_packets_batch1;
+  std::list<Packet*> media_packets_batch1;
+  GenerateFrame(kNumMediaPacketsBatch1, 0, &media_rtp_packets_batch1,
+                &media_packets_batch1);
+  std::list<Packet*> fec_packets;
+  GenerateFEC(&media_packets_batch1, &fec_packets, kNumFecPacketsBatch1);
+
+  BuildAndAddRedMediaPacket(media_rtp_packets_batch1.front());
+  EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
+      .Times(1);
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+  delayed_fec = fec_packets.front();
+
+  // Fill the FEC decoder and force the last packet to be dropped.
+  const unsigned int kNumMediaPacketsBatch2 = 48u;
+  std::list<RtpPacket*> media_rtp_packets_batch2;
+  std::list<Packet*> media_packets_batch2;
+  for (unsigned int i = 0; i < kNumMediaPacketsBatch2; ++i) {
+    GenerateFrame(1, i, &media_rtp_packets_batch2, &media_packets_batch2);
+  }
+  for (std::list<RtpPacket*>::iterator it = media_rtp_packets_batch2.begin();
+      it != media_rtp_packets_batch2.end(); ++it) {
+    BuildAndAddRedMediaPacket(*it);
+    EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
+        .Times(1);
+    EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+  }
+
+  // Add the delayed FEC packet. No packet should be reconstructed since the
+  // first media packet of that frame has been dropped due to being too old.
+  BuildAndAddRedFecPacket(delayed_fec);
+  EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
+      .Times(0);
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+
+  DeletePackets(&media_packets_batch1);
+  DeletePackets(&media_packets_batch2);
+}
+
+TEST_F(ReceiverFecTest, OldFecPacketDropped) {
+  // 49 frames with 2 media packets and one FEC packet. All media packets
+  // missing.
+  const unsigned int kNumMediaPackets = 49 * 2;
+  std::list<RtpPacket*> media_rtp_packets;
+  std::list<Packet*> media_packets;
+  for (unsigned int i = 0; i < kNumMediaPackets / 2; ++i) {
+    std::list<RtpPacket*> frame_media_rtp_packets;
+    std::list<Packet*> frame_media_packets;
+    std::list<Packet*> fec_packets;
+    GenerateFrame(2, 0, &frame_media_rtp_packets, &frame_media_packets);
+    GenerateFEC(&frame_media_packets, &fec_packets, 1);
+    for (std::list<Packet*>::iterator it = fec_packets.begin();
+        it != fec_packets.end(); ++it) {
+      // Only FEC packets inserted. No packets recoverable at this time.
+      BuildAndAddRedFecPacket(*it);
+      EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
+          .Times(0);
+      EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+    }
+    media_packets.insert(media_packets.end(),
+                         frame_media_packets.begin(),
+                         frame_media_packets.end());
+    media_rtp_packets.insert(media_rtp_packets.end(),
+                             frame_media_rtp_packets.begin(),
+                             frame_media_rtp_packets.end());
+  }
+  // Insert the oldest media packet. The corresponding FEC packet is too old
+  // and should've been dropped. Only the media packet we inserted will be
+  // returned.
+  BuildAndAddRedMediaPacket(media_rtp_packets.front());
+  EXPECT_CALL(rtp_receiver_video_, ReceiveRecoveredPacketCallback(_,_,_))
+      .Times(1);
+  EXPECT_EQ(0, receiver_fec_->ProcessReceivedFEC());
+
+  DeletePackets(&media_packets);
+}
+
+}  // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc b/src/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc
new file mode 100644
index 0000000..466fccd
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtcp_format_remb_unittest.cc
@@ -0,0 +1,133 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <gtest/gtest.h>
+
+#include "typedefs.h"
+#include "common_types.h"
+#include "rtp_utility.h"
+#include "rtcp_sender.h"
+#include "rtcp_receiver.h"
+#include "rtp_rtcp_impl.h"
+#include "modules/remote_bitrate_estimator/include/bwe_defines.h"
+#include "modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_observer.h"
+
+namespace {
+
+using namespace webrtc;
+
+
+class TestTransport : public Transport {
+ public:
+  TestTransport(RTCPReceiver* rtcp_receiver) :
+    rtcp_receiver_(rtcp_receiver) {
+  }
+
+  virtual int SendPacket(int /*channel*/, const void* /*data*/, int /*len*/) {
+    return -1;
+  }
+  virtual int SendRTCPPacket(int /*channel*/,
+                             const void *packet,
+                             int packetLength) {
+    RTCPUtility::RTCPParserV2 rtcpParser((WebRtc_UWord8*)packet,
+                                         (WebRtc_Word32)packetLength,
+                                         true); // Allow non-compound RTCP
+
+    EXPECT_TRUE(rtcpParser.IsValid());
+    RTCPHelp::RTCPPacketInformation rtcpPacketInformation;
+    EXPECT_EQ(0, rtcp_receiver_->IncomingRTCPPacket(rtcpPacketInformation,
+                                                    &rtcpParser));
+
+    EXPECT_EQ((WebRtc_UWord32)kRtcpRemb,
+              rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpRemb);
+    EXPECT_EQ((WebRtc_UWord32)1234,
+              rtcpPacketInformation.receiverEstimatedMaxBitrate);
+    return packetLength;
+  }
+ private:
+  RTCPReceiver* rtcp_receiver_;
+};
+
+
+class RtcpFormatRembTest : public ::testing::Test {
+ protected:
+  RtcpFormatRembTest()
+      : over_use_detector_options_(),
+        remote_bitrate_observer_(),
+        remote_bitrate_estimator_(&remote_bitrate_observer_,
+                                  over_use_detector_options_) {}
+  virtual void SetUp();
+  virtual void TearDown();
+
+  OverUseDetectorOptions over_use_detector_options_;
+  RtpRtcpClock* system_clock_;
+  ModuleRtpRtcpImpl* dummy_rtp_rtcp_impl_;
+  RTCPSender* rtcp_sender_;
+  RTCPReceiver* rtcp_receiver_;
+  TestTransport* test_transport_;
+  MockRemoteBitrateObserver remote_bitrate_observer_;
+  RemoteBitrateEstimator remote_bitrate_estimator_;
+};
+
+void RtcpFormatRembTest::SetUp() {
+  system_clock_ = ModuleRTPUtility::GetSystemClock();
+  RtpRtcp::Configuration configuration;
+  configuration.id = 0;
+  configuration.audio = false;
+  configuration.clock = system_clock_;
+  configuration.remote_bitrate_estimator = &remote_bitrate_estimator_;
+  dummy_rtp_rtcp_impl_ = new ModuleRtpRtcpImpl(configuration);
+  rtcp_sender_ = new RTCPSender(0, false, system_clock_, dummy_rtp_rtcp_impl_);
+  rtcp_receiver_ = new RTCPReceiver(0, system_clock_, dummy_rtp_rtcp_impl_);
+  test_transport_ = new TestTransport(rtcp_receiver_);
+
+  EXPECT_EQ(0, rtcp_sender_->Init());
+  EXPECT_EQ(0, rtcp_sender_->RegisterSendTransport(test_transport_));
+}
+
+void RtcpFormatRembTest::TearDown() {
+  delete rtcp_sender_;
+  delete rtcp_receiver_;
+  delete dummy_rtp_rtcp_impl_;
+  delete test_transport_;
+  delete system_clock_;
+}
+
+TEST_F(RtcpFormatRembTest, TestBasicAPI) {
+  EXPECT_FALSE(rtcp_sender_->REMB());
+  EXPECT_EQ(0, rtcp_sender_->SetREMBStatus(true));
+  EXPECT_TRUE(rtcp_sender_->REMB());
+  EXPECT_EQ(0, rtcp_sender_->SetREMBStatus(false));
+  EXPECT_FALSE(rtcp_sender_->REMB());
+
+  EXPECT_EQ(0, rtcp_sender_->SetREMBData(1234, 0, NULL));
+}
+
+TEST_F(RtcpFormatRembTest, TestNonCompund) {
+  WebRtc_UWord32 SSRC = 456789;
+  EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpNonCompound));
+  EXPECT_EQ(0, rtcp_sender_->SetREMBData(1234, 1, &SSRC));
+  EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRemb));
+}
+
+TEST_F(RtcpFormatRembTest, TestCompund) {
+  WebRtc_UWord32 SSRCs[2] = {456789, 98765};
+  EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpCompound));
+  EXPECT_EQ(0, rtcp_sender_->SetREMBData(1234, 2, SSRCs));
+  EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRemb));
+}
+
+int main(int argc, char** argv) {
+  ::testing::InitGoogleTest(&argc, argv);
+
+  return RUN_ALL_TESTS();
+}
+
+} // namespace
diff --git a/src/modules/rtp_rtcp/source/rtcp_receiver.cc b/src/modules/rtp_rtcp/source/rtcp_receiver.cc
new file mode 100644
index 0000000..4d0f7d9
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtcp_receiver.cc
@@ -0,0 +1,1375 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtcp_receiver.h"
+
+#include <string.h> //memset
+#include <cassert> //assert
+
+#include "trace.h"
+#include "critical_section_wrapper.h"
+#include "rtcp_utility.h"
+#include "rtp_rtcp_impl.h"
+
+namespace
+{
+    const float FRAC = 4.294967296E9;
+}
+
+namespace webrtc {
+using namespace RTCPUtility;
+using namespace RTCPHelp;
+
+RTCPReceiver::RTCPReceiver(const WebRtc_Word32 id, RtpRtcpClock* clock,
+                           ModuleRtpRtcpImpl* owner)
+    : TMMBRHelp(),
+    _id(id),
+    _clock(*clock),
+    _method(kRtcpOff),
+    _lastReceived(0),
+    _rtpRtcp(*owner),
+      _criticalSectionFeedbacks(
+          CriticalSectionWrapper::CreateCriticalSection()),
+    _cbRtcpFeedback(NULL),
+    _cbRtcpBandwidthObserver(NULL),
+    _cbRtcpIntraFrameObserver(NULL),
+    _criticalSectionRTCPReceiver(
+        CriticalSectionWrapper::CreateCriticalSection()),
+    _SSRC(0),
+    _remoteSSRC(0),
+    _remoteSenderInfo(),
+    _lastReceivedSRNTPsecs(0),
+    _lastReceivedSRNTPfrac(0),
+    _receivedInfoMap(),
+    _packetTimeOutMS(0),
+      _rtt(0) {
+    memset(&_remoteSenderInfo, 0, sizeof(_remoteSenderInfo));
+    WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id, "%s created", __FUNCTION__);
+}
+
+RTCPReceiver::~RTCPReceiver() {
+  delete _criticalSectionRTCPReceiver;
+  delete _criticalSectionFeedbacks;
+
+  while (!_receivedReportBlockMap.empty()) {
+    std::map<WebRtc_UWord32, RTCPReportBlockInformation*>::iterator first =
+        _receivedReportBlockMap.begin();
+    delete first->second;
+    _receivedReportBlockMap.erase(first);
+  }
+  while (!_receivedInfoMap.empty()) {
+    std::map<WebRtc_UWord32, RTCPReceiveInformation*>::iterator first =
+        _receivedInfoMap.begin();
+    delete first->second;
+    _receivedInfoMap.erase(first);
+  }
+  while (!_receivedCnameMap.empty()) {
+    std::map<WebRtc_UWord32, RTCPCnameInformation*>::iterator first =
+        _receivedCnameMap.begin();
+    delete first->second;
+    _receivedCnameMap.erase(first);
+  }
+  WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, _id,
+               "%s deleted", __FUNCTION__);
+}
+
+void
+RTCPReceiver::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+}
+
+RTCPMethod
+RTCPReceiver::Status() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+    return _method;
+}
+
+WebRtc_Word32
+RTCPReceiver::SetRTCPStatus(const RTCPMethod method)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+    _method = method;
+    return 0;
+}
+
+WebRtc_Word64
+RTCPReceiver::LastReceived()
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+    return _lastReceived;
+}
+
+WebRtc_Word32
+RTCPReceiver::SetRemoteSSRC( const WebRtc_UWord32 ssrc)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+    // new SSRC reset old reports
+    memset(&_remoteSenderInfo, 0, sizeof(_remoteSenderInfo));
+    _lastReceivedSRNTPsecs = 0;
+    _lastReceivedSRNTPfrac = 0;
+
+    _remoteSSRC = ssrc;
+    return 0;
+}
+
+void RTCPReceiver::RegisterRtcpObservers(
+    RtcpIntraFrameObserver* intra_frame_callback,
+    RtcpBandwidthObserver* bandwidth_callback,
+    RtcpFeedback* feedback_callback) {
+  CriticalSectionScoped lock(_criticalSectionFeedbacks);
+  _cbRtcpIntraFrameObserver = intra_frame_callback;
+  _cbRtcpBandwidthObserver = bandwidth_callback;
+  _cbRtcpFeedback = feedback_callback;
+}
+
+
+void RTCPReceiver::SetSSRC( const WebRtc_UWord32 ssrc) {
+    CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+    _SSRC = ssrc;
+}
+
+WebRtc_Word32 RTCPReceiver::ResetRTT(const WebRtc_UWord32 remoteSSRC) {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+  RTCPReportBlockInformation* reportBlock =
+      GetReportBlockInformation(remoteSSRC);
+  if (reportBlock == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "\tfailed to GetReportBlockInformation(%u)", remoteSSRC);
+    return -1;
+  }
+  reportBlock->RTT = 0;
+  reportBlock->avgRTT = 0;
+  reportBlock->minRTT = 0;
+  reportBlock->maxRTT = 0;
+  return 0;
+}
+
+WebRtc_Word32 RTCPReceiver::RTT(const WebRtc_UWord32 remoteSSRC,
+                                WebRtc_UWord16* RTT,
+                                WebRtc_UWord16* avgRTT,
+                                WebRtc_UWord16* minRTT,
+                                WebRtc_UWord16* maxRTT) const {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+  RTCPReportBlockInformation* reportBlock =
+      GetReportBlockInformation(remoteSSRC);
+
+  if (reportBlock == NULL) {
+    return -1;
+  }
+  if (RTT) {
+    *RTT = reportBlock->RTT;
+  }
+  if (avgRTT) {
+    *avgRTT = reportBlock->avgRTT;
+  }
+  if (minRTT) {
+    *minRTT = reportBlock->minRTT;
+  }
+  if (maxRTT) {
+    *maxRTT = reportBlock->maxRTT;
+  }
+  return 0;
+}
+
+WebRtc_UWord16 RTCPReceiver::RTT() const {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+  if (!_receivedReportBlockMap.empty()) {
+    return 0;
+  }
+  return _rtt;
+}
+
+int RTCPReceiver::SetRTT(WebRtc_UWord16 rtt) {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+  if (!_receivedReportBlockMap.empty()) {
+    return -1;
+  }
+  _rtt = rtt;
+  return 0;
+}
+
+WebRtc_Word32
+RTCPReceiver::NTP(WebRtc_UWord32 *ReceivedNTPsecs,
+                  WebRtc_UWord32 *ReceivedNTPfrac,
+                  WebRtc_UWord32 *RTCPArrivalTimeSecs,
+                  WebRtc_UWord32 *RTCPArrivalTimeFrac) const
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+    if(ReceivedNTPsecs)
+    {
+        *ReceivedNTPsecs = _remoteSenderInfo.NTPseconds; // NTP from incoming SendReport
+    }
+    if(ReceivedNTPfrac)
+    {
+        *ReceivedNTPfrac = _remoteSenderInfo.NTPfraction;
+    }
+    if(RTCPArrivalTimeFrac)
+    {
+        *RTCPArrivalTimeFrac = _lastReceivedSRNTPfrac; // local NTP time when we received a RTCP packet with a send block
+    }
+    if(RTCPArrivalTimeSecs)
+    {
+        *RTCPArrivalTimeSecs = _lastReceivedSRNTPsecs;
+    }
+    return 0;
+}
+
+WebRtc_Word32
+RTCPReceiver::SenderInfoReceived(RTCPSenderInfo* senderInfo) const
+{
+    if(senderInfo == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+        return -1;
+    }
+    CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+    if(_lastReceivedSRNTPsecs == 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id, "%s No received SR", __FUNCTION__);
+        return -1;
+    }
+    memcpy(senderInfo, &(_remoteSenderInfo), sizeof(RTCPSenderInfo));
+    return 0;
+}
+
+// statistics
+// we can get multiple receive reports when we receive the report from a CE
+WebRtc_Word32 RTCPReceiver::StatisticsReceived(
+    std::vector<RTCPReportBlock>* receiveBlocks) const {
+  assert(receiveBlocks);
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+  std::map<WebRtc_UWord32, RTCPReportBlockInformation*>::const_iterator it =
+      _receivedReportBlockMap.begin();
+
+  while (it != _receivedReportBlockMap.end()) {
+    receiveBlocks->push_back(it->second->remoteReceiveBlock);
+    it++;
+  }
+  return 0;
+}
+
+WebRtc_Word32
+RTCPReceiver::IncomingRTCPPacket(RTCPPacketInformation& rtcpPacketInformation,
+                                 RTCPUtility::RTCPParserV2* rtcpParser)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+    _lastReceived = _clock.GetTimeInMS();
+
+    RTCPUtility::RTCPPacketTypes pktType = rtcpParser->Begin();
+    while (pktType != RTCPUtility::kRtcpNotValidCode)
+    {
+        // Each "case" is responsible for iterate the parser to the
+        // next top level packet.
+        switch (pktType)
+        {
+        case RTCPUtility::kRtcpSrCode:
+        case RTCPUtility::kRtcpRrCode:
+            HandleSenderReceiverReport(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpSdesCode:
+            HandleSDES(*rtcpParser);
+            break;
+        case RTCPUtility::kRtcpXrVoipMetricCode:
+            HandleXRVOIPMetric(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpByeCode:
+            HandleBYE(*rtcpParser);
+            break;
+        case RTCPUtility::kRtcpRtpfbNackCode:
+            HandleNACK(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpRtpfbTmmbrCode:
+            HandleTMMBR(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpRtpfbTmmbnCode:
+            HandleTMMBN(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpRtpfbSrReqCode:
+            HandleSR_REQ(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpPsfbPliCode:
+            HandlePLI(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpPsfbSliCode:
+            HandleSLI(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpPsfbRpsiCode:
+            HandleRPSI(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpExtendedIjCode:
+            HandleIJ(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpPsfbFirCode:
+            HandleFIR(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpPsfbAppCode:
+            HandlePsfbApp(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpAppCode:
+            // generic application messages
+            HandleAPP(*rtcpParser, rtcpPacketInformation);
+            break;
+        case RTCPUtility::kRtcpAppItemCode:
+            // generic application messages
+            HandleAPPItem(*rtcpParser, rtcpPacketInformation);
+            break;
+        default:
+            rtcpParser->Iterate();
+            break;
+        }
+        pktType = rtcpParser->PacketType();
+    }
+    return 0;
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+void
+RTCPReceiver::HandleSenderReceiverReport(RTCPUtility::RTCPParserV2& rtcpParser,
+                                         RTCPPacketInformation& rtcpPacketInformation)
+{
+    RTCPUtility::RTCPPacketTypes rtcpPacketType = rtcpParser.PacketType();
+    const RTCPUtility::RTCPPacket& rtcpPacket   = rtcpParser.Packet();
+
+    assert((rtcpPacketType == RTCPUtility::kRtcpRrCode) || (rtcpPacketType == RTCPUtility::kRtcpSrCode));
+
+    // SR.SenderSSRC
+    // The synchronization source identifier for the originator of this SR packet
+
+    // rtcpPacket.RR.SenderSSRC
+    // The source of the packet sender, same as of SR? or is this a CE?
+
+    const WebRtc_UWord32 remoteSSRC = (rtcpPacketType == RTCPUtility::kRtcpRrCode) ? rtcpPacket.RR.SenderSSRC:rtcpPacket.SR.SenderSSRC;
+    const WebRtc_UWord8  numberOfReportBlocks = (rtcpPacketType == RTCPUtility::kRtcpRrCode) ? rtcpPacket.RR.NumberOfReportBlocks:rtcpPacket.SR.NumberOfReportBlocks;
+
+    rtcpPacketInformation.remoteSSRC = remoteSSRC;
+
+    RTCPReceiveInformation* ptrReceiveInfo = CreateReceiveInformation(remoteSSRC);
+    if (!ptrReceiveInfo)
+    {
+        rtcpParser.Iterate();
+        return;
+    }
+
+    if (rtcpPacketType == RTCPUtility::kRtcpSrCode)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceRtpRtcp, _id,
+            "Received SR(%d). SSRC:0x%x, from SSRC:0x%x, to us %d.", _id, _SSRC, remoteSSRC, (_remoteSSRC == remoteSSRC)?1:0);
+
+        if (_remoteSSRC == remoteSSRC) // have I received RTP packets from this party
+        {
+            // only signal that we have received a SR when we accept one
+            rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpSr;
+
+            // We will only store the send report from one source, but
+            // we will store all the receive block
+
+            // Save the NTP time of this report
+            _remoteSenderInfo.NTPseconds = rtcpPacket.SR.NTPMostSignificant;
+            _remoteSenderInfo.NTPfraction = rtcpPacket.SR.NTPLeastSignificant;
+            _remoteSenderInfo.RTPtimeStamp = rtcpPacket.SR.RTPTimestamp;
+            _remoteSenderInfo.sendPacketCount = rtcpPacket.SR.SenderPacketCount;
+            _remoteSenderInfo.sendOctetCount = rtcpPacket.SR.SenderOctetCount;
+
+            _clock.CurrentNTP(_lastReceivedSRNTPsecs, _lastReceivedSRNTPfrac);
+        }
+        else
+        {
+            rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpRr;
+        }
+    } else
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceRtpRtcp, _id,
+            "Received RR(%d). SSRC:0x%x, from SSRC:0x%x", _id, _SSRC, remoteSSRC);
+
+        rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpRr;
+    }
+    UpdateReceiveInformation(*ptrReceiveInfo);
+
+    rtcpPacketType = rtcpParser.Iterate();
+
+    while (rtcpPacketType == RTCPUtility::kRtcpReportBlockItemCode)
+    {
+        HandleReportBlock(rtcpPacket, rtcpPacketInformation, remoteSSRC, numberOfReportBlocks);
+        rtcpPacketType = rtcpParser.Iterate();
+    }
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+void
+RTCPReceiver::HandleReportBlock(const RTCPUtility::RTCPPacket& rtcpPacket,
+                                RTCPPacketInformation& rtcpPacketInformation,
+                                const WebRtc_UWord32 remoteSSRC,
+                                const WebRtc_UWord8 numberOfReportBlocks) {
+  // This will be called once per report block in the RTCP packet.
+  // We filter out all report blocks that are not for us.
+  // Each packet has max 31 RR blocks.
+  //
+  // We can calc RTT if we send a send report and get a report block back.
+
+  // |rtcpPacket.ReportBlockItem.SSRC| is the SSRC identifier of the source to
+  // which the information in this reception report block pertains.
+
+  // Filter out all report blocks that are not for us.
+  if (rtcpPacket.ReportBlockItem.SSRC != _SSRC) {
+    // This block is not for us ignore it.
+    return;
+  }
+
+  // To avoid problem with acquiring _criticalSectionRTCPSender while holding
+  // _criticalSectionRTCPReceiver.
+  _criticalSectionRTCPReceiver->Leave();
+  WebRtc_UWord32 sendTimeMS =
+      _rtpRtcp.SendTimeOfSendReport(rtcpPacket.ReportBlockItem.LastSR);
+  _criticalSectionRTCPReceiver->Enter();
+
+  RTCPReportBlockInformation* reportBlock =
+      CreateReportBlockInformation(remoteSSRC);
+  if (reportBlock == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "\tfailed to CreateReportBlockInformation(%u)", remoteSSRC);
+    return;
+  }
+  const RTCPPacketReportBlockItem& rb = rtcpPacket.ReportBlockItem;
+  reportBlock->remoteReceiveBlock.remoteSSRC = remoteSSRC;
+  reportBlock->remoteReceiveBlock.sourceSSRC = rb.SSRC;
+  reportBlock->remoteReceiveBlock.fractionLost = rb.FractionLost;
+  reportBlock->remoteReceiveBlock.cumulativeLost =
+      rb.CumulativeNumOfPacketsLost;
+  reportBlock->remoteReceiveBlock.extendedHighSeqNum =
+      rb.ExtendedHighestSequenceNumber;
+  reportBlock->remoteReceiveBlock.jitter = rb.Jitter;
+  reportBlock->remoteReceiveBlock.delaySinceLastSR = rb.DelayLastSR;
+  reportBlock->remoteReceiveBlock.lastSR = rb.LastSR;
+
+  if (rtcpPacket.ReportBlockItem.Jitter > reportBlock->remoteMaxJitter) {
+    reportBlock->remoteMaxJitter = rtcpPacket.ReportBlockItem.Jitter;
+  }
+
+  WebRtc_UWord32 delaySinceLastSendReport =
+      rtcpPacket.ReportBlockItem.DelayLastSR;
+
+  // local NTP time when we received this
+  WebRtc_UWord32 lastReceivedRRNTPsecs = 0;
+  WebRtc_UWord32 lastReceivedRRNTPfrac = 0;
+
+  _clock.CurrentNTP(lastReceivedRRNTPsecs, lastReceivedRRNTPfrac);
+
+  // time when we received this in MS
+  WebRtc_UWord32 receiveTimeMS = ModuleRTPUtility::ConvertNTPTimeToMS(
+      lastReceivedRRNTPsecs, lastReceivedRRNTPfrac);
+
+  // Estimate RTT
+  WebRtc_UWord32 d = (delaySinceLastSendReport & 0x0000ffff) * 1000;
+  d /= 65536;
+  d += ((delaySinceLastSendReport & 0xffff0000) >> 16) * 1000;
+
+  WebRtc_Word32 RTT = 0;
+
+  if (sendTimeMS > 0) {
+    RTT = receiveTimeMS - d - sendTimeMS;
+    if (RTT <= 0) {
+      RTT = 1;
+    }
+    if (RTT > reportBlock->maxRTT) {
+      // store max RTT
+      reportBlock->maxRTT = (WebRtc_UWord16) RTT;
+    }
+    if (reportBlock->minRTT == 0) {
+      // first RTT
+      reportBlock->minRTT = (WebRtc_UWord16) RTT;
+    } else if (RTT < reportBlock->minRTT) {
+      // Store min RTT
+      reportBlock->minRTT = (WebRtc_UWord16) RTT;
+    }
+    // store last RTT
+    reportBlock->RTT = (WebRtc_UWord16) RTT;
+
+    // store average RTT
+    if (reportBlock->numAverageCalcs != 0) {
+      float ac = static_cast<float> (reportBlock->numAverageCalcs);
+      float newAverage = ((ac / (ac + 1)) * reportBlock->avgRTT)
+          + ((1 / (ac + 1)) * RTT);
+      reportBlock->avgRTT = static_cast<int> (newAverage + 0.5f);
+    } else {
+      // first RTT
+      reportBlock->avgRTT = (WebRtc_UWord16) RTT;
+    }
+    reportBlock->numAverageCalcs++;
+  }
+
+  WEBRTC_TRACE(kTraceDebug, kTraceRtpRtcp, _id,
+               " -> Received report block(%d), from SSRC:0x%x, RTT:%d, loss:%d",
+               _id, remoteSSRC, RTT, rtcpPacket.ReportBlockItem.FractionLost);
+
+  // rtcpPacketInformation
+  rtcpPacketInformation.AddReportInfo(
+      reportBlock->remoteReceiveBlock.fractionLost, (WebRtc_UWord16) RTT,
+      reportBlock->remoteReceiveBlock.extendedHighSeqNum,
+      reportBlock->remoteReceiveBlock.jitter);
+}
+
+RTCPReportBlockInformation*
+RTCPReceiver::CreateReportBlockInformation(WebRtc_UWord32 remoteSSRC) {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+  std::map<WebRtc_UWord32, RTCPReportBlockInformation*>::iterator it =
+      _receivedReportBlockMap.find(remoteSSRC);
+
+  RTCPReportBlockInformation* ptrReportBlockInfo = NULL;
+  if (it != _receivedReportBlockMap.end()) {
+    ptrReportBlockInfo = it->second;
+  } else {
+    ptrReportBlockInfo = new RTCPReportBlockInformation;
+    _receivedReportBlockMap[remoteSSRC] = ptrReportBlockInfo;
+  }
+  return ptrReportBlockInfo;
+}
+
+RTCPReportBlockInformation*
+RTCPReceiver::GetReportBlockInformation(WebRtc_UWord32 remoteSSRC) const {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+  std::map<WebRtc_UWord32, RTCPReportBlockInformation*>::const_iterator it =
+      _receivedReportBlockMap.find(remoteSSRC);
+
+  if (it == _receivedReportBlockMap.end()) {
+    return NULL;
+  }
+  return it->second;
+}
+
+RTCPCnameInformation*
+RTCPReceiver::CreateCnameInformation(WebRtc_UWord32 remoteSSRC) {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+  std::map<WebRtc_UWord32, RTCPCnameInformation*>::iterator it =
+      _receivedCnameMap.find(remoteSSRC);
+
+  if (it != _receivedCnameMap.end()) {
+    return it->second;
+  }
+  RTCPCnameInformation* cnameInfo = new RTCPCnameInformation;
+  memset(cnameInfo->name, 0, RTCP_CNAME_SIZE);
+  _receivedCnameMap[remoteSSRC] = cnameInfo;
+  return cnameInfo;
+}
+
+RTCPCnameInformation*
+RTCPReceiver::GetCnameInformation(WebRtc_UWord32 remoteSSRC) const {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+  std::map<WebRtc_UWord32, RTCPCnameInformation*>::const_iterator it =
+      _receivedCnameMap.find(remoteSSRC);
+
+  if (it == _receivedCnameMap.end()) {
+    return NULL;
+  }
+  return it->second;
+}
+
+RTCPReceiveInformation*
+RTCPReceiver::CreateReceiveInformation(WebRtc_UWord32 remoteSSRC) {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+  std::map<WebRtc_UWord32, RTCPReceiveInformation*>::iterator it =
+      _receivedInfoMap.find(remoteSSRC);
+
+  if (it != _receivedInfoMap.end()) {
+    return it->second;
+  }
+  RTCPReceiveInformation* receiveInfo = new RTCPReceiveInformation;
+  _receivedInfoMap[remoteSSRC] = receiveInfo;
+  return receiveInfo;
+}
+
+RTCPReceiveInformation*
+RTCPReceiver::GetReceiveInformation(WebRtc_UWord32 remoteSSRC) {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+  std::map<WebRtc_UWord32, RTCPReceiveInformation*>::iterator it =
+      _receivedInfoMap.find(remoteSSRC);
+  if (it == _receivedInfoMap.end()) {
+    return NULL;
+  }
+  return it->second;
+}
+
+void RTCPReceiver::UpdateReceiveInformation(
+    RTCPReceiveInformation& receiveInformation) {
+  // Update that this remote is alive
+  receiveInformation.lastTimeReceived = _clock.GetTimeInMS();
+}
+
+bool RTCPReceiver::UpdateRTCPReceiveInformationTimers() {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+  bool updateBoundingSet = false;
+  WebRtc_Word64 timeNow = _clock.GetTimeInMS();
+
+  std::map<WebRtc_UWord32, RTCPReceiveInformation*>::iterator receiveInfoIt =
+      _receivedInfoMap.begin();
+
+  while (receiveInfoIt != _receivedInfoMap.end()) {
+    RTCPReceiveInformation* receiveInfo = receiveInfoIt->second;
+    if (receiveInfo == NULL) {
+      return updateBoundingSet;
+    }
+    // time since last received rtcp packet
+    // when we dont have a lastTimeReceived and the object is marked
+    // readyForDelete it's removed from the map
+    if (receiveInfo->lastTimeReceived) {
+      /// use audio define since we don't know what interval the remote peer is
+      // using
+      if ((timeNow - receiveInfo->lastTimeReceived) >
+          5 * RTCP_INTERVAL_AUDIO_MS) {
+        // no rtcp packet for the last five regular intervals, reset limitations
+        receiveInfo->TmmbrSet.clearSet();
+        // prevent that we call this over and over again
+        receiveInfo->lastTimeReceived = 0;
+        // send new TMMBN to all channels using the default codec
+        updateBoundingSet = true;
+      }
+      receiveInfoIt++;
+    } else if (receiveInfo->readyForDelete) {
+      // store our current receiveInfoItem
+      std::map<WebRtc_UWord32, RTCPReceiveInformation*>::iterator
+      receiveInfoItemToBeErased = receiveInfoIt;
+      receiveInfoIt++;
+      delete receiveInfoItemToBeErased->second;
+      _receivedInfoMap.erase(receiveInfoItemToBeErased);
+    } else {
+      receiveInfoIt++;
+    }
+  }
+  return updateBoundingSet;
+}
+
+WebRtc_Word32 RTCPReceiver::BoundingSet(bool &tmmbrOwner,
+                                        TMMBRSet* boundingSetRec) {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+  std::map<WebRtc_UWord32, RTCPReceiveInformation*>::iterator receiveInfoIt =
+      _receivedInfoMap.find(_remoteSSRC);
+
+  if (receiveInfoIt == _receivedInfoMap.end()) {
+    return -1;
+  }
+  RTCPReceiveInformation* receiveInfo = receiveInfoIt->second;
+  if (receiveInfo == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s failed to get RTCPReceiveInformation",
+                 __FUNCTION__);
+    return -1;
+  }
+  if (receiveInfo->TmmbnBoundingSet.lengthOfSet() > 0) {
+    boundingSetRec->VerifyAndAllocateSet(
+        receiveInfo->TmmbnBoundingSet.lengthOfSet() + 1);
+    for(WebRtc_UWord32 i=0; i< receiveInfo->TmmbnBoundingSet.lengthOfSet();
+        i++) {
+      if(receiveInfo->TmmbnBoundingSet.Ssrc(i) == _SSRC) {
+        // owner of bounding set
+        tmmbrOwner = true;
+      }
+      boundingSetRec->SetEntry(i,
+                               receiveInfo->TmmbnBoundingSet.Tmmbr(i),
+                               receiveInfo->TmmbnBoundingSet.PacketOH(i),
+                               receiveInfo->TmmbnBoundingSet.Ssrc(i));
+    }
+  }
+  return receiveInfo->TmmbnBoundingSet.lengthOfSet();
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+void
+RTCPReceiver::HandleSDES(RTCPUtility::RTCPParserV2& rtcpParser)
+{
+    RTCPUtility::RTCPPacketTypes pktType = rtcpParser.Iterate();
+    while (pktType == RTCPUtility::kRtcpSdesChunkCode)
+    {
+        HandleSDESChunk(rtcpParser);
+        pktType = rtcpParser.Iterate();
+    }
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+void RTCPReceiver::HandleSDESChunk(RTCPUtility::RTCPParserV2& rtcpParser) {
+  const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+  RTCPCnameInformation* cnameInfo =
+      CreateCnameInformation(rtcpPacket.CName.SenderSSRC);
+  assert(cnameInfo);
+
+  cnameInfo->name[RTCP_CNAME_SIZE - 1] = 0;
+  strncpy(cnameInfo->name, rtcpPacket.CName.CName, RTCP_CNAME_SIZE - 1);
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+void
+RTCPReceiver::HandleNACK(RTCPUtility::RTCPParserV2& rtcpParser,
+                         RTCPPacketInformation& rtcpPacketInformation)
+{
+    const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+    if (_SSRC != rtcpPacket.NACK.MediaSSRC)
+    {
+        // Not to us.
+        rtcpParser.Iterate();
+        return;
+    }
+
+    rtcpPacketInformation.ResetNACKPacketIdArray();
+
+    RTCPUtility::RTCPPacketTypes pktType = rtcpParser.Iterate();
+    while (pktType == RTCPUtility::kRtcpRtpfbNackItemCode)
+    {
+        HandleNACKItem(rtcpPacket, rtcpPacketInformation);
+        pktType = rtcpParser.Iterate();
+    }
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+void
+RTCPReceiver::HandleNACKItem(const RTCPUtility::RTCPPacket& rtcpPacket,
+                             RTCPPacketInformation& rtcpPacketInformation)
+{
+    rtcpPacketInformation.AddNACKPacket(rtcpPacket.NACKItem.PacketID);
+
+    WebRtc_UWord16 bitMask = rtcpPacket.NACKItem.BitMask;
+    if(bitMask)
+    {
+        for(int i=1; i <= 16; ++i)
+        {
+            if(bitMask & 0x01)
+            {
+                rtcpPacketInformation.AddNACKPacket(rtcpPacket.NACKItem.PacketID + i);
+            }
+            bitMask = bitMask >>1;
+        }
+    }
+
+    rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpNack;
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+void RTCPReceiver::HandleBYE(RTCPUtility::RTCPParserV2& rtcpParser) {
+  const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+
+  // clear our lists
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+  std::map<WebRtc_UWord32, RTCPReportBlockInformation*>::iterator
+      reportBlockInfoIt = _receivedReportBlockMap.find(
+          rtcpPacket.BYE.SenderSSRC);
+
+  if (reportBlockInfoIt != _receivedReportBlockMap.end()) {
+    delete reportBlockInfoIt->second;
+    _receivedReportBlockMap.erase(reportBlockInfoIt);
+  }
+  //  we can't delete it due to TMMBR
+  std::map<WebRtc_UWord32, RTCPReceiveInformation*>::iterator receiveInfoIt =
+      _receivedInfoMap.find(rtcpPacket.BYE.SenderSSRC);
+
+  if (receiveInfoIt != _receivedInfoMap.end()) {
+    receiveInfoIt->second->readyForDelete = true;
+  }
+
+  std::map<WebRtc_UWord32, RTCPCnameInformation*>::iterator cnameInfoIt =
+      _receivedCnameMap.find(rtcpPacket.BYE.SenderSSRC);
+
+  if (cnameInfoIt != _receivedCnameMap.end()) {
+    delete cnameInfoIt->second;
+    _receivedCnameMap.erase(cnameInfoIt);
+  }
+  rtcpParser.Iterate();
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+void
+RTCPReceiver::HandleXRVOIPMetric(RTCPUtility::RTCPParserV2& rtcpParser,
+                                 RTCPPacketInformation& rtcpPacketInformation)
+{
+    const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+
+    CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+    if(rtcpPacket.XRVOIPMetricItem.SSRC == _SSRC)
+    {
+        // Store VoIP metrics block if it's about me
+        // from OriginatorSSRC do we filter it?
+        // rtcpPacket.XR.OriginatorSSRC;
+
+        RTCPVoIPMetric receivedVoIPMetrics;
+        receivedVoIPMetrics.burstDensity = rtcpPacket.XRVOIPMetricItem.burstDensity;
+        receivedVoIPMetrics.burstDuration = rtcpPacket.XRVOIPMetricItem.burstDuration;
+        receivedVoIPMetrics.discardRate = rtcpPacket.XRVOIPMetricItem.discardRate;
+        receivedVoIPMetrics.endSystemDelay = rtcpPacket.XRVOIPMetricItem.endSystemDelay;
+        receivedVoIPMetrics.extRfactor = rtcpPacket.XRVOIPMetricItem.extRfactor;
+        receivedVoIPMetrics.gapDensity = rtcpPacket.XRVOIPMetricItem.gapDensity;
+        receivedVoIPMetrics.gapDuration = rtcpPacket.XRVOIPMetricItem.gapDuration;
+        receivedVoIPMetrics.Gmin = rtcpPacket.XRVOIPMetricItem.Gmin;
+        receivedVoIPMetrics.JBabsMax = rtcpPacket.XRVOIPMetricItem.JBabsMax;
+        receivedVoIPMetrics.JBmax = rtcpPacket.XRVOIPMetricItem.JBmax;
+        receivedVoIPMetrics.JBnominal = rtcpPacket.XRVOIPMetricItem.JBnominal;
+        receivedVoIPMetrics.lossRate = rtcpPacket.XRVOIPMetricItem.lossRate;
+        receivedVoIPMetrics.MOSCQ = rtcpPacket.XRVOIPMetricItem.MOSCQ;
+        receivedVoIPMetrics.MOSLQ = rtcpPacket.XRVOIPMetricItem.MOSLQ;
+        receivedVoIPMetrics.noiseLevel = rtcpPacket.XRVOIPMetricItem.noiseLevel;
+        receivedVoIPMetrics.RERL = rtcpPacket.XRVOIPMetricItem.RERL;
+        receivedVoIPMetrics.Rfactor = rtcpPacket.XRVOIPMetricItem.Rfactor;
+        receivedVoIPMetrics.roundTripDelay = rtcpPacket.XRVOIPMetricItem.roundTripDelay;
+        receivedVoIPMetrics.RXconfig = rtcpPacket.XRVOIPMetricItem.RXconfig;
+        receivedVoIPMetrics.signalLevel = rtcpPacket.XRVOIPMetricItem.signalLevel;
+
+        rtcpPacketInformation.AddVoIPMetric(&receivedVoIPMetrics);
+
+        rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpXrVoipMetric; // received signal
+    }
+    rtcpParser.Iterate();
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+void RTCPReceiver::HandlePLI(RTCPUtility::RTCPParserV2& rtcpParser,
+                             RTCPPacketInformation& rtcpPacketInformation) {
+  const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+  if (_SSRC == rtcpPacket.PLI.MediaSSRC) {
+    // Received a signal that we need to send a new key frame.
+    rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpPli;
+  }
+  rtcpParser.Iterate();
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+void
+RTCPReceiver::HandleTMMBR(RTCPUtility::RTCPParserV2& rtcpParser,
+                          RTCPPacketInformation& rtcpPacketInformation)
+{
+    const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+
+    WebRtc_UWord32 senderSSRC = rtcpPacket.TMMBR.SenderSSRC;
+    RTCPReceiveInformation* ptrReceiveInfo = GetReceiveInformation(senderSSRC);
+    if (ptrReceiveInfo == NULL)
+    {
+        // This remote SSRC must be saved before.
+        rtcpParser.Iterate();
+        return;
+    }
+    if(rtcpPacket.TMMBR.MediaSSRC)
+    {
+        // rtcpPacket.TMMBR.MediaSSRC SHOULD be 0 if same as SenderSSRC
+        // in relay mode this is a valid number
+        senderSSRC = rtcpPacket.TMMBR.MediaSSRC;
+    }
+
+    // Use packet length to calc max number of TMMBR blocks
+    // each TMMBR block is 8 bytes
+    ptrdiff_t maxNumOfTMMBRBlocks = rtcpParser.LengthLeft() / 8;
+
+    // sanity
+    if(maxNumOfTMMBRBlocks > 200) // we can't have more than what's in one packet
+    {
+        assert(false);
+        rtcpParser.Iterate();
+        return;
+    }
+    ptrReceiveInfo->VerifyAndAllocateTMMBRSet((WebRtc_UWord32)maxNumOfTMMBRBlocks);
+
+    RTCPUtility::RTCPPacketTypes pktType = rtcpParser.Iterate();
+    while (pktType == RTCPUtility::kRtcpRtpfbTmmbrItemCode)
+    {
+        HandleTMMBRItem(*ptrReceiveInfo, rtcpPacket, rtcpPacketInformation, senderSSRC);
+        pktType = rtcpParser.Iterate();
+    }
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+void
+RTCPReceiver::HandleTMMBRItem(RTCPReceiveInformation& receiveInfo,
+                              const RTCPUtility::RTCPPacket& rtcpPacket,
+                              RTCPPacketInformation& rtcpPacketInformation,
+                              const WebRtc_UWord32 senderSSRC)
+{
+    if (_SSRC == rtcpPacket.TMMBRItem.SSRC &&
+        rtcpPacket.TMMBRItem.MaxTotalMediaBitRate > 0)
+    {
+        receiveInfo.InsertTMMBRItem(senderSSRC, rtcpPacket.TMMBRItem,
+                                    _clock.GetTimeInMS());
+        rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpTmmbr;
+    }
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+void
+RTCPReceiver::HandleTMMBN(RTCPUtility::RTCPParserV2& rtcpParser,
+                          RTCPPacketInformation& rtcpPacketInformation)
+{
+    const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+    RTCPReceiveInformation* ptrReceiveInfo = GetReceiveInformation(rtcpPacket.TMMBN.SenderSSRC);
+    if (ptrReceiveInfo == NULL)
+    {
+        // This remote SSRC must be saved before.
+        rtcpParser.Iterate();
+        return;
+    }
+    rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpTmmbn;
+    // Use packet length to calc max number of TMMBN blocks
+    // each TMMBN block is 8 bytes
+    ptrdiff_t maxNumOfTMMBNBlocks = rtcpParser.LengthLeft() / 8;
+
+    // sanity
+    if(maxNumOfTMMBNBlocks > 200) // we cant have more than what's in one packet
+    {
+        assert(false);
+        rtcpParser.Iterate();
+        return;
+    }
+
+    ptrReceiveInfo->VerifyAndAllocateBoundingSet((WebRtc_UWord32)maxNumOfTMMBNBlocks);
+
+    RTCPUtility::RTCPPacketTypes pktType = rtcpParser.Iterate();
+    while (pktType == RTCPUtility::kRtcpRtpfbTmmbnItemCode)
+    {
+        HandleTMMBNItem(*ptrReceiveInfo, rtcpPacket);
+        pktType = rtcpParser.Iterate();
+    }
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+void
+RTCPReceiver::HandleSR_REQ(RTCPUtility::RTCPParserV2& rtcpParser,
+                           RTCPPacketInformation& rtcpPacketInformation)
+{
+    rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpSrReq;
+    rtcpParser.Iterate();
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+void
+RTCPReceiver::HandleTMMBNItem(RTCPReceiveInformation& receiveInfo,
+                              const RTCPUtility::RTCPPacket& rtcpPacket)
+{
+    receiveInfo.TmmbnBoundingSet.AddEntry(
+        rtcpPacket.TMMBNItem.MaxTotalMediaBitRate,
+        rtcpPacket.TMMBNItem.MeasuredOverhead,
+        rtcpPacket.TMMBNItem.SSRC);
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+void
+RTCPReceiver::HandleSLI(RTCPUtility::RTCPParserV2& rtcpParser,
+                        RTCPPacketInformation& rtcpPacketInformation)
+{
+    const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+    RTCPUtility::RTCPPacketTypes pktType = rtcpParser.Iterate();
+    while (pktType == RTCPUtility::kRtcpPsfbSliItemCode)
+    {
+        HandleSLIItem(rtcpPacket, rtcpPacketInformation);
+        pktType = rtcpParser.Iterate();
+    }
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+void
+RTCPReceiver::HandleSLIItem(const RTCPUtility::RTCPPacket& rtcpPacket,
+                            RTCPPacketInformation& rtcpPacketInformation)
+{
+    // in theory there could be multiple slices lost
+    rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpSli; // received signal that we need to refresh a slice
+    rtcpPacketInformation.sliPictureId = rtcpPacket.SLIItem.PictureId;
+}
+
+void
+RTCPReceiver::HandleRPSI(RTCPUtility::RTCPParserV2& rtcpParser,
+                         RTCPHelp::RTCPPacketInformation& rtcpPacketInformation)
+{
+    const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+    RTCPUtility::RTCPPacketTypes pktType = rtcpParser.Iterate();
+    if(pktType == RTCPUtility::kRtcpPsfbRpsiCode)
+    {
+        rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpRpsi; // received signal that we have a confirmed reference picture
+        if(rtcpPacket.RPSI.NumberOfValidBits%8 != 0)
+        {
+            // to us unknown
+            // continue
+            rtcpParser.Iterate();
+            return;
+        }
+        rtcpPacketInformation.rpsiPictureId = 0;
+
+        // convert NativeBitString to rpsiPictureId
+        WebRtc_UWord8 numberOfBytes = rtcpPacket.RPSI.NumberOfValidBits /8;
+        for(WebRtc_UWord8 n = 0; n < (numberOfBytes-1); n++)
+        {
+            rtcpPacketInformation.rpsiPictureId += (rtcpPacket.RPSI.NativeBitString[n] & 0x7f);
+            rtcpPacketInformation.rpsiPictureId <<= 7; // prepare next
+        }
+        rtcpPacketInformation.rpsiPictureId += (rtcpPacket.RPSI.NativeBitString[numberOfBytes-1] & 0x7f);
+    }
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+void RTCPReceiver::HandlePsfbApp(RTCPUtility::RTCPParserV2& rtcpParser,
+                                 RTCPPacketInformation& rtcpPacketInformation) {
+  RTCPUtility::RTCPPacketTypes pktType = rtcpParser.Iterate();
+  if (pktType == RTCPUtility::kRtcpPsfbRembCode) {
+    pktType = rtcpParser.Iterate();
+    if (pktType == RTCPUtility::kRtcpPsfbRembItemCode) {
+      HandleREMBItem(rtcpParser, rtcpPacketInformation);
+      rtcpParser.Iterate();
+    }
+  }
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+void
+RTCPReceiver::HandleIJ(RTCPUtility::RTCPParserV2& rtcpParser,
+                       RTCPPacketInformation& rtcpPacketInformation)
+{
+    const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+
+    RTCPUtility::RTCPPacketTypes pktType = rtcpParser.Iterate();
+    while (pktType == RTCPUtility::kRtcpExtendedIjItemCode)
+    {
+        HandleIJItem(rtcpPacket, rtcpPacketInformation);
+        pktType = rtcpParser.Iterate();
+    }
+}
+
+void
+RTCPReceiver::HandleIJItem(const RTCPUtility::RTCPPacket& rtcpPacket,
+                           RTCPPacketInformation& rtcpPacketInformation)
+{
+    rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpTransmissionTimeOffset;
+    rtcpPacketInformation.interArrivalJitter =
+    rtcpPacket.ExtendedJitterReportItem.Jitter;
+}
+
+void RTCPReceiver::HandleREMBItem(
+    RTCPUtility::RTCPParserV2& rtcpParser,
+    RTCPPacketInformation& rtcpPacketInformation) {
+  const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+  rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpRemb;
+  rtcpPacketInformation.receiverEstimatedMaxBitrate =
+      rtcpPacket.REMBItem.BitRate;
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+void RTCPReceiver::HandleFIR(RTCPUtility::RTCPParserV2& rtcpParser,
+                             RTCPPacketInformation& rtcpPacketInformation) {
+  const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+  RTCPReceiveInformation* ptrReceiveInfo =
+      GetReceiveInformation(rtcpPacket.FIR.SenderSSRC);
+
+  RTCPUtility::RTCPPacketTypes pktType = rtcpParser.Iterate();
+  while (pktType == RTCPUtility::kRtcpPsfbFirItemCode) {
+    HandleFIRItem(ptrReceiveInfo, rtcpPacket, rtcpPacketInformation);
+    pktType = rtcpParser.Iterate();
+  }
+}
+
+// no need for critsect we have _criticalSectionRTCPReceiver
+void RTCPReceiver::HandleFIRItem(RTCPReceiveInformation* receiveInfo,
+                                 const RTCPUtility::RTCPPacket& rtcpPacket,
+                                 RTCPPacketInformation& rtcpPacketInformation) {
+  // Is it our sender that is requested to generate a new keyframe
+  if (_SSRC != rtcpPacket.FIRItem.SSRC) {
+    return;
+  }
+  // rtcpPacket.FIR.MediaSSRC SHOULD be 0 but we ignore to check it
+  // we don't know who this originate from
+  if (receiveInfo) {
+    // check if we have reported this FIRSequenceNumber before
+    if (rtcpPacket.FIRItem.CommandSequenceNumber !=
+        receiveInfo->lastFIRSequenceNumber) {
+      WebRtc_Word64 now = _clock.GetTimeInMS();
+      // sanity; don't go crazy with the callbacks
+      if ((now - receiveInfo->lastFIRRequest) > RTCP_MIN_FRAME_LENGTH_MS) {
+        receiveInfo->lastFIRRequest = now;
+        receiveInfo->lastFIRSequenceNumber =
+            rtcpPacket.FIRItem.CommandSequenceNumber;
+        // received signal that we need to send a new key frame
+        rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpFir;
+      }
+    }
+  } else {
+    // received signal that we need to send a new key frame
+    rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpFir;
+  }
+}
+
+void
+RTCPReceiver::HandleAPP(RTCPUtility::RTCPParserV2& rtcpParser,
+                        RTCPPacketInformation& rtcpPacketInformation)
+{
+    const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+
+    rtcpPacketInformation.rtcpPacketTypeFlags |= kRtcpApp;
+    rtcpPacketInformation.applicationSubType = rtcpPacket.APP.SubType;
+    rtcpPacketInformation.applicationName = rtcpPacket.APP.Name;
+
+    rtcpParser.Iterate();
+}
+
+void
+RTCPReceiver::HandleAPPItem(RTCPUtility::RTCPParserV2& rtcpParser,
+                           RTCPPacketInformation& rtcpPacketInformation)
+{
+    const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+
+    rtcpPacketInformation.AddApplicationData(rtcpPacket.APP.Data, rtcpPacket.APP.Size);
+
+    rtcpParser.Iterate();
+}
+
+WebRtc_Word32 RTCPReceiver::UpdateTMMBR() {
+  WebRtc_Word32 numBoundingSet = 0;
+  WebRtc_UWord32 bitrate = 0;
+  WebRtc_UWord32 accNumCandidates = 0;
+
+  WebRtc_Word32 size = TMMBRReceived(0, 0, NULL);
+  if (size > 0) {
+    TMMBRSet* candidateSet = VerifyAndAllocateCandidateSet(size);
+    // Get candidate set from receiver.
+    accNumCandidates = TMMBRReceived(size, accNumCandidates, candidateSet);
+  } else {
+    // Candidate set empty.
+    VerifyAndAllocateCandidateSet(0);  // resets candidate set
+  }
+  // Find bounding set
+  TMMBRSet* boundingSet = NULL;
+  numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+  if (numBoundingSet == -1) {
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
+                 "Failed to find TMMBR bounding set.");
+    return -1;
+  }
+  // Set bounding set
+  // Inform remote clients about the new bandwidth
+  // inform the remote client
+  _rtpRtcp.SetTMMBN(boundingSet);
+
+  // might trigger a TMMBN
+  if (numBoundingSet == 0) {
+    // owner of max bitrate request has timed out
+    // empty bounding set has been sent
+    return 0;
+  }
+  // Get net bitrate from bounding set depending on sent packet rate
+  if (CalcMinBitRate(&bitrate)) {
+    // we have a new bandwidth estimate on this channel
+    CriticalSectionScoped lock(_criticalSectionFeedbacks);
+    if (_cbRtcpBandwidthObserver) {
+        _cbRtcpBandwidthObserver->OnReceivedEstimatedBitrate(bitrate * 1000);
+      WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id,
+                   "Set TMMBR request:%d kbps", bitrate);
+    }
+  }
+  return 0;
+}
+
+// Holding no Critical section
+void RTCPReceiver::TriggerCallbacksFromRTCPPacket(
+    RTCPPacketInformation& rtcpPacketInformation) {
+  // Process TMMBR and REMB first to avoid multiple callbacks
+  // to OnNetworkChanged.
+  if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpTmmbr) {
+    WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
+                 "SIG [RTCP] Incoming TMMBR to id:%d", _id);
+
+    // Might trigger a OnReceivedBandwidthEstimateUpdate.
+    UpdateTMMBR();
+  }
+  if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSrReq) {
+    _rtpRtcp.OnRequestSendReport();
+  }
+  if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpNack) {
+    if (rtcpPacketInformation.nackSequenceNumbersLength > 0) {
+      WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
+                   "SIG [RTCP] Incoming NACK length:%d",
+                   rtcpPacketInformation.nackSequenceNumbersLength);
+      _rtpRtcp.OnReceivedNACK(
+          rtcpPacketInformation.nackSequenceNumbersLength,
+          rtcpPacketInformation.nackSequenceNumbers);
+    }
+  }
+  {
+    CriticalSectionScoped lock(_criticalSectionFeedbacks);
+
+    // We need feedback that we have received a report block(s) so that we
+    // can generate a new packet in a conference relay scenario, one received
+    // report can generate several RTCP packets, based on number relayed/mixed
+    // a send report block should go out to all receivers.
+    if (_cbRtcpIntraFrameObserver) {
+      if ((rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpPli) ||
+          (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpFir)) {
+        if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpPli) {
+          WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
+                       "SIG [RTCP] Incoming PLI from SSRC:0x%x",
+                       rtcpPacketInformation.remoteSSRC);
+        } else {
+          WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
+                       "SIG [RTCP] Incoming FIR from SSRC:0x%x",
+                       rtcpPacketInformation.remoteSSRC);
+        }
+        _cbRtcpIntraFrameObserver->OnReceivedIntraFrameRequest(
+            rtcpPacketInformation.remoteSSRC);
+      }
+      if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSli) {
+        _cbRtcpIntraFrameObserver->OnReceivedSLI(
+            rtcpPacketInformation.remoteSSRC,
+            rtcpPacketInformation.sliPictureId);
+      }
+      if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpRpsi) {
+        _cbRtcpIntraFrameObserver->OnReceivedRPSI(
+            rtcpPacketInformation.remoteSSRC,
+            rtcpPacketInformation.rpsiPictureId);
+      }
+    }
+    if (_cbRtcpBandwidthObserver) {
+      if (rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpRemb) {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceRtpRtcp, _id,
+                     "SIG [RTCP] Incoming REMB:%d",
+                     rtcpPacketInformation.receiverEstimatedMaxBitrate);
+        _cbRtcpBandwidthObserver->OnReceivedEstimatedBitrate(
+            rtcpPacketInformation.receiverEstimatedMaxBitrate);
+      }
+      if ((rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSr ||
+          rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpRr) &&
+          rtcpPacketInformation.reportBlock) {
+        WebRtc_Word64 now = _clock.GetTimeInMS();
+        _cbRtcpBandwidthObserver->OnReceivedRtcpReceiverReport(
+            rtcpPacketInformation.remoteSSRC,
+            rtcpPacketInformation.fractionLost,
+            rtcpPacketInformation.roundTripTime,
+            rtcpPacketInformation.lastReceivedExtendedHighSeqNum,
+            now);
+      }
+    }
+    if(_cbRtcpFeedback) {
+      if(rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpSr) {
+        _cbRtcpFeedback->OnSendReportReceived(_id,
+            rtcpPacketInformation.remoteSSRC);
+      } else {
+        _cbRtcpFeedback->OnReceiveReportReceived(_id,
+            rtcpPacketInformation.remoteSSRC);
+      }
+      if(rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpXrVoipMetric) {
+        _cbRtcpFeedback->OnXRVoIPMetricReceived(_id,
+            rtcpPacketInformation.VoIPMetric);
+      }
+      if(rtcpPacketInformation.rtcpPacketTypeFlags & kRtcpApp) {
+        _cbRtcpFeedback->OnApplicationDataReceived(_id,
+            rtcpPacketInformation.applicationSubType,
+            rtcpPacketInformation.applicationName,
+            rtcpPacketInformation.applicationLength,
+            rtcpPacketInformation.applicationData);
+      }
+    }
+  }
+}
+
+WebRtc_Word32 RTCPReceiver::CNAME(const WebRtc_UWord32 remoteSSRC,
+                                  char cName[RTCP_CNAME_SIZE]) const {
+  assert(cName);
+
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+  RTCPCnameInformation* cnameInfo = GetCnameInformation(remoteSSRC);
+  if (cnameInfo == NULL) {
+    return -1;
+  }
+  cName[RTCP_CNAME_SIZE - 1] = 0;
+  strncpy(cName, cnameInfo->name, RTCP_CNAME_SIZE - 1);
+  return 0;
+}
+
+// no callbacks allowed inside this function
+WebRtc_Word32 RTCPReceiver::TMMBRReceived(const WebRtc_UWord32 size,
+                                          const WebRtc_UWord32 accNumCandidates,
+                                          TMMBRSet* candidateSet) const {
+  CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+
+  std::map<WebRtc_UWord32, RTCPReceiveInformation*>::const_iterator
+      receiveInfoIt = _receivedInfoMap.begin();
+  if (receiveInfoIt == _receivedInfoMap.end()) {
+    return -1;
+  }
+  WebRtc_UWord32 num = accNumCandidates;
+  if (candidateSet) {
+    while( num < size && receiveInfoIt != _receivedInfoMap.end()) {
+      RTCPReceiveInformation* receiveInfo = receiveInfoIt->second;
+      if (receiveInfo == NULL) {
+        return 0;
+      }
+      for (WebRtc_UWord32 i = 0;
+           (num < size) && (i < receiveInfo->TmmbrSet.lengthOfSet()); i++) {
+        if (receiveInfo->GetTMMBRSet(i, num, candidateSet,
+                                     _clock.GetTimeInMS()) == 0) {
+          num++;
+        }
+      }
+      receiveInfoIt++;
+    }
+  } else {
+    while (receiveInfoIt != _receivedInfoMap.end()) {
+      RTCPReceiveInformation* receiveInfo = receiveInfoIt->second;
+      if(receiveInfo == NULL) {
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                     "%s failed to get RTCPReceiveInformation",
+                     __FUNCTION__);
+        return -1;
+      }
+      num += receiveInfo->TmmbrSet.lengthOfSet();
+      receiveInfoIt++;
+    }
+  }
+  return num;
+}
+
+WebRtc_Word32
+RTCPReceiver::SetPacketTimeout(const WebRtc_UWord32 timeoutMS)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+    _packetTimeOutMS = timeoutMS;
+    return 0;
+}
+
+void RTCPReceiver::PacketTimeout()
+{
+    if(_packetTimeOutMS == 0)
+    {
+        // not configured
+        return;
+    }
+
+    bool packetTimeOut = false;
+    {
+        CriticalSectionScoped lock(_criticalSectionRTCPReceiver);
+        if(_lastReceived == 0)
+        {
+            // not active
+            return;
+        }
+
+        WebRtc_Word64 now = _clock.GetTimeInMS();
+        if(now - _lastReceived > _packetTimeOutMS)
+        {
+            packetTimeOut = true;
+            _lastReceived = 0;  // only one callback
+        }
+    }
+    CriticalSectionScoped lock(_criticalSectionFeedbacks);
+    if(packetTimeOut && _cbRtcpFeedback)
+    {
+        _cbRtcpFeedback->OnRTCPPacketTimeout(_id);
+    }
+}
+} // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/rtcp_receiver.h b/src/modules/rtp_rtcp/source/rtcp_receiver.h
new file mode 100644
index 0000000..c587ebb
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtcp_receiver.h
@@ -0,0 +1,226 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_H_
+
+#include <map>
+#include <vector>
+
+#include "typedefs.h"
+#include "rtp_utility.h"
+#include "rtcp_utility.h"
+#include "rtp_rtcp_defines.h"
+#include "rtcp_receiver_help.h"
+#include "tmmbr_help.h"
+
+namespace webrtc {
+class ModuleRtpRtcpImpl;
+
+class RTCPReceiver : public TMMBRHelp
+{
+public:
+    RTCPReceiver(const WebRtc_Word32 id, RtpRtcpClock* clock,
+                 ModuleRtpRtcpImpl* owner);
+    virtual ~RTCPReceiver();
+
+    void ChangeUniqueId(const WebRtc_Word32 id);
+
+    RTCPMethod Status() const;
+    WebRtc_Word32 SetRTCPStatus(const RTCPMethod method);
+
+    WebRtc_Word64 LastReceived();
+
+    void SetSSRC( const WebRtc_UWord32 ssrc);
+    void SetRelaySSRC( const WebRtc_UWord32 ssrc);
+    WebRtc_Word32 SetRemoteSSRC( const WebRtc_UWord32 ssrc);
+
+    WebRtc_UWord32 RelaySSRC() const;
+
+    void RegisterRtcpObservers(RtcpIntraFrameObserver* intra_frame_callback,
+                               RtcpBandwidthObserver* bandwidth_callback,
+                               RtcpFeedback* feedback_callback);
+
+    WebRtc_Word32 IncomingRTCPPacket(RTCPHelp::RTCPPacketInformation& rtcpPacketInformation,
+                                   RTCPUtility::RTCPParserV2 *rtcpParser);
+
+    void TriggerCallbacksFromRTCPPacket(RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    // get received cname
+    WebRtc_Word32 CNAME(const WebRtc_UWord32 remoteSSRC,
+                        char cName[RTCP_CNAME_SIZE]) const;
+
+    // get received NTP
+    WebRtc_Word32 NTP(WebRtc_UWord32 *ReceivedNTPsecs,
+                      WebRtc_UWord32 *ReceivedNTPfrac,
+                      WebRtc_UWord32 *RTCPArrivalTimeSecs,
+                      WebRtc_UWord32 *RTCPArrivalTimeFrac) const;
+
+    // get rtt
+    WebRtc_Word32 RTT(const WebRtc_UWord32 remoteSSRC,
+                      WebRtc_UWord16* RTT,
+                      WebRtc_UWord16* avgRTT,
+                      WebRtc_UWord16* minRTT,
+                      WebRtc_UWord16* maxRTT) const;
+
+    WebRtc_UWord16 RTT() const;
+
+    int SetRTT(WebRtc_UWord16 rtt);
+
+    WebRtc_Word32 ResetRTT(const WebRtc_UWord32 remoteSSRC);
+
+    WebRtc_Word32 SenderInfoReceived(RTCPSenderInfo* senderInfo) const;
+
+    // get statistics
+    WebRtc_Word32 StatisticsReceived(
+        std::vector<RTCPReportBlock>* receiveBlocks) const;
+
+    // Get TMMBR
+    WebRtc_Word32 TMMBRReceived(const WebRtc_UWord32 size,
+                                const WebRtc_UWord32 accNumCandidates,
+                                TMMBRSet* candidateSet) const;
+
+    bool UpdateRTCPReceiveInformationTimers();
+
+    WebRtc_Word32 BoundingSet(bool &tmmbrOwner, TMMBRSet* boundingSetRec);
+
+    WebRtc_Word32 UpdateTMMBR();
+
+    WebRtc_Word32 SetPacketTimeout(const WebRtc_UWord32 timeoutMS);
+    void PacketTimeout();
+
+protected:
+    RTCPHelp::RTCPReportBlockInformation* CreateReportBlockInformation(const WebRtc_UWord32 remoteSSRC);
+    RTCPHelp::RTCPReportBlockInformation* GetReportBlockInformation(const WebRtc_UWord32 remoteSSRC) const;
+
+    RTCPUtility::RTCPCnameInformation* CreateCnameInformation(const WebRtc_UWord32 remoteSSRC);
+    RTCPUtility::RTCPCnameInformation* GetCnameInformation(const WebRtc_UWord32 remoteSSRC) const;
+
+    RTCPHelp::RTCPReceiveInformation* CreateReceiveInformation(const WebRtc_UWord32 remoteSSRC);
+    RTCPHelp::RTCPReceiveInformation* GetReceiveInformation(const WebRtc_UWord32 remoteSSRC);
+
+    void UpdateReceiveInformation( RTCPHelp::RTCPReceiveInformation& receiveInformation);
+
+    void HandleSenderReceiverReport(RTCPUtility::RTCPParserV2& rtcpParser,
+                                    RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleReportBlock(const RTCPUtility::RTCPPacket& rtcpPacket,
+                           RTCPHelp::RTCPPacketInformation& rtcpPacketInformation,
+                           const WebRtc_UWord32 remoteSSRC,
+                           const WebRtc_UWord8 numberOfReportBlocks);
+
+    void HandleSDES(RTCPUtility::RTCPParserV2& rtcpParser);
+
+    void HandleSDESChunk(RTCPUtility::RTCPParserV2& rtcpParser);
+
+    void HandleXRVOIPMetric(RTCPUtility::RTCPParserV2& rtcpParser,
+                            RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleNACK(RTCPUtility::RTCPParserV2& rtcpParser,
+                    RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleNACKItem(const RTCPUtility::RTCPPacket& rtcpPacket,
+                        RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleBYE(RTCPUtility::RTCPParserV2& rtcpParser);
+
+    void HandlePLI(RTCPUtility::RTCPParserV2& rtcpParser,
+                   RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleSLI(RTCPUtility::RTCPParserV2& rtcpParser,
+                   RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleSLIItem(const RTCPUtility::RTCPPacket& rtcpPacket,
+                       RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleRPSI(RTCPUtility::RTCPParserV2& rtcpParser,
+                    RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandlePsfbApp(RTCPUtility::RTCPParserV2& rtcpParser,
+                       RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleREMBItem(RTCPUtility::RTCPParserV2& rtcpParser,
+                        RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleIJ(RTCPUtility::RTCPParserV2& rtcpParser,
+                  RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleIJItem(const RTCPUtility::RTCPPacket& rtcpPacket,
+                      RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleTMMBR(RTCPUtility::RTCPParserV2& rtcpParser,
+                     RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleTMMBRItem(RTCPHelp::RTCPReceiveInformation& receiveInfo,
+                         const RTCPUtility::RTCPPacket& rtcpPacket,
+                         RTCPHelp::RTCPPacketInformation& rtcpPacketInformation,
+                         const WebRtc_UWord32 senderSSRC);
+
+    void HandleTMMBN(RTCPUtility::RTCPParserV2& rtcpParser,
+                     RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleSR_REQ(RTCPUtility::RTCPParserV2& rtcpParser,
+                      RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleTMMBNItem(RTCPHelp::RTCPReceiveInformation& receiveInfo,
+                         const RTCPUtility::RTCPPacket& rtcpPacket);
+
+    void HandleFIR(RTCPUtility::RTCPParserV2& rtcpParser,
+                   RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleFIRItem(RTCPHelp::RTCPReceiveInformation* receiveInfo,
+                       const RTCPUtility::RTCPPacket& rtcpPacket,
+                       RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleAPP(RTCPUtility::RTCPParserV2& rtcpParser,
+                   RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+    void HandleAPPItem(RTCPUtility::RTCPParserV2& rtcpParser,
+                       RTCPHelp::RTCPPacketInformation& rtcpPacketInformation);
+
+ private:
+  WebRtc_Word32           _id;
+  RtpRtcpClock&           _clock;
+  RTCPMethod              _method;
+  WebRtc_Word64           _lastReceived;
+  ModuleRtpRtcpImpl&      _rtpRtcp;
+
+  CriticalSectionWrapper* _criticalSectionFeedbacks;
+  RtcpFeedback*           _cbRtcpFeedback;
+  RtcpBandwidthObserver*  _cbRtcpBandwidthObserver;
+  RtcpIntraFrameObserver* _cbRtcpIntraFrameObserver;
+
+  CriticalSectionWrapper* _criticalSectionRTCPReceiver;
+  WebRtc_UWord32          _SSRC;
+  WebRtc_UWord32          _remoteSSRC;
+
+  // Received send report
+  RTCPSenderInfo _remoteSenderInfo;
+  // when did we receive the last send report
+  WebRtc_UWord32 _lastReceivedSRNTPsecs;
+  WebRtc_UWord32 _lastReceivedSRNTPfrac;
+
+  // Received report blocks.
+  std::map<WebRtc_UWord32, RTCPHelp::RTCPReportBlockInformation*>
+      _receivedReportBlockMap;
+  std::map<WebRtc_UWord32, RTCPHelp::RTCPReceiveInformation*>
+      _receivedInfoMap;
+  std::map<WebRtc_UWord32, RTCPUtility::RTCPCnameInformation*>
+      _receivedCnameMap;
+
+  WebRtc_UWord32            _packetTimeOutMS;
+
+  // Externally set RTT. This value can only be used if there are no valid
+  // RTT estimates.
+  WebRtc_UWord16 _rtt;
+
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_H_
diff --git a/src/modules/rtp_rtcp/source/rtcp_receiver_help.cc b/src/modules/rtp_rtcp/source/rtcp_receiver_help.cc
new file mode 100644
index 0000000..81e33ac
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtcp_receiver_help.cc
@@ -0,0 +1,206 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtcp_receiver_help.h"
+
+#include <string.h>  // memset
+#include <cassert>  // assert
+
+#include "modules/rtp_rtcp/source/rtp_utility.h"
+
+namespace webrtc {
+using namespace RTCPHelp;
+
+RTCPPacketInformation::RTCPPacketInformation()
+    : rtcpPacketTypeFlags(0),
+      remoteSSRC(0),
+      nackSequenceNumbers(0),
+      nackSequenceNumbersLength(0),
+      applicationSubType(0),
+      applicationName(0),
+      applicationData(),
+      applicationLength(0),
+      reportBlock(false),
+      fractionLost(0),
+      roundTripTime(0),
+      lastReceivedExtendedHighSeqNum(0),
+      jitter(0),
+      interArrivalJitter(0),
+      sliPictureId(0),
+      rpsiPictureId(0),
+      receiverEstimatedMaxBitrate(0),
+      VoIPMetric(NULL) {
+}
+
+RTCPPacketInformation::~RTCPPacketInformation()
+{
+    delete [] nackSequenceNumbers;
+    delete [] applicationData;
+    delete VoIPMetric;
+}
+
+void
+RTCPPacketInformation::AddVoIPMetric(const RTCPVoIPMetric* metric)
+{
+    VoIPMetric = new RTCPVoIPMetric();
+    memcpy(VoIPMetric, metric, sizeof(RTCPVoIPMetric));
+}
+
+void RTCPPacketInformation::AddApplicationData(const WebRtc_UWord8* data,
+                                               const WebRtc_UWord16 size) {
+    WebRtc_UWord8* oldData = applicationData;
+    WebRtc_UWord16 oldLength = applicationLength;
+
+    // Don't copy more than kRtcpAppCode_DATA_SIZE bytes.
+    WebRtc_UWord16 copySize = size;
+    if (size > kRtcpAppCode_DATA_SIZE) {
+        copySize = kRtcpAppCode_DATA_SIZE;
+    }
+
+    applicationLength += copySize;
+    applicationData = new WebRtc_UWord8[applicationLength];
+
+    if (oldData)
+    {
+        memcpy(applicationData, oldData, oldLength);
+        memcpy(applicationData+oldLength, data, copySize);
+        delete [] oldData;
+    } else
+    {
+        memcpy(applicationData, data, copySize);
+    }
+}
+
+void
+RTCPPacketInformation::ResetNACKPacketIdArray()
+{
+    if (NULL == nackSequenceNumbers)
+    {
+        nackSequenceNumbers = new WebRtc_UWord16[NACK_PACKETS_MAX_SIZE];
+    }
+    nackSequenceNumbersLength = 0;
+}
+
+void
+RTCPPacketInformation::AddNACKPacket(const WebRtc_UWord16 packetID)
+{
+    assert(nackSequenceNumbers);
+
+    WebRtc_UWord16& idx = nackSequenceNumbersLength;
+    if (idx < NACK_PACKETS_MAX_SIZE)
+    {
+        nackSequenceNumbers[idx++] = packetID;
+    }
+}
+
+void
+RTCPPacketInformation::AddReportInfo(const WebRtc_UWord8 fraction,
+                                     const WebRtc_UWord16 rtt,
+                                     const WebRtc_UWord32 extendedHighSeqNum,
+                                     const WebRtc_UWord32 j)
+{
+    reportBlock = true;
+    fractionLost = fraction;
+    roundTripTime = rtt;
+    jitter = j;
+    lastReceivedExtendedHighSeqNum = extendedHighSeqNum;
+}
+
+RTCPReportBlockInformation::RTCPReportBlockInformation():
+    remoteReceiveBlock(),
+    remoteMaxJitter(0),
+    RTT(0),
+    minRTT(0),
+    maxRTT(0),
+    avgRTT(0),
+    numAverageCalcs(0)
+{
+    memset(&remoteReceiveBlock,0,sizeof(remoteReceiveBlock));
+}
+
+RTCPReportBlockInformation::~RTCPReportBlockInformation()
+{
+}
+
+RTCPReceiveInformation::RTCPReceiveInformation()
+    : lastTimeReceived(0),
+      lastFIRSequenceNumber(-1),
+      lastFIRRequest(0),
+      readyForDelete(false) {
+}
+
+RTCPReceiveInformation::~RTCPReceiveInformation() {
+}
+
+// Increase size of TMMBRSet if needed, and also take care of
+// the _tmmbrSetTimeouts vector.
+void RTCPReceiveInformation::VerifyAndAllocateTMMBRSet(
+    const WebRtc_UWord32 minimumSize) {
+  if (minimumSize > TmmbrSet.sizeOfSet()) {
+    TmmbrSet.VerifyAndAllocateSetKeepingData(minimumSize);
+    // make sure that our buffers are big enough
+    _tmmbrSetTimeouts.reserve(minimumSize);
+  }
+}
+
+void RTCPReceiveInformation::InsertTMMBRItem(
+    const WebRtc_UWord32 senderSSRC,
+    const RTCPUtility::RTCPPacketRTPFBTMMBRItem& TMMBRItem,
+    const WebRtc_Word64 currentTimeMS) {
+  // serach to see if we have it in our list
+  for (WebRtc_UWord32 i = 0; i < TmmbrSet.lengthOfSet(); i++)  {
+    if (TmmbrSet.Ssrc(i) == senderSSRC) {
+      // we already have this SSRC in our list update it
+      TmmbrSet.SetEntry(i,
+                        TMMBRItem.MaxTotalMediaBitRate,
+                        TMMBRItem.MeasuredOverhead,
+                        senderSSRC);
+      _tmmbrSetTimeouts[i] = currentTimeMS;
+      return;
+    }
+  }
+  VerifyAndAllocateTMMBRSet(TmmbrSet.lengthOfSet() + 1);
+  TmmbrSet.AddEntry(TMMBRItem.MaxTotalMediaBitRate,
+                    TMMBRItem.MeasuredOverhead,
+                    senderSSRC);
+  _tmmbrSetTimeouts.push_back(currentTimeMS);
+}
+
+WebRtc_Word32 RTCPReceiveInformation::GetTMMBRSet(
+    const WebRtc_UWord32 sourceIdx,
+    const WebRtc_UWord32 targetIdx,
+    TMMBRSet* candidateSet,
+    const WebRtc_Word64 currentTimeMS) {
+  if (sourceIdx >= TmmbrSet.lengthOfSet()) {
+    return -1;
+  }
+  if (targetIdx >= candidateSet->sizeOfSet()) {
+    return -1;
+  }
+  // use audio define since we don't know what interval the remote peer is using
+  if (currentTimeMS - _tmmbrSetTimeouts[sourceIdx] >
+      5 * RTCP_INTERVAL_AUDIO_MS) {
+    // value timed out
+    TmmbrSet.RemoveEntry(sourceIdx);
+    _tmmbrSetTimeouts.erase(_tmmbrSetTimeouts.begin() + sourceIdx);
+    return -1;
+  }
+  candidateSet->SetEntry(targetIdx,
+                         TmmbrSet.Tmmbr(sourceIdx),
+                         TmmbrSet.PacketOH(sourceIdx),
+                         TmmbrSet.Ssrc(sourceIdx));
+  return 0;
+}
+
+void RTCPReceiveInformation::VerifyAndAllocateBoundingSet(
+    const WebRtc_UWord32 minimumSize) {
+  TmmbnBoundingSet.VerifyAndAllocateSet(minimumSize);
+}
+} // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/rtcp_receiver_help.h b/src/modules/rtp_rtcp/source/rtcp_receiver_help.h
new file mode 100644
index 0000000..a721430
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtcp_receiver_help.h
@@ -0,0 +1,128 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_HELP_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_HELP_H_
+
+#include <vector>
+
+#include "modules/rtp_rtcp/interface/rtp_rtcp_defines.h"  // RTCPReportBlock
+#include "modules/rtp_rtcp/source/rtcp_utility.h"
+#include "modules/rtp_rtcp/source/tmmbr_help.h"
+#include "typedefs.h"
+
+namespace webrtc {
+namespace RTCPHelp
+{
+
+class RTCPPacketInformation
+{
+public:
+    RTCPPacketInformation();
+    ~RTCPPacketInformation();
+
+    void AddVoIPMetric(const RTCPVoIPMetric*  metric);
+
+    void AddApplicationData(const WebRtc_UWord8* data,
+                            const WebRtc_UWord16 size);
+
+    void AddNACKPacket(const WebRtc_UWord16 packetID);
+    void ResetNACKPacketIdArray();
+
+    void AddReportInfo(const WebRtc_UWord8 fractionLost,
+                       const WebRtc_UWord16 rtt,
+                       const WebRtc_UWord32 extendedHighSeqNum,
+                       const WebRtc_UWord32 jitter);
+
+    WebRtc_UWord32  rtcpPacketTypeFlags; // RTCPPacketTypeFlags bit field
+    WebRtc_UWord32  remoteSSRC;
+
+    WebRtc_UWord16* nackSequenceNumbers;
+    WebRtc_UWord16  nackSequenceNumbersLength;
+
+    WebRtc_UWord8   applicationSubType;
+    WebRtc_UWord32  applicationName;
+    WebRtc_UWord8*  applicationData;
+    WebRtc_UWord16  applicationLength;
+
+    bool            reportBlock;
+    WebRtc_UWord8   fractionLost;
+    WebRtc_UWord16  roundTripTime;
+    WebRtc_UWord32  lastReceivedExtendedHighSeqNum;
+    WebRtc_UWord32  jitter;
+
+    WebRtc_UWord32  interArrivalJitter;
+
+    WebRtc_UWord8   sliPictureId;
+    WebRtc_UWord64  rpsiPictureId;
+    WebRtc_UWord32  receiverEstimatedMaxBitrate;
+
+    RTCPVoIPMetric*  VoIPMetric;
+};
+
+
+class RTCPReportBlockInformation
+{
+public:
+    RTCPReportBlockInformation();
+    ~RTCPReportBlockInformation();
+
+    // Statistics
+    RTCPReportBlock remoteReceiveBlock;
+    WebRtc_UWord32        remoteMaxJitter;
+
+    // RTT
+    WebRtc_UWord16    RTT;
+    WebRtc_UWord16    minRTT;
+    WebRtc_UWord16    maxRTT;
+    WebRtc_UWord16    avgRTT;
+    WebRtc_UWord32    numAverageCalcs;
+};
+
+class RTCPReceiveInformation
+{
+public:
+    RTCPReceiveInformation();
+    ~RTCPReceiveInformation();
+
+    void VerifyAndAllocateBoundingSet(const WebRtc_UWord32 minimumSize);
+    void VerifyAndAllocateTMMBRSet(const WebRtc_UWord32 minimumSize);
+
+    void InsertTMMBRItem(const WebRtc_UWord32 senderSSRC,
+                         const RTCPUtility::RTCPPacketRTPFBTMMBRItem& TMMBRItem,
+                         const WebRtc_Word64 currentTimeMS);
+
+    // get
+    WebRtc_Word32 GetTMMBRSet(const WebRtc_UWord32 sourceIdx,
+                              const WebRtc_UWord32 targetIdx,
+                              TMMBRSet* candidateSet,
+                              const WebRtc_Word64 currentTimeMS);
+
+    WebRtc_Word64 lastTimeReceived;
+
+    // FIR
+    WebRtc_Word32 lastFIRSequenceNumber;
+    WebRtc_Word64 lastFIRRequest;
+
+    // TMMBN
+    TMMBRSet        TmmbnBoundingSet;
+
+    // TMMBR
+    TMMBRSet        TmmbrSet;
+
+    bool            readyForDelete;
+private:
+    std::vector<WebRtc_Word64> _tmmbrSetTimeouts;
+};
+
+} // end namespace RTCPHelp
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_RECEIVER_HELP_H_
diff --git a/src/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc b/src/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
new file mode 100644
index 0000000..2cb6d25
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtcp_receiver_unittest.cc
@@ -0,0 +1,364 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file includes unit tests for the RTCPReceiver.
+ */
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+// Note: This file has no directory. Lint warning must be ignored.
+#include "common_types.h"
+#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_observer.h"
+#include "modules/rtp_rtcp/source/rtp_utility.h"
+#include "modules/rtp_rtcp/source/rtcp_sender.h"
+#include "modules/rtp_rtcp/source/rtcp_receiver.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl.h"
+
+namespace webrtc {
+
+namespace {  // Anonymous namespace; hide utility functions and classes.
+
+// A very simple packet builder class for building RTCP packets.
+class PacketBuilder {
+ public:
+  static const int kMaxPacketSize = 1024;
+
+  PacketBuilder()
+      : pos_(0),
+        pos_of_len_(0) {
+  }
+
+
+  void Add8(WebRtc_UWord8 byte) {
+    EXPECT_LT(pos_, kMaxPacketSize - 1);
+    buffer_[pos_] = byte;
+    ++ pos_;
+  }
+
+  void Add16(WebRtc_UWord16 word) {
+    Add8(word >> 8);
+    Add8(word & 0xFF);
+  }
+
+  void Add32(WebRtc_UWord32 word) {
+    Add8(word >> 24);
+    Add8((word >> 16) & 0xFF);
+    Add8((word >> 8) & 0xFF);
+    Add8(word & 0xFF);
+  }
+
+  void Add64(WebRtc_UWord32 upper_half, WebRtc_UWord32 lower_half) {
+    Add32(upper_half);
+    Add32(lower_half);
+  }
+
+  // Set the 5-bit value in the 1st byte of the header
+  // and the payload type. Set aside room for the length field,
+  // and make provision for backpatching it.
+  // Note: No way to set the padding bit.
+  void AddRtcpHeader(int payload, int format_or_count) {
+    PatchLengthField();
+    Add8(0x80 | (format_or_count & 0x1F));
+    Add8(payload);
+    pos_of_len_ = pos_;
+    Add16(0xDEAD);  // Initialize length to "clearly illegal".
+  }
+
+  void AddTmmbrBandwidth(int mantissa, int exponent, int overhead) {
+    // 6 bits exponent, 17 bits mantissa, 9 bits overhead.
+    WebRtc_UWord32 word = 0;
+    word |= (exponent << 26);
+    word |= ((mantissa & 0x1FFFF) << 9);
+    word |= (overhead & 0x1FF);
+    Add32(word);
+  }
+
+  void AddSrPacket(WebRtc_UWord32 sender_ssrc) {
+    AddRtcpHeader(200, 0);
+    Add32(sender_ssrc);
+    Add64(0x10203, 0x4050607);  // NTP timestamp
+    Add32(0x10203);  // RTP timestamp
+    Add32(0);  // Sender's packet count
+    Add32(0);  // Sender's octet count
+  }
+
+  const WebRtc_UWord8* packet() {
+    PatchLengthField();
+    return buffer_;
+  }
+
+  unsigned int length() {
+    return pos_;
+  }
+ private:
+  void PatchLengthField() {
+    if (pos_of_len_ > 0) {
+      // Backpatch the packet length. The client must have taken
+      // care of proper padding to 32-bit words.
+      int this_packet_length = (pos_ - pos_of_len_ - 2);
+      ASSERT_EQ(0, this_packet_length % 4)
+          << "Packets must be a multiple of 32 bits long"
+          << " pos " << pos_ << " pos_of_len " << pos_of_len_;
+      buffer_[pos_of_len_] = this_packet_length >> 10;
+      buffer_[pos_of_len_+1] = (this_packet_length >> 2) & 0xFF;
+      pos_of_len_ = 0;
+    }
+  }
+
+  int pos_;
+  // Where the length field of the current packet is.
+  // Note that 0 is not a legal value, so is used for "uninitialized".
+  int pos_of_len_;
+  WebRtc_UWord8 buffer_[kMaxPacketSize];
+};
+
+// Fake system clock, controllable to the millisecond.
+// The Epoch for this clock is Jan 1, 1970, as evidenced
+// by the NTP calculation.
+class FakeSystemClock : public RtpRtcpClock {
+ public:
+  FakeSystemClock()
+      : time_in_ms_(1335900000) {}  // A nonzero, but fake, value.
+
+  virtual WebRtc_Word64 GetTimeInMS() {
+    return time_in_ms_;
+  }
+
+  virtual void CurrentNTP(WebRtc_UWord32& secs,
+                          WebRtc_UWord32& frac) {
+    secs = (time_in_ms_ / 1000) + ModuleRTPUtility::NTP_JAN_1970;
+    // NTP_FRAC is 2^32 - number of ticks per second in the NTP fraction.
+    frac = (WebRtc_UWord32)((time_in_ms_ % 1000)
+                            * ModuleRTPUtility::NTP_FRAC / 1000);
+  }
+
+  void AdvanceClock(int ms_to_advance) {
+    time_in_ms_ += ms_to_advance;
+  }
+ private:
+  WebRtc_Word64 time_in_ms_;
+};
+
+
+// This test transport verifies that no functions get called.
+class TestTransport : public Transport,
+                      public RtpData {
+ public:
+  explicit TestTransport()
+      : rtcp_receiver_(NULL) {
+  }
+  void SetRTCPReceiver(RTCPReceiver* rtcp_receiver) {
+    rtcp_receiver_ = rtcp_receiver;
+  }
+  virtual int SendPacket(int /*ch*/, const void* /*data*/, int /*len*/) {
+    ADD_FAILURE();  // FAIL() gives a compile error.
+    return -1;
+  }
+
+  // Injects an RTCP packet into the receiver.
+  virtual int SendRTCPPacket(int /* ch */, const void *packet, int packet_len) {
+    ADD_FAILURE();
+    return 0;
+  }
+
+  virtual int OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                                    const WebRtc_UWord16 payloadSize,
+                                    const WebRtcRTPHeader* rtpHeader) {
+    ADD_FAILURE();
+    return 0;
+  }
+  RTCPReceiver* rtcp_receiver_;
+};
+
+class RtcpReceiverTest : public ::testing::Test {
+ protected:
+  RtcpReceiverTest()
+      : over_use_detector_options_(),
+        remote_bitrate_observer_(),
+        remote_bitrate_estimator_(&remote_bitrate_observer_,
+                                  over_use_detector_options_) {
+    // system_clock_ = ModuleRTPUtility::GetSystemClock();
+    system_clock_ = new FakeSystemClock();
+    test_transport_ = new TestTransport();
+
+    RtpRtcp::Configuration configuration;
+    configuration.id = 0;
+    configuration.audio = false;
+    configuration.clock = system_clock_;
+    configuration.outgoing_transport = test_transport_;
+    configuration.remote_bitrate_estimator = &remote_bitrate_estimator_;
+    rtp_rtcp_impl_ = new ModuleRtpRtcpImpl(configuration);
+    rtcp_receiver_ = new RTCPReceiver(0, system_clock_, rtp_rtcp_impl_);
+    test_transport_->SetRTCPReceiver(rtcp_receiver_);
+  }
+  ~RtcpReceiverTest() {
+    delete rtcp_receiver_;
+    delete rtp_rtcp_impl_;
+    delete test_transport_;
+    delete system_clock_;
+  }
+
+  // Injects an RTCP packet into the receiver.
+  // Returns 0 for OK, non-0 for failure.
+  int InjectRtcpPacket(const WebRtc_UWord8* packet,
+                        WebRtc_UWord16 packet_len) {
+    RTCPUtility::RTCPParserV2 rtcpParser(packet,
+                                         packet_len,
+                                         true);  // Allow non-compound RTCP
+
+    RTCPHelp::RTCPPacketInformation rtcpPacketInformation;
+    int result = rtcp_receiver_->IncomingRTCPPacket(rtcpPacketInformation,
+                                                    &rtcpParser);
+    rtcp_packet_info_ = rtcpPacketInformation;
+    return result;
+  }
+
+  OverUseDetectorOptions over_use_detector_options_;
+  FakeSystemClock* system_clock_;
+  ModuleRtpRtcpImpl* rtp_rtcp_impl_;
+  RTCPReceiver* rtcp_receiver_;
+  TestTransport* test_transport_;
+  RTCPHelp::RTCPPacketInformation rtcp_packet_info_;
+  MockRemoteBitrateObserver remote_bitrate_observer_;
+  RemoteBitrateEstimator remote_bitrate_estimator_;
+};
+
+
+TEST_F(RtcpReceiverTest, BrokenPacketIsIgnored) {
+  const WebRtc_UWord8 bad_packet[] = {0, 0, 0, 0};
+  EXPECT_EQ(0, InjectRtcpPacket(bad_packet, sizeof(bad_packet)));
+  EXPECT_EQ(0U, rtcp_packet_info_.rtcpPacketTypeFlags);
+}
+
+TEST_F(RtcpReceiverTest, InjectSrPacket) {
+  const WebRtc_UWord32 kSenderSsrc = 0x10203;
+  PacketBuilder p;
+  p.AddSrPacket(kSenderSsrc);
+  EXPECT_EQ(0, InjectRtcpPacket(p.packet(), p.length()));
+  // The parser will note the remote SSRC on a SR from other than his
+  // expected peer, but will not flag that he's gotten a packet.
+  EXPECT_EQ(kSenderSsrc, rtcp_packet_info_.remoteSSRC);
+  EXPECT_EQ(0U,
+            kRtcpSr & rtcp_packet_info_.rtcpPacketTypeFlags);
+}
+
+TEST_F(RtcpReceiverTest, TmmbrReceivedWithNoIncomingPacket) {
+  // This call is expected to fail because no data has arrived.
+  EXPECT_EQ(-1, rtcp_receiver_->TMMBRReceived(0, 0, NULL));
+}
+
+TEST_F(RtcpReceiverTest, TmmbrPacketAccepted) {
+  const WebRtc_UWord32 kMediaFlowSsrc = 0x2040608;
+  const WebRtc_UWord32 kSenderSsrc = 0x10203;
+  const WebRtc_UWord32 kMediaRecipientSsrc = 0x101;
+  rtcp_receiver_->SetSSRC(kMediaFlowSsrc);  // Matches "media source" above.
+
+  PacketBuilder p;
+  p.AddSrPacket(kSenderSsrc);
+  // TMMBR packet.
+  p.AddRtcpHeader(205, 3);
+  p.Add32(kSenderSsrc);
+  p.Add32(kMediaRecipientSsrc);
+  p.Add32(kMediaFlowSsrc);
+  p.AddTmmbrBandwidth(30000, 0, 0);  // 30 Kbits/sec bandwidth, no overhead.
+
+  EXPECT_EQ(0, InjectRtcpPacket(p.packet(), p.length()));
+  EXPECT_EQ(1, rtcp_receiver_->TMMBRReceived(0, 0, NULL));
+  TMMBRSet candidate_set;
+  candidate_set.VerifyAndAllocateSet(1);
+  EXPECT_EQ(1, rtcp_receiver_->TMMBRReceived(1, 0, &candidate_set));
+  EXPECT_LT(0U, candidate_set.Tmmbr(0));
+  EXPECT_EQ(kMediaRecipientSsrc, candidate_set.Ssrc(0));
+}
+
+TEST_F(RtcpReceiverTest, TmmbrPacketNotForUsIgnored) {
+  const WebRtc_UWord32 kMediaFlowSsrc = 0x2040608;
+  const WebRtc_UWord32 kSenderSsrc = 0x10203;
+  const WebRtc_UWord32 kMediaRecipientSsrc = 0x101;
+  const WebRtc_UWord32 kOtherMediaFlowSsrc = 0x9999;
+
+  PacketBuilder p;
+  p.AddSrPacket(kSenderSsrc);
+  // TMMBR packet.
+  p.AddRtcpHeader(205, 3);
+  p.Add32(kSenderSsrc);
+  p.Add32(kMediaRecipientSsrc);
+  p.Add32(kOtherMediaFlowSsrc);  // This SSRC is not what we're sending.
+  p.AddTmmbrBandwidth(30000, 0, 0);
+
+  rtcp_receiver_->SetSSRC(kMediaFlowSsrc);
+  EXPECT_EQ(0, InjectRtcpPacket(p.packet(), p.length()));
+  EXPECT_EQ(0, rtcp_receiver_->TMMBRReceived(0, 0, NULL));
+}
+
+TEST_F(RtcpReceiverTest, TmmbrPacketZeroRateIgnored) {
+  const WebRtc_UWord32 kMediaFlowSsrc = 0x2040608;
+  const WebRtc_UWord32 kSenderSsrc = 0x10203;
+  const WebRtc_UWord32 kMediaRecipientSsrc = 0x101;
+  rtcp_receiver_->SetSSRC(kMediaFlowSsrc);  // Matches "media source" above.
+
+  PacketBuilder p;
+  p.AddSrPacket(kSenderSsrc);
+  // TMMBR packet.
+  p.AddRtcpHeader(205, 3);
+  p.Add32(kSenderSsrc);
+  p.Add32(kMediaRecipientSsrc);
+  p.Add32(kMediaFlowSsrc);
+  p.AddTmmbrBandwidth(0, 0, 0);  // Rate zero.
+
+  EXPECT_EQ(0, InjectRtcpPacket(p.packet(), p.length()));
+  EXPECT_EQ(0, rtcp_receiver_->TMMBRReceived(0, 0, NULL));
+}
+
+TEST_F(RtcpReceiverTest, TmmbrThreeConstraintsTimeOut) {
+  const WebRtc_UWord32 kMediaFlowSsrc = 0x2040608;
+  const WebRtc_UWord32 kSenderSsrc = 0x10203;
+  const WebRtc_UWord32 kMediaRecipientSsrc = 0x101;
+  rtcp_receiver_->SetSSRC(kMediaFlowSsrc);  // Matches "media source" above.
+
+  // Inject 3 packets "from" kMediaRecipientSsrc, Ssrc+1, Ssrc+2.
+  // The times of arrival are starttime + 0, starttime + 5 and starttime + 10.
+  for (WebRtc_UWord32 ssrc = kMediaRecipientSsrc;
+       ssrc < kMediaRecipientSsrc+3; ++ssrc) {
+    PacketBuilder p;
+    p.AddSrPacket(kSenderSsrc);
+    // TMMBR packet.
+    p.AddRtcpHeader(205, 3);
+    p.Add32(kSenderSsrc);
+    p.Add32(ssrc);
+    p.Add32(kMediaFlowSsrc);
+    p.AddTmmbrBandwidth(30000, 0, 0);  // 30 Kbits/sec bandwidth, no overhead.
+
+    EXPECT_EQ(0, InjectRtcpPacket(p.packet(), p.length()));
+    system_clock_->AdvanceClock(5000);  // 5 seconds between each packet.
+  }
+  // It is now starttime+15.
+  EXPECT_EQ(3, rtcp_receiver_->TMMBRReceived(0, 0, NULL));
+  TMMBRSet candidate_set;
+  candidate_set.VerifyAndAllocateSet(3);
+  EXPECT_EQ(3, rtcp_receiver_->TMMBRReceived(3, 0, &candidate_set));
+  EXPECT_LT(0U, candidate_set.Tmmbr(0));
+  // We expect the timeout to be 25 seconds. Advance the clock by 12
+  // seconds, timing out the first packet.
+  system_clock_->AdvanceClock(12000);
+  // Odd behaviour: Just counting them does not trigger the timeout.
+  EXPECT_EQ(3, rtcp_receiver_->TMMBRReceived(0, 0, NULL));
+  // Odd behaviour: There's only one left after timeout, not 2.
+  EXPECT_EQ(1, rtcp_receiver_->TMMBRReceived(3, 0, &candidate_set));
+  EXPECT_EQ(kMediaRecipientSsrc + 2, candidate_set.Ssrc(0));
+}
+
+
+}  // Anonymous namespace
+
+}  // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/rtcp_sender.cc b/src/modules/rtp_rtcp/source/rtcp_sender.cc
new file mode 100644
index 0000000..887c6d7
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtcp_sender.cc
@@ -0,0 +1,2134 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtcp_sender.h"
+
+#include <cassert>  // assert
+#include <cstdlib>  // rand
+#include <string.h>  // memcpy
+
+#include "common_types.h"
+#include "modules/remote_bitrate_estimator/remote_rate_control.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/trace.h"
+
+namespace webrtc {
+
+using RTCPUtility::RTCPCnameInformation;
+
+RTCPSender::RTCPSender(const WebRtc_Word32 id,
+                       const bool audio,
+                       RtpRtcpClock* clock,
+                       ModuleRtpRtcpImpl* owner) :
+    _id(id),
+    _audio(audio),
+    _clock(*clock),
+    _method(kRtcpOff),
+    _rtpRtcp(*owner),
+    _criticalSectionTransport(CriticalSectionWrapper::CreateCriticalSection()),
+    _cbTransport(NULL),
+
+    _criticalSectionRTCPSender(CriticalSectionWrapper::CreateCriticalSection()),
+    _usingNack(false),
+    _sending(false),
+    _sendTMMBN(false),
+    _REMB(false),
+    _sendREMB(false),
+    _TMMBR(false),
+    _IJ(false),
+    _nextTimeToSendRTCP(0),
+    _SSRC(0),
+    _remoteSSRC(0),
+    _CNAME(),
+    _reportBlocks(),
+    _csrcCNAMEs(),
+
+    _cameraDelayMS(0),
+
+    _lastSendReport(),
+    _lastRTCPTime(),
+
+    _CSRCs(0),
+    _CSRC(),
+    _includeCSRCs(true),
+
+    _sequenceNumberFIR(0),
+
+    _lengthRembSSRC(0),
+    _sizeRembSSRC(0),
+    _rembSSRC(NULL),
+    _rembBitrate(0),
+
+    _tmmbrHelp(),
+    _tmmbr_Send(0),
+    _packetOH_Send(0),
+
+    _appSend(false),
+    _appSubType(0),
+    _appName(),
+    _appData(NULL),
+    _appLength(0),
+    _xrSendVoIPMetric(false),
+    _xrVoIPMetric()
+{
+    memset(_CNAME, 0, sizeof(_CNAME));
+    memset(_lastSendReport, 0, sizeof(_lastSendReport));
+    memset(_lastRTCPTime, 0, sizeof(_lastRTCPTime));
+
+    WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id, "%s created", __FUNCTION__);
+}
+
+RTCPSender::~RTCPSender() {
+  delete [] _rembSSRC;
+  delete [] _appData;
+
+  while (!_reportBlocks.empty()) {
+    std::map<WebRtc_UWord32, RTCPReportBlock*>::iterator it =
+        _reportBlocks.begin();
+    delete it->second;
+    _reportBlocks.erase(it);
+  }
+  while (!_csrcCNAMEs.empty()) {
+    std::map<WebRtc_UWord32, RTCPCnameInformation*>::iterator it =
+        _csrcCNAMEs.begin();
+    delete it->second;
+    _csrcCNAMEs.erase(it);
+  }
+  delete _criticalSectionTransport;
+  delete _criticalSectionRTCPSender;
+
+  WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, _id, "%s deleted", __FUNCTION__);
+}
+
+WebRtc_Word32
+RTCPSender::Init()
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+
+    _method = kRtcpOff;
+    _cbTransport = NULL;
+    _usingNack = false;
+    _sending = false;
+    _sendTMMBN = false;
+    _TMMBR = false;
+    _IJ = false;
+    _REMB = false;
+    _sendREMB = false;
+    _SSRC = 0;
+    _remoteSSRC = 0;
+    _cameraDelayMS = 0;
+    _sequenceNumberFIR = 0;
+    _tmmbr_Send = 0;
+    _packetOH_Send = 0;
+    //_remoteRateControl.Reset();
+    _nextTimeToSendRTCP = 0;
+    _CSRCs = 0;
+    _appSend = false;
+    _appSubType = 0;
+
+    if(_appData)
+    {
+        delete [] _appData;
+        _appData = NULL;
+    }
+    _appLength = 0;
+
+    _xrSendVoIPMetric = false;
+
+    memset(&_xrVoIPMetric, 0, sizeof(_xrVoIPMetric));
+    memset(_CNAME, 0, sizeof(_CNAME));
+    memset(_lastSendReport, 0, sizeof(_lastSendReport));
+    memset(_lastRTCPTime, 0, sizeof(_lastRTCPTime));
+    return 0;
+}
+
+void
+RTCPSender::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+}
+
+WebRtc_Word32
+RTCPSender::RegisterSendTransport(Transport* outgoingTransport)
+{
+    CriticalSectionScoped lock(_criticalSectionTransport);
+    _cbTransport = outgoingTransport;
+    return 0;
+}
+
+RTCPMethod
+RTCPSender::Status() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    return _method;
+}
+
+WebRtc_Word32
+RTCPSender::SetRTCPStatus(const RTCPMethod method)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    if(method != kRtcpOff)
+    {
+        if(_audio)
+        {
+            _nextTimeToSendRTCP = _clock.GetTimeInMS() + (RTCP_INTERVAL_AUDIO_MS/2);
+        } else
+        {
+            _nextTimeToSendRTCP = _clock.GetTimeInMS() + (RTCP_INTERVAL_VIDEO_MS/2);
+        }
+    }
+    _method = method;
+    return 0;
+}
+
+bool
+RTCPSender::Sending() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    return _sending;
+}
+
+WebRtc_Word32
+RTCPSender::SetSendingStatus(const bool sending)
+{
+    bool sendRTCPBye = false;
+    {
+        CriticalSectionScoped lock(_criticalSectionRTCPSender);
+
+        if(_method != kRtcpOff)
+        {
+            if(sending == false && _sending == true)
+            {
+                // Trigger RTCP bye
+                sendRTCPBye = true;
+            }
+        }
+        _sending = sending;
+    }
+    if(sendRTCPBye)
+    {
+        return SendRTCP(kRtcpBye);
+    }
+    return 0;
+}
+
+bool
+RTCPSender::REMB() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    return _REMB;
+}
+
+WebRtc_Word32
+RTCPSender::SetREMBStatus(const bool enable)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    _REMB = enable;
+    return 0;
+}
+
+WebRtc_Word32
+RTCPSender::SetREMBData(const WebRtc_UWord32 bitrate,
+                        const WebRtc_UWord8 numberOfSSRC,
+                        const WebRtc_UWord32* SSRC)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    _rembBitrate = bitrate;
+ 
+    if(_sizeRembSSRC < numberOfSSRC)
+    {
+        delete [] _rembSSRC;
+        _rembSSRC = new WebRtc_UWord32[numberOfSSRC];
+        _sizeRembSSRC = numberOfSSRC;
+    } 
+
+    _lengthRembSSRC = numberOfSSRC;
+    for (int i = 0; i < numberOfSSRC; i++)
+    {  
+        _rembSSRC[i] = SSRC[i];
+    }
+    _sendREMB = true;
+    return 0;
+}
+
+bool
+RTCPSender::TMMBR() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    return _TMMBR;
+}
+
+WebRtc_Word32
+RTCPSender::SetTMMBRStatus(const bool enable)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    _TMMBR = enable;
+    return 0;
+}
+
+bool
+RTCPSender::IJ() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    return _IJ;
+}
+
+WebRtc_Word32
+RTCPSender::SetIJStatus(const bool enable)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    _IJ = enable;
+    return 0;
+}
+
+void
+RTCPSender::SetSSRC( const WebRtc_UWord32 ssrc)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+
+    if(_SSRC != 0)
+    {
+        // not first SetSSRC, probably due to a collision
+        // schedule a new RTCP report
+        // make sure that we send a RTP packet
+        _nextTimeToSendRTCP = _clock.GetTimeInMS() + 100;
+    }
+    _SSRC = ssrc;
+}
+
+WebRtc_Word32
+RTCPSender::SetRemoteSSRC( const WebRtc_UWord32 ssrc)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    _remoteSSRC = ssrc;
+    //_remoteRateControl.Reset();
+    return 0;
+}
+
+WebRtc_Word32
+RTCPSender::SetCameraDelay(const WebRtc_Word32 delayMS)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    if(delayMS > 1000 || delayMS < -1000)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument, delay can't be larger than 1 sec", __FUNCTION__);
+        return -1;
+    }
+    _cameraDelayMS = delayMS;
+    return 0;
+}
+
+WebRtc_Word32 RTCPSender::CNAME(char cName[RTCP_CNAME_SIZE]) {
+  assert(cName);
+  CriticalSectionScoped lock(_criticalSectionRTCPSender);
+  cName[RTCP_CNAME_SIZE - 1] = 0;
+  strncpy(cName, _CNAME, RTCP_CNAME_SIZE - 1);
+  return 0;
+}
+
+WebRtc_Word32 RTCPSender::SetCNAME(const char cName[RTCP_CNAME_SIZE]) {
+  if (!cName)
+    return -1;
+
+  CriticalSectionScoped lock(_criticalSectionRTCPSender);
+  _CNAME[RTCP_CNAME_SIZE - 1] = 0;
+  strncpy(_CNAME, cName, RTCP_CNAME_SIZE - 1);
+  return 0;
+}
+
+WebRtc_Word32 RTCPSender::AddMixedCNAME(const WebRtc_UWord32 SSRC,
+                                        const char cName[RTCP_CNAME_SIZE]) {
+  assert(cName);
+  CriticalSectionScoped lock(_criticalSectionRTCPSender);
+  if (_csrcCNAMEs.size() >= kRtpCsrcSize) {
+    return -1;
+  }
+  RTCPCnameInformation* ptr = new RTCPCnameInformation();
+  ptr->name[RTCP_CNAME_SIZE - 1] = 0;
+  strncpy(ptr->name, cName, RTCP_CNAME_SIZE - 1);
+  _csrcCNAMEs[SSRC] = ptr;
+  return 0;
+}
+
+WebRtc_Word32 RTCPSender::RemoveMixedCNAME(const WebRtc_UWord32 SSRC) {
+  CriticalSectionScoped lock(_criticalSectionRTCPSender);
+  std::map<WebRtc_UWord32, RTCPCnameInformation*>::iterator it =
+      _csrcCNAMEs.find(SSRC);
+
+  if (it == _csrcCNAMEs.end()) {
+    return -1;
+  }
+  delete it->second;
+  _csrcCNAMEs.erase(it);
+  return 0;
+}
+
+bool
+RTCPSender::TimeToSendRTCPReport(const bool sendKeyframeBeforeRTP) const
+{
+/*
+    For audio we use a fix 5 sec interval
+
+    For video we use 1 sec interval fo a BW smaller than 360 kbit/s,
+        technicaly we break the max 5% RTCP BW for video below 10 kbit/s but that should be extreamly rare
+
+
+From RFC 3550
+
+    MAX RTCP BW is 5% if the session BW
+        A send report is approximately 65 bytes inc CNAME
+        A report report is approximately 28 bytes
+
+    The RECOMMENDED value for the reduced minimum in seconds is 360
+      divided by the session bandwidth in kilobits/second.  This minimum
+      is smaller than 5 seconds for bandwidths greater than 72 kb/s.
+
+    If the participant has not yet sent an RTCP packet (the variable
+      initial is true), the constant Tmin is set to 2.5 seconds, else it
+      is set to 5 seconds.
+
+    The interval between RTCP packets is varied randomly over the
+      range [0.5,1.5] times the calculated interval to avoid unintended
+      synchronization of all participants
+
+    if we send
+    If the participant is a sender (we_sent true), the constant C is
+      set to the average RTCP packet size (avg_rtcp_size) divided by 25%
+      of the RTCP bandwidth (rtcp_bw), and the constant n is set to the
+      number of senders.
+
+    if we receive only
+      If we_sent is not true, the constant C is set
+      to the average RTCP packet size divided by 75% of the RTCP
+      bandwidth.  The constant n is set to the number of receivers
+      (members - senders).  If the number of senders is greater than
+      25%, senders and receivers are treated together.
+
+    reconsideration NOT required for peer-to-peer
+      "timer reconsideration" is
+      employed.  This algorithm implements a simple back-off mechanism
+      which causes users to hold back RTCP packet transmission if the
+      group sizes are increasing.
+
+      n = number of members
+      C = avg_size/(rtcpBW/4)
+
+   3. The deterministic calculated interval Td is set to max(Tmin, n*C).
+
+   4. The calculated interval T is set to a number uniformly distributed
+      between 0.5 and 1.5 times the deterministic calculated interval.
+
+   5. The resulting value of T is divided by e-3/2=1.21828 to compensate
+      for the fact that the timer reconsideration algorithm converges to
+      a value of the RTCP bandwidth below the intended average
+*/
+
+    WebRtc_Word64 now = _clock.GetTimeInMS();
+
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+
+    if(_method == kRtcpOff)
+    {
+        return false;
+    }
+
+    if(!_audio && sendKeyframeBeforeRTP)
+    {
+        // for video key-frames we want to send the RTCP before the large key-frame
+        // if we have a 100 ms margin
+        now += RTCP_SEND_BEFORE_KEY_FRAME_MS;
+    }
+
+    if(now > _nextTimeToSendRTCP)
+    {
+        return true;
+
+    } else if(now < 0x0000ffff && _nextTimeToSendRTCP > 0xffff0000) // 65 sec margin
+    {
+        // wrap
+        return true;
+    }
+    return false;
+}
+
+WebRtc_UWord32
+RTCPSender::LastSendReport( WebRtc_UWord32& lastRTCPTime)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+
+    lastRTCPTime = _lastRTCPTime[0];
+    return _lastSendReport[0];
+}
+
+WebRtc_UWord32
+RTCPSender::SendTimeOfSendReport(const WebRtc_UWord32 sendReport)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+
+    // This is only saved when we are the sender
+    if((_lastSendReport[0] == 0) || (sendReport == 0))
+    {
+        return 0; // will be ignored
+    } else
+    {
+        for(int i = 0; i < RTCP_NUMBER_OF_SR; ++i)
+        {
+            if( _lastSendReport[i] == sendReport)
+            {
+                return _lastRTCPTime[i];
+            }
+        }
+    }
+    return 0;
+}
+
+WebRtc_Word32 RTCPSender::AddReportBlock(const WebRtc_UWord32 SSRC,
+                                         const RTCPReportBlock* reportBlock) {
+  if (reportBlock == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s invalid argument", __FUNCTION__);
+    return -1;
+  }
+  CriticalSectionScoped lock(_criticalSectionRTCPSender);
+
+  if (_reportBlocks.size() >= RTCP_MAX_REPORT_BLOCKS) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s invalid argument", __FUNCTION__);
+    return -1;
+  }
+  RTCPReportBlock* copyReportBlock = new RTCPReportBlock();
+  memcpy(copyReportBlock, reportBlock, sizeof(RTCPReportBlock));
+  _reportBlocks[SSRC] = copyReportBlock;
+  return 0;
+}
+
+WebRtc_Word32 RTCPSender::RemoveReportBlock(const WebRtc_UWord32 SSRC) {
+  CriticalSectionScoped lock(_criticalSectionRTCPSender);
+
+  std::map<WebRtc_UWord32, RTCPReportBlock*>::iterator it =
+      _reportBlocks.find(SSRC);
+
+  if (it == _reportBlocks.end()) {
+    return -1;
+  }
+  delete it->second;
+  _reportBlocks.erase(it);
+  return 0;
+}
+
+WebRtc_Word32
+RTCPSender::BuildSR(WebRtc_UWord8* rtcpbuffer,
+                    WebRtc_UWord32& pos,
+                    const WebRtc_UWord32 NTPsec,
+                    const WebRtc_UWord32 NTPfrac,
+                    const RTCPReportBlock* received)
+{
+    // sanity
+    if(pos + 52 >= IP_PACKET_SIZE)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+        return -2;
+    }
+    WebRtc_UWord32 RTPtime;
+    WebRtc_UWord32 BackTimedNTPsec;
+    WebRtc_UWord32 BackTimedNTPfrac;
+
+    WebRtc_UWord32 posNumberOfReportBlocks = pos;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0x80;
+
+    // Sender report
+    rtcpbuffer[pos++]=(WebRtc_UWord8)200;
+
+    for(int i = (RTCP_NUMBER_OF_SR-2); i >= 0; i--)
+    {
+        // shift old
+        _lastSendReport[i+1] = _lastSendReport[i];
+        _lastRTCPTime[i+1] =_lastRTCPTime[i];
+    }
+
+    _lastRTCPTime[0] = ModuleRTPUtility::ConvertNTPTimeToMS(NTPsec, NTPfrac); // before video cam compensation
+
+    if(_cameraDelayMS >= 0)
+    {
+        // fraction of a second as an unsigned word32 4.294 967 296E9
+        WebRtc_UWord32 cameraDelayFixFrac =  (WebRtc_UWord32)_cameraDelayMS* 4294967; // note camera delay can't be larger than +/-1000ms
+        if(NTPfrac > cameraDelayFixFrac)
+        {
+            // no problem just reduce the fraction part
+            BackTimedNTPfrac = NTPfrac - cameraDelayFixFrac;
+            BackTimedNTPsec = NTPsec;
+        } else
+        {
+            // we need to reduce the sec and add that sec to the frac
+            BackTimedNTPsec = NTPsec - 1;
+            BackTimedNTPfrac = 0xffffffff - (cameraDelayFixFrac - NTPfrac);
+        }
+    } else
+    {
+        // fraction of a second as an unsigned word32 4.294 967 296E9
+        WebRtc_UWord32 cameraDelayFixFrac =  (WebRtc_UWord32)(-_cameraDelayMS)* 4294967; // note camera delay can't be larger than +/-1000ms
+        if(NTPfrac > 0xffffffff - cameraDelayFixFrac)
+        {
+            // we need to add the sec and add that sec to the frac
+            BackTimedNTPsec = NTPsec + 1;
+            BackTimedNTPfrac = cameraDelayFixFrac + NTPfrac; // this will wrap but that is ok
+        } else
+        {
+            // no problem just add the fraction part
+            BackTimedNTPsec = NTPsec;
+            BackTimedNTPfrac = NTPfrac + cameraDelayFixFrac;
+        }
+    }
+    _lastSendReport[0] = (BackTimedNTPsec <<16) + (BackTimedNTPfrac >> 16);
+
+    // RTP timestamp
+    // This should have a ramdom start value added
+    // RTP is counted from NTP not the acctual RTP
+    // This reflects the perfect RTP time
+    // we solve this by initiating RTP to our NTP :)
+
+    WebRtc_UWord32 freqHz = 90000; // For video
+    if(_audio)
+    {
+        freqHz =  _rtpRtcp.CurrentSendFrequencyHz();
+        RTPtime = ModuleRTPUtility::GetCurrentRTP(&_clock, freqHz);
+    }
+    else // video 
+    {
+        // used to be (WebRtc_UWord32)(((float)BackTimedNTPfrac/(float)FRAC)* 90000)
+        WebRtc_UWord32 tmp = 9*(BackTimedNTPfrac/429496);
+        RTPtime = BackTimedNTPsec*freqHz + tmp;
+    }
+
+    
+    
+
+    // Add sender data
+    // Save  for our length field
+    pos++;
+    pos++;
+
+    // Add our own SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+    pos += 4;
+    // NTP
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, BackTimedNTPsec);
+    pos += 4;
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, BackTimedNTPfrac);
+    pos += 4;
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, RTPtime);
+    pos += 4;
+
+    //sender's packet count
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _rtpRtcp.PacketCountSent());
+    pos += 4;
+
+    //sender's octet count
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _rtpRtcp.ByteCountSent());
+    pos += 4;
+
+    WebRtc_UWord8 numberOfReportBlocks = 0;
+    WebRtc_Word32 retVal = AddReportBlocks(rtcpbuffer, pos, numberOfReportBlocks, received, NTPsec, NTPfrac);
+    if(retVal < 0)
+    {
+        //
+        return retVal ;
+    }
+    rtcpbuffer[posNumberOfReportBlocks] += numberOfReportBlocks;
+
+    WebRtc_UWord16 len = WebRtc_UWord16((pos/4) -1);
+    ModuleRTPUtility::AssignUWord16ToBuffer(rtcpbuffer+2, len);
+    return 0;
+}
+
+
+WebRtc_Word32 RTCPSender::BuildSDEC(WebRtc_UWord8* rtcpbuffer,
+                                    WebRtc_UWord32& pos) {
+  size_t lengthCname = strlen(_CNAME);
+  assert(lengthCname < RTCP_CNAME_SIZE);
+
+  // sanity
+  if(pos + 12 + lengthCname  >= IP_PACKET_SIZE) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s invalid argument", __FUNCTION__);
+    return -2;
+  }
+  // SDEC Source Description
+
+  // We always need to add SDES CNAME
+  rtcpbuffer[pos++] = static_cast<WebRtc_UWord8>(0x80 + 1 + _csrcCNAMEs.size());
+  rtcpbuffer[pos++] = static_cast<WebRtc_UWord8>(202);
+
+  // handle SDES length later on
+  WebRtc_UWord32 SDESLengthPos = pos;
+  pos++;
+  pos++;
+
+  // Add our own SSRC
+  ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+  pos += 4;
+
+  // CNAME = 1
+  rtcpbuffer[pos++] = static_cast<WebRtc_UWord8>(1);
+
+  //
+  rtcpbuffer[pos++] = static_cast<WebRtc_UWord8>(lengthCname);
+
+  WebRtc_UWord16 SDESLength = 10;
+
+  memcpy(&rtcpbuffer[pos], _CNAME, lengthCname);
+  pos += lengthCname;
+  SDESLength += (WebRtc_UWord16)lengthCname;
+
+  WebRtc_UWord16 padding = 0;
+  // We must have a zero field even if we have an even multiple of 4 bytes
+  if ((pos % 4) == 0) {
+    padding++;
+    rtcpbuffer[pos++]=0;
+  }
+  while ((pos % 4) != 0) {
+    padding++;
+    rtcpbuffer[pos++]=0;
+  }
+  SDESLength += padding;
+
+  std::map<WebRtc_UWord32, RTCPUtility::RTCPCnameInformation*>::iterator it =
+      _csrcCNAMEs.begin();
+
+  for(; it != _csrcCNAMEs.end(); it++) {
+    RTCPCnameInformation* cname = it->second;
+    WebRtc_UWord32 SSRC = it->first;
+
+    // Add SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, SSRC);
+    pos += 4;
+
+    // CNAME = 1
+    rtcpbuffer[pos++] = static_cast<WebRtc_UWord8>(1);
+
+    size_t length = strlen(cname->name);
+    assert(length < RTCP_CNAME_SIZE);
+
+    rtcpbuffer[pos++]= static_cast<WebRtc_UWord8>(length);
+    SDESLength += 6;
+
+    memcpy(&rtcpbuffer[pos],cname->name, length);
+
+    pos += length;
+    SDESLength += length;
+    WebRtc_UWord16 padding = 0;
+
+    // We must have a zero field even if we have an even multiple of 4 bytes
+    if((pos % 4) == 0){
+      padding++;
+      rtcpbuffer[pos++]=0;
+    }
+    while((pos % 4) != 0){
+      padding++;
+      rtcpbuffer[pos++] = 0;
+    }
+    SDESLength += padding;
+  }
+  // in 32-bit words minus one and we don't count the header
+  WebRtc_UWord16 buffer_length = (SDESLength / 4) - 1;
+  ModuleRTPUtility::AssignUWord16ToBuffer(rtcpbuffer + SDESLengthPos,
+                                          buffer_length);
+  return 0;
+}
+
+WebRtc_Word32
+RTCPSender::BuildRR(WebRtc_UWord8* rtcpbuffer,
+                    WebRtc_UWord32& pos,
+                    const WebRtc_UWord32 NTPsec,
+                    const WebRtc_UWord32 NTPfrac,
+                    const RTCPReportBlock* received)
+{
+    // sanity one block
+    if(pos + 32 >= IP_PACKET_SIZE)
+    {
+        return -2;
+    }
+    WebRtc_UWord32 posNumberOfReportBlocks = pos;
+
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0x80;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)201;
+
+    // Save  for our length field
+    pos++;
+    pos++;
+
+    // Add our own SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+    pos += 4;
+
+    WebRtc_UWord8 numberOfReportBlocks = 0;
+    WebRtc_Word32 retVal = AddReportBlocks(rtcpbuffer, pos, numberOfReportBlocks, received, NTPsec, NTPfrac);
+    if(retVal < 0)
+    {
+        return retVal;
+    }
+    rtcpbuffer[posNumberOfReportBlocks] += numberOfReportBlocks;
+
+    WebRtc_UWord16 len = WebRtc_UWord16((pos)/4 -1);
+    ModuleRTPUtility::AssignUWord16ToBuffer(rtcpbuffer+2, len);
+    return 0;
+}
+
+// From RFC 5450: Transmission Time Offsets in RTP Streams.
+//        0                   1                   2                   3
+//        0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+//       +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//   hdr |V=2|P|    RC   |   PT=IJ=195   |             length            |
+//       +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//       |                      inter-arrival jitter                     |
+//       +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//       .                                                               .
+//       .                                                               .
+//       .                                                               .
+//       |                      inter-arrival jitter                     |
+//       +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+//
+//  If present, this RTCP packet must be placed after a receiver report
+//  (inside a compound RTCP packet), and MUST have the same value for RC
+//  (reception report count) as the receiver report.
+
+WebRtc_Word32
+RTCPSender::BuildExtendedJitterReport(
+    WebRtc_UWord8* rtcpbuffer,
+    WebRtc_UWord32& pos,
+    const WebRtc_UWord32 jitterTransmissionTimeOffset)
+{
+    if (_reportBlocks.size() > 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id, "Not implemented.");
+        return 0;
+    }
+
+    // sanity
+    if(pos + 8 >= IP_PACKET_SIZE)
+    {
+        return -2;
+    }
+    // add picture loss indicator
+    WebRtc_UWord8 RC = 1;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0x80 + RC;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)195;
+
+    // Used fixed length of 2
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)(1);
+
+    // Add inter-arrival jitter
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer + pos,
+                                            jitterTransmissionTimeOffset);
+    pos += 4;
+    return 0;
+}
+
+WebRtc_Word32
+RTCPSender::BuildPLI(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos)
+{
+    // sanity
+    if(pos + 12 >= IP_PACKET_SIZE)
+    {
+        return -2;
+    }
+    // add picture loss indicator
+    WebRtc_UWord8 FMT = 1;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0x80 + FMT;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)206;
+
+    //Used fixed length of 2
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)(2);
+
+    // Add our own SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+    pos += 4;
+
+    // Add the remote SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _remoteSSRC);
+    pos += 4;
+    return 0;
+}
+
+WebRtc_Word32 RTCPSender::BuildFIR(WebRtc_UWord8* rtcpbuffer,
+                                   WebRtc_UWord32& pos,
+                                   bool repeat) {
+  // sanity
+  if(pos + 20 >= IP_PACKET_SIZE)  {
+    return -2;
+  }
+  if (!repeat) {
+    _sequenceNumberFIR++;   // do not increase if repetition
+  }
+
+  // add full intra request indicator
+  WebRtc_UWord8 FMT = 4;
+  rtcpbuffer[pos++] = (WebRtc_UWord8)0x80 + FMT;
+  rtcpbuffer[pos++] = (WebRtc_UWord8)206;
+
+  //Length of 4
+  rtcpbuffer[pos++] = (WebRtc_UWord8)0;
+  rtcpbuffer[pos++] = (WebRtc_UWord8)(4);
+
+  // Add our own SSRC
+  ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _SSRC);
+  pos += 4;
+
+  // RFC 5104     4.3.1.2.  Semantics
+  // SSRC of media source
+  rtcpbuffer[pos++] = (WebRtc_UWord8)0;
+  rtcpbuffer[pos++] = (WebRtc_UWord8)0;
+  rtcpbuffer[pos++] = (WebRtc_UWord8)0;
+  rtcpbuffer[pos++] = (WebRtc_UWord8)0;
+
+  // Additional Feedback Control Information (FCI)
+  ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer + pos, _remoteSSRC);
+  pos += 4;
+
+  rtcpbuffer[pos++] = (WebRtc_UWord8)(_sequenceNumberFIR);
+  rtcpbuffer[pos++] = (WebRtc_UWord8)0;
+  rtcpbuffer[pos++] = (WebRtc_UWord8)0;
+  rtcpbuffer[pos++] = (WebRtc_UWord8)0;
+  return 0;
+}
+
+/*
+    0                   1                   2                   3
+    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |            First        |        Number           | PictureID |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+*/
+WebRtc_Word32
+RTCPSender::BuildSLI(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos, const WebRtc_UWord8 pictureID)
+{
+    // sanity
+    if(pos + 16 >= IP_PACKET_SIZE)
+    {
+        return -2;
+    }
+    // add slice loss indicator
+    WebRtc_UWord8 FMT = 2;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0x80 + FMT;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)206;
+
+    //Used fixed length of 3
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)(3);
+
+    // Add our own SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+    pos += 4;
+
+    // Add the remote SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _remoteSSRC);
+    pos += 4;
+
+    // Add first, number & picture ID 6 bits
+    // first  = 0, 13 - bits
+    // number = 0x1fff, 13 - bits only ones for now
+    WebRtc_UWord32 sliField = (0x1fff << 6)+ (0x3f & pictureID);
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, sliField);
+    pos += 4;
+    return 0;
+}
+
+/*
+    0                   1                   2                   3
+    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |      PB       |0| Payload Type|    Native RPSI bit string     |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |   defined per codec          ...                | Padding (0) |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+*/
+/*
+*    Note: not generic made for VP8
+*/
+WebRtc_Word32
+RTCPSender::BuildRPSI(WebRtc_UWord8* rtcpbuffer,
+                     WebRtc_UWord32& pos,
+                     const WebRtc_UWord64 pictureID,
+                     const WebRtc_UWord8 payloadType)
+{
+    // sanity
+    if(pos + 24 >= IP_PACKET_SIZE)
+    {
+        return -2;
+    }
+    // add Reference Picture Selection Indication
+    WebRtc_UWord8 FMT = 3;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0x80 + FMT;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)206;
+
+    // calc length
+    WebRtc_UWord32 bitsRequired = 7;
+    WebRtc_UWord8 bytesRequired = 1;
+    while((pictureID>>bitsRequired) > 0)
+    {
+        bitsRequired += 7;
+        bytesRequired++;
+    }
+
+    WebRtc_UWord8 size = 3;
+    if(bytesRequired > 6)
+    {
+        size = 5;
+    } else if(bytesRequired > 2)
+    {
+        size = 4;
+    }
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+    rtcpbuffer[pos++]=size;
+
+    // Add our own SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+    pos += 4;
+
+    // Add the remote SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _remoteSSRC);
+    pos += 4;
+
+    // calc padding length
+    WebRtc_UWord8 paddingBytes = 4-((2+bytesRequired)%4);
+    if(paddingBytes == 4)
+    {
+        paddingBytes = 0;
+    }
+    // add padding length in bits
+    rtcpbuffer[pos] = paddingBytes*8; // padding can be 0, 8, 16 or 24
+    pos++;
+
+    // add payload type
+    rtcpbuffer[pos] = payloadType;
+    pos++;
+
+    // add picture ID
+    for(int i = bytesRequired-1; i > 0; i--)
+    {
+        rtcpbuffer[pos] = 0x80 | WebRtc_UWord8(pictureID >> (i*7));
+        pos++;
+    }
+    // add last byte of picture ID
+    rtcpbuffer[pos] = WebRtc_UWord8(pictureID & 0x7f);
+    pos++;
+
+    // add padding
+    for(int j = 0; j <paddingBytes; j++)
+    {
+        rtcpbuffer[pos] = 0;
+        pos++;
+    }
+    return 0;
+}
+
+WebRtc_Word32
+RTCPSender::BuildREMB(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos)
+{
+    // sanity
+    if(pos + 20 + 4 * _lengthRembSSRC >= IP_PACKET_SIZE)
+    {
+        return -2;
+    }
+    // add application layer feedback
+    WebRtc_UWord8 FMT = 15;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0x80 + FMT;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)206;
+
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+    rtcpbuffer[pos++]=_lengthRembSSRC + 4;
+
+    // Add our own SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+    pos += 4;
+
+    // Remote SSRC must be 0
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, 0);
+    pos += 4;
+
+    rtcpbuffer[pos++]='R';
+    rtcpbuffer[pos++]='E';
+    rtcpbuffer[pos++]='M';
+    rtcpbuffer[pos++]='B';
+
+    rtcpbuffer[pos++] = _lengthRembSSRC;
+    // 6 bit Exp
+    // 18 bit mantissa
+    WebRtc_UWord8 brExp = 0;
+    for(WebRtc_UWord32 i=0; i<64; i++)
+    {
+        if(_rembBitrate <= ((WebRtc_UWord32)262143 << i))
+        {
+            brExp = i;
+            break;
+        }
+    }
+    const WebRtc_UWord32 brMantissa = (_rembBitrate >> brExp);
+    rtcpbuffer[pos++]=(WebRtc_UWord8)((brExp << 2) + ((brMantissa >> 16) & 0x03));
+    rtcpbuffer[pos++]=(WebRtc_UWord8)(brMantissa >> 8);
+    rtcpbuffer[pos++]=(WebRtc_UWord8)(brMantissa);
+
+    for (int i = 0; i < _lengthRembSSRC; i++) 
+    { 
+        ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _rembSSRC[i]);
+        pos += 4;
+    }
+    return 0;
+}
+
+void
+RTCPSender::SetTargetBitrate(unsigned int target_bitrate)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    _tmmbr_Send = target_bitrate / 1000;
+}
+
+WebRtc_Word32
+RTCPSender::BuildTMMBR(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos)
+{
+    // Before sending the TMMBR check the received TMMBN, only an owner is allowed to raise the bitrate
+    // If the sender is an owner of the TMMBN -> send TMMBR
+    // If not an owner but the TMMBR would enter the TMMBN -> send TMMBR
+
+    // get current bounding set from RTCP receiver
+    bool tmmbrOwner = false;
+    // store in candidateSet, allocates one extra slot
+    TMMBRSet* candidateSet = _tmmbrHelp.CandidateSet();
+
+    // holding _criticalSectionRTCPSender while calling RTCPreceiver which
+    // will accuire _criticalSectionRTCPReceiver is a potental deadlock but
+    // since RTCPreceiver is not doing the reverse we should be fine
+    WebRtc_Word32 lengthOfBoundingSet
+        = _rtpRtcp.BoundingSet(tmmbrOwner, candidateSet);
+
+    if(lengthOfBoundingSet > 0)
+    {
+        for (WebRtc_Word32 i = 0; i < lengthOfBoundingSet; i++)
+        {
+            if( candidateSet->Tmmbr(i) == _tmmbr_Send &&
+                candidateSet->PacketOH(i) == _packetOH_Send)
+            {
+                // do not send the same tuple
+                return 0;
+            }
+        }
+        if(!tmmbrOwner)
+        {
+            // use received bounding set as candidate set
+            // add current tuple
+            candidateSet->SetEntry(lengthOfBoundingSet,
+                                   _tmmbr_Send,
+                                   _packetOH_Send,
+                                   _SSRC);
+            int numCandidates = lengthOfBoundingSet+ 1;
+
+            // find bounding set
+            TMMBRSet* boundingSet = NULL;
+            int numBoundingSet = _tmmbrHelp.FindTMMBRBoundingSet(boundingSet);
+            if(numBoundingSet > 0 || numBoundingSet <= numCandidates)
+            {
+                tmmbrOwner = _tmmbrHelp.IsOwner(_SSRC, numBoundingSet);
+            }
+            if(!tmmbrOwner)
+            {
+                // did not enter bounding set, no meaning to send this request
+                return 0;
+            }
+        }
+    }
+
+    if(_tmmbr_Send)
+    {
+        // sanity
+        if(pos + 20 >= IP_PACKET_SIZE)
+        {
+            return -2;
+        }
+        // add TMMBR indicator
+        WebRtc_UWord8 FMT = 3;
+        rtcpbuffer[pos++]=(WebRtc_UWord8)0x80 + FMT;
+        rtcpbuffer[pos++]=(WebRtc_UWord8)205;
+
+        //Length of 4
+        rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+        rtcpbuffer[pos++]=(WebRtc_UWord8)(4);
+
+        // Add our own SSRC
+        ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+        pos += 4;
+
+        // RFC 5104     4.2.1.2.  Semantics
+
+        // SSRC of media source
+        rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+        rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+        rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+        rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+
+        // Additional Feedback Control Information (FCI)
+        ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _remoteSSRC);
+        pos += 4;
+
+        WebRtc_UWord32 bitRate = _tmmbr_Send*1000;
+        WebRtc_UWord32 mmbrExp = 0;
+        for(WebRtc_UWord32 i=0;i<64;i++)
+        {
+            if(bitRate <= ((WebRtc_UWord32)131071 << i))
+            {
+                mmbrExp = i;
+                break;
+            }
+        }
+        WebRtc_UWord32 mmbrMantissa = (bitRate >> mmbrExp);
+
+        rtcpbuffer[pos++]=(WebRtc_UWord8)((mmbrExp << 2) + ((mmbrMantissa >> 15) & 0x03));
+        rtcpbuffer[pos++]=(WebRtc_UWord8)(mmbrMantissa >> 7);
+        rtcpbuffer[pos++]=(WebRtc_UWord8)((mmbrMantissa << 1) + ((_packetOH_Send >> 8)& 0x01));
+        rtcpbuffer[pos++]=(WebRtc_UWord8)(_packetOH_Send);
+    }
+    return 0;
+}
+
+WebRtc_Word32
+RTCPSender::BuildTMMBN(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos)
+{
+    TMMBRSet* boundingSet = _tmmbrHelp.BoundingSetToSend();
+    if(boundingSet == NULL)
+    {
+        return -1;
+    }
+    // sanity
+    if(pos + 12 + boundingSet->lengthOfSet()*8 >= IP_PACKET_SIZE)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+        return -2;
+    }
+    WebRtc_UWord8 FMT = 4;
+    // add TMMBN indicator
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0x80 + FMT;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)205;
+
+    //Add length later
+    int posLength = pos;
+    pos++;
+    pos++;
+
+    // Add our own SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+    pos += 4;
+
+    // RFC 5104     4.2.2.2.  Semantics
+
+    // SSRC of media source
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+
+    // Additional Feedback Control Information (FCI)
+    int numBoundingSet = 0;
+    for(WebRtc_UWord32 n=0; n< boundingSet->lengthOfSet(); n++)
+    {
+        if (boundingSet->Tmmbr(n) > 0)
+        {
+            WebRtc_UWord32 tmmbrSSRC = boundingSet->Ssrc(n);
+            ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, tmmbrSSRC);
+            pos += 4;
+
+            WebRtc_UWord32 bitRate = boundingSet->Tmmbr(n) * 1000;
+            WebRtc_UWord32 mmbrExp = 0;
+            for(int i=0; i<64; i++)
+            {
+                if(bitRate <=  ((WebRtc_UWord32)131071 << i))
+                {
+                    mmbrExp = i;
+                    break;
+                }
+            }
+            WebRtc_UWord32 mmbrMantissa = (bitRate >> mmbrExp);
+            WebRtc_UWord32 measuredOH = boundingSet->PacketOH(n);
+
+            rtcpbuffer[pos++]=(WebRtc_UWord8)((mmbrExp << 2) + ((mmbrMantissa >> 15) & 0x03));
+            rtcpbuffer[pos++]=(WebRtc_UWord8)(mmbrMantissa >> 7);
+            rtcpbuffer[pos++]=(WebRtc_UWord8)((mmbrMantissa << 1) + ((measuredOH >> 8)& 0x01));
+            rtcpbuffer[pos++]=(WebRtc_UWord8)(measuredOH);
+            numBoundingSet++;
+        }
+    }
+    WebRtc_UWord16 length= (WebRtc_UWord16)(2+2*numBoundingSet);
+    rtcpbuffer[posLength++]=(WebRtc_UWord8)(length>>8);
+    rtcpbuffer[posLength]=(WebRtc_UWord8)(length);
+    return 0;
+}
+
+WebRtc_Word32
+RTCPSender::BuildAPP(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos)
+{
+    // sanity
+    if(_appData == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id, "%s invalid state", __FUNCTION__);
+        return -1;
+    }
+    if(pos + 12 + _appLength >= IP_PACKET_SIZE)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+        return -2;
+    }
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0x80 + _appSubType;
+
+    // Add APP ID
+    rtcpbuffer[pos++]=(WebRtc_UWord8)204;
+
+    WebRtc_UWord16 length = (_appLength>>2) + 2; // include SSRC and name
+    rtcpbuffer[pos++]=(WebRtc_UWord8)(length>>8);
+    rtcpbuffer[pos++]=(WebRtc_UWord8)(length);
+
+    // Add our own SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+    pos += 4;
+
+    // Add our application name
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _appName);
+    pos += 4;
+
+    // Add the data
+    memcpy(rtcpbuffer +pos, _appData,_appLength);
+    pos += _appLength;
+    return 0;
+}
+
+WebRtc_Word32
+RTCPSender::BuildNACK(WebRtc_UWord8* rtcpbuffer,
+                      WebRtc_UWord32& pos,
+                      const WebRtc_Word32 nackSize,
+                      const WebRtc_UWord16* nackList)
+{
+    // sanity
+    if(pos + 16 >= IP_PACKET_SIZE)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+        return -2;
+    }
+
+    // int size, WebRtc_UWord16* nackList
+    // add nack list
+    WebRtc_UWord8 FMT = 1;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0x80 + FMT;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)205;
+
+    rtcpbuffer[pos++]=(WebRtc_UWord8) 0;
+    int nackSizePos = pos;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)(3); //setting it to one kNACK signal as default
+
+    // Add our own SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+    pos += 4;
+
+    // Add the remote SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _remoteSSRC);
+    pos += 4;
+
+    // add the list
+    int i = 0;
+    int numOfNackFields = 0;
+    while(nackSize > i && numOfNackFields < 253)
+    {
+        WebRtc_UWord16 nack = nackList[i];
+        // put dow our sequence number
+        ModuleRTPUtility::AssignUWord16ToBuffer(rtcpbuffer+pos, nack);
+        pos += 2;
+
+        i++;
+        numOfNackFields++;
+        if(nackSize > i)
+        {
+            bool moreThan16Away = (WebRtc_UWord16(nack+16) < nackList[i])?true: false;
+            if(!moreThan16Away)
+            {
+                // check for a wrap
+                if(WebRtc_UWord16(nack+16) > 0xff00 && nackList[i] < 0x0fff)
+                {
+                    // wrap
+                    moreThan16Away = true;
+                }
+            }
+            if(moreThan16Away)
+            {
+                // next is more than 16 away
+                rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+                rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+            } else
+            {
+                // build our bitmask
+                WebRtc_UWord16 bitmask = 0;
+
+                bool within16Away = (WebRtc_UWord16(nack+16) > nackList[i])?true: false;
+                if(within16Away)
+                {
+                   // check for a wrap
+                    if(WebRtc_UWord16(nack+16) > 0xff00 && nackList[i] < 0x0fff)
+                    {
+                        // wrap
+                        within16Away = false;
+                    }
+                }
+
+                while( nackSize > i && within16Away)
+                {
+                    WebRtc_Word16 shift = (nackList[i]-nack)-1;
+                    assert(!(shift > 15) && !(shift < 0));
+
+                    bitmask += (1<< shift);
+                    i++;
+                    if(nackSize > i)
+                    {
+                        within16Away = (WebRtc_UWord16(nack+16) > nackList[i])?true: false;
+                        if(within16Away)
+                        {
+                            // check for a wrap
+                            if(WebRtc_UWord16(nack+16) > 0xff00 && nackList[i] < 0x0fff)
+                            {
+                                // wrap
+                                within16Away = false;
+                            }
+                        }
+                    }
+                }
+                ModuleRTPUtility::AssignUWord16ToBuffer(rtcpbuffer+pos, bitmask);
+                pos += 2;
+            }
+            // sanity do we have room from one more 4 byte block?
+            if(pos + 4 >= IP_PACKET_SIZE)
+            {
+                return -2;
+            }
+        } else
+        {
+            // no more in the list
+            rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+            rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+        }
+    }
+    rtcpbuffer[nackSizePos]=(WebRtc_UWord8)(2+numOfNackFields);
+    return 0;
+}
+
+WebRtc_Word32
+RTCPSender::BuildBYE(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos)
+{
+    // sanity
+    if(pos + 8 >= IP_PACKET_SIZE)
+    {
+        return -2;
+    }
+    if(_includeCSRCs)
+    {
+        // Add a bye packet
+        rtcpbuffer[pos++]=(WebRtc_UWord8)0x80 + 1 + _CSRCs;  // number of SSRC+CSRCs
+        rtcpbuffer[pos++]=(WebRtc_UWord8)203;
+
+        // length
+        rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+        rtcpbuffer[pos++]=(WebRtc_UWord8)(1 + _CSRCs);
+
+        // Add our own SSRC
+        ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+        pos += 4;
+
+        // add CSRCs
+        for(int i = 0; i < _CSRCs; i++)
+        {
+            ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _CSRC[i]);
+            pos += 4;
+        }
+    } else
+    {
+        // Add a bye packet
+        rtcpbuffer[pos++]=(WebRtc_UWord8)0x80 + 1;  // number of SSRC+CSRCs
+        rtcpbuffer[pos++]=(WebRtc_UWord8)203;
+
+        // length
+        rtcpbuffer[pos++]=(WebRtc_UWord8)0;
+        rtcpbuffer[pos++]=(WebRtc_UWord8)1;
+
+        // Add our own SSRC
+        ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+        pos += 4;
+    }
+    return 0;
+}
+
+WebRtc_Word32
+RTCPSender::BuildVoIPMetric(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos)
+{
+    // sanity
+    if(pos + 44 >= IP_PACKET_SIZE)
+    {
+        return -2;
+    }
+
+    // Add XR header
+    rtcpbuffer[pos++]=(WebRtc_UWord8)0x80;
+    rtcpbuffer[pos++]=(WebRtc_UWord8)207;
+
+    WebRtc_UWord32 XRLengthPos = pos;
+
+    // handle length later on
+    pos++;
+    pos++;
+
+    // Add our own SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _SSRC);
+    pos += 4;
+
+    // Add a VoIP metrics block
+    rtcpbuffer[pos++]=7;
+    rtcpbuffer[pos++]=0;
+    rtcpbuffer[pos++]=0;
+    rtcpbuffer[pos++]=8;
+
+    // Add the remote SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _remoteSSRC);
+    pos += 4;
+
+    rtcpbuffer[pos++] = _xrVoIPMetric.lossRate;
+    rtcpbuffer[pos++] = _xrVoIPMetric.discardRate;
+    rtcpbuffer[pos++] = _xrVoIPMetric.burstDensity;
+    rtcpbuffer[pos++] = _xrVoIPMetric.gapDensity;
+
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.burstDuration >> 8);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.burstDuration);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.gapDuration >> 8);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.gapDuration);
+
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.roundTripDelay >> 8);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.roundTripDelay);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.endSystemDelay >> 8);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.endSystemDelay);
+
+    rtcpbuffer[pos++] = _xrVoIPMetric.signalLevel;
+    rtcpbuffer[pos++] = _xrVoIPMetric.noiseLevel;
+    rtcpbuffer[pos++] = _xrVoIPMetric.RERL;
+    rtcpbuffer[pos++] = _xrVoIPMetric.Gmin;
+
+    rtcpbuffer[pos++] = _xrVoIPMetric.Rfactor;
+    rtcpbuffer[pos++] = _xrVoIPMetric.extRfactor;
+    rtcpbuffer[pos++] = _xrVoIPMetric.MOSLQ;
+    rtcpbuffer[pos++] = _xrVoIPMetric.MOSCQ;
+
+    rtcpbuffer[pos++] = _xrVoIPMetric.RXconfig;
+    rtcpbuffer[pos++] = 0; // reserved
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.JBnominal >> 8);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.JBnominal);
+
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.JBmax >> 8);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.JBmax);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.JBabsMax >> 8);
+    rtcpbuffer[pos++] = (WebRtc_UWord8)(_xrVoIPMetric.JBabsMax);
+
+    rtcpbuffer[XRLengthPos]=(WebRtc_UWord8)(0);
+    rtcpbuffer[XRLengthPos+1]=(WebRtc_UWord8)(10);
+    return 0;
+}
+
+WebRtc_Word32
+RTCPSender::SendRTCP(const WebRtc_UWord32 packetTypeFlags,
+                     const WebRtc_Word32 nackSize,       // NACK
+                     const WebRtc_UWord16* nackList,     // NACK
+                     const bool repeat,                  // FIR
+                     const WebRtc_UWord64 pictureID)     // SLI & RPSI
+{
+    WebRtc_UWord32 rtcpPacketTypeFlags = packetTypeFlags;
+    WebRtc_UWord32 pos = 0;
+    WebRtc_UWord8 rtcpbuffer[IP_PACKET_SIZE];
+
+    do  // only to be able to use break :) (and the critsect must be inside its own scope)
+    {
+        // collect the received information
+        RTCPReportBlock received;
+        bool hasReceived = false;
+        WebRtc_UWord32 NTPsec = 0;
+        WebRtc_UWord32 NTPfrac = 0;
+        bool rtcpCompound = false;
+        WebRtc_UWord32 jitterTransmissionOffset = 0;
+
+        {
+          CriticalSectionScoped lock(_criticalSectionRTCPSender);
+          if(_method == kRtcpOff)
+          {
+              WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
+                           "%s invalid state", __FUNCTION__);
+              return -1;
+          }
+          rtcpCompound = (_method == kRtcpCompound) ? true : false;
+        }
+
+        if (rtcpCompound ||
+            rtcpPacketTypeFlags & kRtcpReport ||
+            rtcpPacketTypeFlags & kRtcpSr ||
+            rtcpPacketTypeFlags & kRtcpRr)
+        {
+            // get statistics from our RTPreceiver outside critsect
+            if(_rtpRtcp.ReportBlockStatistics(&received.fractionLost,
+                                              &received.cumulativeLost,
+                                              &received.extendedHighSeqNum,
+                                              &received.jitter,
+                                              &jitterTransmissionOffset) == 0)
+            {
+                hasReceived = true;
+
+                WebRtc_UWord32 lastReceivedRRNTPsecs = 0;
+                WebRtc_UWord32 lastReceivedRRNTPfrac = 0;
+                WebRtc_UWord32 remoteSR = 0;
+
+                // ok even if we have not received a SR, we will send 0 in that case
+                _rtpRtcp.LastReceivedNTP(lastReceivedRRNTPsecs,
+                                         lastReceivedRRNTPfrac,
+                                         remoteSR);
+
+                // get our NTP as late as possible to avoid a race
+                _clock.CurrentNTP(NTPsec, NTPfrac);
+
+                // Delay since last received report
+                WebRtc_UWord32 delaySinceLastReceivedSR = 0;
+                if((lastReceivedRRNTPsecs !=0) || (lastReceivedRRNTPfrac !=0))
+                {
+                    // get the 16 lowest bits of seconds and the 16 higest bits of fractions
+                    WebRtc_UWord32 now=NTPsec&0x0000FFFF;
+                    now <<=16;
+                    now += (NTPfrac&0xffff0000)>>16;
+
+                    WebRtc_UWord32 receiveTime = lastReceivedRRNTPsecs&0x0000FFFF;
+                    receiveTime <<=16;
+                    receiveTime += (lastReceivedRRNTPfrac&0xffff0000)>>16;
+
+                    delaySinceLastReceivedSR = now-receiveTime;
+                }
+                received.delaySinceLastSR = delaySinceLastReceivedSR;
+                received.lastSR = remoteSR;
+            } else
+            {
+                // we need to send our NTP even if we dont have received any reports
+                _clock.CurrentNTP(NTPsec, NTPfrac);
+            }
+        }
+
+        CriticalSectionScoped lock(_criticalSectionRTCPSender);
+
+        if(_TMMBR ) // attach TMMBR to send and receive reports
+        {
+            rtcpPacketTypeFlags |= kRtcpTmmbr;
+        }
+        if(_appSend)
+        {
+            rtcpPacketTypeFlags |= kRtcpApp;
+            _appSend = false;
+        }
+        if(_REMB && _sendREMB)
+        {
+            // Always attach REMB to SR if that is configured. Note that REMB is
+            // only sent on one of the RTP modules in the REMB group.
+            rtcpPacketTypeFlags |= kRtcpRemb;
+        }        
+        if(_xrSendVoIPMetric)
+        {
+            rtcpPacketTypeFlags |= kRtcpXrVoipMetric;
+            _xrSendVoIPMetric = false;
+        }
+        if(_sendTMMBN)  // set when having received a TMMBR
+        {
+            rtcpPacketTypeFlags |= kRtcpTmmbn;
+            _sendTMMBN = false;
+        }
+
+        if(_method == kRtcpCompound)
+        {
+            if(_sending)
+            {
+                rtcpPacketTypeFlags |= kRtcpSr;
+            } else
+            {
+                rtcpPacketTypeFlags |= kRtcpRr;
+            }
+            if (_IJ && hasReceived)
+            {
+                rtcpPacketTypeFlags |= kRtcpTransmissionTimeOffset;
+            }
+        } else if(_method == kRtcpNonCompound)
+        {
+            if(rtcpPacketTypeFlags & kRtcpReport)
+            {
+                if(_sending)
+                {
+                    rtcpPacketTypeFlags |= kRtcpSr;
+                } else
+                {
+                    rtcpPacketTypeFlags |= kRtcpRr;
+                }
+            }
+        }
+        if( rtcpPacketTypeFlags & kRtcpRr ||
+            rtcpPacketTypeFlags & kRtcpSr)
+        {
+            // generate next time to send a RTCP report
+            // seeded from RTP constructor
+            WebRtc_Word32 random = rand() % 1000;
+            WebRtc_Word32 timeToNext = RTCP_INTERVAL_AUDIO_MS;
+
+            if(_audio)
+            {
+                timeToNext = (RTCP_INTERVAL_AUDIO_MS/2) + (RTCP_INTERVAL_AUDIO_MS*random/1000);
+            }else
+            {
+                WebRtc_UWord32 minIntervalMs = RTCP_INTERVAL_AUDIO_MS;
+                if(_sending)
+                {
+                    // calc bw for video 360/sendBW in kbit/s
+                    WebRtc_UWord32 sendBitrateKbit = 0;
+                    WebRtc_UWord32 videoRate = 0;
+                    WebRtc_UWord32 fecRate = 0;
+                    WebRtc_UWord32 nackRate = 0;
+                    _rtpRtcp.BitrateSent(&sendBitrateKbit,
+                                         &videoRate,
+                                         &fecRate,
+                                         &nackRate);
+                    sendBitrateKbit /= 1000;
+                    if(sendBitrateKbit != 0)
+                    {
+                        minIntervalMs = 360000/sendBitrateKbit;
+                    }
+                }
+                if(minIntervalMs > RTCP_INTERVAL_VIDEO_MS)
+                {
+                    minIntervalMs = RTCP_INTERVAL_VIDEO_MS;
+                }
+                timeToNext = (minIntervalMs/2) + (minIntervalMs*random/1000);
+            }
+            _nextTimeToSendRTCP = _clock.GetTimeInMS() + timeToNext;
+        }
+
+        // if the data does not fitt in the packet we fill it as much as possible
+        WebRtc_Word32 buildVal = 0;
+
+        if(rtcpPacketTypeFlags & kRtcpSr)
+        {
+            if(hasReceived)
+            {
+                buildVal = BuildSR(rtcpbuffer, pos, NTPsec, NTPfrac, &received);
+            } else
+            {
+                buildVal = BuildSR(rtcpbuffer, pos, NTPsec, NTPfrac);
+            }
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+            buildVal = BuildSDEC(rtcpbuffer, pos);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+
+        }else if(rtcpPacketTypeFlags & kRtcpRr)
+        {
+            if(hasReceived)
+            {
+                buildVal = BuildRR(rtcpbuffer, pos, NTPsec, NTPfrac,&received);
+            }else
+            {
+                buildVal = BuildRR(rtcpbuffer, pos, NTPsec, NTPfrac);
+            }
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+            // only of set
+            if(_CNAME[0] != 0)
+            {
+                buildVal = BuildSDEC(rtcpbuffer, pos);
+                if(buildVal == -1)
+                {
+                    return -1; // error
+                }
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpTransmissionTimeOffset)
+        {
+            // If present, this RTCP packet must be placed after a
+            // receiver report.
+            buildVal = BuildExtendedJitterReport(rtcpbuffer,
+                                                 pos,
+                                                 jitterTransmissionOffset);
+            if(buildVal == -1)
+            {
+                return -1; // error
+            }
+            else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpPli)
+        {
+            buildVal = BuildPLI(rtcpbuffer, pos);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpFir)
+        {
+            buildVal = BuildFIR(rtcpbuffer, pos, repeat);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpSli)
+        {
+            buildVal = BuildSLI(rtcpbuffer, pos, (WebRtc_UWord8)pictureID);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpRpsi)
+        {
+            const WebRtc_Word8 payloadType = _rtpRtcp.SendPayloadType();
+            if(payloadType == -1)
+            {
+                return -1;
+            }
+            buildVal = BuildRPSI(rtcpbuffer, pos, pictureID, (WebRtc_UWord8)payloadType);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpRemb)
+        {
+            buildVal = BuildREMB(rtcpbuffer, pos);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpBye)
+        {
+            buildVal = BuildBYE(rtcpbuffer, pos);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpApp)
+        {
+            buildVal = BuildAPP(rtcpbuffer, pos);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpTmmbr)
+        {
+            buildVal = BuildTMMBR(rtcpbuffer, pos);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpTmmbn)
+        {
+            buildVal = BuildTMMBN(rtcpbuffer, pos);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpNack)
+        {
+            buildVal = BuildNACK(rtcpbuffer, pos, nackSize, nackList);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+        if(rtcpPacketTypeFlags & kRtcpXrVoipMetric)
+        {
+            buildVal = BuildVoIPMetric(rtcpbuffer, pos);
+            if(buildVal == -1)
+            {
+                return -1; // error
+
+            }else if(buildVal == -2)
+            {
+                break;  // out of buffer
+            }
+        }
+    }while (false);
+    // Sanity don't send empty packets.
+    if (pos == 0)
+    {
+        return -1;
+    }
+    return SendToNetwork(rtcpbuffer, (WebRtc_UWord16)pos);
+}
+
+WebRtc_Word32
+RTCPSender::SendToNetwork(const WebRtc_UWord8* dataBuffer,
+                          const WebRtc_UWord16 length)
+{
+    CriticalSectionScoped lock(_criticalSectionTransport);
+    if(_cbTransport)
+    {
+        if(_cbTransport->SendRTCPPacket(_id, dataBuffer, length) > 0)
+        {
+            return 0;
+        }
+    }
+    return -1;
+}
+
+WebRtc_Word32
+RTCPSender::SetCSRCStatus(const bool include)
+{
+    _includeCSRCs = include;
+    return 0;
+}
+
+WebRtc_Word32
+RTCPSender::SetCSRCs(const WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize],
+                    const WebRtc_UWord8 arrLength)
+{
+    if(arrLength > kRtpCsrcSize)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+        assert(false);
+        return -1;
+    }
+
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+
+    for(int i = 0; i < arrLength;i++)
+    {
+        _CSRC[i] = arrOfCSRC[i];
+    }
+    _CSRCs = arrLength;
+    return 0;
+}
+
+WebRtc_Word32
+RTCPSender::SetApplicationSpecificData(const WebRtc_UWord8 subType,
+                                       const WebRtc_UWord32 name,
+                                       const WebRtc_UWord8* data,
+                                       const WebRtc_UWord16 length)
+{
+    if(length %4 != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+        return -1;
+    }
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+
+    if(_appData)
+    {
+        delete [] _appData;
+    }
+
+    _appSend = true;
+    _appSubType = subType;
+    _appName = name;
+    _appData = new WebRtc_UWord8[length];
+    _appLength = length;
+    memcpy(_appData, data, length);
+    return 0;
+}
+
+WebRtc_Word32
+RTCPSender::SetRTCPVoIPMetrics(const RTCPVoIPMetric* VoIPMetric)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+    memcpy(&_xrVoIPMetric, VoIPMetric, sizeof(RTCPVoIPMetric));
+
+    _xrSendVoIPMetric = true;
+    return 0;
+}
+
+// called under critsect _criticalSectionRTCPSender
+WebRtc_Word32 RTCPSender::AddReportBlocks(WebRtc_UWord8* rtcpbuffer,
+                                          WebRtc_UWord32& pos,
+                                          WebRtc_UWord8& numberOfReportBlocks,
+                                          const RTCPReportBlock* received,
+                                          const WebRtc_UWord32 NTPsec,
+                                          const WebRtc_UWord32 NTPfrac) {
+  // sanity one block
+  if(pos + 24 >= IP_PACKET_SIZE) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s invalid argument", __FUNCTION__);
+    return -1;
+  }
+  numberOfReportBlocks = _reportBlocks.size();
+  if (received) {
+    // add our multiple RR to numberOfReportBlocks
+    numberOfReportBlocks++;
+  }
+  if (received) {
+    // answer to the one that sends to me
+    _lastRTCPTime[0] = ModuleRTPUtility::ConvertNTPTimeToMS(NTPsec, NTPfrac);
+
+    // Remote SSRC
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, _remoteSSRC);
+    pos += 4;
+
+    // fraction lost
+    rtcpbuffer[pos++]=received->fractionLost;
+
+    // cumulative loss
+    ModuleRTPUtility::AssignUWord24ToBuffer(rtcpbuffer+pos,
+                                            received->cumulativeLost);
+    pos += 3;
+    // extended highest seq_no, contain the highest sequence number received
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos,
+                                            received->extendedHighSeqNum);
+    pos += 4;
+
+    //Jitter
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, received->jitter);
+    pos += 4;
+
+    // Last SR timestamp, our NTP time when we received the last report
+    // This is the value that we read from the send report packet not when we
+    // received it...
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, received->lastSR);
+    pos += 4;
+
+    // Delay since last received report,time since we received the report
+    ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos,
+                                            received->delaySinceLastSR);
+    pos += 4;
+  }
+  if ((pos + _reportBlocks.size() * 24) >= IP_PACKET_SIZE) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s invalid argument", __FUNCTION__);
+    return -1;
+  }
+  std::map<WebRtc_UWord32, RTCPReportBlock*>::iterator it =
+      _reportBlocks.begin();
+
+  for (; it != _reportBlocks.end(); it++) {
+    // we can have multiple report block in a conference
+    WebRtc_UWord32 remoteSSRC = it->first;
+    RTCPReportBlock* reportBlock = it->second;
+    if (reportBlock) {
+      // Remote SSRC
+      ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos, remoteSSRC);
+      pos += 4;
+
+      // fraction lost
+      rtcpbuffer[pos++] = reportBlock->fractionLost;
+
+      // cumulative loss
+      ModuleRTPUtility::AssignUWord24ToBuffer(rtcpbuffer+pos,
+                                              reportBlock->cumulativeLost);
+      pos += 3;
+
+      // extended highest seq_no, contain the highest sequence number received
+      ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos,
+                                              reportBlock->extendedHighSeqNum);
+      pos += 4;
+
+      //Jitter
+      ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos,
+                                              reportBlock->jitter);
+      pos += 4;
+
+      ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos,
+                                              reportBlock->lastSR);
+      pos += 4;
+
+      ModuleRTPUtility::AssignUWord32ToBuffer(rtcpbuffer+pos,
+                                              reportBlock->delaySinceLastSR);
+      pos += 4;
+    }
+  }
+  return pos;
+}
+
+// no callbacks allowed inside this function
+WebRtc_Word32
+RTCPSender::SetTMMBN(const TMMBRSet* boundingSet,
+                     const WebRtc_UWord32 maxBitrateKbit)
+{
+    CriticalSectionScoped lock(_criticalSectionRTCPSender);
+
+    if (0 == _tmmbrHelp.SetTMMBRBoundingSetToSend(boundingSet, maxBitrateKbit))
+    {
+        _sendTMMBN = true;
+        return 0;
+    }
+    return -1;
+}
+} // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/rtcp_sender.h b/src/modules/rtp_rtcp/source/rtcp_sender.h
new file mode 100644
index 0000000..1471958
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtcp_sender.h
@@ -0,0 +1,247 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_SENDER_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_SENDER_H_
+
+#include <map>
+
+#include "typedefs.h"
+#include "rtcp_utility.h"
+#include "rtp_utility.h"
+#include "rtp_rtcp_defines.h"
+#include "scoped_ptr.h"
+#include "tmmbr_help.h"
+#include "modules/remote_bitrate_estimator/include/bwe_defines.h"
+#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+
+namespace webrtc {
+
+class ModuleRtpRtcpImpl; 
+
+class RTCPSender
+{
+public:
+    RTCPSender(const WebRtc_Word32 id, const bool audio,
+               RtpRtcpClock* clock, ModuleRtpRtcpImpl* owner);
+    virtual ~RTCPSender();
+
+    void ChangeUniqueId(const WebRtc_Word32 id);
+
+    WebRtc_Word32 Init();
+
+    WebRtc_Word32 RegisterSendTransport(Transport* outgoingTransport);
+
+    RTCPMethod Status() const;
+    WebRtc_Word32 SetRTCPStatus(const RTCPMethod method);
+
+    bool Sending() const;
+    WebRtc_Word32 SetSendingStatus(const bool enabled); // combine the functions
+
+    WebRtc_Word32 SetNackStatus(const bool enable);
+
+    void SetSSRC( const WebRtc_UWord32 ssrc);
+
+    WebRtc_Word32 SetRemoteSSRC( const WebRtc_UWord32 ssrc);
+
+    WebRtc_Word32 SetCameraDelay(const WebRtc_Word32 delayMS);
+
+    WebRtc_Word32 CNAME(char cName[RTCP_CNAME_SIZE]);
+    WebRtc_Word32 SetCNAME(const char cName[RTCP_CNAME_SIZE]);
+
+    WebRtc_Word32 AddMixedCNAME(const WebRtc_UWord32 SSRC,
+                                const char cName[RTCP_CNAME_SIZE]);
+
+    WebRtc_Word32 RemoveMixedCNAME(const WebRtc_UWord32 SSRC);
+
+    WebRtc_UWord32 SendTimeOfSendReport(const WebRtc_UWord32 sendReport);
+
+    bool TimeToSendRTCPReport(const bool sendKeyframeBeforeRTP = false) const;
+
+    WebRtc_UWord32 LastSendReport(WebRtc_UWord32& lastRTCPTime);
+
+    WebRtc_Word32 SendRTCP(const WebRtc_UWord32 rtcpPacketTypeFlags,
+                           const WebRtc_Word32 nackSize = 0,
+                           const WebRtc_UWord16* nackList = 0,
+                           const bool repeat = false,
+                           const WebRtc_UWord64 pictureID = 0);
+
+    WebRtc_Word32 AddReportBlock(const WebRtc_UWord32 SSRC,
+                                 const RTCPReportBlock* receiveBlock);
+
+    WebRtc_Word32 RemoveReportBlock(const WebRtc_UWord32 SSRC);
+
+    /*
+    *  REMB
+    */
+    bool REMB() const;
+
+    WebRtc_Word32 SetREMBStatus(const bool enable);
+
+    WebRtc_Word32 SetREMBData(const WebRtc_UWord32 bitrate,
+                              const WebRtc_UWord8 numberOfSSRC,
+                              const WebRtc_UWord32* SSRC);
+
+    /*
+    *   TMMBR
+    */
+    bool TMMBR() const;
+
+    WebRtc_Word32 SetTMMBRStatus(const bool enable);
+
+    WebRtc_Word32 SetTMMBN(const TMMBRSet* boundingSet,
+                           const WebRtc_UWord32 maxBitrateKbit);
+
+    /*
+    *   Extended jitter report
+    */
+    bool IJ() const;
+
+    WebRtc_Word32 SetIJStatus(const bool enable);
+
+    /*
+    *
+    */
+
+    WebRtc_Word32 SetApplicationSpecificData(const WebRtc_UWord8 subType,
+                                             const WebRtc_UWord32 name,
+                                             const WebRtc_UWord8* data,
+                                             const WebRtc_UWord16 length);
+
+    WebRtc_Word32 SetRTCPVoIPMetrics(const RTCPVoIPMetric* VoIPMetric);
+
+    WebRtc_Word32 SetCSRCs(const WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize],
+                           const WebRtc_UWord8 arrLength);
+
+    WebRtc_Word32 SetCSRCStatus(const bool include);
+
+    void SetTargetBitrate(unsigned int target_bitrate);
+
+private:
+    WebRtc_Word32 SendToNetwork(const WebRtc_UWord8* dataBuffer,
+                                const WebRtc_UWord16 length);
+
+    void UpdatePacketRate();
+
+    WebRtc_Word32 AddReportBlocks(WebRtc_UWord8* rtcpbuffer,
+                                WebRtc_UWord32& pos,
+                                WebRtc_UWord8& numberOfReportBlocks,
+                                const RTCPReportBlock* received,
+                                const WebRtc_UWord32 NTPsec,
+                                const WebRtc_UWord32 NTPfrac);
+
+    WebRtc_Word32 BuildSR(WebRtc_UWord8* rtcpbuffer,
+                        WebRtc_UWord32& pos,
+                        const WebRtc_UWord32 NTPsec,
+                        const WebRtc_UWord32 NTPfrac,
+                        const RTCPReportBlock* received = NULL);
+
+    WebRtc_Word32 BuildRR(WebRtc_UWord8* rtcpbuffer,
+                        WebRtc_UWord32& pos,
+                        const WebRtc_UWord32 NTPsec,
+                        const WebRtc_UWord32 NTPfrac,
+                        const RTCPReportBlock* received = NULL);
+
+    WebRtc_Word32 BuildExtendedJitterReport(
+        WebRtc_UWord8* rtcpbuffer,
+        WebRtc_UWord32& pos,
+        const WebRtc_UWord32 jitterTransmissionTimeOffset);
+
+    WebRtc_Word32 BuildSDEC(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos);
+    WebRtc_Word32 BuildPLI(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos);
+    WebRtc_Word32 BuildREMB(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos);
+    WebRtc_Word32 BuildTMMBR(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos);
+    WebRtc_Word32 BuildTMMBN(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos);
+    WebRtc_Word32 BuildAPP(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos);
+    WebRtc_Word32 BuildVoIPMetric(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos);
+    WebRtc_Word32 BuildBYE(WebRtc_UWord8* rtcpbuffer, WebRtc_UWord32& pos);
+    WebRtc_Word32 BuildFIR(WebRtc_UWord8* rtcpbuffer,
+                           WebRtc_UWord32& pos,
+                           bool repeat);
+    WebRtc_Word32 BuildSLI(WebRtc_UWord8* rtcpbuffer,
+                         WebRtc_UWord32& pos,
+                         const WebRtc_UWord8 pictureID);
+    WebRtc_Word32 BuildRPSI(WebRtc_UWord8* rtcpbuffer,
+                         WebRtc_UWord32& pos,
+                         const WebRtc_UWord64 pictureID,
+                         const WebRtc_UWord8 payloadType);
+
+    WebRtc_Word32 BuildNACK(WebRtc_UWord8* rtcpbuffer,
+                          WebRtc_UWord32& pos,
+                          const WebRtc_Word32 nackSize,
+                          const WebRtc_UWord16* nackList);
+
+private:
+    WebRtc_Word32            _id;
+    const bool               _audio;
+    RtpRtcpClock&            _clock;
+    RTCPMethod               _method;
+
+    ModuleRtpRtcpImpl&      _rtpRtcp;
+
+    CriticalSectionWrapper* _criticalSectionTransport;
+    Transport*              _cbTransport;
+
+    CriticalSectionWrapper* _criticalSectionRTCPSender;
+    bool                    _usingNack;
+    bool                    _sending;
+    bool                    _sendTMMBN;
+    bool                    _REMB;
+    bool                    _sendREMB;
+    bool                    _TMMBR;
+    bool                    _IJ;
+
+    WebRtc_Word64        _nextTimeToSendRTCP;
+
+    WebRtc_UWord32 _SSRC;
+    WebRtc_UWord32 _remoteSSRC;  // SSRC that we receive on our RTP channel
+    char _CNAME[RTCP_CNAME_SIZE];
+
+    std::map<WebRtc_UWord32, RTCPReportBlock*> _reportBlocks;
+    std::map<WebRtc_UWord32, RTCPUtility::RTCPCnameInformation*> _csrcCNAMEs;
+
+    WebRtc_Word32         _cameraDelayMS;
+
+    // Sent
+    WebRtc_UWord32        _lastSendReport[RTCP_NUMBER_OF_SR];  // allow packet loss and RTT above 1 sec
+    WebRtc_UWord32        _lastRTCPTime[RTCP_NUMBER_OF_SR];
+
+    // send CSRCs
+    WebRtc_UWord8         _CSRCs;
+    WebRtc_UWord32        _CSRC[kRtpCsrcSize];
+    bool                _includeCSRCs;
+
+    // Full intra request
+    WebRtc_UWord8         _sequenceNumberFIR;
+
+    // REMB    
+    WebRtc_UWord8       _lengthRembSSRC;
+    WebRtc_UWord8       _sizeRembSSRC;
+    WebRtc_UWord32*     _rembSSRC;
+    WebRtc_UWord32      _rembBitrate;
+
+    TMMBRHelp           _tmmbrHelp;
+    WebRtc_UWord32      _tmmbr_Send;
+    WebRtc_UWord32      _packetOH_Send;
+
+    // APP
+    bool                 _appSend;
+    WebRtc_UWord8        _appSubType;
+    WebRtc_UWord32       _appName;
+    WebRtc_UWord8*       _appData;
+    WebRtc_UWord16       _appLength;
+
+    // XR VoIP metric
+    bool                _xrSendVoIPMetric;
+    RTCPVoIPMetric      _xrVoIPMetric;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_SENDER_H_
diff --git a/src/modules/rtp_rtcp/source/rtcp_sender_unittest.cc b/src/modules/rtp_rtcp/source/rtcp_sender_unittest.cc
new file mode 100644
index 0000000..1d14f27
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtcp_sender_unittest.cc
@@ -0,0 +1,246 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file includes unit tests for the RTCPSender.
+ */
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include "common_types.h"
+#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "modules/remote_bitrate_estimator/include/mock/mock_remote_bitrate_observer.h"
+#include "modules/rtp_rtcp/source/rtcp_receiver.h"
+#include "modules/rtp_rtcp/source/rtcp_sender.h"
+#include "modules/rtp_rtcp/source/rtp_utility.h"
+#include "modules/rtp_rtcp/source/rtp_rtcp_impl.h"
+
+namespace webrtc {
+
+void CreateRtpPacket(const bool marker_bit, const WebRtc_UWord8 payload,
+    const WebRtc_UWord16 seq_num, const WebRtc_UWord32 timestamp,
+    const WebRtc_UWord32 ssrc, WebRtc_UWord8* array,
+    WebRtc_UWord16* cur_pos) {
+  ASSERT_TRUE(payload <= 127);
+  array[(*cur_pos)++] = 0x80;
+  array[(*cur_pos)++] = payload | (marker_bit ? 0x80 : 0);
+  array[(*cur_pos)++] = seq_num >> 8;
+  array[(*cur_pos)++] = seq_num;
+  array[(*cur_pos)++] = timestamp >> 24;
+  array[(*cur_pos)++] = timestamp >> 16;
+  array[(*cur_pos)++] = timestamp >> 8;
+  array[(*cur_pos)++] = timestamp;
+  array[(*cur_pos)++] = ssrc >> 24;
+  array[(*cur_pos)++] = ssrc >> 16;
+  array[(*cur_pos)++] = ssrc >> 8;
+  array[(*cur_pos)++] = ssrc;
+  // VP8 payload header
+  array[(*cur_pos)++] = 0x90;  // X bit = 1
+  array[(*cur_pos)++] = 0x20;  // T bit = 1
+  array[(*cur_pos)++] = 0x00;  // TID = 0
+  array[(*cur_pos)++] = 0x00;  // Key frame
+  array[(*cur_pos)++] = 0x00;
+  array[(*cur_pos)++] = 0x00;
+  array[(*cur_pos)++] = 0x9d;
+  array[(*cur_pos)++] = 0x01;
+  array[(*cur_pos)++] = 0x2a;
+  array[(*cur_pos)++] = 128;
+  array[(*cur_pos)++] = 0;
+  array[(*cur_pos)++] = 96;
+  array[(*cur_pos)++] = 0;
+}
+
+class TestTransport : public Transport,
+                      public RtpData {
+ public:
+  TestTransport()
+      : rtcp_receiver_(NULL) {
+  }
+  void SetRTCPReceiver(RTCPReceiver* rtcp_receiver) {
+    rtcp_receiver_ = rtcp_receiver;
+  }
+  virtual int SendPacket(int /*ch*/, const void* /*data*/, int /*len*/) {
+    return -1;
+  }
+
+  virtual int SendRTCPPacket(int /*ch*/, const void *packet, int packet_len) {
+    RTCPUtility::RTCPParserV2 rtcpParser((WebRtc_UWord8*)packet,
+                                         (WebRtc_Word32)packet_len,
+                                         true); // Allow non-compound RTCP
+
+    EXPECT_TRUE(rtcpParser.IsValid());
+    RTCPHelp::RTCPPacketInformation rtcpPacketInformation;
+    EXPECT_EQ(0, rtcp_receiver_->IncomingRTCPPacket(rtcpPacketInformation,
+                                                   &rtcpParser));
+    rtcp_packet_info_ = rtcpPacketInformation;
+
+    return packet_len;
+  }
+
+  virtual int OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                                    const WebRtc_UWord16 payloadSize,
+                                    const WebRtcRTPHeader* rtpHeader) {
+    return 0;
+  }
+  RTCPReceiver* rtcp_receiver_;
+  RTCPHelp::RTCPPacketInformation rtcp_packet_info_;
+};
+
+class RtcpSenderTest : public ::testing::Test {
+ protected:
+  RtcpSenderTest()
+      : over_use_detector_options_(),
+        remote_bitrate_observer_(),
+        remote_bitrate_estimator_(&remote_bitrate_observer_,
+                                  over_use_detector_options_) {
+    system_clock_ = ModuleRTPUtility::GetSystemClock();
+    test_transport_ = new TestTransport();
+
+    RtpRtcp::Configuration configuration;
+    configuration.id = 0;
+    configuration.audio = false;
+    configuration.clock = system_clock_;
+    configuration.incoming_data = test_transport_;
+    configuration.outgoing_transport = test_transport_;
+    configuration.remote_bitrate_estimator = &remote_bitrate_estimator_;
+
+    rtp_rtcp_impl_ = new ModuleRtpRtcpImpl(configuration);
+    rtcp_sender_ = new RTCPSender(0, false, system_clock_, rtp_rtcp_impl_);
+    rtcp_receiver_ = new RTCPReceiver(0, system_clock_, rtp_rtcp_impl_);
+    test_transport_->SetRTCPReceiver(rtcp_receiver_);
+    // Initialize
+    EXPECT_EQ(0, rtcp_sender_->Init());
+    EXPECT_EQ(0, rtcp_sender_->RegisterSendTransport(test_transport_));
+  }
+  ~RtcpSenderTest() {
+    delete rtcp_sender_;
+    delete rtcp_receiver_;
+    delete rtp_rtcp_impl_;
+    delete test_transport_;
+    delete system_clock_;
+  }
+
+  // Helper function: Incoming RTCP has a specific packet type.
+  bool gotPacketType(RTCPPacketType packet_type) {
+    return ((test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags) &
+            packet_type) != 0U;
+  }
+
+  OverUseDetectorOptions over_use_detector_options_;
+  RtpRtcpClock* system_clock_;
+  ModuleRtpRtcpImpl* rtp_rtcp_impl_;
+  RTCPSender* rtcp_sender_;
+  RTCPReceiver* rtcp_receiver_;
+  TestTransport* test_transport_;
+  MockRemoteBitrateObserver remote_bitrate_observer_;
+  RemoteBitrateEstimator remote_bitrate_estimator_;
+
+  enum {kMaxPacketLength = 1500};
+  uint8_t packet_[kMaxPacketLength];
+};
+
+TEST_F(RtcpSenderTest, RtcpOff) {
+  EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpOff));
+  EXPECT_EQ(-1, rtcp_sender_->SendRTCP(kRtcpSr));
+}
+
+TEST_F(RtcpSenderTest, IJStatus) {
+  ASSERT_FALSE(rtcp_sender_->IJ());
+  EXPECT_EQ(0, rtcp_sender_->SetIJStatus(true));
+  ASSERT_TRUE(rtcp_sender_->IJ());
+}
+
+TEST_F(RtcpSenderTest, TestCompound) {
+  const bool marker_bit = false;
+  const WebRtc_UWord8 payload = 100;
+  const WebRtc_UWord16 seq_num = 11111;
+  const WebRtc_UWord32 timestamp = 1234567;
+  const WebRtc_UWord32 ssrc = 0x11111111;
+  WebRtc_UWord16 packet_length = 0;
+  CreateRtpPacket(marker_bit, payload, seq_num, timestamp, ssrc, packet_,
+      &packet_length);
+  EXPECT_EQ(25, packet_length);
+
+  VideoCodec codec_inst;
+  strncpy(codec_inst.plName, "VP8", webrtc::kPayloadNameSize - 1);
+  codec_inst.codecType = webrtc::kVideoCodecVP8;
+  codec_inst.plType = payload;
+  EXPECT_EQ(0, rtp_rtcp_impl_->RegisterReceivePayload(codec_inst));
+
+  // Make sure RTP packet has been received.
+  EXPECT_EQ(0, rtp_rtcp_impl_->IncomingPacket(packet_, packet_length));
+
+  EXPECT_EQ(0, rtcp_sender_->SetIJStatus(true));
+  EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpCompound));
+  EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRr));
+
+  // Transmission time offset packet should be received.
+  ASSERT_TRUE(test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags &
+      kRtcpTransmissionTimeOffset);
+}
+
+TEST_F(RtcpSenderTest, TestCompound_NoRtpReceived) {
+  EXPECT_EQ(0, rtcp_sender_->SetIJStatus(true));
+  EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpCompound));
+  EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpRr));
+
+  // Transmission time offset packet should not be received.
+  ASSERT_FALSE(test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags &
+      kRtcpTransmissionTimeOffset);
+}
+
+// This test is written to verify actual behaviour. It does not seem
+// to make much sense to send an empty TMMBN, since there is no place
+// to put an actual limit here. It's just information that no limit
+// is set, which is kind of the starting assumption.
+// See http://code.google.com/p/webrtc/issues/detail?id=468 for one
+// situation where this caused confusion.
+TEST_F(RtcpSenderTest, SendsTmmbnIfSetAndEmpty) {
+  EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpCompound));
+  TMMBRSet bounding_set;
+  EXPECT_EQ(0, rtcp_sender_->SetTMMBN(&bounding_set, 3));
+  ASSERT_EQ(0U, test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags);
+  EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpSr));
+  // We now expect the packet to show up in the rtcp_packet_info_ of
+  // test_transport_.
+  ASSERT_NE(0U, test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags);
+  EXPECT_TRUE(gotPacketType(kRtcpTmmbn));
+  TMMBRSet* incoming_set = NULL;
+  bool owner = false;
+  // The BoundingSet function returns the number of members of the
+  // bounding set, and touches the incoming set only if there's > 1.
+  EXPECT_EQ(0, test_transport_->rtcp_receiver_->BoundingSet(owner,
+      incoming_set));
+}
+
+TEST_F(RtcpSenderTest, SendsTmmbnIfSetAndValid) {
+  EXPECT_EQ(0, rtcp_sender_->SetRTCPStatus(kRtcpCompound));
+  TMMBRSet bounding_set;
+  bounding_set.VerifyAndAllocateSet(1);
+  const WebRtc_UWord32 kSourceSsrc = 12345;
+  bounding_set.AddEntry(32768, 0, kSourceSsrc);
+
+  EXPECT_EQ(0, rtcp_sender_->SetTMMBN(&bounding_set, 3));
+  ASSERT_EQ(0U, test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags);
+  EXPECT_EQ(0, rtcp_sender_->SendRTCP(kRtcpSr));
+  // We now expect the packet to show up in the rtcp_packet_info_ of
+  // test_transport_.
+  ASSERT_NE(0U, test_transport_->rtcp_packet_info_.rtcpPacketTypeFlags);
+  EXPECT_TRUE(gotPacketType(kRtcpTmmbn));
+  TMMBRSet incoming_set;
+  bool owner = false;
+  // We expect 1 member of the incoming set.
+  EXPECT_EQ(1, test_transport_->rtcp_receiver_->BoundingSet(owner,
+      &incoming_set));
+  EXPECT_EQ(kSourceSsrc, incoming_set.Ssrc(0));
+}
+}  // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/rtcp_utility.cc b/src/modules/rtp_rtcp/source/rtcp_utility.cc
new file mode 100644
index 0000000..8673e87
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtcp_utility.cc
@@ -0,0 +1,1527 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtcp_utility.h"
+
+#include <cstring> // memcpy
+#include <cmath>   // ceil
+#include <cassert>
+
+namespace webrtc {
+// RTCPParserV2 : currently read only
+
+RTCPUtility::RTCPParserV2::RTCPParserV2(const WebRtc_UWord8* rtcpData,
+                                        size_t rtcpDataLength,
+                                        bool rtcpReducedSizeEnable)
+    : _ptrRTCPDataBegin(rtcpData),
+      _RTCPReducedSizeEnable(rtcpReducedSizeEnable),
+      _ptrRTCPDataEnd(rtcpData + rtcpDataLength),
+      _validPacket(false),
+      _ptrRTCPData(rtcpData),
+      _ptrRTCPBlockEnd(NULL),
+      _state(State_TopLevel),
+      _numberOfBlocks(0),
+      _packetType(kRtcpNotValidCode) {
+  Validate();
+}
+
+RTCPUtility::RTCPParserV2::~RTCPParserV2() {
+}
+
+ptrdiff_t
+RTCPUtility::RTCPParserV2::LengthLeft() const
+{
+    return (_ptrRTCPDataEnd- _ptrRTCPData);
+}
+
+RTCPUtility::RTCPPacketTypes
+RTCPUtility::RTCPParserV2::PacketType() const
+{
+    return _packetType;
+}
+
+const RTCPUtility::RTCPPacket&
+RTCPUtility::RTCPParserV2::Packet() const
+{
+    return _packet;
+}
+
+RTCPUtility::RTCPPacketTypes
+RTCPUtility::RTCPParserV2::Begin()
+{
+    _ptrRTCPData = _ptrRTCPDataBegin;
+
+    return Iterate();
+}
+
+RTCPUtility::RTCPPacketTypes
+RTCPUtility::RTCPParserV2::Iterate()
+{
+    // Reset packet type
+    _packetType = kRtcpNotValidCode;
+
+    if (IsValid())
+    {
+        switch (_state)
+        {
+        case State_TopLevel:
+            IterateTopLevel();
+            break;
+        case State_ReportBlockItem:
+            IterateReportBlockItem();
+            break;
+        case State_SDESChunk:
+            IterateSDESChunk();
+            break;
+        case State_BYEItem:
+            IterateBYEItem();
+            break;
+        case State_ExtendedJitterItem:
+            IterateExtendedJitterItem();
+            break;
+        case State_RTPFB_NACKItem:
+            IterateNACKItem();
+            break;
+        case State_RTPFB_TMMBRItem:
+            IterateTMMBRItem();
+            break;
+        case State_RTPFB_TMMBNItem:
+            IterateTMMBNItem();
+            break;
+        case State_PSFB_SLIItem:
+            IterateSLIItem();
+            break;
+        case State_PSFB_RPSIItem:
+            IterateRPSIItem();
+            break;
+        case State_PSFB_FIRItem:
+            IterateFIRItem();
+            break;
+        case State_PSFB_AppItem:
+            IteratePsfbAppItem();
+            break;
+        case State_PSFB_REMBItem:
+            IteratePsfbREMBItem();
+            break;
+        case State_AppItem:
+            IterateAppItem();
+            break;
+        default:
+            assert(false); // Invalid state!
+            break;
+        }
+    }
+    return _packetType;
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateTopLevel()
+{
+    for (;;)
+    {
+        RTCPCommonHeader header;
+
+        const bool success = RTCPParseCommonHeader(_ptrRTCPData,
+                                                    _ptrRTCPDataEnd,
+                                                    header);
+
+        if (!success)
+        {
+            return;
+        }
+        _ptrRTCPBlockEnd = _ptrRTCPData + header.LengthInOctets;
+        if (_ptrRTCPBlockEnd > _ptrRTCPDataEnd)
+        {
+            // Bad block!
+            return;
+        }
+
+        switch (header.PT)
+        {
+        case PT_SR:
+        {
+            // number of Report blocks
+            _numberOfBlocks = header.IC;
+            ParseSR();
+            return;
+        }
+        case PT_RR:
+        {
+            // number of Report blocks
+            _numberOfBlocks = header.IC;
+            ParseRR();
+            return;
+        }
+        case PT_SDES:
+        {
+            // number of SDES blocks
+            _numberOfBlocks = header.IC;
+            const bool ok = ParseSDES();
+            if (!ok)
+            {
+                // Nothing supported found, continue to next block!
+                break;
+            }
+            return;
+        }
+        case PT_BYE:
+        {
+            _numberOfBlocks = header.IC;
+            const bool ok = ParseBYE();
+            if (!ok)
+            {
+                // Nothing supported found, continue to next block!
+                break;
+            }
+            return;
+        }
+        case PT_IJ:
+        {
+            // number of Report blocks
+            _numberOfBlocks = header.IC;
+            ParseIJ();
+            return;
+        }
+        case PT_RTPFB: // Fall through!
+        case PT_PSFB:
+        {
+            const bool ok = ParseFBCommon(header);
+            if (!ok)
+            {
+                // Nothing supported found, continue to next block!
+                break;
+            }
+
+            return;
+        }
+        case PT_APP:
+        {
+            const bool ok = ParseAPP(header);
+            if (!ok)
+            {
+                // Nothing supported found, continue to next block!
+                break;
+            }
+            return;
+        }
+        case PT_XR:
+        {
+            const bool ok = ParseXR();
+            if (!ok)
+            {
+                // Nothing supported found, continue to next block!
+                break;
+            }
+            return;
+        }
+        default:
+            // Not supported! Skip!
+            EndCurrentBlock();
+            break;
+        }
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateReportBlockItem()
+{
+    const bool success = ParseReportBlockItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateSDESChunk()
+{
+    const bool success = ParseSDESChunk();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateBYEItem()
+{
+    const bool success = ParseBYEItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateExtendedJitterItem()
+{
+    const bool success = ParseIJItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateNACKItem()
+{
+    const bool success = ParseNACKItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateTMMBRItem()
+{
+    const bool success = ParseTMMBRItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateTMMBNItem()
+{
+    const bool success = ParseTMMBNItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateSLIItem()
+{
+    const bool success = ParseSLIItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateRPSIItem()
+{
+    const bool success = ParseRPSIItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateFIRItem()
+{
+    const bool success = ParseFIRItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IteratePsfbAppItem()
+{
+    const bool success = ParsePsfbAppItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IteratePsfbREMBItem()
+{
+    const bool success = ParsePsfbREMBItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::IterateAppItem()
+{
+    const bool success = ParseAPPItem();
+    if (!success)
+    {
+        Iterate();
+    }
+}
+
+void
+RTCPUtility::RTCPParserV2::Validate()
+{
+    if (_ptrRTCPData == NULL)
+    {
+        return; // NOT VALID
+    }
+
+    RTCPCommonHeader header;
+    const bool success = RTCPParseCommonHeader(_ptrRTCPDataBegin,
+                                               _ptrRTCPDataEnd,
+                                               header);
+
+    if (!success)
+    {
+        return; // NOT VALID!
+    }
+
+    // * if (!reducedSize) : first packet must be RR or SR.
+    //
+    // * The padding bit (P) should be zero for the first packet of a
+    //   compound RTCP packet because padding should only be applied,
+    //   if it is needed, to the last packet. (NOT CHECKED!)
+    //
+    // * The length fields of the individual RTCP packets must add up
+    //   to the overall length of the compound RTCP packet as
+    //   received.  This is a fairly strong check. (NOT CHECKED!)
+
+    if (!_RTCPReducedSizeEnable)
+    {
+        if ((header.PT != PT_SR) && (header.PT != PT_RR))
+        {
+            return; // NOT VALID
+        }
+    }
+
+    _validPacket = true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::IsValid() const
+{
+    return _validPacket;
+}
+
+void
+RTCPUtility::RTCPParserV2::EndCurrentBlock()
+{
+    _ptrRTCPData = _ptrRTCPBlockEnd;
+}
+
+bool
+RTCPUtility::RTCPParseCommonHeader( const WebRtc_UWord8* ptrDataBegin,
+                                    const WebRtc_UWord8* ptrDataEnd,
+                                    RTCPCommonHeader& parsedHeader)
+{
+    if (!ptrDataBegin || !ptrDataEnd)
+    {
+        return false;
+    }
+
+    //  0                   1                   2                   3
+    //  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+    // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    // |V=2|P|    IC   |      PT       |             length            |
+    // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    //
+    // Common header for all RTCP packets, 4 octets.
+
+    if ((ptrDataEnd - ptrDataBegin) < 4)
+    {
+        return false;
+    }
+
+    parsedHeader.V              = ptrDataBegin[0] >> 6;
+    parsedHeader.P              = ((ptrDataBegin[0] & 0x20) == 0) ? false : true;
+    parsedHeader.IC             = ptrDataBegin[0] & 0x1f;
+    parsedHeader.PT             = ptrDataBegin[1];
+
+    parsedHeader.LengthInOctets = (ptrDataBegin[2] << 8) + ptrDataBegin[3] + 1;
+    parsedHeader.LengthInOctets *= 4;
+
+    if(parsedHeader.LengthInOctets == 0)
+    {
+        return false;
+    }
+    // Check if RTP version field == 2
+    if (parsedHeader.V != 2)
+    {
+        return false;
+    }
+
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseRR()
+{
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 8)
+    {
+        return false;
+    }
+
+
+    _ptrRTCPData += 4; // Skip header
+
+    _packetType = kRtcpRrCode;
+
+    _packet.RR.SenderSSRC = *_ptrRTCPData++ << 24;
+    _packet.RR.SenderSSRC += *_ptrRTCPData++ << 16;
+    _packet.RR.SenderSSRC += *_ptrRTCPData++ << 8;
+    _packet.RR.SenderSSRC += *_ptrRTCPData++;
+
+    _packet.RR.NumberOfReportBlocks = _numberOfBlocks;
+
+    // State transition
+    _state = State_ReportBlockItem;
+
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseSR()
+{
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 28)
+    {
+        EndCurrentBlock();
+        return false;
+    }
+
+    _ptrRTCPData += 4; // Skip header
+
+    _packetType = kRtcpSrCode;
+
+    _packet.SR.SenderSSRC = *_ptrRTCPData++ << 24;
+    _packet.SR.SenderSSRC += *_ptrRTCPData++ << 16;
+    _packet.SR.SenderSSRC += *_ptrRTCPData++ << 8;
+    _packet.SR.SenderSSRC += *_ptrRTCPData++;
+
+    _packet.SR.NTPMostSignificant = *_ptrRTCPData++ << 24;
+    _packet.SR.NTPMostSignificant += *_ptrRTCPData++ << 16;
+    _packet.SR.NTPMostSignificant += *_ptrRTCPData++ << 8;
+    _packet.SR.NTPMostSignificant += *_ptrRTCPData++;
+
+    _packet.SR.NTPLeastSignificant = *_ptrRTCPData++ << 24;
+    _packet.SR.NTPLeastSignificant += *_ptrRTCPData++ << 16;
+    _packet.SR.NTPLeastSignificant += *_ptrRTCPData++ << 8;
+    _packet.SR.NTPLeastSignificant += *_ptrRTCPData++;
+
+    _packet.SR.RTPTimestamp = *_ptrRTCPData++ << 24;
+    _packet.SR.RTPTimestamp += *_ptrRTCPData++ << 16;
+    _packet.SR.RTPTimestamp += *_ptrRTCPData++ << 8;
+    _packet.SR.RTPTimestamp += *_ptrRTCPData++;
+
+    _packet.SR.SenderPacketCount = *_ptrRTCPData++ << 24;
+    _packet.SR.SenderPacketCount += *_ptrRTCPData++ << 16;
+    _packet.SR.SenderPacketCount += *_ptrRTCPData++ << 8;
+    _packet.SR.SenderPacketCount += *_ptrRTCPData++;
+
+    _packet.SR.SenderOctetCount = *_ptrRTCPData++ << 24;
+    _packet.SR.SenderOctetCount += *_ptrRTCPData++ << 16;
+    _packet.SR.SenderOctetCount += *_ptrRTCPData++ << 8;
+    _packet.SR.SenderOctetCount += *_ptrRTCPData++;
+
+    _packet.SR.NumberOfReportBlocks = _numberOfBlocks;
+
+    // State transition
+    if(_numberOfBlocks != 0)
+    {
+        _state = State_ReportBlockItem;
+    }else
+    {
+        // don't go to state report block item if 0 report blocks
+        _state = State_TopLevel;
+        EndCurrentBlock();
+    }
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseReportBlockItem()
+{
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 24 || _numberOfBlocks <= 0)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+    _packet.ReportBlockItem.SSRC = *_ptrRTCPData++ << 24;
+    _packet.ReportBlockItem.SSRC += *_ptrRTCPData++ << 16;
+    _packet.ReportBlockItem.SSRC += *_ptrRTCPData++ << 8;
+    _packet.ReportBlockItem.SSRC += *_ptrRTCPData++;
+
+    _packet.ReportBlockItem.FractionLost = *_ptrRTCPData++;
+
+    _packet.ReportBlockItem.CumulativeNumOfPacketsLost = *_ptrRTCPData++ << 16;
+    _packet.ReportBlockItem.CumulativeNumOfPacketsLost += *_ptrRTCPData++ << 8;
+    _packet.ReportBlockItem.CumulativeNumOfPacketsLost += *_ptrRTCPData++;
+
+    _packet.ReportBlockItem.ExtendedHighestSequenceNumber = *_ptrRTCPData++ << 24;
+    _packet.ReportBlockItem.ExtendedHighestSequenceNumber += *_ptrRTCPData++ << 16;
+    _packet.ReportBlockItem.ExtendedHighestSequenceNumber += *_ptrRTCPData++ << 8;
+    _packet.ReportBlockItem.ExtendedHighestSequenceNumber += *_ptrRTCPData++;
+
+    _packet.ReportBlockItem.Jitter = *_ptrRTCPData++ << 24;
+    _packet.ReportBlockItem.Jitter += *_ptrRTCPData++ << 16;
+    _packet.ReportBlockItem.Jitter += *_ptrRTCPData++ << 8;
+    _packet.ReportBlockItem.Jitter += *_ptrRTCPData++;
+
+    _packet.ReportBlockItem.LastSR = *_ptrRTCPData++ << 24;
+    _packet.ReportBlockItem.LastSR += *_ptrRTCPData++ << 16;
+    _packet.ReportBlockItem.LastSR += *_ptrRTCPData++ << 8;
+    _packet.ReportBlockItem.LastSR += *_ptrRTCPData++;
+
+    _packet.ReportBlockItem.DelayLastSR = *_ptrRTCPData++ << 24;
+    _packet.ReportBlockItem.DelayLastSR += *_ptrRTCPData++ << 16;
+    _packet.ReportBlockItem.DelayLastSR += *_ptrRTCPData++ << 8;
+    _packet.ReportBlockItem.DelayLastSR += *_ptrRTCPData++;
+
+    _numberOfBlocks--;
+    _packetType = kRtcpReportBlockItemCode;
+    return true;
+}
+
+/* From RFC 5450: Transmission Time Offsets in RTP Streams.
+      0                   1                   2                   3
+      0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+     +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ hdr |V=2|P|    RC   |   PT=IJ=195   |             length            |
+     +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     |                      inter-arrival jitter                     |
+     +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+     .                                                               .
+     .                                                               .
+     .                                                               .
+     |                      inter-arrival jitter                     |
+     +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+*/
+
+bool
+RTCPUtility::RTCPParserV2::ParseIJ()
+{
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 4)
+    {
+        return false;
+    }
+
+    _ptrRTCPData += 4; // Skip header
+
+    _packetType = kRtcpExtendedIjCode;
+
+    // State transition
+    _state = State_ExtendedJitterItem;
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseIJItem()
+{
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 4 || _numberOfBlocks <= 0)
+    {
+        _state = State_TopLevel;
+        EndCurrentBlock();
+        return false;
+    }
+
+    _packet.ExtendedJitterReportItem.Jitter = *_ptrRTCPData++ << 24;
+    _packet.ExtendedJitterReportItem.Jitter += *_ptrRTCPData++ << 16;
+    _packet.ExtendedJitterReportItem.Jitter += *_ptrRTCPData++ << 8;
+    _packet.ExtendedJitterReportItem.Jitter += *_ptrRTCPData++;
+
+    _numberOfBlocks--;
+    _packetType = kRtcpExtendedIjItemCode;
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseSDES()
+{
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 8)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+    _ptrRTCPData += 4; // Skip header
+
+    _state = State_SDESChunk;
+    _packetType = kRtcpSdesCode;
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseSDESChunk()
+{
+    if(_numberOfBlocks <= 0)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+    _numberOfBlocks--;
+
+    // Find CName item in a SDES chunk.
+    while (_ptrRTCPData < _ptrRTCPBlockEnd)
+    {
+        const ptrdiff_t dataLen = _ptrRTCPBlockEnd - _ptrRTCPData;
+        if (dataLen < 4)
+        {
+            _state = State_TopLevel;
+
+            EndCurrentBlock();
+            return false;
+        }
+
+        WebRtc_UWord32 SSRC = *_ptrRTCPData++ << 24;
+        SSRC += *_ptrRTCPData++ << 16;
+        SSRC += *_ptrRTCPData++ << 8;
+        SSRC += *_ptrRTCPData++;
+
+        const bool foundCname = ParseSDESItem();
+        if (foundCname)
+        {
+            _packet.CName.SenderSSRC = SSRC; // Add SSRC
+            return true;
+        }
+    }
+    _state = State_TopLevel;
+
+    EndCurrentBlock();
+    return false;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseSDESItem()
+{
+    // Find CName
+    // Only the CNAME item is mandatory. RFC 3550 page 46
+    bool foundCName = false;
+
+    size_t itemOctetsRead = 0;
+    while (_ptrRTCPData < _ptrRTCPBlockEnd)
+    {
+        const WebRtc_UWord8 tag = *_ptrRTCPData++;
+        ++itemOctetsRead;
+
+        if (tag == 0)
+        {
+            // End tag! 4 oct aligned
+            while ((itemOctetsRead++ % 4) != 0)
+            {
+                ++_ptrRTCPData;
+            }
+            return foundCName;
+        }
+
+        if (_ptrRTCPData < _ptrRTCPBlockEnd)
+        {
+            const WebRtc_UWord8 len = *_ptrRTCPData++;
+            ++itemOctetsRead;
+
+            if (tag == 1)
+            {
+                // CNAME
+
+                // Sanity
+                if ((_ptrRTCPData + len) >= _ptrRTCPBlockEnd)
+                {
+                    _state = State_TopLevel;
+
+                    EndCurrentBlock();
+                    return false;
+                }
+                WebRtc_UWord8 i = 0;
+                for (; i < len; ++i)
+                {
+                    const WebRtc_UWord8 c = _ptrRTCPData[i];
+                    if ((c < ' ') || (c > '{') || (c == '%') || (c == '\\'))
+                    {
+                        // Illegal char
+                        _state = State_TopLevel;
+
+                        EndCurrentBlock();
+                        return false;
+                    }
+                    _packet.CName.CName[i] = c;
+                }
+                // Make sure we are null terminated.
+                _packet.CName.CName[i] = 0;
+                _packetType = kRtcpSdesChunkCode;
+
+                foundCName = true;
+            }
+            _ptrRTCPData += len;
+            itemOctetsRead += len;
+        }
+    }
+
+    // No end tag found!
+    _state = State_TopLevel;
+
+    EndCurrentBlock();
+    return false;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseBYE()
+{
+    _ptrRTCPData += 4; // Skip header
+
+    _state = State_BYEItem;
+
+    return ParseBYEItem();
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseBYEItem()
+{
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+    if (length < 4 || _numberOfBlocks == 0)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+
+    _packetType = kRtcpByeCode;
+
+    _packet.BYE.SenderSSRC = *_ptrRTCPData++ << 24;
+    _packet.BYE.SenderSSRC += *_ptrRTCPData++ << 16;
+    _packet.BYE.SenderSSRC += *_ptrRTCPData++ << 8;
+    _packet.BYE.SenderSSRC += *_ptrRTCPData++;
+
+    // we can have several CSRCs attached
+
+    // sanity
+    if(length >= 4*_numberOfBlocks)
+    {
+        _ptrRTCPData += (_numberOfBlocks -1)*4;
+    }
+    _numberOfBlocks = 0;
+
+    return true;
+}
+/*
+  0                   1                   2                   3
+    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |V=2|P|reserved |   PT=XR=207   |             length            |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |                              SSRC                             |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   :                         report blocks                         :
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+*/
+bool RTCPUtility::RTCPParserV2::ParseXR()
+{
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 8)
+    {
+        EndCurrentBlock();
+        return false;
+    }
+
+    _ptrRTCPData += 4; // Skip header
+
+    _packet.XR.OriginatorSSRC = *_ptrRTCPData++ << 24;
+    _packet.XR.OriginatorSSRC += *_ptrRTCPData++ << 16;
+    _packet.XR.OriginatorSSRC += *_ptrRTCPData++ << 8;
+    _packet.XR.OriginatorSSRC += *_ptrRTCPData++;
+
+    return ParseXRItem();
+}
+/*
+    0                   1                   2                   3
+     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    |      BT       | type-specific |         block length          |
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    :             type-specific block contents                      :
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+*/
+
+bool
+RTCPUtility::RTCPParserV2::ParseXRItem()
+{
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 4) //
+    {
+        EndCurrentBlock();
+        return false;
+    }
+
+    WebRtc_UWord8 blockType = *_ptrRTCPData++;
+    WebRtc_UWord8 typeSpecific = *_ptrRTCPData++;
+
+    WebRtc_UWord16 blockLength = *_ptrRTCPData++ << 8;
+    blockLength = *_ptrRTCPData++;
+
+    if(blockType == 7 && typeSpecific == 0)
+    {
+        if(blockLength != 8)
+        {
+            EndCurrentBlock();
+            return false;
+        }
+        return ParseXRVOIPMetricItem();
+    }else
+    {
+        EndCurrentBlock();
+        return false;
+    }
+}
+/*
+ 0                   1                   2                   3
+    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |     BT=7      |   reserved    |       block length = 8        |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |                        SSRC of source                         |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |   loss rate   | discard rate  | burst density |  gap density  |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |       burst duration          |         gap duration          |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |     round trip delay          |       end system delay        |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   | signal level  |  noise level  |     RERL      |     Gmin      |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |   R factor    | ext. R factor |    MOS-LQ     |    MOS-CQ     |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |   RX config   |   reserved    |          JB nominal           |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   |          JB maximum           |          JB abs max           |
+   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+*/
+bool
+RTCPUtility::RTCPParserV2::ParseXRVOIPMetricItem()
+{
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 28)
+    {
+        EndCurrentBlock();
+        return false;
+    }
+    _packetType = kRtcpXrVoipMetricCode;
+
+    _packet.XRVOIPMetricItem.SSRC = *_ptrRTCPData++ << 24;
+    _packet.XRVOIPMetricItem.SSRC += *_ptrRTCPData++ << 16;
+    _packet.XRVOIPMetricItem.SSRC += *_ptrRTCPData++ << 8;
+    _packet.XRVOIPMetricItem.SSRC += *_ptrRTCPData++;
+
+    _packet.XRVOIPMetricItem.lossRate = *_ptrRTCPData++;
+    _packet.XRVOIPMetricItem.discardRate = *_ptrRTCPData++;
+    _packet.XRVOIPMetricItem.burstDensity = *_ptrRTCPData++;
+    _packet.XRVOIPMetricItem.gapDensity = *_ptrRTCPData++;
+
+    _packet.XRVOIPMetricItem.burstDuration = *_ptrRTCPData++ << 8;
+    _packet.XRVOIPMetricItem.burstDuration += *_ptrRTCPData++;
+
+    _packet.XRVOIPMetricItem.gapDuration = *_ptrRTCPData++ << 8;
+    _packet.XRVOIPMetricItem.gapDuration += *_ptrRTCPData++;
+
+    _packet.XRVOIPMetricItem.roundTripDelay = *_ptrRTCPData++ << 8;
+    _packet.XRVOIPMetricItem.roundTripDelay += *_ptrRTCPData++;
+
+    _packet.XRVOIPMetricItem.endSystemDelay = *_ptrRTCPData++ << 8;
+    _packet.XRVOIPMetricItem.endSystemDelay += *_ptrRTCPData++;
+
+    _packet.XRVOIPMetricItem.signalLevel = *_ptrRTCPData++;
+    _packet.XRVOIPMetricItem.noiseLevel = *_ptrRTCPData++;
+    _packet.XRVOIPMetricItem.RERL = *_ptrRTCPData++;
+    _packet.XRVOIPMetricItem.Gmin = *_ptrRTCPData++;
+    _packet.XRVOIPMetricItem.Rfactor = *_ptrRTCPData++;
+    _packet.XRVOIPMetricItem.extRfactor = *_ptrRTCPData++;
+    _packet.XRVOIPMetricItem.MOSLQ = *_ptrRTCPData++;
+    _packet.XRVOIPMetricItem.MOSCQ = *_ptrRTCPData++;
+    _packet.XRVOIPMetricItem.RXconfig = *_ptrRTCPData++;
+    _ptrRTCPData++; // skip reserved
+
+    _packet.XRVOIPMetricItem.JBnominal = *_ptrRTCPData++ << 8;
+    _packet.XRVOIPMetricItem.JBnominal += *_ptrRTCPData++;
+
+    _packet.XRVOIPMetricItem.JBmax = *_ptrRTCPData++ << 8;
+    _packet.XRVOIPMetricItem.JBmax += *_ptrRTCPData++;
+
+    _packet.XRVOIPMetricItem.JBabsMax = *_ptrRTCPData++ << 8;
+    _packet.XRVOIPMetricItem.JBabsMax += *_ptrRTCPData++;
+
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseFBCommon(const RTCPCommonHeader& header)
+{
+    assert((header.PT == PT_RTPFB) || (header.PT == PT_PSFB)); // Parser logic check
+
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 12) // 4 * 3, RFC4585 section 6.1
+    {
+        EndCurrentBlock();
+        return false;
+    }
+
+    _ptrRTCPData += 4; // Skip RTCP header
+
+    WebRtc_UWord32 senderSSRC = *_ptrRTCPData++ << 24;
+    senderSSRC += *_ptrRTCPData++ << 16;
+    senderSSRC += *_ptrRTCPData++ << 8;
+    senderSSRC += *_ptrRTCPData++;
+
+    WebRtc_UWord32 mediaSSRC = *_ptrRTCPData++ << 24;
+    mediaSSRC += *_ptrRTCPData++ << 16;
+    mediaSSRC += *_ptrRTCPData++ << 8;
+    mediaSSRC += *_ptrRTCPData++;
+
+    if (header.PT == PT_RTPFB)
+    {
+        // Transport layer feedback
+
+        switch (header.IC)
+        {
+        case 1:
+        {
+            // NACK
+            _packetType             = kRtcpRtpfbNackCode;
+            _packet.NACK.SenderSSRC = senderSSRC;
+            _packet.NACK.MediaSSRC  = mediaSSRC;
+
+            _state = State_RTPFB_NACKItem;
+
+            return true;
+        }
+        case 2:
+        {
+            // used to be ACK is this code point, which is removed
+            // conficts with http://tools.ietf.org/html/draft-levin-avt-rtcp-burst-00
+            break;
+        }
+        case 3:
+        {
+            // TMMBR
+            _packetType              = kRtcpRtpfbTmmbrCode;
+            _packet.TMMBR.SenderSSRC = senderSSRC;
+            _packet.TMMBR.MediaSSRC  = mediaSSRC;
+
+            _state = State_RTPFB_TMMBRItem;
+
+            return true;
+        }
+        case 4:
+        {
+            // TMMBN
+            _packetType              = kRtcpRtpfbTmmbnCode;
+            _packet.TMMBN.SenderSSRC = senderSSRC;
+            _packet.TMMBN.MediaSSRC  = mediaSSRC;
+
+            _state = State_RTPFB_TMMBNItem;
+
+            return true;
+        }
+        case 5:
+         {
+            // RTCP-SR-REQ Rapid Synchronisation of RTP Flows
+            // draft-perkins-avt-rapid-rtp-sync-03.txt
+            // trigger a new RTCP SR
+            _packetType = kRtcpRtpfbSrReqCode;
+
+            // Note: No state transition, SR REQ is empty!
+            return true;
+        }
+        default:
+            break;
+        }
+        EndCurrentBlock();
+        return false;
+    }
+    else if (header.PT == PT_PSFB)
+    {
+        // Payload specific feedback
+        switch (header.IC)
+        {
+        case 1:
+            // PLI
+            _packetType            = kRtcpPsfbPliCode;
+            _packet.PLI.SenderSSRC = senderSSRC;
+            _packet.PLI.MediaSSRC  = mediaSSRC;
+
+            // Note: No state transition, PLI FCI is empty!
+            return true;
+        case 2:
+            // SLI
+            _packetType            = kRtcpPsfbSliCode;
+            _packet.SLI.SenderSSRC = senderSSRC;
+            _packet.SLI.MediaSSRC  = mediaSSRC;
+
+            _state = State_PSFB_SLIItem;
+
+            return true;
+        case 3:
+            _packetType             = kRtcpPsfbRpsiCode;
+            _packet.RPSI.SenderSSRC = senderSSRC;
+            _packet.RPSI.MediaSSRC  = mediaSSRC;
+
+            _state = State_PSFB_RPSIItem;
+            return true;
+        case 4:
+            // FIR
+            _packetType            = kRtcpPsfbFirCode;
+            _packet.FIR.SenderSSRC = senderSSRC;
+            _packet.FIR.MediaSSRC  = mediaSSRC;
+
+            _state = State_PSFB_FIRItem;
+            return true;
+        case 15:
+            _packetType                = kRtcpPsfbAppCode;
+            _packet.PSFBAPP.SenderSSRC = senderSSRC;
+            _packet.PSFBAPP.MediaSSRC  = mediaSSRC;
+
+            _state = State_PSFB_AppItem;
+            return true;
+        default:
+            break;
+        }
+
+        EndCurrentBlock();
+        return false;
+    }
+    else
+    {
+        assert(false);
+
+        EndCurrentBlock();
+        return false;
+    }
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseRPSIItem()
+{
+    // RFC 4585 6.3.3.  Reference Picture Selection Indication (RPSI)
+    /*
+    0                   1                   2                   3
+    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    |      PB       |0| Payload Type|    Native RPSI bit string     |
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    |   defined per codec          ...                | Padding (0) |
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    */
+
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 4)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+    if(length > 2+RTCP_RPSI_DATA_SIZE)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+
+    _packetType = kRtcpPsfbRpsiCode;
+
+    WebRtc_UWord8 paddingBits = *_ptrRTCPData++;
+    _packet.RPSI.PayloadType = *_ptrRTCPData++;
+
+    memcpy(_packet.RPSI.NativeBitString, _ptrRTCPData, length-2);
+
+    _packet.RPSI.NumberOfValidBits = WebRtc_UWord16(length-2)*8 - paddingBits;
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseNACKItem()
+{
+    // RFC 4585 6.2.1. Generic NACK
+
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 4)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+
+    _packetType = kRtcpRtpfbNackItemCode;
+
+    _packet.NACKItem.PacketID = *_ptrRTCPData++ << 8;
+    _packet.NACKItem.PacketID += *_ptrRTCPData++;
+
+    _packet.NACKItem.BitMask = *_ptrRTCPData++ << 8;
+    _packet.NACKItem.BitMask += *_ptrRTCPData++;
+
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParsePsfbAppItem()
+{
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 4)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+    if(*_ptrRTCPData++ != 'R')
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+    if(*_ptrRTCPData++ != 'E')
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+    if(*_ptrRTCPData++ != 'M')
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+    if(*_ptrRTCPData++ != 'B')
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+    _packetType = kRtcpPsfbRembCode;
+    _state = State_PSFB_REMBItem;
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParsePsfbREMBItem()
+{
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 4)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+
+    _packet.REMBItem.NumberOfSSRCs = *_ptrRTCPData++;
+    const WebRtc_UWord8 brExp = (_ptrRTCPData[0] >> 2) & 0x3F;
+
+    WebRtc_UWord32 brMantissa = (_ptrRTCPData[0] & 0x03) << 16;
+    brMantissa += (_ptrRTCPData[1] << 8);
+    brMantissa += (_ptrRTCPData[2]);
+
+    _ptrRTCPData += 3; // Fwd read data
+    _packet.REMBItem.BitRate = (brMantissa << brExp);
+
+    const ptrdiff_t length_ssrcs = _ptrRTCPBlockEnd - _ptrRTCPData;
+    if (length_ssrcs < 4 * _packet.REMBItem.NumberOfSSRCs)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+
+    _packetType = kRtcpPsfbRembItemCode;
+
+    for (int i = 0; i < _packet.REMBItem.NumberOfSSRCs; i++)
+    {
+        _packet.REMBItem.SSRCs[i] = *_ptrRTCPData++ << 24;
+        _packet.REMBItem.SSRCs[i] += *_ptrRTCPData++ << 16;
+        _packet.REMBItem.SSRCs[i] += *_ptrRTCPData++ << 8;
+        _packet.REMBItem.SSRCs[i] += *_ptrRTCPData++;
+    }
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseTMMBRItem()
+{
+    // RFC 5104 4.2.1. Temporary Maximum Media Stream Bit Rate Request (TMMBR)
+
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 8)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+
+    _packetType = kRtcpRtpfbTmmbrItemCode;
+
+    _packet.TMMBRItem.SSRC = *_ptrRTCPData++ << 24;
+    _packet.TMMBRItem.SSRC += *_ptrRTCPData++ << 16;
+    _packet.TMMBRItem.SSRC += *_ptrRTCPData++ << 8;
+    _packet.TMMBRItem.SSRC += *_ptrRTCPData++;
+
+    WebRtc_UWord8 mxtbrExp = (_ptrRTCPData[0] >> 2) & 0x3F;
+
+    WebRtc_UWord32 mxtbrMantissa = (_ptrRTCPData[0] & 0x03) << 15;
+    mxtbrMantissa += (_ptrRTCPData[1] << 7);
+    mxtbrMantissa += (_ptrRTCPData[2] >> 1) & 0x7F;
+
+    WebRtc_UWord32 measuredOH = (_ptrRTCPData[2] & 0x01) << 8;
+    measuredOH += _ptrRTCPData[3];
+
+    _ptrRTCPData += 4; // Fwd read data
+
+    _packet.TMMBRItem.MaxTotalMediaBitRate = ((mxtbrMantissa << mxtbrExp) / 1000);
+    _packet.TMMBRItem.MeasuredOverhead     = measuredOH;
+
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseTMMBNItem()
+{
+    // RFC 5104 4.2.2. Temporary Maximum Media Stream Bit Rate Notification (TMMBN)
+
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 8)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+
+    _packetType = kRtcpRtpfbTmmbnItemCode;
+
+    _packet.TMMBNItem.SSRC = *_ptrRTCPData++ << 24;
+    _packet.TMMBNItem.SSRC += *_ptrRTCPData++ << 16;
+    _packet.TMMBNItem.SSRC += *_ptrRTCPData++ << 8;
+    _packet.TMMBNItem.SSRC += *_ptrRTCPData++;
+
+    WebRtc_UWord8 mxtbrExp = (_ptrRTCPData[0] >> 2) & 0x3F;
+
+    WebRtc_UWord32 mxtbrMantissa = (_ptrRTCPData[0] & 0x03) << 15;
+    mxtbrMantissa += (_ptrRTCPData[1] << 7);
+    mxtbrMantissa += (_ptrRTCPData[2] >> 1) & 0x7F;
+
+    WebRtc_UWord32 measuredOH = (_ptrRTCPData[2] & 0x01) << 8;
+    measuredOH += _ptrRTCPData[3];
+
+    _ptrRTCPData += 4; // Fwd read data
+
+    _packet.TMMBNItem.MaxTotalMediaBitRate = ((mxtbrMantissa << mxtbrExp) / 1000);
+    _packet.TMMBNItem.MeasuredOverhead     = measuredOH;
+
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseSLIItem()
+{
+    // RFC 5104 6.3.2.  Slice Loss Indication (SLI)
+    /*
+    0                   1                   2                   3
+    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    |            First        |        Number           | PictureID |
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    */
+
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 4)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+    _packetType = kRtcpPsfbSliItemCode;
+
+    WebRtc_UWord32 buffer;
+    buffer = *_ptrRTCPData++ << 24;
+    buffer += *_ptrRTCPData++ << 16;
+    buffer += *_ptrRTCPData++ << 8;
+    buffer += *_ptrRTCPData++;
+
+    _packet.SLIItem.FirstMB = WebRtc_UWord16((buffer>>19) & 0x1fff);
+    _packet.SLIItem.NumberOfMB = WebRtc_UWord16((buffer>>6) & 0x1fff);
+    _packet.SLIItem.PictureId = WebRtc_UWord8(buffer & 0x3f);
+
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseFIRItem()
+{
+    // RFC 5104 4.3.1. Full Intra Request (FIR)
+
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 8)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+
+    _packetType = kRtcpPsfbFirItemCode;
+
+    _packet.FIRItem.SSRC = *_ptrRTCPData++ << 24;
+    _packet.FIRItem.SSRC += *_ptrRTCPData++ << 16;
+    _packet.FIRItem.SSRC += *_ptrRTCPData++ << 8;
+    _packet.FIRItem.SSRC += *_ptrRTCPData++;
+
+    _packet.FIRItem.CommandSequenceNumber = *_ptrRTCPData++;
+    _ptrRTCPData += 3; // Skip "Reserved" bytes.
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseAPP( const RTCPCommonHeader& header)
+{
+    ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    if (length < 12) // 4 * 3, RFC 3550 6.7 APP: Application-Defined RTCP Packet
+    {
+        EndCurrentBlock();
+        return false;
+    }
+
+    _ptrRTCPData += 4; // Skip RTCP header
+
+    WebRtc_UWord32 senderSSRC = *_ptrRTCPData++ << 24;
+    senderSSRC += *_ptrRTCPData++ << 16;
+    senderSSRC += *_ptrRTCPData++ << 8;
+    senderSSRC += *_ptrRTCPData++;
+
+    WebRtc_UWord32 name = *_ptrRTCPData++ << 24;
+    name += *_ptrRTCPData++ << 16;
+    name += *_ptrRTCPData++ << 8;
+    name += *_ptrRTCPData++;
+
+    length  = _ptrRTCPBlockEnd - _ptrRTCPData;
+
+    _packetType = kRtcpAppCode;
+
+    _packet.APP.SubType = header.IC;
+    _packet.APP.Name = name;
+
+    _state = State_AppItem;
+    return true;
+}
+
+bool
+RTCPUtility::RTCPParserV2::ParseAPPItem()
+{
+    const ptrdiff_t length = _ptrRTCPBlockEnd - _ptrRTCPData;
+    if (length < 4)
+    {
+        _state = State_TopLevel;
+
+        EndCurrentBlock();
+        return false;
+    }
+    _packetType = kRtcpAppItemCode;
+
+    if(length > kRtcpAppCode_DATA_SIZE)
+    {
+        memcpy(_packet.APP.Data, _ptrRTCPData, kRtcpAppCode_DATA_SIZE);
+        _packet.APP.Size = kRtcpAppCode_DATA_SIZE;
+        _ptrRTCPData += kRtcpAppCode_DATA_SIZE;
+    }else
+    {
+        memcpy(_packet.APP.Data, _ptrRTCPData, length);
+        _packet.APP.Size = (WebRtc_UWord16)length;
+        _ptrRTCPData += length;
+    }
+    return true;
+}
+
+RTCPUtility::RTCPPacketIterator::RTCPPacketIterator(WebRtc_UWord8* rtcpData,
+                                                    size_t rtcpDataLength)
+    : _ptrBegin(rtcpData),
+      _ptrEnd(rtcpData + rtcpDataLength),
+      _ptrBlock(NULL) {
+  memset(&_header, 0, sizeof(_header));
+}
+
+RTCPUtility::RTCPPacketIterator::~RTCPPacketIterator() {
+}
+
+const RTCPUtility::RTCPCommonHeader*
+RTCPUtility::RTCPPacketIterator::Begin()
+{
+    _ptrBlock = _ptrBegin;
+
+    return Iterate();
+}
+
+const RTCPUtility::RTCPCommonHeader*
+RTCPUtility::RTCPPacketIterator::Iterate()
+{
+    const bool success = RTCPParseCommonHeader(_ptrBlock, _ptrEnd, _header);
+    if (!success)
+    {
+        _ptrBlock = NULL;
+        return NULL;
+    }
+    _ptrBlock += _header.LengthInOctets;
+
+    if (_ptrBlock > _ptrEnd)
+    {
+        _ptrBlock = NULL;
+        return  NULL;
+    }
+
+    return &_header;
+}
+
+const RTCPUtility::RTCPCommonHeader*
+RTCPUtility::RTCPPacketIterator::Current()
+{
+    if (!_ptrBlock)
+    {
+        return NULL;
+    }
+
+    return &_header;
+}
+} // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/rtcp_utility.h b/src/modules/rtp_rtcp/source/rtcp_utility.h
new file mode 100644
index 0000000..cce1f0b
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtcp_utility.h
@@ -0,0 +1,446 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_UTILITY_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_UTILITY_H_
+
+#include <cstddef> // size_t, ptrdiff_t
+
+#include "typedefs.h"
+#include "rtp_rtcp_config.h"
+#include "rtp_rtcp_defines.h"
+
+namespace webrtc {
+namespace RTCPUtility {
+    // CNAME
+    struct RTCPCnameInformation
+    {
+        char name[RTCP_CNAME_SIZE];
+    };
+    struct RTCPPacketRR
+    {
+        WebRtc_UWord32 SenderSSRC;
+        WebRtc_UWord8  NumberOfReportBlocks;
+    };
+    struct RTCPPacketSR
+    {
+        WebRtc_UWord32 SenderSSRC;
+        WebRtc_UWord8  NumberOfReportBlocks;
+
+        // sender info
+        WebRtc_UWord32 NTPMostSignificant;
+        WebRtc_UWord32 NTPLeastSignificant;
+        WebRtc_UWord32 RTPTimestamp;
+        WebRtc_UWord32 SenderPacketCount;
+        WebRtc_UWord32 SenderOctetCount;
+    };
+    struct RTCPPacketReportBlockItem
+    {
+        // report block
+        WebRtc_UWord32 SSRC;
+        WebRtc_UWord8  FractionLost;
+        WebRtc_UWord32 CumulativeNumOfPacketsLost;
+        WebRtc_UWord32 ExtendedHighestSequenceNumber;
+        WebRtc_UWord32 Jitter;
+        WebRtc_UWord32 LastSR;
+        WebRtc_UWord32 DelayLastSR;
+    };
+    struct RTCPPacketSDESCName
+    {
+        // RFC3550
+        WebRtc_UWord32 SenderSSRC;
+        char CName[RTCP_CNAME_SIZE];
+    };
+
+    struct RTCPPacketExtendedJitterReportItem
+    {
+        // RFC 5450
+        WebRtc_UWord32 Jitter;
+    };
+
+    struct RTCPPacketBYE
+    {
+        WebRtc_UWord32 SenderSSRC;
+    };
+    struct RTCPPacketXR
+    {
+        // RFC 3611
+        WebRtc_UWord32 OriginatorSSRC;
+    };
+    struct RTCPPacketXRVOIPMetricItem
+    {
+        // RFC 3611 4.7
+        WebRtc_UWord32    SSRC;
+        WebRtc_UWord8     lossRate;
+        WebRtc_UWord8     discardRate;
+        WebRtc_UWord8     burstDensity;
+        WebRtc_UWord8     gapDensity;
+        WebRtc_UWord16    burstDuration;
+        WebRtc_UWord16    gapDuration;
+        WebRtc_UWord16    roundTripDelay;
+        WebRtc_UWord16    endSystemDelay;
+        WebRtc_UWord8     signalLevel;
+        WebRtc_UWord8     noiseLevel;
+        WebRtc_UWord8     RERL;
+        WebRtc_UWord8     Gmin;
+        WebRtc_UWord8     Rfactor;
+        WebRtc_UWord8     extRfactor;
+        WebRtc_UWord8     MOSLQ;
+        WebRtc_UWord8     MOSCQ;
+        WebRtc_UWord8     RXconfig;
+        WebRtc_UWord16    JBnominal;
+        WebRtc_UWord16    JBmax;
+        WebRtc_UWord16    JBabsMax;
+    };
+
+    struct RTCPPacketRTPFBNACK
+    {
+        WebRtc_UWord32 SenderSSRC;
+        WebRtc_UWord32 MediaSSRC;
+    };
+    struct RTCPPacketRTPFBNACKItem
+    {
+        // RFC4585
+        WebRtc_UWord16 PacketID;
+        WebRtc_UWord16 BitMask;
+    };
+
+    struct RTCPPacketRTPFBTMMBR
+    {
+        WebRtc_UWord32 SenderSSRC;
+        WebRtc_UWord32 MediaSSRC; // zero!
+    };
+    struct RTCPPacketRTPFBTMMBRItem
+    {
+        // RFC5104
+        WebRtc_UWord32 SSRC;
+        WebRtc_UWord32 MaxTotalMediaBitRate; // In Kbit/s
+        WebRtc_UWord32 MeasuredOverhead;
+    };
+
+    struct RTCPPacketRTPFBTMMBN
+    {
+        WebRtc_UWord32 SenderSSRC;
+        WebRtc_UWord32 MediaSSRC; // zero!
+    };
+    struct RTCPPacketRTPFBTMMBNItem
+    {
+        // RFC5104
+        WebRtc_UWord32 SSRC; // "Owner"
+        WebRtc_UWord32 MaxTotalMediaBitRate;
+        WebRtc_UWord32 MeasuredOverhead;
+    };
+
+    struct RTCPPacketPSFBFIR
+    {
+        WebRtc_UWord32 SenderSSRC;
+        WebRtc_UWord32 MediaSSRC; // zero!
+    };
+    struct RTCPPacketPSFBFIRItem
+    {
+        // RFC5104
+        WebRtc_UWord32 SSRC;
+        WebRtc_UWord8  CommandSequenceNumber;
+    };
+
+    struct RTCPPacketPSFBPLI
+    {
+        // RFC4585
+        WebRtc_UWord32 SenderSSRC;
+        WebRtc_UWord32 MediaSSRC;
+    };
+
+    struct RTCPPacketPSFBSLI
+    {
+        // RFC4585
+        WebRtc_UWord32 SenderSSRC;
+        WebRtc_UWord32 MediaSSRC;
+    };
+    struct RTCPPacketPSFBSLIItem
+    {
+        // RFC4585
+        WebRtc_UWord16 FirstMB;
+        WebRtc_UWord16 NumberOfMB;
+        WebRtc_UWord8 PictureId;
+    };
+    struct RTCPPacketPSFBRPSI
+    {
+        // RFC4585
+        WebRtc_UWord32 SenderSSRC;
+        WebRtc_UWord32 MediaSSRC;
+        WebRtc_UWord8  PayloadType;
+        WebRtc_UWord16 NumberOfValidBits;
+        WebRtc_UWord8  NativeBitString[RTCP_RPSI_DATA_SIZE];
+    };
+    struct RTCPPacketPSFBAPP
+    {
+        WebRtc_UWord32 SenderSSRC;
+        WebRtc_UWord32 MediaSSRC;
+    };
+    struct RTCPPacketPSFBREMBItem
+    {
+        WebRtc_UWord32 BitRate;
+        WebRtc_UWord8 NumberOfSSRCs;
+        WebRtc_UWord32 SSRCs[MAX_NUMBER_OF_REMB_FEEDBACK_SSRCS];
+    };
+    // generic name APP
+    struct RTCPPacketAPP
+    {
+        WebRtc_UWord8     SubType;
+        WebRtc_UWord32    Name;
+        WebRtc_UWord8     Data[kRtcpAppCode_DATA_SIZE];
+        WebRtc_UWord16    Size;
+    };
+
+    union RTCPPacket
+    {
+        RTCPPacketRR              RR;
+        RTCPPacketSR              SR;
+        RTCPPacketReportBlockItem ReportBlockItem;
+
+        RTCPPacketSDESCName       CName;
+        RTCPPacketBYE             BYE;
+
+        RTCPPacketExtendedJitterReportItem ExtendedJitterReportItem;
+
+        RTCPPacketRTPFBNACK       NACK;
+        RTCPPacketRTPFBNACKItem   NACKItem;
+
+        RTCPPacketPSFBPLI         PLI;
+        RTCPPacketPSFBSLI         SLI;
+        RTCPPacketPSFBSLIItem     SLIItem;
+        RTCPPacketPSFBRPSI        RPSI;
+        RTCPPacketPSFBAPP         PSFBAPP;
+        RTCPPacketPSFBREMBItem    REMBItem;
+
+        RTCPPacketRTPFBTMMBR      TMMBR;
+        RTCPPacketRTPFBTMMBRItem  TMMBRItem;
+        RTCPPacketRTPFBTMMBN      TMMBN;
+        RTCPPacketRTPFBTMMBNItem  TMMBNItem;
+        RTCPPacketPSFBFIR         FIR;
+        RTCPPacketPSFBFIRItem     FIRItem;
+
+        RTCPPacketXR               XR;
+        RTCPPacketXRVOIPMetricItem XRVOIPMetricItem;
+
+        RTCPPacketAPP             APP;
+    };
+
+    enum RTCPPacketTypes
+    {
+        kRtcpNotValidCode,
+
+        // RFC3550
+        kRtcpRrCode,
+        kRtcpSrCode,
+        kRtcpReportBlockItemCode,
+
+        kRtcpSdesCode,
+        kRtcpSdesChunkCode,
+        kRtcpByeCode,
+
+        // RFC5450
+        kRtcpExtendedIjCode,
+        kRtcpExtendedIjItemCode,
+
+        // RFC4585
+        kRtcpRtpfbNackCode,
+        kRtcpRtpfbNackItemCode,
+
+        kRtcpPsfbPliCode,
+        kRtcpPsfbRpsiCode,
+        kRtcpPsfbSliCode,
+        kRtcpPsfbSliItemCode,
+        kRtcpPsfbAppCode,
+        kRtcpPsfbRembCode,
+        kRtcpPsfbRembItemCode,
+
+        // RFC5104
+        kRtcpRtpfbTmmbrCode,
+        kRtcpRtpfbTmmbrItemCode,
+        kRtcpRtpfbTmmbnCode,
+        kRtcpRtpfbTmmbnItemCode,
+        kRtcpPsfbFirCode,
+        kRtcpPsfbFirItemCode,
+
+        // draft-perkins-avt-rapid-rtp-sync
+        kRtcpRtpfbSrReqCode,
+
+        // RFC 3611
+        kRtcpXrVoipMetricCode,
+
+        kRtcpAppCode,
+        kRtcpAppItemCode,
+    };
+
+    struct RTCPRawPacket
+    {
+        const WebRtc_UWord8* _ptrPacketBegin;
+        const WebRtc_UWord8* _ptrPacketEnd;
+    };
+
+    struct RTCPModRawPacket
+    {
+        WebRtc_UWord8* _ptrPacketBegin;
+        WebRtc_UWord8* _ptrPacketEnd;
+    };
+
+    struct RTCPCommonHeader
+    {
+        WebRtc_UWord8  V;  // Version
+        bool           P;  // Padding
+        WebRtc_UWord8  IC; // Item count/subtype
+        WebRtc_UWord8  PT; // Packet Type
+        WebRtc_UWord16 LengthInOctets;
+    };
+
+    enum RTCPPT
+    {
+        PT_IJ    = 195,
+        PT_SR    = 200,
+        PT_RR    = 201,
+        PT_SDES  = 202,
+        PT_BYE   = 203,
+        PT_APP   = 204,
+        PT_RTPFB = 205,
+        PT_PSFB  = 206,
+        PT_XR    = 207
+    };
+
+    bool RTCPParseCommonHeader( const WebRtc_UWord8* ptrDataBegin,
+                                const WebRtc_UWord8* ptrDataEnd,
+                                RTCPCommonHeader& parsedHeader);
+
+    class RTCPParserV2
+    {
+    public:
+        RTCPParserV2(const WebRtc_UWord8* rtcpData,
+                     size_t rtcpDataLength,
+                     bool rtcpReducedSizeEnable); // Set to true, to allow non-compound RTCP!
+        ~RTCPParserV2();
+
+        RTCPPacketTypes PacketType() const;
+        const RTCPPacket& Packet() const;
+        const RTCPRawPacket& RawPacket() const;
+        ptrdiff_t LengthLeft() const;
+
+        bool IsValid() const;
+
+        RTCPPacketTypes Begin();
+        RTCPPacketTypes Iterate();
+
+    private:
+        enum ParseState
+        {
+            State_TopLevel,        // Top level packet
+            State_ReportBlockItem, // SR/RR report block
+            State_SDESChunk,       // SDES chunk
+            State_BYEItem,         // BYE item
+            State_ExtendedJitterItem, // Extended jitter report item
+            State_RTPFB_NACKItem,  // NACK FCI item
+            State_RTPFB_TMMBRItem, // TMMBR FCI item
+            State_RTPFB_TMMBNItem, // TMMBN FCI item
+            State_PSFB_SLIItem,    // SLI FCI item
+            State_PSFB_RPSIItem,   // RPSI FCI item
+            State_PSFB_FIRItem,    // FIR FCI item
+            State_PSFB_AppItem,    // Application specific FCI item
+            State_PSFB_REMBItem,   // Application specific REMB item
+            State_XRItem,
+            State_AppItem
+        };
+
+    private:
+        void IterateTopLevel();
+        void IterateReportBlockItem();
+        void IterateSDESChunk();
+        void IterateBYEItem();
+        void IterateExtendedJitterItem();
+        void IterateNACKItem();
+        void IterateTMMBRItem();
+        void IterateTMMBNItem();
+        void IterateSLIItem();
+        void IterateRPSIItem();
+        void IterateFIRItem();
+        void IteratePsfbAppItem();
+        void IteratePsfbREMBItem();
+        void IterateAppItem();
+
+        void Validate();
+        void EndCurrentBlock();
+
+        bool ParseRR();
+        bool ParseSR();
+        bool ParseReportBlockItem();
+
+        bool ParseSDES();
+        bool ParseSDESChunk();
+        bool ParseSDESItem();
+
+        bool ParseBYE();
+        bool ParseBYEItem();
+
+        bool ParseIJ();
+        bool ParseIJItem();
+
+        bool ParseXR();
+        bool ParseXRItem();
+        bool ParseXRVOIPMetricItem();
+
+        bool ParseFBCommon(const RTCPCommonHeader& header);
+        bool ParseNACKItem();
+        bool ParseTMMBRItem();
+        bool ParseTMMBNItem();
+        bool ParseSLIItem();
+        bool ParseRPSIItem();
+        bool ParseFIRItem();
+        bool ParsePsfbAppItem();
+        bool ParsePsfbREMBItem();
+
+        bool ParseAPP(const RTCPCommonHeader& header);
+        bool ParseAPPItem();
+
+    private:
+        const WebRtc_UWord8* const _ptrRTCPDataBegin;
+        const bool                 _RTCPReducedSizeEnable;
+        const WebRtc_UWord8* const _ptrRTCPDataEnd;
+
+        bool                     _validPacket;
+        const WebRtc_UWord8*     _ptrRTCPData;
+        const WebRtc_UWord8*     _ptrRTCPBlockEnd;
+
+        ParseState               _state;
+        WebRtc_UWord8            _numberOfBlocks;
+
+        RTCPPacketTypes          _packetType;
+        RTCPPacket               _packet;
+    };
+
+    class RTCPPacketIterator
+    {
+    public:
+        RTCPPacketIterator(WebRtc_UWord8* rtcpData,
+                            size_t rtcpDataLength);
+        ~RTCPPacketIterator();
+
+        const RTCPCommonHeader* Begin();
+        const RTCPCommonHeader* Iterate();
+        const RTCPCommonHeader* Current();
+
+    private:
+        WebRtc_UWord8* const     _ptrBegin;
+        WebRtc_UWord8* const     _ptrEnd;
+
+        WebRtc_UWord8*           _ptrBlock;
+
+        RTCPCommonHeader         _header;
+    };
+} // RTCPUtility
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTCP_UTILITY_H_
diff --git a/src/modules/rtp_rtcp/source/rtp_fec_unittest.cc b/src/modules/rtp_rtcp/source/rtp_fec_unittest.cc
new file mode 100644
index 0000000..fef60a0
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_fec_unittest.cc
@@ -0,0 +1,874 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/forward_error_correction.h"
+
+#include <gtest/gtest.h>
+#include <list>
+
+#include "rtp_utility.h"
+
+using webrtc::ForwardErrorCorrection;
+
+// Minimum RTP header size in bytes.
+const uint8_t kRtpHeaderSize = 12;
+
+// Transport header size in bytes. Assume UDP/IPv4 as a reasonable minimum.
+const uint8_t kTransportOverhead = 28;
+
+// Maximum number of media packets used in the FEC (RFC 5109).
+const uint8_t kMaxNumberMediaPackets = ForwardErrorCorrection::kMaxMediaPackets;
+
+typedef std::list<ForwardErrorCorrection::Packet*> PacketList;
+typedef std::list<ForwardErrorCorrection::ReceivedPacket*> ReceivedPacketList;
+typedef std::list<ForwardErrorCorrection::RecoveredPacket*> RecoveredPacketList;
+
+template<typename T> void ClearList(std::list<T*>* my_list) {
+  T* packet = NULL;
+  while (!my_list->empty()) {
+    packet = my_list->front();
+    delete packet;
+    my_list->pop_front();
+  }
+}
+
+class RtpFecTest : public ::testing::Test {
+ protected:
+  RtpFecTest()
+      :  fec_(new ForwardErrorCorrection(0)),
+         ssrc_(rand()),
+         fec_seq_num_(0) {
+  }
+
+  ForwardErrorCorrection* fec_;
+  int ssrc_;
+  uint16_t fec_seq_num_;
+
+  PacketList media_packet_list_;
+  PacketList fec_packet_list_;
+  ReceivedPacketList received_packet_list_;
+  RecoveredPacketList recovered_packet_list_;
+
+  // Media packet "i" is lost if media_loss_mask_[i] = 1,
+  // received if media_loss_mask_[i] = 0.
+  int media_loss_mask_[kMaxNumberMediaPackets];
+
+  // FEC packet "i" is lost if fec_loss_mask_[i] = 1,
+  // received if fec_loss_mask_[i] = 0.
+  int fec_loss_mask_[kMaxNumberMediaPackets];
+
+  // Construct the media packet list, up to |num_media_packets| packets.
+  // Returns the next sequence number after the last media packet.
+  // (this will be the sequence of the first FEC packet)
+  int ConstructMediaPacketsSeqNum(int num_media_packets,
+                                  int start_seq_num);
+  int ConstructMediaPackets(int num_media_packets);
+
+  // Construct the received packet list: a subset of the media and FEC packets.
+  void NetworkReceivedPackets();
+
+  // Add packet from |packet_list| to list of received packets, using the
+  // |loss_mask|.
+  // The |packet_list| may be a media packet list (is_fec = false), or a
+  // FEC packet list (is_fec = true).
+  void ReceivedPackets(
+      const PacketList& packet_list,
+      int* loss_mask,
+      bool is_fec);
+
+  // Check for complete recovery after FEC decoding.
+  bool IsRecoveryComplete();
+
+  // Delete the received packets.
+  void FreeRecoveredPacketList();
+
+  // Delete the media and FEC packets.
+  void TearDown();
+};
+
+// TODO(marpan): Consider adding table for input/output to simplify tests.
+
+TEST_F(RtpFecTest, HandleIncorrectInputs) {
+  int kNumImportantPackets = 0;
+  bool kUseUnequalProtection =  false;
+  uint8_t kProtectionFactor = 60;
+
+  // Media packet list is empty.
+  EXPECT_EQ(-1, fec_->GenerateFEC(media_packet_list_,
+                                  kProtectionFactor,
+                                  kNumImportantPackets,
+                                  kUseUnequalProtection,
+                                  webrtc::kFecMaskBursty,
+                                  &fec_packet_list_));
+
+  int num_media_packets = 10;
+  ConstructMediaPackets(num_media_packets);
+
+  kNumImportantPackets = -1;
+  // Number of important packets below 0.
+  EXPECT_EQ(-1, fec_->GenerateFEC(media_packet_list_,
+                                  kProtectionFactor,
+                                  kNumImportantPackets,
+                                  kUseUnequalProtection,
+                                  webrtc::kFecMaskBursty,
+                                  &fec_packet_list_));
+
+  kNumImportantPackets = 12;
+  // Number of important packets greater than number of media packets.
+  EXPECT_EQ(-1, fec_->GenerateFEC(media_packet_list_,
+                                  kProtectionFactor,
+                                  kNumImportantPackets,
+                                  kUseUnequalProtection,
+                                  webrtc::kFecMaskBursty,
+                                  &fec_packet_list_));
+
+  num_media_packets = kMaxNumberMediaPackets + 1;
+  ConstructMediaPackets(num_media_packets);
+
+  kNumImportantPackets = 0;
+  // Number of media packet is above maximum allowed (kMaxNumberMediaPackets).
+  EXPECT_EQ(-1, fec_->GenerateFEC(media_packet_list_,
+                                  kProtectionFactor,
+                                  kNumImportantPackets,
+                                  kUseUnequalProtection,
+                                  webrtc::kFecMaskBursty,
+                                  &fec_packet_list_));
+}
+
+TEST_F(RtpFecTest, FecRecoveryNoLoss) {
+  const int kNumImportantPackets = 0;
+  const bool kUseUnequalProtection =  false;
+  const int kNumMediaPackets = 4;
+  uint8_t kProtectionFactor = 60;
+
+  fec_seq_num_ = ConstructMediaPackets(kNumMediaPackets);
+
+  EXPECT_EQ(0, fec_->GenerateFEC(media_packet_list_,
+                                 kProtectionFactor,
+                                 kNumImportantPackets,
+                                 kUseUnequalProtection,
+                                 webrtc::kFecMaskBursty,
+                                 &fec_packet_list_));
+
+  // Expect 1 FEC packet.
+  EXPECT_EQ(1, static_cast<int>(fec_packet_list_.size()));
+
+  // No packets lost.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // No packets lost, expect complete recovery.
+  EXPECT_TRUE(IsRecoveryComplete());
+}
+
+TEST_F(RtpFecTest, FecRecoveryWithLoss) {
+  const int kNumImportantPackets = 0;
+  const bool kUseUnequalProtection = false;
+  const int kNumMediaPackets = 4;
+  uint8_t kProtectionFactor = 60;
+
+  fec_seq_num_ = ConstructMediaPackets(kNumMediaPackets);
+
+  EXPECT_EQ(0, fec_->GenerateFEC(media_packet_list_,
+                                 kProtectionFactor,
+                                 kNumImportantPackets,
+                                 kUseUnequalProtection,
+                                 webrtc::kFecMaskBursty,
+                                 &fec_packet_list_));
+
+  // Expect 1 FEC packet.
+  EXPECT_EQ(1, static_cast<int>(fec_packet_list_.size()));
+
+  // 1 media packet lost
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  media_loss_mask_[3] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // One packet lost, one FEC packet, expect complete recovery.
+  EXPECT_TRUE(IsRecoveryComplete());
+  FreeRecoveredPacketList();
+
+  // 2 media packets lost.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  media_loss_mask_[1] = 1;
+  media_loss_mask_[3] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // 2 packets lost, one FEC packet, cannot get complete recovery.
+  EXPECT_FALSE(IsRecoveryComplete());
+}
+
+// Test 50% protection with random mask type: Two cases are considered:
+// a 50% non-consecutive loss which can be fully recovered, and a 50%
+// consecutive loss which cannot be fully recovered.
+TEST_F(RtpFecTest, FecRecoveryWithLoss50percRandomMask) {
+  const int kNumImportantPackets = 0;
+  const bool kUseUnequalProtection =  false;
+  const int kNumMediaPackets = 4;
+  const uint8_t kProtectionFactor = 255;
+
+  // Packet Mask for (4,4,0) code, from random mask table.
+  // (kNumMediaPackets = 4; num_fec_packets = 4, kNumImportantPackets = 0)
+
+  //         media#0   media#1  media#2    media#3
+  // fec#0:    1          1        0          0
+  // fec#1:    1          0        1          0
+  // fec#2:    0          0        1          1
+  // fec#3:    0          1        0          1
+  //
+
+  fec_seq_num_ = ConstructMediaPackets(kNumMediaPackets);
+
+  EXPECT_EQ(0, fec_->GenerateFEC(media_packet_list_,
+                                 kProtectionFactor,
+                                 kNumImportantPackets,
+                                 kUseUnequalProtection,
+                                 webrtc::kFecMaskRandom,
+                                 &fec_packet_list_));
+
+  // Expect 4 FEC packets.
+  EXPECT_EQ(4, static_cast<int>(fec_packet_list_.size()));
+
+  // 4 packets lost: 3 media packets (0, 2, 3), and one FEC packet (0) lost.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  fec_loss_mask_[0] = 1;
+  media_loss_mask_[0] = 1;
+  media_loss_mask_[2] = 1;
+  media_loss_mask_[3] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // With media packet#1 and FEC packets #1, #2, #3, expect complete recovery.
+  EXPECT_TRUE(IsRecoveryComplete());
+  FreeRecoveredPacketList();
+
+  // 4 consecutive packets lost: media packets 0, 1, 2, 3.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  media_loss_mask_[0] = 1;
+  media_loss_mask_[1] = 1;
+  media_loss_mask_[2] = 1;
+  media_loss_mask_[3] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // Cannot get complete recovery for this loss configuration with random mask.
+  EXPECT_FALSE(IsRecoveryComplete());
+}
+
+// Test 50% protection with bursty type: Three cases are considered:
+// two 50% consecutive losses which can be fully recovered, and one
+// non-consecutive which cannot be fully recovered.
+TEST_F(RtpFecTest, FecRecoveryWithLoss50percBurstyMask) {
+  const int kNumImportantPackets = 0;
+  const bool kUseUnequalProtection =  false;
+  const int kNumMediaPackets = 4;
+  const uint8_t kProtectionFactor = 255;
+
+  // Packet Mask for (4,4,0) code, from bursty mask table.
+  // (kNumMediaPackets = 4; num_fec_packets = 4, kNumImportantPackets = 0)
+
+  //         media#0   media#1  media#2    media#3
+  // fec#0:    1          0        0          0
+  // fec#1:    1          1        0          0
+  // fec#2:    0          1        1          0
+  // fec#3:    0          0        1          1
+  //
+
+  fec_seq_num_ = ConstructMediaPackets(kNumMediaPackets);
+
+  EXPECT_EQ(0, fec_->GenerateFEC(media_packet_list_,
+                                 kProtectionFactor,
+                                 kNumImportantPackets,
+                                 kUseUnequalProtection,
+                                 webrtc::kFecMaskBursty,
+                                 &fec_packet_list_));
+
+  // Expect 4 FEC packets.
+  EXPECT_EQ(4, static_cast<int>(fec_packet_list_.size()));
+
+  // 4 consecutive packets lost: media packets 0,1,2,3.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  media_loss_mask_[0] = 1;
+  media_loss_mask_[1] = 1;
+  media_loss_mask_[2] = 1;
+  media_loss_mask_[3] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_,
+                               &recovered_packet_list_));
+
+  // Expect complete recovery for consecutive packet loss <= 50%.
+  EXPECT_TRUE(IsRecoveryComplete());
+  FreeRecoveredPacketList();
+
+  // 4 consecutive packets lost: media packets 1,2, 3, and FEC packet 0.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  fec_loss_mask_[0] = 1;
+  media_loss_mask_[1] = 1;
+  media_loss_mask_[2] = 1;
+  media_loss_mask_[3] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // Expect complete recovery for consecutive packet loss <= 50%.
+  EXPECT_TRUE(IsRecoveryComplete());
+  FreeRecoveredPacketList();
+
+  // 4 packets lost (non-consecutive loss): media packets 0, 3, and FEC# 0, 3.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  fec_loss_mask_[0] = 1;
+  fec_loss_mask_[3] = 1;
+  media_loss_mask_[0] = 1;
+  media_loss_mask_[3] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // Cannot get complete recovery for this loss configuration.
+  EXPECT_FALSE(IsRecoveryComplete());
+}
+
+TEST_F(RtpFecTest, FecRecoveryNoLossUep) {
+  const int kNumImportantPackets = 2;
+  const bool kUseUnequalProtection =  true;
+  const int kNumMediaPackets = 4;
+  const uint8_t kProtectionFactor = 60;
+
+  fec_seq_num_ = ConstructMediaPackets(kNumMediaPackets);
+
+  EXPECT_EQ(0, fec_->GenerateFEC(media_packet_list_,
+                                 kProtectionFactor,
+                                 kNumImportantPackets,
+                                 kUseUnequalProtection,
+                                 webrtc::kFecMaskBursty,
+                                 &fec_packet_list_));
+
+  // Expect 1 FEC packet.
+  EXPECT_EQ(1, static_cast<int>(fec_packet_list_.size()));
+
+  // No packets lost.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // No packets lost, expect complete recovery.
+  EXPECT_TRUE(IsRecoveryComplete());
+}
+
+TEST_F(RtpFecTest, FecRecoveryWithLossUep) {
+  const int kNumImportantPackets = 2;
+  const bool kUseUnequalProtection =  true;
+  const int kNumMediaPackets = 4;
+  const uint8_t kProtectionFactor = 60;
+
+  fec_seq_num_ = ConstructMediaPackets(kNumMediaPackets);
+
+  EXPECT_EQ(0, fec_->GenerateFEC(media_packet_list_,
+                                 kProtectionFactor,
+                                 kNumImportantPackets,
+                                 kUseUnequalProtection,
+                                 webrtc::kFecMaskBursty,
+                                 &fec_packet_list_));
+
+  // Expect 1 FEC packet.
+  EXPECT_EQ(1, static_cast<int>(fec_packet_list_.size()));
+
+  // 1 media packet lost.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  media_loss_mask_[3] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // One packet lost, one FEC packet, expect complete recovery.
+  EXPECT_TRUE(IsRecoveryComplete());
+  FreeRecoveredPacketList();
+
+  // 2 media packets lost.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  media_loss_mask_[1] = 1;
+  media_loss_mask_[3] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // 2 packets lost, one FEC packet, cannot get complete recovery.
+  EXPECT_FALSE(IsRecoveryComplete());
+}
+
+// Test 50% protection with random mask type for UEP on.
+TEST_F(RtpFecTest, FecRecoveryWithLoss50percUepRandomMask) {
+  const int kNumImportantPackets = 1;
+  const bool kUseUnequalProtection =  true;
+  const int kNumMediaPackets = 4;
+  const uint8_t kProtectionFactor = 255;
+
+  // Packet Mask for (4,4,1) code, from random mask table.
+  // (kNumMediaPackets = 4; num_fec_packets = 4, kNumImportantPackets = 1)
+
+  //         media#0   media#1  media#2    media#3
+  // fec#0:    1          0        0          0
+  // fec#1:    1          1        0          0
+  // fec#2:    1          0        1          1
+  // fec#3:    0          1        1          0
+  //
+
+  fec_seq_num_ = ConstructMediaPackets(kNumMediaPackets);
+
+  EXPECT_EQ(0, fec_->GenerateFEC(media_packet_list_,
+                                 kProtectionFactor,
+                                 kNumImportantPackets,
+                                 kUseUnequalProtection,
+                                 webrtc::kFecMaskRandom,
+                                 &fec_packet_list_));
+
+  // Expect 4 FEC packets.
+  EXPECT_EQ(4, static_cast<int>(fec_packet_list_.size()));
+
+  // 4 packets lost: 3 media packets and FEC packet#1 lost.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  fec_loss_mask_[1] = 1;
+  media_loss_mask_[0] = 1;
+  media_loss_mask_[2] = 1;
+  media_loss_mask_[3] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // With media packet#3 and FEC packets #0, #1, #3, expect complete recovery.
+  EXPECT_TRUE(IsRecoveryComplete());
+  FreeRecoveredPacketList();
+
+  // 5 packets lost: 4 media packets and one FEC packet#2 lost.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  fec_loss_mask_[2] = 1;
+  media_loss_mask_[0] = 1;
+  media_loss_mask_[1] = 1;
+  media_loss_mask_[2] = 1;
+  media_loss_mask_[3] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // Cannot get complete recovery for this loss configuration.
+  EXPECT_FALSE(IsRecoveryComplete());
+}
+
+TEST_F(RtpFecTest, FecRecoveryNonConsecutivePackets) {
+  const int kNumImportantPackets = 0;
+  const bool kUseUnequalProtection = false;
+  const int kNumMediaPackets = 5;
+  uint8_t kProtectionFactor = 60;
+
+  fec_seq_num_ = ConstructMediaPackets(kNumMediaPackets);
+
+  // Create a new temporary packet list for generating FEC packets.
+  // This list should have every other packet removed.
+  PacketList protected_media_packets;
+  int i = 0;
+  for (PacketList::iterator it = media_packet_list_.begin();
+      it != media_packet_list_.end(); ++it, ++i) {
+    if (i % 2 == 0)
+      protected_media_packets.push_back(*it);
+  }
+
+  EXPECT_EQ(0, fec_->GenerateFEC(protected_media_packets,
+                                 kProtectionFactor,
+                                 kNumImportantPackets,
+                                 kUseUnequalProtection,
+                                 webrtc::kFecMaskBursty,
+                                 &fec_packet_list_));
+
+  // Expect 1 FEC packet.
+  EXPECT_EQ(1, static_cast<int>(fec_packet_list_.size()));
+
+  // 1 protected media packet lost
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  media_loss_mask_[2] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // One packet lost, one FEC packet, expect complete recovery.
+  EXPECT_TRUE(IsRecoveryComplete());
+  FreeRecoveredPacketList();
+
+  // Unprotected packet lost.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  media_loss_mask_[1] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // Unprotected packet lost. Recovery not possible.
+  EXPECT_FALSE(IsRecoveryComplete());
+  FreeRecoveredPacketList();
+
+  // 2 media packets lost.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  media_loss_mask_[0] = 1;
+  media_loss_mask_[2] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // 2 protected packets lost, one FEC packet, cannot get complete recovery.
+  EXPECT_FALSE(IsRecoveryComplete());
+}
+
+TEST_F(RtpFecTest, FecRecoveryNonConsecutivePacketsExtension) {
+  const int kNumImportantPackets = 0;
+  const bool kUseUnequalProtection = false;
+  const int kNumMediaPackets = 21;
+  uint8_t kProtectionFactor = 127;
+
+  fec_seq_num_ = ConstructMediaPackets(kNumMediaPackets);
+
+  // Create a new temporary packet list for generating FEC packets.
+  // This list should have every other packet removed.
+  PacketList protected_media_packets;
+  int i = 0;
+  for (PacketList::iterator it = media_packet_list_.begin();
+      it != media_packet_list_.end(); ++it, ++i) {
+    if (i % 2 == 0)
+      protected_media_packets.push_back(*it);
+  }
+
+  // Zero column insertion will have to extend the size of the packet
+  // mask since the number of actual packets are 21, while the number
+  // of protected packets are 11.
+  EXPECT_EQ(0, fec_->GenerateFEC(protected_media_packets,
+                                 kProtectionFactor,
+                                 kNumImportantPackets,
+                                 kUseUnequalProtection,
+                                 webrtc::kFecMaskBursty,
+                                 &fec_packet_list_));
+
+  // Expect 5 FEC packet.
+  EXPECT_EQ(5, static_cast<int>(fec_packet_list_.size()));
+
+  // Last protected media packet lost
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  media_loss_mask_[kNumMediaPackets - 1] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // One packet lost, one FEC packet, expect complete recovery.
+  EXPECT_TRUE(IsRecoveryComplete());
+  FreeRecoveredPacketList();
+
+  // Last unprotected packet lost.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  media_loss_mask_[kNumMediaPackets - 2] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // Unprotected packet lost. Recovery not possible.
+  EXPECT_FALSE(IsRecoveryComplete());
+  FreeRecoveredPacketList();
+
+  // 6 media packets lost.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  media_loss_mask_[kNumMediaPackets - 11] = 1;
+  media_loss_mask_[kNumMediaPackets - 9] = 1;
+  media_loss_mask_[kNumMediaPackets - 7] = 1;
+  media_loss_mask_[kNumMediaPackets - 5] = 1;
+  media_loss_mask_[kNumMediaPackets - 3] = 1;
+  media_loss_mask_[kNumMediaPackets - 1] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // 5 protected packets lost, one FEC packet, cannot get complete recovery.
+  EXPECT_FALSE(IsRecoveryComplete());
+}
+
+TEST_F(RtpFecTest, FecRecoveryNonConsecutivePacketsWrap) {
+  const int kNumImportantPackets = 0;
+  const bool kUseUnequalProtection = false;
+  const int kNumMediaPackets = 21;
+  uint8_t kProtectionFactor = 127;
+
+  fec_seq_num_ = ConstructMediaPacketsSeqNum(kNumMediaPackets, 0xFFFF - 5);
+
+  // Create a new temporary packet list for generating FEC packets.
+  // This list should have every other packet removed.
+  PacketList protected_media_packets;
+  int i = 0;
+  for (PacketList::iterator it = media_packet_list_.begin();
+      it != media_packet_list_.end(); ++it, ++i) {
+    if (i % 2 == 0)
+      protected_media_packets.push_back(*it);
+  }
+
+  // Zero column insertion will have to extend the size of the packet
+  // mask since the number of actual packets are 21, while the number
+  // of protected packets are 11.
+  EXPECT_EQ(0, fec_->GenerateFEC(protected_media_packets,
+                                 kProtectionFactor,
+                                 kNumImportantPackets,
+                                 kUseUnequalProtection,
+                                 webrtc::kFecMaskBursty,
+                                 &fec_packet_list_));
+
+  // Expect 5 FEC packet.
+  EXPECT_EQ(5, static_cast<int>(fec_packet_list_.size()));
+
+  // Last protected media packet lost
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  media_loss_mask_[kNumMediaPackets - 1] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // One packet lost, one FEC packet, expect complete recovery.
+  EXPECT_TRUE(IsRecoveryComplete());
+  FreeRecoveredPacketList();
+
+  // Last unprotected packet lost.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  media_loss_mask_[kNumMediaPackets - 2] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // Unprotected packet lost. Recovery not possible.
+  EXPECT_FALSE(IsRecoveryComplete());
+  FreeRecoveredPacketList();
+
+  // 6 media packets lost.
+  memset(media_loss_mask_, 0, sizeof(media_loss_mask_));
+  memset(fec_loss_mask_, 0, sizeof(fec_loss_mask_));
+  media_loss_mask_[kNumMediaPackets - 11] = 1;
+  media_loss_mask_[kNumMediaPackets - 9] = 1;
+  media_loss_mask_[kNumMediaPackets - 7] = 1;
+  media_loss_mask_[kNumMediaPackets - 5] = 1;
+  media_loss_mask_[kNumMediaPackets - 3] = 1;
+  media_loss_mask_[kNumMediaPackets - 1] = 1;
+  NetworkReceivedPackets();
+
+  EXPECT_EQ(0, fec_->DecodeFEC(&received_packet_list_ ,
+                               &recovered_packet_list_));
+
+  // 5 protected packets lost, one FEC packet, cannot get complete recovery.
+  EXPECT_FALSE(IsRecoveryComplete());
+}
+
+// TODO(marpan): Add more test cases.
+
+void RtpFecTest::TearDown() {
+  fec_->ResetState(&recovered_packet_list_);
+  delete fec_;
+  FreeRecoveredPacketList();
+  ClearList(&media_packet_list_);
+  EXPECT_TRUE(media_packet_list_.empty());
+}
+
+void RtpFecTest::FreeRecoveredPacketList() {
+  ClearList(&recovered_packet_list_);
+}
+
+bool RtpFecTest::IsRecoveryComplete() {
+  // Check that the number of media and recovered packets are equal.
+  if (media_packet_list_.size() != recovered_packet_list_.size()) {
+    return false;
+  }
+
+  ForwardErrorCorrection::Packet* media_packet;
+  ForwardErrorCorrection::RecoveredPacket* recovered_packet;
+
+  bool recovery = true;
+
+  PacketList::iterator
+    media_packet_list_item = media_packet_list_.begin();
+  RecoveredPacketList::iterator
+    recovered_packet_list_item = recovered_packet_list_.begin();
+  while (media_packet_list_item != media_packet_list_.end()) {
+    if (recovered_packet_list_item == recovered_packet_list_.end()) {
+      return false;
+    }
+    media_packet = *media_packet_list_item;
+    recovered_packet = *recovered_packet_list_item;
+    if (recovered_packet->pkt->length != media_packet->length) {
+      return false;
+    }
+    if (memcmp(recovered_packet->pkt->data, media_packet->data,
+               media_packet->length) != 0) {
+      return false;
+    }
+    media_packet_list_item++;
+    recovered_packet_list_item++;
+  }
+  return recovery;
+}
+
+void RtpFecTest::NetworkReceivedPackets() {
+  const bool kFecPacket = true;
+  ReceivedPackets(media_packet_list_, media_loss_mask_, !kFecPacket);
+  ReceivedPackets(fec_packet_list_, fec_loss_mask_, kFecPacket);
+}
+
+void RtpFecTest:: ReceivedPackets(
+    const PacketList& packet_list,
+    int* loss_mask,
+    bool is_fec) {
+  ForwardErrorCorrection::Packet* packet;
+  ForwardErrorCorrection::ReceivedPacket* received_packet;
+  int seq_num = fec_seq_num_;
+  int packet_idx = 0;
+
+  PacketList::const_iterator
+  packet_list_item = packet_list.begin();
+
+  while (packet_list_item != packet_list.end()) {
+    packet = *packet_list_item;
+    if (loss_mask[packet_idx] == 0) {
+      received_packet = new ForwardErrorCorrection::ReceivedPacket;
+      received_packet->pkt = new ForwardErrorCorrection::Packet;
+      received_packet_list_.push_back(received_packet);
+      received_packet->pkt->length = packet->length;
+      memcpy(received_packet->pkt->data, packet->data,
+             packet->length);
+      received_packet->isFec = is_fec;
+      if (!is_fec) {
+        // For media packets, the sequence number and marker bit is
+        // obtained from RTP header. These were set in ConstructMediaPackets().
+        received_packet->seqNum =
+            webrtc::ModuleRTPUtility::BufferToUWord16(&packet->data[2]);
+      }
+      else {
+        // The sequence number, marker bit, and ssrc number are defined in the
+        // RTP header of the FEC packet, which is not constructed in this test.
+        // So we set these values below based on the values generated in
+        // ConstructMediaPackets().
+        received_packet->seqNum = seq_num;
+        // The ssrc value for FEC packets is set to the one used for the
+        // media packets in ConstructMediaPackets().
+        received_packet->ssrc = ssrc_;
+      }
+    }
+    packet_idx++;
+    packet_list_item ++;
+    // Sequence number of FEC packets are defined as increment by 1 from
+    // last media packet in frame.
+    if (is_fec) seq_num++;
+  }
+}
+
+int RtpFecTest::ConstructMediaPacketsSeqNum(int num_media_packets,
+                                            int start_seq_num) {
+  assert(num_media_packets > 0);
+  ForwardErrorCorrection::Packet* media_packet = NULL;
+  int sequence_number = start_seq_num;
+  int time_stamp = rand();
+
+  for (int i = 0; i < num_media_packets; i++) {
+    media_packet = new ForwardErrorCorrection::Packet;
+    media_packet_list_.push_back(media_packet);
+    media_packet->length =
+        static_cast<uint16_t>((static_cast<float>(rand()) / RAND_MAX) *
+        (IP_PACKET_SIZE - kRtpHeaderSize - kTransportOverhead -
+            ForwardErrorCorrection::PacketOverhead()));
+
+    if (media_packet->length < kRtpHeaderSize) {
+      media_packet->length = kRtpHeaderSize;
+    }
+    // Generate random values for the first 2 bytes
+    media_packet->data[0] = static_cast<uint8_t>(rand() % 256);
+    media_packet->data[1] = static_cast<uint8_t>(rand() % 256);
+
+    // The first two bits are assumed to be 10 by the FEC encoder.
+    // In fact the FEC decoder will set the two first bits to 10 regardless of
+    // what they actually were. Set the first two bits to 10 so that a memcmp
+    // can be performed for the whole restored packet.
+    media_packet->data[0] |= 0x80;
+    media_packet->data[0] &= 0xbf;
+
+    // FEC is applied to a whole frame.
+    // A frame is signaled by multiple packets without the marker bit set
+    // followed by the last packet of the frame for which the marker bit is set.
+    // Only push one (fake) frame to the FEC.
+    media_packet->data[1] &= 0x7f;
+
+    webrtc::ModuleRTPUtility::AssignUWord16ToBuffer(&media_packet->data[2],
+                                                    sequence_number);
+    webrtc::ModuleRTPUtility::AssignUWord32ToBuffer(&media_packet->data[4],
+                                                    time_stamp);
+    webrtc::ModuleRTPUtility::AssignUWord32ToBuffer(&media_packet->data[8],
+                                                    ssrc_);
+
+    // Generate random values for payload.
+    for (int j = 12; j < media_packet->length; j++) {
+      media_packet->data[j] = static_cast<uint8_t> (rand() % 256);
+    }
+    sequence_number++;
+  }
+  // Last packet, set marker bit.
+  assert(media_packet != NULL);
+  media_packet->data[1] |= 0x80;
+  return sequence_number;
+}
+
+int RtpFecTest::ConstructMediaPackets(int num_media_packets) {
+  return ConstructMediaPacketsSeqNum(num_media_packets, rand());
+}
diff --git a/src/modules/rtp_rtcp/source/rtp_format_vp8.cc b/src/modules/rtp_rtcp/source/rtp_format_vp8.cc
new file mode 100644
index 0000000..f066ae2
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_format_vp8.cc
@@ -0,0 +1,469 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/rtp_format_vp8.h"
+
+#include <string.h>  // memcpy
+
+#include <cassert>   // assert
+#include <vector>
+
+#include "modules/rtp_rtcp/source/vp8_partition_aggregator.h"
+
+namespace webrtc {
+
+// Define how the VP8PacketizerModes are implemented.
+// Modes are: kStrict, kAggregate, kEqualSize.
+const RtpFormatVp8::AggregationMode RtpFormatVp8::aggr_modes_[kNumModes] =
+    { kAggrNone, kAggrPartitions, kAggrFragments };
+const bool RtpFormatVp8::balance_modes_[kNumModes] =
+    { true, true, true };
+const bool RtpFormatVp8::separate_first_modes_[kNumModes] =
+    { true, false, false };
+
+RtpFormatVp8::RtpFormatVp8(const WebRtc_UWord8* payload_data,
+                           WebRtc_UWord32 payload_size,
+                           const RTPVideoHeaderVP8& hdr_info,
+                           int max_payload_len,
+                           const RTPFragmentationHeader& fragmentation,
+                           VP8PacketizerMode mode)
+    : payload_data_(payload_data),
+      payload_size_(static_cast<int>(payload_size)),
+      vp8_fixed_payload_descriptor_bytes_(1),
+      aggr_mode_(aggr_modes_[mode]),
+      balance_(balance_modes_[mode]),
+      separate_first_(separate_first_modes_[mode]),
+      hdr_info_(hdr_info),
+      num_partitions_(fragmentation.fragmentationVectorSize),
+      max_payload_len_(max_payload_len),
+      packets_calculated_(false) {
+  part_info_ = fragmentation;
+}
+
+RtpFormatVp8::RtpFormatVp8(const WebRtc_UWord8* payload_data,
+                           WebRtc_UWord32 payload_size,
+                           const RTPVideoHeaderVP8& hdr_info,
+                           int max_payload_len)
+    : payload_data_(payload_data),
+      payload_size_(static_cast<int>(payload_size)),
+      part_info_(),
+      vp8_fixed_payload_descriptor_bytes_(1),
+      aggr_mode_(aggr_modes_[kEqualSize]),
+      balance_(balance_modes_[kEqualSize]),
+      separate_first_(separate_first_modes_[kEqualSize]),
+      hdr_info_(hdr_info),
+      num_partitions_(1),
+      max_payload_len_(max_payload_len),
+      packets_calculated_(false) {
+    part_info_.VerifyAndAllocateFragmentationHeader(1);
+    part_info_.fragmentationLength[0] = payload_size;
+    part_info_.fragmentationOffset[0] = 0;
+}
+
+int RtpFormatVp8::NextPacket(WebRtc_UWord8* buffer,
+                             int* bytes_to_send,
+                             bool* last_packet) {
+  if (!packets_calculated_) {
+    int ret = 0;
+    if (aggr_mode_ == kAggrPartitions && balance_) {
+      ret = GeneratePacketsBalancedAggregates();
+    } else {
+      ret = GeneratePackets();
+    }
+    if (ret < 0) {
+      return ret;
+    }
+  }
+  if (packets_.empty()) {
+    return -1;
+  }
+  InfoStruct packet_info = packets_.front();
+  packets_.pop();
+
+  *bytes_to_send = WriteHeaderAndPayload(packet_info, buffer, max_payload_len_);
+  if (*bytes_to_send < 0) {
+    return -1;
+  }
+
+  *last_packet = packets_.empty();
+  return packet_info.first_partition_ix;
+}
+
+int RtpFormatVp8::CalcNextSize(int max_payload_len, int remaining_bytes,
+                               bool split_payload) const {
+  if (max_payload_len == 0 || remaining_bytes == 0) {
+    return 0;
+  }
+  if (!split_payload) {
+    return max_payload_len >= remaining_bytes ? remaining_bytes : 0;
+  }
+
+  if (balance_) {
+    // Balance payload sizes to produce (almost) equal size
+    // fragments.
+    // Number of fragments for remaining_bytes:
+    int num_frags = remaining_bytes / max_payload_len + 1;
+    // Number of bytes in this fragment:
+    return static_cast<int>(static_cast<double>(remaining_bytes)
+                            / num_frags + 0.5);
+  } else {
+    return max_payload_len >= remaining_bytes ? remaining_bytes
+        : max_payload_len;
+  }
+}
+
+int RtpFormatVp8::GeneratePackets() {
+  if (max_payload_len_ < vp8_fixed_payload_descriptor_bytes_
+      + PayloadDescriptorExtraLength() + 1) {
+    // The provided payload length is not long enough for the payload
+    // descriptor and one payload byte. Return an error.
+    return -1;
+  }
+  int total_bytes_processed = 0;
+  bool start_on_new_fragment = true;
+  bool beginning = true;
+  int part_ix = 0;
+  while (total_bytes_processed < payload_size_) {
+    int packet_bytes = 0;  // How much data to send in this packet.
+    bool split_payload = true;  // Splitting of partitions is initially allowed.
+    int remaining_in_partition = part_info_.fragmentationOffset[part_ix] -
+        total_bytes_processed + part_info_.fragmentationLength[part_ix];
+    int rem_payload_len = max_payload_len_ -
+        (vp8_fixed_payload_descriptor_bytes_ + PayloadDescriptorExtraLength());
+    int first_partition_in_packet = part_ix;
+
+    while (int next_size = CalcNextSize(rem_payload_len, remaining_in_partition,
+                                        split_payload)) {
+      packet_bytes += next_size;
+      rem_payload_len -= next_size;
+      remaining_in_partition -= next_size;
+
+      if (remaining_in_partition == 0 && !(beginning && separate_first_)) {
+        // Advance to next partition?
+        // Check that there are more partitions; verify that we are either
+        // allowed to aggregate fragments, or that we are allowed to
+        // aggregate intact partitions and that we started this packet
+        // with an intact partition (indicated by first_fragment_ == true).
+        if (part_ix + 1 < num_partitions_ &&
+            ((aggr_mode_ == kAggrFragments) ||
+                (aggr_mode_ == kAggrPartitions && start_on_new_fragment))) {
+          assert(part_ix < num_partitions_);
+          remaining_in_partition = part_info_.fragmentationLength[++part_ix];
+          // Disallow splitting unless kAggrFragments. In kAggrPartitions,
+          // we can only aggregate intact partitions.
+          split_payload = (aggr_mode_ == kAggrFragments);
+        }
+      } else if (balance_ && remaining_in_partition > 0) {
+        break;
+      }
+    }
+    if (remaining_in_partition == 0) {
+      ++part_ix;  // Advance to next partition.
+    }
+    assert(packet_bytes > 0);
+
+    QueuePacket(total_bytes_processed, packet_bytes, first_partition_in_packet,
+                start_on_new_fragment);
+    total_bytes_processed += packet_bytes;
+    start_on_new_fragment = (remaining_in_partition == 0);
+    beginning = false;  // Next packet cannot be first packet in frame.
+  }
+  packets_calculated_ = true;
+  assert(total_bytes_processed == payload_size_);
+  return 0;
+}
+
+int RtpFormatVp8::GeneratePacketsBalancedAggregates() {
+  if (max_payload_len_ < vp8_fixed_payload_descriptor_bytes_
+      + PayloadDescriptorExtraLength() + 1) {
+    // The provided payload length is not long enough for the payload
+    // descriptor and one payload byte. Return an error.
+    return -1;
+  }
+  std::vector<int> partition_decision;
+  const int overhead = vp8_fixed_payload_descriptor_bytes_ +
+      PayloadDescriptorExtraLength();
+  const uint32_t max_payload_len = max_payload_len_ - overhead;
+  int min_size, max_size;
+  AggregateSmallPartitions(&partition_decision, &min_size, &max_size);
+
+  int total_bytes_processed = 0;
+  int part_ix = 0;
+  while (part_ix < num_partitions_) {
+    if (partition_decision[part_ix] == -1) {
+      // Split large partitions.
+      int remaining_partition = part_info_.fragmentationLength[part_ix];
+      int num_fragments = Vp8PartitionAggregator::CalcNumberOfFragments(
+          remaining_partition, max_payload_len, overhead, min_size, max_size);
+      const int packet_bytes =
+          (remaining_partition + num_fragments - 1) / num_fragments;
+      for (int n = 0; n < num_fragments; ++n) {
+        const int this_packet_bytes = packet_bytes < remaining_partition ?
+            packet_bytes : remaining_partition;
+        QueuePacket(total_bytes_processed, this_packet_bytes, part_ix,
+                    (n == 0));
+        remaining_partition -= this_packet_bytes;
+        total_bytes_processed += this_packet_bytes;
+        if (this_packet_bytes < min_size) {
+          min_size = this_packet_bytes;
+        }
+        if (this_packet_bytes > max_size) {
+          max_size = this_packet_bytes;
+        }
+      }
+      assert(remaining_partition == 0);
+      ++part_ix;
+    } else {
+      int this_packet_bytes = 0;
+      const int first_partition_in_packet = part_ix;
+      const int aggregation_index = partition_decision[part_ix];
+      while (static_cast<size_t>(part_ix) < partition_decision.size() &&
+          partition_decision[part_ix] == aggregation_index) {
+        // Collect all partitions that were aggregated into the same packet.
+        this_packet_bytes += part_info_.fragmentationLength[part_ix];
+        ++part_ix;
+      }
+      QueuePacket(total_bytes_processed, this_packet_bytes,
+                  first_partition_in_packet, true);
+      total_bytes_processed += this_packet_bytes;
+    }
+  }
+  packets_calculated_ = true;
+  return 0;
+}
+
+void RtpFormatVp8::AggregateSmallPartitions(std::vector<int>* partition_vec,
+                                            int* min_size,
+                                            int* max_size) {
+  assert(min_size && max_size);
+  *min_size = -1;
+  *max_size = -1;
+  assert(partition_vec);
+  partition_vec->assign(num_partitions_, -1);
+  const int overhead = vp8_fixed_payload_descriptor_bytes_ +
+      PayloadDescriptorExtraLength();
+  const uint32_t max_payload_len = max_payload_len_ - overhead;
+  int first_in_set = 0;
+  int last_in_set = 0;
+  int num_aggregate_packets = 0;
+  // Find sets of partitions smaller than max_payload_len_.
+  while (first_in_set < num_partitions_) {
+    if (part_info_.fragmentationLength[first_in_set] < max_payload_len) {
+      // Found start of a set.
+      last_in_set = first_in_set;
+      while (last_in_set + 1 < num_partitions_ &&
+          part_info_.fragmentationLength[last_in_set + 1] < max_payload_len) {
+        ++last_in_set;
+      }
+      // Found end of a set. Run optimized aggregator. It is ok if start == end.
+      Vp8PartitionAggregator aggregator(part_info_, first_in_set,
+                                        last_in_set);
+      if (*min_size >= 0 && *max_size >= 0) {
+        aggregator.SetPriorMinMax(*min_size, *max_size);
+      }
+      Vp8PartitionAggregator::ConfigVec optimal_config =
+          aggregator.FindOptimalConfiguration(max_payload_len, overhead);
+      aggregator.CalcMinMax(optimal_config, min_size, max_size);
+      for (int i = first_in_set, j = 0; i <= last_in_set; ++i, ++j) {
+        // Transfer configuration for this set of partitions to the joint
+        // partition vector representing all partitions in the frame.
+        (*partition_vec)[i] = num_aggregate_packets + optimal_config[j];
+      }
+      num_aggregate_packets += optimal_config.back() + 1;
+      first_in_set = last_in_set;
+    }
+    ++first_in_set;
+  }
+}
+
+void RtpFormatVp8::QueuePacket(int start_pos,
+                               int packet_size,
+                               int first_partition_in_packet,
+                               bool start_on_new_fragment) {
+  // Write info to packet info struct and store in packet info queue.
+  InfoStruct packet_info;
+  packet_info.payload_start_pos = start_pos;
+  packet_info.size = packet_size;
+  packet_info.first_partition_ix = first_partition_in_packet;
+  packet_info.first_fragment = start_on_new_fragment;
+  packets_.push(packet_info);
+}
+
+int RtpFormatVp8::WriteHeaderAndPayload(const InfoStruct& packet_info,
+                                        WebRtc_UWord8* buffer,
+                                        int buffer_length) const {
+  // Write the VP8 payload descriptor.
+  //       0
+  //       0 1 2 3 4 5 6 7 8
+  //      +-+-+-+-+-+-+-+-+-+
+  //      |X| |N|S| PART_ID |
+  //      +-+-+-+-+-+-+-+-+-+
+  // X:   |I|L|T|K|         | (mandatory if any of the below are used)
+  //      +-+-+-+-+-+-+-+-+-+
+  // I:   |PictureID (8/16b)| (optional)
+  //      +-+-+-+-+-+-+-+-+-+
+  // L:   |   TL0PIC_IDX    | (optional)
+  //      +-+-+-+-+-+-+-+-+-+
+  // T/K: |TID:Y|  KEYIDX   | (optional)
+  //      +-+-+-+-+-+-+-+-+-+
+
+  assert(packet_info.size > 0);
+  buffer[0] = 0;
+  if (XFieldPresent())            buffer[0] |= kXBit;
+  if (hdr_info_.nonReference)     buffer[0] |= kNBit;
+  if (packet_info.first_fragment) buffer[0] |= kSBit;
+  buffer[0] |= (packet_info.first_partition_ix & kPartIdField);
+
+  const int extension_length = WriteExtensionFields(buffer, buffer_length);
+
+  memcpy(&buffer[vp8_fixed_payload_descriptor_bytes_ + extension_length],
+         &payload_data_[packet_info.payload_start_pos], packet_info.size);
+
+  // Return total length of written data.
+  return packet_info.size + vp8_fixed_payload_descriptor_bytes_
+      + extension_length;
+}
+
+int RtpFormatVp8::WriteExtensionFields(WebRtc_UWord8* buffer,
+                                       int buffer_length) const {
+  int extension_length = 0;
+  if (XFieldPresent()) {
+    WebRtc_UWord8* x_field = buffer + vp8_fixed_payload_descriptor_bytes_;
+    *x_field = 0;
+    extension_length = 1;  // One octet for the X field.
+    if (PictureIdPresent()) {
+      if (WritePictureIDFields(x_field, buffer, buffer_length,
+                               &extension_length) < 0) {
+        return -1;
+      }
+    }
+    if (TL0PicIdxFieldPresent()) {
+      if (WriteTl0PicIdxFields(x_field, buffer, buffer_length,
+                               &extension_length) < 0) {
+        return -1;
+      }
+    }
+    if (TIDFieldPresent() || KeyIdxFieldPresent()) {
+      if (WriteTIDAndKeyIdxFields(x_field, buffer, buffer_length,
+                                  &extension_length) < 0) {
+        return -1;
+      }
+    }
+    assert(extension_length == PayloadDescriptorExtraLength());
+  }
+  return extension_length;
+}
+
+int RtpFormatVp8::WritePictureIDFields(WebRtc_UWord8* x_field,
+                                       WebRtc_UWord8* buffer,
+                                       int buffer_length,
+                                       int* extension_length) const {
+  *x_field |= kIBit;
+  const int pic_id_length = WritePictureID(
+      buffer + vp8_fixed_payload_descriptor_bytes_ + *extension_length,
+      buffer_length - vp8_fixed_payload_descriptor_bytes_
+      - *extension_length);
+  if (pic_id_length < 0) return -1;
+  *extension_length += pic_id_length;
+  return 0;
+}
+
+int RtpFormatVp8::WritePictureID(WebRtc_UWord8* buffer,
+                                 int buffer_length) const {
+  const WebRtc_UWord16 pic_id =
+      static_cast<WebRtc_UWord16> (hdr_info_.pictureId);
+  int picture_id_len = PictureIdLength();
+  if (picture_id_len > buffer_length) return -1;
+  if (picture_id_len == 2) {
+    buffer[0] = 0x80 | ((pic_id >> 8) & 0x7F);
+    buffer[1] = pic_id & 0xFF;
+  } else if (picture_id_len == 1) {
+    buffer[0] = pic_id & 0x7F;
+  }
+  return picture_id_len;
+}
+
+int RtpFormatVp8::WriteTl0PicIdxFields(WebRtc_UWord8* x_field,
+                                       WebRtc_UWord8* buffer,
+                                       int buffer_length,
+                                       int* extension_length) const {
+  if (buffer_length < vp8_fixed_payload_descriptor_bytes_ + *extension_length
+      + 1) {
+    return -1;
+  }
+  *x_field |= kLBit;
+  buffer[vp8_fixed_payload_descriptor_bytes_
+         + *extension_length] = hdr_info_.tl0PicIdx;
+  ++*extension_length;
+  return 0;
+}
+
+int RtpFormatVp8::WriteTIDAndKeyIdxFields(WebRtc_UWord8* x_field,
+                                          WebRtc_UWord8* buffer,
+                                          int buffer_length,
+                                          int* extension_length) const {
+  if (buffer_length < vp8_fixed_payload_descriptor_bytes_ + *extension_length
+      + 1) {
+    return -1;
+  }
+  WebRtc_UWord8* data_field =
+      &buffer[vp8_fixed_payload_descriptor_bytes_ + *extension_length];
+  *data_field = 0;
+  if (TIDFieldPresent()) {
+    *x_field |= kTBit;
+    assert(hdr_info_.temporalIdx >= 0 && hdr_info_.temporalIdx <= 3);
+    *data_field |= hdr_info_.temporalIdx << 6;
+    *data_field |= hdr_info_.layerSync ? kYBit : 0;
+  }
+  if (KeyIdxFieldPresent()) {
+    *x_field |= kKBit;
+    *data_field |= (hdr_info_.keyIdx & kKeyIdxField);
+  }
+  ++*extension_length;
+  return 0;
+}
+
+int RtpFormatVp8::PayloadDescriptorExtraLength() const {
+  int length_bytes = PictureIdLength();
+  if (TL0PicIdxFieldPresent()) ++length_bytes;
+  if (TIDFieldPresent() || KeyIdxFieldPresent()) ++length_bytes;
+  if (length_bytes > 0) ++length_bytes;  // Include the extension field.
+  return length_bytes;
+}
+
+int RtpFormatVp8::PictureIdLength() const {
+  if (hdr_info_.pictureId == kNoPictureId) {
+    return 0;
+  }
+  if (hdr_info_.pictureId <= 0x7F) {
+    return 1;
+  }
+  return 2;
+}
+
+bool RtpFormatVp8::XFieldPresent() const {
+  return (TIDFieldPresent() || TL0PicIdxFieldPresent() || PictureIdPresent()
+      || KeyIdxFieldPresent());
+}
+
+bool RtpFormatVp8::TIDFieldPresent() const {
+  assert((hdr_info_.layerSync == false) ||
+         (hdr_info_.temporalIdx != kNoTemporalIdx));
+  return (hdr_info_.temporalIdx != kNoTemporalIdx);
+}
+
+bool RtpFormatVp8::KeyIdxFieldPresent() const {
+  return (hdr_info_.keyIdx != kNoKeyIdx);
+}
+
+bool RtpFormatVp8::TL0PicIdxFieldPresent() const {
+  return (hdr_info_.tl0PicIdx != kNoTl0PicIdx);
+}
+}  // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/rtp_format_vp8.h b/src/modules/rtp_rtcp/source/rtp_format_vp8.h
new file mode 100644
index 0000000..f568f4d
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_format_vp8.h
@@ -0,0 +1,206 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the declaration of the VP8 packetizer class.
+ * A packetizer object is created for each encoded video frame. The
+ * constructor is called with the payload data and size,
+ * together with the fragmentation information and a packetizer mode
+ * of choice. Alternatively, if no fragmentation info is available, the
+ * second constructor can be used with only payload data and size; in that
+ * case the mode kEqualSize is used.
+ *
+ * After creating the packetizer, the method NextPacket is called
+ * repeatedly to get all packets for the frame. The method returns
+ * false as long as there are more packets left to fetch.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_H_
+
+#include <queue>
+#include <vector>
+
+#include "modules/interface/module_common_types.h"
+#include "system_wrappers/interface/constructor_magic.h"
+#include "typedefs.h"  // NOLINT(build/include)
+
+namespace webrtc {
+
+enum VP8PacketizerMode {
+  kStrict = 0,  // Split partitions if too large;
+                // never aggregate, balance size.
+  kAggregate,   // Split partitions if too large; aggregate whole partitions.
+  kEqualSize,   // Split entire payload without considering partition limits.
+                // This will produce equal size packets for the whole frame.
+  kNumModes,
+};
+
+// Packetizer for VP8.
+class RtpFormatVp8 {
+ public:
+  // Initialize with payload from encoder and fragmentation info.
+  // The payload_data must be exactly one encoded VP8 frame.
+  RtpFormatVp8(const WebRtc_UWord8* payload_data,
+               WebRtc_UWord32 payload_size,
+               const RTPVideoHeaderVP8& hdr_info,
+               int max_payload_len,
+               const RTPFragmentationHeader& fragmentation,
+               VP8PacketizerMode mode);
+
+  // Initialize without fragmentation info. Mode kEqualSize will be used.
+  // The payload_data must be exactly one encoded VP8 frame.
+  RtpFormatVp8(const WebRtc_UWord8* payload_data,
+               WebRtc_UWord32 payload_size,
+               const RTPVideoHeaderVP8& hdr_info,
+               int max_payload_len);
+
+  // Get the next payload with VP8 payload header.
+  // max_payload_len limits the sum length of payload and VP8 payload header.
+  // buffer is a pointer to where the output will be written.
+  // bytes_to_send is an output variable that will contain number of bytes
+  // written to buffer. Parameter last_packet is true for the last packet of
+  // the frame, false otherwise (i.e., call the function again to get the
+  // next packet).
+  // For the kStrict and kAggregate mode: returns the partition index from which
+  // the first payload byte in the packet is taken, with the first partition
+  // having index 0; returns negative on error.
+  // For the kEqualSize mode: returns 0 on success, return negative on error.
+  int NextPacket(WebRtc_UWord8* buffer,
+                 int* bytes_to_send,
+                 bool* last_packet);
+
+ private:
+  typedef struct {
+    int payload_start_pos;
+    int size;
+    bool first_fragment;
+    int first_partition_ix;
+  } InfoStruct;
+  typedef std::queue<InfoStruct> InfoQueue;
+  enum AggregationMode {
+    kAggrNone = 0,    // No aggregation.
+    kAggrPartitions,  // Aggregate intact partitions.
+    kAggrFragments    // Aggregate intact and fragmented partitions.
+  };
+
+  static const AggregationMode aggr_modes_[kNumModes];
+  static const bool balance_modes_[kNumModes];
+  static const bool separate_first_modes_[kNumModes];
+  static const int kXBit        = 0x80;
+  static const int kNBit        = 0x20;
+  static const int kSBit        = 0x10;
+  static const int kPartIdField = 0x0F;
+  static const int kKeyIdxField = 0x1F;
+  static const int kIBit        = 0x80;
+  static const int kLBit        = 0x40;
+  static const int kTBit        = 0x20;
+  static const int kKBit        = 0x10;
+  static const int kYBit        = 0x20;
+
+  // Calculate size of next chunk to send. Returns 0 if none can be sent.
+  int CalcNextSize(int max_payload_len, int remaining_bytes,
+                   bool split_payload) const;
+
+  // Calculate all packet sizes and load to packet info queue.
+  int GeneratePackets();
+
+  // Calculate all packet sizes using Vp8PartitionAggregator and load to packet
+  // info queue.
+  int GeneratePacketsBalancedAggregates();
+
+  // Helper function to GeneratePacketsBalancedAggregates(). Find all
+  // continuous sets of partitions smaller than the max payload size (not
+  // max_size), and aggregate them into balanced packets. The result is written
+  // to partition_vec, which is of the same length as the number of partitions.
+  // A value of -1 indicates that the partition is too large and must be split.
+  // Aggregates are numbered 0, 1, 2, etc. For each set of small partitions,
+  // the aggregate numbers restart at 0. Output values min_size and max_size
+  // will hold the smallest and largest resulting aggregates (i.e., not counting
+  // those that must be split).
+  void AggregateSmallPartitions(std::vector<int>* partition_vec,
+                                int* min_size,
+                                int* max_size);
+
+  // Insert packet into packet queue.
+  void QueuePacket(int start_pos,
+                   int packet_size,
+                   int first_partition_in_packet,
+                   bool start_on_new_fragment);
+
+  // Write the payload header and copy the payload to the buffer.
+  // The info in packet_info determines which part of the payload is written
+  // and what to write in the header fields.
+  int WriteHeaderAndPayload(const InfoStruct& packet_info,
+                            WebRtc_UWord8* buffer,
+                            int buffer_length) const;
+
+
+  // Write the X field and the appropriate extension fields to buffer.
+  // The function returns the extension length (including X field), or -1
+  // on error.
+  int WriteExtensionFields(WebRtc_UWord8* buffer, int buffer_length) const;
+
+  // Set the I bit in the x_field, and write PictureID to the appropriate
+  // position in buffer. The function returns 0 on success, -1 otherwise.
+  int WritePictureIDFields(WebRtc_UWord8* x_field, WebRtc_UWord8* buffer,
+                           int buffer_length, int* extension_length) const;
+
+  // Set the L bit in the x_field, and write Tl0PicIdx to the appropriate
+  // position in buffer. The function returns 0 on success, -1 otherwise.
+  int WriteTl0PicIdxFields(WebRtc_UWord8* x_field, WebRtc_UWord8* buffer,
+                           int buffer_length, int* extension_length) const;
+
+  // Set the T and K bits in the x_field, and write TID, Y and KeyIdx to the
+  // appropriate position in buffer. The function returns 0 on success,
+  // -1 otherwise.
+  int WriteTIDAndKeyIdxFields(WebRtc_UWord8* x_field, WebRtc_UWord8* buffer,
+                              int buffer_length, int* extension_length) const;
+
+  // Write the PictureID from codec_specific_info_ to buffer. One or two
+  // bytes are written, depending on magnitude of PictureID. The function
+  // returns the number of bytes written.
+  int WritePictureID(WebRtc_UWord8* buffer, int buffer_length) const;
+
+  // Calculate and return length (octets) of the variable header fields in
+  // the next header (i.e., header length in addition to vp8_header_bytes_).
+  int PayloadDescriptorExtraLength() const;
+
+  // Calculate and return length (octets) of PictureID field in the next
+  // header. Can be 0, 1, or 2.
+  int PictureIdLength() const;
+
+  // Check whether each of the optional fields will be included in the header.
+  bool XFieldPresent() const;
+  bool TIDFieldPresent() const;
+  bool KeyIdxFieldPresent() const;
+  bool TL0PicIdxFieldPresent() const;
+  bool PictureIdPresent() const { return (PictureIdLength() > 0); }
+
+  const WebRtc_UWord8* payload_data_;
+  const int payload_size_;
+  RTPFragmentationHeader part_info_;
+  const int vp8_fixed_payload_descriptor_bytes_;  // Length of VP8 payload
+                                                  // descriptors's fixed part.
+  const AggregationMode aggr_mode_;
+  const bool balance_;
+  const bool separate_first_;
+  const RTPVideoHeaderVP8 hdr_info_;
+  const int num_partitions_;
+  const int max_payload_len_;
+  InfoQueue packets_;
+  bool packets_calculated_;
+
+  DISALLOW_COPY_AND_ASSIGN(RtpFormatVp8);
+};
+
+}  // namespace
+
+#endif  // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_H_
diff --git a/src/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.cc b/src/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.cc
new file mode 100644
index 0000000..bf858c5
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.cc
@@ -0,0 +1,249 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include "modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h"
+
+#include "gtest/gtest.h"
+
+namespace webrtc {
+
+namespace test {
+
+RtpFormatVp8TestHelper::RtpFormatVp8TestHelper(const RTPVideoHeaderVP8* hdr)
+    : payload_data_(NULL),
+      buffer_(NULL),
+      data_ptr_(NULL),
+      fragmentation_(NULL),
+      hdr_info_(hdr),
+      sloppy_partitioning_(false),
+      inited_(false) {}
+
+RtpFormatVp8TestHelper::~RtpFormatVp8TestHelper() {
+  delete fragmentation_;
+  delete [] payload_data_;
+  delete [] buffer_;
+}
+
+bool RtpFormatVp8TestHelper::Init(const int* partition_sizes,
+                                  int num_partitions) {
+  if (inited_) return false;
+  fragmentation_ = new RTPFragmentationHeader;
+  fragmentation_->VerifyAndAllocateFragmentationHeader(num_partitions);
+  payload_size_ = 0;
+  // Calculate sum payload size.
+  for (int p = 0; p < num_partitions; ++p) {
+    payload_size_ += partition_sizes[p];
+  }
+  buffer_size_ = payload_size_ + 6;  // Add space for payload descriptor.
+  payload_data_ = new WebRtc_UWord8[payload_size_];
+  buffer_ = new WebRtc_UWord8[buffer_size_];
+  int j = 0;
+  // Loop through the partitions again.
+  for (int p = 0; p < num_partitions; ++p) {
+    fragmentation_->fragmentationLength[p] = partition_sizes[p];
+    fragmentation_->fragmentationOffset[p] = j;
+    for (int i = 0; i < partition_sizes[p]; ++i) {
+      assert(j < payload_size_);
+      payload_data_[j++] = p;  // Set the payload value to the partition index.
+    }
+  }
+  data_ptr_ = payload_data_;
+  inited_ = true;
+  return true;
+}
+
+void RtpFormatVp8TestHelper::GetAllPacketsAndCheck(
+    RtpFormatVp8* packetizer,
+    const int* expected_sizes,
+    const int* expected_part,
+    const bool* expected_frag_start,
+    int expected_num_packets) {
+  ASSERT_TRUE(inited_);
+  int send_bytes = 0;
+  bool last = false;
+  for (int i = 0; i < expected_num_packets; ++i) {
+    std::ostringstream ss;
+    ss << "Checking packet " << i;
+    SCOPED_TRACE(ss.str());
+    EXPECT_EQ(expected_part[i],
+              packetizer->NextPacket(buffer_, &send_bytes, &last));
+    CheckPacket(send_bytes, expected_sizes[i], last,
+                expected_frag_start[i]);
+  }
+  EXPECT_TRUE(last);
+}
+
+// Payload descriptor
+//       0 1 2 3 4 5 6 7
+//      +-+-+-+-+-+-+-+-+
+//      |X|R|N|S|PartID | (REQUIRED)
+//      +-+-+-+-+-+-+-+-+
+// X:   |I|L|T|K|  RSV  | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+// I:   |   PictureID   | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+// L:   |   TL0PICIDX   | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+// T/K: | TID | KEYIDX  | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+
+// First octet tests.
+#define EXPECT_BIT_EQ(x, n, a) EXPECT_EQ((((x) >> (n)) & 0x1), a)
+
+#define EXPECT_RSV_ZERO(x) EXPECT_EQ(((x) & 0xE0), 0)
+
+#define EXPECT_BIT_X_EQ(x, a) EXPECT_BIT_EQ(x, 7, a)
+
+#define EXPECT_BIT_N_EQ(x, a) EXPECT_BIT_EQ(x, 5, a)
+
+#define EXPECT_BIT_S_EQ(x, a) EXPECT_BIT_EQ(x, 4, a)
+
+#define EXPECT_PART_ID_EQ(x, a) EXPECT_EQ(((x) & 0x0F), a)
+
+// Extension fields tests
+#define EXPECT_BIT_I_EQ(x, a) EXPECT_BIT_EQ(x, 7, a)
+
+#define EXPECT_BIT_L_EQ(x, a) EXPECT_BIT_EQ(x, 6, a)
+
+#define EXPECT_BIT_T_EQ(x, a) EXPECT_BIT_EQ(x, 5, a)
+
+#define EXPECT_BIT_K_EQ(x, a) EXPECT_BIT_EQ(x, 4, a)
+
+#define EXPECT_TID_EQ(x, a) EXPECT_EQ((((x) & 0xC0) >> 6), a)
+
+#define EXPECT_BIT_Y_EQ(x, a) EXPECT_BIT_EQ(x, 5, a)
+
+#define EXPECT_KEYIDX_EQ(x, a) EXPECT_EQ(((x) & 0x1F), a)
+
+void RtpFormatVp8TestHelper::CheckHeader(bool frag_start) {
+  payload_start_ = 1;
+  EXPECT_BIT_EQ(buffer_[0], 6, 0);  // Check reserved bit.
+
+  if (hdr_info_->pictureId != kNoPictureId ||
+      hdr_info_->temporalIdx != kNoTemporalIdx ||
+      hdr_info_->tl0PicIdx != kNoTl0PicIdx ||
+      hdr_info_->keyIdx != kNoKeyIdx) {
+    EXPECT_BIT_X_EQ(buffer_[0], 1);
+    ++payload_start_;
+    CheckPictureID();
+    CheckTl0PicIdx();
+    CheckTIDAndKeyIdx();
+  } else {
+    EXPECT_BIT_X_EQ(buffer_[0], 0);
+  }
+
+  EXPECT_BIT_N_EQ(buffer_[0], hdr_info_->nonReference ? 1 : 0);
+  EXPECT_BIT_S_EQ(buffer_[0], frag_start ? 1 : 0);
+
+  // Check partition index.
+  if (!sloppy_partitioning_) {
+    // The test payload data is constructed such that the payload value is the
+    // same as the partition index.
+    EXPECT_EQ(buffer_[0] & 0x0F, buffer_[payload_start_]);
+  } else {
+    // Partition should be set to 0.
+    EXPECT_EQ(buffer_[0] & 0x0F, 0);
+  }
+}
+
+// Verify that the I bit and the PictureID field are both set in accordance
+// with the information in hdr_info_->pictureId.
+void RtpFormatVp8TestHelper::CheckPictureID() {
+  if (hdr_info_->pictureId != kNoPictureId) {
+    EXPECT_BIT_I_EQ(buffer_[1], 1);
+    if (hdr_info_->pictureId > 0x7F) {
+      EXPECT_BIT_EQ(buffer_[payload_start_], 7, 1);
+      EXPECT_EQ(buffer_[payload_start_] & 0x7F,
+                (hdr_info_->pictureId >> 8) & 0x7F);
+      EXPECT_EQ(buffer_[payload_start_ + 1],
+                hdr_info_->pictureId & 0xFF);
+      payload_start_ += 2;
+    } else {
+      EXPECT_BIT_EQ(buffer_[payload_start_], 7, 0);
+      EXPECT_EQ(buffer_[payload_start_] & 0x7F,
+                (hdr_info_->pictureId) & 0x7F);
+      payload_start_ += 1;
+    }
+  } else {
+    EXPECT_BIT_I_EQ(buffer_[1], 0);
+  }
+}
+
+// Verify that the L bit and the TL0PICIDX field are both set in accordance
+// with the information in hdr_info_->tl0PicIdx.
+void RtpFormatVp8TestHelper::CheckTl0PicIdx() {
+  if (hdr_info_->tl0PicIdx != kNoTl0PicIdx) {
+    EXPECT_BIT_L_EQ(buffer_[1], 1);
+    EXPECT_EQ(buffer_[payload_start_], hdr_info_->tl0PicIdx);
+    ++payload_start_;
+  } else {
+    EXPECT_BIT_L_EQ(buffer_[1], 0);
+  }
+}
+
+// Verify that the T bit and the TL0PICIDX field, and the K bit and KEYIDX
+// field are all set in accordance with the information in
+// hdr_info_->temporalIdx and hdr_info_->keyIdx, respectively.
+void RtpFormatVp8TestHelper::CheckTIDAndKeyIdx() {
+  if (hdr_info_->temporalIdx == kNoTemporalIdx &&
+      hdr_info_->keyIdx == kNoKeyIdx) {
+    EXPECT_BIT_T_EQ(buffer_[1], 0);
+    EXPECT_BIT_K_EQ(buffer_[1], 0);
+    return;
+  }
+  if (hdr_info_->temporalIdx != kNoTemporalIdx) {
+    EXPECT_BIT_T_EQ(buffer_[1], 1);
+    EXPECT_TID_EQ(buffer_[payload_start_], hdr_info_->temporalIdx);
+    EXPECT_BIT_Y_EQ(buffer_[payload_start_], hdr_info_->layerSync ? 1 : 0);
+  } else {
+    EXPECT_BIT_T_EQ(buffer_[1], 0);
+    EXPECT_TID_EQ(buffer_[payload_start_], 0);
+    EXPECT_BIT_Y_EQ(buffer_[payload_start_], 0);
+  }
+  if (hdr_info_->keyIdx != kNoKeyIdx) {
+    EXPECT_BIT_K_EQ(buffer_[1], 1);
+    EXPECT_KEYIDX_EQ(buffer_[payload_start_], hdr_info_->keyIdx);
+  } else {
+    EXPECT_BIT_K_EQ(buffer_[1], 0);
+    EXPECT_KEYIDX_EQ(buffer_[payload_start_], 0);
+  }
+  ++payload_start_;
+}
+
+// Verify that the payload (i.e., after the headers) of the packet stored in
+// buffer_ is identical to the expected (as found in data_ptr_).
+void RtpFormatVp8TestHelper::CheckPayload(int payload_end) {
+  for (int i = payload_start_; i < payload_end; ++i, ++data_ptr_)
+    EXPECT_EQ(buffer_[i], *data_ptr_);
+}
+
+// Verify that the input variable "last" agrees with the position of data_ptr_.
+// If data_ptr_ has advanced payload_size_ bytes from the start (payload_data_)
+// we are at the end and last should be true. Otherwise, it should be false.
+void RtpFormatVp8TestHelper::CheckLast(bool last) const {
+  EXPECT_EQ(last, data_ptr_ == payload_data_ + payload_size_);
+}
+
+// Verify the contents of a packet. Check the length versus expected_bytes,
+// the header, payload, and "last" flag.
+void RtpFormatVp8TestHelper::CheckPacket(int send_bytes,
+                                         int expect_bytes,
+                                         bool last,
+                                         bool frag_start) {
+  EXPECT_EQ(expect_bytes, send_bytes);
+  CheckHeader(frag_start);
+  CheckPayload(send_bytes);
+  CheckLast(last);
+}
+
+}  // namespace test
+
+}  // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h b/src/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h
new file mode 100644
index 0000000..f9bf686
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h
@@ -0,0 +1,75 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains the class RtpFormatVp8TestHelper. The class is
+// responsible for setting up a fake VP8 bitstream according to the
+// RTPVideoHeaderVP8 header, and partition information. After initialization,
+// an RTPFragmentationHeader is provided so that the tester can create a
+// packetizer. The packetizer can then be provided to this helper class, which
+// will then extract all packets and compare to the expected outcome.
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_TEST_HELPER_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_TEST_HELPER_H_
+
+#include "modules/interface/module_common_types.h"
+#include "modules/rtp_rtcp/source/rtp_format_vp8.h"
+#include "system_wrappers/interface/constructor_magic.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+namespace test {
+
+class RtpFormatVp8TestHelper {
+ public:
+  explicit RtpFormatVp8TestHelper(const RTPVideoHeaderVP8* hdr);
+  ~RtpFormatVp8TestHelper();
+  bool Init(const int* partition_sizes, int num_partitions);
+  void GetAllPacketsAndCheck(RtpFormatVp8* packetizer,
+                             const int* expected_sizes,
+                             const int* expected_part,
+                             const bool* expected_frag_start,
+                             int expected_num_packets);
+
+  uint8_t* payload_data() const { return payload_data_; }
+  int payload_size() const { return payload_size_; }
+  RTPFragmentationHeader* fragmentation() const { return fragmentation_; }
+  int buffer_size() const { return buffer_size_; }
+  void set_sloppy_partitioning(bool value) { sloppy_partitioning_ = value; }
+
+ private:
+  void CheckHeader(bool frag_start);
+  void CheckPictureID();
+  void CheckTl0PicIdx();
+  void CheckTIDAndKeyIdx();
+  void CheckPayload(int payload_end);
+  void CheckLast(bool last) const;
+  void CheckPacket(int send_bytes, int expect_bytes, bool last,
+                   bool frag_start);
+
+  uint8_t* payload_data_;
+  uint8_t* buffer_;
+  uint8_t* data_ptr_;
+  RTPFragmentationHeader* fragmentation_;
+  const RTPVideoHeaderVP8* hdr_info_;
+  int payload_start_;
+  int payload_size_;
+  int buffer_size_;
+  bool sloppy_partitioning_;
+  bool inited_;
+
+  DISALLOW_COPY_AND_ASSIGN(RtpFormatVp8TestHelper);
+};
+
+}  // namespace test
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VP8_TEST_HELPER_H_
diff --git a/src/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc b/src/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc
new file mode 100644
index 0000000..be935b8
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_format_vp8_unittest.cc
@@ -0,0 +1,338 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file includes unit tests for the VP8 packetizer.
+ */
+
+#include <gtest/gtest.h>
+
+#include "compile_assert.h"
+
+#include "modules/rtp_rtcp/source/rtp_format_vp8.h"
+#include "modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+class RtpFormatVp8Test : public ::testing::Test {
+ protected:
+  RtpFormatVp8Test() : helper_(NULL) {}
+  virtual void TearDown() { delete helper_; }
+  bool Init(const int* partition_sizes, int num_partitions) {
+    hdr_info_.pictureId = kNoPictureId;
+    hdr_info_.nonReference = false;
+    hdr_info_.temporalIdx = kNoTemporalIdx;
+    hdr_info_.layerSync = false;
+    hdr_info_.tl0PicIdx = kNoTl0PicIdx;
+    hdr_info_.keyIdx = kNoKeyIdx;
+    if (helper_ != NULL) return false;
+    helper_ = new test::RtpFormatVp8TestHelper(&hdr_info_);
+    return helper_->Init(partition_sizes, num_partitions);
+  }
+
+  RTPVideoHeaderVP8 hdr_info_;
+  test::RtpFormatVp8TestHelper* helper_;
+};
+
+TEST_F(RtpFormatVp8Test, TestStrictMode) {
+  const int kSizeVector[] = {10, 8, 27};
+  const int kNumPartitions = sizeof(kSizeVector) / sizeof(kSizeVector[0]);
+  ASSERT_TRUE(Init(kSizeVector, kNumPartitions));
+
+  hdr_info_.pictureId = 200;  // > 0x7F should produce 2-byte PictureID.
+  const int kMaxSize = 13;
+  RtpFormatVp8 packetizer(helper_->payload_data(),
+                          helper_->payload_size(),
+                          hdr_info_,
+                          kMaxSize,
+                          *(helper_->fragmentation()),
+                          kStrict);
+
+  // The expected sizes are obtained by running a verified good implementation.
+  const int kExpectedSizes[] = {9, 9, 12, 11, 11, 11, 10};
+  const int kExpectedPart[] = {0, 0, 1, 2, 2, 2, 2};
+  const bool kExpectedFragStart[] =
+      {true, false, true, true, false, false, false};
+  const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedPart) / sizeof(kExpectedPart[0]));
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0]));
+
+  helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
+                                 kExpectedFragStart, kExpectedNum);
+}
+
+TEST_F(RtpFormatVp8Test, TestAggregateMode) {
+  const int kSizeVector[] = {60, 10, 10};
+  const int kNumPartitions = sizeof(kSizeVector) / sizeof(kSizeVector[0]);
+  ASSERT_TRUE(Init(kSizeVector, kNumPartitions));
+
+  hdr_info_.pictureId = 20;  // <= 0x7F should produce 1-byte PictureID.
+  const int kMaxSize = 25;
+  RtpFormatVp8 packetizer(helper_->payload_data(),
+                          helper_->payload_size(),
+                          hdr_info_,
+                          kMaxSize,
+                          *(helper_->fragmentation()),
+                          kAggregate);
+
+  // The expected sizes are obtained by running a verified good implementation.
+  const int kExpectedSizes[] = {23, 23, 23, 23};
+  const int kExpectedPart[] = {0, 0, 0, 1};
+  const bool kExpectedFragStart[] = {true, false, false, true};
+  const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedPart) / sizeof(kExpectedPart[0]));
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0]));
+
+  helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
+                                 kExpectedFragStart, kExpectedNum);
+}
+
+TEST_F(RtpFormatVp8Test, TestAggregateModeManyPartitions1) {
+  const int kSizeVector[] = {1600, 200, 200, 200, 200, 200, 200, 200, 200};
+  const int kNumPartitions = sizeof(kSizeVector) / sizeof(kSizeVector[0]);
+  ASSERT_TRUE(Init(kSizeVector, kNumPartitions));
+
+  hdr_info_.pictureId = 20;  // <= 0x7F should produce 1-byte PictureID.
+  const int kMaxSize = 1500;
+  RtpFormatVp8 packetizer(helper_->payload_data(),
+                          helper_->payload_size(),
+                          hdr_info_,
+                          kMaxSize,
+                          *(helper_->fragmentation()),
+                          kAggregate);
+
+  // The expected sizes are obtained by running a verified good implementation.
+  const int kExpectedSizes[] = {803, 803, 803, 803};
+  const int kExpectedPart[] = {0, 0, 1, 5};
+  const bool kExpectedFragStart[] = {true, false, true, true};
+  const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedPart) / sizeof(kExpectedPart[0]));
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0]));
+
+  helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
+                                 kExpectedFragStart, kExpectedNum);
+}
+
+TEST_F(RtpFormatVp8Test, TestAggregateModeManyPartitions2) {
+  const int kSizeVector[] = {1599, 200, 200, 200, 1600, 200, 200, 200, 200};
+  const int kNumPartitions = sizeof(kSizeVector) / sizeof(kSizeVector[0]);
+  ASSERT_TRUE(Init(kSizeVector, kNumPartitions));
+
+  hdr_info_.pictureId = 20;  // <= 0x7F should produce 1-byte PictureID.
+  const int kMaxSize = 1500;
+  RtpFormatVp8 packetizer(helper_->payload_data(),
+                          helper_->payload_size(),
+                          hdr_info_,
+                          kMaxSize,
+                          *(helper_->fragmentation()),
+                          kAggregate);
+
+  // The expected sizes are obtained by running a verified good implementation.
+  const int kExpectedSizes[] = {803, 802, 603, 803, 803, 803};
+  const int kExpectedPart[] = {0, 0, 1, 4, 4, 5};
+  const bool kExpectedFragStart[] = {true, false, true, true, false, true};
+  const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedPart) / sizeof(kExpectedPart[0]));
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0]));
+
+  helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
+                                 kExpectedFragStart, kExpectedNum);
+}
+
+TEST_F(RtpFormatVp8Test, TestAggregateModeTwoLargePartitions) {
+  const int kSizeVector[] = {1654, 2268};
+  const int kNumPartitions = sizeof(kSizeVector) / sizeof(kSizeVector[0]);
+  ASSERT_TRUE(Init(kSizeVector, kNumPartitions));
+
+  hdr_info_.pictureId = 20;  // <= 0x7F should produce 1-byte PictureID.
+  const int kMaxSize = 1460;
+  RtpFormatVp8 packetizer(helper_->payload_data(),
+                          helper_->payload_size(),
+                          hdr_info_,
+                          kMaxSize,
+                          *(helper_->fragmentation()),
+                          kAggregate);
+
+  // The expected sizes are obtained by running a verified good implementation.
+  const int kExpectedSizes[] = {830, 830, 1137, 1137};
+  const int kExpectedPart[] = {0, 0, 1, 1};
+  const bool kExpectedFragStart[] = {true, false, true, false};
+  const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedPart) / sizeof(kExpectedPart[0]));
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0]));
+
+  helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
+                                 kExpectedFragStart, kExpectedNum);
+}
+
+// Verify that EqualSize mode is forced if fragmentation info is missing.
+TEST_F(RtpFormatVp8Test, TestEqualSizeModeFallback) {
+  const int kSizeVector[] = {10, 10, 10};
+  const int kNumPartitions = sizeof(kSizeVector) / sizeof(kSizeVector[0]);
+  ASSERT_TRUE(Init(kSizeVector, kNumPartitions));
+
+  hdr_info_.pictureId = 200;  // > 0x7F should produce 2-byte PictureID
+  const int kMaxSize = 12;  // Small enough to produce 4 packets.
+  RtpFormatVp8 packetizer(helper_->payload_data(),
+                          helper_->payload_size(),
+                          hdr_info_,
+                          kMaxSize);
+
+  // Expecting three full packets, and one with the remainder.
+  const int kExpectedSizes[] = {12, 11, 12, 11};
+  const int kExpectedPart[] = {0, 0, 0, 0};  // Always 0 for equal size mode.
+  // Frag start only true for first packet in equal size mode.
+  const bool kExpectedFragStart[] = {true, false, false, false};
+  const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedPart) / sizeof(kExpectedPart[0]));
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0]));
+
+  helper_->set_sloppy_partitioning(true);
+  helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
+                                 kExpectedFragStart, kExpectedNum);
+}
+
+// Verify that non-reference bit is set. EqualSize mode fallback is expected.
+TEST_F(RtpFormatVp8Test, TestNonReferenceBit) {
+  const int kSizeVector[] = {10, 10, 10};
+  const int kNumPartitions = sizeof(kSizeVector) / sizeof(kSizeVector[0]);
+  ASSERT_TRUE(Init(kSizeVector, kNumPartitions));
+
+  hdr_info_.nonReference = true;
+  const int kMaxSize = 25;  // Small enough to produce two packets.
+  RtpFormatVp8 packetizer(helper_->payload_data(),
+                          helper_->payload_size(),
+                          hdr_info_,
+                          kMaxSize);
+
+  // EqualSize mode => First packet full; other not.
+  const int kExpectedSizes[] = {16, 16};
+  const int kExpectedPart[] = {0, 0};  // Always 0 for equal size mode.
+  // Frag start only true for first packet in equal size mode.
+  const bool kExpectedFragStart[] = {true, false};
+  const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedPart) / sizeof(kExpectedPart[0]));
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0]));
+
+  helper_->set_sloppy_partitioning(true);
+  helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
+                                 kExpectedFragStart, kExpectedNum);
+}
+
+// Verify Tl0PicIdx and TID fields, and layerSync bit.
+TEST_F(RtpFormatVp8Test, TestTl0PicIdxAndTID) {
+  const int kSizeVector[] = {10, 10, 10};
+  const int kNumPartitions = sizeof(kSizeVector) / sizeof(kSizeVector[0]);
+  ASSERT_TRUE(Init(kSizeVector, kNumPartitions));
+
+  hdr_info_.tl0PicIdx = 117;
+  hdr_info_.temporalIdx = 2;
+  hdr_info_.layerSync = true;
+  // kMaxSize is only limited by allocated buffer size.
+  const int kMaxSize = helper_->buffer_size();
+  RtpFormatVp8 packetizer(helper_->payload_data(),
+                          helper_->payload_size(),
+                          hdr_info_,
+                          kMaxSize,
+                          *(helper_->fragmentation()),
+                          kAggregate);
+
+  // Expect one single packet of payload_size() + 4 bytes header.
+  const int kExpectedSizes[1] = {helper_->payload_size() + 4};
+  const int kExpectedPart[1] = {0};  // Packet starts with partition 0.
+  const bool kExpectedFragStart[1] = {true};
+  const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedPart) / sizeof(kExpectedPart[0]));
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0]));
+
+  helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
+                                 kExpectedFragStart, kExpectedNum);
+}
+
+// Verify KeyIdx field.
+TEST_F(RtpFormatVp8Test, TestKeyIdx) {
+  const int kSizeVector[] = {10, 10, 10};
+  const int kNumPartitions = sizeof(kSizeVector) / sizeof(kSizeVector[0]);
+  ASSERT_TRUE(Init(kSizeVector, kNumPartitions));
+
+  hdr_info_.keyIdx = 17;
+  // kMaxSize is only limited by allocated buffer size.
+  const int kMaxSize = helper_->buffer_size();
+  RtpFormatVp8 packetizer(helper_->payload_data(),
+                          helper_->payload_size(),
+                          hdr_info_,
+                          kMaxSize,
+                          *(helper_->fragmentation()),
+                          kAggregate);
+
+  // Expect one single packet of payload_size() + 3 bytes header.
+  const int kExpectedSizes[1] = {helper_->payload_size() + 3};
+  const int kExpectedPart[1] = {0};  // Packet starts with partition 0.
+  const bool kExpectedFragStart[1] = {true};
+  const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedPart) / sizeof(kExpectedPart[0]));
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0]));
+
+  helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
+                                 kExpectedFragStart, kExpectedNum);
+}
+
+// Verify TID field and KeyIdx field in combination.
+TEST_F(RtpFormatVp8Test, TestTIDAndKeyIdx) {
+  const int kSizeVector[] = {10, 10, 10};
+  const int kNumPartitions = sizeof(kSizeVector) / sizeof(kSizeVector[0]);
+  ASSERT_TRUE(Init(kSizeVector, kNumPartitions));
+
+  hdr_info_.temporalIdx = 1;
+  hdr_info_.keyIdx = 5;
+  // kMaxSize is only limited by allocated buffer size.
+  const int kMaxSize = helper_->buffer_size();
+  RtpFormatVp8 packetizer(helper_->payload_data(),
+                          helper_->payload_size(),
+                          hdr_info_,
+                          kMaxSize,
+                          *(helper_->fragmentation()),
+                          kAggregate);
+
+  // Expect one single packet of payload_size() + 3 bytes header.
+  const int kExpectedSizes[1] = {helper_->payload_size() + 3};
+  const int kExpectedPart[1] = {0};  // Packet starts with partition 0.
+  const bool kExpectedFragStart[1] = {true};
+  const int kExpectedNum = sizeof(kExpectedSizes) / sizeof(kExpectedSizes[0]);
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedPart) / sizeof(kExpectedPart[0]));
+  COMPILE_ASSERT(kExpectedNum ==
+      sizeof(kExpectedFragStart) / sizeof(kExpectedFragStart[0]));
+
+  helper_->GetAllPacketsAndCheck(&packetizer, kExpectedSizes, kExpectedPart,
+                                 kExpectedFragStart, kExpectedNum);
+}
+
+}  // namespace
diff --git a/src/modules/rtp_rtcp/source/rtp_header_extension.cc b/src/modules/rtp_rtcp/source/rtp_header_extension.cc
new file mode 100644
index 0000000..58b01bf
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_header_extension.cc
@@ -0,0 +1,176 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+
+#include "common_types.h"
+#include "rtp_header_extension.h"
+
+namespace webrtc {
+
+RtpHeaderExtensionMap::RtpHeaderExtensionMap() {
+}
+
+RtpHeaderExtensionMap::~RtpHeaderExtensionMap() {
+  Erase();
+}
+
+void RtpHeaderExtensionMap::Erase() {
+  while (!extensionMap_.empty()) {
+    std::map<uint8_t, HeaderExtension*>::iterator it =
+        extensionMap_.begin();
+    delete it->second;
+    extensionMap_.erase(it);
+  }
+}
+
+int32_t RtpHeaderExtensionMap::Register(const RTPExtensionType type,
+                                        const uint8_t id) {
+  if (id < 1 || id > 14) {
+    return -1;
+  }
+  std::map<uint8_t, HeaderExtension*>::iterator it =
+      extensionMap_.find(id);
+  if (it != extensionMap_.end()) {
+    return -1;
+  }
+  extensionMap_[id] = new HeaderExtension(type);
+  return 0;
+}
+
+int32_t RtpHeaderExtensionMap::Deregister(const RTPExtensionType type) {
+  uint8_t id;
+  if (GetId(type, &id) != 0) {
+    return 0;
+  }
+  std::map<uint8_t, HeaderExtension*>::iterator it =
+      extensionMap_.find(id);
+  if (it == extensionMap_.end()) {
+    return -1;
+  }
+  delete it->second;
+  extensionMap_.erase(it);
+  return 0;
+}
+
+int32_t RtpHeaderExtensionMap::GetType(const uint8_t id,
+                                       RTPExtensionType* type) const {
+  assert(type);
+  std::map<uint8_t, HeaderExtension*>::const_iterator it =
+      extensionMap_.find(id);
+  if (it == extensionMap_.end()) {
+    return -1;
+  }
+  HeaderExtension* extension = it->second;
+  *type = extension->type;
+  return 0;
+}
+
+int32_t RtpHeaderExtensionMap::GetId(const RTPExtensionType type,
+                                     uint8_t* id) const {
+  assert(id);
+  std::map<uint8_t, HeaderExtension*>::const_iterator it =
+      extensionMap_.begin();
+
+  while (it != extensionMap_.end()) {
+    HeaderExtension* extension = it->second;
+    if (extension->type == type) {
+      *id = it->first;
+      return 0;
+    }
+    it++;
+  }
+  return -1;
+}
+
+uint16_t RtpHeaderExtensionMap::GetTotalLengthInBytes() const {
+  // Get length for each extension block.
+  uint16_t length = 0;
+  std::map<uint8_t, HeaderExtension*>::const_iterator it =
+      extensionMap_.begin();
+  while (it != extensionMap_.end()) {
+    HeaderExtension* extension = it->second;
+    length += extension->length;
+    it++;
+  }
+  // Add RTP extension header length.
+  if (length > 0) {
+    length += RTP_ONE_BYTE_HEADER_LENGTH_IN_BYTES;
+  }
+  return length;
+}
+
+int32_t RtpHeaderExtensionMap::GetLengthUntilBlockStartInBytes(
+    const RTPExtensionType type) const {
+  uint8_t id;
+  if (GetId(type, &id) != 0) {
+    // Not registered.
+    return -1;
+  }
+  // Get length until start of extension block type.
+  uint16_t length = RTP_ONE_BYTE_HEADER_LENGTH_IN_BYTES;
+
+  std::map<uint8_t, HeaderExtension*>::const_iterator it =
+      extensionMap_.begin();
+  while (it != extensionMap_.end()) {
+    HeaderExtension* extension = it->second;
+    if (extension->type == type) {
+      break;
+    } else {
+      length += extension->length;
+    }
+    it++;
+  }
+  return length;
+}
+
+int32_t RtpHeaderExtensionMap::Size() const {
+  return extensionMap_.size();
+}
+
+RTPExtensionType RtpHeaderExtensionMap::First() const {
+  std::map<uint8_t, HeaderExtension*>::const_iterator it =
+      extensionMap_.begin();
+  if (it == extensionMap_.end()) {
+     return kRtpExtensionNone;
+  }
+  HeaderExtension* extension = it->second;
+  return extension->type;
+}
+
+RTPExtensionType RtpHeaderExtensionMap::Next(RTPExtensionType type) const {
+  uint8_t id;
+  if (GetId(type, &id) != 0) {
+    return kRtpExtensionNone;
+  }
+  std::map<uint8_t, HeaderExtension*>::const_iterator it =
+      extensionMap_.find(id);
+  if (it == extensionMap_.end()) {
+    return kRtpExtensionNone;
+  }
+  it++;
+  if (it == extensionMap_.end()) {
+    return kRtpExtensionNone;
+  }
+  HeaderExtension* extension = it->second;
+  return extension->type;
+}
+
+void RtpHeaderExtensionMap::GetCopy(RtpHeaderExtensionMap* map) const {
+  assert(map);
+  std::map<uint8_t, HeaderExtension*>::const_iterator it =
+      extensionMap_.begin();
+  while (it != extensionMap_.end()) {
+    HeaderExtension* extension = it->second;
+    map->Register(extension->type, it->first);
+    it++;
+  }
+}
+} // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/rtp_header_extension.h b/src/modules/rtp_rtcp/source/rtp_header_extension.h
new file mode 100644
index 0000000..bb6dd81
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_header_extension.h
@@ -0,0 +1,72 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_RTP_HEADER_EXTENSION_H_
+#define WEBRTC_MODULES_RTP_RTCP_RTP_HEADER_EXTENSION_H_
+
+#include <map>
+
+#include "modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+enum {RTP_ONE_BYTE_HEADER_EXTENSION = 0xbede};
+
+enum {
+   RTP_ONE_BYTE_HEADER_LENGTH_IN_BYTES = 4,
+   TRANSMISSION_TIME_OFFSET_LENGTH_IN_BYTES = 4
+};
+
+struct HeaderExtension {
+  HeaderExtension(RTPExtensionType extension_type)
+    : type(extension_type),
+      length(0) {
+     if (type == kRtpExtensionTransmissionTimeOffset) {
+       length = TRANSMISSION_TIME_OFFSET_LENGTH_IN_BYTES;
+     }
+   }
+
+   const RTPExtensionType type;
+   uint8_t length;
+};
+
+class RtpHeaderExtensionMap {
+ public:
+  RtpHeaderExtensionMap();
+  ~RtpHeaderExtensionMap();
+
+  void Erase();
+
+  int32_t Register(const RTPExtensionType type, const uint8_t id);
+
+  int32_t Deregister(const RTPExtensionType type);
+
+  int32_t GetType(const uint8_t id, RTPExtensionType* type) const;
+
+  int32_t GetId(const RTPExtensionType type, uint8_t* id) const;
+
+  uint16_t GetTotalLengthInBytes() const;
+
+  int32_t GetLengthUntilBlockStartInBytes(const RTPExtensionType type) const;
+
+  void GetCopy(RtpHeaderExtensionMap* map) const;
+
+  int32_t Size() const;
+
+  RTPExtensionType First() const;
+
+  RTPExtensionType Next(RTPExtensionType type) const;
+
+ private:
+  std::map<uint8_t, HeaderExtension*> extensionMap_;
+};
+}
+#endif // WEBRTC_MODULES_RTP_RTCP_RTP_HEADER_EXTENSION_H_
diff --git a/src/modules/rtp_rtcp/source/rtp_header_extension_unittest.cc b/src/modules/rtp_rtcp/source/rtp_header_extension_unittest.cc
new file mode 100644
index 0000000..e4160c7
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_header_extension_unittest.cc
@@ -0,0 +1,112 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file includes unit tests for the RtpHeaderExtensionMap.
+ */
+
+#include <gtest/gtest.h>
+
+#include "rtp_header_extension.h"
+#include "rtp_rtcp_defines.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+class RtpHeaderExtensionTest : public ::testing::Test {
+ protected:
+  RtpHeaderExtensionTest() {}
+  ~RtpHeaderExtensionTest() {}
+
+  RtpHeaderExtensionMap map_;
+  enum {kId = 3};
+};
+
+TEST_F(RtpHeaderExtensionTest, Register) {
+  EXPECT_EQ(0, map_.Size());
+  EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
+  EXPECT_EQ(1, map_.Size());
+  EXPECT_EQ(0, map_.Deregister(kRtpExtensionTransmissionTimeOffset));
+  EXPECT_EQ(0, map_.Size());
+}
+
+TEST_F(RtpHeaderExtensionTest, RegisterIllegalArg) {
+  // Valid range for id: [1-14].
+  EXPECT_EQ(-1, map_.Register(kRtpExtensionTransmissionTimeOffset, 0));
+  EXPECT_EQ(-1, map_.Register(kRtpExtensionTransmissionTimeOffset, 15));
+}
+
+TEST_F(RtpHeaderExtensionTest, NonUniqueId) {
+  EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
+  EXPECT_EQ(-1, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
+}
+
+TEST_F(RtpHeaderExtensionTest, GetTotalLength) {
+  EXPECT_EQ(0, map_.GetTotalLengthInBytes());
+  EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
+  EXPECT_EQ(RTP_ONE_BYTE_HEADER_LENGTH_IN_BYTES +
+            TRANSMISSION_TIME_OFFSET_LENGTH_IN_BYTES,
+            map_.GetTotalLengthInBytes());
+}
+
+TEST_F(RtpHeaderExtensionTest, GetLengthUntilBlockStart) {
+  EXPECT_EQ(-1, map_.GetLengthUntilBlockStartInBytes(
+      kRtpExtensionTransmissionTimeOffset));
+  EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
+  EXPECT_EQ(RTP_ONE_BYTE_HEADER_LENGTH_IN_BYTES,
+      map_.GetLengthUntilBlockStartInBytes(
+      kRtpExtensionTransmissionTimeOffset));
+}
+
+TEST_F(RtpHeaderExtensionTest, GetType) {
+  RTPExtensionType typeOut;
+  EXPECT_EQ(-1, map_.GetType(kId, &typeOut));
+
+  EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
+  EXPECT_EQ(0, map_.GetType(kId, &typeOut));
+  EXPECT_EQ(kRtpExtensionTransmissionTimeOffset, typeOut);
+}
+
+TEST_F(RtpHeaderExtensionTest, GetId) {
+  uint8_t idOut;
+  EXPECT_EQ(-1, map_.GetId(kRtpExtensionTransmissionTimeOffset, &idOut));
+
+  EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
+  EXPECT_EQ(0, map_.GetId(kRtpExtensionTransmissionTimeOffset, &idOut));
+  EXPECT_EQ(kId, idOut);
+}
+
+TEST_F(RtpHeaderExtensionTest, IterateTypes) {
+  EXPECT_EQ(kRtpExtensionNone, map_.First());
+  EXPECT_EQ(kRtpExtensionNone, map_.Next(kRtpExtensionTransmissionTimeOffset));
+
+  EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
+
+  EXPECT_EQ(kRtpExtensionTransmissionTimeOffset, map_.First());
+  EXPECT_EQ(kRtpExtensionNone, map_.Next(kRtpExtensionTransmissionTimeOffset));
+}
+
+TEST_F(RtpHeaderExtensionTest, GetCopy) {
+  EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
+
+  RtpHeaderExtensionMap mapOut;
+  map_.GetCopy(&mapOut);
+  EXPECT_EQ(1, mapOut.Size());
+  EXPECT_EQ(kRtpExtensionTransmissionTimeOffset, mapOut.First());
+}
+
+TEST_F(RtpHeaderExtensionTest, Erase) {
+  EXPECT_EQ(0, map_.Register(kRtpExtensionTransmissionTimeOffset, kId));
+  EXPECT_EQ(1, map_.Size());
+  map_.Erase();
+  EXPECT_EQ(0, map_.Size());
+}
+}  // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/rtp_packet_history.cc b/src/modules/rtp_rtcp/source/rtp_packet_history.cc
new file mode 100644
index 0000000..3f09f6d
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_packet_history.cc
@@ -0,0 +1,275 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtp_packet_history.h"
+
+#include <assert.h>
+#include <cstring>   // memset
+
+#include "critical_section_wrapper.h"
+#include "rtp_utility.h"
+#include "trace.h"
+
+namespace webrtc {
+
+RTPPacketHistory::RTPPacketHistory(RtpRtcpClock* clock)
+  : clock_(*clock),
+    critsect_(CriticalSectionWrapper::CreateCriticalSection()),
+    store_(false),
+    prev_index_(0),
+    max_packet_length_(0) {
+}
+
+RTPPacketHistory::~RTPPacketHistory() {
+  Free();
+  delete critsect_;
+}
+
+void RTPPacketHistory::SetStorePacketsStatus(bool enable, 
+                                             uint16_t number_to_store) {
+  if (enable) {
+    Allocate(number_to_store);
+  } else {
+    Free();
+  }
+}
+
+void RTPPacketHistory::Allocate(uint16_t number_to_store) {
+  assert(number_to_store > 0);
+  webrtc::CriticalSectionScoped cs(*critsect_); 
+  if (store_) {
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
+        "SetStorePacketsStatus already set, number: %d", number_to_store);
+    return;
+  }
+
+  store_ = true;
+  stored_packets_.resize(number_to_store);
+  stored_seq_nums_.resize(number_to_store);
+  stored_lengths_.resize(number_to_store);
+  stored_times_.resize(number_to_store);
+  stored_resend_times_.resize(number_to_store);
+  stored_types_.resize(number_to_store);
+}
+
+void RTPPacketHistory::Free() {
+  webrtc::CriticalSectionScoped cs(*critsect_);
+  if (!store_) {
+    return;
+  }
+
+  std::vector<std::vector<uint8_t> >::iterator it;
+  for (it = stored_packets_.begin(); it != stored_packets_.end(); ++it) {   
+    it->clear();
+  }
+
+  stored_packets_.clear();
+  stored_seq_nums_.clear();
+  stored_lengths_.clear();
+  stored_times_.clear();
+  stored_resend_times_.clear();
+  stored_types_.clear();
+
+  store_ = false;
+  prev_index_ = 0;
+  max_packet_length_ = 0;
+}
+
+bool RTPPacketHistory::StorePackets() const {
+  webrtc::CriticalSectionScoped cs(*critsect_);
+  return store_;
+}
+
+// private, lock should already be taken
+void RTPPacketHistory::VerifyAndAllocatePacketLength(uint16_t packet_length) {
+  assert(packet_length > 0);
+  if (!store_) {
+    return;
+  }
+
+  if (packet_length <= max_packet_length_) {
+    return;
+  }
+
+  std::vector<std::vector<uint8_t> >::iterator it;
+  for (it = stored_packets_.begin(); it != stored_packets_.end(); ++it) {
+    it->resize(packet_length);
+  }
+  max_packet_length_ = packet_length;
+}
+
+int32_t RTPPacketHistory::PutRTPPacket(const uint8_t* packet,
+                                       uint16_t packet_length,
+                                       uint16_t max_packet_length,
+                                       int64_t capture_time_ms,
+                                       StorageType type) {
+  if (type == kDontStore) {
+    return 0;
+  }
+
+  webrtc::CriticalSectionScoped cs(*critsect_);
+  if (!store_) {
+    return 0;
+  }
+
+  assert(packet);
+  assert(packet_length > 3);
+
+  VerifyAndAllocatePacketLength(max_packet_length);
+
+  if (packet_length > max_packet_length_) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, -1,
+        "Failed to store RTP packet, length: %d", packet_length);
+    return -1;
+  }
+
+  const uint16_t seq_num = (packet[2] << 8) + packet[3];
+
+  // Store packet
+  std::vector<std::vector<uint8_t> >::iterator it =
+      stored_packets_.begin() + prev_index_;
+  std::copy(packet, packet + packet_length, it->begin());
+
+  stored_seq_nums_[prev_index_] = seq_num;
+  stored_lengths_[prev_index_] = packet_length;
+  stored_times_[prev_index_] = capture_time_ms;
+  stored_resend_times_[prev_index_] = 0;  // packet not resent
+  stored_types_[prev_index_] = type;
+
+  ++prev_index_;
+  if (prev_index_ >= stored_seq_nums_.size()) {
+    prev_index_ = 0;
+  }
+  return 0;
+}
+
+bool RTPPacketHistory::HasRTPPacket(uint16_t sequence_number) const {
+  webrtc::CriticalSectionScoped cs(*critsect_);
+  if (!store_) {
+    return false;
+  }
+
+  int32_t index = 0;
+  bool found = FindSeqNum(sequence_number, &index);
+  if (!found) {
+    return false;
+  }
+ 
+  uint16_t length = stored_lengths_.at(index);
+  if (length == 0 || length > max_packet_length_) {
+    // Invalid length.
+    return false;
+  }
+  return true;
+}
+
+bool RTPPacketHistory::GetRTPPacket(uint16_t sequence_number,
+                                    uint32_t min_elapsed_time_ms,
+                                    uint8_t* packet,
+                                    uint16_t* packet_length,
+                                    int64_t* stored_time_ms,
+                                    StorageType* type) const {
+  webrtc::CriticalSectionScoped cs(*critsect_);
+  if (!store_) {
+    return false;
+  }
+
+  int32_t index = 0;
+  bool found = FindSeqNum(sequence_number, &index);
+  if (!found) {
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
+        "No match for getting seqNum %u", sequence_number);
+    return false;
+  }
+
+  uint16_t length = stored_lengths_.at(index);
+  if (length == 0 || length > max_packet_length_) {
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
+        "No match for getting seqNum %u, len %d", sequence_number, length);
+    return false;
+  }
+
+ if (length > *packet_length) {
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1, 
+        "Input buffer too short for packet %u", sequence_number);
+    return false;
+ }
+
+  // Verify elapsed time since last retrieve. 
+  int64_t now = clock_.GetTimeInMS();
+  if (min_elapsed_time_ms > 0 &&
+      ((now - stored_resend_times_.at(index)) < min_elapsed_time_ms)) {
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1, 
+        "Skip getting packet %u, packet recently resent.", sequence_number);
+    *packet_length = 0;
+    return true;
+  }
+
+  // Get packet.
+  std::vector<std::vector<uint8_t> >::const_iterator it_found_packet =
+      stored_packets_.begin() + index;
+  std::copy(it_found_packet->begin(), it_found_packet->begin() + length, packet);
+  *packet_length = stored_lengths_.at(index);
+  *stored_time_ms = stored_times_.at(index);
+  *type = stored_types_.at(index);
+  return true;
+}
+
+void RTPPacketHistory::UpdateResendTime(uint16_t sequence_number) {
+  webrtc::CriticalSectionScoped cs(*critsect_);
+  if (!store_) {
+    return;
+  }
+
+  int32_t index = 0;
+  bool found = FindSeqNum(sequence_number, &index);
+  if (!found) {
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
+        "Failed to update resend time, seq num: %u.", sequence_number);
+    return;
+  }
+  stored_resend_times_[index] = clock_.GetTimeInMS();
+}
+
+// private, lock should already be taken
+bool RTPPacketHistory::FindSeqNum(uint16_t sequence_number,
+                                  int32_t* index) const {
+  uint16_t temp_sequence_number = 0;
+  if (prev_index_ > 0) {
+    *index = prev_index_ - 1;
+    temp_sequence_number = stored_seq_nums_[*index];
+  } else {
+    *index = stored_seq_nums_.size() - 1;
+    temp_sequence_number = stored_seq_nums_[*index];  // wrap
+  }
+
+  int32_t idx = (prev_index_ - 1) - (temp_sequence_number - sequence_number);
+  if (idx >= 0 && idx < static_cast<int>(stored_seq_nums_.size())) {
+    *index = idx;
+    temp_sequence_number = stored_seq_nums_[*index];
+  }
+
+  if (temp_sequence_number != sequence_number) {
+    // We did not found a match, search all.
+    for (uint16_t m = 0; m < stored_seq_nums_.size(); m++) {
+      if (stored_seq_nums_[m] == sequence_number) {
+        *index = m;
+        temp_sequence_number = stored_seq_nums_[*index];
+        break;
+      }
+    }
+  }
+  if (temp_sequence_number == sequence_number) {
+    // We found a match.
+    return true;
+  }
+  return false;
+}
+}  // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/rtp_packet_history.h b/src/modules/rtp_rtcp/source/rtp_packet_history.h
new file mode 100644
index 0000000..5efadcd
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_packet_history.h
@@ -0,0 +1,86 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ *
+ *  Class for storing RTP packets.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_RTP_PACKET_HISTORY_H_
+#define WEBRTC_MODULES_RTP_RTCP_RTP_PACKET_HISTORY_H_
+
+#include <vector>
+
+#include "module_common_types.h"
+#include "rtp_rtcp_defines.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+class RtpRtcpClock;
+class CriticalSectionWrapper;
+
+class RTPPacketHistory {
+ public:
+  RTPPacketHistory(RtpRtcpClock* clock);
+  ~RTPPacketHistory();
+
+  void SetStorePacketsStatus(bool enable, uint16_t number_to_store);
+
+  bool StorePackets() const;
+
+  // Stores RTP packet.
+  int32_t PutRTPPacket(const uint8_t* packet,
+                       uint16_t packet_length,
+                       uint16_t max_packet_length,
+                       int64_t capture_time_ms,
+                       StorageType type);
+
+  // Gets stored RTP packet corresponding to the input sequence number.
+  // The packet is copied to the buffer pointed to by ptr_rtp_packet.
+  // The rtp_packet_length should show the available buffer size.
+  // Returns true if packet is found.
+  // rtp_packet_length: returns the copied packet length on success.
+  // min_elapsed_time_ms: the minimum time that must have elapsed since the last
+  // time the packet was resent (parameter is ignored if set to zero).
+  // If the packet is found but the minimum time has not elaped, no bytes are
+  // copied.
+  // stored_time_ms: returns the time when the packet was stored.
+  // type: returns the storage type set in PutRTPPacket.
+  bool GetRTPPacket(uint16_t sequence_number,
+                    uint32_t min_elapsed_time_ms,
+                    uint8_t* packet,
+                    uint16_t* packet_length,
+                    int64_t* stored_time_ms,
+                    StorageType* type) const;
+
+  bool HasRTPPacket(uint16_t sequence_number) const;
+
+  void UpdateResendTime(uint16_t sequence_number);
+
+ private:
+  void Allocate(uint16_t number_to_store);
+  void Free();
+  void VerifyAndAllocatePacketLength(uint16_t packet_length);
+  bool FindSeqNum(uint16_t sequence_number, int32_t* index) const;
+
+ private:
+  RtpRtcpClock& clock_;
+  CriticalSectionWrapper* critsect_;
+  bool store_;
+  uint32_t prev_index_;
+  uint16_t max_packet_length_;
+
+  std::vector<std::vector<uint8_t> > stored_packets_;
+  std::vector<uint16_t> stored_seq_nums_;
+  std::vector<uint16_t> stored_lengths_;
+  std::vector<int64_t> stored_times_;
+  std::vector<int64_t> stored_resend_times_;
+  std::vector<StorageType> stored_types_;
+};
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_RTP_RTCP_RTP_PACKET_HISTORY_H_
diff --git a/src/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc b/src/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc
new file mode 100644
index 0000000..47f3df5
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_packet_history_unittest.cc
@@ -0,0 +1,223 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ *
+ * This file includes unit tests for the RTPPacketHistory.
+ */
+
+#include <gtest/gtest.h>
+
+#include "rtp_packet_history.h"
+#include "rtp_rtcp_defines.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+class FakeClock : public RtpRtcpClock {
+ public:
+  FakeClock() {
+    time_in_ms_ = 123456;
+  }
+  // Return a timestamp in milliseconds relative to some arbitrary
+  // source; the source is fixed for this clock.
+  virtual WebRtc_Word64 GetTimeInMS() {
+    return time_in_ms_;
+  }
+  // Retrieve an NTP absolute timestamp.
+  virtual void CurrentNTP(WebRtc_UWord32& secs, WebRtc_UWord32& frac) {
+    secs = time_in_ms_ / 1000;
+    frac = (time_in_ms_ % 1000) * 4294967;
+  }
+  void IncrementTime(WebRtc_UWord32 time_increment_ms) {
+    time_in_ms_ += time_increment_ms;
+  }
+ private:
+  WebRtc_Word64 time_in_ms_;
+};
+
+class RtpPacketHistoryTest : public ::testing::Test {
+ protected:
+  RtpPacketHistoryTest()
+     : hist_(new RTPPacketHistory(&fake_clock_)) {
+  }
+  ~RtpPacketHistoryTest() {
+    delete hist_;
+  }
+  
+  FakeClock fake_clock_;
+  RTPPacketHistory* hist_;
+  enum {kPayload = 127};
+  enum {kSsrc = 12345678};
+  enum {kSeqNum = 88};
+  enum {kTimestamp = 127};
+  enum {kMaxPacketLength = 1500};
+  uint8_t packet_[kMaxPacketLength];
+  uint8_t packet_out_[kMaxPacketLength];
+
+  void CreateRtpPacket(uint16_t seq_num, uint32_t ssrc, uint8_t payload,
+      uint32_t timestamp, uint8_t* array, uint16_t* cur_pos) {
+    array[(*cur_pos)++] = 0x80;
+    array[(*cur_pos)++] = payload;
+    array[(*cur_pos)++] = seq_num >> 8;
+    array[(*cur_pos)++] = seq_num;
+    array[(*cur_pos)++] = timestamp >> 24;
+    array[(*cur_pos)++] = timestamp >> 16;
+    array[(*cur_pos)++] = timestamp >> 8;
+    array[(*cur_pos)++] = timestamp;
+    array[(*cur_pos)++] = ssrc >> 24;
+    array[(*cur_pos)++] = ssrc >> 16;
+    array[(*cur_pos)++] = ssrc >> 8;
+    array[(*cur_pos)++] = ssrc;
+  } 
+};
+
+TEST_F(RtpPacketHistoryTest, SetStoreStatus) {
+  EXPECT_FALSE(hist_->StorePackets());
+  hist_->SetStorePacketsStatus(true, 10);
+  EXPECT_TRUE(hist_->StorePackets());
+  hist_->SetStorePacketsStatus(false, 0);
+  EXPECT_FALSE(hist_->StorePackets());
+}
+
+TEST_F(RtpPacketHistoryTest, NoStoreStatus) {
+  EXPECT_FALSE(hist_->StorePackets());
+  uint16_t len = 0;
+  int64_t capture_time_ms = fake_clock_.GetTimeInMS();
+  CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
+  EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
+                                   capture_time_ms, kAllowRetransmission));
+  // Packet should not be stored.
+  len = kMaxPacketLength;
+  int64_t time;
+  StorageType type;
+  EXPECT_FALSE(hist_->GetRTPPacket(kSeqNum, 0, packet_, &len, &time, &type));
+}
+
+TEST_F(RtpPacketHistoryTest, DontStore) {
+  hist_->SetStorePacketsStatus(true, 10);
+  uint16_t len = 0;
+  int64_t capture_time_ms = fake_clock_.GetTimeInMS();
+  CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
+  EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
+                                   capture_time_ms, kDontStore));
+
+  // Packet should not be stored.
+  len = kMaxPacketLength;
+  int64_t time;
+  StorageType type;
+  EXPECT_FALSE(hist_->GetRTPPacket(kSeqNum, 0, packet_, &len, &time, &type));
+}
+
+TEST_F(RtpPacketHistoryTest, PutRtpPacket_TooLargePacketLength) {
+  hist_->SetStorePacketsStatus(true, 10);
+  int64_t capture_time_ms = fake_clock_.GetTimeInMS();
+  EXPECT_EQ(-1, hist_->PutRTPPacket(packet_,
+                                    kMaxPacketLength + 1,
+                                    kMaxPacketLength,
+                                    capture_time_ms,
+                                    kAllowRetransmission));
+}
+
+TEST_F(RtpPacketHistoryTest, GetRtpPacket_TooSmallBuffer) {
+  hist_->SetStorePacketsStatus(true, 10);
+  uint16_t len = 0;
+  int64_t capture_time_ms = fake_clock_.GetTimeInMS();
+  CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
+  EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
+                                   capture_time_ms, kAllowRetransmission));
+  uint16_t len_out = len - 1;
+  int64_t time;
+  StorageType type;
+  EXPECT_FALSE(hist_->GetRTPPacket(kSeqNum, 0, packet_, &len_out, &time,
+                                   &type));
+}
+
+TEST_F(RtpPacketHistoryTest, GetRtpPacket_NotStored) {
+  hist_->SetStorePacketsStatus(true, 10);
+  uint16_t len = kMaxPacketLength;
+  int64_t time;
+  StorageType type;
+  EXPECT_FALSE(hist_->GetRTPPacket(0, 0, packet_, &len, &time, &type));
+}
+
+TEST_F(RtpPacketHistoryTest, PutRtpPacket) {
+  hist_->SetStorePacketsStatus(true, 10);
+  uint16_t len = 0;
+  CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
+
+  EXPECT_FALSE(hist_->HasRTPPacket(kSeqNum));
+  int64_t capture_time_ms = fake_clock_.GetTimeInMS();
+  EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
+                                   capture_time_ms, kAllowRetransmission));
+  EXPECT_TRUE(hist_->HasRTPPacket(kSeqNum));
+}
+
+TEST_F(RtpPacketHistoryTest, GetRtpPacket) {
+  hist_->SetStorePacketsStatus(true, 10);
+  uint16_t len = 0;
+  int64_t capture_time_ms = fake_clock_.GetTimeInMS();
+  CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
+  EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
+                                   capture_time_ms, kAllowRetransmission));
+
+  uint16_t len_out = kMaxPacketLength;
+  int64_t time;
+  StorageType type;
+  EXPECT_TRUE(hist_->GetRTPPacket(kSeqNum, 0, packet_out_, &len_out, &time,
+                                  &type));
+  EXPECT_EQ(len, len_out);
+  EXPECT_EQ(kAllowRetransmission, type);
+  EXPECT_EQ(capture_time_ms, time);
+  for (int i = 0; i < len; i++)  {
+    EXPECT_EQ(packet_[i], packet_out_[i]);
+  }
+}
+
+TEST_F(RtpPacketHistoryTest, DontRetransmit) {
+  hist_->SetStorePacketsStatus(true, 10);
+  uint16_t len = 0;
+  int64_t capture_time_ms = fake_clock_.GetTimeInMS();
+  CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
+  EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
+                                   capture_time_ms, kDontRetransmit));
+
+  uint16_t len_out = kMaxPacketLength;
+  int64_t time;
+  StorageType type;
+  EXPECT_TRUE(hist_->GetRTPPacket(kSeqNum, 0, packet_out_, &len_out, &time,
+                                  &type));
+  EXPECT_EQ(len, len_out);
+  EXPECT_EQ(kDontRetransmit, type);
+  EXPECT_EQ(capture_time_ms, time);
+}
+
+TEST_F(RtpPacketHistoryTest, MinResendTime) {
+  hist_->SetStorePacketsStatus(true, 10);
+  uint16_t len = 0;
+  int64_t capture_time_ms = fake_clock_.GetTimeInMS();
+  CreateRtpPacket(kSeqNum, kSsrc, kPayload, kTimestamp, packet_, &len);
+  EXPECT_EQ(0, hist_->PutRTPPacket(packet_, len, kMaxPacketLength,
+                                   capture_time_ms, kAllowRetransmission));
+
+  hist_->UpdateResendTime(kSeqNum);
+  fake_clock_.IncrementTime(100);
+
+  // Time has elapsed.
+  len = kMaxPacketLength;
+  StorageType type;
+  int64_t time;
+  EXPECT_TRUE(hist_->GetRTPPacket(kSeqNum, 100, packet_, &len, &time, &type));
+  EXPECT_GT(len, 0);
+  EXPECT_EQ(capture_time_ms, time);
+
+  // Time has not elapsed. Packet should be found, but no bytes copied.
+  len = kMaxPacketLength;
+  EXPECT_TRUE(hist_->GetRTPPacket(kSeqNum, 101, packet_, &len, &time, &type));
+  EXPECT_EQ(0, len);
+}
+}  // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/rtp_receiver.cc b/src/modules/rtp_rtcp/source/rtp_receiver.cc
new file mode 100644
index 0000000..1887fc3
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_receiver.cc
@@ -0,0 +1,1615 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "trace.h"
+#include "rtp_receiver.h"
+
+#include "rtp_rtcp_defines.h"
+#include "rtp_rtcp_impl.h"
+#include "critical_section_wrapper.h"
+
+#include <cassert>
+#include <string.h> //memcpy
+#include <math.h>   // floor
+#include <stdlib.h> // abs
+
+namespace webrtc {
+
+using ModuleRTPUtility::AudioPayload;
+using ModuleRTPUtility::GetCurrentRTP;
+using ModuleRTPUtility::Payload;
+using ModuleRTPUtility::RTPPayloadParser;
+using ModuleRTPUtility::StringCompare;
+using ModuleRTPUtility::VideoPayload;
+
+RTPReceiver::RTPReceiver(const WebRtc_Word32 id,
+                         const bool audio,
+                         RtpRtcpClock* clock,
+                         RemoteBitrateEstimator* remote_bitrate,
+                         ModuleRtpRtcpImpl* owner) :
+    RTPReceiverAudio(id),
+    RTPReceiverVideo(id, remote_bitrate, owner),
+    Bitrate(clock),
+    _id(id),
+    _audio(audio),
+    _rtpRtcp(*owner),
+    _criticalSectionCbs(CriticalSectionWrapper::CreateCriticalSection()),
+    _cbRtpFeedback(NULL),
+    _cbRtpData(NULL),
+
+    _criticalSectionRTPReceiver(
+        CriticalSectionWrapper::CreateCriticalSection()),
+    _lastReceiveTime(0),
+    _lastReceivedPayloadLength(0),
+    _lastReceivedPayloadType(-1),
+    _lastReceivedMediaPayloadType(-1),
+    _lastReceivedAudioSpecific(),
+    _lastReceivedVideoSpecific(),
+
+    _packetTimeOutMS(0),
+
+    _redPayloadType(-1),
+    _payloadTypeMap(),
+    _rtpHeaderExtensionMap(),
+    _SSRC(0),
+    _numCSRCs(0),
+    _currentRemoteCSRC(),
+    _numEnergy(0),
+    _currentRemoteEnergy(),
+    _useSSRCFilter(false),
+    _SSRCFilter(0),
+
+    _jitterQ4(0),
+    _jitterMaxQ4(0),
+    _cumulativeLoss(0),
+    _jitterQ4TransmissionTimeOffset(0),
+    _localTimeLastReceivedTimestamp(0),
+    _lastReceivedTimestamp(0),
+    _lastReceivedSequenceNumber(0),
+    _lastReceivedTransmissionTimeOffset(0),
+
+    _receivedSeqFirst(0),
+    _receivedSeqMax(0),
+    _receivedSeqWraps(0),
+
+    _receivedPacketOH(12), // RTP header
+    _receivedByteCount(0),
+    _receivedOldPacketCount(0),
+    _receivedInorderPacketCount(0),
+
+    _lastReportInorderPackets(0),
+    _lastReportOldPackets(0),
+    _lastReportSeqMax(0),
+    _lastReportFractionLost(0),
+    _lastReportCumulativeLost(0),
+    _lastReportExtendedHighSeqNum(0),
+    _lastReportJitter(0),
+    _lastReportJitterTransmissionTimeOffset(0),
+
+    _nackMethod(kNackOff),
+    _RTX(false),
+    _ssrcRTX(0) {
+  memset(_currentRemoteCSRC, 0, sizeof(_currentRemoteCSRC));
+  memset(_currentRemoteEnergy, 0, sizeof(_currentRemoteEnergy));
+  memset(&_lastReceivedAudioSpecific, 0, sizeof(_lastReceivedAudioSpecific));
+
+  _lastReceivedAudioSpecific.channels = 1;
+  _lastReceivedVideoSpecific.maxRate = 0;
+  _lastReceivedVideoSpecific.videoCodecType = kRtpNoVideo;
+
+  WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id, "%s created", __FUNCTION__);
+}
+
+RTPReceiver::~RTPReceiver() {
+  if (_cbRtpFeedback) {
+    for (int i = 0; i < _numCSRCs; i++) {
+      _cbRtpFeedback->OnIncomingCSRCChanged(_id,_currentRemoteCSRC[i], false);
+    }
+  }
+  delete _criticalSectionCbs;
+  delete _criticalSectionRTPReceiver;
+
+  while (!_payloadTypeMap.empty()) {
+    std::map<WebRtc_Word8, Payload*>::iterator it = _payloadTypeMap.begin();
+    delete it->second;
+    _payloadTypeMap.erase(it);
+  }
+  WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, _id, "%s deleted", __FUNCTION__);
+}
+
+RtpVideoCodecTypes
+RTPReceiver::VideoCodecType() const
+{
+    return _lastReceivedVideoSpecific.videoCodecType;
+}
+
+WebRtc_UWord32
+RTPReceiver::MaxConfiguredBitrate() const
+{
+    return _lastReceivedVideoSpecific.maxRate;
+}
+
+bool
+RTPReceiver::REDPayloadType(const WebRtc_Word8 payloadType) const
+{
+    return (_redPayloadType == payloadType)?true:false;
+}
+
+WebRtc_Word8
+RTPReceiver::REDPayloadType() const
+{
+    return _redPayloadType;
+}
+
+    // configure a timeout value
+WebRtc_Word32
+RTPReceiver::SetPacketTimeout(const WebRtc_UWord32 timeoutMS)
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+    _packetTimeOutMS = timeoutMS;
+    return 0;
+}
+
+void RTPReceiver::PacketTimeout()
+{
+    bool packetTimeOut = false;
+    {
+        CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+        if(_packetTimeOutMS == 0)
+        {
+            // not configured
+            return;
+        }
+
+        if(_lastReceiveTime == 0)
+        {
+            // not active
+            return;
+        }
+
+        WebRtc_Word64 now = _clock.GetTimeInMS();
+
+        if(now - _lastReceiveTime > _packetTimeOutMS)
+        {
+            packetTimeOut = true;
+            _lastReceiveTime = 0;  // only one callback
+            _lastReceivedPayloadType = -1; // makes RemotePayload return -1, which we want
+            _lastReceivedMediaPayloadType = -1;
+        }
+    }
+    CriticalSectionScoped lock(_criticalSectionCbs);
+    if(packetTimeOut && _cbRtpFeedback)
+    {
+        _cbRtpFeedback->OnPacketTimeout(_id);
+    }
+}
+
+void
+RTPReceiver::ProcessDeadOrAlive(const bool RTCPalive, const WebRtc_Word64 now)
+{
+    if(_cbRtpFeedback == NULL)
+    {
+        // no callback
+        return;
+    }
+    RTPAliveType alive = kRtpDead;
+
+    if(_lastReceiveTime + 1000 > now)
+    {
+        // always alive if we have received a RTP packet the last sec
+        alive = kRtpAlive;
+
+    } else
+    {
+        if(RTCPalive)
+        {
+            if(_audio)
+            {
+                // alive depends on CNG
+                // if last received size < 10 likely CNG
+                if(_lastReceivedPayloadLength < 10) // our CNG is 9 bytes
+                {
+                    // potential CNG
+                    // receiver need to check kRtpNoRtp against NetEq speechType kOutputPLCtoCNG
+                    alive = kRtpNoRtp;
+                } else
+                {
+                    // dead
+                }
+            } else
+            {
+                // dead for video
+            }
+        }else
+        {
+            // no RTP packet for 1 sec and no RTCP
+            // dead
+        }
+    }
+
+
+    CriticalSectionScoped lock(_criticalSectionCbs);
+    if(_cbRtpFeedback)
+    {
+        _cbRtpFeedback->OnPeriodicDeadOrAlive(_id, alive);
+    }
+}
+
+WebRtc_UWord16
+RTPReceiver::PacketOHReceived() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+    return _receivedPacketOH;
+}
+
+WebRtc_UWord32
+RTPReceiver::PacketCountReceived() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+    return _receivedInorderPacketCount;
+}
+
+WebRtc_UWord32
+RTPReceiver::ByteCountReceived() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+    return _receivedByteCount;
+}
+
+WebRtc_Word32
+RTPReceiver::RegisterIncomingRTPCallback(RtpFeedback* incomingMessagesCallback)
+{
+    CriticalSectionScoped lock(_criticalSectionCbs);
+    _cbRtpFeedback = incomingMessagesCallback;
+    return 0;
+}
+
+WebRtc_Word32
+RTPReceiver::RegisterIncomingDataCallback(RtpData* incomingDataCallback)
+{
+    CriticalSectionScoped lock(_criticalSectionCbs);
+    _cbRtpData = incomingDataCallback;
+    return 0;
+}
+
+WebRtc_Word32 RTPReceiver::RegisterReceivePayload(
+    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+    const WebRtc_Word8 payloadType,
+    const WebRtc_UWord32 frequency,
+    const WebRtc_UWord8 channels,
+    const WebRtc_UWord32 rate) {
+  assert(payloadName);
+  CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+  // sanity
+  switch (payloadType) {
+    // reserved payload types to avoid RTCP conflicts when marker bit is set
+    case 64:        //  192 Full INTRA-frame request
+    case 72:        //  200 Sender report
+    case 73:        //  201 Receiver report
+    case 74:        //  202 Source description
+    case 75:        //  203 Goodbye
+    case 76:        //  204 Application-defined
+    case 77:        //  205 Transport layer FB message
+    case 78:        //  206 Payload-specific FB message
+    case 79:        //  207 Extended report
+      WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                   "%s invalid payloadtype:%d",
+                   __FUNCTION__, payloadType);
+      return -1;
+    default:
+      break;
+  }
+  size_t payloadNameLength = strlen(payloadName);
+
+  std::map<WebRtc_Word8, Payload*>::iterator it =
+      _payloadTypeMap.find(payloadType);
+  if (it != _payloadTypeMap.end()) {
+    // we already use this payload type
+    Payload* payload = it->second;
+    assert(payload);
+
+    size_t nameLength = strlen(payload->name);
+
+    // check if it's the same as we already have
+    // if same ignore sending an error
+    if (payloadNameLength == nameLength &&
+        StringCompare(payload->name, payloadName, payloadNameLength)) {
+      if (_audio &&
+          payload->audio &&
+          payload->typeSpecific.Audio.frequency == frequency &&
+          payload->typeSpecific.Audio.channels == channels &&
+          (payload->typeSpecific.Audio.rate == rate ||
+              payload->typeSpecific.Audio.rate == 0 || rate == 0)) {
+        payload->typeSpecific.Audio.rate = rate;
+        // Ensure that we update the rate if new or old is zero
+        return 0;
+      }
+      if (!_audio && !payload->audio) {
+        // update maxBitrate for video
+        payload->typeSpecific.Video.maxRate = rate;
+        return 0;
+      }
+    }
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s invalid argument payloadType:%d already registered",
+                 __FUNCTION__, payloadType);
+    return -1;
+  }
+  if (_audio) {
+    // remove existing item, hence search for the name
+    // only for audio, for video we allow a codecs to use multiple pltypes
+    std::map<WebRtc_Word8, Payload*>::iterator audio_it =
+        _payloadTypeMap.begin();
+    while (audio_it != _payloadTypeMap.end()) {
+      Payload* payload = audio_it->second;
+      size_t nameLength = strlen(payload->name);
+
+      if (payloadNameLength == nameLength &&
+          StringCompare(payload->name, payloadName, payloadNameLength)) {
+        // we found the payload name in the list
+        // if audio check frequency and rate
+        if (payload->audio) {
+          if (payload->typeSpecific.Audio.frequency == frequency &&
+              (payload->typeSpecific.Audio.rate == rate ||
+                  payload->typeSpecific.Audio.rate == 0 || rate == 0) &&
+                  payload->typeSpecific.Audio.channels == channels) {
+            // remove old setting
+            delete payload;
+            _payloadTypeMap.erase(audio_it);
+            break;
+          }
+        } else if(StringCompare(payloadName,"red",3)) {
+          delete payload;
+          _payloadTypeMap.erase(audio_it);
+          break;
+        }
+      }
+      audio_it++;
+    }
+  }
+  Payload* payload = NULL;
+
+  // save the RED payload type
+  // used in both audio and video
+  if (StringCompare(payloadName,"red",3)) {
+    _redPayloadType = payloadType;
+    payload = new Payload;
+    payload->audio = false;
+    payload->name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
+    strncpy(payload->name, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
+  } else {
+    if (_audio) {
+      payload = RegisterReceiveAudioPayload(payloadName, payloadType,
+                                            frequency, channels, rate);
+    } else {
+      payload = RegisterReceiveVideoPayload(payloadName, payloadType, rate);
+    }
+  }
+  if (payload == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s filed to register payload",
+                 __FUNCTION__);
+    return -1;
+  }
+  _payloadTypeMap[payloadType] = payload;
+
+  // Successful set of payload type, clear the value of last receivedPT,
+  // since it might mean something else
+  _lastReceivedPayloadType = -1;
+  _lastReceivedMediaPayloadType = -1;
+  return 0;
+}
+
+WebRtc_Word32 RTPReceiver::DeRegisterReceivePayload(
+    const WebRtc_Word8 payloadType) {
+  CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+  std::map<WebRtc_Word8, Payload*>::iterator it =
+      _payloadTypeMap.find(payloadType);
+
+  if (it == _payloadTypeMap.end()) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s failed to find payloadType:%d",
+                 __FUNCTION__, payloadType);
+    return -1;
+  }
+  delete it->second;
+  _payloadTypeMap.erase(it);
+  return 0;
+}
+
+WebRtc_Word32 RTPReceiver::ReceivePayloadType(
+    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+    const WebRtc_UWord32 frequency,
+    const WebRtc_UWord8 channels,
+    const WebRtc_UWord32 rate,
+    WebRtc_Word8* payloadType) const {
+  if (payloadType == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "%s invalid argument", __FUNCTION__);
+    return -1;
+  }
+  size_t payloadNameLength = strlen(payloadName);
+
+  CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+  std::map<WebRtc_Word8, Payload*>::const_iterator it =
+      _payloadTypeMap.begin();
+
+  while (it != _payloadTypeMap.end()) {
+    Payload* payload = it->second;
+    assert(payload);
+
+    size_t nameLength = strlen(payload->name);
+    if (payloadNameLength == nameLength &&
+        StringCompare(payload->name, payloadName, payloadNameLength)) {
+      // name match
+      if( payload->audio) {
+        if (rate == 0) {
+          // [default] audio, check freq and channels
+          if (payload->typeSpecific.Audio.frequency == frequency &&
+              payload->typeSpecific.Audio.channels == channels) {
+            *payloadType = it->first;
+            return 0;
+          }
+        } else {
+          // audio, check freq, channels and rate
+          if( payload->typeSpecific.Audio.frequency == frequency &&
+              payload->typeSpecific.Audio.channels == channels &&
+              payload->typeSpecific.Audio.rate == rate) {
+            // extra rate condition added
+            *payloadType = it->first;
+            return 0;
+          }
+        }
+      } else {
+        // video
+        *payloadType = it->first;
+        return 0;
+      }
+    }
+    it++;
+  }
+  return -1;
+}
+
+WebRtc_Word32 RTPReceiver::ReceivePayload(
+    const WebRtc_Word8 payloadType,
+    char payloadName[RTP_PAYLOAD_NAME_SIZE],
+    WebRtc_UWord32* frequency,
+    WebRtc_UWord8* channels,
+    WebRtc_UWord32* rate) const {
+  CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+  std::map<WebRtc_Word8, Payload*>::const_iterator it =
+      _payloadTypeMap.find(payloadType);
+
+  if (it == _payloadTypeMap.end()) {
+    return -1;
+  }
+  Payload* payload = it->second;
+  assert(payload);
+
+  if(frequency) {
+    if(payload->audio) {
+      *frequency = payload->typeSpecific.Audio.frequency;
+    } else {
+      *frequency = 90000;
+    }
+  }
+  if (channels) {
+    if(payload->audio) {
+      *channels = payload->typeSpecific.Audio.channels;
+    } else {
+      *channels = 1;
+    }
+  }
+  if (rate) {
+    if(payload->audio) {
+      *rate = payload->typeSpecific.Audio.rate;
+    } else {
+      assert(false);
+      *rate = 0;
+    }
+  }
+  if (payloadName) {
+    payloadName[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
+    strncpy(payloadName, payload->name, RTP_PAYLOAD_NAME_SIZE - 1);
+  }
+  return 0;
+}
+
+WebRtc_Word32 RTPReceiver::RemotePayload(
+    char payloadName[RTP_PAYLOAD_NAME_SIZE],
+    WebRtc_Word8* payloadType,
+    WebRtc_UWord32* frequency,
+    WebRtc_UWord8* channels) const {
+  if(_lastReceivedPayloadType == -1) {
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
+                 "%s invalid state", __FUNCTION__);
+    return -1;
+  }
+  std::map<WebRtc_Word8, Payload*>::const_iterator it =
+      _payloadTypeMap.find(_lastReceivedPayloadType);
+
+  if (it == _payloadTypeMap.end()) {
+    return -1;
+  }
+  Payload* payload = it->second;
+  assert(payload);
+  payloadName[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
+  strncpy(payloadName, payload->name, RTP_PAYLOAD_NAME_SIZE - 1);
+
+  if (payloadType) {
+    *payloadType = _lastReceivedPayloadType;
+  }
+  if (frequency) {
+    if (payload->audio) {
+      *frequency = payload->typeSpecific.Audio.frequency;
+    } else {
+      *frequency = 90000;
+    }
+  }
+  if (channels) {
+    if (payload->audio) {
+      *channels = payload->typeSpecific.Audio.channels;
+    } else {
+      *channels = 1;
+    }
+  }
+  return 0;
+}
+
+WebRtc_Word32
+RTPReceiver::RegisterRtpHeaderExtension(const RTPExtensionType type,
+                                        const WebRtc_UWord8 id)
+{
+    CriticalSectionScoped cs(_criticalSectionRTPReceiver);
+    return _rtpHeaderExtensionMap.Register(type, id);
+}
+
+WebRtc_Word32
+RTPReceiver::DeregisterRtpHeaderExtension(const RTPExtensionType type)
+{
+    CriticalSectionScoped cs(_criticalSectionRTPReceiver);
+    return _rtpHeaderExtensionMap.Deregister(type);
+}
+
+void RTPReceiver::GetHeaderExtensionMapCopy(RtpHeaderExtensionMap* map) const
+{
+    CriticalSectionScoped cs(_criticalSectionRTPReceiver);
+    _rtpHeaderExtensionMap.GetCopy(map);
+}
+
+NACKMethod
+RTPReceiver::NACK() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+    return _nackMethod;
+}
+
+    // Turn negative acknowledgement requests on/off
+WebRtc_Word32
+RTPReceiver::SetNACKStatus(const NACKMethod method)
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+    _nackMethod = method;
+    return 0;
+}
+
+void RTPReceiver::SetRTXStatus(const bool enable,
+                               const WebRtc_UWord32 SSRC) {
+  CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+  _RTX = enable;
+  _ssrcRTX = SSRC;
+}
+
+void RTPReceiver::RTXStatus(bool* enable, WebRtc_UWord32* SSRC) const {
+  CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+  *enable = _RTX;
+  *SSRC = _ssrcRTX;
+}
+
+WebRtc_UWord32
+RTPReceiver::SSRC() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+    return _SSRC;
+}
+
+    // Get remote CSRC
+WebRtc_Word32
+RTPReceiver::CSRCs( WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]) const
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+    assert(_numCSRCs <= kRtpCsrcSize);
+
+    if(_numCSRCs >0)
+    {
+        memcpy(arrOfCSRC, _currentRemoteCSRC, sizeof(WebRtc_UWord32)*_numCSRCs);
+    }
+    return _numCSRCs;
+}
+
+WebRtc_Word32
+RTPReceiver::Energy( WebRtc_UWord8 arrOfEnergy[kRtpCsrcSize]) const
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+    assert(_numEnergy <= kRtpCsrcSize);
+
+    if(_numEnergy >0)
+    {
+        memcpy(arrOfEnergy, _currentRemoteEnergy, sizeof(WebRtc_UWord8)*_numCSRCs);
+    }
+    return _numEnergy;
+}
+
+WebRtc_Word32 RTPReceiver::IncomingRTPPacket(
+    WebRtcRTPHeader* rtp_header,
+    const WebRtc_UWord8* packet,
+    const WebRtc_UWord16 packet_length) {
+  // rtp_header contains the parsed RTP header.
+  // Adjust packet length w r t RTP padding.
+  int length = packet_length - rtp_header->header.paddingLength;
+
+  // length sanity
+  if ((length - rtp_header->header.headerLength) < 0) {
+     WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                  "%s invalid argument",
+                  __FUNCTION__);
+     return -1;
+  }
+  if (_RTX) {
+    if (_ssrcRTX == rtp_header->header.ssrc) {
+      // Sanity check.
+      if (rtp_header->header.headerLength + 2 > packet_length) {
+        return -1;
+      }
+      rtp_header->header.ssrc = _SSRC;
+      rtp_header->header.sequenceNumber =
+          (packet[rtp_header->header.headerLength] << 8) +
+          packet[1 + rtp_header->header.headerLength];
+      // Count the RTX header as part of the RTP header.
+      rtp_header->header.headerLength += 2;
+    }
+  }
+  if (_useSSRCFilter) {
+    if (rtp_header->header.ssrc != _SSRCFilter) {
+      WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
+                   "%s drop packet due to SSRC filter",
+                   __FUNCTION__);
+      return -1;
+    }
+  }
+  if (_lastReceiveTime == 0) {
+    // trigger only once
+    CriticalSectionScoped lock(_criticalSectionCbs);
+    if (_cbRtpFeedback) {
+      if (length - rtp_header->header.headerLength == 0) {
+        // keepalive packet
+        _cbRtpFeedback->OnReceivedPacket(_id, kPacketKeepAlive);
+      } else {
+        _cbRtpFeedback->OnReceivedPacket(_id, kPacketRtp);
+      }
+    }
+  }
+  WebRtc_Word8 first_payload_byte = 0;
+  if (length > 0) {
+    first_payload_byte = packet[rtp_header->header.headerLength];
+  }
+  // trigger our callbacks
+  CheckSSRCChanged(rtp_header);
+
+  bool is_red = false;
+  VideoPayload video_specific;
+  video_specific.maxRate = 0;
+  video_specific.videoCodecType = kRtpNoVideo;
+
+  AudioPayload audio_specific;
+  audio_specific.channels = 0;
+  audio_specific.frequency = 0;
+
+  if (CheckPayloadChanged(rtp_header,
+                          first_payload_byte,
+                          is_red,
+                          audio_specific,
+                          video_specific) == -1) {
+    if (length - rtp_header->header.headerLength == 0)
+    {
+      // ok keepalive packet
+      WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id,
+                   "%s received keepalive",
+                   __FUNCTION__);
+      return 0;
+    }
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
+                 "%s received invalid payloadtype",
+                 __FUNCTION__);
+    return -1;
+  }
+  CheckCSRC(rtp_header);
+
+  const WebRtc_UWord8* payload_data =
+      packet + rtp_header->header.headerLength;
+
+  WebRtc_UWord16 payload_data_length =
+      static_cast<WebRtc_UWord16>(length - rtp_header->header.headerLength);
+
+  WebRtc_Word32 retVal = 0;
+  if(_audio) {
+    retVal = ParseAudioCodecSpecific(rtp_header,
+                                     payload_data,
+                                     payload_data_length,
+                                     audio_specific,
+                                     is_red);
+  } else {
+    retVal = ParseVideoCodecSpecific(rtp_header,
+                                     payload_data,
+                                     payload_data_length,
+                                     video_specific.videoCodecType,
+                                     is_red,
+                                     packet,
+                                     packet_length,
+                                     _clock.GetTimeInMS());
+  }
+  if(retVal < 0) {
+    return retVal;
+  }
+
+  CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+  // this compare to _receivedSeqMax
+  // we store the last received after we have done the callback
+  bool old_packet = RetransmitOfOldPacket(rtp_header->header.sequenceNumber,
+                                          rtp_header->header.timestamp);
+
+  // this updates _receivedSeqMax and other members
+  UpdateStatistics(rtp_header, payload_data_length, old_packet);
+
+  // Need to be updated after RetransmitOfOldPacket &
+  // RetransmitOfOldPacketUpdateStatistics
+  _lastReceiveTime = _clock.GetTimeInMS();
+  _lastReceivedPayloadLength = payload_data_length;
+
+  if (!old_packet) {
+    if (_lastReceivedTimestamp != rtp_header->header.timestamp) {
+      _lastReceivedTimestamp = rtp_header->header.timestamp;
+    }
+    _lastReceivedSequenceNumber = rtp_header->header.sequenceNumber;
+    _lastReceivedTransmissionTimeOffset =
+        rtp_header->extension.transmissionTimeOffset;
+  }
+  return retVal;
+}
+
+// must not have critsect when called
+WebRtc_Word32
+RTPReceiver::CallbackOfReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                                           const WebRtc_UWord16 payloadSize,
+                                           const WebRtcRTPHeader* rtpHeader)
+{
+    CriticalSectionScoped lock(_criticalSectionCbs);
+    if(_cbRtpData)
+    {
+        return _cbRtpData->OnReceivedPayloadData(payloadData, payloadSize, rtpHeader);
+    }
+    return -1;
+}
+
+// we already have the _criticalSectionRTPReceiver critsect when we call this
+void
+RTPReceiver::UpdateStatistics(const WebRtcRTPHeader* rtpHeader,
+                              const WebRtc_UWord16 bytes,
+                              const bool oldPacket)
+{
+    WebRtc_UWord32 freq = 90000;
+    if(_audio)
+    {
+        freq = AudioFrequency();
+    }
+
+    Bitrate::Update(bytes);
+
+    _receivedByteCount += bytes;
+
+    if (_receivedSeqMax == 0 && _receivedSeqWraps == 0)
+    {
+        // First received report
+        _receivedSeqFirst = rtpHeader->header.sequenceNumber;
+        _receivedSeqMax = rtpHeader->header.sequenceNumber;
+        _receivedInorderPacketCount = 1;
+        _localTimeLastReceivedTimestamp =
+            GetCurrentRTP(&_clock, freq); //time in samples
+        return;
+    }
+
+    // count only the new packets received
+    if(InOrderPacket(rtpHeader->header.sequenceNumber))
+    {
+        const WebRtc_UWord32 RTPtime =
+            GetCurrentRTP(&_clock, freq); //time in samples
+        _receivedInorderPacketCount++;
+
+        // wrong if we use RetransmitOfOldPacket
+        WebRtc_Word32 seqDiff = rtpHeader->header.sequenceNumber - _receivedSeqMax;
+        if (seqDiff < 0)
+        {
+            // Wrap around detected
+            _receivedSeqWraps++;
+        }
+        // new max
+        _receivedSeqMax = rtpHeader->header.sequenceNumber;
+
+        if (rtpHeader->header.timestamp != _lastReceivedTimestamp &&
+            _receivedInorderPacketCount > 1)
+        {
+            WebRtc_Word32 timeDiffSamples = (RTPtime - _localTimeLastReceivedTimestamp) -
+                                          (rtpHeader->header.timestamp - _lastReceivedTimestamp);
+
+            timeDiffSamples = abs(timeDiffSamples);
+
+            // libJingle sometimes deliver crazy jumps in TS for the same stream
+            // If this happen don't update jitter value
+            if(timeDiffSamples < 450000)  // Use 5 secs video frequency as border
+            {
+                // note we calculate in Q4 to avoid using float
+                WebRtc_Word32 jitterDiffQ4 = (timeDiffSamples << 4) - _jitterQ4;
+                _jitterQ4 += ((jitterDiffQ4 + 8) >> 4);
+            }
+
+            // Extended jitter report, RFC 5450.
+            // Actual network jitter, excluding the source-introduced jitter.
+            WebRtc_Word32 timeDiffSamplesExt =
+                (RTPtime - _localTimeLastReceivedTimestamp) -
+                ((rtpHeader->header.timestamp +
+                  rtpHeader->extension.transmissionTimeOffset) -
+                (_lastReceivedTimestamp +
+                 _lastReceivedTransmissionTimeOffset));
+
+            timeDiffSamplesExt = abs(timeDiffSamplesExt);
+
+            if(timeDiffSamplesExt < 450000)  // Use 5 secs video freq as border
+            {
+                // note we calculate in Q4 to avoid using float
+                WebRtc_Word32 jitterDiffQ4TransmissionTimeOffset =
+                    (timeDiffSamplesExt << 4) - _jitterQ4TransmissionTimeOffset;
+                _jitterQ4TransmissionTimeOffset +=
+                    ((jitterDiffQ4TransmissionTimeOffset + 8) >> 4);
+            }
+        }
+        _localTimeLastReceivedTimestamp = RTPtime;
+    } else
+    {
+        if(oldPacket)
+        {
+            _receivedOldPacketCount++;
+        }else
+        {
+            _receivedInorderPacketCount++;
+        }
+    }
+
+    WebRtc_UWord16 packetOH = rtpHeader->header.headerLength + rtpHeader->header.paddingLength;
+
+    // our measured overhead
+    // filter from RFC 5104     4.2.1.2
+    // avg_OH (new) = 15/16*avg_OH (old) + 1/16*pckt_OH,
+    _receivedPacketOH =  (15*_receivedPacketOH + packetOH) >> 4;
+}
+
+// we already have the _criticalSectionRTPReceiver critsect when we call this
+bool RTPReceiver::RetransmitOfOldPacket(
+    const WebRtc_UWord16 sequenceNumber,
+    const WebRtc_UWord32 rtpTimeStamp) const {
+  if (InOrderPacket(sequenceNumber)) {
+    return false;
+  }
+  WebRtc_UWord32 frequencyKHz = 90;  // Video frequency.
+  if (_audio) {
+    frequencyKHz = AudioFrequency() / 1000;
+  }
+  WebRtc_Word64 timeDiffMS = _clock.GetTimeInMS() - _lastReceiveTime;
+  // Diff in time stamp since last received in order.
+  WebRtc_Word32 rtpTimeStampDiffMS = static_cast<WebRtc_Word32>(
+      rtpTimeStamp - _lastReceivedTimestamp) / frequencyKHz;
+
+  WebRtc_UWord16 minRTT = 0;
+  WebRtc_Word32 maxDelayMs = 0;
+  _rtpRtcp.RTT(_SSRC, NULL, NULL, &minRTT, NULL);
+  if (minRTT == 0) {
+    float jitter = _jitterQ4 >> 4;  // Jitter variance in samples.
+    // Jitter standard deviation in samples.
+    float jitterStd = sqrt(jitter);
+    // 2 times the std deviation => 95% confidence.
+    // And transform to ms by dividing by the frequency in kHz.
+    maxDelayMs = static_cast<WebRtc_Word32>((2 * jitterStd) / frequencyKHz);
+
+    // Min maxDelayMs is 1.
+    if (maxDelayMs == 0) {
+      maxDelayMs = 1; 
+    }
+  } else {
+    maxDelayMs = (minRTT / 3) + 1;
+  }
+  if (timeDiffMS > rtpTimeStampDiffMS + maxDelayMs) {
+    return true;
+  }
+  return false;
+}
+
+bool
+RTPReceiver::InOrderPacket(const WebRtc_UWord16 sequenceNumber) const
+{
+    if(_receivedSeqMax >= sequenceNumber)
+    {
+        if(!(_receivedSeqMax > 0xff00 && sequenceNumber < 0x0ff ))//detect wrap around
+        {
+            if(_receivedSeqMax - NACK_PACKETS_MAX_SIZE > sequenceNumber)
+            {
+                // we have a restart of the remote side
+            }else
+            {
+                // we received a retransmit of a packet we already have
+                return false;
+            }
+        }
+    }else
+    {
+        // check for a wrap
+        if(sequenceNumber > 0xff00 && _receivedSeqMax < 0x0ff )//detect wrap around
+        {
+            if(_receivedSeqMax - NACK_PACKETS_MAX_SIZE > sequenceNumber)
+            {
+                // we have a restart of the remote side
+            }else
+            {
+                // we received a retransmit of a packet we already have
+                return false;
+            }
+        }
+    }
+    return true;
+}
+
+WebRtc_UWord16
+RTPReceiver::SequenceNumber() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+    return _lastReceivedSequenceNumber;
+}
+
+WebRtc_UWord32
+RTPReceiver::TimeStamp() const
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+    return _lastReceivedTimestamp;
+}
+
+WebRtc_UWord32 RTPReceiver::PayloadTypeToPayload(
+    const WebRtc_UWord8 payloadType,
+    Payload*& payload) const {
+
+  std::map<WebRtc_Word8, Payload*>::const_iterator it =
+      _payloadTypeMap.find(payloadType);
+
+  // check that this is a registered payload type
+  if (it == _payloadTypeMap.end()) {
+    return -1;
+  }
+  payload = it->second;
+  return 0;
+}
+
+// timeStamp of the last incoming packet that is the first packet of its frame
+WebRtc_Word32
+RTPReceiver::EstimatedRemoteTimeStamp(WebRtc_UWord32& timestamp) const
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+    WebRtc_UWord32 freq = 90000;
+    if(_audio)
+    {
+        freq = AudioFrequency();
+    }
+    if(_localTimeLastReceivedTimestamp == 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id, "%s invalid state", __FUNCTION__);
+        return -1;
+    }
+    //time in samples
+    WebRtc_UWord32 diff = GetCurrentRTP(&_clock, freq)
+        - _localTimeLastReceivedTimestamp;
+
+    timestamp = _lastReceivedTimestamp + diff;
+    return 0;
+}
+
+    // get the currently configured SSRC filter
+WebRtc_Word32
+RTPReceiver::SSRCFilter(WebRtc_UWord32& allowedSSRC) const
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+    if(_useSSRCFilter)
+    {
+        allowedSSRC = _SSRCFilter;
+        return 0;
+    }
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id, "%s invalid state", __FUNCTION__);
+    return -1;
+}
+
+    // set a SSRC to be used as a filter for incoming RTP streams
+WebRtc_Word32
+RTPReceiver::SetSSRCFilter(const bool enable, const WebRtc_UWord32 allowedSSRC)
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+    _useSSRCFilter = enable;
+    if(enable)
+    {
+        _SSRCFilter = allowedSSRC;
+    } else
+    {
+        _SSRCFilter = 0;
+    }
+    return 0;
+}
+
+// no criticalsection when called
+void RTPReceiver::CheckSSRCChanged(const WebRtcRTPHeader* rtpHeader) {
+  bool newSSRC = false;
+  bool reInitializeDecoder = false;
+  char payloadName[RTP_PAYLOAD_NAME_SIZE];
+  WebRtc_UWord32 frequency = 90000; // default video freq
+  WebRtc_UWord8 channels = 1;
+  WebRtc_UWord32 rate = 0;
+
+  {
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+    if (_SSRC != rtpHeader->header.ssrc ||
+        (_lastReceivedPayloadType == -1 && _SSRC == 0)) {
+      // we need the _payloadType to make the call if the remote SSRC is 0
+      newSSRC = true;
+
+      // reset last report
+      ResetStatistics();
+
+      _lastReceivedTimestamp      = 0;
+      _lastReceivedSequenceNumber = 0;
+      _lastReceivedTransmissionTimeOffset = 0;
+
+      if (_SSRC) {  // do we have a SSRC? then the stream is restarted
+        //  if we have the same codec? reinit decoder
+        if (rtpHeader->header.payloadType == _lastReceivedPayloadType) {
+          reInitializeDecoder = true;
+
+          std::map<WebRtc_Word8, Payload*>::iterator it =
+              _payloadTypeMap.find(rtpHeader->header.payloadType);
+
+          if (it == _payloadTypeMap.end()) {
+            return;
+          }
+          Payload* payload = it->second;
+          assert(payload);
+          payloadName[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
+          strncpy(payloadName, payload->name, RTP_PAYLOAD_NAME_SIZE - 1);
+          if(payload->audio) {
+            frequency = payload->typeSpecific.Audio.frequency;
+            channels =  payload->typeSpecific.Audio.channels;
+            rate = payload->typeSpecific.Audio.rate;
+          } else {
+            frequency = 90000;
+          }
+        }
+      }
+      _SSRC = rtpHeader->header.ssrc;
+    }
+  }
+  if(newSSRC) {
+    // we need to get this to our RTCP sender and receiver
+    // need to do this outside critical section
+    _rtpRtcp.SetRemoteSSRC(rtpHeader->header.ssrc);
+  }
+  CriticalSectionScoped lock(_criticalSectionCbs);
+  if(_cbRtpFeedback) {
+    if(newSSRC) {
+      _cbRtpFeedback->OnIncomingSSRCChanged(_id, rtpHeader->header.ssrc);
+    }
+    if(reInitializeDecoder) {
+      if (-1 == _cbRtpFeedback->OnInitializeDecoder(_id,
+          rtpHeader->header.payloadType, payloadName, frequency, channels,
+          rate)) {  // new stream same codec
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                     "Failed to create decoder for payload type:%d",
+                     rtpHeader->header.payloadType);
+      }
+    }
+  }
+}
+
+// no criticalsection when called
+WebRtc_Word32 RTPReceiver::CheckPayloadChanged(
+    const WebRtcRTPHeader* rtpHeader,
+    const WebRtc_Word8 firstPayloadByte,
+    bool& isRED,
+    AudioPayload& audioSpecificPayload,
+    VideoPayload& videoSpecificPayload) {
+  bool reInitializeDecoder = false;
+
+  char payloadName[RTP_PAYLOAD_NAME_SIZE];
+  WebRtc_Word8 payloadType = rtpHeader->header.payloadType;
+
+  {
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+    if (payloadType != _lastReceivedPayloadType) {
+      if (REDPayloadType(payloadType)) {
+        // get the real codec payload type
+        payloadType = firstPayloadByte & 0x7f;
+        isRED = true;
+
+        if (REDPayloadType(payloadType)) {
+            // Invalid payload type, traced by caller. If we proceeded here,
+            // this would be set as |_lastReceivedPayloadType|, and we would no
+            // longer catch corrupt packets at this level.
+            return -1;
+        }
+
+        //when we receive RED we need to check the real payload type
+        if (payloadType == _lastReceivedPayloadType) {
+          if(_audio)
+          {
+            memcpy(&audioSpecificPayload, &_lastReceivedAudioSpecific,
+                   sizeof(_lastReceivedAudioSpecific));
+          } else {
+            memcpy(&videoSpecificPayload, &_lastReceivedVideoSpecific,
+                   sizeof(_lastReceivedVideoSpecific));
+          }
+          return 0;
+        }
+      }
+      if (_audio) {
+        if (TelephoneEventPayloadType(payloadType)) {
+          // don't do callbacks for DTMF packets
+          isRED = false;
+          return 0;
+        }
+        // frequency is updated for CNG
+        if (CNGPayloadType(payloadType, audioSpecificPayload.frequency)) {
+          // don't do callbacks for DTMF packets
+          isRED = false;
+          return 0;
+        }
+      }
+      std::map<WebRtc_Word8, ModuleRTPUtility::Payload*>::iterator it =
+          _payloadTypeMap.find(payloadType);
+
+      // check that this is a registered payload type
+      if (it == _payloadTypeMap.end()) {
+        return -1;
+      }
+      Payload* payload = it->second;
+      assert(payload);
+      payloadName[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
+      strncpy(payloadName, payload->name, RTP_PAYLOAD_NAME_SIZE - 1);
+      _lastReceivedPayloadType = payloadType;
+
+      reInitializeDecoder = true;
+
+      if(payload->audio) {
+        memcpy(&_lastReceivedAudioSpecific, &(payload->typeSpecific.Audio),
+               sizeof(_lastReceivedAudioSpecific));
+        memcpy(&audioSpecificPayload, &(payload->typeSpecific.Audio),
+               sizeof(_lastReceivedAudioSpecific));
+      } else {
+        memcpy(&_lastReceivedVideoSpecific, &(payload->typeSpecific.Video),
+               sizeof(_lastReceivedVideoSpecific));
+        memcpy(&videoSpecificPayload, &(payload->typeSpecific.Video),
+               sizeof(_lastReceivedVideoSpecific));
+
+        if (_lastReceivedVideoSpecific.videoCodecType == kRtpFecVideo)
+        {
+          // Only reset the decoder on media packets.
+          reInitializeDecoder = false;
+        } else {
+          if (_lastReceivedMediaPayloadType == _lastReceivedPayloadType) {
+            // Only reset the decoder if the media codec type has changed.
+            reInitializeDecoder = false;
+          }
+          _lastReceivedMediaPayloadType = _lastReceivedPayloadType;
+        }
+      }
+      if (reInitializeDecoder) {
+        // reset statistics
+        ResetStatistics();
+      }
+    } else {
+      if(_audio)
+      {
+        memcpy(&audioSpecificPayload, &_lastReceivedAudioSpecific,
+               sizeof(_lastReceivedAudioSpecific));
+      } else
+      {
+        memcpy(&videoSpecificPayload, &_lastReceivedVideoSpecific,
+               sizeof(_lastReceivedVideoSpecific));
+      }
+      isRED = false;
+    }
+  }   // end critsect
+  if (reInitializeDecoder) {
+    CriticalSectionScoped lock(_criticalSectionCbs);
+    if (_cbRtpFeedback) {
+      // create new decoder instance
+      if(_audio) {
+        if (-1 == _cbRtpFeedback->OnInitializeDecoder(_id, payloadType,
+            payloadName, audioSpecificPayload.frequency,
+            audioSpecificPayload.channels, audioSpecificPayload.rate)) {
+          WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                       "Failed to create audio decoder for payload type:%d",
+                       payloadType);
+          return -1; // Wrong payload type
+        }
+      } else {
+        if (-1 == _cbRtpFeedback->OnInitializeDecoder(_id, payloadType,
+            payloadName, 90000, 1, 0)) {
+          WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                       "Failed to create video decoder for payload type:%d",
+                       payloadType);
+          return -1; // Wrong payload type
+        }
+      }
+    }
+  }
+  return 0;
+}
+
+// no criticalsection when called
+void RTPReceiver::CheckCSRC(const WebRtcRTPHeader* rtpHeader) {
+  WebRtc_Word32 numCSRCsDiff = 0;
+  WebRtc_UWord32 oldRemoteCSRC[kRtpCsrcSize];
+  WebRtc_UWord8 oldNumCSRCs = 0;
+
+  {
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+    if (TelephoneEventPayloadType(rtpHeader->header.payloadType)) {
+      // Don't do this for DTMF packets
+      return;
+    }
+    _numEnergy = rtpHeader->type.Audio.numEnergy;
+    if (rtpHeader->type.Audio.numEnergy > 0 &&
+        rtpHeader->type.Audio.numEnergy <= kRtpCsrcSize) {
+      memcpy(_currentRemoteEnergy,
+             rtpHeader->type.Audio.arrOfEnergy,
+             rtpHeader->type.Audio.numEnergy);
+    }
+    oldNumCSRCs  = _numCSRCs;
+    if (oldNumCSRCs > 0) {
+      // Make a copy of old.
+      memcpy(oldRemoteCSRC, _currentRemoteCSRC,
+             _numCSRCs * sizeof(WebRtc_UWord32));
+    }
+    const WebRtc_UWord8 numCSRCs = rtpHeader->header.numCSRCs;
+    if ((numCSRCs > 0) && (numCSRCs <= kRtpCsrcSize)) {
+      // Copy new
+      memcpy(_currentRemoteCSRC,
+             rtpHeader->header.arrOfCSRCs,
+             numCSRCs * sizeof(WebRtc_UWord32));
+    }
+    if (numCSRCs > 0 || oldNumCSRCs > 0) {
+      numCSRCsDiff = numCSRCs - oldNumCSRCs;
+      _numCSRCs = numCSRCs;  // Update stored CSRCs.
+    } else {
+      // No change.
+      return;
+    }
+  }  // End scoped CriticalSection.
+
+  CriticalSectionScoped lock(_criticalSectionCbs);
+  if (_cbRtpFeedback == NULL) {
+    return;
+  }
+  bool haveCalledCallback = false;
+  // Search for new CSRC in old array.
+  for (WebRtc_UWord8 i = 0; i < rtpHeader->header.numCSRCs; ++i) {
+    const WebRtc_UWord32 csrc = rtpHeader->header.arrOfCSRCs[i];
+
+    bool foundMatch = false;
+    for (WebRtc_UWord8 j = 0; j < oldNumCSRCs; ++j) {
+      if (csrc == oldRemoteCSRC[j]) {  // old list
+        foundMatch = true;
+        break;
+      }
+    }
+    if (!foundMatch && csrc) {
+      // Didn't find it, report it as new.
+      haveCalledCallback = true;
+      _cbRtpFeedback->OnIncomingCSRCChanged(_id, csrc, true);
+    }
+  }
+  // Search for old CSRC in new array.
+  for (WebRtc_UWord8 i = 0; i < oldNumCSRCs; ++i) {
+    const WebRtc_UWord32 csrc = oldRemoteCSRC[i];
+
+    bool foundMatch = false;
+    for (WebRtc_UWord8 j = 0; j < rtpHeader->header.numCSRCs; ++j) {
+      if (csrc == rtpHeader->header.arrOfCSRCs[j]) {
+        foundMatch = true;
+        break;
+      }
+    }
+    if (!foundMatch && csrc) {
+      // Did not find it, report as removed.
+      haveCalledCallback = true;
+      _cbRtpFeedback->OnIncomingCSRCChanged(_id, csrc, false);
+    }
+  }
+  if (!haveCalledCallback) {
+    // If the CSRC list contain non-unique entries we will endup here.
+    // Using CSRC 0 to signal this event, not interop safe, other
+    // implementations might have CSRC 0 as avalid value.
+    if (numCSRCsDiff > 0) {
+      _cbRtpFeedback->OnIncomingCSRCChanged(_id, 0, true);
+    } else if (numCSRCsDiff < 0) {
+      _cbRtpFeedback->OnIncomingCSRCChanged(_id, 0, false);
+    }
+  }
+}
+
+WebRtc_Word32
+RTPReceiver::ResetStatistics()
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+    _lastReportInorderPackets = 0;
+    _lastReportOldPackets = 0;
+    _lastReportSeqMax = 0;
+    _lastReportFractionLost = 0;
+    _lastReportCumulativeLost = 0;
+    _lastReportExtendedHighSeqNum = 0;
+    _lastReportJitter = 0;
+    _lastReportJitterTransmissionTimeOffset = 0;
+    _jitterQ4 = 0;
+    _jitterMaxQ4 = 0;
+    _cumulativeLoss = 0;
+    _jitterQ4TransmissionTimeOffset = 0;
+    _receivedSeqWraps = 0;
+    _receivedSeqMax = 0;
+    _receivedSeqFirst = 0;
+    _receivedByteCount = 0;
+    _receivedOldPacketCount = 0;
+    _receivedInorderPacketCount = 0;
+    return 0;
+}
+
+WebRtc_Word32
+RTPReceiver::ResetDataCounters()
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+    _receivedByteCount = 0;
+    _receivedOldPacketCount = 0;
+    _receivedInorderPacketCount = 0;
+    _lastReportInorderPackets = 0;
+
+    return 0;
+}
+
+WebRtc_Word32
+RTPReceiver::Statistics(WebRtc_UWord8  *fraction_lost,
+                       WebRtc_UWord32 *cum_lost,
+                       WebRtc_UWord32 *ext_max,
+                       WebRtc_UWord32 *jitter,
+                       WebRtc_UWord32 *max_jitter,
+                       WebRtc_UWord32 *jitter_transmission_time_offset,
+                       bool reset) const
+{
+    WebRtc_Word32 missing;
+    return Statistics(fraction_lost,
+                      cum_lost,
+                      ext_max,
+                      jitter,
+                      max_jitter,
+                      jitter_transmission_time_offset,
+                      &missing,
+                      reset);
+}
+
+WebRtc_Word32
+RTPReceiver::Statistics(WebRtc_UWord8  *fraction_lost,
+                        WebRtc_UWord32 *cum_lost,
+                        WebRtc_UWord32 *ext_max,
+                        WebRtc_UWord32 *jitter,
+                        WebRtc_UWord32 *max_jitter,
+                        WebRtc_UWord32 *jitter_transmission_time_offset,
+                        WebRtc_Word32  *missing,
+                        bool reset) const
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+    if (missing == NULL)
+    {
+        return -1;
+    }
+    if(_receivedSeqFirst == 0 && _receivedByteCount == 0)
+    {
+        // we have not received anything
+        // -1 required by RTCP sender
+        return -1;
+    }
+    if(!reset)
+    {
+        if(_lastReportInorderPackets == 0)
+        {
+            // no report
+            return -1;
+        }
+        // just get last report
+        if(fraction_lost)
+        {
+            *fraction_lost = _lastReportFractionLost;
+        }
+        if(cum_lost)
+        {
+            *cum_lost = _lastReportCumulativeLost;  // 24 bits valid
+        }
+        if(ext_max)
+        {
+            *ext_max = _lastReportExtendedHighSeqNum;
+        }
+        if(jitter)
+        {
+            *jitter =_lastReportJitter;
+        }
+        if(max_jitter)
+        {
+            // note that the internal jitter value is in Q4
+            // and needs to be scaled by 1/16
+            *max_jitter = (_jitterMaxQ4 >> 4);
+        }
+        if(jitter_transmission_time_offset)
+        {
+            *jitter_transmission_time_offset =
+               _lastReportJitterTransmissionTimeOffset;
+        }
+        return 0;
+    }
+
+    if (_lastReportInorderPackets == 0)
+    {
+        // First time we send a report
+        _lastReportSeqMax = _receivedSeqFirst-1;
+    }
+    /*
+    *   calc fraction lost
+    */
+    WebRtc_UWord16 expSinceLast = (_receivedSeqMax - _lastReportSeqMax);
+
+    if(_lastReportSeqMax > _receivedSeqMax)
+    {
+        // can we assume that the seqNum can't go decrease over a full RTCP period ?
+        expSinceLast = 0;
+    }
+
+    // number of received RTP packets since last report, counts all packets but not re-transmissions
+    WebRtc_UWord32 recSinceLast = _receivedInorderPacketCount - _lastReportInorderPackets;
+
+    if(_nackMethod == kNackOff)
+    {
+        // this is needed for re-ordered packets
+        WebRtc_UWord32 oldPackets = _receivedOldPacketCount - _lastReportOldPackets;
+        recSinceLast += oldPackets;
+    }else
+    {
+        // with NACK we don't know the expected retransmitions during the last second
+        // we know how many "old" packets we have received we just count the numer of
+        // old received to estimate the loss but it still does not guarantee an exact number
+        // since we run this based on time triggered by sending of a RTP packet this
+        // should have a minimum effect
+
+        // with NACK we don't count old packets as received since they are re-transmitted
+        // we use RTT to decide if a packet is re-ordered or re-transmitted
+    }
+
+    *missing = 0;
+    if(expSinceLast > recSinceLast)
+    {
+        *missing = (expSinceLast - recSinceLast);
+    }
+    WebRtc_UWord8 fractionLost = 0;
+    if(expSinceLast)
+    {
+        // scale 0 to 255, where 255 is 100% loss
+        fractionLost = (WebRtc_UWord8) ((255 * (*missing)) / expSinceLast);
+    }
+    if(fraction_lost)
+    {
+        *fraction_lost = fractionLost;
+    }
+    // we need a counter for cumulative loss too
+    _cumulativeLoss += *missing;
+
+    if(_jitterQ4 > _jitterMaxQ4)
+    {
+        _jitterMaxQ4 = _jitterQ4;
+    }
+    if(cum_lost)
+    {
+        *cum_lost =  _cumulativeLoss;
+    }
+    if(ext_max)
+    {
+        *ext_max = (_receivedSeqWraps<<16) + _receivedSeqMax;
+    }
+    if(jitter)
+    {
+        // note that the internal jitter value is in Q4
+        // and needs to be scaled by 1/16
+        *jitter = (_jitterQ4 >> 4);
+    }
+    if(max_jitter)
+    {
+        // note that the internal jitter value is in Q4
+        // and needs to be scaled by 1/16
+        *max_jitter = (_jitterMaxQ4 >> 4);
+    }
+    if(jitter_transmission_time_offset)
+    {
+        // note that the internal jitter value is in Q4
+        // and needs to be scaled by 1/16
+        *jitter_transmission_time_offset =
+            (_jitterQ4TransmissionTimeOffset >> 4);
+    }
+    if(reset)
+    {
+        // store this report
+        _lastReportFractionLost = fractionLost;
+        _lastReportCumulativeLost = _cumulativeLoss;  // 24 bits valid
+        _lastReportExtendedHighSeqNum = (_receivedSeqWraps<<16) + _receivedSeqMax;
+        _lastReportJitter  = (_jitterQ4 >> 4);
+        _lastReportJitterTransmissionTimeOffset =
+            (_jitterQ4TransmissionTimeOffset >> 4);
+
+        // only for report blocks in RTCP SR and RR
+        _lastReportInorderPackets = _receivedInorderPacketCount;
+        _lastReportOldPackets = _receivedOldPacketCount;
+        _lastReportSeqMax = _receivedSeqMax;
+    }
+    return 0;
+}
+
+WebRtc_Word32
+RTPReceiver::DataCounters(WebRtc_UWord32 *bytesReceived,
+                          WebRtc_UWord32 *packetsReceived) const
+{
+    CriticalSectionScoped lock(_criticalSectionRTPReceiver);
+
+    if(bytesReceived)
+    {
+        *bytesReceived = _receivedByteCount;
+    }
+    if(packetsReceived)
+    {
+        *packetsReceived = _receivedOldPacketCount + _receivedInorderPacketCount;
+    }
+    return 0;
+}
+
+void
+RTPReceiver::ProcessBitrate()
+{
+    CriticalSectionScoped cs(_criticalSectionRTPReceiver);
+
+    Bitrate::Process();
+}
+} // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/rtp_receiver.h b/src/modules/rtp_rtcp/source/rtp_receiver.h
new file mode 100644
index 0000000..1ee824b
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_receiver.h
@@ -0,0 +1,260 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_H_
+
+#include <map>
+
+#include "typedefs.h"
+#include "rtp_utility.h"
+
+#include "rtp_header_extension.h"
+#include "rtp_rtcp.h"
+#include "rtp_rtcp_defines.h"
+#include "rtp_receiver_audio.h"
+#include "rtp_receiver_video.h"
+#include "rtcp_receiver_help.h"
+#include "Bitrate.h"
+
+namespace webrtc {
+class RtpRtcpFeedback;
+class ModuleRtpRtcpImpl;
+class Trace;
+
+class RTPReceiver : public RTPReceiverAudio, public RTPReceiverVideo, public Bitrate
+{
+public:
+    RTPReceiver(const WebRtc_Word32 id,
+                const bool audio,
+                RtpRtcpClock* clock,
+                RemoteBitrateEstimator* remote_bitrate,
+                ModuleRtpRtcpImpl* owner);
+
+    virtual ~RTPReceiver();
+
+    RtpVideoCodecTypes VideoCodecType() const;
+    WebRtc_UWord32 MaxConfiguredBitrate() const;
+
+    WebRtc_Word32 SetPacketTimeout(const WebRtc_UWord32 timeoutMS);
+    void PacketTimeout();
+
+    void ProcessDeadOrAlive(const bool RTCPalive, const WebRtc_Word64 now);
+
+    void ProcessBitrate();
+
+    WebRtc_Word32 RegisterIncomingDataCallback(RtpData* incomingDataCallback);
+    WebRtc_Word32 RegisterIncomingRTPCallback(RtpFeedback* incomingMessagesCallback);
+
+    WebRtc_Word32 RegisterReceivePayload(
+        const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+        const WebRtc_Word8 payloadType,
+        const WebRtc_UWord32 frequency,
+        const WebRtc_UWord8 channels,
+        const WebRtc_UWord32 rate);
+
+    WebRtc_Word32 DeRegisterReceivePayload(const WebRtc_Word8 payloadType);
+
+    WebRtc_Word32 ReceivePayloadType(
+        const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+        const WebRtc_UWord32 frequency,
+        const WebRtc_UWord8 channels,
+        const WebRtc_UWord32 rate,
+        WebRtc_Word8* payloadType) const;
+
+    WebRtc_Word32 ReceivePayload(const WebRtc_Word8 payloadType,
+                                 char payloadName[RTP_PAYLOAD_NAME_SIZE],
+                                 WebRtc_UWord32* frequency,
+                                 WebRtc_UWord8* channels,
+                                 WebRtc_UWord32* rate) const;
+
+    WebRtc_Word32 RemotePayload(char payloadName[RTP_PAYLOAD_NAME_SIZE],
+                                WebRtc_Word8* payloadType,
+                                WebRtc_UWord32* frequency,
+                                WebRtc_UWord8* channels) const;
+
+    WebRtc_Word32 IncomingRTPPacket(WebRtcRTPHeader* rtpheader,
+                                    const WebRtc_UWord8* incomingRtpPacket,
+                                    const WebRtc_UWord16 incomingRtpPacketLengt);
+
+    NACKMethod NACK() const ;
+
+    // Turn negative acknowledgement requests on/off
+    WebRtc_Word32 SetNACKStatus(const NACKMethod method);
+
+
+    // last received
+    virtual WebRtc_UWord32 TimeStamp() const;
+    virtual WebRtc_UWord16 SequenceNumber() const;
+
+    WebRtc_Word32 EstimatedRemoteTimeStamp(WebRtc_UWord32& timestamp) const;
+
+    WebRtc_UWord32 SSRC() const;
+
+    WebRtc_Word32 CSRCs( WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]) const;
+
+    WebRtc_Word32 Energy( WebRtc_UWord8 arrOfEnergy[kRtpCsrcSize]) const;
+
+    // get the currently configured SSRC filter
+    WebRtc_Word32 SSRCFilter(WebRtc_UWord32& allowedSSRC) const;
+
+    // set a SSRC to be used as a filter for incoming RTP streams
+    WebRtc_Word32 SetSSRCFilter(const bool enable, const WebRtc_UWord32 allowedSSRC);
+
+    WebRtc_Word32 Statistics(WebRtc_UWord8  *fraction_lost,
+                             WebRtc_UWord32 *cum_lost,
+                             WebRtc_UWord32 *ext_max,
+                             WebRtc_UWord32 *jitter,  // will be moved from JB
+                             WebRtc_UWord32 *max_jitter,
+                             WebRtc_UWord32 *jitter_transmission_time_offset,
+                             bool reset) const;
+
+    WebRtc_Word32 Statistics(WebRtc_UWord8  *fraction_lost,
+                             WebRtc_UWord32 *cum_lost,
+                             WebRtc_UWord32 *ext_max,
+                             WebRtc_UWord32 *jitter,  // will be moved from JB
+                             WebRtc_UWord32 *max_jitter,
+                             WebRtc_UWord32 *jitter_transmission_time_offset,
+                             WebRtc_Word32 *missing,
+                             bool reset) const;
+
+    WebRtc_Word32 DataCounters(WebRtc_UWord32 *bytesReceived,
+                               WebRtc_UWord32 *packetsReceived) const;
+
+    WebRtc_Word32 ResetStatistics();
+
+    WebRtc_Word32 ResetDataCounters();
+
+    WebRtc_UWord16 PacketOHReceived() const;
+
+    WebRtc_UWord32 PacketCountReceived() const;
+
+    WebRtc_UWord32 ByteCountReceived() const;
+
+    WebRtc_Word32 RegisterRtpHeaderExtension(const RTPExtensionType type,
+                                             const WebRtc_UWord8 id);
+
+    WebRtc_Word32 DeregisterRtpHeaderExtension(const RTPExtensionType type);
+
+    void GetHeaderExtensionMapCopy(RtpHeaderExtensionMap* map) const;
+
+    virtual WebRtc_UWord32 PayloadTypeToPayload(const WebRtc_UWord8 payloadType,
+                                                ModuleRTPUtility::Payload*& payload) const;
+    /*
+    *  RTX
+    */
+    void SetRTXStatus(const bool enable, const WebRtc_UWord32 SSRC);
+
+    void RTXStatus(bool* enable, WebRtc_UWord32* SSRC) const;
+
+protected:
+    virtual WebRtc_Word32 CallbackOfReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                                                        const WebRtc_UWord16 payloadSize,
+                                                        const WebRtcRTPHeader* rtpHeader);
+
+    virtual bool RetransmitOfOldPacket(const WebRtc_UWord16 sequenceNumber,
+                                       const WebRtc_UWord32 rtpTimeStamp) const;
+
+
+    void UpdateStatistics(const WebRtcRTPHeader* rtpHeader,
+                          const WebRtc_UWord16 bytes,
+                          const bool oldPacket);
+
+    virtual WebRtc_Word8 REDPayloadType() const;
+
+private:
+    // Is RED configured with payload type payloadType
+    bool REDPayloadType(const WebRtc_Word8 payloadType) const;
+
+    bool InOrderPacket(const WebRtc_UWord16 sequenceNumber) const;
+
+    void CheckSSRCChanged(const WebRtcRTPHeader* rtpHeader);
+    void CheckCSRC(const WebRtcRTPHeader* rtpHeader);
+    WebRtc_Word32 CheckPayloadChanged(const WebRtcRTPHeader* rtpHeader,
+                                      const WebRtc_Word8 firstPayloadByte,
+                                      bool& isRED,
+                                      ModuleRTPUtility::AudioPayload& audioSpecific,
+                                      ModuleRTPUtility::VideoPayload& videoSpecific);
+
+    void UpdateNACKBitRate(WebRtc_Word32 bytes, WebRtc_UWord32 now);
+    bool ProcessNACKBitRate(WebRtc_UWord32 now);
+
+private:
+    WebRtc_Word32           _id;
+    const bool              _audio;
+    ModuleRtpRtcpImpl&      _rtpRtcp;
+
+    CriticalSectionWrapper* _criticalSectionCbs;
+    RtpFeedback*            _cbRtpFeedback;
+    RtpData*                _cbRtpData;
+
+    CriticalSectionWrapper* _criticalSectionRTPReceiver;
+    mutable WebRtc_Word64   _lastReceiveTime;
+    WebRtc_UWord16          _lastReceivedPayloadLength;
+    WebRtc_Word8            _lastReceivedPayloadType;
+    WebRtc_Word8            _lastReceivedMediaPayloadType;
+
+    ModuleRTPUtility::AudioPayload _lastReceivedAudioSpecific;
+    ModuleRTPUtility::VideoPayload _lastReceivedVideoSpecific;
+
+    WebRtc_UWord32            _packetTimeOutMS;
+    WebRtc_Word8              _redPayloadType;
+
+    std::map<WebRtc_Word8, ModuleRTPUtility::Payload*> _payloadTypeMap;
+    RtpHeaderExtensionMap     _rtpHeaderExtensionMap;
+
+    // SSRCs
+    WebRtc_UWord32            _SSRC;
+    WebRtc_UWord8             _numCSRCs;
+    WebRtc_UWord32            _currentRemoteCSRC[kRtpCsrcSize];
+    WebRtc_UWord8             _numEnergy;
+    WebRtc_UWord8             _currentRemoteEnergy[kRtpCsrcSize];
+
+    bool                      _useSSRCFilter;
+    WebRtc_UWord32            _SSRCFilter;
+
+    // stats on received RTP packets
+    WebRtc_UWord32            _jitterQ4;
+    mutable WebRtc_UWord32    _jitterMaxQ4;
+    mutable WebRtc_UWord32    _cumulativeLoss;
+    WebRtc_UWord32            _jitterQ4TransmissionTimeOffset;
+
+    WebRtc_UWord32            _localTimeLastReceivedTimestamp;
+    WebRtc_UWord32            _lastReceivedTimestamp;
+    WebRtc_UWord16            _lastReceivedSequenceNumber;
+    WebRtc_Word32             _lastReceivedTransmissionTimeOffset;
+    WebRtc_UWord16            _receivedSeqFirst;
+    WebRtc_UWord16            _receivedSeqMax;
+    WebRtc_UWord16            _receivedSeqWraps;
+
+    // current counter values
+    WebRtc_UWord16            _receivedPacketOH;
+    WebRtc_UWord32            _receivedByteCount;
+    WebRtc_UWord32            _receivedOldPacketCount;
+    WebRtc_UWord32            _receivedInorderPacketCount;
+
+    // counter values when we sent the last report
+    mutable WebRtc_UWord32    _lastReportInorderPackets;
+    mutable WebRtc_UWord32    _lastReportOldPackets;
+    mutable WebRtc_UWord16    _lastReportSeqMax;
+    mutable WebRtc_UWord8     _lastReportFractionLost;
+    mutable WebRtc_UWord32    _lastReportCumulativeLost;  // 24 bits valid
+    mutable WebRtc_UWord32    _lastReportExtendedHighSeqNum;
+    mutable WebRtc_UWord32    _lastReportJitter;
+    mutable WebRtc_UWord32    _lastReportJitterTransmissionTimeOffset;
+
+    NACKMethod _nackMethod;
+
+    bool _RTX;
+    WebRtc_UWord32 _ssrcRTX;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_H_
diff --git a/src/modules/rtp_rtcp/source/rtp_receiver_audio.cc b/src/modules/rtp_rtcp/source/rtp_receiver_audio.cc
new file mode 100644
index 0000000..a57da75
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_receiver_audio.cc
@@ -0,0 +1,355 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtp_receiver_audio.h"
+
+#include <cassert> //assert
+#include <cstring> // memcpy()
+#include <math.h>    // pow()
+
+#include "critical_section_wrapper.h"
+
+namespace webrtc {
+RTPReceiverAudio::RTPReceiverAudio(const WebRtc_Word32 id):
+    _id(id),
+    _lastReceivedFrequency(8000),
+    _telephoneEvent(false),
+    _telephoneEventForwardToDecoder(false),
+    _telephoneEventDetectEndOfTone(false),
+    _telephoneEventPayloadType(-1),
+    _cngNBPayloadType(-1),
+    _cngWBPayloadType(-1),
+    _cngSWBPayloadType(-1),
+    _cngPayloadType(-1),
+    _G722PayloadType(-1),
+    _lastReceivedG722(false),
+    _criticalSectionFeedback(CriticalSectionWrapper::CreateCriticalSection()),
+    _cbAudioFeedback(NULL)
+{
+}
+
+RTPReceiverAudio::~RTPReceiverAudio()
+{
+    delete _criticalSectionFeedback;
+}
+
+WebRtc_Word32
+RTPReceiverAudio::RegisterIncomingAudioCallback(RtpAudioFeedback* incomingMessagesCallback)
+{
+    CriticalSectionScoped lock(_criticalSectionFeedback);
+    _cbAudioFeedback = incomingMessagesCallback;
+    return 0;
+}
+
+WebRtc_UWord32
+RTPReceiverAudio::AudioFrequency() const
+{
+    if(_lastReceivedG722)
+    {
+        return 8000;
+    }
+    return _lastReceivedFrequency;
+}
+
+// Outband TelephoneEvent(DTMF) detection
+WebRtc_Word32
+RTPReceiverAudio::SetTelephoneEventStatus(const bool enable,
+                                          const bool forwardToDecoder,
+                                          const bool detectEndOfTone)
+{
+    _telephoneEvent= enable;
+    _telephoneEventDetectEndOfTone = detectEndOfTone;
+    _telephoneEventForwardToDecoder = forwardToDecoder;
+    return 0;
+}
+
+ // Is outband TelephoneEvent(DTMF) turned on/off?
+bool
+RTPReceiverAudio::TelephoneEvent() const
+{
+    return _telephoneEvent;
+}
+
+// Is forwarding of outband telephone events turned on/off?
+bool
+RTPReceiverAudio::TelephoneEventForwardToDecoder() const
+{
+    return _telephoneEventForwardToDecoder;
+}
+
+bool
+RTPReceiverAudio::TelephoneEventPayloadType(const WebRtc_Word8 payloadType) const
+{
+    return (_telephoneEventPayloadType == payloadType)?true:false;
+}
+
+bool
+RTPReceiverAudio::CNGPayloadType(const WebRtc_Word8 payloadType,
+                                 WebRtc_UWord32& frequency)
+{
+    //  we can have three CNG on 8000Hz, 16000Hz and 32000Hz
+    if(_cngNBPayloadType == payloadType)
+    {
+        frequency = 8000;
+        if ((_cngPayloadType != -1) &&(_cngPayloadType !=_cngNBPayloadType))
+        {
+            ResetStatistics();
+        }
+        _cngPayloadType = _cngNBPayloadType;
+        return true;
+    } else if(_cngWBPayloadType == payloadType)
+    {
+        // if last received codec is G.722 we must use frequency 8000
+        if(_lastReceivedG722)
+        {
+            frequency = 8000;
+        } else
+        {
+            frequency = 16000;
+        }
+        if ((_cngPayloadType != -1) &&(_cngPayloadType !=_cngWBPayloadType))
+        {
+            ResetStatistics();
+        }
+        _cngPayloadType = _cngWBPayloadType;
+        return true;
+    }else if(_cngSWBPayloadType == payloadType)
+    {
+        frequency = 32000;
+        if ((_cngPayloadType != -1) &&(_cngPayloadType !=_cngSWBPayloadType))
+        {
+            ResetStatistics();
+        }
+        _cngPayloadType = _cngSWBPayloadType;
+        return true;
+    }else
+    {
+        //  not CNG
+        if(_G722PayloadType == payloadType)
+        {
+            _lastReceivedG722 = true;
+        }else
+        {
+            _lastReceivedG722 = false;
+        }
+    }
+    return false;
+}
+
+/*
+   Sample based or frame based codecs based on RFC 3551
+
+   NOTE! There is one error in the RFC, stating G.722 uses 8 bits/samples.
+   The correct rate is 4 bits/sample.
+
+   name of                              sampling              default
+   encoding  sample/frame  bits/sample      rate  ms/frame  ms/packet
+
+   Sample based audio codecs
+   DVI4      sample        4                var.                   20
+   G722      sample        4              16,000                   20
+   G726-40   sample        5               8,000                   20
+   G726-32   sample        4               8,000                   20
+   G726-24   sample        3               8,000                   20
+   G726-16   sample        2               8,000                   20
+   L8        sample        8                var.                   20
+   L16       sample        16               var.                   20
+   PCMA      sample        8                var.                   20
+   PCMU      sample        8                var.                   20
+
+   Frame based audio codecs
+   G723      frame         N/A             8,000        30         30
+   G728      frame         N/A             8,000       2.5         20
+   G729      frame         N/A             8,000        10         20
+   G729D     frame         N/A             8,000        10         20
+   G729E     frame         N/A             8,000        10         20
+   GSM       frame         N/A             8,000        20         20
+   GSM-EFR   frame         N/A             8,000        20         20
+   LPC       frame         N/A             8,000        20         20
+   MPA       frame         N/A              var.      var.
+
+   G7221     frame         N/A
+*/
+
+ModuleRTPUtility::Payload* RTPReceiverAudio::RegisterReceiveAudioPayload(
+    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+    const WebRtc_Word8 payloadType,
+    const WebRtc_UWord32 frequency,
+    const WebRtc_UWord8 channels,
+    const WebRtc_UWord32 rate) {
+  if (ModuleRTPUtility::StringCompare(payloadName, "telephone-event", 15)) {
+    _telephoneEventPayloadType = payloadType;
+  }
+  if (ModuleRTPUtility::StringCompare(payloadName, "cn", 2)) {
+    //  we can have three CNG on 8000Hz, 16000Hz and 32000Hz
+    if(frequency == 8000){
+      _cngNBPayloadType = payloadType;
+    } else if(frequency == 16000) {
+      _cngWBPayloadType = payloadType;
+    } else if(frequency == 32000) {
+      _cngSWBPayloadType = payloadType;
+    } else {
+      assert(false);
+      return NULL;
+    }
+  }
+
+  ModuleRTPUtility::Payload* payload = new ModuleRTPUtility::Payload;
+  payload->name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
+  strncpy(payload->name, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
+  payload->typeSpecific.Audio.frequency = frequency;
+  payload->typeSpecific.Audio.channels = channels;
+  payload->typeSpecific.Audio.rate = rate;
+  payload->audio = true;
+  return payload;
+}
+
+// we are not allowed to have any critsects when calling CallbackOfReceivedPayloadData
+WebRtc_Word32
+RTPReceiverAudio::ParseAudioCodecSpecific(WebRtcRTPHeader* rtpHeader,
+                                          const WebRtc_UWord8* payloadData,
+                                          const WebRtc_UWord16 payloadLength,
+                                          const ModuleRTPUtility::AudioPayload& audioSpecific,
+                                          const bool isRED)
+{
+    WebRtc_UWord8 newEvents[MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS];
+    WebRtc_UWord8 removedEvents[MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS];
+    WebRtc_UWord8 numberOfNewEvents = 0;
+    WebRtc_UWord8 numberOfRemovedEvents = 0;
+    bool telephoneEventPacket = TelephoneEventPayloadType(rtpHeader->header.payloadType);
+
+    if(payloadLength == 0)
+    {
+        return 0;
+    }
+
+    {
+        CriticalSectionScoped lock(_criticalSectionFeedback);
+
+        if(telephoneEventPacket)
+        {
+            // RFC 4733 2.3
+            /*
+                0                   1                   2                   3
+                0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+                +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+                |     event     |E|R| volume    |          duration             |
+                +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+            */
+            if(payloadLength % 4 != 0)
+            {
+                return -1;
+            }
+            WebRtc_UWord8 numberOfEvents = payloadLength / 4;
+
+            // sanity
+            if(numberOfEvents >= MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS)
+            {
+                numberOfEvents = MAX_NUMBER_OF_PARALLEL_TELEPHONE_EVENTS;
+            }
+            for (int n = 0; n < numberOfEvents; n++)
+            {
+                bool end = (payloadData[(4*n)+1] & 0x80)? true:false;
+
+                std::set<WebRtc_UWord8>::iterator event =
+                    _telephoneEventReported.find(payloadData[4*n]);
+
+                if(event != _telephoneEventReported.end())
+                {
+                    // we have already seen this event
+                    if(end)
+                    {
+                        removedEvents[numberOfRemovedEvents]= payloadData[4*n];
+                        numberOfRemovedEvents++;
+                        _telephoneEventReported.erase(payloadData[4*n]);
+                    }
+                }else
+                {
+                    if(end)
+                    {
+                        // don't add if it's a end of a tone
+                    }else
+                    {
+                        newEvents[numberOfNewEvents] = payloadData[4*n];
+                        numberOfNewEvents++;
+                        _telephoneEventReported.insert(payloadData[4*n]);
+                    }
+                }
+            }
+
+            // RFC 4733 2.5.1.3 & 2.5.2.3 Long-Duration Events
+            // should not be a problem since we don't care about the duration
+
+            // RFC 4733 See 2.5.1.5. & 2.5.2.4.  Multiple Events in a Packet
+        }
+
+        if(_telephoneEvent && _cbAudioFeedback)
+        {
+            for (int n = 0; n < numberOfNewEvents; n++)
+            {
+                _cbAudioFeedback->OnReceivedTelephoneEvent(_id, newEvents[n], false);
+            }
+            if(_telephoneEventDetectEndOfTone)
+            {
+                for (int n = 0; n < numberOfRemovedEvents; n++)
+                {
+                    _cbAudioFeedback->OnReceivedTelephoneEvent(_id, removedEvents[n], true);
+                }
+            }
+        }
+    }
+    if(! telephoneEventPacket )
+    {
+        _lastReceivedFrequency = audioSpecific.frequency;
+    }
+
+    // Check if this is a CNG packet, receiver might want to know
+    WebRtc_UWord32 dummy;
+    if(CNGPayloadType(rtpHeader->header.payloadType, dummy))
+    {
+        rtpHeader->type.Audio.isCNG=true;
+        rtpHeader->frameType = kAudioFrameCN;
+    }else
+    {
+        rtpHeader->frameType = kAudioFrameSpeech;
+        rtpHeader->type.Audio.isCNG=false;
+    }
+
+    // check if it's a DTMF event, hence something we can playout
+    if(telephoneEventPacket)
+    {
+        if(!_telephoneEventForwardToDecoder)
+        {
+            // don't forward event to decoder
+            return 0;
+        }
+        std::set<WebRtc_UWord8>::iterator first =
+            _telephoneEventReported.begin();
+        if(first != _telephoneEventReported.end() && *first > 15)
+        {
+            // don't forward non DTMF events
+            return 0;
+        }
+    }
+    if(isRED && !(payloadData[0] & 0x80))
+    {
+        // we recive only one frame packed in a RED packet remove the RED wrapper
+        rtpHeader->header.payloadType = payloadData[0];
+
+        // only one frame in the RED strip the one byte to help NetEq
+        return CallbackOfReceivedPayloadData(payloadData+1,
+                                             payloadLength-1,
+                                             rtpHeader);
+    }
+
+    rtpHeader->type.Audio.channel = audioSpecific.channels;
+    return CallbackOfReceivedPayloadData(payloadData, payloadLength, rtpHeader);
+}
+} // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/rtp_receiver_audio.h b/src/modules/rtp_rtcp/source/rtp_receiver_audio.h
new file mode 100644
index 0000000..0b0ba30
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_receiver_audio.h
@@ -0,0 +1,94 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_AUDIO_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_AUDIO_H_
+
+#include <set>
+
+#include "rtp_rtcp_defines.h"
+#include "rtp_utility.h"
+
+#include "typedefs.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+class RTPReceiverAudio
+{
+public:
+    RTPReceiverAudio(const WebRtc_Word32 id);
+    virtual ~RTPReceiverAudio();
+
+    WebRtc_Word32 RegisterIncomingAudioCallback(RtpAudioFeedback* incomingMessagesCallback);
+
+    ModuleRTPUtility::Payload* RegisterReceiveAudioPayload(
+        const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+        const WebRtc_Word8 payloadType,
+        const WebRtc_UWord32 frequency,
+        const WebRtc_UWord8 channels,
+        const WebRtc_UWord32 rate);
+
+    WebRtc_UWord32 AudioFrequency() const;
+
+    // Outband TelephoneEvent (DTMF) detection
+    WebRtc_Word32 SetTelephoneEventStatus(const bool enable,
+                                        const bool forwardToDecoder,
+                                        const bool detectEndOfTone);
+
+    // Is outband DTMF(AVT) turned on/off?
+    bool TelephoneEvent() const ;
+
+    // Is forwarding of outband telephone events turned on/off?
+    bool TelephoneEventForwardToDecoder() const ;
+
+    // Is TelephoneEvent configured with payload type payloadType
+    bool TelephoneEventPayloadType(const WebRtc_Word8 payloadType) const;
+
+    // Is CNG configured with payload type payloadType
+    bool CNGPayloadType(const WebRtc_Word8 payloadType, WebRtc_UWord32& frequency);
+
+    WebRtc_Word32 ParseAudioCodecSpecific(WebRtcRTPHeader* rtpHeader,
+                                        const WebRtc_UWord8* payloadData,
+                                        const WebRtc_UWord16 payloadLength,
+                                        const ModuleRTPUtility::AudioPayload& audioSpecific,
+                                        const bool isRED);
+
+    virtual WebRtc_Word32 ResetStatistics() = 0;
+
+protected:
+    virtual WebRtc_Word32 CallbackOfReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                                                      const WebRtc_UWord16 payloadSize,
+                                                      const WebRtcRTPHeader* rtpHeader) = 0;
+private:
+    WebRtc_Word32             _id;
+
+    WebRtc_UWord32            _lastReceivedFrequency;
+
+    bool                    _telephoneEvent;
+    bool                    _telephoneEventForwardToDecoder;
+    bool                    _telephoneEventDetectEndOfTone;
+    WebRtc_Word8            _telephoneEventPayloadType;
+    std::set<WebRtc_UWord8> _telephoneEventReported;
+
+    WebRtc_Word8              _cngNBPayloadType;
+    WebRtc_Word8              _cngWBPayloadType;
+    WebRtc_Word8              _cngSWBPayloadType;
+    WebRtc_Word8                _cngPayloadType;
+
+    // G722 is special since it use the wrong number of RTP samples in timestamp VS. number of samples in the frame
+    WebRtc_Word8              _G722PayloadType;
+    bool                    _lastReceivedG722;
+
+    CriticalSectionWrapper* _criticalSectionFeedback;
+    RtpAudioFeedback*   _cbAudioFeedback;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_AUDIO_H_
diff --git a/src/modules/rtp_rtcp/source/rtp_receiver_video.cc b/src/modules/rtp_rtcp/source/rtp_receiver_video.cc
new file mode 100644
index 0000000..0af375e
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_receiver_video.cc
@@ -0,0 +1,351 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtp_receiver_video.h"
+
+#include <cassert> //assert
+#include <cstring>  // memcpy()
+#include <math.h>
+
+#include "critical_section_wrapper.h"
+#include "receiver_fec.h"
+#include "rtp_rtcp_impl.h"
+#include "rtp_utility.h"
+#include "trace.h"
+
+namespace webrtc {
+WebRtc_UWord32 BitRateBPS(WebRtc_UWord16 x )
+{
+    return (x & 0x3fff) * WebRtc_UWord32(pow(10.0f,(2 + (x >> 14))));
+}
+
+RTPReceiverVideo::RTPReceiverVideo(const WebRtc_Word32 id,
+                                   RemoteBitrateEstimator* remote_bitrate,
+                                   ModuleRtpRtcpImpl* owner)
+    : _id(id),
+      _criticalSectionReceiverVideo(
+          CriticalSectionWrapper::CreateCriticalSection()),
+      _currentFecFrameDecoded(false),
+      _receiveFEC(NULL),
+      remote_bitrate_(remote_bitrate),
+      _packetOverHead(28) {
+}
+
+RTPReceiverVideo::~RTPReceiverVideo() {
+    delete _criticalSectionReceiverVideo;
+    delete _receiveFEC;
+}
+
+ModuleRTPUtility::Payload* RTPReceiverVideo::RegisterReceiveVideoPayload(
+    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+    const WebRtc_Word8 payloadType,
+    const WebRtc_UWord32 maxRate) {
+  RtpVideoCodecTypes videoType = kRtpNoVideo;
+  if (ModuleRTPUtility::StringCompare(payloadName, "VP8", 3)) {
+    videoType = kRtpVp8Video;
+  } else if (ModuleRTPUtility::StringCompare(payloadName, "I420", 4)) {
+    videoType = kRtpNoVideo;
+  } else if (ModuleRTPUtility::StringCompare(payloadName, "ULPFEC", 6)) {
+    // store this
+    if (_receiveFEC == NULL) {
+      _receiveFEC = new ReceiverFEC(_id, this);
+    }
+    _receiveFEC->SetPayloadTypeFEC(payloadType);
+    videoType = kRtpFecVideo;
+  } else {
+    return NULL;
+  }
+  ModuleRTPUtility::Payload* payload =  new ModuleRTPUtility::Payload;
+
+  payload->name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
+  strncpy(payload->name, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
+  payload->typeSpecific.Video.videoCodecType = videoType;
+  payload->typeSpecific.Video.maxRate = maxRate;
+  payload->audio = false;
+  return payload;
+}
+
+// we have no critext when calling this
+// we are not allowed to have any critsects when calling
+// CallbackOfReceivedPayloadData
+WebRtc_Word32 RTPReceiverVideo::ParseVideoCodecSpecific(
+    WebRtcRTPHeader* rtpHeader,
+    const WebRtc_UWord8* payloadData,
+    const WebRtc_UWord16 payloadDataLength,
+    const RtpVideoCodecTypes videoType,
+    const bool isRED,
+    const WebRtc_UWord8* incomingRtpPacket,
+    const WebRtc_UWord16 incomingRtpPacketSize,
+    const WebRtc_Word64 nowMS) {
+  WebRtc_Word32 retVal = 0;
+
+  _criticalSectionReceiverVideo->Enter();
+
+  // Add headers, ideally we would like to include for instance
+  // Ethernet header here as well.
+  const WebRtc_UWord16 packetSize = payloadDataLength + _packetOverHead +
+      rtpHeader->header.headerLength + rtpHeader->header.paddingLength;
+  uint32_t compensated_timestamp = rtpHeader->header.timestamp +
+      rtpHeader->extension.transmissionTimeOffset;
+  remote_bitrate_->IncomingPacket(rtpHeader->header.ssrc,
+                                  packetSize,
+                                  nowMS,
+                                  compensated_timestamp,
+                                  -1);
+
+  if (isRED) {
+    if(_receiveFEC == NULL) {
+      _criticalSectionReceiverVideo->Leave();
+      return -1;
+    }
+    bool FECpacket = false;
+    retVal = _receiveFEC->AddReceivedFECPacket(
+        rtpHeader,
+        incomingRtpPacket,
+        payloadDataLength,
+        FECpacket);
+    if (retVal != -1) {
+      retVal = _receiveFEC->ProcessReceivedFEC();
+    }
+    _criticalSectionReceiverVideo->Leave();
+
+    if(retVal == 0 && FECpacket) {
+      // Callback with the received FEC packet.
+      // The normal packets are delivered after parsing.
+      // This contains the original RTP packet header but with
+      // empty payload and data length.
+      rtpHeader->frameType = kFrameEmpty;
+      // We need this for the routing.
+      WebRtc_Word32 retVal = SetCodecType(videoType, rtpHeader);
+      if(retVal != 0) {
+        return retVal;
+      }
+      retVal = CallbackOfReceivedPayloadData(NULL, 0, rtpHeader);
+    }
+  } else {
+    // will leave the _criticalSectionReceiverVideo critsect
+    retVal = ParseVideoCodecSpecificSwitch(rtpHeader,
+                                           payloadData,
+                                           payloadDataLength,
+                                           videoType);
+  }
+  return retVal;
+}
+
+WebRtc_Word32 RTPReceiverVideo::BuildRTPheader(
+    const WebRtcRTPHeader* rtpHeader,
+    WebRtc_UWord8* dataBuffer) const {
+  dataBuffer[0] = static_cast<WebRtc_UWord8>(0x80);  // version 2
+  dataBuffer[1] = static_cast<WebRtc_UWord8>(rtpHeader->header.payloadType);
+  if (rtpHeader->header.markerBit) {
+    dataBuffer[1] |= kRtpMarkerBitMask;  // MarkerBit is 1
+  }
+  ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer + 2,
+                                          rtpHeader->header.sequenceNumber);
+  ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer + 4,
+                                          rtpHeader->header.timestamp);
+  ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer + 8,
+                                          rtpHeader->header.ssrc);
+
+  WebRtc_Word32 rtpHeaderLength = 12;
+
+  // Add the CSRCs if any
+  if (rtpHeader->header.numCSRCs > 0) {
+    if (rtpHeader->header.numCSRCs > 16) {
+      // error
+      assert(false);
+    }
+    WebRtc_UWord8* ptr = &dataBuffer[rtpHeaderLength];
+    for (WebRtc_UWord32 i = 0; i < rtpHeader->header.numCSRCs; ++i) {
+      ModuleRTPUtility::AssignUWord32ToBuffer(ptr,
+                                              rtpHeader->header.arrOfCSRCs[i]);
+      ptr +=4;
+    }
+    dataBuffer[0] = (dataBuffer[0]&0xf0) | rtpHeader->header.numCSRCs;
+    // Update length of header
+    rtpHeaderLength += sizeof(WebRtc_UWord32)*rtpHeader->header.numCSRCs;
+  }
+  return rtpHeaderLength;
+}
+
+WebRtc_Word32 RTPReceiverVideo::ReceiveRecoveredPacketCallback(
+    WebRtcRTPHeader* rtpHeader,
+    const WebRtc_UWord8* payloadData,
+    const WebRtc_UWord16 payloadDataLength) {
+  // TODO(pwestin) Re-factor this to avoid the messy critsect handling.
+  _criticalSectionReceiverVideo->Enter();
+
+  _currentFecFrameDecoded = true;
+
+  ModuleRTPUtility::Payload* payload = NULL;
+  if (PayloadTypeToPayload(rtpHeader->header.payloadType, payload) != 0) {
+    _criticalSectionReceiverVideo->Leave();
+    return -1;
+  }
+  // here we can re-create the original lost packet so that we can use it for
+  // the relay we need to re-create the RED header too
+  WebRtc_UWord8 recoveredPacket[IP_PACKET_SIZE];
+  WebRtc_UWord16 rtpHeaderLength = (WebRtc_UWord16)BuildRTPheader(
+      rtpHeader, recoveredPacket);
+
+  const WebRtc_UWord8 REDForFECHeaderLength = 1;
+
+  // replace pltype
+  recoveredPacket[1] &= 0x80;             // reset
+  recoveredPacket[1] += REDPayloadType(); // replace with RED payload type
+
+  // add RED header
+  recoveredPacket[rtpHeaderLength] = rtpHeader->header.payloadType;
+  // f-bit always 0
+
+  memcpy(recoveredPacket + rtpHeaderLength + REDForFECHeaderLength, payloadData,
+         payloadDataLength);
+
+  return ParseVideoCodecSpecificSwitch(
+      rtpHeader,
+      payloadData,
+      payloadDataLength,
+      payload->typeSpecific.Video.videoCodecType);
+}
+
+WebRtc_Word32 RTPReceiverVideo::SetCodecType(const RtpVideoCodecTypes videoType,
+                                             WebRtcRTPHeader* rtpHeader) const {
+  switch (videoType) {
+    case kRtpNoVideo:
+      rtpHeader->type.Video.codec = kRTPVideoGeneric;
+      break;
+    case kRtpVp8Video:
+      rtpHeader->type.Video.codec = kRTPVideoVP8;
+      break;
+    case kRtpFecVideo:
+      rtpHeader->type.Video.codec = kRTPVideoFEC;
+      break;
+  }
+  return 0;
+}
+
+WebRtc_Word32 RTPReceiverVideo::ParseVideoCodecSpecificSwitch(
+    WebRtcRTPHeader* rtpHeader,
+    const WebRtc_UWord8* payloadData,
+    const WebRtc_UWord16 payloadDataLength,
+    const RtpVideoCodecTypes videoType) {
+  WebRtc_Word32 retVal = SetCodecType(videoType, rtpHeader);
+  if (retVal != 0) {
+    _criticalSectionReceiverVideo->Leave();
+    return retVal;
+  }
+  WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id, "%s(timestamp:%u)",
+               __FUNCTION__, rtpHeader->header.timestamp);
+
+  // All receive functions release _criticalSectionReceiverVideo before
+  // returning.
+  switch (videoType) {
+    case kRtpNoVideo:
+      return ReceiveGenericCodec(rtpHeader, payloadData, payloadDataLength);
+    case kRtpVp8Video:
+      return ReceiveVp8Codec(rtpHeader, payloadData, payloadDataLength);
+    case kRtpFecVideo:
+      break;
+  }
+  _criticalSectionReceiverVideo->Leave();
+  return -1;
+}
+
+WebRtc_Word32 RTPReceiverVideo::ReceiveVp8Codec(
+    WebRtcRTPHeader* rtpHeader,
+    const WebRtc_UWord8* payloadData,
+    const WebRtc_UWord16 payloadDataLength) {
+  bool success;
+  ModuleRTPUtility::RTPPayload parsedPacket;
+  if (payloadDataLength == 0) {
+    success = true;
+    parsedPacket.info.VP8.dataLength = 0;
+  } else {
+    ModuleRTPUtility::RTPPayloadParser rtpPayloadParser(kRtpVp8Video,
+                                                        payloadData,
+                                                        payloadDataLength,
+                                                        _id);
+
+    success = rtpPayloadParser.Parse(parsedPacket);
+  }
+  // from here down we only work on local data
+  _criticalSectionReceiverVideo->Leave();
+
+  if (!success) {
+    return -1;
+  }
+  if (parsedPacket.info.VP8.dataLength == 0) {
+    // we have an "empty" VP8 packet, it's ok, could be one way video
+    // Inform the jitter buffer about this packet.
+    rtpHeader->frameType = kFrameEmpty;
+    if (CallbackOfReceivedPayloadData(NULL, 0, rtpHeader) != 0) {
+      return -1;
+    }
+    return 0;
+  }
+  rtpHeader->frameType = (parsedPacket.frameType == ModuleRTPUtility::kIFrame) ?
+      kVideoFrameKey : kVideoFrameDelta;
+
+  RTPVideoHeaderVP8 *toHeader = &rtpHeader->type.Video.codecHeader.VP8;
+  ModuleRTPUtility::RTPPayloadVP8 *fromHeader = &parsedPacket.info.VP8;
+
+  rtpHeader->type.Video.isFirstPacket = fromHeader->beginningOfPartition
+      && (fromHeader->partitionID == 0);
+  toHeader->pictureId = fromHeader->hasPictureID ? fromHeader->pictureID :
+      kNoPictureId;
+  toHeader->tl0PicIdx = fromHeader->hasTl0PicIdx ? fromHeader->tl0PicIdx :
+      kNoTl0PicIdx;
+  if (fromHeader->hasTID) {
+    toHeader->temporalIdx = fromHeader->tID;
+    toHeader->layerSync = fromHeader->layerSync;
+  } else {
+    toHeader->temporalIdx = kNoTemporalIdx;
+    toHeader->layerSync = false;
+  }
+  toHeader->keyIdx = fromHeader->hasKeyIdx ? fromHeader->keyIdx : kNoKeyIdx;
+
+  toHeader->frameWidth = fromHeader->frameWidth;
+  toHeader->frameHeight = fromHeader->frameHeight;
+
+  toHeader->partitionId = fromHeader->partitionID;
+  toHeader->beginningOfPartition = fromHeader->beginningOfPartition;
+
+  if(CallbackOfReceivedPayloadData(parsedPacket.info.VP8.data,
+                                   parsedPacket.info.VP8.dataLength,
+                                   rtpHeader) != 0) {
+    return -1;
+  }
+  return 0;
+}
+
+
+WebRtc_Word32 RTPReceiverVideo::ReceiveGenericCodec(
+    WebRtcRTPHeader* rtpHeader,
+    const WebRtc_UWord8* payloadData,
+    const WebRtc_UWord16 payloadDataLength) {
+  rtpHeader->frameType = kVideoFrameKey;
+
+  if(((SequenceNumber() + 1) == rtpHeader->header.sequenceNumber) &&
+      (TimeStamp() != rtpHeader->header.timestamp)) {
+    rtpHeader->type.Video.isFirstPacket = true;
+  }
+  _criticalSectionReceiverVideo->Leave();
+
+  if(CallbackOfReceivedPayloadData(payloadData, payloadDataLength,
+                                   rtpHeader) != 0) {
+    return -1;
+  }
+  return 0;
+}
+
+void RTPReceiverVideo::SetPacketOverHead(WebRtc_UWord16 packetOverHead) {
+  _packetOverHead = packetOverHead;
+}
+} // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/rtp_receiver_video.h b/src/modules/rtp_rtcp/source/rtp_receiver_video.h
new file mode 100644
index 0000000..e50ef60
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_receiver_video.h
@@ -0,0 +1,113 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_VIDEO_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_VIDEO_H_
+
+#include "rtp_rtcp_defines.h"
+#include "rtp_utility.h"
+
+#include "typedefs.h"
+
+#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "modules/remote_bitrate_estimator/overuse_detector.h"
+#include "modules/remote_bitrate_estimator/remote_rate_control.h"
+#include "Bitrate.h"
+#include "scoped_ptr.h"
+
+namespace webrtc {
+class ReceiverFEC;
+class ModuleRtpRtcpImpl;
+class CriticalSectionWrapper;
+
+class RTPReceiverVideo {
+ public:
+  RTPReceiverVideo(const WebRtc_Word32 id,
+                   RemoteBitrateEstimator* remote_bitrate,
+                   ModuleRtpRtcpImpl* owner);
+
+  virtual ~RTPReceiverVideo();
+
+  ModuleRTPUtility::Payload* RegisterReceiveVideoPayload(
+      const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+      const WebRtc_Word8 payloadType,
+      const WebRtc_UWord32 maxRate);
+
+  WebRtc_Word32 ParseVideoCodecSpecific(
+      WebRtcRTPHeader* rtpHeader,
+      const WebRtc_UWord8* payloadData,
+      const WebRtc_UWord16 payloadDataLength,
+      const RtpVideoCodecTypes videoType,
+      const bool isRED,
+      const WebRtc_UWord8* incomingRtpPacket,
+      const WebRtc_UWord16 incomingRtpPacketSize,
+      const WebRtc_Word64 nowMS);
+
+  virtual WebRtc_Word32 ReceiveRecoveredPacketCallback(
+      WebRtcRTPHeader* rtpHeader,
+      const WebRtc_UWord8* payloadData,
+      const WebRtc_UWord16 payloadDataLength);
+
+  void SetPacketOverHead(WebRtc_UWord16 packetOverHead);
+
+ protected:
+  virtual WebRtc_Word32 CallbackOfReceivedPayloadData(
+      const WebRtc_UWord8* payloadData,
+      const WebRtc_UWord16 payloadSize,
+      const WebRtcRTPHeader* rtpHeader) = 0;
+
+  virtual WebRtc_UWord32 TimeStamp() const = 0;
+  virtual WebRtc_UWord16 SequenceNumber() const = 0;
+
+  virtual WebRtc_UWord32 PayloadTypeToPayload(
+      const WebRtc_UWord8 payloadType,
+      ModuleRTPUtility::Payload*& payload) const = 0;
+
+  virtual bool RetransmitOfOldPacket(
+      const WebRtc_UWord16 sequenceNumber,
+      const WebRtc_UWord32 rtpTimeStamp) const  = 0;
+
+  virtual WebRtc_Word8 REDPayloadType() const = 0;
+
+  WebRtc_Word32 SetCodecType(const RtpVideoCodecTypes videoType,
+                             WebRtcRTPHeader* rtpHeader) const;
+
+  WebRtc_Word32 ParseVideoCodecSpecificSwitch(
+      WebRtcRTPHeader* rtpHeader,
+      const WebRtc_UWord8* payloadData,
+      const WebRtc_UWord16 payloadDataLength,
+      const RtpVideoCodecTypes videoType);
+
+  WebRtc_Word32 ReceiveGenericCodec(WebRtcRTPHeader *rtpHeader,
+                                    const WebRtc_UWord8* payloadData,
+                                    const WebRtc_UWord16 payloadDataLength);
+
+  WebRtc_Word32 ReceiveVp8Codec(WebRtcRTPHeader *rtpHeader,
+                                const WebRtc_UWord8* payloadData,
+                                const WebRtc_UWord16 payloadDataLength);
+
+  WebRtc_Word32 BuildRTPheader(const WebRtcRTPHeader* rtpHeader,
+                               WebRtc_UWord8* dataBuffer) const;
+
+ private:
+  WebRtc_Word32             _id;
+
+  CriticalSectionWrapper*   _criticalSectionReceiverVideo;
+
+  // FEC
+  bool                      _currentFecFrameDecoded;
+  ReceiverFEC*              _receiveFEC;
+
+  // BWE
+  RemoteBitrateEstimator* remote_bitrate_;
+  WebRtc_UWord16            _packetOverHead;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RECEIVER_VIDEO_H_
diff --git a/src/modules/rtp_rtcp/source/rtp_rtcp.gypi b/src/modules/rtp_rtcp/source/rtp_rtcp.gypi
new file mode 100644
index 0000000..6fa9a5a
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_rtcp.gypi
@@ -0,0 +1,99 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'rtp_rtcp',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+        '<(webrtc_root)/modules/modules.gyp:remote_bitrate_estimator',
+      ],
+      'include_dirs': [
+        '../interface',
+        '../../interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../interface',
+          '../../interface',
+        ],
+      },
+      'sources': [
+        # Common
+        '../interface/rtp_rtcp.h',
+        '../interface/rtp_rtcp_defines.h',
+        'bitrate.cc',
+        'Bitrate.h',
+        'rtp_rtcp_config.h',
+        'rtp_rtcp_impl.cc',
+        'rtp_rtcp_impl.h',
+        'rtcp_receiver.cc',
+        'rtcp_receiver.h',
+        'rtcp_receiver_help.cc',
+        'rtcp_receiver_help.h',
+        'rtcp_sender.cc',
+        'rtcp_sender.h',
+        'rtcp_utility.cc',
+        'rtcp_utility.h',
+        'rtp_header_extension.cc',
+        'rtp_header_extension.h',
+        'rtp_receiver.cc',
+        'rtp_receiver.h',
+        'rtp_sender.cc',
+        'rtp_sender.h',
+        'rtp_utility.cc',
+        'rtp_utility.h',
+        'ssrc_database.cc',
+        'ssrc_database.h',
+        'tmmbr_help.cc',
+        'tmmbr_help.h',
+        # Audio Files
+        'dtmf_queue.cc',
+        'dtmf_queue.h',
+        'rtp_receiver_audio.cc',
+        'rtp_receiver_audio.h',
+        'rtp_sender_audio.cc',
+        'rtp_sender_audio.h',
+        # Video Files
+        'fec_private_tables_random.h',
+        'fec_private_tables_bursty.h',
+        'forward_error_correction.cc',
+        'forward_error_correction.h',
+        'forward_error_correction_internal.cc',
+        'forward_error_correction_internal.h',
+        'producer_fec.cc',
+        'producer_fec.h',
+        'rtp_packet_history.cc',
+        'rtp_packet_history.h',
+        'rtp_receiver_video.cc',
+        'rtp_receiver_video.h',
+        'rtp_sender_video.cc',
+        'rtp_sender_video.h',
+        'receiver_fec.cc',
+        'receiver_fec.h',
+        'video_codec_information.h',
+        'rtp_format_vp8.cc',
+        'rtp_format_vp8.h',
+        'transmission_bucket.cc',
+        'transmission_bucket.h',
+        'vp8_partition_aggregator.cc',
+        'vp8_partition_aggregator.h',
+        # Mocks
+        '../mocks/mock_rtp_rtcp.h',
+      ], # source
+    },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/modules/rtp_rtcp/source/rtp_rtcp_config.h b/src/modules/rtp_rtcp/source/rtp_rtcp_config.h
new file mode 100644
index 0000000..066b3c2
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_rtcp_config.h
@@ -0,0 +1,49 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RTCP_CONFIG_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RTCP_CONFIG_H_
+
+// Configuration file for RTP utilities (RTPSender, RTPReceiver ...)
+namespace webrtc {
+enum { kRtpRtcpMaxIdleTimeProcess = 5,
+       kRtpRtcpBitrateProcessTimeMs = 10,
+       kRtpRtcpPacketTimeoutProcessTimeMs = 100 };
+
+enum { NACK_PACKETS_MAX_SIZE    = 256 }; // in packets
+enum { NACK_BYTECOUNT_SIZE      = 60};   // size of our NACK history
+
+enum { RTCP_INTERVAL_VIDEO_MS       = 1000 };
+enum { RTCP_INTERVAL_AUDIO_MS       = 5000 };
+enum { RTCP_SEND_BEFORE_KEY_FRAME_MS= 100 };
+enum { RTCP_MAX_REPORT_BLOCKS       = 31};      // RFC 3550 page 37
+enum { RTCP_MIN_FRAME_LENGTH_MS     = 17};
+enum { kRtcpAppCode_DATA_SIZE           = 32*4};    // multiple of 4, this is not a limitation of the size
+enum { RTCP_RPSI_DATA_SIZE          = 30};
+enum { RTCP_NUMBER_OF_SR            = 60 };
+
+enum { MAX_NUMBER_OF_TEMPORAL_ID    = 8 };          // RFC
+enum { MAX_NUMBER_OF_DEPENDENCY_QUALITY_ID  = 128 };// RFC
+enum { MAX_NUMBER_OF_REMB_FEEDBACK_SSRCS = 255 };
+
+enum { BW_HISTORY_SIZE          = 35};
+
+#define MIN_AUDIO_BW_MANAGEMENT_BITRATE   6
+#define MIN_VIDEO_BW_MANAGEMENT_BITRATE   30
+
+enum { DTMF_OUTBAND_MAX         = 20};
+
+enum { RTP_MAX_BURST_SLEEP_TIME = 500 };
+enum { RTP_AUDIO_LEVEL_UNIQUE_ID = 0xbede };
+enum { RTP_MAX_PACKETS_PER_FRAME= 512 }; // must be multiple of 32
+} // namespace webrtc
+
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RTCP_CONFIG_H_
diff --git a/src/modules/rtp_rtcp/source/rtp_rtcp_impl.cc b/src/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
new file mode 100644
index 0000000..cf4af60
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_rtcp_impl.cc
@@ -0,0 +1,1979 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "common_types.h"
+#include "rtp_rtcp_impl.h"
+#include "trace.h"
+
+#ifdef MATLAB
+#include "../test/BWEStandAlone/MatlabPlot.h"
+extern MatlabEngine eng; // global variable defined elsewhere
+#endif
+
+#include <string.h> //memcpy
+#include <cassert> //assert
+
+// local for this file
+namespace {
+
+const float FracMS = 4.294967296E6f;
+
+}  // namepace
+
+#ifdef _WIN32
+// disable warning C4355: 'this' : used in base member initializer list
+#pragma warning(disable : 4355)
+#endif
+
+namespace webrtc {
+
+const WebRtc_UWord16 kDefaultRtt = 200;
+
+RtpRtcp* RtpRtcp::CreateRtpRtcp(const RtpRtcp::Configuration& configuration) {
+  if (configuration.clock) {
+    return new ModuleRtpRtcpImpl(configuration);
+  } else {
+    RtpRtcp::Configuration configuration_copy;
+    memcpy(&configuration_copy, &configuration,
+           sizeof(RtpRtcp::Configuration));
+    configuration_copy.clock = ModuleRTPUtility::GetSystemClock();
+    ModuleRtpRtcpImpl* rtp_rtcp_instance =
+        new ModuleRtpRtcpImpl(configuration_copy);
+    rtp_rtcp_instance->OwnsClock();
+    return rtp_rtcp_instance;
+  }
+}
+
+ModuleRtpRtcpImpl::ModuleRtpRtcpImpl(const Configuration& configuration)
+    : _rtpSender(configuration.id, configuration.audio, configuration.clock),
+      _rtpReceiver(configuration.id, configuration.audio, configuration.clock,
+                   configuration.remote_bitrate_estimator, this),
+      _rtcpSender(configuration.id, configuration.audio, configuration.clock,
+                  this),
+      _rtcpReceiver(configuration.id, configuration.clock, this),
+      _owns_clock(false),
+      _clock(*configuration.clock),
+      _id(configuration.id),
+      _audio(configuration.audio),
+      _collisionDetected(false),
+      _lastProcessTime(configuration.clock->GetTimeInMS()),
+      _lastBitrateProcessTime(configuration.clock->GetTimeInMS()),
+      _lastPacketTimeoutProcessTime(configuration.clock->GetTimeInMS()),
+      _packetOverHead(28),  // IPV4 UDP
+      _criticalSectionModulePtrs(
+          CriticalSectionWrapper::CreateCriticalSection()),
+      _criticalSectionModulePtrsFeedback(
+          CriticalSectionWrapper::CreateCriticalSection()),
+      _defaultModule(
+          static_cast<ModuleRtpRtcpImpl*>(configuration.default_module)),
+      _deadOrAliveActive(false),
+      _deadOrAliveTimeoutMS(0),
+      _deadOrAliveLastTimer(0),
+      _nackMethod(kNackOff),
+      _nackLastTimeSent(0),
+      _nackLastSeqNumberSent(0),
+      _simulcast(false),
+      _keyFrameReqMethod(kKeyFrameReqFirRtp),
+      remote_bitrate_(configuration.remote_bitrate_estimator)
+#ifdef MATLAB
+       , _plot1(NULL)
+#endif
+{
+  _sendVideoCodec.codecType = kVideoCodecUnknown;
+
+  if (_defaultModule) {
+    _defaultModule->RegisterChildModule(this);
+  }
+  // TODO(pwestin) move to constructors of each rtp/rtcp sender/receiver object.
+  _rtpReceiver.RegisterIncomingDataCallback(configuration.incoming_data);
+  _rtpReceiver.RegisterIncomingRTPCallback(configuration.incoming_messages);
+  _rtcpReceiver.RegisterRtcpObservers(configuration.intra_frame_callback,
+                                      configuration.bandwidth_callback,
+                                      configuration.rtcp_feedback);
+  _rtpSender.RegisterAudioCallback(configuration.audio_messages);
+  _rtpReceiver.RegisterIncomingAudioCallback(configuration.audio_messages);
+
+  _rtpSender.RegisterSendTransport(configuration.outgoing_transport);
+  _rtcpSender.RegisterSendTransport(configuration.outgoing_transport);
+
+  // make sure that RTCP objects are aware of our SSRC
+  WebRtc_UWord32 SSRC = _rtpSender.SSRC();
+  _rtcpSender.SetSSRC(SSRC);
+
+  WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, _id, "%s created", __FUNCTION__);
+}
+
+ModuleRtpRtcpImpl::~ModuleRtpRtcpImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, _id, "%s deleted", __FUNCTION__);
+
+  // All child modules MUST be deleted before deleting the default.
+  assert(_childModules.empty());
+
+  // Deregister for the child modules
+  // will go in to the default and remove it self
+  if (_defaultModule) {
+    _defaultModule->DeRegisterChildModule(this);
+  }
+#ifdef MATLAB
+  if (_plot1) {
+    eng.DeletePlot(_plot1);
+    _plot1 = NULL;
+  }
+#endif
+  if (_owns_clock) {
+    delete &_clock;
+  }
+}
+
+void ModuleRtpRtcpImpl::RegisterChildModule(RtpRtcp* module) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "RegisterChildModule(module:0x%x)",
+               module);
+
+  CriticalSectionScoped lock(_criticalSectionModulePtrs.get());
+
+  CriticalSectionScoped doubleLock(_criticalSectionModulePtrsFeedback.get());
+  // we use two locks for protecting _childModules one
+  // (_criticalSectionModulePtrsFeedback) for incoming
+  // messages (BitrateSent) and _criticalSectionModulePtrs
+  //  for all outgoing messages sending packets etc
+  _childModules.push_back((ModuleRtpRtcpImpl*)module);
+}
+
+void ModuleRtpRtcpImpl::DeRegisterChildModule(RtpRtcp* removeModule) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "DeRegisterChildModule(module:0x%x)", removeModule);
+
+  CriticalSectionScoped lock(_criticalSectionModulePtrs.get());
+
+  CriticalSectionScoped doubleLock(_criticalSectionModulePtrsFeedback.get());
+
+  std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+  while (it != _childModules.end()) {
+    RtpRtcp* module = *it;
+    if (module == removeModule) {
+      _childModules.erase(it);
+      return;
+    }
+    it++;
+  }
+}
+
+// returns the number of milliseconds until the module want a worker thread
+// to call Process
+WebRtc_Word32 ModuleRtpRtcpImpl::TimeUntilNextProcess() {
+  const WebRtc_Word64 now = _clock.GetTimeInMS();
+  return kRtpRtcpMaxIdleTimeProcess - (now - _lastProcessTime);
+}
+
+// Process any pending tasks such as timeouts
+// non time critical events
+WebRtc_Word32 ModuleRtpRtcpImpl::Process() {
+  const WebRtc_Word64 now = _clock.GetTimeInMS();
+  _lastProcessTime = now;
+
+  _rtpSender.ProcessSendToNetwork();
+
+  if (now >= _lastPacketTimeoutProcessTime +
+      kRtpRtcpPacketTimeoutProcessTimeMs) {
+    _rtpReceiver.PacketTimeout();
+    _rtcpReceiver.PacketTimeout();
+    _lastPacketTimeoutProcessTime = now;
+  }
+
+  if (now >= _lastBitrateProcessTime + kRtpRtcpBitrateProcessTimeMs) {
+    _rtpSender.ProcessBitrate();
+    _rtpReceiver.ProcessBitrate();
+    _lastBitrateProcessTime = now;
+  }
+
+  ProcessDeadOrAliveTimer();
+
+  const bool defaultInstance(_childModules.empty() ? false : true);
+  if (!defaultInstance && _rtcpSender.TimeToSendRTCPReport()) {
+    WebRtc_UWord16 max_rtt = 0;
+    if (_rtcpSender.Sending()) {
+      std::vector<RTCPReportBlock> receive_blocks;
+      _rtcpReceiver.StatisticsReceived(&receive_blocks);
+      for (std::vector<RTCPReportBlock>::iterator it = receive_blocks.begin();
+           it != receive_blocks.end(); ++it) {
+        WebRtc_UWord16 rtt = 0;
+        _rtcpReceiver.RTT(it->remoteSSRC, &max_rtt, NULL, NULL, NULL);
+        max_rtt = (rtt > max_rtt) ? rtt : max_rtt;
+      }
+    } else {
+      // We're only receiving, i.e. this module doesn't have its own RTT
+      // estimate. Use the RTT set by a sending channel using the same default
+      // module.
+      max_rtt = _rtcpReceiver.RTT();
+    }
+    if (max_rtt == 0) {
+      // No valid estimate available, i.e. no sending channel using the same
+      // default module or no RTCP received yet.
+      max_rtt = kDefaultRtt;
+    }
+    if (remote_bitrate_) {
+      remote_bitrate_->SetRtt(max_rtt);
+      remote_bitrate_->UpdateEstimate(_rtpReceiver.SSRC(), now);
+      if (TMMBR()) {
+        unsigned int target_bitrate = 0;
+        if (remote_bitrate_->LatestEstimate(_rtpReceiver.SSRC(),
+                                            &target_bitrate)) {
+          _rtcpSender.SetTargetBitrate(target_bitrate);
+        }
+      }
+    }
+    _rtcpSender.SendRTCP(kRtcpReport);
+  }
+
+  if (UpdateRTCPReceiveInformationTimers()) {
+    // a receiver has timed out
+    _rtcpReceiver.UpdateTMMBR();
+  }
+  return 0;
+}
+
+/**
+*   Receiver
+*/
+
+void ModuleRtpRtcpImpl::ProcessDeadOrAliveTimer() {
+  if (_deadOrAliveActive) {
+    const WebRtc_Word64 now = _clock.GetTimeInMS();
+    if (now > _deadOrAliveTimeoutMS + _deadOrAliveLastTimer) {
+      // RTCP is alive if we have received a report the last 12 seconds
+      _deadOrAliveLastTimer += _deadOrAliveTimeoutMS;
+
+      bool RTCPalive = false;
+      if (_rtcpReceiver.LastReceived() + 12000 > now) {
+        RTCPalive = true;
+      }
+      _rtpReceiver.ProcessDeadOrAlive(RTCPalive, now);
+    }
+  }
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetPeriodicDeadOrAliveStatus(
+  const bool enable,
+  const WebRtc_UWord8 sampleTimeSeconds) {
+  if (enable) {
+    WEBRTC_TRACE(kTraceModuleCall,
+                 kTraceRtpRtcp,
+                 _id,
+                 "SetPeriodicDeadOrAliveStatus(enable, %d)",
+                 sampleTimeSeconds);
+  } else {
+    WEBRTC_TRACE(kTraceModuleCall,
+                 kTraceRtpRtcp,
+                 _id,
+                 "SetPeriodicDeadOrAliveStatus(disable)");
+  }
+  if (sampleTimeSeconds == 0) {
+    return -1;
+  }
+  _deadOrAliveActive = enable;
+  _deadOrAliveTimeoutMS = sampleTimeSeconds * 1000;
+  // trigger the first after one period
+  _deadOrAliveLastTimer = _clock.GetTimeInMS();
+  return 0;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::PeriodicDeadOrAliveStatus(
+    bool& enable,
+    WebRtc_UWord8& sampleTimeSeconds) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "PeriodicDeadOrAliveStatus()");
+
+  enable = _deadOrAliveActive;
+  sampleTimeSeconds = (WebRtc_UWord8)(_deadOrAliveTimeoutMS / 1000);
+  return 0;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetPacketTimeout(
+    const WebRtc_UWord32 RTPtimeoutMS,
+    const WebRtc_UWord32 RTCPtimeoutMS) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SetPacketTimeout(%u,%u)",
+               RTPtimeoutMS,
+               RTCPtimeoutMS);
+
+  if (_rtpReceiver.SetPacketTimeout(RTPtimeoutMS) == 0) {
+    return _rtcpReceiver.SetPacketTimeout(RTCPtimeoutMS);
+  }
+  return -1;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RegisterReceivePayload(
+  const CodecInst& voiceCodec) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "RegisterReceivePayload(voiceCodec)");
+
+  return _rtpReceiver.RegisterReceivePayload(
+           voiceCodec.plname,
+           voiceCodec.pltype,
+           voiceCodec.plfreq,
+           voiceCodec.channels,
+           (voiceCodec.rate < 0) ? 0 : voiceCodec.rate);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RegisterReceivePayload(
+  const VideoCodec& videoCodec) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "RegisterReceivePayload(videoCodec)");
+
+  return _rtpReceiver.RegisterReceivePayload(videoCodec.plName,
+                                             videoCodec.plType,
+                                             90000,
+                                             0,
+                                             videoCodec.maxBitrate);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::ReceivePayloadType(
+  const CodecInst& voiceCodec,
+  WebRtc_Word8* plType) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "ReceivePayloadType(voiceCodec)");
+
+  return _rtpReceiver.ReceivePayloadType(
+           voiceCodec.plname,
+           voiceCodec.plfreq,
+           voiceCodec.channels,
+           (voiceCodec.rate < 0) ? 0 : voiceCodec.rate,
+           plType);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::ReceivePayloadType(
+  const VideoCodec& videoCodec,
+  WebRtc_Word8* plType) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "ReceivePayloadType(videoCodec)");
+
+  return _rtpReceiver.ReceivePayloadType(videoCodec.plName,
+                                         90000,
+                                         0,
+                                         videoCodec.maxBitrate,
+                                         plType);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::DeRegisterReceivePayload(
+    const WebRtc_Word8 payloadType) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "DeRegisterReceivePayload(%d)",
+               payloadType);
+
+  return _rtpReceiver.DeRegisterReceivePayload(payloadType);
+}
+
+// get the currently configured SSRC filter
+WebRtc_Word32 ModuleRtpRtcpImpl::SSRCFilter(WebRtc_UWord32& allowedSSRC) const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "SSRCFilter()");
+
+  return _rtpReceiver.SSRCFilter(allowedSSRC);
+}
+
+// set a SSRC to be used as a filter for incoming RTP streams
+WebRtc_Word32 ModuleRtpRtcpImpl::SetSSRCFilter(
+  const bool enable,
+  const WebRtc_UWord32 allowedSSRC) {
+  if (enable) {
+    WEBRTC_TRACE(kTraceModuleCall,
+                 kTraceRtpRtcp,
+                 _id,
+                 "SetSSRCFilter(enable, 0x%x)",
+                 allowedSSRC);
+  } else {
+    WEBRTC_TRACE(kTraceModuleCall,
+                 kTraceRtpRtcp,
+                 _id,
+                 "SetSSRCFilter(disable)");
+  }
+
+  return _rtpReceiver.SetSSRCFilter(enable, allowedSSRC);
+}
+
+// Get last received remote timestamp
+WebRtc_UWord32 ModuleRtpRtcpImpl::RemoteTimestamp() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RemoteTimestamp()");
+
+  return _rtpReceiver.TimeStamp();
+}
+
+// Get the current estimated remote timestamp
+WebRtc_Word32 ModuleRtpRtcpImpl::EstimatedRemoteTimeStamp(
+    WebRtc_UWord32& timestamp) const {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "EstimatedRemoteTimeStamp()");
+
+  return _rtpReceiver.EstimatedRemoteTimeStamp(timestamp);
+}
+
+// Get incoming SSRC
+WebRtc_UWord32 ModuleRtpRtcpImpl::RemoteSSRC() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RemoteSSRC()");
+
+  return _rtpReceiver.SSRC();
+}
+
+// Get remote CSRC
+WebRtc_Word32 ModuleRtpRtcpImpl::RemoteCSRCs(
+    WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]) const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RemoteCSRCs()");
+
+  return _rtpReceiver.CSRCs(arrOfCSRC);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetRTXSendStatus(
+  const bool enable,
+  const bool setSSRC,
+  const WebRtc_UWord32 SSRC) {
+  _rtpSender.SetRTXStatus(enable, setSSRC, SSRC);
+  return 0;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RTXSendStatus(bool* enable,
+                                               WebRtc_UWord32* SSRC) const {
+  _rtpSender.RTXStatus(enable, SSRC);
+  return 0;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetRTXReceiveStatus(
+  const bool enable,
+  const WebRtc_UWord32 SSRC) {
+  _rtpReceiver.SetRTXStatus(enable, SSRC);
+  return 0;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RTXReceiveStatus(bool* enable,
+                                                  WebRtc_UWord32* SSRC) const {
+  _rtpReceiver.RTXStatus(enable, SSRC);
+  return 0;
+}
+
+// called by the network module when we receive a packet
+WebRtc_Word32 ModuleRtpRtcpImpl::IncomingPacket(
+    const WebRtc_UWord8* incomingPacket,
+    const WebRtc_UWord16 incomingPacketLength) {
+  WEBRTC_TRACE(kTraceStream,
+               kTraceRtpRtcp,
+               _id,
+               "IncomingPacket(packetLength:%u)",
+               incomingPacketLength);
+  // minimum RTP is 12 bytes
+  // minimum RTCP is 8 bytes (RTCP BYE)
+  if (incomingPacketLength < 8 || incomingPacket == NULL) {
+    WEBRTC_TRACE(kTraceDebug,
+                 kTraceRtpRtcp,
+                 _id,
+                 "IncomingPacket invalid buffer or length");
+    return -1;
+  }
+  // check RTP version
+  const WebRtc_UWord8  version  = incomingPacket[0] >> 6 ;
+  if (version != 2) {
+    WEBRTC_TRACE(kTraceDebug,
+                 kTraceRtpRtcp,
+                 _id,
+                 "IncomingPacket invalid RTP version");
+    return -1;
+  }
+
+  ModuleRTPUtility::RTPHeaderParser rtpParser(incomingPacket,
+                                              incomingPacketLength);
+
+  if (rtpParser.RTCP()) {
+    // Allow receive of non-compound RTCP packets.
+    RTCPUtility::RTCPParserV2 rtcpParser(incomingPacket,
+                                         incomingPacketLength,
+                                         true);
+
+    const bool validRTCPHeader = rtcpParser.IsValid();
+    if (!validRTCPHeader) {
+      WEBRTC_TRACE(kTraceDebug,
+                   kTraceRtpRtcp,
+                   _id,
+                   "IncomingPacket invalid RTCP packet");
+      return -1;
+    }
+    RTCPHelp::RTCPPacketInformation rtcpPacketInformation;
+    WebRtc_Word32 retVal = _rtcpReceiver.IncomingRTCPPacket(
+                             rtcpPacketInformation,
+                             &rtcpParser);
+    if (retVal == 0) {
+      _rtcpReceiver.TriggerCallbacksFromRTCPPacket(rtcpPacketInformation);
+    }
+    return retVal;
+
+  } else {
+    WebRtcRTPHeader rtpHeader;
+    memset(&rtpHeader, 0, sizeof(rtpHeader));
+
+    RtpHeaderExtensionMap map;
+    _rtpReceiver.GetHeaderExtensionMapCopy(&map);
+
+    const bool validRTPHeader = rtpParser.Parse(rtpHeader, &map);
+    if (!validRTPHeader) {
+      WEBRTC_TRACE(kTraceDebug,
+                   kTraceRtpRtcp,
+                   _id,
+                   "IncomingPacket invalid RTP header");
+      return -1;
+    }
+    return _rtpReceiver.IncomingRTPPacket(&rtpHeader,
+                                          incomingPacket,
+                                          incomingPacketLength);
+  }
+}
+
+/**
+*   Sender
+*/
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RegisterSendPayload(
+  const CodecInst& voiceCodec) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "RegisterSendPayload(plName:%s plType:%d frequency:%u)",
+               voiceCodec.plname,
+               voiceCodec.pltype,
+               voiceCodec.plfreq);
+
+  return _rtpSender.RegisterPayload(
+           voiceCodec.plname,
+           voiceCodec.pltype,
+           voiceCodec.plfreq,
+           voiceCodec.channels,
+           (voiceCodec.rate < 0) ? 0 : voiceCodec.rate);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RegisterSendPayload(
+  const VideoCodec& videoCodec) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "RegisterSendPayload(plName:%s plType:%d)",
+               videoCodec.plName,
+               videoCodec.plType);
+
+  _sendVideoCodec = videoCodec;
+  _simulcast = (videoCodec.numberOfSimulcastStreams > 1) ? true : false;
+  return _rtpSender.RegisterPayload(videoCodec.plName,
+                                    videoCodec.plType,
+                                    90000,
+                                    0,
+                                    videoCodec.maxBitrate);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::DeRegisterSendPayload(
+    const WebRtc_Word8 payloadType) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "DeRegisterSendPayload(%d)", payloadType);
+
+  return _rtpSender.DeRegisterSendPayload(payloadType);
+}
+
+WebRtc_Word8 ModuleRtpRtcpImpl::SendPayloadType() const {
+  return _rtpSender.SendPayloadType();
+}
+
+WebRtc_UWord32 ModuleRtpRtcpImpl::StartTimestamp() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "StartTimestamp()");
+
+  return _rtpSender.StartTimestamp();
+}
+
+// configure start timestamp, default is a random number
+WebRtc_Word32 ModuleRtpRtcpImpl::SetStartTimestamp(
+    const WebRtc_UWord32 timestamp) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SetStartTimestamp(%d)",
+               timestamp);
+
+  return _rtpSender.SetStartTimestamp(timestamp, true);
+}
+
+WebRtc_UWord16 ModuleRtpRtcpImpl::SequenceNumber() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "SequenceNumber()");
+
+  return _rtpSender.SequenceNumber();
+}
+
+// Set SequenceNumber, default is a random number
+WebRtc_Word32 ModuleRtpRtcpImpl::SetSequenceNumber(
+    const WebRtc_UWord16 seqNum) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SetSequenceNumber(%d)",
+               seqNum);
+
+  return _rtpSender.SetSequenceNumber(seqNum);
+}
+
+WebRtc_UWord32 ModuleRtpRtcpImpl::SSRC() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "SSRC()");
+
+  return _rtpSender.SSRC();
+}
+
+// configure SSRC, default is a random number
+WebRtc_Word32 ModuleRtpRtcpImpl::SetSSRC(const WebRtc_UWord32 ssrc) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "SetSSRC(%d)", ssrc);
+
+  if (_rtpSender.SetSSRC(ssrc) == 0) {
+    _rtcpReceiver.SetSSRC(ssrc);
+    _rtcpSender.SetSSRC(ssrc);
+    return 0;
+  }
+  return -1;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetCSRCStatus(const bool include) {
+  _rtcpSender.SetCSRCStatus(include);
+  return _rtpSender.SetCSRCStatus(include);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::CSRCs(
+    WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]) const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "CSRCs()");
+
+  return _rtpSender.CSRCs(arrOfCSRC);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetCSRCs(
+    const WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize],
+    const WebRtc_UWord8 arrLength) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SetCSRCs(arrLength:%d)",
+               arrLength);
+
+  const bool defaultInstance(_childModules.empty() ? false : true);
+
+  if (defaultInstance) {
+    // for default we need to update all child modules too
+    CriticalSectionScoped lock(_criticalSectionModulePtrs.get());
+
+    std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+    while (it != _childModules.end()) {
+      RtpRtcp* module = *it;
+      if (module) {
+        module->SetCSRCs(arrOfCSRC, arrLength);
+      }
+      it++;
+    }
+    return 0;
+
+  } else {
+    for (int i = 0; i < arrLength; i++) {
+      WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "\tidx:%d CSRC:%u", i,
+                   arrOfCSRC[i]);
+    }
+    _rtcpSender.SetCSRCs(arrOfCSRC, arrLength);
+    return _rtpSender.SetCSRCs(arrOfCSRC, arrLength);
+  }
+}
+
+WebRtc_UWord32 ModuleRtpRtcpImpl::PacketCountSent() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "PacketCountSent()");
+
+  return _rtpSender.Packets();
+}
+
+WebRtc_UWord32 ModuleRtpRtcpImpl::ByteCountSent() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "ByteCountSent()");
+
+  return _rtpSender.Bytes();
+}
+
+int ModuleRtpRtcpImpl::CurrentSendFrequencyHz() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "CurrentSendFrequencyHz()");
+
+  return _rtpSender.SendPayloadFrequency();
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetSendingStatus(const bool sending) {
+  if (sending) {
+    WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+                 "SetSendingStatus(sending)");
+  } else {
+    WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+                 "SetSendingStatus(stopped)");
+  }
+  if (_rtcpSender.Sending() != sending) {
+    // sends RTCP BYE when going from true to false
+    if (_rtcpSender.SetSendingStatus(sending) != 0) {
+      WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
+                   "Failed to send RTCP BYE");
+    }
+
+    _collisionDetected = false;
+
+    // generate a new timeStamp if true and not configured via API
+    // generate a new SSRC for the next "call" if false
+    _rtpSender.SetSendingStatus(sending);
+
+    // make sure that RTCP objects are aware of our SSRC (it could have changed
+    // due to collision)
+    WebRtc_UWord32 SSRC = _rtpSender.SSRC();
+    _rtcpReceiver.SetSSRC(SSRC);
+    _rtcpSender.SetSSRC(SSRC);
+    return 0;
+  }
+  return 0;
+}
+
+bool ModuleRtpRtcpImpl::Sending() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "Sending()");
+
+  return _rtcpSender.Sending();
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetSendingMediaStatus(const bool sending) {
+  if (sending) {
+    WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+                 "SetSendingMediaStatus(sending)");
+  } else {
+    WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+                 "SetSendingMediaStatus(stopped)");
+  }
+  _rtpSender.SetSendingMediaStatus(sending);
+  return 0;
+}
+
+bool ModuleRtpRtcpImpl::SendingMedia() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "Sending()");
+
+  const bool haveChildModules(_childModules.empty() ? false : true);
+  if (!haveChildModules) {
+    return _rtpSender.SendingMedia();
+  }
+
+  CriticalSectionScoped lock(_criticalSectionModulePtrs.get());
+  std::list<ModuleRtpRtcpImpl*>::const_iterator it = _childModules.begin();
+  while (it != _childModules.end()) {
+    RTPSender& rtpSender = (*it)->_rtpSender;
+    if (rtpSender.SendingMedia()) {
+      return true;
+    }
+    it++;
+  }
+  return false;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SendOutgoingData(
+    FrameType frameType,
+    WebRtc_Word8 payloadType,
+    WebRtc_UWord32 timeStamp,
+    int64_t capture_time_ms,
+    const WebRtc_UWord8* payloadData,
+    WebRtc_UWord32 payloadSize,
+    const RTPFragmentationHeader* fragmentation,
+    const RTPVideoHeader* rtpVideoHdr) {
+  WEBRTC_TRACE(
+    kTraceStream,
+    kTraceRtpRtcp,
+    _id,
+    "SendOutgoingData(frameType:%d payloadType:%d timeStamp:%u size:%u)",
+    frameType, payloadType, timeStamp, payloadSize);
+
+  const bool haveChildModules(_childModules.empty() ? false : true);
+  if (!haveChildModules) {
+    // Don't sent RTCP from default module
+    if (_rtcpSender.TimeToSendRTCPReport(kVideoFrameKey == frameType)) {
+      _rtcpSender.SendRTCP(kRtcpReport);
+    }
+    return _rtpSender.SendOutgoingData(frameType,
+                                       payloadType,
+                                       timeStamp,
+                                       capture_time_ms,
+                                       payloadData,
+                                       payloadSize,
+                                       fragmentation,
+                                       NULL,
+                                       &(rtpVideoHdr->codecHeader));
+  }
+  WebRtc_Word32 retVal = -1;
+  if (_simulcast) {
+    if (rtpVideoHdr == NULL) {
+      return -1;
+    }
+    int idx = 0;
+    CriticalSectionScoped lock(_criticalSectionModulePtrs.get());
+    std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+    for (; idx < rtpVideoHdr->simulcastIdx; ++it) {
+      if (it == _childModules.end()) {
+        return -1;
+      }
+      if ((*it)->SendingMedia()) {
+        ++idx;
+      }
+    }
+    for (; it != _childModules.end(); ++it) {
+      if ((*it)->SendingMedia()) {
+        break;
+      }
+      ++idx;
+    }
+    if (it == _childModules.end()) {
+      return -1;
+    }
+    RTPSender& rtpSender = (*it)->_rtpSender;
+    WEBRTC_TRACE(kTraceModuleCall,
+                 kTraceRtpRtcp,
+                 _id,
+                 "SendOutgoingData(SimulcastIdx:%u size:%u, ssrc:0x%x)",
+                 idx, payloadSize, rtpSender.SSRC());
+    return rtpSender.SendOutgoingData(frameType,
+                                      payloadType,
+                                      timeStamp,
+                                      capture_time_ms,
+                                      payloadData,
+                                      payloadSize,
+                                      fragmentation,
+                                      NULL,
+                                      &(rtpVideoHdr->codecHeader));
+  } else {
+    CriticalSectionScoped lock(_criticalSectionModulePtrs.get());
+    // TODO(pwestin) remove codecInfo from SendOutgoingData
+    VideoCodecInformation* codecInfo = NULL;
+
+    std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+    if (it != _childModules.end()) {
+      RTPSender& rtpSender = (*it)->_rtpSender;
+      retVal = rtpSender.SendOutgoingData(frameType,
+                                          payloadType,
+                                          timeStamp,
+                                          capture_time_ms,
+                                          payloadData,
+                                          payloadSize,
+                                          fragmentation,
+                                          NULL,
+                                          &(rtpVideoHdr->codecHeader));
+
+      it++;
+    }
+
+    // send to all remaining "child" modules
+    while (it != _childModules.end()) {
+      RTPSender& rtpSender = (*it)->_rtpSender;
+      retVal = rtpSender.SendOutgoingData(frameType,
+                                          payloadType,
+                                          timeStamp,
+                                          capture_time_ms,
+                                          payloadData,
+                                          payloadSize,
+                                          fragmentation,
+                                          codecInfo,
+                                          &(rtpVideoHdr->codecHeader));
+
+      it++;
+    }
+  }
+  return retVal;
+}
+
+WebRtc_UWord16 ModuleRtpRtcpImpl::MaxPayloadLength() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "MaxPayloadLength()");
+
+  return _rtpSender.MaxPayloadLength();
+}
+
+WebRtc_UWord16 ModuleRtpRtcpImpl::MaxDataPayloadLength() const {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "MaxDataPayloadLength()");
+
+  WebRtc_UWord16 minDataPayloadLength = IP_PACKET_SIZE - 28; // Assuming IP/UDP
+
+  const bool defaultInstance(_childModules.empty() ? false : true);
+  if (defaultInstance) {
+    // for default we need to update all child modules too
+    CriticalSectionScoped lock(_criticalSectionModulePtrs.get());
+    std::list<ModuleRtpRtcpImpl*>::const_iterator it =
+      _childModules.begin();
+    while (it != _childModules.end()) {
+      RtpRtcp* module = *it;
+      if (module) {
+        WebRtc_UWord16 dataPayloadLength =
+          module->MaxDataPayloadLength();
+        if (dataPayloadLength < minDataPayloadLength) {
+          minDataPayloadLength = dataPayloadLength;
+        }
+      }
+      it++;
+    }
+  }
+
+  WebRtc_UWord16 dataPayloadLength = _rtpSender.MaxDataPayloadLength();
+  if (dataPayloadLength < minDataPayloadLength) {
+    minDataPayloadLength = dataPayloadLength;
+  }
+  return minDataPayloadLength;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetTransportOverhead(
+  const bool TCP,
+  const bool IPV6,
+  const WebRtc_UWord8 authenticationOverhead) {
+  WEBRTC_TRACE(
+    kTraceModuleCall,
+    kTraceRtpRtcp,
+    _id,
+    "SetTransportOverhead(TCP:%d, IPV6:%d authenticationOverhead:%u)",
+    TCP, IPV6, authenticationOverhead);
+
+  WebRtc_UWord16 packetOverHead = 0;
+  if (IPV6) {
+    packetOverHead = 40;
+  } else {
+    packetOverHead = 20;
+  }
+  if (TCP) {
+    // TCP
+    packetOverHead += 20;
+  } else {
+    // UDP
+    packetOverHead += 8;
+  }
+  packetOverHead += authenticationOverhead;
+
+  if (packetOverHead == _packetOverHead) {
+    // ok same as before
+    return 0;
+  }
+  // calc diff
+  WebRtc_Word16 packetOverHeadDiff = packetOverHead - _packetOverHead;
+
+  // store new
+  _packetOverHead = packetOverHead;
+
+  _rtpReceiver.SetPacketOverHead(_packetOverHead);
+  WebRtc_UWord16 length = _rtpSender.MaxPayloadLength() - packetOverHeadDiff;
+  return _rtpSender.SetMaxPayloadLength(length, _packetOverHead);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetMaxTransferUnit(const WebRtc_UWord16 MTU) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "SetMaxTransferUnit(%u)",
+               MTU);
+
+  if (MTU > IP_PACKET_SIZE) {
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
+                 "Invalid in argument to SetMaxTransferUnit(%u)", MTU);
+    return -1;
+  }
+  return _rtpSender.SetMaxPayloadLength(MTU - _packetOverHead,
+                                        _packetOverHead);
+}
+
+/*
+*   RTCP
+*/
+RTCPMethod ModuleRtpRtcpImpl::RTCP() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RTCP()");
+
+  if (_rtcpSender.Status() != kRtcpOff) {
+    return _rtcpReceiver.Status();
+  }
+  return kRtcpOff;
+}
+
+// configure RTCP status i.e on/off
+WebRtc_Word32 ModuleRtpRtcpImpl::SetRTCPStatus(const RTCPMethod method) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "SetRTCPStatus(%d)",
+               method);
+
+  if (_rtcpSender.SetRTCPStatus(method) == 0) {
+    return _rtcpReceiver.SetRTCPStatus(method);
+  }
+  return -1;
+}
+
+// only for internal test
+WebRtc_UWord32 ModuleRtpRtcpImpl::LastSendReport(WebRtc_UWord32& lastRTCPTime) {
+  return _rtcpSender.LastSendReport(lastRTCPTime);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetCNAME(const char cName[RTCP_CNAME_SIZE]) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "SetCNAME(%s)", cName);
+  return _rtcpSender.SetCNAME(cName);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::CNAME(char cName[RTCP_CNAME_SIZE]) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "CNAME()");
+  return _rtcpSender.CNAME(cName);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::AddMixedCNAME(
+  const WebRtc_UWord32 SSRC,
+  const char cName[RTCP_CNAME_SIZE]) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "AddMixedCNAME(SSRC:%u)", SSRC);
+
+  return _rtcpSender.AddMixedCNAME(SSRC, cName);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RemoveMixedCNAME(const WebRtc_UWord32 SSRC) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "RemoveMixedCNAME(SSRC:%u)", SSRC);
+  return _rtcpSender.RemoveMixedCNAME(SSRC);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RemoteCNAME(
+  const WebRtc_UWord32 remoteSSRC,
+  char cName[RTCP_CNAME_SIZE]) const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "RemoteCNAME(SSRC:%u)", remoteSSRC);
+
+  return _rtcpReceiver.CNAME(remoteSSRC, cName);
+}
+
+WebRtc_UWord16 ModuleRtpRtcpImpl::RemoteSequenceNumber() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RemoteSequenceNumber()");
+
+  return _rtpReceiver.SequenceNumber();
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RemoteNTP(
+    WebRtc_UWord32* receivedNTPsecs,
+    WebRtc_UWord32* receivedNTPfrac,
+    WebRtc_UWord32* RTCPArrivalTimeSecs,
+    WebRtc_UWord32* RTCPArrivalTimeFrac) const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RemoteNTP()");
+
+  return _rtcpReceiver.NTP(receivedNTPsecs,
+                           receivedNTPfrac,
+                           RTCPArrivalTimeSecs,
+                           RTCPArrivalTimeFrac);
+}
+
+// Get RoundTripTime
+WebRtc_Word32 ModuleRtpRtcpImpl::RTT(const WebRtc_UWord32 remoteSSRC,
+                                     WebRtc_UWord16* RTT,
+                                     WebRtc_UWord16* avgRTT,
+                                     WebRtc_UWord16* minRTT,
+                                     WebRtc_UWord16* maxRTT) const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RTT()");
+
+  return _rtcpReceiver.RTT(remoteSSRC, RTT, avgRTT, minRTT, maxRTT);
+}
+
+// Reset RoundTripTime statistics
+WebRtc_Word32
+ModuleRtpRtcpImpl::ResetRTT(const WebRtc_UWord32 remoteSSRC) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "ResetRTT(SSRC:%u)",
+               remoteSSRC);
+
+  return _rtcpReceiver.ResetRTT(remoteSSRC);
+}
+
+// Reset RTP statistics
+WebRtc_Word32
+ModuleRtpRtcpImpl::ResetStatisticsRTP() {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "ResetStatisticsRTP()");
+
+  return _rtpReceiver.ResetStatistics();
+}
+
+// Reset RTP data counters for the receiving side
+WebRtc_Word32 ModuleRtpRtcpImpl::ResetReceiveDataCountersRTP() {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "ResetReceiveDataCountersRTP()");
+
+  return _rtpReceiver.ResetDataCounters();
+}
+
+// Reset RTP data counters for the sending side
+WebRtc_Word32 ModuleRtpRtcpImpl::ResetSendDataCountersRTP() {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "ResetSendDataCountersRTP()");
+
+  return _rtpSender.ResetDataCounters();
+}
+
+// Force a send of an RTCP packet
+// normal SR and RR are triggered via the process function
+WebRtc_Word32 ModuleRtpRtcpImpl::SendRTCP(WebRtc_UWord32 rtcpPacketType) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "SendRTCP(0x%x)",
+               rtcpPacketType);
+
+  return  _rtcpSender.SendRTCP(rtcpPacketType);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetRTCPApplicationSpecificData(
+    const WebRtc_UWord8 subType,
+    const WebRtc_UWord32 name,
+    const WebRtc_UWord8* data,
+    const WebRtc_UWord16 length) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "SetRTCPApplicationSpecificData(subType:%d name:0x%x)", subType,
+               name);
+
+  return  _rtcpSender.SetApplicationSpecificData(subType, name, data, length);
+}
+
+/*
+*   (XR) VOIP metric
+*/
+WebRtc_Word32 ModuleRtpRtcpImpl::SetRTCPVoIPMetrics(
+    const RTCPVoIPMetric* VoIPMetric) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "SetRTCPVoIPMetrics()");
+
+  return  _rtcpSender.SetRTCPVoIPMetrics(VoIPMetric);
+}
+
+// our localy created statistics of the received RTP stream
+WebRtc_Word32 ModuleRtpRtcpImpl::StatisticsRTP(
+    WebRtc_UWord8*  fraction_lost,
+    WebRtc_UWord32* cum_lost,
+    WebRtc_UWord32* ext_max,
+    WebRtc_UWord32* jitter,
+    WebRtc_UWord32* max_jitter) const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "StatisticsRTP()");
+
+  WebRtc_UWord32 jitter_transmission_time_offset = 0;
+
+  WebRtc_Word32 retVal = _rtpReceiver.Statistics(
+      fraction_lost,
+      cum_lost,
+      ext_max,
+      jitter,
+      max_jitter,
+      &jitter_transmission_time_offset,
+      (_rtcpSender.Status() == kRtcpOff));
+  if (retVal == -1) {
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
+                 "StatisticsRTP() no statisitics availble");
+  }
+  return retVal;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::DataCountersRTP(
+    WebRtc_UWord32* bytesSent,
+    WebRtc_UWord32* packetsSent,
+    WebRtc_UWord32* bytesReceived,
+    WebRtc_UWord32* packetsReceived) const {
+  WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id, "DataCountersRTP()");
+
+  if (bytesSent) {
+    *bytesSent = _rtpSender.Bytes();
+  }
+  if (packetsSent) {
+    *packetsSent = _rtpSender.Packets();
+  }
+  return _rtpReceiver.DataCounters(bytesReceived, packetsReceived);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::ReportBlockStatistics(
+    WebRtc_UWord8* fraction_lost,
+    WebRtc_UWord32* cum_lost,
+    WebRtc_UWord32* ext_max,
+    WebRtc_UWord32* jitter,
+    WebRtc_UWord32* jitter_transmission_time_offset) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "ReportBlockStatistics()");
+  WebRtc_Word32 missing = 0;
+  WebRtc_Word32 ret = _rtpReceiver.Statistics(fraction_lost,
+                                              cum_lost,
+                                              ext_max,
+                                              jitter,
+                                              NULL,
+                                              jitter_transmission_time_offset,
+                                              &missing,
+                                              true);
+
+#ifdef MATLAB
+  if (_plot1 == NULL) {
+    _plot1 = eng.NewPlot(new MatlabPlot());
+    _plot1->AddTimeLine(30, "b", "lost", _clock.GetTimeInMS());
+  }
+  _plot1->Append("lost", missing);
+  _plot1->Plot();
+#endif
+
+  return ret;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RemoteRTCPStat(RTCPSenderInfo* senderInfo) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RemoteRTCPStat()");
+
+  return _rtcpReceiver.SenderInfoReceived(senderInfo);
+}
+
+// received RTCP report
+WebRtc_Word32 ModuleRtpRtcpImpl::RemoteRTCPStat(
+  std::vector<RTCPReportBlock>* receiveBlocks) const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RemoteRTCPStat()");
+
+  return _rtcpReceiver.StatisticsReceived(receiveBlocks);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::AddRTCPReportBlock(
+    const WebRtc_UWord32 SSRC,
+    const RTCPReportBlock* reportBlock) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "AddRTCPReportBlock()");
+
+  return _rtcpSender.AddReportBlock(SSRC, reportBlock);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RemoveRTCPReportBlock(
+    const WebRtc_UWord32 SSRC) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "RemoveRTCPReportBlock()");
+
+  return _rtcpSender.RemoveReportBlock(SSRC);
+}
+
+/*
+ *  (REMB) Receiver Estimated Max Bitrate
+ */
+bool ModuleRtpRtcpImpl::REMB() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "REMB()");
+
+  return _rtcpSender.REMB();
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetREMBStatus(const bool enable) {
+  if (enable) {
+    WEBRTC_TRACE(kTraceModuleCall,
+                 kTraceRtpRtcp,
+                 _id,
+                 "SetREMBStatus(enable)");
+  } else {
+    WEBRTC_TRACE(kTraceModuleCall,
+                 kTraceRtpRtcp,
+                 _id,
+                 "SetREMBStatus(disable)");
+  }
+  return _rtcpSender.SetREMBStatus(enable);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetREMBData(const WebRtc_UWord32 bitrate,
+                                             const WebRtc_UWord8 numberOfSSRC,
+                                             const WebRtc_UWord32* SSRC) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "SetREMBData(bitrate:%d,?,?)", bitrate);
+  return _rtcpSender.SetREMBData(bitrate, numberOfSSRC, SSRC);
+}
+
+/*
+ *   (IJ) Extended jitter report.
+ */
+bool ModuleRtpRtcpImpl::IJ() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "IJ()");
+
+  return _rtcpSender.IJ();
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetIJStatus(const bool enable) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SetIJStatus(%s)", enable ? "true" : "false");
+
+  return _rtcpSender.SetIJStatus(enable);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RegisterSendRtpHeaderExtension(
+  const RTPExtensionType type,
+  const WebRtc_UWord8 id) {
+  return _rtpSender.RegisterRtpHeaderExtension(type, id);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::DeregisterSendRtpHeaderExtension(
+  const RTPExtensionType type) {
+  return _rtpSender.DeregisterRtpHeaderExtension(type);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RegisterReceiveRtpHeaderExtension(
+  const RTPExtensionType type,
+  const WebRtc_UWord8 id) {
+  return _rtpReceiver.RegisterRtpHeaderExtension(type, id);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::DeregisterReceiveRtpHeaderExtension(
+  const RTPExtensionType type) {
+  return _rtpReceiver.DeregisterRtpHeaderExtension(type);
+}
+
+void ModuleRtpRtcpImpl::SetTransmissionSmoothingStatus(const bool enable) {
+  _rtpSender.SetTransmissionSmoothingStatus(enable);
+}
+
+bool ModuleRtpRtcpImpl::TransmissionSmoothingStatus() const {
+  return _rtpSender.TransmissionSmoothingStatus();
+}
+
+/*
+*   (TMMBR) Temporary Max Media Bit Rate
+*/
+bool ModuleRtpRtcpImpl::TMMBR() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "TMMBR()");
+
+  return _rtcpSender.TMMBR();
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetTMMBRStatus(const bool enable) {
+  if (enable) {
+    WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+                 "SetTMMBRStatus(enable)");
+  } else {
+    WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+                 "SetTMMBRStatus(disable)");
+  }
+  return _rtcpSender.SetTMMBRStatus(enable);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetTMMBN(const TMMBRSet* boundingSet) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "SetTMMBN()");
+
+  WebRtc_UWord32 maxBitrateKbit = _rtpSender.MaxConfiguredBitrateVideo() / 1000;
+  return _rtcpSender.SetTMMBN(boundingSet, maxBitrateKbit);
+}
+
+/*
+*   (NACK) Negative acknowledgement
+*/
+
+// Is Negative acknowledgement requests on/off?
+NACKMethod ModuleRtpRtcpImpl::NACK() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "NACK()");
+
+  NACKMethod childMethod = kNackOff;
+  const bool defaultInstance(_childModules.empty() ? false : true);
+  if (defaultInstance) {
+    // for default we need to check all child modules too
+    CriticalSectionScoped lock(_criticalSectionModulePtrs.get());
+    std::list<ModuleRtpRtcpImpl*>::const_iterator it =
+      _childModules.begin();
+    while (it != _childModules.end()) {
+      RtpRtcp* module = *it;
+      if (module) {
+        NACKMethod nackMethod = module->NACK();
+        if (nackMethod != kNackOff) {
+          childMethod = nackMethod;
+          break;
+        }
+      }
+      it++;
+    }
+  }
+
+  NACKMethod method = _nackMethod;
+  if (childMethod != kNackOff) {
+    method = childMethod;
+  }
+  return method;
+}
+
+// Turn negative acknowledgement requests on/off
+WebRtc_Word32 ModuleRtpRtcpImpl::SetNACKStatus(NACKMethod method) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SetNACKStatus(%u)", method);
+
+  _nackMethod = method;
+  _rtpReceiver.SetNACKStatus(method);
+  return 0;
+}
+
+// Returns the currently configured retransmission mode.
+int ModuleRtpRtcpImpl::SelectiveRetransmissions() const {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SelectiveRetransmissions()");
+  return _rtpSender.SelectiveRetransmissions();
+}
+
+// Enable or disable a retransmission mode, which decides which packets will
+// be retransmitted if NACKed.
+int ModuleRtpRtcpImpl::SetSelectiveRetransmissions(uint8_t settings) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SetSelectiveRetransmissions(%u)",
+               settings);
+  return _rtpSender.SetSelectiveRetransmissions(settings);
+}
+
+// Send a Negative acknowledgement packet
+WebRtc_Word32 ModuleRtpRtcpImpl::SendNACK(const WebRtc_UWord16* nackList,
+                                          const WebRtc_UWord16 size) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SendNACK(size:%u)", size);
+
+  if (size > NACK_PACKETS_MAX_SIZE) {
+    RequestKeyFrame();
+    return -1;
+  }
+  WebRtc_UWord16 avgRTT = 0;
+  _rtcpReceiver.RTT(_rtpReceiver.SSRC(), NULL, &avgRTT, NULL, NULL);
+
+  WebRtc_Word64 waitTime = 5 + ((avgRTT * 3) >> 1);  // 5 + RTT*1.5
+  if (waitTime == 5) {
+    waitTime = 100;  // During startup we don't have an RTT
+  }
+  const WebRtc_Word64 now = _clock.GetTimeInMS();
+  const WebRtc_Word64 timeLimit = now - waitTime;
+
+  if (_nackLastTimeSent < timeLimit) {
+    // send list
+  } else {
+    // only send if extended list
+    if (_nackLastSeqNumberSent == nackList[size - 1]) {
+      // last seq num is the same don't send list
+      return 0;
+    } else {
+      // send list
+    }
+  }
+  _nackLastTimeSent =  now;
+  _nackLastSeqNumberSent = nackList[size - 1];
+
+  switch (_nackMethod) {
+    case kNackRtcp:
+      return _rtcpSender.SendRTCP(kRtcpNack, size, nackList);
+    case kNackOff:
+      return -1;
+  };
+  return -1;
+}
+
+// Store the sent packets, needed to answer to a Negative acknowledgement
+// requests
+WebRtc_Word32 ModuleRtpRtcpImpl::SetStorePacketsStatus(
+  const bool enable,
+  const WebRtc_UWord16 numberToStore) {
+  if (enable) {
+    WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+                 "SetStorePacketsStatus(enable, numberToStore:%d)",
+                 numberToStore);
+  } else {
+    WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+                 "SetStorePacketsStatus(disable)");
+  }
+  return _rtpSender.SetStorePacketsStatus(enable, numberToStore);
+}
+
+/*
+*   Audio
+*/
+
+// Outband TelephoneEvent detection
+WebRtc_Word32 ModuleRtpRtcpImpl::SetTelephoneEventStatus(
+  const bool enable,
+  const bool forwardToDecoder,
+  const bool detectEndOfTone) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "SetTelephoneEventStatus(enable:%d forwardToDecoder:%d"
+               " detectEndOfTone:%d)", enable, forwardToDecoder,
+               detectEndOfTone);
+
+  return _rtpReceiver.SetTelephoneEventStatus(enable, forwardToDecoder,
+                                              detectEndOfTone);
+}
+
+// Is outband TelephoneEvent turned on/off?
+bool ModuleRtpRtcpImpl::TelephoneEvent() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "TelephoneEvent()");
+
+  return _rtpReceiver.TelephoneEvent();
+}
+
+// Is forwarding of outband telephone events turned on/off?
+bool ModuleRtpRtcpImpl::TelephoneEventForwardToDecoder() const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "TelephoneEventForwardToDecoder()");
+
+  return _rtpReceiver.TelephoneEventForwardToDecoder();
+}
+
+// Send a TelephoneEvent tone using RFC 2833 (4733)
+WebRtc_Word32 ModuleRtpRtcpImpl::SendTelephoneEventOutband(
+    const WebRtc_UWord8 key,
+    const WebRtc_UWord16 timeMs,
+    const WebRtc_UWord8 level) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "SendTelephoneEventOutband(key:%u, timeMs:%u, level:%u)", key,
+               timeMs, level);
+
+  return _rtpSender.SendTelephoneEvent(key, timeMs, level);
+}
+
+bool ModuleRtpRtcpImpl::SendTelephoneEventActive(
+  WebRtc_Word8& telephoneEvent) const {
+
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SendTelephoneEventActive()");
+
+  return _rtpSender.SendTelephoneEventActive(telephoneEvent);
+}
+
+// set audio packet size, used to determine when it's time to send a DTMF
+// packet in silence (CNG)
+WebRtc_Word32 ModuleRtpRtcpImpl::SetAudioPacketSize(
+  const WebRtc_UWord16 packetSizeSamples) {
+
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SetAudioPacketSize(%u)",
+               packetSizeSamples);
+
+  return _rtpSender.SetAudioPacketSize(packetSizeSamples);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetRTPAudioLevelIndicationStatus(
+  const bool enable,
+  const WebRtc_UWord8 ID) {
+
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SetRTPAudioLevelIndicationStatus(enable=%d, ID=%u)",
+               enable,
+               ID);
+
+  if (enable) {
+    _rtpReceiver.RegisterRtpHeaderExtension(kRtpExtensionAudioLevel, ID);
+  } else {
+    _rtpReceiver.DeregisterRtpHeaderExtension(kRtpExtensionAudioLevel);
+  }
+  return _rtpSender.SetAudioLevelIndicationStatus(enable, ID);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::GetRTPAudioLevelIndicationStatus(
+  bool& enable,
+  WebRtc_UWord8& ID) const {
+
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "GetRTPAudioLevelIndicationStatus()");
+  return _rtpSender.AudioLevelIndicationStatus(enable, ID);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetAudioLevel(const WebRtc_UWord8 level_dBov) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SetAudioLevel(level_dBov:%u)",
+               level_dBov);
+  return _rtpSender.SetAudioLevel(level_dBov);
+}
+
+// Set payload type for Redundant Audio Data RFC 2198
+WebRtc_Word32 ModuleRtpRtcpImpl::SetSendREDPayloadType(
+  const WebRtc_Word8 payloadType) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SetSendREDPayloadType(%d)",
+               payloadType);
+
+  return _rtpSender.SetRED(payloadType);
+}
+
+// Get payload type for Redundant Audio Data RFC 2198
+WebRtc_Word32 ModuleRtpRtcpImpl::SendREDPayloadType(
+    WebRtc_Word8& payloadType) const {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "SendREDPayloadType()");
+
+  return _rtpSender.RED(payloadType);
+}
+
+
+/*
+*   Video
+*/
+RtpVideoCodecTypes ModuleRtpRtcpImpl::ReceivedVideoCodec() const {
+  return _rtpReceiver.VideoCodecType();
+}
+
+RtpVideoCodecTypes ModuleRtpRtcpImpl::SendVideoCodec() const {
+  return _rtpSender.VideoCodecType();
+}
+
+void ModuleRtpRtcpImpl::SetTargetSendBitrate(const uint32_t bitrate) {
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id,
+               "SetTargetSendBitrate: %ubit", bitrate);
+
+  const bool haveChildModules(_childModules.empty() ? false : true);
+  if (haveChildModules) {
+    CriticalSectionScoped lock(_criticalSectionModulePtrs.get());
+    if (_simulcast) {
+      uint32_t bitrate_remainder = bitrate;
+      std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+      for (int i = 0; it != _childModules.end() &&
+          i < _sendVideoCodec.numberOfSimulcastStreams; ++it) {
+        if ((*it)->SendingMedia()) {
+          ++i;
+          RTPSender& rtpSender = (*it)->_rtpSender;
+          if (_sendVideoCodec.simulcastStream[i].maxBitrate * 1000 >
+              bitrate_remainder) {
+            rtpSender.SetTargetSendBitrate(bitrate_remainder);
+            bitrate_remainder = 0;
+          } else {
+            rtpSender.SetTargetSendBitrate(
+                _sendVideoCodec.simulcastStream[i].maxBitrate * 1000);
+            bitrate_remainder -=
+                _sendVideoCodec.simulcastStream[i].maxBitrate * 1000;
+          }
+        }
+      }
+    } else {
+      std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+      for (; it != _childModules.end(); ++it) {
+        RTPSender& rtpSender = (*it)->_rtpSender;
+        rtpSender.SetTargetSendBitrate(bitrate);
+      }
+    }
+  } else {
+    _rtpSender.SetTargetSendBitrate(bitrate);
+  }
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetKeyFrameRequestMethod(
+  const KeyFrameRequestMethod method) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SetKeyFrameRequestMethod(method:%u)",
+               method);
+
+  _keyFrameReqMethod = method;
+  return 0;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::RequestKeyFrame() {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "RequestKeyFrame");
+
+  switch (_keyFrameReqMethod) {
+    case kKeyFrameReqFirRtp:
+      return _rtpSender.SendRTPIntraRequest();
+    case kKeyFrameReqPliRtcp:
+      return _rtcpSender.SendRTCP(kRtcpPli);
+    case kKeyFrameReqFirRtcp:
+      return _rtcpSender.SendRTCP(kRtcpFir);
+  }
+  return -1;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SendRTCPSliceLossIndication(
+  const WebRtc_UWord8 pictureID) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SendRTCPSliceLossIndication (pictureID:%d)",
+               pictureID);
+  return _rtcpSender.SendRTCP(kRtcpSli, 0, 0, false, pictureID);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetCameraDelay(const WebRtc_Word32 delayMS) {
+  WEBRTC_TRACE(kTraceModuleCall,
+               kTraceRtpRtcp,
+               _id,
+               "SetCameraDelay(%d)",
+               delayMS);
+  const bool defaultInstance(_childModules.empty() ? false : true);
+
+  if (defaultInstance) {
+    CriticalSectionScoped lock(_criticalSectionModulePtrs.get());
+
+    std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+    while (it != _childModules.end()) {
+      RtpRtcp* module = *it;
+      if (module) {
+        module->SetCameraDelay(delayMS);
+      }
+      it++;
+    }
+    return 0;
+  }
+  return _rtcpSender.SetCameraDelay(delayMS);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetGenericFECStatus(
+  const bool enable,
+  const WebRtc_UWord8 payloadTypeRED,
+  const WebRtc_UWord8 payloadTypeFEC) {
+  if (enable) {
+    WEBRTC_TRACE(kTraceModuleCall,
+                 kTraceRtpRtcp,
+                 _id,
+                 "SetGenericFECStatus(enable, %u)",
+                 payloadTypeRED);
+  } else {
+    WEBRTC_TRACE(kTraceModuleCall,
+                 kTraceRtpRtcp,
+                 _id,
+                 "SetGenericFECStatus(disable)");
+  }
+  return _rtpSender.SetGenericFECStatus(enable,
+                                        payloadTypeRED,
+                                        payloadTypeFEC);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::GenericFECStatus(
+  bool& enable,
+  WebRtc_UWord8& payloadTypeRED,
+  WebRtc_UWord8& payloadTypeFEC) {
+
+  WEBRTC_TRACE(kTraceModuleCall, kTraceRtpRtcp, _id, "GenericFECStatus()");
+
+  bool childEnabled = false;
+  const bool defaultInstance(_childModules.empty() ? false : true);
+  if (defaultInstance) {
+    // for default we need to check all child modules too
+    CriticalSectionScoped lock(_criticalSectionModulePtrs.get());
+    std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+    while (it != _childModules.end()) {
+      RtpRtcp* module = *it;
+      if (module)  {
+        bool enabled = false;
+        WebRtc_UWord8 dummyPTypeRED = 0;
+        WebRtc_UWord8 dummyPTypeFEC = 0;
+        if (module->GenericFECStatus(enabled,
+                                     dummyPTypeRED,
+                                     dummyPTypeFEC) == 0 && enabled) {
+          childEnabled = true;
+          break;
+        }
+      }
+      it++;
+    }
+  }
+  WebRtc_Word32 retVal = _rtpSender.GenericFECStatus(enable,
+                                                     payloadTypeRED,
+                                                     payloadTypeFEC);
+  if (childEnabled) {
+    // returns true if enabled for any child module
+    enable = childEnabled;
+  }
+  return retVal;
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SetFecParameters(
+    const FecProtectionParams* delta_params,
+    const FecProtectionParams* key_params) {
+  const bool defaultInstance(_childModules.empty() ? false : true);
+  if (defaultInstance)  {
+    // for default we need to update all child modules too
+    CriticalSectionScoped lock(_criticalSectionModulePtrs.get());
+
+    std::list<ModuleRtpRtcpImpl*>::iterator it = _childModules.begin();
+    while (it != _childModules.end()) {
+      RtpRtcp* module = *it;
+      if (module) {
+        module->SetFecParameters(delta_params, key_params);
+      }
+      it++;
+    }
+    return 0;
+  }
+  return _rtpSender.SetFecParameters(delta_params, key_params);
+}
+
+void ModuleRtpRtcpImpl::SetRemoteSSRC(const WebRtc_UWord32 SSRC) {
+  // inform about the incoming SSRC
+  _rtcpSender.SetRemoteSSRC(SSRC);
+  _rtcpReceiver.SetRemoteSSRC(SSRC);
+
+  // check for a SSRC collision
+  if (_rtpSender.SSRC() == SSRC && !_collisionDetected) {
+    // if we detect a collision change the SSRC but only once
+    _collisionDetected = true;
+    WebRtc_UWord32 newSSRC = _rtpSender.GenerateNewSSRC();
+    if (newSSRC == 0) {
+      // configured via API ignore
+      return;
+    }
+    if (kRtcpOff != _rtcpSender.Status()) {
+      // send RTCP bye on the current SSRC
+      _rtcpSender.SendRTCP(kRtcpBye);
+    }
+    // change local SSRC
+
+    // inform all objects about the new SSRC
+    _rtcpSender.SetSSRC(newSSRC);
+    _rtcpReceiver.SetSSRC(newSSRC);
+  }
+}
+
+WebRtc_UWord32 ModuleRtpRtcpImpl::BitrateReceivedNow() const {
+  return _rtpReceiver.BitrateNow();
+}
+
+void ModuleRtpRtcpImpl::BitrateSent(WebRtc_UWord32* totalRate,
+                                    WebRtc_UWord32* videoRate,
+                                    WebRtc_UWord32* fecRate,
+                                    WebRtc_UWord32* nackRate) const {
+  const bool defaultInstance(_childModules.empty() ? false : true);
+
+  if (defaultInstance) {
+    // for default we need to update the send bitrate
+    CriticalSectionScoped lock(_criticalSectionModulePtrsFeedback.get());
+
+    if (totalRate != NULL)
+      *totalRate = 0;
+    if (videoRate != NULL)
+      *videoRate = 0;
+    if (fecRate != NULL)
+      *fecRate = 0;
+    if (nackRate != NULL)
+      *nackRate = 0;
+
+    std::list<ModuleRtpRtcpImpl*>::const_iterator it =
+      _childModules.begin();
+    while (it != _childModules.end()) {
+      RtpRtcp* module = *it;
+      if (module) {
+        WebRtc_UWord32 childTotalRate = 0;
+        WebRtc_UWord32 childVideoRate = 0;
+        WebRtc_UWord32 childFecRate = 0;
+        WebRtc_UWord32 childNackRate = 0;
+        module->BitrateSent(&childTotalRate,
+                            &childVideoRate,
+                            &childFecRate,
+                            &childNackRate);
+        if (totalRate != NULL && childTotalRate > *totalRate)
+          *totalRate = childTotalRate;
+        if (videoRate != NULL && childVideoRate > *videoRate)
+          *videoRate = childVideoRate;
+        if (fecRate != NULL && childFecRate > *fecRate)
+          *fecRate = childFecRate;
+        if (nackRate != NULL && childNackRate > *nackRate)
+          *nackRate = childNackRate;
+      }
+      it++;
+    }
+    return;
+  }
+  if (totalRate != NULL)
+    *totalRate = _rtpSender.BitrateLast();
+  if (videoRate != NULL)
+    *videoRate = _rtpSender.VideoBitrateSent();
+  if (fecRate != NULL)
+    *fecRate = _rtpSender.FecOverheadRate();
+  if (nackRate != NULL)
+    *nackRate = _rtpSender.NackOverheadRate();
+}
+
+int ModuleRtpRtcpImpl::EstimatedReceiveBandwidth(
+    WebRtc_UWord32* available_bandwidth) const {
+  if (remote_bitrate_) {
+    if (!remote_bitrate_->LatestEstimate(_rtpReceiver.SSRC(),
+                                         available_bandwidth)) {
+      return -1;
+    }
+    return 0;
+  }
+  // No bandwidth receive-side bandwidth estimation is connected to this module.
+  return -1;
+}
+
+// bad state of RTP receiver request a keyframe
+void ModuleRtpRtcpImpl::OnRequestIntraFrame() {
+  RequestKeyFrame();
+}
+
+void ModuleRtpRtcpImpl::OnRequestSendReport() {
+  _rtcpSender.SendRTCP(kRtcpSr);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::SendRTCPReferencePictureSelection(
+  const WebRtc_UWord64 pictureID) {
+  return _rtcpSender.SendRTCP(kRtcpRpsi, 0, 0, false, pictureID);
+}
+
+WebRtc_UWord32 ModuleRtpRtcpImpl::SendTimeOfSendReport(
+  const WebRtc_UWord32 sendReport) {
+  return _rtcpSender.SendTimeOfSendReport(sendReport);
+}
+
+void ModuleRtpRtcpImpl::OnReceivedNACK(
+  const WebRtc_UWord16 nackSequenceNumbersLength,
+  const WebRtc_UWord16* nackSequenceNumbers) {
+  if (!_rtpSender.StorePackets() ||
+      nackSequenceNumbers == NULL ||
+      nackSequenceNumbersLength == 0) {
+    return;
+  }
+  WebRtc_UWord16 avgRTT = 0;
+  _rtcpReceiver.RTT(_rtpReceiver.SSRC(), NULL, &avgRTT, NULL, NULL);
+  _rtpSender.OnReceivedNACK(nackSequenceNumbersLength,
+                            nackSequenceNumbers,
+                            avgRTT);
+}
+
+WebRtc_Word32 ModuleRtpRtcpImpl::LastReceivedNTP(
+  WebRtc_UWord32& RTCPArrivalTimeSecs,  // when we received the last report
+  WebRtc_UWord32& RTCPArrivalTimeFrac,
+  WebRtc_UWord32& remoteSR) {
+  // remote SR: NTP inside the last received (mid 16 bits from sec and frac)
+  WebRtc_UWord32 NTPsecs = 0;
+  WebRtc_UWord32 NTPfrac = 0;
+
+  if (-1 == _rtcpReceiver.NTP(&NTPsecs,
+                              &NTPfrac,
+                              &RTCPArrivalTimeSecs,
+                              &RTCPArrivalTimeFrac)) {
+    return -1;
+  }
+  remoteSR = ((NTPsecs & 0x0000ffff) << 16) + ((NTPfrac & 0xffff0000) >> 16);
+  return 0;
+}
+
+bool ModuleRtpRtcpImpl::UpdateRTCPReceiveInformationTimers() {
+  // if this returns true this channel has timed out
+  // periodically check if this is true and if so call UpdateTMMBR
+  return _rtcpReceiver.UpdateRTCPReceiveInformationTimers();
+}
+
+// called from RTCPsender
+WebRtc_Word32 ModuleRtpRtcpImpl::BoundingSet(bool& tmmbrOwner,
+                                             TMMBRSet*& boundingSet) {
+  return _rtcpReceiver.BoundingSet(tmmbrOwner, boundingSet);
+}
+}  // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/rtp_rtcp_impl.h b/src/modules/rtp_rtcp/source/rtp_rtcp_impl.h
new file mode 100644
index 0000000..046e4e1
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_rtcp_impl.h
@@ -0,0 +1,515 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RTCP_IMPL_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RTCP_IMPL_H_
+
+#include <list>
+
+#include "modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "modules/rtp_rtcp/source/rtcp_receiver.h"
+#include "modules/rtp_rtcp/source/rtcp_sender.h"
+#include "modules/rtp_rtcp/source/rtp_receiver.h"
+#include "modules/rtp_rtcp/source/rtp_sender.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+
+#ifdef MATLAB
+class MatlabPlot;
+#endif
+
+namespace webrtc {
+
+class ModuleRtpRtcpImpl : public RtpRtcp {
+ public:
+    explicit ModuleRtpRtcpImpl(const RtpRtcp::Configuration& configuration);
+
+    virtual ~ModuleRtpRtcpImpl();
+
+    // returns the number of milliseconds until the module want a worker thread to call Process
+    virtual WebRtc_Word32 TimeUntilNextProcess();
+
+    // Process any pending tasks such as timeouts
+    virtual WebRtc_Word32 Process();
+
+    /**
+    *   Receiver
+    */
+    // configure a timeout value
+    virtual WebRtc_Word32 SetPacketTimeout(const WebRtc_UWord32 RTPtimeoutMS,
+                                           const WebRtc_UWord32 RTCPtimeoutMS);
+
+    // Set periodic dead or alive notification
+    virtual WebRtc_Word32 SetPeriodicDeadOrAliveStatus(
+        const bool enable,
+        const WebRtc_UWord8 sampleTimeSeconds);
+
+    // Get periodic dead or alive notification status
+    virtual WebRtc_Word32 PeriodicDeadOrAliveStatus(
+        bool &enable,
+        WebRtc_UWord8 &sampleTimeSeconds);
+
+    virtual WebRtc_Word32 RegisterReceivePayload(const CodecInst& voiceCodec);
+
+    virtual WebRtc_Word32 RegisterReceivePayload(const VideoCodec& videoCodec);
+
+    virtual WebRtc_Word32 ReceivePayloadType(const CodecInst& voiceCodec,
+                                             WebRtc_Word8* plType);
+
+    virtual WebRtc_Word32 ReceivePayloadType(const VideoCodec& videoCodec,
+                                             WebRtc_Word8* plType);
+
+    virtual WebRtc_Word32 DeRegisterReceivePayload(
+        const WebRtc_Word8 payloadType);
+
+    // register RTP header extension
+    virtual WebRtc_Word32 RegisterReceiveRtpHeaderExtension(
+        const RTPExtensionType type,
+        const WebRtc_UWord8 id);
+
+    virtual WebRtc_Word32 DeregisterReceiveRtpHeaderExtension(
+        const RTPExtensionType type);
+
+    // get the currently configured SSRC filter
+    virtual WebRtc_Word32 SSRCFilter(WebRtc_UWord32& allowedSSRC) const;
+
+    // set a SSRC to be used as a filter for incoming RTP streams
+    virtual WebRtc_Word32 SetSSRCFilter(const bool enable, const WebRtc_UWord32 allowedSSRC);
+
+    // Get last received remote timestamp
+    virtual WebRtc_UWord32 RemoteTimestamp() const;
+
+    // Get the current estimated remote timestamp
+    virtual WebRtc_Word32 EstimatedRemoteTimeStamp(WebRtc_UWord32& timestamp) const;
+
+    virtual WebRtc_UWord32 RemoteSSRC() const;
+
+    virtual WebRtc_Word32 RemoteCSRCs( WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]) const ;
+
+    virtual WebRtc_Word32 SetRTXReceiveStatus(const bool enable,
+                                              const WebRtc_UWord32 SSRC);
+
+    virtual WebRtc_Word32 RTXReceiveStatus(bool* enable,
+                                           WebRtc_UWord32* SSRC) const;
+
+    // called by the network module when we receive a packet
+    virtual WebRtc_Word32 IncomingPacket( const WebRtc_UWord8* incomingPacket,
+                                        const WebRtc_UWord16 packetLength);
+
+    /**
+    *   Sender
+    */
+    virtual WebRtc_Word32 RegisterSendPayload(const CodecInst& voiceCodec);
+
+    virtual WebRtc_Word32 RegisterSendPayload(const VideoCodec& videoCodec);
+
+    virtual WebRtc_Word32 DeRegisterSendPayload(const WebRtc_Word8 payloadType);
+
+    virtual WebRtc_Word8 SendPayloadType() const;
+
+    // register RTP header extension
+    virtual WebRtc_Word32 RegisterSendRtpHeaderExtension(
+        const RTPExtensionType type,
+        const WebRtc_UWord8 id);
+
+    virtual WebRtc_Word32 DeregisterSendRtpHeaderExtension(
+        const RTPExtensionType type);
+
+    virtual void SetTransmissionSmoothingStatus(const bool enable);
+
+    virtual bool TransmissionSmoothingStatus() const;
+
+    // get start timestamp
+    virtual WebRtc_UWord32 StartTimestamp() const;
+
+    // configure start timestamp, default is a random number
+    virtual WebRtc_Word32 SetStartTimestamp(const WebRtc_UWord32 timestamp);
+
+    virtual WebRtc_UWord16 SequenceNumber() const;
+
+    // Set SequenceNumber, default is a random number
+    virtual WebRtc_Word32 SetSequenceNumber(const WebRtc_UWord16 seq);
+
+    virtual WebRtc_UWord32 SSRC() const;
+
+    // configure SSRC, default is a random number
+    virtual WebRtc_Word32 SetSSRC(const WebRtc_UWord32 ssrc);
+
+    virtual WebRtc_Word32 CSRCs( WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]) const ;
+
+    virtual WebRtc_Word32 SetCSRCs( const WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize],
+                                  const WebRtc_UWord8 arrLength);
+
+    virtual WebRtc_Word32 SetCSRCStatus(const bool include);
+
+    virtual WebRtc_UWord32 PacketCountSent() const;
+
+    virtual int CurrentSendFrequencyHz() const;
+
+    virtual WebRtc_UWord32 ByteCountSent() const;
+
+    virtual WebRtc_Word32 SetRTXSendStatus(const bool enable,
+                                           const bool setSSRC,
+                                           const WebRtc_UWord32 SSRC);
+
+    virtual WebRtc_Word32 RTXSendStatus(bool* enable,
+                                        WebRtc_UWord32* SSRC) const;
+
+    // sends kRtcpByeCode when going from true to false
+    virtual WebRtc_Word32 SetSendingStatus(const bool sending);
+
+    virtual bool Sending() const;
+
+    // Drops or relays media packets
+    virtual WebRtc_Word32 SetSendingMediaStatus(const bool sending);
+
+    virtual bool SendingMedia() const;
+
+    // Used by the codec module to deliver a video or audio frame for packetization
+    virtual WebRtc_Word32 SendOutgoingData(
+        const FrameType frameType,
+        const WebRtc_Word8 payloadType,
+        const WebRtc_UWord32 timeStamp,
+        int64_t capture_time_ms,
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord32 payloadSize,
+        const RTPFragmentationHeader* fragmentation = NULL,
+        const RTPVideoHeader* rtpVideoHdr = NULL);
+
+    /*
+    *   RTCP
+    */
+
+    // Get RTCP status
+    virtual RTCPMethod RTCP() const;
+
+    // configure RTCP status i.e on/off
+    virtual WebRtc_Word32 SetRTCPStatus(const RTCPMethod method);
+
+    // Set RTCP CName
+    virtual WebRtc_Word32 SetCNAME(const char cName[RTCP_CNAME_SIZE]);
+
+    // Get RTCP CName
+    virtual WebRtc_Word32 CNAME(char cName[RTCP_CNAME_SIZE]);
+
+    // Get remote CName
+    virtual WebRtc_Word32 RemoteCNAME(const WebRtc_UWord32 remoteSSRC,
+                                      char cName[RTCP_CNAME_SIZE]) const;
+
+    // Get remote NTP
+    virtual WebRtc_Word32 RemoteNTP(WebRtc_UWord32 *ReceivedNTPsecs,
+                                  WebRtc_UWord32 *ReceivedNTPfrac,
+                                  WebRtc_UWord32 *RTCPArrivalTimeSecs,
+                                  WebRtc_UWord32 *RTCPArrivalTimeFrac) const ;
+
+    virtual WebRtc_Word32 AddMixedCNAME(const WebRtc_UWord32 SSRC,
+                                        const char cName[RTCP_CNAME_SIZE]);
+
+    virtual WebRtc_Word32 RemoveMixedCNAME(const WebRtc_UWord32 SSRC);
+
+    // Get RoundTripTime
+    virtual WebRtc_Word32 RTT(const WebRtc_UWord32 remoteSSRC,
+                            WebRtc_UWord16* RTT,
+                            WebRtc_UWord16* avgRTT,
+                            WebRtc_UWord16* minRTT,
+                            WebRtc_UWord16* maxRTT) const;
+
+    // Reset RoundTripTime statistics
+    virtual WebRtc_Word32 ResetRTT(const WebRtc_UWord32 remoteSSRC);
+
+    // Force a send of an RTCP packet
+    // normal SR and RR are triggered via the process function
+    virtual WebRtc_Word32 SendRTCP(WebRtc_UWord32 rtcpPacketType = kRtcpReport);
+
+    // statistics of our localy created statistics of the received RTP stream
+    virtual WebRtc_Word32 StatisticsRTP(WebRtc_UWord8  *fraction_lost,
+                                      WebRtc_UWord32 *cum_lost,
+                                      WebRtc_UWord32 *ext_max,
+                                      WebRtc_UWord32 *jitter,
+                                      WebRtc_UWord32 *max_jitter = NULL) const;
+
+    // Reset RTP statistics
+    virtual WebRtc_Word32 ResetStatisticsRTP();
+
+    virtual WebRtc_Word32 ResetReceiveDataCountersRTP();
+
+    virtual WebRtc_Word32 ResetSendDataCountersRTP();
+
+    // statistics of the amount of data sent and received
+    virtual WebRtc_Word32 DataCountersRTP(WebRtc_UWord32 *bytesSent,
+                                          WebRtc_UWord32 *packetsSent,
+                                          WebRtc_UWord32 *bytesReceived,
+                                          WebRtc_UWord32 *packetsReceived) const;
+
+    virtual WebRtc_Word32 ReportBlockStatistics(
+        WebRtc_UWord8 *fraction_lost,
+        WebRtc_UWord32 *cum_lost,
+        WebRtc_UWord32 *ext_max,
+        WebRtc_UWord32 *jitter,
+        WebRtc_UWord32 *jitter_transmission_time_offset);
+
+    // Get received RTCP report, sender info
+    virtual WebRtc_Word32 RemoteRTCPStat( RTCPSenderInfo* senderInfo);
+
+    // Get received RTCP report, report block
+    virtual WebRtc_Word32 RemoteRTCPStat(
+        std::vector<RTCPReportBlock>* receiveBlocks) const;
+
+    // Set received RTCP report block
+    virtual WebRtc_Word32 AddRTCPReportBlock(const WebRtc_UWord32 SSRC,
+                                           const RTCPReportBlock* receiveBlock);
+
+    virtual WebRtc_Word32 RemoveRTCPReportBlock(const WebRtc_UWord32 SSRC);
+
+    /*
+    *  (REMB) Receiver Estimated Max Bitrate
+    */
+    virtual bool REMB() const;
+
+    virtual WebRtc_Word32 SetREMBStatus(const bool enable);
+
+    virtual WebRtc_Word32 SetREMBData(const WebRtc_UWord32 bitrate,
+                                      const WebRtc_UWord8 numberOfSSRC,
+                                      const WebRtc_UWord32* SSRC);
+
+    /*
+    *   (IJ) Extended jitter report.
+    */
+    virtual bool IJ() const;
+
+    virtual WebRtc_Word32 SetIJStatus(const bool enable);
+
+    /*
+    *   (TMMBR) Temporary Max Media Bit Rate
+    */
+    virtual bool TMMBR() const ;
+
+    virtual WebRtc_Word32 SetTMMBRStatus(const bool enable);
+
+    WebRtc_Word32 SetTMMBN(const TMMBRSet* boundingSet);
+
+    virtual WebRtc_UWord16 MaxPayloadLength() const;
+
+    virtual WebRtc_UWord16 MaxDataPayloadLength() const;
+
+    virtual WebRtc_Word32 SetMaxTransferUnit(const WebRtc_UWord16 size);
+
+    virtual WebRtc_Word32 SetTransportOverhead(const bool TCP,
+                                             const bool IPV6,
+                                             const WebRtc_UWord8 authenticationOverhead = 0);
+
+    /*
+    *   (NACK) Negative acknowledgement
+    */
+
+    // Is Negative acknowledgement requests on/off?
+    virtual NACKMethod NACK() const ;
+
+    // Turn negative acknowledgement requests on/off
+    virtual WebRtc_Word32 SetNACKStatus(const NACKMethod method);
+
+    virtual int SelectiveRetransmissions() const;
+
+    virtual int SetSelectiveRetransmissions(uint8_t settings);
+
+    // Send a Negative acknowledgement packet
+    virtual WebRtc_Word32 SendNACK(const WebRtc_UWord16* nackList,
+                                   const WebRtc_UWord16 size);
+
+    // Store the sent packets, needed to answer to a Negative acknowledgement requests
+    virtual WebRtc_Word32 SetStorePacketsStatus(const bool enable, const WebRtc_UWord16 numberToStore = 200);
+
+    /*
+    *   (APP) Application specific data
+    */
+    virtual WebRtc_Word32 SetRTCPApplicationSpecificData(const WebRtc_UWord8 subType,
+                                                       const WebRtc_UWord32 name,
+                                                       const WebRtc_UWord8* data,
+                                                       const WebRtc_UWord16 length);
+    /*
+    *   (XR) VOIP metric
+    */
+    virtual WebRtc_Word32 SetRTCPVoIPMetrics(const RTCPVoIPMetric* VoIPMetric);
+
+    /*
+    *   Audio
+    */
+
+    // set audio packet size, used to determine when it's time to send a DTMF packet in silence (CNG)
+    virtual WebRtc_Word32 SetAudioPacketSize(const WebRtc_UWord16 packetSizeSamples);
+
+    // Outband DTMF detection
+    virtual WebRtc_Word32 SetTelephoneEventStatus(const bool enable,
+                                                const bool forwardToDecoder,
+                                                const bool detectEndOfTone = false);
+
+    // Is outband DTMF turned on/off?
+    virtual bool TelephoneEvent() const;
+
+    // Is forwarding of outband telephone events turned on/off?
+    virtual bool TelephoneEventForwardToDecoder() const;
+
+    virtual bool SendTelephoneEventActive(WebRtc_Word8& telephoneEvent) const;
+
+    // Send a TelephoneEvent tone using RFC 2833 (4733)
+    virtual WebRtc_Word32 SendTelephoneEventOutband(const WebRtc_UWord8 key,
+                                                  const WebRtc_UWord16 time_ms,
+                                                  const WebRtc_UWord8 level);
+
+    // Set payload type for Redundant Audio Data RFC 2198
+    virtual WebRtc_Word32 SetSendREDPayloadType(const WebRtc_Word8 payloadType);
+
+    // Get payload type for Redundant Audio Data RFC 2198
+    virtual WebRtc_Word32 SendREDPayloadType(WebRtc_Word8& payloadType) const;
+
+    // Set status and ID for header-extension-for-audio-level-indication.
+    virtual WebRtc_Word32 SetRTPAudioLevelIndicationStatus(const bool enable,
+                                                         const WebRtc_UWord8 ID);
+
+    // Get status and ID for header-extension-for-audio-level-indication.
+    virtual WebRtc_Word32 GetRTPAudioLevelIndicationStatus(bool& enable,
+                                                         WebRtc_UWord8& ID) const;
+
+    // Store the audio level in dBov for header-extension-for-audio-level-indication.
+    virtual WebRtc_Word32 SetAudioLevel(const WebRtc_UWord8 level_dBov);
+
+    /*
+    *   Video
+    */
+    virtual RtpVideoCodecTypes ReceivedVideoCodec() const;
+
+    virtual RtpVideoCodecTypes SendVideoCodec() const;
+
+    virtual WebRtc_Word32 SendRTCPSliceLossIndication(const WebRtc_UWord8 pictureID);
+
+    // Set method for requestion a new key frame
+    virtual WebRtc_Word32 SetKeyFrameRequestMethod(const KeyFrameRequestMethod method);
+
+    // send a request for a keyframe
+    virtual WebRtc_Word32 RequestKeyFrame();
+
+    virtual WebRtc_Word32 SetCameraDelay(const WebRtc_Word32 delayMS);
+
+    virtual void SetTargetSendBitrate(const WebRtc_UWord32 bitrate);
+
+    virtual WebRtc_Word32 SetGenericFECStatus(const bool enable,
+                                            const WebRtc_UWord8 payloadTypeRED,
+                                            const WebRtc_UWord8 payloadTypeFEC);
+
+    virtual WebRtc_Word32 GenericFECStatus(bool& enable,
+                                         WebRtc_UWord8& payloadTypeRED,
+                                         WebRtc_UWord8& payloadTypeFEC);
+
+    virtual WebRtc_Word32 SetFecParameters(
+        const FecProtectionParams* delta_params,
+        const FecProtectionParams* key_params);
+
+    virtual WebRtc_Word32 LastReceivedNTP(WebRtc_UWord32& NTPsecs,
+                                          WebRtc_UWord32& NTPfrac,
+                                          WebRtc_UWord32& remoteSR);
+
+    virtual WebRtc_Word32 BoundingSet(bool &tmmbrOwner,
+                                      TMMBRSet*& boundingSetRec);
+
+    virtual void BitrateSent(WebRtc_UWord32* totalRate,
+                             WebRtc_UWord32* videoRate,
+                             WebRtc_UWord32* fecRate,
+                             WebRtc_UWord32* nackRate) const;
+
+    virtual int EstimatedReceiveBandwidth(
+        WebRtc_UWord32* available_bandwidth) const;
+
+    virtual void SetRemoteSSRC(const WebRtc_UWord32 SSRC);
+
+    virtual WebRtc_UWord32 SendTimeOfSendReport(const WebRtc_UWord32 sendReport);
+
+    // good state of RTP receiver inform sender
+    virtual WebRtc_Word32 SendRTCPReferencePictureSelection(const WebRtc_UWord64 pictureID);
+
+    void OnReceivedTMMBR();
+
+    // bad state of RTP receiver request a keyframe
+    void OnRequestIntraFrame();
+
+    // received a request for a new SLI
+    void OnReceivedSliceLossIndication(const WebRtc_UWord8 pictureID);
+
+    // received a new refereence frame
+    void OnReceivedReferencePictureSelectionIndication(
+        const WebRtc_UWord64 pitureID);
+
+    void OnReceivedNACK(const WebRtc_UWord16 nackSequenceNumbersLength,
+                        const WebRtc_UWord16* nackSequenceNumbers);
+
+    void OnRequestSendReport();
+
+    // Following function is only called when constructing the object so no
+    // need to worry about data race.
+    void OwnsClock() { _owns_clock = true; }
+
+protected:
+    void RegisterChildModule(RtpRtcp* module);
+
+    void DeRegisterChildModule(RtpRtcp* module);
+
+    bool UpdateRTCPReceiveInformationTimers();
+
+    void ProcessDeadOrAliveTimer();
+
+    WebRtc_UWord32 BitrateReceivedNow() const;
+
+    // Get remote SequenceNumber
+    WebRtc_UWord16 RemoteSequenceNumber() const;
+
+    // only for internal testing
+    WebRtc_UWord32 LastSendReport(WebRtc_UWord32& lastRTCPTime);
+
+    RTPSender                 _rtpSender;
+    RTPReceiver               _rtpReceiver;
+
+    RTCPSender                _rtcpSender;
+    RTCPReceiver              _rtcpReceiver;
+
+    bool                      _owns_clock;
+    RtpRtcpClock&             _clock;
+private:
+    WebRtc_Word32             _id;
+    const bool                _audio;
+    bool                      _collisionDetected;
+    WebRtc_Word64             _lastProcessTime;
+    WebRtc_Word64             _lastBitrateProcessTime;
+    WebRtc_Word64             _lastPacketTimeoutProcessTime;
+    WebRtc_UWord16            _packetOverHead;
+
+    scoped_ptr<CriticalSectionWrapper> _criticalSectionModulePtrs;
+    scoped_ptr<CriticalSectionWrapper> _criticalSectionModulePtrsFeedback;
+    ModuleRtpRtcpImpl*            _defaultModule;
+    std::list<ModuleRtpRtcpImpl*> _childModules;
+
+    // Dead or alive
+    bool                  _deadOrAliveActive;
+    WebRtc_UWord32        _deadOrAliveTimeoutMS;
+    WebRtc_Word64        _deadOrAliveLastTimer;
+    // send side
+    NACKMethod            _nackMethod;
+    WebRtc_UWord32        _nackLastTimeSent;
+    WebRtc_UWord16        _nackLastSeqNumberSent;
+
+    bool                  _simulcast;
+    VideoCodec            _sendVideoCodec;
+    KeyFrameRequestMethod _keyFrameReqMethod;
+
+    RemoteBitrateEstimator* remote_bitrate_;
+
+#ifdef MATLAB
+    MatlabPlot*           _plot1;
+#endif
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_RTCP_IMPL_H_
diff --git a/src/modules/rtp_rtcp/source/rtp_rtcp_tests.gypi b/src/modules/rtp_rtcp/source/rtp_rtcp_tests.gypi
new file mode 100644
index 0000000..09a1fe2
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_rtcp_tests.gypi
@@ -0,0 +1,51 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'rtp_rtcp_unittests',
+      'type': 'executable',
+      'dependencies': [
+        'rtp_rtcp',
+        '<(DEPTH)/testing/gmock.gyp:gmock',
+        '<(DEPTH)/testing/gtest.gyp:gtest',
+        '<(webrtc_root)/test/test.gyp:test_support_main',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../../../',
+      ],
+      'sources': [
+        'fec_test_helper.cc',
+        'fec_test_helper.h',
+        'producer_fec_unittest.cc',
+        'receiver_fec_unittest.cc',
+        'rtp_fec_unittest.cc',
+        'rtp_format_vp8_unittest.cc',
+        'rtp_format_vp8_test_helper.cc',
+        'rtp_format_vp8_test_helper.h',
+        'rtcp_format_remb_unittest.cc',
+        'rtp_packet_history_unittest.cc',
+        'rtp_utility_unittest.cc',
+        'rtp_header_extension_unittest.cc',
+        'rtp_sender_unittest.cc',
+        'rtcp_sender_unittest.cc',
+        'rtcp_receiver_unittest.cc',
+        'transmission_bucket_unittest.cc',
+        'vp8_partition_aggregator_unittest.cc',
+      ],
+    },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/modules/rtp_rtcp/source/rtp_sender.cc b/src/modules/rtp_rtcp/source/rtp_sender.cc
new file mode 100644
index 0000000..5f83fe1
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_sender.cc
@@ -0,0 +1,1608 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cstdlib> // srand
+
+#include "rtp_sender.h"
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+#include "rtp_packet_history.h"
+#include "rtp_sender_audio.h"
+#include "rtp_sender_video.h"
+
+namespace webrtc {
+RTPSender::RTPSender(const WebRtc_Word32 id,
+                     const bool audio,
+                     RtpRtcpClock* clock) :
+    Bitrate(clock),
+    _id(id),
+    _audioConfigured(audio),
+    _audio(NULL),
+    _video(NULL),
+    _sendCritsect(CriticalSectionWrapper::CreateCriticalSection()),
+    _transportCritsect(CriticalSectionWrapper::CreateCriticalSection()),
+
+    _transport(NULL),
+
+    _sendingMedia(true), // Default to sending media
+
+    _maxPayloadLength(IP_PACKET_SIZE-28), // default is IP/UDP
+    _targetSendBitrate(0),
+    _packetOverHead(28),
+
+    _payloadType(-1),
+    _payloadTypeMap(),
+
+    _rtpHeaderExtensionMap(),
+    _transmissionTimeOffset(0),
+
+    // NACK
+    _nackByteCountTimes(),
+    _nackByteCount(),
+    _nackBitrate(clock),
+
+    _packetHistory(new RTPPacketHistory(clock)),
+    _sendBucket(),
+    _timeLastSendToNetworkUpdate(clock->GetTimeInMS()),
+    _transmissionSmoothing(false),
+
+    // statistics
+    _packetsSent(0),
+    _payloadBytesSent(0),
+
+    // RTP variables
+    _startTimeStampForced(false),
+    _startTimeStamp(0),
+    _ssrcDB(*SSRCDatabase::GetSSRCDatabase()),
+    _remoteSSRC(0),
+    _sequenceNumberForced(false),
+    _sequenceNumber(0),
+    _sequenceNumberRTX(0),
+    _ssrcForced(false),
+    _ssrc(0),
+    _timeStamp(0),
+    _CSRCs(0),
+    _CSRC(),
+    _includeCSRCs(true),
+    _RTX(false),
+    _ssrcRTX(0)
+{
+    memset(_nackByteCountTimes, 0, sizeof(_nackByteCountTimes));
+    memset(_nackByteCount, 0, sizeof(_nackByteCount));
+
+    memset(_CSRC, 0, sizeof(_CSRC));
+
+    // we need to seed the random generator, otherwise we get 26500 each time, hardly a random value :)
+    srand( (WebRtc_UWord32)_clock.GetTimeInMS() );
+
+    _ssrc = _ssrcDB.CreateSSRC(); // can't be 0
+
+    if(audio)
+    {
+        _audio = new RTPSenderAudio(id, &_clock, this);
+    } else
+    {
+        _video = new RTPSenderVideo(id, &_clock, this);
+    }
+    WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, id, "%s created", __FUNCTION__);
+}
+
+RTPSender::~RTPSender() {
+  if(_remoteSSRC != 0) {
+    _ssrcDB.ReturnSSRC(_remoteSSRC);
+  }
+  _ssrcDB.ReturnSSRC(_ssrc);
+
+  SSRCDatabase::ReturnSSRCDatabase();
+  delete _sendCritsect;
+  delete _transportCritsect;
+  while (!_payloadTypeMap.empty()) {
+    std::map<WebRtc_Word8, ModuleRTPUtility::Payload*>::iterator it =
+        _payloadTypeMap.begin();
+    delete it->second;
+    _payloadTypeMap.erase(it);
+  }
+  delete _packetHistory;
+  delete _audio;
+  delete _video;
+
+  WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, _id, "%s deleted", __FUNCTION__);
+}
+/*
+WebRtc_Word32
+RTPSender::Init(const WebRtc_UWord32 remoteSSRC)
+{
+    CriticalSectionScoped cs(_sendCritsect);
+
+    // reset to default generation
+    _ssrcForced = false;
+    _startTimeStampForced = false;
+
+    // register a remote SSRC if we have it to avoid collisions
+    if(remoteSSRC != 0)
+    {
+        if(_ssrc == remoteSSRC)
+        {
+            // collision detected
+            _ssrc = _ssrcDB.CreateSSRC(); // can't be 0
+        }
+        _remoteSSRC = remoteSSRC;
+        _ssrcDB.RegisterSSRC(remoteSSRC);
+    }
+    _sequenceNumber = rand() / (RAND_MAX / MAX_INIT_RTP_SEQ_NUMBER);
+    _sequenceNumberRTX = rand() / (RAND_MAX / MAX_INIT_RTP_SEQ_NUMBER);
+    _packetsSent = 0;
+    _payloadBytesSent = 0;
+    _packetOverHead = 28;
+
+    _rtpHeaderExtensionMap.Erase();
+
+    while (!_payloadTypeMap.empty()) {
+      std::map<WebRtc_Word8, ModuleRTPUtility::Payload*>::iterator it =
+          _payloadTypeMap.begin();
+      delete it->second;
+      _payloadTypeMap.erase(it);
+    }
+
+    memset(_CSRC, 0, sizeof(_CSRC));
+
+    memset(_nackByteCount, 0, sizeof(_nackByteCount));
+    memset(_nackByteCountTimes, 0, sizeof(_nackByteCountTimes));
+    _nackBitrate.Init();
+
+    SetStorePacketsStatus(false, 0);
+    _sendBucket.Reset();
+
+    Bitrate::Init();
+
+    if(_audioConfigured)
+    {
+        _audio->Init();
+    } else
+    {
+        _video->Init();
+    }
+    return(0);
+}
+*/
+
+void RTPSender::SetTargetSendBitrate(const WebRtc_UWord32 bits) {
+  _targetSendBitrate = static_cast<uint16_t>(bits / 1000);
+}
+
+WebRtc_UWord16
+RTPSender::ActualSendBitrateKbit() const
+{
+    return (WebRtc_UWord16) (Bitrate::BitrateNow()/1000);
+}
+
+WebRtc_UWord32
+RTPSender::VideoBitrateSent() const {
+  if (_video)
+    return _video->VideoBitrateSent();
+  else
+    return 0;
+}
+
+WebRtc_UWord32
+RTPSender::FecOverheadRate() const {
+  if (_video)
+    return _video->FecOverheadRate();
+  else
+    return 0;
+}
+
+WebRtc_UWord32
+RTPSender::NackOverheadRate() const {
+  return _nackBitrate.BitrateLast();
+}
+
+WebRtc_Word32
+RTPSender::SetTransmissionTimeOffset(
+    const WebRtc_Word32 transmissionTimeOffset)
+{
+    if (transmissionTimeOffset > (0x800000 - 1) ||
+        transmissionTimeOffset < -(0x800000 - 1))  // Word24
+    {
+        return -1;
+    }
+    CriticalSectionScoped cs(_sendCritsect);
+    _transmissionTimeOffset = transmissionTimeOffset;
+    return 0;
+}
+
+WebRtc_Word32
+RTPSender::RegisterRtpHeaderExtension(const RTPExtensionType type,
+                                      const WebRtc_UWord8 id)
+{
+    CriticalSectionScoped cs(_sendCritsect);
+    return _rtpHeaderExtensionMap.Register(type, id);
+}
+
+WebRtc_Word32
+RTPSender::DeregisterRtpHeaderExtension(const RTPExtensionType type)
+{
+    CriticalSectionScoped cs(_sendCritsect);
+    return _rtpHeaderExtensionMap.Deregister(type);
+}
+
+WebRtc_UWord16
+RTPSender::RtpHeaderExtensionTotalLength() const
+{
+    CriticalSectionScoped cs(_sendCritsect);
+    return _rtpHeaderExtensionMap.GetTotalLengthInBytes();
+}
+
+//can be called multiple times
+WebRtc_Word32 RTPSender::RegisterPayload(
+    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+    const WebRtc_Word8 payloadNumber,
+    const WebRtc_UWord32 frequency,
+    const WebRtc_UWord8 channels,
+    const WebRtc_UWord32 rate) {
+  assert(payloadName);
+  CriticalSectionScoped cs(_sendCritsect);
+
+  std::map<WebRtc_Word8, ModuleRTPUtility::Payload*>::iterator it =
+      _payloadTypeMap.find(payloadNumber);
+
+  if (_payloadTypeMap.end() != it) {
+    // we already use this payload type
+    ModuleRTPUtility::Payload* payload = it->second;
+    assert(payload);
+
+    // check if it's the same as we already have
+    if (ModuleRTPUtility::StringCompare(payload->name, payloadName,
+                                        RTP_PAYLOAD_NAME_SIZE - 1)) {
+      if (_audioConfigured && payload->audio &&
+          payload->typeSpecific.Audio.frequency == frequency &&
+          (payload->typeSpecific.Audio.rate == rate ||
+              payload->typeSpecific.Audio.rate == 0 || rate == 0)) {
+        payload->typeSpecific.Audio.rate = rate;
+        // Ensure that we update the rate if new or old is zero
+        return 0;
+      }
+      if(!_audioConfigured && !payload->audio) {
+        return 0;
+      }
+    }
+    return -1;
+  }
+  WebRtc_Word32 retVal = -1;
+  ModuleRTPUtility::Payload* payload = NULL;
+  if (_audioConfigured) {
+    retVal = _audio->RegisterAudioPayload(payloadName, payloadNumber, frequency,
+                                          channels, rate, payload);
+  } else {
+    retVal = _video->RegisterVideoPayload(payloadName, payloadNumber, rate,
+                                          payload);
+  }
+  if(payload) {
+    _payloadTypeMap[payloadNumber] = payload;
+  }
+  return retVal;
+}
+
+WebRtc_Word32 RTPSender::DeRegisterSendPayload(const WebRtc_Word8 payloadType) {
+  CriticalSectionScoped lock(_sendCritsect);
+
+  std::map<WebRtc_Word8, ModuleRTPUtility::Payload*>::iterator it =
+      _payloadTypeMap.find(payloadType);
+
+  if (_payloadTypeMap.end() == it) return -1;
+
+  ModuleRTPUtility::Payload* payload = it->second;
+  delete payload;
+  _payloadTypeMap.erase(it);
+  return 0;
+}
+
+WebRtc_Word8 RTPSender::SendPayloadType() const
+{
+    return _payloadType;
+}
+
+
+int RTPSender::SendPayloadFrequency() const
+{
+    return _audio->AudioFrequency();
+}
+
+
+WebRtc_Word32
+RTPSender::SetMaxPayloadLength(const WebRtc_UWord16 maxPayloadLength, const WebRtc_UWord16 packetOverHead)
+{
+    // sanity check
+    if(maxPayloadLength < 100 || maxPayloadLength > IP_PACKET_SIZE)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id, "%s invalid argument", __FUNCTION__);
+        return -1;
+    }
+
+    CriticalSectionScoped cs(_sendCritsect);
+    _maxPayloadLength = maxPayloadLength;
+    _packetOverHead = packetOverHead;
+
+    WEBRTC_TRACE(kTraceInfo, kTraceRtpRtcp, _id, "SetMaxPayloadLength to %d.", maxPayloadLength);
+    return 0;
+}
+
+WebRtc_UWord16 RTPSender::MaxDataPayloadLength() const {
+  if(_audioConfigured) {
+    return _maxPayloadLength - RTPHeaderLength();
+  } else {
+    return _maxPayloadLength - RTPHeaderLength() -
+        _video->FECPacketOverhead() - ((_RTX) ? 2 : 0);
+        // Include the FEC/ULP/RED overhead.
+  }
+}
+
+WebRtc_UWord16
+RTPSender::MaxPayloadLength() const
+{
+    return _maxPayloadLength;
+}
+
+WebRtc_UWord16
+RTPSender::PacketOverHead() const
+{
+    return _packetOverHead;
+}
+
+void RTPSender::SetTransmissionSmoothingStatus(const bool enable) {
+  CriticalSectionScoped cs(_sendCritsect);
+  _transmissionSmoothing = enable;
+}
+
+bool RTPSender::TransmissionSmoothingStatus() const {
+  CriticalSectionScoped cs(_sendCritsect);
+  return _transmissionSmoothing;
+}
+
+void RTPSender::SetRTXStatus(const bool enable,
+                             const bool setSSRC,
+                             const WebRtc_UWord32 SSRC) {
+  CriticalSectionScoped cs(_sendCritsect);
+  _RTX = enable;
+  if (enable) {
+    if (setSSRC) {
+     _ssrcRTX = SSRC;
+    } else {
+     _ssrcRTX = _ssrcDB.CreateSSRC();   // can't be 0
+    }
+  }
+}
+
+void RTPSender::RTXStatus(bool* enable,
+                          WebRtc_UWord32* SSRC) const {
+  CriticalSectionScoped cs(_sendCritsect);
+  *enable = _RTX;
+  *SSRC = _ssrcRTX;
+}
+
+WebRtc_Word32 RTPSender::CheckPayloadType(const WebRtc_Word8 payloadType,
+                                          RtpVideoCodecTypes& videoType) {
+  CriticalSectionScoped cs(_sendCritsect);
+
+  if (payloadType < 0) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "\tinvalid payloadType (%d)", payloadType);
+    return -1;
+  }
+  if (_audioConfigured) {
+    WebRtc_Word8 redPlType = -1;
+    if (_audio->RED(redPlType) == 0) {
+      // We have configured RED.
+      if(redPlType == payloadType) {
+        // And it's a match...
+        return 0;
+      }
+    }
+  }
+  if (_payloadType == payloadType) {
+    if (!_audioConfigured) {
+      videoType = _video->VideoCodecType();
+    }
+    return 0;
+  }
+  std::map<WebRtc_Word8, ModuleRTPUtility::Payload*>::iterator it =
+      _payloadTypeMap.find(payloadType);
+  if (it == _payloadTypeMap.end()) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "\tpayloadType:%d not registered", payloadType);
+    return -1;
+  }
+  _payloadType = payloadType;
+  ModuleRTPUtility::Payload* payload = it->second;
+  assert(payload);
+  if (payload->audio) {
+    if (_audioConfigured) {
+      // Extract payload frequency
+      int payloadFreqHz;
+      if (ModuleRTPUtility::StringCompare(payload->name,"g722",4)&&
+          (payload->name[4] == 0)) {
+        //Check that strings end there, g722.1...
+        // Special case for G.722, bug in spec
+        payloadFreqHz=8000;
+      } else {
+        payloadFreqHz=payload->typeSpecific.Audio.frequency;
+      }
+
+      //we don't do anything if it's CN
+      if ((_audio->AudioFrequency() != payloadFreqHz)&&
+          (!ModuleRTPUtility::StringCompare(payload->name,"cn",2))) {
+        _audio->SetAudioFrequency(payloadFreqHz);
+        // We need to correct the timestamp again,
+        // since this might happen after we've set it
+        WebRtc_UWord32 RTPtime =
+            ModuleRTPUtility::GetCurrentRTP(&_clock, payloadFreqHz);
+        SetStartTimestamp(RTPtime);
+        // will be ignored if it's already configured via API
+      }
+    }
+  } else {
+    if(!_audioConfigured) {
+      _video->SetVideoCodecType(payload->typeSpecific.Video.videoCodecType);
+      videoType = payload->typeSpecific.Video.videoCodecType;
+      _video->SetMaxConfiguredBitrateVideo(
+          payload->typeSpecific.Video.maxRate);
+    }
+  }
+  return 0;
+}
+
+WebRtc_Word32
+RTPSender::SendOutgoingData(const FrameType frame_type,
+                            const WebRtc_Word8 payload_type,
+                            const WebRtc_UWord32 capture_timestamp,
+                            int64_t capture_time_ms,
+                            const WebRtc_UWord8* payload_data,
+                            const WebRtc_UWord32 payload_size,
+                            const RTPFragmentationHeader* fragmentation,
+                            VideoCodecInformation* codec_info,
+                            const RTPVideoTypeHeader* rtp_type_hdr)
+{
+    {
+        // Drop this packet if we're not sending media packets.
+        CriticalSectionScoped cs(_sendCritsect);
+        if (!_sendingMedia)
+        {
+            return 0;
+        }
+    }
+    RtpVideoCodecTypes video_type = kRtpNoVideo;
+    if (CheckPayloadType(payload_type, video_type) != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+            "%s invalid argument failed to find payloadType:%d",
+            __FUNCTION__, payload_type);
+        return -1;
+    }
+
+    if (_audioConfigured)
+    {
+        assert(frame_type == kAudioFrameSpeech ||
+               frame_type == kAudioFrameCN ||
+               frame_type == kFrameEmpty);
+
+        return _audio->SendAudio(frame_type, payload_type, capture_timestamp,
+            payload_data, payload_size,fragmentation);
+    } else {
+        assert(frame_type != kAudioFrameSpeech &&
+               frame_type != kAudioFrameCN);
+
+        if (frame_type == kFrameEmpty) {
+          return SendPaddingAccordingToBitrate(payload_type, capture_timestamp,
+                                               capture_time_ms);
+        }
+        return _video->SendVideo(video_type,
+                                 frame_type,
+                                 payload_type,
+                                 capture_timestamp,
+                                 capture_time_ms,
+                                 payload_data,
+                                 payload_size,
+                                 fragmentation,
+                                 codec_info,
+                                 rtp_type_hdr);
+    }
+}
+
+WebRtc_Word32 RTPSender::SendPaddingAccordingToBitrate(
+    WebRtc_Word8 payload_type,
+    WebRtc_UWord32 capture_timestamp,
+    int64_t capture_time_ms) {
+  // Current bitrate since last estimate(1 second) averaged with the
+  // estimate since then, to get the most up to date bitrate.
+  uint32_t current_bitrate = BitrateNow();
+  int bitrate_diff = _targetSendBitrate * 1000 - current_bitrate;
+  if (bitrate_diff > 0) {
+    int bytes = 0;
+    if (current_bitrate == 0) {
+      // Start up phase. Send one 33.3 ms batch to start with.
+      bytes = (bitrate_diff / 8) / 30;
+    } else {
+      bytes = (bitrate_diff / 8);
+      // Cap at 200 ms of target send data.
+      int bytes_cap = _targetSendBitrate * 25;  // 1000 / 8 / 5
+      if (bytes > bytes_cap) {
+        bytes = bytes_cap;
+      }
+    }
+    // Send padding data.
+    return SendPadData(payload_type, capture_timestamp, capture_time_ms, bytes);
+  }
+  return 0;
+}
+
+WebRtc_Word32 RTPSender::SendPadData(WebRtc_Word8 payload_type,
+                                     WebRtc_UWord32 capture_timestamp,
+                                     int64_t capture_time_ms,
+                                     WebRtc_Word32 bytes) {
+  // Drop this packet if we're not sending media packets
+  if (!_sendingMedia) {
+    return 0;
+  }
+  // Max in the RFC 3550 is 255 bytes, we limit it to be modulus 32 for SRTP.
+  int max_length = 224;
+  WebRtc_UWord8 data_buffer[IP_PACKET_SIZE];
+
+  for (; bytes > 0; bytes -= max_length) {
+    int padding_bytes_in_packet = max_length;
+    if (bytes < max_length) {
+      padding_bytes_in_packet = (bytes + 16) & 0xffe0;  // Keep our modulus 32.
+    }
+    if (padding_bytes_in_packet < 32) {
+       // Sanity don't send empty packets.
+       break;
+    }
+
+    WebRtc_Word32 header_length;
+    {
+      // Correct seq num, timestamp and payload type.
+      header_length = BuildRTPheader(data_buffer,
+                                     payload_type,
+                                     false,  // No markerbit.
+                                     capture_timestamp,
+                                     true,  // Timestamp provided.
+                                     true);  // Increment sequence number.
+    }
+    data_buffer[0] |= 0x20;  // Set padding bit.
+    WebRtc_Word32* data =
+        reinterpret_cast<WebRtc_Word32*>(&(data_buffer[header_length]));
+
+    // Fill data buffer with random data.
+    for(int j = 0; j < (padding_bytes_in_packet >> 2); j++) {
+      data[j] = rand();
+    }
+    // Set number of padding bytes in the last byte of the packet.
+    data_buffer[header_length + padding_bytes_in_packet - 1] =
+        padding_bytes_in_packet;
+    // Send the packet
+    if (0 > SendToNetwork(data_buffer,
+                          padding_bytes_in_packet,
+                          header_length,
+                          capture_time_ms,
+                          kDontRetransmit)) {
+      // Error sending the packet.
+      break;
+    }
+  }
+  if (bytes > 31) {  // 31 due to our modulus 32.
+    // We did not manage to send all bytes.
+    return -1;
+  }
+  return 0;
+}
+
+WebRtc_Word32 RTPSender::SetStorePacketsStatus(
+    const bool enable,
+    const WebRtc_UWord16 numberToStore) {
+  _packetHistory->SetStorePacketsStatus(enable, numberToStore);
+  return 0;
+}
+
+bool RTPSender::StorePackets() const {
+  return _packetHistory->StorePackets();
+}
+
+WebRtc_Word32 RTPSender::ReSendPacket(WebRtc_UWord16 packet_id,
+                                      WebRtc_UWord32 min_resend_time) {
+
+  WebRtc_UWord16 length = IP_PACKET_SIZE;
+  WebRtc_UWord8 data_buffer[IP_PACKET_SIZE];
+  WebRtc_UWord8* buffer_to_send_ptr = data_buffer;
+
+  int64_t stored_time_in_ms;
+  StorageType type;
+  bool found = _packetHistory->GetRTPPacket(packet_id,
+      min_resend_time, data_buffer, &length, &stored_time_in_ms, &type);
+  if (!found) {
+    // Packet not found.
+    return 0;
+  }
+
+  if (length == 0 || type == kDontRetransmit) {
+    // No bytes copied (packet recently resent, skip resending) or
+    // packet should not be retransmitted.
+    return 0;
+  }
+
+  WebRtc_UWord8 data_buffer_rtx[IP_PACKET_SIZE];
+  if (_RTX) {
+    buffer_to_send_ptr = data_buffer_rtx;
+
+    CriticalSectionScoped cs(_sendCritsect);
+    // Add RTX header.
+    ModuleRTPUtility::RTPHeaderParser rtpParser(
+        reinterpret_cast<const WebRtc_UWord8*>(data_buffer),
+        length);
+
+    WebRtcRTPHeader rtp_header;
+    rtpParser.Parse(rtp_header);
+
+    // Add original RTP header.
+    memcpy(data_buffer_rtx, data_buffer, rtp_header.header.headerLength);
+
+    // Replace sequence number.
+    WebRtc_UWord8* ptr = data_buffer_rtx + 2;
+    ModuleRTPUtility::AssignUWord16ToBuffer(ptr, _sequenceNumberRTX++);
+
+    // Replace SSRC.
+    ptr += 6;
+    ModuleRTPUtility::AssignUWord32ToBuffer(ptr, _ssrcRTX);
+
+    // Add OSN (original sequence number).
+    ptr = data_buffer_rtx + rtp_header.header.headerLength;
+    ModuleRTPUtility::AssignUWord16ToBuffer(
+        ptr, rtp_header.header.sequenceNumber);
+    ptr += 2;
+
+    // Add original payload data.
+    memcpy(ptr,
+           data_buffer + rtp_header.header.headerLength,
+           length - rtp_header.header.headerLength);
+    length += 2;
+  }
+
+  WebRtc_Word32 bytes_sent = ReSendToNetwork(buffer_to_send_ptr, length);
+  if (bytes_sent <= 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, _id,
+                 "Transport failed to resend packet_id %u", packet_id);
+    return -1;
+  }
+
+  // Store the time when the packet was last resent.
+  _packetHistory->UpdateResendTime(packet_id);
+
+  return bytes_sent;
+}
+
+WebRtc_Word32 RTPSender::ReSendToNetwork(const WebRtc_UWord8* packet,
+                                         const WebRtc_UWord32 size) {
+  WebRtc_Word32 bytes_sent = -1;
+  {
+    CriticalSectionScoped lock(_transportCritsect);
+    if (_transport) {
+      bytes_sent = _transport->SendPacket(_id, packet, size);
+    }
+  }
+
+  if (bytes_sent <= 0) {
+    return -1;
+  }
+
+  // Update send statistics
+  CriticalSectionScoped cs(_sendCritsect);
+  Bitrate::Update(bytes_sent);
+  _packetsSent++;
+  // We on purpose don't add to _payloadBytesSent since this is a
+  // re-transmit and not new payload data.
+  return bytes_sent;
+}
+
+int RTPSender::SelectiveRetransmissions() const {
+  if (!_video) return -1;
+  return _video->SelectiveRetransmissions();
+}
+
+int RTPSender::SetSelectiveRetransmissions(uint8_t settings) {
+  if (!_video) return -1;
+  return _video->SetSelectiveRetransmissions(settings);
+}
+
+void
+RTPSender::OnReceivedNACK(const WebRtc_UWord16 nackSequenceNumbersLength,
+                          const WebRtc_UWord16* nackSequenceNumbers,
+                          const WebRtc_UWord16 avgRTT) {
+    const WebRtc_Word64 now = _clock.GetTimeInMS();
+    WebRtc_UWord32 bytesReSent = 0;
+
+  // Enough bandwidth to send NACK?
+  if (!ProcessNACKBitRate(now)) {
+    WEBRTC_TRACE(kTraceStream,
+                 kTraceRtpRtcp,
+                 _id,
+                 "NACK bitrate reached. Skip sending NACK response. Target %d",
+                 _targetSendBitrate);
+    return;
+  }
+
+  for (WebRtc_UWord16 i = 0; i < nackSequenceNumbersLength; ++i) {
+    const WebRtc_Word32 bytesSent = ReSendPacket(nackSequenceNumbers[i],
+                                                 5+avgRTT);
+    if (bytesSent > 0) {
+      bytesReSent += bytesSent;
+    } else if (bytesSent == 0) {
+      // The packet has previously been resent.
+      // Try resending next packet in the list.
+      continue;
+    } else if (bytesSent < 0) {
+      // Failed to send one Sequence number. Give up the rest in this nack.
+      WEBRTC_TRACE(kTraceWarning,
+                   kTraceRtpRtcp,
+                   _id,
+                   "Failed resending RTP packet %d, Discard rest of packets",
+                   nackSequenceNumbers[i]);
+      break;
+    }
+    // delay bandwidth estimate (RTT * BW)
+    if (_targetSendBitrate != 0 && avgRTT) {
+      // kbits/s * ms = bits => bits/8 = bytes
+      WebRtc_UWord32 targetBytes =
+          (static_cast<WebRtc_UWord32>(_targetSendBitrate) * avgRTT) >> 3;
+      if (bytesReSent > targetBytes) {
+        break; // ignore the rest of the packets in the list
+      }
+    }
+  }
+  if (bytesReSent > 0) {
+    // TODO(pwestin) consolidate these two methods.
+    UpdateNACKBitRate(bytesReSent, now);
+    _nackBitrate.Update(bytesReSent);
+  }
+}
+
+/**
+*    @return true if the nack bitrate is lower than the requested max bitrate
+*/
+bool RTPSender::ProcessNACKBitRate(const WebRtc_UWord32 now) {
+  WebRtc_UWord32 num = 0;
+  WebRtc_Word32 byteCount = 0;
+  const WebRtc_UWord32 avgInterval=1000;
+
+  CriticalSectionScoped cs(_sendCritsect);
+
+  if (_targetSendBitrate == 0) {
+    return true;
+  }
+  for (num = 0; num < NACK_BYTECOUNT_SIZE; num++) {
+    if ((now - _nackByteCountTimes[num]) > avgInterval) {
+      // don't use data older than 1sec
+      break;
+    } else {
+      byteCount += _nackByteCount[num];
+    }
+  }
+  WebRtc_Word32 timeInterval = avgInterval;
+  if (num == NACK_BYTECOUNT_SIZE) {
+    // More than NACK_BYTECOUNT_SIZE nack messages has been received
+    // during the last msgInterval
+    timeInterval = now - _nackByteCountTimes[num-1];
+    if(timeInterval < 0) {
+      timeInterval = avgInterval;
+    }
+  }
+  return (byteCount*8) < (_targetSendBitrate * timeInterval);
+}
+
+void RTPSender::UpdateNACKBitRate(const WebRtc_UWord32 bytes,
+                                  const WebRtc_UWord32 now) {
+  CriticalSectionScoped cs(_sendCritsect);
+
+  // save bitrate statistics
+  if(bytes > 0) {
+    if(now == 0) {
+      // add padding length
+      _nackByteCount[0] += bytes;
+    } else {
+      if(_nackByteCountTimes[0] == 0) {
+        // first no shift
+      } else {
+        // shift
+        for(int i = (NACK_BYTECOUNT_SIZE-2); i >= 0 ; i--) {
+          _nackByteCount[i+1] = _nackByteCount[i];
+          _nackByteCountTimes[i+1] = _nackByteCountTimes[i];
+        }
+      }
+      _nackByteCount[0] = bytes;
+      _nackByteCountTimes[0] = now;
+    }
+  }
+}
+
+// Function triggered by timer.
+void RTPSender::ProcessSendToNetwork() {
+  WebRtc_Word64 delta_time_ms;
+  {
+    CriticalSectionScoped cs(_sendCritsect);
+
+    if (!_transmissionSmoothing) {
+      return;
+    }
+    WebRtc_Word64 now = _clock.GetTimeInMS();
+    delta_time_ms = now - _timeLastSendToNetworkUpdate;
+    _timeLastSendToNetworkUpdate = now;
+  }
+  _sendBucket.UpdateBytesPerInterval(delta_time_ms, _targetSendBitrate);
+
+  while (!_sendBucket.Empty()) {
+
+    WebRtc_Word32 seq_num = _sendBucket.GetNextPacket();
+    if (seq_num < 0) {
+      break;
+    }
+
+    WebRtc_UWord8 data_buffer[IP_PACKET_SIZE];
+    WebRtc_UWord16 length = IP_PACKET_SIZE;
+    int64_t stored_time_ms;
+    StorageType type;
+    bool found = _packetHistory->GetRTPPacket(seq_num, 0, data_buffer, &length,
+        &stored_time_ms, &type);
+    if (!found) {
+      assert(false);
+      return;
+    }
+    assert(length > 0);
+
+    WebRtc_Word64 diff_ms = _clock.GetTimeInMS() - stored_time_ms;
+
+    ModuleRTPUtility::RTPHeaderParser rtpParser(data_buffer, length);
+    WebRtcRTPHeader rtp_header;
+    rtpParser.Parse(rtp_header);
+
+    UpdateTransmissionTimeOffset(data_buffer, length, rtp_header, diff_ms);
+
+    // Send packet
+    WebRtc_Word32 bytes_sent = -1;
+    {
+      CriticalSectionScoped cs(_transportCritsect);
+      if (_transport) {
+        bytes_sent = _transport->SendPacket(_id, data_buffer, length);
+      }
+    }
+
+    // Update send statistics
+    if (bytes_sent > 0) {
+      CriticalSectionScoped cs(_sendCritsect);
+      Bitrate::Update(bytes_sent);
+      _packetsSent++;
+      if (bytes_sent > rtp_header.header.headerLength) {
+        _payloadBytesSent += bytes_sent - rtp_header.header.headerLength;
+      }
+    }
+  }
+}
+
+WebRtc_Word32
+RTPSender::SendToNetwork(WebRtc_UWord8* buffer,
+                         const WebRtc_UWord16 length,
+                         const WebRtc_UWord16 rtpLength,
+                         int64_t capture_time_ms,
+                         const StorageType storage)
+{
+  // Used for NACK or to spead out the transmission of packets.
+  if (_packetHistory->PutRTPPacket(
+      buffer, rtpLength + length, _maxPayloadLength, capture_time_ms, storage)
+      != 0) {
+    return -1;
+  }
+
+  if (_transmissionSmoothing) {
+    const WebRtc_UWord16 sequenceNumber = (buffer[2] << 8) + buffer[3];
+    _sendBucket.Fill(sequenceNumber, rtpLength + length);
+    // Packet will be sent at a later time.
+    return 0;
+  }
+
+  // |capture_time_ms| <= 0 is considered invalid.
+  // TODO(holmer): This should be changed all over Video Engine so that negative
+  // time is consider invalid, while 0 is considered a valid time.
+  if (capture_time_ms > 0) {
+    ModuleRTPUtility::RTPHeaderParser rtpParser(buffer, length);
+    WebRtcRTPHeader rtp_header;
+    rtpParser.Parse(rtp_header);
+    int64_t time_now = _clock.GetTimeInMS();
+    UpdateTransmissionTimeOffset(buffer, length, rtp_header,
+                                 time_now - capture_time_ms);
+  }
+
+  // Send packet
+  WebRtc_Word32 bytes_sent = -1;
+  {
+    CriticalSectionScoped cs(_transportCritsect);
+    if (_transport) {
+      bytes_sent = _transport->SendPacket(_id, buffer, length + rtpLength);
+    }
+  }
+
+  if (bytes_sent <= 0) {
+    return -1;
+  }
+
+  // Update send statistics
+  CriticalSectionScoped cs(_sendCritsect);
+  Bitrate::Update(bytes_sent);
+  _packetsSent++;
+  if (bytes_sent > rtpLength) {
+    _payloadBytesSent += bytes_sent - rtpLength;
+  }
+  return 0;
+}
+
+void
+RTPSender::ProcessBitrate()
+{
+    CriticalSectionScoped cs(_sendCritsect);
+
+    Bitrate::Process();
+    _nackBitrate.Process();
+
+    if (_audioConfigured)
+      return;
+    _video->ProcessBitrate();
+}
+
+WebRtc_UWord16
+RTPSender::RTPHeaderLength() const
+{
+    WebRtc_UWord16 rtpHeaderLength = 12;
+
+    if(_includeCSRCs)
+    {
+        rtpHeaderLength += sizeof(WebRtc_UWord32)*_CSRCs;
+    }
+    rtpHeaderLength += RtpHeaderExtensionTotalLength();
+
+    return rtpHeaderLength;
+}
+
+WebRtc_UWord16
+RTPSender::IncrementSequenceNumber()
+{
+    CriticalSectionScoped cs(_sendCritsect);
+    return _sequenceNumber++;
+}
+
+WebRtc_Word32
+RTPSender::ResetDataCounters()
+{
+    _packetsSent = 0;
+    _payloadBytesSent = 0;
+
+    return 0;
+}
+
+// number of sent RTP packets
+// dont use critsect to avoid potental deadlock
+WebRtc_UWord32
+RTPSender::Packets() const
+{
+    return _packetsSent;
+}
+
+// number of sent RTP bytes
+// dont use critsect to avoid potental deadlock
+WebRtc_UWord32
+RTPSender::Bytes() const
+{
+    return _payloadBytesSent;
+}
+
+WebRtc_Word32
+RTPSender::BuildRTPheader(WebRtc_UWord8* dataBuffer,
+                          const WebRtc_Word8 payloadType,
+                          const bool markerBit,
+                          const WebRtc_UWord32 captureTimeStamp,
+                          const bool timeStampProvided,
+                          const bool incSequenceNumber)
+{
+    assert(payloadType>=0);
+
+    CriticalSectionScoped cs(_sendCritsect);
+
+    dataBuffer[0] = static_cast<WebRtc_UWord8>(0x80);            // version 2
+    dataBuffer[1] = static_cast<WebRtc_UWord8>(payloadType);
+    if (markerBit)
+    {
+        dataBuffer[1] |= kRtpMarkerBitMask;  // MarkerBit is set
+    }
+
+    if(timeStampProvided)
+    {
+        _timeStamp = _startTimeStamp + captureTimeStamp;
+    } else
+    {
+        // make a unique time stamp
+        // used for inband signaling
+        // we can't inc by the actual time, since then we increase the risk of back timing
+        _timeStamp++;
+    }
+
+    ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+2, _sequenceNumber);
+    ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer+4, _timeStamp);
+    ModuleRTPUtility::AssignUWord32ToBuffer(dataBuffer+8, _ssrc);
+
+    WebRtc_Word32 rtpHeaderLength = 12;
+
+    // Add the CSRCs if any
+    if (_includeCSRCs && _CSRCs > 0)
+    {
+        if(_CSRCs > kRtpCsrcSize)
+        {
+            // error
+            assert(false);
+            return -1;
+        }
+        WebRtc_UWord8* ptr = &dataBuffer[rtpHeaderLength];
+        for (WebRtc_UWord32 i = 0; i < _CSRCs; ++i)
+        {
+            ModuleRTPUtility::AssignUWord32ToBuffer(ptr, _CSRC[i]);
+            ptr +=4;
+        }
+        dataBuffer[0] = (dataBuffer[0]&0xf0) | _CSRCs;
+
+        // Update length of header
+        rtpHeaderLength += sizeof(WebRtc_UWord32)*_CSRCs;
+    }
+    {
+        _sequenceNumber++; // prepare for next packet
+    }
+
+    WebRtc_UWord16 len = BuildRTPHeaderExtension(dataBuffer + rtpHeaderLength);
+    if (len)
+    {
+      dataBuffer[0] |= 0x10;  // set eXtension bit
+      rtpHeaderLength += len;
+    }
+
+    return rtpHeaderLength;
+}
+
+WebRtc_UWord16
+RTPSender::BuildRTPHeaderExtension(WebRtc_UWord8* dataBuffer) const
+{
+    if (_rtpHeaderExtensionMap.Size() <= 0) {
+       return 0;
+    }
+
+    /* RTP header extension, RFC 3550.
+     0                   1                   2                   3
+     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    |      defined by profile       |           length              |
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    |                        header extension                       |
+    |                             ....                              |
+    */
+
+    const WebRtc_UWord32 kPosLength = 2;
+    const WebRtc_UWord32 kHeaderLength = RTP_ONE_BYTE_HEADER_LENGTH_IN_BYTES;
+
+    // Add extension ID (0xBEDE).
+    ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer,
+                                            RTP_ONE_BYTE_HEADER_EXTENSION);
+
+    // Add extensions.
+    WebRtc_UWord16 total_block_length = 0;
+
+    RTPExtensionType type = _rtpHeaderExtensionMap.First();
+    while (type != kRtpExtensionNone)
+    {
+        WebRtc_UWord8 block_length = 0;
+        if (type == kRtpExtensionTransmissionTimeOffset)
+        {
+            block_length = BuildTransmissionTimeOffsetExtension(
+                dataBuffer + kHeaderLength + total_block_length);
+        }
+        total_block_length += block_length;
+        type = _rtpHeaderExtensionMap.Next(type);
+    }
+
+    if (total_block_length == 0)
+    {
+        // No extension added.
+        return 0;
+    }
+
+    // Set header length (in number of Word32, header excluded).
+    assert(total_block_length % 4 == 0);
+    ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer + kPosLength,
+                                            total_block_length / 4);
+
+    // Total added length.
+    return kHeaderLength + total_block_length;
+}
+
+WebRtc_UWord8
+RTPSender::BuildTransmissionTimeOffsetExtension(WebRtc_UWord8* dataBuffer) const
+{
+   // From RFC 5450: Transmission Time Offsets in RTP Streams.
+   //
+   // The transmission time is signaled to the receiver in-band using the
+   // general mechanism for RTP header extensions [RFC5285]. The payload
+   // of this extension (the transmitted value) is a 24-bit signed integer.
+   // When added to the RTP timestamp of the packet, it represents the
+   // "effective" RTP transmission time of the packet, on the RTP
+   // timescale.
+   //
+   // The form of the transmission offset extension block:
+   //
+   //    0                   1                   2                   3
+   //    0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+   //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+   //   |  ID   | len=2 |              transmission offset              |
+   //   +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+    // Get id defined by user.
+    WebRtc_UWord8 id;
+    if (_rtpHeaderExtensionMap.GetId(kRtpExtensionTransmissionTimeOffset, &id)
+        != 0) {
+      // Not registered.
+      return 0;
+    }
+
+    int pos = 0;
+    const WebRtc_UWord8 len = 2;
+    dataBuffer[pos++] = (id << 4) + len;
+    ModuleRTPUtility::AssignUWord24ToBuffer(dataBuffer + pos,
+                                            _transmissionTimeOffset);
+    pos += 3;
+    assert(pos == TRANSMISSION_TIME_OFFSET_LENGTH_IN_BYTES);
+    return TRANSMISSION_TIME_OFFSET_LENGTH_IN_BYTES;
+}
+
+void RTPSender::UpdateTransmissionTimeOffset(
+    WebRtc_UWord8* rtp_packet,
+    const WebRtc_UWord16 rtp_packet_length,
+    const WebRtcRTPHeader& rtp_header,
+    const WebRtc_Word64 time_diff_ms) const {
+  CriticalSectionScoped cs(_sendCritsect);
+
+  // Get length until start of transmission block.
+  int transmission_block_pos =
+      _rtpHeaderExtensionMap.GetLengthUntilBlockStartInBytes(
+      kRtpExtensionTransmissionTimeOffset);
+  if (transmission_block_pos < 0) {
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id,
+        "Failed to update transmission time offset, not registered.");
+    return;
+  }
+
+  int block_pos = 12 + rtp_header.header.numCSRCs + transmission_block_pos;
+  if (rtp_packet_length < block_pos + 4 ||
+      rtp_header.header.headerLength < block_pos + 4) {
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id,
+        "Failed to update transmission time offset, invalid length.");
+    return;
+  }
+
+  // Verify that header contains extension.
+  if (!((rtp_packet[12 + rtp_header.header.numCSRCs] == 0xBE) &&
+        (rtp_packet[12 + rtp_header.header.numCSRCs + 1] == 0xDE))) {
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id,
+        "Failed to update transmission time offset, hdr extension not found.");
+    return;
+  }
+
+  // Get id.
+  WebRtc_UWord8 id = 0;
+  if (_rtpHeaderExtensionMap.GetId(kRtpExtensionTransmissionTimeOffset,
+                                   &id) != 0) {
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id,
+        "Failed to update transmission time offset, no id.");
+    return;
+  }
+
+  // Verify first byte in block.
+  const WebRtc_UWord8 first_block_byte = (id << 4) + 2;
+  if (rtp_packet[block_pos] != first_block_byte) {
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id,
+        "Failed to update transmission time offset.");
+    return;
+  }
+
+  // Update transmission offset field.
+  ModuleRTPUtility::AssignUWord24ToBuffer(rtp_packet + block_pos + 1,
+                                          time_diff_ms * 90);  // RTP timestamp.
+}
+
+WebRtc_Word32
+RTPSender::RegisterSendTransport(Transport* transport)
+{
+     CriticalSectionScoped cs(_transportCritsect);
+    _transport = transport;
+    return 0;
+}
+
+void
+RTPSender::SetSendingStatus(const bool enabled)
+{
+    if(enabled)
+    {
+        WebRtc_UWord32 freq;
+        if(_audioConfigured)
+        {
+            WebRtc_UWord32 frequency = _audio->AudioFrequency();
+
+            // sanity
+            switch(frequency)
+            {
+            case 8000:
+            case 12000:
+            case 16000:
+            case 24000:
+            case 32000:
+                break;
+            default:
+                assert(false);
+                return;
+            }
+            freq = frequency;
+        } else
+        {
+            freq = 90000; // 90 KHz for all video
+        }
+        WebRtc_UWord32 RTPtime = ModuleRTPUtility::GetCurrentRTP(&_clock, freq);
+
+        SetStartTimestamp(RTPtime); // will be ignored if it's already configured via API
+
+    } else
+    {
+        if(!_ssrcForced)
+        {
+            // generate a new SSRC
+            _ssrcDB.ReturnSSRC(_ssrc);
+            _ssrc = _ssrcDB.CreateSSRC();   // can't be 0
+
+        }
+        if(!_sequenceNumberForced && !_ssrcForced) // don't initialize seq number if SSRC passed externally
+        {
+            // generate a new sequence number
+            _sequenceNumber = rand() / (RAND_MAX / MAX_INIT_RTP_SEQ_NUMBER);
+        }
+    }
+}
+
+void
+RTPSender::SetSendingMediaStatus(const bool enabled)
+{
+    CriticalSectionScoped cs(_sendCritsect);
+    _sendingMedia = enabled;
+}
+
+bool
+RTPSender::SendingMedia() const
+{
+    CriticalSectionScoped cs(_sendCritsect);
+    return _sendingMedia;
+}
+
+WebRtc_UWord32
+RTPSender::Timestamp() const
+{
+    CriticalSectionScoped cs(_sendCritsect);
+    return _timeStamp;
+}
+
+
+WebRtc_Word32
+RTPSender::SetStartTimestamp( const WebRtc_UWord32 timestamp, const bool force)
+{
+    CriticalSectionScoped cs(_sendCritsect);
+    if(force)
+    {
+        _startTimeStampForced = force;
+        _startTimeStamp = timestamp;
+    } else
+    {
+        if(!_startTimeStampForced)
+        {
+            _startTimeStamp = timestamp;
+        }
+    }
+    return 0;
+}
+
+WebRtc_UWord32
+RTPSender::StartTimestamp() const
+{
+    CriticalSectionScoped cs(_sendCritsect);
+    return _startTimeStamp;
+}
+
+WebRtc_UWord32
+RTPSender::GenerateNewSSRC()
+{
+    // if configured via API, return 0
+    CriticalSectionScoped cs(_sendCritsect);
+
+    if(_ssrcForced)
+    {
+        return 0;
+    }
+    _ssrc = _ssrcDB.CreateSSRC();   // can't be 0
+    return _ssrc;
+}
+
+WebRtc_Word32
+RTPSender::SetSSRC(WebRtc_UWord32 ssrc)
+{
+    // this is configured via the API
+    CriticalSectionScoped cs(_sendCritsect);
+
+    if (_ssrc == ssrc && _ssrcForced)
+    {
+        return 0; // since it's same ssrc, don't reset anything
+    }
+
+    _ssrcForced = true;
+
+    _ssrcDB.ReturnSSRC(_ssrc);
+    _ssrcDB.RegisterSSRC(ssrc);
+    _ssrc = ssrc;
+
+    if(!_sequenceNumberForced)
+    {
+        _sequenceNumber = rand() / (RAND_MAX / MAX_INIT_RTP_SEQ_NUMBER);
+    }
+    return 0;
+}
+
+WebRtc_UWord32
+RTPSender::SSRC() const
+{
+    CriticalSectionScoped cs(_sendCritsect);
+    return _ssrc;
+}
+
+WebRtc_Word32
+RTPSender::SetCSRCStatus(const bool include)
+{
+    _includeCSRCs = include;
+    return 0;
+}
+
+WebRtc_Word32
+RTPSender::SetCSRCs(const WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize],
+                    const WebRtc_UWord8 arrLength)
+{
+    if(arrLength > kRtpCsrcSize)
+    {
+        assert(false);
+        return -1;
+    }
+
+    CriticalSectionScoped cs(_sendCritsect);
+
+    for(int i = 0; i < arrLength;i++)
+    {
+        _CSRC[i] = arrOfCSRC[i];
+    }
+    _CSRCs = arrLength;
+    return 0;
+}
+
+WebRtc_Word32
+RTPSender::CSRCs(WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]) const
+{
+    CriticalSectionScoped cs(_sendCritsect);
+
+    if(arrOfCSRC == NULL)
+    {
+        assert(false);
+        return -1;
+    }
+    for(int i = 0; i < _CSRCs && i < kRtpCsrcSize;i++)
+    {
+        arrOfCSRC[i] = _CSRC[i];
+    }
+    return _CSRCs;
+}
+
+WebRtc_Word32
+RTPSender::SetSequenceNumber(WebRtc_UWord16 seq)
+{
+    CriticalSectionScoped cs(_sendCritsect);
+    _sequenceNumberForced = true;
+    _sequenceNumber = seq;
+    return 0;
+}
+
+WebRtc_UWord16
+RTPSender::SequenceNumber() const
+{
+    CriticalSectionScoped cs(_sendCritsect);
+    return _sequenceNumber;
+}
+
+
+    /*
+    *    Audio
+    */
+WebRtc_Word32
+RTPSender::RegisterAudioCallback(RtpAudioFeedback* messagesCallback)
+{
+    if(!_audioConfigured)
+    {
+        return -1;
+    }
+    return _audio->RegisterAudioCallback(messagesCallback);
+}
+
+    // Send a DTMF tone, RFC 2833 (4733)
+WebRtc_Word32
+RTPSender::SendTelephoneEvent(const WebRtc_UWord8 key,
+                              const WebRtc_UWord16 time_ms,
+                              const WebRtc_UWord8 level)
+{
+    if(!_audioConfigured)
+    {
+        return -1;
+    }
+    return _audio->SendTelephoneEvent(key, time_ms, level);
+}
+
+bool
+RTPSender::SendTelephoneEventActive(WebRtc_Word8& telephoneEvent) const
+{
+    if(!_audioConfigured)
+    {
+        return false;
+    }
+    return _audio->SendTelephoneEventActive(telephoneEvent);
+}
+
+    // set audio packet size, used to determine when it's time to send a DTMF packet in silence (CNG)
+WebRtc_Word32
+RTPSender::SetAudioPacketSize(const WebRtc_UWord16 packetSizeSamples)
+{
+    if(!_audioConfigured)
+    {
+        return -1;
+    }
+    return _audio->SetAudioPacketSize(packetSizeSamples);
+}
+
+WebRtc_Word32
+RTPSender::SetAudioLevelIndicationStatus(const bool enable,
+                                         const WebRtc_UWord8 ID)
+{
+    if(!_audioConfigured)
+    {
+        return -1;
+    }
+    return _audio->SetAudioLevelIndicationStatus(enable, ID);
+}
+
+WebRtc_Word32
+RTPSender::AudioLevelIndicationStatus(bool& enable,
+                                      WebRtc_UWord8& ID) const
+{
+    return _audio->AudioLevelIndicationStatus(enable, ID);
+}
+
+WebRtc_Word32
+RTPSender::SetAudioLevel(const WebRtc_UWord8 level_dBov)
+{
+    return _audio->SetAudioLevel(level_dBov);
+}
+
+    // Set payload type for Redundant Audio Data RFC 2198
+WebRtc_Word32
+RTPSender::SetRED(const WebRtc_Word8 payloadType)
+{
+    if(!_audioConfigured)
+    {
+        return -1;
+    }
+    return _audio->SetRED(payloadType);
+}
+
+    // Get payload type for Redundant Audio Data RFC 2198
+WebRtc_Word32
+RTPSender::RED(WebRtc_Word8& payloadType) const
+{
+    if(!_audioConfigured)
+    {
+        return -1;
+    }
+    return _audio->RED(payloadType);
+}
+
+    /*
+    *    Video
+    */
+VideoCodecInformation*
+RTPSender::CodecInformationVideo()
+{
+    if(_audioConfigured)
+    {
+        return NULL;
+    }
+    return _video->CodecInformationVideo();
+}
+
+RtpVideoCodecTypes
+RTPSender::VideoCodecType() const
+{
+    if(_audioConfigured)
+    {
+        return kRtpNoVideo;
+    }
+    return _video->VideoCodecType();
+}
+
+WebRtc_UWord32
+RTPSender::MaxConfiguredBitrateVideo() const
+{
+    if(_audioConfigured)
+    {
+        return 0;
+    }
+    return _video->MaxConfiguredBitrateVideo();
+}
+
+WebRtc_Word32
+RTPSender::SendRTPIntraRequest()
+{
+    if(_audioConfigured)
+    {
+        return -1;
+    }
+    return _video->SendRTPIntraRequest();
+}
+
+// FEC
+WebRtc_Word32
+RTPSender::SetGenericFECStatus(const bool enable,
+                               const WebRtc_UWord8 payloadTypeRED,
+                               const WebRtc_UWord8 payloadTypeFEC)
+{
+    if(_audioConfigured)
+    {
+        return -1;
+    }
+    return _video->SetGenericFECStatus(enable, payloadTypeRED, payloadTypeFEC);
+}
+
+WebRtc_Word32
+RTPSender::GenericFECStatus(bool& enable,
+                            WebRtc_UWord8& payloadTypeRED,
+                            WebRtc_UWord8& payloadTypeFEC) const
+{
+    if(_audioConfigured)
+    {
+        return -1;
+    }
+    return _video->GenericFECStatus(enable, payloadTypeRED, payloadTypeFEC);
+}
+
+WebRtc_Word32 RTPSender::SetFecParameters(
+    const FecProtectionParams* delta_params,
+    const FecProtectionParams* key_params) {
+  if (_audioConfigured) {
+    return -1;
+  }
+  return _video->SetFecParameters(delta_params, key_params);
+}
+} // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/rtp_sender.h b/src/modules/rtp_rtcp/source/rtp_sender.h
new file mode 100644
index 0000000..4c843a7
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_sender.h
@@ -0,0 +1,355 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_H_
+
+#include <cassert>
+#include <cmath>
+#include <map>
+
+#include "rtp_rtcp_config.h"       // misc. defines (e.g. MAX_PACKET_LENGTH)
+#include "rtp_rtcp_defines.h"
+#include "common_types.h"          // Encryption
+#include "ssrc_database.h"
+#include "Bitrate.h"
+#include "rtp_header_extension.h"
+#include "video_codec_information.h"
+#include "transmission_bucket.h"
+
+#define MAX_INIT_RTP_SEQ_NUMBER 32767 // 2^15 -1
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class RTPPacketHistory;
+class RTPSenderAudio;
+class RTPSenderVideo;
+
+class RTPSenderInterface
+{
+public:
+    RTPSenderInterface() {}
+    virtual ~RTPSenderInterface() {}
+
+    virtual WebRtc_UWord32 SSRC() const = 0;
+    virtual WebRtc_UWord32 Timestamp() const = 0;
+
+    virtual WebRtc_Word32 BuildRTPheader(WebRtc_UWord8* dataBuffer,
+                                       const WebRtc_Word8 payloadType,
+                                       const bool markerBit,
+                                       const WebRtc_UWord32 captureTimeStamp,
+                                       const bool timeStampProvided = true,
+                                       const bool incSequenceNumber = true) = 0;
+
+    virtual WebRtc_UWord16 RTPHeaderLength() const = 0;
+    virtual WebRtc_UWord16 IncrementSequenceNumber() = 0;
+    virtual WebRtc_UWord16 SequenceNumber()   const = 0;
+    virtual WebRtc_UWord16 MaxPayloadLength() const = 0;
+    virtual WebRtc_UWord16 MaxDataPayloadLength() const = 0;
+    virtual WebRtc_UWord16 PacketOverHead() const = 0;
+    virtual WebRtc_UWord16 ActualSendBitrateKbit() const = 0;
+
+    virtual WebRtc_Word32 SendToNetwork(WebRtc_UWord8* dataBuffer,
+                                        const WebRtc_UWord16 payloadLength,
+                                        const WebRtc_UWord16 rtpHeaderLength,
+                                        int64_t capture_time_ms,
+                                        const StorageType storage) = 0;
+};
+
+class RTPSender : public Bitrate, public RTPSenderInterface
+{
+public:
+    RTPSender(const WebRtc_Word32 id, const bool audio, RtpRtcpClock* clock);
+    virtual ~RTPSender();
+
+    void ProcessBitrate();
+    void ProcessSendToNetwork();
+
+    WebRtc_UWord16 ActualSendBitrateKbit() const;
+
+    WebRtc_UWord32 VideoBitrateSent() const;
+    WebRtc_UWord32 FecOverheadRate() const;
+    WebRtc_UWord32 NackOverheadRate() const;
+
+    void SetTargetSendBitrate(const WebRtc_UWord32 bits);
+
+    WebRtc_UWord16 MaxDataPayloadLength() const; // with RTP and FEC headers
+
+    // callback
+    WebRtc_Word32 RegisterSendTransport(Transport* outgoingTransport);
+
+    WebRtc_Word32 RegisterPayload(
+        const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+        const WebRtc_Word8 payloadType,
+        const WebRtc_UWord32 frequency,
+        const WebRtc_UWord8 channels,
+        const WebRtc_UWord32 rate);
+
+    WebRtc_Word32 DeRegisterSendPayload(const WebRtc_Word8 payloadType);
+
+    WebRtc_Word8 SendPayloadType() const;
+
+    int SendPayloadFrequency() const;
+
+    void SetSendingStatus(const bool enabled);
+
+    void SetSendingMediaStatus(const bool enabled);
+    bool SendingMedia() const;
+
+    // number of sent RTP packets
+    WebRtc_UWord32 Packets() const;
+
+    // number of sent RTP bytes
+    WebRtc_UWord32 Bytes() const;
+
+    WebRtc_Word32 ResetDataCounters();
+
+    WebRtc_UWord32 StartTimestamp() const;
+    WebRtc_Word32 SetStartTimestamp(const WebRtc_UWord32 timestamp,
+                                    const bool force = false);
+
+    WebRtc_UWord32 GenerateNewSSRC();
+    WebRtc_Word32 SetSSRC( const WebRtc_UWord32 ssrc);
+
+    WebRtc_UWord16 SequenceNumber() const;
+    WebRtc_Word32 SetSequenceNumber( WebRtc_UWord16 seq);
+
+    WebRtc_Word32 CSRCs(WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize]) const;
+
+    WebRtc_Word32 SetCSRCStatus(const bool include);
+
+    WebRtc_Word32 SetCSRCs(const WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize],
+                           const WebRtc_UWord8 arrLength);
+
+    WebRtc_Word32 SetMaxPayloadLength(const WebRtc_UWord16 length,
+                                      const WebRtc_UWord16 packetOverHead);
+
+    WebRtc_Word32 SendOutgoingData(const FrameType frameType,
+                                   const WebRtc_Word8 payloadType,
+                                   const WebRtc_UWord32 timeStamp,
+                                   int64_t capture_time_ms,
+                                   const WebRtc_UWord8* payloadData,
+                                   const WebRtc_UWord32 payloadSize,
+                                   const RTPFragmentationHeader* fragmentation,
+                                   VideoCodecInformation* codecInfo = NULL,
+                                   const RTPVideoTypeHeader* rtpTypeHdr = NULL);
+
+    WebRtc_Word32 SendPadData(WebRtc_Word8 payload_type,
+                              WebRtc_UWord32 capture_timestamp,
+                              int64_t capture_time_ms,
+                              WebRtc_Word32 bytes);
+    /*
+    * RTP header extension
+    */
+    WebRtc_Word32 SetTransmissionTimeOffset(
+        const WebRtc_Word32 transmissionTimeOffset);
+
+    WebRtc_Word32 RegisterRtpHeaderExtension(const RTPExtensionType type,
+                                             const WebRtc_UWord8 id);
+
+    WebRtc_Word32 DeregisterRtpHeaderExtension(const RTPExtensionType type);
+
+    WebRtc_UWord16 RtpHeaderExtensionTotalLength() const;
+
+    WebRtc_UWord16 BuildRTPHeaderExtension(WebRtc_UWord8* dataBuffer) const;
+
+    WebRtc_UWord8 BuildTransmissionTimeOffsetExtension(
+        WebRtc_UWord8* dataBuffer) const;
+
+    void UpdateTransmissionTimeOffset(WebRtc_UWord8* rtp_packet,
+                                      const WebRtc_UWord16 rtp_packet_length,
+                                      const WebRtcRTPHeader& rtp_header,
+                                      const WebRtc_Word64 time_diff_ms) const;
+
+    void SetTransmissionSmoothingStatus(const bool enable);
+
+    bool TransmissionSmoothingStatus() const;
+
+    /*
+    *    NACK
+    */
+    int SelectiveRetransmissions() const;
+    int SetSelectiveRetransmissions(uint8_t settings);
+    void OnReceivedNACK(const WebRtc_UWord16 nackSequenceNumbersLength,
+                        const WebRtc_UWord16* nackSequenceNumbers,
+                        const WebRtc_UWord16 avgRTT);
+
+    WebRtc_Word32 SetStorePacketsStatus(const bool enable,
+                                        const WebRtc_UWord16 numberToStore);
+
+    bool StorePackets() const;
+
+    WebRtc_Word32 ReSendPacket(WebRtc_UWord16 packet_id,
+                               WebRtc_UWord32 min_resend_time = 0);
+
+    WebRtc_Word32 ReSendToNetwork(const WebRtc_UWord8* packet,
+                                  const WebRtc_UWord32 size);
+
+    bool ProcessNACKBitRate(const WebRtc_UWord32 now);
+
+    /*
+    *  RTX
+    */
+    void SetRTXStatus(const bool enable,
+                      const bool setSSRC,
+                      const WebRtc_UWord32 SSRC);
+
+    void RTXStatus(bool* enable, WebRtc_UWord32* SSRC) const;
+
+    /*
+    * Functions wrapping RTPSenderInterface
+    */
+    virtual WebRtc_Word32 BuildRTPheader(WebRtc_UWord8* dataBuffer,
+                                       const WebRtc_Word8 payloadType,
+                                       const bool markerBit,
+                                       const WebRtc_UWord32 captureTimeStamp,
+                                       const bool timeStampProvided = true,
+                                       const bool incSequenceNumber = true);
+
+    virtual WebRtc_UWord16 RTPHeaderLength() const ;
+    virtual WebRtc_UWord16 IncrementSequenceNumber();
+    virtual WebRtc_UWord16 MaxPayloadLength() const;
+    virtual WebRtc_UWord16 PacketOverHead() const;
+
+    // current timestamp
+    virtual WebRtc_UWord32 Timestamp() const;
+    virtual WebRtc_UWord32 SSRC() const;
+
+    virtual WebRtc_Word32 SendToNetwork(WebRtc_UWord8* dataBuffer,
+                                        const WebRtc_UWord16 payloadLength,
+                                        const WebRtc_UWord16 rtpHeaderLength,
+                                        int64_t capture_time_ms,
+                                        const StorageType storage);
+
+    /*
+    *    Audio
+    */
+    WebRtc_Word32 RegisterAudioCallback(RtpAudioFeedback* messagesCallback);
+
+    // Send a DTMF tone using RFC 2833 (4733)
+    WebRtc_Word32 SendTelephoneEvent(const WebRtc_UWord8 key,
+                                     const WebRtc_UWord16 time_ms,
+                                     const WebRtc_UWord8 level);
+
+    bool SendTelephoneEventActive(WebRtc_Word8& telephoneEvent) const;
+
+    // set audio packet size, used to determine when it's time to send a DTMF packet in silence (CNG)
+    WebRtc_Word32 SetAudioPacketSize(const WebRtc_UWord16 packetSizeSamples);
+
+    // Set status and ID for header-extension-for-audio-level-indication.
+    WebRtc_Word32 SetAudioLevelIndicationStatus(const bool enable,
+                                              const WebRtc_UWord8 ID);
+
+    // Get status and ID for header-extension-for-audio-level-indication.
+    WebRtc_Word32 AudioLevelIndicationStatus(bool& enable,
+                                           WebRtc_UWord8& ID) const;
+
+    // Store the audio level in dBov for header-extension-for-audio-level-indication.
+    WebRtc_Word32 SetAudioLevel(const WebRtc_UWord8 level_dBov);
+
+    // Set payload type for Redundant Audio Data RFC 2198
+    WebRtc_Word32 SetRED(const WebRtc_Word8 payloadType);
+
+    // Get payload type for Redundant Audio Data RFC 2198
+    WebRtc_Word32 RED(WebRtc_Word8& payloadType) const;
+
+    /*
+    *    Video
+    */
+    VideoCodecInformation* CodecInformationVideo();
+
+    RtpVideoCodecTypes VideoCodecType() const;
+
+    WebRtc_UWord32 MaxConfiguredBitrateVideo() const;
+
+    WebRtc_Word32 SendRTPIntraRequest();
+
+    // FEC
+    WebRtc_Word32 SetGenericFECStatus(const bool enable,
+                                    const WebRtc_UWord8 payloadTypeRED,
+                                    const WebRtc_UWord8 payloadTypeFEC);
+
+    WebRtc_Word32 GenericFECStatus(bool& enable,
+                                 WebRtc_UWord8& payloadTypeRED,
+                                 WebRtc_UWord8& payloadTypeFEC) const;
+
+    WebRtc_Word32 SetFecParameters(
+        const FecProtectionParams* delta_params,
+        const FecProtectionParams* key_params);
+
+protected:
+    WebRtc_Word32 CheckPayloadType(const WebRtc_Word8 payloadType,
+                                   RtpVideoCodecTypes& videoType);
+
+private:
+    void UpdateNACKBitRate(const WebRtc_UWord32 bytes,
+                           const WebRtc_UWord32 now);
+
+    WebRtc_Word32 SendPaddingAccordingToBitrate(
+        WebRtc_Word8 payload_type,
+        WebRtc_UWord32 capture_timestamp,
+        int64_t capture_time_ms);
+
+    WebRtc_Word32              _id;
+    const bool                 _audioConfigured;
+    RTPSenderAudio*            _audio;
+    RTPSenderVideo*            _video;
+
+    CriticalSectionWrapper*    _sendCritsect;
+
+    CriticalSectionWrapper*    _transportCritsect;
+    Transport*                 _transport;
+
+    bool                      _sendingMedia;
+
+    WebRtc_UWord16            _maxPayloadLength;
+    WebRtc_UWord16            _targetSendBitrate;
+    WebRtc_UWord16            _packetOverHead;
+
+    WebRtc_Word8              _payloadType;
+    std::map<WebRtc_Word8, ModuleRTPUtility::Payload*> _payloadTypeMap;
+
+    RtpHeaderExtensionMap     _rtpHeaderExtensionMap;
+    WebRtc_Word32             _transmissionTimeOffset;
+
+    // NACK
+    WebRtc_UWord32            _nackByteCountTimes[NACK_BYTECOUNT_SIZE];
+    WebRtc_Word32             _nackByteCount[NACK_BYTECOUNT_SIZE];
+    Bitrate                   _nackBitrate;
+
+    RTPPacketHistory*         _packetHistory;
+    TransmissionBucket        _sendBucket;
+    WebRtc_Word64             _timeLastSendToNetworkUpdate;
+    bool                      _transmissionSmoothing;
+
+    // statistics
+    WebRtc_UWord32            _packetsSent;
+    WebRtc_UWord32            _payloadBytesSent;
+
+    // RTP variables
+    bool                      _startTimeStampForced;
+    WebRtc_UWord32            _startTimeStamp;
+    SSRCDatabase&             _ssrcDB;
+    WebRtc_UWord32            _remoteSSRC;
+    bool                      _sequenceNumberForced;
+    WebRtc_UWord16            _sequenceNumber;
+    WebRtc_UWord16            _sequenceNumberRTX;
+    bool                      _ssrcForced;
+    WebRtc_UWord32            _ssrc;
+    WebRtc_UWord32            _timeStamp;
+    WebRtc_UWord8             _CSRCs;
+    WebRtc_UWord32            _CSRC[kRtpCsrcSize];
+    bool                      _includeCSRCs;
+    bool                      _RTX;
+    WebRtc_UWord32            _ssrcRTX;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_H_
diff --git a/src/modules/rtp_rtcp/source/rtp_sender_audio.cc b/src/modules/rtp_rtcp/source/rtp_sender_audio.cc
new file mode 100644
index 0000000..0f6f69f
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_sender_audio.cc
@@ -0,0 +1,604 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtp_sender_audio.h"
+
+#include <string.h> //memcpy
+#include <cassert> //assert
+
+namespace webrtc {
+RTPSenderAudio::RTPSenderAudio(const WebRtc_Word32 id, RtpRtcpClock* clock,
+                               RTPSenderInterface* rtpSender) :
+    _id(id),
+    _clock(*clock),
+    _rtpSender(rtpSender),
+    _audioFeedbackCritsect(CriticalSectionWrapper::CreateCriticalSection()),
+    _audioFeedback(NULL),
+    _sendAudioCritsect(CriticalSectionWrapper::CreateCriticalSection()),
+    _frequency(8000),
+    _packetSizeSamples(160),
+    _dtmfEventIsOn(false),
+    _dtmfEventFirstPacketSent(false),
+    _dtmfPayloadType(-1),
+    _dtmfTimestamp(0),
+    _dtmfKey(0),
+    _dtmfLengthSamples(0),
+    _dtmfLevel(0),
+    _dtmfTimeLastSent(0),
+    _dtmfTimestampLastSent(0),
+    _REDPayloadType(-1),
+    _inbandVADactive(false),
+    _cngNBPayloadType(-1),
+    _cngWBPayloadType(-1),
+    _cngSWBPayloadType(-1),
+    _lastPayloadType(-1),
+    _includeAudioLevelIndication(false),    // @TODO - reset at Init()?
+    _audioLevelIndicationID(0),
+    _audioLevel_dBov(0) {
+};
+
+RTPSenderAudio::~RTPSenderAudio()
+{
+    delete _sendAudioCritsect;
+    delete _audioFeedbackCritsect;
+}
+
+WebRtc_Word32
+RTPSenderAudio::RegisterAudioCallback(RtpAudioFeedback* messagesCallback)
+{
+    CriticalSectionScoped cs(_audioFeedbackCritsect);
+    _audioFeedback = messagesCallback;
+    return 0;
+}
+
+void
+RTPSenderAudio::SetAudioFrequency(const WebRtc_UWord32 f)
+{
+    CriticalSectionScoped cs(_sendAudioCritsect);
+    _frequency = f;
+}
+
+int
+RTPSenderAudio::AudioFrequency() const
+{
+    CriticalSectionScoped cs(_sendAudioCritsect);
+    return _frequency;
+}
+
+    // set audio packet size, used to determine when it's time to send a DTMF packet in silence (CNG)
+WebRtc_Word32
+RTPSenderAudio::SetAudioPacketSize(const WebRtc_UWord16 packetSizeSamples)
+{
+    CriticalSectionScoped cs(_sendAudioCritsect);
+
+    _packetSizeSamples = packetSizeSamples;
+    return 0;
+}
+
+WebRtc_Word32 RTPSenderAudio::RegisterAudioPayload(
+    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+    const WebRtc_Word8 payloadType,
+    const WebRtc_UWord32 frequency,
+    const WebRtc_UWord8 channels,
+    const WebRtc_UWord32 rate,
+    ModuleRTPUtility::Payload*& payload) {
+  CriticalSectionScoped cs(_sendAudioCritsect);
+
+  if (ModuleRTPUtility::StringCompare(payloadName, "cn", 2))  {
+    //  we can have multiple CNG payload types
+    if (frequency == 8000) {
+      _cngNBPayloadType = payloadType;
+
+    } else if (frequency == 16000) {
+      _cngWBPayloadType = payloadType;
+
+    } else if (frequency == 32000) {
+      _cngSWBPayloadType = payloadType;
+    } else {
+      return -1;
+    }
+  }
+  if (ModuleRTPUtility::StringCompare(payloadName, "telephone-event", 15)) {
+    // Don't add it to the list
+    // we dont want to allow send with a DTMF payloadtype
+    _dtmfPayloadType = payloadType;
+    return 0;
+    // The default timestamp rate is 8000 Hz, but other rates may be defined.
+  }
+  payload = new ModuleRTPUtility::Payload;
+  payload->typeSpecific.Audio.frequency = frequency;
+  payload->typeSpecific.Audio.channels = channels;
+  payload->typeSpecific.Audio.rate = rate;
+  payload->audio = true;
+  payload->name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
+  strncpy(payload->name, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
+  return 0;
+}
+
+bool
+RTPSenderAudio::MarkerBit(const FrameType frameType,
+                          const WebRtc_Word8 payloadType)
+{
+    CriticalSectionScoped cs(_sendAudioCritsect);
+
+    // for audio true for first packet in a speech burst
+    bool markerBit = false;
+    if(_lastPayloadType != payloadType)
+    {
+        if(_cngNBPayloadType != -1)
+        {
+            // we have configured NB CNG
+            if(_cngNBPayloadType == payloadType)
+            {
+                // only set a marker bit when we change payload type to a non CNG
+                return false;
+            }
+        }
+        if(_cngWBPayloadType != -1)
+        {
+            // we have configured WB CNG
+            if(_cngWBPayloadType == payloadType)
+            {
+                // only set a marker bit when we change payload type to a non CNG
+                return false;
+            }
+        }
+        if(_cngSWBPayloadType != -1)
+        {
+            // we have configured SWB CNG
+            if(_cngSWBPayloadType == payloadType)
+            {
+                // only set a marker bit when we change payload type to a non CNG
+                return false;
+            }
+        }
+        // payloadType differ
+        if(_lastPayloadType == -1)
+        {
+            if(frameType != kAudioFrameCN)
+            {
+                // first packet and NOT CNG
+                return true;
+
+            }else
+            {
+                // first packet and CNG
+                _inbandVADactive = true;
+                return false;
+            }
+        }
+        // not first packet AND
+        // not CNG AND
+        // payloadType changed
+
+        // set a marker bit when we change payload type
+        markerBit = true;
+    }
+
+    // For G.723 G.729, AMR etc we can have inband VAD
+    if(frameType == kAudioFrameCN)
+    {
+        _inbandVADactive = true;
+
+    } else if(_inbandVADactive)
+    {
+        _inbandVADactive = false;
+        markerBit = true;
+    }
+    return markerBit;
+}
+
+bool
+RTPSenderAudio::SendTelephoneEventActive(WebRtc_Word8& telephoneEvent) const
+{
+    if(_dtmfEventIsOn)
+    {
+        telephoneEvent = _dtmfKey;
+        return true;
+    }
+    WebRtc_Word64 delaySinceLastDTMF = _clock.GetTimeInMS() - _dtmfTimeLastSent;
+    if(delaySinceLastDTMF < 100)
+    {
+        telephoneEvent = _dtmfKey;
+        return true;
+    }
+    telephoneEvent = -1;
+    return false;
+}
+
+WebRtc_Word32 RTPSenderAudio::SendAudio(
+    const FrameType frameType,
+    const WebRtc_Word8 payloadType,
+    const WebRtc_UWord32 captureTimeStamp,
+    const WebRtc_UWord8* payloadData,
+    const WebRtc_UWord32 dataSize,
+    const RTPFragmentationHeader* fragmentation) {
+  // TODO(pwestin) Breakup function in smaller functions.
+  WebRtc_UWord16 payloadSize = static_cast<WebRtc_UWord16>(dataSize);
+  WebRtc_UWord16 maxPayloadLength = _rtpSender->MaxPayloadLength();
+  bool dtmfToneStarted = false;
+  WebRtc_UWord16 dtmfLengthMS = 0;
+  WebRtc_UWord8 key = 0;
+
+  // Check if we have pending DTMFs to send
+  if (!_dtmfEventIsOn && PendingDTMF()) {
+    CriticalSectionScoped cs(_sendAudioCritsect);
+
+    WebRtc_Word64 delaySinceLastDTMF = _clock.GetTimeInMS() - _dtmfTimeLastSent;
+
+    if (delaySinceLastDTMF > 100) {
+      // New tone to play
+      _dtmfTimestamp = captureTimeStamp;
+      if (NextDTMF(&key, &dtmfLengthMS, &_dtmfLevel) >= 0) {
+        _dtmfEventFirstPacketSent = false;
+        _dtmfKey = key;
+        _dtmfLengthSamples = (_frequency / 1000) * dtmfLengthMS;
+        dtmfToneStarted = true;
+        _dtmfEventIsOn = true;
+      }
+    }
+  }
+  if (dtmfToneStarted) {
+    CriticalSectionScoped cs(_audioFeedbackCritsect);
+    if (_audioFeedback) {
+      _audioFeedback->OnPlayTelephoneEvent(_id, key, dtmfLengthMS, _dtmfLevel);
+    }
+  }
+
+  // A source MAY send events and coded audio packets for the same time
+  // but we don't support it
+  {
+    _sendAudioCritsect->Enter();
+
+    if (_dtmfEventIsOn) {
+      if (frameType == kFrameEmpty) {
+        // kFrameEmpty is used to drive the DTMF when in CN mode
+        // it can be triggered more frequently than we want to send the
+        // DTMF packets.
+        if (_packetSizeSamples > (captureTimeStamp - _dtmfTimestampLastSent)) {
+          // not time to send yet
+          _sendAudioCritsect->Leave();
+          return 0;
+        }
+      }
+      _dtmfTimestampLastSent = captureTimeStamp;
+      WebRtc_UWord32 dtmfDurationSamples = captureTimeStamp - _dtmfTimestamp;
+      bool ended = false;
+      bool send = true;
+
+      if (_dtmfLengthSamples > dtmfDurationSamples) {
+        if (dtmfDurationSamples <= 0) {
+          // Skip send packet at start, since we shouldn't use duration 0
+          send = false;
+        }
+      } else {
+        ended = true;
+        _dtmfEventIsOn = false;
+        _dtmfTimeLastSent = _clock.GetTimeInMS();
+      }
+      // don't hold the critsect while calling SendTelephoneEventPacket
+      _sendAudioCritsect->Leave();
+      if (send) {
+        if (dtmfDurationSamples > 0xffff) {
+          // RFC 4733 2.5.2.3 Long-Duration Events
+          SendTelephoneEventPacket(ended, _dtmfTimestamp,
+                                   static_cast<WebRtc_UWord16>(0xffff), false);
+
+          // set new timestap for this segment
+          _dtmfTimestamp = captureTimeStamp;
+          dtmfDurationSamples -= 0xffff;
+          _dtmfLengthSamples -= 0xffff;
+
+          return SendTelephoneEventPacket(
+              ended,
+              _dtmfTimestamp,
+              static_cast<WebRtc_UWord16>(dtmfDurationSamples),
+              false);
+        } else {
+          // set markerBit on the first packet in the burst
+          _dtmfEventFirstPacketSent = true;
+          return SendTelephoneEventPacket(
+              ended,
+              _dtmfTimestamp,
+              static_cast<WebRtc_UWord16>(dtmfDurationSamples),
+              !_dtmfEventFirstPacketSent);
+        }
+      }
+      return 0;
+    }
+    _sendAudioCritsect->Leave();
+  }
+  if (payloadSize == 0 || payloadData == NULL) {
+    if (frameType == kFrameEmpty) {
+      // we don't send empty audio RTP packets
+      // no error since we use it to drive DTMF when we use VAD
+      return 0;
+    }
+    return -1;
+  }
+  WebRtc_UWord8 dataBuffer[IP_PACKET_SIZE];
+  bool markerBit = MarkerBit(frameType, payloadType);
+
+  WebRtc_Word32 rtpHeaderLength = 0;
+  WebRtc_UWord16 timestampOffset = 0;
+
+  if (_REDPayloadType >= 0 && fragmentation && !markerBit &&
+      fragmentation->fragmentationVectorSize > 1) {
+    // have we configured RED? use its payload type
+    // we need to get the current timestamp to calc the diff
+    WebRtc_UWord32 oldTimeStamp = _rtpSender->Timestamp();
+    rtpHeaderLength = _rtpSender->BuildRTPheader(dataBuffer, _REDPayloadType,
+                                                 markerBit, captureTimeStamp);
+
+    timestampOffset = WebRtc_UWord16(_rtpSender->Timestamp() - oldTimeStamp);
+  } else {
+    rtpHeaderLength = _rtpSender->BuildRTPheader(dataBuffer, payloadType,
+                                                 markerBit, captureTimeStamp);
+  }
+  if (rtpHeaderLength <= 0) {
+    return -1;
+  }
+  {
+    CriticalSectionScoped cs(_sendAudioCritsect);
+
+    // https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/
+    if (_includeAudioLevelIndication) {
+      dataBuffer[0] |= 0x10; // set eXtension bit
+      /*
+        0                   1                   2                   3
+        0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+        |      0xBE     |      0xDE     |            length=1           |
+        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+        |  ID   | len=0 |V|   level     |      0x00     |      0x00     |
+        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+       */
+      // add our ID (0xBEDE)
+      ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+rtpHeaderLength,
+                                              RTP_AUDIO_LEVEL_UNIQUE_ID);
+      rtpHeaderLength += 2;
+
+      // add the length (length=1) in number of word32
+      const WebRtc_UWord8 length = 1;
+      ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+rtpHeaderLength,
+                                              length);
+      rtpHeaderLength += 2;
+
+      // add ID (defined by the user) and len(=0) byte
+      const WebRtc_UWord8 id = _audioLevelIndicationID;
+      const WebRtc_UWord8 len = 0;
+      dataBuffer[rtpHeaderLength++] = (id << 4) + len;
+
+      // add voice-activity flag (V) bit and the audio level (in dBov)
+      const WebRtc_UWord8 V = (frameType == kAudioFrameSpeech);
+      WebRtc_UWord8 level = _audioLevel_dBov;
+      dataBuffer[rtpHeaderLength++] = (V << 7) + level;
+
+      // add two bytes zero padding
+      ModuleRTPUtility::AssignUWord16ToBuffer(dataBuffer+rtpHeaderLength, 0);
+      rtpHeaderLength += 2;
+    }
+
+    if(maxPayloadLength < rtpHeaderLength + payloadSize ) {
+      // too large payload buffer
+      return -1;
+    }
+
+    if (_REDPayloadType >= 0 &&  // Have we configured RED?
+        fragmentation &&
+        fragmentation->fragmentationVectorSize > 1 &&
+        !markerBit) {
+      if (timestampOffset <= 0x3fff) {
+        if(fragmentation->fragmentationVectorSize != 2) {
+          // we only support 2 codecs when using RED
+          return -1;
+        }
+        // only 0x80 if we have multiple blocks
+        dataBuffer[rtpHeaderLength++] = 0x80 +
+            fragmentation->fragmentationPlType[1];
+        WebRtc_UWord32 blockLength = fragmentation->fragmentationLength[1];
+
+        // sanity blockLength
+        if(blockLength > 0x3ff) {  // block length 10 bits 1023 bytes
+          return -1;
+        }
+        WebRtc_UWord32 REDheader = (timestampOffset << 10) + blockLength;
+        ModuleRTPUtility::AssignUWord24ToBuffer(dataBuffer + rtpHeaderLength,
+                                                REDheader);
+        rtpHeaderLength += 3;
+
+        dataBuffer[rtpHeaderLength++] = fragmentation->fragmentationPlType[0];
+        // copy the RED data
+        memcpy(dataBuffer+rtpHeaderLength,
+               payloadData + fragmentation->fragmentationOffset[1],
+               fragmentation->fragmentationLength[1]);
+
+        // copy the normal data
+        memcpy(dataBuffer+rtpHeaderLength +
+               fragmentation->fragmentationLength[1],
+               payloadData + fragmentation->fragmentationOffset[0],
+               fragmentation->fragmentationLength[0]);
+
+        payloadSize = static_cast<WebRtc_UWord16>(
+            fragmentation->fragmentationLength[0] +
+            fragmentation->fragmentationLength[1]);
+      } else {
+        // silence for too long send only new data
+        dataBuffer[rtpHeaderLength++] = fragmentation->fragmentationPlType[0];
+        memcpy(dataBuffer+rtpHeaderLength,
+               payloadData + fragmentation->fragmentationOffset[0],
+               fragmentation->fragmentationLength[0]);
+
+        payloadSize = static_cast<WebRtc_UWord16>(
+            fragmentation->fragmentationLength[0]);
+      }
+    } else {
+      if (fragmentation && fragmentation->fragmentationVectorSize > 0) {
+        // use the fragment info if we have one
+        dataBuffer[rtpHeaderLength++] = fragmentation->fragmentationPlType[0];
+        memcpy( dataBuffer+rtpHeaderLength,
+                payloadData + fragmentation->fragmentationOffset[0],
+                fragmentation->fragmentationLength[0]);
+
+        payloadSize = static_cast<WebRtc_UWord16>(
+            fragmentation->fragmentationLength[0]);
+      } else {
+        memcpy(dataBuffer+rtpHeaderLength, payloadData, payloadSize);
+      }
+    }
+    _lastPayloadType = payloadType;
+  }   // end critical section
+  return _rtpSender->SendToNetwork(dataBuffer,
+                                   payloadSize,
+                                   static_cast<WebRtc_UWord16>(rtpHeaderLength),
+                                   -1,
+                                   kAllowRetransmission);
+}
+
+WebRtc_Word32
+RTPSenderAudio::SetAudioLevelIndicationStatus(const bool enable,
+                                              const WebRtc_UWord8 ID)
+{
+    if(ID < 1 || ID > 14)
+    {
+        return -1;
+    }
+    CriticalSectionScoped cs(_sendAudioCritsect);
+
+    _includeAudioLevelIndication = enable;
+    _audioLevelIndicationID = ID;
+
+    return 0;
+}
+
+WebRtc_Word32
+RTPSenderAudio::AudioLevelIndicationStatus(bool& enable,
+                                           WebRtc_UWord8& ID) const
+{
+    CriticalSectionScoped cs(_sendAudioCritsect);
+    enable = _includeAudioLevelIndication;
+    ID = _audioLevelIndicationID;
+    return 0;
+}
+
+    // Audio level magnitude and voice activity flag are set for each RTP packet
+WebRtc_Word32
+RTPSenderAudio::SetAudioLevel(const WebRtc_UWord8 level_dBov)
+{
+    if (level_dBov > 127)
+    {
+        return -1;
+    }
+    CriticalSectionScoped cs(_sendAudioCritsect);
+    _audioLevel_dBov = level_dBov;
+    return 0;
+}
+
+    // Set payload type for Redundant Audio Data RFC 2198
+WebRtc_Word32
+RTPSenderAudio::SetRED(const WebRtc_Word8 payloadType)
+{
+    if(payloadType < -1 )
+    {
+        return -1;
+    }
+    _REDPayloadType = payloadType;
+    return 0;
+}
+
+    // Get payload type for Redundant Audio Data RFC 2198
+WebRtc_Word32
+RTPSenderAudio::RED(WebRtc_Word8& payloadType) const
+{
+    if(_REDPayloadType == -1)
+    {
+        // not configured
+        return -1;
+    }
+    payloadType = _REDPayloadType;
+    return 0;
+}
+
+// Send a TelephoneEvent tone using RFC 2833 (4733)
+WebRtc_Word32
+RTPSenderAudio::SendTelephoneEvent(const WebRtc_UWord8 key,
+                                   const WebRtc_UWord16 time_ms,
+                                   const WebRtc_UWord8 level)
+{
+    // DTMF is protected by its own critsect
+    if(_dtmfPayloadType < 0)
+    {
+        // TelephoneEvent payloadtype not configured
+        return -1;
+    }
+    return AddDTMF(key, time_ms, level);
+}
+
+WebRtc_Word32
+RTPSenderAudio::SendTelephoneEventPacket(const bool ended,
+                                         const WebRtc_UWord32 dtmfTimeStamp,
+                                         const WebRtc_UWord16 duration,
+                                         const bool markerBit)
+{
+    WebRtc_UWord8 dtmfbuffer[IP_PACKET_SIZE];
+    WebRtc_UWord8 sendCount = 1;
+    WebRtc_Word32 retVal = 0;
+
+    if(ended)
+    {
+        // resend last packet in an event 3 times
+        sendCount = 3;
+    }
+    do
+    {
+        _sendAudioCritsect->Enter();
+
+        //Send DTMF data
+        _rtpSender->BuildRTPheader(dtmfbuffer, _dtmfPayloadType, markerBit, dtmfTimeStamp);
+
+        // reset CSRC and X bit
+        dtmfbuffer[0] &= 0xe0;
+
+        //Create DTMF data
+        /*    From RFC 2833:
+
+         0                   1                   2                   3
+         0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+        |     event     |E|R| volume    |          duration             |
+        +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+        */
+        // R bit always cleared
+        WebRtc_UWord8 R = 0x00;
+        WebRtc_UWord8 volume = _dtmfLevel;
+
+        // First packet un-ended
+          WebRtc_UWord8 E = 0x00;
+
+        if(ended)
+        {
+            E = 0x80;
+        }
+
+        // First byte is Event number, equals key number
+        dtmfbuffer[12] = _dtmfKey;
+        dtmfbuffer[13] = E|R|volume;
+        ModuleRTPUtility::AssignUWord16ToBuffer(dtmfbuffer+14, duration);
+
+        _sendAudioCritsect->Leave();
+        retVal = _rtpSender->SendToNetwork(dtmfbuffer, 4, 12, -1,
+                                           kAllowRetransmission);
+        sendCount--;
+
+    }while (sendCount > 0 && retVal == 0);
+
+    return retVal;
+}
+} // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/rtp_sender_audio.h b/src/modules/rtp_rtcp/source/rtp_sender_audio.h
new file mode 100644
index 0000000..5974441
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_sender_audio.h
@@ -0,0 +1,129 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_AUDIO_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_AUDIO_H_
+
+#include "rtp_rtcp_config.h"          // misc. defines (e.g. MAX_PACKET_LENGTH)
+#include "common_types.h"             // Transport
+#include "typedefs.h"
+
+#include "dtmf_queue.h"
+#include "rtp_utility.h"
+
+#include "rtp_sender.h"
+
+namespace webrtc {
+class RTPSenderAudio: public DTMFqueue
+{
+public:
+    RTPSenderAudio(const WebRtc_Word32 id, RtpRtcpClock* clock,
+                   RTPSenderInterface* rtpSender);
+    virtual ~RTPSenderAudio();
+
+    WebRtc_Word32 RegisterAudioPayload(
+        const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+        const WebRtc_Word8 payloadType,
+        const WebRtc_UWord32 frequency,
+        const WebRtc_UWord8 channels,
+        const WebRtc_UWord32 rate,
+        ModuleRTPUtility::Payload*& payload);
+
+    WebRtc_Word32 SendAudio(const FrameType frameType,
+                            const WebRtc_Word8 payloadType,
+                            const WebRtc_UWord32 captureTimeStamp,
+                            const WebRtc_UWord8* payloadData,
+                            const WebRtc_UWord32 payloadSize,
+                            const RTPFragmentationHeader* fragmentation);
+
+    // set audio packet size, used to determine when it's time to send a DTMF packet in silence (CNG)
+    WebRtc_Word32 SetAudioPacketSize(const WebRtc_UWord16 packetSizeSamples);
+
+    // Set status and ID for header-extension-for-audio-level-indication.
+    // Valid ID range is [1,14].
+    WebRtc_Word32 SetAudioLevelIndicationStatus(const bool enable,
+                                              const WebRtc_UWord8 ID);
+
+    // Get status and ID for header-extension-for-audio-level-indication.
+    WebRtc_Word32 AudioLevelIndicationStatus(bool& enable,
+                                           WebRtc_UWord8& ID) const;
+
+    // Store the audio level in dBov for header-extension-for-audio-level-indication.
+    // Valid range is [0,100]. Actual value is negative.
+    WebRtc_Word32 SetAudioLevel(const WebRtc_UWord8 level_dBov);
+
+    // Send a DTMF tone using RFC 2833 (4733)
+      WebRtc_Word32 SendTelephoneEvent(const WebRtc_UWord8 key,
+                                   const WebRtc_UWord16 time_ms,
+                                   const WebRtc_UWord8 level);
+
+    bool SendTelephoneEventActive(WebRtc_Word8& telephoneEvent) const;
+
+    void SetAudioFrequency(const WebRtc_UWord32 f);
+
+    int AudioFrequency() const;
+
+    // Set payload type for Redundant Audio Data RFC 2198
+    WebRtc_Word32 SetRED(const WebRtc_Word8 payloadType);
+
+    // Get payload type for Redundant Audio Data RFC 2198
+    WebRtc_Word32 RED(WebRtc_Word8& payloadType) const;
+
+    WebRtc_Word32 RegisterAudioCallback(RtpAudioFeedback* messagesCallback);
+
+protected:
+    WebRtc_Word32 SendTelephoneEventPacket(const bool ended,
+                                         const WebRtc_UWord32 dtmfTimeStamp,
+                                         const WebRtc_UWord16 duration,
+                                         const bool markerBit); // set on first packet in talk burst
+
+    bool MarkerBit(const FrameType frameType,
+                   const WebRtc_Word8 payloadType);
+
+private:
+    WebRtc_Word32             _id;
+    RtpRtcpClock&             _clock;
+    RTPSenderInterface*     _rtpSender;
+    CriticalSectionWrapper* _audioFeedbackCritsect;
+    RtpAudioFeedback*   _audioFeedback;
+
+    CriticalSectionWrapper*   _sendAudioCritsect;
+
+    WebRtc_UWord32            _frequency;
+    WebRtc_UWord16            _packetSizeSamples;
+
+    // DTMF
+    bool              _dtmfEventIsOn;
+    bool              _dtmfEventFirstPacketSent;
+    WebRtc_Word8      _dtmfPayloadType;
+    WebRtc_UWord32    _dtmfTimestamp;
+    WebRtc_UWord8     _dtmfKey;
+    WebRtc_UWord32    _dtmfLengthSamples;
+    WebRtc_UWord8     _dtmfLevel;
+    WebRtc_Word64     _dtmfTimeLastSent;
+    WebRtc_UWord32    _dtmfTimestampLastSent;
+
+    WebRtc_Word8      _REDPayloadType;
+
+    // VAD detection, used for markerbit
+    bool              _inbandVADactive;
+    WebRtc_Word8      _cngNBPayloadType;
+    WebRtc_Word8      _cngWBPayloadType;
+    WebRtc_Word8      _cngSWBPayloadType;
+    WebRtc_Word8      _lastPayloadType;
+
+    // Audio level indication (https://datatracker.ietf.org/doc/draft-lennox-avt-rtp-audio-level-exthdr/)
+    bool            _includeAudioLevelIndication;
+    WebRtc_UWord8     _audioLevelIndicationID;
+    WebRtc_UWord8     _audioLevel_dBov;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_AUDIO_H_
diff --git a/src/modules/rtp_rtcp/source/rtp_sender_unittest.cc b/src/modules/rtp_rtcp/source/rtp_sender_unittest.cc
new file mode 100644
index 0000000..7bbb190
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_sender_unittest.cc
@@ -0,0 +1,259 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file includes unit tests for the RTPSender.
+ */
+
+#include <gtest/gtest.h>
+
+#include "rtp_header_extension.h"
+#include "rtp_rtcp_defines.h"
+#include "rtp_sender.h"
+#include "rtp_utility.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+namespace {
+const int kId = 1;
+const int kTypeLength = TRANSMISSION_TIME_OFFSET_LENGTH_IN_BYTES;
+const int kPayload = 100;
+const uint32_t kTimestamp = 10;
+const uint16_t kSeqNum = 33;
+const int kTimeOffset = 22222;
+const int kMaxPacketLength = 1500;
+}  // namespace
+
+class FakeClockTest : public RtpRtcpClock {
+ public:
+  FakeClockTest() {
+    time_in_ms_ = 123456;
+  }
+  // Return a timestamp in milliseconds relative to some arbitrary
+  // source; the source is fixed for this clock.
+  virtual WebRtc_Word64 GetTimeInMS() {
+    return time_in_ms_;
+  }
+  // Retrieve an NTP absolute timestamp.
+  virtual void CurrentNTP(WebRtc_UWord32& secs, WebRtc_UWord32& frac) {
+    secs = time_in_ms_ / 1000;
+    frac = (time_in_ms_ % 1000) * 4294967;
+  }
+  void IncrementTime(WebRtc_UWord32 time_increment_ms) {
+    time_in_ms_ += time_increment_ms;
+  }
+ private:
+  WebRtc_Word64 time_in_ms_;
+};
+
+class LoopbackTransportTest : public webrtc::Transport {
+ public:
+  LoopbackTransportTest()
+    : packets_sent_(0),
+      last_sent_packet_len_(0) {
+  }
+  virtual int SendPacket(int channel, const void *data, int len) {
+    packets_sent_++;
+    memcpy(last_sent_packet_, data, len);
+    last_sent_packet_len_ = len;
+    return len;
+  }
+  virtual int SendRTCPPacket(int channel, const void *data, int len) {
+    return -1;
+  }
+  int packets_sent_;
+  int last_sent_packet_len_;
+  uint8_t last_sent_packet_[kMaxPacketLength];
+};
+
+class RtpSenderTest : public ::testing::Test {
+ protected:
+  RtpSenderTest()
+    : fake_clock_(),
+      rtp_sender_(new RTPSender(0, false, &fake_clock_)),
+      transport_(),
+      kMarkerBit(true),
+      kType(kRtpExtensionTransmissionTimeOffset),
+      packet_() {
+    EXPECT_EQ(0, rtp_sender_->SetSequenceNumber(kSeqNum));
+  }
+  ~RtpSenderTest() {
+    delete rtp_sender_;
+  }
+
+  FakeClockTest fake_clock_;
+  RTPSender* rtp_sender_;
+  LoopbackTransportTest transport_;
+  const bool kMarkerBit;
+  RTPExtensionType kType;
+  uint8_t packet_[kMaxPacketLength];
+
+  void VerifyRTPHeaderCommon(const WebRtcRTPHeader& rtp_header) {
+    EXPECT_EQ(kMarkerBit, rtp_header.header.markerBit);
+    EXPECT_EQ(kPayload, rtp_header.header.payloadType);
+    EXPECT_EQ(kSeqNum, rtp_header.header.sequenceNumber);
+    EXPECT_EQ(kTimestamp, rtp_header.header.timestamp);
+    EXPECT_EQ(rtp_sender_->SSRC(), rtp_header.header.ssrc);
+    EXPECT_EQ(0, rtp_header.header.numCSRCs);
+    EXPECT_EQ(0, rtp_header.header.paddingLength);
+  }
+};
+
+TEST_F(RtpSenderTest, RegisterRtpHeaderExtension) {
+  EXPECT_EQ(0, rtp_sender_->RtpHeaderExtensionTotalLength());
+  EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(kType, kId));
+  EXPECT_EQ(RTP_ONE_BYTE_HEADER_LENGTH_IN_BYTES + kTypeLength,
+            rtp_sender_->RtpHeaderExtensionTotalLength());
+  EXPECT_EQ(0, rtp_sender_->DeregisterRtpHeaderExtension(kType));
+  EXPECT_EQ(0, rtp_sender_->RtpHeaderExtensionTotalLength());
+}
+
+TEST_F(RtpSenderTest, BuildRTPPacket) {
+  WebRtc_Word32 length = rtp_sender_->BuildRTPheader(packet_,
+                                                     kPayload,
+                                                     kMarkerBit,
+                                                     kTimestamp);
+  EXPECT_EQ(12, length);
+
+  // Verify
+  webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(packet_, length);
+  webrtc::WebRtcRTPHeader rtp_header;
+
+  RtpHeaderExtensionMap map;
+  map.Register(kType, kId);
+  const bool valid_rtp_header = rtp_parser.Parse(rtp_header, &map);
+
+  ASSERT_TRUE(valid_rtp_header);
+  ASSERT_FALSE(rtp_parser.RTCP());
+  VerifyRTPHeaderCommon(rtp_header);
+  EXPECT_EQ(length, rtp_header.header.headerLength);
+  EXPECT_EQ(0, rtp_header.extension.transmissionTimeOffset);
+}
+
+TEST_F(RtpSenderTest, BuildRTPPacketWithTransmissionOffsetExtension) {
+  EXPECT_EQ(0, rtp_sender_->SetTransmissionTimeOffset(kTimeOffset));
+  EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(kType, kId));
+
+  WebRtc_Word32 length = rtp_sender_->BuildRTPheader(packet_,
+                                                     kPayload,
+                                                     kMarkerBit,
+                                                     kTimestamp);
+  EXPECT_EQ(12 + rtp_sender_->RtpHeaderExtensionTotalLength(), length);
+
+  // Verify
+  webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(packet_, length);
+  webrtc::WebRtcRTPHeader rtp_header;
+
+  RtpHeaderExtensionMap map;
+  map.Register(kType, kId);
+  const bool valid_rtp_header = rtp_parser.Parse(rtp_header, &map);
+
+  ASSERT_TRUE(valid_rtp_header);
+  ASSERT_FALSE(rtp_parser.RTCP());
+  VerifyRTPHeaderCommon(rtp_header);
+  EXPECT_EQ(length, rtp_header.header.headerLength);
+  EXPECT_EQ(kTimeOffset, rtp_header.extension.transmissionTimeOffset);
+
+  // Parse without map extension
+  webrtc::WebRtcRTPHeader rtp_header2;
+  const bool valid_rtp_header2 = rtp_parser.Parse(rtp_header2, NULL);
+
+  ASSERT_TRUE(valid_rtp_header2);
+  VerifyRTPHeaderCommon(rtp_header2);
+  EXPECT_EQ(length, rtp_header2.header.headerLength);
+  EXPECT_EQ(0, rtp_header2.extension.transmissionTimeOffset);
+}
+
+TEST_F(RtpSenderTest, BuildRTPPacketWithNegativeTransmissionOffsetExtension) {
+  const int kNegTimeOffset = -500;
+  EXPECT_EQ(0, rtp_sender_->SetTransmissionTimeOffset(kNegTimeOffset));
+  EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(kType, kId));
+
+  WebRtc_Word32 length = rtp_sender_->BuildRTPheader(packet_,
+                                                     kPayload,
+                                                     kMarkerBit,
+                                                     kTimestamp);
+  EXPECT_EQ(12 + rtp_sender_->RtpHeaderExtensionTotalLength(), length);
+
+  // Verify
+  webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(packet_, length);
+  webrtc::WebRtcRTPHeader rtp_header;
+
+  RtpHeaderExtensionMap map;
+  map.Register(kType, kId);
+  const bool valid_rtp_header = rtp_parser.Parse(rtp_header, &map);
+
+  ASSERT_TRUE(valid_rtp_header);
+  ASSERT_FALSE(rtp_parser.RTCP());
+  VerifyRTPHeaderCommon(rtp_header);
+  EXPECT_EQ(length, rtp_header.header.headerLength);
+  EXPECT_EQ(kNegTimeOffset, rtp_header.extension.transmissionTimeOffset);
+}
+
+TEST_F(RtpSenderTest, NoTrafficSmoothing) {
+  EXPECT_EQ(0, rtp_sender_->RegisterSendTransport(&transport_));
+
+  WebRtc_Word32 rtp_length = rtp_sender_->BuildRTPheader(packet_,
+                                                         kPayload,
+                                                         kMarkerBit,
+                                                         kTimestamp);
+
+  // Packet should be sent immediately.
+  EXPECT_EQ(0, rtp_sender_->SendToNetwork(packet_,
+                                          0,
+                                          rtp_length,
+                                          kTimestamp / 90,
+                                          kAllowRetransmission));
+  EXPECT_EQ(1, transport_.packets_sent_);
+  EXPECT_EQ(rtp_length, transport_.last_sent_packet_len_);
+}
+
+TEST_F(RtpSenderTest, TrafficSmoothing) {
+  rtp_sender_->SetTransmissionSmoothingStatus(true);
+  EXPECT_EQ(0, rtp_sender_->SetStorePacketsStatus(true, 10));
+  EXPECT_EQ(0, rtp_sender_->RegisterRtpHeaderExtension(kType, kId));
+  EXPECT_EQ(0, rtp_sender_->RegisterSendTransport(&transport_));
+
+  WebRtc_Word32 rtp_length = rtp_sender_->BuildRTPheader(packet_,
+                                                         kPayload,
+                                                         kMarkerBit,
+                                                         kTimestamp);
+
+  // Packet should be stored in a send bucket.
+  EXPECT_EQ(0, rtp_sender_->SendToNetwork(packet_,
+                                          0,
+                                          rtp_length,
+                                          fake_clock_.GetTimeInMS(),
+                                          kAllowRetransmission));
+  EXPECT_EQ(0, transport_.packets_sent_);
+
+  const int kStoredTimeInMs = 100;
+  fake_clock_.IncrementTime(kStoredTimeInMs);
+
+  // Process send bucket. Packet should now be sent.
+  rtp_sender_->ProcessSendToNetwork();
+  EXPECT_EQ(1, transport_.packets_sent_);
+  EXPECT_EQ(rtp_length, transport_.last_sent_packet_len_);
+
+  // Parse sent packet.
+  webrtc::ModuleRTPUtility::RTPHeaderParser rtp_parser(
+      transport_.last_sent_packet_, rtp_length);
+  webrtc::WebRtcRTPHeader rtp_header;
+
+  RtpHeaderExtensionMap map;
+  map.Register(kType, kId);
+  const bool valid_rtp_header = rtp_parser.Parse(rtp_header, &map);
+  ASSERT_TRUE(valid_rtp_header);
+
+  // Verify transmission time offset.
+  EXPECT_EQ(kStoredTimeInMs * 90, rtp_header.extension.transmissionTimeOffset);
+}
+}  // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/rtp_sender_video.cc b/src/modules/rtp_rtcp/source/rtp_sender_video.cc
new file mode 100644
index 0000000..e56aef9
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_sender_video.cc
@@ -0,0 +1,492 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtp_sender_video.h"
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+#include "rtp_utility.h"
+
+#include <string.h> // memcpy
+#include <cassert>  // assert
+#include <cstdlib>  // srand
+
+#include "producer_fec.h"
+#include "rtp_format_vp8.h"
+
+namespace webrtc {
+enum { REDForFECHeaderLength = 1 };
+
+struct RtpPacket {
+  WebRtc_UWord16 rtpHeaderLength;
+  ForwardErrorCorrection::Packet* pkt;
+};
+
+RTPSenderVideo::RTPSenderVideo(const WebRtc_Word32 id,
+                               RtpRtcpClock* clock,
+                               RTPSenderInterface* rtpSender) :
+    _id(id),
+    _rtpSender(*rtpSender),
+    _sendVideoCritsect(CriticalSectionWrapper::CreateCriticalSection()),
+
+    _videoType(kRtpNoVideo),
+    _videoCodecInformation(NULL),
+    _maxBitrate(0),
+    _retransmissionSettings(kRetransmitBaseLayer),
+
+    // Generic FEC
+    _fec(id),
+    _fecEnabled(false),
+    _payloadTypeRED(-1),
+    _payloadTypeFEC(-1),
+    _numberFirstPartition(0),
+    delta_fec_params_(),
+    key_fec_params_(),
+    producer_fec_(&_fec),
+    _fecOverheadRate(clock),
+    _videoBitrate(clock) {
+  memset(&delta_fec_params_, 0, sizeof(delta_fec_params_));
+  memset(&key_fec_params_, 0, sizeof(key_fec_params_));
+  delta_fec_params_.max_fec_frames = key_fec_params_.max_fec_frames = 1;
+  delta_fec_params_.fec_mask_type = key_fec_params_.fec_mask_type =
+        kFecMaskRandom;
+}
+
+RTPSenderVideo::~RTPSenderVideo()
+{
+    if(_videoCodecInformation)
+    {
+        delete _videoCodecInformation;
+    }
+    delete _sendVideoCritsect;
+}
+
+void
+RTPSenderVideo::SetVideoCodecType(RtpVideoCodecTypes videoType)
+{
+    CriticalSectionScoped cs(_sendVideoCritsect);
+    _videoType = videoType;
+}
+
+RtpVideoCodecTypes
+RTPSenderVideo::VideoCodecType() const
+{
+    return _videoType;
+}
+
+WebRtc_Word32 RTPSenderVideo::RegisterVideoPayload(
+    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+    const WebRtc_Word8 payloadType,
+    const WebRtc_UWord32 maxBitRate,
+    ModuleRTPUtility::Payload*& payload) {
+  CriticalSectionScoped cs(_sendVideoCritsect);
+
+  RtpVideoCodecTypes videoType = kRtpNoVideo;
+  if (ModuleRTPUtility::StringCompare(payloadName, "VP8",3)) {
+    videoType = kRtpVp8Video;
+  } else if (ModuleRTPUtility::StringCompare(payloadName, "I420", 4)) {
+    videoType = kRtpNoVideo;
+  } else {
+    return -1;
+  }
+  payload = new ModuleRTPUtility::Payload;
+  payload->name[RTP_PAYLOAD_NAME_SIZE - 1] = 0;
+  strncpy(payload->name, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
+  payload->typeSpecific.Video.videoCodecType = videoType;
+  payload->typeSpecific.Video.maxRate = maxBitRate;
+  payload->audio = false;
+  return 0;
+}
+
+WebRtc_Word32
+RTPSenderVideo::SendVideoPacket(WebRtc_UWord8* data_buffer,
+                                const WebRtc_UWord16 payload_length,
+                                const WebRtc_UWord16 rtp_header_length,
+                                int64_t capture_time_ms,
+                                StorageType storage,
+                                bool protect) {
+  if(_fecEnabled) {
+    int ret = 0;
+    int fec_overhead_sent = 0;
+    int video_sent = 0;
+
+    RedPacket* red_packet = producer_fec_.BuildRedPacket(data_buffer,
+                                                         payload_length,
+                                                         rtp_header_length,
+                                                         _payloadTypeRED);
+    // Sending the media packet with RED header.
+    int packet_success = _rtpSender.SendToNetwork(
+        red_packet->data(),
+        red_packet->length() - rtp_header_length,
+        rtp_header_length,
+        capture_time_ms,
+        storage);
+
+    ret |= packet_success;
+
+    if (packet_success == 0) {
+      video_sent += red_packet->length();
+    }
+    delete red_packet;
+    red_packet = NULL;
+
+    if (protect) {
+      ret = producer_fec_.AddRtpPacketAndGenerateFec(data_buffer,
+                                                     payload_length,
+                                                     rtp_header_length);
+      if (ret != 0)
+        return ret;
+    }
+
+    while (producer_fec_.FecAvailable()) {
+      red_packet = producer_fec_.GetFecPacket(
+          _payloadTypeRED,
+          _payloadTypeFEC,
+          _rtpSender.IncrementSequenceNumber(),
+          rtp_header_length);
+      StorageType storage = kDontRetransmit;
+      if (_retransmissionSettings & kRetransmitFECPackets) {
+        storage = kAllowRetransmission;
+      }
+      // Sending FEC packet with RED header.
+      int packet_success = _rtpSender.SendToNetwork(
+          red_packet->data(),
+          red_packet->length() - rtp_header_length,
+          rtp_header_length,
+          capture_time_ms,
+          storage);
+
+      ret |= packet_success;
+
+      if (packet_success == 0) {
+        fec_overhead_sent += red_packet->length();
+      }
+      delete red_packet;
+      red_packet = NULL;
+    }
+    _videoBitrate.Update(video_sent);
+    _fecOverheadRate.Update(fec_overhead_sent);
+    return ret;
+  }
+  int ret = _rtpSender.SendToNetwork(data_buffer,
+                                     payload_length,
+                                     rtp_header_length,
+                                     capture_time_ms,
+                                     storage);
+  if (ret == 0) {
+    _videoBitrate.Update(payload_length + rtp_header_length);
+  }
+  return ret;
+}
+
+WebRtc_Word32
+RTPSenderVideo::SendRTPIntraRequest()
+{
+    // RFC 2032
+    // 5.2.1.  Full intra-frame Request (FIR) packet
+
+    WebRtc_UWord16 length = 8;
+    WebRtc_UWord8 data[8];
+    data[0] = 0x80;
+    data[1] = 192;
+    data[2] = 0;
+    data[3] = 1; // length
+
+    ModuleRTPUtility::AssignUWord32ToBuffer(data+4, _rtpSender.SSRC());
+
+    return _rtpSender.SendToNetwork(data, 0, length, -1, kAllowRetransmission);
+}
+
+WebRtc_Word32
+RTPSenderVideo::SetGenericFECStatus(const bool enable,
+                                    const WebRtc_UWord8 payloadTypeRED,
+                                    const WebRtc_UWord8 payloadTypeFEC)
+{
+    _fecEnabled = enable;
+    _payloadTypeRED = payloadTypeRED;
+    _payloadTypeFEC = payloadTypeFEC;
+    memset(&delta_fec_params_, 0, sizeof(delta_fec_params_));
+    memset(&key_fec_params_, 0, sizeof(key_fec_params_));
+    delta_fec_params_.max_fec_frames = key_fec_params_.max_fec_frames = 1;
+    delta_fec_params_.fec_mask_type = key_fec_params_.fec_mask_type =
+          kFecMaskRandom;
+    return 0;
+}
+
+WebRtc_Word32
+RTPSenderVideo::GenericFECStatus(bool& enable,
+                                 WebRtc_UWord8& payloadTypeRED,
+                                 WebRtc_UWord8& payloadTypeFEC) const
+{
+    enable = _fecEnabled;
+    payloadTypeRED = _payloadTypeRED;
+    payloadTypeFEC = _payloadTypeFEC;
+    return 0;
+}
+
+WebRtc_UWord16
+RTPSenderVideo::FECPacketOverhead() const
+{
+    if (_fecEnabled)
+    {
+        return ForwardErrorCorrection::PacketOverhead() +
+            REDForFECHeaderLength;
+    }
+    return 0;
+}
+
+WebRtc_Word32 RTPSenderVideo::SetFecParameters(
+    const FecProtectionParams* delta_params,
+    const FecProtectionParams* key_params) {
+  assert(delta_params);
+  assert(key_params);
+  delta_fec_params_ = *delta_params;
+  key_fec_params_ = *key_params;
+  return 0;
+}
+
+WebRtc_Word32
+RTPSenderVideo::SendVideo(const RtpVideoCodecTypes videoType,
+                          const FrameType frameType,
+                          const WebRtc_Word8 payloadType,
+                          const uint32_t captureTimeStamp,
+                          int64_t capture_time_ms,
+                          const WebRtc_UWord8* payloadData,
+                          const WebRtc_UWord32 payloadSize,
+                          const RTPFragmentationHeader* fragmentation,
+                          VideoCodecInformation* codecInfo,
+                          const RTPVideoTypeHeader* rtpTypeHdr)
+{
+    if( payloadSize == 0)
+    {
+        return -1;
+    }
+
+    if (frameType == kVideoFrameKey) {
+      producer_fec_.SetFecParameters(&key_fec_params_,
+                                     _numberFirstPartition);
+    } else {
+      producer_fec_.SetFecParameters(&delta_fec_params_,
+                                     _numberFirstPartition);
+    }
+
+    // Default setting for number of first partition packets:
+    // Will be extracted in SendVP8 for VP8 codec; other codecs use 0
+    _numberFirstPartition = 0;
+
+    WebRtc_Word32 retVal = -1;
+    switch(videoType)
+    {
+    case kRtpNoVideo:
+        retVal = SendGeneric(payloadType, captureTimeStamp, capture_time_ms,
+                             payloadData, payloadSize);
+        break;
+    case kRtpVp8Video:
+        retVal = SendVP8(frameType,
+                         payloadType,
+                         captureTimeStamp,
+                         capture_time_ms,
+                         payloadData,
+                         payloadSize,
+                         fragmentation,
+                         rtpTypeHdr);
+        break;
+    default:
+        assert(false);
+        break;
+    }
+    if(retVal <= 0)
+    {
+        return retVal;
+    }
+    WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, _id, "%s(timestamp:%u)",
+                 __FUNCTION__, captureTimeStamp);
+    return 0;
+}
+
+WebRtc_Word32
+RTPSenderVideo::SendGeneric(const WebRtc_Word8 payloadType,
+                            const uint32_t captureTimeStamp,
+                            int64_t capture_time_ms,
+                            const WebRtc_UWord8* payloadData,
+                            const WebRtc_UWord32 payloadSize)
+{
+    WebRtc_UWord16 payloadBytesInPacket = 0;
+    WebRtc_UWord32 bytesSent = 0;
+    WebRtc_Word32 payloadBytesToSend = payloadSize;
+
+    const WebRtc_UWord8* data = payloadData;
+    WebRtc_UWord16 rtpHeaderLength = _rtpSender.RTPHeaderLength();
+    WebRtc_UWord16 maxLength = _rtpSender.MaxPayloadLength() -
+        FECPacketOverhead() - rtpHeaderLength;
+    WebRtc_UWord8 dataBuffer[IP_PACKET_SIZE];
+
+    // Fragment packet into packets of max MaxPayloadLength bytes payload.
+    while (payloadBytesToSend > 0)
+    {
+        if (payloadBytesToSend > maxLength)
+        {
+            payloadBytesInPacket = maxLength;
+            payloadBytesToSend -= payloadBytesInPacket;
+            // MarkerBit is 0
+            if(_rtpSender.BuildRTPheader(dataBuffer,
+                                         payloadType,
+                                         false,
+                                         captureTimeStamp) != rtpHeaderLength)
+            {
+                return -1;
+           }
+        }
+        else
+        {
+            payloadBytesInPacket = (WebRtc_UWord16)payloadBytesToSend;
+            payloadBytesToSend = 0;
+            // MarkerBit is 1
+            if(_rtpSender.BuildRTPheader(dataBuffer, payloadType, true,
+                                         captureTimeStamp) != rtpHeaderLength)
+            {
+                return -1;
+            }
+        }
+
+        // Put payload in packet
+        memcpy(&dataBuffer[rtpHeaderLength], &data[bytesSent],
+               payloadBytesInPacket);
+        bytesSent += payloadBytesInPacket;
+
+        if(-1 == SendVideoPacket(dataBuffer,
+                                 payloadBytesInPacket,
+                                 rtpHeaderLength,
+                                 capture_time_ms,
+                                 kAllowRetransmission,
+                                 true))
+        {
+            return -1;
+        }
+    }
+    return 0;
+}
+
+VideoCodecInformation*
+RTPSenderVideo::CodecInformationVideo()
+{
+    return _videoCodecInformation;
+}
+
+void
+RTPSenderVideo::SetMaxConfiguredBitrateVideo(const WebRtc_UWord32 maxBitrate)
+{
+    _maxBitrate = maxBitrate;
+}
+
+WebRtc_UWord32
+RTPSenderVideo::MaxConfiguredBitrateVideo() const
+{
+    return _maxBitrate;
+}
+
+WebRtc_Word32
+RTPSenderVideo::SendVP8(const FrameType frameType,
+                        const WebRtc_Word8 payloadType,
+                        const uint32_t captureTimeStamp,
+                        int64_t capture_time_ms,
+                        const WebRtc_UWord8* payloadData,
+                        const WebRtc_UWord32 payloadSize,
+                        const RTPFragmentationHeader* fragmentation,
+                        const RTPVideoTypeHeader* rtpTypeHdr)
+{
+    const WebRtc_UWord16 rtpHeaderLength = _rtpSender.RTPHeaderLength();
+
+    WebRtc_Word32 payloadBytesToSend = payloadSize;
+    const WebRtc_UWord8* data = payloadData;
+
+    WebRtc_UWord16 maxPayloadLengthVP8 = _rtpSender.MaxDataPayloadLength();
+
+    assert(rtpTypeHdr);
+    // Initialize disregarding partition boundaries: this will use kEqualSize
+    // packetization mode, which produces ~equal size packets for each frame.
+    RtpFormatVp8 packetizer(data, payloadBytesToSend, rtpTypeHdr->VP8,
+                            maxPayloadLengthVP8);
+
+    StorageType storage = kAllowRetransmission;
+    if (rtpTypeHdr->VP8.temporalIdx == 0 &&
+        !(_retransmissionSettings & kRetransmitBaseLayer)) {
+      storage = kDontRetransmit;
+    }
+    if (rtpTypeHdr->VP8.temporalIdx > 0 &&
+        !(_retransmissionSettings & kRetransmitHigherLayers)) {
+      storage = kDontRetransmit;
+    }
+
+    bool last = false;
+    _numberFirstPartition = 0;
+    // |rtpTypeHdr->VP8.temporalIdx| is zero for base layers, or -1 if the field
+    // isn't used. We currently only protect base layers.
+    bool protect = (rtpTypeHdr->VP8.temporalIdx < 1);
+    while (!last)
+    {
+        // Write VP8 Payload Descriptor and VP8 payload.
+        WebRtc_UWord8 dataBuffer[IP_PACKET_SIZE] = {0};
+        int payloadBytesInPacket = 0;
+        int packetStartPartition =
+            packetizer.NextPacket(&dataBuffer[rtpHeaderLength],
+                                  &payloadBytesInPacket, &last);
+        // TODO(holmer): Temporarily disable first partition packet counting
+        // to avoid a bug in ProducerFec which doesn't properly handle
+        // important packets.
+        // if (packetStartPartition == 0)
+        // {
+        //     ++_numberFirstPartition;
+        // }
+        // else
+        if (packetStartPartition < 0)
+        {
+            return -1;
+        }
+
+        // Write RTP header.
+        // Set marker bit true if this is the last packet in frame.
+        _rtpSender.BuildRTPheader(dataBuffer, payloadType, last,
+            captureTimeStamp);
+        if (-1 == SendVideoPacket(dataBuffer, payloadBytesInPacket,
+            rtpHeaderLength, capture_time_ms, storage, protect))
+        {
+          WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                       "RTPSenderVideo::SendVP8 failed to send packet number"
+                       " %d", _rtpSender.SequenceNumber());
+        }
+    }
+    return 0;
+}
+
+void RTPSenderVideo::ProcessBitrate() {
+  _videoBitrate.Process();
+  _fecOverheadRate.Process();
+}
+
+WebRtc_UWord32 RTPSenderVideo::VideoBitrateSent() const {
+  return _videoBitrate.BitrateLast();
+}
+
+WebRtc_UWord32 RTPSenderVideo::FecOverheadRate() const {
+  return _fecOverheadRate.BitrateLast();
+}
+
+int RTPSenderVideo::SelectiveRetransmissions() const {
+  return _retransmissionSettings;
+}
+
+int RTPSenderVideo::SetSelectiveRetransmissions(uint8_t settings) {
+  _retransmissionSettings = settings;
+  return 0;
+}
+
+} // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/rtp_sender_video.h b/src/modules/rtp_rtcp/source/rtp_sender_video.h
new file mode 100644
index 0000000..64ac058
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_sender_video.h
@@ -0,0 +1,143 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_VIDEO_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_VIDEO_H_
+
+#include <list>
+
+#include "typedefs.h"
+#include "common_types.h"               // Transport
+#include "rtp_rtcp_config.h"
+
+#include "rtp_rtcp_defines.h"
+#include "rtp_utility.h"
+
+#include "video_codec_information.h"
+#include "forward_error_correction.h"
+#include "Bitrate.h"
+#include "rtp_sender.h"
+#include "producer_fec.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+struct RtpPacket;
+
+class RTPSenderVideo
+{
+public:
+    RTPSenderVideo(const WebRtc_Word32 id, RtpRtcpClock* clock,
+                   RTPSenderInterface* rtpSender);
+    virtual ~RTPSenderVideo();
+
+    virtual RtpVideoCodecTypes VideoCodecType() const;
+
+    WebRtc_UWord16 FECPacketOverhead() const;
+
+    WebRtc_Word32 RegisterVideoPayload(
+        const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+        const WebRtc_Word8 payloadType,
+        const WebRtc_UWord32 maxBitRate,
+        ModuleRTPUtility::Payload*& payload);
+
+    WebRtc_Word32 SendVideo(const RtpVideoCodecTypes videoType,
+                          const FrameType frameType,
+                          const WebRtc_Word8 payloadType,
+                          const uint32_t captureTimeStamp,
+                          int64_t capture_time_ms,
+                          const WebRtc_UWord8* payloadData,
+                          const WebRtc_UWord32 payloadSize,
+                          const RTPFragmentationHeader* fragmentation,
+                          VideoCodecInformation* codecInfo,
+                          const RTPVideoTypeHeader* rtpTypeHdr);
+
+    WebRtc_Word32 SendRTPIntraRequest();
+
+    void SetVideoCodecType(RtpVideoCodecTypes type);
+
+    VideoCodecInformation* CodecInformationVideo();
+
+    void SetMaxConfiguredBitrateVideo(const WebRtc_UWord32 maxBitrate);
+
+    WebRtc_UWord32 MaxConfiguredBitrateVideo() const;
+
+    // FEC
+    WebRtc_Word32 SetGenericFECStatus(const bool enable,
+                                    const WebRtc_UWord8 payloadTypeRED,
+                                    const WebRtc_UWord8 payloadTypeFEC);
+
+    WebRtc_Word32 GenericFECStatus(bool& enable,
+                                 WebRtc_UWord8& payloadTypeRED,
+                                 WebRtc_UWord8& payloadTypeFEC) const;
+
+    WebRtc_Word32 SetFecParameters(const FecProtectionParams* delta_params,
+                                   const FecProtectionParams* key_params);
+
+    void ProcessBitrate();
+
+    WebRtc_UWord32 VideoBitrateSent() const;
+    WebRtc_UWord32 FecOverheadRate() const;
+
+    int SelectiveRetransmissions() const;
+    int SetSelectiveRetransmissions(uint8_t settings);
+
+protected:
+    virtual WebRtc_Word32 SendVideoPacket(WebRtc_UWord8* dataBuffer,
+                                          const WebRtc_UWord16 payloadLength,
+                                          const WebRtc_UWord16 rtpHeaderLength,
+                                          int64_t capture_time_ms,
+                                          StorageType storage,
+                                          bool protect);
+
+private:
+    WebRtc_Word32 SendGeneric(const WebRtc_Word8 payloadType,
+                            const uint32_t captureTimeStamp,
+                            int64_t capture_time_ms,
+                            const WebRtc_UWord8* payloadData,
+                            const WebRtc_UWord32 payloadSize);
+
+    WebRtc_Word32 SendVP8(const FrameType frameType,
+                        const WebRtc_Word8 payloadType,
+                        const uint32_t captureTimeStamp,
+                        int64_t capture_time_ms,
+                        const WebRtc_UWord8* payloadData,
+                        const WebRtc_UWord32 payloadSize,
+                        const RTPFragmentationHeader* fragmentation,
+                        const RTPVideoTypeHeader* rtpTypeHdr);
+
+private:
+    WebRtc_Word32             _id;
+    RTPSenderInterface&        _rtpSender;
+
+    CriticalSectionWrapper*   _sendVideoCritsect;
+    RtpVideoCodecTypes  _videoType;
+    VideoCodecInformation*  _videoCodecInformation;
+    WebRtc_UWord32            _maxBitrate;
+    WebRtc_Word32             _retransmissionSettings;
+
+    // FEC
+    ForwardErrorCorrection  _fec;
+    bool                    _fecEnabled;
+    WebRtc_Word8              _payloadTypeRED;
+    WebRtc_Word8              _payloadTypeFEC;
+    unsigned int              _numberFirstPartition;
+    FecProtectionParams delta_fec_params_;
+    FecProtectionParams key_fec_params_;
+    ProducerFec producer_fec_;
+
+    // Bitrate used for FEC payload, RED headers, RTP headers for FEC packets
+    // and any padding overhead.
+    Bitrate                   _fecOverheadRate;
+    // Bitrate used for video payload and RTP headers
+    Bitrate                   _videoBitrate;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_SENDER_VIDEO_H_
diff --git a/src/modules/rtp_rtcp/source/rtp_utility.cc b/src/modules/rtp_rtcp/source/rtp_utility.cc
new file mode 100644
index 0000000..298d479
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_utility.cc
@@ -0,0 +1,912 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtp_utility.h"
+
+#include <cassert>
+#include <cmath>  // ceil
+#include <cstring>  // memcpy
+
+#if defined(_WIN32)
+#include <Windows.h>  // FILETIME
+#include <WinSock.h>  // timeval
+#include <MMSystem.h>  // timeGetTime
+#elif ((defined WEBRTC_LINUX) || (defined WEBRTC_MAC))
+#include <sys/time.h>  // gettimeofday
+#include <time.h>
+#endif
+#if (defined(_DEBUG) && defined(_WIN32) && (_MSC_VER >= 1400))
+#include <stdio.h>
+#endif
+
+#include "system_wrappers/interface/tick_util.h"
+#include "system_wrappers/interface/trace.h"
+
+#if (defined(_DEBUG) && defined(_WIN32) && (_MSC_VER >= 1400))
+#define DEBUG_PRINT(...)           \
+  {                                \
+    char msg[256];                 \
+    sprintf(msg, __VA_ARGS__);     \
+    OutputDebugString(msg);        \
+  }
+#else
+// special fix for visual 2003
+#define DEBUG_PRINT(exp)        ((void)0)
+#endif  // defined(_DEBUG) && defined(_WIN32)
+
+namespace webrtc {
+
+namespace ModuleRTPUtility {
+
+/*
+ * Time routines.
+ */
+
+#if defined(_WIN32)
+
+struct reference_point {
+  FILETIME      file_time;
+  LARGE_INTEGER counterMS;
+};
+
+struct WindowsHelpTimer {
+  volatile LONG _timeInMs;
+  volatile LONG _numWrapTimeInMs;
+  reference_point _ref_point;
+
+  volatile LONG _sync_flag;
+};
+
+void Synchronize(WindowsHelpTimer* help_timer) {
+  const LONG start_value = 0;
+  const LONG new_value = 1;
+  const LONG synchronized_value = 2;
+
+  LONG compare_flag = new_value;
+  while (help_timer->_sync_flag == start_value) {
+    const LONG new_value = 1;
+    compare_flag = InterlockedCompareExchange(
+        &help_timer->_sync_flag, new_value, start_value);
+  }
+  if (compare_flag != start_value) {
+    // This thread was not the one that incremented the sync flag.
+    // Block until synchronization finishes.
+    while (compare_flag != synchronized_value) {
+      ::Sleep(0);
+    }
+    return;
+  }
+  // Only the synchronizing thread gets here so this part can be
+  // considered single threaded.
+
+  // set timer accuracy to 1 ms
+  timeBeginPeriod(1);
+  FILETIME    ft0 = { 0, 0 },
+              ft1 = { 0, 0 };
+  //
+  // Spin waiting for a change in system time. Get the matching
+  // performance counter value for that time.
+  //
+  ::GetSystemTimeAsFileTime(&ft0);
+  do {
+    ::GetSystemTimeAsFileTime(&ft1);
+
+    help_timer->_ref_point.counterMS.QuadPart = ::timeGetTime();
+    ::Sleep(0);
+  } while ((ft0.dwHighDateTime == ft1.dwHighDateTime) &&
+          (ft0.dwLowDateTime == ft1.dwLowDateTime));
+    help_timer->_ref_point.file_time = ft1;
+}
+
+void get_time(WindowsHelpTimer* help_timer, FILETIME& current_time) {
+  // we can't use query performance counter due to speed stepping
+  DWORD t = timeGetTime();
+  // NOTE: we have a missmatch in sign between _timeInMs(LONG) and
+  // (DWORD) however we only use it here without +- etc
+  volatile LONG* timeInMsPtr = &help_timer->_timeInMs;
+  // Make sure that we only inc wrapper once.
+  DWORD old = InterlockedExchange(timeInMsPtr, t);
+  if(old > t) {
+    // wrap
+    help_timer->_numWrapTimeInMs++;
+  }
+  LARGE_INTEGER elapsedMS;
+  elapsedMS.HighPart = help_timer->_numWrapTimeInMs;
+  elapsedMS.LowPart = t;
+
+  elapsedMS.QuadPart = elapsedMS.QuadPart -
+      help_timer->_ref_point.counterMS.QuadPart;
+
+  // Translate to 100-nanoseconds intervals (FILETIME resolution)
+  // and add to reference FILETIME to get current FILETIME.
+  ULARGE_INTEGER filetime_ref_as_ul;
+
+  filetime_ref_as_ul.HighPart =
+      help_timer->_ref_point.file_time.dwHighDateTime;
+  filetime_ref_as_ul.LowPart =
+      help_timer->_ref_point.file_time.dwLowDateTime;
+  filetime_ref_as_ul.QuadPart +=
+      (ULONGLONG)((elapsedMS.QuadPart)*1000*10);
+
+  // Copy to result
+  current_time.dwHighDateTime = filetime_ref_as_ul.HighPart;
+  current_time.dwLowDateTime = filetime_ref_as_ul.LowPart;
+  }
+
+  // A clock reading times from the Windows API.
+  class WindowsSystemClock : public RtpRtcpClock {
+  public:
+    WindowsSystemClock(WindowsHelpTimer* helpTimer)
+      : _helpTimer(helpTimer) {}
+
+    virtual ~WindowsSystemClock() {}
+
+    virtual WebRtc_Word64 GetTimeInMS();
+
+    virtual void CurrentNTP(WebRtc_UWord32& secs, WebRtc_UWord32& frac);
+
+  private:
+    WindowsHelpTimer* _helpTimer;
+};
+
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+
+// A clock reading times from the POSIX API.
+class UnixSystemClock : public RtpRtcpClock {
+public:
+  UnixSystemClock() {}
+  virtual ~UnixSystemClock() {}
+
+  virtual WebRtc_Word64 GetTimeInMS();
+
+  virtual void CurrentNTP(WebRtc_UWord32& secs, WebRtc_UWord32& frac);
+};
+#endif
+
+#if defined(_WIN32)
+WebRtc_Word64 WindowsSystemClock::GetTimeInMS() {
+  return TickTime::MillisecondTimestamp();
+}
+
+// Use the system time (roughly synchronised to the tick, and
+// extrapolated using the system performance counter.
+void WindowsSystemClock::CurrentNTP(WebRtc_UWord32& secs,
+                                    WebRtc_UWord32& frac) {
+  const WebRtc_UWord64 FILETIME_1970 = 0x019db1ded53e8000;
+
+  FILETIME StartTime;
+  WebRtc_UWord64 Time;
+  struct timeval tv;
+
+  // We can't use query performance counter since they can change depending on
+  // speed steping
+  get_time(_helpTimer, StartTime);
+
+  Time = (((WebRtc_UWord64) StartTime.dwHighDateTime) << 32) +
+         (WebRtc_UWord64) StartTime.dwLowDateTime;
+
+  // Convert the hecto-nano second time to tv format
+  Time -= FILETIME_1970;
+
+  tv.tv_sec = (WebRtc_UWord32)(Time / (WebRtc_UWord64)10000000);
+  tv.tv_usec = (WebRtc_UWord32)((Time % (WebRtc_UWord64)10000000) / 10);
+
+  double dtemp;
+
+  secs = tv.tv_sec + NTP_JAN_1970;
+  dtemp = tv.tv_usec / 1e6;
+
+  if (dtemp >= 1) {
+    dtemp -= 1;
+    secs++;
+  } else if (dtemp < -1) {
+    dtemp += 1;
+    secs--;
+  }
+  dtemp *= NTP_FRAC;
+  frac = (WebRtc_UWord32)dtemp;
+}
+
+#elif ((defined WEBRTC_LINUX) || (defined WEBRTC_MAC))
+
+WebRtc_Word64 UnixSystemClock::GetTimeInMS() {
+  return TickTime::MillisecondTimestamp();
+}
+
+// Use the system time.
+void UnixSystemClock::CurrentNTP(WebRtc_UWord32& secs, WebRtc_UWord32& frac) {
+  double dtemp;
+  struct timeval tv;
+  struct timezone tz;
+  tz.tz_minuteswest  = 0;
+  tz.tz_dsttime = 0;
+  gettimeofday(&tv, &tz);
+
+  secs = tv.tv_sec + NTP_JAN_1970;
+  dtemp = tv.tv_usec / 1e6;
+  if (dtemp >= 1) {
+    dtemp -= 1;
+    secs++;
+  } else if (dtemp < -1) {
+    dtemp += 1;
+    secs--;
+  }
+  dtemp *= NTP_FRAC;
+  frac = (WebRtc_UWord32)dtemp;
+}
+#endif
+
+#if defined(_WIN32)
+// Keeps the global state for the Windows implementation of RtpRtcpClock.
+// Note that this is a POD. Only PODs are allowed to have static storage
+// duration according to the Google Style guide.
+static WindowsHelpTimer global_help_timer = {0, 0, {{ 0, 0}, 0}, 0};
+#endif
+
+RtpRtcpClock* GetSystemClock() {
+#if defined(_WIN32)
+  return new WindowsSystemClock(&global_help_timer);
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+  return new UnixSystemClock();
+#else
+  return NULL;
+#endif
+}
+
+WebRtc_UWord32 GetCurrentRTP(RtpRtcpClock* clock, WebRtc_UWord32 freq) {
+  const bool use_global_clock = (clock == NULL);
+  RtpRtcpClock* local_clock = clock;
+  if (use_global_clock) {
+    local_clock = GetSystemClock();
+  }
+  WebRtc_UWord32 secs = 0, frac = 0;
+  local_clock->CurrentNTP(secs, frac);
+  if (use_global_clock) {
+    delete local_clock;
+  }
+  return ConvertNTPTimeToRTP(secs, frac, freq);
+}
+
+WebRtc_UWord32 ConvertNTPTimeToRTP(WebRtc_UWord32 NTPsec,
+                                   WebRtc_UWord32 NTPfrac,
+                                   WebRtc_UWord32 freq) {
+  float ftemp = (float)NTPfrac / (float)NTP_FRAC;
+  WebRtc_UWord32 tmp = (WebRtc_UWord32)(ftemp * freq);
+  return NTPsec * freq + tmp;
+}
+
+WebRtc_UWord32 ConvertNTPTimeToMS(WebRtc_UWord32 NTPsec,
+                                  WebRtc_UWord32 NTPfrac) {
+  int freq = 1000;
+  float ftemp = (float)NTPfrac / (float)NTP_FRAC;
+  WebRtc_UWord32 tmp = (WebRtc_UWord32)(ftemp * freq);
+  WebRtc_UWord32 MStime = NTPsec * freq + tmp;
+  return MStime;
+}
+
+bool OldTimestamp(uint32_t newTimestamp,
+                  uint32_t existingTimestamp,
+                  bool* wrapped) {
+  bool tmpWrapped =
+    (newTimestamp < 0x0000ffff && existingTimestamp > 0xffff0000) ||
+    (newTimestamp > 0xffff0000 && existingTimestamp < 0x0000ffff);
+  *wrapped = tmpWrapped;
+  if (existingTimestamp > newTimestamp && !tmpWrapped) {
+    return true;
+  } else if (existingTimestamp <= newTimestamp && !tmpWrapped) {
+    return false;
+  } else if (existingTimestamp < newTimestamp && tmpWrapped) {
+    return true;
+  } else {
+    return false;
+  }
+}
+
+/*
+ * Misc utility routines
+ */
+
+#if defined(_WIN32)
+bool StringCompare(const char* str1, const char* str2,
+                   const WebRtc_UWord32 length) {
+  return (_strnicmp(str1, str2, length) == 0) ? true : false;
+}
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+bool StringCompare(const char* str1, const char* str2,
+                   const WebRtc_UWord32 length) {
+  return (strncasecmp(str1, str2, length) == 0) ? true : false;
+}
+#endif
+
+#if !defined(WEBRTC_LITTLE_ENDIAN) && !defined(WEBRTC_BIG_ENDIAN)
+#error Either WEBRTC_LITTLE_ENDIAN or WEBRTC_BIG_ENDIAN must be defined
+#endif
+
+/* for RTP/RTCP
+    All integer fields are carried in network byte order, that is, most
+    significant byte (octet) first.  AKA big-endian.
+*/
+void AssignUWord32ToBuffer(WebRtc_UWord8* dataBuffer, WebRtc_UWord32 value) {
+#if defined(WEBRTC_LITTLE_ENDIAN)
+  dataBuffer[0] = static_cast<WebRtc_UWord8>(value >> 24);
+  dataBuffer[1] = static_cast<WebRtc_UWord8>(value >> 16);
+  dataBuffer[2] = static_cast<WebRtc_UWord8>(value >> 8);
+  dataBuffer[3] = static_cast<WebRtc_UWord8>(value);
+#else
+  WebRtc_UWord32* ptr = reinterpret_cast<WebRtc_UWord32*>(dataBuffer);
+  ptr[0] = value;
+#endif
+}
+
+void AssignUWord24ToBuffer(WebRtc_UWord8* dataBuffer, WebRtc_UWord32 value) {
+#if defined(WEBRTC_LITTLE_ENDIAN)
+  dataBuffer[0] = static_cast<WebRtc_UWord8>(value >> 16);
+  dataBuffer[1] = static_cast<WebRtc_UWord8>(value >> 8);
+  dataBuffer[2] = static_cast<WebRtc_UWord8>(value);
+#else
+  dataBuffer[0] = static_cast<WebRtc_UWord8>(value);
+  dataBuffer[1] = static_cast<WebRtc_UWord8>(value >> 8);
+  dataBuffer[2] = static_cast<WebRtc_UWord8>(value >> 16);
+#endif
+}
+
+void AssignUWord16ToBuffer(WebRtc_UWord8* dataBuffer, WebRtc_UWord16 value) {
+#if defined(WEBRTC_LITTLE_ENDIAN)
+  dataBuffer[0] = static_cast<WebRtc_UWord8>(value >> 8);
+  dataBuffer[1] = static_cast<WebRtc_UWord8>(value);
+#else
+  WebRtc_UWord16* ptr = reinterpret_cast<WebRtc_UWord16*>(dataBuffer);
+  ptr[0] = value;
+#endif
+}
+
+WebRtc_UWord16 BufferToUWord16(const WebRtc_UWord8* dataBuffer) {
+#if defined(WEBRTC_LITTLE_ENDIAN)
+  return (dataBuffer[0] << 8) + dataBuffer[1];
+#else
+  return *reinterpret_cast<const WebRtc_UWord16*>(dataBuffer);
+#endif
+}
+
+WebRtc_UWord32 BufferToUWord24(const WebRtc_UWord8* dataBuffer) {
+  return (dataBuffer[0] << 16) + (dataBuffer[1] << 8) + dataBuffer[2];
+}
+
+WebRtc_UWord32 BufferToUWord32(const WebRtc_UWord8* dataBuffer) {
+#if defined(WEBRTC_LITTLE_ENDIAN)
+  return (dataBuffer[0] << 24) + (dataBuffer[1] << 16) + (dataBuffer[2] << 8) +
+      dataBuffer[3];
+#else
+  return *reinterpret_cast<const WebRtc_UWord32*>(dataBuffer);
+#endif
+}
+
+WebRtc_UWord32 pow2(WebRtc_UWord8 exp) {
+  return 1 << exp;
+}
+
+void RTPPayload::SetType(RtpVideoCodecTypes videoType) {
+  type = videoType;
+
+  switch (type) {
+    case kRtpNoVideo:
+      break;
+    case kRtpVp8Video: {
+      info.VP8.nonReferenceFrame = false;
+      info.VP8.beginningOfPartition = false;
+      info.VP8.partitionID = 0;
+      info.VP8.hasPictureID = false;
+      info.VP8.hasTl0PicIdx = false;
+      info.VP8.hasTID = false;
+      info.VP8.hasKeyIdx = false;
+      info.VP8.pictureID = -1;
+      info.VP8.tl0PicIdx = -1;
+      info.VP8.tID = -1;
+      info.VP8.layerSync = false;
+      info.VP8.frameWidth = 0;
+      info.VP8.frameHeight = 0;
+      break;
+    }
+    default:
+      break;
+  }
+}
+
+RTPHeaderParser::RTPHeaderParser(const WebRtc_UWord8* rtpData,
+                                 const WebRtc_UWord32 rtpDataLength)
+  : _ptrRTPDataBegin(rtpData),
+    _ptrRTPDataEnd(rtpData ? (rtpData + rtpDataLength) : NULL) {
+}
+
+RTPHeaderParser::~RTPHeaderParser() {
+}
+
+bool RTPHeaderParser::RTCP() const {
+  // 72 to 76 is reserved for RTP
+  // 77 to 79 is not reserver but  they are not assigned we will block them
+  // for RTCP 200 SR  == marker bit + 72
+  // for RTCP 204 APP == marker bit + 76
+  /*
+  *       RTCP
+  *
+  * FIR      full INTRA-frame request             192     [RFC2032]   supported
+  * NACK     negative acknowledgement             193     [RFC2032]
+  * IJ       Extended inter-arrival jitter report 195     [RFC-ietf-avt-rtp-toff
+  * set-07.txt] http://tools.ietf.org/html/draft-ietf-avt-rtp-toffset-07
+  * SR       sender report                        200     [RFC3551]   supported
+  * RR       receiver report                      201     [RFC3551]   supported
+  * SDES     source description                   202     [RFC3551]   supported
+  * BYE      goodbye                              203     [RFC3551]   supported
+  * APP      application-defined                  204     [RFC3551]   ignored
+  * RTPFB    Transport layer FB message           205     [RFC4585]   supported
+  * PSFB     Payload-specific FB message          206     [RFC4585]   supported
+  * XR       extended report                      207     [RFC3611]   supported
+  */
+
+  /* 205       RFC 5104
+   * FMT 1      NACK       supported
+   * FMT 2      reserved
+   * FMT 3      TMMBR      supported
+   * FMT 4      TMMBN      supported
+   */
+
+  /* 206      RFC 5104
+  * FMT 1:     Picture Loss Indication (PLI)                      supported
+  * FMT 2:     Slice Lost Indication (SLI)
+  * FMT 3:     Reference Picture Selection Indication (RPSI)
+  * FMT 4:     Full Intra Request (FIR) Command                   supported
+  * FMT 5:     Temporal-Spatial Trade-off Request (TSTR)
+  * FMT 6:     Temporal-Spatial Trade-off Notification (TSTN)
+  * FMT 7:     Video Back Channel Message (VBCM)
+  * FMT 15:    Application layer FB message
+  */
+
+  const WebRtc_UWord8  payloadType = _ptrRTPDataBegin[1];
+
+  bool RTCP = false;
+
+  // check if this is a RTCP packet
+  switch (payloadType) {
+    case 192:
+      RTCP = true;
+      break;
+    case 193:
+      // not supported
+      // pass through and check for a potential RTP packet
+      break;
+    case 195:
+    case 200:
+    case 201:
+    case 202:
+    case 203:
+    case 204:
+    case 205:
+    case 206:
+    case 207:
+      RTCP = true;
+      break;
+  }
+  return RTCP;
+}
+
+bool RTPHeaderParser::Parse(WebRtcRTPHeader& parsedPacket,
+                            RtpHeaderExtensionMap* ptrExtensionMap) const {
+  const ptrdiff_t length = _ptrRTPDataEnd - _ptrRTPDataBegin;
+
+  if (length < 12) {
+    return false;
+  }
+
+  // Version
+  const WebRtc_UWord8 V  = _ptrRTPDataBegin[0] >> 6;
+  // Padding
+  const bool          P  = ((_ptrRTPDataBegin[0] & 0x20) == 0) ? false : true;
+  // eXtension
+  const bool          X  = ((_ptrRTPDataBegin[0] & 0x10) == 0) ? false : true;
+  const WebRtc_UWord8 CC = _ptrRTPDataBegin[0] & 0x0f;
+  const bool          M  = ((_ptrRTPDataBegin[1] & 0x80) == 0) ? false : true;
+
+  const WebRtc_UWord8 PT = _ptrRTPDataBegin[1] & 0x7f;
+
+  const WebRtc_UWord16 sequenceNumber = (_ptrRTPDataBegin[2] << 8) +
+      _ptrRTPDataBegin[3];
+
+  const WebRtc_UWord8* ptr = &_ptrRTPDataBegin[4];
+
+  WebRtc_UWord32 RTPTimestamp = *ptr++ << 24;
+  RTPTimestamp += *ptr++ << 16;
+  RTPTimestamp += *ptr++ << 8;
+  RTPTimestamp += *ptr++;
+
+  WebRtc_UWord32 SSRC = *ptr++ << 24;
+  SSRC += *ptr++ << 16;
+  SSRC += *ptr++ << 8;
+  SSRC += *ptr++;
+
+  if (V != 2) {
+    return false;
+  }
+
+  const WebRtc_UWord8 CSRCocts = CC * 4;
+
+  if ((ptr + CSRCocts) > _ptrRTPDataEnd) {
+    return false;
+  }
+
+  parsedPacket.header.markerBit      = M;
+  parsedPacket.header.payloadType    = PT;
+  parsedPacket.header.sequenceNumber = sequenceNumber;
+  parsedPacket.header.timestamp      = RTPTimestamp;
+  parsedPacket.header.ssrc           = SSRC;
+  parsedPacket.header.numCSRCs       = CC;
+  parsedPacket.header.paddingLength  = P ? *(_ptrRTPDataEnd - 1) : 0;
+
+  for (unsigned int i = 0; i < CC; ++i) {
+    WebRtc_UWord32 CSRC = *ptr++ << 24;
+    CSRC += *ptr++ << 16;
+    CSRC += *ptr++ << 8;
+    CSRC += *ptr++;
+    parsedPacket.header.arrOfCSRCs[i] = CSRC;
+  }
+  parsedPacket.type.Audio.numEnergy = parsedPacket.header.numCSRCs;
+
+  parsedPacket.header.headerLength   = 12 + CSRCocts;
+
+  // If in effect, MAY be omitted for those packets for which the offset
+  // is zero.
+  parsedPacket.extension.transmissionTimeOffset = 0;
+
+  if (X) {
+    /* RTP header extension, RFC 3550.
+     0                   1                   2                   3
+     0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    |      defined by profile       |           length              |
+    +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+    |                        header extension                       |
+    |                             ....                              |
+    */
+    const ptrdiff_t remain = _ptrRTPDataEnd - ptr;
+    if (remain < 4) {
+      return false;
+    }
+
+    parsedPacket.header.headerLength += 4;
+
+    WebRtc_UWord16 definedByProfile = *ptr++ << 8;
+    definedByProfile += *ptr++;
+
+    WebRtc_UWord16 XLen = *ptr++ << 8;
+    XLen += *ptr++; // in 32 bit words
+    XLen *= 4; // in octs
+
+    if (remain < (4 + XLen)) {
+      return false;
+    }
+    if (definedByProfile == RTP_ONE_BYTE_HEADER_EXTENSION) {
+      const WebRtc_UWord8* ptrRTPDataExtensionEnd = ptr + XLen;
+      ParseOneByteExtensionHeader(parsedPacket,
+                                  ptrExtensionMap,
+                                  ptrRTPDataExtensionEnd,
+                                  ptr);
+    }
+    parsedPacket.header.headerLength += XLen;
+  }
+  return true;
+}
+
+void RTPHeaderParser::ParseOneByteExtensionHeader(
+    WebRtcRTPHeader& parsedPacket,
+    const RtpHeaderExtensionMap* ptrExtensionMap,
+    const WebRtc_UWord8* ptrRTPDataExtensionEnd,
+    const WebRtc_UWord8* ptr) const {
+  if (!ptrExtensionMap) {
+    return;
+  }
+
+  while (ptrRTPDataExtensionEnd - ptr > 0) {
+    //  0
+    //  0 1 2 3 4 5 6 7
+    // +-+-+-+-+-+-+-+-+
+    // |  ID   |  len  |
+    // +-+-+-+-+-+-+-+-+
+
+    const WebRtc_UWord8 id = (*ptr & 0xf0) >> 4;
+    const WebRtc_UWord8 len = (*ptr & 0x0f);
+    ptr++;
+
+    if (id == 15) {
+      WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
+                   "Ext id: 15 encountered, parsing terminated.");
+      return;
+    }
+
+    RTPExtensionType type;
+    if (ptrExtensionMap->GetType(id, &type) != 0) {
+      WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
+                   "Failed to find extension id: %d", id);
+      return;
+    }
+
+    switch (type) {
+      case kRtpExtensionTransmissionTimeOffset: {
+        if (len != 2) {
+          WEBRTC_TRACE(kTraceWarning, kTraceRtpRtcp, -1,
+                       "Incorrect transmission time offset len: %d", len);
+          return;
+        }
+        //  0                   1                   2                   3
+        //  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+        // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+        // |  ID   | len=2 |              transmission offset              |
+        // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+
+        WebRtc_Word32 transmissionTimeOffset = *ptr++ << 16;
+        transmissionTimeOffset += *ptr++ << 8;
+        transmissionTimeOffset += *ptr++;
+        parsedPacket.extension.transmissionTimeOffset = transmissionTimeOffset;
+        if (transmissionTimeOffset & 0x800000) {
+          // Negative offset, correct sign for Word24 to Word32.
+          parsedPacket.extension.transmissionTimeOffset |= 0xFF000000;
+        }
+        break;
+      }
+      case kRtpExtensionAudioLevel: {
+        //   --- Only used for debugging ---
+        //  0                   1                   2                   3
+        //  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+        // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+        // |  ID   | len=0 |V|   level     |      0x00     |      0x00     |
+        // +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+        //
+
+        // Parse out the fields but only use it for debugging for now.
+        // const WebRtc_UWord8 V = (*ptr & 0x80) >> 7;
+        // const WebRtc_UWord8 level = (*ptr & 0x7f);
+        // DEBUG_PRINT("RTP_AUDIO_LEVEL_UNIQUE_ID: ID=%u, len=%u, V=%u,
+        // level=%u", ID, len, V, level);
+        break;
+      }
+      default: {
+        WEBRTC_TRACE(kTraceStream, kTraceRtpRtcp, -1,
+                     "Extension type not implemented.");
+        return;
+      }
+    }
+    WebRtc_UWord8 num_bytes = ParsePaddingBytes(ptrRTPDataExtensionEnd, ptr);
+    ptr += num_bytes;
+  }
+}
+
+WebRtc_UWord8 RTPHeaderParser::ParsePaddingBytes(
+  const WebRtc_UWord8* ptrRTPDataExtensionEnd,
+  const WebRtc_UWord8* ptr) const {
+
+  WebRtc_UWord8 num_zero_bytes = 0;
+  while (ptrRTPDataExtensionEnd - ptr > 0) {
+    if (*ptr != 0) {
+      return num_zero_bytes;
+    }
+    ptr++;
+    num_zero_bytes++;
+  }
+  return num_zero_bytes;
+}
+
+// RTP payload parser
+RTPPayloadParser::RTPPayloadParser(const RtpVideoCodecTypes videoType,
+                                   const WebRtc_UWord8* payloadData,
+                                   WebRtc_UWord16 payloadDataLength,
+                                   WebRtc_Word32 id)
+  :
+  _id(id),
+  _dataPtr(payloadData),
+  _dataLength(payloadDataLength),
+  _videoType(videoType) {
+}
+
+RTPPayloadParser::~RTPPayloadParser() {
+}
+
+bool RTPPayloadParser::Parse(RTPPayload& parsedPacket) const {
+  parsedPacket.SetType(_videoType);
+
+  switch (_videoType) {
+    case kRtpNoVideo:
+      return ParseGeneric(parsedPacket);
+    case kRtpVp8Video:
+      return ParseVP8(parsedPacket);
+    default:
+      return false;
+  }
+}
+
+bool RTPPayloadParser::ParseGeneric(RTPPayload& /*parsedPacket*/) const {
+  return false;
+}
+
+//
+// VP8 format:
+//
+// Payload descriptor
+//       0 1 2 3 4 5 6 7
+//      +-+-+-+-+-+-+-+-+
+//      |X|R|N|S|PartID | (REQUIRED)
+//      +-+-+-+-+-+-+-+-+
+// X:   |I|L|T|K|  RSV  | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+// I:   |   PictureID   | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+// L:   |   TL0PICIDX   | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+// T/K: |TID:Y| KEYIDX  | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+//
+// Payload header (considered part of the actual payload, sent to decoder)
+//       0 1 2 3 4 5 6 7
+//      +-+-+-+-+-+-+-+-+
+//      |Size0|H| VER |P|
+//      +-+-+-+-+-+-+-+-+
+//      |      ...      |
+//      +               +
+
+bool RTPPayloadParser::ParseVP8(RTPPayload& parsedPacket) const {
+  RTPPayloadVP8* vp8 = &parsedPacket.info.VP8;
+  const WebRtc_UWord8* dataPtr = _dataPtr;
+  int dataLength = _dataLength;
+
+  // Parse mandatory first byte of payload descriptor
+  bool extension = (*dataPtr & 0x80) ? true : false;            // X bit
+  vp8->nonReferenceFrame = (*dataPtr & 0x20) ? true : false;    // N bit
+  vp8->beginningOfPartition = (*dataPtr & 0x10) ? true : false; // S bit
+  vp8->partitionID = (*dataPtr & 0x0F);          // PartID field
+
+  if (vp8->partitionID > 8) {
+    // Weak check for corrupt data: PartID MUST NOT be larger than 8.
+    return false;
+  }
+
+  // Advance dataPtr and decrease remaining payload size
+  dataPtr++;
+  dataLength--;
+
+  if (extension) {
+    const int parsedBytes = ParseVP8Extension(vp8, dataPtr, dataLength);
+    if (parsedBytes < 0) return false;
+    dataPtr += parsedBytes;
+    dataLength -= parsedBytes;
+  }
+
+  if (dataLength <= 0) {
+    WEBRTC_TRACE(kTraceError, kTraceRtpRtcp, _id,
+                 "Error parsing VP8 payload descriptor; payload too short");
+    return false;
+  }
+
+  // Read P bit from payload header (only at beginning of first partition)
+  if (dataLength > 0 && vp8->beginningOfPartition && vp8->partitionID == 0) {
+    parsedPacket.frameType = (*dataPtr & 0x01) ? kPFrame : kIFrame;
+  } else {
+    parsedPacket.frameType = kPFrame;
+  }
+  if (0 != ParseVP8FrameSize(parsedPacket, dataPtr, dataLength)) {
+    return false;
+  }
+  parsedPacket.info.VP8.data       = dataPtr;
+  parsedPacket.info.VP8.dataLength = dataLength;
+  return true;
+}
+
+int RTPPayloadParser::ParseVP8FrameSize(RTPPayload& parsedPacket,
+                                        const WebRtc_UWord8* dataPtr,
+                                        int dataLength) const {
+  if (parsedPacket.frameType != kIFrame) {
+    // Included in payload header for I-frames.
+    return 0;
+  }
+  if (dataLength < 10) {
+    // For an I-frame we should always have the uncompressed VP8 header
+    // in the beginning of the partition.
+    return -1;
+  }
+  RTPPayloadVP8* vp8 = &parsedPacket.info.VP8;
+  vp8->frameWidth = ((dataPtr[7] << 8) + dataPtr[6]) & 0x3FFF;
+  vp8->frameHeight = ((dataPtr[9] << 8) + dataPtr[8]) & 0x3FFF;
+  return 0;
+}
+
+int RTPPayloadParser::ParseVP8Extension(RTPPayloadVP8* vp8,
+                                        const WebRtc_UWord8* dataPtr,
+                                        int dataLength) const {
+  int parsedBytes = 0;
+  if (dataLength <= 0) return -1;
+  // Optional X field is present
+  vp8->hasPictureID = (*dataPtr & 0x80) ? true : false; // I bit
+  vp8->hasTl0PicIdx = (*dataPtr & 0x40) ? true : false; // L bit
+  vp8->hasTID = (*dataPtr & 0x20) ? true : false;       // T bit
+  vp8->hasKeyIdx = (*dataPtr & 0x10) ? true : false;    // K bit
+
+  // Advance dataPtr and decrease remaining payload size
+  dataPtr++;
+  parsedBytes++;
+  dataLength--;
+
+  if (vp8->hasPictureID) {
+    if (ParseVP8PictureID(vp8, &dataPtr, &dataLength, &parsedBytes) != 0) {
+      return -1;
+    }
+  }
+
+  if (vp8->hasTl0PicIdx) {
+    if (ParseVP8Tl0PicIdx(vp8, &dataPtr, &dataLength, &parsedBytes) != 0) {
+      return -1;
+    }
+  }
+
+  if (vp8->hasTID || vp8->hasKeyIdx) {
+    if (ParseVP8TIDAndKeyIdx(vp8, &dataPtr, &dataLength, &parsedBytes) != 0) {
+      return -1;
+    }
+  }
+  return parsedBytes;
+}
+
+int RTPPayloadParser::ParseVP8PictureID(RTPPayloadVP8* vp8,
+                                        const WebRtc_UWord8** dataPtr,
+                                        int* dataLength,
+                                        int* parsedBytes) const {
+  if (*dataLength <= 0) return -1;
+  vp8->pictureID = (**dataPtr & 0x7F);
+  if (**dataPtr & 0x80) {
+    (*dataPtr)++;
+    (*parsedBytes)++;
+    if (--(*dataLength) <= 0) return -1;
+    // PictureID is 15 bits
+    vp8->pictureID = (vp8->pictureID << 8) +** dataPtr;
+  }
+  (*dataPtr)++;
+  (*parsedBytes)++;
+  (*dataLength)--;
+  return 0;
+}
+
+int RTPPayloadParser::ParseVP8Tl0PicIdx(RTPPayloadVP8* vp8,
+                                        const WebRtc_UWord8** dataPtr,
+                                        int* dataLength,
+                                        int* parsedBytes) const {
+  if (*dataLength <= 0) return -1;
+  vp8->tl0PicIdx = **dataPtr;
+  (*dataPtr)++;
+  (*parsedBytes)++;
+  (*dataLength)--;
+  return 0;
+}
+
+int RTPPayloadParser::ParseVP8TIDAndKeyIdx(RTPPayloadVP8* vp8,
+                                           const WebRtc_UWord8** dataPtr,
+                                           int* dataLength,
+                                           int* parsedBytes) const {
+  if (*dataLength <= 0) return -1;
+  if (vp8->hasTID) {
+    vp8->tID = ((**dataPtr >> 6) & 0x03);
+    vp8->layerSync = (**dataPtr & 0x20) ? true : false;  // Y bit
+  }
+  if (vp8->hasKeyIdx) {
+    vp8->keyIdx = (**dataPtr & 0x1F);
+  }
+  (*dataPtr)++;
+  (*parsedBytes)++;
+  (*dataLength)--;
+  return 0;
+}
+
+}  // namespace ModuleRTPUtility
+
+}  // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/rtp_utility.h b/src/modules/rtp_rtcp/source/rtp_utility.h
new file mode 100644
index 0000000..20e7af1
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_utility.h
@@ -0,0 +1,243 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_UTILITY_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_UTILITY_H_
+
+#include <cstddef> // size_t, ptrdiff_t
+
+#include "typedefs.h"
+#include "rtp_header_extension.h"
+#include "rtp_rtcp_config.h"
+#include "rtp_rtcp_defines.h"
+
+namespace webrtc {
+enum RtpVideoCodecTypes
+{
+    kRtpNoVideo       = 0,
+    kRtpFecVideo      = 10,
+    kRtpVp8Video      = 11
+};
+
+const WebRtc_UWord8 kRtpMarkerBitMask = 0x80;
+
+namespace ModuleRTPUtility
+{
+    // January 1970, in NTP seconds.
+    const uint32_t NTP_JAN_1970 = 2208988800UL;
+
+    // Magic NTP fractional unit.
+    const double NTP_FRAC = 4.294967296E+9;
+
+    struct AudioPayload
+    {
+        WebRtc_UWord32    frequency;
+        WebRtc_UWord8     channels;
+        WebRtc_UWord32    rate;
+    };
+    struct VideoPayload
+    {
+        RtpVideoCodecTypes   videoCodecType;
+        WebRtc_UWord32       maxRate;
+    };
+    union PayloadUnion
+    {
+        AudioPayload Audio;
+        VideoPayload Video;
+    };
+    struct Payload
+    {
+        char name[RTP_PAYLOAD_NAME_SIZE];
+        bool audio;
+        PayloadUnion typeSpecific;
+    };
+
+    // Return a clock that reads the time as reported by the operating
+    // system. The returned instances are guaranteed to read the same
+    // times; in particular, they return relative times relative to
+    // the same base.
+    // Note that even though the instances returned by this function
+    // read the same times a new object is created every time this
+    // API is called. The ownership of this object belongs to the
+    // caller.
+    RtpRtcpClock* GetSystemClock();
+
+    // Return the current RTP timestamp from the NTP timestamp
+    // returned by the specified clock.
+    WebRtc_UWord32 GetCurrentRTP(RtpRtcpClock* clock, WebRtc_UWord32 freq);
+
+    // Return the current RTP absolute timestamp.
+    WebRtc_UWord32 ConvertNTPTimeToRTP(WebRtc_UWord32 NTPsec,
+                                       WebRtc_UWord32 NTPfrac,
+                                       WebRtc_UWord32 freq);
+
+    // Return the time in milliseconds corresponding to the specified
+    // NTP timestamp.
+    WebRtc_UWord32 ConvertNTPTimeToMS(WebRtc_UWord32 NTPsec,
+                                      WebRtc_UWord32 NTPfrac);
+
+    WebRtc_UWord32 pow2(WebRtc_UWord8 exp);
+
+    // Returns true if |newTimestamp| is older than |existingTimestamp|.
+    // |wrapped| will be set to true if there has been a wraparound between the
+    // two timestamps.
+    bool OldTimestamp(uint32_t newTimestamp,
+                      uint32_t existingTimestamp,
+                      bool* wrapped);
+
+    bool StringCompare(const char* str1,
+                       const char* str2,
+                       const WebRtc_UWord32 length);
+
+    void AssignUWord32ToBuffer(WebRtc_UWord8* dataBuffer, WebRtc_UWord32 value);
+    void AssignUWord24ToBuffer(WebRtc_UWord8* dataBuffer, WebRtc_UWord32 value);
+    void AssignUWord16ToBuffer(WebRtc_UWord8* dataBuffer, WebRtc_UWord16 value);
+
+    /**
+     * Converts a network-ordered two-byte input buffer to a host-ordered value.
+     * \param[in] dataBuffer Network-ordered two-byte buffer to convert.
+     * \return Host-ordered value.
+     */
+    WebRtc_UWord16 BufferToUWord16(const WebRtc_UWord8* dataBuffer);
+
+    /**
+     * Converts a network-ordered three-byte input buffer to a host-ordered value.
+     * \param[in] dataBuffer Network-ordered three-byte buffer to convert.
+     * \return Host-ordered value.
+     */
+    WebRtc_UWord32 BufferToUWord24(const WebRtc_UWord8* dataBuffer);
+
+    /**
+     * Converts a network-ordered four-byte input buffer to a host-ordered value.
+     * \param[in] dataBuffer Network-ordered four-byte buffer to convert.
+     * \return Host-ordered value.
+     */
+    WebRtc_UWord32 BufferToUWord32(const WebRtc_UWord8* dataBuffer);
+
+    class RTPHeaderParser
+    {
+    public:
+        RTPHeaderParser(const WebRtc_UWord8* rtpData,
+                        const WebRtc_UWord32 rtpDataLength);
+        ~RTPHeaderParser();
+
+        bool RTCP() const;
+        bool Parse(WebRtcRTPHeader& parsedPacket,
+                   RtpHeaderExtensionMap* ptrExtensionMap = NULL) const;
+
+    private:
+        void ParseOneByteExtensionHeader(
+            WebRtcRTPHeader& parsedPacket,
+            const RtpHeaderExtensionMap* ptrExtensionMap,
+            const WebRtc_UWord8* ptrRTPDataExtensionEnd,
+            const WebRtc_UWord8* ptr) const;
+
+        WebRtc_UWord8 ParsePaddingBytes(
+            const WebRtc_UWord8* ptrRTPDataExtensionEnd,
+            const WebRtc_UWord8* ptr) const;
+
+        const WebRtc_UWord8* const _ptrRTPDataBegin;
+        const WebRtc_UWord8* const _ptrRTPDataEnd;
+    };
+
+    enum FrameTypes
+    {
+        kIFrame,    // key frame
+        kPFrame         // Delta frame
+    };
+
+    struct RTPPayloadVP8
+    {
+        bool                 nonReferenceFrame;
+        bool                 beginningOfPartition;
+        int                  partitionID;
+        bool                 hasPictureID;
+        bool                 hasTl0PicIdx;
+        bool                 hasTID;
+        bool                 hasKeyIdx;
+        int                  pictureID;
+        int                  tl0PicIdx;
+        int                  tID;
+        bool                 layerSync;
+        int                  keyIdx;
+        int                  frameWidth;
+        int                  frameHeight;
+
+        const WebRtc_UWord8*   data; 
+        WebRtc_UWord16         dataLength;
+    };
+
+    union RTPPayloadUnion
+    {
+        RTPPayloadVP8   VP8;
+    };
+
+    struct RTPPayload
+    {
+        void SetType(RtpVideoCodecTypes videoType);
+
+        RtpVideoCodecTypes  type;
+        FrameTypes          frameType;
+        RTPPayloadUnion     info;
+    };
+
+    // RTP payload parser
+    class RTPPayloadParser
+    {
+    public:
+        RTPPayloadParser(const RtpVideoCodecTypes payloadType,
+                         const WebRtc_UWord8* payloadData,
+                         const WebRtc_UWord16 payloadDataLength, // Length w/o padding.
+                         const WebRtc_Word32 id);
+
+        ~RTPPayloadParser();
+
+        bool Parse(RTPPayload& parsedPacket) const;
+
+    private:
+        bool ParseGeneric(RTPPayload& parsedPacket) const;
+
+        bool ParseVP8(RTPPayload& parsedPacket) const;
+
+        int ParseVP8Extension(RTPPayloadVP8 *vp8,
+                              const WebRtc_UWord8 *dataPtr,
+                              int dataLength) const;
+
+        int ParseVP8PictureID(RTPPayloadVP8 *vp8,
+                              const WebRtc_UWord8 **dataPtr,
+                              int *dataLength,
+                              int *parsedBytes) const;
+
+        int ParseVP8Tl0PicIdx(RTPPayloadVP8 *vp8,
+                              const WebRtc_UWord8 **dataPtr,
+                              int *dataLength,
+                              int *parsedBytes) const;
+
+        int ParseVP8TIDAndKeyIdx(RTPPayloadVP8 *vp8,
+                                 const WebRtc_UWord8 **dataPtr,
+                                 int *dataLength,
+                                 int *parsedBytes) const;
+
+        int ParseVP8FrameSize(RTPPayload& parsedPacket,
+                              const WebRtc_UWord8 *dataPtr,
+                              int dataLength) const;
+
+    private:
+        WebRtc_Word32               _id;
+        const WebRtc_UWord8*        _dataPtr;
+        const WebRtc_UWord16        _dataLength;
+        const RtpVideoCodecTypes    _videoType;
+    };
+
+}  // namespace ModuleRTPUtility
+
+}  // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_RTP_UTILITY_H_
diff --git a/src/modules/rtp_rtcp/source/rtp_utility_unittest.cc b/src/modules/rtp_rtcp/source/rtp_utility_unittest.cc
new file mode 100644
index 0000000..eabc812
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/rtp_utility_unittest.cc
@@ -0,0 +1,288 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+/*
+ * This file conatins unit tests for the ModuleRTPUtility.
+ */
+
+#include "gtest/gtest.h"
+#include "modules/rtp_rtcp/source/rtp_format_vp8.h"
+#include "modules/rtp_rtcp/source/rtp_utility.h"
+#include "typedefs.h"  // NOLINT(build/include)
+
+namespace webrtc {
+
+using ModuleRTPUtility::RTPPayloadParser;
+using ModuleRTPUtility::RTPPayload;
+using ModuleRTPUtility::RTPPayloadVP8;
+
+// Payload descriptor
+//       0 1 2 3 4 5 6 7
+//      +-+-+-+-+-+-+-+-+
+//      |X|R|N|S|PartID | (REQUIRED)
+//      +-+-+-+-+-+-+-+-+
+// X:   |I|L|T|K|  RSV  | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+// I:   |   PictureID   | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+// L:   |   TL0PICIDX   | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+// T/K: |TID:Y| KEYIDX  | (OPTIONAL)
+//      +-+-+-+-+-+-+-+-+
+//
+// Payload header
+//       0 1 2 3 4 5 6 7
+//      +-+-+-+-+-+-+-+-+
+//      |Size0|H| VER |P|
+//      +-+-+-+-+-+-+-+-+
+//      |     Size1     |
+//      +-+-+-+-+-+-+-+-+
+//      |     Size2     |
+//      +-+-+-+-+-+-+-+-+
+//      | Bytes 4..N of |
+//      | VP8 payload   |
+//      :               :
+//      +-+-+-+-+-+-+-+-+
+//      | OPTIONAL RTP  |
+//      | padding       |
+//      :               :
+//      +-+-+-+-+-+-+-+-+
+
+void VerifyBasicHeader(const RTPPayloadVP8 &header,
+                       bool N, bool S, int PartID) {
+  EXPECT_EQ(N, header.nonReferenceFrame);
+  EXPECT_EQ(S, header.beginningOfPartition);
+  EXPECT_EQ(PartID, header.partitionID);
+}
+
+void VerifyExtensions(const RTPPayloadVP8 &header,
+                      bool I, bool L, bool T, bool K) {
+  EXPECT_EQ(I, header.hasPictureID);
+  EXPECT_EQ(L, header.hasTl0PicIdx);
+  EXPECT_EQ(T, header.hasTID);
+  EXPECT_EQ(K, header.hasKeyIdx);
+}
+
+TEST(ParseVP8Test, BasicHeader) {
+  WebRtc_UWord8 payload[4] = {0};
+  payload[0] = 0x14;  // Binary 0001 0100; S = 1, PartID = 4.
+  payload[1] = 0x01;  // P frame.
+
+  RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 4, 0);
+
+  RTPPayload parsedPacket;
+  ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
+
+  EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
+  EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
+
+  VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 1 /*S*/, 4 /*PartID*/);
+  VerifyExtensions(parsedPacket.info.VP8, 0 /*I*/, 0 /*L*/, 0 /*T*/, 0 /*K*/);
+
+  EXPECT_EQ(payload + 1, parsedPacket.info.VP8.data);
+  EXPECT_EQ(4 - 1, parsedPacket.info.VP8.dataLength);
+}
+
+TEST(ParseVP8Test, PictureID) {
+  WebRtc_UWord8 payload[10] = {0};
+  payload[0] = 0xA0;
+  payload[1] = 0x80;
+  payload[2] = 17;
+
+  RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 10, 0);
+
+  RTPPayload parsedPacket;
+  ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
+
+  EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
+  EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
+
+  VerifyBasicHeader(parsedPacket.info.VP8, 1 /*N*/, 0 /*S*/, 0 /*PartID*/);
+  VerifyExtensions(parsedPacket.info.VP8, 1 /*I*/, 0 /*L*/, 0 /*T*/, 0 /*K*/);
+
+  EXPECT_EQ(17, parsedPacket.info.VP8.pictureID);
+
+  EXPECT_EQ(payload + 3, parsedPacket.info.VP8.data);
+  EXPECT_EQ(10 - 3, parsedPacket.info.VP8.dataLength);
+
+
+  // Re-use payload, but change to long PictureID.
+  payload[2] = 0x80 | 17;
+  payload[3] = 17;
+  RTPPayloadParser rtpPayloadParser2(kRtpVp8Video, payload, 10, 0);
+
+  ASSERT_TRUE(rtpPayloadParser2.Parse(parsedPacket));
+
+  VerifyBasicHeader(parsedPacket.info.VP8, 1 /*N*/, 0 /*S*/, 0 /*PartID*/);
+  VerifyExtensions(parsedPacket.info.VP8, 1 /*I*/, 0 /*L*/, 0 /*T*/, 0 /*K*/);
+
+  EXPECT_EQ((17<<8) + 17, parsedPacket.info.VP8.pictureID);
+
+  EXPECT_EQ(payload + 4, parsedPacket.info.VP8.data);
+  EXPECT_EQ(10 - 4, parsedPacket.info.VP8.dataLength);
+}
+
+TEST(ParseVP8Test, Tl0PicIdx) {
+  WebRtc_UWord8 payload[13] = {0};
+  payload[0] = 0x90;
+  payload[1] = 0x40;
+  payload[2] = 17;
+
+  RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 13, 0);
+
+  RTPPayload parsedPacket;
+  ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
+
+  EXPECT_EQ(ModuleRTPUtility::kIFrame, parsedPacket.frameType);
+  EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
+
+  VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 1 /*S*/, 0 /*PartID*/);
+  VerifyExtensions(parsedPacket.info.VP8, 0 /*I*/, 1 /*L*/, 0 /*T*/, 0 /*K*/);
+
+  EXPECT_EQ(17, parsedPacket.info.VP8.tl0PicIdx);
+
+  EXPECT_EQ(payload + 3, parsedPacket.info.VP8.data);
+  EXPECT_EQ(13 - 3, parsedPacket.info.VP8.dataLength);
+}
+
+TEST(ParseVP8Test, TIDAndLayerSync) {
+  WebRtc_UWord8 payload[10] = {0};
+  payload[0] = 0x88;
+  payload[1] = 0x20;
+  payload[2] = 0x80;  // TID(2) + LayerSync(false)
+
+  RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 10, 0);
+
+  RTPPayload parsedPacket;
+  ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
+
+  EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
+  EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
+
+  VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 0 /*S*/, 8 /*PartID*/);
+  VerifyExtensions(parsedPacket.info.VP8, 0 /*I*/, 0 /*L*/, 1 /*T*/, 0 /*K*/);
+
+  EXPECT_EQ(2, parsedPacket.info.VP8.tID);
+  EXPECT_FALSE(parsedPacket.info.VP8.layerSync);
+
+  EXPECT_EQ(payload + 3, parsedPacket.info.VP8.data);
+  EXPECT_EQ(10 - 3, parsedPacket.info.VP8.dataLength);
+}
+
+TEST(ParseVP8Test, KeyIdx) {
+  WebRtc_UWord8 payload[10] = {0};
+  payload[0] = 0x88;
+  payload[1] = 0x10;  // K = 1.
+  payload[2] = 0x11;  // KEYIDX = 17 decimal.
+
+  RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 10, 0);
+
+  RTPPayload parsedPacket;
+  ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
+
+  EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
+  EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
+
+  VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 0 /*S*/, 8 /*PartID*/);
+  VerifyExtensions(parsedPacket.info.VP8, 0 /*I*/, 0 /*L*/, 0 /*T*/, 1 /*K*/);
+
+  EXPECT_EQ(17, parsedPacket.info.VP8.keyIdx);
+
+  EXPECT_EQ(payload + 3, parsedPacket.info.VP8.data);
+  EXPECT_EQ(10 - 3, parsedPacket.info.VP8.dataLength);
+}
+
+TEST(ParseVP8Test, MultipleExtensions) {
+  WebRtc_UWord8 payload[10] = {0};
+  payload[0] = 0x88;
+  payload[1] = 0x80 | 0x40 | 0x20 | 0x10;
+  payload[2] = 0x80 | 17;    // PictureID, high 7 bits.
+  payload[3] = 17;           // PictureID, low 8 bits.
+  payload[4] = 42;           // Tl0PicIdx.
+  payload[5] = 0x40 | 0x20 | 0x11;  // TID(1) + LayerSync(true) + KEYIDX(17).
+
+  RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 10, 0);
+
+  RTPPayload parsedPacket;
+  ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
+
+  EXPECT_EQ(ModuleRTPUtility::kPFrame, parsedPacket.frameType);
+  EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
+
+  VerifyBasicHeader(parsedPacket.info.VP8, 0 /*N*/, 0 /*S*/, 8 /*PartID*/);
+  VerifyExtensions(parsedPacket.info.VP8, 1 /*I*/, 1 /*L*/, 1 /*T*/, 1 /*K*/);
+
+  EXPECT_EQ((17<<8) + 17, parsedPacket.info.VP8.pictureID);
+  EXPECT_EQ(42, parsedPacket.info.VP8.tl0PicIdx);
+  EXPECT_EQ(1, parsedPacket.info.VP8.tID);
+  EXPECT_EQ(17, parsedPacket.info.VP8.keyIdx);
+
+  EXPECT_EQ(payload + 6, parsedPacket.info.VP8.data);
+  EXPECT_EQ(10 - 6, parsedPacket.info.VP8.dataLength);
+}
+
+TEST(ParseVP8Test, TooShortHeader) {
+  WebRtc_UWord8 payload[4] = {0};
+  payload[0] = 0x88;
+  payload[1] = 0x80 | 0x40 | 0x20 | 0x10;  // All extensions are enabled...
+  payload[2] = 0x80 | 17;  // ... but only 2 bytes PictureID is provided.
+  payload[3] = 17;  // PictureID, low 8 bits.
+
+  RTPPayloadParser rtpPayloadParser(kRtpVp8Video, payload, 4, 0);
+
+  RTPPayload parsedPacket;
+  EXPECT_FALSE(rtpPayloadParser.Parse(parsedPacket));
+}
+
+TEST(ParseVP8Test, TestWithPacketizer) {
+  WebRtc_UWord8 payload[10] = {0};
+  WebRtc_UWord8 packet[20] = {0};
+  RTPVideoHeaderVP8 inputHeader;
+  inputHeader.nonReference = true;
+  inputHeader.pictureId = 300;
+  inputHeader.temporalIdx = 1;
+  inputHeader.layerSync = false;
+  inputHeader.tl0PicIdx = kNoTl0PicIdx;  // Disable.
+  inputHeader.keyIdx = 31;
+  RtpFormatVp8 packetizer(payload, 10, inputHeader, 20);
+  bool last;
+  int send_bytes;
+  ASSERT_EQ(0, packetizer.NextPacket(packet, &send_bytes, &last));
+  ASSERT_TRUE(last);
+
+  RTPPayloadParser rtpPayloadParser(kRtpVp8Video, packet, send_bytes, 0);
+
+  RTPPayload parsedPacket;
+  ASSERT_TRUE(rtpPayloadParser.Parse(parsedPacket));
+
+  EXPECT_EQ(ModuleRTPUtility::kIFrame, parsedPacket.frameType);
+  EXPECT_EQ(kRtpVp8Video, parsedPacket.type);
+
+  VerifyBasicHeader(parsedPacket.info.VP8,
+                    inputHeader.nonReference /*N*/,
+                    1 /*S*/,
+                    0 /*PartID*/);
+  VerifyExtensions(parsedPacket.info.VP8,
+                   1 /*I*/,
+                   0 /*L*/,
+                   1 /*T*/,
+                   1 /*K*/);
+
+  EXPECT_EQ(inputHeader.pictureId, parsedPacket.info.VP8.pictureID);
+  EXPECT_EQ(inputHeader.temporalIdx, parsedPacket.info.VP8.tID);
+  EXPECT_EQ(inputHeader.layerSync, parsedPacket.info.VP8.layerSync);
+  EXPECT_EQ(inputHeader.keyIdx, parsedPacket.info.VP8.keyIdx);
+
+  EXPECT_EQ(packet + 5, parsedPacket.info.VP8.data);
+  EXPECT_EQ(send_bytes - 5, parsedPacket.info.VP8.dataLength);
+}
+
+}  // namespace
diff --git a/src/modules/rtp_rtcp/source/ssrc_database.cc b/src/modules/rtp_rtcp/source/ssrc_database.cc
new file mode 100644
index 0000000..b3e9ab0
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/ssrc_database.cc
@@ -0,0 +1,217 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "ssrc_database.h"
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+#include <stdlib.h>
+#include <cassert>
+
+#ifdef _WIN32
+    #include <windows.h>
+    #include <MMSystem.h> //timeGetTime
+
+// TODO(hellner): investigate if it is necessary to disable these warnings.
+    #pragma warning(disable:4311)
+    #pragma warning(disable:4312)
+#else
+    #include <stdio.h>
+    #include <string.h>
+    #include <time.h>
+    #include <sys/time.h>
+#endif
+
+namespace webrtc {
+SSRCDatabase*
+SSRCDatabase::StaticInstance(CountOperation count_operation)
+{
+  SSRCDatabase* impl =
+      GetStaticInstance<SSRCDatabase>(count_operation);
+  return impl;
+}
+
+SSRCDatabase*
+SSRCDatabase::GetSSRCDatabase()
+{
+    return StaticInstance(kAddRef);
+}
+
+void
+SSRCDatabase::ReturnSSRCDatabase()
+{
+    StaticInstance(kRelease);
+}
+
+WebRtc_UWord32
+SSRCDatabase::CreateSSRC()
+{
+    CriticalSectionScoped lock(_critSect);
+
+    WebRtc_UWord32 ssrc = GenerateRandom();
+
+#ifndef WEBRTC_NO_STL
+
+    while(_ssrcMap.find(ssrc) != _ssrcMap.end())
+    {
+        ssrc = GenerateRandom();
+    }
+    _ssrcMap[ssrc] = 0;
+
+#else
+    if(_sizeOfSSRC <= _numberOfSSRC)
+    {
+        // allocate more space
+        const int newSize = _sizeOfSSRC + 10;
+        WebRtc_UWord32* tempSSRCVector = new WebRtc_UWord32[newSize];
+        memcpy(tempSSRCVector, _ssrcVector, _sizeOfSSRC*sizeof(WebRtc_UWord32));
+        delete [] _ssrcVector;
+
+        _ssrcVector = tempSSRCVector;
+        _sizeOfSSRC = newSize;
+    }
+
+    // check if in DB
+    if(_ssrcVector)
+    {
+        for (int i=0; i<_numberOfSSRC; i++)
+        {
+            if (_ssrcVector[i] == ssrc)
+            {
+                // we have a match
+                i = 0; // start over with a new ssrc
+                ssrc = GenerateRandom();
+            }
+
+        }
+        //  add to database
+        _ssrcVector[_numberOfSSRC] = ssrc;
+        _numberOfSSRC++;
+    }
+#endif
+    return ssrc;
+}
+
+WebRtc_Word32
+SSRCDatabase::RegisterSSRC(const WebRtc_UWord32 ssrc)
+{
+    CriticalSectionScoped lock(_critSect);
+
+#ifndef WEBRTC_NO_STL
+
+    _ssrcMap[ssrc] = 0;
+
+#else
+    if(_sizeOfSSRC <= _numberOfSSRC)
+    {
+        // allocate more space
+        const int newSize = _sizeOfSSRC + 10;
+        WebRtc_UWord32* tempSSRCVector = new WebRtc_UWord32[newSize];
+        memcpy(tempSSRCVector, _ssrcVector, _sizeOfSSRC*sizeof(WebRtc_UWord32));
+        delete [] _ssrcVector;
+
+        _ssrcVector = tempSSRCVector;
+        _sizeOfSSRC = newSize;
+    }
+    // check if in DB
+    if(_ssrcVector)
+    {
+        for (int i=0; i<_numberOfSSRC; i++)
+        {
+            if (_ssrcVector[i] == ssrc)
+            {
+                // we have a match
+                return -1;
+            }
+        }
+        //  add to database
+        _ssrcVector[_numberOfSSRC] = ssrc;
+        _numberOfSSRC++;
+    }
+#endif
+    return 0;
+}
+
+WebRtc_Word32
+SSRCDatabase::ReturnSSRC(const WebRtc_UWord32 ssrc)
+{
+    CriticalSectionScoped lock(_critSect);
+
+#ifndef WEBRTC_NO_STL
+    _ssrcMap.erase(ssrc);
+
+#else
+    if(_ssrcVector)
+    {
+        for (int i=0; i<_numberOfSSRC; i++)
+        {
+            if (_ssrcVector[i] == ssrc)
+            {
+                // we have a match
+                // remove from database
+                _ssrcVector[i] = _ssrcVector[_numberOfSSRC-1];
+                _numberOfSSRC--;
+                break;
+            }
+        }
+    }
+#endif
+    return 0;
+}
+
+SSRCDatabase::SSRCDatabase()
+{
+    // we need to seed the random generator, otherwise we get 26500 each time, hardly a random value :)
+#ifdef _WIN32
+    srand(timeGetTime());
+#else
+    struct timeval tv;
+    struct timezone tz;
+    gettimeofday(&tv, &tz);
+    srand(tv.tv_usec);
+#endif
+
+#ifdef WEBRTC_NO_STL
+    _sizeOfSSRC = 10;
+    _numberOfSSRC = 0;
+    _ssrcVector = new WebRtc_UWord32[10];
+#endif
+    _critSect = CriticalSectionWrapper::CreateCriticalSection();
+
+    WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, -1, "%s created", __FUNCTION__);
+}
+
+SSRCDatabase::~SSRCDatabase()
+{
+#ifdef WEBRTC_NO_STL
+    delete [] _ssrcVector;
+#else
+    _ssrcMap.clear();
+#endif
+    delete _critSect;
+
+    WEBRTC_TRACE(kTraceMemory, kTraceRtpRtcp, -1, "%s deleted", __FUNCTION__);
+}
+
+WebRtc_UWord32 SSRCDatabase::GenerateRandom()
+{
+    WebRtc_UWord32 ssrc = 0;
+    do
+    {
+        ssrc = rand();
+        ssrc = ssrc <<16;
+        ssrc += rand();
+
+    } while (ssrc == 0 || ssrc == 0xffffffff);
+
+    return ssrc;
+}
+} // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/ssrc_database.h b/src/modules/rtp_rtcp/source/ssrc_database.h
new file mode 100644
index 0000000..370e549
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/ssrc_database.h
@@ -0,0 +1,62 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_SSRC_DATABASE_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_SSRC_DATABASE_H_
+
+#ifndef WEBRTC_NO_STL
+    #include <map>
+#endif
+
+#include "system_wrappers/interface/static_instance.h"
+#include "typedefs.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+class SSRCDatabase
+{
+public:
+    static SSRCDatabase* GetSSRCDatabase();
+    static void ReturnSSRCDatabase();
+
+    WebRtc_UWord32 CreateSSRC();
+    WebRtc_Word32 RegisterSSRC(const WebRtc_UWord32 ssrc);
+    WebRtc_Word32 ReturnSSRC(const WebRtc_UWord32 ssrc);
+
+protected:
+    SSRCDatabase();
+    virtual ~SSRCDatabase();
+
+    static SSRCDatabase* CreateInstance() { return new SSRCDatabase(); }
+
+private:
+    // Friend function to allow the SSRC destructor to be accessed from the
+    // template class.
+    friend SSRCDatabase* GetStaticInstance<SSRCDatabase>(
+        CountOperation count_operation);
+    static SSRCDatabase* StaticInstance(CountOperation count_operation);
+
+    WebRtc_UWord32 GenerateRandom();
+
+#ifdef WEBRTC_NO_STL
+    int _numberOfSSRC;
+    int _sizeOfSSRC;
+
+    WebRtc_UWord32* _ssrcVector;
+#else
+    std::map<WebRtc_UWord32, WebRtc_UWord32>    _ssrcMap;
+#endif
+
+    CriticalSectionWrapper* _critSect;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_SSRC_DATABASE_H_
diff --git a/src/modules/rtp_rtcp/source/tmmbr_help.cc b/src/modules/rtp_rtcp/source/tmmbr_help.cc
new file mode 100644
index 0000000..50f0e08
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/tmmbr_help.cc
@@ -0,0 +1,482 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "tmmbr_help.h"
+
+#include <assert.h>
+#include <limits>
+#include <string.h>
+#include "rtp_rtcp_config.h"
+
+namespace webrtc {
+TMMBRSet::TMMBRSet() :
+    _sizeOfSet(0),
+    _lengthOfSet(0)
+{
+}
+
+TMMBRSet::~TMMBRSet()
+{
+    _sizeOfSet = 0;
+    _lengthOfSet = 0;
+}
+
+void
+TMMBRSet::VerifyAndAllocateSet(WebRtc_UWord32 minimumSize)
+{
+    if(minimumSize > _sizeOfSet)
+    {
+        // make sure that our buffers are big enough
+        _data.resize(minimumSize);
+        _sizeOfSet = minimumSize;
+    }
+    // reset memory
+    for(WebRtc_UWord32 i = 0; i < _sizeOfSet; i++)
+    {
+        _data.at(i).tmmbr = 0;
+        _data.at(i).packet_oh = 0;
+        _data.at(i).ssrc = 0;
+    }
+    _lengthOfSet = 0;
+}
+
+void
+TMMBRSet::VerifyAndAllocateSetKeepingData(WebRtc_UWord32 minimumSize)
+{
+    if(minimumSize > _sizeOfSet)
+    {
+        {
+          _data.resize(minimumSize);
+        }
+        _sizeOfSet = minimumSize;
+    }
+}
+
+void TMMBRSet::SetEntry(unsigned int i,
+                         WebRtc_UWord32 tmmbrSet,
+                         WebRtc_UWord32 packetOHSet,
+                         WebRtc_UWord32 ssrcSet) {
+  assert(i < _sizeOfSet);
+  _data.at(i).tmmbr = tmmbrSet;
+  _data.at(i).packet_oh = packetOHSet;
+  _data.at(i).ssrc = ssrcSet;
+  if (i >= _lengthOfSet) {
+    _lengthOfSet = i + 1;
+  }
+}
+
+void TMMBRSet::AddEntry(WebRtc_UWord32 tmmbrSet,
+                        WebRtc_UWord32 packetOHSet,
+                        WebRtc_UWord32 ssrcSet) {
+  assert(_lengthOfSet < _sizeOfSet);
+  SetEntry(_lengthOfSet, tmmbrSet, packetOHSet, ssrcSet);
+}
+
+void TMMBRSet::RemoveEntry(WebRtc_UWord32 sourceIdx) {
+  assert(sourceIdx < _lengthOfSet);
+  _data.erase(_data.begin() + sourceIdx);
+  _lengthOfSet--;
+  _data.resize(_sizeOfSet);  // Ensure that size remains the same.
+}
+
+void TMMBRSet::SwapEntries(WebRtc_UWord32 i, WebRtc_UWord32 j) {
+    SetElement temp;
+    temp = _data[i];
+    _data[i] = _data[j];
+    _data[j] = temp;
+}
+
+void TMMBRSet::ClearEntry(WebRtc_UWord32 idx) {
+  SetEntry(idx, 0, 0, 0);
+}
+
+TMMBRHelp::TMMBRHelp()
+    : _criticalSection(CriticalSectionWrapper::CreateCriticalSection()),
+      _candidateSet(),
+      _boundingSet(),
+      _boundingSetToSend(),
+      _ptrIntersectionBoundingSet(NULL),
+      _ptrMaxPRBoundingSet(NULL) {
+}
+
+TMMBRHelp::~TMMBRHelp() {
+  delete [] _ptrIntersectionBoundingSet;
+  delete [] _ptrMaxPRBoundingSet;
+  _ptrIntersectionBoundingSet = 0;
+  _ptrMaxPRBoundingSet = 0;
+  delete _criticalSection;
+}
+
+TMMBRSet*
+TMMBRHelp::VerifyAndAllocateBoundingSet(WebRtc_UWord32 minimumSize)
+{
+    CriticalSectionScoped lock(_criticalSection);
+
+    if(minimumSize > _boundingSet.sizeOfSet())
+    {
+        // make sure that our buffers are big enough
+        if(_ptrIntersectionBoundingSet)
+        {
+            delete [] _ptrIntersectionBoundingSet;
+            delete [] _ptrMaxPRBoundingSet;
+        }
+        _ptrIntersectionBoundingSet = new float[minimumSize];
+        _ptrMaxPRBoundingSet = new float[minimumSize];
+    }
+    _boundingSet.VerifyAndAllocateSet(minimumSize);
+    return &_boundingSet;
+}
+
+TMMBRSet* TMMBRHelp::BoundingSet() {
+  return &_boundingSet;
+}
+
+WebRtc_Word32
+TMMBRHelp::SetTMMBRBoundingSetToSend(const TMMBRSet* boundingSetToSend,
+                                     const WebRtc_UWord32 maxBitrateKbit)
+{
+    CriticalSectionScoped lock(_criticalSection);
+
+    if (boundingSetToSend == NULL)
+    {
+        _boundingSetToSend.clearSet();
+        return 0;
+    }
+
+    VerifyAndAllocateBoundingSetToSend(boundingSetToSend->lengthOfSet());
+    _boundingSetToSend.clearSet();
+    for (WebRtc_UWord32 i = 0; i < boundingSetToSend->lengthOfSet(); i++)
+    {
+        // cap at our configured max bitrate
+        WebRtc_UWord32 bitrate = boundingSetToSend->Tmmbr(i);
+        if(maxBitrateKbit)
+        {
+            // do we have a configured max bitrate?
+            if(bitrate > maxBitrateKbit)
+            {
+                bitrate = maxBitrateKbit;
+            }
+        }
+        _boundingSetToSend.SetEntry(i, bitrate,
+                                    boundingSetToSend->PacketOH(i),
+                                    boundingSetToSend->Ssrc(i));
+    }
+    return 0;
+}
+
+WebRtc_Word32
+TMMBRHelp::VerifyAndAllocateBoundingSetToSend(WebRtc_UWord32 minimumSize)
+{
+    CriticalSectionScoped lock(_criticalSection);
+
+    _boundingSetToSend.VerifyAndAllocateSet(minimumSize);
+    return 0;
+}
+
+TMMBRSet*
+TMMBRHelp::VerifyAndAllocateCandidateSet(WebRtc_UWord32 minimumSize)
+{
+    CriticalSectionScoped lock(_criticalSection);
+
+    _candidateSet.VerifyAndAllocateSet(minimumSize);
+    return &_candidateSet;
+}
+
+TMMBRSet*
+TMMBRHelp::CandidateSet()
+{
+    return &_candidateSet;
+}
+
+TMMBRSet*
+TMMBRHelp::BoundingSetToSend()
+{
+    return &_boundingSetToSend;
+}
+
+WebRtc_Word32
+TMMBRHelp::FindTMMBRBoundingSet(TMMBRSet*& boundingSet)
+{
+    CriticalSectionScoped lock(_criticalSection);
+
+    // Work on local variable, will be modified
+    TMMBRSet    candidateSet;
+    candidateSet.VerifyAndAllocateSet(_candidateSet.sizeOfSet());
+
+    // TODO(hta) Figure out if this should be lengthOfSet instead.
+    for (WebRtc_UWord32 i = 0; i < _candidateSet.sizeOfSet(); i++)
+    {
+        if(_candidateSet.Tmmbr(i))
+        {
+            candidateSet.AddEntry(_candidateSet.Tmmbr(i),
+                                  _candidateSet.PacketOH(i),
+                                  _candidateSet.Ssrc(i));
+        }
+        else
+        {
+            // make sure this is zero if tmmbr = 0
+            assert(_candidateSet.PacketOH(i) == 0);
+            // Old code:
+            // _candidateSet.ptrPacketOHSet[i] = 0;
+        }
+    }
+
+    // Number of set candidates
+    WebRtc_Word32 numSetCandidates = candidateSet.lengthOfSet();
+    // Find bounding set
+    WebRtc_UWord32 numBoundingSet = 0;
+    if (numSetCandidates > 0)
+    {
+        numBoundingSet =  FindTMMBRBoundingSet(numSetCandidates, candidateSet);
+        if(numBoundingSet < 1 || (numBoundingSet > _candidateSet.sizeOfSet()))
+        {
+            return -1;
+        }
+        boundingSet = &_boundingSet;
+    }
+    return numBoundingSet;
+}
+
+
+WebRtc_Word32
+TMMBRHelp::FindTMMBRBoundingSet(WebRtc_Word32 numCandidates, TMMBRSet& candidateSet)
+{
+    CriticalSectionScoped lock(_criticalSection);
+
+    WebRtc_UWord32 numBoundingSet = 0;
+    VerifyAndAllocateBoundingSet(candidateSet.sizeOfSet());
+
+    if (numCandidates == 1)
+    {
+        // TODO(hta): lengthOfSet instead of sizeOfSet?
+        for (WebRtc_UWord32 i = 0; i < candidateSet.sizeOfSet(); i++)
+        {
+            if (candidateSet.Tmmbr(i) > 0)
+            {
+                _boundingSet.AddEntry(candidateSet.Tmmbr(i),
+                                    candidateSet.PacketOH(i),
+                                    candidateSet.Ssrc(i));
+                numBoundingSet++;
+            }
+        }
+        if (numBoundingSet != 1)
+        {
+            numBoundingSet = -1;
+        }
+    } else
+    {
+        // 1. Sort by increasing packetOH
+        for (int i = candidateSet.sizeOfSet() - 1; i >= 0; i--)
+        {
+            for (int j = 1; j <= i; j++)
+            {
+                if (candidateSet.PacketOH(j-1) > candidateSet.PacketOH(j))
+                {
+                    candidateSet.SwapEntries(j-1, j);
+                }
+            }
+        }
+        // 2. For tuples with same OH, keep the one w/ the lowest bitrate
+        for (WebRtc_UWord32 i = 0; i < candidateSet.sizeOfSet(); i++)
+        {
+            if (candidateSet.Tmmbr(i) > 0)
+            {
+                // get min bitrate for packets w/ same OH
+                WebRtc_UWord32 currentPacketOH = candidateSet.PacketOH(i);
+                WebRtc_UWord32 currentMinTMMBR = candidateSet.Tmmbr(i);
+                WebRtc_UWord32 currentMinIndexTMMBR = i;
+                for (WebRtc_UWord32 j = i+1; j < candidateSet.sizeOfSet(); j++)
+                {
+                    if(candidateSet.PacketOH(j) == currentPacketOH)
+                    {
+                        if(candidateSet.Tmmbr(j) < currentMinTMMBR)
+                        {
+                            currentMinTMMBR = candidateSet.Tmmbr(j);
+                            currentMinIndexTMMBR = j;
+                        }
+                    }
+                }
+                // keep lowest bitrate
+                for (WebRtc_UWord32 j = 0; j < candidateSet.sizeOfSet(); j++)
+                {
+                  if(candidateSet.PacketOH(j) == currentPacketOH
+                     && j != currentMinIndexTMMBR)
+                    {
+                        candidateSet.ClearEntry(j);
+                    }
+                }
+            }
+        }
+        // 3. Select and remove tuple w/ lowest tmmbr.
+        // (If more than 1, choose the one w/ highest OH).
+        WebRtc_UWord32 minTMMBR = 0;
+        WebRtc_UWord32 minIndexTMMBR = 0;
+        for (WebRtc_UWord32 i = 0; i < candidateSet.sizeOfSet(); i++)
+        {
+            if (candidateSet.Tmmbr(i) > 0)
+            {
+                minTMMBR = candidateSet.Tmmbr(i);
+                minIndexTMMBR = i;
+                break;
+            }
+        }
+
+        for (WebRtc_UWord32 i = 0; i < candidateSet.sizeOfSet(); i++)
+        {
+            if (candidateSet.Tmmbr(i) > 0 && candidateSet.Tmmbr(i) <= minTMMBR)
+            {
+                // get min bitrate
+                minTMMBR = candidateSet.Tmmbr(i);
+                minIndexTMMBR = i;
+            }
+        }
+        // first member of selected list
+        _boundingSet.SetEntry(numBoundingSet,
+                              candidateSet.Tmmbr(minIndexTMMBR),
+                              candidateSet.PacketOH(minIndexTMMBR),
+                              candidateSet.Ssrc(minIndexTMMBR));
+
+        // set intersection value
+        _ptrIntersectionBoundingSet[numBoundingSet] = 0;
+        // calculate its maximum packet rate (where its line crosses x-axis)
+        _ptrMaxPRBoundingSet[numBoundingSet]
+            = _boundingSet.Tmmbr(numBoundingSet) * 1000
+            / float(8 * _boundingSet.PacketOH(numBoundingSet));
+        numBoundingSet++;
+        // remove from candidate list
+        candidateSet.ClearEntry(minIndexTMMBR);
+        numCandidates--;
+
+        // 4. Discard from candidate list all tuple w/ lower OH
+        // (next tuple must be steeper)
+        for (WebRtc_UWord32 i = 0; i < candidateSet.sizeOfSet(); i++)
+        {
+            if(candidateSet.Tmmbr(i) > 0
+               && candidateSet.PacketOH(i) < _boundingSet.PacketOH(0))
+            {
+                candidateSet.ClearEntry(i);
+                numCandidates--;
+            }
+        }
+
+        if (numCandidates == 0)
+        {
+            // Should be true already:_boundingSet.lengthOfSet = numBoundingSet;
+            assert(_boundingSet.lengthOfSet() == numBoundingSet);
+            return numBoundingSet;
+        }
+
+        bool getNewCandidate = true;
+        int curCandidateTMMBR = 0;
+        int curCandidateIndex = 0;
+        int curCandidatePacketOH = 0;
+        int curCandidateSSRC = 0;
+        do
+        {
+            if (getNewCandidate)
+            {
+                // 5. Remove first remaining tuple from candidate list
+                for (WebRtc_UWord32 i = 0; i < candidateSet.sizeOfSet(); i++)
+                {
+                    if (candidateSet.Tmmbr(i) > 0)
+                    {
+                        curCandidateTMMBR    = candidateSet.Tmmbr(i);
+                        curCandidatePacketOH = candidateSet.PacketOH(i);
+                        curCandidateSSRC     = candidateSet.Ssrc(i);
+                        curCandidateIndex    = i;
+                        candidateSet.ClearEntry(curCandidateIndex);
+                        break;
+                    }
+                }
+            }
+
+            // 6. Calculate packet rate and intersection of the current
+            // line with line of last tuple in selected list
+            float packetRate
+                = float(curCandidateTMMBR
+                        - _boundingSet.Tmmbr(numBoundingSet-1))*1000
+                / (8*(curCandidatePacketOH
+                      - _boundingSet.PacketOH(numBoundingSet-1)));
+
+            // 7. If the packet rate is equal or lower than intersection of
+            //    last tuple in selected list,
+            //    remove last tuple in selected list & go back to step 6
+            if(packetRate <= _ptrIntersectionBoundingSet[numBoundingSet-1])
+            {
+                // remove last tuple and goto step 6
+                numBoundingSet--;
+                _boundingSet.ClearEntry(numBoundingSet);
+                _ptrIntersectionBoundingSet[numBoundingSet] = 0;
+                _ptrMaxPRBoundingSet[numBoundingSet]        = 0;
+                getNewCandidate = false;
+            } else
+            {
+                // 8. If packet rate is lower than maximum packet rate of
+                // last tuple in selected list, add current tuple to selected
+                // list
+                if (packetRate < _ptrMaxPRBoundingSet[numBoundingSet-1])
+                {
+                    _boundingSet.SetEntry(numBoundingSet,
+                                          curCandidateTMMBR,
+                                          curCandidatePacketOH,
+                                          curCandidateSSRC);
+                    _ptrIntersectionBoundingSet[numBoundingSet] = packetRate;
+                    _ptrMaxPRBoundingSet[numBoundingSet]
+                        = _boundingSet.Tmmbr(numBoundingSet)*1000
+                        / float(8*_boundingSet.PacketOH(numBoundingSet));
+                    numBoundingSet++;
+                }
+                numCandidates--;
+                getNewCandidate = true;
+            }
+
+            // 9. Go back to step 5 if any tuple remains in candidate list
+        } while (numCandidates > 0);
+    }
+    return numBoundingSet;
+}
+
+bool TMMBRHelp::IsOwner(const WebRtc_UWord32 ssrc,
+                        const WebRtc_UWord32 length) const {
+  CriticalSectionScoped lock(_criticalSection);
+
+  if (length == 0) {
+    // Empty bounding set.
+    return false;
+  }
+  for(WebRtc_UWord32 i = 0;
+      (i < length) && (i < _boundingSet.sizeOfSet()); ++i) {
+    if(_boundingSet.Ssrc(i) == ssrc) {
+      return true;
+    }
+  }
+  return false;
+}
+
+bool TMMBRHelp::CalcMinBitRate( WebRtc_UWord32* minBitrateKbit) const {
+  CriticalSectionScoped lock(_criticalSection);
+
+  if (_candidateSet.sizeOfSet() == 0) {
+    // Empty bounding set.
+    return false;
+  }
+  *minBitrateKbit = std::numeric_limits<uint32_t>::max();
+
+  for (WebRtc_UWord32 i = 0; i < _candidateSet.sizeOfSet(); ++i) {
+    WebRtc_UWord32 curNetBitRateKbit = _candidateSet.Tmmbr(i);
+    if (curNetBitRateKbit < MIN_VIDEO_BW_MANAGEMENT_BITRATE) {
+      curNetBitRateKbit = MIN_VIDEO_BW_MANAGEMENT_BITRATE;
+    }
+    *minBitrateKbit = curNetBitRateKbit < *minBitrateKbit ?
+        curNetBitRateKbit : *minBitrateKbit;
+  }
+  return true;
+}
+} // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/tmmbr_help.h b/src/modules/rtp_rtcp/source/tmmbr_help.h
new file mode 100644
index 0000000..45ce1c4
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/tmmbr_help.h
@@ -0,0 +1,119 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_TMMBR_HELP_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_TMMBR_HELP_H_
+
+#include <vector>
+#include "typedefs.h"
+
+#include "critical_section_wrapper.h"
+
+#ifndef NULL
+    #define NULL    0
+#endif
+
+namespace webrtc {
+class TMMBRSet
+{
+public:
+    TMMBRSet();
+    ~TMMBRSet();
+
+    void VerifyAndAllocateSet(WebRtc_UWord32 minimumSize);
+    void VerifyAndAllocateSetKeepingData(WebRtc_UWord32 minimumSize);
+    // Number of valid data items in set.
+    WebRtc_UWord32 lengthOfSet() const { return _lengthOfSet; }
+    // Presently allocated max size of set.
+    WebRtc_UWord32 sizeOfSet() const { return _sizeOfSet; }
+    void clearSet() {
+      _lengthOfSet = 0;
+    }
+    WebRtc_UWord32 Tmmbr(int i) const {
+      return _data.at(i).tmmbr;
+    }
+    WebRtc_UWord32 PacketOH(int i) const {
+      return _data.at(i).packet_oh;
+    }
+    WebRtc_UWord32 Ssrc(int i) const {
+      return _data.at(i).ssrc;
+    }
+    void SetEntry(unsigned int i,
+                  WebRtc_UWord32 tmmbrSet,
+                  WebRtc_UWord32 packetOHSet,
+                  WebRtc_UWord32 ssrcSet);
+
+    void AddEntry(WebRtc_UWord32 tmmbrSet,
+                  WebRtc_UWord32 packetOHSet,
+                  WebRtc_UWord32 ssrcSet);
+
+    // Remove one entry from table, and move all others down.
+    void RemoveEntry(WebRtc_UWord32 sourceIdx);
+
+    void SwapEntries(WebRtc_UWord32 firstIdx,
+                     WebRtc_UWord32 secondIdx);
+
+    // Set entry data to zero, but keep it in table.
+    void ClearEntry(WebRtc_UWord32 idx);
+
+ private:
+    class SetElement {
+      public:
+        SetElement() : tmmbr(0), packet_oh(0), ssrc(0) {}
+        WebRtc_UWord32 tmmbr;
+        WebRtc_UWord32 packet_oh;
+        WebRtc_UWord32 ssrc;
+    };
+
+    std::vector<SetElement> _data;
+    // Number of places allocated.
+    WebRtc_UWord32    _sizeOfSet;
+    // NUmber of places currently in use.
+    WebRtc_UWord32    _lengthOfSet;
+};
+
+class TMMBRHelp
+{
+public:
+    TMMBRHelp();
+    virtual ~TMMBRHelp();
+
+    TMMBRSet* BoundingSet(); // used for debuging
+    TMMBRSet* CandidateSet();
+    TMMBRSet* BoundingSetToSend();
+
+    TMMBRSet* VerifyAndAllocateCandidateSet(const WebRtc_UWord32 minimumSize);
+    WebRtc_Word32 FindTMMBRBoundingSet(TMMBRSet*& boundingSet);
+    WebRtc_Word32 SetTMMBRBoundingSetToSend(
+        const TMMBRSet* boundingSetToSend,
+        const WebRtc_UWord32 maxBitrateKbit);
+
+    bool IsOwner(const WebRtc_UWord32 ssrc, const WebRtc_UWord32 length) const;
+
+    bool CalcMinBitRate(WebRtc_UWord32* minBitrateKbit) const;
+
+protected:
+    TMMBRSet*   VerifyAndAllocateBoundingSet(WebRtc_UWord32 minimumSize);
+    WebRtc_Word32 VerifyAndAllocateBoundingSetToSend(WebRtc_UWord32 minimumSize);
+
+    WebRtc_Word32 FindTMMBRBoundingSet(WebRtc_Word32 numCandidates, TMMBRSet& candidateSet);
+
+private:
+    CriticalSectionWrapper* _criticalSection;
+    TMMBRSet                _candidateSet;
+    TMMBRSet                _boundingSet;
+    TMMBRSet                _boundingSetToSend;
+
+    float*                  _ptrIntersectionBoundingSet;
+    float*                  _ptrMaxPRBoundingSet;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_TMMBR_HELP_H_
diff --git a/src/modules/rtp_rtcp/source/transmission_bucket.cc b/src/modules/rtp_rtcp/source/transmission_bucket.cc
new file mode 100644
index 0000000..e79d227
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/transmission_bucket.cc
@@ -0,0 +1,117 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "transmission_bucket.h"
+
+#include <assert.h>
+#include "critical_section_wrapper.h"
+
+namespace webrtc {
+
+TransmissionBucket::TransmissionBucket()
+  : critsect_(CriticalSectionWrapper::CreateCriticalSection()),
+    accumulator_(0),
+    bytes_rem_total_(0),
+    bytes_rem_interval_(0),
+    packets_(),
+    first_(true) {
+}
+
+TransmissionBucket::~TransmissionBucket() {
+  packets_.clear();
+  delete critsect_;
+}
+
+void TransmissionBucket::Reset() {
+  webrtc::CriticalSectionScoped cs(*critsect_);
+  accumulator_ = 0;
+  bytes_rem_total_ = 0;
+  bytes_rem_interval_ = 0;
+  packets_.clear();
+  first_ = true;
+}
+
+void TransmissionBucket::Fill(const uint16_t seq_num,
+                              const uint32_t num_bytes) {
+  webrtc::CriticalSectionScoped cs(*critsect_);
+  accumulator_ += num_bytes;
+
+  Packet p(seq_num, num_bytes);
+  packets_.push_back(p);
+}
+
+bool TransmissionBucket::Empty() {
+  webrtc::CriticalSectionScoped cs(*critsect_);
+  return packets_.empty();
+}
+
+void TransmissionBucket::UpdateBytesPerInterval(
+    const uint32_t delta_time_ms,
+    const uint16_t target_bitrate_kbps) {
+  webrtc::CriticalSectionScoped cs(*critsect_);
+
+  const float kMargin = 1.05f;
+  uint32_t bytes_per_interval = 
+      kMargin * (target_bitrate_kbps * delta_time_ms / 8);
+
+  if (bytes_rem_interval_ < 0) {
+    bytes_rem_interval_ += bytes_per_interval;
+  } else {
+    bytes_rem_interval_ = bytes_per_interval;
+  }
+
+  if (accumulator_) {
+    bytes_rem_total_ += bytes_per_interval;
+    return;
+  }
+  bytes_rem_total_ = bytes_per_interval;
+}
+
+int32_t TransmissionBucket::GetNextPacket() {
+  webrtc::CriticalSectionScoped cs(*critsect_);
+
+  if (accumulator_ == 0) {
+    // Empty.
+    return -1;
+  }
+
+  std::vector<Packet>::const_iterator it_begin = packets_.begin();
+  const uint16_t num_bytes = (*it_begin).length_;
+  const uint16_t seq_num = (*it_begin).sequence_number_;
+
+  if (first_) {
+    // Ok to transmit first packet.
+    first_ = false;
+    packets_.erase(packets_.begin());
+    return seq_num;
+  }
+
+  const float kFrameComplete = 0.80f;
+  if (num_bytes * kFrameComplete > bytes_rem_total_) {
+    // Packet does not fit.
+    return -1;
+  }
+
+  if (bytes_rem_interval_ <= 0) {
+    // All bytes consumed for this interval.
+    return -1;
+  }
+
+  // Ok to transmit packet.
+  bytes_rem_total_ -= num_bytes;
+  bytes_rem_interval_ -= num_bytes;
+
+  assert(accumulator_ >= num_bytes);
+  accumulator_ -= num_bytes;
+
+  packets_.erase(packets_.begin());
+  return seq_num;
+}
+} // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/transmission_bucket.h b/src/modules/rtp_rtcp/source/transmission_bucket.h
new file mode 100644
index 0000000..79e45d8
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/transmission_bucket.h
@@ -0,0 +1,63 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_TRANSMISSION_BUCKET_H_
+#define WEBRTC_MODULES_RTP_RTCP_TRANSMISSION_BUCKET_H_
+
+#include <vector>
+
+#include "typedefs.h"
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+
+class TransmissionBucket {
+ public:
+  TransmissionBucket();
+  ~TransmissionBucket();
+
+  // Resets members to initial state.
+  void Reset();
+
+  // Adds packet to be sent.
+  void Fill(const uint16_t seq_num, const uint32_t num_bytes);
+
+  // Returns true if there is no packet to be sent.
+  bool Empty();
+
+  // Updates the number of bytes that can be sent for the next time interval.
+  void UpdateBytesPerInterval(const uint32_t delta_time_in_ms,
+                              const uint16_t target_bitrate_kbps);
+
+  // Checks if next packet in line can be transmitted. Returns the sequence
+  // number of the packet on success, -1 otherwise. The packet is removed from
+  // the vector on success.
+  int32_t GetNextPacket();
+
+ private:
+   struct Packet {
+     Packet(uint16_t sequence_number, uint16_t length_in_bytes)
+       : sequence_number_(sequence_number),
+         length_(length_in_bytes) {
+     }
+     uint16_t sequence_number_;
+     uint16_t length_;
+   };
+
+   CriticalSectionWrapper* critsect_;
+   uint32_t accumulator_;
+   int32_t bytes_rem_total_;
+   int32_t bytes_rem_interval_;
+   std::vector<Packet> packets_;
+   bool first_;
+};
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_RTP_RTCP_TRANSMISSION_BUCKET_H_
diff --git a/src/modules/rtp_rtcp/source/transmission_bucket_unittest.cc b/src/modules/rtp_rtcp/source/transmission_bucket_unittest.cc
new file mode 100644
index 0000000..a8c9247
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/transmission_bucket_unittest.cc
@@ -0,0 +1,64 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file includes unit tests for the TransmissionBucket.
+ */
+
+#include <gtest/gtest.h>
+
+#include "transmission_bucket.h"
+
+namespace webrtc {
+
+class TransmissionBucketTest : public ::testing::Test {
+ protected:  
+  TransmissionBucket send_bucket_;
+};
+
+TEST_F(TransmissionBucketTest, Fill) {
+  EXPECT_TRUE(send_bucket_.Empty());
+  send_bucket_.Fill(1, 100);
+  EXPECT_FALSE(send_bucket_.Empty());
+}
+
+TEST_F(TransmissionBucketTest, Reset) {
+  send_bucket_.Fill(1, 100);
+  EXPECT_FALSE(send_bucket_.Empty());
+  send_bucket_.Reset();
+  EXPECT_TRUE(send_bucket_.Empty());
+}
+
+TEST_F(TransmissionBucketTest, GetNextPacket) {
+  EXPECT_EQ(-1, send_bucket_.GetNextPacket());    // empty
+  send_bucket_.Fill(1234, 100);
+  EXPECT_EQ(1234, send_bucket_.GetNextPacket());  // first packet ok
+  send_bucket_.Fill(1235, 100);
+  EXPECT_EQ(-1, send_bucket_.GetNextPacket());    // packet does not fit
+}
+
+TEST_F(TransmissionBucketTest, UpdateBytesPerInterval) {
+  const int delta_time_ms = 1;
+  const int target_bitrate_kbps = 800;
+  send_bucket_.UpdateBytesPerInterval(delta_time_ms, target_bitrate_kbps);
+
+  send_bucket_.Fill(1234, 50);
+  send_bucket_.Fill(1235, 50);
+  send_bucket_.Fill(1236, 50);
+
+  EXPECT_EQ(1234, send_bucket_.GetNextPacket());  // first packet ok
+  EXPECT_EQ(1235, send_bucket_.GetNextPacket());  // ok
+  EXPECT_EQ(1236, send_bucket_.GetNextPacket());  // ok
+  EXPECT_TRUE(send_bucket_.Empty());
+
+  send_bucket_.Fill(1237, 50);
+  EXPECT_EQ(-1, send_bucket_.GetNextPacket());    // packet does not fit
+}
+}  // namespace webrtc
diff --git a/src/modules/rtp_rtcp/source/video_codec_information.h b/src/modules/rtp_rtcp/source/video_codec_information.h
new file mode 100644
index 0000000..4364f0b
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/video_codec_information.h
@@ -0,0 +1,28 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_VIDEO_CODEC_INFORMATION_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_VIDEO_CODEC_INFORMATION_H_
+
+#include "rtp_rtcp_config.h"
+#include "rtp_utility.h"
+
+namespace webrtc {
+class VideoCodecInformation
+{
+public:
+    virtual void Reset() = 0;
+
+    virtual RtpVideoCodecTypes Type() = 0;
+    virtual ~VideoCodecInformation(){};
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_RTP_RTCP_SOURCE_VIDEO_CODEC_INFORMATION_H_
diff --git a/src/modules/rtp_rtcp/source/vp8_partition_aggregator.cc b/src/modules/rtp_rtcp/source/vp8_partition_aggregator.cc
new file mode 100644
index 0000000..6d5bb75
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/vp8_partition_aggregator.cc
@@ -0,0 +1,268 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/rtp_rtcp/source/vp8_partition_aggregator.h"
+
+#include <assert.h>
+#include <stdlib.h>  // NULL
+
+#include <algorithm>
+#include <limits>
+
+namespace webrtc {
+
+PartitionTreeNode::PartitionTreeNode(PartitionTreeNode* parent,
+                                     const int* size_vector,
+                                     int num_partitions,
+                                     int this_size)
+    : parent_(parent),
+      this_size_(this_size),
+      size_vector_(size_vector),
+      num_partitions_(num_partitions),
+      max_parent_size_(0),
+      min_parent_size_(std::numeric_limits<int>::max()),
+      packet_start_(false) {
+  assert(num_partitions >= 0);
+  children_[kLeftChild] = NULL;
+  children_[kRightChild] = NULL;
+}
+
+PartitionTreeNode* PartitionTreeNode::CreateRootNode(const int* size_vector,
+                                                     int num_partitions) {
+  PartitionTreeNode* root_node =
+      new PartitionTreeNode(NULL, &size_vector[1], num_partitions - 1,
+                            size_vector[0]);
+  root_node->set_packet_start(true);
+  return root_node;
+}
+
+PartitionTreeNode::~PartitionTreeNode() {
+  delete children_[kLeftChild];
+  delete children_[kRightChild];
+}
+
+int PartitionTreeNode::Cost(int penalty) {
+  assert(penalty >= 0);
+  int cost = 0;
+  if (num_partitions_ == 0) {
+    // This is a solution node.
+    cost = std::max(max_parent_size_, this_size_) -
+        std::min(min_parent_size_, this_size_);
+  } else {
+    cost = std::max(max_parent_size_, this_size_) - min_parent_size_;
+  }
+  return cost + NumPackets() * penalty;
+}
+
+bool PartitionTreeNode::CreateChildren(int max_size) {
+  assert(max_size > 0);
+  bool children_created = false;
+  if (num_partitions_ > 0) {
+    if (this_size_ + size_vector_[0] <= max_size) {
+      assert(!children_[kLeftChild]);
+      children_[kLeftChild] =
+          new PartitionTreeNode(this,
+                                &size_vector_[1],
+                                num_partitions_ - 1,
+                                this_size_ + size_vector_[0]);
+      children_[kLeftChild]->set_max_parent_size(max_parent_size_);
+      children_[kLeftChild]->set_min_parent_size(min_parent_size_);
+      // "Left" child is continuation of same packet.
+      children_[kLeftChild]->set_packet_start(false);
+      children_created = true;
+    }
+    if (this_size_ > 0) {
+      assert(!children_[kRightChild]);
+      children_[kRightChild] = new PartitionTreeNode(this,
+                                                     &size_vector_[1],
+                                                     num_partitions_ - 1,
+                                                     size_vector_[0]);
+      children_[kRightChild]->set_max_parent_size(
+          std::max(max_parent_size_, this_size_));
+      children_[kRightChild]->set_min_parent_size(
+          std::min(min_parent_size_, this_size_));
+      // "Right" child starts a new packet.
+      children_[kRightChild]->set_packet_start(true);
+      children_created = true;
+    }
+  }
+  return children_created;
+}
+
+int PartitionTreeNode::NumPackets() {
+  if (parent_ == NULL) {
+    // Root node is a "right" child by definition.
+    return 1;
+  }
+  if (parent_->children_[kLeftChild] == this) {
+    // This is a "left" child.
+    return parent_->NumPackets();
+  } else {
+    // This is a "right" child.
+    return 1 + parent_->NumPackets();
+  }
+}
+
+PartitionTreeNode* PartitionTreeNode::GetOptimalNode(int max_size,
+                                                     int penalty) {
+  CreateChildren(max_size);
+  PartitionTreeNode* left = children_[kLeftChild];
+  PartitionTreeNode* right = children_[kRightChild];
+  if ((left == NULL) && (right == NULL)) {
+    // This is a solution node; return it.
+    return this;
+  } else if (left == NULL) {
+    // One child empty, return the other.
+    return right->GetOptimalNode(max_size, penalty);
+  } else if (right == NULL) {
+    // One child empty, return the other.
+    return left->GetOptimalNode(max_size, penalty);
+  } else {
+    PartitionTreeNode* first;
+    PartitionTreeNode* second;
+    if (left->Cost(penalty) <= right->Cost(penalty)) {
+      first = left;
+      second = right;
+    } else {
+      first = right;
+      second = left;
+    }
+    first = first->GetOptimalNode(max_size, penalty);
+    if (second->Cost(penalty) <= first->Cost(penalty)) {
+      second = second->GetOptimalNode(max_size, penalty);
+      // Compare cost estimate for "second" with actual cost for "first".
+      if (second->Cost(penalty) < first->Cost(penalty)) {
+        return second;
+      }
+    }
+    return first;
+  }
+}
+
+Vp8PartitionAggregator::Vp8PartitionAggregator(
+    const RTPFragmentationHeader& fragmentation,
+    int first_partition_idx, int last_partition_idx)
+    : root_(NULL),
+      num_partitions_(last_partition_idx - first_partition_idx + 1),
+      size_vector_(new int[num_partitions_]),
+      largest_partition_size_(0) {
+  assert(first_partition_idx >= 0);
+  assert(last_partition_idx >= first_partition_idx);
+  assert(last_partition_idx < fragmentation.fragmentationVectorSize);
+  for (size_t i = 0; i < num_partitions_; ++i) {
+    size_vector_[i] =
+        fragmentation.fragmentationLength[i + first_partition_idx];
+    largest_partition_size_ = std::max(largest_partition_size_,
+                                       size_vector_[i]);
+  }
+  root_ = PartitionTreeNode::CreateRootNode(size_vector_, num_partitions_);
+}
+
+Vp8PartitionAggregator::~Vp8PartitionAggregator() {
+  delete [] size_vector_;
+  delete root_;
+}
+
+void Vp8PartitionAggregator::SetPriorMinMax(int min_size, int max_size) {
+  assert(root_);
+  assert(min_size >= 0);
+  assert(max_size >= min_size);
+  root_->set_min_parent_size(min_size);
+  root_->set_max_parent_size(max_size);
+}
+
+Vp8PartitionAggregator::ConfigVec
+Vp8PartitionAggregator::FindOptimalConfiguration(int max_size, int penalty) {
+  assert(root_);
+  assert(max_size >= largest_partition_size_);
+  PartitionTreeNode* opt = root_->GetOptimalNode(max_size, penalty);
+  ConfigVec config_vector(num_partitions_, 0);
+  PartitionTreeNode* temp_node = opt;
+  int packet_index = opt->NumPackets() - 1;
+  for (int i = num_partitions_ - 1; i >= 0; --i) {
+    assert(packet_index >= 0);
+    assert(temp_node != NULL);
+    config_vector[i] = packet_index;
+    if (temp_node->packet_start()) --packet_index;
+    temp_node = temp_node->parent();
+  }
+  return config_vector;
+}
+
+void Vp8PartitionAggregator::CalcMinMax(const ConfigVec& config,
+                                        int* min_size, int* max_size) const {
+  if (*min_size < 0) {
+    *min_size = std::numeric_limits<int>::max();
+  }
+  if (*max_size < 0) {
+    *max_size = 0;
+  }
+  unsigned int i = 0;
+  while (i < config.size()) {
+    int this_size = 0;
+    unsigned int j = i;
+    while (j < config.size() && config[i] == config[j]) {
+      this_size += size_vector_[j];
+      ++j;
+    }
+    i = j;
+    if (this_size < *min_size) {
+      *min_size = this_size;
+    }
+    if (this_size > *max_size) {
+      *max_size = this_size;
+    }
+  }
+}
+
+int Vp8PartitionAggregator::CalcNumberOfFragments(int large_partition_size,
+                                                  int max_payload_size,
+                                                  int penalty,
+                                                  int min_size,
+                                                  int max_size) {
+  assert(max_size <= max_payload_size);
+  assert(min_size <= max_size);
+  assert(max_payload_size > 0);
+  // Divisions with rounding up.
+  const int min_number_of_fragments =
+      (large_partition_size + max_payload_size - 1) / max_payload_size;
+  if (min_size < 0 || max_size < 0) {
+    // No aggregates produced, so we do not have any size boundaries.
+    // Simply split in as few partitions as possible.
+    return min_number_of_fragments;
+  }
+  const int max_number_of_fragments =
+      (large_partition_size + min_size - 1) / min_size;
+  int num_fragments = -1;
+  int best_cost = std::numeric_limits<int>::max();
+  for (int n = min_number_of_fragments; n <= max_number_of_fragments; ++n) {
+    // Round up so that we use the largest fragment.
+    int fragment_size = (large_partition_size + n - 1) / n;
+    int cost = 0;
+    if (fragment_size < min_size) {
+      cost = min_size - fragment_size + n * penalty;
+    } else if (fragment_size > max_size) {
+      cost = fragment_size - max_size + n * penalty;
+    } else {
+      cost = n * penalty;
+    }
+    if (fragment_size <= max_payload_size && cost < best_cost) {
+      num_fragments = n;
+      best_cost = cost;
+    }
+  }
+  assert(num_fragments > 0);
+  // TODO(mflodman) Assert disabled since it's falsely triggered, see issue 293.
+  //assert(large_partition_size / num_fragments + 1 <= max_payload_size);
+  return num_fragments;
+}
+
+}  // namespace
+
diff --git a/src/modules/rtp_rtcp/source/vp8_partition_aggregator.h b/src/modules/rtp_rtcp/source/vp8_partition_aggregator.h
new file mode 100644
index 0000000..c5d47de
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/vp8_partition_aggregator.h
@@ -0,0 +1,135 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_SOURCE_VP8_PARTITION_AGGREGATOR_H_
+#define WEBRTC_MODULES_RTP_RTCP_SOURCE_VP8_PARTITION_AGGREGATOR_H_
+
+#include <vector>
+
+#include "modules/interface/module_common_types.h"
+#include "system_wrappers/interface/constructor_magic.h"
+#include "typedefs.h"  // NOLINT(build/include)
+
+namespace webrtc {
+
+// Class used to solve the VP8 aggregation problem.
+class PartitionTreeNode {
+ public:
+  // Create a tree node.
+  PartitionTreeNode(PartitionTreeNode* parent,
+                    const int* size_vector,
+                    int num_partitions,
+                    int this_size);
+
+  // Create a root node.
+  static PartitionTreeNode* CreateRootNode(const int* size_vector,
+                                           int num_partitions);
+
+  ~PartitionTreeNode();
+
+  // Calculate the cost for the node. If the node is a solution node, the cost
+  // will be the actual cost associated with that solution. If not, the cost
+  // will be the cost accumulated so far along the current branch (which is a
+  // lower bound for any solution along the branch).
+  int Cost(int penalty);
+
+  // Create the two children for this node.
+  bool CreateChildren(int max_size);
+
+  // Get the number of packets for the configuration that this node represents.
+  int NumPackets();
+
+  // Find the optimal solution given a maximum packet size and a per-packet
+  // penalty. The method will be recursively called while the solver is
+  // probing down the tree of nodes.
+  PartitionTreeNode* GetOptimalNode(int max_size, int penalty);
+
+  // Setters and getters.
+  void set_max_parent_size(int size) { max_parent_size_ = size; }
+  void set_min_parent_size(int size) { min_parent_size_ = size; }
+  PartitionTreeNode* parent() const { return parent_; }
+  PartitionTreeNode* left_child() const { return children_[kLeftChild]; }
+  PartitionTreeNode* right_child() const { return children_[kRightChild]; }
+  int this_size() const { return this_size_; }
+  bool packet_start() const { return packet_start_; }
+
+ private:
+  enum Children {
+    kLeftChild = 0,
+    kRightChild = 1
+  };
+
+  void set_packet_start(bool value) { packet_start_ = value; }
+
+  PartitionTreeNode* parent_;
+  PartitionTreeNode* children_[2];
+  int this_size_;
+  const int* size_vector_;
+  int num_partitions_;
+  int max_parent_size_;
+  int min_parent_size_;
+  bool packet_start_;
+
+  DISALLOW_COPY_AND_ASSIGN(PartitionTreeNode);
+};
+
+// Class that calculates the optimal aggregation of VP8 partitions smaller than
+// the maximum packet size.
+class Vp8PartitionAggregator {
+ public:
+  typedef std::vector<int> ConfigVec;
+
+  // Constructor. All partitions in the fragmentation header from index
+  // first_partition_idx to last_partition_idx must be smaller than
+  // maximum packet size to be used in FindOptimalConfiguration.
+  Vp8PartitionAggregator(const RTPFragmentationHeader& fragmentation,
+                         int first_partition_idx, int last_partition_idx);
+
+  ~Vp8PartitionAggregator();
+
+  // Set the smallest and largest payload sizes produces so far.
+  void SetPriorMinMax(int min_size, int max_size);
+
+  // Find the aggregation of VP8 partitions that produces the smallest cost.
+  // The result is given as a vector of the same length as the number of
+  // partitions given to the constructor (i.e., last_partition_idx -
+  // first_partition_idx + 1), where each element indicates the packet index
+  // for that partition. Thus, the output vector starts at 0 and is increasing
+  // up to the number of packets - 1.
+  ConfigVec FindOptimalConfiguration(int max_size, int penalty);
+
+  // Calculate minimum and maximum packet sizes for a given aggregation config.
+  // The extreme packet sizes of the given aggregation are compared with the
+  // values given in min_size and max_size, and if either of these are exceeded,
+  // the new extreme value will be written to the corresponding variable.
+  void CalcMinMax(const ConfigVec& config, int* min_size, int* max_size) const;
+
+  // Calculate the number of fragments to divide a large partition into.
+  // The large partition is of size large_partition_size. The payload must not
+  // be larger than max_payload_size. Each fragment comes at an overhead cost
+  // of penalty bytes. If the size of the fragments fall outside the range
+  // [min_size, max_size], an extra cost is inflicted.
+  static int CalcNumberOfFragments(int large_partition_size,
+                                   int max_payload_size,
+                                   int penalty,
+                                   int min_size,
+                                   int max_size);
+
+ private:
+  PartitionTreeNode* root_;
+  size_t num_partitions_;
+  int* size_vector_;
+  int largest_partition_size_;
+
+  DISALLOW_COPY_AND_ASSIGN(Vp8PartitionAggregator);
+};
+}  // namespace
+
+#endif  // WEBRTC_MODULES_RTP_RTCP_SOURCE_VP8_PARTITION_AGGREGATOR_H_
diff --git a/src/modules/rtp_rtcp/source/vp8_partition_aggregator_unittest.cc b/src/modules/rtp_rtcp/source/vp8_partition_aggregator_unittest.cc
new file mode 100644
index 0000000..3c274d1
--- /dev/null
+++ b/src/modules/rtp_rtcp/source/vp8_partition_aggregator_unittest.cc
@@ -0,0 +1,215 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdlib.h>  // NULL
+
+#include "gtest/gtest.h"
+#include "modules/rtp_rtcp/source/vp8_partition_aggregator.h"
+
+namespace webrtc {
+
+TEST(PartitionTreeNode, CreateAndDelete) {
+  const int kVector[] = {1, 2, 3};
+  const int kNumPartitions = sizeof(kVector) / sizeof(kVector[0]);
+  PartitionTreeNode* node1 =
+      PartitionTreeNode::CreateRootNode(kVector, kNumPartitions);
+  PartitionTreeNode* node2 =
+      new PartitionTreeNode(node1, kVector, kNumPartitions, 17);
+  delete node1;
+  delete node2;
+}
+
+TEST(PartitionTreeNode, CreateChildrenAndDelete) {
+  const int kVector[] = {1, 2, 3};
+  const int kNumPartitions = sizeof(kVector) / sizeof(kVector[0]);
+  const int kMaxSize = 10;
+  const int kPenalty = 5;
+  PartitionTreeNode* root =
+      PartitionTreeNode::CreateRootNode(kVector, kNumPartitions);
+  EXPECT_TRUE(root->CreateChildren(kMaxSize));
+  ASSERT_TRUE(NULL != root->left_child());
+  ASSERT_TRUE(NULL != root->right_child());
+  EXPECT_EQ(3, root->left_child()->this_size());
+  EXPECT_EQ(2, root->right_child()->this_size());
+  EXPECT_EQ(11, root->right_child()->Cost(kPenalty));
+  EXPECT_FALSE(root->left_child()->packet_start());
+  EXPECT_TRUE(root->right_child()->packet_start());
+  delete root;
+}
+
+TEST(PartitionTreeNode, FindOptimalConfig) {
+  const int kVector[] = {197, 194, 213, 215, 184, 199, 197, 207};
+  const int kNumPartitions = sizeof(kVector) / sizeof(kVector[0]);
+  const int kMaxSize = 1500;
+  const int kPenalty = 1;
+  PartitionTreeNode* root =
+      PartitionTreeNode::CreateRootNode(kVector, kNumPartitions);
+  root->set_max_parent_size(500);
+  root->set_min_parent_size(300);
+  PartitionTreeNode* opt = root->GetOptimalNode(kMaxSize, kPenalty);
+  ASSERT_TRUE(opt != NULL);
+  EXPECT_EQ(4, opt->NumPackets());
+  // Expect optimal sequence to be {1, 0, 1, 0, 1, 0, 1, 0}, which corresponds
+  // to (right)-left-right-left-right-left-right-left, where the root node is
+  // implicitly a "right" node by definition.
+  EXPECT_TRUE(opt->parent()->parent()->parent()->parent()->parent()->
+              parent()->parent()->packet_start());
+  EXPECT_FALSE(opt->parent()->parent()->parent()->parent()->parent()->
+               parent()->packet_start());
+  EXPECT_TRUE(opt->parent()->parent()->parent()->parent()->parent()->
+              packet_start());
+  EXPECT_FALSE(opt->parent()->parent()->parent()->parent()->packet_start());
+  EXPECT_TRUE(opt->parent()->parent()->parent()->packet_start());
+  EXPECT_FALSE(opt->parent()->parent()->packet_start());
+  EXPECT_TRUE(opt->parent()->packet_start());
+  EXPECT_FALSE(opt->packet_start());
+  EXPECT_TRUE(opt == root->left_child()->right_child()->left_child()->
+              right_child()->left_child()->right_child()->left_child());
+  delete root;
+}
+
+TEST(PartitionTreeNode, FindOptimalConfigSinglePartition) {
+  const int kVector[] = {17};
+  const int kNumPartitions = sizeof(kVector) / sizeof(kVector[0]);
+  const int kMaxSize = 1500;
+  const int kPenalty = 1;
+  PartitionTreeNode* root =
+      PartitionTreeNode::CreateRootNode(kVector, kNumPartitions);
+  PartitionTreeNode* opt = root->GetOptimalNode(kMaxSize, kPenalty);
+  ASSERT_TRUE(opt != NULL);
+  EXPECT_EQ(1, opt->NumPackets());
+  EXPECT_TRUE(opt == root);
+  delete root;
+}
+
+static void VerifyConfiguration(const int* expected_config,
+                                size_t expected_config_len,
+                                const std::vector<int>& opt_config,
+                                const RTPFragmentationHeader& fragmentation) {
+  ASSERT_EQ(expected_config_len, fragmentation.fragmentationVectorSize);
+  EXPECT_EQ(expected_config_len, opt_config.size());
+  for (size_t i = 0; i < expected_config_len; ++i) {
+    EXPECT_EQ(expected_config[i], opt_config[i]);
+  }
+}
+
+static void VerifyMinMax(const Vp8PartitionAggregator& aggregator,
+                         const std::vector<int>& opt_config,
+                         int expected_min,
+                         int expected_max) {
+  int min_size = -1;
+  int max_size = -1;
+  aggregator.CalcMinMax(opt_config, &min_size, &max_size);
+  EXPECT_EQ(expected_min, min_size);
+  EXPECT_EQ(expected_max, max_size);
+}
+
+TEST(Vp8PartitionAggregator, CreateAndDelete) {
+  RTPFragmentationHeader fragmentation;
+  fragmentation.VerifyAndAllocateFragmentationHeader(3);
+  Vp8PartitionAggregator* aggregator =
+      new Vp8PartitionAggregator(fragmentation, 0, 2);
+  delete aggregator;
+}
+
+TEST(Vp8PartitionAggregator, FindOptimalConfig) {
+  RTPFragmentationHeader fragmentation;
+  fragmentation.VerifyAndAllocateFragmentationHeader(8);
+  fragmentation.fragmentationLength[0] = 197;
+  fragmentation.fragmentationLength[1] = 194;
+  fragmentation.fragmentationLength[2] = 213;
+  fragmentation.fragmentationLength[3] = 215;
+  fragmentation.fragmentationLength[4] = 184;
+  fragmentation.fragmentationLength[5] = 199;
+  fragmentation.fragmentationLength[6] = 197;
+  fragmentation.fragmentationLength[7] = 207;
+  Vp8PartitionAggregator* aggregator =
+      new Vp8PartitionAggregator(fragmentation, 0, 7);
+  aggregator->SetPriorMinMax(300, 500);
+  int kMaxSize = 1500;
+  int kPenalty = 1;
+  std::vector<int> opt_config = aggregator->FindOptimalConfiguration(kMaxSize,
+                                                                     kPenalty);
+  const int kExpectedConfig[] = {0, 0, 1, 1, 2, 2, 3, 3};
+  const size_t kExpectedConfigSize =
+      sizeof(kExpectedConfig) / sizeof(kExpectedConfig[0]);
+  VerifyConfiguration(kExpectedConfig, kExpectedConfigSize, opt_config,
+                      fragmentation);
+  VerifyMinMax(*aggregator, opt_config, 383, 428);
+  // Change min and max and run method again. This time, we expect it to leave
+  // the values unchanged.
+  int min_size = 382;
+  int max_size = 429;
+  aggregator->CalcMinMax(opt_config, &min_size, &max_size);
+  EXPECT_EQ(382, min_size);
+  EXPECT_EQ(429, max_size);
+  delete aggregator;
+}
+
+TEST(Vp8PartitionAggregator, FindOptimalConfigEqualFragments) {
+  RTPFragmentationHeader fragmentation;
+  fragmentation.VerifyAndAllocateFragmentationHeader(8);
+  fragmentation.fragmentationLength[0] = 200;
+  fragmentation.fragmentationLength[1] = 200;
+  fragmentation.fragmentationLength[2] = 200;
+  fragmentation.fragmentationLength[3] = 200;
+  fragmentation.fragmentationLength[4] = 200;
+  fragmentation.fragmentationLength[5] = 200;
+  fragmentation.fragmentationLength[6] = 200;
+  fragmentation.fragmentationLength[7] = 200;
+  Vp8PartitionAggregator* aggregator =
+      new Vp8PartitionAggregator(fragmentation, 0, 7);
+  int kMaxSize = 1500;
+  int kPenalty = 1;
+  std::vector<int> opt_config = aggregator->FindOptimalConfiguration(kMaxSize,
+                                                                     kPenalty);
+  const int kExpectedConfig[] = {0, 0, 0, 0, 1, 1, 1, 1};
+  const size_t kExpectedConfigSize =
+      sizeof(kExpectedConfig) / sizeof(kExpectedConfig[0]);
+  VerifyConfiguration(kExpectedConfig, kExpectedConfigSize, opt_config,
+                      fragmentation);
+  VerifyMinMax(*aggregator, opt_config, 800, 800);
+  delete aggregator;
+}
+
+TEST(Vp8PartitionAggregator, FindOptimalConfigSinglePartition) {
+  RTPFragmentationHeader fragmentation;
+  fragmentation.VerifyAndAllocateFragmentationHeader(1);
+  fragmentation.fragmentationLength[0] = 17;
+  Vp8PartitionAggregator* aggregator =
+      new Vp8PartitionAggregator(fragmentation, 0, 0);
+  int kMaxSize = 1500;
+  int kPenalty = 1;
+  std::vector<int> opt_config = aggregator->FindOptimalConfiguration(kMaxSize,
+                                                                     kPenalty);
+  const int kExpectedConfig[] = {0};
+  const size_t kExpectedConfigSize =
+      sizeof(kExpectedConfig) / sizeof(kExpectedConfig[0]);
+  VerifyConfiguration(kExpectedConfig, kExpectedConfigSize, opt_config,
+                      fragmentation);
+  VerifyMinMax(*aggregator, opt_config, 17, 17);
+  delete aggregator;
+}
+
+TEST(Vp8PartitionAggregator, TestCalcNumberOfFragments) {
+  const int kMTU = 1500;
+  EXPECT_EQ(2,
+            Vp8PartitionAggregator::CalcNumberOfFragments(
+                1600, kMTU, 1, 300, 900));
+  EXPECT_EQ(3,
+            Vp8PartitionAggregator::CalcNumberOfFragments(
+                1600, kMTU, 1, 300, 798));
+  EXPECT_EQ(2,
+            Vp8PartitionAggregator::CalcNumberOfFragments(
+                1600, kMTU, 1, 900, 1000));
+}
+
+}  // namespace
+
diff --git a/src/modules/rtp_rtcp/test/BWEStandAlone/BWEConvergenceTest.cc b/src/modules/rtp_rtcp/test/BWEStandAlone/BWEConvergenceTest.cc
new file mode 100644
index 0000000..1a55e4e
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/BWEStandAlone/BWEConvergenceTest.cc
@@ -0,0 +1,66 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "BWEConvergenceTest.h"
+
+#include <fstream>
+#include <string>
+
+#include "TestSenderReceiver.h"
+#include "TestLoadGenerator.h"
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+#include "tick_util.h"
+
+
+BWEConvergenceTestUp::BWEConvergenceTestUp(std::string testName, int startRateKbps, int availBWkbps)
+:
+_availBWkbps(availBWkbps),
+BWEOneWayTest(testName, startRateKbps)
+{
+}
+
+
+BWEConvergenceTestUp::~BWEConvergenceTestUp()
+{
+    if (_gen)
+    {
+        delete _gen;
+        _gen = NULL;
+    }
+}
+
+
+int BWEConvergenceTestUp::Init(std::string ip, WebRtc_UWord16 port)
+{
+    // create the load generator object
+    const int rtpSampleRate = 90000;
+    const int frameRate = 30;
+    const double spreadFactor = 0.2;
+
+    if (_master)
+    {
+        _gen = new CBRFixFRGenerator(_sendrec, _startRateKbps, rtpSampleRate, frameRate, spreadFactor);
+        if (!_gen)
+        {
+            return (-1);
+        }
+    }
+
+    return BWEOneWayTest::Init(ip, port);
+}
+
+
+bool BWEConvergenceTestUp::StoppingCriterionMaster()
+{
+    return ((_sendrec->BitrateSent() / 1000.0) > (0.9 * _availBWkbps));
+}
+
+
diff --git a/src/modules/rtp_rtcp/test/BWEStandAlone/BWEConvergenceTest.h b/src/modules/rtp_rtcp/test/BWEStandAlone/BWEConvergenceTest.h
new file mode 100644
index 0000000..b830d14
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/BWEStandAlone/BWEConvergenceTest.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWECONVERGENCETEST_H_
+#define WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWECONVERGENCETEST_H_
+
+#include <string>
+
+#include "BWETestBase.h"
+
+#include "typedefs.h"
+
+#include "TestSenderReceiver.h"
+
+class BWEConvergenceTestUp : public BWEOneWayTest
+{
+public:
+    BWEConvergenceTestUp(std::string testName, int startRateKbps, int availBWkbps);
+    virtual ~BWEConvergenceTestUp();
+
+    virtual int Init(std::string ip, WebRtc_UWord16 port);
+
+protected:
+    virtual bool StoppingCriterionMaster();
+
+private:
+    int _availBWkbps;
+};
+
+
+#endif // WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWECONVERGENCETEST_H_
diff --git a/src/modules/rtp_rtcp/test/BWEStandAlone/BWEStabilityTest.cc b/src/modules/rtp_rtcp/test/BWEStandAlone/BWEStabilityTest.cc
new file mode 100644
index 0000000..1fd19fe
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/BWEStandAlone/BWEStabilityTest.cc
@@ -0,0 +1,84 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <fstream>
+#include <math.h>
+
+#include "BWEStabilityTest.h"
+#include "TestLoadGenerator.h"
+#include "tick_util.h"
+#include "critical_section_wrapper.h"
+
+
+BWEStabilityTest::BWEStabilityTest(std::string testName, int rateKbps, int testDurationSeconds)
+:
+_testDurationSeconds(testDurationSeconds),
+BWEOneWayTest(testName, rateKbps)
+{
+}
+
+
+BWEStabilityTest::~BWEStabilityTest()
+{
+    if (_gen)
+    {
+        delete _gen;
+        _gen = NULL;
+    }
+}
+
+
+int BWEStabilityTest::Init(std::string ip, WebRtc_UWord16 port)
+{
+    // create the load generator object
+    const int rtpSampleRate = 90000;
+    const int frameRate = 30;
+    const double spreadFactor = 0.2;
+    const double keyToDeltaRatio = 7;
+    const int keyFramePeriod = 300;
+
+    if (_master)
+    {
+        _gen = new CBRFixFRGenerator(_sendrec, _startRateKbps, rtpSampleRate, frameRate, spreadFactor);
+        //_gen = new PeriodicKeyFixFRGenerator(_sendrec, _startRateKbps, rtpSampleRate, frameRate,
+        //                                     spreadFactor, keyToDeltaRatio, keyFramePeriod);
+        if (!_gen)
+        {
+            return (-1);
+        }
+
+    }
+
+    return BWEOneWayTest::Init(ip, port);
+}
+
+
+void BWEStabilityTest::Report(std::fstream &log)
+{
+    // cannot report on a running test
+    if(_running) return;
+
+    BWETest::Report(log);
+
+    CriticalSectionScoped cs(_statCritSect);
+
+    log << "Bitrate statistics\n";
+    log << "\tAverage = " <<  _rateVecKbps.Mean() << " kbps\n";
+    log << "\tMin     = " <<  _rateVecKbps.Min() << " kbps\n";
+    log << "\tMax     = " <<  _rateVecKbps.Max() << " kbps\n";
+    log << "\tStd     = " <<  _rateVecKbps.Std() << " kbps\n";
+
+}
+
+
+bool BWEStabilityTest::StoppingCriterionMaster()
+{
+    return (TickTime::MillisecondTimestamp() - _startTimeMs >= _testDurationSeconds * 1000);
+}
diff --git a/src/modules/rtp_rtcp/test/BWEStandAlone/BWEStabilityTest.h b/src/modules/rtp_rtcp/test/BWEStandAlone/BWEStabilityTest.h
new file mode 100644
index 0000000..8f213b1
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/BWEStandAlone/BWEStabilityTest.h
@@ -0,0 +1,39 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWESTABILITYTEST_H_
+#define WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWESTABILITYTEST_H_
+
+#include <string>
+
+#include "BWETestBase.h"
+
+#include "typedefs.h"
+
+#include "TestSenderReceiver.h"
+
+class BWEStabilityTest : public BWEOneWayTest
+{
+public:
+    BWEStabilityTest(std::string testName, int rateKbps, int testDurationSeconds);
+    virtual ~BWEStabilityTest();
+
+    virtual int Init(std::string ip, WebRtc_UWord16 port);
+    virtual void Report(std::fstream &log);
+
+protected:
+    virtual bool StoppingCriterionMaster();
+
+private:
+    int _testDurationSeconds;
+};
+
+
+#endif // WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWESTABILITYTEST_H_
diff --git a/src/modules/rtp_rtcp/test/BWEStandAlone/BWEStandAlone.cc b/src/modules/rtp_rtcp/test/BWEStandAlone/BWEStandAlone.cc
new file mode 100644
index 0000000..471ea5f
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/BWEStandAlone/BWEStandAlone.cc
@@ -0,0 +1,200 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// BWEStandAlone.cpp : Defines the entry point for the console application.
+//
+
+#include <string>
+#include <stdio.h>
+
+#include "event_wrapper.h"
+#include "udp_transport.h"
+#include "rtp_rtcp.h"
+#include "trace.h"
+
+#include "TestSenderReceiver.h"
+#include "TestLoadGenerator.h"
+
+#include "MatlabPlot.h"
+
+//#include "vld.h"
+
+class myTransportCB: public UdpTransportData
+{
+public:
+    myTransportCB (RtpRtcp *rtpMod) : _rtpMod(rtpMod) {};
+protected:
+    // Inherited from UdpTransportData
+    virtual void IncomingRTPPacket(const WebRtc_Word8* incomingRtpPacket,
+        const WebRtc_Word32 rtpPacketLength,
+        const WebRtc_Word8* fromIP,
+        const WebRtc_UWord16 fromPort);
+
+    virtual void IncomingRTCPPacket(const WebRtc_Word8* incomingRtcpPacket,
+        const WebRtc_Word32 rtcpPacketLength,
+        const WebRtc_Word8* fromIP,
+        const WebRtc_UWord16 fromPort);
+
+private:
+    RtpRtcp *_rtpMod;
+};
+
+void myTransportCB::IncomingRTPPacket(const WebRtc_Word8* incomingRtpPacket,
+                                      const WebRtc_Word32 rtpPacketLength,
+                                      const WebRtc_Word8* fromIP,
+                                      const WebRtc_UWord16 fromPort)
+{
+    printf("Receiving RTP from IP %s, port %u\n", fromIP, fromPort);
+    _rtpMod->IncomingPacket((WebRtc_UWord8 *) incomingRtpPacket, static_cast<WebRtc_UWord16>(rtpPacketLength));
+}
+
+void myTransportCB::IncomingRTCPPacket(const WebRtc_Word8* incomingRtcpPacket,
+                                       const WebRtc_Word32 rtcpPacketLength,
+                                       const WebRtc_Word8* fromIP,
+                                       const WebRtc_UWord16 fromPort)
+{
+    printf("Receiving RTCP from IP %s, port %u\n", fromIP, fromPort);
+    _rtpMod->IncomingPacket((WebRtc_UWord8 *) incomingRtcpPacket, static_cast<WebRtc_UWord16>(rtcpPacketLength));
+}
+
+
+int main(int argc, char* argv[])
+{
+    bool isSender = false;
+    bool isReceiver = false;
+    WebRtc_UWord16 port;
+    std::string ip;
+    TestSenderReceiver *sendrec = new TestSenderReceiver();
+    TestLoadGenerator *gen;
+
+    if (argc == 2)
+    {
+        // receiver only
+        isReceiver = true;
+
+        // read port
+        port = atoi(argv[1]);
+    }
+    else if (argc == 3)
+    {
+        // sender and receiver
+        isSender = true;
+        isReceiver = true;
+
+        // read IP
+        ip = argv[1];
+
+        // read port
+        port = atoi(argv[2]);
+    }
+
+    Trace::CreateTrace();
+    Trace::SetTraceFile("BWEStandAloneTrace.txt");
+    Trace::SetLevelFilter(webrtc::kTraceAll);
+
+    sendrec->InitReceiver(port);
+
+    sendrec->Start();
+
+    if (isSender)
+    {
+        const WebRtc_UWord32 startRateKbps = 1000;
+        //gen = new CBRGenerator(sendrec, 1000, 500);
+        gen = new CBRFixFRGenerator(sendrec, startRateKbps, 90000, 30, 0.2);
+        //gen = new PeriodicKeyFixFRGenerator(sendrec, startRateKbps, 90000, 30, 0.2, 7, 300);
+        //const WebRtc_UWord16 numFrameRates = 5;
+        //const WebRtc_UWord8 frameRates[numFrameRates] = {30, 15, 20, 23, 25};
+        //gen = new CBRVarFRGenerator(sendrec, 1000, frameRates, numFrameRates, 90000, 4.0, 0.1, 0.2);
+        //gen = new CBRFrameDropGenerator(sendrec, startRateKbps, 90000, 0.2);
+        sendrec->SetLoadGenerator(gen);
+        sendrec->InitSender(startRateKbps, ip.c_str(), port);
+        gen->Start();
+    }
+
+    while (1)
+    {
+    }
+
+    if (isSender)
+    {
+        gen->Stop();
+        delete gen;
+    }
+
+    delete sendrec;
+
+    //WebRtc_UWord8 numberOfSocketThreads = 1;
+    //UdpTransport* transport = UdpTransport::Create(0, numberOfSocketThreads);
+
+    //RtpRtcp* rtp = RtpRtcp::CreateRtpRtcp(1, false);
+    //if (rtp->InitSender() != 0)
+    //{
+    //    exit(1);
+    //}
+    //if (rtp->RegisterSendTransport(transport) != 0)
+    //{
+    //    exit(1);
+    //}
+
+//    transport->InitializeSendSockets("192.168.200.39", 8000);
+    //transport->InitializeSendSockets("127.0.0.1", 10000);
+    //transport->InitializeSourcePorts(8000);
+
+
+    return(0);
+ //   myTransportCB *tp = new myTransportCB(rtp);
+ //   transport->InitializeReceiveSockets(tp, 10000, "0.0.0.0");
+ //   transport->StartReceiving(500);
+
+ //   WebRtc_Word8 data[100];
+ //   for (int i = 0; i < 100; data[i] = i++);
+
+ //   for (int i = 0; i < 100; i++)
+ //   {
+ //       transport->SendRaw(data, 100, false);
+ //   }
+
+
+
+ //   WebRtc_Word32 totTime = 0;
+ //   while (totTime < 10000)
+ //   {
+ //       transport->Process();
+ //       WebRtc_Word32 wTime = transport->TimeUntilNextProcess();
+ //       totTime += wTime;
+ //       Sleep(wTime);
+ //   }
+
+
+    //if (transport)
+    //{
+    //    // Destroy the Socket Transport module
+    //    transport->StopReceiving();
+    //    transport->InitializeReceiveSockets(NULL,0);// deregister callback
+ //       UdpTransport::Destroy(transport);
+    //    transport = NULL;
+ //   }
+
+ //   if (tp)
+ //   {
+ //       delete tp;
+ //       tp = NULL;
+ //   }
+
+ //   if (rtp)
+ //   {
+ //       RtpRtcp::DestroyRtpRtcp(rtp);
+ //       rtp = NULL;
+ //   }
+
+
+    //return 0;
+}
+
diff --git a/src/modules/rtp_rtcp/test/BWEStandAlone/BWETestBase.cc b/src/modules/rtp_rtcp/test/BWEStandAlone/BWETestBase.cc
new file mode 100644
index 0000000..2940abd
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/BWEStandAlone/BWETestBase.cc
@@ -0,0 +1,453 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "BWETestBase.h"
+
+#include <algorithm> // sort
+#include <fstream>
+#include <string>
+#include <vector>
+#include <math.h>
+
+#include "TestSenderReceiver.h"
+#include "TestLoadGenerator.h"
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+#include "tick_util.h"
+#include "critical_section_wrapper.h"
+
+
+double StatVec::Mean()
+{
+    double sum = 0;
+
+    // sanity
+    if (size() <= 0) return (0);
+
+    std::vector<double>::iterator it;
+    for (it = begin(); it < end(); ++it)
+    {
+        sum += (*it);
+    }
+
+    return (sum / size());
+}
+
+double StatVec::Variance()
+{
+    double sumSqaure = 0;
+    double sum = 0;
+
+    std::vector<double>::iterator it;
+    for (it = begin(); it < end(); ++it)
+    {
+        sum += (*it);
+        sumSqaure += (*it) * (*it);
+    }
+
+    // Normalizes by N-1. This produces the best unbiased estimate of the
+    // variance if X is a sample from a normal distribution.
+    int M = static_cast<int> (size() - 1);
+
+    if (M > 0)
+    {
+        double var = (sumSqaure / M) - (sum / (M+1)) * (sum / M);
+        assert(var >= 0);
+        return (var);
+    }
+    else
+    {
+        return (0);
+    }
+}
+
+double StatVec::Std()
+{
+    return (sqrt(Variance()));
+}
+
+double StatVec::Max()
+{
+    // sanity
+    if (size() <= 0) return (0);
+
+    std::vector<double>::iterator it = begin();
+    double maxVal = (*it);
+    ++it;
+
+    for (; it < end(); ++it)
+    {
+        if ((*it) > maxVal) maxVal = (*it);
+    }
+
+    return (maxVal);
+}
+
+double StatVec::Min()
+{
+    // sanity
+    if (size() <= 0) return (0);
+
+    std::vector<double>::iterator it = begin();
+    double minVal = (*it);
+    ++it;
+
+    for (; it < end(); ++it)
+    {
+        if ((*it) < minVal) minVal = (*it);
+    }
+
+    return (minVal);
+}
+
+double StatVec::Median()
+{
+    double median;
+
+    // sanity
+    if (size() <= 0) return (0);
+
+    // sort the vector
+    sort(begin(), end());
+
+    if ((size() % 2) == 0)
+    {
+        // even size; use average of two center elements
+        median = (at(size()/2 - 1) + at(size()/2)) / 2.0;
+    }
+    else
+    {
+        // odd size; take center element
+        median = at(size()/2);
+    }
+
+    return (median);
+}
+
+double StatVec::Percentile(double p)
+{
+    // sanity
+    if (size() <= 0) return (0);
+
+    // sort the vector
+    sort(begin(), end());
+
+    int rank = static_cast<int> (((size() - 1) * p) / 100 + 0.5); // between 1 and size()
+    rank -= 1; // between 0 and size()-1
+
+    assert(rank >= 0);
+    assert(rank < static_cast<int>(size()));
+
+    return (at(rank));
+}
+
+void StatVec::Export(std::fstream &file, bool colVec /*= false*/)
+{
+    // sanity
+    if (size() <= 0) return;
+
+    std::string separator;
+    if (colVec) separator = "\n";
+    else separator = ", ";
+
+    std::vector<double>::iterator it = begin();
+    file << (*it);
+    ++it;
+
+    for (; it < end(); ++it)
+    {
+        file << separator << (*it);
+    }
+
+    file << std::endl;
+}
+
+
+bool BWETestProcThreadFunction(void *obj)
+{
+    if (obj == NULL)
+    {
+        return false;
+    }
+    BWETest *theObj = static_cast<BWETest *>(obj);
+
+    theObj->ProcLoop();
+
+    theObj->Stop();
+
+    return(true);
+}
+
+
+BWETest::BWETest(std::string testName, int startRateKbps):
+_testName(testName),
+_startRateKbps(startRateKbps),
+_master(false),
+_sendrec(NULL),
+_gen(NULL),
+_initialized(false),
+_started(false),
+_running(false),
+_eventPtr(NULL),
+_procThread(NULL),
+_startTimeMs(-1),
+_stopTimeMs(-1),
+_statCritSect(CriticalSectionWrapper::CreateCriticalSection())
+{
+    _sendrec = new TestSenderReceiver();
+}
+
+
+BWETest::~BWETest()
+{
+    if (_running)
+    {
+        Stop();
+    }
+
+    _statCritSect->Enter();
+    delete &_statCritSect;
+
+    if (_sendrec)
+    {
+        delete _sendrec;
+        _sendrec = NULL;
+    }
+}
+
+
+bool BWETest::SetMaster(bool isMaster /*= true*/)
+{
+    if (!_initialized)
+    {
+        // Can only set status before initializing.
+        _master = isMaster;
+    }
+
+    return (_master);
+}
+
+
+int BWETest::Init(std::string ip, WebRtc_UWord16 port)
+{
+    if (_initialized)
+    {
+        // cannot init twice
+        return (-1);
+    }
+
+    if (!_sendrec)
+    {
+        throw "SenderReceiver must be created";
+        exit(1);
+    }
+
+    if (_started)
+    {
+        // cannot init after start
+        return (-1);
+    }
+
+    // initialize receiver port (for feedback)
+    _sendrec->InitReceiver(port);
+
+    // initialize sender
+    _sendrec->SetLoadGenerator(_gen);
+    _sendrec->InitSender(_startRateKbps, ip.c_str(), port);
+    //_gen->Start();
+
+    _sendrec->SetCallback(this);
+
+    _initialized = true;
+
+    return 0;
+}
+
+
+bool BWETest::Start()
+{
+    if (!_initialized)
+    {
+        // must init first
+        return (false);
+    }
+    if (_started)
+    {
+        // already started, do nothing
+        return (true);
+    }
+
+    if (_sendrec->Start() != 0)
+    {
+        // failed
+        return (false);
+    }
+
+    if (_gen)
+    {
+        if (_gen->Start() != 0)
+        {
+            // failed
+            return (false);
+        }
+    }
+
+    _eventPtr = EventWrapper::Create();
+
+    _startTimeMs = TickTime::MillisecondTimestamp();
+    _started = true;
+    _running = true;
+
+    return (true);
+}
+
+
+bool BWETest::Stop()
+{
+    if (_procThread)
+    {
+        _stopTimeMs = TickTime::MillisecondTimestamp();
+        _procThread->SetNotAlive();
+        _running = false;
+        _eventPtr->Set();
+
+        while (!_procThread->Stop())
+        {
+            ;
+        }
+
+        delete _procThread;
+        _procThread = NULL;
+
+    }
+
+    if (_eventPtr)
+    {
+        delete _eventPtr;
+        _eventPtr = NULL;
+    }
+
+    _procThread = NULL;
+
+    if(_gen)
+    {
+        _gen->Stop();
+    }
+
+    return(true);
+}
+
+
+bool BWETest::ProcLoop(void)
+{
+    bool receiving = false;
+
+    // no critSect
+    while (_running)
+    {
+
+        // check stopping criterions
+        if (_master && StoppingCriterionMaster())
+        {
+            printf("StoppingCriterionMaster()\n");
+            _stopTimeMs = TickTime::MillisecondTimestamp();
+            _running = false;
+        }
+        else if (!_master && StoppingCriterionSlave())
+        {
+            printf("StoppingCriterionSlave()\n");
+            _running = false;
+        }
+
+        // wait
+        _eventPtr->Wait(1000); // 1000 ms
+
+    }
+
+    return true;
+}
+
+
+void BWETest::Report(std::fstream &log)
+{
+    // cannot report on a running test
+    if(_running) return;
+
+    CriticalSectionScoped cs(_statCritSect);
+
+    log << "\n\n*** Test name = " << _testName << "\n";
+    log << "Execution time = " <<  static_cast<double>(_stopTimeMs - _startTimeMs) / 1000 << " s\n";
+    log << "\n";
+    log << "RTT statistics\n";
+    log << "\tMin     = " << _rttVecMs.Min() << " ms\n";
+    log << "\tMax     = " << _rttVecMs.Max() << " ms\n";
+    log << "\n";
+    log << "Loss statistics\n";
+    log << "\tAverage = " << _lossVec.Mean() << "%\n";
+    log << "\tMax     = " << _lossVec.Max() << "%\n";
+
+    log << "\n" << "Rates" << "\n";
+    _rateVecKbps.Export(log);
+
+    log << "\n" << "RTT" << "\n";
+    _rttVecMs.Export(log);
+
+}
+
+
+// SenderReceiver callback
+void BWETest::OnOnNetworkChanged(const WebRtc_UWord32 bitrateTargetBps,
+                                 const WebRtc_UWord8 fractionLost,
+                                 const WebRtc_UWord16 roundTripTimeMs,
+                                 const WebRtc_UWord32 jitterMS,
+                                 const WebRtc_UWord16 bwEstimateKbitMin,
+                                 const WebRtc_UWord16 bwEstimateKbitMax)
+{
+    CriticalSectionScoped cs(_statCritSect);
+
+    // bitrate statistics
+    WebRtc_Word32 newBitrateKbps = bitrateTargetBps/1000;
+
+    _rateVecKbps.push_back(newBitrateKbps);
+    _rttVecMs.push_back(roundTripTimeMs);
+    _lossVec.push_back(static_cast<double>(fractionLost) / 255.0);
+}
+
+
+int BWEOneWayTest::Init(std::string ip, WebRtc_UWord16 port)
+{
+
+    if (!_master)
+    {
+        // Use timeout stopping criterion by default for receiver
+        UseRecvTimeout();
+    }
+
+    return (BWETest::Init(ip, port));
+
+}
+
+
+bool BWEOneWayTest::Start()
+{
+    bool ret = BWETest::Start();
+
+    if (!_master)
+    {
+        // send one dummy RTP packet to enable RTT measurements
+        const WebRtc_UWord8 dummy = 0;
+        //_gen->sendPayload(TickTime::MillisecondTimestamp(), &dummy, 0);
+        _sendrec->SendOutgoingData(
+            static_cast<WebRtc_UWord32>(TickTime::MillisecondTimestamp()*90),
+            &dummy, 1, webrtc::kVideoFrameDelta);
+    }
+
+    return ret;
+}
diff --git a/src/modules/rtp_rtcp/test/BWEStandAlone/BWETestBase.h b/src/modules/rtp_rtcp/test/BWEStandAlone/BWETestBase.h
new file mode 100644
index 0000000..bab1b94
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/BWEStandAlone/BWETestBase.h
@@ -0,0 +1,102 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWETESTBASE_H_
+#define WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWETESTBASE_H_
+
+#include <string>
+#include <vector>
+
+#include "typedefs.h"
+
+#include "TestSenderReceiver.h"
+
+
+class StatVec : public std::vector<double>
+{
+public:
+    double Mean();
+    double Variance();
+    double Std();
+    double Max();
+    double Min();
+    double Median();
+    double Percentile(double p); // 0 <= p <= 100%
+    void Export(std::fstream &file, bool colVec = false);
+};
+
+
+class BWETest : public SendRecCB
+{
+public:
+    BWETest(std::string testName, int startRateKbps);
+    virtual ~BWETest();
+
+    bool SetMaster(bool isMaster = true);
+    void UseRecvTimeout() { _sendrec->SetPacketTimeout(1000); };
+    virtual int Init(std::string ip, WebRtc_UWord16 port);
+    virtual bool Start();
+    virtual bool Stop();
+    bool ProcLoop(void);
+    virtual void Report(std::fstream &log);
+    std::string TestName() { return (_testName); };
+
+    // SenderReceiver callback
+    virtual void OnOnNetworkChanged(const WebRtc_UWord32 bitrateTargetBps,
+        const WebRtc_UWord8 fractionLost,
+        const WebRtc_UWord16 roundTripTimeMs,
+        const WebRtc_UWord32 jitterMS,
+        const WebRtc_UWord16 bwEstimateKbitMin,
+        const WebRtc_UWord16 bwEstimateKbitMax);
+
+
+protected:
+    virtual bool StoppingCriterionMaster() = 0;
+    virtual bool StoppingCriterionSlave() { return (_sendrec->timeOutTriggered()); };
+
+    TestSenderReceiver * _sendrec;
+    TestLoadGenerator * _gen;
+
+    std::string _testName;
+    int _startRateKbps;
+    bool _master;
+    bool _initialized;
+    bool _started;
+    bool _running;
+    EventWrapper *_eventPtr;
+    ThreadWrapper* _procThread;
+    WebRtc_Word64 _startTimeMs;
+    WebRtc_Word64 _stopTimeMs;
+
+    // Statistics, protected by separate CritSect
+    CriticalSectionWrapper* _statCritSect;
+    StatVec _rateVecKbps;
+    StatVec _rttVecMs;
+    StatVec _lossVec;
+};
+
+
+class BWEOneWayTest : public BWETest
+{
+public:
+    BWEOneWayTest(std::string testName, int startRateKbps) :
+      BWETest(testName, startRateKbps) {};
+
+    virtual int Init(std::string ip, WebRtc_UWord16 port);
+    virtual bool Start();
+
+protected:
+    virtual bool StoppingCriterionSlave() {return ( _sendrec->timeOutTriggered()); };
+
+private:
+
+};
+
+#endif // WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWETESTBASE_H_
diff --git a/src/modules/rtp_rtcp/test/BWEStandAlone/BWETester.cc b/src/modules/rtp_rtcp/test/BWEStandAlone/BWETester.cc
new file mode 100644
index 0000000..f1d79fe
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/BWEStandAlone/BWETester.cc
@@ -0,0 +1,274 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// BWETester.cpp : Defines the entry point for the console application.
+//
+
+#include <fstream>
+#include <string>
+#include <iostream>
+#include <ctime>
+
+#include "event_wrapper.h"
+#include "trace.h"
+
+#include "BWEStabilityTest.h"
+#include "BWEConvergenceTest.h"
+#include "BWETwoWayLimitFinding.h"
+
+#include "MatlabPlot.h"
+
+//#include "vld.h"
+
+#ifdef MATLAB
+MatlabEngine eng;
+#endif
+
+
+class testContainer
+{
+public:
+    testContainer(BWETest *test, bool waitForKeyStroke, int delayStartSec,
+        std::string instruction) :
+    _test(test),
+        _waitMaster(waitForKeyStroke),
+        _waitSlave(waitForKeyStroke),
+        _delayMaster(delayStartSec),
+        _delaySlave(delayStartSec),
+        _instr(instruction) {};
+
+    testContainer(BWETest *test,
+        bool waitForKeyStrokeMaster,
+        bool waitForKeyStrokeSlave,
+        int delayStartSecMaster,
+        int delayStartSecSlave,
+        std::string instruction) :
+    _test(test),
+        _waitMaster(waitForKeyStrokeMaster),
+        _waitSlave(waitForKeyStrokeSlave),
+        _delayMaster(delayStartSecMaster),
+        _delaySlave(delayStartSecSlave),
+        _instr(instruction) {};
+
+    ~testContainer() { if(_test) delete _test; _test = NULL; };
+
+    BWETest *_test;
+    bool _waitMaster;
+    bool _waitSlave;
+    int _delayMaster;
+    int _delaySlave;
+    std::string _instr;
+};
+
+
+// This is were the test cases are created.
+// Syntax:
+// tests->push_back(new testContainer(
+//    new _BWETestConstructor_, // constructor for the test case
+//    _wait_,                   // wait for user key press before start
+//    _delay_,                  // delay test start (after a key press if enabled)
+//    "Intruction to user."));  // message to show in console before starting
+//
+// Or:
+// tests->push_back(new testContainer(
+//    new _BWETestConstructor_, // constructor for the test case
+//    _waitMaster_,             // master will wait for user key press before start
+//    _waitSlave_,              // slave will wait for user key press before start
+//    _delayMaster_,            // delay master test start (after a key press if enabled)
+//    _delaySlave_,             // delay slave test start (after a key press if enabled)
+//    "Intruction to user."));  // message to show in console before starting
+//
+// Valid test cases are:
+// BWEConvergenceTestUp
+// BWEStabilityTest
+// BWETwoWayLimitFinding
+
+
+void PopulateTests(std::vector<testContainer *>* tests, bool isMaster)
+{
+    tests->push_back(new testContainer(
+        new BWEStabilityTest("Stability", 400, 5*60),
+        true, true,
+        0, 0,
+        "Set bandwidth limit to 512 kbps"));
+
+    tests->push_back(new testContainer(
+        new BWEStabilityTest("Stability", 4000, 5*60),
+        true, true,
+        0, 0,
+        "Set bandwidth limit to 5120 kbps"));
+
+    tests->push_back(new testContainer(
+        new BWEStabilityTest("Stability", 400, 5*60),
+        true, true,
+        0, 0,
+        "Set bandwidth limit to 512 kbps and a normal distributed delay\
+        with mean 100 ms and std dev 15 ms"));
+
+    tests->push_back(new testContainer(
+        new BWEConvergenceTestUp("Convergence 256->512", 256, 512),
+        true,
+        0,
+        "Set bandwith limit to 512 kbps"));
+
+        tests->push_back(new testContainer(
+        new BWEConvergenceTestUp("Convergence 1024->5120", 1024, 5120),
+        true,
+        0,
+        "Set bandwith limit to 5120 kbps"));
+
+    tests->push_back(new testContainer(
+        new BWETwoWayLimitFinding("Asymmetric limit finding {1024, 2048} kbps",
+        500, 1024,
+        500, 2048,
+        isMaster),
+        true,
+        0,
+        "Set bandwith limit to {1024, 2048} kbps asymmetric"));
+
+    tests->push_back(new testContainer(
+        new BWETwoWayLimitFinding("Symmetric limit finding {1024, 1024} kbps",
+        500, 1024,
+        500, 1024,
+        isMaster),
+        true,
+        0,
+        "Set bandwith limit to 1024 kbps symmetric"));
+}
+
+
+int main(int argc, char* argv[])
+{
+
+    bool isMaster = false;
+    WebRtc_UWord16 port;
+    std::string ip;
+    std::fstream log;
+    log.open("TestLog.txt", std::fstream::out | std::fstream::app);
+
+    log << "\n\nBWE TESTER\n";
+
+    time_t t = time(0);   // get time now
+    struct tm * now = localtime( & t );
+    log << (now->tm_year + 1900) << '-'
+        << (now->tm_mon + 1) << '-'
+        <<  now->tm_mday << " "
+        <<  now->tm_hour << ":" << now->tm_min
+        << "\n";
+
+    if (argc == 4)
+    {
+        // read IP
+        ip = argv[1];
+
+        // read port
+        port = atoi(argv[2]);
+
+        // read master/slave
+        isMaster = (atoi(argv[3]) != 0);
+
+        std::cout << "Destination: " << ip << "\n";
+        log << "Destination: " << ip << "\n";
+        std::cout << "Port: " << port << "\n";
+        log << "Port: " << port << "\n";
+        if (isMaster)
+        {
+            std::cout << "Master\n";
+            log << "Master\n";
+        }
+        else
+        {
+            std::cout << "Slave\n";
+            log << "Slave\n";
+        }
+
+    }
+    else
+    {
+        printf("Usage\nBWETester dstIP port master\n");
+        exit(1);
+    }
+
+    std::vector<testContainer*> tests;
+    PopulateTests(&tests, isMaster);
+
+    int testIndex = 0;
+    EventWrapper* event = EventWrapper::Create();
+    std::vector<testContainer*>::iterator it;
+    for (it=tests.begin() ; it < tests.end(); it++)
+    {
+        ++testIndex;
+
+        BWETest *theTest = (*it)->_test;
+
+        if (theTest)
+        {
+            std::cout << "\nTest " << testIndex << ": " << theTest->TestName() << "\n";
+        }
+
+        // Print instructions
+        std::cout << "--> " << (*it)->_instr << std::endl;
+
+        if ((isMaster && (*it)->_waitMaster)
+            || (!isMaster && (*it)->_waitSlave))
+        {
+            // Wait for a key press
+            std::cout << "Press enter to start test\n";
+            getc(stdin);
+        }
+
+        if (isMaster)
+        {
+            if ((*it)->_delayMaster > 0)
+            {
+                // Wait
+                std::cout << "Test starting in "
+                    << (*it)->_delayMaster
+                    << " seconds" << std::endl;
+                event->Wait((*it)->_delayMaster * 1000);
+            }
+        }
+        else
+        {
+            if ((*it)->_delaySlave > 0)
+            {
+                // Wait
+                std::cout << "Test starting in "
+                    << (*it)->_delaySlave
+                    << " seconds" << std::endl;
+                event->Wait((*it)->_delaySlave * 1000);
+            }
+        }
+
+        // Start execution
+        if (theTest)
+        {
+            theTest->SetMaster(isMaster);
+            if (theTest->Init(ip, port) != 0)
+            {
+                throw "Error initializing sender";
+                exit (1);
+            }
+
+            theTest->Start();
+            theTest->ProcLoop();
+            theTest->Stop();
+            theTest->Report(log);
+            log << std::flush;
+        }
+
+        delete (*it); // deletes the test too
+    }
+    delete event;
+    event = NULL;
+
+    log.close();
+    return (0);
+}
diff --git a/src/modules/rtp_rtcp/test/BWEStandAlone/BWETwoWayLimitFinding.cc b/src/modules/rtp_rtcp/test/BWEStandAlone/BWETwoWayLimitFinding.cc
new file mode 100644
index 0000000..043c7b0
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/BWEStandAlone/BWETwoWayLimitFinding.cc
@@ -0,0 +1,75 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "BWETwoWayLimitFinding.h"
+#include "TestLoadGenerator.h"
+
+
+BWETwoWayLimitFinding::BWETwoWayLimitFinding(
+    std::string testName,
+    int masterStartRateKbps, int masterAvailBWkbps,
+    int slaveStartRateKbps, int slaveAvailBWkbps,
+    bool isMaster /*= false*/)
+    :
+BWETest(testName, (isMaster ? masterStartRateKbps : slaveStartRateKbps)),
+_availBWkbps(isMaster ? masterAvailBWkbps : slaveAvailBWkbps),
+_incomingAvailBWkbps(isMaster ? slaveAvailBWkbps : masterAvailBWkbps),
+_forwLimitReached(false),
+_revLimitReached(false)
+{
+    _master = isMaster;
+}
+
+
+BWETwoWayLimitFinding::~BWETwoWayLimitFinding()
+{
+    if (_gen)
+    {
+        delete _gen;
+        _gen = NULL;
+    }
+}
+
+
+int BWETwoWayLimitFinding::Init(std::string ip, WebRtc_UWord16 port)
+{
+    // create the load generator object
+    const int rtpSampleRate = 90000;
+    const int frameRate = 30;
+    const double spreadFactor = 0.2;
+
+    _gen = new CBRFixFRGenerator(_sendrec, _startRateKbps, rtpSampleRate, frameRate, spreadFactor);
+    if (!_gen)
+    {
+        return (-1);
+    }
+
+    if (!_master) UseRecvTimeout(); // slave shuts down when incoming stream dies
+
+    return BWETest::Init(ip, port);
+}
+
+
+bool BWETwoWayLimitFinding::StoppingCriterionMaster()
+{
+    if ((_sendrec->BitrateSent() / 1000.0) > (0.95 * _availBWkbps))
+    {
+        _forwLimitReached = true;
+    }
+
+    WebRtc_Word32 revRateKbps = _sendrec->ReceiveBitrateKbps();
+    if (revRateKbps > (0.95 * _incomingAvailBWkbps))
+    {
+        _revLimitReached = true;
+    }
+
+    return (_forwLimitReached && _revLimitReached);
+}
+
diff --git a/src/modules/rtp_rtcp/test/BWEStandAlone/BWETwoWayLimitFinding.h b/src/modules/rtp_rtcp/test/BWEStandAlone/BWETwoWayLimitFinding.h
new file mode 100644
index 0000000..fc790e5
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/BWEStandAlone/BWETwoWayLimitFinding.h
@@ -0,0 +1,41 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWETWOWAYLIMITFINDING_H_
+#define WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWETWOWAYLIMITFINDING_H_
+
+#include "BWETestBase.h"
+
+class BWETwoWayLimitFinding : public BWETest
+{
+public:
+    BWETwoWayLimitFinding(std::string testName,
+        int masterStartRateKbps, int masterAvailBWkbps,
+        int slaveStartRateKbps, int slaveAvailBWkbps,
+        bool isMaster = false);
+
+    virtual ~BWETwoWayLimitFinding();
+
+    virtual int Init(std::string ip, WebRtc_UWord16 port);
+
+protected:
+    virtual bool StoppingCriterionMaster();
+    //virtual bool StoppingCriterionSlave();
+
+private:
+    int _availBWkbps;
+    int _incomingAvailBWkbps;
+    bool _forwLimitReached;
+    bool _revLimitReached;
+
+};
+
+
+#endif // WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_BWETWOWAYLIMITFINDING_H_
diff --git a/src/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.cc b/src/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.cc
new file mode 100644
index 0000000..9c81fd0
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.cc
@@ -0,0 +1,1071 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "MatlabPlot.h"
+#ifdef MATLAB
+#include "engine.h"
+#endif
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+#include "critical_section_wrapper.h"
+#include "tick_util.h"
+
+#include <sstream>
+#include <algorithm>
+#include <math.h>
+#include <stdio.h>
+
+using namespace webrtc;
+
+#ifdef MATLAB
+MatlabEngine eng;
+
+MatlabLine::MatlabLine(int maxLen /*= -1*/, const char *plotAttrib /*= NULL*/, const char *name /*= NULL*/)
+:
+_xArray(NULL),
+_yArray(NULL),
+_maxLen(maxLen),
+_plotAttribute(),
+_name()
+{
+    if (_maxLen > 0)
+    {
+        _xArray = mxCreateDoubleMatrix(1, _maxLen, mxREAL);
+        _yArray = mxCreateDoubleMatrix(1, _maxLen, mxREAL);
+    }
+
+    if (plotAttrib)
+    {
+        _plotAttribute = plotAttrib;
+    }
+
+    if (name)
+    {
+        _name = name;
+    }
+}
+
+MatlabLine::~MatlabLine()
+{
+    if (_xArray != NULL)
+    {
+        mxDestroyArray(_xArray);
+    }
+    if (_yArray != NULL)
+    {
+        mxDestroyArray(_yArray);
+    }
+}
+
+void MatlabLine::Append(double x, double y)
+{
+    if (_maxLen > 0 && _xData.size() > static_cast<WebRtc_UWord32>(_maxLen))
+    {
+        _xData.resize(_maxLen);
+        _yData.resize(_maxLen);
+    }
+
+    _xData.push_front(x);
+    _yData.push_front(y);
+}
+
+
+// append y-data with running integer index as x-data
+void MatlabLine::Append(double y)
+{
+    if (_xData.empty())
+    {
+        // first element is index 0
+        Append(0, y);
+    }
+    else
+    {
+        // take last x-value and increment
+        double temp = _xData.back(); // last x-value
+        Append(temp + 1, y);
+    }
+}
+
+
+void MatlabLine::SetMaxLen(int maxLen)
+{
+    if (maxLen <= 0)
+    {
+        // means no maxLen
+        _maxLen = -1;
+    }
+    else
+    {
+        _maxLen = maxLen;
+
+        if (_xArray != NULL)
+        {
+            mxDestroyArray(_xArray);
+            mxDestroyArray(_yArray);
+        }
+        _xArray = mxCreateDoubleMatrix(1, _maxLen, mxREAL);
+        _yArray = mxCreateDoubleMatrix(1, _maxLen, mxREAL);
+
+        maxLen = ((unsigned int)maxLen <= _xData.size()) ? maxLen : (int)_xData.size();
+        _xData.resize(maxLen);
+        _yData.resize(maxLen);
+
+        //// reserve the right amount of memory
+        //_xData.reserve(_maxLen);
+        //_yData.reserve(_maxLen);
+    }
+}
+
+void MatlabLine::SetAttribute(char *plotAttrib)
+{
+    _plotAttribute = plotAttrib;
+}
+
+void MatlabLine::SetName(char *name)
+{
+    _name = name;
+}
+
+void MatlabLine::GetPlotData(mxArray** xData, mxArray** yData)
+{
+    // Make sure we have enough Matlab allocated memory.
+    // Assuming both arrays (x and y) are of the same size.
+    if (_xData.empty())
+    {
+        return; // No data
+    }
+    unsigned int size = 0;
+    if (_xArray != NULL)
+    {
+        size = (unsigned int)mxGetNumberOfElements(_xArray);
+    }
+    if (size < _xData.size())
+    {
+        if (_xArray != NULL)
+        {
+            mxDestroyArray(_xArray);
+            mxDestroyArray(_yArray);
+        }
+        _xArray = mxCreateDoubleMatrix(1, _xData.size(), mxREAL);
+        _yArray = mxCreateDoubleMatrix(1, _yData.size(), mxREAL);
+    }
+
+    if (!_xData.empty())
+    {
+        double* x = mxGetPr(_xArray);
+
+        std::list<double>::iterator it = _xData.begin();
+
+        for (int i = 0; it != _xData.end(); it++, i++)
+        {
+            x[i] = *it;
+        }
+    }
+
+    if (!_yData.empty())
+    {
+        double* y = mxGetPr(_yArray);
+
+        std::list<double>::iterator it = _yData.begin();
+
+        for (int i = 0; it != _yData.end(); it++, i++)
+        {
+            y[i] = *it;
+        }
+    }
+    *xData = _xArray;
+    *yData = _yArray;
+}
+
+std::string MatlabLine::GetXName()
+{
+    std::ostringstream xString;
+    xString << "x_" << _name;
+    return xString.str();
+}
+
+std::string MatlabLine::GetYName()
+{
+    std::ostringstream yString;
+    yString << "y_" << _name;
+    return yString.str();
+}
+
+std::string MatlabLine::GetPlotString()
+{
+
+    std::ostringstream s;
+
+    if (_xData.size() == 0)
+    {
+        s << "[0 1], [0 1]"; // To get an empty plot
+    }
+    else
+    {
+        s << GetXName() << "(1:" << _xData.size() << "),";
+        s << GetYName() << "(1:" << _yData.size() << ")";
+    }
+
+    s << ", '";
+    s << _plotAttribute;
+    s << "'";
+
+    return s.str();
+}
+
+std::string MatlabLine::GetRefreshString()
+{
+    std::ostringstream s;
+
+    if (_xData.size() > 0)
+    {
+        s << "set(h,'xdata',"<< GetXName() <<"(1:" << _xData.size() << "),'ydata',"<< GetYName() << "(1:" << _yData.size() << "));";
+    }
+    else
+    {
+        s << "set(h,'xdata',[NaN],'ydata',[NaN]);";
+    }
+    return s.str();
+}
+
+std::string MatlabLine::GetLegendString()
+{
+    return ("'" + _name + "'");
+}
+
+bool MatlabLine::hasLegend()
+{
+    return (!_name.empty());
+}
+
+
+// remove data points, but keep attributes
+void MatlabLine::Reset()
+{
+    _xData.clear();
+    _yData.clear();
+}
+
+
+void MatlabLine::UpdateTrendLine(MatlabLine * sourceData, double slope, double offset)
+{
+    Reset(); // reset data, not attributes and name
+
+    double thexMin = sourceData->xMin();
+    double thexMax = sourceData->xMax();
+    Append(thexMin, thexMin * slope + offset);
+    Append(thexMax, thexMax * slope + offset);
+}
+
+double MatlabLine::xMin()
+{
+    if (!_xData.empty())
+    {
+        std::list<double>::iterator theStart = _xData.begin();
+        std::list<double>::iterator theEnd = _xData.end();
+        return(*min_element(theStart, theEnd));
+    }
+    return (0.0);
+}
+
+double MatlabLine::xMax()
+{
+    if (!_xData.empty())
+    {
+        std::list<double>::iterator theStart = _xData.begin();
+        std::list<double>::iterator theEnd = _xData.end();
+        return(*max_element(theStart, theEnd));
+    }
+    return (0.0);
+}
+
+double MatlabLine::yMin()
+{
+    if (!_yData.empty())
+    {
+        std::list<double>::iterator theStart = _yData.begin();
+        std::list<double>::iterator theEnd = _yData.end();
+        return(*min_element(theStart, theEnd));
+    }
+    return (0.0);
+}
+
+double MatlabLine::yMax()
+{
+    if (!_yData.empty())
+    {
+        std::list<double>::iterator theStart = _yData.begin();
+        std::list<double>::iterator theEnd = _yData.end();
+        return(*max_element(theStart, theEnd));
+    }
+    return (0.0);
+}
+
+
+
+MatlabTimeLine::MatlabTimeLine(int horizonSeconds /*= -1*/, const char *plotAttrib /*= NULL*/,
+                               const char *name /*= NULL*/,
+                               WebRtc_Word64 refTimeMs /* = -1*/)
+                               :
+_timeHorizon(horizonSeconds),
+MatlabLine(-1, plotAttrib, name) // infinite number of elements
+{
+    if (refTimeMs < 0)
+        _refTimeMs = TickTime::MillisecondTimestamp();
+    else
+        _refTimeMs = refTimeMs;
+}
+
+void MatlabTimeLine::Append(double y)
+{
+    MatlabLine::Append(static_cast<double>(TickTime::MillisecondTimestamp() - _refTimeMs) / 1000.0, y);
+
+    PurgeOldData();
+}
+
+
+void MatlabTimeLine::PurgeOldData()
+{
+    if (_timeHorizon > 0)
+    {
+        // remove old data
+        double historyLimit = static_cast<double>(TickTime::MillisecondTimestamp() - _refTimeMs) / 1000.0
+            - _timeHorizon; // remove data points older than this
+
+        std::list<double>::reverse_iterator ritx = _xData.rbegin();
+        WebRtc_UWord32 removeCount = 0;
+        while (ritx != _xData.rend())
+        {
+            if (*ritx >= historyLimit)
+            {
+                break;
+            }
+            ritx++;
+            removeCount++;
+        }
+        if (removeCount == 0)
+        {
+            return;
+        }
+
+        // remove the range [begin, it).
+        //if (removeCount > 10)
+        //{
+        //    printf("Removing %lu elements\n", removeCount);
+        //}
+        _xData.resize(_xData.size() - removeCount);
+        _yData.resize(_yData.size() - removeCount);
+    }
+}
+
+
+WebRtc_Word64 MatlabTimeLine::GetRefTime()
+{
+    return(_refTimeMs);
+}
+
+
+
+
+MatlabPlot::MatlabPlot()
+:
+_figHandle(-1),
+_smartAxis(false),
+_critSect(CriticalSectionWrapper::CreateCriticalSection()),
+_timeToPlot(false),
+_plotting(false),
+_enabled(true),
+_firstPlot(true),
+_legendEnabled(true),
+_donePlottingEvent(EventWrapper::Create())
+{
+    CriticalSectionScoped cs(_critSect);
+
+    _xlim[0] = 0;
+    _xlim[1] = 0;
+    _ylim[0] = 0;
+    _ylim[1] = 0;
+
+#ifdef PLOT_TESTING
+    _plotStartTime = -1;
+    _plotDelay = 0;
+#endif
+
+}
+
+
+MatlabPlot::~MatlabPlot()
+{
+    _critSect->Enter();
+
+    // delete all line objects
+    while (!_line.empty())
+    {
+        delete *(_line.end() - 1);
+        _line.pop_back();
+    }
+
+    delete _critSect;
+    delete _donePlottingEvent;
+}
+
+
+int MatlabPlot::AddLine(int maxLen /*= -1*/, const char *plotAttrib /*= NULL*/, const char *name /*= NULL*/)
+{
+    CriticalSectionScoped cs(_critSect);
+    if (!_enabled)
+    {
+        return -1;
+    }
+
+    MatlabLine *newLine = new MatlabLine(maxLen, plotAttrib, name);
+    _line.push_back(newLine);
+
+    return (static_cast<int>(_line.size() - 1)); // index of newly inserted line
+}
+
+
+int MatlabPlot::AddTimeLine(int maxLen /*= -1*/, const char *plotAttrib /*= NULL*/, const char *name /*= NULL*/,
+                            WebRtc_Word64 refTimeMs /*= -1*/)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (!_enabled)
+    {
+        return -1;
+    }
+
+    MatlabTimeLine *newLine = new MatlabTimeLine(maxLen, plotAttrib, name, refTimeMs);
+    _line.push_back(newLine);
+
+    return (static_cast<int>(_line.size() - 1)); // index of newly inserted line
+}
+
+
+int MatlabPlot::GetLineIx(const char *name)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (!_enabled)
+    {
+        return -1;
+    }
+
+    // search the list for a matching line name
+    std::vector<MatlabLine*>::iterator it = _line.begin();
+    bool matchFound = false;
+    int lineIx = 0;
+
+    for (; it != _line.end(); it++, lineIx++)
+    {
+        if ((*it)->_name == name)
+        {
+            matchFound = true;
+            break;
+        }
+    }
+
+    if (matchFound)
+    {
+        return (lineIx);
+    }
+    else
+    {
+        return (-1);
+    }
+}
+
+
+void MatlabPlot::Append(int lineIndex, double x, double y)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (!_enabled)
+    {
+        return;
+    }
+
+    // sanity for index
+    if (lineIndex < 0 || lineIndex >= static_cast<int>(_line.size()))
+    {
+        throw "Line index out of range";
+        exit(1);
+    }
+
+    return (_line[lineIndex]->Append(x, y));
+}
+
+
+void MatlabPlot::Append(int lineIndex, double y)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (!_enabled)
+    {
+        return;
+    }
+
+    // sanity for index
+    if (lineIndex < 0 || lineIndex >= static_cast<int>(_line.size()))
+    {
+        throw "Line index out of range";
+        exit(1);
+    }
+
+    return (_line[lineIndex]->Append(y));
+}
+
+
+int MatlabPlot::Append(const char *name, double x, double y)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (!_enabled)
+    {
+        return -1;
+    }
+
+    // search the list for a matching line name
+    int lineIx = GetLineIx(name);
+
+    if (lineIx < 0) //(!matchFound)
+    {
+        // no match; append new line
+        lineIx = AddLine(-1, NULL, name);
+    }
+
+    // append data to line
+    Append(lineIx, x, y);
+    return (lineIx);
+}
+
+int MatlabPlot::Append(const char *name, double y)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (!_enabled)
+    {
+        return -1;
+    }
+
+    // search the list for a matching line name
+    int lineIx = GetLineIx(name);
+
+    if (lineIx < 0) //(!matchFound)
+    {
+        // no match; append new line
+        lineIx = AddLine(-1, NULL, name);
+    }
+
+    // append data to line
+    Append(lineIx, y);
+    return (lineIx);
+}
+
+int MatlabPlot::Length(char *name)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (!_enabled)
+    {
+        return -1;
+    }
+
+    int ix = GetLineIx(name);
+    if (ix >= 0)
+    {
+        return (static_cast<int>(_line[ix]->_xData.size()));
+    }
+    else
+    {
+        return (-1);
+    }
+}
+
+
+void MatlabPlot::SetPlotAttribute(char *name, char *plotAttrib)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (!_enabled)
+    {
+        return;
+    }
+
+    int lineIx = GetLineIx(name);
+
+    if (lineIx >= 0)
+    {
+        _line[lineIx]->SetAttribute(plotAttrib);
+    }
+}
+
+// Must be called under critical section _critSect
+void MatlabPlot::UpdateData(Engine* ep)
+{
+    if (!_enabled)
+    {
+        return;
+    }
+
+    for (std::vector<MatlabLine*>::iterator it = _line.begin(); it != _line.end(); it++)
+    {
+        mxArray* xData = NULL;
+        mxArray* yData = NULL;
+        (*it)->GetPlotData(&xData, &yData);
+        if (xData != NULL)
+        {
+            std::string xName = (*it)->GetXName();
+            std::string yName = (*it)->GetYName();
+            _critSect->Leave();
+#ifdef MATLAB6
+            mxSetName(xData, xName.c_str());
+            mxSetName(yData, yName.c_str());
+            engPutArray(ep, xData);
+            engPutArray(ep, yData);
+#else
+            int ret = engPutVariable(ep, xName.c_str(), xData);
+            assert(ret == 0);
+            ret = engPutVariable(ep, yName.c_str(), yData);
+            assert(ret == 0);
+#endif
+            _critSect->Enter();
+        }
+    }
+}
+
+bool MatlabPlot::GetPlotCmd(std::ostringstream & cmd, Engine* ep)
+{
+    _critSect->Enter();
+
+    if (!DataAvailable())
+    {
+        return false;
+    }
+
+    if (_firstPlot)
+    {
+        GetPlotCmd(cmd);
+        _firstPlot = false;
+    }
+    else
+    {
+        GetRefreshCmd(cmd);
+    }
+
+    UpdateData(ep);
+
+    _critSect->Leave();
+
+    return true;
+}
+
+// Call inside critsect
+void MatlabPlot::GetPlotCmd(std::ostringstream & cmd)
+{
+    // we have something to plot
+    // empty the stream
+    cmd.str(""); // (this seems to be the only way)
+
+    cmd << "figure; h" << _figHandle << "= plot(";
+
+    // first line
+    std::vector<MatlabLine*>::iterator it = _line.begin();
+    cmd << (*it)->GetPlotString();
+
+    it++;
+
+    // remaining lines
+    for (; it != _line.end(); it++)
+    {
+        cmd << ", ";
+        cmd << (*it)->GetPlotString();
+    }
+
+    cmd << "); ";
+
+    if (_legendEnabled)
+    {
+        GetLegendCmd(cmd);
+    }
+
+    if (_smartAxis)
+    {
+        double xMin = _xlim[0];
+        double xMax = _xlim[1];
+        double yMax = _ylim[1];
+        for (std::vector<MatlabLine*>::iterator it = _line.begin(); it != _line.end(); it++)
+        {
+            xMax = std::max(xMax, (*it)->xMax());
+            xMin = std::min(xMin, (*it)->xMin());
+
+            yMax = std::max(yMax, (*it)->yMax());
+            yMax = std::max(yMax, fabs((*it)->yMin()));
+        }
+        _xlim[0] = xMin;
+        _xlim[1] = xMax;
+        _ylim[0] = -yMax;
+        _ylim[1] = yMax;
+
+        cmd << "axis([" << _xlim[0] << ", " << _xlim[1] << ", " << _ylim[0] << ", " << _ylim[1] << "]);";
+    }
+
+    int i=1;
+    for (it = _line.begin(); it != _line.end(); i++, it++)
+    {
+        cmd << "set(h" << _figHandle << "(" << i << "), 'Tag', " << (*it)->GetLegendString() << ");";
+    }
+}
+
+// Call inside critsect
+void MatlabPlot::GetRefreshCmd(std::ostringstream & cmd)
+{
+    cmd.str(""); // (this seems to be the only way)
+    std::vector<MatlabLine*>::iterator it = _line.begin();
+    for (it = _line.begin(); it != _line.end(); it++)
+    {
+        cmd << "h = findobj(0, 'Tag', " << (*it)->GetLegendString() << ");";
+        cmd << (*it)->GetRefreshString();
+    }
+    //if (_legendEnabled)
+    //{
+    //    GetLegendCmd(cmd);
+    //}
+}
+
+void MatlabPlot::GetLegendCmd(std::ostringstream & cmd)
+{
+    std::vector<MatlabLine*>::iterator it = _line.begin();
+    bool anyLegend = false;
+    for (; it != _line.end(); it++)
+    {
+        anyLegend = anyLegend || (*it)->hasLegend();
+    }
+    if (anyLegend)
+    {
+        // create the legend
+
+        cmd << "legend(h" << _figHandle << ",{";
+
+
+        // iterate lines
+        int i = 0;
+        for (std::vector<MatlabLine*>::iterator it = _line.begin(); it != _line.end(); it++)
+        {
+            if (i > 0)
+            {
+                cmd << ", ";
+            }
+            cmd << (*it)->GetLegendString();
+            i++;
+        }
+
+        cmd << "}, 2); "; // place legend in upper-left corner
+    }
+}
+
+// Call inside critsect
+bool MatlabPlot::DataAvailable()
+{
+    if (!_enabled)
+    {
+        return false;
+    }
+
+    for (std::vector<MatlabLine*>::iterator it = _line.begin(); it != _line.end(); it++)
+    {
+        (*it)->PurgeOldData();
+    }
+
+    return true;
+}
+
+void MatlabPlot::Plot()
+{
+    CriticalSectionScoped cs(_critSect);
+
+    _timeToPlot = true;
+
+#ifdef PLOT_TESTING
+    _plotStartTime = TickTime::MillisecondTimestamp();
+#endif
+}
+
+
+void MatlabPlot::Reset()
+{
+    CriticalSectionScoped cs(_critSect);
+
+    _enabled = true;
+
+    for (std::vector<MatlabLine*>::iterator it = _line.begin(); it != _line.end(); it++)
+    {
+        (*it)->Reset();
+    }
+
+}
+
+void MatlabPlot::SetFigHandle(int handle)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (handle > 0)
+        _figHandle = handle;
+}
+
+bool
+MatlabPlot::TimeToPlot()
+{
+    CriticalSectionScoped cs(_critSect);
+    return _enabled && _timeToPlot;
+}
+
+void
+MatlabPlot::Plotting()
+{
+    CriticalSectionScoped cs(_critSect);
+    _plotting = true;
+}
+
+void
+MatlabPlot::DonePlotting()
+{
+    CriticalSectionScoped cs(_critSect);
+    _timeToPlot = false;
+    _plotting = false;
+    _donePlottingEvent->Set();
+}
+
+void
+MatlabPlot::DisablePlot()
+{
+    _critSect->Enter();
+    while (_plotting)
+    {
+        _critSect->Leave();
+        _donePlottingEvent->Wait(WEBRTC_EVENT_INFINITE);
+        _critSect->Enter();
+    }
+    _enabled = false;
+}
+
+int MatlabPlot::MakeTrend(const char *sourceName, const char *trendName, double slope, double offset, const char *plotAttrib)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    int sourceIx;
+    int trendIx;
+
+    sourceIx = GetLineIx(sourceName);
+    if (sourceIx < 0)
+    {
+        // could not find source
+        return (-1);
+    }
+
+    trendIx = GetLineIx(trendName);
+    if (trendIx < 0)
+    {
+        // no trend found; add new line
+        trendIx = AddLine(2 /*maxLen*/, plotAttrib, trendName);
+    }
+
+    _line[trendIx]->UpdateTrendLine(_line[sourceIx], slope, offset);
+
+    return (trendIx);
+
+}
+
+
+MatlabEngine::MatlabEngine()
+:
+_critSect(CriticalSectionWrapper::CreateCriticalSection()),
+_eventPtr(NULL),
+_plotThread(NULL),
+_running(false),
+_numPlots(0)
+{
+    _eventPtr = EventWrapper::Create();
+
+    _plotThread = ThreadWrapper::CreateThread(MatlabEngine::PlotThread, this, kLowPriority, "MatlabPlot");
+
+    if (_plotThread == NULL)
+    {
+        throw "Unable to start MatlabEngine thread";
+        exit(1);
+    }
+
+    _running = true;
+
+    unsigned int tid;
+    _plotThread->Start(tid);
+
+}
+
+MatlabEngine::~MatlabEngine()
+{
+    _critSect->Enter();
+
+    if (_plotThread)
+    {
+        _plotThread->SetNotAlive();
+        _running = false;
+        _eventPtr->Set();
+
+        while (!_plotThread->Stop())
+        {
+            ;
+        }
+
+        delete _plotThread;
+    }
+
+    _plots.clear();
+
+    _plotThread = NULL;
+
+    delete _eventPtr;
+    _eventPtr = NULL;
+
+    _critSect->Leave();
+    delete _critSect;
+
+}
+
+MatlabPlot * MatlabEngine::NewPlot(MatlabPlot *newPlot)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    //MatlabPlot *newPlot = new MatlabPlot();
+
+    if (newPlot)
+    {
+        newPlot->SetFigHandle(++_numPlots); // first plot is number 1
+        _plots.push_back(newPlot);
+    }
+
+    return (newPlot);
+
+}
+
+
+void MatlabEngine::DeletePlot(MatlabPlot *plot)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (plot == NULL)
+    {
+        return;
+    }
+
+    std::vector<MatlabPlot *>::iterator it;
+    for (it = _plots.begin(); it < _plots.end(); it++)
+    {
+        if (plot == *it)
+        {
+            break;
+        }
+    }
+
+    assert (plot == *it);
+
+    (*it)->DisablePlot();
+
+    _plots.erase(it);
+    --_numPlots;
+
+    delete plot;
+}
+
+
+bool MatlabEngine::PlotThread(void *obj)
+{
+    if (!obj)
+    {
+        return (false);
+    }
+
+    MatlabEngine *eng = (MatlabEngine *) obj;
+
+    Engine *ep = engOpen(NULL);
+    if (!ep)
+    {
+        throw "Cannot open Matlab engine";
+        return (false);
+    }
+
+    engSetVisible(ep, true);
+    engEvalString(ep, "close all;");
+
+    while (eng->_running)
+    {
+        eng->_critSect->Enter();
+
+        // iterate through all plots
+        for (unsigned int ix = 0; ix < eng->_plots.size(); ix++)
+        {
+            MatlabPlot *plot = eng->_plots[ix];
+            if (plot->TimeToPlot())
+            {
+                plot->Plotting();
+                eng->_critSect->Leave();
+                std::ostringstream cmd;
+
+                if (engEvalString(ep, cmd.str().c_str()))
+                {
+                    // engine dead
+                    return (false);
+                }
+
+                // empty the stream
+                cmd.str(""); // (this seems to be the only way)
+                if (plot->GetPlotCmd(cmd, ep))
+                {
+                    // things to plot, we have already accessed what we need in the plot
+                    plot->DonePlotting();
+
+                    WebRtc_Word64 start = TickTime::MillisecondTimestamp();
+                    // plot it
+                    int ret = engEvalString(ep, cmd.str().c_str());
+                    printf("time=%I64i\n", TickTime::MillisecondTimestamp() - start);
+                    if (ret)
+                    {
+                        // engine dead
+                        return (false);
+                    }
+
+#ifdef PLOT_TESTING
+                    if(plot->_plotStartTime >= 0)
+                    {
+                        plot->_plotDelay = TickTime::MillisecondTimestamp() - plot->_plotStartTime;
+                        plot->_plotStartTime = -1;
+                    }
+#endif
+                }
+                eng->_critSect->Enter();
+            }
+        }
+
+        eng->_critSect->Leave();
+        // wait a while
+        eng->_eventPtr->Wait(66); // 33 ms
+    }
+
+    if (ep)
+    {
+        engClose(ep);
+        ep = NULL;
+    }
+
+    return (true);
+
+}
+
+#endif // MATLAB
diff --git a/src/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.h b/src/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.h
new file mode 100644
index 0000000..08c7006
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/BWEStandAlone/MatlabPlot.h
@@ -0,0 +1,170 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_MATLABPLOT_H_
+#define WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_MATLABPLOT_H_
+
+#include <list>
+#include <string>
+#include <vector>
+
+#include "typedefs.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+}
+
+//#define PLOT_TESTING
+
+#ifdef MATLAB
+
+typedef struct engine Engine;
+typedef struct mxArray_tag mxArray;
+
+class MatlabLine
+{
+    friend class MatlabPlot;
+
+public:
+    MatlabLine(int maxLen = -1, const char *plotAttrib = NULL, const char *name = NULL);
+    ~MatlabLine();
+    virtual void Append(double x, double y);
+    virtual void Append(double y);
+    void SetMaxLen(int maxLen);
+    void SetAttribute(char *plotAttrib);
+    void SetName(char *name);
+    void Reset();
+    virtual void PurgeOldData() {};
+
+    void UpdateTrendLine(MatlabLine * sourceData, double slope, double offset);
+
+    double xMin();
+    double xMax();
+    double yMin();
+    double yMax();
+
+protected:
+    void GetPlotData(mxArray** xData, mxArray** yData);
+    std::string GetXName();
+    std::string GetYName();
+    std::string GetPlotString();
+    std::string GetRefreshString();
+    std::string GetLegendString();
+    bool hasLegend();
+    std::list<double> _xData;
+    std::list<double> _yData;
+    mxArray* _xArray;
+    mxArray* _yArray;
+    int _maxLen;
+    std::string _plotAttribute;
+    std::string _name;
+};
+
+
+class MatlabTimeLine : public MatlabLine
+{
+public:
+    MatlabTimeLine(int horizonSeconds = -1, const char *plotAttrib = NULL, const char *name = NULL,
+        WebRtc_Word64 refTimeMs = -1);
+    ~MatlabTimeLine() {};
+    void Append(double y);
+    void PurgeOldData();
+    WebRtc_Word64 GetRefTime();
+
+private:
+    WebRtc_Word64 _refTimeMs;
+    int _timeHorizon;
+};
+
+
+class MatlabPlot
+{
+    friend class MatlabEngine;
+
+public:
+    MatlabPlot();
+    ~MatlabPlot();
+
+    int AddLine(int maxLen = -1, const char *plotAttrib = NULL, const char *name = NULL);
+    int AddTimeLine(int maxLen = -1, const char *plotAttrib = NULL, const char *name = NULL,
+        WebRtc_Word64 refTimeMs = -1);
+    int GetLineIx(const char *name);
+    void Append(int lineIndex, double x, double y);
+    void Append(int lineIndex, double y);
+    int Append(const char *name, double x, double y);
+    int Append(const char *name, double y);
+    int Length(char *name);
+    void SetPlotAttribute(char *name, char *plotAttrib);
+    void Plot();
+    void Reset();
+    void SmartAxis(bool status = true) { _smartAxis = status; };
+    void SetFigHandle(int handle);
+    void EnableLegend(bool enable) { _legendEnabled = enable; };
+
+    bool TimeToPlot();
+    void Plotting();
+    void DonePlotting();
+    void DisablePlot();
+
+    int MakeTrend(const char *sourceName, const char *trendName, double slope, double offset, const char *plotAttrib = NULL);
+
+#ifdef PLOT_TESTING
+    WebRtc_Word64 _plotStartTime;
+    WebRtc_Word64 _plotDelay;
+#endif
+
+private:
+    void UpdateData(Engine* ep);
+    bool GetPlotCmd(std::ostringstream & cmd, Engine* ep);
+    void GetPlotCmd(std::ostringstream & cmd); // call inside crit sect
+    void GetRefreshCmd(std::ostringstream & cmd); // call inside crit sect
+    void GetLegendCmd(std::ostringstream & cmd);
+    bool DataAvailable();
+
+    std::vector<MatlabLine *> _line;
+    int _figHandle;
+    bool _smartAxis;
+    double _xlim[2];
+    double _ylim[2];
+    webrtc::CriticalSectionWrapper *_critSect;
+    bool _timeToPlot;
+    bool _plotting;
+    bool _enabled;
+    bool _firstPlot;
+    bool _legendEnabled;
+    webrtc::EventWrapper* _donePlottingEvent;
+};
+
+
+class MatlabEngine
+{
+public:
+    MatlabEngine();
+    ~MatlabEngine();
+
+    MatlabPlot * NewPlot(MatlabPlot *newPlot);
+    void DeletePlot(MatlabPlot *plot);
+
+private:
+    static bool PlotThread(void *obj);
+
+    std::vector<MatlabPlot *> _plots;
+    webrtc::CriticalSectionWrapper *_critSect;
+    webrtc::EventWrapper *_eventPtr;
+    webrtc::ThreadWrapper* _plotThread;
+    bool _running;
+    int _numPlots;
+};
+
+#endif //MATLAB
+
+#endif // WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_MATLABPLOT_H_
diff --git a/src/modules/rtp_rtcp/test/BWEStandAlone/TestLoadGenerator.cc b/src/modules/rtp_rtcp/test/BWEStandAlone/TestLoadGenerator.cc
new file mode 100644
index 0000000..d322242
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/BWEStandAlone/TestLoadGenerator.cc
@@ -0,0 +1,438 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm> // for max function
+#include <stdio.h>
+
+#include "TestLoadGenerator.h"
+#include "TestSenderReceiver.h"
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+#include "critical_section_wrapper.h"
+#include "tick_util.h"
+
+
+bool SenderThreadFunction(void *obj)
+{
+    if (obj == NULL)
+    {
+        return false;
+    }
+    TestLoadGenerator *_genObj = static_cast<TestLoadGenerator *>(obj);
+
+    return _genObj->GeneratorLoop();
+}
+
+
+TestLoadGenerator::TestLoadGenerator(TestSenderReceiver *sender, WebRtc_Word32 rtpSampleRate)
+:
+_critSect(CriticalSectionWrapper::CreateCriticalSection()),
+_eventPtr(NULL),
+_genThread(NULL),
+_bitrateKbps(0),
+_sender(sender),
+_running(false),
+_rtpSampleRate(rtpSampleRate)
+{
+}
+
+TestLoadGenerator::~TestLoadGenerator ()
+{
+    if (_running)
+    {
+        Stop();
+    }
+
+    delete _critSect;
+}
+
+WebRtc_Word32 TestLoadGenerator::SetBitrate (WebRtc_Word32 newBitrateKbps)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (newBitrateKbps < 0)
+    {
+        return -1;
+    }
+
+    _bitrateKbps = newBitrateKbps;
+
+    printf("New bitrate = %i kbps\n", _bitrateKbps);
+
+    return _bitrateKbps;
+}
+
+
+WebRtc_Word32 TestLoadGenerator::Start (const char *threadName)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    _eventPtr = EventWrapper::Create();
+
+    _genThread = ThreadWrapper::CreateThread(SenderThreadFunction, this, kRealtimePriority, threadName);
+    if (_genThread == NULL)
+    {
+        throw "Unable to start generator thread";
+        exit(1);
+    }
+
+    _running = true;
+
+    unsigned int tid;
+    _genThread->Start(tid);
+
+    return 0;
+}
+
+
+WebRtc_Word32 TestLoadGenerator::Stop ()
+{
+    _critSect.Enter();
+
+    if (_genThread)
+    {
+        _genThread->SetNotAlive();
+        _running = false;
+        _eventPtr->Set();
+
+        while (!_genThread->Stop())
+        {
+            _critSect.Leave();
+            _critSect.Enter();
+        }
+
+        delete _genThread;
+        _genThread = NULL;
+
+        delete _eventPtr;
+        _eventPtr = NULL;
+    }
+
+    _genThread = NULL;
+    _critSect.Leave();
+    return (0);
+}
+
+
+int TestLoadGenerator::generatePayload ()
+{
+    return(generatePayload( static_cast<WebRtc_UWord32>( TickTime::MillisecondTimestamp() * _rtpSampleRate / 1000 )));
+}
+
+
+int TestLoadGenerator::sendPayload (const WebRtc_UWord32 timeStamp,
+                                    const WebRtc_UWord8* payloadData,
+                                    const WebRtc_UWord32 payloadSize,
+                                    const webrtc::FrameType frameType /*= webrtc::kVideoFrameDelta*/)
+{
+
+    return (_sender->SendOutgoingData(timeStamp, payloadData, payloadSize, frameType));
+}
+
+
+CBRGenerator::CBRGenerator (TestSenderReceiver *sender, WebRtc_Word32 payloadSizeBytes, WebRtc_Word32 bitrateKbps, WebRtc_Word32 rtpSampleRate)
+:
+//_eventPtr(NULL),
+_payloadSizeBytes(payloadSizeBytes),
+_payload(new WebRtc_UWord8[payloadSizeBytes]),
+TestLoadGenerator(sender, rtpSampleRate)
+{
+    SetBitrate (bitrateKbps);
+}
+
+CBRGenerator::~CBRGenerator ()
+{
+    if (_running)
+    {
+        Stop();
+    }
+
+    if (_payload)
+    {
+        delete [] _payload;
+    }
+
+}
+
+bool CBRGenerator::GeneratorLoop ()
+{
+    double periodMs;
+    WebRtc_Word64 nextSendTime = TickTime::MillisecondTimestamp();
+
+
+    // no critSect
+    while (_running)
+    {
+        // send data (critSect inside)
+        generatePayload( static_cast<WebRtc_UWord32>(nextSendTime * _rtpSampleRate / 1000) );
+
+        // calculate wait time
+        periodMs = 8.0 * _payloadSizeBytes / ( _bitrateKbps );
+
+        nextSendTime = static_cast<WebRtc_Word64>(nextSendTime + periodMs);
+
+        WebRtc_Word32 waitTime = static_cast<WebRtc_Word32>(nextSendTime - TickTime::MillisecondTimestamp());
+        if (waitTime < 0)
+        {
+            waitTime = 0;
+        }
+        // wait
+        _eventPtr->Wait(static_cast<WebRtc_Word32>(waitTime));
+    }
+
+    return true;
+}
+
+int CBRGenerator::generatePayload ( WebRtc_UWord32 timestamp )
+{
+    CriticalSectionScoped cs(_critSect);
+
+    //WebRtc_UWord8 *payload = new WebRtc_UWord8[_payloadSizeBytes];
+
+    int ret = sendPayload(timestamp, _payload, _payloadSizeBytes);
+
+    //delete [] payload;
+    return ret;
+}
+
+
+
+
+/////////////////////
+
+CBRFixFRGenerator::CBRFixFRGenerator (TestSenderReceiver *sender, WebRtc_Word32 bitrateKbps,
+                                      WebRtc_Word32 rtpSampleRate, WebRtc_Word32 frameRateFps /*= 30*/,
+                                      double spread /*= 0.0*/)
+:
+//_eventPtr(NULL),
+_payloadSizeBytes(0),
+_payload(NULL),
+_payloadAllocLen(0),
+_frameRateFps(frameRateFps),
+_spreadFactor(spread),
+TestLoadGenerator(sender, rtpSampleRate)
+{
+    SetBitrate (bitrateKbps);
+}
+
+CBRFixFRGenerator::~CBRFixFRGenerator ()
+{
+    if (_running)
+    {
+        Stop();
+    }
+
+    if (_payload)
+    {
+        delete [] _payload;
+        _payloadAllocLen = 0;
+    }
+
+}
+
+bool CBRFixFRGenerator::GeneratorLoop ()
+{
+    double periodMs;
+    WebRtc_Word64 nextSendTime = TickTime::MillisecondTimestamp();
+
+    _critSect.Enter();
+
+    if (_frameRateFps <= 0)
+    {
+        return false;
+    }
+
+    _critSect.Leave();
+
+    // no critSect
+    while (_running)
+    {
+        _critSect.Enter();
+
+        // calculate payload size
+        _payloadSizeBytes = nextPayloadSize();
+
+        if (_payloadSizeBytes > 0)
+        {
+
+            if (_payloadAllocLen < _payloadSizeBytes * (1 + _spreadFactor))
+            {
+                // re-allocate _payload
+                if (_payload)
+                {
+                    delete [] _payload;
+                    _payload = NULL;
+                }
+
+                _payloadAllocLen = static_cast<WebRtc_Word32>((_payloadSizeBytes * (1 + _spreadFactor) * 3) / 2 + .5); // 50% extra to avoid frequent re-alloc
+                _payload = new WebRtc_UWord8[_payloadAllocLen];
+            }
+
+
+            // send data (critSect inside)
+            generatePayload( static_cast<WebRtc_UWord32>(nextSendTime * _rtpSampleRate / 1000) );
+        }
+
+        _critSect.Leave();
+
+        // calculate wait time
+        periodMs = 1000.0 / _frameRateFps;
+        nextSendTime = static_cast<WebRtc_Word64>(nextSendTime + periodMs + 0.5);
+
+        WebRtc_Word32 waitTime = static_cast<WebRtc_Word32>(nextSendTime - TickTime::MillisecondTimestamp());
+        if (waitTime < 0)
+        {
+            waitTime = 0;
+        }
+        // wait
+        _eventPtr->Wait(waitTime);
+    }
+
+    return true;
+}
+
+WebRtc_Word32 CBRFixFRGenerator::nextPayloadSize()
+{
+    const double periodMs = 1000.0 / _frameRateFps;
+    return static_cast<WebRtc_Word32>(_bitrateKbps * periodMs / 8 + 0.5);
+}
+
+int CBRFixFRGenerator::generatePayload ( WebRtc_UWord32 timestamp )
+{
+    CriticalSectionScoped cs(_critSect);
+
+    double factor = ((double) rand() - RAND_MAX/2) / RAND_MAX; // [-0.5; 0.5]
+    factor = 1 + 2 * _spreadFactor * factor; // [1 - _spreadFactor ; 1 + _spreadFactor]
+
+    WebRtc_Word32 thisPayloadBytes = static_cast<WebRtc_Word32>(_payloadSizeBytes * factor);
+    // sanity
+    if (thisPayloadBytes > _payloadAllocLen)
+    {
+        thisPayloadBytes = _payloadAllocLen;
+    }
+
+    int ret = sendPayload(timestamp, _payload, thisPayloadBytes);
+    return ret;
+}
+
+
+/////////////////////
+
+PeriodicKeyFixFRGenerator::PeriodicKeyFixFRGenerator (TestSenderReceiver *sender, WebRtc_Word32 bitrateKbps,
+                                                      WebRtc_Word32 rtpSampleRate, WebRtc_Word32 frameRateFps /*= 30*/,
+                                                      double spread /*= 0.0*/, double keyFactor /*= 4.0*/, WebRtc_UWord32 keyPeriod /*= 300*/)
+:
+_keyFactor(keyFactor),
+_keyPeriod(keyPeriod),
+_frameCount(0),
+CBRFixFRGenerator(sender, bitrateKbps, rtpSampleRate, frameRateFps, spread)
+{
+}
+
+WebRtc_Word32 PeriodicKeyFixFRGenerator::nextPayloadSize()
+{
+    // calculate payload size for a delta frame
+    WebRtc_Word32 payloadSizeBytes = static_cast<WebRtc_Word32>(1000 * _bitrateKbps / (8.0 * _frameRateFps * (1.0 + (_keyFactor - 1.0) / _keyPeriod)) + 0.5);
+
+    if (_frameCount % _keyPeriod == 0)
+    {
+        // this is a key frame, scale the payload size
+        payloadSizeBytes = static_cast<WebRtc_Word32>(_keyFactor * _payloadSizeBytes + 0.5);
+    }
+    _frameCount++;
+
+    return payloadSizeBytes;
+}
+
+////////////////////
+
+CBRVarFRGenerator::CBRVarFRGenerator(TestSenderReceiver *sender, WebRtc_Word32 bitrateKbps, const WebRtc_UWord8* frameRates,
+                                     WebRtc_UWord16 numFrameRates, WebRtc_Word32 rtpSampleRate, double avgFrPeriodMs,
+                                     double frSpreadFactor, double spreadFactor)
+:
+_avgFrPeriodMs(avgFrPeriodMs),
+_frSpreadFactor(frSpreadFactor),
+_frameRates(NULL),
+_numFrameRates(numFrameRates),
+_frChangeTimeMs(TickTime::MillisecondTimestamp() + _avgFrPeriodMs),
+CBRFixFRGenerator(sender, bitrateKbps, rtpSampleRate, frameRates[0], spreadFactor)
+{
+    _frameRates = new WebRtc_UWord8[_numFrameRates];
+    memcpy(_frameRates, frameRates, _numFrameRates);
+}
+
+CBRVarFRGenerator::~CBRVarFRGenerator()
+{
+    delete [] _frameRates;
+}
+
+void CBRVarFRGenerator::ChangeFrameRate()
+{
+    const WebRtc_Word64 nowMs = TickTime::MillisecondTimestamp();
+    if (nowMs < _frChangeTimeMs)
+    {
+        return;
+    }
+    // Time to change frame rate
+    WebRtc_UWord16 frIndex = static_cast<WebRtc_UWord16>(static_cast<double>(rand()) / RAND_MAX
+                                            * (_numFrameRates - 1) + 0.5) ;
+    assert(frIndex < _numFrameRates);
+    _frameRateFps = _frameRates[frIndex];
+    // Update the next frame rate change time
+    double factor = ((double) rand() - RAND_MAX/2) / RAND_MAX; // [-0.5; 0.5]
+    factor = 1 + 2 * _frSpreadFactor * factor; // [1 - _frSpreadFactor ; 1 + _frSpreadFactor]
+    _frChangeTimeMs = nowMs + static_cast<WebRtc_Word64>(1000.0 * factor *
+                                    _avgFrPeriodMs + 0.5);
+
+    printf("New frame rate: %d\n", _frameRateFps);
+}
+
+WebRtc_Word32 CBRVarFRGenerator::nextPayloadSize()
+{
+    ChangeFrameRate();
+    return CBRFixFRGenerator::nextPayloadSize();
+}
+
+////////////////////
+
+CBRFrameDropGenerator::CBRFrameDropGenerator(TestSenderReceiver *sender, WebRtc_Word32 bitrateKbps,
+                                         WebRtc_Word32 rtpSampleRate, double spreadFactor)
+:
+_accBits(0),
+CBRFixFRGenerator(sender, bitrateKbps, rtpSampleRate, 30, spreadFactor)
+{
+}
+
+CBRFrameDropGenerator::~CBRFrameDropGenerator()
+{
+}
+
+WebRtc_Word32 CBRFrameDropGenerator::nextPayloadSize()
+{
+    _accBits -= 1000 * _bitrateKbps / _frameRateFps;
+    if (_accBits < 0)
+    {
+        _accBits = 0;
+    }
+    if (_accBits > 0.3 * _bitrateKbps * 1000)
+    {
+        //printf("drop\n");
+        return 0;
+    }
+    else
+    {
+        //printf("keep\n");
+        const double periodMs = 1000.0 / _frameRateFps;
+        WebRtc_Word32 frameSize = static_cast<WebRtc_Word32>(_bitrateKbps * periodMs / 8 + 0.5);
+        frameSize = std::max(frameSize, static_cast<WebRtc_Word32>(300 * periodMs / 8 + 0.5));
+        _accBits += frameSize * 8;
+        return frameSize;
+    }
+}
diff --git a/src/modules/rtp_rtcp/test/BWEStandAlone/TestLoadGenerator.h b/src/modules/rtp_rtcp/test/BWEStandAlone/TestLoadGenerator.h
new file mode 100644
index 0000000..c22591c
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/BWEStandAlone/TestLoadGenerator.h
@@ -0,0 +1,146 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_TESTLOADGENERATOR_H_
+#define WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_TESTLOADGENERATOR_H_
+
+#include <stdlib.h>
+
+#include "typedefs.h"
+#include "module_common_types.h"
+
+class TestSenderReceiver;
+namespace webrtc {
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+}
+
+class TestLoadGenerator
+{
+public:
+    TestLoadGenerator (TestSenderReceiver *sender, WebRtc_Word32 rtpSampleRate = 90000);
+    virtual ~TestLoadGenerator ();
+
+    WebRtc_Word32 SetBitrate (WebRtc_Word32 newBitrateKbps);
+    virtual WebRtc_Word32 Start (const char *threadName = NULL);
+    virtual WebRtc_Word32 Stop ();
+    virtual bool GeneratorLoop () = 0;
+
+protected:
+    virtual int generatePayload ( WebRtc_UWord32 timestamp ) = 0;
+    int generatePayload ();
+    int sendPayload (const WebRtc_UWord32 timeStamp,
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord32 payloadSize,
+        const webrtc::FrameType frameType = webrtc::kVideoFrameDelta);
+
+    webrtc::CriticalSectionWrapper* _critSect;
+    webrtc::EventWrapper *_eventPtr;
+    webrtc::ThreadWrapper* _genThread;
+    WebRtc_Word32 _bitrateKbps;
+    TestSenderReceiver *_sender;
+    bool _running;
+    WebRtc_Word32 _rtpSampleRate;
+};
+
+
+class CBRGenerator : public TestLoadGenerator
+{
+public:
+    CBRGenerator (TestSenderReceiver *sender, WebRtc_Word32 payloadSizeBytes, WebRtc_Word32 bitrateKbps, WebRtc_Word32 rtpSampleRate = 90000);
+    virtual ~CBRGenerator ();
+
+    virtual WebRtc_Word32 Start () {return (TestLoadGenerator::Start("CBRGenerator"));};
+
+    virtual bool GeneratorLoop ();
+
+protected:
+    virtual int generatePayload ( WebRtc_UWord32 timestamp );
+
+    WebRtc_Word32 _payloadSizeBytes;
+    WebRtc_UWord8 *_payload;
+};
+
+
+class CBRFixFRGenerator : public TestLoadGenerator // constant bitrate and fixed frame rate
+{
+public:
+    CBRFixFRGenerator (TestSenderReceiver *sender, WebRtc_Word32 bitrateKbps, WebRtc_Word32 rtpSampleRate = 90000,
+        WebRtc_Word32 frameRateFps = 30, double spread = 0.0);
+    virtual ~CBRFixFRGenerator ();
+
+    virtual WebRtc_Word32 Start () {return (TestLoadGenerator::Start("CBRFixFRGenerator"));};
+
+    virtual bool GeneratorLoop ();
+
+protected:
+    virtual WebRtc_Word32 nextPayloadSize ();
+    virtual int generatePayload ( WebRtc_UWord32 timestamp );
+
+    WebRtc_Word32 _payloadSizeBytes;
+    WebRtc_UWord8 *_payload;
+    WebRtc_Word32 _payloadAllocLen;
+    WebRtc_Word32 _frameRateFps;
+    double      _spreadFactor;
+};
+
+class PeriodicKeyFixFRGenerator : public CBRFixFRGenerator // constant bitrate and fixed frame rate with periodically large frames
+{
+public:
+    PeriodicKeyFixFRGenerator (TestSenderReceiver *sender, WebRtc_Word32 bitrateKbps, WebRtc_Word32 rtpSampleRate = 90000,
+        WebRtc_Word32 frameRateFps = 30, double spread = 0.0, double keyFactor = 4.0, WebRtc_UWord32 keyPeriod = 300);
+    virtual ~PeriodicKeyFixFRGenerator () {}
+
+protected:
+    virtual WebRtc_Word32 nextPayloadSize ();
+
+    double          _keyFactor;
+    WebRtc_UWord32    _keyPeriod;
+    WebRtc_UWord32    _frameCount;
+};
+
+// Probably better to inherit CBRFixFRGenerator from CBRVarFRGenerator, but since
+// the fix FR version already existed this was easier.
+class CBRVarFRGenerator : public CBRFixFRGenerator // constant bitrate and variable frame rate
+{
+public:
+    CBRVarFRGenerator(TestSenderReceiver *sender, WebRtc_Word32 bitrateKbps, const WebRtc_UWord8* frameRates,
+        WebRtc_UWord16 numFrameRates, WebRtc_Word32 rtpSampleRate = 90000, double avgFrPeriodMs = 5.0,
+        double frSpreadFactor = 0.05, double spreadFactor = 0.0);
+
+    ~CBRVarFRGenerator();
+
+protected:
+    virtual void ChangeFrameRate();
+    virtual WebRtc_Word32 nextPayloadSize ();
+
+    double       _avgFrPeriodMs;
+    double       _frSpreadFactor;
+    WebRtc_UWord8* _frameRates;
+    WebRtc_UWord16 _numFrameRates;
+    WebRtc_Word64  _frChangeTimeMs;
+};
+
+class CBRFrameDropGenerator : public CBRFixFRGenerator // constant bitrate and variable frame rate
+{
+public:
+    CBRFrameDropGenerator(TestSenderReceiver *sender, WebRtc_Word32 bitrateKbps,
+                    WebRtc_Word32 rtpSampleRate = 90000, double spreadFactor = 0.0);
+
+    ~CBRFrameDropGenerator();
+
+protected:
+    virtual WebRtc_Word32 nextPayloadSize();
+
+    double       _accBits;
+};
+
+#endif // WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_TESTLOADGENERATOR_H_
diff --git a/src/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.cc b/src/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.cc
new file mode 100644
index 0000000..1fc0fd3
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.cc
@@ -0,0 +1,442 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+
+#include "rtp_rtcp.h"
+#include "udp_transport.h"
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+#include "tick_util.h"
+#include "critical_section_wrapper.h"
+#include "TestSenderReceiver.h"
+#include "TestLoadGenerator.h"
+#include <stdlib.h>
+
+#define NR_OF_SOCKET_BUFFERS 500
+
+
+bool ProcThreadFunction(void *obj)
+{
+    if (obj == NULL)
+    {
+        return false;
+    }
+    TestSenderReceiver *theObj = static_cast<TestSenderReceiver *>(obj);
+
+    return theObj->ProcLoop();
+}
+
+
+TestSenderReceiver::TestSenderReceiver (void)
+:
+_critSect(CriticalSectionWrapper::CreateCriticalSection()),
+_eventPtr(NULL),
+_procThread(NULL),
+_running(false),
+_payloadType(0),
+_loadGenerator(NULL),
+_isSender(false),
+_isReceiver(false),
+_timeOut(false),
+_sendRecCB(NULL),
+_lastBytesReceived(0),
+_lastTime(-1)
+{
+    // RTP/RTCP module
+    _rtp = RtpRtcp::CreateRtpRtcp(0, false);
+    if (!_rtp)
+    {
+        throw "Could not create RTP/RTCP module";
+        exit(1);
+    }
+
+    if (_rtp->InitReceiver() != 0)
+    {
+        throw "_rtp->InitReceiver()";
+        exit(1);
+    }
+
+    if (_rtp->InitSender() != 0)
+    {
+        throw "_rtp->InitSender()";
+        exit(1);
+    }
+
+    // SocketTransport module
+    WebRtc_UWord8 numberOfThreads = 1;
+    _transport = UdpTransport::Create(0, numberOfThreads);
+    if (!_transport)
+    {
+        throw "Could not create transport module";
+        exit(1);
+    }
+}
+
+TestSenderReceiver::~TestSenderReceiver (void)
+{
+
+    Stop(); // N.B. without critSect
+
+    _critSect->Enter();
+
+    if (_rtp)
+    {
+        RtpRtcp::DestroyRtpRtcp(_rtp);
+        _rtp = NULL;
+    }
+
+    if (_transport)
+    {
+        UdpTransport::Destroy(_transport);
+        _transport = NULL;
+    }
+
+    delete _critSect;
+
+}
+
+
+WebRtc_Word32 TestSenderReceiver::InitReceiver (const WebRtc_UWord16 rtpPort,
+                                              const WebRtc_UWord16 rtcpPort,
+                                              const WebRtc_Word8 payloadType /*= 127*/)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    // init transport
+    if (_transport->InitializeReceiveSockets(this, rtpPort/*, 0, NULL, 0, true*/) != 0)
+    {
+        throw "_transport->InitializeReceiveSockets";
+        exit(1);
+    }
+
+    if (_rtp->RegisterIncomingRTPCallback(this) != 0)
+    {
+        throw "_rtp->RegisterIncomingRTPCallback";
+        exit(1);
+    }
+
+    if (_rtp->RegisterIncomingDataCallback(this) != 0)
+    {
+        throw "_rtp->RegisterIncomingRTPCallback";
+        exit(1);
+    }
+
+    if (_rtp->SetRTCPStatus(kRtcpNonCompound) != 0)
+    {
+        throw "_rtp->SetRTCPStatus";
+        exit(1);
+    }
+
+    if (_rtp->SetTMMBRStatus(true) != 0)
+    {
+        throw "_rtp->SetTMMBRStatus";
+        exit(1);
+    }
+
+    if (_rtp->RegisterReceivePayload("I420", payloadType, 90000) != 0)
+    {
+        throw "_rtp->RegisterReceivePayload";
+        exit(1);
+    }
+
+    _isReceiver = true;
+
+    return (0);
+}
+
+
+WebRtc_Word32 TestSenderReceiver::Start()
+{
+    CriticalSectionScoped cs(_critSect);
+
+    _eventPtr = EventWrapper::Create();
+
+    if (_rtp->SetSendingStatus(true) != 0)
+    {
+        throw "_rtp->SetSendingStatus";
+        exit(1);
+    }
+
+    _procThread = ThreadWrapper::CreateThread(ProcThreadFunction, this, kRealtimePriority, "TestSenderReceiver");
+    if (_procThread == NULL)
+    {
+        throw "Unable to create process thread";
+        exit(1);
+    }
+
+    _running = true;
+
+    if (_isReceiver)
+    {
+        if (_transport->StartReceiving(NR_OF_SOCKET_BUFFERS) != 0)
+        {
+            throw "_transport->StartReceiving";
+            exit(1);
+        }
+    }
+
+    unsigned int tid;
+    _procThread->Start(tid);
+
+    return 0;
+
+}
+
+
+WebRtc_Word32 TestSenderReceiver::Stop ()
+{
+    CriticalSectionScoped cs(_critSect);
+
+    _transport->StopReceiving();
+
+    if (_procThread)
+    {
+        _procThread->SetNotAlive();
+        _running = false;
+        _eventPtr->Set();
+
+        while (!_procThread->Stop())
+        {
+            ;
+        }
+
+        delete _eventPtr;
+
+        delete _procThread;
+    }
+
+    _procThread = NULL;
+
+    return (0);
+}
+
+
+bool TestSenderReceiver::ProcLoop(void)
+{
+
+    // process RTP/RTCP module
+    _rtp->Process();
+
+    // process SocketTransport module
+    _transport->Process();
+
+    // no critSect
+    while (_running)
+    {
+        // ask RTP/RTCP module for wait time
+        WebRtc_Word32 rtpWait = _rtp->TimeUntilNextProcess();
+
+        // ask SocketTransport module for wait time
+        WebRtc_Word32 tpWait = _transport->TimeUntilNextProcess();
+
+        WebRtc_Word32 minWait = (rtpWait < tpWait) ? rtpWait: tpWait;
+        minWait = (minWait > 0) ? minWait : 0;
+        // wait
+        _eventPtr->Wait(minWait);
+
+        // process RTP/RTCP module
+        _rtp->Process();
+
+        // process SocketTransport module
+        _transport->Process();
+
+    }
+
+    return true;
+}
+
+
+WebRtc_Word32 TestSenderReceiver::ReceiveBitrateKbps ()
+{
+    WebRtc_UWord32 bytesSent;
+    WebRtc_UWord32 packetsSent;
+    WebRtc_UWord32 bytesReceived;
+    WebRtc_UWord32 packetsReceived;
+
+    if (_rtp->DataCountersRTP(&bytesSent, &packetsSent, &bytesReceived, &packetsReceived) == 0)
+    {
+        WebRtc_Word64 now = TickTime::MillisecondTimestamp();
+        WebRtc_Word32 kbps = 0;
+        if (now > _lastTime)
+        {
+            if (_lastTime > 0)
+            {
+                // 8 * bytes / ms = kbps
+                kbps = static_cast<WebRtc_Word32>(
+                    (8 * (bytesReceived - _lastBytesReceived)) / (now - _lastTime));
+            }
+            _lastTime = now;
+            _lastBytesReceived = bytesReceived;
+        }
+        return (kbps);
+    }
+
+    return (-1);
+}
+
+
+WebRtc_Word32 TestSenderReceiver::SetPacketTimeout(const WebRtc_UWord32 timeoutMS)
+{
+    return (_rtp->SetPacketTimeout(timeoutMS, 0 /* RTCP timeout */));
+}
+
+
+void TestSenderReceiver::OnPacketTimeout(const WebRtc_Word32 id)
+{
+    CriticalSectionScoped lock(_critSect);
+
+    _timeOut = true;
+}
+
+
+void TestSenderReceiver::OnReceivedPacket(const WebRtc_Word32 id,
+                                    const RtpRtcpPacketType packetType)
+{
+    // do nothing
+    //printf("OnReceivedPacket\n");
+
+}
+
+WebRtc_Word32 TestSenderReceiver::OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                                          const WebRtc_UWord16 payloadSize,
+                                          const webrtc::WebRtcRTPHeader* rtpHeader)
+{
+    //printf("OnReceivedPayloadData\n");
+    return (0);
+}
+
+
+void TestSenderReceiver::IncomingRTPPacket(const WebRtc_Word8* incomingRtpPacket,
+                                      const WebRtc_Word32 rtpPacketLength,
+                                      const WebRtc_Word8* fromIP,
+                                      const WebRtc_UWord16 fromPort)
+{
+    _rtp->IncomingPacket((WebRtc_UWord8 *) incomingRtpPacket, static_cast<WebRtc_UWord16>(rtpPacketLength));
+}
+
+
+
+void TestSenderReceiver::IncomingRTCPPacket(const WebRtc_Word8* incomingRtcpPacket,
+                                       const WebRtc_Word32 rtcpPacketLength,
+                                       const WebRtc_Word8* fromIP,
+                                       const WebRtc_UWord16 fromPort)
+{
+    _rtp->IncomingPacket((WebRtc_UWord8 *) incomingRtcpPacket, static_cast<WebRtc_UWord16>(rtcpPacketLength));
+}
+
+
+
+
+
+///////////////////
+
+
+WebRtc_Word32 TestSenderReceiver::InitSender (const WebRtc_UWord32 startBitrateKbps,
+                                            const WebRtc_Word8* ipAddr,
+                                            const WebRtc_UWord16 rtpPort,
+                                            const WebRtc_UWord16 rtcpPort /*= 0*/,
+                                            const WebRtc_Word8 payloadType /*= 127*/)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    _payloadType = payloadType;
+
+    // check load generator valid
+    if (_loadGenerator)
+    {
+        _loadGenerator->SetBitrate(startBitrateKbps);
+    }
+
+    if (_rtp->RegisterSendTransport(_transport) != 0)
+    {
+        throw "_rtp->RegisterSendTransport";
+        exit(1);
+    }
+    if (_rtp->RegisterSendPayload("I420", _payloadType, 90000) != 0)
+    {
+        throw "_rtp->RegisterSendPayload";
+        exit(1);
+    }
+
+    if (_rtp->RegisterIncomingVideoCallback(this) != 0)
+    {
+        throw "_rtp->RegisterIncomingVideoCallback";
+        exit(1);
+    }
+
+    if (_rtp->SetRTCPStatus(kRtcpNonCompound) != 0)
+    {
+        throw "_rtp->SetRTCPStatus";
+        exit(1);
+    }
+
+    if (_rtp->SetSendBitrate(startBitrateKbps*1000, 0, MAX_BITRATE_KBPS) != 0)
+    {
+        throw "_rtp->SetSendBitrate";
+        exit(1);
+    }
+
+
+    // SocketTransport
+    if (_transport->InitializeSendSockets(ipAddr, rtpPort, rtcpPort))
+    {
+        throw "_transport->InitializeSendSockets";
+        exit(1);
+    }
+
+    _isSender = true;
+
+    return (0);
+}
+
+
+
+WebRtc_Word32
+TestSenderReceiver::SendOutgoingData(const WebRtc_UWord32 timeStamp,
+                                     const WebRtc_UWord8* payloadData,
+                                     const WebRtc_UWord32 payloadSize,
+                                     const webrtc::FrameType frameType /*= webrtc::kVideoFrameDelta*/)
+{
+    return (_rtp->SendOutgoingData(frameType, _payloadType, timeStamp, payloadData, payloadSize));
+}
+
+
+WebRtc_Word32 TestSenderReceiver::SetLoadGenerator(TestLoadGenerator *generator)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    _loadGenerator = generator;
+    return(0);
+
+}
+
+void TestSenderReceiver::OnNetworkChanged(const WebRtc_Word32 id,
+                                  const WebRtc_UWord32 minBitrateBps,
+                                  const WebRtc_UWord32 maxBitrateBps,
+                                  const WebRtc_UWord8 fractionLost,
+                                  const WebRtc_UWord16 roundTripTimeMs,
+                                  const WebRtc_UWord16 bwEstimateKbitMin,
+                                  const WebRtc_UWord16 bwEstimateKbitMax)
+{
+    if (_loadGenerator)
+    {
+        _loadGenerator->SetBitrate(maxBitrateBps/1000);
+    }
+
+    if (_sendRecCB)
+    {
+        _sendRecCB->OnOnNetworkChanged(maxBitrateBps,
+            fractionLost,
+            roundTripTimeMs,
+            bwEstimateKbitMin,
+            bwEstimateKbitMax);
+    }
+}
diff --git a/src/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.h b/src/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.h
new file mode 100644
index 0000000..7f7f2f0
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/BWEStandAlone/TestSenderReceiver.h
@@ -0,0 +1,166 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_TESTSENDERRECEIVER_H_
+#define WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_TESTSENDERRECEIVER_H_
+
+#include "typedefs.h"
+#include "rtp_rtcp.h"
+#include "rtp_rtcp_defines.h"
+#include "udp_transport.h"
+
+class TestLoadGenerator;
+namespace webrtc {
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+}
+
+using namespace webrtc;
+
+#define MAX_BITRATE_KBPS 50000
+
+
+class SendRecCB
+{
+public:
+    virtual void OnOnNetworkChanged(const WebRtc_UWord32 bitrateTarget,
+        const WebRtc_UWord8 fractionLost,
+        const WebRtc_UWord16 roundTripTimeMs,
+        const WebRtc_UWord16 bwEstimateKbitMin,
+        const WebRtc_UWord16 bwEstimateKbitMax) = 0;
+
+    virtual ~SendRecCB() {};
+};
+
+
+class TestSenderReceiver : public RtpFeedback, public RtpData, public UdpTransportData, public RtpVideoFeedback
+{
+
+public:
+    TestSenderReceiver (void);
+
+    ~TestSenderReceiver (void);
+
+    void SetCallback (SendRecCB *cb) { _sendRecCB = cb; };
+
+    WebRtc_Word32 Start();
+
+    WebRtc_Word32 Stop();
+
+    bool ProcLoop();
+
+    /////////////////////////////////////////////
+    // Receiver methods
+
+    WebRtc_Word32 InitReceiver (const WebRtc_UWord16 rtpPort,
+        const WebRtc_UWord16 rtcpPort = 0,
+        const WebRtc_Word8 payloadType = 127);
+
+    WebRtc_Word32 ReceiveBitrateKbps ();
+
+    WebRtc_Word32 SetPacketTimeout(const WebRtc_UWord32 timeoutMS);
+
+    bool timeOutTriggered () { return (_timeOut); };
+
+    // Inherited from RtpFeedback
+    virtual WebRtc_Word32 OnInitializeDecoder(const WebRtc_Word32 id,
+                                            const WebRtc_Word8 payloadType,
+                                            const WebRtc_Word8 payloadName[RTP_PAYLOAD_NAME_SIZE],
+                                            const WebRtc_UWord32 frequency,
+                                            const WebRtc_UWord8 channels,
+                                            const WebRtc_UWord32 rate) { return(0);};
+
+    virtual void OnPacketTimeout(const WebRtc_Word32 id);
+
+    virtual void OnReceivedPacket(const WebRtc_Word32 id,
+                                  const RtpRtcpPacketType packetType);
+
+    virtual void OnPeriodicDeadOrAlive(const WebRtc_Word32 id,
+                                       const RTPAliveType alive) {};
+
+    virtual void OnIncomingSSRCChanged( const WebRtc_Word32 id,
+                                        const WebRtc_UWord32 SSRC) {};
+
+    virtual void OnIncomingCSRCChanged( const WebRtc_Word32 id,
+                                        const WebRtc_UWord32 CSRC,
+                                        const bool added) {};
+
+
+    // Inherited from RtpData
+
+    virtual WebRtc_Word32 OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                                                const WebRtc_UWord16 payloadSize,
+                                                const webrtc::WebRtcRTPHeader* rtpHeader);
+
+
+    // Inherited from UdpTransportData
+    virtual void IncomingRTPPacket(const WebRtc_Word8* incomingRtpPacket,
+        const WebRtc_Word32 rtpPacketLength,
+        const WebRtc_Word8* fromIP,
+        const WebRtc_UWord16 fromPort);
+
+    virtual void IncomingRTCPPacket(const WebRtc_Word8* incomingRtcpPacket,
+        const WebRtc_Word32 rtcpPacketLength,
+        const WebRtc_Word8* fromIP,
+        const WebRtc_UWord16 fromPort);
+
+
+
+    /////////////////////////////////
+    // Sender methods
+
+    WebRtc_Word32 InitSender (const WebRtc_UWord32 startBitrateKbps,
+        const WebRtc_Word8* ipAddr,
+        const WebRtc_UWord16 rtpPort,
+        const WebRtc_UWord16 rtcpPort = 0,
+        const WebRtc_Word8 payloadType = 127);
+
+    WebRtc_Word32 SendOutgoingData(const WebRtc_UWord32 timeStamp,
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord32 payloadSize,
+        const webrtc::FrameType frameType = webrtc::kVideoFrameDelta);
+
+    WebRtc_Word32 SetLoadGenerator(TestLoadGenerator *generator);
+
+    WebRtc_UWord32 BitrateSent() { return (_rtp->BitrateSent()); };
+
+
+    // Inherited from RtpVideoFeedback
+    virtual void OnReceivedIntraFrameRequest(const WebRtc_Word32 id,
+        const WebRtc_UWord8 message = 0) {};
+
+    virtual void OnNetworkChanged(const WebRtc_Word32 id,
+                                  const WebRtc_UWord32 minBitrateBps,
+                                  const WebRtc_UWord32 maxBitrateBps,
+                                  const WebRtc_UWord8 fractionLost,
+                                  const WebRtc_UWord16 roundTripTimeMs,
+                                  const WebRtc_UWord16 bwEstimateKbitMin,
+                                  const WebRtc_UWord16 bwEstimateKbitMax);
+
+private:
+    RtpRtcp* _rtp;
+    UdpTransport* _transport;
+    webrtc::CriticalSectionWrapper* _critSect;
+    webrtc::EventWrapper *_eventPtr;
+    webrtc::ThreadWrapper* _procThread;
+    bool _running;
+    WebRtc_Word8 _payloadType;
+    TestLoadGenerator* _loadGenerator;
+    bool _isSender;
+    bool _isReceiver;
+    bool _timeOut;
+    SendRecCB * _sendRecCB;
+    WebRtc_UWord32 _lastBytesReceived;
+    WebRtc_Word64 _lastTime;
+
+};
+
+#endif // WEBRTC_MODULES_RTP_RTCP_TEST_BWESTANDALONE_TESTSENDERRECEIVER_H_
diff --git a/src/modules/rtp_rtcp/test/bitstreamTest/bitstreamTest.cc b/src/modules/rtp_rtcp/test/bitstreamTest/bitstreamTest.cc
new file mode 100644
index 0000000..38b6e15
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/bitstreamTest/bitstreamTest.cc
@@ -0,0 +1,538 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "../../source/BitstreamBuilder.h"
+#include "../../source/BitstreamParser.h"
+
+#include <assert.h>
+#include <stdio.h>
+#include <math.h>
+#include <tchar.h>
+#include <windows.h>
+
+WebRtc_UWord32 BitRateBPS(WebRtc_UWord16 x )
+{
+    return (x & 0x3fff) * WebRtc_UWord32(pow(10.0f,(2 + (x >> 14))));
+}
+
+WebRtc_UWord16 BitRateBPSInv(WebRtc_UWord32 x )
+{
+    // 16383 0x3fff
+    //     1 638 300    exp 0
+    //    16 383 000    exp 1
+    //   163 830 000    exp 2
+    // 1 638 300 000    exp 3
+    const float exp = log10(float(x>>14)) - 2;
+    if(exp < 0.0)
+    {
+        return WebRtc_UWord16(x /100);
+    }else if(exp < 1.0)
+    {
+        return 0x4000 + WebRtc_UWord16(x /1000);
+    }else if(exp < 2.0)
+    {
+        return 0x8000 + WebRtc_UWord16(x /10000);
+    }else if(exp < 3.0)
+    {
+        return 0xC000 + WebRtc_UWord16(x /100000);
+    } else
+    {
+        assert(false);
+        return 0;
+    }
+}
+
+
+int _tmain(int argc, _TCHAR* argv[])
+{
+    WebRtc_UWord8 dataBuffer[128];
+    BitstreamBuilder builder(dataBuffer, sizeof(dataBuffer));
+
+    // test 1 to 4 bits
+    builder.Add1Bit(1);
+    builder.Add1Bit(0);
+    builder.Add1Bit(1);
+
+    builder.Add2Bits(1);
+    builder.Add2Bits(2);
+    builder.Add2Bits(3);
+
+    builder.Add3Bits(1);
+    builder.Add3Bits(3);
+    builder.Add3Bits(7);
+
+    builder.Add4Bits(1);
+    builder.Add4Bits(5);
+    builder.Add4Bits(15);
+
+    assert(4 == builder.Length());
+
+    BitstreamParser parser(dataBuffer, sizeof(dataBuffer));
+
+    assert(1 == parser.Get1Bit());
+    assert(0 == parser.Get1Bit());
+    assert(1 == parser.Get1Bit());
+
+    assert(1 == parser.Get2Bits());
+    assert(2 == parser.Get2Bits());
+    assert(3 == parser.Get2Bits());
+
+    assert(1 == parser.Get3Bits());
+    assert(3 == parser.Get3Bits());
+    assert(7 == parser.Get3Bits());
+
+    assert(1 == parser.Get4Bits());
+    assert(5 == parser.Get4Bits());
+    assert(15 == parser.Get4Bits());
+
+    printf("Test of 1 to 4 bits done\n");
+
+    // test 5 to 7 bits
+    builder.Add5Bits(1);
+    builder.Add5Bits(15);
+    builder.Add5Bits(30);
+
+    builder.Add6Bits(1);
+    builder.Add6Bits(30);
+    builder.Add6Bits(60);
+
+    builder.Add7Bits(1);
+    builder.Add7Bits(60);
+    builder.Add7Bits(120);
+
+    assert(1 == parser.Get5Bits());
+    assert(15 == parser.Get5Bits());
+    assert(30 == parser.Get5Bits());
+
+    assert(1 == parser.Get6Bits());
+    assert(30 == parser.Get6Bits());
+    assert(60 == parser.Get6Bits());
+
+    assert(1 == parser.Get7Bits());
+    assert(60 == parser.Get7Bits());
+    assert(120 == parser.Get7Bits());
+
+    printf("Test of 5 to 7 bits done\n");
+
+    builder.Add8Bits(1);
+    builder.Add1Bit(1);
+    builder.Add8Bits(255);
+    builder.Add1Bit(0);
+    builder.Add8Bits(127);
+    builder.Add1Bit(1);
+    builder.Add8Bits(60);
+    builder.Add1Bit(0);
+    builder.Add8Bits(30);
+    builder.Add1Bit(1);
+    builder.Add8Bits(120);
+    builder.Add1Bit(0);
+    builder.Add8Bits(160);
+    builder.Add1Bit(1);
+    builder.Add8Bits(180);
+
+    assert(1 == parser.Get8Bits());
+    assert(1 == parser.Get1Bit());
+    assert(255 == parser.Get8Bits());
+    assert(0 == parser.Get1Bit());
+    assert(127 == parser.Get8Bits());
+    assert(1 == parser.Get1Bit());
+    assert(60 == parser.Get8Bits());
+    assert(0 == parser.Get1Bit());
+    assert(30 == parser.Get8Bits());
+    assert(1 == parser.Get1Bit());
+    assert(120 == parser.Get8Bits());
+    assert(0 == parser.Get1Bit());
+    assert(160 == parser.Get8Bits());
+    assert(1 == parser.Get1Bit());
+    assert(180 == parser.Get8Bits());
+
+    printf("Test of 8 bits done\n");
+
+    builder.Add16Bits(1);
+    builder.Add1Bit(1);
+    builder.Add16Bits(255);
+    builder.Add1Bit(0);
+    builder.Add16Bits(12756);
+    builder.Add1Bit(1);
+    builder.Add16Bits(60);
+    builder.Add1Bit(0);
+    builder.Add16Bits(30);
+    builder.Add1Bit(1);
+    builder.Add16Bits(30120);
+    builder.Add1Bit(0);
+    builder.Add16Bits(160);
+    builder.Add1Bit(1);
+    builder.Add16Bits(180);
+
+    assert(1 == parser.Get16Bits());
+    assert(1 == parser.Get1Bit());
+    assert(255 == parser.Get16Bits());
+    assert(0 == parser.Get1Bit());
+    assert(12756 == parser.Get16Bits());
+    assert(1 == parser.Get1Bit());
+    assert(60 == parser.Get16Bits());
+    assert(0 == parser.Get1Bit());
+    assert(30 == parser.Get16Bits());
+    assert(1 == parser.Get1Bit());
+    assert(30120 == parser.Get16Bits());
+    assert(0 == parser.Get1Bit());
+    assert(160 == parser.Get16Bits());
+    assert(1 == parser.Get1Bit());
+    assert(180 == parser.Get16Bits());
+
+    printf("Test of 16 bits done\n");
+
+    builder.Add24Bits(1);
+    builder.Add1Bit(1);
+    builder.Add24Bits(255);
+    builder.Add1Bit(0);
+    builder.Add24Bits(12756);
+    builder.Add1Bit(1);
+    builder.Add24Bits(60);
+    builder.Add1Bit(0);
+    builder.Add24Bits(303333);
+    builder.Add1Bit(1);
+    builder.Add24Bits(30120);
+    builder.Add1Bit(0);
+    builder.Add24Bits(160);
+    builder.Add1Bit(1);
+    builder.Add24Bits(8018018);
+
+    assert(1 == parser.Get24Bits());
+    assert(1 == parser.Get1Bit());
+    assert(255 == parser.Get24Bits());
+    assert(0 == parser.Get1Bit());
+    assert(12756 == parser.Get24Bits());
+    assert(1 == parser.Get1Bit());
+    assert(60 == parser.Get24Bits());
+    assert(0 == parser.Get1Bit());
+    assert(303333 == parser.Get24Bits());
+    assert(1 == parser.Get1Bit());
+    assert(30120 == parser.Get24Bits());
+    assert(0 == parser.Get1Bit());
+    assert(160 == parser.Get24Bits());
+    assert(1 == parser.Get1Bit());
+    assert(8018018 == parser.Get24Bits());
+
+    printf("Test of 24 bits done\n");
+
+    builder.Add32Bits(1);
+    builder.Add1Bit(1);
+    builder.Add32Bits(255);
+    builder.Add1Bit(0);
+    builder.Add32Bits(12756);
+    builder.Add1Bit(1);
+    builder.Add32Bits(60);
+    builder.Add1Bit(0);
+    builder.Add32Bits(303333);
+    builder.Add1Bit(1);
+    builder.Add32Bits(3012000012);
+    builder.Add1Bit(0);
+    builder.Add32Bits(1601601601);
+    builder.Add1Bit(1);
+    builder.Add32Bits(8018018);
+
+    assert(1 == parser.Get32Bits());
+    assert(1 == parser.Get1Bit());
+    assert(255 == parser.Get32Bits());
+    assert(0 == parser.Get1Bit());
+    assert(12756 == parser.Get32Bits());
+    assert(1 == parser.Get1Bit());
+    assert(60 == parser.Get32Bits());
+    assert(0 == parser.Get1Bit());
+    assert(303333 == parser.Get32Bits());
+    assert(1 == parser.Get1Bit());
+    assert(3012000012 == parser.Get32Bits());
+    assert(0 == parser.Get1Bit());
+    assert(1601601601 == parser.Get32Bits());
+    assert(1 == parser.Get1Bit());
+    assert(8018018 == parser.Get32Bits());
+
+    printf("Test of 32 bits done\n");
+
+    builder.AddUE(1);
+    builder.AddUE(4);
+    builder.AddUE(9809706);
+    builder.AddUE(2);
+    builder.AddUE(15);
+    builder.AddUE(16998);
+
+    assert( 106 == builder.Length());
+
+    assert(1 == parser.GetUE());
+    assert(4 == parser.GetUE());
+    assert(9809706 == parser.GetUE());
+    assert(2 == parser.GetUE());
+    assert(15 == parser.GetUE());
+    assert(16998 == parser.GetUE());
+
+    printf("Test UE bits done\n");
+
+    BitstreamBuilder builderScalabilityInfo(dataBuffer, sizeof(dataBuffer));
+    BitstreamParser parserScalabilityInfo(dataBuffer, sizeof(dataBuffer));
+
+    const WebRtc_UWord8 numberOfLayers = 4;
+    const WebRtc_UWord8 layerId[numberOfLayers] = {0,1,2,3};
+    const WebRtc_UWord8 priorityId[numberOfLayers] = {0,1,2,3};
+    const WebRtc_UWord8 discardableId[numberOfLayers] = {0,1,1,1};
+
+    const WebRtc_UWord8 dependencyId[numberOfLayers]= {0,1,1,1};
+    const WebRtc_UWord8 qualityId[numberOfLayers]= {0,0,0,1};
+    const WebRtc_UWord8 temporalId[numberOfLayers]= {0,0,1,1};
+
+    const WebRtc_UWord16 avgBitrate[numberOfLayers]= {BitRateBPSInv(100000),
+                                                    BitRateBPSInv(200000),
+                                                    BitRateBPSInv(400000),
+                                                    BitRateBPSInv(800000)};
+
+    // todo which one is the sum?
+    const WebRtc_UWord16 maxBitrateLayer[numberOfLayers]= {BitRateBPSInv(150000),
+                                                         BitRateBPSInv(300000),
+                                                         BitRateBPSInv(500000),
+                                                         BitRateBPSInv(900000)};
+
+    const WebRtc_UWord16 maxBitrateLayerRepresentation[numberOfLayers] = {BitRateBPSInv(150000),
+                                                                        BitRateBPSInv(450000),
+                                                                        BitRateBPSInv(950000),
+                                                                        BitRateBPSInv(1850000)};
+
+    assert( 16300 == BitRateBPS(BitRateBPSInv(16383)));
+    assert( 163800 == BitRateBPS(BitRateBPSInv(163830)));
+    assert( 1638300 == BitRateBPS(BitRateBPSInv(1638300)));
+    assert( 1638000 == BitRateBPS(BitRateBPSInv(1638400)));
+
+    assert( 18500 == BitRateBPS(BitRateBPSInv(18500)));
+    assert( 185000 == BitRateBPS(BitRateBPSInv(185000)));
+    assert( 1850000 == BitRateBPS(BitRateBPSInv(1850000)));
+    assert( 18500000 == BitRateBPS(BitRateBPSInv(18500000)));
+    assert( 185000000 == BitRateBPS(BitRateBPSInv(185000000)));
+
+    const WebRtc_UWord16 maxBitrareCalcWindow[numberOfLayers] = {200, 200,200,200};// in 1/100 of second
+
+    builderScalabilityInfo.Add1Bit(0);  // temporal_id_nesting_flag
+    builderScalabilityInfo.Add1Bit(0);    // priority_layer_info_present_flag
+    builderScalabilityInfo.Add1Bit(0);  // priority_id_setting_flag
+
+    builderScalabilityInfo.AddUE(numberOfLayers-1);
+
+    for(int i = 0; i<= numberOfLayers-1; i++)
+    {
+        builderScalabilityInfo.AddUE(layerId[i]);
+        builderScalabilityInfo.Add6Bits(priorityId[i]);
+        builderScalabilityInfo.Add1Bit(discardableId[i]);
+        builderScalabilityInfo.Add3Bits(dependencyId[i]);
+        builderScalabilityInfo.Add4Bits(qualityId[i]);
+        builderScalabilityInfo.Add3Bits(temporalId[i]);
+
+        builderScalabilityInfo.Add1Bit(0);
+        builderScalabilityInfo.Add1Bit(0);
+        builderScalabilityInfo.Add1Bit(0);
+        builderScalabilityInfo.Add1Bit(0);
+
+        builderScalabilityInfo.Add1Bit(1);    // bitrate_info_present_flag
+
+        builderScalabilityInfo.Add1Bit(0);
+        builderScalabilityInfo.Add1Bit(0);
+        builderScalabilityInfo.Add1Bit(0);
+        builderScalabilityInfo.Add1Bit(0);
+        builderScalabilityInfo.Add1Bit(0);
+        builderScalabilityInfo.Add1Bit(0);
+        builderScalabilityInfo.Add1Bit(0);
+        builderScalabilityInfo.Add1Bit(0);
+
+        builderScalabilityInfo.Add16Bits(avgBitrate[i]);
+        builderScalabilityInfo.Add16Bits(maxBitrateLayer[i]);
+        builderScalabilityInfo.Add16Bits(maxBitrateLayerRepresentation[i]);
+        builderScalabilityInfo.Add16Bits(maxBitrareCalcWindow[i]);
+
+        builderScalabilityInfo.AddUE(0); // layer_dependency_info_src_layer_id_delta
+        builderScalabilityInfo.AddUE(0); // parameter_sets_info_src_layer_id_delta
+    }
+
+    printf("Test builderScalabilityInfo done\n");
+
+    // Scalability Info parser
+    parserScalabilityInfo.Get1Bit(); // not used in futher parsing
+    const WebRtc_UWord8 priority_layer_info_present = parserScalabilityInfo.Get1Bit();
+    const WebRtc_UWord8 priority_id_setting_flag = parserScalabilityInfo.Get1Bit();
+
+    WebRtc_UWord32 numberOfLayersMinusOne = parserScalabilityInfo.GetUE();
+    for(WebRtc_UWord32 j = 0; j<= numberOfLayersMinusOne; j++)
+    {
+        parserScalabilityInfo.GetUE();
+        parserScalabilityInfo.Get6Bits();
+        parserScalabilityInfo.Get1Bit();
+        parserScalabilityInfo.Get3Bits();
+        parserScalabilityInfo.Get4Bits();
+        parserScalabilityInfo.Get3Bits();
+
+        const WebRtc_UWord8 sub_pic_layer_flag = parserScalabilityInfo.Get1Bit();
+        const WebRtc_UWord8 sub_region_layer_flag = parserScalabilityInfo.Get1Bit();
+        const WebRtc_UWord8 iroi_division_info_present_flag = parserScalabilityInfo.Get1Bit();
+        const WebRtc_UWord8 profile_level_info_present_flag = parserScalabilityInfo.Get1Bit();
+        const WebRtc_UWord8 bitrate_info_present_flag = parserScalabilityInfo.Get1Bit();
+        const WebRtc_UWord8 frm_rate_info_present_flag = parserScalabilityInfo.Get1Bit();
+        const WebRtc_UWord8 frm_size_info_present_flag = parserScalabilityInfo.Get1Bit();
+        const WebRtc_UWord8 layer_dependency_info_present_flag = parserScalabilityInfo.Get1Bit();
+        const WebRtc_UWord8 parameter_sets_info_present_flag = parserScalabilityInfo.Get1Bit();
+        const WebRtc_UWord8 bitstream_restriction_info_present_flag = parserScalabilityInfo.Get1Bit();
+        const WebRtc_UWord8 exact_inter_layer_pred_flag = parserScalabilityInfo.Get1Bit();  // not used in futher parsing
+
+        if(sub_pic_layer_flag || iroi_division_info_present_flag)
+        {
+            parserScalabilityInfo.Get1Bit();
+        }
+        const WebRtc_UWord8 layer_conversion_flag = parserScalabilityInfo.Get1Bit();
+        const WebRtc_UWord8 layer_output_flag = parserScalabilityInfo.Get1Bit();  // not used in futher parsing
+
+        if(profile_level_info_present_flag)
+        {
+            parserScalabilityInfo.Get24Bits();
+        }
+        if(bitrate_info_present_flag)
+        {
+            // this is what we want
+            assert(avgBitrate[j] == parserScalabilityInfo.Get16Bits());
+            assert(maxBitrateLayer[j] == parserScalabilityInfo.Get16Bits());
+            assert(maxBitrateLayerRepresentation[j] == parserScalabilityInfo.Get16Bits());
+            assert(maxBitrareCalcWindow[j] == parserScalabilityInfo.Get16Bits());
+        }else
+        {
+            assert(false);
+        }
+        if(frm_rate_info_present_flag)
+        {
+            parserScalabilityInfo.Get2Bits();
+            parserScalabilityInfo.Get16Bits();
+        }
+        if(frm_size_info_present_flag || iroi_division_info_present_flag)
+        {
+            parserScalabilityInfo.GetUE();
+            parserScalabilityInfo.GetUE();
+        }
+        if(sub_region_layer_flag)
+        {
+            parserScalabilityInfo.GetUE();
+            if(parserScalabilityInfo.Get1Bit())
+            {
+                parserScalabilityInfo.Get16Bits();
+                parserScalabilityInfo.Get16Bits();
+                parserScalabilityInfo.Get16Bits();
+                parserScalabilityInfo.Get16Bits();
+            }
+        }
+        if(sub_pic_layer_flag)
+        {
+            parserScalabilityInfo.GetUE();
+        }
+        if(iroi_division_info_present_flag)
+        {
+            if(parserScalabilityInfo.Get1Bit())
+            {
+                parserScalabilityInfo.GetUE();
+                parserScalabilityInfo.GetUE();
+            }else
+            {
+                const WebRtc_UWord32 numRoisMinusOne = parserScalabilityInfo.GetUE();
+                for(WebRtc_UWord32 k = 0; k <= numRoisMinusOne; k++)
+                {
+                    parserScalabilityInfo.GetUE();
+                    parserScalabilityInfo.GetUE();
+                    parserScalabilityInfo.GetUE();
+                }
+            }
+        }
+        if(layer_dependency_info_present_flag)
+        {
+            const WebRtc_UWord32 numDirectlyDependentLayers = parserScalabilityInfo.GetUE();
+            for(WebRtc_UWord32 k = 0; k < numDirectlyDependentLayers; k++)
+            {
+                parserScalabilityInfo.GetUE();
+            }
+        } else
+        {
+            parserScalabilityInfo.GetUE();
+        }
+        if(parameter_sets_info_present_flag)
+        {
+            const WebRtc_UWord32 numSeqParameterSetMinusOne = parserScalabilityInfo.GetUE();
+            for(WebRtc_UWord32 k = 0; k <= numSeqParameterSetMinusOne; k++)
+            {
+                parserScalabilityInfo.GetUE();
+            }
+            const WebRtc_UWord32 numSubsetSeqParameterSetMinusOne = parserScalabilityInfo.GetUE();
+            for(WebRtc_UWord32 l = 0; l <= numSubsetSeqParameterSetMinusOne; l++)
+            {
+                parserScalabilityInfo.GetUE();
+            }
+            const WebRtc_UWord32 numPicParameterSetMinusOne = parserScalabilityInfo.GetUE();
+            for(WebRtc_UWord32 m = 0; m <= numPicParameterSetMinusOne; m++)
+            {
+                parserScalabilityInfo.GetUE();
+            }
+        }else
+        {
+            parserScalabilityInfo.GetUE();
+        }
+        if(bitstream_restriction_info_present_flag)
+        {
+            parserScalabilityInfo.Get1Bit();
+            parserScalabilityInfo.GetUE();
+            parserScalabilityInfo.GetUE();
+            parserScalabilityInfo.GetUE();
+            parserScalabilityInfo.GetUE();
+            parserScalabilityInfo.GetUE();
+            parserScalabilityInfo.GetUE();
+        }
+        if(layer_conversion_flag)
+        {
+            parserScalabilityInfo.GetUE();
+            for(WebRtc_UWord32 k = 0; k <2;k++)
+            {
+                if(parserScalabilityInfo.Get1Bit())
+                {
+                    parserScalabilityInfo.Get24Bits();
+                    parserScalabilityInfo.Get16Bits();
+                    parserScalabilityInfo.Get16Bits();
+                }
+            }
+        }
+    }
+    if(priority_layer_info_present)
+    {
+        const WebRtc_UWord32 prNumDidMinusOne = parserScalabilityInfo.GetUE();
+        for(WebRtc_UWord32 k = 0; k <= prNumDidMinusOne;k++)
+        {
+            parserScalabilityInfo.Get3Bits();
+            const WebRtc_UWord32 prNumMinusOne = parserScalabilityInfo.GetUE();
+            for(WebRtc_UWord32 l = 0; l <= prNumMinusOne; l++)
+            {
+                parserScalabilityInfo.GetUE();
+                parserScalabilityInfo.Get24Bits();
+                parserScalabilityInfo.Get16Bits();
+                parserScalabilityInfo.Get16Bits();
+            }
+        }
+    }
+    if(priority_id_setting_flag)
+    {
+        WebRtc_UWord8 priorityIdSettingUri;
+        WebRtc_UWord32 priorityIdSettingUriIdx = 0;
+        do
+        {
+            priorityIdSettingUri = parserScalabilityInfo.Get8Bits();
+        } while (priorityIdSettingUri != 0);
+    }
+    printf("Test parserScalabilityInfo done\n");
+
+    printf("\nAPI test of parser for ScalabilityInfo done\n");
+
+    ::Sleep(5000);
+}
diff --git a/src/modules/rtp_rtcp/test/bwe_standalone.gypi b/src/modules/rtp_rtcp/test/bwe_standalone.gypi
new file mode 100644
index 0000000..36a50de
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/bwe_standalone.gypi
@@ -0,0 +1,111 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'bwe_standalone',
+      'type': 'executable',
+      'dependencies': [
+        'matlab_plotting',
+        'rtp_rtcp',
+        'udp_transport',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../interface',
+        '../../interface',
+      ],
+      'sources': [
+        'BWEStandAlone/BWEStandAlone.cc',
+        'BWEStandAlone/TestLoadGenerator.cc',
+        'BWEStandAlone/TestLoadGenerator.h',
+        'BWEStandAlone/TestSenderReceiver.cc',
+        'BWEStandAlone/TestSenderReceiver.h',
+      ], # source
+      'conditions': [
+          ['OS=="linux"', {
+              'cflags': [
+                  '-fexceptions', # enable exceptions
+                  ],
+              },
+           ],
+          ],
+
+      'include_dirs': [
+          ],
+      'link_settings': {
+          },
+    },
+
+    {
+      'target_name': 'matlab_plotting',
+      'type': '<(library)',
+      'dependencies': [
+        'matlab_plotting_include',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+          '/opt/matlab2010a/extern/include',
+          ],
+      # 'direct_dependent_settings': {
+      #     'defines': [
+      #         'MATLAB',
+      #         ],
+      #     'include_dirs': [
+      #         'BWEStandAlone',
+      #         ],
+      #     },
+      'export_dependent_settings': [
+          'matlab_plotting_include',
+          ],
+      'sources': [
+          'BWEStandAlone/MatlabPlot.cc',
+          'BWEStandAlone/MatlabPlot.h',
+          ],
+      'link_settings': {
+          'ldflags' : [
+              '-L/opt/matlab2010a/bin/glnxa64',
+              '-leng',
+              '-lmx',
+              '-Wl,-rpath,/opt/matlab2010a/bin/glnxa64',
+              ],
+          },
+      'defines': [
+          'MATLAB',
+          ],
+      'conditions': [
+          ['OS=="linux"', {
+              'cflags': [
+                  '-fexceptions', # enable exceptions
+                  ],
+              },
+           ],
+          ],
+      },
+
+    {
+      'target_name': 'matlab_plotting_include',
+      'type': 'none',
+      'direct_dependent_settings': {
+          'defines': [
+#              'MATLAB',
+              ],
+          'include_dirs': [
+              'BWEStandAlone',
+              ],
+          },
+      },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/modules/rtp_rtcp/test/testAPI/test_api.cc b/src/modules/rtp_rtcp/test/testAPI/test_api.cc
new file mode 100644
index 0000000..dd12c45
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/testAPI/test_api.cc
@@ -0,0 +1,120 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+#include <vector>
+#include <gtest/gtest.h>
+
+#include "test_api.h"
+
+#include "common_types.h"
+#include "rtp_rtcp.h"
+#include "rtp_rtcp_defines.h"
+
+using namespace webrtc;
+
+class RtpRtcpAPITest : public ::testing::Test {
+ protected:
+  RtpRtcpAPITest() {
+    test_CSRC[0] = 1234;
+    test_CSRC[2] = 2345;
+    test_id = 123;
+    test_ssrc = 3456;
+    test_timestamp = 4567;
+    test_sequence_number = 2345;
+  }
+  ~RtpRtcpAPITest() {}
+
+  virtual void SetUp() {
+    RtpRtcp::Configuration configuration;
+    configuration.id = test_id;
+    configuration.audio = true;
+    configuration.clock = &fake_clock;
+    module = RtpRtcp::CreateRtpRtcp(configuration);
+  }
+
+  virtual void TearDown() {
+    delete module;
+  }
+
+  int test_id;
+  RtpRtcp* module;
+  WebRtc_UWord32 test_ssrc;
+  WebRtc_UWord32 test_timestamp;
+  WebRtc_UWord16 test_sequence_number;
+  WebRtc_UWord32 test_CSRC[webrtc::kRtpCsrcSize];
+  FakeRtpRtcpClock fake_clock;
+};
+
+TEST_F(RtpRtcpAPITest, Basic) {
+  EXPECT_EQ(0, module->SetSequenceNumber(test_sequence_number));
+  EXPECT_EQ(test_sequence_number, module->SequenceNumber());
+
+  EXPECT_EQ(0, module->SetStartTimestamp(test_timestamp));
+  EXPECT_EQ(test_timestamp, module->StartTimestamp());
+
+  EXPECT_FALSE(module->Sending());
+  EXPECT_EQ(0, module->SetSendingStatus(true));
+  EXPECT_TRUE(module->Sending());
+}
+
+TEST_F(RtpRtcpAPITest, MTU) {
+  EXPECT_EQ(-1, module->SetMaxTransferUnit(10));
+  EXPECT_EQ(-1, module->SetMaxTransferUnit(IP_PACKET_SIZE + 1));
+  EXPECT_EQ(0, module->SetMaxTransferUnit(1234));
+  EXPECT_EQ(1234-20-8, module->MaxPayloadLength());
+
+  EXPECT_EQ(0, module->SetTransportOverhead(true, true, 12));
+  EXPECT_EQ(1234 - 20- 20 -20 - 12, module->MaxPayloadLength());
+
+  EXPECT_EQ(0, module->SetTransportOverhead(false, false, 0));
+  EXPECT_EQ(1234 - 20 - 8, module->MaxPayloadLength());
+}
+
+TEST_F(RtpRtcpAPITest, SSRC) {
+  EXPECT_EQ(0, module->SetSSRC(test_ssrc));
+  EXPECT_EQ(test_ssrc, module->SSRC());
+}
+
+TEST_F(RtpRtcpAPITest, CSRC) {
+  EXPECT_EQ(0, module->SetCSRCs(test_CSRC, 2));
+  WebRtc_UWord32 testOfCSRC[webrtc::kRtpCsrcSize];
+  EXPECT_EQ(2, module->CSRCs(testOfCSRC));
+  EXPECT_EQ(test_CSRC[0], testOfCSRC[0]);
+  EXPECT_EQ(test_CSRC[1], testOfCSRC[1]);
+}
+
+TEST_F(RtpRtcpAPITest, TrafficSmoothing) {
+  EXPECT_FALSE(module->TransmissionSmoothingStatus());
+  module->SetTransmissionSmoothingStatus(true);
+  EXPECT_TRUE(module->TransmissionSmoothingStatus());
+}
+
+TEST_F(RtpRtcpAPITest, RTCP) {
+  EXPECT_EQ(kRtcpOff, module->RTCP());
+  EXPECT_EQ(0, module->SetRTCPStatus(kRtcpCompound));
+  EXPECT_EQ(kRtcpCompound, module->RTCP());
+
+  EXPECT_EQ(0, module->SetCNAME("john.doe@test.test"));
+
+  char cName[RTCP_CNAME_SIZE];
+  EXPECT_EQ(0, module->CNAME(cName));
+  EXPECT_STRCASEEQ(cName, "john.doe@test.test");
+
+  EXPECT_FALSE(module->TMMBR());
+  EXPECT_EQ(0, module->SetTMMBRStatus(true));
+  EXPECT_TRUE(module->TMMBR());
+  EXPECT_EQ(0, module->SetTMMBRStatus(false));
+  EXPECT_FALSE(module->TMMBR());
+
+  EXPECT_EQ(kNackOff, module->NACK());
+  EXPECT_EQ(0, module->SetNACKStatus(kNackRtcp));
+  EXPECT_EQ(kNackRtcp, module->NACK());
+}
diff --git a/src/modules/rtp_rtcp/test/testAPI/test_api.gypi b/src/modules/rtp_rtcp/test/testAPI/test_api.gypi
new file mode 100644
index 0000000..eaa3a72
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/testAPI/test_api.gypi
@@ -0,0 +1,42 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'test_rtp_rtcp_api',
+      'type': 'executable',
+      'dependencies': [
+        'rtp_rtcp',
+        '<(webrtc_root)/test/test.gyp:test_support_main',
+        '<(DEPTH)/testing/gtest.gyp:gtest',
+      ],
+      
+      'include_dirs': [
+        '../../interface',
+        '../../source',
+        '../../../../system_wrappers/interface',
+      ],
+   
+      'sources': [
+        'test_api.cc',
+        'test_api_audio.cc',
+        'test_api_nack.cc',
+        'test_api_rtcp.cc',
+        'test_api_video.cc',
+      ],
+      
+    },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/modules/rtp_rtcp/test/testAPI/test_api.h b/src/modules/rtp_rtcp/test/testAPI/test_api.h
new file mode 100644
index 0000000..6261d7f
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/testAPI/test_api.h
@@ -0,0 +1,89 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "common_types.h"
+#include "rtp_rtcp.h"
+#include "rtp_rtcp_defines.h"
+
+namespace webrtc {
+
+class FakeRtpRtcpClock : public RtpRtcpClock {
+ public:
+  FakeRtpRtcpClock() {
+    time_in_ms_ = 123456;
+  }
+  // Return a timestamp in milliseconds relative to some arbitrary
+  // source; the source is fixed for this clock.
+  virtual WebRtc_Word64 GetTimeInMS() {
+    return time_in_ms_;
+  }
+  // Retrieve an NTP absolute timestamp.
+  virtual void CurrentNTP(WebRtc_UWord32& secs, WebRtc_UWord32& frac) {
+    secs = time_in_ms_ / 1000;
+    frac = (time_in_ms_ % 1000) * 4294967;
+  }
+  void IncrementTime(WebRtc_UWord32 time_increment_ms) {
+    time_in_ms_ += time_increment_ms;
+  }
+ private:
+  WebRtc_Word64 time_in_ms_;
+};
+
+// This class sends all its packet straight to the provided RtpRtcp module.
+// with optional packet loss.
+class LoopBackTransport : public webrtc::Transport {
+ public:
+  LoopBackTransport()
+    : _count(0),
+      _packetLoss(0),
+      _rtpRtcpModule(NULL) {
+  }
+  void SetSendModule(RtpRtcp* rtpRtcpModule) {
+    _rtpRtcpModule = rtpRtcpModule;
+  }
+  void DropEveryNthPacket(int n) {
+    _packetLoss = n;
+  }
+  virtual int SendPacket(int channel, const void *data, int len) {
+    _count++;
+    if (_packetLoss > 0) {
+      if ((_count % _packetLoss) == 0) {
+        return len;
+      }
+    }
+    if (_rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)data, len) == 0) {
+      return len;
+    }
+    return -1;
+  }
+  virtual int SendRTCPPacket(int channel, const void *data, int len) {
+    if (_rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)data, len) == 0) {
+      return len;
+    }
+    return -1;
+  }
+ private:
+  int _count;
+  int _packetLoss;
+  RtpRtcp* _rtpRtcpModule;
+};
+
+class RtpReceiver : public RtpData {
+ public:
+   virtual WebRtc_Word32 OnReceivedPayloadData(
+       const WebRtc_UWord8* payloadData,
+       const WebRtc_UWord16 payloadSize,
+       const webrtc::WebRtcRTPHeader* rtpHeader) {
+    return 0;
+  }
+};
+
+}  // namespace webrtc
+ 
diff --git a/src/modules/rtp_rtcp/test/testAPI/test_api_audio.cc b/src/modules/rtp_rtcp/test/testAPI/test_api_audio.cc
new file mode 100644
index 0000000..bf88ace
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/testAPI/test_api_audio.cc
@@ -0,0 +1,332 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+#include <vector>
+#include <gtest/gtest.h>
+
+#include "test_api.h"
+
+#include "common_types.h"
+#include "rtp_rtcp.h"
+#include "rtp_rtcp_defines.h"
+
+using namespace webrtc;
+
+#define test_rate 64000u
+
+class VerifyingAudioReceiver : public RtpData {
+ public:
+  virtual WebRtc_Word32 OnReceivedPayloadData(
+      const WebRtc_UWord8* payloadData,
+      const WebRtc_UWord16 payloadSize,
+      const webrtc::WebRtcRTPHeader* rtpHeader) {
+    if (rtpHeader->header.payloadType == 98 ||
+        rtpHeader->header.payloadType == 99) {
+      EXPECT_EQ(4, payloadSize);
+      char str[5];
+      memcpy(str, payloadData, payloadSize);
+      str[4] = 0;
+      // All our test vectors for payload type 96 and 97 even the stereo is on
+      // a per channel base equal to the 4 chars "test".
+      // Note there is no null termination so we add that to use the
+      // test EXPECT_STRCASEEQ.
+      EXPECT_STRCASEEQ("test", str);
+      return 0;
+    }
+    if (rtpHeader->header.payloadType == 100 ||
+        rtpHeader->header.payloadType == 101 ||
+        rtpHeader->header.payloadType == 102) {
+      if (rtpHeader->type.Audio.channel == 1) {
+        if (payloadData[0] == 0xff) {
+          // All our test vectors for payload type 100, 101 and 102 have the
+          // first channel data being equal to 0xff.
+          return 0;
+        }
+      }
+      ADD_FAILURE() << "This code path should never happen.";
+      return -1;
+    }
+    return 0;
+  }
+};
+
+class RTPCallback : public RtpFeedback {
+ public:
+  virtual WebRtc_Word32 OnInitializeDecoder(
+      const WebRtc_Word32 id,
+      const WebRtc_Word8 payloadType,
+      const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+      const int frequency,
+      const WebRtc_UWord8 channels,
+      const WebRtc_UWord32 rate) {
+    if (payloadType == 96) {
+      EXPECT_EQ(test_rate, rate) <<
+          "The rate should be 64K for this payloadType";
+    }
+    return 0;
+  }
+  virtual void OnPacketTimeout(const WebRtc_Word32 id) {
+  }
+  virtual void OnReceivedPacket(const WebRtc_Word32 id,
+                                const RtpRtcpPacketType packetType) {
+  }
+  virtual void OnPeriodicDeadOrAlive(const WebRtc_Word32 id,
+                                     const RTPAliveType alive) {
+  }
+  virtual void OnIncomingSSRCChanged(const WebRtc_Word32 id,
+                                     const WebRtc_UWord32 SSRC) {
+  }
+  virtual void OnIncomingCSRCChanged(const WebRtc_Word32 id,
+                                     const WebRtc_UWord32 CSRC,
+                                     const bool added) {
+  }
+};
+
+class AudioFeedback : public RtpAudioFeedback {
+  virtual void OnReceivedTelephoneEvent(const WebRtc_Word32 id,
+                                        const WebRtc_UWord8 event,
+                                        const bool end) {
+    static WebRtc_UWord8 expectedEvent = 0;
+
+    if (end) {
+      WebRtc_UWord8 oldEvent = expectedEvent-1;
+      if (expectedEvent == 32) {
+        oldEvent = 15;
+      }
+      EXPECT_EQ(oldEvent, event);
+    } else {
+      EXPECT_EQ(expectedEvent, event);
+      expectedEvent++;
+    }
+    if (expectedEvent == 16) {
+      expectedEvent = 32;
+    }
+  }
+  virtual void OnPlayTelephoneEvent(const WebRtc_Word32 id,
+                                    const WebRtc_UWord8 event,
+                                    const WebRtc_UWord16 lengthMs,
+                                    const WebRtc_UWord8 volume) {
+  };
+};
+
+class RtpRtcpAudioTest : public ::testing::Test {
+ protected:
+  RtpRtcpAudioTest() {
+    test_CSRC[0] = 1234;
+    test_CSRC[2] = 2345;
+    test_id = 123;
+    test_ssrc = 3456;
+    test_timestamp = 4567;
+    test_sequence_number = 2345;
+  }
+  ~RtpRtcpAudioTest() {}
+
+  virtual void SetUp() {
+    audioFeedback = new AudioFeedback();
+    data_receiver1 = new VerifyingAudioReceiver();
+    data_receiver2 = new VerifyingAudioReceiver();
+    rtp_callback = new RTPCallback();
+    transport1 = new LoopBackTransport();
+    transport2 = new LoopBackTransport();
+
+    RtpRtcp::Configuration configuration;
+    configuration.id = test_id;
+    configuration.audio = true;
+    configuration.clock = &fake_clock;
+    configuration.incoming_data = data_receiver1;
+    configuration.outgoing_transport = transport1;
+    configuration.audio_messages = audioFeedback;
+
+    module1 = RtpRtcp::CreateRtpRtcp(configuration);
+
+    configuration.id = test_id + 1;
+    configuration.incoming_data = data_receiver2;
+    configuration.incoming_messages = rtp_callback;
+    configuration.outgoing_transport = transport2;
+    configuration.audio_messages = audioFeedback;
+
+    module2 = RtpRtcp::CreateRtpRtcp(configuration);
+
+    transport1->SetSendModule(module2);
+    transport2->SetSendModule(module1);
+  }
+
+  virtual void TearDown() {
+    delete module1;
+    delete module2;
+    delete transport1;
+    delete transport2;
+    delete audioFeedback;
+    delete data_receiver1;
+    delete data_receiver2;
+    delete rtp_callback;
+  }
+
+  int test_id;
+  RtpRtcp* module1;
+  RtpRtcp* module2;
+  VerifyingAudioReceiver* data_receiver1;
+  VerifyingAudioReceiver* data_receiver2;
+  LoopBackTransport* transport1;
+  LoopBackTransport* transport2;
+  AudioFeedback* audioFeedback;
+  RTPCallback* rtp_callback;
+  WebRtc_UWord32 test_ssrc;
+  WebRtc_UWord32 test_timestamp;
+  WebRtc_UWord16 test_sequence_number;
+  WebRtc_UWord32 test_CSRC[webrtc::kRtpCsrcSize];
+  FakeRtpRtcpClock fake_clock;
+};
+
+TEST_F(RtpRtcpAudioTest, Basic) {
+  EXPECT_EQ(0, module1->SetSSRC(test_ssrc));
+  EXPECT_EQ(0, module1->SetStartTimestamp(test_timestamp));
+
+  EXPECT_FALSE(module1->TelephoneEvent());
+
+  // Test detection at the end of a DTMF tone.
+  EXPECT_EQ(0, module2->SetTelephoneEventStatus(true, true, true));
+  EXPECT_EQ(true, module2->TelephoneEvent());
+
+  EXPECT_EQ(0, module1->SetSendingStatus(true));
+
+  // Start basic RTP test.
+
+  // Send an empty RTP packet.
+  // Should fail since we have not registerd the payload type.
+  EXPECT_EQ(-1, module1->SendOutgoingData(webrtc::kAudioFrameSpeech,
+                                          96, 0, -1, NULL, 0));
+
+  CodecInst voiceCodec;
+  voiceCodec.pltype = 96;
+  voiceCodec.plfreq = 8000;
+  memcpy(voiceCodec.plname, "PCMU", 5);
+
+  EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
+  EXPECT_EQ(0, module1->RegisterReceivePayload(voiceCodec));
+  EXPECT_EQ(0, module2->RegisterSendPayload(voiceCodec));
+  voiceCodec.rate = test_rate;
+  EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
+  printf("4\n");
+
+  const WebRtc_UWord8 test[5] = "test";
+  EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 96,
+                                         0, -1, test, 4));
+
+  EXPECT_EQ(test_ssrc, module2->RemoteSSRC());
+  EXPECT_EQ(test_timestamp, module2->RemoteTimestamp());
+}
+
+TEST_F(RtpRtcpAudioTest, RED) {
+  CodecInst voiceCodec;
+  voiceCodec.pltype = 96;
+  voiceCodec.plfreq = 8000;
+  memcpy(voiceCodec.plname, "PCMU", 5);
+
+  EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
+  EXPECT_EQ(0, module1->RegisterReceivePayload(voiceCodec));
+  EXPECT_EQ(0, module2->RegisterSendPayload(voiceCodec));
+  voiceCodec.rate = test_rate;
+  EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
+
+  EXPECT_EQ(0, module1->SetSSRC(test_ssrc));
+  EXPECT_EQ(0, module1->SetStartTimestamp(test_timestamp));
+  EXPECT_EQ(0, module1->SetSendingStatus(true));
+
+  voiceCodec.pltype = 127;
+  voiceCodec.plfreq = 8000;
+  memcpy(voiceCodec.plname, "RED", 4);
+
+  EXPECT_EQ(0, module1->SetSendREDPayloadType(voiceCodec.pltype));
+  WebRtc_Word8 red = 0;
+  EXPECT_EQ(0, module1->SendREDPayloadType(red));
+  EXPECT_EQ(voiceCodec.pltype, red);
+  EXPECT_EQ(0, module1->RegisterReceivePayload(voiceCodec));
+  EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
+
+  RTPFragmentationHeader fragmentation;
+  fragmentation.fragmentationVectorSize = 2;
+  fragmentation.fragmentationLength = new WebRtc_UWord32[2];
+  fragmentation.fragmentationLength[0] = 4;
+  fragmentation.fragmentationLength[1] = 4;
+  fragmentation.fragmentationOffset = new WebRtc_UWord32[2];
+  fragmentation.fragmentationOffset[0] = 0;
+  fragmentation.fragmentationOffset[1] = 4;
+  fragmentation.fragmentationTimeDiff = new WebRtc_UWord16[2];
+  fragmentation.fragmentationTimeDiff[0] = 0;
+  fragmentation.fragmentationTimeDiff[1] = 0;
+  fragmentation.fragmentationPlType = new WebRtc_UWord8[2];
+  fragmentation.fragmentationPlType[0] = 96;
+  fragmentation.fragmentationPlType[1] = 96;
+
+  const WebRtc_UWord8 test[5] = "test";
+  // Send a RTP packet.
+  EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech,
+                                         96, 160, -1, test, 4,
+                                         &fragmentation));
+
+  EXPECT_EQ(0, module1->SetSendREDPayloadType(-1));
+  EXPECT_EQ(-1, module1->SendREDPayloadType(red));
+}
+
+TEST_F(RtpRtcpAudioTest, DTMF) {
+  CodecInst voiceCodec;
+  voiceCodec.pltype = 96;
+  voiceCodec.plfreq = 8000;
+  memcpy(voiceCodec.plname, "PCMU", 5);
+
+  EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
+  EXPECT_EQ(0, module1->RegisterReceivePayload(voiceCodec));
+  EXPECT_EQ(0, module2->RegisterSendPayload(voiceCodec));
+  voiceCodec.rate = test_rate;
+  EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
+
+  EXPECT_EQ(0, module1->SetSSRC(test_ssrc));
+  EXPECT_EQ(0, module1->SetStartTimestamp(test_timestamp));
+  EXPECT_EQ(0, module1->SetSendingStatus(true));
+
+  // Prepare for DTMF.
+  voiceCodec.pltype = 97;
+  voiceCodec.plfreq = 8000;
+  memcpy(voiceCodec.plname, "telephone-event", 16);
+
+  EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
+  EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
+
+  // Start DTMF test.
+  WebRtc_UWord32 timeStamp = 160;
+
+  // Send a DTMF tone using RFC 2833 (4733).
+  for (int i = 0; i < 16; i++) {
+    EXPECT_EQ(0, module1->SendTelephoneEventOutband(i, timeStamp, 10));
+  }
+  timeStamp += 160;  // Prepare for next packet.
+
+  const WebRtc_UWord8 test[9] = "test";
+
+  // Send RTP packets for 16 tones a 160 ms  100ms
+  // pause between = 2560ms + 1600ms = 4160ms
+  for (;timeStamp <= 250 * 160; timeStamp += 160) {
+    EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 96,
+                                           timeStamp, -1, test, 4));
+    fake_clock.IncrementTime(20);
+    module1->Process();
+  }
+  EXPECT_EQ(0, module1->SendTelephoneEventOutband(32, 9000, 10));
+
+  for (;timeStamp <= 740 * 160; timeStamp += 160) {
+    EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 96,
+                                           timeStamp, -1, test, 4));
+    fake_clock.IncrementTime(20);
+    module1->Process();
+  }
+  delete audioFeedback;
+}
diff --git a/src/modules/rtp_rtcp/test/testAPI/test_api_nack.cc b/src/modules/rtp_rtcp/test/testAPI/test_api_nack.cc
new file mode 100644
index 0000000..6fa81e7
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/testAPI/test_api_nack.cc
@@ -0,0 +1,258 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+#include <vector>
+#include <gtest/gtest.h>
+
+#include "test_api.h"
+
+#include "common_types.h"
+#include "rtp_rtcp.h"
+#include "rtp_rtcp_defines.h"
+
+using namespace webrtc;
+
+const int kVideoNackListSize = 10;
+const int kTestId = 123;
+const WebRtc_UWord32 kTestSsrc = 3456;
+const WebRtc_UWord16 kTestSequenceNumber = 2345;
+const WebRtc_UWord32 kTestNumberOfPackets = 450;
+const int kTestNumberOfRtxPackets = 49;
+
+class VerifyingNackReceiver : public RtpData
+{
+ public:
+  VerifyingNackReceiver() {}
+
+  virtual WebRtc_Word32 OnReceivedPayloadData(
+      const WebRtc_UWord8* data,
+      const WebRtc_UWord16 size,
+      const webrtc::WebRtcRTPHeader* rtp_header) {
+
+    EXPECT_EQ(kTestSsrc, rtp_header->header.ssrc);
+    EXPECT_EQ(std::find(sequence_numbers_.begin(),
+                    sequence_numbers_.end(),
+                    rtp_header->header.sequenceNumber),
+              sequence_numbers_.end());
+    sequence_numbers_.push_back(rtp_header->header.sequenceNumber);
+    return 0;
+  }
+  std::vector<WebRtc_UWord16 > sequence_numbers_;
+};
+
+class NackLoopBackTransport : public webrtc::Transport {
+ public:
+  NackLoopBackTransport(uint32_t rtx_ssrc)
+    : count_(0),
+      packet_loss_(0),
+      rtx_ssrc_(rtx_ssrc),
+      count_rtx_ssrc_(0),
+      module_(NULL) {
+  }
+  void SetSendModule(RtpRtcp* rtpRtcpModule) {
+    module_ = rtpRtcpModule;
+  }
+  void DropEveryNthPacket(int n) {
+    packet_loss_ = n;
+  }
+  virtual int SendPacket(int channel, const void *data, int len) {
+    count_++;
+    const unsigned char* ptr = static_cast<const unsigned  char*>(data);
+    uint32_t ssrc = (ptr[8] << 24) + (ptr[9] << 16) + (ptr[10] << 8) + ptr[11];
+    if (ssrc == rtx_ssrc_) count_rtx_ssrc_++;
+
+    if (packet_loss_ > 0) {
+      if ((count_ % packet_loss_) == 0) {
+        return len;
+      }
+    }
+    if (module_->IncomingPacket((const WebRtc_UWord8*)data, len) == 0) {
+      return len;
+    }
+    return -1;
+  }
+  virtual int SendRTCPPacket(int channel, const void *data, int len) {
+    if (module_->IncomingPacket((const WebRtc_UWord8*)data, len) == 0) {
+      return len;
+    }
+    return -1;
+  }
+  int count_;
+  int packet_loss_;
+  uint32_t rtx_ssrc_;
+  int count_rtx_ssrc_;
+  RtpRtcp* module_;
+};
+
+class RtpRtcpNackTest : public ::testing::Test {
+ protected:
+  RtpRtcpNackTest() {}
+  ~RtpRtcpNackTest() {}
+
+  virtual void SetUp() {
+    transport_ = new NackLoopBackTransport(kTestSsrc + 1);
+    nack_receiver_ = new VerifyingNackReceiver();
+
+    RtpRtcp::Configuration configuration;
+    configuration.id = kTestId;
+    configuration.audio = false;
+    configuration.clock = &fake_clock;
+    configuration.incoming_data = nack_receiver_;
+    configuration.outgoing_transport = transport_;
+    video_module_ = RtpRtcp::CreateRtpRtcp(configuration);
+
+    EXPECT_EQ(0, video_module_->SetRTCPStatus(kRtcpCompound));
+    EXPECT_EQ(0, video_module_->SetSSRC(kTestSsrc));
+    EXPECT_EQ(0, video_module_->SetNACKStatus(kNackRtcp));
+    EXPECT_EQ(0, video_module_->SetStorePacketsStatus(true));
+    EXPECT_EQ(0, video_module_->SetSendingStatus(true));
+    EXPECT_EQ(0, video_module_->SetSequenceNumber(kTestSequenceNumber));
+    EXPECT_EQ(0, video_module_->SetStartTimestamp(111111));
+
+    transport_->SetSendModule(video_module_);
+
+    VideoCodec video_codec;
+    memset(&video_codec, 0, sizeof(video_codec));
+    video_codec.plType = 123;
+    memcpy(video_codec.plName, "I420", 5);
+
+    EXPECT_EQ(0, video_module_->RegisterSendPayload(video_codec));
+    EXPECT_EQ(0, video_module_->RegisterReceivePayload(video_codec));
+
+    payload_data_length = sizeof(payload_data);
+
+    for (int n = 0; n < payload_data_length; n++) {
+      payload_data[n] = n % 10;
+    }
+  }
+
+  virtual void TearDown() {
+    delete video_module_;
+    delete transport_;
+    delete nack_receiver_;
+  }
+
+  RtpRtcp* video_module_;
+  NackLoopBackTransport* transport_;
+  VerifyingNackReceiver* nack_receiver_;
+  WebRtc_UWord8  payload_data[65000];
+  int payload_data_length;
+  FakeRtpRtcpClock fake_clock;
+};
+
+TEST_F(RtpRtcpNackTest, RTCP) {
+  WebRtc_UWord32 timestamp = 3000;
+  WebRtc_UWord16 nack_list[kVideoNackListSize];
+  transport_->DropEveryNthPacket(10);
+
+  for (int frame = 0; frame < 10; ++frame) {
+    EXPECT_EQ(0, video_module_->SendOutgoingData(webrtc::kVideoFrameDelta, 123,
+                                                timestamp,
+                                                timestamp / 90,
+                                                payload_data,
+                                                payload_data_length));
+
+    std::sort(nack_receiver_->sequence_numbers_.begin(),
+              nack_receiver_->sequence_numbers_.end());
+
+    std::vector<WebRtc_UWord16> missing_sequence_numbers;
+    std::vector<WebRtc_UWord16>::iterator it =
+        nack_receiver_->sequence_numbers_.begin();
+
+    while (it != nack_receiver_->sequence_numbers_.end()) {
+      WebRtc_UWord16 sequence_number_1 = *it;
+      ++it;
+      if (it != nack_receiver_->sequence_numbers_.end()) {
+        WebRtc_UWord16 sequence_number_2 = *it;
+        // Add all missing sequence numbers to list
+        for (WebRtc_UWord16 i = sequence_number_1 + 1; i < sequence_number_2;
+            ++i) {
+          missing_sequence_numbers.push_back(i);
+        }
+      }
+    }
+    int n = 0;
+    for (it = missing_sequence_numbers.begin();
+        it != missing_sequence_numbers.end(); ++it) {
+      nack_list[n++] = (*it);
+    }
+    video_module_->SendNACK(nack_list, n);
+    fake_clock.IncrementTime(33);
+    video_module_->Process();
+
+    // Prepare next frame.
+    timestamp += 3000;
+  }
+  std::sort(nack_receiver_->sequence_numbers_.begin(),
+            nack_receiver_->sequence_numbers_.end());
+  EXPECT_EQ(kTestSequenceNumber, *(nack_receiver_->sequence_numbers_.begin()));
+  EXPECT_EQ(kTestSequenceNumber + kTestNumberOfPackets - 1,
+            *(nack_receiver_->sequence_numbers_.rbegin()));
+  EXPECT_EQ(kTestNumberOfPackets, nack_receiver_->sequence_numbers_.size());
+  EXPECT_EQ(0, transport_->count_rtx_ssrc_);
+}
+
+TEST_F(RtpRtcpNackTest, RTX) {
+  EXPECT_EQ(0, video_module_->SetRTXReceiveStatus(true, kTestSsrc + 1));
+  EXPECT_EQ(0, video_module_->SetRTXSendStatus(true, true, kTestSsrc + 1));
+
+  transport_->DropEveryNthPacket(10);
+
+  WebRtc_UWord32 timestamp = 3000;
+  WebRtc_UWord16 nack_list[kVideoNackListSize];
+
+  for (int frame = 0; frame < 10; ++frame) {
+    EXPECT_EQ(0, video_module_->SendOutgoingData(webrtc::kVideoFrameDelta,
+                                                 123,
+                                                 timestamp,
+                                                 timestamp / 90,
+                                                 payload_data,
+                                                 payload_data_length));
+
+    std::sort(nack_receiver_->sequence_numbers_.begin(),
+              nack_receiver_->sequence_numbers_.end());
+
+    std::vector<WebRtc_UWord16> missing_sequence_numbers;
+
+
+    std::vector<WebRtc_UWord16>::iterator it =
+        nack_receiver_->sequence_numbers_.begin();
+    while (it != nack_receiver_->sequence_numbers_.end()) {
+      int sequence_number_1 = *it;
+      ++it;
+      if (it != nack_receiver_->sequence_numbers_.end()) {
+        int sequence_number_2 = *it;
+        // Add all missing sequence numbers to list.
+        for (int i = sequence_number_1 + 1; i < sequence_number_2; ++i) {
+          missing_sequence_numbers.push_back(i);
+        }
+      }
+    }
+    int n = 0;
+    for (it = missing_sequence_numbers.begin();
+        it != missing_sequence_numbers.end(); ++it) {
+      nack_list[n++] = (*it);
+    }
+    video_module_->SendNACK(nack_list, n);
+    fake_clock.IncrementTime(33);
+    video_module_->Process();
+
+    // Prepare next frame.
+    timestamp += 3000;
+  }
+  std::sort(nack_receiver_->sequence_numbers_.begin(),
+            nack_receiver_->sequence_numbers_.end());
+  EXPECT_EQ(kTestSequenceNumber, *(nack_receiver_->sequence_numbers_.begin()));
+  EXPECT_EQ(kTestSequenceNumber + kTestNumberOfPackets - 1,
+            *(nack_receiver_->sequence_numbers_.rbegin()));
+  EXPECT_EQ(kTestNumberOfPackets, nack_receiver_->sequence_numbers_.size());
+  EXPECT_EQ(kTestNumberOfRtxPackets, transport_->count_rtx_ssrc_);
+}
diff --git a/src/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc b/src/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc
new file mode 100644
index 0000000..29596f9
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/testAPI/test_api_rtcp.cc
@@ -0,0 +1,337 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+#include <vector>
+#include <gtest/gtest.h>
+
+#include "test_api.h"
+
+#include "common_types.h"
+#include "rtp_rtcp.h"
+#include "rtp_rtcp_defines.h"
+
+using namespace webrtc;
+ 
+const uint64_t kTestPictureId = 12345678;
+
+class RtcpCallback : public RtcpFeedback, public RtcpIntraFrameObserver {
+ public:
+  void SetModule(RtpRtcp* module) {
+    _rtpRtcpModule = module;
+  };
+  virtual void OnRTCPPacketTimeout(const WebRtc_Word32 id) {
+  }
+  virtual void OnLipSyncUpdate(const WebRtc_Word32 id,
+                               const WebRtc_Word32 audioVideoOffset) {
+  };
+  virtual void OnXRVoIPMetricReceived(
+      const WebRtc_Word32 id,
+      const RTCPVoIPMetric* metric) {
+  };
+  virtual void OnApplicationDataReceived(const WebRtc_Word32 id,
+                                         const WebRtc_UWord8 subType,
+                                         const WebRtc_UWord32 name,
+                                         const WebRtc_UWord16 length,
+                                         const WebRtc_UWord8* data) {
+    char print_name[5];
+    print_name[0] = static_cast<char>(name >> 24);
+    print_name[1] = static_cast<char>(name >> 16);
+    print_name[2] = static_cast<char>(name >> 8);
+    print_name[3] = static_cast<char>(name);
+    print_name[4] = 0;
+
+    EXPECT_STRCASEEQ("test", print_name);
+  };
+  virtual void OnSendReportReceived(const WebRtc_Word32 id,
+                                    const WebRtc_UWord32 senderSSRC) {
+    RTCPSenderInfo senderInfo;
+    EXPECT_EQ(0, _rtpRtcpModule->RemoteRTCPStat(&senderInfo));
+  };
+  virtual void OnReceiveReportReceived(const WebRtc_Word32 id,
+                                       const WebRtc_UWord32 senderSSRC) {
+  };
+  virtual void OnReceivedIntraFrameRequest(const uint32_t ssrc) {
+  };
+  virtual void OnReceivedSLI(const uint32_t ssrc,
+                             const uint8_t pictureId) {
+    EXPECT_EQ(28, pictureId);
+  };
+  virtual void OnReceivedRPSI(const uint32_t ssrc,
+                              const uint64_t pictureId) {
+    EXPECT_EQ(kTestPictureId, pictureId);
+  };
+ private:
+  RtpRtcp* _rtpRtcpModule;
+};
+
+class RtpRtcpRtcpTest : public ::testing::Test {
+ protected:
+  RtpRtcpRtcpTest() {
+    test_CSRC[0] = 1234;
+    test_CSRC[2] = 2345;
+    test_id = 123;
+    test_ssrc = 3456;
+    test_timestamp = 4567;
+    test_sequence_number = 2345;
+  }
+  ~RtpRtcpRtcpTest() {}
+
+  virtual void SetUp() {
+    receiver = new RtpReceiver();
+    transport1 = new LoopBackTransport();
+    transport2 = new LoopBackTransport();
+    myRTCPFeedback1 = new RtcpCallback();
+    myRTCPFeedback2 = new RtcpCallback();
+
+    RtpRtcp::Configuration configuration;
+    configuration.id = test_id;
+    configuration.audio = false;
+    configuration.clock = &fake_clock;
+    configuration.outgoing_transport = transport1;
+    configuration.rtcp_feedback = myRTCPFeedback1;
+    configuration.intra_frame_callback = myRTCPFeedback1;
+
+    module1 = RtpRtcp::CreateRtpRtcp(configuration);
+
+    configuration.id = test_id + 1;
+    configuration.outgoing_transport = transport2;
+    configuration.rtcp_feedback = myRTCPFeedback2;
+    configuration.intra_frame_callback = myRTCPFeedback2;
+    module2 = RtpRtcp::CreateRtpRtcp(configuration);
+
+    transport1->SetSendModule(module2);
+    transport2->SetSendModule(module1);
+    myRTCPFeedback1->SetModule(module1);
+    myRTCPFeedback2->SetModule(module2);
+
+    EXPECT_EQ(0, module1->SetRTCPStatus(kRtcpCompound));
+    EXPECT_EQ(0, module2->SetRTCPStatus(kRtcpCompound));
+
+    EXPECT_EQ(0, module2->SetSSRC(test_ssrc + 1));
+    EXPECT_EQ(0, module1->SetSSRC(test_ssrc));
+    EXPECT_EQ(0, module1->SetSequenceNumber(test_sequence_number));
+    EXPECT_EQ(0, module1->SetStartTimestamp(test_timestamp));
+    EXPECT_EQ(0, module1->SetCSRCs(test_CSRC, 2));
+    EXPECT_EQ(0, module1->SetCNAME("john.doe@test.test"));
+
+    EXPECT_EQ(0, module1->SetSendingStatus(true));
+
+    CodecInst voiceCodec;
+    voiceCodec.pltype = 96;
+    voiceCodec.plfreq = 8000;
+    voiceCodec.rate = 64000;
+    memcpy(voiceCodec.plname, "PCMU", 5);
+
+    EXPECT_EQ(0, module1->RegisterSendPayload(voiceCodec));
+    EXPECT_EQ(0, module1->RegisterReceivePayload(voiceCodec));
+    EXPECT_EQ(0, module2->RegisterSendPayload(voiceCodec));
+    EXPECT_EQ(0, module2->RegisterReceivePayload(voiceCodec));
+
+    // We need to send one RTP packet to get the RTCP packet to be accepted by
+    // the receiving module.
+    // send RTP packet with the data "testtest"
+    const WebRtc_UWord8 test[9] = "testtest";
+    EXPECT_EQ(0, module1->SendOutgoingData(webrtc::kAudioFrameSpeech, 96,
+                                           0, -1, test, 8));
+  }
+
+  virtual void TearDown() {
+    delete module1;
+    delete module2;
+    delete transport1;
+    delete transport2;
+    delete receiver;
+  }
+
+  int test_id;
+  RtpRtcp* module1;
+  RtpRtcp* module2;
+  RtpReceiver* receiver;
+  LoopBackTransport* transport1;
+  LoopBackTransport* transport2;
+  RtcpCallback* myRTCPFeedback1;
+  RtcpCallback* myRTCPFeedback2;
+
+  WebRtc_UWord32 test_ssrc;
+  WebRtc_UWord32 test_timestamp;
+  WebRtc_UWord16 test_sequence_number;
+  WebRtc_UWord32 test_CSRC[webrtc::kRtpCsrcSize];
+  FakeRtpRtcpClock fake_clock;
+};
+
+TEST_F(RtpRtcpRtcpTest, RTCP_PLI_RPSI) {
+  EXPECT_EQ(0, module1->SendRTCPReferencePictureSelection(kTestPictureId));
+  EXPECT_EQ(0, module1->SendRTCPSliceLossIndication(156));
+}
+
+TEST_F(RtpRtcpRtcpTest, RTCP_CNAME) {
+  WebRtc_UWord32 testOfCSRC[webrtc::kRtpCsrcSize];
+  EXPECT_EQ(2, module2->RemoteCSRCs(testOfCSRC));
+  EXPECT_EQ(test_CSRC[0], testOfCSRC[0]);
+  EXPECT_EQ(test_CSRC[1], testOfCSRC[1]);
+
+  // Set cname of mixed.
+  EXPECT_EQ(0, module1->AddMixedCNAME(test_CSRC[0], "john@192.168.0.1"));
+  EXPECT_EQ(0, module1->AddMixedCNAME(test_CSRC[1], "jane@192.168.0.2"));
+
+  EXPECT_EQ(-1, module1->RemoveMixedCNAME(test_CSRC[0] + 1));
+  EXPECT_EQ(0, module1->RemoveMixedCNAME(test_CSRC[1]));
+  EXPECT_EQ(0, module1->AddMixedCNAME(test_CSRC[1], "jane@192.168.0.2"));
+
+  // send RTCP packet, triggered by timer
+  fake_clock.IncrementTime(7500);
+  module1->Process();
+  fake_clock.IncrementTime(100);
+  module2->Process();
+
+  char cName[RTCP_CNAME_SIZE];
+  EXPECT_EQ(-1, module2->RemoteCNAME(module2->RemoteSSRC() + 1, cName));
+
+  // Check multiple CNAME.
+  EXPECT_EQ(0, module2->RemoteCNAME(module2->RemoteSSRC(), cName));
+  EXPECT_EQ(0, strncmp(cName, "john.doe@test.test", RTCP_CNAME_SIZE));
+
+  EXPECT_EQ(0, module2->RemoteCNAME(test_CSRC[0], cName));
+  EXPECT_EQ(0, strncmp(cName, "john@192.168.0.1", RTCP_CNAME_SIZE));
+
+  EXPECT_EQ(0, module2->RemoteCNAME(test_CSRC[1], cName));
+  EXPECT_EQ(0, strncmp(cName, "jane@192.168.0.2", RTCP_CNAME_SIZE));
+
+  EXPECT_EQ(0, module1->SetSendingStatus(false));
+
+  // Test that BYE clears the CNAME
+  EXPECT_EQ(-1, module2->RemoteCNAME(module2->RemoteSSRC(), cName));
+}
+
+TEST_F(RtpRtcpRtcpTest, RTCP) {
+  RTCPReportBlock reportBlock;
+  reportBlock.cumulativeLost = 1;
+  reportBlock.delaySinceLastSR = 2;
+  reportBlock.extendedHighSeqNum = 3;
+  reportBlock.fractionLost= 4;
+  reportBlock.jitter = 5;
+  reportBlock.lastSR = 6;
+
+  // Set report blocks.
+  EXPECT_EQ(-1, module1->AddRTCPReportBlock(test_CSRC[0], NULL));
+  EXPECT_EQ(0, module1->AddRTCPReportBlock(test_CSRC[0], &reportBlock));
+
+  reportBlock.lastSR= 7;
+  EXPECT_EQ(0, module1->AddRTCPReportBlock(test_CSRC[1], &reportBlock));
+
+  WebRtc_UWord32 name = 't' << 24;
+  name += 'e' << 16;
+  name += 's' << 8;
+  name += 't';
+  EXPECT_EQ(0, module1->SetRTCPApplicationSpecificData(
+      3,
+      name,
+      (const WebRtc_UWord8 *)"test test test test test test test test test"\
+          " test test test test test test test test test test test test test"\
+          " test test test test test test test test test test test test test"\
+          " test test test test test test test test test test test test test"\
+          " test test test test test test test test test test test test ",
+          300));
+
+  // send RTCP packet, triggered by timer
+  fake_clock.IncrementTime(7500);
+  module1->Process();
+  fake_clock.IncrementTime(100);
+  module2->Process();
+
+  WebRtc_UWord32 receivedNTPsecs = 0;
+  WebRtc_UWord32 receivedNTPfrac = 0;
+  WebRtc_UWord32 RTCPArrivalTimeSecs = 0;
+  WebRtc_UWord32 RTCPArrivalTimeFrac = 0;
+  EXPECT_EQ(0, module2->RemoteNTP(&receivedNTPsecs, &receivedNTPfrac,
+                                  &RTCPArrivalTimeSecs, &RTCPArrivalTimeFrac));
+
+
+  // get all report blocks
+  std::vector<RTCPReportBlock> report_blocks;
+  EXPECT_EQ(-1, module1->RemoteRTCPStat(NULL));
+  EXPECT_EQ(0, module1->RemoteRTCPStat(&report_blocks));
+  EXPECT_EQ(1u, report_blocks.size());
+  const RTCPReportBlock& reportBlockReceived = report_blocks[0];
+
+  float secSinceLastReport =
+      static_cast<float>(reportBlockReceived.delaySinceLastSR) / 65536.0f;
+  EXPECT_GE(0.101f, secSinceLastReport);
+  EXPECT_LE(0.100f, secSinceLastReport);
+  EXPECT_EQ(test_sequence_number, reportBlockReceived.extendedHighSeqNum);
+  EXPECT_EQ(0, reportBlockReceived.fractionLost);
+
+  EXPECT_EQ(static_cast<WebRtc_UWord32>(0),
+            reportBlockReceived.cumulativeLost);
+
+  WebRtc_UWord8  fraction_lost = 0;  // scale 0 to 255
+  WebRtc_UWord32 cum_lost = 0;       // number of lost packets
+  WebRtc_UWord32 ext_max = 0;        // highest sequence number received
+  WebRtc_UWord32 jitter = 0;
+  WebRtc_UWord32 max_jitter = 0;
+  EXPECT_EQ(0, module2->StatisticsRTP(&fraction_lost,
+                                      &cum_lost,
+                                      &ext_max,
+                                      &jitter,
+                                      &max_jitter));
+  EXPECT_EQ(0, fraction_lost);
+  EXPECT_EQ((WebRtc_UWord32)0, cum_lost);
+  EXPECT_EQ(test_sequence_number, ext_max);
+  EXPECT_EQ(reportBlockReceived.jitter, jitter);
+
+  WebRtc_UWord16 RTT;
+  WebRtc_UWord16 avgRTT;
+  WebRtc_UWord16 minRTT;
+  WebRtc_UWord16 maxRTT;
+
+  // Get RoundTripTime.
+  EXPECT_EQ(0, module1->RTT(test_ssrc + 1, &RTT, &avgRTT, &minRTT, &maxRTT));
+  EXPECT_GE(10, RTT);
+  EXPECT_GE(10, avgRTT);
+  EXPECT_GE(10, minRTT);
+  EXPECT_GE(10, maxRTT);
+
+  // Set report blocks.
+  EXPECT_EQ(0, module1->AddRTCPReportBlock(test_CSRC[0], &reportBlock));
+
+  // Test receive report.
+  EXPECT_EQ(0, module1->SetSendingStatus(false));
+
+  // Send RTCP packet, triggered by timer.
+  fake_clock.IncrementTime(5000);
+  module1->Process();
+  module2->Process();
+}
+
+TEST_F(RtpRtcpRtcpTest, RemoteRTCPStatRemote) {
+  std::vector<RTCPReportBlock> report_blocks;
+
+  EXPECT_EQ(0, module1->RemoteRTCPStat(&report_blocks));
+  EXPECT_EQ(0u, report_blocks.size());
+
+  // send RTCP packet, triggered by timer
+  fake_clock.IncrementTime(7500);
+  module1->Process();
+  fake_clock.IncrementTime(100);
+  module2->Process();
+
+  EXPECT_EQ(0, module1->RemoteRTCPStat(&report_blocks));
+  ASSERT_EQ(1u, report_blocks.size());
+
+  // |test_ssrc+1| is the SSRC of module2 that send the report.
+  EXPECT_EQ(test_ssrc+1, report_blocks[0].remoteSSRC);
+  EXPECT_EQ(test_ssrc, report_blocks[0].sourceSSRC);
+
+  EXPECT_EQ(0u, report_blocks[0].cumulativeLost);
+  EXPECT_LT(0u, report_blocks[0].delaySinceLastSR);
+  EXPECT_EQ(test_sequence_number, report_blocks[0].extendedHighSeqNum);
+  EXPECT_EQ(0u, report_blocks[0].fractionLost);
+}
diff --git a/src/modules/rtp_rtcp/test/testAPI/test_api_video.cc b/src/modules/rtp_rtcp/test/testAPI/test_api_video.cc
new file mode 100644
index 0000000..fdd3ed3
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/testAPI/test_api_video.cc
@@ -0,0 +1,95 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <algorithm>
+#include <vector>
+#include <gtest/gtest.h>
+
+#include "test_api.h"
+
+#include "common_types.h"
+#include "rtp_rtcp.h"
+#include "rtp_rtcp_defines.h"
+
+using namespace webrtc;
+
+class RtpRtcpVideoTest : public ::testing::Test {
+ protected:
+  RtpRtcpVideoTest() {
+    test_id = 123;
+    test_ssrc = 3456;
+    test_timestamp = 4567;
+    test_sequence_number = 2345;
+  }
+  ~RtpRtcpVideoTest() {}
+
+  virtual void SetUp() {
+    transport = new LoopBackTransport();
+    receiver = new RtpReceiver();
+    RtpRtcp::Configuration configuration;
+    configuration.id = test_id;
+    configuration.audio = false;
+    configuration.clock = &fake_clock;
+    configuration.incoming_data = receiver;
+    configuration.outgoing_transport = transport;
+
+    video_module = RtpRtcp::CreateRtpRtcp(configuration);
+
+    EXPECT_EQ(0, video_module->SetRTCPStatus(kRtcpCompound));
+    EXPECT_EQ(0, video_module->SetSSRC(test_ssrc));
+    EXPECT_EQ(0, video_module->SetNACKStatus(kNackRtcp));
+    EXPECT_EQ(0, video_module->SetStorePacketsStatus(true));
+    EXPECT_EQ(0, video_module->SetSendingStatus(true));
+
+    transport->SetSendModule(video_module);
+
+    VideoCodec video_codec;
+    memset(&video_codec, 0, sizeof(video_codec));
+    video_codec.plType = 123;
+    memcpy(video_codec.plName, "I420", 5);
+
+    EXPECT_EQ(0, video_module->RegisterSendPayload(video_codec));
+    EXPECT_EQ(0, video_module->RegisterReceivePayload(video_codec));
+
+    payload_data_length = sizeof(payload_data);
+
+    for (int n = 0; n < payload_data_length; n++) {
+      payload_data[n] = n%10;
+    }
+  }
+
+  virtual void TearDown() {
+    delete video_module;
+    delete transport;
+    delete receiver;
+  }
+
+  int test_id;
+  RtpRtcp* video_module;
+  LoopBackTransport* transport;
+  RtpReceiver* receiver;
+  WebRtc_UWord32 test_ssrc;
+  WebRtc_UWord32 test_timestamp;
+  WebRtc_UWord16 test_sequence_number;
+  WebRtc_UWord8  payload_data[65000];
+  int payload_data_length;
+  FakeRtpRtcpClock fake_clock;
+};
+
+TEST_F(RtpRtcpVideoTest, BasicVideo) {
+  WebRtc_UWord32 timestamp = 3000;
+  EXPECT_EQ(0, video_module->SendOutgoingData(webrtc::kVideoFrameDelta, 123,
+                                             timestamp,
+                                             timestamp / 90,
+                                             payload_data,
+                                             payload_data_length));
+
+}
+
diff --git a/src/modules/rtp_rtcp/test/testFec/test_fec.cc b/src/modules/rtp_rtcp/test/testFec/test_fec.cc
new file mode 100644
index 0000000..d6cd8be
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/testFec/test_fec.cc
@@ -0,0 +1,521 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Test application for core FEC algorithm. Calls encoding and decoding
+ * functions in ForwardErrorCorrection directly.
+ */
+
+#include <cassert>
+#include <cstdio>
+#include <cstdlib>
+#include <cstring>
+#include <ctime>
+#include <list>
+
+#include "modules/rtp_rtcp/source/fec_private_tables_bursty.h"
+#include "modules/rtp_rtcp/source/forward_error_correction.h"
+#include "modules/rtp_rtcp/source/forward_error_correction_internal.h"
+
+#include "rtp_utility.h"
+#include "testsupport/fileutils.h"
+
+//#define VERBOSE_OUTPUT
+
+using namespace webrtc;
+
+void ReceivePackets(
+    ForwardErrorCorrection::ReceivedPacketList* toDecodeList,
+    ForwardErrorCorrection::ReceivedPacketList* receivedPacketList,
+    WebRtc_UWord32 numPacketsToDecode, float reorderRate, float duplicateRate);
+
+int main() {
+  // TODO(marpan): Split this function into subroutines/helper functions.
+  enum { kMaxNumberMediaPackets = 48 };
+  enum { kMaxNumberFecPackets = 48 };
+
+  const WebRtc_UWord32 kNumMaskBytesL0 = 2;
+  const WebRtc_UWord32 kNumMaskBytesL1 = 6;
+
+  // FOR UEP
+  const bool kUseUnequalProtection = true;
+
+  // FEC mask types.
+  const FecMaskType kMaskTypes[] = {kFecMaskRandom, kFecMaskBursty};
+  const int kNumFecMaskTypes = sizeof(kMaskTypes) / sizeof(*kMaskTypes);
+  // Maximum number of media packets allowed for the mask type.
+  const uint16_t kMaxMediaPackets[] = {kMaxNumberMediaPackets,
+      sizeof(kPacketMaskBurstyTbl) / sizeof(*kPacketMaskBurstyTbl)};
+  if (kMaxMediaPackets[1] != 12) {
+    printf("ERROR: max media packets for bursty mode not equal to 12 \n");
+    return -1;
+  }
+
+  WebRtc_UWord32 id = 0;
+  ForwardErrorCorrection fec(id);
+
+  ForwardErrorCorrection::PacketList mediaPacketList;
+  ForwardErrorCorrection::PacketList fecPacketList;
+  ForwardErrorCorrection::ReceivedPacketList toDecodeList;
+  ForwardErrorCorrection::ReceivedPacketList receivedPacketList;
+  ForwardErrorCorrection::RecoveredPacketList recoveredPacketList;
+  std::list<WebRtc_UWord8*> fecMaskList;
+
+  ForwardErrorCorrection::Packet* mediaPacket;
+  // Running over only one loss rate to limit execution time.
+  const float lossRate[] = {0.5f};
+  const WebRtc_UWord32 lossRateSize = sizeof(lossRate)/sizeof(*lossRate);
+  const float reorderRate = 0.1f;
+  const float duplicateRate = 0.1f;
+
+  WebRtc_UWord8 mediaLossMask[kMaxNumberMediaPackets];
+  WebRtc_UWord8 fecLossMask[kMaxNumberFecPackets];
+  WebRtc_UWord8 fecPacketMasks[kMaxNumberFecPackets][kMaxNumberMediaPackets];
+
+  // Seed the random number generator, storing the seed to file in order to
+  // reproduce past results.
+  const unsigned int randomSeed = static_cast<unsigned int>(time(NULL));
+  srand(randomSeed);
+  std::string filename = webrtc::test::OutputPath() + "randomSeedLog.txt";
+  FILE* randomSeedFile = fopen(filename.c_str(), "a");
+  fprintf(randomSeedFile, "%u\n", randomSeed);
+  fclose(randomSeedFile);
+  randomSeedFile = NULL;
+
+  WebRtc_UWord16 seqNum = static_cast<WebRtc_UWord16>(rand());
+  WebRtc_UWord32 timeStamp = static_cast<WebRtc_UWord32>(rand());
+  const WebRtc_UWord32 ssrc = static_cast<WebRtc_UWord32>(rand());
+
+  // Loop over the mask types: random and bursty.
+  for (int mask_type_idx = 0; mask_type_idx < kNumFecMaskTypes;
+      ++mask_type_idx) {
+
+    for (WebRtc_UWord32 lossRateIdx = 0; lossRateIdx < lossRateSize;
+        ++lossRateIdx) {
+
+      printf("Loss rate: %.2f, Mask type %d \n", lossRate[lossRateIdx],
+             mask_type_idx);
+
+      const WebRtc_UWord32 packetMaskMax = kMaxMediaPackets[mask_type_idx];
+      WebRtc_UWord8* packetMask =
+          new WebRtc_UWord8[packetMaskMax * kNumMaskBytesL1];
+
+      FecMaskType fec_mask_type = kMaskTypes[mask_type_idx];
+
+      for (WebRtc_UWord32 numMediaPackets = 1;
+          numMediaPackets <= packetMaskMax;
+          numMediaPackets++) {
+        internal::PacketMaskTable mask_table(fec_mask_type, numMediaPackets);
+
+        for (WebRtc_UWord32 numFecPackets = 1;
+            numFecPackets <= numMediaPackets &&
+            numFecPackets <= packetMaskMax;
+            numFecPackets++) {
+
+          // Loop over numImpPackets: usually <= (0.3*numMediaPackets).
+          // For this test we check up to ~ (0.5*numMediaPackets).
+          WebRtc_UWord32 maxNumImpPackets = numMediaPackets / 2 + 1;
+          for (WebRtc_UWord32 numImpPackets = 0;
+              numImpPackets <= maxNumImpPackets &&
+              numImpPackets <= packetMaskMax;
+              numImpPackets++) {
+
+            WebRtc_UWord8 protectionFactor = static_cast<WebRtc_UWord8>
+            (numFecPackets * 255 / numMediaPackets);
+
+            const WebRtc_UWord32 maskBytesPerFecPacket =
+                (numMediaPackets > 16) ? kNumMaskBytesL1 : kNumMaskBytesL0;
+
+            memset(packetMask, 0, numMediaPackets * maskBytesPerFecPacket);
+
+            // Transfer packet masks from bit-mask to byte-mask.
+            internal::GeneratePacketMasks(numMediaPackets,
+                                          numFecPackets,
+                                          numImpPackets,
+                                          kUseUnequalProtection,
+                                          mask_table,
+                                          packetMask);
+
+#ifdef VERBOSE_OUTPUT
+            printf("%u media packets, %u FEC packets, %u numImpPackets, "
+                "loss rate = %.2f \n",
+                numMediaPackets, numFecPackets, numImpPackets,
+                lossRate[lossRateIdx]);
+            printf("Packet mask matrix \n");
+#endif
+
+            for (WebRtc_UWord32 i = 0; i < numFecPackets; i++) {
+              for (WebRtc_UWord32 j = 0; j < numMediaPackets; j++) {
+                const WebRtc_UWord8 byteMask =
+                    packetMask[i * maskBytesPerFecPacket + j / 8];
+                const WebRtc_UWord32 bitPosition = (7 - j % 8);
+                fecPacketMasks[i][j] =
+                    (byteMask & (1 << bitPosition)) >> bitPosition;
+#ifdef VERBOSE_OUTPUT
+                printf("%u ", fecPacketMasks[i][j]);
+#endif
+              }
+#ifdef VERBOSE_OUTPUT
+              printf("\n");
+#endif
+            }
+#ifdef VERBOSE_OUTPUT
+            printf("\n");
+#endif
+            // Check for all zero rows or columns: indicates incorrect mask.
+            WebRtc_UWord32 rowLimit = numMediaPackets;
+            for (WebRtc_UWord32 i = 0; i < numFecPackets; ++i) {
+              WebRtc_UWord32 rowSum = 0;
+              for (WebRtc_UWord32 j = 0; j < rowLimit; ++j) {
+                rowSum += fecPacketMasks[i][j];
+              }
+              if (rowSum == 0) {
+                printf("ERROR: row is all zero %d \n", i);
+                return -1;
+              }
+            }
+            for (WebRtc_UWord32 j = 0; j < rowLimit; ++j) {
+              WebRtc_UWord32 columnSum = 0;
+              for (WebRtc_UWord32 i = 0; i < numFecPackets; ++i) {
+                columnSum += fecPacketMasks[i][j];
+              }
+              if (columnSum == 0) {
+                printf("ERROR: column is all zero %d \n", j);
+                return -1;
+              }
+            }
+
+            // Construct media packets.
+            for (WebRtc_UWord32 i = 0; i < numMediaPackets; ++i)  {
+              mediaPacket = new ForwardErrorCorrection::Packet;
+              mediaPacketList.push_back(mediaPacket);
+              mediaPacket->length =
+                  static_cast<WebRtc_UWord16>((static_cast<float>(rand()) /
+                      RAND_MAX) * (IP_PACKET_SIZE - 12 -
+                          28 - ForwardErrorCorrection::PacketOverhead()));
+              if (mediaPacket->length < 12) {
+                mediaPacket->length = 12;
+              }
+              // Generate random values for the first 2 bytes.
+              mediaPacket->data[0] = static_cast<WebRtc_UWord8>(rand() % 256);
+              mediaPacket->data[1] = static_cast<WebRtc_UWord8>(rand() % 256);
+
+              // The first two bits are assumed to be 10 by the
+              // FEC encoder. In fact the FEC decoder will set the
+              // two first bits to 10 regardless of what they
+              // actually were. Set the first two bits to 10
+              // so that a memcmp can be performed for the
+              // whole restored packet.
+              mediaPacket->data[0] |= 0x80;
+              mediaPacket->data[0] &= 0xbf;
+
+              // FEC is applied to a whole frame.
+              // A frame is signaled by multiple packets without
+              // the marker bit set followed by the last packet of
+              // the frame for which the marker bit is set.
+              // Only push one (fake) frame to the FEC.
+              mediaPacket->data[1] &= 0x7f;
+
+              ModuleRTPUtility::AssignUWord16ToBuffer(&mediaPacket->data[2],
+                                                      seqNum);
+              ModuleRTPUtility::AssignUWord32ToBuffer(&mediaPacket->data[4],
+                                                      timeStamp);
+              ModuleRTPUtility::AssignUWord32ToBuffer(&mediaPacket->data[8],
+                                                      ssrc);
+              // Generate random values for payload
+              for (WebRtc_Word32 j = 12; j < mediaPacket->length; ++j)  {
+                mediaPacket->data[j] =
+                    static_cast<WebRtc_UWord8> (rand() % 256);
+              }
+              seqNum++;
+            }
+            mediaPacket->data[1] |= 0x80;
+
+            if (fec.GenerateFEC(mediaPacketList, protectionFactor,
+                                numImpPackets, kUseUnequalProtection,
+                                fec_mask_type, &fecPacketList) != 0) {
+              printf("Error: GenerateFEC() failed\n");
+              return -1;
+            }
+
+            if (fecPacketList.size() != numFecPackets) {
+              printf("Error: we requested %u FEC packets, "
+                  "but GenerateFEC() produced %u\n",
+                  numFecPackets,
+                  static_cast<WebRtc_UWord32>(fecPacketList.size()));
+              return -1;
+            }
+            memset(mediaLossMask, 0, sizeof(mediaLossMask));
+            ForwardErrorCorrection::PacketList::iterator
+                mediaPacketListItem = mediaPacketList.begin();
+            ForwardErrorCorrection::ReceivedPacket* receivedPacket;
+            WebRtc_UWord32 mediaPacketIdx = 0;
+
+            while (mediaPacketListItem != mediaPacketList.end()) {
+              mediaPacket = *mediaPacketListItem;
+              // We want a value between 0 and 1.
+              const float lossRandomVariable = (static_cast<float>(rand()) /
+                  (RAND_MAX));
+
+              if (lossRandomVariable >= lossRate[lossRateIdx]) {
+                mediaLossMask[mediaPacketIdx] = 1;
+                receivedPacket =
+                    new ForwardErrorCorrection::ReceivedPacket;
+                receivedPacket->pkt = new ForwardErrorCorrection::Packet;
+                receivedPacketList.push_back(receivedPacket);
+
+                receivedPacket->pkt->length = mediaPacket->length;
+                memcpy(receivedPacket->pkt->data, mediaPacket->data,
+                       mediaPacket->length);
+                receivedPacket->seqNum =
+                    ModuleRTPUtility::BufferToUWord16(&mediaPacket->data[2]);
+                receivedPacket->isFec = false;
+              }
+              mediaPacketIdx++;
+              ++mediaPacketListItem;
+            }
+            memset(fecLossMask, 0, sizeof(fecLossMask));
+            ForwardErrorCorrection::PacketList::iterator
+                fecPacketListItem = fecPacketList.begin();
+            ForwardErrorCorrection::Packet* fecPacket;
+            WebRtc_UWord32 fecPacketIdx = 0;
+            while (fecPacketListItem != fecPacketList.end()) {
+              fecPacket = *fecPacketListItem;
+              const float lossRandomVariable =
+                  (static_cast<float>(rand()) / (RAND_MAX));
+              if (lossRandomVariable >= lossRate[lossRateIdx]) {
+                fecLossMask[fecPacketIdx] = 1;
+                receivedPacket =
+                    new ForwardErrorCorrection::ReceivedPacket;
+                receivedPacket->pkt = new ForwardErrorCorrection::Packet;
+
+                receivedPacketList.push_back(receivedPacket);
+
+                receivedPacket->pkt->length = fecPacket->length;
+                memcpy(receivedPacket->pkt->data, fecPacket->data,
+                       fecPacket->length);
+
+                receivedPacket->seqNum = seqNum;
+                receivedPacket->isFec = true;
+                receivedPacket->ssrc = ssrc;
+
+                fecMaskList.push_back(fecPacketMasks[fecPacketIdx]);
+              }
+              ++fecPacketIdx;
+              ++seqNum;
+              ++fecPacketListItem;
+            }
+
+#ifdef VERBOSE_OUTPUT
+            printf("Media loss mask:\n");
+            for (WebRtc_UWord32 i = 0; i < numMediaPackets; i++) {
+              printf("%u ", mediaLossMask[i]);
+            }
+            printf("\n\n");
+
+            printf("FEC loss mask:\n");
+            for (WebRtc_UWord32 i = 0; i < numFecPackets; i++) {
+              printf("%u ", fecLossMask[i]);
+            }
+            printf("\n\n");
+#endif
+
+            std::list<WebRtc_UWord8*>::iterator fecMaskIt = fecMaskList.begin();
+            WebRtc_UWord8* fecMask;
+            while (fecMaskIt != fecMaskList.end()) {
+              fecMask = *fecMaskIt;
+              WebRtc_UWord32 hammingDist = 0;
+              WebRtc_UWord32 recoveryPosition = 0;
+              for (WebRtc_UWord32 i = 0; i < numMediaPackets; i++) {
+                if (mediaLossMask[i] == 0 && fecMask[i] == 1) {
+                  recoveryPosition = i;
+                  ++hammingDist;
+                }
+              }
+              std::list<WebRtc_UWord8*>::iterator itemToDelete = fecMaskIt;
+              ++fecMaskIt;
+
+              if (hammingDist == 1) {
+                // Recovery possible. Restart search.
+                mediaLossMask[recoveryPosition] = 1;
+                fecMaskIt = fecMaskList.begin();
+              } else if (hammingDist == 0)  {
+                // FEC packet cannot provide further recovery.
+                fecMaskList.erase(itemToDelete);
+              }
+            }
+#ifdef VERBOSE_OUTPUT
+            printf("Recovery mask:\n");
+            for (WebRtc_UWord32 i = 0; i < numMediaPackets; ++i) {
+              printf("%u ", mediaLossMask[i]);
+            }
+            printf("\n\n");
+#endif
+            // For error-checking frame completion.
+            bool fecPacketReceived = false;
+            while (!receivedPacketList.empty()) {
+              WebRtc_UWord32 numPacketsToDecode = static_cast<WebRtc_UWord32>
+                  ((static_cast<float>(rand()) / RAND_MAX) *
+                  receivedPacketList.size() + 0.5);
+              if (numPacketsToDecode < 1) {
+                numPacketsToDecode = 1;
+              }
+              ReceivePackets(&toDecodeList, &receivedPacketList,
+                             numPacketsToDecode, reorderRate, duplicateRate);
+
+              if (fecPacketReceived == false) {
+                ForwardErrorCorrection::ReceivedPacketList::iterator
+                    toDecodeIt = toDecodeList.begin();
+                while (toDecodeIt != toDecodeList.end()) {
+                  receivedPacket = *toDecodeIt;
+                  if (receivedPacket->isFec) {
+                    fecPacketReceived = true;
+                  }
+                  ++toDecodeIt;
+                }
+              }
+              if (fec.DecodeFEC(&toDecodeList, &recoveredPacketList)
+                  != 0) {
+                printf("Error: DecodeFEC() failed\n");
+                return -1;
+              }
+              if (!toDecodeList.empty()) {
+                printf("Error: received packet list is not empty\n");
+                return -1;
+              }
+            }
+            mediaPacketListItem = mediaPacketList.begin();
+            mediaPacketIdx = 0;
+            while (mediaPacketListItem != mediaPacketList.end()) {
+              if (mediaLossMask[mediaPacketIdx] == 1) {
+                // Should have recovered this packet.
+                ForwardErrorCorrection::RecoveredPacketList::iterator
+                    recoveredPacketListItem = recoveredPacketList.begin();
+
+                if (recoveredPacketListItem == recoveredPacketList.end()) {
+                  printf("Error: insufficient number of recovered packets.\n");
+                  return -1;
+                }
+                mediaPacket = *mediaPacketListItem;
+                ForwardErrorCorrection::RecoveredPacket* recoveredPacket =
+                    *recoveredPacketListItem;
+
+                if (recoveredPacket->pkt->length != mediaPacket->length) {
+                  printf("Error: recovered packet length not identical to "
+                      "original media packet\n");
+                  return -1;
+                }
+                if (memcmp(recoveredPacket->pkt->data, mediaPacket->data,
+                           mediaPacket->length) != 0) {
+                  printf("Error: recovered packet payload not identical to "
+                      "original media packet\n");
+                  return -1;
+                }
+                delete recoveredPacket;
+                recoveredPacketList.pop_front();
+              }
+              ++mediaPacketIdx;
+              ++mediaPacketListItem;
+            }
+            fec.ResetState(&recoveredPacketList);
+            if (!recoveredPacketList.empty()) {
+              printf("Error: excessive number of recovered packets.\n");
+              printf("\t size is:%u\n",
+                     static_cast<WebRtc_UWord32>(recoveredPacketList.size()));
+              return -1;
+            }
+            // -- Teardown --
+            mediaPacketListItem = mediaPacketList.begin();
+            while (mediaPacketListItem != mediaPacketList.end()) {
+              delete *mediaPacketListItem;
+              ++mediaPacketListItem;
+              mediaPacketList.pop_front();
+            }
+            assert(mediaPacketList.empty());
+
+            fecPacketListItem = fecPacketList.begin();
+            while (fecPacketListItem != fecPacketList.end()) {
+              ++fecPacketListItem;
+              fecPacketList.pop_front();
+            }
+
+            // Delete received packets we didn't pass to DecodeFEC(), due to
+            // early frame completion.
+            ForwardErrorCorrection::ReceivedPacketList::iterator
+                receivedPacketIt = receivedPacketList.begin();
+            while (receivedPacketIt != receivedPacketList.end()) {
+              receivedPacket = *receivedPacketIt;
+              delete receivedPacket;
+              ++receivedPacketIt;
+              receivedPacketList.pop_front();
+            }
+            assert(receivedPacketList.empty());
+
+            while (!fecMaskList.empty()) {
+              fecMaskList.pop_front();
+            }
+            timeStamp += 90000 / 30;
+          }  // loop over numImpPackets
+        }  // loop over FecPackets
+      }  // loop over numMediaPackets
+      delete [] packetMask;
+    }  // loop over loss rates
+  }  // loop over mask types
+
+  // Have DecodeFEC free allocated memory.
+  fec.ResetState(&recoveredPacketList);
+  if (!recoveredPacketList.empty()) {
+    printf("Error: recovered packet list is not empty\n");
+    return -1;
+  }
+  printf("\nAll tests passed successfully\n");
+  return 0;
+}
+
+void ReceivePackets(
+    ForwardErrorCorrection::ReceivedPacketList* toDecodeList,
+    ForwardErrorCorrection::ReceivedPacketList* receivedPacketList,
+    WebRtc_UWord32 numPacketsToDecode, float reorderRate, float duplicateRate) {
+  assert(toDecodeList->empty());
+  assert(numPacketsToDecode <= receivedPacketList->size());
+
+  ForwardErrorCorrection::ReceivedPacketList::iterator it;
+  for (WebRtc_UWord32 i = 0; i < numPacketsToDecode; i++) {
+    it = receivedPacketList->begin();
+    // Reorder packets.
+    float randomVariable = static_cast<float>(rand()) / RAND_MAX;
+    while (randomVariable < reorderRate) {
+      ++it;
+      if (it == receivedPacketList->end()) {
+        --it;
+        break;
+      }
+      randomVariable = static_cast<float>(rand()) / RAND_MAX;
+    }
+    ForwardErrorCorrection::ReceivedPacket* receivedPacket = *it;
+    toDecodeList->push_back(receivedPacket);
+
+    // Duplicate packets.
+    randomVariable = static_cast<float>(rand()) / RAND_MAX;
+    while (randomVariable < duplicateRate) {
+      ForwardErrorCorrection::ReceivedPacket* duplicatePacket =
+          new ForwardErrorCorrection::ReceivedPacket;
+      *duplicatePacket = *receivedPacket;
+      duplicatePacket->pkt = new ForwardErrorCorrection::Packet;
+      memcpy(duplicatePacket->pkt->data, receivedPacket->pkt->data,
+             receivedPacket->pkt->length);
+      duplicatePacket->pkt->length = receivedPacket->pkt->length;
+
+      toDecodeList->push_back(duplicatePacket);
+      randomVariable = static_cast<float>(rand()) / RAND_MAX;
+    }
+    receivedPacketList->erase(it);
+  }
+}
diff --git a/src/modules/rtp_rtcp/test/testFec/test_fec.gypi b/src/modules/rtp_rtcp/test/testFec/test_fec.gypi
new file mode 100644
index 0000000..e4fde9c
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/testFec/test_fec.gypi
@@ -0,0 +1,36 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'test_fec',
+      'type': 'executable',
+      'dependencies': [
+        'rtp_rtcp',
+        '<(webrtc_root)/test/test.gyp:test_support_main',
+      ],
+      
+      'include_dirs': [
+        '../../source',
+        '../../../../system_wrappers/interface',
+      ],
+   
+      'sources': [
+        'test_fec.cc',
+      ],
+      
+    },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/modules/rtp_rtcp/test/testH263Parser/testH263Parser.cc b/src/modules/rtp_rtcp/test/testH263Parser/testH263Parser.cc
new file mode 100644
index 0000000..e4c36d1
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/testH263Parser/testH263Parser.cc
@@ -0,0 +1,580 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+#include <windows.h>
+#include <iostream>
+#include <tchar.h>
+
+#include "h263_information.h"
+
+#define TEST_STR "Test H263 parser."
+#define TEST_PASSED() std::cerr << TEST_STR << " : [OK]" << std::endl
+#define PRINT_LINE std::cout << "------------------------------------------" << std::endl;
+
+
+void PrintInfo(const H263Info* ptr)
+{
+    std::cout << "info =    GOBs: {";
+    for (int i = 0; i < ptr->numOfGOBs; i++)
+    {
+        std::cout << ptr->ptrGOBbuffer[i] << " ";
+    }
+    std::cout << "}" << std::endl;
+    std::cout << "          sBit: {";
+    for (int i = 0; i < ptr->numOfGOBs; i++)
+    {
+        std::cout << (int)ptr->ptrGOBbufferSBit[i] << " ";
+    }
+    std::cout << "}" << std::endl;
+
+    std::cout << "uiH263PTypeFmt: " << (int)ptr->uiH263PTypeFmt << std::endl;
+    std::cout << "     codecBits: " << (int)ptr->codecBits << std::endl;
+    std::cout << "        pQuant: " << (int)ptr->pQuant << std::endl;
+    std::cout << "         fType: " << (int)ptr->fType << std::endl;
+    std::cout << "        cpmBit: " << (int)ptr->cpmBit << std::endl;
+    std::cout << "     numOfGOBs: " << (int)ptr->numOfGOBs << std::endl;
+    std::cout << "      numOfMBs: " << (int)ptr->totalNumOfMBs << std::endl;
+    std::cout << "                " <<  std::endl;
+};
+
+void PrintMBInfo(
+    const H263Info* ptr,
+    const H263MBInfo* ptrMB)
+{
+    std::cout << "        gQuant: {";
+    for (int i = 0; i < ptr->numOfGOBs; i++)
+    {
+        std::cout << (int)ptr->ptrGQuant[i] << " ";
+    }
+    std::cout << "}" << std::endl;
+    std::cout << "                " <<  std::endl;
+
+    std::cout << "MBs:{";
+    int k = 0;
+    for (int i = 0; i < ptr->numOfGOBs; i++)
+    {
+        std::cout << " {";
+        for (int j = 0; j < ptr->ptrNumOfMBs[i]; j++)
+        {
+            std::cout << ptrMB->ptrBuffer[k++] << " ";
+        }
+        std::cout << "}" << std::endl;
+    }
+    std::cout << "}" << std::endl;
+    PRINT_LINE;
+};
+
+void ValidateResults(
+    const H263Info* ptr,
+    const H263Info* ptrRef)
+{
+    assert(ptr->uiH263PTypeFmt == ptrRef->uiH263PTypeFmt);
+    assert(ptr->codecBits      == ptrRef->codecBits);
+    assert(ptr->pQuant         == ptrRef->pQuant);
+    assert(ptr->fType          == ptrRef->fType);
+    assert(ptr->numOfGOBs      == ptrRef->numOfGOBs);
+    assert(ptr->totalNumOfMBs  == ptrRef->totalNumOfMBs);
+    for (int i = 0; i < ptr->numOfGOBs; i++)
+    {
+        assert(ptr->ptrGOBbuffer[i]     == ptrRef->ptrGOBbuffer[i]);
+        assert(ptr->ptrGOBbufferSBit[i] == ptrRef->ptrGOBbufferSBit[i]);
+    }
+    PrintInfo(ptr);
+}
+
+void ValidateMBResults(
+    const H263Info* ptr,
+    const H263MBInfo* ptrMB,
+    const H263Info* ptrRef,
+    bool  printRes = true)
+{
+    int offset = 0;
+    int numBytes = 0;
+    for (int i = 0; i < ptr->numOfGOBs; i++)
+    {
+        offset = ptr->CalculateMBOffset(i+1);
+        numBytes += ptrMB->ptrBuffer[offset - 1] / 8;
+        int numBytesRem = ptrMB->ptrBuffer[offset - 1] % 8;
+        if (numBytesRem)
+        {
+            numBytes++;
+        }
+        assert(ptr->ptrGQuant[i] == ptrRef->ptrGQuant[i]);
+    }
+    assert(ptr->ptrGOBbuffer[ptr->numOfGOBs] == numBytes);
+    assert(unsigned int( ptr->totalNumOfMBs) <= ptrMB->bufferSize);
+    if (printRes)
+    {
+        PrintMBInfo(ptr, ptrMB);
+    }
+}
+
+
+int _tmain(int argc, _TCHAR* argv[])
+{
+
+    std::string str;
+    std::cout << "--------------------------------" << std::endl;
+    std::cout << "------- Test H.263 Parser ------" << std::endl;
+    std::cout << "--------------------------------" << std::endl;
+    std::cout << "  "  << std::endl;
+
+    // -----------------------------------------------------
+    // Input data - H.263 encoded stream SQCIF (P-frame)
+    // -----------------------------------------------------
+    const int lengthRefSQCIF = 77;
+    const unsigned char encodedStreamSQCIF[lengthRefSQCIF] = {
+    0x00, 0x00, 0x81, 0xf6, 0x06, 0x04, 0x3f, 0xb7, 0xbc, 0x00, 0x00, 0x86, 0x23,
+    0x5b, 0xdb, 0xdf, 0xb1, 0x93, 0xdb, 0xde, 0xd6, 0xf0, 0x00, 0x00, 0x8a, 0x27,
+    0xdb, 0xcf, 0xad, 0xbe, 0x00, 0x00, 0x8e, 0x27, 0xed, 0xef, 0x80, 0x00, 0x00,
+    0x92, 0x27, 0x6f, 0x7f, 0x80, 0x00, 0x00, 0x96, 0x20, 0xfc, 0xe2, 0xdb, 0xfe,
+    0xb7, 0x7d, 0xea, 0x5f, 0xf8, 0xab, 0xd2, 0xff, 0xf6, 0xc9, 0xe5, 0x5e, 0x97,
+    0xf7, 0xff, 0xad, 0x4f, 0x49, 0x3b, 0xff, 0xd6, 0xa6, 0x75, 0x82, 0x60};
+
+    // Expected results
+    H263Info infoRefSQCIF;
+    infoRefSQCIF.uiH263PTypeFmt = 1;
+    infoRefSQCIF.codecBits      = 8;
+    infoRefSQCIF.pQuant         = 4;
+    infoRefSQCIF.fType          = 1;
+    infoRefSQCIF.cpmBit         = 0;
+    infoRefSQCIF.numOfGOBs      = 6;
+    infoRefSQCIF.totalNumOfMBs  = 8*6;
+
+    infoRefSQCIF.ptrGOBbuffer[0] = 0;  infoRefSQCIF.ptrGOBbufferSBit[0] = 0; infoRefSQCIF.ptrGQuant[0] = 0;
+    infoRefSQCIF.ptrGOBbuffer[1] = 9;  infoRefSQCIF.ptrGOBbufferSBit[1] = 0; infoRefSQCIF.ptrGQuant[1] = 4;
+    infoRefSQCIF.ptrGOBbuffer[2] = 22; infoRefSQCIF.ptrGOBbufferSBit[2] = 0; infoRefSQCIF.ptrGQuant[2] = 4;
+    infoRefSQCIF.ptrGOBbuffer[3] = 30; infoRefSQCIF.ptrGOBbufferSBit[3] = 0; infoRefSQCIF.ptrGQuant[3] = 4;
+    infoRefSQCIF.ptrGOBbuffer[4] = 37; infoRefSQCIF.ptrGOBbufferSBit[4] = 0; infoRefSQCIF.ptrGQuant[4] = 4;
+    infoRefSQCIF.ptrGOBbuffer[5] = 44; infoRefSQCIF.ptrGOBbufferSBit[5] = 0; infoRefSQCIF.ptrGQuant[5] = 4;
+
+    // ----------------------------------------------------
+    // Input data - H.263 encoded stream QCIF (P-frame)
+    // ----------------------------------------------------
+    const int lengthRefQCIF = 123;
+    const unsigned char encodedStreamQCIF[lengthRefQCIF] = {
+    0x00, 0x00, 0x81, 0x02, 0x0a, 0x04, 0x3f, 0xf8, 0x00, 0x00, 0x86, 0x27, 0x8b,
+    0xc6, 0x9f, 0x17, 0x9c, 0x00, 0x00, 0x8a, 0x20, 0xbc, 0x22, 0xf8, 0x5f, 0x46,
+    0x03, 0xc1, 0x77, 0x15, 0xe0, 0xb8, 0x38, 0x3f, 0x05, 0xa0, 0xbf, 0x8f, 0x00,
+    0x00, 0x8e, 0x27, 0xfc, 0x5e, 0x5a, 0x33, 0x80, 0x00, 0x00, 0x92, 0x25, 0x8c,
+    0x1e, 0xbf, 0xfc, 0x7e, 0x35, 0xfc, 0x00, 0x00, 0x96, 0x27, 0xff, 0x00, 0x00,
+    0x9a, 0x20, 0xdb, 0x34, 0xef, 0xfc, 0x00, 0x00, 0x9e, 0x20, 0xaf, 0x17, 0x0d,
+    0x3e, 0xde, 0x0f, 0x8f, 0xff, 0x80, 0x00, 0x00, 0xa2, 0x22, 0xbb, 0x27, 0x81,
+    0xeb, 0xff, 0x5b, 0x07, 0xab, 0xff, 0xad, 0x9e, 0xd8, 0xc9, 0x6b, 0x75, 0x54,
+    0xbf, 0xbe, 0x8a, 0xbd, 0xf2, 0xfb, 0xfb, 0x3d, 0x3d, 0x25, 0xb7, 0xf7, 0xfc,
+    0x92, 0x4c, 0xdb, 0x6d, 0x69, 0xc0};
+
+    // Expected results
+    H263Info infoRefQCIF;
+    infoRefQCIF.uiH263PTypeFmt = 2;
+    infoRefQCIF.codecBits      = 8;
+    infoRefQCIF.pQuant         = 4;
+    infoRefQCIF.fType          = 1;
+    infoRefQCIF.cpmBit         = 0;
+    infoRefQCIF.numOfGOBs      = 9;
+    infoRefQCIF.totalNumOfMBs  = 11*9;
+
+    infoRefQCIF.ptrGOBbuffer[0] = 0;   infoRefQCIF.ptrGOBbufferSBit[0] = 0; infoRefQCIF.ptrGQuant[0] = 0;
+    infoRefQCIF.ptrGOBbuffer[1] = 8;   infoRefQCIF.ptrGOBbufferSBit[1] = 0; infoRefQCIF.ptrGQuant[1] = 4;
+    infoRefQCIF.ptrGOBbuffer[2] = 17;  infoRefQCIF.ptrGOBbufferSBit[2] = 0; infoRefQCIF.ptrGQuant[2] = 4;
+    infoRefQCIF.ptrGOBbuffer[3] = 38;  infoRefQCIF.ptrGOBbufferSBit[3] = 0; infoRefQCIF.ptrGQuant[3] = 4;
+    infoRefQCIF.ptrGOBbuffer[4] = 47;  infoRefQCIF.ptrGOBbufferSBit[4] = 0; infoRefQCIF.ptrGQuant[4] = 4;
+    infoRefQCIF.ptrGOBbuffer[5] = 58;  infoRefQCIF.ptrGOBbufferSBit[5] = 0; infoRefQCIF.ptrGQuant[5] = 4;
+    infoRefQCIF.ptrGOBbuffer[6] = 63;  infoRefQCIF.ptrGOBbufferSBit[6] = 0; infoRefQCIF.ptrGQuant[6] = 4;
+    infoRefQCIF.ptrGOBbuffer[7] = 71;  infoRefQCIF.ptrGOBbufferSBit[7] = 0; infoRefQCIF.ptrGQuant[7] = 4;
+    infoRefQCIF.ptrGOBbuffer[8] = 84;  infoRefQCIF.ptrGOBbufferSBit[8] = 0; infoRefQCIF.ptrGQuant[8] = 4;
+
+    // ---------------------------------------------------
+    // Input data - H.263 encoded stream CIF (P-frame)
+    // ---------------------------------------------------
+    const int lengthRefCIF = 212;
+    const unsigned char encodedStreamCIF[lengthRefCIF] = {
+    0x00, 0x00, 0x82, 0x9a, 0x0e, 0x04, 0x3f, 0xff, 0xff, 0x00, 0x00, 0x86, 0x27,
+    0xff, 0xff, 0xe0, 0x00, 0x00, 0x8a, 0x27, 0xff, 0xff, 0xe0, 0x00, 0x00, 0x8e,
+    0x27, 0xff, 0xff, 0x6b, 0x09, 0x70, 0x00, 0x00, 0x92, 0x27, 0xff, 0xff, 0xe0,
+    0x00, 0x00, 0x96, 0x27, 0xff, 0xff, 0xe0, 0x00, 0x00, 0x9a, 0x27, 0x6f, 0x7f,
+    0xff, 0xfe, 0x00, 0x00, 0x9e, 0x27, 0xff, 0xfe, 0xc6, 0x31, 0xe0, 0x00, 0x00,
+    0xa2, 0x27, 0xff, 0xff, 0xe0, 0x00, 0x00, 0xa6, 0x27, 0xf6, 0xb7, 0xfe, 0xa6,
+    0x14, 0x95, 0xb4, 0xc6, 0x41, 0x6b, 0x3a, 0x2e, 0x8d, 0x42, 0xef, 0xc0, 0x00,
+    0x00, 0xaa, 0x27, 0xff, 0xb1, 0x95, 0x05, 0x0c, 0xe3, 0x4a, 0x17, 0xff, 0x80,
+    0x00, 0x00, 0xae, 0x27, 0xff, 0xf6, 0xf7, 0xfe, 0x00, 0x00, 0xb2, 0x27, 0xff,
+    0x8b, 0xdf, 0xff, 0x00, 0x00, 0xb6, 0x27, 0xff, 0xff, 0xe0, 0x00, 0x00, 0xba,
+    0x26, 0x2f, 0x7f, 0xff, 0xfb, 0x58, 0x5b, 0x80, 0x00, 0x00, 0xbe, 0x20, 0xbc,
+    0xe4, 0x5e, 0x6f, 0xff, 0xfe, 0xce, 0xf1, 0x94, 0x00, 0x00, 0xc2, 0x23, 0x18,
+    0x4b, 0x17, 0x87, 0x0f, 0xff, 0xb3, 0xb6, 0x09, 0x63, 0x46, 0x73, 0x40, 0xb2,
+    0x5f, 0x0a, 0xc6, 0xbe, 0xd7, 0x7a, 0x61, 0xbc, 0x68, 0xde, 0xf0, 0x00, 0x00,
+    0xc6, 0x26, 0x2f, 0x0a, 0xff, 0xff, 0x17, 0x9a, 0x17, 0x8c, 0xb1, 0x37, 0x67,
+    0xaf, 0xec, 0xf7, 0xa5, 0xbf, 0xb6, 0x49, 0x27, 0x6d, 0xb7, 0x92, 0x49, 0x1b,
+    0xb4, 0x9a, 0xe0, 0x62};
+
+    // Expected results
+    H263Info infoRefCIF;
+    infoRefCIF.uiH263PTypeFmt = 3;
+    infoRefCIF.codecBits      = 8;
+    infoRefCIF.pQuant         = 4;
+    infoRefCIF.fType          = 1;
+    infoRefCIF.cpmBit         = 0;
+    infoRefCIF.numOfGOBs      = 18;
+    infoRefCIF.totalNumOfMBs  = 22*18;
+
+    infoRefCIF.ptrGOBbuffer[0]  = 0;   infoRefCIF.ptrGOBbufferSBit[0]  = 0; infoRefCIF.ptrGQuant[0]  = 0;
+    infoRefCIF.ptrGOBbuffer[1]  = 9;   infoRefCIF.ptrGOBbufferSBit[1]  = 0; infoRefCIF.ptrGQuant[1]  = 4;
+    infoRefCIF.ptrGOBbuffer[2]  = 16;  infoRefCIF.ptrGOBbufferSBit[2]  = 0; infoRefCIF.ptrGQuant[2]  = 4;
+    infoRefCIF.ptrGOBbuffer[3]  = 23;  infoRefCIF.ptrGOBbufferSBit[3]  = 0; infoRefCIF.ptrGQuant[3]  = 4;
+    infoRefCIF.ptrGOBbuffer[4]  = 32;  infoRefCIF.ptrGOBbufferSBit[4]  = 0; infoRefCIF.ptrGQuant[4]  = 4;
+    infoRefCIF.ptrGOBbuffer[5]  = 39;  infoRefCIF.ptrGOBbufferSBit[5]  = 0; infoRefCIF.ptrGQuant[5]  = 4;
+    infoRefCIF.ptrGOBbuffer[6]  = 46;  infoRefCIF.ptrGOBbufferSBit[6]  = 0; infoRefCIF.ptrGQuant[6]  = 4;
+    infoRefCIF.ptrGOBbuffer[7]  = 54;  infoRefCIF.ptrGOBbufferSBit[7]  = 0; infoRefCIF.ptrGQuant[7]  = 4;
+    infoRefCIF.ptrGOBbuffer[8]  = 63;  infoRefCIF.ptrGOBbufferSBit[8]  = 0; infoRefCIF.ptrGQuant[8]  = 4;
+    infoRefCIF.ptrGOBbuffer[9]  = 70;  infoRefCIF.ptrGOBbufferSBit[9]  = 0; infoRefCIF.ptrGQuant[9]  = 4;
+    infoRefCIF.ptrGOBbuffer[10] = 90;  infoRefCIF.ptrGOBbufferSBit[10]  = 0; infoRefCIF.ptrGQuant[10]  = 4;
+    infoRefCIF.ptrGOBbuffer[11] = 104; infoRefCIF.ptrGOBbufferSBit[11] = 0; infoRefCIF.ptrGQuant[11] = 4;
+    infoRefCIF.ptrGOBbuffer[12] = 112; infoRefCIF.ptrGOBbufferSBit[12] = 0; infoRefCIF.ptrGQuant[12] = 4;
+    infoRefCIF.ptrGOBbuffer[13] = 120; infoRefCIF.ptrGOBbufferSBit[13] = 0; infoRefCIF.ptrGQuant[13] = 4;
+    infoRefCIF.ptrGOBbuffer[14] = 127; infoRefCIF.ptrGOBbufferSBit[14] = 0; infoRefCIF.ptrGQuant[14] = 4;
+    infoRefCIF.ptrGOBbuffer[15] = 138; infoRefCIF.ptrGOBbufferSBit[15] = 0; infoRefCIF.ptrGQuant[15] = 4;
+    infoRefCIF.ptrGOBbuffer[16] = 151; infoRefCIF.ptrGOBbufferSBit[16] = 0; infoRefCIF.ptrGQuant[16] = 4;
+    infoRefCIF.ptrGOBbuffer[17] = 180; infoRefCIF.ptrGOBbufferSBit[17] = 0; infoRefCIF.ptrGQuant[17] = 4;
+
+    // -----------------------------------------------------------------------
+    // Input data - H.263 encoded stream QCIF (I-frame). Non byte aligned GOBs
+    // -----------------------------------------------------------------------
+    const int lengthRefQCIF_N = 2020;
+    const unsigned char encodedStreamQCIF_N[lengthRefQCIF_N] = {
+    0x00,0x00,0x80,0x06,0x08,0x07,0x34,0xe4,0xf7,0x66,0x75,0x12,0x9b,0x64,0x83,0xe9,0x4c,0xc7,0x3c,0x77,0x83,0xcf,0x67,0x96,
+    0xe0,0x06,0x69,0x95,0x70,0x60,0x43,0x53,0x96,0x8a,0xa2,0x9e,0x96,0xf8,0x79,0xf0,0xf2,0xb8,0x30,0x21,0xb0,0x0c,0xc0,0x22,
+    0x0e,0x70,0x12,0xb0,0x30,0x21,0x9e,0x03,0x3e,0x02,0x22,0xa8,0x83,0xcf,0x7d,0xe8,0xf3,0x85,0x8d,0x01,0x47,0x29,0x03,0x02,
+    0x16,0x5c,0x06,0x7c,0x84,0x56,0x9c,0x0c,0x08,0x49,0x70,0x29,0xe1,0x68,0x39,0xbf,0xd0,0xef,0x6f,0x6e,0x8e,0x64,0x72,0x70,
+    0x30,0x21,0x03,0xe0,0x53,0x82,0xab,0xe2,0xd4,0xa8,0x3e,0x12,0x80,0xc8,0x2a,0x9f,0x07,0x23,0xdf,0x4f,0xaa,0x5b,0x72,0xaa,
+    0x22,0x81,0x21,0x7a,0x80,0x54,0x85,0x82,0x50,0x17,0x40,0x2a,0x7f,0x3c,0xfe,0xa5,0x49,0x39,0x08,0x18,0x20,0x65,0x95,0x8c,
+    0x8f,0x67,0xc7,0xc0,0x63,0xe1,0x1b,0xf1,0xef,0xfd,0x25,0x13,0x24,0x9c,0xa9,0x1f,0x02,0x9b,0xe1,0x19,0xae,0x97,0x02,0x9a,
+    0x84,0x6f,0xbf,0xfd,0x14,0xf2,0x09,0xe3,0x10,0x38,0x0a,0x61,0x99,0x6e,0xf8,0x14,0xce,0x7e,0xff,0xe7,0xa7,0x8e,0x4f,0x14,
+    0xa9,0x80,0xa6,0x71,0x3a,0x98,0x06,0x1c,0xfc,0xff,0xcd,0x8f,0x18,0x9e,0x24,0x52,0x05,0x9c,0x42,0xa5,0x33,0x9f,0xdf,0x80,
+    0x00,0x21,0x0e,0xf6,0x61,0xe2,0x7a,0x35,0x64,0x8e,0x61,0xa2,0x58,0x2a,0x0b,0x45,0x4c,0xe0,0xa1,0xa0,0xa6,0x83,0x58,0xf0,
+    0x08,0x77,0x3e,0xb5,0xc6,0x05,0x6b,0x6e,0x69,0x06,0x07,0xe6,0x82,0x9b,0xc8,0x7e,0x2d,0x6d,0x06,0x07,0xe3,0xe0,0xa6,0x2e,
+    0x05,0x58,0x39,0xbf,0xd1,0xc7,0x9e,0x15,0x29,0x39,0x31,0xbc,0x18,0x1f,0x62,0xf5,0x95,0x20,0xa0,0xe6,0xff,0x38,0x83,0x03,
+    0xea,0x3f,0x02,0xc8,0x05,0x71,0x31,0xef,0x79,0x49,0x48,0x73,0x06,0x07,0xd4,0x4b,0x5f,0xc0,0xe4,0x81,0x3b,0x03,0x03,0xea,
+    0x24,0x81,0x90,0x55,0x41,0x64,0x2b,0xea,0xd1,0xcc,0xa5,0x99,0x79,0x06,0x07,0xd8,0x49,0x05,0x39,0x64,0x16,0x3d,0x03,0x03,
+    0xee,0x24,0x83,0x0b,0xf9,0x46,0x50,0x27,0xdf,0xd4,0xa6,0xa6,0x3d,0x83,0x03,0xf0,0x25,0x83,0x0b,0xf8,0x19,0xbe,0x83,0x03,
+    0xf2,0x3f,0x06,0x17,0xf1,0x58,0xd6,0x04,0xff,0xfa,0x94,0xb4,0x87,0xe0,0x43,0x2f,0x06,0x17,0xf1,0x58,0x11,0x7b,0xe0,0x21,
+    0x97,0x82,0x9d,0x58,0x11,0x7b,0xf9,0xf8,0x15,0xa2,0x9f,0x40,0x3e,0xf4,0x07,0xd5,0x81,0x9f,0xa1,0x7b,0xca,0x9a,0xbb,0xdf,
+    0xcf,0xc0,0xf3,0xb0,0x09,0x37,0x00,0x97,0x63,0xee,0x47,0xff,0xff,0xe8,0x68,0xa5,0xc6,0x43,0xdf,0x2f,0x06,0xad,0xad,0x3f,
+    0xef,0xf6,0x30,0x03,0xea,0x80,0xcf,0x80,0x8b,0x8a,0xe2,0xb5,0x40,0xa7,0x2e,0x06,0x27,0xf7,0xc2,0xd6,0x47,0x2f,0xc1,0x3f,
+    0x80,0x00,0x44,0x1f,0xb5,0xbd,0xc5,0xed,0x4f,0x70,0x7c,0x43,0xec,0x8e,0xcf,0x6e,0xcf,0x7b,0x88,0x7d,0x9b,0xfc,0x13,0xfc,
+    0x24,0x3b,0xf4,0xe1,0x61,0xe1,0xe2,0x61,0x5f,0xa7,0x13,0x15,0x17,0x1b,0x08,0xff,0x38,0x98,0x98,0xb8,0xb8,0x2f,0xf9,0xc3,
+    0xc2,0xc5,0x43,0xc0,0xff,0xce,0x09,0xfa,0x16,0x06,0x07,0xfe,0x6f,0x4e,0xcf,0xaf,0x30,0x3f,0xf3,0x70,0x6c,0x74,0x6e,0x81,
+    0xff,0x9b,0x33,0x13,0x53,0x2c,0x0f,0xf8,0x00,0x04,0x61,0xf0,0xd4,0xf7,0x17,0xb6,0x3a,0x3e,0x1d,0xfa,0x6f,0x0f,0xaf,0x4f,
+    0xf0,0xef,0xd3,0x81,0x85,0x84,0x88,0x86,0x7e,0x9c,0x4c,0x64,0x64,0x84,0x2b,0xfc,0xe3,0xa4,0x24,0xa4,0xe0,0xdf,0xe7,0x1f,
+    0x1d,0x27,0x25,0x03,0xff,0x38,0xd8,0xb9,0x19,0x0f,0xff,0xf9,0xc4,0x42,0xc6,0xc4,0xff,0xc0,0xce,0x05,0xf6,0x16,0x05,0xfe,
+    0x06,0x6f,0x0e,0x4f,0x8e,0xef,0xf0,0x59,0x6d,0x68,0x72,0x3e,0xd9,0x1f,0xff,0x02,0x00,0x01,0x20,0x7c,0x3a,0x29,0x4c,0xf7,
+    0xa5,0x34,0x0c,0x3d,0xb5,0xae,0x3a,0x1d,0xf5,0x6f,0xc3,0xb0,0x30,0x33,0x82,0x03,0xa0,0x60,0x66,0xbf,0x01,0xc0,0x04,0x5b,
+    0x07,0x04,0x16,0x10,0xf3,0xf2,0xe1,0x94,0x17,0x81,0x82,0xd1,0x54,0x40,0xf0,0x7e,0xb4,0x2d,0x16,0x2d,0x83,0x82,0x0a,0xb8,
+    0x0e,0x08,0x2c,0x11,0xe7,0xf5,0xc5,0x02,0x10,0x96,0xb7,0x8b,0x45,0x91,0x60,0xc0,0x83,0x89,0x60,0xa8,0x07,0x2b,0xf9,0x70,
+    0x1c,0x10,0x65,0xb0,0x70,0x41,0xa0,0x8f,0x3f,0xae,0x28,0x18,0x10,0x91,0xf8,0x19,0x54,0x81,0xf1,0x20,0xc0,0x84,0x8f,0xc0,
+    0xcf,0x90,0x3d,0x6c,0x1c,0x10,0x55,0xa0,0x70,0x41,0x7f,0xcf,0x7f,0xf8,0x90,0x60,0x43,0x0b,0xc1,0x85,0xfb,0x80,0xc4,0x81,
+    0x8a,0xe1,0x81,0x81,0x0c,0x56,0x0c,0x2f,0xd0,0x31,0x40,0x62,0xb5,0x80,0x70,0x41,0x55,0xc1,0xc1,0x05,0x7e,0xff,0x5c,0x18,
+    0x30,0x21,0xbf,0x06,0x17,0xe8,0x18,0xa0,0x31,0x5b,0xec,0x06,0x00,0x34,0x18,0x6f,0xbf,0xa0,0x0c,0x95,0x01,0xc1,0x04,0x51,
+    0x07,0x04,0x11,0xf4,0xf4,0x0f,0x77,0x06,0x04,0x34,0x18,0x6f,0xc5,0x60,0xaa,0x07,0x34,0x09,0xd0,0x18,0x10,0xef,0x03,0x0b,
+    0xf8,0x5e,0x0a,0xa0,0x73,0x40,0x93,0xc7,0x8e,0x4d,0x51,0xe5,0xdc,0xf9,0x03,0xdc,0x01,0x81,0x0e,0x54,0x0a,0x71,0xf8,0x11,
+    0x82,0xc6,0x98,0x0c,0x00,0x69,0x72,0xe5,0xe0,0xe4,0xff,0x25,0x67,0x80,0xcf,0x90,0xb9,0x1f,0x15,0x01,0x9f,0x21,0x73,0xe4,
+    0x17,0x63,0x06,0x04,0x34,0x7c,0x05,0xc0,0x8b,0xd7,0x41,0x81,0x0c,0x12,0x00,0xc0,0x2a,0xfe,0x2d,0x42,0x03,0x80,0x66,0x21,
+    0x71,0xf0,0x1c,0x03,0x30,0x6a,0xfb,0x05,0xd5,0xc1,0x81,0x0c,0x12,0x01,0x4c,0x0a,0xbf,0x8b,0x54,0xa0,0x06,0x09,0x00,0xa6,
+    0x05,0x5f,0xc5,0xa7,0x40,0xa2,0x03,0x2e,0x38,0xd2,0xe2,0x77,0xe8,0x10,0x00,0x09,0x43,0xa6,0x72,0xbf,0x3c,0xea,0x0a,0x1c,
+    0x60,0xb4,0x54,0xee,0x5e,0x6a,0x16,0x21,0xcf,0xb4,0x75,0x06,0x07,0xdb,0xec,0x14,0x0b,0x1d,0xc1,0x81,0xf4,0x1f,0x87,0xc2,
+    0xc8,0x98,0xe8,0x37,0xfa,0x3d,0x83,0x03,0xe6,0x3f,0x27,0x7f,0x06,0x07,0xd4,0x4b,0x03,0x25,0x62,0xd9,0x19,0x38,0x27,0xfe,
+    0xfe,0x0c,0x0f,0xb0,0xfc,0x0c,0xd4,0x3f,0x16,0xbc,0x41,0x24,0xbf,0x04,0x7a,0x04,0x7e,0x2d,0x90,0x74,0x2c,0xfc,0xcc,0x4f,
+    0x28,0xc6,0x09,0xfd,0x6e,0x80,0xa1,0x2f,0x03,0x34,0x08,0xfc,0x5a,0xe2,0x07,0xd5,0x81,0x9a,0x85,0xf0,0x60,0x7c,0x32,0x82,
+    0x06,0x07,0xe4,0x32,0xff,0x77,0xff,0x6f,0x53,0xf2,0x76,0xdd,0x7c,0x08,0x30,0x3e,0xe0,0x15,0xfe,0x0c,0x0f,0xaa,0xa0,0xc1,
+    0xf3,0xff,0x2d,0x6d,0x0f,0xc0,0xc0,0xfa,0x2a,0x0c,0x1f,0x81,0x81,0xf3,0x54,0x20,0xb9,0xee,0x07,0x4c,0x86,0x18,0xdf,0x01,
+    0x81,0xf3,0xf0,0x9d,0xf0,0x18,0x1f,0x30,0xc9,0xe6,0x0b,0x2c,0x0b,0xaf,0x40,0xc0,0xf9,0x86,0x4f,0x00,0xc0,0xf9,0xc2,0x67,
+    0x98,0x3c,0xac,0xaa,0xba,0x83,0x03,0xe5,0x16,0x01,0xce,0x20,0xe0,0xf9,0xbd,0x41,0xed,0x45,0x2c,0xc3,0x68,0x38,0x3e,0x6d,
+    0x00,0xe0,0xfa,0x3d,0xc1,0x00,0x00,0x4c,0x1d,0xab,0xda,0xb3,0x51,0x2f,0xaa,0xcd,0x45,0xc5,0x39,0xf1,0xf3,0x8c,0x90,0x8e,
+    0x92,0x86,0x7f,0x9c,0xa4,0xbc,0xb4,0xcc,0x1f,0xfa,0xb9,0x48,0xf1,0xd0,0x8b,0xb8,0x9b,0x94,0xb6,0x51,0x84,0xbb,0xa6,0x2c,
+    0x52,0xaf,0xde,0xad,0x58,0x2b,0x82,0x32,0xfe,0xb9,0x50,0x60,0x81,0xc1,0x86,0xfb,0x06,0x27,0xf5,0x50,0x39,0xa0,0x93,0x20,
+    0xc1,0x04,0x01,0x81,0xf8,0x31,0x3f,0x7e,0x07,0x34,0x12,0x4a,0x84,0x21,0xf6,0x88,0xa0,0x46,0x0b,0x25,0xc1,0x81,0x08,0x2e,
+    0xe8,0x31,0xbf,0xc3,0x60,0x95,0xe5,0x1f,0x90,0x40,0x73,0x40,0xa0,0xd7,0x35,0xa1,0x00,0x18,0x50,0x42,0xf0,0x62,0x7e,0x81,
+    0xce,0x03,0x9a,0x52,0x01,0xc0,0xc2,0x83,0x2b,0x06,0x27,0xe2,0x83,0x9a,0x05,0x32,0x0c,0x08,0x41,0x72,0xd7,0x51,0xa4,0x2d,
+    0x60,0x59,0x36,0x0c,0x08,0x38,0x94,0xb1,0x6f,0xfa,0xca,0xed,0x62,0x33,0x6f,0x18,0x18,0x42,0xae,0x6c,0x7a,0x01,0x80,0xc2,
+    0x83,0xfc,0x18,0x9f,0x7f,0x8b,0x26,0x47,0xa0,0x80,0x0c,0x28,0x47,0xc1,0x89,0xf7,0x56,0x2c,0x9d,0x05,0x00,0x91,0xa2,0x35,
+    0x2d,0x1c,0xf0,0x5d,0x3a,0x0a,0x01,0x2b,0xa3,0xaa,0x04,0x41,0x3d,0xdc,0x2f,0x85,0x5c,0xb0,0x21,0x80,0x60,0x30,0xa1,0x00,
+    0xc5,0x7c,0x86,0x72,0x80,0xc0,0xfc,0x84,0x00,0x61,0x42,0x3c,0x0c,0x4f,0xd8,0x67,0x36,0x0c,0x08,0x38,0xfb,0x04,0x60,0x55,
+    0x83,0x45,0xfe,0x6a,0x5c,0x14,0x02,0x50,0x29,0x86,0x6e,0xfc,0x5c,0x5b,0x0c,0x79,0x10,0x60,0x7e,0x02,0x10,0x30,0xa1,0x05,
+    0xc0,0x45,0xf1,0xa0,0xc0,0xfb,0x89,0x00,0xc2,0x84,0x03,0x80,0x0d,0x25,0x04,0xa1,0xf0,0x29,0x86,0x31,0xc0,0x84,0x3e,0x05,
+    0x34,0x06,0xb0,0x04,0xd3,0xb4,0x36,0x85,0xcb,0x88,0x06,0x07,0xdc,0x4a,0x06,0x14,0x20,0x21,0x20,0x15,0xc2,0x03,0x03,0xec,
+    0x3e,0x06,0x14,0x20,0x20,0x01,0x01,0x5c,0x40,0x1e,0x1f,0x02,0x9b,0xc8,0x01,0x7d,0x07,0x15,0x0f,0x81,0x4d,0xe0,0x20,0x0b,
+    0x37,0x74,0x25,0xa0,0xb2,0x86,0x5b,0xf8,0x30,0x3e,0xc5,0xc0,0xc2,0x83,0x84,0x00,0x62,0x40,0xc5,0x6f,0x10,0x03,0x7c,0x0c,
+    0x28,0x28,0x91,0x01,0xc8,0x04,0x7e,0x50,0x5c,0x0a,0x65,0x48,0x1c,0xeb,0x15,0x17,0x01,0x8f,0x0d,0x1e,0x11,0xfd,0x08,0xae,
+    0x18,0x00,0x02,0x70,0xe5,0x5f,0x95,0x9a,0x8e,0x7e,0x56,0x23,0x3e,0x42,0x31,0xed,0xf3,0x90,0x94,0x92,0x96,0x87,0x7f,0x9c,
+    0xc4,0xdc,0xdc,0xf4,0x1f,0xfa,0x79,0xa9,0x78,0x0c,0x1f,0xd9,0x72,0x32,0xaa,0x1f,0x32,0x6a,0x76,0x7e,0xfe,0x79,0x59,0x75,
+    0xb7,0x83,0xb0,0x62,0xc0,0x9f,0x03,0x0d,0xc0,0xbe,0x19,0x91,0xbc,0x05,0xd4,0x6e,0xf3,0xe3,0x90,0x46,0x25,0x9a,0x06,0x07,
+    0xe0,0x7c,0x06,0x4b,0x28,0x34,0xdf,0xef,0xa2,0xa1,0x05,0x56,0x03,0x07,0xf9,0x00,0x82,0xb4,0x8c,0x26,0x1c,0x12,0xbc,0x23,
+    0xa5,0xa3,0x98,0x8c,0x9a,0x14,0x16,0x2f,0x92,0xad,0x25,0xa0,0x5c,0x3e,0xc4,0x13,0x63,0x6f,0x97,0x05,0x28,0xdb,0x8d,0x08,
+    0x48,0xcf,0x53,0x03,0x03,0xef,0xe1,0xd0,0x28,0xe0,0x31,0x5f,0x8b,0x32,0x8d,0xb4,0xfd,0x68,0xfd,0x58,0x30,0x3e,0xea,0x94,
+    0xe8,0xf6,0x25,0xc5,0x69,0x98,0xc4,0xa1,0x83,0xc6,0x21,0xe2,0xc2,0xb8,0x60,0x68,0x80,0x5c,0xf9,0x75,0x15,0x32,0x05,0xb4,
+    0x11,0x74,0x9a,0x55,0xa8,0x21,0x02,0x30,0x13,0x24,0xac,0x06,0x07,0xd9,0x50,0x19,0x2c,0xa0,0xd2,0xff,0x9f,0xae,0x06,0x07,
+    0xd5,0x56,0x08,0xe0,0xaa,0x4a,0x0a,0xe3,0x4e,0xa8,0x83,0x96,0x0f,0x43,0x83,0x43,0xfe,0xf5,0xc9,0x83,0x5a,0x03,0x18,0x07,
+    0x08,0x0a,0x44,0x1b,0x50,0x12,0x5a,0xe0,0x60,0x7d,0x4b,0x97,0xf8,0xd2,0xb0,0x18,0x1f,0x42,0xe5,0xfe,0x34,0x75,0x0a,0xe2,
+    0x3c,0x80,0x3e,0x7f,0xf1,0x80,0xd9,0x80,0x9d,0xac,0x06,0x07,0xd1,0x50,0x17,0x07,0xcf,0xfe,0xa4,0x18,0x1f,0x45,0x40,0x5c,
+    0x23,0x76,0x88,0xdc,0x3c,0x18,0x70,0x52,0x20,0x9a,0xa7,0x06,0x07,0xcd,0x52,0x70,0x8e,0x90,0x18,0x1f,0x32,0xe0,0x2e,0x58,
+    0x39,0x20,0x76,0x88,0x3b,0xf1,0xf7,0x7d,0x04,0x89,0xf0,0x60,0x7c,0x8b,0x81,0x52,0x82,0x03,0x1b,0xfc,0x1d,0x12,0xcc,0x03,
+    0x03,0xe3,0xe5,0x83,0x37,0x88,0x70,0xe0,0x56,0x00,0x01,0x40,0x73,0x4f,0xbf,0x11,0x9f,0x22,0xfe,0xc5,0x22,0x8f,0x20,0x8d,
+    0x7b,0x7e,0x65,0x26,0x64,0x78,0x4d,0x2d,0x6c,0x26,0x87,0x7f,0xf4,0x0f,0xa7,0x7c,0xe5,0xb1,0x68,0x5a,0x2d,0xa2,0xdf,0x01,
+    0x82,0xd7,0xc1,0x41,0x7a,0xf7,0xdd,0x7f,0xad,0xae,0x2e,0xa3,0x57,0x7c,0x06,0x23,0x40,0x5f,0x89,0x9b,0x46,0x43,0x6b,0xfc,
+    0xff,0x32,0x44,0x9a,0x53,0x5e,0xfb,0x0b,0xf2,0x6e,0x03,0x8b,0xfd,0xa3,0x6c,0x07,0x17,0xfb,0x1a,0xaa,0x76,0x60,0xf1,0xcf,
+    0x10,0xff,0x5b,0x60,0x38,0xbf,0xdb,0x3b,0x17,0x18,0xe7,0x8f,0x1d,0xa4,0x3c,0x0c,0x9d,0x22,0x3e,0xd7,0x00,0xc9,0x63,0xf7,
+    0x38,0xb8,0xc7,0x4c,0x78,0xed,0x31,0xe3,0x9d,0x22,0x3e,0xe7,0x17,0x19,0x6d,0xec,0x27,0xd0,0x8a,0xbc,0x3b,0xe3,0x05,0x2e,
+    0xd2,0x51,0xe2,0x67,0x58,0x8f,0xb5,0xb0,0x97,0x63,0x61,0x2e,0x82,0x88,0x4d,0x9a,0xa3,0xc4,0xce,0x91,0x3f,0x57,0x61,0x2e,
+    0x96,0xc2,0x5c,0x95,0x10,0x9b,0x13,0x21,0x33,0xa4,0x4f,0xcd,0x9f,0x84,0xd8,0x92,0xc5,0x86,0x77,0xd2,0x3a,0xc7,0xc2,0x32,
+    0x38,0x2f,0x73,0x89};
+
+    // Expected results
+    H263Info infoRefQCIF_N;
+    infoRefQCIF_N.uiH263PTypeFmt = 2;
+    infoRefQCIF_N.codecBits      = 0;
+    infoRefQCIF_N.pQuant         = 7;
+    infoRefQCIF_N.fType          = 0;
+    infoRefQCIF_N.cpmBit         = 0;
+    infoRefQCIF_N.numOfGOBs      = 9;
+    infoRefQCIF_N.totalNumOfMBs  = 11*9;
+
+    infoRefQCIF_N.ptrGOBbuffer[0] = 0;    infoRefQCIF_N.ptrGOBbufferSBit[0] = 0; infoRefQCIF_N.ptrGQuant[0] = 0;
+    infoRefQCIF_N.ptrGOBbuffer[1] = 215;  infoRefQCIF_N.ptrGOBbufferSBit[1] = 2; infoRefQCIF_N.ptrGQuant[1] = 7;
+    infoRefQCIF_N.ptrGOBbuffer[2] = 456;  infoRefQCIF_N.ptrGOBbufferSBit[2] = 1; infoRefQCIF_N.ptrGQuant[2] = 7;
+    infoRefQCIF_N.ptrGOBbuffer[3] = 535;  infoRefQCIF_N.ptrGOBbufferSBit[3] = 5; infoRefQCIF_N.ptrGQuant[3] = 7;
+    infoRefQCIF_N.ptrGOBbuffer[4] = 615;  infoRefQCIF_N.ptrGOBbufferSBit[4] = 7; infoRefQCIF_N.ptrGQuant[4] = 7;
+    infoRefQCIF_N.ptrGOBbuffer[5] = 925;  infoRefQCIF_N.ptrGOBbufferSBit[5] = 4; infoRefQCIF_N.ptrGQuant[5] = 7;
+    infoRefQCIF_N.ptrGOBbuffer[6] = 1133; infoRefQCIF_N.ptrGOBbufferSBit[6] = 1; infoRefQCIF_N.ptrGQuant[6] = 7;
+    infoRefQCIF_N.ptrGOBbuffer[7] = 1512; infoRefQCIF_N.ptrGOBbufferSBit[7] = 6; infoRefQCIF_N.ptrGQuant[7] = 7;
+    infoRefQCIF_N.ptrGOBbuffer[8] = 1832; infoRefQCIF_N.ptrGOBbufferSBit[8] = 7; infoRefQCIF_N.ptrGQuant[8] = 7;
+
+    // --------------------------------------------------
+    // Input data - H.263 encoded stream CIF (I-frame)
+    // --------------------------------------------------
+
+    FILE* openFile = fopen("H263Foreman_CIF_Iframe.bin", "rb");
+
+    fseek(openFile, 0, SEEK_END);
+    int lengthRefCIF_I = ftell(openFile);
+    fseek(openFile, 0, SEEK_SET);
+
+    unsigned char* encodedStreamCIF_I = new unsigned char[lengthRefCIF_I];
+    fread(encodedStreamCIF_I, 1, lengthRefCIF_I, openFile);
+    fclose(openFile);
+
+    // Expected results
+    H263Info infoRefCIF_I;
+    infoRefCIF_I.uiH263PTypeFmt = 3;
+    infoRefCIF_I.codecBits      = 0;
+    infoRefCIF_I.pQuant         = 5;
+    infoRefCIF_I.fType          = 0;
+    infoRefCIF_I.cpmBit         = 0;
+    infoRefCIF_I.numOfGOBs      = 18;
+    infoRefCIF_I.totalNumOfMBs  = 22*18;
+
+    infoRefCIF_I.ptrGOBbuffer[0]  = 0;     infoRefCIF_I.ptrGOBbufferSBit[0]  = 0; infoRefCIF_I.ptrGQuant[0]  = 0;
+    infoRefCIF_I.ptrGOBbuffer[1]  = 1607;  infoRefCIF_I.ptrGOBbufferSBit[1]  = 0; infoRefCIF_I.ptrGQuant[1]  = 5;
+    infoRefCIF_I.ptrGOBbuffer[2]  = 2759;  infoRefCIF_I.ptrGOBbufferSBit[2]  = 0; infoRefCIF_I.ptrGQuant[2]  = 5;
+    infoRefCIF_I.ptrGOBbuffer[3]  = 3699;  infoRefCIF_I.ptrGOBbufferSBit[3]  = 0; infoRefCIF_I.ptrGQuant[3]  = 5;
+    infoRefCIF_I.ptrGOBbuffer[4]  = 4506;  infoRefCIF_I.ptrGOBbufferSBit[4]  = 0; infoRefCIF_I.ptrGQuant[4]  = 5;
+    infoRefCIF_I.ptrGOBbuffer[5]  = 5260;  infoRefCIF_I.ptrGOBbufferSBit[5]  = 0; infoRefCIF_I.ptrGQuant[5]  = 5;
+    infoRefCIF_I.ptrGOBbuffer[6]  = 6254;  infoRefCIF_I.ptrGOBbufferSBit[6]  = 0; infoRefCIF_I.ptrGQuant[6]  = 5;
+    infoRefCIF_I.ptrGOBbuffer[7]  = 7117;  infoRefCIF_I.ptrGOBbufferSBit[7]  = 0; infoRefCIF_I.ptrGQuant[7]  = 5;
+    infoRefCIF_I.ptrGOBbuffer[8]  = 7804;  infoRefCIF_I.ptrGOBbufferSBit[8]  = 0; infoRefCIF_I.ptrGQuant[8]  = 5;
+    infoRefCIF_I.ptrGOBbuffer[9]  = 8600;  infoRefCIF_I.ptrGOBbufferSBit[9]  = 0; infoRefCIF_I.ptrGQuant[9]  = 5;
+    infoRefCIF_I.ptrGOBbuffer[10]  = 9419; infoRefCIF_I.ptrGOBbufferSBit[10] = 0; infoRefCIF_I.ptrGQuant[10] = 5;
+    infoRefCIF_I.ptrGOBbuffer[11] = 10126; infoRefCIF_I.ptrGOBbufferSBit[11] = 0; infoRefCIF_I.ptrGQuant[11] = 5;
+    infoRefCIF_I.ptrGOBbuffer[12] = 10723; infoRefCIF_I.ptrGOBbufferSBit[12] = 0; infoRefCIF_I.ptrGQuant[12] = 5;
+    infoRefCIF_I.ptrGOBbuffer[13] = 11280; infoRefCIF_I.ptrGOBbufferSBit[13] = 0; infoRefCIF_I.ptrGQuant[13] = 5;
+    infoRefCIF_I.ptrGOBbuffer[14] = 11910; infoRefCIF_I.ptrGOBbufferSBit[14] = 0; infoRefCIF_I.ptrGQuant[14] = 5;
+    infoRefCIF_I.ptrGOBbuffer[15] = 12430; infoRefCIF_I.ptrGOBbufferSBit[15] = 0; infoRefCIF_I.ptrGQuant[15] = 5;
+    infoRefCIF_I.ptrGOBbuffer[16] = 12925; infoRefCIF_I.ptrGOBbufferSBit[16] = 0; infoRefCIF_I.ptrGQuant[16] = 5;
+    infoRefCIF_I.ptrGOBbuffer[17] = 13506; infoRefCIF_I.ptrGOBbufferSBit[17] = 0; infoRefCIF_I.ptrGQuant[17] = 5;
+
+    // --------------------------------------------------
+    // Input data - H.263 encoded stream CIF (P-frame)
+    // --------------------------------------------------
+
+    openFile = fopen("H263Foreman_CIF_Pframe.bin", "rb");
+
+    fseek(openFile, 0, SEEK_END);
+    int lengthRefCIF_P = ftell(openFile);
+    fseek(openFile, 0, SEEK_SET);
+
+    unsigned char* encodedStreamCIF_P = new unsigned char[lengthRefCIF_P];
+    fread(encodedStreamCIF_P, 1, lengthRefCIF_P, openFile);
+    fclose(openFile);
+
+    // Expected results
+    H263Info infoRefCIF_P;
+    infoRefCIF_P.uiH263PTypeFmt = 3;
+    infoRefCIF_P.codecBits      = 8;
+    infoRefCIF_P.pQuant         = 4;
+    infoRefCIF_P.fType          = 1;
+    infoRefCIF_P.cpmBit         = 0;
+    infoRefCIF_P.numOfGOBs      = 18;
+    infoRefCIF_P.totalNumOfMBs  = 22*18;
+
+    infoRefCIF_P.ptrGOBbuffer[0]  = 0;    infoRefCIF_P.ptrGOBbufferSBit[0]  = 0; infoRefCIF_P.ptrGQuant[0]  = 0;
+    infoRefCIF_P.ptrGOBbuffer[1]  = 252;  infoRefCIF_P.ptrGOBbufferSBit[1]  = 0; infoRefCIF_P.ptrGQuant[1]  = 5;
+    infoRefCIF_P.ptrGOBbuffer[2]  = 482;  infoRefCIF_P.ptrGOBbufferSBit[2]  = 0; infoRefCIF_P.ptrGQuant[2]  = 6;
+    infoRefCIF_P.ptrGOBbuffer[3]  = 581;  infoRefCIF_P.ptrGOBbufferSBit[3]  = 0; infoRefCIF_P.ptrGQuant[3]  = 6;
+    infoRefCIF_P.ptrGOBbuffer[4]  = 676;  infoRefCIF_P.ptrGOBbufferSBit[4]  = 0; infoRefCIF_P.ptrGQuant[4]  = 7;
+    infoRefCIF_P.ptrGOBbuffer[5]  = 756;  infoRefCIF_P.ptrGOBbufferSBit[5]  = 0; infoRefCIF_P.ptrGQuant[5]  = 7;
+    infoRefCIF_P.ptrGOBbuffer[6]  = 855;  infoRefCIF_P.ptrGOBbufferSBit[6]  = 0; infoRefCIF_P.ptrGQuant[6]  = 8;
+    infoRefCIF_P.ptrGOBbuffer[7]  = 949;  infoRefCIF_P.ptrGOBbufferSBit[7]  = 0; infoRefCIF_P.ptrGQuant[7]  = 9;
+    infoRefCIF_P.ptrGOBbuffer[8]  = 1004; infoRefCIF_P.ptrGOBbufferSBit[8]  = 0; infoRefCIF_P.ptrGQuant[8]  = 10;
+    infoRefCIF_P.ptrGOBbuffer[9]  = 1062; infoRefCIF_P.ptrGOBbufferSBit[9]  = 0; infoRefCIF_P.ptrGQuant[9]  = 11;
+    infoRefCIF_P.ptrGOBbuffer[10] = 1115; infoRefCIF_P.ptrGOBbufferSBit[10] = 0; infoRefCIF_P.ptrGQuant[10] = 11;
+    infoRefCIF_P.ptrGOBbuffer[11] = 1152; infoRefCIF_P.ptrGOBbufferSBit[11] = 0; infoRefCIF_P.ptrGQuant[11] = 13;
+    infoRefCIF_P.ptrGOBbuffer[12] = 1183; infoRefCIF_P.ptrGOBbufferSBit[12] = 0; infoRefCIF_P.ptrGQuant[12] = 14;
+    infoRefCIF_P.ptrGOBbuffer[13] = 1214; infoRefCIF_P.ptrGOBbufferSBit[13] = 0; infoRefCIF_P.ptrGQuant[13] = 15;
+    infoRefCIF_P.ptrGOBbuffer[14] = 1257; infoRefCIF_P.ptrGOBbufferSBit[14] = 0; infoRefCIF_P.ptrGQuant[14] = 16;
+    infoRefCIF_P.ptrGOBbuffer[15] = 1286; infoRefCIF_P.ptrGOBbufferSBit[15] = 0; infoRefCIF_P.ptrGQuant[15] = 16;
+    infoRefCIF_P.ptrGOBbuffer[16] = 1321; infoRefCIF_P.ptrGOBbufferSBit[16] = 0; infoRefCIF_P.ptrGQuant[16] = 16;
+    infoRefCIF_P.ptrGOBbuffer[17] = 1352; infoRefCIF_P.ptrGOBbufferSBit[17] = 0; infoRefCIF_P.ptrGQuant[17] = 14;
+
+    //---------------------------------------------------------------
+    //---------------------------------------------------------------
+    //---------------------------------------------------------------
+    // Start test
+    const H263Info* ptrInfoSQCIF = NULL;
+    const H263MBInfo* ptrMBInfoSQCIF = NULL;
+    const H263Info* ptrInfoQCIF = NULL;
+    const H263MBInfo* ptrMBInfoQCIF = NULL;
+    const H263Info* ptrInfoCIF = NULL;
+    const H263MBInfo* ptrMBInfoCIF = NULL;
+    const H263Info* ptrInfoQCIF_N = NULL;
+    const H263MBInfo* ptrMBInfoQCIF_N = NULL;
+    const H263Info* ptrInfoCIF_I = NULL;
+    const H263MBInfo* ptrMBInfoCIF_I = NULL;
+    const H263Info* ptrInfoCIF_P = NULL;
+    const H263MBInfo* ptrMBInfoCIF_P = NULL;
+    H263Information h263Information;
+
+    // Input buffer
+    const int length = 3000;
+    unsigned char* encodedBuffer = new unsigned char[lengthRefCIF_P];
+
+    // Test invalid inputs
+    assert(-1 == h263Information.GetInfo(NULL, length, ptrInfoSQCIF));
+    assert(-1 == h263Information.GetInfo(encodedBuffer, 0, ptrInfoSQCIF));
+    assert(-1 == h263Information.GetInfo(encodedBuffer, length, ptrInfoSQCIF)); // invalid H.263 stream
+//    assert(-1 == h263Information.GetInfo(encodedStreamSQCIF, lengthRefSQCIF/2, ptrInfoSQCIF)); // invalid H.263 stream
+
+    assert(-1 == h263Information.GetMBInfo(NULL, length, 0, ptrMBInfoSQCIF));
+    assert(-1 == h263Information.GetMBInfo(encodedBuffer, 0, 0, ptrMBInfoSQCIF));
+    assert(-1 == h263Information.GetMBInfo(encodedBuffer, length, 0, ptrMBInfoSQCIF));
+    assert(-1 == h263Information.GetMBInfo(encodedBuffer, length, -1, ptrMBInfoSQCIF)); // incorrect group number
+    assert(-1 == h263Information.GetMBInfo(encodedBuffer, length, 8, ptrMBInfoSQCIF));  // incorrect group number
+
+    // ----------------------------------------------
+    // Get info from encoded H.263 stream - SQCIF
+    // ----------------------------------------------
+    h263Information.Reset();
+    assert(0 == h263Information.GetInfo(encodedStreamSQCIF, lengthRefSQCIF, ptrInfoSQCIF));
+    ValidateResults(ptrInfoSQCIF, &infoRefSQCIF);
+
+    // Get MB info
+    for (int i = 0; i < ptrInfoSQCIF->numOfGOBs; i++)
+    {
+        assert(0 == h263Information.GetMBInfo(encodedStreamSQCIF, lengthRefSQCIF, i, ptrMBInfoSQCIF));
+    }
+    ValidateMBResults(ptrInfoSQCIF, ptrMBInfoSQCIF, &infoRefSQCIF);
+
+    // ---------------------------------------------
+    // Get info from encoded H.263 stream - QCIF
+    // ---------------------------------------------
+    h263Information.Reset();
+    assert(0 == h263Information.GetInfo(encodedStreamQCIF, lengthRefQCIF, ptrInfoQCIF));
+    ValidateResults(ptrInfoQCIF, &infoRefQCIF);
+
+    // Get MB info
+    for (int i = 0; i < ptrInfoQCIF->numOfGOBs; i++)
+    {
+        assert(0 == h263Information.GetMBInfo(encodedStreamQCIF, lengthRefQCIF, i, ptrMBInfoQCIF));
+    }
+    ValidateMBResults(ptrInfoQCIF, ptrMBInfoQCIF, &infoRefQCIF);
+
+    // --------------------------------------------
+    // Get info from encoded H.263 stream - CIF
+    // --------------------------------------------
+    h263Information.Reset();
+    assert(0 == h263Information.GetInfo(encodedStreamCIF, lengthRefCIF, ptrInfoCIF));
+    ValidateResults(ptrInfoCIF, &infoRefCIF);
+
+    // Get MB info
+    for (int i = 0; i < ptrInfoCIF->numOfGOBs; i++)
+    {
+        assert(0 == h263Information.GetMBInfo(encodedStreamCIF, lengthRefCIF, i, ptrMBInfoCIF));
+    }
+    ValidateMBResults(ptrInfoCIF, ptrMBInfoCIF, &infoRefCIF);
+
+    // ----------------------------------------------------------------------
+    // Get info from encoded H.263 stream - QCIF - non byte aligned GOBs
+    // ----------------------------------------------------------------------
+    h263Information.Reset();
+    assert(0 == h263Information.GetInfo(encodedStreamQCIF_N, lengthRefQCIF_N, ptrInfoQCIF_N));
+    ValidateResults(ptrInfoQCIF_N, &infoRefQCIF_N);
+
+    // Get MB info
+    for (int i = 0; i < ptrInfoQCIF_N->numOfGOBs; i++)
+    {
+        assert(0 == h263Information.GetMBInfo(encodedStreamQCIF_N, lengthRefQCIF_N, i, ptrMBInfoQCIF_N));
+    }
+    ValidateMBResults(ptrInfoQCIF_N, ptrMBInfoQCIF_N, &infoRefQCIF_N);
+
+    // -------------------------------------------
+    // Get info from encoded H.263 stream - CIF
+    // -------------------------------------------
+    h263Information.Reset();
+    assert(0 == h263Information.GetInfo(encodedStreamCIF_I, lengthRefCIF_I, ptrInfoCIF_I));
+    ValidateResults(ptrInfoCIF_I, &infoRefCIF_I);
+
+    // Get MB info
+    unsigned int start = timeGetTime();
+    for (int i = 0; i < ptrInfoCIF_I->numOfGOBs; i++)
+    {
+        assert(0 == h263Information.GetMBInfo(encodedStreamCIF_I, lengthRefCIF_I, i, ptrMBInfoCIF_I));
+    }
+    unsigned int endMB = timeGetTime();
+    ValidateMBResults(ptrInfoCIF_I, ptrMBInfoCIF_I, &infoRefCIF_I, false);
+
+    std::cout << "I-frame, length: " << lengthRefCIF_I << " bytes. Time: " << endMB - start << " ms." << std::endl;
+    PRINT_LINE;
+
+    // -------------------------------------------
+    // Get info from encoded H.263 stream - CIF
+    // -------------------------------------------
+    h263Information.Reset();
+    assert(0 == h263Information.GetInfo(encodedStreamCIF_P, lengthRefCIF_P, ptrInfoCIF_P));
+    ValidateResults(ptrInfoCIF_P, &infoRefCIF_P);
+
+    // Get MB info
+    start = timeGetTime();
+    for (int i = 0; i < ptrInfoCIF_P->numOfGOBs; i++)
+    {
+        assert(0 == h263Information.GetMBInfo(encodedStreamCIF_P, lengthRefCIF_P, i, ptrMBInfoCIF_P));
+    }
+    endMB = timeGetTime();
+    ValidateMBResults(ptrInfoCIF_P, ptrMBInfoCIF_P, &infoRefCIF_P, false);
+
+    std::cout << "P-frame, length:  " << lengthRefCIF_P << " bytes. Time: " << endMB - start << " ms." << std::endl;
+    PRINT_LINE;
+
+    delete [] encodedStreamCIF_I;
+    delete [] encodedStreamCIF_P;
+    delete [] encodedBuffer;
+
+    TEST_PASSED();
+    ::Sleep(5000);
+    return 0;
+}
+
diff --git a/src/modules/rtp_rtcp/test/testRateControl/testRateControl.cc b/src/modules/rtp_rtcp/test/testRateControl/testRateControl.cc
new file mode 100644
index 0000000..c282557
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/testRateControl/testRateControl.cc
@@ -0,0 +1,271 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+#include <windows.h>
+#include <iostream>
+#include <tchar.h>
+
+#include "rtp_rtcp.h"
+#include "common_types.h"
+#include "RateControlDetector.h"
+/*#include "rtcp_utility.h"
+#include "tmmbr_help.h"*/
+
+#define TEST_STR "Test RateControl."
+#define TEST_PASSED() std::cerr << TEST_STR << " : [OK]" << std::endl
+#define PRINT_LINE std::cout << "------------------------------------------" << std::endl;
+
+
+const int maxFileLen = 200;
+WebRtc_UWord8* dataFile[maxFileLen];
+
+
+struct InputSet
+{
+    WebRtc_UWord32 TMMBR;
+    WebRtc_UWord32 packetOH;
+    WebRtc_UWord32 SSRC;
+};
+
+const InputSet set0   = {220,  80, 11111};  // bitRate, packetOH, ssrc
+const InputSet set1   = {180,  90, 22222};
+const InputSet set2   = {100, 210, 33333};
+const InputSet set3   = { 35,  40, 44444};
+const InputSet set4   = { 40,  60, 55555};
+const InputSet set4_1 = {100,  60, 55555};
+const InputSet set4_2 = { 10,  60, 55555};
+const InputSet set5   = {200,  40, 66666};
+const InputSet set00  = {  0,  40, 66666};
+
+
+
+
+WebRtc_Word32 GetFile(char* fileName)
+{
+    if (!fileName[0])
+    {
+        return 0;
+    }
+
+    FILE* openFile = fopen(fileName, "rb");
+    assert(openFile != NULL);
+    fseek(openFile, 0, SEEK_END);
+    int len = (WebRtc_Word16)(ftell(openFile));
+    rewind(openFile);
+    assert(len > 0 && len < maxFileLen);
+    fread(dataFile, 1, len, openFile);
+    fclose(openFile);
+    return len;
+};
+
+
+class LoopBackTransport2 : public webrtc::Transport
+{
+public:
+    LoopBackTransport2(RtpRtcp* rtpRtcpModule)  :
+      _rtpRtcpModule(rtpRtcpModule),
+      _cnt(0)
+    {
+    }
+    virtual int SendPacket(int channel, const void *data, int len)
+    {
+        return _rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)data, len);
+    }
+    virtual int SendRTCPPacket(int channel, const void *data, int len)
+    {
+        char fileName[256] = {0};
+
+
+        // Get stored rtcp packet w/ TMMBR
+        len = GetFile(fileName);
+        if (len == 0)
+        {
+            return 0;
+        }
+
+        // Send in bitrate request
+        return _rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)dataFile, len);
+    }
+    RtpRtcp* _rtpRtcpModule;
+    WebRtc_UWord32       _cnt;
+};
+
+
+class LoopBackTransportVideo : public webrtc::Transport
+{
+public:
+    LoopBackTransportVideo(RtpRtcp* rtpRtcpModule)  :
+      _rtpRtcpModule(rtpRtcpModule),
+      _cnt(0)
+    {
+    }
+    virtual int SendPacket(int channel, const void *data, int len)
+    {
+        return _rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)data, len);
+    }
+    virtual int SendRTCPPacket(int channel, const void *data, int len)
+    {
+        char fileName[256] = {0};
+
+        strcpy(fileName, "RTCPPacketTMMBR0.bin");
+
+        ++_cnt;
+
+        // Get stored rtcp packet w/ TMMBR
+        len = GetFile(fileName);
+        if (len == 0)
+        {
+            return 0;
+        }
+
+        // Send in bitrate request*/
+        return _rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)dataFile, len);
+    }
+
+    RtpRtcp* _rtpRtcpModule;
+    WebRtc_UWord32       _cnt;
+};
+
+class TestRateControl : private RateControlDetector
+{
+public:
+    TestRateControl():RateControlDetector(0)
+    {
+    }
+    ~TestRateControl()
+    {
+    }
+    void Start()
+    {
+        //Test perfect conditions
+        // But only one packet per frame
+        SetLastUsedBitRate(500);
+        WebRtc_UWord32 rtpTs=1234*90;
+        WebRtc_UWord32 framePeriod=33; // In Ms
+        WebRtc_UWord32 rtpDelta=framePeriod*90;
+        WebRtc_UWord32 netWorkDelay=10;
+        WebRtc_UWord32 arrivalTime=rtpTs/90+netWorkDelay;
+        WebRtc_UWord32 newBitRate=0;
+        for(WebRtc_UWord32 k=0;k<10;k++)
+        {
+            // Receive 10 packets
+            for(WebRtc_UWord32 i=0;i<10;i++)
+            {
+                NotifyNewArrivedPacket(rtpTs,arrivalTime);
+                rtpTs+=rtpDelta;
+                arrivalTime=rtpTs/90+netWorkDelay;
+            }
+            newBitRate=RateControl(2*netWorkDelay);
+            SetLastUsedBitRate(newBitRate);
+            Sleep(10*framePeriod);
+            std::cout << "RTCP Packet " << k << " new bitrate " << newBitRate << std::endl;
+        }
+        Reset();
+
+
+        //Test increasing RTT
+        std::cout << "Test increasing RTT - No Receive timing changes" << std::endl;
+        SetLastUsedBitRate(500);
+
+        for(WebRtc_UWord32 k=0;k<10;k++)
+        {
+            // Receive 10 packets
+            for(WebRtc_UWord32 i=0;i<10;i++)
+            {
+                NotifyNewArrivedPacket(rtpTs,arrivalTime);
+                rtpTs+=rtpDelta;
+                arrivalTime=rtpTs/90+netWorkDelay;
+            }
+            WebRtc_UWord32 rtt=2*netWorkDelay+k*20;
+            newBitRate=RateControl(rtt);
+            Sleep(10*framePeriod);
+            SetLastUsedBitRate(newBitRate);
+            std::cout << "RTCP Packet " << k << " RTT "<< rtt << " new bitrate " << newBitRate << std::endl;
+
+        }
+
+        Reset();
+
+
+        //Test increasing RTT
+        std::cout << "Test increasing RTT - Changed receive timing" << std::endl;
+        SetLastUsedBitRate(500);
+
+        for(WebRtc_UWord32 k=0;k<10;k++)
+        {
+            // Receive 10 packets
+            for(WebRtc_UWord32 i=0;i<10;i++)
+            {
+                NotifyNewArrivedPacket(rtpTs,arrivalTime);
+                rtpTs+=rtpDelta;
+                arrivalTime=rtpTs/90+netWorkDelay+i+(k*20);
+            }
+            WebRtc_UWord32 rtt=2*netWorkDelay+k*20;
+            newBitRate=RateControl(rtt);
+            Sleep(10*framePeriod);
+            SetLastUsedBitRate(newBitRate);
+            std::cout << "RTCP Packet " << k << " RTT "<< rtt << " new bitrate " << newBitRate << std::endl;
+
+        }
+
+
+
+    };
+};
+
+class NULLDataZink: public RtpData
+{
+    virtual WebRtc_Word32 OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                                                const WebRtc_UWord16 payloadSize,
+                                                const webrtc::WebRtcRTPHeader* rtpHeader)
+    {
+        return 0;
+    };
+};
+
+
+int _tmain(int argc, _TCHAR* argv[])
+{
+
+    std::string str;
+    std::cout << "------------------------" << std::endl;
+    std::cout << "---Test RateControl ----" << std::endl;
+    std::cout << "------------------------" << std::endl;
+    std::cout << "  "  << std::endl;
+
+    // --------------------
+    // Test TMMBRHelp class
+
+    // --------------------
+    TestRateControl test;
+    test.Start();
+
+    printf("RateControl-class test done.\n");
+
+    // ------------------------
+    // Test RateControl single module
+    // ------------------------
+    RtpRtcp* rtpRtcpModuleVideo = RtpRtcp::CreateRtpRtcp(0, false);
+
+    LoopBackTransportVideo* myLoopBackTransportVideo = new LoopBackTransportVideo(rtpRtcpModuleVideo);
+    assert(0 == rtpRtcpModuleVideo->RegisterSendTransport(myLoopBackTransportVideo));
+    printf("Multi module test done.\n");
+
+
+    RtpRtcp::DestroyRtpRtcp(rtpRtcpModuleVideo);
+    delete myLoopBackTransportVideo;
+
+    TEST_PASSED();
+    ::Sleep(5000);
+
+    return 0;
+}
+
diff --git a/src/modules/rtp_rtcp/test/testTMMBR/testTMMBR.cc b/src/modules/rtp_rtcp/test/testTMMBR/testTMMBR.cc
new file mode 100644
index 0000000..d1e1572
--- /dev/null
+++ b/src/modules/rtp_rtcp/test/testTMMBR/testTMMBR.cc
@@ -0,0 +1,1034 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+#include <windows.h>
+#include <iostream>
+#include <tchar.h>
+
+#include "rtp_rtcp.h"
+#include "common_types.h"
+#include "rtcp_utility.h"
+#include "tmmbr_help.h"
+
+#define TEST_STR "Test TMMBR."
+#define TEST_PASSED() std::cerr << TEST_STR << " : [OK]" << std::endl
+#define PRINT_LINE std::cout << "------------------------------------------" << std::endl;
+
+
+const int maxFileLen = 200;
+WebRtc_UWord8* dataFile[maxFileLen];
+
+
+struct InputSet
+{
+    WebRtc_UWord32 TMMBR;
+    WebRtc_UWord32 packetOH;
+    WebRtc_UWord32 SSRC;
+};
+
+const InputSet set0   = {220,  80, 11111};  // bitRate, packetOH, ssrc
+const InputSet set1   = {180,  90, 22222};
+const InputSet set2   = {100, 210, 33333};
+const InputSet set3   = { 35,  40, 44444};
+const InputSet set4   = { 40,  60, 55555};
+const InputSet set4_1 = {100,  60, 55555};
+const InputSet set4_2 = { 10,  60, 55555};
+const InputSet set5   = {200,  40, 66666};
+const InputSet set00  = {  0,  40, 66666};
+
+const int maxBitrate = 230;  // if this is lower than max in the list above test should fail
+
+void Verify(TMMBRSet* boundingSet, int index, InputSet set)
+{
+    assert(boundingSet->ptrTmmbrSet[index]    == set.TMMBR);
+    assert(boundingSet->ptrPacketOHSet[index] == set.packetOH);
+    assert(boundingSet->ptrSsrcSet[index]     == set.SSRC);
+};
+
+int ParseRTCPPacket(const void *data, int len, TMMBRSet*& boundingSet)
+{
+    int numItems = -1;
+    RTCPUtility::RTCPParserV2 rtcpParser((const WebRtc_UWord8*)data, len, true);
+    RTCPUtility::RTCPPacketTypes pktType = rtcpParser.Begin();
+    while (pktType != RTCPUtility::kRtcpNotValidCode)
+    {
+        const RTCPUtility::RTCPPacket& rtcpPacket = rtcpParser.Packet();
+        if (pktType == RTCPUtility::kRtcpRtpfbTmmbnCode)
+        {
+            assert(0 == rtcpPacket.TMMBN.SenderSSRC);
+            assert(0 == rtcpPacket.TMMBN.MediaSSRC);
+            numItems = 0;
+        }
+        if (pktType == RTCPUtility::kRtcpRtpfbTmmbnItemCode)
+        {
+            boundingSet->ptrTmmbrSet[numItems]    = rtcpPacket.TMMBNItem.MaxTotalMediaBitRate;
+            boundingSet->ptrPacketOHSet[numItems] = rtcpPacket.TMMBNItem.MeasuredOverhead;
+            boundingSet->ptrSsrcSet[numItems]     = rtcpPacket.TMMBNItem.SSRC;
+            ++numItems;
+        }
+        pktType = rtcpParser.Iterate();
+    }
+    return numItems;
+};
+
+WebRtc_Word32 GetFile(char* fileName)
+{
+    if (!fileName[0])
+    {
+        return 0;
+    }
+
+    FILE* openFile = fopen(fileName, "rb");
+    assert(openFile != NULL);
+    fseek(openFile, 0, SEEK_END);
+    int len = (WebRtc_Word16)(ftell(openFile));
+    rewind(openFile);
+    assert(len > 0 && len < maxFileLen);
+    fread(dataFile, 1, len, openFile);
+    fclose(openFile);
+    return len;
+};
+
+
+class LoopBackTransport2 : public webrtc::Transport, private TMMBRHelp
+{
+public:
+    LoopBackTransport2(RtpRtcp* rtpRtcpModule)  :
+      TMMBRHelp(false),
+      _rtpRtcpModule(rtpRtcpModule),
+      _cnt(0)
+    {
+    }
+    virtual int SendPacket(int channel, const void *data, int len)
+    {
+        if( 0  == _rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)data, len))
+        {
+            return len;
+        }
+        return -1;
+    }
+    virtual int SendRTCPPacket(int channel, const void *data, int len)
+    {
+        char fileName[256] = {0};
+        TMMBRSet* boundingSet = BoundingSet();
+        boundingSet->VerifyAndAllocateSet(3);
+
+        if (_cnt == 0)
+        {
+            // TMMBN {}
+            // TMMBN {}
+            // TMMBN {}
+            // TMMBN {2,4,0} -> {4,2}
+            assert(2 == ParseRTCPPacket(data, len, boundingSet));
+            Verify(boundingSet, 0, set4);
+            Verify(boundingSet, 1, set2);
+
+            strcpy(fileName, "RTCPPacketTMMBR3.bin");
+        }
+
+        ++_cnt;
+
+        // Get stored rtcp packet w/ TMMBR
+        len = GetFile(fileName);
+        if (len == 0)
+        {
+            return 1;
+        }
+
+        // Send in bitrate request
+        if(_rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)dataFile, len) == 0)
+        {
+            return len;
+        }
+        return -1;
+    }
+    RtpRtcp* _rtpRtcpModule;
+    WebRtc_UWord32       _cnt;
+};
+
+
+class LoopBackTransportVideo : public webrtc::Transport, private TMMBRHelp
+{
+public:
+    LoopBackTransportVideo(RtpRtcp* rtpRtcpModule)  :
+      TMMBRHelp(false),
+      _rtpRtcpModule(rtpRtcpModule),
+      _cnt(0)
+    {
+    }
+    virtual int SendPacket(int channel, const void *data, int len)
+    {
+        if(_rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)data, len)== 0)
+        {
+            return len;
+        }
+        return -1;
+    }
+    virtual int SendRTCPPacket(int channel, const void *data, int len)
+    {
+        char fileName[256] = {0};
+        TMMBRSet* boundingSet = BoundingSet();
+        boundingSet->VerifyAndAllocateSet(3);
+
+        if (_cnt == 0)
+        {
+            strcpy(fileName, "RTCPPacketTMMBR0.bin");
+        }
+        else if (_cnt == 1)
+        {
+            // TMMBN {0} -> {0}
+            assert(1 == ParseRTCPPacket(data, len, boundingSet));
+            Verify(boundingSet, 0, set0);
+
+            strcpy(fileName, "RTCPPacketTMMBR1.bin");
+        }
+        else if (_cnt == 2)
+        {
+            // TMMBN {0,1} -> {1}
+            assert(1 == ParseRTCPPacket(data, len, boundingSet));
+            Verify(boundingSet, 0, set1);
+
+            strcpy(fileName, "RTCPPacketTMMBR2.bin");
+        }
+        else if (_cnt == 3)
+        {
+            // TMMBN {0,1,2} -> {2}
+            assert(1 == ParseRTCPPacket(data, len, boundingSet));
+            Verify(boundingSet, 0, set2);
+
+            strcpy(fileName, "RTCPPacketTMMBR3.bin");
+        }
+        else if (_cnt == 4)
+        {
+            // TMMBN {0,1,2,3} -> {3,2}
+            assert(2 == ParseRTCPPacket(data, len, boundingSet));
+            Verify(boundingSet, 0, set3);
+            Verify(boundingSet, 1, set2);
+
+            strcpy(fileName, "RTCPPacketTMMBR4.bin");
+        }
+        else if (_cnt == 5)
+        {
+            // TMMBN {0,1,2,3,4} -> {3,4,2}
+            assert(3 == ParseRTCPPacket(data, len, boundingSet));
+            Verify(boundingSet, 0, set3);
+            Verify(boundingSet, 1, set4);
+            Verify(boundingSet, 2, set2);
+
+            strcpy(fileName, "RTCPPacketTMMBR5.bin");
+        }
+        else if (_cnt == 6)
+        {
+            // TMMBN {0,1,2,3,4,5} -> {3,4,2}
+            assert(3 == ParseRTCPPacket(data, len, boundingSet));
+            Verify(boundingSet, 0, set3);
+            Verify(boundingSet, 1, set4);
+            Verify(boundingSet, 2, set2);
+
+            strcpy(fileName, "RTCPPacketTMMBR4_2.bin");
+        }
+        else if (_cnt == 7)
+        {
+            // TMMBN {0,1,2,3,4_2,5} -> {4_2}
+            assert(1 == ParseRTCPPacket(data, len, boundingSet));
+            Verify(boundingSet, 0, set4_2);
+
+            ++_cnt;
+            ::Sleep(5*RTCP_INTERVAL_AUDIO_MS + 1000); // time out receiver
+            _rtpRtcpModule->Process();             // SendRTCP() (_cnt == 8)
+                                                   // a receiver has timed out -> UpdateTMMBR()
+        }
+        else if (_cnt == 8)
+        {
+            // No TMMBN in this packet
+            assert(-1 == ParseRTCPPacket(data, len, boundingSet));
+        }
+        else if (_cnt == 10)
+        {
+            // TMMBN {} -> {}, empty set
+            assert(0 == ParseRTCPPacket(data, len, boundingSet));
+
+            strcpy(fileName, "RTCPPacketTMMBR2.bin");
+        }
+        else if (_cnt == 11)
+        {
+            // TMMBN {2} -> {2}
+            assert(1 == ParseRTCPPacket(data, len, boundingSet));
+            Verify(boundingSet, 0, set2);
+        }
+        else if (_cnt == 12) // ----- multi module -------------
+        {
+            // No TMMBN in this packet
+            assert(-1 == ParseRTCPPacket(data, len, boundingSet));
+
+            strcpy(fileName, "RTCPPacketTMMBR4.bin");
+        }
+        else if (_cnt == 13)
+        {
+            // TMMBN {}
+            // TMMBN {}
+            // TMMBN {}
+            // TMMBN {2,4} -> {4,2}
+            assert(2 == ParseRTCPPacket(data, len, boundingSet));
+            Verify(boundingSet, 0, set4);
+            Verify(boundingSet, 1, set2);
+
+            strcpy(fileName, "RTCPPacketTMMBR0.bin");
+        }
+        else if (_cnt == 14)
+        {
+            // TMMBN {}
+            // TMMBN {3}
+            // TMMBN {}
+            // TMMBN {2,4,0} -> {3,4,2}
+            assert(3 == ParseRTCPPacket(data, len, boundingSet));
+            Verify(boundingSet, 0, set3);
+            Verify(boundingSet, 1, set4);
+            Verify(boundingSet, 2, set2);
+
+            strcpy(fileName, "RTCPPacketTMMBR1.bin");
+        }
+        //else if (_cnt == 15)
+        //{
+        //    // TMMBN {}
+        //    // TMMBN {}
+        //    // TMMBN {}
+        //    // TMMBN {2,4,0,1} -> {4,2}
+        //    //assert(2 == ParseRTCPPacket(data, len, boundingSet));
+        //    //Verify(boundingSet, 0, set4);
+        //    //Verify(boundingSet, 1, set2);
+        //}
+        //else if (_cnt == 15)
+        //{
+        //    // No TMMBN in this packet
+        //    assert(-1 == ParseRTCPPacket(data, len, boundingSet));
+        //}
+        else if (_cnt == 15)
+        {
+            // TMMBN {}
+            // TMMBN {}
+            // TMMBN {}
+            // TMMBN {} -> {}, empty set
+            assert(0 == ParseRTCPPacket(data, len, boundingSet));
+        }
+
+        ++_cnt;
+
+        // Get stored rtcp packet w/ TMMBR
+        len = GetFile(fileName);
+        if (len == 0)
+        {
+            return 1;
+        }
+
+        // Send in bitrate request
+        if( 0 == _rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)dataFile, len))
+        {
+            return len;
+        }
+        return -1;
+    }
+
+    RtpRtcp* _rtpRtcpModule;
+    WebRtc_UWord32       _cnt;
+};
+
+class TestTMMBR : private TMMBRHelp
+{
+public:
+    TestTMMBR() : TMMBRHelp(false) {};
+
+    void Add(TMMBRSet* candidateSet, int index, InputSet set)
+    {
+        candidateSet->ptrTmmbrSet[index]    = set.TMMBR;
+        candidateSet->ptrPacketOHSet[index] = set.packetOH;
+        candidateSet->ptrSsrcSet[index]     = set.SSRC;
+    };
+
+    void Start()
+    {
+        // Get sets
+        TMMBRSet* candidateSet = CandidateSet();
+        assert(0 == candidateSet->sizeOfSet);
+        TMMBRSet* boundingSet = BoundingSet();
+        assert(0 == boundingSet->sizeOfSet);
+        TMMBRSet* boundingSetToSend = BoundingSetToSend();
+        assert(0 == boundingSetToSend->sizeOfSet);
+
+        WebRtc_Word32 numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(0 == numBoundingSet); // should be empty
+
+        assert( 0 == SetTMMBRBoundingSetToSend(NULL,0));        // ok to send empty set
+        assert( 0 == SetTMMBRBoundingSetToSend(boundingSet,0)); // ok to send empty set
+
+        WebRtc_UWord32 minBitrateKbit = 0;
+        WebRtc_UWord32 maxBitrateKbit = 0;
+        assert(-1 == CalcMinMaxBitRate(0, 0, 1, false, minBitrateKbit, maxBitrateKbit)); // no bounding set
+
+        // ---------------------------------
+        // Test candidate set {0} -> {0}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(1);
+        assert(1 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set0);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(1 == numBoundingSet);
+        Verify(boundingSet, 0, set0);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, 0));   // incorrect length
+        assert(!IsOwner(set1.SSRC, 100)); // incorrect length
+
+        assert( IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert(!IsOwner(set2.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        assert(boundingSetToSend->sizeOfSet == numBoundingSet);
+        Verify(boundingSetToSend, 0, set0);
+
+        // Get net bitrate depending on packet rate
+        assert( 0 == CalcMinMaxBitRate(0, numBoundingSet, false,0, minBitrateKbit, maxBitrateKbit));
+        assert(set0.TMMBR == minBitrateKbit);
+        assert(set0.TMMBR == maxBitrateKbit);
+        assert(0 == CalcMinMaxBitRate(0, 100, false,0, minBitrateKbit, maxBitrateKbit));  // incorrect length
+        assert(set0.TMMBR == minBitrateKbit);
+        assert(set0.TMMBR == maxBitrateKbit);
+
+        // ---------------------------------
+        // Test candidate set {0,1} -> {1}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(2);
+        assert(2 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set0);
+        Add(candidateSet, 1, set1);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(1 == numBoundingSet);
+        Verify(boundingSet, 0, set1);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert( IsOwner(set1.SSRC, numBoundingSet));
+        assert(!IsOwner(set2.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        assert(boundingSetToSend->sizeOfSet == numBoundingSet);
+        Verify(boundingSetToSend, 0, set1);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0, numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(set1.TMMBR == minBitrateKbit);
+        assert(set0.TMMBR == maxBitrateKbit);
+
+        // ---------------------------------
+        // Test candidate set {0,1,2} -> {2}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(3);
+        assert(3 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set0);
+        Add(candidateSet, 1, set1);
+        Add(candidateSet, 2, set2);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(1 == numBoundingSet);
+        Verify(boundingSet, 0, set2);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert( IsOwner(set2.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        assert(boundingSetToSend->sizeOfSet == numBoundingSet);
+        Verify(boundingSetToSend, 0, set2);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0, numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(set2.TMMBR == minBitrateKbit);
+        assert(set0.TMMBR == maxBitrateKbit);
+
+        // ---------------------------------
+        // Test candidate set {0,1,2,3} -> {3,2}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(4);
+        assert(4 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set0);
+        Add(candidateSet, 1, set1);
+        Add(candidateSet, 2, set2);
+        Add(candidateSet, 3, set3);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(2 == numBoundingSet);
+        Verify(boundingSet, 0, set3);
+        Verify(boundingSet, 1, set2);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert( IsOwner(set2.SSRC, numBoundingSet));
+        assert( IsOwner(set3.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        assert(boundingSetToSend->sizeOfSet == numBoundingSet);
+        Verify(boundingSetToSend, 0, set3);
+        Verify(boundingSetToSend, 1, set2);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0, numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(set3.TMMBR == minBitrateKbit);
+        assert(set0.TMMBR == maxBitrateKbit);
+
+        // ---------------------------------
+        // Test candidate set {0,1,2,3,4} -> {3,4,2}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(5);
+        assert(5 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set0);
+        Add(candidateSet, 1, set1);
+        Add(candidateSet, 2, set2);
+        Add(candidateSet, 3, set3);
+        Add(candidateSet, 4, set4);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(3 == numBoundingSet);
+        Verify(boundingSet, 0, set3);
+        Verify(boundingSet, 1, set4);
+        Verify(boundingSet, 2, set2);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert( IsOwner(set2.SSRC, numBoundingSet));
+        assert( IsOwner(set3.SSRC, numBoundingSet));
+        assert( IsOwner(set4.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        assert(boundingSetToSend->sizeOfSet == numBoundingSet);
+        Verify(boundingSetToSend, 0, set3);
+        Verify(boundingSetToSend, 1, set4);
+        Verify(boundingSetToSend, 2, set2);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0,numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(set3.TMMBR == minBitrateKbit);
+        assert(set0.TMMBR == maxBitrateKbit);
+
+        // ---------------------------------
+        // Test candidate set {0,1,2,3,4,5} -> {3,4,2}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(6);
+        assert(6 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set0);
+        Add(candidateSet, 1, set1);
+        Add(candidateSet, 2, set2);
+        Add(candidateSet, 3, set3);
+        Add(candidateSet, 4, set4);
+        Add(candidateSet, 5, set5);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(3 == numBoundingSet);
+        Verify(boundingSet, 0, set3);
+        Verify(boundingSet, 1, set4);
+        Verify(boundingSet, 2, set2);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert( IsOwner(set2.SSRC, numBoundingSet));
+        assert( IsOwner(set3.SSRC, numBoundingSet));
+        assert( IsOwner(set4.SSRC, numBoundingSet));
+        assert(!IsOwner(set5.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        assert(boundingSetToSend->sizeOfSet == numBoundingSet);
+        Verify(boundingSetToSend, 0, set3);
+        Verify(boundingSetToSend, 1, set4);
+        Verify(boundingSetToSend, 2, set2);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0,numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(set3.TMMBR == minBitrateKbit);
+        assert(set0.TMMBR == maxBitrateKbit);
+
+
+        // ---------------------------------
+        // Test candidate set {1,2,3,4,5} -> {3,4,2}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(5);
+        assert(6 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set1);
+        Add(candidateSet, 1, set2);
+        Add(candidateSet, 2, set3);
+        Add(candidateSet, 3, set4);
+        Add(candidateSet, 4, set5);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(3 == numBoundingSet);
+        Verify(boundingSet, 0, set3);
+        Verify(boundingSet, 1, set4);
+        Verify(boundingSet, 2, set2);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert( IsOwner(set2.SSRC, numBoundingSet));
+        assert( IsOwner(set3.SSRC, numBoundingSet));
+        assert( IsOwner(set4.SSRC, numBoundingSet));
+        assert(!IsOwner(set5.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        assert(boundingSetToSend->sizeOfSet == numBoundingSet);
+        Verify(boundingSetToSend, 0, set3);
+        Verify(boundingSetToSend, 1, set4);
+        Verify(boundingSetToSend, 2, set2);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0,numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(set3.TMMBR == minBitrateKbit);
+        assert(set5.TMMBR == maxBitrateKbit);
+
+
+        // ---------------------------------
+        // Test candidate set {1,3,4,5} -> {3,4}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(4);
+        assert(6 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set1);
+        Add(candidateSet, 1, set3);
+        Add(candidateSet, 2, set4);
+        Add(candidateSet, 3, set5);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(2 == numBoundingSet);
+        Verify(boundingSet, 0, set3);
+        Verify(boundingSet, 1, set4);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert(!IsOwner(set2.SSRC, numBoundingSet));
+        assert( IsOwner(set3.SSRC, numBoundingSet));
+        assert( IsOwner(set4.SSRC, numBoundingSet));
+        assert(!IsOwner(set5.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        Verify(boundingSetToSend, 0, set3);
+        Verify(boundingSetToSend, 1, set4);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0, numBoundingSet,true,0,  minBitrateKbit, maxBitrateKbit));
+        assert(set3.TMMBR == minBitrateKbit);
+        assert(set5.TMMBR == maxBitrateKbit);
+
+        // ---------------------------------
+        // Test candidate set {1,2,4,5} -> {4,2}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(4);
+        assert(6 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set1);
+        Add(candidateSet, 1, set2);
+        Add(candidateSet, 2, set4);
+        Add(candidateSet, 3, set5);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(2 == numBoundingSet);
+        Verify(boundingSet, 0, set4);
+        Verify(boundingSet, 1, set2);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert( IsOwner(set2.SSRC, numBoundingSet));
+        assert(!IsOwner(set3.SSRC, numBoundingSet));
+        assert( IsOwner(set4.SSRC, numBoundingSet));
+        assert(!IsOwner(set5.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        Verify(boundingSetToSend, 0, set4);
+        Verify(boundingSetToSend, 1, set2);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0, numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(set4.TMMBR == minBitrateKbit);
+        assert(set5.TMMBR == maxBitrateKbit);
+
+        // ---------------------------------
+        // Test candidate set {1,2,3,5} -> {3,2}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(4);
+        assert(6 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set1);
+        Add(candidateSet, 1, set2);
+        Add(candidateSet, 2, set3);
+        Add(candidateSet, 3, set5);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(2 == numBoundingSet);
+        Verify(boundingSet, 0, set3);
+        Verify(boundingSet, 1, set2);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert( IsOwner(set2.SSRC, numBoundingSet));
+        assert( IsOwner(set3.SSRC, numBoundingSet));
+        assert(!IsOwner(set4.SSRC, numBoundingSet));
+        assert(!IsOwner(set5.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        Verify(boundingSetToSend, 0, set3);
+        Verify(boundingSetToSend, 1, set2);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0, numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(set3.TMMBR == minBitrateKbit);
+        assert(set5.TMMBR == maxBitrateKbit);
+
+        // ---------------------------------
+        // Test candidate set {1,2,3,4_1,5} -> {3,2}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(5);
+        assert(6 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set1);
+        Add(candidateSet, 1, set2);
+        Add(candidateSet, 2, set3);
+        Add(candidateSet, 3, set4_1);
+        Add(candidateSet, 4, set5);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(2 == numBoundingSet);
+        Verify(boundingSet, 0, set3);
+        Verify(boundingSet, 1, set2);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert( IsOwner(set2.SSRC, numBoundingSet));
+        assert( IsOwner(set3.SSRC, numBoundingSet));
+        assert(!IsOwner(set4.SSRC, numBoundingSet));
+        assert(!IsOwner(set5.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        Verify(boundingSetToSend, 0, set3);
+        Verify(boundingSetToSend, 1, set2);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0, numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(set3.TMMBR == minBitrateKbit);
+        assert(set5.TMMBR == maxBitrateKbit);
+
+        // ---------------------------------
+        // Test candidate set {1,2,3,4_2,5} -> {4_2}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(5);
+        assert(6 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set1);
+        Add(candidateSet, 1, set2);
+        Add(candidateSet, 2, set3);
+        Add(candidateSet, 3, set4_2);
+        Add(candidateSet, 4, set5);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(1 == numBoundingSet);
+        Verify(boundingSet, 0, set4_2);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert(!IsOwner(set2.SSRC, numBoundingSet));
+        assert(!IsOwner(set3.SSRC, numBoundingSet));
+        assert( IsOwner(set4.SSRC, numBoundingSet));
+        assert(!IsOwner(set5.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        Verify(boundingSetToSend, 0, set4_2);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0, numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(MIN_VIDEO_BW_MANAGEMENT_BITRATE == minBitrateKbit);
+        assert(set5.TMMBR == maxBitrateKbit);
+
+        // ---------------------------------
+        // Test candidate set {} -> {}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(0);
+        assert(6 == candidateSet->sizeOfSet);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(0 == numBoundingSet);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert(!IsOwner(set2.SSRC, numBoundingSet));
+        assert(!IsOwner(set3.SSRC, numBoundingSet));
+        assert(!IsOwner(set4.SSRC, numBoundingSet));
+        assert(!IsOwner(set5.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+
+        // Get net bitrate depending on packet rate
+        assert(-1 == CalcMinMaxBitRate(0,numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+
+        // ---------------------------------
+        // Test candidate set {x0,5} -> {5}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(2);
+        assert(6 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set00);
+        Add(candidateSet, 1, set5);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(1 == numBoundingSet);
+        Verify(boundingSet, 0, set5);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert(!IsOwner(set2.SSRC, numBoundingSet));
+        assert(!IsOwner(set3.SSRC, numBoundingSet));
+        assert(!IsOwner(set4.SSRC, numBoundingSet));
+        assert( IsOwner(set5.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        Verify(boundingSetToSend, 0, set5);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0,numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(set5.TMMBR == minBitrateKbit);
+        assert(set5.TMMBR == maxBitrateKbit);
+
+        // ---------------------------------
+        // Test candidate set {x0,4,2} -> {4,2}
+        // ---------------------------------
+        candidateSet = VerifyAndAllocateCandidateSet(3);
+        assert(6 == candidateSet->sizeOfSet);
+        Add(candidateSet, 0, set00);
+        Add(candidateSet, 1, set4);
+        Add(candidateSet, 2, set2);
+
+        // Find bounding set
+        numBoundingSet = FindTMMBRBoundingSet(boundingSet);
+        assert(2 == numBoundingSet);
+        Verify(boundingSet, 0, set4);
+        Verify(boundingSet, 1, set2);
+
+        // Is owner of set
+        assert(!IsOwner(set0.SSRC, numBoundingSet));
+        assert(!IsOwner(set1.SSRC, numBoundingSet));
+        assert( IsOwner(set2.SSRC, numBoundingSet));
+        assert(!IsOwner(set3.SSRC, numBoundingSet));
+        assert( IsOwner(set4.SSRC, numBoundingSet));
+        assert(!IsOwner(set5.SSRC, numBoundingSet));
+
+        // Set boundingSet to send
+        assert(0 == SetTMMBRBoundingSetToSend(boundingSet, maxBitrate));
+
+        // Get boundingSet to send
+        boundingSetToSend = BoundingSetToSend();
+        Verify(boundingSetToSend, 0, set4);
+        Verify(boundingSetToSend, 1, set2);
+
+        // Get net bitrate depending on packet rate
+        assert(0 == CalcMinMaxBitRate(0,numBoundingSet, true,0, minBitrateKbit, maxBitrateKbit));
+        assert(set4.TMMBR == minBitrateKbit);
+        assert(set2.TMMBR == maxBitrateKbit);
+    };
+};
+
+class NULLDataZink: public RtpData
+{
+    virtual WebRtc_Word32 OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                                              const WebRtc_UWord16 payloadSize,
+                                              const webrtc::WebRtcRTPHeader* rtpHeader,
+                                              const WebRtc_UWord8* incomingRtpPacket,
+                                              const WebRtc_UWord16 incomingRtpPacketLengt)
+    {
+        return 0;
+    };
+};
+
+
+int _tmain(int argc, _TCHAR* argv[])
+{
+
+    std::string str;
+    std::cout << "------------------------" << std::endl;
+    std::cout << "------ Test TMMBR ------" << std::endl;
+    std::cout << "------------------------" << std::endl;
+    std::cout << "  "  << std::endl;
+
+    // --------------------
+    // Test TMMBRHelp class
+    // --------------------
+    TestTMMBR test;
+    test.Start();
+
+    printf("TMMBRHelp-class test done.\n");
+
+    // ------------------------
+    // Test TMMBR single module
+    // ------------------------
+    RtpRtcp* rtpRtcpModuleVideo = RtpRtcp::CreateRtpRtcp(0, false);
+
+    LoopBackTransportVideo* myLoopBackTransportVideo = new LoopBackTransportVideo(rtpRtcpModuleVideo);
+    assert(0 == rtpRtcpModuleVideo->RegisterSendTransport(myLoopBackTransportVideo));
+
+    assert(false == rtpRtcpModuleVideo->TMMBR());
+    rtpRtcpModuleVideo->SetTMMBRStatus(true);
+    assert(true == rtpRtcpModuleVideo->TMMBR());
+
+    assert(0 == rtpRtcpModuleVideo->RegisterSendPayload( "I420", 96));
+    assert(0 == rtpRtcpModuleVideo->RegisterReceivePayload( "I420", 96));
+
+    // send a RTP packet with SSRC 11111 to get 11111 as the received SSRC
+    assert(0 == rtpRtcpModuleVideo->SetSSRC(11111));
+    const WebRtc_UWord8 testStream[9] = "testtest";
+    assert(0 == rtpRtcpModuleVideo->RegisterIncomingDataCallback(new NULLDataZink())); // needed to avoid error from parsing the incoming stream
+    assert(0 == rtpRtcpModuleVideo->SendOutgoingData(webrtc::kVideoFrameKey,96, 0, testStream, 8));
+
+    // set the SSRC to 0
+    assert(0 == rtpRtcpModuleVideo->SetSSRC(0));
+
+    //
+    assert(0 == rtpRtcpModuleVideo->SetRTCPStatus(kRtcpCompound));
+
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());  // -> incoming TMMBR {0}                     // should this make us remember a TMMBR?
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());  // -> incoming TMMBR {1},   verify TMMBN {0}
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());  // -> incoming TMMBR {2},   verify TMMBN {1}
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());  // -> incoming TMMBR {3},   verify TMMBN {2}
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());  // -> incoming TMMBR {4},   verify TMMBN {3,2}
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());  // -> incoming TMMBR {5},   verify TMMBN {3,4,2}
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());  // -> incoming TMMBR {4_2}, verify TMMBN {3,4,2}
+    assert(0 == rtpRtcpModuleVideo->SendRTCP()); // -> time out receivers,   verify TMMBN {4_2}
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());  // -> incoming TMMBR {2}
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());  // ->                       verify TMMBN {2}
+
+    printf("Single module test done.\n");
+
+    // ------------------------
+    // Test TMMBR multi module
+    // ------------------------
+    RtpRtcp* rtpRtcpModuleVideoDef = RtpRtcp::CreateRtpRtcp(10, false);
+    assert(0 == rtpRtcpModuleVideo->RegisterDefaultModule(rtpRtcpModuleVideoDef));
+
+    RtpRtcp* rtpRtcpModuleVideo1 = RtpRtcp::CreateRtpRtcp(1, false);
+    assert(0 == rtpRtcpModuleVideo1->RegisterDefaultModule(rtpRtcpModuleVideoDef));
+
+    RtpRtcp* rtpRtcpModuleVideo2 = RtpRtcp::CreateRtpRtcp(2, false);
+    assert(0 == rtpRtcpModuleVideo2->RegisterDefaultModule(rtpRtcpModuleVideoDef));
+
+    RtpRtcp* rtpRtcpModuleVideo3 = RtpRtcp::CreateRtpRtcp(3, false);
+    assert(0 == rtpRtcpModuleVideo3->RegisterDefaultModule(rtpRtcpModuleVideoDef));
+
+    LoopBackTransport2* myLoopBackTransport2 = new LoopBackTransport2(rtpRtcpModuleVideo2);
+    assert(0 == rtpRtcpModuleVideo2->RegisterSendTransport(myLoopBackTransport2));
+
+    assert(0 == rtpRtcpModuleVideo2->SetRTCPStatus(kRtcpCompound));
+
+    // set the SSRC to 0
+    assert(0 == rtpRtcpModuleVideo2->SetSSRC(0));
+
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());   // -> incoming TMMBR {4}, verify no TMMBN in this packet
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());   // -> incoming TMMBR {0}, verify TMMBN {4,2}
+    assert(0 == rtpRtcpModuleVideo2->SendRTCP());  // -> incoming TMMBR {3}, verify TMMBN {4,2}
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());   // -> incoming TMMBR {1}, verify TMMBN {3,4,2}
+    ::Sleep(5*RTCP_INTERVAL_AUDIO_MS + 1000);
+    rtpRtcpModuleVideo2->Process();                // time out receiver2 -> UpdateTMMBR()
+    assert(0 == rtpRtcpModuleVideo->SendRTCP());   // verify TMMBN {}
+
+    printf("Multi module test done.\n");
+
+
+    RtpRtcp::DestroyRtpRtcp(rtpRtcpModuleVideo);
+    RtpRtcp::DestroyRtpRtcp(rtpRtcpModuleVideo1);
+    RtpRtcp::DestroyRtpRtcp(rtpRtcpModuleVideo2);
+    RtpRtcp::DestroyRtpRtcp(rtpRtcpModuleVideo3);
+    RtpRtcp::DestroyRtpRtcp(rtpRtcpModuleVideoDef);
+
+    TEST_PASSED();
+    ::Sleep(5000);
+
+    return 0;
+}
+
diff --git a/src/modules/udp_transport/OWNERS b/src/modules/udp_transport/OWNERS
new file mode 100644
index 0000000..3b2a444
--- /dev/null
+++ b/src/modules/udp_transport/OWNERS
@@ -0,0 +1,4 @@
+pwestin@webrtc.org

+henrikg@webrtc.org

+mallinath@webrtc.org

+tomasl@webrtc.org
\ No newline at end of file
diff --git a/src/modules/udp_transport/interface/udp_transport.h b/src/modules/udp_transport/interface/udp_transport.h
new file mode 100644
index 0000000..6596fde
--- /dev/null
+++ b/src/modules/udp_transport/interface/udp_transport.h
@@ -0,0 +1,386 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UDP_TRANSPORT_INTERFACE_UDP_TRANSPORT_H_
+#define WEBRTC_MODULES_UDP_TRANSPORT_INTERFACE_UDP_TRANSPORT_H_
+
+#include "common_types.h"
+#include "module.h"
+#include "typedefs.h"
+
+#define SS_MAXSIZE 128
+#define SS_ALIGNSIZE (sizeof (WebRtc_UWord64))
+#define SS_PAD1SIZE  (SS_ALIGNSIZE - sizeof(WebRtc_Word16))
+#define SS_PAD2SIZE  (SS_MAXSIZE - (sizeof(WebRtc_Word16) + SS_PAD1SIZE +\
+                                    SS_ALIGNSIZE))
+
+// BSD requires use of HAVE_STRUCT_SOCKADDR_SA_LEN
+namespace webrtc {
+struct SocketAddressIn
+{
+    // sin_family should be either AF_INET (IPv4) or AF_INET6 (IPv6)
+#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
+    WebRtc_Word8      sin_length;
+    WebRtc_Word8      sin_family;
+#else
+    WebRtc_Word16     sin_family;
+#endif
+    WebRtc_UWord16    sin_port;
+    WebRtc_UWord32    sin_addr;
+    WebRtc_Word8      sin_zero[8];
+};
+
+struct Version6InAddress
+{
+    union
+    {
+        WebRtc_UWord8     _s6_u8[16];
+        WebRtc_UWord32    _s6_u32[4];
+        WebRtc_UWord64    _s6_u64[2];
+    } Version6AddressUnion;
+};
+
+struct SocketAddressInVersion6
+{
+    // sin_family should be either AF_INET (IPv4) or AF_INET6 (IPv6)
+#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
+    WebRtc_Word8      sin_length;
+    WebRtc_Word8      sin_family;
+#else
+    WebRtc_Word16     sin_family;
+#endif
+    // Transport layer port number.
+    WebRtc_UWord16 sin6_port;
+    // IPv6 traffic class and flow info or ip4 address.
+    WebRtc_UWord32 sin6_flowinfo;
+    // IPv6 address
+    struct Version6InAddress sin6_addr;
+    // Set of interfaces for a scope.
+    WebRtc_UWord32 sin6_scope_id;
+};
+
+struct SocketAddressStorage
+{
+    // sin_family should be either AF_INET (IPv4) or AF_INET6 (IPv6)
+#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
+    WebRtc_Word8   sin_length;
+    WebRtc_Word8   sin_family;
+#else
+    WebRtc_Word16  sin_family;
+#endif
+    WebRtc_Word8   __ss_pad1[SS_PAD1SIZE];
+    WebRtc_UWord64 __ss_align;
+    WebRtc_Word8   __ss_pad2[SS_PAD2SIZE];
+};
+
+struct SocketAddress
+{
+    union
+    {
+        struct SocketAddressIn _sockaddr_in;
+        struct SocketAddressInVersion6 _sockaddr_in6;
+        struct SocketAddressStorage _sockaddr_storage;
+    };
+};
+
+// Callback class that receives packets from UdpTransport.
+class UdpTransportData
+{
+public:
+    virtual ~UdpTransportData()  {};
+
+    virtual void IncomingRTPPacket(const WebRtc_Word8* incomingRtpPacket,
+                                   const WebRtc_Word32 rtpPacketLength,
+                                   const char* fromIP,
+                                   const WebRtc_UWord16 fromPort) = 0;
+
+    virtual void IncomingRTCPPacket(const WebRtc_Word8* incomingRtcpPacket,
+                                    const WebRtc_Word32 rtcpPacketLength,
+                                    const char* fromIP,
+                                    const WebRtc_UWord16 fromPort) = 0;
+};
+
+
+class UdpTransport : public Module, public Transport
+{
+public:
+    enum
+    {
+        kIpAddressVersion6Length = 64,
+        kIpAddressVersion4Length = 16
+    };
+    enum ErrorCode
+    {
+        kNoSocketError            = 0,
+        kFailedToBindPort         = 1,
+        kIpAddressInvalid         = 2,
+        kAddressInvalid           = 3,
+        kSocketInvalid            = 4,
+        kPortInvalid              = 5,
+        kTosInvalid               = 6,
+        kMulticastAddressInvalid  = 7,
+        kQosError                 = 8,
+        kSocketAlreadyInitialized = 9,
+        kIpVersion6Error          = 10,
+        FILTER_ERROR              = 11,
+        kStartReceiveError        = 12,
+        kStopReceiveError         = 13,
+        kCannotFindLocalIp        = 14,
+        kTosError                 = 16,
+        kNotInitialized           = 17,
+        kPcpError                 = 18
+    };
+
+    // Factory method. Constructor disabled.
+    static UdpTransport* Create(const WebRtc_Word32 id,
+                                WebRtc_UWord8& numSocketThreads);
+    static void Destroy(UdpTransport* module);
+
+    // Prepares the class for sending RTP packets to ipAddr:rtpPort and RTCP
+    // packets to ipAddr:rtpPort+1 if rtcpPort is zero. Otherwise to
+    // ipAddr:rtcpPort.
+    virtual WebRtc_Word32 InitializeSendSockets(
+        const char* ipAddr,
+        const WebRtc_UWord16 rtpPort,
+        const WebRtc_UWord16 rtcpPort = 0) = 0;
+
+    // Register packetCallback for receiving incoming packets. Set the local
+    // RTP port to rtpPort. Bind local IP address to ipAddr. If ipAddr is NULL
+    // bind to local IP ANY. Set the local rtcp port to rtcpPort or rtpPort + 1
+    // if rtcpPort is 0.
+    virtual WebRtc_Word32 InitializeReceiveSockets(
+        UdpTransportData* const packetCallback,
+        const WebRtc_UWord16 rtpPort,
+        const char* ipAddr = NULL,
+        const char* multicastIpAddr = NULL,
+        const WebRtc_UWord16 rtcpPort = 0) = 0;
+
+    // Set local RTP port to rtpPort and RTCP port to rtcpPort or rtpPort + 1 if
+    // rtcpPort is 0. These ports will be used for sending instead of the local
+    // ports set by InitializeReceiveSockets(..).
+    virtual WebRtc_Word32 InitializeSourcePorts(
+        const WebRtc_UWord16 rtpPort,
+        const WebRtc_UWord16 rtcpPort = 0) = 0;
+
+    // Retrieve local ports used for sending if other than the ports specified
+    // by InitializeReceiveSockets(..). rtpPort is set to the RTP port.
+    // rtcpPort is set to the RTCP port.
+    virtual WebRtc_Word32 SourcePorts(WebRtc_UWord16& rtpPort,
+                                      WebRtc_UWord16& rtcpPort) const = 0;
+
+    // Set ipAddr to the IP address that is currently being listened on. rtpPort
+    // to the RTP port listened to. rtcpPort to the RTCP port listened on.
+    // multicastIpAddr to the multicast IP address group joined (the address
+    // is NULL terminated).
+    virtual WebRtc_Word32 ReceiveSocketInformation(
+        char ipAddr[kIpAddressVersion6Length],
+        WebRtc_UWord16& rtpPort,
+        WebRtc_UWord16& rtcpPort,
+        char multicastIpAddr[kIpAddressVersion6Length]) const = 0;
+
+    // Set ipAddr to the IP address being sent from. rtpPort to the local RTP
+    // port used for sending and rtcpPort to the local RTCP port used for
+    // sending.
+    virtual WebRtc_Word32 SendSocketInformation(
+        char ipAddr[kIpAddressVersion6Length],
+        WebRtc_UWord16& rtpPort,
+        WebRtc_UWord16& rtcpPort) const = 0;
+
+    // Put the IP address, RTP port and RTCP port from the last received packet
+    // into ipAddr, rtpPort and rtcpPort respectively.
+    virtual WebRtc_Word32 RemoteSocketInformation(
+        char ipAddr[kIpAddressVersion6Length],
+        WebRtc_UWord16& rtpPort,
+        WebRtc_UWord16& rtcpPort) const = 0;
+
+    // Enable/disable quality of service if QoS is true or false respectively.
+    // Set the type of service to serviceType, max bitrate in kbit/s to
+    // maxBitrate and override DSCP if overrideDSCP is not 0.
+    // Note: Must be called both InitializeSendSockets() and
+    // InitializeReceiveSockets() has been called.
+    virtual WebRtc_Word32 SetQoS(const bool QoS,
+                                 const WebRtc_Word32 serviceType,
+                                 const WebRtc_UWord32 maxBitrate = 0,
+                                 const WebRtc_Word32 overrideDSCP = 0,
+                                 const bool audio = false) = 0;
+
+    // Set QoS to true if quality of service has been turned on. If QoS is true,
+    // also set serviceType to type of service and overrideDSCP to override
+    // DSCP.
+    virtual WebRtc_Word32 QoS(bool& QoS,
+                              WebRtc_Word32& serviceType,
+                              WebRtc_Word32& overrideDSCP) const = 0;
+
+    // Set type of service.
+    virtual WebRtc_Word32 SetToS(const WebRtc_Word32 DSCP,
+                                 const bool useSetSockOpt = false) = 0;
+
+    // Get type of service configuration.
+    virtual WebRtc_Word32 ToS(WebRtc_Word32& DSCP,
+                              bool& useSetSockOpt) const = 0;
+
+    // Set Priority Code Point (IEEE 802.1Q)
+    // Note: for Linux this function will set the priority for the socket,
+    // which then can be mapped to a PCP value with vconfig.
+    virtual WebRtc_Word32 SetPCP(const WebRtc_Word32 PCP) = 0;
+
+    // Get Priority Code Point
+    virtual WebRtc_Word32 PCP(WebRtc_Word32& PCP) const = 0;
+
+    // Enable IPv6.
+    // Note: this API must be called before any call to
+    // InitializeReceiveSockets() or InitializeSendSockets(). It is not
+    // possible to go back to IPv4 (default) after this call.
+    virtual WebRtc_Word32 EnableIpV6() = 0;
+
+    // Return true if IPv6 has been enabled.
+    virtual bool IpV6Enabled() const = 0;
+
+    // Only allow packets received from filterIPAddress to be processed.
+    // Note: must be called after EnableIPv6(), if IPv6 is used.
+    virtual WebRtc_Word32 SetFilterIP(
+        const char filterIPAddress[kIpAddressVersion6Length]) = 0;
+
+    // Write the filter IP address (if any) to filterIPAddress.
+    virtual WebRtc_Word32 FilterIP(
+        char filterIPAddress[kIpAddressVersion6Length]) const = 0;
+
+    // Only allow RTP packets from rtpFilterPort and RTCP packets from
+    // rtcpFilterPort be processed.
+    // Note: must be called after EnableIPv6(), if IPv6 is used.
+    virtual WebRtc_Word32 SetFilterPorts(
+        const WebRtc_UWord16 rtpFilterPort,
+        const WebRtc_UWord16 rtcpFilterPort) = 0;
+
+    // Set rtpFilterPort to the filter RTP port and rtcpFilterPort to the
+    // filter RTCP port (if filtering based on port is enabled).
+    virtual WebRtc_Word32 FilterPorts(WebRtc_UWord16& rtpFilterPort,
+                                      WebRtc_UWord16& rtcpFilterPort) const = 0;
+
+    // Set the number of buffers that the socket implementation may use for
+    // receiving packets to numberOfSocketBuffers. I.e. the number of packets
+    // that can be received in parallell.
+    // Note: this API only has effect on Windows.
+    virtual WebRtc_Word32 StartReceiving(
+        const WebRtc_UWord32 numberOfSocketBuffers) = 0;
+
+    // Stop receive incoming packets.
+    virtual WebRtc_Word32 StopReceiving() = 0;
+
+    // Return true incoming packets are received.
+    virtual bool Receiving() const = 0;
+
+    // Return true if send sockets have been initialized.
+    virtual bool SendSocketsInitialized() const = 0;
+
+    // Return true if local ports for sending has been set.
+    virtual bool SourcePortsInitialized() const = 0;
+
+    // Return true if receive sockets have been initialized.
+    virtual bool ReceiveSocketsInitialized() const = 0;
+
+    // Send data with size length to ip:portnr. The same port as the set
+    // with InitializeSendSockets(..) is used if portnr is 0. The same IP
+    // address as set with InitializeSendSockets(..) is used if ip is NULL.
+    // If isRTCP is true the port used will be the RTCP port.
+    virtual WebRtc_Word32 SendRaw(const WebRtc_Word8* data,
+                                  WebRtc_UWord32 length,
+                                  WebRtc_Word32 isRTCP,
+                                  WebRtc_UWord16 portnr = 0,
+                                  const char* ip = NULL) = 0;
+
+    // Send RTP data with size length to the address specified by to.
+    virtual WebRtc_Word32 SendRTPPacketTo(const WebRtc_Word8* data,
+                                          WebRtc_UWord32 length,
+                                          const SocketAddress& to) = 0;
+
+
+    // Send RTCP data with size length to the address specified by to.
+    virtual WebRtc_Word32 SendRTCPPacketTo(const WebRtc_Word8* data,
+                                           WebRtc_UWord32 length,
+                                           const SocketAddress& to) = 0;
+
+    // Send RTP data with size length to ip:rtpPort where ip is the ip set by
+    // the InitializeSendSockets(..) call.
+    virtual WebRtc_Word32 SendRTPPacketTo(const WebRtc_Word8* data,
+                                          WebRtc_UWord32 length,
+                                          WebRtc_UWord16 rtpPort) = 0;
+
+
+    // Send RTCP data with size length to ip:rtcpPort where ip is the ip set by
+    // the InitializeSendSockets(..) call.
+    virtual WebRtc_Word32 SendRTCPPacketTo(const WebRtc_Word8* data,
+                                           WebRtc_UWord32 length,
+                                           WebRtc_UWord16 rtcpPort) = 0;
+
+    // Set the IP address to which packets are sent to ipaddr.
+    virtual WebRtc_Word32 SetSendIP(
+        const char ipaddr[kIpAddressVersion6Length]) = 0;
+
+    // Set the send RTP and RTCP port to rtpPort and rtcpPort respectively.
+    virtual WebRtc_Word32 SetSendPorts(const WebRtc_UWord16 rtpPort,
+                                       const WebRtc_UWord16 rtcpPort = 0) = 0;
+
+    // Retreive the last registered error code.
+    virtual ErrorCode LastError() const = 0;
+
+    // Put the local IPv4 address in localIP.
+    // Note: this API is for IPv4 only.
+    static WebRtc_Word32 LocalHostAddress(WebRtc_UWord32& localIP);
+
+    // Put the local IP6 address in localIP.
+    // Note: this API is for IPv6 only.
+    static WebRtc_Word32 LocalHostAddressIPV6(char localIP[16]);
+
+    // Return a copy of hostOrder (host order) in network order.
+    static WebRtc_UWord16 Htons(WebRtc_UWord16 hostOrder);
+
+    // Return a copy of hostOrder (host order) in network order.
+    static WebRtc_UWord32 Htonl(WebRtc_UWord32 hostOrder);
+
+    // Return IPv4 address in ip as 32 bit integer.
+    static WebRtc_UWord32 InetAddrIPV4(const char* ip);
+
+    // Convert the character string src into a network address structure in
+    // the af address family and put it in dst.
+    // Note: same functionality as inet_pton(..)
+    static WebRtc_Word32 InetPresentationToNumeric(WebRtc_Word32 af,
+                                                   const char* src,
+                                                   void* dst);
+
+    // Set ip and sourcePort according to address. As input parameter ipSize
+    // is the length of ip. As output parameter it's the number of characters
+    // written to ip (not counting the '\0' character).
+    // Note: this API is only implemented on Windows and Linux.
+    static WebRtc_Word32 IPAddress(const SocketAddress& address,
+                                   char* ip,
+                                   WebRtc_UWord32& ipSize,
+                                   WebRtc_UWord16& sourcePort);
+
+    // Set ip and sourcePort according to address. As input parameter ipSize
+    // is the length of ip. As output parameter it's the number of characters
+    // written to ip (not counting the '\0' character).
+    // Note: this API is only implemented on Windows and Linux.
+    // Additional note: this API caches the address of the last call to it. If
+    // address is likley to be the same for multiple calls it may be beneficial
+    // to call this API instead of IPAddress().
+    virtual WebRtc_Word32 IPAddressCached(const SocketAddress& address,
+                                          char* ip,
+                                          WebRtc_UWord32& ipSize,
+                                          WebRtc_UWord16& sourcePort) = 0;
+
+    // Return true if ipaddr is a valid IP address.
+    // If ipV6 is false ipaddr is interpreted as an IPv4 address otherwise it
+    // is interptreted as IPv6.
+    static bool IsIpAddressValid(const char* ipaddr, const bool ipV6);
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UDP_TRANSPORT_INTERFACE_UDP_TRANSPORT_H_
diff --git a/src/modules/udp_transport/source/traffic_control_windows.cc b/src/modules/udp_transport/source/traffic_control_windows.cc
new file mode 100644
index 0000000..09038c0
--- /dev/null
+++ b/src/modules/udp_transport/source/traffic_control_windows.cc
@@ -0,0 +1,253 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "traffic_control_windows.h"
+
+#include <assert.h>
+
+#include "trace.h"
+
+namespace webrtc {
+TrafficControlWindows* TrafficControlWindows::instance = NULL;
+WebRtc_UWord32 TrafficControlWindows::refCounter = 0;
+
+TrafficControlWindows::TrafficControlWindows(const WebRtc_Word32 id) : _id(id)
+{
+}
+
+TrafficControlWindows* TrafficControlWindows::GetInstance(
+    const WebRtc_Word32 id)
+{
+    if(instance != NULL)
+    {
+        WEBRTC_TRACE(
+            kTraceDebug,
+            kTraceTransport,
+            id,
+            "TrafficControlWindows - Returning already created object");
+        refCounter++;
+        return instance;
+    }
+
+    WEBRTC_TRACE(kTraceMemory, kTraceTransport, id,
+                 "TrafficControlWindows - Creating new object");
+    instance = new TrafficControlWindows(id);
+    if(instance == NULL)
+    {
+        WEBRTC_TRACE(kTraceMemory, kTraceTransport, id,
+                     "TrafficControlWindows - Error allocating memory");
+        return NULL;
+    }
+
+    instance->tcRegister = NULL;
+    instance->tcDeregister = NULL;
+
+    instance->tcEnumerate = NULL;
+    instance->tcOpenInterface = NULL;
+    instance->tcCloseInterface = NULL;
+
+    instance->tcAddFlow = NULL;
+    instance->tcDeleteFlow = NULL;
+
+    instance->tcAddFilter = NULL;
+    instance->tcDeleteFilter = NULL;
+
+    HMODULE trafficLib = LoadLibrary(TEXT("traffic.dll"));
+    if(trafficLib == NULL)
+    {
+        WEBRTC_TRACE(
+            kTraceWarning,
+            kTraceTransport,
+            id,
+            "TrafficControlWindows - No QOS support, LoadLibrary returned NULL,\
+ last error: %d\n",
+            GetLastError());
+        delete instance;
+        instance = NULL;
+        return NULL;
+    }
+
+    instance->tcRegister = (registerFn)GetProcAddress(trafficLib,
+                                                      "TcRegisterClient");
+    instance->tcDeregister = (deregisterFn)GetProcAddress(trafficLib,
+                                                          "TcDeregisterClient");
+    instance->tcEnumerate = (enumerateFn)GetProcAddress(
+        trafficLib,
+        "TcEnumerateInterfaces");
+    instance->tcOpenInterface = (openInterfaceFn)GetProcAddress(
+        trafficLib,
+        "TcOpenInterfaceW");
+    instance->tcCloseInterface = (closeInterfaceFn)GetProcAddress(
+        trafficLib,
+        "TcCloseInterface");
+    instance->tcAddFlow = (flowAddFn)GetProcAddress(trafficLib,
+                                                    "TcAddFlow");
+    instance->tcDeleteFlow = (flowDeleteFn)GetProcAddress(trafficLib,
+                                                          "TcDeleteFlow");
+
+    instance->tcAddFilter = (filterAddFn)GetProcAddress(trafficLib,
+                                                        "TcAddFilter");
+    instance->tcDeleteFilter = (filterDeleteFn)GetProcAddress(trafficLib,
+                                                              "TcDeleteFilter");
+
+    if(instance->tcRegister       == NULL ||
+       instance->tcDeregister     == NULL ||
+       instance->tcEnumerate      == NULL ||
+       instance->tcOpenInterface  == NULL ||
+       instance->tcCloseInterface == NULL ||
+       instance->tcAddFlow        == NULL ||
+       instance->tcAddFilter      == NULL ||
+       instance->tcDeleteFlow     == NULL ||
+       instance->tcDeleteFilter   == NULL)
+    {
+        delete instance;
+        instance = NULL;
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            id,
+            "TrafficControlWindows - Could not find function pointer for\
+ traffic control functions");
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            id,
+            "Tcregister    : %x, tcDeregister: %x, tcEnumerate: %x,\
+ tcOpenInterface: %x, tcCloseInterface: %x, tcAddFlow: %x, tcAddFilter: %x,\
+ tcDeleteFlow: %x, tcDeleteFilter: %x",
+            instance->tcRegister,
+            instance->tcDeregister,
+            instance->tcEnumerate,
+            instance->tcOpenInterface,
+            instance->tcCloseInterface,
+            instance->tcAddFlow,
+            instance->tcAddFilter,
+            instance->tcDeleteFlow,
+            instance->tcDeleteFilter );
+        return NULL;
+    }
+    refCounter++;
+    return instance;
+}
+
+void TrafficControlWindows::Release(TrafficControlWindows* gtc)
+{
+    if (0 == refCounter)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, -1,
+                     "TrafficControlWindows - Cannot release, refCounter is 0");
+        return;
+    }
+    if (NULL == gtc)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceTransport, -1,
+                     "TrafficControlWindows - Not releasing, gtc is NULL");
+        return;
+    }
+
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, gtc->_id,
+                 "TrafficControlWindows - Releasing object");
+    refCounter--;
+    if ((0 == refCounter) && instance)
+    {
+        WEBRTC_TRACE(kTraceMemory, kTraceTransport, gtc->_id,
+                     "TrafficControlWindows - Deleting object");
+        delete instance;
+        instance = NULL;
+    }
+}
+WebRtc_Word32 TrafficControlWindows::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    return 0;
+}
+
+ULONG TrafficControlWindows::TcRegisterClient(
+    ULONG TciVersion,
+    HANDLE ClRegCtx,
+    PTCI_CLIENT_FUNC_LIST ClientHandlerList,
+    PHANDLE pClientHandle)
+{
+    assert(tcRegister != NULL);
+
+    return tcRegister(TciVersion, ClRegCtx, ClientHandlerList, pClientHandle);
+}
+
+ULONG TrafficControlWindows::TcDeregisterClient(HANDLE clientHandle)
+{
+    assert(tcDeregister != NULL);
+
+    return tcDeregister(clientHandle);
+}
+
+
+ULONG TrafficControlWindows::TcEnumerateInterfaces(
+    HANDLE ClientHandle,
+    PULONG pBufferSize,
+    PTC_IFC_DESCRIPTOR interfaceBuffer)
+{
+    assert(tcEnumerate != NULL);
+
+    return tcEnumerate(ClientHandle, pBufferSize, interfaceBuffer);
+}
+
+
+ULONG TrafficControlWindows::TcOpenInterfaceW(LPWSTR pInterfaceName,
+                                              HANDLE ClientHandle,
+                                              HANDLE ClIfcCtx,
+                                              PHANDLE pIfcHandle)
+{
+    assert(tcOpenInterface != NULL);
+
+    return tcOpenInterface(pInterfaceName, ClientHandle, ClIfcCtx, pIfcHandle);
+
+}
+
+ULONG TrafficControlWindows::TcCloseInterface(HANDLE IfcHandle)
+{
+    assert(tcCloseInterface != NULL);
+
+    return tcCloseInterface(IfcHandle);
+}
+
+ULONG TrafficControlWindows::TcAddFlow(HANDLE IfcHandle, HANDLE ClFlowCtx,
+                                       ULONG  Flags, PTC_GEN_FLOW pGenericFlow,
+                                       PHANDLE pFlowHandle)
+{
+    assert(tcAddFlow != NULL);
+    return tcAddFlow(IfcHandle, ClFlowCtx, Flags, pGenericFlow, pFlowHandle);
+}
+
+ULONG TrafficControlWindows::TcAddFilter(HANDLE FlowHandle,
+                                         PTC_GEN_FILTER pGenericFilter,
+                                         PHANDLE pFilterHandle)
+{
+    assert(tcAddFilter != NULL);
+    return tcAddFilter(FlowHandle, pGenericFilter, pFilterHandle);
+}
+
+ULONG TrafficControlWindows::TcDeleteFlow(HANDLE FlowHandle)
+{
+    assert(tcDeleteFlow != NULL);
+    return tcDeleteFlow(FlowHandle);
+
+}
+
+ULONG TrafficControlWindows::TcDeleteFilter(HANDLE FilterHandle)
+{
+    assert(tcDeleteFilter != NULL);
+    return tcDeleteFilter(FilterHandle);
+}
+
+void MyClNotifyHandler(HANDLE ClRegCtx, HANDLE ClIfcCtx, ULONG Event,
+                       HANDLE SubCode, ULONG BufSize, PVOID Buffer)
+{
+}
+} // namespace webrtc
diff --git a/src/modules/udp_transport/source/traffic_control_windows.h b/src/modules/udp_transport/source/traffic_control_windows.h
new file mode 100644
index 0000000..cfa52ce
--- /dev/null
+++ b/src/modules/udp_transport/source/traffic_control_windows.h
@@ -0,0 +1,99 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_TRAFFIC_CONTROL_WINDOWS_H_
+#define WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_TRAFFIC_CONTROL_WINDOWS_H_
+
+#ifndef WIN32_LEAN_AND_MEAN
+#define WIN32_LEAN_AND_MEAN
+#endif
+
+// Disable deprication warning from traffic.h
+#pragma warning(disable : 4995)
+
+#include <windows.h>
+#include <qos.h>
+#include <ntddndis.h>
+#include <traffic.h>
+
+#include "trace.h"
+
+namespace webrtc {
+void MyClNotifyHandler(HANDLE ClRegCtx, HANDLE ClIfcCtx, ULONG Event,
+                       HANDLE SubCode, ULONG BufSize, PVOID Buffer);
+
+
+typedef ULONG (WINAPI *registerFn)(ULONG, HANDLE, PTCI_CLIENT_FUNC_LIST,
+                                   PHANDLE);
+typedef ULONG (WINAPI *deregisterFn)(HANDLE);
+typedef ULONG (WINAPI *enumerateFn)(HANDLE, PULONG, PTC_IFC_DESCRIPTOR);
+typedef ULONG (WINAPI *openInterfaceFn)(LPWSTR, HANDLE, HANDLE, PHANDLE);
+typedef ULONG (WINAPI *closeInterfaceFn)(HANDLE);
+typedef ULONG (WINAPI *flowAddFn)(HANDLE, HANDLE, ULONG, PTC_GEN_FLOW, PHANDLE);
+typedef ULONG (WINAPI *filterAddFn)(HANDLE, PTC_GEN_FILTER, PHANDLE);
+typedef ULONG (WINAPI *flowDeleteFn)(HANDLE);
+typedef ULONG (WINAPI *filterDeleteFn)(HANDLE);
+
+class TrafficControlWindows
+{
+ public:
+    // Factory method. Constructor disabled.
+    static TrafficControlWindows* GetInstance(const WebRtc_Word32 id);
+    static void Release(TrafficControlWindows* gtc);
+
+    WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    ULONG TcRegisterClient(ULONG TciVersion, HANDLE ClRegCtx,
+                           PTCI_CLIENT_FUNC_LIST ClientHandlerList,
+                           PHANDLE pClientHandle);
+
+    ULONG TcDeregisterClient(HANDLE clientHandle);
+
+    ULONG TcEnumerateInterfaces(HANDLE ClientHandle, PULONG pBufferSize,
+                                PTC_IFC_DESCRIPTOR interfaceBuffer);
+
+    ULONG TcOpenInterfaceW(LPWSTR pInterfaceName, HANDLE ClientHandle,
+                           HANDLE ClIfcCtx, PHANDLE pIfcHandle);
+
+    ULONG TcCloseInterface(HANDLE IfcHandle);
+
+    ULONG TcAddFlow(HANDLE IfcHandle, HANDLE ClFlowCtx, ULONG Flags,
+                    PTC_GEN_FLOW pGenericFlow, PHANDLE pFlowHandle);
+
+    ULONG TcAddFilter(HANDLE FlowHandle, PTC_GEN_FILTER pGenericFilter,
+                      PHANDLE pFilterHandle);
+
+    ULONG TcDeleteFlow(HANDLE FlowHandle);
+    ULONG TcDeleteFilter(HANDLE FilterHandle);
+private:
+    TrafficControlWindows(const WebRtc_Word32 id);
+    WebRtc_Word32 _id;
+    TCI_CLIENT_FUNC_LIST QoSFunctions;
+
+    static TrafficControlWindows* instance;
+
+    registerFn tcRegister;
+    deregisterFn tcDeregister;
+
+    enumerateFn tcEnumerate;
+    openInterfaceFn tcOpenInterface;
+    closeInterfaceFn tcCloseInterface;
+
+    flowAddFn tcAddFlow;
+    flowDeleteFn tcDeleteFlow;
+
+    filterAddFn tcAddFilter;
+    filterDeleteFn tcDeleteFilter;
+
+    static WebRtc_UWord32 refCounter;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_TRAFFIC_CONTROL_WINDOWS_H_
diff --git a/src/modules/udp_transport/source/udp_socket2_manager_windows.cc b/src/modules/udp_transport/source/udp_socket2_manager_windows.cc
new file mode 100644
index 0000000..32863d3
--- /dev/null
+++ b/src/modules/udp_transport/source/udp_socket2_manager_windows.cc
@@ -0,0 +1,657 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "udp_socket2_manager_windows.h"
+
+#include <assert.h>
+#include <stdio.h>
+
+#include "aligned_malloc.h"
+#include "udp_socket2_windows.h"
+
+namespace webrtc {
+WebRtc_UWord32 UdpSocket2ManagerWindows::_numOfActiveManagers = 0;
+bool UdpSocket2ManagerWindows::_wsaInit = false;
+
+UdpSocket2ManagerWindows::UdpSocket2ManagerWindows()
+    : UdpSocketManager(),
+      _id(-1),
+      _stopped(false),
+      _init(false),
+      _pCrit(CriticalSectionWrapper::CreateCriticalSection()),
+      _ioCompletionHandle(NULL),
+      _numActiveSockets(0),
+      _event(EventWrapper::Create())
+{
+    _managerNumber = _numOfActiveManagers++;
+
+    if(_numOfActiveManagers == 1)
+    {
+        WORD wVersionRequested = MAKEWORD(2, 2);
+        WSADATA wsaData;
+        _wsaInit = WSAStartup(wVersionRequested, &wsaData) == 0;
+        // TODO (hellner): seems safer to use RAII for this. E.g. what happens
+        //                 if a UdpSocket2ManagerWindows() created and destroyed
+        //                 without being initialized.
+    }
+}
+
+UdpSocket2ManagerWindows::~UdpSocket2ManagerWindows()
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocket2ManagerWindows(%d)::~UdpSocket2ManagerWindows()",
+                 _managerNumber);
+
+    if(_init)
+    {
+        _pCrit->Enter();
+        if(_numActiveSockets)
+        {
+            _pCrit->Leave();
+            _event->Wait(INFINITE);
+        }
+        else
+        {
+            _pCrit->Leave();
+        }
+        StopWorkerThreads();
+
+        // All threads are stopped. Safe to delete them.
+        ListItem* pItem = NULL;
+        while((pItem = _workerThreadsList.First()) != NULL)
+        {
+            delete static_cast<UdpSocket2WorkerWindows*>(pItem->GetItem());
+            _workerThreadsList.PopFront();
+        }
+
+        _ioContextPool.Free();
+
+        _numOfActiveManagers--;
+        if(_ioCompletionHandle)
+        {
+            CloseHandle(_ioCompletionHandle);
+        }
+        if (_numOfActiveManagers == 0)
+        {
+            if(_wsaInit)
+            {
+                WSACleanup();
+            }
+        }
+    }
+    if(_pCrit)
+    {
+        delete _pCrit;
+    }
+    if(_event)
+    {
+        delete _event;
+    }
+}
+
+bool UdpSocket2ManagerWindows::Init(WebRtc_Word32 id,
+                                    WebRtc_UWord8& numOfWorkThreads) {
+  CriticalSectionScoped cs(_pCrit);
+  if ((_id != -1) || (_numOfWorkThreads != 0)) {
+      assert(_id != -1);
+      assert(_numOfWorkThreads != 0);
+      return false;
+  }
+  _id = id;
+  _numOfWorkThreads = numOfWorkThreads;
+  return true;
+}
+
+WebRtc_Word32 UdpSocket2ManagerWindows::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    return 0;
+}
+
+bool UdpSocket2ManagerWindows::Start()
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocket2ManagerWindows(%d)::Start()",_managerNumber);
+    if(!_init)
+    {
+        StartWorkerThreads();
+    }
+
+    if(!_init)
+    {
+        return false;
+    }
+    _pCrit->Enter();
+    // Start worker threads.
+    _stopped = false;
+    WebRtc_Word32 error = 0;
+    ListItem* pItem = _workerThreadsList.First();
+    UdpSocket2WorkerWindows* pWorker;
+    while(pItem != NULL && !error)
+    {
+        pWorker = (UdpSocket2WorkerWindows*)pItem->GetItem();
+        if(!pWorker->Start())
+            error = 1;
+        pItem = _workerThreadsList.Next(pItem);
+    }
+    if(error)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2ManagerWindows(%d)::Start() error starting worker\
+ threads",
+            _managerNumber);
+        _pCrit->Leave();
+        return false;
+    }
+    _pCrit->Leave();
+    return true;
+}
+
+bool UdpSocket2ManagerWindows::StartWorkerThreads()
+{
+    if(!_init)
+    {
+        _pCrit->Enter();
+
+        _ioCompletionHandle = CreateIoCompletionPort(INVALID_HANDLE_VALUE, NULL,
+                                                     0, 0);
+        if(_ioCompletionHandle == NULL)
+        {
+            WebRtc_Word32 error = GetLastError();
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                _id,
+                "UdpSocket2ManagerWindows(%d)::StartWorkerThreads()"
+                "_ioCompletioHandle == NULL: error:%d",
+                _managerNumber,error);
+            _pCrit->Leave();
+            return false;
+        }
+
+        // Create worker threads.
+        WebRtc_UWord32 i = 0;
+        bool error = false;
+        while(i < _numOfWorkThreads && !error)
+        {
+            UdpSocket2WorkerWindows* pWorker =
+                new UdpSocket2WorkerWindows(_ioCompletionHandle);
+            if(pWorker->Init() != 0)
+            {
+                error = true;
+                delete pWorker;
+                break;
+            }
+            _workerThreadsList.PushFront(pWorker);
+            i++;
+        }
+        if(error)
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                _id,
+                "UdpSocket2ManagerWindows(%d)::StartWorkerThreads() error "
+                "creating work threads",
+                _managerNumber);
+            // Delete worker threads.
+            ListItem* pItem = NULL;
+            while((pItem = _workerThreadsList.First()) != NULL)
+            {
+                delete static_cast<UdpSocket2WorkerWindows*>(pItem->GetItem());
+                _workerThreadsList.PopFront();
+            }
+            _pCrit->Leave();
+            return false;
+        }
+        if(_ioContextPool.Init())
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                _id,
+                "UdpSocket2ManagerWindows(%d)::StartWorkerThreads() error "
+                "initiating _ioContextPool",
+                _managerNumber);
+            _pCrit->Leave();
+            return false;
+        }
+        _init = true;
+        WEBRTC_TRACE(
+            kTraceDebug,
+            kTraceTransport,
+            _id,
+            "UdpSocket2ManagerWindows::StartWorkerThreads %d number of work "
+            "threads created and initialized",
+            _numOfWorkThreads);
+        _pCrit->Leave();
+    }
+    return true;
+}
+
+bool UdpSocket2ManagerWindows::Stop()
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocket2ManagerWindows(%d)::Stop()",_managerNumber);
+
+    if(!_init)
+    {
+        return false;
+    }
+    _pCrit->Enter();
+    _stopped = true;
+    if(_numActiveSockets)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2ManagerWindows(%d)::Stop() there is still active\
+ sockets",
+            _managerNumber);
+        _pCrit->Leave();
+        return false;
+    }
+    // No active sockets. Stop all worker threads.
+    bool result = StopWorkerThreads();
+    _pCrit->Leave();
+    return result;
+}
+
+bool UdpSocket2ManagerWindows::StopWorkerThreads()
+{
+    WebRtc_Word32 error = 0;
+    WEBRTC_TRACE(
+        kTraceDebug,
+        kTraceTransport,
+        _id,
+        "UdpSocket2ManagerWindows(%d)::StopWorkerThreads() Worker\
+ threadsStoped, numActicve Sockets=%d",
+        _managerNumber,
+        _numActiveSockets);
+    UdpSocket2WorkerWindows* pWorker;
+    ListItem* pItem = _workerThreadsList.First();
+
+    // Set worker threads to not alive so that they will stop calling
+    // UdpSocket2WorkerWindows::Run().
+    while(pItem != NULL)
+    {
+        pWorker = (UdpSocket2WorkerWindows*)pItem->GetItem();
+        pWorker->SetNotAlive();
+        pItem = _workerThreadsList.Next(pItem);
+    }
+    // Release all threads waiting for GetQueuedCompletionStatus(..).
+    if(_ioCompletionHandle)
+    {
+        WebRtc_UWord32 i = 0;
+        for(i = 0; i < _workerThreadsList.GetSize(); i++)
+        {
+            PostQueuedCompletionStatus(_ioCompletionHandle, 0 ,0 , NULL);
+        }
+    }
+    pItem = _workerThreadsList.First();
+
+    while(pItem != NULL)
+    {
+        pWorker = (UdpSocket2WorkerWindows*)pItem->GetItem();
+        if(pWorker->Stop() == false)
+        {
+            error = -1;
+            WEBRTC_TRACE(kTraceWarning,  kTraceTransport, -1,
+                         "failed to stop worker thread");
+        }
+        pItem = _workerThreadsList.Next(pItem);
+    }
+
+    if(error)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2ManagerWindows(%d)::StopWorkerThreads() error stopping\
+ worker threads",
+            _managerNumber);
+        return false;
+    }
+    return true;
+}
+
+bool UdpSocket2ManagerWindows::AddSocketPrv(UdpSocket2Windows* s)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocket2ManagerWindows(%d)::AddSocketPrv()",_managerNumber);
+    if(!_init)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2ManagerWindows(%d)::AddSocketPrv() manager not\
+ initialized",
+            _managerNumber);
+        return false;
+    }
+    _pCrit->Enter();
+    if(s == NULL)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2ManagerWindows(%d)::AddSocketPrv() socket == NULL",
+            _managerNumber);
+        _pCrit->Leave();
+        return false;
+    }
+    if(s->GetFd() == NULL || s->GetFd() == INVALID_SOCKET)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2ManagerWindows(%d)::AddSocketPrv() socket->GetFd() ==\
+ %d",
+            _managerNumber,
+            (WebRtc_Word32)s->GetFd());
+        _pCrit->Leave();
+        return false;
+
+    }
+    _ioCompletionHandle = CreateIoCompletionPort((HANDLE)s->GetFd(),
+                                                 _ioCompletionHandle,
+                                                 (ULONG_PTR)(s), 0);
+    if(_ioCompletionHandle == NULL)
+    {
+        WebRtc_Word32 error = GetLastError();
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2ManagerWindows(%d)::AddSocketPrv() Error adding to IO\
+ completion: %d",
+            _managerNumber,
+            error);
+        _pCrit->Leave();
+        return false;
+    }
+    _numActiveSockets++;
+    _pCrit->Leave();
+    return true;
+}
+bool UdpSocket2ManagerWindows::RemoveSocketPrv(UdpSocket2Windows* s)
+{
+    if(!_init)
+    {
+        return false;
+    }
+    _pCrit->Enter();
+    _numActiveSockets--;
+    if(_numActiveSockets == 0)
+    {
+        _event->Set();
+    }
+    _pCrit->Leave();
+    return true;
+}
+
+PerIoContext* UdpSocket2ManagerWindows::PopIoContext()
+{
+    if(!_init)
+    {
+        return NULL;
+    }
+
+    PerIoContext* pIoC = NULL;
+    if(!_stopped)
+    {
+        pIoC = _ioContextPool.PopIoContext();
+    }else
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2ManagerWindows(%d)::PopIoContext() Manager Not started",
+            _managerNumber);
+    }
+    return pIoC;
+}
+
+WebRtc_Word32 UdpSocket2ManagerWindows::PushIoContext(PerIoContext* pIoContext)
+{
+    return _ioContextPool.PushIoContext(pIoContext);
+}
+
+IoContextPool::IoContextPool()
+    : _pListHead(NULL),
+      _init(false),
+      _size(0),
+      _inUse(0)
+{
+}
+
+IoContextPool::~IoContextPool()
+{
+    Free();
+    assert(_size.Value() == 0);
+    AlignedFree(_pListHead);
+}
+
+WebRtc_Word32 IoContextPool::Init(WebRtc_UWord32 /*increaseSize*/)
+{
+    if(_init)
+    {
+        return 0;
+    }
+
+    _pListHead = (PSLIST_HEADER)AlignedMalloc(sizeof(SLIST_HEADER),
+                                              MEMORY_ALLOCATION_ALIGNMENT);
+    if(_pListHead == NULL)
+    {
+        return -1;
+    }
+    InitializeSListHead(_pListHead);
+    _init = true;
+    return 0;
+}
+
+PerIoContext* IoContextPool::PopIoContext()
+{
+    if(!_init)
+    {
+        return NULL;
+    }
+
+    PSLIST_ENTRY pListEntry = InterlockedPopEntrySList(_pListHead);
+    if(pListEntry == NULL)
+    {
+        IoContextPoolItem* item = (IoContextPoolItem*)
+            AlignedMalloc(
+                sizeof(IoContextPoolItem),
+                MEMORY_ALLOCATION_ALIGNMENT);
+        if(item == NULL)
+        {
+            return NULL;
+        }
+        memset(&item->payload.ioContext,0,sizeof(PerIoContext));
+        item->payload.base = item;
+        pListEntry = &(item->itemEntry);
+        ++_size;
+    }
+    ++_inUse;
+    return &((IoContextPoolItem*)pListEntry)->payload.ioContext;
+}
+
+WebRtc_Word32 IoContextPool::PushIoContext(PerIoContext* pIoContext)
+{
+    // TODO (hellner): Overlapped IO should be completed at this point. Perhaps
+    //                 add an assert?
+    const bool overlappedIOCompleted = HasOverlappedIoCompleted(
+        (LPOVERLAPPED)pIoContext);
+
+    IoContextPoolItem* item = ((IoContextPoolItemPayload*)pIoContext)->base;
+
+    const WebRtc_Word32 usedItems = --_inUse;
+    const WebRtc_Word32 totalItems = _size.Value();
+    const WebRtc_Word32 freeItems = totalItems - usedItems;
+    if(freeItems < 0)
+    {
+        assert(false);
+        AlignedFree(item);
+        return -1;
+    }
+    if((freeItems >= totalItems>>1) &&
+        overlappedIOCompleted)
+    {
+        AlignedFree(item);
+        --_size;
+        return 0;
+    }
+    InterlockedPushEntrySList(_pListHead, &(item->itemEntry));
+    return 0;
+}
+
+WebRtc_Word32 IoContextPool::Free()
+{
+    if(!_init)
+    {
+        return 0;
+    }
+
+    WebRtc_Word32 itemsFreed = 0;
+    PSLIST_ENTRY pListEntry = InterlockedPopEntrySList(_pListHead);
+    while(pListEntry != NULL)
+    {
+        IoContextPoolItem* item = ((IoContextPoolItem*)pListEntry);
+        AlignedFree(item);
+        --_size;
+        itemsFreed++;
+        pListEntry = InterlockedPopEntrySList(_pListHead);
+    }
+    return itemsFreed;
+}
+
+WebRtc_Word32 UdpSocket2WorkerWindows::_numOfWorkers = 0;
+
+UdpSocket2WorkerWindows::UdpSocket2WorkerWindows(HANDLE ioCompletionHandle)
+    : _ioCompletionHandle(ioCompletionHandle),
+      _pThread(NULL),
+      _init(false)
+{
+    _workerNumber = _numOfWorkers++;
+    WEBRTC_TRACE(kTraceMemory,  kTraceTransport, -1,
+                 "UdpSocket2WorkerWindows created");
+}
+
+UdpSocket2WorkerWindows::~UdpSocket2WorkerWindows()
+{
+    if(_pThread)
+    {
+        delete _pThread;
+    }
+    WEBRTC_TRACE(kTraceMemory,  kTraceTransport, -1,
+                 "UdpSocket2WorkerWindows deleted");
+}
+
+bool UdpSocket2WorkerWindows::Start()
+{
+    unsigned int id = 0;
+    WEBRTC_TRACE(kTraceStateInfo,  kTraceTransport, -1,
+                 "Start UdpSocket2WorkerWindows");
+    return _pThread->Start(id);
+}
+
+bool UdpSocket2WorkerWindows::Stop()
+{
+    WEBRTC_TRACE(kTraceStateInfo,  kTraceTransport, -1,
+                 "Stop UdpSocket2WorkerWindows");
+    return _pThread->Stop();
+}
+
+void UdpSocket2WorkerWindows::SetNotAlive()
+{
+    WEBRTC_TRACE(kTraceStateInfo,  kTraceTransport, -1,
+                 "SetNotAlive UdpSocket2WorkerWindows");
+    _pThread->SetNotAlive();
+}
+
+WebRtc_Word32 UdpSocket2WorkerWindows::Init()
+{
+    if(!_init)
+    {
+        const char* threadName = "UdpSocket2ManagerWindows_thread";
+        _pThread = ThreadWrapper::CreateThread(Run, this, kRealtimePriority,
+                                               threadName);
+        if(_pThread == NULL)
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                -1,
+                "UdpSocket2WorkerWindows(%d)::Init(), error creating thread!",
+                _workerNumber);
+            return -1;
+        }
+        _init = true;
+    }
+    return 0;
+}
+
+bool UdpSocket2WorkerWindows::Run(ThreadObj obj)
+{
+    UdpSocket2WorkerWindows* pWorker =
+        static_cast<UdpSocket2WorkerWindows*>(obj);
+    return pWorker->Process();
+}
+
+// Process should always return true. Stopping the worker threads is done in
+// the UdpSocket2ManagerWindows::StopWorkerThreads() function.
+bool UdpSocket2WorkerWindows::Process()
+{
+    WebRtc_Word32 success = 0;
+    DWORD ioSize = 0;
+    UdpSocket2Windows* pSocket = NULL;
+    PerIoContext* pIOContext = 0;
+    OVERLAPPED* pOverlapped = 0;
+    success = GetQueuedCompletionStatus(_ioCompletionHandle,
+                                        &ioSize,
+                                       (ULONG_PTR*)&pSocket, &pOverlapped, 200);
+
+    WebRtc_UWord32 error = 0;
+    if(!success)
+    {
+        error = GetLastError();
+        if(error == WAIT_TIMEOUT)
+        {
+            return true;
+        }
+        // This may happen if e.g. PostQueuedCompletionStatus() has been called.
+        // The IO context still needs to be reclaimed or re-used which is done
+        // in UdpSocket2Windows::IOCompleted(..).
+    }
+    if(pSocket == NULL)
+    {
+        WEBRTC_TRACE(
+            kTraceDebug,
+            kTraceTransport,
+            -1,
+            "UdpSocket2WorkerWindows(%d)::Process(), pSocket == 0, end thread",
+            _workerNumber);
+        return true;
+    }
+    pIOContext = (PerIoContext*)pOverlapped;
+    pSocket->IOCompleted(pIOContext,ioSize,error);
+    return true;
+}
+} // namespace webrtc
diff --git a/src/modules/udp_transport/source/udp_socket2_manager_windows.h b/src/modules/udp_transport/source/udp_socket2_manager_windows.h
new file mode 100644
index 0000000..782cb41
--- /dev/null
+++ b/src/modules/udp_transport/source/udp_socket2_manager_windows.h
@@ -0,0 +1,162 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET2_MANAGER_WINDOWS_H_
+#define WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET2_MANAGER_WINDOWS_H_
+
+#if _MSC_VER > 1000
+#pragma once
+#endif
+
+#include <winsock2.h>
+
+#include "atomic32.h"
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "list_wrapper.h"
+#include "thread_wrapper.h"
+#include "udp_socket2_windows.h"
+#include "udp_socket_manager_wrapper.h"
+
+#define MAX_IO_BUFF_SIZE 1600
+
+namespace webrtc {
+enum IO_OPERATION {
+    OP_READ,
+    OP_WRITE
+};
+
+class UdpSocket2Windows;
+
+// Struct used for all socket I/O operations.
+struct PerIoContext {
+    WSAOVERLAPPED overlapped;
+    char buffer[MAX_IO_BUFF_SIZE];
+    WSABUF wsabuf;
+    int nTotalBytes;
+    int nSentBytes;
+    int bytes;
+    IO_OPERATION ioOperation;
+    SocketAddress from;
+    int fromLen;
+    // Should be set to true if the I/O context was passed to the system by
+    // a thread not controlled by the socket implementation.
+    bool ioInitiatedByThreadWrapper;
+    // TODO (hellner): Not used. Delete it.
+    PerIoContext* pNextFree;
+};
+
+struct IoContextPoolItem;
+struct IoContextPoolItemPayload
+{
+    PerIoContext    ioContext;
+    IoContextPoolItem* base;
+};
+
+struct IoContextPoolItem
+{
+    // Atomic single linked list entry header.
+    SLIST_ENTRY itemEntry;
+    // Atomic single linked list payload
+    IoContextPoolItemPayload payload;
+};
+
+class IoContextPool
+{
+public:
+    IoContextPool();
+    virtual ~IoContextPool();
+    virtual WebRtc_Word32 Init(WebRtc_UWord32 increaseSize = 128);
+    // Re-use an old unused IO context or create a new one.
+    virtual PerIoContext* PopIoContext();
+    virtual WebRtc_Word32 PushIoContext(PerIoContext* pIoContext);
+    virtual inline WebRtc_Word32 GetSize(WebRtc_UWord32* inUse = 0)
+    {return _size.Value();}
+    virtual WebRtc_Word32 Free();
+private:
+    // Sample code for use of msfts single linked atomic list can be found here:
+    // http://msdn.microsoft.com/en-us/library/ms686962(VS.85).aspx
+
+    // Atomic single linked list head.
+    PSLIST_HEADER _pListHead;
+
+    bool _init;
+    Atomic32 _size;
+    Atomic32 _inUse;
+};
+
+
+class UdpSocket2ManagerWindows : public UdpSocketManager
+{
+public:
+    UdpSocket2ManagerWindows();
+    virtual ~UdpSocket2ManagerWindows();
+
+    virtual bool Init(WebRtc_Word32 id, WebRtc_UWord8& numOfWorkThreads);
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual bool Start();
+    virtual bool Stop();
+
+    virtual inline bool AddSocket(UdpSocketWrapper* s)
+    {if(s) return AddSocketPrv(reinterpret_cast<UdpSocket2Windows*>(s));
+     return false;}
+    virtual bool RemoveSocket(UdpSocketWrapper* s)
+    {if(s) return RemoveSocketPrv(reinterpret_cast<UdpSocket2Windows*>(s));
+     return false;}
+
+    PerIoContext* PopIoContext(void);
+    WebRtc_Word32 PushIoContext(PerIoContext* pIoContext);
+
+private:
+    bool StopWorkerThreads();
+    bool StartWorkerThreads();
+    bool AddSocketPrv(UdpSocket2Windows* s);
+    bool RemoveSocketPrv(UdpSocket2Windows* s);
+
+    static WebRtc_UWord32 _numOfActiveManagers;
+    static bool _wsaInit;
+
+    WebRtc_Word32 _id;
+    CriticalSectionWrapper* _pCrit;
+    WebRtc_Word32 _managerNumber;
+    volatile bool _stopped;
+    bool _init;
+    WebRtc_Word32 _numActiveSockets;
+    ListWrapper _workerThreadsList;
+    EventWrapper* _event;
+
+    HANDLE _ioCompletionHandle;
+    IoContextPool _ioContextPool;
+};
+
+class UdpSocket2WorkerWindows
+{
+public:
+    UdpSocket2WorkerWindows(HANDLE ioCompletionHandle);
+    virtual ~UdpSocket2WorkerWindows();
+
+    virtual bool Start();
+    virtual bool Stop();
+    virtual WebRtc_Word32 Init();
+    virtual void SetNotAlive();
+protected:
+    static bool Run(ThreadObj obj);
+    bool Process();
+private:
+    HANDLE _ioCompletionHandle;
+    ThreadWrapper*_pThread;
+    static WebRtc_Word32 _numOfWorkers;
+    WebRtc_Word32 _workerNumber;
+    volatile bool _stop;
+    bool _init;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET2_MANAGER_WINDOWS_H_
diff --git a/src/modules/udp_transport/source/udp_socket2_windows.cc b/src/modules/udp_transport/source/udp_socket2_windows.cc
new file mode 100644
index 0000000..2362cdd
--- /dev/null
+++ b/src/modules/udp_transport/source/udp_socket2_windows.cc
@@ -0,0 +1,1386 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "udp_socket2_windows.h"
+
+#include <assert.h>
+#include <stdlib.h>
+#include <winsock2.h>
+
+#include "traffic_control_windows.h"
+#include "udp_socket2_manager_windows.h"
+
+#pragma warning(disable : 4311)
+
+namespace webrtc {
+typedef struct _QOS_DESTADDR
+{
+    QOS_OBJECT_HDR ObjectHdr;
+    const struct sockaddr* SocketAddress;
+    ULONG SocketAddressLength;
+} QOS_DESTADDR, *LPQOS_DESTADDR;
+
+typedef const QOS_DESTADDR* LPCQOS_DESTADDR;
+
+// TODO (patrikw): seems to be defined in ws2ipdef.h as 3. How come it's
+//                 redefined here (as a different value)?
+#define IP_TOS 8
+
+#define QOS_GENERAL_ID_BASE 2000
+#define QOS_OBJECT_DESTADDR (0x00000004 + QOS_GENERAL_ID_BASE)
+
+UdpSocket2Windows::UdpSocket2Windows(const WebRtc_Word32 id,
+                                     UdpSocketManager* mgr, bool ipV6Enable,
+                                     bool disableGQOS)
+    : _id(id),
+      _qos(true),
+      _iProtocol(0),
+      _outstandingCalls(0),
+      _outstandingCallComplete(0),
+      _terminate(false),
+      _addedToMgr(false),
+      _safeTodelete(false),
+      _outstandingCallsDisabled(false),
+      _clientHandle(NULL),
+      _flowHandle(NULL),
+      _filterHandle(NULL),
+      _flow(NULL),
+      _gtc(NULL),
+      _pcp(-2),
+      _receiveBuffers(0)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceTransport, _id,
+                 "UdpSocket2Windows::UdpSocket2Windows()");
+
+    _wantsIncoming = false;
+    _mgr = static_cast<UdpSocket2ManagerWindows *>(mgr);
+
+    _obj = NULL;
+    _incomingCb = NULL;
+    _socket = INVALID_SOCKET;
+    _pCrit = CriticalSectionWrapper::CreateCriticalSection();
+    _ptrCbRWLock     = RWLockWrapper::CreateRWLock();
+    _ptrDestRWLock   = RWLockWrapper::CreateRWLock();
+    _ptrSocketRWLock = RWLockWrapper::CreateRWLock();
+    _ptrDeleteCrit   = CriticalSectionWrapper::CreateCriticalSection();
+    _ptrDeleteCond   = ConditionVariableWrapper::CreateConditionVariable();
+
+    // Check if QoS is supported.
+    BOOL bProtocolFound = FALSE;
+    WSAPROTOCOL_INFO *lpProtocolBuf = NULL;
+    WSAPROTOCOL_INFO    pProtocolInfo;
+
+    if(!disableGQOS)
+    {
+        DWORD dwBufLen = 0;
+        // Set dwBufLen to the size needed to retreive all the requested
+        // information from WSAEnumProtocols.
+        WebRtc_Word32 nRet = WSAEnumProtocols(NULL, lpProtocolBuf, &dwBufLen);
+        lpProtocolBuf = (WSAPROTOCOL_INFO*)malloc(dwBufLen);
+        nRet = WSAEnumProtocols(NULL, lpProtocolBuf, &dwBufLen);
+
+        if (ipV6Enable)
+        {
+            _iProtocol=AF_INET6;
+        } else {
+            _iProtocol=AF_INET;
+        }
+
+        for (WebRtc_Word32 i=0; i<nRet; i++)
+        {
+            if (_iProtocol == lpProtocolBuf[i].iAddressFamily &&
+                IPPROTO_UDP == lpProtocolBuf[i].iProtocol)
+            {
+                if ((XP1_QOS_SUPPORTED ==
+                     (XP1_QOS_SUPPORTED & lpProtocolBuf[i].dwServiceFlags1)))
+                {
+                    pProtocolInfo = lpProtocolBuf[i];
+                    bProtocolFound = TRUE;
+                    break;
+                }
+            }
+         }
+    }
+
+    if(!bProtocolFound)
+    {
+        free(lpProtocolBuf);
+        _qos=false;
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2Windows::UdpSocket2Windows(), SOCKET_ERROR_NO_QOS,\
+ !bProtocolFound");
+    } else {
+
+        _socket = WSASocket(FROM_PROTOCOL_INFO, FROM_PROTOCOL_INFO,
+                            FROM_PROTOCOL_INFO,&pProtocolInfo, 0,
+                            WSA_FLAG_OVERLAPPED);
+        free(lpProtocolBuf);
+
+        if (_socket != INVALID_SOCKET)
+        {
+            return;
+        } else {
+            _qos = false;
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                _id,
+                "UdpSocket2Windows::UdpSocket2Windows(), SOCKET_ERROR_NO_QOS");
+        }
+    }
+    // QoS not supported.
+    if(ipV6Enable)
+    {
+        _socket = WSASocket(AF_INET6, SOCK_DGRAM, IPPROTO_UDP, 0 , 0,
+                            WSA_FLAG_OVERLAPPED);
+    }else
+    {
+        _socket = WSASocket(AF_INET, SOCK_DGRAM, IPPROTO_UDP, 0 , 0,
+                            WSA_FLAG_OVERLAPPED);
+    }
+    if (_socket == INVALID_SOCKET)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2Windows::UdpSocket2Windows(), INVALID_SOCKET,\
+ WSAerror: %d",
+            WSAGetLastError());
+    }
+
+    // Disable send buffering on the socket to improve CPU usage.
+    // This is done by setting SO_SNDBUF to 0.
+    WebRtc_Word32 nZero = 0;
+    WebRtc_Word32 nRet = setsockopt(_socket, SOL_SOCKET, SO_SNDBUF,
+                                    (char*)&nZero, sizeof(nZero));
+    if( nRet == SOCKET_ERROR )
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2Windows::UdpSocket2Windows(), SOCKET_ERROR,\
+ WSAerror: %d",
+            WSAGetLastError());
+    }
+}
+
+UdpSocket2Windows::~UdpSocket2Windows()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceTransport, _id,
+                 "UdpSocket2Windows::~UdpSocket2Windows()");
+
+    WaitForOutstandingCalls();
+
+    delete _ptrCbRWLock;
+    delete _ptrDeleteCrit;
+    delete _ptrDeleteCond;
+    delete _ptrDestRWLock;
+    delete _ptrSocketRWLock;
+
+    if(_pCrit)
+        delete _pCrit;
+
+    if (_flow)
+    {
+        free(_flow);
+        _flow = NULL;
+    }
+
+    if (_gtc)
+    {
+        if(_filterHandle)
+        {
+            _gtc->TcDeleteFilter(_filterHandle);
+        }
+        if(_flowHandle)
+        {
+            _gtc->TcDeleteFlow(_flowHandle);
+        }
+        TrafficControlWindows::Release( _gtc);
+    }
+}
+
+WebRtc_Word32 UdpSocket2Windows::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    if (_gtc)
+    {
+        _gtc->ChangeUniqueId(id);
+    }
+    return 0;
+}
+
+bool UdpSocket2Windows::ValidHandle()
+{
+    return GetFd() != INVALID_SOCKET;
+}
+
+bool UdpSocket2Windows::SetCallback(CallbackObj obj, IncomingSocketCallback cb)
+{
+    _ptrCbRWLock->AcquireLockExclusive();
+    _obj = obj;
+    _incomingCb = cb;
+    _ptrCbRWLock->ReleaseLockExclusive();
+
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocket2Windows(%d)::SetCallback ",(WebRtc_Word32)this);
+    if(_addedToMgr)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                     "UdpSocket2Windows(%d)::SetCallback alreadey added",
+                     (WebRtc_Word32) this);
+        return false;
+
+    }
+    if (_mgr->AddSocket(this))
+    {
+        WEBRTC_TRACE(
+            kTraceDebug, kTraceTransport, _id,
+            "UdpSocket2Windows(%d)::SetCallback socket added to manager",
+            (WebRtc_Word32)this);
+        _addedToMgr = true;
+        return true;
+    }
+
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocket2Windows(%d)::SetCallback error adding me to mgr",
+                 (WebRtc_Word32) this);
+    return false;
+}
+
+bool UdpSocket2Windows::SetSockopt(WebRtc_Word32 level, WebRtc_Word32 optname,
+                                   const WebRtc_Word8* optval,
+                                   WebRtc_Word32 optlen)
+{
+    bool returnValue = true;
+    if(!AquireSocket())
+    {
+        return false;
+    }
+    if(0 != setsockopt(_socket, level, optname,
+                       reinterpret_cast<const char*>(optval), optlen ))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "UdpSocket2Windows::SetSockopt(), WSAerror:%d",
+                     WSAGetLastError());
+        returnValue = false;
+    }
+    ReleaseSocket();
+    return returnValue;
+}
+
+bool UdpSocket2Windows::StartReceiving(WebRtc_UWord32 receiveBuffers)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocket2Windows(%d)::StartReceiving(%d)",
+                 (WebRtc_Word32)this, receiveBuffers);
+
+    _wantsIncoming = true;
+
+    WebRtc_Word32 numberOfReceiveBuffersToCreate =
+        receiveBuffers - _receiveBuffers.Value();
+    numberOfReceiveBuffersToCreate = (numberOfReceiveBuffersToCreate < 0) ?
+        0 : numberOfReceiveBuffersToCreate;
+
+    WebRtc_Word32 error = 0;
+    for(WebRtc_Word32 i = 0;
+        i < numberOfReceiveBuffersToCreate;
+        i++)
+    {
+        if(PostRecv())
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "UdpSocket2Windows::StartReceiving() i=%d", i);
+            error = -1;
+            break;
+        }
+        ++_receiveBuffers;
+    }
+    if(error == -1)
+    {
+        return false;
+    }
+
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "Socket receiving using:%d number of buffers",
+                 _receiveBuffers.Value());
+    return true;
+}
+
+bool UdpSocket2Windows::StopReceiving()
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocket2Windows::StopReceiving()");
+    _wantsIncoming = false;
+    return true;
+}
+
+bool UdpSocket2Windows::Bind(const SocketAddress& name)
+{
+    const struct sockaddr* addr =
+        reinterpret_cast<const struct sockaddr*>(&name);
+    bool returnValue = true;
+    if(!AquireSocket())
+    {
+        return false;
+    }
+    if (0 != bind(_socket, addr, sizeof(SocketAddress)))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "UdpSocket2Windows::Bind() WSAerror: %d",
+                     WSAGetLastError());
+        returnValue = false;
+    }
+    ReleaseSocket();
+    return returnValue;
+}
+
+WebRtc_Word32 UdpSocket2Windows::SendTo(const WebRtc_Word8* buf,
+                                        WebRtc_Word32 len,
+                                        const SocketAddress& to)
+{
+    WebRtc_Word32 retVal = 0;
+    WebRtc_Word32 error = 0;
+    if(len < 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "UdpSocket2Windows(%d)::SendTo(), len= %d < 0",
+                     (WebRtc_Word32)this, len);
+        return -1;
+    }
+
+    PerIoContext* pIoContext = _mgr->PopIoContext();
+    if(pIoContext == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "UdpSocket2Windows(%d)::SendTo(), pIoContext==0",
+                     (WebRtc_Word32) this);
+        return -1;
+    }
+    // sizeof(pIoContext->buffer) is smaller than the highest number that
+    // can be represented by a WebRtc_Word32.
+    if(len >= (WebRtc_Word32) sizeof(pIoContext->buffer))
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2Windows(%d)::SendTo(), len= %d > buffer_size = %d",
+            (WebRtc_Word32) this,
+            len,sizeof(pIoContext->buffer));
+        len = sizeof(pIoContext->buffer);
+    }
+
+    memcpy(pIoContext->buffer,buf,len);
+    pIoContext->wsabuf.buf = pIoContext->buffer;
+    pIoContext->wsabuf.len = len;
+    pIoContext->fromLen=sizeof(SocketAddress);
+    pIoContext->ioOperation = OP_WRITE;
+    pIoContext->nTotalBytes = len;
+    pIoContext->nSentBytes=0;
+
+    DWORD numOfbytesSent = 0;
+    const struct sockaddr* addr = reinterpret_cast<const struct sockaddr*>(&to);
+
+    if(!AquireSocket())
+    {
+        _mgr->PushIoContext(pIoContext);
+        return -1;
+    }
+    // Assume that the WSASendTo call will be successfull to make sure that
+    // _outstandingCalls is positive. Roll back if WSASendTo failed.
+    if(!NewOutstandingCall())
+    {
+        _mgr->PushIoContext(pIoContext);
+        ReleaseSocket();
+        return -1;
+    }
+    retVal = WSASendTo(_socket, &pIoContext->wsabuf, 1, &numOfbytesSent,
+                       0, addr, sizeof(SocketAddress),
+                       &(pIoContext->overlapped), 0);
+    ReleaseSocket();
+
+    if( retVal == SOCKET_ERROR  )
+    {
+        error =  WSAGetLastError();
+        if(error != ERROR_IO_PENDING)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "UdpSocket2Windows::SendTo() WSAerror: %d",error);
+        }
+    }
+    if(retVal == 0 || (retVal == SOCKET_ERROR && error == ERROR_IO_PENDING))
+    {
+        return len;
+    }
+    if((error = _mgr->PushIoContext(pIoContext)))
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2Windows(%d)::SendTo(), error:%d pushing ioContext",
+            (WebRtc_Word32)this, error);
+    }
+
+    // Roll back.
+    OutstandingCallCompleted();
+    return -1;
+}
+
+void UdpSocket2Windows::IOCompleted(PerIoContext* pIOContext,
+                                    WebRtc_UWord32 ioSize, WebRtc_UWord32 error)
+{
+    if(pIOContext == NULL || error == ERROR_OPERATION_ABORTED)
+    {
+        if ((pIOContext != NULL) &&
+            !pIOContext->ioInitiatedByThreadWrapper &&
+            (error == ERROR_OPERATION_ABORTED) &&
+            (pIOContext->ioOperation == OP_READ) &&
+            _outstandingCallsDisabled)
+        {
+            // !pIOContext->initiatedIOByThreadWrapper indicate that the I/O
+            // was not initiated by a ThreadWrapper thread.
+            // This may happen if the thread that initiated receiving (e.g.
+            // by calling StartListen())) is deleted before any packets have
+            // been received.
+            // In this case there is no packet in the PerIoContext. Re-use it
+            // to post a new PostRecv(..).
+            // Note 1: the PerIoContext will henceforth be posted by a thread
+            //         that is controlled by the socket implementation.
+            // Note 2: This is more likely to happen to RTCP packets as
+            //         they are less frequent than RTP packets.
+            // Note 3: _outstandingCallsDisabled being false indicates
+            //         that the socket isn't being shut down.
+            // Note 4: This should only happen buffers set to receive packets
+            //         (OP_READ).
+        } else {
+            if(pIOContext == NULL)
+            {
+                WEBRTC_TRACE(
+                    kTraceError,
+                    kTraceTransport,
+                    _id,
+                    "UdpSocket2Windows::IOCompleted(%d,%d,%d), %d",
+                    (WebRtc_Word32)pIOContext,
+                    ioSize,
+                    error,
+                    pIOContext ? (WebRtc_Word32)pIOContext->ioOperation : -1);
+            } else {
+                WEBRTC_TRACE(
+                    kTraceDebug,
+                    kTraceTransport,
+                    _id,
+                    "UdpSocket2Windows::IOCompleted() Operation aborted");
+            }
+            if(pIOContext)
+            {
+                WebRtc_Word32 remainingReceiveBuffers = --_receiveBuffers;
+                if(remainingReceiveBuffers < 0)
+                {
+                    assert(false);
+                }
+                WebRtc_Word32 err = 0;
+                if((err = _mgr->PushIoContext(pIOContext)))
+                {
+                    WEBRTC_TRACE(
+                        kTraceError,
+                        kTraceTransport,
+                        _id,
+                        "UdpSocket2Windows::IOCompleted(), err = %d, when\
+ pushing ioContext after error",
+                        err);
+                }
+            }
+            OutstandingCallCompleted();
+            return;
+        }
+    } // if (pIOContext == NULL || error == ERROR_OPERATION_ABORTED)
+
+    if(pIOContext->ioOperation == OP_WRITE)
+    {
+        _mgr->PushIoContext(pIOContext);
+    }
+    else if(pIOContext->ioOperation == OP_READ)
+    {
+        if(!error && ioSize != 0)
+        {
+            _ptrCbRWLock->AcquireLockShared();
+            if(_wantsIncoming && _incomingCb)
+            {
+                _incomingCb(_obj,
+                            reinterpret_cast<const WebRtc_Word8*>(
+                                pIOContext->wsabuf.buf),
+                            ioSize,
+                            &pIOContext->from);
+            }
+            _ptrCbRWLock->ReleaseLockShared();
+        }
+        WebRtc_Word32 err = PostRecv(pIOContext);
+        if(err == 0)
+        {
+            // The PerIoContext was posted by a thread controlled by the socket
+            // implementation.
+            pIOContext->ioInitiatedByThreadWrapper = true;
+        }
+        OutstandingCallCompleted();
+        return;
+    } else {
+        // Unknown operation. Should not happen. Return pIOContext to avoid
+        // memory leak.
+        assert(false);
+        _mgr->PushIoContext(pIOContext);
+    }
+    OutstandingCallCompleted();
+    // Don't touch any members after OutstandingCallCompleted() since the socket
+    // may be deleted at this point.
+}
+
+WebRtc_Word32 UdpSocket2Windows::PostRecv()
+{
+    PerIoContext* pIoContext=_mgr->PopIoContext();
+    if(pIoContext == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "UdpSocket2Windows(%d)::PostRecv(), pIoContext == 0",
+                     (WebRtc_Word32)this);
+        return -1;
+    }
+    // This function may have been called by thread not controlled by the socket
+    // implementation.
+    pIoContext->ioInitiatedByThreadWrapper = false;
+    return PostRecv(pIoContext);
+}
+
+WebRtc_Word32 UdpSocket2Windows::PostRecv(PerIoContext* pIoContext)
+{
+    if(pIoContext==0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "UdpSocket2Windows(%d)::PostRecv(?), pIoContext==0",
+                     (WebRtc_Word32)this);
+        return -1;
+    }
+
+    DWORD numOfRecivedBytes = 0;
+    DWORD flags = 0;
+    pIoContext->wsabuf.buf = pIoContext->buffer;
+    pIoContext->wsabuf.len = sizeof(pIoContext->buffer);
+    pIoContext->fromLen = sizeof(SocketAddress);
+    pIoContext->ioOperation = OP_READ;
+    WebRtc_Word32 rxError = 0;
+    WebRtc_Word32 nRet = 0;
+    WebRtc_Word32 postingSucessfull = false;
+
+    if(!AquireSocket())
+    {
+        _mgr->PushIoContext(pIoContext);
+        return -1;
+    }
+
+    // Assume that the WSARecvFrom() call will be successfull to make sure that
+    // _outstandingCalls is positive. Roll back if WSARecvFrom() failed.
+    if(!NewOutstandingCall())
+    {
+        _mgr->PushIoContext(pIoContext);
+        ReleaseSocket();
+        return -1;
+    }
+    for(WebRtc_Word32 tries = 0; tries < 10; tries++)
+    {
+        nRet = WSARecvFrom(
+            _socket,
+            &(pIoContext->wsabuf),
+            1,
+            &numOfRecivedBytes,
+            &flags,
+            reinterpret_cast<struct sockaddr*>(&(pIoContext->from)),
+            &(pIoContext->fromLen),
+            &(pIoContext->overlapped),
+            0);
+
+        if( nRet == SOCKET_ERROR)
+        {
+            rxError = WSAGetLastError();
+            if(rxError != ERROR_IO_PENDING)
+            {
+                WEBRTC_TRACE(
+                    kTraceError,
+                    kTraceTransport,
+                    _id,
+                    "UdpSocket2Windows(%d)::PostRecv(?), WSAerror:%d when\
+ posting new recieve,trie:%d",
+                    (WebRtc_Word32)this,
+                    rxError,
+                    tries);
+                // Tell the OS that this is a good place to context switch if
+                // it wants to.
+                Sleep(0);
+            }
+        }
+        if((rxError == ERROR_IO_PENDING) || (nRet == 0))
+        {
+            postingSucessfull = true;
+            break;
+        }
+    }
+    ReleaseSocket();
+
+    if(postingSucessfull)
+    {
+        return 0;
+    }
+    WebRtc_Word32 remainingReceiveBuffers = --_receiveBuffers;
+    if(remainingReceiveBuffers < 0)
+    {
+        assert(false);
+    }
+    WebRtc_Word32 error = 0;
+    if((error = _mgr->PushIoContext(pIoContext)))
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocket2Windows(%d)::PostRecv(?), error:%d when PushIoContext",
+            (WebRtc_Word32)this,
+            error);
+    }
+    // Roll back.
+    OutstandingCallCompleted();
+    return -1;
+}
+
+void UdpSocket2Windows::CloseBlocking()
+{
+    LINGER  lingerStruct;
+
+    lingerStruct.l_onoff = 1;
+    lingerStruct.l_linger = 0;
+    if(AquireSocket())
+    {
+        setsockopt(_socket, SOL_SOCKET, SO_LINGER,
+                   reinterpret_cast<const char*>(&lingerStruct),
+                   sizeof(lingerStruct));
+        ReleaseSocket();
+    }
+
+    _wantsIncoming = false;
+    // Reclaims the socket and prevents it from being used again.
+    InvalidateSocket();
+    DisableNewOutstandingCalls();
+    WaitForOutstandingCalls();
+    delete this;
+}
+
+bool UdpSocket2Windows::SetQos(WebRtc_Word32 serviceType,
+                               WebRtc_Word32 tokenRate,
+                               WebRtc_Word32 bucketSize,
+                               WebRtc_Word32 peekBandwith,
+                               WebRtc_Word32 minPolicedSize,
+                               WebRtc_Word32 maxSduSize,
+                               const SocketAddress &stRemName,
+                               WebRtc_Word32 overrideDSCP)
+{
+    if(_qos == false)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "UdpSocket2Windows::SetQos(), socket not capable of QOS");
+        return false;
+    }
+    if(overrideDSCP != 0)
+    {
+        FLOWSPEC f;
+        WebRtc_Word32 err = CreateFlowSpec(serviceType, tokenRate, bucketSize,
+                                           peekBandwith, minPolicedSize,
+                                           maxSduSize, &f);
+        if(err == -1)
+        {
+            return false;
+        }
+
+        SocketAddress socketName;
+        struct sockaddr_in* name =
+            reinterpret_cast<struct sockaddr_in*>(&socketName);
+        int nameLength = sizeof(SocketAddress);
+        if(AquireSocket())
+        {
+            getsockname(_socket, (struct sockaddr*)name, &nameLength);
+            ReleaseSocket();
+        }
+
+        if(serviceType == 0)
+        {
+            // Disable TOS byte setting.
+            return SetTrafficControl(0, -1, name, &f, &f) == 0;
+        }
+        return SetTrafficControl(overrideDSCP, -1, name, &f, &f) == 0;
+    }
+
+    QOS Qos;
+    DWORD BytesRet;
+    QOS_DESTADDR QosDestaddr;
+
+    memset (&Qos, QOS_NOT_SPECIFIED, sizeof(QOS));
+
+    Qos.SendingFlowspec.ServiceType        = serviceType;
+    Qos.SendingFlowspec.TokenRate          = tokenRate;
+    Qos.SendingFlowspec.TokenBucketSize    = QOS_NOT_SPECIFIED;
+    Qos.SendingFlowspec.PeakBandwidth      = QOS_NOT_SPECIFIED;
+    Qos.SendingFlowspec.DelayVariation     = QOS_NOT_SPECIFIED;
+    Qos.SendingFlowspec.Latency            = QOS_NOT_SPECIFIED;
+    Qos.SendingFlowspec.MinimumPolicedSize = QOS_NOT_SPECIFIED;
+    Qos.SendingFlowspec.MaxSduSize         = QOS_NOT_SPECIFIED;
+
+    // Only ServiceType is needed for receiving.
+    Qos.ReceivingFlowspec.ServiceType        = serviceType;
+    Qos.ReceivingFlowspec.TokenRate          = QOS_NOT_SPECIFIED;
+    Qos.ReceivingFlowspec.TokenBucketSize    = QOS_NOT_SPECIFIED;
+    Qos.ReceivingFlowspec.PeakBandwidth      = QOS_NOT_SPECIFIED;
+    Qos.ReceivingFlowspec.Latency            = QOS_NOT_SPECIFIED;
+    Qos.ReceivingFlowspec.DelayVariation     = QOS_NOT_SPECIFIED;
+    Qos.ReceivingFlowspec.MinimumPolicedSize = QOS_NOT_SPECIFIED;
+    Qos.ReceivingFlowspec.MaxSduSize         = QOS_NOT_SPECIFIED;
+
+    Qos.ProviderSpecific.len = 0;
+
+    Qos.ProviderSpecific.buf = NULL;
+
+    ZeroMemory((WebRtc_Word8 *)&QosDestaddr, sizeof(QosDestaddr));
+
+    OSVERSIONINFOEX osvie;
+    osvie.dwOSVersionInfoSize = sizeof(OSVERSIONINFOEX);
+    GetVersionEx((LPOSVERSIONINFO)&osvie);
+
+//    Operating system        Version number    dwMajorVersion    dwMinorVersion
+//    Windows 7                6.1                6                1
+//    Windows Server 2008 R2   6.1                6                1
+//    Windows Server 2008      6.0                6                0
+//    Windows Vista            6.0                6                0
+//    Windows Server 2003 R2   5.2                5                2
+//    Windows Server 2003      5.2                5                2
+//    Windows XP               5.1                5                1
+//    Windows 2000             5.0                5                0
+
+    // SERVICE_NO_QOS_SIGNALING and QOS_DESTADDR should not be used if version
+    // is 6.0 or greater.
+    if(osvie.dwMajorVersion >= 6)
+    {
+        Qos.SendingFlowspec.MinimumPolicedSize = QOS_NOT_SPECIFIED;
+        Qos.ReceivingFlowspec.ServiceType = serviceType;
+
+    } else {
+        Qos.SendingFlowspec.MinimumPolicedSize =
+            QOS_NOT_SPECIFIED | SERVICE_NO_QOS_SIGNALING;
+        Qos.ReceivingFlowspec.ServiceType =
+            serviceType | SERVICE_NO_QOS_SIGNALING;
+
+        QosDestaddr.ObjectHdr.ObjectType   = QOS_OBJECT_DESTADDR;
+        QosDestaddr.ObjectHdr.ObjectLength = sizeof(QosDestaddr);
+        QosDestaddr.SocketAddress = (SOCKADDR *)&stRemName;
+        if (AF_INET6 == _iProtocol)
+        {
+            QosDestaddr.SocketAddressLength = sizeof(SocketAddressInVersion6);
+        } else {
+            QosDestaddr.SocketAddressLength = sizeof(SocketAddressIn);
+        }
+
+        Qos.ProviderSpecific.len = QosDestaddr.ObjectHdr.ObjectLength;
+        Qos.ProviderSpecific.buf = (char*)&QosDestaddr;
+    }
+
+    if(!AquireSocket()) {
+        return false;
+    }
+    // To set QoS with SIO_SET_QOS the socket must be locally bound first
+    // or the call will fail with error code 10022.
+    WebRtc_Word32 result = WSAIoctl(GetFd(), SIO_SET_QOS, &Qos, sizeof(QOS),
+                                    NULL, 0, &BytesRet, NULL,NULL);
+    ReleaseSocket();
+    if (result == SOCKET_ERROR)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "UdpSocket2Windows::SetQos() WSAerror : %d",
+                     WSAGetLastError());
+        return false;
+    }
+    return true;
+}
+
+WebRtc_Word32 UdpSocket2Windows::SetTOS(WebRtc_Word32 serviceType)
+{
+    SocketAddress socketName;
+
+    struct sockaddr_in* name =
+        reinterpret_cast<struct sockaddr_in*>(&socketName);
+    int nameLength = sizeof(SocketAddress);
+    if(AquireSocket())
+    {
+        getsockname(_socket, (struct sockaddr*)name, &nameLength);
+        ReleaseSocket();
+    }
+
+    WebRtc_Word32 res = SetTrafficControl(serviceType, -1, name);
+    if (res == -1)
+    {
+        OSVERSIONINFO OsVersion;
+        OsVersion.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
+        GetVersionEx (&OsVersion);
+
+        if ((OsVersion.dwMajorVersion == 4)) // NT 4.0
+        {
+            if(SetSockopt(IPPROTO_IP,IP_TOS ,
+                          (WebRtc_Word8*)&serviceType, 4) != 0)
+            {
+                return -1;
+            }
+        }
+    }
+    return res;
+}
+
+WebRtc_Word32 UdpSocket2Windows::SetPCP(WebRtc_Word32 pcp)
+{
+    SocketAddress socketName;
+    struct sockaddr_in* name =
+        reinterpret_cast<struct sockaddr_in*>(&socketName);
+    int nameLength = sizeof(SocketAddress);
+    if(AquireSocket())
+    {
+        getsockname(_socket, (struct sockaddr*)name, &nameLength);
+        ReleaseSocket();
+    }
+    return SetTrafficControl(-1, pcp, name);
+}
+
+WebRtc_Word32 UdpSocket2Windows::SetTrafficControl(
+    WebRtc_Word32 dscp,
+    WebRtc_Word32 pcp,
+    const struct sockaddr_in* name,
+    FLOWSPEC* send, FLOWSPEC* recv)
+{
+    if (pcp == _pcp)
+    {
+        // No change.
+        pcp = -1;
+    }
+    if ((-1 == pcp) && (-1 == dscp))
+    {
+        return 0;
+    }
+    if (!_gtc)
+    {
+        _gtc = TrafficControlWindows::GetInstance(_id);
+    }
+    if (!_gtc)
+    {
+        return -1;
+    }
+    if(_filterHandle)
+    {
+        _gtc->TcDeleteFilter(_filterHandle);
+        _filterHandle = NULL;
+    }
+    if(_flowHandle)
+    {
+        _gtc->TcDeleteFlow(_flowHandle);
+        _flowHandle = NULL;
+    }
+    if(_clientHandle)
+    {
+        _gtc->TcDeregisterClient(_clientHandle);
+        _clientHandle = NULL;
+    }
+    if ((0 == dscp) && (-2 == _pcp) && (-1 == pcp))
+    {
+        // TODO (pwestin): why is this not done before deleting old filter and
+        //                 flow? This scenario should probably be documented in
+        //                 the function declaration.
+        return 0;
+    }
+
+    TCI_CLIENT_FUNC_LIST QoSFunctions;
+    QoSFunctions.ClAddFlowCompleteHandler = NULL;
+    QoSFunctions.ClDeleteFlowCompleteHandler = NULL;
+    QoSFunctions.ClModifyFlowCompleteHandler = NULL;
+    QoSFunctions.ClNotifyHandler = (TCI_NOTIFY_HANDLER)MyClNotifyHandler;
+    // Register the client with Traffic control interface.
+    HANDLE ClientHandle;
+    ULONG result = _gtc->TcRegisterClient(CURRENT_TCI_VERSION, NULL,
+                                          &QoSFunctions,&ClientHandle);
+    if(result != NO_ERROR)
+    {
+        // This is likely caused by the application not being run as
+        // administrator.
+      WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                   "TcRegisterClient returned %d", result);
+        return result;
+    }
+
+    // Find traffic control-enabled network interfaces that matches this
+    // socket's IP address.
+    ULONG BufferSize = 0;
+    result = _gtc->TcEnumerateInterfaces(ClientHandle, &BufferSize, NULL);
+
+    if(result != NO_ERROR && result != ERROR_INSUFFICIENT_BUFFER)
+    {
+        _gtc->TcDeregisterClient(ClientHandle);
+        return result;
+    }
+
+    if(result != ERROR_INSUFFICIENT_BUFFER)
+    {
+        // Empty buffer contains all control-enabled network interfaces. I.e.
+        // QoS is not enabled.
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "QOS faild since QOS is not installed on the interface");
+
+        _gtc->TcDeregisterClient(ClientHandle);
+        return -1;
+    }
+
+    PTC_IFC_DESCRIPTOR pInterfaceBuffer =
+        (PTC_IFC_DESCRIPTOR)malloc(BufferSize);
+    if(pInterfaceBuffer == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "Out ot memory failure");
+        _gtc->TcDeregisterClient(ClientHandle);
+        return ERROR_NOT_ENOUGH_MEMORY;
+    }
+
+    result = _gtc->TcEnumerateInterfaces(ClientHandle, &BufferSize,
+                                         pInterfaceBuffer);
+
+    if(result != NO_ERROR)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "Critical: error enumerating interfaces when passing in correct\
+ buffer size: %d", result);
+        _gtc->TcDeregisterClient(ClientHandle);
+        free(pInterfaceBuffer);
+        return result;
+    }
+
+    PTC_IFC_DESCRIPTOR oneinterface;
+    HANDLE ifcHandle, iFilterHandle, iflowHandle;
+    bool addrFound = false;
+    ULONG filterSourceAddress = ULONG_MAX;
+
+    // Find the interface corresponding to the local address.
+    for(oneinterface = pInterfaceBuffer;
+        oneinterface != (PTC_IFC_DESCRIPTOR)
+            (((WebRtc_Word8*)pInterfaceBuffer) + BufferSize);
+        oneinterface = (PTC_IFC_DESCRIPTOR)
+            ((WebRtc_Word8 *)oneinterface + oneinterface->Length))
+    {
+
+        char interfaceName[500];
+        WideCharToMultiByte(CP_ACP, 0, oneinterface->pInterfaceName, -1,
+                            interfaceName, sizeof(interfaceName), 0, 0 );
+
+        PNETWORK_ADDRESS_LIST addresses =
+            &(oneinterface->AddressListDesc.AddressList);
+        for(LONG i = 0; i < addresses->AddressCount ; i++)
+        {
+            // Only look at TCP/IP addresses.
+            if(addresses->Address[i].AddressType != NDIS_PROTOCOL_ID_TCP_IP)
+            {
+                continue;
+            }
+
+            NETWORK_ADDRESS_IP* pIpAddr =
+                (NETWORK_ADDRESS_IP*)&(addresses->Address[i].Address);
+            struct in_addr in;
+            in.S_un.S_addr = pIpAddr->in_addr;
+            if(pIpAddr->in_addr == name->sin_addr.S_un.S_addr)
+            {
+                filterSourceAddress = pIpAddr->in_addr;
+                addrFound = true;
+            }
+        }
+        if(!addrFound)
+        {
+            continue;
+        } else
+        {
+            break;
+        }
+    }
+    if(!addrFound)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "QOS faild since address is not found");
+        _gtc->TcDeregisterClient(ClientHandle);
+        free(pInterfaceBuffer);
+        return -1;
+    }
+    result = _gtc->TcOpenInterfaceW(oneinterface->pInterfaceName, ClientHandle,
+                                    NULL, &ifcHandle);
+    if(result != NO_ERROR)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "Error opening interface: %d", result);
+        _gtc->TcDeregisterClient(ClientHandle);
+        free(pInterfaceBuffer);
+        return result;
+    }
+
+    // Create flow if one doesn't exist.
+    if (!_flow)
+    {
+        bool addPCP = ((pcp >= 0) || ((-1 == pcp) && (_pcp >= 0)));
+        int allocSize = sizeof(TC_GEN_FLOW) + sizeof(QOS_DS_CLASS) +
+            (addPCP ? sizeof(QOS_TRAFFIC_CLASS) : 0);
+        _flow = (PTC_GEN_FLOW)malloc(allocSize);
+
+        _flow->SendingFlowspec.DelayVariation = QOS_NOT_SPECIFIED;
+        _flow->SendingFlowspec.Latency = QOS_NOT_SPECIFIED;
+        _flow->SendingFlowspec.MaxSduSize = QOS_NOT_SPECIFIED;
+        _flow->SendingFlowspec.MinimumPolicedSize = QOS_NOT_SPECIFIED;
+        _flow->SendingFlowspec.PeakBandwidth = QOS_NOT_SPECIFIED;
+        _flow->SendingFlowspec.ServiceType = SERVICETYPE_BESTEFFORT;
+        _flow->SendingFlowspec.TokenBucketSize = QOS_NOT_SPECIFIED;
+        _flow->SendingFlowspec.TokenRate = QOS_NOT_SPECIFIED;
+
+        _flow->ReceivingFlowspec.DelayVariation = QOS_NOT_SPECIFIED;
+        _flow->ReceivingFlowspec.Latency = QOS_NOT_SPECIFIED;
+        _flow->ReceivingFlowspec.MaxSduSize = QOS_NOT_SPECIFIED;
+        _flow->ReceivingFlowspec.MinimumPolicedSize = QOS_NOT_SPECIFIED;
+        _flow->ReceivingFlowspec.PeakBandwidth = QOS_NOT_SPECIFIED;
+        _flow->ReceivingFlowspec.ServiceType = SERVICETYPE_BESTEFFORT;
+        _flow->ReceivingFlowspec.TokenBucketSize = QOS_NOT_SPECIFIED;
+        _flow->ReceivingFlowspec.TokenRate = QOS_NOT_SPECIFIED;
+
+        QOS_DS_CLASS* dsClass = (QOS_DS_CLASS*)_flow->TcObjects;
+        dsClass->DSField = 0;
+        dsClass->ObjectHdr.ObjectType = QOS_OBJECT_DS_CLASS;
+        dsClass->ObjectHdr.ObjectLength = sizeof(QOS_DS_CLASS);
+
+        if (addPCP)
+        {
+            QOS_TRAFFIC_CLASS* trafficClass = (QOS_TRAFFIC_CLASS*)(dsClass + 1);
+            trafficClass->TrafficClass = 0;
+            trafficClass->ObjectHdr.ObjectType = QOS_OBJECT_TRAFFIC_CLASS;
+            trafficClass->ObjectHdr.ObjectLength = sizeof(QOS_TRAFFIC_CLASS);
+        }
+
+        _flow->TcObjectsLength = sizeof(QOS_DS_CLASS) +
+            (addPCP ? sizeof(QOS_TRAFFIC_CLASS) : 0);
+    } else if (-1 != pcp) {
+        // Reallocate memory since pcp has changed.
+        PTC_GEN_FLOW oldFlow = _flow;
+        bool addPCP = (pcp >= 0);
+        int allocSize = sizeof(TC_GEN_FLOW) + sizeof(QOS_DS_CLASS) +
+            (addPCP ? sizeof(QOS_TRAFFIC_CLASS) : 0);
+        _flow = (PTC_GEN_FLOW)malloc(allocSize);
+
+        // Copy old flow.
+        _flow->ReceivingFlowspec = oldFlow->ReceivingFlowspec;
+        _flow->SendingFlowspec = oldFlow->SendingFlowspec;
+        // The DS info is always the first object.
+        QOS_DS_CLASS* dsClass = (QOS_DS_CLASS*)_flow->TcObjects;
+        QOS_DS_CLASS* oldDsClass = (QOS_DS_CLASS*)oldFlow->TcObjects;
+        dsClass->DSField = oldDsClass->DSField;
+        dsClass->ObjectHdr.ObjectType = oldDsClass->ObjectHdr.ObjectType;
+        dsClass->ObjectHdr.ObjectLength = oldDsClass->ObjectHdr.ObjectLength;
+
+        if (addPCP)
+        {
+            QOS_TRAFFIC_CLASS* trafficClass = (QOS_TRAFFIC_CLASS*)(dsClass + 1);
+            trafficClass->TrafficClass = 0;
+            trafficClass->ObjectHdr.ObjectType = QOS_OBJECT_TRAFFIC_CLASS;
+            trafficClass->ObjectHdr.ObjectLength = sizeof(QOS_TRAFFIC_CLASS);
+        }
+
+        _flow->TcObjectsLength = sizeof(QOS_DS_CLASS) +
+            (addPCP ? sizeof(QOS_TRAFFIC_CLASS) : 0);
+        free(oldFlow);
+    }
+
+    // Setup send and receive flow and DS object.
+    if (dscp >= 0)
+    {
+        if (!send || (0 == dscp))
+        {
+            _flow->SendingFlowspec.DelayVariation = QOS_NOT_SPECIFIED;
+            _flow->SendingFlowspec.Latency = QOS_NOT_SPECIFIED;
+            _flow->SendingFlowspec.MaxSduSize = QOS_NOT_SPECIFIED;
+            _flow->SendingFlowspec.MinimumPolicedSize = QOS_NOT_SPECIFIED;
+            _flow->SendingFlowspec.PeakBandwidth =
+                (0 == dscp ? QOS_NOT_SPECIFIED : POSITIVE_INFINITY_RATE);
+            _flow->SendingFlowspec.ServiceType = SERVICETYPE_BESTEFFORT;
+            _flow->SendingFlowspec.TokenBucketSize = QOS_NOT_SPECIFIED;
+            // 128000 * 10 is 10mbit/s.
+            _flow->SendingFlowspec.TokenRate =
+                (0 == dscp ? QOS_NOT_SPECIFIED : 128000 * 10);
+        }
+        else
+        {
+            _flow->SendingFlowspec.DelayVariation = send->DelayVariation;
+            _flow->SendingFlowspec.Latency = send->Latency;
+            _flow->SendingFlowspec.MaxSduSize = send->MaxSduSize;
+            _flow->SendingFlowspec.MinimumPolicedSize =
+                send->MinimumPolicedSize;
+            _flow->SendingFlowspec.PeakBandwidth = send->PeakBandwidth;
+            _flow->SendingFlowspec.PeakBandwidth = POSITIVE_INFINITY_RATE;
+            _flow->SendingFlowspec.ServiceType = send->ServiceType;
+            _flow->SendingFlowspec.TokenBucketSize = send->TokenBucketSize;
+            _flow->SendingFlowspec.TokenRate = send->TokenRate;
+        }
+
+        if (!recv  || (0 == dscp))
+        {
+            _flow->ReceivingFlowspec.DelayVariation =
+                _flow->SendingFlowspec.DelayVariation;
+            _flow->ReceivingFlowspec.Latency = _flow->SendingFlowspec.Latency;
+            _flow->ReceivingFlowspec.MaxSduSize =
+                _flow->SendingFlowspec.MaxSduSize;
+            _flow->ReceivingFlowspec.MinimumPolicedSize =
+                _flow->SendingFlowspec.MinimumPolicedSize;
+            _flow->ReceivingFlowspec.PeakBandwidth = QOS_NOT_SPECIFIED;
+            _flow->ReceivingFlowspec.ServiceType =
+                0 == dscp ? SERVICETYPE_BESTEFFORT : SERVICETYPE_CONTROLLEDLOAD;
+            _flow->ReceivingFlowspec.TokenBucketSize =
+                _flow->SendingFlowspec.TokenBucketSize;
+            _flow->ReceivingFlowspec.TokenRate =
+                _flow->SendingFlowspec.TokenRate;
+        } else {
+            _flow->ReceivingFlowspec.DelayVariation = recv->DelayVariation;
+            _flow->ReceivingFlowspec.Latency = recv->Latency;
+            _flow->ReceivingFlowspec.MaxSduSize = recv->MaxSduSize;
+            _flow->ReceivingFlowspec.MinimumPolicedSize =
+                recv->MinimumPolicedSize;
+            _flow->ReceivingFlowspec.PeakBandwidth = recv->PeakBandwidth;
+            _flow->ReceivingFlowspec.ServiceType = recv->ServiceType;
+            _flow->ReceivingFlowspec.TokenBucketSize = recv->TokenBucketSize;
+            _flow->ReceivingFlowspec.TokenRate = QOS_NOT_SPECIFIED;
+        }
+
+        // Setup DS (for DSCP value).
+        // DS is always the first object.
+        QOS_DS_CLASS* dsClass = (QOS_DS_CLASS*)_flow->TcObjects;
+        dsClass->DSField = dscp;
+    }
+
+    // Setup PCP (802.1p priority in 802.1Q/VLAN tagging)
+    if (pcp >= 0)
+    {
+        // DS is always first object.
+        QOS_DS_CLASS* dsClass = (QOS_DS_CLASS*)_flow->TcObjects;
+        QOS_TRAFFIC_CLASS* trafficClass = (QOS_TRAFFIC_CLASS*)(dsClass + 1);
+        trafficClass->TrafficClass = pcp;
+    }
+
+    result = _gtc->TcAddFlow(ifcHandle, NULL, 0, _flow, &iflowHandle);
+    if(result != NO_ERROR)
+    {
+        _gtc->TcCloseInterface(ifcHandle);
+        _gtc->TcDeregisterClient(ClientHandle);
+        free(pInterfaceBuffer);
+        return -1;
+    }
+
+    IP_PATTERN filterPattern, mask;
+
+    ZeroMemory((WebRtc_Word8*)&filterPattern, sizeof(IP_PATTERN));
+    ZeroMemory((WebRtc_Word8*)&mask, sizeof(IP_PATTERN));
+
+    filterPattern.ProtocolId = IPPROTO_UDP;
+    // "name" fields already in network order.
+    filterPattern.S_un.S_un_ports.s_srcport = name->sin_port;
+    filterPattern.SrcAddr = filterSourceAddress;
+
+    // Unsigned max of a type corresponds to a bitmask with all bits set to 1.
+    // I.e. the filter should allow all ProtocolIds, any source port and any
+    // IP address
+    mask.ProtocolId = UCHAR_MAX;
+    mask.S_un.S_un_ports.s_srcport = USHRT_MAX;
+    mask.SrcAddr = ULONG_MAX;
+
+    TC_GEN_FILTER filter;
+
+    filter.AddressType = NDIS_PROTOCOL_ID_TCP_IP;
+    filter.Mask = (LPVOID)&mask;
+    filter.Pattern = (LPVOID)&filterPattern;
+    filter.PatternSize = sizeof(IP_PATTERN);
+
+    result = _gtc->TcAddFilter(iflowHandle, &filter, &iFilterHandle);
+    if(result != NO_ERROR)
+    {
+        _gtc->TcDeleteFlow(iflowHandle);
+        _gtc->TcCloseInterface(ifcHandle);
+        _gtc->TcDeregisterClient(ClientHandle);
+        free(pInterfaceBuffer);
+        return result;
+    }
+
+    _flowHandle = iflowHandle;
+    _filterHandle = iFilterHandle;
+    _clientHandle = ClientHandle;
+    if (-1 != pcp)
+    {
+        _pcp = pcp;
+    }
+
+    _gtc->TcCloseInterface(ifcHandle);
+    free(pInterfaceBuffer);
+
+    return 0;
+}
+
+WebRtc_Word32 UdpSocket2Windows::CreateFlowSpec(WebRtc_Word32 serviceType,
+                                                WebRtc_Word32 tokenRate,
+                                                WebRtc_Word32 bucketSize,
+                                                WebRtc_Word32 peekBandwith,
+                                                WebRtc_Word32 minPolicedSize,
+                                                WebRtc_Word32 maxSduSize,
+                                                FLOWSPEC* f)
+{
+    if (!f)
+    {
+        return -1;
+    }
+
+    f->ServiceType        = serviceType;
+    f->TokenRate          = tokenRate;
+    f->TokenBucketSize    = QOS_NOT_SPECIFIED;
+    f->PeakBandwidth      = QOS_NOT_SPECIFIED;
+    f->DelayVariation     = QOS_NOT_SPECIFIED;
+    f->Latency            = QOS_NOT_SPECIFIED;
+    f->MaxSduSize         = QOS_NOT_SPECIFIED;
+    f->MinimumPolicedSize = QOS_NOT_SPECIFIED;
+    return 0;
+}
+
+bool UdpSocket2Windows::NewOutstandingCall()
+{
+    assert(!_outstandingCallsDisabled);
+
+    ++_outstandingCalls;
+    return true;
+}
+
+void UdpSocket2Windows::OutstandingCallCompleted()
+{
+    _ptrDestRWLock->AcquireLockShared();
+    ++_outstandingCallComplete;
+    if((--_outstandingCalls == 0) && _outstandingCallsDisabled)
+    {
+        // When there are no outstanding calls and new outstanding calls are
+        // disabled it is time to terminate.
+        _terminate = true;
+    }
+    _ptrDestRWLock->ReleaseLockShared();
+
+    if((--_outstandingCallComplete == 0) &&
+        (_terminate))
+    {
+        // Only one thread will enter here. The thread with the last outstanding
+        // call.
+        CriticalSectionScoped cs(_ptrDeleteCrit);
+        _safeTodelete = true;
+        _ptrDeleteCond->Wake();
+    }
+}
+
+void UdpSocket2Windows::DisableNewOutstandingCalls()
+{
+    _ptrDestRWLock->AcquireLockExclusive();
+    if(_outstandingCallsDisabled)
+    {
+        // Outstandning calls are already disabled.
+        _ptrDestRWLock->ReleaseLockExclusive();
+        return;
+    }
+    _outstandingCallsDisabled = true;
+    const bool noOutstandingCalls = (_outstandingCalls.Value() == 0);
+    _ptrDestRWLock->ReleaseLockExclusive();
+
+    RemoveSocketFromManager();
+
+    if(noOutstandingCalls)
+    {
+        CriticalSectionScoped cs(_ptrDeleteCrit);
+        _safeTodelete = true;
+        _ptrDeleteCond->Wake();
+    }
+}
+
+void UdpSocket2Windows::WaitForOutstandingCalls()
+{
+    CriticalSectionScoped cs(_ptrDeleteCrit);
+    while(!_safeTodelete)
+    {
+        _ptrDeleteCond->SleepCS(*_ptrDeleteCrit);
+    }
+}
+
+void UdpSocket2Windows::RemoveSocketFromManager()
+{
+    // New outstanding calls should be disabled at this point.
+    assert(_outstandingCallsDisabled);
+
+    if(_addedToMgr)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                     "calling UdpSocketManager::RemoveSocket()");
+        if(_mgr->RemoveSocket(this))
+        {
+            _addedToMgr=false;
+        }
+    }
+}
+
+bool UdpSocket2Windows::AquireSocket()
+{
+    _ptrSocketRWLock->AcquireLockShared();
+    const bool returnValue = _socket != INVALID_SOCKET;
+    if(!returnValue)
+    {
+        _ptrSocketRWLock->ReleaseLockShared();
+    }
+    return returnValue;
+}
+
+void UdpSocket2Windows::ReleaseSocket()
+{
+    _ptrSocketRWLock->ReleaseLockShared();
+}
+
+bool UdpSocket2Windows::InvalidateSocket()
+{
+    _ptrSocketRWLock->AcquireLockExclusive();
+    if(_socket == INVALID_SOCKET)
+    {
+        _ptrSocketRWLock->ReleaseLockExclusive();
+        return true;
+    }
+    // Give the socket back to the system. All socket calls will fail from now
+    // on.
+    if(closesocket(_socket) == SOCKET_ERROR)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "UdpSocket2Windows(%d)::InvalidateSocket() WSAerror: %d",
+                     (WebRtc_Word32)this, WSAGetLastError());
+    }
+    _socket = INVALID_SOCKET;
+    _ptrSocketRWLock->ReleaseLockExclusive();
+    return true;
+}
+} // namespace webrtc
diff --git a/src/modules/udp_transport/source/udp_socket2_windows.h b/src/modules/udp_transport/source/udp_socket2_windows.h
new file mode 100644
index 0000000..8cc46ee
--- /dev/null
+++ b/src/modules/udp_transport/source/udp_socket2_windows.h
@@ -0,0 +1,171 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET2_WINDOWS_H_
+#define WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET2_WINDOWS_H_
+
+// Disable deprication warning from traffic.h
+#pragma warning(disable : 4995)
+
+// Don't change include order for these header files.
+#include <Winsock2.h>
+#include <Ntddndis.h>
+#include <traffic.h>
+
+#include "atomic32.h"
+#include "condition_variable_wrapper.h"
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "list_wrapper.h"
+#include "rw_lock_wrapper.h"
+#include "trace.h"
+#include "udp_socket_wrapper.h"
+#include "udp_socket2_manager_windows.h"
+
+namespace webrtc {
+class UdpSocket2ManagerWindows;
+class TrafficControlWindows;
+struct PerIoContext;
+
+class UdpSocket2Windows : public UdpSocketWrapper
+{
+public:
+    UdpSocket2Windows(const WebRtc_Word32 id, UdpSocketManager* mgr,
+                      bool ipV6Enable = false, bool disableGQOS = false);
+    virtual ~UdpSocket2Windows();
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual bool ValidHandle();
+
+    virtual bool SetCallback(CallbackObj, IncomingSocketCallback);
+
+    virtual bool Bind(const SocketAddress& name);
+    virtual bool SetSockopt(WebRtc_Word32 level, WebRtc_Word32 optname,
+                            const WebRtc_Word8* optval, WebRtc_Word32 optlen);
+
+    virtual bool StartReceiving(const WebRtc_UWord32 receiveBuffers);
+    virtual inline bool StartReceiving() {return StartReceiving(8);}
+    virtual bool StopReceiving();
+
+    virtual WebRtc_Word32 SendTo(const WebRtc_Word8* buf, WebRtc_Word32 len,
+                                 const SocketAddress& to);
+
+    virtual void CloseBlocking();
+
+    virtual SOCKET GetFd() { return _socket;}
+    virtual bool SetQos(WebRtc_Word32 serviceType, WebRtc_Word32 tokenRate,
+                        WebRtc_Word32 bucketSize, WebRtc_Word32 peekBandwith,
+                        WebRtc_Word32 minPolicedSize, WebRtc_Word32 maxSduSize,
+                        const SocketAddress &stRemName,
+                        WebRtc_Word32 overrideDSCP = 0);
+
+    virtual WebRtc_Word32 SetTOS(const WebRtc_Word32 serviceType);
+    virtual WebRtc_Word32 SetPCP(const WebRtc_Word32 pcp);
+
+    virtual WebRtc_UWord32 ReceiveBuffers(){return _receiveBuffers.Value();}
+
+protected:
+    void IOCompleted(PerIoContext* pIOContext, WebRtc_UWord32 ioSize,
+                     WebRtc_UWord32 error);
+
+    WebRtc_Word32 PostRecv();
+    // Use pIoContext to post a new WSARecvFrom(..).
+    WebRtc_Word32 PostRecv(PerIoContext* pIoContext);
+
+private:
+    friend class UdpSocket2WorkerWindows;
+
+    // Set traffic control (TC) flow adding it the interface that matches this
+    // sockets address.
+    // A filter is created and added to the flow.
+    // The flow consists of:
+    // (1) QoS send and receive information (flow specifications).
+    // (2) A DS object (for specifying exact DSCP value).
+    // (3) Possibly a traffic object (for specifying exact 802.1p priority (PCP)
+    //     value).
+    //
+    // dscp values:
+    // -1   don't change the current dscp value.
+    // 0    don't add any flow to TC, unless pcp is specified.
+    // 1-63 Add a flow to TC with the specified dscp value.
+    // pcp values:
+    // -2  Don't add pcp info to the flow, (3) will not be added.
+    // -1  Don't change the current value.
+    // 0-7 Add pcp info to the flow with the specified value,
+    //     (3) will be added.
+    //
+    // If both dscp and pcp are -1 no flow will be created or added to TC.
+    // If dscp is 0 and pcp is 0-7 (1), (2) and (3) will be created.
+    // Note: input parameter values are assumed to be in valid range, checks
+    // must be done by caller.
+    WebRtc_Word32 SetTrafficControl(WebRtc_Word32 dscp, WebRtc_Word32 pcp,
+                                    const struct sockaddr_in* name,
+                                    FLOWSPEC* send = NULL,
+                                    FLOWSPEC* recv = NULL);
+    WebRtc_Word32 CreateFlowSpec(WebRtc_Word32 serviceType,
+                                 WebRtc_Word32 tokenRate,
+                                 WebRtc_Word32 bucketSize,
+                                 WebRtc_Word32 peekBandwith,
+                                 WebRtc_Word32 minPolicedSize,
+                                 WebRtc_Word32 maxSduSize, FLOWSPEC *f);
+
+    WebRtc_Word32 _id;
+    RWLockWrapper* _ptrCbRWLock;
+    IncomingSocketCallback _incomingCb;
+    CallbackObj _obj;
+    bool _qos;
+
+    SocketAddress _remoteAddr;
+    SOCKET _socket;
+    WebRtc_Word32 _iProtocol;
+    UdpSocket2ManagerWindows* _mgr;
+
+    CriticalSectionWrapper* _pCrit;
+    Atomic32 _outstandingCalls;
+    Atomic32 _outstandingCallComplete;
+    volatile bool _terminate;
+    volatile bool _addedToMgr;
+
+    CriticalSectionWrapper* _ptrDeleteCrit;
+    ConditionVariableWrapper* _ptrDeleteCond;
+    bool _safeTodelete;
+
+    RWLockWrapper* _ptrDestRWLock;
+    bool _outstandingCallsDisabled;
+    bool NewOutstandingCall();
+    void OutstandingCallCompleted();
+    void DisableNewOutstandingCalls();
+    void WaitForOutstandingCalls();
+
+    void RemoveSocketFromManager();
+
+    // RWLockWrapper is used as a reference counter for the socket. Write lock
+    // is used for creating and deleting socket. Read lock is used for
+    // accessing the socket.
+    RWLockWrapper* _ptrSocketRWLock;
+    bool AquireSocket();
+    void ReleaseSocket();
+    bool InvalidateSocket();
+
+    // Traffic control handles and structure pointers.
+    HANDLE _clientHandle;
+    HANDLE _flowHandle;
+    HANDLE _filterHandle;
+    PTC_GEN_FLOW _flow;
+    // TrafficControlWindows implements TOS and PCP.
+    TrafficControlWindows* _gtc;
+    // Holds the current pcp value. Can be -2 or 0 - 7.
+    int _pcp;
+
+    Atomic32 _receiveBuffers;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET2_WINDOWS_H_
diff --git a/src/modules/udp_transport/source/udp_socket_manager_posix.cc b/src/modules/udp_transport/source/udp_socket_manager_posix.cc
new file mode 100644
index 0000000..889fb2f
--- /dev/null
+++ b/src/modules/udp_transport/source/udp_socket_manager_posix.cc
@@ -0,0 +1,429 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "udp_socket_manager_posix.h"
+
+#include <strings.h>
+#include <sys/time.h>
+#include <sys/types.h>
+#include <time.h>
+#include <unistd.h>
+
+#include "trace.h"
+#include "udp_socket_posix.h"
+
+namespace webrtc {
+UdpSocketManagerPosix::UdpSocketManagerPosix()
+    : UdpSocketManager(),
+      _id(-1),
+      _critSect(CriticalSectionWrapper::CreateCriticalSection()),
+      _numberOfSocketMgr(-1),
+      _incSocketMgrNextTime(0),
+      _nextSocketMgrToAssign(0),
+      _socketMgr()
+{
+}
+
+bool UdpSocketManagerPosix::Init(WebRtc_Word32 id,
+                                 WebRtc_UWord8& numOfWorkThreads) {
+    CriticalSectionScoped cs(_critSect);
+    if ((_id != -1) || (_numOfWorkThreads != 0)) {
+        assert(_id != -1);
+        assert(_numOfWorkThreads != 0);
+        return false;
+    }
+
+    _id = id;
+    _numberOfSocketMgr = numOfWorkThreads;
+    _numOfWorkThreads = numOfWorkThreads;
+
+    if(MAX_NUMBER_OF_SOCKET_MANAGERS_LINUX < _numberOfSocketMgr)
+    {
+        _numberOfSocketMgr = MAX_NUMBER_OF_SOCKET_MANAGERS_LINUX;
+    }
+    for(int i = 0;i < _numberOfSocketMgr; i++)
+    {
+        _socketMgr[i] = new UdpSocketManagerPosixImpl();
+    }
+    return true;
+}
+
+
+UdpSocketManagerPosix::~UdpSocketManagerPosix()
+{
+    Stop();
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocketManagerPosix(%d)::UdpSocketManagerPosix()",
+                 _numberOfSocketMgr);
+
+    for(int i = 0;i < _numberOfSocketMgr; i++)
+    {
+        delete _socketMgr[i];
+    }
+    delete _critSect;
+}
+
+WebRtc_Word32 UdpSocketManagerPosix::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    return 0;
+}
+
+bool UdpSocketManagerPosix::Start()
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocketManagerPosix(%d)::Start()",
+                 _numberOfSocketMgr);
+
+    _critSect->Enter();
+    bool retVal = true;
+    for(int i = 0;i < _numberOfSocketMgr && retVal; i++)
+    {
+        retVal = _socketMgr[i]->Start();
+    }
+    if(!retVal)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocketManagerPosix(%d)::Start() error starting socket managers",
+            _numberOfSocketMgr);
+    }
+    _critSect->Leave();
+    return retVal;
+}
+
+bool UdpSocketManagerPosix::Stop()
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocketManagerPosix(%d)::Stop()",_numberOfSocketMgr);
+
+    _critSect->Enter();
+    bool retVal = true;
+    for(int i = 0; i < _numberOfSocketMgr && retVal; i++)
+    {
+        retVal = _socketMgr[i]->Stop();
+    }
+    if(!retVal)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocketManagerPosix(%d)::Stop() there are still active socket "
+            "managers",
+            _numberOfSocketMgr);
+    }
+    _critSect->Leave();
+    return retVal;
+}
+
+bool UdpSocketManagerPosix::AddSocket(UdpSocketWrapper* s)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocketManagerPosix(%d)::AddSocket()",_numberOfSocketMgr);
+
+    _critSect->Enter();
+    bool retVal = _socketMgr[_nextSocketMgrToAssign]->AddSocket(s);
+    if(!retVal)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocketManagerPosix(%d)::AddSocket() failed to add socket to\
+ manager",
+            _numberOfSocketMgr);
+    }
+
+    // Distribute sockets on UdpSocketManagerPosixImpls in a round-robin
+    // fashion.
+    if(_incSocketMgrNextTime == 0)
+    {
+        _incSocketMgrNextTime++;
+    } else {
+        _incSocketMgrNextTime = 0;
+        _nextSocketMgrToAssign++;
+        if(_nextSocketMgrToAssign >= _numberOfSocketMgr)
+        {
+            _nextSocketMgrToAssign = 0;
+        }
+    }
+    _critSect->Leave();
+    return retVal;
+}
+
+bool UdpSocketManagerPosix::RemoveSocket(UdpSocketWrapper* s)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocketManagerPosix(%d)::RemoveSocket()",
+                 _numberOfSocketMgr);
+
+    _critSect->Enter();
+    bool retVal = false;
+    for(int i = 0;i < _numberOfSocketMgr && (retVal == false); i++)
+    {
+        retVal = _socketMgr[i]->RemoveSocket(s);
+    }
+    if(!retVal)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "UdpSocketManagerPosix(%d)::RemoveSocket() failed to remove socket\
+ from manager",
+            _numberOfSocketMgr);
+    }
+    _critSect->Leave();
+    return retVal;
+}
+
+
+UdpSocketManagerPosixImpl::UdpSocketManagerPosixImpl()
+{
+    _critSectList = CriticalSectionWrapper::CreateCriticalSection();
+    _thread = ThreadWrapper::CreateThread(UdpSocketManagerPosixImpl::Run, this,
+                                          kRealtimePriority,
+                                          "UdpSocketManagerPosixImplThread");
+    FD_ZERO(&_readFds);
+    WEBRTC_TRACE(kTraceMemory,  kTraceTransport, -1,
+                 "UdpSocketManagerPosix created");
+}
+
+UdpSocketManagerPosixImpl::~UdpSocketManagerPosixImpl()
+{
+    if(_thread != NULL)
+    {
+        delete _thread;
+    }
+
+    if (_critSectList != NULL)
+    {
+        UpdateSocketMap();
+
+        _critSectList->Enter();
+
+        MapItem* item = _socketMap.First();
+        while(item)
+        {
+            UdpSocketPosix* s = static_cast<UdpSocketPosix*>(item->GetItem());
+            _socketMap.Erase(item);
+            item = _socketMap.First();
+            delete s;
+        }
+        _critSectList->Leave();
+
+        delete _critSectList;
+    }
+
+    WEBRTC_TRACE(kTraceMemory,  kTraceTransport, -1,
+                 "UdpSocketManagerPosix deleted");
+}
+
+bool UdpSocketManagerPosixImpl::Start()
+{
+    unsigned int id = 0;
+    if (_thread == NULL)
+    {
+        return false;
+    }
+
+    WEBRTC_TRACE(kTraceStateInfo,  kTraceTransport, -1,
+                 "Start UdpSocketManagerPosix");
+    return _thread->Start(id);
+}
+
+bool UdpSocketManagerPosixImpl::Stop()
+{
+    if (_thread == NULL)
+    {
+        return true;
+    }
+
+    WEBRTC_TRACE(kTraceStateInfo,  kTraceTransport, -1,
+                 "Stop UdpSocketManagerPosix");
+    return _thread->Stop();
+}
+
+bool UdpSocketManagerPosixImpl::Process()
+{
+    bool doSelect = false;
+    // Timeout = 1 second.
+    struct timeval timeout;
+    timeout.tv_sec = 0;
+    timeout.tv_usec = 10000;
+    MapItem* it;
+
+    FD_ZERO(&_readFds);
+
+    UpdateSocketMap();
+
+    unsigned int maxFd = 0;
+    for (it = _socketMap.First(); it != NULL; it=_socketMap.Next(it))
+    {
+        doSelect = true;
+        maxFd = maxFd > it->GetUnsignedId() ? maxFd : it->GetUnsignedId();
+        FD_SET(it->GetUnsignedId(), &_readFds);
+
+        maxFd = maxFd > it->GetUnsignedId() ? maxFd : it->GetUnsignedId();
+        doSelect = true;
+    }
+
+    int num = 0;
+    if (doSelect)
+    {
+        num = select(maxFd+1, &_readFds, NULL, NULL, &timeout);
+
+        if (num == SOCKET_ERROR)
+        {
+            // Timeout = 10 ms.
+            timespec t;
+            t.tv_sec = 0;
+            t.tv_nsec = 10000*1000;
+            nanosleep(&t, NULL);
+            return true;
+        }
+    }else
+    {
+        // Timeout = 10 ms.
+        timespec t;
+        t.tv_sec = 0;
+        t.tv_nsec = 10000*1000;
+        nanosleep(&t, NULL);
+        return true;
+    }
+
+    for (it = _socketMap.First(); it != NULL && num > 0;
+         it = _socketMap.Next(it))
+    {
+        UdpSocketPosix* s = static_cast<UdpSocketPosix*>(it->GetItem());
+        if (FD_ISSET(it->GetUnsignedId(), &_readFds))
+        {
+            s->HasIncoming();
+            num--;
+        }
+    }
+    return true;
+}
+
+bool UdpSocketManagerPosixImpl::Run(ThreadObj obj)
+{
+    UdpSocketManagerPosixImpl* mgr =
+        static_cast<UdpSocketManagerPosixImpl*>(obj);
+    return mgr->Process();
+}
+
+bool UdpSocketManagerPosixImpl::AddSocket(UdpSocketWrapper* s)
+{
+    UdpSocketPosix* sl = static_cast<UdpSocketPosix*>(s);
+    if(sl->GetFd() == INVALID_SOCKET || !(sl->GetFd() < FD_SETSIZE))
+    {
+        return false;
+    }
+    _critSectList->Enter();
+    _addList.PushBack(s);
+    _critSectList->Leave();
+    return true;
+}
+
+bool UdpSocketManagerPosixImpl::RemoveSocket(UdpSocketWrapper* s)
+{
+    // Put in remove list if this is the correct UdpSocketManagerPosixImpl.
+    _critSectList->Enter();
+
+    // If the socket is in the add list it's safe to remove and delete it.
+    ListItem* addListItem = _addList.First();
+    while(addListItem)
+    {
+        UdpSocketPosix* addSocket = (UdpSocketPosix*)addListItem->GetItem();
+        unsigned int addFD = addSocket->GetFd();
+        unsigned int removeFD = static_cast<UdpSocketPosix*>(s)->GetFd();
+        if(removeFD == addFD)
+        {
+            _removeList.PushBack(removeFD);
+            _critSectList->Leave();
+            return true;
+        }
+        addListItem = _addList.Next(addListItem);
+    }
+
+    // Checking the socket map is safe since all Erase and Insert calls to this
+    // map are also protected by _critSectList.
+    if(_socketMap.Find(static_cast<UdpSocketPosix*>(s)->GetFd()) != NULL)
+    {
+        _removeList.PushBack(static_cast<UdpSocketPosix*>(s)->GetFd());
+        _critSectList->Leave();
+         return true;
+    }
+    _critSectList->Leave();
+    return false;
+}
+
+void UdpSocketManagerPosixImpl::UpdateSocketMap()
+{
+    // Remove items in remove list.
+    _critSectList->Enter();
+    while(!_removeList.Empty())
+    {
+        UdpSocketPosix* deleteSocket = NULL;
+        unsigned int removeFD = _removeList.First()->GetUnsignedItem();
+
+        // If the socket is in the add list it hasn't been added to the socket
+        // map yet. Just remove the socket from the add list.
+        ListItem* addListItem = _addList.First();
+        while(addListItem)
+        {
+            UdpSocketPosix* addSocket = (UdpSocketPosix*)addListItem->GetItem();
+            unsigned int addFD = addSocket->GetFd();
+            if(removeFD == addFD)
+            {
+                deleteSocket = addSocket;
+                _addList.Erase(addListItem);
+                break;
+            }
+            addListItem = _addList.Next(addListItem);
+        }
+
+        // Find and remove socket from _socketMap.
+        MapItem* it = _socketMap.Find(removeFD);
+        if(it != NULL)
+        {
+            UdpSocketPosix* socket =
+                static_cast<UdpSocketPosix*>(it->GetItem());
+            if(socket)
+            {
+                deleteSocket = socket;
+            }
+            _socketMap.Erase(it);
+        }
+        if(deleteSocket)
+        {
+            deleteSocket->ReadyForDeletion();
+            delete deleteSocket;
+        }
+        _removeList.PopFront();
+    }
+
+    // Add sockets from add list.
+    while(!_addList.Empty())
+    {
+        UdpSocketPosix* s =
+            static_cast<UdpSocketPosix*>(_addList.First()->GetItem());
+        if(s)
+        {
+            _socketMap.Insert(s->GetFd(), s);
+        }
+        _addList.PopFront();
+    }
+    _critSectList->Leave();
+}
+} // namespace webrtc
diff --git a/src/modules/udp_transport/source/udp_socket_manager_posix.h b/src/modules/udp_transport/source/udp_socket_manager_posix.h
new file mode 100644
index 0000000..c89aa13
--- /dev/null
+++ b/src/modules/udp_transport/source/udp_socket_manager_posix.h
@@ -0,0 +1,85 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_MANAGER_POSIX_H_
+#define WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_MANAGER_POSIX_H_
+
+#include <sys/types.h>
+#include <unistd.h>
+
+#include "critical_section_wrapper.h"
+#include "list_wrapper.h"
+#include "map_wrapper.h"
+#include "thread_wrapper.h"
+#include "udp_socket_manager_wrapper.h"
+#include "udp_socket_wrapper.h"
+
+#define MAX_NUMBER_OF_SOCKET_MANAGERS_LINUX 8
+
+namespace webrtc {
+
+class ConditionVariableWrapper;
+class UdpSocketManagerPosixImpl;
+
+class UdpSocketManagerPosix : public UdpSocketManager
+{
+public:
+    UdpSocketManagerPosix();
+    virtual ~UdpSocketManagerPosix();
+
+    virtual bool Init(WebRtc_Word32 id,
+                      WebRtc_UWord8& numOfWorkThreads);
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual bool Start();
+    virtual bool Stop();
+
+    virtual bool AddSocket(UdpSocketWrapper* s);
+    virtual bool RemoveSocket(UdpSocketWrapper* s);
+private:
+    WebRtc_Word32 _id;
+    CriticalSectionWrapper* _critSect;
+    WebRtc_UWord8 _numberOfSocketMgr;
+    WebRtc_UWord8 _incSocketMgrNextTime;
+    WebRtc_UWord8 _nextSocketMgrToAssign;
+    UdpSocketManagerPosixImpl* _socketMgr[MAX_NUMBER_OF_SOCKET_MANAGERS_LINUX];
+};
+
+class UdpSocketManagerPosixImpl
+{
+public:
+    UdpSocketManagerPosixImpl();
+    virtual ~UdpSocketManagerPosixImpl();
+
+    virtual bool Start();
+    virtual bool Stop();
+
+    virtual bool AddSocket(UdpSocketWrapper* s);
+    virtual bool RemoveSocket(UdpSocketWrapper* s);
+
+protected:
+    static bool Run(ThreadObj obj);
+    bool Process();
+    void UpdateSocketMap();
+
+private:
+    ThreadWrapper* _thread;
+    CriticalSectionWrapper* _critSectList;
+
+    fd_set _readFds;
+
+    MapWrapper _socketMap;
+    ListWrapper _addList;
+    ListWrapper _removeList;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_MANAGER_POSIX_H_
diff --git a/src/modules/udp_transport/source/udp_socket_manager_unittest.cc b/src/modules/udp_transport/source/udp_socket_manager_unittest.cc
new file mode 100644
index 0000000..891a47a
--- /dev/null
+++ b/src/modules/udp_transport/source/udp_socket_manager_unittest.cc
@@ -0,0 +1,82 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Tests for the UdpSocketManager interface.
+// Note: This tests UdpSocketManager together with UdpSocketWrapper,
+// due to the way the code is full of static-casts to the platform dependent
+// subtypes.
+// It also uses the static UdpSocketManager object.
+// The most important property of these tests is that they do not leak memory.
+
+#include "udp_socket_wrapper.h"
+#include "udp_socket_manager_wrapper.h"
+#include "gtest/gtest.h"
+#include "src/system_wrappers/interface/trace.h"
+
+namespace webrtc {
+
+TEST(UdpSocketManager, CreateCallsInitAndDoesNotLeakMemory) {
+  WebRtc_Word32 id = 42;
+  WebRtc_UWord8 threads = 1;
+  UdpSocketManager* mgr = UdpSocketManager::Create(id, threads);
+  // Create is supposed to have called init on the object.
+  EXPECT_FALSE(mgr->Init(id, threads))
+      << "Init should return false since Create is supposed to call it.";
+  UdpSocketManager::Return();
+}
+
+// Creates a socket and adds it to the socket manager, and then removes it
+// before destroying the socket manager.
+TEST(UdpSocketManager, AddAndRemoveSocketDoesNotLeakMemory) {
+  WebRtc_Word32 id = 42;
+  WebRtc_UWord8 threads = 1;
+  UdpSocketManager* mgr = UdpSocketManager::Create(id, threads);
+  UdpSocketWrapper* socket
+       = UdpSocketWrapper::CreateSocket(id,
+                                        mgr,
+                                        NULL,  // CallbackObj
+                                        NULL,  // IncomingSocketCallback
+                                        false,  // ipV6Enable
+                                        false);  // disableGQOS
+  // The constructor will do AddSocket on the manager.
+  // RemoveSocket indirectly calls Delete.
+  EXPECT_EQ(true, mgr->RemoveSocket(socket));
+  UdpSocketManager::Return();
+}
+
+// Creates a socket and add it to the socket manager, but does not remove it
+// before destroying the socket manager.
+// On Posix, this destroys the socket.
+// On Winsock2 Windows, it enters an infinite wait for all the sockets
+// to go away.
+TEST(UdpSocketManager, UnremovedSocketsGetCollectedAtManagerDeletion) {
+#if defined(_WIN32)
+  // It's hard to test an infinite wait, so we don't.
+#else
+  WebRtc_Word32 id = 42;
+  WebRtc_UWord8 threads = 1;
+  UdpSocketManager* mgr = UdpSocketManager::Create(id, threads);
+  UdpSocketWrapper* unused_socket
+       = UdpSocketWrapper::CreateSocket(id,
+                                                mgr,
+                                                NULL,  // CallbackObj
+                                                NULL,  // IncomingSocketCallback
+                                                false,  // ipV6Enable
+                                                false);  // disableGQOS
+  // The constructor will do AddSocket on the manager.
+  // Call a member funtion to work around "set but not used" compliation
+  // error on ChromeOS ARM.
+  unused_socket->SetEventToNull();
+  unused_socket = NULL;
+  UdpSocketManager::Return();
+#endif
+}
+
+}  // namespace webrtc
diff --git a/src/modules/udp_transport/source/udp_socket_manager_wrapper.cc b/src/modules/udp_transport/source/udp_socket_manager_wrapper.cc
new file mode 100644
index 0000000..37388be
--- /dev/null
+++ b/src/modules/udp_transport/source/udp_socket_manager_wrapper.cc
@@ -0,0 +1,69 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "udp_socket_manager_wrapper.h"
+
+#include <cassert>
+
+#ifdef _WIN32
+#include "fix_interlocked_exchange_pointer_win.h"
+#include "udp_socket2_manager_windows.h"
+#else
+#include "udp_socket_manager_posix.h"
+#endif
+
+namespace webrtc {
+UdpSocketManager* UdpSocketManager::CreateInstance()
+{
+#if defined(_WIN32)
+  return static_cast<UdpSocketManager*>(new UdpSocket2ManagerWindows());
+#else
+    return new UdpSocketManagerPosix();
+#endif
+}
+
+UdpSocketManager* UdpSocketManager::StaticInstance(
+    CountOperation count_operation,
+    const WebRtc_Word32 id,
+    WebRtc_UWord8& numOfWorkThreads)
+{
+    UdpSocketManager* impl =
+        GetStaticInstance<UdpSocketManager>(count_operation);
+    if (count_operation == kAddRef && impl != NULL) {
+        if (impl->Init(id, numOfWorkThreads)) {
+            impl->Start();
+        }
+    }
+    return impl;
+}
+
+UdpSocketManager* UdpSocketManager::Create(const WebRtc_Word32 id,
+                                           WebRtc_UWord8& numOfWorkThreads)
+{
+    return UdpSocketManager::StaticInstance(kAddRef, id,
+                                            numOfWorkThreads);
+}
+
+void UdpSocketManager::Return()
+{
+    WebRtc_UWord8 numOfWorkThreads = 0;
+    UdpSocketManager::StaticInstance(kRelease, -1,
+                                     numOfWorkThreads);
+}
+
+UdpSocketManager::UdpSocketManager() : _numOfWorkThreads(0)
+{
+}
+
+WebRtc_UWord8 UdpSocketManager::WorkThreads() const
+{
+    return _numOfWorkThreads;
+}
+} // namespace webrtc
diff --git a/src/modules/udp_transport/source/udp_socket_manager_wrapper.h b/src/modules/udp_transport/source/udp_socket_manager_wrapper.h
new file mode 100644
index 0000000..e7bd09e
--- /dev/null
+++ b/src/modules/udp_transport/source/udp_socket_manager_wrapper.h
@@ -0,0 +1,71 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_MANAGER_WRAPPER_H_
+#define WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_MANAGER_WRAPPER_H_
+
+#include "system_wrappers/interface/static_instance.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+class UdpSocketWrapper;
+
+class UdpSocketManager
+{
+public:
+    static UdpSocketManager* Create(const WebRtc_Word32 id,
+                                    WebRtc_UWord8& numOfWorkThreads);
+    static void Return();
+
+    // Initializes the socket manager. Returns true if the manager wasn't
+    // already initialized.
+    virtual bool Init(WebRtc_Word32 id,
+                      WebRtc_UWord8& numOfWorkThreads) = 0;
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id) = 0;
+
+    // Start listening to sockets that have been registered via the
+    // AddSocket(..) API.
+    virtual bool Start() = 0;
+    // Stop listening to sockets.
+    virtual bool Stop() = 0;
+
+    virtual WebRtc_UWord8 WorkThreads() const;
+
+    // Register a socket with the socket manager.
+    virtual bool AddSocket(UdpSocketWrapper* s) = 0;
+    // Unregister a socket from the manager.
+    virtual bool RemoveSocket(UdpSocketWrapper* s) = 0;
+
+protected:
+    UdpSocketManager();
+    virtual ~UdpSocketManager() {}
+
+    WebRtc_UWord8 _numOfWorkThreads;
+
+    // Factory method.
+    static UdpSocketManager* CreateInstance();
+
+private:
+    // Friend function to allow the UDP destructor to be accessed from the
+    // instance template.
+    friend UdpSocketManager*
+    GetStaticInstance<UdpSocketManager>(CountOperation count_operation);
+
+    static UdpSocketManager* StaticInstance(
+        CountOperation count_operation,
+        const WebRtc_Word32 id,
+        WebRtc_UWord8& numOfWorkThreads);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_MANAGER_WRAPPER_H_
diff --git a/src/modules/udp_transport/source/udp_socket_posix.cc b/src/modules/udp_transport/source/udp_socket_posix.cc
new file mode 100644
index 0000000..69dc099
--- /dev/null
+++ b/src/modules/udp_transport/source/udp_socket_posix.cc
@@ -0,0 +1,277 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "udp_socket_posix.h"
+
+#include <errno.h>
+#include <fcntl.h>
+#include <netdb.h>
+#include <string.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <time.h>
+#include <unistd.h>
+
+#include "trace.h"
+#include "udp_socket_manager_wrapper.h"
+#include "udp_socket_wrapper.h"
+
+namespace webrtc {
+UdpSocketPosix::UdpSocketPosix(const WebRtc_Word32 id, UdpSocketManager* mgr,
+                               bool ipV6Enable)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceTransport, id,
+                 "UdpSocketPosix::UdpSocketPosix()");
+
+    _wantsIncoming = false;
+    _error = 0;
+    _mgr = mgr;
+
+    _id = id;
+    _obj = NULL;
+    _incomingCb = NULL;
+    _readyForDeletionCond = ConditionVariableWrapper::CreateConditionVariable();
+    _closeBlockingCompletedCond =
+        ConditionVariableWrapper::CreateConditionVariable();
+    _cs = CriticalSectionWrapper::CreateCriticalSection();
+    _readyForDeletion = false;
+    _closeBlockingActive = false;
+    _closeBlockingCompleted= false;
+    if(ipV6Enable)
+    {
+        _socket = socket(AF_INET6, SOCK_DGRAM, IPPROTO_UDP);
+    }
+    else {
+        _socket = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP);
+    }
+
+    // Set socket to nonblocking mode.
+    int enable_non_blocking = 1;
+    if(ioctl(_socket, FIONBIO, &enable_non_blocking) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceTransport, id,
+                     "Failed to make socket nonblocking");
+    }
+    // Enable close on fork for file descriptor so that it will not block until
+    // forked process terminates.
+    if(fcntl(_socket, F_SETFD, FD_CLOEXEC) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceTransport, id,
+                     "Failed to set FD_CLOEXEC for socket");
+    }
+}
+
+UdpSocketPosix::~UdpSocketPosix()
+{
+    if(_socket != INVALID_SOCKET)
+    {
+        close(_socket);
+        _socket = INVALID_SOCKET;
+    }
+    if(_readyForDeletionCond)
+    {
+        delete _readyForDeletionCond;
+    }
+
+    if(_closeBlockingCompletedCond)
+    {
+        delete _closeBlockingCompletedCond;
+    }
+
+    if(_cs)
+    {
+        delete _cs;
+    }
+}
+
+WebRtc_Word32 UdpSocketPosix::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    return 0;
+}
+
+bool UdpSocketPosix::SetCallback(CallbackObj obj, IncomingSocketCallback cb)
+{
+    _obj = obj;
+    _incomingCb = cb;
+
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocketPosix(%p)::SetCallback", this);
+
+    if (_mgr->AddSocket(this))
+      {
+        WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                     "UdpSocketPosix(%p)::SetCallback socket added to manager",
+                     this);
+        return true;   // socket is now ready for action
+      }
+
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "UdpSocketPosix(%p)::SetCallback error adding me to mgr",
+                 this);
+    return false;
+}
+
+bool UdpSocketPosix::SetSockopt(WebRtc_Word32 level, WebRtc_Word32 optname,
+                            const WebRtc_Word8* optval, WebRtc_Word32 optlen)
+{
+   if(0 == setsockopt(_socket, level, optname, optval, optlen ))
+   {
+       return true;
+   }
+
+   _error = errno;
+   WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                "UdpSocketPosix::SetSockopt(), error:%d", _error);
+   return false;
+}
+
+WebRtc_Word32 UdpSocketPosix::SetTOS(WebRtc_Word32 serviceType)
+{
+    if (SetSockopt(IPPROTO_IP, IP_TOS ,(WebRtc_Word8*)&serviceType ,4) != 0)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+bool UdpSocketPosix::Bind(const SocketAddress& name)
+{
+    int size = sizeof(sockaddr);
+    if (0 == bind(_socket, reinterpret_cast<const sockaddr*>(&name),size))
+    {
+        return true;
+    }
+    _error = errno;
+    WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                 "UdpSocketPosix::Bind() error: %d",_error);
+    return false;
+}
+
+WebRtc_Word32 UdpSocketPosix::SendTo(const WebRtc_Word8* buf, WebRtc_Word32 len,
+                                     const SocketAddress& to)
+{
+    int size = sizeof(sockaddr);
+    int retVal = sendto(_socket,buf, len, 0,
+                        reinterpret_cast<const sockaddr*>(&to), size);
+    if(retVal == SOCKET_ERROR)
+    {
+        _error = errno;
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "UdpSocketPosix::SendTo() error: %d", _error);
+    }
+
+    return retVal;
+}
+
+bool UdpSocketPosix::ValidHandle()
+{
+    return _socket != INVALID_SOCKET;
+}
+
+void UdpSocketPosix::HasIncoming()
+{
+    // replace 2048 with a mcro define and figure out
+    // where 2048 comes from
+    WebRtc_Word8 buf[2048];
+    int retval;
+    SocketAddress from;
+#if defined(WEBRTC_MAC_INTEL) || defined(WEBRTC_MAC)
+    sockaddr sockaddrfrom;
+    memset(&from, 0, sizeof(from));
+    memset(&sockaddrfrom, 0, sizeof(sockaddrfrom));
+    socklen_t fromlen = sizeof(sockaddrfrom);
+#else
+    memset(&from, 0, sizeof(from));
+    socklen_t fromlen = sizeof(from);
+#endif
+
+#if defined(WEBRTC_MAC_INTEL) || defined(WEBRTC_MAC)
+        retval = recvfrom(_socket,buf, sizeof(buf), 0,
+                          reinterpret_cast<sockaddr*>(&sockaddrfrom), &fromlen);
+        memcpy(&from, &sockaddrfrom, fromlen);
+        from._sockaddr_storage.sin_family = sockaddrfrom.sa_family;
+#else
+        retval = recvfrom(_socket,buf, sizeof(buf), 0,
+                          reinterpret_cast<sockaddr*>(&from), &fromlen);
+#endif
+
+    switch(retval)
+    {
+    case 0:
+        // The peer has performed an orderly shutdown.
+        break;
+    case SOCKET_ERROR:
+        break;
+    default:
+        if(_wantsIncoming && _incomingCb)
+        {
+          _incomingCb(_obj, buf, retval, &from);
+        }
+        break;
+    }
+}
+
+void UdpSocketPosix::CloseBlocking()
+{
+    _cs->Enter();
+    _closeBlockingActive = true;
+    if(!CleanUp())
+    {
+        _closeBlockingActive = false;
+        _cs->Leave();
+        return;
+    }
+
+    while(!_readyForDeletion)
+    {
+        _readyForDeletionCond->SleepCS(*_cs);
+    }
+    _closeBlockingCompleted = true;
+    _closeBlockingCompletedCond->Wake();
+    _cs->Leave();
+}
+
+void UdpSocketPosix::ReadyForDeletion()
+{
+    _cs->Enter();
+    if(!_closeBlockingActive)
+    {
+        _cs->Leave();
+        return;
+    }
+    close(_socket);
+    _socket = INVALID_SOCKET;
+    _readyForDeletion = true;
+    _readyForDeletionCond->Wake();
+    while(!_closeBlockingCompleted)
+    {
+        _closeBlockingCompletedCond->SleepCS(*_cs);
+    }
+    _cs->Leave();
+}
+
+bool UdpSocketPosix::CleanUp()
+{
+    _wantsIncoming = false;
+
+    if (_socket == INVALID_SOCKET)
+    {
+        return false;
+    }
+
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                 "calling UdpSocketManager::RemoveSocket()...");
+    _mgr->RemoveSocket(this);
+    // After this, the socket should may be or will be as deleted. Return
+    // immediately.
+    return true;
+}
+} // namespace webrtc
diff --git a/src/modules/udp_transport/source/udp_socket_posix.h b/src/modules/udp_transport/source/udp_socket_posix.h
new file mode 100644
index 0000000..ee76abb
--- /dev/null
+++ b/src/modules/udp_transport/source/udp_socket_posix.h
@@ -0,0 +1,91 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_POSIX_H_
+#define WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_POSIX_H_
+
+#include <arpa/inet.h>
+#include <netinet/in.h>
+#include <sys/types.h>
+#include <sys/socket.h>
+
+#include "condition_variable_wrapper.h"
+#include "critical_section_wrapper.h"
+#include "udp_socket_wrapper.h"
+
+#define SOCKET_ERROR -1
+
+namespace webrtc {
+class UdpSocketPosix : public UdpSocketWrapper
+{
+public:
+    UdpSocketPosix(const WebRtc_Word32 id, UdpSocketManager* mgr,
+                   bool ipV6Enable = false);
+
+    virtual ~UdpSocketPosix();
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual bool SetCallback(CallbackObj obj, IncomingSocketCallback cb);
+
+    virtual bool Bind(const SocketAddress& name);
+
+    virtual bool SetSockopt(WebRtc_Word32 level, WebRtc_Word32 optname,
+                            const WebRtc_Word8* optval, WebRtc_Word32 optlen);
+
+    virtual WebRtc_Word32 SetTOS(const WebRtc_Word32 serviceType);
+
+    virtual WebRtc_Word32 SendTo(const WebRtc_Word8* buf, WebRtc_Word32 len,
+                                 const SocketAddress& to);
+
+    // Deletes socket in addition to closing it.
+    // TODO (hellner): make destructor protected.
+    virtual void CloseBlocking();
+
+    virtual SOCKET GetFd() {return _socket;}
+    virtual WebRtc_Word32 GetError() {return _error;}
+
+    virtual bool ValidHandle();
+
+    virtual bool SetQos(WebRtc_Word32 /*serviceType*/,
+                        WebRtc_Word32 /*tokenRate*/,
+                        WebRtc_Word32 /*bucketSize*/,
+                        WebRtc_Word32 /*peekBandwith*/,
+                        WebRtc_Word32 /*minPolicedSize*/,
+                        WebRtc_Word32 /*maxSduSize*/,
+                        const SocketAddress& /*stRemName*/,
+                        WebRtc_Word32 /*overrideDSCP*/) {return false;}
+
+    bool CleanUp();
+    void HasIncoming();
+    bool WantsIncoming() {return _wantsIncoming;}
+    void ReadyForDeletion();
+private:
+    friend class UdpSocketManagerPosix;
+
+    WebRtc_Word32 _id;
+    IncomingSocketCallback _incomingCb;
+    CallbackObj _obj;
+    WebRtc_Word32 _error;
+
+    SOCKET _socket;
+    UdpSocketManager* _mgr;
+    ConditionVariableWrapper* _closeBlockingCompletedCond;
+    ConditionVariableWrapper* _readyForDeletionCond;
+
+    bool _closeBlockingActive;
+    bool _closeBlockingCompleted;
+    bool _readyForDeletion;
+
+    CriticalSectionWrapper* _cs;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_POSIX_H_
diff --git a/src/modules/udp_transport/source/udp_socket_wrapper.cc b/src/modules/udp_transport/source/udp_socket_wrapper.cc
new file mode 100644
index 0000000..6445e76
--- /dev/null
+++ b/src/modules/udp_transport/source/udp_socket_wrapper.cc
@@ -0,0 +1,147 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "udp_socket_wrapper.h"
+
+#include <stdlib.h>
+#include <string.h>
+
+#include "event_wrapper.h"
+#include "trace.h"
+#include "udp_socket_manager_wrapper.h"
+
+#if defined(_WIN32)
+    #include "udp_socket2_windows.h"
+#else
+    #include "udp_socket_posix.h"
+#endif
+
+
+namespace webrtc {
+bool UdpSocketWrapper::_initiated = false;
+
+// Temporary Android hack. The value 1024 is taken from
+// <ndk>/build/platforms/android-1.5/arch-arm/usr/include/linux/posix_types.h
+// TODO (tomasl): can we remove this now?
+#ifndef FD_SETSIZE
+#define FD_SETSIZE 1024
+#endif
+
+UdpSocketWrapper::UdpSocketWrapper()
+    : _wantsIncoming(false),
+      _deleteEvent(NULL)
+{
+}
+
+UdpSocketWrapper::~UdpSocketWrapper()
+{
+    if(_deleteEvent)
+    {
+      _deleteEvent->Set();
+      _deleteEvent = NULL;
+    }
+}
+
+void UdpSocketWrapper::SetEventToNull()
+{
+    if (_deleteEvent)
+    {
+        _deleteEvent = NULL;
+    }
+}
+
+UdpSocketWrapper* UdpSocketWrapper::CreateSocket(const WebRtc_Word32 id,
+                                                 UdpSocketManager* mgr,
+                                                 CallbackObj obj,
+                                                 IncomingSocketCallback cb,
+                                                 bool ipV6Enable,
+                                                 bool disableGQOS)
+
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceTransport, id,
+                 "UdpSocketWrapper::CreateSocket");
+
+    UdpSocketWrapper* s = 0;
+
+#ifdef _WIN32
+    if (!_initiated)
+    {
+        WSADATA wsaData;
+        WORD wVersionRequested = MAKEWORD( 2, 2 );
+        WebRtc_Word32 err = WSAStartup( wVersionRequested, &wsaData);
+        if (err != 0)
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                id,
+                "UdpSocketWrapper::CreateSocket failed to initialize sockets\
+ WSAStartup error:%d",
+                err);
+            return NULL;
+        }
+
+        _initiated = true;
+    }
+
+    s = new UdpSocket2Windows(id, mgr, ipV6Enable, disableGQOS);
+
+#else
+    if (!_initiated)
+    {
+        _initiated = true;
+    }
+    s = new UdpSocketPosix(id, mgr, ipV6Enable);
+    if (s)
+    {
+        UdpSocketPosix* sl = static_cast<UdpSocketPosix*>(s);
+        if (sl->GetFd() != INVALID_SOCKET && sl->GetFd() < FD_SETSIZE)
+        {
+            // ok
+        } else
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                id,
+                "UdpSocketWrapper::CreateSocket failed to initialize socket");
+            delete s;
+            s = NULL;
+        }
+    }
+#endif
+    if (s)
+    {
+        s->_deleteEvent = NULL;
+        if (!s->SetCallback(obj, cb))
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                id,
+                "UdpSocketWrapper::CreateSocket failed to ser callback");
+            return(NULL);
+        }
+    }
+    return s;
+}
+
+bool UdpSocketWrapper::StartReceiving()
+{
+    _wantsIncoming = true;
+    return true;
+}
+
+bool UdpSocketWrapper::StopReceiving()
+{
+    _wantsIncoming = false;
+    return true;
+}
+} // namespace webrtc
diff --git a/src/modules/udp_transport/source/udp_socket_wrapper.h b/src/modules/udp_transport/source/udp_socket_wrapper.h
new file mode 100644
index 0000000..e5289cf
--- /dev/null
+++ b/src/modules/udp_transport/source/udp_socket_wrapper.h
@@ -0,0 +1,112 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_WRAPPER_H_
+#define WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_WRAPPER_H_
+
+#include "udp_transport.h"
+
+namespace webrtc {
+class EventWrapper;
+class UdpSocketManager;
+
+#define SOCKET_ERROR_NO_QOS -1000
+
+#ifndef _WIN32
+typedef int SOCKET;
+#endif
+
+#ifndef INVALID_SOCKET
+#define INVALID_SOCKET  (SOCKET)(~0)
+
+#ifndef AF_INET
+#define AF_INET 2
+#endif
+
+#endif
+
+typedef void* CallbackObj;
+typedef void(*IncomingSocketCallback)(CallbackObj obj, const WebRtc_Word8* buf,
+                                      WebRtc_Word32 len,
+                                      const SocketAddress* from);
+
+class UdpSocketWrapper
+{
+public:
+    static UdpSocketWrapper* CreateSocket(const WebRtc_Word32 id,
+                                          UdpSocketManager* mgr,
+                                          CallbackObj obj,
+                                          IncomingSocketCallback cb,
+                                          bool ipV6Enable = false,
+                                          bool disableGQOS = false);
+
+    // Set the unique identifier of this class to id.
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id) = 0;
+
+    // Register cb for receiving callbacks when there are incoming packets.
+    // Register obj so that it will be passed in calls to cb.
+    virtual bool SetCallback(CallbackObj obj, IncomingSocketCallback cb) = 0;
+
+    // Socket to local address specified by name.
+    virtual bool Bind(const SocketAddress& name) = 0;
+
+    // Start receiving UDP data.
+    virtual bool StartReceiving();
+    virtual inline bool StartReceiving(const WebRtc_UWord32 /*receiveBuffers*/)
+    {return StartReceiving();}
+    // Stop receiving UDP data.
+    virtual bool StopReceiving();
+
+    virtual bool ValidHandle() = 0;
+
+    // Set socket options.
+    virtual bool SetSockopt(WebRtc_Word32 level, WebRtc_Word32 optname,
+                            const WebRtc_Word8* optval,
+                            WebRtc_Word32 optlen) = 0;
+
+    // Set TOS for outgoing packets.
+    virtual WebRtc_Word32 SetTOS(const WebRtc_Word32 serviceType) = 0;
+
+    // Set 802.1Q PCP field (802.1p) for outgoing VLAN traffic.
+    virtual WebRtc_Word32 SetPCP(const WebRtc_Word32 /*pcp*/) {return -1;}
+
+    // Send buf of length len to the address specified by to.
+    virtual WebRtc_Word32 SendTo(const WebRtc_Word8* buf, WebRtc_Word32 len,
+                                 const SocketAddress& to) = 0;
+
+    virtual void SetEventToNull();
+
+    // Close socket and don't return until completed.
+    virtual void CloseBlocking() {}
+
+    // tokenRate is in bit/s. peakBandwidt is in byte/s
+    virtual bool SetQos(WebRtc_Word32 serviceType, WebRtc_Word32 tokenRate,
+                        WebRtc_Word32 bucketSize, WebRtc_Word32 peekBandwith,
+                        WebRtc_Word32 minPolicedSize, WebRtc_Word32 maxSduSize,
+                        const SocketAddress &stRemName,
+                        WebRtc_Word32 overrideDSCP = 0) = 0;
+
+    virtual WebRtc_UWord32 ReceiveBuffers() {return 0;};
+
+protected:
+    // Creating the socket is done via CreateSocket().
+    UdpSocketWrapper();
+    // Destroying the socket is done via CloseBlocking().
+    virtual ~UdpSocketWrapper();
+
+    bool _wantsIncoming;
+    EventWrapper*  _deleteEvent;
+
+private:
+    static bool _initiated;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_SOCKET_WRAPPER_H_
diff --git a/src/modules/udp_transport/source/udp_socket_wrapper_unittest.cc b/src/modules/udp_transport/source/udp_socket_wrapper_unittest.cc
new file mode 100644
index 0000000..32d9a64
--- /dev/null
+++ b/src/modules/udp_transport/source/udp_socket_wrapper_unittest.cc
@@ -0,0 +1,98 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Tests for the UdpSocketWrapper interface.
+// This will test the UdpSocket implementations on various platforms.
+// Note that this test is using a real SocketManager, which starts up
+// an extra worker thread, making the testing more complex than it
+// should be.
+// This is because on Posix, the CloseBlocking function waits for the
+// ReadyForDeletion function to be called, which has to be called after
+// CloseBlocking, and thus has to be called from another thread.
+// The manager is the one actually doing the deleting.
+// This is done differently in the Winsock2 code, but that code
+// will also hang if the destructor is called directly.
+
+#include "gtest/gtest.h"
+#include "gmock/gmock.h"
+#include "modules/udp_transport/source/udp_socket_wrapper.h"
+#include "modules/udp_transport/source/udp_socket_manager_wrapper.h"
+#include "system_wrappers/interface/trace.h"
+
+using ::testing::_;
+using ::testing::Return;
+
+namespace webrtc {
+
+const int kLogTrace = 0;
+
+class TestTraceCallback: public TraceCallback {
+ public:
+  void Print(const TraceLevel level,
+             const char *traceString,
+             const int length) {
+    if (traceString) {
+      char* tmp = new char[length+1];
+      memcpy(tmp, traceString, length);
+      tmp[length] = '\0';
+      printf("%s\n", tmp);
+      fflush(stdout);
+      delete[] tmp;
+    }
+  }
+};
+
+class MockSocketManager : public UdpSocketManager {
+ public:
+  MockSocketManager() {}
+  // Access to protected destructor.
+  void Destroy() {
+    delete this;
+  }
+  MOCK_METHOD2(Init, bool(WebRtc_Word32, WebRtc_UWord8&));
+  MOCK_METHOD1(ChangeUniqueId, WebRtc_Word32(const WebRtc_Word32));
+  MOCK_METHOD0(Start, bool());
+  MOCK_METHOD0(Stop, bool());
+  MOCK_METHOD1(AddSocket, bool(webrtc::UdpSocketWrapper*));
+  MOCK_METHOD1(RemoveSocket, bool(webrtc::UdpSocketWrapper*));
+};
+
+// Creates a socket using the static constructor method and verifies that
+// it's added to the socket manager.
+TEST(UdpSocketWrapper, CreateSocket) {
+  TestTraceCallback trace;
+  if (kLogTrace) {
+    Trace::CreateTrace();
+    Trace::SetLevelFilter(webrtc::kTraceAll);
+    Trace::SetTraceCallback(&trace);
+  }
+
+  WebRtc_Word32 id = 42;
+  // We can't test deletion of sockets without a socket manager.
+  WebRtc_UWord8 threads = 1;
+  UdpSocketManager* mgr = UdpSocketManager::Create(id, threads);
+  WEBRTC_TRACE(kTraceMemory, kTraceTransport, 42,
+               "Test trace call");
+
+  UdpSocketWrapper* socket
+       = UdpSocketWrapper::CreateSocket(id,
+                                        mgr,
+                                        NULL,  // CallbackObj
+                                        NULL,  // IncomingSocketCallback
+                                        false,  // ipV6Enable
+                                        false);  // disableGQOS
+  socket->CloseBlocking();
+  UdpSocketManager::Return();
+  if (kLogTrace) {
+    Trace::ReturnTrace();
+  }
+}
+
+}  // namespace webrtc
diff --git a/src/modules/udp_transport/source/udp_transport.gypi b/src/modules/udp_transport/source/udp_transport.gypi
new file mode 100644
index 0000000..58c208a
--- /dev/null
+++ b/src/modules/udp_transport/source/udp_transport.gypi
@@ -0,0 +1,110 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'udp_transport',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../interface',
+        '../../interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../interface',
+          '../../interface',
+        ],
+      },
+      'sources': [
+        # PLATFORM INDEPENDENT SOURCE FILES
+        '../interface/udp_transport.h',
+        'udp_transport_impl.cc',
+        'udp_socket_wrapper.cc',
+        'udp_socket_manager_wrapper.cc',
+        'udp_transport_impl.h',
+        'udp_socket_wrapper.h',
+        'udp_socket_manager_wrapper.h',
+        # PLATFORM SPECIFIC SOURCE FILES - Will be filtered below
+        # Posix (Linux/Mac)
+        'udp_socket_posix.cc',
+        'udp_socket_posix.h',
+        'udp_socket_manager_posix.cc',
+        'udp_socket_manager_posix.h',
+        # Windows
+        'udp_socket2_manager_windows.cc',
+        'udp_socket2_manager_windows.h',
+        'udp_socket2_windows.cc',
+        'udp_socket2_windows.h',
+        'traffic_control_windows.cc',
+        'traffic_control_windows.h',
+      ], # source
+      'conditions': [
+        # DEFINE PLATFORM SPECIFIC SOURCE FILES
+        ['os_posix==0', {
+          'sources!': [
+            'udp_socket_posix.cc',
+            'udp_socket_posix.h',
+            'udp_socket_manager_posix.cc',
+            'udp_socket_manager_posix.h',
+          ],
+        }],
+        ['OS!="win"', {
+          'sources!': [
+            'udp_socket2_manager_windows.cc',
+            'udp_socket2_manager_windows.h',
+            'udp_socket2_windows.cc',
+            'udp_socket2_windows.h',
+            'traffic_control_windows.cc',
+            'traffic_control_windows.h',
+          ],
+        }],
+        ['OS=="linux"', {
+          'cflags': [
+            '-fno-strict-aliasing',
+          ],
+        }],
+        ['OS=="mac"', {
+          'xcode_settings': {
+            'OTHER_CPLUSPLUSFLAGS': [ '-fno-strict-aliasing' ],
+          },
+        }],
+      ] # conditions
+    },
+  ], # targets
+  'conditions': [
+    ['include_tests==1', {
+      'targets': [
+        {
+          'target_name': 'udp_transport_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'udp_transport',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+            '<(DEPTH)/testing/gmock.gyp:gmock',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+          ],
+          'sources': [
+            'udp_transport_unittest.cc',
+            'udp_socket_manager_unittest.cc',
+            'udp_socket_wrapper_unittest.cc',
+          ],
+        }, # udp_transport_unittests
+      ], # targets
+    }], # include_tests
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/modules/udp_transport/source/udp_transport_impl.cc b/src/modules/udp_transport/source/udp_transport_impl.cc
new file mode 100644
index 0000000..e50db5d
--- /dev/null
+++ b/src/modules/udp_transport/source/udp_transport_impl.cc
@@ -0,0 +1,3035 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "udp_transport_impl.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+
+#if defined(_WIN32)
+#include <winsock2.h>
+#include <ws2tcpip.h>
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#include <arpa/inet.h>
+#include <ctype.h>
+#include <fcntl.h>
+#include <netdb.h>
+#include <net/if.h>
+#include <netinet/in.h>
+#include <stdlib.h>
+#include <sys/ioctl.h>
+#include <sys/socket.h>
+#include <sys/time.h>
+#include <unistd.h>
+#ifndef MAC_IPHONE
+#include <net/if_arp.h>
+#endif
+#endif // defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+
+#if defined(WEBRTC_MAC)
+#include <ifaddrs.h>
+#include <machine/types.h>
+#endif
+#if defined(WEBRTC_LINUX)
+#include <linux/netlink.h>
+#include <linux/rtnetlink.h>
+#endif
+
+#include "common_types.h"
+#include "critical_section_wrapper.h"
+#include "rw_lock_wrapper.h"
+#include "trace.h"
+#include "typedefs.h"
+#include "udp_socket_manager_wrapper.h"
+
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#define GetLastError() errno
+
+#define IFRSIZE ((int)(size * sizeof (struct ifreq)))
+
+#define NLMSG_OK_NO_WARNING(nlh,len)                                    \
+  ((len) >= (int)sizeof(struct nlmsghdr) &&                             \
+   (int)(nlh)->nlmsg_len >= (int)sizeof(struct nlmsghdr) &&             \
+   (int)(nlh)->nlmsg_len <= (len))
+
+#endif // defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+
+namespace webrtc {
+
+class SocketFactory : public UdpTransportImpl::SocketFactoryInterface {
+ public:
+  UdpSocketWrapper* CreateSocket(const WebRtc_Word32 id,
+                                 UdpSocketManager* mgr,
+                                 CallbackObj obj,
+                                 IncomingSocketCallback cb,
+                                 bool ipV6Enable,
+                                 bool disableGQOS) {
+    return UdpSocketWrapper::CreateSocket(id, mgr, obj, cb, ipV6Enable,
+                                          disableGQOS);
+  }
+};
+
+// Creates an UdpTransport using the definition of SocketFactory above,
+// and passes (creating if needed) a pointer to the static singleton
+// UdpSocketManager.
+UdpTransport* UdpTransport::Create(const WebRtc_Word32 id,
+                                   WebRtc_UWord8& numSocketThreads)
+{
+  return new UdpTransportImpl(id,
+                              new SocketFactory(),
+                              UdpSocketManager::Create(id, numSocketThreads));
+}
+
+// Deletes the UdpTransport and decrements the refcount of the
+// static singleton UdpSocketManager, possibly destroying it.
+// Should only be used on UdpTransports that are created using Create.
+void UdpTransport::Destroy(UdpTransport* module)
+{
+    if(module)
+    {
+        delete module;
+        UdpSocketManager::Return();
+    }
+}
+
+UdpTransportImpl::UdpTransportImpl(const WebRtc_Word32 id,
+                                   SocketFactoryInterface* maker,
+                                   UdpSocketManager* socket_manager)
+    : _id(id),
+      _socket_creator(maker),
+      _crit(CriticalSectionWrapper::CreateCriticalSection()),
+      _critFilter(CriticalSectionWrapper::CreateCriticalSection()),
+      _critPacketCallback(CriticalSectionWrapper::CreateCriticalSection()),
+      _mgr(socket_manager),
+      _lastError(kNoSocketError),
+      _destPort(0),
+      _destPortRTCP(0),
+      _localPort(0),
+      _localPortRTCP(0),
+      _srcPort(0),
+      _srcPortRTCP(0),
+      _fromPort(0),
+      _fromPortRTCP(0),
+      _fromIP(),
+      _destIP(),
+      _localIP(),
+      _localMulticastIP(),
+      _ptrRtpSocket(NULL),
+      _ptrRtcpSocket(NULL),
+      _ptrSendRtpSocket(NULL),
+      _ptrSendRtcpSocket(NULL),
+      _remoteRTPAddr(),
+      _remoteRTCPAddr(),
+      _localRTPAddr(),
+      _localRTCPAddr(),
+      _tos(0),
+      _receiving(false),
+      _useSetSockOpt(false),
+      _qos(false),
+      _pcp(0),
+      _ipV6Enabled(false),
+      _serviceType(0),
+      _overrideDSCP(0),
+      _maxBitrate(0),
+      _cachLock(RWLockWrapper::CreateRWLock()),
+      _previousAddress(),
+      _previousIP(),
+      _previousIPSize(0),
+      _previousSourcePort(0),
+      _filterIPAddress(),
+      _rtpFilterPort(0),
+      _rtcpFilterPort(0),
+      _packetCallback(0)
+{
+    memset(&_remoteRTPAddr, 0, sizeof(_remoteRTPAddr));
+    memset(&_remoteRTCPAddr, 0, sizeof(_remoteRTCPAddr));
+    memset(&_localRTPAddr, 0, sizeof(_localRTPAddr));
+    memset(&_localRTCPAddr, 0, sizeof(_localRTCPAddr));
+
+    memset(_fromIP, 0, sizeof(_fromIP));
+    memset(_destIP, 0, sizeof(_destIP));
+    memset(_localIP, 0, sizeof(_localIP));
+    memset(_localMulticastIP, 0, sizeof(_localMulticastIP));
+
+    memset(&_filterIPAddress, 0, sizeof(_filterIPAddress));
+
+    WEBRTC_TRACE(kTraceMemory, kTraceTransport, id, "%s created", __FUNCTION__);
+}
+
+UdpTransportImpl::~UdpTransportImpl()
+{
+    CloseSendSockets();
+    CloseReceiveSockets();
+    delete _crit;
+    delete _critFilter;
+    delete _critPacketCallback;
+    delete _cachLock;
+    delete _socket_creator;
+
+    WEBRTC_TRACE(kTraceMemory, kTraceTransport, _id, "%s deleted",
+                 __FUNCTION__);
+}
+
+WebRtc_Word32 UdpTransportImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+
+    CriticalSectionScoped cs(_crit);
+    _id = id;
+    if(_mgr)
+    {
+        _mgr->ChangeUniqueId(id);
+    }
+    if(_ptrRtpSocket)
+    {
+        _ptrRtpSocket->ChangeUniqueId(id);
+    }
+    if(_ptrRtcpSocket)
+    {
+        _ptrRtcpSocket->ChangeUniqueId(id);
+    }
+    if(_ptrSendRtpSocket)
+    {
+        _ptrSendRtpSocket->ChangeUniqueId(id);
+    }
+    if(_ptrSendRtcpSocket)
+    {
+        _ptrSendRtcpSocket->ChangeUniqueId(id);
+    }
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::TimeUntilNextProcess()
+{
+    return 100;
+}
+
+WebRtc_Word32 UdpTransportImpl::Process()
+{
+    return 0;
+}
+
+UdpTransport::ErrorCode UdpTransportImpl::LastError() const
+{
+    return _lastError;
+}
+
+bool SameAddress(const SocketAddress& address1, const SocketAddress& address2)
+{
+    return (memcmp(&address1,&address2,sizeof(address1)) == 0);
+}
+
+void UdpTransportImpl::GetCachedAddress(char* ip,
+                                        WebRtc_UWord32& ipSize,
+                                        WebRtc_UWord16& sourcePort)
+{
+    const WebRtc_UWord32 originalIPSize = ipSize;
+    // If the incoming string is too small, fill it as much as there is room
+    // for. Make sure that there is room for the '\0' character.
+    ipSize = (ipSize - 1 < _previousIPSize) ? ipSize - 1 : _previousIPSize;
+    memcpy(ip,_previousIP,sizeof(WebRtc_Word8)*(ipSize + 1));
+    ip[originalIPSize - 1] = '\0';
+    sourcePort = _previousSourcePort;
+}
+
+WebRtc_Word32 UdpTransportImpl::IPAddressCached(const SocketAddress& address,
+                                                char* ip,
+                                                WebRtc_UWord32& ipSize,
+                                                WebRtc_UWord16& sourcePort)
+{
+    {
+        ReadLockScoped rl(*_cachLock);
+        // Check if the old address can be re-used (is the same).
+        if(SameAddress(address,_previousAddress))
+        {
+            GetCachedAddress(ip,ipSize,sourcePort);
+            return 0;
+        }
+    }
+    // Get the new address and store it.
+    WriteLockScoped wl(*_cachLock);
+    ipSize = kIpAddressVersion6Length;
+    if(IPAddress(address,_previousIP,ipSize,_previousSourcePort) != 0)
+    {
+        return -1;
+    }
+    _previousIPSize = ipSize;
+    memcpy(&_previousAddress, &address, sizeof(address));
+    // Address has been cached at this point.
+    GetCachedAddress(ip,ipSize,sourcePort);
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::InitializeReceiveSockets(
+    UdpTransportData* const packetCallback,
+    const WebRtc_UWord16 portnr,
+    const char* ip,
+    const char* multicastIpAddr,
+    const WebRtc_UWord16 rtcpPort)
+{
+
+    {
+        CriticalSectionScoped cs(_critPacketCallback);
+        _packetCallback = packetCallback;
+
+        if(packetCallback == NULL)
+        {
+            WEBRTC_TRACE(kTraceStateInfo, kTraceTransport, _id,
+                         "Closing down receive sockets");
+            return 0;
+        }
+    }
+
+    CriticalSectionScoped cs(_crit);
+    CloseReceiveSockets();
+
+    if(portnr == 0)
+    {
+        // TODO (hellner): why not just fail here?
+        if(_destPort == 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "InitializeReceiveSockets port 0 not allowed");
+            _lastError = kPortInvalid;
+            return -1;
+        }
+        _localPort = _destPort;
+    } else {
+        _localPort = portnr;
+    }
+    if(rtcpPort)
+    {
+        _localPortRTCP = rtcpPort;
+    }else {
+        _localPortRTCP = _localPort + 1;
+        WEBRTC_TRACE(
+            kTraceStateInfo,
+            kTraceTransport,
+            _id,
+            "InitializeReceiveSockets RTCP port not configured using RTP\
+ port+1=%d",
+            _localPortRTCP);
+    }
+
+    if(ip)
+    {
+        if(IsIpAddressValid(ip,IpV6Enabled()))
+        {
+            strncpy(_localIP, ip,kIpAddressVersion6Length);
+        } else
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "InitializeReceiveSockets invalid IP address");
+            _lastError = kIpAddressInvalid;
+            return -1;
+        }
+    }else
+    {
+        // Don't bind to a specific IP address.
+        if(! IpV6Enabled())
+        {
+            strncpy(_localIP, "0.0.0.0",16);
+        } else
+        {
+            strncpy(_localIP, "0000:0000:0000:0000:0000:0000:0000:0000",
+                    kIpAddressVersion6Length);
+        }
+    }
+    if(multicastIpAddr && !IpV6Enabled())
+    {
+        if(IsIpAddressValid(multicastIpAddr,IpV6Enabled()))
+        {
+            strncpy(_localMulticastIP, multicastIpAddr,
+                    kIpAddressVersion6Length);
+        } else
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "InitializeReceiveSockets invalid IP address");
+            _lastError =  kIpAddressInvalid;
+            return -1;
+        }
+    }
+    if(_mgr == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "InitializeReceiveSockets no socket manager");
+        return -1;
+    }
+
+    _useSetSockOpt=false;
+    _tos=0;
+    _pcp=0;
+
+    _ptrRtpSocket = _socket_creator->CreateSocket(_id, _mgr, this,
+                                    IncomingRTPCallback,
+                                    IpV6Enabled(), false);
+
+    _ptrRtcpSocket = _socket_creator->CreateSocket(_id, _mgr, this,
+                                     IncomingRTCPCallback,
+                                     IpV6Enabled(), false);
+
+    ErrorCode retVal = BindLocalRTPSocket();
+    if(retVal != kNoSocketError)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "InitializeReceiveSockets faild to bind RTP socket");
+        _lastError = retVal;
+        CloseReceiveSockets();
+        return -1;
+    }
+    retVal = BindLocalRTCPSocket();
+    if(retVal != kNoSocketError)
+    {
+        _lastError = retVal;
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "InitializeReceiveSockets faild to bind RTCP socket");
+        CloseReceiveSockets();
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::ReceiveSocketInformation(
+    char ipAddr[kIpAddressVersion6Length],
+    WebRtc_UWord16& rtpPort,
+    WebRtc_UWord16& rtcpPort,
+    char multicastIpAddr[kIpAddressVersion6Length]) const
+{
+    CriticalSectionScoped cs(_crit);
+    rtpPort = _localPort;
+    rtcpPort = _localPortRTCP;
+    if (ipAddr)
+    {
+        strncpy(ipAddr, _localIP, IpV6Enabled() ?
+                UdpTransport::kIpAddressVersion6Length :
+                UdpTransport::kIpAddressVersion4Length);
+    }
+    if (multicastIpAddr)
+    {
+        strncpy(multicastIpAddr, _localMulticastIP, IpV6Enabled() ?
+                UdpTransport::kIpAddressVersion6Length :
+                UdpTransport::kIpAddressVersion4Length);
+    }
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::SendSocketInformation(
+    char ipAddr[kIpAddressVersion6Length],
+    WebRtc_UWord16& rtpPort,
+    WebRtc_UWord16& rtcpPort) const
+{
+    CriticalSectionScoped cs(_crit);
+    rtpPort = _destPort;
+    rtcpPort = _destPortRTCP;
+    strncpy(ipAddr, _destIP, IpV6Enabled() ?
+            UdpTransport::kIpAddressVersion6Length :
+            UdpTransport::kIpAddressVersion4Length);
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::RemoteSocketInformation(
+    char ipAddr[kIpAddressVersion6Length],
+    WebRtc_UWord16& rtpPort,
+    WebRtc_UWord16& rtcpPort) const
+{
+    CriticalSectionScoped cs(_crit);
+    rtpPort = _fromPort;
+    rtcpPort = _fromPortRTCP;
+    if(ipAddr)
+    {
+        strncpy(ipAddr, _fromIP, IpV6Enabled() ?
+                kIpAddressVersion6Length :
+                kIpAddressVersion4Length);
+    }
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::FilterPorts(
+    WebRtc_UWord16& rtpFilterPort,
+    WebRtc_UWord16& rtcpFilterPort) const
+{
+    CriticalSectionScoped cs(_critFilter);
+    rtpFilterPort = _rtpFilterPort;
+    rtcpFilterPort = _rtcpFilterPort;
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::SetQoS(bool QoS, WebRtc_Word32 serviceType,
+                                       WebRtc_UWord32 maxBitrate,
+                                       WebRtc_Word32 overrideDSCP, bool audio)
+{
+    if(QoS)
+    {
+        return EnableQoS(serviceType, audio, maxBitrate, overrideDSCP);
+    }else
+    {
+        return DisableQoS();
+    }
+}
+
+WebRtc_Word32 UdpTransportImpl::EnableQoS(WebRtc_Word32 serviceType,
+                                          bool audio, WebRtc_UWord32 maxBitrate,
+                                          WebRtc_Word32 overrideDSCP)
+{
+    if (_ipV6Enabled)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "QOS is enabled but will be ignored since IPv6 is enabled");
+        _lastError = kQosError;
+        return -1;
+    }
+    if (_tos)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "TOS already enabled, can't use TOS and QoS at the same time");
+        _lastError = kQosError;
+        return -1;
+    }
+    if (_pcp)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "PCP already enabled, can't use PCP and QoS at the same time");
+        _lastError = kQosError;
+        return -1;
+    }
+    if(_destPort == 0)
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "QOS is enabled but not started since we have not yet configured\
+ the send destination");
+        return -1;
+    }
+    if(_qos)
+    {
+        if(_overrideDSCP == 0 && overrideDSCP != 0)
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                _id,
+                "QOS is already enabled and overrideDSCP differs, not allowed");
+            return -1;
+        }
+    }
+    CriticalSectionScoped cs(_crit);
+
+    UdpSocketWrapper* rtpSock = _ptrSendRtpSocket ?
+        _ptrSendRtpSocket :
+        _ptrRtpSocket;
+    if (!rtpSock || !rtpSock->ValidHandle())
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "QOS is enabled but not started since we have not yet created the\
+ RTP socket");
+        return -1;
+    }
+    UdpSocketWrapper* rtcpSock = _ptrSendRtcpSocket ?
+        _ptrSendRtcpSocket :
+        _ptrRtcpSocket;
+    if (!rtcpSock || !rtcpSock->ValidHandle())
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "QOS is enabled but not started since we have not yet created the\
+ RTCP socket");
+        return -1;
+    }
+
+    // Minimum packet size in bytes for which the requested quality of service
+    // will be provided. The smallest RTP header is 12 byte.
+    const WebRtc_Word32 min_policed_size = 12;
+    // Max SDU, maximum packet size permitted or used in the traffic flow, in
+    // bytes.
+    const WebRtc_Word32 max_sdu_size = 1500;
+
+    // Enable QoS for RTP sockets.
+    if(maxBitrate)
+    {
+        // Note: 1 kbit is 125 bytes.
+        // Token Rate is typically set to the average bit rate from peak to
+        // peak.
+        // Bucket size is normally set to the largest average frame size.
+        if(audio)
+        {
+            WEBRTC_TRACE(kTraceStateInfo,
+                         kTraceTransport,
+                         _id,
+                         "Enable QOS for audio with max bitrate:%d",
+                         maxBitrate);
+
+            const WebRtc_Word32 token_rate = maxBitrate*125;
+            // The largest audio packets are 60ms frames. This is a fraction
+            // more than 16 packets/second. These 16 frames are sent, at max,
+            // at a bitrate of maxBitrate*125 -> 1 frame is maxBitrate*125/16 ~
+            // maxBitrate * 8.
+            const WebRtc_Word32 bucket_size = maxBitrate * 8;
+            const WebRtc_Word32 peek_bandwith =  maxBitrate * 125;
+            if (!rtpSock->SetQos(serviceType, token_rate, bucket_size,
+                                 peek_bandwith, min_policed_size,
+                                 max_sdu_size, _remoteRTPAddr, overrideDSCP))
+            {
+                WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                             "QOS failed on the RTP socket");
+                _lastError = kQosError;
+                return -1;
+            }
+        }else
+        {
+            WEBRTC_TRACE(kTraceStateInfo, kTraceTransport, _id,
+                         "Enable QOS for video with max bitrate:%d",
+                         maxBitrate);
+
+            // Allow for a token rate that is twice that of the maximum bitrate
+            // (in bytes).
+            const WebRtc_Word32 token_rate = maxBitrate*250;
+            // largest average frame size (key frame size). Assuming that a
+            // keyframe is 25% of the bitrate during the second its sent
+            // Assume that a key frame is 25% of the bitrate the second that it
+            // is sent. The largest frame size is then maxBitrate* 125 * 0.25 ~
+            // 31.
+            const WebRtc_Word32 bucket_size = maxBitrate*31;
+            const WebRtc_Word32 peek_bandwith = maxBitrate*125;
+            if (!rtpSock->SetQos(serviceType, token_rate, bucket_size,
+                                peek_bandwith, min_policed_size, max_sdu_size,
+                                _remoteRTPAddr, overrideDSCP))
+            {
+                WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                             "QOS failed on the RTP socket");
+                _lastError = kQosError;
+                return -1;
+            }
+        }
+    } else if(audio)
+    {
+        // No max bitrate set. Audio.
+        WEBRTC_TRACE(kTraceStateInfo, kTraceTransport, _id,
+                     "Enable QOS for audio with default max bitrate");
+
+        // Let max bitrate be 240kbit/s.
+        const WebRtc_Word32 token_rate = 30000;
+        const WebRtc_Word32 bucket_size = 2000;
+        const WebRtc_Word32 peek_bandwith = 30000;
+        if (!rtpSock->SetQos(serviceType, token_rate, bucket_size,
+                             peek_bandwith, min_policed_size, max_sdu_size,
+                             _remoteRTPAddr, overrideDSCP))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "QOS failed on the RTP socket");
+            _lastError = kQosError;
+            return -1;
+        }
+    }else
+    {
+        // No max bitrate set. Video.
+        WEBRTC_TRACE(kTraceStateInfo, kTraceTransport, _id,
+                     "Enable QOS for video with default max bitrate");
+
+        // Let max bitrate be 10mbit/s.
+        const WebRtc_Word32 token_rate = 128000*10;
+        const WebRtc_Word32 bucket_size = 32000;
+        const WebRtc_Word32 peek_bandwith = 256000;
+        if (!rtpSock->SetQos(serviceType, token_rate, bucket_size,
+                             peek_bandwith, min_policed_size, max_sdu_size,
+                             _remoteRTPAddr, overrideDSCP))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "QOS failed on the RTP socket");
+            _lastError = kQosError;
+            return -1;
+        }
+    }
+
+    // Enable QoS for RTCP sockets.
+    // TODO (hellner): shouldn't RTCP be based on 5% of the maximum bandwidth?
+    if(audio)
+    {
+        const WebRtc_Word32 token_rate = 200;
+        const WebRtc_Word32 bucket_size = 200;
+        const WebRtc_Word32 peek_bandwith = 400;
+        if (!rtcpSock->SetQos(serviceType, token_rate, bucket_size,
+                              peek_bandwith, min_policed_size, max_sdu_size,
+                              _remoteRTCPAddr, overrideDSCP))
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceTransport, _id,
+                         "QOS failed on the RTCP socket");
+            _lastError = kQosError;
+        }
+    }else
+    {
+        const WebRtc_Word32 token_rate = 5000;
+        const WebRtc_Word32 bucket_size = 100;
+        const WebRtc_Word32 peek_bandwith = 10000;
+        if (!rtcpSock->SetQos(serviceType, token_rate, bucket_size,
+                              peek_bandwith, min_policed_size, max_sdu_size,
+                            _remoteRTCPAddr, _overrideDSCP))
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceTransport, _id,
+                         "QOS failed on the RTCP socket");
+            _lastError = kQosError;
+        }
+    }
+    _qos = true;
+    _serviceType = serviceType;
+    _maxBitrate = maxBitrate;
+    _overrideDSCP = overrideDSCP;
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::DisableQoS()
+{
+    if(_qos == false)
+    {
+        return 0;
+    }
+    CriticalSectionScoped cs(_crit);
+
+    UdpSocketWrapper* rtpSock = (_ptrSendRtpSocket ?
+                                 _ptrSendRtpSocket : _ptrRtpSocket);
+    if (!rtpSock || !rtpSock->ValidHandle())
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "QOS is enabled but not started since we have not yet created the\
+ RTP socket");
+        return -1;
+    }
+    UdpSocketWrapper* rtcpSock = (_ptrSendRtcpSocket ?
+                                  _ptrSendRtcpSocket : _ptrRtcpSocket);
+    if (!rtcpSock || !rtcpSock->ValidHandle())
+    {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceTransport,
+            _id,
+            "QOS is enabled but not started since we have not yet created the\
+ RTCP socket");
+        return -1;
+    }
+
+    const WebRtc_Word32 service_type = 0;   // = SERVICETYPE_NOTRAFFIC
+    const WebRtc_Word32 not_specified = -1;
+    if (!rtpSock->SetQos(service_type, not_specified, not_specified,
+                         not_specified, not_specified, not_specified,
+                         _remoteRTPAddr, _overrideDSCP))
+    {
+        _lastError = kQosError;
+        return -1;
+    }
+    if (!rtcpSock->SetQos(service_type, not_specified, not_specified,
+                         not_specified, not_specified, not_specified,
+                         _remoteRTCPAddr,_overrideDSCP))
+    {
+        _lastError = kQosError;
+    }
+    _qos = false;
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::QoS(bool& QoS, WebRtc_Word32& serviceType,
+                                    WebRtc_Word32& overrideDSCP) const
+{
+    CriticalSectionScoped cs(_crit);
+    QoS = _qos;
+    serviceType = _serviceType;
+    overrideDSCP = _overrideDSCP;
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::SetToS(WebRtc_Word32 DSCP, bool useSetSockOpt)
+{
+    if (_qos)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id, "QoS already enabled");
+        _lastError = kQosError;
+        return -1;
+    }
+    if (DSCP < 0 || DSCP > 63)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id, "Invalid DSCP");
+        _lastError = kTosInvalid;
+        return -1;
+    }
+    if(_tos)
+    {
+        if(useSetSockOpt != _useSetSockOpt)
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                _id,
+                "Can't switch SetSockOpt method without disabling TOS first");
+            _lastError = kTosInvalid;
+            return -1;
+        }
+    }
+    CriticalSectionScoped cs(_crit);
+    UdpSocketWrapper* rtpSock = NULL;
+    UdpSocketWrapper* rtcpSock = NULL;
+    if(_ptrSendRtpSocket)
+    {
+        rtpSock = _ptrSendRtpSocket;
+    }else
+    {
+        rtpSock = _ptrRtpSocket;
+    }
+    if (rtpSock == NULL)
+    {
+        _lastError = kSocketInvalid;
+        return -1;
+    }
+    if(!rtpSock->ValidHandle())
+    {
+        _lastError = kSocketInvalid;
+        return -1;
+    }
+    if(_ptrSendRtcpSocket)
+    {
+        rtcpSock = _ptrSendRtcpSocket;
+    }else
+    {
+        rtcpSock = _ptrRtcpSocket;
+    }
+    if (rtcpSock == NULL)
+    {
+        _lastError = kSocketInvalid;
+        return -1;
+    }
+    if(!rtcpSock->ValidHandle())
+    {
+        _lastError = kSocketInvalid;
+        return -1;
+    }
+
+    if (useSetSockOpt)
+    {
+#ifdef _WIN32
+        OSVERSIONINFO OsVersion;
+        OsVersion.dwOSVersionInfoSize = sizeof(OSVERSIONINFO);
+        GetVersionEx(&OsVersion);
+        // Disable QoS before setting ToS on Windows XP. This is done by closing
+        // and re-opening the sockets.
+        // TODO (hellner): why not just fail here and force the user to
+        //                 re-initialize sockets? Doing this may trick the user
+        //                 into thinking that the sockets are in a state which
+        //                 they aren't.
+        if (OsVersion.dwMajorVersion == 5 &&
+            OsVersion.dwMinorVersion == 1)
+        {
+            if(!_useSetSockOpt)
+            {
+                if(_ptrSendRtpSocket)
+                {
+                    CloseSendSockets();
+                    _ptrSendRtpSocket =
+                        _socket_creator->CreateSocket(_id, _mgr, NULL,
+                                        NULL, IpV6Enabled(),
+                                        true);
+                    _ptrSendRtcpSocket =
+                        _socket_creator->CreateSocket(_id, _mgr, NULL,
+                                        NULL, IpV6Enabled(),
+                                        true);
+                    rtpSock=_ptrSendRtpSocket;
+                    rtcpSock=_ptrSendRtcpSocket;
+                    ErrorCode retVal = BindRTPSendSocket();
+                    if(retVal != kNoSocketError)
+                    {
+                        _lastError = retVal;
+                        return -1;
+                    }
+                    retVal = BindRTCPSendSocket();
+                    if(retVal != kNoSocketError)
+                    {
+                        _lastError = retVal;
+                        return -1;
+                    }
+                }
+                else
+                {
+                    bool receiving=_receiving;
+                    WebRtc_UWord32 noOfReceiveBuffers = 0;
+                    if(receiving)
+                    {
+                        noOfReceiveBuffers=_ptrRtpSocket->ReceiveBuffers();
+                        if(StopReceiving()!=0)
+                        {
+                            return -1;
+                        }
+                    }
+                    CloseReceiveSockets();
+                    _ptrRtpSocket = _socket_creator->CreateSocket(
+                        _id, _mgr, this, IncomingRTPCallback, IpV6Enabled(),
+                        true);
+                    _ptrRtcpSocket = _socket_creator->CreateSocket(
+                        _id, _mgr, this, IncomingRTCPCallback, IpV6Enabled(),
+                        true);
+                    rtpSock=_ptrRtpSocket;
+                    rtcpSock=_ptrRtcpSocket;
+                    ErrorCode retVal = BindLocalRTPSocket();
+                    if(retVal != kNoSocketError)
+                    {
+                        _lastError = retVal;
+                        return -1;
+                    }
+                    retVal = BindLocalRTCPSocket();
+                    if(retVal != kNoSocketError)
+                    {
+                        _lastError = retVal;
+                        return -1;
+                    }
+                    if(receiving)
+                    {
+                        if(StartReceiving(noOfReceiveBuffers) !=
+                           kNoSocketError)
+                        {
+                            return -1;
+                        }
+                    }
+                }
+            }
+        }
+#endif // #ifdef _WIN32
+        WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                     "Setting TOS using SetSockopt");
+        WebRtc_Word32 TOSShifted = DSCP << 2;
+        if (!rtpSock->SetSockopt(IPPROTO_IP, IP_TOS,
+                                 (WebRtc_Word8*) &TOSShifted, 4))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "Could not SetSockopt tos value on RTP socket");
+            _lastError = kTosInvalid;
+            return -1;
+        }
+        if (!rtcpSock->SetSockopt(IPPROTO_IP, IP_TOS,
+                                  (WebRtc_Word8*) &TOSShifted, 4))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "Could not sSetSockopt tos value on RTCP socket");
+            _lastError = kTosInvalid;
+            return -1;
+        }
+    } else
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id,
+                     "Setting TOS NOT using SetSockopt");
+        if (rtpSock->SetTOS(DSCP) != 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "Could not set tos value on RTP socket");
+            _lastError = kTosError;
+            return -1;
+        }
+        if (rtcpSock->SetTOS(DSCP) != 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "Could not set tos value on RTCP socket");
+            _lastError = kTosError;
+            return -1;
+        }
+    }
+    _useSetSockOpt = useSetSockOpt;
+    _tos = DSCP;
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::ToS(WebRtc_Word32& DSCP,
+                                    bool& useSetSockOpt) const
+{
+    CriticalSectionScoped cs(_crit);
+    DSCP = _tos;
+    useSetSockOpt = _useSetSockOpt;
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::SetPCP(WebRtc_Word32 PCP)
+{
+
+    if (_qos)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id, "QoS already enabled");
+        _lastError = kQosError;
+        return -1;
+    }
+    if ((PCP < 0) || (PCP > 7))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id, "Invalid PCP");
+        _lastError = kPcpError;
+        return -1;
+    }
+
+    CriticalSectionScoped cs(_crit);
+    UdpSocketWrapper* rtpSock = NULL;
+    UdpSocketWrapper* rtcpSock = NULL;
+    if(_ptrSendRtpSocket)
+    {
+        rtpSock = _ptrSendRtpSocket;
+    }else
+    {
+        rtpSock = _ptrRtpSocket;
+    }
+    if (rtpSock == NULL)
+    {
+        _lastError = kSocketInvalid;
+        return -1;
+    }
+    if(!rtpSock->ValidHandle())
+    {
+        _lastError = kSocketInvalid;
+        return -1;
+    }
+    if(_ptrSendRtcpSocket)
+    {
+        rtcpSock = _ptrSendRtcpSocket;
+    }else
+    {
+        rtcpSock = _ptrRtcpSocket;
+    }
+    if (rtcpSock == NULL)
+    {
+        _lastError = kSocketInvalid;
+        return -1;
+    }
+    if(!rtcpSock->ValidHandle())
+    {
+        _lastError = kSocketInvalid;
+        return -1;
+    }
+
+#if defined(_WIN32)
+    if (rtpSock->SetPCP(PCP) != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "Could not set PCP value on RTP socket");
+        _lastError = kPcpError;
+        return -1;
+    }
+    if (rtcpSock->SetPCP(PCP) != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "Could not set PCP value on RTCP socket");
+        _lastError = kPcpError;
+        return -1;
+    }
+
+#elif defined(WEBRTC_LINUX)
+    if (!rtpSock->SetSockopt(SOL_SOCKET, SO_PRIORITY, (WebRtc_Word8*) &PCP,
+                             sizeof(PCP)))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "Could not SetSockopt PCP value on RTP socket");
+        _lastError = kPcpError;
+        return -1;
+    }
+    if (!rtcpSock->SetSockopt(SOL_SOCKET, SO_PRIORITY, (WebRtc_Word8*) &PCP,
+                              sizeof(PCP)))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "Could not SetSockopt PCP value on RTCP socket");
+        _lastError = kPcpError;
+        return -1;
+    }
+#else
+    // Not supported on other platforms (WEBRTC_MAC)
+    _lastError = kPcpError;
+    return -1;
+#endif
+    _pcp = PCP;
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::PCP(WebRtc_Word32& PCP) const
+{
+    CriticalSectionScoped cs(_crit);
+    PCP = _pcp;
+    return 0;
+}
+
+bool UdpTransportImpl::SetSockOptUsed()
+{
+    return _useSetSockOpt;
+}
+
+WebRtc_Word32 UdpTransportImpl::EnableIpV6() {
+
+  CriticalSectionScoped cs(_crit);
+  const bool initialized = (_ptrSendRtpSocket || _ptrRtpSocket);
+
+  if (_ipV6Enabled) {
+    return 0;
+  }
+  if (initialized) {
+    _lastError = kIpVersion6Error;
+    return -1;
+  }
+  _ipV6Enabled = true;
+  return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::FilterIP(
+    char filterIPAddress[kIpAddressVersion6Length]) const
+{
+
+    if(filterIPAddress == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "FilterIP: Invalid argument");
+        return -1;
+    }
+    if(_filterIPAddress._sockaddr_storage.sin_family == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id, "No Filter configured");
+        return -1;
+    }
+    CriticalSectionScoped cs(_critFilter);
+    WebRtc_UWord32 ipSize = kIpAddressVersion6Length;
+    WebRtc_UWord16 sourcePort;
+    return IPAddress(_filterIPAddress, filterIPAddress, ipSize, sourcePort);
+}
+
+WebRtc_Word32 UdpTransportImpl::SetFilterIP(
+    const char filterIPAddress[kIpAddressVersion6Length])
+{
+    if(filterIPAddress == NULL)
+    {
+        memset(&_filterIPAddress, 0, sizeof(_filterIPAddress));
+        WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id, "Filter IP reset");
+        return 0;
+    }
+    CriticalSectionScoped cs(_critFilter);
+    if (_ipV6Enabled)
+    {
+        _filterIPAddress._sockaddr_storage.sin_family = AF_INET6;
+
+        if (InetPresentationToNumeric(
+                AF_INET6,
+                filterIPAddress,
+                &_filterIPAddress._sockaddr_in6.sin6_addr) < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id, "Failed to set\
+ filter IP for IPv6");
+            _lastError = FILTER_ERROR;
+            return -1;
+        }
+    }
+    else
+    {
+        _filterIPAddress._sockaddr_storage.sin_family = AF_INET;
+
+        if(InetPresentationToNumeric(
+               AF_INET,
+               filterIPAddress,
+               &_filterIPAddress._sockaddr_in.sin_addr) < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "Failed to set filter IP for IPv4");
+            _lastError = FILTER_ERROR;
+            return -1;
+        }
+    }
+    WEBRTC_TRACE(kTraceDebug, kTraceTransport, _id, "Filter IP set");
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::SetFilterPorts(WebRtc_UWord16 rtpFilterPort,
+                                               WebRtc_UWord16 rtcpFilterPort)
+{
+    CriticalSectionScoped cs(_critFilter);
+    _rtpFilterPort = rtpFilterPort;
+    _rtcpFilterPort = rtcpFilterPort;
+    return 0;
+}
+
+bool UdpTransportImpl::SendSocketsInitialized() const
+{
+    CriticalSectionScoped cs(_crit);
+    if(_ptrSendRtpSocket)
+    {
+        return true;
+    }
+    if(_destPort !=0)
+    {
+        return true;
+    }
+    return false;
+}
+
+bool UdpTransportImpl::ReceiveSocketsInitialized() const
+{
+    if(_ptrRtpSocket)
+    {
+        return true;
+    }
+    return false;
+}
+
+bool UdpTransportImpl::SourcePortsInitialized() const
+{
+    if(_ptrSendRtpSocket)
+    {
+        return true;
+    }
+    return false;
+}
+
+bool UdpTransportImpl::IpV6Enabled() const
+{
+    WEBRTC_TRACE(kTraceStream, kTraceTransport, _id, "%s", __FUNCTION__);
+    return _ipV6Enabled;
+}
+
+void UdpTransportImpl::BuildRemoteRTPAddr()
+{
+    if(_ipV6Enabled)
+    {
+#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
+        _remoteRTPAddr.sin_length = 0;
+        _remoteRTPAddr.sin_family = PF_INET6;
+#else
+        _remoteRTPAddr._sockaddr_storage.sin_family = PF_INET6;
+#endif
+
+        _remoteRTPAddr._sockaddr_in6.sin6_flowinfo=0;
+        _remoteRTPAddr._sockaddr_in6.sin6_scope_id=0;
+        _remoteRTPAddr._sockaddr_in6.sin6_port = Htons(_destPort);
+        InetPresentationToNumeric(AF_INET6,_destIP,
+                                  &_remoteRTPAddr._sockaddr_in6.sin6_addr);
+    } else
+    {
+#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
+        _remoteRTPAddr.sin_length = 0;
+        _remoteRTPAddr.sin_family = PF_INET;
+#else
+        _remoteRTPAddr._sockaddr_storage.sin_family = PF_INET;
+#endif
+        _remoteRTPAddr._sockaddr_in.sin_port = Htons(_destPort);
+        _remoteRTPAddr._sockaddr_in.sin_addr = InetAddrIPV4(_destIP);
+    }
+}
+
+void UdpTransportImpl::BuildRemoteRTCPAddr()
+{
+    if(_ipV6Enabled)
+    {
+#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
+        _remoteRTCPAddr.sin_length = 0;
+        _remoteRTCPAddr.sin_family = PF_INET6;
+#else
+        _remoteRTCPAddr._sockaddr_storage.sin_family = PF_INET6;
+#endif
+
+        _remoteRTCPAddr._sockaddr_in6.sin6_flowinfo=0;
+        _remoteRTCPAddr._sockaddr_in6.sin6_scope_id=0;
+        _remoteRTCPAddr._sockaddr_in6.sin6_port = Htons(_destPortRTCP);
+        InetPresentationToNumeric(AF_INET6,_destIP,
+                                  &_remoteRTCPAddr._sockaddr_in6.sin6_addr);
+
+    } else
+    {
+#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
+        _remoteRTCPAddr.sin_length = 0;
+        _remoteRTCPAddr.sin_family = PF_INET;
+#else
+        _remoteRTCPAddr._sockaddr_storage.sin_family = PF_INET;
+#endif
+        _remoteRTCPAddr._sockaddr_in.sin_port = Htons(_destPortRTCP);
+        _remoteRTCPAddr._sockaddr_in.sin_addr= InetAddrIPV4(_destIP);
+    }
+}
+
+UdpTransportImpl::ErrorCode UdpTransportImpl::BindRTPSendSocket()
+{
+    if(!_ptrSendRtpSocket)
+    {
+        return kSocketInvalid;
+    }
+    if(!_ptrSendRtpSocket->ValidHandle())
+    {
+        return kIpAddressInvalid;
+    }
+    if(_ipV6Enabled)
+    {
+#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
+        _localRTPAddr.sin_length = 0;
+        _localRTPAddr.sin_family = PF_INET6;
+#else
+        _localRTPAddr._sockaddr_storage.sin_family = PF_INET6;
+#endif
+        _localRTPAddr._sockaddr_in6.sin6_flowinfo=0;
+        _localRTPAddr._sockaddr_in6.sin6_scope_id=0;
+        _localRTPAddr._sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u32[0] =
+            0; // = INADDR_ANY
+        _localRTPAddr._sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u32[1] =
+            0;
+        _localRTPAddr._sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u32[2] =
+            0;
+        _localRTPAddr._sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u32[3] =
+            0;
+        _localRTPAddr._sockaddr_in6.sin6_port = Htons(_srcPort);
+        if(_ptrSendRtpSocket->Bind(_localRTPAddr) == false)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceTransport, _id,
+                         "Failed to bind to port:%d ", _srcPort);
+            return kFailedToBindPort;
+        }
+    } else {
+#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
+        _localRTPAddr.sin_length = 0;
+        _localRTPAddr.sin_family = PF_INET;
+#else
+        _localRTPAddr._sockaddr_storage.sin_family = PF_INET;
+#endif
+        _localRTPAddr._sockaddr_in.sin_addr = 0;
+        _localRTPAddr._sockaddr_in.sin_port = Htons(_srcPort);
+        if(_ptrSendRtpSocket->Bind(_localRTPAddr) == false)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceTransport, _id,
+                         "Failed to bind to port:%d ", _srcPort);
+            return kFailedToBindPort;
+        }
+    }
+    return kNoSocketError;
+}
+
+UdpTransportImpl::ErrorCode UdpTransportImpl::BindRTCPSendSocket()
+{
+    if(!_ptrSendRtcpSocket)
+    {
+        return kSocketInvalid;
+    }
+
+    if(_ipV6Enabled)
+    {
+#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
+        _localRTCPAddr.sin_length = 0;
+        _localRTCPAddr.sin_family = PF_INET6;
+#else
+        _localRTCPAddr._sockaddr_storage.sin_family = PF_INET6;
+#endif
+        _localRTCPAddr._sockaddr_in6.sin6_flowinfo=0;
+        _localRTCPAddr._sockaddr_in6.sin6_scope_id=0;
+        _localRTCPAddr._sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u32[0] =
+            0; // = INADDR_ANY
+        _localRTCPAddr._sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u32[1] =
+            0;
+        _localRTCPAddr._sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u32[2] =
+            0;
+        _localRTCPAddr._sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u32[3] =
+            0;
+        _localRTCPAddr._sockaddr_in6.sin6_port = Htons(_srcPortRTCP);
+        if(_ptrSendRtcpSocket->Bind(_localRTCPAddr) == false)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceTransport, _id,
+                         "Failed to bind to port:%d ", _srcPortRTCP);
+            return kFailedToBindPort;
+        }
+    } else {
+#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
+        _localRTCPAddr.sin_length = 0;
+        _localRTCPAddr.sin_family = PF_INET;
+#else
+        _localRTCPAddr._sockaddr_storage.sin_family = PF_INET;
+#endif
+        _localRTCPAddr._sockaddr_in.sin_addr= 0;
+        _localRTCPAddr._sockaddr_in.sin_port = Htons(_srcPortRTCP);
+        if(_ptrSendRtcpSocket->Bind(_localRTCPAddr) == false)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceTransport, _id,
+                         "Failed to bind to port:%d ", _srcPortRTCP);
+            return kFailedToBindPort;
+        }
+    }
+    return kNoSocketError;
+}
+
+UdpTransportImpl::ErrorCode UdpTransportImpl::BindLocalRTPSocket()
+{
+    if(!_ptrRtpSocket)
+    {
+        return kSocketInvalid;
+    }
+    if(!IpV6Enabled())
+    {
+        SocketAddress recAddr;
+        memset(&recAddr, 0, sizeof(SocketAddress));
+        recAddr._sockaddr_storage.sin_family = AF_INET;
+#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
+        recAddr.sin_length = 0;
+        recAddr.sin_family = PF_INET;
+#else
+        recAddr._sockaddr_storage.sin_family = PF_INET;
+#endif
+        recAddr._sockaddr_in.sin_addr = InetAddrIPV4(_localIP);
+        recAddr._sockaddr_in.sin_port = Htons(_localPort);
+
+        if (!_ptrRtpSocket->Bind(recAddr))
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceTransport, _id,
+                         "Failed to bind to port:%d ", _localPort);
+            return kFailedToBindPort;
+        }
+    }
+    else
+    {
+        SocketAddress stLclName;
+#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
+        stLclName.sin_lenght = 0;
+        stLclName.sin_family = PF_INET6;
+#else
+        stLclName._sockaddr_storage.sin_family = PF_INET6;
+#endif
+        InetPresentationToNumeric(AF_INET6,_localIP,
+                                  &stLclName._sockaddr_in6.sin6_addr);
+        stLclName._sockaddr_in6.sin6_port = Htons(_localPort);
+        stLclName._sockaddr_in6.sin6_flowinfo = 0;
+        stLclName._sockaddr_in6.sin6_scope_id = 0;
+
+        if (!_ptrRtpSocket->Bind(stLclName))
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceTransport, _id,
+                         "Failed to bind to port:%d ", _localPort);
+            return kFailedToBindPort;
+        }
+    }
+
+    if(_localMulticastIP[0] != 0)
+    {
+        // Join the multicast group from which to receive datagrams.
+        struct ip_mreq mreq;
+        mreq.imr_multiaddr.s_addr = InetAddrIPV4(_localMulticastIP);
+        mreq.imr_interface.s_addr = INADDR_ANY;
+
+        if (!_ptrRtpSocket->SetSockopt(IPPROTO_IP,IP_ADD_MEMBERSHIP,
+                                       (WebRtc_Word8*)&mreq,sizeof (mreq)))
+        {
+           WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                _id,
+                "setsockopt() for multicast failed, not closing socket");
+        }else
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceTransport, _id,
+                         "multicast group successfully joined");
+        }
+    }
+    return kNoSocketError;
+}
+
+UdpTransportImpl::ErrorCode UdpTransportImpl::BindLocalRTCPSocket()
+{
+    if(!_ptrRtcpSocket)
+    {
+        return kSocketInvalid;
+    }
+    if(! IpV6Enabled())
+    {
+        SocketAddress recAddr;
+        memset(&recAddr, 0, sizeof(SocketAddress));
+#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
+        recAddr.sin_length = 0;
+        recAddr.sin_family = AF_INET;
+#else
+        recAddr._sockaddr_storage.sin_family = AF_INET;
+#endif
+        recAddr._sockaddr_in.sin_addr = InetAddrIPV4(_localIP);
+        recAddr._sockaddr_in.sin_port = Htons(_localPortRTCP);
+
+        if (!_ptrRtcpSocket->Bind(recAddr))
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceTransport, _id,
+                         "Failed to bind to port:%d ", _localPortRTCP);
+            return kFailedToBindPort;
+        }
+    }
+    else
+    {
+        SocketAddress stLclName;
+#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
+        stLclName.sin_length = 0;
+        stLclName.sin_family = PF_INET6;
+#else
+        stLclName._sockaddr_storage.sin_family = PF_INET6;
+#endif
+        stLclName._sockaddr_in6.sin6_flowinfo = 0;
+        stLclName._sockaddr_in6.sin6_scope_id = 0;
+        stLclName._sockaddr_in6.sin6_port = Htons(_localPortRTCP);
+
+        InetPresentationToNumeric(AF_INET6,_localIP,
+                                  &stLclName._sockaddr_in6.sin6_addr);
+        if (!_ptrRtcpSocket->Bind(stLclName))
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceTransport, _id,
+                         "Failed to bind to port:%d ", _localPortRTCP);
+            return kFailedToBindPort;
+        }
+    }
+    if(_localMulticastIP[0] != 0)
+    {
+        // Join the multicast group from which to receive datagrams.
+        struct ip_mreq mreq;
+        mreq.imr_multiaddr.s_addr = InetAddrIPV4(_localMulticastIP);
+        mreq.imr_interface.s_addr = INADDR_ANY;
+
+        if (!_ptrRtcpSocket->SetSockopt(IPPROTO_IP,IP_ADD_MEMBERSHIP,
+                                        (WebRtc_Word8*)&mreq,sizeof (mreq)))
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                _id,
+                "setsockopt() for multicast failed, not closing socket");
+        }else
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceTransport, _id,
+                         "multicast group successfully joined");
+        }
+    }
+    return kNoSocketError;
+}
+
+WebRtc_Word32 UdpTransportImpl::InitializeSourcePorts(WebRtc_UWord16 rtpPort,
+                                                      WebRtc_UWord16 rtcpPort)
+{
+
+    if(rtpPort == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "InitializeSourcePorts port 0 not allowed");
+        _lastError = kPortInvalid;
+        return -1;
+    }
+
+    CriticalSectionScoped cs(_crit);
+
+    CloseSendSockets();
+
+    if(_mgr == NULL)
+    {
+        return -1;
+    }
+
+    _srcPort = rtpPort;
+    if(rtcpPort == 0)
+    {
+        _srcPortRTCP = rtpPort+1;
+    } else
+    {
+        _srcPortRTCP = rtcpPort;
+    }
+    _useSetSockOpt =false;
+    _tos=0;
+    _pcp=0;
+
+    _ptrSendRtpSocket = _socket_creator->CreateSocket(_id, _mgr, NULL, NULL,
+                                        IpV6Enabled(), false);
+    _ptrSendRtcpSocket = _socket_creator->CreateSocket(_id, _mgr, NULL, NULL,
+                                         IpV6Enabled(), false);
+
+    ErrorCode retVal = BindRTPSendSocket();
+    if(retVal != kNoSocketError)
+    {
+        _lastError = retVal;
+        return -1;
+    }
+    retVal = BindRTCPSendSocket();
+    if(retVal != kNoSocketError)
+    {
+        _lastError = retVal;
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::SourcePorts(WebRtc_UWord16& rtpPort,
+                                            WebRtc_UWord16& rtcpPort) const
+{
+    CriticalSectionScoped cs(_crit);
+
+    rtpPort  = (_srcPort != 0) ? _srcPort : _localPort;
+    rtcpPort = (_srcPortRTCP != 0) ? _srcPortRTCP : _localPortRTCP;
+    return 0;
+}
+
+
+#ifdef _WIN32
+WebRtc_Word32 UdpTransportImpl::StartReceiving(
+    WebRtc_UWord32 numberOfSocketBuffers)
+#else
+WebRtc_Word32 UdpTransportImpl::StartReceiving(
+    WebRtc_UWord32 /*numberOfSocketBuffers*/)
+#endif
+{
+    CriticalSectionScoped cs(_crit);
+    if(_receiving)
+    {
+        return 0;
+    }
+    if(_ptrRtpSocket)
+    {
+#ifdef _WIN32
+        if(!_ptrRtpSocket->StartReceiving(numberOfSocketBuffers))
+#else
+        if(!_ptrRtpSocket->StartReceiving())
+#endif
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "Failed to start receive on RTP socket");
+            _lastError = kStartReceiveError;
+            return -1;
+        }
+    }
+    if(_ptrRtcpSocket)
+    {
+        if(!_ptrRtcpSocket->StartReceiving())
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "Failed to start receive on RTCP socket");
+            _lastError = kStartReceiveError;
+            return -1;
+        }
+    }
+    if( _ptrRtpSocket == NULL &&
+        _ptrRtcpSocket == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                     "Failed to StartReceiving, no socket initialized");
+        _lastError = kStartReceiveError;
+        return -1;
+    }
+    _receiving = true;
+    return 0;
+}
+
+bool UdpTransportImpl::Receiving() const
+{
+   return _receiving;
+}
+
+WebRtc_Word32 UdpTransportImpl::StopReceiving()
+{
+
+    CriticalSectionScoped cs(_crit);
+
+    _receiving = false;
+
+    if (_ptrRtpSocket)
+    {
+        if (!_ptrRtpSocket->StopReceiving())
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "Failed to stop receiving on RTP socket");
+            _lastError = kStopReceiveError;
+            return -1;
+        }
+    }
+    if (_ptrRtcpSocket)
+    {
+        if (!_ptrRtcpSocket->StopReceiving())
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "Failed to stop receiving on RTCP socket");
+            _lastError = kStopReceiveError;
+            return -1;
+        }
+    }
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::InitializeSendSockets(
+    const char* ipaddr,
+    const WebRtc_UWord16 rtpPort,
+    const WebRtc_UWord16 rtcpPort)
+{
+    {
+        CriticalSectionScoped cs(_crit);
+        _destPort = rtpPort;
+        if(rtcpPort == 0)
+        {
+            _destPortRTCP = _destPort+1;
+        } else
+        {
+            _destPortRTCP = rtcpPort;
+        }
+
+        if(ipaddr == NULL)
+        {
+            if (!IsIpAddressValid(_destIP, IpV6Enabled()))
+            {
+                _destPort = 0;
+                _destPortRTCP = 0;
+                _lastError = kIpAddressInvalid;
+                return -1;
+            }
+        } else
+        {
+            if (IsIpAddressValid(ipaddr, IpV6Enabled()))
+            {
+                strncpy(
+                    _destIP,
+                    ipaddr,
+                    IpV6Enabled() ? kIpAddressVersion6Length :
+                    kIpAddressVersion4Length);
+            } else {
+                _destPort = 0;
+                _destPortRTCP = 0;
+                _lastError = kIpAddressInvalid;
+                return -1;
+            }
+        }
+        BuildRemoteRTPAddr();
+        BuildRemoteRTCPAddr();
+    }
+
+    if (_ipV6Enabled)
+    {
+        if (_qos)
+        {
+            WEBRTC_TRACE(
+                kTraceWarning,
+                kTraceTransport,
+                _id,
+                "QOS is enabled but will be ignored since IPv6 is enabled");
+        }
+    }else
+    {
+        // TODO (grunell): Multicast support is experimantal.
+
+        // Put the first digit of the remote address in val.
+        WebRtc_Word32 val = ntohl(_remoteRTPAddr._sockaddr_in.sin_addr)>> 24;
+
+        if((val > 223) && (val < 240))
+        {
+            // Multicast address.
+            CriticalSectionScoped cs(_crit);
+
+            UdpSocketWrapper* rtpSock = (_ptrSendRtpSocket ?
+                                         _ptrSendRtpSocket : _ptrRtpSocket);
+            if (!rtpSock || !rtpSock->ValidHandle())
+            {
+                _lastError = kSocketInvalid;
+                return -1;
+            }
+            UdpSocketWrapper* rtcpSock = (_ptrSendRtcpSocket ?
+                                          _ptrSendRtcpSocket : _ptrRtcpSocket);
+            if (!rtcpSock || !rtcpSock->ValidHandle())
+            {
+                _lastError = kSocketInvalid;
+                return -1;
+            }
+
+            // Set Time To Live to same region
+            WebRtc_Word32 iOptVal = 64;
+            if (!rtpSock->SetSockopt(IPPROTO_IP, IP_MULTICAST_TTL,
+                                     (WebRtc_Word8*)&iOptVal,
+                                     sizeof (WebRtc_Word32)))
+            {
+                WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                             "setsockopt for multicast error on RTP socket");
+                _ptrRtpSocket->CloseBlocking();
+                _ptrRtpSocket = NULL;
+                _lastError = kMulticastAddressInvalid;
+                return -1;
+            }
+            if (!rtcpSock->SetSockopt(IPPROTO_IP, IP_MULTICAST_TTL,
+                                      (WebRtc_Word8*)&iOptVal,
+                                      sizeof (WebRtc_Word32)))
+            {
+                WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                             "setsockopt for multicast error on RTCP socket");
+                _ptrRtpSocket->CloseBlocking();
+                _ptrRtpSocket = NULL;
+                _lastError = kMulticastAddressInvalid;
+                return -1;
+            }
+        }
+    }
+    return 0;
+}
+
+void UdpTransportImpl::BuildSockaddrIn(WebRtc_UWord16 portnr,
+                                       const char* ip,
+                                       SocketAddress& remoteAddr) const
+{
+    if(_ipV6Enabled)
+    {
+#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
+        remoteAddr.sin_length = 0;
+        remoteAddr.sin_family = PF_INET6;
+#else
+        remoteAddr._sockaddr_storage.sin_family = PF_INET6;
+#endif
+        remoteAddr._sockaddr_in6.sin6_port = Htons(portnr);
+        InetPresentationToNumeric(AF_INET6, ip,
+                                  &remoteAddr._sockaddr_in6.sin6_addr);
+        remoteAddr._sockaddr_in6.sin6_flowinfo=0;
+        remoteAddr._sockaddr_in6.sin6_scope_id=0;
+    } else
+    {
+#ifdef HAVE_STRUCT_SOCKADDR_SA_LEN
+        remoteAddr.sin_length = 0;
+        remoteAddr.sin_family = PF_INET;
+#else
+        remoteAddr._sockaddr_storage.sin_family = PF_INET;
+#endif
+        remoteAddr._sockaddr_in.sin_port = Htons(portnr);
+        remoteAddr._sockaddr_in.sin_addr= InetAddrIPV4(
+            const_cast<char*>(ip));
+    }
+}
+
+WebRtc_Word32 UdpTransportImpl::SendRaw(const WebRtc_Word8 *data,
+                                        WebRtc_UWord32 length,
+                                        WebRtc_Word32 isRTCP,
+                                        WebRtc_UWord16 portnr,
+                                        const char* ip)
+{
+    CriticalSectionScoped cs(_crit);
+    if(isRTCP)
+    {
+        UdpSocketWrapper* rtcpSock = NULL;
+        if(_ptrSendRtcpSocket)
+        {
+            rtcpSock = _ptrSendRtcpSocket;
+        } else if(_ptrRtcpSocket)
+        {
+            rtcpSock = _ptrRtcpSocket;
+        } else
+        {
+            return -1;
+        }
+        if(portnr == 0 && ip == NULL)
+        {
+            return rtcpSock->SendTo(data,length,_remoteRTCPAddr);
+
+        } else if(portnr != 0 && ip != NULL)
+        {
+            SocketAddress remoteAddr;
+            BuildSockaddrIn(portnr, ip, remoteAddr);
+            return rtcpSock->SendTo(data,length,remoteAddr);
+        } else if(ip != NULL)
+        {
+            SocketAddress remoteAddr;
+            BuildSockaddrIn(_destPortRTCP, ip, remoteAddr);
+            return rtcpSock->SendTo(data,length,remoteAddr);
+        } else
+        {
+            SocketAddress remoteAddr;
+            BuildSockaddrIn(portnr, _destIP, remoteAddr);
+            return rtcpSock->SendTo(data,length,remoteAddr);
+        }
+    } else {
+        UdpSocketWrapper* rtpSock = NULL;
+        if(_ptrSendRtpSocket)
+        {
+            rtpSock = _ptrSendRtpSocket;
+
+        } else if(_ptrRtpSocket)
+        {
+            rtpSock = _ptrRtpSocket;
+        } else
+        {
+            return -1;
+        }
+        if(portnr == 0 && ip == NULL)
+        {
+            return rtpSock->SendTo(data,length,_remoteRTPAddr);
+
+        } else if(portnr != 0 && ip != NULL)
+        {
+            SocketAddress remoteAddr;
+            BuildSockaddrIn(portnr, ip, remoteAddr);
+            return rtpSock->SendTo(data,length,remoteAddr);
+        } else if(ip != NULL)
+        {
+            SocketAddress remoteAddr;
+            BuildSockaddrIn(_destPort, ip, remoteAddr);
+            return rtpSock->SendTo(data,length,remoteAddr);
+        } else
+        {
+            SocketAddress remoteAddr;
+            BuildSockaddrIn(portnr, _destIP, remoteAddr);
+            return rtpSock->SendTo(data,length,remoteAddr);
+        }
+    }
+}
+
+WebRtc_Word32 UdpTransportImpl::SendRTPPacketTo(const WebRtc_Word8* data,
+                                                WebRtc_UWord32 length,
+                                                const SocketAddress& to)
+{
+    CriticalSectionScoped cs(_crit);
+    if(_ptrSendRtpSocket)
+    {
+        return _ptrSendRtpSocket->SendTo(data,length,to);
+
+    } else if(_ptrRtpSocket)
+    {
+        return _ptrRtpSocket->SendTo(data,length,to);
+    }
+    return -1;
+}
+
+WebRtc_Word32 UdpTransportImpl::SendRTCPPacketTo(const WebRtc_Word8* data,
+                                                 WebRtc_UWord32 length,
+                                                 const SocketAddress& to)
+{
+
+    CriticalSectionScoped cs(_crit);
+
+    if(_ptrSendRtcpSocket)
+    {
+        return _ptrSendRtcpSocket->SendTo(data,length,to);
+
+    } else if(_ptrRtcpSocket)
+    {
+        return _ptrRtcpSocket->SendTo(data,length,to);
+    }
+    return -1;
+}
+
+WebRtc_Word32 UdpTransportImpl::SendRTPPacketTo(const WebRtc_Word8* data,
+                                                WebRtc_UWord32 length,
+                                                const WebRtc_UWord16 rtpPort)
+{
+
+    CriticalSectionScoped cs(_crit);
+    // Use the current SocketAdress but update it with rtpPort.
+    SocketAddress to;
+    memcpy(&to, &_remoteRTPAddr, sizeof(SocketAddress));
+
+    if(_ipV6Enabled)
+    {
+        to._sockaddr_in6.sin6_port = Htons(rtpPort);
+    } else
+    {
+        to._sockaddr_in.sin_port = Htons(rtpPort);
+    }
+
+    if(_ptrSendRtpSocket)
+    {
+        return _ptrSendRtpSocket->SendTo(data,length,to);
+
+    } else if(_ptrRtpSocket)
+    {
+        return _ptrRtpSocket->SendTo(data,length,to);
+    }
+    return -1;
+}
+
+WebRtc_Word32 UdpTransportImpl::SendRTCPPacketTo(const WebRtc_Word8* data,
+                                                 WebRtc_UWord32 length,
+                                                 const WebRtc_UWord16 rtcpPort)
+{
+    CriticalSectionScoped cs(_crit);
+
+    // Use the current SocketAdress but update it with rtcpPort.
+    SocketAddress to;
+    memcpy(&to, &_remoteRTCPAddr, sizeof(SocketAddress));
+
+    if(_ipV6Enabled)
+    {
+        to._sockaddr_in6.sin6_port = Htons(rtcpPort);
+    } else
+    {
+        to._sockaddr_in.sin_port = Htons(rtcpPort);
+    }
+
+    if(_ptrSendRtcpSocket)
+    {
+        return _ptrSendRtcpSocket->SendTo(data,length,to);
+
+    } else if(_ptrRtcpSocket)
+    {
+        return _ptrRtcpSocket->SendTo(data,length,to);
+    }
+    return -1;
+}
+
+int UdpTransportImpl::SendPacket(int /*channel*/, const void* data, int length)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceTransport, _id, "%s", __FUNCTION__);
+
+    CriticalSectionScoped cs(_crit);
+
+    if(_destIP[0] == 0)
+    {
+        return -1;
+    }
+    if(_destPort == 0)
+    {
+        return -1;
+    }
+
+    // Create socket if it hasn't been set up already.
+    // TODO (hellner): why not fail here instead. Sockets not being initialized
+    //                 indicates that there is a problem somewhere.
+    if( _ptrSendRtpSocket == NULL &&
+        _ptrRtpSocket == NULL)
+    {
+        WEBRTC_TRACE(
+            kTraceStateInfo,
+            kTraceTransport,
+            _id,
+            "Creating RTP socket since no receive or source socket is\
+ configured");
+
+        _ptrRtpSocket = _socket_creator->CreateSocket(_id, _mgr, this,
+                                        IncomingRTPCallback,
+                                        IpV6Enabled(), false);
+
+        // Don't bind to a specific IP address.
+        if(! IpV6Enabled())
+        {
+            strncpy(_localIP, "0.0.0.0",16);
+        } else
+        {
+            strncpy(_localIP, "0000:0000:0000:0000:0000:0000:0000:0000",
+                    kIpAddressVersion6Length);
+        }
+        _localPort = _destPort;
+
+        ErrorCode retVal = BindLocalRTPSocket();
+        if(retVal != kNoSocketError)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "SendPacket() failed to bind RTP socket");
+            _lastError = retVal;
+            CloseReceiveSockets();
+            return -1;
+        }
+    }
+
+    if(_ptrSendRtpSocket)
+    {
+        return _ptrSendRtpSocket->SendTo((const WebRtc_Word8*)data, length,
+                                         _remoteRTPAddr);
+
+    } else if(_ptrRtpSocket)
+    {
+        return _ptrRtpSocket->SendTo((const WebRtc_Word8*)data, length,
+                                     _remoteRTPAddr);
+    }
+    return -1;
+}
+
+int UdpTransportImpl::SendRTCPPacket(int /*channel*/, const void* data,
+                                     int length)
+{
+
+    CriticalSectionScoped cs(_crit);
+    if(_destIP[0] == 0)
+    {
+        return -1;
+    }
+    if(_destPortRTCP == 0)
+    {
+        return -1;
+    }
+
+    // Create socket if it hasn't been set up already.
+    // TODO (hellner): why not fail here instead. Sockets not being initialized
+    //                 indicates that there is a problem somewhere.
+    if( _ptrSendRtcpSocket == NULL &&
+        _ptrRtcpSocket == NULL)
+    {
+        WEBRTC_TRACE(
+            kTraceStateInfo,
+            kTraceTransport,
+            _id,
+            "Creating RTCP socket since no receive or source socket is\
+ configured");
+
+        _ptrRtcpSocket = _socket_creator->CreateSocket(_id, _mgr, this,
+                                         IncomingRTCPCallback,
+                                         IpV6Enabled(), false);
+
+        // Don't bind to a specific IP address.
+        if(! IpV6Enabled())
+        {
+            strncpy(_localIP, "0.0.0.0",16);
+        } else
+        {
+            strncpy(_localIP, "0000:0000:0000:0000:0000:0000:0000:0000",
+                    kIpAddressVersion6Length);
+        }
+        _localPortRTCP = _destPortRTCP;
+
+        ErrorCode retVal = BindLocalRTCPSocket();
+        if(retVal != kNoSocketError)
+        {
+            _lastError = retVal;
+            WEBRTC_TRACE(kTraceError, kTraceTransport, _id,
+                         "SendRTCPPacket() failed to bind RTCP socket");
+            CloseReceiveSockets();
+            return -1;
+        }
+    }
+
+    if(_ptrSendRtcpSocket)
+    {
+        return _ptrSendRtcpSocket->SendTo((const WebRtc_Word8*)data, length,
+                                          _remoteRTCPAddr);
+    } else if(_ptrRtcpSocket)
+    {
+        return _ptrRtcpSocket->SendTo((const WebRtc_Word8*)data, length,
+                                      _remoteRTCPAddr);
+    }
+    return -1;
+}
+
+WebRtc_Word32 UdpTransportImpl::SetSendIP(const char* ipaddr)
+{
+    if(!IsIpAddressValid(ipaddr,IpV6Enabled()))
+    {
+        return kIpAddressInvalid;
+    }
+    CriticalSectionScoped cs(_crit);
+    strncpy(_destIP, ipaddr,kIpAddressVersion6Length);
+    BuildRemoteRTPAddr();
+    BuildRemoteRTCPAddr();
+    return 0;
+}
+
+WebRtc_Word32 UdpTransportImpl::SetSendPorts(WebRtc_UWord16 rtpPort,
+                                             WebRtc_UWord16 rtcpPort)
+{
+    CriticalSectionScoped cs(_crit);
+    _destPort = rtpPort;
+    if(rtcpPort == 0)
+    {
+        _destPortRTCP = _destPort+1;
+    } else
+    {
+        _destPortRTCP = rtcpPort;
+    }
+    BuildRemoteRTPAddr();
+    BuildRemoteRTCPAddr();
+    return 0;
+}
+
+void UdpTransportImpl::IncomingRTPCallback(CallbackObj obj,
+                                           const WebRtc_Word8* rtpPacket,
+                                           WebRtc_Word32 rtpPacketLength,
+                                           const SocketAddress* from)
+{
+    if (rtpPacket && rtpPacketLength > 0)
+    {
+        UdpTransportImpl* socketTransport = (UdpTransportImpl*) obj;
+        socketTransport->IncomingRTPFunction(rtpPacket, rtpPacketLength, from);
+    }
+}
+
+void UdpTransportImpl::IncomingRTCPCallback(CallbackObj obj,
+                                            const WebRtc_Word8* rtcpPacket,
+                                            WebRtc_Word32 rtcpPacketLength,
+                                            const SocketAddress* from)
+{
+    if (rtcpPacket && rtcpPacketLength > 0)
+    {
+        UdpTransportImpl* socketTransport = (UdpTransportImpl*) obj;
+        socketTransport->IncomingRTCPFunction(rtcpPacket, rtcpPacketLength,
+                                              from);
+    }
+}
+
+void UdpTransportImpl::IncomingRTPFunction(const WebRtc_Word8* rtpPacket,
+                                           WebRtc_Word32 rtpPacketLength,
+                                           const SocketAddress* fromSocket)
+{
+    char ipAddress[kIpAddressVersion6Length];
+    WebRtc_UWord32 ipAddressLength = kIpAddressVersion6Length;
+    WebRtc_UWord16 portNr = 0;
+
+    {
+        CriticalSectionScoped cs(_critFilter);
+        if (FilterIPAddress(fromSocket) == false)
+        {
+            // Packet should be filtered out. Drop it.
+            WEBRTC_TRACE(kTraceStream, kTraceTransport, _id,
+                         "Incoming RTP packet blocked by IP filter");
+            return;
+        }
+
+        if (IPAddressCached(*fromSocket, ipAddress, ipAddressLength, portNr) <
+            0)
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                _id,
+                "UdpTransportImpl::IncomingRTPFunction - Cannot get sender\
+ information");
+        }else
+        {
+            // Make sure ipAddress is null terminated.
+            ipAddress[kIpAddressVersion6Length - 1] = 0;
+            strncpy(_fromIP, ipAddress, kIpAddressVersion6Length - 1);
+        }
+
+        // Filter based on port.
+        if (_rtpFilterPort != 0 &&
+            _rtpFilterPort != portNr)
+        {
+            // Drop packet.
+            memset(_fromIP, 0, sizeof(_fromIP));
+            WEBRTC_TRACE(
+                kTraceStream,
+                kTraceTransport,
+                _id,
+                "Incoming RTP packet blocked by filter incoming from port:%d\
+ allowed port:%d",
+                portNr,
+                _rtpFilterPort);
+            return;
+        }
+        _fromPort = portNr;
+    }
+
+    CriticalSectionScoped cs(_critPacketCallback);
+    if (_packetCallback)
+    {
+        WEBRTC_TRACE(kTraceStream, kTraceTransport, _id,
+            "Incoming RTP packet from ip:%s port:%d", ipAddress, portNr);
+        _packetCallback->IncomingRTPPacket(rtpPacket, rtpPacketLength,
+                                           ipAddress, portNr);
+    }
+}
+
+void UdpTransportImpl::IncomingRTCPFunction(const WebRtc_Word8* rtcpPacket,
+                                            WebRtc_Word32 rtcpPacketLength,
+                                            const SocketAddress* fromSocket)
+{
+    char ipAddress[kIpAddressVersion6Length];
+    WebRtc_UWord32 ipAddressLength = kIpAddressVersion6Length;
+    WebRtc_UWord16 portNr = 0;
+
+    {
+        CriticalSectionScoped cs(_critFilter);
+        if (FilterIPAddress(fromSocket) == false)
+        {
+            // Packet should be filtered out. Drop it.
+            WEBRTC_TRACE(kTraceStream, kTraceTransport, _id,
+                         "Incoming RTCP packet blocked by IP filter");
+            return;
+        }
+        if (IPAddress(*fromSocket, ipAddress, ipAddressLength, portNr) < 0)
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                _id,
+                "UdpTransportImpl::IncomingRTCPFunction - Cannot get sender\
+ information");
+        }else {
+            // Make sure ipAddress is null terminated.
+            ipAddress[kIpAddressVersion6Length - 1] = 0;
+            strncpy(_fromIP, ipAddress, kIpAddressVersion6Length - 1);
+        }
+
+        // Filter based on port.
+        if (_rtcpFilterPort != 0 &&
+            _rtcpFilterPort != portNr)
+        {
+            // Drop packet.
+            WEBRTC_TRACE(
+                kTraceStream,
+                kTraceTransport,
+                _id,
+                "Incoming RTCP packet blocked by filter incoming from port:%d\
+ allowed port:%d",
+                portNr,
+                _rtpFilterPort);
+            return;
+        }
+        _fromPortRTCP = portNr;
+    }
+
+    CriticalSectionScoped cs(_critPacketCallback);
+    if (_packetCallback)
+    {
+        WEBRTC_TRACE(kTraceStream, kTraceTransport, _id,
+                     "Incoming RTCP packet from ip:%s port:%d", ipAddress,
+                     portNr);
+        _packetCallback->IncomingRTCPPacket(rtcpPacket, rtcpPacketLength,
+                                            ipAddress, portNr);
+    }
+}
+
+bool UdpTransportImpl::FilterIPAddress(const SocketAddress* fromAddress)
+{
+    if(fromAddress->_sockaddr_storage.sin_family == AF_INET)
+    {
+        if (_filterIPAddress._sockaddr_storage.sin_family == AF_INET)
+        {
+            // IP is stored in sin_addr.
+            if (_filterIPAddress._sockaddr_in.sin_addr != 0 &&
+                (_filterIPAddress._sockaddr_in.sin_addr !=
+                 fromAddress->_sockaddr_in.sin_addr))
+            {
+                return false;
+            }
+        }
+    }
+    else if(fromAddress->_sockaddr_storage.sin_family == AF_INET6)
+    {
+        if (_filterIPAddress._sockaddr_storage.sin_family == AF_INET6)
+        {
+            // IP is stored in sin_6addr.
+            for (WebRtc_Word32 i = 0; i < 4; i++)
+            {
+                if (_filterIPAddress._sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u32[i] != 0 &&
+                    _filterIPAddress._sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u32[i] != fromAddress->_sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u32[i])
+                {
+                    return false;
+                }
+            }
+        }
+    }
+    else
+    {
+      WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                _id,
+                "UdpTransportImpl::FilterIPAddress() unknown address family");
+        return false;
+    }
+    return true;
+}
+
+void UdpTransportImpl::CloseReceiveSockets()
+{
+    if(_ptrRtpSocket)
+    {
+        _ptrRtpSocket->CloseBlocking();
+        _ptrRtpSocket = NULL;
+    }
+    if(_ptrRtcpSocket)
+    {
+        _ptrRtcpSocket->CloseBlocking();
+        _ptrRtcpSocket = NULL;
+    }
+    _receiving = false;
+}
+
+void UdpTransportImpl::CloseSendSockets()
+{
+    if(_ptrSendRtpSocket)
+    {
+        _ptrSendRtpSocket->CloseBlocking();
+        _ptrSendRtpSocket = 0;
+    }
+    if(_ptrSendRtcpSocket)
+    {
+        _ptrSendRtcpSocket->CloseBlocking();
+        _ptrSendRtcpSocket = 0;
+    }
+}
+
+WebRtc_UWord16 UdpTransport::Htons(const WebRtc_UWord16 port)
+{
+    return htons(port);
+}
+
+WebRtc_UWord32 UdpTransport::Htonl(const WebRtc_UWord32 a)
+{
+    return htonl(a);
+}
+
+WebRtc_UWord32 UdpTransport::InetAddrIPV4(const char* ip)
+{
+    return ::inet_addr(ip);
+}
+
+WebRtc_Word32 UdpTransport::InetPresentationToNumeric(WebRtc_Word32 af,
+                                                      const char* src,
+                                                      void* dst)
+{
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+    const WebRtc_Word32 result = inet_pton(af, src, dst);
+    return result > 0 ? 0 : -1;
+
+#elif defined(_WIN32)
+    SocketAddress temp;
+    int length=sizeof(SocketAddress);
+
+    if(af == AF_INET)
+    {
+        WebRtc_Word32 result = WSAStringToAddressA(
+            (const LPSTR)src,
+            af,
+            0,
+            reinterpret_cast<struct sockaddr*>(&temp),
+            &length);
+        if(result != 0)
+        {
+            return -1;
+        }
+        memcpy(dst,&(temp._sockaddr_in.sin_addr),
+               sizeof(temp._sockaddr_in.sin_addr));
+        return 0;
+    }
+    else if(af == AF_INET6)
+    {
+        WebRtc_Word32 result = WSAStringToAddressA(
+            (const LPSTR)src,
+            af,
+            0,
+            reinterpret_cast<struct sockaddr*>(&temp),
+            &length);
+        if(result !=0)
+        {
+            return -1;
+        }
+        memcpy(dst,&(temp._sockaddr_in6.sin6_addr),
+               sizeof(temp._sockaddr_in6.sin6_addr));
+        return 0;
+
+    }else
+    {
+        return -1;
+    }
+#else
+    return -1;
+#endif
+}
+
+WebRtc_Word32 UdpTransport::LocalHostAddressIPV6(char n_localIP[16])
+{
+
+#if defined(_WIN32)
+    struct addrinfo *result = NULL;
+    struct addrinfo *ptr = NULL;
+    struct addrinfo hints;
+
+    ZeroMemory(&hints, sizeof(hints));
+    hints.ai_family = AF_INET6;
+
+    char szHostName[256] = "";
+    if(::gethostname(szHostName, sizeof(szHostName) - 1))
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceTransport, -1, "gethostname failed");
+        return -1;
+    }
+
+    DWORD dwRetval = getaddrinfo(szHostName, NULL, &hints, &result);
+    if ( dwRetval != 0 )
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceTransport, -1,
+                     "getaddrinfo failed, error:%d", dwRetval);
+        return -1;
+    }
+    for(ptr=result; ptr != NULL ;ptr=ptr->ai_next)
+    {
+        switch (ptr->ai_family)
+        {
+            case AF_INET6:
+                {
+                    for(int i = 0; i< 16; i++)
+                    {
+                        n_localIP[i] = (*(SocketAddress*)ptr->ai_addr).
+                            _sockaddr_in6.sin6_addr.Version6AddressUnion._s6_u8[i];
+                    }
+                    bool islocalIP = true;
+
+                    for(int n = 0; n< 15; n++)
+                    {
+                        if(n_localIP[n] != 0)
+                        {
+                            islocalIP = false;
+                            break;
+                        }
+                    }
+
+                    if(islocalIP && n_localIP[15] != 1)
+                    {
+                        islocalIP = false;
+                    }
+
+                    if(islocalIP && ptr->ai_next)
+                    {
+                        continue;
+                    }
+                    if(n_localIP[0] == 0xfe &&
+                       n_localIP[1] == 0x80 && ptr->ai_next)
+                    {
+                        continue;
+                    }
+                    freeaddrinfo(result);
+                }
+                return 0;
+            default:
+                break;
+        };
+    }
+    freeaddrinfo(result);
+    WEBRTC_TRACE(kTraceWarning, kTraceTransport, -1,
+                 "getaddrinfo failed to find address");
+    return -1;
+
+#elif defined(WEBRTC_MAC)
+    struct ifaddrs* ptrIfAddrs = NULL;
+    struct ifaddrs* ptrIfAddrsStart = NULL;
+
+    getifaddrs(&ptrIfAddrsStart);
+    ptrIfAddrs = ptrIfAddrsStart;
+    while(ptrIfAddrs)
+    {
+        if(ptrIfAddrs->ifa_addr->sa_family == AF_INET6)
+        {
+            const struct sockaddr_in6* sock_in6 =
+                reinterpret_cast<struct sockaddr_in6*>(ptrIfAddrs->ifa_addr);
+            const struct in6_addr* sin6_addr = &sock_in6->sin6_addr;
+
+            if (IN6_IS_ADDR_LOOPBACK(sin6_addr) ||
+                IN6_IS_ADDR_LINKLOCAL(sin6_addr)) {
+                ptrIfAddrs = ptrIfAddrs->ifa_next;
+                continue;
+            }
+            memcpy(n_localIP, sin6_addr->s6_addr, sizeof(sin6_addr->s6_addr));
+            freeifaddrs(ptrIfAddrsStart);
+            return 0;
+        }
+        ptrIfAddrs = ptrIfAddrs->ifa_next;
+    }
+    freeifaddrs(ptrIfAddrsStart);
+    return -1;
+#elif defined(WEBRTC_ANDROID)
+    return -1;
+#else // WEBRTC_LINUX
+    struct
+    {
+        struct nlmsghdr n;
+        struct ifaddrmsg r;
+    } req;
+
+    struct rtattr* rta = NULL;
+    int status;
+    char buf[16384]; // = 16 * 1024 (16 kB)
+    struct nlmsghdr* nlmp;
+    struct ifaddrmsg* rtmp;
+    struct rtattr* rtatp;
+    int rtattrlen;
+    struct in6_addr* in6p;
+
+    int fd = socket(PF_NETLINK, SOCK_DGRAM, NETLINK_ROUTE);
+    if (fd == -1)
+    {
+        return -1;
+    }
+
+    // RTM_GETADDR is used to fetch the ip address from the kernel interface
+    // table. Populate the msg structure (req) the size of the message buffer
+    // is specified to netlinkmessage header, and flags values are set as
+    // NLM_F_ROOT | NLM_F_REQUEST.
+    // The request flag must be set for all messages requesting the data from
+    // kernel. The root flag is used to notify the kernel to return the full
+    // tabel. Another flag (not used) is NLM_F_MATCH. This is used to get only
+    // specified entries in the table. At the time of writing this program this
+    // flag is not implemented in kernel
+
+    memset(&req, 0, sizeof(req));
+    req.n.nlmsg_len = NLMSG_LENGTH(sizeof(struct ifaddrmsg));
+    req.n.nlmsg_flags = NLM_F_REQUEST | NLM_F_ROOT;
+    req.n.nlmsg_type = RTM_GETADDR;
+    req.r.ifa_family = AF_INET6;
+
+    // Fill up all the attributes for the rtnetlink header.
+    // The lenght is very important. 16 signifies the ipv6 address.
+    rta = (struct rtattr*)(((char*)&req) + NLMSG_ALIGN(req.n.nlmsg_len));
+    rta->rta_len = RTA_LENGTH(16);
+
+    status = send(fd, &req, req.n.nlmsg_len, 0);
+    if (status < 0)
+    {
+        close(fd);
+        return -1;
+    }
+    status = recv(fd, buf, sizeof(buf), 0);
+    if (status < 0)
+    {
+        close(fd);
+        return -1;
+    }
+    if(status == 0)
+    {
+        close(fd);
+        return -1;
+    }
+    close(fd);
+
+    // The message is stored in buff. Parse the message to get the requested
+    // data.
+    {
+        nlmp = (struct nlmsghdr*)buf;
+        int len = nlmp->nlmsg_len;
+        int req_len = len - sizeof(*nlmp);
+
+        if (req_len < 0 || len > status)
+        {
+            return -1;
+        }
+        if (!NLMSG_OK_NO_WARNING(nlmp, status))
+        {
+            return -1;
+        }
+        rtmp = (struct ifaddrmsg*)NLMSG_DATA(nlmp);
+        rtatp = (struct rtattr*)IFA_RTA(rtmp);
+
+        rtattrlen = IFA_PAYLOAD(nlmp);
+
+        for (; RTA_OK(rtatp, rtattrlen); rtatp = RTA_NEXT(rtatp, rtattrlen))
+        {
+
+            // Here we hit the fist chunk of the message. Time to validate the
+            // type. For more info on the different types see
+            // "man(7) rtnetlink" The table below is taken from man pages.
+            // Attributes
+            // rta_type        value type             description
+            // -------------------------------------------------------------
+            // IFA_UNSPEC      -                      unspecified.
+            // IFA_ADDRESS     raw protocol address   interface address
+            // IFA_LOCAL       raw protocol address   local address
+            // IFA_LABEL       asciiz string          name of the interface
+            // IFA_BROADCAST   raw protocol address   broadcast address.
+            // IFA_ANYCAST     raw protocol address   anycast address
+            // IFA_CACHEINFO   struct ifa_cacheinfo   Address information.
+
+            if(rtatp->rta_type == IFA_ADDRESS)
+            {
+                bool islocalIP = true;
+                in6p = (struct in6_addr*)RTA_DATA(rtatp);
+                for(int n = 0; n< 15; n++)
+                {
+                    if(in6p->s6_addr[n] != 0)
+                    {
+                        islocalIP = false;
+                        break;
+                    }
+                }
+                if(islocalIP && in6p->s6_addr[15] != 1)
+                {
+                    islocalIP = false;
+                }
+                if(!islocalIP)
+                {
+                    for(int i = 0; i< 16; i++)
+                    {
+                        n_localIP[i] = in6p->s6_addr[i];
+                    }
+                    if(n_localIP[0] == static_cast<char> (0xfe)
+                       && n_localIP[1] == static_cast<char>(0x80) )
+                    {
+                        // Auto configured IP.
+                        continue;
+                    }
+                    break;
+                }
+            }
+        }
+    }
+    return 0;
+#endif
+}
+
+WebRtc_Word32 UdpTransport::LocalHostAddress(WebRtc_UWord32& localIP)
+{
+ #if defined(_WIN32)
+    hostent* localHost;
+    localHost = gethostbyname( "" );
+    if(localHost)
+    {
+        if(localHost->h_addrtype != AF_INET)
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceTransport,
+                -1,
+                "LocalHostAddress can only get local IP for IP Version 4");
+            return -1;
+        }
+        localIP= Htonl(
+            (*(struct in_addr *)localHost->h_addr_list[0]).S_un.S_addr);
+        return 0;
+    }
+    else
+    {
+        WebRtc_Word32 error = WSAGetLastError();
+        WEBRTC_TRACE(kTraceWarning, kTraceTransport, -1,
+                     "gethostbyname failed, error:%d", error);
+        return -1;
+    }
+#elif (defined(WEBRTC_MAC))
+    char localname[255];
+    if (gethostname(localname, 255) != -1)
+    {
+        hostent* localHost;
+        localHost = gethostbyname(localname);
+        if(localHost)
+        {
+            if(localHost->h_addrtype != AF_INET)
+            {
+                WEBRTC_TRACE(
+                    kTraceError,
+                    kTraceTransport,
+                    -1,
+                    "LocalHostAddress can only get local IP for IP Version 4");
+                return -1;
+            }
+            localIP = Htonl((*(struct in_addr*)*localHost->h_addr_list).s_addr);
+            return 0;
+        }
+    }
+    WEBRTC_TRACE(kTraceWarning, kTraceTransport, -1, "gethostname failed");
+    return -1;
+#else // WEBRTC_LINUX
+    int sockfd, size  = 1;
+    struct ifreq* ifr;
+    struct ifconf ifc;
+
+    if (0 > (sockfd = socket(AF_INET, SOCK_DGRAM, IPPROTO_IP)))
+    {
+      return -1;
+    }
+    ifc.ifc_len = IFRSIZE;
+    ifc.ifc_req = NULL;
+    do
+    {
+        ++size;
+        // Buffer size needed is unknown. Try increasing it until no overflow
+        // occurs.
+        if (NULL == (ifc.ifc_req = (ifreq*)realloc(ifc.ifc_req, IFRSIZE))) {
+          fprintf(stderr, "Out of memory.\n");
+          exit(EXIT_FAILURE);
+        }
+        ifc.ifc_len = IFRSIZE;
+        if (ioctl(sockfd, SIOCGIFCONF, &ifc))
+        {
+            free(ifc.ifc_req);
+            close(sockfd);
+            return -1;
+        }
+    } while  (IFRSIZE <= ifc.ifc_len);
+
+    ifr = ifc.ifc_req;
+    for (;(char *) ifr < (char *) ifc.ifc_req + ifc.ifc_len; ++ifr)
+    {
+        if (ifr->ifr_addr.sa_data == (ifr+1)->ifr_addr.sa_data)
+        {
+          continue;  // duplicate, skip it
+        }
+        if (ioctl(sockfd, SIOCGIFFLAGS, ifr))
+        {
+          continue;  // failed to get flags, skip it
+        }
+        if(strncmp(ifr->ifr_name, "lo",3) == 0)
+        {
+            continue;
+        }else
+        {
+            struct sockaddr* saddr = &(ifr->ifr_addr);
+            SocketAddress* socket_addess = reinterpret_cast<SocketAddress*>(
+                saddr);
+            localIP = Htonl(socket_addess->_sockaddr_in.sin_addr);
+            close(sockfd);
+            free(ifc.ifc_req);
+            return 0;
+        }
+    }
+    free(ifc.ifc_req);
+    close(sockfd);
+    return -1;
+#endif
+}
+
+WebRtc_Word32 UdpTransport::IPAddress(const SocketAddress& address,
+                                      char* ip,
+                                      WebRtc_UWord32& ipSize,
+                                      WebRtc_UWord16& sourcePort)
+{
+ #if defined(_WIN32)
+    DWORD dwIPSize = ipSize;
+    WebRtc_Word32 returnvalue = WSAAddressToStringA((LPSOCKADDR)(&address),
+                                         sizeof(SocketAddress),
+                                         NULL,
+                                         ip,
+                                         &dwIPSize);
+    if(returnvalue == -1)
+    {
+        return -1;
+    }
+
+    WebRtc_UWord16 source_port = 0;
+    if(address._sockaddr_storage.sin_family == AF_INET)
+    {
+        // Parse IP assuming format "a.b.c.d:port".
+        char* ipEnd = strchr(ip,':');
+        if(ipEnd != NULL)
+        {
+            *ipEnd = '\0';
+        }
+        ipSize = (WebRtc_Word32)strlen(ip);
+        if(ipSize == 0)
+        {
+            return -1;
+        }
+        source_port = address._sockaddr_in.sin_port;
+    }
+    else
+    {
+        // Parse IP assuming format "[address]:port".
+        char* ipEnd = strchr(ip,']');
+        if(ipEnd != NULL)
+        {
+          // Calculate length
+            WebRtc_Word32 adrSize = WebRtc_Word32(ipEnd - ip) - 1;
+            memmove(ip, &ip[1], adrSize);   // Remove '['
+            *(ipEnd - 1) = '\0';
+        }
+        ipSize = (WebRtc_Word32)strlen(ip);
+        if(ipSize == 0)
+        {
+            return -1;
+        }
+
+        source_port = address._sockaddr_in6.sin6_port;
+    }
+    // Convert port number to network byte order.
+    sourcePort = htons(source_port);
+    return 0;
+
+ #elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+    WebRtc_Word32 ipFamily = address._sockaddr_storage.sin_family;
+    const void* ptrNumericIP = NULL;
+
+    if(ipFamily == AF_INET)
+    {
+        ptrNumericIP = &(address._sockaddr_in.sin_addr);
+    }
+    else if(ipFamily == AF_INET6)
+    {
+        ptrNumericIP = &(address._sockaddr_in6.sin6_addr);
+    }
+    else
+    {
+        return -1;
+    }
+    if(inet_ntop(ipFamily, ptrNumericIP, ip, ipSize) == NULL)
+    {
+        return -1;
+    }
+    WebRtc_UWord16 source_port;
+    if(ipFamily == AF_INET)
+    {
+        source_port = address._sockaddr_in.sin_port;
+    } else
+    {
+        source_port = address._sockaddr_in6.sin6_port;
+    }
+    // Convert port number to network byte order.
+    sourcePort = htons(source_port);
+    return 0;
+ #else
+    return -1;
+ #endif
+}
+
+bool UdpTransport::IsIpAddressValid(const char* ipadr, const bool ipV6)
+{
+    if(ipV6)
+    {
+        WebRtc_Word32 len = (WebRtc_Word32)strlen(ipadr);
+        if( len>39 || len == 0)
+        {
+            return false;
+        }
+
+        WebRtc_Word32 i;
+        WebRtc_Word32 colonPos[7] = {0,0,0,0,0,0,0};
+        WebRtc_Word32 lastColonPos = -2;
+        WebRtc_Word32 nColons = 0;
+        WebRtc_Word32 nDubbleColons = 0;
+        WebRtc_Word32 nDots = 0;
+        WebRtc_Word32 error = 0;
+        char c;
+        for(i = 0; i < len ; i++)
+        {
+            c=ipadr[i];
+            if(isxdigit(c))
+                ;
+            else if(c == ':')
+            {
+                if(nColons < 7)
+                    colonPos[nColons] = i;
+                if((i-lastColonPos)==1)
+                    nDubbleColons++;
+                lastColonPos=i;
+                if(nDots != 0)
+                {
+                    error = 1;
+                }
+                nColons++;
+            }
+            else if(c == '.')
+            {
+                nDots++;
+            }
+            else
+            {
+                error = 1;
+            }
+
+        }
+        if(error)
+        {
+            return false;
+        }
+        if(nDubbleColons > 1)
+        {
+            return false;
+        }
+        if(nColons > 7 || nColons < 2)
+        {
+            return false;
+        }
+        if(!(nDots == 3 || nDots == 0))
+        {
+            return false;
+        }
+        lastColonPos = -1;
+        WebRtc_Word32 charsBeforeColon = 0;
+        for(i = 0; i < nColons; i++)
+        {
+            charsBeforeColon=colonPos[i]-lastColonPos-1;
+            if(charsBeforeColon > 4)
+            {
+                return false;
+            }
+            lastColonPos=colonPos[i];
+        }
+        WebRtc_Word32 lengthAfterLastColon = len - lastColonPos - 1;
+        if(nDots == 0)
+        {
+            if(lengthAfterLastColon > 4)
+                return false;
+        }
+        if(nDots == 3 && lengthAfterLastColon > 0)
+        {
+            return IsIpAddressValid((ipadr+lastColonPos+1),false);
+        }
+
+    }
+    else
+    {
+        WebRtc_Word32 len = (WebRtc_Word32)strlen(ipadr);
+        if((len>15)||(len==0))
+        {
+            return false;
+        }
+
+        // IPv4 should be [0-255].[0-255].[0-255].[0-255]
+        WebRtc_Word32 i;
+        WebRtc_Word32 nDots = 0;
+        WebRtc_Word32 iDotPos[4] = {0,0,0,0};
+
+        for (i = 0; (i < len) && (nDots < 4); i++)
+        {
+            if (ipadr[i] == (char)'.')
+            {
+                // Store index of dots and count number of dots.
+                iDotPos[nDots++] = i;
+            }
+        }
+
+        bool allUnder256 = false;
+        // TODO (hellner): while loop seems to be abused here to get
+        // label like functionality. Fix later to avoid introducing bugs now.
+
+        // Check that all numbers are smaller than 256.
+        do
+        {
+            if (nDots != 3 )
+            {
+                break;
+            }
+
+            if (iDotPos[0] <= 3)
+            {
+                char nr[4];
+                memset(nr,0,4);
+                strncpy(nr,&ipadr[0],iDotPos[0]);
+                WebRtc_Word32 num = atoi(nr);
+                if (num > 255)
+                {
+                    break;
+                }
+            } else {
+                break;
+            }
+
+            if (iDotPos[1] - iDotPos[0] <= 4)
+            {
+                char nr[4];
+                memset(nr,0,4);
+                strncpy(nr,&ipadr[iDotPos[0]+1], iDotPos[1] - iDotPos[0] - 1);
+                WebRtc_Word32 num = atoi(nr);
+                if (num > 255)
+                    break;
+            } else {
+                break;
+            }
+
+            if (iDotPos[2] - iDotPos[1] <= 4)
+            {
+                char nr[4];
+                memset(nr,0,4);
+                strncpy(nr,&ipadr[iDotPos[1]+1], iDotPos[1] - iDotPos[0] - 1);
+                WebRtc_Word32 num = atoi(nr);
+                if (num > 255)
+                    break;
+
+                memset(nr,0,4);
+                strncpy(nr,&ipadr[iDotPos[2]+1], len - iDotPos[2] -1);
+                num = atoi(nr);
+                if (num > 255)
+                    break;
+                else
+                    allUnder256 = true;
+            } else
+                break;
+        } while(false);
+
+        if (nDots != 3 || !allUnder256)
+        {
+            return false;
+        }
+    }
+    return true;
+}
+} // namespace webrtc
diff --git a/src/modules/udp_transport/source/udp_transport_impl.h b/src/modules/udp_transport/source/udp_transport_impl.h
new file mode 100644
index 0000000..9f4fd9f
--- /dev/null
+++ b/src/modules/udp_transport/source/udp_transport_impl.h
@@ -0,0 +1,263 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_TRANSPORT_IMPL_H_
+#define WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_TRANSPORT_IMPL_H_
+
+#include "udp_transport.h"
+#include "udp_socket_wrapper.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class RWLockWrapper;
+class UdpSocketManager;
+
+class UdpTransportImpl : public UdpTransport
+{
+public:
+    // A factory that returns a wrapped UDP socket or equivalent.
+    class SocketFactoryInterface {
+    public:
+        virtual ~SocketFactoryInterface() {}
+        virtual UdpSocketWrapper* CreateSocket(const WebRtc_Word32 id,
+                                               UdpSocketManager* mgr,
+                                               CallbackObj obj,
+                                               IncomingSocketCallback cb,
+                                               bool ipV6Enable,
+                                               bool disableGQOS) = 0;
+    };
+
+    // Constructor, only called by UdpTransport::Create and tests.
+    // The constructor takes ownership of the "maker".
+    // The constructor does not take ownership of socket_manager.
+    UdpTransportImpl(const WebRtc_Word32 id,
+                     SocketFactoryInterface* maker,
+                     UdpSocketManager* socket_manager);
+    virtual ~UdpTransportImpl();
+
+    // Module functions
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+    virtual WebRtc_Word32 TimeUntilNextProcess();
+    virtual WebRtc_Word32 Process();
+
+    // UdpTransport functions
+    virtual WebRtc_Word32 InitializeSendSockets(
+        const char* ipAddr,
+        const WebRtc_UWord16 rtpPort,
+        const WebRtc_UWord16 rtcpPort = 0);
+    virtual WebRtc_Word32 InitializeReceiveSockets(
+        UdpTransportData* const packetCallback,
+        const WebRtc_UWord16 rtpPort,
+        const char* ipAddr = NULL,
+        const char* multicastIpAddr = NULL,
+        const WebRtc_UWord16 rtcpPort = 0);
+    virtual WebRtc_Word32 InitializeSourcePorts(
+        const WebRtc_UWord16 rtpPort,
+        const WebRtc_UWord16 rtcpPort = 0);
+    virtual WebRtc_Word32 SourcePorts(WebRtc_UWord16& rtpPort,
+                                      WebRtc_UWord16& rtcpPort) const;
+    virtual WebRtc_Word32 ReceiveSocketInformation(
+        char ipAddr[kIpAddressVersion6Length],
+        WebRtc_UWord16& rtpPort,
+        WebRtc_UWord16& rtcpPort,
+        char multicastIpAddr[kIpAddressVersion6Length]) const;
+    virtual WebRtc_Word32 SendSocketInformation(
+        char ipAddr[kIpAddressVersion6Length],
+        WebRtc_UWord16& rtpPort,
+        WebRtc_UWord16& rtcpPort) const;
+    virtual WebRtc_Word32 RemoteSocketInformation(
+        char ipAddr[kIpAddressVersion6Length],
+        WebRtc_UWord16& rtpPort,
+        WebRtc_UWord16& rtcpPort) const;
+    virtual WebRtc_Word32 SetQoS(const bool QoS,
+                                 const WebRtc_Word32 serviceType,
+                                 const WebRtc_UWord32 maxBitrate = 0,
+                                 const WebRtc_Word32 overrideDSCP = 0,
+                                 const bool audio = false);
+    virtual WebRtc_Word32 QoS(bool& QoS, WebRtc_Word32& serviceType,
+                              WebRtc_Word32& overrideDSCP) const;
+    virtual WebRtc_Word32 SetToS(const WebRtc_Word32 DSCP,
+                                 const bool useSetSockOpt = false);
+    virtual WebRtc_Word32 ToS(WebRtc_Word32& DSCP,
+                              bool& useSetSockOpt) const;
+    virtual WebRtc_Word32 SetPCP(const WebRtc_Word32 PCP);
+    virtual WebRtc_Word32 PCP(WebRtc_Word32& PCP) const;
+    virtual WebRtc_Word32 EnableIpV6();
+    virtual bool IpV6Enabled() const;
+    virtual WebRtc_Word32 SetFilterIP(
+        const char filterIPAddress[kIpAddressVersion6Length]);
+    virtual WebRtc_Word32 FilterIP(
+        char filterIPAddress[kIpAddressVersion6Length]) const;
+    virtual WebRtc_Word32 SetFilterPorts(const WebRtc_UWord16 rtpFilterPort,
+                                         const WebRtc_UWord16 rtcpFilterPort);
+    virtual WebRtc_Word32 FilterPorts(WebRtc_UWord16& rtpFilterPort,
+                                      WebRtc_UWord16& rtcpFilterPort) const;
+    virtual WebRtc_Word32 StartReceiving(
+        const WebRtc_UWord32 numberOfSocketBuffers);
+    virtual WebRtc_Word32 StopReceiving();
+    virtual bool Receiving() const;
+    virtual bool SendSocketsInitialized() const;
+    virtual bool SourcePortsInitialized() const;
+    virtual bool ReceiveSocketsInitialized() const;
+    virtual WebRtc_Word32 SendRaw(const WebRtc_Word8* data,
+                                  WebRtc_UWord32 length, WebRtc_Word32 isRTCP,
+                                  WebRtc_UWord16 portnr = 0,
+                                  const char* ip = NULL);
+    virtual WebRtc_Word32 SendRTPPacketTo(const WebRtc_Word8 *data,
+                                          WebRtc_UWord32 length,
+                                          const SocketAddress& to);
+    virtual WebRtc_Word32 SendRTCPPacketTo(const WebRtc_Word8 *data,
+                                           WebRtc_UWord32 length,
+                                           const SocketAddress& to);
+    virtual WebRtc_Word32 SendRTPPacketTo(const WebRtc_Word8 *data,
+                                          WebRtc_UWord32 length,
+                                          WebRtc_UWord16 rtpPort);
+    virtual WebRtc_Word32 SendRTCPPacketTo(const WebRtc_Word8 *data,
+                                           WebRtc_UWord32 length,
+                                           WebRtc_UWord16 rtcpPort);
+    // Transport functions
+    virtual int SendPacket(int channel, const void* data, int length);
+    virtual int SendRTCPPacket(int channel, const void* data, int length);
+
+    // UdpTransport functions continue.
+    virtual WebRtc_Word32 SetSendIP(const char* ipaddr);
+    virtual WebRtc_Word32 SetSendPorts(const WebRtc_UWord16 rtpPort,
+                                       const WebRtc_UWord16 rtcpPort = 0);
+
+    virtual ErrorCode LastError() const;
+
+    virtual WebRtc_Word32 IPAddressCached(const SocketAddress& address,
+                                          char* ip,
+                                          WebRtc_UWord32& ipSize,
+                                          WebRtc_UWord16& sourcePort);
+
+    WebRtc_Word32 Id() const {return _id;}
+protected:
+    // IncomingSocketCallback signature functions for receiving callbacks from
+    // UdpSocketWrapper.
+    static void IncomingRTPCallback(CallbackObj obj,
+                                    const WebRtc_Word8* rtpPacket,
+                                    WebRtc_Word32 rtpPacketLength,
+                                    const SocketAddress* from);
+    static void IncomingRTCPCallback(CallbackObj obj,
+                                     const WebRtc_Word8* rtcpPacket,
+                                     WebRtc_Word32 rtcpPacketLength,
+                                     const SocketAddress* from);
+
+    void CloseSendSockets();
+    void CloseReceiveSockets();
+
+    // Update _remoteRTPAddr according to _destPort and _destIP
+    void BuildRemoteRTPAddr();
+    // Update _remoteRTCPAddr according to _destPortRTCP and _destIP
+    void BuildRemoteRTCPAddr();
+
+    void BuildSockaddrIn(WebRtc_UWord16 portnr, const char* ip,
+                         SocketAddress& remoteAddr) const;
+
+    ErrorCode BindLocalRTPSocket();
+    ErrorCode BindLocalRTCPSocket();
+
+    ErrorCode BindRTPSendSocket();
+    ErrorCode BindRTCPSendSocket();
+
+    void IncomingRTPFunction(const WebRtc_Word8* rtpPacket,
+                             WebRtc_Word32 rtpPacketLength,
+                             const SocketAddress* from);
+    void IncomingRTCPFunction(const WebRtc_Word8* rtcpPacket,
+                              WebRtc_Word32 rtcpPacketLength,
+                              const SocketAddress* from);
+
+    bool FilterIPAddress(const SocketAddress* fromAddress);
+
+    bool SetSockOptUsed();
+
+    WebRtc_Word32 EnableQoS(WebRtc_Word32 serviceType, bool audio,
+                            WebRtc_UWord32 maxBitrate,
+                            WebRtc_Word32 overrideDSCP);
+
+    WebRtc_Word32 DisableQoS();
+
+private:
+    void GetCachedAddress(char* ip, WebRtc_UWord32& ipSize,
+                          WebRtc_UWord16& sourcePort);
+
+    WebRtc_Word32 _id;
+    SocketFactoryInterface* _socket_creator;
+    // Protects the sockets from being re-configured while receiving packets.
+    CriticalSectionWrapper* _crit;
+    CriticalSectionWrapper* _critFilter;
+    // _packetCallback's critical section.
+    CriticalSectionWrapper* _critPacketCallback;
+    UdpSocketManager* _mgr;
+    ErrorCode _lastError;
+
+    // Remote RTP and RTCP ports.
+    WebRtc_UWord16 _destPort;
+    WebRtc_UWord16 _destPortRTCP;
+
+    // Local RTP and RTCP ports.
+    WebRtc_UWord16 _localPort;
+    WebRtc_UWord16 _localPortRTCP;
+
+    // Local port number when the local port for receiving and local port number
+    // for sending are not the same.
+    WebRtc_UWord16 _srcPort;
+    WebRtc_UWord16 _srcPortRTCP;
+
+    // Remote port from which last received packet was sent.
+    WebRtc_UWord16 _fromPort;
+    WebRtc_UWord16 _fromPortRTCP;
+
+    char _fromIP[kIpAddressVersion6Length];
+    char _destIP[kIpAddressVersion6Length];
+    char _localIP[kIpAddressVersion6Length];
+    char _localMulticastIP[kIpAddressVersion6Length];
+
+    UdpSocketWrapper* _ptrRtpSocket;
+    UdpSocketWrapper* _ptrRtcpSocket;
+
+    // Local port when the local port for receiving and local port for sending
+    // are not the same.
+    UdpSocketWrapper* _ptrSendRtpSocket;
+    UdpSocketWrapper* _ptrSendRtcpSocket;
+
+    SocketAddress _remoteRTPAddr;
+    SocketAddress _remoteRTCPAddr;
+
+    SocketAddress _localRTPAddr;
+    SocketAddress _localRTCPAddr;
+
+    WebRtc_Word32 _tos;
+    bool _receiving;
+    bool _useSetSockOpt;
+    bool _qos;
+    WebRtc_Word32 _pcp;
+    bool _ipV6Enabled;
+    WebRtc_Word32 _serviceType;
+    WebRtc_Word32 _overrideDSCP;
+    WebRtc_UWord32 _maxBitrate;
+
+    // Cache used by GetCachedAddress(..).
+    RWLockWrapper* _cachLock;
+    SocketAddress _previousAddress;
+    char _previousIP[kIpAddressVersion6Length];
+    WebRtc_UWord32 _previousIPSize;
+    WebRtc_UWord16 _previousSourcePort;
+
+    SocketAddress _filterIPAddress;
+    WebRtc_UWord16 _rtpFilterPort;
+    WebRtc_UWord16 _rtcpFilterPort;
+
+    UdpTransportData* _packetCallback;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UDP_TRANSPORT_SOURCE_UDP_TRANSPORT_IMPL_H_
diff --git a/src/modules/udp_transport/source/udp_transport_unittest.cc b/src/modules/udp_transport/source/udp_transport_unittest.cc
new file mode 100644
index 0000000..3125e2e
--- /dev/null
+++ b/src/modules/udp_transport/source/udp_transport_unittest.cc
@@ -0,0 +1,141 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <vector>
+
+#include "udp_transport.h"
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+// We include the implementation header file to get at the dependency-injecting
+// constructor.
+#include "udp_transport_impl.h"
+// We must mock the socket manager, for which we need its definition.
+#include "udp_socket_manager_wrapper.h"
+
+using ::testing::_;
+using ::testing::Return;
+
+class MockUdpSocketWrapper : public webrtc::UdpSocketWrapper {
+ public:
+  // The following methods have to be mocked because they are pure.
+  MOCK_METHOD1(ChangeUniqueId, WebRtc_Word32(WebRtc_Word32));
+  MOCK_METHOD2(SetCallback, bool(webrtc::CallbackObj,
+                                 webrtc::IncomingSocketCallback));
+  MOCK_METHOD1(Bind, bool(const webrtc::SocketAddress&));
+  MOCK_METHOD0(ValidHandle, bool());
+  MOCK_METHOD4(SetSockopt, bool(WebRtc_Word32, WebRtc_Word32,
+                                const WebRtc_Word8*,
+                                WebRtc_Word32));
+  MOCK_METHOD1(SetTOS, WebRtc_Word32(WebRtc_Word32));
+  MOCK_METHOD3(SendTo, WebRtc_Word32(const WebRtc_Word8*, WebRtc_Word32,
+                                     const webrtc::SocketAddress&));
+  MOCK_METHOD8(SetQos, bool(WebRtc_Word32, WebRtc_Word32,
+                            WebRtc_Word32, WebRtc_Word32,
+                            WebRtc_Word32, WebRtc_Word32,
+                            const webrtc::SocketAddress &,
+                            WebRtc_Word32));
+};
+
+class MockUdpSocketManager : public webrtc::UdpSocketManager {
+ public:
+  // Access to protected destructor.
+  void Destroy() {
+    delete this;
+  }
+  MOCK_METHOD2(Init, bool(WebRtc_Word32, WebRtc_UWord8&));
+  MOCK_METHOD1(ChangeUniqueId, WebRtc_Word32(const WebRtc_Word32));
+  MOCK_METHOD0(Start, bool());
+  MOCK_METHOD0(Stop, bool());
+  MOCK_METHOD1(AddSocket, bool(webrtc::UdpSocketWrapper*));
+  MOCK_METHOD1(RemoveSocket, bool(webrtc::UdpSocketWrapper*));
+};
+
+class MockSocketFactory :
+    public webrtc::UdpTransportImpl::SocketFactoryInterface {
+ public:
+  MockSocketFactory(std::vector<MockUdpSocketWrapper*>* socket_counter)
+      : socket_counter_(socket_counter) {
+  }
+  webrtc::UdpSocketWrapper* CreateSocket(const WebRtc_Word32 id,
+                                         webrtc::UdpSocketManager* mgr,
+                                         webrtc::CallbackObj obj,
+                                         webrtc::IncomingSocketCallback cb,
+                                         bool ipV6Enable,
+                                         bool disableGQOS) {
+    MockUdpSocketWrapper* socket = new MockUdpSocketWrapper();
+    // We instrument the socket with calls that are expected, but do
+    // not matter for any specific test, in order to avoid warning messages.
+    EXPECT_CALL(*socket, ValidHandle()).WillRepeatedly(Return(true));
+    EXPECT_CALL(*socket, Bind(_)).WillOnce(Return(true));
+    socket_counter_->push_back(socket);
+    return socket;
+  }
+  std::vector<MockUdpSocketWrapper*>* socket_counter_;
+};
+
+class UDPTransportTest : public ::testing::Test {
+ public:
+  UDPTransportTest()
+      : sockets_created_(0) {
+  }
+
+  ~UDPTransportTest() {
+    // In production, sockets register themselves at creation time with
+    // an UdpSocketManager, and the UdpSocketManager is responsible for
+    // deleting them. In this test, we just delete them after the test.
+    while (!sockets_created_.empty()) {
+      delete sockets_created_.back();
+      sockets_created_.pop_back();
+    }
+  }
+
+  int NumSocketsCreated() {
+    return sockets_created_.size();
+  }
+
+  std::vector<MockUdpSocketWrapper*>* sockets_created() {
+    return &sockets_created_;
+  }
+private:
+  std::vector<MockUdpSocketWrapper*> sockets_created_;
+};
+
+TEST_F(UDPTransportTest, CreateTransport) {
+  WebRtc_Word32 id = 0;
+  WebRtc_UWord8 threads = 1;
+  webrtc::UdpTransport* transport = webrtc::UdpTransport::Create(id, threads);
+  webrtc::UdpTransport::Destroy(transport);
+}
+
+// This test verifies that the mock_socket is not called from the constructor.
+TEST_F(UDPTransportTest, ConstructorDoesNotCreateSocket) {
+  WebRtc_Word32 id = 0;
+  webrtc::UdpTransportImpl::SocketFactoryInterface* null_maker = NULL;
+  webrtc::UdpSocketManager* null_manager = NULL;
+  webrtc::UdpTransport* transport = new webrtc::UdpTransportImpl(id,
+                                                                 null_maker,
+                                                                 null_manager);
+  delete transport;
+}
+
+TEST_F(UDPTransportTest, InitializeSourcePorts) {
+  WebRtc_Word32 id = 0;
+  webrtc::UdpTransportImpl::SocketFactoryInterface* mock_maker
+      = new MockSocketFactory(sockets_created());
+  MockUdpSocketManager* mock_manager = new MockUdpSocketManager();
+  webrtc::UdpTransport* transport = new webrtc::UdpTransportImpl(id,
+                                                                 mock_maker,
+                                                                 mock_manager);
+  EXPECT_EQ(0, transport->InitializeSourcePorts(4711, 4712));
+  EXPECT_EQ(2, NumSocketsCreated());
+
+  delete transport;
+  mock_manager->Destroy();
+}
diff --git a/src/modules/udp_transport/test/SocketManagerTest.cc b/src/modules/udp_transport/test/SocketManagerTest.cc
new file mode 100644
index 0000000..03119be
--- /dev/null
+++ b/src/modules/udp_transport/test/SocketManagerTest.cc
@@ -0,0 +1,449 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+#include <iostream>
+
+#ifdef _WIN32
+#include <windows.h>
+#include <tchar.h>
+#else
+#include <stdio.h>
+#define Sleep(x) usleep(x*1000)
+#endif
+
+#include "udp_transport.h"
+#include "common_types.h"
+#include "trace.h"
+
+//#define QOS_TEST
+//#define QOS_TEST_WITH_OVERRIDE // require admin on Win7
+//#define TOS_TEST               // require admin on Win7
+//#define TOS_TEST_USING_SETSOCKOPT
+//#define PCP_TEST
+
+class UdpTransportDataA: public UdpTransportData
+{
+public:
+    UdpTransportDataA() :
+        _counterRTP(0),
+        _counterRTCP(0)
+    {
+    };
+    virtual void IncomingRTPPacket(const WebRtc_Word8* incommingRtpPacket,
+                                   const WebRtc_Word32 rtpPacketLength,
+                                   const char* fromIP,
+                                   const WebRtc_UWord16 fromPort)
+    {
+        _counterRTP++;
+    };
+
+    virtual void IncomingRTCPPacket(const WebRtc_Word8* incommingRtcpPacket,
+                                    const WebRtc_Word32 rtcpPacketLength,
+                                    const char* fromIP,
+                                    const WebRtc_UWord16 fromPort)
+    {
+        _counterRTCP++;
+    };
+    WebRtc_UWord32    _counterRTP;
+    WebRtc_UWord32    _counterRTCP;
+};
+
+class UdpTransportDataB: public UdpTransportData
+{
+public:
+    UdpTransportDataB() :
+        _counterRTP(0),
+        _counterRTCP(0)
+    {
+    };
+    virtual void IncomingRTPPacket(const WebRtc_Word8* incommingRtpPacket,
+                                   const WebRtc_Word32 rtpPacketLength,
+                                   const char* fromIP,
+                                   const WebRtc_UWord16 fromPort)
+    {
+        _counterRTP++;
+    };
+
+    virtual void IncomingRTCPPacket(const WebRtc_Word8* incommingRtcpPacket,
+                                    const WebRtc_Word32 rtcpPacketLength,
+                                    const char* fromIP,
+                                    const WebRtc_UWord16 fromPort)
+    {
+        _counterRTCP++;
+    };
+    WebRtc_UWord32    _counterRTP;
+    WebRtc_UWord32    _counterRTCP;
+};
+
+#ifdef _WIN32
+int _tmain(int argc, _TCHAR* argv[])
+#else
+int main(int argc, char* argv[])
+#endif
+{
+    Trace::CreateTrace();
+    Trace::SetTraceFile("testTrace.txt");
+    Trace::SetEncryptedTraceFile("testTraceDebug.txt");
+    Trace::SetLevelFilter(webrtc::kTraceAll);
+
+    printf("Start UdpTransport test\n");
+
+    WebRtc_UWord8 numberOfSocketThreads = 5;
+    UdpTransport* client1 = UdpTransport::Create(1,numberOfSocketThreads,NULL);
+    numberOfSocketThreads = 0;
+    UdpTransport* client2 = UdpTransport::Create(2,numberOfSocketThreads,NULL);
+    assert(5 == numberOfSocketThreads);
+
+    UdpTransportDataA* client1Callback = new UdpTransportDataA();
+    UdpTransportDataB* client2Callback = new UdpTransportDataB();
+
+    WebRtc_UWord32 localIP = 0;
+    char localIPAddr[64];
+    assert( 0 == client1->LocalHostAddress(localIP)); // network host order aka big-endian
+
+    sprintf(localIPAddr,"%lu.%lu.%lu.%lu",(localIP>>24)& 0x0ff,(localIP>>16)& 0x0ff ,(localIP>>8)& 0x0ff, localIP & 0x0ff);
+    printf("\tLocal IP:%s\n", localIPAddr);
+
+    char localIPV6[16];
+    char localIPAddrV6[128];
+    if( 0 == client1->LocalHostAddressIPV6(localIPV6))
+    {
+        sprintf(localIPAddrV6,"%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x", localIPV6[0],localIPV6[1],localIPV6[2],localIPV6[3],localIPV6[4],localIPV6[5],localIPV6[6],localIPV6[7], localIPV6[8],localIPV6[9],localIPV6[10],localIPV6[11],localIPV6[12],localIPV6[13],localIPV6[14],localIPV6[15]);
+        printf("\tLocal IPV6:%s\n", localIPAddrV6);
+    }
+
+    char test[9] = "testtest";
+    assert( 0 == client1->InitializeReceiveSockets(client1Callback,1234, localIPAddr));
+
+#if defined QOS_TEST_WITH_OVERRIDE || defined QOS_TEST || defined TOS_TEST || defined TOS_TEST_USING_SETSOCKOPT
+    assert( -1 == client1->SetQoS(true, 3, 1000));  //  should fail
+    assert( 0 == client1->InitializeSendSockets("192.168.200.1", 1236,1237));
+#else
+    assert( 0 == client1->InitializeSendSockets(localIPAddr, 1236,1237));
+#endif
+    assert( 0 == client1->StartReceiving(20));
+
+    assert( 0 == client2->InitializeReceiveSockets(client2Callback,1236));
+    assert( 0 == client2->InitializeSendSockets(localIPAddr, 1234,1235));
+    assert( 0 == client2->StartReceiving(20));
+
+    Sleep(10);
+
+#ifdef TOS_TEST
+    // note: you need to have QOS installed on your interface for this test
+    // test by doing a ethereal sniff and filter out packets with ip.dst == 192.168.200.1
+    assert( 0 == client1->SetToS(2));
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+    Sleep(10);
+    assert( 0 == client1->SetToS(3));
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+    Sleep(10);
+    assert( 0 == client1->SetToS(0));
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+
+    printf("Tested TOS  \n");
+    Sleep(5000);
+    return 0;
+#endif
+
+#ifdef TOS_TEST_USING_SETSOCKOPT
+    // note: you need to have QOS installed on your interface for this test
+    // test by doing a ethereal sniff and filter out packets with ip.dst == 192.168.200.1
+    assert( 0 == client1->SetToS(2, true));
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+    Sleep(10);
+    assert( 0 == client1->SetToS(3, true));
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+    Sleep(10);
+    assert( 0 == client1->SetToS(0, true));
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+
+    printf("Tested TOS using setsockopt \n");
+    Sleep(5000);
+    return 0;
+#endif
+
+#ifdef QOS_TEST
+    // note: you need to have QOS installed on your interface for this test
+    // test by doing a ethereal sniff and filter out packets with ip.dst == 192.168.200.1
+    assert( 0 == client1->SetQoS(true, 2, 1000));  // SERVICETYPE_CONTROLLEDLOAD 2
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+    Sleep(10);
+    assert( 0 == client1->SetQoS(true, 3, 1000));  // SERVICETYPE_GUARANTEED 3
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+    Sleep(10);
+    assert( 0 == client1->SetQoS(false, 0));  //
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+
+    printf("Tested QOS  \n");
+    Sleep(5000);
+    return 0;
+#endif
+
+#ifdef QOS_TEST_WITH_OVERRIDE
+    // note: you need to have QOS installed on your interface for this test
+    // test by doing a ethereal sniff and filter out packets with ip.dst == 192.168.200.1
+    assert( 0 == client1->SetQoS(true, 2, 1000, 1));  // SERVICETYPE_CONTROLLEDLOAD 2
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+    Sleep(10);
+    assert( 0 == client1->SetQoS(true, 2, 1000, 2));  // SERVICETYPE_GUARANTEED 3
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+    Sleep(10);
+    assert( 0 == client1->SetQoS(false, 0));  //
+    Sleep(10);
+    assert( 9 == client1->SendPacket(-1, test, 9));
+    printf("Tested QOS with override \n");
+    Sleep(5000);
+    return 0;
+#endif
+
+#ifdef PCP_TEST
+    // Note: We currently don't know how to test that the bits are actually set in the frame,
+    // this test simply tests the API and that we can send a packet after setting PCP.
+    assert( -1 == client1->SetPCP(-1)); // should fail
+    assert( -1 == client1->SetPCP(8)); // should fail
+    printf("Setting PCP to 7 returned %d \n", client1->SetPCP(7));
+    printf("(Failing is normal, requires the CAP_NET_ADMIN capability to succeed.) \n");
+    Sleep(10);
+    for (int pcp = 6; pcp >= 0; --pcp)
+    {
+        assert( 0 == client1->SetPCP(pcp));
+        Sleep(10);
+        assert( 9 == client1->SendPacket(-1, test, 9));
+    }
+    printf("Tested PCP \n");
+    Sleep(5000);
+    return 0;
+#endif
+
+    Sleep(10);
+
+    assert( 9 == client1->SendPacket(-1, test, 9));
+
+    // test start rec after a socket has revceived data
+    // result: packets received before first startReceive is saved by the OS
+/*
+    for(int i = 0; i < 100; i++)
+    {
+        assert( 9 == client1->SendPacket(-1, test, 9));
+    }
+    Sleep(10);
+    assert( 0 == client2->StartReceiving(20));
+
+//    assert( 0 == client2->StopReceiving());
+
+    Sleep(10);
+    for(int i = 0; i < 100; i++)
+    {
+        assert( 9 == client1->SendPacket(-1, test, 9));
+    }
+
+    assert( 0 == client2->StartReceiving(20));
+
+    for(int i = 0; i < 100; i++)
+    {
+        assert( 9 == client1->SendPacket(-1, test, 9));
+    }
+*/
+    Sleep(10);
+
+    assert( 0 == client1Callback->_counterRTP);
+    assert( 1 == client2Callback->_counterRTP);
+    assert( 0 == client1Callback->_counterRTCP);
+    assert( 0 == client2Callback->_counterRTCP);
+
+    printf("Sent 1 packet on one socket \n");
+
+    char ipAddr[64];
+    char tempIpAddr[64];
+    char ipMulticastAddr[64];
+    WebRtc_UWord16 rtpPort = 0;
+    WebRtc_UWord16 rtcpPort = 0;
+    bool reusableSocket = true;
+    assert( 0 == client2->RemoteSocketInformation(ipAddr, rtpPort, rtcpPort));
+    assert( rtpPort == 1234);
+    assert( strncmp(ipAddr, localIPAddr, 16) == 0);
+
+    assert( 0 == client2->ReceiveSocketInformation(ipAddr, rtpPort, rtcpPort, ipMulticastAddr, reusableSocket));
+    assert( rtpPort == 1236);
+    assert( rtcpPort == 1237);
+    assert( strncmp(ipAddr, "0.0.0.0", 16) == 0);
+    assert( ipMulticastAddr[0] == 0);
+    assert( reusableSocket == false);
+
+    assert( 0 == client2->SendSocketInformation(ipAddr, rtpPort, rtcpPort));
+    assert( rtpPort == 1234);
+    assert( rtcpPort == 1235);
+    assert( strncmp(ipAddr,localIPAddr, 16) == 0);
+
+    const int numberOfPackets = 1000;
+    int n = 0;
+    while(n < numberOfPackets)
+    {
+        assert( 9 == client1->SendPacket(-1, test, 9));
+        assert( 9 == client2->SendPacket(-1, test, 9));
+        assert( 9 == client1->SendRTCPPacket(-1, test, 9));
+        assert( 9 == client2->SendRTCPPacket(-1, test, 9));
+        n++;
+    }
+    int loops = 0;
+    for(; loops < 100 &&
+        !(client1Callback->_counterRTP == numberOfPackets &&
+        client1Callback->_counterRTCP == numberOfPackets &&
+        client2Callback->_counterRTP == numberOfPackets+1 &&
+        client2Callback->_counterRTCP == numberOfPackets);
+        loops++)
+    {
+        Sleep(10);
+    }
+    printf("\tSent %d packets on 4 sockets in:%d ms\n", numberOfPackets, loops*10);
+
+    assert( numberOfPackets == client1Callback->_counterRTP);
+    assert( numberOfPackets+1 == client2Callback->_counterRTP);
+    assert( numberOfPackets == client1Callback->_counterRTCP);
+    assert( numberOfPackets == client2Callback->_counterRTCP);
+
+    assert( 0 == client1->StopReceiving());
+    assert( 0 == client2->StopReceiving());
+
+    printf("Tear down client 2\n");
+
+    // configure that fail
+    assert( -1 == client2->InitializeReceiveSockets(client2Callback,1234, localIPAddr)); // port in use
+    assert( !client2->ReceiveSocketsInitialized());
+    assert( 0 == client2->InitializeReceiveSockets(client2Callback,1236));
+    assert( 0 == client2->StartReceiving(20));
+
+    printf("Client 2 re-configured\n");
+
+    assert( client1->SendSocketsInitialized());
+    assert( client1->ReceiveSocketsInitialized());
+    assert( client2->SendSocketsInitialized());
+    assert( client2->ReceiveSocketsInitialized());
+
+    assert( 9 == client1->SendPacket(-1, test, 9));
+
+    // this should not be received since we dont receive in client 1
+    assert( 9 == client2->SendPacket(-1, test, 9));
+
+    Sleep(10);
+
+    assert( numberOfPackets == client1Callback->_counterRTP);
+    assert( numberOfPackets+2 == client2Callback->_counterRTP);
+    assert( numberOfPackets == client1Callback->_counterRTCP);
+    assert( numberOfPackets == client2Callback->_counterRTCP);
+    printf("\tSent 1 packet on one socket \n");
+
+    printf("Start filter test\n");
+
+    assert( 0 == client1->StartReceiving(20));
+
+    assert( 0 == client1->SetFilterPorts(1234, 1235)); // should filter out what we send
+    assert( 0 == client1->SetFilterIP(localIPAddr));
+
+    assert( 0 == client1->FilterIP(tempIpAddr));
+    assert( strncmp(tempIpAddr, localIPAddr, 16) == 0);
+
+    assert( 9 == client2->SendPacket(-1, test, 9));
+    assert( 9 == client2->SendRTCPPacket(-1, test, 9));
+
+    Sleep(10);
+
+    assert( numberOfPackets == client1Callback->_counterRTP);
+    assert( numberOfPackets+2 == client2Callback->_counterRTP);
+    assert( numberOfPackets == client1Callback->_counterRTCP);
+    assert( numberOfPackets == client2Callback->_counterRTCP);
+
+    assert( 0 == client1->SetFilterPorts(1236, 1237)); // should pass through
+
+    assert( 9 == client2->SendPacket(-1, test, 9));
+    assert( 9 == client2->SendRTCPPacket(-1, test, 9));
+    printf("\tSent 1 packet on two sockets \n");
+
+    Sleep(10);
+
+    assert( numberOfPackets+1 == client1Callback->_counterRTP);
+    assert( numberOfPackets+2 == client2Callback->_counterRTP);
+    assert( numberOfPackets+1 == client1Callback->_counterRTCP);
+    assert( numberOfPackets == client2Callback->_counterRTCP);
+
+    assert( 0 == client1->SetFilterIP("127.0.0.2"));
+
+    assert( 9 == client2->SendPacket(-1, test, 9));
+    assert( 9 == client2->SendRTCPPacket(-1, test, 9));
+    printf("\tSent 1 packet on two sockets \n");
+
+    Sleep(10);
+
+    assert( numberOfPackets+1 == client1Callback->_counterRTP);
+    assert( numberOfPackets+2 == client2Callback->_counterRTP);
+    assert( numberOfPackets+1 == client1Callback->_counterRTCP);
+    assert( numberOfPackets == client2Callback->_counterRTCP);
+
+    assert( 0 == client1->SetFilterIP(NULL));
+    assert( 0 == client1->SetFilterPorts(0, 0));
+
+    printf("Tested filter \n");
+
+    assert( 0 == client2->InitializeSourcePorts(1238, 1239));
+    assert( 9 == client2->SendPacket(-1, test, 9));
+    assert( 9 == client2->SendRTCPPacket(-1, test, 9));
+    printf("\tSent 1 packet on two sockets \n");
+
+    Sleep(10);
+
+    assert( numberOfPackets+2 == client1Callback->_counterRTP);
+    assert( numberOfPackets+2 == client2Callback->_counterRTP);
+    assert( numberOfPackets+2 == client1Callback->_counterRTCP);
+    assert( numberOfPackets == client2Callback->_counterRTCP);
+
+    assert( 0 == client1->RemoteSocketInformation(ipAddr, rtpPort, rtcpPort));
+    assert( rtpPort == 1238);
+    assert( rtcpPort == 1239);
+    assert( strncmp(ipAddr, localIPAddr, 16) == 0);
+
+    printf("Tested source port \n");
+
+    assert( 0 == client2->InitializeSourcePorts(1240 ));
+    assert( 9 == client2->SendPacket(-1, test, 9));
+    assert( 9 == client2->SendRTCPPacket(-1, test, 9));
+    printf("\tSent 1 packet on two sockets \n");
+
+    Sleep(10);
+
+    assert( 0 == client1->RemoteSocketInformation(ipAddr, rtpPort, rtcpPort));
+    assert( rtpPort == 1240);
+    assert( rtcpPort == 1241);
+
+    printf("Tested SetSendPorts source port \n");
+
+    UdpTransport::Destroy(client1);
+    UdpTransport::Destroy(client2);
+
+    printf("\n\nUdpTransport test done\n");
+
+    delete client1Callback;
+    delete client2Callback;
+
+    Sleep(5000);
+    Trace::ReturnTrace();
+};
diff --git a/src/modules/utility/OWNERS b/src/modules/utility/OWNERS
new file mode 100644
index 0000000..674c738
--- /dev/null
+++ b/src/modules/utility/OWNERS
@@ -0,0 +1,4 @@
+henrike@webrtc.org
+pwestin@webrtc.org
+asapersson@webrtc.org
+perkj@webrtc.org
\ No newline at end of file
diff --git a/src/modules/utility/interface/audio_frame_operations.h b/src/modules/utility/interface/audio_frame_operations.h
new file mode 100644
index 0000000..4705cee
--- /dev/null
+++ b/src/modules/utility/interface/audio_frame_operations.h
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_AUDIO_FRAME_OPERATIONS_H_
+#define WEBRTC_VOICE_ENGINE_AUDIO_FRAME_OPERATIONS_H_
+
+#include "typedefs.h"
+
+namespace webrtc {
+
+class AudioFrame;
+
+// TODO(andrew): consolidate this with utility.h and audio_frame_manipulator.h.
+// Change reference parameters to pointers. Consider using a namespace rather
+// than a class.
+class AudioFrameOperations {
+ public:
+  // Upmixes mono |src_audio| to stereo |dst_audio|. This is an out-of-place
+  // operation, meaning src_audio and dst_audio must point to different
+  // buffers. It is the caller's responsibility to ensure that |dst_audio| is
+  // sufficiently large.
+  static void MonoToStereo(const int16_t* src_audio, int samples_per_channel,
+                           int16_t* dst_audio);
+  // |frame.num_channels_| will be updated. This version checks for sufficient
+  // buffer size and that |num_channels_| is mono.
+  static int MonoToStereo(AudioFrame* frame);
+
+  // Downmixes stereo |src_audio| to mono |dst_audio|. This is an in-place
+  // operation, meaning |src_audio| and |dst_audio| may point to the same
+  // buffer.
+  static void StereoToMono(const int16_t* src_audio, int samples_per_channel,
+                           int16_t* dst_audio);
+  // |frame.num_channels_| will be updated. This version checks that
+  // |num_channels_| is stereo.
+  static int StereoToMono(AudioFrame* frame);
+
+  // Swap the left and right channels of |frame|. Fails silently if |frame| is
+  // not stereo.
+  static void SwapStereoChannels(AudioFrame* frame);
+
+  // Zeros out the audio and sets |frame.energy| to zero.
+  static void Mute(AudioFrame& frame);
+
+  static int Scale(float left, float right, AudioFrame& frame);
+
+  static int ScaleWithSat(float scale, AudioFrame& frame);
+};
+
+}  //  namespace webrtc
+
+#endif  // #ifndef WEBRTC_VOICE_ENGINE_AUDIO_FRAME_OPERATIONS_H_
diff --git a/src/modules/utility/interface/file_player.h b/src/modules/utility/interface/file_player.h
new file mode 100644
index 0000000..ee7be64
--- /dev/null
+++ b/src/modules/utility/interface/file_player.h
@@ -0,0 +1,110 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_INTERFACE_FILE_PLAYER_H_
+#define WEBRTC_MODULES_UTILITY_INTERFACE_FILE_PLAYER_H_
+
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "module_common_types.h"
+#include "typedefs.h"
+
+namespace webrtc {
+class FileCallback;
+
+class FilePlayer
+{
+public:
+    // The largest decoded frame size in samples (60ms with 32kHz sample rate).
+    enum {MAX_AUDIO_BUFFER_IN_SAMPLES = 60*32};
+    enum {MAX_AUDIO_BUFFER_IN_BYTES = MAX_AUDIO_BUFFER_IN_SAMPLES*2};
+
+    // Note: will return NULL for video file formats (e.g. AVI) if the flag
+    //       WEBRTC_MODULE_UTILITY_VIDEO is not defined.
+    static FilePlayer* CreateFilePlayer(const WebRtc_UWord32 instanceID,
+                                        const FileFormats fileFormat);
+
+    static void DestroyFilePlayer(FilePlayer* player);
+
+    // Read 10 ms of audio at |frequencyInHz| to |outBuffer|. |lengthInSamples|
+    // will be set to the number of samples read (not the number of samples per
+    // channel).
+    virtual int Get10msAudioFromFile(
+        int16_t* outBuffer,
+        int& lengthInSamples,
+        int frequencyInHz) = 0;
+
+    // Register callback for receiving file playing notifications.
+    virtual WebRtc_Word32 RegisterModuleFileCallback(
+        FileCallback* callback) = 0;
+
+    // API for playing audio from fileName to channel.
+    // Note: codecInst is used for pre-encoded files.
+    virtual WebRtc_Word32 StartPlayingFile(
+        const char* fileName,
+        bool loop,
+        WebRtc_UWord32 startPosition,
+        float volumeScaling,
+        WebRtc_UWord32 notification,
+        WebRtc_UWord32 stopPosition = 0,
+        const CodecInst* codecInst = NULL) = 0;
+
+    // Note: codecInst is used for pre-encoded files.
+    virtual WebRtc_Word32 StartPlayingFile(
+        InStream& sourceStream,
+        WebRtc_UWord32 startPosition,
+        float volumeScaling,
+        WebRtc_UWord32 notification,
+        WebRtc_UWord32 stopPosition = 0,
+        const CodecInst* codecInst = NULL) = 0;
+
+    virtual WebRtc_Word32 StopPlayingFile() = 0;
+
+    virtual bool IsPlayingFile() const = 0;
+
+    virtual WebRtc_Word32 GetPlayoutPosition(WebRtc_UWord32& durationMs) = 0;
+
+    // Set audioCodec to the currently used audio codec.
+    virtual WebRtc_Word32 AudioCodec(CodecInst& audioCodec) const = 0;
+
+    virtual WebRtc_Word32 Frequency() const = 0;
+
+    // Note: scaleFactor is in the range [0.0 - 2.0]
+    virtual WebRtc_Word32 SetAudioScaling(float scaleFactor) = 0;
+
+    // Return the time in ms until next video frame should be pulled (by
+    // calling GetVideoFromFile(..)).
+    // Note: this API reads one video frame from file. This means that it should
+    //       be called exactly once per GetVideoFromFile(..) API call.
+    virtual WebRtc_Word32 TimeUntilNextVideoFrame() { return -1;}
+
+    virtual WebRtc_Word32 StartPlayingVideoFile(
+        const char* /*fileName*/,
+        bool /*loop*/,
+        bool /*videoOnly*/) { return -1;}
+
+    virtual WebRtc_Word32 video_codec_info(VideoCodec& /*videoCodec*/) const
+    {return -1;}
+
+    virtual WebRtc_Word32 GetVideoFromFile(VideoFrame& /*videoFrame*/)
+    { return -1;}
+
+    // Same as GetVideoFromFile(). videoFrame will have the resolution specified
+    // by the width outWidth and height outHeight in pixels.
+    virtual WebRtc_Word32 GetVideoFromFile(VideoFrame& /*videoFrame*/,
+                                           const WebRtc_UWord32 /*outWidth*/,
+                                           const WebRtc_UWord32 /*outHeight*/)
+    {return -1;}
+protected:
+    virtual ~FilePlayer() {}
+
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UTILITY_INTERFACE_FILE_PLAYER_H_
diff --git a/src/modules/utility/interface/file_recorder.h b/src/modules/utility/interface/file_recorder.h
new file mode 100644
index 0000000..eb460ae
--- /dev/null
+++ b/src/modules/utility/interface/file_recorder.h
@@ -0,0 +1,88 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_INTERFACE_FILE_RECORDER_H_
+#define WEBRTC_MODULES_UTILITY_INTERFACE_FILE_RECORDER_H_
+
+#include "audio_coding_module_typedefs.h"
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "media_file_defines.h"
+#include "module_common_types.h"
+#include "tick_util.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+class FileRecorder
+{
+public:
+
+    // Note: will return NULL for video file formats (e.g. AVI) if the flag
+    //       WEBRTC_MODULE_UTILITY_VIDEO is not defined.
+    static FileRecorder* CreateFileRecorder(const WebRtc_UWord32 instanceID,
+                                            const FileFormats fileFormat);
+
+    static void DestroyFileRecorder(FileRecorder* recorder);
+
+    virtual WebRtc_Word32 RegisterModuleFileCallback(
+        FileCallback* callback) = 0;
+
+    virtual FileFormats RecordingFileFormat() const = 0;
+
+    virtual WebRtc_Word32 StartRecordingAudioFile(
+        const char* fileName,
+        const CodecInst& codecInst,
+        WebRtc_UWord32 notification,
+        ACMAMRPackingFormat amrFormat = AMRFileStorage) = 0;
+
+    virtual WebRtc_Word32 StartRecordingAudioFile(
+        OutStream& destStream,
+        const CodecInst& codecInst,
+        WebRtc_UWord32 notification,
+        ACMAMRPackingFormat amrFormat = AMRFileStorage) = 0;
+
+    // Stop recording.
+    // Note: this API is for both audio and video.
+    virtual WebRtc_Word32 StopRecording() = 0;
+
+    // Return true if recording.
+    // Note: this API is for both audio and video.
+    virtual bool IsRecording() const = 0;
+
+    virtual WebRtc_Word32 codec_info(CodecInst& codecInst) const = 0;
+
+    // Write frame to file. Frame should contain 10ms of un-ecoded audio data.
+    virtual WebRtc_Word32 RecordAudioToFile(
+        const AudioFrame& frame,
+        const TickTime* playoutTS = NULL) = 0;
+
+    // Open/create the file specified by fileName for writing audio/video data
+    // (relative path is allowed). audioCodecInst specifies the encoding of the
+    // audio data. videoCodecInst specifies the encoding of the video data.
+    // Only video data will be recorded if videoOnly is true. amrFormat
+    // specifies the amr/amrwb storage format.
+    // Note: the file format is AVI.
+    virtual WebRtc_Word32 StartRecordingVideoFile(
+        const char* fileName,
+        const CodecInst& audioCodecInst,
+        const VideoCodec& videoCodecInst,
+        ACMAMRPackingFormat amrFormat = AMRFileStorage,
+        bool videoOnly = false) = 0;
+
+    // Record the video frame in videoFrame to AVI file.
+    virtual WebRtc_Word32 RecordVideoToFile(const VideoFrame& videoFrame) = 0;
+
+protected:
+    virtual ~FileRecorder() {}
+
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UTILITY_INTERFACE_FILE_RECORDER_H_
diff --git a/src/modules/utility/interface/process_thread.h b/src/modules/utility/interface/process_thread.h
new file mode 100644
index 0000000..6c51404
--- /dev/null
+++ b/src/modules/utility/interface/process_thread.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_INTERFACE_PROCESS_THREAD_H_
+#define WEBRTC_MODULES_UTILITY_INTERFACE_PROCESS_THREAD_H_
+
+#include "typedefs.h"
+
+namespace webrtc {
+class Module;
+
+class ProcessThread
+{
+public:
+    static ProcessThread* CreateProcessThread();
+    static void DestroyProcessThread(ProcessThread* module);
+
+    virtual WebRtc_Word32 Start() = 0;
+    virtual WebRtc_Word32 Stop() = 0;
+
+    virtual WebRtc_Word32 RegisterModule(const Module* module) = 0;
+    virtual WebRtc_Word32 DeRegisterModule(const Module* module) = 0;
+protected:
+    virtual ~ProcessThread();
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UTILITY_INTERFACE_PROCESS_THREAD_H_
diff --git a/src/modules/utility/interface/rtp_dump.h b/src/modules/utility/interface/rtp_dump.h
new file mode 100644
index 0000000..9291a1c
--- /dev/null
+++ b/src/modules/utility/interface/rtp_dump.h
@@ -0,0 +1,52 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file implements a class that writes a stream of RTP and RTCP packets
+// to a file according to the format specified by rtpplay. See
+// http://www.cs.columbia.edu/irt/software/rtptools/.
+// Notes: supported platforms are Windows, Linux and Mac OSX
+
+#ifndef WEBRTC_MODULES_UTILITY_INTERFACE_RTP_DUMP_H_
+#define WEBRTC_MODULES_UTILITY_INTERFACE_RTP_DUMP_H_
+
+#include "typedefs.h"
+#include "file_wrapper.h"
+
+namespace webrtc {
+class RtpDump
+{
+public:
+    // Factory method.
+    static RtpDump* CreateRtpDump();
+
+    // Delete function. Destructor disabled.
+    static void DestroyRtpDump(RtpDump* object);
+
+    // Open the file fileNameUTF8 for writing RTP/RTCP packets.
+    // Note: this API also adds the rtpplay header.
+    virtual WebRtc_Word32 Start(const char* fileNameUTF8) = 0;
+
+    // Close the existing file. No more packets will be recorded.
+    virtual WebRtc_Word32 Stop() = 0;
+
+    // Return true if a file is open for recording RTP/RTCP packets.
+    virtual bool IsActive() const = 0;
+
+    // Writes the RTP/RTCP packet in packet with length packetLength in bytes.
+    // Note: packet should contain the RTP/RTCP part of the packet. I.e. the
+    // first bytes of packet should be the RTP/RTCP header.
+    virtual WebRtc_Word32 DumpPacket(const WebRtc_UWord8* packet,
+                                     WebRtc_UWord16 packetLength) = 0;
+
+protected:
+    virtual ~RtpDump();
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UTILITY_INTERFACE_RTP_DUMP_H_
diff --git a/src/modules/utility/source/audio_frame_operations.cc b/src/modules/utility/source/audio_frame_operations.cc
new file mode 100644
index 0000000..2e13de4
--- /dev/null
+++ b/src/modules/utility/source/audio_frame_operations.cc
@@ -0,0 +1,111 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "audio_frame_operations.h"
+#include "module_common_types.h"
+
+namespace webrtc {
+
+void AudioFrameOperations::MonoToStereo(const int16_t* src_audio,
+                                        int samples_per_channel,
+                                        int16_t* dst_audio) {
+  for (int i = 0; i < samples_per_channel; i++) {
+    dst_audio[2 * i] = src_audio[i];
+    dst_audio[2 * i + 1] = src_audio[i];
+  }
+}
+
+int AudioFrameOperations::MonoToStereo(AudioFrame* frame) {
+  if (frame->num_channels_ != 1) {
+    return -1;
+  }
+  if ((frame->samples_per_channel_ * 2) >= AudioFrame::kMaxDataSizeSamples) {
+    // Not enough memory to expand from mono to stereo.
+    return -1;
+  }
+
+  int16_t data_copy[AudioFrame::kMaxDataSizeSamples];
+  memcpy(data_copy, frame->data_,
+         sizeof(int16_t) * frame->samples_per_channel_);
+  MonoToStereo(data_copy, frame->samples_per_channel_, frame->data_);
+  frame->num_channels_ = 2;
+
+  return 0;
+}
+
+void AudioFrameOperations::StereoToMono(const int16_t* src_audio,
+                                        int samples_per_channel,
+                                        int16_t* dst_audio) {
+  for (int i = 0; i < samples_per_channel; i++) {
+    dst_audio[i] = (src_audio[2 * i] + src_audio[2 * i + 1]) >> 1;
+  }
+}
+
+int AudioFrameOperations::StereoToMono(AudioFrame* frame) {
+  if (frame->num_channels_ != 2) {
+    return -1;
+  }
+
+  StereoToMono(frame->data_, frame->samples_per_channel_, frame->data_);
+  frame->num_channels_ = 1;
+
+  return 0;
+}
+
+void AudioFrameOperations::SwapStereoChannels(AudioFrame* frame) {
+  if (frame->num_channels_ != 2) return;
+
+  for (int i = 0; i < frame->samples_per_channel_ * 2; i += 2) {
+    int16_t temp_data = frame->data_[i];
+    frame->data_[i] = frame->data_[i + 1];
+    frame->data_[i + 1] = temp_data;
+  }
+}
+
+void AudioFrameOperations::Mute(AudioFrame& frame) {
+  memset(frame.data_, 0, sizeof(int16_t) *
+      frame.samples_per_channel_ * frame.num_channels_);
+  frame.energy_ = 0;
+}
+
+int AudioFrameOperations::Scale(float left, float right, AudioFrame& frame) {
+  if (frame.num_channels_ != 2) {
+    return -1;
+  }
+
+  for (int i = 0; i < frame.samples_per_channel_; i++) {
+    frame.data_[2 * i] =
+        static_cast<int16_t>(left * frame.data_[2 * i]);
+    frame.data_[2 * i + 1] =
+        static_cast<int16_t>(right * frame.data_[2 * i + 1]);
+  }
+  return 0;
+}
+
+int AudioFrameOperations::ScaleWithSat(float scale, AudioFrame& frame) {
+  int32_t temp_data = 0;
+
+  // Ensure that the output result is saturated [-32768, +32767].
+  for (int i = 0; i < frame.samples_per_channel_ * frame.num_channels_;
+       i++) {
+    temp_data = static_cast<int32_t>(scale * frame.data_[i]);
+    if (temp_data < -32768) {
+      frame.data_[i] = -32768;
+    } else if (temp_data > 32767) {
+      frame.data_[i] = 32767;
+    } else {
+      frame.data_[i] = static_cast<int16_t>(temp_data);
+    }
+  }
+  return 0;
+}
+
+}  //  namespace webrtc
+
diff --git a/src/modules/utility/source/audio_frame_operations_unittest.cc b/src/modules/utility/source/audio_frame_operations_unittest.cc
new file mode 100644
index 0000000..c11dce3
--- /dev/null
+++ b/src/modules/utility/source/audio_frame_operations_unittest.cc
@@ -0,0 +1,226 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gtest/gtest.h"
+
+#include "audio_frame_operations.h"
+#include "module_common_types.h"
+
+namespace webrtc {
+namespace {
+
+class AudioFrameOperationsTest : public ::testing::Test {
+ protected:
+  AudioFrameOperationsTest() {
+    // Set typical values.
+    frame_.samples_per_channel_ = 320;
+    frame_.num_channels_ = 2;
+  }
+
+  AudioFrame frame_;
+};
+
+void SetFrameData(AudioFrame* frame, int16_t left, int16_t right) {
+  for (int i = 0; i < frame->samples_per_channel_ * 2; i += 2) {
+    frame->data_[i] = left;
+    frame->data_[i + 1] = right;
+  }
+}
+
+void SetFrameData(AudioFrame* frame, int16_t data) {
+  for (int i = 0; i < frame->samples_per_channel_; i++) {
+    frame->data_[i] = data;
+  }
+}
+
+void VerifyFramesAreEqual(const AudioFrame& frame1, const AudioFrame& frame2) {
+  EXPECT_EQ(frame1.num_channels_, frame2.num_channels_);
+  EXPECT_EQ(frame1.samples_per_channel_,
+            frame2.samples_per_channel_);
+
+  for (int i = 0; i < frame1.samples_per_channel_ * frame1.num_channels_;
+      i++) {
+    EXPECT_EQ(frame1.data_[i], frame2.data_[i]);
+  }
+}
+
+TEST_F(AudioFrameOperationsTest, MonoToStereoFailsWithBadParameters) {
+  EXPECT_EQ(-1, AudioFrameOperations::MonoToStereo(&frame_));
+
+  frame_.samples_per_channel_ = AudioFrame::kMaxDataSizeSamples;
+  frame_.num_channels_ = 1;
+  EXPECT_EQ(-1, AudioFrameOperations::MonoToStereo(&frame_));
+}
+
+TEST_F(AudioFrameOperationsTest, MonoToStereoSucceeds) {
+  frame_.num_channels_ = 1;
+  SetFrameData(&frame_, 1);
+  AudioFrame temp_frame = frame_;
+  EXPECT_EQ(0, AudioFrameOperations::MonoToStereo(&frame_));
+
+  AudioFrame stereo_frame;
+  stereo_frame.samples_per_channel_ = 320;
+  stereo_frame.num_channels_ = 2;
+  SetFrameData(&stereo_frame, 1, 1);
+  VerifyFramesAreEqual(stereo_frame, frame_);
+
+  SetFrameData(&frame_, 0);
+  AudioFrameOperations::MonoToStereo(temp_frame.data_,
+                                     frame_.samples_per_channel_,
+                                     frame_.data_);
+  frame_.num_channels_ = 2;  // Need to set manually.
+  VerifyFramesAreEqual(stereo_frame, frame_);
+}
+
+TEST_F(AudioFrameOperationsTest, StereoToMonoFailsWithBadParameters) {
+  frame_.num_channels_ = 1;
+  EXPECT_EQ(-1, AudioFrameOperations::StereoToMono(&frame_));
+}
+
+TEST_F(AudioFrameOperationsTest, StereoToMonoSucceeds) {
+  SetFrameData(&frame_, 4, 2);
+  AudioFrame temp_frame = frame_;
+  EXPECT_EQ(0, AudioFrameOperations::StereoToMono(&frame_));
+
+  AudioFrame mono_frame;
+  mono_frame.samples_per_channel_ = 320;
+  mono_frame.num_channels_ = 1;
+  SetFrameData(&mono_frame, 3);
+  VerifyFramesAreEqual(mono_frame, frame_);
+
+  SetFrameData(&frame_, 0);
+  AudioFrameOperations::StereoToMono(temp_frame.data_,
+                                     frame_.samples_per_channel_,
+                                     frame_.data_);
+  frame_.num_channels_ = 1;  // Need to set manually.
+  VerifyFramesAreEqual(mono_frame, frame_);
+}
+
+TEST_F(AudioFrameOperationsTest, StereoToMonoDoesNotWrapAround) {
+  SetFrameData(&frame_, -32768, -32768);
+  EXPECT_EQ(0, AudioFrameOperations::StereoToMono(&frame_));
+
+  AudioFrame mono_frame;
+  mono_frame.samples_per_channel_ = 320;
+  mono_frame.num_channels_ = 1;
+  SetFrameData(&mono_frame, -32768);
+  VerifyFramesAreEqual(mono_frame, frame_);
+}
+
+TEST_F(AudioFrameOperationsTest, SwapStereoChannelsSucceedsOnStereo) {
+  SetFrameData(&frame_, 0, 1);
+
+  AudioFrame swapped_frame;
+  swapped_frame.samples_per_channel_ = 320;
+  swapped_frame.num_channels_ = 2;
+  SetFrameData(&swapped_frame, 1, 0);
+
+  AudioFrameOperations::SwapStereoChannels(&frame_);
+  VerifyFramesAreEqual(swapped_frame, frame_);
+}
+
+TEST_F(AudioFrameOperationsTest, SwapStereoChannelsFailsOnMono) {
+  frame_.num_channels_ = 1;
+  // Set data to "stereo", despite it being a mono frame.
+  SetFrameData(&frame_, 0, 1);
+
+  AudioFrame orig_frame = frame_;
+  AudioFrameOperations::SwapStereoChannels(&frame_);
+  // Verify that no swap occurred.
+  VerifyFramesAreEqual(orig_frame, frame_);
+}
+
+TEST_F(AudioFrameOperationsTest, MuteSucceeds) {
+  SetFrameData(&frame_, 1000, 1000);
+  frame_.energy_ = 1000 * 1000 * frame_.samples_per_channel_ *
+      frame_.num_channels_;
+  AudioFrameOperations::Mute(frame_);
+
+  AudioFrame muted_frame;
+  muted_frame.samples_per_channel_ = 320;
+  muted_frame.num_channels_ = 2;
+  SetFrameData(&muted_frame, 0, 0);
+  muted_frame.energy_ = 0;
+  VerifyFramesAreEqual(muted_frame, frame_);
+  EXPECT_EQ(muted_frame.energy_, frame_.energy_);
+}
+
+// TODO(andrew): should not allow negative scales.
+TEST_F(AudioFrameOperationsTest, DISABLED_ScaleFailsWithBadParameters) {
+  frame_.num_channels_ = 1;
+  EXPECT_EQ(-1, AudioFrameOperations::Scale(1.0, 1.0, frame_));
+
+  frame_.num_channels_ = 3;
+  EXPECT_EQ(-1, AudioFrameOperations::Scale(1.0, 1.0, frame_));
+
+  frame_.num_channels_ = 2;
+  EXPECT_EQ(-1, AudioFrameOperations::Scale(-1.0, 1.0, frame_));
+  EXPECT_EQ(-1, AudioFrameOperations::Scale(1.0, -1.0, frame_));
+}
+
+// TODO(andrew): fix the wraparound bug. We should always saturate.
+TEST_F(AudioFrameOperationsTest, DISABLED_ScaleDoesNotWrapAround) {
+  SetFrameData(&frame_, 4000, -4000);
+  EXPECT_EQ(0, AudioFrameOperations::Scale(10.0, 10.0, frame_));
+
+  AudioFrame clipped_frame;
+  clipped_frame.samples_per_channel_ = 320;
+  clipped_frame.num_channels_ = 2;
+  SetFrameData(&clipped_frame, 32767, -32768);
+  VerifyFramesAreEqual(clipped_frame, frame_);
+}
+
+TEST_F(AudioFrameOperationsTest, ScaleSucceeds) {
+  SetFrameData(&frame_, 1, -1);
+  EXPECT_EQ(0, AudioFrameOperations::Scale(2.0, 3.0, frame_));
+
+  AudioFrame scaled_frame;
+  scaled_frame.samples_per_channel_ = 320;
+  scaled_frame.num_channels_ = 2;
+  SetFrameData(&scaled_frame, 2, -3);
+  VerifyFramesAreEqual(scaled_frame, frame_);
+}
+
+// TODO(andrew): should fail with a negative scale.
+TEST_F(AudioFrameOperationsTest, DISABLED_ScaleWithSatFailsWithBadParameters) {
+  EXPECT_EQ(-1, AudioFrameOperations::ScaleWithSat(-1.0, frame_));
+}
+
+TEST_F(AudioFrameOperationsTest, ScaleWithSatDoesNotWrapAround) {
+  frame_.num_channels_ = 1;
+  SetFrameData(&frame_, 4000);
+  EXPECT_EQ(0, AudioFrameOperations::ScaleWithSat(10.0, frame_));
+
+  AudioFrame clipped_frame;
+  clipped_frame.samples_per_channel_ = 320;
+  clipped_frame.num_channels_ = 1;
+  SetFrameData(&clipped_frame, 32767);
+  VerifyFramesAreEqual(clipped_frame, frame_);
+
+  SetFrameData(&frame_, -4000);
+  EXPECT_EQ(0, AudioFrameOperations::ScaleWithSat(10.0, frame_));
+  SetFrameData(&clipped_frame, -32768);
+  VerifyFramesAreEqual(clipped_frame, frame_);
+}
+
+TEST_F(AudioFrameOperationsTest, ScaleWithSatSucceeds) {
+  frame_.num_channels_ = 1;
+  SetFrameData(&frame_, 1);
+  EXPECT_EQ(0, AudioFrameOperations::ScaleWithSat(2.0, frame_));
+
+  AudioFrame scaled_frame;
+  scaled_frame.samples_per_channel_ = 320;
+  scaled_frame.num_channels_ = 1;
+  SetFrameData(&scaled_frame, 2);
+  VerifyFramesAreEqual(scaled_frame, frame_);
+}
+
+}  // namespace
+}  // namespace webrtc
diff --git a/src/modules/utility/source/coder.cc b/src/modules/utility/source/coder.cc
new file mode 100644
index 0000000..31d528d
--- /dev/null
+++ b/src/modules/utility/source/coder.cc
@@ -0,0 +1,127 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "coder.h"
+#include "common_types.h"
+#include "module_common_types.h"
+
+// OS independent case insensitive string comparison.
+#ifdef WIN32
+    #define STR_CASE_CMP(x,y) ::_stricmp(x,y)
+#else
+    #define STR_CASE_CMP(x,y) ::strcasecmp(x,y)
+#endif
+
+namespace webrtc {
+AudioCoder::AudioCoder(WebRtc_UWord32 instanceID)
+    : _acm(AudioCodingModule::Create(instanceID)),
+      _receiveCodec(),
+      _encodeTimestamp(0),
+      _encodedData(NULL),
+      _encodedLengthInBytes(0),
+      _decodeTimestamp(0)
+{
+    _acm->InitializeSender();
+    _acm->InitializeReceiver();
+    _acm->RegisterTransportCallback(this);
+}
+
+AudioCoder::~AudioCoder()
+{
+    AudioCodingModule::Destroy(_acm);
+}
+
+WebRtc_Word32 AudioCoder::SetEncodeCodec(const CodecInst& codecInst,
+					 ACMAMRPackingFormat amrFormat)
+{
+    if(_acm->RegisterSendCodec((CodecInst&)codecInst) == -1)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 AudioCoder::SetDecodeCodec(const CodecInst& codecInst,
+					 ACMAMRPackingFormat amrFormat)
+{
+    if(_acm->RegisterReceiveCodec((CodecInst&)codecInst) == -1)
+    {
+        return -1;
+    }
+    memcpy(&_receiveCodec,&codecInst,sizeof(CodecInst));
+    return 0;
+}
+
+WebRtc_Word32 AudioCoder::Decode(AudioFrame& decodedAudio,
+				 WebRtc_UWord32 sampFreqHz,
+				 const WebRtc_Word8*  incomingPayload,
+				 WebRtc_Word32  payloadLength)
+{
+    if (payloadLength > 0)
+    {
+        const WebRtc_UWord8 payloadType = _receiveCodec.pltype;
+        _decodeTimestamp += _receiveCodec.pacsize;
+        if(_acm->IncomingPayload((const WebRtc_UWord8*) incomingPayload,
+                                 payloadLength,
+                                 payloadType,
+                                 _decodeTimestamp) == -1)
+        {
+            return -1;
+        }
+    }
+    return _acm->PlayoutData10Ms((WebRtc_UWord16)sampFreqHz,
+				 (AudioFrame&)decodedAudio);
+}
+
+WebRtc_Word32 AudioCoder::PlayoutData(AudioFrame& decodedAudio,
+				      WebRtc_UWord16& sampFreqHz)
+{
+    return _acm->PlayoutData10Ms(sampFreqHz, (AudioFrame&)decodedAudio);
+}
+
+WebRtc_Word32 AudioCoder::Encode(const AudioFrame& audio,
+				 WebRtc_Word8* encodedData,
+				 WebRtc_UWord32& encodedLengthInBytes)
+{
+    // Fake a timestamp in case audio doesn't contain a correct timestamp.
+    // Make a local copy of the audio frame since audio is const
+    AudioFrame audioFrame = audio;
+    audioFrame.timestamp_ = _encodeTimestamp;
+    _encodeTimestamp += audioFrame.samples_per_channel_;
+
+    // For any codec with a frame size that is longer than 10 ms the encoded
+    // length in bytes should be zero until a a full frame has been encoded.
+    _encodedLengthInBytes = 0;
+    if(_acm->Add10MsData((AudioFrame&)audioFrame) == -1)
+    {
+        return -1;
+    }
+    _encodedData = encodedData;
+    if(_acm->Process() == -1)
+    {
+        return -1;
+    }
+    encodedLengthInBytes = _encodedLengthInBytes;
+    return 0;
+}
+
+WebRtc_Word32 AudioCoder::SendData(
+    FrameType /* frameType */,
+    WebRtc_UWord8   /* payloadType */,
+    WebRtc_UWord32  /* timeStamp */,
+    const WebRtc_UWord8*  payloadData,
+    WebRtc_UWord16  payloadSize,
+    const RTPFragmentationHeader* /* fragmentation*/)
+{
+    memcpy(_encodedData,payloadData,sizeof(WebRtc_UWord8) * payloadSize);
+    _encodedLengthInBytes = payloadSize;
+    return 0;
+}
+} // namespace webrtc
diff --git a/src/modules/utility/source/coder.h b/src/modules/utility/source/coder.h
new file mode 100644
index 0000000..e7cbfb8
--- /dev/null
+++ b/src/modules/utility/source/coder.h
@@ -0,0 +1,67 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_CODER_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_CODER_H_
+
+#include "audio_coding_module.h"
+#include "common_types.h"
+#include "typedefs.h"
+
+namespace webrtc {
+class AudioFrame;
+
+class AudioCoder : public AudioPacketizationCallback
+{
+public:
+    AudioCoder(WebRtc_UWord32 instanceID);
+    ~AudioCoder();
+
+    WebRtc_Word32 SetEncodeCodec(
+        const CodecInst& codecInst,
+	ACMAMRPackingFormat amrFormat = AMRBandwidthEfficient);
+
+    WebRtc_Word32 SetDecodeCodec(
+        const CodecInst& codecInst,
+	ACMAMRPackingFormat amrFormat = AMRBandwidthEfficient);
+
+    WebRtc_Word32 Decode(AudioFrame& decodedAudio, WebRtc_UWord32 sampFreqHz,
+			 const WebRtc_Word8* incomingPayload,
+			 WebRtc_Word32  payloadLength);
+
+    WebRtc_Word32 PlayoutData(AudioFrame& decodedAudio,
+			      WebRtc_UWord16& sampFreqHz);
+
+    WebRtc_Word32 Encode(const AudioFrame& audio,
+                         WebRtc_Word8*   encodedData,
+			 WebRtc_UWord32& encodedLengthInBytes);
+
+protected:
+    virtual WebRtc_Word32 SendData(FrameType frameType,
+				   WebRtc_UWord8 payloadType,
+				   WebRtc_UWord32 timeStamp,
+				   const WebRtc_UWord8* payloadData,
+				   WebRtc_UWord16 payloadSize,
+				   const RTPFragmentationHeader* fragmentation);
+
+private:
+    AudioCodingModule* _acm;
+
+    CodecInst _receiveCodec;
+
+    WebRtc_UWord32 _encodeTimestamp;
+    WebRtc_Word8*  _encodedData;
+    WebRtc_UWord32 _encodedLengthInBytes;
+
+    WebRtc_UWord32 _decodeTimestamp;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UTILITY_SOURCE_CODER_H_
diff --git a/src/modules/utility/source/file_player_impl.cc b/src/modules/utility/source/file_player_impl.cc
new file mode 100644
index 0000000..574fd7e
--- /dev/null
+++ b/src/modules/utility/source/file_player_impl.cc
@@ -0,0 +1,722 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "file_player_impl.h"
+#include "trace.h"
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+    #include "cpu_wrapper.h"
+    #include "frame_scaler.h"
+    #include "tick_util.h"
+    #include "video_coder.h"
+#endif
+
+// OS independent case insensitive string comparison.
+#ifdef WIN32
+    #define STR_CASE_CMP(x,y) ::_stricmp(x,y)
+#else
+    #define STR_CASE_CMP(x,y) ::strcasecmp(x,y)
+#endif
+
+namespace webrtc {
+FilePlayer* FilePlayer::CreateFilePlayer(WebRtc_UWord32 instanceID,
+                                         FileFormats fileFormat)
+{
+    switch(fileFormat)
+    {
+    case kFileFormatWavFile:
+    case kFileFormatCompressedFile:
+    case kFileFormatPreencodedFile:
+    case kFileFormatPcm16kHzFile:
+    case kFileFormatPcm8kHzFile:
+    case kFileFormatPcm32kHzFile:
+        // audio formats
+        return new FilePlayerImpl(instanceID, fileFormat);
+    case kFileFormatAviFile:
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+        return new VideoFilePlayerImpl(instanceID, fileFormat);
+#else
+        WEBRTC_TRACE(kTraceError, kTraceFile, -1,
+                     "Invalid file format: %d", kFileFormatAviFile);
+        assert(false);
+        return NULL;
+#endif
+    }
+    assert(false);
+    return NULL;
+}
+
+void FilePlayer::DestroyFilePlayer(FilePlayer* player)
+{
+    delete player;
+}
+
+FilePlayerImpl::FilePlayerImpl(const WebRtc_UWord32 instanceID,
+                               const FileFormats fileFormat)
+    : _instanceID(instanceID),
+      _fileFormat(fileFormat),
+      _fileModule(*MediaFile::CreateMediaFile(instanceID)),
+      _decodedLengthInMS(0),
+      _audioDecoder(instanceID),
+      _codec(),
+      _numberOf10MsPerFrame(0),
+      _numberOf10MsInDecoder(0),
+      _resampler(),
+      _scaling(1.0)
+{
+    _codec.plfreq = 0;
+}
+
+FilePlayerImpl::~FilePlayerImpl()
+{
+    MediaFile::DestroyMediaFile(&_fileModule);
+}
+
+WebRtc_Word32 FilePlayerImpl::Frequency() const
+{
+    if(_codec.plfreq == 0)
+    {
+        return -1;
+    }
+    // Make sure that sample rate is 8,16 or 32 kHz. E.g. WAVE files may have
+    // other sampling rates.
+    if(_codec.plfreq == 11000)
+    {
+        return 16000;
+    }
+    else if(_codec.plfreq == 22000)
+    {
+        return 32000;
+    }
+    else if(_codec.plfreq == 44000)
+    {
+        return 32000;
+    }
+    else if(_codec.plfreq == 48000)
+    {
+        return 32000;
+    }
+    else
+    {
+        return _codec.plfreq;
+    }
+}
+
+WebRtc_Word32 FilePlayerImpl::AudioCodec(CodecInst& audioCodec) const
+{
+    audioCodec = _codec;
+    return 0;
+}
+
+WebRtc_Word32 FilePlayerImpl::Get10msAudioFromFile(
+    int16_t* outBuffer,
+    int& lengthInSamples,
+    int frequencyInHz)
+{
+    if(_codec.plfreq == 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, _instanceID,
+           "FilePlayerImpl::Get10msAudioFromFile() playing not started!\
+ codecFreq = %d, wantedFreq = %d",
+           _codec.plfreq, frequencyInHz);
+        return -1;
+    }
+
+    AudioFrame unresampledAudioFrame;
+    if(STR_CASE_CMP(_codec.plname, "L16") == 0)
+    {
+        unresampledAudioFrame.sample_rate_hz_ = _codec.plfreq;
+
+        // L16 is un-encoded data. Just pull 10 ms.
+        WebRtc_UWord32 lengthInBytes =
+            sizeof(unresampledAudioFrame.data_);
+        if (_fileModule.PlayoutAudioData(
+                (WebRtc_Word8*)unresampledAudioFrame.data_,
+                lengthInBytes) == -1)
+        {
+            // End of file reached.
+            return -1;
+        }
+        if(lengthInBytes == 0)
+        {
+            lengthInSamples = 0;
+            return 0;
+        }
+        // One sample is two bytes.
+        unresampledAudioFrame.samples_per_channel_ =
+            (WebRtc_UWord16)lengthInBytes >> 1;
+
+    }else {
+        // Decode will generate 10 ms of audio data. PlayoutAudioData(..)
+        // expects a full frame. If the frame size is larger than 10 ms,
+        // PlayoutAudioData(..) data should be called proportionally less often.
+        WebRtc_Word16 encodedBuffer[MAX_AUDIO_BUFFER_IN_SAMPLES];
+        WebRtc_UWord32 encodedLengthInBytes = 0;
+        if(++_numberOf10MsInDecoder >= _numberOf10MsPerFrame)
+        {
+            _numberOf10MsInDecoder = 0;
+            WebRtc_UWord32 bytesFromFile = sizeof(encodedBuffer);
+            if (_fileModule.PlayoutAudioData((WebRtc_Word8*)encodedBuffer,
+                                             bytesFromFile) == -1)
+            {
+                // End of file reached.
+                return -1;
+            }
+            encodedLengthInBytes = bytesFromFile;
+        }
+        if(_audioDecoder.Decode(unresampledAudioFrame,frequencyInHz,
+                                (WebRtc_Word8*)encodedBuffer,
+                                encodedLengthInBytes) == -1)
+        {
+            return -1;
+        }
+    }
+
+    int outLen = 0;
+    if(_resampler.ResetIfNeeded(unresampledAudioFrame.sample_rate_hz_,
+                                frequencyInHz, kResamplerSynchronous))
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, _instanceID,
+           "FilePlayerImpl::Get10msAudioFromFile() unexpected codec");
+
+        // New sampling frequency. Update state.
+        outLen = frequencyInHz / 100;
+        memset(outBuffer, 0, outLen * sizeof(WebRtc_Word16));
+        return 0;
+    }
+    _resampler.Push(unresampledAudioFrame.data_,
+                    unresampledAudioFrame.samples_per_channel_,
+                    outBuffer,
+                    MAX_AUDIO_BUFFER_IN_SAMPLES,
+                    outLen);
+
+    lengthInSamples = outLen;
+
+    if(_scaling != 1.0)
+    {
+        for (int i = 0;i < outLen; i++)
+        {
+            outBuffer[i] = (WebRtc_Word16)(outBuffer[i] * _scaling);
+        }
+    }
+    _decodedLengthInMS += 10;
+    return 0;
+}
+
+WebRtc_Word32 FilePlayerImpl::RegisterModuleFileCallback(FileCallback* callback)
+{
+    return _fileModule.SetModuleFileCallback(callback);
+}
+
+WebRtc_Word32 FilePlayerImpl::SetAudioScaling(float scaleFactor)
+{
+    if((scaleFactor >= 0)&&(scaleFactor <= 2.0))
+    {
+        _scaling = scaleFactor;
+        return 0;
+    }
+    WEBRTC_TRACE(kTraceWarning, kTraceVoice, _instanceID,
+              "FilePlayerImpl::SetAudioScaling() not allowed scale factor");
+    return -1;
+}
+
+WebRtc_Word32 FilePlayerImpl::StartPlayingFile(const char* fileName,
+                                               bool loop,
+                                               WebRtc_UWord32 startPosition,
+                                               float volumeScaling,
+                                               WebRtc_UWord32 notification,
+                                               WebRtc_UWord32 stopPosition,
+                                               const CodecInst* codecInst)
+{
+    if (_fileFormat == kFileFormatPcm16kHzFile ||
+        _fileFormat == kFileFormatPcm8kHzFile||
+        _fileFormat == kFileFormatPcm32kHzFile )
+    {
+        CodecInst codecInstL16;
+        strncpy(codecInstL16.plname,"L16",32);
+        codecInstL16.pltype   = 93;
+        codecInstL16.channels = 1;
+
+        if (_fileFormat == kFileFormatPcm8kHzFile)
+        {
+            codecInstL16.rate     = 128000;
+            codecInstL16.plfreq   = 8000;
+            codecInstL16.pacsize  = 80;
+
+        } else if(_fileFormat == kFileFormatPcm16kHzFile)
+        {
+            codecInstL16.rate     = 256000;
+            codecInstL16.plfreq   = 16000;
+            codecInstL16.pacsize  = 160;
+
+        }else if(_fileFormat == kFileFormatPcm32kHzFile)
+        {
+            codecInstL16.rate     = 512000;
+            codecInstL16.plfreq   = 32000;
+            codecInstL16.pacsize  = 160;
+        } else
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice, _instanceID,
+                       "FilePlayerImpl::StartPlayingFile() sample frequency\
+ specifed not supported for PCM format.");
+            return -1;
+        }
+
+        if (_fileModule.StartPlayingAudioFile(fileName, notification, loop,
+                                              _fileFormat, &codecInstL16,
+                                              startPosition,
+                                              stopPosition) == -1)
+        {
+            WEBRTC_TRACE(
+                kTraceWarning,
+                kTraceVoice,
+                _instanceID,
+                "FilePlayerImpl::StartPlayingFile() failed to initialize file\
+ %s playout.", fileName);
+            return -1;
+        }
+        SetAudioScaling(volumeScaling);
+    }else if(_fileFormat == kFileFormatPreencodedFile)
+    {
+        if (_fileModule.StartPlayingAudioFile(fileName, notification, loop,
+                                              _fileFormat, codecInst) == -1)
+        {
+            WEBRTC_TRACE(
+                kTraceWarning,
+                kTraceVoice,
+                _instanceID,
+                "FilePlayerImpl::StartPlayingPreEncodedFile() failed to\
+ initialize pre-encoded file %s playout.",
+                fileName);
+            return -1;
+        }
+    } else
+    {
+        CodecInst* no_inst = NULL;
+        if (_fileModule.StartPlayingAudioFile(fileName, notification, loop,
+                                              _fileFormat, no_inst,
+                                              startPosition,
+                                              stopPosition) == -1)
+        {
+            WEBRTC_TRACE(
+                kTraceWarning,
+                kTraceVoice,
+                _instanceID,
+                "FilePlayerImpl::StartPlayingFile() failed to initialize file\
+ %s playout.", fileName);
+            return -1;
+        }
+        SetAudioScaling(volumeScaling);
+    }
+    if (SetUpAudioDecoder() == -1)
+    {
+        StopPlayingFile();
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 FilePlayerImpl::StartPlayingFile(InStream& sourceStream,
+                                               WebRtc_UWord32 startPosition,
+                                               float volumeScaling,
+                                               WebRtc_UWord32 notification,
+                                               WebRtc_UWord32 stopPosition,
+                                               const CodecInst* codecInst)
+{
+    if (_fileFormat == kFileFormatPcm16kHzFile ||
+        _fileFormat == kFileFormatPcm32kHzFile ||
+        _fileFormat == kFileFormatPcm8kHzFile)
+    {
+        CodecInst codecInstL16;
+        strncpy(codecInstL16.plname,"L16",32);
+        codecInstL16.pltype   = 93;
+        codecInstL16.channels = 1;
+
+        if (_fileFormat == kFileFormatPcm8kHzFile)
+        {
+            codecInstL16.rate     = 128000;
+            codecInstL16.plfreq   = 8000;
+            codecInstL16.pacsize  = 80;
+
+        }else if (_fileFormat == kFileFormatPcm16kHzFile)
+        {
+            codecInstL16.rate     = 256000;
+            codecInstL16.plfreq   = 16000;
+            codecInstL16.pacsize  = 160;
+
+        }else if (_fileFormat == kFileFormatPcm32kHzFile)
+        {
+            codecInstL16.rate     = 512000;
+            codecInstL16.plfreq   = 32000;
+            codecInstL16.pacsize  = 160;
+        }else
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceVoice,
+                _instanceID,
+                "FilePlayerImpl::StartPlayingFile() sample frequency specifed\
+ not supported for PCM format.");
+            return -1;
+        }
+        if (_fileModule.StartPlayingAudioStream(sourceStream, notification,
+                                                _fileFormat, &codecInstL16,
+                                                startPosition,
+                                                stopPosition) == -1)
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceVoice,
+                _instanceID,
+                "FilePlayerImpl::StartPlayingFile() failed to initialize stream\
+ playout.");
+            return -1;
+        }
+
+    }else if(_fileFormat == kFileFormatPreencodedFile)
+    {
+        if (_fileModule.StartPlayingAudioStream(sourceStream, notification,
+                                                _fileFormat, codecInst) == -1)
+        {
+            WEBRTC_TRACE(
+                kTraceWarning,
+                kTraceVoice,
+                _instanceID,
+                "FilePlayerImpl::StartPlayingFile() failed to initialize stream\
+ playout.");
+            return -1;
+        }
+    } else {
+        CodecInst* no_inst = NULL;
+        if (_fileModule.StartPlayingAudioStream(sourceStream, notification,
+                                                _fileFormat, no_inst,
+                                                startPosition,
+                                                stopPosition) == -1)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice, _instanceID,
+                       "FilePlayerImpl::StartPlayingFile() failed to initialize\
+ stream playout.");
+            return -1;
+        }
+    }
+    SetAudioScaling(volumeScaling);
+
+    if (SetUpAudioDecoder() == -1)
+    {
+        StopPlayingFile();
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 FilePlayerImpl::StopPlayingFile()
+{
+    memset(&_codec, 0, sizeof(CodecInst));
+    _numberOf10MsPerFrame  = 0;
+    _numberOf10MsInDecoder = 0;
+    return _fileModule.StopPlaying();
+}
+
+bool FilePlayerImpl::IsPlayingFile() const
+{
+    return _fileModule.IsPlaying();
+}
+
+WebRtc_Word32 FilePlayerImpl::GetPlayoutPosition(WebRtc_UWord32& durationMs)
+{
+    return _fileModule.PlayoutPositionMs(durationMs);
+}
+
+WebRtc_Word32 FilePlayerImpl::SetUpAudioDecoder()
+{
+    if ((_fileModule.codec_info(_codec) == -1))
+    {
+        WEBRTC_TRACE(
+            kTraceWarning,
+            kTraceVoice,
+            _instanceID,
+            "FilePlayerImpl::StartPlayingFile() failed to retrieve Codec info\
+ of file data.");
+        return -1;
+    }
+    if( STR_CASE_CMP(_codec.plname, "L16") != 0 &&
+        _audioDecoder.SetDecodeCodec(_codec,AMRFileStorage) == -1)
+    {
+        WEBRTC_TRACE(
+            kTraceWarning,
+            kTraceVoice,
+            _instanceID,
+            "FilePlayerImpl::StartPlayingFile() codec %s not supported",
+            _codec.plname);
+        return -1;
+    }
+    _numberOf10MsPerFrame = _codec.pacsize / (_codec.plfreq / 100);
+    _numberOf10MsInDecoder = 0;
+    return 0;
+}
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+VideoFilePlayerImpl::VideoFilePlayerImpl(WebRtc_UWord32 instanceID,
+                                         FileFormats fileFormat)
+    : FilePlayerImpl(instanceID,fileFormat),
+      _videoDecoder(*new VideoCoder(instanceID)),
+      video_codec_info_(),
+      _decodedVideoFrames(0),
+      _encodedData(*new EncodedVideoData()),
+      _frameScaler(*new FrameScaler()),
+      _critSec(CriticalSectionWrapper::CreateCriticalSection()),
+      _startTime(),
+      _accumulatedRenderTimeMs(0),
+      _frameLengthMS(0),
+      _numberOfFramesRead(0),
+      _videoOnly(false)
+{
+    memset(&video_codec_info_, 0, sizeof(video_codec_info_));
+}
+
+VideoFilePlayerImpl::~VideoFilePlayerImpl()
+{
+    delete _critSec;
+    delete &_frameScaler;
+    delete &_videoDecoder;
+    delete &_encodedData;
+}
+
+WebRtc_Word32 VideoFilePlayerImpl::StartPlayingVideoFile(
+    const char* fileName,
+    bool loop,
+    bool videoOnly)
+{
+    CriticalSectionScoped lock( _critSec);
+
+    if(_fileModule.StartPlayingVideoFile(fileName, loop, videoOnly,
+                                         _fileFormat) != 0)
+    {
+        return -1;
+    }
+
+    _decodedVideoFrames = 0;
+    _accumulatedRenderTimeMs = 0;
+    _frameLengthMS = 0;
+    _numberOfFramesRead = 0;
+    _videoOnly = videoOnly;
+
+    // Set up video_codec_info_ according to file,
+    if(SetUpVideoDecoder() != 0)
+    {
+        StopPlayingFile();
+        return -1;
+    }
+    if(!videoOnly)
+    {
+        // Set up _codec according to file,
+        if(SetUpAudioDecoder() != 0)
+        {
+            StopPlayingFile();
+            return -1;
+        }
+    }
+    return 0;
+}
+
+WebRtc_Word32 VideoFilePlayerImpl::StopPlayingFile()
+{
+    CriticalSectionScoped lock( _critSec);
+
+    _decodedVideoFrames = 0;
+    _videoDecoder.ResetDecoder();
+
+    return FilePlayerImpl::StopPlayingFile();
+}
+
+WebRtc_Word32 VideoFilePlayerImpl::GetVideoFromFile(VideoFrame& videoFrame,
+                                                    WebRtc_UWord32 outWidth,
+                                                    WebRtc_UWord32 outHeight)
+{
+    CriticalSectionScoped lock( _critSec);
+
+    WebRtc_Word32 retVal = GetVideoFromFile(videoFrame);
+    if(retVal != 0)
+    {
+        return retVal;
+    }
+    if( videoFrame.Length() > 0)
+    {
+        retVal = _frameScaler.ResizeFrameIfNeeded(&videoFrame, outWidth,
+                                                  outHeight);
+    }
+    return retVal;
+}
+
+WebRtc_Word32 VideoFilePlayerImpl::GetVideoFromFile(VideoFrame& videoFrame)
+{
+    CriticalSectionScoped lock( _critSec);
+    // No new video data read from file.
+    if(_encodedData.payloadSize == 0)
+    {
+        videoFrame.SetLength(0);
+        return -1;
+    }
+    WebRtc_Word32 retVal = 0;
+    if(strncmp(video_codec_info_.plName, "I420", 5) == 0)
+    {
+        videoFrame.CopyFrame(_encodedData.payloadSize,_encodedData.payloadData);
+        videoFrame.SetLength(_encodedData.payloadSize);
+        videoFrame.SetWidth(video_codec_info_.width);
+        videoFrame.SetHeight(video_codec_info_.height);
+    }else
+    {
+        // Set the timestamp manually since there is no timestamp in the file.
+        // Update timestam according to 90 kHz stream.
+        _encodedData.timeStamp += (90000 / video_codec_info_.maxFramerate);
+        retVal = _videoDecoder.Decode(videoFrame, _encodedData);
+    }
+
+    WebRtc_Word64 renderTimeMs = TickTime::MillisecondTimestamp();
+    videoFrame.SetRenderTime(renderTimeMs);
+
+     // Indicate that the current frame in the encoded buffer is old/has
+     // already been read.
+    _encodedData.payloadSize = 0;
+    if( retVal == 0)
+    {
+        _decodedVideoFrames++;
+    }
+    return retVal;
+}
+
+WebRtc_Word32 VideoFilePlayerImpl::video_codec_info(
+    VideoCodec& videoCodec) const
+{
+    if(video_codec_info_.plName[0] == 0)
+    {
+        return -1;
+    }
+    memcpy(&videoCodec, &video_codec_info_, sizeof(VideoCodec));
+    return 0;
+}
+
+WebRtc_Word32 VideoFilePlayerImpl::TimeUntilNextVideoFrame()
+{
+    if(_fileFormat != kFileFormatAviFile)
+    {
+        return -1;
+    }
+    if(!_fileModule.IsPlaying())
+    {
+        return -1;
+    }
+    if(_encodedData.payloadSize <= 0)
+    {
+        // Read next frame from file.
+        CriticalSectionScoped lock( _critSec);
+
+        if(_fileFormat == kFileFormatAviFile)
+        {
+            // Get next video frame
+            WebRtc_UWord32 encodedBufferLengthInBytes = _encodedData.bufferSize;
+            if(_fileModule.PlayoutAVIVideoData(
+                   reinterpret_cast< WebRtc_Word8*>(_encodedData.payloadData),
+                   encodedBufferLengthInBytes) != 0)
+            {
+                 WEBRTC_TRACE(
+                     kTraceWarning,
+                     kTraceVideo,
+                     _instanceID,
+                     "FilePlayerImpl::TimeUntilNextVideoFrame() error reading\
+ video data");
+                return -1;
+            }
+            _encodedData.payloadSize = encodedBufferLengthInBytes;
+            _encodedData.codec = video_codec_info_.codecType;
+            _numberOfFramesRead++;
+
+            if(_accumulatedRenderTimeMs == 0)
+            {
+                _startTime = TickTime::Now();
+                // This if-statement should only trigger once.
+                _accumulatedRenderTimeMs = 1;
+            } else {
+                // A full seconds worth of frames have been read.
+                if(_numberOfFramesRead % video_codec_info_.maxFramerate == 0)
+                {
+                    // Frame rate is in frames per seconds. Frame length is
+                    // calculated as an integer division which means it may
+                    // be rounded down. Compensate for this every second.
+                    WebRtc_UWord32 rest = 1000%_frameLengthMS;
+                    _accumulatedRenderTimeMs += rest;
+                }
+                _accumulatedRenderTimeMs += _frameLengthMS;
+            }
+        }
+    }
+
+    WebRtc_Word64 timeToNextFrame;
+    if(_videoOnly)
+    {
+        timeToNextFrame = _accumulatedRenderTimeMs -
+            (TickTime::Now() - _startTime).Milliseconds();
+
+    } else {
+        // Synchronize with the audio stream instead of system clock.
+        timeToNextFrame = _accumulatedRenderTimeMs - _decodedLengthInMS;
+    }
+    if(timeToNextFrame < 0)
+    {
+        return 0;
+
+    } else if(timeToNextFrame > 0x0fffffff)
+    {
+        // Wraparound or audio stream has gone to far ahead of the video stream.
+        return -1;
+    }
+    return static_cast<WebRtc_Word32>(timeToNextFrame);
+}
+
+WebRtc_Word32 VideoFilePlayerImpl::SetUpVideoDecoder()
+{
+    if (_fileModule.VideoCodecInst(video_codec_info_) != 0)
+    {
+        WEBRTC_TRACE(
+            kTraceWarning,
+            kTraceVideo,
+            _instanceID,
+            "FilePlayerImpl::SetVideoDecoder() failed to retrieve Codec info of\
+ file data.");
+        return -1;
+    }
+
+    WebRtc_Word32 useNumberOfCores = 1;
+    if(_videoDecoder.SetDecodeCodec(video_codec_info_, useNumberOfCores) != 0)
+    {
+        WEBRTC_TRACE(
+            kTraceWarning,
+            kTraceVideo,
+            _instanceID,
+            "FilePlayerImpl::SetUpVideoDecoder() codec %s not supported",
+            video_codec_info_.plName);
+        return -1;
+    }
+
+    _frameLengthMS = 1000/video_codec_info_.maxFramerate;
+
+    // Size of unencoded data (I420) should be the largest possible frame size
+    // in a file.
+    const WebRtc_UWord32 KReadBufferSize = 3 * video_codec_info_.width *
+        video_codec_info_.height / 2;
+    _encodedData.VerifyAndAllocate(KReadBufferSize);
+    _encodedData.encodedHeight = video_codec_info_.height;
+    _encodedData.encodedWidth = video_codec_info_.width;
+    _encodedData.payloadType = video_codec_info_.plType;
+    _encodedData.timeStamp = 0;
+    return 0;
+}
+#endif // WEBRTC_MODULE_UTILITY_VIDEO
+} // namespace webrtc
diff --git a/src/modules/utility/source/file_player_impl.h b/src/modules/utility/source/file_player_impl.h
new file mode 100644
index 0000000..9ae3838
--- /dev/null
+++ b/src/modules/utility/source/file_player_impl.h
@@ -0,0 +1,122 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_FILE_PLAYER_IMPL_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_FILE_PLAYER_IMPL_H_
+
+#include "coder.h"
+#include "common_types.h"
+#include "critical_section_wrapper.h"
+#include "engine_configurations.h"
+#include "file_player.h"
+#include "media_file_defines.h"
+#include "media_file.h"
+#include "resampler.h"
+#include "tick_util.h"
+#include "typedefs.h"
+
+namespace webrtc {
+class VideoCoder;
+class FrameScaler;
+
+class FilePlayerImpl : public FilePlayer
+{
+public:
+    FilePlayerImpl(WebRtc_UWord32 instanceID, FileFormats fileFormat);
+    ~FilePlayerImpl();
+
+    virtual int Get10msAudioFromFile(
+        int16_t* outBuffer,
+        int& lengthInSamples,
+        int frequencyInHz);
+    virtual WebRtc_Word32 RegisterModuleFileCallback(FileCallback* callback);
+    virtual WebRtc_Word32 StartPlayingFile(
+        const char* fileName,
+        bool loop,
+        WebRtc_UWord32 startPosition,
+        float volumeScaling,
+        WebRtc_UWord32 notification,
+        WebRtc_UWord32 stopPosition = 0,
+        const CodecInst* codecInst = NULL);
+    virtual WebRtc_Word32 StartPlayingFile(
+        InStream& sourceStream,
+        WebRtc_UWord32 startPosition,
+        float volumeScaling,
+        WebRtc_UWord32 notification,
+        WebRtc_UWord32 stopPosition = 0,
+        const CodecInst* codecInst = NULL);
+    virtual WebRtc_Word32 StopPlayingFile();
+    virtual bool IsPlayingFile() const;
+    virtual WebRtc_Word32 GetPlayoutPosition(WebRtc_UWord32& durationMs);
+    virtual WebRtc_Word32 AudioCodec(CodecInst& audioCodec) const;
+    virtual WebRtc_Word32 Frequency() const;
+    virtual WebRtc_Word32 SetAudioScaling(float scaleFactor);
+
+protected:
+    WebRtc_Word32 SetUpAudioDecoder();
+
+    WebRtc_UWord32 _instanceID;
+    const FileFormats _fileFormat;
+    MediaFile& _fileModule;
+
+    WebRtc_UWord32 _decodedLengthInMS;
+
+private:
+    AudioCoder _audioDecoder;
+
+    CodecInst _codec;
+    WebRtc_Word32 _numberOf10MsPerFrame;
+    WebRtc_Word32 _numberOf10MsInDecoder;
+
+    Resampler _resampler;
+    float _scaling;
+};
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+class VideoFilePlayerImpl: public FilePlayerImpl
+{
+public:
+    VideoFilePlayerImpl(WebRtc_UWord32 instanceID, FileFormats fileFormat);
+    ~VideoFilePlayerImpl();
+
+    // FilePlayer functions.
+    virtual WebRtc_Word32 TimeUntilNextVideoFrame();
+    virtual WebRtc_Word32 StartPlayingVideoFile(const char* fileName,
+                                                bool loop,
+                                                bool videoOnly);
+    virtual WebRtc_Word32 StopPlayingFile();
+    virtual WebRtc_Word32 video_codec_info(VideoCodec& videoCodec) const;
+    virtual WebRtc_Word32 GetVideoFromFile(VideoFrame& videoFrame);
+    virtual WebRtc_Word32 GetVideoFromFile(VideoFrame& videoFrame,
+                                           const WebRtc_UWord32 outWidth,
+                                           const WebRtc_UWord32 outHeight);
+
+private:
+    WebRtc_Word32 SetUpVideoDecoder();
+
+    VideoCoder& _videoDecoder;
+    VideoCodec video_codec_info_;
+    WebRtc_Word32 _decodedVideoFrames;
+
+    EncodedVideoData& _encodedData;
+
+    FrameScaler& _frameScaler;
+    CriticalSectionWrapper* _critSec;
+    TickTime _startTime;
+    WebRtc_Word64 _accumulatedRenderTimeMs;
+    WebRtc_UWord32 _frameLengthMS;
+
+    WebRtc_Word32 _numberOfFramesRead;
+    bool _videoOnly;
+};
+#endif //WEBRTC_MODULE_UTILITY_VIDEO
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UTILITY_SOURCE_FILE_PLAYER_IMPL_H_
diff --git a/src/modules/utility/source/file_player_unittest.cc b/src/modules/utility/source/file_player_unittest.cc
new file mode 100644
index 0000000..2e76905
--- /dev/null
+++ b/src/modules/utility/source/file_player_unittest.cc
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Empty test just to get code coverage metrics for this dir.
+ */
+#include "file_player.h"
+#include "gtest/gtest.h"
+
+TEST(FilePlayerTest, EmptyTestToGetCodeCoverage) {}
diff --git a/src/modules/utility/source/file_recorder_impl.cc b/src/modules/utility/source/file_recorder_impl.cc
new file mode 100644
index 0000000..b7ec084
--- /dev/null
+++ b/src/modules/utility/source/file_recorder_impl.cc
@@ -0,0 +1,793 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+#include "file_recorder_impl.h"
+#include "media_file.h"
+#include "trace.h"
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+    #include "cpu_wrapper.h"
+    #include "critical_section_wrapper.h"
+    #include "frame_scaler.h"
+    #include "video_coder.h"
+    #include "video_frames_queue.h"
+#endif
+
+// OS independent case insensitive string comparison.
+#ifdef WIN32
+    #define STR_CASE_CMP(x,y) ::_stricmp(x,y)
+#else
+    #define STR_CASE_CMP(x,y) ::strcasecmp(x,y)
+#endif
+
+namespace webrtc {
+FileRecorder* FileRecorder::CreateFileRecorder(WebRtc_UWord32 instanceID,
+                                               FileFormats fileFormat)
+{
+    switch(fileFormat)
+    {
+    case kFileFormatWavFile:
+    case kFileFormatCompressedFile:
+    case kFileFormatPreencodedFile:
+    case kFileFormatPcm16kHzFile:
+    case kFileFormatPcm8kHzFile:
+    case kFileFormatPcm32kHzFile:
+        return new FileRecorderImpl(instanceID, fileFormat);
+    case kFileFormatAviFile:
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+        return new AviRecorder(instanceID, fileFormat);
+#else
+        WEBRTC_TRACE(kTraceError, kTraceFile, -1,
+                             "Invalid file format: %d", kFileFormatAviFile);
+        assert(false);
+        return NULL;
+#endif
+    }
+    assert(false);
+    return NULL;
+}
+
+void FileRecorder::DestroyFileRecorder(FileRecorder* recorder)
+{
+    delete recorder;
+}
+
+FileRecorderImpl::FileRecorderImpl(WebRtc_UWord32 instanceID,
+                                   FileFormats fileFormat)
+    : _instanceID(instanceID),
+      _fileFormat(fileFormat),
+      _moduleFile(MediaFile::CreateMediaFile(_instanceID)),
+      codec_info_(),
+      _amrFormat(AMRFileStorage),
+      _audioBuffer(),
+      _audioEncoder(instanceID),
+      _audioResampler()
+{
+}
+
+FileRecorderImpl::~FileRecorderImpl()
+{
+    MediaFile::DestroyMediaFile(_moduleFile);
+}
+
+FileFormats FileRecorderImpl::RecordingFileFormat() const
+{
+    return _fileFormat;
+}
+
+WebRtc_Word32 FileRecorderImpl::RegisterModuleFileCallback(
+    FileCallback* callback)
+{
+    if(_moduleFile == NULL)
+    {
+        return -1;
+    }
+    return _moduleFile->SetModuleFileCallback(callback);
+}
+
+WebRtc_Word32 FileRecorderImpl::StartRecordingAudioFile(
+    const char* fileName,
+    const CodecInst& codecInst,
+    WebRtc_UWord32 notificationTimeMs,
+    ACMAMRPackingFormat amrFormat)
+{
+    if(_moduleFile == NULL)
+    {
+        return -1;
+    }
+    codec_info_ = codecInst;
+    _amrFormat = amrFormat;
+
+    WebRtc_Word32 retVal = 0;
+    if(_fileFormat != kFileFormatAviFile)
+    {
+        // AVI files should be started using StartRecordingVideoFile(..) all
+        // other formats should use this API.
+        retVal =_moduleFile->StartRecordingAudioFile(fileName, _fileFormat,
+                                                     codecInst,
+                                                     notificationTimeMs);
+    }
+
+    if( retVal == 0)
+    {
+        retVal = SetUpAudioEncoder();
+    }
+    if( retVal != 0)
+    {
+        WEBRTC_TRACE(
+            kTraceWarning,
+            kTraceVoice,
+            _instanceID,
+            "FileRecorder::StartRecording() failed to initialize file %s for\
+ recording.",
+            fileName);
+
+        if(IsRecording())
+        {
+            StopRecording();
+        }
+    }
+    return retVal;
+}
+
+WebRtc_Word32 FileRecorderImpl::StartRecordingAudioFile(
+    OutStream& destStream,
+    const CodecInst& codecInst,
+    WebRtc_UWord32 notificationTimeMs,
+    ACMAMRPackingFormat amrFormat)
+{
+    codec_info_ = codecInst;
+    _amrFormat = amrFormat;
+
+    WebRtc_Word32 retVal = _moduleFile->StartRecordingAudioStream(
+        destStream,
+        _fileFormat,
+        codecInst,
+        notificationTimeMs);
+
+    if( retVal == 0)
+    {
+        retVal = SetUpAudioEncoder();
+    }
+    if( retVal != 0)
+    {
+        WEBRTC_TRACE(
+            kTraceWarning,
+            kTraceVoice,
+            _instanceID,
+            "FileRecorder::StartRecording() failed to initialize outStream for\
+ recording.");
+
+        if(IsRecording())
+        {
+            StopRecording();
+        }
+    }
+    return retVal;
+}
+
+WebRtc_Word32 FileRecorderImpl::StopRecording()
+{
+    memset(&codec_info_, 0, sizeof(CodecInst));
+    return _moduleFile->StopRecording();
+}
+
+bool FileRecorderImpl::IsRecording() const
+{
+    return _moduleFile->IsRecording();
+}
+
+WebRtc_Word32 FileRecorderImpl::RecordAudioToFile(
+    const AudioFrame& incomingAudioFrame,
+    const TickTime* playoutTS)
+{
+    if (codec_info_.plfreq == 0)
+    {
+        WEBRTC_TRACE(
+            kTraceWarning,
+            kTraceVoice,
+            _instanceID,
+            "FileRecorder::RecordAudioToFile() recording audio is not turned\
+ on");
+        return -1;
+    }
+    AudioFrame tempAudioFrame;
+    tempAudioFrame.samples_per_channel_ = 0;
+    if( incomingAudioFrame.num_channels_ == 2 &&
+        !_moduleFile->IsStereo())
+    {
+        // Recording mono but incoming audio is (interleaved) stereo.
+        tempAudioFrame.num_channels_ = 1;
+        tempAudioFrame.sample_rate_hz_ = incomingAudioFrame.sample_rate_hz_;
+        tempAudioFrame.samples_per_channel_ =
+          incomingAudioFrame.samples_per_channel_;
+        for (WebRtc_UWord16 i = 0;
+             i < (incomingAudioFrame.samples_per_channel_); i++)
+        {
+            // Sample value is the average of left and right buffer rounded to
+            // closest integer value. Note samples can be either 1 or 2 byte.
+             tempAudioFrame.data_[i] =
+                 ((incomingAudioFrame.data_[2 * i] +
+                   incomingAudioFrame.data_[(2 * i) + 1] + 1) >> 1);
+        }
+    }
+    else if( incomingAudioFrame.num_channels_ == 1 &&
+        _moduleFile->IsStereo())
+    {
+        // Recording stereo but incoming audio is mono.
+        tempAudioFrame.num_channels_ = 2;
+        tempAudioFrame.sample_rate_hz_ = incomingAudioFrame.sample_rate_hz_;
+        tempAudioFrame.samples_per_channel_ =
+          incomingAudioFrame.samples_per_channel_;
+        for (WebRtc_UWord16 i = 0;
+             i < (incomingAudioFrame.samples_per_channel_); i++)
+        {
+            // Duplicate sample to both channels
+             tempAudioFrame.data_[2*i] =
+               incomingAudioFrame.data_[i];
+             tempAudioFrame.data_[2*i+1] =
+               incomingAudioFrame.data_[i];
+        }
+    }
+
+    const AudioFrame* ptrAudioFrame = &incomingAudioFrame;
+    if(tempAudioFrame.samples_per_channel_ != 0)
+    {
+        // If ptrAudioFrame is not empty it contains the audio to be recorded.
+        ptrAudioFrame = &tempAudioFrame;
+    }
+
+    // Encode the audio data before writing to file. Don't encode if the codec
+    // is PCM.
+    // NOTE: stereo recording is only supported for WAV files.
+    // TODO (hellner): WAV expect PCM in little endian byte order. Not
+    // "encoding" with PCM coder should be a problem for big endian systems.
+    WebRtc_UWord32 encodedLenInBytes = 0;
+    if (_fileFormat == kFileFormatPreencodedFile ||
+        STR_CASE_CMP(codec_info_.plname, "L16") != 0)
+    {
+        if (_audioEncoder.Encode(*ptrAudioFrame, _audioBuffer,
+                                 encodedLenInBytes) == -1)
+        {
+            WEBRTC_TRACE(
+                kTraceWarning,
+                kTraceVoice,
+                _instanceID,
+                "FileRecorder::RecordAudioToFile() codec %s not supported or\
+ failed to encode stream",
+                codec_info_.plname);
+            return -1;
+        }
+    } else {
+        int outLen = 0;
+        if(ptrAudioFrame->num_channels_ == 2)
+        {
+            // ptrAudioFrame contains interleaved stereo audio.
+            _audioResampler.ResetIfNeeded(ptrAudioFrame->sample_rate_hz_,
+                                          codec_info_.plfreq,
+                                          kResamplerSynchronousStereo);
+            _audioResampler.Push(ptrAudioFrame->data_,
+                                 ptrAudioFrame->samples_per_channel_ *
+                                 ptrAudioFrame->num_channels_,
+                                 (WebRtc_Word16*)_audioBuffer,
+                                 MAX_AUDIO_BUFFER_IN_BYTES, outLen);
+        } else {
+            _audioResampler.ResetIfNeeded(ptrAudioFrame->sample_rate_hz_,
+                                          codec_info_.plfreq,
+                                          kResamplerSynchronous);
+            _audioResampler.Push(ptrAudioFrame->data_,
+                                 ptrAudioFrame->samples_per_channel_,
+                                 (WebRtc_Word16*)_audioBuffer,
+                                 MAX_AUDIO_BUFFER_IN_BYTES, outLen);
+        }
+        encodedLenInBytes = outLen * sizeof(WebRtc_Word16);
+    }
+
+    // Codec may not be operating at a frame rate of 10 ms. Whenever enough
+    // 10 ms chunks of data has been pushed to the encoder an encoded frame
+    // will be available. Wait until then.
+    if (encodedLenInBytes)
+    {
+        WebRtc_UWord16 msOfData =
+            ptrAudioFrame->samples_per_channel_ /
+            WebRtc_UWord16(ptrAudioFrame->sample_rate_hz_ / 1000);
+        if (WriteEncodedAudioData(_audioBuffer,
+                                  (WebRtc_UWord16)encodedLenInBytes,
+                                  msOfData, playoutTS) == -1)
+        {
+            return -1;
+        }
+    }
+    return 0;
+}
+
+WebRtc_Word32 FileRecorderImpl::SetUpAudioEncoder()
+{
+    if (_fileFormat == kFileFormatPreencodedFile ||
+        STR_CASE_CMP(codec_info_.plname, "L16") != 0)
+    {
+        if(_audioEncoder.SetEncodeCodec(codec_info_,_amrFormat) == -1)
+        {
+            WEBRTC_TRACE(
+                kTraceError,
+                kTraceVoice,
+                _instanceID,
+                "FileRecorder::StartRecording() codec %s not supported",
+                codec_info_.plname);
+            return -1;
+        }
+    }
+    return 0;
+}
+
+WebRtc_Word32 FileRecorderImpl::codec_info(CodecInst& codecInst) const
+{
+    if(codec_info_.plfreq == 0)
+    {
+        return -1;
+    }
+    codecInst = codec_info_;
+    return 0;
+}
+
+WebRtc_Word32 FileRecorderImpl::WriteEncodedAudioData(
+    const WebRtc_Word8* audioBuffer,
+    WebRtc_UWord16 bufferLength,
+    WebRtc_UWord16 /*millisecondsOfData*/,
+    const TickTime* /*playoutTS*/)
+{
+    return _moduleFile->IncomingAudioData(audioBuffer, bufferLength);
+}
+
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+class AudioFrameFileInfo
+{
+    public:
+       AudioFrameFileInfo(const WebRtc_Word8* audioData,
+                     const WebRtc_UWord16 audioSize,
+                     const WebRtc_UWord16 audioMS,
+                     const TickTime& playoutTS)
+           : _audioData(), _audioSize(audioSize), _audioMS(audioMS),
+             _playoutTS(playoutTS)
+       {
+           if(audioSize > MAX_AUDIO_BUFFER_IN_BYTES)
+           {
+               assert(false);
+               _audioSize = 0;
+               return;
+           }
+           memcpy(_audioData, audioData, audioSize);
+       };
+    // TODO (hellner): either turn into a struct or provide get/set functions.
+    WebRtc_Word8   _audioData[MAX_AUDIO_BUFFER_IN_BYTES];
+    WebRtc_UWord16 _audioSize;
+    WebRtc_UWord16 _audioMS;
+    TickTime _playoutTS;
+};
+
+AviRecorder::AviRecorder(WebRtc_UWord32 instanceID, FileFormats fileFormat)
+    : FileRecorderImpl(instanceID, fileFormat),
+      _videoOnly(false),
+      _thread( 0),
+      _timeEvent(*EventWrapper::Create()),
+      _critSec(CriticalSectionWrapper::CreateCriticalSection()),
+      _writtenVideoFramesCounter(0),
+      _writtenAudioMS(0),
+      _writtenVideoMS(0)
+{
+    _videoEncoder = new VideoCoder(instanceID);
+    _frameScaler = new FrameScaler();
+    _videoFramesQueue = new VideoFramesQueue();
+    _thread = ThreadWrapper::CreateThread(Run, this, kNormalPriority,
+                                          "AviRecorder()");
+}
+
+AviRecorder::~AviRecorder( )
+{
+    StopRecording( );
+
+    delete _videoEncoder;
+    delete _frameScaler;
+    delete _videoFramesQueue;
+    delete _thread;
+    delete &_timeEvent;
+    delete _critSec;
+}
+
+WebRtc_Word32 AviRecorder::StartRecordingVideoFile(
+    const char* fileName,
+    const CodecInst& audioCodecInst,
+    const VideoCodec& videoCodecInst,
+    ACMAMRPackingFormat amrFormat,
+    bool videoOnly)
+{
+    _firstAudioFrameReceived = false;
+    _videoCodecInst = videoCodecInst;
+    _videoOnly = videoOnly;
+
+    if(_moduleFile->StartRecordingVideoFile(fileName, _fileFormat,
+                                            audioCodecInst, videoCodecInst,
+                                            videoOnly) != 0)
+    {
+        return -1;
+    }
+
+    if(!videoOnly)
+    {
+        if(FileRecorderImpl::StartRecordingAudioFile(fileName,audioCodecInst, 0,
+                                                     amrFormat) !=0)
+        {
+            StopRecording();
+            return -1;
+        }
+    }
+    if( SetUpVideoEncoder() != 0)
+    {
+        StopRecording();
+        return -1;
+    }
+    if(_videoOnly)
+    {
+        // Writing to AVI file is non-blocking.
+        // Start non-blocking timer if video only. If recording both video and
+        // audio let the pushing of audio frames be the timer.
+        _timeEvent.StartTimer(true, 1000 / _videoCodecInst.maxFramerate);
+    }
+    StartThread();
+    return 0;
+}
+
+WebRtc_Word32 AviRecorder::StopRecording()
+{
+    _timeEvent.StopTimer();
+
+    StopThread();
+    return FileRecorderImpl::StopRecording();
+}
+
+WebRtc_Word32 AviRecorder::CalcI420FrameSize( ) const
+{
+    return 3 * _videoCodecInst.width * _videoCodecInst.height / 2;
+}
+
+WebRtc_Word32 AviRecorder::SetUpVideoEncoder()
+{
+    // Size of unencoded data (I420) should be the largest possible frame size
+    // in a file.
+    _videoMaxPayloadSize = CalcI420FrameSize();
+    _videoEncodedData.VerifyAndAllocate(_videoMaxPayloadSize);
+
+    _videoCodecInst.plType = _videoEncoder->DefaultPayloadType(
+        _videoCodecInst.plName);
+
+    WebRtc_Word32 useNumberOfCores = 1;
+    // Set the max payload size to 16000. This means that the codec will try to
+    // create slices that will fit in 16000 kByte packets. However, the
+    // Encode() call will still generate one full frame.
+    if(_videoEncoder->SetEncodeCodec(_videoCodecInst, useNumberOfCores,
+                                     16000))
+    {
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 AviRecorder::RecordVideoToFile(const VideoFrame& videoFrame)
+{
+    CriticalSectionScoped lock(_critSec);
+
+    if(!IsRecording() || ( videoFrame.Length() == 0))
+    {
+        return -1;
+    }
+    // The frame is written to file in AviRecorder::Process().
+    WebRtc_Word32 retVal = _videoFramesQueue->AddFrame(videoFrame);
+    if(retVal != 0)
+    {
+        StopRecording();
+    }
+    return retVal;
+}
+
+bool AviRecorder::StartThread()
+{
+    unsigned int id;
+    if( _thread == 0)
+    {
+        return false;
+    }
+
+    return _thread->Start(id);
+}
+
+bool AviRecorder::StopThread()
+{
+    _critSec->Enter();
+
+    if(_thread)
+    {
+        _thread->SetNotAlive();
+
+        ThreadWrapper* thread = _thread;
+        _thread = NULL;
+
+        _timeEvent.Set();
+
+        _critSec->Leave();
+
+        if(thread->Stop())
+        {
+            delete thread;
+        } else {
+            return false;
+        }
+    } else {
+        _critSec->Leave();
+    }
+    return true;
+}
+
+bool AviRecorder::Run( ThreadObj threadObj)
+{
+    return static_cast<AviRecorder*>( threadObj)->Process();
+}
+
+WebRtc_Word32 AviRecorder::ProcessAudio()
+{
+    if (_writtenVideoFramesCounter == 0)
+    {
+        // Get the most recent frame that is due for writing to file. Since
+        // frames are unencoded it's safe to throw away frames if necessary
+        // for synchronizing audio and video.
+        VideoFrame* frameToProcess = _videoFramesQueue->FrameToRecord();
+        if(frameToProcess)
+        {
+            // Syncronize audio to the current frame to process by throwing away
+            // audio samples with older timestamp than the video frame.
+            WebRtc_UWord32 numberOfAudioElements =
+                _audioFramesToWrite.GetSize();
+            for (WebRtc_UWord32 i = 0; i < numberOfAudioElements; ++i)
+            {
+                AudioFrameFileInfo* frameInfo =
+                    (AudioFrameFileInfo*)_audioFramesToWrite.First()->GetItem();
+                if(frameInfo)
+                {
+                    if(TickTime::TicksToMilliseconds(
+                           frameInfo->_playoutTS.Ticks()) <
+                       frameToProcess->RenderTimeMs())
+                    {
+                        delete frameInfo;
+                        _audioFramesToWrite.PopFront();
+                    } else
+                    {
+                        break;
+                    }
+                }
+            }
+        }
+    }
+    // Write all audio up to current timestamp.
+    WebRtc_Word32 error = 0;
+    WebRtc_UWord32 numberOfAudioElements = _audioFramesToWrite.GetSize();
+    for (WebRtc_UWord32 i = 0; i < numberOfAudioElements; ++i)
+    {
+        AudioFrameFileInfo* frameInfo =
+            (AudioFrameFileInfo*)_audioFramesToWrite.First()->GetItem();
+        if(frameInfo)
+        {
+            if((TickTime::Now() - frameInfo->_playoutTS).Milliseconds() > 0)
+            {
+                _moduleFile->IncomingAudioData(frameInfo->_audioData,
+                                               frameInfo->_audioSize);
+                _writtenAudioMS += frameInfo->_audioMS;
+                delete frameInfo;
+                _audioFramesToWrite.PopFront();
+            } else {
+                break;
+            }
+        } else {
+            _audioFramesToWrite.PopFront();
+        }
+    }
+    return error;
+}
+
+bool AviRecorder::Process()
+{
+    switch(_timeEvent.Wait(500))
+    {
+    case kEventSignaled:
+        if(_thread == NULL)
+        {
+            return false;
+        }
+        break;
+    case kEventError:
+        return false;
+    case kEventTimeout:
+        // No events triggered. No work to do.
+        return true;
+    }
+    CriticalSectionScoped lock( _critSec);
+
+    // Get the most recent frame to write to file (if any). Synchronize it with
+    // the audio stream (if any). Synchronization the video based on its render
+    // timestamp (i.e. VideoFrame::RenderTimeMS())
+    VideoFrame* frameToProcess = _videoFramesQueue->FrameToRecord();
+    if( frameToProcess == NULL)
+    {
+        return true;
+    }
+    WebRtc_Word32 error = 0;
+    if(!_videoOnly)
+    {
+        if(!_firstAudioFrameReceived)
+        {
+            // Video and audio can only be synchronized if both have been
+            // received.
+            return true;
+        }
+        error = ProcessAudio();
+
+        while (_writtenAudioMS > _writtenVideoMS)
+        {
+            error = EncodeAndWriteVideoToFile( *frameToProcess);
+            if( error != 0)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceVideo, _instanceID,
+                        "AviRecorder::Process() error writing to file.");
+                break;
+            } else {
+                WebRtc_UWord32 frameLengthMS = 1000 /
+                    _videoCodecInst.maxFramerate;
+                _writtenVideoFramesCounter++;
+                _writtenVideoMS += frameLengthMS;
+                // A full seconds worth of frames have been written.
+                if(_writtenVideoFramesCounter%_videoCodecInst.maxFramerate == 0)
+                {
+                    // Frame rate is in frames per seconds. Frame length is
+                    // calculated as an integer division which means it may
+                    // be rounded down. Compensate for this every second.
+                    WebRtc_UWord32 rest = 1000 % frameLengthMS;
+                    _writtenVideoMS += rest;
+                }
+            }
+        }
+    } else {
+        // Frame rate is in frames per seconds. Frame length is calculated as an
+        // integer division which means it may be rounded down. This introduces
+        // drift. Once a full frame worth of drift has happened, skip writing
+        // one frame. Note that frame rate is in frames per second so the
+        // drift is completely compensated for.
+        WebRtc_UWord32 frameLengthMS = 1000/_videoCodecInst.maxFramerate;
+        WebRtc_UWord32 restMS = 1000 % frameLengthMS;
+        WebRtc_UWord32 frameSkip = (_videoCodecInst.maxFramerate *
+                                    frameLengthMS) / restMS;
+
+        _writtenVideoFramesCounter++;
+        if(_writtenVideoFramesCounter % frameSkip == 0)
+        {
+            _writtenVideoMS += frameLengthMS;
+            return true;
+        }
+
+        error = EncodeAndWriteVideoToFile( *frameToProcess);
+        if(error != 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, _instanceID,
+                    "AviRecorder::Process() error writing to file.");
+        } else {
+            _writtenVideoMS += frameLengthMS;
+        }
+    }
+    return error == 0;
+}
+
+WebRtc_Word32 AviRecorder::EncodeAndWriteVideoToFile(VideoFrame& videoFrame)
+{
+    if(!IsRecording() || (videoFrame.Length() == 0))
+    {
+        return -1;
+    }
+
+    if(_frameScaler->ResizeFrameIfNeeded(&videoFrame, _videoCodecInst.width,
+                                         _videoCodecInst.height) != 0)
+    {
+        return -1;
+    }
+
+    _videoEncodedData.payloadSize = 0;
+
+    if( STR_CASE_CMP(_videoCodecInst.plName, "I420") == 0)
+    {
+        _videoEncodedData.VerifyAndAllocate(videoFrame.Length());
+
+        // I420 is raw data. No encoding needed (each sample is represented by
+        // 1 byte so there is no difference depending on endianness).
+        memcpy(_videoEncodedData.payloadData, videoFrame.Buffer(),
+               videoFrame.Length());
+
+        _videoEncodedData.payloadSize = videoFrame.Length();
+        _videoEncodedData.frameType = kVideoFrameKey;
+    }else {
+        if( _videoEncoder->Encode(videoFrame, _videoEncodedData) != 0)
+        {
+            return -1;
+        }
+    }
+
+    if(_videoEncodedData.payloadSize > 0)
+    {
+        if(_moduleFile->IncomingAVIVideoData(
+               (WebRtc_Word8*)(_videoEncodedData.payloadData),
+               _videoEncodedData.payloadSize))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, _instanceID,
+                         "Error writing AVI file");
+            return -1;
+        }
+    } else {
+        WEBRTC_TRACE(
+            kTraceError,
+            kTraceVideo,
+            _instanceID,
+            "FileRecorder::RecordVideoToFile() frame dropped by encoder bitrate\
+ likely to low.");
+    }
+    return 0;
+}
+
+// Store audio frame in the _audioFramesToWrite buffer. The writing to file
+// happens in AviRecorder::Process().
+WebRtc_Word32 AviRecorder::WriteEncodedAudioData(
+    const WebRtc_Word8* audioBuffer,
+    WebRtc_UWord16 bufferLength,
+    WebRtc_UWord16 millisecondsOfData,
+    const TickTime* playoutTS)
+{
+    if (!IsRecording())
+    {
+        return -1;
+    }
+    if (bufferLength > MAX_AUDIO_BUFFER_IN_BYTES)
+    {
+        return -1;
+    }
+    if (_videoOnly)
+    {
+        return -1;
+    }
+    if (_audioFramesToWrite.GetSize() > kMaxAudioBufferQueueLength)
+    {
+        StopRecording();
+        return -1;
+    }
+    _firstAudioFrameReceived = true;
+
+    if(playoutTS)
+    {
+        _audioFramesToWrite.PushBack(new AudioFrameFileInfo(audioBuffer,
+                                                            bufferLength,
+                                                            millisecondsOfData,
+                                                            *playoutTS));
+    } else {
+        _audioFramesToWrite.PushBack(new AudioFrameFileInfo(audioBuffer,
+                                                            bufferLength,
+                                                            millisecondsOfData,
+                                                            TickTime::Now()));
+    }
+    _timeEvent.Set();
+    return 0;
+}
+
+#endif // WEBRTC_MODULE_UTILITY_VIDEO
+} // namespace webrtc
diff --git a/src/modules/utility/source/file_recorder_impl.h b/src/modules/utility/source/file_recorder_impl.h
new file mode 100644
index 0000000..6e56972
--- /dev/null
+++ b/src/modules/utility/source/file_recorder_impl.h
@@ -0,0 +1,163 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains a class that can write audio and/or video to file in
+// multiple file formats. The unencoded input data is written to file in the
+// encoded format specified.
+
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_FILE_RECORDER_IMPL_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_FILE_RECORDER_IMPL_H_
+
+#include "coder.h"
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "event_wrapper.h"
+#include "file_recorder.h"
+#include "media_file_defines.h"
+#include "media_file.h"
+#include "module_common_types.h"
+#include "resampler.h"
+#include "thread_wrapper.h"
+#include "tick_util.h"
+#include "typedefs.h"
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+    #include "frame_scaler.h"
+    #include "video_coder.h"
+    #include "video_frames_queue.h"
+#endif
+
+namespace webrtc {
+// The largest decoded frame size in samples (60ms with 32kHz sample rate).
+enum { MAX_AUDIO_BUFFER_IN_SAMPLES = 60*32};
+enum { MAX_AUDIO_BUFFER_IN_BYTES = MAX_AUDIO_BUFFER_IN_SAMPLES*2};
+enum { kMaxAudioBufferQueueLength = 100 };
+
+class FileRecorderImpl : public FileRecorder
+{
+public:
+    FileRecorderImpl(WebRtc_UWord32 instanceID, FileFormats fileFormat);
+    virtual ~FileRecorderImpl();
+
+    // FileRecorder functions.
+    virtual WebRtc_Word32 RegisterModuleFileCallback(FileCallback* callback);
+    virtual FileFormats RecordingFileFormat() const;
+    virtual WebRtc_Word32 StartRecordingAudioFile(
+        const char* fileName,
+        const CodecInst& codecInst,
+        WebRtc_UWord32 notificationTimeMs,
+        ACMAMRPackingFormat amrFormat = AMRFileStorage);
+    virtual WebRtc_Word32 StartRecordingAudioFile(
+        OutStream& destStream,
+        const CodecInst& codecInst,
+        WebRtc_UWord32 notificationTimeMs,
+        ACMAMRPackingFormat amrFormat = AMRFileStorage);
+    virtual WebRtc_Word32 StopRecording();
+    virtual bool IsRecording() const;
+    virtual WebRtc_Word32 codec_info(CodecInst& codecInst) const;
+    virtual WebRtc_Word32 RecordAudioToFile(
+        const AudioFrame& frame,
+        const TickTime* playoutTS = NULL);
+    virtual WebRtc_Word32 StartRecordingVideoFile(
+        const char* fileName,
+        const CodecInst& audioCodecInst,
+        const VideoCodec& videoCodecInst,
+        ACMAMRPackingFormat amrFormat = AMRFileStorage,
+        bool videoOnly = false)
+    {
+        return -1;
+    }
+    virtual WebRtc_Word32 RecordVideoToFile(const VideoFrame& videoFrame)
+    {
+        return -1;
+    }
+
+protected:
+    virtual WebRtc_Word32 WriteEncodedAudioData(
+        const WebRtc_Word8* audioBuffer,
+        WebRtc_UWord16 bufferLength,
+        WebRtc_UWord16 millisecondsOfData,
+        const TickTime* playoutTS);
+
+    WebRtc_Word32 SetUpAudioEncoder();
+
+    WebRtc_UWord32 _instanceID;
+    FileFormats _fileFormat;
+    MediaFile* _moduleFile;
+
+private:
+    CodecInst codec_info_;
+    ACMAMRPackingFormat _amrFormat;
+
+    WebRtc_Word8 _audioBuffer[MAX_AUDIO_BUFFER_IN_BYTES];
+    AudioCoder _audioEncoder;
+    Resampler _audioResampler;
+};
+
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+class AviRecorder : public FileRecorderImpl
+{
+public:
+    AviRecorder(WebRtc_UWord32 instanceID, FileFormats fileFormat);
+    virtual ~AviRecorder();
+
+    // FileRecorder functions.
+    virtual WebRtc_Word32 StartRecordingVideoFile(
+        const char* fileName,
+        const CodecInst& audioCodecInst,
+        const VideoCodec& videoCodecInst,
+        ACMAMRPackingFormat amrFormat = AMRFileStorage,
+        bool videoOnly = false);
+    virtual WebRtc_Word32 StopRecording();
+    virtual WebRtc_Word32 RecordVideoToFile(const VideoFrame& videoFrame);
+
+protected:
+    virtual WebRtc_Word32 WriteEncodedAudioData(
+        const WebRtc_Word8*  audioBuffer,
+        WebRtc_UWord16 bufferLength,
+        WebRtc_UWord16 millisecondsOfData,
+        const TickTime* playoutTS);
+private:
+    static bool Run(ThreadObj threadObj);
+    bool Process();
+
+    bool StartThread();
+    bool StopThread();
+
+    WebRtc_Word32 EncodeAndWriteVideoToFile(VideoFrame& videoFrame);
+    WebRtc_Word32 ProcessAudio();
+
+    WebRtc_Word32 CalcI420FrameSize() const;
+    WebRtc_Word32 SetUpVideoEncoder();
+
+    VideoCodec _videoCodecInst;
+    bool _videoOnly;
+
+    ListWrapper _audioFramesToWrite;
+    bool _firstAudioFrameReceived;
+
+    VideoFramesQueue* _videoFramesQueue;
+
+    FrameScaler* _frameScaler;
+    VideoCoder* _videoEncoder;
+    WebRtc_Word32 _videoMaxPayloadSize;
+    EncodedVideoData _videoEncodedData;
+
+    ThreadWrapper* _thread;
+    EventWrapper& _timeEvent;
+    CriticalSectionWrapper* _critSec;
+    WebRtc_Word64 _writtenVideoFramesCounter;
+    WebRtc_Word64 _writtenAudioMS;
+    WebRtc_Word64 _writtenVideoMS;
+};
+#endif // WEBRTC_MODULE_UTILITY_VIDEO
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UTILITY_SOURCE_FILE_RECORDER_IMPL_H_
diff --git a/src/modules/utility/source/frame_scaler.cc b/src/modules/utility/source/frame_scaler.cc
new file mode 100644
index 0000000..c012e89
--- /dev/null
+++ b/src/modules/utility/source/frame_scaler.cc
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/utility/source/frame_scaler.h"
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+
+#include "common_video/libyuv/include/scaler.h"
+#include "system_wrappers/interface/trace.h"
+
+namespace webrtc {
+
+FrameScaler::FrameScaler()
+    : scaler_(new Scaler()),
+      scaled_frame_() {}
+
+FrameScaler::~FrameScaler() {}
+
+int FrameScaler::ResizeFrameIfNeeded(VideoFrame* video_frame,
+                                     WebRtc_UWord32 out_width,
+                                     WebRtc_UWord32 out_height) {
+  if (video_frame->Length() == 0) {
+    return -1;
+  }
+
+  if ((video_frame->Width() != out_width) ||
+      (video_frame->Height() != out_height)) {
+    // Set correct scale settings and scale |video_frame| into |scaled_frame_|.
+    scaler_->Set(video_frame->Width(), video_frame->Height(), out_width,
+                 out_height, kI420, kI420, kScaleBox);
+    int out_length = CalcBufferSize(kI420, out_width, out_height);
+    scaled_frame_.VerifyAndAllocate(out_length);
+    int ret = scaler_->Scale(video_frame->Buffer(), scaled_frame_.Buffer(),
+                             out_length);
+    if (ret < 0) {
+      return ret;
+    }
+
+    scaled_frame_.SetWidth(out_width);
+    scaled_frame_.SetHeight(out_height);
+    scaled_frame_.SetLength(out_length);
+    scaled_frame_.SetRenderTime(video_frame->RenderTimeMs());
+    scaled_frame_.SetTimeStamp(video_frame->TimeStamp());
+    video_frame->SwapFrame(scaled_frame_);
+  }
+  return 0;
+}
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULE_UTILITY_VIDEO
diff --git a/src/modules/utility/source/frame_scaler.h b/src/modules/utility/source/frame_scaler.h
new file mode 100644
index 0000000..f86a933
--- /dev/null
+++ b/src/modules/utility/source/frame_scaler.h
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file implements a class that can be used for scaling frames.
+
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_FRAME_SCALER_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_FRAME_SCALER_H_
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+
+#include "engine_configurations.h"
+#include "modules/interface/module_common_types.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class Scaler;
+class VideoFrame;
+
+class FrameScaler {
+ public:
+    FrameScaler();
+    ~FrameScaler();
+
+    // Re-sizes |video_frame| so that it has the width |out_width| and height
+    // |out_height|.
+    int ResizeFrameIfNeeded(VideoFrame* video_frame,
+                            WebRtc_UWord32 out_width,
+                            WebRtc_UWord32 out_height);
+
+ private:
+    scoped_ptr<Scaler> scaler_;
+    VideoFrame scaled_frame_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULE_UTILITY_VIDEO
+
+#endif  // WEBRTC_MODULES_UTILITY_SOURCE_FRAME_SCALER_H_
diff --git a/src/modules/utility/source/process_thread_impl.cc b/src/modules/utility/source/process_thread_impl.cc
new file mode 100644
index 0000000..9028c44
--- /dev/null
+++ b/src/modules/utility/source/process_thread_impl.cc
@@ -0,0 +1,188 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "process_thread_impl.h"
+#include "module.h"
+#include "trace.h"
+
+namespace webrtc {
+ProcessThread::~ProcessThread()
+{
+}
+
+ProcessThread* ProcessThread::CreateProcessThread()
+{
+    return new ProcessThreadImpl();
+}
+
+void ProcessThread::DestroyProcessThread(ProcessThread* module)
+{
+    delete module;
+}
+
+ProcessThreadImpl::ProcessThreadImpl()
+    : _timeEvent(*EventWrapper::Create()),
+      _critSectModules(CriticalSectionWrapper::CreateCriticalSection()),
+      _thread(NULL)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, "%s created", __FUNCTION__);
+}
+
+ProcessThreadImpl::~ProcessThreadImpl()
+{
+    delete _critSectModules;
+    delete &_timeEvent;
+    WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, "%s deleted", __FUNCTION__);
+}
+
+WebRtc_Word32 ProcessThreadImpl::Start()
+{
+    CriticalSectionScoped lock(_critSectModules);
+    if(_thread)
+    {
+        return -1;
+    }
+    _thread = ThreadWrapper::CreateThread(Run, this, kNormalPriority,
+                                          "ProcessThread");
+    unsigned int id;
+    WebRtc_Word32 retVal = _thread->Start(id);
+    if(retVal >= 0)
+    {
+        return 0;
+    }
+    delete _thread;
+    _thread = NULL;
+    return -1;
+}
+
+WebRtc_Word32 ProcessThreadImpl::Stop()
+{
+    _critSectModules->Enter();
+    if(_thread)
+    {
+        _thread->SetNotAlive();
+
+        ThreadWrapper* thread = _thread;
+        _thread = NULL;
+
+        _timeEvent.Set();
+        _critSectModules->Leave();
+
+        if(thread->Stop())
+        {
+            delete thread;
+        } else {
+            return -1;
+        }
+    } else {
+        _critSectModules->Leave();
+    }
+    return 0;
+}
+
+WebRtc_Word32 ProcessThreadImpl::RegisterModule(const Module* module)
+{
+    CriticalSectionScoped lock(_critSectModules);
+
+    // Only allow module to be registered once.
+    ListItem* item = _modules.First();
+    for(WebRtc_UWord32 i = 0; i < _modules.GetSize() && item; i++)
+    {
+        if(module == item->GetItem())
+        {
+            return -1;
+        }
+        item = _modules.Next(item);
+    }
+
+    _modules.PushFront(module);
+    WEBRTC_TRACE(kTraceInfo, kTraceUtility, -1,
+                 "number of registered modules has increased to %d",
+                 _modules.GetSize());
+    // Wake the thread calling ProcessThreadImpl::Process() to update the
+    // waiting time. The waiting time for the just registered module may be
+    // shorter than all other registered modules.
+    _timeEvent.Set();
+    return 0;
+}
+
+WebRtc_Word32 ProcessThreadImpl::DeRegisterModule(const Module* module)
+{
+    CriticalSectionScoped lock(_critSectModules);
+
+    ListItem* item = _modules.First();
+    for(WebRtc_UWord32 i = 0; i < _modules.GetSize() && item; i++)
+    {
+        if(module == item->GetItem())
+        {
+            int res = _modules.Erase(item);
+            WEBRTC_TRACE(kTraceInfo, kTraceUtility, -1,
+                         "number of registered modules has decreased to %d",
+                         _modules.GetSize());
+            return res;
+        }
+        item = _modules.Next(item);
+    }
+    return -1;
+}
+
+bool ProcessThreadImpl::Run(void* obj)
+{
+    return static_cast<ProcessThreadImpl*>(obj)->Process();
+}
+
+bool ProcessThreadImpl::Process()
+{
+    // Wait for the module that should be called next, but don't block thread
+    // longer than 100 ms.
+    WebRtc_Word32 minTimeToNext = 100;
+    {
+        CriticalSectionScoped lock(_critSectModules);
+        ListItem* item = _modules.First();
+        for(WebRtc_UWord32 i = 0; i < _modules.GetSize() && item; i++)
+        {
+            WebRtc_Word32 timeToNext =
+                static_cast<Module*>(item->GetItem())->TimeUntilNextProcess();
+            if(minTimeToNext > timeToNext)
+            {
+                minTimeToNext = timeToNext;
+            }
+            item = _modules.Next(item);
+        }
+    }
+
+    if(minTimeToNext > 0)
+    {
+        if(kEventError == _timeEvent.Wait(minTimeToNext))
+        {
+            return true;
+        }
+        if(!_thread)
+        {
+            return false;
+        }
+    }
+    {
+        CriticalSectionScoped lock(_critSectModules);
+        ListItem* item = _modules.First();
+        for(WebRtc_UWord32 i = 0; i < _modules.GetSize() && item; i++)
+        {
+            WebRtc_Word32 timeToNext =
+                static_cast<Module*>(item->GetItem())->TimeUntilNextProcess();
+            if(timeToNext < 1)
+            {
+                static_cast<Module*>(item->GetItem())->Process();
+            }
+            item = _modules.Next(item);
+        }
+    }
+    return true;
+}
+} // namespace webrtc
diff --git a/src/modules/utility/source/process_thread_impl.h b/src/modules/utility/source/process_thread_impl.h
new file mode 100644
index 0000000..79b1272
--- /dev/null
+++ b/src/modules/utility/source/process_thread_impl.h
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_PROCESS_THREAD_IMPL_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_PROCESS_THREAD_IMPL_H_
+
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "list_wrapper.h"
+#include "process_thread.h"
+#include "thread_wrapper.h"
+#include "typedefs.h"
+
+namespace webrtc {
+class ProcessThreadImpl : public ProcessThread
+{
+public:
+    ProcessThreadImpl();
+    virtual ~ProcessThreadImpl();
+
+    virtual WebRtc_Word32 Start();
+    virtual WebRtc_Word32 Stop();
+
+    virtual WebRtc_Word32 RegisterModule(const Module* module);
+    virtual WebRtc_Word32 DeRegisterModule(const Module* module);
+
+protected:
+    static bool Run(void* obj);
+
+    bool Process();
+
+private:
+    EventWrapper&           _timeEvent;
+    CriticalSectionWrapper* _critSectModules;
+    ListWrapper             _modules;
+    ThreadWrapper*          _thread;
+};
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_UTILITY_SOURCE_PROCESS_THREAD_IMPL_H_
diff --git a/src/modules/utility/source/rtp_dump_impl.cc b/src/modules/utility/source/rtp_dump_impl.cc
new file mode 100644
index 0000000..69a52ec
--- /dev/null
+++ b/src/modules/utility/source/rtp_dump_impl.cc
@@ -0,0 +1,282 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtp_dump_impl.h"
+
+#include <cassert>
+#include <stdio.h>
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+#if defined(_WIN32)
+#include <Windows.h>
+#include <mmsystem.h>
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#include <string.h>
+#include <sys/time.h>
+#include <time.h>
+#endif
+
+#if (defined(_DEBUG) && defined(_WIN32))
+#define DEBUG_PRINT(expr)   OutputDebugString(##expr)
+#define DEBUG_PRINTP(expr, p)   \
+{                               \
+    char msg[128];              \
+    sprintf(msg, ##expr, p);    \
+    OutputDebugString(msg);     \
+}
+#else
+#define DEBUG_PRINT(expr)    ((void)0)
+#define DEBUG_PRINTP(expr,p) ((void)0)
+#endif  // defined(_DEBUG) && defined(_WIN32)
+
+namespace webrtc {
+const char RTPFILE_VERSION[] = "1.0";
+const WebRtc_UWord32 MAX_UWORD32 = 0xffffffff;
+
+// This stucture is specified in the rtpdump documentation.
+// This struct corresponds to RD_packet_t in
+// http://www.cs.columbia.edu/irt/software/rtptools/
+typedef struct
+{
+    // Length of packet, including this header (may be smaller than plen if not
+    // whole packet recorded).
+    WebRtc_UWord16 length;
+    // Actual header+payload length for RTP, 0 for RTCP.
+    WebRtc_UWord16 plen;
+    // Milliseconds since the start of recording.
+    WebRtc_UWord32 offset;
+} rtpDumpPktHdr_t;
+
+RtpDump* RtpDump::CreateRtpDump()
+{
+    return new RtpDumpImpl();
+}
+
+void RtpDump::DestroyRtpDump(RtpDump* object)
+{
+    delete object;
+}
+
+RtpDumpImpl::RtpDumpImpl()
+    : _critSect(CriticalSectionWrapper::CreateCriticalSection()),
+      _file(*FileWrapper::Create()),
+      _startTime(0)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, "%s created", __FUNCTION__);
+}
+
+RtpDump::~RtpDump()
+{
+}
+
+RtpDumpImpl::~RtpDumpImpl()
+{
+    _file.Flush();
+    _file.CloseFile();
+    delete &_file;
+    delete _critSect;
+    WEBRTC_TRACE(kTraceMemory, kTraceUtility, -1, "%s deleted", __FUNCTION__);
+}
+
+WebRtc_Word32 RtpDumpImpl::Start(const char* fileNameUTF8)
+{
+
+    if (fileNameUTF8 == NULL)
+    {
+        return -1;
+    }
+
+    CriticalSectionScoped lock(_critSect);
+    _file.Flush();
+    _file.CloseFile();
+    if (_file.OpenFile(fileNameUTF8, false, false, false) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
+                     "failed to open the specified file");
+        return -1;
+    }
+
+    // Store start of RTP dump (to be used for offset calculation later).
+    _startTime = GetTimeInMS();
+
+    // All rtp dump files start with #!rtpplay.
+    char magic[16];
+    sprintf(magic, "#!rtpplay%s \n", RTPFILE_VERSION);
+    if (_file.WriteText(magic) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
+                     "error writing to file");
+        return -1;
+    }
+
+    // The header according to the rtpdump documentation is sizeof(RD_hdr_t)
+    // which is 8 + 4 + 2 = 14 bytes for 32-bit architecture (and 22 bytes on
+    // 64-bit architecture). However, Wireshark use 16 bytes for the header
+    // regardless of if the binary is 32-bit or 64-bit. Go by the same approach
+    // as Wireshark since it makes more sense.
+    // http://wiki.wireshark.org/rtpdump explains that an additional 2 bytes
+    // of padding should be added to the header.
+    char dummyHdr[16];
+    memset(dummyHdr, 0, 16);
+    if (!_file.Write(dummyHdr, sizeof(dummyHdr)))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
+                     "error writing to file");
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 RtpDumpImpl::Stop()
+{
+    CriticalSectionScoped lock(_critSect);
+    _file.Flush();
+    _file.CloseFile();
+    return 0;
+}
+
+bool RtpDumpImpl::IsActive() const
+{
+    CriticalSectionScoped lock(_critSect);
+    return _file.Open();
+}
+
+WebRtc_Word32 RtpDumpImpl::DumpPacket(const WebRtc_UWord8* packet,
+                                      WebRtc_UWord16 packetLength)
+{
+    CriticalSectionScoped lock(_critSect);
+    if (!IsActive())
+    {
+        return 0;
+    }
+
+    if (packet == NULL)
+    {
+        return -1;
+    }
+
+    if (packetLength < 1)
+    {
+        return -1;
+    }
+
+    // If the packet doesn't contain a valid RTCP header the packet will be
+    // considered RTP (without further verification).
+    bool isRTCP = RTCP(packet);
+
+    rtpDumpPktHdr_t hdr;
+    WebRtc_UWord32 offset;
+
+    // Offset is relative to when recording was started.
+    offset = GetTimeInMS();
+    if (offset < _startTime)
+    {
+        // Compensate for wraparound.
+        offset += MAX_UWORD32 - _startTime + 1;
+    } else {
+        offset -= _startTime;
+    }
+    hdr.offset = RtpDumpHtonl(offset);
+
+    hdr.length = RtpDumpHtons((WebRtc_UWord16)(packetLength + sizeof(hdr)));
+    if (isRTCP)
+    {
+        hdr.plen = 0;
+    }
+    else
+    {
+        hdr.plen = RtpDumpHtons((WebRtc_UWord16)packetLength);
+    }
+
+    if (!_file.Write(&hdr, sizeof(hdr)))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
+                     "error writing to file");
+        return -1;
+    }
+    if (!_file.Write(packet, packetLength))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
+                     "error writing to file");
+        return -1;
+    }
+
+    return 0;
+}
+
+bool RtpDumpImpl::RTCP(const WebRtc_UWord8* packet) const
+{
+    const WebRtc_UWord8 payloadType = packet[1];
+    bool is_rtcp = false;
+
+    switch(payloadType)
+    {
+    case 192:
+        is_rtcp = true;
+        break;
+    case 193: case 195:
+        break;
+    case 200: case 201: case 202: case 203:
+    case 204: case 205: case 206: case 207:
+        is_rtcp = true;
+        break;
+    }
+    return is_rtcp;
+}
+
+// TODO (hellner): why is TickUtil not used here?
+inline WebRtc_UWord32 RtpDumpImpl::GetTimeInMS() const
+{
+#if defined(_WIN32)
+    return timeGetTime();
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+    struct timeval tv;
+    struct timezone tz;
+    unsigned long val;
+
+    gettimeofday(&tv, &tz);
+    val = tv.tv_sec * 1000 + tv.tv_usec / 1000;
+    return val;
+#else
+    #error Either _WIN32 or LINUX or WEBRTC_MAC has to be defined!
+    assert(false);
+    return 0;
+#endif
+}
+
+inline WebRtc_UWord32 RtpDumpImpl::RtpDumpHtonl(WebRtc_UWord32 x) const
+{
+#if defined(WEBRTC_BIG_ENDIAN)
+    return x;
+#elif defined(WEBRTC_LITTLE_ENDIAN)
+    return (x >> 24) + ((((x >> 16) & 0xFF) << 8) + ((((x >> 8) & 0xFF) << 16) +
+                                                     ((x & 0xFF) << 24)));
+#else
+#error Either WEBRTC_BIG_ENDIAN or WEBRTC_LITTLE_ENDIAN has to be defined!
+    assert(false);
+    return 0;
+#endif
+}
+
+inline WebRtc_UWord16 RtpDumpImpl::RtpDumpHtons(WebRtc_UWord16 x) const
+{
+#if defined(WEBRTC_BIG_ENDIAN)
+    return x;
+#elif defined(WEBRTC_LITTLE_ENDIAN)
+    return (x >> 8) + ((x & 0xFF) << 8);
+#else
+    #error Either WEBRTC_BIG_ENDIAN or WEBRTC_LITTLE_ENDIAN has to be defined!
+    assert(false);
+    return 0;
+#endif
+}
+} // namespace webrtc
diff --git a/src/modules/utility/source/rtp_dump_impl.h b/src/modules/utility/source/rtp_dump_impl.h
new file mode 100644
index 0000000..9715c35
--- /dev/null
+++ b/src/modules/utility/source/rtp_dump_impl.h
@@ -0,0 +1,49 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_RTP_DUMP_IMPL_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_RTP_DUMP_IMPL_H_
+
+#include "rtp_dump.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class FileWrapper;
+class RtpDumpImpl : public RtpDump
+{
+public:
+    RtpDumpImpl();
+    virtual ~RtpDumpImpl();
+
+    virtual WebRtc_Word32 Start(const char* fileNameUTF8);
+    virtual WebRtc_Word32 Stop();
+    virtual bool IsActive() const;
+    virtual WebRtc_Word32 DumpPacket(const WebRtc_UWord8* packet,
+                                     WebRtc_UWord16 packetLength);
+private:
+    // Return the system time in ms.
+    inline WebRtc_UWord32 GetTimeInMS() const;
+    // Return x in network byte order (big endian).
+    inline WebRtc_UWord32 RtpDumpHtonl(WebRtc_UWord32 x) const;
+    // Return x in network byte order (big endian).
+    inline WebRtc_UWord16 RtpDumpHtons(WebRtc_UWord16 x) const;
+
+    // Return true if the packet starts with a valid RTCP header.
+    // Note: See ModuleRTPUtility::RTPHeaderParser::RTCP() for details on how
+    //       to determine if the packet is an RTCP packet.
+    bool RTCP(const WebRtc_UWord8* packet) const;
+
+private:
+    CriticalSectionWrapper* _critSect;
+    FileWrapper& _file;
+    WebRtc_UWord32 _startTime;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_UTILITY_SOURCE_RTP_DUMP_IMPL_H_
diff --git a/src/modules/utility/source/utility.gypi b/src/modules/utility/source/utility.gypi
new file mode 100644
index 0000000..308dd9e
--- /dev/null
+++ b/src/modules/utility/source/utility.gypi
@@ -0,0 +1,95 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'webrtc_utility',
+      'type': '<(library)',
+      'dependencies': [
+        'audio_coding_module',
+        '<(webrtc_root)/common_audio/common_audio.gyp:resampler',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../interface',
+        '../../interface',
+        '../../media_file/interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../interface',
+          '../../interface',
+          '../../audio_coding/main/interface',
+        ],
+      },
+      'sources': [
+        '../interface/audio_frame_operations.h',
+        '../interface/file_player.h',
+        '../interface/file_recorder.h',
+        '../interface/process_thread.h',
+        '../interface/rtp_dump.h',
+        'audio_frame_operations.cc',
+        'coder.cc',
+        'coder.h',
+        'file_player_impl.cc',
+        'file_player_impl.h',
+        'file_recorder_impl.cc',
+        'file_recorder_impl.h',
+        'process_thread_impl.cc',
+        'process_thread_impl.h',
+        'rtp_dump_impl.cc',
+        'rtp_dump_impl.h',
+      ],
+      'conditions': [
+        ['enable_video==1', {
+          # Adds support for video recording.
+          'defines': [
+            'WEBRTC_MODULE_UTILITY_VIDEO',
+          ],
+          'dependencies': [
+            'webrtc_video_coding',
+          ],
+          'include_dirs': [
+            '../../video_coding/main/interface',
+          ],
+          'sources': [
+            'frame_scaler.cc',
+            'video_coder.cc',
+            'video_frames_queue.cc',
+          ],
+        }],
+      ],
+    },
+  ], # targets
+  'conditions': [
+    ['include_tests==1', {
+      'targets': [
+        {
+          'target_name': 'webrtc_utility_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'webrtc_utility',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+          ],
+          'sources': [
+            'audio_frame_operations_unittest.cc',
+            'file_player_unittest.cc',
+          ],
+        }, # webrtc_utility_unittests
+      ], # targets
+    }], # include_tests
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/modules/utility/source/video_coder.cc b/src/modules/utility/source/video_coder.cc
new file mode 100644
index 0000000..7276723
--- /dev/null
+++ b/src/modules/utility/source/video_coder.cc
@@ -0,0 +1,152 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+
+#include "video_coder.h"
+
+namespace webrtc {
+VideoCoder::VideoCoder(WebRtc_UWord32 instanceID)
+    : _vcm(VideoCodingModule::Create(instanceID)),
+      _decodedVideo(0)
+{
+    _vcm->InitializeSender();
+    _vcm->InitializeReceiver();
+
+    _vcm->RegisterTransportCallback(this);
+    _vcm->RegisterReceiveCallback(this);
+}
+
+VideoCoder::~VideoCoder()
+{
+    VideoCodingModule::Destroy(_vcm);
+}
+
+WebRtc_Word32 VideoCoder::ResetDecoder()
+{
+    _vcm->ResetDecoder();
+
+    _vcm->InitializeSender();
+    _vcm->InitializeReceiver();
+
+    _vcm->RegisterTransportCallback(this);
+    _vcm->RegisterReceiveCallback(this);
+    return 0;
+}
+
+WebRtc_Word32 VideoCoder::SetEncodeCodec(VideoCodec& videoCodecInst,
+                                         WebRtc_UWord32 numberOfCores,
+                                         WebRtc_UWord32 maxPayloadSize)
+{
+    if(_vcm->RegisterSendCodec(&videoCodecInst, numberOfCores,
+                               maxPayloadSize) != VCM_OK)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+
+WebRtc_Word32 VideoCoder::SetDecodeCodec(VideoCodec& videoCodecInst,
+                                         WebRtc_Word32 numberOfCores)
+{
+    if (videoCodecInst.plType == 0)
+    {
+        WebRtc_Word8 plType = DefaultPayloadType(videoCodecInst.plName);
+        if (plType == -1)
+        {
+            return -1;
+        }
+        videoCodecInst.plType = plType;
+    }
+
+    if(_vcm->RegisterReceiveCodec(&videoCodecInst, numberOfCores) != VCM_OK)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 VideoCoder::Decode(VideoFrame& decodedVideo,
+                                 const EncodedVideoData& encodedData)
+{
+    decodedVideo.SetLength(0);
+    if(encodedData.payloadSize <= 0)
+    {
+        return -1;
+    }
+
+    _decodedVideo = &decodedVideo;
+    if(_vcm->DecodeFromStorage(encodedData) != VCM_OK)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+
+WebRtc_Word32 VideoCoder::Encode(const VideoFrame& videoFrame,
+                                 EncodedVideoData& videoEncodedData)
+{
+    // The AddVideoFrame(..) call will (indirectly) call SendData(). Store a
+    // pointer to videoFrame so that it can be updated.
+    _videoEncodedData = &videoEncodedData;
+    videoEncodedData.payloadSize = 0;
+    if(_vcm->AddVideoFrame(videoFrame) != VCM_OK)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word8 VideoCoder::DefaultPayloadType(const char* plName)
+{
+    VideoCodec tmpCodec;
+    WebRtc_Word32 numberOfCodecs = _vcm->NumberOfCodecs();
+    for (WebRtc_UWord8 i = 0; i < numberOfCodecs; i++)
+    {
+        _vcm->Codec(i, &tmpCodec);
+        if(strncmp(tmpCodec.plName, plName, kPayloadNameSize) == 0)
+        {
+            return tmpCodec.plType;
+        }
+    }
+    return -1;
+}
+
+WebRtc_Word32 VideoCoder::FrameToRender(VideoFrame& videoFrame)
+{
+    return _decodedVideo->CopyFrame(videoFrame);
+}
+
+WebRtc_Word32 VideoCoder::SendData(
+    const FrameType frameType,
+    const WebRtc_UWord8  payloadType,
+    const WebRtc_UWord32 timeStamp,
+    int64_t capture_time_ms,
+    const WebRtc_UWord8* payloadData,
+    WebRtc_UWord32 payloadSize,
+    const RTPFragmentationHeader& fragmentationHeader,
+    const RTPVideoHeader* /*rtpVideoHdr*/)
+{
+    // Store the data in _videoEncodedData which is a pointer to videoFrame in
+    // Encode(..)
+    _videoEncodedData->VerifyAndAllocate(payloadSize);
+    _videoEncodedData->frameType = frameType;
+    _videoEncodedData->payloadType = payloadType;
+    _videoEncodedData->timeStamp = timeStamp;
+    _videoEncodedData->fragmentationHeader = fragmentationHeader;
+    memcpy(_videoEncodedData->payloadData, payloadData,
+           sizeof(WebRtc_UWord8) * payloadSize);
+    _videoEncodedData->payloadSize = payloadSize;
+    return 0;
+}
+} // namespace webrtc
+#endif // WEBRTC_MODULE_UTILITY_VIDEO
diff --git a/src/modules/utility/source/video_coder.h b/src/modules/utility/source/video_coder.h
new file mode 100644
index 0000000..7e7762a
--- /dev/null
+++ b/src/modules/utility/source/video_coder.h
@@ -0,0 +1,69 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_CODER_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_CODER_H_
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+
+#include "engine_configurations.h"
+#include "video_coding.h"
+
+namespace webrtc {
+class VideoCoder : public VCMPacketizationCallback, public VCMReceiveCallback
+{
+public:
+    VideoCoder(WebRtc_UWord32 instanceID);
+    ~VideoCoder();
+
+    WebRtc_Word32 ResetDecoder();
+
+    WebRtc_Word32 SetEncodeCodec(VideoCodec& videoCodecInst,
+                                 WebRtc_UWord32 numberOfCores,
+                                 WebRtc_UWord32 maxPayloadSize);
+
+
+    // Select the codec that should be used for decoding. videoCodecInst.plType
+    // will be set to the codec's default payload type.
+    WebRtc_Word32 SetDecodeCodec(VideoCodec& videoCodecInst,
+                                 WebRtc_Word32 numberOfCores);
+
+    WebRtc_Word32 Decode(VideoFrame& decodedVideo,
+                         const EncodedVideoData& encodedData);
+
+    WebRtc_Word32 Encode(const VideoFrame& videoFrame,
+                         EncodedVideoData& videoEncodedData);
+
+    WebRtc_Word8 DefaultPayloadType(const char* plName);
+
+private:
+    // VCMReceiveCallback function.
+    // Note: called by VideoCodingModule when decoding finished.
+    WebRtc_Word32 FrameToRender(VideoFrame& videoFrame);
+
+    // VCMPacketizationCallback function.
+    // Note: called by VideoCodingModule when encoding finished.
+    WebRtc_Word32 SendData(
+        FrameType /*frameType*/,
+        WebRtc_UWord8 /*payloadType*/,
+        WebRtc_UWord32 /*timeStamp*/,
+        int64_t capture_time_ms,
+        const WebRtc_UWord8* payloadData,
+        WebRtc_UWord32 payloadSize,
+        const RTPFragmentationHeader& /* fragmentationHeader*/,
+        const RTPVideoHeader* rtpTypeHdr);
+
+    VideoCodingModule* _vcm;
+    VideoFrame* _decodedVideo;
+    EncodedVideoData* _videoEncodedData;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULE_UTILITY_VIDEO
+#endif // WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_CODER_H_
diff --git a/src/modules/utility/source/video_frames_queue.cc b/src/modules/utility/source/video_frames_queue.cc
new file mode 100644
index 0000000..ab590c4
--- /dev/null
+++ b/src/modules/utility/source/video_frames_queue.cc
@@ -0,0 +1,150 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_frames_queue.h"
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+
+#include <cassert>
+
+#include "module_common_types.h"
+#include "tick_util.h"
+#include "trace.h"
+
+namespace webrtc {
+VideoFramesQueue::VideoFramesQueue()
+    : _incomingFrames(),
+      _renderDelayMs(10)
+{
+}
+
+VideoFramesQueue::~VideoFramesQueue()
+{
+    while (!_incomingFrames.Empty())
+    {
+        ListItem* item = _incomingFrames.First();
+        if (item)
+        {
+            VideoFrame* ptrFrame = static_cast<VideoFrame*>(item->GetItem());
+            assert(ptrFrame != NULL);
+            ptrFrame->Free();
+            delete ptrFrame;
+        }
+        _incomingFrames.Erase(item);
+    }
+    while (!_emptyFrames.Empty())
+    {
+        ListItem* item = _emptyFrames.First();
+        if (item)
+        {
+            VideoFrame* ptrFrame = static_cast<VideoFrame*>(item->GetItem());
+            assert(ptrFrame != NULL);
+            ptrFrame->Free();
+            delete ptrFrame;
+        }
+        _emptyFrames.Erase(item);
+    }
+}
+
+WebRtc_Word32 VideoFramesQueue::AddFrame(const VideoFrame& newFrame)
+{
+    VideoFrame* ptrFrameToAdd = NULL;
+    // Try to re-use a VideoFrame. Only allocate new memory if it is necessary.
+    if (!_emptyFrames.Empty())
+    {
+        ListItem* item = _emptyFrames.First();
+        if (item)
+        {
+            ptrFrameToAdd = static_cast<VideoFrame*>(item->GetItem());
+            _emptyFrames.Erase(item);
+        }
+    }
+    if (!ptrFrameToAdd)
+    {
+        if (_emptyFrames.GetSize() + _incomingFrames.GetSize() >
+            KMaxNumberOfFrames)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
+                         "%s: too many frames, limit: %d", __FUNCTION__,
+                         KMaxNumberOfFrames);
+            return -1;
+        }
+
+        WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, -1,
+                     "%s: allocating buffer %d", __FUNCTION__,
+                     _emptyFrames.GetSize() + _incomingFrames.GetSize());
+
+        ptrFrameToAdd = new VideoFrame();
+        if (!ptrFrameToAdd)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                         "%s: could not create new frame for", __FUNCTION__);
+            return -1;
+        }
+    }
+    ptrFrameToAdd->CopyFrame(newFrame);
+    _incomingFrames.PushBack(ptrFrameToAdd);
+    return 0;
+}
+
+// Find the most recent frame that has a VideoFrame::RenderTimeMs() that is
+// lower than current time in ms (TickTime::MillisecondTimestamp()).
+// Note _incomingFrames is sorted so that the oldest frame is first.
+// Recycle all frames that are older than the most recent frame.
+VideoFrame* VideoFramesQueue::FrameToRecord()
+{
+    VideoFrame* ptrRenderFrame = NULL;
+    ListItem* item = _incomingFrames.First();
+    while(item)
+    {
+        VideoFrame* ptrOldestFrameInList =
+            static_cast<VideoFrame*>(item->GetItem());
+        if (ptrOldestFrameInList->RenderTimeMs() <=
+            TickTime::MillisecondTimestamp() + _renderDelayMs)
+        {
+            if (ptrRenderFrame)
+            {
+                // List is traversed beginning to end. If ptrRenderFrame is not
+                // NULL it must be the first, and thus oldest, VideoFrame in the
+                // queue. It can be recycled.
+                ReturnFrame(ptrRenderFrame);
+                _incomingFrames.PopFront();
+            }
+            item = _incomingFrames.Next(item);
+            ptrRenderFrame = ptrOldestFrameInList;
+        }else
+        {
+            // All VideoFrames following this one will be even newer. No match
+            // will be found.
+            break;
+        }
+    }
+    return ptrRenderFrame;
+}
+
+WebRtc_Word32 VideoFramesQueue::ReturnFrame(VideoFrame* ptrOldFrame)
+{
+    ptrOldFrame->SetTimeStamp(0);
+    ptrOldFrame->SetWidth(0);
+    ptrOldFrame->SetHeight(0);
+    ptrOldFrame->SetRenderTime(0);
+    ptrOldFrame->SetLength(0);
+    _emptyFrames.PushBack(ptrOldFrame);
+    return 0;
+}
+
+//
+WebRtc_Word32 VideoFramesQueue::SetRenderDelay(WebRtc_UWord32 renderDelay)
+{
+     _renderDelayMs = renderDelay;
+     return 0;
+}
+} // namespace webrtc
+#endif // WEBRTC_MODULE_UTILITY_VIDEO
diff --git a/src/modules/utility/source/video_frames_queue.h b/src/modules/utility/source/video_frames_queue.h
new file mode 100644
index 0000000..6c9be1c
--- /dev/null
+++ b/src/modules/utility/source/video_frames_queue.h
@@ -0,0 +1,62 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_FRAMES_QUEUE_H_
+#define WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_FRAMES_QUEUE_H_
+
+#ifdef WEBRTC_MODULE_UTILITY_VIDEO
+
+#include "engine_configurations.h"
+#include "list_wrapper.h"
+#include "typedefs.h"
+
+namespace webrtc {
+class VideoFrame;
+
+class VideoFramesQueue
+{
+public:
+    VideoFramesQueue();
+    ~VideoFramesQueue();
+
+    // Put newFrame (last) in the queue.
+    WebRtc_Word32 AddFrame(const VideoFrame& newFrame);
+
+    // Return the most current frame. I.e. the frame with the highest
+    // VideoFrame::RenderTimeMs() that is lower than
+    // TickTime::MillisecondTimestamp().
+    VideoFrame* FrameToRecord();
+
+    // Set the render delay estimate to renderDelay ms.
+    WebRtc_Word32 SetRenderDelay(WebRtc_UWord32 renderDelay);
+
+protected:
+    // Make ptrOldFrame available for re-use. I.e. put it in the empty frames
+    // queue.
+    WebRtc_Word32 ReturnFrame(VideoFrame* ptrOldFrame);
+
+private:
+    // Don't allow the buffer to expand beyond KMaxNumberOfFrames VideoFrames.
+    // 300 frames correspond to 10 seconds worth of frames at 30 fps.
+    enum {KMaxNumberOfFrames = 300};
+
+    // List of VideoFrame pointers. The list is sorted in the order of when the
+    // VideoFrame was inserted into the list. The first VideoFrame in the list
+    // was inserted first.
+    ListWrapper    _incomingFrames;
+    // A list of frames that are free to be re-used.
+    ListWrapper    _emptyFrames;
+
+    // Estimated render delay.
+    WebRtc_UWord32 _renderDelayMs;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULE_UTILITY_VIDEO
+#endif  // WEBRTC_MODULES_UTILITY_SOURCE_VIDEO_FRAMES_QUEUE_H_
diff --git a/src/modules/utility/test/testAPI.cc b/src/modules/utility/test/testAPI.cc
new file mode 100644
index 0000000..166d483
--- /dev/null
+++ b/src/modules/utility/test/testAPI.cc
@@ -0,0 +1,368 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// testAPI.cpp : Defines the entry point for the console application.
+//
+// NOTES:
+//          1. MediaFile library and testAPI.cpp must be built in DEBUG mode for testing.
+//
+
+#include <iostream>
+#include <stdio.h>
+#include <assert.h>
+
+#ifdef WIN32
+    #include <windows.h>
+    #include <tchar.h>
+#endif
+
+#include "common_types.h"
+#include "trace.h"
+
+#include "Engineconfigurations.h"
+#include "media_file.h"
+#include "file_player.h"
+#include "file_recorder.h"
+
+
+bool notify = false, playing = false, recording = false;
+
+// callback class for FileModule
+class MyFileModuleCallback : public FileCallback
+{
+public:
+    virtual void PlayNotification( const WebRtc_Word32 id,
+                                   const WebRtc_UWord32 durationMs )
+    {
+        printf("\tReceived PlayNotification from module %ld, durationMs = %ld\n",
+               id, durationMs);
+        notify = true;
+    };
+
+    virtual void RecordNotification( const WebRtc_Word32 id,
+                                     const WebRtc_UWord32 durationMs )
+    {
+        printf("\tReceived RecordNotification from module %ld, durationMs = %ld\n",
+               id, durationMs);
+        notify = true;
+    };
+
+    virtual void PlayFileEnded(const WebRtc_Word32 id)
+    {
+        printf("\tReceived PlayFileEnded notification from module %ld.\n", id);
+        playing = false;
+    };
+
+    virtual void RecordFileEnded(const WebRtc_Word32 id)
+    {
+        printf("\tReceived RecordFileEnded notification from module %ld.\n", id);
+        recording = false;
+    }
+};
+
+// main test app
+#ifdef WIN32
+int _tmain(int argc, _TCHAR* argv[])
+#else
+int main(int /*argc*/, char** /*argv*/)
+#endif
+{
+    Trace::CreateTrace();
+    Trace::SetTraceFile("testTrace.txt");
+    Trace::SetEncryptedTraceFile("testTraceDebug.txt");
+
+    int playId = 1;
+    int recordId = 2;
+
+    printf("Welcome to test of FilePlayer and FileRecorder\n");
+
+
+    ///////////////////////////////////////////////
+    //
+    // avi test case 1
+    //
+    ///////////////////////////////////////////////
+
+
+    // todo PW we need more AVI tests Mp4
+
+    {
+        FilePlayer& filePlayer(*FilePlayer::CreateFilePlayer(1, webrtc::kFileFormatAviFile));
+        FileRecorder& fileRecorder(*FileRecorder::CreateFileRecorder(1, webrtc::kFileFormatAviFile));
+
+        const char* KFileName = "./tmpAviFileTestCase1_audioI420CIF30fps.avi";
+
+        printf("\tReading from an avi file and writing the information to another \n");
+        printf("\tin the same format (I420 CIF 30fps) \n");
+        printf("\t\t check file named %s\n", KFileName);
+
+        assert(filePlayer.StartPlayingVideoFile(
+           "../../../MediaFile/main/test/files/aviTestCase1_audioI420CIF30fps.avi",
+           false, false) == 0);
+
+        // init codecs
+         webrtc::VideoCodec videoCodec;
+        webrtc::VideoCodec recVideoCodec;
+        webrtc::CodecInst audioCodec;
+        assert(filePlayer.VideoCodec( videoCodec ) == 0);
+        assert(filePlayer.AudioCodec( audioCodec) == 0);
+
+        recVideoCodec = videoCodec;
+
+        assert( fileRecorder.StartRecordingVideoFile(KFileName,
+                                                     audioCodec,
+                                                     recVideoCodec) == 0);
+
+        assert(fileRecorder.IsRecording());
+
+        WebRtc_UWord32 videoReadSize = static_cast<WebRtc_UWord32>( (videoCodec.width * videoCodec.height * 3.0) / 2.0);
+
+        webrtc::VideoFrame videoFrame;
+        videoFrame.VerifyAndAllocate(videoReadSize);
+
+        int  frameCount   = 0;
+        bool audioNotDone = true;
+        bool videoNotDone =    true;
+        AudioFrame audioFrame;
+
+        while( audioNotDone || videoNotDone)
+        {
+            if(filePlayer.TimeUntilNextVideoFrame() <= 0)
+            {
+                if(filePlayer.GetVideoFromFile( videoFrame) != 0)
+                {
+                    // no more video frames
+                    break;
+                }
+                frameCount++;
+                videoNotDone = ( videoFrame.Length() > 0);
+                videoFrame.SetRenderTime(TickTime::MillisecondTimestamp());
+                if( videoNotDone)
+                {
+                    assert(fileRecorder.RecordVideoToFile(videoFrame) == 0);
+                    ::Sleep(10);
+                }
+            }
+             WebRtc_UWord32 decodedDataLengthInSamples;
+            if( 0 !=  filePlayer.Get10msAudioFromFile( audioFrame.data_, decodedDataLengthInSamples, audioCodec.plfreq))
+            {
+                audioNotDone = false;
+            } else
+            {
+                audioFrame.sample_rate_hz_ = filePlayer.Frequency();
+                audioFrame.samples_per_channel_ = (WebRtc_UWord16)decodedDataLengthInSamples;
+                fileRecorder.RecordAudioToFile(audioFrame, &TickTime::Now());
+            }
+       }
+        ::Sleep(100);
+        assert(fileRecorder.StopRecording() == 0);
+        assert( !fileRecorder.IsRecording());
+        assert(frameCount == 135);
+        printf("\tGenerated %s\n\n", KFileName);
+    }
+    ///////////////////////////////////////////////
+    //
+    // avi test case 2
+    //
+    ///////////////////////////////////////////////
+    {
+        FilePlayer& filePlayer(*FilePlayer::CreateFilePlayer(2, webrtc::kFileFormatAviFile));
+        FileRecorder& fileRecorder(*FileRecorder::CreateFileRecorder(2, webrtc::kFileFormatAviFile));
+
+        const char* KFileName = "./tmpAviFileTestCase2_audioI420CIF20fps.avi";
+
+        printf("\tWriting information to a avi file and check the written file by \n");
+        printf("\treopening it and control codec information.\n");
+        printf("\t\t check file named %s all frames should be light green.\n", KFileName);
+        // init codecs
+        webrtc::VideoCodec videoCodec;
+        webrtc::CodecInst      audioCodec;
+
+        memset(&videoCodec, 0, sizeof(videoCodec));
+
+        const char* KVideoCodecName = "I420";
+        strcpy(videoCodec.plName, KVideoCodecName);
+        videoCodec.plType    = 124;
+        videoCodec.maxFramerate = 20;
+        videoCodec.height    = 288;
+        videoCodec.width     = 352;
+
+        const char* KAudioCodecName = "PCMU";
+        strcpy(audioCodec.plname, KAudioCodecName);
+        audioCodec.pltype   = 0;
+        audioCodec.plfreq   = 8000;
+        audioCodec.pacsize  = 80;
+        audioCodec.channels = 1;
+        audioCodec.rate     = 64000;
+
+        assert( fileRecorder.StartRecordingVideoFile(
+            KFileName,
+            audioCodec,
+            videoCodec) == 0);
+
+        assert(fileRecorder.IsRecording());
+
+        const WebRtc_UWord32 KVideoWriteSize = static_cast< WebRtc_UWord32>( (videoCodec.width * videoCodec.height * 3) / 2);
+        webrtc::VideoFrame videoFrame;
+
+        // 10 ms
+        AudioFrame audioFrame;
+        audioFrame.samples_per_channel_ = audioCodec.plfreq/100;
+        memset(audioFrame.data_, 0, 2*audioFrame.samples_per_channel_);
+        audioFrame.sample_rate_hz_ = 8000;
+
+        // prepare the video frame
+        videoFrame.VerifyAndAllocate(KVideoWriteSize);
+        memset(videoFrame.Buffer(), 127, videoCodec.width * videoCodec.height);
+        memset(videoFrame.Buffer() +(videoCodec.width * videoCodec.height), 0, videoCodec.width * videoCodec.height/2);
+        videoFrame.SetLength(KVideoWriteSize);
+        videoFrame.SetHeight(videoCodec.height);
+        videoFrame.SetWidth(videoCodec.width);
+
+        // write avi file, with 20 video frames
+        const int KWriteNumFrames = 20;
+        int       writeFrameCount = 0;
+        while(writeFrameCount < KWriteNumFrames)
+        {
+            // add a video frame
+            assert(fileRecorder.RecordVideoToFile(videoFrame) == 0);
+
+            // add 50 ms of audio
+            for(int i=0; i<5; i++)
+            {
+                assert( fileRecorder.RecordAudioToFile(audioFrame) == 0);
+            }// for i
+            writeFrameCount++;
+        }
+        ::Sleep(10); // enough tim eto write the queued data to the file
+        assert(writeFrameCount == 20);
+        assert(fileRecorder.StopRecording() == 0);
+        assert( ! fileRecorder.IsRecording());
+
+        assert(filePlayer.StartPlayingVideoFile(KFileName,false, false) == 0);
+        assert(filePlayer.IsPlayingFile( ));
+
+        // compare codecs read from file to the ones used when writing the file
+        webrtc::VideoCodec readVideoCodec;
+        assert(filePlayer.VideoCodec( readVideoCodec ) == 0);
+        assert(strcmp(readVideoCodec.plName, videoCodec.plName) == 0);
+        assert(readVideoCodec.width      == videoCodec.width);
+        assert(readVideoCodec.height     == videoCodec.height);
+        assert(readVideoCodec.maxFramerate  == videoCodec.maxFramerate);
+
+        webrtc::CodecInst readAudioCodec;
+        assert(filePlayer.AudioCodec( readAudioCodec) == 0);
+        assert(strcmp(readAudioCodec.plname, audioCodec.plname) == 0);
+        assert(readAudioCodec.pltype     == audioCodec.pltype);
+        assert(readAudioCodec.plfreq     == audioCodec.plfreq);
+        assert(readAudioCodec.pacsize    == audioCodec.pacsize);
+        assert(readAudioCodec.channels   == audioCodec.channels);
+        assert(readAudioCodec.rate       == audioCodec.rate);
+
+        assert(filePlayer.StopPlayingFile() == 0);
+        assert( ! filePlayer.IsPlayingFile());
+        printf("\tGenerated %s\n\n", KFileName);
+    }
+    ///////////////////////////////////////////////
+    //
+    // avi test case 3
+    //
+    ///////////////////////////////////////////////
+
+    {
+        FilePlayer& filePlayer(*FilePlayer::CreateFilePlayer(2, webrtc::kFileFormatAviFile));
+        FileRecorder& fileRecorder(*FileRecorder::CreateFileRecorder(3, webrtc::kFileFormatAviFile));
+
+        printf("\tReading from an avi file and writing the information to another \n");
+        printf("\tin a different format (H.263 CIF 30fps) \n");
+        printf("\t\t check file named tmpAviFileTestCase1_audioH263CIF30fps.avi\n");
+
+        assert(filePlayer.StartPlayingVideoFile(
+           "../../../MediaFile/main/test/files/aviTestCase1_audioI420CIF30fps.avi",
+           false,
+           false) == 0);
+
+        // init codecs
+         webrtc::VideoCodec videoCodec;
+        webrtc::VideoCodec recVideoCodec;
+        webrtc::CodecInst      audioCodec;
+        assert(filePlayer.VideoCodec( videoCodec ) == 0);
+        assert(filePlayer.AudioCodec( audioCodec) == 0);
+        recVideoCodec = videoCodec;
+
+        memcpy(recVideoCodec.plName, "H263",5);
+        recVideoCodec.startBitrate = 1000;
+        recVideoCodec.codecSpecific.H263.quality = 1;
+        recVideoCodec.plType = 34;
+        recVideoCodec.codecType = webrtc::kVideoCodecH263;
+
+        assert( fileRecorder.StartRecordingVideoFile(
+            "./tmpAviFileTestCase1_audioH263CIF30fps.avi",
+            audioCodec,
+            recVideoCodec) == 0);
+
+        assert(fileRecorder.IsRecording());
+
+        WebRtc_UWord32 videoReadSize = static_cast<WebRtc_UWord32>( (videoCodec.width * videoCodec.height * 3.0) / 2.0);
+
+        webrtc::VideoFrame videoFrame;
+        videoFrame.VerifyAndAllocate(videoReadSize);
+
+        int  videoFrameCount   = 0;
+        int  audioFrameCount   = 0;
+        bool audioNotDone = true;
+        bool videoNotDone =    true;
+        AudioFrame audioFrame;
+
+        while( audioNotDone || videoNotDone)
+        {
+            if(filePlayer.TimeUntilNextVideoFrame() <= 0)
+            {
+                if(filePlayer.GetVideoFromFile( videoFrame) != 0)
+                {
+                    break;
+                }
+                videoFrameCount++;
+                videoNotDone = ( videoFrame.Length() > 0);
+                if( videoNotDone)
+                {
+                    assert(fileRecorder.RecordVideoToFile(videoFrame) == 0);
+                }
+            }
+
+            WebRtc_UWord32 decodedDataLengthInSamples;
+            if( 0 != filePlayer.Get10msAudioFromFile( audioFrame.data_, decodedDataLengthInSamples, audioCodec.plfreq))
+            {
+                audioNotDone = false;
+
+            } else
+            {
+                ::Sleep(5);
+                audioFrame.sample_rate_hz_ = filePlayer.Frequency();
+                audioFrame.samples_per_channel_ = (WebRtc_UWord16)decodedDataLengthInSamples;
+                assert(0 == fileRecorder.RecordAudioToFile(audioFrame));
+
+                audioFrameCount++;
+            }
+        }
+        assert(videoFrameCount == 135);
+        assert(audioFrameCount == 446); // we will start & stop with a video frame
+
+        assert(fileRecorder.StopRecording() == 0);
+        assert( !fileRecorder.IsRecording());
+        printf("\tGenerated ./tmpAviFileTestCase1_audioH263CIF30fps.avi\n\n");
+    }
+
+
+    printf("\nTEST completed.\n");
+
+    Trace::ReturnTrace();
+    return 0;
+}
diff --git a/src/modules/video_capture/OWNERS b/src/modules/video_capture/OWNERS
new file mode 100644
index 0000000..9034747
--- /dev/null
+++ b/src/modules/video_capture/OWNERS
@@ -0,0 +1,4 @@
+mallinath@webrtc.org
+mflodman@webrtc.org
+perkj@webrtc.org
+wu@webrtc.org
\ No newline at end of file
diff --git a/src/modules/video_capture/main/interface/video_capture.h b/src/modules/video_capture/main/interface/video_capture.h
new file mode 100644
index 0000000..43380ec
--- /dev/null
+++ b/src/modules/video_capture/main/interface/video_capture.h
@@ -0,0 +1,164 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_INTERFACE_VIDEO_CAPTURE_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_INTERFACE_VIDEO_CAPTURE_H_
+
+#include "modules/interface/module.h"
+#include "modules/video_capture/main/interface/video_capture_defines.h"
+
+namespace webrtc {
+
+#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
+WebRtc_Word32 SetCaptureAndroidVM(void* javaVM, void* javaContext);
+#endif
+
+class VideoCaptureModule: public RefCountedModule {
+ public:
+  // Interface for receiving information about available camera devices.
+  class DeviceInfo {
+   public:
+    virtual WebRtc_UWord32 NumberOfDevices() = 0;
+
+    // Returns the available capture devices.
+    // deviceNumber   - Index of capture device.
+    // deviceNameUTF8 - Friendly name of the capture device.
+    // deviceUniqueIdUTF8 - Unique name of the capture device if it exist.
+    //                      Otherwise same as deviceNameUTF8.
+    // productUniqueIdUTF8 - Unique product id if it exist.
+    //                       Null terminated otherwise.
+    virtual WebRtc_Word32 GetDeviceName(
+        WebRtc_UWord32 deviceNumber,
+        char* deviceNameUTF8,
+        WebRtc_UWord32 deviceNameLength,
+        char* deviceUniqueIdUTF8,
+        WebRtc_UWord32 deviceUniqueIdUTF8Length,
+        char* productUniqueIdUTF8 = 0,
+        WebRtc_UWord32 productUniqueIdUTF8Length = 0) = 0;
+
+
+    // Returns the number of capabilities this device.
+    virtual WebRtc_Word32 NumberOfCapabilities(
+        const char* deviceUniqueIdUTF8) = 0;
+
+    // Gets the capabilities of the named device.
+    virtual WebRtc_Word32 GetCapability(
+        const char* deviceUniqueIdUTF8,
+        const WebRtc_UWord32 deviceCapabilityNumber,
+        VideoCaptureCapability& capability) = 0;
+
+    // Gets clockwise angle the captured frames should be rotated in order
+    // to be displayed correctly on a normally rotated display.
+    virtual WebRtc_Word32 GetOrientation(
+        const char* deviceUniqueIdUTF8,
+        VideoCaptureRotation& orientation) = 0;
+
+    // Gets the capability that best matches the requested width, height and
+    // frame rate.
+    // Returns the deviceCapabilityNumber on success.
+    virtual WebRtc_Word32 GetBestMatchedCapability(
+        const char* deviceUniqueIdUTF8,
+        const VideoCaptureCapability& requested,
+        VideoCaptureCapability& resulting) = 0;
+
+     // Display OS /capture device specific settings dialog
+    virtual WebRtc_Word32 DisplayCaptureSettingsDialogBox(
+        const char* deviceUniqueIdUTF8,
+        const char* dialogTitleUTF8,
+        void* parentWindow,
+        WebRtc_UWord32 positionX,
+        WebRtc_UWord32 positionY) = 0;
+
+    virtual ~DeviceInfo() {}
+  };
+
+  class VideoCaptureEncodeInterface {
+   public:
+    virtual WebRtc_Word32 ConfigureEncoder(const VideoCodec& codec,
+                                           WebRtc_UWord32 maxPayloadSize) = 0;
+    // Inform the encoder about the new target bit rate.
+    //  - newBitRate       : New target bit rate in Kbit/s.
+    //  - frameRate        : The target frame rate.
+    virtual WebRtc_Word32 SetRates(WebRtc_Word32 newBitRate,
+                                   WebRtc_Word32 frameRate) = 0;
+    // Inform the encoder about the packet loss and the round-trip time.
+    //   - packetLoss   : Fraction lost
+    //                    (loss rate in percent = 100 * packetLoss / 255).
+    //   - rtt          : Round-trip time in milliseconds.
+    virtual WebRtc_Word32 SetChannelParameters(WebRtc_UWord32 packetLoss,
+                                               int rtt) = 0;
+
+    // Encode the next frame as key frame.
+    virtual WebRtc_Word32 EncodeFrameType(const FrameType type) = 0;
+  protected:
+    virtual ~VideoCaptureEncodeInterface() {
+    }
+  };
+
+  //   Register capture data callback
+  virtual WebRtc_Word32 RegisterCaptureDataCallback(
+      VideoCaptureDataCallback& dataCallback) = 0;
+
+  //  Remove capture data callback
+  virtual WebRtc_Word32 DeRegisterCaptureDataCallback() = 0;
+
+  // Register capture callback.
+  virtual WebRtc_Word32 RegisterCaptureCallback(
+      VideoCaptureFeedBack& callBack) = 0;
+
+  //  Remove capture callback.
+  virtual WebRtc_Word32 DeRegisterCaptureCallback() = 0;
+
+  // Start capture device
+  virtual WebRtc_Word32 StartCapture(
+      const VideoCaptureCapability& capability) = 0;
+
+  virtual WebRtc_Word32 StopCapture() = 0;
+
+  // Send an image when the capture device is not running.
+  virtual WebRtc_Word32 StartSendImage(const VideoFrame& videoFrame,
+                                       WebRtc_Word32 frameRate = 1) = 0;
+
+  virtual WebRtc_Word32 StopSendImage() = 0;
+
+  // Returns the name of the device used by this module.
+  virtual const char* CurrentDeviceName() const = 0;
+
+  // Returns true if the capture device is running
+  virtual bool CaptureStarted() = 0;
+
+  // Gets the current configuration.
+  virtual WebRtc_Word32 CaptureSettings(VideoCaptureCapability& settings) = 0;
+
+  virtual WebRtc_Word32 SetCaptureDelay(WebRtc_Word32 delayMS) = 0;
+
+  // Returns the current CaptureDelay. Only valid when the camera is running.
+  virtual WebRtc_Word32 CaptureDelay() = 0;
+
+  // Set the rotation of the captured frames.
+  // If the rotation is set to the same as returned by
+  // DeviceInfo::GetOrientation the captured frames are
+  // displayed correctly if rendered.
+  virtual WebRtc_Word32 SetCaptureRotation(VideoCaptureRotation rotation) = 0;
+
+  // Gets a pointer to an encode interface if the capture device supports the
+  // requested type and size.  NULL otherwise.
+  virtual VideoCaptureEncodeInterface* GetEncodeInterface(
+      const VideoCodec& codec) = 0;
+
+  virtual WebRtc_Word32 EnableFrameRateCallback(const bool enable) = 0;
+  virtual WebRtc_Word32 EnableNoPictureAlarm(const bool enable) = 0;
+
+protected:
+  virtual ~VideoCaptureModule() {};
+};
+
+} // namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_INTERFACE_VIDEO_CAPTURE_H_
diff --git a/src/modules/video_capture/main/interface/video_capture_defines.h b/src/modules/video_capture/main/interface/video_capture_defines.h
new file mode 100644
index 0000000..72188df
--- /dev/null
+++ b/src/modules/video_capture/main/interface/video_capture_defines.h
@@ -0,0 +1,155 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_INTERFACE_VIDEO_CAPTURE_DEFINES_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_INTERFACE_VIDEO_CAPTURE_DEFINES_H_
+
+// Includes
+#include "typedefs.h"
+#include "modules/interface/module_common_types.h"
+
+namespace webrtc
+{
+// Defines
+#ifndef NULL
+    #define NULL    0
+#endif
+
+enum {kVideoCaptureUniqueNameLength =1024}; //Max unique capture device name lenght
+enum {kVideoCaptureDeviceNameLength =256}; //Max capture device name lenght
+enum {kVideoCaptureProductIdLength =128}; //Max product id length
+
+// Enums
+enum VideoCaptureRotation
+{
+    kCameraRotate0 = 0,
+    kCameraRotate90 = 5,
+    kCameraRotate180 = 10,
+    kCameraRotate270 = 15
+};
+
+struct VideoCaptureCapability
+{
+    WebRtc_Word32 width;
+    WebRtc_Word32 height;
+    WebRtc_Word32 maxFPS;
+    WebRtc_Word32 expectedCaptureDelay;
+    RawVideoType rawType;
+    VideoCodecType codecType;
+    bool interlaced;
+
+    VideoCaptureCapability()
+    {
+        width = 0;
+        height = 0;
+        maxFPS = 0;
+        expectedCaptureDelay = 0;
+        rawType = kVideoUnknown;
+        codecType = kVideoCodecUnknown;
+        interlaced = false;
+    }
+    ;
+    bool operator!=(const VideoCaptureCapability &other) const
+    {
+        if (width != other.width)
+            return true;
+        if (height != other.height)
+            return true;
+        if (maxFPS != other.maxFPS)
+            return true;
+        if (rawType != other.rawType)
+            return true;
+        if (codecType != other.codecType)
+            return true;
+        if (interlaced != other.interlaced)
+            return true;
+        return false;
+    }
+    bool operator==(const VideoCaptureCapability &other) const
+    {
+        return !operator!=(other);
+    }
+};
+
+enum VideoCaptureAlarm
+{
+    Raised = 0,
+    Cleared = 1
+};
+
+// VideoFrameI420 doesn't take the ownership of the buffer.
+// It's mostly used to group the parameters for external capture.
+struct VideoFrameI420
+{
+  VideoFrameI420() {
+    y_plane = NULL;
+    u_plane = NULL;
+    v_plane = NULL;
+    y_pitch = 0;
+    u_pitch = 0;
+    v_pitch = 0;
+    width = 0;
+    height = 0;
+  }
+
+  unsigned char* y_plane;
+  unsigned char* u_plane;
+  unsigned char* v_plane;
+
+  int y_pitch;
+  int u_pitch;
+  int v_pitch;
+
+  unsigned short width;
+  unsigned short height;
+};
+
+/* External Capture interface. Returned by Create
+ and implemented by the capture module.
+ */
+class VideoCaptureExternal
+{
+public:
+    virtual WebRtc_Word32 IncomingFrame(WebRtc_UWord8* videoFrame,
+                                        WebRtc_Word32 videoFrameLength,
+                                        const VideoCaptureCapability& frameInfo,
+                                        WebRtc_Word64 captureTime = 0) = 0;
+    virtual WebRtc_Word32 IncomingFrameI420(const VideoFrameI420& video_frame,
+                                            WebRtc_Word64 captureTime = 0) = 0;
+protected:
+    ~VideoCaptureExternal() {}
+};
+
+// Callback class to be implemented by module user
+class VideoCaptureDataCallback
+{
+public:
+    virtual void OnIncomingCapturedFrame(const WebRtc_Word32 id,
+                                         VideoFrame& videoFrame,
+                                         VideoCodecType codecType) = 0;
+    virtual void OnCaptureDelayChanged(const WebRtc_Word32 id,
+                                       const WebRtc_Word32 delay) = 0;
+protected:
+    virtual ~VideoCaptureDataCallback(){}
+};
+
+class VideoCaptureFeedBack
+{
+public:
+    virtual void OnCaptureFrameRate(const WebRtc_Word32 id,
+                                    const WebRtc_UWord32 frameRate) = 0;
+    virtual void OnNoPictureAlarm(const WebRtc_Word32 id,
+                                  const VideoCaptureAlarm alarm) = 0;
+protected:
+    virtual ~VideoCaptureFeedBack(){}
+};
+
+} //namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_INTERFACE_VIDEO_CAPTURE_DEFINES_H_
diff --git a/src/modules/video_capture/main/interface/video_capture_factory.h b/src/modules/video_capture/main/interface/video_capture_factory.h
new file mode 100644
index 0000000..5a8f741
--- /dev/null
+++ b/src/modules/video_capture/main/interface/video_capture_factory.h
@@ -0,0 +1,49 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This file contains interfaces used for creating the VideoCaptureModule
+// and DeviceInfo.
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_INTERFACE_VIDEO_CAPTURE_FACTORY_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_INTERFACE_VIDEO_CAPTURE_FACTORY_H_
+
+#include "modules/video_capture/main/interface/video_capture.h"
+
+namespace webrtc {
+
+class VideoCaptureFactory {
+ public:
+  // Create a video capture module object
+  // id - unique identifier of this video capture module object.
+  // deviceUniqueIdUTF8 - name of the device.
+  //                      Available names can be found by using GetDeviceName
+  static VideoCaptureModule* Create(const WebRtc_Word32 id,
+                                    const char* deviceUniqueIdUTF8);
+
+  // Create a video capture module object used for external capture.
+  // id - unique identifier of this video capture module object
+  // externalCapture - [out] interface to call when a new frame is captured.
+  static VideoCaptureModule* Create(const WebRtc_Word32 id,
+                                    VideoCaptureExternal*& externalCapture);
+
+  static VideoCaptureModule::DeviceInfo* CreateDeviceInfo(
+      const WebRtc_Word32 id);
+
+#ifdef WEBRTC_ANDROID
+  static WebRtc_Word32 SetAndroidObjects(void* javaVM, void* javaContext);
+#endif
+
+ private:
+  ~VideoCaptureFactory();
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_INTERFACE_VIDEO_CAPTURE_FACTORY_H_
diff --git a/src/modules/video_capture/main/source/External/device_info_external.cc b/src/modules/video_capture/main/source/External/device_info_external.cc
new file mode 100644
index 0000000..ab70bda
--- /dev/null
+++ b/src/modules/video_capture/main/source/External/device_info_external.cc
@@ -0,0 +1,53 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "../device_info_impl.h"
+#include "../video_capture_impl.h"
+
+namespace webrtc {
+
+namespace videocapturemodule {
+
+class ExternalDeviceInfo : public DeviceInfoImpl {
+ public:
+  ExternalDeviceInfo(const WebRtc_Word32 id)
+      : DeviceInfoImpl(id) {
+  }
+  virtual ~ExternalDeviceInfo() {}
+  virtual WebRtc_UWord32 NumberOfDevices() { return 0; }
+  virtual WebRtc_Word32 DisplayCaptureSettingsDialogBox(
+      const char* /*deviceUniqueIdUTF8*/,
+      const char* /*dialogTitleUTF8*/,
+      void* /*parentWindow*/,
+      WebRtc_UWord32 /*positionX*/,
+      WebRtc_UWord32 /*positionY*/) { return -1; }
+  virtual WebRtc_Word32 GetDeviceName(
+      WebRtc_UWord32 deviceNumber,
+      char* deviceNameUTF8,
+      WebRtc_UWord32 deviceNameLength,
+      char* deviceUniqueIdUTF8,
+      WebRtc_UWord32 deviceUniqueIdUTF8Length,
+      char* productUniqueIdUTF8=0,
+      WebRtc_UWord32 productUniqueIdUTF8Length=0) {
+    return -1;
+  }
+  virtual WebRtc_Word32 CreateCapabilityMap(
+      const char* deviceUniqueIdUTF8) { return 0; }
+  virtual WebRtc_Word32 Init() { return 0; }
+};
+
+VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo(
+    const WebRtc_Word32 id) {
+  return new ExternalDeviceInfo(id);
+}
+
+}  // namespace videocapturemodule
+
+}  // namespace webrtc
diff --git a/src/modules/video_capture/main/source/External/video_capture_external.cc b/src/modules/video_capture/main/source/External/video_capture_external.cc
new file mode 100644
index 0000000..dcc59aa
--- /dev/null
+++ b/src/modules/video_capture/main/source/External/video_capture_external.cc
@@ -0,0 +1,28 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "../video_capture_impl.h"
+#include "ref_count.h"
+
+namespace webrtc {
+
+namespace videocapturemodule {
+
+VideoCaptureModule* VideoCaptureImpl::Create(
+    const WebRtc_Word32 id,
+    const char* deviceUniqueIdUTF8) {
+  RefCountImpl<VideoCaptureImpl>* implementation =
+      new RefCountImpl<VideoCaptureImpl>(id);
+  return implementation;
+}
+
+}  // namespace videocapturemodule
+
+}  // namespace webrtc
diff --git a/src/modules/video_capture/main/source/Linux/device_info_linux.cc b/src/modules/video_capture/main/source/Linux/device_info_linux.cc
new file mode 100644
index 0000000..653ee16
--- /dev/null
+++ b/src/modules/video_capture/main/source/Linux/device_info_linux.cc
@@ -0,0 +1,330 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "device_info_linux.h"
+
+#include <errno.h>
+#include <unistd.h>
+#include <sys/ioctl.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+//v4l includes
+#include <linux/videodev2.h>
+
+#include "ref_count.h"
+#include "trace.h"
+
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+VideoCaptureModule::DeviceInfo*
+VideoCaptureImpl::CreateDeviceInfo(const WebRtc_Word32 id)
+{
+    videocapturemodule::DeviceInfoLinux *deviceInfo =
+                    new videocapturemodule::DeviceInfoLinux(id);
+    if (!deviceInfo)
+    {
+        deviceInfo = NULL;
+    }
+
+    return deviceInfo;
+}
+
+DeviceInfoLinux::DeviceInfoLinux(const WebRtc_Word32 id)
+    : DeviceInfoImpl(id)
+{
+}
+
+WebRtc_Word32 DeviceInfoLinux::Init()
+{
+    return 0;
+}
+
+DeviceInfoLinux::~DeviceInfoLinux()
+{
+}
+
+WebRtc_UWord32 DeviceInfoLinux::NumberOfDevices()
+{
+    WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideoCapture, _id, "%s", __FUNCTION__);
+
+    WebRtc_UWord32 count = 0;
+    char device[20];
+    int fd = -1;
+
+    /* detect /dev/video [0-63]VideoCaptureModule entries */
+    for (int n = 0; n < 64; n++)
+    {
+        sprintf(device, "/dev/video%d", n);
+        if ((fd = open(device, O_RDONLY)) != -1)
+        {
+            close(fd);
+            count++;
+        }
+    }
+
+    return count;
+}
+
+WebRtc_Word32 DeviceInfoLinux::GetDeviceName(
+                                         WebRtc_UWord32 deviceNumber,
+                                         char* deviceNameUTF8,
+                                         WebRtc_UWord32 deviceNameLength,
+                                         char* deviceUniqueIdUTF8,
+                                         WebRtc_UWord32 deviceUniqueIdUTF8Length,
+                                         char* /*productUniqueIdUTF8*/,
+                                         WebRtc_UWord32 /*productUniqueIdUTF8Length*/)
+{
+    WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideoCapture, _id, "%s", __FUNCTION__);
+
+    // Travel through /dev/video [0-63]
+    WebRtc_UWord32 count = 0;
+    char device[20];
+    int fd = -1;
+    bool found = false;
+    for (int n = 0; n < 64; n++)
+    {
+        sprintf(device, "/dev/video%d", n);
+        if ((fd = open(device, O_RDONLY)) != -1)
+        {
+            if (count == deviceNumber) {
+                // Found the device
+                found = true;
+                break;
+            } else {
+                close(fd);
+                count++;
+            }
+        }
+    }
+
+    if (!found)
+        return -1;
+
+    // query device capabilities
+    struct v4l2_capability cap;
+    if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "error in querying the device capability for device %s. errno = %d",
+                   device, errno);
+        close(fd);
+        return -1;
+    }
+
+    close(fd);
+
+    char cameraName[64];
+    memset(deviceNameUTF8, 0, deviceNameLength);
+    memcpy(cameraName, cap.card, sizeof(cap.card));
+
+    if (deviceNameLength >= strlen(cameraName))
+    {
+        memcpy(deviceNameUTF8, cameraName, strlen(cameraName));
+    }
+    else
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "buffer passed is too small");
+        return -1;
+    }
+
+    if (cap.bus_info[0] != 0) // may not available in all drivers
+    {
+        // copy device id 
+        if (deviceUniqueIdUTF8Length >= strlen((const char*) cap.bus_info))
+        {
+            memset(deviceUniqueIdUTF8, 0, deviceUniqueIdUTF8Length);
+            memcpy(deviceUniqueIdUTF8, cap.bus_info,
+                   strlen((const char*) cap.bus_info));
+        }
+        else
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                       "buffer passed is too small");
+            return -1;
+        }
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 DeviceInfoLinux::CreateCapabilityMap(
+                                        const char* deviceUniqueIdUTF8)
+{
+    int fd;
+    char device[32];
+    bool found = false;
+
+    const WebRtc_Word32 deviceUniqueIdUTF8Length =
+                            (WebRtc_Word32) strlen((char*) deviceUniqueIdUTF8);
+    if (deviceUniqueIdUTF8Length > kVideoCaptureUniqueNameLength)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "Device name too long");
+        return -1;
+    }
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+               "CreateCapabilityMap called for device %s", deviceUniqueIdUTF8);
+
+    /* detect /dev/video [0-63] entries */
+    for (int n = 0; n < 64; ++n)
+    {
+        sprintf(device, "/dev/video%d", n);
+        fd = open(device, O_RDONLY);
+        if (fd == -1)
+          continue;
+
+        // query device capabilities
+        struct v4l2_capability cap;
+        if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0)
+        {
+            if (cap.bus_info[0] != 0)
+            {
+                if (strncmp((const char*) cap.bus_info,
+                            (const char*) deviceUniqueIdUTF8,
+                            strlen((const char*) deviceUniqueIdUTF8)) == 0) //match with device id
+                {
+                    found = true;
+                    break; // fd matches with device unique id supplied
+                }
+            }
+            else //match for device name
+            {
+                if (IsDeviceNameMatches((const char*) cap.card,
+                                        (const char*) deviceUniqueIdUTF8))
+                {
+                    found = true;
+                    break;
+                }
+            }
+        }
+        close(fd); // close since this is not the matching device
+    }
+
+    if (!found)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "no matching device found");
+        return -1;
+    }
+
+    // now fd will point to the matching device
+    // reset old capability map
+    MapItem* item = NULL;
+    while ((item = _captureCapabilities.Last()))
+    {
+        delete static_cast<VideoCaptureCapability*> (item->GetItem());
+        _captureCapabilities.Erase(item);
+    }
+
+    int size = FillCapabilityMap(fd);
+    close(fd);
+
+    // Store the new used device name
+    _lastUsedDeviceNameLength = deviceUniqueIdUTF8Length;
+    _lastUsedDeviceName = (char*) realloc(_lastUsedDeviceName,
+                                                   _lastUsedDeviceNameLength + 1);
+    memcpy(_lastUsedDeviceName, deviceUniqueIdUTF8, _lastUsedDeviceNameLength + 1);
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id, "CreateCapabilityMap %d",
+               _captureCapabilities.Size());
+
+    return size;
+}
+
+bool DeviceInfoLinux::IsDeviceNameMatches(const char* name,
+                                          const char* deviceUniqueIdUTF8)
+{
+    if (strncmp(deviceUniqueIdUTF8, name, strlen(name)) == 0)
+            return true;
+    return false;
+}
+
+WebRtc_Word32 DeviceInfoLinux::FillCapabilityMap(int fd)
+{
+
+    // set image format
+    struct v4l2_format video_fmt;
+    memset(&video_fmt, 0, sizeof(struct v4l2_format));
+
+    video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    video_fmt.fmt.pix.sizeimage = 0;
+
+    int totalFmts = 3;
+    unsigned int videoFormats[] = {
+        V4L2_PIX_FMT_MJPEG,
+        V4L2_PIX_FMT_YUV420,
+        V4L2_PIX_FMT_YUYV };
+
+    int sizes = 13;
+    unsigned int size[][2] = { { 128, 96 }, { 160, 120 }, { 176, 144 },
+                               { 320, 240 }, { 352, 288 }, { 640, 480 },
+                               { 704, 576 }, { 800, 600 }, { 960, 720 },
+                               { 1280, 720 }, { 1024, 768 }, { 1440, 1080 },
+                               { 1920, 1080 } };
+
+    int index = 0;
+    for (int fmts = 0; fmts < totalFmts; fmts++)
+    {
+        for (int i = 0; i < sizes; i++)
+        {
+            video_fmt.fmt.pix.pixelformat = videoFormats[fmts];
+            video_fmt.fmt.pix.width = size[i][0];
+            video_fmt.fmt.pix.height = size[i][1];
+
+            if (ioctl(fd, VIDIOC_TRY_FMT, &video_fmt) >= 0)
+            {
+                if ((video_fmt.fmt.pix.width == size[i][0])
+                    && (video_fmt.fmt.pix.height == size[i][1]))
+                {
+                    VideoCaptureCapability *cap = new VideoCaptureCapability();
+                    cap->width = video_fmt.fmt.pix.width;
+                    cap->height = video_fmt.fmt.pix.height;
+                    cap->expectedCaptureDelay = 120;
+                    if (videoFormats[fmts] == V4L2_PIX_FMT_YUYV)
+                    {
+                        cap->rawType = kVideoYUY2;
+                    }
+                    else if (videoFormats[fmts] == V4L2_PIX_FMT_MJPEG)
+                    {
+                        cap->rawType = kVideoMJPEG;
+                    }
+
+                    // get fps of current camera mode
+                    // V4l2 does not have a stable method of knowing so we just guess.
+                    if(cap->width >= 800 && cap->rawType != kVideoMJPEG)
+                    {
+                        cap->maxFPS = 15;
+                    }
+                    else
+                    {
+                        cap->maxFPS = 30;
+                    }
+
+                    _captureCapabilities.Insert(index, cap);
+                    index++;
+                    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                               "Camera capability, width:%d height:%d type:%d fps:%d",
+                               cap->width, cap->height, cap->rawType, cap->maxFPS);
+                }
+            }
+        }
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id, "CreateCapabilityMap %d",
+               _captureCapabilities.Size());
+    return _captureCapabilities.Size();
+}
+
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/src/modules/video_capture/main/source/Linux/device_info_linux.h b/src/modules/video_capture/main/source/Linux/device_info_linux.h
new file mode 100644
index 0000000..2826fe8
--- /dev/null
+++ b/src/modules/video_capture/main/source/Linux/device_info_linux.h
@@ -0,0 +1,53 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_DEVICE_INFO_LINUX_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_DEVICE_INFO_LINUX_H_
+
+#include "../video_capture_impl.h"
+#include "../device_info_impl.h"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+class DeviceInfoLinux: public DeviceInfoImpl
+{
+public:
+    DeviceInfoLinux(const WebRtc_Word32 id);
+    virtual ~DeviceInfoLinux();
+    virtual WebRtc_UWord32 NumberOfDevices();
+    virtual WebRtc_Word32 GetDeviceName(
+        WebRtc_UWord32 deviceNumber,
+        char* deviceNameUTF8,
+        WebRtc_UWord32 deviceNameLength,
+        char* deviceUniqueIdUTF8,
+        WebRtc_UWord32 deviceUniqueIdUTF8Length,
+        char* productUniqueIdUTF8=0,
+        WebRtc_UWord32 productUniqueIdUTF8Length=0);
+    /*
+    * Fills the membervariable _captureCapabilities with capabilites for the given device name.
+    */
+    virtual WebRtc_Word32 CreateCapabilityMap (const char* deviceUniqueIdUTF8);
+    virtual WebRtc_Word32 DisplayCaptureSettingsDialogBox(
+        const char* /*deviceUniqueIdUTF8*/,
+        const char* /*dialogTitleUTF8*/,
+        void* /*parentWindow*/,
+        WebRtc_UWord32 /*positionX*/,
+        WebRtc_UWord32 /*positionY*/) { return -1;}
+    WebRtc_Word32 FillCapabilityMap(int fd);
+    WebRtc_Word32 Init();
+private:
+
+    bool IsDeviceNameMatches(const char* name, const char* deviceUniqueIdUTF8);
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_DEVICE_INFO_LINUX_H_
diff --git a/src/modules/video_capture/main/source/Linux/video_capture_linux.cc b/src/modules/video_capture/main/source/Linux/video_capture_linux.cc
new file mode 100644
index 0000000..c395fa7
--- /dev/null
+++ b/src/modules/video_capture/main/source/Linux/video_capture_linux.cc
@@ -0,0 +1,482 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <sys/ioctl.h>
+#include <unistd.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <linux/videodev2.h>
+#include <errno.h>
+#include <stdio.h>
+#include <sys/mman.h>
+#include <string.h>
+
+#include <iostream>
+#include <new>
+
+#include "ref_count.h"
+#include "trace.h"
+#include "thread_wrapper.h"
+#include "critical_section_wrapper.h"
+#include "video_capture_linux.h"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+VideoCaptureModule* VideoCaptureImpl::Create(const WebRtc_Word32 id,
+                                             const char* deviceUniqueId)
+{
+    RefCountImpl<videocapturemodule::VideoCaptureModuleV4L2>* implementation =
+        new RefCountImpl<videocapturemodule::VideoCaptureModuleV4L2>(id);
+
+    if (!implementation || implementation->Init(deviceUniqueId) != 0)
+    {
+        delete implementation;
+        implementation = NULL;
+    }
+
+    return implementation;
+}
+
+VideoCaptureModuleV4L2::VideoCaptureModuleV4L2(const WebRtc_Word32 id)
+    : VideoCaptureImpl(id), 
+      _captureThread(NULL),
+      _captureCritSect(CriticalSectionWrapper::CreateCriticalSection()),
+      _deviceId(-1), 
+      _deviceFd(-1),
+      _buffersAllocatedByDevice(-1),
+      _currentWidth(-1), 
+      _currentHeight(-1),
+      _currentFrameRate(-1), 
+      _captureStarted(false),
+      _captureVideoType(kVideoI420), 
+      _pool(NULL)
+{
+}
+
+WebRtc_Word32 VideoCaptureModuleV4L2::Init(const char* deviceUniqueIdUTF8)
+{
+    int len = strlen((const char*) deviceUniqueIdUTF8);
+    _deviceUniqueId = new (std::nothrow) char[len + 1];
+    if (_deviceUniqueId)
+    {
+        memcpy(_deviceUniqueId, deviceUniqueIdUTF8, len + 1);
+    }
+
+    int fd;
+    char device[32];
+    bool found = false;
+
+    /* detect /dev/video [0-63] entries */
+    int n;
+    for (n = 0; n < 64; n++)
+    {
+        sprintf(device, "/dev/video%d", n);
+        if ((fd = open(device, O_RDONLY)) != -1)
+        {
+            // query device capabilities
+            struct v4l2_capability cap;
+            if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0)
+            {
+                if (cap.bus_info[0] != 0)
+                {
+                    if (strncmp((const char*) cap.bus_info,
+                                (const char*) deviceUniqueIdUTF8,
+                                strlen((const char*) deviceUniqueIdUTF8)) == 0) //match with device id
+                    {
+                        close(fd);
+                        found = true;
+                        break; // fd matches with device unique id supplied
+                    }
+                }
+            }
+            close(fd); // close since this is not the matching device
+        }
+    }
+    if (!found)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "no matching device found");
+        return -1;
+    }
+    _deviceId = n; //store the device id
+    return 0;
+}
+
+VideoCaptureModuleV4L2::~VideoCaptureModuleV4L2()
+{
+    StopCapture();
+    if (_captureCritSect)
+    {
+        delete _captureCritSect;
+    }
+    if (_deviceFd != -1)
+      close(_deviceFd);
+}
+
+WebRtc_Word32 VideoCaptureModuleV4L2::StartCapture(
+    const VideoCaptureCapability& capability)
+{
+    if (_captureStarted)
+    {
+        if (capability.width == _currentWidth &&
+            capability.height == _currentHeight &&
+            _captureVideoType == capability.rawType)
+        {
+            return 0;
+        }
+        else
+        {
+            StopCapture();
+        }
+    }
+
+    CriticalSectionScoped cs(_captureCritSect);
+    //first open /dev/video device
+    char device[20];
+    sprintf(device, "/dev/video%d", (int) _deviceId);
+
+    if ((_deviceFd = open(device, O_RDWR | O_NONBLOCK, 0)) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "error in opening %s errono = %d", device, errno);
+        return -1;
+    }
+
+    // Supported video formats in preferred order.
+    // If the requested resolution is larger than VGA, we prefer MJPEG. Go for
+    // I420 otherwise.
+    const int nFormats = 3;
+    unsigned int fmts[nFormats];
+    if (capability.width > 640 || capability.height > 480) {
+        fmts[0] = V4L2_PIX_FMT_MJPEG;
+        fmts[1] = V4L2_PIX_FMT_YUV420;
+        fmts[2] = V4L2_PIX_FMT_YUYV;
+    } else {
+        fmts[0] = V4L2_PIX_FMT_YUV420;
+        fmts[1] = V4L2_PIX_FMT_YUYV;
+        fmts[2] = V4L2_PIX_FMT_MJPEG;
+    }
+
+    struct v4l2_format video_fmt;
+    memset(&video_fmt, 0, sizeof(struct v4l2_format));
+    video_fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    video_fmt.fmt.pix.sizeimage = 0;
+    video_fmt.fmt.pix.width = capability.width;
+    video_fmt.fmt.pix.height = capability.height;
+
+    bool formatMatch = false;
+    for (int i = 0; i < nFormats; i++)
+    {
+        video_fmt.fmt.pix.pixelformat = fmts[i];
+        if (ioctl(_deviceFd, VIDIOC_TRY_FMT, &video_fmt) < 0)
+        {
+            continue;
+        }
+        else
+        {
+            formatMatch = true;
+            break;
+        }
+    }
+    if (!formatMatch)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "no supporting video formats found");
+        return -1;
+    }
+
+    if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUYV)
+        _captureVideoType = kVideoYUY2;
+    else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_YUV420)
+        _captureVideoType = kVideoI420;
+    else if (video_fmt.fmt.pix.pixelformat == V4L2_PIX_FMT_MJPEG)
+        _captureVideoType = kVideoMJPEG;
+
+    //set format and frame size now
+    if (ioctl(_deviceFd, VIDIOC_S_FMT, &video_fmt) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "error in VIDIOC_S_FMT, errno = %d", errno);
+        return -1;
+    }
+
+    // initialize current width and height
+    _currentWidth = video_fmt.fmt.pix.width;
+    _currentHeight = video_fmt.fmt.pix.height;
+    _captureDelay = 120;
+
+    // Trying to set frame rate, before check driver capability.
+    bool driver_framerate_support = true;
+    struct v4l2_streamparm streamparms;
+    memset(&streamparms, 0, sizeof(streamparms));
+    streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    if (ioctl(_deviceFd, VIDIOC_G_PARM, &streamparms) < 0) {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "error in VIDIOC_G_PARM errno = %d", errno);
+        driver_framerate_support = false;
+      // continue
+    } else {
+      // check the capability flag is set to V4L2_CAP_TIMEPERFRAME.
+      if (streamparms.parm.capture.capability == V4L2_CAP_TIMEPERFRAME) {
+        // driver supports the feature. Set required framerate.
+        memset(&streamparms, 0, sizeof(streamparms));
+        streamparms.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        streamparms.parm.capture.timeperframe.numerator = 1;
+        streamparms.parm.capture.timeperframe.denominator = capability.maxFPS;
+        if (ioctl(_deviceFd, VIDIOC_S_PARM, &streamparms) < 0) {
+          WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "Failed to set the framerate. errno=%d", errno);
+          driver_framerate_support = false;
+        } else {
+          _currentFrameRate = capability.maxFPS;
+        }
+      }
+    }
+    // If driver doesn't support framerate control, need to hardcode.
+    // Hardcoding the value based on the frame size.
+    if (!driver_framerate_support) {
+      if(_currentWidth >= 800 && _captureVideoType != kVideoMJPEG) {
+        _currentFrameRate = 15;
+      } else {
+        _currentFrameRate = 30;
+      }
+    }
+
+    if (!AllocateVideoBuffers())
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "failed to allocate video capture buffers");
+        return -1;
+    }
+
+    //start capture thread;
+    if (!_captureThread)
+    {
+        _captureThread = ThreadWrapper::CreateThread(
+            VideoCaptureModuleV4L2::CaptureThread, this, kHighPriority);
+        unsigned int id;
+        _captureThread->Start(id);
+    }
+
+    // Needed to start UVC camera - from the uvcview application
+    enum v4l2_buf_type type;
+    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    if (ioctl(_deviceFd, VIDIOC_STREAMON, &type) == -1)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to turn on stream");
+        return -1;
+    }
+
+    _captureStarted = true;
+    return 0;
+}
+
+WebRtc_Word32 VideoCaptureModuleV4L2::StopCapture()
+{
+    if (_captureThread) {
+        // Make sure the capture thread stop stop using the critsect.
+        _captureThread->SetNotAlive();
+        if (_captureThread->Stop()) {
+            delete _captureThread;
+            _captureThread = NULL;
+        } else
+        {
+            // Couldn't stop the thread, leak instead of crash.
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                         "%s: could not stop capture thread", __FUNCTION__);
+            assert(!"could not stop capture thread");
+        }
+    }
+
+    CriticalSectionScoped cs(_captureCritSect);
+    if (_captureStarted)
+    {
+        _captureStarted = false;
+        _captureThread = NULL;
+
+        DeAllocateVideoBuffers();
+        close(_deviceFd);
+        _deviceFd = -1;
+    }
+
+    return 0;
+}
+
+//critical section protected by the caller
+
+bool VideoCaptureModuleV4L2::AllocateVideoBuffers()
+{
+    struct v4l2_requestbuffers rbuffer;
+    memset(&rbuffer, 0, sizeof(v4l2_requestbuffers));
+
+    rbuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    rbuffer.memory = V4L2_MEMORY_MMAP;
+    rbuffer.count = kNoOfV4L2Bufffers;
+
+    if (ioctl(_deviceFd, VIDIOC_REQBUFS, &rbuffer) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "Could not get buffers from device. errno = %d", errno);
+        return false;
+    }
+
+    if (rbuffer.count > kNoOfV4L2Bufffers)
+        rbuffer.count = kNoOfV4L2Bufffers;
+
+    _buffersAllocatedByDevice = rbuffer.count;
+
+    //Map the buffers
+    _pool = new Buffer[rbuffer.count];
+
+    for (unsigned int i = 0; i < rbuffer.count; i++)
+    {
+        struct v4l2_buffer buffer;
+        memset(&buffer, 0, sizeof(v4l2_buffer));
+        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        buffer.memory = V4L2_MEMORY_MMAP;
+        buffer.index = i;
+
+        if (ioctl(_deviceFd, VIDIOC_QUERYBUF, &buffer) < 0)
+        {
+            return false;
+        }
+
+        _pool[i].start = mmap(NULL, buffer.length, PROT_READ | PROT_WRITE, MAP_SHARED,
+                              _deviceFd, buffer.m.offset);
+
+        if (MAP_FAILED == _pool[i].start)
+        {
+            for (unsigned int j = 0; j < i; j++)
+                munmap(_pool[j].start, _pool[j].length);
+            return false;
+        }
+
+        _pool[i].length = buffer.length;
+
+        if (ioctl(_deviceFd, VIDIOC_QBUF, &buffer) < 0)
+        {
+            return false;
+        }
+    }
+    return true;
+}
+
+bool VideoCaptureModuleV4L2::DeAllocateVideoBuffers()
+{
+    // unmap buffers
+    for (int i = 0; i < _buffersAllocatedByDevice; i++)
+        munmap(_pool[i].start, _pool[i].length);
+
+    delete[] _pool;
+
+    // turn off stream
+    enum v4l2_buf_type type;
+    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+    if (ioctl(_deviceFd, VIDIOC_STREAMOFF, &type) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "VIDIOC_STREAMOFF error. errno: %d", errno);
+    }
+
+    return true;
+}
+
+bool VideoCaptureModuleV4L2::CaptureStarted()
+{
+    return _captureStarted;
+}
+
+bool VideoCaptureModuleV4L2::CaptureThread(void* obj)
+{
+    return static_cast<VideoCaptureModuleV4L2*> (obj)->CaptureProcess();
+}
+bool VideoCaptureModuleV4L2::CaptureProcess()
+{
+    int retVal = 0;
+    fd_set rSet;
+    struct timeval timeout;
+
+    _captureCritSect->Enter();
+
+    FD_ZERO(&rSet);
+    FD_SET(_deviceFd, &rSet);
+    timeout.tv_sec = 1;
+    timeout.tv_usec = 0;
+
+    retVal = select(_deviceFd + 1, &rSet, NULL, NULL, &timeout);
+    if (retVal < 0 && errno != EINTR) // continue if interrupted
+    {
+        // select failed
+        _captureCritSect->Leave();
+        return false;
+    }
+    else if (retVal == 0)
+    {
+        // select timed out
+        _captureCritSect->Leave();
+        return true;
+    }
+    else if (!FD_ISSET(_deviceFd, &rSet))
+    {
+        // not event on camera handle
+        _captureCritSect->Leave();
+        return true;
+    }
+
+    if (_captureStarted)
+    {
+        struct v4l2_buffer buf;
+        memset(&buf, 0, sizeof(struct v4l2_buffer));
+        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+        buf.memory = V4L2_MEMORY_MMAP;
+        // dequeue a buffer - repeat until dequeued properly!
+        while (ioctl(_deviceFd, VIDIOC_DQBUF, &buf) < 0)
+        {
+            if (errno != EINTR)
+            {
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                           "could not sync on a buffer on device %s", strerror(errno));
+                _captureCritSect->Leave();
+                return true;
+            }
+        }
+        VideoCaptureCapability frameInfo;
+        frameInfo.width = _currentWidth;
+        frameInfo.height = _currentHeight;
+        frameInfo.rawType = _captureVideoType;
+
+        // convert to to I420 if needed
+        IncomingFrame((unsigned char*) _pool[buf.index].start,
+                      buf.bytesused, frameInfo);
+        // enqueue the buffer again
+        if (ioctl(_deviceFd, VIDIOC_QBUF, &buf) == -1)
+        {
+            WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                       "Failed to enqueue capture buffer");
+        }
+    }
+    _captureCritSect->Leave();
+    usleep(0);
+    return true;
+}
+
+WebRtc_Word32 VideoCaptureModuleV4L2::CaptureSettings(VideoCaptureCapability& settings)
+{
+    settings.width = _currentWidth;
+    settings.height = _currentHeight;
+    settings.maxFPS = _currentFrameRate;
+    settings.rawType=_captureVideoType;
+
+    return 0;
+}
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/src/modules/video_capture/main/source/Linux/video_capture_linux.h b/src/modules/video_capture/main/source/Linux/video_capture_linux.h
new file mode 100644
index 0000000..e5f6ae7
--- /dev/null
+++ b/src/modules/video_capture/main/source/Linux/video_capture_linux.h
@@ -0,0 +1,64 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_VIDEO_CAPTURE_LINUX_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_VIDEO_CAPTURE_LINUX_H_
+
+#include "common_types.h"
+#include "../video_capture_impl.h"
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+class ThreadWrapper;
+namespace videocapturemodule
+{
+class VideoCaptureModuleV4L2: public VideoCaptureImpl
+{
+public:
+    VideoCaptureModuleV4L2(WebRtc_Word32 id);
+    virtual ~VideoCaptureModuleV4L2();
+    virtual WebRtc_Word32 Init(const char* deviceUniqueId);
+    virtual WebRtc_Word32 StartCapture(const VideoCaptureCapability& capability);
+    virtual WebRtc_Word32 StopCapture();
+    virtual bool CaptureStarted();
+    virtual WebRtc_Word32 CaptureSettings(VideoCaptureCapability& settings);
+
+private:
+    enum {kNoOfV4L2Bufffers=4};
+
+    static bool CaptureThread(void*);
+    bool CaptureProcess();
+    bool AllocateVideoBuffers();
+    bool DeAllocateVideoBuffers();
+
+    ThreadWrapper* _captureThread;
+    CriticalSectionWrapper* _captureCritSect;
+
+    WebRtc_Word32 _deviceId;
+    WebRtc_Word32 _deviceFd;
+
+    WebRtc_Word32 _buffersAllocatedByDevice;
+    WebRtc_Word32 _currentWidth;
+    WebRtc_Word32 _currentHeight;
+    WebRtc_Word32 _currentFrameRate;
+    bool _captureStarted;
+    RawVideoType _captureVideoType;
+    struct Buffer
+    {
+        void *start;
+        size_t length;
+    };
+    Buffer *_pool;
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_LINUX_VIDEO_CAPTURE_LINUX_H_
diff --git a/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit.h b/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit.h
new file mode 100644
index 0000000..7b5710c
--- /dev/null
+++ b/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit.h
@@ -0,0 +1,83 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_H_
+
+#import <QTKit/QTKit.h>
+
+#include <stdio.h>
+
+#include "../../video_capture_impl.h"
+#include "video_capture_qtkit_utility.h"
+#include "../../device_info_impl.h"
+
+
+// Forward declaraion
+@class VideoCaptureMacQTKitObjC;
+@class VideoCaptureMacQTKitInfoObjC;
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+
+class VideoCaptureMacQTKit : public VideoCaptureImpl
+{
+public:
+    VideoCaptureMacQTKit(const WebRtc_Word32 id);
+    virtual ~VideoCaptureMacQTKit();
+
+    /*
+    *   Create a video capture module object
+    *
+    *   id - unique identifier of this video capture module object
+    *   deviceUniqueIdUTF8 -  name of the device. Available names can be found
+    *       by using GetDeviceName
+    *   deviceUniqueIdUTF8Length - length of deviceUniqueIdUTF8
+    */
+    static void Destroy(VideoCaptureModule* module);
+
+    WebRtc_Word32 Init(const WebRtc_Word32 id,
+                       const char* deviceUniqueIdUTF8);
+
+
+    // Start/Stop
+    virtual WebRtc_Word32 StartCapture(
+        const VideoCaptureCapability& capability);
+    virtual WebRtc_Word32 StopCapture();
+
+    // Properties of the set device
+
+    virtual bool CaptureStarted();
+
+    WebRtc_Word32 CaptureSettings(VideoCaptureCapability& settings);
+
+protected:
+    // Help functions
+    WebRtc_Word32 SetCameraOutput();
+
+private:
+    VideoCaptureMacQTKitObjC*        _captureDevice;
+    VideoCaptureMacQTKitInfoObjC*    _captureInfo;
+    bool                    _isCapturing;
+    WebRtc_Word32            _id;
+    WebRtc_Word32            _captureWidth;
+    WebRtc_Word32            _captureHeight;
+    WebRtc_Word32            _captureFrameRate;
+    char                     _currentDeviceNameUTF8[MAX_NAME_LENGTH];
+    char                     _currentDeviceUniqueIdUTF8[MAX_NAME_LENGTH];
+    char                     _currentDeviceProductUniqueIDUTF8[MAX_NAME_LENGTH];
+    WebRtc_Word32            _frameCount;
+};
+}  // namespace videocapturemodule
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_H_
diff --git a/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit.mm b/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit.mm
new file mode 100644
index 0000000..93494d8
--- /dev/null
+++ b/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit.mm
@@ -0,0 +1,224 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_capture_qtkit.h"
+#import "video_capture_qtkit_objc.h"
+#import "video_capture_qtkit_info_objc.h"
+#include "trace.h"
+#include "critical_section_wrapper.h"
+#include "../../video_capture_config.h"
+
+namespace webrtc
+{
+
+namespace videocapturemodule
+{
+
+VideoCaptureMacQTKit::VideoCaptureMacQTKit(const WebRtc_Word32 id) :
+    VideoCaptureImpl(id),
+    _captureDevice(NULL),
+    _captureInfo(NULL),
+    _isCapturing(false),
+    _id(id),
+    _captureWidth(QTKIT_DEFAULT_WIDTH),
+    _captureHeight(QTKIT_DEFAULT_HEIGHT),
+    _captureFrameRate(QTKIT_DEFAULT_FRAME_RATE),
+    _frameCount(0)
+{
+
+    memset(_currentDeviceNameUTF8, 0, MAX_NAME_LENGTH);
+    memset(_currentDeviceUniqueIdUTF8, 0, MAX_NAME_LENGTH);
+    memset(_currentDeviceProductUniqueIDUTF8, 0, MAX_NAME_LENGTH);
+}
+
+VideoCaptureMacQTKit::~VideoCaptureMacQTKit()
+{
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                 "~VideoCaptureMacQTKit() called");
+    if(_captureDevice)
+    {
+        [_captureDevice stopCapture];
+        [_captureDevice release];
+    }
+
+    if(_captureInfo)
+    {
+        [_captureInfo release];
+    }
+}
+
+WebRtc_Word32 VideoCaptureMacQTKit::Init(
+    const WebRtc_Word32 id, const char* iDeviceUniqueIdUTF8)
+{
+    CriticalSectionScoped cs(&_apiCs);
+
+
+    const WebRtc_Word32 nameLength =
+        (WebRtc_Word32) strlen((char*)iDeviceUniqueIdUTF8);
+    if(nameLength>kVideoCaptureUniqueNameLength)
+        return -1;
+
+    // Store the device name
+    _deviceUniqueId = new char[nameLength+1];
+    memcpy(_deviceUniqueId, iDeviceUniqueIdUTF8,nameLength+1);
+
+    _captureDevice = [[VideoCaptureMacQTKitObjC alloc] init];
+    if(NULL == _captureDevice)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id,
+                     "Failed to create an instance of "
+                     "VideoCaptureMacQTKitObjC");
+        return -1;
+    }
+
+    if(-1 == [[_captureDevice registerOwner:this]intValue])
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id,
+                     "Failed to register owner for _captureDevice");
+        return -1;
+    }
+
+    if(0 == strcmp((char*)iDeviceUniqueIdUTF8, ""))
+    {
+        // the user doesn't want to set a capture device at this time
+        return 0;
+    }
+
+    _captureInfo = [[VideoCaptureMacQTKitInfoObjC alloc]init];
+    if(nil == _captureInfo)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id, "Failed to create an instance of VideoCaptureMacQTKitInfoObjC");
+        return -1;
+    }
+
+    int captureDeviceCount = [[_captureInfo getCaptureDeviceCount]intValue];
+    if(captureDeviceCount < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id,
+                     "No Capture Devices Present");
+        return -1;
+    }
+
+    const int NAME_LENGTH = 1024;
+    char deviceNameUTF8[1024] = "";
+    char deviceUniqueIdUTF8[1024] = "";
+    char deviceProductUniqueIDUTF8[1024] = "";
+
+    bool captureDeviceFound = false;
+    for(int index = 0; index < captureDeviceCount; index++){
+
+        memset(deviceNameUTF8, 0, NAME_LENGTH);
+        memset(deviceUniqueIdUTF8, 0, NAME_LENGTH);
+        memset(deviceProductUniqueIDUTF8, 0, NAME_LENGTH);
+        if(-1 == [[_captureInfo getDeviceNamesFromIndex:index
+                   DefaultName:deviceNameUTF8 WithLength:NAME_LENGTH
+                   AndUniqueID:deviceUniqueIdUTF8 WithLength:NAME_LENGTH
+                   AndProductID:deviceProductUniqueIDUTF8
+                   WithLength:NAME_LENGTH]intValue])
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "GetDeviceName returned -1 for index %d", index);
+            return -1;
+        }
+        if(0 == strcmp((const char*)iDeviceUniqueIdUTF8,
+                       (char*)deviceUniqueIdUTF8))
+        {
+            // we have a match
+            captureDeviceFound = true;
+            break;
+        }
+    }
+
+    if(false == captureDeviceFound)
+    {
+        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                     "Failed to find capture device unique ID %s",
+                     iDeviceUniqueIdUTF8);
+        return -1;
+    }
+
+    // at this point we know that the user has passed in a valid camera. Let's
+    // set it as the current.
+    if(-1 == [[_captureDevice
+               setCaptureDeviceById:(char*)deviceUniqueIdUTF8]intValue])
+    {
+        strcpy((char*)_deviceUniqueId, (char*)deviceUniqueIdUTF8);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to set capture device %s (unique ID %s) even "
+                     "though it was a valid return from "
+                     "VideoCaptureMacQTKitInfo");
+        return -1;
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "successfully Init VideoCaptureMacQTKit" );
+    return 0;
+}
+
+WebRtc_Word32 VideoCaptureMacQTKit::StartCapture(
+    const VideoCaptureCapability& capability)
+{
+
+    _captureWidth = capability.width;
+    _captureHeight = capability.height;
+    _captureFrameRate = capability.maxFPS;
+    _captureDelay = 120;
+
+    if(-1 == [[_captureDevice setCaptureHeight:_captureHeight
+               AndWidth:_captureWidth AndFrameRate:_captureFrameRate]intValue])
+    {
+        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                     "Could not set width=%d height=%d frameRate=%d",
+                     _captureWidth, _captureHeight, _captureFrameRate);
+        return -1;
+    }
+
+    if(-1 == [[_captureDevice startCapture]intValue])
+    {
+        return -1;
+    }
+    _isCapturing = true;
+    return 0;
+}
+
+WebRtc_Word32 VideoCaptureMacQTKit::StopCapture()
+{
+    [_captureDevice stopCapture];
+
+    _isCapturing = false;
+    return 0;
+}
+
+bool VideoCaptureMacQTKit::CaptureStarted()
+{
+    return _isCapturing;
+}
+
+WebRtc_Word32 VideoCaptureMacQTKit::CaptureSettings(VideoCaptureCapability& settings)
+{
+    settings.width = _captureWidth;
+    settings.height = _captureHeight;
+    settings.maxFPS = _captureFrameRate;
+    return 0;
+}
+
+
+// ********** begin functions inherited from DeviceInfoImpl **********
+
+struct VideoCaptureCapabilityMacQTKit:public VideoCaptureCapability
+{
+    VideoCaptureCapabilityMacQTKit()
+    {
+    }
+};
+}  // namespace videocapturemodule
+}  // namespace webrtc
+
diff --git a/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info.h b/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info.h
new file mode 100644
index 0000000..39847ea
--- /dev/null
+++ b/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info.h
@@ -0,0 +1,95 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_INFO_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_INFO_H_
+
+#include "../../video_capture_impl.h"
+#include "../../device_info_impl.h"
+#include "video_capture_qtkit_utility.h"
+
+#include "map_wrapper.h"
+
+
+@class VideoCaptureMacQTKitInfoObjC;
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+
+class VideoCaptureMacQTKitInfo: public DeviceInfoImpl
+{
+public:
+
+   VideoCaptureMacQTKitInfo(const WebRtc_Word32 id);
+    virtual ~VideoCaptureMacQTKitInfo();
+
+    WebRtc_Word32 Init();
+
+    virtual WebRtc_UWord32 NumberOfDevices();
+
+    /*
+     * Returns the available capture devices.
+     * deviceNumber   -[in] index of capture device
+     * deviceNameUTF8 - friendly name of the capture device
+     * deviceUniqueIdUTF8 - unique name of the capture device if it exist.
+     *      Otherwise same as deviceNameUTF8
+     * productUniqueIdUTF8 - unique product id if it exist. Null terminated
+     *      otherwise.
+     */
+    virtual WebRtc_Word32 GetDeviceName(
+        WebRtc_UWord32 deviceNumber, char* deviceNameUTF8,
+        WebRtc_UWord32 deviceNameLength, char* deviceUniqueIdUTF8,
+        WebRtc_UWord32 deviceUniqueIdUTF8Length,
+        char* productUniqueIdUTF8 = 0,
+        WebRtc_UWord32 productUniqueIdUTF8Length = 0);
+
+    /*
+     *   Returns the number of capabilities for this device
+     */
+    virtual WebRtc_Word32 NumberOfCapabilities(
+        const char* deviceUniqueIdUTF8);
+
+    /*
+     *   Gets the capabilities of the named device
+     */
+    virtual WebRtc_Word32 GetCapability(
+        const char* deviceUniqueIdUTF8,
+        const WebRtc_UWord32 deviceCapabilityNumber,
+        VideoCaptureCapability& capability);
+
+    /*
+     *  Gets the capability that best matches the requested width, height and frame rate.
+     *  Returns the deviceCapabilityNumber on success.
+     */
+    virtual WebRtc_Word32 GetBestMatchedCapability(
+        const char* deviceUniqueIdUTF8,
+        const VideoCaptureCapability& requested,
+        VideoCaptureCapability& resulting);
+
+    /*
+     * Display OS /capture device specific settings dialog
+     */
+    virtual WebRtc_Word32 DisplayCaptureSettingsDialogBox(
+        const char* deviceUniqueIdUTF8,
+        const char* dialogTitleUTF8, void* parentWindow,
+        WebRtc_UWord32 positionX, WebRtc_UWord32 positionY);
+
+protected:
+    virtual WebRtc_Word32 CreateCapabilityMap(
+        const char* deviceUniqueIdUTF8);
+
+    VideoCaptureMacQTKitInfoObjC*    _captureInfo;
+};
+}  // namespace videocapturemodule
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_INFO_H_
diff --git a/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info.mm b/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info.mm
new file mode 100644
index 0000000..df61f08
--- /dev/null
+++ b/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info.mm
@@ -0,0 +1,126 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "trace.h"
+#include "../../video_capture_config.h"
+#import "video_capture_qtkit_info_objc.h"
+
+#include "video_capture.h"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+
+VideoCaptureMacQTKitInfo::VideoCaptureMacQTKitInfo(const WebRtc_Word32 id) :
+    DeviceInfoImpl(id)
+{
+    _captureInfo = [[VideoCaptureMacQTKitInfoObjC alloc] init];
+}
+
+VideoCaptureMacQTKitInfo::~VideoCaptureMacQTKitInfo()
+{
+    [_captureInfo release];
+
+}
+
+WebRtc_Word32 VideoCaptureMacQTKitInfo::Init()
+{
+
+    return 0;
+}
+
+WebRtc_UWord32 VideoCaptureMacQTKitInfo::NumberOfDevices()
+{
+
+    WebRtc_UWord32 captureDeviceCount =
+        [[_captureInfo getCaptureDeviceCount]intValue];
+    return captureDeviceCount;
+
+}
+
+WebRtc_Word32 VideoCaptureMacQTKitInfo::GetDeviceName(
+    WebRtc_UWord32 deviceNumber, char* deviceNameUTF8,
+    WebRtc_UWord32 deviceNameLength, char* deviceUniqueIdUTF8,
+    WebRtc_UWord32 deviceUniqueIdUTF8Length, char* productUniqueIdUTF8,
+    WebRtc_UWord32 productUniqueIdUTF8Length)
+{
+    int errNum = [[_captureInfo getDeviceNamesFromIndex:deviceNumber
+                   DefaultName:deviceNameUTF8 WithLength:deviceNameLength
+                   AndUniqueID:deviceUniqueIdUTF8
+                   WithLength:deviceUniqueIdUTF8Length
+                   AndProductID:productUniqueIdUTF8
+                   WithLength:productUniqueIdUTF8Length]intValue];
+    return errNum;
+}
+
+WebRtc_Word32 VideoCaptureMacQTKitInfo::NumberOfCapabilities(
+    const char* deviceUniqueIdUTF8)
+{
+    // Not implemented. Mac doesn't use discrete steps in capabilities, rather
+    // "analog". QTKit will do it's best to convert frames to what ever format
+    // you ask for.
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                 "NumberOfCapabilities is not supported on the Mac platform.");
+    return -1;
+}
+
+
+WebRtc_Word32 VideoCaptureMacQTKitInfo::GetCapability(
+    const char* deviceUniqueIdUTF8,
+    const WebRtc_UWord32 deviceCapabilityNumber,
+    VideoCaptureCapability& capability)
+{
+    // Not implemented. Mac doesn't use discrete steps in capabilities, rather
+    // "analog". QTKit will do it's best to convert frames to what ever format
+    // you ask for.
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                 "NumberOfCapabilities is not supported on the Mac platform.");
+    return -1;
+}
+
+
+WebRtc_Word32 VideoCaptureMacQTKitInfo::GetBestMatchedCapability(
+    const char*deviceUniqueIdUTF8,
+    const VideoCaptureCapability& requested, VideoCaptureCapability& resulting)
+{
+    // Not implemented. Mac doesn't use discrete steps in capabilities, rather
+    // "analog". QTKit will do it's best to convert frames to what ever format
+    // you ask for.
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "NumberOfCapabilities is not supported on the Mac platform.");
+    return -1;
+}
+
+WebRtc_Word32 VideoCaptureMacQTKitInfo::DisplayCaptureSettingsDialogBox(
+    const char* deviceUniqueIdUTF8,
+    const char* dialogTitleUTF8, void* parentWindow,
+    WebRtc_UWord32 positionX, WebRtc_UWord32 positionY)
+{
+
+    return [[_captureInfo
+             displayCaptureSettingsDialogBoxWithDevice:deviceUniqueIdUTF8
+             AndTitle:dialogTitleUTF8
+             AndParentWindow:parentWindow AtX:positionX AndY:positionY]
+             intValue];
+}
+
+WebRtc_Word32 VideoCaptureMacQTKitInfo::CreateCapabilityMap(
+    const char* deviceUniqueIdUTF8)
+{
+    // Not implemented. Mac doesn't use discrete steps in capabilities, rather
+    // "analog". QTKit will do it's best to convert frames to what ever format
+    // you ask for.
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "NumberOfCapabilities is not supported on the Mac platform.");
+     return -1;
+}
+}  // namespace videocapturemodule
+}  // namespace webrtc
diff --git a/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info_objc.h b/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info_objc.h
new file mode 100644
index 0000000..a77a6da
--- /dev/null
+++ b/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info_objc.h
@@ -0,0 +1,66 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+//  video_capture_qtkit_info_objc.h
+//
+//
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_INFO_OBJC_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_INFO_OBJC_H_
+
+#import <QTKit/QTKit.h>
+#import <Foundation/Foundation.h>
+#include "video_capture_qtkit_utility.h"
+#include "video_capture_qtkit_info.h"
+
+@interface VideoCaptureMacQTKitInfoObjC : NSObject{
+    bool                                _OSSupportedInfo;
+    NSArray*                            _captureDevicesInfo;
+    NSAutoreleasePool*                    _poolInfo;
+    int                                    _captureDeviceCountInfo;
+
+}
+
+/**************************************************************************
+ *
+ *   The following functions are considered to be private
+ *
+ ***************************************************************************/
+
+- (NSNumber*)getCaptureDevices;
+- (NSNumber*)initializeVariables;
+- (void)checkOSSupported;
+
+
+/**************************************************************************
+ *
+ *   The following functions are considered to be public and called by VideoCaptureMacQTKitInfo class
+ *
+ ***************************************************************************/
+
+- (NSNumber*)getCaptureDeviceCount;
+
+- (NSNumber*)getDeviceNamesFromIndex:(WebRtc_UWord32)index
+    DefaultName:(char*)deviceName
+    WithLength:(WebRtc_UWord32)deviceNameLength
+    AndUniqueID:(char*)deviceUniqueID
+    WithLength:(WebRtc_UWord32)deviceUniqueIDLength
+    AndProductID:(char*)deviceProductID
+    WithLength:(WebRtc_UWord32)deviceProductIDLength;
+
+- (NSNumber*)displayCaptureSettingsDialogBoxWithDevice:
+        (const char*)deviceUniqueIdUTF8
+    AndTitle:(const char*)dialogTitleUTF8
+    AndParentWindow:(void*) parentWindow AtX:(WebRtc_UWord32)positionX
+    AndY:(WebRtc_UWord32) positionY;
+@end
+
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_INFO_OBJC_H_
diff --git a/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info_objc.mm b/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info_objc.mm
new file mode 100644
index 0000000..f40131c
--- /dev/null
+++ b/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_info_objc.mm
@@ -0,0 +1,188 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#pragma mark **** imports/includes
+
+#import "video_capture_qtkit_info_objc.h"
+
+#include "trace.h"
+
+using namespace webrtc;
+
+#pragma mark **** hidden class interface
+
+@implementation VideoCaptureMacQTKitInfoObjC
+
+// ****************** over-written OS methods ***********************
+#pragma mark **** over-written OS methods
+
+/// ***** Objective-C. Similar to C++ constructor, although invoked manually
+/// ***** Potentially returns an instance of self
+-(id)init{
+    self = [super init];
+    if(nil != self){
+        [self checkOSSupported];
+        [self initializeVariables];
+    }
+    else
+    {
+        return nil;
+    }
+    return self;
+}
+
+/// ***** Objective-C. Similar to C++ destructor
+/// ***** Returns nothing
+- (void)dealloc {
+    [super dealloc];
+}
+
+// ****************** public methods ******************
+#pragma mark **** public method implementations
+
+/// ***** Creates a message box with Cocoa framework
+/// ***** Returns 0 on success, -1 otherwise.
+- (NSNumber*)displayCaptureSettingsDialogBoxWithDevice:(const char*)deviceUniqueIdUTF8
+                    AndTitle:(const char*)dialogTitleUTF8
+                    AndParentWindow:(void*) parentWindow
+                    AtX:(WebRtc_UWord32)positionX
+                    AndY:(WebRtc_UWord32) positionY
+{
+    NSString* strTitle = [NSString stringWithFormat:@"%s", dialogTitleUTF8];
+    NSString* strButton = @"Alright";
+    NSString* strMessage = [NSString stringWithFormat:@"Device %s is capturing", deviceUniqueIdUTF8];
+    NSAlert* alert = [NSAlert alertWithMessageText:strTitle
+                      defaultButton:strButton
+                      alternateButton:nil otherButton:nil
+                      informativeTextWithFormat:strMessage];
+    [alert setAlertStyle:NSInformationalAlertStyle];
+    [alert runModal];
+    return [NSNumber numberWithInt:0];
+}
+
+- (NSNumber*)getCaptureDeviceCount{
+    [self getCaptureDevices];
+    return [NSNumber numberWithInt:_captureDeviceCountInfo];
+}
+
+
+- (NSNumber*)getDeviceNamesFromIndex:(WebRtc_UWord32)index
+    DefaultName:(char*)deviceName
+    WithLength:(WebRtc_UWord32)deviceNameLength
+    AndUniqueID:(char*)deviceUniqueID
+    WithLength:(WebRtc_UWord32)deviceUniqueIDLength
+    AndProductID:(char*)deviceProductID
+    WithLength:(WebRtc_UWord32)deviceProductIDLength
+{
+    if(NO == _OSSupportedInfo)
+    {
+        return [NSNumber numberWithInt:0];
+    }
+
+    if(index > (WebRtc_UWord32)_captureDeviceCountInfo)
+    {
+        return [NSNumber numberWithInt:-1];
+    }
+
+    QTCaptureDevice* tempCaptureDevice =
+        (QTCaptureDevice*)[_captureDevicesInfo objectAtIndex:index];
+    if(!tempCaptureDevice)
+    {
+        return [NSNumber numberWithInt:-1];
+    }
+
+    memset(deviceName, 0, deviceNameLength);
+    memset(deviceUniqueID, 0, deviceUniqueIDLength);
+
+    bool successful = NO;
+
+    NSString* tempString = [tempCaptureDevice localizedDisplayName];
+    successful = [tempString getCString:(char*)deviceName
+                  maxLength:deviceNameLength encoding:NSUTF8StringEncoding];
+    if(NO == successful)
+    {
+        memset(deviceName, 0, deviceNameLength);
+        return [NSNumber numberWithInt:-1];
+    }
+
+    tempString = [tempCaptureDevice uniqueID];
+    successful = [tempString getCString:(char*)deviceUniqueID
+                  maxLength:deviceUniqueIDLength encoding:NSUTF8StringEncoding];
+    if(NO == successful)
+    {
+        memset(deviceUniqueID, 0, deviceNameLength);
+        return [NSNumber numberWithInt:-1];
+    }
+
+    return [NSNumber numberWithInt:0];
+
+}
+
+// ****************** "private" category functions below here  ******************
+#pragma mark **** "private" method implementations
+
+- (NSNumber*)initializeVariables
+{
+    if(NO == _OSSupportedInfo)
+    {
+        return [NSNumber numberWithInt:0];
+    }
+
+    _poolInfo = [[NSAutoreleasePool alloc]init];
+    _captureDeviceCountInfo = 0;
+    [self getCaptureDevices];
+
+    return [NSNumber numberWithInt:0];
+}
+
+// ***** Checks to see if the QTCaptureSession framework is available in the OS
+// ***** If it is not, isOSSupprted = NO
+// ***** Throughout the rest of the class isOSSupprted is checked and functions
+// ***** are/aren't called depending
+// ***** The user can use weak linking to the QTKit framework and run on older
+// ***** versions of the OS
+// ***** I.E. Backwards compaitibility
+// ***** Returns nothing. Sets member variable
+- (void)checkOSSupported
+{
+    Class osSupportedTest = NSClassFromString(@"QTCaptureSession");
+    _OSSupportedInfo = NO;
+    if(nil == osSupportedTest)
+    {
+    }
+    _OSSupportedInfo = YES;
+}
+
+/// ***** Retrieves the number of capture devices currently available
+/// ***** Stores them in an NSArray instance
+/// ***** Returns 0 on success, -1 otherwise.
+- (NSNumber*)getCaptureDevices
+{
+    if(NO == _OSSupportedInfo)
+    {
+        return [NSNumber numberWithInt:0];
+    }
+
+    if(_captureDevicesInfo)
+    {
+        [_captureDevicesInfo release];
+    }
+    _captureDevicesInfo = [[NSArray alloc]
+                            initWithArray:[QTCaptureDevice
+                                           inputDevicesWithMediaType:QTMediaTypeVideo]];
+
+    _captureDeviceCountInfo = _captureDevicesInfo.count;
+    if(_captureDeviceCountInfo < 1){
+        return [NSNumber numberWithInt:0];
+    }
+    return [NSNumber numberWithInt:0];
+}
+
+@end
diff --git a/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_objc.h b/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_objc.h
new file mode 100644
index 0000000..d48dbf1
--- /dev/null
+++ b/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_objc.h
@@ -0,0 +1,93 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+//  video_capture_qtkit_objc.h
+//
+//
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_OBJC_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_OBJC_H_
+
+#import <Foundation/Foundation.h>
+#import <QTKit/QTKit.h>
+#import <AppKit/AppKit.h>
+#import <CoreData/CoreData.h>
+#import <CoreFoundation/CoreFoundation.h>
+#import <CoreVideo/CoreVideo.h>
+
+#import "video_capture_recursive_lock.h"
+
+#include "video_capture_qtkit.h"
+
+@interface VideoCaptureMacQTKitObjC : NSObject{
+    // class properties
+    bool                                    _capturing;
+    int                                    _counter;
+    int                                    _frameRate;
+    int                                    _frameWidth;
+    int                                    _frameHeight;
+    int                                    _framesDelivered;
+    int                                    _framesRendered;
+    bool                                _OSSupported;
+    bool                                _captureInitialized;
+    
+    // WebRTC Custom classes
+    webrtc::videocapturemodule::VideoCaptureMacQTKit* _owner;
+    VideoCaptureRecursiveLock*            _rLock;
+    
+    // QTKit variables
+    QTCaptureSession*                    _captureSession;
+    QTCaptureDeviceInput*                _captureVideoDeviceInput;
+    QTCaptureDecompressedVideoOutput*    _captureDecompressedVideoOutput;
+    NSArray*                            _captureDevices;
+    int                                    _captureDeviceCount;
+    int                                    _captureDeviceIndex;
+    NSString*                            _captureDeviceName;
+    char                                _captureDeviceNameUTF8[1024];
+    char                                _captureDeviceNameUniqueID[1024];
+    char                                _captureDeviceNameProductID[1024];
+    NSString*                            _key;
+    NSNumber*                            _val;
+    NSDictionary*                        _videoSettings;
+    NSString*                            _captureQuality;
+    
+    // other
+    NSAutoreleasePool*                    _pool;
+
+}
+/**************************************************************************
+ *
+ *   The following functions are considered to be private.
+ *
+ ***************************************************************************/
+
+- (NSNumber*)getCaptureDevices;
+- (NSNumber*)initializeVideoCapture;
+- (NSNumber*)initializeVariables;
+- (void)checkOSSupported;
+
+
+/**************************************************************************
+ *
+ *   The following functions are considered public and to be called by the VideoCaptureMacQTKit class.
+ *
+ ***************************************************************************/
+
+
+- (NSNumber*)registerOwner:(webrtc::videocapturemodule::VideoCaptureMacQTKit*)owner;
+- (NSNumber*)setCaptureDeviceById:(char*)uniqueId;
+- (NSNumber*)setCaptureHeight:(int)height AndWidth:(int)width AndFrameRate:(int)frameRate;
+- (NSNumber*)startCapture;
+- (NSNumber*)stopCapture;
+
+@end
+
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_OBJC_H_
diff --git a/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_objc.mm b/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_objc.mm
new file mode 100644
index 0000000..48f734c
--- /dev/null
+++ b/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_objc.mm
@@ -0,0 +1,462 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#define DEFAULT_CAPTURE_DEVICE_INDEX    1
+#define DEFAULT_FRAME_RATE              30
+#define DEFAULT_FRAME_WIDTH                352
+#define DEFAULT_FRAME_HEIGHT            288
+#define ROTATE_CAPTURED_FRAME           1
+#define LOW_QUALITY                     1
+
+#import "video_capture_qtkit_objc.h"
+#include "video_capture_qtkit_utility.h"
+#include "trace.h"
+
+using namespace webrtc;
+using namespace videocapturemodule;
+
+@implementation VideoCaptureMacQTKitObjC
+
+#pragma mark **** over-written OS methods
+
+/// ***** Objective-C. Similar to C++ constructor, although must be invoked
+///       manually.
+/// ***** Potentially returns an instance of self
+-(id)init{
+    self = [super init];
+    if(nil != self)
+    {
+        [self checkOSSupported];
+        [self initializeVariables];
+    }
+    else
+    {
+        return nil;
+    }
+    return self;
+}
+
+/// ***** Objective-C. Similar to C++ destructor
+/// ***** Returns nothing
+- (void)dealloc {
+    if(_captureSession)
+    {
+        [_captureSession stopRunning];
+        [_captureSession release];
+    }
+    [super dealloc];
+}
+
+#pragma mark **** public methods
+
+
+
+/// ***** Registers the class's owner, which is where the delivered frames are
+///       sent
+/// ***** Returns 0 on success, -1 otherwise.
+- (NSNumber*)registerOwner:(VideoCaptureMacQTKit*)owner{
+    if(!owner){
+        return [NSNumber numberWithInt:-1];
+    }
+    _owner = owner;
+    return [NSNumber numberWithInt:0];
+}
+
+/// ***** Sets the QTCaptureSession's input device from a char*
+/// ***** Sets several member variables. Can signal the error system if one has
+///       occurred
+/// ***** Returns 0 on success, -1 otherwise.
+- (NSNumber*)setCaptureDeviceById:(char*)uniqueId{
+    if(NO == _OSSupported)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoCapture, 0,
+                     "%s:%d OS version does not support necessary APIs",
+                     __FUNCTION__, __LINE__);
+        return [NSNumber numberWithInt:0];
+    }
+
+    if(!uniqueId || (0 == strcmp("", uniqueId)))
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoCapture, 0,
+                     "%s:%d  \"\" was passed in for capture device name",
+                     __FUNCTION__, __LINE__);
+        memset(_captureDeviceNameUTF8, 0, 1024);
+        return [NSNumber numberWithInt:0];
+    }
+
+    if(0 == strcmp(uniqueId, _captureDeviceNameUniqueID))
+    {
+        // camera already set
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoCapture, 0,
+                     "%s:%d Capture device is already set to %s", __FUNCTION__,
+                     __LINE__, _captureDeviceNameUTF8);
+        return [NSNumber numberWithInt:0];
+    }
+
+    bool success = NO;
+    QTCaptureDevice* tempCaptureDevice;
+    for(int index = 0; index < _captureDeviceCount; index++)
+    {
+        tempCaptureDevice = (QTCaptureDevice*)[_captureDevices
+                                               objectAtIndex:index];
+        char tempCaptureDeviceId[1024] = "";
+        [[tempCaptureDevice uniqueID]
+          getCString:tempCaptureDeviceId maxLength:1024
+          encoding:NSUTF8StringEncoding];
+        if(0 == strcmp(uniqueId, tempCaptureDeviceId))
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVideoCapture, 0,
+                         "%s:%d Found capture device id %s as index %d",
+                         __FUNCTION__, __LINE__, tempCaptureDeviceId, index);
+            success = YES;
+          [[tempCaptureDevice localizedDisplayName]
+              getCString:_captureDeviceNameUTF8
+               maxLength:1024
+                encoding:NSUTF8StringEncoding];
+          [[tempCaptureDevice uniqueID]
+              getCString:_captureDeviceNameUniqueID
+               maxLength:1024
+                encoding:NSUTF8StringEncoding];
+            break;
+        }
+
+    }
+
+    if(NO == success)
+    {
+        // camera not found
+        // nothing has been changed yet, so capture device will stay in it's
+        // state
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoCapture, 0,
+                     "%s:%d Capture device id %s was not found in list of "
+                     "available devices.", __FUNCTION__, __LINE__, uniqueId);
+        return [NSNumber numberWithInt:0];
+    }
+
+    NSError* error;
+    success = [tempCaptureDevice open:&error];
+    if(!success)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoCapture, 0,
+                     "%s:%d Failed to open capture device: %s",
+                     __FUNCTION__, __LINE__, _captureDeviceNameUTF8);
+        return [NSNumber numberWithInt:-1];
+    }
+
+    if(_captureVideoDeviceInput)
+    {
+        [_captureVideoDeviceInput release];
+    }
+    _captureVideoDeviceInput = [[QTCaptureDeviceInput alloc]
+                                 initWithDevice:tempCaptureDevice];
+
+    success = [_captureSession addInput:_captureVideoDeviceInput error:&error];
+    if(!success)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoCapture, 0,
+                     "%s:%d Failed to add input from %s to the capture session",
+                     __FUNCTION__, __LINE__, _captureDeviceNameUTF8);
+        return [NSNumber numberWithInt:-1];
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoCapture, 0,
+                 "%s:%d successfully added capture device: %s", __FUNCTION__,
+                 __LINE__, _captureDeviceNameUTF8);
+    return [NSNumber numberWithInt:0];
+}
+
+
+/// ***** Updates the capture devices size and frequency
+/// ***** Sets member variables _frame* and _captureDecompressedVideoOutput
+/// ***** Returns 0 on success, -1 otherwise.
+- (NSNumber*)setCaptureHeight:(int)height AndWidth:(int)width
+             AndFrameRate:(int)frameRate{
+    if(NO == _OSSupported)
+    {
+        return [NSNumber numberWithInt:0];
+    }
+
+    _frameWidth = width;
+    _frameHeight = height;
+    _frameRate = frameRate;
+
+    // TODO(mflodman) Check fps settings.
+    // [_captureDecompressedVideoOutput
+    //     setMinimumVideoFrameInterval:(NSTimeInterval)1/(float)_frameRate];
+    NSDictionary* captureDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
+                                       [NSNumber numberWithDouble:_frameWidth], (id)kCVPixelBufferWidthKey,
+                                       [NSNumber numberWithDouble:_frameHeight], (id)kCVPixelBufferHeightKey,
+                                       [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB],
+                                       (id)kCVPixelBufferPixelFormatTypeKey, nil]; 
+    [_captureDecompressedVideoOutput performSelectorOnMainThread:@selector(setPixelBufferAttributes:) withObject:captureDictionary waitUntilDone:NO];
+//    [_captureDecompressedVideoOutput setPixelBufferAttributes:captureDictionary];
+
+        
+    // these methods return type void so there isn't much we can do about
+    // checking success
+    return [NSNumber numberWithInt:0];
+}
+
+/// ***** Starts the QTCaptureSession, assuming correct state. Also ensures that
+///       an NSRunLoop is running
+/// ***** Without and NSRunLoop to process events, the OS doesn't check for a
+///       new frame.
+/// ***** Sets member variables _capturing
+/// ***** Returns 0 on success, -1 otherwise.
+- (NSNumber*)startCapture{
+    if(NO == _OSSupported)
+    {
+        return [NSNumber numberWithInt:0];
+    }
+
+    if(YES == _capturing)
+    {
+        return [NSNumber numberWithInt:0];
+    }
+  
+//    NSLog(@"--------------- before ---------------");
+    [[NSRunLoop mainRunLoop] runUntilDate:[NSDate distantFuture]];
+//    NSLog(@"--------------- after ---------------");
+
+    if(NO == _captureInitialized)
+    {
+        // this should never be called..... it is initialized on class init
+        [self initializeVideoCapture];
+    }    
+    [_captureSession startRunning];
+
+    
+    _capturing = YES;
+
+    return [NSNumber numberWithInt:0];
+}
+
+/// ***** Stops the QTCaptureSession, assuming correct state
+/// ***** Sets member variables _capturing
+/// ***** Returns 0 on success, -1 otherwise.
+- (NSNumber*)stopCapture{
+
+    if(NO == _OSSupported)
+    {
+        return [NSNumber numberWithInt:0];
+    }
+
+    if(nil == _captureSession)
+    {
+        return [NSNumber numberWithInt:0];
+    }
+
+    if(NO == _capturing)
+    {
+        return [NSNumber numberWithInt:0];
+    }
+
+    if(YES == _capturing)
+    {
+        [_captureSession stopRunning];
+    }
+
+    _capturing = NO;
+    return [NSNumber numberWithInt:0];
+}
+
+// ********** "private" functions below here **********
+#pragma mark **** "private" methods
+
+/// ***** Class member variables are initialized here
+/// ***** Returns 0 on success, -1 otherwise.
+- (NSNumber*)initializeVariables{
+
+    if(NO == _OSSupported)
+    {
+        return [NSNumber numberWithInt:0];
+    }
+
+    _pool = [[NSAutoreleasePool alloc]init];
+
+    memset(_captureDeviceNameUTF8, 0, 1024);
+    _counter = 0;
+    _framesDelivered = 0;
+    _framesRendered = 0;
+    _captureDeviceCount = 0;
+    _capturing = NO;
+    _captureInitialized = NO;
+    _frameRate = DEFAULT_FRAME_RATE;
+    _frameWidth = DEFAULT_FRAME_WIDTH;
+    _frameHeight = DEFAULT_FRAME_HEIGHT;
+    _captureDeviceName = [[NSString alloc] initWithFormat:@""];
+    _rLock = [[VideoCaptureRecursiveLock alloc] init];
+    _captureSession = [[QTCaptureSession alloc] init];
+    _captureDecompressedVideoOutput = [[QTCaptureDecompressedVideoOutput alloc]
+                                        init];
+    [_captureDecompressedVideoOutput setDelegate:self];
+
+    [self getCaptureDevices];
+    [self initializeVideoCapture];
+
+    return [NSNumber numberWithInt:0];
+
+}
+
+// Checks to see if the QTCaptureSession framework is available in the OS
+// If it is not, isOSSupprted = NO.
+// Throughout the rest of the class isOSSupprted is checked and functions
+// are/aren't called depending
+// The user can use weak linking to the QTKit framework and run on older
+// versions of the OS. I.E. Backwards compaitibility
+// Returns nothing. Sets member variable
+- (void)checkOSSupported{
+
+    Class osSupportedTest = NSClassFromString(@"QTCaptureSession");
+    _OSSupported = NO;
+    if(nil == osSupportedTest)
+    {
+    }
+    _OSSupported = YES;
+}
+
+/// ***** Retrieves the number of capture devices currently available
+/// ***** Stores them in an NSArray instance
+/// ***** Returns 0 on success, -1 otherwise.
+- (NSNumber*)getCaptureDevices{
+
+    if(NO == _OSSupported)
+    {
+        return [NSNumber numberWithInt:0];
+    }
+
+    if(_captureDevices)
+    {
+        [_captureDevices release];
+    }
+    _captureDevices = [[NSArray alloc] initWithArray:
+        [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo]];
+
+    _captureDeviceCount = _captureDevices.count;
+    if(_captureDeviceCount < 1)
+    {
+        return [NSNumber numberWithInt:0];
+    }
+    return [NSNumber numberWithInt:0];
+}
+
+// Initializes a QTCaptureSession (member variable) to deliver frames via
+// callback
+// QTCapture* member variables affected
+// The image format and frequency are setup here
+// Returns 0 on success, -1 otherwise.
+- (NSNumber*)initializeVideoCapture{
+
+    if(YES == _captureInitialized)
+    {
+        return [NSNumber numberWithInt:-1];
+    }
+
+    QTCaptureDevice* videoDevice =
+        (QTCaptureDevice*)[_captureDevices objectAtIndex:0];
+
+    bool success = NO;
+    NSError*    error;
+
+    success = [videoDevice open:&error];
+    if(!success)
+    {
+        return [NSNumber numberWithInt:-1];
+    }
+
+    [_captureDecompressedVideoOutput setPixelBufferAttributes:
+        [NSDictionary dictionaryWithObjectsAndKeys:
+            [NSNumber numberWithDouble:_frameWidth], (id)kCVPixelBufferWidthKey,
+            [NSNumber numberWithDouble:_frameHeight], (id)kCVPixelBufferHeightKey,
+            [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32ARGB],
+            (id)kCVPixelBufferPixelFormatTypeKey, nil]];
+
+    // TODO(mflodman) Check fps settings.
+    //[_captureDecompressedVideoOutput setMinimumVideoFrameInterval:
+    //    (NSTimeInterval)1/(float)_frameRate];
+    //[_captureDecompressedVideoOutput setAutomaticallyDropsLateVideoFrames:YES];
+
+    success = [_captureSession addOutput:_captureDecompressedVideoOutput
+               error:&error];
+
+    if(!success)
+    {
+        return [NSNumber numberWithInt:-1];
+    }
+
+    _captureInitialized = YES;
+
+    return [NSNumber numberWithInt:0];
+}
+
+// This is the callback that is called when the OS has a frame to deliver to us.
+// Starts being called when [_captureSession startRunning] is called. Stopped
+// similarly.
+// Parameter videoFrame contains the image. The format, size, and frequency
+// were setup earlier.
+// Returns 0 on success, -1 otherwise.
+- (void)captureOutput:(QTCaptureOutput *)captureOutput
+    didOutputVideoFrame:(CVImageBufferRef)videoFrame
+     withSampleBuffer:(QTSampleBuffer *)sampleBuffer
+     fromConnection:(QTCaptureConnection *)connection{
+
+    if(YES == [_rLock tryLock])
+    {
+        [_rLock lock];
+    }
+    else
+    {
+        return;
+    }
+
+    if(NO == _OSSupported)
+    {
+        return;
+    }
+
+    const int LOCK_FLAGS = 0; // documentation says to pass 0
+
+    // get size of the frame
+    CVPixelBufferLockBaseAddress(videoFrame, LOCK_FLAGS);
+    void* baseAddress = CVPixelBufferGetBaseAddress(videoFrame);
+    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(videoFrame);
+    int frameHeight = CVPixelBufferGetHeight(videoFrame);
+    CVPixelBufferUnlockBaseAddress(videoFrame, LOCK_FLAGS);
+
+    if(_owner)
+    {
+
+        int frameSize = bytesPerRow * frameHeight;    // 32 bit ARGB format
+        CVBufferRetain(videoFrame);
+        VideoCaptureCapability tempCaptureCapability;
+        tempCaptureCapability.width = _frameWidth;
+        tempCaptureCapability.height = _frameHeight;
+        tempCaptureCapability.maxFPS = _frameRate;
+        // TODO(wu) : Update actual type and not hard-coded value. 
+        tempCaptureCapability.rawType = kVideoBGRA;
+
+        _owner->IncomingFrame((unsigned char*)baseAddress,
+                              frameSize,
+                              tempCaptureCapability,
+                              0);
+
+        CVBufferRelease(videoFrame);
+    }
+
+    _framesDelivered++;
+    _framesRendered++;
+
+    if(YES == [_rLock locked])
+    {
+        [_rLock unlock];
+    }
+}
+
+@end
diff --git a/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_utility.h b/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_utility.h
new file mode 100644
index 0000000..5ef0b96
--- /dev/null
+++ b/src/modules/video_capture/main/source/Mac/QTKit/video_capture_qtkit_utility.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  video_capture_qtkit_utility.h
+ *
+ */
+
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_UTILITY_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_UTILITY_H_
+
+#define MAX_NAME_LENGTH                1024
+
+#define QTKIT_MIN_WIDTH                0
+#define QTKIT_MAX_WIDTH                2560
+#define QTKIT_DEFAULT_WIDTH            352
+
+#define QTKIT_MIN_HEIGHT            0
+#define QTKIT_MAX_HEIGHT            1440
+#define QTKIT_DEFAULT_HEIGHT        288
+
+#define QTKIT_MIN_FRAME_RATE        1
+#define QTKIT_MAX_FRAME_RATE        60
+#define QTKIT_DEFAULT_FRAME_RATE    30
+
+#define RELEASE_AND_CLEAR(p)        if (p) { (p) -> Release () ; (p) = NULL ; }
+
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_QTKIT_UTILITY_H_
diff --git a/src/modules/video_capture/main/source/Mac/QTKit/video_capture_recursive_lock.h b/src/modules/video_capture/main/source/Mac/QTKit/video_capture_recursive_lock.h
new file mode 100644
index 0000000..f4008a4
--- /dev/null
+++ b/src/modules/video_capture/main/source/Mac/QTKit/video_capture_recursive_lock.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+//  video_capture_recursive_lock.h
+//
+//
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_RECURSIVE_LOCK_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_RECURSIVE_LOCK_H_
+
+#import <Foundation/Foundation.h>
+
+@interface VideoCaptureRecursiveLock : NSRecursiveLock <NSLocking> {
+    BOOL _locked;
+}
+
+@property BOOL locked;
+
+- (void)lock;
+- (void)unlock;
+
+@end
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QTKIT_VIDEO_CAPTURE_RECURSIVE_LOCK_H_
diff --git a/src/modules/video_capture/main/source/Mac/QTKit/video_capture_recursive_lock.mm b/src/modules/video_capture/main/source/Mac/QTKit/video_capture_recursive_lock.mm
new file mode 100644
index 0000000..d9df5cb
--- /dev/null
+++ b/src/modules/video_capture/main/source/Mac/QTKit/video_capture_recursive_lock.mm
@@ -0,0 +1,33 @@
+//
+//  video_capture_recursive_lock.mm
+//
+//
+
+#import "video_capture_recursive_lock.h"
+
+@implementation VideoCaptureRecursiveLock
+
+@synthesize locked = _locked;
+
+- (id)init{
+    self = [super init];
+    if(nil == self){
+        return nil;
+    }
+
+    [self setLocked:NO];
+    return self;
+}
+
+- (void)lock{
+    [self setLocked:YES];
+    [super lock];
+}
+
+- (void)unlock{
+    [self setLocked:NO];
+    [super unlock];
+}
+
+
+@end
diff --git a/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time.cc b/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time.cc
new file mode 100644
index 0000000..69fd67a
--- /dev/null
+++ b/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time.cc
@@ -0,0 +1,1388 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  video_capture_quick_time.cc
+ *
+ */
+
+
+#include "video_capture_quick_time.h"
+
+#include "CriticalSectionWrapper.h"
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+#include "tick_util.h"
+#include "trace.h"
+#include <unistd.h>
+
+namespace webrtc
+{
+
+VideoCaptureMacQuickTime::VideoCaptureMacQuickTime(WebRtc_Word32 iID) :
+    VideoCaptureImpl(iID), // super class constructor
+    _id(iID),
+    _isCapturing(false),
+    _captureCapability(),
+    _grabberCritsect(CriticalSectionWrapper::CreateCriticalSection()),
+    _videoMacCritsect(CriticalSectionWrapper::CreateCriticalSection()),
+    _terminated(true), _grabberUpdateThread(NULL),
+    _grabberUpdateEvent(NULL), _captureGrabber(NULL), _captureDevice(NULL),
+    _captureVideoType(kVideoUnknown), _captureIsInitialized(false),
+    _gWorld(NULL), _captureChannel(0), _captureSequence(NULL),
+    _sgPrepared(false), _sgStarted(false), _trueCaptureWidth(0),
+    _trueCaptureHeight(0), _captureDeviceList(),
+    _captureDeviceListTime(0), _captureCapabilityList()
+
+{
+    _captureCapability.width = START_CODEC_WIDTH;
+    _captureCapability.height = START_CODEC_HEIGHT;
+    memset(_captureDeviceDisplayName, 0, sizeof(_captureDeviceDisplayName));
+}
+
+VideoCaptureMacQuickTime::~VideoCaptureMacQuickTime()
+{
+
+
+    VideoCaptureTerminate();
+
+    if (_videoMacCritsect)
+    {
+        delete _videoMacCritsect;
+    }
+    if (_grabberCritsect)
+    {
+        delete _grabberCritsect;
+    }
+
+}
+
+WebRtc_Word32 VideoCaptureMacQuickTime::Init(
+    const WebRtc_Word32 id, const char* deviceUniqueIdUTF8)
+{
+
+    const WebRtc_Word32 nameLength =
+        (WebRtc_Word32) strlen((char*) deviceUniqueIdUTF8);
+    if (nameLength > kVideoCaptureUniqueNameLength)
+        return -1;
+
+    // Store the device name
+    _deviceUniqueId = new char[nameLength + 1];
+    memset(_deviceUniqueId, 0, nameLength + 1);
+    memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1);
+
+    // Check OSX version
+    OSErr err = noErr;
+    long version;
+
+    _videoMacCritsect->Enter();
+    if (!_terminated)
+    {
+        _videoMacCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Already Initialized", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    err = Gestalt(gestaltSystemVersion, &version);
+    if (err != noErr)
+    {
+        _videoMacCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Could not retrieve OS version", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d OS X version: %x,", __FUNCTION__, __LINE__, version);
+    if (version < 0x00001040) // Older version than Mac OSX 10.4
+    {
+        _videoMacCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d OS version not supported", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    err = Gestalt(gestaltQuickTime, &version);
+    if (err != noErr)
+    {
+        _videoMacCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Could not retrieve QuickTime version",
+                     __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d QuickTime version: %x", __FUNCTION__, __LINE__,
+                 version);
+    if (version < 0x07000000) // QT v. 7.x or newer (QT 5.0.2 0x05020000)
+    {
+        _videoMacCritsect->Leave();
+        return -1;
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d EnterMovies()", __FUNCTION__, __LINE__);
+    EnterMovies();
+
+    if (VideoCaptureSetCaptureDevice((char*) deviceUniqueIdUTF8,
+                                   kVideoCaptureProductIdLength) == -1)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d failed to set capture device: %s", __FUNCTION__,
+                     __LINE__, deviceUniqueIdUTF8);
+        _videoMacCritsect->Leave();
+        return -1;
+    }
+
+    _terminated = false;
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d successful initialization", __FUNCTION__, __LINE__);
+    _videoMacCritsect->Leave();
+
+    return 0;
+}
+
+WebRtc_Word32 VideoCaptureMacQuickTime::StartCapture(
+    const VideoCaptureCapability& capability)
+{
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id, "%s:%d "
+        "capability.width=%d, capability.height=%d ,capability.maxFPS=%d "
+        "capability.expectedCaptureDelay=%d, capability.interlaced=%d",
+        __FUNCTION__, __LINE__, capability.width, capability.height,
+        capability.maxFPS, capability.expectedCaptureDelay,
+        capability.interlaced);
+
+    _captureCapability.width = capability.width;
+    _captureCapability.height = capability.height;
+    _captureDelay = 120;
+
+    if (VideoCaptureRun() == -1)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 VideoCaptureMacQuickTime::StopCapture()
+{
+
+    if (VideoCaptureStop() == -1)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+bool VideoCaptureMacQuickTime::CaptureStarted()
+{
+    return _isCapturing;
+}
+
+WebRtc_Word32 VideoCaptureMacQuickTime::CaptureSettings(
+    VideoCaptureCapability& settings)
+{
+	settings.width = _captureCapability.width;
+	settings.height = _captureCapability.height;
+	settings.maxFPS = 0;
+    return 0;
+}
+
+int VideoCaptureMacQuickTime::VideoCaptureTerminate()
+{
+    VideoCaptureStop();
+
+    _videoMacCritsect->Enter();
+    if (_terminated)
+    {
+        _videoMacCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Already terminated", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    _grabberCritsect->Enter();
+
+    // Stop the camera/sequence grabber
+    // Resets: _captureSequence, _sgStarted
+    StopQuickTimeCapture();
+
+    // Remove local video settings
+    // Resets: _gWorld, _captureCapability.width, _captureCapability.height
+    RemoveLocalGWorld();
+    DisconnectCaptureDevice();
+
+    if (_grabberUpdateThread)
+        _grabberUpdateThread->SetNotAlive();
+
+    _grabberCritsect->Leave();
+
+    if (_grabberUpdateEvent)
+        _grabberUpdateEvent->Set();
+
+    SLEEP(1);
+    _grabberCritsect->Enter();
+
+    if (_grabberUpdateThread)
+    {
+        _grabberUpdateThread->Stop();
+        delete _grabberUpdateThread;
+        _grabberUpdateThread = NULL;
+    }
+    if (_grabberUpdateEvent)
+    {
+        delete _grabberUpdateEvent;
+        _grabberUpdateEvent = NULL;
+    }
+
+    // Close the sequence grabber
+    if (_captureGrabber)
+    {
+        SGRelease(_captureGrabber);
+        _captureGrabber = NULL;
+        CloseComponent(_captureGrabber);
+        _captureDevice = NULL;
+    }
+    _captureVideoType = kVideoUnknown;
+
+    // Delete capture device list
+    ListItem* item = _captureDeviceList.First();
+    while (item)
+    {
+        delete static_cast<unsigned char*> (item->GetItem());
+        _captureDeviceList.Erase(item);
+        item = _captureDeviceList.First();
+    }
+    _captureDeviceListTime = 0;
+
+    _terminated = true;
+
+    _grabberCritsect->Leave();
+    _videoMacCritsect->Leave();
+
+    return 0;
+}
+
+int VideoCaptureMacQuickTime::UpdateCaptureSettings(int channel,
+                                                    webrtc::VideoCodec& inst,
+                                                    bool def)
+{
+
+    if (channel < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Invalid channel number: %d", __FUNCTION__,
+                     __LINE__, channel);
+        return -1;
+    }
+
+    // the size has changed, we need to change our setup
+    _videoMacCritsect->Enter();
+
+    // Stop capturing, if we are...
+    _grabberCritsect->Enter();
+
+    bool wasCapturing = false;
+    StopQuickTimeCapture(&wasCapturing);
+
+    // Create a new offline GWorld to receive captured frames
+    RemoveLocalGWorld();
+
+    if (CreateLocalGWorld(inst.width, inst.height) == -1)
+    {
+        _grabberCritsect->Leave();
+        _videoMacCritsect->Leave();
+        // Error already logged
+        return -1;
+    }
+    _captureCapability.width = inst.width;
+    _captureCapability.height = inst.height;
+
+    // Connect the capture device to our offline GWorld
+    // if we already have a capture device selected.
+    if (_captureDevice)
+    {
+        DisconnectCaptureDevice();
+        if (ConnectCaptureDevice() == -1)
+        {
+            // Error already logged
+            _grabberCritsect->Leave();
+            _videoMacCritsect->Leave();
+            return -1;
+        }
+    }
+
+    // Start capture if we did before
+    if (wasCapturing)
+    {
+        if (StartQuickTimeCapture() == -1)
+        {
+            _grabberCritsect->Leave();
+            _videoMacCritsect->Leave();
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "%s:%d Failed to start capturing", __FUNCTION__,
+                         __LINE__);
+            return -1;
+        }
+    }
+    _grabberCritsect->Leave();
+    _videoMacCritsect->Leave();
+
+    return 0;
+}
+
+// Creates an off screen graphics world used for converting
+// captured video frames if we can't get a format we want.
+// Assumed protected by critsects
+int VideoCaptureMacQuickTime::CreateLocalGWorld(int width, int height)
+{
+    if (_gWorld)
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d GWorld already created", __FUNCTION__, __LINE__);
+        return -1;
+    }
+    if (width == 0 || height == 0)
+    {
+        return -1;
+    }
+
+    Rect captureRect;
+    captureRect.left = 0;
+    captureRect.top = 0;
+    captureRect.right = width;
+    captureRect.bottom = height;
+
+    // Create a GWorld in same size as we want to send to the codec
+    if (QTNewGWorld(&(_gWorld), k2vuyPixelFormat, &captureRect, 0, NULL, 0)
+        != noErr)
+    {
+        return -1;
+    }
+    _captureCapability.width = width;
+    _captureCapability.height = height;
+
+    if (!LockPixels(GetGWorldPixMap(_gWorld)))
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Could not lock pixmap. Continuing anyhow",
+                     __FUNCTION__, __LINE__);
+    }
+
+    CGrafPtr theOldPort;
+    GDHandle theOldDevice;
+    GetGWorld(&theOldPort, &theOldDevice); // Gets the result from QTGetNewGWorld
+    SetGWorld(_gWorld, NULL); // Sets the new GWorld
+    BackColor( blackColor); // Changes the color on the graphic port
+    ForeColor( whiteColor);
+    EraseRect(&captureRect);
+    SetGWorld(theOldPort, theOldDevice);
+
+    return 0;
+}
+
+// Assumed critsect protected
+int VideoCaptureMacQuickTime::RemoveLocalGWorld()
+{
+    if (!_gWorld)
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d !gWorld", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    DisposeGWorld(_gWorld);
+    _gWorld = NULL;
+    _captureCapability.width = START_CODEC_WIDTH;
+    _captureCapability.height = START_CODEC_HEIGHT;
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d GWorld has been removed", __FUNCTION__, __LINE__);
+    return 0;
+}
+
+// ConnectCaptureDevice
+// This function prepares the capture device
+// with the wanted settings, but the capture
+// device isn't started.
+//
+// Assumed critsect protected
+int VideoCaptureMacQuickTime::ConnectCaptureDevice()
+{
+    // Prepare the capture grabber if a capture device is already set
+    if (!_captureGrabber)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d No capture device is selected", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+    if (_captureIsInitialized)
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Capture device is already initialized",
+                     __FUNCTION__, __LINE__);
+        return -1;
+    }
+    if (!_gWorld)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d No GWorld is created", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    OSErr err = noErr;
+    long flags = 0;
+
+    // Connect the camera to our offline GWorld
+    // We won't use the GWorld if we get the format we want
+    // from the camera.
+    if (SGSetGWorld(_captureGrabber, _gWorld, NULL ) != noErr)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Could not connect capture device", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+    if (SGSetDataRef(_captureGrabber, 0, 0, seqGrabDontMakeMovie) != noErr)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Could not configure capture device", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+
+    // Set our capture callback
+    if (SGSetDataProc(_captureGrabber, NewSGDataUPP(SendProcess), (long) this)
+        != noErr)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Could not set capture callback. Unable to receive "
+                     "frames", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    // Create a video channel to the sequence grabber
+    if (SGNewChannel(_captureGrabber, VideoMediaType, &_captureChannel)
+        != noErr) // Takes time!!!
+    {
+        return -1;
+    }
+
+    // Get a list with all capture devices to choose the one we want.
+    SGDeviceList deviceList = NULL;
+    if (SGGetChannelDeviceList(_captureChannel, sgDeviceListIncludeInputs,
+                               &deviceList) != noErr)
+    {
+
+    }
+
+    int numDevicesTypes = (*deviceList)->count;
+    bool captureDeviceFound = false;
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d Found %d channel devices", __FUNCTION__, __LINE__,
+                 numDevicesTypes);
+
+    // Loop through all devices to get the one we want.
+    for (int i = 0; i < numDevicesTypes; i++)
+    {
+        SGDeviceName deviceTypeName = (*deviceList)->entry[i];
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Inspecting device number: %d", __FUNCTION__,
+                     __LINE__, i);
+        // Get the list with input devices
+        if (deviceTypeName.inputs)
+        {
+            SGDeviceInputList inputList = deviceTypeName.inputs;
+            int numInputDev = (*inputList)->count;
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                         "%s:%d Device has %d inputs", __FUNCTION__, __LINE__,
+                         numInputDev);
+            for (int inputDevIndex = 0;
+                 inputDevIndex < numInputDev;
+                 inputDevIndex++)
+            {
+                WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture,
+                             _id, "%s:%d Inspecting input number: %d",
+                             __FUNCTION__, __LINE__, inputDevIndex);
+                SGDeviceInputName deviceInputName =
+                    (*inputList)->entry[inputDevIndex];
+                char devInName[64];
+                memset(devInName, 0, 64);
+
+                // SGDeviceInputName::name is a Str63, defined as a Pascal string.
+                // (Refer to MacTypes.h)
+                CFIndex devInNameLength =
+                    PascalStringToCString(deviceInputName.name, devInName,
+                                          sizeof(devInName));
+                WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture,
+                             _id,
+                             "%s:%d Converted pascal string with length:%d  "
+                             "to: %s", __FUNCTION__, __LINE__,
+                             sizeof(devInName), devInName);
+                if (devInNameLength < 0)
+                {
+                    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
+                                 _id,
+                                 "%s:%d Failed to convert device name from "
+                                 "pascal string to c string", __FUNCTION__,
+                                 __LINE__);
+                    return -1;
+                }
+
+                if (!strcmp(devInName, _captureDeviceDisplayName))
+                {
+                    WEBRTC_TRACE(webrtc::kTraceDebug,
+                                 webrtc::kTraceVideoCapture, _id,
+                                 "%s:%d We have found our device: %s",
+                                 __FUNCTION__, __LINE__,
+                                 _captureDeviceDisplayName);
+
+                    if (SGSetChannelDevice(_captureChannel, deviceTypeName.name)
+                        != noErr)
+                    {
+                        WEBRTC_TRACE(webrtc::kTraceError,
+                                     webrtc::kTraceVideoCapture, _id,
+                                     "%s:%d Could not set capture device type: "
+                                     "%s",__FUNCTION__, __LINE__,
+                                     deviceTypeName.name);
+                        return -1;
+                    }
+
+                    WEBRTC_TRACE(webrtc::kTraceInfo,
+                                 webrtc::kTraceVideoCapture, _id,
+                                 "%s:%d Capture device type is: %s",
+                                 __FUNCTION__, __LINE__, deviceTypeName.name);
+                    if (SGSetChannelDeviceInput(_captureChannel, inputDevIndex)
+                        != noErr)
+                    {
+                        WEBRTC_TRACE(webrtc::kTraceError,
+                                     webrtc::kTraceVideoCapture, _id,
+                                     "%s:%d Could not set SG device",
+                                     __FUNCTION__, __LINE__);
+                        return -1;
+                    }
+
+                    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture,
+                                 _id,
+                                 "%s:%d Capture device: %s has successfully "
+                                 "been set", __FUNCTION__, __LINE__,
+                                 _captureDeviceDisplayName);
+                    captureDeviceFound = true;
+                    break;
+                }
+            }
+            if (captureDeviceFound)
+            {
+                WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture,
+                             _id,
+                             "%s:%d Capture device found, breaking from loops",
+                             __FUNCTION__, __LINE__);
+                break;
+            }
+        }
+    }
+    err = SGDisposeDeviceList(_captureGrabber, deviceList);
+
+    if (!captureDeviceFound)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Failed to find capture device: %s. Returning -1",
+                     __FUNCTION__, __LINE__, _captureDeviceDisplayName);
+        return -1;
+    }
+
+    // Set the size we want from the capture device
+    Rect captureSize;
+    captureSize.left = 0;
+    captureSize.top = 0;
+    captureSize.right = _captureCapability.width;
+    captureSize.bottom = _captureCapability.height;
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d Using capture rect: l:%d t:%d r:%d b:%d", __FUNCTION__,
+                 __LINE__, captureSize.left, captureSize.top,
+                 captureSize.right, captureSize.bottom);
+
+    err = SGSetChannelBounds(_captureChannel, &captureSize);
+    if (err == noErr)
+    {
+        err = SGSetChannelUsage(_captureChannel, flags | seqGrabRecord);
+    }
+    if (err != noErr)
+    {
+        SGDisposeChannel(_captureGrabber, _captureChannel);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Error setting SG channel to device", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+
+    // Find out what video format we'll get from the capture device.
+    OSType compType;
+    err = SGGetVideoCompressorType(_captureChannel, &compType);
+
+    // Convert the Apple video format name to a VideoCapture name.
+    if (compType == k2vuyPixelFormat)
+    {
+        _captureVideoType = kVideoUYVY;
+        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Device delivers UYUV formatted frames",
+                     __FUNCTION__, __LINE__);
+    }
+    else if (compType == kYUVSPixelFormat)
+    {
+        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Device delivers YUY2 formatted frames",
+                     __FUNCTION__, __LINE__);
+        _captureVideoType = kVideoYUY2;
+    }
+    else
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Device delivers frames in an unknown format: 0x%x. "
+                     "Consult QuickdrawTypes.h",
+                     __FUNCTION__, __LINE__, compType);
+        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Device delivers frames in an unknown format.",
+                     __FUNCTION__, __LINE__);
+        _captureVideoType = kVideoUnknown;
+    }
+
+    if (SGPrepare(_captureGrabber, false, true) != noErr)
+    {
+        _grabberCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Error starting sequence grabber", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+
+    // Try to set the codec size as capture size.
+    err = SGSetChannelBounds(_captureChannel, &captureSize);
+
+    // Check if we really will get the size we asked for.
+    ImageDescriptionHandle imageDesc = (ImageDescriptionHandle) NewHandle(0);
+    err = SGGetChannelSampleDescription(_captureChannel, (Handle) imageDesc);
+
+    _trueCaptureWidth = (**imageDesc).width;
+    _trueCaptureHeight = (**imageDesc).height;
+
+    DisposeHandle((Handle) imageDesc);
+
+    _captureIsInitialized = true;
+    _sgPrepared = true;
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d Success starting sequence grabber", __FUNCTION__,
+                 __LINE__);
+
+    return 0;
+}
+
+// Assumed critsect protected
+int VideoCaptureMacQuickTime::DisconnectCaptureDevice()
+{
+    if (_sgStarted)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Capture device is still running. Returning -1",
+                     __FUNCTION__, __LINE__);
+        return -1;
+    }
+    if (!_sgPrepared)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d No capture device connected", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+
+    // Close the capture channel
+    SGStop(_captureGrabber);
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d !!!! releasing sg stuff", __FUNCTION__, __LINE__);
+    SGDisposeChannel(_captureGrabber, _captureChannel);
+    SGRelease(_captureGrabber);
+    CloseComponent(_captureGrabber);
+
+    // Reset all values
+    _captureChannel = NULL;
+    _captureVideoType = kVideoUnknown;
+    _trueCaptureWidth = 0;
+    _trueCaptureHeight = 0;
+    _captureIsInitialized = false;
+    _sgPrepared = false;
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d Sequence grabber removed", __FUNCTION__, __LINE__);
+
+    return 0;
+}
+
+// StartQuickTimeCapture
+//
+// Actually starts the camera
+// 
+int VideoCaptureMacQuickTime::StartQuickTimeCapture()
+{
+    _grabberCritsect->Enter();
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d Attempting to start sequence grabber", __FUNCTION__,
+                 __LINE__);
+
+    if (_sgStarted)
+    {
+        _grabberCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Sequence grabber already started", __FUNCTION__,
+                     __LINE__);
+        return 0;
+    }
+    if (!_sgPrepared)
+    {
+        _grabberCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Sequence grabber not prepared properly",
+                     __FUNCTION__, __LINE__);
+        return 0;
+    }
+
+    if (SGStartRecord(_captureGrabber) != noErr)
+    {
+        _grabberCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Error starting sequence grabber", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+
+    Rect captureRect = { 0, 0, 0, 0 };
+    MatrixRecord scaleMatrix;
+    ImageDescriptionHandle imageDesc = (ImageDescriptionHandle) NewHandle(0);
+
+    // Get the sample description for the channel, which is the same as for the
+    // capture device
+    if (SGGetChannelSampleDescription(_captureChannel, (Handle) imageDesc)
+        != noErr)
+    {
+        _grabberCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Error accessing device properties", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+
+    // Create a scale matrix to scale the captured image
+    // Needed if we don't get the size wanted from the camera
+    captureRect.right = (**imageDesc).width;
+    captureRect.bottom = (**imageDesc).height;
+
+    Rect codecRect;
+    codecRect.left = 0;
+    codecRect.top = 0;
+    codecRect.right = _captureCapability.width;
+    codecRect.bottom = _captureCapability.height;
+    RectMatrix(&scaleMatrix, &captureRect, &codecRect);
+
+    // Start grabbing images from the capture device to _gWorld
+    if (DecompressSequenceBegin(&_captureSequence, imageDesc, _gWorld, NULL,
+                                NULL, &scaleMatrix, srcCopy, (RgnHandle) NULL,
+                                NULL, codecNormalQuality, bestSpeedCodec)
+        != noErr)
+    {
+        _grabberCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Error starting decompress sequence", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+    DisposeHandle((Handle) imageDesc);
+    _sgStarted = true;
+    _grabberCritsect->Leave();
+    return 0;
+}
+
+int VideoCaptureMacQuickTime::StopQuickTimeCapture(bool* wasCapturing)
+{
+    _grabberCritsect->Enter();
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d wasCapturing=%d", __FUNCTION__, __LINE__, wasCapturing);
+
+    if (!_sgStarted)
+    {
+        if (wasCapturing)
+            *wasCapturing = false;
+
+        _grabberCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Sequence grabber was never started", __FUNCTION__,
+                     __LINE__);
+        return 0;
+    }
+
+    if (wasCapturing)
+        *wasCapturing = true;
+
+    OSErr error = noErr;
+    error = SGStop(_captureGrabber);
+    CDSequenceEnd(_captureSequence);
+    _captureSequence = NULL;
+    _sgStarted = false;
+
+    _grabberCritsect->Leave();
+    if (error != noErr)
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Could not stop sequence grabber", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+
+    return 0;
+}
+
+//-------------------------------------------------
+//
+//  Thread/function to keep capture device working
+//
+//-------------------------------------------------
+
+
+//
+// GrabberUpdateThread / GrabberUpdateProcess
+//
+// Called at a certain time interval to tell
+// the capture device / SequenceGrabber to
+// actually work.
+bool VideoCaptureMacQuickTime::GrabberUpdateThread(void* obj)
+{
+    return static_cast<VideoCaptureMacQuickTime*> (obj)->GrabberUpdateProcess();
+}
+
+bool VideoCaptureMacQuickTime::GrabberUpdateProcess()
+{
+    _grabberUpdateEvent->Wait(30);
+
+    if (_isCapturing == false)
+        return false;
+
+    _grabberCritsect->Enter();
+    if (_captureGrabber)
+    {
+        if (SGIdle(_captureGrabber) != noErr)
+        {
+        }
+    }
+    _grabberCritsect->Leave();
+    return true;
+}
+
+//
+// VideoCaptureStop
+//
+// Stops the capture device
+//
+int VideoCaptureMacQuickTime::VideoCaptureStop()
+{
+    if (_grabberUpdateThread)
+    {
+        _grabberUpdateThread->Stop();
+    }
+
+    _videoMacCritsect->Enter();
+    _grabberCritsect->Enter();
+    int retVal = StopQuickTimeCapture();
+    _grabberCritsect->Leave();
+    _videoMacCritsect->Leave();
+    if (retVal == -1)
+    {
+        return -1;
+    }
+
+    _isCapturing = false;
+    return 0;
+}
+
+//
+// VideoCaptureRun
+//
+// Starts the capture device and creates
+// the update thread.
+//
+int VideoCaptureMacQuickTime::VideoCaptureRun()
+{
+    _videoMacCritsect->Enter();
+    _grabberCritsect->Enter();
+
+    int res = StartQuickTimeCapture();
+
+    // Create the thread for updating sequence grabber if not created earlier
+    if (!_grabberUpdateThread)
+    {
+        _grabberUpdateEvent = EventWrapper::Create();
+        _grabberUpdateThread = ThreadWrapper::CreateThread(
+            VideoCaptureMacQuickTime::GrabberUpdateThread, this, kHighPriority);
+        unsigned int id;
+        _grabberUpdateThread->Start(id);
+    }
+    else
+    {
+        unsigned int id;
+        _grabberUpdateThread->Start(id);
+    }
+
+    _grabberCritsect->Leave();
+    _videoMacCritsect->Leave();
+
+    _isCapturing = true;
+    return res;
+}
+
+// ---------------------------------------------------------------------- 
+//
+// SendProcess
+// sequence grabber data procedure
+//
+// This function is called by the capture device as soon as a new
+// frame is available.
+//
+//
+// SendFrame
+//
+// The non-static function used by the capture device callback
+//
+// Input:
+//        sgChannel: the capture device channel generating the callback
+//        data:      the video frame
+//        length:    the data length in bytes
+//        grabTime:  time stamp generated by the capture device / sequece grabber
+//
+// ----------------------------------------------------------------------
+
+OSErr VideoCaptureMacQuickTime::SendProcess(SGChannel sgChannel, Ptr p,
+                                            long len, long* /*offset*/,
+                                            long /*chRefCon*/, TimeValue time,
+                                            short /*writeType*/, long refCon)
+{
+    VideoCaptureMacQuickTime* videoEngine =
+        reinterpret_cast<VideoCaptureMacQuickTime*> (refCon);
+    return videoEngine->SendFrame(sgChannel, (char*) p, len, time);
+}
+
+int VideoCaptureMacQuickTime::SendFrame(SGChannel /*sgChannel*/, char* data,
+                                        long length, TimeValue /*grabTime*/)
+{
+    if (!_sgPrepared)
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Sequence Grabber is not initialized", __FUNCTION__,
+                     __LINE__);
+        return 0;
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d Frame has been delivered\n", __FUNCTION__, __LINE__);
+
+    CodecFlags ignore;
+    _grabberCritsect->Enter();
+    if (_gWorld)
+    {
+        // Will be set to true if we don't recognize the size and/or video
+        // format.
+        bool convertFrame = false;
+        WebRtc_Word32 width = 352;
+        WebRtc_Word32 height = 288;
+        WebRtc_Word32 frameSize = 0;
+
+        VideoCaptureCapability captureCapability;
+        captureCapability.width = width;
+        captureCapability.height = height;
+        captureCapability.maxFPS = 30;
+
+        switch (_captureVideoType)
+        {
+            case kVideoUYVY:
+                captureCapability.rawType = kVideoUYVY;
+                break;
+            case kVideoYUY2:
+                captureCapability.rawType = kVideoYUY2;
+                break;
+            case kVideoI420:
+                captureCapability.rawType = kVideoI420;
+                break;
+            default:
+                captureCapability.rawType = kVideoI420;
+                WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture,
+                             _id, "%s:%d raw = I420 by default\n",
+                             __FUNCTION__, __LINE__);
+                break;
+        }
+
+        // Convert the camera video type to something VideoEngine can work with
+        // Check if we need to downsample the incomming frame.
+        switch (_captureVideoType)
+        {
+            case kVideoUYVY:
+            case kVideoYUY2:
+                frameSize = (width * height * 16) >> 3; // 16 is for YUY2 format
+                if (width == _captureCapability.width || height
+                    == _captureCapability.height)
+                {
+                    // Ok format and size, send the frame to super class
+                    IncomingFrame((WebRtc_UWord8*) data,
+                                  (WebRtc_Word32) frameSize, captureCapability,
+                                  TickTime::MillisecondTimestamp());
+
+                }
+                else if (width == _trueCaptureWidth && height
+                    == _trueCaptureHeight)
+                {
+                    // We need to scale the picture to correct size...
+                    // This happens for cameras not supporting all sizes.
+                    // E.g. older built-in iSight doesn't support QCIF.
+
+                    // Convert the incoming frame into our GWorld.
+                    int res =
+                        DecompressSequenceFrameS(_captureSequence, data,
+                                                 length, 0, &ignore, NULL);
+                    if (res != noErr && res != -8976) // 8796 ==  black frame
+                    {
+                        WEBRTC_TRACE(webrtc::kTraceWarning,
+                                     webrtc::kTraceVideoCapture, _id,
+                                     "%s:%d Captured black frame. Not "
+                                     "processing it", __FUNCTION__, __LINE__);
+                        _grabberCritsect->Leave();
+                        return 0;
+                    }
+
+                    // Copy the frame from the PixMap to our video buffer
+                    PixMapHandle pixMap = GetGWorldPixMap(_gWorld);
+
+                    // Lock the image data in the GWorld.
+                    LockPixels(pixMap);
+
+                    // Get a pointer to the pixel data.
+                    Ptr capturedFrame = GetPixBaseAddr(pixMap);
+
+                    // Send the converted frame out to super class
+                    IncomingFrame((WebRtc_UWord8*) data,
+                                  (WebRtc_Word32) frameSize, captureCapability,
+                                  TickTime::MillisecondTimestamp());
+
+                    // Unlock the image data to get ready for the next frame.
+                    UnlockPixels(pixMap);
+                }
+                else
+                {
+                    // Not a size we recognize, use the Mac internal scaling...
+                    convertFrame = true;
+                    WEBRTC_TRACE(webrtc::kTraceDebug,
+                                 webrtc::kTraceVideoCapture, _id,
+                                 "%s:%d Not correct incoming stream size for "
+                                 "the format and configured size",
+                                 __FUNCTION__, __LINE__);
+                }
+                break;
+            default:
+
+                // Not a video format we recognize, use the Mac internal scaling
+                WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture,
+                             _id, "%s:%d Unknown video frame format (default)",
+                             __FUNCTION__, __LINE__);
+                convertFrame = true;
+                break;
+        }
+
+        if (convertFrame)
+        {
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                         "%s:%d Unrecognized frame format. Converting frame",
+                         __FUNCTION__, __LINE__);
+
+            // We don't recognise the input format. Convert to UYVY, I420 is not
+            // supported on osx. Decompress the grabbed frame into the GWorld,
+            // i.e from webcam format to ARGB (RGB24), and extract the frame.
+            int res = DecompressSequenceFrameS(_captureSequence, data, length,
+                                               0, &ignore, NULL);
+            if (res != noErr && res != -8976) // 8796 means a black frame
+            {
+                _grabberCritsect->Leave();
+                return 0;
+            }
+
+            // Copy the frame from the PixMap to our video buffer
+            PixMapHandle rgbPixMap = GetGWorldPixMap(_gWorld);
+            LockPixels(rgbPixMap);
+            Ptr capturedFrame = GetPixBaseAddr(rgbPixMap);
+
+            // Get the picture size
+            int width = (*rgbPixMap)->bounds.right;
+            int height = (*rgbPixMap)->bounds.bottom;
+
+            // 16 is for YUY2 format.
+            WebRtc_Word32 frameSize = (width * height * 16) >> 3;
+
+            // Ok format and size, send the frame to super class
+            IncomingFrame((WebRtc_UWord8*) data, (WebRtc_Word32) frameSize,
+                          captureCapability, TickTime::MillisecondTimestamp());
+
+            UnlockPixels(rgbPixMap);
+        }
+
+        // Tell the capture device it's ok to update.
+        SGUpdate(_captureGrabber, NULL);
+    }
+    else
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d No GWorld created, but frames are being delivered",
+                     __FUNCTION__, __LINE__);
+    }
+
+    _grabberCritsect->Leave();
+    return 0;
+}
+
+int VideoCaptureMacQuickTime::VideoCaptureInitThreadContext()
+{
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d ", __FUNCTION__, __LINE__);
+    _videoMacCritsect->Enter();
+    EnterMoviesOnThread( kQTEnterMoviesFlagDontSetComponentsThreadMode);
+    _videoMacCritsect->Leave();
+    return 0;
+}
+
+//
+//
+//  Functions for handling capture devices
+//
+//
+
+VideoCaptureMacQuickTime::VideoCaptureMacName::VideoCaptureMacName() :
+    _size(0)
+{
+    memset(_name, 0, kVideoCaptureMacNameMaxSize);
+}
+
+int VideoCaptureMacQuickTime::VideoCaptureSetCaptureDevice(
+    const char* deviceName, int size)
+{
+
+
+    _videoMacCritsect->Enter();
+    bool wasCapturing = false;
+
+    _grabberCritsect->Enter();
+    if (_captureGrabber)
+    {
+        // Stop grabbing, disconnect and close the old capture device
+        StopQuickTimeCapture(&wasCapturing);
+        DisconnectCaptureDevice();
+        CloseComponent(_captureGrabber);
+        _captureDevice = NULL;
+        _captureGrabber = NULL;
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Old capture device removed", __FUNCTION__,
+                     __LINE__);
+    }
+
+    if (deviceName == NULL || size == 0)
+    {
+        _grabberCritsect->Leave();
+        _videoMacCritsect->Leave();
+        return 0;
+    }
+
+    if (size < 0)
+    {
+        _grabberCritsect->Leave();
+        _videoMacCritsect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d 'size' is not valid", __FUNCTION__, __LINE__);
+        return 0;
+    }
+
+    ComponentDescription compCaptureType;
+
+    // Define the component we want to open
+    compCaptureType.componentType = SeqGrabComponentType;
+    compCaptureType.componentSubType = 0;
+    compCaptureType.componentManufacturer = 0;
+    compCaptureType.componentFlags = 0;
+    compCaptureType.componentFlagsMask = 0;
+
+    long numSequenceGrabbers = CountComponents(&compCaptureType);
+
+    // loop through the available grabbers and open the first possible
+    for (int i = 0; i < numSequenceGrabbers; i++)
+    {
+        _captureDevice = FindNextComponent(0, &compCaptureType);
+        _captureGrabber = OpenComponent(_captureDevice);
+        if (_captureGrabber != NULL)
+        {
+            // We've found a sequencegrabber that we could open
+            if (SGInitialize(_captureGrabber) != noErr)
+            {
+                _grabberCritsect->Leave();
+                _videoMacCritsect->Leave();
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
+                             _id,
+                             "%s:%d Could not initialize sequence grabber",
+                             __FUNCTION__, __LINE__);
+                return -1;
+            }
+            break;
+        }
+        if (i == numSequenceGrabbers - 1)
+        {
+            // Couldn't open a sequence grabber
+            _grabberCritsect->Leave();
+            _videoMacCritsect->Leave();
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "%s:%d Could not open a sequence grabber",
+                         __FUNCTION__, __LINE__);
+            return -1;
+        }
+    }
+
+    if (!_gWorld)
+    {
+        // We don't have a GWorld. Create one to enable early preview
+        // without calling SetSendCodec
+        if (CreateLocalGWorld(_captureCapability.width,
+                              _captureCapability.height) == -1)
+        {
+            // Error already logged
+            _grabberCritsect->Leave();
+            _videoMacCritsect->Leave();
+            return -1;
+        }
+    }
+    // Connect the camera with our GWorld
+    int cpySize = size;
+    if ((unsigned int) size > sizeof(_captureDeviceDisplayName))
+    {
+        cpySize = sizeof(_captureDeviceDisplayName);
+    }
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d Copying %d chars from deviceName to "
+                 "_captureDeviceDisplayName (size=%d)\n",
+                 __FUNCTION__, __LINE__, cpySize, size);
+    memcpy(_captureDeviceDisplayName, deviceName, cpySize);
+    if (ConnectCaptureDevice() == -1)
+    {
+        // Error already logged
+        _grabberCritsect->Leave();
+        _videoMacCritsect->Leave();
+        return -1;
+    }
+
+    if (StartQuickTimeCapture() == -1)
+    {
+        // Error already logged
+        _grabberCritsect->Leave();
+        _videoMacCritsect->Leave();
+        return -1;
+    }
+    _grabberCritsect->Leave();
+    _videoMacCritsect->Leave();
+    return 0;
+}
+
+bool VideoCaptureMacQuickTime::IsCaptureDeviceSelected()
+{
+    _grabberCritsect->Leave();
+    return (_captureIsInitialized) ? true : false;
+    _grabberCritsect->Leave();
+}
+
+/**
+ Convert a Pascal string to a C string.
+ 
+ \param[in]  pascalString
+ Pascal string to convert. Pascal strings contain the number of 
+ characters in the first byte and are not null-terminated.
+ 
+ \param[out] cString
+ The C string buffer into which to copy the converted string.
+ 
+ \param[in]  bufferSize
+ The size of the C string buffer in bytes.
+ 
+ \return The number of characters in the string on success and -1 on failure.
+ */
+CFIndex VideoCaptureMacQuickTime::PascalStringToCString(
+    const unsigned char* pascalString, char* cString, CFIndex bufferSize)
+{
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, 0,
+                 "%s:%d Converting pascal string to c string", __FUNCTION__,
+                 __LINE__);
+    if (pascalString == NULL)
+    {
+        return -1;
+    }
+
+    if (cString == NULL)
+    {
+        return -1;
+    }
+
+    if (bufferSize == 0)
+    {
+        return -1;
+    }
+
+    CFIndex cStringLength = 0;
+    CFIndex maxStringLength = bufferSize - 1;
+
+    CFStringRef cfString = CFStringCreateWithPascalString(
+        NULL, pascalString, kCFStringEncodingMacRoman);
+    if (cfString == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, 0,
+                     "%s:%d Error in CFStringCreateWithPascalString()",
+                     __FUNCTION__, __LINE__);
+        CFRelease(cfString);
+        return -1;
+    }
+
+    CFIndex cfLength = CFStringGetLength(cfString);
+    cStringLength = cfLength;
+    if (cfLength > maxStringLength)
+    {
+        cStringLength = maxStringLength;
+    }
+
+    Boolean success = CFStringGetCString(cfString, cString, bufferSize,
+                                         kCFStringEncodingMacRoman);
+
+    // Ensure the problem isn't insufficient buffer length.
+    // This is fine; we will return a partial string.
+    if (success == false && cfLength <= maxStringLength)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, 0,
+                     "%s:%d Error in CFStringGetCString()", __FUNCTION__,
+                     __LINE__);
+        CFRelease(cfString);
+        return -1;
+    }
+
+    CFRelease(cfString);
+    return cStringLength;
+}
+}  // namespace webrtc
diff --git a/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time.h b/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time.h
new file mode 100644
index 0000000..84e0667
--- /dev/null
+++ b/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time.h
@@ -0,0 +1,133 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  video_capture_quick_time.h
+ *
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QUICKTIME_VIDEO_CAPTURE_QUICK_TIME_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QUICKTIME_VIDEO_CAPTURE_QUICK_TIME_H_
+
+#include <QuickTime/QuickTime.h>
+
+
+#include "../../device_info_impl.h"
+#include "../../video_capture_impl.h"
+#include "list_wrapper.h"
+
+
+#define START_CODEC_WIDTH 352
+#define START_CODEC_HEIGHT 288
+#define SLEEP(x) usleep(x * 1000);
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+
+class VideoCaptureMacQuickTime : public VideoCaptureImpl
+{
+
+public:
+	VideoCaptureMacQuickTime(const WebRtc_Word32 id);
+	virtual ~VideoCaptureMacQuickTime();
+
+	static void Destroy(VideoCaptureModule* module);
+
+    WebRtc_Word32 Init(const WebRtc_Word32 id,
+                       const WebRtc_UWord8* deviceUniqueIdUTF8);
+	virtual WebRtc_Word32 StartCapture(
+	    const VideoCaptureCapability& capability);
+    virtual WebRtc_Word32 StopCapture();
+	virtual bool CaptureStarted();
+	virtual WebRtc_Word32 CaptureSettings(VideoCaptureCapability& settings);
+
+    // TODO: remove?
+    int VideoCaptureInitThreadContext();
+    int VideoCaptureTerminate();
+    int VideoCaptureSetCaptureDevice(const char* deviceName, int size);
+	int UpdateCaptureSettings(int channel, webrtc::VideoCodec& inst, bool def);
+    int VideoCaptureRun();
+    int VideoCaptureStop();
+
+protected:
+
+private: // functions
+
+    struct VideoCaptureMacName
+    {
+        VideoCaptureMacName();
+
+        enum { kVideoCaptureMacNameMaxSize = 64};
+        char _name[kVideoCaptureMacNameMaxSize];
+        CFIndex _size;
+    };
+
+    // Timeout value [ms] if we want to create a new device list or not
+    enum { kVideoCaptureDeviceListTimeout =     5000};
+    // Temporary constant allowing this size from builtin iSight webcams.
+    enum { kYuy2_1280_1024_length = 2621440};
+
+private:
+
+    // Capture device callback
+    static OSErr SendProcess(SGChannel sgChannel, Ptr p, long len, long *offset,
+                             long chRefCon, TimeValue time, short writeType,
+                             long refCon);
+    int SendFrame(SGChannel sgChannel, char* data, long length, TimeValue time);
+
+    // Capture device functions
+    int CreateLocalGWorld(int width, int height);
+    int RemoveLocalGWorld();
+    int ConnectCaptureDevice();
+    int DisconnectCaptureDevice();
+    virtual bool IsCaptureDeviceSelected();
+
+    // Process to make sure the capture device won't stop
+    static bool GrabberUpdateThread(void*);
+    bool GrabberUpdateProcess();
+
+    // Starts and stops the capture
+    int StartQuickTimeCapture();
+    int StopQuickTimeCapture(bool* wasCapturing = NULL);
+
+    static CFIndex PascalStringToCString(const unsigned char* pascalString,
+                                         char* cString,
+                                         CFIndex bufferSize);
+
+private: // variables
+	WebRtc_Word32			_id;
+	bool					_isCapturing;
+	VideoCaptureCapability	_captureCapability;
+    CriticalSectionWrapper* _grabberCritsect;
+    CriticalSectionWrapper* _videoMacCritsect;
+    bool                    _terminated;
+    webrtc::ThreadWrapper*  _grabberUpdateThread;
+    webrtc::EventWrapper*           _grabberUpdateEvent;
+    SeqGrabComponent        _captureGrabber;
+    Component               _captureDevice;
+    char                    _captureDeviceDisplayName[64];
+	RawVideoType		    _captureVideoType;
+    bool                    _captureIsInitialized;
+    GWorldPtr               _gWorld;
+    SGChannel               _captureChannel;
+    ImageSequence           _captureSequence;
+    bool                    _sgPrepared;
+    bool                    _sgStarted;
+    int                     _trueCaptureWidth;
+    int                     _trueCaptureHeight;
+    ListWrapper             _captureDeviceList;
+    unsigned long           _captureDeviceListTime;
+    ListWrapper             _captureCapabilityList;
+};
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QUICKTIME_VIDEO_CAPTURE_QUICK_TIME_H_
diff --git a/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time_info.cc b/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time_info.cc
new file mode 100644
index 0000000..152ab70
--- /dev/null
+++ b/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time_info.cc
@@ -0,0 +1,391 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  video_capture_quick_time_info.cc
+ *
+ */
+
+#include "../../video_capture_config.h"
+#include "video_capture_quick_time_info.h"
+
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+#include "trace.h"
+#include "video_capture.h"
+
+namespace webrtc
+{
+
+VideoCaptureMacQuickTimeInfo::VideoCaptureMacQuickTimeInfo(
+    const WebRtc_Word32 iID) :
+    DeviceInfoImpl(iID), _id(iID),
+    _grabberCritsect(CriticalSectionWrapper::CreateCriticalSection())
+{
+}
+
+VideoCaptureMacQuickTimeInfo::~VideoCaptureMacQuickTimeInfo()
+{
+}
+
+WebRtc_Word32 VideoCaptureMacQuickTimeInfo::Init()
+{
+
+    return 0;
+}
+
+WebRtc_UWord32 VideoCaptureMacQuickTimeInfo::NumberOfDevices()
+{
+    int numOfDevices = 0;
+
+    // don't care about these variables... dummy vars to call GetCaptureDevices
+    const int kNameLength = 1024;
+    char deviceNameUTF8[kNameLength] = "";
+    char deviceUniqueIdUTF8[kNameLength] = "";
+    char productUniqueIdUTF8[kNameLength] = "";
+
+    if (GetCaptureDevices(0, deviceNameUTF8, kNameLength, deviceUniqueIdUTF8,
+                          kNameLength, productUniqueIdUTF8, kNameLength,
+                          numOfDevices) != 0)
+    {
+        return 0;
+    }
+
+    return numOfDevices;
+}
+
+WebRtc_Word32 VideoCaptureMacQuickTimeInfo::GetDeviceName(
+    WebRtc_UWord32 deviceNumber, char* deviceNameUTF8,
+    WebRtc_UWord32 deviceNameUTF8Length, char* deviceUniqueIdUTF8,
+    WebRtc_UWord32 deviceUniqueIdUTF8Length, char* productUniqueIdUTF8,
+    WebRtc_UWord32 productUniqueIdUTF8Length)
+{
+
+    int numOfDevices = 0; // not needed for this function
+    return GetCaptureDevices(deviceNumber, deviceNameUTF8,
+                             deviceNameUTF8Length, deviceUniqueIdUTF8,
+                             deviceUniqueIdUTF8Length, productUniqueIdUTF8,
+                             productUniqueIdUTF8Length, numOfDevices);
+}
+
+WebRtc_Word32 VideoCaptureMacQuickTimeInfo::NumberOfCapabilities(
+    const char* deviceUniqueIdUTF8)
+{
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                 "NumberOfCapabilities is not supported on the Mac platform.");
+    return -1;
+}
+
+WebRtc_Word32 VideoCaptureMacQuickTimeInfo::GetCapability(
+    const char* deviceUniqueIdUTF8,
+    const WebRtc_UWord32 deviceCapabilityNumber,
+    VideoCaptureCapability& capability)
+{
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                 "NumberOfCapabilities is not supported on the Mac platform.");
+    return -1;
+}
+
+WebRtc_Word32 VideoCaptureMacQuickTimeInfo::GetBestMatchedCapability(
+    const char*deviceUniqueIdUTF8,
+    const VideoCaptureCapability& requested, VideoCaptureCapability& resulting)
+{
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "NumberOfCapabilities is not supported on the Mac platform.");
+    return -1;
+}
+
+WebRtc_Word32 VideoCaptureMacQuickTimeInfo::DisplayCaptureSettingsDialogBox(
+    const char* deviceUniqueIdUTF8,
+    const char* dialogTitleUTF8, void* parentWindow,
+    WebRtc_UWord32 positionX, WebRtc_UWord32 positionY)
+{
+     return -1;
+}
+
+WebRtc_Word32 VideoCaptureMacQuickTimeInfo::CreateCapabilityMap(
+    const char* deviceUniqueIdUTF8)
+{
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "NumberOfCapabilities is not supported on the Mac platform.");
+    return -1;
+}
+
+int VideoCaptureMacQuickTimeInfo::GetCaptureDevices(
+    WebRtc_UWord32 deviceNumber, char* deviceNameUTF8,
+    WebRtc_UWord32 deviceNameUTF8Length, char* deviceUniqueIdUTF8,
+    WebRtc_UWord32 deviceUniqueIdUTF8Length, char* productUniqueIdUTF8,
+    WebRtc_UWord32 productUniqueIdUTF8Length, int& numberOfDevices)
+{
+
+
+    numberOfDevices = 0;
+    memset(deviceNameUTF8, 0, deviceNameUTF8Length);
+    memset(deviceUniqueIdUTF8, 0, deviceUniqueIdUTF8Length);
+    memset(productUniqueIdUTF8, 0, productUniqueIdUTF8Length);
+
+    if (deviceNumber < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Invalid deviceNumber", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    Component captureDevice = NULL;
+    SeqGrabComponent captureGrabber = NULL;
+    SGChannel captureChannel = NULL;
+    bool closeChannel = false;
+
+    ComponentDescription compCaptureType;
+
+    compCaptureType.componentType = SeqGrabComponentType;
+    compCaptureType.componentSubType = 0;
+    compCaptureType.componentManufacturer = 0;
+    compCaptureType.componentFlags = 0;
+    compCaptureType.componentFlagsMask = 0;
+
+    // Get the number of sequence grabbers
+    long numSequenceGrabbers = CountComponents(&compCaptureType);
+
+    if (deviceNumber > numSequenceGrabbers)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Invalid deviceNumber", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    if (numSequenceGrabbers <= 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d No sequence grabbers available", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+
+    // Open a sequence grabber
+    for (int seqGrabberIndex = 0;
+         seqGrabberIndex < numSequenceGrabbers;
+         seqGrabberIndex++)
+    {
+        captureDevice = FindNextComponent(0, &compCaptureType);
+        captureGrabber = OpenComponent(captureDevice);
+        if (captureGrabber != NULL)
+        {
+            // We've found a sequencegrabber
+            if (SGInitialize(captureGrabber) != noErr)
+            {
+                CloseComponent(captureGrabber);
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
+                             _id, "%s:%d Could not init the sequence grabber",
+                             __FUNCTION__, __LINE__);
+                return -1;
+            }
+            break;
+        }
+        if (seqGrabberIndex == numSequenceGrabbers - 1)
+        {
+            // Couldn't open a sequence grabber
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "%s:%d Could not open a sequence grabber",
+                         __FUNCTION__, __LINE__);
+            return -1;
+        }
+    }
+
+    // Create a temporary channel to get the names of the capture devices.
+    // Takes time, make this in a nother way...
+    if (SGNewChannel(captureGrabber, VideoMediaType, &captureChannel) != noErr)
+    {
+        // Could not create a video channel...
+        SGRelease(captureGrabber);
+        CloseComponent(captureGrabber);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Could not create a sequence grabber video channel",
+                     __FUNCTION__, __LINE__);
+        return -1;
+    }
+    closeChannel = true;
+
+    // Find the type of capture devices, e.g. USB-devices, Firewire, DV, ...
+    SGDeviceList deviceList = NULL;
+    if (SGGetChannelDeviceList(captureChannel, sgDeviceListIncludeInputs,
+                               &deviceList) != noErr)
+    {
+        if (closeChannel)
+            SGDisposeChannel(captureGrabber, captureChannel);
+        if (captureGrabber)
+        {
+            SGRelease(captureGrabber);
+            CloseComponent(captureGrabber);
+        }
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "%s:%d Could not create a device list", __FUNCTION__,
+                     __LINE__);
+        return -1;
+    }
+
+    // Loop through all device types and all devices for each type
+    // and store in a list.
+    int numDevices = (*deviceList)->count;
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d Found %d devices", __FUNCTION__, __LINE__, numDevices);
+
+    for (int i = 0; i < numDevices; i++)
+    {
+
+        SGDeviceName sgDeviceName = (*deviceList)->entry[i];
+        // Get the list with input devices for this type of device
+        if (sgDeviceName.inputs)
+        {
+            SGDeviceInputList inputList = sgDeviceName.inputs;
+            int numInputDev = (*inputList)->count;
+
+            for (int inputDevIndex = 0;
+                 inputDevIndex < numInputDev;
+                 inputDevIndex++)
+            {
+                // Get the name for this capture device
+                SGDeviceInputName deviceInputName =
+                    (*inputList)->entry[inputDevIndex];
+
+                VideoCaptureMacName* deviceName = new VideoCaptureMacName();
+
+                deviceName->_size = PascalStringToCString(
+                    deviceInputName.name, deviceName->_name,
+                    sizeof(deviceName->_name));
+
+                if (deviceName->_size > 0)
+                {
+                    WEBRTC_TRACE(webrtc::kTraceDebug,webrtc::kTraceVideoCapture,
+                                 _id,
+                                 "%s:%d Capture device %d: %s was successfully "
+                                 "set", __FUNCTION__, __LINE__, numberOfDevices,
+                                 deviceName->_name);
+
+                    if (numberOfDevices == deviceNumber)
+                    {
+                        strcpy((char*) deviceNameUTF8, deviceName->_name);
+                        strcpy((char*) deviceUniqueIdUTF8, deviceName->_name);
+                    }
+                    numberOfDevices++;
+                }
+                else
+                {
+                    delete deviceName;
+
+                    if (deviceName->_size < 0)
+                    {
+                        WEBRTC_TRACE(webrtc::kTraceError,
+                                     webrtc::kTraceVideoCapture, _id,
+                                     "%s:%d Error in PascalStringToCString",
+                                     __FUNCTION__, __LINE__);
+                        return -1;
+                    }
+                }
+            }
+        }
+    }
+
+    // clean up
+    SGDisposeDeviceList(captureGrabber, deviceList);
+    if (closeChannel)
+    {
+        SGDisposeChannel(captureGrabber, captureChannel);
+    }
+    if (captureGrabber)
+    {
+        SGRelease(captureGrabber);
+        CloseComponent(captureGrabber);
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                 "%s:%d End function successfully", __FUNCTION__, __LINE__);
+    return 0;
+}
+
+/**
+ Convert a Pascal string to a C string.
+ 
+ \param[in]  pascalString
+ Pascal string to convert. Pascal strings contain the number of 
+ characters in the first byte and are not null-terminated.
+ 
+ \param[out] cString
+ The C string buffer into which to copy the converted string.
+ 
+ \param[in]  bufferSize
+ The size of the C string buffer in bytes.
+ 
+ \return The number of characters in the string on success and -1 on failure.
+ */
+CFIndex VideoCaptureMacQuickTimeInfo::PascalStringToCString(
+    const unsigned char* pascalString, char* cString, CFIndex bufferSize)
+{
+    if (pascalString == NULL)
+    {
+        return -1;
+    }
+
+    if (cString == NULL)
+    {
+        return -1;
+    }
+
+    if (bufferSize == 0)
+    {
+        return -1;
+    }
+
+    CFIndex cStringLength = 0;
+    CFIndex maxStringLength = bufferSize - 1;
+
+    CFStringRef cfString = CFStringCreateWithPascalString(
+        NULL, pascalString, kCFStringEncodingMacRoman);
+    if (cfString == NULL)
+    {
+        CFRelease(cfString);
+        return -1;
+    }
+
+    CFIndex cfLength = CFStringGetLength(cfString);
+    cStringLength = cfLength;
+    if (cfLength > maxStringLength)
+    {
+        cStringLength = maxStringLength;
+    }
+
+    Boolean success = CFStringGetCString(cfString, cString, bufferSize,
+                                         kCFStringEncodingMacRoman);
+
+    // Ensure the problem isn't insufficient buffer length.
+    // This is fine; we will return a partial string.
+    if (success == false && cfLength <= maxStringLength)
+    {
+        CFRelease(cfString);
+        return -1;
+    }
+
+    CFRelease(cfString);
+    return cStringLength;
+}
+
+//
+//
+//  Functions for handling capture devices
+//
+//
+
+VideoCaptureMacQuickTimeInfo::VideoCaptureMacName::VideoCaptureMacName() :
+    _size(0)
+{
+    memset(_name, 0, kVideoCaptureMacNameMaxSize);
+}
+}  // namespace webrtc
diff --git a/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time_info.h b/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time_info.h
new file mode 100644
index 0000000..4cc8483
--- /dev/null
+++ b/src/modules/video_capture/main/source/Mac/QuickTime/video_capture_quick_time_info.h
@@ -0,0 +1,166 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  video_capture_quick_time_info.h
+ *
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QUICKTIME_VIDEO_CAPTURE_QUICK_TIME_INFO_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QUICKTIME_VIDEO_CAPTURE_QUICK_TIME_INFO_H_
+
+#include <QuickTime/QuickTime.h>
+
+#include "../../video_capture_impl.h"
+#include "../../device_info_impl.h"
+#include "list_wrapper.h"
+#include "map_wrapper.h"
+
+class VideoRenderCallback;
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+class Trace;
+
+class VideoCaptureMacQuickTimeInfo: public DeviceInfoImpl
+{
+public:
+
+    static DeviceInfo* Create(const WebRtc_Word32 id);
+    static void Destroy(DeviceInfo* deviceInfo);
+
+    VideoCaptureMacQuickTimeInfo(const WebRtc_Word32 id);
+    virtual ~VideoCaptureMacQuickTimeInfo();
+
+    WebRtc_Word32 Init();
+
+    virtual WebRtc_UWord32 NumberOfDevices();
+
+    /*
+     * Returns the available capture devices.
+     * deviceNumber   -[in] index of capture device
+     * deviceNameUTF8 - friendly name of the capture device
+     * deviceUniqueIdUTF8 - unique name of the capture device if it exist.
+     *                      Otherwise same as deviceNameUTF8
+     * productUniqueIdUTF8 - unique product id if it exist. Null terminated
+     *                       otherwise.
+     */
+    virtual WebRtc_Word32 GetDeviceName(
+        WebRtc_UWord32 deviceNumber, char* deviceNameUTF8,
+        WebRtc_UWord32 deviceNameLength, char* deviceUniqueIdUTF8,
+        WebRtc_UWord32 deviceUniqueIdUTF8Length,
+        char* productUniqueIdUTF8 = 0,
+        WebRtc_UWord32 productUniqueIdUTF8Length = 0);
+
+
+    // ************** The remaining public functions are not supported on Mac
+
+    /*
+     *   Returns the number of capabilities for this device
+     */
+    virtual WebRtc_Word32 NumberOfCapabilities(const char* deviceUniqueIdUTF8);
+
+    /*
+     *   Gets the capabilities of the named device
+     */
+    virtual WebRtc_Word32 GetCapability(
+        const char* deviceUniqueIdUTF8,
+        const WebRtc_UWord32 deviceCapabilityNumber,
+        VideoCaptureCapability& capability);
+
+    /*
+     *  Gets the capability that best matches the requested width, height and frame rate.
+     *  Returns the deviceCapabilityNumber on success.
+     */
+    virtual WebRtc_Word32 GetBestMatchedCapability(
+        const char* deviceUniqueIdUTF8,
+        const VideoCaptureCapability& requested,
+        VideoCaptureCapability& resulting);
+
+    /*
+     * Display OS /capture device specific settings dialog
+     */
+    virtual WebRtc_Word32 DisplayCaptureSettingsDialogBox(
+        const char* deviceUniqueIdUTF8,
+        const char* dialogTitleUTF8, void* parentWindow,
+        WebRtc_UWord32 positionX, WebRtc_UWord32 positionY);
+
+protected:
+    virtual WebRtc_Word32 CreateCapabilityMap(
+        const char* deviceUniqueIdUTF8);
+
+private:
+
+    struct VideoCaptureMacName
+    {
+        VideoCaptureMacName();
+
+        enum
+        {
+            kVideoCaptureMacNameMaxSize = 64
+        };
+        char _name[kVideoCaptureMacNameMaxSize];
+        CFIndex _size;
+    };
+
+    enum
+    {
+        kVideoCaptureMacDeviceListTimeout = 5000
+    }; // Timeout value [ms] if we want to create a new device list or not
+    enum
+    {
+        kYuy2_1280_1024_length = 2621440
+    }; // Temporary constant allowing this size from built-in iSight webcams.
+
+private:
+    // private methods
+
+    int GetCaptureDevices(WebRtc_UWord32 deviceNumber,
+                          char* deviceNameUTF8,
+                          WebRtc_UWord32 deviceNameUTF8Length,
+                          char* deviceUniqueIdUTF8,
+                          WebRtc_UWord32 deviceUniqueIdUTF8Length,
+                          char* productUniqueIdUTF8,
+                          WebRtc_UWord32 productUniqueIdUTF8Length,
+                          int& numberOfDevices);
+
+    static CFIndex PascalStringToCString(const unsigned char* pascalString,
+                                         char* cString, CFIndex bufferSize);
+
+private:
+    // member vars
+    WebRtc_Word32 _id;
+    bool _terminated;
+    CriticalSectionWrapper* _grabberCritsect;
+    webrtc::Trace* _trace;
+    webrtc::ThreadWrapper* _grabberUpdateThread;
+    webrtc::EventWrapper* _grabberUpdateEvent;
+    SeqGrabComponent _captureGrabber;
+    Component _captureDevice;
+    char _captureDeviceDisplayName[64];
+    bool _captureIsInitialized;
+    GWorldPtr _gWorld;
+    SGChannel _captureChannel;
+    ImageSequence _captureSequence;
+    bool _sgPrepared;
+    bool _sgStarted;
+    int _codecWidth;
+    int _codecHeight;
+    int _trueCaptureWidth;
+    int _trueCaptureHeight;
+    ListWrapper _captureDeviceList;
+    WebRtc_Word64 _captureDeviceListTime;
+    ListWrapper _captureCapabilityList;
+};
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_MAC_QUICKTIME_VIDEO_CAPTURE_QUICK_TIME_INFO_H_
diff --git a/src/modules/video_capture/main/source/Mac/video_capture_mac.h b/src/modules/video_capture/main/source/Mac/video_capture_mac.h
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/src/modules/video_capture/main/source/Mac/video_capture_mac.h
diff --git a/src/modules/video_capture/main/source/Mac/video_capture_mac.mm b/src/modules/video_capture/main/source/Mac/video_capture_mac.mm
new file mode 100644
index 0000000..6ed9ddb
--- /dev/null
+++ b/src/modules/video_capture/main/source/Mac/video_capture_mac.mm
@@ -0,0 +1,275 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  video_capture_mac.cc
+ *
+ */
+
+
+// super class stuff
+#include "../video_capture_impl.h"
+#include "../device_info_impl.h"
+#include "../video_capture_config.h"
+#include "ref_count.h"
+
+#include "trace.h"
+
+#include <QuickTime/QuickTime.h>
+
+// 10.4 support must be decided runtime. We will just decide which framework to
+// use at compile time "work" classes. One for QTKit, one for QuickTime
+#if __MAC_OS_X_VERSION_MIN_REQUIRED == __MAC_10_4 // QuickTime version
+#include "QuickTime/video_capture_quick_time.h"
+#include "QuickTime/video_capture_quick_time_info.h"
+#else
+#include "QTKit/video_capture_qtkit.h"
+#include "QTKit/video_capture_qtkit_info.h"
+#endif
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+
+// static
+bool CheckOSVersion()
+{
+    // Check OSX version
+    OSErr err = noErr;
+
+    SInt32 version;
+
+    err = Gestalt(gestaltSystemVersion, &version);
+    if (err != noErr)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0,
+                     "Could not get OS version");
+        return false;
+    }
+
+    if (version < 0x00001040) // Older version than Mac OSX 10.4
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0,
+                     "OS version too old: 0x%x", version);
+        return false;
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, 0,
+                 "OS version compatible: 0x%x", version);
+
+    return true;
+}
+
+// static
+bool CheckQTVersion()
+{
+    // Check OSX version
+    OSErr err = noErr;
+
+    SInt32 version;
+
+    err = Gestalt(gestaltQuickTime, &version);
+    if (err != noErr)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0,
+                     "Could not get QuickTime version");
+        return false;
+    }
+
+    if (version < 0x07000000) // QT v. 7.x or newer (QT 5.0.2 0x05020000)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, 0,
+                     "QuickTime version too old: 0x%x", version);
+        return false;
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, 0,
+                 "QuickTime version compatible: 0x%x", version);
+    return true;
+}
+
+/**************************************************************************
+ *
+ *    Create/Destroy a VideoCaptureModule
+ *
+ ***************************************************************************/
+
+/*
+ *   Returns version of the module and its components
+ *
+ *   version                 - buffer to which the version will be written
+ *   remainingBufferInBytes  - remaining number of WebRtc_Word8 in the version
+ *                             buffer
+ *   position                - position of the next empty WebRtc_Word8 in the
+ *                             version buffer
+ */
+
+VideoCaptureModule* VideoCaptureImpl::Create(
+    const WebRtc_Word32 id, const char* deviceUniqueIdUTF8)
+{
+
+    if (webrtc::videocapturemodule::CheckOSVersion() == false)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id,
+                     "OS version is too old. Could not create video capture "
+                     "module. Returning NULL");
+        return NULL;
+    }
+
+#if __MAC_OS_X_VERSION_MIN_REQUIRED == __MAC_10_4 // QuickTime version
+    if (webrtc::videocapturemodule::CheckQTVersion() == false)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id,
+                     "QuickTime version is too old. Could not create video "
+                     "capture module. Returning NULL");
+        return NULL;
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+                 "%s line %d. QTKit is not supported on this machine. Using "
+                 "QuickTime framework to capture video",
+                 __FILE__, __LINE__);
+
+    RefCountImpl<videocapturemodule::VideoCaptureMacQuickTime>*
+        newCaptureModule =
+            new RefCountImpl<videocapturemodule::VideoCaptureMacQuickTime>(id);
+
+    if (!newCaptureModule)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, id,
+                     "could not Create for unique device %s, !newCaptureModule",
+                     deviceUniqueIdUTF8);
+        return NULL;
+    }
+
+    if (newCaptureModule->Init(id, deviceUniqueIdUTF8) != 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, id,
+                     "could not Create for unique device %s, "
+                     "newCaptureModule->Init()!=0",
+                     deviceUniqueIdUTF8);
+        delete newCaptureModule;
+        return NULL;
+    }
+
+    // Successfully created VideoCaptureMacQuicktime. Return it
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+                 "Module created for unique device %s. Will use QuickTime "
+                 "framework to capture",
+                 deviceUniqueIdUTF8);
+    return newCaptureModule;
+
+#else // QTKit version
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+                 "Using QTKit framework to capture video", id);
+
+    RefCountImpl<videocapturemodule::VideoCaptureMacQTKit>* newCaptureModule =
+        new RefCountImpl<videocapturemodule::VideoCaptureMacQTKit>(id);
+
+    if(!newCaptureModule)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, id,
+                     "could not Create for unique device %s, !newCaptureModule",
+                     deviceUniqueIdUTF8);
+        return NULL;
+    }
+    if(newCaptureModule->Init(id, deviceUniqueIdUTF8) != 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, id,
+                     "could not Create for unique device %s, "
+                     "newCaptureModule->Init()!=0", deviceUniqueIdUTF8);
+        delete newCaptureModule;
+        return NULL;
+    }
+
+    // Successfully created VideoCaptureMacQuicktime. Return it
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+                 "Module created for unique device %s, will use QTKit "
+                 "framework",deviceUniqueIdUTF8);
+    return newCaptureModule;
+#endif
+}
+
+/**************************************************************************
+ *
+ *    Create/Destroy a DeviceInfo
+ *
+ ***************************************************************************/
+
+VideoCaptureModule::DeviceInfo*
+VideoCaptureImpl::CreateDeviceInfo(const WebRtc_Word32 id)
+{
+
+
+    if (webrtc::videocapturemodule::CheckOSVersion() == false)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id,
+                     "OS version is too old. Could not create video capture "
+                     "module. Returning NULL");
+        return NULL;
+    }
+
+#if __MAC_OS_X_VERSION_MIN_REQUIRED == __MAC_10_4 // QuickTime version
+    if (webrtc::videocapturemodule::CheckQTVersion() == false)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id,
+                     "QuickTime version is too old. Could not create video "
+                     "capture module. Returning NULL");
+        return NULL;
+    }
+
+    webrtc::videocapturemodule::VideoCaptureMacQuickTimeInfo* newCaptureInfoModule =
+        new webrtc::videocapturemodule::VideoCaptureMacQuickTimeInfo(id);
+
+    if (!newCaptureInfoModule || newCaptureInfoModule->Init() != 0)
+    {
+        Destroy(newCaptureInfoModule);
+        newCaptureInfoModule = NULL;
+        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+                     "Failed to Init newCaptureInfoModule created with id %d "
+                     "and device \"\" ", id);
+        return NULL;
+    }
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+                 "VideoCaptureModule created for id", id);
+    return newCaptureInfoModule;
+
+#else // QTKit version
+    webrtc::videocapturemodule::VideoCaptureMacQTKitInfo* newCaptureInfoModule =
+        new webrtc::videocapturemodule::VideoCaptureMacQTKitInfo(id);
+
+    if(!newCaptureInfoModule || newCaptureInfoModule->Init() != 0)
+    {
+        //Destroy(newCaptureInfoModule);
+        delete newCaptureInfoModule;
+        newCaptureInfoModule = NULL;
+        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+                     "Failed to Init newCaptureInfoModule created with id %d "
+                     "and device \"\" ", id);
+        return NULL;
+    }
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, id,
+                 "VideoCaptureModule created for id", id);
+    return newCaptureInfoModule;
+
+#endif
+
+}
+
+/**************************************************************************
+ *
+ *    End Create/Destroy VideoCaptureModule
+ *
+ ***************************************************************************/
+}  // namespace videocapturemodule
+}  // namespace webrtc
+
diff --git a/src/modules/video_capture/main/source/Windows/capture_delay_values_windows.h b/src/modules/video_capture/main/source/Windows/capture_delay_values_windows.h
new file mode 100644
index 0000000..a4d9da64
--- /dev/null
+++ b/src/modules/video_capture/main/source/Windows/capture_delay_values_windows.h
@@ -0,0 +1,28 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_CAPTURE_DELAY_VALUES_WINDOWS_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_CAPTURE_DELAY_VALUES_WINDOWS_H_
+
+#include "../video_capture_delay.h"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+const WebRtc_Word32 NoWindowsCaptureDelays=1;
+const DelayValues WindowsCaptureDelays[NoWindowsCaptureDelays]=
+{ 
+    "Microsoft LifeCam Cinema","usb#vid_045e&pid_075d",{{640,480,125},{640,360,117},{424,240,111},{352,288,111},{320,240,116},{176,144,101},{160,120,109},{1280,720,166},{960,544,126},{800,448,120},{800,600,127}},
+};
+
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_CAPTURE_DELAY_VALUES_WINDOWS_H_
diff --git a/src/modules/video_capture/main/source/Windows/device_info_windows.cc b/src/modules/video_capture/main/source/Windows/device_info_windows.cc
new file mode 100644
index 0000000..2dcaa24
--- /dev/null
+++ b/src/modules/video_capture/main/source/Windows/device_info_windows.cc
@@ -0,0 +1,790 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "device_info_windows.h"
+
+#include "../video_capture_config.h"
+#include "help_functions_windows.h"
+#include "capture_delay_values_windows.h"
+#include "ref_count.h"
+#include "trace.h"
+
+#include <Streams.h>
+#include <Dvdmedia.h>
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+VideoCaptureModule::DeviceInfo* VideoCaptureImpl::CreateDeviceInfo(
+                                                        const WebRtc_Word32 id)
+{
+    videocapturemodule::DeviceInfoWindows* dsInfo =
+                        new videocapturemodule::DeviceInfoWindows(id);
+
+    if (!dsInfo || dsInfo->Init() != 0)
+    {
+        delete dsInfo;
+        dsInfo = NULL;
+    }
+    return dsInfo;
+}
+
+DeviceInfoWindows::DeviceInfoWindows(const WebRtc_Word32 id)
+    : DeviceInfoImpl(id), _dsDevEnum(NULL), _dsMonikerDevEnum(NULL),
+      _CoUninitializeIsRequired(true)
+{
+    // 1) Initialize the COM library (make Windows load the DLLs).
+    //
+    // CoInitializeEx must be called at least once, and is usually called only once,
+    // for each thread that uses the COM library. Multiple calls to CoInitializeEx
+    // by the same thread are allowed as long as they pass the same concurrency flag,
+    // but subsequent valid calls return S_FALSE.
+    // To close the COM library gracefully on a thread, each successful call to
+    // CoInitializeEx, including any call that returns S_FALSE, must be balanced
+    // by a corresponding call to CoUninitialize.
+    //
+
+    /*Apartment-threading, while allowing for multiple threads of execution,
+     serializes all incoming calls by requiring that calls to methods of objects created by this thread always run on the same thread
+     the apartment/thread that created them. In addition, calls can arrive only at message-queue boundaries (i.e., only during a
+     PeekMessage, SendMessage, DispatchMessage, etc.). Because of this serialization, it is not typically necessary to write concurrency control into
+     the code for the object, other than to avoid calls to PeekMessage and SendMessage during processing that must not be interrupted by other method
+     invocations or calls to other objects in the same apartment/thread.*/
+
+    ///CoInitializeEx(NULL, COINIT_APARTMENTTHREADED ); //| COINIT_SPEED_OVER_MEMORY 
+    HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED); // Use COINIT_MULTITHREADED since Voice Engine uses COINIT_MULTITHREADED
+    if (FAILED(hr))
+    {
+        // Avoid calling CoUninitialize() since CoInitializeEx() failed.
+        _CoUninitializeIsRequired = FALSE;
+
+        if (hr == RPC_E_CHANGED_MODE)
+        {
+            // Calling thread has already initialized COM to be used in a single-threaded
+            // apartment (STA). We are then prevented from using STA.
+            // Details: hr = 0x80010106 <=> "Cannot change thread mode after it is set".
+            //
+            WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                         "VideoCaptureWindowsDSInfo::VideoCaptureWindowsDSInfo "
+                         "CoInitializeEx(NULL, COINIT_APARTMENTTHREADED) => "
+                         "RPC_E_CHANGED_MODE, error 0x%x",
+                         hr);
+        }
+    }
+}
+
+DeviceInfoWindows::~DeviceInfoWindows()
+{
+    RELEASE_AND_CLEAR(_dsMonikerDevEnum);
+    RELEASE_AND_CLEAR(_dsDevEnum);
+    if (_CoUninitializeIsRequired)
+    {
+        CoUninitialize();
+    }
+}
+
+WebRtc_Word32 DeviceInfoWindows::Init()
+{
+    HRESULT hr = CoCreateInstance(CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC,
+                                  IID_ICreateDevEnum, (void **) &_dsDevEnum);
+    if (hr != NOERROR)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to create CLSID_SystemDeviceEnum, error 0x%x", hr);
+        return -1;
+    }
+    return 0;
+}
+WebRtc_UWord32 DeviceInfoWindows::NumberOfDevices()
+{
+    ReadLockScoped cs(_apiLock);
+    return GetDeviceInfo(0, 0, 0, 0, 0, 0, 0);
+
+}
+WebRtc_Word32 DeviceInfoWindows::GetDeviceName(
+                                       WebRtc_UWord32 deviceNumber,
+                                       char* deviceNameUTF8,
+                                       WebRtc_UWord32 deviceNameLength,
+                                       char* deviceUniqueIdUTF8,
+                                       WebRtc_UWord32 deviceUniqueIdUTF8Length,
+                                       char* productUniqueIdUTF8,
+                                       WebRtc_UWord32 productUniqueIdUTF8Length)
+{
+    ReadLockScoped cs(_apiLock);
+    const WebRtc_Word32 result = GetDeviceInfo(deviceNumber, deviceNameUTF8,
+                                               deviceNameLength,
+                                               deviceUniqueIdUTF8,
+                                               deviceUniqueIdUTF8Length,
+                                               productUniqueIdUTF8,
+                                               productUniqueIdUTF8Length);
+    return result > (WebRtc_Word32) deviceNumber ? 0 : -1;
+}
+
+WebRtc_Word32 DeviceInfoWindows::GetDeviceInfo(
+                                       WebRtc_UWord32 deviceNumber,
+                                       char* deviceNameUTF8,
+                                       WebRtc_UWord32 deviceNameLength,
+                                       char* deviceUniqueIdUTF8,
+                                       WebRtc_UWord32 deviceUniqueIdUTF8Length,
+                                       char* productUniqueIdUTF8,
+                                       WebRtc_UWord32 productUniqueIdUTF8Length)
+
+{
+
+    // enumerate all video capture devices
+    RELEASE_AND_CLEAR(_dsMonikerDevEnum);
+    HRESULT hr =
+        _dsDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
+                                          &_dsMonikerDevEnum, 0);
+    if (hr != NOERROR)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to enumerate CLSID_SystemDeviceEnum, error 0x%x."
+                     " No webcam exist?", hr);
+        return 0;
+    }
+
+    _dsMonikerDevEnum->Reset();
+    ULONG cFetched;
+    IMoniker *pM;
+    int index = 0;
+    while (S_OK == _dsMonikerDevEnum->Next(1, &pM, &cFetched))
+    {
+        IPropertyBag *pBag;
+        hr = pM->BindToStorage(0, 0, IID_IPropertyBag, (void **) &pBag);
+        if (S_OK == hr)
+        {
+            // Find the description or friendly name.
+            VARIANT varName;
+            VariantInit(&varName);
+            hr = pBag->Read(L"Description", &varName, 0);
+            if (FAILED(hr))
+            {
+                hr = pBag->Read(L"FriendlyName", &varName, 0);
+            }
+            if (SUCCEEDED(hr))
+            {
+                // ignore all VFW drivers
+                if ((wcsstr(varName.bstrVal, (L"(VFW)")) == NULL) &&
+                    (_wcsnicmp(varName.bstrVal, (L"Google Camera Adapter"),21)
+                        != 0))
+                {
+                    // Found a valid device
+                    if (index == deviceNumber) // This is the device we are interested in.
+                    {
+                        int convResult = 0;
+                        if (deviceNameLength > 0)
+                        {
+                            convResult = WideCharToMultiByte(CP_UTF8, 0,
+                                                             varName.bstrVal, -1,
+                                                             (char*) deviceNameUTF8,
+                                                             deviceNameLength, NULL,
+                                                             NULL);
+                            if (convResult == 0)
+                            {
+                                WEBRTC_TRACE(webrtc::kTraceError,
+                                             webrtc::kTraceVideoCapture, _id,
+                                             "Failed to convert device name to UTF8. %d",
+                                             GetLastError());
+                                return -1;
+                            }
+                        }
+                        if (deviceUniqueIdUTF8Length > 0)
+                        {
+                            hr = pBag->Read(L"DevicePath", &varName, 0);
+                            if (FAILED(hr))
+                            {
+                                strncpy_s((char *) deviceUniqueIdUTF8,
+                                          deviceUniqueIdUTF8Length,
+                                          (char *) deviceNameUTF8, convResult);
+                                WEBRTC_TRACE(webrtc::kTraceError,
+                                             webrtc::kTraceVideoCapture, _id,
+                                             "Failed to get deviceUniqueIdUTF8 using deviceNameUTF8");
+                            }
+                            else
+                            {
+                                convResult = WideCharToMultiByte(
+                                                          CP_UTF8,
+                                                          0,
+                                                          varName.bstrVal,
+                                                          -1,
+                                                          (char*) deviceUniqueIdUTF8,
+                                                          deviceUniqueIdUTF8Length,
+                                                          NULL, NULL);
+                                if (convResult == 0)
+                                {
+                                    WEBRTC_TRACE(webrtc::kTraceError,
+                                                 webrtc::kTraceVideoCapture, _id,
+                                                 "Failed to convert device name to UTF8. %d",
+                                                 GetLastError());
+                                    return -1;
+                                }
+                                if (productUniqueIdUTF8
+                                    && productUniqueIdUTF8Length > 0)
+                                {
+                                    GetProductId(deviceUniqueIdUTF8,
+                                                 productUniqueIdUTF8,
+                                                 productUniqueIdUTF8Length);
+                                }
+                            }
+                        }
+
+                    }
+                    ++index; // increase the number of valid devices
+                }
+            }
+            VariantClear(&varName);
+            pBag->Release();
+            pM->Release();
+        }
+
+    }
+    if (deviceNameLength)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id, "%s %s",
+                     __FUNCTION__, deviceNameUTF8);
+    }
+    return index;
+}
+
+IBaseFilter * DeviceInfoWindows::GetDeviceFilter(
+                                     const char* deviceUniqueIdUTF8,
+                                     char* productUniqueIdUTF8,
+                                     WebRtc_UWord32 productUniqueIdUTF8Length)
+{
+
+    const WebRtc_Word32 deviceUniqueIdUTF8Length =
+        (WebRtc_Word32) strlen((char*) deviceUniqueIdUTF8); // UTF8 is also NULL terminated
+    if (deviceUniqueIdUTF8Length > kVideoCaptureUniqueNameLength)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Device name too long");
+        return NULL;
+    }
+
+    // enumerate all video capture devices
+    RELEASE_AND_CLEAR(_dsMonikerDevEnum);
+    HRESULT hr = _dsDevEnum->CreateClassEnumerator(CLSID_VideoInputDeviceCategory,
+                                                   &_dsMonikerDevEnum, 0);
+    if (hr != NOERROR)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to enumerate CLSID_SystemDeviceEnum, error 0x%x."
+                     " No webcam exist?", hr);
+        return 0;
+    }
+    _dsMonikerDevEnum->Reset();
+    ULONG cFetched;
+    IMoniker *pM;
+
+    IBaseFilter *captureFilter = NULL;
+    bool deviceFound = false;
+    while (S_OK == _dsMonikerDevEnum->Next(1, &pM, &cFetched) && !deviceFound)
+    {
+        IPropertyBag *pBag;
+        hr = pM->BindToStorage(0, 0, IID_IPropertyBag, (void **) &pBag);
+        if (S_OK == hr)
+        {
+            // Find the description or friendly name.
+            VARIANT varName;
+            VariantInit(&varName);
+            if (deviceUniqueIdUTF8Length > 0)
+            {
+                hr = pBag->Read(L"DevicePath", &varName, 0);
+                if (FAILED(hr))
+                {
+                    hr = pBag->Read(L"Description", &varName, 0);
+                    if (FAILED(hr))
+                    {
+                        hr = pBag->Read(L"FriendlyName", &varName, 0);
+                    }
+                }
+                if (SUCCEEDED(hr))
+                {
+                    char tempDevicePathUTF8[256];
+                    tempDevicePathUTF8[0] = 0;
+                    WideCharToMultiByte(CP_UTF8, 0, varName.bstrVal, -1,
+                                        tempDevicePathUTF8,
+                                        sizeof(tempDevicePathUTF8), NULL,
+                                        NULL);
+                    if (strncmp(tempDevicePathUTF8,
+                                (const char*) deviceUniqueIdUTF8,
+                                deviceUniqueIdUTF8Length) == 0)
+                    {
+                        // We have found the requested device                        
+                        deviceFound = true;
+                        hr = pM->BindToObject(0, 0, IID_IBaseFilter,
+                                              (void**) &captureFilter);
+                        if FAILED(hr)
+                        {
+                            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
+                                         _id, "Failed to bind to the selected capture device %d",hr);
+                        }
+
+                        if (productUniqueIdUTF8
+                            && productUniqueIdUTF8Length > 0) // Get the device name
+                        {
+
+                            GetProductId(deviceUniqueIdUTF8,
+                                         productUniqueIdUTF8,
+                                         productUniqueIdUTF8Length);
+                        }
+
+                    }
+                }
+            }
+            VariantClear(&varName);
+            pBag->Release();
+            pM->Release();
+        }
+    }
+    return captureFilter;
+}
+
+WebRtc_Word32 DeviceInfoWindows::GetWindowsCapability(
+                              const WebRtc_Word32 capabilityIndex,
+                              VideoCaptureCapabilityWindows& windowsCapability)
+
+{
+    ReadLockScoped cs(_apiLock);
+    // Make sure the number is valid
+    if (capabilityIndex >= _captureCapabilities.Size() || capabilityIndex < 0)
+        return -1;
+
+    MapItem* item = _captureCapabilities.Find(capabilityIndex);
+    if (!item)
+        return -1;
+
+    VideoCaptureCapabilityWindows* capPointer =
+                static_cast<VideoCaptureCapabilityWindows*> (item->GetItem());
+    windowsCapability = *capPointer;
+    return 0;
+}
+
+WebRtc_Word32 DeviceInfoWindows::CreateCapabilityMap(
+                                         const char* deviceUniqueIdUTF8)
+
+{
+    // Reset old capability list
+    MapItem* item = NULL;
+    while (item = _captureCapabilities.Last())
+    {
+        delete item->GetItem();
+        _captureCapabilities.Erase(item);
+    }
+
+    const WebRtc_Word32 deviceUniqueIdUTF8Length =
+        (WebRtc_Word32) strlen((char*) deviceUniqueIdUTF8);
+    if (deviceUniqueIdUTF8Length > kVideoCaptureUniqueNameLength)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Device name too long");
+        return -1;
+    }
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "CreateCapabilityMap called for device %s", deviceUniqueIdUTF8);
+
+
+    char productId[kVideoCaptureProductIdLength];
+    IBaseFilter* captureDevice = DeviceInfoWindows::GetDeviceFilter(
+                                               deviceUniqueIdUTF8,
+                                               productId,
+                                               kVideoCaptureProductIdLength);
+    if (!captureDevice)
+        return -1;
+    IPin* outputCapturePin = GetOutputPin(captureDevice);
+    if (!outputCapturePin)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to get capture device output pin");
+        RELEASE_AND_CLEAR(captureDevice);
+        return -1;
+    }
+    IAMExtDevice* extDevice = NULL;
+    HRESULT hr = captureDevice->QueryInterface(IID_IAMExtDevice,
+                                               (void **) &extDevice);
+    if (SUCCEEDED(hr) && extDevice)
+    {
+        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                     "This is an external device");
+        extDevice->Release();
+    }
+
+    IAMStreamConfig* streamConfig = NULL;
+    hr = outputCapturePin->QueryInterface(IID_IAMStreamConfig,
+                                          (void**) &streamConfig);
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to get IID_IAMStreamConfig interface from capture device");
+        return -1;
+    }
+
+    // this  gets the FPS
+    IAMVideoControl* videoControlConfig = NULL;
+    HRESULT hrVC = captureDevice->QueryInterface(IID_IAMVideoControl,
+                                      (void**) &videoControlConfig);
+    if (FAILED(hrVC))
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                     "IID_IAMVideoControl Interface NOT SUPPORTED");
+    }
+
+    AM_MEDIA_TYPE *pmt = NULL;
+    VIDEO_STREAM_CONFIG_CAPS caps;
+    int count, size;
+
+    hr = streamConfig->GetNumberOfCapabilities(&count, &size);
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to GetNumberOfCapabilities");
+        RELEASE_AND_CLEAR(videoControlConfig);
+        RELEASE_AND_CLEAR(streamConfig);
+        RELEASE_AND_CLEAR(outputCapturePin);
+        RELEASE_AND_CLEAR(captureDevice);
+        return -1;
+    }
+
+    WebRtc_Word32 index = 0; // Index in created _capabilities map
+    // Check if the device support formattype == FORMAT_VideoInfo2 and FORMAT_VideoInfo. 
+    // Prefer FORMAT_VideoInfo since some cameras (ZureCam) has been seen having problem with MJPEG and FORMAT_VideoInfo2
+    // Interlace flag is only supported in FORMAT_VideoInfo2
+    bool supportFORMAT_VideoInfo2 = false;
+    bool supportFORMAT_VideoInfo = false;
+    bool foundInterlacedFormat = false;
+    GUID preferedVideoFormat = FORMAT_VideoInfo;
+    for (WebRtc_Word32 tmp = 0; tmp < count; ++tmp)
+    {
+        hr = streamConfig->GetStreamCaps(tmp, &pmt,
+                                         reinterpret_cast<BYTE*> (&caps));
+        if (!FAILED(hr))
+        {
+            if (pmt->majortype == MEDIATYPE_Video
+                && pmt->formattype == FORMAT_VideoInfo2)
+            {
+                WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                             " Device support FORMAT_VideoInfo2");
+                supportFORMAT_VideoInfo2 = true;
+                VIDEOINFOHEADER2* h =
+                    reinterpret_cast<VIDEOINFOHEADER2*> (pmt->pbFormat);
+                assert(h);
+                foundInterlacedFormat |= h->dwInterlaceFlags
+                                        & (AMINTERLACE_IsInterlaced
+                                           | AMINTERLACE_DisplayModeBobOnly);
+            }
+            if (pmt->majortype == MEDIATYPE_Video
+                && pmt->formattype == FORMAT_VideoInfo)
+            {
+                WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                             " Device support FORMAT_VideoInfo2");
+                supportFORMAT_VideoInfo = true;
+            }
+        }
+    }
+    if (supportFORMAT_VideoInfo2)
+    {
+        if (supportFORMAT_VideoInfo && !foundInterlacedFormat)
+        {
+            preferedVideoFormat = FORMAT_VideoInfo;
+        }
+        else
+        {
+            preferedVideoFormat = FORMAT_VideoInfo2;
+        }
+    }
+
+    for (WebRtc_Word32 tmp = 0; tmp < count; ++tmp)
+    {
+        hr = streamConfig->GetStreamCaps(tmp, &pmt,
+                                         reinterpret_cast<BYTE*> (&caps));
+        if (FAILED(hr))
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "Failed to GetStreamCaps");
+            RELEASE_AND_CLEAR(videoControlConfig);
+            RELEASE_AND_CLEAR(streamConfig);
+            RELEASE_AND_CLEAR(outputCapturePin);
+            RELEASE_AND_CLEAR(captureDevice);
+            return -1;
+        }
+
+        if (pmt->majortype == MEDIATYPE_Video
+            && pmt->formattype == preferedVideoFormat)
+        {
+
+            VideoCaptureCapabilityWindows* capability =
+                                        new VideoCaptureCapabilityWindows();
+            WebRtc_Word64 avgTimePerFrame = 0;
+
+            if (pmt->formattype == FORMAT_VideoInfo)
+            {
+                VIDEOINFOHEADER* h =
+                    reinterpret_cast<VIDEOINFOHEADER*> (pmt->pbFormat);
+                assert(h);
+                capability->directShowCapabilityIndex = tmp;
+                capability->width = h->bmiHeader.biWidth;
+                capability->height = h->bmiHeader.biHeight;
+                avgTimePerFrame = h->AvgTimePerFrame;
+            }
+            if (pmt->formattype == FORMAT_VideoInfo2)
+            {
+                VIDEOINFOHEADER2* h =
+                    reinterpret_cast<VIDEOINFOHEADER2*> (pmt->pbFormat);
+                assert(h);
+                capability->directShowCapabilityIndex = tmp;
+                capability->width = h->bmiHeader.biWidth;
+                capability->height = h->bmiHeader.biHeight;
+                capability->interlaced = h->dwInterlaceFlags
+                                        & (AMINTERLACE_IsInterlaced
+                                           | AMINTERLACE_DisplayModeBobOnly);
+                avgTimePerFrame = h->AvgTimePerFrame;
+            }
+
+            if (hrVC == S_OK)
+            {
+                LONGLONG *maxFps; // array                        
+                long listSize;
+                SIZE size;
+                size.cx = capability->width;
+                size.cy = capability->height;
+
+                // GetMaxAvailableFrameRate doesn't return max frame rate always
+                // eg: Logitech Notebook. This may be due to a bug in that API
+                // because GetFrameRateList array is reversed in the above camera. So 
+                // a util method written. Can't assume the first value will return
+                // the max fps.
+                hrVC = videoControlConfig->GetFrameRateList(outputCapturePin,
+                                                            tmp, size,
+                                                            &listSize,
+                                                            &maxFps);
+
+                if (hrVC == S_OK && listSize > 0)
+                {
+                    LONGLONG maxFPS = GetMaxOfFrameArray(maxFps, listSize);
+                    capability->maxFPS = static_cast<int> (10000000
+                                                           / maxFPS);
+                    capability->supportFrameRateControl = true;
+                }
+                else // use existing method
+                {
+                    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
+                                 _id,
+                                 "GetMaxAvailableFrameRate NOT SUPPORTED");
+                    if (avgTimePerFrame > 0)
+                        capability->maxFPS = static_cast<int> (10000000
+                                                               / avgTimePerFrame);
+                    else
+                        capability->maxFPS = 0;
+                }
+            }
+            else // use existing method in case IAMVideoControl is not supported
+            {
+                if (avgTimePerFrame > 0)
+                    capability->maxFPS = static_cast<int> (10000000
+                                                           / avgTimePerFrame);
+                else
+                    capability->maxFPS = 0;
+            }
+
+            // can't switch MEDIATYPE :~(
+            if (pmt->subtype == MEDIASUBTYPE_I420)
+            {
+                capability->rawType = kVideoI420;
+            }
+            else if (pmt->subtype == MEDIASUBTYPE_IYUV)
+            {
+                capability->rawType = kVideoIYUV;
+            }
+            else if (pmt->subtype == MEDIASUBTYPE_RGB24)
+            {
+                capability->rawType = kVideoRGB24;
+            }
+            else if (pmt->subtype == MEDIASUBTYPE_YUY2)
+            {
+                capability->rawType = kVideoYUY2;
+            }
+            else if (pmt->subtype == MEDIASUBTYPE_RGB565)
+            {
+                capability->rawType = kVideoRGB565;
+            }
+            else if (pmt->subtype == MEDIASUBTYPE_MJPG)
+            {
+                capability->rawType = kVideoMJPEG;
+            }
+            else if (pmt->subtype == MEDIASUBTYPE_dvsl
+                    || pmt->subtype == MEDIASUBTYPE_dvsd
+                    || pmt->subtype == MEDIASUBTYPE_dvhd) // If this is an external DV camera
+            {
+                capability->rawType = kVideoYUY2;// MS DV filter seems to create this type
+            }
+            else if (pmt->subtype == MEDIASUBTYPE_UYVY) // Seen used by Declink capture cards
+            {
+                capability->rawType = kVideoUYVY;
+            }
+            else if (pmt->subtype == MEDIASUBTYPE_HDYC) // Seen used by Declink capture cards. Uses BT. 709 color. Not entiry correct to use UYVY. http://en.wikipedia.org/wiki/YCbCr
+            {
+                WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                             "Device support HDYC.");
+                capability->rawType = kVideoUYVY;
+            }
+            else
+            {
+                WCHAR strGuid[39];
+                StringFromGUID2(pmt->subtype, strGuid, 39);
+                WEBRTC_TRACE( webrtc::kTraceWarning,
+                             webrtc::kTraceVideoCapture, _id,
+                             "Device support unknown media type %ls, width %d, height %d",
+                             strGuid);
+                delete capability;
+                continue;
+            }
+
+            // Get the expected capture delay from the static list
+            capability->expectedCaptureDelay
+                            = GetExpectedCaptureDelay(WindowsCaptureDelays,
+                                                      NoWindowsCaptureDelays,
+                                                      productId,
+                                                      capability->width,
+                                                      capability->height);
+            _captureCapabilities.Insert(index++, capability);
+            WEBRTC_TRACE( webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                         "Camera capability, width:%d height:%d type:%d fps:%d",
+                         capability->width, capability->height,
+                         capability->rawType, capability->maxFPS);
+        }
+        DeleteMediaType(pmt);
+        pmt = NULL;
+    }
+    RELEASE_AND_CLEAR(streamConfig);
+    RELEASE_AND_CLEAR(videoControlConfig);
+    RELEASE_AND_CLEAR(outputCapturePin);
+    RELEASE_AND_CLEAR(captureDevice); // Release the capture device
+    
+    // Store the new used device name
+    _lastUsedDeviceNameLength = deviceUniqueIdUTF8Length;
+    _lastUsedDeviceName = (char*) realloc(_lastUsedDeviceName,
+                                                   _lastUsedDeviceNameLength
+                                                       + 1);
+    memcpy(_lastUsedDeviceName, deviceUniqueIdUTF8, _lastUsedDeviceNameLength+ 1);
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "CreateCapabilityMap %d", _captureCapabilities.Size());
+
+    return _captureCapabilities.Size();
+}
+
+/* Constructs a product ID from the Windows DevicePath. on a USB device the devicePath contains product id and vendor id. 
+ This seems to work for firewire as well
+ /* Example of device path
+ "\\?\usb#vid_0408&pid_2010&mi_00#7&258e7aaf&0&0000#{65e8773d-8f56-11d0-a3b9-00a0c9223196}\global"
+ "\\?\avc#sony&dv-vcr&camcorder&dv#65b2d50301460008#{65e8773d-8f56-11d0-a3b9-00a0c9223196}\global"
+ */
+void DeviceInfoWindows::GetProductId(const char* devicePath,
+                                      char* productUniqueIdUTF8,
+                                      WebRtc_UWord32 productUniqueIdUTF8Length)
+{
+    *productUniqueIdUTF8 = '\0';
+    char* startPos = strstr((char*) devicePath, "\\\\?\\");
+    if (!startPos)
+    {
+        strncpy_s((char*) productUniqueIdUTF8, productUniqueIdUTF8Length, "", 1);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                     "Failed to get the product Id");
+        return;
+    }
+    startPos += 4;
+
+    char* pos = strchr(startPos, '&');
+    if (!pos || pos >= (char*) devicePath + strlen((char*) devicePath))
+    {
+        strncpy_s((char*) productUniqueIdUTF8, productUniqueIdUTF8Length, "", 1);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                     "Failed to get the product Id");
+        return;
+    }
+    // Find the second occurence
+    pos = strchr(pos + 1, '&');
+    WebRtc_UWord32 bytesToCopy = (WebRtc_UWord32)(pos - startPos);
+    if (pos && (bytesToCopy <= productUniqueIdUTF8Length) && bytesToCopy
+        <= kVideoCaptureProductIdLength)
+    {
+        strncpy_s((char*) productUniqueIdUTF8, productUniqueIdUTF8Length,
+                  (char*) startPos, bytesToCopy);
+    }
+    else
+    {
+        strncpy_s((char*) productUniqueIdUTF8, productUniqueIdUTF8Length, "", 1);
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                     "Failed to get the product Id");
+    }
+}
+
+WebRtc_Word32 DeviceInfoWindows::DisplayCaptureSettingsDialogBox(
+                                         const char* deviceUniqueIdUTF8,
+                                         const char* dialogTitleUTF8,
+                                         void* parentWindow,
+                                         WebRtc_UWord32 positionX,
+                                         WebRtc_UWord32 positionY)
+{
+    ReadLockScoped cs(_apiLock);
+    HWND window = (HWND) parentWindow;
+
+    IBaseFilter* filter = GetDeviceFilter(deviceUniqueIdUTF8, NULL, 0);
+    if (!filter)
+        return -1;
+
+    ISpecifyPropertyPages* pPages = NULL;
+    CAUUID uuid;
+    HRESULT hr = S_OK;
+
+    hr = filter->QueryInterface(IID_ISpecifyPropertyPages, (LPVOID*) &pPages);
+    if (!SUCCEEDED(hr))
+    {
+        filter->Release();
+        return -1;
+    }
+    hr = pPages->GetPages(&uuid);
+    if (!SUCCEEDED(hr))
+    {
+        filter->Release();
+        return -1;
+    }
+
+    WCHAR tempDialogTitleWide[256];
+    tempDialogTitleWide[0] = 0;
+    int size = 255;
+
+    // UTF-8 to wide char
+    MultiByteToWideChar(CP_UTF8, 0, (char*) dialogTitleUTF8, -1,
+                        tempDialogTitleWide, size);
+
+    // Invoke a dialog box to display.
+
+    hr = OleCreatePropertyFrame(window, // You must create the parent window.
+                                positionX, // Horizontal position for the dialog box.
+                                positionY, // Vertical position for the dialog box.
+                                tempDialogTitleWide,// String used for the dialog box caption.
+                                1, // Number of pointers passed in pPlugin.
+                                (LPUNKNOWN*) &filter, // Pointer to the filter.
+                                uuid.cElems, // Number of property pages.
+                                uuid.pElems, // Array of property page CLSIDs.
+                                LOCALE_USER_DEFAULT, // Locale ID for the dialog box.
+                                0, NULL); // Reserved
+    // Release memory.
+    if (uuid.pElems)
+    {
+        CoTaskMemFree(uuid.pElems);
+    }
+    filter->Release();
+    return 0;
+}
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/src/modules/video_capture/main/source/Windows/device_info_windows.h b/src/modules/video_capture/main/source/Windows/device_info_windows.h
new file mode 100644
index 0000000..2bae6b3
--- /dev/null
+++ b/src/modules/video_capture/main/source/Windows/device_info_windows.h
@@ -0,0 +1,105 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_DEVICE_INFO_WINDOWS_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_DEVICE_INFO_WINDOWS_H_
+
+#include "../video_capture_impl.h"
+#include "../device_info_impl.h"
+
+#include <Dshow.h>
+#include "map_wrapper.h"
+
+// forward declarations
+namespace webrtc
+{
+namespace videocapturemodule
+{
+struct VideoCaptureCapabilityWindows: public VideoCaptureCapability
+{
+    WebRtc_UWord32 directShowCapabilityIndex;
+    bool supportFrameRateControl;
+    VideoCaptureCapabilityWindows()
+    {
+        directShowCapabilityIndex = 0;
+        supportFrameRateControl = false;
+    }
+
+};
+class DeviceInfoWindows: public DeviceInfoImpl
+{
+public:
+    DeviceInfoWindows(const WebRtc_Word32 id);
+    virtual ~DeviceInfoWindows();
+
+    WebRtc_Word32 Init();
+    virtual WebRtc_UWord32 NumberOfDevices();
+
+    /*
+     * Returns the available capture devices.
+     */
+    virtual WebRtc_Word32
+        GetDeviceName(WebRtc_UWord32 deviceNumber,
+                      char* deviceNameUTF8,
+                      WebRtc_UWord32 deviceNameLength,
+                      char* deviceUniqueIdUTF8,
+                      WebRtc_UWord32 deviceUniqueIdUTF8Length,
+                      char* productUniqueIdUTF8,
+                      WebRtc_UWord32 productUniqueIdUTF8Length);
+
+    /* 
+     * Display OS /capture device specific settings dialog
+     */
+    virtual WebRtc_Word32
+        DisplayCaptureSettingsDialogBox(
+                                        const char* deviceUniqueIdUTF8,
+                                        const char* dialogTitleUTF8,
+                                        void* parentWindow,
+                                        WebRtc_UWord32 positionX,
+                                        WebRtc_UWord32 positionY);
+
+    // Windows specific
+
+    /* Gets a capture device filter
+     The user of this API is responsible for releasing the filter when it not needed.
+     */
+    IBaseFilter * GetDeviceFilter(const char* deviceUniqueIdUTF8,
+                                  char* productUniqueIdUTF8 = NULL,
+                                  WebRtc_UWord32 productUniqueIdUTF8Length = 0);
+
+    WebRtc_Word32
+        GetWindowsCapability(const WebRtc_Word32 capabilityIndex,
+                             VideoCaptureCapabilityWindows& windowsCapability);
+
+    static void GetProductId(const char* devicePath,
+                             char* productUniqueIdUTF8,
+                             WebRtc_UWord32 productUniqueIdUTF8Length);
+protected:
+
+    WebRtc_Word32 GetDeviceInfo(WebRtc_UWord32 deviceNumber,
+                                char* deviceNameUTF8,
+                                WebRtc_UWord32 deviceNameLength,
+                                char* deviceUniqueIdUTF8,
+                                WebRtc_UWord32 deviceUniqueIdUTF8Length,
+                                char* productUniqueIdUTF8,
+                                WebRtc_UWord32 productUniqueIdUTF8Length);
+
+    virtual WebRtc_Word32
+        CreateCapabilityMap(const char* deviceUniqueIdUTF8);
+
+private:
+    ICreateDevEnum* _dsDevEnum;
+    IEnumMoniker* _dsMonikerDevEnum;
+    bool _CoUninitializeIsRequired;
+
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_DEVICE_INFO_WINDOWS_H_
diff --git a/src/modules/video_capture/main/source/Windows/direct_show_base_classes.gyp b/src/modules/video_capture/main/source/Windows/direct_show_base_classes.gyp
new file mode 100644
index 0000000..1eb2127
--- /dev/null
+++ b/src/modules/video_capture/main/source/Windows/direct_show_base_classes.gyp
@@ -0,0 +1,102 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# This target is broken out into its own gyp file in order to be treated as
+# third party code. (Since src/build/common.gypi is not included,
+# chromium_code is disabled).
+#
+# We can't place this in third_party/ because Chromium parses
+# video_capture.gypi and would fail to find it in the Chromium third_party/.
+{
+  'targets': [
+    {
+      'target_name': 'direct_show_base_classes',
+      'type': 'static_library',
+      'variables': {
+        # Path needed to build the Direct Show base classes on Windows. The
+        # code is included in the Windows SDK.
+        'direct_show_dir%':
+          'C:/Program Files/Microsoft SDKs/Windows/v7.1/Samples/multimedia/directshow/baseclasses/',
+      },
+      'defines!': [
+        'NOMINMAX',
+      ],
+      'include_dirs': [
+        '<(direct_show_dir)',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '<(direct_show_dir)',
+        ],
+      },
+      'sources': [
+        '<(direct_show_dir)amextra.cpp',
+        '<(direct_show_dir)amextra.h',
+        '<(direct_show_dir)amfilter.cpp',
+        '<(direct_show_dir)amfilter.h',
+        '<(direct_show_dir)amvideo.cpp',
+        '<(direct_show_dir)cache.h',
+        '<(direct_show_dir)combase.cpp',
+        '<(direct_show_dir)combase.h',
+        '<(direct_show_dir)cprop.cpp',
+        '<(direct_show_dir)cprop.h',
+        '<(direct_show_dir)ctlutil.cpp',
+        '<(direct_show_dir)ctlutil.h',
+        '<(direct_show_dir)ddmm.cpp',
+        '<(direct_show_dir)ddmm.h',
+        '<(direct_show_dir)dllentry.cpp',
+        '<(direct_show_dir)dllsetup.cpp',
+        '<(direct_show_dir)dllsetup.h',
+        '<(direct_show_dir)fourcc.h',
+        '<(direct_show_dir)measure.h',
+        '<(direct_show_dir)msgthrd.h',
+        '<(direct_show_dir)mtype.cpp',
+        '<(direct_show_dir)mtype.h',
+        '<(direct_show_dir)outputq.cpp',
+        '<(direct_show_dir)outputq.h',
+        '<(direct_show_dir)pstream.cpp',
+        '<(direct_show_dir)pstream.h',
+        '<(direct_show_dir)pullpin.cpp',
+        '<(direct_show_dir)pullpin.h',
+        '<(direct_show_dir)refclock.cpp',
+        '<(direct_show_dir)refclock.h',
+        '<(direct_show_dir)reftime.h',
+        '<(direct_show_dir)renbase.cpp',
+        '<(direct_show_dir)renbase.h',
+        '<(direct_show_dir)schedule.cpp',
+        '<(direct_show_dir)seekpt.cpp',
+        '<(direct_show_dir)seekpt.h',
+        '<(direct_show_dir)source.cpp',
+        '<(direct_show_dir)source.h',
+        '<(direct_show_dir)streams.h',
+        '<(direct_show_dir)strmctl.cpp',
+        '<(direct_show_dir)strmctl.h',
+        '<(direct_show_dir)sysclock.cpp',
+        '<(direct_show_dir)sysclock.h',
+        '<(direct_show_dir)transfrm.cpp',
+        '<(direct_show_dir)transfrm.h',
+        '<(direct_show_dir)transip.cpp',
+        '<(direct_show_dir)transip.h',
+        '<(direct_show_dir)videoctl.cpp',
+        '<(direct_show_dir)videoctl.h',
+        '<(direct_show_dir)vtrans.cpp',
+        '<(direct_show_dir)vtrans.h',
+        '<(direct_show_dir)winctrl.cpp',
+        '<(direct_show_dir)winctrl.h',
+        '<(direct_show_dir)winutil.cpp',
+        '<(direct_show_dir)winutil.h',
+        '<(direct_show_dir)wxdebug.cpp',
+        '<(direct_show_dir)wxdebug.h',
+        '<(direct_show_dir)wxlist.cpp',
+        '<(direct_show_dir)wxlist.h',
+        '<(direct_show_dir)wxutil.cpp',
+        '<(direct_show_dir)wxutil.h',
+      ],
+    },
+  ],
+}
diff --git a/src/modules/video_capture/main/source/Windows/help_functions_windows.cc b/src/modules/video_capture/main/source/Windows/help_functions_windows.cc
new file mode 100644
index 0000000..405a53d
--- /dev/null
+++ b/src/modules/video_capture/main/source/Windows/help_functions_windows.cc
@@ -0,0 +1,114 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "help_functions_windows.h"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+// This returns minimum :), which will give max frame rate...
+LONGLONG GetMaxOfFrameArray(LONGLONG *maxFps, long size)
+{
+    LONGLONG maxFPS = maxFps[0];
+    for (int i = 0; i < size; i++)
+    {
+        if (maxFPS > maxFps[i])
+            maxFPS = maxFps[i];
+    }
+    return maxFPS;
+}
+
+IPin* GetInputPin(IBaseFilter* filter)
+{
+    HRESULT hr;
+    IPin* pin = NULL;
+    IEnumPins* pPinEnum = NULL;
+    filter->EnumPins(&pPinEnum);
+    if (pPinEnum == NULL)
+    {
+        return NULL;
+    }
+
+    // get first unconnected pin
+    hr = pPinEnum->Reset(); // set to first pin
+
+    while (S_OK == pPinEnum->Next(1, &pin, NULL))
+    {
+        PIN_DIRECTION pPinDir;
+        pin->QueryDirection(&pPinDir);
+        if (PINDIR_INPUT == pPinDir) // This is an input pin
+        {
+            IPin* tempPin = NULL;
+            if (S_OK != pin->ConnectedTo(&tempPin)) // The pint is not connected
+            {
+                pPinEnum->Release();
+                return pin;
+            }
+        }
+        pin->Release();
+    }
+    pPinEnum->Release();
+    return NULL;
+}
+
+IPin* GetOutputPin(IBaseFilter* filter, REFGUID Category)
+{
+    HRESULT hr;
+    IPin* pin = NULL;
+    IEnumPins* pPinEnum = NULL;
+    filter->EnumPins(&pPinEnum);
+    if (pPinEnum == NULL)
+    {
+        return NULL;
+    }
+    // get first unconnected pin
+    hr = pPinEnum->Reset(); // set to first pin
+    while (S_OK == pPinEnum->Next(1, &pin, NULL))
+    {
+        PIN_DIRECTION pPinDir;
+        pin->QueryDirection(&pPinDir);
+        if (PINDIR_OUTPUT == pPinDir) // This is an output pin
+        {
+            if (Category == GUID_NULL || PinMatchesCategory(pin, Category))
+            {
+                pPinEnum->Release();
+                return pin;
+            }
+        }
+        pin->Release();
+        pin = NULL;
+    }
+    pPinEnum->Release();
+    return NULL;
+}
+
+BOOL PinMatchesCategory(IPin *pPin, REFGUID Category)
+{
+    BOOL bFound = FALSE;
+    IKsPropertySet *pKs = NULL;
+    HRESULT hr = pPin->QueryInterface(IID_PPV_ARGS(&pKs));
+    if (SUCCEEDED(hr))
+    {
+        GUID PinCategory;
+        DWORD cbReturned;
+        hr = pKs->Get(AMPROPSETID_Pin, AMPROPERTY_PIN_CATEGORY, NULL, 0, &PinCategory,
+                      sizeof(GUID), &cbReturned);
+        if (SUCCEEDED(hr) && (cbReturned == sizeof(GUID)))
+        {
+            bFound = (PinCategory == Category);
+        }
+        pKs->Release();
+    }
+    return bFound;
+}
+} // namespace videocapturemodule
+} // namespace webrtc
+
diff --git a/src/modules/video_capture/main/source/Windows/help_functions_windows.h b/src/modules/video_capture/main/source/Windows/help_functions_windows.h
new file mode 100644
index 0000000..0020877
--- /dev/null
+++ b/src/modules/video_capture/main/source/Windows/help_functions_windows.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_HELP_FUNCTIONS_WINDOWS_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_HELP_FUNCTIONS_WINDOWS_H_
+
+#include <dshow.h>
+#include <initguid.h>
+
+DEFINE_GUID(MEDIASUBTYPE_I420, 0x30323449, 0x0000, 0x0010, 0x80, 0x00, 0x00,
+            0xAA, 0x00, 0x38, 0x9B, 0x71);
+DEFINE_GUID(MEDIASUBTYPE_V210, 0x30313276, 0x0000, 0x0010, 0x80, 0x00, 0x00,
+            0xAA, 0x00, 0x38, 0x9B, 0x71);
+DEFINE_GUID(MEDIASUBTYPE_HDYC, 0x43594448, 0x0000, 0x0010, 0x80, 0x00, 0x00,
+            0xAA, 0x00, 0x38, 0x9B, 0x71);
+
+#define RELEASE_AND_CLEAR(p) if (p) { (p) -> Release () ; (p) = NULL ; }
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+LONGLONG GetMaxOfFrameArray(LONGLONG *maxFps, long size);
+
+IPin* GetInputPin(IBaseFilter* filter);
+IPin* GetOutputPin(IBaseFilter* filter, REFGUID Category = GUID_NULL);
+BOOL PinMatchesCategory(IPin *pPin, REFGUID Category);
+
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_HELP_FUNCTIONS_WINDOWS_H_
diff --git a/src/modules/video_capture/main/source/Windows/sink_filter_windows.cc b/src/modules/video_capture/main/source/Windows/sink_filter_windows.cc
new file mode 100644
index 0000000..ec9d2bf
--- /dev/null
+++ b/src/modules/video_capture/main/source/Windows/sink_filter_windows.cc
@@ -0,0 +1,530 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "sink_filter_windows.h"
+
+#include "trace.h"
+#include "help_functions_windows.h"
+
+#include <Dvdmedia.h> // VIDEOINFOHEADER2
+#include <initguid.h>
+
+#define DELETE_RESET(p) { delete (p) ; (p) = NULL ;}
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+
+typedef struct tagTHREADNAME_INFO
+{
+   DWORD dwType;        // must be 0x1000
+   LPCSTR szName;       // pointer to name (in user addr space)
+   DWORD dwThreadID;    // thread ID (-1=caller thread)
+   DWORD dwFlags;       // reserved for future use, must be zero
+} THREADNAME_INFO;
+
+DEFINE_GUID(CLSID_SINKFILTER, 0x88cdbbdc, 0xa73b, 0x4afa, 0xac, 0xbf, 0x15, 0xd5,
+            0xe2, 0xce, 0x12, 0xc3);
+
+CaptureInputPin::CaptureInputPin (WebRtc_Word32 moduleId,
+                            IN TCHAR * szName,
+                            IN CaptureSinkFilter* pFilter,
+                            IN CCritSec * pLock,
+                            OUT HRESULT * pHr,
+                            IN LPCWSTR pszName)
+    : CBaseInputPin (szName, pFilter, pLock, pHr, pszName),
+      _requestedCapability(),
+      _resultingCapability()
+{
+    _moduleId=moduleId;
+    _threadHandle = NULL;
+}
+
+CaptureInputPin::~CaptureInputPin()
+{
+}
+
+HRESULT
+CaptureInputPin::GetMediaType (IN int iPosition, OUT CMediaType * pmt)
+{
+    // reset the thread handle
+    _threadHandle = NULL;
+
+    if(iPosition < 0)
+    return E_INVALIDARG;
+
+    VIDEOINFOHEADER* pvi = (VIDEOINFOHEADER*) pmt->AllocFormatBuffer(
+                            sizeof(VIDEOINFOHEADER));
+    if(NULL == pvi)
+    {
+        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _moduleId,
+                     "CheckMediaType VIDEOINFOHEADER is NULL. Returning...Line:%d\n", __LINE__);
+        return(E_OUTOFMEMORY);
+    }
+
+    ZeroMemory(pvi, sizeof(VIDEOINFOHEADER));
+    pvi->bmiHeader.biSize = sizeof(BITMAPINFOHEADER);
+    pvi->bmiHeader.biPlanes = 1;
+    pvi->bmiHeader.biClrImportant = 0;
+    pvi->bmiHeader.biClrUsed = 0;
+    if (_requestedCapability.maxFPS != 0) {
+        pvi->AvgTimePerFrame = 10000000/_requestedCapability.maxFPS;
+    }
+
+    SetRectEmpty(&(pvi->rcSource)); // we want the whole image area rendered.
+    SetRectEmpty(&(pvi->rcTarget)); // no particular destination rectangle
+
+    pmt->SetType(&MEDIATYPE_Video);
+    pmt->SetFormatType(&FORMAT_VideoInfo);
+    pmt->SetTemporalCompression(FALSE);
+
+    WebRtc_Word32 positionOffset=1;
+    if(_requestedCapability.codecType!=kVideoCodecUnknown)
+    {
+        positionOffset=0;
+    }
+
+    switch (iPosition+positionOffset)
+    {
+        case 0:
+        {
+            pvi->bmiHeader.biCompression = MAKEFOURCC('I','4','2','0');
+            pvi->bmiHeader.biBitCount = 12; //bit per pixel
+            pvi->bmiHeader.biWidth = _requestedCapability.width;
+            pvi->bmiHeader.biHeight = _requestedCapability.height;
+            pvi->bmiHeader.biSizeImage = 3*_requestedCapability.height
+                                        *_requestedCapability.width/2;
+            pmt->SetSubtype(&MEDIASUBTYPE_I420);
+        }
+        break;
+        case 1:
+        {
+            pvi->bmiHeader.biCompression = MAKEFOURCC('Y','U','Y','2');;
+            pvi->bmiHeader.biBitCount = 16; //bit per pixel
+            pvi->bmiHeader.biWidth = _requestedCapability.width;
+            pvi->bmiHeader.biHeight = _requestedCapability.height;
+            pvi->bmiHeader.biSizeImage = 2*_requestedCapability.width
+                                        *_requestedCapability.height;
+            pmt->SetSubtype(&MEDIASUBTYPE_YUY2);
+        }
+        break;
+        case 2:
+        {
+            pvi->bmiHeader.biCompression = BI_RGB;
+            pvi->bmiHeader.biBitCount = 24; //bit per pixel
+            pvi->bmiHeader.biWidth = _requestedCapability.width;
+            pvi->bmiHeader.biHeight = _requestedCapability.height;
+            pvi->bmiHeader.biSizeImage = 3*_requestedCapability.height
+                                        *_requestedCapability.width;
+            pmt->SetSubtype(&MEDIASUBTYPE_RGB24);
+        }
+        break;
+        case 3:
+        {
+            pvi->bmiHeader.biCompression = MAKEFOURCC('U','Y','V','Y');
+            pvi->bmiHeader.biBitCount = 16; //bit per pixel
+            pvi->bmiHeader.biWidth = _requestedCapability.width;
+            pvi->bmiHeader.biHeight = _requestedCapability.height;
+            pvi->bmiHeader.biSizeImage = 2*_requestedCapability.height
+                                         *_requestedCapability.width;
+            pmt->SetSubtype(&MEDIASUBTYPE_UYVY);
+        }
+        break;
+        case 4:
+        {
+            pvi->bmiHeader.biCompression = MAKEFOURCC('M','J','P','G');
+            pvi->bmiHeader.biBitCount = 12; //bit per pixel
+            pvi->bmiHeader.biWidth = _requestedCapability.width;
+            pvi->bmiHeader.biHeight = _requestedCapability.height;
+            pvi->bmiHeader.biSizeImage = 3*_requestedCapability.height
+                                         *_requestedCapability.width/2;
+            pmt->SetSubtype(&MEDIASUBTYPE_MJPG);
+        }
+        break;
+        default :
+        return VFW_S_NO_MORE_ITEMS;
+    }
+    pmt->SetSampleSize(pvi->bmiHeader.biSizeImage);
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _moduleId,
+             "GetMediaType position %d, width %d, height %d, biCompression 0x%x",
+             iPosition, _requestedCapability.width,
+             _requestedCapability.height,pvi->bmiHeader.biCompression);
+    return NOERROR;
+}
+
+HRESULT
+CaptureInputPin::CheckMediaType ( IN const CMediaType * pMediaType)
+{
+    // reset the thread handle
+    _threadHandle = NULL;
+
+    const GUID *type = pMediaType->Type();
+    if (*type != MEDIATYPE_Video)
+    return E_INVALIDARG;
+
+    const GUID *formatType = pMediaType->FormatType();
+
+    // Check for the subtypes we support
+    const GUID *SubType = pMediaType->Subtype();
+    if (SubType == NULL)
+    {
+        return E_INVALIDARG;
+    }
+
+    if(*formatType == FORMAT_VideoInfo)
+    {
+        VIDEOINFOHEADER *pvi = (VIDEOINFOHEADER *) pMediaType->Format();
+        if(pvi == NULL)
+        {
+            return E_INVALIDARG;
+        }
+
+        // Store the incoming width and height
+        _resultingCapability.width = pvi->bmiHeader.biWidth;
+
+        // Store the incoming height,
+        // for RGB24 we assume the frame to be upside down
+        if(*SubType == MEDIASUBTYPE_RGB24
+            && pvi->bmiHeader.biHeight > 0)
+        {
+           _resultingCapability.height = -(pvi->bmiHeader.biHeight);
+        }
+        else
+        {
+           _resultingCapability.height = abs(pvi->bmiHeader.biHeight);
+        }
+
+        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _moduleId,
+                     "CheckMediaType width:%d height:%d Compression:0x%x\n",
+                     pvi->bmiHeader.biWidth,pvi->bmiHeader.biHeight,
+                     pvi->bmiHeader.biCompression);
+
+        if(*SubType == MEDIASUBTYPE_MJPG
+            && pvi->bmiHeader.biCompression == MAKEFOURCC('M','J','P','G'))
+        {
+            _resultingCapability.rawType = kVideoMJPEG;
+            return S_OK; // This format is acceptable.
+        }
+        if(*SubType == MEDIASUBTYPE_I420
+            && pvi->bmiHeader.biCompression == MAKEFOURCC('I','4','2','0'))
+        {
+            _resultingCapability.rawType = kVideoI420;
+            return S_OK; // This format is acceptable.
+        }
+        if(*SubType == MEDIASUBTYPE_YUY2
+            && pvi->bmiHeader.biCompression == MAKEFOURCC('Y','U','Y','2'))
+        {
+            _resultingCapability.rawType = kVideoYUY2;
+            ::Sleep(60); // workaround for bad driver
+            return S_OK; // This format is acceptable.
+        }
+        if(*SubType == MEDIASUBTYPE_UYVY
+            && pvi->bmiHeader.biCompression == MAKEFOURCC('U','Y','V','Y'))
+        {
+            _resultingCapability.rawType = kVideoUYVY;
+            return S_OK; // This format is acceptable.
+        }
+
+        if(*SubType == MEDIASUBTYPE_HDYC)
+        {
+            _resultingCapability.rawType = kVideoUYVY;
+            return S_OK; // This format is acceptable.
+        }
+        if(*SubType == MEDIASUBTYPE_RGB24
+            && pvi->bmiHeader.biCompression == BI_RGB)
+        {
+            _resultingCapability.rawType = kVideoRGB24;
+            return S_OK; // This format is acceptable.
+        }
+    }
+    if(*formatType == FORMAT_VideoInfo2)
+    {
+        // VIDEOINFOHEADER2 that has dwInterlaceFlags
+        VIDEOINFOHEADER2 *pvi = (VIDEOINFOHEADER2 *) pMediaType->Format();
+
+        if(pvi == NULL)
+        {
+            return E_INVALIDARG;
+        }
+
+        WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _moduleId,
+                     "CheckMediaType width:%d height:%d Compression:0x%x\n",
+                     pvi->bmiHeader.biWidth,pvi->bmiHeader.biHeight,
+                     pvi->bmiHeader.biCompression);
+
+        _resultingCapability.width = pvi->bmiHeader.biWidth;
+
+        // Store the incoming height,
+        // for RGB24 we assume the frame to be upside down
+        if(*SubType == MEDIASUBTYPE_RGB24
+            && pvi->bmiHeader.biHeight > 0)
+        {
+           _resultingCapability.height = -(pvi->bmiHeader.biHeight);
+        }
+        else
+        {
+           _resultingCapability.height = abs(pvi->bmiHeader.biHeight);
+        }
+
+        if(*SubType == MEDIASUBTYPE_MJPG
+            && pvi->bmiHeader.biCompression == MAKEFOURCC('M','J','P','G'))
+        {
+            _resultingCapability.rawType = kVideoMJPEG;
+            return S_OK; // This format is acceptable.
+        }
+        if(*SubType == MEDIASUBTYPE_I420
+            && pvi->bmiHeader.biCompression == MAKEFOURCC('I','4','2','0'))
+        {
+            _resultingCapability.rawType = kVideoI420;
+            return S_OK; // This format is acceptable.
+        }
+        if(*SubType == MEDIASUBTYPE_YUY2
+            && pvi->bmiHeader.biCompression == MAKEFOURCC('Y','U','Y','2'))
+        {
+            _resultingCapability.rawType = kVideoYUY2;
+            return S_OK; // This format is acceptable.
+        }
+        if(*SubType == MEDIASUBTYPE_UYVY
+            && pvi->bmiHeader.biCompression == MAKEFOURCC('U','Y','V','Y'))
+        {
+            _resultingCapability.rawType = kVideoUYVY;
+            return S_OK; // This format is acceptable.
+        }
+
+        if(*SubType == MEDIASUBTYPE_HDYC)
+        {
+            _resultingCapability.rawType = kVideoUYVY;
+            return S_OK; // This format is acceptable.
+        }
+        if(*SubType == MEDIASUBTYPE_RGB24
+            && pvi->bmiHeader.biCompression == BI_RGB)
+        {
+            _resultingCapability.rawType = kVideoRGB24;
+            return S_OK; // This format is acceptable.
+        }
+    }
+    return E_INVALIDARG;
+}
+
+HRESULT
+CaptureInputPin::Receive ( IN IMediaSample * pIMediaSample )
+{
+    HRESULT hr = S_OK;
+
+    ASSERT (m_pFilter);
+    ASSERT (pIMediaSample);
+
+    // get the thread handle of the delivering thread inc its priority
+    if( _threadHandle == NULL)
+    {
+        HANDLE handle= GetCurrentThread();
+        SetThreadPriority(handle, THREAD_PRIORITY_HIGHEST);
+        _threadHandle = handle;
+        // See http://msdn.microsoft.com/en-us/library/xcb2z8hs(VS.71).aspx for details on the code
+        // in this function. Name od article is "Setting a Thread Name (Unmanaged)".
+
+        THREADNAME_INFO info;
+        info.dwType = 0x1000;
+        info.szName = "capture_thread";
+        info.dwThreadID = (DWORD)-1;
+        info.dwFlags = 0;
+
+        __try
+        {
+            RaiseException( 0x406D1388, 0, sizeof(info)/sizeof(DWORD),
+                            (DWORD_PTR*)&info );
+        }
+        __except (EXCEPTION_CONTINUE_EXECUTION)
+        {
+        }
+
+    }
+
+    reinterpret_cast <CaptureSinkFilter *>(m_pFilter)->LockReceive();
+    hr = CBaseInputPin::Receive (pIMediaSample);
+
+    if (SUCCEEDED (hr))
+    {
+        const WebRtc_Word32 length = pIMediaSample->GetActualDataLength();
+
+        unsigned char* pBuffer = NULL;
+        if(S_OK != pIMediaSample->GetPointer(&pBuffer))
+        {
+            reinterpret_cast <CaptureSinkFilter *>(m_pFilter)->UnlockReceive();
+            return S_FALSE;
+        }
+
+        // NOTE: filter unlocked within Send call
+        reinterpret_cast <CaptureSinkFilter *> (m_pFilter)->ProcessCapturedFrame(
+                                        pBuffer,length,_resultingCapability);
+    }
+    else
+    {
+        reinterpret_cast <CaptureSinkFilter *>(m_pFilter)->UnlockReceive();
+    }
+
+    return hr;
+}
+
+// called under LockReceive
+HRESULT CaptureInputPin::SetMatchingMediaType(
+                                    const VideoCaptureCapability& capability)
+{
+
+    _requestedCapability = capability;
+    _resultingCapability = VideoCaptureCapability();
+    return S_OK;
+}
+//  ----------------------------------------------------------------------------
+CaptureSinkFilter::CaptureSinkFilter (IN TCHAR * tszName,
+                              IN LPUNKNOWN punk,
+                              OUT HRESULT * phr,
+                              VideoCaptureExternal& captureObserver,
+                              WebRtc_Word32 moduleId)
+    : CBaseFilter(tszName,punk,& m_crtFilter,CLSID_SINKFILTER),
+      m_pInput(NULL),
+      _captureObserver(captureObserver),
+      _moduleId(moduleId)
+{
+    (* phr) = S_OK;
+    m_pInput = new CaptureInputPin(moduleId,NAME ("VideoCaptureInputPin"),
+                                   this,
+                                   & m_crtFilter,
+                                   phr, L"VideoCapture");
+    if (m_pInput == NULL || FAILED (* phr))
+    {
+        (* phr) = FAILED (* phr) ? (* phr) : E_OUTOFMEMORY;
+        goto cleanup;
+    }
+    cleanup :
+    return;
+}
+
+CaptureSinkFilter::~CaptureSinkFilter()
+{
+    delete m_pInput;
+}
+
+int CaptureSinkFilter::GetPinCount()
+{
+    return 1;
+}
+
+CBasePin *
+CaptureSinkFilter::GetPin(IN int Index)
+{
+    CBasePin * pPin;
+    LockFilter ();
+    if (Index == 0)
+    {
+        pPin = m_pInput;
+    }
+    else
+    {
+        pPin = NULL;
+    }
+    UnlockFilter ();
+    return pPin;
+}
+
+STDMETHODIMP CaptureSinkFilter::Pause()
+{
+    LockFilter();
+    if (m_State == State_Stopped)
+    {
+        //  change the state, THEN activate the input pin
+        m_State = State_Paused;
+        if (m_pInput && m_pInput->IsConnected())
+        {
+            m_pInput->Active();
+        }
+        if (m_pInput && !m_pInput->IsConnected())
+        {
+            m_State = State_Running;
+        }
+    }
+    else if (m_State == State_Running)
+    {
+        m_State = State_Paused;
+    }
+    UnlockFilter();
+    return S_OK;
+}
+
+STDMETHODIMP CaptureSinkFilter::Stop()
+{
+    LockReceive();
+    LockFilter();
+
+    //  set the state
+    m_State = State_Stopped;
+
+    //  inactivate the pins
+    if (m_pInput)
+        m_pInput->Inactive();
+
+    UnlockFilter();
+    UnlockReceive();
+    return S_OK;
+}
+
+void CaptureSinkFilter::SetFilterGraph(IGraphBuilder* graph)
+{
+    LockFilter();
+    m_pGraph = graph;
+    UnlockFilter();
+}
+
+void CaptureSinkFilter::ProcessCapturedFrame(unsigned char* pBuffer,
+                                         WebRtc_Word32 length,
+                                         const VideoCaptureCapability& frameInfo)
+{
+    //  we have the receiver lock
+    if (m_State == State_Running)
+    {
+        _captureObserver.IncomingFrame(pBuffer, length, frameInfo);
+
+        // trying to hold it since it's only a memcpy
+        // IMPROVEMENT if this work move critsect
+        UnlockReceive();
+        return;
+    }
+    UnlockReceive();
+    return;
+}
+
+STDMETHODIMP CaptureSinkFilter::SetMatchingMediaType(
+                                        const VideoCaptureCapability& capability)
+{
+    LockReceive();
+    LockFilter();
+    HRESULT hr;
+    if (m_pInput)
+    {
+        hr = m_pInput->SetMatchingMediaType(capability);
+    }
+    else
+    {
+        hr = E_UNEXPECTED;
+    }
+    UnlockFilter();
+    UnlockReceive();
+    return hr;
+}
+
+STDMETHODIMP CaptureSinkFilter::GetClassID( OUT CLSID * pCLSID )
+{
+    (* pCLSID) = CLSID_SINKFILTER;
+    return S_OK;
+}
+
+} // namespace videocapturemodule
+} //namespace webrtc
diff --git a/src/modules/video_capture/main/source/Windows/sink_filter_windows.h b/src/modules/video_capture/main/source/Windows/sink_filter_windows.h
new file mode 100644
index 0000000..efeb9c7
--- /dev/null
+++ b/src/modules/video_capture/main/source/Windows/sink_filter_windows.h
@@ -0,0 +1,100 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_WINDOWS_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_WINDOWS_H_
+
+#include <Streams.h> // Include base DS filter header files
+
+#include "video_capture_defines.h"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+//forward declaration
+
+class CaptureSinkFilter;
+/**
+ *	input pin for camera input
+ *
+ */
+class CaptureInputPin: public CBaseInputPin
+{
+public:
+    WebRtc_Word32 _moduleId;
+
+    VideoCaptureCapability _requestedCapability;
+    VideoCaptureCapability _resultingCapability;
+    HANDLE _threadHandle;
+
+    CaptureInputPin ( WebRtc_Word32 moduleId,
+                      IN TCHAR* szName,
+                      IN CaptureSinkFilter* pFilter,
+                      IN CCritSec * pLock,
+                      OUT HRESULT * pHr,
+                      IN LPCWSTR pszName);
+    virtual ~CaptureInputPin();
+
+    HRESULT GetMediaType (IN int iPos, OUT CMediaType * pmt);
+    HRESULT CheckMediaType (IN const CMediaType * pmt);
+    STDMETHODIMP Receive (IN IMediaSample *);
+    HRESULT SetMatchingMediaType(const VideoCaptureCapability& capability);
+};
+
+class CaptureSinkFilter: public CBaseFilter
+{
+
+public:
+    CaptureSinkFilter (IN TCHAR * tszName,
+                   IN LPUNKNOWN punk,
+                   OUT HRESULT * phr,
+                   VideoCaptureExternal& captureObserver,
+                   WebRtc_Word32 moduleId);
+    virtual ~CaptureSinkFilter();
+
+    //  --------------------------------------------------------------------
+    //  class methods
+
+    void ProcessCapturedFrame(unsigned char* pBuffer, WebRtc_Word32 length,
+                              const VideoCaptureCapability& frameInfo);
+    //  explicit receiver lock aquisition and release
+    void LockReceive()  { m_crtRecv.Lock();}
+    void UnlockReceive() {m_crtRecv.Unlock();}
+    //  explicit filter lock aquisition and release
+    void LockFilter() {m_crtFilter.Lock();}
+    void UnlockFilter() { m_crtFilter.Unlock(); }
+    void SetFilterGraph(IGraphBuilder* graph); // Used if EVR
+
+    //  --------------------------------------------------------------------
+    //  COM interfaces
+DECLARE_IUNKNOWN    ;
+    STDMETHODIMP SetMatchingMediaType(const VideoCaptureCapability& capability);
+
+    //  --------------------------------------------------------------------
+    //  CBaseFilter methods
+    int GetPinCount ();
+    CBasePin * GetPin ( IN int Index);
+    STDMETHODIMP Pause ();
+    STDMETHODIMP Stop ();
+    STDMETHODIMP GetClassID ( OUT CLSID * pCLSID);
+    //  --------------------------------------------------------------------
+    //  class factory calls this
+    static CUnknown * CreateInstance (IN LPUNKNOWN punk, OUT HRESULT * phr);
+private:
+    CCritSec m_crtFilter; //  filter lock
+    CCritSec m_crtRecv;  //  receiver lock; always acquire before filter lock
+    CaptureInputPin * m_pInput;
+    VideoCaptureExternal& _captureObserver;
+    WebRtc_Word32 _moduleId;
+};
+} // namespace videocapturemodule
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_WINDOWS_H_
diff --git a/src/modules/video_capture/main/source/Windows/video_capture_factory_windows.cc b/src/modules/video_capture/main/source/Windows/video_capture_factory_windows.cc
new file mode 100644
index 0000000..13820d6
--- /dev/null
+++ b/src/modules/video_capture/main/source/Windows/video_capture_factory_windows.cc
@@ -0,0 +1,45 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "ref_count.h"
+#include "video_capture_windows.h"
+#include "trace.h"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+VideoCaptureModule* VideoCaptureImpl::Create(
+    const WebRtc_Word32 id,
+    const char* deviceUniqueIdUTF8)
+{
+
+    if (deviceUniqueIdUTF8 == NULL)
+    {
+        return NULL;
+    }
+
+    char productId[kVideoCaptureProductIdLength];
+    videocapturemodule::DeviceInfoWindows::GetProductId(deviceUniqueIdUTF8,
+                                                        productId,
+                                                        sizeof(productId));
+    
+    RefCountImpl<videocapturemodule::VideoCaptureDS>* newCaptureModule =
+        new RefCountImpl<videocapturemodule::VideoCaptureDS>(id);
+
+    if (newCaptureModule->Init(id, deviceUniqueIdUTF8) != 0)
+    {
+        delete newCaptureModule;
+        newCaptureModule = NULL;
+    }
+    return newCaptureModule;
+}
+} //namespace videocapturemodule
+} //namespace webrtc
diff --git a/src/modules/video_capture/main/source/Windows/video_capture_windows.cc b/src/modules/video_capture/main/source/Windows/video_capture_windows.cc
new file mode 100644
index 0000000..4c2d524
--- /dev/null
+++ b/src/modules/video_capture/main/source/Windows/video_capture_windows.cc
@@ -0,0 +1,414 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_capture_windows.h"
+
+#include "../video_capture_config.h"
+#include "critical_section_wrapper.h"
+#include "help_functions_windows.h"
+#include "sink_filter_windows.h"
+#include "trace.h"
+
+#include <Dvdmedia.h> // VIDEOINFOHEADER2
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+VideoCaptureDS::VideoCaptureDS(const WebRtc_Word32 id)
+    : VideoCaptureImpl(id), _dsInfo(id), _captureFilter(NULL),
+      _graphBuilder(NULL), _mediaControl(NULL), _sinkFilter(NULL),
+      _inputSendPin(NULL), _outputCapturePin(NULL), _dvFilter(NULL),
+      _inputDvPin(NULL), _outputDvPin(NULL)
+{
+}
+
+VideoCaptureDS::~VideoCaptureDS()
+{
+    if (_mediaControl)
+    {
+        _mediaControl->Stop();
+    }
+    if (_graphBuilder)
+    {
+        if (_sinkFilter)
+            _graphBuilder->RemoveFilter(_sinkFilter);
+        if (_captureFilter)
+            _graphBuilder->RemoveFilter(_captureFilter);
+        if (_dvFilter)
+            _graphBuilder->RemoveFilter(_dvFilter);
+    }
+    RELEASE_AND_CLEAR(_captureFilter); // release the capture device
+    RELEASE_AND_CLEAR(_sinkFilter);
+    RELEASE_AND_CLEAR(_dvFilter);
+
+    RELEASE_AND_CLEAR(_mediaControl);
+    RELEASE_AND_CLEAR(_inputSendPin);
+    RELEASE_AND_CLEAR(_outputCapturePin);
+
+    RELEASE_AND_CLEAR(_inputDvPin);
+    RELEASE_AND_CLEAR(_outputDvPin);
+
+    RELEASE_AND_CLEAR(_graphBuilder);
+}
+
+WebRtc_Word32 VideoCaptureDS::Init(const WebRtc_Word32 id,
+                                          const char* deviceUniqueIdUTF8)
+{
+    const WebRtc_Word32 nameLength =
+        (WebRtc_Word32) strlen((char*) deviceUniqueIdUTF8);
+    if (nameLength > kVideoCaptureUniqueNameLength)
+        return -1;
+
+    // Store the device name
+    _deviceUniqueId = new (std::nothrow) char[nameLength + 1];
+    memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1);
+
+    if (_dsInfo.Init() != 0)
+        return -1;
+
+    _captureFilter = _dsInfo.GetDeviceFilter(deviceUniqueIdUTF8);
+    if (!_captureFilter)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to create capture filter.");
+        return -1;
+    }
+
+    // Get the interface for DirectShow's GraphBuilder
+    HRESULT hr = CoCreateInstance(CLSID_FilterGraph, NULL,
+                                  CLSCTX_INPROC_SERVER, IID_IGraphBuilder,
+                                  (void **) &_graphBuilder);
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to create graph builder.");
+        return -1;
+    }
+
+    hr = _graphBuilder->QueryInterface(IID_IMediaControl,
+                                       (void **) &_mediaControl);
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to create media control builder.");
+        return -1;
+    }
+    hr = _graphBuilder->AddFilter(_captureFilter, CAPTURE_FILTER_NAME);
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to add the capture device to the graph.");
+        return -1;
+    }
+
+    _outputCapturePin = GetOutputPin(_captureFilter, PIN_CATEGORY_CAPTURE);
+
+    // Create the sink filte used for receiving Captured frames.
+    _sinkFilter = new CaptureSinkFilter(SINK_FILTER_NAME, NULL, &hr,
+                                        *this, _id);
+    if (hr != S_OK)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to create send filter");
+        return -1;
+    }
+    _sinkFilter->AddRef();
+
+    hr = _graphBuilder->AddFilter(_sinkFilter, SINK_FILTER_NAME);
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to add the send filter to the graph.");
+        return -1;
+    }
+    _inputSendPin = GetInputPin(_sinkFilter);
+
+    // Temporary connect here.
+    // This is done so that no one else can use the capture device.
+    if (SetCameraOutput(_requestedCapability) != 0)
+    {
+        return -1;
+    }
+    hr = _mediaControl->Pause();
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to Pause the Capture device. Is it already occupied? %d.",
+                     hr);
+        return -1;
+    }
+    WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, _id,
+                 "Capture device '%s' initialized.", deviceUniqueIdUTF8);
+    return 0;
+}
+
+WebRtc_Word32 VideoCaptureDS::StartCapture(
+                                      const VideoCaptureCapability& capability)
+{
+    CriticalSectionScoped cs(&_apiCs);
+
+    if (capability != _requestedCapability)
+    {
+        DisconnectGraph();
+
+        if (SetCameraOutput(capability) != 0)
+        {
+            return -1;
+        }
+    }
+    HRESULT hr = _mediaControl->Run();
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to start the Capture device.");
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 VideoCaptureDS::StopCapture()
+{
+    CriticalSectionScoped cs(&_apiCs);
+
+    HRESULT hr = _mediaControl->Pause();
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to stop the capture graph. %d", hr);
+        return -1;
+    }
+    return 0;
+}
+bool VideoCaptureDS::CaptureStarted()
+{
+    OAFilterState state = 0;
+    HRESULT hr = _mediaControl->GetState(1000, &state);
+    if (hr != S_OK && hr != VFW_S_CANT_CUE)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to get the CaptureStarted status");
+    }
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+                 "CaptureStarted %d", state);
+    return state == State_Running;
+
+}
+WebRtc_Word32 VideoCaptureDS::CaptureSettings(
+                                             VideoCaptureCapability& settings)
+{
+    settings = _requestedCapability;
+    return 0;
+}
+
+WebRtc_Word32 VideoCaptureDS::SetCameraOutput(
+                             const VideoCaptureCapability& requestedCapability)
+{
+
+    // Get the best matching capability
+    VideoCaptureCapability capability;
+    WebRtc_Word32 capabilityIndex;
+
+    // Store the new requested size
+    _requestedCapability = requestedCapability;
+    // Match the requested capability with the supported.
+    if ((capabilityIndex = _dsInfo.GetBestMatchedCapability(_deviceUniqueId,
+                                                            _requestedCapability,
+                                                            capability)) < 0)
+    {
+        return -1;
+    }
+    //Reduce the frame rate if possible.
+    if (capability.maxFPS > requestedCapability.maxFPS)
+    {
+        capability.maxFPS = requestedCapability.maxFPS;
+    } else if (capability.maxFPS <= 0)
+    {
+        capability.maxFPS = 30;
+    }
+    // Store the new expected capture delay
+    _captureDelay = capability.expectedCaptureDelay;
+
+    // Convert it to the windows capability index since they are not nexessary
+    // the same
+    VideoCaptureCapabilityWindows windowsCapability;
+    if (_dsInfo.GetWindowsCapability(capabilityIndex, windowsCapability) != 0)
+    {
+        return -1;
+    }
+
+    IAMStreamConfig* streamConfig = NULL;
+    AM_MEDIA_TYPE *pmt = NULL;
+    VIDEO_STREAM_CONFIG_CAPS caps;
+
+    HRESULT hr = _outputCapturePin->QueryInterface(IID_IAMStreamConfig,
+                                                   (void**) &streamConfig);
+    if (hr)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Can't get the Capture format settings.");
+        return -1;
+    }
+
+    //Get the windows capability from the capture device
+    bool isDVCamera = false;
+    hr = streamConfig->GetStreamCaps(
+                                    windowsCapability.directShowCapabilityIndex,
+                                    &pmt, reinterpret_cast<BYTE*> (&caps));
+    if (!FAILED(hr))
+    {
+        if (pmt->formattype == FORMAT_VideoInfo2)
+        {
+            VIDEOINFOHEADER2* h =
+                reinterpret_cast<VIDEOINFOHEADER2*> (pmt->pbFormat);
+            if (capability.maxFPS > 0
+                && windowsCapability.supportFrameRateControl)
+            {
+                h->AvgTimePerFrame = REFERENCE_TIME(10000000.0
+                                                    / capability.maxFPS);
+            }
+        }
+        else
+        {
+            VIDEOINFOHEADER* h = reinterpret_cast<VIDEOINFOHEADER*>
+                                (pmt->pbFormat);
+            if (capability.maxFPS > 0
+                && windowsCapability.supportFrameRateControl)
+            {
+                h->AvgTimePerFrame = REFERENCE_TIME(10000000.0
+                                                    / capability.maxFPS);
+            }
+
+        }
+
+        // Set the sink filter to request this capability
+        _sinkFilter->SetMatchingMediaType(capability);
+        //Order the capture device to use this capability
+        hr += streamConfig->SetFormat(pmt);
+
+        //Check if this is a DV camera and we need to add MS DV Filter
+        if (pmt->subtype == MEDIASUBTYPE_dvsl
+           || pmt->subtype == MEDIASUBTYPE_dvsd
+           || pmt->subtype == MEDIASUBTYPE_dvhd)
+            isDVCamera = true; // This is a DV camera. Use MS DV filter
+    }
+    RELEASE_AND_CLEAR(streamConfig);
+
+    if (FAILED(hr))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to set capture device output format");
+        return -1;
+    }
+
+    if (isDVCamera)
+    {
+        hr = ConnectDVCamera();
+    }
+    else
+    {
+        hr = _graphBuilder->ConnectDirect(_outputCapturePin, _inputSendPin,
+                                          NULL);
+    }
+    if (hr != S_OK)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to connect the Capture graph %d", hr);
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 VideoCaptureDS::DisconnectGraph()
+{
+    HRESULT hr = _mediaControl->Stop();
+    hr += _graphBuilder->Disconnect(_outputCapturePin);
+    hr += _graphBuilder->Disconnect(_inputSendPin);
+
+    //if the DV camera filter exist
+    if (_dvFilter)
+    {
+        _graphBuilder->Disconnect(_inputDvPin);
+        _graphBuilder->Disconnect(_outputDvPin);
+    }
+    if (hr != S_OK)
+    {
+        WEBRTC_TRACE( webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to Stop the Capture device for reconfiguration %d",
+                     hr);
+        return -1;
+    }
+    return 0;
+}
+HRESULT VideoCaptureDS::ConnectDVCamera()
+{
+    HRESULT hr = S_OK;
+
+    if (!_dvFilter)
+    {
+        hr = CoCreateInstance(CLSID_DVVideoCodec, NULL, CLSCTX_INPROC,
+                              IID_IBaseFilter, (void **) &_dvFilter);
+        if (hr != S_OK)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "Failed to create the dv decoder: %x", hr);
+            return hr;
+        }
+        hr = _graphBuilder->AddFilter(_dvFilter, L"VideoDecoderDV");
+        if (hr != S_OK)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "Failed to add the dv decoder to the graph: %x", hr);
+            return hr;
+        }
+        _inputDvPin = GetInputPin(_dvFilter);
+        if (_inputDvPin == NULL)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "Failed to get input pin from DV decoder");
+            return -1;
+        }
+        _outputDvPin = GetOutputPin(_dvFilter);
+        if (_outputDvPin == NULL)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "Failed to get output pin from DV decoder");
+            return -1;
+        }
+    }
+    hr = _graphBuilder->ConnectDirect(_outputCapturePin, _inputDvPin, NULL);
+    if (hr != S_OK)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                     "Failed to connect capture device to the dv devoder: %x",
+                     hr);
+        return hr;
+    }
+
+    hr = _graphBuilder->ConnectDirect(_outputDvPin, _inputSendPin, NULL);
+    if (hr != S_OK)
+    {
+        if (hr == 0x80070004)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "Failed to connect the capture device, busy");
+        }
+        else
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "Failed to connect capture device to the send graph: 0x%x",
+                         hr);
+        }
+        return hr;
+    }
+    return hr;
+}
+} // namespace videocapturemodule
+} //namespace webrtc
diff --git a/src/modules/video_capture/main/source/Windows/video_capture_windows.h b/src/modules/video_capture/main/source/Windows/video_capture_windows.h
new file mode 100644
index 0000000..a888a9f
--- /dev/null
+++ b/src/modules/video_capture/main/source/Windows/video_capture_windows.h
@@ -0,0 +1,86 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_VIDEO_CAPTURE_WINDOWS_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_VIDEO_CAPTURE_WINDOWS_H_
+
+#include "../video_capture_impl.h"
+#include <tchar.h>
+
+#include "device_info_windows.h"
+
+#define CAPTURE_FILTER_NAME L"VideoCaptureFilter"
+#define SINK_FILTER_NAME L"SinkFilter"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+// Forward declaraion
+class CaptureSinkFilter;
+
+class VideoCaptureDS: public VideoCaptureImpl
+{
+public:
+
+    static VideoCaptureModule* Create(const WebRtc_Word32 id,
+                                      const WebRtc_UWord8* deviceUniqueIdUTF8);
+
+    VideoCaptureDS(const WebRtc_Word32 id);
+
+    virtual WebRtc_Word32 Init(const WebRtc_Word32 id,
+                               const char* deviceUniqueIdUTF8);
+
+    /*************************************************************************
+     *
+     *   Start/Stop
+     *
+     *************************************************************************/
+    virtual WebRtc_Word32
+        StartCapture(const VideoCaptureCapability& capability);
+    virtual WebRtc_Word32 StopCapture();
+
+    /**************************************************************************
+     *
+     *   Properties of the set device
+     *
+     **************************************************************************/
+
+    virtual bool CaptureStarted();
+    virtual WebRtc_Word32 CaptureSettings(VideoCaptureCapability& settings);
+
+protected:
+    virtual ~VideoCaptureDS();
+
+    // Help functions
+
+    WebRtc_Word32
+        SetCameraOutput(const VideoCaptureCapability& requestedCapability);
+    WebRtc_Word32 DisconnectGraph();
+    HRESULT VideoCaptureDS::ConnectDVCamera();
+
+    DeviceInfoWindows _dsInfo;
+
+    IBaseFilter* _captureFilter;
+    IGraphBuilder* _graphBuilder;
+    IMediaControl* _mediaControl;
+    CaptureSinkFilter* _sinkFilter;
+    IPin* _inputSendPin;
+    IPin* _outputCapturePin;
+
+    // Microsoft DV interface (external DV cameras)
+    IBaseFilter* _dvFilter;
+    IPin* _inputDvPin;
+    IPin* _outputDvPin;
+
+};
+} // namespace videocapturemodule
+} //namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_VIDEO_CAPTURE_WINDOWS_H_
diff --git a/src/modules/video_capture/main/source/android/device_info_android.cc b/src/modules/video_capture/main/source/android/device_info_android.cc
new file mode 100644
index 0000000..d119531
--- /dev/null
+++ b/src/modules/video_capture/main/source/android/device_info_android.cc
@@ -0,0 +1,326 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "device_info_android.h"
+
+#include <stdio.h>
+
+#include "ref_count.h"
+#include "trace.h"
+#include "video_capture_android.h"
+
+namespace webrtc
+{
+
+namespace videocapturemodule
+{
+
+VideoCaptureModule::DeviceInfo*
+VideoCaptureImpl::CreateDeviceInfo (const WebRtc_Word32 id) {
+  videocapturemodule::DeviceInfoAndroid *deviceInfo =
+      new videocapturemodule::DeviceInfoAndroid(id);
+  if (deviceInfo && deviceInfo->Init() != 0) {
+    delete deviceInfo;
+    deviceInfo = NULL;
+  }
+  return deviceInfo;
+}
+
+DeviceInfoAndroid::DeviceInfoAndroid(const WebRtc_Word32 id) :
+    DeviceInfoImpl(id) {
+}
+
+WebRtc_Word32 DeviceInfoAndroid::Init() {
+  return 0;
+}
+
+DeviceInfoAndroid::~DeviceInfoAndroid() {
+}
+
+WebRtc_UWord32 DeviceInfoAndroid::NumberOfDevices() {
+  JNIEnv *env;
+  jclass javaCmDevInfoClass;
+  jobject javaCmDevInfoObject;
+  bool attached = false;
+  if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
+          env,
+          javaCmDevInfoClass,
+          javaCmDevInfoObject,
+          attached) != 0)
+    return 0;
+
+  WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+               "%s GetMethodId", __FUNCTION__);
+  // get the method ID for the Android Java GetDeviceUniqueName name.
+  jmethodID cid = env->GetMethodID(javaCmDevInfoClass,
+                                   "NumberOfDevices",
+                                   "()I");
+
+  jint numberOfDevices = 0;
+  if (cid != NULL) {
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+                 "%s Calling Number of devices", __FUNCTION__);
+    numberOfDevices = env->CallIntMethod(javaCmDevInfoObject, cid);
+  }
+  VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+
+  if (numberOfDevices > 0)
+    return numberOfDevices;
+  return 0;
+}
+
+WebRtc_Word32 DeviceInfoAndroid::GetDeviceName(
+    WebRtc_UWord32 deviceNumber,
+    char* deviceNameUTF8,
+    WebRtc_UWord32 deviceNameLength,
+    char* deviceUniqueIdUTF8,
+    WebRtc_UWord32 deviceUniqueIdUTF8Length,
+    char* /*productUniqueIdUTF8*/,
+    WebRtc_UWord32 /*productUniqueIdUTF8Length*/) {
+
+  JNIEnv *env;
+  jclass javaCmDevInfoClass;
+  jobject javaCmDevInfoObject;
+  WebRtc_Word32 result = 0;
+  bool attached = false;
+  if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
+          env,
+          javaCmDevInfoClass,
+          javaCmDevInfoObject,
+          attached)!= 0)
+    return -1;
+
+  // get the method ID for the Android Java GetDeviceUniqueName name.
+  jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetDeviceUniqueName",
+                                   "(I)Ljava/lang/String;");
+  if (cid != NULL) {
+    jobject javaDeviceNameObj = env->CallObjectMethod(javaCmDevInfoObject,
+                                                      cid, deviceNumber);
+    if (javaDeviceNameObj == NULL) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "%s: Failed to get device name for device %d.",
+                   __FUNCTION__, (int) deviceNumber);
+      result = -1;
+    } else {
+      jboolean isCopy;
+      const char* javaDeviceNameChar = env->GetStringUTFChars(
+          (jstring) javaDeviceNameObj
+          ,&isCopy);
+      const jsize javaDeviceNameCharLength =
+          env->GetStringUTFLength((jstring) javaDeviceNameObj);
+      if ((WebRtc_UWord32) javaDeviceNameCharLength <
+          deviceUniqueIdUTF8Length) {
+        memcpy(deviceUniqueIdUTF8,
+               javaDeviceNameChar,
+               javaDeviceNameCharLength + 1);
+      }
+      else {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
+                     _id, "%s: deviceUniqueIdUTF8 to short.",
+                     __FUNCTION__);
+        result = -1;
+      }
+      if ((WebRtc_UWord32) javaDeviceNameCharLength < deviceNameLength) {
+        memcpy(deviceNameUTF8,
+               javaDeviceNameChar,
+               javaDeviceNameCharLength + 1);
+      }
+      env->ReleaseStringUTFChars((jstring) javaDeviceNameObj,
+                                 javaDeviceNameChar);
+    }  // javaDeviceNameObj == NULL
+
+  }
+  else {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                 "%s: Failed to find GetDeviceUniqueName function id",
+                 __FUNCTION__);
+    result = -1;
+  }
+
+  VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+
+  WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
+               "%s: result %d", __FUNCTION__, (int) result);
+  return result;
+
+}
+
+WebRtc_Word32 DeviceInfoAndroid::CreateCapabilityMap(
+    const char* deviceUniqueIdUTF8) {
+  MapItem* item = NULL;
+  while ((item = _captureCapabilities.Last())) {
+    delete (VideoCaptureCapability*) item->GetItem();
+    _captureCapabilities.Erase(item);
+  }
+
+  JNIEnv *env;
+  jclass javaCmDevInfoClass;
+  jobject javaCmDevInfoObject;
+  bool attached = false;
+  if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
+          env,
+          javaCmDevInfoClass,
+          javaCmDevInfoObject,
+          attached) != 0)
+    return -1;
+
+  // Find the capability class
+  jclass javaCapClassLocal = env->FindClass(AndroidJavaCaptureCapabilityClass);
+  if (javaCapClassLocal == NULL) {
+    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                 "%s: Can't find java class VideoCaptureCapabilityAndroid.",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  // get the method ID for the Android Java GetCapabilityArray .
+  char signature[256];
+  sprintf(signature,
+          "(Ljava/lang/String;)[L%s;",
+          AndroidJavaCaptureCapabilityClass);
+  jmethodID cid = env->GetMethodID(javaCmDevInfoClass,
+                                   "GetCapabilityArray",
+                                   signature);
+  if (cid == NULL) {
+    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                 "%s: Can't find method GetCapabilityArray.", __FUNCTION__);
+    return -1;
+  }
+  // Create a jstring so we can pass the deviceUniquName to the java method.
+  jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
+
+  if (capureIdString == NULL) {
+    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                 "%s: Can't create string for  method GetCapabilityArray.",
+                 __FUNCTION__);
+    return -1;
+  }
+  // Call the java class and get an array with capabilities back.
+  jobject javaCapabilitiesObj = env->CallObjectMethod(javaCmDevInfoObject,
+                                                      cid, capureIdString);
+  if (!javaCapabilitiesObj) {
+    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                 "%s: Failed to call java GetCapabilityArray.",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  jfieldID widthField = env->GetFieldID(javaCapClassLocal, "width", "I");
+  jfieldID heigtField = env->GetFieldID(javaCapClassLocal, "height", "I");
+  jfieldID maxFpsField = env->GetFieldID(javaCapClassLocal, "maxFPS", "I");
+  if (widthField == NULL || heigtField == NULL || maxFpsField == NULL) {
+    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                 "%s: Failed to get field Id.", __FUNCTION__);
+    return -1;
+  }
+
+  const jsize numberOfCapabilities =
+      env->GetArrayLength((jarray) javaCapabilitiesObj);
+
+  for (jsize i = 0; i < numberOfCapabilities; ++i) {
+    VideoCaptureCapability *cap = new VideoCaptureCapability();
+    jobject capabilityElement = env->GetObjectArrayElement(
+        (jobjectArray) javaCapabilitiesObj,
+        i);
+
+    cap->width = env->GetIntField(capabilityElement, widthField);
+    cap->height = env->GetIntField(capabilityElement, heigtField);
+    cap->expectedCaptureDelay = _expectedCaptureDelay;
+    cap->rawType = kVideoNV21;
+    cap->maxFPS = env->GetIntField(capabilityElement, maxFpsField);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                 "%s: Cap width %d, height %d, fps %d", __FUNCTION__,
+                 cap->width, cap->height, cap->maxFPS);
+    _captureCapabilities.Insert(i, cap);
+  }
+
+  _lastUsedDeviceNameLength = strlen((char*) deviceUniqueIdUTF8);
+  _lastUsedDeviceName = (char*) realloc(_lastUsedDeviceName,
+                                        _lastUsedDeviceNameLength + 1);
+  memcpy(_lastUsedDeviceName,
+         deviceUniqueIdUTF8,
+         _lastUsedDeviceNameLength + 1);
+
+  VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+               "CreateCapabilityMap %d", _captureCapabilities.Size());
+
+  return _captureCapabilities.Size();
+}
+
+WebRtc_Word32 DeviceInfoAndroid::GetOrientation(
+    const char* deviceUniqueIdUTF8,
+    VideoCaptureRotation& orientation) {
+  JNIEnv *env;
+  jclass javaCmDevInfoClass;
+  jobject javaCmDevInfoObject;
+  bool attached = false;
+  if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
+          env,
+          javaCmDevInfoClass,
+          javaCmDevInfoObject,
+          attached) != 0)
+    return -1;
+
+  // get the method ID for the Android Java GetOrientation .
+  jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetOrientation",
+                                   "(Ljava/lang/String;)I");
+  if (cid == NULL) {
+    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                 "%s: Can't find method GetOrientation.", __FUNCTION__);
+    return -1;
+  }
+  // Create a jstring so we can pass the deviceUniquName to the java method.
+  jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
+  if (capureIdString == NULL) {
+    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                 "%s: Can't create string for  method GetCapabilityArray.",
+                 __FUNCTION__);
+    return -1;
+  }
+  // Call the java class and get the orientation.
+  jint jorientation = env->CallIntMethod(javaCmDevInfoObject, cid,
+                                         capureIdString);
+  VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
+
+  WebRtc_Word32 retValue = 0;
+  switch (jorientation) {
+    case -1: // Error
+      orientation = kCameraRotate0;
+      retValue = -1;
+      break;
+    case 0:
+      orientation = kCameraRotate0;
+      break;
+    case 90:
+      orientation = kCameraRotate90;
+      break;
+    case 180:
+      orientation = kCameraRotate180;
+      break;
+    case 270:
+      orientation = kCameraRotate270;
+      break;
+    case 360:
+      orientation = kCameraRotate0;
+      break;
+  }
+  return retValue;
+}
+
+}  // namespace videocapturemodule
+}  // namespace webrtc
diff --git a/src/modules/video_capture/main/source/android/device_info_android.h b/src/modules/video_capture/main/source/android/device_info_android.h
new file mode 100644
index 0000000..855a291
--- /dev/null
+++ b/src/modules/video_capture/main/source/android/device_info_android.h
@@ -0,0 +1,65 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_DEVICE_INFO_ANDROID_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_DEVICE_INFO_ANDROID_H_
+
+#include <jni.h>
+#include "../video_capture_impl.h"
+#include "../device_info_impl.h"
+
+#define AndroidJavaCaptureDeviceInfoClass "org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid"
+#define AndroidJavaCaptureCapabilityClass "org/webrtc/videoengine/CaptureCapabilityAndroid"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+
+// Android logging, uncomment to print trace to
+// logcat instead of trace file/callback
+// #include <android/log.h>
+// #define WEBRTC_TRACE(a,b,c,...)
+// __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
+
+class DeviceInfoAndroid : public DeviceInfoImpl {
+
+ public:
+  DeviceInfoAndroid(const WebRtc_Word32 id);
+  WebRtc_Word32 Init();
+  virtual ~DeviceInfoAndroid();
+  virtual WebRtc_UWord32 NumberOfDevices();
+  virtual WebRtc_Word32 GetDeviceName(
+      WebRtc_UWord32 deviceNumber,
+      char* deviceNameUTF8,
+      WebRtc_UWord32 deviceNameLength,
+      char* deviceUniqueIdUTF8,
+      WebRtc_UWord32 deviceUniqueIdUTF8Length,
+      char* productUniqueIdUTF8 = 0,
+      WebRtc_UWord32 productUniqueIdUTF8Length = 0);
+  virtual WebRtc_Word32 CreateCapabilityMap(const char* deviceUniqueIdUTF8);
+
+  virtual WebRtc_Word32 DisplayCaptureSettingsDialogBox(
+      const char* /*deviceUniqueIdUTF8*/,
+      const char* /*dialogTitleUTF8*/,
+      void* /*parentWindow*/,
+      WebRtc_UWord32 /*positionX*/,
+      WebRtc_UWord32 /*positionY*/) { return -1; }
+  virtual WebRtc_Word32 GetOrientation(const char* deviceUniqueIdUTF8,
+                                       VideoCaptureRotation& orientation);
+ private:
+  bool IsDeviceNameMatches(const char* name, const char* deviceUniqueIdUTF8);
+  enum {_expectedCaptureDelay = 190};
+};
+
+}  // namespace videocapturemodule
+}  // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_DEVICE_INFO_ANDROID_H_
diff --git a/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java b/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java
new file mode 100644
index 0000000..33c9927
--- /dev/null
+++ b/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/CaptureCapabilityAndroid.java
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengine;
+
+public class CaptureCapabilityAndroid {
+    public int width  = 0;
+    public int height = 0;
+    public int maxFPS = 0;
+}
diff --git a/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java b/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java
new file mode 100644
index 0000000..ef7fc7b
--- /dev/null
+++ b/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java
@@ -0,0 +1,258 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengine;
+
+import java.io.IOException;
+import java.util.Locale;
+import java.util.concurrent.locks.ReentrantLock;
+
+import org.webrtc.videoengine.CaptureCapabilityAndroid;
+import org.webrtc.videoengine.VideoCaptureDeviceInfoAndroid.AndroidVideoCaptureDevice;
+
+import android.graphics.ImageFormat;
+import android.graphics.PixelFormat;
+import android.graphics.Rect;
+import android.graphics.YuvImage;
+import android.hardware.Camera;
+import android.hardware.Camera.PreviewCallback;
+import android.util.Log;
+import android.view.SurfaceHolder;
+import android.view.SurfaceHolder.Callback;
+
+public class VideoCaptureAndroid implements PreviewCallback, Callback {
+
+    private final static String TAG = "WEBRTC-JC";
+
+    private Camera camera;
+    private AndroidVideoCaptureDevice currentDevice = null;
+    public ReentrantLock previewBufferLock = new ReentrantLock();
+    // This lock takes sync with StartCapture and SurfaceChanged
+    private ReentrantLock captureLock = new ReentrantLock();
+    private int PIXEL_FORMAT = ImageFormat.NV21;
+    PixelFormat pixelFormat = new PixelFormat();
+    // True when the C++ layer has ordered the camera to be started.
+    private boolean isCaptureStarted = false;
+    private boolean isCaptureRunning = false;
+    private boolean isSurfaceReady = false;
+    private SurfaceHolder surfaceHolder = null;
+
+    private final int numCaptureBuffers = 3;
+    private int expectedFrameSize = 0;
+    private int orientation = 0;
+    private int id = 0;
+    // C++ callback context variable.
+    private long context = 0;
+    private SurfaceHolder localPreview = null;
+    // True if this class owns the preview video buffers.
+    private boolean ownsBuffers = false;
+
+    private int mCaptureWidth = -1;
+    private int mCaptureHeight = -1;
+    private int mCaptureFPS = -1;
+
+    public static
+    void DeleteVideoCaptureAndroid(VideoCaptureAndroid captureAndroid) {
+        Log.d(TAG, "DeleteVideoCaptureAndroid");
+
+        captureAndroid.StopCapture();
+        captureAndroid.camera.release();
+        captureAndroid.camera = null;
+        captureAndroid.context = 0;
+    }
+
+    public VideoCaptureAndroid(int in_id, long in_context, Camera in_camera,
+            AndroidVideoCaptureDevice in_device) {
+        id = in_id;
+        context = in_context;
+        camera = in_camera;
+        currentDevice = in_device;
+    }
+
+    private int tryStartCapture(int width, int height, int frameRate) {
+        if (camera == null) {
+            Log.e(TAG, "Camera not initialized %d" + id);
+            return -1;
+        }
+
+        Log.d(TAG, "tryStartCapture " + width +
+                " height " + height +" frame rate " + frameRate +
+                "isCaptureRunning " + isCaptureRunning +
+                "isSurfaceReady " + isSurfaceReady +
+                "isCaptureStarted " + isCaptureStarted);
+
+        if (isCaptureRunning || !isSurfaceReady || !isCaptureStarted) {
+            return 0;
+        }
+
+        try {
+            camera.setPreviewDisplay(surfaceHolder);
+
+            CaptureCapabilityAndroid currentCapability =
+                    new CaptureCapabilityAndroid();
+            currentCapability.width = width;
+            currentCapability.height = height;
+            currentCapability.maxFPS = frameRate;
+            PixelFormat.getPixelFormatInfo(PIXEL_FORMAT, pixelFormat);
+
+            Camera.Parameters parameters = camera.getParameters();
+            parameters.setPreviewSize(currentCapability.width,
+                    currentCapability.height);
+            parameters.setPreviewFormat(PIXEL_FORMAT);
+            parameters.setPreviewFrameRate(currentCapability.maxFPS);
+            camera.setParameters(parameters);
+
+            int bufSize = width * height * pixelFormat.bitsPerPixel / 8;
+            byte[] buffer = null;
+            for (int i = 0; i < numCaptureBuffers; i++) {
+                buffer = new byte[bufSize];
+                camera.addCallbackBuffer(buffer);
+            }
+            camera.setPreviewCallbackWithBuffer(this);
+            ownsBuffers = true;
+
+            camera.startPreview();
+            previewBufferLock.lock();
+            expectedFrameSize = bufSize;
+            isCaptureRunning = true;
+            previewBufferLock.unlock();
+
+        }
+        catch (Exception ex) {
+            Log.e(TAG, "Failed to start camera");
+            return -1;
+        }
+
+        isCaptureRunning = true;
+        return 0;
+    }
+
+    public int StartCapture(int width, int height, int frameRate) {
+        Log.d(TAG, "StartCapture width " + width +
+                " height " + height +" frame rate " + frameRate);
+        // Get the local preview SurfaceHolder from the static render class
+        localPreview = ViERenderer.GetLocalRenderer();
+        if (localPreview != null) {
+            localPreview.addCallback(this);
+        }
+
+        captureLock.lock();
+        isCaptureStarted = true;
+        mCaptureWidth = width;
+        mCaptureHeight = height;
+        mCaptureFPS = frameRate;
+
+        int res = tryStartCapture(mCaptureWidth, mCaptureHeight, mCaptureFPS);
+
+        captureLock.unlock();
+        return res;
+    }
+
+    public int StopCapture() {
+        Log.d(TAG, "StopCapture");
+        try {
+            previewBufferLock.lock();
+            isCaptureRunning = false;
+            previewBufferLock.unlock();
+            camera.stopPreview();
+            camera.setPreviewCallbackWithBuffer(null);
+        }
+        catch (Exception ex) {
+            Log.e(TAG, "Failed to stop camera");
+            return -1;
+        }
+
+        isCaptureStarted = false;
+        return 0;
+    }
+
+    native void ProvideCameraFrame(byte[] data, int length, long captureObject);
+
+    public void onPreviewFrame(byte[] data, Camera camera) {
+        previewBufferLock.lock();
+
+        // The following line is for debug only
+        // Log.v(TAG, "preview frame length " + data.length +
+        //            " context" + context);
+        if (isCaptureRunning) {
+            // If StartCapture has been called but not StopCapture
+            // Call the C++ layer with the captured frame
+            if (data.length == expectedFrameSize) {
+                ProvideCameraFrame(data, expectedFrameSize, context);
+                if (ownsBuffers) {
+                    // Give the video buffer to the camera service again.
+                    camera.addCallbackBuffer(data);
+                }
+            }
+        }
+        previewBufferLock.unlock();
+    }
+
+    // Sets the rotation of the preview render window.
+    // Does not affect the captured video image.
+    public void SetPreviewRotation(int rotation) {
+        Log.v(TAG, "SetPreviewRotation:" + rotation);
+
+        if (camera != null) {
+            previewBufferLock.lock();
+            int width = 0;
+            int height = 0;
+            int framerate = 0;
+
+            if (isCaptureRunning) {
+                width = mCaptureWidth;
+                height = mCaptureHeight;
+                framerate = mCaptureFPS;
+                StopCapture();
+            }
+
+            int resultRotation = 0;
+            if (currentDevice.frontCameraType ==
+                    VideoCaptureDeviceInfoAndroid.FrontFacingCameraType.Android23) {
+                // this is a 2.3 or later front facing camera.
+                // SetDisplayOrientation will flip the image horizontally
+                // before doing the rotation.
+                resultRotation=(360-rotation) % 360; // compensate the mirror
+            }
+            else {
+                // Back facing or 2.2 or previous front camera
+                resultRotation=rotation;
+            }
+            camera.setDisplayOrientation(resultRotation);
+
+            if (isCaptureRunning) {
+                StartCapture(width, height, framerate);
+            }
+            previewBufferLock.unlock();
+        }
+    }
+
+    public void surfaceChanged(SurfaceHolder holder,
+                               int format, int width, int height) {
+        Log.d(TAG, "VideoCaptureAndroid::surfaceChanged");
+
+        captureLock.lock();
+        isSurfaceReady = true;
+        surfaceHolder = holder;
+
+        tryStartCapture(mCaptureWidth, mCaptureHeight, mCaptureFPS);
+        captureLock.unlock();
+        return;
+    }
+
+    public void surfaceCreated(SurfaceHolder holder) {
+        Log.d(TAG, "VideoCaptureAndroid::surfaceCreated");
+    }
+
+    public void surfaceDestroyed(SurfaceHolder holder) {
+        Log.d(TAG, "VideoCaptureAndroid::surfaceDestroyed");
+        isSurfaceReady = false;
+    }
+}
diff --git a/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java b/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
new file mode 100644
index 0000000..b0e75cc
--- /dev/null
+++ b/src/modules/video_capture/main/source/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
@@ -0,0 +1,402 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengine;
+
+import java.io.File;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Locale;
+
+import dalvik.system.DexClassLoader;
+
+import android.content.Context;
+import android.hardware.Camera;
+import android.hardware.Camera.Size;
+import android.util.Log;
+
+public class VideoCaptureDeviceInfoAndroid {
+
+    //Context
+    Context context;
+
+    // Set VERBOSE as the default logging level because camera device info
+    // is very useful information and doesn't degrade performance normally
+    private final static String TAG = "WEBRTC";
+
+    // Private class with info about all available cameras and the capabilities
+    public class AndroidVideoCaptureDevice {
+        AndroidVideoCaptureDevice() {
+            frontCameraType = FrontFacingCameraType.None;
+            index = 0;
+        }
+
+        public String deviceUniqueName;
+        public CaptureCapabilityAndroid captureCapabilies[];
+        public FrontFacingCameraType frontCameraType;
+
+        // Orientation of camera as described in
+        // android.hardware.Camera.CameraInfo.Orientation
+        public int orientation;
+        // Camera index used in Camera.Open on Android 2.3 and onwards
+        public int index;
+    }
+
+    public enum FrontFacingCameraType {
+        None, // This is not a front facing camera
+                GalaxyS, // Galaxy S front facing camera.
+                HTCEvo, // HTC Evo front facing camera
+                Android23, // Android 2.3 front facing camera.
+                }
+
+    String currentDeviceUniqueId;
+    int id;
+    List<AndroidVideoCaptureDevice> deviceList;
+
+    public static VideoCaptureDeviceInfoAndroid
+    CreateVideoCaptureDeviceInfoAndroid(int in_id, Context in_context) {
+        Log.d(TAG,
+                String.format(Locale.US, "VideoCaptureDeviceInfoAndroid"));
+
+        VideoCaptureDeviceInfoAndroid self =
+                new VideoCaptureDeviceInfoAndroid(in_id, in_context);
+        if(self != null && self.Init() == 0) {
+            return self;
+        }
+        else {
+            Log.d(TAG, "Failed to create VideoCaptureDeviceInfoAndroid.");
+        }
+        return null;
+    }
+
+    private VideoCaptureDeviceInfoAndroid(int in_id,
+            Context in_context) {
+        id = in_id;
+        context = in_context;
+        deviceList = new ArrayList<AndroidVideoCaptureDevice>();
+    }
+
+    private int Init() {
+        // Populate the deviceList with available cameras and their capabilities.
+        Camera camera = null;
+        try{
+            if(android.os.Build.VERSION.SDK_INT > 8) {
+                // From Android 2.3 and onwards
+                for(int i = 0; i < Camera.getNumberOfCameras(); ++i) {
+                    AndroidVideoCaptureDevice newDevice = new AndroidVideoCaptureDevice();
+
+                    Camera.CameraInfo info = new Camera.CameraInfo();
+                    Camera.getCameraInfo(i, info);
+                    newDevice.index = i;
+                    newDevice.orientation=info.orientation;
+                    if(info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
+                        newDevice.deviceUniqueName =
+                                "Camera " + i +", Facing back, Orientation "+ info.orientation;
+                        Log.d(TAG, "Camera " + i +", Facing back, Orientation "+ info.orientation);
+
+                    }
+                    else {
+                        newDevice.deviceUniqueName =
+                                "Camera " + i +", Facing front, Orientation "+ info.orientation;
+                        newDevice.frontCameraType = FrontFacingCameraType.Android23;
+                        Log.d(TAG, "Camera " + i +", Facing front, Orientation "+ info.orientation);
+                    }
+
+                    camera = Camera.open(i);
+                    Camera.Parameters parameters = camera.getParameters();
+                    AddDeviceInfo(newDevice, parameters);
+                    camera.release();
+                    camera = null;
+                    deviceList.add(newDevice);
+                }
+            }
+        }
+        catch (Exception ex) {
+            Log.e(TAG, "Failed to init VideoCaptureDeviceInfo ex" +
+                    ex.getLocalizedMessage());
+            return -1;
+        }
+        VerifyCapabilities();
+        return 0;
+    }
+
+    // Adds the capture capabilities of the currently opened device
+    private void AddDeviceInfo(AndroidVideoCaptureDevice newDevice,
+            Camera.Parameters parameters) {
+
+        List<Size> sizes = parameters.getSupportedPreviewSizes();
+        List<Integer> frameRates = parameters.getSupportedPreviewFrameRates();
+        int maxFPS = 0;
+        for(Integer frameRate:frameRates) {
+            if(frameRate > maxFPS) {
+                maxFPS = frameRate;
+            }
+        }
+
+        newDevice.captureCapabilies = new CaptureCapabilityAndroid[sizes.size()];
+        for(int i = 0; i < sizes.size(); ++i) {
+            Size s = sizes.get(i);
+            newDevice.captureCapabilies[i] = new CaptureCapabilityAndroid();
+            newDevice.captureCapabilies[i].height = s.height;
+            newDevice.captureCapabilies[i].width = s.width;
+            newDevice.captureCapabilies[i].maxFPS = maxFPS;
+            Log.v(TAG,
+                    "VideoCaptureDeviceInfo " + "maxFPS:" + maxFPS +
+                    " width:" + s.width + " height:" + s.height);
+        }
+    }
+
+    // Function that make sure device specific capabilities are
+    // in the capability list.
+    // Ie Galaxy S supports CIF but does not list CIF as a supported capability.
+    // Motorola Droid Camera does not work with frame rate above 15fps.
+    // http://code.google.com/p/android/issues/detail?id=5514#c0
+    private void VerifyCapabilities() {
+        // Nexus S or Galaxy S
+        if(android.os.Build.DEVICE.equals("GT-I9000") ||
+                android.os.Build.DEVICE.equals("crespo")) {
+            CaptureCapabilityAndroid specificCapability =
+                    new CaptureCapabilityAndroid();
+            specificCapability.width = 352;
+            specificCapability.height = 288;
+            specificCapability.maxFPS = 15;
+            AddDeviceSpecificCapability(specificCapability);
+
+            specificCapability = new CaptureCapabilityAndroid();
+            specificCapability.width = 176;
+            specificCapability.height = 144;
+            specificCapability.maxFPS = 15;
+            AddDeviceSpecificCapability(specificCapability);
+
+            specificCapability = new CaptureCapabilityAndroid();
+            specificCapability.width = 320;
+            specificCapability.height = 240;
+            specificCapability.maxFPS = 15;
+            AddDeviceSpecificCapability(specificCapability);
+        }
+        // Motorola Milestone Camera server does not work at 30fps
+        // even though it reports that it can
+        if(android.os.Build.MANUFACTURER.equals("motorola") &&
+                android.os.Build.DEVICE.equals("umts_sholes")) {
+            for(AndroidVideoCaptureDevice device:deviceList) {
+                for(CaptureCapabilityAndroid capability:device.captureCapabilies) {
+                    capability.maxFPS=15;
+                }
+            }
+        }
+    }
+
+    private void AddDeviceSpecificCapability(
+        CaptureCapabilityAndroid specificCapability) {
+        for(AndroidVideoCaptureDevice device:deviceList) {
+            boolean foundCapability = false;
+            for(CaptureCapabilityAndroid capability:device.captureCapabilies) {
+                if(capability.width == specificCapability.width &&
+                        capability.height == specificCapability.height) {
+                    foundCapability = true;
+                    break;
+                }
+            }
+            if(foundCapability==false) {
+                CaptureCapabilityAndroid newCaptureCapabilies[]=
+                        new CaptureCapabilityAndroid[device.captureCapabilies.length+1];
+                for(int i = 0; i < device.captureCapabilies.length; ++i) {
+                    newCaptureCapabilies[i+1] = device.captureCapabilies[i];
+                }
+                newCaptureCapabilies[0] = specificCapability;
+                device.captureCapabilies = newCaptureCapabilies;
+            }
+        }
+    }
+
+    // Returns the number of Capture devices that is supported
+    public int NumberOfDevices() {
+        return deviceList.size();
+    }
+
+    public String GetDeviceUniqueName(int deviceNumber) {
+        if(deviceNumber < 0 || deviceNumber >= deviceList.size()) {
+            return null;
+        }
+        return deviceList.get(deviceNumber).deviceUniqueName;
+    }
+
+    public CaptureCapabilityAndroid[] GetCapabilityArray (String deviceUniqueId)
+    {
+        for (AndroidVideoCaptureDevice device: deviceList) {
+            if(device.deviceUniqueName.equals(deviceUniqueId)) {
+                return (CaptureCapabilityAndroid[]) device.captureCapabilies;
+            }
+        }
+        return null;
+    }
+
+    // Returns the camera orientation as described by
+    // android.hardware.Camera.CameraInfo.orientation
+    public int GetOrientation(String deviceUniqueId) {
+        for (AndroidVideoCaptureDevice device: deviceList) {
+            if(device.deviceUniqueName.equals(deviceUniqueId)) {
+                return device.orientation;
+            }
+        }
+        return -1;
+    }
+
+    // Returns an instance of VideoCaptureAndroid.
+    public VideoCaptureAndroid AllocateCamera(int id, long context,
+            String deviceUniqueId) {
+        try {
+            Log.d(TAG, "AllocateCamera " + deviceUniqueId);
+
+            Camera camera = null;
+            AndroidVideoCaptureDevice deviceToUse = null;
+            for (AndroidVideoCaptureDevice device: deviceList) {
+                if(device.deviceUniqueName.equals(deviceUniqueId)) {
+                    // Found the wanted camera
+                    deviceToUse = device;
+                    switch(device.frontCameraType) {
+                        case GalaxyS:
+                            camera = AllocateGalaxySFrontCamera();
+                            break;
+                        case HTCEvo:
+                            camera = AllocateEVOFrontFacingCamera();
+                            break;
+                        default:
+                            // From Android 2.3 and onwards)
+                            if(android.os.Build.VERSION.SDK_INT>8)
+                                camera=Camera.open(device.index);
+                            else
+                                camera=Camera.open(); // Default camera
+                    }
+                }
+            }
+
+            if(camera == null) {
+                return null;
+            }
+            Log.v(TAG, "AllocateCamera - creating VideoCaptureAndroid");
+
+            return new VideoCaptureAndroid(id, context, camera, deviceToUse);
+
+        }catch (Exception ex) {
+            Log.e(TAG, "AllocateCamera Failed to open camera- ex " +
+                    ex.getLocalizedMessage());
+        }
+        return null;
+    }
+
+    // Searches for a front facing camera device. This is device specific code.
+    private Camera.Parameters
+    SearchOldFrontFacingCameras(AndroidVideoCaptureDevice newDevice)
+            throws SecurityException, IllegalArgumentException,
+            NoSuchMethodException, ClassNotFoundException,
+            IllegalAccessException, InvocationTargetException {
+        // Check the id of the opened camera device
+        // Returns null on X10 and 1 on Samsung Galaxy S.
+        Camera camera = Camera.open();
+        Camera.Parameters parameters = camera.getParameters();
+        String cameraId = parameters.get("camera-id");
+        if(cameraId != null && cameraId.equals("1")) {
+            // This might be a Samsung Galaxy S with a front facing camera.
+            try {
+                parameters.set("camera-id", 2);
+                camera.setParameters(parameters);
+                parameters = camera.getParameters();
+                newDevice.frontCameraType = FrontFacingCameraType.GalaxyS;
+                newDevice.orientation = 0;
+                camera.release();
+                return parameters;
+            }
+            catch (Exception ex) {
+                // Nope - it did not work.
+                Log.e(TAG, "Init Failed to open front camera camera - ex " +
+                        ex.getLocalizedMessage());
+            }
+        }
+        camera.release();
+
+        // Check for Evo front facing camera
+        File file =
+                new File("/system/framework/com.htc.hardware.twinCamDevice.jar");
+        boolean exists = file.exists();
+        if (!exists) {
+            file =
+                    new File("/system/framework/com.sprint.hardware.twinCamDevice.jar");
+            exists = file.exists();
+        }
+        if(exists) {
+            newDevice.frontCameraType = FrontFacingCameraType.HTCEvo;
+            newDevice.orientation = 0;
+            Camera evCamera = AllocateEVOFrontFacingCamera();
+            parameters = evCamera.getParameters();
+            evCamera.release();
+            return parameters;
+        }
+        return null;
+    }
+
+    // Returns a handle to HTC front facing camera.
+    // The caller is responsible to release it on completion.
+    private Camera AllocateEVOFrontFacingCamera()
+            throws SecurityException, NoSuchMethodException,
+            ClassNotFoundException, IllegalArgumentException,
+            IllegalAccessException, InvocationTargetException {
+        String classPath = null;
+        File file =
+                new File("/system/framework/com.htc.hardware.twinCamDevice.jar");
+        classPath = "com.htc.hardware.twinCamDevice.FrontFacingCamera";
+        boolean exists = file.exists();
+        if (!exists){
+            file =
+                    new File("/system/framework/com.sprint.hardware.twinCamDevice.jar");
+            classPath = "com.sprint.hardware.twinCamDevice.FrontFacingCamera";
+            exists = file.exists();
+        }
+        if(!exists) {
+            return null;
+        }
+
+        String dexOutputDir = "";
+        if(context != null) {
+            dexOutputDir = context.getFilesDir().getAbsolutePath();
+            File mFilesDir = new File(dexOutputDir, "dexfiles");
+            if(!mFilesDir.exists()){
+                // Log.e("*WEBRTCN*", "Directory doesn't exists");
+                if(!mFilesDir.mkdirs()) {
+                    // Log.e("*WEBRTCN*", "Unable to create files directory");
+                }
+            }
+        }
+
+        dexOutputDir += "/dexfiles";
+
+        DexClassLoader loader =
+                new DexClassLoader(file.getAbsolutePath(), dexOutputDir,
+                        null, ClassLoader.getSystemClassLoader());
+
+        Method method = loader.loadClass(classPath).getDeclaredMethod(
+            "getFrontFacingCamera", (Class[]) null);
+        Camera camera = (Camera) method.invoke((Object[])null,(Object[]) null);
+        return camera;
+    }
+
+    // Returns a handle to Galaxy S front camera.
+    // The caller is responsible to release it on completion.
+    private Camera AllocateGalaxySFrontCamera() {
+        Camera camera = Camera.open();
+        Camera.Parameters parameters = camera.getParameters();
+        parameters.set("camera-id",2);
+        camera.setParameters(parameters);
+        return camera;
+    }
+
+}
diff --git a/src/modules/video_capture/main/source/android/video_capture_android.cc b/src/modules/video_capture/main/source/android/video_capture_android.cc
new file mode 100644
index 0000000..f5abbc9
--- /dev/null
+++ b/src/modules/video_capture/main/source/android/video_capture_android.cc
@@ -0,0 +1,664 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_capture_android.h"
+
+#include <stdio.h>
+
+#include "critical_section_wrapper.h"
+#include "ref_count.h"
+#include "trace.h"
+
+namespace webrtc
+{
+#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
+// TODO(leozwang) These SetAndroidVM apis will be refactored, thus we only
+// keep and reference java vm.
+WebRtc_Word32 SetCaptureAndroidVM(void* javaVM, void* javaContext) {
+  return videocapturemodule::VideoCaptureAndroid::SetAndroidObjects(
+      javaVM,
+      javaContext);
+}
+#endif
+
+namespace videocapturemodule
+{
+
+VideoCaptureModule* VideoCaptureImpl::Create(
+    const WebRtc_Word32 id,
+    const char* deviceUniqueIdUTF8) {
+
+  RefCountImpl<videocapturemodule::VideoCaptureAndroid>* implementation =
+      new RefCountImpl<videocapturemodule::VideoCaptureAndroid>(id);
+
+  if (!implementation || implementation->Init(id, deviceUniqueIdUTF8) != 0) {
+    delete implementation;
+    implementation = NULL;
+  }
+  return implementation;
+}
+
+// Android logging, uncomment to print trace to
+// logcat instead of trace file/callback
+// #include <android/log.h>
+// #undef WEBRTC_TRACE
+// #define WEBRTC_TRACE(a,b,c,...)
+// __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
+
+JavaVM* VideoCaptureAndroid::g_jvm = NULL;
+//VideoCaptureAndroid.java
+jclass VideoCaptureAndroid::g_javaCmClass = NULL;
+//VideoCaptureDeviceInfoAndroid.java
+jclass VideoCaptureAndroid::g_javaCmDevInfoClass = NULL;
+//static instance of VideoCaptureDeviceInfoAndroid.java
+jobject VideoCaptureAndroid::g_javaCmDevInfoObject = NULL;
+jobject VideoCaptureAndroid::g_javaContext = NULL;
+
+/*
+ * Register references to Java Capture class.
+ */
+WebRtc_Word32 VideoCaptureAndroid::SetAndroidObjects(void* javaVM,
+                                                     void* javaContext) {
+
+  g_jvm = static_cast<JavaVM*> (javaVM);
+  g_javaContext = static_cast<jobject> (javaContext);
+
+  if (javaVM) {
+    JNIEnv* env = NULL;
+    if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                   "%s: could not get Java environment", __FUNCTION__);
+      return -1;
+    }
+    // get java capture class type (note path to class packet)
+    jclass javaCmClassLocal = env->FindClass(AndroidJavaCaptureClass);
+    if (!javaCmClassLocal) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                   "%s: could not find java class", __FUNCTION__);
+      return -1;
+    }
+    // create a global reference to the class
+    // (to tell JNI that we are referencing it
+    // after this function has returned)
+    g_javaCmClass = static_cast<jclass>
+        (env->NewGlobalRef(javaCmClassLocal));
+    if (!g_javaCmClass) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                   "%s: InitVideoEngineJava(): could not create"
+                   " Java Camera class reference",
+                   __FUNCTION__);
+      return -1;
+    }
+    // Delete local class ref, we only use the global ref
+    env->DeleteLocalRef(javaCmClassLocal);
+    JNINativeMethod nativeFunctions =
+        { "ProvideCameraFrame", "([BIJ)V",
+          (void*) &VideoCaptureAndroid::ProvideCameraFrame };
+    if (env->RegisterNatives(g_javaCmClass, &nativeFunctions, 1) == 0) {
+      WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
+                   "%s: Registered native functions", __FUNCTION__);
+    }
+    else {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                   "%s: Failed to register native functions",
+                   __FUNCTION__);
+      return -1;
+    }
+
+    // get java capture class type (note path to class packet)
+    jclass javaCmDevInfoClassLocal = env->FindClass(
+        AndroidJavaCaptureDeviceInfoClass);
+    if (!javaCmDevInfoClassLocal) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                   "%s: could not find java class", __FUNCTION__);
+      return -1;
+    }
+
+    // create a global reference to the class
+    // (to tell JNI that we are referencing it
+    // after this function has returned)
+    g_javaCmDevInfoClass = static_cast<jclass>
+        (env->NewGlobalRef(javaCmDevInfoClassLocal));
+    if (!g_javaCmDevInfoClass) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                   "%s: InitVideoEngineJava(): could not create Java "
+                   "Camera Device info class reference",
+                   __FUNCTION__);
+      return -1;
+    }
+    // Delete local class ref, we only use the global ref
+    env->DeleteLocalRef(javaCmDevInfoClassLocal);
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
+                 "VideoCaptureDeviceInfoAndroid get method id");
+
+    // get the method ID for the Android Java CaptureClass static
+    //CreateVideoCaptureAndroid factory method.
+    jmethodID cid = env->GetStaticMethodID(
+        g_javaCmDevInfoClass,
+        "CreateVideoCaptureDeviceInfoAndroid",
+        "(ILandroid/content/Context;)"
+        "Lorg/webrtc/videoengine/VideoCaptureDeviceInfoAndroid;");
+    if (cid == NULL) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                   "%s: could not get java"
+                   "VideoCaptureDeviceInfoAndroid constructor ID",
+                   __FUNCTION__);
+      return -1;
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
+                 "%s: construct static java device object", __FUNCTION__);
+
+    // construct the object by calling the static constructor object
+    jobject javaCameraDeviceInfoObjLocal =
+        env->CallStaticObjectMethod(g_javaCmDevInfoClass,
+                                    cid, (int) -1,
+                                    g_javaContext);
+    if (!javaCameraDeviceInfoObjLocal) {
+      WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
+                   "%s: could not create Java Capture Device info object",
+                   __FUNCTION__);
+      return -1;
+    }
+    // create a reference to the object (to tell JNI that
+    // we are referencing it after this function has returned)
+    g_javaCmDevInfoObject = env->NewGlobalRef(javaCameraDeviceInfoObjLocal);
+    if (!g_javaCmDevInfoObject) {
+      WEBRTC_TRACE(webrtc::kTraceError,
+                   webrtc::kTraceAudioDevice,
+                   -1,
+                   "%s: could not create Java"
+                   "cameradevinceinfo object reference",
+                   __FUNCTION__);
+      return -1;
+    }
+    // Delete local object ref, we only use the global ref
+    env->DeleteLocalRef(javaCameraDeviceInfoObjLocal);
+    return 0;
+  }
+  else {
+    WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
+                 "%s: JVM is NULL, assuming deinit", __FUNCTION__);
+    if (!g_jvm) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                   "%s: SetAndroidObjects not called with a valid JVM.",
+                   __FUNCTION__);
+      return -1;
+    }
+    JNIEnv* env = NULL;
+    bool attached = false;
+    if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
+      // try to attach the thread and get the env
+      // Attach this thread to JVM
+      jint res = g_jvm->AttachCurrentThread(&env, NULL);
+      if ((res < 0) || !env) {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
+                     -1, "%s: Could not attach thread to JVM (%d, %p)",
+                     __FUNCTION__, res, env);
+        return -1;
+      }
+      attached = true;
+    }
+    env->DeleteGlobalRef(g_javaCmDevInfoObject);
+    env->DeleteGlobalRef(g_javaCmDevInfoClass);
+    env->DeleteGlobalRef(g_javaCmClass);
+    if (attached && g_jvm->DetachCurrentThread() < 0) {
+      WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
+                   "%s: Could not detach thread from JVM", __FUNCTION__);
+      return -1;
+    }
+    return 0;
+    env = (JNIEnv *) NULL;
+  }
+  return 0;
+}
+
+WebRtc_Word32 VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
+    JNIEnv*& env,
+    jclass& javaCmDevInfoClass,
+    jobject& javaCmDevInfoObject,
+    bool& attached) {
+  // get the JNI env for this thread
+  if (!g_jvm) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                 "%s: SetAndroidObjects not called with a valid JVM.",
+                 __FUNCTION__);
+    return -1;
+  }
+  attached = false;
+  if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
+    // try to attach the thread and get the env
+    // Attach this thread to JVM
+    jint res = g_jvm->AttachCurrentThread(&env, NULL);
+    if ((res < 0) || !env) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                   "%s: Could not attach thread to JVM (%d, %p)",
+                   __FUNCTION__, res, env);
+      return -1;
+    }
+    attached = true;
+  }
+  javaCmDevInfoClass = g_javaCmDevInfoClass;
+  javaCmDevInfoObject = g_javaCmDevInfoObject;
+  return 0;
+
+}
+
+WebRtc_Word32 VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(
+    bool attached) {
+  if (attached && g_jvm->DetachCurrentThread() < 0) {
+    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
+                 "%s: Could not detach thread from JVM", __FUNCTION__);
+    return -1;
+  }
+  return 0;
+}
+
+/*
+ * JNI callback from Java class. Called
+ * when the camera has a new frame to deliver
+ * Class:     org_webrtc_capturemodule_VideoCaptureAndroid
+ * Method:    ProvideCameraFrame
+ * Signature: ([BIJ)V
+ */
+void JNICALL VideoCaptureAndroid::ProvideCameraFrame(JNIEnv * env,
+                                                     jobject,
+                                                     jbyteArray javaCameraFrame,
+                                                     jint length,
+                                                     jlong context) {
+  VideoCaptureAndroid* captureModule =
+      reinterpret_cast<VideoCaptureAndroid*>(context);
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture,
+               -1, "%s: IncomingFrame %d", __FUNCTION__,length);
+  jbyte* cameraFrame= env->GetByteArrayElements(javaCameraFrame,NULL);
+  captureModule->IncomingFrame((WebRtc_UWord8*) cameraFrame,
+                               length,captureModule->_frameInfo,0);
+  env->ReleaseByteArrayElements(javaCameraFrame,cameraFrame,JNI_ABORT);
+}
+
+
+
+VideoCaptureAndroid::VideoCaptureAndroid(const WebRtc_Word32 id)
+    : VideoCaptureImpl(id), _capInfo(id), _javaCaptureObj(NULL),
+      _captureStarted(false) {
+  WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
+               "%s: context %x", __FUNCTION__, (int) this);
+}
+
+// ----------------------------------------------------------------------------
+//  Init
+//
+//  Initializes needed Java resources like the JNI interface to
+//  VideoCaptureAndroid.java
+// ----------------------------------------------------------------------------
+WebRtc_Word32 VideoCaptureAndroid::Init(const WebRtc_Word32 id,
+                                        const char* deviceUniqueIdUTF8) {
+  const int nameLength = strlen(deviceUniqueIdUTF8);
+  if (nameLength >= kVideoCaptureUniqueNameLength) {
+    return -1;
+  }
+
+  // Store the device name
+  _deviceUniqueId = new char[nameLength + 1];
+  memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1);
+
+  if (_capInfo.Init() != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError,
+                 webrtc::kTraceVideoCapture,
+                 _id,
+                 "%s: Failed to initialize CaptureDeviceInfo",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, "%s:",
+               __FUNCTION__);
+  // use the jvm that has been set
+  if (!g_jvm) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                 "%s: Not a valid Java VM pointer", __FUNCTION__);
+    return -1;
+  }
+  // get the JNI env for this thread
+  JNIEnv *env;
+  bool isAttached = false;
+
+  // get the JNI env for this thread
+  if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
+    // try to attach the thread and get the env
+    // Attach this thread to JVM
+    jint res = g_jvm->AttachCurrentThread(&env, NULL);
+    if ((res < 0) || !env) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "%s: Could not attach thread to JVM (%d, %p)",
+                   __FUNCTION__, res, env);
+      return -1;
+    }
+    isAttached = true;
+  }
+
+  WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
+               "get method id");
+
+  // get the method ID for the Android Java
+  // CaptureDeviceInfoClass AllocateCamera factory method.
+  char signature[256];
+  sprintf(signature, "(IJLjava/lang/String;)L%s;", AndroidJavaCaptureClass);
+
+  jmethodID cid = env->GetMethodID(g_javaCmDevInfoClass, "AllocateCamera",
+                                   signature);
+  if (cid == NULL) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                 "%s: could not get constructor ID", __FUNCTION__);
+    return -1; /* exception thrown */
+  }
+
+  jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
+  // construct the object by calling the static constructor object
+  jobject javaCameraObjLocal = env->CallObjectMethod(g_javaCmDevInfoObject,
+                                                     cid, (jint) id,
+                                                     (jlong) this,
+                                                     capureIdString);
+  if (!javaCameraObjLocal) {
+    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                 "%s: could not create Java Capture object", __FUNCTION__);
+    return -1;
+  }
+
+  // create a reference to the object (to tell JNI that we are referencing it
+  // after this function has returned)
+  _javaCaptureObj = env->NewGlobalRef(javaCameraObjLocal);
+  if (!_javaCaptureObj) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioDevice, _id,
+                 "%s: could not create Java camera object reference",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  // Delete local object ref, we only use the global ref
+  env->DeleteLocalRef(javaCameraObjLocal);
+
+  // Detach this thread if it was attached
+  if (isAttached) {
+    if (g_jvm->DetachCurrentThread() < 0) {
+      WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
+                   "%s: Could not detach thread from JVM", __FUNCTION__);
+    }
+  }
+
+  return 0;
+}
+
+VideoCaptureAndroid::~VideoCaptureAndroid() {
+  WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, "%s:",
+               __FUNCTION__);
+  if (_javaCaptureObj == NULL || g_jvm == NULL) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                 "%s: Nothing to clean", __FUNCTION__);
+  }
+  else {
+    bool isAttached = false;
+    // get the JNI env for this thread
+    JNIEnv *env;
+    if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
+      // try to attach the thread and get the env
+      // Attach this thread to JVM
+      jint res = g_jvm->AttachCurrentThread(&env, NULL);
+      if ((res < 0) || !env) {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
+                     _id,
+                     "%s: Could not attach thread to JVM (%d, %p)",
+                     __FUNCTION__, res, env);
+      }
+      else {
+        isAttached = true;
+      }
+    }
+
+    // get the method ID for the Android Java CaptureClass static
+    // DeleteVideoCaptureAndroid  method. Call this to release the camera so
+    // another application can use it.
+    jmethodID cid = env->GetStaticMethodID(
+        g_javaCmClass,
+        "DeleteVideoCaptureAndroid",
+        "(Lorg/webrtc/videoengine/VideoCaptureAndroid;)V");
+    if (cid != NULL) {
+      WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
+                   "%s: Call DeleteVideoCaptureAndroid", __FUNCTION__);
+      // Close the camera by calling the static destruct function.
+      env->CallStaticVoidMethod(g_javaCmClass, cid, _javaCaptureObj);
+
+      // Delete global object ref to the camera.
+      env->DeleteGlobalRef(_javaCaptureObj);
+      _javaCaptureObj = NULL;
+    }
+    else {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                   "%s: Failed to find DeleteVideoCaptureAndroid id",
+                   __FUNCTION__);
+    }
+
+    // Detach this thread if it was attached
+    if (isAttached) {
+      if (g_jvm->DetachCurrentThread() < 0) {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice,
+                     _id, "%s: Could not detach thread from JVM",
+                     __FUNCTION__);
+      }
+    }
+  }
+}
+
+WebRtc_Word32 VideoCaptureAndroid::StartCapture(
+    const VideoCaptureCapability& capability) {
+  CriticalSectionScoped cs(&_apiCs);
+  WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
+               "%s: ", __FUNCTION__);
+
+  bool isAttached = false;
+  WebRtc_Word32 result = 0;
+  // get the JNI env for this thread
+  JNIEnv *env;
+  if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
+    // try to attach the thread and get the env
+    // Attach this thread to JVM
+    jint res = g_jvm->AttachCurrentThread(&env, NULL);
+    if ((res < 0) || !env) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "%s: Could not attach thread to JVM (%d, %p)",
+                   __FUNCTION__, res, env);
+    }
+    else {
+      isAttached = true;
+    }
+  }
+
+  if (_capInfo.GetBestMatchedCapability(_deviceUniqueId, capability,
+                                        _frameInfo) < 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                 "%s: GetBestMatchedCapability failed. Req cap w%d h%d",
+                 __FUNCTION__, capability.width, capability.height);
+    return -1;
+  }
+
+  // Store the new expected capture delay
+  _captureDelay = _frameInfo.expectedCaptureDelay;
+
+  WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
+               "%s: _frameInfo w%d h%d", __FUNCTION__, _frameInfo.width,
+               _frameInfo.height);
+
+  // get the method ID for the Android Java
+  // CaptureClass static StartCapture  method.
+  jmethodID cid = env->GetMethodID(g_javaCmClass, "StartCapture", "(III)I");
+  if (cid != NULL) {
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
+                 "%s: Call StartCapture", __FUNCTION__);
+    // Close the camera by calling the static destruct function.
+    result = env->CallIntMethod(_javaCaptureObj, cid, _frameInfo.width,
+                                _frameInfo.height, _frameInfo.maxFPS);
+  }
+  else {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                 "%s: Failed to find StartCapture id", __FUNCTION__);
+  }
+
+  // Detach this thread if it was attached
+  if (isAttached) {
+    if (g_jvm->DetachCurrentThread() < 0) {
+      WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
+                   "%s: Could not detach thread from JVM", __FUNCTION__);
+    }
+  }
+  if (result == 0) {
+    _requestedCapability = capability;
+    _captureStarted = true;
+  }
+  WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
+               "%s: result %d", __FUNCTION__, result);
+  return result;
+}
+
+WebRtc_Word32 VideoCaptureAndroid::StopCapture() {
+  CriticalSectionScoped cs(&_apiCs);
+  WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
+               "%s: ", __FUNCTION__);
+
+  bool isAttached = false;
+  WebRtc_Word32 result = 0;
+  // get the JNI env for this thread
+  JNIEnv *env = NULL;
+  if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
+    // try to attach the thread and get the env
+    // Attach this thread to JVM
+    jint res = g_jvm->AttachCurrentThread(&env, NULL);
+    if ((res < 0) || !env) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "%s: Could not attach thread to JVM (%d, %p)",
+                   __FUNCTION__, res, env);
+    }
+    else {
+      isAttached = true;
+    }
+  }
+
+  memset(&_requestedCapability, 0, sizeof(_requestedCapability));
+  memset(&_frameInfo, 0, sizeof(_frameInfo));
+
+  // get the method ID for the Android Java CaptureClass StopCapture  method.
+  jmethodID cid = env->GetMethodID(g_javaCmClass, "StopCapture", "()I");
+  if (cid != NULL) {
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
+                 "%s: Call StopCapture", __FUNCTION__);
+    // Close the camera by calling the static destruct function.
+    result = env->CallIntMethod(_javaCaptureObj, cid);
+  }
+  else {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                 "%s: Failed to find StopCapture id", __FUNCTION__);
+  }
+
+  // Detach this thread if it was attached
+  if (isAttached) {
+    if (g_jvm->DetachCurrentThread() < 0) {
+      WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
+                   "%s: Could not detach thread from JVM", __FUNCTION__);
+    }
+  }
+  _captureStarted = false;
+
+  WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
+               "%s: result %d", __FUNCTION__, result);
+  return result;
+}
+
+bool VideoCaptureAndroid::CaptureStarted() {
+  CriticalSectionScoped cs(&_apiCs);
+  WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
+               "%s: ", __FUNCTION__);
+  return _captureStarted;
+}
+
+WebRtc_Word32 VideoCaptureAndroid::CaptureSettings(
+    VideoCaptureCapability& settings) {
+  CriticalSectionScoped cs(&_apiCs);
+  WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
+               "%s: ", __FUNCTION__);
+  settings = _requestedCapability;
+  return 0;
+}
+
+WebRtc_Word32 VideoCaptureAndroid::SetCaptureRotation(
+    VideoCaptureRotation rotation) {
+  CriticalSectionScoped cs(&_apiCs);
+  if (VideoCaptureImpl::SetCaptureRotation(rotation) == 0) {
+    if (!g_jvm)
+      return -1;
+
+    // get the JNI env for this thread
+    JNIEnv *env;
+    bool isAttached = false;
+
+    // get the JNI env for this thread
+    if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
+      // try to attach the thread and get the env
+      // Attach this thread to JVM
+      jint res = g_jvm->AttachCurrentThread(&env, NULL);
+      if ((res < 0) || !env) {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
+                     _id,
+                     "%s: Could not attach thread to JVM (%d, %p)",
+                     __FUNCTION__, res, env);
+        return -1;
+      }
+      isAttached = true;
+    }
+
+    jmethodID cid = env->GetMethodID(g_javaCmClass, "SetPreviewRotation",
+                                     "(I)V");
+    if (cid == NULL) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                   "%s: could not get java SetPreviewRotation ID",
+                   __FUNCTION__);
+      return -1;
+    }
+    jint rotateFrame = 0;
+    switch (rotation) {
+      case kCameraRotate0:
+        rotateFrame = 0;
+        break;
+      case kCameraRotate90:
+        rotateFrame = 90;
+        break;
+      case kCameraRotate180:
+        rotateFrame = 180;
+        break;
+      case kCameraRotate270:
+        rotateFrame = 270;
+        break;
+    }
+    env->CallVoidMethod(_javaCaptureObj, cid, rotateFrame);
+
+    // Detach this thread if it was attached
+    if (isAttached) {
+      if (g_jvm->DetachCurrentThread() < 0) {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice,
+                     _id, "%s: Could not detach thread from JVM",
+                     __FUNCTION__);
+      }
+    }
+
+  }
+  return 0;
+}
+
+}  // namespace videocapturemodule
+}  // namespace webrtc
diff --git a/src/modules/video_capture/main/source/android/video_capture_android.h b/src/modules/video_capture/main/source/android/video_capture_android.h
new file mode 100644
index 0000000..3fd7e64
--- /dev/null
+++ b/src/modules/video_capture/main/source/android/video_capture_android.h
@@ -0,0 +1,66 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_VIDEO_CAPTURE_ANDROID_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_VIDEO_CAPTURE_ANDROID_H_
+
+#include <jni.h>
+#include "device_info_android.h"
+#include "../video_capture_impl.h"
+
+#define AndroidJavaCaptureClass "org/webrtc/videoengine/VideoCaptureAndroid"
+
+namespace webrtc {
+namespace videocapturemodule {
+
+class VideoCaptureAndroid : public VideoCaptureImpl {
+ public:
+  static WebRtc_Word32 SetAndroidObjects(void* javaVM, void* javaContext);
+  static WebRtc_Word32 AttachAndUseAndroidDeviceInfoObjects(
+      JNIEnv*& env,
+      jclass& javaCmDevInfoClass,
+      jobject& javaCmDevInfoObject,
+      bool& attached);
+  static WebRtc_Word32 ReleaseAndroidDeviceInfoObjects(bool attached);
+
+  VideoCaptureAndroid(const WebRtc_Word32 id);
+  virtual WebRtc_Word32 Init(const WebRtc_Word32 id,
+                             const char* deviceUniqueIdUTF8);
+
+
+  virtual WebRtc_Word32 StartCapture(
+      const VideoCaptureCapability& capability);
+  virtual WebRtc_Word32 StopCapture();
+  virtual bool CaptureStarted();
+  virtual WebRtc_Word32 CaptureSettings(VideoCaptureCapability& settings);
+  virtual WebRtc_Word32 SetCaptureRotation(VideoCaptureRotation rotation);
+
+ protected:
+  virtual ~VideoCaptureAndroid();
+  static void JNICALL ProvideCameraFrame (JNIEnv * env,
+                                          jobject,
+                                          jbyteArray javaCameraFrame,
+                                          jint length, jlong context);
+  DeviceInfoAndroid _capInfo;
+  jobject _javaCaptureObj; // Java Camera object.
+  VideoCaptureCapability _frameInfo;
+  bool _captureStarted;
+
+  static JavaVM* g_jvm;
+  static jclass g_javaCmClass;
+  static jclass g_javaCmDevInfoClass;
+  //Static java object implementing the needed device info functions;
+  static jobject g_javaCmDevInfoObject;
+  static jobject g_javaContext; // Java Application context
+};
+
+}  // namespace videocapturemodule
+}  // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_VIDEO_CAPTURE_ANDROID_H_
diff --git a/src/modules/video_capture/main/source/device_info_impl.cc b/src/modules/video_capture/main/source/device_info_impl.cc
new file mode 100644
index 0000000..e3f7bb5
--- /dev/null
+++ b/src/modules/video_capture/main/source/device_info_impl.cc
@@ -0,0 +1,400 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "device_info_impl.h"
+#include "video_capture_config.h"
+#include "trace.h"
+#include <stdlib.h>
+
+#ifndef abs
+#define abs(a) (a>=0?a:-a)
+#endif
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+DeviceInfoImpl::DeviceInfoImpl(const WebRtc_Word32 id)
+    : _id(id), _apiLock(*RWLockWrapper::CreateRWLock()), _lastUsedDeviceName(NULL),
+      _lastUsedDeviceNameLength(0)
+{
+}
+
+DeviceInfoImpl::~DeviceInfoImpl(void)
+{
+    _apiLock.AcquireLockExclusive();
+    // Reset old capability list
+    MapItem* item = NULL;
+    while ((item = _captureCapabilities.Last()))
+    {
+        delete (VideoCaptureCapability*) item->GetItem();
+        _captureCapabilities.Erase(item);
+    }
+    free(_lastUsedDeviceName);
+    _apiLock.ReleaseLockExclusive();
+
+    delete &_apiLock;
+}
+WebRtc_Word32 DeviceInfoImpl::NumberOfCapabilities(
+                                        const char* deviceUniqueIdUTF8)
+{
+
+    if (!deviceUniqueIdUTF8)
+        return -1;
+
+    _apiLock.AcquireLockShared();
+
+    if (_lastUsedDeviceNameLength == strlen((char*) deviceUniqueIdUTF8))
+    {
+        // Is it the same device that is asked for again.
+#if defined(WEBRTC_MAC_INTEL) || defined(WEBRTC_LINUX)
+        if(strncasecmp((char*)_lastUsedDeviceName,
+                       (char*) deviceUniqueIdUTF8,
+                       _lastUsedDeviceNameLength)==0)
+#else
+        if (_strnicmp((char*) _lastUsedDeviceName,
+                      (char*) deviceUniqueIdUTF8,
+                      _lastUsedDeviceNameLength) == 0)
+#endif
+        {
+            //yes
+            _apiLock.ReleaseLockShared();
+            return _captureCapabilities.Size();
+        }
+    }
+    // Need to get exclusive rights to create the new capability map.
+    _apiLock.ReleaseLockShared();
+    WriteLockScoped cs2(_apiLock);
+
+    WebRtc_Word32 ret = CreateCapabilityMap(deviceUniqueIdUTF8);
+    return ret;
+}
+
+WebRtc_Word32 DeviceInfoImpl::GetCapability(const char* deviceUniqueIdUTF8,
+                                            const WebRtc_UWord32 deviceCapabilityNumber,
+                                            VideoCaptureCapability& capability)
+{
+
+    if (!deviceUniqueIdUTF8)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "deviceUniqueIdUTF8 parameter not set in call to GetCapability");
+        return -1;
+    }
+    ReadLockScoped cs(_apiLock);
+
+    if ((_lastUsedDeviceNameLength != strlen((char*) deviceUniqueIdUTF8))
+#if defined(WEBRTC_MAC_INTEL) || defined(WEBRTC_LINUX)
+        || (strncasecmp((char*)_lastUsedDeviceName,
+                        (char*) deviceUniqueIdUTF8,
+                        _lastUsedDeviceNameLength)!=0))
+#else
+        || (_strnicmp((char*) _lastUsedDeviceName,
+                      (char*) deviceUniqueIdUTF8,
+                      _lastUsedDeviceNameLength) != 0))
+#endif
+
+    {
+        _apiLock.ReleaseLockShared();
+        _apiLock.AcquireLockExclusive();
+        if (-1 == CreateCapabilityMap(deviceUniqueIdUTF8))
+        {
+            _apiLock.ReleaseLockExclusive();
+            _apiLock.AcquireLockShared();
+            return -1;
+        }
+        _apiLock.ReleaseLockExclusive();
+        _apiLock.AcquireLockShared();
+    }
+
+    // Make sure the number is valid
+    if (deviceCapabilityNumber >= (unsigned int) _captureCapabilities.Size())
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "deviceCapabilityNumber %d is invalid in call to GetCapability",
+                   deviceCapabilityNumber);
+        return -1;
+    }
+
+    MapItem* item = _captureCapabilities.Find(deviceCapabilityNumber);
+    if (!item)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "Failed to find capability number %d of %d possible",
+                   deviceCapabilityNumber, _captureCapabilities.Size());
+        return -1;
+    }
+
+    VideoCaptureCapability* capPointer =  static_cast<VideoCaptureCapability*>
+                                          (item->GetItem());
+    if (!capPointer)
+    {
+        return -1;
+    }
+
+    capability = *capPointer;
+    return 0;
+}
+
+WebRtc_Word32 DeviceInfoImpl::GetBestMatchedCapability(
+                                        const char*deviceUniqueIdUTF8,
+                                        const VideoCaptureCapability& requested,
+                                        VideoCaptureCapability& resulting)
+{
+
+
+    if (!deviceUniqueIdUTF8)
+        return -1;
+
+    ReadLockScoped cs(_apiLock);
+    if ((_lastUsedDeviceNameLength != strlen((char*) deviceUniqueIdUTF8))
+#if defined(WEBRTC_MAC_INTEL) || defined(WEBRTC_LINUX)
+        || (strncasecmp((char*)_lastUsedDeviceName,
+                        (char*) deviceUniqueIdUTF8,
+                        _lastUsedDeviceNameLength)!=0))
+#else
+        || (_strnicmp((char*) _lastUsedDeviceName,
+                      (char*) deviceUniqueIdUTF8,
+                      _lastUsedDeviceNameLength) != 0))
+#endif
+    {
+        _apiLock.ReleaseLockShared();
+        _apiLock.AcquireLockExclusive();
+        if (-1 == CreateCapabilityMap(deviceUniqueIdUTF8))
+        {
+            return -1;
+        }
+        _apiLock.ReleaseLockExclusive();
+        _apiLock.AcquireLockShared();
+    }
+
+    WebRtc_Word32 bestformatIndex = -1;
+    WebRtc_Word32 bestWidth = 0;
+    WebRtc_Word32 bestHeight = 0;
+    WebRtc_Word32 bestFrameRate = 0;
+    RawVideoType bestRawType = kVideoUnknown;
+    webrtc::VideoCodecType bestCodecType = webrtc::kVideoCodecUnknown;
+
+    const WebRtc_Word32 numberOfCapabilies = _captureCapabilities.Size();
+
+    for (WebRtc_Word32 tmp = 0; tmp < numberOfCapabilies; ++tmp) // Loop through all capabilities
+    {
+        MapItem* item = _captureCapabilities.Find(tmp);
+        if (!item)
+            return -1;
+
+        VideoCaptureCapability& capability = *static_cast<VideoCaptureCapability*>
+                                              (item->GetItem());
+
+        const WebRtc_Word32 diffWidth = capability.width - requested.width;
+        const WebRtc_Word32 diffHeight = capability.height - requested.height;
+        const WebRtc_Word32 diffFrameRate = capability.maxFPS - requested.maxFPS;
+
+        const WebRtc_Word32 currentbestDiffWith = bestWidth - requested.width;
+        const WebRtc_Word32 currentbestDiffHeight = bestHeight - requested.height;
+        const WebRtc_Word32 currentbestDiffFrameRate = bestFrameRate - requested.maxFPS;
+
+        if ((diffHeight >= 0 && diffHeight <= abs(currentbestDiffHeight)) // Height better or equalt that previouse.
+            || (currentbestDiffHeight < 0 && diffHeight >= currentbestDiffHeight))
+        {
+
+            if (diffHeight == currentbestDiffHeight) // Found best height. Care about the width)
+            {
+                if ((diffWidth >= 0 && diffWidth <= abs(currentbestDiffWith)) // Width better or equal
+                    || (currentbestDiffWith < 0 && diffWidth >= currentbestDiffWith))
+                {
+                    if (diffWidth == currentbestDiffWith && diffHeight
+                        == currentbestDiffHeight) // Same size as previously
+                    {
+                        //Also check the best frame rate if the diff is the same as previouse
+                        if (((diffFrameRate >= 0 &&
+                              diffFrameRate <= currentbestDiffFrameRate) // Frame rate to high but better match than previouse and we have not selected IUV
+                            ||
+                            (currentbestDiffFrameRate < 0 &&
+                             diffFrameRate >= currentbestDiffFrameRate)) // Current frame rate is lower than requested. This is better.
+                        )
+                        {
+                            if ((currentbestDiffFrameRate == diffFrameRate) // Same frame rate as previous  or frame rate allready good enough
+                                || (currentbestDiffFrameRate >= 0))
+                            {
+                                if (bestRawType != requested.rawType
+                                    && requested.rawType != kVideoUnknown
+                                    && (capability.rawType == requested.rawType
+                                        || capability.rawType == kVideoI420
+                                        || capability.rawType == kVideoYUY2
+                                        || capability.rawType == kVideoYV12))
+                                {
+                                    bestCodecType = capability.codecType;
+                                    bestRawType = capability.rawType;
+                                    bestformatIndex = tmp;
+                                }
+                                // If width height and frame rate is full filled we can use the camera for encoding if it is supported.
+                                if (capability.height == requested.height
+                                    && capability.width == requested.width
+                                    && capability.maxFPS >= requested.maxFPS)
+                                {
+                                    if (capability.codecType == requested.codecType
+                                        && bestCodecType != requested.codecType)
+                                    {
+                                        bestCodecType = capability.codecType;
+                                        bestformatIndex = tmp;
+                                    }
+                                }
+                            }
+                            else // Better frame rate
+                            {
+                                if (requested.codecType == capability.codecType)
+                                {
+
+                                    bestWidth = capability.width;
+                                    bestHeight = capability.height;
+                                    bestFrameRate = capability.maxFPS;
+                                    bestCodecType = capability.codecType;
+                                    bestRawType = capability.rawType;
+                                    bestformatIndex = tmp;
+                                }
+                            }
+                        }
+                    }
+                    else // Better width than previously
+                    {
+                        if (requested.codecType == capability.codecType)
+                        {
+                            bestWidth = capability.width;
+                            bestHeight = capability.height;
+                            bestFrameRate = capability.maxFPS;
+                            bestCodecType = capability.codecType;
+                            bestRawType = capability.rawType;
+                            bestformatIndex = tmp;
+                        }
+                    }
+                }// else width no good
+            }
+            else // Better height
+            {
+                if (requested.codecType == capability.codecType)
+                {
+                    bestWidth = capability.width;
+                    bestHeight = capability.height;
+                    bestFrameRate = capability.maxFPS;
+                    bestCodecType = capability.codecType;
+                    bestRawType = capability.rawType;
+                    bestformatIndex = tmp;
+                }
+            }
+        }// else height not good
+    }//end for
+
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
+               "Best camera format: Width %d, Height %d, Frame rate %d, Color format %d",
+               bestWidth, bestHeight, bestFrameRate, bestRawType);
+
+    // Copy the capability
+    MapItem* item = _captureCapabilities.Find(bestformatIndex);
+    if (!item)
+        return -1;
+    VideoCaptureCapability* capPointer =
+        static_cast<VideoCaptureCapability*> (item->GetItem());
+    if (!capPointer)
+        return -1;
+
+    resulting = *capPointer;
+
+    return bestformatIndex;
+}
+
+/* Returns the expected Capture delay*/
+WebRtc_Word32 DeviceInfoImpl::GetExpectedCaptureDelay(
+                                          const DelayValues delayValues[],
+                                          const WebRtc_UWord32 sizeOfDelayValues,
+                                          const char* productId,
+                                          const WebRtc_UWord32 width,
+                                          const WebRtc_UWord32 height)
+{
+    WebRtc_Word32 bestDelay = kDefaultCaptureDelay;
+
+    for (WebRtc_UWord32 device = 0; device < sizeOfDelayValues; ++device)
+    {
+        if (delayValues[device].productId && strncmp((char*) productId,
+                                                     (char*) delayValues[device].productId,
+                                                     kVideoCaptureProductIdLength) == 0)
+        {
+            // We have found the camera
+
+            WebRtc_Word32 bestWidth = 0;
+            WebRtc_Word32 bestHeight = 0;
+
+            //Loop through all tested sizes and find one that seems fitting
+            for (WebRtc_UWord32 delayIndex = 0; delayIndex < NoOfDelayValues; ++delayIndex)
+            {
+                const DelayValue& currentValue = delayValues[device].delayValues[delayIndex];
+
+                const WebRtc_Word32 diffWidth = currentValue.width - width;
+                const WebRtc_Word32 diffHeight = currentValue.height - height;
+
+                const WebRtc_Word32 currentbestDiffWith = bestWidth - width;
+                const WebRtc_Word32 currentbestDiffHeight = bestHeight - height;
+
+                if ((diffHeight >= 0 && diffHeight <= abs(currentbestDiffHeight)) // Height better or equal than previous.
+                    || (currentbestDiffHeight < 0 && diffHeight >= currentbestDiffHeight))
+                {
+
+                    if (diffHeight == currentbestDiffHeight) // Found best height. Care about the width)
+                    {
+                        if ((diffWidth >= 0 && diffWidth <= abs(currentbestDiffWith)) // Width better or equal
+                            || (currentbestDiffWith < 0 && diffWidth >= currentbestDiffWith))
+                        {
+                            if (diffWidth == currentbestDiffWith && diffHeight
+                                == currentbestDiffHeight) // Same size as previous
+                            {
+                            }
+                            else // Better width than previously
+                            {
+                                bestWidth = currentValue.width;
+                                bestHeight = currentValue.height;
+                                bestDelay = currentValue.delay;
+                            }
+                        }// else width no good
+                    }
+                    else // Better height
+                    {
+                        bestWidth = currentValue.width;
+                        bestHeight = currentValue.height;
+                        bestDelay = currentValue.delay;
+                    }
+                }// else height not good
+            }//end for
+            break;
+        }
+    }
+    if (bestDelay > kMaxCaptureDelay)
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                   "Expected capture delay too high. %dms, will use %d", bestDelay,
+                   kMaxCaptureDelay);
+        bestDelay = kMaxCaptureDelay;
+
+    }
+
+    return bestDelay;
+
+}
+
+//Default implementation. This should be overridden by Mobile implementations.
+WebRtc_Word32 DeviceInfoImpl::GetOrientation(const char* deviceUniqueIdUTF8,
+                                             VideoCaptureRotation& orientation)
+{
+    orientation = kCameraRotate0;
+    return -1;
+}
+} //namespace videocapturemodule
+} // namespace webrtc
+
+
diff --git a/src/modules/video_capture/main/source/device_info_impl.h b/src/modules/video_capture/main/source/device_info_impl.h
new file mode 100644
index 0000000..0bc4711
--- /dev/null
+++ b/src/modules/video_capture/main/source/device_info_impl.h
@@ -0,0 +1,68 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_
+
+#include "video_capture.h"
+
+#include "map_wrapper.h"
+#include "rw_lock_wrapper.h"
+#include "video_capture_delay.h"
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+class DeviceInfoImpl: public VideoCaptureModule::DeviceInfo
+{
+public:
+    DeviceInfoImpl(const WebRtc_Word32 id);
+    virtual ~DeviceInfoImpl(void);
+    virtual WebRtc_Word32 NumberOfCapabilities(const char* deviceUniqueIdUTF8);
+    virtual WebRtc_Word32 GetCapability(
+        const char* deviceUniqueIdUTF8,
+        const WebRtc_UWord32 deviceCapabilityNumber,
+        VideoCaptureCapability& capability);
+
+    virtual WebRtc_Word32 GetBestMatchedCapability(
+        const char* deviceUniqueIdUTF8,
+        const VideoCaptureCapability& requested,
+        VideoCaptureCapability& resulting);
+    virtual WebRtc_Word32 GetOrientation(
+        const char* deviceUniqueIdUTF8,
+        VideoCaptureRotation& orientation);
+
+protected:
+    /* Initialize this object*/
+
+    virtual WebRtc_Word32 Init()=0;
+    /*
+     * Fills the member variable _captureCapabilities with capabilities for the given device name.
+     */
+    virtual WebRtc_Word32 CreateCapabilityMap(const char* deviceUniqueIdUTF8)=0;
+
+    /* Returns the expected Capture delay*/
+    WebRtc_Word32 GetExpectedCaptureDelay(const DelayValues delayValues[],
+                                          const WebRtc_UWord32 sizeOfDelayValues,
+                                          const char* productId,
+                                          const WebRtc_UWord32 width,
+                                          const WebRtc_UWord32 height);
+protected:
+    // Data members
+    WebRtc_Word32 _id;
+    MapWrapper _captureCapabilities;
+    RWLockWrapper& _apiLock;
+    char* _lastUsedDeviceName;
+    WebRtc_UWord32 _lastUsedDeviceNameLength;
+};
+} //namespace videocapturemodule
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_DEVICE_INFO_IMPL_H_
diff --git a/src/modules/video_capture/main/source/video_capture.gypi b/src/modules/video_capture/main/source/video_capture.gypi
new file mode 100644
index 0000000..f3fb826
--- /dev/null
+++ b/src/modules/video_capture/main/source/video_capture.gypi
@@ -0,0 +1,199 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'video_capture_module',
+      'type': '<(library)',
+      'dependencies': [
+        'webrtc_utility',
+        '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../interface',
+        '../../../interface',
+        '<(webrtc_root)/common_video/libyuv/include',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../interface',
+          '../../../interface',
+          '<(webrtc_root)/common_video/libyuv/include',
+        ],
+      },
+      'sources': [
+        # interfaces
+        '../interface/video_capture.h',
+        '../interface/video_capture_defines.h',
+        '../interface/video_capture_factory.h',
+        # headers
+        'video_capture_config.h',
+        'video_capture_delay.h',
+        'video_capture_impl.h',
+        'device_info_impl.h',
+
+        # DEFINE PLATFORM INDEPENDENT SOURCE FILES
+        'video_capture_factory.cc',
+        'video_capture_impl.cc',
+        'device_info_impl.cc',
+      ],
+      'conditions': [
+        ['include_internal_video_capture==0', {
+          'sources': [
+            'External/device_info_external.cc',
+            'External/video_capture_external.cc',
+          ],
+        },{  # include_internal_video_capture == 1
+          'conditions': [
+            # DEFINE PLATFORM SPECIFIC SOURCE FILES
+            ['OS=="linux"', {
+              'include_dirs': [
+                'Linux',
+              ],
+              'sources': [
+                'Linux/device_info_linux.h',
+                'Linux/video_capture_linux.h',
+                'Linux/device_info_linux.cc',
+                'Linux/video_capture_linux.cc',
+              ],
+            }],  # linux
+            ['OS=="mac"', {
+              'sources': [
+                'Mac/QTKit/video_capture_recursive_lock.h',
+                'Mac/QTKit/video_capture_qtkit.h',
+                'Mac/QTKit/video_capture_qtkit_info.h',
+                'Mac/QTKit/video_capture_qtkit_info_objc.h',
+                'Mac/QTKit/video_capture_qtkit_objc.h',
+                'Mac/QTKit/video_capture_qtkit_utility.h',
+                'Mac/video_capture_mac.mm',
+                'Mac/QTKit/video_capture_qtkit.mm',
+                'Mac/QTKit/video_capture_qtkit_objc.mm',
+                'Mac/QTKit/video_capture_recursive_lock.mm',
+                'Mac/QTKit/video_capture_qtkit_info.mm',
+                'Mac/QTKit/video_capture_qtkit_info_objc.mm',
+              ],
+              'include_dirs': [
+                'Mac',
+              ],
+              'link_settings': {
+                'xcode_settings': {
+                  'OTHER_LDFLAGS': [
+                    '-framework QTKit',
+                  ],
+                },
+              },
+            }],  # mac
+            ['OS=="win"', {
+              'dependencies': [
+                '<(webrtc_root)/modules/video_capture/main/source/Windows/direct_show_base_classes.gyp:direct_show_base_classes',
+              ],
+              'include_dirs': [
+                'Windows',
+              ],
+              'sources': [
+                'Windows/help_functions_windows.h',
+                'Windows/sink_filter_windows.h',
+                'Windows/video_capture_windows.h',
+                'Windows/device_info_windows.h',
+                'Windows/capture_delay_values_windows.h',
+                'Windows/help_functions_windows.cc',
+                'Windows/sink_filter_windows.cc',
+                'Windows/video_capture_windows.cc',
+                'Windows/device_info_windows.cc',
+                'Windows/video_capture_factory_windows.cc',
+              ],
+              'msvs_settings': {
+                'VCLibrarianTool': {
+                  'AdditionalDependencies': 'Strmiids.lib',
+                },
+              },
+            }],  # win
+            ['OS=="android"', {
+              'include_dirs': [
+                'android',
+              ],
+              'sources': [
+                'android/device_info_android.cc',
+                'android/device_info_android.h',
+                'android/video_capture_android.cc',
+                'android/video_capture_android.h',
+              ],
+            }],  # android
+          ], # conditions
+        }],  # include_internal_video_capture
+      ], # conditions
+    },
+  ],
+  'conditions': [
+    ['include_tests==1', {
+      'targets': [
+        {
+          'target_name': 'video_capture_module_test',
+          'type': 'executable',
+          'dependencies': [
+           'video_capture_module',
+           'webrtc_utility',
+           '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+           '<(DEPTH)/testing/gtest.gyp:gtest',
+          ],
+          'include_dirs': [
+            '../interface',
+          ],
+          'sources': [
+            '../test/video_capture_unittest.cc',
+            '../test/video_capture_main_mac.mm',
+          ],
+          'conditions': [
+           # DEFINE PLATFORM SPECIFIC INCLUDE AND CFLAGS
+            ['OS=="mac" or OS=="linux"', {
+              'cflags': [
+                '-Wno-write-strings',
+              ],
+              'ldflags': [
+                '-lpthread -lm',
+              ],
+            }],
+            ['OS=="linux"', {
+              'libraries': [
+                '-lrt',
+                '-lXext',
+                '-lX11',
+              ],
+            }],
+            ['OS=="mac"', {
+              'dependencies': [
+                # Link with a special main for mac so we can use the webcam.
+                '<(webrtc_root)/test/test.gyp:test_support_main_threaded_mac',
+              ],
+              'xcode_settings': {
+                # TODO(andrew): CoreAudio and AudioToolbox shouldn't be needed.
+                'OTHER_LDFLAGS': [
+                  '-framework Foundation -framework AppKit -framework Cocoa -framework OpenGL -framework CoreVideo -framework CoreAudio -framework AudioToolbox',
+                ],
+              },
+            }], # OS=="mac"
+            ['OS!="mac"', {
+              'dependencies': [
+                # Otherwise, use the regular main.
+                '<(webrtc_root)/test/test.gyp:test_support_main',
+              ],
+            }], # OS!="mac"
+          ] # conditions
+        },
+      ],
+    }],
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/modules/video_capture/main/source/video_capture_config.h b/src/modules/video_capture/main/source/video_capture_config.h
new file mode 100644
index 0000000..ab4010c
--- /dev/null
+++ b/src/modules/video_capture/main/source/video_capture_config.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_CONFIG_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_CONFIG_H_
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+enum {kDefaultWidth = 640};  // Start width
+enum {kDefaultHeight = 480}; // Start heigt
+enum {kDefaultFrameRate = 30}; // Start frame rate
+
+enum {kMaxFrameRate =60}; // Max allowed frame rate of the start image 
+
+enum {kDefaultCaptureDelay = 120}; 
+enum {kMaxCaptureDelay = 270}; // Max capture delay allowed in the precompiled capture delay values.  
+
+enum {kProcessInterval = 300}; 
+enum {kFrameRateCallbackInterval = 1000}; 
+enum {kFrameRateCountHistorySize = 90};
+enum {kFrameRateHistoryWindowMs = 2000};
+}  // namespace videocapturemodule
+}  // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_CONFIG_H_
diff --git a/src/modules/video_capture/main/source/video_capture_delay.h b/src/modules/video_capture/main/source/video_capture_delay.h
new file mode 100644
index 0000000..9f5b76e
--- /dev/null
+++ b/src/modules/video_capture/main/source/video_capture_delay.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_DELAY_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_DELAY_H_
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+
+struct DelayValue
+{
+    WebRtc_Word32 width;
+    WebRtc_Word32 height;
+    WebRtc_Word32 delay;
+};
+
+enum { NoOfDelayValues = 40 };
+struct DelayValues
+{
+    char * deviceName;
+    char* productId;
+    DelayValue delayValues[NoOfDelayValues];
+};
+
+} //namespace videocapturemodule
+} //namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_DELAY_H_
diff --git a/src/modules/video_capture/main/source/video_capture_factory.cc b/src/modules/video_capture/main/source/video_capture_factory.cc
new file mode 100644
index 0000000..6b792e1
--- /dev/null
+++ b/src/modules/video_capture/main/source/video_capture_factory.cc
@@ -0,0 +1,32 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_capture_factory.h"
+#include "video_capture_impl.h"
+
+namespace webrtc
+{
+
+VideoCaptureModule* VideoCaptureFactory::Create(const WebRtc_Word32 id,
+    const char* deviceUniqueIdUTF8) {
+  return videocapturemodule::VideoCaptureImpl::Create(id, deviceUniqueIdUTF8);
+}
+
+VideoCaptureModule* VideoCaptureFactory::Create(const WebRtc_Word32 id,
+    VideoCaptureExternal*& externalCapture) {
+  return videocapturemodule::VideoCaptureImpl::Create(id, externalCapture);
+}
+
+VideoCaptureModule::DeviceInfo* VideoCaptureFactory::CreateDeviceInfo(
+    const WebRtc_Word32 id) {
+  return videocapturemodule::VideoCaptureImpl::CreateDeviceInfo(id);
+}
+
+} // namespace webrtc
diff --git a/src/modules/video_capture/main/source/video_capture_impl.cc b/src/modules/video_capture/main/source/video_capture_impl.cc
new file mode 100644
index 0000000..a0f9638
--- /dev/null
+++ b/src/modules/video_capture/main/source/video_capture_impl.cc
@@ -0,0 +1,505 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_capture_impl.h"
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "critical_section_wrapper.h"
+#include "module_common_types.h"
+#include "ref_count.h"
+#include "tick_util.h"
+#include "trace.h"
+#include "video_capture_config.h"
+
+#include <stdlib.h>
+
+namespace webrtc
+{
+namespace videocapturemodule
+{
+VideoCaptureModule* VideoCaptureImpl::Create(
+    const WebRtc_Word32 id,
+    VideoCaptureExternal*& externalCapture)
+{
+    RefCountImpl<VideoCaptureImpl>* implementation =
+        new RefCountImpl<VideoCaptureImpl>(id);
+    externalCapture = implementation;
+    return implementation;
+}
+
+const char* VideoCaptureImpl::CurrentDeviceName() const
+{
+    return _deviceUniqueId;
+}
+
+WebRtc_Word32 VideoCaptureImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    return 0;
+}
+
+// returns the number of milliseconds until the module want a worker thread to call Process
+WebRtc_Word32 VideoCaptureImpl::TimeUntilNextProcess()
+{
+    CriticalSectionScoped cs(&_callBackCs);
+    TickTime timeNow = TickTime::Now();
+
+    WebRtc_Word32 timeToNormalProcess = kProcessInterval
+        - (WebRtc_Word32)((TickTime::Now() - _lastProcessTime).Milliseconds());
+    WebRtc_Word32 timeToStartImage = timeToNormalProcess;
+    if (_startImageFrameIntervall)
+    {
+        timeToStartImage = _startImageFrameIntervall
+            - (WebRtc_Word32)((timeNow - _lastSentStartImageTime).Milliseconds());
+        if (timeToStartImage < 0)
+        {
+            timeToStartImage = 0;
+        }
+    }
+    return (timeToStartImage < timeToNormalProcess)
+            ? timeToStartImage : timeToNormalProcess;
+}
+
+// Process any pending tasks such as timeouts
+WebRtc_Word32 VideoCaptureImpl::Process()
+{
+    CriticalSectionScoped cs(&_callBackCs);
+
+    const TickTime now = TickTime::Now();
+    _lastProcessTime = TickTime::Now();
+
+    // Handle No picture alarm
+
+    if (_lastProcessFrameCount.Ticks() == _incomingFrameTimes[0].Ticks() &&
+        _captureAlarm != Raised)
+    {
+        if (_noPictureAlarmCallBack && _captureCallBack)
+        {
+            _captureAlarm = Raised;
+            _captureCallBack->OnNoPictureAlarm(_id, _captureAlarm);
+        }
+    }
+    else if (_lastProcessFrameCount.Ticks() != _incomingFrameTimes[0].Ticks() &&
+             _captureAlarm != Cleared)
+    {
+        if (_noPictureAlarmCallBack && _captureCallBack)
+        {
+            _captureAlarm = Cleared;
+            _captureCallBack->OnNoPictureAlarm(_id, _captureAlarm);
+
+        }
+    }
+
+    // Handle frame rate callback
+    if ((now - _lastFrameRateCallbackTime).Milliseconds()
+        > kFrameRateCallbackInterval)
+    {
+        if (_frameRateCallBack && _captureCallBack)
+        {
+            const WebRtc_UWord32 frameRate = CalculateFrameRate(now);
+            _captureCallBack->OnCaptureFrameRate(_id, frameRate);
+        }
+        _lastFrameRateCallbackTime = now; // Can be set by EnableFrameRateCallback
+
+    }
+
+    _lastProcessFrameCount = _incomingFrameTimes[0];
+
+    // Handle start image frame rates.
+    if (_startImageFrameIntervall
+        && (now - _lastSentStartImageTime).Milliseconds() >= _startImageFrameIntervall)
+    {
+        _lastSentStartImageTime = now;
+        if (_dataCallBack)
+        {
+            _captureFrame.CopyFrame(_startImage);
+            _captureFrame.SetRenderTime(TickTime::MillisecondTimestamp());
+            _dataCallBack->OnIncomingCapturedFrame(_id, _captureFrame,
+                                                   kVideoCodecUnknown);
+        }
+    }
+    return 0;
+}
+
+VideoCaptureImpl::VideoCaptureImpl(const WebRtc_Word32 id)
+    : _id(id), _deviceUniqueId(NULL), _apiCs(*CriticalSectionWrapper::CreateCriticalSection()),
+      _captureDelay(0), _requestedCapability(),
+      _callBackCs(*CriticalSectionWrapper::CreateCriticalSection()),
+      _lastProcessTime(TickTime::Now()),
+      _lastFrameRateCallbackTime(TickTime::Now()), _frameRateCallBack(false),
+      _noPictureAlarmCallBack(false), _captureAlarm(Cleared), _setCaptureDelay(0),
+      _dataCallBack(NULL), _captureCallBack(NULL),
+      _startImage(), _startImageFrameIntervall(0),
+      _lastSentStartImageTime(TickTime::Now()),
+      _lastProcessFrameCount(TickTime::Now()), _rotateFrame(kRotateNone),
+      last_capture_time_(TickTime::MillisecondTimestamp())
+
+{
+    _requestedCapability.width = kDefaultWidth;
+    _requestedCapability.height = kDefaultHeight;
+    _requestedCapability.maxFPS = 30;
+    _requestedCapability.rawType = kVideoI420;
+    _requestedCapability.codecType = kVideoCodecUnknown;
+    memset(_incomingFrameTimes, 0, sizeof(_incomingFrameTimes));
+}
+
+VideoCaptureImpl::~VideoCaptureImpl()
+{
+    DeRegisterCaptureDataCallback();
+    DeRegisterCaptureCallback();
+    delete &_callBackCs;
+    delete &_apiCs;
+
+    if (_deviceUniqueId)
+        delete[] _deviceUniqueId;
+}
+
+WebRtc_Word32 VideoCaptureImpl::RegisterCaptureDataCallback(
+                                        VideoCaptureDataCallback& dataCallBack)
+{
+    CriticalSectionScoped cs(&_apiCs);
+    CriticalSectionScoped cs2(&_callBackCs);
+    _dataCallBack = &dataCallBack;
+
+    return 0;
+}
+
+WebRtc_Word32 VideoCaptureImpl::DeRegisterCaptureDataCallback()
+{
+    CriticalSectionScoped cs(&_apiCs);
+    CriticalSectionScoped cs2(&_callBackCs);
+    _dataCallBack = NULL;
+    return 0;
+}
+WebRtc_Word32 VideoCaptureImpl::RegisterCaptureCallback(VideoCaptureFeedBack& callBack)
+{
+
+    CriticalSectionScoped cs(&_apiCs);
+    CriticalSectionScoped cs2(&_callBackCs);
+    _captureCallBack = &callBack;
+    return 0;
+}
+WebRtc_Word32 VideoCaptureImpl::DeRegisterCaptureCallback()
+{
+
+    CriticalSectionScoped cs(&_apiCs);
+    CriticalSectionScoped cs2(&_callBackCs);
+    _captureCallBack = NULL;
+    return 0;
+
+}
+WebRtc_Word32 VideoCaptureImpl::SetCaptureDelay(WebRtc_Word32 delayMS)
+{
+    CriticalSectionScoped cs(&_apiCs);
+    _captureDelay = delayMS;
+    return 0;
+}
+WebRtc_Word32 VideoCaptureImpl::CaptureDelay()
+{
+    CriticalSectionScoped cs(&_apiCs);
+    return _setCaptureDelay;
+}
+
+WebRtc_Word32 VideoCaptureImpl::DeliverCapturedFrame(VideoFrame& captureFrame,
+    WebRtc_Word32 width, WebRtc_Word32 height, WebRtc_Word64 capture_time,
+    VideoCodecType codec_type) {
+  UpdateFrameCount();// frame count used for local frame rate callback.
+  _startImageFrameIntervall = 0; // prevent the start image to be displayed.
+
+  const bool callOnCaptureDelayChanged = _setCaptureDelay != _captureDelay;
+  // Capture delay changed
+  if (_setCaptureDelay != _captureDelay) {
+      _setCaptureDelay = _captureDelay;
+  }
+
+  // Set the capture time
+  if (capture_time != 0) {
+      captureFrame.SetRenderTime(capture_time);
+  }
+  else {
+      captureFrame.SetRenderTime(TickTime::MillisecondTimestamp());
+  }
+
+  if (captureFrame.RenderTimeMs() == last_capture_time_) {
+    // We don't allow the same capture time for two frames, drop this one.
+    return -1;
+  }
+  last_capture_time_ = captureFrame.RenderTimeMs();
+
+  captureFrame.SetHeight(height);
+  captureFrame.SetWidth(width);
+
+  if (_dataCallBack) {
+    if (callOnCaptureDelayChanged) {
+      _dataCallBack->OnCaptureDelayChanged(_id, _captureDelay);
+    }
+    _dataCallBack->OnIncomingCapturedFrame(_id, captureFrame, codec_type);
+  }
+
+  return 0;
+}
+
+WebRtc_Word32 VideoCaptureImpl::IncomingFrame(
+    WebRtc_UWord8* videoFrame,
+    WebRtc_Word32 videoFrameLength,
+    const VideoCaptureCapability& frameInfo,
+    WebRtc_Word64 captureTime/*=0*/)
+{
+    WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideoCapture, _id,
+               "IncomingFrame width %d, height %d", (int) frameInfo.width,
+               (int) frameInfo.height);
+
+    TickTime startProcessTime = TickTime::Now();
+
+    CriticalSectionScoped cs(&_callBackCs);
+
+    const WebRtc_Word32 width = frameInfo.width;
+    const WebRtc_Word32 height = frameInfo.height;
+
+    if (frameInfo.codecType == kVideoCodecUnknown)
+    {
+        // Not encoded, convert to I420.
+        const VideoType commonVideoType =
+                  RawVideoTypeToCommonVideoVideoType(frameInfo.rawType);
+
+        if (frameInfo.rawType != kVideoMJPEG &&
+            CalcBufferSize(commonVideoType, width,
+                           abs(height)) != videoFrameLength)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                         "Wrong incoming frame length.");
+            return -1;
+        }
+
+        // Allocate I420 buffer.
+        int requiredLength = CalcBufferSize(kI420, width, abs(height));
+        _captureFrame.VerifyAndAllocate(requiredLength);
+        if (!_captureFrame.Buffer())
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                       "Failed to allocate frame buffer.");
+            return -1;
+        }
+
+        memset(_captureFrame.Buffer(), 0, _captureFrame.Size());
+        // Keeping stride = width for I420 destination.
+        int dstStride  = width;
+        const int conversionResult = ConvertToI420(commonVideoType,
+                                                   videoFrame,
+                                                   0, 0,  // No cropping
+                                                   width, height,
+                                                   videoFrameLength,
+                                                   width, height, dstStride,
+                                                   _rotateFrame,
+                                                   _captureFrame.Buffer());
+        if (conversionResult < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                       "Failed to convert capture frame from type %d to I420",
+                       frameInfo.rawType);
+            return -1;
+        }
+        _captureFrame.SetLength(requiredLength);
+    }
+    else // Encoded format
+    {
+        if (_captureFrame.CopyFrame(videoFrameLength, videoFrame) != 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                       "Failed to copy captured frame of length %d", (int) videoFrameLength);
+        }
+    }
+
+    DeliverCapturedFrame(_captureFrame, width, abs(height), captureTime,
+                         frameInfo.codecType);
+
+
+    const WebRtc_UWord32 processTime =
+        (WebRtc_UWord32)(TickTime::Now() - startProcessTime).Milliseconds();
+    if (processTime > 10) // If the process time is too long MJPG will not work well.
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
+                   "Too long processing time of Incoming frame: %ums",
+                   (unsigned int) processTime);
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 VideoCaptureImpl::IncomingFrameI420(
+    const VideoFrameI420& video_frame, WebRtc_Word64 captureTime) {
+
+  CriticalSectionScoped cs(&_callBackCs);
+
+  // Allocate I420 buffer
+  int frame_size = CalcBufferSize(kI420,
+                                  video_frame.width,
+                                  video_frame.height);
+  _captureFrame.VerifyAndAllocate(frame_size);
+  if (!_captureFrame.Buffer()) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+               "Failed to allocate frame buffer.");
+    return -1;
+  }
+
+  // Copy planes to the _captureFrame
+  int y_width = video_frame.width;
+  int uv_width = video_frame.width / 2;
+  int y_rows = video_frame.height;
+  int uv_rows = video_frame.height / 2;  // I420
+  unsigned char* current_pointer = _captureFrame.Buffer();
+  unsigned char* y_plane = video_frame.y_plane;
+  unsigned char* u_plane = video_frame.u_plane;
+  unsigned char* v_plane = video_frame.v_plane;
+  // Copy Y
+  for (int i = 0; i < y_rows; ++i) {
+    memcpy(current_pointer, y_plane, y_width);
+    // Remove the alignment which ViE doesn't support.
+    current_pointer += y_width;
+    y_plane += video_frame.y_pitch;
+  }
+  // Copy U
+  for (int i = 0; i < uv_rows; ++i) {
+    memcpy(current_pointer, u_plane, uv_width);
+    // Remove the alignment which ViE doesn't support.
+    current_pointer += uv_width;
+    u_plane += video_frame.u_pitch;
+  }
+  // Copy V
+  for (int i = 0; i < uv_rows; ++i) {
+    memcpy(current_pointer, v_plane, uv_width);
+    // Remove the alignment which ViE doesn't support.
+    current_pointer += uv_width;
+    v_plane += video_frame.v_pitch;
+  }
+  _captureFrame.SetLength(frame_size);
+
+  DeliverCapturedFrame(_captureFrame,
+                       video_frame.width,
+                       video_frame.height,
+                       captureTime,
+                       kVideoCodecUnknown);
+
+  return 0;
+}
+
+WebRtc_Word32 VideoCaptureImpl::SetCaptureRotation(VideoCaptureRotation rotation)
+{
+    CriticalSectionScoped cs(&_apiCs);
+    CriticalSectionScoped cs2(&_callBackCs);
+    switch (rotation)
+    {
+        case kCameraRotate0:
+            _rotateFrame = kRotateNone;
+            break;
+        case kCameraRotate90:
+            _rotateFrame = kRotate90;
+            break;
+        case kCameraRotate180:
+            _rotateFrame = kRotate180;
+            break;
+        case kCameraRotate270:
+            _rotateFrame = kRotate270;
+            break;
+    }
+    return 0;
+}
+
+WebRtc_Word32 VideoCaptureImpl::StartSendImage(const VideoFrame& videoFrame,
+                                                     WebRtc_Word32 frameRate)
+{
+    CriticalSectionScoped cs(&_apiCs);
+    CriticalSectionScoped cs2(&_callBackCs);
+    if (frameRate < 1 || frameRate > kMaxFrameRate)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                   "StartSendImage Invalid parameter. frameRate %d", (int) frameRate);
+        return -1;;
+    }
+    _startImage.CopyFrame(videoFrame);
+    _startImageFrameIntervall = 1000 / frameRate;
+    _lastSentStartImageTime = TickTime::Now();
+    return 0;
+
+}
+WebRtc_Word32 VideoCaptureImpl::StopSendImage()
+{
+    CriticalSectionScoped cs(&_apiCs);
+    CriticalSectionScoped cs2(&_callBackCs);
+    _startImageFrameIntervall = 0;
+    return 0;
+}
+
+WebRtc_Word32 VideoCaptureImpl::EnableFrameRateCallback(const bool enable)
+{
+    CriticalSectionScoped cs(&_apiCs);
+    CriticalSectionScoped cs2(&_callBackCs);
+    _frameRateCallBack = enable;
+    if (enable)
+    {
+        _lastFrameRateCallbackTime = TickTime::Now();
+    }
+    return 0;
+}
+
+WebRtc_Word32 VideoCaptureImpl::EnableNoPictureAlarm(const bool enable)
+{
+    CriticalSectionScoped cs(&_apiCs);
+    CriticalSectionScoped cs2(&_callBackCs);
+    _noPictureAlarmCallBack = enable;
+    return 0;
+}
+
+void VideoCaptureImpl::UpdateFrameCount()
+{
+    if (_incomingFrameTimes[0].MicrosecondTimestamp() == 0)
+    {
+        // first no shift
+    }
+    else
+    {
+        // shift
+        for (int i = (kFrameRateCountHistorySize - 2); i >= 0; i--)
+        {
+            _incomingFrameTimes[i + 1] = _incomingFrameTimes[i];
+        }
+    }
+    _incomingFrameTimes[0] = TickTime::Now();
+}
+
+WebRtc_UWord32 VideoCaptureImpl::CalculateFrameRate(const TickTime& now)
+{
+    WebRtc_Word32 num = 0;
+    WebRtc_Word32 nrOfFrames = 0;
+    for (num = 1; num < (kFrameRateCountHistorySize - 1); num++)
+    {
+        if (_incomingFrameTimes[num].Ticks() <= 0
+            || (now - _incomingFrameTimes[num]).Milliseconds() > kFrameRateHistoryWindowMs) // don't use data older than 2sec
+        {
+            break;
+        }
+        else
+        {
+            nrOfFrames++;
+        }
+    }
+    if (num > 1)
+    {
+        WebRtc_Word64 diff = (now - _incomingFrameTimes[num - 1]).Milliseconds();
+        if (diff > 0)
+        {
+            return WebRtc_UWord32((nrOfFrames * 1000.0f / diff) + 0.5f);
+        }
+    }
+
+    return nrOfFrames;
+}
+} // namespace videocapturemodule
+} // namespace webrtc
diff --git a/src/modules/video_capture/main/source/video_capture_impl.h b/src/modules/video_capture/main/source/video_capture_impl.h
new file mode 100644
index 0000000..6d2c023
--- /dev/null
+++ b/src/modules/video_capture/main/source/video_capture_impl.h
@@ -0,0 +1,143 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_IMPL_H_
+
+/*
+ * video_capture_impl.h
+ */
+
+#include "video_capture.h"
+#include "video_capture_config.h"
+#include "tick_util.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+
+namespace videocapturemodule {
+// Class definitions
+class VideoCaptureImpl: public VideoCaptureModule, public VideoCaptureExternal
+{
+public:
+
+    /*
+     *   Create a video capture module object
+     *
+     *   id              - unique identifier of this video capture module object
+     *   deviceUniqueIdUTF8 -  name of the device. Available names can be found by using GetDeviceName
+     */
+    static VideoCaptureModule* Create(const WebRtc_Word32 id,
+                                      const char* deviceUniqueIdUTF8);
+
+    /*
+     *   Create a video capture module object used for external capture.
+     *
+     *   id              - unique identifier of this video capture module object
+     *   externalCapture - [out] interface to call when a new frame is captured.
+     */
+    static VideoCaptureModule* Create(const WebRtc_Word32 id,
+                                      VideoCaptureExternal*& externalCapture);
+
+    static DeviceInfo* CreateDeviceInfo(const WebRtc_Word32 id);
+
+    // Implements Module declared functions.
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    //Call backs
+    virtual WebRtc_Word32 RegisterCaptureDataCallback(VideoCaptureDataCallback& dataCallback);
+    virtual WebRtc_Word32 DeRegisterCaptureDataCallback();
+    virtual WebRtc_Word32 RegisterCaptureCallback(VideoCaptureFeedBack& callBack);
+    virtual WebRtc_Word32 DeRegisterCaptureCallback();
+
+    virtual WebRtc_Word32 StartSendImage(const VideoFrame& videoFrame,
+                                         WebRtc_Word32 frameRate = 1);
+    virtual WebRtc_Word32 StopSendImage();
+
+    virtual WebRtc_Word32 SetCaptureDelay(WebRtc_Word32 delayMS);
+    virtual WebRtc_Word32 CaptureDelay();
+    virtual WebRtc_Word32 SetCaptureRotation(VideoCaptureRotation rotation);
+
+    virtual WebRtc_Word32 EnableFrameRateCallback(const bool enable);
+    virtual WebRtc_Word32 EnableNoPictureAlarm(const bool enable);
+
+    virtual const char* CurrentDeviceName() const;
+
+    // Module handling
+    virtual WebRtc_Word32 TimeUntilNextProcess();
+    virtual WebRtc_Word32 Process();
+
+    // Implement VideoCaptureExternal
+    virtual WebRtc_Word32 IncomingFrame(WebRtc_UWord8* videoFrame,
+                                        WebRtc_Word32 videoFrameLength,
+                                        const VideoCaptureCapability& frameInfo,
+                                        WebRtc_Word64 captureTime = 0);
+    virtual WebRtc_Word32 IncomingFrameI420(
+        const VideoFrameI420& video_frame,
+        WebRtc_Word64 captureTime = 0);
+
+    // Platform dependent
+    virtual WebRtc_Word32 StartCapture(const VideoCaptureCapability& capability)
+    {
+        _requestedCapability = capability;
+        return -1;
+    }
+    virtual WebRtc_Word32 StopCapture()   { return -1; }
+    virtual bool CaptureStarted() {return false; }
+    virtual WebRtc_Word32 CaptureSettings(VideoCaptureCapability& /*settings*/)
+    { return -1; }
+    VideoCaptureEncodeInterface* GetEncodeInterface(const VideoCodec& /*codec*/)
+    { return NULL; }
+
+protected:
+    VideoCaptureImpl(const WebRtc_Word32 id);
+    virtual ~VideoCaptureImpl();
+    WebRtc_Word32 DeliverCapturedFrame(
+        VideoFrame& captureFrame, WebRtc_Word32 width, WebRtc_Word32 height,
+        WebRtc_Word64 capture_time, VideoCodecType codec_type);
+
+    WebRtc_Word32 _id; // Module ID
+    char* _deviceUniqueId; // current Device unique name;
+    CriticalSectionWrapper& _apiCs;
+    WebRtc_Word32 _captureDelay; // Current capture delay. May be changed of platform dependent parts.
+    VideoCaptureCapability _requestedCapability; // Should be set by platform dependent code in StartCapture.
+private:
+    void UpdateFrameCount();
+    WebRtc_UWord32 CalculateFrameRate(const TickTime& now);
+
+    CriticalSectionWrapper& _callBackCs;
+
+    TickTime _lastProcessTime; // last time the module process function was called.
+    TickTime _lastFrameRateCallbackTime; // last time the frame rate callback function was called.
+    bool _frameRateCallBack; // true if EnableFrameRateCallback
+    bool _noPictureAlarmCallBack; //true if EnableNoPictureAlarm
+    VideoCaptureAlarm _captureAlarm; // current value of the noPictureAlarm
+
+    WebRtc_Word32 _setCaptureDelay; // The currently used capture delay
+    VideoCaptureDataCallback* _dataCallBack;
+    VideoCaptureFeedBack* _captureCallBack;
+
+    VideoFrame _startImage;
+    WebRtc_Word32 _startImageFrameIntervall;
+    TickTime _lastSentStartImageTime; // last time the start image was sent
+    TickTime _lastProcessFrameCount;
+    TickTime _incomingFrameTimes[kFrameRateCountHistorySize];// timestamp for local captured frames
+    VideoRotationMode _rotateFrame; //Set if the frame should be rotated by the capture module.
+
+    VideoFrame _captureFrame;
+
+    // Used to make sure incoming timestamp is increasing for every frame.
+    WebRtc_Word64 last_capture_time_;
+};
+} // namespace videocapturemodule
+} //namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_VIDEO_CAPTURE_IMPL_H_
diff --git a/src/modules/video_capture/main/test/android/.classpath b/src/modules/video_capture/main/test/android/.classpath
new file mode 100644
index 0000000..841ac51
--- /dev/null
+++ b/src/modules/video_capture/main/test/android/.classpath
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>

+<classpath>

+	<classpathentry kind="src" path="src"/>

+	<classpathentry kind="src" path="renderer"/>

+	<classpathentry kind="src" path="java"/>

+	<classpathentry kind="src" path="gen"/>

+	<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>

+	<classpathentry kind="output" path="bin"/>

+</classpath>

diff --git a/src/modules/video_capture/main/test/android/.project b/src/modules/video_capture/main/test/android/.project
new file mode 100644
index 0000000..373da0e
--- /dev/null
+++ b/src/modules/video_capture/main/test/android/.project
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>

+<projectDescription>

+	<name>VideoCaptureModuleAndroidTest</name>

+	<comment></comment>

+	<projects>

+	</projects>

+	<buildSpec>

+		<buildCommand>

+			<name>com.android.ide.eclipse.adt.ResourceManagerBuilder</name>

+			<arguments>

+			</arguments>

+		</buildCommand>

+		<buildCommand>

+			<name>com.android.ide.eclipse.adt.PreCompilerBuilder</name>

+			<arguments>

+			</arguments>

+		</buildCommand>

+		<buildCommand>

+			<name>org.eclipse.jdt.core.javabuilder</name>

+			<arguments>

+			</arguments>

+		</buildCommand>

+		<buildCommand>

+			<name>com.android.ide.eclipse.adt.ApkBuilder</name>

+			<arguments>

+			</arguments>

+		</buildCommand>

+	</buildSpec>

+	<natures>

+		<nature>com.android.ide.eclipse.adt.AndroidNature</nature>

+		<nature>org.eclipse.jdt.core.javanature</nature>

+	</natures>

+</projectDescription>

diff --git a/src/modules/video_capture/main/test/android/AndroidManifest.xml b/src/modules/video_capture/main/test/android/AndroidManifest.xml
new file mode 100644
index 0000000..1a4ec9b
--- /dev/null
+++ b/src/modules/video_capture/main/test/android/AndroidManifest.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+	  package="org.webrtc.capturemoduleandroidtest"
+	  android:versionCode="1"
+	  android:versionName="1.0">
+  <application android:icon="@drawable/icon"
+	       android:label="@string/app_name"
+	       android:debuggable="true">
+        <activity android:label="@string/app_name"
+		  android:name="VideoCaptureModuleTest"
+		  android:configChanges="orientation|keyboardHidden"
+		  android:launchMode="singleTask"
+		  android:multiprocess="false">
+          <intent-filter>
+            <action android:name="android.intent.action.MAIN" />
+            <category android:name="android.intent.category.LAUNCHER" />
+          </intent-filter>
+        </activity>
+  </application>
+
+  <uses-feature android:required="true"
+		android:name="android.hardware.camera">
+  </uses-feature>
+
+  <uses-permission android:name="android.permission.CAMERA">
+  </uses-permission>
+  <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+  <uses-sdk android:targetSdkVersion="7"
+	    android:minSdkVersion="7">
+  </uses-sdk>
+</manifest> 
diff --git a/src/modules/video_capture/main/test/android/default.properties b/src/modules/video_capture/main/test/android/default.properties
new file mode 100644
index 0000000..2ad44a4
--- /dev/null
+++ b/src/modules/video_capture/main/test/android/default.properties
@@ -0,0 +1,11 @@
+# This file is automatically generated by Android Tools.

+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!

+# 

+# This file must be checked in Version Control Systems.

+# 

+# To customize properties used by the Ant build system use,

+# "build.properties", and override values to adapt the script to your

+# project structure.

+

+# Project target.

+target=android-9

diff --git a/src/modules/video_capture/main/test/android/gen/org/webrtc/capturemoduleandroidtest/R.java b/src/modules/video_capture/main/test/android/gen/org/webrtc/capturemoduleandroidtest/R.java
new file mode 100644
index 0000000..94bf93b
--- /dev/null
+++ b/src/modules/video_capture/main/test/android/gen/org/webrtc/capturemoduleandroidtest/R.java
@@ -0,0 +1,33 @@
+/* AUTO-GENERATED FILE.  DO NOT MODIFY.

+ *

+ * This class was automatically generated by the

+ * aapt tool from the resource data it found.  It

+ * should not be modified by hand.

+ */

+

+package org.webrtc.capturemoduleandroidtest;

+

+public final class R {

+    public static final class attr {

+    }

+    public static final class drawable {

+        public static final int icon=0x7f020000;

+    }

+    public static final class id {

+        public static final int Button01=0x7f050000;

+        public static final int Button02=0x7f050001;

+        public static final int Button03=0x7f050002;

+        public static final int Button04=0x7f050003;

+        public static final int renderView=0x7f050004;

+    }

+    public static final class layout {

+        public static final int main=0x7f030000;

+    }

+    public static final class string {

+        public static final int app_name=0x7f040000;

+        public static final int run_button=0x7f040001;

+        public static final int run_button2=0x7f040002;

+        public static final int run_button3=0x7f040003;

+        public static final int run_button4=0x7f040004;

+    }

+}

diff --git a/src/modules/video_capture/main/test/android/jni/org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest.h b/src/modules/video_capture/main/test/android/jni/org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest.h
new file mode 100644
index 0000000..0320df0
--- /dev/null
+++ b/src/modules/video_capture/main/test/android/jni/org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest.h
@@ -0,0 +1,40 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest */
+
+#ifndef _Included_org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest
+#define _Included_org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest
+ * Method:    RunTest
+ * Signature: (Landroid/content/Context;)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest_RunTest
+  (JNIEnv *, jobject, jobject);
+
+JNIEXPORT jint JNICALL Java_org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest_RenderInit
+(JNIEnv * env, jobject context,jobject surface);
+
+JNIEXPORT jint JNICALL Java_org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest_StartCapture
+(JNIEnv *, jobject);
+
+JNIEXPORT jint JNICALL Java_org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest_StopCapture
+(JNIEnv *, jobject);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/src/modules/video_capture/main/test/android/jni/video_capture_module_android_test_jni.cc b/src/modules/video_capture/main/test/android/jni/video_capture_module_android_test_jni.cc
new file mode 100644
index 0000000..ac2f6e1
--- /dev/null
+++ b/src/modules/video_capture/main/test/android/jni/video_capture_module_android_test_jni.cc
@@ -0,0 +1,149 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h> // memset
+#include <android/log.h>
+
+#include "org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest.h"
+#include "../../../interface/video_capture_factory.h"
+#include "../../../../../video_render/main/interface/video_render.h"
+#include "../../testAPI/testPlatformDependent.h"
+#include "../../testAPI/testPlatformDependent.h"
+#ifdef RENDER_PREVIEW
+#include "../../testAPI/Renderer.h"
+#endif
+
+using namespace webrtc;
+#define WEBRTC_LOG_TAG "*WEBRTCN*" // As in WEBRTC Native...
+// ADM data struct
+typedef struct
+{
+    // Other
+    JavaVM* jvm;
+    Renderer* renderer;
+    VideoCaptureModule* _videoCapture;
+    VideoCaptureModule::DeviceInfo*_captureInfo;
+} JniData;
+
+// Global variables visible in this file
+static JniData jniData;
+
+//////////////////////////////////////////////////////////////////
+// General functions
+//////////////////////////////////////////////////////////////////
+
+/////////////////////////////////////////////
+// JNI_OnLoad
+//
+jint JNI_OnLoad(JavaVM* vm, void* /*reserved*/)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "JNI_OnLoad");
+  if (!vm)
+  {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "JNI_OnLoad did not receive a valid VM pointer");
+    return -1;
+  }
+
+  // Get JNI
+  JNIEnv* env;
+  if (JNI_OK != vm->GetEnv(reinterpret_cast<void**> (&env), JNI_VERSION_1_4))
+  {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "JNI_OnLoad could not get JNI env");
+    return -1;
+  }
+
+  // Init JniData data
+  memset(&jniData, 0, sizeof(jniData));
+
+  // Store the JVM
+  jniData.jvm = vm;
+
+  return JNI_VERSION_1_4;
+}
+
+/////////////////////////////////////////////
+// Run Test
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest_RunTest(
+    JNIEnv * env,
+    jobject context,
+    jobject surface)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "Run test");
+  // Set instance independent Java objects
+  VideoCaptureModule::SetAndroidObjects(jniData.jvm, context);
+
+  // Start test
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                      "Create testPlatformDependent");
+  testPlatformDependent testPlatformDependent;
+  testPlatformDependent.SetRenderer(jniData.renderer);
+  testPlatformDependent.DoTest();
+
+  // Clear instance independent Java objects
+  VideoCaptureModule::SetAndroidObjects(NULL, NULL);
+
+  return 0;
+}
+
+JNIEXPORT jint JNICALL
+Java_org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest_RenderInit(
+    JNIEnv * env,
+    jobject context,
+    jobject surface)
+{
+  VideoRender::SetAndroidObjects(jniData.jvm);
+#ifdef RENDER_PREVIEW
+  Renderer::SetRenderWindow(surface);
+  jniData.renderer=new Renderer(true);
+#endif
+}
+
+JNIEXPORT jint JNICALL
+Java_org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest_StartCapture(
+    JNIEnv * env,
+    jobject context)
+{
+  if (!jniData._captureInfo) {
+    VideoCaptureModule::SetAndroidObjects(jniData.jvm, context);
+    jniData._captureInfo = VideoCaptureFactory::CreateDeviceInfo(5);
+    WebRtc_UWord8 id[256];
+    WebRtc_UWord8 name[256];
+    jniData._captureInfo->GetDeviceName(0, name, 256, id, 256);
+    jniData._videoCapture = VideoCaptureFactory::Create(0, id);
+    VideoCaptureCapability capability;
+
+    jniData._captureInfo->GetCapability(id, 0, capability);
+    capability.width = 176;
+    capability.height = 144;
+    capability.maxFPS = 15;
+
+    jniData._videoCapture->StartCapture(capability);
+  }
+  return 0;
+}
+
+JNIEXPORT jint JNICALL
+Java_org_webrtc_capturemoduleandroidtest_VideoCaptureModuleTest_StopCapture(
+    JNIEnv * env,
+    jobject context)
+{
+  if (jniData._videoCapture) {
+    jniData._videoCapture->StopCapture();
+    delete jniData._captureInfo;
+    VideoCaptureModule::Destroy(jniData._videoCapture);
+    jniData._videoCapture = NULL;
+    jniData._captureInfo = NULL;
+  }
+  return 0;
+}
diff --git a/src/modules/video_capture/main/test/android/res/drawable-hdpi/icon.png b/src/modules/video_capture/main/test/android/res/drawable-hdpi/icon.png
new file mode 100644
index 0000000..8074c4c
--- /dev/null
+++ b/src/modules/video_capture/main/test/android/res/drawable-hdpi/icon.png
Binary files differ
diff --git a/src/modules/video_capture/main/test/android/res/drawable-ldpi/icon.png b/src/modules/video_capture/main/test/android/res/drawable-ldpi/icon.png
new file mode 100644
index 0000000..1095584
--- /dev/null
+++ b/src/modules/video_capture/main/test/android/res/drawable-ldpi/icon.png
Binary files differ
diff --git a/src/modules/video_capture/main/test/android/res/drawable-mdpi/icon.png b/src/modules/video_capture/main/test/android/res/drawable-mdpi/icon.png
new file mode 100644
index 0000000..a07c69f
--- /dev/null
+++ b/src/modules/video_capture/main/test/android/res/drawable-mdpi/icon.png
Binary files differ
diff --git a/src/modules/video_capture/main/test/android/res/layout/main.xml b/src/modules/video_capture/main/test/android/res/layout/main.xml
new file mode 100644
index 0000000..3642733
--- /dev/null
+++ b/src/modules/video_capture/main/test/android/res/layout/main.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+	      android:orientation="vertical"
+	      android:layout_width="fill_parent"
+	      android:layout_height="fill_parent">
+  <Button android:text="@string/run_button"
+	  android:id="@+id/Button01"
+	  android:layout_width="wrap_content"
+	  android:layout_height="wrap_content">
+  </Button>
+  <Button android:text="@string/run_button2"
+	  android:id="@+id/Button02"
+	  android:layout_width="wrap_content"
+	  android:layout_height="wrap_content">
+  </Button>
+  <Button android:text="@string/run_button3"
+	  android:id="@+id/Button03"
+	  android:layout_width="wrap_content"
+	  android:layout_height="wrap_content">
+  </Button>
+  <Button android:text="@string/run_button4"
+	  android:id="@+id/Button04"
+	  android:layout_width="wrap_content"
+	  android:layout_height="wrap_content">
+  </Button>
+  <LinearLayout 
+     android:id="@+id/renderView"
+     android:layout_width="fill_parent"
+     android:layout_height="fill_parent"
+     android:layout_weight="1">
+  </LinearLayout>
+</LinearLayout>
diff --git a/src/modules/video_capture/main/test/android/res/values/strings.xml b/src/modules/video_capture/main/test/android/res/values/strings.xml
new file mode 100644
index 0000000..bd9a752
--- /dev/null
+++ b/src/modules/video_capture/main/test/android/res/values/strings.xml
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    
+    <string name="app_name">VideoCaptureModuleAndroidTest</string>
+<string name="run_button">Run Test</string>
+
+<string name="run_button2">Run Java test</string>
+<string name="run_button3">Start c++ Capture</string>
+<string name="run_button4">Stop c++ Capture</string>
+
+</resources>
diff --git a/src/modules/video_capture/main/test/android/src/org/webrtc/capturemoduleandroidtest/VideoCaptureJavaTest.java b/src/modules/video_capture/main/test/android/src/org/webrtc/capturemoduleandroidtest/VideoCaptureJavaTest.java
new file mode 100644
index 0000000..7a92c48
--- /dev/null
+++ b/src/modules/video_capture/main/test/android/src/org/webrtc/capturemoduleandroidtest/VideoCaptureJavaTest.java
@@ -0,0 +1,61 @@
+package org.webrtc.capturemoduleandroidtest;

+

+import java.util.List;

+

+import android.content.Context;

+import android.util.Log;

+

+import org.webrtc.videoengine.CaptureCapabilityAndroid;

+import org.webrtc.videoengine.VideoCaptureAndroid;

+import org.webrtc.videoengine.VideoCaptureDeviceInfoAndroid;

+

+public class VideoCaptureJavaTest {

+  void DoTest(Context context)

+  {

+    VideoCaptureDeviceInfoAndroid videoCaptureDeviceInfo =

+        VideoCaptureDeviceInfoAndroid.CreateVideoCaptureDeviceInfoAndroid(

+            5,context);

+    for(int i = 0; i < videoCaptureDeviceInfo.NumberOfDevices(); i++) {

+      String deviceUniqueId=videoCaptureDeviceInfo.GetDeviceUniqueName(i);

+      VideoCaptureAndroid videoCapture =

+          videoCaptureDeviceInfo.AllocateCamera(i,0,deviceUniqueId);

+

+      CaptureCapabilityAndroid capArray[] =

+          videoCaptureDeviceInfo.GetCapabilityArray(deviceUniqueId);

+      for(CaptureCapabilityAndroid cap: capArray) {

+        Log.d("*WEBRTC*", "Capability widht" + cap.width +

+              " height " +cap.height+ " frameRate " +cap.maxFPS);

+        int result=videoCapture.StartCapture(cap.width,

+                                             cap.height,

+                                             cap.maxFPS);

+        try{

+          Thread.sleep(2000);//sleep for 2000 ms

+        }

+        catch(InterruptedException ie){

+          //If this thread was interrupted by another thread

+        }

+        result+=videoCapture.StopCapture();

+        Log.d("*WEBRTC*", "Start stop result " + result);

+      }

+      VideoCaptureAndroid.DeleteVideoCaptureAndroid(videoCapture);

+      videoCapture=null;

+    }

+    Log.d("*WEBRTC*", "Test complete");

+  }

+

+  VideoCaptureDeviceInfoAndroid _videoCaptureDeviceInfo;

+  VideoCaptureAndroid _videoCapture;

+  void StartCapture(Context context) {

+    _videoCaptureDeviceInfo =

+        VideoCaptureDeviceInfoAndroid.CreateVideoCaptureDeviceInfoAndroid(

+            5,context);

+    String deviceUniqueId=_videoCaptureDeviceInfo.GetDeviceUniqueName(0);

+    _videoCapture=_videoCaptureDeviceInfo.AllocateCamera(5,0,deviceUniqueId);

+    _videoCapture.StartCapture(176,144,15);

+  }

+  void StopCapture() {

+    _videoCapture.StopCapture();

+    VideoCaptureAndroid.DeleteVideoCaptureAndroid(_videoCapture);

+ }

+

+}

diff --git a/src/modules/video_capture/main/test/android/src/org/webrtc/capturemoduleandroidtest/VideoCaptureModuleTest.java b/src/modules/video_capture/main/test/android/src/org/webrtc/capturemoduleandroidtest/VideoCaptureModuleTest.java
new file mode 100644
index 0000000..7191b1c
--- /dev/null
+++ b/src/modules/video_capture/main/test/android/src/org/webrtc/capturemoduleandroidtest/VideoCaptureModuleTest.java
@@ -0,0 +1,142 @@
+package org.webrtc.capturemoduleandroidtest;

+

+import javax.microedition.khronos.egl.EGLConfig;

+import javax.microedition.khronos.opengles.GL10;

+

+import org.webrtc.capturemoduleandroidtest.R;

+import org.webrtc.videoengine.ViERenderer;

+

+import android.app.Activity;

+import android.opengl.GLSurfaceView;

+import android.os.Bundle;

+import android.util.Log;

+import android.view.SurfaceHolder;

+import android.view.SurfaceView;

+import android.view.View;

+import android.view.View.OnClickListener;

+import android.widget.Button;

+import android.widget.LinearLayout;

+

+public class VideoCaptureModuleTest

+    extends Activity implements OnClickListener {

+  // Set to 1 if OpenGL shall be used. 0 Otherwise

+  private final int _useOpenGL=1;

+  private Thread _testThread;

+  private SurfaceView _view=null;

+  private VideoCaptureModuleTest _thisPointer;

+  private VideoCaptureJavaTest _videoCaptureJavaTest;

+  /** Called when the activity is first created. */

+  @Override

+  public void onCreate(Bundle savedInstanceState) {

+    super.onCreate(savedInstanceState);

+    setContentView(R.layout.main);

+

+    final Button buttonStartCP = (Button) findViewById(R.id.Button01);

+    buttonStartCP.setOnClickListener(this);

+    final Button buttonStartJava = (Button) findViewById(R.id.Button02);

+    buttonStartJava.setOnClickListener(this);

+    final Button buttonStartCPP = (Button) findViewById(R.id.Button03);

+    buttonStartCPP.setOnClickListener(this);

+    final Button buttonStopCPP = (Button) findViewById(R.id.Button04);

+    buttonStopCPP.setOnClickListener(this);

+  }

+

+  private Runnable _testProc = new Runnable() {

+      public void run() {

+        // TODO: choose test from GUI

+        // Select test here, 0 for API test, 1-> for Func tests

+        RunTest(_view);

+      }

+    };

+

+  @Override

+  protected void onStart()

+  {

+    super.onStart();

+  }

+  @Override

+  protected void onRestart()

+  {

+    super.onRestart();

+  }

+  @Override

+  protected void onPause()

+  {

+    super.onPause();

+  }

+  @Override

+  protected void onStop()

+  {

+    super.onStop();

+  }

+

+  // Function used to call test

+  private native int RunTest(Object view);

+  private native int RenderInit(Object view);

+

+  private native int StartCapture();

+  private native int StopCapture();

+

+  static {

+    Log.d("*WEBRTC*",

+          "Loading ModuleVideoCaptureModuleAndroidTest...");

+    System.loadLibrary(

+        "ModuleVideoCaptureModuleAndroidTestJniAPI");

+  }

+

+  public void onClick(View v) {

+    //get the handle to the layout

+    LinearLayout renderLayout=(LinearLayout) findViewById(R.id.renderView);

+    switch(v.getId())

+    {

+      case R.id.Button01:

+        renderLayout.removeAllViews();

+        _view=ViERenderer.CreateLocalRenderer(this);

+        if(_useOpenGL==1)

+        {

+          _view= ViERenderer.CreateRenderer(this, true);

+        }

+        else

+        {

+          _view= new SurfaceView(this);

+        }

+        // add the surfaceview to the layout,

+        // the surfaceview will be the same size as the layout (container)

+        renderLayout.addView(_view);

+        RenderInit(_view);

+        _testThread = new Thread(_testProc);

+        _testThread.start();

+        break;

+      case R.id.Button02:

+        _view=ViERenderer.CreateLocalRenderer(this);

+        renderLayout.removeAllViews();

+        if(_videoCaptureJavaTest==null)

+        {

+          _videoCaptureJavaTest=new VideoCaptureJavaTest();

+          _videoCaptureJavaTest.StartCapture(this);

+          // add the surfaceview to the layout,

+          // the surfaceview will be the same size as the layout (container)

+          renderLayout.addView(_view);

+        }

+        else

+        {

+          _videoCaptureJavaTest.StopCapture();

+          _videoCaptureJavaTest=null;

+        }

+        break;

+

+      case R.id.Button03:

+        _view=ViERenderer.CreateLocalRenderer(this);

+        renderLayout.removeAllViews();

+        StartCapture();

+        // add the surfaceview to the layout,

+        // the surfaceview will be the same size as the layout (container)

+        renderLayout.addView(_view);

+        break;

+      case R.id.Button04:

+        renderLayout.removeAllViews();

+        StopCapture();

+        break;

+    }

+  }

+}
\ No newline at end of file
diff --git a/src/modules/video_capture/main/test/video_capture_main_mac.mm b/src/modules/video_capture/main/test/video_capture_main_mac.mm
new file mode 100644
index 0000000..94fb6fc
--- /dev/null
+++ b/src/modules/video_capture/main/test/video_capture_main_mac.mm
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gtest/gtest.h"
+#include "testsupport/mac/run_threaded_main_mac.h"
+
+int ImplementThisToRunYourTest(int argc, char** argv) {
+  testing::InitGoogleTest(&argc, argv);
+  return RUN_ALL_TESTS();
+}
diff --git a/src/modules/video_capture/main/test/video_capture_unittest.cc b/src/modules/video_capture/main/test/video_capture_unittest.cc
new file mode 100644
index 0000000..1a96abe
--- /dev/null
+++ b/src/modules/video_capture/main/test/video_capture_unittest.cc
@@ -0,0 +1,575 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+
+
+
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "system_wrappers/interface/scoped_refptr.h"
+#include "system_wrappers/interface/sleep.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "testing/gtest/include/gtest/gtest.h"
+#include "modules/utility/interface/process_thread.h"
+#include "modules/video_capture/main/interface/video_capture.h"
+#include "modules/video_capture/main/interface/video_capture_factory.h"
+
+using webrtc::CriticalSectionWrapper;
+using webrtc::CriticalSectionScoped;
+using webrtc::scoped_ptr;
+using webrtc::SleepMs;
+using webrtc::TickTime;
+using webrtc::VideoCaptureAlarm;
+using webrtc::VideoCaptureCapability;
+using webrtc::VideoCaptureDataCallback;
+using webrtc::VideoCaptureFactory;
+using webrtc::VideoCaptureFeedBack;
+using webrtc::VideoCaptureModule;
+
+
+#define WAIT_(ex, timeout, res) \
+  do { \
+    res = (ex); \
+    WebRtc_Word64 start = TickTime::MillisecondTimestamp(); \
+    while (!res && TickTime::MillisecondTimestamp() < start + timeout) { \
+      SleepMs(5); \
+      res = (ex); \
+    } \
+  } while (0);\
+
+#define EXPECT_TRUE_WAIT(ex, timeout) \
+  do { \
+    bool res; \
+    WAIT_(ex, timeout, res); \
+    if (!res) EXPECT_TRUE(ex); \
+  } while (0);
+
+
+static const int kTimeOut = 5000;
+static const int kTestHeight = 288;
+static const int kTestWidth = 352;
+static const int kTestFramerate = 30;
+
+// Compares the content of two video frames.
+static bool CompareFrames(const webrtc::VideoFrame& frame1,
+                          const webrtc::VideoFrame& frame2) {
+  bool result =
+      (frame1.Length() == frame2.Length()) &&
+      (frame1.Width() == frame2.Width()) &&
+      (frame1.Height() == frame2.Height());
+
+  for (unsigned int i = 0; i < frame1.Length() && result; ++i)
+    result = (*(frame1.Buffer()+i) == *(frame2.Buffer()+i));
+  return result;
+}
+
+// Compares the content of a I420 frame in planar form and video frame.
+static bool CompareFrames(const webrtc::VideoFrameI420& frame1,
+                          const webrtc::VideoFrame& frame2) {
+  if (frame1.width != frame2.Width() ||
+      frame1.height != frame2.Height()) {
+      return false;
+  }
+
+  // Compare Y
+  unsigned char* y_plane = frame1.y_plane;
+  for (unsigned int i = 0; i < frame2.Height(); ++i) {
+    for (unsigned int j = 0; j < frame2.Width(); ++j) {
+      if (*y_plane != *(frame2.Buffer()+i*frame2.Width() +j))
+        return false;
+      ++y_plane;
+    }
+    y_plane += frame1.y_pitch - frame1.width;
+  }
+
+  // Compare U
+  unsigned char* u_plane = frame1.u_plane;
+  for (unsigned int i = 0; i < frame2.Height() /2; ++i) {
+    for (unsigned int j = 0; j < frame2.Width() /2; ++j) {
+      if (*u_plane !=*(
+          frame2.Buffer()+frame2.Width() * frame2.Height() +
+          i*frame2.Width() / 2 + j)) {
+        return false;
+      }
+      ++u_plane;
+    }
+    u_plane += frame1.u_pitch - frame1.width / 2;
+  }
+
+  // Compare V
+  unsigned char* v_plane = frame1.v_plane;
+  for (unsigned int i = 0; i < frame2.Height() /2; ++i) {
+    for (unsigned int j = 0; j < frame2.Width() /2; ++j) {
+      if (*v_plane != *(
+          frame2.Buffer()+frame2.Width() * frame2.Height()* 5 / 4 +
+          i*frame2.Width() / 2 + j)) {
+        return false;
+      }
+      ++v_plane;
+    }
+    v_plane += frame1.v_pitch - frame1.width / 2;
+  }
+  return true;
+}
+
+
+class TestVideoCaptureCallback : public VideoCaptureDataCallback {
+ public:
+  TestVideoCaptureCallback()
+    : capture_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      capture_delay_(0),
+      last_render_time_ms_(0),
+      incoming_frames_(0),
+      timing_warnings_(0) {
+  }
+
+  ~TestVideoCaptureCallback() {
+    if (timing_warnings_ > 0)
+      printf("No of timing warnings %d\n", timing_warnings_);
+  }
+
+  virtual void OnIncomingCapturedFrame(const WebRtc_Word32 id,
+                                       webrtc::VideoFrame& videoFrame,
+                                       webrtc::VideoCodecType codecType) {
+    CriticalSectionScoped cs(capture_cs_.get());
+
+    int height = static_cast<int>(videoFrame.Height());
+    int width = static_cast<int>(videoFrame.Width());
+    EXPECT_EQ(height, capability_.height);
+    EXPECT_EQ(width, capability_.width);
+    // RenderTimstamp should be the time now.
+    EXPECT_TRUE(
+        videoFrame.RenderTimeMs() >= TickTime::MillisecondTimestamp()-30 &&
+        videoFrame.RenderTimeMs() <= TickTime::MillisecondTimestamp());
+
+    if ((videoFrame.RenderTimeMs() >
+            last_render_time_ms_ + (1000 * 1.1) / capability_.maxFPS &&
+            last_render_time_ms_ > 0) ||
+        (videoFrame.RenderTimeMs() <
+            last_render_time_ms_ + (1000 * 0.9) / capability_.maxFPS &&
+            last_render_time_ms_ > 0)) {
+      timing_warnings_++;
+    }
+
+    incoming_frames_++;
+    last_render_time_ms_ = videoFrame.RenderTimeMs();
+    last_frame_.CopyFrame(videoFrame);
+  }
+
+  virtual void OnCaptureDelayChanged(const WebRtc_Word32 id,
+                                     const WebRtc_Word32 delay) {
+    CriticalSectionScoped cs(capture_cs_.get());
+    capture_delay_ = delay;
+  }
+
+  void SetExpectedCapability(VideoCaptureCapability capability) {
+    CriticalSectionScoped cs(capture_cs_.get());
+    capability_= capability;
+    incoming_frames_ = 0;
+    last_render_time_ms_ = 0;
+    capture_delay_ = 0;
+  }
+  int incoming_frames() {
+    CriticalSectionScoped cs(capture_cs_.get());
+    return incoming_frames_;
+  }
+
+  int capture_delay() {
+    CriticalSectionScoped cs(capture_cs_.get());
+    return capture_delay_;
+  }
+  int timing_warnings() {
+    CriticalSectionScoped cs(capture_cs_.get());
+    return timing_warnings_;
+  }
+  VideoCaptureCapability capability() {
+    CriticalSectionScoped cs(capture_cs_.get());
+    return capability_;
+  }
+
+  bool CompareLastFrame(const webrtc::VideoFrame& frame) {
+    CriticalSectionScoped cs(capture_cs_.get());
+    return CompareFrames(last_frame_, frame);
+  }
+
+  bool CompareLastFrame(const webrtc::VideoFrameI420& frame) {
+    CriticalSectionScoped cs(capture_cs_.get());
+    return CompareFrames(frame, last_frame_);
+  }
+
+ private:
+  scoped_ptr<CriticalSectionWrapper> capture_cs_;
+  VideoCaptureCapability capability_;
+  int capture_delay_;
+  WebRtc_Word64 last_render_time_ms_;
+  int incoming_frames_;
+  int timing_warnings_;
+  webrtc::VideoFrame last_frame_;
+};
+
+class TestVideoCaptureFeedBack : public VideoCaptureFeedBack {
+ public:
+  TestVideoCaptureFeedBack() :
+    capture_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+    frame_rate_(0),
+    alarm_(webrtc::Cleared) {
+  }
+
+  virtual void OnCaptureFrameRate(const WebRtc_Word32 id,
+                                  const WebRtc_UWord32 frameRate) {
+    CriticalSectionScoped cs(capture_cs_.get());
+    frame_rate_ = frameRate;
+  }
+
+  virtual void OnNoPictureAlarm(const WebRtc_Word32 id,
+                                const VideoCaptureAlarm reported_alarm) {
+    CriticalSectionScoped cs(capture_cs_.get());
+    alarm_ = reported_alarm;
+  }
+  int frame_rate() {
+    CriticalSectionScoped cs(capture_cs_.get());
+    return frame_rate_;
+
+  }
+  VideoCaptureAlarm alarm() {
+    CriticalSectionScoped cs(capture_cs_.get());
+    return alarm_;
+  }
+
+ private:
+  scoped_ptr<CriticalSectionWrapper> capture_cs_;
+  unsigned int frame_rate_;
+  VideoCaptureAlarm alarm_;
+};
+
+class VideoCaptureTest : public testing::Test {
+ public:
+  VideoCaptureTest() : number_of_devices_(0) {}
+
+  void SetUp() {
+    device_info_.reset(VideoCaptureFactory::CreateDeviceInfo(5));
+    number_of_devices_ = device_info_->NumberOfDevices();
+    ASSERT_GT(number_of_devices_, 0u);
+  }
+
+  webrtc::scoped_refptr<VideoCaptureModule> OpenVideoCaptureDevice(
+      unsigned int device,
+      VideoCaptureDataCallback* callback) {
+    char device_name[256];
+    char unique_name[256];
+
+    EXPECT_EQ(0, device_info_->GetDeviceName(
+        device, device_name, 256, unique_name, 256));
+
+    webrtc::scoped_refptr<VideoCaptureModule> module(
+        VideoCaptureFactory::Create(device, unique_name));
+    if (module.get() == NULL)
+      return NULL;
+
+    EXPECT_FALSE(module->CaptureStarted());
+
+    EXPECT_EQ(0, module->RegisterCaptureDataCallback(*callback));
+    return module;
+  }
+
+  void StartCapture(VideoCaptureModule* capture_module,
+                    VideoCaptureCapability capability) {
+    EXPECT_EQ(0, capture_module->StartCapture(capability));
+    EXPECT_TRUE(capture_module->CaptureStarted());
+
+    VideoCaptureCapability resulting_capability;
+    EXPECT_EQ(0, capture_module->CaptureSettings(resulting_capability));
+    EXPECT_EQ(capability.width, resulting_capability.width);
+    EXPECT_EQ(capability.height, resulting_capability.height);
+  }
+
+  scoped_ptr<VideoCaptureModule::DeviceInfo> device_info_;
+  unsigned int number_of_devices_;
+};
+
+TEST_F(VideoCaptureTest, CreateDelete) {
+  for (int i = 0; i < 5; ++i) {
+    WebRtc_Word64 start_time = TickTime::MillisecondTimestamp();
+    TestVideoCaptureCallback capture_observer;
+    webrtc::scoped_refptr<VideoCaptureModule> module(OpenVideoCaptureDevice(
+        0, &capture_observer));
+    ASSERT_TRUE(module.get() != NULL);
+
+    VideoCaptureCapability capability;
+#ifndef WEBRTC_MAC
+    device_info_->GetCapability(module->CurrentDeviceName(), 0, capability);
+#else
+    capability.width = kTestWidth;
+    capability.height = kTestHeight;
+    capability.maxFPS = kTestFramerate;
+    capability.rawType = webrtc::kVideoUnknown;
+#endif
+    capture_observer.SetExpectedCapability(capability);
+    StartCapture(module.get(), capability);
+
+    // Less than 4s to start the camera.
+    EXPECT_LE(TickTime::MillisecondTimestamp() - start_time, 4000);
+
+    // Make sure 5 frames are captured.
+    EXPECT_TRUE_WAIT(capture_observer.incoming_frames() >= 5, kTimeOut);
+
+    EXPECT_GT(capture_observer.capture_delay(), 0);
+
+    WebRtc_Word64 stop_time = TickTime::MillisecondTimestamp();
+    EXPECT_EQ(0, module->StopCapture());
+    EXPECT_FALSE(module->CaptureStarted());
+
+    // Less than 3s to stop the camera.
+    EXPECT_LE(TickTime::MillisecondTimestamp() - stop_time, 3000);
+  }
+}
+
+TEST_F(VideoCaptureTest, Capabilities) {
+#ifdef WEBRTC_MAC
+  printf("Video capture capabilities are not supported on Mac.\n");
+  return;
+#endif
+
+  TestVideoCaptureCallback capture_observer;
+
+  webrtc::scoped_refptr<VideoCaptureModule> module(OpenVideoCaptureDevice(
+          0, &capture_observer));
+  ASSERT_TRUE(module.get() != NULL);
+
+  int number_of_capabilities = device_info_->NumberOfCapabilities(
+      module->CurrentDeviceName());
+  EXPECT_GT(number_of_capabilities, 0);
+  for (int i = 0; i < number_of_capabilities; ++i) {
+    VideoCaptureCapability capability;
+    EXPECT_EQ(0, device_info_->GetCapability(module->CurrentDeviceName(), i,
+                                             capability));
+    capture_observer.SetExpectedCapability(capability);
+    StartCapture(module.get(), capability);
+    // Make sure 5 frames are captured.
+    EXPECT_TRUE_WAIT(capture_observer.incoming_frames() >= 5, kTimeOut);
+
+    EXPECT_EQ(0, module->StopCapture());
+  }
+}
+
+// NOTE: flaky, crashes sometimes.
+// http://code.google.com/p/webrtc/issues/detail?id=777
+TEST_F(VideoCaptureTest, DISABLED_TestTwoCameras) {
+  if (number_of_devices_ < 2) {
+    printf("There are not two cameras available. Aborting test. \n");
+    return;
+  }
+
+  TestVideoCaptureCallback capture_observer1;
+  webrtc::scoped_refptr<VideoCaptureModule> module1(OpenVideoCaptureDevice(
+          0, &capture_observer1));
+  ASSERT_TRUE(module1.get() != NULL);
+  VideoCaptureCapability capability1;
+#ifndef WEBRTC_MAC
+  device_info_->GetCapability(module1->CurrentDeviceName(), 0, capability1);
+#else
+  capability1.width = kTestWidth;
+  capability1.height = kTestHeight;
+  capability1.maxFPS = kTestFramerate;
+  capability1.rawType = webrtc::kVideoUnknown;
+#endif
+  capture_observer1.SetExpectedCapability(capability1);
+
+  TestVideoCaptureCallback capture_observer2;
+  webrtc::scoped_refptr<VideoCaptureModule> module2(OpenVideoCaptureDevice(
+          1, &capture_observer2));
+  ASSERT_TRUE(module1.get() != NULL);
+
+
+  VideoCaptureCapability capability2;
+#ifndef WEBRTC_MAC
+  device_info_->GetCapability(module2->CurrentDeviceName(), 0, capability2);
+#else
+  capability2.width = kTestWidth;
+  capability2.height = kTestHeight;
+  capability2.maxFPS = kTestFramerate;
+  capability2.rawType = webrtc::kVideoUnknown;
+#endif
+  capture_observer2.SetExpectedCapability(capability2);
+
+  StartCapture(module1.get(), capability1);
+  StartCapture(module2.get(), capability2);
+  EXPECT_TRUE_WAIT(capture_observer1.incoming_frames() >= 5, kTimeOut);
+  EXPECT_TRUE_WAIT(capture_observer2.incoming_frames() >= 5, kTimeOut);
+}
+
+// Test class for testing external capture and capture feedback information
+// such as frame rate and picture alarm.
+class VideoCaptureExternalTest : public testing::Test {
+ public:
+  void SetUp() {
+    capture_module_ = VideoCaptureFactory::Create(0, capture_input_interface_);
+    process_module_ = webrtc::ProcessThread::CreateProcessThread();
+    process_module_->Start();
+    process_module_->RegisterModule(capture_module_);
+
+    VideoCaptureCapability capability;
+    capability.width = kTestWidth;
+    capability.height = kTestHeight;
+    capability.rawType = webrtc::kVideoYV12;
+    capability.maxFPS = kTestFramerate;
+    capture_callback_.SetExpectedCapability(capability);
+
+    test_frame_.VerifyAndAllocate(kTestWidth * kTestHeight * 3 / 2);
+    test_frame_.SetLength(kTestWidth * kTestHeight * 3 / 2);
+    test_frame_.SetHeight(kTestHeight);
+    test_frame_.SetWidth(kTestWidth);
+    SleepMs(1); // Wait 1ms so that two tests can't have the same timestamp.
+    memset(test_frame_.Buffer(), 127, test_frame_.Length());
+
+    EXPECT_EQ(0, capture_module_->RegisterCaptureDataCallback(
+        capture_callback_));
+    EXPECT_EQ(0, capture_module_->RegisterCaptureCallback(capture_feedback_));
+    EXPECT_EQ(0, capture_module_->EnableFrameRateCallback(true));
+    EXPECT_EQ(0, capture_module_->EnableNoPictureAlarm(true));
+  }
+
+  void TearDown() {
+    process_module_->Stop();
+    webrtc::ProcessThread::DestroyProcessThread(process_module_);
+  }
+
+  webrtc::VideoCaptureExternal* capture_input_interface_;
+  webrtc::scoped_refptr<VideoCaptureModule> capture_module_;
+  webrtc::ProcessThread* process_module_;
+  webrtc::VideoFrame test_frame_;
+  TestVideoCaptureCallback capture_callback_;
+  TestVideoCaptureFeedBack capture_feedback_;
+};
+
+// Test input of external video frames.
+TEST_F(VideoCaptureExternalTest , TestExternalCapture) {
+  EXPECT_EQ(0, capture_input_interface_->IncomingFrame(
+      test_frame_.Buffer(), test_frame_.Length(),
+      capture_callback_.capability(), 0));
+  EXPECT_TRUE(capture_callback_.CompareLastFrame(test_frame_));
+}
+
+// Test input of planar I420 frames.
+// NOTE: flaky, sometimes fails on the last CompareLastFrame.
+// http://code.google.com/p/webrtc/issues/detail?id=777
+TEST_F(VideoCaptureExternalTest, DISABLED_TestExternalCaptureI420) {
+  webrtc::VideoFrameI420 frame_i420;
+  frame_i420.width = kTestWidth;
+  frame_i420.height = kTestHeight;
+  frame_i420.y_plane = test_frame_.Buffer();
+  frame_i420.u_plane = frame_i420.y_plane + (kTestWidth * kTestHeight);
+  frame_i420.v_plane = frame_i420.u_plane + ((kTestWidth * kTestHeight) >> 2);
+  frame_i420.y_pitch = kTestWidth;
+  frame_i420.u_pitch = kTestWidth / 2;
+  frame_i420.v_pitch = kTestWidth / 2;
+  EXPECT_EQ(0, capture_input_interface_->IncomingFrameI420(frame_i420, 0));
+  EXPECT_TRUE(capture_callback_.CompareLastFrame(frame_i420));
+
+  // Test with a frame with pitch not equal to width
+  memset(test_frame_.Buffer(), 0xAA, test_frame_.Length());
+  webrtc::VideoFrame aligned_test_frame;
+  int y_pitch = kTestWidth + 2;
+  int u_pitch = kTestWidth / 2 + 1;
+  int v_pitch = u_pitch;
+  aligned_test_frame.VerifyAndAllocate(kTestHeight * y_pitch +
+                                       (kTestHeight / 2) * u_pitch +
+                                       (kTestHeight / 2) * v_pitch);
+  aligned_test_frame.SetLength(aligned_test_frame.Size());
+  memset(aligned_test_frame.Buffer(), 0, aligned_test_frame.Length());
+  // Copy the test_frame_ to aligned_test_frame.
+  int y_width = kTestWidth;
+  int uv_width = kTestWidth / 2;
+  int y_rows = kTestHeight;
+  int uv_rows = kTestHeight / 2;
+  unsigned char* current_pointer = aligned_test_frame.Buffer();
+  unsigned char* y_plane = test_frame_.Buffer();
+  unsigned char* u_plane = y_plane + kTestWidth * kTestHeight;
+  unsigned char* v_plane = u_plane + ((kTestWidth * kTestHeight) >> 2);
+  // Copy Y
+  for (int i = 0; i < y_rows; ++i) {
+    memcpy(current_pointer, y_plane, y_width);
+    // Remove the alignment which ViE doesn't support.
+    current_pointer += y_pitch;
+    y_plane += y_width;
+  }
+  // Copy U
+  for (int i = 0; i < uv_rows; ++i) {
+    memcpy(current_pointer, u_plane, uv_width);
+    // Remove the alignment which ViE doesn't support.
+    current_pointer += u_pitch;
+    u_plane += uv_width;
+  }
+  // Copy V
+  for (int i = 0; i < uv_rows; ++i) {
+    memcpy(current_pointer, v_plane, uv_width);
+    // Remove the alignment which ViE doesn't support.
+    current_pointer += v_pitch;
+    v_plane += uv_width;
+  }
+  frame_i420.width = kTestWidth;
+  frame_i420.height = kTestHeight;
+  frame_i420.y_plane = aligned_test_frame.Buffer();
+  frame_i420.u_plane = frame_i420.y_plane + (y_pitch * y_rows);
+  frame_i420.v_plane = frame_i420.u_plane + (u_pitch * uv_rows);
+  frame_i420.y_pitch = y_pitch;
+  frame_i420.u_pitch = u_pitch;
+  frame_i420.v_pitch = v_pitch;
+
+  EXPECT_EQ(0, capture_input_interface_->IncomingFrameI420(frame_i420, 0));
+  EXPECT_TRUE(capture_callback_.CompareLastFrame(test_frame_));
+}
+
+// Test frame rate and no picture alarm.
+TEST_F(VideoCaptureExternalTest , FrameRate) {
+  WebRtc_Word64 testTime = 3;
+  TickTime startTime = TickTime::Now();
+
+  while ((TickTime::Now() - startTime).Milliseconds() < testTime * 1000) {
+    EXPECT_EQ(0, capture_input_interface_->IncomingFrame(
+        test_frame_.Buffer(), test_frame_.Length(),
+        capture_callback_.capability(), 0));
+    SleepMs(100);
+  }
+  EXPECT_TRUE(capture_feedback_.frame_rate() >= 8 &&
+              capture_feedback_.frame_rate() <= 10);
+  SleepMs(500);
+  EXPECT_EQ(webrtc::Raised, capture_feedback_.alarm());
+
+  startTime = TickTime::Now();
+  while ((TickTime::Now() - startTime).Milliseconds() < testTime * 1000) {
+    EXPECT_EQ(0, capture_input_interface_->IncomingFrame(
+        test_frame_.Buffer(), test_frame_.Length(),
+        capture_callback_.capability(), 0));
+    SleepMs(1000 / 30);
+  }
+  EXPECT_EQ(webrtc::Cleared, capture_feedback_.alarm());
+  // Frame rate might be less than 33 since we have paused providing
+  // frames for a while.
+  EXPECT_TRUE(capture_feedback_.frame_rate() >= 25 &&
+              capture_feedback_.frame_rate() <= 33);
+}
+
+// Test start image
+TEST_F(VideoCaptureExternalTest , StartImage) {
+  EXPECT_EQ(0, capture_module_->StartSendImage(
+      test_frame_, 10));
+
+  EXPECT_TRUE_WAIT(capture_callback_.incoming_frames() == 5, kTimeOut);
+  EXPECT_EQ(0, capture_module_->StopSendImage());
+
+  SleepMs(200);
+  // Test that no more start images have arrived.
+  EXPECT_TRUE(capture_callback_.incoming_frames() >= 4 &&
+              capture_callback_.incoming_frames() <= 5);
+  EXPECT_TRUE(capture_callback_.CompareLastFrame(test_frame_));
+}
+
diff --git a/src/modules/video_coding/codecs/OWNERS b/src/modules/video_coding/codecs/OWNERS
new file mode 100644
index 0000000..7183cf2
--- /dev/null
+++ b/src/modules/video_coding/codecs/OWNERS
@@ -0,0 +1,4 @@
+stefan@webrtc.org
+mikhal@webrtc.org
+marpan@webrtc.org
+henrik.lundin@webrtc.org
diff --git a/src/modules/video_coding/codecs/i420/main/interface/i420.h b/src/modules/video_coding/codecs/i420/main/interface/i420.h
new file mode 100644
index 0000000..ea740c5
--- /dev/null
+++ b/src/modules/video_coding/codecs/i420/main/interface/i420.h
@@ -0,0 +1,151 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_H_
+
+#include "video_codec_interface.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+class I420Encoder : public VideoEncoder {
+public:
+
+  I420Encoder();
+
+  virtual ~I420Encoder();
+
+// Initialize the encoder with the information from the VideoCodec.
+//
+// Input:
+//          - codecSettings     : Codec settings.
+//          - numberOfCores     : Number of cores available for the encoder.
+//          - maxPayloadSize    : The maximum size each payload is allowed
+//                                to have. Usually MTU - overhead.
+//
+// Return value                 : WEBRTC_VIDEO_CODEC_OK if OK.
+//                                <0 - Error
+  virtual int InitEncode(const VideoCodec* codecSettings,
+                         int /*numberOfCores*/,
+                         uint32_t /*maxPayloadSize*/);
+
+// "Encode" an I420 image (as a part of a video stream). The encoded image
+// will be returned to the user via the encode complete callback.
+//
+// Input:
+//          - inputImage        : Image to be encoded.
+//          - codecSpecificInfo : Pointer to codec specific data.
+//          - frameType         : Frame type to be sent (Key /Delta).
+//
+// Return value                 : WEBRTC_VIDEO_CODEC_OK if OK.
+//                                <0 - Error
+  virtual int Encode(const VideoFrame& inputImage,
+                     const CodecSpecificInfo* /*codecSpecificInfo*/,
+                     const VideoFrameType /*frameTypes*/);
+
+// Register an encode complete callback object.
+//
+// Input:
+//          - callback         : Callback object which handles encoded images.
+//
+// Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+  virtual int RegisterEncodeCompleteCallback(EncodedImageCallback* callback);
+
+// Free encoder memory.
+//
+// Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+  virtual int Release();
+
+  virtual int SetRates(uint32_t /*newBitRate*/, uint32_t /*frameRate*/)
+    {return WEBRTC_VIDEO_CODEC_OK;}
+
+  virtual int SetChannelParameters(uint32_t /*packetLoss*/, int /*rtt*/)
+    {return WEBRTC_VIDEO_CODEC_OK;}
+
+  virtual int CodecConfigParameters(uint8_t* /*buffer*/, int /*size*/)
+    {return WEBRTC_VIDEO_CODEC_OK;}
+
+private:
+  bool                     _inited;
+  EncodedImage             _encodedImage;
+  EncodedImageCallback*    _encodedCompleteCallback;
+
+}; // end of WebRtcI420DEncoder class
+
+class I420Decoder : public VideoDecoder {
+public:
+
+  I420Decoder();
+
+  virtual ~I420Decoder();
+
+// Initialize the decoder.
+// The user must notify the codec of width and height values.
+//
+// Return value         :  WEBRTC_VIDEO_CODEC_OK.
+//                        <0 - Errors
+  virtual int InitDecode(const VideoCodec* codecSettings,
+                         int /*numberOfCores*/);
+
+  virtual int SetCodecConfigParameters(const uint8_t* /*buffer*/, int /*size*/)
+    {return WEBRTC_VIDEO_CODEC_OK;};
+
+// Decode encoded image (as a part of a video stream). The decoded image
+// will be returned to the user through the decode complete callback.
+//
+// Input:
+//          - inputImage        : Encoded image to be decoded
+//          - missingFrames     : True if one or more frames have been lost
+//                                since the previous decode call.
+//          - codecSpecificInfo : pointer to specific codec data
+//          - renderTimeMs      : Render time in Ms
+//
+// Return value                 : WEBRTC_VIDEO_CODEC_OK if OK
+//                                 <0 - Error
+  virtual int Decode(const EncodedImage& inputImage,
+                     bool missingFrames,
+                     const RTPFragmentationHeader* /*fragmentation*/,
+                     const CodecSpecificInfo* /*codecSpecificInfo*/,
+                     int64_t /*renderTimeMs*/);
+
+// Register a decode complete callback object.
+//
+// Input:
+//          - callback         : Callback object which handles decoded images.
+//
+// Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+  virtual int RegisterDecodeCompleteCallback(DecodedImageCallback* callback);
+
+// Free decoder memory.
+//
+// Return value                : WEBRTC_VIDEO_CODEC_OK if OK.
+//                                  <0 - Error
+  virtual int Release();
+
+// Reset decoder state and prepare for a new call.
+//
+// Return value         :  WEBRTC_VIDEO_CODEC_OK.
+//                          <0 - Error
+  virtual int Reset();
+
+private:
+
+  VideoFrame                  _decodedImage;
+  int                         _width;
+  int                         _height;
+  bool                        _inited;
+  DecodedImageCallback*       _decodeCompleteCallback;
+
+}; // End of WebRtcI420Decoder class.
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_I420_H_
diff --git a/src/modules/video_coding/codecs/i420/main/source/i420.cc b/src/modules/video_coding/codecs/i420/main/source/i420.cc
new file mode 100644
index 0000000..38cbbb8
--- /dev/null
+++ b/src/modules/video_coding/codecs/i420/main/source/i420.cc
@@ -0,0 +1,201 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/codecs/i420/main/interface/i420.h"
+
+#include <string.h>
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+
+
+namespace webrtc
+{
+
+I420Encoder::I420Encoder():
+_inited(false),
+_encodedImage(),
+_encodedCompleteCallback(NULL)
+{}
+
+I420Encoder::~I420Encoder() {
+  _inited = false;
+  if (_encodedImage._buffer != NULL) {
+    delete [] _encodedImage._buffer;
+    _encodedImage._buffer = NULL;
+  }
+}
+
+int I420Encoder::Release() {
+  // Should allocate an encoded frame and then release it here, for that we
+  // actually need an init flag.
+  if (_encodedImage._buffer != NULL) {
+    delete [] _encodedImage._buffer;
+    _encodedImage._buffer = NULL;
+  }
+  _inited = false;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int I420Encoder::InitEncode(const VideoCodec* codecSettings,
+                            int /*numberOfCores*/,
+                            uint32_t /*maxPayloadSize */) {
+  if (codecSettings == NULL) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  if (codecSettings->width < 1 || codecSettings->height < 1) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+
+  // Allocating encoded memory.
+  if (_encodedImage._buffer != NULL) {
+    delete [] _encodedImage._buffer;
+    _encodedImage._buffer = NULL;
+    _encodedImage._size = 0;
+  }
+  const uint32_t newSize = CalcBufferSize(kI420,
+                                          codecSettings->width,
+                                          codecSettings->height);
+  uint8_t* newBuffer = new uint8_t[newSize];
+  if (newBuffer == NULL) {
+    return WEBRTC_VIDEO_CODEC_MEMORY;
+  }
+  _encodedImage._size = newSize;
+  _encodedImage._buffer = newBuffer;
+
+  // If no memory allocation, no point to init.
+  _inited = true;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+
+
+int I420Encoder::Encode(const VideoFrame& inputImage,
+                    const CodecSpecificInfo* /*codecSpecificInfo*/,
+                    const VideoFrameType /*frameType*/) {
+  if (!_inited) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  if (_encodedCompleteCallback == NULL) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+
+  _encodedImage._frameType = kKeyFrame; // No coding.
+  _encodedImage._timeStamp = inputImage.TimeStamp();
+  _encodedImage._encodedHeight = inputImage.Height();
+  _encodedImage._encodedWidth = inputImage.Width();
+  if (inputImage.Length() > _encodedImage._size) {
+
+    // Allocating encoded memory.
+    if (_encodedImage._buffer != NULL) {
+      delete [] _encodedImage._buffer;
+      _encodedImage._buffer = NULL;
+      _encodedImage._size = 0;
+    }
+    const uint32_t newSize = CalcBufferSize(kI420,
+                                            _encodedImage._encodedWidth,
+                                            _encodedImage._encodedHeight);
+    uint8_t* newBuffer = new uint8_t[newSize];
+    if (newBuffer == NULL) {
+      return WEBRTC_VIDEO_CODEC_MEMORY;
+    }
+    _encodedImage._size = newSize;
+    _encodedImage._buffer = newBuffer;
+  }
+  memcpy(_encodedImage._buffer, inputImage.Buffer(), inputImage.Length());
+  _encodedImage._length = inputImage.Length();
+  _encodedCompleteCallback->Encoded(_encodedImage);
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+
+int
+I420Encoder::RegisterEncodeCompleteCallback(EncodedImageCallback* callback) {
+  _encodedCompleteCallback = callback;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+
+I420Decoder::I420Decoder():
+_decodedImage(),
+_width(0),
+_height(0),
+_inited(false),
+_decodeCompleteCallback(NULL)
+{}
+
+I420Decoder::~I420Decoder() {
+  Release();
+}
+
+int
+I420Decoder::Reset() {
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+
+int
+I420Decoder::InitDecode(const VideoCodec* codecSettings,
+                        int /*numberOfCores */) {
+  if (codecSettings == NULL) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  } else if (codecSettings->width < 1 || codecSettings->height < 1) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  _width = codecSettings->width;
+  _height = codecSettings->height;
+  _inited = true;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int
+I420Decoder::Decode(const EncodedImage& inputImage,
+                    bool /*missingFrames*/,
+                    const RTPFragmentationHeader* /*fragmentation*/,
+                    const CodecSpecificInfo* /*codecSpecificInfo*/,
+                    int64_t /*renderTimeMs*/) {
+  if (inputImage._buffer == NULL) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  if (_decodeCompleteCallback == NULL) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  if (inputImage._length <= 0) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  if (!_inited) {
+   return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+
+  // Set decoded image parameters.
+  if (_decodedImage.CopyFrame(inputImage._length, inputImage._buffer) < 0) {
+    return WEBRTC_VIDEO_CODEC_MEMORY;
+  }
+  _decodedImage.SetHeight(_height);
+  _decodedImage.SetWidth(_width);
+  _decodedImage.SetTimeStamp(inputImage._timeStamp);
+
+  _decodeCompleteCallback->Decoded(_decodedImage);
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int
+I420Decoder::RegisterDecodeCompleteCallback(DecodedImageCallback* callback) {
+  _decodeCompleteCallback = callback;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int
+I420Decoder::Release() {
+  _decodedImage.Free();
+  _inited = false;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+}
diff --git a/src/modules/video_coding/codecs/i420/main/source/i420.gypi b/src/modules/video_coding/codecs/i420/main/source/i420.gypi
new file mode 100644
index 0000000..af13f8d
--- /dev/null
+++ b/src/modules/video_coding/codecs/i420/main/source/i420.gypi
@@ -0,0 +1,40 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'webrtc_i420',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../interface',
+        '../../../interface',
+        '../../../../../../common_video/interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../interface',
+          '../../../../../../common_video/interface',
+        ],
+      },
+      'sources': [
+        '../interface/i420.h',
+        'i420.cc',
+      ],
+    },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h b/src/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h
new file mode 100644
index 0000000..7b60cf5
--- /dev/null
+++ b/src/modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h
@@ -0,0 +1,89 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_MOCK_MOCK_VIDEO_CODEC_INTERFACE_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_MOCK_MOCK_VIDEO_CODEC_INTERFACE_H_
+
+#include <string>
+
+#include "gmock/gmock.h"
+#include "modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+class MockEncodedImageCallback : public EncodedImageCallback {
+ public:
+  MOCK_METHOD3(Encoded,
+               WebRtc_Word32(EncodedImage& encodedImage,
+                             const CodecSpecificInfo* codecSpecificInfo,
+                             const RTPFragmentationHeader* fragmentation));
+};
+
+class MockVideoEncoder : public VideoEncoder {
+ public:
+  MOCK_CONST_METHOD2(Version,
+                     WebRtc_Word32(WebRtc_Word8 *version,
+                                   WebRtc_Word32 length));
+  MOCK_METHOD3(InitEncode,
+               WebRtc_Word32(const VideoCodec* codecSettings,
+                             WebRtc_Word32 numberOfCores,
+                             WebRtc_UWord32 maxPayloadSize));
+  MOCK_METHOD3(Encode,
+               WebRtc_Word32(const VideoFrame& inputImage,
+                             const CodecSpecificInfo* codecSpecificInfo,
+                             const VideoFrameType frameType));
+  MOCK_METHOD1(RegisterEncodeCompleteCallback,
+               WebRtc_Word32(EncodedImageCallback* callback));
+  MOCK_METHOD0(Release, WebRtc_Word32());
+  MOCK_METHOD0(Reset, WebRtc_Word32());
+  MOCK_METHOD2(SetChannelParameters, WebRtc_Word32(WebRtc_UWord32 packetLoss,
+                                                   int rtt));
+  MOCK_METHOD2(SetRates,
+               WebRtc_Word32(WebRtc_UWord32 newBitRate,
+                             WebRtc_UWord32 frameRate));
+  MOCK_METHOD1(SetPeriodicKeyFrames, WebRtc_Word32(bool enable));
+  MOCK_METHOD2(CodecConfigParameters,
+               WebRtc_Word32(WebRtc_UWord8* /*buffer*/, WebRtc_Word32));
+};
+
+class MockDecodedImageCallback : public DecodedImageCallback {
+ public:
+  MOCK_METHOD1(Decoded,
+               WebRtc_Word32(VideoFrame& decodedImage));
+  MOCK_METHOD1(ReceivedDecodedReferenceFrame,
+               WebRtc_Word32(const WebRtc_UWord64 pictureId));
+  MOCK_METHOD1(ReceivedDecodedFrame,
+               WebRtc_Word32(const WebRtc_UWord64 pictureId));
+};
+
+class MockVideoDecoder : public VideoDecoder {
+ public:
+  MOCK_METHOD2(InitDecode,
+      WebRtc_Word32(const VideoCodec* codecSettings,
+                    WebRtc_Word32 numberOfCores));
+  MOCK_METHOD5(Decode,
+               WebRtc_Word32(const EncodedImage& inputImage,
+                             bool missingFrames,
+                             const RTPFragmentationHeader* fragmentation,
+                             const CodecSpecificInfo* codecSpecificInfo,
+                             WebRtc_Word64 renderTimeMs));
+  MOCK_METHOD1(RegisterDecodeCompleteCallback,
+               WebRtc_Word32(DecodedImageCallback* callback));
+  MOCK_METHOD0(Release, WebRtc_Word32());
+  MOCK_METHOD0(Reset, WebRtc_Word32());
+  MOCK_METHOD2(SetCodecConfigParameters,
+               WebRtc_Word32(const WebRtc_UWord8* /*buffer*/, WebRtc_Word32));
+  MOCK_METHOD0(Copy, VideoDecoder*());
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_MOCK_MOCK_VIDEO_CODEC_INTERFACE_H_
diff --git a/src/modules/video_coding/codecs/interface/video_codec_interface.h b/src/modules/video_coding/codecs/interface/video_codec_interface.h
new file mode 100644
index 0000000..c107b18
--- /dev/null
+++ b/src/modules/video_coding/codecs/interface/video_codec_interface.h
@@ -0,0 +1,247 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_CODEC_INTERFACE_H
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_CODEC_INTERFACE_H
+
+#include "common_types.h"
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/codecs/interface/video_error_codes.h"
+#include "common_video/interface/video_image.h"
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+class RTPFragmentationHeader; // forward declaration
+
+// Note: if any pointers are added to this struct, it must be fitted
+// with a copy-constructor. See below.
+struct CodecSpecificInfoVP8
+{
+    bool             hasReceivedSLI;
+    WebRtc_UWord8    pictureIdSLI;
+    bool             hasReceivedRPSI;
+    WebRtc_UWord64   pictureIdRPSI;
+    WebRtc_Word16    pictureId;         // negative value to skip pictureId
+    bool             nonReference;
+    WebRtc_UWord8    simulcastIdx;
+    WebRtc_UWord8    temporalIdx;
+    bool             layerSync;
+    int              tl0PicIdx;         // Negative value to skip tl0PicIdx
+    WebRtc_Word8     keyIdx;            // negative value to skip keyIdx
+};
+
+union CodecSpecificInfoUnion
+{
+    CodecSpecificInfoVP8       VP8;
+};
+
+// Note: if any pointers are added to this struct or its sub-structs, it
+// must be fitted with a copy-constructor. This is because it is copied
+// in the copy-constructor of VCMEncodedFrame.
+struct CodecSpecificInfo
+{
+    VideoCodecType   codecType;
+    CodecSpecificInfoUnion codecSpecific;
+};
+
+class EncodedImageCallback
+{
+public:
+    virtual ~EncodedImageCallback() {};
+
+    // Callback function which is called when an image has been encoded.
+    //
+    // Input:
+    //          - encodedImage         : The encoded image
+    //
+    // Return value                    : > 0,   signals to the caller that one or more future frames
+    //                                          should be dropped to keep bit rate or frame rate.
+    //                                   = 0,   if OK.
+    //                                   < 0,   on error.
+    virtual WebRtc_Word32
+    Encoded(EncodedImage& encodedImage,
+            const CodecSpecificInfo* codecSpecificInfo = NULL,
+            const RTPFragmentationHeader* fragmentation = NULL) = 0;
+};
+
+class VideoEncoder
+{
+public:
+    virtual ~VideoEncoder() {};
+
+    // Initialize the encoder with the information from the VideoCodec.
+    //
+    // Input:
+    //          - codecSettings     : Codec settings
+    //          - numberOfCores     : Number of cores available for the encoder
+    //          - maxPayloadSize    : The maximum size each payload is allowed
+    //                                to have. Usually MTU - overhead.
+    //
+    // Return value                 : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 InitEncode(const VideoCodec* codecSettings, WebRtc_Word32 numberOfCores, WebRtc_UWord32 maxPayloadSize) = 0;
+
+    // Encode an I420 image (as a part of a video stream). The encoded image
+    // will be returned to the user through the encode complete callback.
+    //
+    // Input:
+    //          - inputImage        : Image to be encoded
+    //          - codecSpecificInfo : Pointer to codec specific data
+    //          - frameType         : The frame type to encode
+    //
+    // Return value                 : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 Encode(const VideoFrame& inputImage,
+                                 const CodecSpecificInfo* codecSpecificInfo,
+                                 const VideoFrameType frameType) = 0;
+
+    // Register an encode complete callback object.
+    //
+    // Input:
+    //          - callback         : Callback object which handles encoded images.
+    //
+    // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 RegisterEncodeCompleteCallback(EncodedImageCallback* callback) = 0;
+
+    // Free encoder memory.
+    //
+    // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 Release() = 0;
+
+    // Inform the encoder about the packet loss and round trip time on the
+    // network used to decide the best pattern and signaling.
+    //
+    //          - packetLoss       : Fraction lost (loss rate in percent =
+    //                               100 * packetLoss / 255)
+    //          - rtt              : Round-trip time in milliseconds
+    //
+    // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 SetChannelParameters(WebRtc_UWord32 packetLoss,
+                                               int rtt) = 0;
+
+    // Inform the encoder about the new target bit rate.
+    //
+    //          - newBitRate       : New target bit rate
+    //          - frameRate        : The target frame rate
+    //
+    // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 SetRates(WebRtc_UWord32 newBitRate, WebRtc_UWord32 frameRate) = 0;
+
+    // Use this function to enable or disable periodic key frames. Can be useful for codecs
+    // which have other ways of stopping error propagation.
+    //
+    //          - enable           : Enable or disable periodic key frames
+    //
+    // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 SetPeriodicKeyFrames(bool enable) { return WEBRTC_VIDEO_CODEC_ERROR; }
+
+    // Codec configuration data to send out-of-band, i.e. in SIP call setup
+    //
+    //          - buffer           : Buffer pointer to where the configuration data
+    //                               should be stored
+    //          - size             : The size of the buffer in bytes
+    //
+    // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 CodecConfigParameters(WebRtc_UWord8* /*buffer*/, WebRtc_Word32 /*size*/) { return WEBRTC_VIDEO_CODEC_ERROR; }
+};
+
+class DecodedImageCallback
+{
+public:
+    virtual ~DecodedImageCallback() {};
+
+    // Callback function which is called when an image has been decoded.
+    //
+    // Input:
+    //          - decodedImage         : The decoded image.
+    //
+    // Return value                    : 0 if OK, < 0 otherwise.
+    virtual WebRtc_Word32 Decoded(VideoFrame& decodedImage) = 0;
+
+    virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId) {return -1;}
+
+    virtual WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 pictureId) {return -1;}
+};
+
+class VideoDecoder
+{
+public:
+    virtual ~VideoDecoder() {};
+
+    // Initialize the decoder with the information from the VideoCodec.
+    //
+    // Input:
+    //          - inst              : Codec settings
+    //          - numberOfCores     : Number of cores available for the decoder
+    //
+    // Return value                 : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 InitDecode(const VideoCodec* codecSettings, WebRtc_Word32 numberOfCores) = 0;
+
+    // Decode encoded image (as a part of a video stream). The decoded image
+    // will be returned to the user through the decode complete callback.
+    //
+    // Input:
+    //          - inputImage        : Encoded image to be decoded
+    //          - missingFrames     : True if one or more frames have been lost
+    //                                since the previous decode call.
+    //          - fragmentation     : Specifies where the encoded frame can be
+    //                                split into separate fragments. The meaning
+    //                                of fragment is codec specific, but often
+    //                                means that each fragment is decodable by
+    //                                itself.
+    //          - codecSpecificInfo : Pointer to codec specific data
+    //          - renderTimeMs      : System time to render in milliseconds. Only
+    //                                used by decoders with internal rendering.
+    //
+    // Return value                 : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32
+    Decode(const EncodedImage& inputImage,
+           bool missingFrames,
+           const RTPFragmentationHeader* fragmentation,
+           const CodecSpecificInfo* codecSpecificInfo = NULL,
+           WebRtc_Word64 renderTimeMs = -1) = 0;
+
+    // Register an decode complete callback object.
+    //
+    // Input:
+    //          - callback         : Callback object which handles decoded images.
+    //
+    // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 RegisterDecodeCompleteCallback(DecodedImageCallback* callback) = 0;
+
+    // Free decoder memory.
+    //
+    // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 Release() = 0;
+
+    // Reset decoder state and prepare for a new call.
+    //
+    // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 Reset() = 0;
+
+    // Codec configuration data sent out-of-band, i.e. in SIP call setup
+    //
+    // Input/Output:
+    //          - buffer           : Buffer pointer to the configuration data
+    //          - size             : The size of the configuration data in
+    //                               bytes
+    //
+    // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+    virtual WebRtc_Word32 SetCodecConfigParameters(const WebRtc_UWord8* /*buffer*/, WebRtc_Word32 /*size*/) { return WEBRTC_VIDEO_CODEC_ERROR; }
+
+    // Create a copy of the codec and its internal state.
+    //
+    // Return value                : A copy of the instance if OK, NULL otherwise.
+    virtual VideoDecoder* Copy() { return NULL; }
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_CODEC_INTERFACE_H
diff --git a/src/modules/video_coding/codecs/interface/video_error_codes.h b/src/modules/video_coding/codecs/interface/video_error_codes.h
new file mode 100644
index 0000000..dfa3f53
--- /dev/null
+++ b/src/modules/video_coding/codecs/interface/video_error_codes.h
@@ -0,0 +1,30 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_ERROR_CODES_H
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_ERROR_CODES_H
+
+// NOTE: in sync with video_coding_module_defines.h
+
+// Define return values
+
+#define WEBRTC_VIDEO_CODEC_REQUEST_SLI 2
+#define WEBRTC_VIDEO_CODEC_NO_OUTPUT 1
+#define WEBRTC_VIDEO_CODEC_OK 0
+#define WEBRTC_VIDEO_CODEC_ERROR -1
+#define WEBRTC_VIDEO_CODEC_LEVEL_EXCEEDED -2
+#define WEBRTC_VIDEO_CODEC_MEMORY -3
+#define WEBRTC_VIDEO_CODEC_ERR_PARAMETER -4
+#define WEBRTC_VIDEO_CODEC_ERR_SIZE -5
+#define WEBRTC_VIDEO_CODEC_TIMEOUT -6
+#define WEBRTC_VIDEO_CODEC_UNINITIALIZED -7
+#define WEBRTC_VIDEO_CODEC_ERR_REQUEST_SLI -12
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_INTERFACE_VIDEO_ERROR_CODES_H
diff --git a/src/modules/video_coding/codecs/test/mock/mock_packet_manipulator.h b/src/modules/video_coding/codecs/test/mock/mock_packet_manipulator.h
new file mode 100644
index 0000000..57d21ca
--- /dev/null
+++ b/src/modules/video_coding/codecs/test/mock/mock_packet_manipulator.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_MOCK_MOCK_PACKET_MANIPULATOR_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_MOCK_MOCK_PACKET_MANIPULATOR_H_
+
+#include "modules/video_coding/codecs/test/packet_manipulator.h"
+
+#include <string>
+
+#include "common_video/interface/video_image.h"
+#include "gmock/gmock.h"
+#include "typedefs.h"
+
+namespace webrtc {
+namespace test {
+
+class MockPacketManipulator : public PacketManipulator {
+ public:
+  MOCK_METHOD1(ManipulatePackets, int(webrtc::EncodedImage* encoded_image));
+};
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_MOCK_MOCK_PACKET_MANIPULATOR_H_
diff --git a/src/modules/video_coding/codecs/test/packet_manipulator.cc b/src/modules/video_coding/codecs/test/packet_manipulator.cc
new file mode 100644
index 0000000..acdb2e5
--- /dev/null
+++ b/src/modules/video_coding/codecs/test/packet_manipulator.cc
@@ -0,0 +1,111 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/codecs/test/packet_manipulator.h"
+
+#include <cassert>
+#include <cstdio>
+
+namespace webrtc {
+namespace test {
+
+PacketManipulatorImpl::PacketManipulatorImpl(PacketReader* packet_reader,
+                                             const NetworkingConfig& config,
+                                             bool verbose)
+    : packet_reader_(packet_reader),
+      config_(config),
+      active_burst_packets_(0),
+      critsect_(CriticalSectionWrapper::CreateCriticalSection()),
+      random_seed_(1),
+      verbose_(verbose) {
+  assert(packet_reader);
+}
+
+PacketManipulatorImpl::~PacketManipulatorImpl() {
+  delete critsect_;
+}
+
+int PacketManipulatorImpl::ManipulatePackets(
+    webrtc::EncodedImage* encoded_image) {
+  assert(encoded_image);
+  int nbr_packets_dropped = 0;
+  // There's no need to build a copy of the image data since viewing an
+  // EncodedImage object, setting the length to a new lower value represents
+  // that everything is dropped after that position in the byte array.
+  // EncodedImage._size is the allocated bytes.
+  // EncodedImage._length is how many that are filled with data.
+  int new_length = 0;
+  packet_reader_->InitializeReading(encoded_image->_buffer,
+                                    encoded_image->_length,
+                                    config_.packet_size_in_bytes);
+  WebRtc_UWord8* packet = NULL;
+  int nbr_bytes_to_read;
+  // keep track of if we've lost any packets, since then we shall loose
+  // the remains of the current frame:
+  bool packet_loss_has_occurred = false;
+  while ((nbr_bytes_to_read = packet_reader_->NextPacket(&packet)) > 0) {
+    // Check if we're currently in a packet loss burst that is not completed:
+    if (active_burst_packets_ > 0) {
+      active_burst_packets_--;
+      nbr_packets_dropped++;
+    } else if (RandomUniform() < config_.packet_loss_probability ||
+        packet_loss_has_occurred) {
+      packet_loss_has_occurred = true;
+      nbr_packets_dropped++;
+      if (config_.packet_loss_mode == kBurst) {
+        // Initiate a new burst
+        active_burst_packets_ = config_.packet_loss_burst_length - 1;
+      }
+    } else {
+      new_length += nbr_bytes_to_read;
+    }
+  }
+  encoded_image->_length = new_length;
+  if (nbr_packets_dropped > 0) {
+    // Must set completeFrame to false to inform the decoder about this:
+    encoded_image->_completeFrame = false;
+    if (verbose_) {
+      printf("Dropped %d packets for frame %d (frame length: %d)\n",
+             nbr_packets_dropped, encoded_image->_timeStamp,
+             encoded_image->_length);
+    }
+  }
+  return nbr_packets_dropped;
+}
+
+void PacketManipulatorImpl::InitializeRandomSeed(unsigned int seed) {
+  random_seed_ = seed;
+}
+
+inline double PacketManipulatorImpl::RandomUniform() {
+  // Use the previous result as new seed before each rand() call. Doing this
+  // it doesn't matter if other threads are calling rand() since we'll always
+  // get the same behavior as long as we're using a fixed initial seed.
+  critsect_->Enter();
+  srand(random_seed_);
+  random_seed_ = std::rand();
+  critsect_->Leave();
+  return (random_seed_ + 1.0)/(RAND_MAX + 1.0);
+}
+
+const char* PacketLossModeToStr(PacketLossMode e) {
+  switch (e) {
+    case kUniform:
+      return "Uniform";
+    case kBurst:
+      return "Burst";
+    default:
+      assert(false);
+      return "Unknown";
+  }
+}
+
+}  // namespace test
+}  // namespace webrtcc
diff --git a/src/modules/video_coding/codecs/test/packet_manipulator.h b/src/modules/video_coding/codecs/test/packet_manipulator.h
new file mode 100644
index 0000000..e3891cb
--- /dev/null
+++ b/src/modules/video_coding/codecs/test/packet_manipulator.h
@@ -0,0 +1,113 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_PACKET_MANIPULATOR_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_PACKET_MANIPULATOR_H_
+
+#include <cstdlib>
+
+#include "modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "testsupport/packet_reader.h"
+
+namespace webrtc {
+namespace test {
+
+// Which mode the packet loss shall be performed according to.
+enum PacketLossMode {
+  // Drops packets with a configured probability independently for each packet
+  kUniform,
+  // Drops packets similar to uniform but when a packet is being dropped,
+  // the number of lost packets in a row is equal to the configured burst
+  // length.
+  kBurst
+};
+// Returns a string representation of the enum value.
+const char* PacketLossModeToStr(PacketLossMode e);
+
+// Contains configurations related to networking and simulation of
+// scenarios caused by network interference.
+struct NetworkingConfig {
+  NetworkingConfig()
+  : packet_size_in_bytes(1500), max_payload_size_in_bytes(1440),
+    packet_loss_mode(kUniform), packet_loss_probability(0.0),
+    packet_loss_burst_length(1) {
+  }
+
+  // Packet size in bytes. Default: 1500 bytes.
+  int packet_size_in_bytes;
+
+  // Encoder specific setting of maximum size in bytes of each payload.
+  // Default: 1440 bytes.
+  int max_payload_size_in_bytes;
+
+  // Packet loss mode. Two different packet loss models are supported:
+  // uniform or burst. This setting has no effect unless
+  // packet_loss_probability is >0.
+  // Default: uniform.
+  PacketLossMode packet_loss_mode;
+
+  // Packet loss probability. A value between 0.0 and 1.0 that defines the
+  // probability of a packet being lost. 0.1 means 10% and so on.
+  // Default: 0 (no loss).
+  double packet_loss_probability;
+
+  // Packet loss burst length. Defines how many packets will be lost in a burst
+  // when a packet has been decided to be lost. Must be >=1. Default: 1.
+  int packet_loss_burst_length;
+};
+
+// Class for simulating packet loss on the encoded frame data.
+// When a packet loss has occurred in a frame, the remaining data in that
+// frame is lost (even if burst length is only a single packet).
+// TODO(kjellander): Support discarding only individual packets in the frame
+// when CL 172001 has been submitted. This also requires a correct
+// fragmentation header to be passed to the decoder.
+//
+// To get a repeatable packet drop pattern, re-initialize the random seed
+// using InitializeRandomSeed before each test run.
+class PacketManipulator {
+ public:
+  virtual ~PacketManipulator() {}
+
+  // Manipulates the data of the encoded_image to simulate parts being lost
+  // during transport.
+  // If packets are dropped from frame data, the completedFrame field will be
+  // set to false.
+  // Returns the number of packets being dropped.
+  virtual int
+    ManipulatePackets(webrtc::EncodedImage* encoded_image) = 0;
+};
+
+class PacketManipulatorImpl : public PacketManipulator {
+ public:
+  PacketManipulatorImpl(PacketReader* packet_reader,
+                        const NetworkingConfig& config,
+                        bool verbose);
+  virtual ~PacketManipulatorImpl();
+  virtual int ManipulatePackets(webrtc::EncodedImage* encoded_image);
+  virtual void InitializeRandomSeed(unsigned int seed);
+ protected:
+  // Returns a uniformly distributed random value between 0.0 and 1.0
+  virtual double RandomUniform();
+ private:
+  PacketReader* packet_reader_;
+  const NetworkingConfig& config_;
+  // Used to simulate a burst over several frames.
+  int active_burst_packets_;
+  CriticalSectionWrapper* critsect_;
+  unsigned int random_seed_;
+  bool verbose_;
+};
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_PACKET_MANIPULATOR_H_
diff --git a/src/modules/video_coding/codecs/test/packet_manipulator_unittest.cc b/src/modules/video_coding/codecs/test/packet_manipulator_unittest.cc
new file mode 100644
index 0000000..a5d8bc3
--- /dev/null
+++ b/src/modules/video_coding/codecs/test/packet_manipulator_unittest.cc
@@ -0,0 +1,153 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/codecs/test/packet_manipulator.h"
+
+#include <queue>
+
+#include "gtest/gtest.h"
+#include "modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "modules/video_coding/codecs/test/predictive_packet_manipulator.h"
+#include "testsupport/unittest_utils.h"
+#include "typedefs.h"
+
+namespace webrtc {
+namespace test {
+
+const double kNeverDropProbability = 0.0;
+const double kAlwaysDropProbability = 1.0;
+const int kBurstLength = 1;
+
+class PacketManipulatorTest: public PacketRelatedTest {
+ protected:
+  PacketReader packet_reader_;
+  EncodedImage image_;
+  NetworkingConfig drop_config_;
+  NetworkingConfig no_drop_config_;
+
+  PacketManipulatorTest() {
+    image_._buffer = packet_data_;
+    image_._length = kPacketDataLength;
+    image_._size = kPacketDataLength;
+
+    drop_config_.packet_size_in_bytes = kPacketSizeInBytes;
+    drop_config_.packet_loss_probability = kAlwaysDropProbability;
+    drop_config_.packet_loss_burst_length = kBurstLength;
+    drop_config_.packet_loss_mode = kUniform;
+
+    no_drop_config_.packet_size_in_bytes = kPacketSizeInBytes;
+    no_drop_config_.packet_loss_probability = kNeverDropProbability;
+    no_drop_config_.packet_loss_burst_length = kBurstLength;
+    no_drop_config_.packet_loss_mode = kUniform;
+  }
+
+  virtual ~PacketManipulatorTest() {}
+
+  void SetUp() {
+    PacketRelatedTest::SetUp();
+  }
+
+  void TearDown() {
+    PacketRelatedTest::TearDown();
+  }
+
+  void VerifyPacketLoss(int expected_nbr_packets_dropped,
+                        int actual_nbr_packets_dropped,
+                        int expected_packet_data_length,
+                        WebRtc_UWord8* expected_packet_data,
+                        EncodedImage& actual_image) {
+    EXPECT_EQ(expected_nbr_packets_dropped, actual_nbr_packets_dropped);
+    EXPECT_EQ(expected_packet_data_length, static_cast<int>(image_._length));
+    EXPECT_EQ(0, memcmp(expected_packet_data, actual_image._buffer,
+                        expected_packet_data_length));
+  }
+};
+
+TEST_F(PacketManipulatorTest, Constructor) {
+  PacketManipulatorImpl manipulator(&packet_reader_, no_drop_config_, false);
+}
+
+TEST_F(PacketManipulatorTest, DropNone) {
+  PacketManipulatorImpl manipulator(&packet_reader_,  no_drop_config_, false);
+  int nbr_packets_dropped = manipulator.ManipulatePackets(&image_);
+  VerifyPacketLoss(0, nbr_packets_dropped, kPacketDataLength,
+                   packet_data_, image_);
+}
+
+TEST_F(PacketManipulatorTest, UniformDropNoneSmallFrame) {
+  int data_length = 400;  // smaller than the packet size
+  image_._length = data_length;
+  PacketManipulatorImpl manipulator(&packet_reader_, no_drop_config_, false);
+  int nbr_packets_dropped = manipulator.ManipulatePackets(&image_);
+
+  VerifyPacketLoss(0, nbr_packets_dropped, data_length,
+                     packet_data_, image_);
+}
+
+TEST_F(PacketManipulatorTest, UniformDropAll) {
+  PacketManipulatorImpl manipulator(&packet_reader_, drop_config_, false);
+  int nbr_packets_dropped = manipulator.ManipulatePackets(&image_);
+  VerifyPacketLoss(kPacketDataNumberOfPackets, nbr_packets_dropped,
+                   0, packet_data_, image_);
+}
+
+// Use our customized test class to make the second packet being lost
+TEST_F(PacketManipulatorTest, UniformDropSinglePacket) {
+  drop_config_.packet_loss_probability = 0.5;
+  PredictivePacketManipulator manipulator(&packet_reader_, drop_config_);
+  manipulator.AddRandomResult(1.0);
+  manipulator.AddRandomResult(0.3);  // less than 0.5 will cause packet loss
+  manipulator.AddRandomResult(1.0);
+
+  // Execute the test target method:
+  int nbr_packets_dropped = manipulator.ManipulatePackets(&image_);
+
+  // Since we setup the predictive packet manipulator, it will throw away the
+  // second packet. The third packet is also lost because when we have lost one,
+  // the remains shall also be discarded (in the current implementation).
+  VerifyPacketLoss(2, nbr_packets_dropped, kPacketSizeInBytes, packet1_,
+                   image_);
+}
+
+// Use our customized test class to make the second packet being lost
+TEST_F(PacketManipulatorTest, BurstDropNinePackets) {
+  // Create a longer packet data structure (10 packets)
+  const int kNbrPackets = 10;
+  const int kDataLength = kPacketSizeInBytes * kNbrPackets;
+  WebRtc_UWord8 data[kDataLength];
+  WebRtc_UWord8* data_pointer = data;
+  // Fill with 0s, 1s and so on to be able to easily verify which were dropped:
+  for (int i = 0; i < kNbrPackets; ++i) {
+    memset(data_pointer + i * kPacketSizeInBytes, i, kPacketSizeInBytes);
+  }
+  // Overwrite the defaults from the test fixture:
+  image_._buffer = data;
+  image_._length = kDataLength;
+  image_._size = kDataLength;
+
+  drop_config_.packet_loss_probability = 0.5;
+  drop_config_.packet_loss_burst_length = 5;
+  drop_config_.packet_loss_mode = kBurst;
+  PredictivePacketManipulator manipulator(&packet_reader_, drop_config_);
+  manipulator.AddRandomResult(1.0);
+  manipulator.AddRandomResult(0.3);  // less than 0.5 will cause packet loss
+  for (int i = 0; i < kNbrPackets - 2; ++i) {
+    manipulator.AddRandomResult(1.0);
+  }
+
+  // Execute the test target method:
+  int nbr_packets_dropped = manipulator.ManipulatePackets(&image_);
+
+  // Should discard every packet after the first one.
+  VerifyPacketLoss(9, nbr_packets_dropped, kPacketSizeInBytes, data, image_);
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/src/modules/video_coding/codecs/test/predictive_packet_manipulator.cc b/src/modules/video_coding/codecs/test/predictive_packet_manipulator.cc
new file mode 100644
index 0000000..5668378
--- /dev/null
+++ b/src/modules/video_coding/codecs/test/predictive_packet_manipulator.cc
@@ -0,0 +1,48 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/codecs/test/predictive_packet_manipulator.h"
+
+#include <cassert>
+#include <cstdio>
+
+#include "testsupport/packet_reader.h"
+
+namespace webrtc {
+namespace test {
+
+PredictivePacketManipulator::PredictivePacketManipulator(
+    PacketReader* packet_reader, const NetworkingConfig& config)
+    : PacketManipulatorImpl(packet_reader, config, false) {
+}
+
+PredictivePacketManipulator::~PredictivePacketManipulator() {
+}
+
+
+void PredictivePacketManipulator::AddRandomResult(double result) {
+  assert(result >= 0.0 && result <= 1.0);
+  random_results_.push(result);
+}
+
+double PredictivePacketManipulator::RandomUniform() {
+  if(random_results_.size() == 0u) {
+    fprintf(stderr, "No more stored results, please make sure AddRandomResult()"
+            "is called same amount of times you're going to invoke the "
+            "RandomUniform() function, i.e. once per packet.\n");
+    assert(false);
+  }
+  double result = random_results_.front();
+  random_results_.pop();
+  return result;
+}
+
+}  // namespace test
+}  // namespace webrtcc
diff --git a/src/modules/video_coding/codecs/test/predictive_packet_manipulator.h b/src/modules/video_coding/codecs/test/predictive_packet_manipulator.h
new file mode 100644
index 0000000..22a0ce9
--- /dev/null
+++ b/src/modules/video_coding/codecs/test/predictive_packet_manipulator.h
@@ -0,0 +1,45 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_PREDICTIVE_PACKET_MANIPULATOR_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_PREDICTIVE_PACKET_MANIPULATOR_H_
+
+#include <queue>
+
+#include "modules/video_coding/codecs/test/packet_manipulator.h"
+#include "testsupport/packet_reader.h"
+
+namespace webrtc {
+namespace test {
+
+// Predictive packet manipulator that allows for setup of the result of
+// the random invocations.
+class PredictivePacketManipulator : public PacketManipulatorImpl {
+ public:
+  PredictivePacketManipulator(PacketReader* packet_reader,
+                              const NetworkingConfig& config);
+  virtual ~PredictivePacketManipulator();
+  // Adds a result. You must add at least the same number of results as the
+  // expected calls to the RandomUniform method. The results are added to a
+  // FIFO queue so they will be returned in the same order they were added.
+  // Result parameter must be 0.0 to 1.0.
+  void AddRandomResult(double result);
+ protected:
+  // Returns a uniformly distributed random value between 0.0 and 1.0
+  virtual double RandomUniform();
+
+ private:
+  std::queue<double> random_results_;
+};
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_PREDICTIVE_PACKET_MANIPULATOR_H_
diff --git a/src/modules/video_coding/codecs/test/stats.cc b/src/modules/video_coding/codecs/test/stats.cc
new file mode 100644
index 0000000..e9c9438
--- /dev/null
+++ b/src/modules/video_coding/codecs/test/stats.cc
@@ -0,0 +1,172 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/codecs/test/stats.h"
+
+#include <algorithm>  // min_element, max_element
+#include <cassert>
+#include <cstdio>
+
+namespace webrtc {
+namespace test {
+
+Stats::Stats() {}
+
+Stats::~Stats() {}
+
+bool LessForEncodeTime(const FrameStatistic& s1, const FrameStatistic& s2) {
+    return s1.encode_time_in_us < s2.encode_time_in_us;
+}
+
+bool LessForDecodeTime(const FrameStatistic& s1, const FrameStatistic& s2) {
+    return s1.decode_time_in_us < s2.decode_time_in_us;
+}
+
+bool LessForEncodedSize(const FrameStatistic& s1, const FrameStatistic& s2) {
+    return s1.encoded_frame_length_in_bytes < s2.encoded_frame_length_in_bytes;
+}
+
+bool LessForBitRate(const FrameStatistic& s1, const FrameStatistic& s2) {
+    return s1.bit_rate_in_kbps < s2.bit_rate_in_kbps;
+}
+
+FrameStatistic& Stats::NewFrame(int frame_number) {
+  assert(frame_number >= 0);
+  FrameStatistic stat;
+  stat.frame_number = frame_number;
+  stats_.push_back(stat);
+  return stats_[frame_number];
+}
+
+void Stats::PrintSummary() {
+  printf("Processing summary:\n");
+  if (stats_.size() == 0) {
+    printf("No frame statistics have been logged yet.\n");
+    return;
+  }
+
+  // Calculate min, max, average and total encoding time
+  int total_encoding_time_in_us = 0;
+  int total_decoding_time_in_us = 0;
+  int total_encoded_frames_lengths = 0;
+  int total_encoded_key_frames_lengths = 0;
+  int total_encoded_nonkey_frames_lengths = 0;
+  int nbr_keyframes = 0;
+  int nbr_nonkeyframes = 0;
+
+  for (FrameStatisticsIterator it = stats_.begin();
+      it != stats_.end(); ++it) {
+    total_encoding_time_in_us += it->encode_time_in_us;
+    total_decoding_time_in_us += it->decode_time_in_us;
+    total_encoded_frames_lengths += it->encoded_frame_length_in_bytes;
+    if (it->frame_type == webrtc::kKeyFrame) {
+      total_encoded_key_frames_lengths += it->encoded_frame_length_in_bytes;
+      nbr_keyframes++;
+    } else {
+      total_encoded_nonkey_frames_lengths += it->encoded_frame_length_in_bytes;
+      nbr_nonkeyframes++;
+    }
+  }
+
+  FrameStatisticsIterator frame;
+
+  // ENCODING
+  printf("Encoding time:\n");
+  frame = std::min_element(stats_.begin(),
+                      stats_.end(), LessForEncodeTime);
+  printf("  Min     : %7d us (frame %d)\n",
+         frame->encode_time_in_us, frame->frame_number);
+
+  frame = std::max_element(stats_.begin(),
+                      stats_.end(), LessForEncodeTime);
+  printf("  Max     : %7d us (frame %d)\n",
+         frame->encode_time_in_us, frame->frame_number);
+
+  printf("  Average : %7d us\n",
+         static_cast<int>(total_encoding_time_in_us / stats_.size()));
+
+  // DECODING
+  printf("Decoding time:\n");
+  // only consider frames that were successfully decoded (packet loss may cause
+  // failures)
+  std::vector<FrameStatistic> decoded_frames;
+  for (std::vector<FrameStatistic>::iterator it = stats_.begin();
+      it != stats_.end(); ++it) {
+    if (it->decoding_successful) {
+      decoded_frames.push_back(*it);
+    }
+  }
+  if (decoded_frames.size() == 0) {
+    printf("No successfully decoded frames exist in this statistics.\n");
+  } else {
+    frame = std::min_element(decoded_frames.begin(),
+                        decoded_frames.end(), LessForDecodeTime);
+    printf("  Min     : %7d us (frame %d)\n",
+           frame->decode_time_in_us, frame->frame_number);
+
+    frame = std::max_element(decoded_frames.begin(),
+                        decoded_frames.end(), LessForDecodeTime);
+    printf("  Max     : %7d us (frame %d)\n",
+           frame->decode_time_in_us, frame->frame_number);
+
+    printf("  Average : %7d us\n",
+           static_cast<int>(total_decoding_time_in_us / decoded_frames.size()));
+    printf("  Failures: %d frames failed to decode.\n",
+           static_cast<int>(stats_.size() - decoded_frames.size()));
+  }
+
+  // SIZE
+  printf("Frame sizes:\n");
+  frame = std::min_element(stats_.begin(),
+                      stats_.end(), LessForEncodedSize);
+  printf("  Min     : %7d bytes (frame %d)\n",
+         frame->encoded_frame_length_in_bytes, frame->frame_number);
+
+  frame = std::max_element(stats_.begin(),
+                      stats_.end(), LessForEncodedSize);
+  printf("  Max     : %7d bytes (frame %d)\n",
+         frame->encoded_frame_length_in_bytes, frame->frame_number);
+
+  printf("  Average : %7d bytes\n",
+         static_cast<int>(total_encoded_frames_lengths / stats_.size()));
+  if (nbr_keyframes > 0) {
+    printf("  Average key frame size    : %7d bytes (%d keyframes)\n",
+           total_encoded_key_frames_lengths / nbr_keyframes,
+           nbr_keyframes);
+  }
+  if (nbr_nonkeyframes > 0) {
+    printf("  Average non-key frame size: %7d bytes (%d frames)\n",
+           total_encoded_nonkey_frames_lengths / nbr_nonkeyframes,
+           nbr_nonkeyframes);
+  }
+
+  // BIT RATE
+  printf("Bit rates:\n");
+  frame = std::min_element(stats_.begin(),
+                      stats_.end(), LessForBitRate);
+  printf("  Min bit rate: %7d kbps (frame %d)\n",
+         frame->bit_rate_in_kbps, frame->frame_number);
+
+  frame = std::max_element(stats_.begin(),
+                      stats_.end(), LessForBitRate);
+  printf("  Max bit rate: %7d kbps (frame %d)\n",
+         frame->bit_rate_in_kbps, frame->frame_number);
+
+  printf("\n");
+  printf("Total encoding time  : %7d ms.\n",
+         total_encoding_time_in_us / 1000);
+  printf("Total decoding time  : %7d ms.\n",
+         total_decoding_time_in_us / 1000);
+  printf("Total processing time: %7d ms.\n",
+         (total_encoding_time_in_us + total_decoding_time_in_us) / 1000);
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/src/modules/video_coding/codecs/test/stats.h b/src/modules/video_coding/codecs/test/stats.h
new file mode 100644
index 0000000..ec2bb9d
--- /dev/null
+++ b/src/modules/video_coding/codecs/test/stats.h
@@ -0,0 +1,76 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_STATS_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_STATS_H_
+
+#include <vector>
+
+#include "common_video/interface/video_image.h"
+
+namespace webrtc {
+namespace test {
+
+// Contains statistics of a single frame that has been processed.
+struct FrameStatistic {
+  FrameStatistic() :
+      encoding_successful(false), decoding_successful(false),
+      encode_return_code(0), decode_return_code(0),
+      encode_time_in_us(0), decode_time_in_us(0),
+      frame_number(0), packets_dropped(0), total_packets(0),
+      bit_rate_in_kbps(0), encoded_frame_length_in_bytes(0),
+      frame_type(kDeltaFrame) {
+  };
+  bool encoding_successful;
+  bool decoding_successful;
+  int encode_return_code;
+  int decode_return_code;
+  int encode_time_in_us;
+  int decode_time_in_us;
+  int frame_number;
+  // How many packets were discarded of the encoded frame data (if any)
+  int packets_dropped;
+  int total_packets;
+
+  // Current bit rate. Calculated out of the size divided with the time
+  // interval per frame.
+  int bit_rate_in_kbps;
+
+  // Copied from EncodedImage
+  int encoded_frame_length_in_bytes;
+  webrtc::VideoFrameType frame_type;
+};
+
+// Handles statistics from a single video processing run.
+// Contains calculation methods for interesting metrics from these stats.
+class Stats {
+ public:
+  typedef std::vector<FrameStatistic>::iterator FrameStatisticsIterator;
+
+  Stats();
+  virtual ~Stats();
+
+  // Add a new statistic data object.
+  // The frame number must be incrementing and start at zero in order to use
+  // it as an index for the frame_statistics_ vector.
+  // Returns the newly created statistic object.
+  FrameStatistic& NewFrame(int frame_number);
+
+  // Prints a summary of all the statistics that have been gathered during the
+  // processing
+  void PrintSummary();
+
+  std::vector<FrameStatistic> stats_;
+};
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_STATS_H_
diff --git a/src/modules/video_coding/codecs/test/stats_unittest.cc b/src/modules/video_coding/codecs/test/stats_unittest.cc
new file mode 100644
index 0000000..53a50d7
--- /dev/null
+++ b/src/modules/video_coding/codecs/test/stats_unittest.cc
@@ -0,0 +1,64 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/codecs/test/stats.h"
+
+#include "gtest/gtest.h"
+#include "typedefs.h"
+
+namespace webrtc {
+namespace test {
+
+class StatsTest: public testing::Test {
+ protected:
+  StatsTest() {
+  }
+
+  virtual ~StatsTest() {
+  }
+
+  void SetUp() {
+    stats_ = new Stats();
+  }
+
+  void TearDown() {
+    delete stats_;
+  }
+
+  Stats* stats_;
+};
+
+// Test empty object
+TEST_F(StatsTest, Uninitialized) {
+  EXPECT_EQ(0u, stats_->stats_.size());
+  stats_->PrintSummary();  // should not crash
+}
+
+// Add single frame stats and verify
+TEST_F(StatsTest, AddOne) {
+  stats_->NewFrame(0u);
+  FrameStatistic* frameStat = &stats_->stats_[0];
+  EXPECT_EQ(0, frameStat->frame_number);
+}
+
+// Add multiple frame stats and verify
+TEST_F(StatsTest, AddMany) {
+  int nbr_of_frames = 1000;
+  for (int i = 0; i < nbr_of_frames; ++i) {
+    FrameStatistic& frameStat = stats_->NewFrame(i);
+    EXPECT_EQ(i, frameStat.frame_number);
+  }
+  EXPECT_EQ(nbr_of_frames, static_cast<int>(stats_->stats_.size()));
+
+  stats_->PrintSummary();  // should not crash
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/src/modules/video_coding/codecs/test/video_codecs_test_framework.gypi b/src/modules/video_coding/codecs/test/video_codecs_test_framework.gypi
new file mode 100644
index 0000000..5060b08
--- /dev/null
+++ b/src/modules/video_coding/codecs/test/video_codecs_test_framework.gypi
@@ -0,0 +1,65 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'conditions': [
+    ['include_tests==1', {
+      'targets': [
+        {
+          'target_name': 'video_codecs_test_framework',
+          'type': '<(library)',
+          'dependencies': [
+            '<(webrtc_root)/test/test.gyp:test_support',
+          ],
+          'sources': [
+            'mock/mock_packet_manipulator.h',
+            'packet_manipulator.h',
+            'packet_manipulator.cc',
+            'predictive_packet_manipulator.h',
+            'predictive_packet_manipulator.cc',
+            'stats.h',
+            'stats.cc',
+            'videoprocessor.h',
+            'videoprocessor.cc',
+          ],
+        },
+        {
+          'target_name': 'video_codecs_test_framework_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'video_codecs_test_framework',
+            'webrtc_video_coding',
+            '<(DEPTH)/testing/gmock.gyp:gmock',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+          ],
+          'sources': [
+            'packet_manipulator_unittest.cc',
+            'stats_unittest.cc',
+            'videoprocessor_unittest.cc',
+          ],
+        },
+        {
+          'target_name': 'video_codecs_test_framework_integrationtests',
+          'type': 'executable',
+          'dependencies': [
+            'video_codecs_test_framework',
+            'webrtc_video_coding',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+            '<(webrtc_root)/test/metrics.gyp:metrics',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+            '<(webrtc_vp8_dir)/vp8.gyp:webrtc_vp8',
+          ],
+          'sources': [
+            'videoprocessor_integrationtest.cc',
+          ],
+        },
+      ], # targets
+    }], # include_tests
+  ], # conditions
+}
diff --git a/src/modules/video_coding/codecs/test/videoprocessor.cc b/src/modules/video_coding/codecs/test/videoprocessor.cc
new file mode 100644
index 0000000..750d990
--- /dev/null
+++ b/src/modules/video_coding/codecs/test/videoprocessor.cc
@@ -0,0 +1,397 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/codecs/test/videoprocessor.h"
+
+#include <cassert>
+#include <cstring>
+#include <limits>
+
+#include "system_wrappers/interface/cpu_info.h"
+
+namespace webrtc {
+namespace test {
+
+VideoProcessorImpl::VideoProcessorImpl(webrtc::VideoEncoder* encoder,
+                                       webrtc::VideoDecoder* decoder,
+                                       FrameReader* frame_reader,
+                                       FrameWriter* frame_writer,
+                                       PacketManipulator* packet_manipulator,
+                                       const TestConfig& config,
+                                       Stats* stats)
+    : encoder_(encoder),
+      decoder_(decoder),
+      frame_reader_(frame_reader),
+      frame_writer_(frame_writer),
+      packet_manipulator_(packet_manipulator),
+      config_(config),
+      stats_(stats),
+      encode_callback_(NULL),
+      decode_callback_(NULL),
+      source_buffer_(NULL),
+      first_key_frame_has_been_excluded_(false),
+      last_frame_missing_(false),
+      initialized_(false),
+      encoded_frame_size_(0),
+      prev_time_stamp_(0),
+      num_dropped_frames_(0),
+      num_spatial_resizes_(0),
+      last_encoder_frame_width_(0),
+      last_encoder_frame_height_(0),
+      scaler_() {
+  assert(encoder);
+  assert(decoder);
+  assert(frame_reader);
+  assert(frame_writer);
+  assert(packet_manipulator);
+  assert(stats);
+}
+
+bool VideoProcessorImpl::Init() {
+  // Calculate a factor used for bit rate calculations:
+  bit_rate_factor_ = config_.codec_settings->maxFramerate * 0.001 * 8;  // bits
+
+  int frame_length_in_bytes = frame_reader_->FrameLength();
+
+  // Initialize data structures used by the encoder/decoder APIs
+  source_buffer_ = new WebRtc_UWord8[frame_length_in_bytes];
+  last_successful_frame_buffer_ = new WebRtc_UWord8[frame_length_in_bytes];
+
+  // Set fixed properties common for all frames:
+  source_frame_.SetWidth(config_.codec_settings->width);
+  source_frame_.SetHeight(config_.codec_settings->height);
+  source_frame_.VerifyAndAllocate(frame_length_in_bytes);
+  source_frame_.SetLength(frame_length_in_bytes);
+
+  // To keep track of spatial resize actions by encoder.
+  last_encoder_frame_width_ = config_.codec_settings->width;
+  last_encoder_frame_height_ = config_.codec_settings->height;
+
+  // Setup required callbacks for the encoder/decoder:
+  encode_callback_ = new VideoProcessorEncodeCompleteCallback(this);
+  decode_callback_ = new VideoProcessorDecodeCompleteCallback(this);
+  WebRtc_Word32 register_result =
+      encoder_->RegisterEncodeCompleteCallback(encode_callback_);
+  if (register_result != WEBRTC_VIDEO_CODEC_OK) {
+    fprintf(stderr, "Failed to register encode complete callback, return code: "
+        "%d\n", register_result);
+    return false;
+  }
+  register_result = decoder_->RegisterDecodeCompleteCallback(decode_callback_);
+  if (register_result != WEBRTC_VIDEO_CODEC_OK) {
+    fprintf(stderr, "Failed to register decode complete callback, return code: "
+            "%d\n", register_result);
+    return false;
+  }
+  // Init the encoder and decoder
+  WebRtc_UWord32 nbr_of_cores = 1;
+  if (!config_.use_single_core) {
+    nbr_of_cores = CpuInfo::DetectNumberOfCores();
+  }
+  WebRtc_Word32 init_result =
+      encoder_->InitEncode(config_.codec_settings, nbr_of_cores,
+                           config_.networking_config.max_payload_size_in_bytes);
+  if (init_result != WEBRTC_VIDEO_CODEC_OK) {
+    fprintf(stderr, "Failed to initialize VideoEncoder, return code: %d\n",
+            init_result);
+    return false;
+  }
+  init_result = decoder_->InitDecode(config_.codec_settings, nbr_of_cores);
+  if (init_result != WEBRTC_VIDEO_CODEC_OK) {
+    fprintf(stderr, "Failed to initialize VideoDecoder, return code: %d\n",
+            init_result);
+    return false;
+  }
+
+  if (config_.verbose) {
+    printf("Video Processor:\n");
+    printf("  #CPU cores used  : %d\n", nbr_of_cores);
+    printf("  Total # of frames: %d\n", frame_reader_->NumberOfFrames());
+    printf("  Codec settings:\n");
+    printf("    Start bitrate  : %d kbps\n",
+           config_.codec_settings->startBitrate);
+    printf("    Width          : %d\n", config_.codec_settings->width);
+    printf("    Height         : %d\n", config_.codec_settings->height);
+  }
+  initialized_ = true;
+  return true;
+}
+
+VideoProcessorImpl::~VideoProcessorImpl() {
+  delete[] source_buffer_;
+  delete[] last_successful_frame_buffer_;
+  encoder_->RegisterEncodeCompleteCallback(NULL);
+  delete encode_callback_;
+  decoder_->RegisterDecodeCompleteCallback(NULL);
+  delete decode_callback_;
+}
+
+
+void VideoProcessorImpl::SetRates(int bit_rate, int frame_rate) {
+  int set_rates_result = encoder_->SetRates(bit_rate, frame_rate);
+  assert(set_rates_result >= 0);
+  if (set_rates_result < 0) {
+    fprintf(stderr, "Failed to update encoder with new rate %d, "
+            "return code: %d\n", bit_rate, set_rates_result);
+  }
+  num_dropped_frames_ = 0;
+  num_spatial_resizes_ = 0;
+}
+
+int VideoProcessorImpl::EncodedFrameSize() {
+  return encoded_frame_size_;
+}
+
+int VideoProcessorImpl::NumberDroppedFrames() {
+  return num_dropped_frames_;
+}
+
+int VideoProcessorImpl::NumberSpatialResizes() {
+  return num_spatial_resizes_;
+}
+
+bool VideoProcessorImpl::ProcessFrame(int frame_number) {
+  assert(frame_number >=0);
+  if (!initialized_) {
+    fprintf(stderr, "Attempting to use uninitialized VideoProcessor!\n");
+    return false;
+  }
+  // |prev_time_stamp_| is used for getting number of dropped frames.
+  if (frame_number == 0) {
+    prev_time_stamp_ = -1;
+  }
+  if (frame_reader_->ReadFrame(source_buffer_)) {
+    // Copy the source frame to the newly read frame data.
+    // Length is common for all frames.
+    source_frame_.CopyFrame(source_frame_.Length(), source_buffer_);
+
+    // Ensure we have a new statistics data object we can fill:
+    FrameStatistic& stat = stats_->NewFrame(frame_number);
+
+    encode_start_ = TickTime::Now();
+    // Use the frame number as "timestamp" to identify frames
+    source_frame_.SetTimeStamp(frame_number);
+
+    // Decide if we're going to force a keyframe:
+    VideoFrameType frame_type = kDeltaFrame;
+    if (config_.keyframe_interval > 0 &&
+        frame_number % config_.keyframe_interval == 0) {
+      frame_type = kKeyFrame;
+    }
+
+    // For dropped frames, we regard them as zero size encoded frames.
+    encoded_frame_size_ = 0;
+
+    WebRtc_Word32 encode_result = encoder_->Encode(source_frame_, NULL,
+                                                   frame_type);
+
+    if (encode_result != WEBRTC_VIDEO_CODEC_OK) {
+      fprintf(stderr, "Failed to encode frame %d, return code: %d\n",
+              frame_number, encode_result);
+    }
+    stat.encode_return_code = encode_result;
+    return true;
+  } else {
+    return false;  // we've reached the last frame
+  }
+}
+
+void VideoProcessorImpl::FrameEncoded(EncodedImage* encoded_image) {
+  // Timestamp is frame number, so this gives us #dropped frames.
+  int num_dropped_from_prev_encode =  encoded_image->_timeStamp -
+      prev_time_stamp_ - 1;
+  num_dropped_frames_ +=  num_dropped_from_prev_encode;
+  prev_time_stamp_ =  encoded_image->_timeStamp;
+  if (num_dropped_from_prev_encode > 0) {
+    // For dropped frames, we write out the last decoded frame to avoid getting
+    // out of sync for the computation of PSNR and SSIM.
+    for (int i = 0; i < num_dropped_from_prev_encode; i++) {
+      frame_writer_->WriteFrame(last_successful_frame_buffer_);
+    }
+  }
+  // Frame is not dropped, so update the encoded frame size
+  // (encoder callback is only called for non-zero length frames).
+  encoded_frame_size_ = encoded_image->_length;
+
+  TickTime encode_stop = TickTime::Now();
+  int frame_number = encoded_image->_timeStamp;
+  FrameStatistic& stat = stats_->stats_[frame_number];
+  stat.encode_time_in_us = GetElapsedTimeMicroseconds(encode_start_,
+                                                      encode_stop);
+  stat.encoding_successful = true;
+  stat.encoded_frame_length_in_bytes = encoded_image->_length;
+  stat.frame_number = encoded_image->_timeStamp;
+  stat.frame_type = encoded_image->_frameType;
+  stat.bit_rate_in_kbps = encoded_image->_length * bit_rate_factor_;
+  stat.total_packets = encoded_image->_length /
+      config_.networking_config.packet_size_in_bytes + 1;
+
+  // Perform packet loss if criteria is fullfilled:
+  bool exclude_this_frame = false;
+  // Only keyframes can be excluded
+  if (encoded_image->_frameType == kKeyFrame) {
+    switch (config_.exclude_frame_types) {
+      case kExcludeOnlyFirstKeyFrame:
+        if (!first_key_frame_has_been_excluded_) {
+          first_key_frame_has_been_excluded_ = true;
+          exclude_this_frame = true;
+        }
+        break;
+      case kExcludeAllKeyFrames:
+        exclude_this_frame = true;
+        break;
+      default:
+        assert(false);
+    }
+  }
+  if (!exclude_this_frame) {
+    stat.packets_dropped =
+          packet_manipulator_->ManipulatePackets(encoded_image);
+  }
+
+  // Keep track of if frames are lost due to packet loss so we can tell
+  // this to the encoder (this is handled by the RTP logic in the full stack)
+  decode_start_ = TickTime::Now();
+  // TODO(kjellander): Pass fragmentation header to the decoder when
+  // CL 172001 has been submitted and PacketManipulator supports this.
+  WebRtc_Word32 decode_result = decoder_->Decode(*encoded_image,
+                                                 last_frame_missing_, NULL);
+  stat.decode_return_code = decode_result;
+  if (decode_result != WEBRTC_VIDEO_CODEC_OK) {
+    // Write the last successful frame the output file to avoid getting it out
+    // of sync with the source file for SSIM and PSNR comparisons:
+    frame_writer_->WriteFrame(last_successful_frame_buffer_);
+  }
+  // save status for losses so we can inform the decoder for the next frame:
+  last_frame_missing_ = encoded_image->_length == 0;
+}
+
+void VideoProcessorImpl::FrameDecoded(const VideoFrame& image) {
+  TickTime decode_stop = TickTime::Now();
+  int frame_number = image.TimeStamp();
+  // Report stats
+  FrameStatistic& stat = stats_->stats_[frame_number];
+  stat.decode_time_in_us = GetElapsedTimeMicroseconds(decode_start_,
+                                                      decode_stop);
+  stat.decoding_successful = true;
+
+  // Check for resize action (either down or up):
+  if (static_cast<int>(image.Width()) != last_encoder_frame_width_ ||
+      static_cast<int>(image.Height()) != last_encoder_frame_height_ ) {
+    ++num_spatial_resizes_;
+    last_encoder_frame_width_ = image.Width();
+    last_encoder_frame_height_ = image.Height();
+  }
+  // Check if codec size is different from native/original size, and if so,
+  // upsample back to original size: needed for PSNR and SSIM computations.
+  if (image.Width() !=  config_.codec_settings->width ||
+      image.Height() != config_.codec_settings->height) {
+    int required_size = CalcBufferSize(kI420,
+                                       config_.codec_settings->width,
+                                       config_.codec_settings->height);
+    VideoFrame up_image;
+    up_image.VerifyAndAllocate(required_size);
+    up_image.SetLength(required_size);
+    up_image.SetWidth(config_.codec_settings->width);
+    up_image.SetHeight(config_.codec_settings->height);
+
+    int ret_val = scaler_.Set(image.Width(), image.Height(),
+                              config_.codec_settings->width,
+                              config_.codec_settings->height,
+                              kI420, kI420, kScaleBilinear);
+    assert(ret_val >= 0);
+    if (ret_val < 0) {
+      fprintf(stderr, "Failed to set scalar for frame: %d, return code: %d\n",
+              frame_number, ret_val);
+    }
+    ret_val = scaler_.Scale(image.Buffer(), up_image.Buffer(),
+                            required_size);
+    assert(ret_val >= 0);
+    if (ret_val < 0) {
+      fprintf(stderr, "Failed to scale frame: %d, return code: %d\n",
+              frame_number, ret_val);
+    }
+    // Update our copy of the last successful frame:
+    memcpy(last_successful_frame_buffer_, up_image.Buffer(), up_image.Length());
+
+    bool write_success = frame_writer_->WriteFrame(up_image.Buffer());
+    assert(write_success);
+    if (!write_success) {
+      fprintf(stderr, "Failed to write frame %d to disk!", frame_number);
+    }
+    up_image.Free();
+  } else {  // No resize.
+    // Update our copy of the last successful frame:
+    memcpy(last_successful_frame_buffer_, image.Buffer(), image.Length());
+
+    bool write_success = frame_writer_->WriteFrame(image.Buffer());
+    assert(write_success);
+    if (!write_success) {
+      fprintf(stderr, "Failed to write frame %d to disk!", frame_number);
+    }
+  }
+}
+
+int VideoProcessorImpl::GetElapsedTimeMicroseconds(
+    const webrtc::TickTime& start, const webrtc::TickTime& stop) {
+  WebRtc_UWord64 encode_time = (stop - start).Microseconds();
+  assert(encode_time <
+         static_cast<unsigned int>(std::numeric_limits<int>::max()));
+  return static_cast<int>(encode_time);
+}
+
+const char* ExcludeFrameTypesToStr(ExcludeFrameTypes e) {
+  switch (e) {
+    case kExcludeOnlyFirstKeyFrame:
+      return "ExcludeOnlyFirstKeyFrame";
+    case kExcludeAllKeyFrames:
+      return "ExcludeAllKeyFrames";
+    default:
+      assert(false);
+      return "Unknown";
+  }
+}
+
+const char* VideoCodecTypeToStr(webrtc::VideoCodecType e) {
+  switch (e) {
+    case kVideoCodecVP8:
+      return "VP8";
+    case kVideoCodecI420:
+      return "I420";
+    case kVideoCodecRED:
+      return "RED";
+    case kVideoCodecULPFEC:
+      return "ULPFEC";
+    case kVideoCodecUnknown:
+      return "Unknown";
+    default:
+      assert(false);
+      return "Unknown";
+  }
+}
+
+// Callbacks
+WebRtc_Word32
+VideoProcessorImpl::VideoProcessorEncodeCompleteCallback::Encoded(
+    EncodedImage& encoded_image,
+    const webrtc::CodecSpecificInfo* codec_specific_info,
+    const webrtc::RTPFragmentationHeader* fragmentation) {
+  video_processor_->FrameEncoded(&encoded_image);  // Forward to parent class.
+  return 0;
+}
+WebRtc_Word32
+VideoProcessorImpl::VideoProcessorDecodeCompleteCallback::Decoded(
+    VideoFrame& image) {
+  video_processor_->FrameDecoded(image);  // forward to parent class
+  return 0;
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/src/modules/video_coding/codecs/test/videoprocessor.h b/src/modules/video_coding/codecs/test/videoprocessor.h
new file mode 100644
index 0000000..a8f9228
--- /dev/null
+++ b/src/modules/video_coding/codecs/test/videoprocessor.h
@@ -0,0 +1,260 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_VIDEOPROCESSOR_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_VIDEOPROCESSOR_H_
+
+#include <string>
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "common_video/libyuv/include/scaler.h"
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "modules/video_coding/codecs/test/packet_manipulator.h"
+#include "modules/video_coding/codecs/test/stats.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "testsupport/frame_reader.h"
+#include "testsupport/frame_writer.h"
+
+namespace webrtc {
+namespace test {
+
+// Defines which frame types shall be excluded from packet loss and when.
+enum ExcludeFrameTypes {
+  // Will exclude the first keyframe in the video sequence from packet loss.
+  // Following keyframes will be targeted for packet loss.
+  kExcludeOnlyFirstKeyFrame,
+  // Exclude all keyframes from packet loss, no matter where in the video
+  // sequence they occur.
+  kExcludeAllKeyFrames
+};
+// Returns a string representation of the enum value.
+const char* ExcludeFrameTypesToStr(ExcludeFrameTypes e);
+
+// Test configuration for a test run
+struct TestConfig {
+  TestConfig()
+    : name(""), description(""), test_number(0),
+      input_filename(""), output_filename(""), output_dir("out"),
+      networking_config(), exclude_frame_types(kExcludeOnlyFirstKeyFrame),
+      frame_length_in_bytes(-1), use_single_core(false), keyframe_interval(0),
+      codec_settings(NULL), verbose(true) {
+  };
+
+  // Name of the test. This is purely metadata and does not affect
+  // the test in any way.
+  std::string name;
+
+  // More detailed description of the test. This is purely metadata and does
+  // not affect the test in any way.
+  std::string description;
+
+  // Number of this test. Useful if multiple runs of the same test with
+  // different configurations shall be managed.
+  int test_number;
+
+  // File to process for the test. This must be a video file in the YUV format.
+  std::string input_filename;
+
+  // File to write to during processing for the test. Will be a video file
+  // in the YUV format.
+  std::string output_filename;
+
+  // Path to the directory where encoded files will be put
+  // (absolute or relative to the executable). Default: "out".
+  std::string output_dir;
+
+  // Configurations related to networking.
+  NetworkingConfig networking_config;
+
+  // Decides how the packet loss simulations shall exclude certain frames
+  // from packet loss. Default: kExcludeOnlyFirstKeyFrame.
+  ExcludeFrameTypes exclude_frame_types;
+
+  // The length of a single frame of the input video file. This value is
+  // calculated out of the width and height according to the video format
+  // specification. Must be set before processing.
+  int frame_length_in_bytes;
+
+  // Force the encoder and decoder to use a single core for processing.
+  // Using a single core is necessary to get a deterministic behavior for the
+  // encoded frames - using multiple cores will produce different encoded frames
+  // since multiple cores are competing to consume the byte budget for each
+  // frame in parallel.
+  // If set to false, the maximum number of available cores will be used.
+  // Default: false.
+  bool use_single_core;
+
+  // If set to a value >0 this setting forces the encoder to create a keyframe
+  // every Nth frame. Note that the encoder may create a keyframe in other
+  // locations in addition to the interval that is set using this parameter.
+  // Forcing key frames may also affect encoder planning optimizations in
+  // a negative way, since it will suddenly be forced to produce an expensive
+  // key frame.
+  // Default: 0.
+  int keyframe_interval;
+
+  // The codec settings to use for the test (target bitrate, video size,
+  // framerate and so on). This struct must be created and filled in using
+  // the VideoCodingModule::Codec() method.
+  webrtc::VideoCodec* codec_settings;
+
+  // If printing of information to stdout shall be performed during processing.
+  bool verbose;
+};
+
+// Returns a string representation of the enum value.
+const char* VideoCodecTypeToStr(webrtc::VideoCodecType e);
+
+// Handles encoding/decoding of video using the VideoEncoder/VideoDecoder
+// interfaces. This is done in a sequential manner in order to be able to
+// measure times properly.
+// The class processes a frame at the time for the configured input file.
+// It maintains state of where in the source input file the processing is at.
+//
+// Regarding packet loss: Note that keyframes are excluded (first or all
+// depending on the ExcludeFrameTypes setting). This is because if key frames
+// would be altered, all the following delta frames would be pretty much
+// worthless. VP8 has an error-resilience feature that makes it able to handle
+// packet loss in key non-first keyframes, which is why only the first is
+// excluded by default.
+// Packet loss in such important frames is handled on a higher level in the
+// Video Engine, where signaling would request a retransmit of the lost packets,
+// since they're so important.
+//
+// Note this class is not thread safe in any way and is meant for simple testing
+// purposes.
+class VideoProcessor {
+ public:
+  virtual ~VideoProcessor() {}
+
+  // Performs initial calculations about frame size, sets up callbacks etc.
+  // Returns false if an error has occurred, in addition to printing to stderr.
+  virtual bool Init() = 0;
+
+  // Processes a single frame. Returns true as long as there's more frames
+  // available in the source clip.
+  // Frame number must be an integer >=0.
+  virtual bool ProcessFrame(int frame_number) = 0;
+
+  // Updates the encoder with the target bit rate and the frame rate.
+  virtual void SetRates(int bit_rate, int frame_rate) = 0;
+
+  // Return the size of the encoded frame in bytes. Dropped frames by the
+  // encoder are regarded as zero size.
+  virtual int EncodedFrameSize() = 0;
+
+  // Return the number of dropped frames.
+  virtual int NumberDroppedFrames() = 0;
+
+  // Return the number of spatial resizes.
+  virtual int NumberSpatialResizes() = 0;
+};
+
+class VideoProcessorImpl : public VideoProcessor {
+ public:
+  VideoProcessorImpl(webrtc::VideoEncoder* encoder,
+                     webrtc::VideoDecoder* decoder,
+                     FrameReader* frame_reader,
+                     FrameWriter* frame_writer,
+                     PacketManipulator* packet_manipulator,
+                     const TestConfig& config,
+                     Stats* stats);
+  virtual ~VideoProcessorImpl();
+  virtual bool Init();
+  virtual bool ProcessFrame(int frame_number);
+
+ private:
+  // Invoked by the callback when a frame has completed encoding.
+  void FrameEncoded(webrtc::EncodedImage* encodedImage);
+  // Invoked by the callback when a frame has completed decoding.
+  void FrameDecoded(const webrtc::VideoFrame& image);
+  // Used for getting a 32-bit integer representing time
+  // (checks the size is within signed 32-bit bounds before casting it)
+  int GetElapsedTimeMicroseconds(const webrtc::TickTime& start,
+                                 const webrtc::TickTime& stop);
+  // Updates the encoder with the target bit rate and the frame rate.
+  void SetRates(int bit_rate, int frame_rate);
+  // Return the size of the encoded frame in bytes.
+  int EncodedFrameSize();
+  // Return the number of dropped frames.
+  int NumberDroppedFrames();
+  // Return the number of spatial resizes.
+  int NumberSpatialResizes();
+
+  webrtc::VideoEncoder* encoder_;
+  webrtc::VideoDecoder* decoder_;
+  FrameReader* frame_reader_;
+  FrameWriter* frame_writer_;
+  PacketManipulator* packet_manipulator_;
+  const TestConfig& config_;
+  Stats* stats_;
+
+  EncodedImageCallback* encode_callback_;
+  DecodedImageCallback* decode_callback_;
+  // Buffer used for reading the source video file:
+  WebRtc_UWord8* source_buffer_;
+  // Keep track of the last successful frame, since we need to write that
+  // when decoding fails:
+  WebRtc_UWord8* last_successful_frame_buffer_;
+  webrtc::VideoFrame source_frame_;
+  // To keep track of if we have excluded the first key frame from packet loss:
+  bool first_key_frame_has_been_excluded_;
+  // To tell the decoder previous frame have been dropped due to packet loss:
+  bool last_frame_missing_;
+  // If Init() has executed successfully.
+  bool initialized_;
+  int encoded_frame_size_;
+  int prev_time_stamp_;
+  int num_dropped_frames_;
+  int num_spatial_resizes_;
+  int last_encoder_frame_width_;
+  int last_encoder_frame_height_;
+  Scaler scaler_;
+
+  // Statistics
+  double bit_rate_factor_;  // multiply frame length with this to get bit rate
+  webrtc::TickTime encode_start_;
+  webrtc::TickTime decode_start_;
+
+  // Callback class required to implement according to the VideoEncoder API.
+  class VideoProcessorEncodeCompleteCallback
+    : public webrtc::EncodedImageCallback {
+   public:
+      explicit VideoProcessorEncodeCompleteCallback(VideoProcessorImpl* vp)
+        : video_processor_(vp) {
+    }
+    WebRtc_Word32 Encoded(
+        webrtc::EncodedImage& encoded_image,
+        const webrtc::CodecSpecificInfo* codec_specific_info = NULL,
+        const webrtc::RTPFragmentationHeader* fragmentation = NULL);
+
+   private:
+    VideoProcessorImpl* video_processor_;
+  };
+
+  // Callback class required to implement according to the VideoDecoder API.
+  class VideoProcessorDecodeCompleteCallback
+    : public webrtc::DecodedImageCallback {
+   public:
+      explicit VideoProcessorDecodeCompleteCallback(VideoProcessorImpl* vp)
+      : video_processor_(vp) {
+    }
+    WebRtc_Word32 Decoded(webrtc::VideoFrame& image);
+
+   private:
+    VideoProcessorImpl* video_processor_;
+  };
+};
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_VIDEOPROCESSOR_H_
diff --git a/src/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc b/src/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc
new file mode 100644
index 0000000..6ae0606
--- /dev/null
+++ b/src/modules/video_coding/codecs/test/videoprocessor_integrationtest.cc
@@ -0,0 +1,742 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gtest/gtest.h"
+
+#include <math.h>
+
+#include "modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "modules/video_coding/codecs/test/packet_manipulator.h"
+#include "modules/video_coding/codecs/test/videoprocessor.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "modules/video_coding/codecs/vp8/include/vp8_common_types.h"
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "testsupport/fileutils.h"
+#include "testsupport/frame_reader.h"
+#include "testsupport/frame_writer.h"
+#include "testsupport/metrics/video_metrics.h"
+#include "testsupport/packet_reader.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+// Maximum number of rate updates (i.e., calls to encoder to change bitrate
+// and/or frame rate) for the current tests.
+const int kMaxNumRateUpdates = 3;
+
+const int kPercTargetvsActualMismatch = 20;
+
+// Codec and network settings.
+struct CodecConfigPars {
+  float packet_loss;
+  int num_temporal_layers;
+  int key_frame_interval;
+  bool error_concealment_on;
+  bool denoising_on;
+};
+
+// Quality metrics.
+struct QualityMetrics {
+  double minimum_avg_psnr;
+  double minimum_min_psnr;
+  double minimum_avg_ssim;
+  double minimum_min_ssim;
+};
+
+// The sequence of bitrate and frame rate changes for the encoder, the frame
+// number where the changes are made, and the total number of frames for the
+// test.
+struct RateProfile {
+  int target_bit_rate[kMaxNumRateUpdates];
+  int input_frame_rate[kMaxNumRateUpdates];
+  int frame_index_rate_update[kMaxNumRateUpdates + 1];
+  int num_frames;
+};
+
+// Metrics for the rate control. The rate mismatch metrics are defined as
+// percentages.|max_time_hit_target| is defined as number of frames, after a
+// rate update is made to the encoder, for the encoder to reach within
+// |kPercTargetvsActualMismatch| of new target rate. The metrics are defined for
+// each rate update sequence.
+struct RateControlMetrics {
+  int max_num_dropped_frames;
+  int max_key_frame_size_mismatch;
+  int max_delta_frame_size_mismatch;
+  int max_encoding_rate_mismatch;
+  int max_time_hit_target;
+  int num_spatial_resizes;
+};
+
+
+// Sequence used is foreman (CIF): may be better to use VGA for resize test.
+const int kCIFWidth = 352;
+const int kCIFHeight = 288;
+const int kNbrFramesShort = 100;  // Some tests are run for shorter sequence.
+const int kNbrFramesLong = 299;
+
+// Parameters from VP8 wrapper, which control target size of key frames.
+const float kInitialBufferSize = 0.5f;
+const float kOptimalBufferSize = 0.6f;
+const float kScaleKeyFrameSize = 0.5f;
+
+// Integration test for video processor. Encodes+decodes a clip and
+// writes it to the output directory. After completion, quality metrics
+// (PSNR and SSIM) and rate control metrics are computed to verify that the
+// quality and encoder response is acceptable. The rate control tests allow us
+// to verify the behavior for changing bitrate, changing frame rate, frame
+// dropping/spatial resize, and temporal layers. The limits for the rate
+// control metrics are set to be fairly conservative, so failure should only
+// happen when some significant regression or breakdown occurs.
+class VideoProcessorIntegrationTest: public testing::Test {
+ protected:
+  VideoEncoder* encoder_;
+  VideoDecoder* decoder_;
+  webrtc::test::FrameReader* frame_reader_;
+  webrtc::test::FrameWriter* frame_writer_;
+  webrtc::test::PacketReader packet_reader_;
+  webrtc::test::PacketManipulator* packet_manipulator_;
+  webrtc::test::Stats stats_;
+  webrtc::test::TestConfig config_;
+  VideoCodec codec_settings_;
+  webrtc::test::VideoProcessor* processor_;
+
+  // Quantities defined/updated for every encoder rate update.
+  // Some quantities defined per temporal layer (at most 3 layers in this test).
+  int num_frames_per_update_[3];
+  float sum_frame_size_mismatch_[3];
+  float sum_encoded_frame_size_[3];
+  float encoding_bitrate_[3];
+  float per_frame_bandwidth_[3];
+  float bit_rate_layer_[3];
+  float frame_rate_layer_[3];
+  int num_frames_total_;
+  float sum_encoded_frame_size_total_;
+  float encoding_bitrate_total_;
+  float perc_encoding_rate_mismatch_;
+  int num_frames_to_hit_target_;
+  bool encoding_rate_within_target_;
+  int bit_rate_;
+  int frame_rate_;
+  int layer_;
+  float target_size_key_frame_initial_;
+  float target_size_key_frame_;
+  float sum_key_frame_size_mismatch_;
+  int num_key_frames_;
+  float start_bitrate_;
+
+  // Codec and network settings.
+  float packet_loss_;
+  int num_temporal_layers_;
+  int key_frame_interval_;
+  bool error_concealment_on_;
+  bool denoising_on_;
+
+
+  VideoProcessorIntegrationTest() {}
+  virtual ~VideoProcessorIntegrationTest() {}
+
+  void SetUpCodecConfig() {
+    encoder_ = VP8Encoder::Create();
+    decoder_ = VP8Decoder::Create();
+
+    // CIF is currently used for all tests below.
+    // Setup the TestConfig struct for processing of a clip in CIF resolution.
+    config_.input_filename =
+        webrtc::test::ResourcePath("foreman_cif", "yuv");
+    config_.output_filename = webrtc::test::OutputPath() +
+          "foreman_cif_short_video_codecs_test_framework_integrationtests.yuv";
+    config_.frame_length_in_bytes = 3 * kCIFWidth * kCIFHeight / 2;
+    config_.verbose = false;
+    // Only allow encoder/decoder to use single core, for predictability.
+    config_.use_single_core = true;
+    // Key frame interval and packet loss are set for each test.
+    config_.keyframe_interval = key_frame_interval_;
+    config_.networking_config.packet_loss_probability = packet_loss_;
+
+    // Get a codec configuration struct and configure it.
+    VideoCodingModule::Codec(kVideoCodecVP8, &codec_settings_);
+    config_.codec_settings = &codec_settings_;
+    config_.codec_settings->startBitrate = start_bitrate_;
+    config_.codec_settings->width = kCIFWidth;
+    config_.codec_settings->height = kCIFHeight;
+    // These features may be set depending on the test.
+    config_.codec_settings->codecSpecific.VP8.errorConcealmentOn =
+        error_concealment_on_;
+    config_.codec_settings->codecSpecific.VP8.denoisingOn =
+        denoising_on_;
+    config_.codec_settings->codecSpecific.VP8.numberOfTemporalLayers =
+        num_temporal_layers_;
+
+    frame_reader_ =
+        new webrtc::test::FrameReaderImpl(config_.input_filename,
+                                          config_.frame_length_in_bytes);
+    frame_writer_ =
+        new webrtc::test::FrameWriterImpl(config_.output_filename,
+                                          config_.frame_length_in_bytes);
+    ASSERT_TRUE(frame_reader_->Init());
+    ASSERT_TRUE(frame_writer_->Init());
+
+    packet_manipulator_ = new webrtc::test::PacketManipulatorImpl(
+        &packet_reader_, config_.networking_config, config_.verbose);
+    processor_ = new webrtc::test::VideoProcessorImpl(encoder_, decoder_,
+                                                      frame_reader_,
+                                                      frame_writer_,
+                                                      packet_manipulator_,
+                                                      config_, &stats_);
+    ASSERT_TRUE(processor_->Init());
+  }
+
+  // Reset quantities after each encoder update, update the target
+  // per-frame bandwidth.
+  void ResetRateControlMetrics(int num_frames) {
+    for (int i = 0; i < num_temporal_layers_; i++) {
+      num_frames_per_update_[i] = 0;
+      sum_frame_size_mismatch_[i] = 0.0f;
+      sum_encoded_frame_size_[i] = 0.0f;
+      encoding_bitrate_[i] = 0.0f;
+      // Update layer per-frame-bandwidth.
+      per_frame_bandwidth_[i] = static_cast<float>(bit_rate_layer_[i]) /
+             static_cast<float>(frame_rate_layer_[i]);
+    }
+    // Set maximum size of key frames, following setting in the VP8 wrapper.
+    float max_key_size = kScaleKeyFrameSize * kOptimalBufferSize * frame_rate_;
+    // We don't know exact target size of the key frames (except for first one),
+    // but the minimum in libvpx is ~|3 * per_frame_bandwidth| and maximum is
+    // set by |max_key_size_  * per_frame_bandwidth|. Take middle point/average
+    // as reference for mismatch. Note key frames always correspond to base
+    // layer frame in this test.
+    target_size_key_frame_ = 0.5 * (3 + max_key_size) * per_frame_bandwidth_[0];
+    num_frames_total_ = 0;
+    sum_encoded_frame_size_total_ = 0.0f;
+    encoding_bitrate_total_ = 0.0f;
+    perc_encoding_rate_mismatch_ = 0.0f;
+    num_frames_to_hit_target_ = num_frames;
+    encoding_rate_within_target_ = false;
+    sum_key_frame_size_mismatch_ = 0.0;
+    num_key_frames_ = 0;
+  }
+
+  // For every encoded frame, update the rate control metrics.
+  void UpdateRateControlMetrics(int frame_num, VideoFrameType frame_type) {
+    int encoded_frame_size = processor_->EncodedFrameSize();
+    float encoded_size_kbits = encoded_frame_size * 8.0f / 1000.0f;
+    // Update layer data.
+    // Update rate mismatch relative to per-frame bandwidth for delta frames.
+    if (frame_type == kDeltaFrame) {
+      // TODO(marpan): Should we count dropped (zero size) frames in mismatch?
+      sum_frame_size_mismatch_[layer_] += fabs(encoded_size_kbits -
+                                               per_frame_bandwidth_[layer_]) /
+                                               per_frame_bandwidth_[layer_];
+    } else {
+      float target_size = (frame_num == 1) ? target_size_key_frame_initial_ :
+          target_size_key_frame_;
+      sum_key_frame_size_mismatch_ += fabs(encoded_size_kbits - target_size) /
+          target_size;
+      num_key_frames_ += 1;
+    }
+    sum_encoded_frame_size_[layer_] += encoded_size_kbits;
+    // Encoding bitrate per layer: from the start of the update/run to the
+    // current frame.
+    encoding_bitrate_[layer_] = sum_encoded_frame_size_[layer_] *
+        frame_rate_layer_[layer_] /
+        num_frames_per_update_[layer_];
+    // Total encoding rate: from the start of the update/run to current frame.
+    sum_encoded_frame_size_total_ += encoded_size_kbits;
+    encoding_bitrate_total_ = sum_encoded_frame_size_total_ * frame_rate_ /
+        num_frames_total_;
+    perc_encoding_rate_mismatch_ =  100 * fabs(encoding_bitrate_total_ -
+                                               bit_rate_) / bit_rate_;
+    if (perc_encoding_rate_mismatch_ < kPercTargetvsActualMismatch &&
+        !encoding_rate_within_target_) {
+      num_frames_to_hit_target_ = num_frames_total_;
+      encoding_rate_within_target_ = true;
+    }
+  }
+
+  // Verify expected behavior of rate control and print out data.
+  void VerifyRateControl(int update_index,
+                         int max_key_frame_size_mismatch,
+                         int max_delta_frame_size_mismatch,
+                         int max_encoding_rate_mismatch,
+                         int max_time_hit_target,
+                         int max_num_dropped_frames,
+                         int num_spatial_resizes) {
+    int num_dropped_frames = processor_->NumberDroppedFrames();
+    int num_resize_actions = processor_->NumberSpatialResizes();
+    printf("For update #: %d,\n "
+        " Target Bitrate: %d,\n"
+        " Encoding bitrate: %f,\n"
+        " Frame rate: %d \n",
+        update_index, bit_rate_, encoding_bitrate_total_, frame_rate_);
+    printf(" Number of frames to approach target rate = %d, \n"
+           " Number of dropped frames = %d, \n"
+           " Number of spatial resizes = %d, \n",
+           num_frames_to_hit_target_, num_dropped_frames, num_resize_actions);
+    EXPECT_LE(perc_encoding_rate_mismatch_, max_encoding_rate_mismatch);
+    if (num_key_frames_ > 0) {
+      int perc_key_frame_size_mismatch = 100 * sum_key_frame_size_mismatch_ /
+              num_key_frames_;
+      printf(" Number of Key frames: %d \n"
+             " Key frame rate mismatch: %d \n",
+             num_key_frames_, perc_key_frame_size_mismatch);
+      EXPECT_LE(perc_key_frame_size_mismatch, max_key_frame_size_mismatch);
+    }
+    printf("\n");
+    printf("Rates statistics for Layer data \n");
+    for (int i = 0; i < num_temporal_layers_ ; i++) {
+      printf("Layer #%d \n", i);
+      int perc_frame_size_mismatch = 100 * sum_frame_size_mismatch_[i] /
+        num_frames_per_update_[i];
+      int perc_encoding_rate_mismatch = 100 * fabs(encoding_bitrate_[i] -
+                                                   bit_rate_layer_[i]) /
+                                                   bit_rate_layer_[i];
+      printf(" Target Layer Bit rate: %f \n"
+          " Layer frame rate: %f, \n"
+          " Layer per frame bandwidth: %f, \n"
+          " Layer Encoding bit rate: %f, \n"
+          " Layer Percent frame size mismatch: %d,  \n"
+          " Layer Percent encoding rate mismatch = %d, \n"
+          " Number of frame processed per layer = %d \n",
+          bit_rate_layer_[i], frame_rate_layer_[i], per_frame_bandwidth_[i],
+          encoding_bitrate_[i], perc_frame_size_mismatch,
+          perc_encoding_rate_mismatch, num_frames_per_update_[i]);
+      EXPECT_LE(perc_frame_size_mismatch, max_delta_frame_size_mismatch);
+      EXPECT_LE(perc_encoding_rate_mismatch, max_encoding_rate_mismatch);
+    }
+    printf("\n");
+    EXPECT_LE(num_frames_to_hit_target_, max_time_hit_target);
+    EXPECT_LE(num_dropped_frames, max_num_dropped_frames);
+    // Only if the spatial resizer is on in the codec wrapper do we expect to
+    // get |num_spatial_resizes| resizes, otherwise we should not get any.
+    EXPECT_TRUE(num_resize_actions == 0 ||
+                num_resize_actions == num_spatial_resizes);
+  }
+
+  // Layer index corresponding to frame number, for up to 3 layers.
+  void LayerIndexForFrame(int frame_number) {
+    if (num_temporal_layers_ == 1) {
+      layer_ = 0;
+    } else if (num_temporal_layers_ == 2) {
+        // layer 0:  0     2     4 ...
+        // layer 1:     1     3
+        if (frame_number % 2 == 0) {
+          layer_ = 0;
+        } else {
+          layer_ = 1;
+        }
+    } else if (num_temporal_layers_ == 3) {
+      // layer 0:  0            4            8 ...
+      // layer 1:        2            6
+      // layer 2:     1      3     5      7
+      if (frame_number % 4 == 0) {
+        layer_ = 0;
+      } else if ((frame_number + 2) % 4 == 0) {
+        layer_ = 1;
+      } else if ((frame_number + 1) % 2 == 0) {
+        layer_ = 2;
+      }
+    } else {
+      assert(false);  // Only up to 3 layers.
+    }
+  }
+
+  // Set the bitrate and frame rate per layer, for up to 3 layers.
+  void SetLayerRates() {
+    assert(num_temporal_layers_<= 3);
+    for (int i = 0; i < num_temporal_layers_; i++) {
+      float bit_rate_ratio =
+          kVp8LayerRateAlloction[num_temporal_layers_ - 1][i];
+      if (i > 0) {
+        float bit_rate_delta_ratio = kVp8LayerRateAlloction
+            [num_temporal_layers_ - 1][i] -
+            kVp8LayerRateAlloction[num_temporal_layers_ - 1][i - 1];
+        bit_rate_layer_[i] = bit_rate_ * bit_rate_delta_ratio;
+      } else {
+        bit_rate_layer_[i] = bit_rate_ * bit_rate_ratio;
+      }
+      frame_rate_layer_[i] = frame_rate_ / static_cast<float>(
+          1 << (num_temporal_layers_ - 1));
+    }
+    if (num_temporal_layers_ == 3) {
+      frame_rate_layer_[2] = frame_rate_ / 2.0f;
+    }
+  }
+
+  VideoFrameType FrameType(int frame_number) {
+    if (frame_number == 0 || ((frame_number) % key_frame_interval_ == 0 &&
+        key_frame_interval_ > 0)) {
+      return kKeyFrame;
+    } else {
+      return kDeltaFrame;
+    }
+  }
+
+  void TearDown() {
+    delete processor_;
+    delete packet_manipulator_;
+    delete frame_writer_;
+    delete frame_reader_;
+    delete decoder_;
+    delete encoder_;
+  }
+
+  // Processes all frames in the clip and verifies the result.
+  void ProcessFramesAndVerify(QualityMetrics quality_metrics,
+                              RateProfile rate_profile,
+                              CodecConfigPars process,
+                              RateControlMetrics* rc_metrics) {
+    // Codec/config settings.
+    start_bitrate_ = rate_profile.target_bit_rate[0];
+    packet_loss_ = process.packet_loss;
+    key_frame_interval_ = process.key_frame_interval;
+    num_temporal_layers_ = process.num_temporal_layers;
+    error_concealment_on_ = process.error_concealment_on;
+    denoising_on_ = process.denoising_on;
+    SetUpCodecConfig();
+    // Update the layers and the codec with the initial rates.
+    bit_rate_ =  rate_profile.target_bit_rate[0];
+    frame_rate_ = rate_profile.input_frame_rate[0];
+    SetLayerRates();
+    // Set the initial target size for key frame.
+    target_size_key_frame_initial_ = 0.5 * kInitialBufferSize *
+        bit_rate_layer_[0];
+    processor_->SetRates(bit_rate_, frame_rate_);
+    // Process each frame, up to |num_frames|.
+    int num_frames = rate_profile.num_frames;
+    int update_index = 0;
+    ResetRateControlMetrics(
+        rate_profile.frame_index_rate_update[update_index + 1]);
+    int frame_number = 0;
+    VideoFrameType frame_type = kDeltaFrame;
+    while (processor_->ProcessFrame(frame_number) &&
+        frame_number < num_frames) {
+      // Get the layer index for the frame |frame_number|.
+      LayerIndexForFrame(frame_number);
+      frame_type = FrameType(frame_number);
+      // Counter for whole sequence run.
+      ++frame_number;
+      // Counters for each rate update.
+      ++num_frames_per_update_[layer_];
+      ++num_frames_total_;
+      UpdateRateControlMetrics(frame_number, frame_type);
+      // If we hit another/next update, verify stats for current state and
+      // update layers and codec with new rates.
+      if (frame_number ==
+          rate_profile.frame_index_rate_update[update_index + 1]) {
+        VerifyRateControl(
+            update_index,
+            rc_metrics[update_index].max_key_frame_size_mismatch,
+            rc_metrics[update_index].max_delta_frame_size_mismatch,
+            rc_metrics[update_index].max_encoding_rate_mismatch,
+            rc_metrics[update_index].max_time_hit_target,
+            rc_metrics[update_index].max_num_dropped_frames,
+            rc_metrics[update_index].num_spatial_resizes);
+        // Update layer rates and the codec with new rates.
+        ++update_index;
+        bit_rate_ =  rate_profile.target_bit_rate[update_index];
+        frame_rate_ = rate_profile.input_frame_rate[update_index];
+        SetLayerRates();
+        ResetRateControlMetrics(rate_profile.
+                                frame_index_rate_update[update_index + 1]);
+        processor_->SetRates(bit_rate_, frame_rate_);
+      }
+    }
+    VerifyRateControl(
+        update_index,
+        rc_metrics[update_index].max_key_frame_size_mismatch,
+        rc_metrics[update_index].max_delta_frame_size_mismatch,
+        rc_metrics[update_index].max_encoding_rate_mismatch,
+        rc_metrics[update_index].max_time_hit_target,
+        rc_metrics[update_index].max_num_dropped_frames,
+        rc_metrics[update_index].num_spatial_resizes);
+    EXPECT_EQ(num_frames, frame_number);
+    EXPECT_EQ(num_frames + 1, static_cast<int>(stats_.stats_.size()));
+
+    // Release encoder and decoder to make sure they have finished processing:
+    EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
+    EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Release());
+    // Close the files before we start using them for SSIM/PSNR calculations.
+    frame_reader_->Close();
+    frame_writer_->Close();
+
+    // TODO(marpan): should compute these quality metrics per SetRates update.
+    webrtc::test::QualityMetricsResult psnr_result, ssim_result;
+    EXPECT_EQ(0, webrtc::test::I420MetricsFromFiles(
+        config_.input_filename.c_str(),
+        config_.output_filename.c_str(),
+        config_.codec_settings->width,
+        config_.codec_settings->height,
+        &psnr_result,
+        &ssim_result));
+    printf("PSNR avg: %f, min: %f    SSIM avg: %f, min: %f\n",
+           psnr_result.average, psnr_result.min,
+           ssim_result.average, ssim_result.min);
+    stats_.PrintSummary();
+    EXPECT_GT(psnr_result.average, quality_metrics.minimum_avg_psnr);
+    EXPECT_GT(psnr_result.min, quality_metrics.minimum_min_psnr);
+    EXPECT_GT(ssim_result.average, quality_metrics.minimum_avg_ssim);
+    EXPECT_GT(ssim_result.min, quality_metrics.minimum_min_ssim);
+  }
+};
+
+void SetRateProfilePars(RateProfile* rate_profile,
+                        int update_index,
+                        int bit_rate,
+                        int frame_rate,
+                        int frame_index_rate_update) {
+  rate_profile->target_bit_rate[update_index] = bit_rate;
+  rate_profile->input_frame_rate[update_index] = frame_rate;
+  rate_profile->frame_index_rate_update[update_index] = frame_index_rate_update;
+}
+
+void SetCodecParameters(CodecConfigPars* process_settings,
+                        float packet_loss,
+                        int key_frame_interval,
+                        int num_temporal_layers,
+                        bool error_concealment_on,
+                        bool denoising_on) {
+  process_settings->packet_loss = packet_loss;
+  process_settings->key_frame_interval =  key_frame_interval;
+  process_settings->num_temporal_layers = num_temporal_layers,
+  process_settings->error_concealment_on = error_concealment_on;
+  process_settings->denoising_on = denoising_on;
+}
+
+void SetQualityMetrics(QualityMetrics* quality_metrics,
+                       double minimum_avg_psnr,
+                       double minimum_min_psnr,
+                       double minimum_avg_ssim,
+                       double minimum_min_ssim) {
+  quality_metrics->minimum_avg_psnr = minimum_avg_psnr;
+  quality_metrics->minimum_min_psnr = minimum_min_psnr;
+  quality_metrics->minimum_avg_ssim = minimum_avg_ssim;
+  quality_metrics->minimum_min_ssim = minimum_min_ssim;
+}
+
+void SetRateControlMetrics(RateControlMetrics* rc_metrics,
+                           int update_index,
+                           int max_num_dropped_frames,
+                           int max_key_frame_size_mismatch,
+                           int max_delta_frame_size_mismatch,
+                           int max_encoding_rate_mismatch,
+                           int max_time_hit_target,
+                           int num_spatial_resizes) {
+  rc_metrics[update_index].max_num_dropped_frames = max_num_dropped_frames;
+  rc_metrics[update_index].max_key_frame_size_mismatch =
+      max_key_frame_size_mismatch;
+  rc_metrics[update_index].max_delta_frame_size_mismatch =
+      max_delta_frame_size_mismatch;
+  rc_metrics[update_index].max_encoding_rate_mismatch =
+      max_encoding_rate_mismatch;
+  rc_metrics[update_index].max_time_hit_target = max_time_hit_target;
+  rc_metrics[update_index].num_spatial_resizes = num_spatial_resizes;
+}
+
+// Run with no packet loss and fixed bitrate. Quality should be very high.
+// One key frame (first frame only) in sequence. Setting |key_frame_interval|
+// to -1 below means no periodic key frames in test.
+TEST_F(VideoProcessorIntegrationTest, ProcessZeroPacketLoss) {
+  // Bitrate and frame rate profile.
+  RateProfile rate_profile;
+  SetRateProfilePars(&rate_profile, 0, 500, 30, 0);
+  rate_profile.frame_index_rate_update[1] = kNbrFramesShort + 1;
+  rate_profile.num_frames = kNbrFramesShort;
+  // Codec/network settings.
+  CodecConfigPars process_settings;
+  SetCodecParameters(&process_settings, 0.0f, -1, 1, true, true);
+  // Metrics for expected quality.
+  QualityMetrics quality_metrics;
+  SetQualityMetrics(&quality_metrics, 37.0, 33.0, 0.90, 0.90);
+  // Metrics for rate control.
+  RateControlMetrics rc_metrics[1];
+  SetRateControlMetrics(rc_metrics, 0, 0, 40, 20, 10, 15, 0);
+  ProcessFramesAndVerify(quality_metrics,
+                         rate_profile,
+                         process_settings,
+                         rc_metrics);
+}
+
+// Run with 5% packet loss and fixed bitrate. Quality should be a bit lower.
+// One key frame (first frame only) in sequence.
+TEST_F(VideoProcessorIntegrationTest, Process5PercentPacketLoss) {
+  // Bitrate and frame rate profile.
+  RateProfile rate_profile;
+  SetRateProfilePars(&rate_profile, 0, 500, 30, 0);
+  rate_profile.frame_index_rate_update[1] = kNbrFramesShort + 1;
+  rate_profile.num_frames = kNbrFramesShort;
+  // Codec/network settings.
+  CodecConfigPars process_settings;
+  SetCodecParameters(&process_settings, 0.05f, -1, 1, true, true);
+  // Metrics for expected quality.
+  QualityMetrics quality_metrics;
+  SetQualityMetrics(&quality_metrics, 21.0, 16.0, 0.60, 0.40);
+  // Metrics for rate control.
+  RateControlMetrics rc_metrics[1];
+  SetRateControlMetrics(rc_metrics, 0, 0, 40, 20, 10, 15, 0);
+  ProcessFramesAndVerify(quality_metrics,
+                         rate_profile,
+                         process_settings,
+                         rc_metrics);
+}
+
+// Run with 10% packet loss and fixed bitrate. Quality should be even lower.
+// One key frame (first frame only) in sequence.
+TEST_F(VideoProcessorIntegrationTest, Process10PercentPacketLoss) {
+  // Bitrate and frame rate profile.
+  RateProfile rate_profile;
+  SetRateProfilePars(&rate_profile, 0, 500, 30, 0);
+  rate_profile.frame_index_rate_update[1] = kNbrFramesShort + 1;
+  rate_profile.num_frames = kNbrFramesShort;
+  // Codec/network settings.
+  CodecConfigPars process_settings;
+  SetCodecParameters(&process_settings, 0.1f, -1, 1, true, true);
+  // Metrics for expected quality.
+  QualityMetrics quality_metrics;
+  SetQualityMetrics(&quality_metrics, 19.0, 16.0, 0.50, 0.35);
+  // Metrics for rate control.
+  RateControlMetrics rc_metrics[1];
+  SetRateControlMetrics(rc_metrics, 0, 0, 40, 20, 10, 15, 0);
+  ProcessFramesAndVerify(quality_metrics,
+                         rate_profile,
+                         process_settings,
+                         rc_metrics);
+}
+
+// Run with no packet loss, with varying bitrate (3 rate updates):
+// low to high to medium. Check that quality and encoder response to the new
+// target rate/per-frame bandwidth (for each rate update) is within limits.
+// One key frame (first frame only) in sequence.
+TEST_F(VideoProcessorIntegrationTest, ProcessNoLossChangeBitRate) {
+  // Bitrate and frame rate profile.
+  RateProfile rate_profile;
+  SetRateProfilePars(&rate_profile, 0, 200, 30, 0);
+  SetRateProfilePars(&rate_profile, 1, 800, 30, 100);
+  SetRateProfilePars(&rate_profile, 2, 500, 30, 200);
+  rate_profile.frame_index_rate_update[3] = kNbrFramesLong + 1;
+  rate_profile.num_frames = kNbrFramesLong;
+  // Codec/network settings.
+  CodecConfigPars process_settings;
+  SetCodecParameters(&process_settings, 0.0f, -1, 1, true, true);
+  // Metrics for expected quality.
+  QualityMetrics quality_metrics;
+  SetQualityMetrics(&quality_metrics, 34.0, 32.0, 0.85, 0.80);
+  // Metrics for rate control.
+  RateControlMetrics rc_metrics[3];
+  SetRateControlMetrics(rc_metrics, 0, 0, 45, 20, 10, 15, 0);
+  SetRateControlMetrics(rc_metrics, 1, 0, 0, 25, 20, 10, 0);
+  SetRateControlMetrics(rc_metrics, 2, 0, 0, 25, 15, 10, 0);
+  ProcessFramesAndVerify(quality_metrics,
+                         rate_profile,
+                         process_settings,
+                         rc_metrics);
+}
+
+// Run with no packet loss, with an update (decrease) in frame rate.
+// Lower frame rate means higher per-frame-bandwidth, so easier to encode.
+// At the bitrate in this test, this means better rate control after the
+// update(s) to lower frame rate. So expect less frame drops, and max values
+// for the rate control metrics can be lower. One key frame (first frame only).
+// Note: quality after update should be higher but we currently compute quality
+// metrics avergaed over whole sequence run.
+TEST_F(VideoProcessorIntegrationTest, ProcessNoLossChangeFrameRateFrameDrop) {
+  config_.networking_config.packet_loss_probability = 0;
+  // Bitrate and frame rate profile.
+  RateProfile rate_profile;
+  SetRateProfilePars(&rate_profile, 0, 80, 30, 0);
+  SetRateProfilePars(&rate_profile, 1, 80, 15, 100);
+  SetRateProfilePars(&rate_profile, 2, 80, 10, 200);
+  rate_profile.frame_index_rate_update[3] = kNbrFramesLong + 1;
+  rate_profile.num_frames = kNbrFramesLong;
+  // Codec/network settings.
+  CodecConfigPars process_settings;
+  SetCodecParameters(&process_settings, 0.0f, -1, 1, true, true);
+  // Metrics for expected quality.
+  QualityMetrics quality_metrics;
+  SetQualityMetrics(&quality_metrics, 31.0, 23.0, 0.80, 0.65);
+  quality_metrics.minimum_avg_psnr = 31;
+  quality_metrics.minimum_min_psnr = 23;
+  quality_metrics.minimum_avg_ssim = 0.8;
+  quality_metrics.minimum_min_ssim = 0.65;
+  // Metrics for rate control.
+  RateControlMetrics rc_metrics[3];
+  SetRateControlMetrics(rc_metrics, 0, 40, 20, 75, 15, 60, 0);
+  SetRateControlMetrics(rc_metrics, 1, 10, 0, 25, 10, 35, 0);
+  SetRateControlMetrics(rc_metrics, 2, 0, 0, 20, 10, 10, 0);
+  ProcessFramesAndVerify(quality_metrics,
+                         rate_profile,
+                         process_settings,
+                         rc_metrics);
+}
+
+// Run with no packet loss, at low bitrate, then increase rate somewhat.
+// Key frame is thrown in every 120 frames. Can expect some frame drops after
+// key frame, even at high rate. If resizer is on, expect spatial resize down
+// at first key frame, and back up at second key frame. Expected values for
+// quality and rate control in this test are such that the test should pass
+// with resizing on or off. Error_concealment is off in this test since there
+// is a memory leak with resizing and error concealment.
+TEST_F(VideoProcessorIntegrationTest, ProcessNoLossSpatialResizeFrameDrop) {
+  config_.networking_config.packet_loss_probability = 0;
+  // Bitrate and frame rate profile.
+  RateProfile rate_profile;
+  SetRateProfilePars(&rate_profile, 0, 80, 30, 0);
+  SetRateProfilePars(&rate_profile, 1, 200, 30, 120);
+  SetRateProfilePars(&rate_profile, 2, 200, 30, 240);
+  rate_profile.frame_index_rate_update[3] = kNbrFramesLong + 1;
+  rate_profile.num_frames = kNbrFramesLong;
+  // Codec/network settings.
+  CodecConfigPars process_settings;
+  SetCodecParameters(&process_settings, 0.0f, 120, 1, false, true);
+  // Metrics for expected quality.: lower quality on average from up-sampling
+  // the down-sampled portion of the run, in case resizer is on.
+  QualityMetrics quality_metrics;
+  SetQualityMetrics(&quality_metrics, 29.0, 20.0, 0.75, 0.60);
+  // Metrics for rate control.
+  RateControlMetrics rc_metrics[3];
+  SetRateControlMetrics(rc_metrics, 0, 45, 20, 75, 20, 70, 0);
+  SetRateControlMetrics(rc_metrics, 1, 20, 35, 30, 20, 15, 1);
+  // TODO(marpan): Lower this mismatch value for key frame when we upgrade to
+  // new libvpx: currently there is bug in the QP selection.
+  SetRateControlMetrics(rc_metrics, 2, 0, 110, 30, 15, 25, 1);
+  ProcessFramesAndVerify(quality_metrics,
+                         rate_profile,
+                         process_settings,
+                         rc_metrics);
+}
+
+// Run with no packet loss, with 3 temporal layers, with a rate update in the
+// middle of the sequence. The max values for the frame size mismatch and
+// encoding rate mismatch are applied to each layer.
+// No dropped frames in this test, and the denoiser is off for temporal layers.
+// One key frame (first frame only) in sequence, so no spatial resizing.
+TEST_F(VideoProcessorIntegrationTest, ProcessNoLossTemporalLayers) {
+  config_.networking_config.packet_loss_probability = 0;
+  // Bitrate and frame rate profile.
+  RateProfile rate_profile;
+  SetRateProfilePars(&rate_profile, 0, 200, 30, 0);
+  SetRateProfilePars(&rate_profile, 1, 400, 30, 150);
+  rate_profile.frame_index_rate_update[2] = kNbrFramesLong + 1;
+  rate_profile.num_frames = kNbrFramesLong;
+  // Codec/network settings.
+  CodecConfigPars process_settings;
+  SetCodecParameters(&process_settings, 0.0f, -1, 3, true, false);
+  // Metrics for expected quality.
+  QualityMetrics quality_metrics;
+  SetQualityMetrics(&quality_metrics, 32.5, 30.0, 0.85, 0.80);
+  // Metrics for rate control.
+  RateControlMetrics rc_metrics[2];
+  SetRateControlMetrics(rc_metrics, 0, 0, 20, 30, 10, 10, 0);
+  SetRateControlMetrics(rc_metrics, 1, 0, 0, 30, 15, 10, 1);
+  ProcessFramesAndVerify(quality_metrics,
+                         rate_profile,
+                         process_settings,
+                         rc_metrics);
+}
+}  // namespace webrtc
diff --git a/src/modules/video_coding/codecs/test/videoprocessor_unittest.cc b/src/modules/video_coding/codecs/test/videoprocessor_unittest.cc
new file mode 100644
index 0000000..d51ef6b
--- /dev/null
+++ b/src/modules/video_coding/codecs/test/videoprocessor_unittest.cc
@@ -0,0 +1,99 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gtest/gtest.h"
+#include "gmock/gmock.h"
+#include "modules/video_coding/codecs/test/mock/mock_packet_manipulator.h"
+#include "modules/video_coding/codecs/test/videoprocessor.h"
+#include "modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h"
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "testsupport/mock/mock_frame_reader.h"
+#include "testsupport/mock/mock_frame_writer.h"
+#include "testsupport/packet_reader.h"
+#include "testsupport/unittest_utils.h"
+#include "typedefs.h"
+
+using ::testing::_;
+using ::testing::AtLeast;
+using ::testing::Return;
+
+namespace webrtc {
+namespace test {
+
+// Very basic testing for VideoProcessor. It's mostly tested by running the
+// video_quality_measurement program.
+class VideoProcessorTest: public testing::Test {
+ protected:
+  MockVideoEncoder encoder_mock_;
+  MockVideoDecoder decoder_mock_;
+  MockFrameReader frame_reader_mock_;
+  MockFrameWriter frame_writer_mock_;
+  MockPacketManipulator packet_manipulator_mock_;
+  Stats stats_;
+  TestConfig config_;
+  VideoCodec codec_settings_;
+
+  VideoProcessorTest() {}
+  virtual ~VideoProcessorTest() {}
+  void SetUp() {
+    // Get a codec configuration struct and configure it.
+    VideoCodingModule::Codec(kVideoCodecVP8, &codec_settings_);
+    config_.codec_settings = &codec_settings_;
+    config_.codec_settings->startBitrate = 100;
+    config_.codec_settings->width = 352;
+    config_.codec_settings->height = 288;
+  }
+  void TearDown() {}
+
+  void ExpectInit() {
+    EXPECT_CALL(encoder_mock_, InitEncode(_, _, _))
+      .Times(1);
+    EXPECT_CALL(encoder_mock_, RegisterEncodeCompleteCallback(_))
+      .Times(AtLeast(1));
+    EXPECT_CALL(decoder_mock_, InitDecode(_, _))
+      .Times(1);
+    EXPECT_CALL(decoder_mock_, RegisterDecodeCompleteCallback(_))
+      .Times(AtLeast(1));
+    EXPECT_CALL(frame_reader_mock_, NumberOfFrames())
+      .WillOnce(Return(1));
+    EXPECT_CALL(frame_reader_mock_, FrameLength())
+      .WillOnce(Return(150000));
+  }
+};
+
+TEST_F(VideoProcessorTest, Init) {
+  ExpectInit();
+  VideoProcessorImpl video_processor(&encoder_mock_, &decoder_mock_,
+                                     &frame_reader_mock_,
+                                     &frame_writer_mock_,
+                                     &packet_manipulator_mock_, config_,
+                                     &stats_);
+  ASSERT_TRUE(video_processor.Init());
+}
+
+TEST_F(VideoProcessorTest, ProcessFrame) {
+  ExpectInit();
+  EXPECT_CALL(encoder_mock_, Encode(_, _, _))
+    .Times(1);
+  EXPECT_CALL(frame_reader_mock_, ReadFrame(_))
+    .WillOnce(Return(true));
+  // Since we don't return any callback from the mock, the decoder will not
+  // be more than initialized...
+  VideoProcessorImpl video_processor(&encoder_mock_, &decoder_mock_,
+                                     &frame_reader_mock_,
+                                     &frame_writer_mock_,
+                                     &packet_manipulator_mock_, config_,
+                                     &stats_);
+  ASSERT_TRUE(video_processor.Init());
+  video_processor.ProcessFrame(0);
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/src/modules/video_coding/codecs/test_framework/benchmark.cc b/src/modules/video_coding/codecs/test_framework/benchmark.cc
new file mode 100644
index 0000000..e3048ae
--- /dev/null
+++ b/src/modules/video_coding/codecs/test_framework/benchmark.cc
@@ -0,0 +1,310 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "benchmark.h"
+
+#include <cassert>
+#include <iostream>
+#include <sstream>
+#include <vector>
+#if defined(_WIN32)
+    #include <windows.h>
+#endif
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "system_wrappers/interface/event_wrapper.h"
+#include "modules/video_coding/codecs/test_framework/video_source.h"
+#include "testsupport/fileutils.h"
+#include "testsupport/metrics/video_metrics.h"
+
+#define SSIM_CALC 0 // by default, don't compute SSIM
+
+using namespace webrtc;
+
+Benchmark::Benchmark()
+:
+NormalAsyncTest("Benchmark", "Codec benchmark over a range of test cases", 6),
+_resultsFileName(webrtc::test::OutputPath() + "benchmark.txt"),
+_codecName("Default")
+{
+}
+
+Benchmark::Benchmark(std::string name, std::string description)
+:
+NormalAsyncTest(name, description, 6),
+_resultsFileName(webrtc::test::OutputPath() + "benchmark.txt"),
+_codecName("Default")
+{
+}
+
+Benchmark::Benchmark(std::string name, std::string description, std::string resultsFileName, std::string codecName)
+:
+NormalAsyncTest(name, description, 6),
+_resultsFileName(resultsFileName),
+_codecName(codecName)
+{
+}
+
+void
+Benchmark::Perform()
+{
+    std::vector<const VideoSource*> sources;
+    std::vector<const VideoSource*>::iterator it;
+
+    // Configuration --------------------------
+    sources.push_back(new const VideoSource(webrtc::test::ProjectRootPath() +
+                                            "resources/foreman_cif.yuv", kCIF));
+//    sources.push_back(new const VideoSource(webrtc::test::ProjectRootPath() +
+//                                            "resources/akiyo_cif.yuv", kCIF));
+
+    const VideoSize size[] = {kQCIF, kCIF};
+    const int frameRate[] = {10, 15, 30};
+    // Specifies the framerates for which to perform a speed test.
+    const bool speedTestMask[] = {false, false, false};
+    const int bitRate[] = {50, 100, 200, 300, 400, 500, 600, 1000};
+    // Determines the number of iterations to perform to arrive at the speed result.
+    enum { kSpeedTestIterations = 10 };
+    // ----------------------------------------
+
+    const int nFrameRates = sizeof(frameRate)/sizeof(*frameRate);
+    assert(sizeof(speedTestMask)/sizeof(*speedTestMask) == nFrameRates);
+    const int nBitrates = sizeof(bitRate)/sizeof(*bitRate);
+    int testIterations = 10;
+
+    webrtc::test::QualityMetricsResult psnr[nBitrates];
+    webrtc::test::QualityMetricsResult ssim[nBitrates];
+    double fps[nBitrates];
+    double totalEncodeTime[nBitrates];
+    double totalDecodeTime[nBitrates];
+
+    _results.open(_resultsFileName.c_str(), std::fstream::out);
+    _results << GetMagicStr() << std::endl;
+    _results << _codecName << std::endl;
+
+    for (it = sources.begin() ; it < sources.end(); it++)
+    {
+        for (int i = 0; i < static_cast<int>(sizeof(size)/sizeof(*size)); i++)
+        {
+            for (int j = 0; j < nFrameRates; j++)
+            {
+                std::stringstream ss;
+                std::string strFrameRate;
+                std::string outFileName;
+                ss << frameRate[j];
+                ss >> strFrameRate;
+                outFileName = (*it)->GetFilePath() + "/" + (*it)->GetName() + "_" +
+                    VideoSource::GetSizeString(size[i]) + "_" + strFrameRate + ".yuv";
+
+                _target = new const VideoSource(outFileName, size[i], frameRate[j]);
+                (*it)->Convert(*_target);
+                if (VideoSource::FileExists(outFileName.c_str()))
+                {
+                    _inname = outFileName;
+                }
+                else
+                {
+                    _inname = (*it)->GetFileName();
+                }
+
+                std::cout << (*it)->GetName() << ", " << VideoSource::GetSizeString(size[i])
+                    << ", " << frameRate[j] << " fps" << std::endl << "Bitrate [kbps]:";
+                _results << (*it)->GetName() << "," << VideoSource::GetSizeString(size[i])
+                    << "," << frameRate[j] << " fps" << std::endl << "Bitrate [kbps]";
+
+                if (speedTestMask[j])
+                {
+                    testIterations = kSpeedTestIterations;
+                }
+                else
+                {
+                    testIterations = 1;
+                }
+
+                for (int k = 0; k < nBitrates; k++)
+                {
+                    _bitRate = (bitRate[k]);
+                    double avgFps = 0.0;
+                    totalEncodeTime[k] = 0;
+                    totalDecodeTime[k] = 0;
+
+                    for (int l = 0; l < testIterations; l++)
+                    {
+                        PerformNormalTest();
+                        _appendNext = false;
+
+                        avgFps += _framecnt / (_totalEncodeTime + _totalDecodeTime);
+                        totalEncodeTime[k] += _totalEncodeTime;
+                        totalDecodeTime[k] += _totalDecodeTime;
+
+                    }
+                    avgFps /= testIterations;
+                    totalEncodeTime[k] /= testIterations;
+                    totalDecodeTime[k] /= testIterations;
+
+                    double actualBitRate = ActualBitRate(_framecnt) / 1000.0;
+                    std::cout << " " << actualBitRate;
+                    _results << "," << actualBitRate;
+                    webrtc::test::QualityMetricsResult psnr_result;
+                    I420PSNRFromFiles(_inname.c_str(), _outname.c_str(),
+                                      _inst.width, _inst.height, &psnr[k]);
+                    if (SSIM_CALC)
+                    {
+                        webrtc::test::QualityMetricsResult ssim_result;
+                        I420SSIMFromFiles(_inname.c_str(), _outname.c_str(),
+                                          _inst.width, _inst.height, &ssim[k]);
+
+                    }
+                    fps[k] = avgFps;
+                }
+                std::cout << std::endl << "Y-PSNR [dB]:";
+                _results << std::endl << "Y-PSNR [dB]";
+                for (int k = 0; k < nBitrates; k++)
+                {
+                    std::cout << " " << psnr[k].average;
+                    _results << "," << psnr[k].average;
+
+                }
+                if (SSIM_CALC)
+                {
+                    std::cout << std::endl << "SSIM: ";
+                    _results << std::endl << "SSIM ";
+                    for (int k = 0; k < nBitrates; k++)
+                    {
+                        std::cout << " " << ssim[k].average;
+                        _results << "," << ssim[k].average;
+                    }
+
+                }
+
+                std::cout << std::endl << "Encode Time[ms]:";
+                _results << std::endl << "Encode Time[ms]";
+                for (int k = 0; k < nBitrates; k++)
+                {
+                    std::cout << " " << totalEncodeTime[k];
+                    _results << "," << totalEncodeTime[k];
+
+                }
+
+                std::cout << std::endl << "Decode Time[ms]:";
+                _results << std::endl << "Decode Time[ms]";
+                for (int k = 0; k < nBitrates; k++)
+                {
+                    std::cout << " " << totalDecodeTime[k];
+                    _results << "," << totalDecodeTime[k];
+
+                }
+
+                if (speedTestMask[j])
+                {
+                    std::cout << std::endl << "Speed [fps]:";
+                    _results << std::endl << "Speed [fps]";
+                    for (int k = 0; k < nBitrates; k++)
+                    {
+                        std::cout << " " << static_cast<int>(fps[k] + 0.5);
+                        _results << "," << static_cast<int>(fps[k] + 0.5);
+                    }
+                }
+                std::cout << std::endl << std::endl;
+                _results << std::endl << std::endl;
+
+                delete _target;
+            }
+        }
+        delete *it;
+    }
+    _results.close();
+}
+
+void
+Benchmark::PerformNormalTest()
+{
+    _encoder = GetNewEncoder();
+    _decoder = GetNewDecoder();
+    CodecSettings(_target->GetWidth(), _target->GetHeight(), _target->GetFrameRate(), _bitRate);
+    Setup();
+    EventWrapper* waitEvent = EventWrapper::Create();
+
+    _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+    _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+    _encoder->InitEncode(&_inst, 4, 1440);
+    CodecSpecific_InitBitrate();
+    _decoder->InitDecode(&_inst,1);
+
+    FrameQueue frameQueue;
+    VideoEncodeCompleteCallback encCallback(_encodedFile, &frameQueue, *this);
+    VideoDecodeCompleteCallback decCallback(_decodedFile, *this);
+    _encoder->RegisterEncodeCompleteCallback(&encCallback);
+    _decoder->RegisterDecodeCompleteCallback(&decCallback);
+
+    SetCodecSpecificParameters();
+
+    _totalEncodeTime = _totalDecodeTime = 0;
+    _totalEncodePipeTime = _totalDecodePipeTime = 0;
+    bool complete = false;
+    _framecnt = 0;
+    _encFrameCnt = 0;
+    _sumEncBytes = 0;
+    _lengthEncFrame = 0;
+    while (!complete)
+    {
+        complete = Encode();
+        if (!frameQueue.Empty() || complete)
+        {
+            while (!frameQueue.Empty())
+            {
+                _frameToDecode = static_cast<FrameQueueTuple *>(frameQueue.PopFrame());
+                DoPacketLoss();
+                int ret = Decode();
+                delete _frameToDecode;
+                _frameToDecode = NULL;
+                if (ret < 0)
+                {
+                    fprintf(stderr,"\n\nError in decoder: %d\n\n", ret);
+                    exit(EXIT_FAILURE);
+                }
+                else if (ret == 0)
+                {
+                    _framecnt++;
+                }
+                else
+                {
+                    fprintf(stderr, "\n\nPositive return value from decode!\n\n");
+                }
+            }
+        }
+        waitEvent->Wait(5);
+    }
+
+    _inputVideoBuffer.Free();
+    //_encodedVideoBuffer.Reset(); ?
+    _encodedVideoBuffer.Free();
+    _decodedVideoBuffer.Free();
+
+    _encoder->Release();
+    _decoder->Release();
+    delete waitEvent;
+    delete _encoder;
+    delete _decoder;
+    Teardown();
+}
+
+void
+Benchmark::CodecSpecific_InitBitrate()
+{
+    if (_bitRate == 0)
+    {
+        _encoder->SetRates(600, _inst.maxFramerate);
+    }
+    else
+    {
+        _encoder->SetRates(_bitRate, _inst.maxFramerate);
+    }
+}
+
diff --git a/src/modules/video_coding/codecs/test_framework/benchmark.h b/src/modules/video_coding/codecs/test_framework/benchmark.h
new file mode 100644
index 0000000..57806e5
--- /dev/null
+++ b/src/modules/video_coding/codecs/test_framework/benchmark.h
@@ -0,0 +1,40 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_BENCHMARK_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_BENCHMARK_H_
+
+#include "normal_async_test.h"
+
+class VideoSource;
+
+class Benchmark : public NormalAsyncTest
+{
+public:
+    Benchmark();
+    virtual void Perform();
+
+protected:
+    Benchmark(std::string name, std::string description);
+    Benchmark(std::string name, std::string description, std::string resultsFileName, std::string codecName);
+    virtual webrtc::VideoEncoder* GetNewEncoder() = 0;
+    virtual webrtc::VideoDecoder* GetNewDecoder() = 0;
+    virtual void PerformNormalTest();
+    virtual void CodecSpecific_InitBitrate();
+    static const char* GetMagicStr() { return "#!benchmark1.0"; }
+
+    const VideoSource* _target;
+    std::string        _resultsFileName;
+    std::ofstream      _results;
+    std::string        _codecName;
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_BENCHMARK_H_
+
diff --git a/src/modules/video_coding/codecs/test_framework/exportfig.m b/src/modules/video_coding/codecs/test_framework/exportfig.m
new file mode 100644
index 0000000..d0d5ed9
--- /dev/null
+++ b/src/modules/video_coding/codecs/test_framework/exportfig.m
@@ -0,0 +1,500 @@
+function exportfig(varargin)
+%EXPORTFIG  Export a figure to Encapsulated Postscript.
+%   EXPORTFIG(H, FILENAME) writes the figure H to FILENAME.  H is
+%   a figure handle and FILENAME is a string that specifies the
+%   name of the output file.
+%
+%   EXPORTFIG(...,PARAM1,VAL1,PARAM2,VAL2,...) specifies
+%   parameters that control various characteristics of the output
+%   file.
+%
+%   Format Paramter:
+%     'Format'  one of the strings 'eps','eps2','jpeg','png','preview'
+%          specifies the output format. Defaults to 'eps'.
+%          The output format 'preview' does not generate an output
+%          file but instead creates a new figure window with a
+%          preview of the exported figure. In this case the
+%          FILENAME parameter is ignored.
+%
+%     'Preview' one of the strings 'none', 'tiff'
+%          specifies a preview for EPS files. Defaults to 'none'.
+%
+%   Size Parameters:
+%     'Width'   a positive scalar
+%          specifies the width in the figure's PaperUnits
+%     'Height'  a positive scalar
+%          specifies the height in the figure's PaperUnits
+%
+%     Specifying only one dimension sets the other dimension
+%     so that the exported aspect ratio is the same as the
+%     figure's current aspect ratio. 
+%     If neither dimension is specified the size defaults to 
+%     the width and height from the figure's PaperPosition. 
+%           
+%   Rendering Parameters:
+%     'Color'     one of the strings 'bw', 'gray', 'cmyk'
+%         'bw'    specifies that lines and text are exported in
+%                 black and all other objects in grayscale
+%         'gray'  specifies that all objects are exported in grayscale
+%         'cmyk'  specifies that all objects are exported in color
+%                 using the CMYK color space
+%     'Renderer'  one of the strings 'painters', 'zbuffer', 'opengl'
+%         specifies the renderer to use
+%     'Resolution'   a positive scalar
+%         specifies the resolution in dots-per-inch.
+%     
+%     The default color setting is 'bw'.
+%
+%   Font Parameters:
+%     'FontMode'     one of the strings 'scaled', 'fixed'
+%     'FontSize'     a positive scalar
+%          in 'scaled' mode multiplies with the font size of each
+%          text object to obtain the exported font size
+%          in 'fixed' mode specifies the font size of all text
+%          objects in points
+%     'FontEncoding' one of the strings 'latin1', 'adobe'
+%          specifies the character encoding of the font
+%
+%     If FontMode is 'scaled' but FontSize is not specified then a
+%     scaling factor is computed from the ratio of the size of the
+%     exported figure to the size of the actual figure. The minimum
+%     font size allowed after scaling is 5 points.
+%     If FontMode is 'fixed' but FontSize is not specified then the
+%     exported font sizes of all text objects is 7 points.
+%
+%     The default 'FontMode' setting is 'scaled'.
+%
+%   Line Width Parameters:
+%     'LineMode'     one of the strings 'scaled', 'fixed'
+%     'LineWidth'    a positive scalar
+%          the semantics of LineMode and LineWidth are exactly the
+%          same as FontMode and FontSize, except that they apply
+%          to line widths instead of font sizes. The minumum line
+%          width allowed after scaling is 0.5 points.
+%          If LineMode is 'fixed' but LineWidth is not specified 
+%          then the exported line width of all line objects is 1
+%          point. 
+%
+%   Examples:
+%     exportfig(gcf,'fig1.eps','height',3);
+%       Exports the current figure to the file named 'fig1.eps' with
+%       a height of 3 inches (assuming the figure's PaperUnits is 
+%       inches) and an aspect ratio the same as the figure's aspect
+%       ratio on screen.
+%
+%     exportfig(gcf, 'fig2.eps', 'FontMode', 'fixed',...
+%                'FontSize', 10, 'color', 'cmyk' );
+%       Exports the current figure to 'fig2.eps' in color with all
+%       text in 10 point fonts. The size of the exported figure is
+%       the figure's PaperPostion width and height.
+
+
+if (nargin < 2)
+  error('Too few input arguments');
+end
+
+% exportfig(H, filename, ...)
+H = varargin{1};
+if ~ishandle(H) | ~strcmp(get(H,'type'), 'figure')
+  error('First argument must be a handle to a figure.');
+end
+filename = varargin{2};
+if ~ischar(filename)
+  error('Second argument must be a string.');
+end
+paramPairs = varargin(3:end);
+
+% Do some validity checking on param-value pairs
+if (rem(length(paramPairs),2) ~= 0)
+  error(['Invalid input syntax. Optional parameters and values' ...
+	 ' must be in pairs.']);
+end
+
+format = 'eps';
+preview = 'none';
+width = -1;
+height = -1;
+color = 'bw';
+fontsize = -1;
+fontmode='scaled';
+linewidth = -1;
+linemode=[];
+fontencoding = 'latin1';
+renderer = [];
+resolution = [];
+
+% Process param-value pairs
+args = {};
+for k = 1:2:length(paramPairs)
+  param = lower(paramPairs{k});
+  if (~ischar(param))
+    error('Optional parameter names must be strings');
+  end
+  value = paramPairs{k+1};
+  
+  switch (param)
+   case 'format'
+    format = value;
+    if (~strcmp(format,{'eps','eps2','jpeg','png','preview'}))
+      error(['Format must be ''eps'', ''eps2'', ''jpeg'', ''png'' or' ...
+	     ' ''preview''.']);
+    end
+   case 'preview'
+    preview = value;
+    if (~strcmp(preview,{'none','tiff'}))
+      error('Preview must be ''none'' or ''tiff''.');
+    end
+   case 'width'
+    width = LocalToNum(value);
+    if(~LocalIsPositiveScalar(width))
+      error('Width must be a numeric scalar > 0');
+    end
+   case 'height'
+    height = LocalToNum(value);
+    if(~LocalIsPositiveScalar(height))
+      error('Height must be a numeric scalar > 0');
+    end
+   case 'color'
+    color = lower(value);
+    if (~strcmp(color,{'bw','gray','cmyk'}))
+      error('Color must be ''bw'', ''gray'' or ''cmyk''.');
+    end
+   case 'fontmode'
+    fontmode = lower(value);
+    if (~strcmp(fontmode,{'scaled','fixed'}))
+      error('FontMode must be ''scaled'' or ''fixed''.');
+    end
+   case 'fontsize'
+    fontsize = LocalToNum(value);
+    if(~LocalIsPositiveScalar(fontsize))
+      error('FontSize must be a numeric scalar > 0');
+    end
+   case 'fontencoding'
+    fontencoding = lower(value);
+    if (~strcmp(fontencoding,{'latin1','adobe'}))
+      error('FontEncoding must be ''latin1'' or ''adobe''.');
+    end
+   case 'linemode'
+    linemode = lower(value);
+    if (~strcmp(linemode,{'scaled','fixed'}))
+      error('LineMode must be ''scaled'' or ''fixed''.');
+    end
+   case 'linewidth'
+    linewidth = LocalToNum(value);
+    if(~LocalIsPositiveScalar(linewidth))
+      error('LineWidth must be a numeric scalar > 0');
+    end
+   case 'renderer'
+    renderer = lower(value);
+    if (~strcmp(renderer,{'painters','zbuffer','opengl'}))
+      error('Renderer must be ''painters'', ''zbuffer'' or ''opengl''.');
+    end
+   case 'resolution'
+    resolution = LocalToNum(value);
+    if ~(isnumeric(value) & (prod(size(value)) == 1) & (value >= 0));
+      error('Resolution must be a numeric scalar >= 0');
+    end
+   otherwise
+    error(['Unrecognized option ' param '.']);
+  end
+end
+
+allLines  = findall(H, 'type', 'line');
+allText   = findall(H, 'type', 'text');
+allAxes   = findall(H, 'type', 'axes');
+allImages = findall(H, 'type', 'image');
+allLights = findall(H, 'type', 'light');
+allPatch  = findall(H, 'type', 'patch');
+allSurf   = findall(H, 'type', 'surface');
+allRect   = findall(H, 'type', 'rectangle');
+allFont   = [allText; allAxes];
+allColor  = [allLines; allText; allAxes; allLights];
+allMarker = [allLines; allPatch; allSurf];
+allEdge   = [allPatch; allSurf];
+allCData  = [allImages; allPatch; allSurf];
+
+old.objs = {};
+old.prop = {};
+old.values = {};
+
+% Process format and preview parameter
+showPreview = strcmp(format,'preview');
+if showPreview
+  format = 'png';
+  filename = [tempName '.png'];
+end
+if strncmp(format,'eps',3) & ~strcmp(preview,'none')
+  args = {args{:}, ['-' preview]};
+end
+
+hadError = 0;
+try
+  % Process size parameters
+  paperPos = get(H, 'PaperPosition');
+  old = LocalPushOldData(old, H, 'PaperPosition', paperPos);
+  figureUnits = get(H, 'Units');
+  set(H, 'Units', get(H,'PaperUnits'));
+  figurePos = get(H, 'Position');
+  aspectRatio = figurePos(3)/figurePos(4);
+  set(H, 'Units', figureUnits);
+  if (width == -1) & (height == -1)
+    width = paperPos(3);
+    height = paperPos(4);
+  elseif (width == -1)
+    width = height * aspectRatio;
+  elseif (height == -1)
+    height = width / aspectRatio;
+  end
+  set(H, 'PaperPosition', [0 0 width height]);
+  paperPosMode = get(H, 'PaperPositionMode');
+  old = LocalPushOldData(old, H, 'PaperPositionMode', paperPosMode);
+  set(H, 'PaperPositionMode', 'manual');
+
+  % Process rendering parameters
+  switch (color)
+   case {'bw', 'gray'}
+    if ~strcmp(color,'bw') & strncmp(format,'eps',3)
+      format = [format 'c'];
+    end
+    args = {args{:}, ['-d' format]};
+
+    %compute and set gray colormap
+    oldcmap = get(H,'Colormap');
+    newgrays = 0.30*oldcmap(:,1) + 0.59*oldcmap(:,2) + 0.11*oldcmap(:,3);
+    newcmap = [newgrays newgrays newgrays];
+    old = LocalPushOldData(old, H, 'Colormap', oldcmap);
+    set(H, 'Colormap', newcmap);
+
+    %compute and set ColorSpec and CData properties
+    old = LocalUpdateColors(allColor, 'color', old);
+    old = LocalUpdateColors(allAxes, 'xcolor', old);
+    old = LocalUpdateColors(allAxes, 'ycolor', old);
+    old = LocalUpdateColors(allAxes, 'zcolor', old);
+    old = LocalUpdateColors(allMarker, 'MarkerEdgeColor', old);
+    old = LocalUpdateColors(allMarker, 'MarkerFaceColor', old);
+    old = LocalUpdateColors(allEdge, 'EdgeColor', old);
+    old = LocalUpdateColors(allEdge, 'FaceColor', old);
+    old = LocalUpdateColors(allCData, 'CData', old);
+    
+   case 'cmyk'
+    if strncmp(format,'eps',3)
+      format = [format 'c'];
+      args = {args{:}, ['-d' format], '-cmyk'};
+    else
+      args = {args{:}, ['-d' format]};
+    end
+   otherwise
+    error('Invalid Color parameter');
+  end
+  if (~isempty(renderer))
+    args = {args{:}, ['-' renderer]};
+  end
+  if (~isempty(resolution)) | ~strncmp(format,'eps',3)
+    if isempty(resolution)
+      resolution = 0;
+    end
+    args = {args{:}, ['-r' int2str(resolution)]};
+  end
+
+  % Process font parameters
+  if (~isempty(fontmode))
+    oldfonts = LocalGetAsCell(allFont,'FontSize');
+    switch (fontmode)
+     case 'fixed'
+      oldfontunits = LocalGetAsCell(allFont,'FontUnits');
+      old = LocalPushOldData(old, allFont, {'FontUnits'}, oldfontunits);
+      set(allFont,'FontUnits','points');
+      if (fontsize == -1)
+	set(allFont,'FontSize',7);
+      else
+	set(allFont,'FontSize',fontsize);
+      end
+     case 'scaled'
+      if (fontsize == -1)
+	wscale = width/figurePos(3);
+	hscale = height/figurePos(4);
+	scale = min(wscale, hscale);
+      else
+	scale = fontsize;
+      end
+      newfonts = LocalScale(oldfonts,scale,5);
+      set(allFont,{'FontSize'},newfonts);
+     otherwise
+      error('Invalid FontMode parameter');
+    end
+    % make sure we push the size after the units
+    old = LocalPushOldData(old, allFont, {'FontSize'}, oldfonts);
+  end
+  if strcmp(fontencoding,'adobe') & strncmp(format,'eps',3)
+    args = {args{:}, '-adobecset'};
+  end
+
+  % Process linewidth parameters
+  if (~isempty(linemode))
+    oldlines = LocalGetAsCell(allMarker,'LineWidth');
+    old = LocalPushOldData(old, allMarker, {'LineWidth'}, oldlines);
+    switch (linemode)
+     case 'fixed'
+      if (linewidth == -1)
+	set(allMarker,'LineWidth',1);
+      else
+	set(allMarker,'LineWidth',linewidth);
+      end
+     case 'scaled'
+      if (linewidth == -1)
+	wscale = width/figurePos(3);
+	hscale = height/figurePos(4);
+	scale = min(wscale, hscale);
+      else
+	scale = linewidth;
+      end
+      newlines = LocalScale(oldlines, scale, 0.5);
+      set(allMarker,{'LineWidth'},newlines);
+     otherwise
+      error('Invalid LineMode parameter');
+    end
+  end
+
+  % Export
+  print(H, filename, args{:});
+
+catch
+  hadError = 1;
+end
+
+% Restore figure settings
+for n=1:length(old.objs)
+  set(old.objs{n}, old.prop{n}, old.values{n});
+end
+
+if hadError
+  error(deblank(lasterr));
+end
+
+% Show preview if requested
+if showPreview
+  X = imread(filename,'png');
+  delete(filename);
+  f = figure( 'Name', 'Preview', ...
+	      'Menubar', 'none', ...
+	      'NumberTitle', 'off', ...
+	      'Visible', 'off');
+  image(X);
+  axis image;
+  ax = findobj(f, 'type', 'axes');
+  set(ax, 'Units', get(H,'PaperUnits'), ...
+	  'Position', [0 0 width height], ...
+	  'Visible', 'off');
+  set(ax, 'Units', 'pixels');
+  axesPos = get(ax,'Position');
+  figPos = get(f,'Position');
+  rootSize = get(0,'ScreenSize');
+  figPos(3:4) = axesPos(3:4);
+  if figPos(1) + figPos(3) > rootSize(3)
+    figPos(1) = rootSize(3) - figPos(3) - 50;
+  end
+  if figPos(2) + figPos(4) > rootSize(4)
+    figPos(2) = rootSize(4) - figPos(4) - 50;
+  end
+  set(f, 'Position',figPos, ...
+	 'Visible', 'on');
+end
+
+%
+%  Local Functions
+%
+
+function outData = LocalPushOldData(inData, objs, prop, values)
+outData.objs = {inData.objs{:}, objs};
+outData.prop = {inData.prop{:}, prop};
+outData.values = {inData.values{:}, values};
+
+function cellArray = LocalGetAsCell(fig,prop);
+cellArray = get(fig,prop);
+if (~isempty(cellArray)) & (~iscell(cellArray))
+  cellArray = {cellArray};
+end
+
+function newArray = LocalScale(inArray, scale, minValue)
+n = length(inArray);
+newArray = cell(n,1);
+for k=1:n
+  newArray{k} = max(minValue,scale*inArray{k}(1));
+end
+
+function newArray = LocalMapToGray(inArray);
+n = length(inArray);
+newArray = cell(n,1);
+for k=1:n
+  color = inArray{k};
+  if (~isempty(color))
+    if ischar(color)
+      switch color(1)
+       case 'y'
+	color = [1 1 0];
+       case 'm'
+	color = [1 0 1];
+       case 'c'
+	color = [0 1 1];
+       case 'r'
+	color = [1 0 0];
+       case 'g'
+	color = [0 1 0];
+       case 'b'
+	color = [0 0 1];
+       case 'w'
+	color = [1 1 1];
+       case 'k'
+	color = [0 0 0];
+       otherwise
+	newArray{k} = color;
+      end
+    end
+    if ~ischar(color)
+      color = 0.30*color(1) + 0.59*color(2) + 0.11*color(3);
+    end
+  end
+  if isempty(color) | ischar(color)
+    newArray{k} = color;
+  else
+    newArray{k} = [color color color];
+  end
+end
+
+function newArray = LocalMapCData(inArray);
+n = length(inArray);
+newArray = cell(n,1);
+for k=1:n
+  color = inArray{k};
+  if (ndims(color) == 3) & isa(color,'double')
+    gray = 0.30*color(:,:,1) + 0.59*color(:,:,2) + 0.11*color(:,:,3);
+    color(:,:,1) = gray;
+    color(:,:,2) = gray;
+    color(:,:,3) = gray;
+  end
+  newArray{k} = color;
+end
+
+function outData = LocalUpdateColors(inArray, prop, inData)
+value = LocalGetAsCell(inArray,prop);
+outData.objs = {inData.objs{:}, inArray};
+outData.prop = {inData.prop{:}, {prop}};
+outData.values = {inData.values{:}, value};
+if (~isempty(value))
+  if strcmp(prop,'CData') 
+    value = LocalMapCData(value);
+  else
+    value = LocalMapToGray(value);
+  end
+  set(inArray,{prop},value);
+end
+
+function bool = LocalIsPositiveScalar(value)
+bool = isnumeric(value) & ...
+       prod(size(value)) == 1 & ...
+       value > 0;
+
+function value = LocalToNum(value)
+if ischar(value)
+  value = str2num(value);
+end
diff --git a/src/modules/video_coding/codecs/test_framework/normal_async_test.cc b/src/modules/video_coding/codecs/test_framework/normal_async_test.cc
new file mode 100644
index 0000000..c9081d9
--- /dev/null
+++ b/src/modules/video_coding/codecs/test_framework/normal_async_test.cc
@@ -0,0 +1,599 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "normal_async_test.h"
+
+#include <assert.h>
+#include <string.h>
+#include <sstream>
+#include <queue>
+
+#include "gtest/gtest.h"
+#include "tick_util.h"
+#include "testsupport/fileutils.h"
+#include "typedefs.h"
+
+using namespace webrtc;
+
+NormalAsyncTest::NormalAsyncTest()
+:
+NormalTest("Async Normal Test 1", "A test of normal execution of the codec",
+           _testNo),
+_decodeCompleteTime(0),
+_encodeCompleteTime(0),
+_encFrameCnt(0),
+_decFrameCnt(0),
+_requestKeyFrame(false),
+_testNo(1),
+_appendNext(false),
+_missingFrames(false),
+_rttFrames(0),
+_hasReceivedSLI(false),
+_hasReceivedRPSI(false),
+_hasReceivedPLI(false),
+_waitForKey(false)
+{
+}
+
+NormalAsyncTest::NormalAsyncTest(WebRtc_UWord32 bitRate)
+:
+NormalTest("Async Normal Test 1", "A test of normal execution of the codec",
+           bitRate, _testNo),
+_decodeCompleteTime(0),
+_encodeCompleteTime(0),
+_encFrameCnt(0),
+_decFrameCnt(0),
+_requestKeyFrame(false),
+_testNo(1),
+_appendNext(false),
+_missingFrames(false),
+_rttFrames(0),
+_hasReceivedSLI(false),
+_hasReceivedRPSI(false),
+_hasReceivedPLI(false),
+_waitForKey(false)
+{
+}
+
+NormalAsyncTest::NormalAsyncTest(std::string name, std::string description,
+                                 unsigned int testNo)
+:
+NormalTest(name, description, _testNo),
+_decodeCompleteTime(0),
+_encodeCompleteTime(0),
+_encFrameCnt(0),
+_decFrameCnt(0),
+_requestKeyFrame(false),
+_testNo(testNo),
+_lengthEncFrame(0),
+_appendNext(false),
+_missingFrames(false),
+_rttFrames(0),
+_hasReceivedSLI(false),
+_hasReceivedRPSI(false),
+_hasReceivedPLI(false),
+_waitForKey(false)
+{
+}
+
+NormalAsyncTest::NormalAsyncTest(std::string name, std::string description,
+                                 WebRtc_UWord32 bitRate, unsigned int testNo)
+:
+NormalTest(name, description, bitRate, _testNo),
+_decodeCompleteTime(0),
+_encodeCompleteTime(0),
+_encFrameCnt(0),
+_decFrameCnt(0),
+_requestKeyFrame(false),
+_testNo(testNo),
+_lengthEncFrame(0),
+_appendNext(false),
+_missingFrames(false),
+_rttFrames(0),
+_hasReceivedSLI(false),
+_hasReceivedRPSI(false),
+_hasReceivedPLI(false),
+_waitForKey(false)
+{
+}
+
+NormalAsyncTest::NormalAsyncTest(std::string name, std::string description,
+                                 WebRtc_UWord32 bitRate, unsigned int testNo,
+                                 unsigned int rttFrames)
+:
+NormalTest(name, description, bitRate, _testNo),
+_decodeCompleteTime(0),
+_encodeCompleteTime(0),
+_encFrameCnt(0),
+_decFrameCnt(0),
+_requestKeyFrame(false),
+_testNo(testNo),
+_lengthEncFrame(0),
+_appendNext(false),
+_missingFrames(false),
+_rttFrames(rttFrames),
+_hasReceivedSLI(false),
+_hasReceivedRPSI(false),
+_hasReceivedPLI(false),
+_waitForKey(false)
+{
+}
+
+void
+NormalAsyncTest::Setup()
+{
+    CodecTest::Setup();
+    std::stringstream ss;
+    std::string strTestNo;
+    ss << _testNo;
+    ss >> strTestNo;
+
+    // Check if settings exist. Otherwise use defaults.
+    if (_outname == "")
+    {
+        _outname = webrtc::test::OutputPath() + "out_normaltest" + strTestNo +
+            ".yuv";
+    }
+
+    if (_encodedName == "")
+    {
+        _encodedName = webrtc::test::OutputPath() + "encoded_normaltest" +
+            strTestNo + ".yuv";
+    }
+
+    if ((_sourceFile = fopen(_inname.c_str(), "rb")) == NULL)
+    {
+        printf("Cannot read file %s.\n", _inname.c_str());
+        exit(1);
+    }
+
+    if ((_encodedFile = fopen(_encodedName.c_str(), "wb")) == NULL)
+    {
+        printf("Cannot write encoded file.\n");
+        exit(1);
+    }
+
+    char mode[3] = "wb";
+    if (_appendNext)
+    {
+        strncpy(mode, "ab", 3);
+    }
+
+    if ((_decodedFile = fopen(_outname.c_str(), mode)) == NULL)
+    {
+        printf("Cannot write file %s.\n", _outname.c_str());
+        exit(1);
+    }
+
+    _appendNext = true;
+}
+
+void
+NormalAsyncTest::Teardown()
+{
+    CodecTest::Teardown();
+    fclose(_sourceFile);
+    fclose(_encodedFile);
+    fclose(_decodedFile);
+}
+
+FrameQueueTuple::~FrameQueueTuple()
+{
+    if (_codecSpecificInfo != NULL)
+    {
+        delete _codecSpecificInfo;
+    }
+    if (_frame != NULL)
+    {
+        delete _frame;
+    }
+}
+
+void FrameQueue::PushFrame(TestVideoEncodedBuffer *frame,
+                           webrtc::CodecSpecificInfo* codecSpecificInfo)
+{
+    WriteLockScoped cs(_queueRWLock);
+    _frameBufferQueue.push(new FrameQueueTuple(frame, codecSpecificInfo));
+}
+
+FrameQueueTuple* FrameQueue::PopFrame()
+{
+    WriteLockScoped cs(_queueRWLock);
+    if (_frameBufferQueue.empty())
+    {
+        return NULL;
+    }
+    FrameQueueTuple* tuple = _frameBufferQueue.front();
+    _frameBufferQueue.pop();
+    return tuple;
+}
+
+bool FrameQueue::Empty()
+{
+    ReadLockScoped cs(_queueRWLock);
+    return _frameBufferQueue.empty();
+}
+
+WebRtc_UWord32 VideoEncodeCompleteCallback::EncodedBytes()
+{
+    return _encodedBytes;
+}
+
+WebRtc_Word32
+VideoEncodeCompleteCallback::Encoded(EncodedImage& encodedImage,
+                                     const webrtc::CodecSpecificInfo* codecSpecificInfo,
+                                     const webrtc::RTPFragmentationHeader*
+                                     fragmentation)
+{
+    _test.Encoded(encodedImage);
+    TestVideoEncodedBuffer *newBuffer = new TestVideoEncodedBuffer();
+    //newBuffer->VerifyAndAllocate(encodedImage._length);
+    newBuffer->VerifyAndAllocate(encodedImage._size);
+    _encodedBytes += encodedImage._length;
+    // If _frameQueue would have been a fixed sized buffer we could have asked
+    // it for an empty frame and then just do:
+    // emptyFrame->SwapBuffers(encodedBuffer);
+    // This is how it should be done in Video Engine to save in on memcpys
+    webrtc::CodecSpecificInfo* codecSpecificInfoCopy =
+        _test.CopyCodecSpecificInfo(codecSpecificInfo);
+    _test.CopyEncodedImage(*newBuffer, encodedImage, codecSpecificInfoCopy);
+    if (_encodedFile != NULL)
+    {
+      if (fwrite(newBuffer->GetBuffer(), 1, newBuffer->GetLength(),
+                 _encodedFile) !=  newBuffer->GetLength()) {
+        return -1;
+      }
+    }
+    _frameQueue->PushFrame(newBuffer, codecSpecificInfoCopy);
+    return 0;
+}
+
+WebRtc_UWord32 VideoDecodeCompleteCallback::DecodedBytes()
+{
+    return _decodedBytes;
+}
+
+WebRtc_Word32
+VideoDecodeCompleteCallback::Decoded(VideoFrame& image)
+{
+    _test.Decoded(image);
+    _decodedBytes += image.Length();
+    if (_decodedFile != NULL)
+    {
+      if (fwrite(image.Buffer(), 1, image.Length(),
+                 _decodedFile) !=  image.Length()) {
+        return -1;
+      }
+    }
+    return 0;
+}
+
+WebRtc_Word32
+VideoDecodeCompleteCallback::ReceivedDecodedReferenceFrame(
+    const WebRtc_UWord64 pictureId)
+{
+    return _test.ReceivedDecodedReferenceFrame(pictureId);
+}
+
+WebRtc_Word32
+VideoDecodeCompleteCallback::ReceivedDecodedFrame(
+    const WebRtc_UWord64 pictureId)
+{
+    return _test.ReceivedDecodedFrame(pictureId);
+}
+
+void
+NormalAsyncTest::Encoded(const EncodedImage& encodedImage)
+{
+    _encodeCompleteTime = tGetTime();
+    _encFrameCnt++;
+    _totalEncodePipeTime += _encodeCompleteTime -
+        _encodeTimes[encodedImage._timeStamp];
+}
+
+void
+NormalAsyncTest::Decoded(const VideoFrame& decodedImage)
+{
+    _decodeCompleteTime = tGetTime();
+    _decFrameCnt++;
+    _totalDecodePipeTime += _decodeCompleteTime -
+        _decodeTimes[decodedImage.TimeStamp()];
+    _decodedWidth = decodedImage.Width();
+    _decodedHeight = decodedImage.Height();
+}
+
+void
+NormalAsyncTest::Perform()
+{
+    _inname = webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv";
+    CodecSettings(352, 288, 30, _bitRate);
+    Setup();
+    _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+    _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+    if(_encoder->InitEncode(&_inst, 1, 1440) < 0)
+    {
+        exit(EXIT_FAILURE);
+    }
+    _decoder->InitDecode(&_inst, 1);
+    FrameQueue frameQueue;
+    VideoEncodeCompleteCallback encCallback(_encodedFile, &frameQueue, *this);
+    VideoDecodeCompleteCallback decCallback(_decodedFile, *this);
+    _encoder->RegisterEncodeCompleteCallback(&encCallback);
+    _decoder->RegisterDecodeCompleteCallback(&decCallback);
+    if (SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK)
+    {
+        exit(EXIT_FAILURE);
+    }
+    _totalEncodeTime = _totalDecodeTime = 0;
+    _totalEncodePipeTime = _totalDecodePipeTime = 0;
+    bool complete = false;
+    _framecnt = 0;
+    _encFrameCnt = 0;
+    _decFrameCnt = 0;
+    _sumEncBytes = 0;
+    _lengthEncFrame = 0;
+    double starttime = tGetTime();
+    while (!complete)
+    {
+        CodecSpecific_InitBitrate();
+        complete = Encode();
+        if (!frameQueue.Empty() || complete)
+        {
+            while (!frameQueue.Empty())
+            {
+                _frameToDecode =
+                    static_cast<FrameQueueTuple *>(frameQueue.PopFrame());
+                int lost = DoPacketLoss();
+                if (lost == 2)
+                {
+                    // Lost the whole frame, continue
+                    _missingFrames = true;
+                    delete _frameToDecode;
+                    _frameToDecode = NULL;
+                    continue;
+                }
+                int ret = Decode(lost);
+                delete _frameToDecode;
+                _frameToDecode = NULL;
+                if (ret < 0)
+                {
+                    fprintf(stderr,"\n\nError in decoder: %d\n\n", ret);
+                    exit(EXIT_FAILURE);
+                }
+                else if (ret == 0)
+                {
+                    _framecnt++;
+                }
+                else
+                {
+                    fprintf(stderr,
+                        "\n\nPositive return value from decode!\n\n");
+                }
+            }
+        }
+    }
+    double endtime = tGetTime();
+    double totalExecutionTime = endtime - starttime;
+    printf("Total execution time: %.1f s\n", totalExecutionTime);
+    _sumEncBytes = encCallback.EncodedBytes();
+    double actualBitRate = ActualBitRate(_encFrameCnt) / 1000.0;
+    double avgEncTime = _totalEncodeTime / _encFrameCnt;
+    double avgDecTime = _totalDecodeTime / _decFrameCnt;
+    printf("Actual bitrate: %f kbps\n", actualBitRate);
+    printf("Average encode time: %.1f ms\n", 1000 * avgEncTime);
+    printf("Average decode time: %.1f ms\n", 1000 * avgDecTime);
+    printf("Average encode pipeline time: %.1f ms\n",
+           1000 * _totalEncodePipeTime / _encFrameCnt);
+    printf("Average decode pipeline  time: %.1f ms\n",
+           1000 * _totalDecodePipeTime / _decFrameCnt);
+    printf("Number of encoded frames: %u\n", _encFrameCnt);
+    printf("Number of decoded frames: %u\n", _decFrameCnt);
+    (*_log) << "Actual bitrate: " << actualBitRate << " kbps\tTarget: " <<
+        _bitRate << " kbps" << std::endl;
+    (*_log) << "Average encode time: " << avgEncTime << " s" << std::endl;
+    (*_log) << "Average decode time: " << avgDecTime << " s" << std::endl;
+    _encoder->Release();
+    _decoder->Release();
+    Teardown();
+}
+
+bool
+NormalAsyncTest::Encode()
+{
+    _lengthEncFrame = 0;
+    EXPECT_GT(fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile), 0u);
+    _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
+    _inputVideoBuffer.SetTimeStamp((unsigned int)
+        (_encFrameCnt * 9e4 / _inst.maxFramerate));
+    _inputVideoBuffer.SetWidth(_inst.width);
+    _inputVideoBuffer.SetHeight(_inst.height);
+    VideoFrame rawImage;
+    VideoBufferToRawImage(_inputVideoBuffer, rawImage);
+    if (feof(_sourceFile) != 0)
+    {
+        return true;
+    }
+    _encodeCompleteTime = 0;
+    _encodeTimes[rawImage.TimeStamp()] = tGetTime();
+    VideoFrameType frameType = kDeltaFrame;
+
+    // check SLI queue
+    _hasReceivedSLI = false;
+    while (!_signalSLI.empty() && _signalSLI.front().delay == 0)
+    {
+        // SLI message has arrived at sender side
+        _hasReceivedSLI = true;
+        _pictureIdSLI = _signalSLI.front().id;
+        _signalSLI.pop_front();
+    }
+    // decrement SLI queue times
+    for (std::list<fbSignal>::iterator it = _signalSLI.begin();
+        it !=_signalSLI.end(); it++)
+    {
+        (*it).delay--;
+    }
+
+    // check PLI queue
+    _hasReceivedPLI = false;
+    while (!_signalPLI.empty() && _signalPLI.front().delay == 0)
+    {
+        // PLI message has arrived at sender side
+        _hasReceivedPLI = true;
+        _signalPLI.pop_front();
+    }
+    // decrement PLI queue times
+    for (std::list<fbSignal>::iterator it = _signalPLI.begin();
+        it != _signalPLI.end(); it++)
+    {
+        (*it).delay--;
+    }
+
+    if (_hasReceivedPLI)
+    {
+        // respond to PLI by encoding a key frame
+        frameType = kKeyFrame;
+        _hasReceivedPLI = false;
+        _hasReceivedSLI = false; // don't trigger both at once
+    }
+
+    webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo();
+    int ret = _encoder->Encode(rawImage, codecSpecificInfo, frameType);
+    EXPECT_EQ(ret, WEBRTC_VIDEO_CODEC_OK);
+    if (codecSpecificInfo != NULL)
+    {
+        delete codecSpecificInfo;
+        codecSpecificInfo = NULL;
+    }
+    if (_encodeCompleteTime > 0)
+    {
+        _totalEncodeTime += _encodeCompleteTime -
+            _encodeTimes[rawImage.TimeStamp()];
+    }
+    else
+    {
+        _totalEncodeTime += tGetTime() - _encodeTimes[rawImage.TimeStamp()];
+    }
+    assert(ret >= 0);
+    return false;
+}
+
+int
+NormalAsyncTest::Decode(int lossValue)
+{
+    _sumEncBytes += _frameToDecode->_frame->GetLength();
+    EncodedImage encodedImage;
+    VideoEncodedBufferToEncodedImage(*(_frameToDecode->_frame), encodedImage);
+    encodedImage._completeFrame = !lossValue;
+    _decodeCompleteTime = 0;
+    _decodeTimes[encodedImage._timeStamp] = tGetTime();
+    int ret = WEBRTC_VIDEO_CODEC_OK;
+    if (!_waitForKey || encodedImage._frameType == kKeyFrame)
+    {
+        _waitForKey = false;
+        ret = _decoder->Decode(encodedImage, _missingFrames, NULL,
+                               _frameToDecode->_codecSpecificInfo);
+
+        if (ret >= 0)
+        {
+            _missingFrames = false;
+        }
+    }
+
+    // check for SLI
+    if (ret == WEBRTC_VIDEO_CODEC_REQUEST_SLI)
+    {
+        // add an SLI feedback to the feedback "queue"
+        // to be delivered to encoder with _rttFrames delay
+        _signalSLI.push_back(fbSignal(_rttFrames,
+            static_cast<WebRtc_UWord8>((_lastDecPictureId) & 0x3f))); // 6 lsb
+
+        ret = WEBRTC_VIDEO_CODEC_OK;
+    }
+    else if (ret == WEBRTC_VIDEO_CODEC_ERR_REQUEST_SLI)
+    {
+        // add an SLI feedback to the feedback "queue"
+        // to be delivered to encoder with _rttFrames delay
+        _signalSLI.push_back(fbSignal(_rttFrames,
+            static_cast<WebRtc_UWord8>((_lastDecPictureId + 1) & 0x3f)));//6 lsb
+
+        ret = WEBRTC_VIDEO_CODEC_OK;
+    }
+    else if (ret == WEBRTC_VIDEO_CODEC_ERROR)
+    {
+        // wait for new key frame
+        // add an PLI feedback to the feedback "queue"
+        // to be delivered to encoder with _rttFrames delay
+        _signalPLI.push_back(fbSignal(_rttFrames, 0 /* picId not used*/));
+        _waitForKey = true;
+
+        ret = WEBRTC_VIDEO_CODEC_OK;
+    }
+
+    if (_decodeCompleteTime > 0)
+    {
+        _totalDecodeTime += _decodeCompleteTime -
+            _decodeTimes[encodedImage._timeStamp];
+    }
+    else
+    {
+        _totalDecodeTime += tGetTime() - _decodeTimes[encodedImage._timeStamp];
+    }
+    return ret;
+}
+
+webrtc::CodecSpecificInfo*
+NormalAsyncTest::CopyCodecSpecificInfo(
+        const webrtc::CodecSpecificInfo* codecSpecificInfo) const
+{
+    webrtc::CodecSpecificInfo* info = new webrtc::CodecSpecificInfo;
+    *info = *codecSpecificInfo;
+    return info;
+}
+
+void NormalAsyncTest::CodecSpecific_InitBitrate()
+{
+    if (_bitRate == 0)
+    {
+        _encoder->SetRates(600, _inst.maxFramerate);
+    }
+    else
+    {
+        _encoder->SetRates(_bitRate, _inst.maxFramerate);
+    }
+}
+
+void NormalAsyncTest::CopyEncodedImage(TestVideoEncodedBuffer& dest,
+                                       EncodedImage& src,
+                                       void* /*codecSpecificInfo*/) const
+{
+    dest.CopyBuffer(src._length, src._buffer);
+    dest.SetFrameType(src._frameType);
+    dest.SetCaptureWidth((WebRtc_UWord16)src._encodedWidth);
+    dest.SetCaptureHeight((WebRtc_UWord16)src._encodedHeight);
+    dest.SetTimeStamp(src._timeStamp);
+}
+
+WebRtc_Word32 NormalAsyncTest::ReceivedDecodedReferenceFrame(
+    const WebRtc_UWord64 pictureId) {
+  _lastDecRefPictureId = pictureId;
+  return 0;
+}
+
+WebRtc_Word32 NormalAsyncTest::ReceivedDecodedFrame(
+    const WebRtc_UWord64 pictureId) {
+  _lastDecPictureId = pictureId;
+  return 0;
+}
+
+double
+NormalAsyncTest::tGetTime()
+{// return time in sec
+    return ((double) (TickTime::MillisecondTimestamp())/1000);
+ }
diff --git a/src/modules/video_coding/codecs/test_framework/normal_async_test.h b/src/modules/video_coding/codecs/test_framework/normal_async_test.h
new file mode 100644
index 0000000..c866217
--- /dev/null
+++ b/src/modules/video_coding/codecs/test_framework/normal_async_test.h
@@ -0,0 +1,187 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_ASYNC_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_ASYNC_TEST_H_
+
+#include "common_types.h"
+
+#include "normal_test.h"
+#include "rw_lock_wrapper.h"
+#include <list>
+#include <map>
+#include <queue>
+
+class FrameQueueTuple
+{
+public:
+    FrameQueueTuple(TestVideoEncodedBuffer *frame,
+                    const webrtc::CodecSpecificInfo* codecSpecificInfo = NULL)
+    :
+        _frame(frame),
+        _codecSpecificInfo(codecSpecificInfo)
+    {};
+    ~FrameQueueTuple();
+    TestVideoEncodedBuffer*          _frame;
+    const webrtc::CodecSpecificInfo* _codecSpecificInfo;
+};
+
+class FrameQueue
+{
+public:
+    FrameQueue()
+    :
+        _queueRWLock(*webrtc::RWLockWrapper::CreateRWLock())
+    {
+    }
+
+    ~FrameQueue()
+    {
+        delete &_queueRWLock;
+    }
+
+    void PushFrame(TestVideoEncodedBuffer *frame,
+                   webrtc::CodecSpecificInfo* codecSpecificInfo = NULL);
+    FrameQueueTuple* PopFrame();
+    bool Empty();
+
+private:
+    webrtc::RWLockWrapper&                       _queueRWLock;
+    std::queue<FrameQueueTuple *>     _frameBufferQueue;
+};
+
+// feedback signal to encoder
+struct fbSignal
+{
+    fbSignal(int d, WebRtc_UWord8 pid) : delay(d), id(pid) {};
+    int         delay;
+    WebRtc_UWord8 id;
+};
+
+class NormalAsyncTest : public NormalTest
+{
+public:
+    NormalAsyncTest();
+    NormalAsyncTest(WebRtc_UWord32 bitRate);
+    NormalAsyncTest(std::string name, std::string description,
+                    unsigned int testNo);
+    NormalAsyncTest(std::string name, std::string description,
+                    WebRtc_UWord32 bitRate, unsigned int testNo);
+    NormalAsyncTest(std::string name, std::string description,
+                    WebRtc_UWord32 bitRate, unsigned int testNo,
+                    unsigned int rttFrames);
+    virtual ~NormalAsyncTest() {};
+    virtual void Perform();
+    virtual void Encoded(const webrtc::EncodedImage& encodedImage);
+    virtual void Decoded(const webrtc::VideoFrame& decodedImage);
+    virtual webrtc::CodecSpecificInfo*
+    CopyCodecSpecificInfo(
+        const webrtc::CodecSpecificInfo* codecSpecificInfo) const;
+    virtual void CopyEncodedImage(TestVideoEncodedBuffer& dest,
+                                  webrtc::EncodedImage& src,
+                                  void* /*codecSpecificInfo*/) const;
+    virtual webrtc::CodecSpecificInfo* CreateEncoderSpecificInfo() const
+    {
+        return NULL;
+    };
+    virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(
+        const WebRtc_UWord64 pictureId);
+    virtual WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 pictureId);
+
+protected:
+    virtual void Setup();
+    virtual void Teardown();
+    virtual bool Encode();
+    virtual int Decode(int lossValue = 0);
+    virtual void CodecSpecific_InitBitrate();
+    virtual int SetCodecSpecificParameters() {return 0;};
+    double tGetTime();// return time in sec
+
+    FILE*                   _sourceFile;
+    FILE*                   _decodedFile;
+    WebRtc_UWord32          _decodedWidth;
+    WebRtc_UWord32          _decodedHeight;
+    double                  _totalEncodeTime;
+    double                  _totalDecodeTime;
+    double                  _decodeCompleteTime;
+    double                  _encodeCompleteTime;
+    double                  _totalEncodePipeTime;
+    double                  _totalDecodePipeTime;
+    int                     _framecnt;
+    int                     _encFrameCnt;
+    int                     _decFrameCnt;
+    bool                    _requestKeyFrame;
+    unsigned int            _testNo;
+    unsigned int            _lengthEncFrame;
+    FrameQueueTuple*        _frameToDecode;
+    bool                    _appendNext;
+    std::map<WebRtc_UWord32, double> _encodeTimes;
+    std::map<WebRtc_UWord32, double> _decodeTimes;
+    bool                    _missingFrames;
+    std::list<fbSignal>     _signalSLI;
+    int                     _rttFrames;
+    mutable bool            _hasReceivedSLI;
+    mutable bool            _hasReceivedRPSI;
+    WebRtc_UWord8           _pictureIdSLI;
+    WebRtc_UWord16          _pictureIdRPSI;
+    WebRtc_UWord64          _lastDecRefPictureId;
+    WebRtc_UWord64          _lastDecPictureId;
+    std::list<fbSignal>     _signalPLI;
+    bool                    _hasReceivedPLI;
+    bool                    _waitForKey;
+};
+
+class VideoEncodeCompleteCallback : public webrtc::EncodedImageCallback
+{
+public:
+    VideoEncodeCompleteCallback(FILE* encodedFile, FrameQueue *frameQueue,
+                                NormalAsyncTest& test)
+    :
+      _encodedFile(encodedFile),
+      _frameQueue(frameQueue),
+      _test(test),
+      _encodedBytes(0)
+    {}
+
+    WebRtc_Word32
+    Encoded(webrtc::EncodedImage& encodedImage,
+            const webrtc::CodecSpecificInfo* codecSpecificInfo = NULL,
+            const webrtc::RTPFragmentationHeader* fragmentation = NULL);
+    WebRtc_UWord32 EncodedBytes();
+private:
+    FILE*             _encodedFile;
+    FrameQueue*       _frameQueue;
+    NormalAsyncTest&  _test;
+    WebRtc_UWord32    _encodedBytes;
+};
+
+class VideoDecodeCompleteCallback : public webrtc::DecodedImageCallback
+{
+public:
+    VideoDecodeCompleteCallback(FILE* decodedFile, NormalAsyncTest& test)
+    :
+        _decodedFile(decodedFile),
+        _test(test),
+        _decodedBytes(0)
+    {}
+
+    virtual WebRtc_Word32 Decoded(webrtc::VideoFrame& decodedImage);
+    virtual WebRtc_Word32
+    ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId);
+    virtual WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 pictureId);
+
+    WebRtc_UWord32 DecodedBytes();
+private:
+    FILE* _decodedFile;
+    NormalAsyncTest& _test;
+    WebRtc_UWord32    _decodedBytes;
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_ASYNC_TEST_H_
diff --git a/src/modules/video_coding/codecs/test_framework/normal_test.cc b/src/modules/video_coding/codecs/test_framework/normal_test.cc
new file mode 100644
index 0000000..b5dc961
--- /dev/null
+++ b/src/modules/video_coding/codecs/test_framework/normal_test.cc
@@ -0,0 +1,262 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "normal_test.h"
+
+#include <time.h>
+#include <sstream>
+#include <string.h>
+
+#include "gtest/gtest.h"
+#include "testsupport/fileutils.h"
+
+NormalTest::NormalTest()
+:
+CodecTest("Normal Test 1", "A test of normal execution of the codec"),
+_testNo(1),
+_lengthEncFrame(0),
+_appendNext(false)
+{
+}
+
+NormalTest::NormalTest(std::string name, std::string description,
+                       unsigned int testNo)
+:
+CodecTest(name, description),
+_requestKeyFrame(false),
+_testNo(testNo),
+_lengthEncFrame(0),
+_appendNext(false)
+{
+}
+
+NormalTest::NormalTest(std::string name, std::string description,
+                       WebRtc_UWord32 bitRate, unsigned int testNo)
+:
+CodecTest(name, description, bitRate),
+_requestKeyFrame(false),
+_testNo(testNo),
+_lengthEncFrame(0),
+_appendNext(false)
+{
+}
+
+void
+NormalTest::Setup()
+{
+    CodecTest::Setup();
+    std::stringstream ss;
+    std::string strTestNo;
+    ss << _testNo;
+    ss >> strTestNo;
+
+    // Check if settings exist. Otherwise use defaults.
+    if (_outname == "")
+    {
+        _outname = webrtc::test::OutputPath() + "out_normaltest" + strTestNo +
+            ".yuv";
+    }
+
+    if (_encodedName == "")
+    {
+        _encodedName = webrtc::test::OutputPath() + "encoded_normaltest" +
+            strTestNo + ".yuv";
+    }
+    
+    if ((_sourceFile = fopen(_inname.c_str(), "rb")) == NULL)
+    {
+        printf("Cannot read file %s.\n", _inname.c_str());
+        exit(1);
+    }
+
+    if ((_encodedFile = fopen(_encodedName.c_str(), "wb")) == NULL)
+    {
+        printf("Cannot write encoded file.\n");
+        exit(1);
+    }
+
+    char mode[3] = "wb";
+    if (_appendNext)
+    {
+        strncpy(mode, "ab", 3);
+    }
+
+    if ((_decodedFile = fopen(_outname.c_str(), mode)) == NULL)
+    {
+        printf("Cannot write file %s.\n", _outname.c_str());
+        exit(1);
+    }
+
+    _appendNext = true;
+}
+
+void
+NormalTest::Teardown()
+{
+    CodecTest::Teardown();
+    fclose(_sourceFile);
+    fclose(_decodedFile);
+}
+
+void
+NormalTest::Perform()
+{
+    _inname = webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv";
+    CodecSettings(352, 288, 30, _bitRate);
+    Setup();
+
+    _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+    _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+    _encodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+
+    _encoder->InitEncode(&_inst, 1, 1460);
+    CodecSpecific_InitBitrate();
+    _decoder->InitDecode(&_inst,1);
+
+    _totalEncodeTime = _totalDecodeTime = 0;
+    _framecnt = 0;
+    _sumEncBytes = 0;
+    _lengthEncFrame = 0;
+    int decodeLength = 0;
+    while (!Encode())
+    {
+        DoPacketLoss();
+        _encodedVideoBuffer.UpdateLength(_encodedVideoBuffer.GetLength());
+        if (fwrite(_encodedVideoBuffer.GetBuffer(), 1,
+                   _encodedVideoBuffer.GetLength(),
+                   _encodedFile) !=  _encodedVideoBuffer.GetLength()) {
+          return;
+        }
+        decodeLength = Decode();
+        if (decodeLength < 0)
+        {
+            fprintf(stderr,"\n\nError in decoder: %d\n\n", decodeLength);
+            exit(EXIT_FAILURE);
+        }
+        if (fwrite(_decodedVideoBuffer.GetBuffer(), 1, decodeLength,
+                   _decodedFile) != static_cast<unsigned int>(decodeLength)) {
+          return;
+        }
+        CodecSpecific_InitBitrate();
+        _framecnt++;
+    }
+
+    // Ensure we empty the decoding queue.
+    while (decodeLength > 0)
+    {
+        decodeLength = Decode();
+        if (decodeLength < 0)
+        {
+            fprintf(stderr,"\n\nError in decoder: %d\n\n", decodeLength);
+            exit(EXIT_FAILURE);
+        }
+        if (fwrite(_decodedVideoBuffer.GetBuffer(), 1, decodeLength,
+                   _decodedFile) != static_cast<unsigned int>(decodeLength)) {
+          return;
+        }
+    }
+
+    double actualBitRate = ActualBitRate(_framecnt) / 1000.0;
+    double avgEncTime = _totalEncodeTime / _framecnt;
+    double avgDecTime = _totalDecodeTime / _framecnt;
+    printf("Actual bitrate: %f kbps\n", actualBitRate);
+    printf("Average encode time: %f s\n", avgEncTime);
+    printf("Average decode time: %f s\n", avgDecTime);
+    (*_log) << "Actual bitrate: " << actualBitRate << " kbps\tTarget: " << _bitRate << " kbps" << std::endl;
+    (*_log) << "Average encode time: " << avgEncTime << " s" << std::endl;
+    (*_log) << "Average decode time: " << avgDecTime << " s" << std::endl;
+
+    _inputVideoBuffer.Free();
+    _encodedVideoBuffer.Reset();
+    _decodedVideoBuffer.Free();
+
+    _encoder->Release();
+    _decoder->Release();
+
+    Teardown();
+}
+
+bool
+NormalTest::Encode()
+{
+    _lengthEncFrame = 0;
+    EXPECT_GT(fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile), 0u);
+    if (feof(_sourceFile) != 0)
+    {
+        return true;
+    }
+    _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
+    _inputVideoBuffer.SetTimeStamp(_framecnt);
+
+    // This multiple attempt ridiculousness is to accomodate VP7:
+    // 1. The wrapper can unilaterally reduce the framerate for low bitrates.
+    // 2. The codec inexplicably likes to reject some frames. Perhaps there
+    //    is a good reason for this...
+    int encodingAttempts = 0;
+    double starttime = 0;
+    double endtime = 0;
+    while (_lengthEncFrame == 0)
+    {
+        starttime = clock()/(double)CLOCKS_PER_SEC;
+
+        _inputVideoBuffer.SetWidth(_inst.width);
+        _inputVideoBuffer.SetHeight(_inst.height);
+        //_lengthEncFrame = _encoder->Encode(_inputVideoBuffer, _encodedVideoBuffer, _frameInfo,
+        //  _inst.frameRate, _requestKeyFrame && !(_framecnt%50));
+
+        endtime = clock()/(double)CLOCKS_PER_SEC;
+
+        _encodedVideoBuffer.SetCaptureHeight(_inst.height);
+        _encodedVideoBuffer.SetCaptureWidth(_inst.width);
+        if (_lengthEncFrame < 0)
+        {
+            (*_log) << "Error in encoder: " << _lengthEncFrame << std::endl;
+            fprintf(stderr,"\n\nError in encoder: %d\n\n", _lengthEncFrame);
+            exit(EXIT_FAILURE);
+        }
+        _sumEncBytes += _lengthEncFrame;
+
+        encodingAttempts++;
+        if (encodingAttempts > 50)
+        {
+            (*_log) << "Unable to encode frame: " << _framecnt << std::endl;
+            fprintf(stderr,"\n\nUnable to encode frame: %d\n\n", _framecnt);
+            exit(EXIT_FAILURE);
+        }
+    }
+    _totalEncodeTime += endtime - starttime;
+
+    if (encodingAttempts > 1)
+    {
+        (*_log) << encodingAttempts << " attempts required to encode frame: " <<
+            _framecnt + 1 << std::endl;
+        fprintf(stderr,"\n%d attempts required to encode frame: %d\n", encodingAttempts,
+            _framecnt + 1);
+    }
+        
+    return false;
+}
+
+int
+NormalTest::Decode(int lossValue)
+{
+    _encodedVideoBuffer.SetWidth(_inst.width);
+    _encodedVideoBuffer.SetHeight(_inst.height);
+    int lengthDecFrame = 0;
+    //int lengthDecFrame = _decoder->Decode(_encodedVideoBuffer, _decodedVideoBuffer);
+    //_totalDecodeTime += (double)((clock()/(double)CLOCKS_PER_SEC) - starttime);
+    if (lengthDecFrame < 0)
+    {
+        return lengthDecFrame;
+    }
+    _encodedVideoBuffer.Reset();
+    _encodedVideoBuffer.UpdateLength(0);
+    return lengthDecFrame;
+}
diff --git a/src/modules/video_coding/codecs/test_framework/normal_test.h b/src/modules/video_coding/codecs/test_framework/normal_test.h
new file mode 100644
index 0000000..061fb6d
--- /dev/null
+++ b/src/modules/video_coding/codecs/test_framework/normal_test.h
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_TEST_H_
+
+#include "test.h"
+
+class NormalTest : public CodecTest
+{
+public:
+    NormalTest();
+    NormalTest(std::string name, std::string description, unsigned int testNo);
+    NormalTest(std::string name, std::string description, WebRtc_UWord32 bitRate, unsigned int testNo);
+    virtual ~NormalTest() {};
+    virtual void Perform();
+
+protected:
+    virtual void Setup();
+    virtual void Teardown();
+    virtual bool Encode();
+    virtual int Decode(int lossValue = 0);
+    virtual void CodecSpecific_InitBitrate()=0;
+    virtual int DoPacketLoss() {return 0;};
+
+    FILE*                   _sourceFile;
+    FILE*                   _decodedFile;
+    FILE*                   _encodedFile;
+    double                  _totalEncodeTime;
+    double                  _totalDecodeTime;
+    unsigned int            _framecnt;
+    bool                    _requestKeyFrame;
+    unsigned int            _testNo;
+    int                     _lengthEncFrame;
+    bool                    _appendNext;
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_NORMAL_TEST_H_
+
diff --git a/src/modules/video_coding/codecs/test_framework/packet_loss_test.cc b/src/modules/video_coding/codecs/test_framework/packet_loss_test.cc
new file mode 100644
index 0000000..aa51fc5
--- /dev/null
+++ b/src/modules/video_coding/codecs/test_framework/packet_loss_test.cc
@@ -0,0 +1,252 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "packet_loss_test.h"
+#include "video_source.h"
+#include <sstream>
+#include <cassert>
+#include <string.h>
+
+using namespace webrtc;
+
+PacketLossTest::PacketLossTest()
+:
+NormalAsyncTest("PacketLossTest", "Encode, remove lost packets, decode", 300,
+                5),
+_lossRate(0.1),
+_lossProbability(0.1),
+_lastFrame(NULL),
+_lastFrameLength(0)
+{
+}
+
+PacketLossTest::PacketLossTest(std::string name, std::string description)
+:
+NormalAsyncTest(name, description, 300, 5),
+_lossRate(0.1),
+_lossProbability(0.1),
+_lastFrame(NULL),
+_lastFrameLength(0)
+{
+}
+
+PacketLossTest::PacketLossTest(std::string name, std::string description, double lossRate, bool useNack, unsigned int rttFrames /* = 0*/)
+:
+NormalAsyncTest(name, description, 300, 5, rttFrames),
+_lossRate(lossRate),
+_lastFrame(NULL),
+_lastFrameLength(0)
+{
+    assert(lossRate >= 0 && lossRate <= 1);
+    if (useNack)
+    {
+        _lossProbability = 0;
+    }
+    else
+    {
+        _lossProbability = lossRate;
+    }
+}
+
+void
+PacketLossTest::Encoded(const EncodedImage& encodedImage)
+{
+    // push timestamp to queue
+    _frameQueue.push_back(encodedImage._timeStamp);
+    NormalAsyncTest::Encoded(encodedImage);
+}
+
+void
+PacketLossTest::Decoded(const VideoFrame& decodedImage)
+{
+    // check the frame queue if any frames have gone missing
+    assert(!_frameQueue.empty()); // decoded frame is not in the queue
+    while(_frameQueue.front() < decodedImage.TimeStamp())
+    {
+        // this frame is missing
+        // write previous decoded frame again (frame freeze)
+        if (_decodedFile && _lastFrame)
+        {
+          if (fwrite(_lastFrame, 1, _lastFrameLength,
+                     _decodedFile) != _lastFrameLength) {
+            return;
+          }
+        }
+
+        // remove frame from queue
+        _frameQueue.pop_front();
+    }
+    // Decoded frame is not in the queue.
+    assert(_frameQueue.front() == decodedImage.TimeStamp());
+
+    // pop the current frame
+    _frameQueue.pop_front();
+
+    // save image for future freeze-frame
+    if (_lastFrameLength < decodedImage.Length())
+    {
+        if (_lastFrame) delete [] _lastFrame;
+
+        _lastFrame = new WebRtc_UWord8[decodedImage.Length()];
+    }
+    memcpy(_lastFrame, decodedImage.Buffer(), decodedImage.Length());
+    _lastFrameLength = decodedImage.Length();
+
+    NormalAsyncTest::Decoded(decodedImage);
+}
+
+void
+PacketLossTest::Teardown()
+{
+    if (_totalKept + _totalThrown > 0)
+    {
+        printf("Target packet loss rate: %.4f\n", _lossProbability);
+        printf("Actual packet loss rate: %.4f\n", (_totalThrown * 1.0f) / (_totalKept + _totalThrown));
+        printf("Channel rate: %.2f kbps\n",
+            0.001 * 8.0 * _sumChannelBytes / ((_framecnt * 1.0f) / _inst.maxFramerate));
+    }
+    else
+    {
+        printf("No packet losses inflicted\n");
+    }
+
+    NormalAsyncTest::Teardown();
+}
+
+void
+PacketLossTest::Setup()
+{
+    const VideoSource source(_inname, _inst.width, _inst.height, _inst.maxFramerate);
+
+    std::stringstream ss;
+    std::string lossRateStr;
+    ss << _lossRate;
+    ss >> lossRateStr;
+    _encodedName = source.GetName() + "-" + lossRateStr;
+    _outname = "out-" + source.GetName() + "-" + lossRateStr;
+
+    if (_lossProbability != _lossRate)
+    {
+        _encodedName += "-nack";
+        _outname += "-nack";
+    }
+    _encodedName += ".vp8";
+    _outname += ".yuv";
+
+    _totalKept = 0;
+    _totalThrown = 0;
+    _sumChannelBytes = 0;
+
+    NormalAsyncTest::Setup();
+}
+
+void
+PacketLossTest::CodecSpecific_InitBitrate()
+{
+    assert(_bitRate > 0);
+    WebRtc_UWord32 simulatedBitRate;
+    if (_lossProbability != _lossRate)
+    {
+        // Simulating NACK
+        simulatedBitRate = WebRtc_UWord32(_bitRate / (1 + _lossRate));
+    }
+    else
+    {
+        simulatedBitRate = _bitRate;
+    }
+    int rtt = 0;
+    if (_inst.maxFramerate > 0)
+      rtt = _rttFrames * (1000 / _inst.maxFramerate);
+    _encoder->SetChannelParameters((WebRtc_UWord32)(_lossProbability * 255.0),
+                                                    rtt);
+    _encoder->SetRates(simulatedBitRate, _inst.maxFramerate);
+}
+
+int PacketLossTest::DoPacketLoss()
+{
+    // Only packet loss for delta frames
+    if (_frameToDecode->_frame->GetLength() == 0 || _frameToDecode->_frame->GetFrameType() != kDeltaFrame)
+    {
+        _sumChannelBytes += _frameToDecode->_frame->GetLength();
+        return 0;
+    }
+    unsigned char *packet = NULL;
+    TestVideoEncodedBuffer newEncBuf;
+    newEncBuf.VerifyAndAllocate(_lengthSourceFrame);
+    _inBufIdx = 0;
+    _outBufIdx = 0;
+    int size = 1;
+    int kept = 0;
+    int thrown = 0;
+    while ((size = NextPacket(1500, &packet)) > 0)
+    {
+        if (!PacketLoss(_lossProbability, thrown))
+        {
+            InsertPacket(&newEncBuf, packet, size);
+            kept++;
+        }
+        else
+        {
+            // Use the ByteLoss function if you want to lose only
+            // parts of a packet, and not the whole packet.
+
+            //int size2 = ByteLoss(size, packet, 15);
+            thrown++;
+            //if (size2 != size)
+            //{
+            //    InsertPacket(&newEncBuf, packet, size2);
+            //}
+        }
+    }
+    int	lossResult  = (thrown!=0);	// 0 = no loss	1 = loss(es)
+    if (lossResult)
+    {
+        lossResult += (kept==0);	// 2 = all lost = full frame
+    }
+    _frameToDecode->_frame->CopyBuffer(newEncBuf.GetLength(), newEncBuf.GetBuffer());
+    _sumChannelBytes += newEncBuf.GetLength();
+    _totalKept += kept;
+    _totalThrown += thrown;
+
+    return lossResult;
+    //printf("Threw away: %d out of %d packets\n", thrown, thrown + kept);
+    //printf("Encoded left: %d bytes\n", _encodedVideoBuffer.GetLength());
+}
+
+int PacketLossTest::NextPacket(int mtu, unsigned char **pkg)
+{
+    unsigned char *buf = _frameToDecode->_frame->GetBuffer();
+    *pkg = buf + _inBufIdx;
+    if (static_cast<long>(_frameToDecode->_frame->GetLength()) - _inBufIdx <= mtu)
+    {
+        int size = _frameToDecode->_frame->GetLength() - _inBufIdx;
+        _inBufIdx = _frameToDecode->_frame->GetLength();
+        return size;
+    }
+    _inBufIdx += mtu;
+    return mtu;
+}
+
+int PacketLossTest::ByteLoss(int size, unsigned char *pkg, int bytesToLose)
+{
+    return size;
+}
+
+void PacketLossTest::InsertPacket(TestVideoEncodedBuffer *buf, unsigned char *pkg, int size)
+{
+    if (static_cast<long>(buf->GetSize()) - _outBufIdx < size)
+    {
+        printf("InsertPacket error!\n");
+        return;
+    }
+    memcpy(buf->GetBuffer() + _outBufIdx, pkg, size);
+    buf->UpdateLength(buf->GetLength() + size);
+    _outBufIdx += size;
+}
diff --git a/src/modules/video_coding/codecs/test_framework/packet_loss_test.h b/src/modules/video_coding/codecs/test_framework/packet_loss_test.h
new file mode 100644
index 0000000..1051ce5
--- /dev/null
+++ b/src/modules/video_coding/codecs/test_framework/packet_loss_test.h
@@ -0,0 +1,59 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PACKET_LOSS_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PACKET_LOSS_TEST_H_
+
+#include <list>
+
+#include "normal_async_test.h"
+
+class PacketLossTest : public NormalAsyncTest
+{
+public:
+    PacketLossTest();
+    virtual ~PacketLossTest() {if(_lastFrame) {delete [] _lastFrame; _lastFrame = NULL;}}
+    virtual void Encoded(const webrtc::EncodedImage& encodedImage);
+    virtual void Decoded(const webrtc::VideoFrame& decodedImage);
+protected:
+    PacketLossTest(std::string name, std::string description);
+    PacketLossTest(std::string name,
+                   std::string description,
+                   double lossRate,
+                   bool useNack,
+                   unsigned int rttFrames = 0);
+
+    virtual void Setup();
+    virtual void Teardown();
+    virtual void CodecSpecific_InitBitrate();
+    virtual int DoPacketLoss();
+    virtual int NextPacket(int size, unsigned char **pkg);
+    virtual int ByteLoss(int size, unsigned char *pkg, int bytesToLose);
+    virtual void InsertPacket(TestVideoEncodedBuffer *buf, unsigned char *pkg, int size);
+    int _inBufIdx;
+    int _outBufIdx;
+
+    // When NACK is being simulated _lossProbabilty is zero,
+    // otherwise it is set equal to _lossRate.
+    // Desired channel loss rate.
+    double _lossRate;
+    // Probability used to simulate packet drops.
+    double _lossProbability;
+
+    int _totalKept;
+    int _totalThrown;
+    int _sumChannelBytes;
+    std::list<WebRtc_UWord32> _frameQueue;
+    WebRtc_UWord8* _lastFrame;
+    WebRtc_UWord32 _lastFrameLength;
+};
+
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PACKET_LOSS_TEST_H_
diff --git a/src/modules/video_coding/codecs/test_framework/performance_test.cc b/src/modules/video_coding/codecs/test_framework/performance_test.cc
new file mode 100644
index 0000000..18c6ad9
--- /dev/null
+++ b/src/modules/video_coding/codecs/test_framework/performance_test.cc
@@ -0,0 +1,296 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "performance_test.h"
+
+#include <assert.h>
+
+#include "gtest/gtest.h"
+#include "testsupport/fileutils.h"
+#include "tick_util.h"
+
+using namespace webrtc;
+
+#define NUM_FRAMES 300
+
+PerformanceTest::PerformanceTest(WebRtc_UWord32 bitRate)
+:
+NormalAsyncTest(bitRate),
+_numCodecs(0),
+_tests(NULL),
+_encoders(NULL),
+_decoders(NULL),
+_threads(NULL),
+_rawImageLock(NULL),
+_encodeEvents(new EventWrapper*[1]),
+_stopped(true),
+_encodeCompleteCallback(NULL),
+_decodeCompleteCallback(NULL)
+{
+}
+
+PerformanceTest::PerformanceTest(WebRtc_UWord32 bitRate, WebRtc_UWord8 numCodecs)
+:
+NormalAsyncTest(bitRate),
+_numCodecs(numCodecs),
+_tests(new PerformanceTest*[_numCodecs]),
+_encoders(new VideoEncoder*[_numCodecs]),
+_decoders(new VideoDecoder*[_numCodecs]),
+_threads(new ThreadWrapper*[_numCodecs]),
+_rawImageLock(RWLockWrapper::CreateRWLock()),
+_encodeEvents(new EventWrapper*[_numCodecs]),
+_stopped(true),
+_encodeCompleteCallback(NULL),
+_decodeCompleteCallback(NULL)
+{
+    for (int i=0; i < _numCodecs; i++)
+    {
+        _tests[i] = new PerformanceTest(bitRate);
+        _encodeEvents[i] = EventWrapper::Create();
+    }
+}
+
+PerformanceTest::~PerformanceTest()
+{
+    if (_encoders != NULL)
+    {
+        delete [] _encoders;
+    }
+    if (_decoders != NULL)
+    {
+        delete [] _decoders;
+    }
+    if (_tests != NULL)
+    {
+        delete [] _tests;
+    }
+    if (_threads != NULL)
+    {
+        delete [] _threads;
+    }
+    if (_rawImageLock != NULL)
+    {
+        delete _rawImageLock;
+    }
+    if (_encodeEvents != NULL)
+    {
+        delete [] _encodeEvents;
+    }
+}
+
+void
+PerformanceTest::Setup()
+{
+    _inname = webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv";
+    NormalAsyncTest::Setup(); // Setup input and output files
+    CodecSettings(352, 288, 30, _bitRate); // common to all codecs
+    for (int i=0; i < _numCodecs; i++)
+    {
+        _encoders[i] = CreateEncoder();
+        _decoders[i] = CreateDecoder();
+        if (_encoders[i] == NULL)
+        {
+            printf("Must create a codec specific test!\n");
+            exit(EXIT_FAILURE);
+        }
+        if(_encoders[i]->InitEncode(&_inst, 4, 1440) < 0)
+        {
+            exit(EXIT_FAILURE);
+        }
+        if (_decoders[i]->InitDecode(&_inst, 1))
+        {
+            exit(EXIT_FAILURE);
+        }
+        _tests[i]->SetEncoder(_encoders[i]);
+        _tests[i]->SetDecoder(_decoders[i]);
+        _tests[i]->_rawImageLock = _rawImageLock;
+        _encodeEvents[i]->Reset();
+        _tests[i]->_encodeEvents[0] = _encodeEvents[i];
+        _tests[i]->_inst = _inst;
+        _threads[i] = ThreadWrapper::CreateThread(PerformanceTest::RunThread, _tests[i]);
+        unsigned int id = 0;
+        _tests[i]->_stopped = false;
+        _threads[i]->Start(id);
+    }
+}
+
+void
+PerformanceTest::Perform()
+{
+    Setup();
+    EventWrapper& sleepEvent = *EventWrapper::Create();
+    const WebRtc_Word64 startTime = TickTime::MillisecondTimestamp();
+    for (int i=0; i < NUM_FRAMES; i++)
+    {
+        {
+            // Read a new frame from file
+            WriteLockScoped imageLock(*_rawImageLock);
+            _lengthEncFrame = 0;
+            EXPECT_GT(fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile),
+                      0u);
+            if (feof(_sourceFile) != 0)
+            {
+                rewind(_sourceFile);
+            }
+            _inputVideoBuffer.VerifyAndAllocate(_inst.width*_inst.height*3/2);
+            _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
+            _inputVideoBuffer.SetTimeStamp((unsigned int) (_encFrameCnt * 9e4 / static_cast<float>(_inst.maxFramerate)));
+            _inputVideoBuffer.SetWidth(_inst.width);
+            _inputVideoBuffer.SetHeight(_inst.height);
+            for (int i=0; i < _numCodecs; i++)
+            {
+                _tests[i]->_inputVideoBuffer.CopyPointer(_inputVideoBuffer);
+                _encodeEvents[i]->Set();
+            }
+        }
+        if (i < NUM_FRAMES - 1)
+        {
+            sleepEvent.Wait(33);
+        }
+    }
+    for (int i=0; i < _numCodecs; i++)
+    {
+        _tests[i]->_stopped = true;
+        _encodeEvents[i]->Set();
+        _threads[i]->Stop();
+    }
+    const WebRtc_UWord32 totalTime =
+            static_cast<WebRtc_UWord32>(TickTime::MillisecondTimestamp() - startTime);
+    printf("Total time: %u\n", totalTime);
+    delete &sleepEvent;
+    Teardown();
+}
+
+void PerformanceTest::Teardown()
+{
+    if (_encodeCompleteCallback != NULL)
+    {
+        delete _encodeCompleteCallback;
+    }
+    if (_decodeCompleteCallback != NULL)
+    {
+        delete _decodeCompleteCallback;
+    }
+    // main test only, all others have numCodecs = 0:
+    if (_numCodecs > 0)
+    {
+        WriteLockScoped imageLock(*_rawImageLock);
+        _inputVideoBuffer.Free();
+        NormalAsyncTest::Teardown();
+    }
+    for (int i=0; i < _numCodecs; i++)
+    {
+        _encoders[i]->Release();
+        delete _encoders[i];
+        _decoders[i]->Release();
+        delete _decoders[i];
+        _tests[i]->_inputVideoBuffer.ClearPointer();
+        _tests[i]->_rawImageLock = NULL;
+        _tests[i]->Teardown();
+        delete _tests[i];
+        delete _encodeEvents[i];
+        delete _threads[i];
+    }
+}
+
+bool
+PerformanceTest::RunThread(void* obj)
+{
+    PerformanceTest& test = *static_cast<PerformanceTest*>(obj);
+    return test.PerformSingleTest();
+}
+
+bool
+PerformanceTest::PerformSingleTest()
+{
+    if (_encodeCompleteCallback == NULL)
+    {
+        _encodeCompleteCallback = new VideoEncodeCompleteCallback(NULL, &_frameQueue, *this);
+        _encoder->RegisterEncodeCompleteCallback(_encodeCompleteCallback);
+    }
+    if (_decodeCompleteCallback == NULL)
+    {
+        _decodeCompleteCallback = new VideoDecodeCompleteCallback(NULL, *this);
+        _decoder->RegisterDecodeCompleteCallback(_decodeCompleteCallback);
+    }
+    (*_encodeEvents)->Wait(WEBRTC_EVENT_INFINITE); // The first event is used for every single test
+    CodecSpecific_InitBitrate();
+    bool complete = false;
+    {
+        ReadLockScoped imageLock(*_rawImageLock);
+        complete = Encode();
+    }
+    if (!_frameQueue.Empty() || complete)
+    {
+        while (!_frameQueue.Empty())
+        {
+            _frameToDecode = static_cast<FrameQueueTuple *>(_frameQueue.PopFrame());
+            int lost = DoPacketLoss();
+            if (lost == 2)
+            {
+                // Lost the whole frame, continue
+                _missingFrames = true;
+                delete _frameToDecode;
+                _frameToDecode = NULL;
+                continue;
+            }
+            int ret = Decode(lost);
+            delete _frameToDecode;
+            _frameToDecode = NULL;
+            if (ret < 0)
+            {
+                fprintf(stderr,"\n\nError in decoder: %d\n\n", ret);
+                return false;
+            }
+            else if (ret < 0)
+            {
+                fprintf(stderr, "\n\nPositive return value from decode!\n\n");
+                return false;
+            }
+        }
+    }
+    if (_stopped)
+    {
+        return false;
+    }
+    return true;
+}
+
+bool PerformanceTest::Encode()
+{
+    VideoFrame rawImage;
+    VideoBufferToRawImage(_inputVideoBuffer, rawImage);
+    VideoFrameType frameType = kDeltaFrame;
+    if (_requestKeyFrame && !(_encFrameCnt%50))
+    {
+        frameType = kKeyFrame;
+    }
+    webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo();
+    int ret = _encoder->Encode(rawImage, codecSpecificInfo, frameType);
+    EXPECT_EQ(ret, WEBRTC_VIDEO_CODEC_OK);
+    if (codecSpecificInfo != NULL)
+    {
+        delete codecSpecificInfo;
+        codecSpecificInfo = NULL;
+    }
+    assert(ret >= 0);
+    return false;
+}
+
+int PerformanceTest::Decode(int lossValue)
+{
+    EncodedImage encodedImage;
+    VideoEncodedBufferToEncodedImage(*(_frameToDecode->_frame), encodedImage);
+    encodedImage._completeFrame = !lossValue;
+    int ret = _decoder->Decode(encodedImage, _missingFrames, NULL,
+                               _frameToDecode->_codecSpecificInfo);
+    _missingFrames = false;
+    return ret;
+}
diff --git a/src/modules/video_coding/codecs/test_framework/performance_test.h b/src/modules/video_coding/codecs/test_framework/performance_test.h
new file mode 100644
index 0000000..d060832
--- /dev/null
+++ b/src/modules/video_coding/codecs/test_framework/performance_test.h
@@ -0,0 +1,54 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PERFORMANCE_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PERFORMANCE_TEST_H_
+
+#include "normal_async_test.h"
+#include "thread_wrapper.h"
+#include "rw_lock_wrapper.h"
+#include "event_wrapper.h"
+
+class PerformanceTest : public NormalAsyncTest
+{
+public:
+    PerformanceTest(WebRtc_UWord32 bitRate, WebRtc_UWord8 numCodecs);
+    virtual ~PerformanceTest();
+
+    virtual void Perform();
+    virtual void Print() {};
+
+protected:
+    PerformanceTest(WebRtc_UWord32 bitRate);
+    virtual void Setup();
+    virtual bool Encode();
+    virtual int Decode(int lossValue = 0);
+    virtual void Teardown();
+    static bool RunThread(void* obj);
+    bool PerformSingleTest();
+
+    virtual webrtc::VideoEncoder* CreateEncoder() const { return NULL; };
+    virtual webrtc::VideoDecoder* CreateDecoder() const { return NULL; };
+
+    WebRtc_UWord8                 _numCodecs;
+    PerformanceTest**             _tests;
+    webrtc::VideoEncoder**        _encoders;
+    webrtc::VideoDecoder**        _decoders;
+    webrtc::ThreadWrapper**       _threads;
+    webrtc::RWLockWrapper*        _rawImageLock;
+    webrtc::EventWrapper**        _encodeEvents;
+    FrameQueue                    _frameQueue;
+    bool                          _stopped;
+    webrtc::EncodedImageCallback* _encodeCompleteCallback;
+    webrtc::DecodedImageCallback* _decodeCompleteCallback;
+    FILE*                         _outFile;
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_PERFORMANCE_TEST_H_
diff --git a/src/modules/video_coding/codecs/test_framework/plotBenchmark.m b/src/modules/video_coding/codecs/test_framework/plotBenchmark.m
new file mode 100644
index 0000000..33c8eb6
--- /dev/null
+++ b/src/modules/video_coding/codecs/test_framework/plotBenchmark.m
@@ -0,0 +1,427 @@
+function plotBenchmark(fileNames, export)
+%PLOTBENCHMARK Plots and exports video codec benchmarking results.
+%   PLOTBENCHMARK(FILENAMES, EXPORT) parses the video codec benchmarking result
+%   files given by the cell array of strings FILENAME. It plots the results and
+%   optionally exports each plot to an appropriately named file.
+%
+%   EXPORT parameter:
+%       'none'  No file exports.
+%       'eps'   Exports to eps files (default).
+%       'pdf'   Exports to eps files and uses the command-line utility
+%               epstopdf to obtain pdf files.
+%
+%   Example:
+%       plotBenchmark({'H264Benchmark.txt' 'LSVXBenchmark.txt'}, 'pdf')
+
+if (nargin < 1)
+    error('Too few input arguments');
+elseif (nargin < 2)
+    export = 'eps';
+end
+
+if ~iscell(fileNames)
+    if ischar(fileNames)
+        % one single file name as a string is ok
+        if size(fileNames,1) > 1
+            % this is a char matrix, not ok
+            error('First argument must not be a char matrix');
+        end
+        % wrap in a cell array
+        fileNames = {fileNames};
+    else
+        error('First argument must be a cell array of strings');
+    end
+end
+
+if ~ischar(export)
+    error('Second argument must be a string');
+end
+
+outpath = 'BenchmarkPlots';
+[status, errMsg] = mkdir(outpath);
+if status == 0
+    error(errMsg);
+end
+
+nCases = 0;
+testCases = [];
+% Read each test result file
+for fileIdx = 1:length(fileNames)
+    if ~isstr(fileNames{fileIdx})
+        error('First argument must be a cell array of strings');
+    end
+
+    fid = fopen(fileNames{fileIdx}, 'rt');
+    if fid == -1
+        error(['Unable to open ' fileNames{fileIdx}]);
+    end
+
+    version = '1.0';
+    if ~strcmp(fgetl(fid), ['#!benchmark' version])
+        fclose(fid);
+        error(['Requires benchmark file format version ' version]);
+    end
+
+    % Parse results file into testCases struct
+    codec = fgetl(fid);
+    tline = fgetl(fid);
+    while(tline ~= -1)
+        nCases = nCases + 1;
+
+        delim = strfind(tline, ',');
+        name = tline(1:delim(1)-1);
+        % Drop underscored suffix from name
+        underscore = strfind(name, '_'); 
+        if ~isempty(underscore)
+            name = name(1:underscore(1)-1);
+        end
+
+        resolution = tline(delim(1)+1:delim(2)-1);
+        frameRate = tline(delim(2)+1:end);
+
+        tline = fgetl(fid);
+        delim = strfind(tline, ',');
+        bitrateLabel = tline(1:delim(1)-1); 
+        bitrate = sscanf(tline(delim(1):end),',%f');
+
+        tline = fgetl(fid);
+        delim = strfind(tline, ',');
+        psnrLabel = tline(1:delim(1)-1); 
+        psnr = sscanf(tline(delim(1):end),',%f'); 
+
+
+        % Default data for the optional lines
+        speedLabel = 'Default';
+        speed = 0;
+        ssimLabel = 'Default';
+        ssim = 0;
+        
+        tline = fgetl(fid);
+        delim = strfind(tline, ',');
+        
+        while ~isempty(delim)
+            % More data
+            % Check type of data
+            if strncmp(lower(tline), 'speed', 5)
+                % Speed data included
+                speedLabel = tline(1:delim(1)-1);
+                speed = sscanf(tline(delim(1):end), ',%f');
+
+                tline = fgetl(fid);
+                
+            elseif strncmp(lower(tline), 'encode time', 11)
+                % Encode and decode times included
+                % TODO: take care of the data
+                
+                % pop two lines from file
+                tline = fgetl(fid);
+                tline = fgetl(fid);
+                
+            elseif strncmp(tline, 'SSIM', 4)
+                % SSIM data included
+                ssimLabel = tline(1:delim(1)-1);
+                ssim = sscanf(tline(delim(1):end), ',%f');
+
+                tline = fgetl(fid);
+            end
+            delim = strfind(tline, ',');
+        end
+
+        testCases = [testCases struct('codec', codec, 'name', name, 'resolution', ...
+            resolution, 'frameRate', frameRate, 'bitrate', bitrate, 'psnr', psnr, ...
+            'speed', speed, 'bitrateLabel', bitrateLabel, 'psnrLabel', psnrLabel, ...
+            'speedLabel', speedLabel, ...
+            'ssim', ssim, 'ssimLabel', ssimLabel)];
+
+        tline = fgetl(fid);
+    end
+
+    fclose(fid);
+end
+
+i = 0;
+casesPsnr = testCases;
+while ~isempty(casesPsnr)
+    i = i + 1;
+    casesPsnr = plotOnePsnr(casesPsnr, i, export, outpath);
+end
+
+casesSSIM = testCases;
+while ~isempty(casesSSIM)
+    i = i + 1;
+    casesSSIM = plotOneSSIM(casesSSIM, i, export, outpath);
+end
+
+casesSpeed = testCases;
+while ~isempty(casesSpeed)
+    if casesSpeed(1).speed == 0
+        casesSpeed = casesSpeed(2:end);
+    else
+        i = i + 1;
+        casesSpeed = plotOneSpeed(casesSpeed, i, export, outpath);
+    end
+end
+
+
+
+%%%%%%%%%%%%%%%%%%
+%% SUBFUNCTIONS %%
+%%%%%%%%%%%%%%%%%%
+
+function casesOut = plotOnePsnr(cases, num, export, outpath)
+% Find matching specs
+plotIdx = 1;
+for i = 2:length(cases)
+    if strcmp(cases(1).resolution, cases(i).resolution) & ...
+        strcmp(cases(1).frameRate, cases(i).frameRate)
+        plotIdx = [plotIdx i];
+    end
+end
+
+% Return unplotted cases
+casesOut = cases(setdiff(1:length(cases), plotIdx));
+cases = cases(plotIdx);
+
+% Prune similar results
+for i = 1:length(cases)
+    simIndx = find(abs(cases(i).bitrate - [cases(i).bitrate(2:end) ; 0]) < 10);
+    while ~isempty(simIndx)
+        diffIndx = setdiff(1:length(cases(i).bitrate), simIndx);
+        cases(i).psnr = cases(i).psnr(diffIndx);
+        cases(i).bitrate = cases(i).bitrate(diffIndx);
+        simIndx = find(abs(cases(i).bitrate - [cases(i).bitrate(2:end) ; 0]) < 10);
+    end
+end
+
+% Prepare figure with axis labels and so on
+hFig = figure(num);
+clf;
+hold on;
+grid on;
+axis([0 1100 20 50]);
+set(gca, 'XTick', 0:200:1000);
+set(gca, 'YTick', 20:10:60);
+xlabel(cases(1).bitrateLabel);
+ylabel(cases(1).psnrLabel);
+res = cases(1).resolution;
+frRate = cases(1).frameRate;
+title([res ', ' frRate]);
+
+hLines = [];
+codecs = {};
+sequences = {};
+i = 0;
+while ~isempty(cases)
+    i = i + 1;
+    [cases, hLine, codec, sequences] = plotOneCodec(cases, 'bitrate', 'psnr', i, sequences, 1);
+
+    % Stored to generate the legend
+    hLines = [hLines ; hLine];
+    codecs = {codecs{:} codec};
+end
+legend(hLines, codecs, 4);
+hold off;
+
+if ~strcmp(export, 'none')
+    % Export figure to an eps file
+    res = stripws(res);
+    frRate = stripws(frRate);
+    exportName = [outpath '/psnr-' res '-' frRate];
+    exportfig(hFig, exportName, 'Format', 'eps2', 'Color', 'cmyk');
+end
+
+if strcmp(export, 'pdf')
+    % Use the epstopdf utility to convert to pdf
+    system(['epstopdf ' exportName '.eps']);  
+end
+
+
+function casesOut = plotOneSSIM(cases, num, export, outpath)
+% Find matching specs
+plotIdx = 1;
+for i = 2:length(cases)
+    if strcmp(cases(1).resolution, cases(i).resolution) & ...
+        strcmp(cases(1).frameRate, cases(i).frameRate)
+        plotIdx = [plotIdx i];
+    end
+end
+
+% Return unplotted cases
+casesOut = cases(setdiff(1:length(cases), plotIdx));
+cases = cases(plotIdx);
+
+% Prune similar results
+for i = 1:length(cases)
+    simIndx = find(abs(cases(i).bitrate - [cases(i).bitrate(2:end) ; 0]) < 10);
+    while ~isempty(simIndx)
+        diffIndx = setdiff(1:length(cases(i).bitrate), simIndx);
+        cases(i).ssim = cases(i).ssim(diffIndx);
+        cases(i).bitrate = cases(i).bitrate(diffIndx);
+        simIndx = find(abs(cases(i).bitrate - [cases(i).bitrate(2:end) ; 0]) < 10);
+    end
+end
+
+% Prepare figure with axis labels and so on
+hFig = figure(num);
+clf;
+hold on;
+grid on;
+axis([0 1100 0.5 1]); % y-limit are set to 'auto' below
+set(gca, 'XTick', 0:200:1000);
+%set(gca, 'YTick', 20:10:60);
+xlabel(cases(1).bitrateLabel);
+ylabel(cases(1).ssimLabel);
+res = cases(1).resolution;
+frRate = cases(1).frameRate;
+title([res ', ' frRate]);
+
+hLines = [];
+codecs = {};
+sequences = {};
+i = 0;
+while ~isempty(cases)
+    i = i + 1;
+    [cases, hLine, codec, sequences] = plotOneCodec(cases, 'bitrate', 'ssim', i, sequences, 1);
+
+    % Stored to generate the legend
+    hLines = [hLines ; hLine];
+    codecs = {codecs{:} codec};
+end
+%set(gca,'YLimMode','auto')
+set(gca,'YLim',[0.5 1])
+set(gca,'YScale','log')
+legend(hLines, codecs, 4);
+hold off;
+
+if ~strcmp(export, 'none')
+    % Export figure to an eps file
+    res = stripws(res);
+    frRate = stripws(frRate);
+    exportName = [outpath '/psnr-' res '-' frRate];
+    exportfig(hFig, exportName, 'Format', 'eps2', 'Color', 'cmyk');
+end
+
+if strcmp(export, 'pdf')
+    % Use the epstopdf utility to convert to pdf
+    system(['epstopdf ' exportName '.eps']);  
+end
+
+
+function casesOut = plotOneSpeed(cases, num, export, outpath)
+% Find matching specs
+plotIdx = 1;
+for i = 2:length(cases)
+    if strcmp(cases(1).resolution, cases(i).resolution) & ...
+        strcmp(cases(1).frameRate, cases(i).frameRate) & ...
+        strcmp(cases(1).name, cases(i).name)
+        plotIdx = [plotIdx i];
+    end
+end
+
+% Return unplotted cases
+casesOut = cases(setdiff(1:length(cases), plotIdx));
+cases = cases(plotIdx);
+
+% Prune similar results
+for i = 1:length(cases)
+    simIndx = find(abs(cases(i).psnr - [cases(i).psnr(2:end) ; 0]) < 0.25);
+    while ~isempty(simIndx)
+        diffIndx = setdiff(1:length(cases(i).psnr), simIndx);
+        cases(i).psnr = cases(i).psnr(diffIndx);
+        cases(i).speed = cases(i).speed(diffIndx);
+        simIndx = find(abs(cases(i).psnr - [cases(i).psnr(2:end) ; 0]) < 0.25);
+    end
+end
+
+hFig = figure(num);
+clf;
+hold on;
+%grid on;
+xlabel(cases(1).psnrLabel);
+ylabel(cases(1).speedLabel);
+res = cases(1).resolution;
+name = cases(1).name;
+frRate = cases(1).frameRate;
+title([name ', ' res ', ' frRate]);
+
+hLines = [];
+codecs = {};
+sequences = {};
+i = 0;
+while ~isempty(cases)
+    i = i + 1;
+    [cases, hLine, codec, sequences] = plotOneCodec(cases, 'psnr', 'speed', i, sequences, 0);
+
+    % Stored to generate the legend
+    hLines = [hLines ; hLine];
+    codecs = {codecs{:} codec};
+end
+legend(hLines, codecs, 1);
+hold off;
+
+if ~strcmp(export, 'none')
+    % Export figure to an eps file
+    res = stripws(res);
+    frRate = stripws(frRate);
+    exportName = [outpath '/speed-' name '-' res '-' frRate];
+    exportfig(hFig, exportName, 'Format', 'eps2', 'Color', 'cmyk');
+end
+
+if strcmp(export, 'pdf')
+    % Use the epstopdf utility to convert to pdf
+    system(['epstopdf ' exportName '.eps']);  
+end
+
+
+function [casesOut, hLine, codec, sequences] = plotOneCodec(cases, xfield, yfield, num, sequences, annotatePlot)
+plotStr = {'gx-', 'bo-', 'r^-', 'kd-', 'cx-', 'go--', 'b^--'};
+% Find matching codecs
+plotIdx = 1;
+for i = 2:length(cases)
+    if strcmp(cases(1).codec, cases(i).codec)
+        plotIdx = [plotIdx i];
+    end
+end
+
+% Return unplotted cases
+casesOut = cases(setdiff(1:length(cases), plotIdx));
+cases = cases(plotIdx);
+
+for i = 1:length(cases)
+    % Plot a single case
+    hLine = plot(getfield(cases(i), xfield), getfield(cases(i), yfield), plotStr{num}, ...
+        'LineWidth', 1.1, 'MarkerSize', 6);
+end
+
+% hLine handle and codec are returned to construct the legend afterwards
+codec = cases(1).codec;
+
+if annotatePlot == 0
+    return;
+end
+
+for i = 1:length(cases)
+    % Print the codec name as a text label
+    % Ensure each codec is only printed once
+    sequencePlotted = 0;
+    for j = 1:length(sequences)
+        if strcmp(cases(i).name, sequences{j})
+            sequencePlotted = 1;
+            break;
+        end
+    end
+
+    if sequencePlotted == 0
+        text(getfield(cases(i), xfield, {1}), getfield(cases(i), yfield, {1}), ...
+            ['    ' cases(i).name]);
+        sequences = {sequences{:} cases(i).name};
+    end
+end
+
+
+% Strip whitespace from string
+function str = stripws(str)
+if ~isstr(str)
+    error('String required');
+end
+str = str(setdiff(1:length(str), find(isspace(str) == 1)));
diff --git a/src/modules/video_coding/codecs/test_framework/test.cc b/src/modules/video_coding/codecs/test_framework/test.cc
new file mode 100644
index 0000000..de04213
--- /dev/null
+++ b/src/modules/video_coding/codecs/test_framework/test.cc
@@ -0,0 +1,169 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test.h"
+
+#include <cstring>
+#include <iostream>
+
+#include "testsupport/metrics/video_metrics.h"
+
+using namespace webrtc;
+
+long filesize(const char *filename); // local function defined at end of file
+
+CodecTest::CodecTest(std::string name, std::string description)
+:
+_bitRate(0),
+_inname(""),
+_outname(""),
+_encodedName(""),
+_name(name),
+_description(description)
+{
+    memset(&_inst, 0, sizeof(_inst));
+    unsigned int seed = static_cast<unsigned int>(0);
+    std::srand(seed);
+}
+
+CodecTest::CodecTest(std::string name, std::string description,
+                     WebRtc_UWord32 bitRate)
+:
+_bitRate(bitRate),
+_inname(""),
+_outname(""),
+_encodedName(""),
+_name(name),
+_description(description)
+{
+    memset(&_inst, 0, sizeof(_inst));
+    unsigned int seed = static_cast<unsigned int>(0);
+    std::srand(seed);
+}
+
+void
+CodecTest::Print()
+{
+    std::cout << _name << " completed!" << std::endl;
+    (*_log) << _name << std::endl;
+    (*_log) << _description << std::endl;
+    (*_log) << "Input file: " << _inname << std::endl;
+    (*_log) << "Output file: " << _outname << std::endl;
+    webrtc::test::QualityMetricsResult psnr;
+    webrtc::test::QualityMetricsResult ssim;
+    I420PSNRFromFiles(_inname.c_str(), _outname.c_str(), _inst.width,
+                      _inst.height, &psnr);
+    I420SSIMFromFiles(_inname.c_str(), _outname.c_str(), _inst.width,
+                      _inst.height, &ssim);
+
+    (*_log) << "PSNR: " << psnr.average << std::endl;
+    std::cout << "PSNR: " << psnr.average << std::endl << std::endl;
+    (*_log) << "SSIM: " << ssim.average << std::endl;
+    std::cout << "SSIM: " << ssim.average << std::endl << std::endl;
+    (*_log) << std::endl;
+}
+
+void
+CodecTest::Setup()
+{
+    int widhei          = _inst.width*_inst.height;
+    _lengthSourceFrame  = 3*widhei/2;
+    _sourceBuffer       = new unsigned char[_lengthSourceFrame];
+}
+
+void
+CodecTest::CodecSettings(int width, int height,
+                         WebRtc_UWord32 frameRate /*=30*/,
+                         WebRtc_UWord32 bitRate /*=0*/)
+{
+    if (bitRate > 0)
+    {
+        _bitRate = bitRate;
+    }
+    else if (_bitRate == 0)
+    {
+        _bitRate = 600;
+    }
+    _inst.codecType = kVideoCodecVP8;
+    _inst.codecSpecific.VP8.feedbackModeOn = true;
+    _inst.maxFramerate = (unsigned char)frameRate;
+    _inst.startBitrate = (int)_bitRate;
+    _inst.maxBitrate = 8000;
+    _inst.width = width;
+    _inst.height = height;
+}
+
+void
+CodecTest::Teardown()
+{
+    delete [] _sourceBuffer;
+}
+
+void
+CodecTest::SetEncoder(webrtc::VideoEncoder*encoder)
+{
+    _encoder = encoder;
+}
+
+void
+CodecTest::SetDecoder(VideoDecoder*decoder)
+{
+    _decoder = decoder;
+}
+
+void
+CodecTest::SetLog(std::fstream* log)
+{
+    _log = log;
+}
+
+double CodecTest::ActualBitRate(int nFrames)
+{
+    return 8.0 * _sumEncBytes / (nFrames / _inst.maxFramerate);
+}
+
+bool CodecTest::PacketLoss(double lossRate, int /*thrown*/)
+{
+    return RandUniform() < lossRate;
+}
+
+void
+CodecTest::VideoBufferToRawImage(TestVideoBuffer& videoBuffer,
+                                 VideoFrame &image)
+{
+  // TODO(mikhal): Use videoBuffer in lieu of TestVideoBuffer.
+  image.CopyFrame(videoBuffer.GetLength(), videoBuffer.GetBuffer());
+  image.SetWidth(videoBuffer.GetWidth());
+  image.SetHeight(videoBuffer.GetHeight());
+  image.SetTimeStamp(videoBuffer.GetTimeStamp());
+}
+void
+CodecTest::VideoEncodedBufferToEncodedImage(TestVideoEncodedBuffer& videoBuffer,
+                                            EncodedImage &image)
+{
+    image._buffer = videoBuffer.GetBuffer();
+    image._length = videoBuffer.GetLength();
+    image._size = videoBuffer.GetSize();
+    image._frameType = static_cast<VideoFrameType>(videoBuffer.GetFrameType());
+    image._timeStamp = videoBuffer.GetTimeStamp();
+    image._encodedWidth = videoBuffer.GetCaptureWidth();
+    image._encodedHeight = videoBuffer.GetCaptureHeight();
+    image._completeFrame = true;
+}
+
+long filesize(const char *filename)
+{
+    FILE *f = fopen(filename,"rb");  /* open the file in read only */
+    long size = 0;
+    if (fseek(f,0,SEEK_END)==0) /* seek was successful */
+        size = ftell(f);
+    fclose(f);
+    return size;
+}
diff --git a/src/modules/video_coding/codecs/test_framework/test.h b/src/modules/video_coding/codecs/test_framework/test.h
new file mode 100644
index 0000000..0df6a0a
--- /dev/null
+++ b/src/modules/video_coding/codecs/test_framework/test.h
@@ -0,0 +1,70 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_TEST_H_
+
+#include "video_codec_interface.h"
+#include "video_buffer.h"
+#include <string>
+#include <fstream>
+#include <cstdlib>
+
+class CodecTest
+{
+public:
+    CodecTest(std::string name, std::string description);
+    CodecTest(std::string name, std::string description,
+              WebRtc_UWord32 bitRate);
+    virtual ~CodecTest() {};
+    virtual void Perform()=0;
+    virtual void Print();
+    void SetEncoder(webrtc::VideoEncoder *encoder);
+    void SetDecoder(webrtc::VideoDecoder *decoder);
+    void SetLog(std::fstream* log);
+
+protected:
+    virtual void Setup();
+    virtual void CodecSettings(int width,
+                               int height,
+                               WebRtc_UWord32 frameRate=30,
+                               WebRtc_UWord32 bitRate=0);
+    virtual void Teardown();
+    double ActualBitRate(int nFrames);
+    virtual bool PacketLoss(double lossRate, int /*thrown*/);
+    static double RandUniform() { return (std::rand() + 1.0)/(RAND_MAX + 1.0); }
+    static void VideoBufferToRawImage(TestVideoBuffer& videoBuffer,
+                                      webrtc::VideoFrame &image);
+    static void VideoEncodedBufferToEncodedImage(
+        TestVideoEncodedBuffer& videoBuffer,
+        webrtc::EncodedImage &image);
+
+    webrtc::VideoEncoder*   _encoder;
+    webrtc::VideoDecoder*   _decoder;
+    WebRtc_UWord32          _bitRate;
+    unsigned int            _lengthSourceFrame;
+    unsigned char*          _sourceBuffer;
+    TestVideoBuffer         _inputVideoBuffer;
+    TestVideoEncodedBuffer  _encodedVideoBuffer;
+    TestVideoBuffer         _decodedVideoBuffer;
+    webrtc::VideoCodec      _inst;
+    std::fstream*           _log;
+    std::string             _inname;
+    std::string             _outname;
+    std::string             _encodedName;
+    int                     _sumEncBytes;
+
+private:
+    std::string             _name;
+    std::string             _description;
+
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAWEWORK_TEST_H_
diff --git a/src/modules/video_coding/codecs/test_framework/test_framework.gypi b/src/modules/video_coding/codecs/test_framework/test_framework.gypi
new file mode 100644
index 0000000..67b262f
--- /dev/null
+++ b/src/modules/video_coding/codecs/test_framework/test_framework.gypi
@@ -0,0 +1,64 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'conditions': [
+    ['include_tests==1', {
+      'targets': [
+        {
+          'target_name': 'test_framework',
+          'type': '<(library)',
+
+          'dependencies': [
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+            '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+            '<(webrtc_root)/test/metrics.gyp:metrics',
+            '<(webrtc_root)/test/test.gyp:test_support',
+          ],
+
+          'include_dirs': [
+            '../interface',
+            '<(DEPTH)/testing/gtest/include',
+            '../../../../common_video/interface',
+          ],
+
+          'direct_dependent_settings': {
+            'include_dirs': [
+              '../interface',
+            ],
+          },
+
+          'sources': [
+            # header files
+            'benchmark.h',
+            'normal_async_test.h',
+            'normal_test.h',
+            'packet_loss_test.h',
+            'performance_test.h',
+            'test.h',
+            'unit_test.h',
+            'video_buffer.h',
+            'video_source.h',
+
+            # source files
+            'benchmark.cc',
+            'normal_async_test.cc',
+            'normal_test.cc',
+            'packet_loss_test.cc',
+            'performance_test.cc',
+            'test.cc',
+            'unit_test.cc',
+            'video_buffer.cc',
+            'video_source.cc',
+          ],
+        },
+      ], # targets
+    }], # include_tests
+  ], # conditions
+}
diff --git a/src/modules/video_coding/codecs/test_framework/unit_test.cc b/src/modules/video_coding/codecs/test_framework/unit_test.cc
new file mode 100644
index 0000000..8184a2e
--- /dev/null
+++ b/src/modules/video_coding/codecs/test_framework/unit_test.cc
@@ -0,0 +1,785 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <math.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include <cassert>
+
+#include "gtest/gtest.h"
+#include "testsupport/fileutils.h"
+#include "tick_util.h"
+#include "unit_test.h"
+#include "video_source.h"
+
+using namespace webrtc;
+
+UnitTest::UnitTest()
+:
+CodecTest("UnitTest", "Unit test"),
+_tests(0),
+_errors(0),
+_source(NULL),
+_refFrame(NULL),
+_refEncFrame(NULL),
+_refDecFrame(NULL),
+_refEncFrameLength(0),
+_sourceFile(NULL),
+_encodeCompleteCallback(NULL),
+_decodeCompleteCallback(NULL)
+{
+}
+
+UnitTest::UnitTest(std::string name, std::string description)
+:
+CodecTest(name, description),
+_tests(0),
+_errors(0),
+_source(NULL),
+_refFrame(NULL),
+_refEncFrame(NULL),
+_refDecFrame(NULL),
+_refEncFrameLength(0),
+_sourceFile(NULL),
+_encodeCompleteCallback(NULL),
+_decodeCompleteCallback(NULL)
+{
+}
+
+UnitTest::~UnitTest()
+{
+    if (_encodeCompleteCallback) {
+        delete _encodeCompleteCallback;
+    }
+
+    if (_decodeCompleteCallback) {
+        delete _decodeCompleteCallback;
+    }
+
+    if (_source) {
+        delete _source;
+    }
+
+    if (_refFrame) {
+        delete [] _refFrame;
+    }
+
+    if (_refDecFrame) {
+        delete [] _refDecFrame;
+    }
+
+    if (_sourceBuffer) {
+        delete [] _sourceBuffer;
+    }
+
+    if (_sourceFile) {
+        fclose(_sourceFile);
+    }
+
+    if (_refEncFrame) {
+        delete [] _refEncFrame;
+    }
+}
+
+WebRtc_Word32
+UnitTestEncodeCompleteCallback::Encoded(EncodedImage& encodedImage,
+                                        const webrtc::CodecSpecificInfo* codecSpecificInfo,
+                                        const webrtc::RTPFragmentationHeader*
+                                        fragmentation)
+{
+    _encodedVideoBuffer->VerifyAndAllocate(encodedImage._size);
+    _encodedVideoBuffer->CopyBuffer(encodedImage._size, encodedImage._buffer);
+    _encodedVideoBuffer->UpdateLength(encodedImage._length);
+    _encodedVideoBuffer->SetFrameType(encodedImage._frameType);
+    _encodedVideoBuffer->SetCaptureWidth(
+        (WebRtc_UWord16)encodedImage._encodedWidth);
+    _encodedVideoBuffer->SetCaptureHeight(
+        (WebRtc_UWord16)encodedImage._encodedHeight);
+    _encodedVideoBuffer->SetTimeStamp(encodedImage._timeStamp);
+    _encodeComplete = true;
+    _encodedFrameType = encodedImage._frameType;
+    return 0;
+}
+
+WebRtc_Word32 UnitTestDecodeCompleteCallback::Decoded(VideoFrame& image)
+{
+    _decodedVideoBuffer->CopyBuffer(image.Length(), image.Buffer());
+    _decodedVideoBuffer->SetWidth(image.Width());
+    _decodedVideoBuffer->SetHeight(image.Height());
+    _decodedVideoBuffer->SetTimeStamp(image.TimeStamp());
+    _decodeComplete = true;
+    return 0;
+}
+
+bool
+UnitTestEncodeCompleteCallback::EncodeComplete()
+{
+    if (_encodeComplete)
+    {
+        _encodeComplete = false;
+        return true;
+    }
+    return false;
+}
+
+VideoFrameType
+UnitTestEncodeCompleteCallback::EncodedFrameType() const
+{
+    return _encodedFrameType;
+}
+
+bool
+UnitTestDecodeCompleteCallback::DecodeComplete()
+{
+    if (_decodeComplete)
+    {
+        _decodeComplete = false;
+        return true;
+    }
+    return false;
+}
+
+WebRtc_UWord32
+UnitTest::WaitForEncodedFrame() const
+{
+    WebRtc_Word64 startTime = TickTime::MillisecondTimestamp();
+    while (TickTime::MillisecondTimestamp() - startTime < kMaxWaitEncTimeMs)
+    {
+        if (_encodeCompleteCallback->EncodeComplete())
+        {
+            return _encodedVideoBuffer.GetLength();
+        }
+    }
+    return 0;
+}
+
+WebRtc_UWord32
+UnitTest::WaitForDecodedFrame() const
+{
+    WebRtc_Word64 startTime = TickTime::MillisecondTimestamp();
+    while (TickTime::MillisecondTimestamp() - startTime < kMaxWaitDecTimeMs)
+    {
+        if (_decodeCompleteCallback->DecodeComplete())
+        {
+            return _decodedVideoBuffer.GetLength();
+        }
+    }
+    return 0;
+}
+
+WebRtc_UWord32
+UnitTest::CodecSpecific_SetBitrate(WebRtc_UWord32 bitRate,
+                                   WebRtc_UWord32 /* frameRate */)
+{
+    return _encoder->SetRates(bitRate, _inst.maxFramerate);
+}
+
+void
+UnitTest::Setup()
+{
+    // Use _sourceFile as a check to prevent multiple Setup() calls.
+    if (_sourceFile != NULL)
+    {
+        return;
+    }
+
+    if (_encodeCompleteCallback == NULL)
+    {
+        _encodeCompleteCallback =
+            new UnitTestEncodeCompleteCallback(&_encodedVideoBuffer);
+    }
+    if (_decodeCompleteCallback == NULL)
+    {
+        _decodeCompleteCallback =
+            new UnitTestDecodeCompleteCallback(&_decodedVideoBuffer);
+    }
+
+    _encoder->RegisterEncodeCompleteCallback(_encodeCompleteCallback);
+    _decoder->RegisterDecodeCompleteCallback(_decodeCompleteCallback);
+
+    _source = new VideoSource(webrtc::test::ProjectRootPath() +
+                              "resources/foreman_cif.yuv", kCIF);
+
+    _lengthSourceFrame = _source->GetFrameLength();
+    _refFrame = new unsigned char[_lengthSourceFrame];
+    _refDecFrame = new unsigned char[_lengthSourceFrame];
+    _sourceBuffer = new unsigned char [_lengthSourceFrame];
+    _sourceFile = fopen(_source->GetFileName().c_str(), "rb");
+    ASSERT_TRUE(_sourceFile != NULL);
+
+    _inst.maxFramerate = _source->GetFrameRate();
+    _bitRate = 300;
+    _inst.startBitrate = 300;
+    _inst.maxBitrate = 4000;
+    _inst.width = _source->GetWidth();
+    _inst.height = _source->GetHeight();
+    _inst.codecSpecific.VP8.denoisingOn = true;
+
+    // Get input frame.
+    _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+    ASSERT_TRUE(fread(_refFrame, 1, _lengthSourceFrame, _sourceFile)
+                           == _lengthSourceFrame);
+    _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _refFrame);
+    _inputVideoBuffer.SetWidth(_source->GetWidth());
+    _inputVideoBuffer.SetHeight(_source->GetHeight());
+    rewind(_sourceFile);
+
+    // Get a reference encoded frame.
+    _encodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+
+    VideoFrame image;
+    VideoBufferToRawImage(_inputVideoBuffer, image);
+
+    // Ensures our initial parameters are valid.
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
+    VideoFrameType videoFrameType = kDeltaFrame;
+    _encoder->Encode(image, NULL, videoFrameType);
+    _refEncFrameLength = WaitForEncodedFrame();
+    ASSERT_TRUE(_refEncFrameLength > 0);
+    _refEncFrame = new unsigned char[_refEncFrameLength];
+    memcpy(_refEncFrame, _encodedVideoBuffer.GetBuffer(), _refEncFrameLength);
+
+    // Get a reference decoded frame.
+    _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+    EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
+    ASSERT_FALSE(SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK);
+
+    unsigned int frameLength = 0;
+    int i=0;
+    while (frameLength == 0)
+    {
+        if (i > 0)
+        {
+            // Insert yet another frame
+            _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+            ASSERT_TRUE(fread(_refFrame, 1, _lengthSourceFrame,
+                _sourceFile) == _lengthSourceFrame);
+            _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _refFrame);
+            _inputVideoBuffer.SetWidth(_source->GetWidth());
+            _inputVideoBuffer.SetHeight(_source->GetHeight());
+            VideoBufferToRawImage(_inputVideoBuffer, image);
+            _encoder->Encode(image, NULL, videoFrameType);
+            ASSERT_TRUE(WaitForEncodedFrame() > 0);
+        }
+        EncodedImage encodedImage;
+        VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+        ASSERT_TRUE(_decoder->Decode(encodedImage, 0, NULL)
+                               == WEBRTC_VIDEO_CODEC_OK);
+        frameLength = WaitForDecodedFrame();
+        _encodedVideoBuffer.Reset();
+        _encodedVideoBuffer.UpdateLength(0);
+        i++;
+    }
+    rewind(_sourceFile);
+    EXPECT_TRUE(frameLength == _lengthSourceFrame);
+    memcpy(_refDecFrame, _decodedVideoBuffer.GetBuffer(), _lengthSourceFrame);
+}
+
+void
+UnitTest::Teardown()
+{
+    // Use _sourceFile as a check to prevent multiple Teardown() calls.
+    if (_sourceFile == NULL)
+    {
+        return;
+    }
+
+    _encoder->Release();
+    _decoder->Release();
+
+    fclose(_sourceFile);
+    _sourceFile = NULL;
+    delete [] _refFrame;
+    _refFrame = NULL;
+    delete [] _refEncFrame;
+    _refEncFrame = NULL;
+    delete [] _refDecFrame;
+    _refDecFrame = NULL;
+    delete [] _sourceBuffer;
+    _sourceBuffer = NULL;
+}
+
+void
+UnitTest::Print()
+{
+}
+
+int
+UnitTest::DecodeWithoutAssert()
+{
+    EncodedImage encodedImage;
+    VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+    int ret = _decoder->Decode(encodedImage, 0, NULL);
+    int frameLength = WaitForDecodedFrame();
+    _encodedVideoBuffer.Reset();
+    _encodedVideoBuffer.UpdateLength(0);
+    return ret == WEBRTC_VIDEO_CODEC_OK ? frameLength : ret;
+}
+
+int
+UnitTest::Decode()
+{
+    EncodedImage encodedImage;
+    VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+    if (encodedImage._length == 0)
+    {
+        return WEBRTC_VIDEO_CODEC_OK;
+    }
+    int ret = _decoder->Decode(encodedImage, 0, NULL);
+    unsigned int frameLength = WaitForDecodedFrame();
+    assert(ret == WEBRTC_VIDEO_CODEC_OK && (frameLength == 0 || frameLength
+        == _lengthSourceFrame));
+    EXPECT_TRUE(ret == WEBRTC_VIDEO_CODEC_OK && (frameLength == 0 || frameLength
+        == _lengthSourceFrame));
+    _encodedVideoBuffer.Reset();
+    _encodedVideoBuffer.UpdateLength(0);
+    return ret == WEBRTC_VIDEO_CODEC_OK ? frameLength : ret;
+}
+
+// Test pure virtual VideoEncoder and VideoDecoder APIs.
+void
+UnitTest::Perform()
+{
+    UnitTest::Setup();
+    int frameLength;
+    VideoFrame inputImage;
+    EncodedImage encodedImage;
+    VideoFrameType videoFrameType = kDeltaFrame;
+
+    //----- Encoder parameter tests -----
+
+    //-- Calls before InitEncode() --
+    // We want to revert the initialization done in Setup().
+    EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
+    VideoBufferToRawImage(_inputVideoBuffer, inputImage);
+    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, videoFrameType)
+               == WEBRTC_VIDEO_CODEC_UNINITIALIZED);
+
+    //-- InitEncode() errors --
+    // Null pointer.
+    EXPECT_TRUE(_encoder->InitEncode(NULL, 1, 1440) ==
+        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
+    // bit rate exceeds max bit rate
+    WebRtc_Word32 tmpBitRate = _inst.startBitrate;
+    WebRtc_Word32 tmpMaxBitRate = _inst.maxBitrate;
+    _inst.startBitrate = 4000;
+    _inst.maxBitrate = 3000;
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440)  ==
+        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
+    _inst.startBitrate = tmpBitRate;
+    _inst.maxBitrate = tmpMaxBitRate; //unspecified value
+
+    // Bad framerate.
+    _inst.maxFramerate = 0;
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) ==
+        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
+    // Seems like we should allow any framerate in range [0, 255].
+    //_inst.frameRate = 100;
+    //EXPECT_TRUE(_encoder->InitEncode(&_inst, 1) == -1); // FAILS
+    _inst.maxFramerate = 30;
+
+    // Bad bitrate.
+    _inst.startBitrate = -1;
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) ==
+        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
+    _inst.maxBitrate = _inst.startBitrate - 1;
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) ==
+        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
+    _inst.maxBitrate = 0;
+    _inst.startBitrate = 300;
+
+    // Bad maxBitRate.
+    _inst.maxBitrate = 200;
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) ==
+        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
+    _inst.maxBitrate = 4000;
+
+    // Bad width.
+    _inst.width = 0;
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) < 0);
+    _inst.width = _source->GetWidth();
+
+    // Bad height.
+    _inst.height = 0;
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) < 0);
+    _inst.height = _source->GetHeight();
+
+    // Bad number of cores.
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, -1, 1440) ==
+        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
+
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
+
+    //-- Encode() errors --
+
+    // inputVideoBuffer unallocated.
+    _inputVideoBuffer.Free();
+    inputImage.Free();
+    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, videoFrameType) ==
+        WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
+    _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+    _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _refFrame);
+    _inputVideoBuffer.SetWidth(_source->GetWidth());
+    _inputVideoBuffer.SetHeight(_source->GetHeight());
+
+    //----- Encoder stress tests -----
+
+    // Vary frame rate and I-frame request.
+    VideoBufferToRawImage(_inputVideoBuffer, inputImage);
+    for (int i = 1; i <= 60; i++)
+    {
+        VideoFrameType frameType = !(i % 2) ? kKeyFrame : kDeltaFrame;
+        EXPECT_TRUE(_encoder->Encode(inputImage, NULL, frameType) ==
+            WEBRTC_VIDEO_CODEC_OK);
+        EXPECT_TRUE(WaitForEncodedFrame() > 0);
+    }
+
+    // Init then encode.
+    _encodedVideoBuffer.UpdateLength(0);
+    _encodedVideoBuffer.Reset();
+    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, videoFrameType) ==
+        WEBRTC_VIDEO_CODEC_OK);
+    EXPECT_TRUE(WaitForEncodedFrame() > 0);
+
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
+    _encoder->Encode(inputImage, NULL, videoFrameType);
+    frameLength = WaitForEncodedFrame();
+    EXPECT_TRUE(frameLength > 0);
+    EXPECT_TRUE(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
+            _encodedVideoBuffer.GetBuffer(), frameLength) == true);
+
+    // Reset then encode.
+    _encodedVideoBuffer.UpdateLength(0);
+    _encodedVideoBuffer.Reset();
+    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, videoFrameType) ==
+        WEBRTC_VIDEO_CODEC_OK);
+    WaitForEncodedFrame();
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
+    _encoder->Encode(inputImage, NULL, videoFrameType);
+    frameLength = WaitForEncodedFrame();
+    EXPECT_TRUE(frameLength > 0);
+    EXPECT_TRUE(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
+        _encodedVideoBuffer.GetBuffer(), frameLength) == true);
+
+    // Release then encode.
+    _encodedVideoBuffer.UpdateLength(0);
+    _encodedVideoBuffer.Reset();
+    EXPECT_TRUE(_encoder->Encode(inputImage, NULL, videoFrameType) ==
+        WEBRTC_VIDEO_CODEC_OK);
+    WaitForEncodedFrame();
+    EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
+    _encoder->Encode(inputImage, NULL, videoFrameType);
+    frameLength = WaitForEncodedFrame();
+    EXPECT_TRUE(frameLength > 0);
+    EXPECT_TRUE(CheckIfBitExact(_refEncFrame, _refEncFrameLength,
+        _encodedVideoBuffer.GetBuffer(), frameLength) == true);
+
+    //----- Decoder parameter tests -----
+
+    //-- Calls before InitDecode() --
+    // We want to revert the initialization done in Setup().
+    EXPECT_TRUE(_decoder->Release() == WEBRTC_VIDEO_CODEC_OK);
+    VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+    EXPECT_TRUE(_decoder->Decode(encodedImage, false, NULL) ==
+        WEBRTC_VIDEO_CODEC_UNINITIALIZED);
+    WaitForDecodedFrame();
+    EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_UNINITIALIZED);
+    EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
+    ASSERT_FALSE(SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK);
+
+    //-- Decode() errors --
+    // Unallocated encodedVideoBuffer.
+    _encodedVideoBuffer.Free();
+    VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+    encodedImage._length = 10;  // Buffer NULL but length > 0
+    EXPECT_EQ(_decoder->Decode(encodedImage, false, NULL),
+              WEBRTC_VIDEO_CODEC_ERR_PARAMETER);
+    _encodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+
+    //----- Decoder stress tests -----
+    unsigned char* tmpBuf = new unsigned char[_lengthSourceFrame];
+
+    // "Random" and zero data.
+    // We either expect an error, or at the least, no output.
+    // This relies on the codec's ability to detect an erroneous bitstream.
+    EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
+    EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
+    ASSERT_FALSE(SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK);
+    for (int i = 0; i < 100; i++)
+    {
+        ASSERT_TRUE(fread(tmpBuf, 1, _refEncFrameLength, _sourceFile)
+            == _refEncFrameLength);
+        _encodedVideoBuffer.CopyBuffer(_refEncFrameLength, tmpBuf);
+        VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+        int ret = _decoder->Decode(encodedImage, false, NULL);
+        EXPECT_TRUE(ret <= 0);
+        if (ret == 0)
+        {
+            EXPECT_TRUE(WaitForDecodedFrame() == 0);
+        }
+
+        memset(tmpBuf, 0, _refEncFrameLength);
+        _encodedVideoBuffer.CopyBuffer(_refEncFrameLength, tmpBuf);
+        VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+        ret = _decoder->Decode(encodedImage, false, NULL);
+        EXPECT_TRUE(ret <= 0);
+        if (ret == 0)
+        {
+            EXPECT_TRUE(WaitForDecodedFrame() == 0);
+        }
+    }
+    rewind(_sourceFile);
+
+    _encodedVideoBuffer.UpdateLength(_refEncFrameLength);
+    _encodedVideoBuffer.CopyBuffer(_refEncFrameLength, _refEncFrame);
+
+    // Init then decode.
+    EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
+    ASSERT_FALSE(SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK);
+    frameLength = 0;
+    VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+    while (frameLength == 0)
+    {
+        _decoder->Decode(encodedImage, false, NULL);
+        frameLength = WaitForDecodedFrame();
+    }
+    EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.GetBuffer(), frameLength,
+        _refDecFrame, _lengthSourceFrame) == true);
+
+    // Reset then decode.
+    EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
+    frameLength = 0;
+    VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+    while (frameLength == 0)
+    {
+        _decoder->Decode(encodedImage, false, NULL);
+        frameLength = WaitForDecodedFrame();
+    }
+    EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.GetBuffer(), frameLength,
+        _refDecFrame, _lengthSourceFrame) == true);
+
+    // Decode with other size, reset, then decode with original size again
+    // to verify that decoder is reset to a "fresh" state upon Reset().
+    {
+        // Assert that input frame size is a factor of two, so that we can use
+        // quarter size below.
+        EXPECT_TRUE((_inst.width % 2 == 0) && (_inst.height % 2 == 0));
+
+        VideoCodec tempInst;
+        memcpy(&tempInst, &_inst, sizeof(VideoCodec));
+        tempInst.width /= 2;
+        tempInst.height /= 2;
+
+        // Encode reduced (quarter) frame size.
+        EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
+        EXPECT_TRUE(_encoder->InitEncode(&tempInst, 1, 1440) ==
+            WEBRTC_VIDEO_CODEC_OK);
+        VideoFrame tempInput;
+        unsigned int tmpLength = inputImage.Length() / 4;
+        tempInput.CopyFrame(tmpLength, inputImage.Buffer());
+        tempInput.SetWidth(tempInst.width);
+        tempInput.SetHeight(tempInst.height);
+        VideoFrameType videoFrameType = kDeltaFrame;
+        _encoder->Encode(tempInput, NULL, videoFrameType);
+        frameLength = WaitForEncodedFrame();
+        EXPECT_TRUE(frameLength > 0);
+        tempInput.Free();
+        // Reset then decode.
+        EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
+        frameLength = 0;
+        VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+        while (frameLength == 0)
+        {
+            _decoder->Decode(encodedImage, false, NULL);
+            frameLength = WaitForDecodedFrame();
+        }
+
+        // Encode original frame again
+        EXPECT_TRUE(_encoder->Release() == WEBRTC_VIDEO_CODEC_OK);
+        EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) ==
+            WEBRTC_VIDEO_CODEC_OK);
+        _encoder->Encode(inputImage, NULL, videoFrameType);
+        frameLength = WaitForEncodedFrame();
+        EXPECT_TRUE(frameLength > 0);
+
+        // Reset then decode original frame again.
+        EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
+        frameLength = 0;
+        VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+        while (frameLength == 0)
+        {
+            _decoder->Decode(encodedImage, false, NULL);
+            frameLength = WaitForDecodedFrame();
+        }
+
+        // check that decoded frame matches with reference
+        EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.GetBuffer(), frameLength,
+            _refDecFrame, _lengthSourceFrame) == true);
+
+    }
+
+    // Release then decode.
+    EXPECT_TRUE(_decoder->Release() == WEBRTC_VIDEO_CODEC_OK);
+    EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
+    ASSERT_FALSE(SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK);
+    frameLength = 0;
+    VideoEncodedBufferToEncodedImage(_encodedVideoBuffer, encodedImage);
+    while (frameLength == 0)
+    {
+        _decoder->Decode(encodedImage, false, NULL);
+        frameLength = WaitForDecodedFrame();
+    }
+    EXPECT_TRUE(CheckIfBitExact(_decodedVideoBuffer.GetBuffer(), frameLength,
+        _refDecFrame, _lengthSourceFrame) == true);
+    _encodedVideoBuffer.UpdateLength(0);
+    _encodedVideoBuffer.Reset();
+
+    delete [] tmpBuf;
+
+    //----- Function tests -----
+    int frames = 0;
+    // Do not specify maxBitRate (as in ViE).
+    _inst.maxBitrate = 0;
+
+    //-- Timestamp propagation --
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
+    EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
+    EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
+    ASSERT_FALSE(SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK);
+
+    frames = 0;
+    int frameDelay = 0;
+    int encTimeStamp;
+    _decodedVideoBuffer.SetTimeStamp(0);
+    while (fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile) ==
+        _lengthSourceFrame)
+    {
+        _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
+        _inputVideoBuffer.SetTimeStamp(frames);
+        VideoBufferToRawImage(_inputVideoBuffer, inputImage);
+        VideoFrameType videoFrameType = kDeltaFrame;
+        ASSERT_TRUE(_encoder->Encode(inputImage, NULL, videoFrameType) ==
+            WEBRTC_VIDEO_CODEC_OK);
+        frameLength = WaitForEncodedFrame();
+        //ASSERT_TRUE(frameLength);
+        EXPECT_TRUE(frameLength > 0);
+        encTimeStamp = _encodedVideoBuffer.GetTimeStamp();
+        EXPECT_TRUE(_inputVideoBuffer.GetTimeStamp() ==
+                static_cast<unsigned>(encTimeStamp));
+
+        frameLength = Decode();
+        if (frameLength == 0)
+        {
+            frameDelay++;
+        }
+
+        encTimeStamp -= frameDelay;
+        if (encTimeStamp < 0)
+        {
+            encTimeStamp = 0;
+        }
+        EXPECT_TRUE(_decodedVideoBuffer.GetTimeStamp() ==
+                static_cast<unsigned>(encTimeStamp));
+        frames++;
+    }
+    ASSERT_TRUE(feof(_sourceFile) != 0);
+    rewind(_sourceFile);
+
+    RateControlTests();
+    inputImage.Free();
+
+    Teardown();
+}
+
+void
+UnitTest::RateControlTests()
+{
+    int frames = 0;
+    VideoFrame inputImage;
+    WebRtc_UWord32 frameLength;
+
+    // Do not specify maxBitRate (as in ViE).
+    _inst.maxBitrate = 0;
+    //-- Verify rate control --
+    EXPECT_TRUE(_encoder->InitEncode(&_inst, 1, 1440) == WEBRTC_VIDEO_CODEC_OK);
+    EXPECT_TRUE(_decoder->Reset() == WEBRTC_VIDEO_CODEC_OK);
+    EXPECT_TRUE(_decoder->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
+    // add: should also be 0, and 1
+    const int bitRate[] = {30, 100, 500, 1000, 2000};
+    const int nBitrates = sizeof(bitRate)/sizeof(*bitRate);
+
+    printf("\nRate control test\n");
+    for (int i = 0; i < nBitrates; i++)
+    {
+        _bitRate = bitRate[i];
+        int totalBytes = 0;
+        _inst.startBitrate = _bitRate;
+        _encoder->InitEncode(&_inst, 4, 1440);
+        _decoder->Reset();
+        _decoder->InitDecode(&_inst, 1);
+        frames = 0;
+
+        if (_bitRate > _inst.maxBitrate)
+        {
+            CodecSpecific_SetBitrate(_bitRate, _inst.maxFramerate);
+        }
+        else
+        {
+            CodecSpecific_SetBitrate(_bitRate, _inst.maxFramerate);
+        }
+
+        while (fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile) ==
+            _lengthSourceFrame)
+        {
+            _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
+            _inputVideoBuffer.SetTimeStamp(_inputVideoBuffer.GetTimeStamp() +
+                static_cast<WebRtc_UWord32>(9e4 /
+                    static_cast<float>(_inst.maxFramerate)));
+            VideoBufferToRawImage(_inputVideoBuffer, inputImage);
+            VideoFrameType videoFrameType = kDeltaFrame;
+            ASSERT_EQ(_encoder->Encode(inputImage, NULL, videoFrameType),
+                      WEBRTC_VIDEO_CODEC_OK);
+            frameLength = WaitForEncodedFrame();
+            ASSERT_GE(frameLength, 0u);
+            totalBytes += frameLength;
+            frames++;
+
+            _encodedVideoBuffer.UpdateLength(0);
+            _encodedVideoBuffer.Reset();
+        }
+        WebRtc_UWord32 actualBitrate =
+            (totalBytes  / frames * _inst.maxFramerate * 8)/1000;
+        printf("Target bitrate: %d kbps, actual bitrate: %d kbps\n", _bitRate,
+            actualBitrate);
+        // Test for close match over reasonable range.
+        if (_bitRate >= 100 && _bitRate <= 2500)
+        {
+            EXPECT_TRUE(abs(WebRtc_Word32(actualBitrate - _bitRate)) <
+                0.1 * _bitRate); // for VP8
+        }
+        ASSERT_TRUE(feof(_sourceFile) != 0);
+        rewind(_sourceFile);
+    }
+    inputImage.Free();
+}
+
+bool
+UnitTest::CheckIfBitExact(const void* ptrA, unsigned int aLengthBytes,
+                          const void* ptrB, unsigned int bLengthBytes)
+{
+    if (aLengthBytes != bLengthBytes)
+    {
+        return false;
+    }
+
+    return memcmp(ptrA, ptrB, aLengthBytes) == 0;
+}
diff --git a/src/modules/video_coding/codecs/test_framework/unit_test.h b/src/modules/video_coding/codecs/test_framework/unit_test.h
new file mode 100644
index 0000000..0a4fee1
--- /dev/null
+++ b/src/modules/video_coding/codecs/test_framework/unit_test.h
@@ -0,0 +1,107 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_UNIT_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_UNIT_TEST_H_
+
+#include "test.h"
+#include "event_wrapper.h"
+
+// Disable "conditional expression is constant" warnings on the perfectly
+// acceptable
+// do { ... } while (0) constructions below.
+// Refer to http://stackoverflow.com/questions/1946445/
+//   is-there-better-way-to-write-do-while0-construct-to-avoid-compiler-warnings
+// for some discussion of the issue.
+#ifdef _WIN32
+#pragma warning(disable : 4127)
+#endif
+
+class VideoSource;
+class UnitTestEncodeCompleteCallback;
+class UnitTestDecodeCompleteCallback;
+
+class UnitTest : public CodecTest
+{
+public:
+    UnitTest();
+    virtual ~UnitTest();
+    virtual void Perform();
+    virtual void Print();
+
+protected:
+    UnitTest(std::string name, std::string description);
+    virtual WebRtc_UWord32 CodecSpecific_SetBitrate(
+        WebRtc_UWord32 bitRate,
+        WebRtc_UWord32 /* frameRate */);
+    virtual void Setup();
+    virtual void Teardown();
+    virtual void RateControlTests();
+    virtual int Decode();
+    virtual int DecodeWithoutAssert();
+    virtual int SetCodecSpecificParameters() {return 0;};
+
+    virtual bool CheckIfBitExact(const void *ptrA, unsigned int aLengthBytes,
+                                 const void *ptrB, unsigned int bLengthBytes);
+
+    WebRtc_UWord32 WaitForEncodedFrame() const;
+    WebRtc_UWord32 WaitForDecodedFrame() const;
+
+    int _tests;
+    int _errors;
+
+    VideoSource* _source;
+    unsigned char* _refFrame;
+    unsigned char* _refEncFrame;
+    unsigned char* _refDecFrame;
+    unsigned int _refEncFrameLength;
+    FILE* _sourceFile;
+
+    UnitTestEncodeCompleteCallback* _encodeCompleteCallback;
+    UnitTestDecodeCompleteCallback* _decodeCompleteCallback;
+    enum { kMaxWaitEncTimeMs = 100 };
+    enum { kMaxWaitDecTimeMs = 25 };
+};
+
+class UnitTestEncodeCompleteCallback : public webrtc::EncodedImageCallback
+{
+public:
+    UnitTestEncodeCompleteCallback(TestVideoEncodedBuffer* buffer,
+                                   WebRtc_UWord32 decoderSpecificSize = 0,
+                                   void* decoderSpecificInfo = NULL) :
+      _encodedVideoBuffer(buffer),
+      _encodeComplete(false) {}
+    WebRtc_Word32 Encoded(webrtc::EncodedImage& encodedImage,
+                          const webrtc::CodecSpecificInfo* codecSpecificInfo,
+                          const webrtc::RTPFragmentationHeader*
+                          fragmentation = NULL);
+    bool EncodeComplete();
+    // Note that this only makes sense if an encode has been completed
+    webrtc::VideoFrameType EncodedFrameType() const;
+private:
+    TestVideoEncodedBuffer* _encodedVideoBuffer;
+    bool _encodeComplete;
+    webrtc::VideoFrameType _encodedFrameType;
+};
+
+class UnitTestDecodeCompleteCallback : public webrtc::DecodedImageCallback
+{
+public:
+    UnitTestDecodeCompleteCallback(TestVideoBuffer* buffer) :
+        _decodedVideoBuffer(buffer), _decodeComplete(false) {}
+    WebRtc_Word32 Decoded(webrtc::VideoFrame& image);
+    bool DecodeComplete();
+private:
+    TestVideoBuffer* _decodedVideoBuffer;
+    bool _decodeComplete;
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_UNIT_TEST_H_
+
diff --git a/src/modules/video_coding/codecs/test_framework/video_buffer.cc b/src/modules/video_coding/codecs/test_framework/video_buffer.cc
new file mode 100644
index 0000000..3958e90
--- /dev/null
+++ b/src/modules/video_coding/codecs/test_framework/video_buffer.cc
@@ -0,0 +1,319 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+#include <string.h>
+#include "video_buffer.h"
+
+using namespace webrtc;
+
+TestVideoBuffer::TestVideoBuffer():
+_buffer(0),
+_bufferSize(0),
+_bufferLength(0),
+_startOffset(0),
+_timeStamp(0),
+_width(0),
+_height(0)
+{
+   //
+}
+
+
+TestVideoBuffer::~TestVideoBuffer()
+{
+    _timeStamp = 0;
+    _startOffset = 0;
+    _bufferLength = 0;
+    _bufferSize = 0;
+
+    if(_buffer)
+    {
+        delete [] _buffer;
+        _buffer = 0;
+    }
+}
+
+TestVideoBuffer::TestVideoBuffer(const TestVideoBuffer& rhs)
+:
+_buffer(0),
+_bufferSize(rhs._bufferSize),
+_bufferLength(rhs._bufferLength),
+_startOffset(rhs._startOffset),
+_timeStamp(rhs._timeStamp),
+_width(rhs._width),
+_height(rhs._height)
+{
+    // make sure that our buffer is big enough
+    _buffer = new unsigned char[_bufferSize];
+
+    // only copy required length
+    memcpy(_buffer + _startOffset, rhs._buffer, _bufferLength);  // GetBuffer() includes _startOffset
+}
+
+void TestVideoBuffer::SetTimeStamp(unsigned int timeStamp)
+{
+    _timeStamp = timeStamp;
+}
+
+unsigned int
+TestVideoBuffer::GetWidth() const
+{
+    return _width;
+}
+
+unsigned int
+TestVideoBuffer::GetHeight() const
+{
+    return _height;
+}
+
+void
+TestVideoBuffer::SetWidth(unsigned int width)
+{
+    _width = width;
+}
+
+void
+TestVideoBuffer::SetHeight(unsigned int height)
+{
+    _height = height;
+}
+
+
+void TestVideoBuffer::Free()
+{
+    _timeStamp = 0;
+    _startOffset = 0;
+    _bufferLength = 0;
+    _bufferSize = 0;
+    _height = 0;
+    _width = 0;
+
+    if(_buffer)
+    {
+        delete [] _buffer;
+        _buffer = 0;
+    }
+}
+
+void TestVideoBuffer::VerifyAndAllocate(unsigned int minimumSize)
+{
+    if(minimumSize > _bufferSize)
+    {
+        // make sure that our buffer is big enough
+        unsigned char * newBufferBuffer = new unsigned char[minimumSize];
+        if(_buffer)
+        {
+            // copy the old data
+            memcpy(newBufferBuffer, _buffer, _bufferSize);
+            delete [] _buffer;
+        }
+        _buffer = newBufferBuffer;
+        _bufferSize = minimumSize;
+    }
+}
+
+int TestVideoBuffer::SetOffset(unsigned int length)
+{
+    if (length > _bufferSize ||
+        length > _bufferLength)
+    {
+        return -1;
+    }
+
+    unsigned int oldOffset = _startOffset;
+
+    if(oldOffset > length)
+    {
+        unsigned int newLength = _bufferLength + (oldOffset-length);// increase by the diff
+        assert(newLength <= _bufferSize);
+        _bufferLength = newLength;
+    }
+    if(oldOffset < length)
+    {
+        if(_bufferLength > (length-oldOffset))
+        {
+            _bufferLength -= (length-oldOffset); // decrease by the diff
+        }
+    }
+    _startOffset = length; // update
+
+    return 0;
+}
+
+void TestVideoBuffer::UpdateLength(unsigned int newLength)
+{
+    assert(newLength +_startOffset <= _bufferSize);
+    _bufferLength = newLength;
+}
+
+void TestVideoBuffer::CopyBuffer(unsigned int length, const unsigned char* buffer)
+{
+    assert(length+_startOffset <= _bufferSize);
+    memcpy(_buffer+_startOffset, buffer, length);
+    _bufferLength = length;
+}
+
+void TestVideoBuffer::CopyBuffer(TestVideoBuffer& fromVideoBuffer)
+{
+    assert(fromVideoBuffer.GetLength() + fromVideoBuffer.GetStartOffset() <= _bufferSize);
+    assert(fromVideoBuffer.GetSize() <= _bufferSize);
+
+    _bufferLength = fromVideoBuffer.GetLength();
+    _startOffset = fromVideoBuffer.GetStartOffset();
+    _timeStamp = fromVideoBuffer.GetTimeStamp();
+    _height = fromVideoBuffer.GetHeight();
+    _width = fromVideoBuffer.GetWidth();
+
+    // only copy required length
+    memcpy(_buffer+_startOffset, fromVideoBuffer.GetBuffer(), fromVideoBuffer.GetLength());  // GetBuffer() includes _startOffset
+
+}
+
+void TestVideoBuffer::CopyPointer(const TestVideoBuffer& fromVideoBuffer)
+{
+    _bufferSize = fromVideoBuffer.GetSize();
+    _bufferLength = fromVideoBuffer.GetLength();
+    _startOffset = fromVideoBuffer.GetStartOffset();
+    _timeStamp = fromVideoBuffer.GetTimeStamp();
+    _height = fromVideoBuffer.GetHeight();
+    _width = fromVideoBuffer.GetWidth();
+
+    _buffer = fromVideoBuffer.GetBuffer();
+}
+
+void TestVideoBuffer::ClearPointer()
+{
+    _buffer = NULL;
+}
+
+void TestVideoBuffer::SwapBuffers(TestVideoBuffer& videoBuffer)
+{
+    unsigned char*  tempBuffer = _buffer;
+    unsigned int    tempSize = _bufferSize;
+    unsigned int    tempLength =_bufferLength;
+    unsigned int    tempOffset = _startOffset;
+    unsigned int    tempTime = _timeStamp;
+    unsigned int    tempHeight = _height;
+    unsigned int    tempWidth = _width;
+
+    _buffer = videoBuffer.GetBuffer();
+    _bufferSize = videoBuffer.GetSize();
+    _bufferLength = videoBuffer.GetLength();
+    _startOffset = videoBuffer.GetStartOffset();
+    _timeStamp =  videoBuffer.GetTimeStamp();
+    _height = videoBuffer.GetHeight();
+    _width = videoBuffer.GetWidth();
+
+
+    videoBuffer.Set(tempBuffer, tempSize, tempLength, tempOffset, tempTime);
+    videoBuffer.SetHeight(tempHeight);
+    videoBuffer.SetWidth(tempWidth);
+}
+
+void TestVideoBuffer::Set(unsigned char* tempBuffer,unsigned int tempSize,unsigned int tempLength, unsigned int tempOffset,unsigned int timeStamp)
+{
+    _buffer = tempBuffer;
+    _bufferSize = tempSize;
+    _bufferLength = tempLength;
+    _startOffset = tempOffset;
+    _timeStamp = timeStamp;
+}
+
+unsigned char* TestVideoBuffer::GetBuffer() const
+{
+    return _buffer+_startOffset;
+}
+
+unsigned int TestVideoBuffer::GetStartOffset() const
+{
+    return _startOffset;
+}
+
+unsigned int TestVideoBuffer::GetSize() const
+{
+    return _bufferSize;
+}
+
+unsigned int TestVideoBuffer::GetLength() const
+{
+    return _bufferLength;
+}
+
+unsigned int TestVideoBuffer::GetTimeStamp() const
+{
+    return _timeStamp;
+}
+
+/**
+*   TestVideoEncodedBuffer
+*
+*/
+
+TestVideoEncodedBuffer::TestVideoEncodedBuffer() :
+    _captureWidth(0),
+    _captureHeight(0),
+    _frameRate(-1)
+{
+    _frameType = kDeltaFrame;
+}
+
+TestVideoEncodedBuffer::~TestVideoEncodedBuffer()
+{
+}
+
+void TestVideoEncodedBuffer::SetCaptureWidth(unsigned short width)
+{
+    _captureWidth = width;
+}
+
+void TestVideoEncodedBuffer::SetCaptureHeight(unsigned short height)
+{
+    _captureHeight = height;
+}
+
+unsigned short TestVideoEncodedBuffer::GetCaptureWidth()
+{
+    return _captureWidth;
+}
+
+unsigned short TestVideoEncodedBuffer::GetCaptureHeight()
+{
+    return _captureHeight;
+}
+
+VideoFrameType TestVideoEncodedBuffer::GetFrameType()
+{
+    return _frameType;
+}
+
+void TestVideoEncodedBuffer::SetFrameType(VideoFrameType frametype)
+{
+    _frameType = frametype;
+}
+
+void TestVideoEncodedBuffer::Reset()
+{
+    _captureWidth = 0;
+    _captureHeight = 0;
+    _frameRate = -1;
+    _frameType = kDeltaFrame;
+}
+
+void  TestVideoEncodedBuffer::SetFrameRate(float frameRate)
+{
+    _frameRate = frameRate;
+}
+
+float  TestVideoEncodedBuffer::GetFrameRate()
+{
+    return _frameRate;
+}
diff --git a/src/modules/video_coding/codecs/test_framework/video_buffer.h b/src/modules/video_coding/codecs/test_framework/video_buffer.h
new file mode 100644
index 0000000..824440e
--- /dev/null
+++ b/src/modules/video_coding/codecs/test_framework/video_buffer.h
@@ -0,0 +1,122 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_BUFFER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_BUFFER_H_
+
+#include "typedefs.h"
+#include "video_image.h"
+
+class TestVideoBuffer
+{
+public:
+    TestVideoBuffer();
+
+    virtual ~TestVideoBuffer();
+
+    TestVideoBuffer(const TestVideoBuffer& rhs);
+
+    /**
+    * Verifies that current allocated buffer size is larger than or equal to the input size.
+    * If the current buffer size is smaller, a new allocation is made and the old buffer data is copied to the new buffer.
+    */
+    void VerifyAndAllocate(unsigned int minimumSize);
+
+    void UpdateLength(unsigned int newLength);
+
+    void SwapBuffers(TestVideoBuffer& videoBuffer);
+
+    void CopyBuffer(unsigned int length, const unsigned char* fromBuffer);
+
+    void CopyBuffer(TestVideoBuffer& fromVideoBuffer);
+
+    // Use with care, and remember to call ClearPointer() when done.
+    void CopyPointer(const TestVideoBuffer& fromVideoBuffer);
+
+    void ClearPointer();
+
+    int  SetOffset(unsigned int length);            // Sets offset to beginning of frame in buffer
+
+    void Free();                                    // Deletes frame buffer and resets members to zero
+
+    void SetTimeStamp(unsigned int timeStamp);      // Sets timestamp of frame (90kHz)
+
+    /**
+    *   Gets pointer to frame buffer
+    */
+    unsigned char* GetBuffer() const;
+
+    /**
+    *   Gets allocated buffer size
+    */
+    unsigned int	GetSize() const;
+
+    /**
+    *   Gets length of frame
+    */
+    unsigned int	GetLength() const;
+
+    /**
+    *   Gets timestamp of frame (90kHz)
+    */
+    unsigned int	GetTimeStamp() const;
+
+    unsigned int	GetWidth() const;
+    unsigned int	GetHeight() const;
+
+    void            SetWidth(unsigned int width);
+    void            SetHeight(unsigned int height);
+
+private:
+    TestVideoBuffer& operator=(const TestVideoBuffer& inBuffer);
+
+private:
+    void Set(unsigned char* buffer,unsigned int size,unsigned int length,unsigned int offset, unsigned int timeStamp);
+    unsigned int GetStartOffset() const;
+
+    unsigned char*		  _buffer;          // Pointer to frame buffer
+    unsigned int		  _bufferSize;      // Allocated buffer size
+    unsigned int		  _bufferLength;    // Length (in bytes) of frame
+    unsigned int		  _startOffset;     // Offset (in bytes) to beginning of frame in buffer
+    unsigned int		  _timeStamp;       // Timestamp of frame (90kHz)
+    unsigned int          _width;
+    unsigned int          _height;
+};
+
+class TestVideoEncodedBuffer: public TestVideoBuffer
+{
+public:
+    TestVideoEncodedBuffer();
+    ~TestVideoEncodedBuffer();
+
+    void SetCaptureWidth(unsigned short width);
+    void SetCaptureHeight(unsigned short height);
+    unsigned short GetCaptureWidth();
+    unsigned short GetCaptureHeight();
+
+    webrtc::VideoFrameType GetFrameType();
+    void SetFrameType(webrtc::VideoFrameType frametype);
+
+    void Reset();
+
+    void SetFrameRate(float frameRate);
+    float GetFrameRate();
+
+private:
+    TestVideoEncodedBuffer& operator=(const TestVideoEncodedBuffer& inBuffer);
+
+private:
+    unsigned short			   _captureWidth;
+    unsigned short			   _captureHeight;
+    webrtc::VideoFrameType     _frameType;
+    float                      _frameRate;
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_BUFFER_H_
diff --git a/src/modules/video_coding/codecs/test_framework/video_source.cc b/src/modules/video_coding/codecs/test_framework/video_source.cc
new file mode 100644
index 0000000..a0299c4
--- /dev/null
+++ b/src/modules/video_coding/codecs/test_framework/video_source.cc
@@ -0,0 +1,425 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_source.h"
+
+#include <stdio.h>
+
+#include "gtest/gtest.h"
+#include "testsupport/fileutils.h"
+
+VideoSource::VideoSource()
+:
+_fileName(webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv"),
+_width(352),
+_height(288),
+_type(webrtc::kI420),
+_frameRate(30)
+{
+}
+
+VideoSource::VideoSource(std::string fileName, VideoSize size,
+    int frameRate /*= 30*/, webrtc::VideoType type /*=  webrtc::kI420*/)
+:
+_fileName(fileName),
+_type(type),
+_frameRate(frameRate)
+{
+    assert(size != kUndefined && size != kNumberOfVideoSizes);
+    assert(type != webrtc::kUnknown);
+    assert(frameRate > 0);
+    if (GetWidthHeight(size, _width, _height) != 0) {
+        assert(false);
+    }
+}
+
+VideoSource::VideoSource(std::string fileName, int width, int height,
+    int frameRate /*= 30*/,  webrtc::VideoType type /*=  webrtc::kI420*/)
+:
+_fileName(fileName),
+_width(width),
+_height(height),
+_type(type),
+_frameRate(frameRate)
+{
+    assert(width > 0);
+    assert(height > 0);
+    assert(type != webrtc::kUnknown);
+    assert(frameRate > 0);
+}
+
+VideoSize
+VideoSource::GetSize() const
+{
+    return GetSize(_width, _height);
+}
+
+VideoSize
+VideoSource::GetSize(WebRtc_UWord16 width, WebRtc_UWord16 height)
+{
+    if(width == 128 && height == 96)
+    {
+        return kSQCIF;
+    }else if(width == 160 && height == 120)
+    {
+        return kQQVGA;
+    }else if(width == 176 && height == 144)
+    {
+        return kQCIF;
+    }else if(width == 320 && height == 240)
+    {
+        return kQVGA;
+    }else if(width == 352 && height == 288)
+    {
+        return kCIF;
+    }else if(width == 640 && height == 480)
+    {
+        return kVGA;
+    }else if(width == 720 && height == 480)
+    {
+        return kNTSC;
+    }else if(width == 704 && height == 576)
+    {
+        return k4CIF;
+    }else if(width == 800 && height == 600)
+    {
+        return kSVGA;
+    }else if(width == 960 && height == 720)
+    {
+        return kHD;
+    }else if(width == 1024 && height == 768)
+    {
+        return kXGA;
+    }else if(width == 1440 && height == 1080)
+    {
+        return kFullHD;
+    }else if(width == 400 && height == 240)
+    {
+        return kWQVGA;
+    }else if(width == 800 && height == 480)
+    {
+        return kWVGA;
+    }else if(width == 1280 && height == 720)
+    {
+        return kWHD;
+    }else if(width == 1920 && height == 1080)
+    {
+        return kWFullHD;
+    }
+    return kUndefined;
+}
+
+unsigned int
+VideoSource::GetFrameLength() const
+{
+    return webrtc::CalcBufferSize(_type, _width, _height);
+}
+
+const char*
+VideoSource::GetMySizeString() const
+{
+    return VideoSource::GetSizeString(GetSize());
+}
+
+const char*
+VideoSource::GetSizeString(VideoSize size)
+{
+    switch (size)
+    {
+        case kSQCIF:
+            return "SQCIF";
+        case kQQVGA:
+            return "QQVGA";
+        case kQCIF:
+            return "QCIF";
+        case kQVGA:
+            return "QVGA";
+        case kCIF:
+            return "CIF";
+        case kVGA:
+            return "VGA";
+        case kNTSC:
+            return "NTSC";
+        case k4CIF:
+            return "4CIF";
+        case kSVGA:
+            return "SVGA";
+        case kHD:
+            return "HD";
+        case kXGA:
+            return "XGA";
+        case kFullHD:
+            return "Full_HD";
+        case kWQVGA:
+            return "WQVGA";
+        case kWHD:
+            return "WHD";
+        case kWFullHD:
+            return "WFull_HD";
+        default:
+            return "Undefined";
+    }
+}
+
+std::string
+VideoSource::GetFilePath() const
+{
+    size_t slashPos = _fileName.find_last_of("/\\");
+    if (slashPos == std::string::npos)
+    {
+        return ".";
+    }
+
+    return _fileName.substr(0, slashPos);
+}
+
+std::string
+VideoSource::GetName() const
+{
+    // Remove path.
+    size_t slashPos = _fileName.find_last_of("/\\");
+    if (slashPos == std::string::npos)
+    {
+        slashPos = 0;
+    }
+    else
+    {
+        slashPos++;
+    }
+
+    // Remove extension and underscored suffix if it exists.
+    return _fileName.substr(slashPos, std::min(_fileName.find_last_of("_"),
+        _fileName.find_last_of(".")) - slashPos);
+}
+
+void
+VideoSource::Convert(const VideoSource &target, bool force /* = false */) const
+{
+    // Ensure target rate is less than or equal to source
+    // (i.e. we are only temporally downsampling).
+    ASSERT_TRUE(target.GetFrameRate() <= _frameRate);
+    // Only supports YUV420 currently.
+    ASSERT_TRUE(_type == webrtc::kI420 && target.GetType() == webrtc::kI420);
+    if (!force && (FileExists(target.GetFileName().c_str()) ||
+        (target.GetWidth() == _width && target.GetHeight() == _height && target.GetFrameRate() == _frameRate)))
+    {
+        // Assume that the filename uniquely defines the content.
+        // If the file already exists, it is the correct file.
+        return;
+    }
+    FILE *inFile = NULL;
+    FILE *outFile = NULL;
+
+    inFile = fopen(_fileName.c_str(), "rb");
+    ASSERT_TRUE(inFile != NULL);
+
+    outFile = fopen(target.GetFileName().c_str(), "wb");
+    ASSERT_TRUE(outFile != NULL);
+
+    FrameDropper fd;
+    fd.SetFrameRate(target.GetFrameRate(), _frameRate);
+
+    const size_t lengthOutFrame = webrtc::CalcBufferSize(target.GetType(),
+        target.GetWidth(), target.GetHeight());
+    ASSERT_TRUE(lengthOutFrame > 0);
+    unsigned char *outFrame = new unsigned char[lengthOutFrame];
+
+    const size_t lengthInFrame = webrtc::CalcBufferSize(_type, _width, _height);
+    ASSERT_TRUE(lengthInFrame > 0);
+    unsigned char *inFrame = new unsigned char[lengthInFrame];
+
+    while (fread(inFrame, 1, lengthInFrame, inFile) == lengthInFrame)
+    {
+        if (!fd.DropFrame())
+        {
+            ASSERT_TRUE(target.GetWidth() == _width &&
+                   target.GetHeight() == _height);
+            // Add video interpolator here!
+            if (fwrite(outFrame, 1, lengthOutFrame,
+                       outFile) !=  lengthOutFrame) {
+              return;
+            }
+        }
+    }
+
+    delete inFrame;
+    delete outFrame;
+    fclose(inFile);
+    fclose(outFile);
+}
+
+bool VideoSource::FileExists(const char* fileName)
+{
+    FILE* fp = NULL;
+    fp = fopen(fileName, "rb");
+    if(fp != NULL)
+    {
+        fclose(fp);
+        return true;
+    }
+    return false;
+}
+
+
+int
+VideoSource::GetWidthHeight( VideoSize size, int & width, int& height)
+{
+    switch(size)
+    {
+    case kSQCIF:
+        width = 128;
+        height = 96;
+        return 0;
+    case kQQVGA:
+        width = 160;
+        height = 120;
+        return 0;
+    case kQCIF:
+        width = 176;
+        height = 144;
+        return 0;
+    case kCGA:
+        width = 320;
+        height = 200;
+        return 0;
+    case kQVGA:
+        width = 320;
+        height = 240;
+        return 0;
+    case kSIF:
+        width = 352;
+        height = 240;
+        return 0;
+    case kWQVGA:
+        width = 400;
+        height = 240;
+        return 0;
+    case kCIF:
+        width = 352;
+        height = 288;
+        return 0;
+    case kW288p:
+        width = 512;
+        height = 288;
+        return 0;
+    case k448p:
+        width = 576;
+        height = 448;
+        return 0;
+    case kVGA:
+        width = 640;
+        height = 480;
+        return 0;
+    case k432p:
+        width = 720;
+        height = 432;
+        return 0;
+    case kW432p:
+        width = 768;
+        height = 432;
+        return 0;
+    case k4SIF:
+        width = 704;
+        height = 480;
+        return 0;
+    case kW448p:
+        width = 768;
+        height = 448;
+        return 0;
+    case kNTSC:
+        width = 720;
+        height = 480;
+        return 0;
+    case kFW448p:
+        width = 800;
+        height = 448;
+        return 0;
+    case kWVGA:
+        width = 800;
+        height = 480;
+        return 0;
+    case k4CIF:
+        width = 704;
+        height = 576;
+        return 0;
+    case kSVGA:
+        width = 800;
+        height = 600;
+        return 0;
+    case kW544p:
+        width = 960;
+        height = 544;
+        return 0;
+    case kW576p:
+        width = 1024;
+        height = 576;
+        return 0;
+    case kHD:
+        width = 960;
+        height = 720;
+        return 0;
+    case kXGA:
+        width = 1024;
+        height = 768;
+        return 0;
+    case kFullHD:
+        width = 1440;
+        height = 1080;
+        return 0;
+    case kWHD:
+        width = 1280;
+        height = 720;
+        return 0;
+    case kWFullHD:
+        width = 1920;
+        height = 1080;
+        return 0;
+    default:
+        return -1;
+    }
+}
+
+FrameDropper::FrameDropper()
+:
+_dropsBetweenRenders(0),
+_frameCounter(0)
+{
+}
+
+bool
+FrameDropper::DropFrame()
+{
+    _frameCounter++;
+    if (_frameCounter > _dropsBetweenRenders)
+    {
+        _frameCounter = 0;
+        return false;
+    }
+    return true;
+}
+
+unsigned int
+FrameDropper::DropsBetweenRenders()
+{
+    return _dropsBetweenRenders;
+}
+
+void
+FrameDropper::SetFrameRate(double frameRate, double maxFrameRate)
+{
+    if (frameRate >= 1.0)
+    {
+        _dropsBetweenRenders = static_cast<unsigned int>(maxFrameRate / frameRate + 0.5) - 1;
+    }
+    else
+    {
+        _dropsBetweenRenders = 0;
+    }
+}
diff --git a/src/modules/video_coding/codecs/test_framework/video_source.h b/src/modules/video_coding/codecs/test_framework/video_source.h
new file mode 100644
index 0000000..782fff1
--- /dev/null
+++ b/src/modules/video_coding/codecs/test_framework/video_source.h
@@ -0,0 +1,109 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_SOURCE_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_SOURCE_H_
+
+#include <string>
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+
+enum VideoSize
+    {
+        kUndefined,
+        kSQCIF,     // 128*96       = 12 288
+        kQQVGA,     // 160*120      = 19 200
+        kQCIF,      // 176*144      = 25 344
+        kCGA,       // 320*200      = 64 000
+        kQVGA,      // 320*240      = 76 800
+        kSIF,       // 352*240      = 84 480
+        kWQVGA,     // 400*240      = 96 000
+        kCIF,       // 352*288      = 101 376
+        kW288p,     // 512*288      = 147 456 (WCIF)
+        k448p,      // 576*448      = 281 088
+        kVGA,       // 640*480      = 307 200
+        k432p,      // 720*432      = 311 040
+        kW432p,     // 768*432      = 331 776
+        k4SIF,      // 704*480      = 337 920
+        kW448p,     // 768*448      = 344 064
+        kNTSC,		// 720*480      = 345 600
+        kFW448p,    // 800*448      = 358 400
+        kWVGA,      // 800*480      = 384 000
+        k4CIF,      // 704�576      = 405 504
+        kSVGA,      // 800*600      = 480 000
+        kW544p,     // 960*544      = 522 240
+        kW576p,     // 1024*576     = 589 824 (W4CIF)
+        kHD,        // 960*720      = 691 200
+        kXGA,       // 1024*768     = 786 432
+        kWHD,       // 1280*720     = 921 600
+        kFullHD,    // 1440*1080    = 1 555 200
+        kWFullHD,   // 1920*1080    = 2 073 600
+
+        kNumberOfVideoSizes
+    };
+
+class VideoSource
+{
+public:
+    VideoSource();
+    VideoSource(std::string fileName, VideoSize size, int frameRate = 30,
+        webrtc::VideoType type = webrtc::kI420);
+    VideoSource(std::string fileName, int width, int height, int frameRate = 30,
+                webrtc::VideoType type = webrtc::kI420);
+
+    std::string GetFileName() const { return _fileName; }
+    int GetWidth() const { return _width; }
+    int GetHeight() const { return _height; }
+    webrtc::VideoType GetType() const { return _type; }
+    int GetFrameRate() const { return _frameRate; }
+
+    // Returns the file path without a trailing slash.
+    std::string GetFilePath() const;
+
+    // Returns the filename with the path (including the leading slash) removed.
+    std::string GetName() const;
+
+    VideoSize GetSize() const;
+    static VideoSize GetSize(WebRtc_UWord16 width, WebRtc_UWord16 height);
+    unsigned int GetFrameLength() const;
+
+    // Returns a human-readable size string.
+    static const char* GetSizeString(VideoSize size);
+    const char* GetMySizeString() const;
+
+    // Opens the video source, converting and writing to the specified target.
+    // If force is true, the conversion will be done even if the target file
+    // already exists.
+    void Convert(const VideoSource& target, bool force = false) const;
+    static bool FileExists(const char* fileName);
+private:
+    static int GetWidthHeight( VideoSize size, int& width, int& height);
+    std::string _fileName;
+    int _width;
+    int _height;
+    webrtc::VideoType _type;
+    int _frameRate;
+};
+
+class FrameDropper
+{
+public:
+    FrameDropper();
+    bool DropFrame();
+    unsigned int DropsBetweenRenders();
+    void SetFrameRate(double frameRate, double maxFrameRate);
+
+private:
+    unsigned int _dropsBetweenRenders;
+    unsigned int _frameCounter;
+};
+
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_TEST_FRAMEWORK_VIDEO_SOURCE_H_
+
diff --git a/src/modules/video_coding/codecs/tools/video_codecs_tools.gypi b/src/modules/video_coding/codecs/tools/video_codecs_tools.gypi
new file mode 100644
index 0000000..02ac465
--- /dev/null
+++ b/src/modules/video_coding/codecs/tools/video_codecs_tools.gypi
@@ -0,0 +1,30 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'conditions': [
+    ['include_tests==1', {
+      'targets': [
+        {
+          'target_name': 'video_quality_measurement',
+          'type': 'executable',
+          'dependencies': [
+            'video_codecs_test_framework',
+            'webrtc_video_coding',
+            '<(DEPTH)/third_party/google-gflags/google-gflags.gyp:google-gflags',
+            '<(webrtc_root)/test/metrics.gyp:metrics',
+            '<(webrtc_vp8_dir)/vp8.gyp:webrtc_vp8',
+           ],
+           'sources': [
+             'video_quality_measurement.cc',
+           ],
+        },
+      ], # targets
+    }], # include_tests
+  ], # conditions
+}
diff --git a/src/modules/video_coding/codecs/tools/video_quality_measurement.cc b/src/modules/video_coding/codecs/tools/video_quality_measurement.cc
new file mode 100644
index 0000000..6ea13d1
--- /dev/null
+++ b/src/modules/video_coding/codecs/tools/video_quality_measurement.cc
@@ -0,0 +1,526 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdarg.h>
+#include <sys/stat.h>  // To check for directory existence.
+
+#include <cassert>
+#include <cstdio>
+#include <ctime>
+
+#ifndef S_ISDIR  // Not defined in stat.h on Windows.
+#define S_ISDIR(mode) (((mode) & S_IFMT) == S_IFDIR)
+#endif
+
+#include "common_types.h"
+#include "google/gflags.h"
+#include "modules/video_coding/codecs/test/packet_manipulator.h"
+#include "modules/video_coding/codecs/test/stats.h"
+#include "modules/video_coding/codecs/test/videoprocessor.h"
+#include "modules/video_coding/codecs/vp8/include/vp8.h"
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "system_wrappers/interface/trace.h"
+#include "testsupport/frame_reader.h"
+#include "testsupport/frame_writer.h"
+#include "testsupport/metrics/video_metrics.h"
+#include "testsupport/packet_reader.h"
+
+DEFINE_string(test_name, "Quality test", "The name of the test to run. ");
+DEFINE_string(test_description, "", "A more detailed description about what "
+              "the current test is about.");
+DEFINE_string(input_filename, "", "Input file. "
+              "The source video file to be encoded and decoded. Must be in "
+              ".yuv format");
+DEFINE_int32(width, -1, "Width in pixels of the frames in the input file.");
+DEFINE_int32(height, -1, "Height in pixels of the frames in the input file.");
+DEFINE_int32(framerate, 30, "Frame rate of the input file, in FPS "
+             "(frames-per-second). ");
+DEFINE_string(output_dir, ".", "Output directory. "
+              "The directory where the output file will be put. Must already "
+              "exist.");
+DEFINE_bool(use_single_core, false, "Force using a single core. If set to "
+            "true, only one core will be used for processing. Using a single "
+            "core is necessary to get a deterministic behavior for the"
+            "encoded frames - using multiple cores will produce different "
+            "encoded frames since multiple cores are competing to consume the "
+            "byte budget for each frame in parallel. If set to false, "
+            "the maximum detected number of cores will be used. ");
+DEFINE_bool(disable_fixed_random_seed , false, "Set this flag to disable the"
+            "usage of a fixed random seed for the random generator used "
+            "for packet loss. Disabling this will cause consecutive runs "
+            "loose packets at different locations, which is bad for "
+            "reproducibility.");
+DEFINE_string(output_filename, "", "Output file. "
+              "The name of the output video file resulting of the processing "
+              "of the source file. By default this is the same name as the "
+              "input file with '_out' appended before the extension.");
+DEFINE_int32(bitrate, 500, "Bit rate in kilobits/second.");
+DEFINE_int32(keyframe_interval, 0, "Forces a keyframe every Nth frame. "
+             "0 means the encoder decides when to insert keyframes.  Note that "
+             "the encoder may create a keyframe in other locations in addition "
+             "to the interval that is set using this parameter.");
+DEFINE_int32(temporal_layers, 0, "The number of temporal layers to use "
+             "(VP8 specific codec setting). Must be 0-4.");
+DEFINE_int32(packet_size, 1500, "Simulated network packet size in bytes (MTU). "
+             "Used for packet loss simulation.");
+DEFINE_int32(max_payload_size, 1440, "Max payload size in bytes for the "
+             "encoder.");
+DEFINE_string(packet_loss_mode, "uniform", "Packet loss mode. Two different "
+              "packet loss models are supported: uniform or burst. This "
+              "setting has no effect unless packet_loss_rate is >0. ");
+DEFINE_double(packet_loss_probability, 0.0, "Packet loss probability. A value "
+              "between 0.0 and 1.0 that defines the probability of a packet "
+              "being lost. 0.1 means 10% and so on.");
+DEFINE_int32(packet_loss_burst_length, 1, "Packet loss burst length. Defines "
+             "how many packets will be lost in a burst when a packet has been "
+             "decided to be lost. Must be >=1.");
+DEFINE_bool(csv, false, "CSV output. Enabling this will output all frame "
+            "statistics at the end of execution. Recommended to run combined "
+            "with --noverbose to avoid mixing output.");
+DEFINE_bool(python, false, "Python output. Enabling this will output all frame "
+            "statistics as a Python script at the end of execution. "
+            "Recommended to run combine with --noverbose to avoid mixing "
+            "output.");
+DEFINE_bool(verbose, true, "Verbose mode. Prints a lot of debugging info. "
+            "Suitable for tracking progress but not for capturing output. "
+            "Disable with --noverbose flag.");
+
+// Custom log method that only prints if the verbose flag is given.
+// Supports all the standard printf parameters and formatting (just forwarded).
+int Log(const char *format, ...) {
+  int result = 0;
+  if (FLAGS_verbose) {
+    va_list args;
+    va_start(args, format);
+    result = vprintf(format, args);
+    va_end(args);
+  }
+  return result;
+}
+
+// Validates the arguments given as command line flags and fills in the
+// TestConfig struct with all configurations needed for video processing.
+// Returns 0 if everything is OK, otherwise an exit code.
+int HandleCommandLineFlags(webrtc::test::TestConfig* config) {
+  // Validate the mandatory flags:
+  if (FLAGS_input_filename == "" || FLAGS_width == -1 || FLAGS_height == -1) {
+    printf("%s\n", google::ProgramUsage());
+    return 1;
+  }
+  config->name = FLAGS_test_name;
+  config->description = FLAGS_test_description;
+
+  // Verify the input file exists and is readable.
+  FILE* test_file;
+  test_file = fopen(FLAGS_input_filename.c_str(), "rb");
+  if (test_file == NULL) {
+    fprintf(stderr, "Cannot read the specified input file: %s\n",
+            FLAGS_input_filename.c_str());
+    return 2;
+  }
+  fclose(test_file);
+  config->input_filename = FLAGS_input_filename;
+
+  // Verify the output dir exists.
+  struct stat dir_info;
+  if (!(stat(FLAGS_output_dir.c_str(), &dir_info) == 0 &&
+      S_ISDIR(dir_info.st_mode))) {
+    fprintf(stderr, "Cannot find output directory: %s\n",
+              FLAGS_output_dir.c_str());
+    return 3;
+  }
+  config->output_dir = FLAGS_output_dir;
+
+  // Manufacture an output filename if none was given.
+  if (FLAGS_output_filename == "") {
+    // Cut out the filename without extension from the given input file
+    // (which may include a path)
+    int startIndex = FLAGS_input_filename.find_last_of("/") + 1;
+    if (startIndex == 0) {
+      startIndex = 0;
+    }
+    FLAGS_output_filename =
+        FLAGS_input_filename.substr(startIndex,
+                                    FLAGS_input_filename.find_last_of(".")
+                                    - startIndex) + "_out.yuv";
+  }
+
+  // Verify output file can be written.
+  if (FLAGS_output_dir == ".") {
+    config->output_filename = FLAGS_output_filename;
+  } else {
+    config->output_filename = FLAGS_output_dir + "/"+ FLAGS_output_filename;
+  }
+  test_file = fopen(config->output_filename.c_str(), "wb");
+  if (test_file == NULL) {
+    fprintf(stderr, "Cannot write output file: %s\n",
+            config->output_filename.c_str());
+    return 4;
+  }
+  fclose(test_file);
+
+  // Check single core flag.
+  config->use_single_core = FLAGS_use_single_core;
+
+  // Get codec specific configuration.
+  webrtc::VideoCodingModule::Codec(webrtc::kVideoCodecVP8,
+                                   config->codec_settings);
+
+  // Check the temporal layers.
+  if (FLAGS_temporal_layers < 0 ||
+      FLAGS_temporal_layers > webrtc::kMaxTemporalStreams) {
+    fprintf(stderr, "Temporal layers number must be 0-4, was: %d\n",
+            FLAGS_temporal_layers);
+    return 13;
+  }
+  config->codec_settings->codecSpecific.VP8.numberOfTemporalLayers =
+      FLAGS_temporal_layers;
+
+  // Check the bit rate.
+  if (FLAGS_bitrate <= 0) {
+    fprintf(stderr, "Bit rate must be >0 kbps, was: %d\n", FLAGS_bitrate);
+    return 5;
+  }
+  config->codec_settings->startBitrate = FLAGS_bitrate;
+
+  // Check the keyframe interval.
+  if (FLAGS_keyframe_interval < 0) {
+    fprintf(stderr, "Keyframe interval must be >=0, was: %d\n",
+            FLAGS_keyframe_interval);
+    return 6;
+  }
+  config->keyframe_interval = FLAGS_keyframe_interval;
+
+  // Check packet size and max payload size.
+  if (FLAGS_packet_size <= 0) {
+    fprintf(stderr, "Packet size must be >0 bytes, was: %d\n",
+            FLAGS_packet_size);
+    return 7;
+  }
+  config->networking_config.packet_size_in_bytes = FLAGS_packet_size;
+
+  if (FLAGS_max_payload_size <= 0) {
+    fprintf(stderr, "Max payload size must be >0 bytes, was: %d\n",
+            FLAGS_max_payload_size);
+    return 8;
+  }
+  config->networking_config.max_payload_size_in_bytes =
+      FLAGS_max_payload_size;
+
+  // Check the width and height
+  if (FLAGS_width <= 0 || FLAGS_height <= 0) {
+    fprintf(stderr, "Width and height must be >0.");
+    return 9;
+  }
+  config->codec_settings->width = FLAGS_width;
+  config->codec_settings->height = FLAGS_height;
+  config->codec_settings->maxFramerate = FLAGS_framerate;
+
+  // Calculate the size of each frame to read (according to YUV spec).
+  config->frame_length_in_bytes =
+      3 * config->codec_settings->width * config->codec_settings->height / 2;
+
+  // Check packet loss settings
+  if (FLAGS_packet_loss_mode != "uniform" &&
+      FLAGS_packet_loss_mode != "burst") {
+    fprintf(stderr, "Unsupported packet loss mode, must be 'uniform' or "
+            "'burst'\n.");
+    return 10;
+  }
+  config->networking_config.packet_loss_mode = webrtc::test::kUniform;
+  if (FLAGS_packet_loss_mode == "burst") {
+    config->networking_config.packet_loss_mode =  webrtc::test::kBurst;
+  }
+
+  if (FLAGS_packet_loss_probability < 0.0 ||
+      FLAGS_packet_loss_probability > 1.0) {
+    fprintf(stderr, "Invalid packet loss probability. Must be 0.0 - 1.0, "
+            "was: %f\n", FLAGS_packet_loss_probability);
+    return 11;
+  }
+  config->networking_config.packet_loss_probability =
+      FLAGS_packet_loss_probability;
+
+  if (FLAGS_packet_loss_burst_length < 1) {
+    fprintf(stderr, "Invalid packet loss burst length, must be >=1, "
+            "was: %d\n", FLAGS_packet_loss_burst_length);
+    return 12;
+  }
+  config->networking_config.packet_loss_burst_length =
+      FLAGS_packet_loss_burst_length;
+  config->verbose = FLAGS_verbose;
+  return 0;
+}
+
+void CalculateSsimVideoMetrics(webrtc::test::TestConfig* config,
+                               webrtc::test::QualityMetricsResult* result) {
+  Log("Calculating SSIM...\n");
+  I420SSIMFromFiles(config->input_filename.c_str(),
+                    config->output_filename.c_str(),
+                    config->codec_settings->width,
+                    config->codec_settings->height, result);
+  Log("  Average: %3.2f\n", result->average);
+  Log("  Min    : %3.2f (frame %d)\n", result->min, result->min_frame_number);
+  Log("  Max    : %3.2f (frame %d)\n", result->max, result->max_frame_number);
+}
+
+void CalculatePsnrVideoMetrics(webrtc::test::TestConfig* config,
+                               webrtc::test::QualityMetricsResult* result) {
+  Log("Calculating PSNR...\n");
+  I420PSNRFromFiles(config->input_filename.c_str(),
+                    config->output_filename.c_str(),
+                    config->codec_settings->width,
+                    config->codec_settings->height, result);
+  Log("  Average: %3.2f\n", result->average);
+  Log("  Min    : %3.2f (frame %d)\n", result->min, result->min_frame_number);
+  Log("  Max    : %3.2f (frame %d)\n", result->max, result->max_frame_number);
+}
+
+void PrintConfigurationSummary(const webrtc::test::TestConfig& config) {
+  Log("Quality test with parameters:\n");
+  Log("  Test name        : %s\n", config.name.c_str());
+  Log("  Description      : %s\n", config.description.c_str());
+  Log("  Input filename   : %s\n", config.input_filename.c_str());
+  Log("  Output directory : %s\n", config.output_dir.c_str());
+  Log("  Output filename  : %s\n", config.output_filename.c_str());
+  Log("  Frame length       : %d bytes\n", config.frame_length_in_bytes);
+  Log("  Packet size      : %d bytes\n",
+      config.networking_config.packet_size_in_bytes);
+  Log("  Max payload size : %d bytes\n",
+      config.networking_config.max_payload_size_in_bytes);
+  Log("  Packet loss:\n");
+  Log("    Mode           : %s\n",
+      PacketLossModeToStr(config.networking_config.packet_loss_mode));
+  Log("    Probability    : %2.1f\n",
+      config.networking_config.packet_loss_probability);
+  Log("    Burst length   : %d packets\n",
+      config.networking_config.packet_loss_burst_length);
+}
+
+void PrintCsvOutput(const webrtc::test::Stats& stats,
+                    const webrtc::test::QualityMetricsResult& ssim_result,
+                    const webrtc::test::QualityMetricsResult& psnr_result) {
+  Log("\nCSV output (recommended to run with --noverbose to skip the "
+              "above output)\n");
+  printf("frame_number encoding_successful decoding_successful "
+      "encode_return_code decode_return_code "
+      "encode_time_in_us decode_time_in_us "
+      "bit_rate_in_kbps encoded_frame_length_in_bytes frame_type "
+      "packets_dropped total_packets "
+      "ssim psnr\n");
+
+  for (unsigned int i = 0; i < stats.stats_.size(); ++i) {
+    const webrtc::test::FrameStatistic& f = stats.stats_[i];
+    const webrtc::test::FrameResult& ssim = ssim_result.frames[i];
+    const webrtc::test::FrameResult& psnr = psnr_result.frames[i];
+    printf("%4d, %d, %d, %2d, %2d, %6d, %6d, %5d, %7d, %d, %2d, %2d, "
+           "%5.3f, %5.2f\n",
+           f.frame_number,
+           f.encoding_successful,
+           f.decoding_successful,
+           f.encode_return_code,
+           f.decode_return_code,
+           f.encode_time_in_us,
+           f.decode_time_in_us,
+           f.bit_rate_in_kbps,
+           f.encoded_frame_length_in_bytes,
+           f.frame_type,
+           f.packets_dropped,
+           f.total_packets,
+           ssim.value,
+           psnr.value);
+  }
+}
+
+void PrintPythonOutput(const webrtc::test::TestConfig& config,
+                       const webrtc::test::Stats& stats,
+                       const webrtc::test::QualityMetricsResult& ssim_result,
+                       const webrtc::test::QualityMetricsResult& psnr_result) {
+  Log("\nPython output (recommended to run with --noverbose to skip the "
+               "above output)\n");
+  printf("test_configuration = ["
+         "{'name': 'name',                      'value': '%s'},\n"
+         "{'name': 'description',               'value': '%s'},\n"
+         "{'name': 'test_number',               'value': '%d'},\n"
+         "{'name': 'input_filename',            'value': '%s'},\n"
+         "{'name': 'output_filename',           'value': '%s'},\n"
+         "{'name': 'output_dir',                'value': '%s'},\n"
+         "{'name': 'packet_size_in_bytes',      'value': '%d'},\n"
+         "{'name': 'max_payload_size_in_bytes', 'value': '%d'},\n"
+         "{'name': 'packet_loss_mode',          'value': '%s'},\n"
+         "{'name': 'packet_loss_probability',   'value': '%f'},\n"
+         "{'name': 'packet_loss_burst_length',  'value': '%d'},\n"
+         "{'name': 'exclude_frame_types',       'value': '%s'},\n"
+         "{'name': 'frame_length_in_bytes',     'value': '%d'},\n"
+         "{'name': 'use_single_core',           'value': '%s'},\n"
+         "{'name': 'keyframe_interval;',        'value': '%d'},\n"
+         "{'name': 'video_codec_type',          'value': '%s'},\n"
+         "{'name': 'width',                     'value': '%d'},\n"
+         "{'name': 'height',                    'value': '%d'},\n"
+         "{'name': 'bit_rate_in_kbps',          'value': '%d'},\n"
+         "]\n",
+         config.name.c_str(),
+         config.description.c_str(),
+         config.test_number,
+         config.input_filename.c_str(),
+         config.output_filename.c_str(),
+         config.output_dir.c_str(),
+         config.networking_config.packet_size_in_bytes,
+         config.networking_config.max_payload_size_in_bytes,
+         PacketLossModeToStr(config.networking_config.packet_loss_mode),
+         config.networking_config.packet_loss_probability,
+         config.networking_config.packet_loss_burst_length,
+         ExcludeFrameTypesToStr(config.exclude_frame_types),
+         config.frame_length_in_bytes,
+         config.use_single_core ? "True " : "False",
+         config.keyframe_interval,
+         webrtc::test::VideoCodecTypeToStr(config.codec_settings->codecType),
+         config.codec_settings->width,
+         config.codec_settings->height,
+         config.codec_settings->startBitrate);
+  printf("frame_data_types = {"
+         "'frame_number': ('number', 'Frame number'),\n"
+         "'encoding_successful': ('boolean', 'Encoding successful?'),\n"
+         "'decoding_successful': ('boolean', 'Decoding successful?'),\n"
+         "'encode_time': ('number', 'Encode time (us)'),\n"
+         "'decode_time': ('number', 'Decode time (us)'),\n"
+         "'encode_return_code': ('number', 'Encode return code'),\n"
+         "'decode_return_code': ('number', 'Decode return code'),\n"
+         "'bit_rate': ('number', 'Bit rate (kbps)'),\n"
+         "'encoded_frame_length': "
+         "('number', 'Encoded frame length (bytes)'),\n"
+         "'frame_type': ('string', 'Frame type'),\n"
+         "'packets_dropped': ('number', 'Packets dropped'),\n"
+         "'total_packets': ('number', 'Total packets'),\n"
+         "'ssim': ('number', 'SSIM'),\n"
+         "'psnr': ('number', 'PSNR (dB)'),\n"
+         "}\n");
+  printf("frame_data = [");
+  for (unsigned int i = 0; i < stats.stats_.size(); ++i) {
+    const webrtc::test::FrameStatistic& f = stats.stats_[i];
+    const webrtc::test::FrameResult& ssim = ssim_result.frames[i];
+    const webrtc::test::FrameResult& psnr = psnr_result.frames[i];
+    printf("{'frame_number': %d, "
+           "'encoding_successful': %s, 'decoding_successful': %s, "
+           "'encode_time': %d, 'decode_time': %d, "
+           "'encode_return_code': %d, 'decode_return_code': %d, "
+           "'bit_rate': %d, 'encoded_frame_length': %d, 'frame_type': %s, "
+           "'packets_dropped': %d, 'total_packets': %d, "
+           "'ssim': %f, 'psnr': %f},\n",
+           f.frame_number,
+           f.encoding_successful ? "True " : "False",
+           f.decoding_successful ? "True " : "False",
+           f.encode_time_in_us,
+           f.decode_time_in_us,
+           f.encode_return_code,
+           f.decode_return_code,
+           f.bit_rate_in_kbps,
+           f.encoded_frame_length_in_bytes,
+           f.frame_type == webrtc::kDeltaFrame ? "'Delta'" : "'Other'",
+           f.packets_dropped,
+           f.total_packets,
+           ssim.value,
+           psnr.value);
+  }
+  printf("]\n");
+}
+
+// Runs a quality measurement on the input file supplied to the program.
+// The input file must be in YUV format.
+int main(int argc, char* argv[]) {
+  std::string program_name = argv[0];
+  std::string usage = "Quality test application for video comparisons.\n"
+    "Run " + program_name + " --helpshort for usage.\n"
+    "Example usage:\n" + program_name +
+    " --input_filename=filename.yuv --width=352 --height=288\n";
+  google::SetUsageMessage(usage);
+
+  google::ParseCommandLineFlags(&argc, &argv, true);
+
+  // Create TestConfig and codec settings struct.
+  webrtc::test::TestConfig config;
+  webrtc::VideoCodec codec_settings;
+  config.codec_settings = &codec_settings;
+
+  int return_code = HandleCommandLineFlags(&config);
+  // Exit if an invalid argument is supplied.
+  if (return_code != 0) {
+    return return_code;
+  }
+
+  PrintConfigurationSummary(config);
+
+  webrtc::VP8Encoder* encoder = webrtc::VP8Encoder::Create();
+  webrtc::VP8Decoder* decoder = webrtc::VP8Decoder::Create();
+  webrtc::test::Stats stats;
+  webrtc::test::FrameReaderImpl frame_reader(config.input_filename,
+                                             config.frame_length_in_bytes);
+  webrtc::test::FrameWriterImpl frame_writer(config.output_filename,
+                                             config.frame_length_in_bytes);
+  frame_reader.Init();
+  frame_writer.Init();
+  webrtc::test::PacketReader packet_reader;
+
+  webrtc::test::PacketManipulatorImpl packet_manipulator(
+      &packet_reader, config.networking_config, config.verbose);
+  // By default the packet manipulator is seeded with a fixed random.
+  // If disabled we must generate a new seed.
+  if (FLAGS_disable_fixed_random_seed) {
+    packet_manipulator.InitializeRandomSeed(time(NULL));
+  }
+  webrtc::test::VideoProcessor* processor =
+      new webrtc::test::VideoProcessorImpl(encoder, decoder,
+                                           &frame_reader,
+                                           &frame_writer,
+                                           &packet_manipulator,
+                                           config, &stats);
+  processor->Init();
+
+  int frame_number = 0;
+  while (processor->ProcessFrame(frame_number)) {
+    if (frame_number % 80 == 0) {
+      Log("\n");  // make the output a bit nicer.
+    }
+    Log(".");
+    frame_number++;
+  }
+  Log("\n");
+  Log("Processed %d frames\n", frame_number);
+
+  // Release encoder and decoder to make sure they have finished processing.
+  encoder->Release();
+  decoder->Release();
+
+  // Verify statistics are correct:
+  assert(frame_number == static_cast<int>(stats.stats_.size()));
+
+  // Close the files before we start using them for SSIM/PSNR calculations.
+  frame_reader.Close();
+  frame_writer.Close();
+
+  stats.PrintSummary();
+
+  webrtc::test::QualityMetricsResult ssim_result;
+  CalculateSsimVideoMetrics(&config, &ssim_result);
+  webrtc::test::QualityMetricsResult psnr_result;
+  CalculatePsnrVideoMetrics(&config, &psnr_result);
+
+  if (FLAGS_csv) {
+    PrintCsvOutput(stats, ssim_result, psnr_result);
+  }
+  if (FLAGS_python) {
+    PrintPythonOutput(config, stats, ssim_result, psnr_result);
+  }
+  delete processor;
+  delete encoder;
+  delete decoder;
+  Log("Quality test finished!");
+  return 0;
+}
diff --git a/src/modules/video_coding/codecs/vp8/include/vp8.h b/src/modules/video_coding/codecs/vp8/include/vp8.h
new file mode 100644
index 0000000..e6416f5
--- /dev/null
+++ b/src/modules/video_coding/codecs/vp8/include/vp8.h
@@ -0,0 +1,240 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ *
+ * WEBRTC VP8 wrapper interface
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_H_
+
+#include "video_codec_interface.h"
+
+// VPX forward declaration
+typedef struct vpx_codec_ctx vpx_codec_ctx_t;
+typedef struct vpx_codec_ctx vpx_dec_ctx_t;
+typedef struct vpx_codec_enc_cfg vpx_codec_enc_cfg_t;
+typedef struct vpx_image vpx_image_t;
+typedef struct vpx_ref_frame vpx_ref_frame_t;
+struct vpx_codec_cx_pkt;
+
+namespace webrtc
+{
+class TemporalLayers;
+class ReferencePictureSelection;
+
+class VP8Encoder : public VideoEncoder {
+ public:
+  static VP8Encoder* Create();
+
+  virtual ~VP8Encoder();
+
+  // Free encoder memory.
+  //
+  // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+  virtual int Release();
+
+  // Initialize the encoder with the information from the codecSettings
+  //
+  // Input:
+  //          - codec_settings    : Codec settings
+  //          - number_of_cores   : Number of cores available for the encoder
+  //          - max_payload_size  : The maximum size each payload is allowed
+  //                                to have. Usually MTU - overhead.
+  //
+  // Return value                 : Set bit rate if OK
+  //                                <0 - Errors:
+  //                                  WEBRTC_VIDEO_CODEC_ERR_PARAMETER
+  //                                  WEBRTC_VIDEO_CODEC_ERR_SIZE
+  //                                  WEBRTC_VIDEO_CODEC_LEVEL_EXCEEDED
+  //                                  WEBRTC_VIDEO_CODEC_MEMORY
+  //                                  WEBRTC_VIDEO_CODEC_ERROR
+  virtual int InitEncode(const VideoCodec* codec_settings,
+                         int number_of_cores,
+                         uint32_t max_payload_size);
+
+  // Encode an I420 image (as a part of a video stream). The encoded image
+  // will be returned to the user through the encode complete callback.
+  //
+  // Input:
+  //          - input_image       : Image to be encoded
+  //          - frame_types       : Frame type to be generated by the encoder.
+  //
+  // Return value                 : WEBRTC_VIDEO_CODEC_OK if OK
+  //                                <0 - Errors:
+  //                                  WEBRTC_VIDEO_CODEC_ERR_PARAMETER
+  //                                  WEBRTC_VIDEO_CODEC_MEMORY
+  //                                  WEBRTC_VIDEO_CODEC_ERROR
+  //                                  WEBRTC_VIDEO_CODEC_TIMEOUT
+
+  virtual int Encode(const VideoFrame& input_image,
+                     const CodecSpecificInfo* codec_specific_info,
+                     const VideoFrameType frame_type);
+
+  // Register an encode complete callback object.
+  //
+  // Input:
+  //          - callback         : Callback object which handles encoded images.
+  //
+  // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+  virtual int RegisterEncodeCompleteCallback(EncodedImageCallback* callback);
+
+  // Inform the encoder of the new packet loss rate and the round-trip time of
+  // the network.
+  //
+  //          - packet_loss : Fraction lost
+  //                          (loss rate in percent = 100 * packetLoss / 255)
+  //          - rtt         : Round-trip time in milliseconds
+  // Return value           : WEBRTC_VIDEO_CODEC_OK if OK
+  //                          <0 - Errors: WEBRTC_VIDEO_CODEC_ERROR
+  //
+  virtual int SetChannelParameters(uint32_t packet_loss, int rtt);
+
+  // Inform the encoder about the new target bit rate.
+  //
+  //          - new_bitrate_kbit : New target bit rate
+  //          - frame_rate       : The target frame rate
+  //
+  // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+  virtual int SetRates(uint32_t new_bitrate_kbit, uint32_t frame_rate);
+
+ private:
+  VP8Encoder();
+
+  // Call encoder initialize function and set control settings.
+  int InitAndSetControlSettings(const VideoCodec* inst);
+
+  // Update frame size for codec.
+  int UpdateCodecFrameSize(WebRtc_UWord32 input_image_width,
+                           WebRtc_UWord32 input_image_height);
+
+  void PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
+                             const vpx_codec_cx_pkt& pkt);
+
+  int GetEncodedFrame(const VideoFrame& input_image);
+
+  int GetEncodedPartitions(const VideoFrame& input_image);
+
+  // Determine maximum target for Intra frames
+  //
+  // Input:
+  //    - optimal_buffer_size : Optimal buffer size
+  // Return Value             : Max target size for Intra frames represented as
+  //                            percentage of the per frame bandwidth
+  uint32_t MaxIntraTarget(uint32_t optimal_buffer_size);
+
+  EncodedImage encoded_image_;
+  EncodedImageCallback* encoded_complete_callback_;
+  VideoCodec codec_;
+  bool inited_;
+  int64_t timestamp_;
+  uint16_t picture_id_;
+  bool feedback_mode_;
+  int cpu_speed_;
+  uint32_t rc_max_intra_target_;
+  int token_partitions_;
+  ReferencePictureSelection* rps_;
+  TemporalLayers* temporal_layers_;
+  vpx_codec_ctx_t* encoder_;
+  vpx_codec_enc_cfg_t* config_;
+  vpx_image_t* raw_;
+};  // end of VP8Encoder class
+
+
+class VP8Decoder : public VideoDecoder {
+ public:
+  static VP8Decoder* Create();
+
+  virtual ~VP8Decoder();
+
+  // Initialize the decoder.
+  //
+  // Return value         :  WEBRTC_VIDEO_CODEC_OK.
+  //                        <0 - Errors:
+  //                                  WEBRTC_VIDEO_CODEC_ERROR
+  virtual int InitDecode(const VideoCodec* inst, int number_of_cores);
+
+  // Decode encoded image (as a part of a video stream). The decoded image
+  // will be returned to the user through the decode complete callback.
+  //
+  // Input:
+  //          - input_image         : Encoded image to be decoded
+  //          - missing_frames      : True if one or more frames have been lost
+  //                                  since the previous decode call.
+  //          - fragmentation       : Specifies the start and length of each VP8
+  //                                  partition.
+  //          - codec_specific_info : pointer to specific codec data
+  //          - render_time_ms      : Render time in Ms
+  //
+  // Return value                 : WEBRTC_VIDEO_CODEC_OK if OK
+  //                                <0 - Errors:
+  //                                      WEBRTC_VIDEO_CODEC_ERROR
+  //                                      WEBRTC_VIDEO_CODEC_ERR_PARAMETER
+  virtual int Decode(const EncodedImage& input_image,
+                     bool missing_frames,
+                     const RTPFragmentationHeader* fragmentation,
+                     const CodecSpecificInfo* codec_specific_info,
+                     int64_t /*render_time_ms*/);
+
+  // Register a decode complete callback object.
+  //
+  // Input:
+  //          - callback         : Callback object which handles decoded images.
+  //
+  // Return value                : WEBRTC_VIDEO_CODEC_OK if OK, < 0 otherwise.
+  virtual int RegisterDecodeCompleteCallback(DecodedImageCallback* callback);
+
+  // Free decoder memory.
+  //
+  // Return value                : WEBRTC_VIDEO_CODEC_OK if OK
+  //                               <0 - Errors:
+  //                                      WEBRTC_VIDEO_CODEC_ERROR
+  virtual int Release();
+
+  // Reset decoder state and prepare for a new call.
+  //
+  // Return value         : WEBRTC_VIDEO_CODEC_OK.
+  //                        <0 - Errors:
+  //                                  WEBRTC_VIDEO_CODEC_UNINITIALIZED
+  //                                  WEBRTC_VIDEO_CODEC_ERROR
+  virtual int Reset();
+
+  // Create a copy of the codec and its internal state.
+  //
+  // Return value                : A copy of the instance if OK, NULL otherwise.
+  virtual VideoDecoder* Copy();
+
+ private:
+  VP8Decoder();
+
+  // Copy reference image from this _decoder to the _decoder in copyTo. Set
+  // which frame type to copy in _refFrame->frame_type before the call to
+  // this function.
+  int CopyReference(VP8Decoder* copy);
+
+  int DecodePartitions(const EncodedImage& input_image,
+                       const RTPFragmentationHeader* fragmentation);
+
+  int ReturnFrame(const vpx_image_t* img, uint32_t timeStamp);
+
+  VideoFrame decoded_image_;
+  DecodedImageCallback* decode_complete_callback_;
+  bool inited_;
+  bool feedback_mode_;
+  vpx_dec_ctx_t* decoder_;
+  VideoCodec codec_;
+  EncodedImage last_keyframe_;
+  int image_format_;
+  vpx_ref_frame_t* ref_frame_;
+  int propagation_cnt_;
+  bool latest_keyframe_complete_;
+  bool mfqe_enabled_;
+};  // end of VP8Decoder class
+}  // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_H_
diff --git a/src/modules/video_coding/codecs/vp8/include/vp8_common_types.h b/src/modules/video_coding/codecs/vp8/include/vp8_common_types.h
new file mode 100644
index 0000000..6f347cd
--- /dev/null
+++ b/src/modules/video_coding/codecs/vp8/include/vp8_common_types.h
@@ -0,0 +1,29 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_COMMON_TYPES_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_COMMON_TYPES_H_
+
+#include "common_types.h"
+
+namespace webrtc {
+
+// Ratio allocation between temporal streams:
+// Values as required for the VP8 codec (accumulating).
+static const float
+  kVp8LayerRateAlloction[kMaxTemporalStreams][kMaxTemporalStreams] = {
+    {1.0f, 0, 0, 0},  // 1 layer
+    {0.6f, 1.0f , 0 , 0},  // 2 layers {60%, 40%}
+    {0.4f, 0.6f , 1.0f, 0},  // 3 layers {40%, 20%, 40%}
+    {0.25f, 0.4f, 0.6f, 1.0f}  // 4 layers {25%, 15%, 20%, 40%}
+};
+
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_COMMON_TYPES_H_
diff --git a/src/modules/video_coding/codecs/vp8/reference_picture_selection.cc b/src/modules/video_coding/codecs/vp8/reference_picture_selection.cc
new file mode 100644
index 0000000..3ae6f19
--- /dev/null
+++ b/src/modules/video_coding/codecs/vp8/reference_picture_selection.cc
@@ -0,0 +1,131 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "reference_picture_selection.h"
+
+#include "typedefs.h"
+#include "vpx/vpx_encoder.h"
+#include "vpx/vp8cx.h"
+
+namespace webrtc {
+
+ReferencePictureSelection::ReferencePictureSelection()
+    : kRttConfidence(1.33),
+      update_golden_next_(true),
+      established_golden_(false),
+      received_ack_(false),
+      last_sent_ref_picture_id_(0),
+      last_sent_ref_update_time_(0),
+      established_ref_picture_id_(0),
+      last_refresh_time_(0),
+      rtt_(0) {
+}
+
+void ReferencePictureSelection::Init() {
+  update_golden_next_ = true;
+  established_golden_ = false;
+  received_ack_ = false;
+  last_sent_ref_picture_id_ = 0;
+  last_sent_ref_update_time_ = 0;
+  established_ref_picture_id_ = 0;
+  last_refresh_time_ = 0;
+  rtt_ = 0;
+}
+
+void ReferencePictureSelection::ReceivedRPSI(int rpsi_picture_id) {
+  // Assume RPSI is signaled with 14 bits.
+  if ((rpsi_picture_id & 0x3fff) == (last_sent_ref_picture_id_ & 0x3fff)) {
+    // Remote peer has received our last reference frame, switch frame type.
+    received_ack_ = true;
+    established_golden_ = update_golden_next_;
+    update_golden_next_ = !update_golden_next_;
+    established_ref_picture_id_ = last_sent_ref_picture_id_;
+  }
+}
+
+bool ReferencePictureSelection::ReceivedSLI(uint32_t now_ts) {
+  bool send_refresh = false;
+  // Don't send a refresh more than once per round-trip time.
+  // This is to avoid too frequent refreshes, since the receiver
+  // will signal an SLI for every corrupt frame.
+  if (TimestampDiff(now_ts, last_refresh_time_) > rtt_) {
+    send_refresh = true;
+    last_refresh_time_ = now_ts;
+  }
+  return send_refresh;
+}
+
+int ReferencePictureSelection::EncodeFlags(int picture_id, bool send_refresh,
+                                           uint32_t now_ts) {
+  int flags = 0;
+  // We can't refresh the decoder until we have established the key frame.
+  if (send_refresh && received_ack_) {
+    flags |= VP8_EFLAG_NO_REF_LAST;  // Don't reference the last frame
+    if (established_golden_)
+      flags |= VP8_EFLAG_NO_REF_ARF;  // Don't reference the alt-ref frame.
+    else
+      flags |= VP8_EFLAG_NO_REF_GF;  // Don't reference the golden frame
+  }
+
+  // Make sure we don't update the reference frames too often. We must wait long
+  // enough for an RPSI to arrive after the decoder decoded the reference frame.
+  // Ideally that should happen after one round-trip time.
+  // Add a margin defined by |kRttConfidence|.
+  uint32_t update_interval = kRttConfidence * rtt_;
+  if (update_interval < kMinUpdateInterval)
+    update_interval = kMinUpdateInterval;
+  // Don't send reference frame updates until we have an established reference.
+  if (TimestampDiff(now_ts, last_sent_ref_update_time_) > update_interval &&
+      received_ack_) {
+    flags |= VP8_EFLAG_NO_REF_LAST;  // Don't reference the last frame.
+    if (update_golden_next_) {
+      flags |= VP8_EFLAG_FORCE_GF;  // Update the golden reference.
+      flags |= VP8_EFLAG_NO_UPD_ARF;  // Don't update alt-ref.
+      flags |= VP8_EFLAG_NO_REF_GF;  // Don't reference the golden frame.
+    } else {
+      flags |= VP8_EFLAG_FORCE_ARF;  // Update the alt-ref reference.
+      flags |= VP8_EFLAG_NO_UPD_GF;  // Don't update the golden frame.
+      flags |= VP8_EFLAG_NO_REF_ARF;  // Don't reference the alt-ref frame.
+    }
+    last_sent_ref_picture_id_ = picture_id;
+    last_sent_ref_update_time_ = now_ts;
+  } else {
+    // No update of golden or alt-ref. We can therefore freely reference the
+    // established reference frame and the last frame.
+    if (established_golden_)
+      flags |= VP8_EFLAG_NO_REF_ARF;  // Don't reference the alt-ref frame.
+    else
+      flags |= VP8_EFLAG_NO_REF_GF;   // Don't reference the golden frame.
+    flags |= VP8_EFLAG_NO_UPD_GF;  // Don't update the golden frame.
+    flags |= VP8_EFLAG_NO_UPD_ARF;  // Don't update the alt-ref frame.
+  }
+  return flags;
+}
+
+void ReferencePictureSelection::EncodedKeyFrame(int picture_id) {
+  last_sent_ref_picture_id_ = picture_id;
+  received_ack_ = false;
+}
+
+void ReferencePictureSelection::SetRtt(int rtt) {
+  // Convert from milliseconds to timestamp frequency.
+  rtt_ = 90 * rtt;
+}
+
+uint32_t ReferencePictureSelection::TimestampDiff(uint32_t new_ts,
+                                                  uint32_t old_ts) {
+  if (old_ts > new_ts) {
+    // Assuming this is a wrap, doing a compensated subtraction.
+    return (new_ts + (static_cast<int64_t>(1) << 32)) - old_ts;
+  }
+  return new_ts - old_ts;
+}
+
+}  // namespace webrtc
diff --git a/src/modules/video_coding/codecs/vp8/reference_picture_selection.h b/src/modules/video_coding/codecs/vp8/reference_picture_selection.h
new file mode 100644
index 0000000..59e5940
--- /dev/null
+++ b/src/modules/video_coding/codecs/vp8/reference_picture_selection.h
@@ -0,0 +1,78 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file defines classes for doing reference picture selection, primarily
+ * with VP8.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_REFERENCE_PICTURE_SELECTION_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_REFERENCE_PICTURE_SELECTION_H_
+
+#include "typedefs.h"
+
+namespace webrtc {
+
+class ReferencePictureSelection {
+ public:
+  ReferencePictureSelection();
+  void Init();
+
+  // Report a received reference picture selection indication. This will
+  // introduce a new established reference if the received RPSI isn't too late.
+  void ReceivedRPSI(int rpsi_picture_id);
+
+  // Report a received slice loss indication. Returns true if a refresh frame
+  // must be sent to the receiver, which is accomplished by only predicting
+  // from the established reference.
+  // |now_ts| is the RTP timestamp corresponding to the current time. Typically
+  // the capture timestamp of the frame currently being processed.
+  // Returns true if it's time to encode a decoder refresh, otherwise false.
+  bool ReceivedSLI(uint32_t now_ts);
+
+  // Returns the recommended VP8 encode flags needed. May refresh the decoder
+  // and/or update the reference buffers.
+  // |picture_id| picture id of the frame to be encoded.
+  // |send_refresh| should be set to true if a decoder refresh should be
+  // encoded, otherwise false.
+  // |now_ts| is the RTP timestamp corresponding to the current time. Typically
+  // the capture timestamp of the frame currently being processed.
+  // Returns the flags to be given to the libvpx encoder when encoding the next
+  // frame.
+  int EncodeFlags(int picture_id, bool send_refresh, uint32_t now_ts);
+
+  // Notify the RPS that the frame with picture id |picture_id| was encoded as
+  // a key frame, effectively updating all reference buffers.
+  void EncodedKeyFrame(int picture_id);
+
+  // Set the round-trip time between the sender and the receiver to |rtt|
+  // milliseconds.
+  void SetRtt(int rtt);
+
+ private:
+  static uint32_t TimestampDiff(uint32_t new_ts, uint32_t old_ts);
+
+  // The minimum time between reference frame updates.
+  enum { kMinUpdateInterval = 90 * 10 };  // Timestamp frequency
+  const double kRttConfidence;
+
+  bool update_golden_next_;
+  bool established_golden_;
+  bool received_ack_;
+  int last_sent_ref_picture_id_;
+  uint32_t last_sent_ref_update_time_;
+  int established_ref_picture_id_;
+  uint32_t last_refresh_time_;
+  uint32_t rtt_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_REFERENCE_PICTURE_SELECTION_H_
diff --git a/src/modules/video_coding/codecs/vp8/reference_picture_selection_unittest.cc b/src/modules/video_coding/codecs/vp8/reference_picture_selection_unittest.cc
new file mode 100644
index 0000000..cdac76c
--- /dev/null
+++ b/src/modules/video_coding/codecs/vp8/reference_picture_selection_unittest.cc
@@ -0,0 +1,107 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gtest/gtest.h"
+#include "reference_picture_selection.h"
+#include "vpx/vpx_encoder.h"
+#include "vpx/vp8cx.h"
+
+using webrtc::ReferencePictureSelection;
+
+// The minimum time between reference frame updates. Should match the values
+// set in reference_picture_selection.h
+enum { kMinUpdateInterval = 10 };
+// The minimum time between decoder refreshes through restricted prediction.
+// Should match the values set in reference_picture_selection.h
+enum { kRtt = 10 };
+
+enum {
+  kNoPropagationGolden    = VP8_EFLAG_NO_REF_ARF |
+                            VP8_EFLAG_NO_UPD_GF |
+                            VP8_EFLAG_NO_UPD_ARF,
+  kNoPropagationAltRef    = VP8_EFLAG_NO_REF_GF |
+                            VP8_EFLAG_NO_UPD_GF |
+                            VP8_EFLAG_NO_UPD_ARF,
+  kPropagateGolden        = VP8_EFLAG_FORCE_GF |
+                            VP8_EFLAG_NO_UPD_ARF |
+                            VP8_EFLAG_NO_REF_GF |
+                            VP8_EFLAG_NO_REF_LAST,
+  kPropagateAltRef        = VP8_EFLAG_FORCE_ARF |
+                            VP8_EFLAG_NO_UPD_GF |
+                            VP8_EFLAG_NO_REF_ARF |
+                            VP8_EFLAG_NO_REF_LAST,
+  kRefreshFromGolden      = VP8_EFLAG_NO_REF_LAST |
+                            VP8_EFLAG_NO_REF_ARF,
+  kRefreshFromAltRef      = VP8_EFLAG_NO_REF_LAST |
+                            VP8_EFLAG_NO_REF_GF
+};
+
+class TestRPS : public ::testing::Test {
+ protected:
+  virtual void SetUp() {
+    rps_.Init();
+    // Initialize with sending a key frame and acknowledging it.
+    rps_.EncodedKeyFrame(0);
+    rps_.ReceivedRPSI(0);
+    rps_.SetRtt(kRtt);
+  }
+
+  ReferencePictureSelection rps_;
+};
+
+TEST_F(TestRPS, TestPropagateReferenceFrames) {
+  // Should propagate the alt-ref reference.
+  uint32_t time = (4 * kMinUpdateInterval) / 3 + 1;
+  EXPECT_EQ(rps_.EncodeFlags(1, false, 90 * time), kPropagateAltRef);
+  rps_.ReceivedRPSI(1);
+  time += (4 * (time + kMinUpdateInterval)) / 3 + 1;
+  // Should propagate the golden reference.
+  EXPECT_EQ(rps_.EncodeFlags(2, false, 90 * time), kPropagateGolden);
+  rps_.ReceivedRPSI(2);
+  // Should propagate the alt-ref reference.
+  time = (4 * (time + kMinUpdateInterval)) / 3 + 1;
+  EXPECT_EQ(rps_.EncodeFlags(3, false, 90 * time), kPropagateAltRef);
+  rps_.ReceivedRPSI(3);
+  // Shouldn't propagate any reference frames (except last), and the established
+  // reference is alt-ref.
+  time = time + kMinUpdateInterval;
+  EXPECT_EQ(rps_.EncodeFlags(4, false, 90 * time), kNoPropagationAltRef);
+}
+
+TEST_F(TestRPS, TestDecoderRefresh) {
+  uint32_t time = kRtt + 1;
+  // No more than one refresh per RTT.
+  EXPECT_EQ(rps_.ReceivedSLI(90 * time), true);
+  time += 5;
+  EXPECT_EQ(rps_.ReceivedSLI(90 * time), false);
+  time += kRtt - 4;
+  EXPECT_EQ(rps_.ReceivedSLI(90 * time), true);
+  // Enough time have elapsed since the previous reference propagation, we will
+  // therefore get both a refresh from golden and a propagation of alt-ref.
+  EXPECT_EQ(rps_.EncodeFlags(5, true, 90 * time), kRefreshFromGolden |
+            kPropagateAltRef);
+  rps_.ReceivedRPSI(5);
+  time += kRtt + 1;
+  // Enough time for a new refresh, but not enough time for a reference
+  // propagation.
+  EXPECT_EQ(rps_.ReceivedSLI(90 * time), true);
+  EXPECT_EQ(rps_.EncodeFlags(6, true, 90 * time), kRefreshFromAltRef |
+            kNoPropagationAltRef);
+}
+
+TEST_F(TestRPS, TestWrap) {
+  EXPECT_EQ(rps_.ReceivedSLI(0xffffffff), true);
+  EXPECT_EQ(rps_.ReceivedSLI(1), false);
+  EXPECT_EQ(rps_.ReceivedSLI(90 * 100), true);
+
+  EXPECT_EQ(rps_.EncodeFlags(7, false, 0xffffffff), kPropagateAltRef);
+  EXPECT_EQ(rps_.EncodeFlags(8, false, 1), kNoPropagationGolden);
+  EXPECT_EQ(rps_.EncodeFlags(10, false, 90 * 100), kPropagateAltRef);
+}
diff --git a/src/modules/video_coding/codecs/vp8/temporal_layers.cc b/src/modules/video_coding/codecs/vp8/temporal_layers.cc
new file mode 100644
index 0000000..63874e0
--- /dev/null
+++ b/src/modules/video_coding/codecs/vp8/temporal_layers.cc
@@ -0,0 +1,246 @@
+/* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+*
+*  Use of this source code is governed by a BSD-style license
+*  that can be found in the LICENSE file in the root of the source
+*  tree. An additional intellectual property rights grant can be found
+*  in the file PATENTS.  All contributing project authors may
+*  be found in the AUTHORS file in the root of the source tree.
+*/
+
+#include "temporal_layers.h"
+
+#include <stdlib.h>
+#include <string.h>
+#include <cassert>
+
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "modules/video_coding/codecs/vp8/include/vp8_common_types.h"
+
+#include "vpx/vpx_encoder.h"
+#include "vpx/vp8cx.h"
+
+namespace webrtc {
+
+TemporalLayers::TemporalLayers(int numberOfTemporalLayers)
+    : number_of_temporal_layers_(numberOfTemporalLayers),
+      temporal_ids_length_(0),
+      temporal_pattern_length_(0),
+      tl0_pic_idx_(rand()),
+      pattern_idx_(255) {
+  assert(kMaxTemporalStreams >= numberOfTemporalLayers);
+  memset(temporal_ids_, 0, sizeof(temporal_ids_));
+  memset(temporal_pattern_, 0, sizeof(temporal_pattern_));
+}
+
+bool TemporalLayers::ConfigureBitrates(int bitrateKbit,
+                                       vpx_codec_enc_cfg_t* cfg) {
+  switch (number_of_temporal_layers_) {
+    case 0:
+    case 1:
+      // Do nothing.
+      break;
+    case 2:
+      temporal_ids_length_ = 2;
+      temporal_ids_[0] = 0;
+      temporal_ids_[1] = 1;
+      cfg->ts_number_layers = number_of_temporal_layers_;
+      cfg->ts_periodicity = temporal_ids_length_;
+      // Split stream 60% 40%.
+      // Bitrate API for VP8 is the agregated bitrate for all lower layers.
+      cfg->ts_target_bitrate[0] = bitrateKbit * kVp8LayerRateAlloction[1][0];
+      cfg->ts_target_bitrate[1] = bitrateKbit;
+      cfg->ts_rate_decimator[0] = 2;
+      cfg->ts_rate_decimator[1] = 1;
+      memcpy(cfg->ts_layer_id,
+             temporal_ids_,
+             sizeof(unsigned int) * temporal_ids_length_);
+      temporal_pattern_length_ = 8;
+      temporal_pattern_[0] = kTemporalUpdateLast;
+      temporal_pattern_[1] = kTemporalUpdateGoldenWithoutDependency;
+      temporal_pattern_[2] = kTemporalUpdateLast;
+      temporal_pattern_[3] = kTemporalUpdateGolden;
+      temporal_pattern_[4] = kTemporalUpdateLast;
+      temporal_pattern_[5] = kTemporalUpdateGolden;
+      temporal_pattern_[6] = kTemporalUpdateLast;
+      temporal_pattern_[7] = kTemporalUpdateNoneNoRefAltref;
+      break;
+    case 3:
+      temporal_ids_length_ = 4;
+      temporal_ids_[0] = 0;
+      temporal_ids_[1] = 2;
+      temporal_ids_[2] = 1;
+      temporal_ids_[3] = 2;
+      cfg->ts_number_layers = number_of_temporal_layers_;
+      cfg->ts_periodicity = temporal_ids_length_;
+      // Split stream 40% 20% 40%.
+      // Bitrate API for VP8 is the agregated bitrate for all lower layers.
+      cfg->ts_target_bitrate[0] = bitrateKbit * kVp8LayerRateAlloction[2][0];
+      cfg->ts_target_bitrate[1] = bitrateKbit * kVp8LayerRateAlloction[2][1];
+      cfg->ts_target_bitrate[2] = bitrateKbit;
+      cfg->ts_rate_decimator[0] = 4;
+      cfg->ts_rate_decimator[1] = 2;
+      cfg->ts_rate_decimator[2] = 1;
+      memcpy(cfg->ts_layer_id,
+             temporal_ids_,
+             sizeof(unsigned int) * temporal_ids_length_);
+      temporal_pattern_length_ = 8;
+      temporal_pattern_[0] = kTemporalUpdateLastAndGoldenRefAltRef;
+      temporal_pattern_[1] = kTemporalUpdateNoneNoRefGoldenRefAltRef;
+      temporal_pattern_[2] = kTemporalUpdateGoldenWithoutDependencyRefAltRef;
+      temporal_pattern_[3] = kTemporalUpdateNone;
+      temporal_pattern_[4] = kTemporalUpdateLastRefAltRef;
+      temporal_pattern_[5] = kTemporalUpdateNone;
+      temporal_pattern_[6] = kTemporalUpdateGoldenRefAltRef;
+      temporal_pattern_[7] = kTemporalUpdateNone;
+      break;
+    case 4:
+      temporal_ids_length_ = 8;
+      temporal_ids_[0] = 0;
+      temporal_ids_[1] = 3;
+      temporal_ids_[2] = 2;
+      temporal_ids_[3] = 3;
+      temporal_ids_[4] = 1;
+      temporal_ids_[5] = 3;
+      temporal_ids_[6] = 2;
+      temporal_ids_[7] = 3;
+      // Split stream 25% 15% 20% 40%.
+      // Bitrate API for VP8 is the agregated bitrate for all lower layers.
+      cfg->ts_number_layers = 4;
+      cfg->ts_periodicity = temporal_ids_length_;
+      cfg->ts_target_bitrate[0] = bitrateKbit * kVp8LayerRateAlloction[3][0];
+      cfg->ts_target_bitrate[1] = bitrateKbit * kVp8LayerRateAlloction[3][1];
+      cfg->ts_target_bitrate[2] = bitrateKbit * kVp8LayerRateAlloction[3][2];
+      cfg->ts_target_bitrate[3] = bitrateKbit;
+      cfg->ts_rate_decimator[0] = 8;
+      cfg->ts_rate_decimator[1] = 4;
+      cfg->ts_rate_decimator[2] = 2;
+      cfg->ts_rate_decimator[3] = 1;
+      memcpy(cfg->ts_layer_id,
+             temporal_ids_,
+             sizeof(unsigned int) * temporal_ids_length_);
+      temporal_pattern_length_ = 16;
+      temporal_pattern_[0] = kTemporalUpdateLast;
+      temporal_pattern_[1] = kTemporalUpdateNone;
+      temporal_pattern_[2] = kTemporalUpdateAltrefWithoutDependency;
+      temporal_pattern_[3] = kTemporalUpdateNone;
+      temporal_pattern_[4] = kTemporalUpdateGoldenWithoutDependency;
+      temporal_pattern_[5] = kTemporalUpdateNone;
+      temporal_pattern_[6] = kTemporalUpdateAltref;
+      temporal_pattern_[7] = kTemporalUpdateNone;
+      temporal_pattern_[8] = kTemporalUpdateLast;
+      temporal_pattern_[9] = kTemporalUpdateNone;
+      temporal_pattern_[10] = kTemporalUpdateAltref;
+      temporal_pattern_[11] = kTemporalUpdateNone;
+      temporal_pattern_[12] = kTemporalUpdateGolden;
+      temporal_pattern_[13] = kTemporalUpdateNone;
+      temporal_pattern_[14] = kTemporalUpdateAltref;
+      temporal_pattern_[15] = kTemporalUpdateNone;
+      break;
+    default:
+      assert(false);
+      return false;
+  }
+  return true;
+}
+
+int TemporalLayers::EncodeFlags() {
+  assert(number_of_temporal_layers_ > 1);
+  assert(kMaxTemporalPattern >= temporal_pattern_length_);
+  assert(0 < temporal_pattern_length_);
+
+  int flags = 0;
+  int patternIdx = ++pattern_idx_ % temporal_pattern_length_;
+  assert(kMaxTemporalPattern >= patternIdx);
+  switch (temporal_pattern_[patternIdx]) {
+    case kTemporalUpdateLast:
+      flags |= VP8_EFLAG_NO_UPD_GF;
+      flags |= VP8_EFLAG_NO_UPD_ARF;
+      flags |= VP8_EFLAG_NO_REF_GF;
+      flags |= VP8_EFLAG_NO_REF_ARF;
+      break;
+    case kTemporalUpdateGoldenWithoutDependency:
+      flags |= VP8_EFLAG_NO_REF_GF;
+      // Deliberately no break here.
+    case kTemporalUpdateGolden:
+      flags |= VP8_EFLAG_NO_REF_ARF;
+      flags |= VP8_EFLAG_NO_UPD_ARF;
+      flags |= VP8_EFLAG_NO_UPD_LAST;
+      break;
+    case kTemporalUpdateAltrefWithoutDependency:
+      flags |= VP8_EFLAG_NO_REF_ARF;
+      flags |= VP8_EFLAG_NO_REF_GF;
+      // Deliberately no break here.
+    case kTemporalUpdateAltref:
+      flags |= VP8_EFLAG_NO_UPD_GF;
+      flags |= VP8_EFLAG_NO_UPD_LAST;
+      break;
+    case kTemporalUpdateNoneNoRefAltref:
+      flags |= VP8_EFLAG_NO_REF_ARF;
+      // Deliberately no break here.
+    case kTemporalUpdateNone:
+      flags |= VP8_EFLAG_NO_UPD_GF;
+      flags |= VP8_EFLAG_NO_UPD_ARF;
+      flags |= VP8_EFLAG_NO_UPD_LAST;
+      flags |= VP8_EFLAG_NO_UPD_ENTROPY;
+      break;
+    case kTemporalUpdateNoneNoRefGoldenRefAltRef:
+      flags |= VP8_EFLAG_NO_REF_GF;
+      flags |= VP8_EFLAG_NO_UPD_GF;
+      flags |= VP8_EFLAG_NO_UPD_ARF;
+      flags |= VP8_EFLAG_NO_UPD_LAST;
+      flags |= VP8_EFLAG_NO_UPD_ENTROPY;
+      break;
+    case kTemporalUpdateGoldenWithoutDependencyRefAltRef:
+      flags |= VP8_EFLAG_NO_REF_GF;
+      flags |= VP8_EFLAG_NO_UPD_ARF;
+      flags |= VP8_EFLAG_NO_UPD_LAST;
+      break;
+    case kTemporalUpdateLastRefAltRef:
+      flags |= VP8_EFLAG_NO_UPD_GF;
+      flags |= VP8_EFLAG_NO_UPD_ARF;
+      flags |= VP8_EFLAG_NO_REF_GF;
+      break;
+    case kTemporalUpdateGoldenRefAltRef:
+      flags |= VP8_EFLAG_NO_UPD_ARF;
+      flags |= VP8_EFLAG_NO_UPD_LAST;
+      break;
+    case kTemporalUpdateLastAndGoldenRefAltRef:
+      flags |= VP8_EFLAG_NO_UPD_ARF;
+      flags |= VP8_EFLAG_NO_REF_GF;
+      break;
+  }
+  return flags;
+}
+
+void TemporalLayers::PopulateCodecSpecific(bool key_frame,
+                                           CodecSpecificInfoVP8 *vp8_info) {
+  assert(number_of_temporal_layers_ > 1);
+  assert(0 < temporal_ids_length_);
+
+  if (key_frame) {
+    // Keyframe is always temporal layer 0
+    vp8_info->temporalIdx = 0;
+  } else {
+    vp8_info->temporalIdx = temporal_ids_[pattern_idx_ % temporal_ids_length_];
+  }
+  TemporalReferences temporal_reference =
+      temporal_pattern_[pattern_idx_ % temporal_pattern_length_];
+
+  if (temporal_reference == kTemporalUpdateAltrefWithoutDependency ||
+      temporal_reference == kTemporalUpdateGoldenWithoutDependency ||
+      temporal_reference == kTemporalUpdateGoldenWithoutDependencyRefAltRef ||
+      temporal_reference == kTemporalUpdateNoneNoRefGoldenRefAltRef ||
+      (temporal_reference == kTemporalUpdateNone &&
+      number_of_temporal_layers_ == 4)) {
+    vp8_info->layerSync = true;
+  } else {
+    vp8_info->layerSync = false;
+  }
+
+  if (vp8_info->temporalIdx == 0) {
+    tl0_pic_idx_++;
+  }
+  vp8_info->tl0PicIdx = tl0_pic_idx_;
+}
+}  // namespace webrtc
diff --git a/src/modules/video_coding/codecs/vp8/temporal_layers.h b/src/modules/video_coding/codecs/vp8/temporal_layers.h
new file mode 100644
index 0000000..6f738b5
--- /dev/null
+++ b/src/modules/video_coding/codecs/vp8/temporal_layers.h
@@ -0,0 +1,79 @@
+/* Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+*
+*  Use of this source code is governed by a BSD-style license
+*  that can be found in the LICENSE file in the root of the source
+*  tree. An additional intellectual property rights grant can be found
+*  in the file PATENTS.  All contributing project authors may
+*  be found in the AUTHORS file in the root of the source tree.
+*/
+/*
+* This file defines classes for doing temporal layers with VP8.
+*/
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_TEMPORAL_LAYERS_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_TEMPORAL_LAYERS_H_
+
+#include <typedefs.h>
+
+ // VPX forward declaration
+typedef struct vpx_codec_enc_cfg vpx_codec_enc_cfg_t;
+
+namespace webrtc {
+
+struct CodecSpecificInfoVP8;
+
+class TemporalLayers {
+ public:
+  TemporalLayers(int number_of_temporal_layers);
+
+  // Returns the recommended VP8 encode flags needed. May refresh the decoder
+  // and/or update the reference buffers.
+  int EncodeFlags();
+
+  bool ConfigureBitrates(int bitrate_kbit, vpx_codec_enc_cfg_t* cfg);
+
+  void PopulateCodecSpecific(bool key_frame, CodecSpecificInfoVP8 *vp8_info);
+
+ private:
+  enum TemporalReferences {
+    // First base layer frame for 3 temporal layers, which updates last and
+    // golden with alt ref dependency.
+    kTemporalUpdateLastAndGoldenRefAltRef = 11,
+    // First enhancement layer with alt ref dependency.
+    kTemporalUpdateGoldenRefAltRef = 10,
+    // First enhancement layer with alt ref dependency.
+    kTemporalUpdateGoldenWithoutDependencyRefAltRef = 9,
+    // Base layer with alt ref dependency.
+    kTemporalUpdateLastRefAltRef = 8,
+    // Highest enhacement layer without dependency on golden with alt ref
+    // dependency.
+    kTemporalUpdateNoneNoRefGoldenRefAltRef = 7,
+    // Second layer and last frame in cycle, for 2 layers.
+    kTemporalUpdateNoneNoRefAltref = 6,
+    // Highest enhancement layer.
+    kTemporalUpdateNone = 5,
+    // Second enhancement layer.
+    kTemporalUpdateAltref = 4,
+    // Second enhancement layer without dependency on previous frames in
+    // the second enhancement layer.
+    kTemporalUpdateAltrefWithoutDependency = 3,
+    // First enhancement layer.
+    kTemporalUpdateGolden = 2,
+    // First enhancement layer without dependency on previous frames in
+    // the first enhancement layer.
+    kTemporalUpdateGoldenWithoutDependency = 1,
+    // Base layer.
+    kTemporalUpdateLast = 0,
+  };
+  enum { kMaxTemporalPattern = 16 };
+
+  int number_of_temporal_layers_;
+  int temporal_ids_length_;
+  int temporal_ids_[kMaxTemporalPattern];
+  int temporal_pattern_length_;
+  TemporalReferences temporal_pattern_[kMaxTemporalPattern];
+  uint8_t tl0_pic_idx_;
+  uint8_t pattern_idx_;
+};
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_TEMPORAL_LAYERS_H_
+
diff --git a/src/modules/video_coding/codecs/vp8/temporal_layers_unittest.cc b/src/modules/video_coding/codecs/vp8/temporal_layers_unittest.cc
new file mode 100644
index 0000000..2572fcd
--- /dev/null
+++ b/src/modules/video_coding/codecs/vp8/temporal_layers_unittest.cc
@@ -0,0 +1,217 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include "gtest/gtest.h"
+#include "temporal_layers.h"
+#include "video_codec_interface.h"
+
+#include "vpx/vpx_encoder.h"
+#include "vpx/vp8cx.h"
+
+namespace webrtc {
+
+enum {
+  kTemporalUpdateLast = VP8_EFLAG_NO_UPD_GF |
+                        VP8_EFLAG_NO_UPD_ARF |
+                        VP8_EFLAG_NO_REF_GF |
+                        VP8_EFLAG_NO_REF_ARF,
+  kTemporalUpdateGoldenWithoutDependency = VP8_EFLAG_NO_REF_GF |
+                                           VP8_EFLAG_NO_REF_ARF |
+                                           VP8_EFLAG_NO_UPD_ARF |
+                                           VP8_EFLAG_NO_UPD_LAST,
+  kTemporalUpdateGolden = VP8_EFLAG_NO_REF_ARF |
+                          VP8_EFLAG_NO_UPD_ARF |
+                          VP8_EFLAG_NO_UPD_LAST,
+  kTemporalUpdateAltrefWithoutDependency = VP8_EFLAG_NO_REF_ARF |
+                                           VP8_EFLAG_NO_REF_GF |
+                                           VP8_EFLAG_NO_UPD_GF |
+                                           VP8_EFLAG_NO_UPD_LAST,
+  kTemporalUpdateAltref = VP8_EFLAG_NO_UPD_GF |
+                          VP8_EFLAG_NO_UPD_LAST,
+  kTemporalUpdateNone = VP8_EFLAG_NO_UPD_GF |
+                        VP8_EFLAG_NO_UPD_ARF |
+                        VP8_EFLAG_NO_UPD_LAST |
+                        VP8_EFLAG_NO_UPD_ENTROPY,
+  kTemporalUpdateNoneNoRefAltRef = VP8_EFLAG_NO_REF_ARF |
+                                   VP8_EFLAG_NO_UPD_GF |
+                                   VP8_EFLAG_NO_UPD_ARF |
+                                   VP8_EFLAG_NO_UPD_LAST |
+                                   VP8_EFLAG_NO_UPD_ENTROPY,
+  kTemporalUpdateNoneNoRefGolden = VP8_EFLAG_NO_REF_GF |
+                                   VP8_EFLAG_NO_UPD_GF |
+                                   VP8_EFLAG_NO_UPD_ARF |
+                                   VP8_EFLAG_NO_UPD_LAST |
+                                   VP8_EFLAG_NO_UPD_ENTROPY,
+  kTemporalUpdateGoldenWithoutDependencyRefAltRef = VP8_EFLAG_NO_REF_GF |
+                                                    VP8_EFLAG_NO_UPD_ARF |
+                                                    VP8_EFLAG_NO_UPD_LAST,
+  kTemporalUpdateGoldenRefAltRef = VP8_EFLAG_NO_UPD_ARF |
+                                   VP8_EFLAG_NO_UPD_LAST,
+  kTemporalUpdateLastRefAltRef = VP8_EFLAG_NO_UPD_GF |
+                                 VP8_EFLAG_NO_UPD_ARF |
+                                 VP8_EFLAG_NO_REF_GF,
+  kTemporalUpdateLastAndGoldenRefAltRef = VP8_EFLAG_NO_UPD_ARF |
+                                          VP8_EFLAG_NO_REF_GF,
+};
+
+TEST(TemporalLayersTest, 2Layers) {
+  TemporalLayers tl(2);
+  vpx_codec_enc_cfg_t cfg;
+  CodecSpecificInfoVP8 vp8_info;
+  tl.ConfigureBitrates(500, &cfg);
+
+  int expected_flags[16] = { kTemporalUpdateLast,
+                             kTemporalUpdateGoldenWithoutDependency,
+                             kTemporalUpdateLast,
+                             kTemporalUpdateGolden,
+                             kTemporalUpdateLast,
+                             kTemporalUpdateGolden,
+                             kTemporalUpdateLast,
+                             kTemporalUpdateNoneNoRefAltRef,
+                             kTemporalUpdateLast,
+                             kTemporalUpdateGoldenWithoutDependency,
+                             kTemporalUpdateLast,
+                             kTemporalUpdateGolden,
+                             kTemporalUpdateLast,
+                             kTemporalUpdateGolden,
+                             kTemporalUpdateLast,
+                             kTemporalUpdateNoneNoRefAltRef
+  };
+  int expected_temporal_idx[16] =
+      { 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1 };
+
+  bool expected_layer_sync[16] =
+      { false, true, false, false, false, false, false, false,
+        false, true, false, false, false, false, false, false };
+
+  for (int i = 0; i < 16; ++i) {
+    EXPECT_EQ(expected_flags[i], tl.EncodeFlags());
+    tl.PopulateCodecSpecific(false, &vp8_info);
+    EXPECT_EQ(expected_temporal_idx[i], vp8_info.temporalIdx);
+    bool expected_sync = expected_layer_sync[i];
+    EXPECT_EQ(expected_sync, vp8_info.layerSync);
+  }
+}
+
+TEST(TemporalLayersTest, 3Layers) {
+  TemporalLayers tl(3);
+  vpx_codec_enc_cfg_t cfg;
+  CodecSpecificInfoVP8 vp8_info;
+  tl.ConfigureBitrates(500, &cfg);
+
+  int expected_flags[16] = { kTemporalUpdateLastAndGoldenRefAltRef,
+                             kTemporalUpdateNoneNoRefGolden,
+                             kTemporalUpdateGoldenWithoutDependencyRefAltRef,
+                             kTemporalUpdateNone,
+                             kTemporalUpdateLastRefAltRef,
+                             kTemporalUpdateNone,
+                             kTemporalUpdateGoldenRefAltRef,
+                             kTemporalUpdateNone,
+                             kTemporalUpdateLastAndGoldenRefAltRef,
+                             kTemporalUpdateNoneNoRefGolden,
+                             kTemporalUpdateGoldenWithoutDependencyRefAltRef,
+                             kTemporalUpdateNone,
+                             kTemporalUpdateLastRefAltRef,
+                             kTemporalUpdateNone,
+                             kTemporalUpdateGoldenRefAltRef,
+                             kTemporalUpdateNone,
+  };
+  int expected_temporal_idx[16] =
+      { 0, 2, 1, 2, 0, 2, 1, 2, 0, 2, 1, 2, 0, 2, 1, 2 };
+
+  bool expected_layer_sync[16] =
+      { false, true, true, false, false, false, false, false,
+        false, true, true, false, false, false, false, false };
+
+  for (int i = 0; i < 16; ++i) {
+    EXPECT_EQ(expected_flags[i], tl.EncodeFlags());
+    tl.PopulateCodecSpecific(false, &vp8_info);
+    EXPECT_EQ(expected_temporal_idx[i], vp8_info.temporalIdx);
+    bool expected_sync = expected_layer_sync[i];
+    EXPECT_EQ(expected_sync, vp8_info.layerSync);
+  }
+}
+
+TEST(TemporalLayersTest, 4Layers) {
+  TemporalLayers tl(4);
+  vpx_codec_enc_cfg_t cfg;
+  CodecSpecificInfoVP8 vp8_info;
+  tl.ConfigureBitrates(500, &cfg);
+  int expected_flags[16] = {
+      kTemporalUpdateLast,
+      kTemporalUpdateNone,
+      kTemporalUpdateAltrefWithoutDependency,
+      kTemporalUpdateNone,
+      kTemporalUpdateGoldenWithoutDependency,
+      kTemporalUpdateNone,
+      kTemporalUpdateAltref,
+      kTemporalUpdateNone,
+      kTemporalUpdateLast,
+      kTemporalUpdateNone,
+      kTemporalUpdateAltref,
+      kTemporalUpdateNone,
+      kTemporalUpdateGolden,
+      kTemporalUpdateNone,
+      kTemporalUpdateAltref,
+      kTemporalUpdateNone,
+  };
+  int expected_temporal_idx[16] =
+      { 0, 3, 2, 3, 1, 3, 2, 3, 0, 3, 2, 3, 1, 3, 2, 3 };
+
+  bool expected_layer_sync[16] =
+      { false, true, true, true, true, true, false, true,
+        false, true, false, true, false, true, false, true };
+
+  for (int i = 0; i < 16; ++i) {
+    EXPECT_EQ(expected_flags[i], tl.EncodeFlags());
+    tl.PopulateCodecSpecific(false, &vp8_info);
+    EXPECT_EQ(expected_temporal_idx[i], vp8_info.temporalIdx);
+    bool expected_sync = expected_layer_sync[i];
+    EXPECT_EQ(expected_sync, vp8_info.layerSync);
+  }
+}
+
+TEST(TemporalLayersTest, KeyFrame) {
+  TemporalLayers tl(3);
+  vpx_codec_enc_cfg_t cfg;
+  CodecSpecificInfoVP8 vp8_info;
+  tl.ConfigureBitrates(500, &cfg);
+
+  int expected_flags[8] = {
+      kTemporalUpdateLastAndGoldenRefAltRef,
+      kTemporalUpdateNoneNoRefGolden,
+      kTemporalUpdateGoldenWithoutDependencyRefAltRef,
+      kTemporalUpdateNone,
+      kTemporalUpdateLastRefAltRef,
+      kTemporalUpdateNone,
+      kTemporalUpdateGoldenRefAltRef,
+      kTemporalUpdateNone,
+  };
+  int expected_temporal_idx[8] =
+      { 0, 0, 0, 0, 0, 0, 0, 2};
+
+  bool expected_layer_sync[8] =
+      { false, true, true, false, false, false, false, false };
+
+  for (int i = 0; i < 7; ++i) {
+    EXPECT_EQ(expected_flags[i], tl.EncodeFlags());
+    tl.PopulateCodecSpecific(true, &vp8_info);
+    EXPECT_EQ(expected_temporal_idx[i], vp8_info.temporalIdx);
+    bool expected_sync = expected_layer_sync[i];
+    EXPECT_EQ(expected_sync, vp8_info.layerSync);
+  }
+  EXPECT_EQ(expected_flags[7], tl.EncodeFlags());
+  tl.PopulateCodecSpecific(false, &vp8_info);
+  EXPECT_EQ(expected_temporal_idx[7], vp8_info.temporalIdx);
+  bool expected_sync = expected_layer_sync[7];
+  EXPECT_EQ(expected_sync, vp8_info.layerSync);
+}
+}  // namespace webrtc
diff --git a/src/modules/video_coding/codecs/vp8/test/benchmark.cc b/src/modules/video_coding/codecs/vp8/test/benchmark.cc
new file mode 100644
index 0000000..4fc0e25
--- /dev/null
+++ b/src/modules/video_coding/codecs/vp8/test/benchmark.cc
@@ -0,0 +1,39 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "benchmark.h"
+#include "testsupport/fileutils.h"
+#include "vp8.h"
+
+using namespace webrtc;
+
+VP8Benchmark::VP8Benchmark()
+    : Benchmark("VP8Benchmark", "VP8 benchmark over a range of test cases",
+                webrtc::test::OutputPath() + "VP8Benchmark.txt", "VP8") {
+}
+
+VP8Benchmark::VP8Benchmark(std::string name, std::string description)
+    : Benchmark(name, description,
+                webrtc::test::OutputPath() + "VP8Benchmark.txt",
+                "VP8") {
+}
+
+VP8Benchmark::VP8Benchmark(std::string name, std::string description,
+                           std::string resultsFileName)
+    : Benchmark(name, description, resultsFileName, "VP8") {
+}
+
+VideoEncoder* VP8Benchmark::GetNewEncoder() {
+    return VP8Encoder::Create();
+}
+
+VideoDecoder* VP8Benchmark::GetNewDecoder() {
+    return VP8Decoder::Create();
+}
diff --git a/src/modules/video_coding/codecs/vp8/test/benchmark.h b/src/modules/video_coding/codecs/vp8/test/benchmark.h
new file mode 100644
index 0000000..90e13e1
--- /dev/null
+++ b/src/modules/video_coding/codecs/vp8/test/benchmark.h
@@ -0,0 +1,28 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_BENCHMARK_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_BENCHMARK_H_
+
+#include "modules/video_coding/codecs/test_framework/benchmark.h"
+
+class VP8Benchmark : public Benchmark
+{
+public:
+    VP8Benchmark();
+    VP8Benchmark(std::string name, std::string description);
+    VP8Benchmark(std::string name, std::string description, std::string resultsFileName);
+
+protected:
+    virtual webrtc::VideoEncoder* GetNewEncoder();
+    virtual webrtc::VideoDecoder* GetNewDecoder();
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_BENCHMARK_H_
diff --git a/src/modules/video_coding/codecs/vp8/test/dual_decoder_test.cc b/src/modules/video_coding/codecs/vp8/test/dual_decoder_test.cc
new file mode 100644
index 0000000..60cd97a
--- /dev/null
+++ b/src/modules/video_coding/codecs/vp8/test/dual_decoder_test.cc
@@ -0,0 +1,221 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "dual_decoder_test.h"
+
+#include <assert.h>
+#include <string.h> // memcmp
+#include <time.h>
+
+#include "testsupport/fileutils.h"
+
+VP8DualDecoderTest::VP8DualDecoderTest(float bitRate)
+:
+VP8NormalAsyncTest(bitRate)
+{
+    _decoder2 = NULL;
+}
+
+VP8DualDecoderTest::VP8DualDecoderTest()
+:
+VP8NormalAsyncTest("VP8 Dual Decoder Test", "Tests VP8 dual decoder", 1),
+_decoder2(NULL)
+{}
+
+VP8DualDecoderTest::~VP8DualDecoderTest()
+{
+    if(_decoder2)
+    {
+        _decoder2->Release();
+        delete _decoder2;
+    }
+
+    _decodedVideoBuffer2.Free();
+}
+
+void
+VP8DualDecoderTest::Perform()
+{
+    _inname = webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv";
+    CodecSettings(352, 288, 30, _bitRate);
+    Setup();
+    _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+    _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+    _decodedVideoBuffer2.VerifyAndAllocate(_lengthSourceFrame);
+    if(_encoder->InitEncode(&_inst, 4, 1460) < 0)
+    {
+        exit(EXIT_FAILURE);
+    }
+    _decoder->InitDecode(&_inst,1);
+
+    FrameQueue frameQueue;
+    VideoEncodeCompleteCallback encCallback(_encodedFile, &frameQueue, *this);
+    DualDecoderCompleteCallback decCallback(&_decodedVideoBuffer);
+    DualDecoderCompleteCallback decCallback2(&_decodedVideoBuffer2);
+    _encoder->RegisterEncodeCompleteCallback(&encCallback);
+    _decoder->RegisterDecodeCompleteCallback(&decCallback);
+    if (SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK)
+    {
+        exit(EXIT_FAILURE);
+    }
+    _totalEncodeTime = _totalDecodeTime = 0;
+    _totalEncodePipeTime = _totalDecodePipeTime = 0;
+    bool complete = false;
+    _framecnt = 0;
+    _encFrameCnt = 0;
+    _decFrameCnt = 0;
+    _sumEncBytes = 0;
+    _lengthEncFrame = 0;
+    double starttime = clock()/(double)CLOCKS_PER_SEC;
+    while (!complete)
+    {
+        if (_encFrameCnt == 10)
+        {
+            // initialize second decoder and copy state
+            _decoder2 = static_cast<webrtc::VP8Decoder *>(_decoder->Copy());
+            assert(_decoder2 != NULL);
+            _decoder2->RegisterDecodeCompleteCallback(&decCallback2);
+        }
+        CodecSpecific_InitBitrate();
+        complete = Encode();
+        if (!frameQueue.Empty() || complete)
+        {
+            while (!frameQueue.Empty())
+            {
+                _frameToDecode =
+                    static_cast<FrameQueueTuple *>(frameQueue.PopFrame());
+                int lost = DoPacketLoss();
+                if (lost == 2)
+                {
+                    // Lost the whole frame, continue
+                    _missingFrames = true;
+                    delete _frameToDecode;
+                    _frameToDecode = NULL;
+                    continue;
+                }
+                int ret = Decode(lost);
+                delete _frameToDecode;
+                _frameToDecode = NULL;
+                if (ret < 0)
+                {
+                    fprintf(stderr,"\n\nError in decoder: %d\n\n", ret);
+                    exit(EXIT_FAILURE);
+                }
+                else if (ret == 0)
+                {
+                    _framecnt++;
+                }
+                else
+                {
+                    fprintf(stderr,
+                        "\n\nPositive return value from decode!\n\n");
+                }
+            }
+        }
+    }
+    double endtime = clock()/(double)CLOCKS_PER_SEC;
+    double totalExecutionTime = endtime - starttime;
+    printf("Total execution time: %.1f s\n", totalExecutionTime);
+    _sumEncBytes = encCallback.EncodedBytes();
+    double actualBitRate = ActualBitRate(_encFrameCnt) / 1000.0;
+    double avgEncTime = _totalEncodeTime / _encFrameCnt;
+    double avgDecTime = _totalDecodeTime / _decFrameCnt;
+    printf("Actual bitrate: %f kbps\n", actualBitRate);
+    printf("Average encode time: %.1f ms\n", 1000 * avgEncTime);
+    printf("Average decode time: %.1f ms\n", 1000 * avgDecTime);
+    printf("Average encode pipeline time: %.1f ms\n",
+           1000 * _totalEncodePipeTime / _encFrameCnt);
+    printf("Average decode pipeline  time: %.1f ms\n",
+           1000 * _totalDecodePipeTime / _decFrameCnt);
+    printf("Number of encoded frames: %u\n", _encFrameCnt);
+    printf("Number of decoded frames: %u\n", _decFrameCnt);
+    (*_log) << "Actual bitrate: " << actualBitRate << " kbps\tTarget: " <<
+        _bitRate << " kbps" << std::endl;
+    (*_log) << "Average encode time: " << avgEncTime << " s" << std::endl;
+    (*_log) << "Average decode time: " << avgDecTime << " s" << std::endl;
+    _encoder->Release();
+    _decoder->Release();
+    Teardown();
+}
+
+
+int
+VP8DualDecoderTest::Decode(int lossValue)
+{
+    _sumEncBytes += _frameToDecode->_frame->GetLength();
+    webrtc::EncodedImage encodedImage;
+    VideoEncodedBufferToEncodedImage(*(_frameToDecode->_frame), encodedImage);
+    encodedImage._completeFrame = !lossValue;
+    _decodeCompleteTime = 0;
+    _decodeTimes[encodedImage._timeStamp] = clock()/(double)CLOCKS_PER_SEC;
+    int ret = _decoder->Decode(encodedImage, _missingFrames, NULL,
+                               _frameToDecode->_codecSpecificInfo);
+    // second decoder
+    if (_decoder2)
+    {
+        int ret2 = _decoder2->Decode(encodedImage, _missingFrames, NULL,
+                                     _frameToDecode->_codecSpecificInfo,
+                                     0 /* dummy */);
+
+        // check return values
+        if (ret < 0 || ret2 < 0 || ret2 != ret)
+        {
+            exit(EXIT_FAILURE);
+        }
+
+        // compare decoded images
+        if (!CheckIfBitExact(_decodedVideoBuffer.GetBuffer(),
+            _decodedVideoBuffer.GetLength(),
+            _decodedVideoBuffer2.GetBuffer(), _decodedVideoBuffer.GetLength()))
+        {
+            fprintf(stderr,"\n\nClone output different from master.\n\n");
+            exit(EXIT_FAILURE);
+        }
+
+    }
+
+    _missingFrames = false;
+    return ret;
+}
+
+
+bool
+VP8DualDecoderTest::CheckIfBitExact(const void* ptrA, unsigned int aLengthBytes,
+                                    const void* ptrB, unsigned int bLengthBytes)
+{
+    if (aLengthBytes != bLengthBytes)
+    {
+        return false;
+    }
+
+    return memcmp(ptrA, ptrB, aLengthBytes) == 0;
+}
+
+WebRtc_Word32 DualDecoderCompleteCallback::Decoded(webrtc::VideoFrame& image)
+{
+    _decodedVideoBuffer->VerifyAndAllocate(image.Length());
+    _decodedVideoBuffer->CopyBuffer(image.Length(), image.Buffer());
+    _decodedVideoBuffer->SetWidth(image.Width());
+    _decodedVideoBuffer->SetHeight(image.Height());
+    _decodedVideoBuffer->SetTimeStamp(image.TimeStamp());
+    _decodeComplete = true;
+    return 0;
+}
+
+bool DualDecoderCompleteCallback::DecodeComplete()
+{
+    if (_decodeComplete)
+    {
+        _decodeComplete = false;
+        return true;
+    }
+    return false;
+}
+
diff --git a/src/modules/video_coding/codecs/vp8/test/dual_decoder_test.h b/src/modules/video_coding/codecs/vp8/test/dual_decoder_test.h
new file mode 100644
index 0000000..b1d84a7
--- /dev/null
+++ b/src/modules/video_coding/codecs/vp8/test/dual_decoder_test.h
@@ -0,0 +1,52 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_DUAL_DECODER_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_DUAL_DECODER_TEST_H_
+
+#include "vp8.h"
+#include "normal_async_test.h"
+
+class DualDecoderCompleteCallback;
+
+class VP8DualDecoderTest : public VP8NormalAsyncTest
+{
+public:
+    VP8DualDecoderTest(float bitRate);
+    VP8DualDecoderTest();
+    virtual ~VP8DualDecoderTest();
+    virtual void Perform();
+protected:
+    VP8DualDecoderTest(std::string name, std::string description,
+                       unsigned int testNo)
+    : VP8NormalAsyncTest(name, description, testNo) {}
+    virtual int Decode(int lossValue = 0);
+
+    webrtc::VP8Decoder*     _decoder2;
+    TestVideoBuffer         _decodedVideoBuffer2;
+    static bool CheckIfBitExact(const void *ptrA, unsigned int aLengthBytes, 
+        const void *ptrB, unsigned int bLengthBytes);
+private:
+};
+
+class DualDecoderCompleteCallback : public webrtc::DecodedImageCallback
+{
+public:
+    DualDecoderCompleteCallback(TestVideoBuffer* buffer)
+    : _decodedVideoBuffer(buffer), _decodeComplete(false) {}
+    WebRtc_Word32 Decoded(webrtc::VideoFrame& decodedImage);
+    bool DecodeComplete();
+private:
+    TestVideoBuffer* _decodedVideoBuffer;
+    bool _decodeComplete;
+};
+
+
+#endif
diff --git a/src/modules/video_coding/codecs/vp8/test/normal_async_test.cc b/src/modules/video_coding/codecs/vp8/test/normal_async_test.cc
new file mode 100644
index 0000000..9ed508b
--- /dev/null
+++ b/src/modules/video_coding/codecs/vp8/test/normal_async_test.cc
@@ -0,0 +1,83 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "normal_async_test.h"
+
+using namespace webrtc;
+
+VP8NormalAsyncTest::VP8NormalAsyncTest(WebRtc_UWord32 bitRate) :
+    NormalAsyncTest("VP8 Normal Test 1", "Tests VP8 normal execution", bitRate, 1),
+    _hasReceivedRPSI(false)
+{
+}
+
+VP8NormalAsyncTest::VP8NormalAsyncTest(WebRtc_UWord32 bitRate, unsigned int testNo):
+    NormalAsyncTest("VP8 Normal Test 1", "Tests VP8 normal execution", bitRate, testNo),
+    _hasReceivedRPSI(false)
+{
+}
+
+void
+VP8NormalAsyncTest::CodecSettings(int width, int height, WebRtc_UWord32 frameRate /*=30*/, WebRtc_UWord32 bitRate /*=0*/)
+{
+    if (bitRate > 0)
+    {
+        _bitRate = bitRate;
+
+    }else if (_bitRate == 0)
+    {
+        _bitRate = 600;
+    }
+    _inst.codecType = kVideoCodecVP8;
+    _inst.codecSpecific.VP8.feedbackModeOn = true;
+    _inst.codecSpecific.VP8.pictureLossIndicationOn = true;
+    _inst.codecSpecific.VP8.complexity = kComplexityNormal;
+    _inst.maxFramerate = (unsigned char)frameRate;
+    _inst.startBitrate = _bitRate;
+    _inst.maxBitrate = 8000;
+    _inst.width = width;
+    _inst.height = height;
+}
+
+void
+VP8NormalAsyncTest::CodecSpecific_InitBitrate()
+{
+    if (_bitRate == 0)
+    {
+        _encoder->SetRates(600, _inst.maxFramerate);
+    }else
+    {
+         _encoder->SetRates(_bitRate, _inst.maxFramerate);
+    }
+}
+
+WebRtc_Word32
+VP8NormalAsyncTest::ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId)
+{
+    _pictureIdRPSI = pictureId;
+    _hasReceivedRPSI = true;
+    return 0;
+}
+
+CodecSpecificInfo*
+VP8NormalAsyncTest::CreateEncoderSpecificInfo() const
+{
+    CodecSpecificInfo* vp8CodecSpecificInfo = new CodecSpecificInfo();
+    vp8CodecSpecificInfo->codecType = kVideoCodecVP8;
+    vp8CodecSpecificInfo->codecSpecific.VP8.hasReceivedRPSI = _hasReceivedRPSI;
+    vp8CodecSpecificInfo->codecSpecific.VP8.pictureIdRPSI = _pictureIdRPSI;
+    vp8CodecSpecificInfo->codecSpecific.VP8.hasReceivedSLI = _hasReceivedSLI;
+    vp8CodecSpecificInfo->codecSpecific.VP8.pictureIdSLI = _pictureIdSLI;
+
+    _hasReceivedSLI = false;
+    _hasReceivedRPSI = false;
+
+    return vp8CodecSpecificInfo;
+}
diff --git a/src/modules/video_coding/codecs/vp8/test/normal_async_test.h b/src/modules/video_coding/codecs/vp8/test/normal_async_test.h
new file mode 100644
index 0000000..e2f548e
--- /dev/null
+++ b/src/modules/video_coding/codecs/vp8/test/normal_async_test.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_NORMAL_ASYNC_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_NORMAL_ASYNC_TEST_H_
+
+#include "modules/video_coding/codecs/test_framework/normal_async_test.h"
+
+class VP8NormalAsyncTest : public NormalAsyncTest
+{
+public:
+    VP8NormalAsyncTest(WebRtc_UWord32 bitRate);
+    VP8NormalAsyncTest(WebRtc_UWord32 bitRate, unsigned int testNo);
+    VP8NormalAsyncTest() : NormalAsyncTest("VP8 Normal Test 1", "Tests VP8 normal execution", 1) {}
+protected:
+    VP8NormalAsyncTest(std::string name, std::string description, unsigned int testNo) : NormalAsyncTest(name, description, testNo) {}
+    virtual void CodecSpecific_InitBitrate();
+    virtual void CodecSettings(int width, int height, WebRtc_UWord32 frameRate=30, WebRtc_UWord32 bitRate=0);
+    virtual webrtc::CodecSpecificInfo* CreateEncoderSpecificInfo() const;
+    virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId);
+private:
+    mutable bool  _hasReceivedRPSI;
+    WebRtc_UWord64  _pictureIdRPSI;
+};
+
+#endif
diff --git a/src/modules/video_coding/codecs/vp8/test/packet_loss_test.cc b/src/modules/video_coding/codecs/vp8/test/packet_loss_test.cc
new file mode 100644
index 0000000..69e028a
--- /dev/null
+++ b/src/modules/video_coding/codecs/vp8/test/packet_loss_test.cc
@@ -0,0 +1,74 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "packet_loss_test.h"
+#include <cassert>
+
+VP8PacketLossTest::VP8PacketLossTest()
+:
+PacketLossTest("VP8PacketLossTest", "Encode, remove lost packets, decode")
+{
+}
+
+VP8PacketLossTest::VP8PacketLossTest(std::string name, std::string description)
+:
+PacketLossTest(name, description)
+{
+}
+
+VP8PacketLossTest::VP8PacketLossTest(double lossRate,
+                                     bool useNack,
+                                     int rttFrames)
+:
+PacketLossTest("VP8PacketLossTest", "Encode, remove lost packets, decode",
+               lossRate, useNack, rttFrames)
+{
+}
+
+int VP8PacketLossTest::ByteLoss(int size, unsigned char* /* pkg */, int bytesToLose)
+{
+    int retLength = size - bytesToLose;
+    if (retLength < 4)
+    {
+        retLength = 4;
+    }
+    return retLength;
+}
+
+WebRtc_Word32
+VP8PacketLossTest::ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId)
+{
+    _pictureIdRPSI = pictureId;
+    _hasReceivedRPSI = true;
+    return 0;
+}
+
+webrtc::CodecSpecificInfo*
+VP8PacketLossTest::CreateEncoderSpecificInfo() const
+{
+    webrtc::CodecSpecificInfo* vp8CodecSpecificInfo =
+      new webrtc::CodecSpecificInfo();
+    vp8CodecSpecificInfo->codecType = webrtc::kVideoCodecVP8;
+    vp8CodecSpecificInfo->codecSpecific.VP8.hasReceivedRPSI = _hasReceivedRPSI;
+    vp8CodecSpecificInfo->codecSpecific.VP8.pictureIdRPSI = _pictureIdRPSI;
+    vp8CodecSpecificInfo->codecSpecific.VP8.hasReceivedSLI = _hasReceivedSLI;
+    vp8CodecSpecificInfo->codecSpecific.VP8.pictureIdSLI = _pictureIdSLI;
+
+    _hasReceivedSLI = false;
+    _hasReceivedRPSI = false;
+
+    return vp8CodecSpecificInfo;
+}
+
+bool VP8PacketLossTest::PacketLoss(double lossRate, int numLosses) {
+  if (numLosses)
+    return true;
+  return RandUniform() < lossRate;
+}
diff --git a/src/modules/video_coding/codecs/vp8/test/packet_loss_test.h b/src/modules/video_coding/codecs/vp8/test/packet_loss_test.h
new file mode 100644
index 0000000..44fa729
--- /dev/null
+++ b/src/modules/video_coding/codecs/vp8/test/packet_loss_test.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_PACKET_LOSS_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_PACKET_LOSS_TEST_H_
+
+#include "modules/video_coding/codecs/test_framework/packet_loss_test.h"
+
+class VP8PacketLossTest : public PacketLossTest
+{
+public:
+    VP8PacketLossTest();
+    VP8PacketLossTest(double lossRate, bool useNack, int rttFrames);
+
+protected:
+    VP8PacketLossTest(std::string name, std::string description);
+    virtual int ByteLoss(int size, unsigned char *pkg, int bytesToLose);
+    WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId);
+    // |lossRate| is the probability of packet loss between 0 and 1.
+    // |numLosses| is the number of packets already lost in the current frame.
+    virtual bool PacketLoss(double lossRate, int numLosses);
+
+    webrtc::CodecSpecificInfo* CreateEncoderSpecificInfo() const;
+
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_PACKET_LOSS_TEST_H_
diff --git a/src/modules/video_coding/codecs/vp8/test/rps_test.cc b/src/modules/video_coding/codecs/vp8/test/rps_test.cc
new file mode 100644
index 0000000..82b63db
--- /dev/null
+++ b/src/modules/video_coding/codecs/vp8/test/rps_test.cc
@@ -0,0 +1,311 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rps_test.h"
+
+#include <assert.h>
+#include <string.h> // memcmp
+#include <time.h>
+
+#include "vp8.h"
+
+VP8RpsTest::VP8RpsTest(float bitRate)
+    : VP8NormalAsyncTest(bitRate),
+      decoder2_(webrtc::VP8Decoder::Create()),
+      sli_(false) {
+}
+
+VP8RpsTest::VP8RpsTest()
+    : VP8NormalAsyncTest("VP8 Reference Picture Selection Test",
+                         "VP8 Reference Picture Selection Test", 1),
+      decoder2_(webrtc::VP8Decoder::Create()),
+      sli_(false) {
+}
+
+VP8RpsTest::~VP8RpsTest() {
+  if (decoder2_) {
+    decoder2_->Release();
+    delete decoder2_;
+  }
+  decoded_frame2_.Free();
+}
+
+void VP8RpsTest::Perform() {
+  _inname = "test/testFiles/foreman_cif.yuv";
+  CodecSettings(352, 288, 30, _bitRate);
+  Setup();
+  _inputVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+  _decodedVideoBuffer.VerifyAndAllocate(_lengthSourceFrame);
+  decoded_frame2_.VerifyAndAllocate(_lengthSourceFrame);
+
+  // Enable RPS functionality
+  _inst.codecSpecific.VP8.pictureLossIndicationOn = true;
+  _inst.codecSpecific.VP8.feedbackModeOn = true;
+
+  if(_encoder->InitEncode(&_inst, 4, 1460) < 0)
+    exit(EXIT_FAILURE);
+
+  _decoder->InitDecode(&_inst,1);
+  decoder2_->InitDecode(&_inst,1);
+
+  FrameQueue frameQueue;
+  VideoEncodeCompleteCallback encCallback(_encodedFile, &frameQueue, *this);
+  RpsDecodeCompleteCallback decCallback(&_decodedVideoBuffer);
+  RpsDecodeCompleteCallback decCallback2(&decoded_frame2_);
+  _encoder->RegisterEncodeCompleteCallback(&encCallback);
+  _decoder->RegisterDecodeCompleteCallback(&decCallback);
+  decoder2_->RegisterDecodeCompleteCallback(&decCallback2);
+
+  if (SetCodecSpecificParameters() != WEBRTC_VIDEO_CODEC_OK)
+    exit(EXIT_FAILURE);
+
+  _totalEncodeTime = _totalDecodeTime = 0;
+  _totalEncodePipeTime = _totalDecodePipeTime = 0;
+  bool complete = false;
+  _framecnt = 0;
+  _encFrameCnt = 0;
+  _decFrameCnt = 0;
+  _sumEncBytes = 0;
+  _lengthEncFrame = 0;
+  double starttime = clock()/(double)CLOCKS_PER_SEC;
+  while (!complete) {
+    CodecSpecific_InitBitrate();
+    complete = EncodeRps(&decCallback2);
+    if (!frameQueue.Empty() || complete) {
+      while (!frameQueue.Empty()) {
+        _frameToDecode =
+            static_cast<FrameQueueTuple *>(frameQueue.PopFrame());
+        int lost = DoPacketLoss();
+        if (lost == 2) {
+            // Lost the whole frame, continue
+            _missingFrames = true;
+            delete _frameToDecode;
+            _frameToDecode = NULL;
+            continue;
+        }
+        int ret = Decode(lost);
+        delete _frameToDecode;
+        _frameToDecode = NULL;
+        if (ret < 0) {
+            fprintf(stderr,"\n\nError in decoder: %d\n\n", ret);
+            exit(EXIT_FAILURE);
+        }
+        else if (ret == 0) {
+            _framecnt++;
+        }
+        else {
+            fprintf(stderr,
+                "\n\nPositive return value from decode!\n\n");
+        }
+      }
+    }
+  }
+  double endtime = clock()/(double)CLOCKS_PER_SEC;
+  double totalExecutionTime = endtime - starttime;
+  printf("Total execution time: %.1f s\n", totalExecutionTime);
+  _sumEncBytes = encCallback.EncodedBytes();
+  double actualBitRate = ActualBitRate(_encFrameCnt) / 1000.0;
+  double avgEncTime = _totalEncodeTime / _encFrameCnt;
+  double avgDecTime = _totalDecodeTime / _decFrameCnt;
+  printf("Actual bitrate: %f kbps\n", actualBitRate);
+  printf("Average encode time: %.1f ms\n", 1000 * avgEncTime);
+  printf("Average decode time: %.1f ms\n", 1000 * avgDecTime);
+  printf("Average encode pipeline time: %.1f ms\n",
+         1000 * _totalEncodePipeTime / _encFrameCnt);
+  printf("Average decode pipeline  time: %.1f ms\n",
+         1000 * _totalDecodePipeTime / _decFrameCnt);
+  printf("Number of encoded frames: %u\n", _encFrameCnt);
+  printf("Number of decoded frames: %u\n", _decFrameCnt);
+  (*_log) << "Actual bitrate: " << actualBitRate << " kbps\tTarget: " <<
+      _bitRate << " kbps" << std::endl;
+  (*_log) << "Average encode time: " << avgEncTime << " s" << std::endl;
+  (*_log) << "Average decode time: " << avgDecTime << " s" << std::endl;
+  _encoder->Release();
+  _decoder->Release();
+  Teardown();
+}
+
+bool VP8RpsTest::EncodeRps(RpsDecodeCompleteCallback* decodeCallback) {
+  _lengthEncFrame = 0;
+  size_t bytes_read = fread(_sourceBuffer, 1, _lengthSourceFrame, _sourceFile);
+  if (bytes_read < _lengthSourceFrame)
+    return true;
+  _inputVideoBuffer.CopyBuffer(_lengthSourceFrame, _sourceBuffer);
+  _inputVideoBuffer.SetTimeStamp((unsigned int)
+      (_encFrameCnt * 9e4 / _inst.maxFramerate));
+  _inputVideoBuffer.SetWidth(_inst.width);
+  _inputVideoBuffer.SetHeight(_inst.height);
+  webrtc::VideoFrame rawImage;
+  VideoBufferToRawImage(_inputVideoBuffer, rawImage);
+  if (feof(_sourceFile) != 0) {
+      return true;
+  }
+  _encodeCompleteTime = 0;
+  _encodeTimes[rawImage.TimeStamp()] = tGetTime();
+  webrtc::VideoFrameType frameType = webrtc::kDeltaFrame;
+
+  webrtc::CodecSpecificInfo* codecSpecificInfo = CreateEncoderSpecificInfo();
+  codecSpecificInfo->codecSpecific.VP8.pictureIdRPSI =
+      decodeCallback->LastDecodedRefPictureId(
+          &codecSpecificInfo->codecSpecific.VP8.hasReceivedRPSI);
+  if (sli_) {
+    codecSpecificInfo->codecSpecific.VP8.pictureIdSLI =
+        decodeCallback->LastDecodedPictureId();
+    codecSpecificInfo->codecSpecific.VP8.hasReceivedSLI = true;
+    sli_ = false;
+  }
+  printf("Encoding: %u\n", _framecnt);
+  int ret = _encoder->Encode(rawImage, codecSpecificInfo, frameType);
+  if (ret < 0)
+    printf("Failed to encode: %u\n", _framecnt);
+
+  if (codecSpecificInfo != NULL) {
+      delete codecSpecificInfo;
+      codecSpecificInfo = NULL;
+  }
+  if (_encodeCompleteTime > 0) {
+      _totalEncodeTime += _encodeCompleteTime -
+          _encodeTimes[rawImage.TimeStamp()];
+  }
+  else {
+      _totalEncodeTime += tGetTime() - _encodeTimes[rawImage.TimeStamp()];
+  }
+  return false;
+}
+
+//#define FRAME_LOSS 1
+
+int VP8RpsTest::Decode(int lossValue) {
+  _sumEncBytes += _frameToDecode->_frame->GetLength();
+  webrtc::EncodedImage encodedImage;
+  VideoEncodedBufferToEncodedImage(*(_frameToDecode->_frame), encodedImage);
+  encodedImage._completeFrame = !lossValue;
+  _decodeCompleteTime = 0;
+  _decodeTimes[encodedImage._timeStamp] = clock()/(double)CLOCKS_PER_SEC;
+  int ret = _decoder->Decode(encodedImage, _missingFrames, NULL,
+                             _frameToDecode->_codecSpecificInfo);
+  // Drop every 10th frame for the second decoder
+#if FRAME_LOSS
+  if (_framecnt == 0 || _framecnt % 10 != 0) {
+    printf("Decoding: %u\n", _framecnt);
+    if (_framecnt > 1 && (_framecnt - 1) % 10 == 0)
+      _missingFrames = true;
+#else
+  if (true) {
+    if (_framecnt > 0 && _framecnt % 10 == 0) {
+      encodedImage._length = std::rand() % encodedImage._length;
+      printf("Decoding with loss: %u\n", _framecnt);
+    }
+    else
+      printf("Decoding: %u\n", _framecnt);
+#endif
+    int ret2 = decoder2_->Decode(encodedImage, _missingFrames, NULL,
+                                 _frameToDecode->_codecSpecificInfo,
+                                 0 /* dummy */);
+
+    // check return values
+    if (ret < 0 || ret2 < 0) {
+      return -1;
+    } else if (ret2 == WEBRTC_VIDEO_CODEC_ERR_REQUEST_SLI ||
+        ret2 == WEBRTC_VIDEO_CODEC_REQUEST_SLI) {
+      sli_ = true;
+    }
+
+    // compare decoded images
+#if FRAME_LOSS
+    if (!_missingFrames) {
+      if (!CheckIfBitExact(_decodedVideoBuffer.GetBuffer(),
+        _decodedVideoBuffer.GetLength(),
+        decoded_frame2_.GetBuffer(), _decodedVideoBuffer.GetLength())) {
+        fprintf(stderr,"\n\nRPS decoder different from master: %u\n\n",
+                _framecnt);
+        return -1;
+      }
+    }
+#else
+    if (_framecnt > 0 && _framecnt % 10 != 0) {
+      if (!CheckIfBitExact(_decodedVideoBuffer.GetBuffer(),
+        _decodedVideoBuffer.GetLength(),
+        decoded_frame2_.GetBuffer(), _decodedVideoBuffer.GetLength())) {
+        fprintf(stderr,"\n\nRPS decoder different from master: %u\n\n",
+                _framecnt);
+        return -1;
+      }
+    }
+#endif
+  }
+#if FRAME_LOSS
+  else
+    printf("Dropping %u\n", _framecnt);
+#endif
+  _missingFrames = false;
+  return 0;
+}
+
+
+bool
+VP8RpsTest::CheckIfBitExact(const void* ptrA, unsigned int aLengthBytes,
+                            const void* ptrB, unsigned int bLengthBytes) {
+  if (aLengthBytes != bLengthBytes)
+    return false;
+  return memcmp(ptrA, ptrB, aLengthBytes) == 0;
+}
+
+RpsDecodeCompleteCallback::RpsDecodeCompleteCallback(TestVideoBuffer* buffer)
+    : decoded_frame_(buffer),
+      decode_complete_(false),
+      last_decoded_picture_id_(0),
+      last_decoded_ref_picture_id_(0),
+      updated_ref_picture_id_(false) {
+}
+
+WebRtc_Word32 RpsDecodeCompleteCallback::Decoded(webrtc::VideoFrame& image) {
+  decoded_frame_->VerifyAndAllocate(image.Length());
+  decoded_frame_->CopyBuffer(image.Length(), image.Buffer());
+  decoded_frame_->SetWidth(image.Width());
+  decoded_frame_->SetHeight(image.Height());
+  decoded_frame_->SetTimeStamp(image.TimeStamp());
+  decode_complete_ = true;
+  return 0;
+}
+
+bool RpsDecodeCompleteCallback::DecodeComplete() {
+  if (decode_complete_)
+  {
+    decode_complete_ = false;
+    return true;
+  }
+  return false;
+}
+
+WebRtc_Word32 RpsDecodeCompleteCallback::ReceivedDecodedReferenceFrame(
+    const WebRtc_UWord64 picture_id) {
+  last_decoded_ref_picture_id_ = picture_id & 0x7FFF;
+  updated_ref_picture_id_ = true;
+  return 0;
+}
+
+WebRtc_Word32 RpsDecodeCompleteCallback::ReceivedDecodedFrame(
+    const WebRtc_UWord64 picture_id) {
+  last_decoded_picture_id_ = picture_id & 0x3F;
+  return 0;
+}
+
+WebRtc_UWord64 RpsDecodeCompleteCallback::LastDecodedPictureId() const {
+  return last_decoded_picture_id_;
+}
+
+WebRtc_UWord64 RpsDecodeCompleteCallback::LastDecodedRefPictureId(
+    bool *updated) {
+  if (updated)
+    *updated = updated_ref_picture_id_;
+  updated_ref_picture_id_ = false;
+  return last_decoded_ref_picture_id_;
+}
diff --git a/src/modules/video_coding/codecs/vp8/test/rps_test.h b/src/modules/video_coding/codecs/vp8/test/rps_test.h
new file mode 100644
index 0000000..f5cdcc6
--- /dev/null
+++ b/src/modules/video_coding/codecs/vp8/test/rps_test.h
@@ -0,0 +1,57 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_RPS_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_RPS_TEST_H_
+
+#include "vp8.h"
+#include "normal_async_test.h"
+
+class RpsDecodeCompleteCallback;
+
+class VP8RpsTest : public VP8NormalAsyncTest {
+ public:
+  VP8RpsTest(float bitRate);
+  VP8RpsTest();
+  virtual ~VP8RpsTest();
+  virtual void Perform();
+ private:
+  VP8RpsTest(std::string name, std::string description, unsigned int testNo)
+  : VP8NormalAsyncTest(name, description, testNo) {}
+  virtual bool EncodeRps(RpsDecodeCompleteCallback* decodeCallback);
+  virtual int Decode(int lossValue = 0);
+
+  static bool CheckIfBitExact(const void *ptrA, unsigned int aLengthBytes,
+      const void *ptrB, unsigned int bLengthBytes);
+
+  webrtc::VP8Decoder* decoder2_;
+  TestVideoBuffer decoded_frame2_;
+  bool sli_;
+};
+
+class RpsDecodeCompleteCallback : public webrtc::DecodedImageCallback {
+ public:
+  RpsDecodeCompleteCallback(TestVideoBuffer* buffer);
+  WebRtc_Word32 Decoded(webrtc::VideoFrame& decodedImage);
+  bool DecodeComplete();
+  WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 picture_id);
+  WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 picture_id);
+  WebRtc_UWord64 LastDecodedPictureId() const;
+  WebRtc_UWord64 LastDecodedRefPictureId(bool *updated);
+
+ private:
+  TestVideoBuffer* decoded_frame_;
+  bool decode_complete_;
+  WebRtc_UWord64 last_decoded_picture_id_;
+  WebRtc_UWord64 last_decoded_ref_picture_id_;
+  bool updated_ref_picture_id_;
+};
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_RPS_TEST_H_
diff --git a/src/modules/video_coding/codecs/vp8/test/tester.cc b/src/modules/video_coding/codecs/vp8/test/tester.cc
new file mode 100644
index 0000000..18fd32e
--- /dev/null
+++ b/src/modules/video_coding/codecs/vp8/test/tester.cc
@@ -0,0 +1,62 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <fstream>
+#include <iostream>
+#include <vector>
+
+#include "benchmark.h"
+#include "dual_decoder_test.h"
+#include "gtest/gtest.h"
+#include "normal_async_test.h"
+#include "packet_loss_test.h"
+#include "vp8_unittest.h"
+#include "rps_test.h"
+#include "testsupport/fileutils.h"
+#include "vp8.h"
+
+using namespace webrtc;
+
+void PopulateTests(std::vector<CodecTest*>* tests)
+{
+//    tests->push_back(new VP8RpsTest());
+    tests->push_back(new VP8UnitTest());
+//    tests->push_back(new VP8DualDecoderTest());
+//    tests->push_back(new VP8Benchmark());
+//    tests->push_back(new VP8PacketLossTest(0.05, false, 5));
+//    tests->push_back(new VP8NormalAsyncTest());
+}
+
+TEST(Vp8WrapperTest, RunAllTests)
+{
+    VP8Encoder* enc;
+    VP8Decoder* dec;
+    std::vector<CodecTest*> tests;
+    PopulateTests(&tests);
+    std::fstream log;
+    std::string log_file = webrtc::test::OutputPath() + "VP8_test_log.txt";
+    log.open(log_file.c_str(), std::fstream::out | std::fstream::app);
+    std::vector<CodecTest*>::iterator it;
+    for (it = tests.begin() ; it < tests.end(); it++)
+    {
+        enc = VP8Encoder::Create();
+        dec = VP8Decoder::Create();
+        (*it)->SetEncoder(enc);
+        (*it)->SetDecoder(dec);
+        (*it)->SetLog(&log);
+        (*it)->Perform();
+        (*it)->Print();
+        delete enc;
+        delete dec;
+        delete *it;
+    }
+   log.close();
+   tests.pop_back();
+}
diff --git a/src/modules/video_coding/codecs/vp8/test/vp8_unittest.cc b/src/modules/video_coding/codecs/vp8/test/vp8_unittest.cc
new file mode 100644
index 0000000..3c1c30e
--- /dev/null
+++ b/src/modules/video_coding/codecs/vp8/test/vp8_unittest.cc
@@ -0,0 +1,119 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vp8_unittest.h"
+
+#include <string.h>
+
+#include "modules/video_coding/codecs/test_framework/video_source.h"
+#include "gtest/gtest.h"
+#include "testsupport/fileutils.h"
+#include "vp8.h"
+
+using namespace webrtc;
+
+VP8UnitTest::VP8UnitTest()
+:
+UnitTest("VP8UnitTest", "Unit test")
+{
+}
+
+VP8UnitTest::VP8UnitTest(std::string name, std::string description)
+:
+UnitTest(name, description)
+{
+}
+
+WebRtc_UWord32
+VP8UnitTest::CodecSpecific_SetBitrate(WebRtc_UWord32 bitRate,
+                                      WebRtc_UWord32 /*frameRate*/)
+{
+    int rate = _encoder->SetRates(bitRate, _inst.maxFramerate);
+    EXPECT_TRUE(rate >= 0);
+    return rate;
+}
+
+void
+VP8UnitTest::Perform()
+{
+    Setup();
+    VP8Encoder* enc = (VP8Encoder*)_encoder;
+    VP8Decoder* dec = (VP8Decoder*)_decoder;
+
+    //----- Encoder parameter tests -----
+    //-- Calls before InitEncode() --
+    EXPECT_EQ(enc->Release(), WEBRTC_VIDEO_CODEC_OK);
+    EXPECT_EQ(enc->SetRates(_bitRate, _inst.maxFramerate),
+              WEBRTC_VIDEO_CODEC_UNINITIALIZED);
+
+    EXPECT_EQ(enc->SetRates(_bitRate, _inst.maxFramerate),
+              WEBRTC_VIDEO_CODEC_UNINITIALIZED);
+
+    VideoCodec codecInst;
+    memset(&codecInst, 0, sizeof(codecInst));
+    strncpy(codecInst.plName, "VP8", 31);
+    codecInst.plType = 126;
+    codecInst.maxBitrate = 0;
+    codecInst.minBitrate = 0;
+    codecInst.width = 1440;
+    codecInst.height = 1080;
+    codecInst.maxFramerate = 30;
+    codecInst.startBitrate = 300;
+    codecInst.codecSpecific.VP8.complexity = kComplexityNormal;
+    codecInst.codecSpecific.VP8.numberOfTemporalLayers = 1;
+    EXPECT_EQ(enc->InitEncode(&codecInst, 1, 1440), WEBRTC_VIDEO_CODEC_OK);
+
+
+    //-- Test two problematic level settings --
+    strncpy(codecInst.plName, "VP8", 31);
+    codecInst.plType = 126;
+    codecInst.maxBitrate = 0;
+    codecInst.minBitrate = 0;
+    codecInst.width = 352;
+    codecInst.height = 288;
+    codecInst.maxFramerate = 30;
+    codecInst.codecSpecific.VP8.complexity = kComplexityNormal;
+    codecInst.startBitrate = 300;
+    EXPECT_EQ(enc->InitEncode(&codecInst, 1, 1440), WEBRTC_VIDEO_CODEC_OK);
+
+    // Settings not correct for this profile
+    strncpy(codecInst.plName, "VP8", 31);
+    codecInst.plType = 126;
+    codecInst.maxBitrate = 0;
+    codecInst.minBitrate = 0;
+    codecInst.width = 176;
+    codecInst.height = 144;
+    codecInst.maxFramerate = 15;
+    codecInst.codecSpecific.VP8.complexity = kComplexityNormal;
+    codecInst.startBitrate = 300;
+    ASSERT_EQ(enc->InitEncode(&_inst, 1, 1440), WEBRTC_VIDEO_CODEC_OK);
+
+
+    //-- ProcessNewBitrate() errors --
+    // Bad bitrate.
+    EXPECT_EQ(enc->SetRates(_inst.maxBitrate + 1, _inst.maxFramerate),
+              WEBRTC_VIDEO_CODEC_OK);
+
+    //----- Decoder parameter tests -----
+    //-- Calls before InitDecode() --
+    EXPECT_TRUE(dec->Release() == 0);
+    ASSERT_TRUE(dec->InitDecode(&_inst, 1) == WEBRTC_VIDEO_CODEC_OK);
+
+    //-- SetCodecConfigParameters() errors --
+    unsigned char tmpBuf[128];
+    EXPECT_TRUE(dec->SetCodecConfigParameters(NULL, sizeof(tmpBuf)) == -1);
+    EXPECT_TRUE(dec->SetCodecConfigParameters(tmpBuf, 1) == -1);
+   // Garbage data.
+    EXPECT_TRUE(dec->SetCodecConfigParameters(tmpBuf, sizeof(tmpBuf)) == -1);
+
+    UnitTest::Perform();
+    Teardown();
+
+}
diff --git a/src/modules/video_coding/codecs/vp8/test/vp8_unittest.h b/src/modules/video_coding/codecs/vp8/test/vp8_unittest.h
new file mode 100644
index 0000000..f7e4697
--- /dev/null
+++ b/src/modules/video_coding/codecs/vp8/test/vp8_unittest.h
@@ -0,0 +1,29 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_VP8_UNITTEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_VP8_UNITTEST_H_
+
+#include "modules/video_coding/codecs/test_framework/unit_test.h"
+
+class VP8UnitTest : public UnitTest
+{
+public:
+    VP8UnitTest();
+    VP8UnitTest(std::string name, std::string description);
+    virtual void Perform();
+
+protected:
+    virtual WebRtc_UWord32 CodecSpecific_SetBitrate(
+        WebRtc_UWord32 bitRate,
+        WebRtc_UWord32 /*frameRate*/);
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODECS_VP8_VP8_UNITTEST_H_
diff --git a/src/modules/video_coding/codecs/vp8/vp8.cc b/src/modules/video_coding/codecs/vp8/vp8.cc
new file mode 100644
index 0000000..2016559
--- /dev/null
+++ b/src/modules/video_coding/codecs/vp8/vp8.cc
@@ -0,0 +1,1023 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ *
+ * This file contains the WEBRTC VP8 wrapper implementation
+ *
+ */
+#include "vp8.h"
+
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "module_common_types.h"
+#include "reference_picture_selection.h"
+#include "temporal_layers.h"
+#include "tick_util.h"
+#include "vpx/vpx_encoder.h"
+#include "vpx/vpx_decoder.h"
+#include "vpx/vp8cx.h"
+#include "vpx/vp8dx.h"
+
+enum { kVp8ErrorPropagationTh = 30 };
+
+namespace webrtc
+{
+
+VP8Encoder* VP8Encoder::Create() {
+  return new VP8Encoder();
+}
+
+VP8Encoder::VP8Encoder()
+    : encoded_image_(),
+      encoded_complete_callback_(NULL),
+      inited_(false),
+      timestamp_(0),
+      picture_id_(0),
+      feedback_mode_(false),
+      cpu_speed_(-6), // default value
+      rc_max_intra_target_(0),
+      token_partitions_(VP8_ONE_TOKENPARTITION),
+      rps_(new ReferencePictureSelection),
+#if WEBRTC_LIBVPX_VERSION >= 971
+      temporal_layers_(NULL),
+#endif
+      encoder_(NULL),
+      config_(NULL),
+      raw_(NULL) {
+  memset(&codec_, 0, sizeof(codec_));
+  uint32_t seed = static_cast<uint32_t>(TickTime::MillisecondTimestamp());
+  srand(seed);
+}
+
+VP8Encoder::~VP8Encoder() {
+  Release();
+  delete rps_;
+}
+
+int VP8Encoder::Release() {
+  if (encoded_image_._buffer != NULL) {
+    delete [] encoded_image_._buffer;
+    encoded_image_._buffer = NULL;
+  }
+  if (encoder_ != NULL) {
+    if (vpx_codec_destroy(encoder_)) {
+      return WEBRTC_VIDEO_CODEC_MEMORY;
+    }
+    delete encoder_;
+    encoder_ = NULL;
+  }
+  if (config_ != NULL) {
+    delete config_;
+    config_ = NULL;
+  }
+  if (raw_ != NULL) {
+    vpx_img_free(raw_);
+    raw_ = NULL;
+  }
+#if WEBRTC_LIBVPX_VERSION >= 971
+  if (temporal_layers_ != NULL) {
+    delete temporal_layers_;
+    temporal_layers_ = NULL;
+  }
+#endif
+  inited_ = false;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int VP8Encoder::SetRates(uint32_t new_bitrate_kbit, uint32_t new_framerate) {
+  if (!inited_) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  if (encoder_->err) {
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  if (new_framerate < 1) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  // update bit rate
+  if (codec_.maxBitrate > 0 && new_bitrate_kbit > codec_.maxBitrate) {
+    new_bitrate_kbit = codec_.maxBitrate;
+  }
+  config_->rc_target_bitrate = new_bitrate_kbit; // in kbit/s
+
+#if WEBRTC_LIBVPX_VERSION >= 971
+  if (temporal_layers_) {
+    temporal_layers_->ConfigureBitrates(new_bitrate_kbit, config_);
+  }
+#endif
+  codec_.maxFramerate = new_framerate;
+
+  // update encoder context
+  if (vpx_codec_enc_config_set(encoder_, config_)) {
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int VP8Encoder::InitEncode(const VideoCodec* inst,
+                           int number_of_cores,
+                           uint32_t /*max_payload_size*/) {
+  if (inst == NULL) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  if (inst->maxFramerate < 1) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  // allow zero to represent an unspecified maxBitRate
+  if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  if (inst->width < 1 || inst->height < 1) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  if (number_of_cores < 1) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  feedback_mode_ = inst->codecSpecific.VP8.feedbackModeOn;
+
+  int retVal = Release();
+  if (retVal < 0) {
+    return retVal;
+  }
+  if (encoder_ == NULL) {
+    encoder_ = new vpx_codec_ctx_t;
+  }
+  if (config_ == NULL) {
+    config_ = new vpx_codec_enc_cfg_t;
+  }
+  timestamp_ = 0;
+
+  codec_ = *inst;
+
+#if WEBRTC_LIBVPX_VERSION >= 971
+  if (inst->codecSpecific.VP8.numberOfTemporalLayers > 1) {
+    assert(temporal_layers_ == NULL);
+    temporal_layers_ =
+        new TemporalLayers(inst->codecSpecific.VP8.numberOfTemporalLayers);
+  }
+#endif
+  // random start 16 bits is enough.
+  picture_id_ = static_cast<uint16_t>(rand()) & 0x7FFF;
+
+  // allocate memory for encoded image
+  if (encoded_image_._buffer != NULL) {
+    delete [] encoded_image_._buffer;
+  }
+  encoded_image_._size = CalcBufferSize(kI420, codec_.width, codec_.height);
+  encoded_image_._buffer = new uint8_t[encoded_image_._size];
+  encoded_image_._completeFrame = true;
+
+  unsigned int align = 1;
+  if (codec_.width % 32 == 0) {
+    align = 32;
+  }
+  raw_ = vpx_img_alloc(NULL, IMG_FMT_I420, codec_.width, codec_.height, align);
+  // populate encoder configuration with default values
+  if (vpx_codec_enc_config_default(vpx_codec_vp8_cx(), config_, 0)) {
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  config_->g_w = codec_.width;
+  config_->g_h = codec_.height;
+  config_->rc_target_bitrate = inst->startBitrate;  // in kbit/s
+
+#if WEBRTC_LIBVPX_VERSION >= 971
+  if (temporal_layers_) {
+    temporal_layers_->ConfigureBitrates(inst->startBitrate, config_);
+  }
+#endif
+  // setting the time base of the codec
+  config_->g_timebase.num = 1;
+  config_->g_timebase.den = 90000;
+
+  // Set the error resilience mode according to user settings.
+  switch (inst->codecSpecific.VP8.resilience) {
+    case kResilienceOff:
+      config_->g_error_resilient = 0;
+#if WEBRTC_LIBVPX_VERSION >= 971
+      if (temporal_layers_) {
+        // Must be on for temporal layers.
+        config_->g_error_resilient = 1;
+      }
+#endif
+      break;
+    case kResilientStream:
+      config_->g_error_resilient = 1;  // TODO(holmer): Replace with
+      // VPX_ERROR_RESILIENT_DEFAULT when we
+      // drop support for libvpx 9.6.0.
+      break;
+    case kResilientFrames:
+#ifdef INDEPENDENT_PARTITIONS
+      config_->g_error_resilient = VPX_ERROR_RESILIENT_DEFAULT |
+      VPX_ERROR_RESILIENT_PARTITIONS;
+      break;
+#else
+      return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;  // Not supported
+#endif
+  }
+  config_->g_lag_in_frames = 0; // 0- no frame lagging
+
+  // Determining number of threads based on the image size
+  if (codec_.width * codec_.height > 704 * 576 && number_of_cores > 1) {
+    // 2 threads when larger than 4CIF
+    config_->g_threads = 2;
+  } else {
+    config_->g_threads = 1;
+  }
+
+  // rate control settings
+  config_->rc_dropframe_thresh = inst->codecSpecific.VP8.frameDroppingOn ?
+      30 : 0;
+  config_->rc_end_usage = VPX_CBR;
+  config_->g_pass = VPX_RC_ONE_PASS;
+  config_->rc_resize_allowed = inst->codecSpecific.VP8.automaticResizeOn ?
+      1 : 0;
+  config_->rc_min_quantizer = 2;
+  config_->rc_max_quantizer = 56;
+  config_->rc_undershoot_pct = 100;
+  config_->rc_overshoot_pct = 15;
+  config_->rc_buf_initial_sz = 500;
+  config_->rc_buf_optimal_sz = 600;
+  config_->rc_buf_sz = 1000;
+  // set the maximum target size of any key-frame.
+  rc_max_intra_target_ = MaxIntraTarget(config_->rc_buf_optimal_sz);
+
+  if (feedback_mode_) {
+    // Disable periodic key frames if we get feedback from the decoder
+    // through SLI and RPSI.
+    config_->kf_mode = VPX_KF_DISABLED;
+  } else {
+    config_->kf_mode = VPX_KF_AUTO;
+    config_->kf_max_dist = 3000;
+  }
+  switch (inst->codecSpecific.VP8.complexity) {
+    case kComplexityHigh:
+      cpu_speed_ = -5;
+      break;
+    case kComplexityHigher:
+      cpu_speed_ = -4;
+      break;
+    case kComplexityMax:
+      cpu_speed_ = -3;
+      break;
+    default:
+      cpu_speed_ = -6;
+      break;
+  }
+#ifdef WEBRTC_ANDROID
+  // On mobile platform, always set to -12 to leverage between cpu usage
+  // and video quality
+  cpu_speed_ = -12;
+#endif
+  rps_->Init();
+  return InitAndSetControlSettings(inst);
+}
+
+int VP8Encoder::InitAndSetControlSettings(const VideoCodec* inst) {
+  vpx_codec_flags_t flags = 0;
+  // TODO(holmer): We should make a smarter decision on the number of
+  // partitions. Eight is probably not the optimal number for low resolution
+  // video.
+
+#if WEBRTC_LIBVPX_VERSION >= 971
+  flags |= VPX_CODEC_USE_OUTPUT_PARTITION;
+#endif
+  if (vpx_codec_enc_init(encoder_, vpx_codec_vp8_cx(), config_, flags)) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  vpx_codec_control(encoder_, VP8E_SET_STATIC_THRESHOLD, 1);
+  vpx_codec_control(encoder_, VP8E_SET_CPUUSED, cpu_speed_);
+  vpx_codec_control(encoder_, VP8E_SET_TOKEN_PARTITIONS,
+                    static_cast<vp8e_token_partitions>(token_partitions_));
+  vpx_codec_control(encoder_, VP8E_SET_NOISE_SENSITIVITY,
+                    inst->codecSpecific.VP8.denoisingOn ? 1 : 0);
+#if WEBRTC_LIBVPX_VERSION >= 971
+  vpx_codec_control(encoder_, VP8E_SET_MAX_INTRA_BITRATE_PCT,
+                    rc_max_intra_target_);
+#endif
+  inited_ = true;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+uint32_t VP8Encoder::MaxIntraTarget(uint32_t optimalBuffersize) {
+  // Set max to the optimal buffer level (normalized by target BR),
+  // and scaled by a scalePar.
+  // Max target size = scalePar * optimalBufferSize * targetBR[Kbps].
+  // This values is presented in percentage of perFrameBw:
+  // perFrameBw = targetBR[Kbps] * 1000 / frameRate.
+  // The target in % is as follows:
+
+  float scalePar = 0.5;
+  uint32_t targetPct = optimalBuffersize * scalePar * codec_.maxFramerate / 10;
+
+  // Don't go below 3 times the per frame bandwidth.
+  const uint32_t minIntraTh = 300;
+  return (targetPct < minIntraTh) ? minIntraTh: targetPct;
+}
+
+int VP8Encoder::Encode(const VideoFrame& input_image,
+                       const CodecSpecificInfo* codec_specific_info,
+                       const VideoFrameType frame_type) {
+  if (!inited_) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  if (input_image.Buffer() == NULL) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  if (encoded_complete_callback_ == NULL) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+
+  // Check for change in frame size.
+  if (input_image.Width() != codec_.width ||
+      input_image.Height() != codec_.height) {
+    int ret = UpdateCodecFrameSize(input_image.Width(), input_image.Height());
+    if (ret < 0) {
+      return ret;
+    }
+  }
+  // Image in vpx_image_t format.
+  uint8_t* buffer = input_image.Buffer();
+  uint32_t v_plane_loc = codec_.height * codec_.width +
+    ((codec_.width + 1) >> 1) * ((codec_.height + 1) >> 1);
+  raw_->planes[PLANE_Y] = buffer;
+  raw_->planes[PLANE_U] = &buffer[codec_.width * codec_.height];
+  raw_->planes[PLANE_V] = &buffer[v_plane_loc];
+
+  int flags = 0;
+#if WEBRTC_LIBVPX_VERSION >= 971
+  if (temporal_layers_) {
+    flags |= temporal_layers_->EncodeFlags();
+  }
+#endif
+  bool send_keyframe = (frame_type == kKeyFrame);
+  if (send_keyframe) {
+    // Key frame request from caller.
+    // Will update both golden and alt-ref.
+    flags = VPX_EFLAG_FORCE_KF;
+  } else if (feedback_mode_ && codec_specific_info) {
+    // Handle RPSI and SLI messages and set up the appropriate encode flags.
+    bool sendRefresh = false;
+    if (codec_specific_info->codecType == kVideoCodecVP8) {
+      if (codec_specific_info->codecSpecific.VP8.hasReceivedRPSI) {
+        rps_->ReceivedRPSI(
+            codec_specific_info->codecSpecific.VP8.pictureIdRPSI);
+      }
+      if (codec_specific_info->codecSpecific.VP8.hasReceivedSLI) {
+        sendRefresh = rps_->ReceivedSLI(input_image.TimeStamp());
+      }
+    }
+    flags = rps_->EncodeFlags(picture_id_, sendRefresh,
+                              input_image.TimeStamp());
+  }
+
+  // TODO(holmer): Ideally the duration should be the timestamp diff of this
+  // frame and the next frame to be encoded, which we don't have. Instead we
+  // would like to use the duration of the previous frame. Unfortunately the
+  // rate control seems to be off with that setup. Using the average input
+  // frame rate to calculate an average duration for now.
+  assert(codec_.maxFramerate > 0);
+  uint32_t duration = 90000 / codec_.maxFramerate;
+  if (vpx_codec_encode(encoder_, raw_, timestamp_, duration, flags,
+                       VPX_DL_REALTIME)) {
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  timestamp_ += duration;
+
+#if WEBRTC_LIBVPX_VERSION >= 971
+  return GetEncodedPartitions(input_image);
+#else
+  return GetEncodedFrame(input_image);
+#endif
+}
+
+int VP8Encoder::UpdateCodecFrameSize(WebRtc_UWord32 input_image_width,
+                                     WebRtc_UWord32 input_image_height) {
+  codec_.width = input_image_width;
+  codec_.height = input_image_height;
+
+  raw_->w = codec_.width;
+  raw_->h = codec_.height;
+  raw_->d_w = codec_.width;
+  raw_->d_h = codec_.height;
+  raw_->stride[VPX_PLANE_Y] = codec_.width;
+  raw_->stride[VPX_PLANE_U] = codec_.width / 2;
+  raw_->stride[VPX_PLANE_V] = codec_.width / 2;
+  vpx_img_set_rect(raw_, 0, 0, codec_.width, codec_.height);
+
+  // Update encoder context for new frame size.
+  // Change of frame size will automatically trigger a key frame.
+  config_->g_w = codec_.width;
+  config_->g_h = codec_.height;
+  if (vpx_codec_enc_config_set(encoder_, config_)) {
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+void VP8Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific,
+                                       const vpx_codec_cx_pkt& pkt) {
+  assert(codec_specific != NULL);
+  codec_specific->codecType = kVideoCodecVP8;
+  CodecSpecificInfoVP8 *vp8Info = &(codec_specific->codecSpecific.VP8);
+  vp8Info->pictureId = picture_id_;
+  vp8Info->simulcastIdx = 0;
+  vp8Info->keyIdx = kNoKeyIdx;  // TODO(hlundin) populate this
+  vp8Info->nonReference = (pkt.data.frame.flags & VPX_FRAME_IS_DROPPABLE) != 0;
+#if WEBRTC_LIBVPX_VERSION >= 971
+  if (temporal_layers_) {
+    temporal_layers_->PopulateCodecSpecific(
+        (pkt.data.frame.flags & VPX_FRAME_IS_KEY) ? true : false, vp8Info);
+  } else {
+#endif
+    vp8Info->temporalIdx = kNoTemporalIdx;
+    vp8Info->layerSync = false;
+    vp8Info->tl0PicIdx = kNoTl0PicIdx;
+#if WEBRTC_LIBVPX_VERSION >= 971
+  }
+#endif
+  picture_id_ = (picture_id_ + 1) & 0x7FFF;  // prepare next
+}
+
+int VP8Encoder::GetEncodedFrame(const VideoFrame& input_image) {
+  vpx_codec_iter_t iter = NULL;
+  encoded_image_._frameType = kDeltaFrame;
+  const vpx_codec_cx_pkt_t *pkt= vpx_codec_get_cx_data(encoder_, &iter);
+  if (pkt == NULL) {
+    if (!encoder_->err) {
+      // dropped frame
+      return WEBRTC_VIDEO_CODEC_OK;
+    } else {
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+  } else if (pkt->kind == VPX_CODEC_CX_FRAME_PKT) {
+    CodecSpecificInfo codecSpecific;
+    PopulateCodecSpecific(&codecSpecific, *pkt);
+
+    assert(pkt->data.frame.sz <= encoded_image_._size);
+    memcpy(encoded_image_._buffer, pkt->data.frame.buf, pkt->data.frame.sz);
+    encoded_image_._length = uint32_t(pkt->data.frame.sz);
+    encoded_image_._encodedHeight = raw_->h;
+    encoded_image_._encodedWidth = raw_->w;
+
+    // Check if encoded frame is a key frame.
+    if (pkt->data.frame.flags & VPX_FRAME_IS_KEY) {
+      encoded_image_._frameType = kKeyFrame;
+      rps_->EncodedKeyFrame(picture_id_);
+    }
+
+    if (encoded_image_._length > 0) {
+      encoded_image_._timeStamp = input_image.TimeStamp();
+      // TODO(mikhal): Resolve confusion in terms.
+      encoded_image_.capture_time_ms_ = input_image.RenderTimeMs();
+
+      // Figure out where partition boundaries are located.
+      RTPFragmentationHeader fragInfo;
+      fragInfo.VerifyAndAllocateFragmentationHeader(2);
+      // two partitions: 1st and 2nd
+
+      // First partition
+      fragInfo.fragmentationOffset[0] = 0;
+      uint8_t *firstByte = encoded_image_._buffer;
+      uint32_t tmpSize = (firstByte[2] << 16) | (firstByte[1] << 8)
+                    | firstByte[0];
+      fragInfo.fragmentationLength[0] = (tmpSize >> 5) & 0x7FFFF;
+      fragInfo.fragmentationPlType[0] = 0; // not known here
+      fragInfo.fragmentationTimeDiff[0] = 0;
+
+      // Second partition
+      fragInfo.fragmentationOffset[1] = fragInfo.fragmentationLength[0];
+      fragInfo.fragmentationLength[1] = encoded_image_._length -
+          fragInfo.fragmentationLength[0];
+      fragInfo.fragmentationPlType[1] = 0; // not known here
+      fragInfo.fragmentationTimeDiff[1] = 0;
+
+      encoded_complete_callback_->Encoded(encoded_image_, &codecSpecific,
+                                        &fragInfo);
+    }
+    return WEBRTC_VIDEO_CODEC_OK;
+  }
+  return WEBRTC_VIDEO_CODEC_ERROR;
+}
+
+#if WEBRTC_LIBVPX_VERSION >= 971
+int VP8Encoder::GetEncodedPartitions(const VideoFrame& input_image) {
+  vpx_codec_iter_t iter = NULL;
+  int part_idx = 0;
+  encoded_image_._length = 0;
+  encoded_image_._frameType = kDeltaFrame;
+  RTPFragmentationHeader frag_info;
+  frag_info.VerifyAndAllocateFragmentationHeader((1 << token_partitions_) + 1);
+  CodecSpecificInfo codec_specific;
+
+  const vpx_codec_cx_pkt_t *pkt = NULL;
+  while ((pkt = vpx_codec_get_cx_data(encoder_, &iter)) != NULL) {
+    switch(pkt->kind) {
+      case VPX_CODEC_CX_FRAME_PKT: {
+        memcpy(&encoded_image_._buffer[encoded_image_._length],
+               pkt->data.frame.buf,
+               pkt->data.frame.sz);
+        frag_info.fragmentationOffset[part_idx] = encoded_image_._length;
+        frag_info.fragmentationLength[part_idx] =  pkt->data.frame.sz;
+        frag_info.fragmentationPlType[part_idx] = 0;  // not known here
+        frag_info.fragmentationTimeDiff[part_idx] = 0;
+        encoded_image_._length += pkt->data.frame.sz;
+        assert(encoded_image_._length <= encoded_image_._size);
+        ++part_idx;
+        break;
+      }
+      default: {
+        break;
+      }
+    }
+    // End of frame
+    if ((pkt->data.frame.flags & VPX_FRAME_IS_FRAGMENT) == 0) {
+      // check if encoded frame is a key frame
+      if (pkt->data.frame.flags & VPX_FRAME_IS_KEY) {
+          encoded_image_._frameType = kKeyFrame;
+          rps_->EncodedKeyFrame(picture_id_);
+      }
+      PopulateCodecSpecific(&codec_specific, *pkt);
+      break;
+    }
+  }
+  if (encoded_image_._length > 0) {
+    encoded_image_._timeStamp = input_image.TimeStamp();
+    encoded_image_.capture_time_ms_ = input_image.RenderTimeMs();
+    encoded_image_._encodedHeight = raw_->h;
+    encoded_image_._encodedWidth = raw_->w;
+    encoded_complete_callback_->Encoded(encoded_image_, &codec_specific,
+                                      &frag_info);
+  }
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+#endif
+
+int VP8Encoder::SetChannelParameters(uint32_t /*packet_loss*/, int rtt) {
+  rps_->SetRtt(rtt);
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int VP8Encoder::RegisterEncodeCompleteCallback(
+    EncodedImageCallback* callback) {
+  encoded_complete_callback_ = callback;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+VP8Decoder* VP8Decoder::Create() {
+  return new VP8Decoder();
+}
+
+VP8Decoder::VP8Decoder()
+    : decode_complete_callback_(NULL),
+      inited_(false),
+      feedback_mode_(false),
+      decoder_(NULL),
+      last_keyframe_(),
+      image_format_(VPX_IMG_FMT_NONE),
+      ref_frame_(NULL),
+      propagation_cnt_(-1),
+      latest_keyframe_complete_(false),
+      mfqe_enabled_(false) {
+  memset(&codec_, 0, sizeof(codec_));
+}
+
+VP8Decoder::~VP8Decoder() {
+  inited_ = true; // in order to do the actual release
+  Release();
+}
+
+int VP8Decoder::Reset() {
+  if (!inited_) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  InitDecode(&codec_, 1);
+  propagation_cnt_ = -1;
+  latest_keyframe_complete_ = false;
+  mfqe_enabled_ = false;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int VP8Decoder::InitDecode(const VideoCodec* inst, int number_of_cores) {
+  if (inst == NULL) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+  int ret_val = Release();
+  if (ret_val < 0 ) {
+    return ret_val;
+  }
+  if (decoder_ == NULL) {
+    decoder_ = new vpx_dec_ctx_t;
+  }
+  if (inst->codecType == kVideoCodecVP8) {
+    feedback_mode_ = inst->codecSpecific.VP8.feedbackModeOn;
+  }
+  vpx_codec_dec_cfg_t  cfg;
+  // Setting number of threads to a constant value (1)
+  cfg.threads = 1;
+  cfg.h = cfg.w = 0; // set after decode
+
+  vpx_codec_flags_t flags = 0;
+#if (WEBRTC_LIBVPX_VERSION >= 971) && !defined(WEBRTC_ANDROID)
+  flags = VPX_CODEC_USE_POSTPROC;
+  if (inst->codecSpecific.VP8.errorConcealmentOn) {
+    flags |= VPX_CODEC_USE_ERROR_CONCEALMENT;
+  }
+#ifdef INDEPENDENT_PARTITIONS
+  flags |= VPX_CODEC_USE_INPUT_PARTITION;
+#endif
+#endif
+
+  if (vpx_codec_dec_init(decoder_, vpx_codec_vp8_dx(), &cfg, flags)) {
+    return WEBRTC_VIDEO_CODEC_MEMORY;
+  }
+
+#if (WEBRTC_LIBVPX_VERSION >= 971) && !defined(WEBRTC_ANDROID)
+  vp8_postproc_cfg_t  ppcfg;
+  ppcfg.post_proc_flag = VP8_DEMACROBLOCK | VP8_DEBLOCK;
+  // Strength of deblocking filter. Valid range:[0,16]
+  ppcfg.deblocking_level = 3;
+  vpx_codec_control(decoder_, VP8_SET_POSTPROC, &ppcfg);
+#endif
+
+  // Save VideoCodec instance for later; mainly for duplicating the decoder.
+  codec_ = *inst;
+  propagation_cnt_ = -1;
+  latest_keyframe_complete_ = false;
+
+  inited_ = true;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int VP8Decoder::Decode(const EncodedImage& input_image,
+                       bool missing_frames,
+                       const RTPFragmentationHeader* fragmentation,
+                       const CodecSpecificInfo* codec_specific_info,
+                       int64_t /*render_time_ms*/) {
+  if (!inited_) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  if (decode_complete_callback_ == NULL) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  if (input_image._buffer == NULL && input_image._length > 0) {
+    // Reset to avoid requesting key frames too often.
+    if (propagation_cnt_ > 0)
+      propagation_cnt_ = 0;
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+
+#ifdef INDEPENDENT_PARTITIONS
+  if (fragmentation == NULL) {
+    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+  }
+#endif
+
+#if (WEBRTC_LIBVPX_VERSION >= 971) && !defined(WEBRTC_ANDROID)
+  if (!mfqe_enabled_ && codec_specific_info &&
+      codec_specific_info->codecSpecific.VP8.temporalIdx > 0) {
+    // Enable MFQE if we are receiving layers.
+    // temporalIdx is set in the jitter buffer according to what the RTP
+    // header says.
+    mfqe_enabled_ = true;
+    vp8_postproc_cfg_t  ppcfg;
+    ppcfg.post_proc_flag = VP8_MFQE | VP8_DEMACROBLOCK | VP8_DEBLOCK;
+    ppcfg.deblocking_level = 3;
+    vpx_codec_control(decoder_, VP8_SET_POSTPROC, &ppcfg);
+  }
+#endif
+
+  // Restrict error propagation using key frame requests. Disabled when
+  // the feedback mode is enabled (RPS).
+  // Reset on a key frame refresh.
+  if (!feedback_mode_) {
+    if (input_image._frameType == kKeyFrame && input_image._completeFrame)
+      propagation_cnt_ = -1;
+    // Start count on first loss.
+    else if ((!input_image._completeFrame || missing_frames) &&
+        propagation_cnt_ == -1)
+      propagation_cnt_ = 0;
+    if (propagation_cnt_ >= 0)
+      propagation_cnt_++;
+  }
+
+  vpx_codec_iter_t iter = NULL;
+  vpx_image_t* img;
+  int ret;
+
+  // Check for missing frames.
+  if (missing_frames) {
+    // Call decoder with zero data length to signal missing frames.
+    if (vpx_codec_decode(decoder_, NULL, 0, 0, VPX_DL_REALTIME)) {
+      // Reset to avoid requesting key frames too often.
+      if (propagation_cnt_ > 0)
+        propagation_cnt_ = 0;
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    // We don't render this frame.
+    vpx_codec_get_frame(decoder_, &iter);
+    iter = NULL;
+  }
+
+#ifdef INDEPENDENT_PARTITIONS
+  if (DecodePartitions(inputImage, fragmentation)) {
+    // Reset to avoid requesting key frames too often.
+    if (propagation_cnt_ > 0) {
+      propagation_cnt_ = 0;
+    }
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+#else
+  uint8_t* buffer = input_image._buffer;
+  if (input_image._length == 0) {
+    buffer = NULL; // Triggers full frame concealment.
+  }
+  if (vpx_codec_decode(decoder_,
+                       buffer,
+                       input_image._length,
+                       0,
+                       VPX_DL_REALTIME)) {
+    // Reset to avoid requesting key frames too often.
+    if (propagation_cnt_ > 0)
+      propagation_cnt_ = 0;
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+#endif
+
+  // Store encoded frame if key frame. (Used in Copy method.)
+  if (input_image._frameType == kKeyFrame && input_image._buffer != NULL) {
+    const uint32_t bytes_to_copy = input_image._length;
+    if (last_keyframe_._size < bytes_to_copy) {
+      delete [] last_keyframe_._buffer;
+      last_keyframe_._buffer = NULL;
+      last_keyframe_._size = 0;
+    }
+
+    uint8_t* temp_buffer = last_keyframe_._buffer; // Save buffer ptr.
+    uint32_t temp_size = last_keyframe_._size; // Save size.
+    last_keyframe_ = input_image; // Shallow copy.
+    last_keyframe_._buffer = temp_buffer; // Restore buffer ptr.
+    last_keyframe_._size = temp_size; // Restore buffer size.
+    if (!last_keyframe_._buffer) {
+      // Allocate memory.
+      last_keyframe_._size = bytes_to_copy;
+      last_keyframe_._buffer = new uint8_t[last_keyframe_._size];
+    }
+    // Copy encoded frame.
+    memcpy(last_keyframe_._buffer, input_image._buffer, bytes_to_copy);
+    last_keyframe_._length = bytes_to_copy;
+  }
+
+  img = vpx_codec_get_frame(decoder_, &iter);
+  ret = ReturnFrame(img, input_image._timeStamp);
+  if (ret != 0) {
+    // Reset to avoid requesting key frames too often.
+    if (ret < 0 && propagation_cnt_ > 0)
+      propagation_cnt_ = 0;
+    return ret;
+  }
+  if (feedback_mode_) {
+    // Whenever we receive an incomplete key frame all reference buffers will
+    // be corrupt. If that happens we must request new key frames until we
+    // decode a complete.
+    if (input_image._frameType == kKeyFrame)
+      latest_keyframe_complete_ = input_image._completeFrame;
+    if (!latest_keyframe_complete_)
+      return WEBRTC_VIDEO_CODEC_ERROR;
+
+    // Check for reference updates and last reference buffer corruption and
+    // signal successful reference propagation or frame corruption to the
+    // encoder.
+    int reference_updates = 0;
+    if (vpx_codec_control(decoder_, VP8D_GET_LAST_REF_UPDATES,
+                          &reference_updates)) {
+      // Reset to avoid requesting key frames too often.
+      if (propagation_cnt_ > 0)
+        propagation_cnt_ = 0;
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    int corrupted = 0;
+    if (vpx_codec_control(decoder_, VP8D_GET_FRAME_CORRUPTED, &corrupted)) {
+      // Reset to avoid requesting key frames too often.
+      if (propagation_cnt_ > 0)
+        propagation_cnt_ = 0;
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+    int16_t picture_id = -1;
+    if (codec_specific_info) {
+      picture_id = codec_specific_info->codecSpecific.VP8.pictureId;
+    }
+    if (picture_id > -1) {
+      if (((reference_updates & VP8_GOLD_FRAME) ||
+          (reference_updates & VP8_ALTR_FRAME)) && !corrupted) {
+        decode_complete_callback_->ReceivedDecodedReferenceFrame(picture_id);
+      }
+      decode_complete_callback_->ReceivedDecodedFrame(picture_id);
+    }
+    if (corrupted) {
+      // we can decode but with artifacts
+      return WEBRTC_VIDEO_CODEC_REQUEST_SLI;
+    }
+  }
+  // Check Vs. threshold
+  if (propagation_cnt_ > kVp8ErrorPropagationTh) {
+    // Reset to avoid requesting key frames too often.
+    propagation_cnt_ = 0;
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int VP8Decoder::DecodePartitions(
+    const EncodedImage& input_image,
+    const RTPFragmentationHeader* fragmentation) {
+  for (int i = 0; i < fragmentation->fragmentationVectorSize; ++i) {
+    const uint8_t* partition = input_image._buffer +
+        fragmentation->fragmentationOffset[i];
+    const uint32_t partition_length =
+        fragmentation->fragmentationLength[i];
+    if (vpx_codec_decode(decoder_,
+                         partition,
+                         partition_length,
+                         0,
+                         VPX_DL_REALTIME)) {
+      return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+  }
+  // Signal end of frame data. If there was no frame data this will trigger
+  // a full frame concealment.
+  if (vpx_codec_decode(decoder_, NULL, 0, 0, VPX_DL_REALTIME))
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int VP8Decoder::ReturnFrame(const vpx_image_t* img, uint32_t timestamp) {
+  if (img == NULL) {
+    // Decoder OK and NULL image => No show frame
+    return WEBRTC_VIDEO_CODEC_NO_OUTPUT;
+  }
+
+  uint32_t required_size = CalcBufferSize(kI420, img->d_w, img->d_h);
+  decoded_image_.VerifyAndAllocate(required_size);
+
+  uint8_t* buf;
+  uint32_t pos = 0;
+  uint32_t plane, y;
+  uint8_t* buffer = decoded_image_.Buffer();
+  for (plane = 0; plane < 3; plane++) {
+    unsigned int width = (plane ? (img->d_w + 1) >> 1 : img->d_w);
+    unsigned int height = (plane ? (img->d_h + 1) >> 1 : img->d_h);
+    buf = img->planes[plane];
+    for(y = 0; y < height; y++) {
+      memcpy(&buffer[pos], buf, width);
+      pos += width;
+      buf += img->stride[plane];
+    }
+  }
+
+  // Set decoded image parameters.
+  decoded_image_.SetHeight(img->d_h);
+  decoded_image_.SetWidth(img->d_w);
+  decoded_image_.SetLength(CalcBufferSize(kI420, img->d_w, img->d_h));
+  decoded_image_.SetTimeStamp(timestamp);
+  int ret = decode_complete_callback_->Decoded(decoded_image_);
+  if (ret != 0)
+    return ret;
+
+  // Remember image format for later
+  image_format_ = img->fmt;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int VP8Decoder::RegisterDecodeCompleteCallback(
+    DecodedImageCallback* callback) {
+  decode_complete_callback_ = callback;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+int VP8Decoder::Release() {
+  decoded_image_.Free();
+  if (last_keyframe_._buffer != NULL) {
+    delete [] last_keyframe_._buffer;
+    last_keyframe_._buffer = NULL;
+  }
+  if (decoder_ != NULL) {
+    if(vpx_codec_destroy(decoder_)) {
+      return WEBRTC_VIDEO_CODEC_MEMORY;
+    }
+    delete decoder_;
+    decoder_ = NULL;
+  }
+  if (ref_frame_ != NULL) {
+    vpx_img_free(&ref_frame_->img);
+    delete ref_frame_;
+    ref_frame_ = NULL;
+  }
+  inited_ = false;
+  return WEBRTC_VIDEO_CODEC_OK;
+}
+
+VideoDecoder* VP8Decoder::Copy() {
+  // Sanity checks.
+  if (!inited_) {
+    // Not initialized.
+    assert(false);
+    return NULL;
+  }
+  if (decoded_image_.Buffer() == NULL) {
+    // Nothing has been decoded before; cannot clone.
+    return NULL;
+  }
+  if (last_keyframe_._buffer == NULL) {
+    // Cannot clone if we have no key frame to start with.
+    return NULL;
+  }
+  // Create a new VideoDecoder object
+  VP8Decoder *copy = new VP8Decoder;
+
+  // Initialize the new decoder
+  if (copy->InitDecode(&codec_, 1) != WEBRTC_VIDEO_CODEC_OK) {
+    delete copy;
+    return NULL;
+  }
+  // Inject last key frame into new decoder.
+  if (vpx_codec_decode(copy->decoder_, last_keyframe_._buffer,
+                       last_keyframe_._length, NULL, VPX_DL_REALTIME)) {
+    delete copy;
+    return NULL;
+  }
+  // Allocate memory for reference image copy
+  assert(decoded_image_.Width() > 0);
+  assert(decoded_image_.Height() > 0);
+  assert(image_format_ > VPX_IMG_FMT_NONE);
+  // Check if frame format has changed.
+  if (ref_frame_ &&
+      (decoded_image_.Width() != ref_frame_->img.d_w ||
+          decoded_image_.Height() != ref_frame_->img.d_h ||
+          image_format_ != ref_frame_->img.fmt)) {
+    vpx_img_free(&ref_frame_->img);
+    delete ref_frame_;
+    ref_frame_ = NULL;
+  }
+
+
+  if (!ref_frame_) {
+    ref_frame_ = new vpx_ref_frame_t;
+
+    unsigned int align = 1;
+    if (decoded_image_.Width() % 32 == 0) {
+      align = 32;
+    }
+    if (!vpx_img_alloc(&ref_frame_->img,
+                       static_cast<vpx_img_fmt_t>(image_format_),
+                       decoded_image_.Width(), decoded_image_.Height(),
+                       align)) {
+      assert(false);
+      delete copy;
+      return NULL;
+    }
+  }
+  const vpx_ref_frame_type_t type_vec[] = { VP8_LAST_FRAME, VP8_GOLD_FRAME,
+      VP8_ALTR_FRAME };
+  for (uint32_t ix = 0;
+      ix < sizeof(type_vec) / sizeof(vpx_ref_frame_type_t); ++ix) {
+    ref_frame_->frame_type = type_vec[ix];
+    if (CopyReference(copy) < 0) {
+      delete copy;
+      return NULL;
+    }
+  }
+  // Copy all member variables (that are not set in initialization).
+  copy->feedback_mode_ = feedback_mode_;
+  copy->image_format_ = image_format_;
+  copy->last_keyframe_ = last_keyframe_; // Shallow copy.
+  // Allocate memory. (Discard copied _buffer pointer.)
+  copy->last_keyframe_._buffer = new uint8_t[last_keyframe_._size];
+  memcpy(copy->last_keyframe_._buffer, last_keyframe_._buffer,
+         last_keyframe_._length);
+
+  return static_cast<VideoDecoder*>(copy);
+}
+
+int VP8Decoder::CopyReference(VP8Decoder* copyTo) {
+  // The type of frame to copy should be set in ref_frame_->frame_type
+  // before the call to this function.
+  if (vpx_codec_control(decoder_, VP8_COPY_REFERENCE, ref_frame_)
+      != VPX_CODEC_OK) {
+    return -1;
+  }
+  if (vpx_codec_control(copyTo->decoder_, VP8_SET_REFERENCE, ref_frame_)
+      != VPX_CODEC_OK) {
+    return -1;
+  }
+  return 0;
+}
+
+} // namespace webrtc
diff --git a/src/modules/video_coding/codecs/vp8/vp8.gyp b/src/modules/video_coding/codecs/vp8/vp8.gyp
new file mode 100644
index 0000000..18b50ab
--- /dev/null
+++ b/src/modules/video_coding/codecs/vp8/vp8.gyp
@@ -0,0 +1,117 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'includes': [
+    '../../../../build/common.gypi',
+    '../test_framework/test_framework.gypi'
+  ],
+  'targets': [
+    {
+      'target_name': 'webrtc_vp8',
+      'type': '<(library)',
+      'dependencies': [
+        '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+        '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
+      ],
+      'include_dirs': [
+        'include',
+        '<(webrtc_root)/common_video/interface',
+        '<(webrtc_root)/modules/video_coding/codecs/interface',
+        '<(webrtc_root)/modules/interface',
+      ],
+      'conditions': [
+        # TODO(mikhal): Investigate this mechanism for handling differences
+        # between the Chromium and standalone builds.
+        # http://code.google.com/p/webrtc/issues/detail?id=201
+        ['build_with_chromium==1', {
+          'defines': [
+            'WEBRTC_LIBVPX_VERSION=960' # Bali
+          ],
+        },{
+          'defines': [
+            'WEBRTC_LIBVPX_VERSION=971' # Cayuga
+          ],
+          'sources': [
+            'temporal_layers.h',
+            'temporal_layers.cc',
+          ],
+        }],
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include',
+          '<(webrtc_root)/common_video/interface',
+          '<(webrtc_root)/modules/video_coding/codecs/interface',
+        ],
+      },
+      'sources': [
+        'reference_picture_selection.h',
+        'reference_picture_selection.cc',
+        'include/vp8.h',
+        'include/vp8_common_types.h',
+        'vp8.cc',
+      ],
+    },
+  ], # targets
+  'conditions': [
+    ['include_tests==1', {
+      'targets': [
+        {
+          'target_name': 'vp8_integrationtests',
+          'type': 'executable',
+          'dependencies': [
+            'test_framework',
+            'webrtc_vp8',
+            '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+            '<(webrtc_root)/test/test.gyp:test_support',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+          ],
+         'sources': [
+            # header files
+            'test/benchmark.h',
+            'test/dual_decoder_test.h',
+            'test/normal_async_test.h',
+            'test/packet_loss_test.h',
+            'test/rps_test.h',
+            'test/vp8_unittest.h',
+
+           # source files
+            'test/benchmark.cc',
+            'test/dual_decoder_test.cc',
+            'test/normal_async_test.cc',
+            'test/packet_loss_test.cc',
+            'test/rps_test.cc',
+            'test/tester.cc',
+            'test/vp8_unittest.cc',
+          ],
+        },
+        {
+          'target_name': 'vp8_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'webrtc_vp8',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+            '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+          ],
+          'include_dirs': [
+            '<(DEPTH)/third_party/libvpx/source/libvpx',
+          ],
+          'sources': [
+            'reference_picture_selection_unittest.cc',
+            'temporal_layers_unittest.cc',
+          ],
+        },
+      ], # targets
+    }], # include_tests
+  ],
+}
diff --git a/src/modules/video_coding/main/OWNERS b/src/modules/video_coding/main/OWNERS
new file mode 100644
index 0000000..7183cf2
--- /dev/null
+++ b/src/modules/video_coding/main/OWNERS
@@ -0,0 +1,4 @@
+stefan@webrtc.org
+mikhal@webrtc.org
+marpan@webrtc.org
+henrik.lundin@webrtc.org
diff --git a/src/modules/video_coding/main/interface/mock/mock_vcm_callbacks.h b/src/modules/video_coding/main/interface/mock/mock_vcm_callbacks.h
new file mode 100644
index 0000000..c84d5e7
--- /dev/null
+++ b/src/modules/video_coding/main/interface/mock/mock_vcm_callbacks.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_MAIN_INTERFACE_MOCK_MOCK_VCM_CALLBACKS_H_
+#define WEBRTC_MODULES_VIDEO_CODING_MAIN_INTERFACE_MOCK_MOCK_VCM_CALLBACKS_H_
+
+#include "gmock/gmock.h"
+#include "typedefs.h"
+
+namespace webrtc {
+
+class MockVCMFrameTypeCallback : public VCMFrameTypeCallback {
+ public:
+  MOCK_METHOD0(RequestKeyFrame, int32_t());
+  MOCK_METHOD1(SliceLossIndicationRequest,
+               WebRtc_Word32(const WebRtc_UWord64 pictureId));
+};
+
+class MockPacketRequestCallback : public VCMPacketRequestCallback {
+ public:
+  MOCK_METHOD2(ResendPackets, int32_t(const uint16_t* sequenceNumbers,
+                                      uint16_t length));
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_MAIN_INTERFACE_MOCK_MOCK_VCM_CALLBACKS_H_
diff --git a/src/modules/video_coding/main/interface/video_coding.h b/src/modules/video_coding/main/interface/video_coding.h
new file mode 100644
index 0000000..af05491
--- /dev/null
+++ b/src/modules/video_coding/main/interface/video_coding.h
@@ -0,0 +1,557 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_INTERFACE_VIDEO_CODING_H_
+#define WEBRTC_MODULES_INTERFACE_VIDEO_CODING_H_
+
+#include "modules/interface/module.h"
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/main/interface/video_coding_defines.h"
+
+namespace webrtc
+{
+
+class TickTimeBase;
+class VideoEncoder;
+class VideoDecoder;
+struct CodecSpecificInfo;
+
+class VideoCodingModule : public Module
+{
+public:
+    enum SenderNackMode {
+        kNackNone,
+        kNackAll,
+        kNackSelective
+    };
+
+    enum ReceiverRobustness {
+        kNone,
+        kHardNack,
+        kSoftNack,
+        kDualDecoder,
+        kReferenceSelection
+    };
+
+    enum DecodeErrors {
+        kNoDecodeErrors,
+        kAllowDecodeErrors
+    };
+
+    static VideoCodingModule* Create(const WebRtc_Word32 id);
+
+    static VideoCodingModule* Create(const WebRtc_Word32 id,
+                                     TickTimeBase* clock);
+
+    static void Destroy(VideoCodingModule* module);
+
+    // Get number of supported codecs
+    //
+    // Return value     : Number of supported codecs
+    static WebRtc_UWord8 NumberOfCodecs();
+
+    // Get supported codec settings with using id
+    //
+    // Input:
+    //      - listId         : Id or index of the codec to look up
+    //      - codec          : Memory where the codec settings will be stored
+    //
+    // Return value     : VCM_OK,              on success
+    //                    VCM_PARAMETER_ERROR  if codec not supported or id too high
+    static WebRtc_Word32 Codec(const WebRtc_UWord8 listId, VideoCodec* codec);
+
+    // Get supported codec settings using codec type
+    //
+    // Input:
+    //      - codecType      : The codec type to get settings for
+    //      - codec          : Memory where the codec settings will be stored
+    //
+    // Return value     : VCM_OK,              on success
+    //                    VCM_PARAMETER_ERROR  if codec not supported
+    static WebRtc_Word32 Codec(VideoCodecType codecType, VideoCodec* codec);
+
+    /*
+    *   Sender
+    */
+
+    // Any encoder-related state of VCM will be initialized to the
+    // same state as when the VCM was created. This will not interrupt
+    // or effect decoding functionality of VCM. VCM will lose all the
+    // encoding-related settings by calling this function.
+    // For instance, a send codec has to be registered again.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 InitializeSender() = 0;
+
+    // Registers a codec to be used for encoding. Calling this
+    // API multiple times overwrites any previously registered codecs.
+    //
+    // Input:
+    //      - sendCodec      : Settings for the codec to be registered.
+    //      - numberOfCores  : The number of cores the codec is allowed
+    //                         to use.
+    //      - maxPayloadSize : The maximum size each payload is allowed
+    //                                to have. Usually MTU - overhead.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 RegisterSendCodec(const VideoCodec* sendCodec,
+                                            WebRtc_UWord32 numberOfCores,
+                                            WebRtc_UWord32 maxPayloadSize) = 0;
+
+    // API to get the current send codec in use.
+    //
+    // Input:
+    //      - currentSendCodec : Address where the sendCodec will be written.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 SendCodec(VideoCodec* currentSendCodec) const = 0;
+
+    // API to get the current send codec type
+    //
+    // Return value      : Codec type, on success.
+    //                     kVideoCodecUnknown, on error or if no send codec is set
+    virtual VideoCodecType SendCodec() const = 0;
+
+    // Register an external encoder object. This can not be used together with
+    // external decoder callbacks.
+    //
+    // Input:
+    //      - externalEncoder : Encoder object to be used for encoding frames inserted
+    //                          with the AddVideoFrame API.
+    //      - payloadType     : The payload type bound which this encoder is bound to.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 RegisterExternalEncoder(VideoEncoder* externalEncoder,
+                                                  WebRtc_UWord8 payloadType,
+                                                  bool internalSource = false) = 0;
+
+    // API to get codec config parameters to be sent out-of-band to a receiver.
+    //
+    // Input:
+    //      - buffer          : Memory where the codec config parameters should be written.
+    //      - size            : Size of the memory available.
+    //
+    // Return value      : Number of bytes written, on success.
+    //                     < 0,                     on error.
+    virtual WebRtc_Word32 CodecConfigParameters(WebRtc_UWord8* buffer, WebRtc_Word32 size) = 0;
+
+    // API to get currently configured encoder target bitrate in kbit/s.
+    //
+    // Return value      : 0,   on success.
+    //                     < 0, on error.
+    virtual int Bitrate(unsigned int* bitrate) const = 0;
+
+    // API to get currently configured encoder target frame rate.
+    //
+    // Return value      : 0,   on success.
+    //                     < 0, on error.
+    virtual int FrameRate(unsigned int* framerate) const = 0;
+
+    // Sets the parameters describing the send channel. These parameters are inputs to the
+    // Media Optimization inside the VCM and also specifies the target bit rate for the
+    // encoder. Bit rate used by NACK should already be compensated for by the user.
+    //
+    // Input:
+    //      - availableBandWidth    : Band width available for the VCM in kbit/s.
+    //      - lossRate              : Fractions of lost packets the past second.
+    //                                (loss rate in percent = 100 * packetLoss / 255)
+    //      - rtt                   : Current round-trip time in ms.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 SetChannelParameters(WebRtc_UWord32 availableBandWidth,
+                                               WebRtc_UWord8 lossRate,
+                                               WebRtc_UWord32 rtt) = 0;
+
+    // Sets the parameters describing the receive channel. These parameters are inputs to the
+    // Media Optimization inside the VCM.
+    //
+    // Input:
+    //      - rtt                   : Current round-trip time in ms.
+    //                                with the most amount available bandwidth in a conference
+    //                                scenario
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 SetReceiveChannelParameters(WebRtc_UWord32 rtt) = 0;
+
+    // Register a transport callback which will be called to deliver the encoded data and
+    // side information.
+    //
+    // Input:
+    //      - transport  : The callback object to register.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 RegisterTransportCallback(VCMPacketizationCallback* transport) = 0;
+
+    // Register video output information callback which will be called to deliver information
+    // about the video stream produced by the encoder, for instance the average frame rate and
+    // bit rate.
+    //
+    // Input:
+    //      - outputInformation  : The callback object to register.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 RegisterSendStatisticsCallback(
+                                     VCMSendStatisticsCallback* sendStats) = 0;
+
+    // Register a video quality settings callback which will be called when
+    // frame rate/dimensions need to be updated for video quality optimization
+    //
+    // Input:
+    //		- videoQMSettings  : The callback object to register.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error
+    virtual WebRtc_Word32 RegisterVideoQMCallback(VCMQMSettingsCallback* videoQMSettings) = 0;
+
+    // Register a video protection callback which will be called to deliver
+    // the requested FEC rate and NACK status (on/off).
+    //
+    // Input:
+    //      - protection  : The callback object to register.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 RegisterProtectionCallback(VCMProtectionCallback* protection) = 0;
+
+    // Enable or disable a video protection method.
+    //
+    // Input:
+    //      - videoProtection  : The method to enable or disable.
+    //      - enable           : True if the method should be enabled, false if
+    //                           it should be disabled.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 SetVideoProtection(VCMVideoProtection videoProtection,
+                                             bool enable) = 0;
+
+    // Add one raw video frame to the encoder. This function does all the necessary
+    // processing, then decides what frame type to encode, or if the frame should be
+    // dropped. If the frame should be encoded it passes the frame to the encoder
+    // before it returns.
+    //
+    // Input:
+    //      - videoFrame        : Video frame to encode.
+    //      - codecSpecificInfo : Extra codec information, e.g., pre-parsed in-band signaling.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 AddVideoFrame(
+        const VideoFrame& videoFrame,
+        const VideoContentMetrics* contentMetrics = NULL,
+        const CodecSpecificInfo* codecSpecificInfo = NULL) = 0;
+
+    // Next frame encoded should be an intra frame (keyframe).
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 IntraFrameRequest() = 0;
+
+    // Frame Dropper enable. Can be used to disable the frame dropping when the encoder
+    // over-uses its bit rate. This API is designed to be used when the encoded frames
+    // are supposed to be stored to an AVI file, or when the I420 codec is used and the
+    // target bit rate shouldn't affect the frame rate.
+    //
+    // Input:
+    //      - enable            : True to enable the setting, false to disable it.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 EnableFrameDropper(bool enable) = 0;
+
+    // Sent frame counters
+    virtual WebRtc_Word32 SentFrameCount(VCMFrameCount& frameCount) const = 0;
+
+    /*
+    *   Receiver
+    */
+
+    // The receiver state of the VCM will be initialized to the
+    // same state as when the VCM was created. This will not interrupt
+    // or effect the send side functionality of VCM. VCM will lose all the
+    // decoding-related settings by calling this function. All frames
+    // inside the jitter buffer are flushed and the delay is reset.
+    // For instance, a receive codec has to be registered again.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 InitializeReceiver() = 0;
+
+    // Register possible receive codecs, can be called multiple times for different codecs.
+    // The module will automatically switch between registered codecs depending on the
+    // payload type of incoming frames. The actual decoder will be created when needed.
+    //
+    // Input:
+    //      - receiveCodec      : Settings for the codec to be registered.
+    //      - numberOfCores     : Number of CPU cores that the decoder is allowed to use.
+    //      - requireKeyFrame   : Set this to true if you don't want any delta frames
+    //                            to be decoded until the first key frame has been decoded.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 RegisterReceiveCodec(const VideoCodec* receiveCodec,
+                                               WebRtc_Word32 numberOfCores,
+                                               bool requireKeyFrame = false) = 0;
+
+    // Register an externally defined decoder/renderer object. Can be a decoder only or a
+    // decoder coupled with a renderer. Note that RegisterReceiveCodec must be called to
+    // be used for decoding incoming streams.
+    //
+    // Input:
+    //      - externalDecoder        : The external decoder/renderer object.
+    //      - payloadType            : The payload type which this decoder should be
+    //                                 registered to.
+    //      - internalRenderTiming   : True if the internal renderer (if any) of the decoder
+    //                                 object can make sure to render at a given time in ms.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 RegisterExternalDecoder(VideoDecoder* externalDecoder,
+                                                  WebRtc_UWord8 payloadType,
+                                                  bool internalRenderTiming) = 0;
+
+    // Register a receive callback. Will be called whenever there is a new frame ready
+    // for rendering.
+    //
+    // Input:
+    //      - receiveCallback        : The callback object to be used by the module when a
+    //                                 frame is ready for rendering.
+    //                                 De-register with a NULL pointer.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 RegisterReceiveCallback(VCMReceiveCallback* receiveCallback) = 0;
+
+    // Register a receive statistics callback which will be called to deliver information
+    // about the video stream received by the receiving side of the VCM, for instance the
+    // average frame rate and bit rate.
+    //
+    // Input:
+    //      - receiveStats  : The callback object to register.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 RegisterReceiveStatisticsCallback(
+                               VCMReceiveStatisticsCallback* receiveStats) = 0;
+
+    // Register a frame type request callback. This callback will be called when the
+    // module needs to request specific frame types from the send side.
+    //
+    // Input:
+    //      - frameTypeCallback      : The callback object to be used by the module when
+    //                                 requesting a specific type of frame from the send side.
+    //                                 De-register with a NULL pointer.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 RegisterFrameTypeCallback(
+                                  VCMFrameTypeCallback* frameTypeCallback) = 0;
+
+    // Register a frame storage callback. This callback will be called right before an
+    // encoded frame is given to the decoder. Useful for recording the incoming video sequence.
+    //
+    // Input:
+    //      - frameStorageCallback    : The callback object used by the module
+    //                                  to store a received encoded frame.
+    //
+    // Return value     : VCM_OK, on success.
+    //                    < 0,         on error.
+    virtual WebRtc_Word32 RegisterFrameStorageCallback(
+                             VCMFrameStorageCallback* frameStorageCallback) = 0;
+
+    // Registers a callback which is called whenever the receive side of the VCM
+    // encounters holes in the packet sequence and needs packets to be retransmitted.
+    //
+    // Input:
+    //              - callback      : The callback to be registered in the VCM.
+    //
+    // Return value     : VCM_OK,     on success.
+    //                    <0,              on error.
+    virtual WebRtc_Word32 RegisterPacketRequestCallback(
+                                        VCMPacketRequestCallback* callback) = 0;
+
+    // Waits for the next frame in the jitter buffer to become complete
+    // (waits no longer than maxWaitTimeMs), then passes it to the decoder for decoding.
+    // Should be called as often as possible to get the most out of the decoder.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 Decode(WebRtc_UWord16 maxWaitTimeMs = 200) = 0;
+
+    // Waits for the next frame in the dual jitter buffer to become complete
+    // (waits no longer than maxWaitTimeMs), then passes it to the dual decoder
+    // for decoding. This will never trigger a render callback. Should be
+    // called frequently, and as long as it returns 1 it should be called again
+    // as soon as possible.
+    //
+    // Return value      : 1,           if a frame was decoded
+    //                     0,           if no frame was decoded
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 DecodeDualFrame(WebRtc_UWord16 maxWaitTimeMs = 200) = 0;
+
+    // Decodes a frame and sets an appropriate render time in ms relative to the system time.
+    // Should be used in conjunction with VCMFrameStorageCallback.
+    //
+    // Input:
+    //      - frameFromStorage      : Encoded frame read from file or received through
+    //                                the VCMFrameStorageCallback callback.
+    //
+    // Return value:        : VCM_OK, on success
+    //                        < 0,         on error
+    virtual WebRtc_Word32 DecodeFromStorage(const EncodedVideoData& frameFromStorage) = 0;
+
+    // Reset the decoder state to the initial state.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 ResetDecoder() = 0;
+
+    // API to get the codec which is currently used for decoding by the module.
+    //
+    // Input:
+    //      - currentReceiveCodec      : Settings for the codec to be registered.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 ReceiveCodec(VideoCodec* currentReceiveCodec) const = 0;
+
+    // API to get the codec type currently used for decoding by the module.
+    //
+    // Return value      : codecy type,            on success.
+    //                     kVideoCodecUnknown, on error or if no receive codec is registered
+    virtual VideoCodecType ReceiveCodec() const = 0;
+
+    // Insert a parsed packet into the receiver side of the module. Will be placed in the
+    // jitter buffer waiting for the frame to become complete. Returns as soon as the packet
+    // has been placed in the jitter buffer.
+    //
+    // Input:
+    //      - incomingPayload      : Payload of the packet.
+    //      - payloadLength        : Length of the payload.
+    //      - rtpInfo              : The parsed header.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 IncomingPacket(const WebRtc_UWord8* incomingPayload,
+                                       WebRtc_UWord32 payloadLength,
+                                       const WebRtcRTPHeader& rtpInfo) = 0;
+
+    // Minimum playout delay (Used for lip-sync). This is the minimum delay required
+    // to sync with audio. Not included in  VideoCodingModule::Delay()
+    // Defaults to 0 ms.
+    //
+    // Input:
+    //      - minPlayoutDelayMs   : Additional delay in ms.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 SetMinimumPlayoutDelay(WebRtc_UWord32 minPlayoutDelayMs) = 0;
+
+    // Set the time required by the renderer to render a frame.
+    //
+    // Input:
+    //      - timeMS        : The time in ms required by the renderer to render a frame.
+    //
+    // Return value      : VCM_OK, on success.
+    //                     < 0,         on error.
+    virtual WebRtc_Word32 SetRenderDelay(WebRtc_UWord32 timeMS) = 0;
+
+    // The total delay desired by the VCM. Can be less than the minimum
+    // delay set with SetMinimumPlayoutDelay.
+    //
+    // Return value      : Total delay in ms, on success.
+    //                     < 0,               on error.
+    virtual WebRtc_Word32 Delay() const = 0;
+
+    // Get the received frame counters. Keeps track of the number of each frame type
+    // received since the start of the call.
+    //
+    // Output:
+    //      - frameCount      : Struct to be filled with the number of frames received.
+    //
+    // Return value           : VCM_OK,        on success.
+    //                          <0,                 on error.
+    virtual WebRtc_Word32 ReceivedFrameCount(VCMFrameCount& frameCount) const = 0;
+
+    // Returns the number of packets discarded by the jitter buffer due to being
+    // too late. This can include duplicated packets which arrived after the
+    // frame was sent to the decoder. Therefore packets which were prematurely
+    // NACKed will be counted.
+    virtual WebRtc_UWord32 DiscardedPackets() const = 0;
+
+
+    // Robustness APIs
+
+    // Set the sender RTX/NACK mode.
+    // Input:
+    //      - mode       : the selected NACK mode.
+    //
+    // Return value      : VCM_OK, on success;
+    //                     < 0, on error.
+    virtual int SetSenderNackMode(SenderNackMode mode) = 0;
+
+    // Set the sender reference picture selection (RPS) mode.
+    // Input:
+    //      - enable     : true or false, for enable and disable, respectively.
+    //
+    // Return value      : VCM_OK, on success;
+    //                     < 0, on error.
+    virtual int SetSenderReferenceSelection(bool enable) = 0;
+
+    // Set the sender forward error correction (FEC) mode.
+    // Input:
+    //      - enable     : true or false, for enable and disable, respectively.
+    //
+    // Return value      : VCM_OK, on success;
+    //                     < 0, on error.
+    virtual int SetSenderFEC(bool enable) = 0;
+
+    // Set the key frame period, or disable periodic key frames (I-frames).
+    // Input:
+    //      - periodMs   : period in ms; <= 0 to disable periodic key frames.
+    //
+    // Return value      : VCM_OK, on success;
+    //                     < 0, on error.
+    virtual int SetSenderKeyFramePeriod(int periodMs) = 0;
+
+    // Set the receiver robustness mode. The mode decides how the receiver
+    // responds to losses in the stream. The type of counter-measure (soft or
+    // hard NACK, dual decoder, RPS, etc.) is selected through the
+    // robustnessMode parameter. The errorMode parameter decides if it is
+    // allowed to display frames corrupted by losses. Note that not all
+    // combinations of the two parameters are feasible. An error will be
+    // returned for invalid combinations.
+    // Input:
+    //      - robustnessMode : selected robustness mode.
+    //      - errorMode      : selected error mode.
+    //
+    // Return value      : VCM_OK, on success;
+    //                     < 0, on error.
+    virtual int SetReceiverRobustnessMode(ReceiverRobustness robustnessMode,
+                                          DecodeErrors errorMode) = 0;
+
+    // Enables recording of debugging information.
+    virtual int StartDebugRecording(const char* file_name_utf8) = 0;
+
+    // Disables recording of debugging information.
+    virtual int StopDebugRecording() = 0;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_INTERFACE_VIDEO_CODING_H_
diff --git a/src/modules/video_coding/main/interface/video_coding_defines.h b/src/modules/video_coding/main/interface/video_coding_defines.h
new file mode 100644
index 0000000..324b24b
--- /dev/null
+++ b/src/modules/video_coding/main/interface/video_coding_defines.h
@@ -0,0 +1,190 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_INTERFACE_VIDEO_CODING_DEFINES_H_
+#define WEBRTC_MODULES_INTERFACE_VIDEO_CODING_DEFINES_H_
+
+#include "typedefs.h"
+#include "modules/interface/module_common_types.h"
+
+namespace webrtc {
+
+// Error codes
+#define VCM_FRAME_NOT_READY      3
+#define VCM_REQUEST_SLI          2
+#define VCM_MISSING_CALLBACK     1
+#define VCM_OK                   0
+#define VCM_GENERAL_ERROR       -1
+#define VCM_LEVEL_EXCEEDED      -2
+#define VCM_MEMORY              -3
+#define VCM_PARAMETER_ERROR     -4
+#define VCM_UNKNOWN_PAYLOAD     -5
+#define VCM_CODEC_ERROR         -6
+#define VCM_UNINITIALIZED       -7
+#define VCM_NO_CODEC_REGISTERED -8
+#define VCM_JITTER_BUFFER_ERROR -9
+#define VCM_OLD_PACKET_ERROR    -10
+#define VCM_NO_FRAME_DECODED    -11
+#define VCM_ERROR_REQUEST_SLI   -12
+#define VCM_NOT_IMPLEMENTED     -20
+
+#define VCM_RED_PAYLOAD_TYPE        96
+#define VCM_ULPFEC_PAYLOAD_TYPE     97
+#define VCM_VP8_PAYLOAD_TYPE       120
+#define VCM_I420_PAYLOAD_TYPE      124
+
+enum VCMNackProperties {
+  kNackHistoryLength = 450
+};
+
+enum VCMVideoProtection {
+  kProtectionNack,                // Both send-side and receive-side
+  kProtectionNackSender,          // Send-side only
+  kProtectionNackReceiver,        // Receive-side only
+  kProtectionDualDecoder,
+  kProtectionFEC,
+  kProtectionNackFEC,
+  kProtectionKeyOnLoss,
+  kProtectionKeyOnKeyLoss,
+  kProtectionPeriodicKeyFrames
+};
+
+enum VCMTemporalDecimation {
+  kBitrateOverUseDecimation,
+};
+
+struct VCMFrameCount {
+  WebRtc_UWord32 numKeyFrames;
+  WebRtc_UWord32 numDeltaFrames;
+};
+
+// Callback class used for sending data ready to be packetized
+class VCMPacketizationCallback {
+ public:
+  virtual WebRtc_Word32 SendData(
+      FrameType frameType,
+      WebRtc_UWord8 payloadType,
+      WebRtc_UWord32 timeStamp,
+      int64_t capture_time_ms,
+      const WebRtc_UWord8* payloadData,
+      WebRtc_UWord32 payloadSize,
+      const RTPFragmentationHeader& fragmentationHeader,
+      const RTPVideoHeader* rtpVideoHdr) = 0;
+ protected:
+  virtual ~VCMPacketizationCallback() {
+  }
+};
+
+// Callback class used for passing decoded frames which are ready to be rendered.
+class VCMFrameStorageCallback {
+ public:
+  virtual WebRtc_Word32 StoreReceivedFrame(
+      const EncodedVideoData& frameToStore) = 0;
+
+ protected:
+  virtual ~VCMFrameStorageCallback() {
+  }
+};
+
+// Callback class used for passing decoded frames which are ready to be rendered.
+class VCMReceiveCallback {
+ public:
+  virtual WebRtc_Word32 FrameToRender(VideoFrame& videoFrame) = 0;
+  virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(
+      const WebRtc_UWord64 pictureId) {
+    return -1;
+  }
+
+ protected:
+  virtual ~VCMReceiveCallback() {
+  }
+};
+
+// Callback class used for informing the user of the bit rate and frame rate produced by the
+// encoder.
+class VCMSendStatisticsCallback {
+ public:
+  virtual WebRtc_Word32 SendStatistics(const WebRtc_UWord32 bitRate,
+                                       const WebRtc_UWord32 frameRate) = 0;
+
+ protected:
+  virtual ~VCMSendStatisticsCallback() {
+  }
+};
+
+// Callback class used for informing the user of the incoming bit rate and frame rate.
+class VCMReceiveStatisticsCallback {
+ public:
+  virtual WebRtc_Word32 ReceiveStatistics(const WebRtc_UWord32 bitRate,
+                                          const WebRtc_UWord32 frameRate) = 0;
+
+ protected:
+  virtual ~VCMReceiveStatisticsCallback() {
+  }
+};
+
+// Callback class used for telling the user about how to configure the FEC,
+// and the rates sent the last second is returned to the VCM.
+class VCMProtectionCallback {
+ public:
+  virtual int ProtectionRequest(const FecProtectionParams* delta_params,
+                                const FecProtectionParams* key_params,
+                                uint32_t* sent_video_rate_bps,
+                                uint32_t* sent_nack_rate_bps,
+                                uint32_t* sent_fec_rate_bps) = 0;
+
+ protected:
+  virtual ~VCMProtectionCallback() {
+  }
+};
+
+// Callback class used for telling the user about what frame type needed to continue decoding.
+// Typically a key frame when the stream has been corrupted in some way.
+class VCMFrameTypeCallback {
+ public:
+  virtual WebRtc_Word32 RequestKeyFrame() = 0;
+  virtual WebRtc_Word32 SliceLossIndicationRequest(
+      const WebRtc_UWord64 pictureId) {
+    return -1;
+  }
+
+ protected:
+  virtual ~VCMFrameTypeCallback() {
+  }
+};
+
+// Callback class used for telling the user about which packet sequence numbers are currently
+// missing and need to be resent.
+class VCMPacketRequestCallback {
+ public:
+  virtual WebRtc_Word32 ResendPackets(const WebRtc_UWord16* sequenceNumbers,
+                                      WebRtc_UWord16 length) = 0;
+
+ protected:
+  virtual ~VCMPacketRequestCallback() {
+  }
+};
+
+// Callback used to inform the user of the the desired resolution
+// as subscribed by Media Optimization (Quality Modes)
+class VCMQMSettingsCallback {
+ public:
+  virtual WebRtc_Word32 SetVideoQMSettings(const WebRtc_UWord32 frameRate,
+                                           const WebRtc_UWord32 width,
+                                           const WebRtc_UWord32 height) = 0;
+
+ protected:
+  virtual ~VCMQMSettingsCallback() {
+  }
+};
+
+}  // namespace webrtc
+
+#endif // WEBRTC_MODULES_INTERFACE_VIDEO_CODING_DEFINES_H_
diff --git a/src/modules/video_coding/main/source/codec_database.cc b/src/modules/video_coding/main/source/codec_database.cc
new file mode 100644
index 0000000..fbb9836
--- /dev/null
+++ b/src/modules/video_coding/main/source/codec_database.cc
@@ -0,0 +1,722 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "codec_database.h"
+
+#include <assert.h>
+
+#include "../../../../engine_configurations.h"
+#include "internal_defines.h"
+#include "trace.h"
+
+#if defined(_WIN32)
+// Don't warn for strncpy being unsecure.
+// switch statement contains 'default' but no 'case' labels
+#pragma warning(disable:4996; disable:4065)
+#endif
+
+// Supported codecs
+#ifdef VIDEOCODEC_VP8
+#include "vp8.h"
+#endif
+#ifdef VIDEOCODEC_I420
+#include "i420.h"
+#endif
+
+namespace webrtc
+{
+
+VCMDecoderMapItem::VCMDecoderMapItem(VideoCodec* settings,
+                                     WebRtc_UWord32 numberOfCores,
+                                     bool requireKeyFrame)
+:
+_settings(settings),
+_numberOfCores(numberOfCores),
+_requireKeyFrame(requireKeyFrame)
+{
+}
+
+VCMExtDecoderMapItem::VCMExtDecoderMapItem(VideoDecoder* externalDecoderInstance,
+                                           WebRtc_UWord8 payloadType,
+                                           bool internalRenderTiming)
+:
+_payloadType(payloadType),
+_externalDecoderInstance(externalDecoderInstance),
+_internalRenderTiming(internalRenderTiming)
+{
+}
+
+VCMCodecDataBase::VCMCodecDataBase(WebRtc_Word32 id):
+_id(id),
+_numberOfCores(0),
+_maxPayloadSize(kDefaultPayloadSize),
+_periodicKeyFrames(false),
+_currentEncIsExternal(false),
+_sendCodec(),
+_receiveCodec(),
+_externalPayloadType(0),
+_externalEncoder(NULL),
+_internalSource(false),
+_ptrEncoder(NULL),
+_ptrDecoder(NULL),
+_currentDecIsExternal(false),
+_decMap(),
+_decExternalMap()
+{
+    //
+}
+
+VCMCodecDataBase::~VCMCodecDataBase()
+{
+    Reset();
+}
+
+WebRtc_Word32
+VCMCodecDataBase::Reset()
+{
+    WebRtc_Word32 ret = ResetReceiver();
+    if (ret < 0)
+    {
+        return ret;
+    }
+    ret = ResetSender();
+    if (ret < 0)
+    {
+        return ret;
+    }
+   return VCM_OK;
+}
+
+WebRtc_Word32
+VCMCodecDataBase::ResetSender()
+{
+    DeleteEncoder();
+    _periodicKeyFrames = false;
+    return VCM_OK;
+}
+
+VCMGenericEncoder* VCMCodecDataBase::CreateEncoder(
+    const VideoCodecType type) const {
+
+    switch(type)
+    {
+#ifdef VIDEOCODEC_VP8
+        case kVideoCodecVP8:
+            return new VCMGenericEncoder(*(VP8Encoder::Create()));
+#endif
+#ifdef VIDEOCODEC_I420
+        case kVideoCodecI420:
+            return new VCMGenericEncoder(*(new I420Encoder));
+#endif
+        default:
+            return NULL;
+    }
+}
+
+void
+VCMCodecDataBase::DeleteEncoder()
+{
+    if (_ptrEncoder)
+    {
+        _ptrEncoder->Release();
+        if (!_currentEncIsExternal)
+        {
+            delete &_ptrEncoder->_encoder;
+        }
+        delete _ptrEncoder;
+        _ptrEncoder = NULL;
+    }
+}
+
+WebRtc_UWord8
+VCMCodecDataBase::NumberOfCodecs()
+{
+    return VCM_NUM_VIDEO_CODECS_AVAILABLE;
+}
+
+WebRtc_Word32
+VCMCodecDataBase::Codec(WebRtc_UWord8 listId, VideoCodec *settings)
+{
+    if (settings == NULL)
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+
+    if (listId >= VCM_NUM_VIDEO_CODECS_AVAILABLE)
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+    memset(settings, 0, sizeof(VideoCodec));
+    switch (listId)
+    {
+#ifdef VIDEOCODEC_VP8
+    case VCM_VP8_IDX:
+        {
+            strncpy(settings->plName, "VP8", 4);
+            settings->codecType = kVideoCodecVP8;
+            // 96 to 127 dynamic payload types for video codecs
+            settings->plType = VCM_VP8_PAYLOAD_TYPE;
+            settings->startBitrate = 100;
+            settings->minBitrate = VCM_MIN_BITRATE;
+            settings->maxBitrate = 0;
+            settings->maxFramerate = VCM_DEFAULT_FRAME_RATE;
+            settings->width = VCM_DEFAULT_CODEC_WIDTH;
+            settings->height = VCM_DEFAULT_CODEC_HEIGHT;
+            settings->numberOfSimulcastStreams = 0;
+            settings->codecSpecific.VP8.resilience = kResilientStream;
+            settings->codecSpecific.VP8.numberOfTemporalLayers = 1;
+            settings->codecSpecific.VP8.denoisingOn = true;
+            settings->codecSpecific.VP8.errorConcealmentOn = false;
+            settings->codecSpecific.VP8.automaticResizeOn = false;
+            settings->codecSpecific.VP8.frameDroppingOn = true;
+            break;
+        }
+#endif
+#ifdef VIDEOCODEC_I420
+    case VCM_I420_IDX:
+        {
+            strncpy(settings->plName, "I420", 5);
+            settings->codecType = kVideoCodecI420;
+            // 96 to 127 dynamic payload types for video codecs
+            settings->plType = VCM_I420_PAYLOAD_TYPE;
+            // Bitrate needed for this size and framerate
+            settings->startBitrate = 3*VCM_DEFAULT_CODEC_WIDTH*
+                                       VCM_DEFAULT_CODEC_HEIGHT*8*
+                                       VCM_DEFAULT_FRAME_RATE/1000/2;
+            settings->maxBitrate = settings->startBitrate;
+            settings->maxFramerate = VCM_DEFAULT_FRAME_RATE;
+            settings->width = VCM_DEFAULT_CODEC_WIDTH;
+            settings->height = VCM_DEFAULT_CODEC_HEIGHT;
+            settings->minBitrate = VCM_MIN_BITRATE;
+            settings->numberOfSimulcastStreams = 0;
+            break;
+        }
+#endif
+    default:
+        {
+            return VCM_PARAMETER_ERROR;
+        }
+    }
+
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VCMCodecDataBase::Codec(VideoCodecType codecType, VideoCodec* settings)
+{
+    for (int i = 0; i < VCMCodecDataBase::NumberOfCodecs(); i++)
+    {
+        const WebRtc_Word32 ret = VCMCodecDataBase::Codec(i, settings);
+        if (ret != VCM_OK)
+        {
+            return ret;
+        }
+        if (codecType == settings->codecType)
+        {
+            return VCM_OK;
+        }
+    }
+    return VCM_PARAMETER_ERROR;
+}
+
+// assuming only one registered encoder - since only one used, no need for more
+WebRtc_Word32
+VCMCodecDataBase::RegisterSendCodec(const VideoCodec* sendCodec,
+                                    WebRtc_UWord32 numberOfCores,
+                                    WebRtc_UWord32 maxPayloadSize)
+ {
+    if (sendCodec == NULL)
+    {
+        return VCM_UNINITIALIZED;
+    }
+    if (maxPayloadSize == 0)
+    {
+        maxPayloadSize = kDefaultPayloadSize;
+    }
+    if (numberOfCores > 32)
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+    if (sendCodec->plType <= 0)
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+    // Make sure the start bit rate is sane...
+    if (sendCodec->startBitrate > 1000000)
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+    if (sendCodec->codecType == kVideoCodecUnknown)
+    {
+        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+    _numberOfCores = numberOfCores;
+    _maxPayloadSize = maxPayloadSize;
+
+    memcpy(&_sendCodec, sendCodec, sizeof(VideoCodec));
+
+    if (_sendCodec.maxBitrate == 0)
+    {
+        // max is one bit per pixel
+        _sendCodec.maxBitrate = ((WebRtc_Word32)_sendCodec.height *
+                                 (WebRtc_Word32)_sendCodec.width *
+                                 (WebRtc_Word32)_sendCodec.maxFramerate) / 1000;
+        if (_sendCodec.startBitrate > _sendCodec.maxBitrate)
+        {
+            // but if the customer tries to set a higher start bit rate we will increase
+            // the max accordingly
+            _sendCodec.maxBitrate = _sendCodec.startBitrate;
+        }
+    }
+
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VCMCodecDataBase::SendCodec(VideoCodec* currentSendCodec) const
+{
+    WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideoCoding, VCMId(_id), "SendCodec");
+
+    if(_ptrEncoder == NULL)
+    {
+        return VCM_UNINITIALIZED;
+    }
+    memcpy(currentSendCodec, &_sendCodec, sizeof(VideoCodec));
+    return VCM_OK;
+}
+
+VideoCodecType
+VCMCodecDataBase::SendCodec() const
+{
+    WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideoCoding, VCMId(_id),
+            "SendCodec type");
+    if (_ptrEncoder == NULL)
+    {
+        return kVideoCodecUnknown;
+    }
+    return _sendCodec.codecType;
+}
+
+WebRtc_Word32
+VCMCodecDataBase::DeRegisterExternalEncoder(WebRtc_UWord8 payloadType, bool& wasSendCodec)
+{
+    wasSendCodec = false;
+    if (_externalPayloadType != payloadType)
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+    if (_sendCodec.plType == payloadType)
+    {
+        //De-register as send codec if needed
+        DeleteEncoder();
+        memset(&_sendCodec, 0, sizeof(VideoCodec));
+        _currentEncIsExternal = false;
+        wasSendCodec = true;
+    }
+    _externalPayloadType = 0;
+    _externalEncoder = NULL;
+    _internalSource = false;
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VCMCodecDataBase::RegisterExternalEncoder(VideoEncoder* externalEncoder,
+                                          WebRtc_UWord8 payloadType,
+                                          bool internalSource)
+{
+    // since only one encoder can be used at a given time,
+    // only one external encoder can be registered/used
+    _externalEncoder = externalEncoder;
+    _externalPayloadType = payloadType;
+    _internalSource = internalSource;
+
+    return VCM_OK;
+}
+
+VCMGenericEncoder*
+VCMCodecDataBase::SetEncoder(const VideoCodec* settings,
+                             VCMEncodedFrameCallback* VCMencodedFrameCallback)
+
+{
+    // if encoder exists, will destroy it and create new one
+    DeleteEncoder();
+
+    if (settings->plType == _externalPayloadType)
+    {
+        // External encoder
+        _ptrEncoder = new VCMGenericEncoder(*_externalEncoder, _internalSource);
+        _currentEncIsExternal = true;
+    }
+    else
+    {
+        _ptrEncoder = CreateEncoder(settings->codecType);
+        _currentEncIsExternal = false;
+    }
+    VCMencodedFrameCallback->SetPayloadType(settings->plType);
+
+    if (_ptrEncoder == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError,
+                     webrtc::kTraceVideoCoding,
+                     VCMId(_id),
+                     "Failed to create encoder: %s.",
+                     settings->plName);
+        return NULL;
+    }
+    if (_ptrEncoder->InitEncode(settings, _numberOfCores, _maxPayloadSize) < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError,
+                     webrtc::kTraceVideoCoding,
+                     VCMId(_id),
+                     "Failed to initialize encoder: %s.",
+                     settings->plName);
+        DeleteEncoder();
+        return NULL;
+    }
+    else if (_ptrEncoder->RegisterEncodeCallback(VCMencodedFrameCallback) < 0)
+    {
+        DeleteEncoder();
+        return NULL;
+    }
+    // Intentionally don't check return value since the encoder registration
+    // shouldn't fail because the codec doesn't support changing the
+    // periodic key frame setting.
+    _ptrEncoder->SetPeriodicKeyFrames(_periodicKeyFrames);
+    return _ptrEncoder;
+}
+
+WebRtc_Word32
+VCMCodecDataBase::SetPeriodicKeyFrames(bool enable)
+{
+    _periodicKeyFrames = enable;
+    if (_ptrEncoder != NULL)
+    {
+        return _ptrEncoder->SetPeriodicKeyFrames(_periodicKeyFrames);
+    }
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VCMCodecDataBase::RegisterReceiveCodec(const VideoCodec* receiveCodec,
+                                       WebRtc_UWord32 numberOfCores,
+                                       bool requireKeyFrame)
+{
+    WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCoding, VCMId(_id),
+                 "Codec: %s, Payload type %d, Height %d, Width %d, Bitrate %d, Framerate %d.",
+                 receiveCodec->plName, receiveCodec->plType,
+                 receiveCodec->height, receiveCodec->width,
+                 receiveCodec->startBitrate, receiveCodec->maxFramerate);
+
+    // check if payload value already exists, if so  - erase old and insert new
+    DeRegisterReceiveCodec(receiveCodec->plType);
+    if (receiveCodec->codecType == kVideoCodecUnknown)
+    {
+        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+    VideoCodec* newReceiveCodec = new VideoCodec(*receiveCodec);
+    _decMap[receiveCodec->plType] =
+        new VCMDecoderMapItem(newReceiveCodec, numberOfCores, requireKeyFrame);
+
+    return VCM_OK;
+}
+
+WebRtc_Word32 VCMCodecDataBase::DeRegisterReceiveCodec(
+    WebRtc_UWord8 payloadType)
+{
+    DecoderMap::iterator it = _decMap.find(payloadType);
+    if (it == _decMap.end())
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+    VCMDecoderMapItem* decItem = (*it).second;
+    delete decItem->_settings;
+    delete decItem;
+    _decMap.erase(it);
+    if (_receiveCodec.plType == payloadType)
+    {
+        // This codec is currently in use.
+        memset(&_receiveCodec, 0, sizeof(VideoCodec));
+        _currentDecIsExternal = false;
+    }
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VCMCodecDataBase::ResetReceiver()
+{
+    ReleaseDecoder(_ptrDecoder);
+    _ptrDecoder = NULL;
+    memset(&_receiveCodec, 0, sizeof(VideoCodec));
+    DecoderMap::iterator it = _decMap.begin();
+    while (it != _decMap.end()) {
+        if ((*it).second->_settings != NULL)
+        {
+            delete (*it).second->_settings;
+        }
+        delete (*it).second;
+        _decMap.erase(it);
+        it = _decMap.begin();
+    }
+    ExternalDecoderMap::iterator exterit = _decExternalMap.begin();
+    while (exterit != _decExternalMap.end()) {
+        delete (*exterit).second;
+        _decExternalMap.erase(exterit);
+        exterit = _decExternalMap.begin();
+    }
+
+    _currentDecIsExternal = false;
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VCMCodecDataBase::DeRegisterExternalDecoder(WebRtc_UWord8 payloadType)
+{
+    ExternalDecoderMap::iterator it = _decExternalMap.find(payloadType);
+    if (it == _decExternalMap.end())
+    {
+        // Not found
+        return VCM_PARAMETER_ERROR;
+    }
+    if (_receiveCodec.plType == payloadType)
+    {
+        // Release it if it was registered and in use
+        ReleaseDecoder(_ptrDecoder);
+        _ptrDecoder = NULL;
+    }
+    DeRegisterReceiveCodec(payloadType);
+    delete (*it).second;
+    _decExternalMap.erase(it);
+    return VCM_OK;
+}
+
+// Add the external encoder object to the list of external decoders.
+// Won't be registered as a receive codec until RegisterReceiveCodec is called.
+WebRtc_Word32
+VCMCodecDataBase::RegisterExternalDecoder(VideoDecoder* externalDecoder,
+                                          WebRtc_UWord8 payloadType,
+                                          bool internalRenderTiming)
+{
+    // check if payload value already exists, if so  - erase old and insert new
+    VCMExtDecoderMapItem* extDecoder = new VCMExtDecoderMapItem(externalDecoder,
+                                                                payloadType,
+                                                                internalRenderTiming);
+    if (extDecoder == NULL)
+    {
+        return VCM_MEMORY;
+    }
+    DeRegisterExternalDecoder(payloadType);
+    _decExternalMap[payloadType] = extDecoder;
+
+    return VCM_OK;
+}
+
+bool
+VCMCodecDataBase::DecoderRegistered() const
+{
+    return !_decMap.empty();
+}
+
+WebRtc_Word32
+VCMCodecDataBase::ReceiveCodec(VideoCodec* currentReceiveCodec) const
+{
+    if (_ptrDecoder == NULL)
+    {
+        return VCM_NO_FRAME_DECODED;
+    }
+    memcpy(currentReceiveCodec, &_receiveCodec, sizeof(VideoCodec));
+    return VCM_OK;
+}
+
+VideoCodecType
+VCMCodecDataBase::ReceiveCodec() const
+{
+    if (_ptrDecoder == NULL)
+    {
+        return kVideoCodecUnknown;
+    }
+    return _receiveCodec.codecType;
+}
+
+VCMGenericDecoder*
+VCMCodecDataBase::SetDecoder(WebRtc_UWord8 payloadType,
+                             VCMDecodedFrameCallback& callback)
+{
+    if (payloadType == _receiveCodec.plType || payloadType == 0)
+    {
+        return _ptrDecoder;
+    }
+    // check for exisitng decoder, if exists - delete
+    if (_ptrDecoder)
+    {
+        ReleaseDecoder(_ptrDecoder);
+        _ptrDecoder = NULL;
+        memset(&_receiveCodec, 0, sizeof(VideoCodec));
+    }
+    _ptrDecoder = CreateAndInitDecoder(payloadType, _receiveCodec,
+                                       _currentDecIsExternal);
+    if (_ptrDecoder == NULL)
+    {
+        return NULL;
+    }
+    if (_ptrDecoder->RegisterDecodeCompleteCallback(&callback) < 0)
+    {
+        ReleaseDecoder(_ptrDecoder);
+        _ptrDecoder = NULL;
+        memset(&_receiveCodec, 0, sizeof(VideoCodec));
+        return NULL;
+    }
+    return _ptrDecoder;
+}
+
+VCMGenericDecoder*
+VCMCodecDataBase::CreateAndInitDecoder(WebRtc_UWord8 payloadType,
+                                       VideoCodec& newCodec,
+                                       bool &external) const
+{
+    VCMDecoderMapItem* decoderItem = FindDecoderItem(payloadType);
+    if (decoderItem == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding, VCMId(_id),
+                     "Unknown payload type: %u", payloadType);
+        return NULL;
+    }
+    VCMGenericDecoder* ptrDecoder = NULL;
+    VCMExtDecoderMapItem* externalDecItem = FindExternalDecoderItem(
+        payloadType);
+    if (externalDecItem != NULL)
+    {
+        // External codec
+        ptrDecoder = new VCMGenericDecoder(
+            *externalDecItem->_externalDecoderInstance,
+            _id,
+            true);
+        external = true;
+    }
+    else
+    {
+        // create decoder
+        ptrDecoder = CreateDecoder(decoderItem->_settings->codecType);
+        external = false;
+    }
+    if (ptrDecoder == NULL)
+    {
+        return NULL;
+    }
+
+    if (ptrDecoder->InitDecode(decoderItem->_settings,
+                               decoderItem->_numberOfCores,
+                               decoderItem->_requireKeyFrame) < 0)
+    {
+        ReleaseDecoder(ptrDecoder);
+        return NULL;
+    }
+    memcpy(&newCodec, decoderItem->_settings, sizeof(VideoCodec));
+    return ptrDecoder;
+}
+
+VCMGenericDecoder*
+VCMCodecDataBase::CreateDecoderCopy() const
+{
+    if (_ptrDecoder == NULL)
+    {
+        return NULL;
+    }
+    VideoDecoder* decoderCopy = _ptrDecoder->_decoder.Copy();
+    if (decoderCopy == NULL)
+    {
+        return NULL;
+    }
+    return new VCMGenericDecoder(*decoderCopy, _id, _ptrDecoder->External());
+}
+
+void
+VCMCodecDataBase::CopyDecoder(const VCMGenericDecoder& decoder)
+{
+    VideoDecoder* decoderCopy = decoder._decoder.Copy();
+    if (decoderCopy != NULL)
+    {
+        VCMDecodedFrameCallback* cb = _ptrDecoder->_callback;
+        ReleaseDecoder(_ptrDecoder);
+        _ptrDecoder = new VCMGenericDecoder(*decoderCopy, _id,
+                                            decoder.External());
+        if (cb && _ptrDecoder->RegisterDecodeCompleteCallback(cb))
+        {
+            assert(false);
+        }
+    }
+}
+
+bool
+VCMCodecDataBase::RenderTiming() const
+{
+    bool renderTiming = true;
+    if (_currentDecIsExternal)
+    {
+        VCMExtDecoderMapItem* extItem = FindExternalDecoderItem(_receiveCodec.plType);
+        renderTiming = extItem->_internalRenderTiming;
+    }
+    return renderTiming;
+}
+
+void
+VCMCodecDataBase::ReleaseDecoder(VCMGenericDecoder* decoder) const
+{
+    if (decoder != NULL)
+    {
+        assert(&decoder->_decoder != NULL);
+        decoder->Release();
+        if (!decoder->External())
+        {
+            delete &decoder->_decoder;
+        }
+        delete decoder;
+    }
+}
+
+VCMDecoderMapItem*
+VCMCodecDataBase::FindDecoderItem(WebRtc_UWord8 payloadType) const
+{
+    DecoderMap::const_iterator it = _decMap.find(payloadType);
+    if (it != _decMap.end())
+    {
+        return (*it).second;
+    }
+    return NULL;
+}
+
+VCMExtDecoderMapItem*
+VCMCodecDataBase::FindExternalDecoderItem(WebRtc_UWord8 payloadType) const
+{
+    ExternalDecoderMap::const_iterator it = _decExternalMap.find(payloadType);
+    if (it != _decExternalMap.end())
+    {
+        return (*it).second;
+    }
+    return NULL;
+}
+
+VCMGenericDecoder*
+VCMCodecDataBase::CreateDecoder(VideoCodecType type) const
+{
+    switch(type)
+    {
+#ifdef VIDEOCODEC_VP8
+    case kVideoCodecVP8:
+        return new VCMGenericDecoder(*(VP8Decoder::Create()), _id);
+#endif
+#ifdef VIDEOCODEC_I420
+    case kVideoCodecI420:
+         return new VCMGenericDecoder(*(new I420Decoder), _id);
+#endif
+    default:
+        return NULL;
+    }
+}
+}
diff --git a/src/modules/video_coding/main/source/codec_database.h b/src/modules/video_coding/main/source/codec_database.h
new file mode 100644
index 0000000..aab8229
--- /dev/null
+++ b/src/modules/video_coding/main/source/codec_database.h
@@ -0,0 +1,210 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODEC_DATABASE_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODEC_DATABASE_H_
+
+#include <map>
+
+#include "modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "modules/video_coding/main/source/generic_decoder.h"
+#include "modules/video_coding/main/source/generic_encoder.h"
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+enum VCMCodecDBProperties
+{
+    kDefaultPayloadSize = 1440
+};
+
+class VCMDecoderMapItem {
+public:
+    VCMDecoderMapItem(VideoCodec* settings,
+                      WebRtc_UWord32 numberOfCores,
+                      bool requireKeyFrame);
+
+    VideoCodec*     _settings;
+    WebRtc_UWord32  _numberOfCores;
+    bool            _requireKeyFrame;
+};
+
+class VCMExtDecoderMapItem {
+public:
+    VCMExtDecoderMapItem(VideoDecoder* externalDecoderInstance,
+                         WebRtc_UWord8 payloadType,
+                         bool internalRenderTiming);
+
+    WebRtc_UWord8   _payloadType;
+    VideoDecoder*   _externalDecoderInstance;
+    bool            _internalRenderTiming;
+};
+
+/*******************************/
+/* VCMCodecDataBase class      */
+/*******************************/
+class VCMCodecDataBase
+{
+public:
+    VCMCodecDataBase(WebRtc_Word32 id);
+    ~VCMCodecDataBase();
+    /**
+    * Release codecdatabase - release all memory for both send and receive side
+    */
+    WebRtc_Word32 Reset();
+    /**
+    * Sender Side
+    */
+    /**
+    * Returns the number of supported codecs (or -1 in case of error).
+    */
+    static WebRtc_UWord8 NumberOfCodecs();
+    /**
+    * Get supported codecs with ID
+    * Input Values:
+    *       listnr    : Requested codec id number
+    *       codec_inst: Pointer to the struct in which the returned codec information is copied
+    * Return Values: 0 if successful, otherwise
+    */
+    static WebRtc_Word32 Codec(WebRtc_UWord8 listId, VideoCodec* settings);
+    static WebRtc_Word32 Codec(VideoCodecType codecType, VideoCodec* settings);
+    /**
+    * Reset Sender side
+    */
+    WebRtc_Word32 ResetSender();
+    /**
+    * Setting the sender side codec and initiaiting the desired codec given the VideoCodec
+    * struct.
+    * Return Value:	0 if the codec and the settings are supported, otherwise
+    */
+    WebRtc_Word32 RegisterSendCodec(const VideoCodec* sendCodec,
+                                  WebRtc_UWord32 numberOfCores,
+                                  WebRtc_UWord32 maxPayloadSize);
+    /**
+    * Get current send side codec. Relevant for internal codecs only.
+    */
+    WebRtc_Word32 SendCodec(VideoCodec* currentSendCodec) const;
+    /**
+    * Get current send side codec type. Relevant for internal codecs only.
+    */
+    VideoCodecType SendCodec() const;
+    /**
+    * Register external encoder - current assumption - if one is registered then it will also
+    * be used, and therefore it is also initialized
+    * Return value: A pointer to the encoder on success, or null, in case of an error.
+    */
+    WebRtc_Word32 DeRegisterExternalEncoder(WebRtc_UWord8 payloadType, bool& wasSendCodec);
+    WebRtc_Word32 RegisterExternalEncoder(VideoEncoder* externalEncoder,
+                                        WebRtc_UWord8 payloadType,
+                                        bool internalSource);
+    /**
+    * Returns a encoder given a payloadname - to be used with internal encoders only.
+    * Special cases:
+    *	 Encoder exists -  If payload matches, returns existing one, otherwise,
+    *	 deletes existing one and creates new one.
+    *	 No match found / Error - returns NULL.
+    */
+    VCMGenericEncoder* SetEncoder(const VideoCodec* settings,
+                                  VCMEncodedFrameCallback* VCMencodedFrameCallback);
+
+    WebRtc_Word32 SetPeriodicKeyFrames(bool enable);
+
+    bool InternalSource() const;
+
+    /*
+    * Receiver Side
+    */
+    WebRtc_Word32 ResetReceiver();
+    /**
+    * Register external decoder/render object
+    */
+    WebRtc_Word32 DeRegisterExternalDecoder(WebRtc_UWord8 payloadType);
+    WebRtc_Word32 RegisterExternalDecoder(VideoDecoder* externalDecoder,
+                                        WebRtc_UWord8 payloadType,
+                                        bool internalRenderTiming);
+
+    bool DecoderRegistered() const;
+    /**
+    * Register recieve codec
+    */
+    WebRtc_Word32 RegisterReceiveCodec(const VideoCodec* receiveCodec,
+                                     WebRtc_UWord32 numberOfCores,
+                                     bool requireKeyFrame);
+    WebRtc_Word32 DeRegisterReceiveCodec(WebRtc_UWord8 payloadType);
+    /**
+    * Get current receive side codec. Relevant for internal codecs only.
+    */
+    WebRtc_Word32 ReceiveCodec(VideoCodec* currentReceiveCodec) const;
+    /**
+    * Get current receive side codec type. Relevant for internal codecs only.
+    */
+    VideoCodecType ReceiveCodec() const;
+    /**
+    * Returns a decoder given which matches a payload type.
+    * Special cases:
+    *	 Decoder exists -  If payload matches, returns existing one, otherwise, deletes
+    *	 existing one, and creates new one.
+    *	 No match found / Error - returns NULL.
+    */
+    VCMGenericDecoder* SetDecoder(WebRtc_UWord8 payloadType, VCMDecodedFrameCallback& callback);
+
+    VCMGenericDecoder* CreateAndInitDecoder(WebRtc_UWord8 payloadType,
+                                            VideoCodec& newCodec,
+                                            bool &external) const;
+
+    VCMGenericDecoder* CreateDecoderCopy() const;
+
+    void ReleaseDecoder(VCMGenericDecoder* decoder) const;
+
+    void CopyDecoder(const VCMGenericDecoder& decoder);
+
+    bool RenderTiming() const;
+
+protected:
+    /**
+    * Create an internal Encoder given a codec type
+    */
+    VCMGenericEncoder* CreateEncoder(const VideoCodecType type) const;
+
+    void DeleteEncoder();
+    /*
+    * Create an internal Decoder given a codec type
+    */
+    VCMGenericDecoder* CreateDecoder(VideoCodecType type) const;
+
+    VCMDecoderMapItem* FindDecoderItem(WebRtc_UWord8 payloadType) const;
+
+    VCMExtDecoderMapItem* FindExternalDecoderItem(WebRtc_UWord8 payloadType) const;
+
+private:
+    typedef std::map<uint8_t, VCMDecoderMapItem*> DecoderMap;
+    typedef std::map<uint8_t, VCMExtDecoderMapItem*> ExternalDecoderMap;
+    WebRtc_Word32 _id;
+    WebRtc_UWord32 _numberOfCores;
+    WebRtc_UWord32 _maxPayloadSize;
+    bool _periodicKeyFrames;
+    bool _currentEncIsExternal;
+    VideoCodec _sendCodec;
+    VideoCodec _receiveCodec;
+    WebRtc_UWord8 _externalPayloadType;
+    VideoEncoder* _externalEncoder;
+    bool _internalSource;
+    VCMGenericEncoder* _ptrEncoder;
+    VCMGenericDecoder* _ptrDecoder;
+    bool _currentDecIsExternal;
+    DecoderMap _decMap;
+    ExternalDecoderMap _decExternalMap;
+}; // end of VCMCodecDataBase class definition
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODEC_DATABASE_H_
diff --git a/src/modules/video_coding/main/source/codec_timer.cc b/src/modules/video_coding/main/source/codec_timer.cc
new file mode 100644
index 0000000..1d112fa
--- /dev/null
+++ b/src/modules/video_coding/main/source/codec_timer.cc
@@ -0,0 +1,133 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "codec_timer.h"
+
+#include <assert.h>
+
+namespace webrtc
+{
+
+VCMCodecTimer::VCMCodecTimer()
+:
+_filteredMax(0),
+_firstDecodeTime(true),
+_shortMax(0),
+_history()
+{
+    Reset();
+}
+
+WebRtc_Word32 VCMCodecTimer::StopTimer(WebRtc_Word64 startTimeMs, WebRtc_Word64 nowMs)
+{
+    const WebRtc_Word32 timeDiff = static_cast<WebRtc_Word32>(nowMs - startTimeMs);
+    MaxFilter(timeDiff, nowMs);
+    return timeDiff;
+}
+
+void VCMCodecTimer::Reset()
+{
+    _filteredMax = 0;
+    _firstDecodeTime = true;
+    _shortMax = 0;
+    for (int i=0; i < MAX_HISTORY_SIZE; i++)
+    {
+        _history[i].shortMax = 0;
+        _history[i].timeMs = -1;
+    }
+}
+
+// Update the max-value filter
+void VCMCodecTimer::MaxFilter(WebRtc_Word32 decodeTime, WebRtc_Word64 nowMs)
+{
+    if (!_firstDecodeTime)
+    {
+        UpdateMaxHistory(decodeTime, nowMs);
+        ProcessHistory(nowMs);
+    }
+    else
+    {
+        _firstDecodeTime = false;
+    }
+}
+
+void
+VCMCodecTimer::UpdateMaxHistory(WebRtc_Word32 decodeTime, WebRtc_Word64 now)
+{
+    if (_history[0].timeMs >= 0 &&
+        now - _history[0].timeMs < SHORT_FILTER_MS)
+    {
+        if (decodeTime > _shortMax)
+        {
+            _shortMax = decodeTime;
+        }
+    }
+    else
+    {
+        // Only add a new value to the history once a second
+        if(_history[0].timeMs == -1)
+        {
+            // First, no shift
+            _shortMax = decodeTime;
+        }
+        else
+        {
+            // Shift
+            for(int i = (MAX_HISTORY_SIZE - 2); i >= 0 ; i--)
+            {
+                _history[i+1].shortMax = _history[i].shortMax;
+                _history[i+1].timeMs = _history[i].timeMs;
+            }
+        }
+        if (_shortMax == 0)
+        {
+            _shortMax = decodeTime;
+        }
+
+        _history[0].shortMax = _shortMax;
+        _history[0].timeMs = now;
+        _shortMax = 0;
+    }
+}
+
+void
+VCMCodecTimer::ProcessHistory(WebRtc_Word64 nowMs)
+{
+    _filteredMax = _shortMax;
+    if (_history[0].timeMs == -1)
+    {
+        return;
+    }
+    for (int i=0; i < MAX_HISTORY_SIZE; i++)
+    {
+        if (_history[i].timeMs == -1)
+        {
+            break;
+        }
+        if (nowMs - _history[i].timeMs > MAX_HISTORY_SIZE * SHORT_FILTER_MS)
+        {
+            // This sample (and all samples after this) is too old
+            break;
+        }
+        if (_history[i].shortMax > _filteredMax)
+        {
+            // This sample is the largest one this far into the history
+            _filteredMax = _history[i].shortMax;
+        }
+    }
+}
+
+// Get the maximum observed time within a time window
+WebRtc_Word32 VCMCodecTimer::RequiredDecodeTimeMs(FrameType /*frameType*/) const
+{
+    return _filteredMax;
+}
+
+}
diff --git a/src/modules/video_coding/main/source/codec_timer.h b/src/modules/video_coding/main/source/codec_timer.h
new file mode 100644
index 0000000..e03c5bf
--- /dev/null
+++ b/src/modules/video_coding/main/source/codec_timer.h
@@ -0,0 +1,61 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CODEC_TIMER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CODEC_TIMER_H_
+
+#include "typedefs.h"
+#include "module_common_types.h"
+
+namespace webrtc
+{
+
+// MAX_HISTORY_SIZE * SHORT_FILTER_MS defines the window size in milliseconds
+#define MAX_HISTORY_SIZE 20
+#define SHORT_FILTER_MS 1000
+
+class VCMShortMaxSample
+{
+public:
+    VCMShortMaxSample() : shortMax(0), timeMs(-1) {};
+
+    WebRtc_Word32     shortMax;
+    WebRtc_Word64     timeMs;
+};
+
+class VCMCodecTimer
+{
+public:
+    VCMCodecTimer();
+
+    // Updates and returns the max filtered decode time.
+    WebRtc_Word32 StopTimer(WebRtc_Word64 startTimeMs, WebRtc_Word64 nowMs);
+
+    // Empty the list of timers.
+    void Reset();
+
+    // Get the required decode time in ms.
+    WebRtc_Word32 RequiredDecodeTimeMs(FrameType frameType) const;
+
+private:
+    void UpdateMaxHistory(WebRtc_Word32 decodeTime, WebRtc_Word64 now);
+    void MaxFilter(WebRtc_Word32 newTime, WebRtc_Word64 nowMs);
+    void ProcessHistory(WebRtc_Word64 nowMs);
+
+    WebRtc_Word32                     _filteredMax;
+    bool                              _firstDecodeTime;
+    WebRtc_Word32                     _shortMax;
+    VCMShortMaxSample                 _history[MAX_HISTORY_SIZE];
+
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_CODEC_TIMER_H_
diff --git a/src/modules/video_coding/main/source/content_metrics_processing.cc b/src/modules/video_coding/main/source/content_metrics_processing.cc
new file mode 100644
index 0000000..99160c9
--- /dev/null
+++ b/src/modules/video_coding/main/source/content_metrics_processing.cc
@@ -0,0 +1,125 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/main/source/content_metrics_processing.h"
+
+#include <math.h>
+
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/main/interface/video_coding_defines.h"
+
+namespace webrtc {
+//////////////////////////////////
+/// VCMContentMetricsProcessing //
+//////////////////////////////////
+
+VCMContentMetricsProcessing::VCMContentMetricsProcessing()
+    : recursive_avg_factor_(1 / 150.0f),  // matched to  30fps.
+      frame_cnt_uniform_avg_(0),
+      avg_motion_level_(0.0f),
+      avg_spatial_level_(0.0f) {
+  recursive_avg_ = new VideoContentMetrics();
+  uniform_avg_ = new VideoContentMetrics();
+}
+
+VCMContentMetricsProcessing::~VCMContentMetricsProcessing() {
+  delete recursive_avg_;
+  delete uniform_avg_;
+}
+
+int VCMContentMetricsProcessing::Reset() {
+  recursive_avg_->Reset();
+  uniform_avg_->Reset();
+  frame_cnt_uniform_avg_ = 0;
+  avg_motion_level_  = 0.0f;
+  avg_spatial_level_ = 0.0f;
+  return VCM_OK;
+}
+
+void VCMContentMetricsProcessing::UpdateFrameRate(uint32_t frameRate) {
+  // Update factor for recursive averaging.
+  recursive_avg_factor_ = static_cast<float> (1000.0f) /
+      static_cast<float>(frameRate *  kQmMinIntervalMs);
+}
+
+VideoContentMetrics* VCMContentMetricsProcessing::LongTermAvgData() {
+  return recursive_avg_;
+}
+
+VideoContentMetrics* VCMContentMetricsProcessing::ShortTermAvgData() {
+  if (frame_cnt_uniform_avg_ == 0) {
+    return NULL;
+  }
+  // Two metrics are used: motion and spatial level.
+  uniform_avg_->motion_magnitude = avg_motion_level_ /
+      static_cast<float>(frame_cnt_uniform_avg_);
+  uniform_avg_->spatial_pred_err = avg_spatial_level_ /
+      static_cast<float>(frame_cnt_uniform_avg_);
+  return uniform_avg_;
+}
+
+void VCMContentMetricsProcessing::ResetShortTermAvgData() {
+  // Reset.
+  avg_motion_level_ = 0.0f;
+  avg_spatial_level_ = 0.0f;
+  frame_cnt_uniform_avg_ = 0;
+}
+
+int VCMContentMetricsProcessing::UpdateContentData(
+    const VideoContentMetrics *contentMetrics) {
+  if (contentMetrics == NULL) {
+    return VCM_OK;
+  }
+  return ProcessContent(contentMetrics);
+}
+
+int VCMContentMetricsProcessing::ProcessContent(
+    const VideoContentMetrics *contentMetrics) {
+  // Update the recursive averaged metrics: average is over longer window
+  // of time: over QmMinIntervalMs ms.
+  UpdateRecursiveAvg(contentMetrics);
+  // Update the uniform averaged metrics: average is over shorter window
+  // of time: based on ~RTCP reports.
+  UpdateUniformAvg(contentMetrics);
+  return VCM_OK;
+}
+
+void VCMContentMetricsProcessing::UpdateUniformAvg(
+    const VideoContentMetrics *contentMetrics) {
+  // Update frame counter.
+  frame_cnt_uniform_avg_ += 1;
+  // Update averaged metrics: motion and spatial level are used.
+  avg_motion_level_ += contentMetrics->motion_magnitude;
+  avg_spatial_level_ +=  contentMetrics->spatial_pred_err;
+  return;
+}
+
+void VCMContentMetricsProcessing::UpdateRecursiveAvg(
+    const VideoContentMetrics *contentMetrics) {
+
+  // Spatial metrics: 2x2, 1x2(H), 2x1(V).
+  recursive_avg_->spatial_pred_err = (1 - recursive_avg_factor_) *
+      recursive_avg_->spatial_pred_err +
+      recursive_avg_factor_ * contentMetrics->spatial_pred_err;
+
+  recursive_avg_->spatial_pred_err_h = (1 - recursive_avg_factor_) *
+      recursive_avg_->spatial_pred_err_h +
+      recursive_avg_factor_ * contentMetrics->spatial_pred_err_h;
+
+  recursive_avg_->spatial_pred_err_v = (1 - recursive_avg_factor_) *
+      recursive_avg_->spatial_pred_err_v +
+      recursive_avg_factor_ * contentMetrics->spatial_pred_err_v;
+
+  // Motion metric: Derived from NFD (normalized frame difference).
+  recursive_avg_->motion_magnitude = (1 - recursive_avg_factor_) *
+      recursive_avg_->motion_magnitude +
+      recursive_avg_factor_ * contentMetrics->motion_magnitude;
+}
+}  // end of namespace
diff --git a/src/modules/video_coding/main/source/content_metrics_processing.h b/src/modules/video_coding/main/source/content_metrics_processing.h
new file mode 100644
index 0000000..0317add
--- /dev/null
+++ b/src/modules/video_coding/main/source/content_metrics_processing.h
@@ -0,0 +1,76 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_CONTENT_METRICS_PROCESSING_H_
+#define WEBRTC_MODULES_VIDEO_CODING_CONTENT_METRICS_PROCESSING_H_
+
+#include "typedefs.h"
+
+namespace webrtc {
+
+struct VideoContentMetrics;
+
+// QM interval time (in ms)
+enum {
+  kQmMinIntervalMs = 10000
+};
+
+// Flag for NFD metric vs motion metric
+enum {
+  kNfdMetric = 1
+};
+
+/**********************************/
+/* Content Metrics Processing     */
+/**********************************/
+class VCMContentMetricsProcessing {
+ public:
+  VCMContentMetricsProcessing();
+  ~VCMContentMetricsProcessing();
+
+  // Update class with latest metrics.
+  int UpdateContentData(const VideoContentMetrics *contentMetrics);
+
+  // Reset the short-term averaged content data.
+  void ResetShortTermAvgData();
+
+  // Initialize.
+  int Reset();
+
+  // Inform class of current frame rate.
+  void UpdateFrameRate(uint32_t frameRate);
+
+  // Returns the long-term averaged content data: recursive average over longer
+  // time scale.
+  VideoContentMetrics* LongTermAvgData();
+
+  // Returns the short-term averaged content data: uniform average over
+  // shorter time scalE.
+  VideoContentMetrics* ShortTermAvgData();
+
+ private:
+  // Compute working average.
+  int ProcessContent(const VideoContentMetrics *contentMetrics);
+
+  // Update the recursive averaged metrics: longer time average (~5/10 secs).
+  void UpdateRecursiveAvg(const VideoContentMetrics *contentMetrics);
+
+  // Update the uniform averaged metrics: shorter time average (~RTCP report).
+  void UpdateUniformAvg(const VideoContentMetrics *contentMetrics);
+
+  VideoContentMetrics* recursive_avg_;
+  VideoContentMetrics* uniform_avg_;
+  float recursive_avg_factor_;
+  uint32_t frame_cnt_uniform_avg_;
+  float avg_motion_level_;
+  float avg_spatial_level_;
+};
+}  // namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_CODING_CONTENT_METRICS_PROCESSING_H_
diff --git a/src/modules/video_coding/main/source/decoding_state.cc b/src/modules/video_coding/main/source/decoding_state.cc
new file mode 100644
index 0000000..eea0a2c
--- /dev/null
+++ b/src/modules/video_coding/main/source/decoding_state.cc
@@ -0,0 +1,202 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/main/source/decoding_state.h"
+
+#include "modules/video_coding/main/source/frame_buffer.h"
+#include "modules/video_coding/main/source/jitter_buffer_common.h"
+#include "modules/video_coding/main/source/packet.h"
+#include "modules/interface/module_common_types.h"
+
+namespace webrtc {
+
+VCMDecodingState::VCMDecodingState()
+    : sequence_num_(0),
+      time_stamp_(0),
+      picture_id_(kNoPictureId),
+      temporal_id_(kNoTemporalIdx),
+      tl0_pic_id_(kNoTl0PicIdx),
+      full_sync_(true),
+      init_(true) {}
+
+VCMDecodingState::~VCMDecodingState() {}
+
+void VCMDecodingState::Reset() {
+  // TODO(mikhal): Verify - not always would want to reset the sync
+  sequence_num_ = 0;
+  time_stamp_ = 0;
+  picture_id_ = kNoPictureId;
+  temporal_id_ = kNoTemporalIdx;
+  tl0_pic_id_ = kNoTl0PicIdx;
+  full_sync_ = true;
+  init_ = true;
+}
+
+uint32_t VCMDecodingState::time_stamp() const {
+  return time_stamp_;
+}
+
+uint16_t VCMDecodingState::sequence_num() const {
+  return sequence_num_;
+}
+
+bool VCMDecodingState::IsOldFrame(const VCMFrameBuffer* frame) const {
+  assert(frame != NULL);
+  if (init_)
+    return false;
+  return (LatestTimestamp(time_stamp_, frame->TimeStamp(), NULL)
+          == time_stamp_);
+}
+
+bool VCMDecodingState::IsOldPacket(const VCMPacket* packet) const {
+  assert(packet != NULL);
+  if (init_)
+    return false;
+  return (LatestTimestamp(time_stamp_, packet->timestamp, NULL)
+           == time_stamp_);
+}
+
+void VCMDecodingState::SetState(const VCMFrameBuffer* frame) {
+  assert(frame != NULL && frame->GetHighSeqNum() >= 0);
+  UpdateSyncState(frame);
+  sequence_num_ = static_cast<uint16_t>(frame->GetHighSeqNum());
+  time_stamp_ = frame->TimeStamp();
+  picture_id_ = frame->PictureId();
+  temporal_id_ = frame->TemporalId();
+  tl0_pic_id_ = frame->Tl0PicId();
+  init_ = false;
+}
+
+void VCMDecodingState::SetStateOneBack(const VCMFrameBuffer* frame) {
+  assert(frame != NULL && frame->GetHighSeqNum() >= 0);
+  sequence_num_ = static_cast<uint16_t>(frame->GetHighSeqNum()) - 1u;
+  time_stamp_ = frame->TimeStamp() - 1u;
+  temporal_id_ = frame->TemporalId();
+  if (frame->PictureId() != kNoPictureId) {
+    if (frame->PictureId() == 0)
+      picture_id_ = 0x7FFF;
+    else
+      picture_id_ =  frame->PictureId() - 1;
+  }
+  if (frame->Tl0PicId() != kNoTl0PicIdx) {
+    if (frame->Tl0PicId() == 0)
+      tl0_pic_id_ = 0x00FF;
+    else
+      tl0_pic_id_ = frame->Tl0PicId() - 1;
+  }
+  init_ = false;
+}
+
+void VCMDecodingState::UpdateOldPacket(const VCMPacket* packet) {
+  assert(packet != NULL);
+  if (packet->timestamp == time_stamp_) {
+    // Late packet belonging to the last decoded frame - make sure we update the
+    // last decoded sequence number.
+    sequence_num_ = LatestSequenceNumber(packet->seqNum, sequence_num_, NULL);
+  }
+}
+
+void VCMDecodingState::SetSeqNum(uint16_t new_seq_num) {
+  sequence_num_ = new_seq_num;
+}
+
+bool VCMDecodingState::init() const {
+  return init_;
+}
+
+bool VCMDecodingState::full_sync() const {
+  return full_sync_;
+}
+
+void VCMDecodingState::UpdateSyncState(const VCMFrameBuffer* frame) {
+  if (init_)
+    return;
+  if (frame->TemporalId() == kNoTemporalIdx ||
+      frame->Tl0PicId() == kNoTl0PicIdx) {
+    full_sync_ = true;
+  } else if (frame->FrameType() == kVideoFrameKey || frame->LayerSync()) {
+    full_sync_ = true;
+  } else if (full_sync_) {
+    // Verify that we are still in sync.
+    // Sync will be broken if continuity is true for layers but not for the
+    // other methods (PictureId and SeqNum).
+    if (!ContinuousPictureId(frame->PictureId()) &&
+        !ContinuousSeqNum(static_cast<uint16_t>(frame->GetLowSeqNum()))) {
+      // Non-layered methods have failed.
+      full_sync_ = false;
+    }
+  }
+}
+
+bool VCMDecodingState::ContinuousFrame(const VCMFrameBuffer* frame) const {
+  // Check continuity based on the following hierarchy:
+  // - Temporal layers (stop here if out of sync).
+  // - Picture Id when available.
+  // - Sequence numbers.
+  // Return true when in initial state.
+  // Note that when a method is not applicable it will return false.
+  assert(frame != NULL);
+  if (init_)
+    return true;
+
+  if (!ContinuousLayer(frame->TemporalId(), frame->Tl0PicId())) {
+    // Base layers are not continuous or temporal layers are inactive.
+    // In the presence of temporal layers, check for Picture ID/sequence number
+    // continuity if sync can be restored by this frame.
+    if (!full_sync_ && !frame->LayerSync())
+      return false;
+    else if (!ContinuousPictureId(frame->PictureId()))
+      return ContinuousSeqNum(static_cast<uint16_t>(frame->GetLowSeqNum()));
+  }
+  return true;
+}
+
+bool VCMDecodingState::ContinuousPictureId(int picture_id) const {
+  // First, check if applicable.
+  if (picture_id == kNoPictureId || picture_id_ == kNoPictureId)
+    return false;
+
+  int next_picture_id = picture_id_ + 1;
+  if (picture_id < picture_id_) {
+    // Wrap
+    if (picture_id_ >= 0x80) {
+      // 15 bits used for picture id
+      return ((next_picture_id & 0x7FFF) == picture_id);
+    } else {
+      // 7 bits used for picture id
+      return ((next_picture_id & 0x7F) == picture_id);
+    }
+  }
+  // No wrap
+  return (next_picture_id == picture_id);
+}
+
+bool VCMDecodingState::ContinuousSeqNum(uint16_t seq_num) const {
+  return (seq_num == static_cast<uint16_t>(sequence_num_ + 1));
+}
+
+bool VCMDecodingState::ContinuousLayer(int temporal_id,
+                                       int tl0_pic_id) const {
+  // First, check if applicable.
+  if (temporal_id == kNoTemporalIdx || tl0_pic_id == kNoTl0PicIdx)
+    return false;
+  // If this is the first frame to use temporal layers, make sure we start
+  // from base.
+  else if (tl0_pic_id_ == kNoTl0PicIdx && temporal_id_ == kNoTemporalIdx &&
+           temporal_id == 0)
+    return true;
+
+  // Current implementation: Look for base layer continuity.
+  if (temporal_id != 0)
+    return false;
+  return (static_cast<uint8_t>(tl0_pic_id_ + 1) == tl0_pic_id);
+}
+
+}  // namespace webrtc
diff --git a/src/modules/video_coding/main/source/decoding_state.h b/src/modules/video_coding/main/source/decoding_state.h
new file mode 100644
index 0000000..afec3ba
--- /dev/null
+++ b/src/modules/video_coding/main/source/decoding_state.h
@@ -0,0 +1,69 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_DECODING_STATE_H_
+#define WEBRTC_MODULES_VIDEO_CODING_DECODING_STATE_H_
+
+#include "typedefs.h"
+
+namespace webrtc {
+
+// Forward declarations
+class VCMFrameBuffer;
+class VCMPacket;
+
+class VCMDecodingState {
+ public:
+  VCMDecodingState();
+  ~VCMDecodingState();
+  // Check for old frame
+  bool IsOldFrame(const VCMFrameBuffer* frame) const;
+  // Check for old packet
+  bool IsOldPacket(const VCMPacket* packet) const;
+  // Check for frame continuity based on current decoded state. Use best method
+  // possible, i.e. temporal info, picture ID or sequence number.
+  bool ContinuousFrame(const VCMFrameBuffer* frame) const;
+  void SetState(const VCMFrameBuffer* frame);
+  // Set the decoding state one frame back.
+  void SetStateOneBack(const VCMFrameBuffer* frame);
+  // Update the sequence number if the timestamp matches current state and the
+  // sequence number is higher than the current one. This accounts for packets
+  // arriving late.
+  void UpdateOldPacket(const VCMPacket* packet);
+  void SetSeqNum(uint16_t new_seq_num);
+  void Reset();
+  uint32_t time_stamp() const;
+  uint16_t sequence_num() const;
+  // Return true if at initial state.
+  bool init() const;
+  // Return true when sync is on - decode all layers.
+  bool full_sync() const;
+
+ private:
+  void UpdateSyncState(const VCMFrameBuffer* frame);
+  // Designated continuity functions
+  bool ContinuousPictureId(int picture_id) const;
+  bool ContinuousSeqNum(uint16_t seq_num) const;
+  bool ContinuousLayer(int temporal_id, int tl0_pic_id) const;
+
+  // Keep state of last decoded frame.
+  // TODO(mikhal/stefan): create designated classes to handle these types.
+  uint16_t    sequence_num_;
+  uint32_t    time_stamp_;
+  int         picture_id_;
+  int         temporal_id_;
+  int         tl0_pic_id_;
+  bool        full_sync_;  // Sync flag when temporal layers are used.
+  bool        init_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_DECODING_STATE_H_
diff --git a/src/modules/video_coding/main/source/decoding_state_unittest.cc b/src/modules/video_coding/main/source/decoding_state_unittest.cc
new file mode 100644
index 0000000..853f42a
--- /dev/null
+++ b/src/modules/video_coding/main/source/decoding_state_unittest.cc
@@ -0,0 +1,430 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h>
+
+#include "modules/video_coding/main/source/decoding_state.h"
+#include "modules/video_coding/main/source/frame_buffer.h"
+#include "gtest/gtest.h"
+#include "modules/video_coding/main/source/jitter_buffer_common.h"
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/main/source/packet.h"
+
+namespace webrtc {
+
+
+TEST(TestDecodingState, Sanity) {
+  VCMDecodingState dec_state;
+  dec_state.Reset();
+  EXPECT_TRUE(dec_state.init());
+  EXPECT_TRUE(dec_state.full_sync());
+}
+
+TEST(TestDecodingState, FrameContinuity) {
+  VCMDecodingState dec_state;
+  // Check that makes decision based on correct method.
+  VCMFrameBuffer frame;
+  frame.SetState(kStateEmpty);
+  VCMPacket* packet = new VCMPacket();
+  packet->isFirstPacket = 1;
+  packet->timestamp = 1;
+  packet->seqNum = 0xffff;
+  packet->frameType = kVideoFrameDelta;
+  packet->codecSpecificHeader.codec = kRTPVideoVP8;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0x007F;
+  frame.InsertPacket(*packet, 0, false, 0);
+  // Should return true on init.
+  dec_state.Reset();
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+  dec_state.SetState(&frame);
+  frame.Reset();
+  // Use pictureId
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0x0002;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0;
+  packet->seqNum = 10;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+
+  // Use sequence numbers.
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = kNoPictureId;
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->seqNum = dec_state.sequence_num() - 1u;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->seqNum = dec_state.sequence_num() + 1u;
+  frame.InsertPacket(*packet, 0, false, 0);
+  // Insert another packet to this frame
+  packet->seqNum++;
+  frame.InsertPacket(*packet, 0, false, 0);
+  // Verify wrap.
+  EXPECT_EQ(dec_state.sequence_num(), 0xffff);
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+  dec_state.SetState(&frame);
+
+  // Insert packet with temporal info.
+  dec_state.Reset();
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0;
+  packet->seqNum = 1;
+  packet->timestamp = 1;
+  EXPECT_TRUE(dec_state.full_sync());
+  frame.InsertPacket(*packet, 0, false, 0);
+  dec_state.SetState(&frame);
+  EXPECT_TRUE(dec_state.full_sync());
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  // 1 layer up - still good.
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 1;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 1;
+  packet->seqNum = 2;
+  packet->timestamp = 2;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+  dec_state.SetState(&frame);
+  EXPECT_TRUE(dec_state.full_sync());
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  // Lost non-base layer packet => should update sync parameter.
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 3;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 3;
+  packet->seqNum = 4;
+  packet->timestamp = 4;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+  // Now insert the next non-base layer (belonging to a next tl0PicId).
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 2;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 4;
+  packet->seqNum = 5;
+  packet->timestamp = 5;
+  frame.InsertPacket(*packet, 0, false, 0);
+  // Checking continuity and not updating the state - this should not trigger
+  // an update of sync state.
+  EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+  EXPECT_TRUE(dec_state.full_sync());
+  // Next base layer (dropped interim non-base layers) - should update sync.
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 5;
+  packet->seqNum = 6;
+  packet->timestamp = 6;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+  dec_state.SetState(&frame);
+  EXPECT_FALSE(dec_state.full_sync());
+
+  // Check wrap for temporal layers.
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0x00FF;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 6;
+  packet->seqNum = 7;
+  packet->timestamp = 7;
+  frame.InsertPacket(*packet, 0, false, 0);
+  dec_state.SetState(&frame);
+  EXPECT_FALSE(dec_state.full_sync());
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0x0000;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 7;
+  packet->seqNum = 8;
+  packet->timestamp = 8;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+  // The current frame is not continuous
+  dec_state.SetState(&frame);
+  EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+  delete packet;
+}
+
+TEST(TestDecodingState, SetStateOneBack) {
+  VCMDecodingState dec_state;
+  VCMFrameBuffer frame;
+  frame.SetState(kStateEmpty);
+  VCMPacket* packet = new VCMPacket();
+  // Based on PictureId.
+  packet->frameType = kVideoFrameDelta;
+  packet->codecSpecificHeader.codec = kRTPVideoVP8;
+  packet->timestamp = 0;
+  packet->seqNum = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0;
+  packet->frameType = kVideoFrameDelta;
+  frame.InsertPacket(*packet, 0, false, 0);
+  dec_state.SetStateOneBack(&frame);
+  EXPECT_EQ(dec_state.sequence_num(), 0xFFFF);
+  // Check continuity.
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+
+  // Based on Temporal layers.
+  packet->timestamp = 0;
+  packet->seqNum = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = kNoPictureId;
+  packet->frameType = kVideoFrameDelta;
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+  frame.InsertPacket(*packet, 0, false, 0);
+  dec_state.SetStateOneBack(&frame);
+  // Check continuity
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+  delete packet;
+}
+
+TEST(TestDecodingState, UpdateOldPacket) {
+  VCMDecodingState dec_state;
+  // Update only if zero size and newer than previous.
+  // Should only update if the timeStamp match.
+  VCMFrameBuffer frame;
+  frame.SetState(kStateEmpty);
+  VCMPacket* packet = new VCMPacket();
+  packet->timestamp = 1;
+  packet->seqNum = 1;
+  packet->frameType = kVideoFrameDelta;
+  frame.InsertPacket(*packet, 0, false, 0);
+  dec_state.SetState(&frame);
+  EXPECT_EQ(dec_state.sequence_num(), 1);
+  // Insert an empty packet that does not belong to the same frame.
+  // => Sequence num should be the same.
+  packet->timestamp = 2;
+  dec_state.UpdateOldPacket(packet);
+  EXPECT_EQ(dec_state.sequence_num(), 1);
+  // Now insert empty packet belonging to the same frame.
+  packet->timestamp = 1;
+  packet->seqNum = 2;
+  packet->frameType = kFrameEmpty;
+  packet->sizeBytes = 0;
+  dec_state.UpdateOldPacket(packet);
+  EXPECT_EQ(dec_state.sequence_num(), 2);
+  // Now insert delta packet belonging to the same frame.
+  packet->timestamp = 1;
+  packet->seqNum = 3;
+  packet->frameType = kVideoFrameDelta;
+  packet->sizeBytes = 1400;
+  dec_state.UpdateOldPacket(packet);
+  EXPECT_EQ(dec_state.sequence_num(), 3);
+  // Insert a packet belonging to an older timestamp - should not update the
+  // sequence number.
+  packet->timestamp = 0;
+  packet->seqNum = 4;
+  packet->frameType = kFrameEmpty;
+  packet->sizeBytes = 0;
+  dec_state.UpdateOldPacket(packet);
+  EXPECT_EQ(dec_state.sequence_num(), 3);
+
+  delete packet;
+}
+
+TEST(TestDecodingState, MultiLayerBehavior) {
+  // Identify sync/non-sync when more than one layer.
+  VCMDecodingState dec_state;
+  // Identify packets belonging to old frames/packets.
+  // Set state for current frames.
+  // tl0PicIdx 0, temporal id 0.
+  VCMFrameBuffer frame;
+  VCMPacket* packet = new VCMPacket();
+  packet->frameType = kVideoFrameDelta;
+  packet->codecSpecificHeader.codec = kRTPVideoVP8;
+  frame.SetState(kStateEmpty);
+  packet->timestamp = 0;
+  packet->seqNum = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0;
+  frame.InsertPacket(*packet, 0, false, 0);
+  dec_state.SetState(&frame);
+  // tl0PicIdx 0, temporal id 1.
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->timestamp = 1;
+  packet->seqNum = 1;
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 1;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 1;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+  dec_state.SetState(&frame);
+  EXPECT_TRUE(dec_state.full_sync());
+  // Lost tl0PicIdx 0, temporal id 2.
+  // Insert tl0PicIdx 0, temporal id 3.
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->timestamp = 3;
+  packet->seqNum = 3;
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 3;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 3;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+  dec_state.SetState(&frame);
+  EXPECT_FALSE(dec_state.full_sync());
+  // Insert next base layer
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->timestamp = 4;
+  packet->seqNum = 4;
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 1;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 4;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+  dec_state.SetState(&frame);
+  EXPECT_FALSE(dec_state.full_sync());
+  // Insert key frame - should update sync value.
+  // A key frame is always a base layer.
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->frameType = kVideoFrameKey;
+  packet->isFirstPacket = 1;
+  packet->timestamp = 5;
+  packet->seqNum = 5;
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 2;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 5;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+  dec_state.SetState(&frame);
+  EXPECT_TRUE(dec_state.full_sync());
+  // After sync, a continuous PictureId is required
+  // (continuous base layer is not enough )
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->frameType = kVideoFrameDelta;
+  packet->timestamp = 6;
+  packet->seqNum = 6;
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 3;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 6;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+  EXPECT_TRUE(dec_state.full_sync());
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->frameType = kVideoFrameDelta;
+  packet->isFirstPacket = 1;
+  packet->timestamp = 8;
+  packet->seqNum = 8;
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 4;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 8;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+  EXPECT_TRUE(dec_state.full_sync());
+  dec_state.SetState(&frame);
+  EXPECT_FALSE(dec_state.full_sync());
+
+  // Insert a non-ref frame - should update sync value.
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->frameType = kVideoFrameDelta;
+  packet->isFirstPacket = 1;
+  packet->timestamp = 9;
+  packet->seqNum = 9;
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 4;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 2;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 9;
+  packet->codecSpecificHeader.codecHeader.VP8.layerSync = true;
+  frame.InsertPacket(*packet, 0, false, 0);
+  dec_state.SetState(&frame);
+  EXPECT_TRUE(dec_state.full_sync());
+
+  // The following test will verify the sync flag behavior after a loss.
+  // Create the following pattern:
+  // Update base layer, lose packet 1 (sync flag on, layer 2), insert packet 3
+  // (sync flag on, layer 2) check continuity and sync flag after inserting
+  // packet 2 (sync flag on, layer 1).
+  // Base layer.
+  frame.Reset();
+  dec_state.Reset();
+  frame.SetState(kStateEmpty);
+  packet->frameType = kVideoFrameDelta;
+  packet->isFirstPacket = 1;
+  packet->markerBit = 1;
+  packet->timestamp = 0;
+  packet->seqNum = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.layerSync = false;
+  frame.InsertPacket(*packet, 0, false, 0);
+  dec_state.SetState(&frame);
+  EXPECT_TRUE(dec_state.full_sync());
+  // Layer 2 - 2 packets (insert one, lose one).
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->frameType = kVideoFrameDelta;
+  packet->isFirstPacket = 1;
+  packet->markerBit = 0;
+  packet->timestamp = 1;
+  packet->seqNum = 1;
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 2;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 1;
+  packet->codecSpecificHeader.codecHeader.VP8.layerSync = true;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_TRUE(dec_state.ContinuousFrame(&frame));
+  // Layer 1
+  frame.Reset();
+  frame.SetState(kStateEmpty);
+  packet->frameType = kVideoFrameDelta;
+  packet->isFirstPacket = 1;
+  packet->markerBit = 1;
+  packet->timestamp = 2;
+  packet->seqNum = 3;
+  packet->codecSpecificHeader.codecHeader.VP8.tl0PicIdx = 0;
+  packet->codecSpecificHeader.codecHeader.VP8.temporalIdx = 1;
+  packet->codecSpecificHeader.codecHeader.VP8.pictureId = 2;
+  packet->codecSpecificHeader.codecHeader.VP8.layerSync = true;
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_FALSE(dec_state.ContinuousFrame(&frame));
+  EXPECT_TRUE(dec_state.full_sync());
+
+  delete packet;
+}
+
+TEST(TestDecodingState, OldInput) {
+  VCMDecodingState dec_state;
+  // Identify packets belonging to old frames/packets.
+  // Set state for current frames.
+  VCMFrameBuffer frame;
+  frame.SetState(kStateEmpty);
+  VCMPacket* packet = new VCMPacket();
+  packet->timestamp = 10;
+  packet->seqNum = 1;
+  frame.InsertPacket(*packet, 0, false, 0);
+  dec_state.SetState(&frame);
+  packet->timestamp = 9;
+  EXPECT_TRUE(dec_state.IsOldPacket(packet));
+  // Check for old frame
+  frame.Reset();
+  frame.InsertPacket(*packet, 0, false, 0);
+  EXPECT_TRUE(dec_state.IsOldFrame(&frame));
+
+
+  delete packet;
+}
+
+}  // namespace webrtc
diff --git a/src/modules/video_coding/main/source/encoded_frame.cc b/src/modules/video_coding/main/source/encoded_frame.cc
new file mode 100644
index 0000000..dff9df3
--- /dev/null
+++ b/src/modules/video_coding/main/source/encoded_frame.cc
@@ -0,0 +1,257 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "encoded_frame.h"
+#include "generic_encoder.h"
+#include "jitter_buffer_common.h"
+#include "video_coding_defines.h"
+
+namespace webrtc {
+
+VCMEncodedFrame::VCMEncodedFrame()
+:
+webrtc::EncodedImage(),
+_renderTimeMs(-1),
+_payloadType(0),
+_missingFrame(false),
+_codec(kVideoCodecUnknown),
+_fragmentation()
+{
+    _codecSpecificInfo.codecType = kVideoCodecUnknown;
+}
+
+VCMEncodedFrame::VCMEncodedFrame(const webrtc::EncodedImage& rhs)
+:
+webrtc::EncodedImage(rhs),
+_renderTimeMs(-1),
+_payloadType(0),
+_missingFrame(false),
+_codec(kVideoCodecUnknown),
+_fragmentation()
+{
+    _codecSpecificInfo.codecType = kVideoCodecUnknown;
+    _buffer = NULL;
+    _size = 0;
+    _length = 0;
+    if (rhs._buffer != NULL)
+    {
+        VerifyAndAllocate(rhs._length);
+        memcpy(_buffer, rhs._buffer, rhs._length);
+    }
+}
+
+VCMEncodedFrame::VCMEncodedFrame(const VCMEncodedFrame& rhs)
+  :
+    webrtc::EncodedImage(rhs),
+    _renderTimeMs(rhs._renderTimeMs),
+    _payloadType(rhs._payloadType),
+    _missingFrame(rhs._missingFrame),
+    _codecSpecificInfo(rhs._codecSpecificInfo),
+    _codec(rhs._codec),
+    _fragmentation() {
+  _buffer = NULL;
+  _size = 0;
+  _length = 0;
+  if (rhs._buffer != NULL)
+  {
+      VerifyAndAllocate(rhs._length);
+      memcpy(_buffer, rhs._buffer, rhs._length);
+      _length = rhs._length;
+  }
+  // Deep operator=
+  _fragmentation = rhs._fragmentation;
+}
+
+VCMEncodedFrame::~VCMEncodedFrame()
+{
+    Free();
+}
+
+void VCMEncodedFrame::Free()
+{
+    Reset();
+    if (_buffer != NULL)
+    {
+        delete [] _buffer;
+        _buffer = NULL;
+    }
+}
+
+void VCMEncodedFrame::Reset()
+{
+    _renderTimeMs = -1;
+    _timeStamp = 0;
+    _payloadType = 0;
+    _frameType = kDeltaFrame;
+    _encodedWidth = 0;
+    _encodedHeight = 0;
+    _completeFrame = false;
+    _missingFrame = false;
+    _length = 0;
+    _codecSpecificInfo.codecType = kVideoCodecUnknown;
+    _codec = kVideoCodecUnknown;
+}
+
+void VCMEncodedFrame::CopyCodecSpecific(const RTPVideoHeader* header)
+{
+    if (header)
+    {
+        switch (header->codec)
+        {
+            case kRTPVideoVP8:
+            {
+                if (_codecSpecificInfo.codecType != kVideoCodecVP8)
+                {
+                    // This is the first packet for this frame.
+                    _codecSpecificInfo.codecSpecific.VP8.pictureId = -1;
+                    _codecSpecificInfo.codecSpecific.VP8.temporalIdx = 0;
+                    _codecSpecificInfo.codecSpecific.VP8.layerSync = false;
+                    _codecSpecificInfo.codecSpecific.VP8.keyIdx = -1;
+                    _codecSpecificInfo.codecType = kVideoCodecVP8;
+                }
+                _codecSpecificInfo.codecSpecific.VP8.nonReference =
+                    header->codecHeader.VP8.nonReference;
+                if (header->codecHeader.VP8.pictureId != kNoPictureId)
+                {
+                    _codecSpecificInfo.codecSpecific.VP8.pictureId =
+                        header->codecHeader.VP8.pictureId;
+                }
+                if (header->codecHeader.VP8.temporalIdx != kNoTemporalIdx)
+                {
+                    _codecSpecificInfo.codecSpecific.VP8.temporalIdx =
+                        header->codecHeader.VP8.temporalIdx;
+                    _codecSpecificInfo.codecSpecific.VP8.layerSync =
+                        header->codecHeader.VP8.layerSync;
+                }
+                if (header->codecHeader.VP8.keyIdx != kNoKeyIdx)
+                {
+                    _codecSpecificInfo.codecSpecific.VP8.keyIdx =
+                        header->codecHeader.VP8.keyIdx;
+                }
+                break;
+            }
+            default:
+            {
+                _codecSpecificInfo.codecType = kVideoCodecUnknown;
+                break;
+            }
+        }
+    }
+}
+
+const RTPFragmentationHeader* VCMEncodedFrame::FragmentationHeader() const {
+  return &_fragmentation;
+}
+
+WebRtc_Word32
+VCMEncodedFrame::Store(VCMFrameStorageCallback& storeCallback) const
+{
+    EncodedVideoData frameToStore;
+    frameToStore.codec = _codec;
+    if (_buffer != NULL)
+    {
+        frameToStore.VerifyAndAllocate(_length);
+        memcpy(frameToStore.payloadData, _buffer, _length);
+        frameToStore.payloadSize = _length;
+    }
+    frameToStore.completeFrame = _completeFrame;
+    frameToStore.encodedWidth = _encodedWidth;
+    frameToStore.encodedHeight = _encodedHeight;
+    frameToStore.frameType = ConvertFrameType(_frameType);
+    frameToStore.missingFrame = _missingFrame;
+    frameToStore.payloadType = _payloadType;
+    frameToStore.renderTimeMs = _renderTimeMs;
+    frameToStore.timeStamp = _timeStamp;
+    storeCallback.StoreReceivedFrame(frameToStore);
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VCMEncodedFrame::VerifyAndAllocate(const WebRtc_UWord32 minimumSize)
+{
+    if(minimumSize > _size)
+    {
+        // create buffer of sufficient size
+        WebRtc_UWord8* newBuffer = new WebRtc_UWord8[minimumSize];
+        if (newBuffer == NULL)
+        {
+            return -1;
+        }
+        if(_buffer)
+        {
+            // copy old data
+            memcpy(newBuffer, _buffer, _size);
+            delete [] _buffer;
+        }
+        _buffer = newBuffer;
+        _size = minimumSize;
+    }
+    return 0;
+}
+
+webrtc::FrameType VCMEncodedFrame::ConvertFrameType(VideoFrameType frameType)
+{
+    switch(frameType)
+    {
+    case kKeyFrame:
+        {
+            return  kVideoFrameKey;
+        }
+    case kDeltaFrame:
+        {
+            return kVideoFrameDelta;
+        }
+    case kGoldenFrame:
+        {
+            return kVideoFrameGolden;
+        }
+    case kAltRefFrame:
+        {
+            return kVideoFrameAltRef;
+        }
+    case kSkipFrame:
+        {
+            return kFrameEmpty;
+        }
+    default:
+        {
+            return kVideoFrameDelta;
+        }
+    }
+}
+
+VideoFrameType VCMEncodedFrame::ConvertFrameType(webrtc::FrameType frameType)
+{
+    switch (frameType)
+    {
+    case kVideoFrameKey:
+        {
+            return kKeyFrame;
+        }
+    case kVideoFrameDelta:
+        {
+            return kDeltaFrame;
+        }
+    case kVideoFrameGolden:
+        {
+            return kGoldenFrame;
+        }
+    case kVideoFrameAltRef:
+        {
+            return kAltRefFrame;
+        }
+    default:
+        {
+            return kDeltaFrame;
+        }
+    }
+}
+
+}
diff --git a/src/modules/video_coding/main/source/encoded_frame.h b/src/modules/video_coding/main/source/encoded_frame.h
new file mode 100644
index 0000000..6289e9e
--- /dev/null
+++ b/src/modules/video_coding/main/source/encoded_frame.h
@@ -0,0 +1,120 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_ENCODED_FRAME_H_
+#define WEBRTC_MODULES_VIDEO_CODING_ENCODED_FRAME_H_
+
+#include "common_types.h"
+#include "common_video/interface/video_image.h"
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "modules/video_coding/main/interface/video_coding_defines.h"
+
+namespace webrtc
+{
+
+class VCMEncodedFrame : protected EncodedImage
+{
+public:
+    VCMEncodedFrame();
+    VCMEncodedFrame(const webrtc::EncodedImage& rhs);
+    VCMEncodedFrame(const VCMEncodedFrame& rhs);
+
+    ~VCMEncodedFrame();
+    /**
+    *   Delete VideoFrame and resets members to zero
+    */
+    void Free();
+    /**
+    *   Set render time in milliseconds
+    */
+    void SetRenderTime(const WebRtc_Word64 renderTimeMs) {_renderTimeMs = renderTimeMs;}
+
+    /**
+    *   Set the encoded frame size
+    */
+    void SetEncodedSize(WebRtc_UWord32 width, WebRtc_UWord32 height)
+                       { _encodedWidth  = width; _encodedHeight = height; }
+    /**
+    *   Get the encoded image
+    */
+    const webrtc::EncodedImage& EncodedImage() const
+                       { return static_cast<const webrtc::EncodedImage&>(*this); }
+    /**
+    *   Get pointer to frame buffer
+    */
+    const WebRtc_UWord8* Buffer() const {return _buffer;}
+    /**
+    *   Get frame length
+    */
+    WebRtc_UWord32 Length() const {return _length;}
+    /**
+    *   Get frame timestamp (90kHz)
+    */
+    WebRtc_UWord32 TimeStamp() const {return _timeStamp;}
+    /**
+    *   Get render time in milliseconds
+    */
+    WebRtc_Word64 RenderTimeMs() const {return _renderTimeMs;}
+    /**
+    *   Get frame type
+    */
+    webrtc::FrameType FrameType() const {return ConvertFrameType(_frameType);}
+    /**
+    *   True if this frame is complete, false otherwise
+    */
+    bool Complete() const { return _completeFrame; }
+    /**
+    *   True if there's a frame missing before this frame
+    */
+    bool MissingFrame() const { return _missingFrame; }
+    /**
+    *   Payload type of the encoded payload
+    */
+    WebRtc_UWord8 PayloadType() const { return _payloadType; }
+    /**
+    *   Get codec specific info.
+    *   The returned pointer is only valid as long as the VCMEncodedFrame
+    *   is valid. Also, VCMEncodedFrame owns the pointer and will delete
+    *   the object.
+    */
+    const CodecSpecificInfo* CodecSpecific() const {return &_codecSpecificInfo;}
+
+    const RTPFragmentationHeader* FragmentationHeader() const;
+
+    WebRtc_Word32 Store(VCMFrameStorageCallback& storeCallback) const;
+
+    static webrtc::FrameType ConvertFrameType(VideoFrameType frameType);
+    static VideoFrameType ConvertFrameType(webrtc::FrameType frameType);
+
+protected:
+    /**
+    * Verifies that current allocated buffer size is larger than or equal to the input size.
+    * If the current buffer size is smaller, a new allocation is made and the old buffer data
+    * is copied to the new buffer.
+    * Buffer size is updated to minimumSize.
+    */
+    WebRtc_Word32 VerifyAndAllocate(const WebRtc_UWord32 minimumSize);
+
+    void Reset();
+
+    void CopyCodecSpecific(const RTPVideoHeader* header);
+
+    WebRtc_Word64                 _renderTimeMs;
+    WebRtc_UWord8                 _payloadType;
+    bool                          _missingFrame;
+    CodecSpecificInfo             _codecSpecificInfo;
+    webrtc::VideoCodecType        _codec;
+    RTPFragmentationHeader        _fragmentation;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_ENCODED_FRAME_H_
diff --git a/src/modules/video_coding/main/source/er_tables_xor.h b/src/modules/video_coding/main/source/er_tables_xor.h
new file mode 100644
index 0000000..99163ba
--- /dev/null
+++ b/src/modules/video_coding/main/source/er_tables_xor.h
@@ -0,0 +1,38742 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_SOURCE_ER_TABLES_XOR_H_
+#define WEBRTC_MODULES_VIDEO_CODING_SOURCE_ER_TABLES_XOR_H_
+
+// This is a private header for media_opt_util.cc.
+// It should not be included by other files.
+
+namespace webrtc {
+
+// Table for average FEC recovery from packet loss, for XOR code.
+// From RPL model of random loss.
+// Input is the received packet loss (up to 50%), and FEC code parameters
+// (up to 24x24):
+// i.e., kAvgFECRecoveryXOR[k] where k = code_i*129 + loss_j;
+// code_i=1x1,2x1,2x2,..24x24, loss_j = 0,1,..128.
+
+// Maximum number of source packets in off-line model
+static const int kMaxNumPackets = 24;
+// Max value of loss rates in off-line model
+static const int kPacketLossMax = 129;
+
+// Table size for model is: kPacketLossMax * numberOfFecCodes = 38700
+// numberOfFecCodes is determined as:
+// {(1,1), (2,1), (2,2),...(n,1),..(n,n-1), (n,n)} = n*(n+1)/2
+// for n = kMaxNumPackets.
+static const int kSizeAvgFECRecoveryXOR = 38700;
+static const unsigned char kAvgFECRecoveryXOR[kSizeAvgFECRecoveryXOR] = {
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+35,
+36,
+37,
+37,
+38,
+38,
+39,
+40,
+40,
+41,
+41,
+42,
+43,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+47,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+51,
+51,
+52,
+52,
+52,
+53,
+53,
+54,
+54,
+54,
+55,
+55,
+55,
+56,
+56,
+56,
+57,
+57,
+57,
+58,
+58,
+58,
+59,
+59,
+59,
+59,
+60,
+60,
+60,
+60,
+60,
+61,
+61,
+61,
+61,
+61,
+62,
+62,
+62,
+62,
+62,
+62,
+62,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+23,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+30,
+31,
+31,
+31,
+32,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+35,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+33,
+32,
+32,
+32,
+32,
+31,
+31,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+39,
+40,
+41,
+41,
+42,
+43,
+43,
+44,
+45,
+45,
+46,
+46,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+51,
+51,
+52,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+56,
+57,
+57,
+58,
+58,
+58,
+59,
+59,
+59,
+60,
+60,
+60,
+60,
+61,
+61,
+61,
+61,
+62,
+62,
+62,
+62,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+64,
+63,
+63,
+63,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+15,
+15,
+16,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+21,
+22,
+22,
+22,
+23,
+23,
+23,
+23,
+24,
+24,
+24,
+24,
+25,
+25,
+25,
+25,
+25,
+25,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+24,
+24,
+24,
+24,
+24,
+24,
+23,
+23,
+23,
+23,
+23,
+22,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+20,
+19,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+15,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+34,
+35,
+36,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+41,
+42,
+42,
+43,
+43,
+43,
+44,
+44,
+44,
+45,
+45,
+45,
+45,
+46,
+46,
+46,
+46,
+47,
+47,
+47,
+47,
+47,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+47,
+47,
+47,
+47,
+47,
+47,
+46,
+46,
+46,
+46,
+46,
+45,
+45,
+45,
+45,
+44,
+44,
+44,
+44,
+43,
+43,
+43,
+42,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+36,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+49,
+50,
+51,
+51,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+59,
+59,
+60,
+60,
+61,
+61,
+61,
+62,
+62,
+62,
+63,
+63,
+63,
+63,
+64,
+64,
+64,
+64,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+66,
+66,
+66,
+66,
+66,
+66,
+66,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+64,
+64,
+64,
+64,
+63,
+63,
+63,
+63,
+62,
+62,
+62,
+61,
+61,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+7,
+8,
+9,
+9,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+17,
+18,
+18,
+18,
+18,
+19,
+19,
+19,
+19,
+19,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+7,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+13,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+27,
+28,
+29,
+29,
+30,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+36,
+36,
+36,
+36,
+36,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+27,
+27,
+27,
+26,
+26,
+26,
+25,
+25,
+25,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+31,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+36,
+37,
+38,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+47,
+47,
+48,
+48,
+48,
+49,
+49,
+50,
+50,
+50,
+51,
+51,
+51,
+51,
+52,
+52,
+52,
+52,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+52,
+52,
+52,
+52,
+52,
+51,
+51,
+51,
+51,
+50,
+50,
+50,
+49,
+49,
+49,
+48,
+48,
+48,
+47,
+47,
+47,
+46,
+46,
+45,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+43,
+43,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+54,
+55,
+55,
+56,
+57,
+57,
+58,
+58,
+59,
+59,
+60,
+60,
+61,
+61,
+62,
+62,
+62,
+63,
+63,
+64,
+64,
+64,
+65,
+65,
+65,
+65,
+66,
+66,
+66,
+66,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+68,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+66,
+66,
+66,
+66,
+65,
+65,
+65,
+65,
+64,
+64,
+64,
+63,
+63,
+63,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+8,
+8,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+26,
+27,
+27,
+28,
+28,
+28,
+28,
+29,
+29,
+29,
+30,
+30,
+30,
+30,
+30,
+31,
+31,
+31,
+31,
+31,
+31,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+30,
+30,
+30,
+30,
+30,
+30,
+29,
+29,
+29,
+29,
+28,
+28,
+28,
+28,
+27,
+27,
+27,
+27,
+26,
+26,
+26,
+26,
+25,
+25,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+23,
+22,
+22,
+22,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+39,
+40,
+40,
+41,
+41,
+41,
+42,
+42,
+42,
+43,
+43,
+43,
+43,
+44,
+44,
+44,
+44,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+46,
+46,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+44,
+44,
+44,
+44,
+44,
+43,
+43,
+43,
+43,
+42,
+42,
+42,
+42,
+41,
+41,
+41,
+40,
+40,
+40,
+39,
+39,
+39,
+38,
+38,
+38,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+34,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+40,
+40,
+41,
+42,
+43,
+43,
+44,
+44,
+45,
+46,
+46,
+47,
+47,
+48,
+49,
+49,
+50,
+50,
+51,
+51,
+51,
+52,
+52,
+53,
+53,
+53,
+54,
+54,
+54,
+55,
+55,
+55,
+55,
+56,
+56,
+56,
+56,
+56,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+56,
+56,
+56,
+56,
+56,
+55,
+55,
+55,
+55,
+55,
+54,
+54,
+54,
+53,
+53,
+53,
+52,
+52,
+52,
+51,
+51,
+50,
+50,
+49,
+49,
+49,
+48,
+48,
+47,
+47,
+46,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+50,
+50,
+51,
+52,
+52,
+53,
+54,
+54,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+59,
+60,
+60,
+61,
+61,
+61,
+62,
+62,
+63,
+63,
+64,
+64,
+64,
+65,
+65,
+65,
+66,
+66,
+66,
+66,
+67,
+67,
+67,
+67,
+67,
+67,
+68,
+68,
+68,
+68,
+68,
+68,
+68,
+68,
+68,
+68,
+68,
+68,
+68,
+68,
+68,
+67,
+67,
+67,
+67,
+67,
+67,
+66,
+66,
+66,
+66,
+65,
+65,
+65,
+64,
+64,
+64,
+63,
+63,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+5,
+6,
+7,
+7,
+8,
+8,
+9,
+9,
+10,
+10,
+11,
+11,
+11,
+12,
+12,
+12,
+13,
+13,
+13,
+13,
+13,
+13,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+13,
+13,
+14,
+15,
+15,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+22,
+22,
+22,
+23,
+23,
+24,
+24,
+24,
+25,
+25,
+25,
+25,
+26,
+26,
+26,
+26,
+26,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+26,
+26,
+26,
+26,
+26,
+26,
+25,
+25,
+25,
+25,
+25,
+24,
+24,
+24,
+24,
+23,
+23,
+23,
+23,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+25,
+26,
+27,
+27,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+35,
+36,
+36,
+37,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+34,
+34,
+34,
+33,
+33,
+33,
+32,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+27,
+27,
+27,
+26,
+26,
+25,
+25,
+25,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+36,
+37,
+38,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+46,
+47,
+47,
+47,
+48,
+48,
+48,
+49,
+49,
+49,
+49,
+49,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+49,
+49,
+49,
+49,
+49,
+48,
+48,
+48,
+48,
+47,
+47,
+47,
+47,
+46,
+46,
+45,
+45,
+45,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+37,
+36,
+36,
+35,
+35,
+34,
+33,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+46,
+47,
+48,
+48,
+49,
+50,
+50,
+51,
+51,
+52,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+56,
+57,
+57,
+57,
+58,
+58,
+58,
+58,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+58,
+58,
+58,
+58,
+57,
+57,
+57,
+56,
+56,
+56,
+55,
+55,
+55,
+54,
+54,
+53,
+53,
+52,
+52,
+51,
+51,
+50,
+50,
+49,
+49,
+48,
+48,
+47,
+46,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+46,
+47,
+48,
+49,
+50,
+50,
+51,
+52,
+53,
+53,
+54,
+55,
+55,
+56,
+57,
+57,
+58,
+58,
+59,
+60,
+60,
+61,
+61,
+62,
+62,
+63,
+63,
+63,
+64,
+64,
+64,
+65,
+65,
+65,
+66,
+66,
+66,
+66,
+66,
+66,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+66,
+66,
+66,
+66,
+66,
+66,
+65,
+65,
+65,
+64,
+64,
+64,
+63,
+63,
+63,
+62,
+62,
+61,
+61,
+60,
+60,
+59,
+59,
+58,
+57,
+57,
+56,
+55,
+55,
+54,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+5,
+6,
+6,
+7,
+8,
+8,
+9,
+9,
+9,
+10,
+10,
+10,
+11,
+11,
+11,
+11,
+11,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+15,
+15,
+16,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+19,
+20,
+20,
+20,
+21,
+21,
+21,
+22,
+22,
+22,
+22,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+24,
+24,
+24,
+24,
+24,
+24,
+24,
+24,
+24,
+24,
+24,
+24,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+22,
+22,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+20,
+19,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+17,
+18,
+19,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+25,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+30,
+31,
+31,
+32,
+32,
+32,
+32,
+33,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+33,
+33,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+28,
+27,
+27,
+26,
+26,
+26,
+25,
+25,
+25,
+24,
+24,
+23,
+23,
+23,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+37,
+38,
+38,
+39,
+39,
+39,
+40,
+40,
+41,
+41,
+41,
+42,
+42,
+42,
+43,
+43,
+43,
+44,
+44,
+44,
+44,
+44,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+44,
+44,
+44,
+44,
+44,
+43,
+43,
+43,
+43,
+42,
+42,
+42,
+42,
+41,
+41,
+41,
+40,
+40,
+40,
+39,
+39,
+38,
+38,
+38,
+37,
+37,
+36,
+36,
+35,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+47,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+50,
+51,
+51,
+51,
+52,
+52,
+52,
+52,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+52,
+52,
+52,
+52,
+52,
+51,
+51,
+51,
+50,
+50,
+50,
+49,
+49,
+49,
+48,
+48,
+47,
+47,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+39,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+40,
+41,
+42,
+43,
+44,
+45,
+45,
+46,
+47,
+48,
+48,
+49,
+50,
+50,
+51,
+52,
+52,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+59,
+59,
+59,
+60,
+60,
+60,
+61,
+61,
+61,
+61,
+62,
+62,
+62,
+62,
+62,
+62,
+62,
+62,
+63,
+63,
+63,
+63,
+62,
+62,
+62,
+62,
+62,
+62,
+62,
+62,
+61,
+61,
+61,
+61,
+60,
+60,
+60,
+59,
+59,
+58,
+58,
+58,
+57,
+57,
+56,
+56,
+55,
+55,
+54,
+54,
+53,
+52,
+52,
+51,
+51,
+50,
+49,
+49,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+48,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+55,
+55,
+56,
+57,
+57,
+58,
+59,
+59,
+60,
+61,
+61,
+62,
+62,
+63,
+63,
+64,
+64,
+65,
+65,
+65,
+66,
+66,
+66,
+67,
+67,
+67,
+68,
+68,
+68,
+68,
+68,
+68,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+68,
+68,
+68,
+68,
+68,
+68,
+67,
+67,
+67,
+67,
+66,
+66,
+65,
+65,
+65,
+64,
+64,
+63,
+63,
+62,
+62,
+61,
+61,
+60,
+59,
+59,
+58,
+57,
+57,
+56,
+0,
+0,
+1,
+2,
+3,
+4,
+4,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+9,
+9,
+9,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+17,
+17,
+18,
+18,
+18,
+19,
+19,
+19,
+19,
+20,
+20,
+20,
+20,
+20,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+19,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+27,
+28,
+28,
+28,
+29,
+29,
+29,
+29,
+30,
+30,
+30,
+30,
+30,
+30,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+29,
+29,
+29,
+29,
+29,
+28,
+28,
+28,
+28,
+27,
+27,
+27,
+27,
+26,
+26,
+26,
+25,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+23,
+22,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+19,
+20,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+35,
+34,
+34,
+33,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+30,
+31,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+42,
+43,
+43,
+43,
+44,
+44,
+45,
+45,
+45,
+46,
+46,
+46,
+47,
+47,
+47,
+47,
+47,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+47,
+47,
+47,
+47,
+47,
+46,
+46,
+46,
+46,
+45,
+45,
+45,
+44,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+37,
+37,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+27,
+26,
+25,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+36,
+37,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+44,
+45,
+45,
+46,
+46,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+51,
+51,
+52,
+52,
+53,
+53,
+53,
+54,
+54,
+54,
+55,
+55,
+55,
+55,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+55,
+55,
+55,
+55,
+54,
+54,
+54,
+54,
+53,
+53,
+53,
+52,
+52,
+51,
+51,
+50,
+50,
+50,
+49,
+49,
+48,
+48,
+47,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+43,
+42,
+41,
+41,
+40,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+42,
+43,
+44,
+45,
+46,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+51,
+52,
+53,
+53,
+54,
+55,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+59,
+59,
+60,
+60,
+60,
+61,
+61,
+61,
+62,
+62,
+62,
+62,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+62,
+62,
+62,
+62,
+61,
+61,
+61,
+60,
+60,
+60,
+59,
+59,
+58,
+58,
+57,
+57,
+56,
+56,
+55,
+55,
+54,
+54,
+53,
+52,
+52,
+51,
+50,
+50,
+49,
+48,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+46,
+47,
+48,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+55,
+56,
+56,
+57,
+58,
+59,
+59,
+60,
+61,
+61,
+62,
+63,
+63,
+64,
+65,
+65,
+66,
+66,
+67,
+67,
+68,
+68,
+68,
+69,
+69,
+69,
+70,
+70,
+70,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+70,
+70,
+70,
+69,
+69,
+69,
+68,
+68,
+67,
+67,
+66,
+66,
+65,
+65,
+64,
+64,
+63,
+62,
+62,
+61,
+60,
+59,
+59,
+58,
+57,
+0,
+0,
+1,
+2,
+3,
+4,
+4,
+5,
+6,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+8,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+7,
+8,
+9,
+9,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+16,
+16,
+17,
+17,
+17,
+17,
+18,
+18,
+18,
+18,
+18,
+18,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+16,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+21,
+22,
+22,
+23,
+23,
+24,
+24,
+24,
+25,
+25,
+25,
+26,
+26,
+26,
+26,
+27,
+27,
+27,
+27,
+27,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+26,
+26,
+26,
+26,
+26,
+25,
+25,
+25,
+25,
+24,
+24,
+24,
+24,
+23,
+23,
+23,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+10,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+25,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+32,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+27,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+16,
+16,
+16,
+15,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+35,
+35,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+40,
+41,
+41,
+42,
+42,
+42,
+42,
+43,
+43,
+43,
+43,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+43,
+43,
+43,
+43,
+43,
+42,
+42,
+42,
+42,
+41,
+41,
+41,
+40,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+37,
+37,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+38,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+43,
+44,
+44,
+45,
+45,
+46,
+47,
+47,
+48,
+48,
+48,
+49,
+49,
+50,
+50,
+50,
+51,
+51,
+51,
+51,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+51,
+51,
+51,
+51,
+50,
+50,
+50,
+49,
+49,
+49,
+48,
+48,
+48,
+47,
+47,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+34,
+34,
+33,
+33,
+32,
+31,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+40,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+46,
+47,
+48,
+49,
+49,
+50,
+50,
+51,
+52,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+56,
+57,
+57,
+57,
+58,
+58,
+58,
+58,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+58,
+58,
+58,
+58,
+57,
+57,
+57,
+56,
+56,
+56,
+55,
+55,
+54,
+54,
+53,
+53,
+52,
+52,
+51,
+51,
+50,
+49,
+49,
+48,
+48,
+47,
+46,
+46,
+45,
+44,
+44,
+43,
+42,
+42,
+41,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+54,
+55,
+56,
+56,
+57,
+58,
+58,
+59,
+59,
+60,
+60,
+61,
+61,
+62,
+62,
+63,
+63,
+63,
+64,
+64,
+64,
+64,
+65,
+65,
+65,
+65,
+65,
+66,
+66,
+66,
+66,
+66,
+66,
+66,
+66,
+66,
+65,
+65,
+65,
+65,
+65,
+65,
+64,
+64,
+64,
+63,
+63,
+63,
+62,
+62,
+61,
+61,
+60,
+60,
+59,
+59,
+58,
+58,
+57,
+56,
+56,
+55,
+54,
+54,
+53,
+52,
+52,
+51,
+50,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+47,
+48,
+49,
+50,
+51,
+51,
+52,
+53,
+54,
+55,
+55,
+56,
+57,
+58,
+58,
+59,
+60,
+61,
+61,
+62,
+62,
+63,
+64,
+64,
+65,
+65,
+66,
+66,
+67,
+67,
+68,
+68,
+68,
+69,
+69,
+69,
+70,
+70,
+70,
+70,
+70,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+71,
+70,
+70,
+70,
+70,
+70,
+69,
+69,
+69,
+68,
+68,
+67,
+67,
+66,
+66,
+65,
+65,
+64,
+64,
+63,
+63,
+62,
+61,
+61,
+60,
+59,
+58,
+58,
+57,
+0,
+0,
+1,
+2,
+3,
+4,
+4,
+5,
+5,
+6,
+6,
+7,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+7,
+8,
+9,
+9,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+16,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+16,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+22,
+22,
+22,
+23,
+23,
+23,
+24,
+24,
+24,
+24,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+24,
+24,
+24,
+24,
+24,
+23,
+23,
+23,
+23,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+18,
+19,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+29,
+30,
+30,
+30,
+31,
+31,
+31,
+31,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+31,
+30,
+30,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+28,
+27,
+27,
+27,
+26,
+26,
+25,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+30,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+37,
+38,
+38,
+39,
+39,
+39,
+39,
+40,
+40,
+40,
+40,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+40,
+40,
+40,
+40,
+40,
+40,
+39,
+39,
+39,
+38,
+38,
+38,
+38,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+36,
+37,
+38,
+38,
+39,
+40,
+40,
+41,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+45,
+45,
+45,
+46,
+46,
+46,
+46,
+47,
+47,
+47,
+47,
+47,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+47,
+47,
+47,
+47,
+47,
+47,
+46,
+46,
+46,
+45,
+45,
+45,
+44,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+39,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+35,
+36,
+37,
+38,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+43,
+44,
+45,
+45,
+46,
+47,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+50,
+51,
+51,
+52,
+52,
+52,
+53,
+53,
+53,
+53,
+53,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+54,
+53,
+53,
+53,
+53,
+52,
+52,
+52,
+51,
+51,
+51,
+50,
+50,
+50,
+49,
+49,
+48,
+48,
+47,
+47,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+42,
+42,
+41,
+41,
+40,
+39,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+44,
+45,
+45,
+46,
+47,
+48,
+48,
+49,
+50,
+50,
+51,
+52,
+52,
+53,
+53,
+54,
+55,
+55,
+56,
+56,
+57,
+57,
+57,
+58,
+58,
+59,
+59,
+59,
+60,
+60,
+60,
+60,
+61,
+61,
+61,
+61,
+61,
+61,
+61,
+61,
+61,
+61,
+61,
+61,
+61,
+61,
+61,
+61,
+61,
+60,
+60,
+60,
+60,
+59,
+59,
+59,
+58,
+58,
+57,
+57,
+57,
+56,
+56,
+55,
+55,
+54,
+53,
+53,
+52,
+52,
+51,
+51,
+50,
+49,
+49,
+48,
+47,
+47,
+46,
+45,
+44,
+44,
+0,
+1,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+48,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+54,
+55,
+56,
+56,
+57,
+58,
+58,
+59,
+59,
+60,
+60,
+60,
+61,
+61,
+62,
+62,
+62,
+63,
+63,
+63,
+63,
+63,
+63,
+64,
+64,
+64,
+64,
+64,
+64,
+63,
+63,
+63,
+63,
+63,
+62,
+62,
+62,
+62,
+61,
+61,
+60,
+60,
+59,
+59,
+58,
+58,
+57,
+57,
+56,
+55,
+55,
+54,
+53,
+53,
+52,
+51,
+50,
+50,
+49,
+48,
+47,
+46,
+45,
+44,
+44,
+43,
+42,
+41,
+40,
+39,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+50,
+51,
+52,
+53,
+54,
+54,
+55,
+56,
+57,
+58,
+59,
+59,
+60,
+61,
+62,
+62,
+63,
+64,
+64,
+65,
+65,
+66,
+67,
+67,
+68,
+68,
+69,
+69,
+70,
+70,
+71,
+71,
+71,
+72,
+72,
+72,
+72,
+73,
+73,
+73,
+73,
+73,
+73,
+73,
+73,
+73,
+73,
+73,
+73,
+73,
+73,
+72,
+72,
+72,
+72,
+71,
+71,
+70,
+70,
+70,
+69,
+69,
+68,
+68,
+67,
+66,
+66,
+65,
+64,
+64,
+63,
+62,
+61,
+61,
+60,
+59,
+58,
+0,
+0,
+1,
+2,
+3,
+4,
+4,
+5,
+5,
+6,
+6,
+6,
+7,
+7,
+7,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+7,
+8,
+9,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+13,
+13,
+14,
+15,
+15,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+20,
+21,
+21,
+21,
+22,
+22,
+22,
+22,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+22,
+22,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+17,
+18,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+28,
+28,
+28,
+29,
+29,
+29,
+29,
+30,
+30,
+30,
+30,
+30,
+30,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+29,
+29,
+29,
+29,
+28,
+28,
+28,
+28,
+27,
+27,
+27,
+26,
+26,
+26,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+22,
+22,
+22,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+34,
+34,
+34,
+35,
+35,
+36,
+36,
+36,
+36,
+37,
+37,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+33,
+33,
+32,
+32,
+32,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+35,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+42,
+42,
+42,
+43,
+43,
+43,
+43,
+44,
+44,
+44,
+44,
+44,
+44,
+45,
+45,
+45,
+45,
+45,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+42,
+42,
+41,
+41,
+40,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+37,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+38,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+47,
+47,
+47,
+48,
+48,
+48,
+49,
+49,
+49,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+49,
+49,
+49,
+49,
+48,
+48,
+48,
+47,
+47,
+47,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+39,
+38,
+37,
+37,
+36,
+35,
+35,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+23,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+46,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+51,
+51,
+52,
+52,
+53,
+53,
+54,
+54,
+54,
+55,
+55,
+55,
+56,
+56,
+56,
+56,
+56,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+56,
+56,
+56,
+56,
+56,
+56,
+55,
+55,
+55,
+54,
+54,
+54,
+53,
+53,
+53,
+52,
+52,
+51,
+51,
+50,
+50,
+49,
+49,
+48,
+48,
+47,
+47,
+46,
+45,
+45,
+44,
+43,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+38,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+45,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+59,
+59,
+59,
+60,
+60,
+60,
+61,
+61,
+61,
+61,
+61,
+61,
+62,
+62,
+62,
+62,
+62,
+61,
+61,
+61,
+61,
+61,
+61,
+60,
+60,
+60,
+60,
+59,
+59,
+58,
+58,
+57,
+57,
+57,
+56,
+55,
+55,
+54,
+54,
+53,
+52,
+52,
+51,
+50,
+50,
+49,
+48,
+47,
+47,
+46,
+45,
+44,
+44,
+43,
+42,
+41,
+40,
+39,
+39,
+38,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+43,
+44,
+45,
+46,
+47,
+48,
+49,
+50,
+50,
+51,
+52,
+53,
+54,
+54,
+55,
+56,
+57,
+57,
+58,
+59,
+59,
+60,
+61,
+61,
+62,
+62,
+63,
+63,
+64,
+64,
+65,
+65,
+65,
+66,
+66,
+66,
+67,
+67,
+67,
+67,
+67,
+68,
+68,
+68,
+68,
+68,
+68,
+68,
+67,
+67,
+67,
+67,
+67,
+67,
+66,
+66,
+66,
+65,
+65,
+64,
+64,
+64,
+63,
+63,
+62,
+61,
+61,
+60,
+60,
+59,
+58,
+57,
+57,
+56,
+55,
+54,
+54,
+53,
+52,
+51,
+50,
+49,
+49,
+48,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+48,
+49,
+50,
+51,
+52,
+53,
+54,
+55,
+55,
+56,
+57,
+58,
+59,
+59,
+60,
+61,
+62,
+62,
+63,
+64,
+64,
+65,
+66,
+66,
+67,
+67,
+68,
+68,
+69,
+69,
+70,
+70,
+70,
+71,
+71,
+71,
+72,
+72,
+72,
+72,
+72,
+72,
+73,
+73,
+73,
+73,
+73,
+72,
+72,
+72,
+72,
+72,
+72,
+71,
+71,
+71,
+70,
+70,
+70,
+69,
+69,
+68,
+68,
+67,
+66,
+66,
+65,
+65,
+64,
+63,
+62,
+62,
+61,
+60,
+59,
+58,
+58,
+57,
+56,
+0,
+0,
+1,
+2,
+3,
+3,
+4,
+5,
+5,
+5,
+6,
+6,
+6,
+6,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+8,
+8,
+9,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+12,
+13,
+13,
+13,
+13,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+14,
+15,
+16,
+16,
+17,
+17,
+18,
+18,
+18,
+19,
+19,
+20,
+20,
+20,
+20,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+20,
+20,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+13,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+22,
+23,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+26,
+27,
+27,
+27,
+28,
+28,
+28,
+28,
+28,
+28,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+28,
+28,
+28,
+28,
+28,
+28,
+28,
+27,
+27,
+27,
+27,
+26,
+26,
+26,
+26,
+25,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+23,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+27,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+33,
+32,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+27,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+32,
+32,
+33,
+33,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+38,
+38,
+38,
+39,
+39,
+40,
+40,
+40,
+40,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+42,
+42,
+42,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+40,
+40,
+40,
+40,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+44,
+45,
+45,
+45,
+46,
+46,
+46,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+48,
+48,
+48,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+46,
+46,
+46,
+45,
+45,
+45,
+44,
+44,
+44,
+43,
+43,
+42,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+37,
+37,
+36,
+36,
+35,
+35,
+34,
+33,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+23,
+22,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+38,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+43,
+44,
+44,
+45,
+46,
+46,
+47,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+50,
+51,
+51,
+51,
+52,
+52,
+52,
+52,
+52,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+52,
+52,
+52,
+52,
+52,
+51,
+51,
+51,
+51,
+50,
+50,
+50,
+49,
+49,
+48,
+48,
+48,
+47,
+47,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+38,
+38,
+37,
+37,
+36,
+35,
+35,
+34,
+34,
+33,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+42,
+43,
+44,
+45,
+45,
+46,
+47,
+48,
+48,
+49,
+50,
+50,
+51,
+51,
+52,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+56,
+56,
+57,
+57,
+57,
+57,
+57,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+57,
+57,
+57,
+57,
+57,
+56,
+56,
+56,
+55,
+55,
+55,
+54,
+54,
+53,
+53,
+52,
+52,
+51,
+51,
+50,
+50,
+49,
+48,
+48,
+47,
+46,
+46,
+45,
+44,
+43,
+43,
+42,
+41,
+40,
+40,
+39,
+38,
+37,
+37,
+36,
+35,
+34,
+34,
+33,
+32,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+50,
+51,
+51,
+52,
+53,
+53,
+54,
+55,
+55,
+56,
+57,
+57,
+58,
+58,
+59,
+59,
+60,
+60,
+60,
+61,
+61,
+62,
+62,
+62,
+62,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+63,
+62,
+62,
+62,
+61,
+61,
+61,
+60,
+60,
+60,
+59,
+59,
+58,
+58,
+57,
+56,
+56,
+55,
+54,
+54,
+53,
+52,
+52,
+51,
+50,
+49,
+49,
+48,
+47,
+46,
+45,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+45,
+46,
+47,
+48,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+55,
+56,
+56,
+57,
+58,
+58,
+59,
+60,
+60,
+61,
+62,
+62,
+63,
+63,
+64,
+64,
+65,
+65,
+66,
+66,
+67,
+67,
+67,
+68,
+68,
+68,
+68,
+68,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+68,
+68,
+68,
+68,
+67,
+67,
+67,
+66,
+66,
+65,
+65,
+64,
+64,
+63,
+63,
+62,
+61,
+61,
+60,
+59,
+59,
+58,
+57,
+56,
+56,
+55,
+54,
+53,
+52,
+51,
+50,
+50,
+49,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+50,
+51,
+52,
+53,
+54,
+54,
+55,
+56,
+57,
+58,
+58,
+59,
+60,
+61,
+62,
+62,
+63,
+64,
+64,
+65,
+66,
+66,
+67,
+67,
+68,
+68,
+69,
+69,
+70,
+70,
+71,
+71,
+72,
+72,
+72,
+73,
+73,
+73,
+73,
+73,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+73,
+73,
+73,
+73,
+73,
+72,
+72,
+72,
+71,
+71,
+70,
+70,
+69,
+69,
+68,
+67,
+67,
+66,
+66,
+65,
+64,
+63,
+63,
+62,
+61,
+60,
+59,
+0,
+0,
+1,
+2,
+3,
+3,
+4,
+4,
+5,
+5,
+5,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+5,
+6,
+7,
+7,
+8,
+9,
+9,
+9,
+10,
+10,
+11,
+11,
+11,
+12,
+12,
+12,
+12,
+12,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+17,
+17,
+17,
+18,
+18,
+18,
+19,
+19,
+19,
+19,
+19,
+19,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+13,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+23,
+23,
+24,
+24,
+25,
+25,
+25,
+25,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+27,
+27,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+26,
+25,
+25,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+23,
+23,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+23,
+24,
+25,
+25,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+30,
+31,
+31,
+31,
+32,
+32,
+32,
+32,
+32,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+32,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+30,
+30,
+29,
+29,
+28,
+28,
+28,
+27,
+27,
+26,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+36,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+36,
+36,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+42,
+42,
+42,
+42,
+43,
+43,
+43,
+43,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+43,
+43,
+43,
+43,
+43,
+42,
+42,
+42,
+41,
+41,
+41,
+40,
+40,
+39,
+39,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+33,
+34,
+35,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+41,
+42,
+43,
+43,
+44,
+45,
+45,
+46,
+46,
+47,
+47,
+48,
+48,
+48,
+49,
+49,
+49,
+50,
+50,
+50,
+50,
+50,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+50,
+50,
+50,
+50,
+50,
+49,
+49,
+49,
+49,
+48,
+48,
+47,
+47,
+47,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+42,
+42,
+41,
+41,
+40,
+39,
+39,
+38,
+37,
+37,
+36,
+35,
+35,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+23,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+43,
+43,
+44,
+45,
+45,
+46,
+47,
+47,
+48,
+49,
+49,
+50,
+50,
+51,
+51,
+52,
+52,
+52,
+53,
+53,
+53,
+54,
+54,
+54,
+54,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+54,
+54,
+54,
+54,
+53,
+53,
+53,
+52,
+52,
+52,
+51,
+51,
+50,
+50,
+49,
+49,
+48,
+48,
+47,
+46,
+46,
+45,
+44,
+44,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+38,
+37,
+37,
+36,
+35,
+34,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+28,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+43,
+44,
+45,
+46,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+51,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+58,
+59,
+59,
+59,
+59,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+59,
+59,
+59,
+59,
+58,
+58,
+58,
+57,
+57,
+56,
+56,
+55,
+55,
+54,
+54,
+53,
+53,
+52,
+51,
+51,
+50,
+49,
+49,
+48,
+47,
+47,
+46,
+45,
+44,
+43,
+43,
+42,
+41,
+40,
+39,
+39,
+38,
+37,
+36,
+35,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+48,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+54,
+55,
+56,
+57,
+57,
+58,
+58,
+59,
+60,
+60,
+61,
+61,
+62,
+62,
+62,
+63,
+63,
+63,
+64,
+64,
+64,
+64,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+64,
+64,
+64,
+64,
+63,
+63,
+63,
+62,
+62,
+61,
+61,
+60,
+60,
+59,
+59,
+58,
+57,
+57,
+56,
+55,
+55,
+54,
+53,
+52,
+52,
+51,
+50,
+49,
+48,
+47,
+47,
+46,
+45,
+44,
+43,
+42,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+48,
+49,
+50,
+51,
+52,
+53,
+54,
+54,
+55,
+56,
+57,
+58,
+58,
+59,
+60,
+61,
+61,
+62,
+63,
+63,
+64,
+64,
+65,
+65,
+66,
+66,
+67,
+67,
+68,
+68,
+69,
+69,
+69,
+69,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+69,
+69,
+69,
+68,
+68,
+68,
+67,
+67,
+66,
+66,
+65,
+65,
+64,
+63,
+63,
+62,
+61,
+60,
+60,
+59,
+58,
+57,
+56,
+55,
+55,
+54,
+53,
+52,
+51,
+50,
+49,
+0,
+1,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+49,
+50,
+51,
+51,
+52,
+53,
+54,
+55,
+56,
+57,
+58,
+58,
+59,
+60,
+61,
+62,
+62,
+63,
+64,
+64,
+65,
+66,
+66,
+67,
+68,
+68,
+69,
+69,
+70,
+70,
+71,
+71,
+72,
+72,
+72,
+73,
+73,
+73,
+73,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+73,
+73,
+73,
+73,
+72,
+72,
+72,
+71,
+71,
+70,
+70,
+69,
+69,
+68,
+67,
+67,
+66,
+65,
+64,
+64,
+63,
+62,
+61,
+60,
+59,
+58,
+57,
+56,
+55,
+54,
+0,
+0,
+1,
+2,
+3,
+3,
+4,
+4,
+5,
+5,
+5,
+5,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+8,
+8,
+9,
+9,
+10,
+10,
+10,
+11,
+11,
+11,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+17,
+17,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+21,
+22,
+22,
+22,
+23,
+23,
+23,
+23,
+23,
+24,
+24,
+24,
+24,
+24,
+24,
+24,
+24,
+24,
+24,
+23,
+23,
+23,
+23,
+23,
+23,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+26,
+27,
+27,
+28,
+28,
+28,
+28,
+28,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+28,
+28,
+28,
+28,
+28,
+27,
+27,
+27,
+27,
+26,
+26,
+26,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+33,
+33,
+33,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+35,
+35,
+36,
+36,
+36,
+37,
+37,
+38,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+37,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+35,
+36,
+36,
+37,
+38,
+38,
+39,
+40,
+40,
+41,
+41,
+42,
+43,
+43,
+44,
+44,
+45,
+45,
+45,
+46,
+46,
+46,
+47,
+47,
+47,
+47,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+47,
+47,
+47,
+47,
+46,
+46,
+46,
+45,
+45,
+45,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+39,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+35,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+47,
+47,
+48,
+48,
+49,
+49,
+49,
+49,
+50,
+50,
+50,
+50,
+50,
+51,
+51,
+51,
+51,
+51,
+51,
+50,
+50,
+50,
+50,
+50,
+50,
+49,
+49,
+49,
+48,
+48,
+48,
+47,
+47,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+40,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+46,
+47,
+48,
+48,
+49,
+50,
+50,
+51,
+51,
+52,
+52,
+53,
+53,
+53,
+54,
+54,
+55,
+55,
+55,
+55,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+55,
+55,
+55,
+55,
+54,
+54,
+54,
+53,
+53,
+53,
+52,
+52,
+51,
+51,
+50,
+50,
+49,
+48,
+48,
+47,
+47,
+46,
+45,
+45,
+44,
+43,
+43,
+42,
+41,
+41,
+40,
+39,
+38,
+38,
+37,
+36,
+35,
+35,
+34,
+33,
+32,
+32,
+31,
+30,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+50,
+50,
+51,
+51,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+55,
+56,
+56,
+56,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+56,
+56,
+56,
+55,
+55,
+55,
+54,
+54,
+53,
+53,
+52,
+52,
+51,
+50,
+50,
+49,
+48,
+48,
+47,
+46,
+45,
+44,
+44,
+43,
+42,
+41,
+40,
+39,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+32,
+31,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+45,
+46,
+47,
+48,
+49,
+50,
+51,
+51,
+52,
+53,
+54,
+54,
+55,
+56,
+57,
+57,
+58,
+59,
+59,
+60,
+60,
+61,
+61,
+62,
+62,
+63,
+63,
+64,
+64,
+64,
+65,
+65,
+65,
+65,
+65,
+65,
+66,
+66,
+66,
+66,
+66,
+65,
+65,
+65,
+65,
+65,
+65,
+64,
+64,
+64,
+63,
+63,
+62,
+62,
+61,
+61,
+60,
+59,
+59,
+58,
+57,
+57,
+56,
+55,
+54,
+54,
+53,
+52,
+51,
+50,
+49,
+48,
+47,
+46,
+45,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+46,
+47,
+48,
+49,
+50,
+51,
+52,
+53,
+53,
+54,
+55,
+56,
+57,
+58,
+58,
+59,
+60,
+60,
+61,
+62,
+63,
+63,
+64,
+64,
+65,
+66,
+66,
+67,
+67,
+67,
+68,
+68,
+69,
+69,
+69,
+70,
+70,
+70,
+70,
+70,
+70,
+71,
+71,
+71,
+71,
+70,
+70,
+70,
+70,
+70,
+70,
+69,
+69,
+69,
+68,
+68,
+67,
+67,
+66,
+66,
+65,
+65,
+64,
+63,
+63,
+62,
+61,
+60,
+60,
+59,
+58,
+57,
+56,
+55,
+54,
+53,
+52,
+51,
+50,
+49,
+48,
+0,
+1,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+49,
+50,
+51,
+51,
+52,
+53,
+54,
+55,
+56,
+57,
+58,
+58,
+59,
+60,
+61,
+62,
+62,
+63,
+64,
+65,
+65,
+66,
+67,
+67,
+68,
+68,
+69,
+70,
+70,
+71,
+71,
+72,
+72,
+72,
+73,
+73,
+73,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+73,
+73,
+73,
+72,
+72,
+72,
+71,
+71,
+70,
+69,
+69,
+68,
+67,
+67,
+66,
+65,
+64,
+63,
+63,
+62,
+61,
+60,
+59,
+58,
+57,
+56,
+55,
+54,
+0,
+0,
+1,
+2,
+3,
+3,
+4,
+4,
+4,
+5,
+5,
+5,
+5,
+5,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+5,
+6,
+7,
+7,
+8,
+8,
+9,
+9,
+10,
+10,
+10,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+21,
+22,
+22,
+22,
+22,
+22,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+22,
+22,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+23,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+27,
+28,
+28,
+28,
+28,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+28,
+28,
+28,
+28,
+28,
+27,
+27,
+27,
+26,
+26,
+26,
+25,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+19,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+32,
+33,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+33,
+32,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+30,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+35,
+35,
+36,
+36,
+37,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+36,
+36,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+42,
+42,
+43,
+43,
+43,
+44,
+44,
+44,
+44,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+44,
+44,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+37,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+32,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+38,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+47,
+47,
+48,
+48,
+48,
+49,
+49,
+49,
+49,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+49,
+49,
+49,
+49,
+48,
+48,
+48,
+47,
+47,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+34,
+34,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+22,
+22,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+42,
+43,
+44,
+44,
+45,
+46,
+46,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+50,
+51,
+51,
+52,
+52,
+52,
+52,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+52,
+52,
+52,
+52,
+51,
+51,
+51,
+50,
+50,
+49,
+49,
+48,
+48,
+47,
+47,
+46,
+45,
+45,
+44,
+43,
+43,
+42,
+41,
+41,
+40,
+39,
+38,
+38,
+37,
+36,
+35,
+35,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+50,
+50,
+51,
+52,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+57,
+57,
+57,
+58,
+58,
+58,
+58,
+58,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+58,
+58,
+58,
+58,
+57,
+57,
+57,
+56,
+56,
+56,
+55,
+55,
+54,
+54,
+53,
+52,
+52,
+51,
+51,
+50,
+49,
+48,
+48,
+47,
+46,
+45,
+45,
+44,
+43,
+42,
+41,
+41,
+40,
+39,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+30,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+42,
+43,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+50,
+50,
+51,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+58,
+59,
+59,
+59,
+59,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+59,
+59,
+59,
+59,
+58,
+58,
+58,
+57,
+57,
+56,
+56,
+55,
+55,
+54,
+54,
+53,
+52,
+52,
+51,
+50,
+49,
+49,
+48,
+47,
+46,
+45,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+0,
+1,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+46,
+47,
+48,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+55,
+55,
+56,
+57,
+57,
+58,
+59,
+59,
+60,
+61,
+61,
+62,
+62,
+63,
+63,
+63,
+64,
+64,
+64,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+64,
+64,
+64,
+63,
+63,
+63,
+62,
+62,
+61,
+60,
+60,
+59,
+58,
+58,
+57,
+56,
+55,
+55,
+54,
+53,
+52,
+51,
+50,
+49,
+48,
+47,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+0,
+1,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+52,
+53,
+54,
+55,
+55,
+56,
+57,
+58,
+59,
+59,
+60,
+61,
+62,
+62,
+63,
+64,
+64,
+65,
+65,
+66,
+66,
+67,
+67,
+68,
+68,
+69,
+69,
+69,
+69,
+70,
+70,
+70,
+70,
+70,
+71,
+71,
+71,
+71,
+71,
+70,
+70,
+70,
+70,
+70,
+69,
+69,
+69,
+68,
+68,
+68,
+67,
+67,
+66,
+65,
+65,
+64,
+63,
+63,
+62,
+61,
+61,
+60,
+59,
+58,
+57,
+56,
+55,
+54,
+53,
+52,
+52,
+51,
+50,
+49,
+48,
+0,
+1,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+49,
+50,
+51,
+52,
+53,
+54,
+54,
+55,
+56,
+57,
+58,
+59,
+60,
+60,
+61,
+62,
+63,
+64,
+64,
+65,
+66,
+66,
+67,
+68,
+68,
+69,
+69,
+70,
+70,
+71,
+71,
+72,
+72,
+73,
+73,
+73,
+73,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+73,
+73,
+73,
+72,
+72,
+71,
+71,
+70,
+70,
+69,
+69,
+68,
+67,
+67,
+66,
+65,
+64,
+63,
+62,
+61,
+61,
+60,
+59,
+58,
+57,
+56,
+54,
+53,
+52,
+51,
+0,
+0,
+1,
+2,
+3,
+3,
+4,
+4,
+4,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+5,
+6,
+7,
+7,
+8,
+8,
+9,
+9,
+9,
+10,
+10,
+10,
+10,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+17,
+17,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+13,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+22,
+23,
+23,
+24,
+24,
+25,
+25,
+25,
+26,
+26,
+26,
+26,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+26,
+26,
+26,
+26,
+26,
+25,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+23,
+22,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+23,
+24,
+25,
+25,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+30,
+31,
+31,
+31,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+33,
+33,
+33,
+33,
+33,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+18,
+19,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+34,
+34,
+34,
+35,
+35,
+35,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+37,
+37,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+33,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+38,
+38,
+38,
+39,
+39,
+40,
+40,
+40,
+40,
+41,
+41,
+41,
+41,
+41,
+41,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+41,
+41,
+41,
+41,
+41,
+41,
+40,
+40,
+40,
+40,
+39,
+39,
+39,
+38,
+38,
+37,
+37,
+37,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+39,
+40,
+41,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+45,
+45,
+45,
+45,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+47,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+45,
+45,
+45,
+45,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+37,
+37,
+36,
+35,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+42,
+43,
+44,
+44,
+45,
+46,
+46,
+47,
+47,
+48,
+48,
+49,
+49,
+49,
+50,
+50,
+50,
+50,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+50,
+50,
+50,
+50,
+49,
+49,
+49,
+48,
+48,
+48,
+47,
+47,
+46,
+46,
+45,
+44,
+44,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+38,
+37,
+37,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+30,
+29,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+15,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+40,
+41,
+42,
+42,
+43,
+44,
+45,
+45,
+46,
+47,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+51,
+51,
+51,
+52,
+52,
+52,
+52,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+52,
+52,
+52,
+52,
+52,
+51,
+51,
+50,
+50,
+50,
+49,
+49,
+48,
+47,
+47,
+46,
+46,
+45,
+44,
+43,
+43,
+42,
+41,
+40,
+40,
+39,
+38,
+37,
+36,
+35,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+29,
+28,
+27,
+26,
+25,
+24,
+24,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+43,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+49,
+50,
+51,
+51,
+52,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+56,
+56,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+56,
+56,
+56,
+56,
+55,
+55,
+54,
+54,
+53,
+53,
+52,
+52,
+51,
+50,
+50,
+49,
+48,
+48,
+47,
+46,
+45,
+44,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+21,
+0,
+1,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+46,
+47,
+48,
+49,
+50,
+51,
+51,
+52,
+53,
+54,
+54,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+59,
+59,
+60,
+60,
+60,
+61,
+61,
+61,
+61,
+61,
+62,
+62,
+62,
+62,
+62,
+62,
+61,
+61,
+61,
+61,
+61,
+60,
+60,
+60,
+59,
+59,
+58,
+58,
+57,
+56,
+56,
+55,
+54,
+54,
+53,
+52,
+51,
+51,
+50,
+49,
+48,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+27,
+0,
+1,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+52,
+53,
+54,
+55,
+55,
+56,
+57,
+58,
+58,
+59,
+60,
+60,
+61,
+62,
+62,
+63,
+63,
+64,
+64,
+65,
+65,
+65,
+66,
+66,
+66,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+67,
+66,
+66,
+66,
+65,
+65,
+65,
+64,
+64,
+63,
+62,
+62,
+61,
+61,
+60,
+59,
+58,
+58,
+57,
+56,
+55,
+54,
+53,
+52,
+51,
+50,
+49,
+48,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+0,
+1,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+55,
+56,
+57,
+57,
+58,
+59,
+60,
+61,
+61,
+62,
+63,
+63,
+64,
+65,
+65,
+66,
+66,
+67,
+67,
+68,
+68,
+68,
+69,
+69,
+69,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+70,
+69,
+69,
+69,
+68,
+68,
+68,
+67,
+67,
+66,
+66,
+65,
+64,
+64,
+63,
+62,
+61,
+60,
+60,
+59,
+58,
+57,
+56,
+55,
+54,
+53,
+52,
+51,
+50,
+49,
+48,
+47,
+45,
+44,
+43,
+42,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+49,
+50,
+51,
+52,
+53,
+54,
+55,
+56,
+56,
+57,
+58,
+59,
+60,
+61,
+62,
+62,
+63,
+64,
+65,
+65,
+66,
+67,
+67,
+68,
+69,
+69,
+70,
+70,
+71,
+71,
+72,
+72,
+73,
+73,
+73,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+74,
+73,
+73,
+73,
+72,
+72,
+71,
+71,
+70,
+69,
+69,
+68,
+67,
+66,
+66,
+65,
+64,
+63,
+62,
+61,
+60,
+59,
+58,
+57,
+56,
+54,
+53,
+52,
+51,
+50,
+49,
+0,
+0,
+1,
+2,
+3,
+3,
+4,
+4,
+4,
+4,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+9,
+9,
+9,
+9,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+8,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+13,
+13,
+14,
+14,
+15,
+16,
+16,
+17,
+17,
+17,
+18,
+18,
+18,
+18,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+22,
+23,
+23,
+23,
+24,
+24,
+24,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+24,
+24,
+24,
+24,
+23,
+23,
+23,
+22,
+22,
+22,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+25,
+26,
+27,
+27,
+27,
+28,
+28,
+29,
+29,
+29,
+29,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+29,
+29,
+29,
+29,
+28,
+28,
+28,
+27,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+32,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+35,
+35,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+37,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+38,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+36,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+42,
+42,
+42,
+43,
+43,
+43,
+43,
+43,
+43,
+44,
+44,
+44,
+43,
+43,
+43,
+43,
+43,
+43,
+42,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+37,
+36,
+35,
+35,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+31,
+32,
+33,
+34,
+35,
+35,
+36,
+37,
+38,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+44,
+45,
+45,
+45,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+45,
+45,
+45,
+44,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+29,
+29,
+28,
+27,
+26,
+25,
+24,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+43,
+43,
+44,
+45,
+46,
+46,
+47,
+47,
+48,
+49,
+49,
+50,
+50,
+50,
+51,
+51,
+52,
+52,
+52,
+52,
+52,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+52,
+52,
+52,
+52,
+51,
+51,
+51,
+50,
+50,
+49,
+49,
+48,
+48,
+47,
+46,
+46,
+45,
+44,
+44,
+43,
+42,
+41,
+40,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+21,
+22,
+23,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+44,
+45,
+45,
+46,
+47,
+47,
+48,
+49,
+49,
+50,
+50,
+50,
+51,
+51,
+51,
+51,
+52,
+52,
+52,
+52,
+52,
+51,
+51,
+51,
+51,
+51,
+50,
+50,
+49,
+49,
+48,
+48,
+47,
+47,
+46,
+45,
+45,
+44,
+43,
+42,
+41,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+15,
+15,
+14,
+13,
+0,
+0,
+2,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+51,
+52,
+53,
+53,
+54,
+55,
+55,
+56,
+56,
+56,
+57,
+57,
+57,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+57,
+57,
+57,
+56,
+56,
+55,
+55,
+54,
+53,
+53,
+52,
+51,
+51,
+50,
+49,
+48,
+47,
+46,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+18,
+17,
+0,
+0,
+2,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+54,
+55,
+55,
+56,
+57,
+57,
+57,
+58,
+58,
+59,
+59,
+59,
+59,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+59,
+59,
+59,
+59,
+58,
+58,
+57,
+57,
+56,
+56,
+55,
+55,
+54,
+53,
+53,
+52,
+51,
+50,
+49,
+48,
+47,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+0,
+1,
+2,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+54,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+58,
+59,
+59,
+59,
+59,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+60,
+59,
+59,
+59,
+59,
+58,
+58,
+57,
+57,
+56,
+56,
+55,
+55,
+54,
+53,
+52,
+52,
+51,
+50,
+49,
+48,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+0,
+0,
+1,
+2,
+3,
+3,
+3,
+4,
+4,
+4,
+4,
+4,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+4,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+9,
+9,
+9,
+9,
+9,
+9,
+10,
+10,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+7,
+8,
+9,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+13,
+14,
+14,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+9,
+10,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+21,
+22,
+22,
+22,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+26,
+27,
+27,
+28,
+28,
+28,
+28,
+28,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+28,
+28,
+28,
+28,
+28,
+27,
+27,
+27,
+27,
+26,
+26,
+25,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+32,
+32,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+32,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+32,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+31,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+40,
+41,
+41,
+41,
+41,
+41,
+41,
+42,
+42,
+42,
+41,
+41,
+41,
+41,
+41,
+41,
+40,
+40,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+37,
+36,
+36,
+35,
+35,
+34,
+33,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+31,
+32,
+32,
+32,
+32,
+32,
+32,
+33,
+33,
+33,
+33,
+32,
+32,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+31,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+32,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+38,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+40,
+41,
+41,
+41,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+41,
+41,
+41,
+41,
+40,
+40,
+39,
+39,
+39,
+38,
+38,
+37,
+37,
+36,
+35,
+35,
+34,
+34,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+35,
+36,
+36,
+37,
+38,
+38,
+39,
+39,
+40,
+41,
+41,
+42,
+42,
+42,
+43,
+43,
+44,
+44,
+44,
+44,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+44,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+37,
+37,
+36,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+38,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+46,
+47,
+47,
+47,
+47,
+47,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+47,
+47,
+47,
+47,
+47,
+46,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+39,
+39,
+38,
+37,
+37,
+36,
+35,
+34,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+23,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+36,
+36,
+37,
+38,
+39,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+44,
+45,
+45,
+45,
+46,
+46,
+46,
+46,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+46,
+46,
+46,
+46,
+45,
+45,
+45,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+30,
+29,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+15,
+15,
+16,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+46,
+47,
+47,
+47,
+47,
+47,
+48,
+48,
+48,
+48,
+48,
+48,
+47,
+47,
+47,
+47,
+47,
+46,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+39,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+29,
+29,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+43,
+44,
+45,
+46,
+46,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+51,
+51,
+52,
+52,
+52,
+52,
+52,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+52,
+52,
+52,
+52,
+52,
+51,
+51,
+50,
+50,
+50,
+49,
+49,
+48,
+47,
+47,
+46,
+46,
+45,
+44,
+43,
+43,
+42,
+41,
+40,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+0,
+0,
+1,
+2,
+2,
+3,
+3,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+4,
+5,
+6,
+6,
+7,
+7,
+7,
+8,
+8,
+8,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+7,
+8,
+9,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+12,
+13,
+13,
+13,
+13,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+9,
+10,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+16,
+16,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+21,
+21,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+23,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+26,
+26,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+26,
+26,
+26,
+26,
+25,
+25,
+25,
+24,
+24,
+23,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+30,
+31,
+31,
+31,
+31,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+28,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+33,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+36,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+35,
+36,
+36,
+36,
+36,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+36,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+41,
+41,
+41,
+42,
+42,
+42,
+42,
+42,
+42,
+41,
+41,
+41,
+41,
+41,
+40,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+37,
+36,
+35,
+35,
+34,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+39,
+40,
+41,
+41,
+42,
+42,
+43,
+43,
+43,
+44,
+44,
+44,
+44,
+44,
+45,
+45,
+45,
+45,
+45,
+44,
+44,
+44,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+29,
+29,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+42,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+47,
+47,
+47,
+48,
+48,
+48,
+49,
+49,
+49,
+49,
+49,
+49,
+49,
+49,
+49,
+49,
+49,
+48,
+48,
+48,
+47,
+47,
+47,
+46,
+46,
+45,
+45,
+44,
+43,
+43,
+42,
+41,
+41,
+40,
+39,
+38,
+37,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+24,
+23,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+35,
+36,
+37,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+47,
+47,
+48,
+48,
+48,
+49,
+49,
+49,
+49,
+49,
+50,
+50,
+50,
+50,
+49,
+49,
+49,
+49,
+49,
+48,
+48,
+48,
+47,
+47,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+42,
+42,
+41,
+40,
+39,
+38,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+50,
+50,
+51,
+52,
+52,
+53,
+53,
+54,
+54,
+54,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+54,
+54,
+54,
+54,
+53,
+53,
+52,
+52,
+51,
+50,
+50,
+49,
+48,
+48,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+13,
+0,
+1,
+2,
+3,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+44,
+45,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+51,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+56,
+56,
+56,
+56,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+57,
+56,
+56,
+56,
+55,
+55,
+54,
+54,
+53,
+53,
+52,
+51,
+50,
+50,
+49,
+48,
+47,
+46,
+45,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+17,
+17,
+18,
+18,
+18,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+33,
+34,
+35,
+36,
+37,
+38,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+50,
+50,
+51,
+52,
+52,
+52,
+53,
+53,
+53,
+54,
+54,
+54,
+54,
+54,
+54,
+53,
+53,
+53,
+53,
+52,
+52,
+51,
+51,
+50,
+50,
+49,
+48,
+48,
+47,
+46,
+45,
+44,
+43,
+42,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+14,
+14,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+35,
+36,
+37,
+38,
+39,
+40,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+52,
+52,
+53,
+53,
+54,
+54,
+54,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+54,
+54,
+53,
+53,
+53,
+52,
+51,
+51,
+50,
+49,
+49,
+48,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+15,
+14,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+16,
+16,
+17,
+17,
+17,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+29,
+30,
+31,
+32,
+33,
+35,
+36,
+37,
+38,
+40,
+41,
+42,
+43,
+44,
+46,
+47,
+48,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+54,
+55,
+56,
+56,
+57,
+57,
+57,
+57,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+57,
+57,
+57,
+56,
+56,
+56,
+55,
+55,
+54,
+53,
+53,
+52,
+51,
+50,
+49,
+49,
+48,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+17,
+17,
+0,
+0,
+1,
+2,
+2,
+3,
+3,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+4,
+5,
+6,
+6,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+7,
+8,
+9,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+12,
+12,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+17,
+18,
+18,
+18,
+19,
+19,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+20,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+23,
+24,
+24,
+25,
+25,
+25,
+26,
+26,
+26,
+26,
+26,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+26,
+26,
+26,
+26,
+26,
+25,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+23,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+19,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+29,
+30,
+30,
+30,
+30,
+30,
+31,
+31,
+31,
+31,
+31,
+31,
+30,
+30,
+30,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+25,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+27,
+27,
+28,
+28,
+28,
+29,
+29,
+29,
+29,
+29,
+30,
+30,
+30,
+30,
+30,
+30,
+29,
+29,
+29,
+29,
+29,
+28,
+28,
+28,
+28,
+27,
+27,
+26,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+30,
+31,
+31,
+31,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+35,
+36,
+36,
+36,
+37,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+30,
+31,
+32,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+36,
+36,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+14,
+15,
+16,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+22,
+22,
+23,
+23,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+33,
+33,
+32,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+37,
+37,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+14,
+15,
+16,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+35,
+35,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+41,
+41,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+41,
+41,
+41,
+41,
+40,
+40,
+39,
+39,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+18,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+42,
+42,
+42,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+42,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+34,
+34,
+33,
+32,
+32,
+31,
+30,
+29,
+29,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+14,
+14,
+15,
+15,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+42,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+46,
+47,
+47,
+47,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+47,
+47,
+47,
+47,
+46,
+46,
+45,
+45,
+45,
+44,
+43,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+38,
+37,
+37,
+36,
+35,
+34,
+33,
+32,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+13,
+13,
+14,
+15,
+15,
+16,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+35,
+35,
+36,
+37,
+37,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+42,
+42,
+42,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+42,
+42,
+42,
+41,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+37,
+37,
+36,
+36,
+35,
+34,
+34,
+33,
+32,
+32,
+31,
+30,
+29,
+29,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+15,
+15,
+16,
+17,
+17,
+18,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+42,
+43,
+44,
+44,
+45,
+45,
+46,
+46,
+46,
+47,
+47,
+47,
+47,
+48,
+48,
+48,
+48,
+48,
+47,
+47,
+47,
+47,
+47,
+46,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+43,
+42,
+41,
+41,
+40,
+39,
+39,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+32,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+14,
+14,
+15,
+16,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+38,
+39,
+40,
+40,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+44,
+45,
+45,
+45,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+45,
+45,
+45,
+45,
+44,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+34,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+23,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+9,
+10,
+11,
+11,
+11,
+12,
+12,
+12,
+12,
+12,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+21,
+21,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+44,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+44,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+39,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+24,
+23,
+23,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+0,
+0,
+1,
+2,
+2,
+3,
+3,
+3,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+4,
+5,
+6,
+6,
+6,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+8,
+8,
+9,
+10,
+10,
+10,
+11,
+11,
+11,
+12,
+12,
+12,
+12,
+12,
+12,
+13,
+13,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+17,
+17,
+18,
+18,
+18,
+18,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+19,
+18,
+18,
+18,
+18,
+17,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+22,
+23,
+23,
+24,
+24,
+24,
+24,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+25,
+24,
+24,
+24,
+24,
+23,
+23,
+23,
+22,
+22,
+22,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+28,
+28,
+28,
+28,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+29,
+28,
+28,
+28,
+28,
+27,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+31,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+35,
+36,
+36,
+36,
+36,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+40,
+41,
+41,
+41,
+41,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+41,
+41,
+41,
+41,
+40,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+37,
+36,
+35,
+35,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+36,
+36,
+37,
+37,
+37,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+37,
+37,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+36,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+40,
+41,
+41,
+41,
+41,
+41,
+42,
+42,
+42,
+41,
+41,
+41,
+41,
+41,
+41,
+40,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+37,
+36,
+35,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+30,
+29,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+36,
+37,
+37,
+38,
+39,
+40,
+40,
+41,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+44,
+44,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+44,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+39,
+39,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+5,
+5,
+5,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+17,
+17,
+17,
+18,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+35,
+35,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+37,
+36,
+36,
+35,
+34,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+14,
+15,
+16,
+16,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+32,
+33,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+44,
+45,
+46,
+46,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+50,
+50,
+50,
+49,
+49,
+48,
+47,
+47,
+46,
+45,
+45,
+44,
+43,
+42,
+41,
+40,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+10,
+10,
+9,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+45,
+46,
+47,
+47,
+48,
+48,
+49,
+49,
+49,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+49,
+49,
+49,
+48,
+48,
+47,
+47,
+46,
+46,
+45,
+44,
+44,
+43,
+42,
+41,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+10,
+10,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+16,
+16,
+17,
+17,
+17,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+30,
+31,
+32,
+33,
+34,
+35,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+48,
+48,
+49,
+50,
+50,
+51,
+51,
+51,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+51,
+51,
+51,
+50,
+50,
+49,
+49,
+48,
+47,
+47,
+46,
+45,
+44,
+43,
+42,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+14,
+15,
+15,
+16,
+17,
+17,
+17,
+18,
+18,
+18,
+19,
+19,
+19,
+20,
+20,
+20,
+21,
+21,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+46,
+47,
+48,
+49,
+49,
+50,
+50,
+51,
+51,
+51,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+52,
+51,
+51,
+51,
+50,
+50,
+49,
+48,
+48,
+47,
+46,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+14,
+13,
+13,
+12,
+11,
+11,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+29,
+30,
+31,
+32,
+34,
+35,
+36,
+37,
+39,
+40,
+41,
+42,
+43,
+45,
+46,
+47,
+48,
+49,
+49,
+50,
+51,
+52,
+53,
+53,
+54,
+54,
+55,
+55,
+55,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+55,
+55,
+55,
+54,
+54,
+54,
+53,
+52,
+52,
+51,
+50,
+50,
+49,
+48,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+14,
+13,
+13,
+12,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+9,
+9,
+10,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+20,
+21,
+23,
+24,
+26,
+27,
+29,
+30,
+32,
+33,
+35,
+36,
+38,
+39,
+40,
+42,
+43,
+44,
+46,
+47,
+48,
+49,
+50,
+51,
+52,
+52,
+53,
+54,
+54,
+55,
+55,
+55,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+55,
+55,
+54,
+54,
+54,
+53,
+52,
+52,
+51,
+50,
+49,
+49,
+48,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+16,
+15,
+14,
+13,
+13,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+9,
+9,
+10,
+11,
+11,
+12,
+13,
+14,
+15,
+17,
+18,
+19,
+21,
+22,
+24,
+25,
+27,
+28,
+30,
+31,
+33,
+35,
+36,
+38,
+39,
+41,
+42,
+43,
+45,
+46,
+47,
+48,
+49,
+51,
+52,
+52,
+53,
+54,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+58,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+58,
+58,
+58,
+58,
+57,
+57,
+56,
+55,
+55,
+54,
+53,
+53,
+52,
+51,
+50,
+49,
+48,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+17,
+0,
+0,
+1,
+2,
+2,
+3,
+3,
+3,
+3,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+4,
+5,
+5,
+6,
+6,
+7,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+8,
+8,
+9,
+9,
+10,
+10,
+11,
+11,
+11,
+11,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+8,
+9,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+22,
+22,
+22,
+23,
+23,
+23,
+23,
+23,
+23,
+24,
+24,
+24,
+24,
+24,
+23,
+23,
+23,
+23,
+23,
+23,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+26,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+26,
+26,
+26,
+25,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+18,
+18,
+19,
+20,
+21,
+21,
+22,
+22,
+23,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+26,
+26,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+27,
+26,
+26,
+26,
+26,
+25,
+25,
+24,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+35,
+36,
+36,
+36,
+36,
+36,
+37,
+37,
+37,
+36,
+36,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+36,
+36,
+37,
+37,
+37,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+37,
+37,
+36,
+36,
+36,
+35,
+34,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+30,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+36,
+36,
+35,
+35,
+35,
+34,
+34,
+33,
+32,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+19,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+15,
+15,
+16,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+30,
+31,
+31,
+31,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+13,
+13,
+14,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+17,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+29,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+13,
+14,
+14,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+35,
+35,
+36,
+36,
+36,
+36,
+36,
+37,
+37,
+37,
+36,
+36,
+36,
+36,
+36,
+36,
+35,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+42,
+42,
+42,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+42,
+42,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+29,
+29,
+28,
+27,
+26,
+25,
+24,
+24,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+9,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+13,
+13,
+14,
+14,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+32,
+33,
+33,
+34,
+35,
+35,
+36,
+37,
+37,
+38,
+38,
+39,
+39,
+39,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+39,
+39,
+39,
+38,
+38,
+37,
+37,
+36,
+36,
+35,
+35,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+9,
+10,
+11,
+11,
+12,
+12,
+12,
+12,
+12,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+16,
+17,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+28,
+29,
+30,
+31,
+32,
+33,
+35,
+36,
+37,
+38,
+39,
+40,
+40,
+41,
+42,
+43,
+43,
+44,
+44,
+45,
+45,
+45,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+45,
+45,
+45,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+38,
+38,
+37,
+36,
+35,
+34,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+9,
+9,
+9,
+10,
+10,
+11,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+17,
+18,
+20,
+21,
+22,
+23,
+24,
+25,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+38,
+39,
+40,
+40,
+41,
+41,
+42,
+42,
+43,
+43,
+43,
+43,
+43,
+43,
+44,
+43,
+43,
+43,
+43,
+43,
+43,
+42,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+37,
+37,
+36,
+35,
+35,
+34,
+33,
+32,
+32,
+31,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+24,
+24,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+8,
+8,
+8,
+9,
+9,
+10,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+21,
+22,
+23,
+24,
+26,
+27,
+28,
+30,
+31,
+32,
+33,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+43,
+44,
+45,
+45,
+45,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+46,
+45,
+45,
+44,
+44,
+44,
+43,
+42,
+42,
+41,
+41,
+40,
+39,
+38,
+38,
+37,
+36,
+35,
+34,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+10,
+10,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+8,
+8,
+8,
+9,
+10,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+23,
+24,
+25,
+26,
+27,
+28,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+38,
+39,
+40,
+41,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+44,
+44,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+44,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+39,
+39,
+38,
+38,
+37,
+36,
+35,
+35,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+8,
+8,
+8,
+9,
+10,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+18,
+19,
+20,
+22,
+23,
+25,
+26,
+28,
+29,
+30,
+32,
+33,
+35,
+36,
+37,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+49,
+50,
+50,
+50,
+51,
+51,
+51,
+51,
+51,
+52,
+51,
+51,
+51,
+51,
+51,
+51,
+50,
+50,
+50,
+49,
+49,
+48,
+48,
+47,
+47,
+46,
+45,
+45,
+44,
+43,
+42,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+0,
+0,
+1,
+2,
+2,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+4,
+5,
+5,
+6,
+6,
+7,
+7,
+7,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+8,
+8,
+9,
+9,
+10,
+10,
+10,
+11,
+11,
+11,
+11,
+11,
+12,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+8,
+9,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+16,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+15,
+15,
+15,
+16,
+16,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+13,
+14,
+15,
+16,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+20,
+20,
+21,
+21,
+21,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+23,
+23,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+14,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+27,
+28,
+28,
+28,
+28,
+28,
+29,
+29,
+29,
+29,
+28,
+28,
+28,
+28,
+28,
+28,
+27,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+25,
+25,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+31,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+33,
+32,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+13,
+14,
+15,
+15,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+22,
+22,
+23,
+23,
+24,
+24,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+28,
+28,
+28,
+29,
+29,
+29,
+29,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+27,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+15,
+15,
+16,
+16,
+17,
+18,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+35,
+35,
+36,
+36,
+36,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+34,
+33,
+32,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+14,
+15,
+16,
+16,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+23,
+24,
+25,
+25,
+26,
+27,
+27,
+28,
+28,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+34,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+17,
+17,
+17,
+18,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+28,
+29,
+29,
+30,
+31,
+31,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+30,
+31,
+32,
+32,
+33,
+33,
+34,
+34,
+34,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+35,
+34,
+34,
+34,
+33,
+33,
+33,
+32,
+31,
+31,
+30,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+25,
+24,
+23,
+22,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+43,
+44,
+45,
+45,
+46,
+46,
+46,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+46,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+42,
+42,
+41,
+40,
+39,
+38,
+37,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+6,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+14,
+14,
+14,
+14,
+14,
+15,
+15,
+16,
+16,
+17,
+18,
+19,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+43,
+43,
+42,
+42,
+42,
+41,
+40,
+40,
+39,
+38,
+37,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+6,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+12,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+16,
+16,
+17,
+18,
+18,
+19,
+20,
+21,
+22,
+24,
+25,
+26,
+27,
+28,
+30,
+31,
+32,
+34,
+35,
+36,
+37,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+45,
+46,
+47,
+48,
+48,
+49,
+49,
+49,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+49,
+49,
+49,
+48,
+48,
+47,
+47,
+46,
+45,
+45,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+38,
+37,
+36,
+35,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+9,
+9,
+8,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+9,
+9,
+10,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+21,
+22,
+24,
+25,
+26,
+28,
+29,
+31,
+32,
+34,
+35,
+36,
+38,
+39,
+40,
+41,
+42,
+44,
+44,
+45,
+46,
+47,
+48,
+48,
+49,
+49,
+50,
+50,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+50,
+50,
+50,
+49,
+49,
+49,
+48,
+47,
+47,
+46,
+45,
+44,
+44,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+14,
+14,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+7,
+7,
+7,
+8,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+18,
+19,
+21,
+22,
+23,
+25,
+26,
+28,
+29,
+31,
+32,
+34,
+35,
+37,
+38,
+39,
+40,
+42,
+43,
+44,
+45,
+46,
+46,
+47,
+48,
+48,
+49,
+49,
+49,
+50,
+50,
+50,
+50,
+50,
+50,
+49,
+49,
+49,
+48,
+48,
+47,
+46,
+46,
+45,
+44,
+43,
+43,
+42,
+41,
+40,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+15,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+9,
+9,
+8,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+9,
+10,
+10,
+10,
+10,
+10,
+11,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+15,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+26,
+27,
+28,
+29,
+31,
+32,
+33,
+34,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+44,
+45,
+46,
+47,
+47,
+48,
+49,
+49,
+49,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+49,
+49,
+49,
+48,
+48,
+47,
+46,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+40,
+39,
+38,
+37,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+13,
+12,
+12,
+11,
+10,
+10,
+9,
+9,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+8,
+8,
+9,
+9,
+10,
+11,
+11,
+12,
+13,
+14,
+16,
+17,
+18,
+20,
+21,
+23,
+24,
+26,
+27,
+29,
+31,
+32,
+34,
+35,
+37,
+39,
+40,
+42,
+43,
+44,
+46,
+47,
+48,
+49,
+50,
+51,
+52,
+53,
+54,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+59,
+58,
+58,
+57,
+57,
+56,
+56,
+55,
+55,
+54,
+53,
+52,
+51,
+50,
+49,
+48,
+47,
+46,
+45,
+44,
+43,
+42,
+41,
+40,
+38,
+37,
+36,
+35,
+34,
+32,
+31,
+30,
+29,
+28,
+27,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+17,
+16,
+15,
+15,
+14,
+13,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+7,
+7,
+8,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+16,
+17,
+18,
+20,
+21,
+23,
+25,
+26,
+28,
+30,
+31,
+33,
+35,
+36,
+38,
+39,
+41,
+42,
+44,
+45,
+46,
+48,
+49,
+50,
+51,
+52,
+53,
+54,
+55,
+55,
+56,
+56,
+57,
+57,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+57,
+57,
+57,
+56,
+56,
+55,
+54,
+54,
+53,
+52,
+51,
+51,
+50,
+49,
+48,
+47,
+46,
+45,
+43,
+42,
+41,
+40,
+39,
+38,
+36,
+35,
+34,
+33,
+32,
+31,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+17,
+16,
+15,
+14,
+13,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+7,
+7,
+7,
+8,
+8,
+9,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+19,
+20,
+21,
+23,
+24,
+26,
+27,
+29,
+30,
+32,
+34,
+35,
+37,
+38,
+40,
+41,
+43,
+44,
+46,
+47,
+48,
+50,
+51,
+52,
+53,
+54,
+55,
+56,
+57,
+58,
+59,
+59,
+60,
+61,
+61,
+61,
+62,
+62,
+62,
+62,
+62,
+62,
+62,
+62,
+62,
+62,
+61,
+61,
+60,
+60,
+59,
+58,
+58,
+57,
+56,
+55,
+54,
+53,
+52,
+51,
+50,
+49,
+48,
+47,
+46,
+44,
+43,
+42,
+41,
+39,
+38,
+37,
+36,
+34,
+33,
+32,
+31,
+29,
+28,
+27,
+26,
+25,
+24,
+22,
+21,
+20,
+19,
+18,
+0,
+0,
+1,
+2,
+2,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+4,
+5,
+5,
+6,
+6,
+6,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+8,
+8,
+9,
+9,
+9,
+10,
+10,
+10,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+10,
+10,
+10,
+10,
+10,
+9,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+13,
+13,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+15,
+14,
+14,
+14,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+12,
+12,
+13,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+16,
+17,
+17,
+17,
+17,
+17,
+16,
+16,
+16,
+16,
+16,
+15,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+19,
+20,
+20,
+20,
+20,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+21,
+22,
+22,
+22,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+22,
+22,
+22,
+21,
+21,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+1,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+13,
+14,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+18,
+18,
+18,
+18,
+19,
+19,
+19,
+19,
+20,
+20,
+20,
+20,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+21,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+22,
+21,
+21,
+21,
+21,
+21,
+21,
+20,
+20,
+20,
+20,
+19,
+19,
+19,
+18,
+18,
+18,
+17,
+17,
+17,
+16,
+16,
+15,
+15,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+20,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+29,
+30,
+30,
+30,
+30,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+31,
+30,
+30,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+19,
+18,
+18,
+17,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+3,
+2,
+2,
+2,
+2,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+16,
+17,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+23,
+24,
+25,
+26,
+26,
+27,
+27,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+32,
+32,
+32,
+33,
+33,
+33,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+34,
+33,
+33,
+33,
+33,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+5,
+4,
+4,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+11,
+12,
+13,
+14,
+14,
+15,
+15,
+16,
+16,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+31,
+31,
+31,
+32,
+32,
+32,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+33,
+32,
+32,
+32,
+32,
+31,
+31,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+26,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+14,
+15,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+22,
+23,
+23,
+24,
+24,
+25,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+31,
+32,
+32,
+33,
+33,
+34,
+34,
+35,
+35,
+35,
+36,
+36,
+36,
+37,
+37,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+32,
+31,
+30,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+16,
+16,
+16,
+16,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+20,
+21,
+21,
+22,
+23,
+23,
+24,
+25,
+25,
+26,
+26,
+27,
+27,
+28,
+28,
+29,
+29,
+29,
+30,
+30,
+30,
+30,
+30,
+31,
+31,
+31,
+30,
+30,
+30,
+30,
+30,
+29,
+29,
+29,
+28,
+28,
+27,
+27,
+27,
+26,
+25,
+25,
+24,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+14,
+14,
+14,
+14,
+15,
+15,
+15,
+15,
+16,
+16,
+16,
+17,
+17,
+17,
+18,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+22,
+23,
+24,
+24,
+25,
+26,
+26,
+27,
+28,
+28,
+29,
+29,
+30,
+30,
+30,
+31,
+31,
+31,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+32,
+31,
+31,
+31,
+30,
+30,
+30,
+29,
+29,
+28,
+28,
+27,
+27,
+26,
+25,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+3,
+3,
+3,
+3,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+9,
+10,
+11,
+11,
+12,
+12,
+12,
+13,
+13,
+13,
+13,
+13,
+14,
+14,
+14,
+14,
+14,
+15,
+15,
+15,
+16,
+16,
+17,
+17,
+18,
+19,
+19,
+20,
+21,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+24,
+23,
+22,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+7,
+8,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+13,
+13,
+14,
+14,
+14,
+14,
+15,
+15,
+16,
+16,
+16,
+17,
+18,
+18,
+19,
+20,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+38,
+38,
+39,
+39,
+40,
+40,
+41,
+41,
+41,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+41,
+41,
+41,
+41,
+40,
+40,
+39,
+39,
+38,
+38,
+37,
+36,
+36,
+35,
+34,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+23,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+7,
+6,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+9,
+9,
+9,
+9,
+9,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+14,
+15,
+15,
+16,
+17,
+18,
+19,
+20,
+21,
+22,
+23,
+24,
+25,
+26,
+27,
+27,
+28,
+29,
+30,
+31,
+32,
+32,
+33,
+34,
+34,
+35,
+35,
+36,
+36,
+37,
+37,
+37,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+38,
+37,
+37,
+37,
+36,
+36,
+36,
+35,
+35,
+34,
+34,
+33,
+33,
+32,
+31,
+31,
+30,
+29,
+29,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+13,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+9,
+9,
+9,
+9,
+9,
+9,
+10,
+10,
+11,
+11,
+12,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+20,
+22,
+23,
+24,
+25,
+27,
+28,
+29,
+30,
+31,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+42,
+42,
+43,
+43,
+44,
+44,
+44,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+41,
+40,
+40,
+39,
+38,
+38,
+37,
+36,
+35,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+29,
+28,
+27,
+26,
+25,
+24,
+23,
+23,
+22,
+21,
+20,
+19,
+19,
+18,
+17,
+16,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+6,
+7,
+7,
+8,
+8,
+9,
+10,
+10,
+11,
+12,
+13,
+14,
+16,
+17,
+18,
+19,
+20,
+22,
+23,
+24,
+25,
+26,
+28,
+29,
+30,
+31,
+32,
+33,
+34,
+34,
+35,
+36,
+37,
+37,
+38,
+38,
+38,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+38,
+38,
+38,
+37,
+37,
+36,
+35,
+35,
+34,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+24,
+24,
+23,
+22,
+21,
+20,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+8,
+7,
+7,
+6,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+7,
+8,
+8,
+8,
+8,
+8,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+8,
+8,
+8,
+9,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+19,
+21,
+22,
+23,
+25,
+26,
+27,
+29,
+30,
+31,
+32,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+41,
+42,
+43,
+43,
+43,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+44,
+43,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+39,
+39,
+38,
+37,
+37,
+36,
+35,
+34,
+33,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+10,
+9,
+9,
+8,
+8,
+7,
+7,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+6,
+6,
+6,
+7,
+7,
+8,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+19,
+20,
+21,
+22,
+24,
+25,
+26,
+27,
+29,
+30,
+31,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+39,
+40,
+41,
+41,
+42,
+42,
+42,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+43,
+42,
+42,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+38,
+38,
+37,
+37,
+36,
+35,
+34,
+34,
+33,
+32,
+31,
+31,
+30,
+29,
+28,
+27,
+26,
+26,
+25,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+15,
+15,
+14,
+13,
+13,
+12,
+12,
+11,
+10,
+10,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+5,
+5,
+5,
+6,
+7,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+16,
+17,
+18,
+20,
+21,
+22,
+24,
+25,
+26,
+28,
+29,
+30,
+32,
+33,
+34,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+41,
+42,
+43,
+43,
+44,
+44,
+44,
+44,
+44,
+45,
+45,
+44,
+44,
+44,
+44,
+44,
+43,
+43,
+42,
+42,
+41,
+41,
+40,
+40,
+39,
+38,
+38,
+37,
+36,
+35,
+34,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+14,
+14,
+13,
+12,
+12,
+11,
+11,
+10,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+6,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+5,
+5,
+6,
+6,
+7,
+8,
+8,
+9,
+10,
+11,
+12,
+13,
+15,
+16,
+17,
+18,
+20,
+21,
+23,
+24,
+25,
+27,
+28,
+29,
+31,
+32,
+33,
+34,
+35,
+37,
+38,
+39,
+40,
+40,
+41,
+42,
+43,
+43,
+44,
+45,
+45,
+45,
+46,
+46,
+46,
+47,
+47,
+47,
+47,
+47,
+47,
+47,
+46,
+46,
+46,
+46,
+45,
+45,
+44,
+44,
+44,
+43,
+42,
+42,
+41,
+41,
+40,
+39,
+38,
+38,
+37,
+36,
+35,
+34,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+28,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+21,
+20,
+19,
+18,
+17,
+17,
+16,
+15,
+0,
+0,
+1,
+2,
+3,
+4,
+5,
+6,
+6,
+6,
+7,
+7,
+7,
+7,
+7,
+7,
+6,
+6,
+6,
+5,
+5,
+5,
+5,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+4,
+5,
+5,
+6,
+6,
+7,
+7,
+8,
+9,
+10,
+11,
+12,
+13,
+14,
+15,
+16,
+17,
+18,
+20,
+21,
+22,
+23,
+25,
+26,
+27,
+29,
+30,
+31,
+32,
+33,
+35,
+36,
+37,
+38,
+39,
+40,
+41,
+42,
+43,
+43,
+44,
+45,
+45,
+46,
+46,
+47,
+47,
+47,
+47,
+48,
+48,
+48,
+48,
+48,
+47,
+47,
+47,
+47,
+46,
+46,
+45,
+45,
+44,
+44,
+43,
+42,
+42,
+41,
+40,
+39,
+39,
+38,
+37,
+36,
+35,
+34,
+33,
+32,
+31,
+30,
+29,
+28,
+27,
+27,
+26,
+25,
+24,
+23,
+22,
+21,
+20,
+19,
+18,
+18,
+17,
+16,
+
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_SOURCE_ER_TABLES_XOR_H_
diff --git a/src/modules/video_coding/main/source/event.h b/src/modules/video_coding/main/source/event.h
new file mode 100644
index 0000000..39fd494
--- /dev/null
+++ b/src/modules/video_coding/main/source/event.h
@@ -0,0 +1,63 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_EVENT_H_
+#define WEBRTC_MODULES_VIDEO_CODING_EVENT_H_
+
+#include "event_wrapper.h"
+
+namespace webrtc
+{
+
+//#define EVENT_DEBUG
+
+class VCMEvent : public EventWrapper
+{
+public:
+    VCMEvent() : _event(*EventWrapper::Create()) {};
+
+    virtual ~VCMEvent() { delete &_event; };
+
+    /**
+    *   Release waiting threads
+    */
+    bool Set() { return _event.Set(); };
+
+    bool Reset() { return _event.Reset(); };
+
+    /**
+    *   Wait for this event
+    */
+    EventTypeWrapper Wait(unsigned long maxTime)
+    {
+#ifdef EVENT_DEBUG
+        return kEventTimeout;
+#else
+        return _event.Wait(maxTime);
+#endif
+    };
+
+    /**
+    *   Start a timer
+    */
+    bool StartTimer(bool periodic, unsigned long time)
+                   { return _event.StartTimer(periodic, time); };
+    /**
+    *   Stop the timer
+    */
+    bool StopTimer() { return _event.StopTimer(); };
+
+private:
+    EventWrapper&      _event;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_EVENT_H_
diff --git a/src/modules/video_coding/main/source/exp_filter.cc b/src/modules/video_coding/main/source/exp_filter.cc
new file mode 100644
index 0000000..1d6f9a7
--- /dev/null
+++ b/src/modules/video_coding/main/source/exp_filter.cc
@@ -0,0 +1,60 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "exp_filter.h"
+
+#include <math.h>
+
+namespace webrtc {
+
+void
+VCMExpFilter::Reset(float alpha)
+{
+    _alpha = alpha;
+    _filtered = -1.0;
+}
+
+float
+VCMExpFilter::Apply(float exp, float sample)
+{
+    if (_filtered == -1.0)
+    {
+        // Initialize filtered bit rates
+        _filtered = sample;
+    }
+    else if (exp == 1.0)
+    {
+        _filtered = _alpha * _filtered + (1 - _alpha) * sample;
+    }
+    else
+    {
+        float alpha = pow(_alpha, exp);
+        _filtered = alpha * _filtered + (1 - alpha) * sample;
+    }
+    if (_max != -1 && _filtered > _max)
+    {
+        _filtered = _max;
+    }
+    return _filtered;
+}
+
+void
+VCMExpFilter::UpdateBase(float alpha)
+{
+    _alpha = alpha;
+}
+
+float
+VCMExpFilter::Value() const
+{
+    return _filtered;
+}
+
+}
diff --git a/src/modules/video_coding/main/source/exp_filter.h b/src/modules/video_coding/main/source/exp_filter.h
new file mode 100644
index 0000000..46d206a
--- /dev/null
+++ b/src/modules/video_coding/main/source/exp_filter.h
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_EXP_FILTER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_EXP_FILTER_H_
+
+namespace webrtc
+{
+
+/**********************/
+/* ExpFilter class    */
+/**********************/
+
+class VCMExpFilter
+{
+public:
+    VCMExpFilter(float alpha, float max = -1.0) : _alpha(alpha), _filtered(-1.0), _max(max) {}
+
+    // Resets the filter to its initial state, and resets alpha to the given value
+    //
+    // Input:
+    //          - alpha     : the new value of the filter factor base.
+    void Reset(float alpha);
+
+    // Applies the filter with the given exponent on the provided sample
+    //
+    // Input:
+    //          - exp       : Exponent T in y(k) = alpha^T * y(k-1) + (1 - alpha^T) * x(k)
+    //          - sample    : x(k) in the above filter equation
+    float Apply(float exp, float sample);
+
+    // Return current filtered value: y(k)
+    //
+    // Return value         : The current filter output
+    float Value() const;
+
+    // Change the filter factor base
+    //
+    // Input:
+    //          - alpha     : The new filter factor base.
+    void UpdateBase(float alpha);
+
+private:
+    float          _alpha;     // Filter factor base
+    float          _filtered;  // Current filter output
+    const float    _max;
+}; // end of ExpFilter class
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_EXP_FILTER_H_
diff --git a/src/modules/video_coding/main/source/fec_tables_xor.h b/src/modules/video_coding/main/source/fec_tables_xor.h
new file mode 100644
index 0000000..27db9a4
--- /dev/null
+++ b/src/modules/video_coding/main/source/fec_tables_xor.h
@@ -0,0 +1,6481 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_SOURCE_FEC_TABLES_XOR_H_
+#define WEBRTC_MODULES_VIDEO_CODING_SOURCE_FEC_TABLES_XOR_H_
+
+// This is a private header for media_opt_util.cc.
+// It should not be included by other files.
+
+namespace webrtc {
+
+// Table for Protection factor (code rate) of delta frames, for the XOR FEC.
+// Input is the packet loss and an effective rate (bits/frame).
+// Output is array kCodeRateXORTable[k], where k = rate_i*129 + loss_j;
+// loss_j = 0,1,..128, and rate_i varies over some range.
+static const int kSizeCodeRateXORTable = 6450;
+static const unsigned char kCodeRateXORTable[kSizeCodeRateXORTable] = {
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+11,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+39,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+51,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+8,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+30,
+56,
+56,
+56,
+56,
+56,
+56,
+56,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+65,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+87,
+78,
+78,
+78,
+78,
+78,
+78,
+78,
+78,
+78,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+6,
+6,
+6,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+23,
+44,
+44,
+44,
+44,
+44,
+44,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+50,
+68,
+68,
+68,
+68,
+68,
+68,
+68,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+85,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+105,
+88,
+88,
+88,
+88,
+88,
+88,
+88,
+88,
+88,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+5,
+5,
+5,
+5,
+5,
+5,
+19,
+19,
+19,
+36,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+41,
+55,
+55,
+55,
+55,
+55,
+55,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+75,
+75,
+80,
+80,
+80,
+80,
+80,
+97,
+97,
+97,
+97,
+97,
+97,
+97,
+97,
+97,
+97,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+102,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+4,
+16,
+16,
+16,
+16,
+16,
+16,
+30,
+35,
+35,
+47,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+58,
+63,
+63,
+63,
+63,
+63,
+63,
+77,
+77,
+77,
+77,
+77,
+77,
+77,
+82,
+82,
+82,
+82,
+94,
+94,
+94,
+94,
+94,
+105,
+105,
+105,
+105,
+110,
+110,
+110,
+110,
+110,
+110,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+115,
+115,
+115,
+115,
+115,
+115,
+115,
+115,
+115,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+4,
+14,
+27,
+27,
+27,
+27,
+27,
+31,
+41,
+52,
+52,
+56,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+69,
+79,
+79,
+79,
+79,
+83,
+83,
+83,
+94,
+94,
+94,
+94,
+106,
+106,
+106,
+106,
+106,
+115,
+115,
+115,
+115,
+125,
+125,
+125,
+125,
+125,
+125,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+0,
+0,
+0,
+3,
+3,
+3,
+17,
+28,
+38,
+38,
+38,
+38,
+38,
+47,
+51,
+63,
+63,
+63,
+72,
+72,
+72,
+72,
+72,
+72,
+72,
+76,
+76,
+76,
+76,
+80,
+80,
+80,
+80,
+80,
+80,
+80,
+80,
+80,
+84,
+84,
+84,
+84,
+93,
+93,
+93,
+105,
+105,
+105,
+105,
+114,
+114,
+114,
+114,
+114,
+124,
+124,
+124,
+124,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+0,
+0,
+0,
+12,
+12,
+12,
+35,
+43,
+47,
+47,
+47,
+47,
+47,
+58,
+58,
+66,
+66,
+66,
+70,
+70,
+70,
+70,
+70,
+73,
+73,
+82,
+82,
+82,
+86,
+94,
+94,
+94,
+94,
+94,
+94,
+94,
+94,
+94,
+94,
+94,
+94,
+94,
+105,
+105,
+105,
+114,
+114,
+114,
+114,
+117,
+117,
+117,
+117,
+117,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+0,
+0,
+0,
+24,
+24,
+24,
+49,
+53,
+53,
+53,
+53,
+53,
+53,
+61,
+61,
+64,
+64,
+64,
+64,
+70,
+70,
+70,
+70,
+78,
+78,
+88,
+88,
+88,
+96,
+106,
+106,
+106,
+106,
+106,
+106,
+106,
+106,
+106,
+106,
+112,
+112,
+112,
+120,
+120,
+120,
+124,
+124,
+124,
+124,
+124,
+124,
+124,
+124,
+124,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+0,
+0,
+5,
+36,
+36,
+36,
+55,
+55,
+55,
+55,
+55,
+55,
+55,
+58,
+58,
+58,
+58,
+58,
+64,
+78,
+78,
+78,
+78,
+87,
+87,
+94,
+94,
+94,
+103,
+110,
+110,
+110,
+110,
+110,
+110,
+110,
+110,
+116,
+116,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+0,
+0,
+18,
+43,
+43,
+43,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+53,
+58,
+58,
+58,
+58,
+71,
+87,
+87,
+87,
+87,
+94,
+94,
+97,
+97,
+97,
+109,
+111,
+111,
+111,
+111,
+111,
+111,
+111,
+111,
+125,
+125,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+0,
+0,
+31,
+46,
+46,
+46,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+48,
+66,
+66,
+66,
+66,
+80,
+93,
+93,
+93,
+93,
+95,
+95,
+95,
+95,
+100,
+115,
+115,
+115,
+115,
+115,
+115,
+115,
+115,
+115,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+0,
+4,
+40,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+45,
+49,
+49,
+49,
+74,
+74,
+74,
+74,
+86,
+90,
+90,
+90,
+90,
+95,
+95,
+95,
+95,
+106,
+120,
+120,
+120,
+120,
+120,
+120,
+120,
+120,
+120,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+0,
+14,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+42,
+46,
+56,
+56,
+56,
+80,
+80,
+80,
+80,
+84,
+84,
+84,
+84,
+88,
+99,
+99,
+99,
+99,
+111,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+0,
+26,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+40,
+54,
+66,
+66,
+66,
+80,
+80,
+80,
+80,
+80,
+80,
+80,
+84,
+94,
+106,
+106,
+106,
+106,
+116,
+120,
+120,
+120,
+120,
+120,
+120,
+120,
+120,
+124,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+3,
+34,
+38,
+38,
+38,
+38,
+38,
+42,
+42,
+42,
+63,
+72,
+72,
+76,
+80,
+80,
+80,
+80,
+80,
+80,
+80,
+89,
+101,
+114,
+114,
+114,
+114,
+118,
+118,
+118,
+118,
+118,
+118,
+118,
+118,
+118,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+12,
+36,
+36,
+36,
+36,
+36,
+36,
+49,
+49,
+49,
+69,
+73,
+76,
+86,
+86,
+86,
+86,
+86,
+86,
+86,
+86,
+97,
+109,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+122,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+22,
+34,
+34,
+34,
+34,
+38,
+38,
+57,
+57,
+57,
+69,
+73,
+82,
+92,
+92,
+92,
+92,
+92,
+92,
+96,
+96,
+104,
+117,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+29,
+33,
+33,
+33,
+33,
+44,
+44,
+62,
+62,
+62,
+69,
+77,
+87,
+95,
+95,
+95,
+95,
+95,
+95,
+107,
+107,
+110,
+120,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+31,
+31,
+31,
+31,
+31,
+51,
+51,
+62,
+65,
+65,
+73,
+83,
+91,
+94,
+94,
+94,
+94,
+97,
+97,
+114,
+114,
+114,
+122,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+29,
+29,
+29,
+29,
+29,
+56,
+56,
+59,
+70,
+70,
+79,
+86,
+89,
+89,
+89,
+89,
+89,
+100,
+100,
+116,
+116,
+116,
+122,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+28,
+28,
+28,
+28,
+28,
+57,
+57,
+57,
+76,
+76,
+83,
+86,
+86,
+86,
+86,
+86,
+89,
+104,
+104,
+114,
+114,
+114,
+124,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+27,
+27,
+27,
+27,
+30,
+55,
+55,
+55,
+80,
+80,
+83,
+86,
+86,
+86,
+86,
+86,
+93,
+108,
+108,
+111,
+111,
+111,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+26,
+26,
+26,
+26,
+36,
+53,
+53,
+53,
+80,
+80,
+80,
+90,
+90,
+90,
+90,
+90,
+98,
+107,
+107,
+107,
+107,
+107,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+26,
+26,
+26,
+28,
+42,
+52,
+54,
+54,
+78,
+78,
+78,
+95,
+95,
+95,
+97,
+97,
+104,
+106,
+106,
+106,
+106,
+106,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+24,
+24,
+24,
+33,
+47,
+49,
+58,
+58,
+74,
+74,
+74,
+97,
+97,
+97,
+106,
+106,
+108,
+108,
+108,
+108,
+108,
+108,
+124,
+124,
+124,
+124,
+124,
+124,
+124,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+24,
+24,
+24,
+39,
+48,
+50,
+63,
+63,
+72,
+74,
+74,
+96,
+96,
+96,
+109,
+111,
+111,
+111,
+111,
+111,
+111,
+111,
+119,
+119,
+122,
+122,
+122,
+122,
+122,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+23,
+23,
+23,
+43,
+46,
+54,
+66,
+66,
+69,
+77,
+77,
+92,
+92,
+92,
+105,
+113,
+113,
+113,
+113,
+113,
+113,
+113,
+115,
+117,
+123,
+123,
+123,
+123,
+123,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+22,
+22,
+22,
+44,
+44,
+59,
+67,
+67,
+67,
+81,
+81,
+89,
+89,
+89,
+97,
+112,
+112,
+112,
+112,
+112,
+112,
+112,
+112,
+119,
+126,
+126,
+126,
+126,
+126,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+21,
+21,
+24,
+43,
+45,
+63,
+65,
+65,
+67,
+85,
+85,
+87,
+87,
+87,
+91,
+109,
+109,
+109,
+111,
+111,
+111,
+111,
+111,
+123,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+21,
+21,
+28,
+42,
+50,
+63,
+63,
+66,
+71,
+85,
+85,
+85,
+85,
+87,
+92,
+106,
+106,
+108,
+114,
+114,
+114,
+114,
+114,
+125,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+20,
+20,
+34,
+41,
+54,
+62,
+62,
+69,
+75,
+82,
+82,
+82,
+82,
+92,
+98,
+105,
+105,
+110,
+117,
+117,
+117,
+117,
+117,
+124,
+124,
+126,
+126,
+126,
+126,
+126,
+126,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+20,
+20,
+38,
+40,
+58,
+60,
+60,
+73,
+78,
+80,
+80,
+80,
+80,
+100,
+105,
+107,
+107,
+113,
+118,
+118,
+118,
+118,
+118,
+120,
+120,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+19,
+21,
+38,
+40,
+58,
+58,
+60,
+75,
+77,
+77,
+77,
+81,
+81,
+107,
+109,
+109,
+109,
+114,
+116,
+116,
+116,
+116,
+116,
+116,
+116,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+18,
+25,
+37,
+44,
+56,
+56,
+63,
+75,
+75,
+75,
+75,
+88,
+88,
+111,
+111,
+111,
+111,
+112,
+112,
+112,
+112,
+112,
+112,
+112,
+114,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+18,
+30,
+36,
+48,
+55,
+55,
+67,
+73,
+73,
+73,
+73,
+97,
+97,
+110,
+110,
+110,
+110,
+110,
+110,
+110,
+110,
+110,
+110,
+110,
+116,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+18,
+34,
+36,
+52,
+55,
+55,
+70,
+72,
+73,
+73,
+73,
+102,
+104,
+108,
+108,
+108,
+108,
+109,
+109,
+109,
+109,
+109,
+109,
+109,
+119,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+17,
+35,
+35,
+52,
+59,
+59,
+70,
+70,
+76,
+76,
+76,
+99,
+105,
+105,
+105,
+105,
+105,
+111,
+111,
+111,
+111,
+111,
+111,
+111,
+121,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+17,
+34,
+36,
+51,
+61,
+62,
+70,
+70,
+80,
+80,
+80,
+93,
+103,
+103,
+103,
+103,
+103,
+112,
+112,
+112,
+112,
+112,
+116,
+118,
+124,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+16,
+33,
+39,
+50,
+59,
+65,
+72,
+72,
+82,
+82,
+82,
+91,
+100,
+100,
+100,
+100,
+100,
+109,
+109,
+109,
+109,
+109,
+121,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+16,
+32,
+43,
+48,
+54,
+66,
+75,
+75,
+81,
+83,
+83,
+92,
+97,
+97,
+97,
+99,
+99,
+105,
+105,
+105,
+105,
+105,
+123,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+15,
+31,
+46,
+47,
+49,
+69,
+77,
+77,
+81,
+85,
+85,
+93,
+95,
+95,
+95,
+100,
+100,
+102,
+102,
+102,
+102,
+102,
+120,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+15,
+30,
+46,
+48,
+48,
+70,
+75,
+79,
+82,
+87,
+87,
+92,
+94,
+94,
+94,
+103,
+103,
+103,
+103,
+103,
+104,
+104,
+115,
+120,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+15,
+30,
+45,
+50,
+50,
+68,
+70,
+80,
+85,
+89,
+89,
+90,
+95,
+95,
+95,
+104,
+104,
+104,
+104,
+104,
+109,
+109,
+112,
+114,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+14,
+29,
+44,
+54,
+54,
+64,
+64,
+83,
+87,
+88,
+88,
+88,
+98,
+98,
+98,
+103,
+103,
+103,
+103,
+103,
+113,
+113,
+113,
+113,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+14,
+29,
+43,
+56,
+56,
+61,
+61,
+84,
+85,
+88,
+88,
+88,
+100,
+100,
+100,
+102,
+102,
+102,
+102,
+102,
+113,
+116,
+116,
+116,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+14,
+28,
+42,
+57,
+57,
+62,
+62,
+80,
+80,
+91,
+91,
+91,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+109,
+119,
+119,
+119,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+14,
+28,
+42,
+56,
+56,
+65,
+66,
+76,
+76,
+92,
+92,
+92,
+97,
+97,
+97,
+101,
+101,
+101,
+101,
+101,
+106,
+121,
+121,
+121,
+126,
+126,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+13,
+27,
+41,
+55,
+55,
+67,
+72,
+74,
+74,
+90,
+90,
+90,
+91,
+91,
+91,
+105,
+105,
+105,
+105,
+105,
+107,
+122,
+122,
+122,
+123,
+123,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+0,
+13,
+27,
+40,
+54,
+54,
+67,
+76,
+76,
+76,
+85,
+85,
+85,
+85,
+85,
+85,
+112,
+112,
+112,
+112,
+112,
+112,
+121,
+121,
+121,
+121,
+121,
+126,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+127,
+
+
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_SOURCE_FEC_TABLES_XOR_H_
diff --git a/src/modules/video_coding/main/source/frame_buffer.cc b/src/modules/video_coding/main/source/frame_buffer.cc
new file mode 100644
index 0000000..abaadff
--- /dev/null
+++ b/src/modules/video_coding/main/source/frame_buffer.cc
@@ -0,0 +1,410 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "frame_buffer.h"
+#include "packet.h"
+
+#include <cassert>
+#include <string.h>
+
+namespace webrtc {
+
+VCMFrameBuffer::VCMFrameBuffer()
+  :
+    _state(kStateFree),
+    _frameCounted(false),
+    _nackCount(0),
+    _latestPacketTimeMs(-1) {
+}
+
+VCMFrameBuffer::~VCMFrameBuffer() {
+}
+
+VCMFrameBuffer::VCMFrameBuffer(VCMFrameBuffer& rhs)
+:
+VCMEncodedFrame(rhs),
+_state(rhs._state),
+_frameCounted(rhs._frameCounted),
+_sessionInfo(),
+_nackCount(rhs._nackCount),
+_latestPacketTimeMs(rhs._latestPacketTimeMs)
+{
+    _sessionInfo = rhs._sessionInfo;
+    _sessionInfo.UpdateDataPointers(rhs._buffer, _buffer);
+}
+
+webrtc::FrameType
+VCMFrameBuffer::FrameType() const
+{
+    return _sessionInfo.FrameType();
+}
+
+void
+VCMFrameBuffer::SetPreviousFrameLoss()
+{
+    _sessionInfo.SetPreviousFrameLoss();
+}
+
+WebRtc_Word32
+VCMFrameBuffer::GetLowSeqNum() const
+{
+    return _sessionInfo.LowSequenceNumber();
+}
+
+WebRtc_Word32
+VCMFrameBuffer::GetHighSeqNum() const
+{
+    return _sessionInfo.HighSequenceNumber();
+}
+
+int VCMFrameBuffer::PictureId() const {
+  return _sessionInfo.PictureId();
+}
+
+int VCMFrameBuffer::TemporalId() const {
+  return _sessionInfo.TemporalId();
+}
+
+bool VCMFrameBuffer::LayerSync() const {
+  return _sessionInfo.LayerSync();
+}
+
+int VCMFrameBuffer::Tl0PicId() const {
+  return _sessionInfo.Tl0PicId();
+}
+
+bool VCMFrameBuffer::NonReference() const {
+  return _sessionInfo.NonReference();
+}
+
+bool
+VCMFrameBuffer::IsSessionComplete() const
+{
+    return _sessionInfo.complete();
+}
+
+// Insert packet
+VCMFrameBufferEnum
+VCMFrameBuffer::InsertPacket(const VCMPacket& packet, WebRtc_Word64 timeInMs,
+                             bool enableDecodableState, WebRtc_UWord32 rttMS)
+{
+    if (_state == kStateDecoding)
+    {
+        // Do not insert packet
+        return kNoError;
+    }
+
+    // Sanity to check if the frame has been freed. (Too old for example)
+    if (_state == kStateFree)
+    {
+        return kStateError;
+    }
+
+    // is this packet part of this frame
+    if (TimeStamp() && (TimeStamp() != packet.timestamp))
+    {
+        return kTimeStampError;
+    }
+
+    // sanity checks
+    if (_size + packet.sizeBytes +
+        (packet.insertStartCode ?  kH264StartCodeLengthBytes : 0 )
+        > kMaxJBFrameSizeBytes)
+    {
+        return kSizeError;
+    }
+    if (NULL == packet.dataPtr && packet.sizeBytes > 0)
+    {
+        return kSizeError;
+    }
+    if (packet.dataPtr != NULL)
+    {
+        _payloadType = packet.payloadType;
+    }
+
+    if (kStateEmpty == _state)
+    {
+        // First packet (empty and/or media) inserted into this frame.
+        // store some info and set some initial values.
+        _timeStamp = packet.timestamp;
+        _codec = packet.codec;
+        if (packet.frameType != kFrameEmpty)
+        {
+            // first media packet
+            SetState(kStateIncomplete);
+        }
+    }
+
+    WebRtc_UWord32 requiredSizeBytes = Length() + packet.sizeBytes +
+                   (packet.insertStartCode ? kH264StartCodeLengthBytes : 0);
+    if (requiredSizeBytes >= _size)
+    {
+        const WebRtc_UWord8* prevBuffer = _buffer;
+        const WebRtc_UWord32 increments = requiredSizeBytes /
+                                          kBufferIncStepSizeBytes +
+                                        (requiredSizeBytes %
+                                         kBufferIncStepSizeBytes > 0);
+        const WebRtc_UWord32 newSize = _size +
+                                       increments * kBufferIncStepSizeBytes;
+        if (newSize > kMaxJBFrameSizeBytes)
+        {
+            return kSizeError;
+        }
+        if (VerifyAndAllocate(newSize) == -1)
+        {
+            return kSizeError;
+        }
+        _sessionInfo.UpdateDataPointers(prevBuffer, _buffer);
+    }
+
+    CopyCodecSpecific(&packet.codecSpecificHeader);
+
+    int retVal = _sessionInfo.InsertPacket(packet, _buffer,
+                                           enableDecodableState,
+                                           rttMS);
+    if (retVal == -1)
+    {
+        return kSizeError;
+    }
+    else if (retVal == -2)
+    {
+        return kDuplicatePacket;
+    }
+    // update length
+    _length = Length() + static_cast<WebRtc_UWord32>(retVal);
+
+    _latestPacketTimeMs = timeInMs;
+
+    if (_sessionInfo.complete()) {
+      return kCompleteSession;
+    } else if (_sessionInfo.decodable()) {
+      SetState(kStateDecodable);
+      return kDecodableSession;
+    } else {
+      // this layer is not complete
+      if (_state == kStateComplete) {
+        // we already have a complete layer
+        // wait for all independent layers belonging to the same frame
+        _state = kStateIncomplete;
+      }
+    }
+    return kIncomplete;
+}
+
+WebRtc_Word64
+VCMFrameBuffer::LatestPacketTimeMs()
+{
+    return _latestPacketTimeMs;
+}
+
+// Build hard NACK list:Zero out all entries in list up to and including the
+// (first) entry equal to _lowSeqNum.
+int VCMFrameBuffer::BuildHardNackList(int* list, int num) {
+  if (_sessionInfo.BuildHardNackList(list, num) != 0) {
+   return -1;
+  }
+  return 0;
+}
+
+// Build selective NACK list: Create a soft (selective) list of entries to zero
+// out up to and including the (first) entry equal to _lowSeqNum.
+int VCMFrameBuffer::BuildSoftNackList(int* list, int num, int rttMs) {
+  return _sessionInfo.BuildSoftNackList(list, num, rttMs);
+}
+
+void
+VCMFrameBuffer::IncrementNackCount()
+{
+    _nackCount++;
+}
+
+WebRtc_Word16
+VCMFrameBuffer::GetNackCount() const
+{
+    return _nackCount;
+}
+
+bool
+VCMFrameBuffer::HaveLastPacket() const
+{
+    return _sessionInfo.HaveLastPacket();
+}
+
+void
+VCMFrameBuffer::Reset()
+{
+    _length = 0;
+    _timeStamp = 0;
+    _sessionInfo.Reset();
+    _frameCounted = false;
+    _payloadType = 0;
+    _nackCount = 0;
+    _latestPacketTimeMs = -1;
+    _state = kStateFree;
+    VCMEncodedFrame::Reset();
+}
+
+// Makes sure the session contains a decodable stream.
+void
+VCMFrameBuffer::MakeSessionDecodable()
+{
+    WebRtc_UWord32 retVal;
+#ifdef INDEPENDENT_PARTITIONS
+    if (_codec != kVideoCodecVP8) {
+        retVal = _sessionInfo.MakeDecodable();
+        _length -= retVal;
+    }
+#else
+    retVal = _sessionInfo.MakeDecodable();
+    _length -= retVal;
+#endif
+}
+
+// Set state of frame
+void
+VCMFrameBuffer::SetState(VCMFrameBufferStateEnum state)
+{
+    if (_state == state)
+    {
+        return;
+    }
+    switch (state)
+    {
+    case kStateFree:
+        // Reset everything
+        // We can go to this state from all other states.
+        // The one setting the state to free must ensure
+        // that the frame is removed from the timestamp
+        // ordered frame list in the jb.
+        Reset();
+        break;
+
+    case kStateIncomplete:
+        // we can go to this state from state kStateEmpty
+        assert(_state == kStateEmpty ||
+            _state == kStateDecoding);
+
+        // Do nothing, we received a packet
+        break;
+
+    case kStateComplete:
+        assert(_state == kStateEmpty ||
+               _state == kStateIncomplete ||
+               _state == kStateDecodable);
+
+        break;
+
+    case kStateEmpty:
+        assert(_state == kStateFree);
+        // Do nothing
+        break;
+
+    case kStateDecoding:
+        // A frame might have received empty packets, or media packets might
+        // have been removed when making the frame decodable. The frame can
+        // still be set to decodable since it can be used to inform the
+        // decoder of a frame loss.
+        assert(_state == kStateComplete || _state == kStateIncomplete ||
+               _state == kStateDecodable || _state == kStateEmpty);
+        // Transfer frame information to EncodedFrame and create any codec
+        // specific information
+        RestructureFrameInformation();
+        break;
+
+    case kStateDecodable:
+        assert(_state == kStateEmpty ||
+               _state == kStateIncomplete);
+        break;
+    }
+    _state = state;
+}
+
+void
+VCMFrameBuffer::RestructureFrameInformation()
+{
+    PrepareForDecode();
+    _frameType = ConvertFrameType(_sessionInfo.FrameType());
+    _completeFrame = _sessionInfo.complete();
+    _missingFrame = _sessionInfo.PreviousFrameLoss();
+}
+
+WebRtc_Word32
+VCMFrameBuffer::ExtractFromStorage(const EncodedVideoData& frameFromStorage)
+{
+    _frameType = ConvertFrameType(frameFromStorage.frameType);
+    _timeStamp = frameFromStorage.timeStamp;
+    _payloadType = frameFromStorage.payloadType;
+    _encodedWidth = frameFromStorage.encodedWidth;
+    _encodedHeight = frameFromStorage.encodedHeight;
+    _missingFrame = frameFromStorage.missingFrame;
+    _completeFrame = frameFromStorage.completeFrame;
+    _renderTimeMs = frameFromStorage.renderTimeMs;
+    _codec = frameFromStorage.codec;
+    const WebRtc_UWord8 *prevBuffer = _buffer;
+    if (VerifyAndAllocate(frameFromStorage.payloadSize) < 0)
+    {
+        return VCM_MEMORY;
+    }
+    _sessionInfo.UpdateDataPointers(prevBuffer, _buffer);
+    memcpy(_buffer, frameFromStorage.payloadData, frameFromStorage.payloadSize);
+    _length = frameFromStorage.payloadSize;
+    return VCM_OK;
+}
+
+int VCMFrameBuffer::NotDecodablePackets() const {
+  return _sessionInfo.packets_not_decodable();
+}
+
+// Set counted status (as counted by JB or not)
+void VCMFrameBuffer::SetCountedFrame(bool frameCounted)
+{
+    _frameCounted = frameCounted;
+}
+
+bool VCMFrameBuffer::GetCountedFrame() const
+{
+    return _frameCounted;
+}
+
+// Get current state of frame
+VCMFrameBufferStateEnum
+VCMFrameBuffer::GetState() const
+{
+    return _state;
+}
+
+// Get current state of frame
+VCMFrameBufferStateEnum
+VCMFrameBuffer::GetState(WebRtc_UWord32& timeStamp) const
+{
+    timeStamp = TimeStamp();
+    return GetState();
+}
+
+bool
+VCMFrameBuffer::IsRetransmitted() const
+{
+    return _sessionInfo.session_nack();
+}
+
+void
+VCMFrameBuffer::PrepareForDecode()
+{
+#ifdef INDEPENDENT_PARTITIONS
+    if (_codec == kVideoCodecVP8)
+    {
+        _length =
+            _sessionInfo.BuildVP8FragmentationHeader(_buffer, _length,
+                                                     &_fragmentation);
+    }
+#endif
+}
+
+}
diff --git a/src/modules/video_coding/main/source/frame_buffer.h b/src/modules/video_coding/main/source/frame_buffer.h
new file mode 100644
index 0000000..ea05754
--- /dev/null
+++ b/src/modules/video_coding/main/source/frame_buffer.h
@@ -0,0 +1,102 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_FRAME_BUFFER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_FRAME_BUFFER_H_
+
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/main/source/encoded_frame.h"
+#include "modules/video_coding/main/source/jitter_buffer_common.h"
+#include "modules/video_coding/main/source/session_info.h"
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+class VCMFrameBuffer : public VCMEncodedFrame
+{
+public:
+    VCMFrameBuffer();
+    virtual ~VCMFrameBuffer();
+
+    VCMFrameBuffer(VCMFrameBuffer& rhs);
+
+    virtual void Reset();
+
+    VCMFrameBufferEnum InsertPacket(const VCMPacket& packet,
+                                    WebRtc_Word64 timeInMs,
+                                    bool enableDecodableState,
+                                    WebRtc_UWord32 rttMs);
+
+    // State
+    // Get current state of frame
+    VCMFrameBufferStateEnum GetState() const;
+    // Get current state and timestamp of frame
+    VCMFrameBufferStateEnum GetState(WebRtc_UWord32& timeStamp) const;
+    void SetState(VCMFrameBufferStateEnum state); // Set state of frame
+
+    bool IsRetransmitted() const;
+    bool IsSessionComplete() const;
+    bool HaveLastPacket() const;
+    // Makes sure the session contain a decodable stream.
+    void MakeSessionDecodable();
+
+    // Sequence numbers
+    // Get lowest packet sequence number in frame
+    WebRtc_Word32 GetLowSeqNum() const;
+    // Get highest packet sequence number in frame
+    WebRtc_Word32 GetHighSeqNum() const;
+
+    int PictureId() const;
+    int TemporalId() const;
+    bool LayerSync() const;
+    int Tl0PicId() const;
+    bool NonReference() const;
+
+    // Set counted status (as counted by JB or not)
+    void SetCountedFrame(bool frameCounted);
+    bool GetCountedFrame() const;
+
+    // NACK - Building the NACK lists.
+    // Build hard NACK list: Zero out all entries in list up to and including
+    // _lowSeqNum.
+    int BuildHardNackList(int* list, int num);
+    // Build soft NACK list: Zero out only a subset of the packets, discard
+    // empty packets.
+    int BuildSoftNackList(int* list, int num, int rttMs);
+    void IncrementNackCount();
+    WebRtc_Word16 GetNackCount() const;
+
+    WebRtc_Word64 LatestPacketTimeMs();
+
+    webrtc::FrameType FrameType() const;
+    void SetPreviousFrameLoss();
+
+    WebRtc_Word32 ExtractFromStorage(const EncodedVideoData& frameFromStorage);
+
+    // The number of packets discarded because the decoder can't make use of
+    // them.
+    int NotDecodablePackets() const;
+
+protected:
+    void RestructureFrameInformation();
+    void PrepareForDecode();
+
+private:
+    VCMFrameBufferStateEnum    _state;         // Current state of the frame
+    bool                       _frameCounted;  // Was this frame counted by JB?
+    VCMSessionInfo             _sessionInfo;
+    WebRtc_UWord16             _nackCount;
+    WebRtc_Word64              _latestPacketTimeMs;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_FRAME_BUFFER_H_
diff --git a/src/modules/video_coding/main/source/frame_dropper.cc b/src/modules/video_coding/main/source/frame_dropper.cc
new file mode 100644
index 0000000..065e452
--- /dev/null
+++ b/src/modules/video_coding/main/source/frame_dropper.cc
@@ -0,0 +1,331 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "frame_dropper.h"
+#include "internal_defines.h"
+#include "trace.h"
+
+namespace webrtc
+{
+
+VCMFrameDropper::VCMFrameDropper(WebRtc_Word32 vcmId)
+:
+_vcmId(vcmId),
+_keyFrameSizeAvgKbits(0.9f),
+_keyFrameRatio(0.99f),
+_dropRatio(0.9f, 0.96f)
+{
+    Reset();
+}
+
+void
+VCMFrameDropper::Reset()
+{
+    _keyFrameRatio.Reset(0.99f);
+    _keyFrameRatio.Apply(1.0f, 1.0f/300.0f); // 1 key frame every 10th second in 30 fps
+    _keyFrameSizeAvgKbits.Reset(0.9f);
+    _keyFrameCount = 0;
+    _accumulator = 0.0f;
+    _accumulatorMax = 150.0f; // assume 300 kb/s and 0.5 s window
+    _targetBitRate = 300.0f;
+    _userFrameRate = 30;
+    _keyFrameSpreadFrames = 0.5f * _userFrameRate;
+    _dropNext = false;
+    _dropRatio.Reset(0.9f);
+    _dropRatio.Apply(0.0f, 0.0f); // Initialize to 0
+    _dropCount = 0;
+    _windowSize = 0.5f;
+    _wasBelowMax = true;
+    _enabled = true;
+    _fastMode = false; // start with normal (non-aggressive) mode
+}
+
+void
+VCMFrameDropper::Enable(bool enable)
+{
+    _enabled = enable;
+}
+
+void
+VCMFrameDropper::Fill(WebRtc_UWord32 frameSizeBytes, bool deltaFrame)
+{
+    if (!_enabled)
+    {
+        return;
+    }
+    float frameSizeKbits = 8.0f * static_cast<float>(frameSizeBytes) / 1000.0f;
+    if (!deltaFrame && !_fastMode) // fast mode does not treat key-frames any different
+    {
+        _keyFrameSizeAvgKbits.Apply(1, frameSizeKbits);
+        _keyFrameRatio.Apply(1.0, 1.0);
+        if (frameSizeKbits > _keyFrameSizeAvgKbits.Value())
+        {
+            // Remove the average key frame size since we
+            // compensate for key frames when adding delta
+            // frames.
+            frameSizeKbits -= _keyFrameSizeAvgKbits.Value();
+        }
+        else
+        {
+            // Shouldn't be negative, so zero is the lower bound.
+            frameSizeKbits = 0;
+        }
+        if (_keyFrameRatio.Value() > 1e-5 && 1 / _keyFrameRatio.Value() < _keyFrameSpreadFrames)
+        {
+            // We are sending key frames more often than our upper bound for
+            // how much we allow the key frame compensation to be spread
+            // out in time. Therefor we must use the key frame ratio rather
+            // than keyFrameSpreadFrames.
+            _keyFrameCount = static_cast<WebRtc_Word32>(1 / _keyFrameRatio.Value() + 0.5);
+        }
+        else
+        {
+            // Compensate for the key frame the following frames
+            _keyFrameCount = static_cast<WebRtc_Word32>(_keyFrameSpreadFrames + 0.5);
+        }
+    }
+    else
+    {
+        // Decrease the keyFrameRatio
+        _keyFrameRatio.Apply(1.0, 0.0);
+    }
+    // Change the level of the accumulator (bucket)
+    _accumulator += frameSizeKbits;
+}
+
+void
+VCMFrameDropper::Leak(WebRtc_UWord32 inputFrameRate)
+{
+    if (!_enabled)
+    {
+        return;
+    }
+    if (inputFrameRate < 1)
+    {
+        return;
+    }
+    if (_targetBitRate < 0.0f)
+    {
+        return;
+    }
+    _keyFrameSpreadFrames = 0.5f * inputFrameRate;
+    // T is the expected bits per frame (target). If all frames were the same size,
+    // we would get T bits per frame. Notice that T is also weighted to be able to
+    // force a lower frame rate if wanted.
+    float T = _targetBitRate / inputFrameRate;
+    if (_keyFrameCount > 0)
+    {
+        // Perform the key frame compensation
+        if (_keyFrameRatio.Value() > 0 && 1 / _keyFrameRatio.Value() < _keyFrameSpreadFrames)
+        {
+            T -= _keyFrameSizeAvgKbits.Value() * _keyFrameRatio.Value();
+        }
+        else
+        {
+            T -= _keyFrameSizeAvgKbits.Value() / _keyFrameSpreadFrames;
+        }
+        _keyFrameCount--;
+    }
+    _accumulator -= T;
+    UpdateRatio();
+
+}
+
+void
+VCMFrameDropper::UpdateNack(WebRtc_UWord32 nackBytes)
+{
+    if (!_enabled)
+    {
+        return;
+    }
+    _accumulator += static_cast<float>(nackBytes) * 8.0f / 1000.0f;
+}
+
+void
+VCMFrameDropper::FillBucket(float inKbits, float outKbits)
+{
+    _accumulator += (inKbits - outKbits);
+}
+
+void
+VCMFrameDropper::UpdateRatio()
+{
+    if (_accumulator > 1.3f * _accumulatorMax)
+    {
+        // Too far above accumulator max, react faster
+        _dropRatio.UpdateBase(0.8f);
+    }
+    else
+    {
+        // Go back to normal reaction
+        _dropRatio.UpdateBase(0.9f);
+    }
+    if (_accumulator > _accumulatorMax)
+    {
+        // We are above accumulator max, and should ideally
+        // drop a frame. Increase the dropRatio and drop
+        // the frame later.
+        if (_wasBelowMax)
+        {
+            _dropNext = true;
+        }
+        if (_fastMode)
+        {
+            // always drop in aggressive mode
+            _dropNext = true;
+        }
+
+        _dropRatio.Apply(1.0f, 1.0f);
+        _dropRatio.UpdateBase(0.9f);
+    }
+    else
+    {
+        _dropRatio.Apply(1.0f, 0.0f);
+    }
+    if (_accumulator < 0.0f)
+    {
+        _accumulator = 0.0f;
+    }
+    _wasBelowMax = _accumulator < _accumulatorMax;
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId),  "FrameDropper: dropRatio = %f accumulator = %f, accumulatorMax = %f", _dropRatio.Value(), _accumulator, _accumulatorMax);
+}
+
+// This function signals when to drop frames to the caller. It makes use of the dropRatio
+// to smooth out the drops over time.
+bool
+VCMFrameDropper::DropFrame()
+{
+    if (!_enabled)
+    {
+        return false;
+    }
+    if (_dropNext)
+    {
+        _dropNext = false;
+        _dropCount = 0;
+    }
+
+    if (_dropRatio.Value() >= 0.5f) // Drops per keep
+    {
+        // limit is the number of frames we should drop between each kept frame
+        // to keep our drop ratio. limit is positive in this case.
+        float denom = 1.0f - _dropRatio.Value();
+        if (denom < 1e-5)
+        {
+            denom = (float)1e-5;
+        }
+        WebRtc_Word32 limit = static_cast<WebRtc_Word32>(1.0f / denom - 1.0f + 0.5f);
+        if (_dropCount < 0)
+        {
+            // Reset the _dropCount since it was negative and should be positive.
+            if (_dropRatio.Value() > 0.4f)
+            {
+                _dropCount = -_dropCount;
+            }
+            else
+            {
+                _dropCount = 0;
+            }
+        }
+        if (_dropCount < limit)
+        {
+            // As long we are below the limit we should drop frames.
+            _dropCount++;
+            return true;
+        }
+        else
+        {
+            // Only when we reset _dropCount a frame should be kept.
+            _dropCount = 0;
+            return false;
+        }
+    }
+    else if (_dropRatio.Value() > 0.0f && _dropRatio.Value() < 0.5f) // Keeps per drop
+    {
+        // limit is the number of frames we should keep between each drop
+        // in order to keep the drop ratio. limit is negative in this case,
+        // and the _dropCount is also negative.
+        float denom = _dropRatio.Value();
+        if (denom < 1e-5)
+        {
+            denom = (float)1e-5;
+        }
+        WebRtc_Word32 limit = -static_cast<WebRtc_Word32>(1.0f / denom - 1.0f + 0.5f);
+        if (_dropCount > 0)
+        {
+            // Reset the _dropCount since we have a positive
+            // _dropCount, and it should be negative.
+            if (_dropRatio.Value() < 0.6f)
+            {
+                _dropCount = -_dropCount;
+            }
+            else
+            {
+                _dropCount = 0;
+            }
+        }
+        if (_dropCount > limit)
+        {
+            if (_dropCount == 0)
+            {
+                // Drop frames when we reset _dropCount.
+                _dropCount--;
+                return true;
+            }
+            else
+            {
+                // Keep frames as long as we haven't reached limit.
+                _dropCount--;
+                return false;
+            }
+        }
+        else
+        {
+            _dropCount = 0;
+            return false;
+        }
+    }
+    _dropCount = 0;
+    return false;
+
+    // A simpler version, unfiltered and quicker
+    //bool dropNext = _dropNext;
+    //_dropNext = false;
+    //return dropNext;
+}
+
+void
+VCMFrameDropper::SetRates(float bitRate, float userFrameRate)
+{
+    // Bit rate of -1 means infinite bandwidth.
+    _accumulatorMax = bitRate * _windowSize; // bitRate * windowSize (in seconds)
+    if (_targetBitRate > 0.0f && bitRate < _targetBitRate && _accumulator > _accumulatorMax)
+    {
+        // Rescale the accumulator level if the accumulator max decreases
+        _accumulator = bitRate / _targetBitRate * _accumulator;
+    }
+    _targetBitRate = bitRate;
+    if (userFrameRate > 0.0f)
+    {
+        _userFrameRate = userFrameRate;
+    }
+}
+
+float
+VCMFrameDropper::ActualFrameRate(WebRtc_UWord32 inputFrameRate) const
+{
+    if (!_enabled)
+    {
+        return static_cast<float>(inputFrameRate);
+    }
+    return inputFrameRate * (1.0f - _dropRatio.Value());
+}
+
+}
diff --git a/src/modules/video_coding/main/source/frame_dropper.h b/src/modules/video_coding/main/source/frame_dropper.h
new file mode 100644
index 0000000..5e7e8a1
--- /dev/null
+++ b/src/modules/video_coding/main/source/frame_dropper.h
@@ -0,0 +1,94 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_FRAME_DROPPER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_FRAME_DROPPER_H_
+
+#include "exp_filter.h"
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+/******************************/
+/* VCMFrameDropper class     */
+/****************************/
+// The Frame Dropper implements a variant of the leaky bucket algorithm
+// for keeping track of when to drop frames to avoid bit rate
+// over use when the encoder can't keep its bit rate.
+class VCMFrameDropper
+{
+public:
+    VCMFrameDropper(WebRtc_Word32 vcmId = 0);
+    // Resets the FrameDropper to its initial state.
+    // This means that the frameRateWeight is set to its
+    // default value as well.
+    void Reset();
+
+    void Enable(bool enable);
+    // Answers the question if it's time to drop a frame
+    // if we want to reach a given frame rate. Must be
+    // called for every frame.
+    //
+    // Return value     : True if we should drop the current frame
+    bool DropFrame();
+    // Updates the FrameDropper with the size of the latest encoded
+    // frame. The FrameDropper calculates a new drop ratio (can be
+    // seen as the probability to drop a frame) and updates its
+    // internal statistics.
+    //
+    // Input:
+    //          - frameSizeBytes    : The size of the latest frame
+    //                                returned from the encoder.
+    //          - deltaFrame        : True if the encoder returned
+    //                                a key frame.
+    void Fill(WebRtc_UWord32 frameSizeBytes, bool deltaFrame);
+
+    void Leak(WebRtc_UWord32 inputFrameRate);
+
+    void UpdateNack(WebRtc_UWord32 nackBytes);
+
+    // Sets the target bit rate and the frame rate produced by
+    // the camera.
+    //
+    // Input:
+    //          - bitRate       : The target bit rate
+    void SetRates(float bitRate, float userFrameRate);
+
+    // Return value     : The current average frame rate produced
+    //                    if the DropFrame() function is used as
+    //                    instruction of when to drop frames.
+    float ActualFrameRate(WebRtc_UWord32 inputFrameRate) const;
+
+private:
+    void FillBucket(float inKbits, float outKbits);
+    void UpdateRatio();
+
+    WebRtc_Word32     _vcmId;
+    VCMExpFilter       _keyFrameSizeAvgKbits;
+    VCMExpFilter       _keyFrameRatio;
+    float           _keyFrameSpreadFrames;
+    WebRtc_Word32     _keyFrameCount;
+    float           _accumulator;
+    float           _accumulatorMax;
+    float           _targetBitRate;
+    bool            _dropNext;
+    VCMExpFilter       _dropRatio;
+    WebRtc_Word32     _dropCount;
+    float           _windowSize;
+    float           _userFrameRate;
+    bool            _wasBelowMax;
+    bool            _enabled;
+    bool            _fastMode;
+}; // end of VCMFrameDropper class
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_FRAME_DROPPER_H_
diff --git a/src/modules/video_coding/main/source/generic_decoder.cc b/src/modules/video_coding/main/source/generic_decoder.cc
new file mode 100644
index 0000000..a9f9682
--- /dev/null
+++ b/src/modules/video_coding/main/source/generic_decoder.cc
@@ -0,0 +1,221 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_coding.h"
+#include "trace.h"
+#include "generic_decoder.h"
+#include "internal_defines.h"
+#include "tick_time_base.h"
+
+namespace webrtc {
+
+VCMDecodedFrameCallback::VCMDecodedFrameCallback(VCMTiming& timing,
+                                                 TickTimeBase* clock)
+:
+_critSect(CriticalSectionWrapper::CreateCriticalSection()),
+_clock(clock),
+_receiveCallback(NULL),
+_timing(timing),
+_timestampMap(kDecoderFrameMemoryLength),
+_lastReceivedPictureID(0)
+{
+}
+
+VCMDecodedFrameCallback::~VCMDecodedFrameCallback()
+{
+    delete _critSect;
+}
+
+void VCMDecodedFrameCallback::SetUserReceiveCallback(
+    VCMReceiveCallback* receiveCallback)
+{
+    CriticalSectionScoped cs(_critSect);
+    _receiveCallback = receiveCallback;
+}
+
+WebRtc_Word32 VCMDecodedFrameCallback::Decoded(VideoFrame& decodedImage)
+{
+    // TODO(holmer): We should improve this so that we can handle multiple
+    // callbacks from one call to Decode().
+    CriticalSectionScoped cs(_critSect);
+    VCMFrameInformation* frameInfo = static_cast<VCMFrameInformation*>(
+        _timestampMap.Pop(decodedImage.TimeStamp()));
+    if (frameInfo == NULL)
+    {
+        // The map should never be empty or full if this callback is called.
+        return WEBRTC_VIDEO_CODEC_ERROR;
+    }
+
+    _timing.StopDecodeTimer(
+        decodedImage.TimeStamp(),
+        frameInfo->decodeStartTimeMs,
+        _clock->MillisecondTimestamp());
+
+    if (_receiveCallback != NULL)
+    {
+        _frame.SwapFrame(decodedImage);
+        _frame.SetRenderTime(frameInfo->renderTimeMs);
+        WebRtc_Word32 callbackReturn = _receiveCallback->FrameToRender(_frame);
+        if (callbackReturn < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceDebug,
+                         webrtc::kTraceVideoCoding,
+                         -1,
+                         "Render callback returned error: %d", callbackReturn);
+        }
+    }
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+WebRtc_Word32
+VCMDecodedFrameCallback::ReceivedDecodedReferenceFrame(
+    const WebRtc_UWord64 pictureId)
+{
+    CriticalSectionScoped cs(_critSect);
+    if (_receiveCallback != NULL)
+    {
+        return _receiveCallback->ReceivedDecodedReferenceFrame(pictureId);
+    }
+    return -1;
+}
+
+WebRtc_Word32
+VCMDecodedFrameCallback::ReceivedDecodedFrame(const WebRtc_UWord64 pictureId)
+{
+    _lastReceivedPictureID = pictureId;
+    return 0;
+}
+
+WebRtc_UWord64 VCMDecodedFrameCallback::LastReceivedPictureID() const
+{
+    return _lastReceivedPictureID;
+}
+
+WebRtc_Word32 VCMDecodedFrameCallback::Map(WebRtc_UWord32 timestamp, VCMFrameInformation* frameInfo)
+{
+    CriticalSectionScoped cs(_critSect);
+    return _timestampMap.Add(timestamp, frameInfo);
+}
+
+WebRtc_Word32 VCMDecodedFrameCallback::Pop(WebRtc_UWord32 timestamp)
+{
+    CriticalSectionScoped cs(_critSect);
+    if (_timestampMap.Pop(timestamp) == NULL)
+    {
+        return VCM_GENERAL_ERROR;
+    }
+    return VCM_OK;
+}
+
+VCMGenericDecoder::VCMGenericDecoder(VideoDecoder& decoder, WebRtc_Word32 id, bool isExternal)
+:
+_id(id),
+_callback(NULL),
+_frameInfos(),
+_nextFrameInfoIdx(0),
+_decoder(decoder),
+_codecType(kVideoCodecUnknown),
+_isExternal(isExternal),
+_requireKeyFrame(false),
+_keyFrameDecoded(false)
+{
+}
+
+VCMGenericDecoder::~VCMGenericDecoder()
+{
+}
+
+WebRtc_Word32 VCMGenericDecoder::InitDecode(const VideoCodec* settings,
+                                            WebRtc_Word32 numberOfCores,
+                                            bool requireKeyFrame)
+{
+    _requireKeyFrame = requireKeyFrame;
+    _keyFrameDecoded = false;
+    _codecType = settings->codecType;
+
+    return _decoder.InitDecode(settings, numberOfCores);
+}
+
+WebRtc_Word32 VCMGenericDecoder::Decode(const VCMEncodedFrame& frame,
+                                        int64_t nowMs)
+{
+    if (_requireKeyFrame &&
+        !_keyFrameDecoded &&
+        frame.FrameType() != kVideoFrameKey &&
+        frame.FrameType() != kVideoFrameGolden)
+    {
+        // Require key frame is enabled, meaning that one key frame must be decoded
+        // before we can decode delta frames.
+        return VCM_CODEC_ERROR;
+    }
+    _frameInfos[_nextFrameInfoIdx].decodeStartTimeMs = nowMs;
+    _frameInfos[_nextFrameInfoIdx].renderTimeMs = frame.RenderTimeMs();
+    _callback->Map(frame.TimeStamp(), &_frameInfos[_nextFrameInfoIdx]);
+
+    WEBRTC_TRACE(webrtc::kTraceDebug,
+                 webrtc::kTraceVideoCoding,
+                 VCMId(_id),
+                 "Decoding timestamp %u", frame.TimeStamp());
+
+    _nextFrameInfoIdx = (_nextFrameInfoIdx + 1) % kDecoderFrameMemoryLength;
+
+    WebRtc_Word32 ret = _decoder.Decode(frame.EncodedImage(),
+                                        frame.MissingFrame(),
+                                        frame.FragmentationHeader(),
+                                        frame.CodecSpecific(),
+                                        frame.RenderTimeMs());
+
+    if (ret < WEBRTC_VIDEO_CODEC_OK)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding, VCMId(_id), "Decoder error: %d\n", ret);
+        _callback->Pop(frame.TimeStamp());
+        return ret;
+    }
+    else if (ret == WEBRTC_VIDEO_CODEC_NO_OUTPUT ||
+             ret == WEBRTC_VIDEO_CODEC_REQUEST_SLI)
+    {
+        // No output
+        _callback->Pop(frame.TimeStamp());
+    }
+    // Update the key frame decoded variable so that we know whether or not we've decoded a key frame since reset.
+    _keyFrameDecoded = (frame.FrameType() == kVideoFrameKey || frame.FrameType() == kVideoFrameGolden);
+    return ret;
+}
+
+WebRtc_Word32
+VCMGenericDecoder::Release()
+{
+    _keyFrameDecoded = false;
+    return _decoder.Release();
+}
+
+WebRtc_Word32 VCMGenericDecoder::Reset()
+{
+    _keyFrameDecoded = false;
+    return _decoder.Reset();
+}
+
+WebRtc_Word32 VCMGenericDecoder::SetCodecConfigParameters(const WebRtc_UWord8* buffer, WebRtc_Word32 size)
+{
+    return _decoder.SetCodecConfigParameters(buffer, size);
+}
+
+WebRtc_Word32 VCMGenericDecoder::RegisterDecodeCompleteCallback(VCMDecodedFrameCallback* callback)
+{
+    _callback = callback;
+    return _decoder.RegisterDecodeCompleteCallback(callback);
+}
+
+bool VCMGenericDecoder::External() const
+{
+    return _isExternal;
+}
+
+} // namespace
diff --git a/src/modules/video_coding/main/source/generic_decoder.h b/src/modules/video_coding/main/source/generic_decoder.h
new file mode 100644
index 0000000..5299f42
--- /dev/null
+++ b/src/modules/video_coding/main/source/generic_decoder.h
@@ -0,0 +1,121 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_GENERIC_DECODER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_GENERIC_DECODER_H_
+
+#include "timing.h"
+#include "timestamp_map.h"
+#include "video_codec_interface.h"
+#include "encoded_frame.h"
+#include "module_common_types.h"
+
+namespace webrtc
+{
+
+class VCMReceiveCallback;
+
+enum { kDecoderFrameMemoryLength = 10 };
+
+struct VCMFrameInformation
+{
+    WebRtc_Word64     renderTimeMs;
+    WebRtc_Word64     decodeStartTimeMs;
+    void*             userData;
+};
+
+class VCMDecodedFrameCallback : public DecodedImageCallback
+{
+public:
+    VCMDecodedFrameCallback(VCMTiming& timing, TickTimeBase* clock);
+    virtual ~VCMDecodedFrameCallback();
+    void SetUserReceiveCallback(VCMReceiveCallback* receiveCallback);
+
+    virtual WebRtc_Word32 Decoded(VideoFrame& decodedImage);
+    virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(const WebRtc_UWord64 pictureId);
+    virtual WebRtc_Word32 ReceivedDecodedFrame(const WebRtc_UWord64 pictureId);
+
+    WebRtc_UWord64 LastReceivedPictureID() const;
+
+    WebRtc_Word32 Map(WebRtc_UWord32 timestamp, VCMFrameInformation* frameInfo);
+    WebRtc_Word32 Pop(WebRtc_UWord32 timestamp);
+
+private:
+    CriticalSectionWrapper* _critSect;
+    TickTimeBase* _clock;
+    VideoFrame _frame;
+    VCMReceiveCallback* _receiveCallback;
+    VCMTiming& _timing;
+    VCMTimestampMap _timestampMap;
+    WebRtc_UWord64 _lastReceivedPictureID;
+};
+
+
+class VCMGenericDecoder
+{
+    friend class VCMCodecDataBase;
+public:
+    VCMGenericDecoder(VideoDecoder& decoder, WebRtc_Word32 id = 0, bool isExternal = false);
+    ~VCMGenericDecoder();
+
+    /**
+    *	Initialize the decoder with the information from the VideoCodec
+    */
+    WebRtc_Word32 InitDecode(const VideoCodec* settings,
+                             WebRtc_Word32 numberOfCores,
+                             bool requireKeyFrame);
+
+    /**
+    *	Decode to a raw I420 frame,
+    *
+    *	inputVideoBuffer	reference to encoded video frame
+    */
+    WebRtc_Word32 Decode(const VCMEncodedFrame& inputFrame, int64_t nowMs);
+
+    /**
+    *	Free the decoder memory
+    */
+    WebRtc_Word32 Release();
+
+    /**
+    *	Reset the decoder state, prepare for a new call
+    */
+    WebRtc_Word32 Reset();
+
+    /**
+    *	Codec configuration data sent out-of-band, i.e. in SIP call setup
+    *
+    *	buffer pointer to the configuration data
+    *	size the size of the configuration data in bytes
+    */
+    WebRtc_Word32 SetCodecConfigParameters(const WebRtc_UWord8* /*buffer*/,
+                                           WebRtc_Word32 /*size*/);
+
+    WebRtc_Word32 RegisterDecodeCompleteCallback(VCMDecodedFrameCallback* callback);
+
+    bool External() const;
+
+protected:
+
+    WebRtc_Word32               _id;
+    VCMDecodedFrameCallback*    _callback;
+    VCMFrameInformation         _frameInfos[kDecoderFrameMemoryLength];
+    WebRtc_UWord32              _nextFrameInfoIdx;
+    VideoDecoder&               _decoder;
+    VideoCodecType              _codecType;
+    bool                        _isExternal;
+    bool                        _requireKeyFrame;
+    bool                        _keyFrameDecoded;
+
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_GENERIC_DECODER_H_
diff --git a/src/modules/video_coding/main/source/generic_encoder.cc b/src/modules/video_coding/main/source/generic_encoder.cc
new file mode 100644
index 0000000..8ead0e5
--- /dev/null
+++ b/src/modules/video_coding/main/source/generic_encoder.cc
@@ -0,0 +1,267 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "encoded_frame.h"
+#include "generic_encoder.h"
+#include "media_optimization.h"
+#include "../../../../engine_configurations.h"
+
+namespace webrtc {
+
+//#define DEBUG_ENCODER_BIT_STREAM
+
+VCMGenericEncoder::VCMGenericEncoder(VideoEncoder& encoder, bool internalSource /*= false*/)
+:
+_encoder(encoder),
+_codecType(kVideoCodecUnknown),
+_VCMencodedFrameCallback(NULL),
+_bitRate(0),
+_frameRate(0),
+_internalSource(false)
+{
+}
+
+
+VCMGenericEncoder::~VCMGenericEncoder()
+{
+}
+
+WebRtc_Word32 VCMGenericEncoder::Release()
+{
+    _bitRate = 0;
+    _frameRate = 0;
+    _VCMencodedFrameCallback = NULL;
+    return _encoder.Release();
+}
+
+WebRtc_Word32
+VCMGenericEncoder::InitEncode(const VideoCodec* settings,
+                              WebRtc_Word32 numberOfCores,
+                              WebRtc_UWord32 maxPayloadSize)
+{
+    _bitRate = settings->startBitrate;
+    _frameRate = settings->maxFramerate;
+    _codecType = settings->codecType;
+    if (_VCMencodedFrameCallback != NULL)
+    {
+        _VCMencodedFrameCallback->SetCodecType(_codecType);
+    }
+    return _encoder.InitEncode(settings, numberOfCores, maxPayloadSize);
+}
+
+WebRtc_Word32
+VCMGenericEncoder::Encode(const VideoFrame& inputFrame,
+                          const CodecSpecificInfo* codecSpecificInfo,
+                          const FrameType frameType) {
+  VideoFrameType videoFrameType = VCMEncodedFrame::ConvertFrameType(frameType);
+  return _encoder.Encode(inputFrame, codecSpecificInfo, videoFrameType);
+}
+
+WebRtc_Word32
+VCMGenericEncoder::SetChannelParameters(WebRtc_Word32 packetLoss, int rtt)
+{
+    return _encoder.SetChannelParameters(packetLoss, rtt);
+}
+
+WebRtc_Word32
+VCMGenericEncoder::SetRates(WebRtc_UWord32 newBitRate, WebRtc_UWord32 frameRate)
+{
+    WebRtc_Word32 ret = _encoder.SetRates(newBitRate, frameRate);
+    if (ret < 0)
+    {
+        return ret;
+    }
+    _bitRate = newBitRate;
+    _frameRate = frameRate;
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VCMGenericEncoder::CodecConfigParameters(WebRtc_UWord8* buffer, WebRtc_Word32 size)
+{
+    WebRtc_Word32 ret = _encoder.CodecConfigParameters(buffer, size);
+    if (ret < 0)
+    {
+        return ret;
+    }
+    return ret;
+}
+
+WebRtc_UWord32 VCMGenericEncoder::BitRate() const
+{
+    return _bitRate;
+}
+
+WebRtc_UWord32 VCMGenericEncoder::FrameRate() const
+{
+    return _frameRate;
+}
+
+WebRtc_Word32
+VCMGenericEncoder::SetPeriodicKeyFrames(bool enable)
+{
+    return _encoder.SetPeriodicKeyFrames(enable);
+}
+
+WebRtc_Word32 VCMGenericEncoder::RequestFrame(const FrameType frameType) {
+  VideoFrame image;
+  VideoFrameType videoFrameType = VCMEncodedFrame::ConvertFrameType(frameType);
+  return _encoder.Encode(image, NULL,  videoFrameType);
+}
+
+WebRtc_Word32
+VCMGenericEncoder::RegisterEncodeCallback(VCMEncodedFrameCallback* VCMencodedFrameCallback)
+{
+   _VCMencodedFrameCallback = VCMencodedFrameCallback;
+
+   _VCMencodedFrameCallback->SetCodecType(_codecType);
+   _VCMencodedFrameCallback->SetInternalSource(_internalSource);
+   return _encoder.RegisterEncodeCompleteCallback(_VCMencodedFrameCallback);
+}
+
+bool
+VCMGenericEncoder::InternalSource() const
+{
+    return _internalSource;
+}
+
+ /***************************
+  * Callback Implementation
+  ***************************/
+VCMEncodedFrameCallback::VCMEncodedFrameCallback():
+_sendCallback(),
+_mediaOpt(NULL),
+_encodedBytes(0),
+_payloadType(0),
+_codecType(kVideoCodecUnknown),
+_internalSource(false)
+#ifdef DEBUG_ENCODER_BIT_STREAM
+, _bitStreamAfterEncoder(NULL)
+#endif
+{
+#ifdef DEBUG_ENCODER_BIT_STREAM
+    _bitStreamAfterEncoder = fopen("encoderBitStream.bit", "wb");
+#endif
+}
+
+VCMEncodedFrameCallback::~VCMEncodedFrameCallback()
+{
+#ifdef DEBUG_ENCODER_BIT_STREAM
+    fclose(_bitStreamAfterEncoder);
+#endif
+}
+
+WebRtc_Word32
+VCMEncodedFrameCallback::SetTransportCallback(VCMPacketizationCallback* transport)
+{
+    _sendCallback = transport;
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VCMEncodedFrameCallback::Encoded(
+    EncodedImage &encodedImage,
+    const CodecSpecificInfo* codecSpecificInfo,
+    const RTPFragmentationHeader* fragmentationHeader)
+{
+    FrameType frameType = VCMEncodedFrame::ConvertFrameType(encodedImage._frameType);
+
+    WebRtc_UWord32 encodedBytes = 0;
+    if (_sendCallback != NULL)
+    {
+        encodedBytes = encodedImage._length;
+
+#ifdef DEBUG_ENCODER_BIT_STREAM
+        if (_bitStreamAfterEncoder != NULL)
+        {
+            fwrite(encodedImage._buffer, 1, encodedImage._length, _bitStreamAfterEncoder);
+        }
+#endif
+
+        RTPVideoHeader rtpVideoHeader;
+        RTPVideoHeader* rtpVideoHeaderPtr = &rtpVideoHeader;
+        if (codecSpecificInfo)
+        {
+            CopyCodecSpecific(*codecSpecificInfo, &rtpVideoHeaderPtr);
+        }
+        else
+        {
+            rtpVideoHeaderPtr = NULL;
+        }
+
+        WebRtc_Word32 callbackReturn = _sendCallback->SendData(
+            frameType,
+            _payloadType,
+            encodedImage._timeStamp,
+            encodedImage.capture_time_ms_,
+            encodedImage._buffer,
+            encodedBytes,
+            *fragmentationHeader,
+            rtpVideoHeaderPtr);
+       if (callbackReturn < 0)
+       {
+           return callbackReturn;
+       }
+    }
+    else
+    {
+        return VCM_UNINITIALIZED;
+    }
+    _encodedBytes = encodedBytes;
+    if (_mediaOpt != NULL) {
+      _mediaOpt->UpdateWithEncodedData(_encodedBytes, frameType);
+      if (_internalSource)
+      {
+          return _mediaOpt->DropFrame(); // Signal to encoder to drop next frame
+      }
+    }
+    return VCM_OK;
+}
+
+WebRtc_UWord32
+VCMEncodedFrameCallback::EncodedBytes()
+{
+    return _encodedBytes;
+}
+
+void
+VCMEncodedFrameCallback::SetMediaOpt(VCMMediaOptimization *mediaOpt)
+{
+    _mediaOpt = mediaOpt;
+}
+
+void VCMEncodedFrameCallback::CopyCodecSpecific(const CodecSpecificInfo& info,
+                                                RTPVideoHeader** rtp) {
+    switch (info.codecType) {
+        case kVideoCodecVP8: {
+            (*rtp)->codecHeader.VP8.InitRTPVideoHeaderVP8();
+            (*rtp)->codecHeader.VP8.pictureId =
+                info.codecSpecific.VP8.pictureId;
+            (*rtp)->codecHeader.VP8.nonReference =
+                info.codecSpecific.VP8.nonReference;
+            (*rtp)->codecHeader.VP8.temporalIdx =
+                info.codecSpecific.VP8.temporalIdx;
+            (*rtp)->codecHeader.VP8.layerSync =
+                info.codecSpecific.VP8.layerSync;
+            (*rtp)->codecHeader.VP8.tl0PicIdx =
+                info.codecSpecific.VP8.tl0PicIdx;
+            (*rtp)->codecHeader.VP8.keyIdx =
+                info.codecSpecific.VP8.keyIdx;
+            (*rtp)->simulcastIdx = info.codecSpecific.VP8.simulcastIdx;
+            return;
+        }
+        default: {
+            // No codec specific info. Change RTP header pointer to NULL.
+            *rtp = NULL;
+            return;
+        }
+    }
+}
+} // namespace webrtc
diff --git a/src/modules/video_coding/main/source/generic_encoder.h b/src/modules/video_coding/main/source/generic_encoder.h
new file mode 100644
index 0000000..c75339b
--- /dev/null
+++ b/src/modules/video_coding/main/source/generic_encoder.h
@@ -0,0 +1,145 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_GENERIC_ENCODER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_GENERIC_ENCODER_H_
+
+#include "video_codec_interface.h"
+
+#include <stdio.h>
+
+namespace webrtc
+{
+
+class VCMMediaOptimization;
+
+/*************************************/
+/* VCMEncodeFrameCallback class     */
+/***********************************/
+class VCMEncodedFrameCallback : public EncodedImageCallback
+{
+public:
+    VCMEncodedFrameCallback();
+    virtual ~VCMEncodedFrameCallback();
+
+    /*
+    * Callback implementation - codec encode complete
+    */
+    WebRtc_Word32 Encoded(
+        EncodedImage& encodedImage,
+        const CodecSpecificInfo* codecSpecificInfo = NULL,
+        const RTPFragmentationHeader* fragmentationHeader = NULL);
+    /*
+    * Get number of encoded bytes
+    */
+    WebRtc_UWord32 EncodedBytes();
+    /*
+    * Callback implementation - generic encoder encode complete
+    */
+    WebRtc_Word32 SetTransportCallback(VCMPacketizationCallback* transport);
+    /**
+    * Set media Optimization
+    */
+    void SetMediaOpt (VCMMediaOptimization* mediaOpt);
+
+    void SetPayloadType(WebRtc_UWord8 payloadType) { _payloadType = payloadType; };
+    void SetCodecType(VideoCodecType codecType) {_codecType = codecType;};
+    void SetInternalSource(bool internalSource) { _internalSource = internalSource; };
+
+private:
+    /*
+     * Map information from info into rtp. If no relevant information is found
+     * in info, rtp is set to NULL.
+     */
+    static void CopyCodecSpecific(const CodecSpecificInfo& info,
+                                  RTPVideoHeader** rtp);
+
+    VCMPacketizationCallback* _sendCallback;
+    VCMMediaOptimization*     _mediaOpt;
+    WebRtc_UWord32            _encodedBytes;
+    WebRtc_UWord8             _payloadType;
+    VideoCodecType            _codecType;
+    bool                      _internalSource;
+#ifdef DEBUG_ENCODER_BIT_STREAM
+    FILE*                     _bitStreamAfterEncoder;
+#endif
+};// end of VCMEncodeFrameCallback class
+
+
+/******************************/
+/* VCMGenericEncoder class    */
+/******************************/
+class VCMGenericEncoder
+{
+    friend class VCMCodecDataBase;
+public:
+    VCMGenericEncoder(VideoEncoder& encoder, bool internalSource = false);
+    ~VCMGenericEncoder();
+    /**
+    *	Free encoder memory
+    */
+    WebRtc_Word32 Release();
+    /**
+    *	Initialize the encoder with the information from the VideoCodec
+    */
+    WebRtc_Word32 InitEncode(const VideoCodec* settings,
+                             WebRtc_Word32 numberOfCores,
+                             WebRtc_UWord32 maxPayloadSize);
+    /**
+    *	Encode raw image
+    *	inputFrame        : Frame containing raw image
+    *	codecSpecificInfo : Specific codec data
+    *	cameraFrameRate	  :	request or information from the remote side
+    *	frameType         : The requested frame type to encode
+    */
+    WebRtc_Word32 Encode(const VideoFrame& inputFrame,
+                         const CodecSpecificInfo* codecSpecificInfo,
+                         const FrameType frameType);
+    /**
+    *	Set new target bit rate and frame rate
+    * Return Value: new bit rate if OK, otherwise <0s
+    */
+    WebRtc_Word32 SetRates(WebRtc_UWord32 newBitRate, WebRtc_UWord32 frameRate);
+    /**
+    * Set a new packet loss rate and a new round-trip time in milliseconds.
+    */
+    WebRtc_Word32 SetChannelParameters(WebRtc_Word32 packetLoss, int rtt);
+    WebRtc_Word32 CodecConfigParameters(WebRtc_UWord8* buffer, WebRtc_Word32 size);
+    /**
+    * Register a transport callback which will be called to deliver the encoded buffers
+    */
+    WebRtc_Word32 RegisterEncodeCallback(VCMEncodedFrameCallback* VCMencodedFrameCallback);
+    /**
+    * Get encoder bit rate
+    */
+    WebRtc_UWord32 BitRate() const;
+     /**
+    * Get encoder frame rate
+    */
+    WebRtc_UWord32 FrameRate() const;
+
+    WebRtc_Word32 SetPeriodicKeyFrames(bool enable);
+
+    WebRtc_Word32 RequestFrame(const FrameType frameType);
+
+    bool InternalSource() const;
+
+private:
+    VideoEncoder&               _encoder;
+    VideoCodecType              _codecType;
+    VCMEncodedFrameCallback*    _VCMencodedFrameCallback;
+    WebRtc_UWord32              _bitRate;
+    WebRtc_UWord32              _frameRate;
+    bool                        _internalSource;
+}; // end of VCMGenericEncoder class
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_GENERIC_ENCODER_H_
diff --git a/src/modules/video_coding/main/source/inter_frame_delay.cc b/src/modules/video_coding/main/source/inter_frame_delay.cc
new file mode 100644
index 0000000..3b520b3
--- /dev/null
+++ b/src/modules/video_coding/main/source/inter_frame_delay.cc
@@ -0,0 +1,114 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "inter_frame_delay.h"
+
+namespace webrtc {
+
+VCMInterFrameDelay::VCMInterFrameDelay(int64_t currentWallClock)
+{
+    Reset(currentWallClock);
+}
+
+// Resets the delay estimate
+void
+VCMInterFrameDelay::Reset(int64_t currentWallClock)
+{
+    _zeroWallClock = currentWallClock;
+    _wrapArounds = 0;
+    _prevWallClock = 0;
+    _prevTimestamp = 0;
+    _dTS = 0;
+}
+
+// Calculates the delay of a frame with the given timestamp.
+// This method is called when the frame is complete.
+bool
+VCMInterFrameDelay::CalculateDelay(WebRtc_UWord32 timestamp,
+                                WebRtc_Word64 *delay,
+                                int64_t currentWallClock)
+{
+    if (_prevWallClock == 0)
+    {
+        // First set of data, initialization, wait for next frame
+        _prevWallClock = currentWallClock;
+        _prevTimestamp = timestamp;
+        *delay = 0;
+        return true;
+    }
+
+    WebRtc_Word32 prevWrapArounds = _wrapArounds;
+    CheckForWrapArounds(timestamp);
+
+    // This will be -1 for backward wrap arounds and +1 for forward wrap arounds
+    WebRtc_Word32 wrapAroundsSincePrev = _wrapArounds - prevWrapArounds;
+
+    // Account for reordering in jitter variance estimate in the future?
+    // Note that this also captures incomplete frames which are grabbed
+    // for decoding after a later frame has been complete, i.e. real
+    // packet losses.
+    if ((wrapAroundsSincePrev == 0 && timestamp < _prevTimestamp) || wrapAroundsSincePrev < 0)
+    {
+        *delay = 0;
+        return false;
+    }
+
+    // Compute the compensated timestamp difference and convert it to ms and
+    // round it to closest integer.
+    _dTS = static_cast<WebRtc_Word64>((timestamp + wrapAroundsSincePrev *
+                (static_cast<WebRtc_Word64>(1)<<32) - _prevTimestamp) / 90.0 + 0.5);
+
+    // frameDelay is the difference of dT and dTS -- i.e. the difference of
+    // the wall clock time difference and the timestamp difference between
+    // two following frames.
+    *delay = static_cast<WebRtc_Word64>(currentWallClock - _prevWallClock - _dTS);
+
+    _prevTimestamp = timestamp;
+    _prevWallClock = currentWallClock;
+
+    return true;
+}
+
+// Returns the current difference between incoming timestamps
+WebRtc_UWord32 VCMInterFrameDelay::CurrentTimeStampDiffMs() const
+{
+    if (_dTS < 0)
+    {
+        return 0;
+    }
+    return static_cast<WebRtc_UWord32>(_dTS);
+}
+
+// Investigates if the timestamp clock has overflowed since the last timestamp and
+// keeps track of the number of wrap arounds since reset.
+void
+VCMInterFrameDelay::CheckForWrapArounds(WebRtc_UWord32 timestamp)
+{
+    if (timestamp < _prevTimestamp)
+    {
+        // This difference will probably be less than -2^31 if we have had a wrap around
+        // (e.g. timestamp = 1, _previousTimestamp = 2^32 - 1). Since it is cast to a Word32,
+        // it should be positive.
+        if (static_cast<WebRtc_Word32>(timestamp - _prevTimestamp) > 0)
+        {
+            // Forward wrap around
+            _wrapArounds++;
+        }
+    }
+    // This difference will probably be less than -2^31 if we have had a backward wrap around.
+    // Since it is cast to a Word32, it should be positive.
+    else if (static_cast<WebRtc_Word32>(_prevTimestamp - timestamp) > 0)
+    {
+        // Backward wrap around
+        _wrapArounds--;
+    }
+}
+
+}
diff --git a/src/modules/video_coding/main/source/inter_frame_delay.h b/src/modules/video_coding/main/source/inter_frame_delay.h
new file mode 100644
index 0000000..807c64b
--- /dev/null
+++ b/src/modules/video_coding/main/source/inter_frame_delay.h
@@ -0,0 +1,66 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_INTER_FRAME_DELAY_H_
+#define WEBRTC_MODULES_VIDEO_CODING_INTER_FRAME_DELAY_H_
+
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+class VCMInterFrameDelay
+{
+public:
+    VCMInterFrameDelay(int64_t currentWallClock);
+
+    // Resets the estimate. Zeros are given as parameters.
+    void Reset(int64_t currentWallClock);
+
+    // Calculates the delay of a frame with the given timestamp.
+    // This method is called when the frame is complete.
+    //
+    // Input:
+    //          - timestamp         : RTP timestamp of a received frame
+    //          - *delay            : Pointer to memory where the result should be stored
+    //          - currentWallClock  : The current time in milliseconds.
+    //                                Should be -1 for normal operation, only used for testing.
+    // Return value                 : true if OK, false when reordered timestamps
+    bool CalculateDelay(WebRtc_UWord32 timestamp,
+                        WebRtc_Word64 *delay,
+                        int64_t currentWallClock);
+
+    // Returns the current difference between incoming timestamps
+    //
+    // Return value                 : Wrap-around compensated difference between incoming
+    //                                timestamps.
+    WebRtc_UWord32 CurrentTimeStampDiffMs() const;
+
+private:
+    // Controls if the RTP timestamp counter has had a wrap around
+    // between the current and the previously received frame.
+    //
+    // Input:
+    //          - timestmap         : RTP timestamp of the current frame.
+    void CheckForWrapArounds(WebRtc_UWord32 timestamp);
+
+    WebRtc_Word64         _zeroWallClock; // Local timestamp of the first video packet received
+    WebRtc_Word32         _wrapArounds;   // Number of wrapArounds detected
+    // The previous timestamp passed to the delay estimate
+    WebRtc_UWord32        _prevTimestamp;
+    // The previous wall clock timestamp used by the delay estimate
+    WebRtc_Word64         _prevWallClock;
+    // Wrap-around compensated difference between incoming timestamps
+    WebRtc_Word64         _dTS;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_INTER_FRAME_DELAY_H_
diff --git a/src/modules/video_coding/main/source/internal_defines.h b/src/modules/video_coding/main/source/internal_defines.h
new file mode 100644
index 0000000..0ba6385
--- /dev/null
+++ b/src/modules/video_coding/main/source/internal_defines.h
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_SOURCE_INTERNAL_DEFINES_H_
+#define WEBRTC_MODULES_VIDEO_CODING_SOURCE_INTERNAL_DEFINES_H_
+
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+#define MASK_32_BITS(x) (0xFFFFFFFF & (x))
+
+inline WebRtc_UWord32 MaskWord64ToUWord32(WebRtc_Word64 w64)
+{
+    return static_cast<WebRtc_UWord32>(MASK_32_BITS(w64));
+}
+
+#define VCM_MAX(a, b) (((a) > (b)) ? (a) : (b))
+#define VCM_MIN(a, b) (((a) < (b)) ? (a) : (b))
+
+#define VCM_DEFAULT_CODEC_WIDTH 352
+#define VCM_DEFAULT_CODEC_HEIGHT 288
+#define VCM_DEFAULT_FRAME_RATE 30
+#define VCM_MIN_BITRATE 30
+#define VCM_FLUSH_INDICATOR 4
+
+// Helper macros for creating the static codec list
+#define VCM_NO_CODEC_IDX -1
+#ifdef VIDEOCODEC_VP8
+  #define VCM_VP8_IDX VCM_NO_CODEC_IDX + 1
+#else
+  #define VCM_VP8_IDX VCM_NO_CODEC_IDX
+#endif
+#ifdef VIDEOCODEC_I420
+  #define VCM_I420_IDX VCM_VP8_IDX + 1
+#else
+  #define VCM_I420_IDX VCM_VP8_IDX
+#endif
+#define VCM_NUM_VIDEO_CODECS_AVAILABLE VCM_I420_IDX + 1
+
+#define VCM_NO_RECEIVER_ID 0
+
+inline WebRtc_Word32 VCMId(const WebRtc_Word32 vcmId, const WebRtc_Word32 receiverId = 0)
+{
+    return static_cast<WebRtc_Word32>((vcmId << 16) + receiverId);
+}
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_SOURCE_INTERNAL_DEFINES_H_
diff --git a/src/modules/video_coding/main/source/jitter_buffer.cc b/src/modules/video_coding/main/source/jitter_buffer.cc
new file mode 100644
index 0000000..23f2e48
--- /dev/null
+++ b/src/modules/video_coding/main/source/jitter_buffer.cc
@@ -0,0 +1,1770 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#include "modules/video_coding/main/source/jitter_buffer.h"
+
+#include <algorithm>
+#include <cassert>
+
+#include "modules/video_coding/main/source/event.h"
+#include "modules/video_coding/main/source/frame_buffer.h"
+#include "modules/video_coding/main/source/inter_frame_delay.h"
+#include "modules/video_coding/main/source/internal_defines.h"
+#include "modules/video_coding/main/source/jitter_buffer_common.h"
+#include "modules/video_coding/main/source/jitter_estimator.h"
+#include "modules/video_coding/main/source/packet.h"
+#include "modules/video_coding/main/source/tick_time_base.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/trace.h"
+
+namespace webrtc {
+
+// Predicates used when searching for frames in the frame buffer list
+class FrameSmallerTimestamp {
+ public:
+  FrameSmallerTimestamp(uint32_t timestamp) : timestamp_(timestamp) {}
+  bool operator()(VCMFrameBuffer* frame) {
+    return (LatestTimestamp(timestamp_, frame->TimeStamp(), NULL) ==
+        timestamp_);
+  }
+
+ private:
+  uint32_t timestamp_;
+};
+
+class FrameEqualTimestamp {
+ public:
+  FrameEqualTimestamp(uint32_t timestamp) : timestamp_(timestamp) {}
+  bool operator()(VCMFrameBuffer* frame) {
+    return (timestamp_ == frame->TimeStamp());
+  }
+
+ private:
+  uint32_t timestamp_;
+};
+
+class CompleteDecodableKeyFrameCriteria {
+ public:
+  bool operator()(VCMFrameBuffer* frame) {
+    return (frame->FrameType() == kVideoFrameKey) &&
+           (frame->GetState() == kStateComplete ||
+            frame->GetState() == kStateDecodable);
+  }
+};
+
+// Constructor
+VCMJitterBuffer::VCMJitterBuffer(TickTimeBase* clock,
+                                 WebRtc_Word32 vcmId,
+                                 WebRtc_Word32 receiverId,
+                                 bool master) :
+    _vcmId(vcmId),
+    _receiverId(receiverId),
+    _clock(clock),
+    _running(false),
+    _critSect(CriticalSectionWrapper::CreateCriticalSection()),
+    _master(master),
+    _frameEvent(),
+    _packetEvent(),
+    _maxNumberOfFrames(kStartNumberOfFrames),
+    _frameBuffers(),
+    _frameList(),
+    _lastDecodedState(),
+    _packetsNotDecodable(0),
+    _receiveStatistics(),
+    _incomingFrameRate(0),
+    _incomingFrameCount(0),
+    _timeLastIncomingFrameCount(0),
+    _incomingBitCount(0),
+    _incomingBitRate(0),
+    _dropCount(0),
+    _numConsecutiveOldFrames(0),
+    _numConsecutiveOldPackets(0),
+    _discardedPackets(0),
+    _jitterEstimate(vcmId, receiverId),
+    _delayEstimate(_clock->MillisecondTimestamp()),
+    _rttMs(0),
+    _nackMode(kNoNack),
+    _lowRttNackThresholdMs(-1),
+    _highRttNackThresholdMs(-1),
+    _NACKSeqNum(),
+    _NACKSeqNumLength(0),
+    _waitingForKeyFrame(false),
+    _firstPacket(true)
+{
+    memset(_frameBuffers, 0, sizeof(_frameBuffers));
+    memset(_receiveStatistics, 0, sizeof(_receiveStatistics));
+    memset(_NACKSeqNumInternal, -1, sizeof(_NACKSeqNumInternal));
+
+    for (int i = 0; i< kStartNumberOfFrames; i++)
+    {
+        _frameBuffers[i] = new VCMFrameBuffer();
+    }
+}
+
+// Destructor
+VCMJitterBuffer::~VCMJitterBuffer()
+{
+    Stop();
+    for (int i = 0; i< kMaxNumberOfFrames; i++)
+    {
+        if (_frameBuffers[i])
+        {
+            delete _frameBuffers[i];
+        }
+    }
+    delete _critSect;
+}
+
+void
+VCMJitterBuffer::CopyFrom(const VCMJitterBuffer& rhs)
+{
+    if (this != &rhs)
+    {
+        _critSect->Enter();
+        rhs._critSect->Enter();
+        _vcmId = rhs._vcmId;
+        _receiverId = rhs._receiverId;
+        _running = rhs._running;
+        _master = !rhs._master;
+        _maxNumberOfFrames = rhs._maxNumberOfFrames;
+        _incomingFrameRate = rhs._incomingFrameRate;
+        _incomingFrameCount = rhs._incomingFrameCount;
+        _timeLastIncomingFrameCount = rhs._timeLastIncomingFrameCount;
+        _incomingBitCount = rhs._incomingBitCount;
+        _incomingBitRate = rhs._incomingBitRate;
+        _dropCount = rhs._dropCount;
+        _numConsecutiveOldFrames = rhs._numConsecutiveOldFrames;
+        _numConsecutiveOldPackets = rhs._numConsecutiveOldPackets;
+        _discardedPackets = rhs._discardedPackets;
+        _jitterEstimate = rhs._jitterEstimate;
+        _delayEstimate = rhs._delayEstimate;
+        _waitingForCompletion = rhs._waitingForCompletion;
+        _rttMs = rhs._rttMs;
+        _NACKSeqNumLength = rhs._NACKSeqNumLength;
+        _waitingForKeyFrame = rhs._waitingForKeyFrame;
+        _firstPacket = rhs._firstPacket;
+        _lastDecodedState =  rhs._lastDecodedState;
+        _packetsNotDecodable = rhs._packetsNotDecodable;
+        memcpy(_receiveStatistics, rhs._receiveStatistics,
+               sizeof(_receiveStatistics));
+        memcpy(_NACKSeqNumInternal, rhs._NACKSeqNumInternal,
+               sizeof(_NACKSeqNumInternal));
+        memcpy(_NACKSeqNum, rhs._NACKSeqNum, sizeof(_NACKSeqNum));
+        for (int i = 0; i < kMaxNumberOfFrames; i++)
+        {
+            if (_frameBuffers[i] != NULL)
+            {
+                delete _frameBuffers[i];
+                _frameBuffers[i] = NULL;
+            }
+        }
+        _frameList.clear();
+        for (int i = 0; i < _maxNumberOfFrames; i++)
+        {
+            _frameBuffers[i] = new VCMFrameBuffer(*(rhs._frameBuffers[i]));
+            if (_frameBuffers[i]->Length() > 0)
+            {
+                FrameList::reverse_iterator rit = std::find_if(
+                    _frameList.rbegin(), _frameList.rend(),
+                    FrameSmallerTimestamp(_frameBuffers[i]->TimeStamp()));
+                _frameList.insert(rit.base(), _frameBuffers[i]);
+            }
+        }
+        rhs._critSect->Leave();
+        _critSect->Leave();
+    }
+}
+
+// Start jitter buffer
+void
+VCMJitterBuffer::Start()
+{
+    CriticalSectionScoped cs(_critSect);
+    _running = true;
+    _incomingFrameCount = 0;
+    _incomingFrameRate = 0;
+    _incomingBitCount = 0;
+    _incomingBitRate = 0;
+    _timeLastIncomingFrameCount = _clock->MillisecondTimestamp();
+    memset(_receiveStatistics, 0, sizeof(_receiveStatistics));
+
+    _numConsecutiveOldFrames = 0;
+    _numConsecutiveOldPackets = 0;
+    _discardedPackets = 0;
+
+    _frameEvent.Reset(); // start in a non-signaled state
+    _packetEvent.Reset(); // start in a non-signaled state
+    _waitingForCompletion.frameSize = 0;
+    _waitingForCompletion.timestamp = 0;
+    _waitingForCompletion.latestPacketTime = -1;
+    _firstPacket = true;
+    _NACKSeqNumLength = 0;
+    _waitingForKeyFrame = false;
+    _rttMs = 0;
+    _packetsNotDecodable = 0;
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId,
+                 _receiverId), "JB(0x%x): Jitter buffer: start", this);
+}
+
+
+// Stop jitter buffer
+void
+VCMJitterBuffer::Stop()
+{
+    _critSect->Enter();
+    _running = false;
+    _lastDecodedState.Reset();
+    _frameList.clear();
+    for (int i = 0; i < kMaxNumberOfFrames; i++)
+    {
+        if (_frameBuffers[i] != NULL)
+        {
+            static_cast<VCMFrameBuffer*>(_frameBuffers[i])->SetState(kStateFree);
+        }
+    }
+
+    _critSect->Leave();
+    _frameEvent.Set(); // Make sure we exit from trying to get a frame to decoder
+    _packetEvent.Set(); // Make sure we exit from trying to get a sequence number
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId,
+                 _receiverId), "JB(0x%x): Jitter buffer: stop", this);
+}
+
+bool
+VCMJitterBuffer::Running() const
+{
+    CriticalSectionScoped cs(_critSect);
+    return _running;
+}
+
+// Flush jitter buffer
+void
+VCMJitterBuffer::Flush()
+{
+    CriticalSectionScoped cs(_critSect);
+    FlushInternal();
+}
+
+// Must be called under the critical section _critSect
+void
+VCMJitterBuffer::FlushInternal()
+{
+    // Erase all frames from the sorted list and set their state to free.
+    _frameList.clear();
+    for (WebRtc_Word32 i = 0; i < _maxNumberOfFrames; i++)
+    {
+        ReleaseFrameInternal(_frameBuffers[i]);
+    }
+    _lastDecodedState.Reset(); // TODO (mikhal): sync reset
+    _packetsNotDecodable = 0;
+
+    _frameEvent.Reset();
+    _packetEvent.Reset();
+
+    _numConsecutiveOldFrames = 0;
+    _numConsecutiveOldPackets = 0;
+
+    // Also reset the jitter and delay estimates
+    _jitterEstimate.Reset();
+    _delayEstimate.Reset(_clock->MillisecondTimestamp());
+
+    _waitingForCompletion.frameSize = 0;
+    _waitingForCompletion.timestamp = 0;
+    _waitingForCompletion.latestPacketTime = -1;
+
+    _firstPacket = true;
+
+    _NACKSeqNumLength = 0;
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId,
+                 _receiverId), "JB(0x%x): Jitter buffer: flush", this);
+}
+
+// Set the frame state to free and remove it from the sorted
+// frame list. Must be called from inside the critical section _critSect.
+void
+VCMJitterBuffer::ReleaseFrameInternal(VCMFrameBuffer* frame)
+{
+    if (frame != NULL && frame->GetState() != kStateDecoding)
+    {
+        frame->SetState(kStateFree);
+    }
+}
+
+// Update frame state (set as complete if conditions are met)
+// Doing it here increases the degree of freedom for e.g. future
+// reconstructability of separate layers. Must be called under the
+// critical section _critSect.
+VCMFrameBufferEnum
+VCMJitterBuffer::UpdateFrameState(VCMFrameBuffer* frame)
+{
+    if (frame == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
+                     VCMId(_vcmId, _receiverId), "JB(0x%x) FB(0x%x): "
+                         "UpdateFrameState NULL frame pointer", this, frame);
+        return kNoError;
+    }
+
+    int length = frame->Length();
+    if (_master)
+    {
+        // Only trace the primary jitter buffer to make it possible to parse
+        // and plot the trace file.
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
+                     VCMId(_vcmId, _receiverId),
+                     "JB(0x%x) FB(0x%x): Complete frame added to jitter buffer,"
+                     " size:%d type %d",
+                     this, frame,length,frame->FrameType());
+    }
+
+    if (length != 0 && !frame->GetCountedFrame())
+    {
+        // ignore Ack frames
+        _incomingFrameCount++;
+        frame->SetCountedFrame(true);
+    }
+
+    // Check if we should drop frame
+    // an old complete frame can arrive too late
+    if (_lastDecodedState.IsOldFrame(frame))
+    {
+        // Frame is older than the latest decoded frame, drop it. Will be
+        // released by CleanUpOldFrames later.
+        frame->Reset();
+        frame->SetState(kStateEmpty);
+
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
+                     VCMId(_vcmId, _receiverId),
+                     "JB(0x%x) FB(0x%x): Dropping old frame in Jitter buffer",
+                     this, frame);
+        _dropCount++;
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
+                     VCMId(_vcmId, _receiverId),
+                     "Jitter buffer drop count: %d, consecutive drops: %u",
+                     _dropCount, _numConsecutiveOldFrames);
+        // Flush() if this happens consistently.
+        _numConsecutiveOldFrames++;
+        if (_numConsecutiveOldFrames > kMaxConsecutiveOldFrames) {
+          FlushInternal();
+          return kFlushIndicator;
+        }
+        return kNoError;
+    }
+    _numConsecutiveOldFrames = 0;
+    frame->SetState(kStateComplete);
+
+
+    // Update receive statistics. We count all layers, thus when you use layers
+    // adding all key and delta frames might differ from frame count
+    if (frame->IsSessionComplete())
+    {
+        switch (frame->FrameType())
+        {
+        case kVideoFrameKey:
+            {
+                _receiveStatistics[0]++;
+                break;
+            }
+        case kVideoFrameDelta:
+            {
+                _receiveStatistics[1]++;
+                break;
+            }
+        case kVideoFrameGolden:
+            {
+                _receiveStatistics[2]++;
+                break;
+            }
+        case kVideoFrameAltRef:
+            {
+                _receiveStatistics[3]++;
+                break;
+            }
+        default:
+            assert(false);
+
+        }
+    }
+    const FrameList::iterator it = FindOldestCompleteContinuousFrame(false);
+    VCMFrameBuffer* oldFrame = NULL;
+    if (it != _frameList.end())
+    {
+        oldFrame = *it;
+    }
+
+    // Only signal if this is the oldest frame.
+    // Not necessary the case due to packet reordering or NACK.
+    if (!WaitForNack() || (oldFrame != NULL && oldFrame == frame))
+    {
+        _frameEvent.Set();
+    }
+    return kNoError;
+}
+
+// Get received key and delta frames
+WebRtc_Word32
+VCMJitterBuffer::GetFrameStatistics(WebRtc_UWord32& receivedDeltaFrames,
+                                    WebRtc_UWord32& receivedKeyFrames) const
+{
+    {
+        CriticalSectionScoped cs(_critSect);
+        receivedDeltaFrames = _receiveStatistics[1] + _receiveStatistics[3];
+        receivedKeyFrames = _receiveStatistics[0] + _receiveStatistics[2];
+    }
+    return 0;
+}
+
+WebRtc_UWord32 VCMJitterBuffer::NumNotDecodablePackets() const {
+  CriticalSectionScoped cs(_critSect);
+  return _packetsNotDecodable;
+}
+
+WebRtc_UWord32 VCMJitterBuffer::DiscardedPackets() const {
+  CriticalSectionScoped cs(_critSect);
+  return _discardedPackets;
+}
+
+// Gets frame to use for this timestamp. If no match, get empty frame.
+WebRtc_Word32
+VCMJitterBuffer::GetFrame(const VCMPacket& packet, VCMEncodedFrame*& frame)
+{
+    if (!_running) // don't accept incoming packets until we are started
+    {
+        return VCM_UNINITIALIZED;
+    }
+
+    _critSect->Enter();
+    // Does this packet belong to an old frame?
+    if (_lastDecodedState.IsOldPacket(&packet))
+    {
+        // Account only for media packets
+        if (packet.sizeBytes > 0)
+        {
+            _discardedPackets++;
+            _numConsecutiveOldPackets++;
+        }
+        // Update last decoded sequence number if the packet arrived late and
+        // belongs to a frame with a timestamp equal to the last decoded
+        // timestamp.
+        _lastDecodedState.UpdateOldPacket(&packet);
+
+        if (_numConsecutiveOldPackets > kMaxConsecutiveOldPackets)
+        {
+            FlushInternal();
+            _critSect->Leave();
+            return VCM_FLUSH_INDICATOR;
+        }
+        _critSect->Leave();
+        return VCM_OLD_PACKET_ERROR;
+    }
+    _numConsecutiveOldPackets = 0;
+
+    FrameList::iterator it = std::find_if(
+        _frameList.begin(),
+        _frameList.end(),
+        FrameEqualTimestamp(packet.timestamp));
+
+    if (it != _frameList.end()) {
+      frame = *it;
+      _critSect->Leave();
+      return VCM_OK;
+    }
+
+    _critSect->Leave();
+
+    // No match, return empty frame
+    frame = GetEmptyFrame();
+    if (frame != NULL)
+    {
+        return VCM_OK;
+    }
+    // No free frame! Try to reclaim some...
+    _critSect->Enter();
+    RecycleFramesUntilKeyFrame();
+    _critSect->Leave();
+
+    frame = GetEmptyFrame();
+    if (frame != NULL)
+    {
+        return VCM_OK;
+    }
+    return VCM_JITTER_BUFFER_ERROR;
+}
+
+// Deprecated! Kept for testing purposes.
+VCMEncodedFrame*
+VCMJitterBuffer::GetFrame(const VCMPacket& packet)
+{
+    VCMEncodedFrame* frame = NULL;
+    if (GetFrame(packet, frame) < 0)
+    {
+        return NULL;
+    }
+    return frame;
+}
+
+// Get empty frame, creates new (i.e. increases JB size) if necessary
+VCMFrameBuffer*
+VCMJitterBuffer::GetEmptyFrame()
+{
+    if (!_running) // don't accept incoming packets until we are started
+    {
+        return NULL;
+    }
+
+    _critSect->Enter();
+
+    for (int i = 0; i <_maxNumberOfFrames; ++i)
+    {
+        if (kStateFree == _frameBuffers[i]->GetState())
+        {
+            // found a free buffer
+            _frameBuffers[i]->SetState(kStateEmpty);
+            _critSect->Leave();
+            return _frameBuffers[i];
+        }
+    }
+
+    // Check if we can increase JB size
+    if (_maxNumberOfFrames < kMaxNumberOfFrames)
+    {
+        VCMFrameBuffer* ptrNewBuffer = new VCMFrameBuffer();
+        ptrNewBuffer->SetState(kStateEmpty);
+        _frameBuffers[_maxNumberOfFrames] = ptrNewBuffer;
+        _maxNumberOfFrames++;
+
+        _critSect->Leave();
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
+        VCMId(_vcmId, _receiverId), "JB(0x%x) FB(0x%x): Jitter buffer "
+        "increased to:%d frames", this, ptrNewBuffer, _maxNumberOfFrames);
+        return ptrNewBuffer;
+    }
+    _critSect->Leave();
+
+    // We have reached max size, cannot increase JB size
+    return NULL;
+}
+
+
+// Find oldest complete frame used for getting next frame to decode
+// Must be called under critical section
+FrameList::iterator
+VCMJitterBuffer::FindOldestCompleteContinuousFrame(bool enable_decodable) {
+  // If we have more than one frame done since last time, pick oldest.
+  VCMFrameBuffer* oldest_frame = NULL;
+  FrameList::iterator it = _frameList.begin();
+
+  // When temporal layers are available, we search for a complete or decodable
+  // frame until we hit one of the following:
+  // 1. Continuous base or sync layer.
+  // 2. The end of the list was reached.
+  for (; it != _frameList.end(); ++it)  {
+    oldest_frame = *it;
+    VCMFrameBufferStateEnum state = oldest_frame->GetState();
+    // Is this frame complete or decodable and continuous?
+    if ((state == kStateComplete ||
+        (enable_decodable && state == kStateDecodable)) &&
+        _lastDecodedState.ContinuousFrame(oldest_frame)) {
+      break;
+    } else {
+      int temporal_id = oldest_frame->TemporalId();
+      oldest_frame = NULL;
+      if (temporal_id <= 0) {
+        // When temporal layers are disabled or we have hit a base layer
+        // we break (regardless of continuity and completeness).
+        break;
+      }
+    }
+  }
+
+  if (oldest_frame == NULL) {
+    // No complete frame no point to continue.
+    return _frameList.end();
+  } else  if (_waitingForKeyFrame &&
+              oldest_frame->FrameType() != kVideoFrameKey) {
+    // We are waiting for a key frame.
+    return _frameList.end();
+  }
+
+  // We have a complete continuous frame.
+  return it;
+}
+
+// Call from inside the critical section _critSect
+void
+VCMJitterBuffer::RecycleFrame(VCMFrameBuffer* frame)
+{
+    if (frame == NULL)
+    {
+        return;
+    }
+
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
+                 VCMId(_vcmId, _receiverId),
+                 "JB(0x%x) FB(0x%x): RecycleFrame, size:%d",
+                 this, frame, frame->Length());
+
+    ReleaseFrameInternal(frame);
+}
+
+// Calculate frame and bit rates
+WebRtc_Word32
+VCMJitterBuffer::GetUpdate(WebRtc_UWord32& frameRate, WebRtc_UWord32& bitRate)
+{
+    CriticalSectionScoped cs(_critSect);
+    const WebRtc_Word64 now = _clock->MillisecondTimestamp();
+    WebRtc_Word64 diff = now - _timeLastIncomingFrameCount;
+    if (diff < 1000 && _incomingFrameRate > 0 && _incomingBitRate > 0)
+    {
+        // Make sure we report something even though less than
+        // 1 second has passed since last update.
+        frameRate = _incomingFrameRate;
+        bitRate = _incomingBitRate;
+    }
+    else if (_incomingFrameCount != 0)
+    {
+        // We have received frame(s) since last call to this function
+
+        // Prepare calculations
+        if (diff <= 0)
+        {
+            diff = 1;
+        }
+        // we add 0.5f for rounding
+        float rate = 0.5f + ((_incomingFrameCount * 1000.0f) / diff);
+        if (rate < 1.0f) // don't go below 1, can crash
+        {
+            rate = 1.0f;
+        }
+
+        // Calculate frame rate
+        // Let r be rate.
+        // r(0) = 1000*framecount/delta_time.
+        // (I.e. frames per second since last calculation.)
+        // frameRate = r(0)/2 + r(-1)/2
+        // (I.e. fr/s average this and the previous calculation.)
+        frameRate = (_incomingFrameRate + (WebRtc_Word32)rate) >> 1;
+        _incomingFrameRate = (WebRtc_UWord8)rate;
+
+        // Calculate bit rate
+        if (_incomingBitCount == 0)
+        {
+            bitRate = 0;
+        }
+        else
+        {
+            bitRate = 10 * ((100 * _incomingBitCount) /
+                      static_cast<WebRtc_UWord32>(diff));
+        }
+        _incomingBitRate = bitRate;
+
+        // Reset count
+        _incomingFrameCount = 0;
+        _incomingBitCount = 0;
+        _timeLastIncomingFrameCount = now;
+
+    }
+    else
+    {
+        // No frames since last call
+        _timeLastIncomingFrameCount = _clock->MillisecondTimestamp();
+        frameRate = 0;
+        bitRate = 0;
+        _incomingBitRate = 0;
+    }
+
+    return 0;
+}
+
+// Returns immediately or a X ms event hang waiting for a complete frame,
+// X decided by caller
+VCMEncodedFrame*
+VCMJitterBuffer::GetCompleteFrameForDecoding(WebRtc_UWord32 maxWaitTimeMS)
+{
+    if (!_running)
+    {
+        return NULL;
+    }
+
+    _critSect->Enter();
+
+    CleanUpOldFrames();
+
+    if (_lastDecodedState.init() && WaitForNack()) {
+      _waitingForKeyFrame = true;
+    }
+
+    FrameList::iterator it = FindOldestCompleteContinuousFrame(false);
+    if (it == _frameList.end())
+    {
+        if (maxWaitTimeMS == 0)
+        {
+            _critSect->Leave();
+            return NULL;
+        }
+        const WebRtc_Word64 endWaitTimeMs = _clock->MillisecondTimestamp()
+                                            + maxWaitTimeMS;
+        WebRtc_Word64 waitTimeMs = maxWaitTimeMS;
+        while (waitTimeMs > 0)
+        {
+            _critSect->Leave();
+            const EventTypeWrapper ret =
+                  _frameEvent.Wait(static_cast<WebRtc_UWord32>(waitTimeMs));
+            _critSect->Enter();
+            if (ret == kEventSignaled)
+            {
+                // are we closing down the Jitter buffer
+                if (!_running)
+                {
+                    _critSect->Leave();
+                    return NULL;
+                }
+
+                // Finding oldest frame ready for decoder, but check
+                // sequence number and size
+                CleanUpOldFrames();
+                it = FindOldestCompleteContinuousFrame(false);
+                if (it == _frameList.end())
+                {
+                    waitTimeMs = endWaitTimeMs -
+                                 _clock->MillisecondTimestamp();
+                }
+                else
+                {
+                    break;
+                }
+            }
+            else
+            {
+                _critSect->Leave();
+                return NULL;
+            }
+        }
+        // Inside critSect
+    }
+    else
+    {
+        // we already have a frame reset the event
+        _frameEvent.Reset();
+    }
+
+    if (it == _frameList.end())
+    {
+        // Even after signaling we're still missing a complete continuous frame
+        _critSect->Leave();
+        return NULL;
+    }
+
+    VCMFrameBuffer* oldestFrame = *it;
+    it = _frameList.erase(it);
+
+    // Update jitter estimate
+    const bool retransmitted = (oldestFrame->GetNackCount() > 0);
+    if (retransmitted)
+    {
+        _jitterEstimate.FrameNacked();
+    }
+    else if (oldestFrame->Length() > 0)
+    {
+        // Ignore retransmitted and empty frames.
+        UpdateJitterAndDelayEstimates(*oldestFrame, false);
+    }
+
+    oldestFrame->SetState(kStateDecoding);
+
+    CleanUpOldFrames();
+
+    if (oldestFrame->FrameType() == kVideoFrameKey)
+    {
+        _waitingForKeyFrame = false;
+    }
+
+    _critSect->Leave();
+
+    // We have a frame - update decoded state with frame info.
+    _lastDecodedState.SetState(oldestFrame);
+
+    return oldestFrame;
+}
+
+WebRtc_UWord32
+VCMJitterBuffer::GetEstimatedJitterMS()
+{
+    CriticalSectionScoped cs(_critSect);
+    return GetEstimatedJitterMsInternal();
+}
+
+WebRtc_UWord32
+VCMJitterBuffer::GetEstimatedJitterMsInternal()
+{
+    WebRtc_UWord32 estimate = VCMJitterEstimator::OPERATING_SYSTEM_JITTER;
+
+    // Compute RTT multiplier for estimation
+    // _lowRttNackThresholdMs == -1 means no FEC.
+    double rttMult = 1.0f;
+    if (_nackMode == kNackHybrid && (_lowRttNackThresholdMs >= 0 &&
+        static_cast<int>(_rttMs) > _lowRttNackThresholdMs))
+    {
+        // from here we count on FEC
+        rttMult = 0.0f;
+    }
+    estimate += static_cast<WebRtc_UWord32>
+                (_jitterEstimate.GetJitterEstimate(rttMult) + 0.5);
+    return estimate;
+}
+
+void
+VCMJitterBuffer::UpdateRtt(WebRtc_UWord32 rttMs)
+{
+    CriticalSectionScoped cs(_critSect);
+    _rttMs = rttMs;
+    _jitterEstimate.UpdateRtt(rttMs);
+}
+
+// wait for the first packet in the next frame to arrive
+WebRtc_Word64
+VCMJitterBuffer::GetNextTimeStamp(WebRtc_UWord32 maxWaitTimeMS,
+                                  FrameType& incomingFrameType,
+                                  WebRtc_Word64& renderTimeMs)
+{
+    if (!_running)
+    {
+        return -1;
+    }
+
+    _critSect->Enter();
+
+    // Finding oldest frame ready for decoder, check sequence number and size
+    CleanUpOldFrames();
+
+    FrameList::iterator it = _frameList.begin();
+
+    if (it == _frameList.end())
+    {
+        _packetEvent.Reset();
+        _critSect->Leave();
+
+        if (_packetEvent.Wait(maxWaitTimeMS) == kEventSignaled)
+        {
+            // are we closing down the Jitter buffer
+            if (!_running)
+            {
+                return -1;
+            }
+            _critSect->Enter();
+
+            CleanUpOldFrames();
+            it = _frameList.begin();
+        }
+        else
+        {
+            _critSect->Enter();
+        }
+    }
+
+    if (it == _frameList.end())
+    {
+        _critSect->Leave();
+        return -1;
+    }
+    // we have a frame
+
+    // return frame type
+    // All layers are assumed to have the same type
+    incomingFrameType = (*it)->FrameType();
+
+    renderTimeMs = (*it)->RenderTimeMs();
+
+    const WebRtc_UWord32 timestamp = (*it)->TimeStamp();
+
+    _critSect->Leave();
+
+    // return current time
+    return timestamp;
+}
+
+// Answers the question:
+// Will the packet sequence be complete if the next frame is grabbed for
+// decoding right now? That is, have we lost a frame between the last decoded
+// frame and the next, or is the next
+// frame missing one or more packets?
+bool
+VCMJitterBuffer::CompleteSequenceWithNextFrame()
+{
+    CriticalSectionScoped cs(_critSect);
+    // Finding oldest frame ready for decoder, check sequence number and size
+    CleanUpOldFrames();
+
+    if (_frameList.empty())
+      return true;
+
+    VCMFrameBuffer* oldestFrame = _frameList.front();
+    if (_frameList.size() <= 1 &&
+        oldestFrame->GetState() != kStateComplete)
+    {
+        // Frame not ready to be decoded.
+        return true;
+    }
+    if (!oldestFrame->Complete())
+    {
+        return false;
+    }
+
+    // See if we have lost a frame before this one.
+    if (_lastDecodedState.init())
+    {
+        // Following start, reset or flush -> check for key frame.
+        if (oldestFrame->FrameType() != kVideoFrameKey)
+        {
+            return false;
+        }
+    }
+    else if (oldestFrame->GetLowSeqNum() == -1)
+    {
+        return false;
+    }
+    else if (!_lastDecodedState.ContinuousFrame(oldestFrame))
+    {
+        return false;
+    }
+    return true;
+}
+
+// Returns immediately
+VCMEncodedFrame*
+VCMJitterBuffer::GetFrameForDecoding()
+{
+    CriticalSectionScoped cs(_critSect);
+    if (!_running)
+    {
+        return NULL;
+    }
+
+    if (WaitForNack())
+    {
+        return GetFrameForDecodingNACK();
+    }
+
+    CleanUpOldFrames();
+
+    if (_frameList.empty()) {
+      return NULL;
+    }
+
+    VCMFrameBuffer* oldestFrame = _frameList.front();
+    if (_frameList.size() <= 1 &&
+        oldestFrame->GetState() != kStateComplete) {
+      return NULL;
+    }
+
+    // Incomplete frame pulled out from jitter buffer,
+    // update the jitter estimate with what we currently know.
+    // This frame shouldn't have been retransmitted, but if we recently
+    // turned off NACK this might still happen.
+    const bool retransmitted = (oldestFrame->GetNackCount() > 0);
+    if (retransmitted)
+    {
+        _jitterEstimate.FrameNacked();
+    }
+    else if (oldestFrame->Length() > 0)
+    {
+        // Ignore retransmitted and empty frames.
+        // Update with the previous incomplete frame first
+        if (_waitingForCompletion.latestPacketTime >= 0)
+        {
+            UpdateJitterAndDelayEstimates(_waitingForCompletion, true);
+        }
+        // Then wait for this one to get complete
+        _waitingForCompletion.frameSize = oldestFrame->Length();
+        _waitingForCompletion.latestPacketTime =
+                              oldestFrame->LatestPacketTimeMs();
+        _waitingForCompletion.timestamp = oldestFrame->TimeStamp();
+    }
+    _frameList.erase(_frameList.begin());
+
+    // Look for previous frame loss
+    VerifyAndSetPreviousFrameLost(*oldestFrame);
+
+    // The state must be changed to decoding before cleaning up zero sized
+    // frames to avoid empty frames being cleaned up and then given to the
+    // decoder.
+    // Set as decoding. Propagates the missingFrame bit.
+    oldestFrame->SetState(kStateDecoding);
+
+    CleanUpOldFrames();
+
+    if (oldestFrame->FrameType() == kVideoFrameKey)
+    {
+        _waitingForKeyFrame = false;
+    }
+
+    _packetsNotDecodable += oldestFrame->NotDecodablePackets();
+
+    // We have a frame - update decoded state with frame info.
+    _lastDecodedState.SetState(oldestFrame);
+
+    return oldestFrame;
+}
+
+VCMEncodedFrame*
+VCMJitterBuffer::GetFrameForDecodingNACK()
+{
+    // when we use NACK we don't release non complete frames
+    // unless we have a complete key frame.
+    // In hybrid mode, we may release decodable frames (non-complete)
+
+    // Clean up old frames and empty frames
+    CleanUpOldFrames();
+
+    // First look for a complete _continuous_ frame.
+    // When waiting for nack, wait for a key frame, if a continuous frame cannot
+    // be determined (i.e. initial decoding state).
+    if (_lastDecodedState.init()) {
+      _waitingForKeyFrame = true;
+    }
+
+    // Allow for a decodable frame when in Hybrid mode.
+    bool enableDecodable = _nackMode == kNackHybrid ? true : false;
+    FrameList::iterator it = FindOldestCompleteContinuousFrame(enableDecodable);
+    if (it == _frameList.end())
+    {
+        // If we didn't find one we're good with a complete key/decodable frame.
+        it = find_if(_frameList.begin(), _frameList.end(),
+                     CompleteDecodableKeyFrameCriteria());
+        if (it == _frameList.end())
+        {
+            return NULL;
+        }
+    }
+    VCMFrameBuffer* oldestFrame = *it;
+    // Update jitter estimate
+    const bool retransmitted = (oldestFrame->GetNackCount() > 0);
+    if (retransmitted)
+    {
+        _jitterEstimate.FrameNacked();
+    }
+    else if (oldestFrame->Length() > 0)
+    {
+        // Ignore retransmitted and empty frames.
+        UpdateJitterAndDelayEstimates(*oldestFrame, false);
+    }
+    it = _frameList.erase(it);
+
+    // Look for previous frame loss
+    VerifyAndSetPreviousFrameLost(*oldestFrame);
+
+    // The state must be changed to decoding before cleaning up zero sized
+    // frames to avoid empty frames being cleaned up and then given to the
+    // decoder.
+    oldestFrame->SetState(kStateDecoding);
+
+    // Clean up old frames and empty frames
+    CleanUpOldFrames();
+
+    if (oldestFrame->FrameType() == kVideoFrameKey)
+    {
+        _waitingForKeyFrame = false;
+    }
+
+    // We have a frame - update decoded state with frame info.
+    _lastDecodedState.SetState(oldestFrame);
+
+    return oldestFrame;
+}
+
+// Must be called under the critical section _critSect. Should never be called
+// with retransmitted frames, they must be filtered out before this function is
+// called.
+void
+VCMJitterBuffer::UpdateJitterAndDelayEstimates(VCMJitterSample& sample,
+                                               bool incompleteFrame)
+{
+    if (sample.latestPacketTime == -1)
+    {
+        return;
+    }
+    if (incompleteFrame)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
+                     VCMId(_vcmId, _receiverId), "Received incomplete frame "
+                     "timestamp %u frame size %u at time %u",
+                     sample.timestamp, sample.frameSize,
+                     MaskWord64ToUWord32(sample.latestPacketTime));
+    }
+    else
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
+                     VCMId(_vcmId, _receiverId), "Received complete frame "
+                     "timestamp %u frame size %u at time %u",
+                     sample.timestamp, sample.frameSize,
+                     MaskWord64ToUWord32(sample.latestPacketTime));
+    }
+    UpdateJitterAndDelayEstimates(sample.latestPacketTime,
+                                  sample.timestamp,
+                                  sample.frameSize,
+                                  incompleteFrame);
+}
+
+// Must be called under the critical section _critSect. Should never be
+// called with retransmitted frames, they must be filtered out before this
+// function is called.
+void
+VCMJitterBuffer::UpdateJitterAndDelayEstimates(VCMFrameBuffer& frame,
+                                               bool incompleteFrame)
+{
+    if (frame.LatestPacketTimeMs() == -1)
+    {
+        return;
+    }
+    // No retransmitted frames should be a part of the jitter
+    // estimate.
+    if (incompleteFrame)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
+                     VCMId(_vcmId, _receiverId),
+                   "Received incomplete frame timestamp %u frame type %d "
+                   "frame size %u at time %u, jitter estimate was %u",
+                   frame.TimeStamp(), frame.FrameType(), frame.Length(),
+                   MaskWord64ToUWord32(frame.LatestPacketTimeMs()),
+                   GetEstimatedJitterMsInternal());
+    }
+    else
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
+                     VCMId(_vcmId, _receiverId),"Received complete frame "
+                     "timestamp %u frame type %d frame size %u at time %u, "
+                     "jitter estimate was %u",
+                     frame.TimeStamp(), frame.FrameType(), frame.Length(),
+                     MaskWord64ToUWord32(frame.LatestPacketTimeMs()),
+                     GetEstimatedJitterMsInternal());
+    }
+    UpdateJitterAndDelayEstimates(frame.LatestPacketTimeMs(), frame.TimeStamp(),
+                                  frame.Length(), incompleteFrame);
+}
+
+// Must be called under the critical section _critSect. Should never be called
+// with retransmitted frames, they must be filtered out before this function
+// is called.
+void
+VCMJitterBuffer::UpdateJitterAndDelayEstimates(WebRtc_Word64 latestPacketTimeMs,
+                                               WebRtc_UWord32 timestamp,
+                                               WebRtc_UWord32 frameSize,
+                                               bool incompleteFrame)
+{
+    if (latestPacketTimeMs == -1)
+    {
+        return;
+    }
+    WebRtc_Word64 frameDelay;
+    // Calculate the delay estimate
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
+                 VCMId(_vcmId, _receiverId),
+                 "Packet received and sent to jitter estimate with: "
+                 "timestamp=%u wallClock=%u", timestamp,
+                 MaskWord64ToUWord32(latestPacketTimeMs));
+    bool notReordered = _delayEstimate.CalculateDelay(timestamp,
+                                                      &frameDelay,
+                                                      latestPacketTimeMs);
+    // Filter out frames which have been reordered in time by the network
+    if (notReordered)
+    {
+        // Update the jitter estimate with the new samples
+        _jitterEstimate.UpdateEstimate(frameDelay, frameSize, incompleteFrame);
+    }
+}
+
+WebRtc_UWord16*
+VCMJitterBuffer::GetNackList(WebRtc_UWord16& nackSize,bool& listExtended)
+{
+    return CreateNackList(nackSize,listExtended);
+}
+
+// Assume called internally with critsect
+WebRtc_Word32
+VCMJitterBuffer::GetLowHighSequenceNumbers(WebRtc_Word32& lowSeqNum,
+                                           WebRtc_Word32& highSeqNum) const
+{
+    // TODO (mikhal/stefan): refactor to use lastDecodedState
+    WebRtc_Word32 i = 0;
+    WebRtc_Word32 seqNum = -1;
+
+    highSeqNum = -1;
+    lowSeqNum = -1;
+    if (!_lastDecodedState.init())
+      lowSeqNum = _lastDecodedState.sequence_num();
+
+    // find highest seq numbers
+    for (i = 0; i < _maxNumberOfFrames; ++i)
+    {
+        seqNum = _frameBuffers[i]->GetHighSeqNum();
+
+        // Ignore free / empty frames
+        VCMFrameBufferStateEnum state = _frameBuffers[i]->GetState();
+
+        if ((kStateFree != state) &&
+            (kStateEmpty != state) &&
+            (kStateDecoding != state) &&
+             seqNum != -1)
+        {
+            bool wrap;
+            highSeqNum = LatestSequenceNumber(seqNum, highSeqNum, &wrap);
+        }
+    } // for
+    return 0;
+}
+
+
+WebRtc_UWord16*
+VCMJitterBuffer::CreateNackList(WebRtc_UWord16& nackSize, bool& listExtended)
+{
+    // TODO (mikhal/stefan): Refactor to use lastDecodedState.
+    CriticalSectionScoped cs(_critSect);
+    int i = 0;
+    WebRtc_Word32 lowSeqNum = -1;
+    WebRtc_Word32 highSeqNum = -1;
+    listExtended = false;
+
+    // Don't create list, if we won't wait for it
+    if (!WaitForNack())
+    {
+        nackSize = 0;
+        return NULL;
+    }
+
+    // Find the lowest (last decoded) sequence number and
+    // the highest (highest sequence number of the newest frame)
+    // sequence number. The nack list is a subset of the range
+    // between those two numbers.
+    GetLowHighSequenceNumbers(lowSeqNum, highSeqNum);
+
+    // write a list of all seq num we have
+    if (lowSeqNum == -1 || highSeqNum == -1)
+    {
+        // This happens if we lose the first packet, nothing is popped
+        if (highSeqNum == -1)
+        {
+            // we have not received any packets yet
+            nackSize = 0;
+        }
+        else
+        {
+            // signal that we want a key frame request to be sent
+            nackSize = 0xffff;
+        }
+        return NULL;
+    }
+
+    int numberOfSeqNum = 0;
+    if (lowSeqNum > highSeqNum)
+    {
+        if (lowSeqNum - highSeqNum > 0x00ff)
+        {
+            // wrap
+            numberOfSeqNum = (0xffff-lowSeqNum) + highSeqNum + 1;
+        }
+    }
+    else
+    {
+        numberOfSeqNum = highSeqNum - lowSeqNum;
+    }
+
+    if (numberOfSeqNum > kNackHistoryLength)
+    {
+        // Nack list is too big, flush and try to restart.
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
+                     VCMId(_vcmId, _receiverId),
+                     "Nack list too large, try to find a key frame and restart "
+                     "from seq: %d. Lowest seq in jb %d", highSeqNum,lowSeqNum);
+
+        // This nack size will trigger a key request...
+        bool foundKeyFrame = false;
+
+        while (numberOfSeqNum > kNackHistoryLength)
+        {
+            foundKeyFrame = RecycleFramesUntilKeyFrame();
+
+            if (!foundKeyFrame)
+            {
+                break;
+            }
+
+            // Check if we still have too many packets in JB
+            lowSeqNum = -1;
+            highSeqNum = -1;
+            GetLowHighSequenceNumbers(lowSeqNum, highSeqNum);
+
+            if (highSeqNum == -1)
+            {
+                assert(lowSeqNum != -1); // This should never happen
+                // We can't calculate the nack list length...
+                return NULL;
+            }
+
+            numberOfSeqNum = 0;
+            if (lowSeqNum > highSeqNum)
+            {
+                if (lowSeqNum - highSeqNum > 0x00ff)
+                {
+                    // wrap
+                    numberOfSeqNum = (0xffff-lowSeqNum) + highSeqNum + 1;
+                    highSeqNum=lowSeqNum;
+                }
+            }
+            else
+            {
+                numberOfSeqNum = highSeqNum - lowSeqNum;
+            }
+
+        } // end while
+
+        if (!foundKeyFrame)
+        {
+            // No key frame in JB.
+
+            // Set the last decoded sequence number to current high.
+            // This is to not get a large nack list again right away
+            _lastDecodedState.SetSeqNum(static_cast<uint16_t>(highSeqNum));
+            // Set to trigger key frame signal
+            nackSize = 0xffff;
+            listExtended = true;
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, -1,
+                    "\tNo key frame found, request one. _lastDecodedSeqNum[0] "
+                    "%d", _lastDecodedState.sequence_num());
+        }
+        else
+        {
+            // We have cleaned up the jb and found a key frame
+            // The function itself has set last decoded seq.
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, -1,
+                    "\tKey frame found. _lastDecodedSeqNum[0] %d",
+                    _lastDecodedState.sequence_num());
+            nackSize = 0;
+        }
+
+        return NULL;
+    }
+
+    WebRtc_UWord16 seqNumberIterator = (WebRtc_UWord16)(lowSeqNum + 1);
+    for (i = 0; i < numberOfSeqNum; i++)
+    {
+        _NACKSeqNumInternal[i] = seqNumberIterator;
+        seqNumberIterator++;
+    }
+
+    // now we have a list of all sequence numbers that could have been sent
+
+    // zero out the ones we have received
+    for (i = 0; i < _maxNumberOfFrames; i++)
+    {
+        // loop all created frames
+        // We don't need to check if frame is decoding since lowSeqNum is based
+        // on _lastDecodedSeqNum
+        // Ignore free frames
+        VCMFrameBufferStateEnum state = _frameBuffers[i]->GetState();
+
+        if ((kStateFree != state) &&
+            (kStateEmpty != state) &&
+            (kStateDecoding != state))
+        {
+            // Reaching thus far means we are going to update the nack list
+            // When in hybrid mode, we use the soft NACKing feature.
+            if (_nackMode == kNackHybrid)
+            {
+                _frameBuffers[i]->BuildSoftNackList(_NACKSeqNumInternal,
+                                                    numberOfSeqNum,
+                                                    _rttMs);
+            }
+            else
+            {
+                // Used when the frame is being processed by the decoding thread
+                // don't need to use that info in this loop.
+                _frameBuffers[i]->BuildHardNackList(_NACKSeqNumInternal,
+                                                    numberOfSeqNum);
+            }
+        }
+    }
+
+    // compress list
+    int emptyIndex = -1;
+    for (i = 0; i < numberOfSeqNum; i++)
+    {
+        if (_NACKSeqNumInternal[i] == -1 || _NACKSeqNumInternal[i] == -2 )
+        {
+            // this is empty
+            if (emptyIndex == -1)
+            {
+                // no empty index before, remember this position
+                emptyIndex = i;
+            }
+        }
+        else
+        {
+            // this is not empty
+            if (emptyIndex == -1)
+            {
+                // no empty index, continue
+            }
+            else
+            {
+                _NACKSeqNumInternal[emptyIndex] = _NACKSeqNumInternal[i];
+                _NACKSeqNumInternal[i] = -1;
+                emptyIndex++;
+            }
+        }
+    } // for
+
+    if (emptyIndex == -1)
+    {
+        // no empty
+        nackSize = numberOfSeqNum;
+    }
+    else
+    {
+        nackSize = emptyIndex;
+    }
+
+    if (nackSize > _NACKSeqNumLength)
+    {
+        // Larger list: nack list was extended since the last call.
+        listExtended = true;
+    }
+
+    for (WebRtc_UWord32 j = 0; j < nackSize; j++)
+    {
+        // Check if the list has been extended since it was last created. I.e,
+        // new items have been added
+        if (_NACKSeqNumLength > j && !listExtended)
+        {
+            WebRtc_UWord32 k = 0;
+            for (k = j; k < _NACKSeqNumLength; k++)
+            {
+                // Found the item in the last list, i.e, no new items found yet.
+                if (_NACKSeqNum[k] == (WebRtc_UWord16)_NACKSeqNumInternal[j])
+                {
+                   break;
+                }
+            }
+            if (k == _NACKSeqNumLength) // New item not found in last list.
+            {
+                listExtended = true;
+            }
+        }
+        else
+        {
+            listExtended = true;
+        }
+        _NACKSeqNum[j] = (WebRtc_UWord16)_NACKSeqNumInternal[j];
+    }
+
+    _NACKSeqNumLength = nackSize;
+
+    return _NACKSeqNum;
+}
+
+// Release frame when done with decoding. Should never be used to release
+// frames from within the jitter buffer.
+void
+VCMJitterBuffer::ReleaseFrame(VCMEncodedFrame* frame)
+{
+    CriticalSectionScoped cs(_critSect);
+    VCMFrameBuffer* frameBuffer = static_cast<VCMFrameBuffer*>(frame);
+    if (frameBuffer != NULL)
+        frameBuffer->SetState(kStateFree);
+}
+
+WebRtc_Word64
+VCMJitterBuffer::LastPacketTime(VCMEncodedFrame* frame,
+                                bool& retransmitted) const
+{
+    CriticalSectionScoped cs(_critSect);
+    retransmitted = (static_cast<VCMFrameBuffer*>(frame)->GetNackCount() > 0);
+    return static_cast<VCMFrameBuffer*>(frame)->LatestPacketTimeMs();
+}
+
+WebRtc_Word64
+VCMJitterBuffer::LastDecodedTimestamp() const
+{
+    CriticalSectionScoped cs(_critSect);
+    return _lastDecodedState.time_stamp();
+}
+
+// Insert packet
+// Takes crit sect, and inserts packet in frame buffer, possibly does logging
+VCMFrameBufferEnum
+VCMJitterBuffer::InsertPacket(VCMEncodedFrame* buffer, const VCMPacket& packet)
+{
+    CriticalSectionScoped cs(_critSect);
+    WebRtc_Word64 nowMs = _clock->MillisecondTimestamp();
+    VCMFrameBufferEnum bufferReturn = kSizeError;
+    VCMFrameBufferEnum ret = kSizeError;
+    VCMFrameBuffer* frame = static_cast<VCMFrameBuffer*>(buffer);
+
+    // We are keeping track of the first seq num, the latest seq num and
+    // the number of wraps to be able to calculate how many packets we expect.
+    if (_firstPacket)
+    {
+        // Now it's time to start estimating jitter
+        // reset the delay estimate.
+        _delayEstimate.Reset(_clock->MillisecondTimestamp());
+        _firstPacket = false;
+    }
+
+    // Empty packets may bias the jitter estimate (lacking size component),
+    // therefore don't let empty packet trigger the following updates:
+    if (packet.frameType != kFrameEmpty)
+    {
+        if (_waitingForCompletion.timestamp == packet.timestamp)
+        {
+            // This can get bad if we have a lot of duplicate packets,
+            // we will then count some packet multiple times.
+            _waitingForCompletion.frameSize += packet.sizeBytes;
+            _waitingForCompletion.latestPacketTime = nowMs;
+        }
+        else if (_waitingForCompletion.latestPacketTime >= 0 &&
+                 _waitingForCompletion.latestPacketTime + 2000 <= nowMs)
+        {
+            // A packet should never be more than two seconds late
+            UpdateJitterAndDelayEstimates(_waitingForCompletion, true);
+            _waitingForCompletion.latestPacketTime = -1;
+            _waitingForCompletion.frameSize = 0;
+            _waitingForCompletion.timestamp = 0;
+        }
+    }
+
+    if (frame != NULL)
+    {
+        VCMFrameBufferStateEnum state = frame->GetState();
+        _lastDecodedState.UpdateOldPacket(&packet);
+        // Insert packet
+        // Check for first packet
+        // High sequence number will be -1 if neither an empty packet nor
+        // a media packet has been inserted.
+        bool first = (frame->GetHighSeqNum() == -1);
+        // When in Hybrid mode, we allow for a decodable state
+        // Note: Under current version, a decodable frame will never be
+        // triggered, as the body of the function is empty.
+        // TODO (mikhal): Update when decodable is enabled.
+        bufferReturn = frame->InsertPacket(packet, nowMs,
+                                           _nackMode == kNackHybrid,
+                                           _rttMs);
+        ret = bufferReturn;
+
+        if (bufferReturn > 0)
+        {
+            _incomingBitCount += packet.sizeBytes << 3;
+
+            // Has this packet been nacked or is it about to be nacked?
+            if (IsPacketRetransmitted(packet))
+            {
+                frame->IncrementNackCount();
+            }
+
+            // Insert each frame once on the arrival of the first packet
+            // belonging to that frame (media or empty)
+            if (state == kStateEmpty && first)
+            {
+                ret = kFirstPacket;
+                FrameList::reverse_iterator rit = std::find_if(
+                    _frameList.rbegin(), _frameList.rend(),
+                    FrameSmallerTimestamp(frame->TimeStamp()));
+                _frameList.insert(rit.base(), frame);
+            }
+        }
+    }
+    switch(bufferReturn)
+    {
+    case kStateError:
+    case kTimeStampError:
+    case kSizeError:
+        {
+            if (frame != NULL)
+            {
+                // Will be released when it gets old.
+                frame->Reset();
+                frame->SetState(kStateEmpty);
+            }
+            break;
+        }
+    case kCompleteSession:
+        {
+            // Only update return value for a JB flush indicator.
+            if (UpdateFrameState(frame) == kFlushIndicator)
+              ret = kFlushIndicator;
+            // Signal that we have a received packet
+            _packetEvent.Set();
+            break;
+        }
+    case kDecodableSession:
+    case kIncomplete:
+        {
+          // Signal that we have a received packet
+          _packetEvent.Set();
+          break;
+        }
+    case kNoError:
+    case kDuplicatePacket:
+        {
+            break;
+        }
+    default:
+        {
+            assert(false && "JitterBuffer::InsertPacket: Undefined value");
+        }
+    }
+   return ret;
+}
+
+// Must be called from within _critSect
+void
+VCMJitterBuffer::UpdateOldJitterSample(const VCMPacket& packet)
+{
+    if (_waitingForCompletion.timestamp != packet.timestamp &&
+        LatestTimestamp(_waitingForCompletion.timestamp, packet.timestamp,
+                        NULL) == packet.timestamp)
+    {
+        // This is a newer frame than the one waiting for completion.
+        _waitingForCompletion.frameSize = packet.sizeBytes;
+        _waitingForCompletion.timestamp = packet.timestamp;
+    }
+    else
+    {
+        // This can get bad if we have a lot of duplicate packets,
+        // we will then count some packet multiple times.
+        _waitingForCompletion.frameSize += packet.sizeBytes;
+        _jitterEstimate.UpdateMaxFrameSize(_waitingForCompletion.frameSize);
+    }
+}
+
+// Must be called from within _critSect
+bool
+VCMJitterBuffer::IsPacketRetransmitted(const VCMPacket& packet) const
+{
+    if (_NACKSeqNumLength > 0)
+    {
+        for (WebRtc_UWord16 i = 0; i < _NACKSeqNumLength; i++)
+        {
+            if (packet.seqNum == _NACKSeqNum[i])
+            {
+                return true;
+            }
+        }
+    }
+    return false;
+}
+
+// Get nack status (enabled/disabled)
+VCMNackMode
+VCMJitterBuffer::GetNackMode() const
+{
+    CriticalSectionScoped cs(_critSect);
+    return _nackMode;
+}
+
+// Set NACK mode
+void
+VCMJitterBuffer::SetNackMode(VCMNackMode mode,
+                             int lowRttNackThresholdMs,
+                             int highRttNackThresholdMs)
+{
+    CriticalSectionScoped cs(_critSect);
+    _nackMode = mode;
+    assert(lowRttNackThresholdMs >= -1 && highRttNackThresholdMs >= -1);
+    assert(highRttNackThresholdMs == -1 ||
+           lowRttNackThresholdMs <= highRttNackThresholdMs);
+    assert(lowRttNackThresholdMs > -1 || highRttNackThresholdMs == -1);
+    _lowRttNackThresholdMs = lowRttNackThresholdMs;
+    _highRttNackThresholdMs = highRttNackThresholdMs;
+    if (_nackMode == kNoNack)
+    {
+        _jitterEstimate.ResetNackCount();
+    }
+}
+
+
+// Recycle oldest frames up to a key frame, used if JB is completely full
+bool
+VCMJitterBuffer::RecycleFramesUntilKeyFrame()
+{
+    // Remove up to oldest key frame
+    while (_frameList.size() > 0)
+    {
+        // Throw at least one frame.
+        _dropCount++;
+        FrameList::iterator it = _frameList.begin();
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
+                     VCMId(_vcmId, _receiverId),
+                     "Jitter buffer drop count:%d, lowSeq %d", _dropCount,
+                     (*it)->GetLowSeqNum());
+        RecycleFrame(*it);
+        it = _frameList.erase(it);
+        if (it != _frameList.end() && (*it)->FrameType() == kVideoFrameKey)
+        {
+            // Fake the lastDecodedState to match this key frame.
+            _lastDecodedState.SetStateOneBack(*it);
+            return true;
+        }
+    }
+    _waitingForKeyFrame = true;
+    _lastDecodedState.Reset(); // TODO (mikhal): no sync
+    return false;
+}
+
+// Must be called under the critical section _critSect.
+void VCMJitterBuffer::CleanUpOldFrames() {
+  while (_frameList.size() > 0) {
+    VCMFrameBuffer* oldestFrame = _frameList.front();
+    bool nextFrameEmpty = (_lastDecodedState.ContinuousFrame(oldestFrame) &&
+        oldestFrame->GetState() == kStateEmpty);
+    if (_lastDecodedState.IsOldFrame(oldestFrame) ||
+        (nextFrameEmpty && _frameList.size() > 1)) {
+      ReleaseFrameInternal(_frameList.front());
+      _frameList.erase(_frameList.begin());
+    } else {
+      break;
+    }
+  }
+}
+
+// Used in GetFrameForDecoding
+void VCMJitterBuffer::VerifyAndSetPreviousFrameLost(VCMFrameBuffer& frame) {
+  frame.MakeSessionDecodable();  // Make sure the session can be decoded.
+  if (frame.FrameType() == kVideoFrameKey)
+    return;
+
+  if (!_lastDecodedState.ContinuousFrame(&frame))
+    frame.SetPreviousFrameLoss();
+}
+
+bool
+VCMJitterBuffer::WaitForNack()
+{
+     // NACK disabled -> can't wait
+     if (_nackMode == kNoNack)
+     {
+         return false;
+     }
+     // NACK only -> always wait
+     else if (_nackMode == kNackInfinite)
+     {
+         return true;
+     }
+     // else: hybrid mode, evaluate
+     // RTT high, don't wait
+     if (_highRttNackThresholdMs >= 0 &&
+         _rttMs >= static_cast<unsigned int>(_highRttNackThresholdMs))
+     {
+         return false;
+     }
+     // Either NACK only or hybrid
+     return true;
+}
+
+}  // namespace webrtc
diff --git a/src/modules/video_coding/main/source/jitter_buffer.h b/src/modules/video_coding/main/source/jitter_buffer.h
new file mode 100644
index 0000000..d951187
--- /dev/null
+++ b/src/modules/video_coding/main/source/jitter_buffer.h
@@ -0,0 +1,259 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_JITTER_BUFFER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_JITTER_BUFFER_H_
+
+#include <list>
+
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/main/interface/video_coding_defines.h"
+#include "modules/video_coding/main/source/decoding_state.h"
+#include "modules/video_coding/main/source/event.h"
+#include "modules/video_coding/main/source/inter_frame_delay.h"
+#include "modules/video_coding/main/source/jitter_buffer_common.h"
+#include "modules/video_coding/main/source/jitter_estimator.h"
+#include "system_wrappers/interface/constructor_magic.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+enum VCMNackMode
+{
+    kNackInfinite,
+    kNackHybrid,
+    kNoNack
+};
+
+typedef std::list<VCMFrameBuffer*> FrameList;
+
+// forward declarations
+class TickTimeBase;
+class VCMFrameBuffer;
+class VCMPacket;
+class VCMEncodedFrame;
+
+class VCMJitterSample
+{
+public:
+    VCMJitterSample() : timestamp(0), frameSize(0), latestPacketTime(-1) {}
+    WebRtc_UWord32 timestamp;
+    WebRtc_UWord32 frameSize;
+    WebRtc_Word64 latestPacketTime;
+};
+
+class VCMJitterBuffer
+{
+public:
+    VCMJitterBuffer(TickTimeBase* clock,
+                    WebRtc_Word32 vcmId = -1,
+                    WebRtc_Word32 receiverId = -1,
+                    bool master = true);
+    virtual ~VCMJitterBuffer();
+
+    void CopyFrom(const VCMJitterBuffer& rhs);
+
+    // We need a start and stop to break out of the wait event
+    // used in GetCompleteFrameForDecoding
+    void Start();
+    void Stop();
+    bool Running() const;
+
+    // Empty the Jitter buffer of all its data
+    void Flush();
+
+    // Statistics, Get received key and delta frames
+    WebRtc_Word32 GetFrameStatistics(WebRtc_UWord32& receivedDeltaFrames,
+                                     WebRtc_UWord32& receivedKeyFrames) const;
+
+    // The number of packets discarded by the jitter buffer because the decoder
+    // won't be able to decode them.
+    WebRtc_UWord32 NumNotDecodablePackets() const;
+    // Get number of packets discarded by the jitter buffer
+    WebRtc_UWord32 DiscardedPackets() const;
+
+    // Statistics, Calculate frame and bit rates
+    WebRtc_Word32 GetUpdate(WebRtc_UWord32& frameRate, WebRtc_UWord32& bitRate);
+
+    // Wait for the first packet in the next frame to arrive, blocks
+    // for <= maxWaitTimeMS ms
+    WebRtc_Word64 GetNextTimeStamp(WebRtc_UWord32 maxWaitTimeMS,
+                                   FrameType& incomingFrameType,
+                                   WebRtc_Word64& renderTimeMs);
+
+    // Will the packet sequence be complete if the next frame is grabbed
+    // for decoding right now? That is, have we lost a frame between the
+    // last decoded frame and the next, or is the next frame missing one
+    // or more packets?
+    bool CompleteSequenceWithNextFrame();
+
+    // TODO (mikhal/stefan): Merge all GetFrameForDecoding into one.
+    // Wait maxWaitTimeMS for a complete frame to arrive. After timeout NULL
+    // is returned.
+    VCMEncodedFrame* GetCompleteFrameForDecoding(WebRtc_UWord32 maxWaitTimeMS);
+
+    // Get a frame for decoding (even an incomplete) without delay.
+    VCMEncodedFrame* GetFrameForDecoding();
+
+    VCMEncodedFrame* GetFrameForDecodingNACK();
+
+    // Release frame (when done with decoding)
+    void ReleaseFrame(VCMEncodedFrame* frame);
+
+    // Get frame to use for this timestamp
+    WebRtc_Word32 GetFrame(const VCMPacket& packet, VCMEncodedFrame*&);
+    VCMEncodedFrame* GetFrame(const VCMPacket& packet); // deprecated
+
+    // Returns the time in ms when the latest packet was inserted into the frame.
+    // Retransmitted is set to true if any of the packets belonging to the frame
+    // has been retransmitted.
+    WebRtc_Word64 LastPacketTime(VCMEncodedFrame* frame,
+                                 bool& retransmitted) const;
+
+    // Insert a packet into a frame
+    VCMFrameBufferEnum InsertPacket(VCMEncodedFrame* frame,
+                                    const VCMPacket& packet);
+
+    // Sync
+    WebRtc_UWord32 GetEstimatedJitterMS();
+    void UpdateRtt(WebRtc_UWord32 rttMs);
+
+    // NACK
+    // Set the NACK mode. "highRttNackThreshold" is an RTT threshold in ms above
+    // which NACK will be disabled if the NACK mode is "kNackHybrid",
+    // -1 meaning that NACK is always enabled in the Hybrid mode.
+    // "lowRttNackThreshold" is an RTT threshold in ms below which we expect to
+    // rely on NACK only, and therefore are using larger buffers to have time to
+    // wait for retransmissions.
+    void SetNackMode(VCMNackMode mode,
+                     int lowRttNackThresholdMs,
+                     int highRttNackThresholdMs);
+    VCMNackMode GetNackMode() const;    // Get nack mode
+    // Get list of missing sequence numbers (size in number of elements)
+    WebRtc_UWord16* GetNackList(WebRtc_UWord16& nackSize,
+                                bool& listExtended);
+
+    WebRtc_Word64 LastDecodedTimestamp() const;
+
+private:
+    // Misc help functions
+    // Recycle (release) frame, used if we didn't receive whole frame
+    void RecycleFrame(VCMFrameBuffer* frame);
+    void ReleaseFrameInternal(VCMFrameBuffer* frame);
+    // Flush and reset the jitter buffer. Call under critical section.
+    void FlushInternal();
+
+    // Help functions for insert packet
+    // Get empty frame, creates new (i.e. increases JB size) if necessary
+    VCMFrameBuffer* GetEmptyFrame();
+    // Recycle oldest frames up to a key frame, used if JB is completely full
+    bool RecycleFramesUntilKeyFrame();
+    // Update frame state
+    // (set as complete or reconstructable if conditions are met)
+    VCMFrameBufferEnum UpdateFrameState(VCMFrameBuffer* frameListItem);
+
+    // Help functions for getting a frame
+    // Find oldest complete frame, used for getting next frame to decode
+    // When enabled, will return a decodable frame
+    FrameList::iterator FindOldestCompleteContinuousFrame(bool enableDecodable);
+
+    void CleanUpOldFrames();
+
+    void VerifyAndSetPreviousFrameLost(VCMFrameBuffer& frame);
+    bool IsPacketRetransmitted(const VCMPacket& packet) const;
+
+    void UpdateJitterAndDelayEstimates(VCMJitterSample& sample,
+                                       bool incompleteFrame);
+    void UpdateJitterAndDelayEstimates(VCMFrameBuffer& frame,
+                                       bool incompleteFrame);
+    void UpdateJitterAndDelayEstimates(WebRtc_Word64 latestPacketTimeMs,
+                                       WebRtc_UWord32 timestamp,
+                                       WebRtc_UWord32 frameSize,
+                                       bool incompleteFrame);
+    void UpdateOldJitterSample(const VCMPacket& packet);
+    WebRtc_UWord32 GetEstimatedJitterMsInternal();
+
+    // NACK help
+    WebRtc_UWord16* CreateNackList(WebRtc_UWord16& nackSize,
+                                   bool& listExtended);
+    WebRtc_Word32 GetLowHighSequenceNumbers(WebRtc_Word32& lowSeqNum,
+                                            WebRtc_Word32& highSeqNum) const;
+
+    // Decide whether should wait for NACK (mainly relevant for hybrid mode)
+    bool WaitForNack();
+
+    WebRtc_Word32                 _vcmId;
+    WebRtc_Word32                 _receiverId;
+    TickTimeBase*                 _clock;
+    // If we are running (have started) or not
+    bool                          _running;
+    CriticalSectionWrapper*       _critSect;
+    bool                          _master;
+    // Event to signal when we have a frame ready for decoder
+    VCMEvent                      _frameEvent;
+    // Event to signal when we have received a packet
+    VCMEvent                      _packetEvent;
+    // Number of allocated frames
+    WebRtc_Word32                 _maxNumberOfFrames;
+    // Array of pointers to the frames in JB
+    VCMFrameBuffer*               _frameBuffers[kMaxNumberOfFrames];
+    FrameList _frameList;
+
+    // timing
+    VCMDecodingState       _lastDecodedState;
+    WebRtc_UWord32          _packetsNotDecodable;
+
+    // Statistics
+    // Frame counter for each type (key, delta, golden, key-delta)
+    WebRtc_UWord8           _receiveStatistics[4];
+    // Latest calculated frame rates of incoming stream
+    WebRtc_UWord8           _incomingFrameRate;
+    // Frame counter, reset in GetUpdate
+    WebRtc_UWord32          _incomingFrameCount;
+    // Real time for last _frameCount reset
+    WebRtc_Word64           _timeLastIncomingFrameCount;
+    // Received bits counter, reset in GetUpdate
+    WebRtc_UWord32          _incomingBitCount;
+    WebRtc_UWord32          _incomingBitRate;
+    WebRtc_UWord32          _dropCount;            // Frame drop counter
+    // Number of frames in a row that have been too old
+    WebRtc_UWord32          _numConsecutiveOldFrames;
+    // Number of packets in a row that have been too old
+    WebRtc_UWord32          _numConsecutiveOldPackets;
+    // Number of packets discarded by the jitter buffer
+    WebRtc_UWord32          _discardedPackets;
+
+    // Filters for estimating jitter
+    VCMJitterEstimator      _jitterEstimate;
+    // Calculates network delays used for jitter calculations
+    VCMInterFrameDelay      _delayEstimate;
+    VCMJitterSample         _waitingForCompletion;
+    WebRtc_UWord32          _rttMs;
+
+    // NACK
+    VCMNackMode             _nackMode;
+    int                     _lowRttNackThresholdMs;
+    int                     _highRttNackThresholdMs;
+    // Holds the internal nack list (the missing sequence numbers)
+    WebRtc_Word32           _NACKSeqNumInternal[kNackHistoryLength];
+    WebRtc_UWord16          _NACKSeqNum[kNackHistoryLength];
+    WebRtc_UWord32          _NACKSeqNumLength;
+    bool                    _waitingForKeyFrame;
+
+    bool                    _firstPacket;
+
+    DISALLOW_COPY_AND_ASSIGN(VCMJitterBuffer);
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_JITTER_BUFFER_H_
diff --git a/src/modules/video_coding/main/source/jitter_buffer_common.cc b/src/modules/video_coding/main/source/jitter_buffer_common.cc
new file mode 100644
index 0000000..79a21b4
--- /dev/null
+++ b/src/modules/video_coding/main/source/jitter_buffer_common.cc
@@ -0,0 +1,60 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "jitter_buffer_common.h"
+
+#include <cstdlib>
+
+namespace webrtc {
+
+WebRtc_UWord32 LatestTimestamp(WebRtc_UWord32 timestamp1,
+                               WebRtc_UWord32 timestamp2,
+                               bool* has_wrapped) {
+  bool wrap = (timestamp2 < 0x0000ffff && timestamp1 > 0xffff0000) ||
+      (timestamp2 > 0xffff0000 && timestamp1 < 0x0000ffff);
+  if (has_wrapped != NULL)
+    *has_wrapped = wrap;
+  if (timestamp1 > timestamp2 && !wrap)
+      return timestamp1;
+  else if (timestamp1 <= timestamp2 && !wrap)
+      return timestamp2;
+  else if (timestamp1 < timestamp2 && wrap)
+      return timestamp1;
+  else
+      return timestamp2;
+}
+
+WebRtc_Word32 LatestSequenceNumber(WebRtc_Word32 seq_num1,
+                                   WebRtc_Word32 seq_num2,
+                                   bool* has_wrapped) {
+  if (seq_num1 < 0 && seq_num2 < 0)
+    return -1;
+  else if (seq_num1 < 0)
+    return seq_num2;
+  else if (seq_num2 < 0)
+    return seq_num1;
+
+  bool wrap = (seq_num1 < 0x00ff && seq_num2 > 0xff00) ||
+          (seq_num1 > 0xff00 && seq_num2 < 0x00ff);
+
+  if (has_wrapped != NULL)
+    *has_wrapped = wrap;
+
+  if (seq_num2 > seq_num1 && !wrap)
+    return seq_num2;
+  else if (seq_num2 <= seq_num1 && !wrap)
+    return seq_num1;
+  else if (seq_num2 < seq_num1 && wrap)
+    return seq_num2;
+  else
+    return seq_num1;
+}
+
+}  // namespace webrtc
diff --git a/src/modules/video_coding/main/source/jitter_buffer_common.h b/src/modules/video_coding/main/source/jitter_buffer_common.h
new file mode 100644
index 0000000..38cea42
--- /dev/null
+++ b/src/modules/video_coding/main/source/jitter_buffer_common.h
@@ -0,0 +1,84 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_JITTER_BUFFER_COMMON_H_
+#define WEBRTC_MODULES_VIDEO_CODING_JITTER_BUFFER_COMMON_H_
+
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+enum { kMaxNumberOfFrames     = 100 };
+enum { kStartNumberOfFrames   = 6 };    // in packets, 6 packets are approximately 198 ms,
+                                        // we need at least one more for process
+enum { kMaxVideoDelayMs       = 2000 }; // in ms
+
+enum VCMJitterBufferEnum
+{
+    kMaxConsecutiveOldFrames        = 60,
+    kMaxConsecutiveOldPackets       = 300,
+    kMaxPacketsInSession            = 800,
+    kBufferIncStepSizeBytes         = 30000,       // >20 packets
+    kMaxJBFrameSizeBytes            = 4000000      // sanity don't go above 4Mbyte
+};
+
+enum VCMFrameBufferEnum
+{
+    kStateError           = -4,
+    kFlushIndicator       = -3,   // Indicator that a flush has occurred.
+    kTimeStampError       = -2,
+    kSizeError            = -1,
+    kNoError              = 0,
+    kIncomplete           = 1,    // Frame incomplete
+    kFirstPacket          = 2,
+    kCompleteSession      = 3,    // at least one layer in the frame complete.
+    kDecodableSession     = 4,    // Frame incomplete, but ready to be decoded
+    kDuplicatePacket      = 5     // We're receiving a duplicate packet.
+};
+
+enum VCMFrameBufferStateEnum
+{
+    kStateFree,               // Unused frame in the JB
+    kStateEmpty,              // frame popped by the RTP receiver
+    kStateIncomplete,         // frame that have one or more packet(s) stored
+    kStateComplete,           // frame that have all packets
+    kStateDecoding,           // frame popped by the decoding thread
+    kStateDecodable           // Hybrid mode - frame can be decoded
+};
+
+enum { kH264StartCodeLengthBytes = 4};
+
+// Used to indicate if a received packet contain a complete NALU (or equivalent)
+enum VCMNaluCompleteness
+{
+    kNaluUnset = 0,       //Packet has not been filled.
+    kNaluComplete = 1,    //Packet can be decoded as is.
+    kNaluStart,           // Packet contain beginning of NALU
+    kNaluIncomplete,      //Packet is not beginning or end of NALU
+    kNaluEnd,             // Packet is the end of a NALU
+};
+
+// Returns the latest of the two timestamps, compensating for wrap arounds.
+// This function assumes that the two timestamps are close in time.
+WebRtc_UWord32 LatestTimestamp(WebRtc_UWord32 timestamp1,
+                               WebRtc_UWord32 timestamp2,
+                               bool* has_wrapped);
+
+// Returns the latest of the two sequence numbers, compensating for wrap
+// arounds. This function assumes that the two sequence numbers are close in
+// time.
+WebRtc_Word32 LatestSequenceNumber(WebRtc_Word32 seq_num1,
+                                   WebRtc_Word32 seq_num2,
+                                   bool* has_wrapped);
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_JITTER_BUFFER_COMMON_H_
diff --git a/src/modules/video_coding/main/source/jitter_buffer_unittest.cc b/src/modules/video_coding/main/source/jitter_buffer_unittest.cc
new file mode 100644
index 0000000..6e48ea7
--- /dev/null
+++ b/src/modules/video_coding/main/source/jitter_buffer_unittest.cc
@@ -0,0 +1,376 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h>
+
+#include <list>
+
+#include "gtest/gtest.h"
+#include "modules/video_coding/main/source/jitter_buffer.h"
+#include "modules/video_coding/main/source/media_opt_util.h"
+#include "modules/video_coding/main/source/mock/fake_tick_time.h"
+#include "modules/video_coding/main/source/packet.h"
+
+namespace webrtc {
+
+class StreamGenerator {
+ public:
+  StreamGenerator(uint16_t start_seq_num, uint32_t start_timestamp,
+                  int64_t current_time)
+      : packets_(),
+        sequence_number_(start_seq_num),
+        timestamp_(start_timestamp),
+        start_time_(current_time) {}
+
+  void Init(uint16_t start_seq_num, uint32_t start_timestamp,
+            int64_t current_time) {
+    packets_.clear();
+    sequence_number_ = start_seq_num;
+    timestamp_ = start_timestamp;
+    start_time_ = current_time;
+  }
+
+  void GenerateFrame(FrameType type, int num_media_packets,
+                     int num_empty_packets, int64_t current_time) {
+    timestamp_ += 90 * (current_time - start_time_);
+    // Move the sequence number counter if all packets from the previous frame
+    // wasn't collected.
+    sequence_number_ += packets_.size();
+    packets_.clear();
+    for (int i = 0; i < num_media_packets; ++i) {
+      packets_.push_back(GeneratePacket(sequence_number_,
+                                        timestamp_,
+                                        (i == 0),
+                                        (i == num_media_packets - 1),
+                                        type));
+      ++sequence_number_;
+    }
+    for (int i = 0; i < num_empty_packets; ++i) {
+      packets_.push_back(GeneratePacket(sequence_number_,
+                                        timestamp_,
+                                        false,
+                                        false,
+                                        kFrameEmpty));
+      ++sequence_number_;
+    }
+  }
+
+  static VCMPacket GeneratePacket(uint16_t sequence_number,
+                                  uint32_t timestamp,
+                                  bool first_packet,
+                                  bool marker_bit,
+                                  FrameType type) {
+    VCMPacket packet;
+    packet.seqNum = sequence_number;
+    packet.timestamp = timestamp;
+    packet.frameType = type;
+    packet.isFirstPacket = first_packet;
+    packet.markerBit = marker_bit;
+    if (packet.isFirstPacket)
+      packet.completeNALU = kNaluStart;
+    else if (packet.markerBit)
+      packet.completeNALU = kNaluEnd;
+    else
+      packet.completeNALU = kNaluIncomplete;
+    return packet;
+  }
+
+  bool PopPacket(VCMPacket* packet, int index) {
+    std::list<VCMPacket>::iterator it = GetPacketIterator(index);
+    if (it == packets_.end())
+      return false;
+    if (packet)
+      *packet = (*it);
+    packets_.erase(it);
+    return true;
+  }
+
+  bool GetPacket(VCMPacket* packet, int index) {
+    std::list<VCMPacket>::iterator it = GetPacketIterator(index);
+    if (it == packets_.end())
+      return false;
+    if (packet)
+      *packet = (*it);
+    return true;
+  }
+
+  bool NextPacket(VCMPacket* packet) {
+    if (packets_.empty())
+      return false;
+    if (packet != NULL)
+      *packet = packets_.front();
+    packets_.pop_front();
+    return true;
+  }
+
+  uint16_t NextSequenceNumber() const {
+    if (packets_.empty())
+      return sequence_number_;
+    return packets_.front().seqNum;
+  }
+
+  int PacketsRemaining() const {
+    return packets_.size();
+  }
+
+ private:
+  std::list<VCMPacket>::iterator GetPacketIterator(int index) {
+    std::list<VCMPacket>::iterator it = packets_.begin();
+    for (int i = 0; i < index; ++i) {
+      ++it;
+      if (it == packets_.end()) break;
+    }
+    return it;
+  }
+
+  std::list<VCMPacket> packets_;
+  uint16_t sequence_number_;
+  uint32_t timestamp_;
+  int64_t start_time_;
+
+  DISALLOW_COPY_AND_ASSIGN(StreamGenerator);
+};
+
+class TestRunningJitterBuffer : public ::testing::Test {
+ protected:
+  enum { kDataBufferSize = 10 };
+  enum { kDefaultFrameRate = 25 };
+  enum { kDefaultFramePeriodMs = 1000 / kDefaultFrameRate };
+
+  virtual void SetUp() {
+    clock_ = new FakeTickTime(0);
+    jitter_buffer_ = new VCMJitterBuffer(clock_);
+    stream_generator = new StreamGenerator(0, 0,
+                                           clock_->MillisecondTimestamp());
+    jitter_buffer_->Start();
+    memset(data_buffer_, 0, kDataBufferSize);
+  }
+
+  virtual void TearDown() {
+    jitter_buffer_->Stop();
+    delete stream_generator;
+    delete jitter_buffer_;
+    delete clock_;
+  }
+
+  VCMFrameBufferEnum InsertPacketAndPop(int index) {
+    VCMPacket packet;
+    VCMEncodedFrame* frame;
+
+    packet.dataPtr = data_buffer_;
+    bool packet_available = stream_generator->PopPacket(&packet, index);
+    EXPECT_TRUE(packet_available);
+    if (!packet_available)
+      return kStateError;  // Return here to avoid crashes below.
+    EXPECT_EQ(VCM_OK, jitter_buffer_->GetFrame(packet, frame));
+    return jitter_buffer_->InsertPacket(frame, packet);
+  }
+
+  VCMFrameBufferEnum InsertPacket(int index) {
+    VCMPacket packet;
+    VCMEncodedFrame* frame;
+
+    packet.dataPtr = data_buffer_;
+    bool packet_available = stream_generator->GetPacket(&packet, index);
+    EXPECT_TRUE(packet_available);
+    if (!packet_available)
+      return kStateError;  // Return here to avoid crashes below.
+    EXPECT_EQ(VCM_OK, jitter_buffer_->GetFrame(packet, frame));
+    return jitter_buffer_->InsertPacket(frame, packet);
+  }
+
+  void InsertFrame(FrameType frame_type) {
+    stream_generator->GenerateFrame(frame_type,
+                                    (frame_type != kFrameEmpty) ? 1 : 0,
+                                    (frame_type == kFrameEmpty) ? 1 : 0,
+                                    clock_->MillisecondTimestamp());
+    EXPECT_EQ(kFirstPacket, InsertPacketAndPop(0));
+    clock_->IncrementDebugClock(kDefaultFramePeriodMs);
+  }
+
+  void InsertFrames(int num_frames, FrameType frame_type) {
+    for (int i = 0; i < num_frames; ++i) {
+      InsertFrame(frame_type);
+    }
+  }
+
+  void DropFrame(int num_packets) {
+    stream_generator->GenerateFrame(kVideoFrameDelta, num_packets, 0,
+                                    clock_->MillisecondTimestamp());
+    clock_->IncrementDebugClock(kDefaultFramePeriodMs);
+  }
+
+  bool DecodeCompleteFrame() {
+    VCMEncodedFrame* frame = jitter_buffer_->GetCompleteFrameForDecoding(0);
+    bool ret = (frame != NULL);
+    jitter_buffer_->ReleaseFrame(frame);
+    return ret;
+  }
+
+  bool DecodeFrame() {
+    VCMEncodedFrame* frame = jitter_buffer_->GetFrameForDecoding();
+    bool ret = (frame != NULL);
+    jitter_buffer_->ReleaseFrame(frame);
+    return ret;
+  }
+
+  VCMJitterBuffer* jitter_buffer_;
+  StreamGenerator* stream_generator;
+  FakeTickTime* clock_;
+  uint8_t data_buffer_[kDataBufferSize];
+};
+
+class TestJitterBufferNack : public TestRunningJitterBuffer {
+ protected:
+  virtual void SetUp() {
+    TestRunningJitterBuffer::SetUp();
+    jitter_buffer_->SetNackMode(kNackInfinite, -1, -1);
+  }
+
+  virtual void TearDown() {
+    TestRunningJitterBuffer::TearDown();
+  }
+};
+
+TEST_F(TestRunningJitterBuffer, TestFull) {
+  // Insert a key frame and decode it.
+  InsertFrame(kVideoFrameKey);
+  EXPECT_TRUE(DecodeCompleteFrame());
+  DropFrame(1);
+  // Fill the jitter buffer.
+  InsertFrames(kMaxNumberOfFrames, kVideoFrameDelta);
+  // Make sure we can't decode these frames.
+  EXPECT_FALSE(DecodeCompleteFrame());
+  // This frame will make the jitter buffer recycle frames until a key frame.
+  // Since none is found it will have to wait until the next key frame before
+  // decoding.
+  InsertFrame(kVideoFrameDelta);
+  EXPECT_FALSE(DecodeCompleteFrame());
+}
+
+TEST_F(TestRunningJitterBuffer, TestEmptyPackets) {
+  // Make sure a frame can get complete even though empty packets are missing.
+  stream_generator->GenerateFrame(kVideoFrameKey, 3, 3,
+                                  clock_->MillisecondTimestamp());
+  EXPECT_EQ(kFirstPacket, InsertPacketAndPop(4));
+  EXPECT_EQ(kIncomplete, InsertPacketAndPop(4));
+  EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
+  EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
+  EXPECT_EQ(kCompleteSession, InsertPacketAndPop(0));
+}
+
+TEST_F(TestJitterBufferNack, TestEmptyPackets) {
+  // Make sure empty packets doesn't clog the jitter buffer.
+  jitter_buffer_->SetNackMode(kNackHybrid, kLowRttNackMs, -1);
+  InsertFrames(kMaxNumberOfFrames, kFrameEmpty);
+  InsertFrame(kVideoFrameKey);
+  EXPECT_TRUE(DecodeCompleteFrame());
+}
+
+TEST_F(TestJitterBufferNack, TestNackListFull) {
+  // Insert a key frame and decode it.
+  InsertFrame(kVideoFrameKey);
+  EXPECT_TRUE(DecodeCompleteFrame());
+
+  // Generate and drop |kNackHistoryLength| packets to fill the NACK list.
+  DropFrame(kNackHistoryLength);
+  // Insert a frame which should trigger a recycle until the next key frame.
+  InsertFrame(kVideoFrameDelta);
+  EXPECT_FALSE(DecodeCompleteFrame());
+
+  uint16_t nack_list_length = kNackHistoryLength;
+  bool extended;
+  uint16_t* nack_list = jitter_buffer_->GetNackList(nack_list_length, extended);
+  // Verify that the jitter buffer requests a key frame.
+  EXPECT_TRUE(nack_list_length == 0xffff && nack_list == NULL);
+
+  InsertFrame(kVideoFrameDelta);
+  EXPECT_FALSE(DecodeCompleteFrame());
+  EXPECT_FALSE(DecodeFrame());
+}
+
+TEST_F(TestJitterBufferNack, TestNackBeforeDecode) {
+  DropFrame(10);
+  // Insert a frame and try to generate a NACK list. Shouldn't get one.
+  InsertFrame(kVideoFrameDelta);
+  uint16_t nack_list_size = 0;
+  bool extended = false;
+  uint16_t* list = jitter_buffer_->GetNackList(nack_list_size, extended);
+  // No list generated, and a key frame request is signaled.
+  EXPECT_TRUE(list == NULL);
+  EXPECT_EQ(0xFFFF, nack_list_size);
+}
+
+TEST_F(TestJitterBufferNack, TestNormalOperation) {
+  EXPECT_EQ(kNackInfinite, jitter_buffer_->GetNackMode());
+
+  InsertFrame(kVideoFrameKey);
+  EXPECT_TRUE(DecodeFrame());
+
+  //  ----------------------------------------------------------------
+  // | 1 | 2 | .. | 8 | 9 | x | 11 | 12 | .. | 19 | x | 21 | .. | 100 |
+  //  ----------------------------------------------------------------
+  stream_generator->GenerateFrame(kVideoFrameKey, 100, 0,
+                                  clock_->MillisecondTimestamp());
+  clock_->IncrementDebugClock(kDefaultFramePeriodMs);
+  EXPECT_EQ(kFirstPacket, InsertPacketAndPop(0));
+  // Verify that the frame is incomplete.
+  EXPECT_FALSE(DecodeCompleteFrame());
+  while (stream_generator->PacketsRemaining() > 1) {
+    if (stream_generator->NextSequenceNumber() % 10 != 0)
+      EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
+    else
+      stream_generator->NextPacket(NULL);  // Drop packet
+  }
+  EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
+  EXPECT_EQ(0, stream_generator->PacketsRemaining());
+  EXPECT_FALSE(DecodeCompleteFrame());
+  EXPECT_FALSE(DecodeFrame());
+  uint16_t nack_list_size = 0;
+  bool extended = false;
+  uint16_t* list = jitter_buffer_->GetNackList(nack_list_size, extended);
+  // Verify the NACK list.
+  const int kExpectedNackSize = 9;
+  ASSERT_EQ(kExpectedNackSize, nack_list_size);
+  for (int i = 0; i < nack_list_size; ++i)
+    EXPECT_EQ((1 + i) * 10, list[i]);
+}
+
+TEST_F(TestJitterBufferNack, TestNormalOperationWrap) {
+  //  -------   ------------------------------------------------------------
+  // | 65532 | | 65533 | 65534 | 65535 | x | 1 | .. | 9 | x | 11 |.....| 96 |
+  //  -------   ------------------------------------------------------------
+  stream_generator->Init(65532, 0, clock_->MillisecondTimestamp());
+  InsertFrame(kVideoFrameKey);
+  EXPECT_TRUE(DecodeCompleteFrame());
+  stream_generator->GenerateFrame(kVideoFrameDelta, 100, 0,
+                                  clock_->MillisecondTimestamp());
+  EXPECT_EQ(kFirstPacket, InsertPacketAndPop(0));
+  while (stream_generator->PacketsRemaining() > 1) {
+    if (stream_generator->NextSequenceNumber() % 10 != 0)
+      EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
+    else
+      stream_generator->NextPacket(NULL);  // Drop packet
+  }
+  EXPECT_EQ(kIncomplete, InsertPacketAndPop(0));
+  EXPECT_EQ(0, stream_generator->PacketsRemaining());
+  EXPECT_FALSE(DecodeCompleteFrame());
+  EXPECT_FALSE(DecodeCompleteFrame());
+  uint16_t nack_list_size = 0;
+  bool extended = false;
+  uint16_t* list = jitter_buffer_->GetNackList(nack_list_size, extended);
+  // Verify the NACK list.
+  const int kExpectedNackSize = 10;
+  ASSERT_EQ(kExpectedNackSize, nack_list_size);
+  for (int i = 0; i < nack_list_size; ++i)
+    EXPECT_EQ(i * 10, list[i]);
+}
+
+}  // namespace webrtc
diff --git a/src/modules/video_coding/main/source/jitter_estimator.cc b/src/modules/video_coding/main/source/jitter_estimator.cc
new file mode 100644
index 0000000..56bc689
--- /dev/null
+++ b/src/modules/video_coding/main/source/jitter_estimator.cc
@@ -0,0 +1,421 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "trace.h"
+#include "internal_defines.h"
+#include "jitter_estimator.h"
+#include "rtt_filter.h"
+
+#include <assert.h>
+#include <math.h>
+#include <stdlib.h>
+#include <string.h>
+
+namespace webrtc {
+
+VCMJitterEstimator::VCMJitterEstimator(WebRtc_Word32 vcmId, WebRtc_Word32 receiverId) :
+_vcmId(vcmId),
+_receiverId(receiverId),
+_phi(0.97),
+_psi(0.9999),
+_alphaCountMax(400),
+_thetaLow(0.000001),
+_nackLimit(3),
+_numStdDevDelayOutlier(15),
+_numStdDevFrameSizeOutlier(3),
+_noiseStdDevs(2.33), // ~Less than 1% chance
+                     // (look up in normal distribution table)...
+_noiseStdDevOffset(30.0), // ...of getting 30 ms freezes
+_rttFilter(vcmId, receiverId)
+{
+    Reset();
+}
+
+VCMJitterEstimator&
+VCMJitterEstimator::operator=(const VCMJitterEstimator& rhs)
+{
+    if (this != &rhs)
+    {
+        memcpy(_thetaCov, rhs._thetaCov, sizeof(_thetaCov));
+        memcpy(_Qcov, rhs._Qcov, sizeof(_Qcov));
+
+        _vcmId = rhs._vcmId;
+        _receiverId = rhs._receiverId;
+        _avgFrameSize = rhs._avgFrameSize;
+        _varFrameSize = rhs._varFrameSize;
+        _maxFrameSize = rhs._maxFrameSize;
+        _fsSum = rhs._fsSum;
+        _fsCount = rhs._fsCount;
+        _lastUpdateT = rhs._lastUpdateT;
+        _prevEstimate = rhs._prevEstimate;
+        _prevFrameSize = rhs._prevFrameSize;
+        _avgNoise = rhs._avgNoise;
+        _alphaCount = rhs._alphaCount;
+        _filterJitterEstimate = rhs._filterJitterEstimate;
+        _startupCount = rhs._startupCount;
+        _latestNackTimestamp = rhs._latestNackTimestamp;
+        _nackCount = rhs._nackCount;
+        _rttFilter = rhs._rttFilter;
+    }
+    return *this;
+}
+
+// Resets the JitterEstimate
+void
+VCMJitterEstimator::Reset()
+{
+    _theta[0] = 1/(512e3/8);
+    _theta[1] = 0;
+    _varNoise = 4.0;
+
+    _thetaCov[0][0] = 1e-4;
+    _thetaCov[1][1] = 1e2;
+    _thetaCov[0][1] = _thetaCov[1][0] = 0;
+    _Qcov[0][0] = 2.5e-10;
+    _Qcov[1][1] = 1e-10;
+    _Qcov[0][1] = _Qcov[1][0] = 0;
+    _avgFrameSize = 500;
+    _maxFrameSize = 500;
+    _varFrameSize = 100;
+    _lastUpdateT = -1;
+    _prevEstimate = -1.0;
+    _prevFrameSize = 0;
+    _avgNoise = 0.0;
+    _alphaCount = 1;
+    _filterJitterEstimate = 0.0;
+    _latestNackTimestamp = 0;
+    _nackCount = 0;
+    _fsSum = 0;
+    _fsCount = 0;
+    _startupCount = 0;
+    _rttFilter.Reset();
+}
+
+void
+VCMJitterEstimator::ResetNackCount()
+{
+    _nackCount = 0;
+}
+
+// Updates the estimates with the new measurements
+void
+VCMJitterEstimator::UpdateEstimate(WebRtc_Word64 frameDelayMS, WebRtc_UWord32 frameSizeBytes,
+                                            bool incompleteFrame /* = false */)
+{
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
+               VCMId(_vcmId, _receiverId),
+               "Jitter estimate updated with: frameSize=%d frameDelayMS=%d",
+               frameSizeBytes, frameDelayMS);
+    if (frameSizeBytes == 0)
+    {
+        return;
+    }
+    int deltaFS = frameSizeBytes - _prevFrameSize;
+    if (_fsCount < kFsAccuStartupSamples)
+    {
+        _fsSum += frameSizeBytes;
+        _fsCount++;
+    }
+    else if (_fsCount == kFsAccuStartupSamples)
+    {
+        // Give the frame size filter
+        _avgFrameSize = static_cast<double>(_fsSum) /
+                        static_cast<double>(_fsCount);
+        _fsCount++;
+    }
+    if (!incompleteFrame || frameSizeBytes > _avgFrameSize)
+    {
+        double avgFrameSize = _phi * _avgFrameSize +
+                              (1 - _phi) * frameSizeBytes;
+        if (frameSizeBytes < _avgFrameSize + 2 * sqrt(_varFrameSize))
+        {
+            // Only update the average frame size if this sample wasn't a
+            // key frame
+            _avgFrameSize = avgFrameSize;
+        }
+        // Update the variance anyway since we want to capture cases where we only get
+        // key frames.
+        _varFrameSize = VCM_MAX(_phi * _varFrameSize + (1 - _phi) *
+                                (frameSizeBytes - avgFrameSize) *
+                                (frameSizeBytes - avgFrameSize), 1.0);
+    }
+
+    // Update max frameSize estimate
+    _maxFrameSize = VCM_MAX(_psi * _maxFrameSize, static_cast<double>(frameSizeBytes));
+
+    if (_prevFrameSize == 0)
+    {
+        _prevFrameSize = frameSizeBytes;
+        return;
+    }
+    _prevFrameSize = frameSizeBytes;
+
+    // Only update the Kalman filter if the sample is not considered
+    // an extreme outlier. Even if it is an extreme outlier from a
+    // delay point of view, if the frame size also is large the
+    // deviation is probably due to an incorrect line slope.
+    double deviation = DeviationFromExpectedDelay(frameDelayMS, deltaFS);
+
+    if (abs(deviation) < _numStdDevDelayOutlier * sqrt(_varNoise) ||
+        frameSizeBytes > _avgFrameSize + _numStdDevFrameSizeOutlier * sqrt(_varFrameSize))
+    {
+        // Update the variance of the deviation from the
+        // line given by the Kalman filter
+        EstimateRandomJitter(deviation, incompleteFrame);
+        // Prevent updating with frames which have been congested by a large
+        // frame, and therefore arrives almost at the same time as that frame.
+        // This can occur when we receive a large frame (key frame) which
+        // has been delayed. The next frame is of normal size (delta frame),
+        // and thus deltaFS will be << 0. This removes all frame samples
+        // which arrives after a key frame.
+        if ((!incompleteFrame || deviation >= 0.0) &&
+            static_cast<double>(deltaFS) > - 0.25 * _maxFrameSize)
+        {
+            // Update the Kalman filter with the new data
+            KalmanEstimateChannel(frameDelayMS, deltaFS);
+        }
+    }
+    else
+    {
+        int nStdDev = (deviation >= 0) ? _numStdDevDelayOutlier : -_numStdDevDelayOutlier;
+        EstimateRandomJitter(nStdDev * sqrt(_varNoise), incompleteFrame);
+    }
+    // Post process the total estimated jitter
+    if (_startupCount >= kStartupDelaySamples)
+    {
+        PostProcessEstimate();
+    }
+    else
+    {
+        _startupCount++;
+    }
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
+               "Framesize statistics: max=%f average=%f", _maxFrameSize, _avgFrameSize);
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
+               "The estimated slope is: theta=(%f, %f)", _theta[0], _theta[1]);
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
+               "Random jitter: mean=%f variance=%f", _avgNoise, _varNoise);
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
+               "Current jitter estimate: %f", _filterJitterEstimate);
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
+               "Current max RTT: %u", _rttFilter.RttMs());
+}
+
+// Updates the nack/packet ratio
+void
+VCMJitterEstimator::FrameNacked()
+{
+    // Wait until _nackLimit retransmissions has been received,
+    // then always add ~1 RTT delay.
+    // TODO(holmer): Should we ever remove the additional delay if the
+    // the packet losses seem to have stopped? We could for instance scale
+    // the number of RTTs to add with the amount of retransmissions in a given
+    // time interval, or similar.
+    if (_nackCount < _nackLimit)
+    {
+        _nackCount++;
+    }
+}
+
+// Updates Kalman estimate of the channel
+// The caller is expected to sanity check the inputs.
+void
+VCMJitterEstimator::KalmanEstimateChannel(WebRtc_Word64 frameDelayMS,
+                                          WebRtc_Word32 deltaFSBytes)
+{
+    double Mh[2];
+    double hMh_sigma;
+    double kalmanGain[2];
+    double measureRes;
+    double t00, t01;
+
+    // Kalman filtering
+
+    // Prediction
+    // M = M + Q
+    _thetaCov[0][0] += _Qcov[0][0];
+    _thetaCov[0][1] += _Qcov[0][1];
+    _thetaCov[1][0] += _Qcov[1][0];
+    _thetaCov[1][1] += _Qcov[1][1];
+
+    // Kalman gain
+    // K = M*h'/(sigma2n + h*M*h') = M*h'/(1 + h*M*h')
+    // h = [dFS 1]
+    // Mh = M*h'
+    // hMh_sigma = h*M*h' + R
+    Mh[0] = _thetaCov[0][0] * deltaFSBytes + _thetaCov[0][1];
+    Mh[1] = _thetaCov[1][0] * deltaFSBytes + _thetaCov[1][1];
+    // sigma weights measurements with a small deltaFS as noisy and
+    // measurements with large deltaFS as good
+    if (_maxFrameSize < 1.0)
+    {
+        return;
+    }
+    double sigma = (300.0 * exp(-abs(static_cast<double>(deltaFSBytes)) /
+                   (1e0 * _maxFrameSize)) + 1) * sqrt(_varNoise);
+    if (sigma < 1.0)
+    {
+        sigma = 1.0;
+    }
+    hMh_sigma = deltaFSBytes * Mh[0] + Mh[1] + sigma;
+    if ((hMh_sigma < 1e-9 && hMh_sigma >= 0) || (hMh_sigma > -1e-9 && hMh_sigma <= 0))
+    {
+        assert(false);
+        return;
+    }
+    kalmanGain[0] = Mh[0] / hMh_sigma;
+    kalmanGain[1] = Mh[1] / hMh_sigma;
+
+    // Correction
+    // theta = theta + K*(dT - h*theta)
+    measureRes = frameDelayMS - (deltaFSBytes * _theta[0] + _theta[1]);
+    _theta[0] += kalmanGain[0] * measureRes;
+    _theta[1] += kalmanGain[1] * measureRes;
+
+    if (_theta[0] < _thetaLow)
+    {
+        _theta[0] = _thetaLow;
+    }
+
+    // M = (I - K*h)*M
+    t00 = _thetaCov[0][0];
+    t01 = _thetaCov[0][1];
+    _thetaCov[0][0] = (1 - kalmanGain[0] * deltaFSBytes) * t00 -
+                      kalmanGain[0] * _thetaCov[1][0];
+    _thetaCov[0][1] = (1 - kalmanGain[0] * deltaFSBytes) * t01 -
+                      kalmanGain[0] * _thetaCov[1][1];
+    _thetaCov[1][0] = _thetaCov[1][0] * (1 - kalmanGain[1]) -
+                      kalmanGain[1] * deltaFSBytes * t00;
+    _thetaCov[1][1] = _thetaCov[1][1] * (1 - kalmanGain[1]) -
+                      kalmanGain[1] * deltaFSBytes * t01;
+
+    // Covariance matrix, must be positive semi-definite
+    assert(_thetaCov[0][0] + _thetaCov[1][1] >= 0 &&
+           _thetaCov[0][0] * _thetaCov[1][1] - _thetaCov[0][1] * _thetaCov[1][0] >= 0 &&
+           _thetaCov[0][0] >= 0);
+}
+
+// Calculate difference in delay between a sample and the
+// expected delay estimated by the Kalman filter
+double
+VCMJitterEstimator::DeviationFromExpectedDelay(WebRtc_Word64 frameDelayMS,
+                                               WebRtc_Word32 deltaFSBytes) const
+{
+    return frameDelayMS - (_theta[0] * deltaFSBytes + _theta[1]);
+}
+
+// Estimates the random jitter by calculating the variance of the
+// sample distance from the line given by theta.
+void
+VCMJitterEstimator::EstimateRandomJitter(double d_dT, bool incompleteFrame)
+{
+    double alpha;
+    if (_alphaCount == 0)
+    {
+        assert(_alphaCount > 0);
+        return;
+    }
+    alpha = static_cast<double>(_alphaCount - 1) / static_cast<double>(_alphaCount);
+    _alphaCount++;
+    if (_alphaCount > _alphaCountMax)
+    {
+        _alphaCount = _alphaCountMax;
+    }
+    double avgNoise = alpha * _avgNoise + (1 - alpha) * d_dT;
+    double varNoise = alpha * _varNoise +
+                      (1 - alpha) * (d_dT - _avgNoise) * (d_dT - _avgNoise);
+    if (!incompleteFrame || varNoise > _varNoise)
+    {
+        _avgNoise = avgNoise;
+        _varNoise = varNoise;
+    }
+    if (_varNoise < 1.0)
+    {
+        // The variance should never be zero, since we might get
+        // stuck and consider all samples as outliers.
+        _varNoise = 1.0;
+    }
+}
+
+double
+VCMJitterEstimator::NoiseThreshold() const
+{
+    double noiseThreshold = _noiseStdDevs * sqrt(_varNoise) - _noiseStdDevOffset;
+    if (noiseThreshold < 1.0)
+    {
+        noiseThreshold = 1.0;
+    }
+    return noiseThreshold;
+}
+
+// Calculates the current jitter estimate from the filtered estimates
+double
+VCMJitterEstimator::CalculateEstimate()
+{
+    double ret = _theta[0] * (_maxFrameSize - _avgFrameSize) + NoiseThreshold();
+
+    // A very low estimate (or negative) is neglected
+    if (ret < 1.0) {
+        if (_prevEstimate <= 0.01)
+        {
+            ret = 1.0;
+        }
+        else
+        {
+            ret = _prevEstimate;
+        }
+    }
+    if (ret > 10000.0) // Sanity
+    {
+        ret = 10000.0;
+    }
+    _prevEstimate = ret;
+    return ret;
+}
+
+void
+VCMJitterEstimator::PostProcessEstimate()
+{
+    _filterJitterEstimate = CalculateEstimate();
+}
+
+void
+VCMJitterEstimator::UpdateRtt(WebRtc_UWord32 rttMs)
+{
+    _rttFilter.Update(rttMs);
+}
+
+void
+VCMJitterEstimator::UpdateMaxFrameSize(WebRtc_UWord32 frameSizeBytes)
+{
+    if (_maxFrameSize < frameSizeBytes)
+    {
+        _maxFrameSize = frameSizeBytes;
+    }
+}
+
+// Returns the current filtered estimate if available,
+// otherwise tries to calculate an estimate.
+double
+VCMJitterEstimator::GetJitterEstimate(double rttMultiplier)
+{
+    double jitterMS = CalculateEstimate();
+    if (_filterJitterEstimate > jitterMS)
+    {
+        jitterMS = _filterJitterEstimate;
+    }
+    if (_nackCount >= _nackLimit)
+    {
+        return jitterMS + _rttFilter.RttMs() * rttMultiplier;
+    }
+    return jitterMS;
+}
+
+}
diff --git a/src/modules/video_coding/main/source/jitter_estimator.h b/src/modules/video_coding/main/source/jitter_estimator.h
new file mode 100644
index 0000000..5c48d91
--- /dev/null
+++ b/src/modules/video_coding/main/source/jitter_estimator.h
@@ -0,0 +1,154 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_JITTER_ESTIMATOR_H_
+#define WEBRTC_MODULES_VIDEO_CODING_JITTER_ESTIMATOR_H_
+
+#include "typedefs.h"
+#include "rtt_filter.h"
+
+namespace webrtc
+{
+
+class VCMJitterEstimator
+{
+public:
+    VCMJitterEstimator(WebRtc_Word32 vcmId = 0, WebRtc_Word32 receiverId = 0);
+
+    VCMJitterEstimator& operator=(const VCMJitterEstimator& rhs);
+
+    // Resets the estimate to the initial state
+    void Reset();
+    void ResetNackCount();
+
+    // Updates the jitter estimate with the new data.
+    //
+    // Input:
+    //          - frameDelay      : Delay-delta calculated by UTILDelayEstimate in milliseconds
+    //          - frameSize       : Frame size of the current frame.
+    //          - incompleteFrame : Flags if the frame is used to update the estimate before it
+    //                              was complete. Default is false.
+    void UpdateEstimate(WebRtc_Word64 frameDelayMS,
+                        WebRtc_UWord32 frameSizeBytes,
+                        bool incompleteFrame = false);
+
+    // Returns the current jitter estimate in milliseconds and adds
+    // also adds an RTT dependent term in cases of retransmission.
+    //  Input:
+    //          - rttMultiplier  : RTT param multiplier (when applicable).
+    //
+    // Return value                   : Jitter estimate in milliseconds
+    double GetJitterEstimate(double rttMultiplier);
+
+    // Updates the nack counter.
+    void FrameNacked();
+
+    // Updates the RTT filter.
+    //
+    // Input:
+    //          - rttMs               : RTT in ms
+    void UpdateRtt(WebRtc_UWord32 rttMs);
+
+    void UpdateMaxFrameSize(WebRtc_UWord32 frameSizeBytes);
+
+    // A constant describing the delay from the jitter buffer
+    // to the delay on the receiving side which is not accounted
+    // for by the jitter buffer nor the decoding delay estimate.
+    static const WebRtc_UWord32 OPERATING_SYSTEM_JITTER = 10;
+
+protected:
+    // These are protected for better testing possibilities
+    double              _theta[2]; // Estimated line parameters (slope, offset)
+    double              _varNoise; // Variance of the time-deviation from the line
+
+private:
+    // Updates the Kalman filter for the line describing
+    // the frame size dependent jitter.
+    //
+    // Input:
+    //          - frameDelayMS    : Delay-delta calculated by UTILDelayEstimate in milliseconds
+    //          - deltaFSBytes    : Frame size delta, i.e.
+    //                            : frame size at time T minus frame size at time T-1
+    void KalmanEstimateChannel(WebRtc_Word64 frameDelayMS, WebRtc_Word32 deltaFSBytes);
+
+    // Updates the random jitter estimate, i.e. the variance
+    // of the time deviations from the line given by the Kalman filter.
+    //
+    // Input:
+    //          - d_dT              : The deviation from the kalman estimate
+    //          - incompleteFrame   : True if the frame used to update the estimate
+    //                                with was incomplete
+    void EstimateRandomJitter(double d_dT, bool incompleteFrame);
+
+    double NoiseThreshold() const;
+
+    // Calculates the current jitter estimate.
+    //
+    // Return value                 : The current jitter estimate in milliseconds
+    double CalculateEstimate();
+
+    // Post process the calculated estimate
+    void PostProcessEstimate();
+
+    // Calculates the difference in delay between a sample and the
+    // expected delay estimated by the Kalman filter.
+    //
+    // Input:
+    //          - frameDelayMS    : Delay-delta calculated by UTILDelayEstimate in milliseconds
+    //          - deltaFS         : Frame size delta, i.e. frame size at time
+    //                              T minus frame size at time T-1
+    //
+    // Return value                 : The difference in milliseconds
+    double DeviationFromExpectedDelay(WebRtc_Word64 frameDelayMS,
+                                      WebRtc_Word32 deltaFSBytes) const;
+
+    // Constants, filter parameters
+    WebRtc_Word32         _vcmId;
+    WebRtc_Word32         _receiverId;
+    const double          _phi;
+    const double          _psi;
+    const WebRtc_UWord32  _alphaCountMax;
+    const double          _thetaLow;
+    const WebRtc_UWord32  _nackLimit;
+    const WebRtc_Word32   _numStdDevDelayOutlier;
+    const WebRtc_Word32   _numStdDevFrameSizeOutlier;
+    const double          _noiseStdDevs;
+    const double          _noiseStdDevOffset;
+
+    double                _thetaCov[2][2]; // Estimate covariance
+    double                _Qcov[2][2];     // Process noise covariance
+    double                _avgFrameSize;   // Average frame size
+    double                _varFrameSize;   // Frame size variance
+    double                _maxFrameSize;   // Largest frame size received (descending
+                                           // with a factor _psi)
+    WebRtc_UWord32        _fsSum;
+    WebRtc_UWord32        _fsCount;
+
+    WebRtc_Word64         _lastUpdateT;
+    double                _prevEstimate;         // The previously returned jitter estimate
+    WebRtc_UWord32        _prevFrameSize;        // Frame size of the previous frame
+    double                _avgNoise;             // Average of the random jitter
+    WebRtc_UWord32        _alphaCount;
+    double                _filterJitterEstimate; // The filtered sum of jitter estimates
+
+    WebRtc_UWord32        _startupCount;
+
+    WebRtc_Word64         _latestNackTimestamp;  // Timestamp in ms when the latest nack was seen
+    WebRtc_UWord32        _nackCount;            // Keeps track of the number of nacks received,
+                                                 // but never goes above _nackLimit
+    VCMRttFilter          _rttFilter;
+
+    enum { kStartupDelaySamples = 30 };
+    enum { kFsAccuStartupSamples = 5 };
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_JITTER_ESTIMATOR_H_
diff --git a/src/modules/video_coding/main/source/media_opt_util.cc b/src/modules/video_coding/main/source/media_opt_util.cc
new file mode 100644
index 0000000..a105827
--- /dev/null
+++ b/src/modules/video_coding/main/source/media_opt_util.cc
@@ -0,0 +1,953 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/main/source/media_opt_util.h"
+
+#include <algorithm>
+#include <math.h>
+#include <float.h>
+#include <limits.h>
+
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/codecs/vp8/include/vp8_common_types.h"
+#include "modules/video_coding/main/interface/video_coding_defines.h"
+#include "modules/video_coding/main/source/er_tables_xor.h"
+#include "modules/video_coding/main/source/fec_tables_xor.h"
+#include "modules/video_coding/main/source/nack_fec_tables.h"
+
+namespace webrtc {
+
+VCMProtectionMethod::VCMProtectionMethod():
+_effectivePacketLoss(0),
+_protectionFactorK(0),
+_protectionFactorD(0),
+_residualPacketLossFec(0.0f),
+_scaleProtKey(2.0f),
+_maxPayloadSize(1460),
+_qmRobustness(new VCMQmRobustness()),
+_useUepProtectionK(false),
+_useUepProtectionD(true),
+_corrFecCost(1.0),
+_type(kNone),
+_efficiency(0)
+{
+    //
+}
+
+VCMProtectionMethod::~VCMProtectionMethod()
+{
+    delete _qmRobustness;
+}
+void
+VCMProtectionMethod::UpdateContentMetrics(const
+                                          VideoContentMetrics* contentMetrics)
+{
+    _qmRobustness->UpdateContent(contentMetrics);
+}
+
+VCMNackFecMethod::VCMNackFecMethod(int lowRttNackThresholdMs,
+                                   int highRttNackThresholdMs)
+    : VCMFecMethod(),
+      _lowRttNackMs(lowRttNackThresholdMs),
+      _highRttNackMs(highRttNackThresholdMs),
+      _maxFramesFec(1) {
+  assert(lowRttNackThresholdMs >= -1 && highRttNackThresholdMs >= -1);
+  assert(highRttNackThresholdMs == -1 ||
+         lowRttNackThresholdMs <= highRttNackThresholdMs);
+  assert(lowRttNackThresholdMs > -1 || highRttNackThresholdMs == -1);
+  _type = kNackFec;
+}
+
+VCMNackFecMethod::~VCMNackFecMethod()
+{
+    //
+}
+bool
+VCMNackFecMethod::ProtectionFactor(const VCMProtectionParameters* parameters)
+{
+    // Hybrid Nack FEC has three operational modes:
+    // 1. Low RTT (below kLowRttNackMs) - Nack only: Set FEC rate
+    //    (_protectionFactorD) to zero. -1 means no FEC.
+    // 2. High RTT (above _highRttNackMs) - FEC Only: Keep FEC factors.
+    //    -1 means always allow NACK.
+    // 3. Medium RTT values - Hybrid mode: We will only nack the
+    //    residual following the decoding of the FEC (refer to JB logic). FEC
+    //    delta protection factor will be adjusted based on the RTT.
+
+    // Otherwise: we count on FEC; if the RTT is below a threshold, then we
+    // nack the residual, based on a decision made in the JB.
+
+    // Compute the protection factors
+    VCMFecMethod::ProtectionFactor(parameters);
+    if (_lowRttNackMs == -1 || parameters->rtt < _lowRttNackMs)
+    {
+        _protectionFactorD = 0;
+        VCMFecMethod::UpdateProtectionFactorD(_protectionFactorD);
+    }
+
+    // When in Hybrid mode (RTT range), adjust FEC rates based on the
+    // RTT (NACK effectiveness) - adjustment factor is in the range [0,1].
+    else if (_highRttNackMs == -1 || parameters->rtt < _highRttNackMs)
+    {
+        // TODO(mikhal): Disabling adjustment temporarily.
+        // WebRtc_UWord16 rttIndex = (WebRtc_UWord16) parameters->rtt;
+        float adjustRtt = 1.0f;// (float)VCMNackFecTable[rttIndex] / 100.0f;
+
+        // Adjust FEC with NACK on (for delta frame only)
+        // table depends on RTT relative to rttMax (NACK Threshold)
+        _protectionFactorD = static_cast<WebRtc_UWord8>
+                            (adjustRtt *
+                             static_cast<float>(_protectionFactorD));
+        // update FEC rates after applying adjustment
+        VCMFecMethod::UpdateProtectionFactorD(_protectionFactorD);
+    }
+
+    return true;
+}
+
+int VCMNackFecMethod::ComputeMaxFramesFec(
+    const VCMProtectionParameters* parameters) {
+  if (parameters->numLayers > 2) {
+    // For more than 2 temporal layers we will only have FEC on the base layer,
+    // and the base layers will be pretty far apart. Therefore we force one
+    // frame FEC.
+    return 1;
+  }
+  // We set the max number of frames to base the FEC on so that on average
+  // we will have complete frames in one RTT. Note that this is an upper
+  // bound, and that the actual number of frames used for FEC is decided by the
+  // RTP module based on the actual number of packets and the protection factor.
+  float base_layer_framerate = parameters->frameRate /
+      static_cast<float>(1 << (parameters->numLayers - 1));
+  int max_frames_fec = std::max(static_cast<int>(
+      2.0f * base_layer_framerate * parameters->rtt /
+      1000.0f + 0.5f), 1);
+  // |kUpperLimitFramesFec| is the upper limit on how many frames we
+  // allow any FEC to be based on.
+  if (max_frames_fec > kUpperLimitFramesFec) {
+    max_frames_fec = kUpperLimitFramesFec;
+  }
+  return max_frames_fec;
+}
+
+int VCMNackFecMethod::MaxFramesFec() const {
+  return _maxFramesFec;
+}
+
+bool VCMNackFecMethod::BitRateTooLowForFec(
+    const VCMProtectionParameters* parameters) {
+  // Bitrate below which we turn off FEC, regardless of reported packet loss.
+  // The condition should depend on resolution and content. For now, use
+  // threshold on bytes per frame, with some effect for the frame size.
+  // The condition for turning off FEC is also based on other factors,
+  // such as |_numLayers|, |_maxFramesFec|, and |_rtt|.
+  int estimate_bytes_per_frame = 1000 * BitsPerFrame(parameters) / 8;
+  int max_bytes_per_frame = kMaxBytesPerFrameForFec;
+  int num_pixels = parameters->codecWidth * parameters->codecHeight;
+  if (num_pixels <= 352 * 288) {
+    max_bytes_per_frame = kMaxBytesPerFrameForFecLow;
+  } else if (num_pixels > 640 * 480) {
+    max_bytes_per_frame = kMaxBytesPerFrameForFecHigh;
+  }
+  // TODO (marpan): add condition based on maximum frames used for FEC,
+  // and expand condition based on frame size.
+  if (estimate_bytes_per_frame < max_bytes_per_frame &&
+      parameters->numLayers < 3 &&
+      parameters->rtt < kMaxRttTurnOffFec) {
+    return true;
+  }
+  return false;
+}
+
+bool
+VCMNackFecMethod::EffectivePacketLoss(const VCMProtectionParameters* parameters)
+{
+    // Set the effective packet loss for encoder (based on FEC code).
+    // Compute the effective packet loss and residual packet loss due to FEC.
+    VCMFecMethod::EffectivePacketLoss(parameters);
+    return true;
+}
+
+bool
+VCMNackFecMethod::UpdateParameters(const VCMProtectionParameters* parameters)
+{
+    ProtectionFactor(parameters);
+    EffectivePacketLoss(parameters);
+    _maxFramesFec = ComputeMaxFramesFec(parameters);
+    if (BitRateTooLowForFec(parameters)) {
+      _protectionFactorK = 0;
+      _protectionFactorD = 0;
+    }
+
+    // Efficiency computation is based on FEC and NACK
+
+    // Add FEC cost: ignore I frames for now
+    float fecRate = static_cast<float> (_protectionFactorD) / 255.0f;
+    _efficiency = parameters->bitRate * fecRate * _corrFecCost;
+
+    // Add NACK cost, when applicable
+    if (_highRttNackMs == -1 || parameters->rtt < _highRttNackMs)
+    {
+        // nackCost  = (bitRate - nackCost) * (lossPr)
+        _efficiency += parameters->bitRate * _residualPacketLossFec /
+                       (1.0f + _residualPacketLossFec);
+    }
+
+    // Protection/fec rates obtained above are defined relative to total number
+    // of packets (total rate: source + fec) FEC in RTP module assumes
+    // protection factor is defined relative to source number of packets so we
+    // should convert the factor to reduce mismatch between mediaOpt's rate and
+    // the actual one
+    _protectionFactorK = VCMFecMethod::ConvertFECRate(_protectionFactorK);
+    _protectionFactorD = VCMFecMethod::ConvertFECRate(_protectionFactorD);
+
+    return true;
+}
+
+VCMNackMethod::VCMNackMethod():
+VCMProtectionMethod()
+{
+    _type = kNack;
+}
+
+VCMNackMethod::~VCMNackMethod()
+{
+    //
+}
+
+bool
+VCMNackMethod::EffectivePacketLoss(const VCMProtectionParameters* parameter)
+{
+    // Effective Packet Loss, NA in current version.
+    _effectivePacketLoss = 0;
+    return true;
+}
+
+bool
+VCMNackMethod::UpdateParameters(const VCMProtectionParameters* parameters)
+{
+    // Compute the effective packet loss
+    EffectivePacketLoss(parameters);
+
+    // nackCost  = (bitRate - nackCost) * (lossPr)
+    _efficiency = parameters->bitRate * parameters->lossPr /
+                  (1.0f + parameters->lossPr);
+    return true;
+}
+
+VCMFecMethod::VCMFecMethod():
+VCMProtectionMethod()
+{
+    _type = kFec;
+}
+VCMFecMethod::~VCMFecMethod()
+{
+    //
+}
+
+WebRtc_UWord8
+VCMFecMethod::BoostCodeRateKey(WebRtc_UWord8 packetFrameDelta,
+                               WebRtc_UWord8 packetFrameKey) const
+{
+    WebRtc_UWord8 boostRateKey = 2;
+    // Default: ratio scales the FEC protection up for I frames
+    WebRtc_UWord8 ratio = 1;
+
+    if (packetFrameDelta > 0)
+    {
+        ratio = (WebRtc_Word8) (packetFrameKey / packetFrameDelta);
+    }
+    ratio = VCM_MAX(boostRateKey, ratio);
+
+    return ratio;
+}
+
+WebRtc_UWord8
+VCMFecMethod::ConvertFECRate(WebRtc_UWord8 codeRateRTP) const
+{
+    return static_cast<WebRtc_UWord8> (VCM_MIN(255,(0.5 + 255.0 * codeRateRTP /
+                                      (float)(255 - codeRateRTP))));
+}
+
+// Update FEC with protectionFactorD
+void
+VCMFecMethod::UpdateProtectionFactorD(WebRtc_UWord8 protectionFactorD)
+{
+    _protectionFactorD = protectionFactorD;
+}
+
+// Update FEC with protectionFactorK
+void
+VCMFecMethod::UpdateProtectionFactorK(WebRtc_UWord8 protectionFactorK)
+{
+    _protectionFactorK = protectionFactorK;
+}
+
+// AvgRecoveryFEC: computes the residual packet loss (RPL) function.
+// This is the average recovery from the FEC, assuming random packet loss model.
+// Computed off-line for a range of FEC code parameters and loss rates.
+float
+VCMFecMethod::AvgRecoveryFEC(const VCMProtectionParameters* parameters) const
+{
+    // Total (avg) bits available per frame: total rate over actual/sent frame
+    // rate units are kbits/frame
+    const WebRtc_UWord16 bitRatePerFrame = static_cast<WebRtc_UWord16>
+                        (parameters->bitRate / (parameters->frameRate));
+
+    // Total (average) number of packets per frame (source and fec):
+    const WebRtc_UWord8 avgTotPackets = 1 + static_cast<WebRtc_UWord8>
+                        (static_cast<float> (bitRatePerFrame * 1000.0) /
+                         static_cast<float> (8.0 * _maxPayloadSize) + 0.5);
+
+    const float protectionFactor = static_cast<float>(_protectionFactorD) /
+                                                      255.0;
+
+    WebRtc_UWord8 fecPacketsPerFrame = static_cast<WebRtc_UWord8>
+                                      (0.5 + protectionFactor * avgTotPackets);
+
+    WebRtc_UWord8 sourcePacketsPerFrame = avgTotPackets - fecPacketsPerFrame;
+
+    if ( (fecPacketsPerFrame == 0) || (sourcePacketsPerFrame == 0) )
+    {
+        // No protection, or rate too low: so average recovery from FEC == 0.
+        return 0.0;
+    }
+
+    // Table defined up to kMaxNumPackets
+    if (sourcePacketsPerFrame > kMaxNumPackets)
+    {
+        sourcePacketsPerFrame = kMaxNumPackets;
+    }
+
+    // Table defined up to kMaxNumPackets
+    if (fecPacketsPerFrame > kMaxNumPackets)
+    {
+        fecPacketsPerFrame = kMaxNumPackets;
+    }
+
+    // Code index for tables: up to (kMaxNumPackets * kMaxNumPackets)
+    WebRtc_UWord16 codeIndexTable[kMaxNumPackets * kMaxNumPackets];
+    WebRtc_UWord16 k = 0;
+    for (WebRtc_UWord8 i = 1; i <= kMaxNumPackets; i++)
+    {
+        for (WebRtc_UWord8 j = 1; j <= i; j++)
+        {
+            codeIndexTable[(j - 1) * kMaxNumPackets + i - 1] = k;
+            k += 1;
+        }
+    }
+
+    WebRtc_UWord8 lossRate = static_cast<WebRtc_UWord8> (255.0 *
+                             parameters->lossPr + 0.5f);
+
+    // Constrain lossRate to 50%: tables defined up to 50%
+    if (lossRate >= kPacketLossMax)
+    {
+        lossRate = kPacketLossMax - 1;
+    }
+
+    const WebRtc_UWord16 codeIndex = (fecPacketsPerFrame - 1) * kMaxNumPackets +
+                                     (sourcePacketsPerFrame - 1);
+
+    const WebRtc_UWord16 indexTable = codeIndexTable[codeIndex] * kPacketLossMax +
+                                      lossRate;
+
+    // Check on table index
+    assert(indexTable < kSizeAvgFECRecoveryXOR);
+    float avgFecRecov = static_cast<float>(kAvgFECRecoveryXOR[indexTable]);
+
+    return avgFecRecov;
+}
+
+bool
+VCMFecMethod::ProtectionFactor(const VCMProtectionParameters* parameters)
+{
+    // FEC PROTECTION SETTINGS: varies with packet loss and bitrate
+
+    // No protection if (filtered) packetLoss is 0
+    WebRtc_UWord8 packetLoss = (WebRtc_UWord8) (255 * parameters->lossPr);
+    if (packetLoss == 0)
+    {
+        _protectionFactorK = 0;
+        _protectionFactorD = 0;
+         return true;
+    }
+
+    // Parameters for FEC setting:
+    // first partition size, thresholds, table pars, spatial resoln fac.
+
+    // First partition protection: ~ 20%
+    WebRtc_UWord8 firstPartitionProt = (WebRtc_UWord8) (255 * 0.20);
+
+    // Minimum protection level needed to generate one FEC packet for one
+    // source packet/frame (in RTP sender)
+    WebRtc_UWord8 minProtLevelFec = 85;
+
+    // Threshold on packetLoss and bitRrate/frameRate (=average #packets),
+    // above which we allocate protection to cover at least first partition.
+    WebRtc_UWord8 lossThr = 0;
+    WebRtc_UWord8 packetNumThr = 1;
+
+    // Parameters for range of rate index of table.
+    const WebRtc_UWord8 ratePar1 = 5;
+    const WebRtc_UWord8 ratePar2 = 49;
+
+    // Spatial resolution size, relative to a reference size.
+    float spatialSizeToRef = static_cast<float>
+                           (parameters->codecWidth * parameters->codecHeight) /
+                           (static_cast<float>(704 * 576));
+    // resolnFac: This parameter will generally increase/decrease the FEC rate
+    // (for fixed bitRate and packetLoss) based on system size.
+    // Use a smaller exponent (< 1) to control/soften system size effect.
+    const float resolnFac = 1.0 / powf(spatialSizeToRef, 0.3f);
+
+    const int bitRatePerFrame = BitsPerFrame(parameters);
+
+
+    // Average number of packets per frame (source and fec):
+    const WebRtc_UWord8 avgTotPackets = 1 + (WebRtc_UWord8)
+                                        ((float) bitRatePerFrame * 1000.0
+                                       / (float) (8.0 * _maxPayloadSize) + 0.5);
+
+    // FEC rate parameters: for P and I frame
+    WebRtc_UWord8 codeRateDelta = 0;
+    WebRtc_UWord8 codeRateKey = 0;
+
+    // Get index for table: the FEC protection depends on an effective rate.
+    // The range on the rate index corresponds to rates (bps)
+    // from ~200k to ~8000k, for 30fps
+    const WebRtc_UWord16 effRateFecTable = static_cast<WebRtc_UWord16>
+                                           (resolnFac * bitRatePerFrame);
+    WebRtc_UWord8 rateIndexTable =
+        (WebRtc_UWord8) VCM_MAX(VCM_MIN((effRateFecTable - ratePar1) /
+                                         ratePar1, ratePar2), 0);
+
+    // Restrict packet loss range to 50:
+    // current tables defined only up to 50%
+    if (packetLoss >= kPacketLossMax)
+    {
+        packetLoss = kPacketLossMax - 1;
+    }
+    WebRtc_UWord16 indexTable = rateIndexTable * kPacketLossMax + packetLoss;
+
+    // Check on table index
+    assert(indexTable < kSizeCodeRateXORTable);
+
+    // Protection factor for P frame
+    codeRateDelta = kCodeRateXORTable[indexTable];
+
+    if (packetLoss > lossThr && avgTotPackets > packetNumThr)
+    {
+        // Set a minimum based on first partition size.
+        if (codeRateDelta < firstPartitionProt)
+        {
+            codeRateDelta = firstPartitionProt;
+        }
+    }
+
+    // Check limit on amount of protection for P frame; 50% is max.
+    if (codeRateDelta >= kPacketLossMax)
+    {
+        codeRateDelta = kPacketLossMax - 1;
+    }
+
+    float adjustFec = 1.0f;
+    // Avoid additional adjustments when layers are active.
+    // TODO(mikhal/marco): Update adjusmtent based on layer info.
+    if (parameters->numLayers == 1)
+    {
+        adjustFec = _qmRobustness->AdjustFecFactor(codeRateDelta,
+                                                   parameters->bitRate,
+                                                   parameters->frameRate,
+                                                   parameters->rtt,
+                                                   packetLoss);
+    }
+
+    codeRateDelta = static_cast<WebRtc_UWord8>(codeRateDelta * adjustFec);
+
+    // For Key frame:
+    // Effectively at a higher rate, so we scale/boost the rate
+    // The boost factor may depend on several factors: ratio of packet
+    // number of I to P frames, how much protection placed on P frames, etc.
+    const WebRtc_UWord8 packetFrameDelta = (WebRtc_UWord8)
+                                           (0.5 + parameters->packetsPerFrame);
+    const WebRtc_UWord8 packetFrameKey = (WebRtc_UWord8)
+                                         (0.5 + parameters->packetsPerFrameKey);
+    const WebRtc_UWord8 boostKey = BoostCodeRateKey(packetFrameDelta,
+                                                    packetFrameKey);
+
+    rateIndexTable = (WebRtc_UWord8) VCM_MAX(VCM_MIN(
+                      1 + (boostKey * effRateFecTable - ratePar1) /
+                      ratePar1,ratePar2),0);
+    WebRtc_UWord16 indexTableKey = rateIndexTable * kPacketLossMax + packetLoss;
+
+    indexTableKey = VCM_MIN(indexTableKey, kSizeCodeRateXORTable);
+
+    // Check on table index
+    assert(indexTableKey < kSizeCodeRateXORTable);
+
+    // Protection factor for I frame
+    codeRateKey = kCodeRateXORTable[indexTableKey];
+
+    // Boosting for Key frame.
+    int boostKeyProt = _scaleProtKey * codeRateDelta;
+    if (boostKeyProt >= kPacketLossMax)
+    {
+        boostKeyProt = kPacketLossMax - 1;
+    }
+
+    // Make sure I frame protection is at least larger than P frame protection,
+    // and at least as high as filtered packet loss.
+    codeRateKey = static_cast<WebRtc_UWord8> (VCM_MAX(packetLoss,
+            VCM_MAX(boostKeyProt, codeRateKey)));
+
+    // Check limit on amount of protection for I frame: 50% is max.
+    if (codeRateKey >= kPacketLossMax)
+    {
+        codeRateKey = kPacketLossMax - 1;
+    }
+
+    _protectionFactorK = codeRateKey;
+    _protectionFactorD = codeRateDelta;
+
+    // Generally there is a rate mis-match between the FEC cost estimated
+    // in mediaOpt and the actual FEC cost sent out in RTP module.
+    // This is more significant at low rates (small # of source packets), where
+    // the granularity of the FEC decreases. In this case, non-zero protection
+    // in mediaOpt may generate 0 FEC packets in RTP sender (since actual #FEC
+    // is based on rounding off protectionFactor on actual source packet number).
+    // The correction factor (_corrFecCost) attempts to corrects this, at least
+    // for cases of low rates (small #packets) and low protection levels.
+
+    float numPacketsFl = 1.0f + ((float) bitRatePerFrame * 1000.0
+                                / (float) (8.0 * _maxPayloadSize) + 0.5);
+
+    const float estNumFecGen = 0.5f + static_cast<float> (_protectionFactorD *
+                                                         numPacketsFl / 255.0f);
+
+
+    // We reduce cost factor (which will reduce overhead for FEC and
+    // hybrid method) and not the protectionFactor.
+    _corrFecCost = 1.0f;
+    if (estNumFecGen < 1.1f && _protectionFactorD < minProtLevelFec)
+    {
+        _corrFecCost = 0.5f;
+    }
+    if (estNumFecGen < 0.9f && _protectionFactorD < minProtLevelFec)
+    {
+        _corrFecCost = 0.0f;
+    }
+
+     // TODO (marpan): Set the UEP protection on/off for Key and Delta frames
+    _useUepProtectionK = _qmRobustness->SetUepProtection(codeRateKey,
+                                                         parameters->bitRate,
+                                                         packetLoss,
+                                                         0);
+
+    _useUepProtectionD = _qmRobustness->SetUepProtection(codeRateDelta,
+                                                         parameters->bitRate,
+                                                         packetLoss,
+                                                         1);
+
+    // DONE WITH FEC PROTECTION SETTINGS
+    return true;
+}
+
+int VCMFecMethod::BitsPerFrame(const VCMProtectionParameters* parameters) {
+  // When temporal layers are available FEC will only be applied on the base
+  // layer.
+  const float bitRateRatio =
+    kVp8LayerRateAlloction[parameters->numLayers - 1][0];
+  float frameRateRatio = powf(1 / 2.0, parameters->numLayers - 1);
+  float bitRate = parameters->bitRate * bitRateRatio;
+  float frameRate = parameters->frameRate * frameRateRatio;
+
+  // TODO(mikhal): Update factor following testing.
+  float adjustmentFactor = 1;
+
+  // Average bits per frame (units of kbits)
+  return static_cast<int>(adjustmentFactor * bitRate / frameRate);
+}
+
+bool
+VCMFecMethod::EffectivePacketLoss(const VCMProtectionParameters* parameters)
+{
+    // Effective packet loss to encoder is based on RPL (residual packet loss)
+    // this is a soft setting based on degree of FEC protection
+    // RPL = received/input packet loss - average_FEC_recovery
+    // note: received/input packet loss may be filtered based on FilteredLoss
+
+    // The packet loss:
+    WebRtc_UWord8 packetLoss = (WebRtc_UWord8) (255 * parameters->lossPr);
+
+    float avgFecRecov = AvgRecoveryFEC(parameters);
+
+    // Residual Packet Loss:
+    _residualPacketLossFec = (float) (packetLoss - avgFecRecov) / 255.0f;
+
+    // Effective Packet Loss, NA in current version.
+    _effectivePacketLoss = 0;
+
+    return true;
+}
+
+bool
+VCMFecMethod::UpdateParameters(const VCMProtectionParameters* parameters)
+{
+    // Compute the protection factor
+    ProtectionFactor(parameters);
+
+    // Compute the effective packet loss
+    EffectivePacketLoss(parameters);
+
+    // Compute the bit cost
+    // Ignore key frames for now.
+    float fecRate = static_cast<float> (_protectionFactorD) / 255.0f;
+    if (fecRate >= 0.0f)
+    {
+        // use this formula if the fecRate (protection factor) is defined
+        // relative to number of source packets
+        // this is the case for the previous tables:
+        // _efficiency = parameters->bitRate * ( 1.0 - 1.0 / (1.0 + fecRate));
+
+        // in the new tables, the fecRate is defined relative to total number of
+        // packets (total rate), so overhead cost is:
+        _efficiency = parameters->bitRate * fecRate * _corrFecCost;
+    }
+    else
+    {
+        _efficiency = 0.0f;
+    }
+
+    // Protection/fec rates obtained above is defined relative to total number
+    // of packets (total rate: source+fec) FEC in RTP module assumes protection
+    // factor is defined relative to source number of packets so we should
+    // convert the factor to reduce mismatch between mediaOpt suggested rate and
+    // the actual rate
+    _protectionFactorK = ConvertFECRate(_protectionFactorK);
+    _protectionFactorD = ConvertFECRate(_protectionFactorD);
+
+    return true;
+}
+VCMLossProtectionLogic::VCMLossProtectionLogic(int64_t nowMs):
+_selectedMethod(NULL),
+_currentParameters(),
+_rtt(0),
+_lossPr(0.0f),
+_bitRate(0.0f),
+_frameRate(0.0f),
+_keyFrameSize(0.0f),
+_fecRateKey(0),
+_fecRateDelta(0),
+_lastPrUpdateT(0),
+_lossPr255(0.9999f),
+_lossPrHistory(),
+_shortMaxLossPr255(0),
+_packetsPerFrame(0.9999f),
+_packetsPerFrameKey(0.9999f),
+_residualPacketLossFec(0),
+_codecWidth(0),
+_codecHeight(0),
+_numLayers(1)
+{
+    Reset(nowMs);
+}
+
+VCMLossProtectionLogic::~VCMLossProtectionLogic()
+{
+    Release();
+}
+
+bool
+VCMLossProtectionLogic::SetMethod(enum VCMProtectionMethodEnum newMethodType)
+{
+    if (_selectedMethod != NULL)
+    {
+        if (_selectedMethod->Type() == newMethodType)
+        {
+            // Nothing to update
+            return false;
+        }
+        // New method - delete existing one
+        delete _selectedMethod;
+    }
+    VCMProtectionMethod *newMethod = NULL;
+    switch (newMethodType)
+    {
+        case kNack:
+        {
+            newMethod = new VCMNackMethod();
+            break;
+        }
+        case kFec:
+        {
+            newMethod  = new VCMFecMethod();
+            break;
+        }
+        case kNackFec:
+        {
+            // Default to always having NACK enabled for the hybrid mode.
+            newMethod =  new VCMNackFecMethod(kLowRttNackMs, -1);
+            break;
+        }
+        default:
+        {
+          return false;
+          break;
+        }
+
+    }
+    _selectedMethod = newMethod;
+    return true;
+}
+bool
+VCMLossProtectionLogic::RemoveMethod(enum VCMProtectionMethodEnum method)
+{
+    if (_selectedMethod == NULL)
+    {
+        return false;
+    }
+    else if (_selectedMethod->Type() == method)
+    {
+        delete _selectedMethod;
+        _selectedMethod = NULL;
+    }
+    return true;
+}
+
+float
+VCMLossProtectionLogic::RequiredBitRate() const
+{
+    float RequiredBitRate = 0.0f;
+    if (_selectedMethod != NULL)
+    {
+        RequiredBitRate = _selectedMethod->RequiredBitRate();
+    }
+    return RequiredBitRate;
+}
+
+void
+VCMLossProtectionLogic::UpdateRtt(WebRtc_UWord32 rtt)
+{
+    _rtt = rtt;
+}
+
+void
+VCMLossProtectionLogic::UpdateResidualPacketLoss(float residualPacketLoss)
+{
+    _residualPacketLossFec = residualPacketLoss;
+}
+
+void
+VCMLossProtectionLogic::UpdateMaxLossHistory(WebRtc_UWord8 lossPr255,
+                                             WebRtc_Word64 now)
+{
+    if (_lossPrHistory[0].timeMs >= 0 &&
+        now - _lossPrHistory[0].timeMs < kLossPrShortFilterWinMs)
+    {
+        if (lossPr255 > _shortMaxLossPr255)
+        {
+            _shortMaxLossPr255 = lossPr255;
+        }
+    }
+    else
+    {
+        // Only add a new value to the history once a second
+        if (_lossPrHistory[0].timeMs == -1)
+        {
+            // First, no shift
+            _shortMaxLossPr255 = lossPr255;
+        }
+        else
+        {
+            // Shift
+            for (WebRtc_Word32 i = (kLossPrHistorySize - 2); i >= 0; i--)
+            {
+                _lossPrHistory[i + 1].lossPr255 = _lossPrHistory[i].lossPr255;
+                _lossPrHistory[i + 1].timeMs = _lossPrHistory[i].timeMs;
+            }
+        }
+        if (_shortMaxLossPr255 == 0)
+        {
+            _shortMaxLossPr255 = lossPr255;
+        }
+
+        _lossPrHistory[0].lossPr255 = _shortMaxLossPr255;
+        _lossPrHistory[0].timeMs = now;
+        _shortMaxLossPr255 = 0;
+    }
+}
+
+WebRtc_UWord8
+VCMLossProtectionLogic::MaxFilteredLossPr(WebRtc_Word64 nowMs) const
+{
+    WebRtc_UWord8 maxFound = _shortMaxLossPr255;
+    if (_lossPrHistory[0].timeMs == -1)
+    {
+        return maxFound;
+    }
+    for (WebRtc_Word32 i = 0; i < kLossPrHistorySize; i++)
+    {
+        if (_lossPrHistory[i].timeMs == -1)
+        {
+            break;
+        }
+        if (nowMs - _lossPrHistory[i].timeMs >
+            kLossPrHistorySize * kLossPrShortFilterWinMs)
+        {
+            // This sample (and all samples after this) is too old
+            break;
+        }
+        if (_lossPrHistory[i].lossPr255 > maxFound)
+        {
+            // This sample is the largest one this far into the history
+            maxFound = _lossPrHistory[i].lossPr255;
+        }
+    }
+    return maxFound;
+}
+
+WebRtc_UWord8 VCMLossProtectionLogic::FilteredLoss(
+    int64_t nowMs,
+    FilterPacketLossMode filter_mode,
+    WebRtc_UWord8 lossPr255) {
+
+  // Update the max window filter.
+  UpdateMaxLossHistory(lossPr255, nowMs);
+
+  // Update the recursive average filter.
+  _lossPr255.Apply(static_cast<float> (nowMs - _lastPrUpdateT),
+                   static_cast<float> (lossPr255));
+  _lastPrUpdateT = nowMs;
+
+  // Filtered loss: default is received loss (no filtering).
+  WebRtc_UWord8 filtered_loss = lossPr255;
+
+  switch (filter_mode) {
+    case kNoFilter:
+      break;
+    case kAvgFilter:
+      filtered_loss = static_cast<WebRtc_UWord8> (_lossPr255.Value() + 0.5);
+      break;
+    case kMaxFilter:
+      filtered_loss = MaxFilteredLossPr(nowMs);
+      break;
+  }
+
+  return filtered_loss;
+}
+
+void
+VCMLossProtectionLogic::UpdateFilteredLossPr(WebRtc_UWord8 packetLossEnc)
+{
+    _lossPr = (float) packetLossEnc / (float) 255.0;
+}
+
+void
+VCMLossProtectionLogic::UpdateBitRate(float bitRate)
+{
+    _bitRate = bitRate;
+}
+
+void
+VCMLossProtectionLogic::UpdatePacketsPerFrame(float nPackets, int64_t nowMs)
+{
+    _packetsPerFrame.Apply(static_cast<float>(nowMs - _lastPacketPerFrameUpdateT),
+                           nPackets);
+    _lastPacketPerFrameUpdateT = nowMs;
+}
+
+void
+VCMLossProtectionLogic::UpdatePacketsPerFrameKey(float nPackets, int64_t nowMs)
+{
+    _packetsPerFrameKey.Apply(static_cast<float>(nowMs -
+                              _lastPacketPerFrameUpdateTKey), nPackets);
+    _lastPacketPerFrameUpdateTKey = nowMs;
+}
+
+void
+VCMLossProtectionLogic::UpdateKeyFrameSize(float keyFrameSize)
+{
+    _keyFrameSize = keyFrameSize;
+}
+
+void
+VCMLossProtectionLogic::UpdateFrameSize(WebRtc_UWord16 width,
+                                        WebRtc_UWord16 height)
+{
+    _codecWidth = width;
+    _codecHeight = height;
+}
+
+void VCMLossProtectionLogic::UpdateNumLayers(int numLayers) {
+  _numLayers = (numLayers == 0) ? 1 : numLayers;
+}
+
+bool
+VCMLossProtectionLogic::UpdateMethod()
+{
+    if (_selectedMethod == NULL)
+    {
+        return false;
+    }
+    _currentParameters.rtt = _rtt;
+    _currentParameters.lossPr = _lossPr;
+    _currentParameters.bitRate = _bitRate;
+    _currentParameters.frameRate = _frameRate; // rename actual frame rate?
+    _currentParameters.keyFrameSize = _keyFrameSize;
+    _currentParameters.fecRateDelta = _fecRateDelta;
+    _currentParameters.fecRateKey = _fecRateKey;
+    _currentParameters.packetsPerFrame = _packetsPerFrame.Value();
+    _currentParameters.packetsPerFrameKey = _packetsPerFrameKey.Value();
+    _currentParameters.residualPacketLossFec = _residualPacketLossFec;
+    _currentParameters.codecWidth = _codecWidth;
+    _currentParameters.codecHeight = _codecHeight;
+    _currentParameters.numLayers = _numLayers;
+    return _selectedMethod->UpdateParameters(&_currentParameters);
+}
+
+VCMProtectionMethod*
+VCMLossProtectionLogic::SelectedMethod() const
+{
+    return _selectedMethod;
+}
+
+VCMProtectionMethodEnum
+VCMLossProtectionLogic::SelectedType() const
+{
+    return _selectedMethod->Type();
+}
+
+void
+VCMLossProtectionLogic::Reset(int64_t nowMs)
+{
+    _lastPrUpdateT = nowMs;
+    _lastPacketPerFrameUpdateT = nowMs;
+    _lastPacketPerFrameUpdateTKey = nowMs;
+    _lossPr255.Reset(0.9999f);
+    _packetsPerFrame.Reset(0.9999f);
+    _fecRateDelta = _fecRateKey = 0;
+    for (WebRtc_Word32 i = 0; i < kLossPrHistorySize; i++)
+    {
+        _lossPrHistory[i].lossPr255 = 0;
+        _lossPrHistory[i].timeMs = -1;
+    }
+    _shortMaxLossPr255 = 0;
+    Release();
+}
+
+void
+VCMLossProtectionLogic::Release()
+{
+    delete _selectedMethod;
+    _selectedMethod = NULL;
+}
+
+}
diff --git a/src/modules/video_coding/main/source/media_opt_util.h b/src/modules/video_coding/main/source/media_opt_util.h
new file mode 100644
index 0000000..7cf97fb
--- /dev/null
+++ b/src/modules/video_coding/main/source/media_opt_util.h
@@ -0,0 +1,395 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_MEDIA_OPT_UTIL_H_
+#define WEBRTC_MODULES_VIDEO_CODING_MEDIA_OPT_UTIL_H_
+
+#include "typedefs.h"
+#include "trace.h"
+#include "exp_filter.h"
+#include "internal_defines.h"
+#include "qm_select.h"
+
+#include <cmath>
+#include <cstdlib>
+
+
+namespace webrtc
+{
+// Number of time periods used for (max) window filter for packet loss
+// TODO (marpan): set reasonable window size for filtered packet loss,
+// adjustment should be based on logged/real data of loss stats/correlation.
+enum { kLossPrHistorySize = 10 };
+
+// 1000 ms, total filter length is (kLossPrHistorySize * 1000) ms
+enum { kLossPrShortFilterWinMs = 1000 };
+
+// The type of filter used on the received packet loss reports.
+enum FilterPacketLossMode {
+  kNoFilter,    // No filtering on received loss.
+  kAvgFilter,   // Recursive average filter.
+  kMaxFilter    // Max-window filter, over the time interval of:
+                // (kLossPrHistorySize * kLossPrShortFilterWinMs) ms.
+};
+
+// Thresholds for hybrid NACK/FEC
+// common to media optimization and the jitter buffer.
+enum HybridNackTH {
+    kHighRttNackMs = 100,
+    kLowRttNackMs = 20
+};
+
+struct VCMProtectionParameters
+{
+    VCMProtectionParameters() : rtt(0), lossPr(0.0f), bitRate(0.0f),
+        packetsPerFrame(0.0f), packetsPerFrameKey(0.0f), frameRate(0.0f),
+        keyFrameSize(0.0f), fecRateDelta(0), fecRateKey(0),
+        residualPacketLossFec(0.0f), codecWidth(0), codecHeight(0),
+        numLayers(1)
+        {}
+
+    int                 rtt;
+    float               lossPr;
+    float               bitRate;
+    float               packetsPerFrame;
+    float               packetsPerFrameKey;
+    float               frameRate;
+    float               keyFrameSize;
+    WebRtc_UWord8       fecRateDelta;
+    WebRtc_UWord8       fecRateKey;
+    float               residualPacketLossFec;
+    WebRtc_UWord16      codecWidth;
+    WebRtc_UWord16      codecHeight;
+    int                 numLayers;
+};
+
+
+/******************************/
+/* VCMProtectionMethod class    */
+/****************************/
+
+enum VCMProtectionMethodEnum
+{
+    kNack,
+    kFec,
+    kNackFec,
+    kNone
+};
+
+class VCMLossProbabilitySample
+{
+public:
+    VCMLossProbabilitySample() : lossPr255(0), timeMs(-1) {};
+
+    WebRtc_UWord8     lossPr255;
+    WebRtc_Word64     timeMs;
+};
+
+
+class VCMProtectionMethod
+{
+public:
+    VCMProtectionMethod();
+    virtual ~VCMProtectionMethod();
+
+    // Updates the efficiency of the method using the parameters provided
+    //
+    // Input:
+    //         - parameters         : Parameters used to calculate efficiency
+    //
+    // Return value                 : True if this method is recommended in
+    //                                the given conditions.
+    virtual bool UpdateParameters(const VCMProtectionParameters* parameters) = 0;
+
+    // Returns the protection type
+    //
+    // Return value                 : The protection type
+    enum VCMProtectionMethodEnum Type() const { return _type; }
+
+    // Returns the bit rate required by this protection method
+    // during these conditions.
+    //
+    // Return value                 : Required bit rate
+    virtual float RequiredBitRate() { return _efficiency; }
+
+    // Returns the effective packet loss for ER, required by this protection method
+    //
+    // Return value                 : Required effective packet loss
+    virtual WebRtc_UWord8 RequiredPacketLossER() { return _effectivePacketLoss; }
+
+    // Extracts the FEC protection factor for Key frame, required by this protection method
+    //
+    // Return value                 : Required protectionFactor for Key frame
+    virtual WebRtc_UWord8 RequiredProtectionFactorK() { return _protectionFactorK; }
+
+    // Extracts the FEC protection factor for Delta frame, required by this protection method
+    //
+    // Return value                 : Required protectionFactor for delta frame
+    virtual WebRtc_UWord8 RequiredProtectionFactorD() { return _protectionFactorD; }
+
+    // Extracts whether the FEC Unequal protection (UEP) is used for Key frame.
+    //
+    // Return value                 : Required Unequal protection on/off state.
+    virtual bool RequiredUepProtectionK() { return _useUepProtectionK; }
+
+    // Extracts whether the the FEC Unequal protection (UEP) is used for Delta frame.
+    //
+    // Return value                 : Required Unequal protection on/off state.
+    virtual bool RequiredUepProtectionD() { return _useUepProtectionD; }
+
+    virtual int MaxFramesFec() const { return 1; }
+
+    // Updates content metrics
+    void UpdateContentMetrics(const VideoContentMetrics* contentMetrics);
+
+protected:
+
+    WebRtc_UWord8                        _effectivePacketLoss;
+    WebRtc_UWord8                        _protectionFactorK;
+    WebRtc_UWord8                        _protectionFactorD;
+    // Estimation of residual loss after the FEC
+    float                                _residualPacketLossFec;
+    float                                _scaleProtKey;
+    WebRtc_Word32                        _maxPayloadSize;
+
+    VCMQmRobustness*                     _qmRobustness;
+    bool                                 _useUepProtectionK;
+    bool                                 _useUepProtectionD;
+    float                                _corrFecCost;
+    enum VCMProtectionMethodEnum         _type;
+    float                                _efficiency;
+};
+
+class VCMNackMethod : public VCMProtectionMethod
+{
+public:
+    VCMNackMethod();
+    virtual ~VCMNackMethod();
+    virtual bool UpdateParameters(const VCMProtectionParameters* parameters);
+    // Get the effective packet loss
+    bool EffectivePacketLoss(const VCMProtectionParameters* parameter);
+};
+
+class VCMFecMethod : public VCMProtectionMethod
+{
+public:
+    VCMFecMethod();
+    virtual ~VCMFecMethod();
+    virtual bool UpdateParameters(const VCMProtectionParameters* parameters);
+    // Get the effective packet loss for ER
+    bool EffectivePacketLoss(const VCMProtectionParameters* parameters);
+    // Get the FEC protection factors
+    bool ProtectionFactor(const VCMProtectionParameters* parameters);
+    // Get the boost for key frame protection
+    WebRtc_UWord8 BoostCodeRateKey(WebRtc_UWord8 packetFrameDelta,
+                                   WebRtc_UWord8 packetFrameKey) const;
+    // Convert the rates: defined relative to total# packets or source# packets
+    WebRtc_UWord8 ConvertFECRate(WebRtc_UWord8 codeRate) const;
+    // Get the average effective recovery from FEC: for random loss model
+    float AvgRecoveryFEC(const VCMProtectionParameters* parameters) const;
+    // Update FEC with protectionFactorD
+    void UpdateProtectionFactorD(WebRtc_UWord8 protectionFactorD);
+    // Update FEC with protectionFactorK
+    void UpdateProtectionFactorK(WebRtc_UWord8 protectionFactorK);
+    // Compute the bits per frame. Account for temporal layers when applicable.
+    int BitsPerFrame(const VCMProtectionParameters* parameters);
+
+protected:
+    enum { kUpperLimitFramesFec = 6 };
+    // Thresholds values for the bytes/frame and round trip time, below which we
+    // may turn off FEC, depending on |_numLayers| and |_maxFramesFec|.
+    // Max bytes/frame for VGA, corresponds to ~140k at 25fps.
+    enum { kMaxBytesPerFrameForFec = 700 };
+    // Max bytes/frame for CIF and lower: corresponds to ~80k at 25fps.
+    enum { kMaxBytesPerFrameForFecLow = 400 };
+    // Max bytes/frame for frame size larger than VGA, ~200k at 25fps.
+    enum { kMaxBytesPerFrameForFecHigh = 1000 };
+    // Max round trip time threshold in ms.
+    enum { kMaxRttTurnOffFec = 200 };
+};
+
+
+class VCMNackFecMethod : public VCMFecMethod
+{
+public:
+    VCMNackFecMethod(int lowRttNackThresholdMs,
+                     int highRttNackThresholdMs);
+    virtual ~VCMNackFecMethod();
+    virtual bool UpdateParameters(const VCMProtectionParameters* parameters);
+    // Get the effective packet loss for ER
+    bool EffectivePacketLoss(const VCMProtectionParameters* parameters);
+    // Get the protection factors
+    bool ProtectionFactor(const VCMProtectionParameters* parameters);
+    // Get the max number of frames the FEC is allowed to be based on.
+    int MaxFramesFec() const;
+    // Turn off the FEC based on low bitrate and other factors.
+    bool BitRateTooLowForFec(const VCMProtectionParameters* parameters);
+private:
+    int ComputeMaxFramesFec(const VCMProtectionParameters* parameters);
+
+    int _lowRttNackMs;
+    int _highRttNackMs;
+    int _maxFramesFec;
+};
+
+class VCMLossProtectionLogic
+{
+public:
+    VCMLossProtectionLogic(int64_t nowMs);
+    ~VCMLossProtectionLogic();
+
+    // Set the protection method to be used
+    //
+    // Input:
+    //        - newMethodType    : New requested protection method type. If one
+    //                           is already set, it will be deleted and replaced
+    // Return value:             Returns true on update
+    bool SetMethod(enum VCMProtectionMethodEnum newMethodType);
+
+    // Remove requested protection method
+    // Input:
+    //        - method          : method to be removed (if currently selected)
+    //
+    // Return value:             Returns true on update
+    bool RemoveMethod(enum VCMProtectionMethodEnum method);
+
+    // Return required bit rate per selected protectin method
+    float RequiredBitRate() const;
+
+    // Update the round-trip time
+    //
+    // Input:
+    //          - rtt           : Round-trip time in seconds.
+    void UpdateRtt(WebRtc_UWord32 rtt);
+
+    // Update residual packet loss
+    //
+    // Input:
+    //          - residualPacketLoss  : residual packet loss:
+    //                                  effective loss after FEC recovery
+    void UpdateResidualPacketLoss(float _residualPacketLoss);
+
+    // Update the filtered packet loss.
+    //
+    // Input:
+    //          - packetLossEnc :  The reported packet loss filtered
+    //                             (max window or average)
+    void UpdateFilteredLossPr(WebRtc_UWord8 packetLossEnc);
+
+    // Update the current target bit rate.
+    //
+    // Input:
+    //          - bitRate          : The current target bit rate in kbits/s
+    void UpdateBitRate(float bitRate);
+
+    // Update the number of packets per frame estimate, for delta frames
+    //
+    // Input:
+    //          - nPackets         : Number of packets in the latest sent frame.
+    void UpdatePacketsPerFrame(float nPackets, int64_t nowMs);
+
+   // Update the number of packets per frame estimate, for key frames
+    //
+    // Input:
+    //          - nPackets         : umber of packets in the latest sent frame.
+    void UpdatePacketsPerFrameKey(float nPackets, int64_t nowMs);
+
+    // Update the keyFrameSize estimate
+    //
+    // Input:
+    //          - keyFrameSize     : The size of the latest sent key frame.
+    void UpdateKeyFrameSize(float keyFrameSize);
+
+    // Update the frame rate
+    //
+    // Input:
+    //          - frameRate        : The current target frame rate.
+    void UpdateFrameRate(float frameRate) { _frameRate = frameRate; }
+
+    // Update the frame size
+    //
+    // Input:
+    //          - width        : The codec frame width.
+    //          - height       : The codec frame height.
+    void UpdateFrameSize(WebRtc_UWord16 width, WebRtc_UWord16 height);
+
+    // Update the number of active layers
+    //
+    // Input:
+    //          - numLayers    : Number of layers used.
+    void UpdateNumLayers(int numLayers);
+
+    // The amount of packet loss to cover for with FEC.
+    //
+    // Input:
+    //          - fecRateKey      : Packet loss to cover for with FEC when
+    //                              sending key frames.
+    //          - fecRateDelta    : Packet loss to cover for with FEC when
+    //                              sending delta frames.
+    void UpdateFECRates(WebRtc_UWord8 fecRateKey, WebRtc_UWord8 fecRateDelta)
+                       { _fecRateKey = fecRateKey;
+                         _fecRateDelta = fecRateDelta; }
+
+    // Update the protection methods with the current VCMProtectionParameters
+    // and set the requested protection settings.
+    // Return value     : Returns true on update
+    bool UpdateMethod();
+
+    // Returns the method currently selected.
+    //
+    // Return value                 : The protection method currently selected.
+    VCMProtectionMethod* SelectedMethod() const;
+
+    // Return the protection type of the currently selected method
+    VCMProtectionMethodEnum SelectedType() const;
+
+    // Updates the filtered loss for the average and max window packet loss,
+    // and returns the filtered loss probability in the interval [0, 255].
+    // The returned filtered loss value depends on the parameter |filter_mode|.
+    // The input parameter |lossPr255| is the received packet loss.
+
+    // Return value                 : The filtered loss probability
+    WebRtc_UWord8 FilteredLoss(int64_t nowMs, FilterPacketLossMode filter_mode,
+                               WebRtc_UWord8 lossPr255);
+
+    void Reset(int64_t nowMs);
+
+    void Release();
+
+private:
+    // Sets the available loss protection methods.
+    void UpdateMaxLossHistory(WebRtc_UWord8 lossPr255, WebRtc_Word64 now);
+    WebRtc_UWord8 MaxFilteredLossPr(WebRtc_Word64 nowMs) const;
+    VCMProtectionMethod*      _selectedMethod;
+    VCMProtectionParameters   _currentParameters;
+    WebRtc_UWord32            _rtt;
+    float                     _lossPr;
+    float                     _bitRate;
+    float                     _frameRate;
+    float                     _keyFrameSize;
+    WebRtc_UWord8             _fecRateKey;
+    WebRtc_UWord8             _fecRateDelta;
+    WebRtc_Word64             _lastPrUpdateT;
+    WebRtc_Word64             _lastPacketPerFrameUpdateT;
+    WebRtc_Word64             _lastPacketPerFrameUpdateTKey;
+    VCMExpFilter              _lossPr255;
+    VCMLossProbabilitySample  _lossPrHistory[kLossPrHistorySize];
+    WebRtc_UWord8             _shortMaxLossPr255;
+    VCMExpFilter              _packetsPerFrame;
+    VCMExpFilter              _packetsPerFrameKey;
+    float                     _residualPacketLossFec;
+    WebRtc_UWord16            _codecWidth;
+    WebRtc_UWord16            _codecHeight;
+    int                       _numLayers;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_MEDIA_OPT_UTIL_H_
diff --git a/src/modules/video_coding/main/source/media_optimization.cc b/src/modules/video_coding/main/source/media_optimization.cc
new file mode 100644
index 0000000..b2ed54c
--- /dev/null
+++ b/src/modules/video_coding/main/source/media_optimization.cc
@@ -0,0 +1,672 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "media_optimization.h"
+
+#include "content_metrics_processing.h"
+#include "frame_dropper.h"
+#include "qm_select.h"
+#include "modules/video_coding/main/source/tick_time_base.h"
+
+namespace webrtc {
+
+VCMMediaOptimization::VCMMediaOptimization(WebRtc_Word32 id,
+                                           TickTimeBase* clock):
+_id(id),
+_clock(clock),
+_maxBitRate(0),
+_sendCodecType(kVideoCodecUnknown),
+_codecWidth(0),
+_codecHeight(0),
+_userFrameRate(0),
+_fractionLost(0),
+_sendStatisticsZeroEncode(0),
+_maxPayloadSize(1460),
+_targetBitRate(0),
+_incomingFrameRate(0),
+_enableQm(false),
+_videoProtectionCallback(NULL),
+_videoQMSettingsCallback(NULL),
+_encodedFrameSamples(),
+_avgSentBitRateBps(0.0f),
+_keyFrameCnt(0),
+_deltaFrameCnt(0),
+_lastQMUpdateTime(0),
+_lastChangeTime(0),
+_numLayers(0)
+{
+    memset(_sendStatistics, 0, sizeof(_sendStatistics));
+    memset(_incomingFrameTimes, -1, sizeof(_incomingFrameTimes));
+
+    _frameDropper  = new VCMFrameDropper(_id);
+    _lossProtLogic = new VCMLossProtectionLogic(_clock->MillisecondTimestamp());
+    _content = new VCMContentMetricsProcessing();
+    _qmResolution = new VCMQmResolution();
+}
+
+VCMMediaOptimization::~VCMMediaOptimization(void)
+{
+    _lossProtLogic->Release();
+    delete _lossProtLogic;
+    delete _frameDropper;
+    delete _content;
+    delete _qmResolution;
+}
+
+WebRtc_Word32
+VCMMediaOptimization::Reset()
+{
+    memset(_incomingFrameTimes, -1, sizeof(_incomingFrameTimes));
+    _incomingFrameRate = 0.0;
+    _frameDropper->Reset();
+    _lossProtLogic->Reset(_clock->MillisecondTimestamp());
+    _frameDropper->SetRates(0, 0);
+    _content->Reset();
+    _qmResolution->Reset();
+    _lossProtLogic->UpdateFrameRate(_incomingFrameRate);
+    _lossProtLogic->Reset(_clock->MillisecondTimestamp());
+    _sendStatisticsZeroEncode = 0;
+    _targetBitRate = 0;
+    _codecWidth = 0;
+    _codecHeight = 0;
+    _userFrameRate = 0;
+    _keyFrameCnt = 0;
+    _deltaFrameCnt = 0;
+    _lastQMUpdateTime = 0;
+    _lastChangeTime = 0;
+    for (WebRtc_Word32 i = 0; i < kBitrateMaxFrameSamples; i++)
+    {
+        _encodedFrameSamples[i]._sizeBytes = -1;
+        _encodedFrameSamples[i]._timeCompleteMs = -1;
+    }
+    _avgSentBitRateBps = 0.0f;
+    _numLayers = 1;
+    return VCM_OK;
+}
+
+WebRtc_UWord32
+VCMMediaOptimization::SetTargetRates(WebRtc_UWord32 bitRate,
+                                     WebRtc_UWord8 &fractionLost,
+                                     WebRtc_UWord32 roundTripTimeMs)
+{
+    VCMProtectionMethod *selectedMethod = _lossProtLogic->SelectedMethod();
+    _lossProtLogic->UpdateBitRate(static_cast<float>(bitRate));
+    _lossProtLogic->UpdateRtt(roundTripTimeMs);
+    _lossProtLogic->UpdateResidualPacketLoss(static_cast<float>(fractionLost));
+
+    // Get frame rate for encoder: this is the actual/sent frame rate
+    float actualFrameRate = SentFrameRate();
+
+    // sanity
+    if (actualFrameRate  < 1.0)
+    {
+        actualFrameRate = 1.0;
+    }
+
+    // Update frame rate for the loss protection logic class: frame rate should
+    // be the actual/sent rate
+    _lossProtLogic->UpdateFrameRate(actualFrameRate);
+
+    _fractionLost = fractionLost;
+
+    // Returns the filtered packet loss, used for the protection setting.
+    // The filtered loss may be the received loss (no filter), or some
+    // filtered value (average or max window filter).
+    // Use max window filter for now.
+    FilterPacketLossMode filter_mode = kMaxFilter;
+    WebRtc_UWord8 packetLossEnc = _lossProtLogic->FilteredLoss(
+        _clock->MillisecondTimestamp(), filter_mode, fractionLost);
+
+    // For now use the filtered loss for computing the robustness settings
+    _lossProtLogic->UpdateFilteredLossPr(packetLossEnc);
+
+    // Rate cost of the protection methods
+    uint32_t protection_overhead_kbps = 0;
+
+    // Update protection settings, when applicable
+    float sent_video_rate = 0.0f;
+    if (selectedMethod)
+    {
+        // Update protection method with content metrics
+        selectedMethod->UpdateContentMetrics(_content->ShortTermAvgData());
+
+        // Update method will compute the robustness settings for the given
+        // protection method and the overhead cost
+        // the protection method is set by the user via SetVideoProtection.
+        _lossProtLogic->UpdateMethod();
+
+        // Update protection callback with protection settings.
+        uint32_t sent_video_rate_bps = 0;
+        uint32_t sent_nack_rate_bps = 0;
+        uint32_t sent_fec_rate_bps = 0;
+        // Get the bit cost of protection method, based on the amount of
+        // overhead data actually transmitted (including headers) the last
+        // second.
+        UpdateProtectionCallback(selectedMethod,
+                                 &sent_video_rate_bps,
+                                 &sent_nack_rate_bps,
+                                 &sent_fec_rate_bps);
+        uint32_t sent_total_rate_bps = sent_video_rate_bps +
+            sent_nack_rate_bps + sent_fec_rate_bps;
+        // Estimate the overhead costs of the next second as staying the same
+        // wrt the source bitrate.
+        if (sent_total_rate_bps > 0) {
+          protection_overhead_kbps = static_cast<uint32_t>(bitRate *
+              static_cast<double>(sent_nack_rate_bps + sent_fec_rate_bps) /
+              sent_total_rate_bps + 0.5);
+        }
+        // Cap the overhead estimate to 50%.
+        if (protection_overhead_kbps > bitRate / 2)
+          protection_overhead_kbps = bitRate / 2;
+
+        // Get the effective packet loss for encoder ER
+        // when applicable, should be passed to encoder via fractionLost
+        packetLossEnc = selectedMethod->RequiredPacketLossER();
+        sent_video_rate =  static_cast<float>(sent_video_rate_bps / 1000.0);
+    }
+
+    // Source coding rate: total rate - protection overhead
+    _targetBitRate = bitRate - protection_overhead_kbps;
+
+    // Update encoding rates following protection settings
+    _frameDropper->SetRates(static_cast<float>(_targetBitRate), 0);
+
+    if (_enableQm)
+    {
+        // Update QM with rates
+        _qmResolution->UpdateRates((float)_targetBitRate, sent_video_rate,
+                                  _incomingFrameRate, _fractionLost);
+        // Check for QM selection
+        bool selectQM = checkStatusForQMchange();
+        if (selectQM)
+        {
+            SelectQuality();
+        }
+        // Reset the short-term averaged content data.
+        _content->ResetShortTermAvgData();
+    }
+
+    return _targetBitRate;
+}
+
+int VCMMediaOptimization::UpdateProtectionCallback(
+    VCMProtectionMethod *selected_method,
+    uint32_t* video_rate_bps,
+    uint32_t* nack_overhead_rate_bps,
+    uint32_t* fec_overhead_rate_bps)
+{
+    if (!_videoProtectionCallback)
+    {
+        return VCM_OK;
+    }
+    FecProtectionParams delta_fec_params;
+    FecProtectionParams key_fec_params;
+    // Get the FEC code rate for Key frames (set to 0 when NA)
+    key_fec_params.fec_rate = selected_method->RequiredProtectionFactorK();
+
+    // Get the FEC code rate for Delta frames (set to 0 when NA)
+    delta_fec_params.fec_rate =
+        selected_method->RequiredProtectionFactorD();
+
+    // Get the FEC-UEP protection status for Key frames: UEP on/off
+    key_fec_params.use_uep_protection =
+        selected_method->RequiredUepProtectionK();
+
+    // Get the FEC-UEP protection status for Delta frames: UEP on/off
+    delta_fec_params.use_uep_protection =
+        selected_method->RequiredUepProtectionD();
+
+    // The RTP module currently requires the same |max_fec_frames| for both
+    // key and delta frames.
+    delta_fec_params.max_fec_frames = selected_method->MaxFramesFec();
+    key_fec_params.max_fec_frames = selected_method->MaxFramesFec();
+
+    // Set the FEC packet mask type. |kFecMaskBursty| is more effective for
+    // consecutive losses and little/no packet re-ordering. As we currently
+    // do not have feedback data on the degree of correlated losses and packet
+    // re-ordering, we keep default setting to |kFecMaskRandom| for now.
+    delta_fec_params.fec_mask_type = kFecMaskRandom;
+    key_fec_params.fec_mask_type = kFecMaskRandom;
+
+    // TODO(Marco): Pass FEC protection values per layer.
+    return _videoProtectionCallback->ProtectionRequest(&delta_fec_params,
+                                                       &key_fec_params,
+                                                       video_rate_bps,
+                                                       nack_overhead_rate_bps,
+                                                       fec_overhead_rate_bps);
+}
+
+bool
+VCMMediaOptimization::DropFrame()
+{
+    // leak appropriate number of bytes
+    _frameDropper->Leak((WebRtc_UWord32)(InputFrameRate() + 0.5f));
+    return _frameDropper->DropFrame();
+}
+
+WebRtc_Word32
+VCMMediaOptimization::SentFrameCount(VCMFrameCount &frameCount) const
+{
+    frameCount.numDeltaFrames = _deltaFrameCnt;
+    frameCount.numKeyFrames = _keyFrameCnt;
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VCMMediaOptimization::SetEncodingData(VideoCodecType sendCodecType,
+                                      WebRtc_Word32 maxBitRate,
+                                      WebRtc_UWord32 frameRate,
+                                      WebRtc_UWord32 bitRate,
+                                      WebRtc_UWord16 width,
+                                      WebRtc_UWord16 height,
+                                      int numLayers)
+{
+    // Everything codec specific should be reset here since this means the codec
+    // has changed. If native dimension values have changed, then either user
+    // initiated change, or QM initiated change. Will be able to determine only
+    // after the processing of the first frame.
+    _lastChangeTime = _clock->MillisecondTimestamp();
+    _content->Reset();
+    _content->UpdateFrameRate(frameRate);
+
+    _maxBitRate = maxBitRate;
+    _sendCodecType = sendCodecType;
+    _targetBitRate = bitRate;
+    _lossProtLogic->UpdateBitRate(static_cast<float>(bitRate));
+    _lossProtLogic->UpdateFrameRate(static_cast<float>(frameRate));
+    _lossProtLogic->UpdateFrameSize(width, height);
+    _lossProtLogic->UpdateNumLayers(numLayers);
+    _frameDropper->Reset();
+    _frameDropper->SetRates(static_cast<float>(bitRate),
+                            static_cast<float>(frameRate));
+    _userFrameRate = static_cast<float>(frameRate);
+    _codecWidth = width;
+    _codecHeight = height;
+    _numLayers = (numLayers <= 1) ? 1 : numLayers;  // Can also be zero.
+    WebRtc_Word32 ret = VCM_OK;
+    ret = _qmResolution->Initialize((float)_targetBitRate, _userFrameRate,
+                                    _codecWidth, _codecHeight, _numLayers);
+    return ret;
+}
+
+WebRtc_Word32
+VCMMediaOptimization::RegisterProtectionCallback(VCMProtectionCallback*
+                                                 protectionCallback)
+{
+    _videoProtectionCallback = protectionCallback;
+    return VCM_OK;
+
+}
+
+void
+VCMMediaOptimization::EnableFrameDropper(bool enable)
+{
+    _frameDropper->Enable(enable);
+}
+
+void
+VCMMediaOptimization::EnableProtectionMethod(bool enable,
+                                             VCMProtectionMethodEnum method)
+{
+    bool updated = false;
+    if (enable)
+    {
+        updated = _lossProtLogic->SetMethod(method);
+    }
+    else
+    {
+        _lossProtLogic->RemoveMethod(method);
+    }
+    if (updated)
+    {
+        _lossProtLogic->UpdateMethod();
+    }
+}
+
+bool
+VCMMediaOptimization::IsProtectionMethodEnabled(VCMProtectionMethodEnum method)
+{
+    return (_lossProtLogic->SelectedType() == method);
+}
+
+void
+VCMMediaOptimization::SetMtu(WebRtc_Word32 mtu)
+{
+    _maxPayloadSize = mtu;
+}
+
+float
+VCMMediaOptimization::SentFrameRate()
+{
+    if (_frameDropper)
+    {
+        return _frameDropper->ActualFrameRate((WebRtc_UWord32)(InputFrameRate()
+                                                               + 0.5f));
+    }
+
+    return VCM_CODEC_ERROR;
+}
+
+float
+VCMMediaOptimization::SentBitRate()
+{
+    UpdateBitRateEstimate(-1, _clock->MillisecondTimestamp());
+    return _avgSentBitRateBps / 1000.0f;
+}
+
+WebRtc_Word32
+VCMMediaOptimization::MaxBitRate()
+{
+    return _maxBitRate;
+}
+
+WebRtc_Word32
+VCMMediaOptimization::UpdateWithEncodedData(WebRtc_Word32 encodedLength,
+                                            FrameType encodedFrameType)
+{
+    // look into the ViE version - debug mode - needs also number of layers.
+    UpdateBitRateEstimate(encodedLength, _clock->MillisecondTimestamp());
+    if(encodedLength > 0)
+    {
+        const bool deltaFrame = (encodedFrameType != kVideoFrameKey &&
+                                 encodedFrameType != kVideoFrameGolden);
+
+        _frameDropper->Fill(encodedLength, deltaFrame);
+        if (_maxPayloadSize > 0 && encodedLength > 0)
+        {
+            const float minPacketsPerFrame = encodedLength /
+                                             static_cast<float>(_maxPayloadSize);
+            if (deltaFrame)
+            {
+                _lossProtLogic->UpdatePacketsPerFrame(
+                    minPacketsPerFrame, _clock->MillisecondTimestamp());
+            }
+            else
+            {
+                _lossProtLogic->UpdatePacketsPerFrameKey(
+                    minPacketsPerFrame, _clock->MillisecondTimestamp());
+            }
+
+            if (_enableQm)
+            {
+                // update quality select with encoded length
+                _qmResolution->UpdateEncodedSize(encodedLength,
+                                                 encodedFrameType);
+            }
+        }
+        if (!deltaFrame && encodedLength > 0)
+        {
+            _lossProtLogic->UpdateKeyFrameSize(static_cast<float>(encodedLength));
+        }
+
+        // updating counters
+        if (deltaFrame)
+        {
+            _deltaFrameCnt++;
+        }
+        else
+        {
+            _keyFrameCnt++;
+        }
+
+    }
+
+     return VCM_OK;
+
+}
+
+void VCMMediaOptimization::UpdateBitRateEstimate(WebRtc_Word64 encodedLength,
+                                                 WebRtc_Word64 nowMs)
+{
+    int i = kBitrateMaxFrameSamples - 1;
+    WebRtc_UWord32 frameSizeSum = 0;
+    WebRtc_Word64 timeOldest = -1;
+    // Find an empty slot for storing the new sample and at the same time
+    // accumulate the history.
+    for (; i >= 0; i--)
+    {
+        if (_encodedFrameSamples[i]._sizeBytes == -1)
+        {
+            // Found empty slot
+            break;
+        }
+        if (nowMs - _encodedFrameSamples[i]._timeCompleteMs <
+            kBitrateAverageWinMs)
+        {
+            frameSizeSum += static_cast<WebRtc_UWord32>
+                            (_encodedFrameSamples[i]._sizeBytes);
+            if (timeOldest == -1)
+            {
+                timeOldest = _encodedFrameSamples[i]._timeCompleteMs;
+            }
+        }
+    }
+    if (encodedLength > 0)
+    {
+        if (i < 0)
+        {
+            // No empty slot, shift
+            for (i = kBitrateMaxFrameSamples - 2; i >= 0; i--)
+            {
+                _encodedFrameSamples[i + 1] = _encodedFrameSamples[i];
+            }
+            i++;
+        }
+        // Insert new sample
+        _encodedFrameSamples[i]._sizeBytes = encodedLength;
+        _encodedFrameSamples[i]._timeCompleteMs = nowMs;
+    }
+    if (timeOldest > -1)
+    {
+        // Update average bit rate
+        float denom = static_cast<float>(nowMs - timeOldest);
+        if (denom < 1.0)
+        {
+            denom = 1.0;
+        }
+        _avgSentBitRateBps = (frameSizeSum + encodedLength) * 8 * 1000 / denom;
+    }
+    else if (encodedLength > 0)
+    {
+        _avgSentBitRateBps = static_cast<float>(encodedLength * 8);
+    }
+    else
+    {
+        _avgSentBitRateBps = 0;
+    }
+}
+
+
+WebRtc_Word32
+VCMMediaOptimization::RegisterVideoQMCallback(VCMQMSettingsCallback*
+                                              videoQMSettings)
+{
+    _videoQMSettingsCallback = videoQMSettings;
+    // Callback setting controls QM
+    if (_videoQMSettingsCallback != NULL)
+    {
+        _enableQm = true;
+    }
+    else
+    {
+        _enableQm = false;
+    }
+    return VCM_OK;
+}
+
+void
+VCMMediaOptimization::updateContentData(const VideoContentMetrics*
+                                        contentMetrics)
+{
+    // Updating content metrics
+    if (contentMetrics == NULL)
+    {
+         // Disable QM if metrics are NULL
+         _enableQm = false;
+         _qmResolution->Reset();
+    }
+    else
+    {
+        _content->UpdateContentData(contentMetrics);
+    }
+}
+
+WebRtc_Word32
+VCMMediaOptimization::SelectQuality()
+{
+    // Reset quantities for QM select
+    _qmResolution->ResetQM();
+
+    // Update QM will long-term averaged content metrics.
+    _qmResolution->UpdateContent(_content->LongTermAvgData());
+
+    // Select quality mode
+    VCMResolutionScale* qm = NULL;
+    WebRtc_Word32 ret = _qmResolution->SelectResolution(&qm);
+    if (ret < 0)
+    {
+        return ret;
+    }
+
+    // Check for updates to spatial/temporal modes
+    QMUpdate(qm);
+
+    // Reset all the rate and related frame counters quantities
+    _qmResolution->ResetRates();
+
+    // Reset counters
+    _lastQMUpdateTime = _clock->MillisecondTimestamp();
+
+    // Reset content metrics
+    _content->Reset();
+
+    return VCM_OK;
+}
+
+
+// Check timing constraints and look for significant change in:
+// (1) scene content
+// (2) target bit rate
+
+bool
+VCMMediaOptimization::checkStatusForQMchange()
+{
+
+    bool status  = true;
+
+    // Check that we do not call QMSelect too often, and that we waited some time
+    // (to sample the metrics) from the event lastChangeTime
+    // lastChangeTime is the time where user changed the size/rate/frame rate
+    // (via SetEncodingData)
+    WebRtc_Word64 now = _clock->MillisecondTimestamp();
+    if ((now - _lastQMUpdateTime) < kQmMinIntervalMs ||
+        (now  - _lastChangeTime) <  kQmMinIntervalMs)
+    {
+        status = false;
+    }
+
+    return status;
+
+}
+
+bool VCMMediaOptimization::QMUpdate(VCMResolutionScale* qm) {
+  // Check for no change
+  if (!qm->change_resolution_spatial && !qm->change_resolution_temporal) {
+    return false;
+  }
+
+  // Check for change in frame rate.
+  if (qm->change_resolution_temporal) {
+    _incomingFrameRate = qm->frame_rate;
+    // Reset frame rate estimate.
+    memset(_incomingFrameTimes, -1, sizeof(_incomingFrameTimes));
+  }
+
+  // Check for change in frame size.
+  if (qm->change_resolution_spatial) {
+    _codecWidth = qm->codec_width;
+    _codecHeight = qm->codec_height;
+  }
+
+  WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, _id,
+               "Resolution change from QM select: W = %d, H = %d, FR = %f",
+               qm->codec_width, qm->codec_height, qm->frame_rate);
+
+  // Update VPM with new target frame rate and frame size.
+  // Note: use |qm->frame_rate| instead of |_incomingFrameRate| for updating
+  // target frame rate in VPM frame dropper. The quantity |_incomingFrameRate|
+  // will vary/fluctuate, and since we don't want to change the state of the
+  // VPM frame dropper, unless a temporal action was selected, we use the
+  // quantity |qm->frame_rate| for updating.
+  _videoQMSettingsCallback->SetVideoQMSettings(qm->frame_rate,
+                                               _codecWidth,
+                                               _codecHeight);
+  _content->UpdateFrameRate(qm->frame_rate);
+  _qmResolution->UpdateCodecParameters(qm->frame_rate, _codecWidth,
+                                       _codecHeight);
+  return true;
+}
+
+void
+VCMMediaOptimization::UpdateIncomingFrameRate()
+{
+    WebRtc_Word64 now = _clock->MillisecondTimestamp();
+    if (_incomingFrameTimes[0] == 0)
+    {
+        // first no shift
+    } else
+    {
+        // shift
+        for(WebRtc_Word32 i = (kFrameCountHistorySize - 2); i >= 0 ; i--)
+        {
+            _incomingFrameTimes[i+1] = _incomingFrameTimes[i];
+        }
+    }
+    _incomingFrameTimes[0] = now;
+    ProcessIncomingFrameRate(now);
+}
+
+// allowing VCM to keep track of incoming frame rate
+void
+VCMMediaOptimization::ProcessIncomingFrameRate(WebRtc_Word64 now)
+{
+    WebRtc_Word32 num = 0;
+    WebRtc_Word32 nrOfFrames = 0;
+    for (num = 1; num < (kFrameCountHistorySize - 1); num++)
+    {
+        if (_incomingFrameTimes[num] <= 0 ||
+            // don't use data older than 2 s
+            now - _incomingFrameTimes[num] > kFrameHistoryWinMs)
+        {
+            break;
+        } else
+        {
+            nrOfFrames++;
+        }
+    }
+    if (num > 1)
+    {
+        const WebRtc_Word64 diff = now - _incomingFrameTimes[num-1];
+        _incomingFrameRate = 1.0;
+        if(diff >0)
+        {
+            _incomingFrameRate = nrOfFrames * 1000.0f / static_cast<float>(diff);
+        }
+    }
+}
+
+WebRtc_UWord32
+VCMMediaOptimization::InputFrameRate()
+{
+    ProcessIncomingFrameRate(_clock->MillisecondTimestamp());
+    return WebRtc_UWord32 (_incomingFrameRate + 0.5f);
+}
+
+}
diff --git a/src/modules/video_coding/main/source/media_optimization.h b/src/modules/video_coding/main/source/media_optimization.h
new file mode 100644
index 0000000..3b6c978
--- /dev/null
+++ b/src/modules/video_coding/main/source/media_optimization.h
@@ -0,0 +1,209 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_MEDIA_OPTIMIZATION_H_
+#define WEBRTC_MODULES_VIDEO_CODING_MEDIA_OPTIMIZATION_H_
+
+#include "module_common_types.h"
+#include "video_coding.h"
+#include "trace.h"
+#include "media_opt_util.h"
+#include "qm_select.h"
+
+namespace webrtc
+{
+
+enum { kBitrateMaxFrameSamples = 60 };
+enum { kBitrateAverageWinMs    = 1000 };
+
+class TickTimeBase;
+class VCMContentMetricsProcessing;
+class VCMFrameDropper;
+
+struct VCMEncodedFrameSample
+{
+    VCMEncodedFrameSample() : _sizeBytes(-1), _timeCompleteMs(-1) {}
+
+    WebRtc_Word64     _sizeBytes;
+    WebRtc_Word64     _timeCompleteMs;
+};
+
+class VCMMediaOptimization
+{
+public:
+    VCMMediaOptimization(WebRtc_Word32 id, TickTimeBase* clock);
+    ~VCMMediaOptimization(void);
+    /*
+    * Reset the Media Optimization module
+    */
+    WebRtc_Word32 Reset();
+    /**
+    * Set target Rates for the encoder given the channel parameters
+    * Inputs:       bitRate - target bitRate, in the conference case this is the rate
+    *                         between the sending client and the server
+    *               fractionLost - packet loss in % in the network
+    *               roundTripTimeMs - round trip time in miliseconds
+    *               minBitRate - the bit rate of the end-point with lowest rate
+    *               maxBitRate - the bit rate of the end-point with highest rate
+    */
+    WebRtc_UWord32 SetTargetRates(WebRtc_UWord32 bitRate,
+                                  WebRtc_UWord8 &fractionLost,
+                                  WebRtc_UWord32 roundTripTimeMs);
+
+    /**
+    * Inform media optimization of initial encoding state
+    */
+    WebRtc_Word32 SetEncodingData(VideoCodecType sendCodecType,
+                                  WebRtc_Word32 maxBitRate,
+                                  WebRtc_UWord32 frameRate,
+                                  WebRtc_UWord32 bitRate,
+                                  WebRtc_UWord16 width,
+                                  WebRtc_UWord16 height,
+                                  int numTemporalLayers);
+    /**
+    * Enable protection method
+    */
+    void EnableProtectionMethod(bool enable, VCMProtectionMethodEnum method);
+    /**
+    * Returns weather or not protection method is enabled
+    */
+    bool IsProtectionMethodEnabled(VCMProtectionMethodEnum method);
+    /**
+    * Updates the max pay load size
+    */
+    void SetMtu(WebRtc_Word32 mtu);
+    /*
+    * Get actual input frame rate
+    */
+    WebRtc_UWord32 InputFrameRate();
+
+    /*
+    * Get actual sent frame rate
+    */
+    float SentFrameRate();
+    /*
+    * Get actual sent bit rate
+    */
+    float SentBitRate();
+    /*
+    * Get maximum allowed bit rate
+    */
+    WebRtc_Word32 MaxBitRate();
+    /*
+    * Inform Media Optimization of encoding output: Length and frame type
+    */
+    WebRtc_Word32 UpdateWithEncodedData(WebRtc_Word32 encodedLength,
+                                        FrameType encodedFrameType);
+    /*
+    * Register a protection callback to be used to inform the user about the
+    * protection methods used
+    */
+    WebRtc_Word32 RegisterProtectionCallback(VCMProtectionCallback*
+                                             protectionCallback);
+    /*
+    * Register a quality settings callback to be used to inform VPM/user about
+    */
+    WebRtc_Word32 RegisterVideoQMCallback(VCMQMSettingsCallback* videoQMSettings);
+    void EnableFrameDropper(bool enable);
+
+    bool DropFrame();
+
+      /*
+    * Get number of key/delta frames encoded
+    */
+    WebRtc_Word32 SentFrameCount(VCMFrameCount &frameCount) const;
+
+    /*
+    *  update incoming frame rate value
+    */
+    void UpdateIncomingFrameRate();
+
+    /**
+    * Update content metric Data
+    */
+    void updateContentData(const VideoContentMetrics* contentMetrics);
+
+    /**
+    * Compute new Quality Mode
+    */
+    WebRtc_Word32 SelectQuality();
+
+private:
+
+    /*
+     *  Update protection callback with protection settings
+     */
+    int UpdateProtectionCallback(VCMProtectionMethod *selected_method,
+                                 uint32_t* total_video_rate_bps,
+                                 uint32_t* nack_overhead_rate_bps,
+                                 uint32_t* fec_overhead_rate_bps);
+
+    void UpdateBitRateEstimate(WebRtc_Word64 encodedLength, WebRtc_Word64 nowMs);
+    /*
+    * verify if QM settings differ from default, i.e. if an update is required
+    * Compute actual values, as will be sent to the encoder
+    */
+    bool QMUpdate(VCMResolutionScale* qm);
+    /**
+    * check if we should make a QM change
+    * will return 1 if yes, 0 otherwise
+    */
+    bool checkStatusForQMchange();
+
+    void ProcessIncomingFrameRate(WebRtc_Word64 now);
+
+    enum { kFrameCountHistorySize = 90};
+    enum { kFrameHistoryWinMs = 2000};
+
+    WebRtc_Word32                     _id;
+    TickTimeBase*                     _clock;
+    WebRtc_Word32                     _maxBitRate;
+    VideoCodecType                    _sendCodecType;
+    WebRtc_UWord16                    _codecWidth;
+    WebRtc_UWord16                    _codecHeight;
+    float                             _userFrameRate;
+
+    VCMFrameDropper*                  _frameDropper;
+    VCMLossProtectionLogic*           _lossProtLogic;
+    WebRtc_UWord8                     _fractionLost;
+
+
+    WebRtc_UWord32                    _sendStatistics[4];
+    WebRtc_UWord32                    _sendStatisticsZeroEncode;
+    WebRtc_Word32                     _maxPayloadSize;
+    WebRtc_UWord32                    _targetBitRate;
+
+    float                             _incomingFrameRate;
+    WebRtc_Word64                     _incomingFrameTimes[kFrameCountHistorySize];
+
+    bool                              _enableQm;
+
+    VCMProtectionCallback*            _videoProtectionCallback;
+    VCMQMSettingsCallback*            _videoQMSettingsCallback;
+
+    VCMEncodedFrameSample             _encodedFrameSamples[kBitrateMaxFrameSamples];
+    float                             _avgSentBitRateBps;
+
+    WebRtc_UWord32                    _keyFrameCnt;
+    WebRtc_UWord32                    _deltaFrameCnt;
+
+    VCMContentMetricsProcessing*      _content;
+    VCMQmResolution*                  _qmResolution;
+
+    WebRtc_Word64                     _lastQMUpdateTime;
+    WebRtc_Word64                     _lastChangeTime; // content/user triggered
+    int                               _numLayers;
+
+
+}; // end of VCMMediaOptimization class definition
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_MEDIA_OPTIMIZATION_H_
diff --git a/src/modules/video_coding/main/source/mock/fake_tick_time.h b/src/modules/video_coding/main/source/mock/fake_tick_time.h
new file mode 100644
index 0000000..c6da348
--- /dev/null
+++ b/src/modules/video_coding/main/source/mock/fake_tick_time.h
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_MOCK_FAKE_TICK_TIME_H_
+#define WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_MOCK_FAKE_TICK_TIME_H_
+
+#include <assert.h>
+
+#include <limits>
+
+#include "modules/video_coding/main/source/tick_time_base.h"
+
+namespace webrtc {
+
+// Provides a fake implementation of TickTimeBase, intended for offline
+// testing. This implementation does not query the system clock, but returns a
+// time value set by the user when creating the object, and incremented with
+// the method IncrementDebugClock.
+class FakeTickTime : public TickTimeBase {
+ public:
+  explicit FakeTickTime(int64_t start_time_ms) : fake_now_ms_(start_time_ms) {}
+  virtual ~FakeTickTime() {}
+  virtual int64_t MillisecondTimestamp() const {
+    return fake_now_ms_;
+  }
+  virtual int64_t MicrosecondTimestamp() const {
+    return 1000 * fake_now_ms_;
+  }
+  virtual void IncrementDebugClock(int64_t increase_ms) {
+    assert(increase_ms <= std::numeric_limits<int64_t>::max() - fake_now_ms_);
+    fake_now_ms_ += increase_ms;
+  }
+
+ private:
+  int64_t fake_now_ms_;
+};
+
+}  // namespace
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_MOCK_FAKE_TICK_TIME_H_
diff --git a/src/modules/video_coding/main/source/nack_fec_tables.h b/src/modules/video_coding/main/source/nack_fec_tables.h
new file mode 100644
index 0000000..88e225d
--- /dev/null
+++ b/src/modules/video_coding/main/source/nack_fec_tables.h
@@ -0,0 +1,127 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_SOURCE_NACK_FEC_TABLES_H_
+#define WEBRTC_MODULES_VIDEO_CODING_SOURCE_NACK_FEC_TABLES_H_
+
+namespace webrtc
+{
+
+// Table for adjusting FEC rate for NACK/FEC protection method
+// Table values are built as a sigmoid function, ranging from 0 to
+// kHighRttNackMs (100), based on the HybridNackTH values defined in
+// media_opt_util.h.
+const WebRtc_UWord16 VCMNackFecTable[100] = {
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+0,
+1,
+1,
+1,
+1,
+1,
+2,
+2,
+2,
+3,
+3,
+4,
+5,
+6,
+7,
+9,
+10,
+12,
+15,
+18,
+21,
+24,
+28,
+32,
+37,
+41,
+46,
+51,
+56,
+61,
+66,
+70,
+74,
+78,
+81,
+84,
+86,
+89,
+90,
+92,
+93,
+95,
+95,
+96,
+97,
+97,
+98,
+98,
+99,
+99,
+99,
+99,
+99,
+99,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+100,
+
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_SOURCE_NACK_FEC_TABLES_H_
diff --git a/src/modules/video_coding/main/source/packet.cc b/src/modules/video_coding/main/source/packet.cc
new file mode 100644
index 0000000..e52cbdd
--- /dev/null
+++ b/src/modules/video_coding/main/source/packet.cc
@@ -0,0 +1,119 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "packet.h"
+#include "module_common_types.h"
+
+#include <assert.h>
+
+namespace webrtc {
+
+VCMPacket::VCMPacket()
+  :
+    payloadType(0),
+    timestamp(0),
+    seqNum(0),
+    dataPtr(NULL),
+    sizeBytes(0),
+    markerBit(false),
+    frameType(kFrameEmpty),
+    codec(kVideoCodecUnknown),
+    isFirstPacket(false),
+    completeNALU(kNaluUnset),
+    insertStartCode(false),
+    codecSpecificHeader() {
+}
+
+VCMPacket::VCMPacket(const WebRtc_UWord8* ptr,
+                               const WebRtc_UWord32 size,
+                               const WebRtcRTPHeader& rtpHeader) :
+    payloadType(rtpHeader.header.payloadType),
+    timestamp(rtpHeader.header.timestamp),
+    seqNum(rtpHeader.header.sequenceNumber),
+    dataPtr(ptr),
+    sizeBytes(size),
+    markerBit(rtpHeader.header.markerBit),
+
+    frameType(rtpHeader.frameType),
+    codec(kVideoCodecUnknown),
+    isFirstPacket(rtpHeader.type.Video.isFirstPacket),
+    completeNALU(kNaluComplete),
+    insertStartCode(false),
+    codecSpecificHeader(rtpHeader.type.Video)
+{
+    CopyCodecSpecifics(rtpHeader.type.Video);
+}
+
+VCMPacket::VCMPacket(const WebRtc_UWord8* ptr, WebRtc_UWord32 size, WebRtc_UWord16 seq, WebRtc_UWord32 ts, bool mBit) :
+    payloadType(0),
+    timestamp(ts),
+    seqNum(seq),
+    dataPtr(ptr),
+    sizeBytes(size),
+    markerBit(mBit),
+
+    frameType(kVideoFrameDelta),
+    codec(kVideoCodecUnknown),
+    isFirstPacket(false),
+    completeNALU(kNaluComplete),
+    insertStartCode(false),
+    codecSpecificHeader()
+{}
+
+void VCMPacket::Reset() {
+  payloadType = 0;
+  timestamp = 0;
+  seqNum = 0;
+  dataPtr = NULL;
+  sizeBytes = 0;
+  markerBit = false;
+  frameType = kFrameEmpty;
+  codec = kVideoCodecUnknown;
+  isFirstPacket = false;
+  completeNALU = kNaluUnset;
+  insertStartCode = false;
+  memset(&codecSpecificHeader, 0, sizeof(RTPVideoHeader));
+}
+
+void VCMPacket::CopyCodecSpecifics(const RTPVideoHeader& videoHeader)
+{
+    switch(videoHeader.codec)
+    {
+        case kRTPVideoVP8:
+            {
+                // Handle all packets within a frame as depending on the previous packet
+                // TODO(holmer): This should be changed to make fragments independent
+                // when the VP8 RTP receiver supports fragments.
+                if (isFirstPacket && markerBit)
+                    completeNALU = kNaluComplete;
+                else if (isFirstPacket)
+                    completeNALU = kNaluStart;
+                else if (markerBit)
+                    completeNALU = kNaluEnd;
+                else
+                    completeNALU = kNaluIncomplete;
+
+                codec = kVideoCodecVP8;
+                break;
+            }
+        case kRTPVideoI420:
+            {
+                codec = kVideoCodecI420;
+                break;
+            }
+        default:
+            {
+                codec = kVideoCodecUnknown;
+                break;
+            }
+    }
+}
+
+}
diff --git a/src/modules/video_coding/main/source/packet.h b/src/modules/video_coding/main/source/packet.h
new file mode 100644
index 0000000..2035653
--- /dev/null
+++ b/src/modules/video_coding/main/source/packet.h
@@ -0,0 +1,57 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_PACKET_H_
+#define WEBRTC_MODULES_VIDEO_CODING_PACKET_H_
+
+#include "typedefs.h"
+#include "module_common_types.h"
+#include "jitter_buffer_common.h"
+
+namespace webrtc
+{
+
+class VCMPacket
+{
+public:
+    VCMPacket();
+    VCMPacket(const WebRtc_UWord8* ptr,
+              const WebRtc_UWord32 size,
+              const WebRtcRTPHeader& rtpHeader);
+    VCMPacket(const WebRtc_UWord8* ptr,
+              WebRtc_UWord32 size,
+              WebRtc_UWord16 seqNum,
+              WebRtc_UWord32 timestamp,
+              bool markerBit);
+
+    void Reset();
+
+    WebRtc_UWord8           payloadType;
+    WebRtc_UWord32          timestamp;
+    WebRtc_UWord16          seqNum;
+    const WebRtc_UWord8*    dataPtr;
+    WebRtc_UWord32          sizeBytes;
+    bool                    markerBit;
+
+    FrameType               frameType;
+    webrtc::VideoCodecType  codec;
+
+    bool isFirstPacket;                 // Is this first packet in a frame.
+    VCMNaluCompleteness completeNALU;   // Default is kNaluIncomplete.
+    bool insertStartCode;               // True if a start code should be inserted before this
+                                        // packet.
+    RTPVideoHeader codecSpecificHeader;
+
+protected:
+    void CopyCodecSpecifics(const RTPVideoHeader& videoHeader);
+};
+
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CODING_PACKET_H_
diff --git a/src/modules/video_coding/main/source/qm_select.cc b/src/modules/video_coding/main/source/qm_select.cc
new file mode 100644
index 0000000..507becd
--- /dev/null
+++ b/src/modules/video_coding/main/source/qm_select.cc
@@ -0,0 +1,958 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/main/source/qm_select.h"
+
+#include <math.h>
+
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/main/source/internal_defines.h"
+#include "modules/video_coding/main/source/qm_select_data.h"
+#include "modules/video_coding/main/interface/video_coding_defines.h"
+#include "system_wrappers/interface/trace.h"
+
+namespace webrtc {
+
+// QM-METHOD class
+
+VCMQmMethod::VCMQmMethod()
+    : content_metrics_(NULL),
+      width_(0),
+      height_(0),
+      user_frame_rate_(0.0f),
+      native_width_(0),
+      native_height_(0),
+      native_frame_rate_(0.0f),
+      framerate_level_(kFrameRateHigh),
+      init_(false) {
+  ResetQM();
+}
+
+VCMQmMethod::~VCMQmMethod() {
+}
+
+void VCMQmMethod::ResetQM() {
+  aspect_ratio_ = 1.0f;
+  motion_.Reset();
+  spatial_.Reset();
+  content_class_ = 0;
+}
+
+uint8_t VCMQmMethod::ComputeContentClass() {
+  ComputeMotionNFD();
+  ComputeSpatial();
+  return content_class_ = 3 * motion_.level + spatial_.level;
+}
+
+void VCMQmMethod::UpdateContent(const VideoContentMetrics*  contentMetrics) {
+  content_metrics_ = contentMetrics;
+}
+
+void VCMQmMethod::ComputeMotionNFD() {
+  if (content_metrics_) {
+    motion_.value = content_metrics_->motion_magnitude;
+  }
+  // Determine motion level.
+  if (motion_.value < kLowMotionNfd) {
+    motion_.level = kLow;
+  } else if (motion_.value > kHighMotionNfd) {
+    motion_.level  = kHigh;
+  } else {
+    motion_.level = kDefault;
+  }
+}
+
+void VCMQmMethod::ComputeSpatial() {
+  float spatial_err = 0.0;
+  float spatial_err_h = 0.0;
+  float spatial_err_v = 0.0;
+  if (content_metrics_) {
+    spatial_err =  content_metrics_->spatial_pred_err;
+    spatial_err_h = content_metrics_->spatial_pred_err_h;
+    spatial_err_v = content_metrics_->spatial_pred_err_v;
+  }
+  // Spatial measure: take average of 3 prediction errors.
+  spatial_.value = (spatial_err + spatial_err_h + spatial_err_v) / 3.0f;
+
+  // Reduce thresholds for large scenes/higher pixel correlation.
+  float scale2 = image_type_ > kVGA ? kScaleTexture : 1.0;
+
+  if (spatial_.value > scale2 * kHighTexture) {
+    spatial_.level = kHigh;
+  } else if (spatial_.value < scale2 * kLowTexture) {
+    spatial_.level = kLow;
+  } else {
+    spatial_.level = kDefault;
+  }
+}
+
+ImageType VCMQmMethod::GetImageType(uint16_t width,
+                                    uint16_t height) {
+  // Get the image type for the encoder frame size.
+  uint32_t image_size = width * height;
+  if (image_size == kSizeOfImageType[kQCIF]) {
+    return kQCIF;
+  } else if (image_size == kSizeOfImageType[kHCIF]) {
+    return kHCIF;
+  } else if (image_size == kSizeOfImageType[kQVGA]) {
+    return kQVGA;
+  } else if (image_size == kSizeOfImageType[kCIF]) {
+    return kCIF;
+  } else if (image_size == kSizeOfImageType[kHVGA]) {
+    return kHVGA;
+  } else if (image_size == kSizeOfImageType[kVGA]) {
+    return kVGA;
+  } else if (image_size == kSizeOfImageType[kQFULLHD]) {
+    return kQFULLHD;
+  } else if (image_size == kSizeOfImageType[kWHD]) {
+    return kWHD;
+  } else if (image_size == kSizeOfImageType[kFULLHD]) {
+    return kFULLHD;
+  } else {
+    // No exact match, find closet one.
+    return FindClosestImageType(width, height);
+  }
+}
+
+ImageType VCMQmMethod::FindClosestImageType(uint16_t width, uint16_t height) {
+  float size = static_cast<float>(width * height);
+  float min = size;
+  int isel = 0;
+  for (int i = 0; i < kNumImageTypes; ++i) {
+    float dist = fabs(size - kSizeOfImageType[i]);
+    if (dist < min) {
+      min = dist;
+      isel = i;
+    }
+  }
+  return static_cast<ImageType>(isel);
+}
+
+FrameRateLevelClass VCMQmMethod::FrameRateLevel(float avg_framerate) {
+  if (avg_framerate <= kLowFrameRate) {
+    return kFrameRateLow;
+  } else if (avg_framerate <= kMiddleFrameRate) {
+    return kFrameRateMiddle1;
+  } else if (avg_framerate <= kHighFrameRate) {
+     return kFrameRateMiddle2;
+  } else {
+    return kFrameRateHigh;
+  }
+}
+
+// RESOLUTION CLASS
+
+VCMQmResolution::VCMQmResolution()
+    :  qm_(new VCMResolutionScale()) {
+  Reset();
+}
+
+VCMQmResolution::~VCMQmResolution() {
+  delete qm_;
+}
+
+void VCMQmResolution::ResetRates() {
+  sum_target_rate_ = 0.0f;
+  sum_incoming_framerate_ = 0.0f;
+  sum_rate_MM_ = 0.0f;
+  sum_rate_MM_sgn_ = 0.0f;
+  sum_packet_loss_ = 0.0f;
+  buffer_level_ = kInitBufferLevel * target_bitrate_;
+  frame_cnt_ = 0;
+  frame_cnt_delta_ = 0;
+  low_buffer_cnt_ = 0;
+  update_rate_cnt_ = 0;
+}
+
+void VCMQmResolution::ResetDownSamplingState() {
+  state_dec_factor_spatial_ = 1.0;
+  state_dec_factor_temporal_  = 1.0;
+  for (int i = 0; i < kDownActionHistorySize; i++) {
+    down_action_history_[i].spatial = kNoChangeSpatial;
+    down_action_history_[i].temporal = kNoChangeTemporal;
+  }
+}
+
+void VCMQmResolution::Reset() {
+  target_bitrate_ = 0.0f;
+  incoming_framerate_ = 0.0f;
+  buffer_level_ = 0.0f;
+  per_frame_bandwidth_ = 0.0f;
+  avg_target_rate_ = 0.0f;
+  avg_incoming_framerate_ = 0.0f;
+  avg_ratio_buffer_low_ = 0.0f;
+  avg_rate_mismatch_ = 0.0f;
+  avg_rate_mismatch_sgn_ = 0.0f;
+  avg_packet_loss_ = 0.0f;
+  encoder_state_ = kStableEncoding;
+  num_layers_ = 1;
+  ResetRates();
+  ResetDownSamplingState();
+  ResetQM();
+}
+
+EncoderState VCMQmResolution::GetEncoderState() {
+  return encoder_state_;
+}
+
+// Initialize state after re-initializing the encoder,
+// i.e., after SetEncodingData() in mediaOpt.
+int VCMQmResolution::Initialize(float bitrate,
+                                float user_framerate,
+                                uint16_t width,
+                                uint16_t height,
+                                int num_layers) {
+  if (user_framerate == 0.0f || width == 0 || height == 0) {
+    return VCM_PARAMETER_ERROR;
+  }
+  Reset();
+  target_bitrate_ = bitrate;
+  incoming_framerate_ = user_framerate;
+  UpdateCodecParameters(user_framerate, width, height);
+  native_width_ = width;
+  native_height_ = height;
+  native_frame_rate_ = user_framerate;
+  num_layers_ = num_layers;
+  // Initial buffer level.
+  buffer_level_ = kInitBufferLevel * target_bitrate_;
+  // Per-frame bandwidth.
+  per_frame_bandwidth_ = target_bitrate_ / user_framerate;
+  init_  = true;
+  return VCM_OK;
+}
+
+void VCMQmResolution::UpdateCodecParameters(float frame_rate, uint16_t width,
+                                            uint16_t height) {
+  width_ = width;
+  height_ = height;
+  // |user_frame_rate| is the target frame rate for VPM frame dropper.
+  user_frame_rate_ = frame_rate;
+  image_type_ = GetImageType(width, height);
+}
+
+// Update rate data after every encoded frame.
+void VCMQmResolution::UpdateEncodedSize(int encoded_size,
+                                        FrameType encoded_frame_type) {
+  frame_cnt_++;
+  // Convert to Kbps.
+  float encoded_size_kbits = static_cast<float>((encoded_size * 8.0) / 1000.0);
+
+  // Update the buffer level:
+  // Note this is not the actual encoder buffer level.
+  // |buffer_level_| is reset to an initial value after SelectResolution is
+  // called, and does not account for frame dropping by encoder or VCM.
+  buffer_level_ += per_frame_bandwidth_ - encoded_size_kbits;
+
+  // Counter for occurrences of low buffer level:
+  // low/negative values means encoder is likely dropping frames.
+  if (buffer_level_ <= kPercBufferThr * kInitBufferLevel * target_bitrate_) {
+    low_buffer_cnt_++;
+  }
+}
+
+// Update various quantities after SetTargetRates in MediaOpt.
+void VCMQmResolution::UpdateRates(float target_bitrate,
+                                  float encoder_sent_rate,
+                                  float incoming_framerate,
+                                  uint8_t packet_loss) {
+  // Sum the target bitrate: this is the encoder rate from previous update
+  // (~1sec), i.e, before the update for next ~1sec.
+  sum_target_rate_ += target_bitrate_;
+  update_rate_cnt_++;
+
+  // Sum the received (from RTCP reports) packet loss rates.
+  sum_packet_loss_ += static_cast<float>(packet_loss / 255.0);
+
+  // Sum the sequence rate mismatch:
+  // Mismatch here is based on the difference between the target rate
+  // used (in previous ~1sec) and the average actual encoding rate measured
+  // at previous ~1sec.
+  float diff = target_bitrate_ - encoder_sent_rate;
+  if (target_bitrate_ > 0.0)
+    sum_rate_MM_ += fabs(diff) / target_bitrate_;
+  int sgnDiff = diff > 0 ? 1 : (diff < 0 ? -1 : 0);
+  // To check for consistent under(+)/over_shooting(-) of target rate.
+  sum_rate_MM_sgn_ += sgnDiff;
+
+  // Update with the current new target and frame rate:
+  // these values are ones the encoder will use for the current/next ~1sec.
+  target_bitrate_ =  target_bitrate;
+  incoming_framerate_ = incoming_framerate;
+  sum_incoming_framerate_ += incoming_framerate_;
+  // Update the per_frame_bandwidth:
+  // this is the per_frame_bw for the current/next ~1sec.
+  per_frame_bandwidth_  = 0.0f;
+  if (incoming_framerate_ > 0.0f) {
+    per_frame_bandwidth_ = target_bitrate_ / incoming_framerate_;
+  }
+}
+
+// Select the resolution factors: frame size and frame rate change (qm scales).
+// Selection is for going down in resolution, or for going back up
+// (if a previous down-sampling action was taken).
+
+// In the current version the following constraints are imposed:
+// 1) We only allow for one action, either down or up, at a given time.
+// 2) The possible down-sampling actions are: spatial by 1/2x1/2, 3/4x3/4;
+//    temporal/frame rate reduction by 1/2 and 2/3.
+// 3) The action for going back up is the reverse of last (spatial or temporal)
+//    down-sampling action. The list of down-sampling actions from the
+//    Initialize() state are kept in |down_action_history_|.
+// 4) The total amount of down-sampling (spatial and/or temporal) from the
+//    Initialize() state (native resolution) is limited by various factors.
+int VCMQmResolution::SelectResolution(VCMResolutionScale** qm) {
+  if (!init_) {
+    return VCM_UNINITIALIZED;
+  }
+  if (content_metrics_ == NULL) {
+    Reset();
+    *qm =  qm_;
+    return VCM_OK;
+  }
+
+  // Check conditions on down-sampling state.
+  assert(state_dec_factor_spatial_ >= 1.0f);
+  assert(state_dec_factor_temporal_ >= 1.0f);
+  assert(state_dec_factor_spatial_ <= kMaxSpatialDown);
+  assert(state_dec_factor_temporal_ <= kMaxTempDown);
+  assert(state_dec_factor_temporal_ * state_dec_factor_spatial_ <=
+         kMaxTotalDown);
+
+  // Compute content class for selection.
+  content_class_ = ComputeContentClass();
+  // Compute various rate quantities for selection.
+  ComputeRatesForSelection();
+
+  // Get the encoder state.
+  ComputeEncoderState();
+
+  // Default settings: no action.
+  SetDefaultAction();
+  *qm = qm_;
+
+  // Check for going back up in resolution, if we have had some down-sampling
+  // relative to native state in Initialize().
+  if (down_action_history_[0].spatial != kNoChangeSpatial ||
+      down_action_history_[0].temporal != kNoChangeTemporal) {
+    if (GoingUpResolution()) {
+      *qm = qm_;
+      return VCM_OK;
+    }
+  }
+
+  // Check for going down in resolution.
+  if (GoingDownResolution()) {
+    *qm = qm_;
+    return VCM_OK;
+  }
+  return VCM_OK;
+}
+
+void VCMQmResolution::SetDefaultAction() {
+  qm_->codec_width = width_;
+  qm_->codec_height = height_;
+  qm_->frame_rate = user_frame_rate_;
+  qm_->change_resolution_spatial = false;
+  qm_->change_resolution_temporal = false;
+  qm_->spatial_width_fact = 1.0f;
+  qm_->spatial_height_fact = 1.0f;
+  qm_->temporal_fact = 1.0f;
+  action_.spatial = kNoChangeSpatial;
+  action_.temporal = kNoChangeTemporal;
+}
+
+void VCMQmResolution::ComputeRatesForSelection() {
+  avg_target_rate_ = 0.0f;
+  avg_incoming_framerate_ = 0.0f;
+  avg_ratio_buffer_low_ = 0.0f;
+  avg_rate_mismatch_ = 0.0f;
+  avg_rate_mismatch_sgn_ = 0.0f;
+  avg_packet_loss_ = 0.0f;
+  if (frame_cnt_ > 0) {
+    avg_ratio_buffer_low_ = static_cast<float>(low_buffer_cnt_) /
+        static_cast<float>(frame_cnt_);
+  }
+  if (update_rate_cnt_ > 0) {
+    avg_rate_mismatch_ = static_cast<float>(sum_rate_MM_) /
+        static_cast<float>(update_rate_cnt_);
+    avg_rate_mismatch_sgn_ = static_cast<float>(sum_rate_MM_sgn_) /
+        static_cast<float>(update_rate_cnt_);
+    avg_target_rate_ = static_cast<float>(sum_target_rate_) /
+        static_cast<float>(update_rate_cnt_);
+    avg_incoming_framerate_ = static_cast<float>(sum_incoming_framerate_) /
+        static_cast<float>(update_rate_cnt_);
+    avg_packet_loss_ =  static_cast<float>(sum_packet_loss_) /
+        static_cast<float>(update_rate_cnt_);
+  }
+  // For selection we may want to weight some quantities more heavily
+  // with the current (i.e., next ~1sec) rate values.
+  avg_target_rate_ = kWeightRate * avg_target_rate_ +
+      (1.0 - kWeightRate) * target_bitrate_;
+  avg_incoming_framerate_ = kWeightRate * avg_incoming_framerate_ +
+      (1.0 - kWeightRate) * incoming_framerate_;
+  // Use base layer frame rate for temporal layers: this will favor spatial.
+  assert(num_layers_ > 0);
+  framerate_level_ = FrameRateLevel(
+      avg_incoming_framerate_ / static_cast<float>(1 << (num_layers_ - 1)));
+}
+
+void VCMQmResolution::ComputeEncoderState() {
+  // Default.
+  encoder_state_ = kStableEncoding;
+
+  // Assign stressed state if:
+  // 1) occurrences of low buffer levels is high, or
+  // 2) rate mis-match is high, and consistent over-shooting by encoder.
+  if ((avg_ratio_buffer_low_ > kMaxBufferLow) ||
+      ((avg_rate_mismatch_ > kMaxRateMisMatch) &&
+          (avg_rate_mismatch_sgn_ < -kRateOverShoot))) {
+    encoder_state_ = kStressedEncoding;
+  }
+  // Assign easy state if:
+  // 1) rate mis-match is high, and
+  // 2) consistent under-shooting by encoder.
+  if ((avg_rate_mismatch_ > kMaxRateMisMatch) &&
+      (avg_rate_mismatch_sgn_ > kRateUnderShoot)) {
+    encoder_state_ = kEasyEncoding;
+  }
+}
+
+bool VCMQmResolution::GoingUpResolution() {
+  // For going up, we check for undoing the previous down-sampling action.
+
+  float fac_width = kFactorWidthSpatial[down_action_history_[0].spatial];
+  float fac_height = kFactorHeightSpatial[down_action_history_[0].spatial];
+  float fac_temp = kFactorTemporal[down_action_history_[0].temporal];
+  // For going up spatially, we allow for going up by 3/4x3/4 at each stage.
+  // So if the last spatial action was 1/2x1/2 it would be undone in 2 stages.
+  // Modify the fac_width/height for this case.
+  if (down_action_history_[0].spatial == kOneQuarterSpatialUniform) {
+    fac_width = kFactorWidthSpatial[kOneQuarterSpatialUniform] /
+        kFactorWidthSpatial[kOneHalfSpatialUniform];
+    fac_height = kFactorHeightSpatial[kOneQuarterSpatialUniform] /
+        kFactorHeightSpatial[kOneHalfSpatialUniform];
+  }
+
+  // Check if we should go up both spatially and temporally.
+  if (down_action_history_[0].spatial != kNoChangeSpatial &&
+      down_action_history_[0].temporal != kNoChangeTemporal) {
+    if (ConditionForGoingUp(fac_width, fac_height, fac_temp,
+                            kTransRateScaleUpSpatialTemp)) {
+      action_.spatial = down_action_history_[0].spatial;
+      action_.temporal = down_action_history_[0].temporal;
+      UpdateDownsamplingState(kUpResolution);
+      return true;
+    }
+  }
+  // Check if we should go up either spatially or temporally.
+  bool selected_up_spatial = false;
+  bool selected_up_temporal = false;
+  if (down_action_history_[0].spatial != kNoChangeSpatial) {
+    selected_up_spatial = ConditionForGoingUp(fac_width, fac_height, 1.0f,
+                                              kTransRateScaleUpSpatial);
+  }
+  if (down_action_history_[0].temporal != kNoChangeTemporal) {
+    selected_up_temporal = ConditionForGoingUp(1.0f, 1.0f, fac_temp,
+                                               kTransRateScaleUpTemp);
+  }
+  if (selected_up_spatial && !selected_up_temporal) {
+    action_.spatial = down_action_history_[0].spatial;
+    action_.temporal = kNoChangeTemporal;
+    UpdateDownsamplingState(kUpResolution);
+    return true;
+  } else if (!selected_up_spatial && selected_up_temporal) {
+    action_.spatial = kNoChangeSpatial;
+    action_.temporal = down_action_history_[0].temporal;
+    UpdateDownsamplingState(kUpResolution);
+    return true;
+  } else if (selected_up_spatial && selected_up_temporal) {
+    PickSpatialOrTemporal();
+    UpdateDownsamplingState(kUpResolution);
+    return true;
+  }
+  return false;
+}
+
+bool VCMQmResolution::ConditionForGoingUp(float fac_width,
+                                          float fac_height,
+                                          float fac_temp,
+                                          float scale_fac) {
+  float estimated_transition_rate_up = GetTransitionRate(fac_width, fac_height,
+                                                         fac_temp, scale_fac);
+  // Go back up if:
+  // 1) target rate is above threshold and current encoder state is stable, or
+  // 2) encoder state is easy (encoder is significantly under-shooting target).
+  if (((avg_target_rate_ > estimated_transition_rate_up) &&
+      (encoder_state_ == kStableEncoding)) ||
+      (encoder_state_ == kEasyEncoding)) {
+    return true;
+  } else {
+    return false;
+  }
+}
+
+bool VCMQmResolution::GoingDownResolution() {
+  float estimated_transition_rate_down =
+      GetTransitionRate(1.0f, 1.0f, 1.0f, 1.0f);
+  float max_rate = kFrameRateFac[framerate_level_] * kMaxRateQm[image_type_];
+  // Resolution reduction if:
+  // (1) target rate is below transition rate, or
+  // (2) encoder is in stressed state and target rate below a max threshold.
+  if ((avg_target_rate_ < estimated_transition_rate_down ) ||
+      (encoder_state_ == kStressedEncoding && avg_target_rate_ < max_rate)) {
+    // Get the down-sampling action: based on content class, and how low
+    // average target rate is relative to transition rate.
+    uint8_t spatial_fact =
+        kSpatialAction[content_class_ +
+                       9 * RateClass(estimated_transition_rate_down)];
+    uint8_t temp_fact =
+        kTemporalAction[content_class_ +
+                        9 * RateClass(estimated_transition_rate_down)];
+
+    switch (spatial_fact) {
+      case 4: {
+        action_.spatial = kOneQuarterSpatialUniform;
+        break;
+      }
+      case 2: {
+        action_.spatial = kOneHalfSpatialUniform;
+        break;
+      }
+      case 1: {
+        action_.spatial = kNoChangeSpatial;
+        break;
+      }
+      default: {
+        assert(false);
+      }
+    }
+    switch (temp_fact) {
+      case 3: {
+        action_.temporal = kTwoThirdsTemporal;
+        break;
+      }
+      case 2: {
+        action_.temporal = kOneHalfTemporal;
+        break;
+      }
+      case 1: {
+        action_.temporal = kNoChangeTemporal;
+        break;
+      }
+      default: {
+        assert(false);
+      }
+    }
+    // Only allow for one action (spatial or temporal) at a given time.
+    assert(action_.temporal == kNoChangeTemporal ||
+           action_.spatial == kNoChangeSpatial);
+
+    // Adjust cases not captured in tables, mainly based on frame rate, and
+    // also check for odd frame sizes.
+    AdjustAction();
+
+    // Update down-sampling state.
+    if (action_.spatial != kNoChangeSpatial ||
+        action_.temporal != kNoChangeTemporal) {
+      UpdateDownsamplingState(kDownResolution);
+      return true;
+    }
+  }
+  return false;
+}
+
+float VCMQmResolution::GetTransitionRate(float fac_width,
+                                         float fac_height,
+                                         float fac_temp,
+                                         float scale_fac) {
+  ImageType image_type = GetImageType(
+      static_cast<uint16_t>(fac_width * width_),
+      static_cast<uint16_t>(fac_height * height_));
+
+  FrameRateLevelClass framerate_level =
+      FrameRateLevel(fac_temp * avg_incoming_framerate_);
+  // If we are checking for going up temporally, and this is the last
+  // temporal action, then use native frame rate.
+  if (down_action_history_[1].temporal == kNoChangeTemporal &&
+      fac_temp > 1.0f) {
+    framerate_level = FrameRateLevel(native_frame_rate_);
+  }
+
+  // The maximum allowed rate below which down-sampling is allowed:
+  // Nominal values based on image format (frame size and frame rate).
+  float max_rate = kFrameRateFac[framerate_level] * kMaxRateQm[image_type];
+
+  uint8_t image_class = image_type > kVGA ? 1: 0;
+  uint8_t table_index = image_class * 9 + content_class_;
+  // Scale factor for down-sampling transition threshold:
+  // factor based on the content class and the image size.
+  float scaleTransRate = kScaleTransRateQm[table_index];
+  // Threshold bitrate for resolution action.
+  return static_cast<float> (scale_fac * scaleTransRate * max_rate);
+}
+
+void VCMQmResolution::UpdateDownsamplingState(UpDownAction up_down) {
+  if (up_down == kUpResolution) {
+    qm_->spatial_width_fact = 1.0f / kFactorWidthSpatial[action_.spatial];
+    qm_->spatial_height_fact = 1.0f / kFactorHeightSpatial[action_.spatial];
+    // If last spatial action was 1/2x1/2, we undo it in two steps, so the
+    // spatial scale factor in this first step is modified as (4.0/3.0 / 2.0).
+    if (action_.spatial == kOneQuarterSpatialUniform) {
+      qm_->spatial_width_fact =
+          1.0f * kFactorWidthSpatial[kOneHalfSpatialUniform] /
+          kFactorWidthSpatial[kOneQuarterSpatialUniform];
+      qm_->spatial_height_fact =
+          1.0f * kFactorHeightSpatial[kOneHalfSpatialUniform] /
+          kFactorHeightSpatial[kOneQuarterSpatialUniform];
+    }
+    qm_->temporal_fact = 1.0f / kFactorTemporal[action_.temporal];
+    RemoveLastDownAction();
+  } else if (up_down == kDownResolution) {
+    ConstrainAmountOfDownSampling();
+    ConvertSpatialFractionalToWhole();
+    qm_->spatial_width_fact = kFactorWidthSpatial[action_.spatial];
+    qm_->spatial_height_fact = kFactorHeightSpatial[action_.spatial];
+    qm_->temporal_fact = kFactorTemporal[action_.temporal];
+    InsertLatestDownAction();
+  } else {
+    // This function should only be called if either the Up or Down action
+    // has been selected.
+    assert(false);
+  }
+  UpdateCodecResolution();
+  state_dec_factor_spatial_ = state_dec_factor_spatial_ *
+      qm_->spatial_width_fact * qm_->spatial_height_fact;
+  state_dec_factor_temporal_ = state_dec_factor_temporal_ * qm_->temporal_fact;
+}
+
+void  VCMQmResolution::UpdateCodecResolution() {
+  if (action_.spatial != kNoChangeSpatial) {
+    qm_->change_resolution_spatial = true;
+    qm_->codec_width = static_cast<uint16_t>(width_ /
+                                             qm_->spatial_width_fact + 0.5f);
+    qm_->codec_height = static_cast<uint16_t>(height_ /
+                                              qm_->spatial_height_fact + 0.5f);
+    // Size should not exceed native sizes.
+    assert(qm_->codec_width <= native_width_);
+    assert(qm_->codec_height <= native_height_);
+    // New sizes should be multiple of 2, otherwise spatial should not have
+    // been selected.
+    assert(qm_->codec_width % 2 == 0);
+    assert(qm_->codec_height % 2 == 0);
+  }
+  if (action_.temporal != kNoChangeTemporal) {
+    qm_->change_resolution_temporal = true;
+    // Update the frame rate based on the average incoming frame rate.
+    qm_->frame_rate = avg_incoming_framerate_ / qm_->temporal_fact + 0.5f;
+    if (down_action_history_[0].temporal == 0) {
+      // When we undo the last temporal-down action, make sure we go back up
+      // to the native frame rate. Since the incoming frame rate may
+      // fluctuate over time, |avg_incoming_framerate_| scaled back up may
+      // be smaller than |native_frame rate_|.
+      qm_->frame_rate = native_frame_rate_;
+    }
+  }
+}
+
+uint8_t VCMQmResolution::RateClass(float transition_rate) {
+  return avg_target_rate_ < (kFacLowRate * transition_rate) ? 0:
+  (avg_target_rate_ >= transition_rate ? 2 : 1);
+}
+
+// TODO(marpan): Would be better to capture these frame rate adjustments by
+// extending the table data (qm_select_data.h).
+void VCMQmResolution::AdjustAction() {
+  // If the spatial level is default state (neither low or high), motion level
+  // is not high, and spatial action was selected, switch to 2/3 frame rate
+  // reduction if the average incoming frame rate is high.
+  if (spatial_.level == kDefault && motion_.level != kHigh &&
+      action_.spatial != kNoChangeSpatial &&
+      framerate_level_ == kFrameRateHigh) {
+    action_.spatial = kNoChangeSpatial;
+    action_.temporal = kTwoThirdsTemporal;
+  }
+  // If both motion and spatial level are low, and temporal down action was
+  // selected, switch to spatial 3/4x3/4 if the frame rate is not above the
+  // lower middle level (|kFrameRateMiddle1|).
+  if (motion_.level == kLow && spatial_.level == kLow &&
+      framerate_level_ <= kFrameRateMiddle1 &&
+      action_.temporal != kNoChangeTemporal) {
+    action_.spatial = kOneHalfSpatialUniform;
+    action_.temporal = kNoChangeTemporal;
+  }
+  // If spatial action is selected, and there has been too much spatial
+  // reduction already (i.e., 1/4), then switch to temporal action if the
+  // average frame rate is not low.
+  if (action_.spatial != kNoChangeSpatial &&
+      down_action_history_[0].spatial == kOneQuarterSpatialUniform &&
+      framerate_level_ != kFrameRateLow) {
+    action_.spatial = kNoChangeSpatial;
+    action_.temporal = kTwoThirdsTemporal;
+  }
+  // Never use temporal action if number of temporal layers is above 2.
+  if (num_layers_ > 2) {
+    if (action_.temporal !=  kNoChangeTemporal) {
+      action_.spatial = kOneHalfSpatialUniform;
+    }
+    action_.temporal = kNoChangeTemporal;
+  }
+  // If spatial action was selected, we need to make sure the frame sizes
+  // are multiples of two. Otherwise switch to 2/3 temporal.
+  if (action_.spatial != kNoChangeSpatial &&
+      !EvenFrameSize()) {
+    action_.spatial = kNoChangeSpatial;
+    // Only one action (spatial or temporal) is allowed at a given time, so need
+    // to check whether temporal action is currently selected.
+    action_.temporal = kTwoThirdsTemporal;
+  }
+}
+
+void VCMQmResolution::ConvertSpatialFractionalToWhole() {
+  // If 3/4 spatial is selected, check if there has been another 3/4,
+  // and if so, combine them into 1/2. 1/2 scaling is more efficient than 9/16.
+  // Note we define 3/4x3/4 spatial as kOneHalfSpatialUniform.
+  if (action_.spatial == kOneHalfSpatialUniform) {
+    bool found = false;
+    int isel = kDownActionHistorySize;
+    for (int i = 0; i < kDownActionHistorySize; ++i) {
+      if (down_action_history_[i].spatial ==  kOneHalfSpatialUniform) {
+        isel = i;
+        found = true;
+        break;
+      }
+    }
+    if (found) {
+       action_.spatial = kOneQuarterSpatialUniform;
+       state_dec_factor_spatial_ = state_dec_factor_spatial_ /
+           (kFactorWidthSpatial[kOneHalfSpatialUniform] *
+            kFactorHeightSpatial[kOneHalfSpatialUniform]);
+       // Check if switching to 1/2x1/2 (=1/4) spatial is allowed.
+       ConstrainAmountOfDownSampling();
+       if (action_.spatial == kNoChangeSpatial) {
+         // Not allowed. Go back to 3/4x3/4 spatial.
+         action_.spatial = kOneHalfSpatialUniform;
+         state_dec_factor_spatial_ = state_dec_factor_spatial_ *
+             kFactorWidthSpatial[kOneHalfSpatialUniform] *
+             kFactorHeightSpatial[kOneHalfSpatialUniform];
+       } else {
+         // Switching is allowed. Remove 3/4x3/4 from the history, and update
+         // the frame size.
+         for (int i = isel; i < kDownActionHistorySize - 1; ++i) {
+           down_action_history_[i].spatial =
+               down_action_history_[i + 1].spatial;
+         }
+         width_ = width_ * kFactorWidthSpatial[kOneHalfSpatialUniform];
+         height_ = height_ * kFactorHeightSpatial[kOneHalfSpatialUniform];
+       }
+    }
+  }
+}
+
+// Returns false if the new frame sizes, under the current spatial action,
+// are not multiples of two.
+bool VCMQmResolution::EvenFrameSize() {
+  if (action_.spatial == kOneHalfSpatialUniform) {
+    if ((width_ * 3 / 4) % 2 != 0 || (height_ * 3 / 4) % 2 != 0) {
+      return false;
+    }
+  } else if (action_.spatial == kOneQuarterSpatialUniform) {
+    if ((width_ * 1 / 2) % 2 != 0 || (height_ * 1 / 2) % 2 != 0) {
+      return false;
+    }
+  }
+  return true;
+}
+
+void VCMQmResolution::InsertLatestDownAction() {
+  if (action_.spatial != kNoChangeSpatial) {
+    for (int i = kDownActionHistorySize - 1; i > 0; --i) {
+      down_action_history_[i].spatial = down_action_history_[i - 1].spatial;
+    }
+    down_action_history_[0].spatial = action_.spatial;
+  }
+  if (action_.temporal != kNoChangeTemporal) {
+    for (int i = kDownActionHistorySize - 1; i > 0; --i) {
+      down_action_history_[i].temporal = down_action_history_[i - 1].temporal;
+    }
+    down_action_history_[0].temporal = action_.temporal;
+  }
+}
+
+void VCMQmResolution::RemoveLastDownAction() {
+  if (action_.spatial != kNoChangeSpatial) {
+    // If the last spatial action was 1/2x1/2 we replace it with 3/4x3/4.
+    if (action_.spatial == kOneQuarterSpatialUniform) {
+      down_action_history_[0].spatial = kOneHalfSpatialUniform;
+    } else {
+      for (int i = 0; i < kDownActionHistorySize - 1; ++i) {
+        down_action_history_[i].spatial = down_action_history_[i + 1].spatial;
+      }
+      down_action_history_[kDownActionHistorySize - 1].spatial =
+          kNoChangeSpatial;
+    }
+  }
+  if (action_.temporal != kNoChangeTemporal) {
+    for (int i = 0; i < kDownActionHistorySize - 1; ++i) {
+      down_action_history_[i].temporal = down_action_history_[i + 1].temporal;
+    }
+    down_action_history_[kDownActionHistorySize - 1].temporal =
+        kNoChangeTemporal;
+  }
+}
+
+void VCMQmResolution::ConstrainAmountOfDownSampling() {
+  // Sanity checks on down-sampling selection:
+  // override the settings for too small image size and/or frame rate.
+  // Also check the limit on current down-sampling states.
+
+  float spatial_width_fact = kFactorWidthSpatial[action_.spatial];
+  float spatial_height_fact = kFactorHeightSpatial[action_.spatial];
+  float temporal_fact = kFactorTemporal[action_.temporal];
+  float new_dec_factor_spatial = state_dec_factor_spatial_ *
+      spatial_width_fact * spatial_height_fact;
+  float new_dec_factor_temp = state_dec_factor_temporal_ * temporal_fact;
+
+  // No spatial sampling if current frame size is too small, or if the
+  // amount of spatial down-sampling is above maximum spatial down-action.
+  if ((width_ * height_) <= kMinImageSize ||
+      new_dec_factor_spatial > kMaxSpatialDown) {
+    action_.spatial = kNoChangeSpatial;
+    new_dec_factor_spatial = state_dec_factor_spatial_;
+  }
+  // No frame rate reduction if average frame rate is below some point, or if
+  // the amount of temporal down-sampling is above maximum temporal down-action.
+  if (avg_incoming_framerate_ <= kMinFrameRate ||
+      new_dec_factor_temp > kMaxTempDown) {
+    action_.temporal = kNoChangeTemporal;
+    new_dec_factor_temp = state_dec_factor_temporal_;
+  }
+  // Check if the total (spatial-temporal) down-action is above maximum allowed,
+  // if so, disallow the current selected down-action.
+  if (new_dec_factor_spatial * new_dec_factor_temp > kMaxTotalDown) {
+    if (action_.spatial != kNoChangeSpatial) {
+      action_.spatial = kNoChangeSpatial;
+    } else if (action_.temporal != kNoChangeTemporal) {
+      action_.temporal = kNoChangeTemporal;
+    } else {
+      // We only allow for one action (spatial or temporal) at a given time, so
+      // either spatial or temporal action is selected when this function is
+      // called. If the selected action is disallowed from one of the above
+      // 2 prior conditions (on spatial & temporal max down-action), then this
+      // condition "total down-action > |kMaxTotalDown|" would not be entered.
+      assert(false);
+    }
+  }
+}
+
+void VCMQmResolution::PickSpatialOrTemporal() {
+  // Pick the one that has had the most down-sampling thus far.
+  if (state_dec_factor_spatial_ > state_dec_factor_temporal_) {
+    action_.spatial = down_action_history_[0].spatial;
+    action_.temporal = kNoChangeTemporal;
+  } else {
+    action_.spatial = kNoChangeSpatial;
+    action_.temporal = down_action_history_[0].temporal;
+  }
+}
+
+// TODO(marpan): Update when we allow for directional spatial down-sampling.
+void VCMQmResolution::SelectSpatialDirectionMode(float transition_rate) {
+  // Default is 4/3x4/3
+  // For bit rates well below transitional rate, we select 2x2.
+  if (avg_target_rate_ < transition_rate * kRateRedSpatial2X2) {
+    qm_->spatial_width_fact = 2.0f;
+    qm_->spatial_height_fact = 2.0f;
+  }
+  // Otherwise check prediction errors and aspect ratio.
+  float spatial_err = 0.0f;
+  float spatial_err_h = 0.0f;
+  float spatial_err_v = 0.0f;
+  if (content_metrics_) {
+    spatial_err = content_metrics_->spatial_pred_err;
+    spatial_err_h = content_metrics_->spatial_pred_err_h;
+    spatial_err_v = content_metrics_->spatial_pred_err_v;
+  }
+
+  // Favor 1x2 if aspect_ratio is 16:9.
+  if (aspect_ratio_ >= 16.0f / 9.0f) {
+    // Check if 1x2 has lowest prediction error.
+    if (spatial_err_h < spatial_err && spatial_err_h < spatial_err_v) {
+      qm_->spatial_width_fact = 2.0f;
+      qm_->spatial_height_fact = 1.0f;
+    }
+  }
+  // Check for 4/3x4/3 selection: favor 2x2 over 1x2 and 2x1.
+  if (spatial_err < spatial_err_h * (1.0f + kSpatialErr2x2VsHoriz) &&
+      spatial_err < spatial_err_v * (1.0f + kSpatialErr2X2VsVert)) {
+    qm_->spatial_width_fact = 4.0f / 3.0f;
+    qm_->spatial_height_fact = 4.0f / 3.0f;
+  }
+  // Check for 2x1 selection.
+  if (spatial_err_v < spatial_err_h * (1.0f - kSpatialErrVertVsHoriz) &&
+      spatial_err_v < spatial_err * (1.0f - kSpatialErr2X2VsVert)) {
+    qm_->spatial_width_fact = 1.0f;
+    qm_->spatial_height_fact = 2.0f;
+  }
+}
+
+// ROBUSTNESS CLASS
+
+VCMQmRobustness::VCMQmRobustness() {
+  Reset();
+}
+
+VCMQmRobustness::~VCMQmRobustness() {
+}
+
+void VCMQmRobustness::Reset() {
+  prev_total_rate_ = 0.0f;
+  prev_rtt_time_ = 0;
+  prev_packet_loss_ = 0;
+  prev_code_rate_delta_ = 0;
+  ResetQM();
+}
+
+// Adjust the FEC rate based on the content and the network state
+// (packet loss rate, total rate/bandwidth, round trip time).
+// Note that packetLoss here is the filtered loss value.
+float VCMQmRobustness::AdjustFecFactor(uint8_t code_rate_delta,
+                                       float total_rate,
+                                       float framerate,
+                                       uint32_t rtt_time,
+                                       uint8_t packet_loss) {
+  // Default: no adjustment
+  float adjust_fec =  1.0f;
+  if (content_metrics_ == NULL) {
+    return adjust_fec;
+  }
+  // Compute class state of the content.
+  ComputeMotionNFD();
+  ComputeSpatial();
+
+  // TODO(marpan): Set FEC adjustment factor.
+
+  // Keep track of previous values of network state:
+  // adjustment may be also based on pattern of changes in network state.
+  prev_total_rate_ = total_rate;
+  prev_rtt_time_ = rtt_time;
+  prev_packet_loss_ = packet_loss;
+  prev_code_rate_delta_ = code_rate_delta;
+  return adjust_fec;
+}
+
+// Set the UEP (unequal-protection across packets) on/off for the FEC.
+bool VCMQmRobustness::SetUepProtection(uint8_t code_rate_delta,
+                                       float total_rate,
+                                       uint8_t packet_loss,
+                                       bool frame_type) {
+  // Default.
+  return false;
+}
+}  // end of namespace
diff --git a/src/modules/video_coding/main/source/qm_select.h b/src/modules/video_coding/main/source/qm_select.h
new file mode 100644
index 0000000..4a74fa5
--- /dev/null
+++ b/src/modules/video_coding/main/source/qm_select.h
@@ -0,0 +1,375 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_H_
+#define WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_H_
+
+#include "common_types.h"
+#include "typedefs.h"
+
+/******************************************************/
+/* Quality Modes: Resolution and Robustness settings  */
+/******************************************************/
+
+namespace webrtc {
+struct VideoContentMetrics;
+
+struct VCMResolutionScale {
+  VCMResolutionScale()
+      : codec_width(640),
+        codec_height(480),
+        frame_rate(30.0f),
+        spatial_width_fact(1.0f),
+        spatial_height_fact(1.0f),
+        temporal_fact(1.0f),
+        change_resolution_spatial(false),
+        change_resolution_temporal(false) {
+  }
+  uint16_t codec_width;
+  uint16_t codec_height;
+  float frame_rate;
+  float spatial_width_fact;
+  float spatial_height_fact;
+  float temporal_fact;
+  bool change_resolution_spatial;
+  bool change_resolution_temporal;
+};
+
+enum ImageType {
+  kQCIF = 0,            // 176x144
+  kHCIF,                // 264x216 = half(~3/4x3/4) CIF.
+  kQVGA,                // 320x240 = quarter VGA.
+  kCIF,                 // 352x288
+  kHVGA,                // 480x360 = half(~3/4x3/4) VGA.
+  kVGA,                 // 640x480
+  kQFULLHD,             // 960x540 = quarter FULLHD, and half(~3/4x3/4) WHD.
+  kWHD,                 // 1280x720
+  kFULLHD,              // 1920x1080
+  kNumImageTypes
+};
+
+const uint32_t kSizeOfImageType[kNumImageTypes] =
+{ 25344, 57024, 76800, 101376, 172800, 307200, 518400, 921600, 2073600 };
+
+enum FrameRateLevelClass {
+  kFrameRateLow,
+  kFrameRateMiddle1,
+  kFrameRateMiddle2,
+  kFrameRateHigh
+};
+
+enum ContentLevelClass {
+  kLow,
+  kHigh,
+  kDefault
+};
+
+struct VCMContFeature {
+  VCMContFeature()
+      : value(0.0f),
+        level(kDefault) {
+  }
+  void Reset() {
+    value = 0.0f;
+    level = kDefault;
+  }
+  float value;
+  ContentLevelClass level;
+};
+
+enum UpDownAction {
+  kUpResolution,
+  kDownResolution
+};
+
+enum SpatialAction {
+  kNoChangeSpatial,
+  kOneHalfSpatialUniform,        // 3/4 x 3/4: 9/6 ~1/2 pixel reduction.
+  kOneQuarterSpatialUniform,     // 1/2 x 1/2: 1/4 pixel reduction.
+  kNumModesSpatial
+};
+
+enum TemporalAction {
+  kNoChangeTemporal,
+  kTwoThirdsTemporal,     // 2/3 frame rate reduction
+  kOneHalfTemporal,       // 1/2 frame rate reduction
+  kNumModesTemporal
+};
+
+struct ResolutionAction {
+  ResolutionAction()
+      : spatial(kNoChangeSpatial),
+        temporal(kNoChangeTemporal) {
+  }
+  SpatialAction spatial;
+  TemporalAction temporal;
+};
+
+// Down-sampling factors for spatial (width and height), and temporal.
+const float kFactorWidthSpatial[kNumModesSpatial] =
+    { 1.0f, 4.0f / 3.0f, 2.0f };
+
+const float kFactorHeightSpatial[kNumModesSpatial] =
+    { 1.0f, 4.0f / 3.0f, 2.0f };
+
+const float kFactorTemporal[kNumModesTemporal] =
+    { 1.0f, 1.5f, 2.0f };
+
+enum EncoderState {
+  kStableEncoding,    // Low rate mis-match, stable buffer levels.
+  kStressedEncoding,  // Significant over-shooting of target rate,
+                      // Buffer under-flow, etc.
+  kEasyEncoding       // Significant under-shooting of target rate.
+};
+
+// QmMethod class: main class for resolution and robustness settings
+
+class VCMQmMethod {
+ public:
+  VCMQmMethod();
+  virtual ~VCMQmMethod();
+
+  // Reset values
+  void ResetQM();
+  virtual void Reset() = 0;
+
+  // Compute content class.
+  uint8_t ComputeContentClass();
+
+  // Update with the content metrics.
+  void UpdateContent(const VideoContentMetrics* content_metrics);
+
+  // Compute spatial texture magnitude and level.
+  // Spatial texture is a spatial prediction error measure.
+  void ComputeSpatial();
+
+  // Compute motion magnitude and level for NFD metric.
+  // NFD is normalized frame difference (normalized by spatial variance).
+  void ComputeMotionNFD();
+
+  // Get the imageType (CIF, VGA, HD, etc) for the system width/height.
+  ImageType GetImageType(uint16_t width, uint16_t height);
+
+  // Return the closest image type.
+  ImageType FindClosestImageType(uint16_t width, uint16_t height);
+
+  // Get the frame rate level.
+  FrameRateLevelClass FrameRateLevel(float frame_rate);
+
+ protected:
+  // Content Data.
+  const VideoContentMetrics* content_metrics_;
+
+  // Encoder frame sizes and native frame sizes.
+  uint16_t width_;
+  uint16_t height_;
+  float user_frame_rate_;
+  uint16_t native_width_;
+  uint16_t native_height_;
+  float native_frame_rate_;
+  float aspect_ratio_;
+  // Image type and frame rate leve, for the current encoder resolution.
+  ImageType image_type_;
+  FrameRateLevelClass framerate_level_;
+  // Content class data.
+  VCMContFeature motion_;
+  VCMContFeature spatial_;
+  uint8_t content_class_;
+  bool init_;
+};
+
+// Resolution settings class
+
+class VCMQmResolution : public VCMQmMethod {
+ public:
+  VCMQmResolution();
+  virtual ~VCMQmResolution();
+
+  // Reset all quantities.
+  virtual void Reset();
+
+  // Reset rate quantities and counters after every SelectResolution() call.
+  void ResetRates();
+
+  // Reset down-sampling state.
+  void ResetDownSamplingState();
+
+  // Get the encoder state.
+  EncoderState GetEncoderState();
+
+  // Initialize after SetEncodingData in media_opt.
+  int Initialize(float bitrate,
+                 float user_framerate,
+                 uint16_t width,
+                 uint16_t height,
+                 int num_layers);
+
+  // Update the encoder frame size.
+  void UpdateCodecParameters(float frame_rate, uint16_t width, uint16_t height);
+
+  // Update with actual bit rate (size of the latest encoded frame)
+  // and frame type, after every encoded frame.
+  void UpdateEncodedSize(int encoded_size,
+                         FrameType encoded_frame_type);
+
+  // Update with new target bitrate, actual encoder sent rate, frame_rate,
+  // loss rate: every ~1 sec from SetTargetRates in media_opt.
+  void UpdateRates(float target_bitrate,
+                   float encoder_sent_rate,
+                   float incoming_framerate,
+                   uint8_t packet_loss);
+
+  // Extract ST (spatio-temporal) resolution action.
+  // Inputs: qm: Reference to the quality modes pointer.
+  // Output: the spatial and/or temporal scale change.
+  int SelectResolution(VCMResolutionScale** qm);
+
+ private:
+  // Set the default resolution action.
+  void SetDefaultAction();
+
+  // Compute rates for the selection of down-sampling action.
+  void ComputeRatesForSelection();
+
+  // Compute the encoder state.
+  void ComputeEncoderState();
+
+  // Return true if the action is to go back up in resolution.
+  bool GoingUpResolution();
+
+  // Return true if the action is to go down in resolution.
+  bool GoingDownResolution();
+
+  // Check the condition for going up in resolution by the scale factors:
+  // |facWidth|, |facHeight|, |facTemp|.
+  // |scaleFac| is a scale factor for the transition rate.
+  bool ConditionForGoingUp(float fac_width,
+                           float fac_height,
+                           float fac_temp,
+                           float scale_fac);
+
+  // Get the bitrate threshold for the resolution action.
+  // The case |facWidth|=|facHeight|=|facTemp|==1 is for down-sampling action.
+  // |scaleFac| is a scale factor for the transition rate.
+  float GetTransitionRate(float fac_width,
+                          float fac_height,
+                          float fac_temp,
+                          float scale_fac);
+
+  // Update the down-sampling state.
+  void UpdateDownsamplingState(UpDownAction up_down);
+
+  // Update the codec frame size and frame rate.
+  void UpdateCodecResolution();
+
+  // Return a state based on average target rate relative transition rate.
+  uint8_t RateClass(float transition_rate);
+
+  // Adjust the action selected from the table.
+  void AdjustAction();
+
+  // Covert 2 stages of 3/4 (=9/16) spatial decimation to 1/2.
+  void ConvertSpatialFractionalToWhole();
+
+  // Returns true if the new frame sizes, under the selected spatial action,
+  // are of even size.
+  bool EvenFrameSize();
+
+  // Insert latest down-sampling action into the history list.
+  void InsertLatestDownAction();
+
+  // Remove the last (first element) down-sampling action from the list.
+  void RemoveLastDownAction();
+
+  // Check constraints on the amount of down-sampling allowed.
+  void ConstrainAmountOfDownSampling();
+
+  // For going up in resolution: pick spatial or temporal action,
+  // if both actions were separately selected.
+  void PickSpatialOrTemporal();
+
+  // Select the directional (1x2 or 2x1) spatial down-sampling action.
+  void SelectSpatialDirectionMode(float transition_rate);
+
+  enum { kDownActionHistorySize = 10};
+
+  VCMResolutionScale* qm_;
+  // Encoder rate control parameters.
+  float target_bitrate_;
+  float incoming_framerate_;
+  float per_frame_bandwidth_;
+  float buffer_level_;
+
+  // Data accumulated every ~1sec from MediaOpt.
+  float sum_target_rate_;
+  float sum_incoming_framerate_;
+  float sum_rate_MM_;
+  float sum_rate_MM_sgn_;
+  float sum_packet_loss_;
+  // Counters.
+  uint32_t frame_cnt_;
+  uint32_t frame_cnt_delta_;
+  uint32_t update_rate_cnt_;
+  uint32_t low_buffer_cnt_;
+
+  // Resolution state parameters.
+  float state_dec_factor_spatial_;
+  float state_dec_factor_temporal_;
+
+  // Quantities used for selection.
+  float avg_target_rate_;
+  float avg_incoming_framerate_;
+  float avg_ratio_buffer_low_;
+  float avg_rate_mismatch_;
+  float avg_rate_mismatch_sgn_;
+  float avg_packet_loss_;
+  EncoderState encoder_state_;
+  ResolutionAction action_;
+  // Short history of the down-sampling actions from the Initialize() state.
+  // This is needed for going up in resolution. Since the total amount of
+  // down-sampling actions are constrained, the length of the list need not be
+  // large: i.e., (4/3) ^{kDownActionHistorySize} <= kMaxDownSample.
+  ResolutionAction down_action_history_[kDownActionHistorySize];
+  int num_layers_;
+};
+
+// Robustness settings class.
+
+class VCMQmRobustness : public VCMQmMethod {
+ public:
+  VCMQmRobustness();
+  ~VCMQmRobustness();
+
+  virtual void Reset();
+
+  // Adjust FEC rate based on content: every ~1 sec from SetTargetRates.
+  // Returns an adjustment factor.
+  float AdjustFecFactor(uint8_t code_rate_delta,
+                        float total_rate,
+                        float framerate,
+                        uint32_t rtt_time,
+                        uint8_t packet_loss);
+
+  // Set the UEP protection on/off.
+  bool SetUepProtection(uint8_t code_rate_delta,
+                        float total_rate,
+                        uint8_t packet_loss,
+                        bool frame_type);
+
+ private:
+  // Previous state of network parameters.
+  float prev_total_rate_;
+  uint32_t prev_rtt_time_;
+  uint8_t prev_packet_loss_;
+  uint8_t prev_code_rate_delta_;
+};
+}   // namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_CODING_QM_SELECT_H_
+
diff --git a/src/modules/video_coding/main/source/qm_select_data.h b/src/modules/video_coding/main/source/qm_select_data.h
new file mode 100644
index 0000000..4d4f339
--- /dev/null
+++ b/src/modules/video_coding/main/source/qm_select_data.h
@@ -0,0 +1,228 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_SOURCE_QM_SELECT_DATA_H_
+#define WEBRTC_MODULES_VIDEO_CODING_SOURCE_QM_SELECT_DATA_H_
+
+/***************************************************************
+*QMSelectData.h
+* This file includes parameters for content-aware media optimization
+****************************************************************/
+
+#include "typedefs.h"
+
+namespace webrtc {
+//
+// PARAMETERS FOR RESOLUTION ADAPTATION
+//
+
+// Initial level of buffer in secs.
+const float kInitBufferLevel = 0.5f;
+
+// Threshold of (max) buffer size below which we consider too low (underflow).
+const float kPercBufferThr = 0.10f;
+
+// Threshold on the occurrences of low buffer levels.
+const float kMaxBufferLow = 0.30f;
+
+// Threshold on rate mismatch.
+const float kMaxRateMisMatch = 0.5f;
+
+// Threshold on amount of under/over encoder shooting.
+const float kRateOverShoot = 0.75f;
+const float kRateUnderShoot = 0.75f;
+
+// Factor to favor weighting the average rates with the current/last data.
+const float kWeightRate = 0.70f;
+
+// Factor for transitional rate for going back up in resolution.
+const float kTransRateScaleUpSpatial = 1.25f;
+const float kTransRateScaleUpTemp = 1.25f;
+const float kTransRateScaleUpSpatialTemp = 1.25f;
+
+// Threshold on packet loss rate, above which favor resolution reduction.
+const float kPacketLossThr = 0.1f;
+
+// Factor for reducing transitional bitrate under packet loss.
+const float kPacketLossRateFac = 1.0f;
+
+// Maximum possible transitional rate for down-sampling:
+// (units in kbps), for 30fps.
+const uint16_t kMaxRateQm[9] = {
+    0,     // QCIF
+    50,    // kHCIF
+    125,   // kQVGA
+    200,   // CIF
+    280,   // HVGA
+    400,   // VGA
+    700,   // QFULLHD
+    1000,  // WHD
+    1500   // FULLHD
+};
+
+// Frame rate scale for maximum transition rate.
+const float kFrameRateFac[4] = {
+    0.5f,    // Low
+    0.7f,    // Middle level 1
+    0.85f,   // Middle level 2
+    1.0f,    // High
+};
+
+// Scale for transitional rate: based on content class
+// motion=L/H/D,spatial==L/H/D: for low, high, middle levels
+const float kScaleTransRateQm[18] = {
+    // VGA and lower
+    0.40f,       // L, L
+    0.50f,       // L, H
+    0.40f,       // L, D
+    0.60f,       // H ,L
+    0.60f,       // H, H
+    0.60f,       // H, D
+    0.50f,       // D, L
+    0.50f,       // D, D
+    0.50f,       // D, H
+
+    // over VGA
+    0.40f,       // L, L
+    0.50f,       // L, H
+    0.40f,       // L, D
+    0.60f,       // H ,L
+    0.60f,       // H, H
+    0.60f,       // H, D
+    0.50f,       // D, L
+    0.50f,       // D, D
+    0.50f,       // D, H
+};
+
+// Threshold on the target rate relative to transitional rate.
+const float kFacLowRate = 0.5f;
+
+// Action for down-sampling:
+// motion=L/H/D,spatial==L/H/D, for low, high, middle levels;
+// rate = 0/1/2, for target rate state relative to transition rate.
+const uint8_t kSpatialAction[27] = {
+// rateClass = 0:
+    1,       // L, L
+    1,       // L, H
+    1,       // L, D
+    4,       // H ,L
+    1,       // H, H
+    4,       // H, D
+    4,       // D, L
+    1,       // D, H
+    2,       // D, D
+
+// rateClass = 1:
+    1,       // L, L
+    1,       // L, H
+    1,       // L, D
+    2,       // H ,L
+    1,       // H, H
+    2,       // H, D
+    2,       // D, L
+    1,       // D, H
+    2,       // D, D
+
+// rateClass = 2:
+    1,       // L, L
+    1,       // L, H
+    1,       // L, D
+    2,       // H ,L
+    1,       // H, H
+    2,       // H, D
+    2,       // D, L
+    1,       // D, H
+    2,       // D, D
+};
+
+const uint8_t kTemporalAction[27] = {
+// rateClass = 0:
+    3,       // L, L
+    2,       // L, H
+    2,       // L, D
+    1,       // H ,L
+    3,       // H, H
+    1,       // H, D
+    1,       // D, L
+    2,       // D, H
+    1,       // D, D
+
+// rateClass = 1:
+    3,       // L, L
+    3,       // L, H
+    3,       // L, D
+    1,       // H ,L
+    3,       // H, H
+    1,       // H, D
+    1,       // D, L
+    3,       // D, H
+    1,       // D, D
+
+// rateClass = 2:
+    1,       // L, L
+    3,       // L, H
+    3,       // L, D
+    1,       // H ,L
+    3,       // H, H
+    1,       // H, D
+    1,       // D, L
+    3,       // D, H
+    1,       // D, D
+};
+
+// Control the total amount of down-sampling allowed.
+const float kMaxSpatialDown = 8.0f;
+const float kMaxTempDown = 3.0f;
+const float kMaxTotalDown = 9.0f;
+
+// Minimum image size for a spatial down-sampling.
+const int kMinImageSize = 176 * 144;
+
+// Minimum frame rate for temporal down-sampling:
+// no frame rate reduction if incomingFrameRate <= MIN_FRAME_RATE.
+const int kMinFrameRate = 8;
+
+//
+// PARAMETERS FOR FEC ADJUSTMENT: TODO (marpan)
+//
+
+//
+// PARAMETETS FOR SETTING LOW/HIGH STATES OF CONTENT METRICS:
+//
+
+// Thresholds for frame rate:
+const int kLowFrameRate = 10;
+const int kMiddleFrameRate = 15;
+const int kHighFrameRate = 25;
+
+// Thresholds for motion: motion level is from NFD.
+const float kHighMotionNfd = 0.075f;
+const float kLowMotionNfd = 0.03f;
+
+// Thresholds for spatial prediction error:
+// this is applied on the average of (2x2,1x2,2x1).
+const float kHighTexture = 0.035f;
+const float kLowTexture = 0.020f;
+
+// Used to reduce thresholds for larger/HD scenes: correction factor since
+// higher correlation in HD scenes means lower spatial prediction error.
+const float kScaleTexture = 0.9f;
+
+// Percentage reduction in transitional bitrate for 2x2 selected over 1x2/2x1.
+const float kRateRedSpatial2X2 = 0.6f;
+
+const float kSpatialErr2x2VsHoriz = 0.1f;   // percentage to favor 2x2 over H
+const float kSpatialErr2X2VsVert = 0.1f;    // percentage to favor 2x2 over V
+const float kSpatialErrVertVsHoriz = 0.1f;  // percentage to favor H over V
+
+}  //  namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_SOURCE_QM_SELECT_DATA_H_
+
diff --git a/src/modules/video_coding/main/source/qm_select_unittest.cc b/src/modules/video_coding/main/source/qm_select_unittest.cc
new file mode 100644
index 0000000..7f309ce
--- /dev/null
+++ b/src/modules/video_coding/main/source/qm_select_unittest.cc
@@ -0,0 +1,1311 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file includes unit tests the QmResolution class
+ * In particular, for the selection of spatial and/or temporal down-sampling.
+ */
+
+#include <gtest/gtest.h>
+
+#include "modules/video_coding/main/source/qm_select.h"
+#include "modules/interface/module_common_types.h"
+
+namespace webrtc {
+
+// Representative values of content metrics for: low/high/medium(default) state,
+// based on parameters settings in qm_select_data.h.
+const float kSpatialLow = 0.01f;
+const float kSpatialMedium = 0.03f;
+const float kSpatialHigh = 0.1f;
+const float kTemporalLow = 0.01f;
+const float kTemporalMedium = 0.06f;
+const float kTemporalHigh = 0.1f;
+
+class QmSelectTest : public ::testing::Test {
+ protected:
+  QmSelectTest()
+      :  qm_resolution_(new VCMQmResolution()),
+         content_metrics_(new VideoContentMetrics()),
+         qm_scale_(NULL) {
+  }
+  VCMQmResolution* qm_resolution_;
+  VideoContentMetrics* content_metrics_;
+  VCMResolutionScale* qm_scale_;
+
+  void InitQmNativeData(float initial_bit_rate,
+                        int user_frame_rate,
+                        int native_width,
+                        int native_height,
+                        int num_layers);
+
+  void UpdateQmEncodedFrame(int* encoded_size, int num_updates);
+
+  void UpdateQmRateData(int* target_rate,
+                        int* encoder_sent_rate,
+                        int* incoming_frame_rate,
+                        uint8_t* fraction_lost,
+                        int num_updates);
+
+  void UpdateQmContentData(float motion_metric,
+                           float spatial_metric,
+                           float spatial_metric_horiz,
+                           float spatial_metric_vert);
+
+  bool IsSelectedActionCorrect(VCMResolutionScale* qm_scale,
+                               float fac_width,
+                               float fac_height,
+                               float fac_temp,
+                               uint16_t new_width,
+                               uint16_t new_height,
+                               float new_frame_rate);
+
+  void TearDown() {
+    delete qm_resolution_;
+    delete content_metrics_;
+  }
+};
+
+TEST_F(QmSelectTest, HandleInputs) {
+  // Expect parameter error. Initialize with invalid inputs.
+  EXPECT_EQ(-4, qm_resolution_->Initialize(1000, 0, 640, 480, 1));
+  EXPECT_EQ(-4, qm_resolution_->Initialize(1000, 30, 640, 0, 1));
+  EXPECT_EQ(-4, qm_resolution_->Initialize(1000, 30, 0, 480, 1));
+
+  // Expect uninitialized error.: No valid initialization before selection.
+  EXPECT_EQ(-7, qm_resolution_->SelectResolution(&qm_scale_));
+
+  VideoContentMetrics* content_metrics = NULL;
+  EXPECT_EQ(0, qm_resolution_->Initialize(1000, 30, 640, 480, 1));
+  qm_resolution_->UpdateContent(content_metrics);
+  // Content metrics are NULL: Expect success and no down-sampling action.
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0, 1.0, 1.0, 640, 480,
+                                      30.0f));
+}
+
+// TODO(marpan): Add a test for number of temporal layers > 1.
+
+// No down-sampling action at high rates.
+TEST_F(QmSelectTest, NoActionHighRate) {
+  // Initialize with bitrate, frame rate, native system width/height, and
+  // number of temporal layers.
+  InitQmNativeData(800, 30, 640, 480, 1);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+  EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {800, 800, 800};
+  int encoder_sent_rate[] = {800, 800, 800};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  UpdateQmContentData(kTemporalLow, kSpatialLow, kSpatialLow, kSpatialLow);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(0, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480,
+                                      30.0f));
+}
+
+// Rate is well below transition, down-sampling action is taken,
+// depending on the content state.
+TEST_F(QmSelectTest, DownActionLowRate) {
+  // Initialize with bitrate, frame rate, native system width/height, and
+  // number of temporal layers.
+  InitQmNativeData(50, 30, 640, 480, 1);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+  EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {50, 50, 50};
+  int encoder_sent_rate[] = {50, 50, 50};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, low spatial: 2x2 spatial expected.
+  UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
+                                      30.0f));
+
+  qm_resolution_->ResetDownSamplingState();
+  // Low motion, low spatial: 2/3 temporal is expected.
+  UpdateQmContentData(kTemporalLow, kSpatialLow, kSpatialLow, kSpatialLow);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(0, qm_resolution_->ComputeContentClass());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480,
+                                      20.5f));
+
+  qm_resolution_->ResetDownSamplingState();
+  // Medium motion, low spatial: 2x2 spatial expected.
+  UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
+                                      30.0f));
+
+  qm_resolution_->ResetDownSamplingState();
+  // High motion, high spatial: 2/3 temporal expected.
+  UpdateQmContentData(kTemporalHigh, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(4, qm_resolution_->ComputeContentClass());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480,
+                                      20.5f));
+
+  qm_resolution_->ResetDownSamplingState();
+  // Low motion, high spatial: 1/2 temporal expected.
+  UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480,
+                                      15.5f));
+
+  qm_resolution_->ResetDownSamplingState();
+  // Medium motion, high spatial: 1/2 temporal expected.
+  UpdateQmContentData(kTemporalMedium, kSpatialHigh, kSpatialHigh,
+                      kSpatialHigh);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(7, qm_resolution_->ComputeContentClass());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480,
+                                      15.5f));
+
+  qm_resolution_->ResetDownSamplingState();
+  // High motion, medium spatial: 2x2 spatial expected.
+  UpdateQmContentData(kTemporalHigh, kSpatialMedium, kSpatialMedium,
+                      kSpatialMedium);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(5, qm_resolution_->ComputeContentClass());
+  // Target frame rate for frame dropper should be the same as previous == 15.
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
+                                      30.0f));
+
+  qm_resolution_->ResetDownSamplingState();
+  // Low motion, medium spatial: high frame rate, so 1/2 temporal expected.
+  UpdateQmContentData(kTemporalLow, kSpatialMedium, kSpatialMedium,
+                      kSpatialMedium);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(2, qm_resolution_->ComputeContentClass());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480,
+                                      15.5f));
+
+  qm_resolution_->ResetDownSamplingState();
+  // Medium motion, medium spatial: high frame rate, so 2/3 temporal expected.
+  UpdateQmContentData(kTemporalMedium, kSpatialMedium, kSpatialMedium,
+                      kSpatialMedium);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(8, qm_resolution_->ComputeContentClass());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480,
+                                      20.5f));
+}
+
+// Rate mis-match is high, and we have over-shooting.
+// since target rate is below max for down-sampling, down-sampling is selected.
+TEST_F(QmSelectTest, DownActionHighRateMMOvershoot) {
+  // Initialize with bitrate, frame rate, native system width/height, and
+  // number of temporal layers.
+  InitQmNativeData(300, 30, 640, 480, 1);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+  EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {300, 300, 300};
+  int encoder_sent_rate[] = {900, 900, 900};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, low spatial.
+  UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f,
+                                      1.0f, 480, 360, 30.0f));
+
+  qm_resolution_->ResetDownSamplingState();
+  // Low motion, high spatial
+  UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480,
+                                      20.5f));
+}
+
+// Rate mis-match is high, target rate is below max for down-sampling,
+// but since we have consistent under-shooting, no down-sampling action.
+TEST_F(QmSelectTest, NoActionHighRateMMUndershoot) {
+  // Initialize with bitrate, frame rate, native system width/height, and
+  // number of temporal layers.
+  InitQmNativeData(300, 30, 640, 480, 1);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+  EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {300, 300, 300};
+  int encoder_sent_rate[] = {100, 100, 100};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, low spatial.
+  UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kEasyEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480,
+                                      30.0f));
+
+  qm_resolution_->ResetDownSamplingState();
+  // Low motion, high spatial
+  UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480,
+                                      30.0f));
+}
+
+// Buffer is underflowing, and target rate is below max for down-sampling,
+// so action is taken.
+TEST_F(QmSelectTest, DownActionBufferUnderflow) {
+  // Initialize with bitrate, frame rate, native system width/height, and
+  // number of temporal layers.
+  InitQmNativeData(300, 30, 640, 480, 1);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+  EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update with encoded size over a number of frames.
+  // per-frame bandwidth = 15 = 450/30: simulate (decoder) buffer underflow:
+  int encoded_size[] = {200, 100, 50, 30, 60, 40, 20, 30, 20, 40};
+  UpdateQmEncodedFrame(encoded_size, 10);
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {300, 300, 300};
+  int encoder_sent_rate[] = {450, 450, 450};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, low spatial.
+  UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f,
+                                      1.0f, 480, 360, 30.0f));
+
+  qm_resolution_->ResetDownSamplingState();
+  // Low motion, high spatial
+  UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 640, 480,
+                                      20.5f));
+}
+
+// Target rate is below max for down-sampling, but buffer level is stable,
+// so no action is taken.
+TEST_F(QmSelectTest, NoActionBufferStable) {
+  // Initialize with bitrate, frame rate, native system width/height, and
+  // number of temporal layers.
+  InitQmNativeData(350, 30, 640, 480, 1);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+  EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update with encoded size over a number of frames.
+  // per-frame bandwidth = 15 = 450/30: simulate stable (decoder) buffer levels.
+  int32_t encoded_size[] = {40, 10, 10, 16, 18, 20, 17, 20, 16, 15};
+  UpdateQmEncodedFrame(encoded_size, 10);
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {350, 350, 350};
+  int encoder_sent_rate[] = {350, 450, 450};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, low spatial.
+  UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480,
+                                      30.0f));
+
+  qm_resolution_->ResetDownSamplingState();
+  // Low motion, high spatial
+  UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480,
+                                      30.0f));
+}
+
+// Very low rate, but no spatial down-sampling below some size (QCIF).
+TEST_F(QmSelectTest, LimitDownSpatialAction) {
+  // Initialize with bitrate, frame rate, native system width/height, and
+  // number of temporal layers.
+  InitQmNativeData(10, 30, 176, 144, 1);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 176;
+  uint16_t codec_height = 144;
+  qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+  EXPECT_EQ(0, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {10, 10, 10};
+  int encoder_sent_rate[] = {10, 10, 10};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, low spatial.
+  UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 176, 144,
+                                      30.0f));
+}
+
+// Very low rate, but no frame reduction below some frame_rate (8fps).
+TEST_F(QmSelectTest, LimitDownTemporalAction) {
+  // Initialize with bitrate, frame rate, native system width/height, and
+  // number of temporal layers.
+  InitQmNativeData(10, 8, 640, 480, 1);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecParameters(8.0f, codec_width, codec_height);
+  EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {10, 10, 10};
+  int encoder_sent_rate[] = {10, 10, 10};
+  int incoming_frame_rate[] = {8, 8, 8};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // Low motion, medium spatial.
+  UpdateQmContentData(kTemporalLow, kSpatialMedium, kSpatialMedium,
+                      kSpatialMedium);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(2, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480,
+                                      8.0f));
+}
+
+// Two stages: spatial down-sample and then back up spatially,
+// as rate as increased.
+TEST_F(QmSelectTest, 2StageDownSpatialUpSpatial) {
+  // Initialize with bitrate, frame rate, native system width/height, and
+  // number of temporal layers.
+  InitQmNativeData(50, 30, 640, 480, 1);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+  EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {50, 50, 50};
+  int encoder_sent_rate[] = {50, 50, 50};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                    fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, low spatial.
+  UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
+                                      30.0f));
+
+  // Reset and go up in rate: expected to go back up, in 2 stages of 3/4.
+  qm_resolution_->ResetRates();
+  qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
+  EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
+  // Update rates for a sequence of intervals.
+  int target_rate2[] = {400, 400, 400, 400, 400};
+  int encoder_sent_rate2[] = {400, 400, 400, 400, 400};
+  int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
+  uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+                   fraction_lost2, 5);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  float scale = (4.0f / 3.0f) / 2.0f;
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, scale, scale, 1.0f, 480, 360,
+                                      30.0f));
+
+  qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
+  EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
+                                      640, 480, 30.0f));
+}
+
+// Two stages: spatial down-sample and then back up spatially, since encoder
+// is under-shooting target even though rate has not increased much.
+TEST_F(QmSelectTest, 2StageDownSpatialUpSpatialUndershoot) {
+  // Initialize with bitrate, frame rate, native system width/height, and
+  // number of temporal layers.
+  InitQmNativeData(50, 30, 640, 480, 1);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+  EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {50, 50, 50};
+  int encoder_sent_rate[] = {50, 50, 50};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                    fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, low spatial.
+  UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
+                                      30.0f));
+
+  // Reset rates and simulate under-shooting scenario.: expect to go back up.
+  // Goes up spatially in two stages for 1/2x1/2 down-sampling.
+  qm_resolution_->ResetRates();
+  qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
+  EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
+  // Update rates for a sequence of intervals.
+  int target_rate2[] = {200, 200, 200, 200, 200};
+  int encoder_sent_rate2[] = {50, 50, 50, 50, 50};
+  int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
+  uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+                   fraction_lost2, 5);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(kEasyEncoding, qm_resolution_->GetEncoderState());
+  float scale = (4.0f / 3.0f) / 2.0f;
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, scale, scale, 1.0f, 480, 360,
+                                      30.0f));
+
+  qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
+  EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
+                                      640, 480, 30.0f));
+}
+
+// Two stages: spatial down-sample and then no action to go up,
+// as encoding rate mis-match is too high.
+TEST_F(QmSelectTest, 2StageDownSpatialNoActionUp) {
+  // Initialize with bitrate, frame rate, native system width/height, and
+  // number of temporal layers.
+  InitQmNativeData(50, 30, 640, 480, 1);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+  EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {50, 50, 50};
+  int encoder_sent_rate[] = {50, 50, 50};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                    fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, low spatial.
+  UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
+                                      30.0f));
+
+  // Reset and simulate large rate mis-match: expect no action to go back up.
+  qm_resolution_->ResetRates();
+  qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
+  EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
+  // Update rates for a sequence of intervals.
+  int target_rate2[] = {400, 400, 400, 400, 400};
+  int encoder_sent_rate2[] = {1000, 1000, 1000, 1000, 1000};
+  int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
+  uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+                   fraction_lost2, 5);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 320, 240,
+                                      30.0f));
+}
+
+// Two stages: temporally down-sample and then back up temporally,
+// as rate as increased.
+TEST_F(QmSelectTest, 2StatgeDownTemporalUpTemporal) {
+  // Initialize with bitrate, frame rate, native system width/height, and
+  // number of temporal layers.
+  InitQmNativeData(50, 30, 640, 480, 1);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+  EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {50, 50, 50};
+  int encoder_sent_rate[] = {50, 50, 50};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // Low motion, high spatial.
+  UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480,
+                                      15.5f));
+
+  // Reset rates and go up in rate: expect to go back up.
+  qm_resolution_->ResetRates();
+  // Update rates for a sequence of intervals.
+  int target_rate2[] = {400, 400, 400, 400, 400};
+  int encoder_sent_rate2[] = {400, 400, 400, 400, 400};
+  int incoming_frame_rate2[] = {15, 15, 15, 15, 15};
+  uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+                   fraction_lost2, 5);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 0.5f, 640, 480,
+                                      30.0f));
+}
+
+// Two stages: temporal down-sample and then back up temporally, since encoder
+// is under-shooting target even though rate has not increased much.
+TEST_F(QmSelectTest, 2StatgeDownTemporalUpTemporalUndershoot) {
+  // Initialize with bitrate, frame rate, native system width/height, and
+  // number of temporal layers.
+  InitQmNativeData(50, 30, 640, 480, 1);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+  EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {50, 50, 50};
+  int encoder_sent_rate[] = {50, 50, 50};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                    fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // Low motion, high spatial.
+  UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f, 640, 480,
+                                      15.5f));
+
+  // Reset rates and simulate under-shooting scenario.: expect to go back up.
+  qm_resolution_->ResetRates();
+  // Update rates for a sequence of intervals.
+  int target_rate2[] = {150, 150, 150, 150, 150};
+  int encoder_sent_rate2[] = {50, 50, 50, 50, 50};
+  int incoming_frame_rate2[] = {15, 15, 15, 15, 15};
+  uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+                   fraction_lost2, 5);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(kEasyEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 0.5f, 640, 480,
+                                      30.0f));
+}
+
+// Two stages: temporal down-sample and then no action to go up,
+// as encoding rate mis-match is too high.
+TEST_F(QmSelectTest, 2StageDownTemporalNoActionUp) {
+  // Initialize with bitrate, frame rate, native system width/height, and
+  // number of temporal layers.
+  InitQmNativeData(50, 30, 640, 480, 1);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+  EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {50, 50, 50};
+  int encoder_sent_rate[] = {50, 50, 50};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // Low motion, high spatial.
+  UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1, 1, 2, 640, 480, 15.5f));
+
+  // Reset and simulate large rate mis-match: expect no action to go back up.
+  qm_resolution_->UpdateCodecParameters(15.0f, codec_width, codec_height);
+  qm_resolution_->ResetRates();
+  // Update rates for a sequence of intervals.
+  int target_rate2[] = {600, 600, 600, 600, 600};
+  int encoder_sent_rate2[] = {1000, 1000, 1000, 1000, 1000};
+  int incoming_frame_rate2[] = {15, 15, 15, 15, 15};
+  uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+                   fraction_lost2, 5);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(kStressedEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 640, 480,
+                                      15.0f));
+}
+// 3 stages: spatial down-sample, followed by temporal down-sample,
+// and then go up to full state, as encoding rate has increased.
+TEST_F(QmSelectTest, 3StageDownSpatialTemporlaUpSpatialTemporal) {
+  // Initialize with bitrate, frame rate, native system width/height, and
+  // number of temporal layers.
+  InitQmNativeData(80, 30, 640, 480, 1);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+  EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {80, 80, 80};
+  int encoder_sent_rate[] = {80, 80, 80};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, low spatial.
+  UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
+                                      30.0f));
+
+  // Change content data: expect temporal down-sample.
+  qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
+  EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
+
+  // Reset rates and go lower in rate.
+  qm_resolution_->ResetRates();
+  int target_rate2[] = {40, 40, 40, 40, 40};
+  int encoder_sent_rate2[] = {40, 40, 40, 40, 40};
+  int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
+  uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+                    fraction_lost2, 5);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // Low motion, high spatial.
+  UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 320, 240,
+                                      20.5f));
+
+  // Reset rates and go high up in rate: expect to go back up both spatial
+  // and temporally. The 1/2x1/2 spatial is undone in two stages.
+  qm_resolution_->ResetRates();
+  // Update rates for a sequence of intervals.
+  int target_rate3[] = {1000, 1000, 1000, 1000, 1000};
+  int encoder_sent_rate3[] = {1000, 1000, 1000, 1000, 1000};
+  int incoming_frame_rate3[] = {20, 20, 20, 20, 20};
+  uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
+                   fraction_lost3, 5);
+
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  float scale = (4.0f / 3.0f) / 2.0f;
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, scale, scale, 2.0f / 3.0f,
+                                      480, 360, 30.0f));
+
+  qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
+  EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
+                                      640, 480, 30.0f));
+}
+
+// No down-sampling below some total amount.
+TEST_F(QmSelectTest, NoActionTooMuchDownSampling) {
+  // Initialize with bitrate, frame rate, native system width/height, and
+  // number of temporal layers.
+  InitQmNativeData(150, 30, 1280, 720, 1);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 1280;
+  uint16_t codec_height = 720;
+  qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+  EXPECT_EQ(7, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {150, 150, 150};
+  int encoder_sent_rate[] = {150, 150, 150};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, low spatial.
+  UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 640, 360,
+              30.0f));
+
+  // Reset and lower rates to get another spatial action (3/4x3/4).
+  // Lower the frame rate for spatial to be selected again.
+  qm_resolution_->ResetRates();
+  qm_resolution_->UpdateCodecParameters(10.0f, 640, 360);
+  EXPECT_EQ(4, qm_resolution_->GetImageType(640, 360));
+  // Update rates for a sequence of intervals.
+  int target_rate2[] = {70, 70, 70, 70, 70};
+  int encoder_sent_rate2[] = {70, 70, 70, 70, 70};
+  int incoming_frame_rate2[] = {10, 10, 10, 10, 10};
+  uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+                   fraction_lost2, 5);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, medium spatial.
+  UpdateQmContentData(kTemporalHigh, kSpatialMedium, kSpatialMedium,
+                      kSpatialMedium);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(5, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f,
+                                      1.0f, 480, 270, 10.0f));
+
+  // Reset and go to very low rate: no action should be taken,
+  // we went down too much already.
+  qm_resolution_->ResetRates();
+  qm_resolution_->UpdateCodecParameters(10.0f, 480, 270);
+  EXPECT_EQ(3, qm_resolution_->GetImageType(480, 270));
+  // Update rates for a sequence of intervals.
+  int target_rate3[] = {10, 10, 10, 10, 10};
+  int encoder_sent_rate3[] = {10, 10, 10, 10, 10};
+  int incoming_frame_rate3[] = {10, 10, 10, 10, 10};
+  uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
+                   fraction_lost3, 5);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(5, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.0f, 480, 270,
+                                      10.0f));
+}
+
+// Multiple down-sampling stages and then undo all of them.
+// Spatial down-sample 3/4x3/4, followed by temporal down-sample 2/3,
+// followed by spatial 3/4x3/4. Then go up to full state,
+// as encoding rate has increased.
+TEST_F(QmSelectTest, MultipleStagesCheckActionHistory1) {
+  // Initialize with bitrate, frame rate, native system width/height, and
+  // number of temporal layers.
+  InitQmNativeData(150, 30, 640, 480, 1);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+  EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Go down spatial 3/4x3/4.
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {150, 150, 150};
+  int encoder_sent_rate[] = {150, 150, 150};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // Medium motion, low spatial.
+  UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f,
+                                      1.0f, 480, 360, 30.0f));
+  // Go down 2/3 temporal.
+  qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
+  EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
+  qm_resolution_->ResetRates();
+  int target_rate2[] = {100, 100, 100, 100, 100};
+  int encoder_sent_rate2[] = {100, 100, 100, 100, 100};
+  int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
+  uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+                   fraction_lost2, 5);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // Low motion, high spatial.
+  UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 480, 360,
+                                      20.5f));
+
+  // Go down 3/4x3/4 spatial:
+  qm_resolution_->UpdateCodecParameters(20.0f, 480, 360);
+  qm_resolution_->ResetRates();
+  int target_rate3[] = {80, 80, 80, 80, 80};
+  int encoder_sent_rate3[] = {80, 80, 80, 80, 80};
+  int incoming_frame_rate3[] = {20, 20, 20, 20, 20};
+  uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
+                    fraction_lost3, 5);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // High motion, low spatial.
+  UpdateQmContentData(kTemporalHigh, kSpatialLow, kSpatialLow, kSpatialLow);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  // The two spatial actions of 3/4x3/4 are converted to 1/2x1/2,
+  // so scale factor is 2.0.
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
+                                      20.0f));
+
+  // Reset rates and go high up in rate: expect to go up:
+  // 1/2x1x2 spatial and 1/2 temporally.
+
+  // Go up 1/2x1/2 spatially and 1/2 temporally. Spatial is done in 2 stages.
+  qm_resolution_->UpdateCodecParameters(15.0f, 320, 240);
+  EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
+  qm_resolution_->ResetRates();
+  // Update rates for a sequence of intervals.
+  int target_rate4[] = {1000, 1000, 1000, 1000, 1000};
+  int encoder_sent_rate4[] = {1000, 1000, 1000, 1000, 1000};
+  int incoming_frame_rate4[] = {15, 15, 15, 15, 15};
+  uint8_t fraction_lost4[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate4, encoder_sent_rate4, incoming_frame_rate4,
+                   fraction_lost4, 5);
+
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(3, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  float scale = (4.0f / 3.0f) / 2.0f;
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, scale, scale, 2.0f / 3.0f, 480,
+                                      360, 30.0f));
+
+  qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
+  EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
+                                      640, 480, 30.0f));
+}
+
+// Multiple down-sampling and up-sample stages, with partial undoing.
+// Spatial down-sample 1/2x1/2, followed by temporal down-sample 2/3, undo the
+// temporal, then another temporal, and then undo both spatial and temporal.
+TEST_F(QmSelectTest, MultipleStagesCheckActionHistory2) {
+  // Initialize with bitrate, frame rate, native system width/height, and
+  // number of temporal layers.
+  InitQmNativeData(80, 30, 640, 480, 1);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+  EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Go down 1/2x1/2 spatial.
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {80, 80, 80};
+  int encoder_sent_rate[] = {80, 80, 80};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // Medium motion, low spatial.
+  UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
+                                      30.0f));
+
+  // Go down 2/3 temporal.
+  qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
+  EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
+  qm_resolution_->ResetRates();
+  int target_rate2[] = {40, 40, 40, 40, 40};
+  int encoder_sent_rate2[] = {40, 40, 40, 40, 40};
+  int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
+  uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+                   fraction_lost2, 5);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // Medium motion, high spatial.
+  UpdateQmContentData(kTemporalMedium, kSpatialHigh, kSpatialHigh,
+                      kSpatialHigh);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(7, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 320, 240,
+                                      20.5f));
+
+  // Go up 2/3 temporally.
+  qm_resolution_->UpdateCodecParameters(20.0f, 320, 240);
+  qm_resolution_->ResetRates();
+  // Update rates for a sequence of intervals.
+  int target_rate3[] = {150, 150, 150, 150, 150};
+  int encoder_sent_rate3[] = {150, 150, 150, 150, 150};
+  int incoming_frame_rate3[] = {20, 20, 20, 20, 20};
+  uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
+                   fraction_lost3, 5);
+
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(7, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f / 3.0f, 320,
+                                      240, 30.0f));
+
+  // Go down 2/3 temporal.
+  qm_resolution_->UpdateCodecParameters(30.0f, 320, 240);
+  EXPECT_EQ(2, qm_resolution_->GetImageType(320, 240));
+  qm_resolution_->ResetRates();
+  int target_rate4[] = {40, 40, 40, 40, 40};
+  int encoder_sent_rate4[] = {40, 40, 40, 40, 40};
+  int incoming_frame_rate4[] = {30, 30, 30, 30, 30};
+  uint8_t fraction_lost4[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate4, encoder_sent_rate4, incoming_frame_rate4,
+                   fraction_lost4, 5);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // Low motion, high spatial.
+  UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 320, 240,
+                                      20.5f));
+
+  // Go up spatial and temporal. Spatial undoing is done in 2 stages.
+  qm_resolution_->UpdateCodecParameters(20.5f, 320, 240);
+  qm_resolution_->ResetRates();
+  // Update rates for a sequence of intervals.
+  int target_rate5[] = {1000, 1000, 1000, 1000, 1000};
+  int encoder_sent_rate5[] = {1000, 1000, 1000, 1000, 1000};
+  int incoming_frame_rate5[] = {20, 20, 20, 20, 20};
+  uint8_t fraction_lost5[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate5, encoder_sent_rate5, incoming_frame_rate5,
+                   fraction_lost5, 5);
+
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  float scale = (4.0f / 3.0f) / 2.0f;
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, scale, scale, 2.0f / 3.0f,
+                                      480, 360, 30.0f));
+
+  qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
+  EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f, 1.0f,
+                                      640, 480, 30.0f));
+}
+
+// Multiple down-sampling and up-sample stages, with partial undoing.
+// Spatial down-sample 3/4x3/4, followed by temporal down-sample 2/3,
+// undo the temporal 2/3, and then undo the spatial.
+TEST_F(QmSelectTest, MultipleStagesCheckActionHistory3) {
+  // Initialize with bitrate, frame rate, native system width/height, and
+  // number of temporal layers.
+  InitQmNativeData(100, 30, 640, 480, 1);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+  EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Go down 3/4x3/4 spatial.
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {100, 100, 100};
+  int encoder_sent_rate[] = {100, 100, 100};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // Medium motion, low spatial.
+  UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f,
+                                      1.0f, 480, 360, 30.0f));
+
+  // Go down 2/3 temporal.
+  qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
+  EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
+  qm_resolution_->ResetRates();
+  int target_rate2[] = {100, 100, 100, 100, 100};
+  int encoder_sent_rate2[] = {100, 100, 100, 100, 100};
+  int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
+  uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+                   fraction_lost2, 5);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // Low motion, high spatial.
+  UpdateQmContentData(kTemporalLow, kSpatialHigh, kSpatialHigh, kSpatialHigh);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 1.5f, 480, 360,
+                                      20.5f));
+
+  // Go up 2/3 temporal.
+  qm_resolution_->UpdateCodecParameters(20.5f, 480, 360);
+  qm_resolution_->ResetRates();
+  // Update rates for a sequence of intervals.
+  int target_rate3[] = {250, 250, 250, 250, 250};
+  int encoder_sent_rate3[] = {250, 250, 250, 250, 250};
+  int incoming_frame_rate3[] = {20, 20, 20, 20, 120};
+  uint8_t fraction_lost3[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate3, encoder_sent_rate3, incoming_frame_rate3,
+                   fraction_lost3, 5);
+
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(1, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 1.0f, 1.0f, 2.0f / 3.0f, 480,
+                                      360, 30.0f));
+
+  // Go up spatial.
+  qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
+  EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
+  qm_resolution_->ResetRates();
+  int target_rate4[] = {500, 500, 500, 500, 500};
+  int encoder_sent_rate4[] = {500, 500, 500, 500, 500};
+  int incoming_frame_rate4[] = {30, 30, 30, 30, 30};
+  uint8_t fraction_lost4[] = {30, 30, 30, 30, 30};
+  UpdateQmRateData(target_rate4, encoder_sent_rate4, incoming_frame_rate4,
+                   fraction_lost4, 5);
+
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 3.0f / 4.0f, 3.0f / 4.0f,
+                                      1.0f, 640, 480, 30.0f));
+}
+
+// Two stages of 3/4x3/4 converted to one stage of 1/2x1/2.
+TEST_F(QmSelectTest, ConvertThreeQuartersToOneHalf) {
+  // Initialize with bitrate, frame rate, native system width/height, and
+  // number of temporal layers.
+  InitQmNativeData(150, 30, 640, 480, 1);
+
+  // Update with encoder frame size.
+  uint16_t codec_width = 640;
+  uint16_t codec_height = 480;
+  qm_resolution_->UpdateCodecParameters(30.0f, codec_width, codec_height);
+  EXPECT_EQ(5, qm_resolution_->GetImageType(codec_width, codec_height));
+
+  // Go down 3/4x3/4 spatial.
+  // Update rates for a sequence of intervals.
+  int target_rate[] = {150, 150, 150};
+  int encoder_sent_rate[] = {150, 150, 150};
+  int incoming_frame_rate[] = {30, 30, 30};
+  uint8_t fraction_lost[] = {10, 10, 10};
+  UpdateQmRateData(target_rate, encoder_sent_rate, incoming_frame_rate,
+                   fraction_lost, 3);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // Medium motion, low spatial.
+  UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 4.0f / 3.0f, 4.0f / 3.0f,
+                                      1.0f, 480, 360, 30.0f));
+
+  // Set rates to go down another 3/4 spatial. Should be converted ton 1/2.
+  qm_resolution_->UpdateCodecParameters(30.0f, 480, 360);
+  EXPECT_EQ(4, qm_resolution_->GetImageType(480, 360));
+  qm_resolution_->ResetRates();
+  int target_rate2[] = {100, 100, 100, 100, 100};
+  int encoder_sent_rate2[] = {100, 100, 100, 100, 100};
+  int incoming_frame_rate2[] = {30, 30, 30, 30, 30};
+  uint8_t fraction_lost2[] = {10, 10, 10, 10, 10};
+  UpdateQmRateData(target_rate2, encoder_sent_rate2, incoming_frame_rate2,
+                   fraction_lost2, 5);
+
+  // Update content: motion level, and 3 spatial prediction errors.
+  // Medium motion, low spatial.
+  UpdateQmContentData(kTemporalMedium, kSpatialLow, kSpatialLow, kSpatialLow);
+  EXPECT_EQ(0, qm_resolution_->SelectResolution(&qm_scale_));
+  EXPECT_EQ(6, qm_resolution_->ComputeContentClass());
+  EXPECT_EQ(kStableEncoding, qm_resolution_->GetEncoderState());
+  EXPECT_TRUE(IsSelectedActionCorrect(qm_scale_, 2.0f, 2.0f, 1.0f, 320, 240,
+                                      30.0f));
+}
+
+void QmSelectTest::InitQmNativeData(float initial_bit_rate,
+                                    int user_frame_rate,
+                                    int native_width,
+                                    int native_height,
+                                    int num_layers) {
+  EXPECT_EQ(0, qm_resolution_->Initialize(initial_bit_rate,
+                                          user_frame_rate,
+                                          native_width,
+                                          native_height,
+                                          num_layers));
+}
+
+void QmSelectTest::UpdateQmContentData(float motion_metric,
+                                       float spatial_metric,
+                                       float spatial_metric_horiz,
+                                       float spatial_metric_vert) {
+  content_metrics_->motion_magnitude = motion_metric;
+  content_metrics_->spatial_pred_err = spatial_metric;
+  content_metrics_->spatial_pred_err_h = spatial_metric_horiz;
+  content_metrics_->spatial_pred_err_v = spatial_metric_vert;
+  qm_resolution_->UpdateContent(content_metrics_);
+}
+
+void QmSelectTest::UpdateQmEncodedFrame(int* encoded_size, int num_updates) {
+  FrameType frame_type = kVideoFrameDelta;
+  for (int i = 0; i < num_updates; ++i) {
+    // Convert to bytes.
+    int32_t encoded_size_update = 1000 * encoded_size[i] / 8;
+    qm_resolution_->UpdateEncodedSize(encoded_size_update, frame_type);
+  }
+}
+
+void QmSelectTest::UpdateQmRateData(int* target_rate,
+                                    int* encoder_sent_rate,
+                                    int* incoming_frame_rate,
+                                    uint8_t* fraction_lost,
+                                    int num_updates) {
+  for (int i = 0; i < num_updates; ++i) {
+    float target_rate_update = target_rate[i];
+    float encoder_sent_rate_update = encoder_sent_rate[i];
+    float incoming_frame_rate_update = incoming_frame_rate[i];
+    uint8_t fraction_lost_update = fraction_lost[i];
+    qm_resolution_->UpdateRates(target_rate_update,
+                                encoder_sent_rate_update,
+                                incoming_frame_rate_update,
+                                fraction_lost_update);
+  }
+}
+
+// Check is the selected action from the QmResolution class is the same
+// as the expected scales from |fac_width|, |fac_height|, |fac_temp|.
+bool QmSelectTest::IsSelectedActionCorrect(VCMResolutionScale* qm_scale,
+                                           float fac_width,
+                                           float fac_height,
+                                           float fac_temp,
+                                           uint16_t new_width,
+                                           uint16_t new_height,
+                                           float new_frame_rate) {
+  if (qm_scale->spatial_width_fact == fac_width &&
+      qm_scale->spatial_height_fact == fac_height &&
+      qm_scale->temporal_fact == fac_temp &&
+      qm_scale->codec_width == new_width &&
+      qm_scale->codec_height == new_height &&
+      qm_scale->frame_rate == new_frame_rate) {
+    return true;
+  } else {
+    return false;
+  }
+}
+}  // namespace webrtc
diff --git a/src/modules/video_coding/main/source/receiver.cc b/src/modules/video_coding/main/source/receiver.cc
new file mode 100644
index 0000000..6be5336
--- /dev/null
+++ b/src/modules/video_coding/main/source/receiver.cc
@@ -0,0 +1,493 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/main/source/receiver.h"
+
+#include <assert.h>
+
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "modules/video_coding/main/source/encoded_frame.h"
+#include "modules/video_coding/main/source/internal_defines.h"
+#include "modules/video_coding/main/source/media_opt_util.h"
+#include "modules/video_coding/main/source/tick_time_base.h"
+#include "system_wrappers/interface/trace.h"
+
+namespace webrtc {
+
+VCMReceiver::VCMReceiver(VCMTiming& timing,
+                         TickTimeBase* clock,
+                         WebRtc_Word32 vcmId,
+                         WebRtc_Word32 receiverId,
+                         bool master)
+    : _critSect(CriticalSectionWrapper::CreateCriticalSection()),
+      _vcmId(vcmId),
+      _clock(clock),
+      _receiverId(receiverId),
+      _master(master),
+      _jitterBuffer(_clock, vcmId, receiverId, master),
+      _timing(timing),
+      _renderWaitEvent(*new VCMEvent()),
+      _state(kPassive) {}
+
+VCMReceiver::~VCMReceiver()
+{
+    _renderWaitEvent.Set();
+    delete &_renderWaitEvent;
+    delete _critSect;
+}
+
+void
+VCMReceiver::Reset()
+{
+    CriticalSectionScoped cs(_critSect);
+    if (!_jitterBuffer.Running())
+    {
+        _jitterBuffer.Start();
+    }
+    else
+    {
+        _jitterBuffer.Flush();
+    }
+    _renderWaitEvent.Reset();
+    if (_master)
+    {
+        _state = kReceiving;
+    }
+    else
+    {
+        _state = kPassive;
+    }
+}
+
+WebRtc_Word32
+VCMReceiver::Initialize()
+{
+    CriticalSectionScoped cs(_critSect);
+    Reset();
+    if (!_master)
+    {
+        SetNackMode(kNoNack);
+    }
+    return VCM_OK;
+}
+
+void VCMReceiver::UpdateRtt(WebRtc_UWord32 rtt)
+{
+    _jitterBuffer.UpdateRtt(rtt);
+}
+
+WebRtc_Word32
+VCMReceiver::InsertPacket(const VCMPacket& packet,
+                          WebRtc_UWord16 frameWidth,
+                          WebRtc_UWord16 frameHeight)
+{
+    // Find an empty frame
+    VCMEncodedFrame *buffer = NULL;
+    const WebRtc_Word32 error = _jitterBuffer.GetFrame(packet, buffer);
+    if (error == VCM_OLD_PACKET_ERROR)
+    {
+        return VCM_OK;
+    }
+    else if (error != VCM_OK)
+    {
+        return error;
+    }
+    assert(buffer);
+    {
+        CriticalSectionScoped cs(_critSect);
+
+        if (frameWidth && frameHeight)
+        {
+            buffer->SetEncodedSize(static_cast<WebRtc_UWord32>(frameWidth),
+                                   static_cast<WebRtc_UWord32>(frameHeight));
+        }
+
+        if (_master)
+        {
+            // Only trace the primary receiver to make it possible
+            // to parse and plot the trace file.
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
+                         VCMId(_vcmId, _receiverId),
+                         "Packet seqNo %u of frame %u at %u",
+                         packet.seqNum, packet.timestamp,
+                         MaskWord64ToUWord32(_clock->MillisecondTimestamp()));
+        }
+
+        const WebRtc_Word64 nowMs = _clock->MillisecondTimestamp();
+
+        WebRtc_Word64 renderTimeMs = _timing.RenderTimeMs(packet.timestamp, nowMs);
+
+        if (renderTimeMs < 0)
+        {
+            // Render time error. Assume that this is due to some change in
+            // the incoming video stream and reset the JB and the timing.
+            _jitterBuffer.Flush();
+            _timing.Reset(_clock->MillisecondTimestamp());
+            return VCM_FLUSH_INDICATOR;
+        }
+        else if (renderTimeMs < nowMs - kMaxVideoDelayMs)
+        {
+            WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
+                "This frame should have been rendered more than %u ms ago."
+                "Flushing jitter buffer and resetting timing.", kMaxVideoDelayMs);
+            _jitterBuffer.Flush();
+            _timing.Reset(_clock->MillisecondTimestamp());
+            return VCM_FLUSH_INDICATOR;
+        }
+        else if (_timing.TargetVideoDelay() > kMaxVideoDelayMs)
+        {
+            WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
+                "More than %u ms target delay. Flushing jitter buffer and resetting timing.",
+                kMaxVideoDelayMs);
+            _jitterBuffer.Flush();
+            _timing.Reset(_clock->MillisecondTimestamp());
+            return VCM_FLUSH_INDICATOR;
+        }
+
+        // First packet received belonging to this frame.
+        if (buffer->Length() == 0)
+        {
+            const WebRtc_Word64 nowMs = _clock->MillisecondTimestamp();
+            if (_master)
+            {
+                // Only trace the primary receiver to make it possible to parse and plot the trace file.
+                WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
+                           "First packet of frame %u at %u", packet.timestamp,
+                           MaskWord64ToUWord32(nowMs));
+            }
+            renderTimeMs = _timing.RenderTimeMs(packet.timestamp, nowMs);
+            if (renderTimeMs >= 0)
+            {
+                buffer->SetRenderTime(renderTimeMs);
+            }
+            else
+            {
+                buffer->SetRenderTime(nowMs);
+            }
+        }
+
+        // Insert packet into the jitter buffer
+        // both media and empty packets
+        const VCMFrameBufferEnum
+        ret = _jitterBuffer.InsertPacket(buffer, packet);
+        if (ret == kFlushIndicator) {
+          return VCM_FLUSH_INDICATOR;
+        } else if (ret < 0) {
+          WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding,
+                       VCMId(_vcmId, _receiverId),
+                       "Error inserting packet seqNo=%u, timeStamp=%u",
+                       packet.seqNum, packet.timestamp);
+          return VCM_JITTER_BUFFER_ERROR;
+        }
+    }
+    return VCM_OK;
+}
+
+VCMEncodedFrame* VCMReceiver::FrameForDecoding(WebRtc_UWord16 maxWaitTimeMs,
+                                               WebRtc_Word64& nextRenderTimeMs,
+                                               bool renderTiming,
+                                               VCMReceiver* dualReceiver)
+{
+    // No need to enter the critical section here since the jitter buffer
+    // is thread-safe.
+    FrameType incomingFrameType = kVideoFrameDelta;
+    nextRenderTimeMs = -1;
+    const WebRtc_Word64 startTimeMs = _clock->MillisecondTimestamp();
+    WebRtc_Word64 ret = _jitterBuffer.GetNextTimeStamp(maxWaitTimeMs,
+                                                       incomingFrameType,
+                                                       nextRenderTimeMs);
+    if (ret < 0)
+    {
+        // No timestamp in jitter buffer at the moment
+        return NULL;
+    }
+    const WebRtc_UWord32 timeStamp = static_cast<WebRtc_UWord32>(ret);
+
+    // Update the timing
+    _timing.SetRequiredDelay(_jitterBuffer.GetEstimatedJitterMS());
+    _timing.UpdateCurrentDelay(timeStamp);
+
+    const WebRtc_Word32 tempWaitTime = maxWaitTimeMs -
+            static_cast<WebRtc_Word32>(_clock->MillisecondTimestamp() - startTimeMs);
+    WebRtc_UWord16 newMaxWaitTime = static_cast<WebRtc_UWord16>(VCM_MAX(tempWaitTime, 0));
+
+    VCMEncodedFrame* frame = NULL;
+
+    if (renderTiming)
+    {
+        frame = FrameForDecoding(newMaxWaitTime, nextRenderTimeMs, dualReceiver);
+    }
+    else
+    {
+        frame = FrameForRendering(newMaxWaitTime, nextRenderTimeMs, dualReceiver);
+    }
+
+    if (frame != NULL)
+    {
+        bool retransmitted = false;
+        const WebRtc_Word64 lastPacketTimeMs =
+                _jitterBuffer.LastPacketTime(frame, retransmitted);
+        if (lastPacketTimeMs >= 0 && !retransmitted)
+        {
+            // We don't want to include timestamps which have suffered from retransmission
+            // here, since we compensate with extra retransmission delay within
+            // the jitter estimate.
+            _timing.IncomingTimestamp(timeStamp, lastPacketTimeMs);
+        }
+        if (dualReceiver != NULL)
+        {
+            dualReceiver->UpdateState(*frame);
+        }
+    }
+    return frame;
+}
+
+VCMEncodedFrame*
+VCMReceiver::FrameForDecoding(WebRtc_UWord16 maxWaitTimeMs,
+                              WebRtc_Word64 nextRenderTimeMs,
+                              VCMReceiver* dualReceiver)
+{
+    // How long can we wait until we must decode the next frame
+    WebRtc_UWord32 waitTimeMs = _timing.MaxWaitingTime(nextRenderTimeMs,
+                                          _clock->MillisecondTimestamp());
+
+    // Try to get a complete frame from the jitter buffer
+    VCMEncodedFrame* frame = _jitterBuffer.GetCompleteFrameForDecoding(0);
+
+    if (frame == NULL && maxWaitTimeMs == 0 && waitTimeMs > 0)
+    {
+        // If we're not allowed to wait for frames to get complete we must
+        // calculate if it's time to decode, and if it's not we will just return
+        // for now.
+        return NULL;
+    }
+
+    if (frame == NULL && VCM_MIN(waitTimeMs, maxWaitTimeMs) == 0)
+    {
+        // No time to wait for a complete frame,
+        // check if we have an incomplete
+        const bool dualReceiverEnabledAndPassive = (dualReceiver != NULL &&
+                                     dualReceiver->State() == kPassive &&
+                                     dualReceiver->NackMode() == kNackInfinite);
+        if (dualReceiverEnabledAndPassive &&
+            !_jitterBuffer.CompleteSequenceWithNextFrame())
+        {
+            // Jitter buffer state might get corrupt with this frame.
+            dualReceiver->CopyJitterBufferStateFromReceiver(*this);
+            frame = _jitterBuffer.GetFrameForDecoding();
+            assert(frame);
+        } else {
+            frame = _jitterBuffer.GetFrameForDecoding();
+        }
+    }
+    if (frame == NULL)
+    {
+        // Wait for a complete frame
+        frame = _jitterBuffer.GetCompleteFrameForDecoding(maxWaitTimeMs);
+    }
+    if (frame == NULL)
+    {
+        // Get an incomplete frame
+        if (_timing.MaxWaitingTime(nextRenderTimeMs,
+                                   _clock->MillisecondTimestamp()) > 0)
+        {
+            // Still time to wait for a complete frame
+            return NULL;
+        }
+
+        // No time left to wait, we must decode this frame now.
+        const bool dualReceiverEnabledAndPassive = (dualReceiver != NULL &&
+                                     dualReceiver->State() == kPassive &&
+                                     dualReceiver->NackMode() == kNackInfinite);
+        if (dualReceiverEnabledAndPassive &&
+            !_jitterBuffer.CompleteSequenceWithNextFrame())
+        {
+            // Jitter buffer state might get corrupt with this frame.
+            dualReceiver->CopyJitterBufferStateFromReceiver(*this);
+        }
+
+        frame = _jitterBuffer.GetFrameForDecoding();
+    }
+    return frame;
+}
+
+VCMEncodedFrame*
+VCMReceiver::FrameForRendering(WebRtc_UWord16 maxWaitTimeMs,
+                               WebRtc_Word64 nextRenderTimeMs,
+                               VCMReceiver* dualReceiver)
+{
+    // How long MUST we wait until we must decode the next frame. This is different for the case
+    // where we have a renderer which can render at a specified time. Here we must wait as long
+    // as possible before giving the frame to the decoder, which will render the frame as soon
+    // as it has been decoded.
+    WebRtc_UWord32 waitTimeMs = _timing.MaxWaitingTime(nextRenderTimeMs,
+                                                       _clock->MillisecondTimestamp());
+    if (maxWaitTimeMs < waitTimeMs)
+    {
+        // If we're not allowed to wait until the frame is supposed to be rendered
+        // we will have to return NULL for now.
+        return NULL;
+    }
+    // Wait until it's time to render
+    _renderWaitEvent.Wait(waitTimeMs);
+
+    // Get a complete frame if possible
+    VCMEncodedFrame* frame = _jitterBuffer.GetCompleteFrameForDecoding(0);
+
+    if (frame == NULL)
+    {
+        // Get an incomplete frame
+        const bool dualReceiverEnabledAndPassive = dualReceiver != NULL &&
+                                                   dualReceiver->State() == kPassive &&
+                                                   dualReceiver->NackMode() == kNackInfinite;
+        if (dualReceiverEnabledAndPassive && !_jitterBuffer.CompleteSequenceWithNextFrame())
+        {
+            // Jitter buffer state might get corrupt with this frame.
+            dualReceiver->CopyJitterBufferStateFromReceiver(*this);
+        }
+
+        frame = _jitterBuffer.GetFrameForDecoding();
+    }
+    return frame;
+}
+
+void
+VCMReceiver::ReleaseFrame(VCMEncodedFrame* frame)
+{
+    _jitterBuffer.ReleaseFrame(frame);
+}
+
+WebRtc_Word32
+VCMReceiver::ReceiveStatistics(WebRtc_UWord32& bitRate, WebRtc_UWord32& frameRate)
+{
+    const WebRtc_Word32 ret = _jitterBuffer.GetUpdate(frameRate, bitRate);
+    bitRate /= 1000; // Should be in kbps
+    return ret;
+}
+
+WebRtc_Word32
+VCMReceiver::ReceivedFrameCount(VCMFrameCount& frameCount) const
+{
+    return _jitterBuffer.GetFrameStatistics(frameCount.numDeltaFrames,
+                                            frameCount.numKeyFrames);
+}
+
+WebRtc_UWord32 VCMReceiver::DiscardedPackets() const {
+  return _jitterBuffer.DiscardedPackets();
+}
+
+void
+VCMReceiver::SetNackMode(VCMNackMode nackMode)
+{
+    CriticalSectionScoped cs(_critSect);
+    // Default to always having NACK enabled in hybrid mode.
+    _jitterBuffer.SetNackMode(nackMode, kLowRttNackMs, -1);
+    if (!_master)
+    {
+        _state = kPassive; // The dual decoder defaults to passive
+    }
+}
+
+VCMNackMode
+VCMReceiver::NackMode() const
+{
+    CriticalSectionScoped cs(_critSect);
+    return _jitterBuffer.GetNackMode();
+}
+
+VCMNackStatus
+VCMReceiver::NackList(WebRtc_UWord16* nackList, WebRtc_UWord16& size)
+{
+    bool extended = false;
+    WebRtc_UWord16 nackListSize = 0;
+    WebRtc_UWord16* internalNackList = _jitterBuffer.GetNackList(nackListSize, extended);
+    if (internalNackList == NULL && nackListSize == 0xffff)
+    {
+        // This combination is used to trigger key frame requests.
+        size = 0;
+        return kNackKeyFrameRequest;
+    }
+    if (nackListSize > size)
+    {
+        size = nackListSize;
+        return kNackNeedMoreMemory;
+    }
+    if (internalNackList != NULL && nackListSize > 0) {
+      memcpy(nackList, internalNackList, nackListSize * sizeof(WebRtc_UWord16));
+    }
+    size = nackListSize;
+    return kNackOk;
+}
+
+// Decide whether we should change decoder state. This should be done if the dual decoder
+// has caught up with the decoder decoding with packet losses.
+bool
+VCMReceiver::DualDecoderCaughtUp(VCMEncodedFrame* dualFrame, VCMReceiver& dualReceiver) const
+{
+    if (dualFrame == NULL)
+    {
+        return false;
+    }
+    if (_jitterBuffer.LastDecodedTimestamp() == dualFrame->TimeStamp())
+    {
+        dualReceiver.UpdateState(kWaitForPrimaryDecode);
+        return true;
+    }
+    return false;
+}
+
+void
+VCMReceiver::CopyJitterBufferStateFromReceiver(const VCMReceiver& receiver)
+{
+    _jitterBuffer.CopyFrom(receiver._jitterBuffer);
+}
+
+VCMReceiverState
+VCMReceiver::State() const
+{
+    CriticalSectionScoped cs(_critSect);
+    return _state;
+}
+
+void
+VCMReceiver::UpdateState(VCMReceiverState newState)
+{
+    CriticalSectionScoped cs(_critSect);
+    assert(!(_state == kPassive && newState == kWaitForPrimaryDecode));
+//    assert(!(_state == kReceiving && newState == kPassive));
+    _state = newState;
+}
+
+void
+VCMReceiver::UpdateState(VCMEncodedFrame& frame)
+{
+    if (_jitterBuffer.GetNackMode() == kNoNack)
+    {
+        // Dual decoder mode has not been enabled.
+        return;
+    }
+    // Update the dual receiver state
+    if (frame.Complete() && frame.FrameType() == kVideoFrameKey)
+    {
+        UpdateState(kPassive);
+    }
+    if (State() == kWaitForPrimaryDecode &&
+        frame.Complete() && !frame.MissingFrame())
+    {
+        UpdateState(kPassive);
+    }
+    if (frame.MissingFrame() || !frame.Complete())
+    {
+        // State was corrupted, enable dual receiver.
+        UpdateState(kReceiving);
+    }
+}
+
+}
diff --git a/src/modules/video_coding/main/source/receiver.h b/src/modules/video_coding/main/source/receiver.h
new file mode 100644
index 0000000..0081ed1
--- /dev/null
+++ b/src/modules/video_coding/main/source/receiver.h
@@ -0,0 +1,101 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_RECEIVER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_RECEIVER_H_
+
+#include "critical_section_wrapper.h"
+#include "jitter_buffer.h"
+#include "modules/video_coding/main/source/tick_time_base.h"
+#include "timing.h"
+#include "packet.h"
+
+namespace webrtc
+{
+
+class VCMEncodedFrame;
+
+enum VCMNackStatus
+{
+    kNackOk,
+    kNackNeedMoreMemory,
+    kNackKeyFrameRequest
+};
+
+
+enum VCMReceiverState
+{
+    kReceiving,
+    kPassive,
+    kWaitForPrimaryDecode
+};
+
+class VCMReceiver
+{
+public:
+    VCMReceiver(VCMTiming& timing,
+                TickTimeBase* clock,
+                WebRtc_Word32 vcmId = -1,
+                WebRtc_Word32 receiverId = -1,
+                bool master = true);
+    ~VCMReceiver();
+
+    void Reset();
+    WebRtc_Word32 Initialize();
+    void UpdateRtt(WebRtc_UWord32 rtt);
+    WebRtc_Word32 InsertPacket(const VCMPacket& packet,
+                               WebRtc_UWord16 frameWidth,
+                               WebRtc_UWord16 frameHeight);
+    VCMEncodedFrame* FrameForDecoding(WebRtc_UWord16 maxWaitTimeMs,
+                                      WebRtc_Word64& nextRenderTimeMs,
+                                      bool renderTiming = true,
+                                      VCMReceiver* dualReceiver = NULL);
+    void ReleaseFrame(VCMEncodedFrame* frame);
+    WebRtc_Word32 ReceiveStatistics(WebRtc_UWord32& bitRate, WebRtc_UWord32& frameRate);
+    WebRtc_Word32 ReceivedFrameCount(VCMFrameCount& frameCount) const;
+    WebRtc_UWord32 DiscardedPackets() const;
+
+    // NACK
+    void SetNackMode(VCMNackMode nackMode);
+    VCMNackMode NackMode() const;
+    VCMNackStatus NackList(WebRtc_UWord16* nackList, WebRtc_UWord16& size);
+
+    // Dual decoder
+    bool DualDecoderCaughtUp(VCMEncodedFrame* dualFrame, VCMReceiver& dualReceiver) const;
+    VCMReceiverState State() const;
+
+private:
+    VCMEncodedFrame* FrameForDecoding(WebRtc_UWord16 maxWaitTimeMs,
+                                      WebRtc_Word64 nextrenderTimeMs,
+                                      VCMReceiver* dualReceiver);
+    VCMEncodedFrame* FrameForRendering(WebRtc_UWord16 maxWaitTimeMs,
+                                       WebRtc_Word64 nextrenderTimeMs,
+                                       VCMReceiver* dualReceiver);
+    void CopyJitterBufferStateFromReceiver(const VCMReceiver& receiver);
+    void UpdateState(VCMReceiverState newState);
+    void UpdateState(VCMEncodedFrame& frame);
+    static WebRtc_Word32 GenerateReceiverId();
+
+    CriticalSectionWrapper* _critSect;
+    WebRtc_Word32           _vcmId;
+    TickTimeBase*           _clock;
+    WebRtc_Word32           _receiverId;
+    bool                    _master;
+    VCMJitterBuffer         _jitterBuffer;
+    VCMTiming&              _timing;
+    VCMEvent&               _renderWaitEvent;
+    VCMReceiverState        _state;
+
+    static WebRtc_Word32    _receiverIdCounter;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_RECEIVER_H_
diff --git a/src/modules/video_coding/main/source/rtt_filter.cc b/src/modules/video_coding/main/source/rtt_filter.cc
new file mode 100644
index 0000000..36f7660
--- /dev/null
+++ b/src/modules/video_coding/main/source/rtt_filter.cc
@@ -0,0 +1,214 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "trace.h"
+#include "internal_defines.h"
+#include "rtt_filter.h"
+
+#include <cmath>
+#include <stdlib.h>
+#include <string.h>
+
+namespace webrtc {
+
+VCMRttFilter::VCMRttFilter(WebRtc_Word32 vcmId, WebRtc_Word32 receiverId)
+:
+_vcmId(vcmId),
+_receiverId(receiverId),
+_filtFactMax(35),
+_jumpStdDevs(2.5),
+_driftStdDevs(3.5),
+_detectThreshold(kMaxDriftJumpCount)
+{
+    Reset();
+}
+
+VCMRttFilter&
+VCMRttFilter::operator=(const VCMRttFilter& rhs)
+{
+    if (this != &rhs)
+    {
+        _gotNonZeroUpdate = rhs._gotNonZeroUpdate;
+        _avgRtt = rhs._avgRtt;
+        _varRtt = rhs._varRtt;
+        _maxRtt = rhs._maxRtt;
+        _filtFactCount = rhs._filtFactCount;
+        _jumpCount = rhs._jumpCount;
+        _driftCount = rhs._driftCount;
+        memcpy(_jumpBuf, rhs._jumpBuf, sizeof(_jumpBuf));
+        memcpy(_driftBuf, rhs._driftBuf, sizeof(_driftBuf));
+    }
+    return *this;
+}
+
+void
+VCMRttFilter::Reset()
+{
+    _gotNonZeroUpdate = false;
+    _avgRtt = 0;
+    _varRtt = 0;
+    _maxRtt = 0;
+    _filtFactCount = 1;
+    _jumpCount = 0;
+    _driftCount = 0;
+    memset(_jumpBuf, 0, kMaxDriftJumpCount);
+    memset(_driftBuf, 0, kMaxDriftJumpCount);
+}
+
+void
+VCMRttFilter::Update(WebRtc_UWord32 rttMs)
+{
+    if (!_gotNonZeroUpdate)
+    {
+        if (rttMs == 0)
+        {
+            return;
+        }
+        _gotNonZeroUpdate = true;
+    }
+
+    // Sanity check
+    if (rttMs > 3000)
+    {
+        rttMs = 3000;
+    }
+
+    double filtFactor = 0;
+    if (_filtFactCount > 1)
+    {
+        filtFactor = static_cast<double>(_filtFactCount - 1) / _filtFactCount;
+    }
+    _filtFactCount++;
+    if (_filtFactCount > _filtFactMax)
+    {
+        // This prevents filtFactor from going above
+        // (_filtFactMax - 1) / _filtFactMax,
+        // e.g., _filtFactMax = 50 => filtFactor = 49/50 = 0.98
+        _filtFactCount = _filtFactMax;
+    }
+    double oldAvg = _avgRtt;
+    double oldVar = _varRtt;
+    _avgRtt = filtFactor * _avgRtt + (1 - filtFactor) * rttMs;
+    _varRtt = filtFactor * _varRtt + (1 - filtFactor) *
+                (rttMs - _avgRtt) * (rttMs - _avgRtt);
+    _maxRtt = VCM_MAX(rttMs, _maxRtt);
+    if (!JumpDetection(rttMs) || !DriftDetection(rttMs))
+    {
+        // In some cases we don't want to update the statistics
+        _avgRtt = oldAvg;
+        _varRtt = oldVar;
+    }
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
+               "RttFilter Update: sample=%u avgRtt=%f varRtt=%f maxRtt=%u",
+               rttMs, _avgRtt, _varRtt, _maxRtt);
+}
+
+bool
+VCMRttFilter::JumpDetection(WebRtc_UWord32 rttMs)
+{
+    double diffFromAvg = _avgRtt - rttMs;
+    if (abs(diffFromAvg) > _jumpStdDevs * sqrt(_varRtt))
+    {
+        int diffSign = (diffFromAvg >= 0) ? 1 : -1;
+        int jumpCountSign = (_jumpCount >= 0) ? 1 : -1;
+        if (diffSign != jumpCountSign)
+        {
+            // Since the signs differ the samples currently
+            // in the buffer is useless as they represent a
+            // jump in a different direction.
+            _jumpCount = 0;
+        }
+        if (abs(_jumpCount) < kMaxDriftJumpCount)
+        {
+            // Update the buffer used for the short time
+            // statistics.
+            // The sign of the diff is used for updating the counter since
+            // we want to use the same buffer for keeping track of when
+            // the RTT jumps down and up.
+            _jumpBuf[abs(_jumpCount)] = rttMs;
+            _jumpCount += diffSign;
+        }
+        if (abs(_jumpCount) >= _detectThreshold)
+        {
+            // Detected an RTT jump
+            ShortRttFilter(_jumpBuf, abs(_jumpCount));
+            _filtFactCount = _detectThreshold + 1;
+            _jumpCount = 0;
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
+                       "Detected an RTT jump");
+        }
+        else
+        {
+            return false;
+        }
+    }
+    else
+    {
+        _jumpCount = 0;
+    }
+    return true;
+}
+
+bool
+VCMRttFilter::DriftDetection(WebRtc_UWord32 rttMs)
+{
+    if (_maxRtt - _avgRtt > _driftStdDevs * sqrt(_varRtt))
+    {
+        if (_driftCount < kMaxDriftJumpCount)
+        {
+            // Update the buffer used for the short time
+            // statistics.
+            _driftBuf[_driftCount] = rttMs;
+            _driftCount++;
+        }
+        if (_driftCount >= _detectThreshold)
+        {
+            // Detected an RTT drift
+            ShortRttFilter(_driftBuf, _driftCount);
+            _filtFactCount = _detectThreshold + 1;
+            _driftCount = 0;
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _receiverId),
+                       "Detected an RTT drift");
+        }
+    }
+    else
+    {
+        _driftCount = 0;
+    }
+    return true;
+}
+
+void
+VCMRttFilter::ShortRttFilter(WebRtc_UWord32* buf, WebRtc_UWord32 length)
+{
+    if (length == 0)
+    {
+        return;
+    }
+    _maxRtt = 0;
+    _avgRtt = 0;
+    for (WebRtc_UWord32 i=0; i < length; i++)
+    {
+        if (buf[i] > _maxRtt)
+        {
+            _maxRtt = buf[i];
+        }
+        _avgRtt += buf[i];
+    }
+    _avgRtt = _avgRtt / static_cast<double>(length);
+}
+
+WebRtc_UWord32
+VCMRttFilter::RttMs() const
+{
+    return static_cast<WebRtc_UWord32>(_maxRtt + 0.5);
+}
+
+}
diff --git a/src/modules/video_coding/main/source/rtt_filter.h b/src/modules/video_coding/main/source/rtt_filter.h
new file mode 100644
index 0000000..5ec85fd
--- /dev/null
+++ b/src/modules/video_coding/main/source/rtt_filter.h
@@ -0,0 +1,70 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_RTT_FILTER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_RTT_FILTER_H_
+
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+class VCMRttFilter
+{
+public:
+    VCMRttFilter(WebRtc_Word32 vcmId = 0, WebRtc_Word32 receiverId = 0);
+
+    VCMRttFilter& operator=(const VCMRttFilter& rhs);
+
+    // Resets the filter.
+    void Reset();
+    // Updates the filter with a new sample.
+    void Update(WebRtc_UWord32 rttMs);
+    // A getter function for the current RTT level in ms.
+    WebRtc_UWord32 RttMs() const;
+
+private:
+    // The size of the drift and jump memory buffers
+    // and thus also the detection threshold for these
+    // detectors in number of samples.
+    enum { kMaxDriftJumpCount = 5 };
+    // Detects RTT jumps by comparing the difference between
+    // samples and average to the standard deviation.
+    // Returns true if the long time statistics should be updated
+    // and false otherwise
+    bool JumpDetection(WebRtc_UWord32 rttMs);
+    // Detects RTT drifts by comparing the difference between
+    // max and average to the standard deviation.
+    // Returns true if the long time statistics should be updated
+    // and false otherwise
+    bool DriftDetection(WebRtc_UWord32 rttMs);
+    // Computes the short time average and maximum of the vector buf.
+    void ShortRttFilter(WebRtc_UWord32* buf, WebRtc_UWord32 length);
+
+    WebRtc_Word32         _vcmId;
+    WebRtc_Word32         _receiverId;
+    bool                  _gotNonZeroUpdate;
+    double                _avgRtt;
+    double                _varRtt;
+    WebRtc_UWord32        _maxRtt;
+    WebRtc_UWord32        _filtFactCount;
+    const WebRtc_UWord32  _filtFactMax;
+    const double          _jumpStdDevs;
+    const double          _driftStdDevs;
+    WebRtc_Word32         _jumpCount;
+    WebRtc_Word32         _driftCount;
+    const WebRtc_Word32   _detectThreshold;
+    WebRtc_UWord32        _jumpBuf[kMaxDriftJumpCount];
+    WebRtc_UWord32        _driftBuf[kMaxDriftJumpCount];
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_RTT_FILTER_H_
diff --git a/src/modules/video_coding/main/source/session_info.cc b/src/modules/video_coding/main/source/session_info.cc
new file mode 100644
index 0000000..7261403
--- /dev/null
+++ b/src/modules/video_coding/main/source/session_info.cc
@@ -0,0 +1,595 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_coding/main/source/session_info.h"
+
+#include "modules/video_coding/main/source/packet.h"
+
+namespace webrtc {
+
+VCMSessionInfo::VCMSessionInfo()
+    : session_nack_(false),
+      complete_(false),
+      decodable_(false),
+      frame_type_(kVideoFrameDelta),
+      previous_frame_loss_(false),
+      packets_(),
+      empty_seq_num_low_(-1),
+      empty_seq_num_high_(-1),
+      packets_not_decodable_(0) {
+}
+
+void VCMSessionInfo::UpdateDataPointers(const uint8_t* old_base_ptr,
+                                        const uint8_t* new_base_ptr) {
+  for (PacketIterator it = packets_.begin(); it != packets_.end(); ++it)
+    if ((*it).dataPtr != NULL) {
+      assert(old_base_ptr != NULL && new_base_ptr != NULL);
+      (*it).dataPtr = new_base_ptr + ((*it).dataPtr - old_base_ptr);
+    }
+}
+
+int VCMSessionInfo::LowSequenceNumber() const {
+  if (packets_.empty())
+    return empty_seq_num_low_;
+  return packets_.front().seqNum;
+}
+
+int VCMSessionInfo::HighSequenceNumber() const {
+  if (packets_.empty())
+    return empty_seq_num_high_;
+  return LatestSequenceNumber(packets_.back().seqNum, empty_seq_num_high_,
+                              NULL);
+}
+
+int VCMSessionInfo::PictureId() const {
+  if (packets_.empty() ||
+      packets_.front().codecSpecificHeader.codec != kRTPVideoVP8)
+    return kNoPictureId;
+  return packets_.front().codecSpecificHeader.codecHeader.VP8.pictureId;
+}
+
+int VCMSessionInfo::TemporalId() const {
+  if (packets_.empty() ||
+      packets_.front().codecSpecificHeader.codec != kRTPVideoVP8)
+    return kNoTemporalIdx;
+  return packets_.front().codecSpecificHeader.codecHeader.VP8.temporalIdx;
+}
+
+bool VCMSessionInfo::LayerSync() const {
+  if (packets_.empty() ||
+        packets_.front().codecSpecificHeader.codec != kRTPVideoVP8)
+    return false;
+  return packets_.front().codecSpecificHeader.codecHeader.VP8.layerSync;
+}
+
+int VCMSessionInfo::Tl0PicId() const {
+  if (packets_.empty() ||
+      packets_.front().codecSpecificHeader.codec != kRTPVideoVP8)
+    return kNoTl0PicIdx;
+  return packets_.front().codecSpecificHeader.codecHeader.VP8.tl0PicIdx;
+}
+
+bool VCMSessionInfo::NonReference() const {
+  if (packets_.empty() ||
+      packets_.front().codecSpecificHeader.codec != kRTPVideoVP8)
+    return false;
+  return packets_.front().codecSpecificHeader.codecHeader.VP8.nonReference;
+}
+
+void VCMSessionInfo::Reset() {
+  session_nack_ = false;
+  complete_ = false;
+  decodable_ = false;
+  frame_type_ = kVideoFrameDelta;
+  previous_frame_loss_ = false;
+  packets_.clear();
+  empty_seq_num_low_ = -1;
+  empty_seq_num_high_ = -1;
+  packets_not_decodable_ = 0;
+}
+
+int VCMSessionInfo::SessionLength() const {
+  int length = 0;
+  for (PacketIteratorConst it = packets_.begin(); it != packets_.end(); ++it)
+    length += (*it).sizeBytes;
+  return length;
+}
+
+int VCMSessionInfo::InsertBuffer(uint8_t* frame_buffer,
+                                 PacketIterator packet_it) {
+  VCMPacket& packet = *packet_it;
+  PacketIterator it;
+
+  int packet_size = packet.sizeBytes;
+  packet_size += (packet.insertStartCode ? kH264StartCodeLengthBytes : 0);
+
+  // Calculate the offset into the frame buffer for this packet.
+  int offset = 0;
+  for (it = packets_.begin(); it != packet_it; ++it)
+    offset += (*it).sizeBytes;
+
+  // Set the data pointer to pointing to the start of this packet in the
+  // frame buffer.
+  const uint8_t* data = packet.dataPtr;
+  packet.dataPtr = frame_buffer + offset;
+  packet.sizeBytes = packet_size;
+
+  ShiftSubsequentPackets(packet_it, packet_size);
+
+  const unsigned char startCode[] = {0, 0, 0, 1};
+  if (packet.insertStartCode) {
+    memcpy(const_cast<uint8_t*>(packet.dataPtr), startCode,
+           kH264StartCodeLengthBytes);
+  }
+  memcpy(const_cast<uint8_t*>(packet.dataPtr
+      + (packet.insertStartCode ? kH264StartCodeLengthBytes : 0)),
+      data,
+      packet.sizeBytes);
+
+  return packet_size;
+}
+
+void VCMSessionInfo::ShiftSubsequentPackets(PacketIterator it,
+                                            int steps_to_shift) {
+  ++it;
+  if (it == packets_.end())
+    return;
+  uint8_t* first_packet_ptr = const_cast<WebRtc_UWord8*>((*it).dataPtr);
+  int shift_length = 0;
+  // Calculate the total move length and move the data pointers in advance.
+  for (; it != packets_.end(); ++it) {
+    shift_length += (*it).sizeBytes;
+    if ((*it).dataPtr != NULL)
+      (*it).dataPtr += steps_to_shift;
+  }
+  memmove(first_packet_ptr + steps_to_shift, first_packet_ptr, shift_length);
+}
+
+void VCMSessionInfo::UpdateCompleteSession() {
+  if (packets_.front().isFirstPacket && packets_.back().markerBit) {
+    // Do we have all the packets in this session?
+    bool complete_session = true;
+    PacketIterator it = packets_.begin();
+    PacketIterator prev_it = it;
+    ++it;
+    for (; it != packets_.end(); ++it) {
+      if (!InSequence(it, prev_it)) {
+        complete_session = false;
+        break;
+      }
+      prev_it = it;
+    }
+    complete_ = complete_session;
+  }
+}
+
+void VCMSessionInfo::UpdateDecodableSession(int rttMs) {
+  // Irrelevant if session is already complete or decodable
+  if (complete_ || decodable_)
+    return;
+  // First iteration - do nothing
+}
+
+bool VCMSessionInfo::complete() const {
+  return complete_;
+}
+
+bool VCMSessionInfo::decodable() const {
+  return decodable_;
+}
+
+// Find the end of the NAL unit which the packet pointed to by |packet_it|
+// belongs to. Returns an iterator to the last packet of the frame if the end
+// of the NAL unit wasn't found.
+VCMSessionInfo::PacketIterator VCMSessionInfo::FindNaluEnd(
+    PacketIterator packet_it) const {
+  if ((*packet_it).completeNALU == kNaluEnd ||
+      (*packet_it).completeNALU == kNaluComplete) {
+    return packet_it;
+  }
+  // Find the end of the NAL unit.
+  for (; packet_it != packets_.end(); ++packet_it) {
+    if (((*packet_it).completeNALU == kNaluComplete &&
+        (*packet_it).sizeBytes > 0) ||
+        // Found next NALU.
+        (*packet_it).completeNALU == kNaluStart)
+      return --packet_it;
+    if ((*packet_it).completeNALU == kNaluEnd)
+      return packet_it;
+  }
+  // The end wasn't found.
+  return --packet_it;
+}
+
+int VCMSessionInfo::DeletePacketData(PacketIterator start,
+                                     PacketIterator end) {
+  int bytes_to_delete = 0;  // The number of bytes to delete.
+  PacketIterator packet_after_end = end;
+  ++packet_after_end;
+
+  // Get the number of bytes to delete.
+  // Clear the size of these packets.
+  for (PacketIterator it = start; it != packet_after_end; ++it) {
+    bytes_to_delete += (*it).sizeBytes;
+    (*it).sizeBytes = 0;
+    (*it).dataPtr = NULL;
+    ++packets_not_decodable_;
+  }
+  if (bytes_to_delete > 0)
+    ShiftSubsequentPackets(end, -bytes_to_delete);
+  return bytes_to_delete;
+}
+
+int VCMSessionInfo::BuildVP8FragmentationHeader(
+    uint8_t* frame_buffer,
+    int frame_buffer_length,
+    RTPFragmentationHeader* fragmentation) {
+  int new_length = 0;
+  // Allocate space for max number of partitions
+  fragmentation->VerifyAndAllocateFragmentationHeader(kMaxVP8Partitions);
+  fragmentation->fragmentationVectorSize = 0;
+  memset(fragmentation->fragmentationLength, 0,
+         kMaxVP8Partitions * sizeof(WebRtc_UWord32));
+  if (packets_.empty())
+      return new_length;
+  PacketIterator it = FindNextPartitionBeginning(packets_.begin(),
+                                                 &packets_not_decodable_);
+  while (it != packets_.end()) {
+    const int partition_id =
+        (*it).codecSpecificHeader.codecHeader.VP8.partitionId;
+    PacketIterator partition_end = FindPartitionEnd(it);
+    fragmentation->fragmentationOffset[partition_id] =
+        (*it).dataPtr - frame_buffer;
+    assert(fragmentation->fragmentationOffset[partition_id] <
+           static_cast<WebRtc_UWord32>(frame_buffer_length));
+    fragmentation->fragmentationLength[partition_id] =
+        (*partition_end).dataPtr + (*partition_end).sizeBytes - (*it).dataPtr;
+    assert(fragmentation->fragmentationLength[partition_id] <=
+           static_cast<WebRtc_UWord32>(frame_buffer_length));
+    new_length += fragmentation->fragmentationLength[partition_id];
+    ++partition_end;
+    it = FindNextPartitionBeginning(partition_end, &packets_not_decodable_);
+    if (partition_id + 1 > fragmentation->fragmentationVectorSize)
+      fragmentation->fragmentationVectorSize = partition_id + 1;
+  }
+  // Set all empty fragments to start where the previous fragment ends,
+  // and have zero length.
+  if (fragmentation->fragmentationLength[0] == 0)
+      fragmentation->fragmentationOffset[0] = 0;
+  for (int i = 1; i < fragmentation->fragmentationVectorSize; ++i) {
+    if (fragmentation->fragmentationLength[i] == 0)
+      fragmentation->fragmentationOffset[i] =
+          fragmentation->fragmentationOffset[i - 1] +
+          fragmentation->fragmentationLength[i - 1];
+    assert(i == 0 ||
+           fragmentation->fragmentationOffset[i] >=
+           fragmentation->fragmentationOffset[i - 1]);
+  }
+  assert(new_length <= frame_buffer_length);
+  return new_length;
+}
+
+VCMSessionInfo::PacketIterator VCMSessionInfo::FindNextPartitionBeginning(
+    PacketIterator it, int* packets_skipped) const {
+  while (it != packets_.end()) {
+    if ((*it).codecSpecificHeader.codecHeader.VP8.beginningOfPartition) {
+      return it;
+    } else if (packets_skipped !=  NULL) {
+      // This packet belongs to a partition with a previous loss and can't
+      // be decoded.
+      ++(*packets_skipped);
+    }
+    ++it;
+  }
+  return it;
+}
+
+VCMSessionInfo::PacketIterator VCMSessionInfo::FindPartitionEnd(
+    PacketIterator it) const {
+  assert((*it).codec == kVideoCodecVP8);
+  PacketIterator prev_it = it;
+  const int partition_id =
+      (*it).codecSpecificHeader.codecHeader.VP8.partitionId;
+  while (it != packets_.end()) {
+    bool beginning =
+        (*it).codecSpecificHeader.codecHeader.VP8.beginningOfPartition;
+    int current_partition_id =
+        (*it).codecSpecificHeader.codecHeader.VP8.partitionId;
+    bool packet_loss_found = (!beginning && !InSequence(it, prev_it));
+    if (packet_loss_found ||
+        (beginning && current_partition_id != partition_id)) {
+      // Missing packet, the previous packet was the last in sequence.
+      return prev_it;
+    }
+    prev_it = it;
+    ++it;
+  }
+  return prev_it;
+}
+
+bool VCMSessionInfo::InSequence(const PacketIterator& packet_it,
+                                const PacketIterator& prev_packet_it) {
+  // If the two iterators are pointing to the same packet they are considered
+  // to be in sequence.
+  return (packet_it == prev_packet_it ||
+      (static_cast<WebRtc_UWord16>((*prev_packet_it).seqNum + 1) ==
+          (*packet_it).seqNum));
+}
+
+int VCMSessionInfo::MakeDecodable() {
+  int return_length = 0;
+  if (packets_.empty()) {
+    return 0;
+  }
+  PacketIterator it = packets_.begin();
+  // Make sure we remove the first NAL unit if it's not decodable.
+  if ((*it).completeNALU == kNaluIncomplete ||
+      (*it).completeNALU == kNaluEnd) {
+    PacketIterator nalu_end = FindNaluEnd(it);
+    return_length += DeletePacketData(it, nalu_end);
+    it = nalu_end;
+  }
+  PacketIterator prev_it = it;
+  // Take care of the rest of the NAL units.
+  for (; it != packets_.end(); ++it) {
+    bool start_of_nalu = ((*it).completeNALU == kNaluStart ||
+        (*it).completeNALU == kNaluComplete);
+    if (!start_of_nalu && !InSequence(it, prev_it)) {
+      // Found a sequence number gap due to packet loss.
+      PacketIterator nalu_end = FindNaluEnd(it);
+      return_length += DeletePacketData(it, nalu_end);
+      it = nalu_end;
+    }
+    prev_it = it;
+  }
+  return return_length;
+}
+
+int VCMSessionInfo::BuildHardNackList(int* seq_num_list,
+                                      int seq_num_list_length) {
+  if (NULL == seq_num_list || seq_num_list_length < 1) {
+    return -1;
+  }
+  if (packets_.empty()) {
+    return 0;
+  }
+
+  // Find end point (index of entry equals the sequence number of the first
+  // packet).
+  int index = 0;
+  for (; index < seq_num_list_length; ++index) {
+    if (seq_num_list[index] == packets_.front().seqNum) {
+      seq_num_list[index] = -1;
+      ++index;
+      break;
+    }
+  }
+
+  // Zero out between the first entry and the end point.
+  PacketIterator it = packets_.begin();
+  PacketIterator prev_it = it;
+  ++it;
+  while (it != packets_.end() && index < seq_num_list_length) {
+    if (!InSequence(it, prev_it)) {
+      // Found a sequence number gap due to packet loss.
+      index += PacketsMissing(it, prev_it);
+      session_nack_ = true;
+    }
+    seq_num_list[index] = -1;
+    ++index;
+    prev_it = it;
+    ++it;
+  }
+  if (!packets_.front().isFirstPacket)
+    session_nack_ = true;
+  return 0;
+}
+
+int VCMSessionInfo::BuildSoftNackList(int* seq_num_list,
+                                      int seq_num_list_length,
+                                      int rtt_ms) {
+  if (NULL == seq_num_list || seq_num_list_length < 1) {
+    return -1;
+  }
+  if (packets_.empty() && empty_seq_num_low_ == -1) {
+    return 0;
+  }
+
+  int index = 0;
+  int low_seq_num = (packets_.empty()) ? empty_seq_num_low_:
+      packets_.front().seqNum;
+  // Find entrance point (index of entry equals the sequence number of the
+  // first packet).
+  for (; index < seq_num_list_length; ++index) {
+    if (seq_num_list[index] == low_seq_num) {
+      seq_num_list[index] = -1;
+      break;
+    }
+  }
+
+  // TODO(mikhal): 1. Update score based on RTT value 2. Add partition data.
+  // Use the previous available.
+  bool base_available = false;
+  if ((index > 0) && (seq_num_list[index] == -1)) {
+    // Found first packet, for now let's go only one back.
+    if ((seq_num_list[index - 1] == -1) || (seq_num_list[index - 1] == -2)) {
+      // This is indeed the first packet, as previous packet was populated.
+      base_available = true;
+    }
+  }
+  bool allow_nack = ((packets_.size() > 0 && !packets_.front().isFirstPacket)
+    || !base_available);
+
+  // Zero out between first entry and end point.
+
+  int media_high_seq_num;
+  if (HaveLastPacket()) {
+    media_high_seq_num = packets_.back().seqNum;
+  } else {
+    // Estimation.
+    if (empty_seq_num_low_ >= 0) {
+      // Assuming empty packets have later sequence numbers than media packets.
+      media_high_seq_num = empty_seq_num_low_ - 1;
+    } else {
+      // Since this frame doesn't have the marker bit we can assume it should
+      // contain at least one more packet.
+      media_high_seq_num = static_cast<uint16_t>(packets_.back().seqNum + 1);
+    }
+  }
+
+  // Compute session/packet scores and thresholds:
+  // based on RTT and layer info (when available).
+  float nack_score_threshold = 0.25f;
+  float layer_score = TemporalId() > 0 ? 0.0f : 1.0f;
+  float rtt_score = 1.0f;
+  float score_multiplier = rtt_score * layer_score;
+  // Zero out between first entry and end point.
+  if (!packets_.empty()) {
+    PacketIterator it = packets_.begin();
+    PacketIterator prev_it = it;
+    ++index;
+    ++it;
+    // TODO(holmer): Rewrite this in a way which better makes use of the list.
+    while (it != packets_.end() && index < seq_num_list_length) {
+    // Only process media packet sequence numbers.
+      if (LatestSequenceNumber((*it).seqNum, media_high_seq_num, NULL) ==
+        (*it).seqNum && (*it).seqNum != media_high_seq_num)
+        break;
+      if (!InSequence(it, prev_it)) {
+        // Found a sequence number gap due to packet loss.
+        int num_lost = PacketsMissing(it, prev_it);
+        for (int i = 0 ; i < num_lost; ++i) {
+          // Compute score of the packet.
+          float score = 1.0f;
+          // Multiply internal score (packet) by score multiplier.
+          score *= score_multiplier;
+          if (score > nack_score_threshold) {
+            allow_nack = true;
+          } else {
+            seq_num_list[index] = -1;
+          }
+          ++index;
+        }
+      }
+      seq_num_list[index] = -1;
+      ++index;
+      prev_it = it;
+      ++it;
+    }
+  }
+
+  // Empty packets follow the data packets, and therefore have a higher
+  // sequence number. We do not want to NACK empty packets.
+  if ((empty_seq_num_low_ != -1) && (empty_seq_num_high_ != -1) &&
+      (index < seq_num_list_length)) {
+    // First make sure that we are at least at the minimum value (if not we are
+    // missing last packet(s)).
+    while (seq_num_list[index] < empty_seq_num_low_ &&
+        index < seq_num_list_length) {
+      ++index;
+    }
+
+    // Mark empty packets.
+    while (seq_num_list[index] <= empty_seq_num_high_ &&
+        index < seq_num_list_length) {
+      seq_num_list[index] = -2;
+      ++index;
+    }
+  }
+
+  session_nack_ = allow_nack;
+  return 0;
+}
+
+int VCMSessionInfo::PacketsMissing(const PacketIterator& packet_it,
+                                   const PacketIterator& prev_packet_it) {
+  if (packet_it == prev_packet_it)
+    return 0;
+  if ((*prev_packet_it).seqNum > (*packet_it).seqNum)  // Wrap.
+    return static_cast<WebRtc_UWord16>(
+        static_cast<WebRtc_UWord32>((*packet_it).seqNum + 0x10000) -
+        (*prev_packet_it).seqNum) - 1;
+  else
+    return (*packet_it).seqNum - (*prev_packet_it).seqNum - 1;
+}
+
+bool
+VCMSessionInfo::HaveLastPacket() const {
+  return (!packets_.empty() && packets_.back().markerBit);
+}
+
+bool
+VCMSessionInfo::session_nack() const {
+  return session_nack_;
+}
+
+int VCMSessionInfo::InsertPacket(const VCMPacket& packet,
+                                 uint8_t* frame_buffer,
+                                 bool enable_decodable_state,
+                                 int rtt_ms) {
+  // Check if this is first packet (only valid for some codecs)
+  if (packet.isFirstPacket) {
+    // The first packet in a frame signals the frame type.
+    frame_type_ = packet.frameType;
+  } else if (frame_type_ == kFrameEmpty && packet.frameType != kFrameEmpty) {
+    // Update the frame type with the first media packet.
+    frame_type_ = packet.frameType;
+  }
+  if (packet.frameType == kFrameEmpty) {
+    // Update sequence number of an empty packet.
+    // Only media packets are inserted into the packet list.
+    InformOfEmptyPacket(packet.seqNum);
+    return 0;
+  }
+
+  if (packets_.size() == kMaxPacketsInSession)
+    return -1;
+
+  // Find the position of this packet in the packet list in sequence number
+  // order and insert it. Loop over the list in reverse order.
+  ReversePacketIterator rit = packets_.rbegin();
+  for (; rit != packets_.rend(); ++rit)
+    if (LatestSequenceNumber((*rit).seqNum, packet.seqNum, NULL) ==
+        packet.seqNum)
+      break;
+
+  // Check for duplicate packets.
+  if (rit != packets_.rend() &&
+      (*rit).seqNum == packet.seqNum && (*rit).sizeBytes > 0)
+    return -2;
+
+  // The insert operation invalidates the iterator |rit|.
+  PacketIterator packet_list_it = packets_.insert(rit.base(), packet);
+
+  int returnLength = InsertBuffer(frame_buffer, packet_list_it);
+  UpdateCompleteSession();
+  if (enable_decodable_state)
+    UpdateDecodableSession(rtt_ms);
+  return returnLength;
+}
+
+void VCMSessionInfo::InformOfEmptyPacket(uint16_t seq_num) {
+  // Empty packets may be FEC or filler packets. They are sequential and
+  // follow the data packets, therefore, we should only keep track of the high
+  // and low sequence numbers and may assume that the packets in between are
+  // empty packets belonging to the same frame (timestamp).
+  empty_seq_num_high_ = LatestSequenceNumber(seq_num, empty_seq_num_high_,
+                                             NULL);
+  if (empty_seq_num_low_ == -1 ||
+      LatestSequenceNumber(seq_num, empty_seq_num_low_, NULL) ==
+          empty_seq_num_low_)
+    empty_seq_num_low_ = seq_num;
+}
+
+int VCMSessionInfo::packets_not_decodable() const {
+  return packets_not_decodable_;
+}
+
+}  // namespace webrtc
diff --git a/src/modules/video_coding/main/source/session_info.h b/src/modules/video_coding/main/source/session_info.h
new file mode 100644
index 0000000..27533ce
--- /dev/null
+++ b/src/modules/video_coding/main/source/session_info.h
@@ -0,0 +1,134 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_SESSION_INFO_H_
+#define WEBRTC_MODULES_VIDEO_CODING_SESSION_INFO_H_
+
+#include <list>
+
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/main/source/packet.h"
+#include "typedefs.h"  // NOLINT(build/include)
+
+namespace webrtc {
+
+class VCMSessionInfo {
+ public:
+  VCMSessionInfo();
+
+  void UpdateDataPointers(const uint8_t* old_base_ptr,
+                          const uint8_t* new_base_ptr);
+  // NACK - Building the NACK lists.
+  // Build hard NACK list: Zero out all entries in list up to and including
+  // _lowSeqNum.
+  int BuildHardNackList(int* seq_num_list,
+                        int seq_num_list_length);
+
+  // Build soft NACK list:  Zero out only a subset of the packets, discard
+  // empty packets.
+  int BuildSoftNackList(int* seq_num_list,
+                        int seq_num_list_length,
+                        int rtt_ms);
+  void Reset();
+  int InsertPacket(const VCMPacket& packet,
+                   uint8_t* frame_buffer,
+                   bool enable_decodable_state,
+                   int rtt_ms);
+  bool complete() const;
+  bool decodable() const;
+
+  // Builds fragmentation headers for VP8, each fragment being a decodable
+  // VP8 partition. Returns the total number of bytes which are decodable. Is
+  // used instead of MakeDecodable for VP8.
+  int BuildVP8FragmentationHeader(uint8_t* frame_buffer,
+                                  int frame_buffer_length,
+                                  RTPFragmentationHeader* fragmentation);
+
+  // Makes the frame decodable. I.e., only contain decodable NALUs. All
+  // non-decodable NALUs will be deleted and packets will be moved to in
+  // memory to remove any empty space.
+  // Returns the number of bytes deleted from the session.
+  int MakeDecodable();
+  int SessionLength() const;
+  bool HaveLastPacket() const;
+  bool session_nack() const;
+  webrtc::FrameType FrameType() const { return frame_type_; }
+  int LowSequenceNumber() const;
+
+  // Returns highest sequence number, media or empty.
+  int HighSequenceNumber() const;
+  int PictureId() const;
+  int TemporalId() const;
+  bool LayerSync() const;
+  int Tl0PicId() const;
+  bool NonReference() const;
+  void SetPreviousFrameLoss() { previous_frame_loss_ = true; }
+  bool PreviousFrameLoss() const { return previous_frame_loss_; }
+
+  // The number of packets discarded because the decoder can't make use of
+  // them.
+  int packets_not_decodable() const;
+
+ private:
+  enum { kMaxVP8Partitions = 9 };
+
+  typedef std::list<VCMPacket> PacketList;
+  typedef PacketList::iterator PacketIterator;
+  typedef PacketList::const_iterator PacketIteratorConst;
+  typedef PacketList::reverse_iterator ReversePacketIterator;
+
+  void InformOfEmptyPacket(uint16_t seq_num);
+
+  // Finds the packet of the beginning of the next VP8 partition. If
+  // none is found the returned iterator points to |packets_.end()|.
+  // |it| is expected to point to the last packet of the previous partition,
+  // or to the first packet of the frame. |packets_skipped| is incremented
+  // for each packet found which doesn't have the beginning bit set.
+  PacketIterator FindNextPartitionBeginning(PacketIterator it,
+                                            int* packets_skipped) const;
+
+  // Returns an iterator pointing to the last packet of the partition pointed to
+  // by |it|.
+  PacketIterator FindPartitionEnd(PacketIterator it) const;
+  static bool InSequence(const PacketIterator& it,
+                         const PacketIterator& prev_it);
+  static int PacketsMissing(const PacketIterator& packet_it,
+                            const PacketIterator& prev_packet_it);
+  int InsertBuffer(uint8_t* frame_buffer,
+                   PacketIterator packetIterator);
+  void ShiftSubsequentPackets(PacketIterator it, int steps_to_shift);
+  PacketIterator FindNaluEnd(PacketIterator packet_iter) const;
+  // Deletes the data of all packets between |start| and |end|, inclusively.
+  // Note that this function doesn't delete the actual packets.
+  int DeletePacketData(PacketIterator start,
+                       PacketIterator end);
+  void UpdateCompleteSession();
+
+  // When enabled, determine if session is decodable, i.e. incomplete but
+  // would be sent to the decoder.
+  void UpdateDecodableSession(int rtt_ms);
+
+  // If this session has been NACKed by the jitter buffer.
+  bool session_nack_;
+  bool complete_;
+  bool decodable_;
+  webrtc::FrameType frame_type_;
+  bool previous_frame_loss_;
+  // Packets in this frame.
+  PacketList packets_;
+  int empty_seq_num_low_;
+  int empty_seq_num_high_;
+  // Number of packets discarded because the decoder can't use them.
+  int packets_not_decodable_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_SESSION_INFO_H_
diff --git a/src/modules/video_coding/main/source/session_info_unittest.cc b/src/modules/video_coding/main/source/session_info_unittest.cc
new file mode 100644
index 0000000..e017735
--- /dev/null
+++ b/src/modules/video_coding/main/source/session_info_unittest.cc
@@ -0,0 +1,931 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <string.h>
+
+#include "gtest/gtest.h"
+#include "modules/interface/module_common_types.h"
+#include "modules/video_coding/main/source/packet.h"
+#include "modules/video_coding/main/source/session_info.h"
+
+namespace webrtc {
+
+class TestSessionInfo : public ::testing::Test {
+ protected:
+  enum { kPacketBufferSize = 10 };
+  enum { kFrameBufferSize = 10 * kPacketBufferSize };
+
+  virtual void SetUp() {
+    memset(packet_buffer_, 0, kPacketBufferSize);
+    memset(frame_buffer_, 0, kFrameBufferSize);
+    session_.Reset();
+    packet_.Reset();
+    packet_.frameType = kVideoFrameDelta;
+    packet_.sizeBytes = kPacketBufferSize;
+    packet_.dataPtr = packet_buffer_;
+    packet_.seqNum = 0;
+    packet_.timestamp = 0;
+  }
+
+  void FillPacket(uint8_t start_value) {
+    for (int i = 0; i < kPacketBufferSize; ++i)
+      packet_buffer_[i] = start_value + i;
+  }
+
+  void VerifyPacket(uint8_t* start_ptr, uint8_t start_value) {
+    for (int j = 0; j < kPacketBufferSize; ++j) {
+      ASSERT_EQ(start_value + j, start_ptr[j]);
+    }
+  }
+
+  uint8_t packet_buffer_[kPacketBufferSize];
+  uint8_t frame_buffer_[kFrameBufferSize];
+  VCMSessionInfo session_;
+  VCMPacket packet_;
+};
+
+class TestVP8Partitions : public TestSessionInfo {
+ protected:
+  enum { kMaxVP8Partitions = 9 };
+
+  virtual void SetUp() {
+    TestSessionInfo::SetUp();
+    vp8_header_ = &packet_header_.type.Video.codecHeader.VP8;
+    packet_header_.frameType = kVideoFrameDelta;
+    packet_header_.type.Video.codec = kRTPVideoVP8;
+    vp8_header_->InitRTPVideoHeaderVP8();
+    fragmentation_.VerifyAndAllocateFragmentationHeader(kMaxVP8Partitions);
+  }
+
+  bool VerifyPartition(int partition_id,
+                       int packets_expected,
+                       int start_value) {
+    EXPECT_EQ(static_cast<uint32_t>(packets_expected * kPacketBufferSize),
+              fragmentation_.fragmentationLength[partition_id]);
+    for (int i = 0; i < packets_expected; ++i) {
+      int packet_index = fragmentation_.fragmentationOffset[partition_id] +
+          i * kPacketBufferSize;
+      if (packet_index + kPacketBufferSize > kFrameBufferSize)
+        return false;
+      VerifyPacket(frame_buffer_ + packet_index, start_value + i);
+    }
+    return true;
+  }
+
+  WebRtcRTPHeader packet_header_;
+  RTPVideoHeaderVP8* vp8_header_;
+  RTPFragmentationHeader fragmentation_;
+};
+
+class TestNalUnits : public TestSessionInfo {
+ protected:
+  virtual void SetUp() {
+    TestSessionInfo::SetUp();
+    packet_.codec = kVideoCodecVP8;
+  }
+
+  bool VerifyNalu(int offset, int packets_expected, int start_value) {
+    EXPECT_GE(session_.SessionLength(),
+              packets_expected * kPacketBufferSize);
+    for (int i = 0; i < packets_expected; ++i) {
+      int packet_index = offset * kPacketBufferSize + i * kPacketBufferSize;
+      VerifyPacket(frame_buffer_ + packet_index, start_value + i);
+    }
+    return true;
+  }
+};
+
+class TestNackList : public TestSessionInfo {
+ protected:
+  enum { kMaxSeqNumListLength = 30 };
+
+  virtual void SetUp() {
+    TestSessionInfo::SetUp();
+    seq_num_list_length_ = 0;
+    memset(seq_num_list_, 0, sizeof(seq_num_list_));
+  }
+
+  void BuildSeqNumList(uint16_t low,
+                       uint16_t high) {
+    int i = 0;
+    while (low != high + 1) {
+      EXPECT_LT(i, kMaxSeqNumListLength);
+      if (i >= kMaxSeqNumListLength) {
+        seq_num_list_length_ = kMaxSeqNumListLength;
+        return;
+      }
+      seq_num_list_[i] = low;
+      low++;
+      i++;
+    }
+    seq_num_list_length_ = i;
+  }
+
+  void VerifyAll(int value) {
+    for (int i = 0; i < seq_num_list_length_; ++i)
+      EXPECT_EQ(seq_num_list_[i], value);
+  }
+
+  int seq_num_list_[kMaxSeqNumListLength];
+  int seq_num_list_length_;
+};
+
+TEST_F(TestSessionInfo, TestSimpleAPIs) {
+  packet_.isFirstPacket = true;
+  packet_.seqNum = 0xFFFE;
+  packet_.sizeBytes = kPacketBufferSize;
+  packet_.frameType = kVideoFrameKey;
+  FillPacket(0);
+  ASSERT_EQ(kPacketBufferSize,
+            session_.InsertPacket(packet_, frame_buffer_, false, 0));
+  EXPECT_FALSE(session_.HaveLastPacket());
+  EXPECT_EQ(kVideoFrameKey, session_.FrameType());
+
+  packet_.isFirstPacket = false;
+  packet_.markerBit = true;
+  packet_.seqNum += 1;
+  ASSERT_EQ(kPacketBufferSize,
+            session_.InsertPacket(packet_, frame_buffer_, false, 0));
+  EXPECT_TRUE(session_.HaveLastPacket());
+  EXPECT_EQ(packet_.seqNum, session_.HighSequenceNumber());
+  EXPECT_EQ(0xFFFE, session_.LowSequenceNumber());
+
+  // Insert empty packet which will be the new high sequence number.
+  // To make things more difficult we will make sure to have a wrap here.
+  packet_.isFirstPacket = false;
+  packet_.markerBit = true;
+  packet_.seqNum  = 2;
+  packet_.sizeBytes = 0;
+  packet_.frameType = kFrameEmpty;
+  ASSERT_EQ(0,
+            session_.InsertPacket(packet_, frame_buffer_, false, 0));
+  EXPECT_EQ(packet_.seqNum, session_.HighSequenceNumber());
+}
+
+TEST_F(TestSessionInfo, NormalOperation) {
+  packet_.seqNum = 0xFFFF;
+  packet_.isFirstPacket = true;
+  packet_.markerBit = false;
+  FillPacket(0);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  packet_.isFirstPacket = false;
+  for (int i = 1; i < 9; ++i) {
+    packet_.seqNum += 1;
+    FillPacket(i);
+    ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+              kPacketBufferSize);
+  }
+
+  packet_.seqNum += 1;
+  packet_.markerBit = true;
+  FillPacket(9);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  EXPECT_EQ(0, session_.packets_not_decodable());
+  EXPECT_EQ(10 * kPacketBufferSize, session_.SessionLength());
+  for (int i = 0; i < 10; ++i) {
+    SCOPED_TRACE("Calling VerifyPacket");
+    VerifyPacket(frame_buffer_ + i * kPacketBufferSize, i);
+  }
+}
+
+TEST_F(TestVP8Partitions, TwoPartitionsOneLoss) {
+  // Partition 0 | Partition 1
+  // [ 0 ] [ 2 ] | [ 3 ]
+  packet_header_.type.Video.isFirstPacket = true;
+  vp8_header_->beginningOfPartition = true;
+  vp8_header_->partitionId = 0;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber = 0;
+  FillPacket(0);
+  VCMPacket* packet = new VCMPacket(packet_buffer_, kPacketBufferSize,
+                                    packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 0;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 2;
+  FillPacket(2);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 1;
+  vp8_header_->beginningOfPartition = true;
+  packet_header_.header.markerBit = true;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(3);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  // One packet should be removed (end of partition 0).
+  EXPECT_EQ(session_.BuildVP8FragmentationHeader(frame_buffer_,
+                                                 kFrameBufferSize,
+                                                 &fragmentation_),
+            2*kPacketBufferSize);
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(0, 1, 0));
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(1, 1, 3));
+}
+
+TEST_F(TestVP8Partitions, TwoPartitionsOneLoss2) {
+  // Partition 0 | Partition 1
+  // [ 1 ] [ 2 ] | [ 3 ] [ 5 ]
+  packet_header_.type.Video.isFirstPacket = true;
+  vp8_header_->beginningOfPartition = true;
+  vp8_header_->partitionId = 0;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber = 1;
+  FillPacket(1);
+  VCMPacket* packet = new VCMPacket(packet_buffer_, kPacketBufferSize,
+                                    packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0)
+            , kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 0;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(2);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 1;
+  vp8_header_->beginningOfPartition = true;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(3);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 1;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = true;
+  packet_header_.header.sequenceNumber += 2;
+  FillPacket(5);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  // One packet should be removed (end of partition 2), 3 left.
+  EXPECT_EQ(session_.BuildVP8FragmentationHeader(frame_buffer_,
+                                                 kFrameBufferSize,
+                                                 &fragmentation_),
+            3*kPacketBufferSize);
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(0, 2, 1));
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(1, 1, 3));
+  EXPECT_EQ(1, session_.packets_not_decodable());
+}
+
+TEST_F(TestVP8Partitions, TwoPartitionsNoLossWrap) {
+  // Partition 0       | Partition 1
+  // [ fffd ] [ fffe ] | [ ffff ] [ 0 ]
+  packet_header_.type.Video.isFirstPacket = true;
+  vp8_header_->beginningOfPartition = true;
+  vp8_header_->partitionId = 0;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber = 0xfffd;
+  FillPacket(0);
+  VCMPacket* packet = new VCMPacket(packet_buffer_, kPacketBufferSize,
+                                    packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 0;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(1);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 1;
+  vp8_header_->beginningOfPartition = true;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(2);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 1;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = true;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(3);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  // No packet should be removed.
+  EXPECT_EQ(session_.BuildVP8FragmentationHeader(frame_buffer_,
+                                                 kFrameBufferSize,
+                                                 &fragmentation_),
+            4*kPacketBufferSize);
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(0, 2, 0));
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(1, 2, 2));
+  EXPECT_EQ(0, session_.packets_not_decodable());
+}
+
+TEST_F(TestVP8Partitions, TwoPartitionsLossWrap) {
+  // Partition 0       | Partition 1
+  // [ fffd ] [ fffe ] | [ ffff ] [ 1 ]
+  packet_header_.type.Video.isFirstPacket = true;
+  vp8_header_->beginningOfPartition = true;
+  vp8_header_->partitionId = 0;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber = 0xfffd;
+  FillPacket(0);
+  VCMPacket* packet = new VCMPacket(packet_buffer_, kPacketBufferSize,
+                                    packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 0;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(1);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 1;
+  vp8_header_->beginningOfPartition = true;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(2);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 1;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = true;
+  packet_header_.header.sequenceNumber += 2;
+  FillPacket(3);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  // One packet should be removed from the last partition
+  EXPECT_EQ(session_.BuildVP8FragmentationHeader(frame_buffer_,
+                                                 kFrameBufferSize,
+                                                 &fragmentation_),
+            3*kPacketBufferSize);
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(0, 2, 0));
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(1, 1, 2));
+  EXPECT_EQ(1, session_.packets_not_decodable());
+}
+
+
+TEST_F(TestVP8Partitions, ThreePartitionsOneMissing) {
+  // Partition 1  |Partition 2    | Partition 3
+  // [ 1 ] [ 2 ]  |               | [ 5 ] | [ 6 ]
+  packet_header_.type.Video.isFirstPacket = true;
+  vp8_header_->beginningOfPartition = true;
+  vp8_header_->partitionId = 0;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber = 1;
+  FillPacket(1);
+  VCMPacket* packet = new VCMPacket(packet_buffer_, kPacketBufferSize,
+                                    packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 0;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(2);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 2;
+  vp8_header_->beginningOfPartition = true;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 3;
+  FillPacket(5);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 2;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = true;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(6);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  // No packet should be removed.
+  EXPECT_EQ(session_.BuildVP8FragmentationHeader(frame_buffer_,
+                                                 kFrameBufferSize,
+                                                 &fragmentation_),
+            4*kPacketBufferSize);
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(0, 2, 1));
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(2, 2, 5));
+  EXPECT_EQ(0, session_.packets_not_decodable());
+}
+
+TEST_F(TestVP8Partitions, ThreePartitionsLossInSecond) {
+  // Partition 0  |Partition 1          | Partition 2
+  // [ 1 ] [ 2 ]  |        [ 4 ] [ 5 ]  | [ 6 ] [ 7 ]
+  packet_header_.type.Video.isFirstPacket = true;
+  vp8_header_->beginningOfPartition = true;
+  vp8_header_->partitionId = 0;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber = 1;
+  FillPacket(1);
+  VCMPacket* packet = new VCMPacket(packet_buffer_, kPacketBufferSize,
+                                    packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 0;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(2);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 1;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 2;
+  FillPacket(4);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 1;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(5);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 2;
+  vp8_header_->beginningOfPartition = true;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(6);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 2;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = true;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(7);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  // 2 partitions left. 2 packets removed from second partition
+  EXPECT_EQ(session_.BuildVP8FragmentationHeader(frame_buffer_,
+                                                 kFrameBufferSize,
+                                                 &fragmentation_),
+            4*kPacketBufferSize);
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(0, 2, 1));
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(2, 2, 6));
+  EXPECT_EQ(2, session_.packets_not_decodable());
+}
+
+TEST_F(TestVP8Partitions, AggregationOverTwoPackets) {
+  // Partition 0   | Partition 1         | Partition 2
+  // [ 0           |           ]  [ 1 ]  | [ 2 ]
+  packet_header_.type.Video.isFirstPacket = true;
+  vp8_header_->beginningOfPartition = true;
+  vp8_header_->partitionId = 0;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber = 0;
+  FillPacket(0);
+  VCMPacket* packet = new VCMPacket(packet_buffer_, kPacketBufferSize,
+                                    packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 1;
+  vp8_header_->beginningOfPartition = false;
+  packet_header_.header.markerBit = false;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(1);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  packet_header_.type.Video.isFirstPacket = false;
+  vp8_header_->partitionId = 2;
+  vp8_header_->beginningOfPartition = true;
+  packet_header_.header.markerBit = true;
+  packet_header_.header.sequenceNumber += 1;
+  FillPacket(2);
+  packet = new VCMPacket(packet_buffer_, kPacketBufferSize, packet_header_);
+  ASSERT_EQ(session_.InsertPacket(*packet, frame_buffer_, false, 0),
+            kPacketBufferSize);
+  delete packet;
+
+  // No packets removed.
+  EXPECT_EQ(session_.BuildVP8FragmentationHeader(frame_buffer_,
+                                                 kFrameBufferSize,
+                                                 &fragmentation_),
+            3*kPacketBufferSize);
+  EXPECT_EQ(0, session_.packets_not_decodable());
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(0, 2, 0));
+  // This partition is aggregated in partition 0
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(1, 0, 0));
+  SCOPED_TRACE("Calling VerifyPartition");
+  EXPECT_TRUE(VerifyPartition(2, 1, 2));
+}
+
+TEST_F(TestNalUnits, OnlyReceivedEmptyPacket) {
+  packet_.isFirstPacket = false;
+  packet_.completeNALU = kNaluComplete;
+  packet_.frameType = kFrameEmpty;
+  packet_.sizeBytes = 0;
+  packet_.seqNum = 0;
+  packet_.markerBit = false;
+  ASSERT_EQ(0, session_.InsertPacket(packet_, frame_buffer_, false, 0));
+
+  EXPECT_EQ(0, session_.MakeDecodable());
+  EXPECT_EQ(0, session_.SessionLength());
+  EXPECT_EQ(0, session_.packets_not_decodable());
+}
+
+TEST_F(TestNalUnits, OneIsolatedNaluLoss) {
+  packet_.isFirstPacket = true;
+  packet_.completeNALU = kNaluComplete;
+  packet_.seqNum = 0;
+  packet_.markerBit = false;
+  FillPacket(0);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  packet_.isFirstPacket = false;
+  packet_.completeNALU = kNaluComplete;
+  packet_.seqNum += 2;
+  packet_.markerBit = true;
+  FillPacket(2);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  EXPECT_EQ(0, session_.MakeDecodable());
+  EXPECT_EQ(2 * kPacketBufferSize, session_.SessionLength());
+  EXPECT_EQ(0, session_.packets_not_decodable());
+  SCOPED_TRACE("Calling VerifyNalu");
+  EXPECT_TRUE(VerifyNalu(0, 1, 0));
+  SCOPED_TRACE("Calling VerifyNalu");
+  EXPECT_TRUE(VerifyNalu(1, 1, 2));
+}
+
+TEST_F(TestNalUnits, LossInMiddleOfNalu) {
+  packet_.isFirstPacket = true;
+  packet_.completeNALU = kNaluComplete;
+  packet_.seqNum = 0;
+  packet_.markerBit = false;
+  FillPacket(0);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  packet_.isFirstPacket = false;
+  packet_.completeNALU = kNaluEnd;
+  packet_.seqNum += 2;
+  packet_.markerBit = true;
+  FillPacket(2);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  EXPECT_EQ(kPacketBufferSize, session_.MakeDecodable());
+  EXPECT_EQ(kPacketBufferSize, session_.SessionLength());
+  EXPECT_EQ(1, session_.packets_not_decodable());
+  SCOPED_TRACE("Calling VerifyNalu");
+  EXPECT_TRUE(VerifyNalu(0, 1, 0));
+}
+
+TEST_F(TestNalUnits, StartAndEndOfLastNalUnitLost) {
+  packet_.isFirstPacket = true;
+  packet_.completeNALU = kNaluComplete;
+  packet_.seqNum = 0;
+  packet_.markerBit = false;
+  FillPacket(0);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  packet_.isFirstPacket = false;
+  packet_.completeNALU = kNaluIncomplete;
+  packet_.seqNum += 2;
+  packet_.markerBit = false;
+  FillPacket(1);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  EXPECT_EQ(kPacketBufferSize, session_.MakeDecodable());
+  EXPECT_EQ(kPacketBufferSize, session_.SessionLength());
+  EXPECT_EQ(1, session_.packets_not_decodable());
+  SCOPED_TRACE("Calling VerifyNalu");
+  EXPECT_TRUE(VerifyNalu(0, 1, 0));
+}
+
+TEST_F(TestNalUnits, ReorderWrapNoLoss) {
+  packet_.seqNum = 0xFFFF;
+  packet_.isFirstPacket = false;
+  packet_.completeNALU = kNaluIncomplete;
+  packet_.seqNum += 1;
+  packet_.markerBit = false;
+  FillPacket(1);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  packet_.isFirstPacket = true;
+  packet_.completeNALU = kNaluComplete;
+  packet_.seqNum -= 1;
+  packet_.markerBit = false;
+  FillPacket(0);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  packet_.isFirstPacket = false;
+  packet_.completeNALU = kNaluEnd;
+  packet_.seqNum += 2;
+  packet_.markerBit = true;
+  FillPacket(2);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  EXPECT_EQ(0, session_.MakeDecodable());
+  EXPECT_EQ(0, session_.packets_not_decodable());
+  EXPECT_EQ(3*kPacketBufferSize, session_.SessionLength());
+  SCOPED_TRACE("Calling VerifyNalu");
+  EXPECT_TRUE(VerifyNalu(0, 1, 0));
+}
+
+TEST_F(TestNalUnits, WrapLosses) {
+  packet_.seqNum = 0xFFFF;
+  packet_.isFirstPacket = false;
+  packet_.completeNALU = kNaluIncomplete;
+  packet_.markerBit = false;
+  FillPacket(1);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  packet_.isFirstPacket = false;
+  packet_.completeNALU = kNaluEnd;
+  packet_.seqNum += 2;
+  packet_.markerBit = true;
+  FillPacket(2);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  EXPECT_EQ(2 * kPacketBufferSize, session_.MakeDecodable());
+  EXPECT_EQ(0, session_.SessionLength());
+  EXPECT_EQ(2, session_.packets_not_decodable());
+}
+
+TEST_F(TestNalUnits, ReorderWrapLosses) {
+  packet_.seqNum = 0xFFFF;
+
+  packet_.isFirstPacket = false;
+  packet_.completeNALU = kNaluEnd;
+  packet_.seqNum += 2;
+  packet_.markerBit = true;
+  FillPacket(2);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  packet_.seqNum -= 2;
+  packet_.isFirstPacket = false;
+  packet_.completeNALU = kNaluIncomplete;
+  packet_.markerBit = false;
+  FillPacket(1);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  EXPECT_EQ(2 * kPacketBufferSize, session_.MakeDecodable());
+  EXPECT_EQ(0, session_.SessionLength());
+  EXPECT_EQ(2, session_.packets_not_decodable());
+}
+
+TEST_F(TestNackList, NoLosses) {
+  uint16_t low = 0xFFFF - 5;
+
+  packet_.seqNum = low;
+  packet_.isFirstPacket = true;
+  packet_.markerBit = false;
+  FillPacket(0);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  for (int i = 1; i < 9; ++i) {
+    packet_.seqNum += 1;
+    packet_.isFirstPacket = false;
+    packet_.markerBit = false;
+    FillPacket(i + 1);
+    ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+              kPacketBufferSize);
+  }
+
+  packet_.seqNum += 1;
+  packet_.isFirstPacket = false;
+  packet_.markerBit = true;
+  FillPacket(10);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  EXPECT_EQ(10 * kPacketBufferSize, session_.SessionLength());
+  BuildSeqNumList(low, packet_.seqNum);
+  EXPECT_EQ(0, session_.BuildHardNackList(seq_num_list_, seq_num_list_length_));
+  EXPECT_FALSE(session_.session_nack());
+  SCOPED_TRACE("Calling VerifyAll");
+  VerifyAll(-1);
+
+  BuildSeqNumList(low, packet_.seqNum);
+  EXPECT_EQ(0, session_.BuildSoftNackList(seq_num_list_, seq_num_list_length_,
+                                          60));
+  SCOPED_TRACE("Calling VerifyAll");
+  VerifyAll(-1);
+}
+
+TEST_F(TestNackList, FiveLossesSpreadOut) {
+  uint16_t low = 0xFFFF - 5;
+
+  packet_.seqNum = low;
+  packet_.isFirstPacket = false;
+  packet_.markerBit = true;
+  FillPacket(0);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  for (int i = 1; i < 9; ++i) {
+    packet_.seqNum += 1;
+    packet_.isFirstPacket = false;
+    packet_.markerBit = false;
+    FillPacket(i);
+    if ((i + 1) % 2)
+      ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+                kPacketBufferSize);
+  }
+
+  packet_.seqNum++;  // Simulate loss of last packet.
+
+  EXPECT_EQ(5 * kPacketBufferSize, session_.SessionLength());
+  BuildSeqNumList(low, packet_.seqNum);
+  EXPECT_EQ(0, session_.BuildHardNackList(seq_num_list_, seq_num_list_length_));
+  for (int i = 0; i < seq_num_list_length_; ++i) {
+    if (i % 2)
+      EXPECT_EQ(static_cast<uint16_t>(low + i), seq_num_list_[i]);
+    else
+      EXPECT_EQ(-1, seq_num_list_[i]);
+  }
+
+  BuildSeqNumList(low, packet_.seqNum);
+  EXPECT_EQ(0, session_.BuildSoftNackList(seq_num_list_, seq_num_list_length_,
+                                          60));
+  EXPECT_EQ(true, session_.session_nack());
+  for (int i = 0; i < seq_num_list_length_; ++i) {
+    if (i % 2)
+      EXPECT_EQ(static_cast<uint16_t>(low + i), seq_num_list_[i]);
+    else
+      EXPECT_EQ(-1, seq_num_list_[i]);
+  }
+}
+
+TEST_F(TestNackList, FirstAndLastLost) {
+  uint16_t low = 0xFFFF;
+
+  packet_.seqNum = low + 1;
+  packet_.isFirstPacket = false;
+  packet_.markerBit = false;
+  FillPacket(0);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0),
+            kPacketBufferSize);
+
+  EXPECT_EQ(kPacketBufferSize, session_.SessionLength());
+  BuildSeqNumList(low, packet_.seqNum + 1);
+  EXPECT_EQ(0, session_.BuildHardNackList(seq_num_list_, seq_num_list_length_));
+  EXPECT_EQ(0xFFFF, seq_num_list_[0]);
+  EXPECT_EQ(-1, seq_num_list_[1]);
+  EXPECT_EQ(1, seq_num_list_[2]);
+
+  BuildSeqNumList(low, packet_.seqNum + 1);
+  EXPECT_EQ(0, session_.BuildSoftNackList(seq_num_list_,seq_num_list_length_,
+                                          60));
+  EXPECT_EQ(true, session_.session_nack());
+  EXPECT_EQ(0xFFFF, seq_num_list_[0]);
+  EXPECT_EQ(-1, seq_num_list_[1]);
+  EXPECT_EQ(1, seq_num_list_[2]);
+}
+
+TEST_F(TestNackList, LostAllButEmptyPackets) {
+  uint16_t low = 0;
+  packet_.seqNum = low + 1;
+  packet_.isFirstPacket = false;
+  packet_.markerBit = false;
+  packet_.frameType = kFrameEmpty;
+  packet_.sizeBytes = 0;
+  FillPacket(0);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0), 0);
+
+  packet_.seqNum = low + 3;
+  packet_.isFirstPacket = false;
+  packet_.markerBit = false;
+  packet_.frameType = kFrameEmpty;
+  packet_.sizeBytes = 0;
+  FillPacket(0);
+  ASSERT_EQ(session_.InsertPacket(packet_, frame_buffer_, false, 0), 0);
+
+  EXPECT_EQ(0, session_.SessionLength());
+  BuildSeqNumList(low, packet_.seqNum + 1);
+  EXPECT_EQ(0, session_.BuildSoftNackList(seq_num_list_, seq_num_list_length_,
+                                          60));
+  EXPECT_EQ(true, session_.session_nack());
+  EXPECT_EQ(0, seq_num_list_[0]);
+  EXPECT_EQ(-1, seq_num_list_[1]);
+  EXPECT_EQ(-2, seq_num_list_[2]);
+  EXPECT_EQ(-2, seq_num_list_[3]);
+  EXPECT_EQ(4, seq_num_list_[4]);
+}
+}  // namespace webrtc
diff --git a/src/modules/video_coding/main/source/tick_time_base.h b/src/modules/video_coding/main/source/tick_time_base.h
new file mode 100644
index 0000000..a212591
--- /dev/null
+++ b/src/modules/video_coding/main/source/tick_time_base.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_TICK_TIME_BASE_H_
+#define WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_TICK_TIME_BASE_H_
+
+#include "system_wrappers/interface/tick_util.h"
+
+namespace webrtc {
+
+// This class provides a mockable wrapper to TickTime.
+class TickTimeBase {
+ public:
+  virtual ~TickTimeBase() {}
+
+  // "Now" in milliseconds.
+  virtual int64_t MillisecondTimestamp() const {
+    return TickTime::MillisecondTimestamp();
+  }
+
+  // "Now" in microseconds.
+  virtual int64_t MicrosecondTimestamp() const {
+    return TickTime::MicrosecondTimestamp();
+  }
+};
+
+}  // namespace
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_MAIN_SOURCE_TICK_TIME_BASE_H_
diff --git a/src/modules/video_coding/main/source/timestamp_extrapolator.cc b/src/modules/video_coding/main/source/timestamp_extrapolator.cc
new file mode 100644
index 0000000..e272eb9
--- /dev/null
+++ b/src/modules/video_coding/main/source/timestamp_extrapolator.cc
@@ -0,0 +1,262 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "internal_defines.h"
+#include "modules/video_coding/main/source/tick_time_base.h"
+#include "timestamp_extrapolator.h"
+#include "trace.h"
+
+namespace webrtc {
+
+VCMTimestampExtrapolator::VCMTimestampExtrapolator(TickTimeBase* clock,
+                                                   WebRtc_Word32 vcmId,
+                                                   WebRtc_Word32 id)
+:
+_rwLock(RWLockWrapper::CreateRWLock()),
+_vcmId(vcmId),
+_id(id),
+_clock(clock),
+_startMs(0),
+_firstTimestamp(0),
+_wrapArounds(0),
+_prevTs90khz(0),
+_lambda(1),
+_firstAfterReset(true),
+_packetCount(0),
+_startUpFilterDelayInPackets(2),
+_detectorAccumulatorPos(0),
+_detectorAccumulatorNeg(0),
+_alarmThreshold(60e3),
+_accDrift(6600), // in timestamp ticks, i.e. 15 ms
+_accMaxError(7000),
+_P11(1e10)
+{
+    Reset(_clock->MillisecondTimestamp());
+}
+
+VCMTimestampExtrapolator::~VCMTimestampExtrapolator()
+{
+    delete _rwLock;
+}
+
+void
+VCMTimestampExtrapolator::Reset(const WebRtc_Word64 nowMs /* = -1 */)
+{
+    WriteLockScoped wl(*_rwLock);
+    if (nowMs > -1)
+    {
+        _startMs = nowMs;
+    }
+    else
+    {
+        _startMs = _clock->MillisecondTimestamp();
+    }
+    _prevMs = _startMs;
+    _firstTimestamp = 0;
+    _w[0] = 90.0;
+    _w[1] = 0;
+    _P[0][0] = 1;
+    _P[1][1] = _P11;
+    _P[0][1] = _P[1][0] = 0;
+    _firstAfterReset = true;
+    _prevTs90khz = 0;
+    _wrapArounds = 0;
+    _packetCount = 0;
+    _detectorAccumulatorPos = 0;
+    _detectorAccumulatorNeg = 0;
+}
+
+void
+VCMTimestampExtrapolator::Update(WebRtc_Word64 tMs, WebRtc_UWord32 ts90khz, bool trace)
+{
+
+    _rwLock->AcquireLockExclusive();
+    if (tMs - _prevMs > 10e3)
+    {
+        // Ten seconds without a complete frame.
+        // Reset the extrapolator
+        _rwLock->ReleaseLockExclusive();
+        Reset();
+        _rwLock->AcquireLockExclusive();
+    }
+    else
+    {
+        _prevMs = tMs;
+    }
+
+    // Remove offset to prevent badly scaled matrices
+    tMs -= _startMs;
+
+    WebRtc_Word32 prevWrapArounds = _wrapArounds;
+    CheckForWrapArounds(ts90khz);
+    WebRtc_Word32 wrapAroundsSincePrev = _wrapArounds - prevWrapArounds;
+
+    if (wrapAroundsSincePrev == 0 && ts90khz < _prevTs90khz)
+    {
+        _rwLock->ReleaseLockExclusive();
+        return;
+    }
+
+    if (_firstAfterReset)
+    {
+        // Make an initial guess of the offset,
+        // should be almost correct since tMs - _startMs
+        // should about zero at this time.
+        _w[1] = -_w[0] * tMs;
+        _firstTimestamp = ts90khz;
+        _firstAfterReset = false;
+    }
+
+    // Compensate for wraparounds by changing the line offset
+    _w[1] = _w[1] - wrapAroundsSincePrev * ((static_cast<WebRtc_Word64>(1)<<32) - 1);
+
+    double residual = (static_cast<double>(ts90khz) - _firstTimestamp) - static_cast<double>(tMs) * _w[0] - _w[1];
+    if (DelayChangeDetection(residual, trace) &&
+        _packetCount >= _startUpFilterDelayInPackets)
+    {
+        // A sudden change of average network delay has been detected.
+        // Force the filter to adjust its offset parameter by changing
+        // the offset uncertainty. Don't do this during startup.
+        _P[1][1] = _P11;
+    }
+    //T = [t(k) 1]';
+    //that = T'*w;
+    //K = P*T/(lambda + T'*P*T);
+    double K[2];
+    K[0] = _P[0][0] * tMs + _P[0][1];
+    K[1] = _P[1][0] * tMs + _P[1][1];
+    double TPT = _lambda + tMs * K[0] + K[1];
+    K[0] /= TPT;
+    K[1] /= TPT;
+    //w = w + K*(ts(k) - that);
+    _w[0] = _w[0] + K[0] * residual;
+    _w[1] = _w[1] + K[1] * residual;
+    //P = 1/lambda*(P - K*T'*P);
+    double p00 = 1 / _lambda * (_P[0][0] - (K[0] * tMs * _P[0][0] + K[0] * _P[1][0]));
+    double p01 = 1 / _lambda * (_P[0][1] - (K[0] * tMs * _P[0][1] + K[0] * _P[1][1]));
+    _P[1][0] = 1 / _lambda * (_P[1][0] - (K[1] * tMs * _P[0][0] + K[1] * _P[1][0]));
+    _P[1][1] = 1 / _lambda * (_P[1][1] - (K[1] * tMs * _P[0][1] + K[1] * _P[1][1]));
+    _P[0][0] = p00;
+    _P[0][1] = p01;
+    if (_packetCount < _startUpFilterDelayInPackets)
+    {
+        _packetCount++;
+    }
+    if (trace)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _id),  "w[0]=%f w[1]=%f ts=%u tMs=%u", _w[0], _w[1], ts90khz, tMs);
+    }
+    _rwLock->ReleaseLockExclusive();
+}
+
+WebRtc_UWord32
+VCMTimestampExtrapolator::ExtrapolateTimestamp(WebRtc_Word64 tMs) const
+{
+    ReadLockScoped rl(*_rwLock);
+    WebRtc_UWord32 timestamp = 0;
+    if (_packetCount == 0)
+    {
+        timestamp = 0;
+    }
+    else if (_packetCount < _startUpFilterDelayInPackets)
+    {
+        timestamp = static_cast<WebRtc_UWord32>(90.0 * (tMs - _prevMs) + _prevTs90khz + 0.5);
+    }
+    else
+    {
+        timestamp = static_cast<WebRtc_UWord32>(_w[0] * (tMs - _startMs) + _w[1] + _firstTimestamp + 0.5);
+    }
+    return timestamp;
+}
+
+WebRtc_Word64
+VCMTimestampExtrapolator::ExtrapolateLocalTime(WebRtc_UWord32 timestamp90khz) const
+{
+    ReadLockScoped rl(*_rwLock);
+    WebRtc_Word64 localTimeMs = 0;
+    if (_packetCount == 0)
+    {
+        localTimeMs = -1;
+    }
+    else if (_packetCount < _startUpFilterDelayInPackets)
+    {
+        localTimeMs = _prevMs + static_cast<WebRtc_Word64>(static_cast<double>(timestamp90khz - _prevTs90khz) / 90.0 + 0.5);
+    }
+    else
+    {
+        if (_w[0] < 1e-3)
+        {
+            localTimeMs = _startMs;
+        }
+        else
+        {
+            double timestampDiff = static_cast<double>(timestamp90khz) - static_cast<double>(_firstTimestamp);
+            localTimeMs = static_cast<WebRtc_Word64>(static_cast<double>(_startMs) + (timestampDiff - _w[1]) / _w[0] + 0.5);
+        }
+    }
+    return localTimeMs;
+}
+
+// Investigates if the timestamp clock has overflowed since the last timestamp and
+// keeps track of the number of wrap arounds since reset.
+void
+VCMTimestampExtrapolator::CheckForWrapArounds(WebRtc_UWord32 ts90khz)
+{
+    if (_prevTs90khz == 0)
+    {
+        _prevTs90khz = ts90khz;
+        return;
+    }
+    if (ts90khz < _prevTs90khz)
+    {
+        // This difference will probably be less than -2^31 if we have had a wrap around
+        // (e.g. timestamp = 1, _previousTimestamp = 2^32 - 1). Since it is casted to a Word32,
+        // it should be positive.
+        if (static_cast<WebRtc_Word32>(ts90khz - _prevTs90khz) > 0)
+        {
+            // Forward wrap around
+            _wrapArounds++;
+        }
+    }
+    // This difference will probably be less than -2^31 if we have had a backward wrap around.
+    // Since it is casted to a Word32, it should be positive.
+    else if (static_cast<WebRtc_Word32>(_prevTs90khz - ts90khz) > 0)
+    {
+        // Backward wrap around
+        _wrapArounds--;
+    }
+    _prevTs90khz = ts90khz;
+}
+
+bool
+VCMTimestampExtrapolator::DelayChangeDetection(double error, bool trace)
+{
+    // CUSUM detection of sudden delay changes
+    error = (error > 0) ? VCM_MIN(error, _accMaxError) : VCM_MAX(error, -_accMaxError);
+    _detectorAccumulatorPos = VCM_MAX(_detectorAccumulatorPos + error - _accDrift, (double)0);
+    _detectorAccumulatorNeg = VCM_MIN(_detectorAccumulatorNeg + error + _accDrift, (double)0);
+    if (_detectorAccumulatorPos > _alarmThreshold || _detectorAccumulatorNeg < -_alarmThreshold)
+    {
+        // Alarm
+        if (trace)
+        {
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _id),  "g1=%f g2=%f alarm=1", _detectorAccumulatorPos, _detectorAccumulatorNeg);
+        }
+        _detectorAccumulatorPos = _detectorAccumulatorNeg = 0;
+        return true;
+    }
+    if (trace)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _id),  "g1=%f g2=%f alarm=0", _detectorAccumulatorPos, _detectorAccumulatorNeg);
+    }
+    return false;
+}
+
+}
diff --git a/src/modules/video_coding/main/source/timestamp_extrapolator.h b/src/modules/video_coding/main/source/timestamp_extrapolator.h
new file mode 100644
index 0000000..901d8d4
--- /dev/null
+++ b/src/modules/video_coding/main/source/timestamp_extrapolator.h
@@ -0,0 +1,63 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_EXTRAPOLATOR_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_EXTRAPOLATOR_H_
+
+#include "typedefs.h"
+#include "rw_lock_wrapper.h"
+
+namespace webrtc
+{
+
+class TickTimeBase;
+
+class VCMTimestampExtrapolator
+{
+public:
+    VCMTimestampExtrapolator(TickTimeBase* clock,
+                             WebRtc_Word32 vcmId = 0,
+                             WebRtc_Word32 receiverId = 0);
+    ~VCMTimestampExtrapolator();
+    void Update(WebRtc_Word64 tMs, WebRtc_UWord32 ts90khz, bool trace = true);
+    WebRtc_UWord32 ExtrapolateTimestamp(WebRtc_Word64 tMs) const;
+    WebRtc_Word64 ExtrapolateLocalTime(WebRtc_UWord32 timestamp90khz) const;
+    void Reset(WebRtc_Word64 nowMs = -1);
+
+private:
+    void CheckForWrapArounds(WebRtc_UWord32 ts90khz);
+    bool DelayChangeDetection(double error, bool trace = true);
+    RWLockWrapper*        _rwLock;
+    WebRtc_Word32         _vcmId;
+    WebRtc_Word32         _id;
+    TickTimeBase*         _clock;
+    double              _w[2];
+    double              _P[2][2];
+    WebRtc_Word64         _startMs;
+    WebRtc_Word64         _prevMs;
+    WebRtc_UWord32        _firstTimestamp;
+    WebRtc_Word32         _wrapArounds;
+    WebRtc_UWord32        _prevTs90khz;
+    const double        _lambda;
+    bool                _firstAfterReset;
+    WebRtc_UWord32        _packetCount;
+    const WebRtc_UWord32  _startUpFilterDelayInPackets;
+
+    double              _detectorAccumulatorPos;
+    double              _detectorAccumulatorNeg;
+    const double        _alarmThreshold;
+    const double        _accDrift;
+    const double        _accMaxError;
+    const double        _P11;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_EXTRAPOLATOR_H_
diff --git a/src/modules/video_coding/main/source/timestamp_map.cc b/src/modules/video_coding/main/source/timestamp_map.cc
new file mode 100644
index 0000000..f19819b
--- /dev/null
+++ b/src/modules/video_coding/main/source/timestamp_map.cc
@@ -0,0 +1,99 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "timestamp_map.h"
+#include <stdlib.h>
+#include <assert.h>
+
+namespace webrtc {
+
+// Constructor. Optional parameter specifies maximum number of
+// coexisting timers.
+VCMTimestampMap::VCMTimestampMap(WebRtc_Word32 length):
+    _nextAddIx(0),
+    _nextPopIx(0)
+{
+    if (length <= 0)
+    {
+        // default
+        length = 10;
+    }
+
+    _map = new VCMTimestampDataTuple[length];
+    _length = length;
+}
+
+// Destructor.
+VCMTimestampMap::~VCMTimestampMap()
+{
+    delete [] _map;
+}
+
+// Empty the list of timers.
+void
+VCMTimestampMap::Reset()
+{
+    _nextAddIx = 0;
+    _nextPopIx = 0;
+}
+
+WebRtc_Word32
+VCMTimestampMap::Add(WebRtc_UWord32 timestamp, void* data)
+{
+    _map[_nextAddIx].timestamp = timestamp;
+    _map[_nextAddIx].data = data;
+    _nextAddIx = (_nextAddIx + 1) % _length;
+
+    if (_nextAddIx == _nextPopIx)
+    {
+        // Circular list full; forget oldest entry
+        _nextPopIx = (_nextPopIx + 1) % _length;
+        return -1;
+    }
+    return 0;
+}
+
+void*
+VCMTimestampMap::Pop(WebRtc_UWord32 timestamp)
+{
+    while (!IsEmpty())
+    {
+        if (_map[_nextPopIx].timestamp == timestamp)
+        {
+            // found start time for this timestamp
+            void* data = _map[_nextPopIx].data;
+            _map[_nextPopIx].data = NULL;
+            _nextPopIx = (_nextPopIx + 1) % _length;
+            return data;
+        }
+        else if (_map[_nextPopIx].timestamp > timestamp)
+        {
+            // the timestamp we are looking for is not in the list
+            assert(_nextPopIx < _length && _nextPopIx >= 0);
+            return NULL;
+        }
+
+        // not in this position, check next (and forget this position)
+        _nextPopIx = (_nextPopIx + 1) % _length;
+    }
+
+    // could not find matching timestamp in list
+    assert(_nextPopIx < _length && _nextPopIx >= 0);
+    return NULL;
+}
+
+// Check if no timers are currently running
+bool
+VCMTimestampMap::IsEmpty() const
+{
+    return (_nextAddIx == _nextPopIx);
+}
+
+}
diff --git a/src/modules/video_coding/main/source/timestamp_map.h b/src/modules/video_coding/main/source/timestamp_map.h
new file mode 100644
index 0000000..fd532bc
--- /dev/null
+++ b/src/modules/video_coding/main/source/timestamp_map.h
@@ -0,0 +1,52 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_MAP_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_MAP_H_
+
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+struct VCMTimestampDataTuple
+{
+    WebRtc_UWord32    timestamp;
+    void*             data;
+};
+
+class VCMTimestampMap
+{
+public:
+    // Constructor. Optional parameter specifies maximum number of
+    // timestamps in map.
+    VCMTimestampMap(const WebRtc_Word32 length = 10);
+
+    // Destructor.
+    ~VCMTimestampMap();
+
+    // Empty the map
+    void Reset();
+
+    WebRtc_Word32 Add(WebRtc_UWord32 timestamp, void*  data);
+    void* Pop(WebRtc_UWord32 timestamp);
+
+private:
+    bool IsEmpty() const;
+
+    VCMTimestampDataTuple* _map;
+    WebRtc_Word32                   _nextAddIx;
+    WebRtc_Word32                   _nextPopIx;
+    WebRtc_Word32                   _length;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TIMESTAMP_MAP_H_
diff --git a/src/modules/video_coding/main/source/timing.cc b/src/modules/video_coding/main/source/timing.cc
new file mode 100644
index 0000000..aca05fa
--- /dev/null
+++ b/src/modules/video_coding/main/source/timing.cc
@@ -0,0 +1,337 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "trace.h"
+#include "internal_defines.h"
+#include "jitter_buffer_common.h"
+#include "timing.h"
+#include "timestamp_extrapolator.h"
+
+namespace webrtc {
+
+VCMTiming::VCMTiming(TickTimeBase* clock,
+                     WebRtc_Word32 vcmId,
+                     WebRtc_Word32 timingId,
+                     VCMTiming* masterTiming)
+:
+_critSect(CriticalSectionWrapper::CreateCriticalSection()),
+_vcmId(vcmId),
+_clock(clock),
+_timingId(timingId),
+_master(false),
+_tsExtrapolator(),
+_codecTimer(),
+_renderDelayMs(kDefaultRenderDelayMs),
+_minTotalDelayMs(0),
+_requiredDelayMs(0),
+_currentDelayMs(0),
+_prevFrameTimestamp(0)
+{
+    if (masterTiming == NULL)
+    {
+        _master = true;
+        _tsExtrapolator = new VCMTimestampExtrapolator(_clock, vcmId, timingId);
+    }
+    else
+    {
+        _tsExtrapolator = masterTiming->_tsExtrapolator;
+    }
+}
+
+VCMTiming::~VCMTiming()
+{
+    if (_master)
+    {
+        delete _tsExtrapolator;
+    }
+    delete _critSect;
+}
+
+void
+VCMTiming::Reset(WebRtc_Word64 nowMs /* = -1 */)
+{
+    CriticalSectionScoped cs(_critSect);
+    if (nowMs > -1)
+    {
+        _tsExtrapolator->Reset(nowMs);
+    }
+    else
+    {
+        _tsExtrapolator->Reset();
+    }
+    _codecTimer.Reset();
+    _renderDelayMs = kDefaultRenderDelayMs;
+    _minTotalDelayMs = 0;
+    _requiredDelayMs = 0;
+    _currentDelayMs = 0;
+    _prevFrameTimestamp = 0;
+}
+
+void VCMTiming::ResetDecodeTime()
+{
+    _codecTimer.Reset();
+}
+
+void
+VCMTiming::SetRenderDelay(WebRtc_UWord32 renderDelayMs)
+{
+    CriticalSectionScoped cs(_critSect);
+    _renderDelayMs = renderDelayMs;
+}
+
+void
+VCMTiming::SetMinimumTotalDelay(WebRtc_UWord32 minTotalDelayMs)
+{
+    CriticalSectionScoped cs(_critSect);
+    _minTotalDelayMs = minTotalDelayMs;
+}
+
+void
+VCMTiming::SetRequiredDelay(WebRtc_UWord32 requiredDelayMs)
+{
+    CriticalSectionScoped cs(_critSect);
+    if (requiredDelayMs != _requiredDelayMs)
+    {
+        if (_master)
+        {
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _timingId),
+                    "Desired jitter buffer level: %u ms", requiredDelayMs);
+        }
+        _requiredDelayMs = requiredDelayMs;
+    }
+}
+
+void VCMTiming::UpdateCurrentDelay(WebRtc_UWord32 frameTimestamp)
+{
+    CriticalSectionScoped cs(_critSect);
+    WebRtc_UWord32 targetDelayMs = TargetDelayInternal();
+
+    // Make sure we try to sync with audio
+    if (targetDelayMs < _minTotalDelayMs)
+    {
+        targetDelayMs = _minTotalDelayMs;
+    }
+
+    if (_currentDelayMs == 0)
+    {
+        // Not initialized, set current delay to target.
+        _currentDelayMs = targetDelayMs;
+    }
+    else if (targetDelayMs != _currentDelayMs)
+    {
+        WebRtc_Word64 delayDiffMs = static_cast<WebRtc_Word64>(targetDelayMs) -
+                                    _currentDelayMs;
+        // Never change the delay with more than 100 ms every second. If we're changing the
+        // delay in too large steps we will get noticable freezes. By limiting the change we
+        // can increase the delay in smaller steps, which will be experienced as the video is
+        // played in slow motion. When lowering the delay the video will be played at a faster
+        // pace.
+        WebRtc_Word64 maxChangeMs = 0;
+        if (frameTimestamp < 0x0000ffff && _prevFrameTimestamp > 0xffff0000)
+        {
+            // wrap
+            maxChangeMs = kDelayMaxChangeMsPerS * (frameTimestamp +
+                         (static_cast<WebRtc_Word64>(1)<<32) - _prevFrameTimestamp) / 90000;
+        }
+        else
+        {
+            maxChangeMs = kDelayMaxChangeMsPerS *
+                          (frameTimestamp - _prevFrameTimestamp) / 90000;
+        }
+        if (maxChangeMs <= 0)
+        {
+            // Any changes less than 1 ms are truncated and
+            // will be postponed. Negative change will be due
+            // to reordering and should be ignored.
+            return;
+        }
+        else if (delayDiffMs < -maxChangeMs)
+        {
+            delayDiffMs = -maxChangeMs;
+        }
+        else if (delayDiffMs > maxChangeMs)
+        {
+            delayDiffMs = maxChangeMs;
+        }
+        _currentDelayMs = _currentDelayMs + static_cast<WebRtc_Word32>(delayDiffMs);
+    }
+    _prevFrameTimestamp = frameTimestamp;
+}
+
+void VCMTiming::UpdateCurrentDelay(WebRtc_Word64 renderTimeMs,
+                                   WebRtc_Word64 actualDecodeTimeMs)
+{
+    CriticalSectionScoped cs(_critSect);
+    WebRtc_UWord32 targetDelayMs = TargetDelayInternal();
+    // Make sure we try to sync with audio
+    if (targetDelayMs < _minTotalDelayMs)
+    {
+        targetDelayMs = _minTotalDelayMs;
+    }
+    WebRtc_Word64 delayedMs = actualDecodeTimeMs -
+                              (renderTimeMs - MaxDecodeTimeMs() - _renderDelayMs);
+    if (delayedMs < 0)
+    {
+        return;
+    }
+    else if (_currentDelayMs + delayedMs <= targetDelayMs)
+    {
+        _currentDelayMs += static_cast<WebRtc_UWord32>(delayedMs);
+    }
+    else
+    {
+        _currentDelayMs = targetDelayMs;
+    }
+}
+
+WebRtc_Word32
+VCMTiming::StopDecodeTimer(WebRtc_UWord32 timeStamp,
+                           WebRtc_Word64 startTimeMs,
+                           WebRtc_Word64 nowMs)
+{
+    CriticalSectionScoped cs(_critSect);
+    const WebRtc_Word32 maxDecTime = MaxDecodeTimeMs();
+    WebRtc_Word32 timeDiffMs = _codecTimer.StopTimer(startTimeMs, nowMs);
+    if (timeDiffMs < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding, VCMId(_vcmId, _timingId),
+            "Codec timer error: %d", timeDiffMs);
+        assert(false);
+    }
+
+    if (_master)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _timingId),
+                "Frame decoded: timeStamp=%u decTime=%d maxDecTime=%u, at %u",
+                timeStamp, timeDiffMs, maxDecTime, MaskWord64ToUWord32(nowMs));
+    }
+    return 0;
+}
+
+void
+VCMTiming::IncomingTimestamp(WebRtc_UWord32 timeStamp, WebRtc_Word64 nowMs)
+{
+    CriticalSectionScoped cs(_critSect);
+    _tsExtrapolator->Update(nowMs, timeStamp, _master);
+}
+
+WebRtc_Word64
+VCMTiming::RenderTimeMs(WebRtc_UWord32 frameTimestamp, WebRtc_Word64 nowMs) const
+{
+    CriticalSectionScoped cs(_critSect);
+    const WebRtc_Word64 renderTimeMs = RenderTimeMsInternal(frameTimestamp, nowMs);
+    if (renderTimeMs < 0)
+    {
+        return renderTimeMs;
+    }
+    if (_master)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _timingId),
+            "Render frame %u at %u. Render delay %u, required delay %u,"
+                " max decode time %u, min total delay %u",
+            frameTimestamp, MaskWord64ToUWord32(renderTimeMs), _renderDelayMs,
+            _requiredDelayMs, MaxDecodeTimeMs(),_minTotalDelayMs);
+    }
+    return renderTimeMs;
+}
+
+WebRtc_Word64
+VCMTiming::RenderTimeMsInternal(WebRtc_UWord32 frameTimestamp, WebRtc_Word64 nowMs) const
+{
+    WebRtc_Word64 estimatedCompleteTimeMs =
+            _tsExtrapolator->ExtrapolateLocalTime(frameTimestamp);
+    if (estimatedCompleteTimeMs - nowMs > kMaxVideoDelayMs)
+    {
+        if (_master)
+        {
+            WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _timingId),
+                    "Timestamp arrived 2 seconds early, reset statistics",
+                    frameTimestamp, estimatedCompleteTimeMs);
+        }
+        return -1;
+    }
+    if (_master)
+    {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, VCMId(_vcmId, _timingId),
+                "ExtrapolateLocalTime(%u)=%u ms",
+                frameTimestamp, MaskWord64ToUWord32(estimatedCompleteTimeMs));
+    }
+    if (estimatedCompleteTimeMs == -1)
+    {
+        estimatedCompleteTimeMs = nowMs;
+    }
+
+    return estimatedCompleteTimeMs + _currentDelayMs;
+}
+
+// Must be called from inside a critical section
+WebRtc_Word32
+VCMTiming::MaxDecodeTimeMs(FrameType frameType /*= kVideoFrameDelta*/) const
+{
+    const WebRtc_Word32 decodeTimeMs = _codecTimer.RequiredDecodeTimeMs(frameType);
+
+    if (decodeTimeMs < 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCoding, VCMId(_vcmId, _timingId),
+            "Negative maximum decode time: %d", decodeTimeMs);
+        return -1;
+    }
+    return decodeTimeMs;
+}
+
+WebRtc_UWord32
+VCMTiming::MaxWaitingTime(WebRtc_Word64 renderTimeMs, WebRtc_Word64 nowMs) const
+{
+    CriticalSectionScoped cs(_critSect);
+
+    const WebRtc_Word64 maxWaitTimeMs = renderTimeMs - nowMs -
+                                        MaxDecodeTimeMs() - _renderDelayMs;
+
+    if (maxWaitTimeMs < 0)
+    {
+        return 0;
+    }
+    return static_cast<WebRtc_UWord32>(maxWaitTimeMs);
+}
+
+bool
+VCMTiming::EnoughTimeToDecode(WebRtc_UWord32 availableProcessingTimeMs) const
+{
+    CriticalSectionScoped cs(_critSect);
+    WebRtc_Word32 maxDecodeTimeMs = MaxDecodeTimeMs();
+    if (maxDecodeTimeMs < 0)
+    {
+        // Haven't decoded any frames yet, try decoding one to get an estimate
+        // of the decode time.
+        return true;
+    }
+    else if (maxDecodeTimeMs == 0)
+    {
+        // Decode time is less than 1, set to 1 for now since
+        // we don't have any better precision. Count ticks later?
+        maxDecodeTimeMs = 1;
+    }
+    return static_cast<WebRtc_Word32>(availableProcessingTimeMs) - maxDecodeTimeMs > 0;
+}
+
+WebRtc_UWord32
+VCMTiming::TargetVideoDelay() const
+{
+    CriticalSectionScoped cs(_critSect);
+    return TargetDelayInternal();
+}
+
+WebRtc_UWord32
+VCMTiming::TargetDelayInternal() const
+{
+    return _requiredDelayMs + MaxDecodeTimeMs() + _renderDelayMs;
+}
+
+}
diff --git a/src/modules/video_coding/main/source/timing.h b/src/modules/video_coding/main/source/timing.h
new file mode 100644
index 0000000..41a4945
--- /dev/null
+++ b/src/modules/video_coding/main/source/timing.h
@@ -0,0 +1,111 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TIMING_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TIMING_H_
+
+#include "typedefs.h"
+#include "critical_section_wrapper.h"
+#include "codec_timer.h"
+
+namespace webrtc
+{
+
+class TickTimeBase;
+class VCMTimestampExtrapolator;
+
+class VCMTiming
+{
+public:
+    // The primary timing component should be passed
+    // if this is the dual timing component.
+    VCMTiming(TickTimeBase* clock,
+              WebRtc_Word32 vcmId = 0,
+              WebRtc_Word32 timingId = 0,
+              VCMTiming* masterTiming = NULL);
+    ~VCMTiming();
+
+    // Resets the timing to the initial state.
+    void Reset(WebRtc_Word64 nowMs = -1);
+    void ResetDecodeTime();
+
+    // The amount of time needed to render an image. Defaults to 10 ms.
+    void SetRenderDelay(WebRtc_UWord32 renderDelayMs);
+
+    // The minimum time the video must be delayed on the receiver to
+    // get the desired jitter buffer level.
+    void SetRequiredDelay(WebRtc_UWord32 requiredDelayMs);
+
+    // Minimum total delay required to sync video with audio.
+    void SetMinimumTotalDelay(WebRtc_UWord32 minTotalDelayMs);
+
+    // Increases or decreases the current delay to get closer to the target delay.
+    // Calculates how long it has been since the previous call to this function,
+    // and increases/decreases the delay in proportion to the time difference.
+    void UpdateCurrentDelay(WebRtc_UWord32 frameTimestamp);
+
+    // Increases or decreases the current delay to get closer to the target delay.
+    // Given the actual decode time in ms and the render time in ms for a frame, this
+    // function calculates how late the frame is and increases the delay accordingly.
+    void UpdateCurrentDelay(WebRtc_Word64 renderTimeMs, WebRtc_Word64 actualDecodeTimeMs);
+
+    // Stops the decoder timer, should be called when the decoder returns a frame
+    // or when the decoded frame callback is called.
+    WebRtc_Word32 StopDecodeTimer(WebRtc_UWord32 timeStamp,
+                                  WebRtc_Word64 startTimeMs,
+                                  WebRtc_Word64 nowMs);
+
+    // Used to report that a frame is passed to decoding. Updates the timestamp filter
+    // which is used to map between timestamps and receiver system time.
+    void IncomingTimestamp(WebRtc_UWord32 timeStamp, WebRtc_Word64 lastPacketTimeMs);
+
+    // Returns the receiver system time when the frame with timestamp frameTimestamp
+    // should be rendered, assuming that the system time currently is nowMs.
+    WebRtc_Word64 RenderTimeMs(WebRtc_UWord32 frameTimestamp, WebRtc_Word64 nowMs) const;
+
+    // Returns the maximum time in ms that we can wait for a frame to become complete
+    // before we must pass it to the decoder.
+    WebRtc_UWord32 MaxWaitingTime(WebRtc_Word64 renderTimeMs, WebRtc_Word64 nowMs) const;
+
+    // Returns the current target delay which is required delay + decode time + render
+    // delay.
+    WebRtc_UWord32 TargetVideoDelay() const;
+
+    // Calculates whether or not there is enough time to decode a frame given a
+    // certain amount of processing time.
+    bool EnoughTimeToDecode(WebRtc_UWord32 availableProcessingTimeMs) const;
+
+    enum { kDefaultRenderDelayMs = 10 };
+    enum { kDelayMaxChangeMsPerS = 100 };
+
+protected:
+    WebRtc_Word32 MaxDecodeTimeMs(FrameType frameType = kVideoFrameDelta) const;
+    WebRtc_Word64 RenderTimeMsInternal(WebRtc_UWord32 frameTimestamp,
+                                       WebRtc_Word64 nowMs) const;
+    WebRtc_UWord32 TargetDelayInternal() const;
+
+private:
+    CriticalSectionWrapper*       _critSect;
+    WebRtc_Word32                 _vcmId;
+    TickTimeBase*                 _clock;
+    WebRtc_Word32                 _timingId;
+    bool                          _master;
+    VCMTimestampExtrapolator*     _tsExtrapolator;
+    VCMCodecTimer                 _codecTimer;
+    WebRtc_UWord32                _renderDelayMs;
+    WebRtc_UWord32                _minTotalDelayMs;
+    WebRtc_UWord32                _requiredDelayMs;
+    WebRtc_UWord32                _currentDelayMs;
+    WebRtc_UWord32                _prevFrameTimestamp;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TIMING_H_
diff --git a/src/modules/video_coding/main/source/video_coding.gypi b/src/modules/video_coding/main/source/video_coding.gypi
new file mode 100644
index 0000000..b26cf5a
--- /dev/null
+++ b/src/modules/video_coding/main/source/video_coding.gypi
@@ -0,0 +1,100 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'webrtc_video_coding',
+      'type': '<(library)',
+      'dependencies': [
+        'webrtc_i420',
+        '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+        '<(webrtc_vp8_dir)/vp8.gyp:webrtc_vp8',
+      ],
+      'include_dirs': [
+        '../interface',
+        '../../../interface',
+        '../../codecs/interface',
+        '../../../../common_video/interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../interface',
+          '../../codecs/interface',
+        ],
+      },
+      'sources': [
+        # interfaces
+        '../interface/video_coding.h',
+        '../interface/video_coding_defines.h',
+
+        # headers
+        'codec_database.h',
+        'codec_timer.h',
+        'content_metrics_processing.h',
+        'decoding_state.h',
+        'encoded_frame.h',
+        'er_tables_xor.h',
+        'event.h',
+        'exp_filter.h',
+        'fec_tables_xor.h',
+        'frame_buffer.h',
+        'frame_dropper.h',
+        'generic_decoder.h',
+        'generic_encoder.h',
+        'inter_frame_delay.h',
+        'internal_defines.h',
+        'jitter_buffer.h',
+        'jitter_buffer_common.h',
+        'jitter_estimator.h',
+        'media_opt_util.h',
+        'media_optimization.h',
+        'nack_fec_tables.h',
+        'packet.h',
+        'qm_select_data.h',
+        'qm_select.h',
+        'receiver.h',
+        'rtt_filter.h',
+        'session_info.h',
+        'tick_time_base.h',
+        'timestamp_extrapolator.h',
+        'timestamp_map.h',
+        'timing.h',
+        'video_coding_impl.h',
+
+        # sources
+        'codec_database.cc',
+        'codec_timer.cc',
+        'content_metrics_processing.cc',
+        'decoding_state.cc',
+        'encoded_frame.cc',
+        'exp_filter.cc',
+        'frame_buffer.cc',
+        'frame_dropper.cc',
+        'generic_decoder.cc',
+        'generic_encoder.cc',
+        'inter_frame_delay.cc',
+        'jitter_buffer.cc',
+        'jitter_buffer_common.cc',
+        'jitter_estimator.cc',
+        'media_opt_util.cc',
+        'media_optimization.cc',
+        'packet.cc',
+        'qm_select.cc',
+        'receiver.cc',
+        'rtt_filter.cc',
+        'session_info.cc',
+        'timestamp_extrapolator.cc',
+        'timestamp_map.cc',
+        'timing.cc',
+        'video_coding_impl.cc',
+      ], # source
+    },
+  ],
+}
diff --git a/src/modules/video_coding/main/source/video_coding_impl.cc b/src/modules/video_coding/main/source/video_coding_impl.cc
new file mode 100644
index 0000000..c73fb30
--- /dev/null
+++ b/src/modules/video_coding/main/source/video_coding_impl.cc
@@ -0,0 +1,1386 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_coding_impl.h"
+#include "common_types.h"
+#include "encoded_frame.h"
+#include "jitter_buffer.h"
+#include "packet.h"
+#include "trace.h"
+#include "video_codec_interface.h"
+#include "modules/video_coding/main/source/tick_time_base.h"
+
+namespace webrtc
+{
+
+//#define DEBUG_DECODER_BIT_STREAM
+
+WebRtc_UWord32
+VCMProcessTimer::Period() const
+{
+    return _periodMs;
+}
+
+WebRtc_UWord32
+VCMProcessTimer::TimeUntilProcess() const
+{
+    return static_cast<WebRtc_UWord32>(
+        VCM_MAX(static_cast<WebRtc_Word64>(_periodMs) -
+                (_clock->MillisecondTimestamp() - _latestMs), 0));
+}
+
+void
+VCMProcessTimer::Processed()
+{
+    _latestMs = _clock->MillisecondTimestamp();
+}
+
+VideoCodingModuleImpl::VideoCodingModuleImpl(const WebRtc_Word32 id,
+                                             TickTimeBase* clock,
+                                             bool delete_clock_on_destroy)
+:
+_id(id),
+clock_(clock),
+delete_clock_on_destroy_(delete_clock_on_destroy),
+_receiveCritSect(CriticalSectionWrapper::CreateCriticalSection()),
+_receiverInited(false),
+_timing(clock_, id, 1),
+_dualTiming(clock_, id, 2, &_timing),
+_receiver(_timing, clock_, id, 1),
+_dualReceiver(_dualTiming, clock_, id, 2, false),
+_decodedFrameCallback(_timing, clock_),
+_dualDecodedFrameCallback(_dualTiming, clock_),
+_frameTypeCallback(NULL),
+_frameStorageCallback(NULL),
+_receiveStatsCallback(NULL),
+_packetRequestCallback(NULL),
+_decoder(NULL),
+_dualDecoder(NULL),
+#ifdef DEBUG_DECODER_BIT_STREAM
+_bitStreamBeforeDecoder(NULL),
+#endif
+_frameFromFile(),
+_keyRequestMode(kKeyOnError),
+_scheduleKeyRequest(false),
+
+_sendCritSect(CriticalSectionWrapper::CreateCriticalSection()),
+_encoder(),
+_encodedFrameCallback(),
+_nextFrameType(kVideoFrameDelta),
+_mediaOpt(id, clock_),
+_sendCodecType(kVideoCodecUnknown),
+_sendStatsCallback(NULL),
+_encoderInputFile(NULL),
+_codecDataBase(id),
+_receiveStatsTimer(1000, clock_),
+_sendStatsTimer(1000, clock_),
+_retransmissionTimer(10, clock_),
+_keyRequestTimer(500, clock_)
+{
+    assert(clock_);
+#ifdef DEBUG_DECODER_BIT_STREAM
+    _bitStreamBeforeDecoder = fopen("decoderBitStream.bit", "wb");
+#endif
+}
+
+VideoCodingModuleImpl::~VideoCodingModuleImpl()
+{
+    if (_dualDecoder != NULL)
+    {
+        _codecDataBase.ReleaseDecoder(_dualDecoder);
+    }
+    delete _receiveCritSect;
+    delete _sendCritSect;
+    if (delete_clock_on_destroy_) delete clock_;
+#ifdef DEBUG_DECODER_BIT_STREAM
+    fclose(_bitStreamBeforeDecoder);
+#endif
+    if (_encoderInputFile != NULL)
+    {
+        fclose(_encoderInputFile);
+    }
+}
+
+VideoCodingModule*
+VideoCodingModule::Create(const WebRtc_Word32 id)
+{
+    return new VideoCodingModuleImpl(id, new TickTimeBase(), true);
+}
+
+VideoCodingModule*
+VideoCodingModule::Create(const WebRtc_Word32 id, TickTimeBase* clock)
+{
+    assert(clock);
+    return new VideoCodingModuleImpl(id, clock, false);
+}
+
+void
+VideoCodingModule::Destroy(VideoCodingModule* module)
+{
+    if (module != NULL)
+    {
+        delete static_cast<VideoCodingModuleImpl*>(module);
+    }
+}
+
+WebRtc_Word32
+VideoCodingModuleImpl::Process()
+{
+    WebRtc_Word32 returnValue = VCM_OK;
+
+    // Receive-side statistics
+    if (_receiveStatsTimer.TimeUntilProcess() == 0)
+    {
+        _receiveStatsTimer.Processed();
+        if (_receiveStatsCallback != NULL)
+        {
+            WebRtc_UWord32 bitRate;
+            WebRtc_UWord32 frameRate;
+            const WebRtc_Word32 ret = _receiver.ReceiveStatistics(bitRate,
+                                                                  frameRate);
+            if (ret == 0)
+            {
+                _receiveStatsCallback->ReceiveStatistics(bitRate, frameRate);
+            }
+            else if (returnValue == VCM_OK)
+            {
+                returnValue = ret;
+            }
+        }
+    }
+
+    // Send-side statistics
+    if (_sendStatsTimer.TimeUntilProcess() == 0)
+    {
+        _sendStatsTimer.Processed();
+        if (_sendStatsCallback != NULL)
+        {
+            WebRtc_UWord32 bitRate;
+            WebRtc_UWord32 frameRate;
+            {
+                CriticalSectionScoped cs(_sendCritSect);
+                bitRate = static_cast<WebRtc_UWord32>(
+                    _mediaOpt.SentBitRate() + 0.5f);
+                frameRate = static_cast<WebRtc_UWord32>(
+                    _mediaOpt.SentFrameRate() + 0.5f);
+            }
+            _sendStatsCallback->SendStatistics(bitRate, frameRate);
+        }
+    }
+
+    // Packet retransmission requests
+    if (_retransmissionTimer.TimeUntilProcess() == 0)
+    {
+        _retransmissionTimer.Processed();
+        if (_packetRequestCallback != NULL)
+        {
+            WebRtc_UWord16 nackList[kNackHistoryLength];
+            WebRtc_UWord16 length = kNackHistoryLength;
+            const WebRtc_Word32 ret = NackList(nackList, length);
+            if (ret != VCM_OK && returnValue == VCM_OK)
+            {
+                returnValue = ret;
+            }
+            if (length > 0)
+            {
+                _packetRequestCallback->ResendPackets(nackList, length);
+            }
+        }
+    }
+
+    // Key frame requests
+    if (_keyRequestTimer.TimeUntilProcess() == 0)
+    {
+        _keyRequestTimer.Processed();
+        if (_scheduleKeyRequest && _frameTypeCallback != NULL)
+        {
+            const WebRtc_Word32 ret = RequestKeyFrame();
+            if (ret != VCM_OK && returnValue == VCM_OK)
+            {
+                returnValue = ret;
+            }
+        }
+    }
+
+    return returnValue;
+}
+
+WebRtc_Word32
+VideoCodingModuleImpl::Id() const
+{
+    CriticalSectionScoped receiveCs(_receiveCritSect);
+    {
+        CriticalSectionScoped sendCs(_sendCritSect);
+        return _id;
+    }
+}
+
+//  Change the unique identifier of this object
+WebRtc_Word32
+VideoCodingModuleImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    CriticalSectionScoped receiveCs(_receiveCritSect);
+    {
+        CriticalSectionScoped sendCs(_sendCritSect);
+        _id = id;
+        return VCM_OK;
+    }
+}
+
+// Returns the number of milliseconds until the module wants a worker thread to
+// call Process
+WebRtc_Word32
+VideoCodingModuleImpl::TimeUntilNextProcess()
+{
+    WebRtc_UWord32 timeUntilNextProcess = VCM_MIN(
+                                    _receiveStatsTimer.TimeUntilProcess(),
+                                    _sendStatsTimer.TimeUntilProcess());
+    if ((_receiver.NackMode() != kNoNack) ||
+        (_dualReceiver.State() != kPassive))
+    {
+        // We need a Process call more often if we are relying on
+        // retransmissions
+        timeUntilNextProcess = VCM_MIN(timeUntilNextProcess,
+                                       _retransmissionTimer.TimeUntilProcess());
+    }
+    timeUntilNextProcess = VCM_MIN(timeUntilNextProcess,
+                                   _keyRequestTimer.TimeUntilProcess());
+
+    return timeUntilNextProcess;
+}
+
+// Get number of supported codecs
+WebRtc_UWord8
+VideoCodingModule::NumberOfCodecs()
+{
+    return VCMCodecDataBase::NumberOfCodecs();
+}
+
+// Get supported codec with id
+WebRtc_Word32
+VideoCodingModule::Codec(WebRtc_UWord8 listId, VideoCodec* codec)
+{
+    if (codec == NULL)
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+    return VCMCodecDataBase::Codec(listId, codec);
+}
+
+// Get supported codec with type
+WebRtc_Word32
+VideoCodingModule::Codec(VideoCodecType codecType, VideoCodec* codec)
+{
+    if (codec == NULL)
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+    return VCMCodecDataBase::Codec(codecType, codec);
+}
+
+/*
+*   Sender
+*/
+
+// Reset send side to initial state - all components
+WebRtc_Word32
+VideoCodingModuleImpl::InitializeSender()
+{
+    CriticalSectionScoped cs(_sendCritSect);
+    _codecDataBase.ResetSender();
+    _encoder = NULL;
+    _encodedFrameCallback.SetTransportCallback(NULL);
+    // setting default bitRate and frameRate to 0
+    _mediaOpt.SetEncodingData(kVideoCodecUnknown, 0, 0, 0, 0, 0, 0);
+    _mediaOpt.Reset(); // Resetting frame dropper
+    return VCM_OK;
+}
+
+// Register the send codec to be used.
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterSendCodec(const VideoCodec* sendCodec,
+                                         WebRtc_UWord32 numberOfCores,
+                                         WebRtc_UWord32 maxPayloadSize)
+{
+    CriticalSectionScoped cs(_sendCritSect);
+    if (sendCodec == NULL)
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+    WebRtc_Word32 ret = _codecDataBase.RegisterSendCodec(sendCodec,
+                                                         numberOfCores,
+                                                         maxPayloadSize);
+    if (ret < 0)
+    {
+        return ret;
+    }
+
+    _encoder = _codecDataBase.SetEncoder(sendCodec, &_encodedFrameCallback);
+    if (_encoder == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError,
+                     webrtc::kTraceVideoCoding,
+                     VCMId(_id),
+                     "Failed to initialize encoder");
+        return VCM_CODEC_ERROR;
+    }
+    _sendCodecType = sendCodec->codecType;
+    int numLayers = (_sendCodecType != kVideoCodecVP8) ? 1 :
+                        sendCodec->codecSpecific.VP8.numberOfTemporalLayers;
+
+    _mediaOpt.SetEncodingData(_sendCodecType,
+                              sendCodec->maxBitrate,
+                              sendCodec->maxFramerate,
+                              sendCodec->startBitrate,
+                              sendCodec->width,
+                              sendCodec->height,
+                              numLayers);
+    _mediaOpt.SetMtu(maxPayloadSize);
+
+    return VCM_OK;
+}
+
+// Get current send codec
+WebRtc_Word32
+VideoCodingModuleImpl::SendCodec(VideoCodec* currentSendCodec) const
+{
+    CriticalSectionScoped cs(_sendCritSect);
+
+    if (currentSendCodec == NULL)
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+    return _codecDataBase.SendCodec(currentSendCodec);
+}
+
+// Get the current send codec type
+VideoCodecType
+VideoCodingModuleImpl::SendCodec() const
+{
+    CriticalSectionScoped cs(_sendCritSect);
+
+    return _codecDataBase.SendCodec();
+}
+
+// Register an external decoder object.
+// This can not be used together with external decoder callbacks.
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterExternalEncoder(VideoEncoder* externalEncoder,
+                                               WebRtc_UWord8 payloadType,
+                                               bool internalSource /*= false*/)
+{
+    CriticalSectionScoped cs(_sendCritSect);
+
+    if (externalEncoder == NULL)
+    {
+        bool wasSendCodec = false;
+        const WebRtc_Word32 ret = _codecDataBase.DeRegisterExternalEncoder(
+                                                                  payloadType,
+                                                                  wasSendCodec);
+        if (wasSendCodec)
+        {
+            // Make sure the VCM doesn't use the de-registered codec
+            _encoder = NULL;
+        }
+        return ret;
+    }
+    return _codecDataBase.RegisterExternalEncoder(externalEncoder,
+                                                  payloadType,
+                                                  internalSource);
+}
+
+// Get codec config parameters
+WebRtc_Word32
+VideoCodingModuleImpl::CodecConfigParameters(WebRtc_UWord8* buffer,
+                                             WebRtc_Word32 size)
+{
+    CriticalSectionScoped cs(_sendCritSect);
+    if (_encoder != NULL)
+    {
+        return _encoder->CodecConfigParameters(buffer, size);
+    }
+    return VCM_UNINITIALIZED;
+}
+
+// Get encode bitrate
+int VideoCodingModuleImpl::Bitrate(unsigned int* bitrate) const
+{
+  CriticalSectionScoped cs(_sendCritSect);
+  // return the bit rate which the encoder is set to
+  if (!_encoder) {
+    return VCM_UNINITIALIZED;
+  }
+  *bitrate = _encoder->BitRate();
+  return 0;
+}
+
+// Get encode frame rate
+int VideoCodingModuleImpl::FrameRate(unsigned int* framerate) const
+{
+  CriticalSectionScoped cs(_sendCritSect);
+  // input frame rate, not compensated
+  if (!_encoder) {
+    return VCM_UNINITIALIZED;
+  }
+  *framerate = _encoder->FrameRate();
+  return 0;
+}
+
+// Set channel parameters
+WebRtc_Word32
+VideoCodingModuleImpl::SetChannelParameters(WebRtc_UWord32 availableBandWidth,
+                                            WebRtc_UWord8 lossRate,
+                                            WebRtc_UWord32 rtt)
+{
+    WebRtc_Word32 ret = 0;
+    {
+        CriticalSectionScoped sendCs(_sendCritSect);
+        WebRtc_UWord32 targetRate = _mediaOpt.SetTargetRates(availableBandWidth,
+                                                             lossRate,
+                                                             rtt);
+        if (_encoder != NULL)
+        {
+            ret = _encoder->SetChannelParameters(lossRate, rtt);
+            if (ret < 0 )
+            {
+                return ret;
+            }
+            ret = (WebRtc_Word32)_encoder->SetRates(targetRate,
+                                                    _mediaOpt.InputFrameRate());
+            if (ret < 0)
+            {
+                return ret;
+            }
+        }
+        else
+        {
+            return VCM_UNINITIALIZED;
+        } // encoder
+    }// send side
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VideoCodingModuleImpl::SetReceiveChannelParameters(WebRtc_UWord32 rtt)
+{
+    CriticalSectionScoped receiveCs(_receiveCritSect);
+    _receiver.UpdateRtt(rtt);
+    return 0;
+}
+
+// Register a transport callback which will be called to deliver the encoded
+// buffers
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterTransportCallback(
+    VCMPacketizationCallback* transport)
+{
+    CriticalSectionScoped cs(_sendCritSect);
+    _encodedFrameCallback.SetMediaOpt(&_mediaOpt);
+    _encodedFrameCallback.SetTransportCallback(transport);
+    return VCM_OK;
+}
+
+// Register video output information callback which will be called to deliver
+// information about the video stream produced by the encoder, for instance the
+// average frame rate and bit rate.
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterSendStatisticsCallback(
+    VCMSendStatisticsCallback* sendStats)
+{
+    CriticalSectionScoped cs(_sendCritSect);
+    _sendStatsCallback = sendStats;
+    return VCM_OK;
+}
+
+// Register a video quality settings callback which will be called when frame
+// rate/dimensions need to be updated for video quality optimization
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterVideoQMCallback(
+    VCMQMSettingsCallback* videoQMSettings)
+{
+    CriticalSectionScoped cs(_sendCritSect);
+    return _mediaOpt.RegisterVideoQMCallback(videoQMSettings);
+}
+
+
+// Register a video protection callback which will be called to deliver the
+// requested FEC rate and NACK status (on/off).
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterProtectionCallback(
+    VCMProtectionCallback* protection)
+{
+    CriticalSectionScoped cs(_sendCritSect);
+    _mediaOpt.RegisterProtectionCallback(protection);
+    return VCM_OK;
+}
+
+// Enable or disable a video protection method.
+WebRtc_Word32
+VideoCodingModuleImpl::SetVideoProtection(VCMVideoProtection videoProtection,
+                                          bool enable)
+{
+
+    switch (videoProtection)
+    {
+
+    case kProtectionNack:
+        {
+            // Both send-side and receive-side
+            SetVideoProtection(kProtectionNackSender, enable);
+            SetVideoProtection(kProtectionNackReceiver, enable);
+            break;
+        }
+
+    case kProtectionNackSender:
+        {
+            CriticalSectionScoped cs(_sendCritSect);
+            _mediaOpt.EnableProtectionMethod(enable, kNack);
+            break;
+        }
+
+    case kProtectionNackReceiver:
+        {
+            CriticalSectionScoped cs(_receiveCritSect);
+            if (enable)
+            {
+                _receiver.SetNackMode(kNackInfinite);
+            }
+            else
+            {
+                _receiver.SetNackMode(kNoNack);
+            }
+            break;
+        }
+
+    case kProtectionDualDecoder:
+        {
+            CriticalSectionScoped cs(_receiveCritSect);
+            if (enable)
+            {
+                _receiver.SetNackMode(kNoNack);
+                _dualReceiver.SetNackMode(kNackInfinite);
+            }
+            else
+            {
+                _dualReceiver.SetNackMode(kNoNack);
+            }
+            break;
+        }
+
+    case kProtectionKeyOnLoss:
+        {
+            CriticalSectionScoped cs(_receiveCritSect);
+            if (enable)
+            {
+                _keyRequestMode = kKeyOnLoss;
+            }
+            else if (_keyRequestMode == kKeyOnLoss)
+            {
+                _keyRequestMode = kKeyOnError; // default mode
+            }
+            else
+            {
+                return VCM_PARAMETER_ERROR;
+            }
+            break;
+        }
+
+    case kProtectionKeyOnKeyLoss:
+        {
+            CriticalSectionScoped cs(_receiveCritSect);
+            if (enable)
+            {
+                _keyRequestMode = kKeyOnKeyLoss;
+            }
+            else if (_keyRequestMode == kKeyOnKeyLoss)
+            {
+                _keyRequestMode = kKeyOnError; // default mode
+            }
+            else
+            {
+                return VCM_PARAMETER_ERROR;
+            }
+            break;
+        }
+
+    case kProtectionNackFEC:
+        {
+            {
+              // Receive side
+                CriticalSectionScoped cs(_receiveCritSect);
+                if (enable)
+                {
+                    _receiver.SetNackMode(kNackHybrid);
+                }
+                else
+                {
+                    _receiver.SetNackMode(kNoNack);
+                }
+            }
+            // Send Side
+            {
+                CriticalSectionScoped cs(_sendCritSect);
+                _mediaOpt.EnableProtectionMethod(enable, kNackFec);
+            }
+            break;
+        }
+
+    case kProtectionFEC:
+        {
+            CriticalSectionScoped cs(_sendCritSect);
+            _mediaOpt.EnableProtectionMethod(enable, kFec);
+            break;
+        }
+
+    case kProtectionPeriodicKeyFrames:
+        {
+            CriticalSectionScoped cs(_sendCritSect);
+            return _codecDataBase.SetPeriodicKeyFrames(enable);
+            break;
+        }
+    }
+    return VCM_OK;
+}
+
+// Add one raw video frame to the encoder, blocking.
+WebRtc_Word32
+VideoCodingModuleImpl::AddVideoFrame(const VideoFrame& videoFrame,
+                                     const VideoContentMetrics* contentMetrics,
+                                     const CodecSpecificInfo* codecSpecificInfo)
+{
+    CriticalSectionScoped cs(_sendCritSect);
+
+    if (_encoder == NULL)
+    {
+        return VCM_UNINITIALIZED;
+    }
+    if (_nextFrameType == kFrameEmpty)
+    {
+        return VCM_OK;
+    }
+    _mediaOpt.UpdateIncomingFrameRate();
+
+    if (_mediaOpt.DropFrame())
+    {
+        WEBRTC_TRACE(webrtc::kTraceStream,
+                     webrtc::kTraceVideoCoding,
+                     VCMId(_id),
+                     "Drop frame due to bitrate");
+    }
+    else
+    {
+        _mediaOpt.updateContentData(contentMetrics);
+        WebRtc_Word32 ret = _encoder->Encode(videoFrame,
+                                             codecSpecificInfo,
+                                             _nextFrameType);
+        if (_encoderInputFile != NULL)
+        {
+          if (fwrite(videoFrame.Buffer(), 1, videoFrame.Length(),
+                     _encoderInputFile) !=  videoFrame.Length()) {
+            return -1;
+          }
+        }
+        if (ret < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError,
+                         webrtc::kTraceVideoCoding,
+                         VCMId(_id),
+                         "Encode error: %d", ret);
+            return ret;
+        }
+        _nextFrameType = kVideoFrameDelta; // default frame type
+    }
+    return VCM_OK;
+}
+
+WebRtc_Word32 VideoCodingModuleImpl::IntraFrameRequest() {
+  CriticalSectionScoped cs(_sendCritSect);
+  _nextFrameType = kVideoFrameKey;
+  if (_encoder != NULL && _encoder->InternalSource()) {
+    // Try to request the frame if we have an external encoder with
+    // internal source since AddVideoFrame never will be called.
+    if (_encoder->RequestFrame(_nextFrameType) == WEBRTC_VIDEO_CODEC_OK) {
+      _nextFrameType = kVideoFrameDelta;
+    }
+  }
+  return VCM_OK;
+}
+
+WebRtc_Word32
+VideoCodingModuleImpl::EnableFrameDropper(bool enable)
+{
+    CriticalSectionScoped cs(_sendCritSect);
+    _mediaOpt.EnableFrameDropper(enable);
+    return VCM_OK;
+}
+
+
+WebRtc_Word32
+VideoCodingModuleImpl::SentFrameCount(VCMFrameCount &frameCount) const
+{
+    CriticalSectionScoped cs(_sendCritSect);
+    return _mediaOpt.SentFrameCount(frameCount);
+}
+
+// Initialize receiver, resets codec database etc
+WebRtc_Word32
+VideoCodingModuleImpl::InitializeReceiver()
+{
+    CriticalSectionScoped cs(_receiveCritSect);
+    WebRtc_Word32 ret = _receiver.Initialize();
+    if (ret < 0)
+    {
+        return ret;
+    }
+
+    ret = _dualReceiver.Initialize();
+    if (ret < 0)
+    {
+        return ret;
+    }
+    _codecDataBase.ResetReceiver();
+    _timing.Reset();
+
+    _decoder = NULL;
+    _decodedFrameCallback.SetUserReceiveCallback(NULL);
+    _receiverInited = true;
+    _frameTypeCallback = NULL;
+    _frameStorageCallback = NULL;
+    _receiveStatsCallback = NULL;
+    _packetRequestCallback = NULL;
+    _keyRequestMode = kKeyOnError;
+    _scheduleKeyRequest = false;
+
+    return VCM_OK;
+}
+
+// Register a receive callback. Will be called whenever there is a new frame
+// ready for rendering.
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterReceiveCallback(
+    VCMReceiveCallback* receiveCallback)
+{
+    CriticalSectionScoped cs(_receiveCritSect);
+    _decodedFrameCallback.SetUserReceiveCallback(receiveCallback);
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterReceiveStatisticsCallback(
+                                     VCMReceiveStatisticsCallback* receiveStats)
+{
+    CriticalSectionScoped cs(_receiveCritSect);
+    _receiveStatsCallback = receiveStats;
+    return VCM_OK;
+}
+
+// Register an externally defined decoder/render object.
+// Can be a decoder only or a decoder coupled with a renderer.
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterExternalDecoder(VideoDecoder* externalDecoder,
+                                               WebRtc_UWord8 payloadType,
+                                               bool internalRenderTiming)
+{
+    CriticalSectionScoped cs(_receiveCritSect);
+    if (externalDecoder == NULL)
+    {
+        // Make sure the VCM updates the decoder next time it decodes.
+        _decoder = NULL;
+        return _codecDataBase.DeRegisterExternalDecoder(payloadType);
+    }
+    else
+    {
+        return _codecDataBase.RegisterExternalDecoder(externalDecoder,
+                                                      payloadType,
+                                                      internalRenderTiming);
+    }
+}
+
+// Register a frame type request callback.
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterFrameTypeCallback(
+    VCMFrameTypeCallback* frameTypeCallback)
+{
+    CriticalSectionScoped cs(_receiveCritSect);
+    _frameTypeCallback = frameTypeCallback;
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterFrameStorageCallback(
+    VCMFrameStorageCallback* frameStorageCallback)
+{
+    CriticalSectionScoped cs(_receiveCritSect);
+    _frameStorageCallback = frameStorageCallback;
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterPacketRequestCallback(
+    VCMPacketRequestCallback* callback)
+{
+    CriticalSectionScoped cs(_receiveCritSect);
+    _packetRequestCallback = callback;
+    return VCM_OK;
+}
+
+// Decode next frame, blocking.
+// Should be called as often as possible to get the most out of the decoder.
+WebRtc_Word32
+VideoCodingModuleImpl::Decode(WebRtc_UWord16 maxWaitTimeMs)
+{
+    WebRtc_Word64 nextRenderTimeMs;
+    {
+        CriticalSectionScoped cs(_receiveCritSect);
+        if (!_receiverInited)
+        {
+            return VCM_UNINITIALIZED;
+        }
+        if (!_codecDataBase.DecoderRegistered())
+        {
+            return VCM_NO_CODEC_REGISTERED;
+        }
+    }
+
+    const bool dualReceiverEnabledNotReceiving =
+        (_dualReceiver.State() != kReceiving &&
+         _dualReceiver.NackMode() == kNackInfinite);
+
+    VCMEncodedFrame* frame = _receiver.FrameForDecoding(
+                                                  maxWaitTimeMs,
+                                                  nextRenderTimeMs,
+                                                  _codecDataBase.RenderTiming(),
+                                                  &_dualReceiver);
+
+    if (dualReceiverEnabledNotReceiving && _dualReceiver.State() == kReceiving)
+    {
+        // Dual receiver is enabled (kNACK enabled), but was not receiving
+        // before the call to FrameForDecoding(). After the call the state
+        // changed to receiving, and therefore we must copy the primary decoder
+        // state to the dual decoder to make it possible for the dual decoder to
+        // start decoding retransmitted frames and recover.
+        CriticalSectionScoped cs(_receiveCritSect);
+        if (_dualDecoder != NULL)
+        {
+            _codecDataBase.ReleaseDecoder(_dualDecoder);
+        }
+        _dualDecoder = _codecDataBase.CreateDecoderCopy();
+        if (_dualDecoder != NULL)
+        {
+            _dualDecoder->RegisterDecodeCompleteCallback(
+                &_dualDecodedFrameCallback);
+        }
+        else
+        {
+            _dualReceiver.Reset();
+        }
+    }
+
+    if (frame == NULL)
+      return VCM_FRAME_NOT_READY;
+    else
+    {
+        CriticalSectionScoped cs(_receiveCritSect);
+
+        // If this frame was too late, we should adjust the delay accordingly
+        _timing.UpdateCurrentDelay(frame->RenderTimeMs(),
+                                   clock_->MillisecondTimestamp());
+
+#ifdef DEBUG_DECODER_BIT_STREAM
+        if (_bitStreamBeforeDecoder != NULL)
+        {
+          // Write bit stream to file for debugging purposes
+          if (fwrite(frame->Buffer(), 1, frame->Length(),
+                     _bitStreamBeforeDecoder) !=  frame->Length()) {
+            return -1;
+          }
+        }
+#endif
+        if (_frameStorageCallback != NULL)
+        {
+            WebRtc_Word32 ret = frame->Store(*_frameStorageCallback);
+            if (ret < 0)
+            {
+                return ret;
+            }
+        }
+
+        const WebRtc_Word32 ret = Decode(*frame);
+        _receiver.ReleaseFrame(frame);
+        frame = NULL;
+        if (ret != VCM_OK)
+        {
+            return ret;
+        }
+    }
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VideoCodingModuleImpl::RequestSliceLossIndication(
+    const WebRtc_UWord64 pictureID) const
+{
+    if (_frameTypeCallback != NULL)
+    {
+        const WebRtc_Word32 ret =
+            _frameTypeCallback->SliceLossIndicationRequest(pictureID);
+        if (ret < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError,
+                         webrtc::kTraceVideoCoding,
+                         VCMId(_id),
+                         "Failed to request key frame");
+            return ret;
+        }
+    } else
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning,
+                     webrtc::kTraceVideoCoding,
+                     VCMId(_id),
+                     "No frame type request callback registered");
+        return VCM_MISSING_CALLBACK;
+    }
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VideoCodingModuleImpl::RequestKeyFrame()
+{
+    if (_frameTypeCallback != NULL)
+    {
+        const WebRtc_Word32 ret = _frameTypeCallback->RequestKeyFrame();
+        if (ret < 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError,
+                         webrtc::kTraceVideoCoding,
+                         VCMId(_id),
+                         "Failed to request key frame");
+            return ret;
+        }
+        _scheduleKeyRequest = false;
+    }
+    else
+    {
+        WEBRTC_TRACE(webrtc::kTraceWarning,
+                     webrtc::kTraceVideoCoding,
+                     VCMId(_id),
+                     "No frame type request callback registered");
+        return VCM_MISSING_CALLBACK;
+    }
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VideoCodingModuleImpl::DecodeDualFrame(WebRtc_UWord16 maxWaitTimeMs)
+{
+    CriticalSectionScoped cs(_receiveCritSect);
+    if (_dualReceiver.State() != kReceiving ||
+        _dualReceiver.NackMode() != kNackInfinite)
+    {
+        // The dual receiver is currently not receiving or
+        // dual decoder mode is disabled.
+        return VCM_OK;
+    }
+    WebRtc_Word64 dummyRenderTime;
+    WebRtc_Word32 decodeCount = 0;
+    VCMEncodedFrame* dualFrame = _dualReceiver.FrameForDecoding(
+                                                            maxWaitTimeMs,
+                                                            dummyRenderTime);
+    if (dualFrame != NULL && _dualDecoder != NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceStream,
+                     webrtc::kTraceVideoCoding,
+                     VCMId(_id),
+                     "Decoding frame %u with dual decoder",
+                     dualFrame->TimeStamp());
+        // Decode dualFrame and try to catch up
+        WebRtc_Word32 ret = _dualDecoder->Decode(*dualFrame,
+                                                 clock_->MillisecondTimestamp());
+        if (ret != WEBRTC_VIDEO_CODEC_OK)
+        {
+            WEBRTC_TRACE(webrtc::kTraceWarning,
+                         webrtc::kTraceVideoCoding,
+                         VCMId(_id),
+                         "Failed to decode frame with dual decoder");
+            _dualReceiver.ReleaseFrame(dualFrame);
+            return VCM_CODEC_ERROR;
+        }
+        if (_receiver.DualDecoderCaughtUp(dualFrame, _dualReceiver))
+        {
+            // Copy the complete decoder state of the dual decoder
+            // to the primary decoder.
+            WEBRTC_TRACE(webrtc::kTraceStream,
+                         webrtc::kTraceVideoCoding,
+                         VCMId(_id),
+                         "Dual decoder caught up");
+            _codecDataBase.CopyDecoder(*_dualDecoder);
+            _codecDataBase.ReleaseDecoder(_dualDecoder);
+            _dualDecoder = NULL;
+        }
+        decodeCount++;
+    }
+    _dualReceiver.ReleaseFrame(dualFrame);
+    return decodeCount;
+}
+
+
+// Must be called from inside the receive side critical section.
+WebRtc_Word32
+VideoCodingModuleImpl::Decode(const VCMEncodedFrame& frame)
+{
+    // Change decoder if payload type has changed
+    const bool renderTimingBefore = _codecDataBase.RenderTiming();
+    _decoder = _codecDataBase.SetDecoder(frame.PayloadType(),
+                                         _decodedFrameCallback);
+    if (renderTimingBefore != _codecDataBase.RenderTiming())
+    {
+        // Make sure we reset the decode time estimate since it will
+        // be zero for codecs without render timing.
+        _timing.ResetDecodeTime();
+    }
+    if (_decoder == NULL)
+    {
+        return VCM_NO_CODEC_REGISTERED;
+    }
+    // Decode a frame
+    WebRtc_Word32 ret = _decoder->Decode(frame, clock_->MillisecondTimestamp());
+
+    // Check for failed decoding, run frame type request callback if needed.
+    if (ret < 0)
+    {
+        if (ret == VCM_ERROR_REQUEST_SLI)
+        {
+            return RequestSliceLossIndication(
+                    _decodedFrameCallback.LastReceivedPictureID() + 1);
+        }
+        else
+        {
+            WEBRTC_TRACE(webrtc::kTraceError,
+                         webrtc::kTraceVideoCoding,
+                         VCMId(_id),
+                         "Failed to decode frame %u, requesting key frame",
+                         frame.TimeStamp());
+            ret = RequestKeyFrame();
+        }
+    }
+    else if (ret == VCM_REQUEST_SLI)
+    {
+        ret = RequestSliceLossIndication(
+            _decodedFrameCallback.LastReceivedPictureID() + 1);
+    }
+    if (!frame.Complete() || frame.MissingFrame())
+    {
+        switch (_keyRequestMode)
+        {
+            case kKeyOnKeyLoss:
+            {
+                if (frame.FrameType() == kVideoFrameKey)
+                {
+                    _scheduleKeyRequest = true;
+                    return VCM_OK;
+                }
+                break;
+            }
+            case kKeyOnLoss:
+            {
+                _scheduleKeyRequest = true;
+                return VCM_OK;
+            }
+            default:
+                break;
+        }
+    }
+    return ret;
+}
+
+WebRtc_Word32
+VideoCodingModuleImpl::DecodeFromStorage(
+    const EncodedVideoData& frameFromStorage)
+{
+    CriticalSectionScoped cs(_receiveCritSect);
+    WebRtc_Word32 ret = _frameFromFile.ExtractFromStorage(frameFromStorage);
+    if (ret < 0)
+    {
+        return ret;
+    }
+    return Decode(_frameFromFile);
+}
+
+// Reset the decoder state
+WebRtc_Word32
+VideoCodingModuleImpl::ResetDecoder()
+{
+    CriticalSectionScoped cs(_receiveCritSect);
+    if (_decoder != NULL)
+    {
+        _receiver.Initialize();
+        _timing.Reset();
+        _scheduleKeyRequest = false;
+        _decoder->Reset();
+    }
+    if (_dualReceiver.State() != kPassive)
+    {
+        _dualReceiver.Initialize();
+    }
+    if (_dualDecoder != NULL)
+    {
+        _codecDataBase.ReleaseDecoder(_dualDecoder);
+        _dualDecoder = NULL;
+    }
+    return VCM_OK;
+}
+
+// Register possible receive codecs, can be called multiple times
+WebRtc_Word32
+VideoCodingModuleImpl::RegisterReceiveCodec(const VideoCodec* receiveCodec,
+                                                WebRtc_Word32 numberOfCores,
+                                                bool requireKeyFrame)
+{
+    CriticalSectionScoped cs(_receiveCritSect);
+    if (receiveCodec == NULL)
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+    return _codecDataBase.RegisterReceiveCodec(receiveCodec, numberOfCores,
+                                               requireKeyFrame);
+}
+
+// Get current received codec
+WebRtc_Word32
+VideoCodingModuleImpl::ReceiveCodec(VideoCodec* currentReceiveCodec) const
+{
+    CriticalSectionScoped cs(_receiveCritSect);
+    if (currentReceiveCodec == NULL)
+    {
+        return VCM_PARAMETER_ERROR;
+    }
+    return _codecDataBase.ReceiveCodec(currentReceiveCodec);
+}
+
+// Get current received codec
+VideoCodecType
+VideoCodingModuleImpl::ReceiveCodec() const
+{
+    CriticalSectionScoped cs(_receiveCritSect);
+    return _codecDataBase.ReceiveCodec();
+}
+
+// Incoming packet from network parsed and ready for decode, non blocking.
+WebRtc_Word32
+VideoCodingModuleImpl::IncomingPacket(const WebRtc_UWord8* incomingPayload,
+                                    WebRtc_UWord32 payloadLength,
+                                    const WebRtcRTPHeader& rtpInfo)
+{
+    const VCMPacket packet(incomingPayload, payloadLength, rtpInfo);
+    WebRtc_Word32 ret;
+    if (_dualReceiver.State() != kPassive)
+    {
+        ret = _dualReceiver.InsertPacket(packet,
+                                         rtpInfo.type.Video.width,
+                                         rtpInfo.type.Video.height);
+        if (ret == VCM_FLUSH_INDICATOR) {
+          RequestKeyFrame();
+          ResetDecoder();
+        } else if (ret < 0) {
+          return ret;
+        }
+    }
+    ret = _receiver.InsertPacket(packet, rtpInfo.type.Video.width,
+                                 rtpInfo.type.Video.height);
+    if (ret == VCM_FLUSH_INDICATOR) {
+      RequestKeyFrame();
+      ResetDecoder();
+    } else if (ret < 0) {
+      return ret;
+    }
+    return VCM_OK;
+}
+
+// Minimum playout delay (used for lip-sync). This is the minimum delay required
+// to sync with audio. Not included in  VideoCodingModule::Delay()
+// Defaults to 0 ms.
+WebRtc_Word32
+VideoCodingModuleImpl::SetMinimumPlayoutDelay(WebRtc_UWord32 minPlayoutDelayMs)
+{
+    _timing.SetMinimumTotalDelay(minPlayoutDelayMs);
+    return VCM_OK;
+}
+
+// The estimated delay caused by rendering, defaults to
+// kDefaultRenderDelayMs = 10 ms
+WebRtc_Word32
+VideoCodingModuleImpl::SetRenderDelay(WebRtc_UWord32 timeMS)
+{
+    _timing.SetRenderDelay(timeMS);
+    return VCM_OK;
+}
+
+// Current video delay
+WebRtc_Word32
+VideoCodingModuleImpl::Delay() const
+{
+    return _timing.TargetVideoDelay();
+}
+
+// Nack list
+WebRtc_Word32
+VideoCodingModuleImpl::NackList(WebRtc_UWord16* nackList, WebRtc_UWord16& size)
+{
+    VCMNackStatus nackStatus = kNackOk;
+    // Collect sequence numbers from the default receiver
+    // if in normal nack mode. Otherwise collect them from
+    // the dual receiver if the dual receiver is receiving.
+    if (_receiver.NackMode() != kNoNack)
+    {
+        nackStatus = _receiver.NackList(nackList, size);
+    }
+    else if (_dualReceiver.State() != kPassive)
+    {
+        nackStatus = _dualReceiver.NackList(nackList, size);
+    }
+    else
+    {
+        size = 0;
+    }
+
+    switch (nackStatus)
+    {
+    case kNackNeedMoreMemory:
+        {
+            WEBRTC_TRACE(webrtc::kTraceError,
+                         webrtc::kTraceVideoCoding,
+                         VCMId(_id),
+                         "Out of memory");
+            return VCM_MEMORY;
+        }
+    case kNackKeyFrameRequest:
+        {
+            CriticalSectionScoped cs(_receiveCritSect);
+            WEBRTC_TRACE(webrtc::kTraceWarning,
+                         webrtc::kTraceVideoCoding,
+                         VCMId(_id),
+                         "Failed to get NACK list, requesting key frame");
+            return RequestKeyFrame();
+        }
+    default:
+        break;
+    }
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VideoCodingModuleImpl::ReceivedFrameCount(VCMFrameCount& frameCount) const
+{
+    return _receiver.ReceivedFrameCount(frameCount);
+}
+
+WebRtc_UWord32 VideoCodingModuleImpl::DiscardedPackets() const {
+  return _receiver.DiscardedPackets();
+}
+
+int VideoCodingModuleImpl::SetSenderNackMode(SenderNackMode mode) {
+  CriticalSectionScoped cs(_sendCritSect);
+
+  switch (mode) {
+    case kNackNone:
+      _mediaOpt.EnableProtectionMethod(false, kNack);
+      break;
+    case kNackAll:
+      _mediaOpt.EnableProtectionMethod(true, kNack);
+      break;
+    case kNackSelective:
+      return VCM_NOT_IMPLEMENTED;
+      break;
+  }
+  return VCM_OK;
+}
+
+int VideoCodingModuleImpl::SetSenderReferenceSelection(bool enable) {
+  return VCM_NOT_IMPLEMENTED;
+}
+
+int VideoCodingModuleImpl::SetSenderFEC(bool enable) {
+  CriticalSectionScoped cs(_sendCritSect);
+  _mediaOpt.EnableProtectionMethod(enable, kFec);
+  return VCM_OK;
+}
+
+int VideoCodingModuleImpl::SetSenderKeyFramePeriod(int periodMs) {
+  return VCM_NOT_IMPLEMENTED;
+}
+
+int VideoCodingModuleImpl::SetReceiverRobustnessMode(
+    ReceiverRobustness robustnessMode,
+    DecodeErrors errorMode) {
+  CriticalSectionScoped cs(_receiveCritSect);
+  switch (robustnessMode) {
+    case kNone:
+      _receiver.SetNackMode(kNoNack);
+      _dualReceiver.SetNackMode(kNoNack);
+      if (errorMode == kNoDecodeErrors) {
+        _keyRequestMode = kKeyOnLoss;
+      } else {
+        _keyRequestMode = kKeyOnError;
+      }
+      break;
+    case kHardNack:
+      if (errorMode == kAllowDecodeErrors) {
+        return VCM_PARAMETER_ERROR;
+      }
+      _receiver.SetNackMode(kNackInfinite);
+      _dualReceiver.SetNackMode(kNoNack);
+      _keyRequestMode = kKeyOnError;  // TODO(hlundin): On long NACK list?
+      break;
+    case kSoftNack:
+      assert(false); // TODO(hlundin): Not completed.
+      return VCM_NOT_IMPLEMENTED;
+      _receiver.SetNackMode(kNackHybrid);
+      _dualReceiver.SetNackMode(kNoNack);
+      _keyRequestMode = kKeyOnError;
+      break;
+    case kDualDecoder:
+      if (errorMode == kNoDecodeErrors) {
+        return VCM_PARAMETER_ERROR;
+      }
+      _receiver.SetNackMode(kNoNack);
+      _dualReceiver.SetNackMode(kNackInfinite);
+      _keyRequestMode = kKeyOnError;
+      break;
+    case kReferenceSelection:
+      assert(false); // TODO(hlundin): Not completed.
+      return VCM_NOT_IMPLEMENTED;
+      if (errorMode == kNoDecodeErrors) {
+        return VCM_PARAMETER_ERROR;
+      }
+      _receiver.SetNackMode(kNoNack);
+      _dualReceiver.SetNackMode(kNoNack);
+      break;
+  }
+  return VCM_OK;
+}
+
+int VideoCodingModuleImpl::StartDebugRecording(const char* file_name_utf8) {
+  CriticalSectionScoped cs(_sendCritSect);
+  _encoderInputFile = fopen(file_name_utf8, "wb");
+  if (_encoderInputFile == NULL)
+    return VCM_GENERAL_ERROR;
+  return VCM_OK;
+}
+
+int VideoCodingModuleImpl::StopDebugRecording(){
+  CriticalSectionScoped cs(_sendCritSect);
+  if (_encoderInputFile != NULL) {
+    fclose(_encoderInputFile);
+    _encoderInputFile = NULL;
+  }
+  return VCM_OK;
+}
+
+}  // namespace webrtc
diff --git a/src/modules/video_coding/main/source/video_coding_impl.h b/src/modules/video_coding/main/source/video_coding_impl.h
new file mode 100644
index 0000000..1e39cbb
--- /dev/null
+++ b/src/modules/video_coding/main/source/video_coding_impl.h
@@ -0,0 +1,315 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_VIDEO_CODING_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_CODING_VIDEO_CODING_IMPL_H_
+
+#include "video_coding.h"
+#include "critical_section_wrapper.h"
+#include "frame_buffer.h"
+#include "receiver.h"
+#include "timing.h"
+#include "jitter_buffer.h"
+#include "codec_database.h"
+#include "generic_decoder.h"
+#include "generic_encoder.h"
+#include "media_optimization.h"
+#include "modules/video_coding/main/source/tick_time_base.h"
+
+#include <stdio.h>
+
+namespace webrtc
+{
+
+class VCMProcessTimer
+{
+public:
+    VCMProcessTimer(WebRtc_UWord32 periodMs, TickTimeBase* clock)
+        : _clock(clock),
+          _periodMs(periodMs),
+          _latestMs(_clock->MillisecondTimestamp()) {}
+    WebRtc_UWord32 Period() const;
+    WebRtc_UWord32 TimeUntilProcess() const;
+    void Processed();
+
+private:
+    TickTimeBase*         _clock;
+    WebRtc_UWord32        _periodMs;
+    WebRtc_Word64         _latestMs;
+};
+
+enum VCMKeyRequestMode
+{
+    kKeyOnError,    // Normal mode, request key frames on decoder error
+    kKeyOnKeyLoss,  // Request key frames on decoder error and on packet loss
+                    // in key frames.
+    kKeyOnLoss,     // Request key frames on decoder error and on packet loss
+                    // in any frame
+};
+
+class VideoCodingModuleImpl : public VideoCodingModule
+{
+public:
+    VideoCodingModuleImpl(const WebRtc_Word32 id,
+                          TickTimeBase* clock,
+                          bool delete_clock_on_destroy);
+
+    virtual ~VideoCodingModuleImpl();
+
+    WebRtc_Word32 Id() const;
+
+    //  Change the unique identifier of this object
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    // Returns the number of milliseconds until the module want a worker thread
+    // to call Process
+    virtual WebRtc_Word32 TimeUntilNextProcess();
+
+    virtual WebRtc_Word32 Process();
+
+    /*
+    *   Sender
+    */
+
+    // Initialize send codec
+    virtual WebRtc_Word32 InitializeSender();
+
+    // Register the send codec to be used.
+    virtual WebRtc_Word32 RegisterSendCodec(const VideoCodec* sendCodec,
+                                            WebRtc_UWord32 numberOfCores,
+                                            WebRtc_UWord32 maxPayloadSize);
+
+    // Get current send codec
+    virtual WebRtc_Word32 SendCodec(VideoCodec* currentSendCodec) const;
+
+    // Get current send codec type
+    virtual VideoCodecType SendCodec() const;
+
+    // Register an external encoder object.
+    virtual WebRtc_Word32 RegisterExternalEncoder(VideoEncoder* externalEncoder,
+                                                  WebRtc_UWord8 payloadType,
+                                                  bool internalSource = false);
+
+    // Get codec config parameters
+    virtual WebRtc_Word32 CodecConfigParameters(WebRtc_UWord8* buffer,
+                                                WebRtc_Word32 size);
+
+    // Get encode bitrate
+    virtual int Bitrate(unsigned int* bitrate) const;
+
+    // Get encode frame rate
+    virtual int FrameRate(unsigned int* framerate) const;
+
+    // Set channel parameters
+    virtual WebRtc_Word32 SetChannelParameters(
+        WebRtc_UWord32 availableBandWidth,
+        WebRtc_UWord8 lossRate,
+        WebRtc_UWord32 rtt);
+
+    // Set recieve channel parameters
+    virtual WebRtc_Word32 SetReceiveChannelParameters(WebRtc_UWord32 rtt);
+
+    // Register a transport callback which will be called to deliver the
+    // encoded buffers
+    virtual WebRtc_Word32 RegisterTransportCallback(
+        VCMPacketizationCallback* transport);
+
+    // Register a send statistics callback which will be called to deliver
+    // information about the video stream produced by the encoder,
+    // for instance the average frame rate and bit rate.
+    virtual WebRtc_Word32 RegisterSendStatisticsCallback(
+        VCMSendStatisticsCallback* sendStats);
+
+    // Register a video quality settings callback which will be called when
+    // frame rate/dimensions need to be updated for video quality optimization
+    virtual WebRtc_Word32 RegisterVideoQMCallback(
+        VCMQMSettingsCallback* videoQMSettings);
+
+    // Register a video protection callback which will be called to deliver
+    // the requested FEC rate and NACK status (on/off).
+    virtual WebRtc_Word32 RegisterProtectionCallback(
+        VCMProtectionCallback* protection);
+
+    // Enable or disable a video protection method.
+   virtual WebRtc_Word32 SetVideoProtection(VCMVideoProtection videoProtection,
+                                            bool enable);
+
+    // Add one raw video frame to the encoder, blocking.
+    virtual WebRtc_Word32 AddVideoFrame(
+        const VideoFrame& videoFrame,
+        const VideoContentMetrics* _contentMetrics = NULL,
+        const CodecSpecificInfo* codecSpecificInfo = NULL);
+
+    virtual WebRtc_Word32 IntraFrameRequest();
+
+    //Enable frame dropper
+    virtual WebRtc_Word32 EnableFrameDropper(bool enable);
+
+    // Sent frame counters
+    virtual WebRtc_Word32 SentFrameCount(VCMFrameCount& frameCount) const;
+
+    /*
+    *   Receiver
+    */
+
+    // Initialize receiver, resets codec database etc
+    virtual WebRtc_Word32 InitializeReceiver();
+
+    // Register possible reveive codecs, can be called multiple times
+    virtual WebRtc_Word32 RegisterReceiveCodec(const VideoCodec* receiveCodec,
+                                               WebRtc_Word32 numberOfCores,
+                                               bool requireKeyFrame = false);
+
+    // Register an externally defined decoder/render object.
+    // Can be a decoder only or a decoder coupled with a renderer.
+    virtual WebRtc_Word32 RegisterExternalDecoder(VideoDecoder* externalDecoder,
+                                                  WebRtc_UWord8 payloadType,
+                                                  bool internalRenderTiming);
+
+    // Register a receive callback. Will be called whenever there are a new
+    // frame ready for rendering.
+    virtual WebRtc_Word32 RegisterReceiveCallback(
+        VCMReceiveCallback* receiveCallback);
+
+    // Register a receive statistics callback which will be called to deliver
+    // information about the video stream received by the receiving side of the
+    // VCM, for instance the average frame rate and bit rate.
+    virtual WebRtc_Word32 RegisterReceiveStatisticsCallback(
+        VCMReceiveStatisticsCallback* receiveStats);
+
+    // Register a frame type request callback.
+    virtual WebRtc_Word32 RegisterFrameTypeCallback(
+        VCMFrameTypeCallback* frameTypeCallback);
+
+    // Register a frame storage callback.
+    virtual WebRtc_Word32 RegisterFrameStorageCallback(
+        VCMFrameStorageCallback* frameStorageCallback);
+
+    // Nack callback
+    virtual WebRtc_Word32 RegisterPacketRequestCallback(
+        VCMPacketRequestCallback* callback);
+
+    // Decode next frame, blocks for a maximum of maxWaitTimeMs milliseconds.
+    // Should be called as often as possible to get the most out of the decoder.
+    virtual WebRtc_Word32 Decode(WebRtc_UWord16 maxWaitTimeMs = 200);
+
+    // Decode next dual frame, blocks for a maximum of maxWaitTimeMs
+    // milliseconds.
+    virtual WebRtc_Word32 DecodeDualFrame(WebRtc_UWord16 maxWaitTimeMs = 200);
+
+    // Reset the decoder state
+    virtual WebRtc_Word32 ResetDecoder();
+
+    // Get current received codec
+    virtual WebRtc_Word32 ReceiveCodec(VideoCodec* currentReceiveCodec) const;
+
+    // Get current received codec type
+    virtual VideoCodecType ReceiveCodec() const;
+
+    // Incoming packet from network parsed and ready for decode, non blocking.
+    virtual WebRtc_Word32 IncomingPacket(const WebRtc_UWord8* incomingPayload,
+                                         WebRtc_UWord32 payloadLength,
+                                         const WebRtcRTPHeader& rtpInfo);
+
+    // A part of an encoded frame to be decoded.
+    // Used in conjunction with VCMFrameStorageCallback.
+    virtual WebRtc_Word32 DecodeFromStorage(
+        const EncodedVideoData& frameFromStorage);
+
+    // Minimum playout delay (Used for lip-sync). This is the minimum delay
+    // required to sync with audio. Not included in  VideoCodingModule::Delay()
+    // Defaults to 0 ms.
+    virtual WebRtc_Word32 SetMinimumPlayoutDelay(
+        WebRtc_UWord32 minPlayoutDelayMs);
+
+    // The estimated delay caused by rendering
+    virtual WebRtc_Word32 SetRenderDelay(WebRtc_UWord32 timeMS);
+
+    // Current delay
+    virtual WebRtc_Word32 Delay() const;
+
+    // Received frame counters
+    virtual WebRtc_Word32 ReceivedFrameCount(VCMFrameCount& frameCount) const;
+
+    // Returns the number of packets discarded by the jitter buffer.
+    virtual WebRtc_UWord32 DiscardedPackets() const;
+
+
+    // Robustness APIs
+
+    // Set the sender RTX/NACK mode.
+    virtual int SetSenderNackMode(SenderNackMode mode);
+
+    // Set the sender reference picture selection (RPS) mode.
+    virtual int SetSenderReferenceSelection(bool enable);
+
+    // Set the sender forward error correction (FEC) mode.
+    virtual int SetSenderFEC(bool enable);
+
+    // Set the key frame period, or disable periodic key frames (I-frames).
+    virtual int SetSenderKeyFramePeriod(int periodMs);
+
+    // Set the receiver robustness mode.
+    virtual int SetReceiverRobustnessMode(ReceiverRobustness robustnessMode,
+                                          DecodeErrors errorMode);
+    // Enables recording of debugging information.
+    virtual int StartDebugRecording(const char* file_name_utf8);
+
+    // Disables recording of debugging information.
+    virtual int StopDebugRecording();
+
+protected:
+    WebRtc_Word32 Decode(const webrtc::VCMEncodedFrame& frame);
+    WebRtc_Word32 RequestKeyFrame();
+    WebRtc_Word32 RequestSliceLossIndication(
+        const WebRtc_UWord64 pictureID) const;
+    WebRtc_Word32 NackList(WebRtc_UWord16* nackList, WebRtc_UWord16& size);
+
+private:
+    WebRtc_Word32                       _id;
+    TickTimeBase*                       clock_;
+    bool                                delete_clock_on_destroy_;
+    CriticalSectionWrapper*             _receiveCritSect;
+    bool                                _receiverInited;
+    VCMTiming                           _timing;
+    VCMTiming                           _dualTiming;
+    VCMReceiver                         _receiver;
+    VCMReceiver                         _dualReceiver;
+    VCMDecodedFrameCallback             _decodedFrameCallback;
+    VCMDecodedFrameCallback             _dualDecodedFrameCallback;
+    VCMFrameTypeCallback*               _frameTypeCallback;
+    VCMFrameStorageCallback*            _frameStorageCallback;
+    VCMReceiveStatisticsCallback*       _receiveStatsCallback;
+    VCMPacketRequestCallback*           _packetRequestCallback;
+    VCMGenericDecoder*                  _decoder;
+    VCMGenericDecoder*                  _dualDecoder;
+#ifdef DEBUG_DECODER_BIT_STREAM
+    FILE*                               _bitStreamBeforeDecoder;
+#endif
+    VCMFrameBuffer                      _frameFromFile;
+    VCMKeyRequestMode                   _keyRequestMode;
+    bool                                _scheduleKeyRequest;
+
+    CriticalSectionWrapper*             _sendCritSect; // Critical section for send side
+    VCMGenericEncoder*                  _encoder;
+    VCMEncodedFrameCallback             _encodedFrameCallback;
+    FrameType                           _nextFrameType;
+    VCMMediaOptimization                _mediaOpt;
+    VideoCodecType                      _sendCodecType;
+    VCMSendStatisticsCallback*          _sendStatsCallback;
+    FILE*                               _encoderInputFile;
+    VCMCodecDataBase                    _codecDataBase;
+    VCMProcessTimer                     _receiveStatsTimer;
+    VCMProcessTimer                     _sendStatsTimer;
+    VCMProcessTimer                     _retransmissionTimer;
+    VCMProcessTimer                     _keyRequestTimer;
+};
+} // namespace webrtc
+#endif // WEBRTC_MODULES_VIDEO_CODING_VIDEO_CODING_IMPL_H_
diff --git a/src/modules/video_coding/main/source/video_coding_robustness_unittest.cc b/src/modules/video_coding/main/source/video_coding_robustness_unittest.cc
new file mode 100644
index 0000000..0ee9657
--- /dev/null
+++ b/src/modules/video_coding/main/source/video_coding_robustness_unittest.cc
@@ -0,0 +1,396 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gmock/gmock.h"
+#include "gtest/gtest.h"
+#include "modules/video_coding/codecs/interface/mock/mock_video_codec_interface.h"
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "modules/video_coding/main/interface/mock/mock_vcm_callbacks.h"
+#include "modules/video_coding/main/source/mock/fake_tick_time.h"
+
+namespace webrtc {
+
+using ::testing::Return;
+using ::testing::_;
+using ::testing::ElementsAre;
+using ::testing::AllOf;
+using ::testing::Args;
+using ::testing::Field;
+using ::testing::Pointee;
+using ::testing::NiceMock;
+using ::testing::Sequence;
+
+class VCMRobustnessTest : public ::testing::Test {
+ protected:
+  static const size_t kPayloadLen = 10;
+
+  virtual void SetUp() {
+    clock_ = new FakeTickTime(0);
+    ASSERT_TRUE(clock_ != NULL);
+    vcm_ = VideoCodingModule::Create(0, clock_);
+    ASSERT_TRUE(vcm_ != NULL);
+    ASSERT_EQ(0, vcm_->InitializeReceiver());
+    ASSERT_EQ(0, vcm_->RegisterFrameTypeCallback(&frame_type_callback_));
+    ASSERT_EQ(0, vcm_->RegisterPacketRequestCallback(&request_callback_));
+    ASSERT_EQ(VCM_OK, vcm_->Codec(kVideoCodecVP8, &video_codec_));
+    ASSERT_EQ(VCM_OK, vcm_->RegisterReceiveCodec(&video_codec_, 1));
+    ASSERT_EQ(VCM_OK, vcm_->RegisterExternalDecoder(&decoder_,
+                                                    video_codec_.plType,
+                                                    true));
+  }
+
+  virtual void TearDown() {
+    VideoCodingModule::Destroy(vcm_);
+    delete clock_;
+  }
+
+  void InsertPacket(uint32_t timestamp,
+                    uint16_t seq_no,
+                    bool first,
+                    bool marker_bit,
+                    FrameType frame_type) {
+    const uint8_t payload[kPayloadLen] = {0};
+    WebRtcRTPHeader rtp_info;
+    memset(&rtp_info, 0, sizeof(rtp_info));
+    rtp_info.frameType = frame_type;
+    rtp_info.header.timestamp = timestamp;
+    rtp_info.header.sequenceNumber = seq_no;
+    rtp_info.header.markerBit = marker_bit;
+    rtp_info.header.payloadType = video_codec_.plType;
+    rtp_info.type.Video.codec = kRTPVideoVP8;
+    rtp_info.type.Video.codecHeader.VP8.InitRTPVideoHeaderVP8();
+    rtp_info.type.Video.isFirstPacket = first;
+
+    ASSERT_EQ(VCM_OK, vcm_->IncomingPacket(payload, kPayloadLen, rtp_info));
+  }
+
+  VideoCodingModule* vcm_;
+  VideoCodec video_codec_;
+  MockVCMFrameTypeCallback frame_type_callback_;
+  MockPacketRequestCallback request_callback_;
+  NiceMock<MockVideoDecoder> decoder_;
+  NiceMock<MockVideoDecoder> decoderCopy_;
+  FakeTickTime* clock_;
+};
+
+TEST_F(VCMRobustnessTest, TestHardNack) {
+  Sequence s;
+  EXPECT_CALL(request_callback_, ResendPackets(_, 2))
+      .With(Args<0, 1>(ElementsAre(6, 7)))
+      .Times(1);
+  for (int ts = 0; ts <= 6000; ts += 3000) {
+    EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, ts),
+                                       Field(&EncodedImage::_length,
+                                             kPayloadLen * 3),
+                                       Field(&EncodedImage::_completeFrame,
+                                             true)),
+                                 false, _, _, _))
+        .Times(1)
+        .InSequence(s);
+  }
+
+  ASSERT_EQ(VCM_OK, vcm_->SetReceiverRobustnessMode(
+      VideoCodingModule::kHardNack,
+      VideoCodingModule::kNoDecodeErrors));
+
+  InsertPacket(0, 0, true, false, kVideoFrameKey);
+  InsertPacket(0, 1, false, false, kVideoFrameKey);
+  InsertPacket(0, 2, false, true, kVideoFrameKey);
+
+  InsertPacket(3000, 3, true, false, kVideoFrameDelta);
+  InsertPacket(3000, 4, false, false, kVideoFrameDelta);
+  InsertPacket(3000, 5, false, true, kVideoFrameDelta);
+
+  ASSERT_EQ(VCM_OK, vcm_->Decode(0));
+  ASSERT_EQ(VCM_OK, vcm_->Decode(0));
+  ASSERT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
+
+  clock_->IncrementDebugClock(10);
+
+  ASSERT_EQ(VCM_OK, vcm_->Process());
+
+  ASSERT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
+
+  InsertPacket(6000, 8, false, true, kVideoFrameDelta);
+  clock_->IncrementDebugClock(10);
+  ASSERT_EQ(VCM_OK, vcm_->Process());
+
+  ASSERT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
+
+  InsertPacket(6000, 6, true, false, kVideoFrameDelta);
+  InsertPacket(6000, 7, false, false, kVideoFrameDelta);
+  clock_->IncrementDebugClock(10);
+  ASSERT_EQ(VCM_OK, vcm_->Process());
+
+  ASSERT_EQ(VCM_OK, vcm_->Decode(0));
+}
+
+TEST_F(VCMRobustnessTest, TestHardNackNoneDecoded) {
+  EXPECT_CALL(request_callback_, ResendPackets(_, _))
+      .Times(0);
+  EXPECT_CALL(frame_type_callback_, RequestKeyFrame())
+        .Times(1);
+
+  ASSERT_EQ(VCM_OK, vcm_->SetReceiverRobustnessMode(
+      VideoCodingModule::kHardNack,
+      VideoCodingModule::kNoDecodeErrors));
+
+  InsertPacket(3000, 3, true, false, kVideoFrameDelta);
+  InsertPacket(3000, 4, false, false, kVideoFrameDelta);
+  InsertPacket(3000, 5, false, true, kVideoFrameDelta);
+
+  EXPECT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
+  ASSERT_EQ(VCM_OK, vcm_->Process());
+
+  clock_->IncrementDebugClock(10);
+
+  EXPECT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
+  ASSERT_EQ(VCM_OK, vcm_->Process());
+}
+
+TEST_F(VCMRobustnessTest, TestDualDecoder) {
+  Sequence s1, s2;
+  EXPECT_CALL(request_callback_, ResendPackets(_, 1))
+      .With(Args<0, 1>(ElementsAre(4)))
+      .Times(1);
+
+  EXPECT_CALL(decoder_, Copy())
+      .Times(1)
+      .WillOnce(Return(&decoderCopy_));
+  EXPECT_CALL(decoderCopy_, Copy())
+      .Times(1)
+      .WillOnce(Return(&decoder_));
+
+  // Decode operations
+  EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 0),
+                                     Field(&EncodedImage::_completeFrame,
+                                           true)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s1);
+  EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 3000),
+                                     Field(&EncodedImage::_completeFrame,
+                                           false)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s1);
+  EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 6000),
+                                     Field(&EncodedImage::_completeFrame,
+                                           true)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s1);
+  EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 9000),
+                                     Field(&EncodedImage::_completeFrame,
+                                           true)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s1);
+
+  EXPECT_CALL(decoderCopy_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 3000),
+                                     Field(&EncodedImage::_completeFrame,
+                                           true)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s2);
+  EXPECT_CALL(decoderCopy_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 6000),
+                                     Field(&EncodedImage::_completeFrame,
+                                           true)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s2);
+
+
+  ASSERT_EQ(VCM_OK, vcm_->SetReceiverRobustnessMode(
+      VideoCodingModule::kDualDecoder,
+      VideoCodingModule::kAllowDecodeErrors));
+
+  InsertPacket(0, 0, true, false, kVideoFrameKey);
+  InsertPacket(0, 1, false, false, kVideoFrameKey);
+  InsertPacket(0, 2, false, true, kVideoFrameKey);
+  EXPECT_EQ(VCM_OK, vcm_->Decode(0));  // Decode timestamp 0.
+
+  clock_->IncrementDebugClock(33);
+  InsertPacket(3000, 3, true, false, kVideoFrameDelta);
+  // Packet 4 missing
+  InsertPacket(3000, 5, false, true, kVideoFrameDelta);
+  EXPECT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
+
+  clock_->IncrementDebugClock(33);
+  InsertPacket(6000, 6, true, false, kVideoFrameDelta);
+  InsertPacket(6000, 7, false, false, kVideoFrameDelta);
+  InsertPacket(6000, 8, false, true, kVideoFrameDelta);
+
+  EXPECT_EQ(VCM_OK, vcm_->Decode(0));  // Decode timestamp 3000 incomplete.
+                                       // Spawn a decoder copy.
+  EXPECT_EQ(0, vcm_->DecodeDualFrame(0));  // Expect no dual decoder action.
+
+  clock_->IncrementDebugClock(10);
+  EXPECT_EQ(VCM_OK, vcm_->Process());  // Generate NACK list.
+
+  EXPECT_EQ(VCM_OK, vcm_->Decode(0));  // Decode timestamp 6000 complete.
+  EXPECT_EQ(0, vcm_->DecodeDualFrame(0));  // Expect no dual decoder action.
+
+  InsertPacket(3000, 4, false, false, kVideoFrameDelta);
+  EXPECT_EQ(1, vcm_->DecodeDualFrame(0));  // Dual decode of timestamp 3000.
+  EXPECT_EQ(1, vcm_->DecodeDualFrame(0));  // Dual decode of timestamp 6000.
+  EXPECT_EQ(0, vcm_->DecodeDualFrame(0));  // No more frames.
+
+  InsertPacket(9000, 9, true, false, kVideoFrameDelta);
+  InsertPacket(9000, 10, false, false, kVideoFrameDelta);
+  InsertPacket(9000, 11, false, true, kVideoFrameDelta);
+  EXPECT_EQ(VCM_OK, vcm_->Decode(0));  // Decode timestamp 9000 complete.
+  EXPECT_EQ(0, vcm_->DecodeDualFrame(0));  // Expect no dual decoder action.
+}
+
+TEST_F(VCMRobustnessTest, TestModeNoneWithErrors) {
+  EXPECT_CALL(decoder_, InitDecode(_, _)).Times(1);
+  EXPECT_CALL(decoder_, Release()).Times(1);
+  Sequence s1;
+  EXPECT_CALL(request_callback_, ResendPackets(_, 1))
+      .With(Args<0, 1>(ElementsAre(4)))
+      .Times(0);
+
+  EXPECT_CALL(decoder_, Copy())
+      .Times(0);
+  EXPECT_CALL(decoderCopy_, Copy())
+      .Times(0);
+
+  // Decode operations
+  EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 0),
+                                     Field(&EncodedImage::_completeFrame,
+                                           true)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s1);
+  EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 3000),
+                                     Field(&EncodedImage::_completeFrame,
+                                           false)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s1);
+  EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 6000),
+                                     Field(&EncodedImage::_completeFrame,
+                                           true)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s1);
+  EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 9000),
+                                     Field(&EncodedImage::_completeFrame,
+                                           true)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s1);
+
+  ASSERT_EQ(VCM_OK, vcm_->SetReceiverRobustnessMode(
+      VideoCodingModule::kNone,
+      VideoCodingModule::kAllowDecodeErrors));
+
+  InsertPacket(0, 0, true, false, kVideoFrameKey);
+  InsertPacket(0, 1, false, false, kVideoFrameKey);
+  InsertPacket(0, 2, false, true, kVideoFrameKey);
+  EXPECT_EQ(VCM_OK, vcm_->Decode(0));  // Decode timestamp 0.
+  EXPECT_EQ(VCM_OK, vcm_->Process());  // Expect no NACK list.
+
+  clock_->IncrementDebugClock(33);
+  InsertPacket(3000, 3, true, false, kVideoFrameDelta);
+  // Packet 4 missing
+  InsertPacket(3000, 5, false, true, kVideoFrameDelta);
+  EXPECT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
+  EXPECT_EQ(VCM_OK, vcm_->Process());  // Expect no NACK list.
+
+  clock_->IncrementDebugClock(33);
+  InsertPacket(6000, 6, true, false, kVideoFrameDelta);
+  InsertPacket(6000, 7, false, false, kVideoFrameDelta);
+  InsertPacket(6000, 8, false, true, kVideoFrameDelta);
+  EXPECT_EQ(VCM_OK, vcm_->Decode(0));  // Decode timestamp 3000 incomplete.
+  EXPECT_EQ(VCM_OK, vcm_->Process());  // Expect no NACK list.
+
+  clock_->IncrementDebugClock(10);
+  EXPECT_EQ(VCM_OK, vcm_->Decode(0));  // Decode timestamp 6000 complete.
+  EXPECT_EQ(VCM_OK, vcm_->Process());  // Expect no NACK list.
+
+  clock_->IncrementDebugClock(23);
+  InsertPacket(3000, 4, false, false, kVideoFrameDelta);
+
+  InsertPacket(9000, 9, true, false, kVideoFrameDelta);
+  InsertPacket(9000, 10, false, false, kVideoFrameDelta);
+  InsertPacket(9000, 11, false, true, kVideoFrameDelta);
+  EXPECT_EQ(VCM_OK, vcm_->Decode(0));  // Decode timestamp 9000 complete.
+}
+
+TEST_F(VCMRobustnessTest, TestModeNoneWithoutErrors) {
+  Sequence s1;
+  EXPECT_CALL(decoder_, InitDecode(_, _)).Times(1);
+  EXPECT_CALL(decoder_, Release()).Times(1);
+  EXPECT_CALL(request_callback_, ResendPackets(_, 1))
+      .With(Args<0, 1>(ElementsAre(4)))
+      .Times(0);
+
+  EXPECT_CALL(decoder_, Copy())
+      .Times(0);
+  EXPECT_CALL(decoderCopy_, Copy())
+      .Times(0);
+
+  // Decode operations
+  EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 0),
+                                     Field(&EncodedImage::_completeFrame,
+                                           true)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s1);
+  EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 3000),
+                                     Field(&EncodedImage::_completeFrame,
+                                           false)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s1);
+  EXPECT_CALL(decoder_, Decode(AllOf(Field(&EncodedImage::_timeStamp, 6000),
+                                     Field(&EncodedImage::_completeFrame,
+                                           true)),
+                               false, _, _, _))
+        .Times(1)
+        .InSequence(s1);
+  EXPECT_CALL(frame_type_callback_, RequestKeyFrame())
+        .Times(1);
+
+  ASSERT_EQ(VCM_OK, vcm_->SetReceiverRobustnessMode(
+      VideoCodingModule::kNone,
+      VideoCodingModule::kNoDecodeErrors));
+
+  InsertPacket(0, 0, true, false, kVideoFrameKey);
+  InsertPacket(0, 1, false, false, kVideoFrameKey);
+  InsertPacket(0, 2, false, true, kVideoFrameKey);
+  EXPECT_EQ(VCM_OK, vcm_->Decode(0));  // Decode timestamp 0.
+  EXPECT_EQ(VCM_OK, vcm_->Process());  // Expect no NACK list.
+
+  clock_->IncrementDebugClock(33);
+  InsertPacket(3000, 3, true, false, kVideoFrameDelta);
+  // Packet 4 missing
+  InsertPacket(3000, 5, false, true, kVideoFrameDelta);
+  EXPECT_EQ(VCM_FRAME_NOT_READY, vcm_->Decode(0));
+  EXPECT_EQ(VCM_OK, vcm_->Process());  // Expect no NACK list.
+
+  clock_->IncrementDebugClock(33);
+  InsertPacket(6000, 6, true, false, kVideoFrameDelta);
+  InsertPacket(6000, 7, false, false, kVideoFrameDelta);
+  InsertPacket(6000, 8, false, true, kVideoFrameDelta);
+  EXPECT_EQ(VCM_OK, vcm_->Decode(0));  // Decode timestamp 3000 incomplete.
+                                       // Schedule key frame request.
+  EXPECT_EQ(VCM_OK, vcm_->Process());  // Expect no NACK list.
+
+  clock_->IncrementDebugClock(10);
+  EXPECT_EQ(VCM_OK, vcm_->Decode(0));  // Decode timestamp 6000 complete.
+  EXPECT_EQ(VCM_OK, vcm_->Process());  // Expect no NACK list.
+
+  clock_->IncrementDebugClock(500);    // Wait for the key request timer to set.
+  EXPECT_EQ(VCM_OK, vcm_->Process());  // Expect key frame request.
+}
+}  // namespace webrtc
diff --git a/src/modules/video_coding/main/source/video_coding_test.gypi b/src/modules/video_coding/main/source/video_coding_test.gypi
new file mode 100644
index 0000000..eb9dbe4
--- /dev/null
+++ b/src/modules/video_coding/main/source/video_coding_test.gypi
@@ -0,0 +1,96 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [{
+      'target_name': 'video_coding_test',
+      'type': 'executable',
+      'dependencies': [
+         '<(DEPTH)/testing/gtest.gyp:gtest',
+         '<(webrtc_root)/test/test.gyp:test_support',
+         '<(webrtc_root)/test/metrics.gyp:metrics',
+         'webrtc_video_coding',
+         'rtp_rtcp',
+         'webrtc_utility',
+         'video_processing',
+         '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
+      ],
+      'include_dirs': [
+         '../../../interface',
+         '../../codecs/vp8/include',
+         '../../../../system_wrappers/interface',
+          '../../../../common_video/interface',
+         '../source',
+      ],
+      'sources': [
+        # headers
+        '../test/codec_database_test.h',
+        '../test/generic_codec_test.h',
+        '../test/jitter_estimate_test.h',
+        '../test/media_opt_test.h',
+        '../test/mt_test_common.h',
+        '../test/normal_test.h',
+        '../test/quality_modes_test.h',
+        '../test/receiver_tests.h',
+        '../test/release_test.h',
+        '../test/rtp_player.h',
+        '../test/test_callbacks.h',
+        '../test/test_util.h',
+        '../test/video_source.h',
+
+        # sources
+        '../test/codec_database_test.cc',
+        '../test/decode_from_storage_test.cc',
+        '../test/generic_codec_test.cc',
+        '../test/jitter_buffer_test.cc',
+        '../test/media_opt_test.cc',
+        '../test/mt_test_common.cc',
+        '../test/mt_rx_tx_test.cc',
+        '../test/normal_test.cc',
+        '../test/quality_modes_test.cc',
+        '../test/receiver_timing_tests.cc',
+        '../test/rtp_player.cc',
+        '../test/test_callbacks.cc',
+        '../test/test_util.cc',
+        '../test/tester_main.cc',
+        '../test/video_rtp_play_mt.cc',
+        '../test/video_rtp_play.cc',
+        '../test/video_source.cc',
+      ], # source
+    },
+    {
+      'target_name': 'video_coding_unittests',
+      'type': 'executable',
+      'dependencies': [
+        'webrtc_video_coding',
+        '<(webrtc_root)/test/test.gyp:test_support_main',
+        '<(DEPTH)/testing/gtest.gyp:gtest',
+        '<(DEPTH)/testing/gmock.gyp:gmock',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../../../interface',
+        '../../codecs/interface',
+      ],
+      'sources': [
+        '../interface/mock/mock_vcm_callbacks.h',
+        'decoding_state_unittest.cc',
+        'jitter_buffer_unittest.cc',
+        'session_info_unittest.cc',
+        'video_coding_robustness_unittest.cc',
+        'qm_select_unittest.cc',
+      ],
+    },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/modules/video_coding/main/test/codec_database_test.cc b/src/modules/video_coding/main/test/codec_database_test.cc
new file mode 100644
index 0000000..10487bd
--- /dev/null
+++ b/src/modules/video_coding/main/test/codec_database_test.cc
@@ -0,0 +1,408 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Implementation of codec data base test
+// testing is done via the VCM module, no specific CodecDataBase module functionality.
+
+#include "codec_database_test.h"
+
+#include <assert.h>
+#include <stdio.h>
+
+#include "../../../../engine_configurations.h"
+#include "../source/event.h"
+#include "test_callbacks.h"
+#include "test_macros.h"
+#include "test_util.h"
+#include "testsupport/fileutils.h"
+#include "testsupport/metrics/video_metrics.h"
+#include "vp8.h" // for external codecs test
+
+
+using namespace webrtc;
+
+int CodecDataBaseTest::RunTest(CmdArgs& args)
+{
+    VideoCodingModule* vcm = VideoCodingModule::Create(1);
+    CodecDataBaseTest* cdbt = new CodecDataBaseTest(vcm);
+    cdbt->Perform(args);
+    VideoCodingModule::Destroy(vcm);
+    delete cdbt;
+    return 0;
+
+}
+
+CodecDataBaseTest::CodecDataBaseTest(VideoCodingModule* vcm):
+_vcm(vcm),
+_width(0),
+_height(0),
+_lengthSourceFrame(0),
+_timeStamp(0)
+{
+    //
+}
+CodecDataBaseTest::~CodecDataBaseTest()
+{
+    //
+}
+void
+CodecDataBaseTest::Setup(CmdArgs& args)
+{
+    _inname= args.inputFile;
+    _width = args.width;
+    _height = args.height;
+    _frameRate = args.frameRate;
+    _lengthSourceFrame  = 3*_width*_height/2;
+    if (args.outputFile.compare(""))
+        _outname = test::OutputPath() + "CDBtest_decoded.yuv";
+    else
+        _outname = args.outputFile;
+    _outname = args.outputFile;
+    _encodedName = test::OutputPath() + "CDBtest_encoded.vp8";
+
+    if ((_sourceFile = fopen(_inname.c_str(), "rb")) == NULL)
+    {
+        printf("Cannot read file %s.\n", _inname.c_str());
+        exit(1);
+    }
+
+    if ((_encodedFile = fopen(_encodedName.c_str(), "wb")) == NULL)
+    {
+        printf("Cannot write encoded file.\n");
+        exit(1);
+    }
+
+    if ((_decodedFile = fopen(_outname.c_str(),  "wb")) == NULL)
+    {
+        printf("Cannot write file %s.\n", _outname.c_str());
+        exit(1);
+    }
+
+    return;
+}
+
+
+
+WebRtc_Word32
+CodecDataBaseTest::Perform(CmdArgs& args)
+{
+#ifndef VIDEOCODEC_VP8
+    assert(false);
+#endif
+    Setup(args);
+    EventWrapper* waitEvent = EventWrapper::Create();
+
+    /**************************/
+    /* General Sanity Checks */
+    /************************/
+    VideoCodec sendCodec, receiveCodec;
+    TEST(VideoCodingModule::NumberOfCodecs() > 0);
+    _vcm->InitializeReceiver();
+    _vcm->InitializeSender();
+    VCMDecodeCompleteCallback *_decodeCallback = new VCMDecodeCompleteCallback(_decodedFile);
+    VCMEncodeCompleteCallback *_encodeCompleteCallback = new VCMEncodeCompleteCallback(_encodedFile);
+    _vcm->RegisterReceiveCallback(_decodeCallback);
+    _vcm->RegisterTransportCallback(_encodeCompleteCallback);
+    _encodeCompleteCallback->SetFrameDimensions(_width, _height);
+    // registering the callback - encode and decode with the same vcm (could be later changed)
+    _encodeCompleteCallback->RegisterReceiverVCM(_vcm);
+    // preparing a frame to be encoded
+    VideoFrame sourceFrame;
+    sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
+    WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[_lengthSourceFrame];
+    TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0);
+    sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
+    sourceFrame.SetHeight(_height);
+    sourceFrame.SetWidth(_width);
+    _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
+    sourceFrame.SetTimeStamp(_timeStamp);
+    // Encoder registration
+    TEST (VideoCodingModule::NumberOfCodecs() > 0);
+    TEST(VideoCodingModule::Codec(-1, &sendCodec) == VCM_PARAMETER_ERROR);
+    TEST(VideoCodingModule::Codec(VideoCodingModule::NumberOfCodecs() + 1, &sendCodec) == VCM_PARAMETER_ERROR);
+    VideoCodingModule::Codec(1, &sendCodec);
+    sendCodec.plType = 0; // random value
+    TEST(_vcm->RegisterSendCodec(&sendCodec, 1, 1440) < 0);
+    _vcm->InitializeReceiver();
+    _vcm->InitializeSender();
+    _vcm->RegisterReceiveCallback(_decodeCallback);
+    _vcm->RegisterTransportCallback(_encodeCompleteCallback);
+    printf(" \nNumber of Registered Codecs: %d \n\n", VideoCodingModule::NumberOfCodecs());
+    printf("Registered codec names: ");
+    for (int i=0; i < VideoCodingModule::NumberOfCodecs(); i++)
+    {
+        VideoCodingModule::Codec(i, &sendCodec);
+        printf("%s   ", sendCodec.plName);
+    }
+    printf("\n\nVerify that all requested codecs are used\n \n \n");
+
+    // Testing with VP8.
+    VideoCodingModule::Codec(kVideoCodecVP8, &sendCodec);
+    _vcm->RegisterSendCodec(&sendCodec, 1, 1440);
+    _encodeCompleteCallback->SetCodecType(kRTPVideoVP8);
+    _vcm->InitializeReceiver();
+    TEST (_vcm->AddVideoFrame(sourceFrame) == VCM_OK );
+    _vcm->InitializeSender();
+    TEST (_vcm->AddVideoFrame(sourceFrame) < 0 );
+
+    // Test changing frame size while keeping the same payload type
+    VideoCodingModule::Codec(0, &sendCodec);
+    sendCodec.width = 352;
+    sendCodec.height = 288;
+    VideoCodec currentSendCodec;
+    _vcm->RegisterSendCodec(&sendCodec, 1, 1440);
+    _vcm->SendCodec(&currentSendCodec);
+    TEST(currentSendCodec.width == sendCodec.width &&
+        currentSendCodec.height == sendCodec.height);
+    sendCodec.width = 352/2;
+    sendCodec.height = 288/2;
+    _vcm->RegisterSendCodec(&sendCodec, 1, 1440);
+    _vcm->SendCodec(&currentSendCodec);
+    TEST(currentSendCodec.width == sendCodec.width &&
+        currentSendCodec.height == sendCodec.height);
+
+    delete _decodeCallback;
+    _decodeCallback = NULL;
+    delete _encodeCompleteCallback;
+    _encodeCompleteCallback = NULL;
+
+    VCMEncodeCompleteCallback *_encodeCallback = new VCMEncodeCompleteCallback(_encodedFile);
+
+    /*************************/
+    /* External codecs       */
+    /*************************/
+
+
+    _vcm->InitializeReceiver();
+    VP8Decoder* decoder = VP8Decoder::Create();
+    VideoCodec vp8DecSettings;
+    VideoCodingModule::Codec(kVideoCodecVP8, &vp8DecSettings);
+    TEST(_vcm->RegisterExternalDecoder(decoder, vp8DecSettings.plType, false) == VCM_OK);
+    TEST(_vcm->RegisterReceiveCodec(&vp8DecSettings, 1, false) == VCM_OK);
+    VP8Encoder* encoder = VP8Encoder::Create();
+    VideoCodec vp8EncSettings;
+    VideoCodingModule::Codec(kVideoCodecVP8, &vp8EncSettings);
+    _vcm->RegisterTransportCallback(_encodeCallback); // encode returns error if callback uninitialized
+    _encodeCallback->RegisterReceiverVCM(_vcm);
+    _encodeCallback->SetCodecType(kRTPVideoVP8);
+    TEST(_vcm->RegisterExternalEncoder(encoder, vp8EncSettings.plType) == VCM_OK);
+    TEST(_vcm->RegisterSendCodec(&vp8EncSettings, 4, 1440) == VCM_OK);
+    TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+    TEST(_vcm->Decode() == VCM_OK);
+    waitEvent->Wait(33);
+    _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
+    sourceFrame.SetTimeStamp(_timeStamp);
+    TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+    TEST(_vcm->Decode() == VCM_OK);
+
+    // De-register and try again.
+    TEST(_vcm->RegisterExternalDecoder(NULL, vp8DecSettings.plType, false) == VCM_OK);
+    TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+    TEST(_vcm->Decode() < 0); // Expect an error since we have de-registered the decoder
+    TEST(_vcm->RegisterExternalEncoder(NULL, vp8DecSettings.plType) == VCM_OK);
+    TEST(_vcm->AddVideoFrame(sourceFrame) < 0); // No send codec registered
+
+    delete decoder;
+    decoder = NULL;
+    delete encoder;
+    encoder = NULL;
+
+    /***************************************
+     * Test the "require key frame" setting*
+     ***************************************/
+
+    TEST(_vcm->InitializeSender() == VCM_OK);
+    TEST(_vcm->InitializeReceiver() == VCM_OK);
+    VideoCodingModule::Codec(kVideoCodecVP8, &receiveCodec);
+    receiveCodec.height = _height;
+    receiveCodec.width = _width;
+    TEST(_vcm->RegisterSendCodec(&receiveCodec, 4, 1440) == VCM_OK);
+    TEST(_vcm->RegisterReceiveCodec(&receiveCodec, 1, true) == VCM_OK); // Require key frame
+    _vcm->RegisterTransportCallback(_encodeCallback); // encode returns error if callback uninitialized
+    _encodeCallback->RegisterReceiverVCM(_vcm);
+    _encodeCallback->SetCodecType(kRTPVideoVP8);
+    TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+    TEST(_vcm->Decode() == VCM_OK);
+    TEST(_vcm->ResetDecoder() == VCM_OK);
+    waitEvent->Wait(33);
+    _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
+    sourceFrame.SetTimeStamp(_timeStamp);
+    TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+    // Try to decode a delta frame. Should get a warning since we have enabled the "require key frame" setting
+    // and because no frame type request callback has been registered.
+    TEST(_vcm->Decode() == VCM_MISSING_CALLBACK);
+    TEST(_vcm->IntraFrameRequest() == VCM_OK);
+    _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
+    sourceFrame.SetTimeStamp(_timeStamp);
+    TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+    TEST(_vcm->Decode() == VCM_OK);
+
+    // Make sure we can register another codec with the same
+    // payload type without crash.
+    _vcm->InitializeReceiver();
+    sendCodec.width = _width;
+    sendCodec.height = _height;
+    TEST(_vcm->RegisterReceiveCodec(&sendCodec, 1) == VCM_OK);
+    TEST(_vcm->IntraFrameRequest() == VCM_OK);
+    waitEvent->Wait(33);
+    _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
+    sourceFrame.SetTimeStamp(_timeStamp);
+    TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+    TEST(_vcm->Decode() == VCM_OK);
+    TEST(_vcm->RegisterReceiveCodec(&sendCodec, 1) == VCM_OK);
+    waitEvent->Wait(33);
+    _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
+    sourceFrame.SetTimeStamp(_timeStamp);
+    TEST(_vcm->IntraFrameRequest() == VCM_OK);
+    TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+    TEST(_vcm->Decode() == VCM_OK);
+    TEST(_vcm->ResetDecoder() == VCM_OK);
+
+    delete _encodeCallback;
+
+    /*************************/
+    /* Send/Receive Control */
+    /***********************/
+    /*
+    1. check available codecs (N)
+    2. register all corresponding decoders
+    3. encode 300/N frames with each encoder, and hope to properly decode
+    4. encode without a matching decoder - expect an error
+    */
+    rewind(_sourceFile);
+    _vcm->InitializeReceiver();
+    _vcm->InitializeSender();
+    sourceFrame.Free();
+    VCMDecodeCompleteCallback* decodeCallCDT = new VCMDecodeCompleteCallback(_decodedFile);
+    VCMEncodeCompleteCallback* encodeCallCDT = new VCMEncodeCompleteCallback(_encodedFile);
+    _vcm->RegisterReceiveCallback(decodeCallCDT);
+    _vcm->RegisterTransportCallback(encodeCallCDT);
+    encodeCallCDT->RegisterReceiverVCM(_vcm);
+    if (VideoCodingModule::NumberOfCodecs() > 0)
+    {
+        // Register all available decoders.
+        int i, j;
+        //double psnr;
+        sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
+        _vcm->RegisterReceiveCallback(decodeCallCDT);
+        for (i=0; i < VideoCodingModule::NumberOfCodecs(); i++)
+        {
+            VideoCodingModule::Codec(i, &receiveCodec);
+            if (strcmp(receiveCodec.plName, "I420") == 0)
+            {
+                receiveCodec.height = _height;
+                receiveCodec.width = _width;
+            }
+            _vcm->RegisterReceiveCodec(&receiveCodec, 1);
+        }
+        // start encoding - iterating over available encoders
+        _vcm->RegisterTransportCallback(encodeCallCDT);
+        encodeCallCDT->RegisterReceiverVCM(_vcm);
+        encodeCallCDT->Initialize();
+        int frameCnt = 0;
+        for (i=0; i < VideoCodingModule::NumberOfCodecs(); i++)
+        {
+            encodeCallCDT->ResetByteCount();
+            VideoCodingModule::Codec(i, &sendCodec);
+            sendCodec.height = _height;
+            sendCodec.width = _width;
+            sendCodec.startBitrate = 1000;
+            sendCodec.maxBitrate = 8000;
+            encodeCallCDT->SetFrameDimensions(_width, _height);
+            encodeCallCDT->SetCodecType(ConvertCodecType(sendCodec.plName));
+            TEST(_vcm->RegisterSendCodec(&sendCodec, 1, 1440) == VCM_OK);
+
+            // We disable the frame dropper to avoid dropping frames due to
+            // bad rate control. This isn't a codec performance test, and the
+            // I420 codec is expected to produce too many bits.
+            _vcm->EnableFrameDropper(false);
+
+            printf("Encoding with %s \n\n", sendCodec.plName);
+            for (j=0; j < int(300/VideoCodingModule::NumberOfCodecs()); j++)// assuming 300 frames, NumberOfCodecs <= 10
+            {
+                frameCnt++;
+                TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0);
+                // building source frame
+                sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
+                sourceFrame.SetHeight(_height);
+                sourceFrame.SetWidth(_width);
+                sourceFrame.SetLength(_lengthSourceFrame);
+                _timeStamp += (WebRtc_UWord32)(9e4 / _frameRate);
+                sourceFrame.SetTimeStamp(_timeStamp);
+                // send frame to the encoder
+                TEST (_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+                waitEvent->Wait(33); // was 100
+
+                int ret =_vcm->Decode();
+                TEST(ret == 0);
+                if (ret < 0)
+                {
+                    printf("Error #%d in frame number %d \n",ret, frameCnt);
+                }
+                 // verifying matching payload types:
+                _vcm->SendCodec(&sendCodec);
+                _vcm->ReceiveCodec(&receiveCodec);
+                TEST(sendCodec.plType == receiveCodec.plType);
+                if (sendCodec.plType != receiveCodec.plType)
+                {
+                    printf("frame number:%d\n",frameCnt);
+                }
+            } // end for:encode-decode
+           // byte count for codec specific
+
+            printf("Total bytes encoded: %f \n\n",(8.0/1000)*(encodeCallCDT->EncodedBytes()/((int)10/VideoCodingModule::NumberOfCodecs())));
+            // decode what's left in the buffer....
+            _vcm->Decode();
+            _vcm->Decode();
+            // Don't measure PSNR for I420 since it will be perfect.
+            if (sendCodec.codecType != kVideoCodecI420) {
+                webrtc::test::QualityMetricsResult psnr;
+                I420PSNRFromFiles(_inname.c_str(), _outname.c_str(), _width,
+                                  _height, &psnr);
+                printf("\n @ %d KBPS:  ", sendCodec.startBitrate);
+                printf("PSNR from encoder-decoder send-receive control test"
+                       "is %f\n\n", psnr.average);
+            }
+        } // end: iterate codecs
+        rewind(_sourceFile);
+        sourceFrame.Free();
+        delete [] tmpBuffer;
+        delete decodeCallCDT;
+        delete encodeCallCDT;
+        // closing and calculating PSNR for prior encoder-decoder test
+        TearDown(); // closing open files
+    } // end of #codecs >1
+
+    delete waitEvent;
+    Print();
+    return 0;
+}
+void
+CodecDataBaseTest::Print()
+{
+    printf("\nVCM Codec DataBase Test: \n\n%i tests completed\n", vcmMacrosTests);
+    if (vcmMacrosErrors > 0)
+    {
+        printf("%i FAILED\n\n", vcmMacrosErrors);
+    }
+    else
+    {
+        printf("ALL PASSED\n\n");
+    }
+}
+
+void
+CodecDataBaseTest::TearDown()
+{
+    fclose(_sourceFile);
+    fclose(_decodedFile);
+    fclose(_encodedFile);
+    return;
+}
diff --git a/src/modules/video_coding/main/test/codec_database_test.h b/src/modules/video_coding/main/test/codec_database_test.h
new file mode 100644
index 0000000..cc33e05
--- /dev/null
+++ b/src/modules/video_coding/main/test/codec_database_test.h
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_CODEC_DATABASE_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_CODEC_DATABASE_TEST_H_
+
+#include "video_coding.h"
+#include "test_util.h"
+
+#include <string.h>
+
+/*
+Test consists of:
+1. Sanity chacks: Send and Receive side (bad input, etc. )
+2. Send-side control (encoder registration etc.)
+3. Decoder-side control - encode with various encoders, and verify correct decoding
+*/
+
+class CodecDataBaseTest
+{
+public:
+    CodecDataBaseTest(webrtc::VideoCodingModule* vcm);
+    ~CodecDataBaseTest();
+    static int RunTest(CmdArgs& args);
+    WebRtc_Word32 Perform(CmdArgs& args);
+private:
+    void TearDown();
+    void Setup(CmdArgs& args);
+    void Print();
+    webrtc::VideoCodingModule*       _vcm;
+    std::string                      _inname;
+    std::string                      _outname;
+    std::string                      _encodedName;
+    FILE*                            _sourceFile;
+    FILE*                            _decodedFile;
+    FILE*                            _encodedFile;
+    WebRtc_UWord16                   _width;
+    WebRtc_UWord16                   _height;
+    WebRtc_UWord32                   _lengthSourceFrame;
+    WebRtc_UWord32                   _timeStamp;
+    float                            _frameRate;
+}; // end of codecDBTest class definition
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_CODEC_DATABASE_TEST_H_
diff --git a/src/modules/video_coding/main/test/decode_from_storage_test.cc b/src/modules/video_coding/main/test/decode_from_storage_test.cc
new file mode 100644
index 0000000..628d509
--- /dev/null
+++ b/src/modules/video_coding/main/test/decode_from_storage_test.cc
@@ -0,0 +1,177 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "receiver_tests.h"
+#include "video_coding.h"
+#include "rtp_rtcp.h"
+#include "trace.h"
+#include "../source/event.h"
+#include "rtp_player.h"
+#include "modules/video_coding/main/source/mock/fake_tick_time.h"
+
+using namespace webrtc;
+
+class FrameStorageCallback : public VCMFrameStorageCallback
+{
+public:
+    FrameStorageCallback(VideoCodingModule* vcm) : _vcm(vcm) {}
+
+    WebRtc_Word32 StoreReceivedFrame(const EncodedVideoData& frameToStore)
+    {
+        _vcm->DecodeFromStorage(frameToStore);
+        return VCM_OK;
+    }
+
+private:
+    VideoCodingModule* _vcm;
+};
+
+int DecodeFromStorageTest(CmdArgs& args)
+{
+    // Make sure this test isn't executed without simulated events.
+#if !defined(EVENT_DEBUG)
+    return -1;
+#endif
+    // BEGIN Settings
+
+    bool protectionEnabled = false;
+    VCMVideoProtection protectionMethod = kProtectionNack;
+    WebRtc_UWord32 rttMS = 100;
+    float lossRate = 0.00f;
+    bool reordering = false;
+    WebRtc_UWord32 renderDelayMs = 0;
+    WebRtc_UWord32 minPlayoutDelayMs = 0;
+    const WebRtc_Word64 MAX_RUNTIME_MS = -1;
+    std::string rtpFilename = args.inputFile;
+    std::string outFilename = args.outputFile;
+    if (outFilename == "")
+        outFilename = test::OutputPath() + "DecodeFromStorage.yuv";
+
+    FrameReceiveCallback receiveCallback(outFilename.c_str());
+
+    // END Settings
+
+    Trace::CreateTrace();
+    Trace::SetTraceFile(
+        (test::OutputPath() + "decodeFromStorageTestTrace.txt").c_str());
+    Trace::SetLevelFilter(webrtc::kTraceAll);
+
+
+    FakeTickTime clock(0);
+    // TODO(hlundin): This test was not verified after changing to FakeTickTime.
+    VideoCodingModule* vcm = VideoCodingModule::Create(1, &clock);
+    VideoCodingModule* vcmPlayback = VideoCodingModule::Create(2, &clock);
+    FrameStorageCallback storageCallback(vcmPlayback);
+    RtpDataCallback dataCallback(vcm);
+    WebRtc_Word32 ret = vcm->InitializeReceiver();
+    if (ret < 0)
+    {
+        return -1;
+    }
+    ret = vcmPlayback->InitializeReceiver();
+    if (ret < 0)
+    {
+        return -1;
+    }
+    vcm->RegisterFrameStorageCallback(&storageCallback);
+    vcmPlayback->RegisterReceiveCallback(&receiveCallback);
+    RTPPlayer rtpStream(rtpFilename.c_str(), &dataCallback, &clock);
+    PayloadTypeList payloadTypes;
+    payloadTypes.push_front(new PayloadCodecTuple(VCM_VP8_PAYLOAD_TYPE, "VP8",
+                                                  kVideoCodecVP8));
+
+    // Register receive codecs in VCM
+    for (PayloadTypeList::iterator it = payloadTypes.begin();
+        it != payloadTypes.end(); ++it) {
+        PayloadCodecTuple* payloadType = *it;
+        if (payloadType != NULL)
+        {
+            VideoCodec codec;
+            memset(&codec, 0, sizeof(codec));
+            strncpy(codec.plName, payloadType->name.c_str(), payloadType->name.length());
+            codec.plName[payloadType->name.length()] = '\0';
+            codec.plType = payloadType->payloadType;
+            codec.codecType = payloadType->codecType;
+            if (vcm->RegisterReceiveCodec(&codec, 1) < 0)
+            {
+                return -1;
+            }
+            if (vcmPlayback->RegisterReceiveCodec(&codec, 1) < 0)
+            {
+                return -1;
+            }
+        }
+    }
+    if (rtpStream.Initialize(&payloadTypes) < 0)
+    {
+        return -1;
+    }
+    bool nackEnabled = protectionEnabled && (protectionMethod == kProtectionNack ||
+                                            protectionMethod == kProtectionDualDecoder);
+    rtpStream.SimulatePacketLoss(lossRate, nackEnabled, rttMS);
+    rtpStream.SetReordering(reordering);
+    vcm->SetChannelParameters(0, 0, rttMS);
+    vcm->SetVideoProtection(protectionMethod, protectionEnabled);
+    vcm->SetRenderDelay(renderDelayMs);
+    vcm->SetMinimumPlayoutDelay(minPlayoutDelayMs);
+
+    ret = 0;
+
+    // RTP stream main loop
+    while ((ret = rtpStream.NextPacket(clock.MillisecondTimestamp())) == 0)
+    {
+        if (clock.MillisecondTimestamp() % 5 == 0)
+        {
+            ret = vcm->Decode();
+            if (ret < 0)
+            {
+                return -1;
+            }
+        }
+        if (vcm->TimeUntilNextProcess() <= 0)
+        {
+            vcm->Process();
+        }
+        if (MAX_RUNTIME_MS > -1 && clock.MillisecondTimestamp() >= MAX_RUNTIME_MS)
+        {
+            break;
+        }
+        clock.IncrementDebugClock(1);
+    }
+
+    switch (ret)
+    {
+    case 1:
+        printf("Success\n");
+        break;
+    case -1:
+        printf("Failed\n");
+        break;
+    case 0:
+        printf("Timeout\n");
+        break;
+    }
+
+    rtpStream.Print();
+
+    // Tear down
+    while (!payloadTypes.empty())
+    {
+        delete payloadTypes.front();
+        payloadTypes.pop_front();
+    }
+    VideoCodingModule::Destroy(vcm);
+    vcm = NULL;
+    VideoCodingModule::Destroy(vcmPlayback);
+    vcmPlayback = NULL;
+    Trace::ReturnTrace();
+
+    return 0;
+}
diff --git a/src/modules/video_coding/main/test/generic_codec_test.cc b/src/modules/video_coding/main/test/generic_codec_test.cc
new file mode 100644
index 0000000..773f7ab
--- /dev/null
+++ b/src/modules/video_coding/main/test/generic_codec_test.cc
@@ -0,0 +1,592 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "generic_codec_test.h"
+#include <cmath>
+#include <stdio.h>
+#include "../source/event.h"
+#include "rtp_rtcp.h"
+#include "module_common_types.h"
+#include "test_macros.h"
+#include "modules/video_coding/main/source/mock/fake_tick_time.h"
+
+using namespace webrtc;
+
+enum { kMaxWaitEncTimeMs = 100 };
+
+int GenericCodecTest::RunTest(CmdArgs& args)
+{
+#if !defined(EVENT_DEBUG)
+    printf("\n\nEnable debug events to run this test!\n\n");
+    return -1;
+#endif
+    FakeTickTime clock(0);
+    VideoCodingModule* vcm = VideoCodingModule::Create(1, &clock);
+    GenericCodecTest* get = new GenericCodecTest(vcm, &clock);
+    Trace::CreateTrace();
+    Trace::SetTraceFile(
+        (test::OutputPath() + "genericCodecTestTrace.txt").c_str());
+    Trace::SetLevelFilter(webrtc::kTraceAll);
+    get->Perform(args);
+    Trace::ReturnTrace();
+    delete get;
+    VideoCodingModule::Destroy(vcm);
+    return 0;
+}
+
+GenericCodecTest::GenericCodecTest(VideoCodingModule* vcm, FakeTickTime* clock):
+_clock(clock),
+_vcm(vcm),
+_width(0),
+_height(0),
+_frameRate(0),
+_lengthSourceFrame(0),
+_timeStamp(0)
+{
+}
+
+GenericCodecTest::~GenericCodecTest()
+{
+}
+
+void
+GenericCodecTest::Setup(CmdArgs& args)
+{
+    _timeStamp = 0;
+
+    /* Test Sequence parameters */
+
+    _inname= args.inputFile;
+    if (args.outputFile.compare(""))
+        _outname = test::OutputPath() + "GCTest_decoded.yuv";
+    else
+        _outname = args.outputFile;
+    _encodedName = test::OutputPath() + "GCTest_encoded.vp8";
+    _width = args.width;
+    _height = args.height;
+    _frameRate = args.frameRate;
+    _lengthSourceFrame  = 3*_width*_height/2;
+
+    /* File settings */
+
+    if ((_sourceFile = fopen(_inname.c_str(), "rb")) == NULL)
+    {
+        printf("Cannot read file %s.\n", _inname.c_str());
+        exit(1);
+    }
+    if ((_encodedFile = fopen(_encodedName.c_str(), "wb")) == NULL)
+    {
+        printf("Cannot write encoded file.\n");
+        exit(1);
+    }
+    if ((_decodedFile = fopen(_outname.c_str(),  "wb")) == NULL)
+    {
+        printf("Cannot write file %s.\n", _outname.c_str());
+        exit(1);
+    }
+
+    return;
+}
+WebRtc_Word32
+GenericCodecTest::Perform(CmdArgs& args)
+{
+    WebRtc_Word32 ret;
+    Setup(args);
+    /*
+    1. sanity checks
+    2. encode/decoder individuality
+    3. API testing
+    4. Target bitrate (within a specific timespan)
+    5. Pipeline Delay
+    */
+
+    /*******************************/
+    /* sanity checks on inputs    */
+    /*****************************/
+    VideoCodec sendCodec, receiveCodec;
+    sendCodec.maxBitrate = 8000;
+    TEST(_vcm->NumberOfCodecs() > 0); // This works since we now initialize the list in the constructor
+    TEST(_vcm->Codec(0, &sendCodec)  == VCM_OK);
+    _vcm->InitializeSender();
+    _vcm->InitializeReceiver();
+    WebRtc_Word32 NumberOfCodecs = _vcm->NumberOfCodecs();
+    // registration of first codec in the list
+    int i = 0;
+    _vcm->Codec(0, &_sendCodec);
+    TEST(_vcm->RegisterSendCodec(&_sendCodec, 4, 1440) == VCM_OK);
+    // sanity on encoder registration
+    VideoFrame sourceFrame;
+    sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
+    _vcm->InitializeSender();
+    TEST(_vcm->Codec(kVideoCodecVP8, &sendCodec) == 0);
+    TEST(_vcm->RegisterSendCodec(&sendCodec, -1, 1440) < 0); // bad number of cores
+    sendCodec.maxBitrate = 8000;
+    _vcm->RegisterSendCodec(&sendCodec, 1, 1440);
+    _vcm->InitializeSender();
+    _vcm->Codec(kVideoCodecVP8, &sendCodec);
+    sendCodec.height = 0;
+    TEST(_vcm->RegisterSendCodec(&sendCodec, 1, 1440) < 0); // bad height
+    _vcm->Codec(kVideoCodecVP8, &sendCodec);
+    sendCodec.startBitrate = -2;
+    TEST(_vcm->RegisterSendCodec(&sendCodec, 1, 1440) < 0); // bad bit rate
+    _vcm->Codec(kVideoCodecVP8, &sendCodec);
+    _vcm->InitializeSender();
+    TEST(_vcm->SetChannelParameters(100, 0, 0) < 0);// setting rate when encoder uninitialized
+    // register all availbale decoders -- need to have more for this test
+    for (i=0; i< NumberOfCodecs; i++)
+    {
+        _vcm->Codec(i, &receiveCodec);
+        _vcm->RegisterReceiveCodec(&receiveCodec, 1);
+    }
+    WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[_lengthSourceFrame];
+    TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0);
+    // building source frame
+    sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
+    sourceFrame.SetHeight(_height);
+    sourceFrame.SetWidth(_width);
+    sourceFrame.SetTimeStamp(_timeStamp++);
+    // encode/decode
+    TEST(_vcm->AddVideoFrame(sourceFrame) < 0 ); // encoder uninitialized
+    _vcm->InitializeReceiver();
+    TEST(_vcm->SetChannelParameters(100, 0, 0) < 0);// setting rtt when receiver uninitialized
+
+      /**************************************/
+     /* encoder/decoder individuality test */
+    /**************************************/
+    //Register both encoder and decoder, reset decoder - encode, set up decoder, reset encoder - decode.
+    rewind(_sourceFile);
+    sourceFrame.Free();
+    _vcm->InitializeReceiver();
+    _vcm->InitializeSender();
+    NumberOfCodecs = _vcm->NumberOfCodecs();
+    // Register VP8
+    _vcm->Codec(kVideoCodecVP8, &_sendCodec);
+    _vcm->RegisterSendCodec(&_sendCodec, 4, 1440);
+    _vcm->SendCodec(&sendCodec);
+    sendCodec.startBitrate = 2000;
+
+    // Set target frame rate to half of the incoming frame rate
+    // to test the frame rate control in the VCM
+    sendCodec.maxFramerate = (WebRtc_UWord8)(_frameRate / 2);
+    sendCodec.width = _width;
+    sendCodec.height = _height;
+    TEST(strncmp(_sendCodec.plName, "VP8", 3) == 0); // was VP8
+
+    _decodeCallback = new VCMDecodeCompleteCallback(_decodedFile);
+    _encodeCompleteCallback = new VCMEncodeCompleteCallback(_encodedFile);
+    _vcm->RegisterReceiveCallback(_decodeCallback);
+    _vcm->RegisterTransportCallback(_encodeCompleteCallback);
+    _encodeCompleteCallback->RegisterReceiverVCM(_vcm);
+
+    _vcm->RegisterSendCodec(&sendCodec, 4, 1440);
+    _encodeCompleteCallback->SetCodecType(ConvertCodecType(sendCodec.plName));
+
+    _vcm->InitializeReceiver();
+    _vcm->Process();
+
+    //encoding 1 second of video
+    for (i = 0; i < _frameRate; i++)
+    {
+        TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0);
+        sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
+        sourceFrame.SetHeight(_height);
+        sourceFrame.SetWidth(_width);
+        _timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_frameRate));
+        sourceFrame.SetTimeStamp(_timeStamp);
+        TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+        IncrementDebugClock(_frameRate);
+        _vcm->Process();
+    }
+    sendCodec.maxFramerate = (WebRtc_UWord8)_frameRate;
+    _vcm->InitializeSender();
+    TEST(_vcm->RegisterReceiveCodec(&sendCodec, 1) == VCM_OK); // same codec for encode and decode
+    ret = 0;
+    i = 0;
+    while ((i < 25) && (ret == 0) )
+    {
+        ret = _vcm->Decode();
+        TEST(ret == VCM_OK);
+        if (ret < 0)
+        {
+            printf("error in frame # %d \n", i);
+        }
+        IncrementDebugClock(_frameRate);
+        i++;
+    }
+    //TEST((ret == 0) && (i = 50));
+    if (ret == 0)
+    {
+        printf("Encoder/Decoder individuality test complete - View output files \n");
+    }
+    // last frame - not decoded
+    _vcm->InitializeReceiver();
+    TEST(_vcm->Decode() < 0); // frame to be encoded exists, decoder uninitialized
+
+
+    // Test key frame request on packet loss mode.
+    // This a frame as a key frame and fooling the receiver
+    // that the last packet was lost. The decoding will succeed,
+    // but the VCM will see a packet loss and request a new key frame.
+    VCMEncComplete_KeyReqTest keyReqTest_EncCompleteCallback(*_vcm);
+    KeyFrameReqTest frameTypeCallback;
+    _vcm->RegisterTransportCallback(&keyReqTest_EncCompleteCallback);
+    _encodeCompleteCallback->RegisterReceiverVCM(_vcm);
+    _vcm->RegisterSendCodec(&sendCodec, 4, 1440);
+    _encodeCompleteCallback->SetCodecType(ConvertCodecType(sendCodec.plName));
+    TEST(_vcm->SetVideoProtection(kProtectionKeyOnKeyLoss, true) == VCM_OK);
+    TEST(_vcm->RegisterFrameTypeCallback(&frameTypeCallback) == VCM_OK);
+    TEST(_vcm->RegisterReceiveCodec(&sendCodec, 1) == VCM_OK);
+    TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+    _timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_frameRate));
+    sourceFrame.SetTimeStamp(_timeStamp);
+    // First packet of a subsequent frame required before the jitter buffer
+    // will allow decoding an incomplete frame.
+    TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+    TEST(_vcm->Decode() == VCM_OK);
+
+    printf("API tests complete \n");
+
+     /*******************/
+    /* Bit Rate Tests */
+    /*****************/
+    /* Requirements:
+    * 1. OneSecReq = 15 % above/below target over a time period of 1s (_frameRate number of frames)
+    * 3. FullReq  = 10% for total seq. (for 300 frames/seq. coincides with #1)
+    * 4. Test will go over all registered codecs
+    //NOTE: time requirements are not part of the release tests
+    */
+    double FullReq   =  0.1;
+    //double OneSecReq = 0.15;
+    printf("\n RATE CONTROL TEST\n");
+    // initializing....
+    _vcm->InitializeSender();
+    _vcm->InitializeReceiver();
+    rewind(_sourceFile);
+    sourceFrame.Free();
+    sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
+    const float bitRate[] = {100, 400, 600, 1000, 2000};
+    const float nBitrates = sizeof(bitRate)/sizeof(*bitRate);
+    float _bitRate = 0;
+    int _frameCnt = 0;
+    float totalBytesOneSec;//, totalBytesTenSec;
+    float totalBytes, actualBitrate;
+    VCMFrameCount frameCount; // testing frame type counters
+    // start test
+    NumberOfCodecs = _vcm->NumberOfCodecs();
+    // going over all available codecs
+    _encodeCompleteCallback->SetFrameDimensions(_width, _height);
+    SendStatsTest sendStats;
+    for (int k = 0; k < NumberOfCodecs; k++)
+    //for (int k = NumberOfCodecs - 1; k >=0; k--)
+    {// static list starts from 0
+        //just checking
+        _vcm->InitializeSender();
+        _sendCodec.maxBitrate = 8000;
+        TEST(_vcm->Codec(k, &_sendCodec)== VCM_OK);
+        _vcm->RegisterSendCodec(&_sendCodec, 1, 1440);
+        _vcm->RegisterTransportCallback(_encodeCompleteCallback);
+        _encodeCompleteCallback->SetCodecType(ConvertCodecType(_sendCodec.plName));
+        printf (" \n\n Codec type = %s \n\n",_sendCodec.plName);
+        for (i = 0; i < nBitrates; i++)
+        {
+             _bitRate = static_cast<float>(bitRate[i]);
+            // just testing
+            _vcm->InitializeSender();
+            _sendCodec.startBitrate = (int)_bitRate;
+            _sendCodec.maxBitrate = 8000;
+            _sendCodec.maxFramerate = _frameRate;
+            _vcm->RegisterSendCodec(&_sendCodec, 1, 1440);
+            _vcm->RegisterTransportCallback(_encodeCompleteCallback);
+            // up to here
+            _vcm->SetChannelParameters((WebRtc_UWord32)_bitRate, 0, 20);
+            _frameCnt = 0;
+            totalBytes = 0;
+            _encodeCompleteCallback->Initialize();
+            sendStats.SetTargetFrameRate(static_cast<WebRtc_UWord32>(_frameRate));
+            _vcm->RegisterSendStatisticsCallback(&sendStats);
+            while (fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) ==
+                _lengthSourceFrame)
+            {
+                _frameCnt++;
+                sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
+                sourceFrame.SetHeight(_height);
+                sourceFrame.SetWidth(_width);
+                _timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_frameRate));
+                sourceFrame.SetTimeStamp(_timeStamp);
+
+                ret = _vcm->AddVideoFrame(sourceFrame);
+                IncrementDebugClock(_frameRate);
+                // The following should be uncommneted for timing tests. Release tests only include
+                // compliance with full sequence bit rate.
+
+
+                //totalBytes = WaitForEncodedFrame();
+                //currentTime = VCMTickTime::MillisecondTimestamp();//clock()/(double)CLOCKS_PER_SEC;
+                if (_frameCnt == _frameRate)// @ 1sec
+                {
+                    totalBytesOneSec =  _encodeCompleteCallback->EncodedBytes();//totalBytes;
+                }
+                TEST(_vcm->TimeUntilNextProcess() >= 0);
+            } // video seq. encode done
+            TEST(_vcm->TimeUntilNextProcess() == 0);
+            _vcm->Process(); // Let the module calculate its send bit rate estimate
+            // estimating rates
+            // complete sequence
+            // bit rate assumes input frame rate is as specified
+            totalBytes = _encodeCompleteCallback->EncodedBytes();
+            actualBitrate = (float)(8.0/1000)*(totalBytes / (_frameCnt / _frameRate));
+
+            printf("Complete Seq.: target bitrate: %.0f kbps, actual bitrate: %.1f kbps\n", _bitRate, actualBitrate);
+            TEST((fabs(actualBitrate - _bitRate) < FullReq * _bitRate) ||
+                 (strncmp(_sendCodec.plName, "I420", 4) == 0));
+
+            // 1 Sec.
+            actualBitrate = (float)(8.0/1000)*(totalBytesOneSec);
+            //actualBitrate = (float)(8.0*totalBytesOneSec)/(oneSecTime - startTime);
+            //printf("First 1Sec: target bitrate: %.0f kbps, actual bitrate: %.1f kbps\n", _bitRate, actualBitrate);
+            //TEST(fabs(actualBitrate - _bitRate) < OneSecReq * _bitRate);
+            rewind(_sourceFile);
+
+            //checking key/delta frame count
+            _vcm->SentFrameCount(frameCount);
+            printf("frame count: %d delta, %d key\n", frameCount.numDeltaFrames, frameCount.numKeyFrames);
+        }// end per codec
+
+    } // end rate control test
+    /********************************/
+    /* Encoder Pipeline Delay Test */
+    /******************************/
+    _vcm->InitializeSender();
+    sourceFrame.Free();
+    sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
+    NumberOfCodecs = _vcm->NumberOfCodecs();
+    bool encodeComplete = false;
+    // going over all available codecs
+    for (int k = 0; k < NumberOfCodecs; k++)
+    {
+        _vcm->Codec(k, &_sendCodec);
+        _vcm->InitializeSender();
+        _sendCodec.maxBitrate = 8000;
+        _vcm->RegisterSendCodec(&_sendCodec, 4, 1440);
+        _vcm->RegisterTransportCallback(_encodeCompleteCallback);
+
+        _frameCnt = 0;
+        encodeComplete = false;
+        while (encodeComplete == false)
+        {
+            TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0);
+            _frameCnt++;
+            sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
+            sourceFrame.SetHeight(_height);
+            sourceFrame.SetWidth(_width);
+            _timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_frameRate));
+            sourceFrame.SetTimeStamp(_timeStamp);
+            _vcm->AddVideoFrame(sourceFrame);
+            encodeComplete = _encodeCompleteCallback->EncodeComplete();
+        } // first frame encoded
+        printf ("\n Codec type = %s \n", _sendCodec.plName);
+        printf(" Encoder pipeline delay = %d frames\n", _frameCnt - 1);
+    } // end for all codecs
+
+    /********************************/
+    /* Encoder Packet Size Test     */
+    /********************************/
+    RTPSendCallback_SizeTest sendCallback;
+
+    RtpRtcp::Configuration configuration;
+    configuration.id = 1;
+    configuration.audio = false;
+    configuration.outgoing_transport = &sendCallback;
+
+    RtpRtcp& rtpModule = *RtpRtcp::CreateRtpRtcp(configuration);
+
+    VCMRTPEncodeCompleteCallback encCompleteCallback(&rtpModule);
+    _vcm->InitializeSender();
+
+    // TEST DISABLED FOR NOW SINCE VP8 DOESN'T HAVE THIS FEATURE
+
+//    sourceFrame.Free();
+//    sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
+//    NumberOfCodecs = _vcm->NumberOfCodecs();
+//    WebRtc_UWord32 targetPayloadSize = 500;
+//    rtpModule.SetMaxTransferUnit(targetPayloadSize);
+//    // going over all available codecs
+//    for (int k = 0; k < NumberOfCodecs; k++)
+//    {
+//        _vcm->Codec(k, &_sendCodec);
+//        if (strncmp(_sendCodec.plName, "VP8", 3) == 0)
+//        {
+//            // Only test with VP8
+//            continue;
+//        }
+//        rtpModule.RegisterSendPayload(_sendCodec.plName, _sendCodec.plType);
+//        // Make sure we only get one NAL unit per packet
+//        _vcm->InitializeSender();
+//        _vcm->RegisterSendCodec(&_sendCodec, 4, targetPayloadSize);
+//        sendCallback.SetMaxPayloadSize(targetPayloadSize);
+//        _vcm->RegisterTransportCallback(&encCompleteCallback);
+//        sendCallback.Reset();
+//        _frameCnt = 0;
+//        rewind(_sourceFile);
+//        while (!feof(_sourceFile))
+//        {
+//            fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile);
+//            _frameCnt++;
+//            sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
+//            sourceFrame.SetHeight(_height);
+//            sourceFrame.SetWidth(_width);
+//            _timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_frameRate));
+//            sourceFrame.SetTimeStamp(_timeStamp);
+//            ret = _vcm->AddVideoFrame(sourceFrame);
+//        } // first frame encoded
+//        printf ("\n Codec type = %s \n",_sendCodec.plName);
+//        printf(" Average payload size = %f bytes, target = %u bytes\n", sendCallback.AveragePayloadSize(), targetPayloadSize);
+//    } // end for all codecs
+
+
+    // Test temporal decimation settings
+    for (int k = 0; k < NumberOfCodecs; k++)
+    {
+        _vcm->Codec(k, &_sendCodec);
+        if (strncmp(_sendCodec.plName, "I420", 4) == 0)
+        {
+            // Only test with I420
+            break;
+        }
+    }
+    TEST(strncmp(_sendCodec.plName, "I420", 4) == 0);
+    _vcm->InitializeSender();
+    _sendCodec.maxFramerate = static_cast<WebRtc_UWord8>(_frameRate / 2.0 + 0.5f);
+    _vcm->RegisterSendCodec(&_sendCodec, 4, 1440);
+    _vcm->SetChannelParameters(2000, 0, 0);
+    _vcm->RegisterTransportCallback(_encodeCompleteCallback);
+    // up to here
+    _vcm->SetChannelParameters((WebRtc_UWord32)_bitRate, 0, 20);
+    _encodeCompleteCallback->Initialize();
+    sendStats.SetTargetFrameRate(static_cast<WebRtc_UWord32>(_frameRate));
+    _vcm->RegisterSendStatisticsCallback(&sendStats);
+    rewind(_sourceFile);
+    while (fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) ==
+        _lengthSourceFrame)
+    {
+        sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
+        sourceFrame.SetHeight(_height);
+        sourceFrame.SetWidth(_width);
+        _timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_frameRate));
+        sourceFrame.SetTimeStamp(_timeStamp);
+        ret = _vcm->AddVideoFrame(sourceFrame);
+        if (_vcm->TimeUntilNextProcess() <= 0)
+        {
+            _vcm->Process();
+        }
+        IncrementDebugClock(_frameRate);
+    } // first frame encoded
+
+    delete &rtpModule;
+    Print();
+    delete tmpBuffer;
+    delete _decodeCallback;
+    delete _encodeCompleteCallback;
+    return 0;
+}
+
+
+void
+GenericCodecTest::Print()
+{
+    printf(" \n\n VCM Generic Encoder Test: \n\n%i tests completed\n", vcmMacrosTests);
+    if (vcmMacrosErrors > 0)
+    {
+        printf("%i FAILED\n\n", vcmMacrosErrors);
+    }
+    else
+    {
+        printf("ALL PASSED\n\n");
+    }
+}
+
+float
+GenericCodecTest::WaitForEncodedFrame() const
+{
+    WebRtc_Word64 startTime = _clock->MillisecondTimestamp();
+    while (_clock->MillisecondTimestamp() - startTime < kMaxWaitEncTimeMs*10)
+    {
+        if (_encodeCompleteCallback->EncodeComplete())
+        {
+            return _encodeCompleteCallback->EncodedBytes();
+        }
+    }
+    return 0;
+}
+
+void
+GenericCodecTest::IncrementDebugClock(float frameRate)
+{
+    _clock->IncrementDebugClock(1000/frameRate);
+}
+
+int
+RTPSendCallback_SizeTest::SendPacket(int channel, const void *data, int len)
+{
+    _nPackets++;
+    _payloadSizeSum += len;
+    // Make sure no payloads (len - header size) are larger than maxPayloadSize
+    TEST(len > 0 && static_cast<WebRtc_UWord32>(len - 12) <= _maxPayloadSize);
+    return 0;
+}
+
+void
+RTPSendCallback_SizeTest::SetMaxPayloadSize(WebRtc_UWord32 maxPayloadSize)
+{
+    _maxPayloadSize = maxPayloadSize;
+}
+
+void
+RTPSendCallback_SizeTest::Reset()
+{
+    _nPackets = 0;
+    _payloadSizeSum = 0;
+}
+
+float
+RTPSendCallback_SizeTest::AveragePayloadSize() const
+{
+    if (_nPackets > 0)
+    {
+        return _payloadSizeSum / static_cast<float>(_nPackets);
+    }
+    return 0;
+}
+
+WebRtc_Word32
+VCMEncComplete_KeyReqTest::SendData(
+        const FrameType frameType,
+        const WebRtc_UWord8 payloadType,
+        const WebRtc_UWord32 timeStamp,
+        int64_t capture_time_ms,
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord32 payloadSize,
+        const RTPFragmentationHeader& /*fragmentationHeader*/,
+        const webrtc::RTPVideoHeader* /*videoHdr*/)
+{
+    WebRtcRTPHeader rtpInfo;
+    rtpInfo.header.markerBit = true; // end of frame
+    rtpInfo.type.Video.codecHeader.VP8.InitRTPVideoHeaderVP8();
+    rtpInfo.type.Video.codec = kRTPVideoVP8;
+    rtpInfo.header.payloadType = payloadType;
+    rtpInfo.header.sequenceNumber = _seqNo;
+    _seqNo += 2;
+    rtpInfo.header.ssrc = 0;
+    rtpInfo.header.timestamp = _timeStamp;
+    _timeStamp += 3000;
+    rtpInfo.type.Video.isFirstPacket = false;
+    rtpInfo.frameType = kVideoFrameKey;
+    return _vcm.IncomingPacket(payloadData, payloadSize, rtpInfo);
+}
diff --git a/src/modules/video_coding/main/test/generic_codec_test.h b/src/modules/video_coding/main/test/generic_codec_test.h
new file mode 100644
index 0000000..f60893b
--- /dev/null
+++ b/src/modules/video_coding/main/test/generic_codec_test.h
@@ -0,0 +1,110 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_GENERIC_CODEC_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_GENERIC_CODEC_TEST_H_
+
+#include "video_coding.h"
+
+#include <string.h>
+#include <fstream>
+
+#include "test_callbacks.h"
+#include "test_util.h"
+/*
+Test consists of:
+1. Sanity checks
+2. Bit rate validation
+3. Encoder control test / General API functionality
+4. Decoder control test / General API functionality
+
+*/
+
+namespace webrtc {
+
+int VCMGenericCodecTest(CmdArgs& args);
+
+class FakeTickTime;
+
+class GenericCodecTest
+{
+public:
+    GenericCodecTest(webrtc::VideoCodingModule* vcm,
+                     webrtc::FakeTickTime* clock);
+    ~GenericCodecTest();
+    static int RunTest(CmdArgs& args);
+    WebRtc_Word32 Perform(CmdArgs& args);
+    float WaitForEncodedFrame() const;
+
+private:
+    void Setup(CmdArgs& args);
+    void Print();
+    WebRtc_Word32 TearDown();
+    void IncrementDebugClock(float frameRate);
+
+    webrtc::FakeTickTime*                _clock;
+    webrtc::VideoCodingModule*           _vcm;
+    webrtc::VideoCodec                   _sendCodec;
+    webrtc::VideoCodec                   _receiveCodec;
+    std::string                          _inname;
+    std::string                          _outname;
+    std::string                          _encodedName;
+    WebRtc_Word32                        _sumEncBytes;
+    FILE*                                _sourceFile;
+    FILE*                                _decodedFile;
+    FILE*                                _encodedFile;
+    WebRtc_UWord16                       _width;
+    WebRtc_UWord16                       _height;
+    float                                _frameRate;
+    WebRtc_UWord32                       _lengthSourceFrame;
+    WebRtc_UWord32                       _timeStamp;
+    VCMDecodeCompleteCallback*           _decodeCallback;
+    VCMEncodeCompleteCallback*           _encodeCompleteCallback;
+
+}; // end of GenericCodecTest class definition
+
+class RTPSendCallback_SizeTest : public webrtc::Transport
+{
+public:
+    // constructor input: (receive side) rtp module to send encoded data to
+    RTPSendCallback_SizeTest() : _maxPayloadSize(0), _payloadSizeSum(0), _nPackets(0) {}
+    virtual int SendPacket(int channel, const void *data, int len);
+    virtual int SendRTCPPacket(int channel, const void *data, int len) {return 0;}
+    void SetMaxPayloadSize(WebRtc_UWord32 maxPayloadSize);
+    void Reset();
+    float AveragePayloadSize() const;
+private:
+    WebRtc_UWord32         _maxPayloadSize;
+    WebRtc_UWord32         _payloadSizeSum;
+    WebRtc_UWord32         _nPackets;
+};
+
+class VCMEncComplete_KeyReqTest : public webrtc::VCMPacketizationCallback
+{
+public:
+    VCMEncComplete_KeyReqTest(webrtc::VideoCodingModule &vcm) : _vcm(vcm), _seqNo(0), _timeStamp(0) {}
+    WebRtc_Word32 SendData(
+            const webrtc::FrameType frameType,
+            const WebRtc_UWord8 payloadType,
+            WebRtc_UWord32 timeStamp,
+            int64_t capture_time_ms,
+            const WebRtc_UWord8* payloadData,
+            const WebRtc_UWord32 payloadSize,
+            const webrtc::RTPFragmentationHeader& fragmentationHeader,
+            const webrtc::RTPVideoHeader* videoHdr);
+private:
+    webrtc::VideoCodingModule& _vcm;
+    WebRtc_UWord16 _seqNo;
+    WebRtc_UWord32 _timeStamp;
+}; // end of VCMEncodeCompleteCallback
+
+}  // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_GENERIC_CODEC_TEST_H_
diff --git a/src/modules/video_coding/main/test/jitter_buffer_test.cc b/src/modules/video_coding/main/test/jitter_buffer_test.cc
new file mode 100644
index 0000000..2066983
--- /dev/null
+++ b/src/modules/video_coding/main/test/jitter_buffer_test.cc
@@ -0,0 +1,1936 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <math.h>
+#include <stdio.h>
+
+#include "common_types.h"
+#include "../source/event.h"
+#include "frame_buffer.h"
+#include "inter_frame_delay.h"
+#include "jitter_buffer.h"
+#include "jitter_estimate_test.h"
+#include "jitter_estimator.h"
+#include "media_opt_util.h"
+#include "modules/video_coding/main/source/tick_time_base.h"
+#include "packet.h"
+#include "test_util.h"
+#include "test_macros.h"
+
+// TODO(holmer): Get rid of this to conform with style guide.
+using namespace webrtc;
+
+// TODO (Mikhal/Stefan): Update as gtest and separate to specific tests.
+
+int CheckOutFrame(VCMEncodedFrame* frameOut, unsigned int size, bool startCode)
+{
+    if (frameOut == 0)
+    {
+        return -1;
+    }
+
+    const WebRtc_UWord8* outData = frameOut->Buffer();
+
+    unsigned int i = 0;
+
+    if(startCode)
+    {
+        if (outData[0] != 0 || outData[1] != 0 || outData[2] != 0 ||
+            outData[3] != 1)
+        {
+            return -2;
+        }
+        i+= 4;
+    }
+
+    // check the frame data
+    int count = 3;
+
+    // check the frame length
+    if (frameOut->Length() != size)
+    {
+        return -3;
+    }
+
+    for(; i < size; i++)
+    {
+        if (outData[i] == 0 && outData[i + 1] == 0 && outData[i + 2] == 0x80)
+        {
+            i += 2;
+        }
+        else if(startCode && outData[i] == 0 && outData[i + 1] == 0)
+        {
+            if (outData[i] != 0 || outData[i + 1] != 0 ||
+                outData[i + 2] != 0 || outData[i + 3] != 1)
+            {
+                return -3;
+            }
+            i += 3;
+        }
+        else
+        {
+            if (outData[i] != count)
+            {
+                return -4;
+            }
+            count++;
+            if(count == 10)
+            {
+                count = 3;
+            }
+        }
+    }
+    return 0;
+}
+
+
+int JitterBufferTest(CmdArgs& args)
+{
+    // Don't run these tests with debug event.
+#if defined(EVENT_DEBUG)
+    return -1;
+#endif
+    TickTimeBase clock;
+
+    // Start test
+    WebRtc_UWord16 seqNum = 1234;
+    WebRtc_UWord32 timeStamp = 0;
+    int size = 1400;
+    WebRtc_UWord8 data[1500];
+    VCMPacket packet(data, size, seqNum, timeStamp, true);
+
+    VCMJitterBuffer jb(&clock);
+
+    seqNum = 1234;
+    timeStamp = 123*90;
+    FrameType incomingFrameType(kVideoFrameKey);
+    VCMEncodedFrame* frameOut=NULL;
+    WebRtc_Word64 renderTimeMs = 0;
+    packet.timestamp = timeStamp;
+    packet.seqNum = seqNum;
+
+    // build a data vector with 0, 0, 0x80, 3, 4, 5, 6, 7, 8, 9, 0, 0, 0x80, 3....
+    data[0] = 0;
+    data[1] = 0;
+    data[2] = 0x80;
+    int count = 3;
+    for (unsigned int i = 3; i < sizeof(data) - 3; ++i)
+    {
+        data[i] = count;
+        count++;
+        if(count == 10)
+        {
+            data[i+1] = 0;
+            data[i+2] = 0;
+            data[i+3] = 0x80;
+            count = 3;
+            i += 3;
+        }
+    }
+
+    // Test out of range inputs
+    TEST(kSizeError == jb.InsertPacket(0, packet));
+    jb.ReleaseFrame(0);
+
+    // Not started
+    TEST(0 == jb.GetFrame(packet));
+    TEST(-1 == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+    TEST(0 == jb.GetCompleteFrameForDecoding(10));
+    TEST(0 == jb.GetFrameForDecoding());
+
+    // Start
+    jb.Start();
+
+    // Get frame to use for this timestamp
+    VCMEncodedFrame* frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // No packets inserted
+    TEST(0 == jb.GetCompleteFrameForDecoding(10));
+
+
+    //
+    // TEST single packet frame
+    //
+    //  --------
+    // |  1234  |
+    //  --------
+
+    // packet.frameType;
+    // packet.dataPtr;
+    // packet.sizeBytes;
+    // packet.timestamp;
+    // packet.seqNum;
+    // packet.isFirstPacket;
+    // packet.markerBit;
+    //
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = true;
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    //printf("DONE delta frame 1 packet\n");
+
+    //
+    // TEST dual packet frame
+    //
+    //  -----------------
+    // |  1235  |  1236  |
+    //  -----------------
+    //
+
+    seqNum++;
+    timeStamp += 33*90;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    seqNum++;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*2, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    //printf("DONE delta frame 2 packets\n");
+
+
+    //
+    // TEST 100 packets frame Key frame
+    //
+    //  ----------------------------------
+    // |  1237  |  1238  |  .... |  1336  |
+    //  ----------------------------------
+
+    // insert first packet
+    timeStamp += 33*90;
+    seqNum++;
+    packet.frameType = kVideoFrameKey;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameKey);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    // insert 98 frames
+    int loop = 0;
+    do
+    {
+        seqNum++;
+        packet.isFirstPacket = false;
+        packet.markerBit = false;
+        packet.seqNum = seqNum;
+
+        frameIn = jb.GetFrame(packet);
+        TEST(frameIn != 0);
+
+        // Insert a packet into a frame
+        TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+        loop++;
+    } while (loop < 98);
+
+    // insert last packet
+    seqNum++;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*100, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameKey);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    //printf("DONE key frame 100 packets\n");
+
+    //
+    // TEST 100 packets frame Delta frame
+    //
+    //  ----------------------------------
+    // |  1337  |  1238  |  .... |  1436  |
+    //  ----------------------------------
+
+    // insert first packet
+    timeStamp += 33*90;
+    seqNum++;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    // insert 98 frames
+    loop = 0;
+    do
+    {
+        seqNum++;
+        packet.isFirstPacket = false;
+        packet.markerBit = false;
+        packet.seqNum = seqNum;
+
+        frameIn = jb.GetFrame(packet);
+        TEST(frameIn != 0);
+
+        // Insert a packet into a frame
+        TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+        loop++;
+    } while (loop < 98);
+
+    // insert last packet
+    seqNum++;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*100, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    //printf("DONE delta frame 100 packets\n");
+
+    //
+    // TEST packet re-ordering reverse order
+    //
+    //  ----------------------------------
+    // |  1437  |  1238  |  .... |  1536  |
+    //  ----------------------------------
+    //            <----------
+
+    // insert "first" packet last seqnum
+    timeStamp += 33*90;
+    seqNum += 100;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    // insert 98 packets
+    loop = 0;
+    do
+    {
+        seqNum--;
+        packet.isFirstPacket = false;
+        packet.markerBit = false;
+        packet.seqNum = seqNum;
+
+        frameIn = jb.GetFrame(packet);
+        TEST(frameIn != 0);
+
+        // Insert a packet into a frame
+        TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+        loop++;
+    } while (loop < 98);
+
+    // insert last packet
+    seqNum--;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*100, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    //printf("DONE delta frame 100 packets reverse order\n");
+
+    seqNum+= 100;
+
+    //
+    // TEST frame re-ordering 2 frames 2 packets each
+    //
+    //  -----------------     -----------------
+    // |  1539  |  1540  |   |  1537  |  1538  |
+    //  -----------------     -----------------
+
+    seqNum += 2;
+    timeStamp += 2* 33 * 90;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    seqNum++;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // check that we fail to get frame since seqnum is not continuous
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+    TEST(frameOut == 0);
+
+    seqNum -= 3;
+    timeStamp -= 33*90;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    seqNum++;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*2, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*2, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    seqNum += 2;
+    //printf("DONE frame re-ordering 2 frames 2 packets\n");
+
+    // restore
+    packet.dataPtr = data;
+    packet.codec = kVideoCodecUnknown;
+
+    //
+    // TEST duplicate packets
+    //
+    //  -----------------
+    // |  1543  |  1543  |
+    //  -----------------
+    //
+
+   seqNum++;
+    timeStamp += 2*33*90;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kDuplicatePacket == jb.InsertPacket(frameIn, packet));
+
+    seqNum++;
+    packet.seqNum = seqNum;
+
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*2, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    //printf("DONE test duplicate packets\n");
+
+    //
+    // TEST H.264 insert start code
+    //
+    //  -----------------
+    // |  1544  |  1545  |
+    //  -----------------
+    // insert start code, both packets
+
+    seqNum++;
+    timeStamp += 33 * 90;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+    packet.insertStartCode = true;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    seqNum++;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size * 2 + 4 * 2, true) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    // reset
+    packet.insertStartCode = false;
+    //printf("DONE H.264 insert start code test 2 packets\n");
+
+    //
+    // TEST statistics
+    //
+    WebRtc_UWord32 numDeltaFrames = 0;
+    WebRtc_UWord32 numKeyFrames = 0;
+    TEST(jb.GetFrameStatistics(numDeltaFrames, numKeyFrames) == 0);
+
+    TEST(numDeltaFrames == 8);
+    TEST(numKeyFrames == 1);
+
+    WebRtc_UWord32 frameRate;
+    WebRtc_UWord32 bitRate;
+    TEST(jb.GetUpdate(frameRate, bitRate) == 0);
+
+    // these depend on CPU speed works on a T61
+    TEST(frameRate > 30);
+    TEST(bitRate > 10000000);
+
+
+    jb.Flush();
+
+    //
+    // TEST packet loss. Verify missing packets statistics and not decodable
+    // packets statistics.
+    // Insert 10 frames consisting of 4 packets and remove one from all of them.
+    // The last packet is an empty (non-media) packet
+    //
+
+    // Select a start seqNum which triggers a difficult wrap situation
+    // The JB will only output (incomplete)frames if the next one has started
+    // to arrive. Start by inserting one frame (key).
+    seqNum = 0xffff - 4;
+    seqNum++;
+    timeStamp += 33*90;
+    packet.frameType = kVideoFrameKey;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+    packet.completeNALU = kNaluStart;
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    for (int i = 0; i < 11; ++i) {
+      webrtc::FrameType frametype = kVideoFrameDelta;
+      seqNum++;
+      timeStamp += 33*90;
+      packet.frameType = frametype;
+      packet.isFirstPacket = true;
+      packet.markerBit = false;
+      packet.seqNum = seqNum;
+      packet.timestamp = timeStamp;
+      packet.completeNALU = kNaluStart;
+
+      frameIn = jb.GetFrame(packet);
+      TEST(frameIn != 0);
+
+      // Insert a packet into a frame
+      TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+      // Get packet notification
+      TEST(timeStamp - 33 * 90 == jb.GetNextTimeStamp(10, incomingFrameType,
+                                                      renderTimeMs));
+
+      // Check incoming frame type
+      if (i == 0)
+      {
+          TEST(incomingFrameType == kVideoFrameKey);
+      }
+      else
+      {
+          TEST(incomingFrameType == frametype);
+      }
+
+      // Get the frame
+      frameOut = jb.GetCompleteFrameForDecoding(10);
+
+      // Should not be complete
+      TEST(frameOut == 0);
+
+      seqNum += 2;
+      packet.isFirstPacket = false;
+      packet.markerBit = true;
+      packet.seqNum = seqNum;
+      packet.completeNALU = kNaluEnd;
+
+      frameIn = jb.GetFrame(packet);
+      TEST(frameIn != 0);
+
+      // Insert a packet into a frame
+      TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+
+
+      // Insert an empty (non-media) packet
+      seqNum++;
+      packet.isFirstPacket = false;
+      packet.markerBit = false;
+      packet.seqNum = seqNum;
+      packet.completeNALU = kNaluEnd;
+      packet.frameType = kFrameEmpty;
+
+      frameIn = jb.GetFrame(packet);
+      TEST(frameIn != 0);
+
+      // Insert a packet into a frame
+      TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+
+      // Get the frame
+      frameOut = jb.GetFrameForDecoding();
+
+      // One of the packets has been discarded by the jitter buffer.
+      // Last frame can't be extracted yet.
+      if (i < 10)
+      {
+          TEST(CheckOutFrame(frameOut, size, false) == 0);
+
+          // check the frame type
+          if (i == 0)
+          {
+              TEST(frameOut->FrameType() == kVideoFrameKey);
+          }
+         else
+         {
+             TEST(frameOut->FrameType() == frametype);
+         }
+          TEST(frameOut->Complete() == false);
+          TEST(frameOut->MissingFrame() == false);
+      }
+
+      // Release frame (when done with decoding)
+      jb.ReleaseFrame(frameOut);
+    }
+
+    TEST(jb.NumNotDecodablePackets() == 10);
+
+    // Insert 3 old packets and verify that we have 3 discarded packets
+    // Match value to actual latest timestamp decoded
+    timeStamp -= 33 * 90;
+    packet.timestamp = timeStamp - 1000;
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn == NULL);
+
+    packet.timestamp = timeStamp - 500;
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn == NULL);
+
+    packet.timestamp = timeStamp - 100;
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn == NULL);
+
+    TEST(jb.DiscardedPackets() == 3);
+
+    jb.Flush();
+
+    // This statistic shouldn't be reset by a flush.
+    TEST(jb.DiscardedPackets() == 3);
+
+    //printf("DONE Statistics\n");
+
+
+    // Temporarily do this to make the rest of the test work:
+    timeStamp += 33*90;
+    seqNum += 4;
+
+
+    //
+    // TEST delta frame 100 packets with seqNum wrap
+    //
+    //  ---------------------------------------
+    // |  65520  |  65521  | ... |  82  |  83  |
+    //  ---------------------------------------
+    //
+
+    jb.Flush();
+
+    // insert first packet
+    timeStamp += 33*90;
+    seqNum = 0xfff0;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    // insert 98 packets
+    loop = 0;
+    do
+    {
+        seqNum++;
+        packet.isFirstPacket = false;
+        packet.markerBit = false;
+        packet.seqNum = seqNum;
+
+        frameIn = jb.GetFrame(packet);
+        TEST(frameIn != 0);
+
+        // Insert a packet into a frame
+        TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+
+        // get packet notification
+        TEST(timeStamp == jb.GetNextTimeStamp(2, incomingFrameType, renderTimeMs));
+
+        // check incoming frame type
+        TEST(incomingFrameType == kVideoFrameDelta);
+
+        // get the frame
+        frameOut = jb.GetCompleteFrameForDecoding(2);
+
+        // it should not be complete
+        TEST(frameOut == 0);
+
+        loop++;
+    } while (loop < 98);
+
+    // insert last packet
+    seqNum++;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*100, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    //printf("DONE delta frame 100 packets with wrap in seqNum\n");
+
+    //
+    // TEST packet re-ordering reverse order with neg seqNum warp
+    //
+    //  ----------------------------------------
+    // |  65447  |  65448  | ... |   9   |  10  |
+    //  ----------------------------------------
+    //              <-------------
+
+    // test flush
+    jb.Flush();
+
+    // insert "first" packet last seqnum
+    timeStamp += 33*90;
+    seqNum = 10;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    // insert 98 frames
+    loop = 0;
+    do
+    {
+        seqNum--;
+        packet.isFirstPacket = false;
+        packet.markerBit = false;
+        packet.seqNum = seqNum;
+
+        frameIn = jb.GetFrame(packet);
+        TEST(frameIn != 0);
+
+        // Insert a packet into a frame
+        TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+
+        // get packet notification
+        TEST(timeStamp == jb.GetNextTimeStamp(2, incomingFrameType, renderTimeMs));
+
+        // check incoming frame type
+        TEST(incomingFrameType == kVideoFrameDelta);
+
+        // get the frame
+        frameOut = jb.GetCompleteFrameForDecoding(2);
+
+        // it should not be complete
+        TEST(frameOut == 0);
+
+        loop++;
+    } while (loop < 98);
+
+    // insert last packet
+    seqNum--;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*100, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    //printf("DONE delta frame 100 packets reverse order with wrap in seqNum \n");
+
+    // test flush
+    jb.Flush();
+
+    //
+    // TEST packet re-ordering with seqNum wrap
+    //
+    //  -----------------------
+    // |   1   | 65535 |   0   |
+    //  -----------------------
+
+    // insert "first" packet last seqnum
+    timeStamp += 33*90;
+    seqNum = 1;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    // insert last packet
+    seqNum -= 2;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    seqNum++;
+    packet.isFirstPacket = false;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*3, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    //printf("DONE delta frame 3 packets re-ordering with wrap in seqNum \n");
+
+    // test flush
+    jb.Flush();
+
+    //
+    // TEST insert old frame
+    //
+    //   -------      -------
+    //  |   2   |    |   1   |
+    //   -------      -------
+    //  t = 3000     t = 2000
+
+    seqNum = 2;
+    timeStamp = 3000;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(3000 == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+    TEST(kVideoFrameDelta == incomingFrameType);
+
+    // Get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+    TEST(3000 == frameOut->TimeStamp());
+
+    TEST(CheckOutFrame(frameOut, size, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    jb.ReleaseFrame(frameOut);
+
+    seqNum--;
+    timeStamp = 2000;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    // Changed behavior, never insert packets into frames older than the
+    // last decoded frame.
+    TEST(frameIn == 0);
+
+    //printf("DONE insert old frame\n");
+
+    jb.Flush();
+
+   //
+    // TEST insert old frame with wrap in timestamp
+    //
+    //   -------      -------
+    //  |   2   |    |   1   |
+    //   -------      -------
+    //  t = 3000     t = 0xffffff00
+
+    seqNum = 2;
+    timeStamp = 3000;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+    TEST(kVideoFrameDelta == incomingFrameType);
+
+    // Get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+    TEST(timeStamp == frameOut->TimeStamp());
+
+    TEST(CheckOutFrame(frameOut, size, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    jb.ReleaseFrame(frameOut);
+
+    seqNum--;
+    timeStamp = 0xffffff00;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    // This timestamp is old
+    TEST(frameIn == 0);
+
+    jb.Flush();
+
+    //
+    // TEST wrap in timeStamp
+    //
+    //  ---------------     ---------------
+    // |   1   |   2   |   |   3   |   4   |
+    //  ---------------     ---------------
+    //  t = 0xffffff00        t = 33*90
+
+    seqNum = 1;
+    timeStamp = 0xffffff00;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    seqNum++;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*2, false) == 0);
+
+    jb.ReleaseFrame(frameOut);
+
+    seqNum++;
+    timeStamp += 33*90;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = false;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameDelta);
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    // it should not be complete
+    TEST(frameOut == 0);
+
+    seqNum++;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, packet));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+
+    TEST(CheckOutFrame(frameOut, size*2, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    //printf("DONE time stamp wrap 2 frames 2 packets\n");
+
+    jb.Flush();
+
+    //
+    // TEST insert 2 frames with wrap in timeStamp
+    //
+    //   -------          -------
+    //  |   1   |        |   2   |
+    //   -------          -------
+    // t = 0xffffff00    t = 2700
+
+    seqNum = 1;
+    timeStamp = 0xffffff00;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert first frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // Get packet notification
+    TEST(0xffffff00 == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+    TEST(kVideoFrameDelta == incomingFrameType);
+
+    // Insert next frame
+    seqNum++;
+    timeStamp = 2700;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // Get packet notification
+    TEST(0xffffff00 == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+    TEST(kVideoFrameDelta == incomingFrameType);
+
+    // Get frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+    TEST(0xffffff00 == frameOut->TimeStamp());
+
+    TEST(CheckOutFrame(frameOut, size, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // Get packet notification
+    TEST(2700 == jb.GetNextTimeStamp(0, incomingFrameType, renderTimeMs));
+    TEST(kVideoFrameDelta == incomingFrameType);
+
+    // Get frame
+    VCMEncodedFrame* frameOut2 = jb.GetCompleteFrameForDecoding(10);
+    TEST(2700 == frameOut2->TimeStamp());
+
+    TEST(CheckOutFrame(frameOut2, size, false) == 0);
+
+    // check the frame type
+    TEST(frameOut2->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+    jb.ReleaseFrame(frameOut2);
+
+    //printf("DONE insert 2 frames (1 packet) with wrap in timestamp\n");
+
+    jb.Flush();
+
+    //
+    // TEST insert 2 frames re-ordered with wrap in timeStamp
+    //
+    //   -------          -------
+    //  |   2   |        |   1   |
+    //   -------          -------
+    //  t = 2700        t = 0xffffff00
+
+    seqNum = 2;
+    timeStamp = 2700;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert first frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // Get packet notification
+    TEST(2700 == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+    TEST(kVideoFrameDelta == incomingFrameType);
+
+    // Insert second frame
+    seqNum--;
+    timeStamp = 0xffffff00;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = true;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // Get packet notification
+    TEST(0xffffff00 == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+    TEST(kVideoFrameDelta == incomingFrameType);
+
+    // Get frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+    TEST(0xffffff00 == frameOut->TimeStamp());
+
+    TEST(CheckOutFrame(frameOut, size, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameDelta);
+
+    // get packet notification
+    TEST(2700 == jb.GetNextTimeStamp(0, incomingFrameType, renderTimeMs));
+    TEST(kVideoFrameDelta == incomingFrameType);
+
+    // Get frame
+    frameOut2 = jb.GetCompleteFrameForDecoding(10);
+    TEST(2700 == frameOut2->TimeStamp());
+
+    TEST(CheckOutFrame(frameOut2, size, false) == 0);
+
+    // check the frame type
+    TEST(frameOut2->FrameType() == kVideoFrameDelta);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+    jb.ReleaseFrame(frameOut2);
+
+    //printf("DONE insert 2 frames (1 packet) re-ordered with wrap in timestamp\n");
+
+    //
+    // TEST delta frame with more than max number of packets
+    //
+
+    jb.Start();
+
+    loop = 0;
+    packet.timestamp += 33*90;
+    bool firstPacket = true;
+    // insert kMaxPacketsInJitterBuffer into frame
+    do
+    {
+        seqNum++;
+        packet.isFirstPacket = false;
+        packet.markerBit = false;
+        packet.seqNum = seqNum;
+
+        frameIn = jb.GetFrame(packet);
+        TEST(frameIn != 0);
+
+        // Insert frame
+        if (firstPacket)
+        {
+            TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+            firstPacket = false;
+        }
+        else
+        {
+            TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+        }
+
+        // get packet notification
+        TEST(packet.timestamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+
+        // check incoming frame type
+        TEST(incomingFrameType == kVideoFrameDelta);
+
+        loop++;
+    } while (loop < kMaxPacketsInSession);
+
+    // Max number of packets inserted
+
+    // Insert one more packet
+    seqNum++;
+    packet.isFirstPacket = false;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+
+    frameIn = jb.GetFrame(packet);
+    TEST(frameIn != 0);
+
+    // Insert the packet -> frame recycled
+    TEST(kSizeError == jb.InsertPacket(frameIn, packet));
+
+    TEST(0 == jb.GetCompleteFrameForDecoding(10));
+
+    //printf("DONE fill frame - packets > max number of packets\n");
+
+    //
+    // TEST fill JB with more than max number of frame (50 delta frames +
+    // 51 key frames) with wrap in seqNum
+    //
+    //  --------------------------------------------------------------
+    // | 65485 | 65486 | 65487 | .... | 65535 | 0 | 1 | 2 | .....| 50 |
+    //  --------------------------------------------------------------
+    // |<-----------delta frames------------->|<------key frames----->|
+
+    jb.Flush();
+
+    loop = 0;
+    seqNum = 65485;
+    WebRtc_UWord32 timeStampStart = timeStamp +  33*90;
+    WebRtc_UWord32 timeStampFirstKey = 0;
+    VCMEncodedFrame* ptrLastDeltaFrame = NULL;
+    VCMEncodedFrame* ptrFirstKeyFrame = NULL;
+    // insert MAX_NUMBER_OF_FRAMES frames
+    do
+    {
+        timeStamp += 33*90;
+        seqNum++;
+        packet.isFirstPacket = true;
+        packet.markerBit = true;
+        packet.seqNum = seqNum;
+        packet.timestamp = timeStamp;
+
+        frameIn = jb.GetFrame(packet);
+        TEST(frameIn != 0);
+
+        if (loop == 49)  // last delta
+        {
+            ptrLastDeltaFrame = frameIn;
+        }
+        if (loop == 50)  // first key
+        {
+            ptrFirstKeyFrame = frameIn;
+            packet.frameType = kVideoFrameKey;
+            timeStampFirstKey = packet.timestamp;
+        }
+
+        // Insert frame
+        TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+        // Get packet notification, should be first inserted frame
+        TEST(timeStampStart == jb.GetNextTimeStamp(10, incomingFrameType,
+                                                   renderTimeMs));
+
+        // check incoming frame type
+        TEST(incomingFrameType == kVideoFrameDelta);
+
+        loop++;
+    } while (loop < kMaxNumberOfFrames);
+
+    // Max number of frames inserted
+
+    // Insert one more frame
+    timeStamp += 33*90;
+    seqNum++;
+    packet.isFirstPacket = true;
+    packet.markerBit = true;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+
+    // Now, no free frame - frames will be recycled until first key frame
+    frameIn = jb.GetFrame(packet);
+    // ptr to last inserted delta frame should be returned
+    TEST(frameIn != 0 && frameIn && ptrLastDeltaFrame);
+
+    // Insert frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // First inserted key frame should be oldest in buffer
+    TEST(timeStampFirstKey == jb.GetNextTimeStamp(10, incomingFrameType,
+                                                  renderTimeMs));
+
+    // check incoming frame type
+    TEST(incomingFrameType == kVideoFrameKey);
+
+    // get the first key frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+    TEST(ptrFirstKeyFrame == frameOut);
+
+    TEST(CheckOutFrame(frameOut, size, false) == 0);
+
+    // check the frame type
+    TEST(frameOut->FrameType() == kVideoFrameKey);
+
+    // Release frame (when done with decoding)
+    jb.ReleaseFrame(frameOut);
+
+    jb.Flush();
+
+    // printf("DONE fill JB - nr of delta + key frames (w/ wrap in seqNum) >
+    // max nr of frames\n");
+
+    // Testing that 1 empty packet inserted last will not be set for decoding
+    seqNum = 3;
+    // Insert one empty packet per frame, should never return the last timestamp
+    // inserted. Only return empty frames in the presence of subsequent frames.
+    int maxSize = 1000;
+    for (int i = 0; i < maxSize + 10; i++)
+    {
+        timeStamp += 33 * 90;
+        seqNum++;
+        packet.isFirstPacket = false;
+        packet.markerBit = false;
+        packet.seqNum = seqNum;
+        packet.timestamp = timeStamp;
+        packet.frameType = kFrameEmpty;
+        VCMEncodedFrame* testFrame = jb.GetFrameForDecoding();
+        // timestamp should bever be the last TS inserted
+        if (testFrame != NULL)
+        {
+            TEST(testFrame->TimeStamp() < timeStamp);
+            printf("Not null TS = %d\n",testFrame->TimeStamp());
+        }
+    }
+
+    jb.Flush();
+
+
+    // printf(DONE testing inserting empty packets to the JB)
+
+
+    // H.264 tests
+    // Test incomplete NALU frames
+
+    jb.Flush();
+    jb.SetNackMode(kNoNack, -1, -1);
+    seqNum ++;
+    timeStamp += 33 * 90;
+    int insertedLength = 0;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+    packet.frameType = kVideoFrameKey;
+    packet.isFirstPacket = true;
+    packet.completeNALU = kNaluStart;
+    packet.markerBit = false;
+
+    frameIn = jb.GetFrame(packet);
+
+     // Insert a packet into a frame
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    seqNum += 2; // Skip one packet
+    packet.seqNum = seqNum;
+    packet.frameType = kVideoFrameKey;
+    packet.isFirstPacket = false;
+    packet.completeNALU = kNaluIncomplete;
+    packet.markerBit = false;
+
+     // Insert a packet into a frame
+    TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+
+    seqNum++;
+    packet.seqNum = seqNum;
+    packet.frameType = kVideoFrameKey;
+    packet.isFirstPacket = false;
+    packet.completeNALU = kNaluEnd;
+    packet.markerBit = false;
+
+    // Insert a packet into a frame
+    TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+
+    seqNum++;
+    packet.seqNum = seqNum;
+    packet.completeNALU = kNaluComplete;
+    packet.markerBit = true; // Last packet
+    TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+
+
+    // The JB will only output (incomplete) frames if a packet belonging to a
+    // subsequent frame was already inserted. Insert one packet of a subsequent
+    // frame. place high timestamp so the JB would always have a next frame
+    // (otherwise, for every inserted frame we need to take care of the next
+    // frame as well).
+    packet.seqNum = 1;
+    packet.timestamp = timeStamp + 33 * 90 * 10;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = false;
+    packet.completeNALU = kNaluStart;
+    packet.markerBit = false;
+    frameIn = jb.GetFrame(packet);
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    // Get packet notification
+    TEST(timeStamp == jb.GetNextTimeStamp(10, incomingFrameType, renderTimeMs));
+    frameOut = jb.GetFrameForDecoding();
+
+    // We can decode everything from a NALU until a packet has been lost.
+    // Thus we can decode the first packet of the first NALU and the second NALU
+    // which consists of one packet.
+    TEST(CheckOutFrame(frameOut, packet.sizeBytes * 2, false) == 0);
+    jb.ReleaseFrame(frameOut);
+
+    // Test reordered start frame + 1 lost
+    seqNum += 2; // Reoreder 1 frame
+    timeStamp += 33*90;
+    insertedLength = 0;
+
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+    packet.frameType = kVideoFrameKey;
+    packet.isFirstPacket = false;
+    packet.completeNALU = kNaluEnd;
+    packet.markerBit = false;
+
+    TEST(frameIn = jb.GetFrame(packet));
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+    insertedLength += packet.sizeBytes; // This packet should be decoded
+
+    seqNum--;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+    packet.frameType = kVideoFrameKey;
+    packet.isFirstPacket = true;
+    packet.completeNALU = kNaluStart;
+    packet.markerBit = false;
+    TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+    insertedLength += packet.sizeBytes; // This packet should be decoded
+
+    seqNum += 3; // One packet drop
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+    packet.frameType = kVideoFrameKey;
+    packet.isFirstPacket = false;
+    packet.completeNALU = kNaluComplete;
+    packet.markerBit = false;
+    TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+    insertedLength += packet.sizeBytes; // This packet should be decoded
+
+    seqNum += 1;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+    packet.frameType = kVideoFrameKey;
+    packet.isFirstPacket = false;
+    packet.completeNALU = kNaluStart;
+    packet.markerBit = false;
+    TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+    // This packet should be decoded since it's the beginning of a NAL
+    insertedLength += packet.sizeBytes;
+
+    seqNum += 2;
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+    packet.frameType = kVideoFrameKey;
+    packet.isFirstPacket = false;
+    packet.completeNALU = kNaluEnd;
+    packet.markerBit = true;
+    TEST(kIncomplete == jb.InsertPacket(frameIn, packet));
+    // This packet should not be decoded because it is an incomplete NAL if it
+    // is the last
+
+    frameOut = jb.GetFrameForDecoding();
+    // Only last NALU is complete
+    TEST(CheckOutFrame(frameOut, insertedLength, false) == 0);
+    jb.ReleaseFrame(frameOut);
+
+
+    // Test to insert empty packet
+    seqNum += 1;
+    timeStamp += 33 * 90;
+    VCMPacket emptypacket(data, 0, seqNum, timeStamp, true);
+    emptypacket.seqNum = seqNum;
+    emptypacket.timestamp = timeStamp;
+    emptypacket.frameType = kVideoFrameKey;
+    emptypacket.isFirstPacket = true;
+    emptypacket.completeNALU = kNaluComplete;
+    emptypacket.markerBit = true;
+    TEST(frameIn = jb.GetFrame(emptypacket));
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, emptypacket));
+    // This packet should not be decoded because it is an incomplete NAL if it
+    // is the last
+    insertedLength += 0;
+
+    // Will be sent to the decoder, as a packet belonging to a subsequent frame
+    // has arrived.
+    frameOut = jb.GetFrameForDecoding();
+
+
+    // Test that a frame can include an empty packet.
+    seqNum += 1;
+    timeStamp += 33 * 90;
+
+    packet.seqNum = seqNum;
+    packet.timestamp = timeStamp;
+    packet.frameType = kVideoFrameKey;
+    packet.isFirstPacket = true;
+    packet.completeNALU = kNaluComplete;
+    packet.markerBit = false;
+    TEST(frameIn = jb.GetFrame(packet));
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    seqNum += 1;
+    emptypacket.seqNum = seqNum;
+    emptypacket.timestamp = timeStamp;
+    emptypacket.frameType = kVideoFrameKey;
+    emptypacket.isFirstPacket = true;
+    emptypacket.completeNALU = kNaluComplete;
+    emptypacket.markerBit = true;
+    TEST(kCompleteSession == jb.InsertPacket(frameIn, emptypacket));
+
+    // get the frame
+    frameOut = jb.GetCompleteFrameForDecoding(10);
+    // Only last NALU is complete
+    TEST(CheckOutFrame(frameOut, packet.sizeBytes, false) == 0);
+
+    jb.ReleaseFrame(frameOut);
+
+    jb.Flush();
+
+    // Test that a we cannot get incomplete frames from the JB if we haven't
+    // received the marker bit, unless we have received a packet from a later
+    // timestamp.
+
+    packet.seqNum += 2;
+    packet.frameType = kVideoFrameDelta;
+    packet.isFirstPacket = false;
+    packet.markerBit = false;
+
+    TEST(frameIn = jb.GetFrame(packet));
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    frameOut = jb.GetFrameForDecoding();
+    TEST(frameOut == NULL);
+
+    packet.seqNum += 2;
+    packet.timestamp += 33 * 90;
+
+    TEST(frameIn = jb.GetFrame(packet));
+    TEST(kFirstPacket == jb.InsertPacket(frameIn, packet));
+
+    frameOut = jb.GetFrameForDecoding();
+
+    TEST(frameOut != NULL);
+    TEST(CheckOutFrame(frameOut, packet.sizeBytes, false) == 0);
+    jb.ReleaseFrame(frameOut);
+
+    jb.Stop();
+
+    printf("DONE !!!\n");
+
+    printf("\nVCM Jitter Buffer Test: \n\n%i tests completed\n",
+           vcmMacrosTests);
+    if (vcmMacrosErrors > 0)
+    {
+        printf("%i FAILED\n\n", vcmMacrosErrors);
+    }
+    else
+    {
+        printf("ALL PASSED\n\n");
+    }
+
+    return 0;
+
+}
diff --git a/src/modules/video_coding/main/test/jitter_estimate_test.cc b/src/modules/video_coding/main/test/jitter_estimate_test.cc
new file mode 100644
index 0000000..ac74a8a
--- /dev/null
+++ b/src/modules/video_coding/main/test/jitter_estimate_test.cc
@@ -0,0 +1,109 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <ctime>
+#include "JitterEstimateTest.h"
+
+using namespace webrtc;
+
+JitterEstimateTest::JitterEstimateTest(unsigned int frameRate) :
+_frameRate(frameRate),
+_capacity(2000),
+_rate(500),
+_jitter(5, 0),
+_keyFrameRate(1.0),
+_deltaFrameSize(10000, 1e6),
+_counter(0),
+_lossrate(0.0)
+{
+    // Assign to random value between 0 and max of unsigned int
+    _seed = static_cast<unsigned>(std::time(0));
+    std::srand(_seed);
+    _prevTimestamp = static_cast<unsigned int>((std::rand() + 1.0)/(RAND_MAX + 1.0)*(pow((float) 2, (long) sizeof(unsigned int)*8)-1));
+    _prevWallClock = VCMTickTime::MillisecondTimestamp();
+}
+
+FrameSample
+JitterEstimateTest::GenerateFrameSample()
+{
+    double increment = 1.0/_frameRate;
+    unsigned int frameSize = static_cast<unsigned int>(_deltaFrameSize.RandValue());
+    bool keyFrame = false;
+    bool resent = false;
+    _prevTimestamp += static_cast<unsigned int>(90000*increment + 0.5);
+    double deltaFrameRate = _frameRate - _keyFrameRate;
+    double ratio = deltaFrameRate/static_cast<double>(_keyFrameRate);
+    if (ratio < 1.0)
+    {
+        ratio = 1.0/ratio;
+        if (_counter >= ratio)
+            _counter = 0;
+        else
+        {
+            _counter++;
+            frameSize += static_cast<unsigned int>(3*_deltaFrameSize.GetAverage());
+            keyFrame = true;
+        }
+    }
+    else
+    {
+        if (_counter >= ratio)
+        {
+            frameSize += static_cast<unsigned int>(3*_deltaFrameSize.GetAverage());
+            _counter = 0;
+            keyFrame = true;
+        }
+        else
+            _counter++;
+    }
+    WebRtc_Word64 jitter =  static_cast<WebRtc_Word64>(_jitter.RandValue() + 1.0/_capacity * frameSize + 0.5);
+    _prevWallClock += static_cast<WebRtc_Word64>(1000*increment + 0.5);
+    double rndValue = RandUniform();
+    resent = (rndValue < _lossrate);
+    //printf("rndValue = %f\n", rndValue);
+    return FrameSample(_prevTimestamp, _prevWallClock + jitter, frameSize, keyFrame, resent);
+}
+
+void
+JitterEstimateTest::SetCapacity(unsigned int c)
+{
+    _capacity = c;
+}
+
+void
+JitterEstimateTest::SetRate(unsigned int r)
+{
+    _rate = r;
+}
+
+void
+JitterEstimateTest::SetJitter(double m, double v)
+{
+    _jitter.SetParams(m, v);
+}
+
+void
+JitterEstimateTest::SetFrameSizeStats(double m, double v)
+{
+    _deltaFrameSize.SetParams(m, v);
+}
+
+void
+JitterEstimateTest::SetKeyFrameRate(int rate)
+{
+    _keyFrameRate = rate;
+}
+
+void
+JitterEstimateTest::SetLossRate(double rate)
+{
+    _lossrate = rate;
+}
diff --git a/src/modules/video_coding/main/test/jitter_estimate_test.h b/src/modules/video_coding/main/test/jitter_estimate_test.h
new file mode 100644
index 0000000..cd7338a
--- /dev/null
+++ b/src/modules/video_coding/main/test/jitter_estimate_test.h
@@ -0,0 +1,105 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_JITTER_ESTIMATE_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_JITTER_ESTIMATE_TEST_H_
+
+#include "typedefs.h"
+#include "jitter_buffer.h"
+#include "jitter_estimator.h"
+#include <cstdlib>
+#include <cmath>
+
+double const pi = 4*std::atan(1.0);
+
+class GaussDist
+{
+public:
+    GaussDist(double m, double v): _mu(m), _sigma(sqrt(v)) {}
+
+    double RandValue() // returns a single normally distributed number
+    {
+        double r1 = (std::rand() + 1.0)/(RAND_MAX + 1.0); // gives equal distribution in (0, 1]
+        double r2 = (std::rand() + 1.0)/(RAND_MAX + 1.0);
+        return _mu + _sigma * std::sqrt(-2*std::log(r1))*std::cos(2*pi*r2);
+    }
+
+    double GetAverage()
+    {
+        return _mu;
+    }
+
+    double GetVariance()
+    {
+        return _sigma*_sigma;
+    }
+
+    void SetParams(double m, double v)
+    {
+        _mu = m;
+        _sigma = sqrt(v);
+    }
+
+private:
+    double _mu, _sigma;
+};
+
+class JitterEstimateTestWrapper : public webrtc::VCMJitterEstimator
+{
+public:
+    JitterEstimateTestWrapper() : VCMJitterEstimator() {}
+    double GetTheta() { return _theta[0]; }
+    double GetVarNoise() { return _varNoise; }
+};
+
+class FrameSample
+{
+public:
+    FrameSample() {FrameSample(0, 0, 0, false, false);}
+    FrameSample(unsigned int ts, WebRtc_Word64 wallClk, unsigned int fs, bool _keyFrame, bool _resent):
+      timestamp90Khz(ts), wallClockMs(wallClk), frameSize(fs), keyFrame(_keyFrame), resent(_resent) {}
+
+    unsigned int timestamp90Khz;
+    WebRtc_Word64 wallClockMs;
+    unsigned int frameSize;
+    bool keyFrame;
+    bool resent;
+};
+
+class JitterEstimateTest
+{
+public:
+    JitterEstimateTest(unsigned int frameRate);
+    FrameSample GenerateFrameSample();
+    void SetCapacity(unsigned int c);
+    void SetRate(unsigned int r);
+    void SetJitter(double m, double v);
+    void SetFrameSizeStats(double m, double v);
+    void SetKeyFrameRate(int rate);
+    void SetLossRate(double rate);
+
+private:
+    double RandUniform() { return (std::rand() + 1.0)/(RAND_MAX + 1.0); }
+    unsigned int _frameRate;
+    unsigned int _capacity;
+    unsigned int _rate;
+    GaussDist _jitter;
+    //GaussDist _noResend;
+    GaussDist _deltaFrameSize;
+    unsigned int _prevTimestamp;
+    WebRtc_Word64 _prevWallClock;
+    unsigned int _nextDelay;
+    double _keyFrameRate;
+    unsigned int _counter;
+    unsigned int _seed;
+    double _lossrate;
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_JITTER_ESTIMATE_TEST_H_
diff --git a/src/modules/video_coding/main/test/media_opt_test.cc b/src/modules/video_coding/main/test/media_opt_test.cc
new file mode 100644
index 0000000..692305b
--- /dev/null
+++ b/src/modules/video_coding/main/test/media_opt_test.cc
@@ -0,0 +1,542 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Implementation of Media Optimization Test
+// testing is done via the VCM module, no specific Media opt functionality.
+
+#include "media_opt_test.h"
+
+#include <string.h>
+#include <stdio.h>
+#include <time.h>
+#include <vector>
+
+#include "../source/event.h"
+#include "test_macros.h"
+#include "test_util.h" // send side callback
+#include "testsupport/metrics/video_metrics.h"
+#include "video_coding.h"
+
+
+using namespace webrtc;
+
+int MediaOptTest::RunTest(int testNum, CmdArgs& args)
+{
+    Trace::CreateTrace();
+    Trace::SetTraceFile((test::OutputPath() + "mediaOptTestTrace.txt").c_str());
+    Trace::SetLevelFilter(webrtc::kTraceAll);
+    TickTimeBase clock;
+    VideoCodingModule* vcm = VideoCodingModule::Create(1, &clock);
+    MediaOptTest* mot = new MediaOptTest(vcm, &clock);
+    if (testNum == 0)
+    { // regular
+         mot->Setup(0, args);
+         mot->GeneralSetup();
+         mot->Perform();
+         mot->Print(1);// print to screen
+         mot->TearDown();
+    }
+    if (testNum == 1)
+    {   // release test
+        mot->Setup(0, args);
+        mot->RTTest();
+    }
+    if (testNum == 2)
+    { // release test, running from script
+         mot->Setup(1, args);
+         mot->GeneralSetup();
+         mot->Perform();
+         mot->Print(1);// print to screen
+         mot->TearDown();
+    }
+
+    VideoCodingModule::Destroy(vcm);
+    delete mot;
+    Trace::ReturnTrace();
+    return 0;
+
+}
+
+
+MediaOptTest::MediaOptTest(VideoCodingModule* vcm, TickTimeBase* clock)
+    : _vcm(vcm),
+      _rtp(NULL),
+      _outgoingTransport(NULL),
+      _dataCallback(NULL),
+      _clock(clock),
+      _width(0),
+      _height(0),
+      _lengthSourceFrame(0),
+      _timeStamp(0),
+      _frameRate(30.0f),
+      _nackEnabled(false),
+      _fecEnabled(false),
+      _rttMS(0),
+      _bitRate(300.0f),
+      _lossRate(0.0f),
+      _renderDelayMs(0),
+      _frameCnt(0),
+      _sumEncBytes(0),
+      _numFramesDropped(0),
+      _numberOfCores(4) {
+}
+
+MediaOptTest::~MediaOptTest() {
+  delete _rtp;
+}
+
+void MediaOptTest::Setup(int testType, CmdArgs& args) {
+    /*TEST USER SETTINGS*/
+    // test parameters
+    _inname = args.inputFile;
+    if (args.outputFile == "")
+        _outname = test::OutputPath() + "MOTest_out.vp8";
+    else
+        _outname = args.outputFile;
+    // actual source after frame dropping
+    _actualSourcename = test::OutputPath() + "MOTestSource.yuv";
+    _codecName = args.codecName;
+    _sendCodecType = args.codecType;
+    _width = args.width;
+    _height = args.height;
+    _frameRate = args.frameRate;
+    _bitRate = args.bitRate;
+    _numberOfCores = 4;
+
+    // error resilience
+    _nackEnabled = false;
+    _fecEnabled = true;
+    _nackFecEnabled = false;
+
+    _rttMS = 100;
+    _lossRate = 0.00*255; // no packet loss
+
+    _testType = testType;
+
+    //For multiple runs with script
+    if (_testType == 1)
+    {
+        float rateTest,lossTest;
+        int numRuns;
+        _fpinp = fopen("dat_inp","rb");
+        _fpout = fopen("test_runs/dat_out","ab");
+        _fpout2 = fopen("test_runs/dat_out2","ab");
+        TEST(fscanf(_fpinp,"%f %f %d \n",&rateTest,&lossTest,&numRuns) > 0);
+        _bitRate = rateTest;
+        _lossRate = lossTest;
+        _testNum = 0;
+
+        // for bit rates: 500, 1000, 2000, 3000,4000
+        // for loss rates: 0, 1, 3, 5, 10%
+        _numParRuns = 25;
+
+        _testNum = numRuns + 1;
+        if (rateTest == 0.0) _lossRate = 0.0;
+        else
+        {
+            if (rateTest == 4000)  //final bit rate
+            {
+                if (lossTest == 0.1*255) _lossRate = 0.0;  //start at 1%
+                else
+                    if (lossTest == 0.05*255) _lossRate = 0.1*255;  //final loss rate
+                    else
+                        if (lossTest == 0.0) _lossRate = 0.01*255;
+                        else _lossRate = lossTest + 0.02*255;
+            }
+        }
+
+        if (rateTest == 0.0 || rateTest == 4000) _bitRate = 500; //starting bit rate
+        else
+            if (rateTest == 500) _bitRate = 1000;
+                else _bitRate = rateTest +  1000;
+    }
+   //
+
+    _renderDelayMs = 0;
+    /* test settings end*/
+
+   _lengthSourceFrame  = 3*_width*_height/2;
+    _log.open((test::OutputPath() + "VCM_MediaOptLog.txt").c_str(),
+              std::fstream::out | std::fstream::app);
+}
+
+void
+MediaOptTest::GeneralSetup()
+{
+    WebRtc_UWord32 minPlayoutDelayMs = 0;
+
+    if ((_sourceFile = fopen(_inname.c_str(), "rb")) == NULL)
+    {
+        printf("Cannot read file %s.\n", _inname.c_str());
+        exit(1);
+    }
+
+    if ((_decodedFile = fopen(_outname.c_str(), "wb")) == NULL)
+    {
+        printf("Cannot read file %s.\n", _outname.c_str());
+        exit(1);
+    }
+
+    if ((_actualSourceFile = fopen(_actualSourcename.c_str(), "wb")) == NULL)
+    {
+        printf("Cannot read file %s.\n", _actualSourcename.c_str());
+        exit(1);
+    }
+    if (_vcm->InitializeReceiver() < 0)
+    {
+        exit(1);
+    }
+    if (_vcm->InitializeSender())
+    {
+        exit(1);
+    }
+    _outgoingTransport = new RTPSendCompleteCallback(_clock);
+    _dataCallback = new RtpDataCallback(_vcm);
+
+    RtpRtcp::Configuration configuration;
+    configuration.id = 1;
+    configuration.audio = false;
+    configuration.incoming_data = _dataCallback;
+    configuration.outgoing_transport = _outgoingTransport;
+    _rtp = RtpRtcp::CreateRtpRtcp(configuration);
+
+    _outgoingTransport->SetRtpModule(_rtp);
+
+    // Registering codecs for the RTP module
+
+    // Register receive and send payload
+    VideoCodec videoCodec;
+    strncpy(videoCodec.plName, "VP8", 32);
+    videoCodec.plType = VCM_VP8_PAYLOAD_TYPE;
+    _rtp->RegisterReceivePayload(videoCodec);
+    _rtp->RegisterSendPayload(videoCodec);
+
+    strncpy(videoCodec.plName, "ULPFEC", 32);
+    videoCodec.plType = VCM_ULPFEC_PAYLOAD_TYPE;
+    _rtp->RegisterReceivePayload(videoCodec);
+    _rtp->RegisterSendPayload(videoCodec);
+
+    strncpy(videoCodec.plName, "RED", 32);
+    videoCodec.plType = VCM_RED_PAYLOAD_TYPE;
+    _rtp->RegisterReceivePayload(videoCodec);
+    _rtp->RegisterSendPayload(videoCodec);
+
+    if (_nackFecEnabled == 1)
+        _rtp->SetGenericFECStatus(_nackFecEnabled, VCM_RED_PAYLOAD_TYPE,
+                VCM_ULPFEC_PAYLOAD_TYPE);
+    else
+        _rtp->SetGenericFECStatus(_fecEnabled, VCM_RED_PAYLOAD_TYPE,
+                VCM_ULPFEC_PAYLOAD_TYPE);
+
+    // VCM: Registering codecs
+    VideoCodec sendCodec;
+    _vcm->InitializeSender();
+    _vcm->InitializeReceiver();
+    WebRtc_Word32 numberOfCodecs = _vcm->NumberOfCodecs();
+    if (numberOfCodecs < 1)
+    {
+        exit(1);
+    }
+
+    if (_vcm->Codec(_sendCodecType, &sendCodec) != 0)
+    {
+        printf("Unknown codec\n");
+        exit(1);
+    }
+    // register codec
+    sendCodec.startBitrate = (int) _bitRate;
+    sendCodec.height = _height;
+    sendCodec.width = _width;
+    sendCodec.maxFramerate = (WebRtc_UWord8)_frameRate;
+    _vcm->RegisterSendCodec(&sendCodec, _numberOfCores, 1440);
+    _vcm->RegisterReceiveCodec(&sendCodec, _numberOfCores); // same settings for encode and decode
+
+    _vcm->SetRenderDelay(_renderDelayMs);
+    _vcm->SetMinimumPlayoutDelay(minPlayoutDelayMs);
+}
+// The following test shall be conducted under release tests
+
+
+
+WebRtc_Word32
+MediaOptTest::Perform()
+{
+    VCMDecodeCompleteCallback receiveCallback(_decodedFile);
+
+    VCMRTPEncodeCompleteCallback* encodeCompleteCallback = new VCMRTPEncodeCompleteCallback(_rtp);
+    _vcm->RegisterTransportCallback(encodeCompleteCallback);
+    encodeCompleteCallback->SetCodecType(ConvertCodecType(_codecName.c_str()));
+    encodeCompleteCallback->SetFrameDimensions(_width, _height);
+
+    // callback settings
+    VideoProtectionCallback  protectionCallback;
+    protectionCallback.RegisterRtpModule(_rtp);
+    _vcm->RegisterProtectionCallback(&protectionCallback);
+
+    // set error resilience / test parameters:
+    _outgoingTransport->SetLossPct(_lossRate);
+    if (_nackFecEnabled == 1) {
+        _vcm->SetVideoProtection(kProtectionNackFEC, _nackFecEnabled);
+    } else {
+        _vcm->SetVideoProtection(kProtectionNack, _nackEnabled);
+        _vcm->SetVideoProtection(kProtectionFEC, _fecEnabled);
+    }
+
+    // START TEST
+    VideoFrame sourceFrame;
+    sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
+    WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[_lengthSourceFrame];
+    _vcm->SetChannelParameters((WebRtc_UWord32)_bitRate, (WebRtc_UWord8)_lossRate, _rttMS);
+    _vcm->RegisterReceiveCallback(&receiveCallback);
+
+    _frameCnt  = 0;
+    _sumEncBytes = 0.0;
+    _numFramesDropped = 0;
+
+    while (feof(_sourceFile)== 0)
+    {
+        TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0);
+        _frameCnt++;
+
+        sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
+        sourceFrame.SetHeight(_height);
+        sourceFrame.SetWidth(_width);
+        _timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_frameRate));
+        sourceFrame.SetTimeStamp(_timeStamp);
+        TEST(_vcm->AddVideoFrame(sourceFrame) == VCM_OK);
+        // inform RTP Module of error resilience features
+        //_rtp->SetFECCodeRate(protectionCallback.FECKeyRate(),protectionCallback.FECDeltaRate());
+        //_rtp->SetNACKStatus(protectionCallback.NACKMethod());
+
+        WebRtc_Word32 ret = _vcm->Decode();
+        if (ret < 0 )
+        {
+            TEST(ret == 0);
+            printf ("Decode error in frame # %d",_frameCnt);
+        }
+
+        float encBytes = encodeCompleteCallback->EncodedBytes();
+        if (encBytes == 0)
+        {
+            _numFramesDropped += 1;
+            //printf("frame #%d dropped \n", _frameCnt );
+        }
+        else
+        {
+          // write frame to file
+          if (fwrite(sourceFrame.Buffer(), 1, sourceFrame.Length(),
+                     _actualSourceFile) !=  sourceFrame.Length()) {
+            return -1;
+          }
+        }
+
+        _sumEncBytes += encBytes;
+    }
+
+    //END TEST
+    delete encodeCompleteCallback;
+    delete tmpBuffer;
+
+return 0;
+
+}
+
+void
+MediaOptTest::RTTest()
+{
+    // will only calculate PSNR - not create output files for all
+    // SET UP
+    // Set bit rates
+    const float bitRateVec[] = {500, 1000, 2000,3000, 4000};
+    //const float bitRateVec[] = {1000};
+    // Set Packet loss values ([0,255])
+    const double lossPctVec[]     = {0.0*255, 0.0*255, 0.01*255, 0.01*255, 0.03*255, 0.03*255, 0.05*255, 0.05*255, 0.1*255, 0.1*255};
+    const bool  nackEnabledVec[] = {false  , false, false, false, false, false, false, false , false, false};
+    const bool  fecEnabledVec[]  = {false  , true,  false, true , false, true , false, true , false, true};
+    // fec and nack are set according to the packet loss values
+
+    const float nBitrates = sizeof(bitRateVec)/sizeof(*bitRateVec);
+    const float nlossPct = sizeof(lossPctVec)/sizeof(*lossPctVec);
+
+    std::vector<const VideoSource*> sources;
+    std::vector<const VideoSource*>::iterator it;
+
+    sources.push_back(new const VideoSource(_inname, _width, _height));
+    int numOfSrc = 1;
+
+    // constant settings (valid for entire run time)
+    _rttMS = 20;
+    _renderDelayMs = 0;
+
+    // same out name for all
+    _outname = test::OutputPath() + "RTMOTest_out.yuv";
+    // actual source after frame dropping
+    _actualSourcename = test::OutputPath() + "RTMOTestSource.yuv";
+
+    _codecName = "VP8";  // for now just this one - later iterate over all codec types
+    _log.open((test::OutputPath() + "/VCM_RTMediaOptLog.txt").c_str(),
+              std::fstream::out | std::fstream::app);
+    _outputRes=fopen((test::OutputPath() + "VCM_MediaOptResults.txt").c_str(),
+                     "ab");
+
+    //char filename[128];
+    /* test settings end*/
+
+    // START TEST
+    // iterate over test sequences
+    printf("\n****START TEST OVER ALL RUNS ****\n");
+    int runCnt = 0;
+    for (it = sources.begin() ; it < sources.end(); it++)
+    {
+
+        // test set up
+        _inname = (*it)->GetFileName();
+        _width  = (*it)->GetWidth();
+        _height = (*it)->GetHeight();
+        _lengthSourceFrame  = 3*_width*_height/2;
+        _frameRate = (*it)->GetFrameRate();
+         //GeneralSetup();
+
+
+        // iterate over all bit rates
+        for (int i = 0; i < nBitrates; i++)
+        {
+           _bitRate = static_cast<float>(bitRateVec[i]);
+            // iterate over all packet loss values
+            for (int j = 0; j < nlossPct; j++)
+            {
+                 _lossRate = static_cast<float>(lossPctVec[j]);
+                 _nackEnabled = static_cast<bool>(nackEnabledVec[j]);
+                 _fecEnabled = static_cast<bool>(fecEnabledVec[j]);
+
+                 runCnt++;
+                 printf("run #%d out of %d \n", runCnt,(int)(nlossPct*nBitrates*numOfSrc));
+
+                //printf("**FOR RUN: **%d %d %d %d \n",_nackEnabled,_fecEnabled,int(lossPctVec[j]),int(_bitRate));
+
+                 /*
+                 int ch = sprintf(filename,"../test_mediaOpt/RTMOTest_%d_%d_%d_%d.yuv",_nackEnabled,_fecEnabled,int(lossPctVec[j]),int(_bitRate));
+                _outname = filename;
+
+                printf("**FOR RUN: **%d %d %d %d \n",_nackEnabled,_fecEnabled,int(lossPctVec[j]),int(_bitRate));
+               */
+                 GeneralSetup();
+                 Perform();
+                 Print(1);
+                 TearDown();
+
+                 printf("\n");
+                  //printf("**DONE WITH RUN: **%d %d %f %d \n",_nackEnabled,_fecEnabled,lossPctVec[j],int(_bitRate));
+                 //
+
+            }// end of packet loss loop
+        }// end of bit rate loop
+        delete *it;
+    }// end of video sequence loop
+    // at end of sequence
+    fclose(_outputRes);
+    printf("\nVCM Media Optimization Test: \n\n%i tests completed\n", vcmMacrosTests);
+    if (vcmMacrosErrors > 0)
+    {
+        printf("%i FAILED\n\n", vcmMacrosErrors);
+    }
+    else
+    {
+        printf("ALL PASSED\n\n");
+    }
+}
+
+
+void
+MediaOptTest::Print(int mode)
+{
+    double ActualBitRate =  8.0 *( _sumEncBytes / (_frameCnt / _frameRate));
+    double actualBitRate = ActualBitRate / 1000.0;
+    webrtc::test::QualityMetricsResult psnr;
+    I420PSNRFromFiles(_actualSourcename.c_str(), _outname.c_str(), _width,
+                      _height, &psnr);
+
+    (_log) << "VCM: Media Optimization Test Cycle Completed!" << std::endl;
+    (_log) << "Input file: " << _inname << std::endl;
+    (_log) << "Output file:" << _outname << std::endl;
+    ( _log) << "Actual bitrate: " << actualBitRate<< " kbps\tTarget: " << _bitRate << " kbps" << std::endl;
+    (_log) << "Error Reslience: NACK:" << _nackEnabled << "; FEC: " << _fecEnabled << std::endl;
+    (_log) << "Packet Loss applied= %f " << _lossRate << std::endl;
+    (_log) << _numFramesDropped << " FRames were dropped" << std::endl;
+     ( _log) << "PSNR: " << psnr.average << std::endl;
+    (_log) << std::endl;
+
+    if (_testType == 2)
+    {
+        fprintf(_outputRes,"************\n");
+        fprintf(_outputRes,"\n\n\n");
+        fprintf(_outputRes,"Actual bitrate: %f kbps\n", actualBitRate);
+        fprintf(_outputRes,"Target bitrate: %f kbps\n", _bitRate);
+        fprintf(_outputRes,"NACK: %s  ",(_nackEnabled)?"true":"false");
+        fprintf(_outputRes,"FEC: %s \n ",(_fecEnabled)?"true":"false");
+        fprintf(_outputRes,"Packet loss applied = %f\n", _lossRate);
+        fprintf(_outputRes,"%d frames were dropped, and total number of frames processed %d  \n",_numFramesDropped,_frameCnt);
+        fprintf(_outputRes,"PSNR: %f \n", psnr.average);
+        fprintf(_outputRes,"************\n");
+    }
+
+
+    //
+    if (_testType == 1)
+    {
+        fprintf(_fpout,"************\n");
+        fprintf(_fpout,"\n\n\n");
+        fprintf(_fpout,"Actual bitrate: %f kbps\n", actualBitRate);
+        fprintf(_fpout,"Target bitrate: %f kbps\n", _bitRate);
+        fprintf(_fpout,"NACK: %s  ",(_nackEnabled)?"true":"false");
+        fprintf(_fpout,"FEC: %s \n ",(_fecEnabled)?"true":"false");
+        fprintf(_fpout,"Packet loss applied = %f\n", _lossRate);
+        fprintf(_fpout,"%d frames were dropped, and total number of frames processed %d  \n",_numFramesDropped,_frameCnt);
+        fprintf(_fpout,"PSNR: %f \n", psnr.average);
+        fprintf(_fpout,"************\n");
+
+        int testNum1 = _testNum/(_numParRuns +1) + 1;
+        int testNum2 = _testNum%_numParRuns;
+        if (testNum2 == 0) testNum2 = _numParRuns;
+        fprintf(_fpout2,"%d %d %f %f %f %f \n",testNum1,testNum2,_bitRate,actualBitRate,_lossRate,psnr.average);
+        fclose(_fpinp);
+        _fpinp = fopen("dat_inp","wb");
+        fprintf(_fpinp,"%f %f %d \n",_bitRate,_lossRate,_testNum);
+    }
+    //
+
+
+    if (mode == 1)
+    {
+        // print to screen
+        printf("\n\n\n");
+        printf("Actual bitrate: %f kbps\n", actualBitRate);
+        printf("Target bitrate: %f kbps\n", _bitRate);
+        printf("NACK: %s  ",(_nackEnabled)?"true":"false");
+        printf("FEC: %s \n",(_fecEnabled)?"true":"false");
+        printf("Packet loss applied = %f\n", _lossRate);
+        printf("%d frames were dropped, and total number of frames processed %d  \n",_numFramesDropped,_frameCnt);
+        printf("PSNR: %f \n", psnr.average);
+    }
+    TEST(psnr.average > 10); // low becuase of possible frame dropping (need to verify that OK for all packet loss values/ rates)
+}
+
+void MediaOptTest::TearDown() {
+  delete _rtp;
+  _rtp = NULL;
+  delete _outgoingTransport;
+  _outgoingTransport = NULL;
+  delete _dataCallback;
+  _dataCallback = NULL;
+  _log.close();
+  fclose(_sourceFile);
+  fclose(_decodedFile);
+  fclose(_actualSourceFile);
+}
diff --git a/src/modules/video_coding/main/test/media_opt_test.h b/src/modules/video_coding/main/test/media_opt_test.h
new file mode 100644
index 0000000..5f210e6
--- /dev/null
+++ b/src/modules/video_coding/main/test/media_opt_test.h
@@ -0,0 +1,98 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// VCM Media Optimization Test
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_MEDIA_OPT_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_MEDIA_OPT_TEST_H_
+
+
+#include <string>
+
+#include "receiver_tests.h"  // receive side callbacks
+#include "rtp_rtcp.h"
+#include "test_callbacks.h"
+#include "test_util.h"
+#include "video_coding.h"
+#include "video_source.h"
+
+// media optimization test
+// This test simulates a complete encode-decode cycle via the RTP module.
+// allows error resilience tests, packet loss tests, etc.
+// Does not test the media optimization deirectly, but via the VCM API only.
+// The test allows two modes:
+// 1 - Standard, basic settings, one run
+// 2 - Release test - iterates over a number of video sequences, bit rates, packet loss values ,etc.
+
+class MediaOptTest
+{
+public:
+    MediaOptTest(webrtc::VideoCodingModule* vcm,
+                 webrtc::TickTimeBase* clock);
+    ~MediaOptTest();
+
+    static int RunTest(int testNum, CmdArgs& args);
+    // perform encode-decode of an entire sequence
+    WebRtc_Word32 Perform();
+    // Set up for a single mode test
+    void Setup(int testType, CmdArgs& args);
+    // General set up - applicable for both modes
+    void GeneralSetup();
+    // Run release testing
+    void RTTest();
+    void TearDown();
+    // mode = 1; will print to screen, otherwise only to log file
+    void Print(int mode);
+
+private:
+
+    webrtc::VideoCodingModule*       _vcm;
+    webrtc::RtpRtcp*                 _rtp;
+    webrtc::RTPSendCompleteCallback* _outgoingTransport;
+    RtpDataCallback*                 _dataCallback;
+
+    webrtc::TickTimeBase*            _clock;
+    std::string                      _inname;
+    std::string                      _outname;
+    std::string                      _actualSourcename;
+    std::fstream                     _log;
+    FILE*                            _sourceFile;
+    FILE*                            _decodedFile;
+    FILE*                            _actualSourceFile;
+    FILE*                            _outputRes;
+    WebRtc_UWord16                   _width;
+    WebRtc_UWord16                   _height;
+    WebRtc_UWord32                   _lengthSourceFrame;
+    WebRtc_UWord32                   _timeStamp;
+    float                            _frameRate;
+    bool                             _nackEnabled;
+    bool                             _fecEnabled;
+    bool                             _nackFecEnabled;
+    WebRtc_UWord8                    _rttMS;
+    float                            _bitRate;
+    double                           _lossRate;
+    WebRtc_UWord32                   _renderDelayMs;
+    WebRtc_Word32                    _frameCnt;
+    float                            _sumEncBytes;
+    WebRtc_Word32                    _numFramesDropped;
+    std::string                      _codecName;
+    webrtc::VideoCodecType           _sendCodecType;
+    WebRtc_Word32                    _numberOfCores;
+
+    //for release test#2
+    FILE*                            _fpinp;
+    FILE*                            _fpout;
+    FILE*                            _fpout2;
+    int                              _testType;
+    int                              _testNum;
+    int                              _numParRuns;
+
+}; // end of MediaOptTest class definition
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_MEDIA_OPT_TEST_H_
diff --git a/src/modules/video_coding/main/test/mt_rx_tx_test.cc b/src/modules/video_coding/main/test/mt_rx_tx_test.cc
new file mode 100644
index 0000000..3eac939
--- /dev/null
+++ b/src/modules/video_coding/main/test/mt_rx_tx_test.cc
@@ -0,0 +1,359 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*************************************************
+ *
+ * Testing multi thread - receive and send sides
+ *
+ **************************************************/
+
+#include <string.h>
+
+#include "../source/event.h"
+#include "media_opt_test.h"
+#include "mt_test_common.h"
+#include "receiver_tests.h" // shared RTP state and receive side threads
+#include "rtp_rtcp.h"
+#include "test_macros.h"
+#include "test_util.h" // send side callback
+#include "thread_wrapper.h"
+#include "video_coding.h"
+
+using namespace webrtc;
+
+bool
+MainSenderThread(void* obj)
+{
+    SendSharedState* state = static_cast<SendSharedState*>(obj);
+    EventWrapper& waitEvent = *EventWrapper::Create();
+    // preparing a frame for encoding
+    VideoFrame sourceFrame;
+    WebRtc_Word32 width = state->_args.width;
+    WebRtc_Word32 height = state->_args.height;
+    float frameRate = state->_args.frameRate;
+    WebRtc_Word32 lengthSourceFrame  = 3*width*height/2;
+    sourceFrame.VerifyAndAllocate(lengthSourceFrame);
+    WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[lengthSourceFrame];
+
+    if (state->_sourceFile == NULL)
+    {
+        state->_sourceFile = fopen(state->_args.inputFile.c_str(), "rb");
+        if (state->_sourceFile == NULL)
+        {
+            printf ("Error when opening file \n");
+            delete &waitEvent;
+            delete [] tmpBuffer;
+            return false;
+        }
+    }
+    if (feof(state->_sourceFile) == 0)
+    {
+        TEST(fread(tmpBuffer, 1, lengthSourceFrame,state->_sourceFile) > 0 ||
+             feof(state->_sourceFile));
+        state->_frameCnt++;
+        sourceFrame.CopyFrame(lengthSourceFrame, tmpBuffer);
+        sourceFrame.SetHeight(height);
+        sourceFrame.SetWidth(width);
+        state->_timestamp += (WebRtc_UWord32)(9e4 / frameRate);
+        sourceFrame.SetTimeStamp(state->_timestamp);
+
+        WebRtc_Word32 ret = state->_vcm.AddVideoFrame(sourceFrame);
+        if (ret < 0)
+        {
+            printf("Add Frame error: %d\n", ret);
+            delete &waitEvent;
+            delete [] tmpBuffer;
+            return false;
+        }
+        waitEvent.Wait(33);
+    }
+
+    delete &waitEvent;
+    delete [] tmpBuffer;
+
+    return true;
+}
+
+bool
+IntSenderThread(void* obj)
+{
+    SendSharedState* state = static_cast<SendSharedState*>(obj);
+    state->_vcm.SetChannelParameters(1000,30,0);
+
+    return true;
+}
+
+
+int MTRxTxTest(CmdArgs& args)
+{
+    /* TEST SETTINGS */
+    std::string   inname = args.inputFile;
+    std::string outname;
+    if (args.outputFile == "")
+        outname = test::OutputPath() + "MTRxTxTest_decoded.yuv";
+    else
+        outname = args.outputFile;
+
+    WebRtc_UWord16  width = args.width;
+    WebRtc_UWord16  height = args.height;
+
+    float         frameRate = args.frameRate;
+    float         bitRate = args.bitRate;
+    WebRtc_Word32   numberOfCores = 1;
+
+    // error resilience/network
+    // Nack support is currently not implemented in this test.
+    bool          nackEnabled = false;
+    bool          fecEnabled = false;
+    WebRtc_UWord8   rttMS = 20;
+    float         lossRate = 0.0*255; // no packet loss
+    WebRtc_UWord32  renderDelayMs = 0;
+    WebRtc_UWord32  minPlayoutDelayMs = 0;
+
+    /* TEST SET-UP */
+
+    // Set up trace
+    Trace::CreateTrace();
+    Trace::SetTraceFile((test::OutputPath() + "MTRxTxTestTrace.txt").c_str());
+    Trace::SetLevelFilter(webrtc::kTraceAll);
+
+    FILE* sourceFile;
+    FILE* decodedFile;
+
+    if ((sourceFile = fopen(inname.c_str(), "rb")) == NULL)
+    {
+        printf("Cannot read file %s.\n", inname.c_str());
+        return -1;
+    }
+
+    if ((decodedFile = fopen(outname.c_str(), "wb")) == NULL)
+    {
+        printf("Cannot read file %s.\n", outname.c_str());
+        return -1;
+    }
+    TickTimeBase clock;
+    VideoCodingModule* vcm = VideoCodingModule::Create(1, &clock);
+    RtpDataCallback dataCallback(vcm);
+
+    RTPSendCompleteCallback* outgoingTransport =
+        new RTPSendCompleteCallback(&clock, "dump.rtp");
+
+    RtpRtcp::Configuration configuration;
+    configuration.id = 1;
+    configuration.audio = false;
+    configuration.incoming_data = &dataCallback;
+    configuration.outgoing_transport = outgoingTransport;
+    RtpRtcp* rtp = RtpRtcp::CreateRtpRtcp(configuration);
+
+    // registering codecs for the RTP module
+    VideoCodec videoCodec;
+    strncpy(videoCodec.plName, "ULPFEC", 32);
+    videoCodec.plType = VCM_ULPFEC_PAYLOAD_TYPE;
+    TEST(rtp->RegisterReceivePayload(videoCodec) == 0);
+
+    strncpy(videoCodec.plName, "RED", 32);
+    videoCodec.plType = VCM_RED_PAYLOAD_TYPE;
+    TEST(rtp->RegisterReceivePayload(videoCodec) == 0);
+
+    strncpy(videoCodec.plName, args.codecName.c_str(), 32);
+    videoCodec.plType = VCM_VP8_PAYLOAD_TYPE;
+    videoCodec.maxBitrate = 10000;
+    videoCodec.codecType = args.codecType;
+    TEST(rtp->RegisterReceivePayload(videoCodec) == 0);
+    TEST(rtp->RegisterSendPayload(videoCodec) == 0);
+
+    // inform RTP Module of error resilience features
+    TEST(rtp->SetGenericFECStatus(fecEnabled, VCM_RED_PAYLOAD_TYPE, VCM_ULPFEC_PAYLOAD_TYPE) == 0);
+
+    //VCM
+    if (vcm->InitializeReceiver() < 0)
+    {
+        return -1;
+    }
+    if (vcm->InitializeSender())
+    {
+        return -1;
+    }
+    // registering codecs for the VCM module
+    VideoCodec sendCodec;
+    vcm->InitializeSender();
+    WebRtc_Word32 numberOfCodecs = vcm->NumberOfCodecs();
+    if (numberOfCodecs < 1)
+    {
+        return -1;
+    }
+
+    if (vcm->Codec(args.codecType, &sendCodec) != 0)
+    {
+        // desired codec unavailable
+        printf("Codec not registered\n");
+        return -1;
+    }
+    // register codec
+    sendCodec.startBitrate = (int) bitRate;
+    sendCodec.height = height;
+    sendCodec.width = width;
+    sendCodec.maxFramerate = (WebRtc_UWord8)frameRate;
+    vcm->RegisterSendCodec(&sendCodec, numberOfCores, 1440);
+    vcm->RegisterReceiveCodec(&sendCodec, numberOfCores); // same settings for encode and decode
+
+    vcm->SetRenderDelay(renderDelayMs);
+    vcm->SetMinimumPlayoutDelay(minPlayoutDelayMs);
+
+    // Callback Settings
+
+    PacketRequester packetRequester(*rtp);
+    vcm->RegisterPacketRequestCallback(&packetRequester);
+
+    VCMRTPEncodeCompleteCallback* encodeCompleteCallback = new VCMRTPEncodeCompleteCallback(rtp);
+    vcm->RegisterTransportCallback(encodeCompleteCallback);
+    encodeCompleteCallback->SetCodecType(ConvertCodecType(args.codecName.c_str()));
+    encodeCompleteCallback->SetFrameDimensions(width, height);
+    // frame ready to be sent to network
+
+    VCMDecodeCompleteCallback receiveCallback(decodedFile);
+    vcm->RegisterReceiveCallback(&receiveCallback);
+
+    VideoProtectionCallback protectionCallback;
+    vcm->RegisterProtectionCallback(&protectionCallback);
+
+    outgoingTransport->SetLossPct(lossRate);
+    // Nack support is currently not implemented in this test
+    assert(nackEnabled == false);
+    vcm->SetVideoProtection(kProtectionNack, nackEnabled);
+    vcm->SetVideoProtection(kProtectionFEC, fecEnabled);
+
+    // inform RTP Module of error resilience features
+    FecProtectionParams delta_params = protectionCallback.DeltaFecParameters();
+    FecProtectionParams key_params = protectionCallback.KeyFecParameters();
+    rtp->SetFecParameters(&delta_params, &key_params);
+    rtp->SetNACKStatus(nackEnabled ? kNackRtcp : kNackOff);
+
+    vcm->SetChannelParameters((WebRtc_UWord32) bitRate,
+                              (WebRtc_UWord8) lossRate, rttMS);
+
+    SharedRTPState mtState(*vcm, *rtp); // receive side
+    SendSharedState mtSendState(*vcm, *rtp, args); // send side
+
+    /*START TEST*/
+
+    // Create and start all threads
+    // send side threads
+    ThreadWrapper* mainSenderThread = ThreadWrapper::CreateThread(MainSenderThread,
+            &mtSendState, kNormalPriority, "MainSenderThread");
+    ThreadWrapper* intSenderThread = ThreadWrapper::CreateThread(IntSenderThread,
+            &mtSendState, kNormalPriority, "IntThread");
+
+    if (mainSenderThread != NULL)
+    {
+        unsigned int tid;
+        mainSenderThread->Start(tid);
+    }
+    else
+    {
+        printf("Unable to start main sender thread\n");
+        return -1;
+    }
+
+    if (intSenderThread != NULL)
+    {
+        unsigned int tid;
+        intSenderThread->Start(tid);
+    }
+    else
+    {
+        printf("Unable to start sender interference thread\n");
+        return -1;
+    }
+
+    // Receive side threads
+    ThreadWrapper* processingThread = ThreadWrapper::CreateThread(ProcessingThread,
+            &mtState, kNormalPriority, "ProcessingThread");
+    ThreadWrapper* decodeThread = ThreadWrapper::CreateThread(DecodeThread,
+            &mtState, kNormalPriority, "DecodeThread");
+
+    if (processingThread != NULL)
+    {
+        unsigned int tid;
+        processingThread->Start(tid);
+    }
+    else
+    {
+        printf("Unable to start processing thread\n");
+        return -1;
+    }
+
+    if (decodeThread != NULL)
+    {
+        unsigned int tid;
+        decodeThread->Start(tid);
+    }
+    else
+    {
+        printf("Unable to start decode thread\n");
+        return -1;
+    }
+
+    EventWrapper& waitEvent = *EventWrapper::Create();
+
+    // Decode for 10 seconds and then tear down and exit.
+    waitEvent.Wait(30000);
+
+    // Tear down
+
+    while (!mainSenderThread->Stop())
+    {
+        ;
+    }
+
+    while (!intSenderThread->Stop())
+    {
+        ;
+    }
+
+
+    while (!processingThread->Stop())
+    {
+        ;
+    }
+
+    while (!decodeThread->Stop())
+    {
+        ;
+    }
+
+    printf("\nVCM MT RX/TX Test: \n\n%i tests completed\n", vcmMacrosTests);
+    if (vcmMacrosErrors > 0)
+    {
+        printf("%i FAILED\n\n", vcmMacrosErrors);
+    }
+    else
+    {
+        printf("ALL PASSED\n\n");
+    }
+
+    delete &waitEvent;
+    delete mainSenderThread;
+    delete intSenderThread;
+    delete processingThread;
+    delete decodeThread;
+    delete encodeCompleteCallback;
+    delete outgoingTransport;
+    VideoCodingModule::Destroy(vcm);
+    delete rtp;
+    rtp = NULL;
+    vcm = NULL;
+    Trace::ReturnTrace();
+    fclose(decodedFile);
+    printf("Multi-Thread test Done: View output file \n");
+    return 0;
+
+}
+
diff --git a/src/modules/video_coding/main/test/mt_test_common.cc b/src/modules/video_coding/main/test/mt_test_common.cc
new file mode 100644
index 0000000..ece0d9c
--- /dev/null
+++ b/src/modules/video_coding/main/test/mt_test_common.cc
@@ -0,0 +1,135 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "mt_test_common.h"
+
+#include <cmath>
+
+#include "modules/video_coding/main/source/tick_time_base.h"
+#include "rtp_dump.h"
+
+namespace webrtc {
+
+TransportCallback::TransportCallback(TickTimeBase* clock, const char* filename)
+    : RTPSendCompleteCallback(clock, filename) {
+}
+
+TransportCallback::~TransportCallback()
+{
+    //
+}
+
+int
+TransportCallback::SendPacket(int channel, const void *data, int len)
+{
+    _sendCount++;
+    _totalSentLength += len;
+
+    if (_rtpDump != NULL)
+    {
+        if (_rtpDump->DumpPacket((const WebRtc_UWord8*)data, len) != 0)
+        {
+            return -1;
+        }
+    }
+
+    bool transmitPacket = true;
+    // Off-line tests, don't drop first Key frame (approx.)
+    if (_sendCount > 20)
+    {
+        transmitPacket = PacketLoss();
+    }
+
+    TickTimeBase clock;
+    int64_t now = clock.MillisecondTimestamp();
+    // Insert outgoing packet into list
+    if (transmitPacket)
+    {
+        RtpPacket* newPacket = new RtpPacket();
+        memcpy(newPacket->data, data, len);
+        newPacket->length = len;
+        // Simulate receive time = network delay + packet jitter
+        // simulated as a Normal distribution random variable with
+        // mean = networkDelay and variance = jitterVar
+        WebRtc_Word32
+        simulatedDelay = (WebRtc_Word32)NormalDist(_networkDelayMs,
+                                                   sqrt(_jitterVar));
+        newPacket->receiveTime = now + simulatedDelay;
+        _rtpPackets.push_back(newPacket);
+    }
+    return 0;
+}
+
+int
+TransportCallback::TransportPackets()
+{
+    // Are we ready to send packets to the receiver?
+    RtpPacket* packet = NULL;
+    TickTimeBase clock;
+    int64_t now = clock.MillisecondTimestamp();
+
+    while (!_rtpPackets.empty())
+    {
+        // Take first packet in list
+        packet = _rtpPackets.front();
+        WebRtc_Word64 timeToReceive = packet->receiveTime - now;
+        if (timeToReceive > 0)
+        {
+            // No available packets to send
+            break;
+        }
+
+        _rtpPackets.pop_front();
+        // Send to receive side
+        if (_rtp->IncomingPacket((const WebRtc_UWord8*)packet->data,
+                                     packet->length) < 0)
+        {
+            delete packet;
+            packet = NULL;
+            // Will return an error after the first packet that goes wrong
+            return -1;
+        }
+        delete packet;
+        packet = NULL;
+    }
+    return 0; // OK
+}
+
+
+
+bool VCMProcessingThread(void* obj)
+{
+    SharedRTPState* state = static_cast<SharedRTPState*>(obj);
+    if (state->_vcm.TimeUntilNextProcess() <= 0)
+    {
+        if (state->_vcm.Process() < 0)
+        {
+            return false;
+        }
+    }
+    return true;
+}
+
+
+bool VCMDecodeThread(void* obj)
+{
+    SharedRTPState* state = static_cast<SharedRTPState*>(obj);
+    state->_vcm.Decode();
+    return true;
+}
+
+bool TransportThread(void *obj)
+{
+    SharedTransportState* state = static_cast<SharedTransportState*>(obj);
+    state->_transport.TransportPackets();
+    return true;
+}
+
+}  // namespace webrtc
diff --git a/src/modules/video_coding/main/test/mt_test_common.h b/src/modules/video_coding/main/test/mt_test_common.h
new file mode 100644
index 0000000..c17d269
--- /dev/null
+++ b/src/modules/video_coding/main/test/mt_test_common.h
@@ -0,0 +1,87 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * Common multi-thread functionality across video coding module tests
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_MT_TEST_COMMON_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_MT_TEST_COMMON_H_
+
+#include "rtp_rtcp.h"
+#include "test_callbacks.h"
+#include "test_util.h"
+#include "video_coding.h"
+
+namespace webrtc {
+
+class SendSharedState
+{
+public:
+    SendSharedState(webrtc::VideoCodingModule& vcm, webrtc::RtpRtcp& rtp,
+            CmdArgs args) :
+            _vcm(vcm),
+            _rtp(rtp),
+            _args(args),
+            _sourceFile(NULL),
+            _frameCnt(0),
+            _timestamp(0) {}
+
+    webrtc::VideoCodingModule&  _vcm;
+    webrtc::RtpRtcp&            _rtp;
+    CmdArgs                     _args;
+    FILE*                       _sourceFile;
+    WebRtc_Word32               _frameCnt;
+    WebRtc_Word32               _timestamp;
+};
+
+// MT implementation of the RTPSendCompleteCallback (Transport)
+class TransportCallback:public RTPSendCompleteCallback
+{
+ public:
+    // constructor input: (receive side) rtp module to send encoded data to
+    TransportCallback(TickTimeBase* clock, const char* filename = NULL);
+    virtual ~TransportCallback();
+    // Add packets to list
+    // Incorporate network conditions - delay and packet loss
+    // Actual transmission will occur on a separate thread
+    int SendPacket(int channel, const void *data, int len);
+    // Send to the receiver packets which are ready to be submitted
+    int TransportPackets();
+};
+
+class SharedRTPState
+{
+public:
+    SharedRTPState(webrtc::VideoCodingModule& vcm, webrtc::RtpRtcp& rtp) :
+        _vcm(vcm),
+        _rtp(rtp) {}
+    webrtc::VideoCodingModule&  _vcm;
+    webrtc::RtpRtcp&            _rtp;
+};
+
+
+class SharedTransportState
+{
+public:
+    SharedTransportState(webrtc::RtpRtcp& rtp, TransportCallback& transport):
+        _rtp(rtp),
+        _transport(transport) {}
+    webrtc::RtpRtcp&            _rtp;
+    TransportCallback&          _transport;
+};
+
+bool VCMProcessingThread(void* obj);
+bool VCMDecodeThread(void* obj);
+bool TransportThread(void *obj);
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_CODING_TEST_MT_TEST_COMMON_H_
diff --git a/src/modules/video_coding/main/test/normal_test.cc b/src/modules/video_coding/main/test/normal_test.cc
new file mode 100644
index 0000000..bd37766
--- /dev/null
+++ b/src/modules/video_coding/main/test/normal_test.cc
@@ -0,0 +1,404 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "normal_test.h"
+
+#include <assert.h>
+#include <iostream>
+#include <sstream>
+#include <time.h>
+
+#include "../source/event.h"
+#include "common_types.h"
+#include "modules/video_coding/main/source/mock/fake_tick_time.h"
+#include "test_callbacks.h"
+#include "test_macros.h"
+#include "test_util.h"
+#include "trace.h"
+#include "testsupport/metrics/video_metrics.h"
+
+using namespace webrtc;
+
+int NormalTest::RunTest(CmdArgs& args)
+{
+#if defined(EVENT_DEBUG)
+    printf("SIMULATION TIME\n");
+    FakeTickTime clock(0);
+#else
+    printf("REAL-TIME\n");
+    TickTimeBase clock;
+#endif
+    Trace::CreateTrace();
+    Trace::SetTraceFile(
+        (test::OutputPath() + "VCMNormalTestTrace.txt").c_str());
+    Trace::SetLevelFilter(webrtc::kTraceAll);
+    VideoCodingModule* vcm = VideoCodingModule::Create(1, &clock);
+    NormalTest VCMNTest(vcm, &clock);
+    VCMNTest.Perform(args);
+    VideoCodingModule::Destroy(vcm);
+    Trace::ReturnTrace();
+    return 0;
+}
+
+////////////////
+// Callback Implementation
+//////////////
+
+VCMNTEncodeCompleteCallback::VCMNTEncodeCompleteCallback(FILE* encodedFile,
+                                                         NormalTest& test):
+    _encodedFile(encodedFile),
+    _encodedBytes(0),
+    _skipCnt(0),
+    _VCMReceiver(NULL),
+    _seqNo(0),
+    _test(test)
+{
+    //
+}
+VCMNTEncodeCompleteCallback::~VCMNTEncodeCompleteCallback()
+{
+}
+
+void
+VCMNTEncodeCompleteCallback::RegisterTransportCallback(VCMPacketizationCallback* transport)
+{
+}
+
+WebRtc_Word32
+VCMNTEncodeCompleteCallback::SendData(
+        const FrameType frameType,
+        const WebRtc_UWord8  payloadType,
+        const WebRtc_UWord32 timeStamp,
+        int64_t capture_time_ms,
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord32 payloadSize,
+        const RTPFragmentationHeader& /*fragmentationHeader*/,
+        const webrtc::RTPVideoHeader* videoHdr)
+
+{
+    // will call the VCMReceiver input packet
+    _frameType = frameType;
+    // writing encodedData into file
+    if (fwrite(payloadData, 1, payloadSize, _encodedFile) !=  payloadSize) {
+      return -1;
+    }
+    WebRtcRTPHeader rtpInfo;
+    rtpInfo.header.markerBit = true;
+    rtpInfo.type.Video.width = 0;
+    rtpInfo.type.Video.height = 0;
+    switch (_test.VideoType())
+    {
+    case kVideoCodecVP8:
+        rtpInfo.type.Video.codec = kRTPVideoVP8;
+        rtpInfo.type.Video.codecHeader.VP8.InitRTPVideoHeaderVP8();
+        rtpInfo.type.Video.codecHeader.VP8.nonReference =
+            videoHdr->codecHeader.VP8.nonReference;
+        rtpInfo.type.Video.codecHeader.VP8.pictureId =
+            videoHdr->codecHeader.VP8.pictureId;
+        break;
+    case kVideoCodecI420:
+        rtpInfo.type.Video.codec = kRTPVideoI420;
+        break;
+    default:
+        assert(false);
+        return -1;
+    }
+    rtpInfo.header.payloadType = payloadType;
+    rtpInfo.header.sequenceNumber = _seqNo++;
+    rtpInfo.header.ssrc = 0;
+    rtpInfo.header.timestamp = timeStamp;
+    rtpInfo.frameType = frameType;
+    rtpInfo.type.Video.isFirstPacket = true;
+    // Size should also be received from that table, since the payload type
+    // defines the size.
+
+    _encodedBytes += payloadSize;
+    if (payloadSize < 20)
+    {
+        _skipCnt++;
+    }
+    _VCMReceiver->IncomingPacket(payloadData, payloadSize, rtpInfo);
+    return 0;
+}
+void
+VCMNTEncodeCompleteCallback::RegisterReceiverVCM(VideoCodingModule *vcm)
+{
+    _VCMReceiver = vcm;
+    return;
+}
+ WebRtc_Word32
+VCMNTEncodeCompleteCallback::EncodedBytes()
+{
+    return _encodedBytes;
+}
+
+WebRtc_UWord32
+VCMNTEncodeCompleteCallback::SkipCnt()
+{
+    return _skipCnt;
+}
+
+// Decoded Frame Callback Implementation
+VCMNTDecodeCompleCallback::~VCMNTDecodeCompleCallback()
+{
+  if (_decodedFile)
+    fclose(_decodedFile);
+}
+ WebRtc_Word32
+VCMNTDecodeCompleCallback::FrameToRender(webrtc::VideoFrame& videoFrame)
+{
+    if (videoFrame.Width() != _currentWidth ||
+        videoFrame.Height() != _currentHeight)
+    {
+        _currentWidth = videoFrame.Width();
+        _currentHeight = videoFrame.Height();
+        if (_decodedFile != NULL)
+        {
+            fclose(_decodedFile);
+            _decodedFile = NULL;
+        }
+        _decodedFile = fopen(_outname.c_str(), "wb");
+    }
+    if (fwrite(videoFrame.Buffer(), 1, videoFrame.Length(),
+               _decodedFile) !=  videoFrame.Length()) {
+      return -1;
+    }
+    _decodedBytes+= videoFrame.Length();
+    return VCM_OK;
+}
+
+ WebRtc_Word32
+VCMNTDecodeCompleCallback::DecodedBytes()
+{
+    return _decodedBytes;
+}
+
+ //VCM Normal Test Class implementation
+
+NormalTest::NormalTest(VideoCodingModule* vcm, TickTimeBase* clock)
+:
+_clock(clock),
+_vcm(vcm),
+_sumEncBytes(0),
+_timeStamp(0),
+_totalEncodeTime(0),
+_totalDecodeTime(0),
+_decodeCompleteTime(0),
+_encodeCompleteTime(0),
+_totalEncodePipeTime(0),
+_totalDecodePipeTime(0),
+_frameCnt(0),
+_encFrameCnt(0),
+_decFrameCnt(0)
+{
+    //
+}
+
+NormalTest::~NormalTest()
+{
+    //
+}
+void
+NormalTest::Setup(CmdArgs& args)
+{
+    _inname = args.inputFile;
+    _encodedName = test::OutputPath() + "encoded_normaltest.yuv";
+    _width = args.width;
+    _height = args.height;
+    _frameRate = args.frameRate;
+    _bitRate = args.bitRate;
+    if (args.outputFile == "")
+    {
+        std::ostringstream filename;
+        filename << test::OutputPath() << "NormalTest_" <<
+            _width << "x" << _height << "_" << _frameRate << "Hz_P420.yuv";
+        _outname = filename.str();
+    }
+    else
+    {
+        _outname = args.outputFile;
+    }
+    _lengthSourceFrame  = 3*_width*_height/2;
+    _videoType = args.codecType;
+
+    if ((_sourceFile = fopen(_inname.c_str(), "rb")) == NULL)
+    {
+        printf("Cannot read file %s.\n", _inname.c_str());
+        exit(1);
+    }
+    if ((_encodedFile = fopen(_encodedName.c_str(), "wb")) == NULL)
+    {
+        printf("Cannot write encoded file.\n");
+        exit(1);
+    }
+
+    _log.open((test::OutputPath() + "TestLog.txt").c_str(),
+              std::fstream::out | std::fstream::app);
+    return;
+}
+
+WebRtc_Word32
+NormalTest::Perform(CmdArgs& args)
+{
+    Setup(args);
+    EventWrapper* waitEvent = EventWrapper::Create();
+    VideoCodec _sendCodec;//, _receiveCodec; // tmp - sendCodecd used as receive codec
+    _vcm->InitializeReceiver();
+    _vcm->InitializeSender();
+    TEST(VideoCodingModule::Codec(_videoType, &_sendCodec) == VCM_OK);
+    _sendCodec.startBitrate = (int)_bitRate; // should be later on changed via the API
+    _sendCodec.width = static_cast<WebRtc_UWord16>(_width);
+    _sendCodec.height = static_cast<WebRtc_UWord16>(_height);
+    _sendCodec.maxFramerate = _frameRate;
+    TEST(_vcm->RegisterSendCodec(&_sendCodec, 4, 1400) == VCM_OK);// will also set and init the desired codec
+    // register a decoder (same codec for decoder and encoder )
+    TEST(_vcm->RegisterReceiveCodec(&_sendCodec, 1) == VCM_OK);
+    /* Callback Settings */
+    VCMNTDecodeCompleCallback _decodeCallback(_outname);
+    _vcm->RegisterReceiveCallback(&_decodeCallback);
+    VCMNTEncodeCompleteCallback _encodeCompleteCallback(_encodedFile, *this);
+    _vcm->RegisterTransportCallback(&_encodeCompleteCallback);
+    // encode and decode with the same vcm
+    _encodeCompleteCallback.RegisterReceiverVCM(_vcm);
+    ///////////////////////
+    /// Start Test
+    ///////////////////////
+    VideoFrame sourceFrame;
+    sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
+    WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[_lengthSourceFrame];
+    double startTime = clock()/(double)CLOCKS_PER_SEC;
+    _vcm->SetChannelParameters((WebRtc_UWord32)_bitRate, 0, 0);
+
+    SendStatsTest sendStats;
+    sendStats.SetTargetFrameRate(static_cast<WebRtc_UWord32>(_frameRate));
+    _vcm->RegisterSendStatisticsCallback(&sendStats);
+
+    while (feof(_sourceFile) == 0)
+    {
+#if !defined(EVENT_DEBUG)
+        WebRtc_Word64 processStartTime = _clock->MillisecondTimestamp();
+#endif
+        TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0 ||
+             feof(_sourceFile));
+        _frameCnt++;
+        sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
+        sourceFrame.SetHeight(_height);
+        sourceFrame.SetWidth(_width);
+        _timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(_sendCodec.maxFramerate));
+        sourceFrame.SetTimeStamp(_timeStamp);
+        _encodeTimes[int(sourceFrame.TimeStamp())] = clock()/(double)CLOCKS_PER_SEC;
+        WebRtc_Word32 ret = _vcm->AddVideoFrame(sourceFrame);
+        double encodeTime = clock()/(double)CLOCKS_PER_SEC - _encodeTimes[int(sourceFrame.TimeStamp())];
+        _totalEncodeTime += encodeTime;
+        if (ret < 0)
+        {
+            printf("Error in AddFrame: %d\n", ret);
+            //exit(1);
+        }
+        _decodeTimes[int(sourceFrame.TimeStamp())] = clock()/(double)CLOCKS_PER_SEC; // same timestamp value for encode and decode
+        ret = _vcm->Decode();
+        _totalDecodeTime += clock()/(double)CLOCKS_PER_SEC - _decodeTimes[int(sourceFrame.TimeStamp())];
+        if (ret < 0)
+        {
+            printf("Error in Decode: %d\n", ret);
+            //exit(1);
+        }
+        if (_vcm->TimeUntilNextProcess() <= 0)
+        {
+            _vcm->Process();
+        }
+        WebRtc_UWord32 framePeriod = static_cast<WebRtc_UWord32>(1000.0f/static_cast<float>(_sendCodec.maxFramerate) + 0.5f);
+#if defined(EVENT_DEBUG)
+        static_cast<FakeTickTime*>(_clock)->IncrementDebugClock(framePeriod);
+#else
+        WebRtc_Word64 timeSpent = _clock->MillisecondTimestamp() - processStartTime;
+        if (timeSpent < framePeriod)
+        {
+            waitEvent->Wait(framePeriod - timeSpent);
+        }
+#endif
+    }
+    double endTime = clock()/(double)CLOCKS_PER_SEC;
+    _testTotalTime = endTime - startTime;
+    _sumEncBytes = _encodeCompleteCallback.EncodedBytes();
+
+    delete [] tmpBuffer;
+    delete waitEvent;
+    Teardown();
+    Print();
+    return 0;
+}
+
+void
+NormalTest::FrameEncoded(WebRtc_UWord32 timeStamp)
+{
+    _encodeCompleteTime = clock()/(double)CLOCKS_PER_SEC;
+    _encFrameCnt++;
+    _totalEncodePipeTime += _encodeCompleteTime - _encodeTimes[int(timeStamp)];
+
+}
+
+void
+NormalTest::FrameDecoded(WebRtc_UWord32 timeStamp)
+{
+    _decodeCompleteTime = clock()/(double)CLOCKS_PER_SEC;
+    _decFrameCnt++;
+    _totalDecodePipeTime += _decodeCompleteTime - _decodeTimes[timeStamp];
+}
+
+void
+NormalTest::Print()
+{
+    std::cout << "Normal Test Completed!" << std::endl;
+    (_log) << "Normal Test Completed!" << std::endl;
+    (_log) << "Input file: " << _inname << std::endl;
+    (_log) << "Output file: " << _outname << std::endl;
+    (_log) << "Total run time: " << _testTotalTime << std::endl;
+    printf("Total run time: %f s \n", _testTotalTime);
+    double ActualBitRate =  8.0 *( _sumEncBytes / (_frameCnt / _frameRate));
+    double actualBitRate = ActualBitRate / 1000.0;
+    double avgEncTime = _totalEncodeTime / _frameCnt;
+    double avgDecTime = _totalDecodeTime / _frameCnt;
+    webrtc::test::QualityMetricsResult psnr, ssim;
+    I420PSNRFromFiles(_inname.c_str(), _outname.c_str(), _width, _height,
+                      &psnr);
+    I420SSIMFromFiles(_inname.c_str(), _outname.c_str(), _width, _height,
+                      &ssim);
+    printf("Actual bitrate: %f kbps\n", actualBitRate);
+    printf("Target bitrate: %f kbps\n", _bitRate);
+    ( _log) << "Actual bitrate: " << actualBitRate<< " kbps\tTarget: " << _bitRate << " kbps" << std::endl;
+    printf("Average encode time: %f s\n", avgEncTime);
+    ( _log) << "Average encode time: " << avgEncTime << " s" << std::endl;
+    printf("Average decode time: %f s\n", avgDecTime);
+    ( _log) << "Average decode time: " << avgDecTime << " s" << std::endl;
+    printf("PSNR: %f \n", psnr.average);
+    ( _log) << "PSNR: " << psnr.average << std::endl;
+    printf("SSIM: %f \n", ssim.average);
+    ( _log) << "SSIM: " << ssim.average << std::endl;
+    (_log) << std::endl;
+
+    printf("\nVCM Normal Test: \n\n%i tests completed\n", vcmMacrosTests);
+    if (vcmMacrosErrors > 0)
+    {
+        printf("%i FAILED\n\n", vcmMacrosErrors);
+    }
+    else
+    {
+        printf("ALL PASSED\n\n");
+    }
+}
+void
+NormalTest::Teardown()
+{
+    //_log.close();
+    fclose(_sourceFile);
+    fclose(_encodedFile);
+    return;
+}
diff --git a/src/modules/video_coding/main/test/normal_test.h b/src/modules/video_coding/main/test/normal_test.h
new file mode 100644
index 0000000..982fba4
--- /dev/null
+++ b/src/modules/video_coding/main/test/normal_test.h
@@ -0,0 +1,143 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_NORMAL_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_NORMAL_TEST_H_
+
+#include "video_coding.h"
+#include "test_util.h"
+
+#include <map>
+
+class NormalTest;
+
+//Send Side - Packetization callback - will create and send a packet to the VCMReceiver
+class VCMNTEncodeCompleteCallback : public webrtc::VCMPacketizationCallback
+{
+public:
+    // constructor input: file in which encoded data will be written
+    VCMNTEncodeCompleteCallback(FILE* encodedFile, NormalTest& test);
+    virtual ~VCMNTEncodeCompleteCallback();
+    // Register transport callback
+    void RegisterTransportCallback(webrtc::VCMPacketizationCallback* transport);
+    // process encoded data received from the encoder, pass stream to the VCMReceiver module
+    WebRtc_Word32 SendData(const webrtc::FrameType frameType,
+                           const WebRtc_UWord8 payloadType,
+                           const WebRtc_UWord32 timeStamp,
+                           int64_t capture_time_ms,
+                           const WebRtc_UWord8* payloadData,
+                           const WebRtc_UWord32 payloadSize,
+                           const webrtc::RTPFragmentationHeader& fragmentationHeader,
+                           const webrtc::RTPVideoHeader* videoHdr);
+
+    // Register exisitng VCM. Currently - encode and decode with the same vcm module.
+    void RegisterReceiverVCM(webrtc::VideoCodingModule *vcm);
+    // Return sum of encoded data (all frames in the sequence)
+    WebRtc_Word32 EncodedBytes();
+    // return number of encoder-skipped frames
+    WebRtc_UWord32 SkipCnt();;
+    // conversion function for payload type (needed for the callback function)
+//    RTPVideoVideoCodecTypes ConvertPayloadType(WebRtc_UWord8 payloadType);
+
+private:
+    FILE*                       _encodedFile;
+    WebRtc_UWord32              _encodedBytes;
+    WebRtc_UWord32              _skipCnt;
+    webrtc::VideoCodingModule*  _VCMReceiver;
+    webrtc::FrameType           _frameType;
+    WebRtc_UWord16              _seqNo;
+    NormalTest&                 _test;
+}; // end of VCMEncodeCompleteCallback
+
+class VCMNTDecodeCompleCallback: public webrtc::VCMReceiveCallback
+{
+public:
+    VCMNTDecodeCompleCallback(std::string outname): // or should it get a name?
+        _decodedFile(NULL),
+        _outname(outname),
+        _decodedBytes(0),
+        _currentWidth(0),
+        _currentHeight(0) {}
+    virtual ~VCMNTDecodeCompleCallback();
+    void SetUserReceiveCallback(webrtc::VCMReceiveCallback* receiveCallback);
+    // will write decoded frame into file
+    WebRtc_Word32 FrameToRender(webrtc::VideoFrame& videoFrame);
+    WebRtc_Word32 DecodedBytes();
+private:
+    FILE*             _decodedFile;
+    std::string       _outname;
+    WebRtc_UWord32    _decodedBytes;
+    WebRtc_UWord32    _currentWidth;
+    WebRtc_UWord32    _currentHeight;
+
+}; // end of VCMDecodeCompleCallback class
+
+
+class NormalTest
+{
+public:
+    NormalTest(webrtc::VideoCodingModule* vcm,
+               webrtc::TickTimeBase* clock);
+    ~NormalTest();
+    static int RunTest(CmdArgs& args);
+    WebRtc_Word32    Perform(CmdArgs& args);
+    // option:: turn into private and call from perform
+    WebRtc_UWord32   Width() const { return _width; };
+    WebRtc_UWord32   Height() const { return _height; };
+    webrtc::VideoCodecType VideoType() const { return _videoType; };
+
+
+protected:
+    // test setup - open files, general initializations
+    void            Setup(CmdArgs& args);
+   // close open files, delete used memory
+    void            Teardown();
+    // print results to std output and to log file
+    void            Print();
+    // calculating pipeline delay, and encoding time
+    void            FrameEncoded(WebRtc_UWord32 timeStamp);
+    // calculating pipeline delay, and decoding time
+    void            FrameDecoded(WebRtc_UWord32 timeStamp);
+
+    webrtc::TickTimeBase*            _clock;
+    webrtc::VideoCodingModule*       _vcm;
+    webrtc::VideoCodec               _sendCodec;
+    webrtc::VideoCodec               _receiveCodec;
+    std::string                      _inname;
+    std::string                      _outname;
+    std::string                      _encodedName;
+    WebRtc_Word32                    _sumEncBytes;
+    FILE*                            _sourceFile;
+    FILE*                            _decodedFile;
+    FILE*                            _encodedFile;
+    std::fstream                     _log;
+    WebRtc_UWord32                   _width;
+    WebRtc_UWord32                   _height;
+    float                            _frameRate;
+    float                            _bitRate;
+    WebRtc_UWord32                   _lengthSourceFrame;
+    WebRtc_UWord32                   _timeStamp;
+    webrtc::VideoCodecType           _videoType;
+    double                           _totalEncodeTime;
+    double                           _totalDecodeTime;
+    double                           _decodeCompleteTime;
+    double                           _encodeCompleteTime;
+    double                           _totalEncodePipeTime;
+    double                           _totalDecodePipeTime;
+    double                           _testTotalTime;
+    std::map<int, double>            _encodeTimes;
+    std::map<int, double>            _decodeTimes;
+    WebRtc_Word32                    _frameCnt;
+    WebRtc_Word32                    _encFrameCnt;
+    WebRtc_Word32                    _decFrameCnt;
+
+}; // end of VCMNormalTestClass
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_NORMAL_TEST_H_
diff --git a/src/modules/video_coding/main/test/plotJitterEstimate.m b/src/modules/video_coding/main/test/plotJitterEstimate.m
new file mode 100644
index 0000000..d6185f5
--- /dev/null
+++ b/src/modules/video_coding/main/test/plotJitterEstimate.m
@@ -0,0 +1,52 @@
+function plotJitterEstimate(filename)
+
+[timestamps, framedata, slopes, randJitters, framestats, timetable, filtjitter, rtt, rttStatsVec] = jitterBufferTraceParser(filename);
+
+x = 1:size(framestats, 1);
+%figure(2);
+subfigure(3, 2, 1);
+hold on;
+plot(x, slopes(x, 1).*(framestats(x, 1) - framestats(x, 2)) + 3*sqrt(randJitters(x,2)), 'b'); title('Estimate ms');
+plot(x, filtjitter, 'r');
+plot(x, slopes(x, 1).*(framestats(x, 1) - framestats(x, 2)), 'g');
+subfigure(3, 2, 2);
+%subplot(211);
+plot(x, slopes(x, 1)); title('Line slope');
+%subplot(212);
+%plot(x, slopes(x, 2)); title('Line offset');
+subfigure(3, 2, 3); hold on;
+plot(x, framestats); plot(x, framedata(x, 1)); title('frame size and average frame size');
+subfigure(3, 2, 4);
+plot(x, framedata(x, 2)); title('Delay');
+subfigure(3, 2, 5);
+hold on;
+plot(x, randJitters(x,1),'r');
+plot(x, randJitters(x,2)); title('Random jitter');
+
+subfigure(3, 2, 6);
+delays = framedata(:,2);
+dL = [0; framedata(2:end, 1) - framedata(1:end-1, 1)];
+hold on;
+plot(dL, delays, '.');
+s = [min(dL) max(dL)];
+plot(s, slopes(end, 1)*s + slopes(end, 2), 'g');
+plot(s, slopes(end, 1)*s + slopes(end, 2) + 3*sqrt(randJitters(end,2)), 'r');
+plot(s, slopes(end, 1)*s + slopes(end, 2) - 3*sqrt(randJitters(end,2)), 'r');
+title('theta(1)*x+theta(2), (dT-dTS)/dL');
+if sum(size(rttStatsVec)) > 0
+    figure; hold on; 
+    rttNstdDevsDrift = 3.5;
+    rttNstdDevsJump = 2.5;
+    rttSamples = rttStatsVec(:, 1);
+    rttAvgs = rttStatsVec(:, 2);
+    rttStdDevs = sqrt(rttStatsVec(:, 3));
+    rttMax = rttStatsVec(:, 4);
+    plot(rttSamples, 'ko-');
+    plot(rttAvgs, 'g');
+    plot(rttAvgs + rttNstdDevsDrift*rttStdDevs, 'b--'); 
+    plot(rttAvgs + rttNstdDevsJump*rttStdDevs, 'b'); 
+    plot(rttAvgs - rttNstdDevsJump*rttStdDevs, 'b');
+    plot(rttMax, 'r');
+    %plot(driftRestarts*max(maxRtts), '.');
+    %plot(jumpRestarts*max(maxRtts), '.');
+end
\ No newline at end of file
diff --git a/src/modules/video_coding/main/test/plotReceiveTrace.m b/src/modules/video_coding/main/test/plotReceiveTrace.m
new file mode 100644
index 0000000..4d262aa
--- /dev/null
+++ b/src/modules/video_coding/main/test/plotReceiveTrace.m
@@ -0,0 +1,213 @@
+function [t, TS] = plotReceiveTrace(filename, flat)
+fid=fopen(filename);
+%DEBUG     ; ( 8:32:33:375 |    0)        VIDEO:1          ;      5260; First packet of frame 1869537938
+%DEBUG     ; ( 8:32:33:375 |    0) VIDEO CODING:1          ;      5260; Decoding timestamp 1869534934
+%DEBUG     ; ( 8:32:33:375 |    0)        VIDEO:1          ;      5260; Render frame 1869534934 at 20772610
+%DEBUG     ; ( 8:32:33:375 |    0) VIDEO CODING:-1         ;      5260; Frame decoded: timeStamp=1870511259 decTime=0 maxDecTime=0, at 19965
+%DEBUG     ; ( 7:59:42:500 |    0)        VIDEO:-1         ;      2500; Received complete frame timestamp 1870514263 frame type 1 frame size 7862 at time 19965, jitter estimate was 130
+%DEBUG     ; ( 8: 5:51:774 |    0)        VIDEO:-1         ;      3968; ExtrapolateLocalTime(1870967878)=24971 ms
+
+if nargin == 1
+    flat = 0;
+end
+line = fgetl(fid);
+estimatedArrivalTime = [];
+packetTime = [];
+firstPacketTime = [];
+decodeTime = [];
+decodeCompleteTime = [];
+renderTime = [];
+completeTime = [];
+while ischar(line)%line ~= -1
+    if length(line) == 0
+        line = fgetl(fid);
+        continue;
+    end
+    % Parse the trace line header
+    [tempres, count] = sscanf(line, 'DEBUG     ; (%u:%u:%u:%u |%*lu)%13c:');
+    if count < 5
+        line = fgetl(fid);
+        continue;
+    end
+    hr=tempres(1);
+    mn=tempres(2);
+    sec=tempres(3);
+    ms=tempres(4);
+    timeInMs=hr*60*60*1000 + mn*60*1000 + sec*1000 + ms;
+    label = tempres(5:end);
+    I = find(label ~= 32); 
+    label = label(I(1):end); % remove white spaces
+    if ~strncmp(char(label), 'VIDEO', 5) & ~strncmp(char(label), 'VIDEO CODING', 12)
+        line = fgetl(fid);
+        continue;
+    end
+    message = line(72:end);
+    
+    % Parse message
+    [p, count] = sscanf(message, 'ExtrapolateLocalTime(%lu)=%lu ms');
+    if count == 2
+        estimatedArrivalTime = [estimatedArrivalTime; p'];
+        line = fgetl(fid);
+        continue;
+    end
+    
+    [p, count] = sscanf(message, 'Packet seqNo %u of frame %lu at %lu');
+    if count == 3
+        packetTime = [packetTime; p'];
+        line = fgetl(fid);
+        continue;
+    end
+    
+    [p, count] = sscanf(message, 'First packet of frame %lu at %lu');
+    if count == 2
+        firstPacketTime = [firstPacketTime; p'];
+        line = fgetl(fid);
+        continue;
+    end
+    
+    [p, count] = sscanf(message, 'Decoding timestamp %lu at %lu');
+    if count == 2
+        decodeTime = [decodeTime; p'];
+        line = fgetl(fid);
+        continue;        
+    end
+    
+    [p, count] = sscanf(message, 'Render frame %lu at %lu. Render delay %lu, required delay %lu, max decode time %lu, min total delay %lu');
+    if count == 6
+        renderTime = [renderTime; p'];
+        line = fgetl(fid);
+        continue;
+    end
+
+    [p, count] = sscanf(message, 'Frame decoded: timeStamp=%lu decTime=%d maxDecTime=%lu, at %lu');
+    if count == 4
+        decodeCompleteTime = [decodeCompleteTime; p'];
+        line = fgetl(fid);
+        continue;
+    end
+
+    [p, count] = sscanf(message, 'Received complete frame timestamp %lu frame type %u frame size %*u at time %lu, jitter estimate was %lu');
+    if count == 4
+        completeTime = [completeTime; p'];
+        line = fgetl(fid);
+        continue;
+    end
+    
+    line = fgetl(fid);
+end
+fclose(fid);
+
+t = completeTime(:,3);
+TS = completeTime(:,1);
+
+figure;
+subplot(211);
+hold on;
+slope = 0;
+
+if sum(size(packetTime)) > 0
+    % Plot the time when each packet arrives
+    firstTimeStamp = packetTime(1,2);
+    x = (packetTime(:,2) - firstTimeStamp)/90;
+    if flat
+        slope = x;
+    end
+    firstTime = packetTime(1,3);
+    plot(x, packetTime(:,3) - firstTime - slope, 'b.');
+else
+    % Plot the time when the first packet of a frame arrives
+    firstTimeStamp = firstPacketTime(1,1);
+    x = (firstPacketTime(:,1) - firstTimeStamp)/90;
+    if flat
+        slope = x;
+    end
+    firstTime = firstPacketTime(1,2);
+    plot(x, firstPacketTime(:,2) - firstTime - slope, 'b.');
+end
+
+% Plot the frame complete time
+if prod(size(completeTime)) > 0
+    x = (completeTime(:,1) - firstTimeStamp)/90;
+    if flat
+        slope = x;
+    end
+    plot(x, completeTime(:,3) - firstTime - slope, 'ks');
+end
+
+% Plot the time the decode starts
+if prod(size(decodeTime)) > 0
+    x = (decodeTime(:,1) - firstTimeStamp)/90;
+    if flat
+        slope = x;
+    end
+    plot(x, decodeTime(:,2) - firstTime - slope, 'r.');
+end
+
+% Plot the decode complete time
+if prod(size(decodeCompleteTime)) > 0
+    x = (decodeCompleteTime(:,1) - firstTimeStamp)/90;
+    if flat
+        slope = x;
+    end
+    plot(x, decodeCompleteTime(:,4) - firstTime - slope, 'g.');
+end
+
+if prod(size(renderTime)) > 0
+    % Plot the wanted render time in ms
+    x = (renderTime(:,1) - firstTimeStamp)/90;
+    if flat
+        slope = x;
+    end
+    plot(x, renderTime(:,2) - firstTime - slope, 'c-');
+    
+    % Plot the render time if there were no render delay or decoding delay.
+    x = (renderTime(:,1) - firstTimeStamp)/90;
+    if flat
+        slope = x;
+    end
+    plot(x, renderTime(:,2) - firstTime - slope - renderTime(:, 3) - renderTime(:, 5), 'c--');
+    
+    % Plot the render time if there were no render delay.
+    x = (renderTime(:,1) - firstTimeStamp)/90;
+    if flat
+        slope = x;
+    end
+    plot(x, renderTime(:,2) - firstTime - slope - renderTime(:, 3) - renderTime(:, 5), 'b-');
+end
+
+%plot(x, 90*x, 'r-');
+
+xlabel('RTP timestamp (in ms)');
+ylabel('Time (ms)');
+legend('Packet arrives', 'Frame complete', 'Decode', 'Decode complete', 'Time to render', 'Only jitter', 'Must decode');
+
+% subplot(312);
+% hold on;
+% completeTs = completeTime(:, 1);
+% arrivalTs = estimatedArrivalTime(:, 1);
+% [c, completeIdx, arrivalIdx] = intersect(completeTs, arrivalTs);
+% %plot(completeTs(completeIdx), completeTime(completeIdx, 3) - estimatedArrivalTime(arrivalIdx, 2));
+% timeUntilComplete = completeTime(completeIdx, 3) - estimatedArrivalTime(arrivalIdx, 2);
+% devFromAvgCompleteTime = timeUntilComplete - mean(timeUntilComplete);
+% plot(completeTs(completeIdx) - completeTs(completeIdx(1)), devFromAvgCompleteTime);
+% plot(completeTime(:, 1) - completeTime(1, 1), completeTime(:, 4), 'r');
+% plot(decodeCompleteTime(:, 1) - decodeCompleteTime(1, 1), decodeCompleteTime(:, 2), 'g');
+% plot(decodeCompleteTime(:, 1) - decodeCompleteTime(1, 1), decodeCompleteTime(:, 3), 'k');
+% xlabel('RTP timestamp');
+% ylabel('Time (ms)');
+% legend('Complete time - Estimated arrival time', 'Desired jitter buffer level', 'Actual decode time', 'Max decode time', 0);
+
+if prod(size(renderTime)) > 0
+    subplot(212);
+    hold on;
+    firstTime = renderTime(1, 1);
+    targetDelay = max(renderTime(:, 3) + renderTime(:, 4) + renderTime(:, 5), renderTime(:, 6));
+    plot(renderTime(:, 1) - firstTime, renderTime(:, 3), 'r-');
+    plot(renderTime(:, 1) - firstTime, renderTime(:, 4), 'b-');
+    plot(renderTime(:, 1) - firstTime, renderTime(:, 5), 'g-');
+    plot(renderTime(:, 1) - firstTime, renderTime(:, 6), 'k-');
+    plot(renderTime(:, 1) - firstTime, targetDelay, 'c-');
+    xlabel('RTP timestamp');
+    ylabel('Time (ms)');
+    legend('Render delay', 'Jitter delay', 'Decode delay', 'Extra delay', 'Min total delay');
+end
\ No newline at end of file
diff --git a/src/modules/video_coding/main/test/plotTimingTest.m b/src/modules/video_coding/main/test/plotTimingTest.m
new file mode 100644
index 0000000..52a6f30
--- /dev/null
+++ b/src/modules/video_coding/main/test/plotTimingTest.m
@@ -0,0 +1,62 @@
+function plotTimingTest(filename)
+fid=fopen(filename);
+
+%DEBUG     ; ( 9:53:33:859 |    0)        VIDEO:-1         ;      7132; Stochastic test 1
+%DEBUG     ; ( 9:53:33:859 |    0) VIDEO CODING:-1         ;      7132; Frame decoded: timeStamp=3000 decTime=10 at 10012
+%DEBUG     ; ( 9:53:33:859 |    0)        VIDEO:-1         ;      7132; timeStamp=3000 clock=10037 maxWaitTime=0
+%DEBUG     ; ( 9:53:33:859 |    0)        VIDEO:-1         ;      7132; timeStampMs=33 renderTime=54
+line = fgetl(fid);
+decTime = [];
+waitTime = [];
+renderTime = [];
+foundStart = 0;
+testName = 'Stochastic test 1';
+while ischar(line)
+    if length(line) == 0
+        line = fgetl(fid);
+        continue;
+    end
+    lineOrig = line;
+    line = line(72:end);
+    if ~foundStart
+        if strncmp(line, testName, length(testName)) 
+            foundStart = 1;
+        end
+        line = fgetl(fid);
+        continue;
+    end
+    [p, count] = sscanf(line, 'Frame decoded: timeStamp=%lu decTime=%d maxDecTime=%d, at %lu');
+    if count == 4
+        decTime = [decTime; p'];
+        line = fgetl(fid);
+        continue;
+    end
+    [p, count] = sscanf(line, 'timeStamp=%u clock=%u maxWaitTime=%u');
+    if count == 3
+        waitTime = [waitTime; p'];
+        line = fgetl(fid);
+        continue;
+    end
+    [p, count] = sscanf(line, 'timeStamp=%u renderTime=%u');
+    if count == 2
+        renderTime = [renderTime; p'];
+        line = fgetl(fid);
+        continue;
+    end    
+    line = fgetl(fid);
+end
+fclose(fid);
+
+% Compensate for wrap arounds and start counting from zero.
+timeStamps = waitTime(:, 1);
+tsDiff = diff(timeStamps);
+wrapIdx = find(tsDiff < 0);
+timeStamps(wrapIdx+1:end) = hex2dec('ffffffff') + timeStamps(wrapIdx+1:end);
+timeStamps = timeStamps - timeStamps(1);
+
+figure;
+hold on;
+plot(timeStamps, decTime(:, 2), 'r');
+plot(timeStamps, waitTime(:, 3), 'g');
+plot(timeStamps(2:end), diff(renderTime(:, 2)), 'b');
+legend('Decode time', 'Max wait time', 'Render time diff');
\ No newline at end of file
diff --git a/src/modules/video_coding/main/test/quality_modes_test.cc b/src/modules/video_coding/main/test/quality_modes_test.cc
new file mode 100644
index 0000000..bc3d9dd
--- /dev/null
+++ b/src/modules/video_coding/main/test/quality_modes_test.cc
@@ -0,0 +1,491 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "quality_modes_test.h"
+
+#include <iostream>
+#include <string>
+#include <time.h>
+
+#include "../source/event.h"
+#include "modules/video_coding/main/source/tick_time_base.h"
+#include "test_callbacks.h"
+#include "test_macros.h"
+#include "testsupport/metrics/video_metrics.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+
+using namespace webrtc;
+
+int qualityModeTest()
+{
+    // Don't run this test with debug events.
+#if defined(EVENT_DEBUG)
+    return -1;
+#endif
+    TickTimeBase clock;
+    VideoCodingModule* vcm = VideoCodingModule::Create(1, &clock);
+    QualityModesTest QMTest(vcm, &clock);
+    QMTest.Perform();
+    VideoCodingModule::Destroy(vcm);
+    return 0;
+}
+
+
+QualityModesTest::QualityModesTest(VideoCodingModule* vcm,
+                                   TickTimeBase* clock):
+NormalTest(vcm, clock),
+_vpm()
+{
+    //
+}
+
+
+QualityModesTest::~QualityModesTest()
+{
+    //
+}
+
+void
+QualityModesTest::Setup()
+{
+
+
+    _inname= test::ProjectRootPath() + "resources/crew_30f_4CIF.yuv";
+    _outname = test::OutputPath() + "out_qmtest.yuv";
+    _encodedName = test::OutputPath() + "encoded_qmtest.yuv";
+
+    //NATIVE/SOURCE VALUES
+    _nativeWidth = 2*352;
+    _nativeHeight = 2*288;
+    _nativeFrameRate = 30;
+
+
+    //TARGET/ENCODER VALUES
+     _width = 2*352;
+     _height = 2*288;
+    _frameRate = 30;
+    //
+    _bitRate = 400;
+
+    _flagSSIM = false;
+
+    _lengthSourceFrame  = 3*_nativeWidth*_nativeHeight/2;
+
+    if ((_sourceFile = fopen(_inname.c_str(), "rb")) == NULL)
+    {
+        printf("Cannot read file %s.\n", _inname.c_str());
+        exit(1);
+    }
+     if ((_encodedFile = fopen(_encodedName.c_str(), "wb")) == NULL)
+    {
+        printf("Cannot write encoded file.\n");
+        exit(1);
+    }
+    if ((_decodedFile = fopen(_outname.c_str(),  "wb")) == NULL)
+    {
+        printf("Cannot write file %s.\n", _outname.c_str());
+        exit(1);
+    }
+
+    _log.open((test::OutputPath() + "TestLog.txt").c_str(),
+              std::fstream::out | std::fstream::app);
+    return;
+}
+
+
+void
+QualityModesTest::Print()
+{
+    std::cout << "Quality Modes Test Completed!" << std::endl;
+    (_log) << "Quality Modes Test Completed!" << std::endl;
+    (_log) << "Input file: " << _inname << std::endl;
+    (_log) << "Output file: " << _outname << std::endl;
+    (_log) << "Total run time: " << _testTotalTime << std::endl;
+    printf("Total run time: %f s \n", _testTotalTime);
+    double ActualBitRate =  8.0 *( _sumEncBytes / (_frameCnt / _nativeFrameRate));
+    double actualBitRate = ActualBitRate / 1000.0;
+    double avgEncTime = _totalEncodeTime / _frameCnt;
+    double avgDecTime = _totalDecodeTime / _frameCnt;
+    webrtc::test::QualityMetricsResult psnr,ssim;
+    I420PSNRFromFiles(_inname.c_str(), _outname.c_str(), _nativeWidth,
+                      _nativeHeight, &psnr);
+    printf("Actual bitrate: %f kbps\n", actualBitRate);
+    printf("Target bitrate: %f kbps\n", _bitRate);
+    ( _log) << "Actual bitrate: " << actualBitRate<< " kbps\tTarget: " << _bitRate << " kbps" << std::endl;
+    printf("Average encode time: %f s\n", avgEncTime);
+    ( _log) << "Average encode time: " << avgEncTime << " s" << std::endl;
+    printf("Average decode time: %f s\n", avgDecTime);
+    ( _log) << "Average decode time: " << avgDecTime << " s" << std::endl;
+    printf("PSNR: %f \n", psnr.average);
+    printf("**Number of frames dropped in VPM***%d \n",_numFramesDroppedVPM);
+    ( _log) << "PSNR: " << psnr.average << std::endl;
+    if (_flagSSIM == 1)
+    {
+        printf("***computing SSIM***\n");
+        I420SSIMFromFiles(_inname.c_str(), _outname.c_str(), _nativeWidth,
+                          _nativeHeight, &ssim);
+        printf("SSIM: %f \n", ssim.average);
+    }
+    (_log) << std::endl;
+
+    printf("\nVCM Qualit Modes Test: \n\n%i tests completed\n", vcmMacrosTests);
+    if (vcmMacrosErrors > 0)
+    {
+        printf("%i FAILED\n\n", vcmMacrosErrors);
+    }
+    else
+    {
+        printf("ALL PASSED\n\n");
+    }
+}
+void
+QualityModesTest::Teardown()
+{
+    _log.close();
+    fclose(_sourceFile);
+    fclose(_decodedFile);
+    fclose(_encodedFile);
+    return;
+}
+
+
+WebRtc_Word32
+QualityModesTest::Perform()
+{
+    Setup();
+    // changing bit/frame rate during the test
+    const float bitRateUpdate[] = {1000};
+    const float frameRateUpdate[] = {30};
+    const int updateFrameNum[] = {10000}; // frame numbers at which an update will occur
+
+    WebRtc_UWord32 numChanges = sizeof(updateFrameNum)/sizeof(*updateFrameNum);
+    WebRtc_UWord8 change = 0;// change counter
+
+    _vpm = VideoProcessingModule::Create(1);
+
+    EventWrapper* waitEvent = EventWrapper::Create();
+    VideoCodec codec;//both send and receive
+    _vcm->InitializeReceiver();
+    _vcm->InitializeSender();
+    WebRtc_Word32 NumberOfCodecs = _vcm->NumberOfCodecs();
+    for (int i = 0; i < NumberOfCodecs; i++)
+    {
+        _vcm->Codec(i, &codec);
+        if(strncmp(codec.plName,"VP8" , 5) == 0)
+        {
+             codec.startBitrate = (int)_bitRate;
+             codec.maxFramerate = (WebRtc_UWord8) _frameRate;
+             codec.width = (WebRtc_UWord16)_width;
+             codec.height = (WebRtc_UWord16)_height;
+             TEST(_vcm->RegisterSendCodec(&codec, 2, 1440) == VCM_OK);// will also set and init the desired codec
+             i = NumberOfCodecs;
+        }
+    }
+
+    // register a decoder (same codec for decoder and encoder )
+    TEST(_vcm->RegisterReceiveCodec(&codec, 2) == VCM_OK);
+    /* Callback Settings */
+    VCMQMDecodeCompleCallback  _decodeCallback(_decodedFile);
+    _vcm->RegisterReceiveCallback(&_decodeCallback);
+    VCMNTEncodeCompleteCallback   _encodeCompleteCallback(_encodedFile, *this);
+    _vcm->RegisterTransportCallback(&_encodeCompleteCallback);
+    // encode and decode with the same vcm
+    _encodeCompleteCallback.RegisterReceiverVCM(_vcm);
+
+    //quality modes callback
+    QMTestVideoSettingsCallback QMCallback;
+    QMCallback.RegisterVCM(_vcm);
+    QMCallback.RegisterVPM(_vpm);
+    _vcm->RegisterVideoQMCallback(&QMCallback);
+
+    ///////////////////////
+    /// Start Test
+    ///////////////////////
+    _vpm->EnableTemporalDecimation(true);
+    _vpm->EnableContentAnalysis(true);
+    _vpm->SetInputFrameResampleMode(kFastRescaling);
+
+    // disabling internal VCM frame dropper
+    _vcm->EnableFrameDropper(false);
+
+    VideoFrame sourceFrame;
+    VideoFrame *decimatedFrame = NULL;
+    sourceFrame.VerifyAndAllocate(_lengthSourceFrame);
+    WebRtc_UWord8* tmpBuffer = new WebRtc_UWord8[_lengthSourceFrame];
+    double startTime = clock()/(double)CLOCKS_PER_SEC;
+    _vcm->SetChannelParameters((WebRtc_UWord32)_bitRate, 0, 0);
+
+    SendStatsTest sendStats;
+    sendStats.SetTargetFrameRate(static_cast<WebRtc_UWord32>(_frameRate));
+    _vcm->RegisterSendStatisticsCallback(&sendStats);
+
+    VideoContentMetrics* contentMetrics = NULL;
+    // setting user frame rate
+    _vpm->SetMaxFrameRate((WebRtc_UWord32)(_nativeFrameRate+ 0.5f));
+    // for starters: keeping native values:
+    _vpm->SetTargetResolution(_width, _height, (WebRtc_UWord32)(_frameRate+ 0.5f));
+    _decodeCallback.SetOriginalFrameDimensions(_nativeWidth, _nativeHeight);
+
+    //tmp  - disabling VPM frame dropping
+    _vpm->EnableTemporalDecimation(false);
+
+
+    WebRtc_Word32 ret = 0;
+      _numFramesDroppedVPM = 0;
+
+    while (feof(_sourceFile)== 0)
+    {
+        TEST(fread(tmpBuffer, 1, _lengthSourceFrame, _sourceFile) > 0);
+        _frameCnt++;
+        sourceFrame.CopyFrame(_lengthSourceFrame, tmpBuffer);
+        sourceFrame.SetHeight(_nativeHeight);
+        sourceFrame.SetWidth(_nativeWidth);
+
+        _timeStamp += (WebRtc_UWord32)(9e4 / static_cast<float>(codec.maxFramerate));
+        sourceFrame.SetTimeStamp(_timeStamp);
+
+        ret = _vpm->PreprocessFrame(&sourceFrame, &decimatedFrame);
+        if (ret  == 1)
+        {
+            printf("VD: frame drop %d \n",_frameCnt);
+            _numFramesDroppedVPM += 1;
+            continue; // frame drop
+        }
+        else if (ret < 0)
+        {
+            printf("Error in PreprocessFrame: %d\n", ret);
+            //exit(1);
+        }
+        contentMetrics = _vpm->ContentMetrics();
+        if (contentMetrics == NULL)
+        {
+            printf("error: contentMetrics = NULL\n");
+        }
+
+        // counting only encoding time
+        _encodeTimes[int(sourceFrame.TimeStamp())] = clock()/(double)CLOCKS_PER_SEC;
+
+        WebRtc_Word32 ret = _vcm->AddVideoFrame(*decimatedFrame, contentMetrics);
+
+        _totalEncodeTime += clock()/(double)CLOCKS_PER_SEC - _encodeTimes[int(sourceFrame.TimeStamp())];
+
+        if (ret < 0)
+        {
+            printf("Error in AddFrame: %d\n", ret);
+            //exit(1);
+        }
+        _decodeTimes[int(sourceFrame.TimeStamp())] = clock()/(double)CLOCKS_PER_SEC; // same timestamp value for encode and decode
+        ret = _vcm->Decode();
+        _totalDecodeTime += clock()/(double)CLOCKS_PER_SEC - _decodeTimes[int(sourceFrame.TimeStamp())];
+        if (ret < 0)
+        {
+            printf("Error in Decode: %d\n", ret);
+            //exit(1);
+        }
+        if (_vcm->TimeUntilNextProcess() <= 0)
+        {
+            _vcm->Process();
+        }
+        // mimicking setTargetRates - update every 1 sec
+        // this will trigger QMSelect
+        if (_frameCnt%((int)_frameRate) == 0)
+        {
+            _vcm->SetChannelParameters((WebRtc_UWord32)_bitRate, 0, 1);
+            waitEvent->Wait(33);
+        }
+        waitEvent->Wait(33);
+        // check for bit rate update
+        if (change < numChanges && _frameCnt == updateFrameNum[change])
+        {
+            _bitRate = bitRateUpdate[change];
+            _frameRate = frameRateUpdate[change];
+            codec.startBitrate = (int)_bitRate;
+            codec.maxFramerate = (WebRtc_UWord8) _frameRate;
+            TEST(_vcm->RegisterSendCodec(&codec, 2, 1440) == VCM_OK);// will also set and init the desired codec
+            change++;
+        }
+    }
+
+    double endTime = clock()/(double)CLOCKS_PER_SEC;
+    _testTotalTime = endTime - startTime;
+    _sumEncBytes = _encodeCompleteCallback.EncodedBytes();
+
+    delete tmpBuffer;
+    delete waitEvent;
+    _vpm->Reset();
+    Teardown();
+    Print();
+    VideoProcessingModule::Destroy(_vpm);
+    return 0;
+}
+
+
+// implementing callback to be called from VCM to update VPM of frame rate and size
+QMTestVideoSettingsCallback::QMTestVideoSettingsCallback():
+_vpm(NULL),
+_vcm(NULL)
+{
+    //
+}
+
+void
+QMTestVideoSettingsCallback::RegisterVPM(VideoProcessingModule *vpm)
+{
+    _vpm = vpm;
+}
+void
+QMTestVideoSettingsCallback::RegisterVCM(VideoCodingModule *vcm)
+{
+    _vcm = vcm;
+}
+
+bool
+QMTestVideoSettingsCallback::Updated()
+{
+    if (_updated)
+    {
+        _updated = false;
+        return true;
+    }
+    return false;
+}
+
+WebRtc_Word32
+QMTestVideoSettingsCallback::SetVideoQMSettings(const WebRtc_UWord32 frameRate,
+                                                const WebRtc_UWord32 width,
+                                                const WebRtc_UWord32 height)
+{
+    WebRtc_Word32 retVal = 0;
+    printf("QM updates: W = %d, H = %d, FR = %d, \n", width, height, frameRate);
+    retVal = _vpm->SetTargetResolution(width, height, frameRate);
+    //Initialize codec with new values - is this the best place to do it?
+    if (!retVal)
+    {
+        // first get current settings
+        VideoCodec currentCodec;
+        _vcm->SendCodec(&currentCodec);
+        // now set new values:
+        currentCodec.height = (WebRtc_UWord16)height;
+        currentCodec.width = (WebRtc_UWord16)width;
+        currentCodec.maxFramerate = (WebRtc_UWord8)frameRate;
+
+        // re-register encoder
+        retVal = _vcm->RegisterSendCodec(&currentCodec, 2, 1440);
+        _updated = true;
+    }
+
+    return retVal;
+}
+
+
+// Decoded Frame Callback Implmentation
+VCMQMDecodeCompleCallback::VCMQMDecodeCompleCallback(FILE* decodedFile):
+_decodedFile(decodedFile),
+_decodedBytes(0),
+//_test(test),
+_origWidth(0),
+_origHeight(0),
+_decWidth(0),
+_decHeight(0),
+//_interpolator(NULL),
+_decBuffer(NULL),
+_frameCnt(0)
+{
+    //
+}
+
+VCMQMDecodeCompleCallback::~VCMQMDecodeCompleCallback()
+ {
+//     if (_interpolator != NULL)
+//     {
+//         deleteInterpolator(_interpolator);
+//         _interpolator = NULL;
+//     }
+     if (_decBuffer != NULL)
+     {
+         delete [] _decBuffer;
+         _decBuffer = NULL;
+     }
+ }
+WebRtc_Word32
+VCMQMDecodeCompleCallback::FrameToRender(VideoFrame& videoFrame)
+{
+    if ((_origWidth == videoFrame.Width()) && (_origHeight == videoFrame.Height()))
+    {
+      if (fwrite(videoFrame.Buffer(), 1, videoFrame.Length(),
+                 _decodedFile) !=  videoFrame.Length()) {
+        return -1;
+      }
+      _frameCnt++;
+      //printf("frame dec # %d", _frameCnt);
+        // no need for interpolator and decBuffer
+        if (_decBuffer != NULL)
+        {
+            delete [] _decBuffer;
+            _decBuffer = NULL;
+        }
+//        if (_interpolator != NULL)
+//        {
+//            deleteInterpolator(_interpolator);
+//            _interpolator = NULL;
+//        }
+        _decWidth = 0;
+        _decHeight = 0;
+    }
+    else
+    {
+        if ((_decWidth != videoFrame.Width()) || (_decHeight != videoFrame.Height()))
+        {
+            _decWidth = videoFrame.Width();
+            _decHeight = videoFrame.Height();
+            buildInterpolator();
+        }
+
+//        interpolateFrame(_interpolator, videoFrame.Buffer(),_decBuffer);
+        if (fwrite(_decBuffer, 1, _origWidth*_origHeight * 3/2,
+                   _decodedFile) !=  _origWidth*_origHeight * 3/2) {
+          return -1;
+        }
+        _frameCnt++;
+    }
+
+    _decodedBytes += videoFrame.Length();
+    return VCM_OK;
+}
+
+WebRtc_Word32
+VCMQMDecodeCompleCallback::DecodedBytes()
+{
+    return _decodedBytes;
+}
+
+void
+VCMQMDecodeCompleCallback::SetOriginalFrameDimensions(WebRtc_Word32 width, WebRtc_Word32 height)
+{
+    _origWidth = width;
+    _origHeight = height;
+}
+
+WebRtc_Word32
+VCMQMDecodeCompleCallback::buildInterpolator()
+{
+    WebRtc_UWord32 decFrameLength  = _origWidth*_origHeight*3 >> 1;
+    if (_decBuffer != NULL)
+    {
+        delete [] _decBuffer;
+    }
+    _decBuffer = new WebRtc_UWord8[decFrameLength];
+    if (_decBuffer == NULL)
+    {
+        return -1;
+    }
+
+    return 0;
+}
diff --git a/src/modules/video_coding/main/test/quality_modes_test.h b/src/modules/video_coding/main/test/quality_modes_test.h
new file mode 100644
index 0000000..87fa01f
--- /dev/null
+++ b/src/modules/video_coding/main/test/quality_modes_test.h
@@ -0,0 +1,95 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_QUALITY_MODSE_TEST_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_QUALITY_MODSE_TEST_H_
+
+#include "video_processing.h"
+#include "normal_test.h"
+#include "video_coding_defines.h"
+
+int qualityModeTest();
+
+class QualityModesTest : public NormalTest
+{
+public:
+    QualityModesTest(webrtc::VideoCodingModule* vcm,
+                     webrtc::TickTimeBase* clock);
+    virtual ~QualityModesTest();
+    WebRtc_Word32 Perform();
+
+private:
+
+    void Setup();
+    void Print();
+    void Teardown();
+    void SsimComp();
+
+    webrtc::VideoProcessingModule*  _vpm;
+
+    WebRtc_UWord32                      _width;
+    WebRtc_UWord32                      _height;
+    float                               _frameRate;
+    WebRtc_UWord32                      _nativeWidth;
+    WebRtc_UWord32                      _nativeHeight;
+    float                               _nativeFrameRate;
+
+    WebRtc_UWord32                      _numFramesDroppedVPM;
+    bool                                _flagSSIM;
+
+}; // end of QualityModesTest class
+
+
+class VCMQMDecodeCompleCallback: public webrtc::VCMReceiveCallback
+{
+public:
+    VCMQMDecodeCompleCallback(FILE* decodedFile);
+    virtual ~VCMQMDecodeCompleCallback();
+    void SetUserReceiveCallback(webrtc::VCMReceiveCallback* receiveCallback);
+    // will write decoded frame into file
+    WebRtc_Word32 FrameToRender(webrtc::VideoFrame& videoFrame);
+    WebRtc_Word32 DecodedBytes();
+    void SetOriginalFrameDimensions(WebRtc_Word32 width, WebRtc_Word32 height);
+    WebRtc_Word32 buildInterpolator();
+private:
+    FILE*                _decodedFile;
+    WebRtc_UWord32       _decodedBytes;
+   // QualityModesTest&  _test;
+    WebRtc_UWord32       _origWidth;
+    WebRtc_UWord32       _origHeight;
+    WebRtc_UWord32       _decWidth;
+    WebRtc_UWord32       _decHeight;
+//    VideoInterpolator* _interpolator;
+    WebRtc_UWord8*       _decBuffer;
+    WebRtc_UWord32       _frameCnt; // debug
+
+}; // end of VCMQMDecodeCompleCallback class
+
+class QMTestVideoSettingsCallback : public webrtc::VCMQMSettingsCallback
+{
+public:
+    QMTestVideoSettingsCallback();
+    // update VPM with QM settings
+    WebRtc_Word32 SetVideoQMSettings(const WebRtc_UWord32 frameRate,
+                                     const WebRtc_UWord32 width,
+                                     const WebRtc_UWord32 height);
+    // register VPM used by test
+    void RegisterVPM(webrtc::VideoProcessingModule* vpm);
+    void RegisterVCM(webrtc::VideoCodingModule* vcm);
+    bool Updated();
+
+private:
+    webrtc::VideoProcessingModule*         _vpm;
+    webrtc::VideoCodingModule*             _vcm;
+    bool                                   _updated;
+};
+
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_QUALITY_MODSE_TEST_H_
diff --git a/src/modules/video_coding/main/test/receiver_tests.h b/src/modules/video_coding/main/test/receiver_tests.h
new file mode 100644
index 0000000..cb45ca1
--- /dev/null
+++ b/src/modules/video_coding/main/test/receiver_tests.h
@@ -0,0 +1,81 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_RECEIVER_TESTS_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_RECEIVER_TESTS_H_
+
+#include "video_coding.h"
+#include "module_common_types.h"
+#include "common_types.h"
+#include "rtp_rtcp.h"
+#include "typedefs.h"
+#include "rtp_player.h"
+#include "test_util.h"
+
+#include <string>
+#include <stdio.h>
+
+class RtpDataCallback : public webrtc::RtpData
+{
+public:
+    RtpDataCallback(webrtc::VideoCodingModule* vcm)
+        : _vcm(vcm) {};
+
+    virtual WebRtc_Word32 OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                                              const WebRtc_UWord16 payloadSize,
+                                              const webrtc::WebRtcRTPHeader* rtpHeader);
+private:
+    webrtc::VideoCodingModule* _vcm;
+};
+
+class FrameReceiveCallback : public webrtc::VCMReceiveCallback
+{
+public:
+    FrameReceiveCallback(std::string outFilename) :
+        _outFilename(outFilename),
+        _outFile(NULL),
+        _timingFile(NULL) {}
+
+    virtual ~FrameReceiveCallback();
+
+    WebRtc_Word32 FrameToRender(webrtc::VideoFrame& videoFrame);
+
+private:
+    std::string     _outFilename;
+    FILE*           _outFile;
+    FILE*           _timingFile;
+};
+
+class SharedState
+{
+public:
+    SharedState(webrtc::VideoCodingModule& vcm, RTPPlayer& rtpPlayer) :
+        _vcm(vcm),
+        _rtpPlayer(rtpPlayer) {}
+    webrtc::VideoCodingModule&  _vcm;
+    RTPPlayer&              _rtpPlayer;
+};
+
+
+int RtpPlay(CmdArgs& args);
+int RtpPlayMT(CmdArgs& args,
+              int releaseTest = 0,
+              webrtc::VideoCodecType releaseTestVideoType = webrtc::kVideoCodecVP8);
+int ReceiverTimingTests(CmdArgs& args);
+int JitterBufferTest(CmdArgs& args);
+int DecodeFromStorageTest(CmdArgs& args);
+
+// Thread functions:
+bool ProcessingThread(void* obj);
+bool RtpReaderThread(void* obj);
+bool DecodeThread(void* obj);
+bool NackThread(void* obj);
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_RECEIVER_TESTS_H_
diff --git a/src/modules/video_coding/main/test/receiver_timing_tests.cc b/src/modules/video_coding/main/test/receiver_timing_tests.cc
new file mode 100644
index 0000000..0b09256
--- /dev/null
+++ b/src/modules/video_coding/main/test/receiver_timing_tests.cc
@@ -0,0 +1,235 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "receiver_tests.h"
+#include "video_coding.h"
+#include "trace.h"
+#include "../source/event.h"
+#include "../source/internal_defines.h"
+#include "timing.h"
+#include "test_macros.h"
+#include "test_util.h"
+
+#include <cstdio>
+#include <cstdlib>
+#include <cmath>
+
+using namespace webrtc;
+
+float vcmFloatMax(float a, float b)
+{
+    return a > b ? a : b;
+}
+
+float vcmFloatMin(float a, float b)
+{
+    return a < b ? a : b;
+}
+
+double const pi = 4*std::atan(1.0);
+
+class GaussDist
+{
+public:
+    static float RandValue(float m, float stdDev) // returns a single normally distributed number
+    {
+        float r1 = static_cast<float>((std::rand() + 1.0)/(RAND_MAX + 1.0)); // gives equal distribution in (0, 1]
+        float r2 = static_cast<float>((std::rand() + 1.0)/(RAND_MAX + 1.0));
+        return m + stdDev * static_cast<float>(std::sqrt(-2*std::log(r1))*std::cos(2*pi*r2));
+    }
+};
+
+int ReceiverTimingTests(CmdArgs& args)
+{
+    // Make sure this test is never executed with simulated events.
+#if defined(EVENT_DEBUG)
+    return -1;
+#endif
+
+    // Set up trace
+    Trace::CreateTrace();
+    Trace::SetTraceFile((test::OutputPath() + "receiverTestTrace.txt").c_str());
+    Trace::SetLevelFilter(webrtc::kTraceAll);
+
+    // A static random seed
+    srand(0);
+
+    TickTimeBase clock;
+    VCMTiming timing(&clock);
+    float clockInMs = 0.0;
+    WebRtc_UWord32 waitTime = 0;
+    WebRtc_UWord32 jitterDelayMs = 0;
+    WebRtc_UWord32 maxDecodeTimeMs = 0;
+    WebRtc_UWord32 timeStamp = 0;
+
+    timing.Reset(static_cast<WebRtc_Word64>(clockInMs + 0.5));
+
+    timing.UpdateCurrentDelay(timeStamp);
+
+    timing.Reset(static_cast<WebRtc_Word64>(clockInMs + 0.5));
+
+    timing.IncomingTimestamp(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5));
+    jitterDelayMs = 20;
+    timing.SetRequiredDelay(jitterDelayMs);
+    timing.UpdateCurrentDelay(timeStamp);
+    waitTime = timing.MaxWaitingTime(timing.RenderTimeMs(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5)),
+        static_cast<WebRtc_Word64>(clockInMs + 0.5));
+    // First update initializes the render time. Since we have no decode delay
+    // we get waitTime = renderTime - now - renderDelay = jitter
+    TEST(waitTime == jitterDelayMs);
+
+    jitterDelayMs += VCMTiming::kDelayMaxChangeMsPerS + 10;
+    timeStamp += 90000;
+    clockInMs += 1000.0f;
+    timing.SetRequiredDelay(jitterDelayMs);
+    timing.UpdateCurrentDelay(timeStamp);
+    waitTime = timing.MaxWaitingTime(timing.RenderTimeMs(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5)),
+        static_cast<WebRtc_Word64>(clockInMs + 0.5));
+    // Since we gradually increase the delay we only get
+    // 100 ms every second.
+    TEST(waitTime == jitterDelayMs - 10);
+
+    timeStamp += 90000;
+    clockInMs += 1000.0;
+    timing.UpdateCurrentDelay(timeStamp);
+    waitTime = timing.MaxWaitingTime(timing.RenderTimeMs(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5)),
+        static_cast<WebRtc_Word64>(clockInMs + 0.5));
+    TEST(waitTime == jitterDelayMs);
+
+    // 300 incoming frames without jitter, verify that this gives the exact wait time
+    for (int i=0; i < 300; i++)
+    {
+        clockInMs += 1000.0f/30.0f;
+        timeStamp += 3000;
+        timing.IncomingTimestamp(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5));
+    }
+    timing.UpdateCurrentDelay(timeStamp);
+    waitTime = timing.MaxWaitingTime(timing.RenderTimeMs(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5)),
+        static_cast<WebRtc_Word64>(clockInMs + 0.5));
+    TEST(waitTime == jitterDelayMs);
+
+    // Add decode time estimates
+    for (int i=0; i < 10; i++)
+    {
+        WebRtc_Word64 startTimeMs = static_cast<WebRtc_Word64>(clockInMs + 0.5);
+        clockInMs += 10.0f;
+        timing.StopDecodeTimer(timeStamp, startTimeMs, static_cast<WebRtc_Word64>(clockInMs + 0.5));
+        timeStamp += 3000;
+        clockInMs += 1000.0f/30.0f - 10.0f;
+        timing.IncomingTimestamp(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5));
+    }
+    maxDecodeTimeMs = 10;
+    timing.SetRequiredDelay(jitterDelayMs);
+    clockInMs += 1000.0f;
+    timeStamp += 90000;
+    timing.UpdateCurrentDelay(timeStamp);
+    waitTime = timing.MaxWaitingTime(timing.RenderTimeMs(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5)),
+        static_cast<WebRtc_Word64>(clockInMs + 0.5));
+    TEST(waitTime == jitterDelayMs);
+
+    WebRtc_UWord32 totalDelay1 = timing.TargetVideoDelay();
+    WebRtc_UWord32 minTotalDelayMs = 200;
+    timing.SetMinimumTotalDelay(minTotalDelayMs);
+    clockInMs += 5000.0f;
+    timeStamp += 5*90000;
+    timing.UpdateCurrentDelay(timeStamp);
+    waitTime = timing.MaxWaitingTime(timing.RenderTimeMs(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5)),
+        static_cast<WebRtc_Word64>(clockInMs + 0.5));
+    WebRtc_UWord32 totalDelay2 = timing.TargetVideoDelay();
+    // We should at least have minTotalDelayMs - decodeTime (10) - renderTime (10) to wait
+    TEST(waitTime == minTotalDelayMs - maxDecodeTimeMs - 10);
+    // The total video delay should not increase with the extra delay,
+    // the extra delay should be independent.
+    TEST(totalDelay1 == totalDelay2);
+
+    // Reset min total delay
+    timing.SetMinimumTotalDelay(0);
+    clockInMs += 5000.0f;
+    timeStamp += 5*90000;
+    timing.UpdateCurrentDelay(timeStamp);
+
+    // A sudden increase in timestamp of 2.1 seconds
+    clockInMs += 1000.0f/30.0f;
+    timeStamp += static_cast<WebRtc_UWord32>(2.1*90000 + 0.5);
+    WebRtc_Word64 ret = timing.RenderTimeMs(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5));
+    TEST(ret == -1);
+    timing.Reset();
+
+    // This test produces a trace which can be parsed with plotTimingTest.m. The plot
+    // can be used to see that the timing is reasonable under noise, and that the
+    // gradual transition between delays works as expected.
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, -1,  "Stochastic test 1");
+
+    jitterDelayMs = 60;
+    maxDecodeTimeMs = 10;
+
+    timeStamp = static_cast<WebRtc_UWord32>(-10000); // To produce a wrap
+    clockInMs = 10000.0f;
+    timing.Reset(static_cast<WebRtc_Word64>(clockInMs + 0.5));
+
+    float noise = 0.0f;
+    for (int i=0; i < 1400; i++)
+    {
+        if (i == 400)
+        {
+            jitterDelayMs = 30;
+        }
+        else if (i == 700)
+        {
+            jitterDelayMs = 100;
+        }
+        else if (i == 1000)
+        {
+            minTotalDelayMs = 200;
+            timing.SetMinimumTotalDelay(minTotalDelayMs);
+        }
+        else if (i == 1200)
+        {
+            minTotalDelayMs = 0;
+            timing.SetMinimumTotalDelay(minTotalDelayMs);
+        }
+        WebRtc_Word64 startTimeMs = static_cast<WebRtc_Word64>(clockInMs + 0.5);
+        noise = vcmFloatMin(vcmFloatMax(GaussDist::RandValue(0, 2), -10.0f), 30.0f);
+        clockInMs += 10.0f;
+        timing.StopDecodeTimer(timeStamp, startTimeMs, static_cast<WebRtc_Word64>(clockInMs + noise + 0.5));
+        timeStamp += 3000;
+        clockInMs += 1000.0f/30.0f - 10.0f;
+        noise = vcmFloatMin(vcmFloatMax(GaussDist::RandValue(0, 8), -15.0f), 15.0f);
+        timing.IncomingTimestamp(timeStamp, static_cast<WebRtc_Word64>(clockInMs + noise + 0.5));
+        timing.SetRequiredDelay(jitterDelayMs);
+        timing.UpdateCurrentDelay(timeStamp);
+        waitTime = timing.MaxWaitingTime(timing.RenderTimeMs(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5)),
+            static_cast<WebRtc_Word64>(clockInMs + 0.5));
+
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, -1,  "timeStamp=%u clock=%u maxWaitTime=%u", timeStamp,
+            static_cast<WebRtc_UWord32>(clockInMs + 0.5), waitTime);
+
+        WebRtc_Word64 renderTimeMs = timing.RenderTimeMs(timeStamp, static_cast<WebRtc_Word64>(clockInMs + 0.5));
+
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, -1,
+                   "timeStamp=%u renderTime=%u",
+                   timeStamp,
+                   MaskWord64ToUWord32(renderTimeMs));
+    }
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding, -1,  "End Stochastic test 1");
+
+    printf("\nVCM Timing Test: \n\n%i tests completed\n", vcmMacrosTests);
+    if (vcmMacrosErrors > 0)
+    {
+        printf("%i FAILED\n\n", vcmMacrosErrors);
+    }
+    else
+    {
+        printf("ALL PASSED\n\n");
+    }
+
+    Trace::ReturnTrace();
+    return 0;
+}
diff --git a/src/modules/video_coding/main/test/release_test.cc b/src/modules/video_coding/main/test/release_test.cc
new file mode 100644
index 0000000..8e3a073
--- /dev/null
+++ b/src/modules/video_coding/main/test/release_test.cc
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "ReleaseTest.h"
+#include "ReceiverTests.h"
+#include "TestMacros.h"
+#include "MediaOptTest.h"
+#include "CodecDataBaseTest.h"
+#include "GenericCodecTest.h"
+
+
+
+
+int ReleaseTest()
+{
+    printf("VCM RELEASE TESTS \n\n");
+    
+    // Automatic tests
+
+    printf("Testing receive side timing...\n");
+    TEST(ReceiverTimingTests() == 0);
+    
+    printf("Testing jitter buffer...\n");
+    TEST(JitterBufferTest() == 0);
+    
+    printf("Testing Codec Data Base...\n");
+    TEST(CodecDBTest() == 0);
+    
+    printf("Testing Media Optimization....\n");
+    TEST(VCMMediaOptTest(1) == 0); 
+
+    // Tests requiring verification
+    
+    printf("Testing Multi thread send-receive....\n");
+    TEST(MTRxTxTest() == 0);
+    printf("Verify by viewing output file MTRxTx_out.yuv \n");
+    
+    return 0;
+}
\ No newline at end of file
diff --git a/src/modules/video_coding/main/test/release_test.h b/src/modules/video_coding/main/test/release_test.h
new file mode 100644
index 0000000..2578160
--- /dev/null
+++ b/src/modules/video_coding/main/test/release_test.h
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef RELEASE_TEST_H
+#define RELEASE_TEST_H
+
+int ReleaseTest();
+int ReleaseTestPart2();
+
+#endif
\ No newline at end of file
diff --git a/src/modules/video_coding/main/test/release_test_pt2.cc b/src/modules/video_coding/main/test/release_test_pt2.cc
new file mode 100644
index 0000000..5ff48e5
--- /dev/null
+++ b/src/modules/video_coding/main/test/release_test_pt2.cc
@@ -0,0 +1,31 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "ReleaseTest.h"
+#include "ReceiverTests.h"
+#include "TestMacros.h"
+#include "MediaOptTest.h"
+#include "CodecDataBaseTest.h"
+#include "GenericCodecTest.h"
+
+
+
+
+int ReleaseTestPart2()
+{
+    printf("Verify that TICK_TIME_DEBUG and EVENT_DEBUG are uncommented");
+    // Tests requiring verification
+
+    printf("Testing Generic Codecs...\n");
+    TEST(VCMGenericCodecTest() == 0);
+    printf("Verify by viewing output file GCTest_out.yuv \n");
+    
+    return 0;
+}
\ No newline at end of file
diff --git a/src/modules/video_coding/main/test/rtp_player.cc b/src/modules/video_coding/main/test/rtp_player.cc
new file mode 100644
index 0000000..9eacf34
--- /dev/null
+++ b/src/modules/video_coding/main/test/rtp_player.cc
@@ -0,0 +1,441 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "rtp_player.h"
+
+#include <cstdlib>
+#ifdef WIN32
+#include <windows.h>
+#include <Winsock2.h>
+#else
+#include <arpa/inet.h>
+#endif
+
+#include "../source/internal_defines.h"
+#include "gtest/gtest.h"
+#include "modules/video_coding/main/source/tick_time_base.h"
+#include "rtp_rtcp.h"
+
+using namespace webrtc;
+
+RawRtpPacket::RawRtpPacket(uint8_t* rtp_data, uint16_t rtp_length)
+    : data(rtp_data),
+      length(rtp_length),
+      resend_time_ms(-1) {
+  data = new uint8_t[length];
+  memcpy(data, rtp_data, length);
+}
+
+RawRtpPacket::~RawRtpPacket() {
+  delete [] data;
+}
+
+LostPackets::LostPackets()
+    : crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
+      loss_count_(0),
+      debug_file_(NULL),
+      packets_() {
+  debug_file_ = fopen("PacketLossDebug.txt", "w");
+}
+
+LostPackets::~LostPackets() {
+  if (debug_file_) {
+      fclose(debug_file_);
+  }
+  while (!packets_.empty()) {
+    delete packets_.front();
+    packets_.pop_front();
+  }
+  delete crit_sect_;
+}
+
+void LostPackets::AddPacket(RawRtpPacket* packet) {
+  CriticalSectionScoped cs(crit_sect_);
+  packets_.push_back(packet);
+  uint16_t seq_num = (packet->data[2] << 8) + packet->data[3];
+  if (debug_file_ != NULL) {
+    fprintf(debug_file_, "%u Lost packet: %u\n", loss_count_, seq_num);
+  }
+  ++loss_count_;
+}
+
+void LostPackets::SetResendTime(uint16_t resend_seq_num,
+                                int64_t resend_time_ms,
+                                int64_t now_ms) {
+  CriticalSectionScoped cs(crit_sect_);
+  for (RtpPacketIterator it = packets_.begin(); it != packets_.end(); ++it) {
+    const uint16_t seq_num = ((*it)->data[2] << 8) +
+        (*it)->data[3];
+    if (resend_seq_num == seq_num) {
+      if ((*it)->resend_time_ms + 10 < now_ms) {
+        if (debug_file_ != NULL) {
+          fprintf(debug_file_, "Resend %u at %u\n", seq_num,
+                  MaskWord64ToUWord32(resend_time_ms));
+        }
+        (*it)->resend_time_ms = resend_time_ms;
+      }
+      return;
+    }
+  }
+  assert(false);
+}
+
+RawRtpPacket* LostPackets::NextPacketToResend(int64_t timeNow) {
+  CriticalSectionScoped cs(crit_sect_);
+  for (RtpPacketIterator it = packets_.begin(); it != packets_.end(); ++it) {
+    if (timeNow >= (*it)->resend_time_ms && (*it)->resend_time_ms != -1) {
+      RawRtpPacket* packet = *it;
+      it = packets_.erase(it);
+      return packet;
+    }
+  }
+  return NULL;
+}
+
+int LostPackets::NumberOfPacketsToResend() const {
+  CriticalSectionScoped cs(crit_sect_);
+  int count = 0;
+  for (ConstRtpPacketIterator it = packets_.begin(); it != packets_.end();
+      ++it) {
+    if ((*it)->resend_time_ms >= 0) {
+        count++;
+    }
+  }
+  return count;
+}
+
+void LostPackets::SetPacketResent(uint16_t seq_num, int64_t now_ms) {
+  CriticalSectionScoped cs(crit_sect_);
+  if (debug_file_ != NULL) {
+    fprintf(debug_file_, "Resent %u at %u\n", seq_num,
+            MaskWord64ToUWord32(now_ms));
+  }
+}
+
+void LostPackets::Print() const {
+  CriticalSectionScoped cs(crit_sect_);
+  printf("Lost packets: %u\n", loss_count_);
+  printf("Packets waiting to be resent: %u\n",
+         NumberOfPacketsToResend());
+  printf("Packets still lost: %u\n",
+         static_cast<unsigned int>(packets_.size()));
+  printf("Sequence numbers:\n");
+  for (ConstRtpPacketIterator it = packets_.begin(); it != packets_.end();
+      ++it) {
+    uint16_t seq_num = ((*it)->data[2] << 8) + (*it)->data[3];
+    printf("%u, ", seq_num);
+  }
+  printf("\n");
+}
+
+RTPPlayer::RTPPlayer(const char* filename,
+                     RtpData* callback,
+                     TickTimeBase* clock)
+:
+_clock(clock),
+_rtpModule(NULL),
+_nextRtpTime(0),
+_dataCallback(callback),
+_firstPacket(true),
+_lossRate(0.0f),
+_nackEnabled(false),
+_resendPacketCount(0),
+_noLossStartup(100),
+_endOfFile(false),
+_rttMs(0),
+_firstPacketRtpTime(0),
+_firstPacketTimeMs(0),
+_reorderBuffer(NULL),
+_reordering(false),
+_nextPacket(),
+_nextPacketLength(0),
+_randVec(),
+_randVecPos(0)
+{
+    _rtpFile = fopen(filename, "rb");
+    memset(_nextPacket, 0, sizeof(_nextPacket));
+}
+
+RTPPlayer::~RTPPlayer()
+{
+    delete _rtpModule;
+    if (_rtpFile != NULL)
+    {
+        fclose(_rtpFile);
+    }
+    if (_reorderBuffer != NULL)
+    {
+        delete _reorderBuffer;
+        _reorderBuffer = NULL;
+    }
+}
+
+WebRtc_Word32 RTPPlayer::Initialize(const PayloadTypeList* payloadList)
+{
+    RtpRtcp::Configuration configuration;
+    configuration.id = 1;
+    configuration.audio = false;
+    configuration.incoming_data = _dataCallback;
+    _rtpModule = RtpRtcp::CreateRtpRtcp(configuration);
+
+    std::srand(321);
+    for (int i=0; i < RAND_VEC_LENGTH; i++)
+    {
+        _randVec[i] = rand();
+    }
+    _randVecPos = 0;
+    WebRtc_Word32 ret = _rtpModule->SetNACKStatus(kNackOff);
+    if (ret < 0)
+    {
+        return -1;
+    }
+    _rtpModule->SetRTCPStatus(kRtcpNonCompound);
+    _rtpModule->SetTMMBRStatus(true);
+
+    if (ret < 0)
+    {
+        return -1;
+    }
+    // Register payload types
+    for (PayloadTypeList::const_iterator it = payloadList->begin();
+        it != payloadList->end(); ++it) {
+        PayloadCodecTuple* payloadType = *it;
+        if (payloadType != NULL)
+        {
+            VideoCodec videoCodec;
+            strncpy(videoCodec.plName, payloadType->name.c_str(), 32);
+            videoCodec.plType = payloadType->payloadType;
+            if (_rtpModule->RegisterReceivePayload(videoCodec) < 0)
+            {
+                return -1;
+            }
+        }
+    }
+    if (ReadHeader() < 0)
+    {
+        return -1;
+    }
+    memset(_nextPacket, 0, sizeof(_nextPacket));
+    _nextPacketLength = ReadPacket(_nextPacket, &_nextRtpTime);
+    return 0;
+}
+
+WebRtc_Word32 RTPPlayer::ReadHeader()
+{
+    char firstline[FIRSTLINELEN];
+    if (_rtpFile == NULL)
+    {
+        return -1;
+    }
+    EXPECT_TRUE(fgets(firstline, FIRSTLINELEN, _rtpFile) != NULL);
+    if(strncmp(firstline,"#!rtpplay",9) == 0) {
+        if(strncmp(firstline,"#!rtpplay1.0",12) != 0){
+            printf("ERROR: wrong rtpplay version, must be 1.0\n");
+            return -1;
+        }
+    }
+    else if (strncmp(firstline,"#!RTPencode",11) == 0) {
+        if(strncmp(firstline,"#!RTPencode1.0",14) != 0){
+            printf("ERROR: wrong RTPencode version, must be 1.0\n");
+            return -1;
+        }
+    }
+    else {
+        printf("ERROR: wrong file format of input file\n");
+        return -1;
+    }
+
+    WebRtc_UWord32 start_sec;
+    WebRtc_UWord32 start_usec;
+    WebRtc_UWord32 source;
+    WebRtc_UWord16 port;
+    WebRtc_UWord16 padding;
+
+    EXPECT_GT(fread(&start_sec, 4, 1, _rtpFile), 0u);
+    start_sec=ntohl(start_sec);
+    EXPECT_GT(fread(&start_usec, 4, 1, _rtpFile), 0u);
+    start_usec=ntohl(start_usec);
+    EXPECT_GT(fread(&source, 4, 1, _rtpFile), 0u);
+    source=ntohl(source);
+    EXPECT_GT(fread(&port, 2, 1, _rtpFile), 0u);
+    port=ntohs(port);
+    EXPECT_GT(fread(&padding, 2, 1, _rtpFile), 0u);
+    padding=ntohs(padding);
+    return 0;
+}
+
+WebRtc_UWord32 RTPPlayer::TimeUntilNextPacket() const
+{
+    WebRtc_Word64 timeLeft = (_nextRtpTime - _firstPacketRtpTime) - (_clock->MillisecondTimestamp() - _firstPacketTimeMs);
+    if (timeLeft < 0)
+    {
+        return 0;
+    }
+    return static_cast<WebRtc_UWord32>(timeLeft);
+}
+
+WebRtc_Word32 RTPPlayer::NextPacket(const WebRtc_Word64 timeNow)
+{
+    // Send any packets ready to be resent,
+    RawRtpPacket* resend_packet = _lostPackets.NextPacketToResend(timeNow);
+    while (resend_packet != NULL) {
+      const uint16_t seqNo = (resend_packet->data[2] << 8) +
+          resend_packet->data[3];
+      printf("Resend: %u\n", seqNo);
+      int ret = SendPacket(resend_packet->data, resend_packet->length);
+      delete resend_packet;
+      _resendPacketCount++;
+      if (ret > 0) {
+        _lostPackets.SetPacketResent(seqNo, _clock->MillisecondTimestamp());
+      } else if (ret < 0) {
+        return ret;
+      }
+      resend_packet = _lostPackets.NextPacketToResend(timeNow);
+    }
+
+    // Send any packets from rtp file
+    if (!_endOfFile && (TimeUntilNextPacket() == 0 || _firstPacket))
+    {
+        _rtpModule->Process();
+        if (_firstPacket)
+        {
+            _firstPacketRtpTime = static_cast<WebRtc_Word64>(_nextRtpTime);
+            _firstPacketTimeMs = _clock->MillisecondTimestamp();
+        }
+        if (_reordering && _reorderBuffer == NULL)
+        {
+            _reorderBuffer = new RawRtpPacket(reinterpret_cast<WebRtc_UWord8*>(_nextPacket), static_cast<WebRtc_UWord16>(_nextPacketLength));
+            return 0;
+        }
+        WebRtc_Word32 ret = SendPacket(reinterpret_cast<WebRtc_UWord8*>(_nextPacket), static_cast<WebRtc_UWord16>(_nextPacketLength));
+        if (_reordering && _reorderBuffer != NULL)
+        {
+            RawRtpPacket* rtpPacket = _reorderBuffer;
+            _reorderBuffer = NULL;
+            SendPacket(rtpPacket->data, rtpPacket->length);
+            delete rtpPacket;
+        }
+        _firstPacket = false;
+        if (ret < 0)
+        {
+            return ret;
+        }
+        _nextPacketLength = ReadPacket(_nextPacket, &_nextRtpTime);
+        if (_nextPacketLength < 0)
+        {
+            _endOfFile = true;
+            return 0;
+        }
+        else if (_nextPacketLength == 0)
+        {
+            return 0;
+        }
+    }
+    if (_endOfFile && _lostPackets.NumberOfPacketsToResend() == 0)
+    {
+        return 1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 RTPPlayer::SendPacket(WebRtc_UWord8* rtpData, WebRtc_UWord16 rtpLen)
+{
+    if ((_randVec[(_randVecPos++) % RAND_VEC_LENGTH] + 1.0)/(RAND_MAX + 1.0) < _lossRate &&
+        _noLossStartup < 0)
+    {
+        if (_nackEnabled)
+        {
+            const WebRtc_UWord16 seqNo = (rtpData[2] << 8) + rtpData[3];
+            printf("Throw: %u\n", seqNo);
+            _lostPackets.AddPacket(new RawRtpPacket(rtpData, rtpLen));
+            return 0;
+        }
+    }
+    else if (rtpLen > 0)
+    {
+        WebRtc_Word32 ret = _rtpModule->IncomingPacket(rtpData, rtpLen);
+        if (ret < 0)
+        {
+            return -1;
+        }
+    }
+    if (_noLossStartup >= 0)
+    {
+        _noLossStartup--;
+    }
+    return 1;
+}
+
+WebRtc_Word32 RTPPlayer::ReadPacket(WebRtc_Word16* rtpdata, WebRtc_UWord32* offset)
+{
+    WebRtc_UWord16 length, plen;
+
+    if (fread(&length,2,1,_rtpFile)==0)
+        return(-1);
+    length=ntohs(length);
+
+    if (fread(&plen,2,1,_rtpFile)==0)
+        return(-1);
+    plen=ntohs(plen);
+
+    if (fread(offset,4,1,_rtpFile)==0)
+        return(-1);
+    *offset=ntohl(*offset);
+
+    // Use length here because a plen of 0 specifies rtcp
+    length = (WebRtc_UWord16) (length - HDR_SIZE);
+    if (fread((unsigned short *) rtpdata,1,length,_rtpFile) != length)
+        return(-1);
+
+#ifdef JUNK_DATA
+    // destroy the RTP payload with random data
+    if (plen > 12) { // ensure that we have more than just a header
+        for ( int ix = 12; ix < plen; ix=ix+2 ) {
+            rtpdata[ix>>1] = (short) (rtpdata[ix>>1] + (short) rand());
+        }
+    }
+#endif
+    return plen;
+}
+
+WebRtc_Word32 RTPPlayer::SimulatePacketLoss(float lossRate, bool enableNack, WebRtc_UWord32 rttMs)
+{
+    _nackEnabled = enableNack;
+    _lossRate = lossRate;
+    _rttMs = rttMs;
+    return 0;
+}
+
+WebRtc_Word32 RTPPlayer::SetReordering(bool enabled)
+{
+    _reordering = enabled;
+    return 0;
+}
+
+WebRtc_Word32 RTPPlayer::ResendPackets(const WebRtc_UWord16* sequenceNumbers, WebRtc_UWord16 length)
+{
+    if (sequenceNumbers == NULL)
+    {
+        return 0;
+    }
+    for (int i=0; i < length; i++)
+    {
+        _lostPackets.SetResendTime(sequenceNumbers[i],
+                                   _clock->MillisecondTimestamp() + _rttMs,
+                                   _clock->MillisecondTimestamp());
+    }
+    return 0;
+}
+
+void RTPPlayer::Print() const
+{
+    printf("Resent packets: %u\n", _resendPacketCount);
+    _lostPackets.Print();
+}
diff --git a/src/modules/video_coding/main/test/rtp_player.h b/src/modules/video_coding/main/test/rtp_player.h
new file mode 100644
index 0000000..9920d0c
--- /dev/null
+++ b/src/modules/video_coding/main/test/rtp_player.h
@@ -0,0 +1,119 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_RTP_PLAYER_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_RTP_PLAYER_H_
+
+#include "typedefs.h"
+#include "rtp_rtcp.h"
+#include "critical_section_wrapper.h"
+#include "video_coding_defines.h"
+#include "modules/video_coding/main/source/tick_time_base.h"
+
+#include <stdio.h>
+#include <list>
+#include <string>
+
+#define HDR_SIZE 8 // rtpplay packet header size in bytes
+#define FIRSTLINELEN 40
+#define RAND_VEC_LENGTH 4096
+
+struct PayloadCodecTuple;
+
+struct RawRtpPacket
+{
+public:
+    RawRtpPacket(WebRtc_UWord8* rtp_data, WebRtc_UWord16 rtp_length);
+    ~RawRtpPacket();
+
+    uint8_t* data;
+    uint16_t length;
+    int64_t resend_time_ms;
+};
+
+typedef std::list<PayloadCodecTuple*> PayloadTypeList;
+typedef std::list<RawRtpPacket*> RtpPacketList;
+typedef RtpPacketList::iterator RtpPacketIterator;
+typedef RtpPacketList::const_iterator ConstRtpPacketIterator;
+
+class LostPackets {
+ public:
+  LostPackets();
+  ~LostPackets();
+
+  void AddPacket(RawRtpPacket* packet);
+  void SetResendTime(uint16_t sequenceNumber,
+                     int64_t resendTime,
+                     int64_t nowMs);
+  RawRtpPacket* NextPacketToResend(int64_t timeNow);
+  int NumberOfPacketsToResend() const;
+  void SetPacketResent(uint16_t seqNo, int64_t nowMs);
+  void Print() const;
+
+ private:
+  webrtc::CriticalSectionWrapper* crit_sect_;
+  int loss_count_;
+  FILE* debug_file_;
+  RtpPacketList packets_;
+};
+
+struct PayloadCodecTuple
+{
+    PayloadCodecTuple(WebRtc_UWord8 plType, std::string codecName, webrtc::VideoCodecType type) :
+        name(codecName), payloadType(plType), codecType(type) {};
+    const std::string name;
+    const WebRtc_UWord8 payloadType;
+    const webrtc::VideoCodecType codecType;
+};
+
+class RTPPlayer : public webrtc::VCMPacketRequestCallback
+{
+public:
+    RTPPlayer(const char* filename,
+              webrtc::RtpData* callback,
+              webrtc::TickTimeBase* clock);
+    virtual ~RTPPlayer();
+
+    WebRtc_Word32 Initialize(const PayloadTypeList* payloadList);
+    WebRtc_Word32 NextPacket(const WebRtc_Word64 timeNow);
+    WebRtc_UWord32 TimeUntilNextPacket() const;
+    WebRtc_Word32 SimulatePacketLoss(float lossRate, bool enableNack = false, WebRtc_UWord32 rttMs = 0);
+    WebRtc_Word32 SetReordering(bool enabled);
+    WebRtc_Word32 ResendPackets(const WebRtc_UWord16* sequenceNumbers, WebRtc_UWord16 length);
+    void Print() const;
+
+private:
+    WebRtc_Word32 SendPacket(WebRtc_UWord8* rtpData, WebRtc_UWord16 rtpLen);
+    WebRtc_Word32 ReadPacket(WebRtc_Word16* rtpdata, WebRtc_UWord32* offset);
+    WebRtc_Word32 ReadHeader();
+    webrtc::TickTimeBase* _clock;
+    FILE*              _rtpFile;
+    webrtc::RtpRtcp*   _rtpModule;
+    WebRtc_UWord32     _nextRtpTime;
+    webrtc::RtpData*   _dataCallback;
+    bool               _firstPacket;
+    float              _lossRate;
+    bool               _nackEnabled;
+    LostPackets        _lostPackets;
+    WebRtc_UWord32     _resendPacketCount;
+    WebRtc_Word32      _noLossStartup;
+    bool               _endOfFile;
+    WebRtc_UWord32     _rttMs;
+    WebRtc_Word64      _firstPacketRtpTime;
+    WebRtc_Word64      _firstPacketTimeMs;
+    RawRtpPacket*      _reorderBuffer;
+    bool               _reordering;
+    WebRtc_Word16      _nextPacket[8000];
+    WebRtc_Word32      _nextPacketLength;
+    int                _randVec[RAND_VEC_LENGTH];
+    int                _randVecPos;
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_RTP_PLAYER_H_
diff --git a/src/modules/video_coding/main/test/subfigure.m b/src/modules/video_coding/main/test/subfigure.m
new file mode 100644
index 0000000..eadfcb6
--- /dev/null
+++ b/src/modules/video_coding/main/test/subfigure.m
@@ -0,0 +1,30 @@
+function H = subfigure(m, n, p)
+%
+% H = SUBFIGURE(m, n, p)
+%
+% Create a new figure window and adjust position and size such that it will
+% become the p-th tile in an m-by-n matrix of windows. (The interpretation of
+% m, n, and p is the same as for SUBPLOT.
+%
+% Henrik Lundin, 2009-01-19
+%
+
+
+h = figure;
+
+[j, i] = ind2sub([n m], p);
+scrsz = get(0,'ScreenSize'); % get screen size
+%scrsz = [1, 1, 1600, 1200];
+
+taskbarSize = 58;
+windowbarSize = 68;
+windowBorder = 4;
+
+scrsz(2) = scrsz(2) + taskbarSize;
+scrsz(4) = scrsz(4) - taskbarSize;
+
+set(h, 'position', [(j-1)/n * scrsz(3) + scrsz(1) + windowBorder,...
+        (m-i)/m * scrsz(4) + scrsz(2) + windowBorder, ...
+        scrsz(3)/n - (windowBorder + windowBorder),...
+        scrsz(4)/m - (windowbarSize + windowBorder + windowBorder)]);
+
diff --git a/src/modules/video_coding/main/test/test_callbacks.cc b/src/modules/video_coding/main/test/test_callbacks.cc
new file mode 100644
index 0000000..2f18dd1
--- /dev/null
+++ b/src/modules/video_coding/main/test/test_callbacks.cc
@@ -0,0 +1,467 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test_callbacks.h"
+
+#include <cmath>
+
+#include "modules/video_coding/main/source/tick_time_base.h"
+#include "rtp_dump.h"
+#include "test_macros.h"
+
+namespace webrtc {
+
+/******************************
+ *  VCMEncodeCompleteCallback
+ *****************************/
+// Basic callback implementation
+// passes the encoded frame directly to the encoder
+// Packetization callback implementation
+VCMEncodeCompleteCallback::VCMEncodeCompleteCallback(FILE* encodedFile):
+    _encodedFile(encodedFile),
+    _encodedBytes(0),
+    _VCMReceiver(NULL),
+    _seqNo(0),
+    _encodeComplete(false),
+    _width(0),
+    _height(0),
+    _codecType(kRTPVideoNoVideo)
+{
+    //
+}
+VCMEncodeCompleteCallback::~VCMEncodeCompleteCallback()
+{
+}
+
+void
+VCMEncodeCompleteCallback::RegisterTransportCallback(
+                                            VCMPacketizationCallback* transport)
+{
+}
+
+WebRtc_Word32
+VCMEncodeCompleteCallback::SendData(
+        const FrameType frameType,
+        const WebRtc_UWord8  payloadType,
+        const WebRtc_UWord32 timeStamp,
+        int64_t capture_time_ms,
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord32 payloadSize,
+        const RTPFragmentationHeader& fragmentationHeader,
+        const RTPVideoHeader* videoHdr)
+{
+    // will call the VCMReceiver input packet
+    _frameType = frameType;
+    // writing encodedData into file
+    if (fwrite(payloadData, 1, payloadSize, _encodedFile) !=  payloadSize) {
+      return -1;
+    }
+    WebRtcRTPHeader rtpInfo;
+    rtpInfo.header.markerBit = true; // end of frame
+    rtpInfo.type.Video.isFirstPacket = true;
+    rtpInfo.type.Video.codec = _codecType;
+    rtpInfo.type.Video.height = (WebRtc_UWord16)_height;
+    rtpInfo.type.Video.width = (WebRtc_UWord16)_width;
+    switch (_codecType)
+    {
+    case webrtc::kRTPVideoVP8:
+        rtpInfo.type.Video.codecHeader.VP8.InitRTPVideoHeaderVP8();
+        rtpInfo.type.Video.codecHeader.VP8.nonReference =
+            videoHdr->codecHeader.VP8.nonReference;
+        rtpInfo.type.Video.codecHeader.VP8.pictureId =
+            videoHdr->codecHeader.VP8.pictureId;
+        break;
+    case webrtc::kRTPVideoI420:
+        break;
+    default:
+        assert(false);
+        return -1;
+    }
+
+    rtpInfo.header.payloadType = payloadType;
+    rtpInfo.header.sequenceNumber = _seqNo++;
+    rtpInfo.header.ssrc = 0;
+    rtpInfo.header.timestamp = timeStamp;
+    rtpInfo.frameType = frameType;
+    // Size should also be received from that table, since the payload type
+    // defines the size.
+
+    _encodedBytes += payloadSize;
+    // directly to receiver
+    int ret = _VCMReceiver->IncomingPacket(payloadData, payloadSize, rtpInfo);
+    _encodeComplete = true;
+
+    return ret;
+}
+
+float
+VCMEncodeCompleteCallback::EncodedBytes()
+{
+    return _encodedBytes;
+}
+
+bool
+VCMEncodeCompleteCallback::EncodeComplete()
+{
+    if (_encodeComplete)
+    {
+        _encodeComplete = false;
+        return true;
+    }
+    return false;
+}
+
+void
+VCMEncodeCompleteCallback::Initialize()
+{
+    _encodeComplete = false;
+    _encodedBytes = 0;
+    _seqNo = 0;
+    return;
+}
+
+void
+VCMEncodeCompleteCallback::ResetByteCount()
+{
+    _encodedBytes = 0;
+}
+
+/***********************************/
+/*   VCMRTPEncodeCompleteCallback  */
+/***********************************/
+// Encode Complete callback implementation
+// passes the encoded frame via the RTP module to the decoder
+// Packetization callback implementation
+
+WebRtc_Word32
+VCMRTPEncodeCompleteCallback::SendData(
+        const FrameType frameType,
+        const WebRtc_UWord8  payloadType,
+        const WebRtc_UWord32 timeStamp,
+        int64_t capture_time_ms,
+        const WebRtc_UWord8* payloadData,
+        const WebRtc_UWord32 payloadSize,
+        const RTPFragmentationHeader& fragmentationHeader,
+        const RTPVideoHeader* videoHdr)
+{
+    _frameType = frameType;
+    _encodedBytes+= payloadSize;
+    _encodeComplete = true;
+    return _RTPModule->SendOutgoingData(frameType,
+                                        payloadType,
+                                        timeStamp,
+                                        capture_time_ms,
+                                        payloadData,
+                                        payloadSize,
+                                        &fragmentationHeader,
+                                        videoHdr);
+}
+
+float
+VCMRTPEncodeCompleteCallback::EncodedBytes()
+{
+    // only good for one call  - after which will reset value;
+    float tmp = _encodedBytes;
+    _encodedBytes = 0;
+    return tmp;
+ }
+
+bool
+VCMRTPEncodeCompleteCallback::EncodeComplete()
+{
+    if (_encodeComplete)
+    {
+        _encodeComplete = false;
+        return true;
+    }
+    return false;
+}
+
+// Decoded Frame Callback Implementation
+
+WebRtc_Word32
+VCMDecodeCompleteCallback::FrameToRender(VideoFrame& videoFrame)
+{
+  if (fwrite(videoFrame.Buffer(), 1, videoFrame.Length(),
+             _decodedFile) !=  videoFrame.Length()) {
+    return -1;
+  }
+  _decodedBytes+= videoFrame.Length();
+  return VCM_OK;
+ }
+
+WebRtc_Word32
+VCMDecodeCompleteCallback::DecodedBytes()
+{
+    return _decodedBytes;
+}
+
+RTPSendCompleteCallback::RTPSendCompleteCallback(TickTimeBase* clock,
+                                                 const char* filename):
+    _clock(clock),
+    _sendCount(0),
+    _rtp(NULL),
+    _lossPct(0),
+    _burstLength(0),
+    _networkDelayMs(0),
+    _jitterVar(0),
+    _prevLossState(0),
+    _totalSentLength(0),
+    _rtpPackets(),
+    _rtpDump(NULL)
+{
+    if (filename != NULL)
+    {
+        _rtpDump = RtpDump::CreateRtpDump();
+        _rtpDump->Start(filename);
+    }
+}
+
+RTPSendCompleteCallback::~RTPSendCompleteCallback()
+{
+    if (_rtpDump != NULL)
+    {
+        _rtpDump->Stop();
+        RtpDump::DestroyRtpDump(_rtpDump);
+    }
+    // Delete remaining packets
+    while (!_rtpPackets.empty())
+    {
+        // Take first packet in list
+        delete _rtpPackets.front();
+        _rtpPackets.pop_front();
+    }
+}
+
+int
+RTPSendCompleteCallback::SendPacket(int channel, const void *data, int len)
+{
+    _sendCount++;
+    _totalSentLength += len;
+
+    if (_rtpDump != NULL)
+    {
+        if (_rtpDump->DumpPacket((const WebRtc_UWord8*)data, len) != 0)
+        {
+            return -1;
+        }
+    }
+
+    bool transmitPacket = true;
+    transmitPacket = PacketLoss();
+
+    WebRtc_UWord64 now = _clock->MillisecondTimestamp();
+    // Insert outgoing packet into list
+    if (transmitPacket)
+    {
+        RtpPacket* newPacket = new RtpPacket();
+        memcpy(newPacket->data, data, len);
+        newPacket->length = len;
+        // Simulate receive time = network delay + packet jitter
+        // simulated as a Normal distribution random variable with
+        // mean = networkDelay and variance = jitterVar
+        WebRtc_Word32
+        simulatedDelay = (WebRtc_Word32)NormalDist(_networkDelayMs,
+                                                   sqrt(_jitterVar));
+        newPacket->receiveTime = now + simulatedDelay;
+        _rtpPackets.push_back(newPacket);
+    }
+
+    // Are we ready to send packets to the receiver?
+    RtpPacket* packet = NULL;
+
+    while (!_rtpPackets.empty())
+    {
+        // Take first packet in list
+        packet = _rtpPackets.front();
+        WebRtc_Word64 timeToReceive = packet->receiveTime - now;
+        if (timeToReceive > 0)
+        {
+            // No available packets to send
+            break;
+        }
+
+        _rtpPackets.pop_front();
+        assert(_rtp);  // We must have a configured RTP module for this test.
+        // Send to receive side
+        if (_rtp->IncomingPacket((const WebRtc_UWord8*)packet->data,
+                                 packet->length) < 0)
+        {
+            delete packet;
+            packet = NULL;
+            // Will return an error after the first packet that goes wrong
+            return -1;
+        }
+        delete packet;
+        packet = NULL;
+    }
+    return len; // OK
+}
+
+int
+RTPSendCompleteCallback::SendRTCPPacket(int channel, const void *data, int len)
+{
+    // Incorporate network conditions
+    return SendPacket(channel, data, len);
+}
+
+void
+RTPSendCompleteCallback::SetLossPct(double lossPct)
+{
+    _lossPct = lossPct;
+    return;
+}
+
+void
+RTPSendCompleteCallback::SetBurstLength(double burstLength)
+{
+    _burstLength = burstLength;
+    return;
+}
+
+bool
+RTPSendCompleteCallback::PacketLoss()
+{
+    bool transmitPacket = true;
+    if (_burstLength <= 1.0)
+    {
+        // Random loss: if _burstLength parameter is not set, or <=1
+        if (UnifomLoss(_lossPct))
+        {
+            // drop
+            transmitPacket = false;
+        }
+    }
+    else
+    {
+        // Simulate bursty channel (Gilbert model)
+        // (1st order) Markov chain model with memory of the previous/last
+        // packet state (loss or received)
+
+        // 0 = received state
+        // 1 = loss state
+
+        // probTrans10: if previous packet is lost, prob. to -> received state
+        // probTrans11: if previous packet is lost, prob. to -> loss state
+
+        // probTrans01: if previous packet is received, prob. to -> loss state
+        // probTrans00: if previous packet is received, prob. to -> received
+
+        // Map the two channel parameters (average loss rate and burst length)
+        // to the transition probabilities:
+        double probTrans10 = 100 * (1.0 / _burstLength);
+        double probTrans11 = (100.0 - probTrans10);
+        double probTrans01 = (probTrans10 * ( _lossPct / (100.0 - _lossPct)));
+
+        // Note: Random loss (Bernoulli) model is a special case where:
+        // burstLength = 100.0 / (100.0 - _lossPct) (i.e., p10 + p01 = 100)
+
+        if (_prevLossState == 0 )
+        {
+            // previous packet was received
+            if (UnifomLoss(probTrans01))
+            {
+                // drop, update previous state to loss
+                _prevLossState = 1;
+                transmitPacket = false;
+            }
+        }
+        else if (_prevLossState == 1)
+        {
+            _prevLossState = 0;
+            // previous packet was lost
+            if (UnifomLoss(probTrans11))
+            {
+                // drop, update previous state to loss
+                _prevLossState = 1;
+                transmitPacket = false;
+             }
+        }
+    }
+    return transmitPacket;
+}
+
+
+bool
+RTPSendCompleteCallback::UnifomLoss(double lossPct)
+{
+    double randVal = (std::rand() + 1.0)/(RAND_MAX + 1.0);
+    return randVal < lossPct/100;
+}
+
+WebRtc_Word32
+PacketRequester::ResendPackets(const WebRtc_UWord16* sequenceNumbers,
+                               WebRtc_UWord16 length)
+{
+    return _rtp.SendNACK(sequenceNumbers, length);
+}
+
+WebRtc_Word32
+SendStatsTest::SendStatistics(const WebRtc_UWord32 bitRate,
+                              const WebRtc_UWord32 frameRate)
+{
+    TEST(frameRate <= _frameRate);
+    TEST(bitRate > 0 && bitRate < 100000);
+    printf("VCM 1 sec: Bit rate: %u\tFrame rate: %u\n", bitRate, frameRate);
+    return 0;
+}
+
+WebRtc_Word32 KeyFrameReqTest::RequestKeyFrame() {
+  printf("Key frame requested\n");
+  return 0;
+}
+
+
+VideoProtectionCallback::VideoProtectionCallback():
+delta_fec_params_(),
+key_fec_params_()
+{
+    memset(&delta_fec_params_, 0, sizeof(delta_fec_params_));
+    memset(&key_fec_params_, 0, sizeof(key_fec_params_));
+}
+
+VideoProtectionCallback::~VideoProtectionCallback()
+{
+    //
+}
+
+WebRtc_Word32
+VideoProtectionCallback::ProtectionRequest(
+    const FecProtectionParams* delta_fec_params,
+    const FecProtectionParams* key_fec_params,
+    WebRtc_UWord32* sent_video_rate_bps,
+    WebRtc_UWord32* sent_nack_rate_bps,
+    WebRtc_UWord32* sent_fec_rate_bps)
+{
+    key_fec_params_ = *key_fec_params;
+    delta_fec_params_ = *delta_fec_params;
+
+    // Update RTP
+    if (_rtp->SetFecParameters(&delta_fec_params_,
+                               &key_fec_params_) != 0)
+    {
+        printf("Error in Setting FEC rate\n");
+        return -1;
+
+    }
+    return 0;
+
+}
+
+FecProtectionParams VideoProtectionCallback::DeltaFecParameters() const
+{
+    return delta_fec_params_;
+}
+
+FecProtectionParams VideoProtectionCallback::KeyFecParameters() const
+{
+    return key_fec_params_;
+}
+}  // namespace webrtc
diff --git a/src/modules/video_coding/main/test/test_callbacks.h b/src/modules/video_coding/main/test/test_callbacks.h
new file mode 100644
index 0000000..6731f8c
--- /dev/null
+++ b/src/modules/video_coding/main/test/test_callbacks.h
@@ -0,0 +1,256 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_TEST_CALLBACKS_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_TEST_CALLBACKS_H_
+
+/*
+ * Declaration of general callbacks that are used throughout VCM's offline tests
+ */
+
+
+#include <string.h>
+#include <cstdlib>
+#include <fstream>
+#include <list>
+
+#include "module_common_types.h"
+#include "rtp_rtcp.h"
+#include "test_util.h"
+#include "trace.h"
+#include "video_coding.h"
+
+namespace webrtc
+{
+class RtpDump;
+
+// Send Side - Packetization callback - send an encoded frame to the VCMReceiver
+class VCMEncodeCompleteCallback: public VCMPacketizationCallback
+{
+public:
+    // Constructor input: file in which encoded data will be written
+    VCMEncodeCompleteCallback(FILE* encodedFile);
+    virtual ~VCMEncodeCompleteCallback();
+    // Register transport callback
+    void RegisterTransportCallback(VCMPacketizationCallback* transport);
+    // Process encoded data received from the encoder, pass stream to the
+    // VCMReceiver module
+    WebRtc_Word32 SendData(const FrameType frameType,
+                           const WebRtc_UWord8 payloadType,
+                           const WebRtc_UWord32 timeStamp,
+                           int64_t capture_time_ms,
+                           const WebRtc_UWord8* payloadData,
+                           const WebRtc_UWord32 payloadSize,
+                           const RTPFragmentationHeader& fragmentationHeader,
+                           const RTPVideoHeader* videoHdr);
+    // Register exisitng VCM. Currently - encode and decode under same module.
+    void RegisterReceiverVCM(VideoCodingModule *vcm) {_VCMReceiver = vcm;}
+    // Return size of last encoded frame data (all frames in the sequence)
+    // Good for only one call - after which will reset value
+    // (to allow detection of frame drop)
+    float EncodedBytes();
+    // Return encode complete (true/false)
+    bool EncodeComplete();
+    // Inform callback of codec used
+    void SetCodecType(RTPVideoCodecTypes codecType)
+    {_codecType = codecType;}
+    // Inform callback of frame dimensions
+    void SetFrameDimensions(WebRtc_Word32 width, WebRtc_Word32 height)
+    {
+        _width = width;
+        _height = height;
+    }
+    // Initialize callback data
+    void Initialize();
+    void ResetByteCount();
+
+    // Conversion function for payload type (needed for the callback function)
+
+private:
+    FILE*              _encodedFile;
+    float              _encodedBytes;
+    VideoCodingModule* _VCMReceiver;
+    FrameType          _frameType;
+    WebRtc_UWord16     _seqNo;
+    bool               _encodeComplete;
+    WebRtc_Word32      _width;
+    WebRtc_Word32      _height;
+    RTPVideoCodecTypes _codecType;
+
+}; // end of VCMEncodeCompleteCallback
+
+// Send Side - Packetization callback - packetize an encoded frame via the
+// RTP module
+class VCMRTPEncodeCompleteCallback: public VCMPacketizationCallback
+{
+public:
+    VCMRTPEncodeCompleteCallback(RtpRtcp* rtp) :
+        _encodedBytes(0),
+        _encodeComplete(false),
+        _RTPModule(rtp) {}
+
+    virtual ~VCMRTPEncodeCompleteCallback() {}
+    // Process encoded data received from the encoder, pass stream to the
+    // RTP module
+    WebRtc_Word32 SendData(const FrameType frameType,
+                           const WebRtc_UWord8 payloadType,
+                           const WebRtc_UWord32 timeStamp,
+                           int64_t capture_time_ms,
+                           const WebRtc_UWord8* payloadData,
+                           const WebRtc_UWord32 payloadSize,
+                           const RTPFragmentationHeader& fragmentationHeader,
+                           const RTPVideoHeader* videoHdr);
+    // Return size of last encoded frame. Value good for one call
+    // (resets to zero after call to inform test of frame drop)
+    float EncodedBytes();
+    // Return encode complete (true/false)
+    bool EncodeComplete();
+    // Inform callback of codec used
+    void SetCodecType(RTPVideoCodecTypes codecType)
+    {_codecType = codecType;}
+
+    // Inform callback of frame dimensions
+    void SetFrameDimensions(WebRtc_Word16 width, WebRtc_Word16 height)
+    {
+        _width = width;
+        _height = height;
+    }
+
+private:
+    float              _encodedBytes;
+    FrameType          _frameType;
+    bool               _encodeComplete;
+    RtpRtcp*           _RTPModule;
+    WebRtc_Word16      _width;
+    WebRtc_Word16      _height;
+    RTPVideoCodecTypes _codecType;
+}; // end of VCMEncodeCompleteCallback
+
+// Decode Complete callback
+// Writes the decoded frames to a given file.
+class VCMDecodeCompleteCallback: public VCMReceiveCallback
+{
+public:
+    VCMDecodeCompleteCallback(FILE* decodedFile) :
+        _decodedFile(decodedFile), _decodedBytes(0) {}
+    virtual ~VCMDecodeCompleteCallback() {}
+    // Write decoded frame into file
+    WebRtc_Word32 FrameToRender(webrtc::VideoFrame& videoFrame);
+    WebRtc_Word32 DecodedBytes();
+private:
+    FILE*               _decodedFile;
+    WebRtc_UWord32      _decodedBytes;
+}; // end of VCMDecodeCompleCallback class
+
+// Transport callback
+// Called by the RTP Sender - simulates sending packets through a network to the
+// RTP receiver. User can set network conditions as: RTT, packet loss,
+// burst length and jitter.
+class RTPSendCompleteCallback: public Transport
+{
+public:
+    // Constructor input: (receive side) rtp module to send encoded data to
+    RTPSendCompleteCallback(TickTimeBase* clock,
+                            const char* filename = NULL);
+    virtual ~RTPSendCompleteCallback();
+
+    void SetRtpModule(RtpRtcp* rtp_module) { _rtp = rtp_module; }
+    // Send Packet to receive side RTP module
+    virtual int SendPacket(int channel, const void *data, int len);
+    // Send RTCP Packet to receive side RTP module
+    virtual int SendRTCPPacket(int channel, const void *data, int len);
+    // Set percentage of channel loss in the network
+    void SetLossPct(double lossPct);
+    // Set average size of burst loss
+    void SetBurstLength(double burstLength);
+    // Set network delay in the network
+    void SetNetworkDelay(WebRtc_UWord32 networkDelayMs)
+                        {_networkDelayMs = networkDelayMs;};
+    // Set Packet jitter delay
+    void SetJitterVar(WebRtc_UWord32 jitterVar)
+                      {_jitterVar = jitterVar;};
+    // Return send count
+    int SendCount() {return _sendCount; }
+    // Return accumulated length in bytes of transmitted packets
+    WebRtc_UWord32 TotalSentLength() {return _totalSentLength;}
+protected:
+    // Randomly decide whether to drop packets, based on the channel model
+    bool PacketLoss();
+    // Random uniform loss model
+    bool UnifomLoss(double lossPct);
+
+    TickTimeBase*           _clock;
+    WebRtc_UWord32          _sendCount;
+    RtpRtcp*                _rtp;
+    double                  _lossPct;
+    double                  _burstLength;
+    WebRtc_UWord32          _networkDelayMs;
+    double                  _jitterVar;
+    bool                    _prevLossState;
+    WebRtc_UWord32          _totalSentLength;
+    std::list<RtpPacket*>   _rtpPackets;
+    RtpDump*                _rtpDump;
+};
+
+// Request re-transmission of packets (NACK)
+class PacketRequester: public VCMPacketRequestCallback
+{
+public:
+    PacketRequester(RtpRtcp& rtp) :
+        _rtp(rtp) {}
+    WebRtc_Word32 ResendPackets(const WebRtc_UWord16* sequenceNumbers,
+            WebRtc_UWord16 length);
+private:
+    webrtc::RtpRtcp& _rtp;
+};
+
+// Key frame request
+class KeyFrameReqTest: public VCMFrameTypeCallback
+{
+public:
+    WebRtc_Word32 RequestKeyFrame();
+};
+
+
+// VCM statistics
+class SendStatsTest: public webrtc::VCMSendStatisticsCallback
+{
+public:
+    SendStatsTest() : _frameRate(15) {}
+    WebRtc_Word32 SendStatistics(const WebRtc_UWord32 bitRate,
+            const WebRtc_UWord32 frameRate);
+    void SetTargetFrameRate(WebRtc_UWord32 frameRate) {_frameRate = frameRate;}
+private:
+    WebRtc_UWord32 _frameRate;
+};
+
+// Protection callback - allows the VCM (media optimization) to inform the RTP
+// module of the required protection(FEC rates/settings and NACK mode).
+class VideoProtectionCallback: public VCMProtectionCallback
+{
+public:
+    VideoProtectionCallback();
+    virtual ~VideoProtectionCallback();
+    void RegisterRtpModule(RtpRtcp* rtp) {_rtp = rtp;}
+    WebRtc_Word32 ProtectionRequest(
+        const FecProtectionParams* delta_fec_params,
+        const FecProtectionParams* key_fec_params,
+        WebRtc_UWord32* sent_video_rate_bps,
+        WebRtc_UWord32* sent_nack_rate_bps,
+        WebRtc_UWord32* sent_fec_rate_bps);
+    FecProtectionParams DeltaFecParameters() const;
+    FecProtectionParams KeyFecParameters() const;
+private:
+    RtpRtcp* _rtp;
+    FecProtectionParams delta_fec_params_;
+    FecProtectionParams key_fec_params_;
+};
+}  // namespace webrtc
+#endif
diff --git a/src/modules/video_coding/main/test/test_macros.h b/src/modules/video_coding/main/test/test_macros.h
new file mode 100644
index 0000000..31693b5
--- /dev/null
+++ b/src/modules/video_coding/main/test/test_macros.h
@@ -0,0 +1,45 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef VCM_TEST_MACROS_H
+#define VCM_TEST_MACROS_H
+
+#include <cstdio>
+#include <cstdlib>
+
+extern int vcmMacrosTests;
+extern int vcmMacrosErrors;
+
+#define PRINT_ERR_MSG(msg)                              \
+    do {                                                \
+        fprintf(stderr, "Error at line %i of %s\n%s",   \
+            __LINE__, __FILE__, msg);                   \
+    } while(0)
+
+#define TEST(expr)                                              \
+    do {                                                        \
+        vcmMacrosTests++;                                       \
+        if (!(expr)) {                                          \
+            PRINT_ERR_MSG("Assertion failed: " #expr "\n\n");   \
+            vcmMacrosErrors++;                                  \
+        }                                                       \
+    } while(0)
+
+#define TEST_EXIT_ON_FAIL(expr)                                             \
+    do {                                                                    \
+        vcmMacrosTests++;                                                   \
+        if (!(expr)) {                                                      \
+            PRINT_ERR_MSG("Assertion failed: " #expr "\nExiting...\n\n");   \
+            vcmMacrosErrors++;                                              \
+            exit(EXIT_FAILURE);                                             \
+        }                                                                   \
+    } while(0)
+
+#endif
diff --git a/src/modules/video_coding/main/test/test_util.cc b/src/modules/video_coding/main/test/test_util.cc
new file mode 100644
index 0000000..1f89168
--- /dev/null
+++ b/src/modules/video_coding/main/test/test_util.cc
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test_util.h"
+#include "test_macros.h"
+#include "rtp_dump.h"
+#include <cmath>
+
+using namespace webrtc;
+
+// Normal Distribution
+#define PI  3.14159265
+double
+NormalDist(double mean, double stdDev)
+{
+    // Creating a Normal distribution variable from two independent uniform
+    // variables based on the Box-Muller transform
+    double uniform1 = (std::rand() + 1.0) / (RAND_MAX + 1.0);
+    double uniform2 = (std::rand() + 1.0) / (RAND_MAX + 1.0);
+    return (mean + stdDev * sqrt(-2 * log(uniform1)) * cos(2 * PI * uniform2));
+}
+
+RTPVideoCodecTypes
+ConvertCodecType(const char* plname)
+{
+    if (strncmp(plname,"VP8" , 3) == 0)
+    {
+        return kRTPVideoVP8;
+    }
+    else if (strncmp(plname,"I420" , 5) == 0)
+    {
+        return kRTPVideoI420;
+    }
+    else
+    {
+        return kRTPVideoNoVideo; // Default value
+    }
+}
+
diff --git a/src/modules/video_coding/main/test/test_util.h b/src/modules/video_coding/main/test/test_util.h
new file mode 100644
index 0000000..d705434
--- /dev/null
+++ b/src/modules/video_coding/main/test/test_util.h
@@ -0,0 +1,75 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_TEST_UTIL_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_TEST_UTIL_H_
+
+/*
+ * General declarations used through out VCM offline tests.
+ */
+
+#include <string.h>
+#include <fstream>
+#include <cstdlib>
+
+#include "module_common_types.h"
+#include "testsupport/fileutils.h"
+
+// Class used for passing command line arguments to tests
+class CmdArgs
+{
+ public:
+  CmdArgs()
+      : codecName("VP8"),
+        codecType(webrtc::kVideoCodecVP8),
+        width(352),
+        height(288),
+        bitRate(500),
+        frameRate(30),
+        packetLoss(0),
+        rtt(0),
+        protectionMode(0),
+        camaEnable(0),
+        inputFile(webrtc::test::ProjectRootPath() +
+                  "/resources/foreman_cif.yuv"),
+        outputFile(webrtc::test::OutputPath() +
+                   "video_coding_test_output_352x288.yuv"),
+        testNum(11) {}
+     std::string codecName;
+     webrtc::VideoCodecType codecType;
+     int width;
+     int height;
+     int bitRate;
+     int frameRate;
+     int packetLoss;
+     int rtt;
+     int protectionMode;
+     int camaEnable;
+     std::string inputFile;
+     std::string outputFile;
+     int testNum;
+};
+
+// forward declaration
+int MTRxTxTest(CmdArgs& args);
+double NormalDist(double mean, double stdDev);
+
+struct RtpPacket {
+  WebRtc_Word8 data[1650]; // max packet size
+  WebRtc_Word32 length;
+  WebRtc_Word64 receiveTime;
+};
+
+
+// Codec type conversion
+webrtc::RTPVideoCodecTypes
+ConvertCodecType(const char* plname);
+
+#endif
diff --git a/src/modules/video_coding/main/test/tester_main.cc b/src/modules/video_coding/main/test/tester_main.cc
new file mode 100644
index 0000000..e5d7cd3
--- /dev/null
+++ b/src/modules/video_coding/main/test/tester_main.cc
@@ -0,0 +1,208 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "receiver_tests.h"
+#include "normal_test.h"
+#include "codec_database_test.h"
+#include "generic_codec_test.h"
+#include "../source/event.h"
+#include "media_opt_test.h"
+#include "quality_modes_test.h"
+#include "test_util.h"
+
+#include <stdlib.h>
+#include <string.h>
+
+#ifdef _WIN32
+//#include "vld.h"
+#endif
+
+using namespace webrtc;
+
+/*
+ * Build with EVENT_DEBUG defined
+ * to build the tests with simulated events.
+ */
+
+int vcmMacrosTests = 0;
+int vcmMacrosErrors = 0;
+
+int ParseArguments(int argc, char **argv, CmdArgs& args)
+{
+    int i = 1;
+    while (i < argc)
+    {
+        if (argv[i][0] != '-')
+        {
+            return -1;
+        }
+        switch (argv[i][1])
+        {
+        case 'w':
+        {
+            int w = atoi(argv[i+1]);
+            if (w < 1)
+                return -1;
+            args.width = w;
+            break;
+        }
+        case 'h':
+        {
+            int h = atoi(argv[i+1]);
+            if (h < 1)
+                return -1;
+            args.height = h;
+            break;
+        }
+        case 'b':
+        {
+            int b = atoi(argv[i+1]);
+            if (b < 1)
+                return -1;
+            args.bitRate = b;
+            break;
+        }
+        case 'f':
+        {
+            int f = atoi(argv[i+1]);
+            if (f < 1)
+                return -1;
+            args.frameRate = f;
+            break;
+        }
+        case 'c':
+        {
+            // TODO(holmer): This should be replaced with a map if more codecs
+            // are added
+            args.codecName = argv[i+1];
+            if (strncmp(argv[i+1], "VP8", 3) == 0)
+            {
+                args.codecType = kVideoCodecVP8;
+            }
+            else if (strncmp(argv[i+1], "I420", 4) == 0)
+            {
+                args.codecType = kVideoCodecI420;
+            }
+            else
+                return -1;
+
+            break;
+        }
+        case 'i':
+        {
+            args.inputFile = argv[i+1];
+            break;
+        }
+        case 'o':
+            args.outputFile = argv[i+1];
+            break;
+        case 'n':
+        {
+            int n = atoi(argv[i+1]);
+            if (n < 1)
+                return -1;
+            args.testNum = n;
+            break;
+        }
+        case 'p':
+        {
+            args.packetLoss = atoi(argv[i+1]);
+            break;
+        }
+        case 'r':
+        {
+            args.rtt = atoi(argv[i+1]);
+            break;
+        }
+        case 'm':
+        {
+            args.protectionMode = atoi(argv[i+1]);
+            break;
+        }
+        case 'e':
+        {
+            args.camaEnable = atoi(argv[i+1]);
+            break;
+        }
+        default:
+            return -1;
+        }
+        i += 2;
+    }
+    return 0;
+}
+
+int main(int argc, char **argv)
+{
+    CmdArgs args;
+
+    if (ParseArguments(argc, argv, args) != 0)
+    {
+        printf("Unable to parse input arguments\n");
+        printf("args: -n <test #> -w <width> -h <height> -f <fps> -b <bps> "
+               "-c <codec>  -i <input file> -o <output file> -p <packet loss> "
+               "-r <round-trip-time> -e <cama enable> -m <protection mode> \n");
+        return -1;
+    }
+
+    int ret = 0;
+    switch (args.testNum)
+    {
+    case 1:
+        ret = NormalTest::RunTest(args);
+        break;
+    case 2:
+        ret = MTRxTxTest(args);
+        break;
+    case 3:
+        ret = GenericCodecTest::RunTest(args);
+        break;
+    case 4:
+        ret = CodecDataBaseTest::RunTest(args);
+        break;
+    case 5:
+        // 0- normal, 1-Release test(50 runs) 2- from file
+        ret = MediaOptTest::RunTest(0, args);
+        break;
+    case 6:
+        ret = ReceiverTimingTests(args);
+        break;
+    case 7:
+        ret = RtpPlay(args);
+        break;
+    case 8:
+        ret = RtpPlayMT(args);
+        break;
+    case 9:
+        ret = JitterBufferTest(args);
+        break;
+    case 10:
+        ret = DecodeFromStorageTest(args);
+        break;
+    case 11:
+        ret = NormalTest::RunTest(args);
+        ret |= CodecDataBaseTest::RunTest(args);
+        ret |= ReceiverTimingTests(args);
+        ret |= JitterBufferTest(args);
+        break;
+    default:
+        ret = -1;
+        break;
+    }
+    if (ret != 0)
+    {
+        printf("Test failed!\n");
+        return -1;
+    }
+    return 0;
+}
+
+
+
diff --git a/src/modules/video_coding/main/test/video_rtp_play.cc b/src/modules/video_coding/main/test/video_rtp_play.cc
new file mode 100644
index 0000000..d07711b
--- /dev/null
+++ b/src/modules/video_coding/main/test/video_rtp_play.cc
@@ -0,0 +1,206 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "receiver_tests.h"
+#include "video_coding.h"
+#include "rtp_rtcp.h"
+#include "trace.h"
+#include "../source/event.h"
+#include "../source/internal_defines.h"
+#include "test_macros.h"
+#include "rtp_player.h"
+#include "modules/video_coding/main/source/mock/fake_tick_time.h"
+
+#include <stdio.h>
+#include <string.h>
+
+using namespace webrtc;
+
+WebRtc_Word32
+RtpDataCallback::OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                                          const WebRtc_UWord16 payloadSize,
+                                          const WebRtcRTPHeader* rtpHeader)
+{
+    return _vcm->IncomingPacket(payloadData, payloadSize, *rtpHeader);
+}
+
+FrameReceiveCallback::~FrameReceiveCallback()
+{
+    if (_timingFile != NULL)
+    {
+        fclose(_timingFile);
+    }
+    if (_outFile != NULL)
+    {
+        fclose(_outFile);
+    }
+}
+
+WebRtc_Word32
+FrameReceiveCallback::FrameToRender(VideoFrame& videoFrame)
+{
+    if (_timingFile == NULL)
+    {
+        _timingFile = fopen((test::OutputPath() + "renderTiming.txt").c_str(),
+                            "w");
+        if (_timingFile == NULL)
+        {
+            return -1;
+        }
+    }
+    if (_outFile == NULL)
+    {
+        _outFile = fopen(_outFilename.c_str(), "wb");
+        if (_outFile == NULL)
+        {
+            return -1;
+        }
+    }
+    fprintf(_timingFile, "%u, %u\n",
+            videoFrame.TimeStamp(),
+            MaskWord64ToUWord32(videoFrame.RenderTimeMs()));
+    if (fwrite(videoFrame.Buffer(), 1, videoFrame.Length(),
+               _outFile) !=  videoFrame.Length()) {
+      return -1;
+    }
+    return 0;
+}
+
+int RtpPlay(CmdArgs& args)
+{
+    // Make sure this test isn't executed without simulated events.
+#if !defined(EVENT_DEBUG)
+    return -1;
+#endif
+    // BEGIN Settings
+
+    bool protectionEnabled = false;
+    VCMVideoProtection protectionMethod = kProtectionNack;
+    WebRtc_UWord32 rttMS = 0;
+    float lossRate = 0.0f;
+    bool reordering = false;
+    WebRtc_UWord32 renderDelayMs = 0;
+    WebRtc_UWord32 minPlayoutDelayMs = 0;
+    const WebRtc_Word64 MAX_RUNTIME_MS = -1;
+    std::string outFile = args.outputFile;
+    if (outFile == "")
+        outFile = test::OutputPath() + "RtpPlay_decoded.yuv";
+    FrameReceiveCallback receiveCallback(outFile);
+    FakeTickTime clock(0);
+    VideoCodingModule* vcm = VideoCodingModule::Create(1, &clock);
+    RtpDataCallback dataCallback(vcm);
+    RTPPlayer rtpStream(args.inputFile.c_str(), &dataCallback, &clock);
+
+
+    PayloadTypeList payloadTypes;
+    payloadTypes.push_front(new PayloadCodecTuple(VCM_VP8_PAYLOAD_TYPE, "VP8",
+                                                  kVideoCodecVP8));
+
+    Trace::CreateTrace();
+    Trace::SetTraceFile((test::OutputPath() + "receiverTestTrace.txt").c_str());
+    Trace::SetLevelFilter(webrtc::kTraceAll);
+    // END Settings
+
+    // Set up
+
+    WebRtc_Word32 ret = vcm->InitializeReceiver();
+    if (ret < 0)
+    {
+        return -1;
+    }
+    vcm->RegisterReceiveCallback(&receiveCallback);
+    vcm->RegisterPacketRequestCallback(&rtpStream);
+
+    // Register receive codecs in VCM
+    for (PayloadTypeList::iterator it = payloadTypes.begin();
+        it != payloadTypes.end(); ++it) {
+        PayloadCodecTuple* payloadType = *it;
+        if (payloadType != NULL)
+        {
+            VideoCodec codec;
+            if (VideoCodingModule::Codec(payloadType->codecType, &codec) < 0)
+            {
+                return -1;
+            }
+            codec.plType = payloadType->payloadType;
+            if (vcm->RegisterReceiveCodec(&codec, 1) < 0)
+            {
+                return -1;
+            }
+        }
+    }
+
+    if (rtpStream.Initialize(&payloadTypes) < 0)
+    {
+        return -1;
+    }
+    bool nackEnabled = protectionEnabled &&
+        (protectionMethod == kProtectionNack ||
+         protectionMethod == kProtectionDualDecoder);
+    rtpStream.SimulatePacketLoss(lossRate, nackEnabled, rttMS);
+    rtpStream.SetReordering(reordering);
+    vcm->SetChannelParameters(0, 0, rttMS);
+    vcm->SetVideoProtection(protectionMethod, protectionEnabled);
+    vcm->SetRenderDelay(renderDelayMs);
+    vcm->SetMinimumPlayoutDelay(minPlayoutDelayMs);
+
+    ret = 0;
+
+    // RTP stream main loop
+    while ((ret = rtpStream.NextPacket(clock.MillisecondTimestamp())) == 0)
+    {
+        if (clock.MillisecondTimestamp() % 5 == 0)
+        {
+            ret = vcm->Decode();
+            if (ret < 0)
+            {
+                return -1;
+            }
+        }
+        while (vcm->DecodeDualFrame(0) == 1) {
+        }
+        if (vcm->TimeUntilNextProcess() <= 0)
+        {
+            vcm->Process();
+        }
+        if (MAX_RUNTIME_MS > -1 && clock.MillisecondTimestamp() >=
+            MAX_RUNTIME_MS)
+        {
+            break;
+        }
+        clock.IncrementDebugClock(1);
+    }
+
+    switch (ret)
+    {
+    case 1:
+        printf("Success\n");
+        break;
+    case -1:
+        printf("Failed\n");
+        break;
+    case 0:
+        printf("Timeout\n");
+        break;
+    }
+
+    rtpStream.Print();
+
+    // Tear down
+    while (!payloadTypes.empty())
+    {
+        delete payloadTypes.front();
+        payloadTypes.pop_front();
+    }
+    delete vcm;
+    vcm = NULL;
+    Trace::ReturnTrace();
+    return 0;
+}
diff --git a/src/modules/video_coding/main/test/video_rtp_play_mt.cc b/src/modules/video_coding/main/test/video_rtp_play_mt.cc
new file mode 100644
index 0000000..ecb7ab6
--- /dev/null
+++ b/src/modules/video_coding/main/test/video_rtp_play_mt.cc
@@ -0,0 +1,262 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "receiver_tests.h"
+#include "video_coding.h"
+#include "rtp_rtcp.h"
+#include "trace.h"
+#include "thread_wrapper.h"
+#include "../source/event.h"
+#include "test_macros.h"
+#include "rtp_player.h"
+
+#include <string.h>
+
+using namespace webrtc;
+
+bool ProcessingThread(void* obj)
+{
+    SharedState* state = static_cast<SharedState*>(obj);
+    if (state->_vcm.TimeUntilNextProcess() <= 0)
+    {
+        if (state->_vcm.Process() < 0)
+        {
+            return false;
+        }
+    }
+    return true;
+}
+
+bool RtpReaderThread(void* obj)
+{
+    SharedState* state = static_cast<SharedState*>(obj);
+    EventWrapper& waitEvent = *EventWrapper::Create();
+    // RTP stream main loop
+    TickTimeBase clock;
+    if (state->_rtpPlayer.NextPacket(clock.MillisecondTimestamp()) < 0)
+    {
+        return false;
+    }
+    waitEvent.Wait(state->_rtpPlayer.TimeUntilNextPacket());
+    delete &waitEvent;
+    return true;
+}
+
+bool DecodeThread(void* obj)
+{
+    SharedState* state = static_cast<SharedState*>(obj);
+    state->_vcm.Decode(10000);
+    while (state->_vcm.DecodeDualFrame(0) == 1) {
+    }
+    return true;
+}
+
+int RtpPlayMT(CmdArgs& args, int releaseTestNo, webrtc::VideoCodecType releaseTestVideoType)
+{
+    // Don't run these tests with debug events.
+#if defined(EVENT_DEBUG)
+    return -1;
+#endif
+
+    // BEGIN Settings
+
+    bool protectionEnabled = true;
+    VCMVideoProtection protection = kProtectionDualDecoder;
+    WebRtc_UWord8 rttMS = 50;
+    float lossRate = 0.05f;
+    WebRtc_UWord32 renderDelayMs = 0;
+    WebRtc_UWord32 minPlayoutDelayMs = 0;
+    const WebRtc_Word64 MAX_RUNTIME_MS = 10000;
+    std::string outFilename = args.outputFile;
+    if (outFilename == "")
+        outFilename = test::OutputPath() + "RtpPlayMT_decoded.yuv";
+
+    bool nackEnabled = (protectionEnabled &&
+                (protection == kProtectionDualDecoder ||
+                protection == kProtectionNack ||
+                kProtectionNackFEC));
+    TickTimeBase clock;
+    VideoCodingModule* vcm =
+            VideoCodingModule::Create(1, &clock);
+    RtpDataCallback dataCallback(vcm);
+    std::string rtpFilename;
+    rtpFilename = args.inputFile;
+    if (releaseTestNo > 0)
+    {
+        // Setup a release test
+        switch (releaseTestVideoType)
+        {
+        case webrtc::kVideoCodecVP8:
+            rtpFilename = args.inputFile;
+            outFilename = test::OutputPath() + "MTReceiveTest_VP8";
+            break;
+        default:
+            return -1;
+        }
+        switch (releaseTestNo)
+        {
+        case 1:
+            // Normal execution
+            protectionEnabled = false;
+            nackEnabled = false;
+            rttMS = 0;
+            lossRate = 0.0f;
+            outFilename += "_Normal.yuv";
+            break;
+        case 2:
+            // Packet loss
+            protectionEnabled = false;
+            nackEnabled = false;
+            rttMS = 0;
+            lossRate = 0.05f;
+            outFilename += "_0.05.yuv";
+            break;
+        case 3:
+            // Packet loss and NACK
+            protection = kProtectionNack;
+            nackEnabled = true;
+            protectionEnabled = true;
+            rttMS = 100;
+            lossRate = 0.05f;
+            outFilename += "_0.05_NACK_100ms.yuv";
+            break;
+        case 4:
+            // Packet loss and dual decoder
+            // Not implemented
+            return 0;
+            break;
+        default:
+            return -1;
+        }
+        printf("Watch %s to verify that the output is reasonable\n", outFilename.c_str());
+    }
+    RTPPlayer rtpStream(rtpFilename.c_str(), &dataCallback, &clock);
+    PayloadTypeList payloadTypes;
+    payloadTypes.push_front(new PayloadCodecTuple(VCM_VP8_PAYLOAD_TYPE, "VP8",
+                                                  kVideoCodecVP8));
+    Trace::CreateTrace();
+    Trace::SetTraceFile("receiverTestTrace.txt");
+    Trace::SetLevelFilter(webrtc::kTraceAll);
+
+    // END Settings
+
+    // Set up
+
+    SharedState mtState(*vcm, rtpStream);
+
+    if (rtpStream.Initialize(&payloadTypes) < 0)
+    {
+        return -1;
+    }
+    rtpStream.SimulatePacketLoss(lossRate, nackEnabled, rttMS);
+
+    WebRtc_Word32 ret = vcm->InitializeReceiver();
+    if (ret < 0)
+    {
+        return -1;
+    }
+
+    // Create and start all threads
+    ThreadWrapper* processingThread = ThreadWrapper::CreateThread(ProcessingThread,
+            &mtState, kNormalPriority, "ProcessingThread");
+    ThreadWrapper* rtpReaderThread = ThreadWrapper::CreateThread(RtpReaderThread,
+            &mtState, kNormalPriority, "RtpReaderThread");
+    ThreadWrapper* decodeThread = ThreadWrapper::CreateThread(DecodeThread,
+            &mtState, kNormalPriority, "DecodeThread");
+
+    // Register receive codecs in VCM
+    for (PayloadTypeList::iterator it = payloadTypes.begin();
+        it != payloadTypes.end(); ++it) {
+        PayloadCodecTuple* payloadType = *it;
+        if (payloadType != NULL)
+        {
+            VideoCodec codec;
+            VideoCodingModule::Codec(payloadType->codecType, &codec);
+            codec.plType = payloadType->payloadType;
+            if (vcm->RegisterReceiveCodec(&codec, 1) < 0)
+            {
+                return -1;
+            }
+        }
+    }
+
+    if (processingThread != NULL)
+    {
+        unsigned int tid;
+        processingThread->Start(tid);
+    }
+    else
+    {
+        printf("Unable to start processing thread\n");
+        return -1;
+    }
+    if (rtpReaderThread != NULL)
+    {
+        unsigned int tid;
+        rtpReaderThread->Start(tid);
+    }
+    else
+    {
+        printf("Unable to start RTP reader thread\n");
+        return -1;
+    }
+    if (decodeThread != NULL)
+    {
+        unsigned int tid;
+        decodeThread->Start(tid);
+    }
+    else
+    {
+        printf("Unable to start decode thread\n");
+        return -1;
+    }
+
+    FrameReceiveCallback receiveCallback(outFilename);
+    vcm->RegisterReceiveCallback(&receiveCallback);
+    vcm->RegisterPacketRequestCallback(&rtpStream);
+
+    vcm->SetChannelParameters(0, 0, rttMS);
+    vcm->SetVideoProtection(protection, protectionEnabled);
+    vcm->SetRenderDelay(renderDelayMs);
+    vcm->SetMinimumPlayoutDelay(minPlayoutDelayMs);
+
+    EventWrapper& waitEvent = *EventWrapper::Create();
+
+    // Decode for 10 seconds and then tear down and exit.
+    waitEvent.Wait(MAX_RUNTIME_MS);
+
+    // Tear down
+    while (!payloadTypes.empty())
+    {
+        delete payloadTypes.front();
+        payloadTypes.pop_front();
+    }
+    while (!processingThread->Stop())
+    {
+        ;
+    }
+    while (!rtpReaderThread->Stop())
+    {
+        ;
+    }
+    while (!decodeThread->Stop())
+    {
+        ;
+    }
+    VideoCodingModule::Destroy(vcm);
+    vcm = NULL;
+    delete &waitEvent;
+    delete processingThread;
+    delete decodeThread;
+    delete rtpReaderThread;
+    rtpStream.Print();
+    Trace::ReturnTrace();
+    return 0;
+}
diff --git a/src/modules/video_coding/main/test/video_source.cc b/src/modules/video_coding/main/test/video_source.cc
new file mode 100644
index 0000000..d7ba0b9
--- /dev/null
+++ b/src/modules/video_coding/main/test/video_source.cc
@@ -0,0 +1,202 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_source.h"
+
+#include <cassert>
+
+#include "testsupport/fileutils.h"
+
+VideoSource::VideoSource()
+:
+_fileName(webrtc::test::ProjectRootPath() + "resources/foreman_cif.yuv"),
+_width(352),
+_height(288),
+_type(webrtc::kI420),
+_frameRate(30)
+{
+   //
+}
+
+VideoSource::VideoSource(std::string fileName, VideoSize size,
+    float frameRate, webrtc::VideoType type /*= webrtc::kI420*/)
+:
+_fileName(fileName),
+_width(0),
+_height(0),
+_type(type),
+_frameRate(frameRate)
+{
+    assert(size != kUndefined && size != kNumberOfVideoSizes);
+    assert(type != webrtc::kUnknown);
+    assert(frameRate > 0);
+    GetWidthHeight(size);
+}
+
+VideoSource::VideoSource(std::string fileName, WebRtc_UWord16 width, WebRtc_UWord16 height,
+    float frameRate /*= 30*/, webrtc::VideoType type /*= webrtc::kI420*/)
+:
+_fileName(fileName),
+_width(width),
+_height(height),
+_type(type),
+_frameRate(frameRate)
+{
+    assert(width > 0);
+    assert(height > 0);
+    assert(type != webrtc::kUnknown);
+    assert(frameRate > 0);
+}
+
+WebRtc_Word32
+VideoSource::GetFrameLength() const
+{
+    return webrtc::CalcBufferSize(_type, _width, _height);
+}
+
+std::string
+VideoSource::GetName() const
+{
+    // Remove path.
+    size_t slashPos = _fileName.find_last_of("/\\");
+    if (slashPos == std::string::npos)
+    {
+        slashPos = 0;
+    }
+    else
+    {
+        slashPos++;
+    }
+
+    // Remove extension and underscored suffix if it exists.
+    //return _fileName.substr(slashPos, std::min(_fileName.find_last_of("_"),
+    //    _fileName.find_last_of(".")) - slashPos);
+    // MS: Removing suffix, not underscore....keeping full file name
+    return _fileName.substr(slashPos, _fileName.find_last_of(".") - slashPos);
+
+}
+
+int
+VideoSource::GetWidthHeight( VideoSize size)
+{
+    switch(size)
+    {
+    case kSQCIF:
+        _width = 128;
+        _height = 96;
+        return 0;
+    case kQQVGA:
+        _width = 160;
+        _height = 120;
+        return 0;
+    case kQCIF:
+        _width = 176;
+        _height = 144;
+        return 0;
+    case kCGA:
+        _width = 320;
+        _height = 200;
+        return 0;
+    case kQVGA:
+        _width = 320;
+        _height = 240;
+        return 0;
+    case kSIF:
+        _width = 352;
+        _height = 240;
+        return 0;
+    case kWQVGA:
+        _width = 400;
+        _height = 240;
+        return 0;
+    case kCIF:
+        _width = 352;
+        _height = 288;
+        return 0;
+    case kW288p:
+        _width = 512;
+        _height = 288;
+        return 0;
+    case k448p:
+        _width = 576;
+        _height = 448;
+        return 0;
+    case kVGA:
+        _width = 640;
+        _height = 480;
+        return 0;
+    case k432p:
+        _width = 720;
+        _height = 432;
+        return 0;
+    case kW432p:
+        _width = 768;
+        _height = 432;
+        return 0;
+    case k4SIF:
+        _width = 704;
+        _height = 480;
+        return 0;
+    case kW448p:
+        _width = 768;
+        _height = 448;
+        return 0;
+    case kNTSC:
+        _width = 720;
+        _height = 480;
+        return 0;
+    case kFW448p:
+        _width = 800;
+        _height = 448;
+        return 0;
+    case kWVGA:
+        _width = 800;
+        _height = 480;
+        return 0;
+    case k4CIF:
+        _width = 704;
+        _height = 576;
+        return 0;
+    case kSVGA:
+        _width = 800;
+        _height = 600;
+        return 0;
+    case kW544p:
+        _width = 960;
+        _height = 544;
+        return 0;
+    case kW576p:
+        _width = 1024;
+        _height = 576;
+        return 0;
+    case kHD:
+        _width = 960;
+        _height = 720;
+        return 0;
+    case kXGA:
+        _width = 1024;
+        _height = 768;
+        return 0;
+    case kFullHD:
+        _width = 1440;
+        _height = 1080;
+        return 0;
+    case kWHD:
+        _width = 1280;
+        _height = 720;
+        return 0;
+    case kWFullHD:
+        _width = 1920;
+        _height = 1080;
+        return 0;
+    default:
+        return -1;
+    }
+}
diff --git a/src/modules/video_coding/main/test/video_source.h b/src/modules/video_coding/main/test/video_source.h
new file mode 100644
index 0000000..980650b
--- /dev/null
+++ b/src/modules/video_coding/main/test/video_source.h
@@ -0,0 +1,83 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_CODING_TEST_VIDEO_SOURCE_H_
+#define WEBRTC_MODULES_VIDEO_CODING_TEST_VIDEO_SOURCE_H_
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "typedefs.h"
+
+#include <string>
+
+enum VideoSize
+    {
+        kUndefined,
+        kSQCIF,     // 128*96       = 12 288
+        kQQVGA,     // 160*120      = 19 200
+        kQCIF,      // 176*144      = 25 344
+        kCGA,       // 320*200      = 64 000
+        kQVGA,      // 320*240      = 76 800
+        kSIF,       // 352*240      = 84 480
+        kWQVGA,     // 400*240      = 96 000
+        kCIF,       // 352*288      = 101 376
+        kW288p,     // 512*288      = 147 456 (WCIF)
+        k448p,      // 576*448      = 281 088
+        kVGA,       // 640*480      = 307 200
+        k432p,      // 720*432      = 311 040
+        kW432p,     // 768*432      = 331 776
+        k4SIF,      // 704*480      = 337 920
+        kW448p,     // 768*448      = 344 064
+        kNTSC,		// 720*480      = 345 600
+        kFW448p,    // 800*448      = 358 400
+        kWVGA,      // 800*480      = 384 000
+        k4CIF,      // 704*576      = 405 504
+        kSVGA,      // 800*600      = 480 000
+        kW544p,     // 960*544      = 522 240
+        kW576p,     // 1024*576     = 589 824 (W4CIF)
+        kHD,        // 960*720      = 691 200
+        kXGA,       // 1024*768     = 786 432
+        kWHD,       // 1280*720     = 921 600
+        kFullHD,   // 1440*1080    = 1 555 200
+        kWFullHD,  // 1920*1080    = 2 073 600
+
+        kNumberOfVideoSizes
+    };
+
+
+class VideoSource
+{
+public:
+  VideoSource();
+  VideoSource(std::string fileName, VideoSize size, float frameRate, webrtc::VideoType type = webrtc::kI420);
+  VideoSource(std::string fileName, WebRtc_UWord16 width, WebRtc_UWord16 height,
+      float frameRate = 30, webrtc::VideoType type = webrtc::kI420);
+
+    std::string GetFileName() const { return _fileName; }
+    WebRtc_UWord16  GetWidth() const { return _width; }
+    WebRtc_UWord16 GetHeight() const { return _height; }
+    webrtc::VideoType GetType() const { return _type; }
+    float GetFrameRate() const { return _frameRate; }
+    int GetWidthHeight( VideoSize size);
+
+    // Returns the filename with the path (including the leading slash) removed.
+    std::string GetName() const;
+
+    WebRtc_Word32 GetFrameLength() const;
+
+private:
+    std::string         _fileName;
+    WebRtc_UWord16      _width;
+    WebRtc_UWord16      _height;
+    webrtc::VideoType   _type;
+    float               _frameRate;
+};
+
+#endif // WEBRTC_MODULES_VIDEO_CODING_TEST_VIDEO_SOURCE_H_
+
diff --git a/src/modules/video_processing/main/OWNERS b/src/modules/video_processing/main/OWNERS
new file mode 100644
index 0000000..7183cf2
--- /dev/null
+++ b/src/modules/video_processing/main/OWNERS
@@ -0,0 +1,4 @@
+stefan@webrtc.org
+mikhal@webrtc.org
+marpan@webrtc.org
+henrik.lundin@webrtc.org
diff --git a/src/modules/video_processing/main/interface/video_processing.h b/src/modules/video_processing/main/interface/video_processing.h
new file mode 100644
index 0000000..512cace
--- /dev/null
+++ b/src/modules/video_processing/main/interface/video_processing.h
@@ -0,0 +1,382 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * video_processing.h
+ * This header file contains the API required for the video
+ * processing module class.
+ */
+
+
+#ifndef WEBRTC_MODULES_INTERFACE_VIDEO_PROCESSING_H
+#define WEBRTC_MODULES_INTERFACE_VIDEO_PROCESSING_H
+
+#include "module.h"
+#include "module_common_types.h"
+#include "video_processing_defines.h"
+
+/**
+   The module is largely intended to process video streams, except functionality 
+   provided by static functions which operate independent of previous frames. It
+   is recommended, but not required that a unique instance be used for each 
+   concurrently processed stream. Similarly, it is recommended to call Reset()
+   before switching to a new stream, but this is not absolutely required.
+   
+   The module provides basic thread safety by permitting only a single function to
+   execute concurrently.
+*/
+
+namespace webrtc {
+
+class VideoProcessingModule : public Module
+{
+public:
+    /**
+       Structure to hold frame statistics. Populate it with GetFrameStats().
+    */
+    struct FrameStats
+    {
+        FrameStats() :
+            mean(0),
+            sum(0),
+            numPixels(0),
+            subSamplWidth(0),
+            subSamplHeight(0)
+        {
+            memset(hist, 0, sizeof(hist));
+        }
+
+        WebRtc_UWord32 hist[256];      /**< Histogram of frame */
+        WebRtc_UWord32 mean;           /**< Mean value of frame */
+        WebRtc_UWord32 sum;            /**< Sum of frame */
+        WebRtc_UWord32 numPixels;      /**< Number of pixels */
+        WebRtc_UWord8  subSamplWidth;  /**< Subsampling rate of width in powers of 2 */
+        WebRtc_UWord8  subSamplHeight; /**< Subsampling rate of height in powers of 2 */
+    };
+
+    /**
+       Specifies the warning types returned by BrightnessDetection().
+    */
+    enum BrightnessWarning 
+    {
+        kNoWarning,                /**< Frame has acceptable brightness */
+        kDarkWarning,              /**< Frame is too dark */
+        kBrightWarning            /**< Frame is too bright */
+    };
+
+    /*
+       Creates a VPM object.
+      
+       \param[in] id
+           Unique identifier of this object.
+      
+       \return Pointer to a VPM object.
+    */
+    static VideoProcessingModule* Create(WebRtc_Word32 id);
+
+    /**
+       Destroys a VPM object.
+      
+       \param[in] module
+           Pointer to the VPM object to destroy.
+    */
+    static void Destroy(VideoProcessingModule* module);
+
+    /**
+       Not supported.
+    */
+    virtual WebRtc_Word32 TimeUntilNextProcess() { return -1; }
+
+    /**
+       Not supported.
+    */
+    virtual WebRtc_Word32 Process() { return -1; }
+
+    /**
+       Resets all processing components to their initial states. This should be
+       called whenever a new video stream is started.
+    */
+    virtual void Reset() = 0;
+
+    /**
+       Retrieves statistics for the input frame. This function must be used to
+       prepare a FrameStats struct for use in certain VPM functions.
+      
+       \param[out] stats
+           The frame statistics will be stored here on return.
+      
+       \param[in]  frame
+           Pointer to the video frame.
+      
+       \param[in]  width
+           Frame width in pixels.
+      
+       \param[in]  height
+           Frame height in pixels.
+      
+       \return 0 on success, -1 on failure.
+    */
+    static WebRtc_Word32 GetFrameStats(FrameStats& stats,
+                                     const WebRtc_UWord8* frame,
+                                     WebRtc_UWord32 width,
+                                     WebRtc_UWord32 height);
+
+    /**
+       \overload
+    */
+     static WebRtc_Word32 GetFrameStats(FrameStats& stats,
+                                     const VideoFrame& frame);
+
+    /**
+       Checks the validity of a FrameStats struct. Currently, valid implies only
+       that is had changed from its initialized state.
+      
+       \param[in] stats
+           Frame statistics.
+      
+       \return True on valid stats, false on invalid stats.
+    */
+    static bool ValidFrameStats(const FrameStats& stats);
+
+    /**
+       Returns a FrameStats struct to its intialized state.
+      
+       \param[in,out] stats
+           Frame statistics.
+    */
+    static void ClearFrameStats(FrameStats& stats);
+
+    /**
+       Enhances the color of an image through a constant mapping. Only the 
+       chrominance is altered. Has a fixed-point implementation.
+      
+       \param[in,out] frame
+           Pointer to the video frame.
+      
+       \param[in]     width
+           Frame width in pixels.
+      
+       \param[in]     height
+           Frame height in pixels.
+      
+       \return 0 on success, -1 on failure.
+    */
+    static WebRtc_Word32 ColorEnhancement(WebRtc_UWord8* frame,
+                                        WebRtc_UWord32 width,
+                                        WebRtc_UWord32 height);
+
+    /**
+       \overload
+    */
+    static WebRtc_Word32 ColorEnhancement(VideoFrame& frame);
+
+    /**
+       Increases/decreases the luminance value.
+
+       \param[in,out] frame
+           Pointer to the video frame buffer.
+
+       \param[in]     width
+           Frame width in pixels.
+
+       \param[in]     height
+           Frame height in pixels.
+
+      \param[in] delta
+           The amount to change the chrominance value of every single pixel.
+           Can be < 0 also.
+
+       \return 0 on success, -1 on failure.
+    */
+    static WebRtc_Word32 Brighten(WebRtc_UWord8* frame,
+                                  int width, int height, int delta);
+    /**
+       \overload
+    */
+    static WebRtc_Word32 Brighten(VideoFrame& frame, int delta);
+
+    /**
+       Detects and removes camera flicker from a video stream. Every frame from the
+       stream must be passed in. A frame will only be altered if flicker has been
+       detected. Has a fixed-point implementation.
+      
+       \param[in,out] frame
+           Pointer to the video frame.
+      
+       \param[in]     width
+           Frame width in pixels.
+      
+       \param[in]     height
+           Frame height in pixels.
+      
+       \param[in]     timestamp
+           Frame timestamp in 90 kHz format.
+      
+       \param[in,out] stats
+           Frame statistics provided by GetFrameStats(). On return the stats will
+           be reset to zero if the frame was altered. Call GetFrameStats() again
+           if the statistics for the altered frame are required.
+      
+       \return 0 on success, -1 on failure.
+    */
+    virtual WebRtc_Word32 Deflickering(WebRtc_UWord8* frame,
+                                     WebRtc_UWord32 width,
+                                     WebRtc_UWord32 height,
+                                     WebRtc_UWord32 timestamp,
+                                     FrameStats& stats) = 0;
+    
+    /**
+       \overload
+    */
+    virtual WebRtc_Word32 Deflickering(VideoFrame& frame,
+                                     FrameStats& stats) = 0;
+
+    /**
+       Denoises a video frame. Every frame from the stream should be passed in.
+       Has a fixed-point implementation.
+      
+       \param[in,out] frame
+           Pointer to the video frame.
+      
+       \param[in]     width
+           Frame width in pixels.
+      
+       \param[in]     height
+           Frame height in pixels.
+      
+       \return The number of modified pixels on success, -1 on failure.
+    */
+    virtual WebRtc_Word32 Denoising(WebRtc_UWord8* frame,
+                                  WebRtc_UWord32 width,
+                                  WebRtc_UWord32 height) = 0;
+    
+    /**
+       \overload
+    */
+    virtual WebRtc_Word32 Denoising(VideoFrame& frame) = 0;
+
+    /**
+       Detects if a video frame is excessively bright or dark. Returns a warning if
+       this is the case. Multiple frames should be passed in before expecting a 
+       warning. Has a floating-point implementation.
+      
+       \param[in] frame
+           Pointer to the video frame.
+      
+       \param[in]     width
+           Frame width in pixels.
+      
+       \param[in]     height
+           Frame height in pixels.
+      
+       \param[in] stats
+           Frame statistics provided by GetFrameStats().
+      
+       \return A member of BrightnessWarning on success, -1 on error
+    */
+    virtual WebRtc_Word32 BrightnessDetection(const WebRtc_UWord8* frame,
+                                            WebRtc_UWord32 width,
+                                            WebRtc_UWord32 height,
+                                            const FrameStats& stats) = 0;
+
+    /**
+       \overload
+    */
+    virtual WebRtc_Word32 BrightnessDetection(const VideoFrame& frame,
+                                            const FrameStats& stats) = 0;
+
+
+    /**
+    The following functions refer to the pre-processor unit within VPM. The pre-processor
+    perfoms spatial/temporal decimation and content analysis on the frames prior to encoding.
+    */
+	
+    /**
+    Enable/disable temporal decimation
+
+    \param[in] enable when true, temporal decimation is enabled
+    */
+    virtual void EnableTemporalDecimation(bool enable) = 0;
+	
+    /**
+   Set target resolution
+    
+   \param[in] width
+   Target width
+    
+   \param[in] height
+   Target height
+    
+    \param[in] frameRate
+    Target frameRate
+           
+    \return VPM_OK on success, a negative value on error (see error codes)
+
+    */
+    virtual WebRtc_Word32 SetTargetResolution(WebRtc_UWord32 width, WebRtc_UWord32 height, WebRtc_UWord32 frameRate) = 0;
+    
+    /**
+    Set max frame rate
+    \param[in] maxFrameRate: maximum frame rate (limited to native frame rate)
+
+    \return VPM_OK on success, a negative value on error (see error codes)
+    */
+    virtual WebRtc_Word32 SetMaxFrameRate(WebRtc_UWord32 maxFrameRate) = 0;
+
+    /**
+    Get decimated(target) frame rate
+    */
+    virtual WebRtc_UWord32 DecimatedFrameRate() = 0;
+	
+    /**
+    Get decimated(target) frame width
+    */
+    virtual WebRtc_UWord32 DecimatedWidth() const = 0;
+
+    /**
+    Get decimated(target) frame height
+    */
+    virtual WebRtc_UWord32 DecimatedHeight() const = 0 ;
+
+    /**
+    Set the spatial resampling settings of the VPM: The resampler may either be disabled or one of the following:
+    scaling to a close to target dimension followed by crop/pad
+
+    \param[in] resamplingMode
+    Set resampling mode (a member of VideoFrameResampling)
+    */
+    virtual void SetInputFrameResampleMode(VideoFrameResampling resamplingMode) = 0;
+  
+    /**
+    Get Processed (decimated) frame
+	  
+    \param[in] frame pointer to the video frame.
+	  
+	  \param[in] processedFrame pointer (double) to the processed frame
+    
+    \return VPM_OK on success, a negative value on error (see error codes)
+    */
+    virtual WebRtc_Word32 PreprocessFrame(const VideoFrame* frame, VideoFrame** processedFrame) = 0;
+
+    /**
+    Return content metrics for the last processed frame
+    */
+    virtual VideoContentMetrics* ContentMetrics() const = 0 ;
+
+    /**
+    Enable content analysis
+    */
+    virtual void EnableContentAnalysis(bool enable) = 0;
+
+};
+
+} //namespace
+
+#endif
diff --git a/src/modules/video_processing/main/interface/video_processing_defines.h b/src/modules/video_processing/main/interface/video_processing_defines.h
new file mode 100644
index 0000000..d9bebd4
--- /dev/null
+++ b/src/modules/video_processing/main/interface/video_processing_defines.h
@@ -0,0 +1,43 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * video_processing_defines.h
+ * This header file includes the definitions used in the video processor module
+ */
+
+#ifndef WEBRTC_MODULES_INTERFACE_VIDEO_PROCESSING_DEFINES_H
+#define WEBRTC_MODULES_INTERFACE_VIDEO_PROCESSING_DEFINES_H
+
+#include "typedefs.h"
+
+namespace webrtc {
+
+// Error codes
+#define VPM_OK                   0
+#define VPM_GENERAL_ERROR       -1
+#define VPM_MEMORY              -2
+#define VPM_PARAMETER_ERROR     -3
+#define VPM_SCALE_ERROR         -4
+#define VPM_UNINITIALIZED       -5
+#define VPM_UNIMPLEMENTED       -6
+
+enum VideoFrameResampling
+{
+  // TODO: Do we still need crop/pad?
+    kNoRescaling,         // disables rescaling
+    kFastRescaling,       // point
+    kBiLinear,            // bi-linear interpolation
+    kBox,                 // Box inteprolation
+};
+
+} //namespace
+
+#endif
diff --git a/src/modules/video_processing/main/source/brighten.cc b/src/modules/video_processing/main/source/brighten.cc
new file mode 100644
index 0000000..51e4b6b
--- /dev/null
+++ b/src/modules/video_processing/main/source/brighten.cc
@@ -0,0 +1,52 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_processing/main/source/brighten.h"
+
+#include <cstdlib>
+
+#include "system_wrappers/interface/trace.h"
+
+namespace webrtc {
+namespace VideoProcessing {
+
+WebRtc_Word32 Brighten(WebRtc_UWord8* frame,
+                       int width, int height, int delta) {
+  if (frame == NULL) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
+                 "Null frame pointer");
+    return VPM_PARAMETER_ERROR;
+  }
+
+  if (width <= 0 || height <= 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1,
+                 "Invalid frame size");
+    return VPM_PARAMETER_ERROR;
+  }
+
+  int numPixels = width * height;
+
+  int lookUp[256];
+  for (int i = 0; i < 256; i++) {
+    int val = i + delta;
+    lookUp[i] = ((((val < 0) ? 0 : val) > 255) ? 255 : val);
+  }
+
+  WebRtc_UWord8* tempPtr = frame;
+
+  for (int i = 0; i < numPixels; i++) {
+    *tempPtr = static_cast<WebRtc_UWord8>(lookUp[*tempPtr]);
+    tempPtr++;
+  }
+  return VPM_OK;
+}
+
+}  // namespace VideoProcessing
+}  // namespace webrtc
diff --git a/src/modules/video_processing/main/source/brighten.h b/src/modules/video_processing/main/source/brighten.h
new file mode 100644
index 0000000..b7e6fb7
--- /dev/null
+++ b/src/modules/video_processing/main/source/brighten.h
@@ -0,0 +1,26 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MODULES_VIDEO_PROCESSING_MAIN_SOURCE_BRIGHTEN_H_
+#define MODULES_VIDEO_PROCESSING_MAIN_SOURCE_BRIGHTEN_H_
+
+#include "typedefs.h"
+#include "modules/video_processing/main/interface/video_processing.h"
+
+namespace webrtc {
+namespace VideoProcessing {
+
+WebRtc_Word32 Brighten(WebRtc_UWord8* frame,
+                       int width, int height, int delta);
+
+}  // namespace VideoProcessing
+}  // namespace webrtc
+
+#endif  // MODULES_VIDEO_PROCESSING_MAIN_SOURCE_BRIGHTEN_H_
diff --git a/src/modules/video_processing/main/source/brightness_detection.cc b/src/modules/video_processing/main/source/brightness_detection.cc
new file mode 100644
index 0000000..6840df2
--- /dev/null
+++ b/src/modules/video_processing/main/source/brightness_detection.cc
@@ -0,0 +1,195 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_processing.h"
+#include "brightness_detection.h"
+#include "trace.h"
+
+#include <math.h>
+
+namespace webrtc {
+
+VPMBrightnessDetection::VPMBrightnessDetection() :
+    _id(0)
+{
+    Reset();
+}
+
+VPMBrightnessDetection::~VPMBrightnessDetection()
+{
+}
+
+WebRtc_Word32
+VPMBrightnessDetection::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    return VPM_OK;
+}
+
+void
+VPMBrightnessDetection::Reset()
+{
+    _frameCntBright = 0;
+    _frameCntDark = 0;
+}
+
+WebRtc_Word32
+VPMBrightnessDetection::ProcessFrame(const WebRtc_UWord8* frame,
+                                     const WebRtc_UWord32 width,
+                                     const WebRtc_UWord32 height,
+                                     const VideoProcessingModule::FrameStats& stats)
+{
+    if (frame == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Null frame pointer");
+        return VPM_PARAMETER_ERROR;
+    }
+    
+    if (width == 0 || height == 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame size");
+        return VPM_PARAMETER_ERROR;
+    }
+
+    if (!VideoProcessingModule::ValidFrameStats(stats))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame stats");
+        return VPM_PARAMETER_ERROR;
+    }
+
+    const WebRtc_UWord8 frameCntAlarm = 2;
+
+    // Get proportion in lowest bins 
+    WebRtc_UWord8 lowTh = 20;
+    float propLow = 0;
+    for (WebRtc_UWord32 i = 0; i < lowTh; i++)
+    {
+        propLow += stats.hist[i];
+    }
+    propLow /= stats.numPixels;
+
+    // Get proportion in highest bins 
+    unsigned char highTh = 230;
+    float propHigh = 0;
+    for (WebRtc_UWord32 i = highTh; i < 256; i++)
+    {
+        propHigh += stats.hist[i];
+    }
+    propHigh /= stats.numPixels;
+
+    if(propHigh < 0.4)
+    {
+        if (stats.mean < 90 || stats.mean > 170)
+        {
+            // Standard deviation of Y
+            float stdY = 0;
+            for (WebRtc_UWord32 h = 0; h < height; h += (1 << stats.subSamplHeight))
+            {
+                WebRtc_UWord32 row = h*width;
+                for (WebRtc_UWord32 w = 0; w < width; w += (1 << stats.subSamplWidth))
+                {
+                    stdY += (frame[w + row] - stats.mean) * (frame[w + row] - stats.mean);
+                }
+            }           
+            stdY = sqrt(stdY / stats.numPixels);
+
+            // Get percentiles
+            WebRtc_UWord32 sum = 0;
+            WebRtc_UWord32 medianY = 140;
+            WebRtc_UWord32 perc05 = 0;
+            WebRtc_UWord32 perc95 = 255;
+            float posPerc05 = stats.numPixels * 0.05f;
+            float posMedian = stats.numPixels * 0.5f;
+            float posPerc95 = stats.numPixels * 0.95f;
+            for (WebRtc_UWord32 i = 0; i < 256; i++)
+            {
+                sum += stats.hist[i];
+
+                if (sum < posPerc05)
+                {
+                    perc05 = i;     // 5th perc
+                }
+                if (sum < posMedian)
+                {
+                    medianY = i;    // 50th perc
+                }
+                if (sum < posPerc95)
+                {
+                    perc95 = i;     // 95th perc
+                }
+                else
+                {
+                    break;
+                }
+            }
+
+            // Check if image is too dark
+            if ((stdY < 55) && (perc05 < 50))
+            { 
+                if (medianY < 60 || stats.mean < 80 ||  perc95 < 130 || propLow > 0.20)
+                {
+                    _frameCntDark++;
+                }
+                else
+                {
+                    _frameCntDark = 0;
+                }
+            } 
+            else
+            {
+                _frameCntDark = 0;
+            }
+
+            // Check if image is too bright
+            if ((stdY < 52) && (perc95 > 200) && (medianY > 160))
+            {
+                if (medianY > 185 || stats.mean > 185 || perc05 > 140 || propHigh > 0.25)
+                {
+                    _frameCntBright++;  
+                }
+                else 
+                {
+                    _frameCntBright = 0;
+                }
+            } 
+            else
+            {
+                _frameCntBright = 0;
+            }
+
+        } 
+        else
+        {
+            _frameCntDark = 0;
+            _frameCntBright = 0;
+        }
+
+    } 
+    else
+    {
+        _frameCntBright++;
+        _frameCntDark = 0;
+    }
+    
+    if (_frameCntDark > frameCntAlarm)
+    {
+        return VideoProcessingModule::kDarkWarning;
+    }
+    else if (_frameCntBright > frameCntAlarm)
+    {
+        return VideoProcessingModule::kBrightWarning;
+    }
+    else
+    {
+        return VideoProcessingModule::kNoWarning;
+    }
+}
+
+} //namespace
diff --git a/src/modules/video_processing/main/source/brightness_detection.h b/src/modules/video_processing/main/source/brightness_detection.h
new file mode 100644
index 0000000..7bed556
--- /dev/null
+++ b/src/modules/video_processing/main/source/brightness_detection.h
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * brightness_detection.h
+ */
+#ifndef VPM_BRIGHTNESS_DETECTION_H
+#define VPM_BRIGHTNESS_DETECTION_H
+
+#include "typedefs.h"
+#include "video_processing.h"
+
+namespace webrtc {
+
+class VPMBrightnessDetection
+{
+public:
+    VPMBrightnessDetection();
+    ~VPMBrightnessDetection();
+
+    WebRtc_Word32 ChangeUniqueId(WebRtc_Word32 id);
+
+    void Reset();
+
+    WebRtc_Word32 ProcessFrame(const WebRtc_UWord8* frame,
+                             WebRtc_UWord32 width,
+                             WebRtc_UWord32 height,
+                             const VideoProcessingModule::FrameStats& stats);
+
+private:
+    WebRtc_Word32 _id;
+
+    WebRtc_UWord32 _frameCntBright;
+    WebRtc_UWord32 _frameCntDark;
+};
+
+} //namespace
+
+#endif // VPM_BRIGHTNESS_DETECTION_H
diff --git a/src/modules/video_processing/main/source/color_enhancement.cc b/src/modules/video_processing/main/source/color_enhancement.cc
new file mode 100644
index 0000000..426596f
--- /dev/null
+++ b/src/modules/video_processing/main/source/color_enhancement.cc
@@ -0,0 +1,68 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "color_enhancement.h"
+#include "color_enhancement_private.h"
+#include "trace.h"
+#include <cstdlib>  // NULL
+
+namespace webrtc {
+
+namespace VideoProcessing
+{ 
+    WebRtc_Word32
+    ColorEnhancement(WebRtc_UWord8* frame,
+                     const WebRtc_UWord32 width,
+                     const WebRtc_UWord32 height)
+    {
+        // pointers to U and V color pixels
+        WebRtc_UWord8* ptrU;
+        WebRtc_UWord8* ptrV;
+        WebRtc_UWord8 tempChroma;
+        const WebRtc_UWord32 numPixels = width * height;
+
+
+        if (frame == NULL)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, "Null frame pointer");
+            return VPM_GENERAL_ERROR;
+        }
+
+        if (width == 0 || height == 0)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, "Invalid frame size");
+            return VPM_GENERAL_ERROR;
+        }
+        
+        // set pointers to first U and V pixels
+        
+        // stream format:
+        // | numPixels bytes luminance | numPixels/4 bytes chroma U | numPixels/4 chroma V |
+        
+        ptrU = frame + numPixels;       // skip luminance
+        ptrV = ptrU + (numPixels>>2);
+
+        // loop through all chrominance pixels and modify color
+        for (WebRtc_UWord32 ix = 0; ix < (numPixels>>2); ix++)
+        {
+            tempChroma = colorTable[*ptrU][*ptrV];
+            *ptrV = colorTable[*ptrV][*ptrU];
+            *ptrU = tempChroma;
+            
+            // increment pointers
+            ptrU++;
+            ptrV++;
+        }
+        return VPM_OK;
+    }
+
+} //namespace
+
+} //namespace webrtc
diff --git a/src/modules/video_processing/main/source/color_enhancement.h b/src/modules/video_processing/main/source/color_enhancement.h
new file mode 100644
index 0000000..87fabc3
--- /dev/null
+++ b/src/modules/video_processing/main/source/color_enhancement.h
@@ -0,0 +1,31 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * color_enhancement.h
+ */
+#ifndef VPM_COLOR_ENHANCEMENT_H
+#define VPM_COLOR_ENHANCEMENT_H
+
+#include "typedefs.h"
+#include "video_processing.h"
+
+namespace webrtc {
+
+namespace VideoProcessing
+{
+    WebRtc_Word32 ColorEnhancement(WebRtc_UWord8* frame,
+                                 WebRtc_UWord32 width,
+                                 WebRtc_UWord32 height);
+}
+
+} //namespace
+
+#endif // VPM_COLOR_ENHANCEMENT_H
diff --git a/src/modules/video_processing/main/source/color_enhancement_private.h b/src/modules/video_processing/main/source/color_enhancement_private.h
new file mode 100644
index 0000000..b88fc1a
--- /dev/null
+++ b/src/modules/video_processing/main/source/color_enhancement_private.h
@@ -0,0 +1,273 @@
+#ifndef VPM_COLOR_ENHANCEMENT_PRIVATE_H
+#define VPM_COLOR_ENHANCEMENT_PRIVATE_H
+
+#include "typedefs.h"
+
+namespace webrtc {
+
+//Table created with Matlab script createTable.m
+//Usage:
+//    Umod=colorTable[U][V]
+//    Vmod=colorTable[V][U]
+static const WebRtc_UWord8 colorTable[256][256] = {
+    {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
+    {1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1},
+    {2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2},
+    {3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3},
+    {4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4},
+    {5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5},
+    {6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6},
+    {7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7},
+    {8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8},
+    {9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9},
+    {10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10},
+    {11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11},
+    {12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12},
+    {13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13, 13},
+    {14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14, 14},
+    {15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15},
+    {16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16},
+    {17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17, 17},
+    {18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18, 18},
+    {19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19, 19},
+    {20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20, 20},
+    {21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21, 21},
+    {22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22},
+    {23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23, 23},
+    {24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24, 24},
+    {25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25, 25},
+    {26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26, 26},
+    {27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27, 27},
+    {28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28},
+    {29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29, 29},
+    {30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30},
+    {31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31},
+    {32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32},
+    {33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33, 33},
+    {34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34},
+    {35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35, 35},
+    {36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36},
+    {37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37},
+    {38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38, 38},
+    {39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39, 39},
+    {40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40},
+    {41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41, 41},
+    {42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42, 42},
+    {43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43, 43},
+    {44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44, 44},
+    {45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45, 45},
+    {46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46, 46},
+    {47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47, 47},
+    {48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48, 48},
+    {49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49, 49},
+    {50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50, 50},
+    {51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51, 51},
+    {52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52, 52},
+    {53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53, 53},
+    {54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54, 54},
+    {55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55, 55},
+    {56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56, 56},
+    {57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57},
+    {58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 57, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58},
+    {59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 58, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59},
+    {60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 59, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60},
+    {61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61},
+    {62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 61, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62},
+    {63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63},
+    {64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 62, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64},
+    {65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 63, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65},
+    {66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 64, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66},
+    {67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67},
+    {68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 65, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68},
+    {69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 66, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69},
+    {70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 67, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70},
+    {71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71},
+    {72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 68, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72},
+    {73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 71, 71, 71, 71, 71, 71, 71, 71, 71, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 69, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 71, 71, 71, 71, 71, 71, 71, 71, 71, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73},
+    {74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 72, 72, 72, 72, 72, 72, 72, 72, 72, 71, 71, 71, 71, 71, 71, 71, 71, 71, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 70, 71, 71, 71, 71, 71, 71, 71, 71, 71, 72, 72, 72, 72, 72, 72, 72, 72, 72, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74},
+    {75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 73, 73, 73, 73, 73, 73, 73, 73, 72, 72, 72, 72, 72, 72, 72, 72, 72, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 72, 72, 72, 72, 72, 72, 72, 72, 72, 73, 73, 73, 73, 73, 73, 73, 73, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75},
+    {76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 74, 74, 74, 74, 74, 74, 74, 74, 73, 73, 73, 73, 73, 73, 73, 73, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 71, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 73, 73, 73, 73, 73, 73, 73, 73, 74, 74, 74, 74, 74, 74, 74, 74, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76},
+    {77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 75, 75, 75, 75, 75, 75, 75, 74, 74, 74, 74, 74, 74, 74, 74, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 72, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 74, 74, 74, 74, 74, 74, 74, 74, 75, 75, 75, 75, 75, 75, 75, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77},
+    {78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 76, 76, 76, 76, 76, 76, 76, 75, 75, 75, 75, 75, 75, 75, 74, 74, 74, 74, 74, 74, 74, 74, 74, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 73, 74, 74, 74, 74, 74, 74, 74, 74, 74, 75, 75, 75, 75, 75, 75, 75, 76, 76, 76, 76, 76, 76, 76, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78},
+    {79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 77, 77, 77, 77, 77, 77, 77, 76, 76, 76, 76, 76, 76, 76, 75, 75, 75, 75, 75, 75, 75, 75, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 75, 75, 75, 75, 75, 75, 75, 75, 76, 76, 76, 76, 76, 76, 76, 77, 77, 77, 77, 77, 77, 77, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79},
+    {80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 79, 79, 79, 79, 79, 79, 79, 79, 79, 78, 78, 78, 78, 78, 78, 78, 77, 77, 77, 77, 77, 77, 76, 76, 76, 76, 76, 76, 76, 76, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 74, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 76, 76, 76, 76, 76, 76, 76, 76, 77, 77, 77, 77, 77, 77, 78, 78, 78, 78, 78, 78, 78, 79, 79, 79, 79, 79, 79, 79, 79, 79, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80},
+    {81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 80, 80, 80, 80, 80, 80, 80, 80, 80, 79, 79, 79, 79, 79, 79, 79, 78, 78, 78, 78, 78, 78, 77, 77, 77, 77, 77, 77, 77, 76, 76, 76, 76, 76, 76, 76, 76, 76, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 75, 76, 76, 76, 76, 76, 76, 76, 76, 76, 77, 77, 77, 77, 77, 77, 77, 78, 78, 78, 78, 78, 78, 79, 79, 79, 79, 79, 79, 79, 80, 80, 80, 80, 80, 80, 80, 80, 80, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81},
+    {82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 81, 81, 81, 81, 81, 81, 81, 81, 81, 80, 80, 80, 80, 80, 80, 80, 79, 79, 79, 79, 79, 79, 78, 78, 78, 78, 78, 78, 78, 77, 77, 77, 77, 77, 77, 77, 77, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 76, 77, 77, 77, 77, 77, 77, 77, 77, 78, 78, 78, 78, 78, 78, 78, 79, 79, 79, 79, 79, 79, 80, 80, 80, 80, 80, 80, 80, 81, 81, 81, 81, 81, 81, 81, 81, 81, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82},
+    {83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 82, 82, 82, 82, 82, 82, 82, 82, 82, 81, 81, 81, 81, 81, 81, 81, 80, 80, 80, 80, 80, 80, 79, 79, 79, 79, 79, 79, 79, 78, 78, 78, 78, 78, 78, 78, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 78, 78, 78, 78, 78, 78, 78, 79, 79, 79, 79, 79, 79, 79, 80, 80, 80, 80, 80, 80, 81, 81, 81, 81, 81, 81, 81, 82, 82, 82, 82, 82, 82, 82, 82, 82, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83},
+    {84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 83, 83, 83, 83, 83, 83, 83, 83, 83, 82, 82, 82, 82, 82, 82, 82, 81, 81, 81, 81, 81, 81, 80, 80, 80, 80, 80, 80, 79, 79, 79, 79, 79, 79, 79, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 77, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 79, 79, 79, 79, 79, 79, 79, 80, 80, 80, 80, 80, 80, 81, 81, 81, 81, 81, 81, 82, 82, 82, 82, 82, 82, 82, 83, 83, 83, 83, 83, 83, 83, 83, 83, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84},
+    {85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 84, 84, 84, 84, 84, 84, 84, 84, 84, 83, 83, 83, 83, 83, 83, 82, 82, 82, 82, 82, 82, 81, 81, 81, 81, 81, 81, 80, 80, 80, 80, 80, 80, 80, 79, 79, 79, 79, 79, 79, 79, 79, 79, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 78, 79, 79, 79, 79, 79, 79, 79, 79, 79, 80, 80, 80, 80, 80, 80, 80, 81, 81, 81, 81, 81, 81, 82, 82, 82, 82, 82, 82, 83, 83, 83, 83, 83, 83, 84, 84, 84, 84, 84, 84, 84, 84, 84, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85},
+    {86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 85, 85, 85, 85, 85, 85, 85, 85, 85, 84, 84, 84, 84, 84, 84, 83, 83, 83, 83, 83, 83, 82, 82, 82, 82, 82, 82, 81, 81, 81, 81, 81, 81, 81, 80, 80, 80, 80, 80, 80, 80, 80, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 79, 80, 80, 80, 80, 80, 80, 80, 80, 81, 81, 81, 81, 81, 81, 81, 82, 82, 82, 82, 82, 82, 83, 83, 83, 83, 83, 83, 84, 84, 84, 84, 84, 84, 85, 85, 85, 85, 85, 85, 85, 85, 85, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86},
+    {87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 86, 86, 86, 86, 86, 86, 86, 86, 85, 85, 85, 85, 85, 85, 85, 84, 84, 84, 84, 84, 84, 83, 83, 83, 83, 83, 83, 82, 82, 82, 82, 82, 82, 81, 81, 81, 81, 81, 81, 81, 81, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 80, 81, 81, 81, 81, 81, 81, 81, 81, 82, 82, 82, 82, 82, 82, 83, 83, 83, 83, 83, 83, 84, 84, 84, 84, 84, 84, 85, 85, 85, 85, 85, 85, 85, 86, 86, 86, 86, 86, 86, 86, 86, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87},
+    {88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 87, 87, 87, 87, 87, 87, 87, 87, 87, 86, 86, 86, 86, 86, 86, 85, 85, 85, 85, 85, 85, 84, 84, 84, 84, 84, 84, 83, 83, 83, 83, 83, 83, 82, 82, 82, 82, 82, 82, 82, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 81, 82, 82, 82, 82, 82, 82, 82, 83, 83, 83, 83, 83, 83, 84, 84, 84, 84, 84, 84, 85, 85, 85, 85, 85, 85, 86, 86, 86, 86, 86, 86, 87, 87, 87, 87, 87, 87, 87, 87, 87, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88},
+    {89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 88, 88, 88, 88, 88, 88, 88, 88, 88, 87, 87, 87, 87, 87, 87, 87, 86, 86, 86, 86, 86, 85, 85, 85, 85, 85, 85, 84, 84, 84, 84, 84, 84, 83, 83, 83, 83, 83, 83, 83, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 81, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 83, 83, 83, 83, 83, 83, 83, 84, 84, 84, 84, 84, 84, 85, 85, 85, 85, 85, 85, 86, 86, 86, 86, 86, 87, 87, 87, 87, 87, 87, 87, 88, 88, 88, 88, 88, 88, 88, 88, 88, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89},
+    {90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 89, 89, 89, 89, 89, 89, 89, 89, 89, 88, 88, 88, 88, 88, 88, 87, 87, 87, 87, 87, 87, 86, 86, 86, 86, 86, 85, 85, 85, 85, 85, 85, 84, 84, 84, 84, 84, 84, 84, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 82, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 84, 84, 84, 84, 84, 84, 84, 85, 85, 85, 85, 85, 85, 86, 86, 86, 86, 86, 87, 87, 87, 87, 87, 87, 88, 88, 88, 88, 88, 88, 89, 89, 89, 89, 89, 89, 89, 89, 89, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90},
+    {91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 90, 90, 90, 90, 90, 90, 90, 90, 90, 89, 89, 89, 89, 89, 89, 89, 88, 88, 88, 88, 88, 87, 87, 87, 87, 87, 87, 86, 86, 86, 86, 86, 86, 85, 85, 85, 85, 85, 85, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 83, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 85, 85, 85, 85, 85, 85, 86, 86, 86, 86, 86, 86, 87, 87, 87, 87, 87, 87, 88, 88, 88, 88, 88, 89, 89, 89, 89, 89, 89, 89, 90, 90, 90, 90, 90, 90, 90, 90, 90, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91},
+    {92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 91, 91, 91, 91, 91, 91, 91, 91, 91, 90, 90, 90, 90, 90, 90, 89, 89, 89, 89, 89, 89, 88, 88, 88, 88, 88, 87, 87, 87, 87, 87, 87, 86, 86, 86, 86, 86, 86, 86, 85, 85, 85, 85, 85, 85, 85, 85, 85, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 84, 85, 85, 85, 85, 85, 85, 85, 85, 85, 86, 86, 86, 86, 86, 86, 86, 87, 87, 87, 87, 87, 87, 88, 88, 88, 88, 88, 89, 89, 89, 89, 89, 89, 90, 90, 90, 90, 90, 90, 91, 91, 91, 91, 91, 91, 91, 91, 91, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92},
+    {93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 92, 92, 92, 92, 92, 92, 92, 92, 92, 91, 91, 91, 91, 91, 91, 91, 90, 90, 90, 90, 90, 89, 89, 89, 89, 89, 89, 88, 88, 88, 88, 88, 88, 87, 87, 87, 87, 87, 87, 86, 86, 86, 86, 86, 86, 86, 86, 86, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 85, 86, 86, 86, 86, 86, 86, 86, 86, 86, 87, 87, 87, 87, 87, 87, 88, 88, 88, 88, 88, 88, 89, 89, 89, 89, 89, 89, 90, 90, 90, 90, 90, 91, 91, 91, 91, 91, 91, 91, 92, 92, 92, 92, 92, 92, 92, 92, 92, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93},
+    {94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 93, 93, 93, 93, 93, 93, 93, 93, 93, 92, 92, 92, 92, 92, 92, 91, 91, 91, 91, 91, 91, 90, 90, 90, 90, 90, 90, 89, 89, 89, 89, 89, 88, 88, 88, 88, 88, 88, 88, 87, 87, 87, 87, 87, 87, 87, 87, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 86, 87, 87, 87, 87, 87, 87, 87, 87, 88, 88, 88, 88, 88, 88, 88, 89, 89, 89, 89, 89, 90, 90, 90, 90, 90, 90, 91, 91, 91, 91, 91, 91, 92, 92, 92, 92, 92, 92, 93, 93, 93, 93, 93, 93, 93, 93, 93, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94},
+    {95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 94, 94, 94, 94, 94, 94, 94, 94, 94, 93, 93, 93, 93, 93, 93, 92, 92, 92, 92, 92, 92, 91, 91, 91, 91, 91, 90, 90, 90, 90, 90, 90, 89, 89, 89, 89, 89, 89, 88, 88, 88, 88, 88, 88, 88, 88, 88, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 87, 88, 88, 88, 88, 88, 88, 88, 88, 88, 89, 89, 89, 89, 89, 89, 90, 90, 90, 90, 90, 90, 91, 91, 91, 91, 91, 92, 92, 92, 92, 92, 92, 93, 93, 93, 93, 93, 93, 94, 94, 94, 94, 94, 94, 94, 94, 94, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95},
+    {96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 95, 95, 95, 95, 95, 95, 95, 95, 95, 94, 94, 94, 94, 94, 94, 94, 93, 93, 93, 93, 93, 93, 92, 92, 92, 92, 92, 91, 91, 91, 91, 91, 91, 90, 90, 90, 90, 90, 90, 89, 89, 89, 89, 89, 89, 89, 89, 89, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 88, 89, 89, 89, 89, 89, 89, 89, 89, 89, 90, 90, 90, 90, 90, 90, 91, 91, 91, 91, 91, 91, 92, 92, 92, 92, 92, 93, 93, 93, 93, 93, 93, 94, 94, 94, 94, 94, 94, 94, 95, 95, 95, 95, 95, 95, 95, 95, 95, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96},
+    {97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 96, 96, 96, 96, 96, 96, 96, 96, 96, 95, 95, 95, 95, 95, 95, 95, 94, 94, 94, 94, 94, 93, 93, 93, 93, 93, 93, 92, 92, 92, 92, 92, 92, 91, 91, 91, 91, 91, 91, 90, 90, 90, 90, 90, 90, 90, 90, 90, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 89, 90, 90, 90, 90, 90, 90, 90, 90, 90, 91, 91, 91, 91, 91, 91, 92, 92, 92, 92, 92, 92, 93, 93, 93, 93, 93, 93, 94, 94, 94, 94, 94, 95, 95, 95, 95, 95, 95, 95, 96, 96, 96, 96, 96, 96, 96, 96, 96, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97},
+    {98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 97, 97, 97, 97, 97, 97, 97, 97, 97, 96, 96, 96, 96, 96, 96, 95, 95, 95, 95, 95, 95, 94, 94, 94, 94, 94, 94, 93, 93, 93, 93, 93, 93, 92, 92, 92, 92, 92, 92, 91, 91, 91, 91, 91, 91, 91, 91, 91, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 90, 91, 91, 91, 91, 91, 91, 91, 91, 91, 92, 92, 92, 92, 92, 92, 93, 93, 93, 93, 93, 93, 94, 94, 94, 94, 94, 94, 95, 95, 95, 95, 95, 95, 96, 96, 96, 96, 96, 96, 97, 97, 97, 97, 97, 97, 97, 97, 97, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98},
+    {99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 97, 97, 97, 97, 97, 97, 96, 96, 96, 96, 96, 96, 95, 95, 95, 95, 95, 95, 94, 94, 94, 94, 94, 94, 93, 93, 93, 93, 93, 93, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 91, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 93, 93, 93, 93, 93, 93, 94, 94, 94, 94, 94, 94, 95, 95, 95, 95, 95, 95, 96, 96, 96, 96, 96, 96, 97, 97, 97, 97, 97, 97, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99},
+    {100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 99, 99, 99, 99, 99, 99, 99, 99, 99, 98, 98, 98, 98, 98, 98, 98, 97, 97, 97, 97, 97, 97, 96, 96, 96, 96, 96, 96, 95, 95, 95, 95, 95, 95, 94, 94, 94, 94, 94, 94, 94, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 92, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 94, 94, 94, 94, 94, 94, 94, 95, 95, 95, 95, 95, 95, 96, 96, 96, 96, 96, 96, 97, 97, 97, 97, 97, 97, 98, 98, 98, 98, 98, 98, 98, 99, 99, 99, 99, 99, 99, 99, 99, 99, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100},
+    {101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 100, 100, 100, 100, 100, 100, 100, 100, 100, 99, 99, 99, 99, 99, 99, 99, 98, 98, 98, 98, 98, 98, 97, 97, 97, 97, 97, 97, 96, 96, 96, 96, 96, 96, 95, 95, 95, 95, 95, 95, 95, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 93, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 94, 95, 95, 95, 95, 95, 95, 95, 96, 96, 96, 96, 96, 96, 97, 97, 97, 97, 97, 97, 98, 98, 98, 98, 98, 98, 99, 99, 99, 99, 99, 99, 99, 100, 100, 100, 100, 100, 100, 100, 100, 100, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101},
+    {102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 101, 101, 101, 101, 101, 101, 101, 101, 101, 100, 100, 100, 100, 100, 100, 100, 99, 99, 99, 99, 99, 99, 98, 98, 98, 98, 98, 98, 97, 97, 97, 97, 97, 97, 96, 96, 96, 96, 96, 96, 96, 96, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 95, 96, 96, 96, 96, 96, 96, 96, 96, 97, 97, 97, 97, 97, 97, 98, 98, 98, 98, 98, 98, 99, 99, 99, 99, 99, 99, 100, 100, 100, 100, 100, 100, 100, 101, 101, 101, 101, 101, 101, 101, 101, 101, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102},
+    {103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 102, 102, 102, 102, 102, 102, 102, 102, 102, 101, 101, 101, 101, 101, 101, 101, 100, 100, 100, 100, 100, 100, 100, 99, 99, 99, 99, 99, 99, 98, 98, 98, 98, 98, 98, 97, 97, 97, 97, 97, 97, 97, 97, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 96, 97, 97, 97, 97, 97, 97, 97, 97, 98, 98, 98, 98, 98, 98, 99, 99, 99, 99, 99, 99, 100, 100, 100, 100, 100, 100, 100, 101, 101, 101, 101, 101, 101, 101, 102, 102, 102, 102, 102, 102, 102, 102, 102, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103},
+    {104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 103, 103, 103, 103, 103, 103, 103, 103, 103, 102, 102, 102, 102, 102, 102, 102, 102, 101, 101, 101, 101, 101, 101, 100, 100, 100, 100, 100, 100, 99, 99, 99, 99, 99, 99, 99, 98, 98, 98, 98, 98, 98, 98, 98, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 97, 98, 98, 98, 98, 98, 98, 98, 98, 99, 99, 99, 99, 99, 99, 99, 100, 100, 100, 100, 100, 100, 101, 101, 101, 101, 101, 101, 102, 102, 102, 102, 102, 102, 102, 102, 103, 103, 103, 103, 103, 103, 103, 103, 103, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104},
+    {105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 103, 103, 103, 103, 103, 103, 103, 103, 102, 102, 102, 102, 102, 102, 101, 101, 101, 101, 101, 101, 101, 100, 100, 100, 100, 100, 100, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 98, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 100, 100, 100, 100, 100, 100, 101, 101, 101, 101, 101, 101, 101, 102, 102, 102, 102, 102, 102, 103, 103, 103, 103, 103, 103, 103, 103, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105},
+    {106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 104, 104, 104, 104, 104, 104, 104, 103, 103, 103, 103, 103, 103, 103, 102, 102, 102, 102, 102, 102, 101, 101, 101, 101, 101, 101, 101, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 99, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 101, 101, 101, 101, 101, 101, 101, 102, 102, 102, 102, 102, 102, 103, 103, 103, 103, 103, 103, 103, 104, 104, 104, 104, 104, 104, 104, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106},
+    {107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 105, 105, 105, 105, 105, 105, 105, 104, 104, 104, 104, 104, 104, 104, 103, 103, 103, 103, 103, 103, 103, 102, 102, 102, 102, 102, 102, 102, 102, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 101, 102, 102, 102, 102, 102, 102, 102, 102, 103, 103, 103, 103, 103, 103, 103, 104, 104, 104, 104, 104, 104, 104, 105, 105, 105, 105, 105, 105, 105, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107},
+    {108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 106, 106, 106, 106, 106, 106, 106, 106, 105, 105, 105, 105, 105, 105, 105, 104, 104, 104, 104, 104, 104, 104, 103, 103, 103, 103, 103, 103, 103, 103, 103, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 102, 103, 103, 103, 103, 103, 103, 103, 103, 103, 104, 104, 104, 104, 104, 104, 104, 105, 105, 105, 105, 105, 105, 105, 106, 106, 106, 106, 106, 106, 106, 106, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108},
+    {109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 107, 107, 107, 107, 107, 107, 107, 107, 106, 106, 106, 106, 106, 106, 106, 106, 105, 105, 105, 105, 105, 105, 105, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 104, 104, 104, 104, 104, 104, 104, 104, 104, 103, 103, 103, 103, 103, 103, 103, 103, 103, 103, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 104, 105, 105, 105, 105, 105, 105, 105, 106, 106, 106, 106, 106, 106, 106, 106, 107, 107, 107, 107, 107, 107, 107, 107, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109},
+    {110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 108, 108, 108, 108, 108, 108, 108, 108, 107, 107, 107, 107, 107, 107, 107, 107, 106, 106, 106, 106, 106, 106, 106, 106, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 105, 106, 106, 106, 106, 106, 106, 106, 106, 107, 107, 107, 107, 107, 107, 107, 107, 108, 108, 108, 108, 108, 108, 108, 108, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110},
+    {111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 109, 109, 109, 109, 109, 109, 109, 109, 109, 108, 108, 108, 108, 108, 108, 108, 108, 107, 107, 107, 107, 107, 107, 107, 107, 107, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 106, 107, 107, 107, 107, 107, 107, 107, 107, 107, 108, 108, 108, 108, 108, 108, 108, 108, 109, 109, 109, 109, 109, 109, 109, 109, 109, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111},
+    {112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 110, 110, 110, 110, 110, 110, 110, 110, 110, 109, 109, 109, 109, 109, 109, 109, 109, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 107, 107, 107, 107, 107, 107, 107, 107, 107, 107, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 108, 109, 109, 109, 109, 109, 109, 109, 109, 110, 110, 110, 110, 110, 110, 110, 110, 110, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112},
+    {113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 111, 111, 111, 111, 111, 111, 111, 111, 111, 110, 110, 110, 110, 110, 110, 110, 110, 110, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 110, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 109, 110, 110, 110, 110, 110, 110, 110, 110, 110, 111, 111, 111, 111, 111, 111, 111, 111, 111, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113},
+    {114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 110, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114},
+    {115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 112, 112, 112, 112, 112, 112, 112, 112, 113, 113, 113, 113, 113, 113, 113, 112, 112, 112, 112, 112, 112, 112, 112, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 111, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 112, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115},
+    {116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 113, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116},
+    {117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 115, 115, 115, 115, 115, 116, 116, 116, 116, 116, 116, 116, 116, 116, 115, 115, 115, 115, 115, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 114, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117},
+    {118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 116, 116, 116, 116, 116, 117, 117, 117, 117, 118, 118, 118, 118, 118, 118, 118, 117, 117, 117, 117, 116, 116, 116, 116, 116, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 115, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 116, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118},
+    {119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 118, 118, 118, 119, 119, 119, 119, 120, 120, 120, 119, 119, 119, 119, 118, 118, 118, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 117, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119},
+    {120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 119, 119, 119, 119, 120, 120, 121, 121, 121, 121, 121, 121, 121, 121, 121, 120, 120, 119, 119, 119, 119, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 118, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120},
+    {121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 120, 120, 120, 120, 120, 121, 121, 122, 122, 122, 122, 123, 123, 123, 122, 122, 122, 122, 121, 121, 120, 120, 120, 120, 120, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 119, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 120, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121},
+    {122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 120, 120, 120, 120, 120, 120, 120, 120, 121, 121, 121, 121, 121, 121, 121, 121, 122, 122, 122, 123, 123, 123, 124, 124, 124, 124, 124, 123, 123, 123, 122, 122, 122, 121, 121, 121, 121, 121, 121, 121, 121, 120, 120, 120, 120, 120, 120, 120, 120, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 121, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122},
+    {123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 123, 123, 123, 124, 124, 124, 124, 125, 125, 125, 125, 125, 124, 124, 124, 124, 123, 123, 123, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 122, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123},
+    {124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 124, 124, 124, 124, 124, 125, 125, 125, 125, 125, 126, 126, 126, 125, 125, 125, 125, 125, 124, 124, 124, 124, 124, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 123, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124},
+    {125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 125, 125, 125, 125, 125, 125, 125, 125, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 125, 125, 125, 125, 125, 125, 125, 125, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 124, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125, 125},
+    {126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 127, 127, 127, 127, 127, 127, 127, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126, 126},
+    {127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127, 127},
+    {128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 127, 127, 127, 127, 127, 127, 127, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128},
+    {129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 129, 129, 129, 129, 129, 129, 129, 129, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 128, 129, 129, 129, 129, 129, 129, 129, 129, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129, 129},
+    {130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 130, 130, 130, 130, 130, 129, 129, 129, 129, 129, 128, 128, 128, 129, 129, 129, 129, 129, 130, 130, 130, 130, 130, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130, 130},
+    {131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 131, 131, 131, 130, 130, 130, 130, 129, 129, 129, 129, 129, 130, 130, 130, 130, 131, 131, 131, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131, 131},
+    {132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 134, 134, 134, 134, 134, 134, 134, 134, 133, 133, 133, 133, 133, 133, 133, 133, 132, 132, 132, 131, 131, 131, 130, 130, 130, 130, 130, 131, 131, 131, 132, 132, 132, 133, 133, 133, 133, 133, 133, 133, 133, 134, 134, 134, 134, 134, 134, 134, 134, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132, 132},
+    {133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 134, 134, 134, 134, 134, 133, 133, 132, 132, 132, 132, 131, 131, 131, 132, 132, 132, 132, 133, 133, 134, 134, 134, 134, 134, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133, 133},
+    {134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 135, 135, 135, 135, 134, 134, 133, 133, 133, 133, 133, 133, 133, 133, 133, 134, 134, 135, 135, 135, 135, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134, 134},
+    {135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 136, 136, 136, 135, 135, 135, 135, 134, 134, 134, 135, 135, 135, 135, 136, 136, 136, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135, 135},
+    {136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 138, 138, 138, 138, 138, 137, 137, 137, 137, 136, 136, 136, 136, 136, 136, 136, 137, 137, 137, 137, 138, 138, 138, 138, 138, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136, 136},
+    {137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 139, 139, 139, 139, 139, 138, 138, 138, 138, 138, 138, 138, 138, 138, 139, 139, 139, 139, 139, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137, 137},
+    {138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138, 138},
+    {139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 142, 142, 142, 142, 142, 142, 142, 142, 141, 141, 141, 141, 141, 141, 141, 142, 142, 142, 142, 142, 142, 142, 142, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139, 139},
+    {140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140, 140},
+    {141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 143, 143, 143, 143, 143, 143, 143, 143, 143, 144, 144, 144, 144, 144, 144, 144, 144, 144, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 144, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 144, 144, 144, 144, 144, 144, 144, 144, 144, 143, 143, 143, 143, 143, 143, 143, 143, 143, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141, 141},
+    {142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 144, 144, 144, 144, 144, 144, 144, 144, 144, 145, 145, 145, 145, 145, 145, 145, 145, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 145, 145, 145, 145, 145, 145, 145, 145, 144, 144, 144, 144, 144, 144, 144, 144, 144, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142, 142},
+    {143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 145, 145, 145, 145, 145, 145, 145, 145, 145, 146, 146, 146, 146, 146, 146, 146, 146, 147, 147, 147, 147, 147, 147, 147, 147, 147, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 147, 147, 147, 147, 147, 147, 147, 147, 147, 146, 146, 146, 146, 146, 146, 146, 146, 145, 145, 145, 145, 145, 145, 145, 145, 145, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143, 143},
+    {144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 146, 146, 146, 146, 146, 146, 146, 146, 147, 147, 147, 147, 147, 147, 147, 147, 148, 148, 148, 148, 148, 148, 148, 148, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 148, 148, 148, 148, 148, 148, 148, 148, 147, 147, 147, 147, 147, 147, 147, 147, 146, 146, 146, 146, 146, 146, 146, 146, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144, 144},
+    {145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 147, 147, 147, 147, 147, 147, 147, 147, 148, 148, 148, 148, 148, 148, 148, 148, 149, 149, 149, 149, 149, 149, 149, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 150, 150, 150, 150, 150, 150, 150, 150, 150, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 149, 149, 149, 149, 149, 149, 149, 148, 148, 148, 148, 148, 148, 148, 148, 147, 147, 147, 147, 147, 147, 147, 147, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145, 145},
+    {146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 148, 148, 148, 148, 148, 148, 148, 148, 149, 149, 149, 149, 149, 149, 149, 150, 150, 150, 150, 150, 150, 150, 151, 151, 151, 151, 151, 151, 151, 151, 151, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 151, 151, 151, 151, 151, 151, 151, 151, 151, 150, 150, 150, 150, 150, 150, 150, 149, 149, 149, 149, 149, 149, 149, 148, 148, 148, 148, 148, 148, 148, 148, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146, 146},
+    {147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 149, 149, 149, 149, 149, 149, 149, 150, 150, 150, 150, 150, 150, 150, 151, 151, 151, 151, 151, 151, 151, 152, 152, 152, 152, 152, 152, 152, 152, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 152, 152, 152, 152, 152, 152, 152, 152, 151, 151, 151, 151, 151, 151, 151, 150, 150, 150, 150, 150, 150, 150, 149, 149, 149, 149, 149, 149, 149, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147, 147},
+    {148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 150, 150, 150, 150, 150, 150, 150, 151, 151, 151, 151, 151, 151, 151, 152, 152, 152, 152, 152, 152, 153, 153, 153, 153, 153, 153, 153, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 153, 153, 153, 153, 153, 153, 153, 152, 152, 152, 152, 152, 152, 151, 151, 151, 151, 151, 151, 151, 150, 150, 150, 150, 150, 150, 150, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148, 148},
+    {149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 151, 151, 151, 151, 151, 151, 151, 151, 152, 152, 152, 152, 152, 152, 153, 153, 153, 153, 153, 153, 153, 154, 154, 154, 154, 154, 154, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 154, 154, 154, 154, 154, 154, 153, 153, 153, 153, 153, 153, 153, 152, 152, 152, 152, 152, 152, 151, 151, 151, 151, 151, 151, 151, 151, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149, 149},
+    {150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 151, 151, 151, 151, 151, 151, 151, 151, 151, 152, 152, 152, 152, 152, 152, 152, 152, 153, 153, 153, 153, 153, 153, 154, 154, 154, 154, 154, 154, 155, 155, 155, 155, 155, 155, 155, 156, 156, 156, 156, 156, 156, 156, 156, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 156, 156, 156, 156, 156, 156, 156, 156, 155, 155, 155, 155, 155, 155, 155, 154, 154, 154, 154, 154, 154, 153, 153, 153, 153, 153, 153, 152, 152, 152, 152, 152, 152, 152, 152, 151, 151, 151, 151, 151, 151, 151, 151, 151, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150},
+    {151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 152, 152, 152, 152, 152, 152, 152, 152, 152, 153, 153, 153, 153, 153, 153, 153, 154, 154, 154, 154, 154, 154, 154, 155, 155, 155, 155, 155, 155, 156, 156, 156, 156, 156, 156, 157, 157, 157, 157, 157, 157, 157, 157, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 157, 157, 157, 157, 157, 157, 157, 157, 156, 156, 156, 156, 156, 156, 155, 155, 155, 155, 155, 155, 154, 154, 154, 154, 154, 154, 154, 153, 153, 153, 153, 153, 153, 153, 152, 152, 152, 152, 152, 152, 152, 152, 152, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151, 151},
+    {152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 153, 153, 153, 153, 153, 153, 153, 153, 153, 154, 154, 154, 154, 154, 154, 154, 155, 155, 155, 155, 155, 155, 156, 156, 156, 156, 156, 156, 157, 157, 157, 157, 157, 157, 158, 158, 158, 158, 158, 158, 158, 158, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 158, 158, 158, 158, 158, 158, 158, 158, 157, 157, 157, 157, 157, 157, 156, 156, 156, 156, 156, 156, 155, 155, 155, 155, 155, 155, 154, 154, 154, 154, 154, 154, 154, 153, 153, 153, 153, 153, 153, 153, 153, 153, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152, 152},
+    {153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 154, 154, 154, 154, 154, 154, 154, 154, 154, 155, 155, 155, 155, 155, 155, 155, 156, 156, 156, 156, 156, 156, 157, 157, 157, 157, 157, 157, 158, 158, 158, 158, 158, 158, 159, 159, 159, 159, 159, 159, 159, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 159, 159, 159, 159, 159, 159, 159, 158, 158, 158, 158, 158, 158, 157, 157, 157, 157, 157, 157, 156, 156, 156, 156, 156, 156, 155, 155, 155, 155, 155, 155, 155, 154, 154, 154, 154, 154, 154, 154, 154, 154, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153, 153},
+    {154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 155, 155, 155, 155, 155, 155, 155, 155, 155, 156, 156, 156, 156, 156, 156, 156, 157, 157, 157, 157, 157, 157, 158, 158, 158, 158, 158, 158, 159, 159, 159, 159, 159, 159, 160, 160, 160, 160, 160, 160, 160, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 160, 160, 160, 160, 160, 160, 160, 159, 159, 159, 159, 159, 159, 158, 158, 158, 158, 158, 158, 157, 157, 157, 157, 157, 157, 156, 156, 156, 156, 156, 156, 156, 155, 155, 155, 155, 155, 155, 155, 155, 155, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154, 154},
+    {155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 157, 157, 157, 157, 157, 157, 158, 158, 158, 158, 158, 158, 159, 159, 159, 159, 159, 159, 160, 160, 160, 160, 160, 160, 161, 161, 161, 161, 161, 161, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 161, 161, 161, 161, 161, 161, 160, 160, 160, 160, 160, 160, 159, 159, 159, 159, 159, 159, 158, 158, 158, 158, 158, 158, 157, 157, 157, 157, 157, 157, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155, 155},
+    {156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 157, 157, 157, 157, 157, 157, 157, 157, 157, 158, 158, 158, 158, 158, 158, 159, 159, 159, 159, 159, 159, 160, 160, 160, 160, 160, 160, 161, 161, 161, 161, 161, 161, 162, 162, 162, 162, 162, 162, 163, 163, 163, 163, 163, 163, 163, 163, 163, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 163, 163, 163, 163, 163, 163, 163, 163, 163, 162, 162, 162, 162, 162, 162, 161, 161, 161, 161, 161, 161, 160, 160, 160, 160, 160, 160, 159, 159, 159, 159, 159, 159, 158, 158, 158, 158, 158, 158, 157, 157, 157, 157, 157, 157, 157, 157, 157, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156, 156},
+    {157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 158, 158, 158, 158, 158, 158, 158, 158, 158, 159, 159, 159, 159, 159, 159, 159, 160, 160, 160, 160, 160, 161, 161, 161, 161, 161, 161, 162, 162, 162, 162, 162, 162, 163, 163, 163, 163, 163, 163, 164, 164, 164, 164, 164, 164, 164, 164, 164, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 164, 164, 164, 164, 164, 164, 164, 164, 164, 163, 163, 163, 163, 163, 163, 162, 162, 162, 162, 162, 162, 161, 161, 161, 161, 161, 161, 160, 160, 160, 160, 160, 159, 159, 159, 159, 159, 159, 159, 158, 158, 158, 158, 158, 158, 158, 158, 158, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157, 157},
+    {158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 159, 159, 159, 159, 159, 159, 159, 159, 159, 160, 160, 160, 160, 160, 160, 160, 161, 161, 161, 161, 161, 161, 162, 162, 162, 162, 162, 163, 163, 163, 163, 163, 163, 164, 164, 164, 164, 164, 164, 165, 165, 165, 165, 165, 165, 165, 165, 165, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 165, 165, 165, 165, 165, 165, 165, 165, 165, 164, 164, 164, 164, 164, 164, 163, 163, 163, 163, 163, 163, 162, 162, 162, 162, 162, 161, 161, 161, 161, 161, 161, 160, 160, 160, 160, 160, 160, 160, 159, 159, 159, 159, 159, 159, 159, 159, 159, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158, 158},
+    {159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 160, 160, 160, 160, 160, 160, 160, 160, 160, 161, 161, 161, 161, 161, 161, 162, 162, 162, 162, 162, 162, 163, 163, 163, 163, 163, 164, 164, 164, 164, 164, 164, 165, 165, 165, 165, 165, 165, 166, 166, 166, 166, 166, 166, 166, 166, 166, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 166, 166, 166, 166, 166, 166, 166, 166, 166, 165, 165, 165, 165, 165, 165, 164, 164, 164, 164, 164, 164, 163, 163, 163, 163, 163, 162, 162, 162, 162, 162, 162, 161, 161, 161, 161, 161, 161, 160, 160, 160, 160, 160, 160, 160, 160, 160, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159, 159},
+    {160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 161, 161, 161, 161, 161, 161, 161, 161, 161, 162, 162, 162, 162, 162, 162, 163, 163, 163, 163, 163, 163, 164, 164, 164, 164, 164, 164, 165, 165, 165, 165, 165, 166, 166, 166, 166, 166, 166, 166, 167, 167, 167, 167, 167, 167, 167, 167, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 167, 167, 167, 167, 167, 167, 167, 167, 166, 166, 166, 166, 166, 166, 166, 165, 165, 165, 165, 165, 164, 164, 164, 164, 164, 164, 163, 163, 163, 163, 163, 163, 162, 162, 162, 162, 162, 162, 161, 161, 161, 161, 161, 161, 161, 161, 161, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160, 160},
+    {161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 162, 162, 162, 162, 162, 162, 162, 162, 162, 163, 163, 163, 163, 163, 163, 163, 164, 164, 164, 164, 164, 165, 165, 165, 165, 165, 165, 166, 166, 166, 166, 166, 166, 167, 167, 167, 167, 167, 167, 168, 168, 168, 168, 168, 168, 168, 168, 168, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 168, 168, 168, 168, 168, 168, 168, 168, 168, 167, 167, 167, 167, 167, 167, 166, 166, 166, 166, 166, 166, 165, 165, 165, 165, 165, 165, 164, 164, 164, 164, 164, 163, 163, 163, 163, 163, 163, 163, 162, 162, 162, 162, 162, 162, 162, 162, 162, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161, 161},
+    {162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 163, 163, 163, 163, 163, 163, 163, 163, 163, 164, 164, 164, 164, 164, 164, 165, 165, 165, 165, 165, 165, 166, 166, 166, 166, 166, 167, 167, 167, 167, 167, 167, 168, 168, 168, 168, 168, 168, 168, 169, 169, 169, 169, 169, 169, 169, 169, 169, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 169, 169, 169, 169, 169, 169, 169, 169, 169, 168, 168, 168, 168, 168, 168, 168, 167, 167, 167, 167, 167, 167, 166, 166, 166, 166, 166, 165, 165, 165, 165, 165, 165, 164, 164, 164, 164, 164, 164, 163, 163, 163, 163, 163, 163, 163, 163, 163, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162, 162},
+    {163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 164, 164, 164, 164, 164, 164, 164, 164, 164, 165, 165, 165, 165, 165, 165, 165, 166, 166, 166, 166, 166, 167, 167, 167, 167, 167, 167, 168, 168, 168, 168, 168, 168, 169, 169, 169, 169, 169, 169, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 169, 169, 169, 169, 169, 169, 168, 168, 168, 168, 168, 168, 167, 167, 167, 167, 167, 167, 166, 166, 166, 166, 166, 165, 165, 165, 165, 165, 165, 165, 164, 164, 164, 164, 164, 164, 164, 164, 164, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163, 163},
+    {164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 165, 165, 165, 165, 165, 165, 165, 165, 165, 166, 166, 166, 166, 166, 166, 167, 167, 167, 167, 167, 167, 168, 168, 168, 168, 168, 169, 169, 169, 169, 169, 169, 170, 170, 170, 170, 170, 170, 170, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 170, 170, 170, 170, 170, 170, 170, 169, 169, 169, 169, 169, 169, 168, 168, 168, 168, 168, 167, 167, 167, 167, 167, 167, 166, 166, 166, 166, 166, 166, 165, 165, 165, 165, 165, 165, 165, 165, 165, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164, 164},
+    {165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 166, 166, 166, 166, 166, 166, 166, 166, 166, 167, 167, 167, 167, 167, 167, 167, 168, 168, 168, 168, 168, 169, 169, 169, 169, 169, 169, 170, 170, 170, 170, 170, 170, 171, 171, 171, 171, 171, 171, 171, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 173, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 171, 171, 171, 171, 171, 171, 171, 170, 170, 170, 170, 170, 170, 169, 169, 169, 169, 169, 169, 168, 168, 168, 168, 168, 167, 167, 167, 167, 167, 167, 167, 166, 166, 166, 166, 166, 166, 166, 166, 166, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165, 165},
+    {166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 167, 167, 167, 167, 167, 167, 167, 167, 167, 168, 168, 168, 168, 168, 168, 169, 169, 169, 169, 169, 169, 170, 170, 170, 170, 170, 170, 171, 171, 171, 171, 171, 171, 172, 172, 172, 172, 172, 172, 172, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 172, 172, 172, 172, 172, 172, 172, 171, 171, 171, 171, 171, 171, 170, 170, 170, 170, 170, 170, 169, 169, 169, 169, 169, 169, 168, 168, 168, 168, 168, 168, 167, 167, 167, 167, 167, 167, 167, 167, 167, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166, 166},
+    {167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 168, 168, 168, 168, 168, 168, 168, 168, 169, 169, 169, 169, 169, 169, 169, 170, 170, 170, 170, 170, 170, 171, 171, 171, 171, 171, 171, 172, 172, 172, 172, 172, 172, 173, 173, 173, 173, 173, 173, 173, 173, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 173, 173, 173, 173, 173, 173, 173, 173, 172, 172, 172, 172, 172, 172, 171, 171, 171, 171, 171, 171, 170, 170, 170, 170, 170, 170, 169, 169, 169, 169, 169, 169, 169, 168, 168, 168, 168, 168, 168, 168, 168, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167, 167},
+    {168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 169, 169, 169, 169, 169, 169, 169, 169, 169, 170, 170, 170, 170, 170, 170, 171, 171, 171, 171, 171, 171, 172, 172, 172, 172, 172, 172, 173, 173, 173, 173, 173, 173, 173, 174, 174, 174, 174, 174, 174, 174, 174, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 174, 174, 174, 174, 174, 174, 174, 174, 173, 173, 173, 173, 173, 173, 173, 172, 172, 172, 172, 172, 172, 171, 171, 171, 171, 171, 171, 170, 170, 170, 170, 170, 170, 169, 169, 169, 169, 169, 169, 169, 169, 169, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168, 168},
+    {169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 170, 170, 170, 170, 170, 170, 170, 170, 170, 171, 171, 171, 171, 171, 171, 172, 172, 172, 172, 172, 172, 173, 173, 173, 173, 173, 173, 174, 174, 174, 174, 174, 174, 174, 175, 175, 175, 175, 175, 175, 175, 175, 175, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 175, 175, 175, 175, 175, 175, 175, 175, 175, 174, 174, 174, 174, 174, 174, 174, 173, 173, 173, 173, 173, 173, 172, 172, 172, 172, 172, 172, 171, 171, 171, 171, 171, 171, 170, 170, 170, 170, 170, 170, 170, 170, 170, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169, 169},
+    {170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 171, 171, 171, 171, 171, 171, 171, 171, 171, 172, 172, 172, 172, 172, 172, 172, 173, 173, 173, 173, 173, 173, 174, 174, 174, 174, 174, 174, 175, 175, 175, 175, 175, 175, 175, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 175, 175, 175, 175, 175, 175, 175, 174, 174, 174, 174, 174, 174, 173, 173, 173, 173, 173, 173, 172, 172, 172, 172, 172, 172, 172, 171, 171, 171, 171, 171, 171, 171, 171, 171, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170, 170},
+    {171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 172, 172, 172, 172, 172, 172, 172, 172, 172, 173, 173, 173, 173, 173, 173, 173, 174, 174, 174, 174, 174, 174, 175, 175, 175, 175, 175, 175, 175, 176, 176, 176, 176, 176, 176, 176, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 176, 176, 176, 176, 176, 176, 176, 175, 175, 175, 175, 175, 175, 175, 174, 174, 174, 174, 174, 174, 173, 173, 173, 173, 173, 173, 173, 172, 172, 172, 172, 172, 172, 172, 172, 172, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171, 171},
+    {172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 173, 173, 173, 173, 173, 173, 173, 173, 173, 174, 174, 174, 174, 174, 174, 174, 175, 175, 175, 175, 175, 175, 176, 176, 176, 176, 176, 176, 176, 177, 177, 177, 177, 177, 177, 177, 177, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 177, 177, 177, 177, 177, 177, 177, 177, 176, 176, 176, 176, 176, 176, 176, 175, 175, 175, 175, 175, 175, 174, 174, 174, 174, 174, 174, 174, 173, 173, 173, 173, 173, 173, 173, 173, 173, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172, 172},
+    {173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 174, 174, 174, 174, 174, 174, 174, 174, 174, 175, 175, 175, 175, 175, 175, 175, 176, 176, 176, 176, 176, 176, 177, 177, 177, 177, 177, 177, 177, 178, 178, 178, 178, 178, 178, 178, 178, 178, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 178, 178, 178, 178, 178, 178, 178, 178, 178, 177, 177, 177, 177, 177, 177, 177, 176, 176, 176, 176, 176, 176, 175, 175, 175, 175, 175, 175, 175, 174, 174, 174, 174, 174, 174, 174, 174, 174, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173, 173},
+    {174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 175, 175, 175, 175, 175, 175, 175, 175, 175, 176, 176, 176, 176, 176, 176, 176, 177, 177, 177, 177, 177, 177, 178, 178, 178, 178, 178, 178, 178, 178, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 178, 178, 178, 178, 178, 178, 178, 178, 177, 177, 177, 177, 177, 177, 176, 176, 176, 176, 176, 176, 176, 175, 175, 175, 175, 175, 175, 175, 175, 175, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174, 174},
+    {175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 177, 177, 177, 177, 177, 177, 177, 178, 178, 178, 178, 178, 178, 178, 179, 179, 179, 179, 179, 179, 179, 179, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 179, 179, 179, 179, 179, 179, 179, 179, 178, 178, 178, 178, 178, 178, 178, 177, 177, 177, 177, 177, 177, 177, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175, 175},
+    {176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 178, 178, 178, 178, 178, 178, 178, 179, 179, 179, 179, 179, 179, 179, 180, 180, 180, 180, 180, 180, 180, 180, 180, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 180, 180, 180, 180, 180, 180, 180, 180, 180, 179, 179, 179, 179, 179, 179, 179, 178, 178, 178, 178, 178, 178, 178, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176, 176},
+    {177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 179, 179, 179, 179, 179, 179, 179, 180, 180, 180, 180, 180, 180, 180, 180, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 180, 180, 180, 180, 180, 180, 180, 180, 179, 179, 179, 179, 179, 179, 179, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177, 177},
+    {178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 180, 180, 180, 180, 180, 180, 180, 180, 181, 181, 181, 181, 181, 181, 181, 181, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 181, 181, 181, 181, 181, 181, 181, 181, 180, 180, 180, 180, 180, 180, 180, 180, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178, 178},
+    {179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 181, 181, 181, 181, 181, 181, 181, 181, 182, 182, 182, 182, 182, 182, 182, 182, 182, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 182, 182, 182, 182, 182, 182, 182, 182, 182, 181, 181, 181, 181, 181, 181, 181, 181, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179, 179},
+    {180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 182, 182, 182, 182, 182, 182, 182, 182, 182, 183, 183, 183, 183, 183, 183, 183, 183, 183, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 183, 183, 183, 183, 183, 183, 183, 183, 183, 182, 182, 182, 182, 182, 182, 182, 182, 182, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180, 180},
+    {181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 183, 183, 183, 183, 183, 183, 183, 183, 183, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 183, 183, 183, 183, 183, 183, 183, 183, 183, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181, 181},
+    {182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182, 182},
+    {183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183, 183},
+    {184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184, 184},
+    {185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185, 185},
+    {186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186, 186},
+    {187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187, 187},
+    {188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188, 188},
+    {189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189, 189},
+    {190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190, 190},
+    {191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191, 191},
+    {192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192, 192},
+    {193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193, 193},
+    {194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194, 194},
+    {195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195, 195},
+    {196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196, 196},
+    {197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197, 197},
+    {198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198, 198},
+    {199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199, 199},
+    {200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200, 200},
+    {201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201, 201},
+    {202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202, 202},
+    {203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203, 203},
+    {204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204, 204},
+    {205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205, 205},
+    {206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206, 206},
+    {207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207, 207},
+    {208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208, 208},
+    {209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209, 209},
+    {210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210, 210},
+    {211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211, 211},
+    {212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212, 212},
+    {213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213, 213},
+    {214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214, 214},
+    {215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215, 215},
+    {216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216, 216},
+    {217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217, 217},
+    {218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218, 218},
+    {219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219},
+    {220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220, 220},
+    {221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221, 221},
+    {222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222, 222},
+    {223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223, 223},
+    {224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224, 224},
+    {225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225, 225},
+    {226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226, 226},
+    {227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227, 227},
+    {228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228, 228},
+    {229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229, 229},
+    {230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230, 230},
+    {231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231, 231},
+    {232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232, 232},
+    {233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233, 233},
+    {234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234, 234},
+    {235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235, 235},
+    {236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236, 236},
+    {237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237, 237},
+    {238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238, 238},
+    {239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239, 239},
+    {240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240, 240},
+    {241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241, 241},
+    {242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242, 242},
+    {243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243, 243},
+    {244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244, 244},
+    {245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245, 245},
+    {246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246, 246},
+    {247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247, 247},
+    {248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248, 248},
+    {249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249, 249},
+    {250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250, 250},
+    {251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251, 251},
+    {252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252, 252},
+    {253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253, 253},
+    {254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254},
+    {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}
+};
+
+} //namespace
+
+#endif // VPM_COLOR_ENHANCEMENT_PRIVATE_H
diff --git a/src/modules/video_processing/main/source/content_analysis.cc b/src/modules/video_processing/main/source/content_analysis.cc
new file mode 100644
index 0000000..bd15e17
--- /dev/null
+++ b/src/modules/video_processing/main/source/content_analysis.cc
@@ -0,0 +1,337 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#include "content_analysis.h"
+#include "tick_util.h"
+#include "system_wrappers/interface/cpu_features_wrapper.h"
+
+#include <math.h>
+#include <stdlib.h>
+
+namespace webrtc {
+
+VPMContentAnalysis::VPMContentAnalysis(bool runtime_cpu_detection):
+_origFrame(NULL),
+_prevFrame(NULL),
+_width(0),
+_height(0),
+_skipNum(1),
+_border(8),
+_motionMagnitude(0.0f),
+_spatialPredErr(0.0f),
+_spatialPredErrH(0.0f),
+_spatialPredErrV(0.0f),
+_firstFrame(true),
+_CAInit(false),
+_cMetrics(NULL)
+{
+    ComputeSpatialMetrics = &VPMContentAnalysis::ComputeSpatialMetrics_C;
+    TemporalDiffMetric = &VPMContentAnalysis::TemporalDiffMetric_C;
+
+    if (runtime_cpu_detection)
+    {
+#if defined(WEBRTC_ARCH_X86_FAMILY)
+        if (WebRtc_GetCPUInfo(kSSE2))
+        {
+            ComputeSpatialMetrics =
+                          &VPMContentAnalysis::ComputeSpatialMetrics_SSE2;
+            TemporalDiffMetric = &VPMContentAnalysis::TemporalDiffMetric_SSE2;
+        }
+#endif
+    }
+
+    Release();
+}
+
+VPMContentAnalysis::~VPMContentAnalysis()
+{
+    Release();
+}
+
+
+VideoContentMetrics*
+VPMContentAnalysis::ComputeContentMetrics(const VideoFrame* inputFrame)
+{
+    if (inputFrame == NULL)
+    {
+        return NULL;
+    }
+
+    // Init if needed (native dimension change)
+    if (_width != static_cast<int>(inputFrame->Width()) ||
+        _height != static_cast<int>(inputFrame->Height()))
+    {
+        if (VPM_OK != Initialize(static_cast<int>(inputFrame->Width()),
+                                 static_cast<int>(inputFrame->Height())))
+        {
+            return NULL;
+        }
+    }
+
+    _origFrame = inputFrame->Buffer();
+
+    // compute spatial metrics: 3 spatial prediction errors
+    (this->*ComputeSpatialMetrics)();
+
+    // compute motion metrics
+    if (_firstFrame == false)
+        ComputeMotionMetrics();
+
+    // saving current frame as previous one: Y only
+    memcpy(_prevFrame, _origFrame, _width * _height);
+
+    _firstFrame =  false;
+    _CAInit = true;
+
+    return ContentMetrics();
+}
+
+WebRtc_Word32
+VPMContentAnalysis::Release()
+{
+    if (_cMetrics != NULL)
+    {
+        delete _cMetrics;
+       _cMetrics = NULL;
+    }
+
+    if (_prevFrame != NULL)
+    {
+        delete [] _prevFrame;
+        _prevFrame = NULL;
+    }
+
+    _width = 0;
+    _height = 0;
+    _firstFrame = true;
+
+    return VPM_OK;
+}
+
+WebRtc_Word32
+VPMContentAnalysis::Initialize(int width, int height)
+{
+   _width = width;
+   _height = height;
+   _firstFrame = true;
+
+    // skip parameter: # of skipped rows: for complexity reduction
+    //  temporal also currently uses it for column reduction.
+    _skipNum = 1;
+
+    // use skipNum = 2 for 4CIF, WHD
+    if ( (_height >=  576) && (_width >= 704) )
+    {
+        _skipNum = 2;
+    }
+    // use skipNum = 4 for FULLL_HD images
+    if ( (_height >=  1080) && (_width >= 1920) )
+    {
+        _skipNum = 4;
+    }
+
+    if (_cMetrics != NULL)
+    {
+        delete _cMetrics;
+    }
+
+    if (_prevFrame != NULL)
+    {
+        delete [] _prevFrame;
+    }
+
+    // Spatial Metrics don't work on a border of 8.  Minimum processing
+    // block size is 16 pixels.  So make sure the width and height support this.
+    if (_width <= 32 || _height <= 32)
+    {
+        _CAInit = false;
+        return VPM_PARAMETER_ERROR;
+    }
+
+    _cMetrics = new VideoContentMetrics();
+    if (_cMetrics == NULL)
+    {
+        return VPM_MEMORY;
+    }
+
+    _prevFrame = new WebRtc_UWord8[_width * _height] ; // Y only
+    if (_prevFrame == NULL)
+    {
+        return VPM_MEMORY;
+    }
+
+    return VPM_OK;
+}
+
+
+// Compute motion metrics: magnitude over non-zero motion vectors,
+//  and size of zero cluster
+WebRtc_Word32
+VPMContentAnalysis::ComputeMotionMetrics()
+{
+
+    // Motion metrics: only one is derived from normalized
+    //  (MAD) temporal difference
+    (this->*TemporalDiffMetric)();
+
+    return VPM_OK;
+}
+
+// Normalized temporal difference (MAD): used as a motion level metric
+// Normalize MAD by spatial contrast: images with more contrast
+//  (pixel variance) likely have larger temporal difference
+// To reduce complexity, we compute the metric for a reduced set of points.
+WebRtc_Word32
+VPMContentAnalysis::TemporalDiffMetric_C()
+{
+    // size of original frame
+    int sizei = _height;
+    int sizej = _width;
+
+    WebRtc_UWord32 tempDiffSum = 0;
+    WebRtc_UWord32 pixelSum = 0;
+    WebRtc_UWord64 pixelSqSum = 0;
+
+    WebRtc_UWord32 numPixels = 0; // counter for # of pixels
+
+    const int width_end = ((_width - 2*_border) & -16) + _border;
+
+    for(int i = _border; i < sizei - _border; i += _skipNum)
+    {
+        for(int j = _border; j < width_end; j++)
+        {
+            numPixels += 1;
+            int ssn =  i * sizej + j;
+
+            WebRtc_UWord8 currPixel  = _origFrame[ssn];
+            WebRtc_UWord8 prevPixel  = _prevFrame[ssn];
+
+            tempDiffSum += (WebRtc_UWord32)
+                            abs((WebRtc_Word16)(currPixel - prevPixel));
+            pixelSum += (WebRtc_UWord32) currPixel;
+            pixelSqSum += (WebRtc_UWord64) (currPixel * currPixel);
+        }
+    }
+
+    // default
+    _motionMagnitude = 0.0f;
+
+    if (tempDiffSum == 0)
+    {
+        return VPM_OK;
+    }
+
+    // normalize over all pixels
+    float const tempDiffAvg = (float)tempDiffSum / (float)(numPixels);
+    float const pixelSumAvg = (float)pixelSum / (float)(numPixels);
+    float const pixelSqSumAvg = (float)pixelSqSum / (float)(numPixels);
+    float contrast = pixelSqSumAvg - (pixelSumAvg * pixelSumAvg);
+
+    if (contrast > 0.0)
+    {
+        contrast = sqrt(contrast);
+       _motionMagnitude = tempDiffAvg/contrast;
+    }
+
+    return VPM_OK;
+
+}
+
+// Compute spatial metrics:
+// To reduce complexity, we compute the metric for a reduced set of points.
+// The spatial metrics are rough estimates of the prediction error cost for
+//  each QM spatial mode: 2x2,1x2,2x1
+// The metrics are a simple estimate of the up-sampling prediction error,
+// estimated assuming sub-sampling for decimation (no filtering),
+// and up-sampling back up with simple bilinear interpolation.
+WebRtc_Word32
+VPMContentAnalysis::ComputeSpatialMetrics_C()
+{
+    //size of original frame
+    const int sizei = _height;
+    const int sizej = _width;
+
+    // pixel mean square average: used to normalize the spatial metrics
+    WebRtc_UWord32 pixelMSA = 0;
+
+    WebRtc_UWord32 spatialErrSum = 0;
+    WebRtc_UWord32 spatialErrVSum = 0;
+    WebRtc_UWord32 spatialErrHSum = 0;
+
+    // make sure work section is a multiple of 16
+    const int width_end = ((sizej - 2*_border) & -16) + _border;
+
+    for(int i = _border; i < sizei - _border; i += _skipNum)
+    {
+        for(int j = _border; j < width_end; j++)
+        {
+
+            int ssn1=  i * sizej + j;
+            int ssn2 = (i + 1) * sizej + j; // bottom
+            int ssn3 = (i - 1) * sizej + j; // top
+            int ssn4 = i * sizej + j + 1;   // right
+            int ssn5 = i * sizej + j - 1;   // left
+
+            WebRtc_UWord16 refPixel1  = _origFrame[ssn1] << 1;
+            WebRtc_UWord16 refPixel2  = _origFrame[ssn1] << 2;
+
+            WebRtc_UWord8 bottPixel = _origFrame[ssn2];
+            WebRtc_UWord8 topPixel = _origFrame[ssn3];
+            WebRtc_UWord8 rightPixel = _origFrame[ssn4];
+            WebRtc_UWord8 leftPixel = _origFrame[ssn5];
+
+            spatialErrSum  += (WebRtc_UWord32) abs((WebRtc_Word16)(refPixel2
+                            - (WebRtc_UWord16)(bottPixel + topPixel
+                                             + leftPixel + rightPixel)));
+            spatialErrVSum += (WebRtc_UWord32) abs((WebRtc_Word16)(refPixel1
+                            - (WebRtc_UWord16)(bottPixel + topPixel)));
+            spatialErrHSum += (WebRtc_UWord32) abs((WebRtc_Word16)(refPixel1
+                            - (WebRtc_UWord16)(leftPixel + rightPixel)));
+
+            pixelMSA += _origFrame[ssn1];
+        }
+    }
+
+    // normalize over all pixels
+    const float spatialErr  = (float)(spatialErrSum >> 2);
+    const float spatialErrH = (float)(spatialErrHSum >> 1);
+    const float spatialErrV = (float)(spatialErrVSum >> 1);
+    const float norm = (float)pixelMSA;
+
+    // 2X2:
+    _spatialPredErr = spatialErr / norm;
+
+    // 1X2:
+    _spatialPredErrH = spatialErrH / norm;
+
+    // 2X1:
+    _spatialPredErrV = spatialErrV / norm;
+
+    return VPM_OK;
+}
+
+VideoContentMetrics*
+VPMContentAnalysis::ContentMetrics()
+{
+    if (_CAInit == false)
+    {
+        return NULL;
+    }
+
+    _cMetrics->spatial_pred_err = _spatialPredErr;
+    _cMetrics->spatial_pred_err_h = _spatialPredErrH;
+    _cMetrics->spatial_pred_err_v = _spatialPredErrV;
+    // Motion metric: normalized temporal difference (MAD)
+    _cMetrics->motion_magnitude = _motionMagnitude;
+
+    return _cMetrics;
+
+}
+
+} // namespace
diff --git a/src/modules/video_processing/main/source/content_analysis.h b/src/modules/video_processing/main/source/content_analysis.h
new file mode 100644
index 0000000..f927a01
--- /dev/null
+++ b/src/modules/video_processing/main/source/content_analysis.h
@@ -0,0 +1,90 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef VPM_CONTENT_ANALYSIS_H
+#define VPM_CONTENT_ANALYSIS_H
+
+#include "typedefs.h"
+#include "module_common_types.h"
+#include "video_processing_defines.h"
+
+namespace webrtc {
+
+class VPMContentAnalysis
+{
+public:
+    // When |runtime_cpu_detection| is true, runtime selection of an optimized
+    // code path is allowed.
+    VPMContentAnalysis(bool runtime_cpu_detection);
+    ~VPMContentAnalysis();
+
+    // Initialize ContentAnalysis - should be called prior to
+    //  extractContentFeature
+    // Inputs:         width, height
+    // Return value:   0 if OK, negative value upon error
+    WebRtc_Word32 Initialize(int width, int height);
+
+    // Extract content Feature - main function of ContentAnalysis
+    // Input:           new frame
+    // Return value:    pointer to structure containing content Analysis
+    //                  metrics or NULL value upon error
+    VideoContentMetrics* ComputeContentMetrics(const VideoFrame* inputFrame);
+
+    // Release all allocated memory
+    // Output: 0 if OK, negative value upon error
+    WebRtc_Word32 Release();
+
+private:
+
+    // return motion metrics
+    VideoContentMetrics* ContentMetrics();
+
+    // Normalized temporal difference metric: for motion magnitude
+    typedef WebRtc_Word32 (VPMContentAnalysis::*TemporalDiffMetricFunc)();
+    TemporalDiffMetricFunc TemporalDiffMetric;
+    WebRtc_Word32 TemporalDiffMetric_C();
+
+    // Motion metric method: call 2 metrics (magnitude and size)
+    WebRtc_Word32 ComputeMotionMetrics();
+
+    // Spatial metric method: computes the 3 frame-average spatial
+    //  prediction errors (1x2,2x1,2x2)
+    typedef WebRtc_Word32 (VPMContentAnalysis::*ComputeSpatialMetricsFunc)();
+    ComputeSpatialMetricsFunc ComputeSpatialMetrics;
+    WebRtc_Word32 ComputeSpatialMetrics_C();
+
+#if defined(WEBRTC_ARCH_X86_FAMILY)
+    WebRtc_Word32 ComputeSpatialMetrics_SSE2();
+    WebRtc_Word32 TemporalDiffMetric_SSE2();
+#endif
+
+    const WebRtc_UWord8*       _origFrame;
+    WebRtc_UWord8*             _prevFrame;
+    int                        _width;
+    int                        _height;
+    int                        _skipNum;
+    int                        _border;
+
+    // Content Metrics:
+    // stores the local average of the metrics
+    float                  _motionMagnitude;    // motion class
+    float                  _spatialPredErr;     // spatial class
+    float                  _spatialPredErrH;    // spatial class
+    float                  _spatialPredErrV;    // spatial class
+    bool                   _firstFrame;
+    bool                   _CAInit;
+
+    VideoContentMetrics*   _cMetrics;
+
+}; // end of VPMContentAnalysis class definition
+
+} // namespace
+
+#endif
diff --git a/src/modules/video_processing/main/source/content_analysis_sse2.cc b/src/modules/video_processing/main/source/content_analysis_sse2.cc
new file mode 100644
index 0000000..f505850
--- /dev/null
+++ b/src/modules/video_processing/main/source/content_analysis_sse2.cc
@@ -0,0 +1,300 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "content_analysis.h"
+
+#include <emmintrin.h>
+#include <math.h>
+
+namespace webrtc {
+
+WebRtc_Word32
+VPMContentAnalysis::TemporalDiffMetric_SSE2()
+{
+    WebRtc_UWord32 numPixels = 0;       // counter for # of pixels
+
+    const WebRtc_UWord8* imgBufO = _origFrame + _border*_width + _border;
+    const WebRtc_UWord8* imgBufP = _prevFrame + _border*_width + _border;
+
+    const WebRtc_Word32 width_end = ((_width - 2*_border) & -16) + _border;
+
+    __m128i sad_64   = _mm_setzero_si128();
+    __m128i sum_64   = _mm_setzero_si128();
+    __m128i sqsum_64 = _mm_setzero_si128();
+    const __m128i z  = _mm_setzero_si128();
+
+    for(WebRtc_UWord16 i = 0; i < (_height - 2*_border); i += _skipNum)
+    {
+        __m128i sqsum_32  = _mm_setzero_si128();
+
+        const WebRtc_UWord8 *lineO = imgBufO;
+        const WebRtc_UWord8 *lineP = imgBufP;
+
+        // Work on 16 pixels at a time.  For HD content with a width of 1920
+        // this loop will run ~67 times (depending on border).  Maximum for
+        // abs(o-p) and sum(o) will be 255. _mm_sad_epu8 produces 2 64 bit
+        // results which are then accumulated.  There is no chance of
+        // rollover for these two accumulators.
+        // o*o will have a maximum of 255*255 = 65025.  This will roll over
+        // a 16 bit accumulator as 67*65025 > 65535, but will fit in a
+        // 32 bit accumulator.
+        for(WebRtc_UWord16 j = 0; j < width_end - _border; j += 16)
+        {
+            const __m128i o = _mm_loadu_si128((__m128i*)(lineO));
+            const __m128i p = _mm_loadu_si128((__m128i*)(lineP));
+
+            lineO += 16;
+            lineP += 16;
+
+            // abs pixel difference between frames
+            sad_64 = _mm_add_epi64 (sad_64, _mm_sad_epu8(o, p));
+
+            // sum of all pixels in frame
+            sum_64 = _mm_add_epi64 (sum_64, _mm_sad_epu8(o, z));
+
+            // squared sum of all pixels in frame
+            const __m128i olo = _mm_unpacklo_epi8(o,z);
+            const __m128i ohi = _mm_unpackhi_epi8(o,z);
+
+            const __m128i sqsum_32_lo = _mm_madd_epi16(olo, olo);
+            const __m128i sqsum_32_hi = _mm_madd_epi16(ohi, ohi);
+
+            sqsum_32 = _mm_add_epi32(sqsum_32, sqsum_32_lo);
+            sqsum_32 = _mm_add_epi32(sqsum_32, sqsum_32_hi);
+        }
+
+        // Add to 64 bit running sum as to not roll over.
+        sqsum_64 = _mm_add_epi64(sqsum_64,
+                                _mm_add_epi64(_mm_unpackhi_epi32(sqsum_32,z),
+                                              _mm_unpacklo_epi32(sqsum_32,z)));
+
+        imgBufO += _width * _skipNum;
+        imgBufP += _width * _skipNum;
+        numPixels += (width_end - _border);
+    }
+
+    __m128i sad_final_128;
+    __m128i sum_final_128;
+    __m128i sqsum_final_128;
+
+    // bring sums out of vector registers and into integer register
+    // domain, summing them along the way
+    _mm_store_si128 (&sad_final_128, sad_64);
+    _mm_store_si128 (&sum_final_128, sum_64);
+    _mm_store_si128 (&sqsum_final_128, sqsum_64);
+
+    WebRtc_UWord64 *sad_final_64 =
+                   reinterpret_cast<WebRtc_UWord64*>(&sad_final_128);
+    WebRtc_UWord64 *sum_final_64 =
+                   reinterpret_cast<WebRtc_UWord64*>(&sum_final_128);
+    WebRtc_UWord64 *sqsum_final_64 =
+                   reinterpret_cast<WebRtc_UWord64*>(&sqsum_final_128);
+
+    const WebRtc_UWord32 pixelSum = sum_final_64[0] + sum_final_64[1];
+    const WebRtc_UWord64 pixelSqSum = sqsum_final_64[0] + sqsum_final_64[1];
+    const WebRtc_UWord32 tempDiffSum = sad_final_64[0] + sad_final_64[1];
+
+    // default
+    _motionMagnitude = 0.0f;
+
+    if (tempDiffSum == 0)
+    {
+        return VPM_OK;
+    }
+
+    // normalize over all pixels
+    const float tempDiffAvg = (float)tempDiffSum / (float)(numPixels);
+    const float pixelSumAvg = (float)pixelSum / (float)(numPixels);
+    const float pixelSqSumAvg = (float)pixelSqSum / (float)(numPixels);
+    float contrast = pixelSqSumAvg - (pixelSumAvg * pixelSumAvg);
+
+    if (contrast > 0.0)
+    {
+        contrast = sqrt(contrast);
+       _motionMagnitude = tempDiffAvg/contrast;
+    }
+
+    return VPM_OK;
+}
+
+WebRtc_Word32
+VPMContentAnalysis::ComputeSpatialMetrics_SSE2()
+{
+    const WebRtc_UWord8* imgBuf = _origFrame + _border*_width;
+    const WebRtc_Word32 width_end = ((_width - 2*_border) & -16) + _border;
+
+    __m128i se_32  = _mm_setzero_si128();
+    __m128i sev_32 = _mm_setzero_si128();
+    __m128i seh_32 = _mm_setzero_si128();
+    __m128i msa_32 = _mm_setzero_si128();
+    const __m128i z = _mm_setzero_si128();
+
+    // Error is accumulated as a 32 bit value.  Looking at HD content with a
+    // height of 1080 lines, or about 67 macro blocks.  If the 16 bit row
+    // value is maxed out at 65529 for every row, 65529*1080 = 70777800, which
+    // will not roll over a 32 bit accumulator.
+    // _skipNum is also used to reduce the number of rows
+    for(WebRtc_Word32 i = 0; i < (_height - 2*_border); i += _skipNum)
+    {
+        __m128i se_16  = _mm_setzero_si128();
+        __m128i sev_16 = _mm_setzero_si128();
+        __m128i seh_16 = _mm_setzero_si128();
+        __m128i msa_16 = _mm_setzero_si128();
+
+        // Row error is accumulated as a 16 bit value.  There are 8
+        // accumulators.  Max value of a 16 bit number is 65529.  Looking
+        // at HD content, 1080p, has a width of 1920, 120 macro blocks.
+        // A mb at a time is processed at a time.  Absolute max error at
+        // a point would be abs(0-255+255+255+255) which equals 1020.
+        // 120*1020 = 122400.  The probability of hitting this is quite low
+        // on well behaved content.  A specially crafted image could roll over.
+        // _border could also be adjusted to concentrate on just the center of
+        // the images for an HD capture in order to reduce the possiblity of
+        // rollover.
+        const WebRtc_UWord8 *lineTop = imgBuf - _width + _border;
+        const WebRtc_UWord8 *lineCen = imgBuf + _border;
+        const WebRtc_UWord8 *lineBot = imgBuf + _width + _border;
+
+        for(WebRtc_Word32 j = 0; j < width_end - _border; j += 16)
+        {
+            const __m128i t = _mm_loadu_si128((__m128i*)(lineTop));
+            const __m128i l = _mm_loadu_si128((__m128i*)(lineCen - 1));
+            const __m128i c = _mm_loadu_si128((__m128i*)(lineCen));
+            const __m128i r = _mm_loadu_si128((__m128i*)(lineCen + 1));
+            const __m128i b = _mm_loadu_si128((__m128i*)(lineBot));
+
+            lineTop += 16;
+            lineCen += 16;
+            lineBot += 16;
+
+            // center pixel unpacked
+            __m128i clo = _mm_unpacklo_epi8(c,z);
+            __m128i chi = _mm_unpackhi_epi8(c,z);
+
+            // left right pixels unpacked and added together
+            const __m128i lrlo = _mm_add_epi16(_mm_unpacklo_epi8(l,z),
+                                               _mm_unpacklo_epi8(r,z));
+            const __m128i lrhi = _mm_add_epi16(_mm_unpackhi_epi8(l,z),
+                                               _mm_unpackhi_epi8(r,z));
+
+            // top & bottom pixels unpacked and added together
+            const __m128i tblo = _mm_add_epi16(_mm_unpacklo_epi8(t,z),
+                                               _mm_unpacklo_epi8(b,z));
+            const __m128i tbhi = _mm_add_epi16(_mm_unpackhi_epi8(t,z),
+                                               _mm_unpackhi_epi8(b,z));
+
+            // running sum of all pixels
+            msa_16 = _mm_add_epi16(msa_16, _mm_add_epi16(chi, clo));
+
+            clo = _mm_slli_epi16(clo, 1);
+            chi = _mm_slli_epi16(chi, 1);
+            const __m128i sevtlo = _mm_subs_epi16(clo, tblo);
+            const __m128i sevthi = _mm_subs_epi16(chi, tbhi);
+            const __m128i sehtlo = _mm_subs_epi16(clo, lrlo);
+            const __m128i sehthi = _mm_subs_epi16(chi, lrhi);
+
+            clo = _mm_slli_epi16(clo, 1);
+            chi = _mm_slli_epi16(chi, 1);
+            const __m128i setlo = _mm_subs_epi16(clo,
+                                                 _mm_add_epi16(lrlo, tblo));
+            const __m128i sethi = _mm_subs_epi16(chi,
+                                                 _mm_add_epi16(lrhi, tbhi));
+
+            // Add to 16 bit running sum
+            se_16  = _mm_add_epi16(se_16,
+                                   _mm_max_epi16(setlo,
+                                                 _mm_subs_epi16(z, setlo)));
+            se_16  = _mm_add_epi16(se_16,
+                                   _mm_max_epi16(sethi,
+                                                 _mm_subs_epi16(z, sethi)));
+            sev_16 = _mm_add_epi16(sev_16,
+                                   _mm_max_epi16(sevtlo,
+                                                 _mm_subs_epi16(z, sevtlo)));
+            sev_16 = _mm_add_epi16(sev_16,
+                                   _mm_max_epi16(sevthi,
+                                                 _mm_subs_epi16(z, sevthi)));
+            seh_16 = _mm_add_epi16(seh_16,
+                                   _mm_max_epi16(sehtlo,
+                                                 _mm_subs_epi16(z, sehtlo)));
+            seh_16 = _mm_add_epi16(seh_16,
+                                   _mm_max_epi16(sehthi,
+                                                 _mm_subs_epi16(z, sehthi)));
+        }
+
+        // Add to 32 bit running sum as to not roll over.
+        se_32  = _mm_add_epi32(se_32,
+                               _mm_add_epi32(_mm_unpackhi_epi16(se_16,z),
+                                             _mm_unpacklo_epi16(se_16,z)));
+        sev_32 = _mm_add_epi32(sev_32,
+                               _mm_add_epi32(_mm_unpackhi_epi16(sev_16,z),
+                                             _mm_unpacklo_epi16(sev_16,z)));
+        seh_32 = _mm_add_epi32(seh_32,
+                               _mm_add_epi32(_mm_unpackhi_epi16(seh_16,z),
+                                             _mm_unpacklo_epi16(seh_16,z)));
+        msa_32 = _mm_add_epi32(msa_32,
+                               _mm_add_epi32(_mm_unpackhi_epi16(msa_16,z),
+                                             _mm_unpacklo_epi16(msa_16,z)));
+
+        imgBuf += _width * _skipNum;
+    }
+
+    __m128i se_128;
+    __m128i sev_128;
+    __m128i seh_128;
+    __m128i msa_128;
+
+    // bring sums out of vector registers and into integer register
+    // domain, summing them along the way
+    _mm_store_si128 (&se_128,
+                     _mm_add_epi64(_mm_unpackhi_epi32(se_32,z),
+                                   _mm_unpacklo_epi32(se_32,z)));
+    _mm_store_si128 (&sev_128,
+                     _mm_add_epi64(_mm_unpackhi_epi32(sev_32,z),
+                                   _mm_unpacklo_epi32(sev_32,z)));
+    _mm_store_si128 (&seh_128,
+                     _mm_add_epi64(_mm_unpackhi_epi32(seh_32,z),
+                                   _mm_unpacklo_epi32(seh_32,z)));
+    _mm_store_si128 (&msa_128,
+                     _mm_add_epi64(_mm_unpackhi_epi32(msa_32,z),
+                                   _mm_unpacklo_epi32(msa_32,z)));
+
+    WebRtc_UWord64 *se_64 =
+                   reinterpret_cast<WebRtc_UWord64*>(&se_128);
+    WebRtc_UWord64 *sev_64 =
+                   reinterpret_cast<WebRtc_UWord64*>(&sev_128);
+    WebRtc_UWord64 *seh_64 =
+                   reinterpret_cast<WebRtc_UWord64*>(&seh_128);
+    WebRtc_UWord64 *msa_64 =
+                   reinterpret_cast<WebRtc_UWord64*>(&msa_128);
+
+    const WebRtc_UWord32 spatialErrSum  = se_64[0] + se_64[1];
+    const WebRtc_UWord32 spatialErrVSum = sev_64[0] + sev_64[1];
+    const WebRtc_UWord32 spatialErrHSum = seh_64[0] + seh_64[1];
+    const WebRtc_UWord32 pixelMSA = msa_64[0] + msa_64[1];
+
+    // normalize over all pixels
+    const float spatialErr  = (float)(spatialErrSum >> 2);
+    const float spatialErrH = (float)(spatialErrHSum >> 1);
+    const float spatialErrV = (float)(spatialErrVSum >> 1);
+    const float norm = (float)pixelMSA;
+
+    // 2X2:
+    _spatialPredErr = spatialErr / norm;
+
+    // 1X2:
+    _spatialPredErrH = spatialErrH / norm;
+
+    // 2X1:
+    _spatialPredErrV = spatialErrV / norm;
+
+    return VPM_OK;
+}
+
+}  // namespace webrtc
diff --git a/src/modules/video_processing/main/source/deflickering.cc b/src/modules/video_processing/main/source/deflickering.cc
new file mode 100644
index 0000000..d0b8d3b
--- /dev/null
+++ b/src/modules/video_processing/main/source/deflickering.cc
@@ -0,0 +1,445 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+#include <math.h>
+#include <stdlib.h>
+
+#include "deflickering.h"
+#include "trace.h"
+#include "signal_processing_library.h"
+#include "sort.h"
+
+namespace webrtc {
+
+// Detection constants
+enum { kFrequencyDeviation = 39 };      // (Q4) Maximum allowed deviation for detection
+enum { kMinFrequencyToDetect = 32 };    // (Q4) Minimum frequency that can be detected
+enum { kNumFlickerBeforeDetect = 2 };   // Number of flickers before we accept detection
+enum { kMeanValueScaling = 4 };         // (Q4) In power of 2
+enum { kZeroCrossingDeadzone = 10 };    // Deadzone region in terms of pixel values
+
+// Deflickering constants
+// Compute the quantiles over 1 / DownsamplingFactor of the image.
+enum { kDownsamplingFactor = 8 };
+enum { kLog2OfDownsamplingFactor = 3 };
+
+// To generate in Matlab:
+// >> probUW16 = round(2^11 * [0.05,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,0.95,0.97]);
+// >> fprintf('%d, ', probUW16)
+// Resolution reduced to avoid overflow when multiplying with the (potentially) large 
+// number of pixels.
+const WebRtc_UWord16 VPMDeflickering::_probUW16[kNumProbs] =
+    {102, 205, 410, 614, 819, 1024, 1229, 1434, 1638, 1843, 1946, 1987}; // <Q11>
+
+// To generate in Matlab:
+// >> numQuants = 14; maxOnlyLength = 5;
+// >> weightUW16 = round(2^15 * [linspace(0.5, 1.0, numQuants - maxOnlyLength)]);
+// >> fprintf('%d, %d,\n ', weightUW16);
+const WebRtc_UWord16 VPMDeflickering::_weightUW16[kNumQuants - kMaxOnlyLength] =
+    {16384, 18432, 20480, 22528, 24576, 26624, 28672, 30720, 32768}; // <Q15>
+ 
+VPMDeflickering::VPMDeflickering() :
+    _id(0)
+{
+    Reset();
+}
+
+VPMDeflickering::~VPMDeflickering()
+{
+}
+
+WebRtc_Word32
+VPMDeflickering::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    return 0;
+}
+
+void
+VPMDeflickering::Reset()
+{
+    _meanBufferLength = 0;
+    _detectionState = 0;
+    _frameRate = 0;
+
+    memset(_meanBuffer, 0, sizeof(WebRtc_Word32) * kMeanBufferLength);
+    memset(_timestampBuffer, 0, sizeof(WebRtc_Word32) * kMeanBufferLength);
+
+    // Initialize the history with a uniformly distributed histogram
+    _quantHistUW8[0][0] = 0;
+    _quantHistUW8[0][kNumQuants - 1] = 255;
+    for (WebRtc_Word32 i = 0; i < kNumProbs; i++)
+    {
+        _quantHistUW8[0][i + 1] = static_cast<WebRtc_UWord8>((WEBRTC_SPL_UMUL_16_16(
+            _probUW16[i], 255) + (1 << 10)) >> 11); // Unsigned round. <Q0>
+    }
+    
+    for (WebRtc_Word32 i = 1; i < kFrameHistorySize; i++)
+    {
+        memcpy(_quantHistUW8[i], _quantHistUW8[0], sizeof(WebRtc_UWord8) * kNumQuants);
+    }
+}
+
+WebRtc_Word32
+VPMDeflickering::ProcessFrame(WebRtc_UWord8* frame,
+                              const WebRtc_UWord32 width,
+                              const WebRtc_UWord32 height,
+                              const WebRtc_UWord32 timestamp,
+                              VideoProcessingModule::FrameStats& stats)
+{
+    WebRtc_UWord32 frameMemory;
+    WebRtc_UWord8 quantUW8[kNumQuants];
+    WebRtc_UWord8 maxQuantUW8[kNumQuants];
+    WebRtc_UWord8 minQuantUW8[kNumQuants];
+    WebRtc_UWord16 targetQuantUW16[kNumQuants];
+    WebRtc_UWord16 incrementUW16;
+    WebRtc_UWord8 mapUW8[256];
+
+    WebRtc_UWord16 tmpUW16;
+    WebRtc_UWord32 tmpUW32;
+
+    if (frame == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Null frame pointer");
+        return VPM_GENERAL_ERROR;
+    }
+
+    // Stricter height check due to subsampling size calculation below.
+    if (width == 0 || height < 2)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame size");
+        return VPM_GENERAL_ERROR;
+    }
+
+    if (!VideoProcessingModule::ValidFrameStats(stats))
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame stats");
+        return VPM_GENERAL_ERROR;
+    }
+
+    if (PreDetection(timestamp, stats) == -1)
+    {
+        return VPM_GENERAL_ERROR;
+    }
+
+    // Flicker detection
+    WebRtc_Word32 detFlicker = DetectFlicker();
+    if (detFlicker < 0)
+    { // Error
+        return VPM_GENERAL_ERROR;
+    }
+    else if (detFlicker != 1)
+    {
+        return 0;
+    }
+
+    // Size of luminance component
+    const WebRtc_UWord32 ySize = height * width;
+
+    const WebRtc_UWord32 ySubSize = width * (((height - 1) >>
+        kLog2OfDownsamplingFactor) + 1);
+    WebRtc_UWord8* ySorted = new WebRtc_UWord8[ySubSize];
+    WebRtc_UWord32 sortRowIdx = 0;
+    for (WebRtc_UWord32 i = 0; i < height; i += kDownsamplingFactor)
+    {
+        memcpy(ySorted + sortRowIdx * width, frame + i * width, width);
+        sortRowIdx++;
+    }
+    
+    webrtc::Sort(ySorted, ySubSize, webrtc::TYPE_UWord8);
+
+    WebRtc_UWord32 probIdxUW32 = 0;
+    quantUW8[0] = 0;
+    quantUW8[kNumQuants - 1] = 255;
+
+    // Ensure we won't get an overflow below.
+    // In practice, the number of subsampled pixels will not become this large.
+    if (ySubSize > (1 << 21) - 1)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, 
+            "Subsampled number of pixels too large");
+        return -1;
+    }
+
+    for (WebRtc_Word32 i = 0; i < kNumProbs; i++)
+    {
+        probIdxUW32 = WEBRTC_SPL_UMUL_32_16(ySubSize, _probUW16[i]) >> 11; // <Q0>
+        quantUW8[i + 1] = ySorted[probIdxUW32];
+    }
+
+    delete [] ySorted;
+    ySorted = NULL;
+
+    // Shift history for new frame.
+    memmove(_quantHistUW8[1], _quantHistUW8[0], (kFrameHistorySize - 1) * kNumQuants *
+        sizeof(WebRtc_UWord8));
+    // Store current frame in history.
+    memcpy(_quantHistUW8[0], quantUW8, kNumQuants * sizeof(WebRtc_UWord8));
+
+    // We use a frame memory equal to the ceiling of half the frame rate to ensure we
+    // capture an entire period of flicker.
+    frameMemory = (_frameRate + (1 << 5)) >> 5; // Unsigned ceiling. <Q0>
+                                                // _frameRate in Q4.
+    if (frameMemory > kFrameHistorySize)
+    {
+        frameMemory = kFrameHistorySize;
+    }
+
+    // Get maximum and minimum.
+    for (WebRtc_Word32 i = 0; i < kNumQuants; i++)
+    {
+        maxQuantUW8[i] = 0;
+        minQuantUW8[i] = 255;
+        for (WebRtc_UWord32 j = 0; j < frameMemory; j++)
+        {
+            if (_quantHistUW8[j][i] > maxQuantUW8[i])
+            {
+                maxQuantUW8[i] = _quantHistUW8[j][i];
+            }
+
+            if (_quantHistUW8[j][i] < minQuantUW8[i])
+            {
+                minQuantUW8[i] = _quantHistUW8[j][i];
+            }
+        }
+    }
+    
+    // Get target quantiles.
+    for (WebRtc_Word32 i = 0; i < kNumQuants - kMaxOnlyLength; i++)
+    {
+        targetQuantUW16[i] = static_cast<WebRtc_UWord16>((WEBRTC_SPL_UMUL_16_16(
+            _weightUW16[i], maxQuantUW8[i]) + WEBRTC_SPL_UMUL_16_16((1 << 15) -
+            _weightUW16[i], minQuantUW8[i])) >> 8); // <Q7>
+    }
+
+    for (WebRtc_Word32 i = kNumQuants - kMaxOnlyLength; i < kNumQuants; i++)
+    {
+        targetQuantUW16[i] = ((WebRtc_UWord16)maxQuantUW8[i]) << 7;
+    }
+
+    // Compute the map from input to output pixels.
+    WebRtc_UWord16 mapUW16; // <Q7>
+    for (WebRtc_Word32 i = 1; i < kNumQuants; i++)
+    {
+        // As quant and targetQuant are limited to UWord8, we're safe to use Q7 here.
+        tmpUW32 = static_cast<WebRtc_UWord32>(targetQuantUW16[i] -
+            targetQuantUW16[i - 1]); // <Q7>
+        tmpUW16 = static_cast<WebRtc_UWord16>(quantUW8[i] - quantUW8[i - 1]); // <Q0>
+
+        if (tmpUW16 > 0)
+        {
+            incrementUW16 = static_cast<WebRtc_UWord16>(WebRtcSpl_DivU32U16(tmpUW32,
+                tmpUW16)); // <Q7>
+         }
+        else
+        {
+            // The value is irrelevant; the loop below will only iterate once.
+            incrementUW16 = 0;
+        }
+
+        mapUW16 = targetQuantUW16[i - 1];
+        for (WebRtc_UWord32 j = quantUW8[i - 1]; j < (WebRtc_UWord32)(quantUW8[i] + 1); j++)
+        {
+            mapUW8[j] = (WebRtc_UWord8)((mapUW16 + (1 << 6)) >> 7); // Unsigned round. <Q0>
+            mapUW16 += incrementUW16;
+        }
+    }
+
+    // Map to the output frame.
+    for (WebRtc_UWord32 i = 0; i < ySize; i++)
+    {
+        frame[i] = mapUW8[frame[i]];
+    }
+
+    // Frame was altered, so reset stats.
+    VideoProcessingModule::ClearFrameStats(stats);
+
+    return 0;
+}
+
+/**
+   Performs some pre-detection operations. Must be called before 
+   DetectFlicker().
+
+   \param[in] timestamp Timestamp of the current frame.
+   \param[in] stats     Statistics of the current frame.
+ 
+   \return 0: Success\n
+           2: Detection not possible due to flickering frequency too close to
+              zero.\n
+          -1: Error
+*/
+WebRtc_Word32
+VPMDeflickering::PreDetection(const WebRtc_UWord32 timestamp,
+                              const VideoProcessingModule::FrameStats& stats)
+{
+    WebRtc_Word32 meanVal; // Mean value of frame (Q4)
+    WebRtc_UWord32 frameRate = 0;
+    WebRtc_Word32 meanBufferLength; // Temp variable
+
+    meanVal = ((stats.sum << kMeanValueScaling) / stats.numPixels);
+    /* Update mean value buffer.
+     * This should be done even though we might end up in an unreliable detection.
+     */
+    memmove(_meanBuffer + 1, _meanBuffer, (kMeanBufferLength - 1) * sizeof(WebRtc_Word32));
+    _meanBuffer[0] = meanVal;
+
+    /* Update timestamp buffer.
+     * This should be done even though we might end up in an unreliable detection.
+     */
+    memmove(_timestampBuffer + 1, _timestampBuffer, (kMeanBufferLength - 1) *
+        sizeof(WebRtc_UWord32));
+    _timestampBuffer[0] = timestamp;
+
+    /* Compute current frame rate (Q4) */
+    if (_timestampBuffer[kMeanBufferLength - 1] != 0)
+    {
+        frameRate = ((90000 << 4) * (kMeanBufferLength - 1));
+        frameRate /= (_timestampBuffer[0] - _timestampBuffer[kMeanBufferLength - 1]);
+    }else if (_timestampBuffer[1] != 0)
+    {
+        frameRate = (90000 << 4) / (_timestampBuffer[0] - _timestampBuffer[1]);
+    }
+
+    /* Determine required size of mean value buffer (_meanBufferLength) */
+    if (frameRate == 0) {
+        meanBufferLength = 1;
+    }
+    else {
+        meanBufferLength = (kNumFlickerBeforeDetect * frameRate) / kMinFrequencyToDetect;
+    }
+    /* Sanity check of buffer length */
+    if (meanBufferLength >= kMeanBufferLength)
+    {
+        /* Too long buffer. The flickering frequency is too close to zero, which
+         * makes the estimation unreliable.
+         */
+        _meanBufferLength = 0;
+        return 2;
+    }
+    _meanBufferLength = meanBufferLength;
+
+    if ((_timestampBuffer[_meanBufferLength - 1] != 0) && (_meanBufferLength != 1))
+    {
+        frameRate = ((90000 << 4) * (_meanBufferLength - 1));
+        frameRate /= (_timestampBuffer[0] - _timestampBuffer[_meanBufferLength - 1]);
+    }else if (_timestampBuffer[1] != 0)
+    {
+        frameRate = (90000 << 4) / (_timestampBuffer[0] - _timestampBuffer[1]);
+    }
+    _frameRate = frameRate;
+
+    return 0;
+}
+
+/**
+   This function detects flicker in the video stream. As a side effect the mean value
+   buffer is updated with the new mean value.
+ 
+   \return 0: No flickering detected\n
+           1: Flickering detected\n
+           2: Detection not possible due to unreliable frequency interval
+          -1: Error
+*/
+WebRtc_Word32 VPMDeflickering::DetectFlicker()
+{
+    /* Local variables */
+    WebRtc_UWord32  i;
+    WebRtc_Word32  freqEst;       // (Q4) Frequency estimate to base detection upon
+    WebRtc_Word32  retVal = -1;
+
+    /* Sanity check for _meanBufferLength */
+    if (_meanBufferLength < 2)
+    {
+        /* Not possible to estimate frequency */
+        return(2);
+    }
+    /* Count zero crossings with a dead zone to be robust against noise.
+     * If the noise std is 2 pixel this corresponds to about 95% confidence interval.
+     */
+    WebRtc_Word32 deadzone = (kZeroCrossingDeadzone << kMeanValueScaling); // Q4
+    WebRtc_Word32 meanOfBuffer = 0; // Mean value of mean value buffer
+    WebRtc_Word32 numZeros     = 0; // Number of zeros that cross the deadzone
+    WebRtc_Word32 cntState     = 0; // State variable for zero crossing regions
+    WebRtc_Word32 cntStateOld  = 0; // Previous state variable for zero crossing regions
+
+    for (i = 0; i < _meanBufferLength; i++)
+    {
+        meanOfBuffer += _meanBuffer[i];
+    }
+    meanOfBuffer += (_meanBufferLength >> 1); // Rounding, not truncation
+    meanOfBuffer /= _meanBufferLength;
+
+    /* Count zero crossings */
+    cntStateOld = (_meanBuffer[0] >= (meanOfBuffer + deadzone));
+    cntStateOld -= (_meanBuffer[0] <= (meanOfBuffer - deadzone));
+    for (i = 1; i < _meanBufferLength; i++)
+    {
+        cntState = (_meanBuffer[i] >= (meanOfBuffer + deadzone));
+        cntState -= (_meanBuffer[i] <= (meanOfBuffer - deadzone));
+        if (cntStateOld == 0)
+        {
+            cntStateOld = -cntState;
+        }
+        if (((cntState + cntStateOld) == 0) && (cntState != 0))
+        {
+            numZeros++;
+            cntStateOld = cntState;
+        }
+    }
+    /* END count zero crossings */
+
+    /* Frequency estimation according to:
+     * freqEst = numZeros * frameRate / 2 / _meanBufferLength;
+     *
+     * Resolution is set to Q4
+     */
+    freqEst = ((numZeros * 90000) << 3);
+    freqEst /= (_timestampBuffer[0] - _timestampBuffer[_meanBufferLength - 1]);
+
+    /* Translate frequency estimate to regions close to 100 and 120 Hz */
+    WebRtc_UWord8 freqState = 0; // Current translation state;
+                               // (0) Not in interval,
+                               // (1) Within valid interval,
+                               // (2) Out of range
+    WebRtc_Word32 freqAlias = freqEst;
+    if (freqEst > kMinFrequencyToDetect)
+    {
+        WebRtc_UWord8 aliasState = 1;
+        while(freqState == 0)
+        {
+            /* Increase frequency */
+            freqAlias += (aliasState * _frameRate);
+            freqAlias += ((freqEst << 1) * (1 - (aliasState << 1)));
+            /* Compute state */
+            freqState = (abs(freqAlias - (100 << 4)) <= kFrequencyDeviation);
+            freqState += (abs(freqAlias - (120 << 4)) <= kFrequencyDeviation);
+            freqState += 2 * (freqAlias > ((120 << 4) + kFrequencyDeviation));
+            /* Switch alias state */
+            aliasState++;
+            aliasState &= 0x01;
+        }
+    }
+    /* Is frequency estimate within detection region? */
+    if (freqState == 1)
+    {
+        retVal = 1;
+    }else if (freqState == 0)
+    {
+        retVal = 2;
+    }else
+    {
+        retVal = 0;
+    }
+    return retVal;
+}
+
+} //namespace
diff --git a/src/modules/video_processing/main/source/deflickering.h b/src/modules/video_processing/main/source/deflickering.h
new file mode 100644
index 0000000..ee5f90d
--- /dev/null
+++ b/src/modules/video_processing/main/source/deflickering.h
@@ -0,0 +1,69 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * deflickering.h
+ */
+
+#ifndef VPM_DEFLICKERING_H
+#define VPM_DEFLICKERING_H
+
+#include "typedefs.h"
+#include "video_processing.h"
+
+#include <cstring>  // NULL
+
+namespace webrtc {
+
+class VPMDeflickering
+{
+public:
+    VPMDeflickering();
+    ~VPMDeflickering();
+
+    WebRtc_Word32 ChangeUniqueId(WebRtc_Word32 id);
+
+    void Reset();
+
+    WebRtc_Word32 ProcessFrame(WebRtc_UWord8* frame,
+                             WebRtc_UWord32 width,
+                             WebRtc_UWord32 height,
+                             WebRtc_UWord32 timestamp,
+                             VideoProcessingModule::FrameStats& stats);
+private:
+    WebRtc_Word32 PreDetection(WebRtc_UWord32 timestamp,
+                             const VideoProcessingModule::FrameStats& stats);
+
+    WebRtc_Word32 DetectFlicker();
+
+    enum { kMeanBufferLength = 32 };
+    enum { kFrameHistorySize = 15 };
+    enum { kNumProbs = 12 };
+    enum { kNumQuants = kNumProbs + 2 };
+    enum { kMaxOnlyLength = 5 };
+
+    WebRtc_Word32 _id;
+
+    WebRtc_UWord32  _meanBufferLength;
+    WebRtc_UWord8   _detectionState;    // 0: No flickering
+                                      // 1: Flickering detected
+                                      // 2: In flickering
+    WebRtc_Word32    _meanBuffer[kMeanBufferLength];
+    WebRtc_UWord32   _timestampBuffer[kMeanBufferLength];
+    WebRtc_UWord32   _frameRate;
+    static const WebRtc_UWord16 _probUW16[kNumProbs];
+    static const WebRtc_UWord16 _weightUW16[kNumQuants - kMaxOnlyLength];
+    WebRtc_UWord8 _quantHistUW8[kFrameHistorySize][kNumQuants];
+};
+
+} //namespace
+
+#endif // VPM_DEFLICKERING_H
+
diff --git a/src/modules/video_processing/main/source/denoising.cc b/src/modules/video_processing/main/source/denoising.cc
new file mode 100644
index 0000000..d8931c9
--- /dev/null
+++ b/src/modules/video_processing/main/source/denoising.cc
@@ -0,0 +1,180 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "denoising.h"
+#include "trace.h"
+
+#include <cstring>
+
+namespace webrtc {
+
+enum { kSubsamplingTime = 0 };       // Down-sampling in time (unit: number of frames)
+enum { kSubsamplingWidth = 0 };      // Sub-sampling in width (unit: power of 2)
+enum { kSubsamplingHeight = 0 };     // Sub-sampling in height (unit: power of 2)
+enum { kDenoiseFiltParam = 179 };    // (Q8) De-noising filter parameter
+enum { kDenoiseFiltParamRec = 77 };  // (Q8) 1 - filter parameter
+enum { kDenoiseThreshold = 19200 };  // (Q8) De-noising threshold level
+
+VPMDenoising::VPMDenoising() :
+    _id(0),
+    _moment1(NULL),
+    _moment2(NULL)
+{
+    Reset();
+}
+
+VPMDenoising::~VPMDenoising()
+{
+    if (_moment1)
+    {
+        delete [] _moment1;
+        _moment1 = NULL;
+    }
+    
+    if (_moment2)
+    {
+        delete [] _moment2;
+        _moment2 = NULL;
+    }
+}
+
+WebRtc_Word32
+VPMDenoising::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    return VPM_OK;
+}
+
+void
+VPMDenoising::Reset()
+{
+    _frameSize = 0;
+    _denoiseFrameCnt = 0;
+
+    if (_moment1)
+    {
+        delete [] _moment1;
+        _moment1 = NULL;
+    }
+    
+    if (_moment2)
+    {
+        delete [] _moment2;
+        _moment2 = NULL;
+    }
+}
+
+WebRtc_Word32
+VPMDenoising::ProcessFrame(WebRtc_UWord8* frame,
+                           const WebRtc_UWord32 width,
+                           const WebRtc_UWord32 height)
+{
+    WebRtc_Word32     thevar;
+    WebRtc_UWord32    k;
+    WebRtc_UWord32    jsub, ksub;
+    WebRtc_Word32     diff0;
+    WebRtc_UWord32    tmpMoment1;
+    WebRtc_UWord32    tmpMoment2;
+    WebRtc_UWord32    tmp;
+    WebRtc_Word32     numPixelsChanged = 0;
+
+    if (frame == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Null frame pointer");
+        return VPM_GENERAL_ERROR;
+    }
+
+    if (width == 0 || height == 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, _id, "Invalid frame size");
+        return VPM_GENERAL_ERROR;
+    }
+
+    /* Size of luminance component */
+    const WebRtc_UWord32 ysize  = height * width;
+
+    /* Initialization */
+    if (ysize != _frameSize)
+    {
+        delete [] _moment1;
+        _moment1 = NULL;
+
+        delete [] _moment2;
+        _moment2 = NULL;
+    }
+    _frameSize = ysize;
+
+    if (!_moment1)
+    {
+        _moment1 = new WebRtc_UWord32[ysize];
+        memset(_moment1, 0, sizeof(WebRtc_UWord32)*ysize);
+    }
+    
+    if (!_moment2)
+    {
+        _moment2 = new WebRtc_UWord32[ysize];
+        memset(_moment2, 0, sizeof(WebRtc_UWord32)*ysize);
+    }
+
+    /* Apply de-noising on each pixel, but update variance sub-sampled */
+    for (WebRtc_UWord32 i = 0; i < height; i++)
+    { // Collect over height
+        k = i * width;
+        ksub = ((i >> kSubsamplingHeight) << kSubsamplingHeight) * width;
+        for (WebRtc_UWord32 j = 0; j < width; j++)
+        { // Collect over width
+            jsub = ((j >> kSubsamplingWidth) << kSubsamplingWidth);
+            /* Update mean value for every pixel and every frame */
+            tmpMoment1 = _moment1[k + j];
+            tmpMoment1 *= kDenoiseFiltParam; // Q16
+            tmpMoment1 += ((kDenoiseFiltParamRec * ((WebRtc_UWord32)frame[k + j])) << 8);
+            tmpMoment1 >>= 8; // Q8
+            _moment1[k + j] = tmpMoment1;
+
+            tmpMoment2 = _moment2[ksub + jsub];
+            if ((ksub == k) && (jsub == j) && (_denoiseFrameCnt == 0))
+            {
+                tmp = ((WebRtc_UWord32)frame[k + j] * (WebRtc_UWord32)frame[k + j]);
+                tmpMoment2 *= kDenoiseFiltParam; // Q16
+                tmpMoment2 += ((kDenoiseFiltParamRec * tmp)<<8);
+                tmpMoment2 >>= 8; // Q8
+            }
+            _moment2[k + j] = tmpMoment2;
+            /* Current event = deviation from mean value */
+            diff0 = ((WebRtc_Word32)frame[k + j] << 8) - _moment1[k + j];
+            /* Recent events = variance (variations over time) */
+            thevar = _moment2[k + j];
+            thevar -= ((_moment1[k + j] * _moment1[k + j]) >> 8);
+            /***************************************************************************
+             * De-noising criteria, i.e., when should we replace a pixel by its mean
+             *
+             * 1) recent events are minor
+             * 2) current events are minor
+             ***************************************************************************/
+            if ((thevar < kDenoiseThreshold)
+                && ((diff0 * diff0 >> 8) < kDenoiseThreshold))
+            { // Replace with mean
+                frame[k + j] = (WebRtc_UWord8)(_moment1[k + j] >> 8);
+                numPixelsChanged++;
+            }
+        }
+    }
+
+    /* Update frame counter */
+    _denoiseFrameCnt++;
+    if (_denoiseFrameCnt > kSubsamplingTime)
+    {
+        _denoiseFrameCnt = 0;
+    }
+
+    return numPixelsChanged;
+}
+
+} //namespace
diff --git a/src/modules/video_processing/main/source/denoising.h b/src/modules/video_processing/main/source/denoising.h
new file mode 100644
index 0000000..f53157c
--- /dev/null
+++ b/src/modules/video_processing/main/source/denoising.h
@@ -0,0 +1,48 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * denoising.h
+ */
+#ifndef VPM_DENOISING_H
+#define VPM_DENOISING_H
+
+#include "typedefs.h"
+#include "video_processing.h"
+
+namespace webrtc {
+
+class VPMDenoising
+{
+public:
+    VPMDenoising();
+    ~VPMDenoising();
+
+    WebRtc_Word32 ChangeUniqueId(WebRtc_Word32 id);
+
+    void Reset();
+
+    WebRtc_Word32 ProcessFrame(WebRtc_UWord8* frame,
+                             WebRtc_UWord32 width,
+                             WebRtc_UWord32 height);
+
+private:
+    WebRtc_Word32 _id;
+
+    WebRtc_UWord32*   _moment1;           // (Q8) First order moment (mean)
+    WebRtc_UWord32*   _moment2;           // (Q8) Second order moment
+    WebRtc_UWord32    _frameSize;         // Size (# of pixels) of frame
+    WebRtc_Word32     _denoiseFrameCnt;   // Counter for subsampling in time
+};
+
+} //namespace
+
+#endif // VPM_DENOISING_H
+  
diff --git a/src/modules/video_processing/main/source/frame_preprocessor.cc b/src/modules/video_processing/main/source/frame_preprocessor.cc
new file mode 100644
index 0000000..c317528
--- /dev/null
+++ b/src/modules/video_processing/main/source/frame_preprocessor.cc
@@ -0,0 +1,189 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "frame_preprocessor.h"
+#include "trace.h"
+
+namespace webrtc {
+
+VPMFramePreprocessor::VPMFramePreprocessor():
+_id(0),
+_contentMetrics(NULL),
+_maxFrameRate(0),
+_resampledFrame(),
+_enableCA(false),
+_frameCnt(0)
+{
+    _spatialResampler = new VPMSimpleSpatialResampler();
+    _ca = new VPMContentAnalysis(true);
+    _vd = new VPMVideoDecimator();
+}
+
+VPMFramePreprocessor::~VPMFramePreprocessor()
+{
+    Reset();
+    delete _spatialResampler;
+    delete _ca;
+    delete _vd;
+    _resampledFrame.Free(); // is this needed?
+}
+
+WebRtc_Word32
+VPMFramePreprocessor::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    _id = id;
+    return VPM_OK;
+}
+
+void 
+VPMFramePreprocessor::Reset()
+{
+    _ca->Release();
+    _vd->Reset();
+    _contentMetrics = NULL;
+    _spatialResampler->Reset();
+    _enableCA = false;
+    _frameCnt = 0;
+}
+	
+    
+void 
+VPMFramePreprocessor::EnableTemporalDecimation(bool enable)
+{
+    _vd->EnableTemporalDecimation(enable);
+}
+void
+VPMFramePreprocessor::EnableContentAnalysis(bool enable)
+{
+    _enableCA = enable;
+}
+
+void 
+VPMFramePreprocessor::SetInputFrameResampleMode(VideoFrameResampling resamplingMode)
+{
+    _spatialResampler->SetInputFrameResampleMode(resamplingMode);
+}
+
+    
+WebRtc_Word32
+VPMFramePreprocessor::SetMaxFrameRate(WebRtc_UWord32 maxFrameRate)
+{
+    if (maxFrameRate == 0)
+    {
+        return VPM_PARAMETER_ERROR;
+    }
+    //Max allowed frame rate
+    _maxFrameRate = maxFrameRate;
+
+    return _vd->SetMaxFrameRate(maxFrameRate);
+}
+    
+
+WebRtc_Word32
+VPMFramePreprocessor::SetTargetResolution(WebRtc_UWord32 width, WebRtc_UWord32 height, WebRtc_UWord32 frameRate)
+{
+    if ( (width == 0) || (height == 0) || (frameRate == 0))
+    {
+        return VPM_PARAMETER_ERROR;
+    }
+    WebRtc_Word32 retVal = 0;
+    retVal = _spatialResampler->SetTargetFrameSize(width, height);
+    if (retVal < 0)
+    {
+        return retVal;
+    }
+    retVal = _vd->SetTargetFrameRate(frameRate);
+    if (retVal < 0)
+    {
+        return retVal;
+    }
+
+	  return VPM_OK;
+}
+
+void 
+VPMFramePreprocessor::UpdateIncomingFrameRate()
+{
+    _vd->UpdateIncomingFrameRate();
+}
+
+WebRtc_UWord32
+VPMFramePreprocessor::DecimatedFrameRate()
+{
+    return _vd->DecimatedFrameRate();
+}
+
+
+WebRtc_UWord32
+VPMFramePreprocessor::DecimatedWidth() const
+{
+    return _spatialResampler->TargetWidth();
+}
+
+
+WebRtc_UWord32
+VPMFramePreprocessor::DecimatedHeight() const
+{
+    return _spatialResampler->TargetHeight();
+}
+
+
+WebRtc_Word32
+VPMFramePreprocessor::PreprocessFrame(const VideoFrame* frame, VideoFrame** processedFrame)
+{
+    if (frame == NULL || frame->Height() == 0 || frame->Width() == 0)
+    {
+        return VPM_PARAMETER_ERROR;
+    }
+
+    _vd->UpdateIncomingFrameRate();
+
+    if (_vd->DropFrame())
+    {
+        WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, _id, "Drop frame due to frame rate");
+        return 1;  // drop 1 frame
+    }
+
+    // Resizing incoming frame if needed.
+    // Note that we must make a copy of it.
+    // We are not allowed to resample the input frame.
+    *processedFrame = NULL;
+    if (_spatialResampler->ApplyResample(frame->Width(), frame->Height()))  {
+      WebRtc_Word32 ret = _spatialResampler->ResampleFrame(*frame, _resampledFrame);
+      if (ret != VPM_OK)
+        return ret;
+      *processedFrame = &_resampledFrame;
+    }
+
+    // Perform content analysis on the frame to be encoded.
+    if (_enableCA)
+    {
+        // Compute new metrics every |kSkipFramesCA| frames, starting with
+        // the first frame.
+        if (_frameCnt % kSkipFrameCA == 0) {
+          if (*processedFrame == NULL)  {
+            _contentMetrics = _ca->ComputeContentMetrics(frame);
+          } else {
+            _contentMetrics = _ca->ComputeContentMetrics(&_resampledFrame);
+          }
+        }
+        ++_frameCnt;
+    }
+    return VPM_OK;
+}
+
+
+VideoContentMetrics*
+VPMFramePreprocessor::ContentMetrics() const
+{
+    return _contentMetrics;
+}
+
+} //namespace
diff --git a/src/modules/video_processing/main/source/frame_preprocessor.h b/src/modules/video_processing/main/source/frame_preprocessor.h
new file mode 100644
index 0000000..2d89c4e
--- /dev/null
+++ b/src/modules/video_processing/main/source/frame_preprocessor.h
@@ -0,0 +1,84 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * frame_preprocessor.h
+ */
+#ifndef VPM_FRAME_PREPROCESSOR_H
+#define VPM_FRAME_PREPROCESSOR_H
+
+#include "typedefs.h"
+#include "video_processing.h"
+#include "content_analysis.h"
+#include "spatial_resampler.h"
+#include "video_decimator.h"
+
+namespace webrtc {
+
+
+class VPMFramePreprocessor
+{
+public:
+
+    VPMFramePreprocessor();
+    ~VPMFramePreprocessor();
+
+    WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    void Reset();
+
+    // Enable temporal decimation
+    void EnableTemporalDecimation(bool enable);
+
+    void SetInputFrameResampleMode(VideoFrameResampling resamplingMode);
+
+    //Enable content analysis
+    void EnableContentAnalysis(bool enable);
+
+    //Set max frame rate
+    WebRtc_Word32 SetMaxFrameRate(WebRtc_UWord32 maxFrameRate);
+
+    //Set target resolution: frame rate and dimension
+    WebRtc_Word32 SetTargetResolution(WebRtc_UWord32 width, WebRtc_UWord32 height, WebRtc_UWord32 frameRate);
+
+    //Update incoming frame rate/dimension
+    void UpdateIncomingFrameRate();
+
+    WebRtc_Word32 updateIncomingFrameSize(WebRtc_UWord32 width, WebRtc_UWord32 height);
+
+    //Set decimated values: frame rate/dimension
+    WebRtc_UWord32 DecimatedFrameRate();
+    WebRtc_UWord32 DecimatedWidth() const;
+    WebRtc_UWord32 DecimatedHeight() const;
+
+    //Preprocess output:
+    WebRtc_Word32 PreprocessFrame(const VideoFrame* frame, VideoFrame** processedFrame);
+    VideoContentMetrics* ContentMetrics() const;
+
+private:
+    // The content does not change so much every frame, so to reduce complexity
+    // we can compute new content metrics every |kSkipFrameCA| frames.
+    enum { kSkipFrameCA = 2 };
+
+    WebRtc_Word32              _id;
+    VideoContentMetrics*      _contentMetrics;
+    WebRtc_UWord32             _maxFrameRate;
+    VideoFrame                _resampledFrame;
+    VPMSpatialResampler*     _spatialResampler;
+    VPMContentAnalysis*      _ca;
+    VPMVideoDecimator*       _vd;
+    bool                     _enableCA;
+    int                      _frameCnt;
+    
+}; // end of VPMFramePreprocessor class definition
+
+} //namespace
+
+#endif // VPM_FRAME_PREPROCESS_H
diff --git a/src/modules/video_processing/main/source/spatial_resampler.cc b/src/modules/video_processing/main/source/spatial_resampler.cc
new file mode 100644
index 0000000..f4c4415
--- /dev/null
+++ b/src/modules/video_processing/main/source/spatial_resampler.cc
@@ -0,0 +1,124 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "spatial_resampler.h"
+
+
+namespace webrtc {
+
+VPMSimpleSpatialResampler::VPMSimpleSpatialResampler()
+:
+_resamplingMode(kFastRescaling),
+_targetWidth(0),
+_targetHeight(0),
+_scaler()
+{
+}
+
+VPMSimpleSpatialResampler::~VPMSimpleSpatialResampler()
+{
+  //
+}
+
+
+WebRtc_Word32
+VPMSimpleSpatialResampler::SetTargetFrameSize(WebRtc_Word32 width,
+                                              WebRtc_Word32 height)
+{
+  if (_resamplingMode == kNoRescaling)  {
+    return VPM_OK;
+  }
+
+  if (width < 1 || height < 1)  {
+    return VPM_PARAMETER_ERROR;
+  }
+
+  _targetWidth = width;
+  _targetHeight = height;
+
+  return VPM_OK;
+}
+
+void
+VPMSimpleSpatialResampler::SetInputFrameResampleMode(VideoFrameResampling
+                                                     resamplingMode)
+{
+  _resamplingMode = resamplingMode;
+}
+
+void
+VPMSimpleSpatialResampler::Reset()
+{
+  _resamplingMode = kFastRescaling;
+  _targetWidth = 0;
+  _targetHeight = 0;
+}
+
+WebRtc_Word32
+VPMSimpleSpatialResampler::ResampleFrame(const VideoFrame& inFrame,
+                                         VideoFrame& outFrame)
+{
+  if (_resamplingMode == kNoRescaling)
+     return outFrame.CopyFrame(inFrame);
+  // Check if re-sampling is needed
+  if ((inFrame.Width() == (WebRtc_UWord32)_targetWidth) &&
+    (inFrame.Height() == (WebRtc_UWord32)_targetHeight))  {
+    return outFrame.CopyFrame(inFrame);
+  }
+
+  // Setting scaler
+  // TODO(mikhal/marpan): Should we allow for setting the filter mode in
+  // _scale.Set() with |_resamplingMode|?
+  int retVal = 0;
+  retVal = _scaler.Set(inFrame.Width(), inFrame.Height(),
+                       _targetWidth, _targetHeight, kI420, kI420, kScaleBox);
+  if (retVal < 0)
+    return retVal;
+
+  // Disabling cut/pad for now - only scaling.
+  int requiredSize = CalcBufferSize(kI420, _targetWidth, _targetHeight);
+  outFrame.VerifyAndAllocate(requiredSize);
+  outFrame.SetTimeStamp(inFrame.TimeStamp());
+  outFrame.SetRenderTime(inFrame.RenderTimeMs());
+  outFrame.SetWidth(_targetWidth);
+  outFrame.SetHeight(_targetHeight);
+
+  retVal = _scaler.Scale(inFrame.Buffer(), outFrame.Buffer(), requiredSize);
+  outFrame.SetLength(requiredSize);
+  if (retVal == 0)
+    return VPM_OK;
+  else
+    return VPM_SCALE_ERROR;
+}
+
+WebRtc_Word32
+VPMSimpleSpatialResampler::TargetHeight()
+{
+  return _targetHeight;
+}
+
+WebRtc_Word32
+VPMSimpleSpatialResampler::TargetWidth()
+{
+  return _targetWidth;
+}
+
+bool
+VPMSimpleSpatialResampler::ApplyResample(WebRtc_Word32 width,
+                                         WebRtc_Word32 height)
+{
+  if ((width == _targetWidth && height == _targetHeight) ||
+       _resamplingMode == kNoRescaling)
+    return false;
+  else
+    return true;
+}
+
+} //namespace
diff --git a/src/modules/video_processing/main/source/spatial_resampler.h b/src/modules/video_processing/main/source/spatial_resampler.h
new file mode 100644
index 0000000..28a5a6c
--- /dev/null
+++ b/src/modules/video_processing/main/source/spatial_resampler.h
@@ -0,0 +1,69 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * spatial_resampler.h
+ */
+
+#ifndef VPM_SPATIAL_RESAMPLER_H
+#define VPM_SPATIAL_RESAMPLER_H
+
+#include "typedefs.h"
+
+#include "module_common_types.h"
+#include "video_processing_defines.h"
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "common_video/libyuv/include/scaler.h"
+
+namespace webrtc {
+
+class VPMSpatialResampler
+{
+public:
+  virtual ~VPMSpatialResampler() {};
+  virtual WebRtc_Word32 SetTargetFrameSize(WebRtc_Word32 width,
+                                           WebRtc_Word32 height) = 0;
+  virtual void SetInputFrameResampleMode(VideoFrameResampling
+                                         resamplingMode) = 0;
+  virtual void Reset() = 0;
+  virtual WebRtc_Word32 ResampleFrame(const VideoFrame& inFrame,
+                                      VideoFrame& outFrame) = 0;
+  virtual WebRtc_Word32 TargetWidth() = 0;
+  virtual WebRtc_Word32 TargetHeight() = 0;
+  virtual bool ApplyResample(WebRtc_Word32 width, WebRtc_Word32 height) = 0;
+};
+
+class VPMSimpleSpatialResampler : public VPMSpatialResampler
+{
+public:
+  VPMSimpleSpatialResampler();
+  ~VPMSimpleSpatialResampler();
+  virtual WebRtc_Word32 SetTargetFrameSize(WebRtc_Word32 width,
+                                           WebRtc_Word32 height);
+  virtual void SetInputFrameResampleMode(VideoFrameResampling resamplingMode);
+  virtual void Reset();
+  virtual WebRtc_Word32 ResampleFrame(const VideoFrame& inFrame,
+                                      VideoFrame& outFrame);
+  virtual WebRtc_Word32 TargetWidth();
+  virtual WebRtc_Word32 TargetHeight();
+  virtual bool ApplyResample(WebRtc_Word32 width, WebRtc_Word32 height);
+
+private:
+
+  VideoFrameResampling        _resamplingMode;
+  WebRtc_Word32               _targetWidth;
+  WebRtc_Word32               _targetHeight;
+  Scaler                      _scaler;
+};
+
+} //namespace
+
+#endif
diff --git a/src/modules/video_processing/main/source/video_decimator.cc b/src/modules/video_processing/main/source/video_decimator.cc
new file mode 100644
index 0000000..43bda08
--- /dev/null
+++ b/src/modules/video_processing/main/source/video_decimator.cc
@@ -0,0 +1,235 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_decimator.h"
+#include "tick_util.h"
+#include "video_processing.h"
+
+#define VD_MIN(a, b) ((a) < (b)) ? (a) : (b)
+
+namespace webrtc {
+
+VPMVideoDecimator::VPMVideoDecimator()
+:
+_overShootModifier(0),
+_dropCount(0),
+_keepCount(0),
+_targetFrameRate(30),
+_incomingFrameRate(0.0f),
+_maxFrameRate(30),
+_incomingFrameTimes(),
+_enableTemporalDecimation(true)
+{
+    Reset();
+}
+
+VPMVideoDecimator::~VPMVideoDecimator()
+{
+	//
+}
+
+void
+VPMVideoDecimator::Reset() 
+{
+   _overShootModifier = 0;
+    _dropCount = 0;
+    _keepCount = 0;
+    _targetFrameRate = 30;
+    _incomingFrameRate = 0.0f;
+    _maxFrameRate = 30;
+    memset(_incomingFrameTimes, 0, sizeof(_incomingFrameTimes));
+    _enableTemporalDecimation = true;
+}
+
+void
+VPMVideoDecimator::EnableTemporalDecimation(bool enable)
+{
+    _enableTemporalDecimation = enable;
+}
+WebRtc_Word32
+VPMVideoDecimator::SetMaxFrameRate(WebRtc_UWord32 maxFrameRate)
+{
+    if (maxFrameRate == 0)
+    {
+        return VPM_PARAMETER_ERROR;
+    }
+
+    _maxFrameRate = maxFrameRate;
+    
+    if (_targetFrameRate > _maxFrameRate)
+    {
+        _targetFrameRate = _maxFrameRate;
+
+    }
+    return VPM_OK;
+}
+
+WebRtc_Word32
+VPMVideoDecimator::SetTargetFrameRate(WebRtc_UWord32 frameRate)
+{
+    if (frameRate == 0)
+    {
+        return VPM_PARAMETER_ERROR;
+    }
+    if (frameRate > _maxFrameRate)
+    {
+        //override
+        _targetFrameRate = _maxFrameRate;
+    }
+    else
+    {
+        _targetFrameRate = frameRate;
+    }
+    return VPM_OK;
+}
+
+bool
+VPMVideoDecimator::DropFrame()
+{
+    if (!_enableTemporalDecimation)
+    {
+        return false;
+    }
+
+    if (_incomingFrameRate <= 0)
+    {
+        return false;
+    }
+
+    const WebRtc_UWord32 incomingFrameRate = static_cast<WebRtc_UWord32>(_incomingFrameRate + 0.5f);
+
+    if (_targetFrameRate == 0)
+    {
+        return true;
+    }
+    
+    bool drop = false; 
+    if (incomingFrameRate > _targetFrameRate)
+    {       
+        WebRtc_Word32 overshoot = _overShootModifier + (incomingFrameRate - _targetFrameRate);
+        if(overshoot < 0)
+        {
+            overshoot = 0;
+            _overShootModifier = 0;
+        }
+        
+        if (overshoot && 2 * overshoot < (WebRtc_Word32) incomingFrameRate)
+        {
+
+            if (_dropCount) // Just got here so drop to be sure.
+            {
+                _dropCount = 0;         
+                return true;
+            }                        
+            const WebRtc_UWord32 dropVar = incomingFrameRate / overshoot;
+
+            if (_keepCount >= dropVar)
+            {
+                drop = true;                           
+                _overShootModifier = -((WebRtc_Word32) incomingFrameRate % overshoot) / 3;
+                _keepCount = 1;
+            }
+            else
+            {                        
+                
+                _keepCount++;
+            }
+        }
+        else
+        {
+            _keepCount = 0;         
+            const WebRtc_UWord32 dropVar = overshoot / _targetFrameRate;
+            if (_dropCount < dropVar)
+            {                
+                drop = true;
+                _dropCount++;                
+            }
+            else
+            {
+                _overShootModifier = overshoot % _targetFrameRate;
+                drop = false;
+                _dropCount = 0;                
+            }
+        }
+    }
+
+    return drop;
+}
+
+
+WebRtc_UWord32
+VPMVideoDecimator::DecimatedFrameRate()
+{
+    ProcessIncomingFrameRate(TickTime::MillisecondTimestamp());
+    if (!_enableTemporalDecimation)
+    {
+        return static_cast<WebRtc_UWord32>(_incomingFrameRate + 0.5f);
+    }
+    return VD_MIN(_targetFrameRate, static_cast<WebRtc_UWord32>(_incomingFrameRate + 0.5f));
+}
+
+WebRtc_UWord32
+VPMVideoDecimator::InputFrameRate()
+{
+    ProcessIncomingFrameRate(TickTime::MillisecondTimestamp());
+    return static_cast<WebRtc_UWord32>(_incomingFrameRate + 0.5f);
+}
+
+void
+VPMVideoDecimator::UpdateIncomingFrameRate()
+{
+   WebRtc_Word64 now = TickTime::MillisecondTimestamp();
+    if(_incomingFrameTimes[0] == 0)
+    {
+        // first no shift
+    } else
+    {
+        // shift 
+        for(int i = (kFrameCountHistorySize - 2); i >= 0 ; i--)
+        {
+            _incomingFrameTimes[i+1] = _incomingFrameTimes[i];
+        }
+    }
+    _incomingFrameTimes[0] = now;
+    ProcessIncomingFrameRate(now);
+}
+
+void 
+VPMVideoDecimator::ProcessIncomingFrameRate(WebRtc_Word64 now)
+{
+   WebRtc_Word32 num = 0;
+    WebRtc_Word32 nrOfFrames = 0;
+    for(num = 1; num < (kFrameCountHistorySize - 1); num++)
+    {
+        if (_incomingFrameTimes[num] <= 0 ||
+            now - _incomingFrameTimes[num] > kFrameHistoryWindowMs) // don't use data older than 2sec
+        {
+            break;
+        } else
+        {
+            nrOfFrames++;
+        }
+    }
+    if (num > 1)
+    {
+        WebRtc_Word64 diff = now - _incomingFrameTimes[num-1];
+        _incomingFrameRate = 1.0;
+        if(diff >0)
+        {
+            _incomingFrameRate = nrOfFrames * 1000.0f / static_cast<float>(diff);
+        }
+    }
+    else
+    {
+        _incomingFrameRate = static_cast<float>(nrOfFrames);
+    }
+}
+
+} //namespace
diff --git a/src/modules/video_processing/main/source/video_decimator.h b/src/modules/video_processing/main/source/video_decimator.h
new file mode 100644
index 0000000..e152bb9
--- /dev/null
+++ b/src/modules/video_processing/main/source/video_decimator.h
@@ -0,0 +1,65 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * video_decimator.h
+ */
+#ifndef VPM_VIDEO_DECIMATOR_H
+#define VPM_VIDEO_DECIMATOR_H
+
+#include "typedefs.h"
+#include "module_common_types.h"
+
+namespace webrtc {
+
+class VPMVideoDecimator
+{
+public:
+    VPMVideoDecimator();
+    ~VPMVideoDecimator();
+    
+    void Reset();
+    
+    void EnableTemporalDecimation(bool enable);
+    
+    WebRtc_Word32 SetMaxFrameRate(WebRtc_UWord32 maxFrameRate);
+    WebRtc_Word32 SetTargetFrameRate(WebRtc_UWord32 frameRate);
+
+    bool DropFrame();
+    
+    void UpdateIncomingFrameRate();
+
+    // Get Decimated Frame Rate/Dimensions
+    WebRtc_UWord32 DecimatedFrameRate();
+
+    //Get input frame rate
+    WebRtc_UWord32 InputFrameRate();
+
+private:
+    void ProcessIncomingFrameRate(WebRtc_Word64 now);
+
+    enum { kFrameCountHistorySize = 90};
+    enum { kFrameHistoryWindowMs = 2000};
+
+    // Temporal decimation
+    WebRtc_Word32         _overShootModifier;
+    WebRtc_UWord32        _dropCount;
+    WebRtc_UWord32        _keepCount;
+    WebRtc_UWord32        _targetFrameRate;
+    float               _incomingFrameRate;
+    WebRtc_UWord32        _maxFrameRate;
+    WebRtc_Word64         _incomingFrameTimes[kFrameCountHistorySize];
+    bool                _enableTemporalDecimation;
+
+};
+
+} //namespace
+
+#endif
diff --git a/src/modules/video_processing/main/source/video_processing.gypi b/src/modules/video_processing/main/source/video_processing.gypi
new file mode 100644
index 0000000..3bc03bc
--- /dev/null
+++ b/src/modules/video_processing/main/source/video_processing.gypi
@@ -0,0 +1,88 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'video_processing',
+      'type': '<(library)',
+      'dependencies': [
+        'webrtc_utility',
+        '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+        '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '../interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../interface',
+        ],
+      },
+      'sources': [
+        '../interface/video_processing.h',
+        '../interface/video_processing_defines.h',
+        'brighten.cc',
+        'brighten.h',
+        'brightness_detection.cc',
+        'brightness_detection.h',
+        'color_enhancement.cc',
+        'color_enhancement.h',
+        'color_enhancement_private.h',
+        'content_analysis.cc',
+        'content_analysis.h',
+        'deflickering.cc',
+        'deflickering.h',
+        'denoising.cc',
+        'denoising.h',
+        'frame_preprocessor.cc',
+        'frame_preprocessor.h',
+        'spatial_resampler.cc',
+        'spatial_resampler.h',
+        'video_decimator.cc',
+        'video_decimator.h',
+        'video_processing_impl.cc',
+        'video_processing_impl.h',
+      ],
+      'conditions': [
+        ['target_arch=="ia32" or target_arch=="x64"', {
+          'dependencies': [ 'video_processing_sse2', ],
+        }],
+      ],
+    },
+  ],
+  'conditions': [
+    ['target_arch=="ia32" or target_arch=="x64"', {
+      'targets': [
+        {
+          'target_name': 'video_processing_sse2',
+          'type': '<(library)',
+          'sources': [
+            'content_analysis_sse2.cc',
+          ],
+          'include_dirs': [
+            '../interface',
+            '../../../interface',
+          ],
+          'conditions': [
+            ['os_posix==1 and OS!="mac"', {
+              'cflags': [ '-msse2', ],
+            }],
+            ['OS=="mac"', {
+              'xcode_settings': {
+                'OTHER_CFLAGS': [ '-msse2', ],
+              },
+            }],
+          ],
+        },
+      ],
+    }],
+  ],
+}
+
diff --git a/src/modules/video_processing/main/source/video_processing_impl.cc b/src/modules/video_processing/main/source/video_processing_impl.cc
new file mode 100644
index 0000000..3619996
--- /dev/null
+++ b/src/modules/video_processing/main/source/video_processing_impl.cc
@@ -0,0 +1,340 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_processing_impl.h"
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+#include <cassert>
+
+namespace webrtc {
+
+namespace
+{
+    void
+    SetSubSampling(VideoProcessingModule::FrameStats& stats,
+                   const WebRtc_Word32 width,
+                   const WebRtc_Word32 height)
+    {
+        if (width * height >= 640 * 480)
+        {
+            stats.subSamplWidth = 3; 
+            stats.subSamplHeight = 3;
+        }
+        else if (width * height >= 352 * 288)
+        {
+            stats.subSamplWidth = 2; 
+            stats.subSamplHeight = 2;
+        }
+        else if (width * height >= 176 * 144)
+        {
+            stats.subSamplWidth = 1; 
+            stats.subSamplHeight = 1;
+        }
+        else
+        {
+            stats.subSamplWidth = 0; 
+            stats.subSamplHeight = 0;
+        }
+    }
+}
+
+VideoProcessingModule*
+VideoProcessingModule::Create(const WebRtc_Word32 id)
+{
+
+    return new VideoProcessingModuleImpl(id);
+}
+
+void
+VideoProcessingModule::Destroy(VideoProcessingModule* module)
+{
+    if (module)
+    {
+        delete static_cast<VideoProcessingModuleImpl*>(module);
+    }
+}
+
+WebRtc_Word32
+VideoProcessingModuleImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    CriticalSectionScoped mutex(_mutex);
+    _id = id;
+    _brightnessDetection.ChangeUniqueId(id);
+    _deflickering.ChangeUniqueId(id);
+    _denoising.ChangeUniqueId(id);
+    _framePreProcessor.ChangeUniqueId(id);
+    return VPM_OK;
+}
+
+WebRtc_Word32
+VideoProcessingModuleImpl::Id() const
+{
+    CriticalSectionScoped mutex(_mutex);
+    return _id;
+}
+
+VideoProcessingModuleImpl::VideoProcessingModuleImpl(const WebRtc_Word32 id) :
+    _id(id),
+    _mutex(*CriticalSectionWrapper::CreateCriticalSection())
+{
+    _brightnessDetection.ChangeUniqueId(id);
+    _deflickering.ChangeUniqueId(id);
+    _denoising.ChangeUniqueId(id);
+    _framePreProcessor.ChangeUniqueId(id);
+    WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, _id, "Created");
+}
+
+
+VideoProcessingModuleImpl::~VideoProcessingModuleImpl()
+{
+    WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideoPreocessing, _id, "Destroyed");
+    
+    delete &_mutex;
+}
+
+void
+VideoProcessingModuleImpl::Reset()
+{
+    CriticalSectionScoped mutex(_mutex);
+    _deflickering.Reset();
+    _denoising.Reset();
+    _brightnessDetection.Reset();
+    _framePreProcessor.Reset();
+
+}
+
+WebRtc_Word32
+VideoProcessingModule::GetFrameStats(FrameStats& stats,
+                                         const VideoFrame& frame)
+{
+    return GetFrameStats(stats, frame.Buffer(), frame.Width(), frame.Height());
+}
+
+WebRtc_Word32
+VideoProcessingModule::GetFrameStats(FrameStats& stats,
+                                         const WebRtc_UWord8* frame,
+                                         const WebRtc_UWord32 width,
+                                         const WebRtc_UWord32 height)
+{
+    if (frame == NULL)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, "Null frame pointer");
+        return VPM_PARAMETER_ERROR;
+    }
+    
+    if (width == 0 || height == 0)
+    {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoPreocessing, -1, "Invalid frame size");
+        return VPM_PARAMETER_ERROR;
+    }
+
+    ClearFrameStats(stats); // The histogram needs to be zeroed out.
+    SetSubSampling(stats, width, height);
+
+    // Compute histogram and sum of frame
+    for (WebRtc_UWord32 i = 0; i < height; i += (1 << stats.subSamplHeight))
+    {
+        WebRtc_Word32 k = i * width;
+        for (WebRtc_UWord32 j = 0; j < width; j += (1 << stats.subSamplWidth))
+        { 
+            stats.hist[frame[k + j]]++;
+            stats.sum += frame[k + j];
+        }
+    }
+
+    stats.numPixels = (width * height) / ((1 << stats.subSamplWidth) * (1 << stats.subSamplHeight));
+    assert(stats.numPixels > 0);
+
+    // Compute mean value of frame
+    stats.mean = stats.sum / stats.numPixels;
+    
+    return VPM_OK;
+}
+
+bool
+VideoProcessingModule::ValidFrameStats(const FrameStats& stats)
+{
+    if (stats.numPixels == 0)
+    {
+        return false;
+    }
+
+    return true;
+}
+
+void
+VideoProcessingModule::ClearFrameStats(FrameStats& stats)
+{
+    stats.mean = 0;
+    stats.sum = 0;
+    stats.numPixels = 0;
+    stats.subSamplWidth = 0;
+    stats.subSamplHeight = 0;
+    memset(stats.hist, 0, sizeof(stats.hist));
+}
+
+WebRtc_Word32
+VideoProcessingModule::ColorEnhancement(VideoFrame& frame)
+{
+    return ColorEnhancement(frame.Buffer(), frame.Width(), frame.Height());
+}
+
+WebRtc_Word32
+VideoProcessingModule::ColorEnhancement(WebRtc_UWord8* frame,
+                                            const WebRtc_UWord32 width,
+                                            const WebRtc_UWord32 height)
+{
+    return VideoProcessing::ColorEnhancement(frame, width, height);
+}
+
+WebRtc_Word32
+VideoProcessingModule::Brighten(VideoFrame& frame, int delta)
+{
+    return Brighten(frame.Buffer(), frame.Width(), frame.Height(), delta);
+}
+
+WebRtc_Word32
+VideoProcessingModule::Brighten(WebRtc_UWord8* frame,
+                                    int width,
+                                    int height,
+                                    int delta)
+{
+    return VideoProcessing::Brighten(frame, width, height, delta);
+}
+
+WebRtc_Word32
+VideoProcessingModuleImpl::Deflickering(VideoFrame& frame,
+                                            FrameStats& stats)
+{
+    return Deflickering(frame.Buffer(), frame.Width(), frame.Height(), 
+        frame.TimeStamp(), stats);
+}
+
+WebRtc_Word32
+VideoProcessingModuleImpl::Deflickering(WebRtc_UWord8* frame,
+                                            const WebRtc_UWord32 width,
+                                            const WebRtc_UWord32 height,
+                                            const WebRtc_UWord32 timestamp,
+                                            FrameStats& stats)
+{
+    CriticalSectionScoped mutex(_mutex);
+    return _deflickering.ProcessFrame(frame, width, height, timestamp, stats);
+}
+
+WebRtc_Word32
+VideoProcessingModuleImpl::Denoising(VideoFrame& frame)
+{
+    return Denoising(frame.Buffer(), frame.Width(), frame.Height());
+}
+
+WebRtc_Word32
+VideoProcessingModuleImpl::Denoising(WebRtc_UWord8* frame,
+                                         const WebRtc_UWord32 width,
+                                         const WebRtc_UWord32 height)
+{
+    CriticalSectionScoped mutex(_mutex);
+    return _denoising.ProcessFrame(frame, width, height);
+}
+
+WebRtc_Word32
+VideoProcessingModuleImpl::BrightnessDetection(const VideoFrame& frame,
+                                                   const FrameStats& stats)
+{
+    return BrightnessDetection(frame.Buffer(), frame.Width(), frame.Height(), stats);
+}
+
+WebRtc_Word32
+VideoProcessingModuleImpl::BrightnessDetection(const WebRtc_UWord8* frame,
+                                                   const WebRtc_UWord32 width,
+                                                   const WebRtc_UWord32 height,
+                                                   const FrameStats& stats)
+{
+    CriticalSectionScoped mutex(_mutex);
+    return _brightnessDetection.ProcessFrame(frame, width, height, stats);
+}
+
+
+void 
+VideoProcessingModuleImpl::EnableTemporalDecimation(bool enable)
+{
+    CriticalSectionScoped mutex(_mutex);
+    _framePreProcessor.EnableTemporalDecimation(enable);
+}
+
+
+void 
+VideoProcessingModuleImpl::SetInputFrameResampleMode(VideoFrameResampling resamplingMode)
+{
+    CriticalSectionScoped cs(_mutex);
+    _framePreProcessor.SetInputFrameResampleMode(resamplingMode);
+}
+
+WebRtc_Word32
+VideoProcessingModuleImpl::SetMaxFrameRate(WebRtc_UWord32 maxFrameRate)
+{
+    CriticalSectionScoped cs(_mutex);
+    return _framePreProcessor.SetMaxFrameRate(maxFrameRate);
+
+}
+
+WebRtc_Word32
+VideoProcessingModuleImpl::SetTargetResolution(WebRtc_UWord32 width, WebRtc_UWord32 height, WebRtc_UWord32 frameRate)
+{
+    CriticalSectionScoped cs(_mutex);
+    return _framePreProcessor.SetTargetResolution(width, height, frameRate);
+}
+
+
+WebRtc_UWord32
+VideoProcessingModuleImpl::DecimatedFrameRate()
+{
+    CriticalSectionScoped cs(_mutex);
+    return  _framePreProcessor.DecimatedFrameRate();
+}
+
+
+WebRtc_UWord32
+VideoProcessingModuleImpl::DecimatedWidth() const
+{
+    CriticalSectionScoped cs(_mutex);
+    return _framePreProcessor.DecimatedWidth();
+}
+
+WebRtc_UWord32
+VideoProcessingModuleImpl::DecimatedHeight() const
+{
+    CriticalSectionScoped cs(_mutex);
+    return _framePreProcessor.DecimatedHeight();
+}
+
+WebRtc_Word32
+VideoProcessingModuleImpl::PreprocessFrame(const VideoFrame *frame, VideoFrame **processedFrame)
+{
+    CriticalSectionScoped mutex(_mutex);
+    return _framePreProcessor.PreprocessFrame(frame, processedFrame);
+}
+
+VideoContentMetrics*
+VideoProcessingModuleImpl::ContentMetrics() const
+{
+    CriticalSectionScoped mutex(_mutex);
+    return _framePreProcessor.ContentMetrics();
+}
+
+
+void
+VideoProcessingModuleImpl::EnableContentAnalysis(bool enable)
+{
+    CriticalSectionScoped mutex(_mutex);
+    _framePreProcessor.EnableContentAnalysis(enable);
+}
+
+} //namespace
diff --git a/src/modules/video_processing/main/source/video_processing_impl.h b/src/modules/video_processing/main/source/video_processing_impl.h
new file mode 100644
index 0000000..3170ab1
--- /dev/null
+++ b/src/modules/video_processing/main/source/video_processing_impl.h
@@ -0,0 +1,107 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULE_VIDEO_PROCESSING_IMPL_H
+#define WEBRTC_MODULE_VIDEO_PROCESSING_IMPL_H
+
+#include "video_processing.h"
+#include "brighten.h"
+#include "brightness_detection.h"
+#include "color_enhancement.h"
+#include "deflickering.h"
+#include "denoising.h"
+#include "frame_preprocessor.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+class VideoProcessingModuleImpl : public VideoProcessingModule
+{
+public:
+
+    VideoProcessingModuleImpl(WebRtc_Word32 id);
+
+    virtual ~VideoProcessingModuleImpl();
+
+    WebRtc_Word32 Id() const;
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual void Reset();
+
+    virtual WebRtc_Word32 Deflickering(WebRtc_UWord8* frame,
+                                     WebRtc_UWord32 width,
+                                     WebRtc_UWord32 height,
+                                     WebRtc_UWord32 timestamp,
+                                     FrameStats& stats);
+
+    virtual WebRtc_Word32 Deflickering(VideoFrame& frame,
+                                       FrameStats& stats);
+
+    virtual WebRtc_Word32 Denoising(WebRtc_UWord8* frame,
+                                    WebRtc_UWord32 width,
+                                    WebRtc_UWord32 height);
+
+    virtual WebRtc_Word32 Denoising(VideoFrame& frame);
+
+    virtual WebRtc_Word32 BrightnessDetection(const WebRtc_UWord8* frame,
+                                              WebRtc_UWord32 width,
+                                              WebRtc_UWord32 height,
+                                              const FrameStats& stats);
+
+    virtual WebRtc_Word32 BrightnessDetection(const VideoFrame& frame,
+                                              const FrameStats& stats);
+
+
+    //Frame pre-processor functions
+
+    //Enable temporal decimation
+    virtual void EnableTemporalDecimation(bool enable);
+
+    virtual void SetInputFrameResampleMode(VideoFrameResampling resamplingMode);
+
+    //Enable content analysis
+    virtual void EnableContentAnalysis(bool enable);
+
+    //Set max frame rate
+    virtual WebRtc_Word32 SetMaxFrameRate(WebRtc_UWord32 maxFrameRate);
+
+    // Set Target Resolution: frame rate and dimension
+    virtual WebRtc_Word32 SetTargetResolution(WebRtc_UWord32 width,
+                                              WebRtc_UWord32 height,
+                                              WebRtc_UWord32 frameRate);
+
+
+    // Get decimated values: frame rate/dimension
+    virtual WebRtc_UWord32 DecimatedFrameRate();
+    virtual WebRtc_UWord32 DecimatedWidth() const;
+    virtual WebRtc_UWord32 DecimatedHeight() const;
+
+    // Preprocess:
+    // Pre-process incoming frame: Sample when needed and compute content
+    // metrics when enabled.
+    // If no resampling takes place - processedFrame is set to NULL.
+    virtual WebRtc_Word32 PreprocessFrame(const VideoFrame* frame,
+                                          VideoFrame** processedFrame);
+    virtual VideoContentMetrics* ContentMetrics() const;
+
+private:
+    WebRtc_Word32              _id;
+    CriticalSectionWrapper&    _mutex;
+
+    VPMDeflickering            _deflickering;
+    VPMDenoising               _denoising;
+    VPMBrightnessDetection     _brightnessDetection;
+    VPMFramePreprocessor       _framePreProcessor;
+};
+
+} // namespace
+
+#endif
diff --git a/src/modules/video_processing/main/test/unit_test/brightness_detection_test.cc b/src/modules/video_processing/main/test/unit_test/brightness_detection_test.cc
new file mode 100644
index 0000000..6510a5c
--- /dev/null
+++ b/src/modules/video_processing/main/test/unit_test/brightness_detection_test.cc
@@ -0,0 +1,107 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "unit_test.h"
+#include "video_processing.h"
+
+using namespace webrtc;
+
+TEST_F(VideoProcessingModuleTest, BrightnessDetection)
+{
+    WebRtc_UWord32 frameNum = 0;
+    WebRtc_Word32 brightnessWarning = 0;
+    WebRtc_UWord32 warningCount = 0;
+    while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength)
+    {
+        frameNum++;
+        VideoProcessingModule::FrameStats stats;
+        ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+        ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame, stats), 0);
+        if (brightnessWarning != VideoProcessingModule::kNoWarning)
+        {
+            warningCount++;
+        }
+    }
+    ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";
+
+    // Expect few warnings
+    float warningProportion = static_cast<float>(warningCount) / frameNum * 100;
+    printf("\nWarning proportions:\n");
+    printf("Stock foreman: %.1f %%\n", warningProportion);
+    EXPECT_LT(warningProportion, 10);
+
+    rewind(_sourceFile);
+    frameNum = 0;
+    warningCount = 0;
+    while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength &&
+        frameNum < 300)
+    {
+        frameNum++;
+
+        WebRtc_UWord8* frame = _videoFrame.Buffer();
+        WebRtc_UWord32 yTmp = 0;
+        for (WebRtc_UWord32 yIdx = 0; yIdx < _width * _height; yIdx++)
+        {
+            yTmp = frame[yIdx] << 1;
+            if (yTmp > 255)
+            {
+                yTmp = 255;
+            }
+            frame[yIdx] = static_cast<WebRtc_UWord8>(yTmp);
+        }
+
+        VideoProcessingModule::FrameStats stats;
+        ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+        ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame, stats), 0);
+        EXPECT_NE(VideoProcessingModule::kDarkWarning, brightnessWarning);
+        if (brightnessWarning == VideoProcessingModule::kBrightWarning)
+        {
+            warningCount++;
+        }
+    }
+    ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";
+
+    // Expect many brightness warnings
+    warningProportion = static_cast<float>(warningCount) / frameNum * 100;
+    printf("Bright foreman: %.1f %%\n", warningProportion);
+    EXPECT_GT(warningProportion, 95);
+
+    rewind(_sourceFile);
+    frameNum = 0;
+    warningCount = 0;
+    while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength &&
+        frameNum < 300)
+    {
+        frameNum++;
+
+        WebRtc_UWord8* frame = _videoFrame.Buffer();
+        WebRtc_Word32 yTmp = 0;
+        for (WebRtc_UWord32 yIdx = 0; yIdx < _width * _height; yIdx++)
+        {
+            yTmp = frame[yIdx] >> 1;
+            frame[yIdx] = static_cast<WebRtc_UWord8>(yTmp);
+        }
+
+        VideoProcessingModule::FrameStats stats;
+        ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+        ASSERT_GE(brightnessWarning = _vpm->BrightnessDetection(_videoFrame, stats), 0);
+        EXPECT_NE(VideoProcessingModule::kBrightWarning, brightnessWarning);
+        if (brightnessWarning == VideoProcessingModule::kDarkWarning)
+        {
+            warningCount++;
+        }
+    }
+    ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";
+
+    // Expect many darkness warnings
+    warningProportion = static_cast<float>(warningCount) / frameNum * 100;
+    printf("Dark foreman: %.1f %%\n\n", warningProportion);
+    EXPECT_GT(warningProportion, 90);
+}
diff --git a/src/modules/video_processing/main/test/unit_test/color_enhancement_test.cc b/src/modules/video_processing/main/test/unit_test/color_enhancement_test.cc
new file mode 100644
index 0000000..0a94db4
--- /dev/null
+++ b/src/modules/video_processing/main/test/unit_test/color_enhancement_test.cc
@@ -0,0 +1,128 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cstdio>
+#include <cstdlib>
+
+#include "modules/video_processing/main/interface/video_processing.h"
+#include "modules/video_processing/main/test/unit_test/unit_test.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "testsupport/fileutils.h"
+
+namespace webrtc {
+
+TEST_F(VideoProcessingModuleTest, ColorEnhancement)
+{
+    TickTime t0;
+    TickTime t1;
+    TickInterval accTicks;
+
+    // Use a shorter version of the Foreman clip for this test.
+    fclose(_sourceFile);
+    const std::string video_file =
+      webrtc::test::ResourcePath("foreman_cif_short", "yuv");
+    _sourceFile  = fopen(video_file.c_str(), "rb");
+    ASSERT_TRUE(_sourceFile != NULL) <<
+        "Cannot read source file: " + video_file + "\n";
+
+    std::string output_file = webrtc::test::OutputPath() +
+        "foremanColorEnhancedVPM_cif_short.yuv";
+    FILE* modFile = fopen(output_file.c_str(), "w+b");
+    ASSERT_TRUE(modFile != NULL) << "Could not open output file.\n";
+
+    WebRtc_UWord32 frameNum = 0;
+    while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength)
+    {
+        frameNum++;
+        t0 = TickTime::Now();
+        ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(_videoFrame));
+        t1 = TickTime::Now();
+        accTicks += t1 - t0;
+        if (fwrite(_videoFrame.Buffer(), 1, _frameLength,
+                   modFile) !=  _frameLength) {
+          return;
+        }
+    }
+    ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";
+
+    printf("\nTime per frame: %d us \n",
+        static_cast<int>(accTicks.Microseconds() / frameNum));
+    rewind(modFile);
+
+    printf("Comparing files...\n\n");
+    std::string reference_filename =
+        webrtc::test::ResourcePath("foremanColorEnhanced_cif_short", "yuv");
+    FILE* refFile = fopen(reference_filename.c_str(), "rb");
+    ASSERT_TRUE(refFile != NULL) << "Cannot open reference file: " <<
+        reference_filename << "\n"
+        "Create the reference by running Matlab script createTable.m.";
+
+    // get file lenghts
+    ASSERT_EQ(0, fseek(refFile, 0L, SEEK_END));
+    long refLen = ftell(refFile);
+    ASSERT_NE(-1L, refLen);
+    rewind(refFile);
+    ASSERT_EQ(0, fseek(modFile, 0L, SEEK_END));
+    long testLen = ftell(modFile);
+    ASSERT_NE(-1L, testLen);
+    rewind(modFile);
+    ASSERT_EQ(refLen, testLen) << "File lengths differ.";
+
+    VideoFrame refVideoFrame;
+    refVideoFrame.VerifyAndAllocate(_frameLength);
+    refVideoFrame.SetWidth(_width);
+    refVideoFrame.SetHeight(_height);
+
+    // Compare frame-by-frame.
+    while (fread(_videoFrame.Buffer(), 1, _frameLength, modFile) == _frameLength)
+    {
+        ASSERT_EQ(_frameLength, fread(refVideoFrame.Buffer(), 1, _frameLength, refFile));
+        EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), refVideoFrame.Buffer(), _frameLength));
+    }
+    ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";
+
+    // Verify that all color pixels are enhanced, that no luminance values are altered,
+    // and that the function does not write outside the vector.
+    WebRtc_UWord32 safeGuard = 1000;
+    WebRtc_UWord32 numPixels = 352*288; // CIF size
+    WebRtc_UWord8 *testFrame = new WebRtc_UWord8[numPixels + (numPixels / 2) + (2 * safeGuard)];
+    WebRtc_UWord8 *refFrame = new WebRtc_UWord8[numPixels + (numPixels / 2) + (2 * safeGuard)];
+
+    // use value 128 as probe value, since we know that this will be changed in the enhancement
+    memset(testFrame, 128, safeGuard);
+    memset(&testFrame[safeGuard], 128, numPixels);
+    memset(&testFrame[safeGuard + numPixels], 128, numPixels / 2);
+    memset(&testFrame[safeGuard + numPixels + (numPixels / 2)], 128, safeGuard);
+
+    memcpy(refFrame, testFrame, numPixels + (numPixels / 2) + (2 * safeGuard));
+
+    ASSERT_EQ(0, VideoProcessingModule::ColorEnhancement(&testFrame[safeGuard], 352, 288));
+
+    EXPECT_EQ(0, memcmp(testFrame, refFrame, safeGuard)) <<
+        "Function is writing outside the frame memory.";
+    
+    EXPECT_EQ(0, memcmp(&testFrame[safeGuard + numPixels + (numPixels / 2)], 
+        &refFrame[safeGuard + numPixels + (numPixels / 2)], safeGuard)) <<
+        "Function is writing outside the frame memory.";
+
+    EXPECT_EQ(0, memcmp(&testFrame[safeGuard], &refFrame[safeGuard], numPixels)) <<
+        "Function is modifying the luminance.";
+
+    EXPECT_NE(0, memcmp(&testFrame[safeGuard + numPixels],
+        &refFrame[safeGuard + numPixels], numPixels / 2)) <<
+        "Function is not modifying all chrominance pixels";
+
+    ASSERT_EQ(0, fclose(refFile));
+    ASSERT_EQ(0, fclose(modFile));
+    delete [] testFrame;
+    delete [] refFrame;
+}
+
+}  // namespace webrtc
diff --git a/src/modules/video_processing/main/test/unit_test/content_metrics_test.cc b/src/modules/video_processing/main/test/unit_test/content_metrics_test.cc
new file mode 100644
index 0000000..54a1390
--- /dev/null
+++ b/src/modules/video_processing/main/test/unit_test/content_metrics_test.cc
@@ -0,0 +1,40 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_processing/main/interface/video_processing.h"
+#include "modules/video_processing/main/source/content_analysis.h"
+#include "modules/video_processing/main/test/unit_test/unit_test.h"
+
+namespace webrtc {
+
+TEST_F(VideoProcessingModuleTest, ContentAnalysis)
+{
+    VPMContentAnalysis    _ca_c(false);
+    VPMContentAnalysis    _ca_sse(true);
+    VideoContentMetrics  *_cM_c, *_cM_SSE;
+
+    _ca_c.Initialize(_width,_height);
+    _ca_sse.Initialize(_width,_height);
+
+    while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile)
+           == _frameLength)
+    {
+        _cM_c   = _ca_c.ComputeContentMetrics(&_videoFrame);
+        _cM_SSE = _ca_sse.ComputeContentMetrics(&_videoFrame);
+
+        ASSERT_EQ(_cM_c->spatial_pred_err, _cM_SSE->spatial_pred_err);
+        ASSERT_EQ(_cM_c->spatial_pred_err_v, _cM_SSE->spatial_pred_err_v);
+        ASSERT_EQ(_cM_c->spatial_pred_err_h, _cM_SSE->spatial_pred_err_h);
+        ASSERT_EQ(_cM_c->motion_magnitude, _cM_SSE->motion_magnitude);
+    }
+    ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";
+}
+
+}  // namespace webrtc
diff --git a/src/modules/video_processing/main/test/unit_test/createTable.m b/src/modules/video_processing/main/test/unit_test/createTable.m
new file mode 100644
index 0000000..2c7fb52
--- /dev/null
+++ b/src/modules/video_processing/main/test/unit_test/createTable.m
@@ -0,0 +1,179 @@
+% Create the color enhancement look-up table and write it to
+% file colorEnhancementTable.cpp. Copy contents of that file into
+% the source file for the color enhancement function.
+
+clear
+close all
+
+
+% First, define the color enhancement in a normalized domain
+
+% Compander function is defined in three radial zones.
+% 1. From 0 to radius r0, the compander function
+% is a second-order polynomial intersecting the points (0,0)
+% and (r0, r0), and with a slope B in (0,0).
+% 2. From r0 to r1, the compander is a third-order polynomial
+% intersecting the points (r0, r0) and (r1, r1), and with the
+% same slope as the first part in the point (r0, r0) and slope
+% equal to 1 in (r1, r1).
+% 3. For radii larger than r1, the compander function is the
+% unity scale function (no scaling at all).
+
+r0=0.07; % Dead zone radius (must be > 0)
+r1=0.6; % Enhancement zone radius (must be > r0 and < 1)
+B=0.2; % initial slope of compander function (between 0 and 1)
+
+x0=linspace(0,r0).'; % zone 1
+x1=linspace(r0,r1).'; % zone 2
+x2=linspace(r1,1).'; % zone 3
+
+A=(1-B)/r0;
+f0=A*x0.^2+B*x0; % compander function in zone 1
+
+% equation system for finding second zone parameters
+M=[r0^3 r0^2 r0 1; 
+    3*r0^2 2*r0 1 0;
+    3*r1^2 2*r1 1 0;
+    r1^3 r1^2 r1 1];
+m=[A*r0^2+B*r0; 2*A*r0+B; 1; r1];
+% solve equations
+theta=M\m;
+
+% compander function in zone 1
+f1=[x1.^3 x1.^2 x1 ones(size(x1))]*theta;
+
+x=[x0; x1; x2];
+f=[f0; f1; x2];
+
+% plot it
+figure(1)
+plot(x,f,x,x,':')
+xlabel('Normalized radius')
+ylabel('Modified radius')
+
+
+% Now, create the look-up table in the integer color space
+[U,V]=meshgrid(0:255, 0:255); % U-V space
+U0=U;
+V0=V;
+
+% Conversion matrix from normalized YUV to RGB
+T=[1 0 1.13983; 1 -0.39465 -0.58060; 1 2.03211 0];
+Ylum=0.5;
+
+figure(2)
+Z(:,:,1)=Ylum + (U-127)/256*T(1,2) + (V-127)/256*T(1,3);
+Z(:,:,2)=Ylum + (U-127)/256*T(2,2) + (V-127)/256*T(2,3);
+Z(:,:,3)=Ylum + (U-127)/256*T(3,2) + (V-127)/256*T(3,3);
+Z=max(Z,0);
+Z=min(Z,1);
+subplot(121)
+image(Z);
+axis square
+axis off
+set(gcf,'color','k')
+
+R = sqrt((U-127).^2 + (V-127).^2);
+Rnorm = R/127;
+RnormMod = Rnorm;
+RnormMod(RnormMod==0)=1; % avoid division with zero
+
+% find indices to pixels in dead-zone (zone 1)
+ix=find(Rnorm<=r0);
+scaleMatrix = (A*Rnorm(ix).^2 + B*Rnorm(ix))./RnormMod(ix);
+U(ix)=(U(ix)-127).*scaleMatrix+127;
+V(ix)=(V(ix)-127).*scaleMatrix+127;
+
+% find indices to pixels in zone 2
+ix=find(Rnorm>r0 & Rnorm<=r1);
+scaleMatrix = (theta(1)*Rnorm(ix).^3 + theta(2)*Rnorm(ix).^2 + ...
+    theta(3)*Rnorm(ix) + theta(4)) ./ RnormMod(ix);
+U(ix)=(U(ix)-127).*scaleMatrix + 127;
+V(ix)=(V(ix)-127).*scaleMatrix + 127;
+
+% round to integer values and saturate
+U=round(U);
+V=round(V);
+U=max(min(U,255),0);
+V=max(min(V,255),0);
+
+Z(:,:,1)=Ylum + (U-127)/256*T(1,2) + (V-127)/256*T(1,3);
+Z(:,:,2)=Ylum + (U-127)/256*T(2,2) + (V-127)/256*T(2,3);
+Z(:,:,3)=Ylum + (U-127)/256*T(3,2) + (V-127)/256*T(3,3);
+Z=max(Z,0);
+Z=min(Z,1);
+subplot(122)
+image(Z);
+axis square
+axis off
+
+figure(3)
+subplot(121)
+mesh(U-U0)
+subplot(122)
+mesh(V-V0)
+
+
+
+% Last, write to file
+% Write only one matrix, since U=V'
+
+fid = fopen('../out/Debug/colorEnhancementTable.h','wt');
+if fid==-1
+    error('Cannot open file colorEnhancementTable.cpp');
+end
+
+fprintf(fid,'//colorEnhancementTable.h\n\n');
+fprintf(fid,'//Copy the constant table to the appropriate header file.\n\n');
+
+fprintf(fid,'//Table created with Matlab script createTable.m\n\n');
+fprintf(fid,'//Usage:\n');
+fprintf(fid,'//    Umod=colorTable[U][V]\n');
+fprintf(fid,'//    Vmod=colorTable[V][U]\n');
+
+fprintf(fid,'static unsigned char colorTable[%i][%i] = {\n', size(U,1), size(U,2));
+
+for u=1:size(U,2)
+    fprintf(fid,'    {%i', U(1,u));
+    for v=2:size(U,1)
+        fprintf(fid,', %i', U(v,u));
+    end
+    fprintf(fid,'}');
+    if u<size(U,2)
+        fprintf(fid,',');
+    end
+    fprintf(fid,'\n');
+end
+fprintf(fid,'};\n\n');
+fclose(fid);
+fprintf('done');
+
+
+answ=input('Create test vector (takes some time...)? y/n : ','s');
+if answ ~= 'y'
+    return
+end
+
+% Also, create test vectors
+
+% Read test file foreman.yuv
+fprintf('Reading test file...')
+[y,u,v]=readYUV420file('../out/Debug/testFiles/foreman_cif.yuv',352,288);
+fprintf(' done\n');
+unew=uint8(zeros(size(u)));
+vnew=uint8(zeros(size(v)));
+
+% traverse all frames
+for k=1:size(y,3)
+    fprintf('Frame %i\n', k);
+    for r=1:size(u,1)
+        for c=1:size(u,2)
+            unew(r,c,k) = uint8(U(double(v(r,c,k))+1, double(u(r,c,k))+1));
+            vnew(r,c,k) = uint8(V(double(v(r,c,k))+1, double(u(r,c,k))+1));
+        end
+    end
+end
+      
+fprintf('\nWriting modified test file...')
+writeYUV420file('../out/Debug/foremanColorEnhanced.yuv',y,unew,vnew);
+fprintf(' done\n');
diff --git a/src/modules/video_processing/main/test/unit_test/deflickering_test.cc b/src/modules/video_processing/main/test/unit_test/deflickering_test.cc
new file mode 100644
index 0000000..c189490
--- /dev/null
+++ b/src/modules/video_processing/main/test/unit_test/deflickering_test.cc
@@ -0,0 +1,94 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cstdio>
+#include <cstdlib>
+
+#include "modules/video_processing/main/interface/video_processing.h"
+#include "modules/video_processing/main/test/unit_test/unit_test.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "testsupport/fileutils.h"
+
+namespace webrtc {
+
+TEST_F(VideoProcessingModuleTest, Deflickering)
+{
+    enum { NumRuns = 30 };
+    WebRtc_UWord32 frameNum = 0;
+    const WebRtc_UWord32 frameRate = 15;
+
+    WebRtc_Word64 minRuntime = 0;
+    WebRtc_Word64 avgRuntime = 0;
+
+    // Close automatically opened Foreman.
+    fclose(_sourceFile);
+    const std::string input_file =
+        webrtc::test::ResourcePath("deflicker_before_cif_short", "yuv");
+    _sourceFile  = fopen(input_file.c_str(), "rb");
+    ASSERT_TRUE(_sourceFile != NULL) <<
+        "Cannot read input file: " << input_file << "\n";
+
+    const std::string output_file =
+        webrtc::test::OutputPath() + "deflicker_output_cif_short.yuv";
+    FILE* deflickerFile = fopen(output_file.c_str(), "wb");
+    ASSERT_TRUE(deflickerFile != NULL) <<
+        "Could not open output file: " << output_file << "\n";
+
+    printf("\nRun time [us / frame]:\n");
+    for (WebRtc_UWord32 runIdx = 0; runIdx < NumRuns; runIdx++)
+    {
+        TickTime t0;
+        TickTime t1;
+        TickInterval accTicks;
+        WebRtc_UWord32 timeStamp = 1;
+
+        frameNum = 0;
+        while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength)
+        {
+            frameNum++;
+            _videoFrame.SetTimeStamp(timeStamp);
+
+            t0 = TickTime::Now();
+            VideoProcessingModule::FrameStats stats;
+            ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+            ASSERT_EQ(0, _vpm->Deflickering(_videoFrame, stats));
+            t1 = TickTime::Now();
+            accTicks += t1 - t0;
+
+            if (runIdx == 0)
+            {
+              if (fwrite(_videoFrame.Buffer(), 1, _frameLength,
+                         deflickerFile) !=  _frameLength) {
+                return;
+              }
+            }
+            timeStamp += (90000 / frameRate);
+        }
+        ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";
+
+        printf("%u\n", static_cast<int>(accTicks.Microseconds() / frameNum));
+        if (accTicks.Microseconds() < minRuntime || runIdx == 0)
+        {
+            minRuntime = accTicks.Microseconds();
+        }
+        avgRuntime += accTicks.Microseconds();
+
+        rewind(_sourceFile);
+    }
+    ASSERT_EQ(0, fclose(deflickerFile));
+    // TODO(kjellander): Add verification of deflicker output file.
+
+    printf("\nAverage run time = %d us / frame\n",
+        static_cast<int>(avgRuntime / frameNum / NumRuns));
+    printf("Min run time = %d us / frame\n\n",
+        static_cast<int>(minRuntime / frameNum));
+}
+
+}  // namespace webrtc
diff --git a/src/modules/video_processing/main/test/unit_test/denoising_test.cc b/src/modules/video_processing/main/test/unit_test/denoising_test.cc
new file mode 100644
index 0000000..0787f1d
--- /dev/null
+++ b/src/modules/video_processing/main/test/unit_test/denoising_test.cc
@@ -0,0 +1,133 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cstdio>
+#include <cstdlib>
+
+#include "modules/video_processing/main/interface/video_processing.h"
+#include "modules/video_processing/main/test/unit_test/unit_test.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "testsupport/fileutils.h"
+
+namespace webrtc {
+
+TEST_F(VideoProcessingModuleTest, Denoising)
+{
+    enum { NumRuns = 10 };
+    WebRtc_UWord32 frameNum = 0;
+
+    WebRtc_Word64 minRuntime = 0;
+    WebRtc_Word64 avgRuntime = 0;
+
+    const std::string denoise_filename =
+        webrtc::test::OutputPath() + "denoise_testfile.yuv";
+    FILE* denoiseFile = fopen(denoise_filename.c_str(), "wb");
+    ASSERT_TRUE(denoiseFile != NULL) <<
+        "Could not open output file: " << denoise_filename << "\n";
+
+    const std::string noise_filename =
+        webrtc::test::OutputPath() + "noise_testfile.yuv";
+    FILE* noiseFile = fopen(noise_filename.c_str(), "wb");
+    ASSERT_TRUE(noiseFile != NULL) <<
+        "Could not open noisy file: " << noise_filename << "\n";
+
+    printf("\nRun time [us / frame]:\n");
+    for (WebRtc_UWord32 runIdx = 0; runIdx < NumRuns; runIdx++)
+    {
+        TickTime t0;
+        TickTime t1;
+        TickInterval accTicks;
+        WebRtc_Word32 modifiedPixels = 0;
+
+        frameNum = 0;
+        while (fread(_videoFrame.Buffer(), 1, _frameLength, _sourceFile) == _frameLength)
+        {
+            frameNum++;
+            WebRtc_UWord8* sourceBuffer = _videoFrame.Buffer();
+
+            // Add noise to a part in video stream
+            // Random noise
+            // TODO: investigate the effectiveness of this test.
+
+            //for(WebRtc_UWord32 ir = 0; ir < _frameLength; ir++)
+            //    sourceBuffer[ir] = 128
+            for (WebRtc_UWord32 ir = 0; ir < _height; ir++)
+            {
+                WebRtc_UWord32 ik = ir * _width;
+                for (WebRtc_UWord32 ic = 0; ic < _width; ic++)
+                {
+                    WebRtc_UWord8 r = rand() % 16;
+                    r -= 8;
+                    if (ir < _height / 4)
+                        r = 0;
+                    if (ir >= 3 * _height / 4)
+                        r = 0;
+                    if (ic < _width / 4)
+                        r = 0;
+                    if (ic >= 3 * _width / 4)
+                        r = 0;
+
+                    /*WebRtc_UWord8 pixelValue = 0;
+                    if (ir >= _height / 2)
+                    { // Region 3 or 4
+                        pixelValue = 170;
+                    }
+                    if (ic >= _width / 2)
+                    { // Region 2 or 4
+                        pixelValue += 85;
+                    }
+                    pixelValue += r;
+                    sourceBuffer[ik + ic] = pixelValue;
+                    */
+                    sourceBuffer[ik + ic] += r;
+                }
+            }
+
+            if (runIdx == 0)
+            {
+              if (fwrite(_videoFrame.Buffer(), 1, _frameLength,
+                         noiseFile) !=  _frameLength) {
+                return;
+              }
+            }
+
+            t0 = TickTime::Now();
+            ASSERT_GE(modifiedPixels = _vpm->Denoising(_videoFrame), 0);
+            t1 = TickTime::Now();
+            accTicks += t1 - t0;
+
+            if (runIdx == 0)
+            {
+              if (fwrite(_videoFrame.Buffer(), 1, _frameLength,
+                         denoiseFile) !=  _frameLength) {
+                return;
+              }
+            }
+        }
+        ASSERT_NE(0, feof(_sourceFile)) << "Error reading source file";
+
+        printf("%u\n", static_cast<int>(accTicks.Microseconds() / frameNum));
+        if (accTicks.Microseconds() < minRuntime || runIdx == 0)
+        {
+            minRuntime = accTicks.Microseconds();
+        }
+        avgRuntime += accTicks.Microseconds();
+
+        rewind(_sourceFile);
+    }
+    ASSERT_EQ(0, fclose(denoiseFile));
+    ASSERT_EQ(0, fclose(noiseFile));
+    printf("\nAverage run time = %d us / frame\n",
+        static_cast<int>(avgRuntime / frameNum / NumRuns));
+    printf("Min run time = %d us / frame\n\n",
+        static_cast<int>(minRuntime / frameNum));
+}
+
+}  // namespace webrtc
diff --git a/src/modules/video_processing/main/test/unit_test/readYUV420file.m b/src/modules/video_processing/main/test/unit_test/readYUV420file.m
new file mode 100644
index 0000000..03013ef
--- /dev/null
+++ b/src/modules/video_processing/main/test/unit_test/readYUV420file.m
@@ -0,0 +1,45 @@
+function [Y,U,V] = readYUV420file(filename, width, height)
+% [Y,U,V] = readYUVfile(filename, width, height)
+
+fid = fopen(filename,'rb');
+if fid==-1
+    error(['Cannot open file ' filename]);
+end
+
+% Number of pixels per image
+nPx=width*height;
+
+% nPx bytes luminance, nPx/4 bytes U, nPx/4 bytes V
+frameSizeBytes = nPx*1.5; 
+
+% calculate number of frames
+fseek(fid,0,'eof'); % move to end of file
+fileLen=ftell(fid); % number of bytes
+fseek(fid,0,'bof'); % rewind to start
+
+% calculate number of frames
+numFrames = floor(fileLen/frameSizeBytes);
+
+Y=uint8(zeros(height,width,numFrames));
+U=uint8(zeros(height/2,width/2,numFrames));
+V=uint8(zeros(height/2,width/2,numFrames));
+
+[X,nBytes]=fread(fid, frameSizeBytes, 'uchar');
+
+for k=1:numFrames
+    
+    % Store luminance
+    Y(:,:,k)=uint8(reshape(X(1:nPx), width, height).');
+    
+    % Store U channel
+    U(:,:,k)=uint8(reshape(X(nPx + (1:nPx/4)), width/2, height/2).');
+
+    % Store V channel
+    V(:,:,k)=uint8(reshape(X(nPx + nPx/4 + (1:nPx/4)), width/2, height/2).');
+    
+    % Read next frame
+    [X,nBytes]=fread(fid, frameSizeBytes, 'uchar');
+end
+
+    
+fclose(fid);
diff --git a/src/modules/video_processing/main/test/unit_test/unit_test.cc b/src/modules/video_processing/main/test/unit_test/unit_test.cc
new file mode 100644
index 0000000..c6fdb2b
--- /dev/null
+++ b/src/modules/video_processing/main/test/unit_test/unit_test.cc
@@ -0,0 +1,396 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_processing/main/test/unit_test/unit_test.h"
+
+#include <string>
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "testsupport/fileutils.h"
+
+namespace webrtc {
+
+// The |sourceFrame| is scaled to |target_width|,|target_height|, using the
+// filter mode set to |mode|. The |expected_psnr| is used to verify basic
+// quality when the resampled frame is scaled back up/down to the
+// original/source size. |expected_psnr| is set to be  ~0.1/0.05dB lower than
+// actual PSNR verified under the same conditions.
+void TestSize(const VideoFrame& sourceFrame, int target_width,
+              int target_height, int mode, double expected_psnr,
+              VideoProcessingModule* vpm);
+
+VideoProcessingModuleTest::VideoProcessingModuleTest() :
+  _vpm(NULL),
+  _sourceFile(NULL),
+  _width(352),
+  _height(288),
+  _frameLength(CalcBufferSize(kI420, 352, 288))
+{
+}
+
+void VideoProcessingModuleTest::SetUp()
+{
+  _vpm = VideoProcessingModule::Create(0);
+  ASSERT_TRUE(_vpm != NULL);
+
+  ASSERT_EQ(0, _videoFrame.VerifyAndAllocate(_frameLength));
+  _videoFrame.SetWidth(_width);
+  _videoFrame.SetHeight(_height);
+
+  const std::string video_file =
+      webrtc::test::ResourcePath("foreman_cif", "yuv");
+  _sourceFile  = fopen(video_file.c_str(),"rb");
+  ASSERT_TRUE(_sourceFile != NULL) <<
+      "Cannot read source file: " + video_file + "\n";
+}
+
+void VideoProcessingModuleTest::TearDown()
+{
+  if (_sourceFile != NULL)  {
+    ASSERT_EQ(0, fclose(_sourceFile));
+  }
+  _sourceFile = NULL;
+
+  if (_vpm != NULL)  {
+    VideoProcessingModule::Destroy(_vpm);
+  }
+  _vpm = NULL;
+}
+
+TEST_F(VideoProcessingModuleTest, HandleNullBuffer)
+{
+  VideoProcessingModule::FrameStats stats;
+  ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+  // Video frame with unallocated buffer.
+  VideoFrame videoFrame;
+  videoFrame.SetWidth(_width);
+  videoFrame.SetHeight(_height);
+
+  EXPECT_EQ(-3, _vpm->GetFrameStats(stats, NULL, _width, _height));
+  EXPECT_EQ(-3, _vpm->GetFrameStats(stats, videoFrame));
+
+  EXPECT_EQ(-1, _vpm->ColorEnhancement(NULL, _width, _height));
+  EXPECT_EQ(-1, _vpm->ColorEnhancement(videoFrame));
+
+  EXPECT_EQ(-1, _vpm->Deflickering(NULL, _width, _height, 0, stats));
+  EXPECT_EQ(-1, _vpm->Deflickering(videoFrame, stats));
+
+  EXPECT_EQ(-1, _vpm->Denoising(NULL, _width, _height));
+  EXPECT_EQ(-1, _vpm->Denoising(videoFrame));
+
+  EXPECT_EQ(-3, _vpm->BrightnessDetection(NULL, _width, _height, stats));
+  EXPECT_EQ(-3, _vpm->BrightnessDetection(videoFrame, stats));
+
+  EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->PreprocessFrame(NULL, NULL));
+}
+
+TEST_F(VideoProcessingModuleTest, HandleBadStats)
+{
+  VideoProcessingModule::FrameStats stats;
+
+  ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
+                                _sourceFile));
+
+  EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame.Buffer(), _width, _height, 0,
+                                   stats));
+  EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame, stats));
+
+  EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame.Buffer(), _width,
+                                          _height, stats));
+  EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame, stats));
+}
+
+TEST_F(VideoProcessingModuleTest, HandleBadSize)
+{
+  VideoProcessingModule::FrameStats stats;
+  ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+
+  // Bad width
+  _videoFrame.SetWidth(0);
+  EXPECT_EQ(-3, _vpm->GetFrameStats(stats, _videoFrame.Buffer(), 0, _height));
+  EXPECT_EQ(-3, _vpm->GetFrameStats(stats, _videoFrame));
+
+  EXPECT_EQ(-1, _vpm->ColorEnhancement(_videoFrame.Buffer(), 0, _height));
+  EXPECT_EQ(-1, _vpm->ColorEnhancement(_videoFrame));
+
+  EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame.Buffer(), 0, _height, 0,
+                                   stats));
+  EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame, stats));
+
+  EXPECT_EQ(-1, _vpm->Denoising(_videoFrame.Buffer(), 0, _height));
+  EXPECT_EQ(-1, _vpm->Denoising(_videoFrame));
+
+  EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame.Buffer(), 0, _height,
+                                          stats));
+  EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame, stats));
+
+
+  // Bad height
+  _videoFrame.SetWidth(_width);
+  _videoFrame.SetHeight(0);
+  EXPECT_EQ(-3, _vpm->GetFrameStats(stats, _videoFrame.Buffer(), _width, 0));
+  EXPECT_EQ(-3, _vpm->GetFrameStats(stats, _videoFrame));
+
+  EXPECT_EQ(-1, _vpm->ColorEnhancement(_videoFrame.Buffer(), _width, 0));
+  EXPECT_EQ(-1, _vpm->ColorEnhancement(_videoFrame));
+
+  EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame.Buffer(), _width, 0, 0,
+                                   stats));
+  EXPECT_EQ(-1, _vpm->Deflickering(_videoFrame, stats));
+
+  EXPECT_EQ(-1, _vpm->Denoising(_videoFrame.Buffer(), _width, 0));
+  EXPECT_EQ(-1, _vpm->Denoising(_videoFrame));
+
+  EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame.Buffer(), _width, 0,
+                                          stats));
+  EXPECT_EQ(-3, _vpm->BrightnessDetection(_videoFrame, stats));
+
+  EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->SetTargetResolution(0,0,0));
+  EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->SetMaxFrameRate(0));
+
+  VideoFrame *outFrame = NULL;
+  EXPECT_EQ(VPM_PARAMETER_ERROR, _vpm->PreprocessFrame(&_videoFrame,
+                                                       &outFrame));
+}
+
+TEST_F(VideoProcessingModuleTest, IdenticalResultsAfterReset)
+{
+  VideoFrame videoFrame2;
+  VideoProcessingModule::FrameStats stats;
+
+  ASSERT_EQ(0, videoFrame2.VerifyAndAllocate(_frameLength));
+  videoFrame2.SetWidth(_width);
+  videoFrame2.SetHeight(_height);
+
+  // Only testing non-static functions here.
+  ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
+                                _sourceFile));
+  ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+  memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength);
+  ASSERT_EQ(0, _vpm->Deflickering(_videoFrame, stats));
+  _vpm->Reset();
+  // Retrieve frame stats again in case Deflickering() has zeroed them.
+  ASSERT_EQ(0, _vpm->GetFrameStats(stats, videoFrame2));
+  ASSERT_EQ(0, _vpm->Deflickering(videoFrame2, stats));
+  EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), videoFrame2.Buffer(),
+                      _frameLength));
+
+  ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
+                                _sourceFile));
+  memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength);
+  ASSERT_GE(_vpm->Denoising(_videoFrame), 0);
+  _vpm->Reset();
+  ASSERT_GE(_vpm->Denoising(videoFrame2), 0);
+  EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), videoFrame2.Buffer(),
+                      _frameLength));
+
+  ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
+                                _sourceFile));
+  ASSERT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+  memcpy(videoFrame2.Buffer(), _videoFrame.Buffer(), _frameLength);
+  ASSERT_EQ(0, _vpm->BrightnessDetection(_videoFrame, stats));
+  _vpm->Reset();
+  ASSERT_EQ(0, _vpm->BrightnessDetection(videoFrame2, stats));
+  EXPECT_EQ(0, memcmp(_videoFrame.Buffer(), videoFrame2.Buffer(),
+                      _frameLength));
+}
+
+TEST_F(VideoProcessingModuleTest, FrameStats)
+{
+  VideoProcessingModule::FrameStats stats;
+  ASSERT_EQ(_frameLength, fread(_videoFrame.Buffer(), 1, _frameLength,
+                                _sourceFile));
+
+  EXPECT_FALSE(_vpm->ValidFrameStats(stats));
+  EXPECT_EQ(0, _vpm->GetFrameStats(stats, _videoFrame));
+  EXPECT_TRUE(_vpm->ValidFrameStats(stats));
+
+  printf("\nFrameStats\n");
+  printf("mean: %u\nnumPixels: %u\nsubSamplWidth: "
+         "%u\nsumSamplHeight: %u\nsum: %u\n\n",
+         static_cast<unsigned int>(stats.mean),
+         static_cast<unsigned int>(stats.numPixels),
+         static_cast<unsigned int>(stats.subSamplHeight),
+         static_cast<unsigned int>(stats.subSamplWidth),
+         static_cast<unsigned int>(stats.sum));
+
+  _vpm->ClearFrameStats(stats);
+  EXPECT_FALSE(_vpm->ValidFrameStats(stats));
+}
+
+TEST_F(VideoProcessingModuleTest, PreprocessorLogic)
+{
+  // Disable temporal sampling
+  _vpm->EnableTemporalDecimation(false);
+  ASSERT_EQ(VPM_OK, _vpm->SetMaxFrameRate(30));
+  ASSERT_EQ(VPM_OK, _vpm->SetTargetResolution(100, 100, 15));
+  // Revert
+  _vpm->EnableTemporalDecimation(true);
+  ASSERT_EQ(VPM_OK, _vpm->SetTargetResolution(100, 100, 30));
+  // Disable spatial sampling
+  _vpm->SetInputFrameResampleMode(kNoRescaling);
+  ASSERT_EQ(VPM_OK, _vpm->SetTargetResolution(100, 100, 30));
+  VideoFrame *outFrame = NULL;
+  ASSERT_EQ(VPM_OK, _vpm->PreprocessFrame(&_videoFrame, &outFrame));
+  // No rescaling=> output frame = NULL
+  ASSERT_TRUE(outFrame == NULL);
+}
+
+TEST_F(VideoProcessingModuleTest, Resampler)
+{
+  enum { NumRuns = 1 };
+
+  WebRtc_Word64 minRuntime = 0;
+  WebRtc_Word64 avgRuntime = 0;
+
+  TickTime t0;
+  TickTime t1;
+  TickInterval accTicks;
+  WebRtc_Word32 height = 288;
+  WebRtc_Word32 width = 352;
+  WebRtc_Word32 lengthSourceFrame = width*height*3/2;
+
+  rewind(_sourceFile);
+  ASSERT_TRUE(_sourceFile != NULL) <<
+      "Cannot read input file \n";
+
+  // CA not needed here
+  _vpm->EnableContentAnalysis(false);
+  // no temporal decimation
+  _vpm->EnableTemporalDecimation(false);
+
+  // Reading test frame
+  VideoFrame sourceFrame;
+  ASSERT_EQ(0, sourceFrame.VerifyAndAllocate(lengthSourceFrame));
+  EXPECT_GT(fread(sourceFrame.Buffer(), 1, lengthSourceFrame, _sourceFile), 0u);
+  ASSERT_EQ(0, sourceFrame.SetLength(lengthSourceFrame));
+  sourceFrame.SetHeight(height);
+  sourceFrame.SetWidth(width);
+
+  for (WebRtc_UWord32 runIdx = 0; runIdx < NumRuns; runIdx++)
+  {
+    // initiate test timer
+    t0 = TickTime::Now();
+
+    // Test scaling to different sizes: source is of |width|/|height| = 352/288.
+    // Scaling mode in VPM is currently fixed to kScaleBox (mode = 3).
+    TestSize(sourceFrame, 100, 50, 3, 24.0, _vpm);
+    TestSize(sourceFrame, 352/4, 288/4, 3, 25.2, _vpm);
+    TestSize(sourceFrame, 352/2, 288/2, 3, 28.1, _vpm);
+    TestSize(sourceFrame, 352, 288, 3, -1, _vpm);  // no resampling.
+    TestSize(sourceFrame, 2*352, 2*288, 3, 32.2, _vpm);
+    TestSize(sourceFrame, 400, 256, 3, 31.3, _vpm);
+    TestSize(sourceFrame, 480, 640, 3, 32.15, _vpm);
+    TestSize(sourceFrame, 960, 720, 3, 32.2, _vpm);
+    TestSize(sourceFrame, 1280, 720, 3, 32.15, _vpm);
+    // Upsampling to odd size.
+    TestSize(sourceFrame, 501, 333, 3, 32.05, _vpm);
+    // Downsample to odd size.
+    TestSize(sourceFrame, 281, 175, 3, 29.3, _vpm);
+
+    // stop timer
+    t1 = TickTime::Now();
+    accTicks += t1 - t0;
+
+    if (accTicks.Microseconds() < minRuntime || runIdx == 0)  {
+      minRuntime = accTicks.Microseconds();
+    }
+    avgRuntime += accTicks.Microseconds();
+  }
+
+  sourceFrame.Free();
+
+  printf("\nAverage run time = %d us / frame\n",
+         //static_cast<int>(avgRuntime / frameNum / NumRuns));
+         static_cast<int>(avgRuntime));
+  printf("Min run time = %d us / frame\n\n",
+         //static_cast<int>(minRuntime / frameNum));
+         static_cast<int>(minRuntime));
+}
+
+void TestSize(const VideoFrame& source_frame, int target_width,
+              int target_height, int mode, double expected_psnr,
+              VideoProcessingModule* vpm) {
+  int source_width = source_frame.Width();
+  int source_height = source_frame.Height();
+  VideoFrame* out_frame = NULL;
+
+  ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(target_width, target_height, 30));
+  ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(&source_frame, &out_frame));
+
+  // If the frame was resampled (scale changed) then:
+  // (1) verify the new size and write out processed frame for viewing.
+  // (2) scale the resampled frame (|out_frame|) back to the original size and
+  // compute PSNR relative to |source_frame| (for automatic verification).
+  // (3) write out the processed frame for viewing.
+  if (target_width != static_cast<int>(source_width) ||
+      target_height != static_cast<int>(source_height))  {
+    int target_half_width = (target_width + 1) >> 1;
+    int target_half_height = (target_height + 1) >> 1;
+    int required_size_resampled = target_width * target_height +
+        2 * (target_half_width * target_half_height);
+    ASSERT_EQ(required_size_resampled, static_cast<int>(out_frame->Length()));
+
+    // Write the processed frame to file for visual inspection.
+    std::ostringstream filename;
+    filename << webrtc::test::OutputPath() << "Resampler_"<< mode << "_" <<
+        "from_" << source_width << "x" << source_height << "_to_" <<
+        target_width << "x" << target_height << "_30Hz_P420.yuv";
+    std::cout << "Watch " << filename.str() << " and verify that it is okay."
+        << std::endl;
+    FILE* stand_alone_file = fopen(filename.str().c_str(), "wb");
+    if (fwrite(out_frame->Buffer(), 1,
+               out_frame->Length(), stand_alone_file) != out_frame->Length()) {
+      fprintf(stderr, "Failed to write frame for scaling to width/height: "
+          " %d %d \n", target_width, target_height);
+      return;
+    }
+    fclose(stand_alone_file);
+
+    VideoFrame resampled_source_frame;
+    resampled_source_frame.CopyFrame(*out_frame);
+
+    // Scale |resampled_source_frame| back to original/source size.
+    ASSERT_EQ(VPM_OK, vpm->SetTargetResolution(source_width,
+                                               source_height,
+                                               30));
+    ASSERT_EQ(VPM_OK, vpm->PreprocessFrame(&resampled_source_frame,
+                                           &out_frame));
+
+    // Write the processed frame to file for visual inspection.
+    std::ostringstream filename2;
+    filename2 << webrtc::test::OutputPath() << "Resampler_"<< mode << "_" <<
+          "from_" << target_width << "x" << target_height << "_to_" <<
+          source_width << "x" << source_height << "_30Hz_P420.yuv";
+    std::cout << "Watch " << filename2.str() << " and verify that it is okay."
+                << std::endl;
+    stand_alone_file = fopen(filename2.str().c_str(), "wb");
+    if (fwrite(out_frame->Buffer(), 1,
+               out_frame->Length(), stand_alone_file) != out_frame->Length()) {
+      fprintf(stderr, "Failed to write frame for scaling to width/height "
+          "%d %d \n", source_width, source_height);
+      return;
+    }
+    fclose(stand_alone_file);
+
+    // Compute the PSNR and check expectation.
+    double psnr = I420PSNR(source_frame.Buffer(), out_frame->Buffer(),
+                           source_width, source_height);
+    EXPECT_GT(psnr, expected_psnr);
+    printf("PSNR: %f. PSNR is between source of size %d %d, and a modified "
+        "source which is scaled down/up to: %d %d, and back to source size \n",
+        psnr, source_width, source_height, target_width, target_height);
+
+    resampled_source_frame.Free();
+  }
+}
+
+}  // namespace webrtc
diff --git a/src/modules/video_processing/main/test/unit_test/unit_test.h b/src/modules/video_processing/main/test/unit_test/unit_test.h
new file mode 100644
index 0000000..2363e1a
--- /dev/null
+++ b/src/modules/video_processing/main/test/unit_test/unit_test.h
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_TEST_UNIT_TEST_VPM_UNIT_TEST_H
+#define WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_TEST_UNIT_TEST_VPM_UNIT_TEST_H
+
+#include "gtest/gtest.h"
+#include "modules/video_processing/main/interface/video_processing.h"
+#include "system_wrappers/interface/trace.h"
+#include "testsupport/fileutils.h"
+
+namespace webrtc {
+
+class VideoProcessingModuleTest : public ::testing::Test
+{
+protected:
+    VideoProcessingModuleTest();
+    virtual void SetUp();
+    virtual void TearDown();
+    static void SetUpTestCase()
+    {
+      Trace::CreateTrace();
+      std::string trace_file = webrtc::test::OutputPath() + "VPMTrace.txt";
+      ASSERT_EQ(0, Trace::SetTraceFile(trace_file.c_str()));
+    }
+    static void TearDownTestCase()
+    {
+      Trace::ReturnTrace();
+    }
+    VideoProcessingModule* _vpm;
+    FILE* _sourceFile;
+    VideoFrame _videoFrame;
+    const WebRtc_UWord32 _width;
+    const WebRtc_UWord32 _height;
+    const WebRtc_UWord32 _frameLength;
+};
+
+}  // namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_PROCESSING_MAIN_TEST_UNIT_TEST_VPM_UNIT_TEST_H
diff --git a/src/modules/video_processing/main/test/unit_test/writeYUV420file.m b/src/modules/video_processing/main/test/unit_test/writeYUV420file.m
new file mode 100644
index 0000000..69a8808
--- /dev/null
+++ b/src/modules/video_processing/main/test/unit_test/writeYUV420file.m
@@ -0,0 +1,22 @@
+function writeYUV420file(filename, Y, U, V)
+% writeYUV420file(filename, Y, U, V)
+
+fid = fopen(filename,'wb');
+if fid==-1
+    error(['Cannot open file ' filename]);
+end
+
+numFrames=size(Y,3);
+
+for k=1:numFrames
+   % Write luminance
+   fwrite(fid,uint8(Y(:,:,k).'), 'uchar');
+   
+   % Write U channel
+   fwrite(fid,uint8(U(:,:,k).'), 'uchar');
+   
+   % Write V channel
+   fwrite(fid,uint8(V(:,:,k).'), 'uchar');
+end
+
+fclose(fid);
diff --git a/src/modules/video_processing/main/test/vpm_tests.gypi b/src/modules/video_processing/main/test/vpm_tests.gypi
new file mode 100644
index 0000000..0888b1d
--- /dev/null
+++ b/src/modules/video_processing/main/test/vpm_tests.gypi
@@ -0,0 +1,39 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+     'target_name': 'video_processing_unittests',
+      'type': 'executable',
+      'dependencies': [
+        'video_processing',
+        'webrtc_utility',
+        '<(webrtc_root)/test/test.gyp:test_support_main',
+        '<(DEPTH)/testing/gtest.gyp:gtest',
+      ],
+      'sources': [
+        # headers
+        'unit_test/unit_test.h',
+        # sources
+        'unit_test/brightness_detection_test.cc',
+        'unit_test/color_enhancement_test.cc',
+        'unit_test/content_metrics_test.cc',
+        'unit_test/deflickering_test.cc',
+        'unit_test/denoising_test.cc',
+        'unit_test/unit_test.cc',
+      ], # sources
+    },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/modules/video_render/OWNERS b/src/modules/video_render/OWNERS
new file mode 100644
index 0000000..ac607bd
--- /dev/null
+++ b/src/modules/video_render/OWNERS
@@ -0,0 +1,4 @@
+mflodman@webrtc.org
+perkj@webrtc.org
+wu@webrtc.org
+mallinath@webrtc.org
diff --git a/src/modules/video_render/main/interface/video_render.h b/src/modules/video_render/main/interface/video_render.h
new file mode 100644
index 0000000..4fc7f2c
--- /dev/null
+++ b/src/modules/video_render/main/interface/video_render.h
@@ -0,0 +1,283 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_INTERFACE_VIDEO_RENDER_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_INTERFACE_VIDEO_RENDER_H_
+
+/*
+ * video_render.h
+ *
+ * This header file together with module.h and module_common_types.h
+ * contains all of the APIs that are needed for using the video render
+ * module class.
+ *
+ */
+
+#include "modules/interface/module.h"
+#include "modules/video_render/main/interface/video_render_defines.h"
+
+namespace webrtc {
+
+#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
+WebRtc_Word32 SetRenderAndroidVM(void* javaVM);
+#endif
+
+// Class definitions
+class VideoRender: public Module
+{
+public:
+    /*
+     *   Create a video render module object
+     *
+     *   id              - unique identifier of this video render module object
+     *   window          - pointer to the window to render to
+     *   fullscreen      - true if this is a fullscreen renderer
+     *   videoRenderType - type of renderer to create
+     */
+    static VideoRender
+            * CreateVideoRender(
+                                          const WebRtc_Word32 id,
+                                          void* window,
+                                          const bool fullscreen,
+                                          const VideoRenderType videoRenderType =
+                                                  kRenderDefault);
+
+    /*
+     *   Destroy a video render module object
+     *
+     *   module  - object to destroy
+     */
+    static void DestroyVideoRender(VideoRender* module);
+
+    /*
+     *   Change the unique identifier of this object
+     *
+     *   id      - new unique identifier of this video render module object
+     */
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id) = 0;
+
+    virtual WebRtc_Word32 TimeUntilNextProcess() = 0;
+    virtual WebRtc_Word32 Process() = 0;
+
+    /**************************************************************************
+     *
+     *   Window functions
+     *
+     ***************************************************************************/
+
+    /*
+     *   Get window for this renderer
+     */
+    virtual void* Window() = 0;
+
+    /*
+     *   Change render window
+     *
+     *   window      - the new render window, assuming same type as originally created.
+     */
+    virtual WebRtc_Word32 ChangeWindow(void* window) = 0;
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
+    /*
+     *   Add incoming render stream
+     *
+     *   streamID    - id of the stream to add
+     *   zOrder      - relative render order for the streams, 0 = on top
+     *   left        - position of the stream in the window, [0.0f, 1.0f]
+     *   top         - position of the stream in the window, [0.0f, 1.0f]
+     *   right       - position of the stream in the window, [0.0f, 1.0f]
+     *   bottom      - position of the stream in the window, [0.0f, 1.0f]
+     *
+     *   Return      - callback class to use for delivering new frames to render.
+     */
+    virtual VideoRenderCallback
+            * AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+                                      const WebRtc_UWord32 zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom) = 0;
+    /*
+     *   Delete incoming render stream
+     *
+     *   streamID    - id of the stream to add
+     */
+    virtual WebRtc_Word32
+            DeleteIncomingRenderStream(const WebRtc_UWord32 streamId) = 0;
+
+    /*
+     *   Add incoming render callback, used for external rendering
+     *
+     *   streamID     - id of the stream the callback is used for
+     *   renderObject - the VideoRenderCallback to use for this stream, NULL to remove
+     *
+     *   Return      - callback class to use for delivering new frames to render.
+     */
+    virtual WebRtc_Word32
+            AddExternalRenderCallback(const WebRtc_UWord32 streamId,
+                                      VideoRenderCallback* renderObject) = 0;
+
+    /*
+     *   Get the porperties for an incoming render stream
+     *
+     *   streamID    - [in] id of the stream to get properties for
+     *   zOrder      - [out] relative render order for the streams, 0 = on top
+     *   left        - [out] position of the stream in the window, [0.0f, 1.0f]
+     *   top         - [out] position of the stream in the window, [0.0f, 1.0f]
+     *   right       - [out] position of the stream in the window, [0.0f, 1.0f]
+     *   bottom      - [out] position of the stream in the window, [0.0f, 1.0f]
+     */
+    virtual WebRtc_Word32
+            GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
+                                              WebRtc_UWord32& zOrder,
+                                              float& left, float& top,
+                                              float& right, float& bottom) const = 0;
+    /*
+     *   The incoming frame rate to the module, not the rate rendered in the window.
+     */
+    virtual WebRtc_UWord32
+            GetIncomingFrameRate(const WebRtc_UWord32 streamId) = 0;
+
+    /*
+     *   Returns the number of incoming streams added to this render module
+     */
+    virtual WebRtc_UWord32 GetNumIncomingRenderStreams() const = 0;
+
+    /*
+     *   Returns true if this render module has the streamId added, false otherwise.
+     */
+    virtual bool
+            HasIncomingRenderStream(const WebRtc_UWord32 streamId) const = 0;
+
+    /*
+     *   Registers a callback to get raw images in the same time as sent
+     *   to the renderer. To be used for external rendering.
+     */
+    virtual WebRtc_Word32
+            RegisterRawFrameCallback(const WebRtc_UWord32 streamId,
+                                     VideoRenderCallback* callbackObj) = 0;
+
+    /*
+     * This method is usefull to get last rendered frame for the stream specified
+     */
+    virtual WebRtc_Word32
+            GetLastRenderedFrame(const WebRtc_UWord32 streamId,
+                                 VideoFrame &frame) const = 0;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    /*
+     *   Starts rendering the specified stream
+     */
+    virtual WebRtc_Word32 StartRender(const WebRtc_UWord32 streamId) = 0;
+
+    /*
+     *   Stops the renderer
+     */
+    virtual WebRtc_Word32 StopRender(const WebRtc_UWord32 streamId) = 0;
+
+    /*
+     *   Resets the renderer
+     *   No streams are removed. The state should be as after AddStream was called.
+     */
+    virtual WebRtc_Word32 ResetRender() = 0;
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    /*
+     *   Returns the preferred render video type
+     */
+    virtual RawVideoType PreferredVideoType() const = 0;
+
+    /*
+     *   Returns true if the renderer is in fullscreen mode, otherwise false.
+     */
+    virtual bool IsFullScreen() = 0;
+
+    /*
+     *   Gets screen resolution in pixels
+     */
+    virtual WebRtc_Word32
+            GetScreenResolution(WebRtc_UWord32& screenWidth,
+                                WebRtc_UWord32& screenHeight) const = 0;
+
+    /*
+     *   Get the actual render rate for this stream. I.e rendered frame rate,
+     *   not frames delivered to the renderer.
+     */
+    virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId) = 0;
+
+    /*
+     *   Set cropping of incoming stream
+     */
+    virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom) = 0;
+
+    /*
+     * re-configure renderer
+     */
+    virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
+                                            const unsigned int zOrder,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom) = 0;
+
+    virtual WebRtc_Word32 SetTransparentBackground(const bool enable) = 0;
+
+    virtual WebRtc_Word32 FullScreenRender(void* window, const bool enable) = 0;
+
+    virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+                                    const WebRtc_UWord8 pictureId,
+                                    const void* colorKey, const float left,
+                                    const float top, const float right,
+                                    const float bottom) = 0;
+
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+                                  const WebRtc_UWord8* text,
+                                  const WebRtc_Word32 textLength,
+                                  const WebRtc_UWord32 textColorRef,
+                                  const WebRtc_UWord32 backgroundColorRef,
+                                  const float left, const float top,
+                                  const float right, const float bottom) = 0;
+
+    /*
+     * Set a start image. The image is rendered before the first image has been delivered
+     */
+    virtual WebRtc_Word32
+            SetStartImage(const WebRtc_UWord32 streamId,
+                          const VideoFrame& videoFrame) = 0;
+
+    /*
+     * Set a timout image. The image is rendered if no videoframe has been delivered
+     */
+    virtual WebRtc_Word32 SetTimeoutImage(const WebRtc_UWord32 streamId,
+                                          const VideoFrame& videoFrame,
+                                          const WebRtc_UWord32 timeout)= 0;
+
+    virtual WebRtc_Word32 MirrorRenderStream(const int renderId,
+                                             const bool enable,
+                                             const bool mirrorXAxis,
+                                             const bool mirrorYAxis) = 0;
+};
+} //namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_INTERFACE_VIDEO_RENDER_H_
diff --git a/src/modules/video_render/main/interface/video_render_defines.h b/src/modules/video_render/main/interface/video_render_defines.h
new file mode 100644
index 0000000..b1034a6
--- /dev/null
+++ b/src/modules/video_render/main/interface/video_render_defines.h
@@ -0,0 +1,92 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_INTERFACE_VIDEO_RENDER_DEFINES_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_INTERFACE_VIDEO_RENDER_DEFINES_H_
+
+// Includes
+#include "common_types.h"
+#include "modules/interface/module_common_types.h"
+
+namespace webrtc
+{
+// Defines
+#ifndef NULL
+#define NULL    0
+#endif
+
+// Enums
+enum VideoRenderType
+{
+    kRenderExternal = 0, // External
+    kRenderWindows = 1, // Windows
+    kRenderCocoa = 2, // Mac
+    kRenderCarbon = 3,
+    kRenderiPhone = 4, // iPhone
+    kRenderAndroid = 5, // Android
+    kRenderX11 = 6, // Linux
+    kRenderDefault
+};
+
+// Runtime errors
+enum VideoRenderError
+{
+    kRenderShutDown = 0,
+    kRenderPerformanceAlarm = 1
+};
+
+// The object a module user uses to send new frames to the renderer
+// One object is used for each incoming stream
+class VideoRenderCallback
+{
+public:
+    virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
+                                      VideoFrame& videoFrame) = 0;
+
+protected:
+    virtual ~VideoRenderCallback()
+    {
+    }
+};
+
+// Feedback class to be implemented by module user
+class VideoRenderFeedback
+{
+public:
+    virtual void OnRenderError(const WebRtc_Word32 streamId,
+                               const VideoRenderError error) = 0;
+
+protected:
+    virtual ~VideoRenderFeedback()
+    {
+    }
+};
+
+// Mobile enums
+enum StretchMode
+{
+    kStretchToInsideEdge = 1,
+    kStretchToOutsideEdge = 2,
+    kStretchMatchWidth = 3,
+    kStretchMatchHeight = 4,
+    kStretchNone = 5
+};
+
+enum Rotation
+{
+    kRotation0 = 0,
+    kRotation90 = 1,
+    kRotation180 = 2,
+    kRotation270 = 3
+};
+
+} //namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_INTERFACE_VIDEO_RENDER_DEFINES_H_
diff --git a/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViEAndroidGLES20.java b/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViEAndroidGLES20.java
new file mode 100644
index 0000000..c3471d5
--- /dev/null
+++ b/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViEAndroidGLES20.java
@@ -0,0 +1,370 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengine;
+
+import java.util.concurrent.locks.ReentrantLock;
+
+import javax.microedition.khronos.egl.EGL10;
+import javax.microedition.khronos.egl.EGLConfig;
+import javax.microedition.khronos.egl.EGLContext;
+import javax.microedition.khronos.egl.EGLDisplay;
+import javax.microedition.khronos.opengles.GL10;
+
+import android.app.ActivityManager;
+import android.content.Context;
+import android.content.pm.ConfigurationInfo;
+import android.graphics.PixelFormat;
+import android.opengl.GLSurfaceView;
+import android.util.Log;
+
+public class ViEAndroidGLES20 extends GLSurfaceView
+        implements GLSurfaceView.Renderer {
+    private static String TAG = "WEBRTC-JR";
+    private static final boolean DEBUG = false;
+    // True if onSurfaceCreated has been called.
+    private boolean surfaceCreated = false;
+    private boolean openGLCreated = false;
+    // True if NativeFunctionsRegistered has been called.
+    private boolean nativeFunctionsRegisted = false;
+    private ReentrantLock nativeFunctionLock = new ReentrantLock();
+    // Address of Native object that will do the drawing.
+    private long nativeObject = 0;
+    private int viewWidth = 0;
+    private int viewHeight = 0;
+
+    public static boolean UseOpenGL2(Object renderWindow) {
+        return ViEAndroidGLES20.class.isInstance(renderWindow);
+    }
+
+    public ViEAndroidGLES20(Context context) {
+        super(context);
+        init(false, 0, 0);
+    }
+
+    public ViEAndroidGLES20(Context context, boolean translucent,
+            int depth, int stencil) {
+        super(context);
+        init(translucent, depth, stencil);
+    }
+
+    private void init(boolean translucent, int depth, int stencil) {
+
+        // By default, GLSurfaceView() creates a RGB_565 opaque surface.
+        // If we want a translucent one, we should change the surface's
+        // format here, using PixelFormat.TRANSLUCENT for GL Surfaces
+        // is interpreted as any 32-bit surface with alpha by SurfaceFlinger.
+        if (translucent) {
+            this.getHolder().setFormat(PixelFormat.TRANSLUCENT);
+        }
+
+        // Setup the context factory for 2.0 rendering.
+        // See ContextFactory class definition below
+        setEGLContextFactory(new ContextFactory());
+
+        // We need to choose an EGLConfig that matches the format of
+        // our surface exactly. This is going to be done in our
+        // custom config chooser. See ConfigChooser class definition
+        // below.
+        setEGLConfigChooser( translucent ?
+                             new ConfigChooser(8, 8, 8, 8, depth, stencil) :
+                             new ConfigChooser(5, 6, 5, 0, depth, stencil) );
+
+        // Set the renderer responsible for frame rendering
+        this.setRenderer(this);
+        this.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
+    }
+
+    private static class ContextFactory implements GLSurfaceView.EGLContextFactory {
+        private static int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
+        public EGLContext createContext(EGL10 egl, EGLDisplay display, EGLConfig eglConfig) {
+            Log.w(TAG, "creating OpenGL ES 2.0 context");
+            checkEglError("Before eglCreateContext", egl);
+            int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
+            EGLContext context = egl.eglCreateContext(display, eglConfig,
+                    EGL10.EGL_NO_CONTEXT, attrib_list);
+            checkEglError("After eglCreateContext", egl);
+            return context;
+        }
+
+        public void destroyContext(EGL10 egl, EGLDisplay display, EGLContext context) {
+            egl.eglDestroyContext(display, context);
+        }
+    }
+
+    private static void checkEglError(String prompt, EGL10 egl) {
+        int error;
+        while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
+            Log.e(TAG, String.format("%s: EGL error: 0x%x", prompt, error));
+        }
+    }
+
+    private static class ConfigChooser implements GLSurfaceView.EGLConfigChooser {
+
+        public ConfigChooser(int r, int g, int b, int a, int depth, int stencil) {
+            mRedSize = r;
+            mGreenSize = g;
+            mBlueSize = b;
+            mAlphaSize = a;
+            mDepthSize = depth;
+            mStencilSize = stencil;
+        }
+
+        // This EGL config specification is used to specify 2.0 rendering.
+        // We use a minimum size of 4 bits for red/green/blue, but will
+        // perform actual matching in chooseConfig() below.
+        private static int EGL_OPENGL_ES2_BIT = 4;
+        private static int[] s_configAttribs2 =
+        {
+            EGL10.EGL_RED_SIZE, 4,
+            EGL10.EGL_GREEN_SIZE, 4,
+            EGL10.EGL_BLUE_SIZE, 4,
+            EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+            EGL10.EGL_NONE
+        };
+
+        public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
+
+            // Get the number of minimally matching EGL configurations
+            int[] num_config = new int[1];
+            egl.eglChooseConfig(display, s_configAttribs2, null, 0, num_config);
+
+            int numConfigs = num_config[0];
+
+            if (numConfigs <= 0) {
+                throw new IllegalArgumentException("No configs match configSpec");
+            }
+
+            // Allocate then read the array of minimally matching EGL configs
+            EGLConfig[] configs = new EGLConfig[numConfigs];
+            egl.eglChooseConfig(display, s_configAttribs2, configs, numConfigs, num_config);
+
+            if (DEBUG) {
+                printConfigs(egl, display, configs);
+            }
+            // Now return the "best" one
+            return chooseConfig(egl, display, configs);
+        }
+
+        public EGLConfig chooseConfig(EGL10 egl, EGLDisplay display,
+                EGLConfig[] configs) {
+            for(EGLConfig config : configs) {
+                int d = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_DEPTH_SIZE, 0);
+                int s = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_STENCIL_SIZE, 0);
+
+                // We need at least mDepthSize and mStencilSize bits
+                if (d < mDepthSize || s < mStencilSize)
+                    continue;
+
+                // We want an *exact* match for red/green/blue/alpha
+                int r = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_RED_SIZE, 0);
+                int g = findConfigAttrib(egl, display, config,
+                            EGL10.EGL_GREEN_SIZE, 0);
+                int b = findConfigAttrib(egl, display, config,
+                            EGL10.EGL_BLUE_SIZE, 0);
+                int a = findConfigAttrib(egl, display, config,
+                        EGL10.EGL_ALPHA_SIZE, 0);
+
+                if (r == mRedSize && g == mGreenSize && b == mBlueSize && a == mAlphaSize)
+                    return config;
+            }
+            return null;
+        }
+
+        private int findConfigAttrib(EGL10 egl, EGLDisplay display,
+                EGLConfig config, int attribute, int defaultValue) {
+
+            if (egl.eglGetConfigAttrib(display, config, attribute, mValue)) {
+                return mValue[0];
+            }
+            return defaultValue;
+        }
+
+        private void printConfigs(EGL10 egl, EGLDisplay display,
+            EGLConfig[] configs) {
+            int numConfigs = configs.length;
+            Log.w(TAG, String.format("%d configurations", numConfigs));
+            for (int i = 0; i < numConfigs; i++) {
+                Log.w(TAG, String.format("Configuration %d:\n", i));
+                printConfig(egl, display, configs[i]);
+            }
+        }
+
+        private void printConfig(EGL10 egl, EGLDisplay display,
+                EGLConfig config) {
+            int[] attributes = {
+                    EGL10.EGL_BUFFER_SIZE,
+                    EGL10.EGL_ALPHA_SIZE,
+                    EGL10.EGL_BLUE_SIZE,
+                    EGL10.EGL_GREEN_SIZE,
+                    EGL10.EGL_RED_SIZE,
+                    EGL10.EGL_DEPTH_SIZE,
+                    EGL10.EGL_STENCIL_SIZE,
+                    EGL10.EGL_CONFIG_CAVEAT,
+                    EGL10.EGL_CONFIG_ID,
+                    EGL10.EGL_LEVEL,
+                    EGL10.EGL_MAX_PBUFFER_HEIGHT,
+                    EGL10.EGL_MAX_PBUFFER_PIXELS,
+                    EGL10.EGL_MAX_PBUFFER_WIDTH,
+                    EGL10.EGL_NATIVE_RENDERABLE,
+                    EGL10.EGL_NATIVE_VISUAL_ID,
+                    EGL10.EGL_NATIVE_VISUAL_TYPE,
+                    0x3030, // EGL10.EGL_PRESERVED_RESOURCES,
+                    EGL10.EGL_SAMPLES,
+                    EGL10.EGL_SAMPLE_BUFFERS,
+                    EGL10.EGL_SURFACE_TYPE,
+                    EGL10.EGL_TRANSPARENT_TYPE,
+                    EGL10.EGL_TRANSPARENT_RED_VALUE,
+                    EGL10.EGL_TRANSPARENT_GREEN_VALUE,
+                    EGL10.EGL_TRANSPARENT_BLUE_VALUE,
+                    0x3039, // EGL10.EGL_BIND_TO_TEXTURE_RGB,
+                    0x303A, // EGL10.EGL_BIND_TO_TEXTURE_RGBA,
+                    0x303B, // EGL10.EGL_MIN_SWAP_INTERVAL,
+                    0x303C, // EGL10.EGL_MAX_SWAP_INTERVAL,
+                    EGL10.EGL_LUMINANCE_SIZE,
+                    EGL10.EGL_ALPHA_MASK_SIZE,
+                    EGL10.EGL_COLOR_BUFFER_TYPE,
+                    EGL10.EGL_RENDERABLE_TYPE,
+                    0x3042 // EGL10.EGL_CONFORMANT
+            };
+            String[] names = {
+                    "EGL_BUFFER_SIZE",
+                    "EGL_ALPHA_SIZE",
+                    "EGL_BLUE_SIZE",
+                    "EGL_GREEN_SIZE",
+                    "EGL_RED_SIZE",
+                    "EGL_DEPTH_SIZE",
+                    "EGL_STENCIL_SIZE",
+                    "EGL_CONFIG_CAVEAT",
+                    "EGL_CONFIG_ID",
+                    "EGL_LEVEL",
+                    "EGL_MAX_PBUFFER_HEIGHT",
+                    "EGL_MAX_PBUFFER_PIXELS",
+                    "EGL_MAX_PBUFFER_WIDTH",
+                    "EGL_NATIVE_RENDERABLE",
+                    "EGL_NATIVE_VISUAL_ID",
+                    "EGL_NATIVE_VISUAL_TYPE",
+                    "EGL_PRESERVED_RESOURCES",
+                    "EGL_SAMPLES",
+                    "EGL_SAMPLE_BUFFERS",
+                    "EGL_SURFACE_TYPE",
+                    "EGL_TRANSPARENT_TYPE",
+                    "EGL_TRANSPARENT_RED_VALUE",
+                    "EGL_TRANSPARENT_GREEN_VALUE",
+                    "EGL_TRANSPARENT_BLUE_VALUE",
+                    "EGL_BIND_TO_TEXTURE_RGB",
+                    "EGL_BIND_TO_TEXTURE_RGBA",
+                    "EGL_MIN_SWAP_INTERVAL",
+                    "EGL_MAX_SWAP_INTERVAL",
+                    "EGL_LUMINANCE_SIZE",
+                    "EGL_ALPHA_MASK_SIZE",
+                    "EGL_COLOR_BUFFER_TYPE",
+                    "EGL_RENDERABLE_TYPE",
+                    "EGL_CONFORMANT"
+            };
+            int[] value = new int[1];
+            for (int i = 0; i < attributes.length; i++) {
+                int attribute = attributes[i];
+                String name = names[i];
+                if (egl.eglGetConfigAttrib(display, config, attribute, value)) {
+                    Log.w(TAG, String.format("  %s: %d\n", name, value[0]));
+                } else {
+                    // Log.w(TAG, String.format("  %s: failed\n", name));
+                    while (egl.eglGetError() != EGL10.EGL_SUCCESS);
+                }
+            }
+        }
+
+        // Subclasses can adjust these values:
+        protected int mRedSize;
+        protected int mGreenSize;
+        protected int mBlueSize;
+        protected int mAlphaSize;
+        protected int mDepthSize;
+        protected int mStencilSize;
+        private int[] mValue = new int[1];
+    }
+
+    // IsSupported
+    // Return true if this device support Open GL ES 2.0 rendering.
+    public static boolean IsSupported(Context context) {
+        ActivityManager am =
+                (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
+        ConfigurationInfo info = am.getDeviceConfigurationInfo();
+        if(info.reqGlEsVersion >= 0x20000) {
+            // Open GL ES 2.0 is supported.
+            return true;
+        }
+        return false;
+    }
+
+    public void onDrawFrame(GL10 gl) {
+        nativeFunctionLock.lock();
+        if(!nativeFunctionsRegisted || !surfaceCreated) {
+            nativeFunctionLock.unlock();
+            return;
+        }
+
+        if(!openGLCreated) {
+            if(0 != CreateOpenGLNative(nativeObject, viewWidth, viewHeight)) {
+                return; // Failed to create OpenGL
+            }
+            openGLCreated = true; // Created OpenGL successfully
+        }
+        DrawNative(nativeObject); // Draw the new frame
+        nativeFunctionLock.unlock();
+    }
+
+    public void onSurfaceChanged(GL10 gl, int width, int height) {
+        surfaceCreated = true;
+        viewWidth = width;
+        viewHeight = height;
+
+        nativeFunctionLock.lock();
+        if(nativeFunctionsRegisted) {
+            if(CreateOpenGLNative(nativeObject,width,height) == 0)
+                openGLCreated = true;
+        }
+        nativeFunctionLock.unlock();
+    }
+
+    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
+    }
+
+    public void RegisterNativeObject(long nativeObject) {
+        nativeFunctionLock.lock();
+        this.nativeObject = nativeObject;
+        nativeFunctionsRegisted = true;
+        nativeFunctionLock.unlock();
+    }
+
+    public void DeRegisterNativeObject() {
+        nativeFunctionLock.lock();
+        nativeFunctionsRegisted = false;
+        openGLCreated = false;
+        this.nativeObject = 0;
+        nativeFunctionLock.unlock();
+    }
+
+    public void ReDraw() {
+        if(surfaceCreated) {
+            // Request the renderer to redraw using the render thread context.
+            this.requestRender();
+        }
+    }
+
+    private native int CreateOpenGLNative(long nativeObject,
+            int width, int height);
+    private native void DrawNative(long nativeObject);
+
+}
diff --git a/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViERenderer.java b/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViERenderer.java
new file mode 100644
index 0000000..d45fb81
--- /dev/null
+++ b/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViERenderer.java
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengine;
+
+import android.content.Context;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+
+public class ViERenderer {
+
+    // View used for local rendering that Cameras can use for Video Overlay.
+    private static SurfaceHolder g_localRenderer;
+
+    public static SurfaceView CreateRenderer(Context context) {
+        return  CreateRenderer(context,false);
+    }
+
+    public static SurfaceView CreateRenderer(Context context,
+            boolean useOpenGLES2) {
+        if(useOpenGLES2 == true && ViEAndroidGLES20.IsSupported(context))
+            return new ViEAndroidGLES20(context);
+        else
+            return new SurfaceView(context);
+    }
+
+    // Creates a SurfaceView to be used by Android Camera
+    // service to display a local preview.
+    // This needs to be used on Android prior to version 2.1
+    // in order to run the camera.
+    // Call this function before ViECapture::StartCapture.
+    // The created view needs to be added to a visible layout
+    // after a camera has been allocated
+    // (with the call ViECapture::AllocateCaptureDevice).
+    // IE.
+    // CreateLocalRenderer
+    // ViECapture::AllocateCaptureDevice
+    // LinearLayout.addview
+    // ViECapture::StartCapture
+    public static SurfaceView CreateLocalRenderer(Context context) {
+        SurfaceView localRender = new SurfaceView(context);
+        g_localRenderer = localRender.getHolder();
+        g_localRenderer.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
+        return  localRender;
+    }
+
+    public static SurfaceHolder GetLocalRenderer() {
+        return g_localRenderer;
+    }
+
+}
diff --git a/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViESurfaceRenderer.java b/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViESurfaceRenderer.java
new file mode 100644
index 0000000..1fda021
--- /dev/null
+++ b/src/modules/video_render/main/source/android/java/org/webrtc/videoengine/ViESurfaceRenderer.java
@@ -0,0 +1,184 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengine;
+
+// The following four imports are needed saveBitmapToJPEG which
+// is for debug only
+import java.io.ByteArrayOutputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import android.graphics.Bitmap;
+import android.graphics.Canvas;
+import android.graphics.Rect;
+import android.util.Log;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+import android.view.SurfaceHolder.Callback;
+
+public class ViESurfaceRenderer implements Callback {
+
+    private final static String TAG = "WEBRTC";
+
+    // the bitmap used for drawing.
+    private Bitmap bitmap = null;
+    private ByteBuffer byteBuffer = null;
+    private SurfaceHolder surfaceHolder;
+    // Rect of the source bitmap to draw
+    private Rect srcRect = new Rect();
+    // Rect of the destination canvas to draw to
+    private Rect dstRect = new Rect();
+    private float dstTopScale = 0;
+    private float dstBottomScale = 1;
+    private float dstLeftScale = 0;
+    private float dstRightScale = 1;
+
+    public ViESurfaceRenderer(SurfaceView view) {
+        surfaceHolder = view.getHolder();
+        if(surfaceHolder == null)
+            return;
+        surfaceHolder.addCallback(this);
+    }
+
+    // surfaceChanged and surfaceCreated share this function
+    private void changeDestRect(int dstWidth, int dstHeight) {
+        dstRect.right = (int)(dstRect.left + dstRightScale * dstWidth);
+        dstRect.bottom = (int)(dstRect.top + dstBottomScale * dstHeight);
+    }
+
+    public void surfaceChanged(SurfaceHolder holder, int format,
+            int in_width, int in_height) {
+        Log.d(TAG, "ViESurfaceRender::surfaceChanged");
+
+        changeDestRect(in_width, in_height);
+
+        Log.d(TAG, "ViESurfaceRender::surfaceChanged" +
+                " in_width:" + in_width + " in_height:" + in_height +
+                " srcRect.left:" + srcRect.left +
+                " srcRect.top:" + srcRect.top +
+                " srcRect.right:" + srcRect.right +
+                " srcRect.bottom:" + srcRect.bottom +
+                " dstRect.left:" + dstRect.left +
+                " dstRect.top:" + dstRect.top +
+                " dstRect.right:" + dstRect.right +
+                " dstRect.bottom:" + dstRect.bottom);
+    }
+
+    public void surfaceCreated(SurfaceHolder holder) {
+        Canvas canvas = surfaceHolder.lockCanvas();
+        if(canvas != null) {
+            Rect dst = surfaceHolder.getSurfaceFrame();
+            if(dst != null) {
+                changeDestRect(dst.right - dst.left, dst.bottom - dst.top);
+                Log.d(TAG, "ViESurfaceRender::surfaceCreated" +
+                        " dst.left:" + dst.left +
+                        " dst.top:" + dst.top +
+                        " dst.right:" + dst.right +
+                        " dst.bottom:" + dst.bottom +
+                        " srcRect.left:" + srcRect.left +
+                        " srcRect.top:" + srcRect.top +
+                        " srcRect.right:" + srcRect.right +
+                        " srcRect.bottom:" + srcRect.bottom +
+                        " dstRect.left:" + dstRect.left +
+                        " dstRect.top:" + dstRect.top +
+                        " dstRect.right:" + dstRect.right +
+                        " dstRect.bottom:" + dstRect.bottom);
+            }
+            surfaceHolder.unlockCanvasAndPost(canvas);
+        }
+    }
+
+    public void surfaceDestroyed(SurfaceHolder holder) {
+        Log.d(TAG, "ViESurfaceRenderer::surfaceDestroyed");
+        bitmap = null;
+        byteBuffer = null;
+    }
+
+    public Bitmap CreateBitmap(int width, int height) {
+        Log.d(TAG, "CreateByteBitmap " + width + ":" + height);
+        if (bitmap == null) {
+            try {
+                android.os.Process.setThreadPriority(
+                    android.os.Process.THREAD_PRIORITY_DISPLAY);
+            }
+            catch (Exception e) {
+            }
+        }
+        bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565);
+        srcRect.left = 0;
+        srcRect.top = 0;
+        srcRect.bottom = height;
+        srcRect.right = width;
+        return bitmap;
+    }
+
+    public ByteBuffer CreateByteBuffer(int width, int height) {
+        Log.d(TAG, "CreateByteBuffer " + width + ":" + height);
+        if (bitmap == null) {
+            bitmap = CreateBitmap(width, height);
+            byteBuffer = ByteBuffer.allocateDirect(width * height * 2);
+        }
+        return byteBuffer;
+    }
+
+    public void SetCoordinates(float left, float top,
+            float right, float bottom) {
+        Log.d(TAG, "SetCoordinates " + left + "," + top + ":" +
+                right + "," + bottom);
+        dstLeftScale = left;
+        dstTopScale = top;
+        dstRightScale = right;
+        dstBottomScale = bottom;
+    }
+
+    // It saves bitmap data to a JPEG picture, this function is for debug only.
+    private void saveBitmapToJPEG(int width, int height) {
+        ByteArrayOutputStream byteOutStream = new ByteArrayOutputStream();
+        bitmap.compress(Bitmap.CompressFormat.JPEG, 100, byteOutStream);
+
+        try{
+            FileOutputStream output = new FileOutputStream(String.format(
+                "/sdcard/render_%d.jpg", System.currentTimeMillis()));
+            output.write(byteOutStream.toByteArray());
+            output.flush();
+            output.close();
+        }
+        catch (FileNotFoundException e) {
+        }
+        catch (IOException e) {
+        }
+    }
+
+    public void DrawByteBuffer() {
+        if(byteBuffer == null)
+            return;
+        byteBuffer.rewind();
+        bitmap.copyPixelsFromBuffer(byteBuffer);
+        DrawBitmap();
+    }
+
+    public void DrawBitmap() {
+        if(bitmap == null)
+            return;
+
+        Canvas canvas = surfaceHolder.lockCanvas();
+        if(canvas != null) {
+            // The follow line is for debug only
+            // saveBitmapToJPEG(srcRect.right - srcRect.left,
+            //                  srcRect.bottom - srcRect.top);
+            canvas.drawBitmap(bitmap, srcRect, dstRect, null);
+            surfaceHolder.unlockCanvasAndPost(canvas);
+        }
+    }
+
+}
diff --git a/src/modules/video_render/main/source/android/video_render_android_impl.cc b/src/modules/video_render/main/source/android/video_render_android_impl.cc
new file mode 100644
index 0000000..9e709cb
--- /dev/null
+++ b/src/modules/video_render/main/source/android/video_render_android_impl.cc
@@ -0,0 +1,347 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_render_android_impl.h"
+
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+#include "tick_util.h"
+
+#ifdef ANDROID_LOG
+#include <stdio.h>
+#include <android/log.h>
+
+#undef WEBRTC_TRACE
+#define WEBRTC_TRACE(a,b,c,...)  __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
+#else
+#include "trace.h"
+#endif
+
+namespace webrtc {
+
+JavaVM* VideoRenderAndroid::g_jvm = NULL;
+
+#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
+WebRtc_Word32 SetRenderAndroidVM(void* javaVM) {
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1, "%s", __FUNCTION__);
+  VideoRenderAndroid::g_jvm = (JavaVM*)javaVM;
+  return 0;
+}
+#endif
+
+VideoRenderAndroid::VideoRenderAndroid(
+    const WebRtc_Word32 id,
+    const VideoRenderType videoRenderType,
+    void* window,
+    const bool /*fullscreen*/):
+    _id(id),
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _renderType(videoRenderType),
+    _ptrWindow((jobject)(window)),
+    _streamsMap(),
+    _javaShutDownFlag(false),
+    _javaShutdownEvent(*EventWrapper::Create()),
+    _javaRenderEvent(*EventWrapper::Create()),
+    _lastJavaRenderEvent(0),
+    _javaRenderJniEnv(NULL),
+    _javaRenderThread(NULL) {
+}
+
+VideoRenderAndroid::~VideoRenderAndroid() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+               "VideoRenderAndroid dtor");
+
+  if (_javaRenderThread)
+    StopRender();
+
+  for (MapItem* item = _streamsMap.First(); item != NULL; item
+           = _streamsMap.Next(item)) { // Delete streams
+    delete static_cast<AndroidStream*> (item->GetItem());
+  }
+  delete &_javaShutdownEvent;
+  delete &_javaRenderEvent;
+  delete &_critSect;
+}
+
+WebRtc_Word32 VideoRenderAndroid::ChangeUniqueId(const WebRtc_Word32 id) {
+  CriticalSectionScoped cs(&_critSect);
+  _id = id;
+
+  return 0;
+}
+
+WebRtc_Word32 VideoRenderAndroid::ChangeWindow(void* /*window*/) {
+  return -1;
+}
+
+VideoRenderCallback*
+VideoRenderAndroid::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+                                            const WebRtc_UWord32 zOrder,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom) {
+  CriticalSectionScoped cs(&_critSect);
+
+  AndroidStream* renderStream = NULL;
+  MapItem* item = _streamsMap.Find(streamId);
+  if (item) {
+    renderStream = (AndroidStream*) (item->GetItem());
+    if (NULL != renderStream) {
+      WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1,
+                   "%s: Render stream already exists", __FUNCTION__);
+      return renderStream;
+    }
+  }
+
+  renderStream = CreateAndroidRenderChannel(streamId, zOrder, left, top,
+                                            right, bottom, *this);
+  if (renderStream) {
+    _streamsMap.Insert(streamId, renderStream);
+  }
+  else {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__);
+    return NULL;
+  }
+  return renderStream;
+}
+
+WebRtc_Word32 VideoRenderAndroid::DeleteIncomingRenderStream(
+    const WebRtc_UWord32 streamId) {
+  CriticalSectionScoped cs(&_critSect);
+
+  MapItem* item = _streamsMap.Find(streamId);
+  if (item) {
+    delete (AndroidStream*) item->GetItem();
+    _streamsMap.Erase(streamId);
+  }
+  else {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "(%s:%d): renderStream is NULL", __FUNCTION__, __LINE__);
+    return -1;
+  }
+  return 0;
+}
+
+WebRtc_Word32 VideoRenderAndroid::GetIncomingRenderStreamProperties(
+    const WebRtc_UWord32 streamId,
+    WebRtc_UWord32& zOrder,
+    float& left,
+    float& top,
+    float& right,
+    float& bottom) const {
+  return -1;
+}
+
+WebRtc_Word32 VideoRenderAndroid::StartRender() {
+  CriticalSectionScoped cs(&_critSect);
+
+  if (_javaRenderThread) {
+    // StartRender is called when this stream should start render.
+    // However StopRender is not called when the streams stop rendering.
+    // Thus the the thread  is only deleted when the renderer is removed.
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
+                 "%s, Render thread already exist", __FUNCTION__);
+    return 0;
+  }
+
+  _javaRenderThread = ThreadWrapper::CreateThread(JavaRenderThreadFun, this,
+                                                  kRealtimePriority,
+                                                  "AndroidRenderThread");
+  if (!_javaRenderThread) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: No thread", __FUNCTION__);
+    return -1;
+  }
+
+  unsigned int tId = 0;
+  if (_javaRenderThread->Start(tId))
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+                 "%s: thread started: %u", __FUNCTION__, tId);
+  else {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: Could not start send thread", __FUNCTION__);
+    return -1;
+  }
+  return 0;
+}
+
+WebRtc_Word32 VideoRenderAndroid::StopRender() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:", __FUNCTION__);
+  {
+    CriticalSectionScoped cs(&_critSect);
+    if (!_javaRenderThread)
+    {
+      return -1;
+    }
+    _javaShutDownFlag = true;
+    _javaRenderEvent.Set();
+  }
+
+  _javaShutdownEvent.Wait(3000);
+  CriticalSectionScoped cs(&_critSect);
+  _javaRenderThread->SetNotAlive();
+  if (_javaRenderThread->Stop()) {
+    delete _javaRenderThread;
+    _javaRenderThread = NULL;
+  }
+  else {
+    assert(false);
+    WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                 "%s: Not able to stop thread, leaking", __FUNCTION__);
+    _javaRenderThread = NULL;
+  }
+  return 0;
+}
+
+void VideoRenderAndroid::ReDraw() {
+  CriticalSectionScoped cs(&_critSect);
+  // Allow redraw if it was more than 20ms since last.
+  if (_lastJavaRenderEvent < TickTime::MillisecondTimestamp() - 20) {
+    _lastJavaRenderEvent = TickTime::MillisecondTimestamp();
+    _javaRenderEvent.Set();
+  }
+}
+
+bool VideoRenderAndroid::JavaRenderThreadFun(void* obj) {
+  return static_cast<VideoRenderAndroid*> (obj)->JavaRenderThreadProcess();
+}
+
+bool VideoRenderAndroid::JavaRenderThreadProcess()
+{
+  _javaRenderEvent.Wait(1000);
+
+  CriticalSectionScoped cs(&_critSect);
+  if (!_javaRenderJniEnv) {
+    // try to attach the thread and get the env
+    // Attach this thread to JVM
+    jint res = g_jvm->AttachCurrentThread(&_javaRenderJniEnv, NULL);
+
+    // Get the JNI env for this thread
+    if ((res < 0) || !_javaRenderJniEnv) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                   "%s: Could not attach thread to JVM (%d, %p)",
+                   __FUNCTION__, res, _javaRenderJniEnv);
+      return false;
+    }
+  }
+
+  for (MapItem* item = _streamsMap.First(); item != NULL;
+       item = _streamsMap.Next(item)) {
+    static_cast<AndroidStream*> (item->GetItem())->DeliverFrame(
+        _javaRenderJniEnv);
+  }
+
+  if (_javaShutDownFlag) {
+    if (g_jvm->DetachCurrentThread() < 0)
+      WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                   "%s: Could not detach thread from JVM", __FUNCTION__);
+    else {
+      WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+                   "%s: Java thread detached", __FUNCTION__);
+    }
+    _javaRenderJniEnv = false;
+    _javaShutDownFlag = false;
+    _javaShutdownEvent.Set();
+    return false; // Do not run this thread again.
+  }
+  return true;
+}
+
+VideoRenderType VideoRenderAndroid::RenderType() {
+  return _renderType;
+}
+
+RawVideoType VideoRenderAndroid::PerferedVideoType() {
+  return kVideoI420;
+}
+
+bool VideoRenderAndroid::FullScreen() {
+  return false;
+}
+
+WebRtc_Word32 VideoRenderAndroid::GetGraphicsMemory(
+    WebRtc_UWord64& /*totalGraphicsMemory*/,
+    WebRtc_UWord64& /*availableGraphicsMemory*/) const {
+  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+               "%s - not supported on Android", __FUNCTION__);
+  return -1;
+}
+
+WebRtc_Word32 VideoRenderAndroid::GetScreenResolution(
+    WebRtc_UWord32& /*screenWidth*/,
+    WebRtc_UWord32& /*screenHeight*/) const {
+  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+               "%s - not supported on Android", __FUNCTION__);
+  return -1;
+}
+
+WebRtc_UWord32 VideoRenderAndroid::RenderFrameRate(
+    const WebRtc_UWord32 /*streamId*/) {
+  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+               "%s - not supported on Android", __FUNCTION__);
+  return -1;
+}
+
+WebRtc_Word32 VideoRenderAndroid::SetStreamCropping(
+    const WebRtc_UWord32 /*streamId*/,
+    const float /*left*/,
+    const float /*top*/,
+    const float /*right*/,
+    const float /*bottom*/) {
+  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+               "%s - not supported on Android", __FUNCTION__);
+  return -1;
+}
+
+WebRtc_Word32 VideoRenderAndroid::SetTransparentBackground(const bool enable) {
+  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+               "%s - not supported on Android", __FUNCTION__);
+  return -1;
+}
+
+WebRtc_Word32 VideoRenderAndroid::ConfigureRenderer(
+    const WebRtc_UWord32 streamId,
+    const unsigned int zOrder,
+    const float left,
+    const float top,
+    const float right,
+    const float bottom) {
+  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+               "%s - not supported on Android", __FUNCTION__);
+  return -1;
+}
+
+WebRtc_Word32 VideoRenderAndroid::SetText(
+    const WebRtc_UWord8 textId,
+    const WebRtc_UWord8* text,
+    const WebRtc_Word32 textLength,
+    const WebRtc_UWord32 textColorRef,
+    const WebRtc_UWord32 backgroundColorRef,
+    const float left, const float top,
+    const float rigth, const float bottom) {
+  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+               "%s - not supported on Android", __FUNCTION__);
+  return -1;
+}
+
+WebRtc_Word32 VideoRenderAndroid::SetBitmap(const void* bitMap,
+                                            const WebRtc_UWord8 pictureId,
+                                            const void* colorKey,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom) {
+  WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+               "%s - not supported on Android", __FUNCTION__);
+  return -1;
+}
+
+}  // namespace webrtc
diff --git a/src/modules/video_render/main/source/android/video_render_android_impl.h b/src/modules/video_render/main/source/android/video_render_android_impl.h
new file mode 100644
index 0000000..d3f140a
--- /dev/null
+++ b/src/modules/video_render/main/source/android/video_render_android_impl.h
@@ -0,0 +1,153 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
+
+#include <jni.h>
+#include "i_video_render.h"
+#include "map_wrapper.h"
+
+
+namespace webrtc {
+
+//#define ANDROID_LOG
+
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+
+// The object a module user uses to send new frames to the java renderer
+// Base class for android render streams.
+
+class AndroidStream : public VideoRenderCallback {
+ public:
+  // DeliverFrame is called from a thread connected to the Java VM.
+  // Used for Delivering frame for rendering.
+  virtual void DeliverFrame(JNIEnv* jniEnv)=0;
+
+  virtual ~AndroidStream() {};
+};
+
+class VideoRenderAndroid: IVideoRender {
+ public:
+  VideoRenderAndroid(const WebRtc_Word32 id,
+                     const VideoRenderType videoRenderType,
+                     void* window,
+                     const bool fullscreen);
+
+  virtual ~VideoRenderAndroid();
+
+  virtual WebRtc_Word32 Init()=0;
+
+  virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+  virtual WebRtc_Word32 ChangeWindow(void* window);
+
+  virtual VideoRenderCallback* AddIncomingRenderStream(
+      const WebRtc_UWord32 streamId,
+      const WebRtc_UWord32 zOrder,
+      const float left, const float top,
+      const float right, const float bottom);
+
+  virtual WebRtc_Word32 DeleteIncomingRenderStream(
+      const WebRtc_UWord32 streamId);
+
+  virtual WebRtc_Word32 GetIncomingRenderStreamProperties(
+      const WebRtc_UWord32 streamId,
+      WebRtc_UWord32& zOrder,
+      float& left, float& top,
+      float& right, float& bottom) const;
+
+  virtual WebRtc_Word32 StartRender();
+
+  virtual WebRtc_Word32 StopRender();
+
+  virtual void ReDraw();
+
+  // Properties
+
+  virtual VideoRenderType RenderType();
+
+  virtual RawVideoType PerferedVideoType();
+
+  virtual bool FullScreen();
+
+  virtual WebRtc_Word32 GetGraphicsMemory(
+      WebRtc_UWord64& totalGraphicsMemory,
+      WebRtc_UWord64& availableGraphicsMemory) const;
+
+  virtual WebRtc_Word32 GetScreenResolution(
+      WebRtc_UWord32& screenWidth,
+      WebRtc_UWord32& screenHeight) const;
+
+  virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
+
+  virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
+                                          const float left, const float top,
+                                          const float right,
+                                          const float bottom);
+
+  virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
+
+  virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
+                                          const unsigned int zOrder,
+                                          const float left, const float top,
+                                          const float right,
+                                          const float bottom);
+
+  virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+                                const WebRtc_UWord8* text,
+                                const WebRtc_Word32 textLength,
+                                const WebRtc_UWord32 textColorRef,
+                                const WebRtc_UWord32 backgroundColorRef,
+                                const float left, const float top,
+                                const float rigth, const float bottom);
+
+  virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+                                  const WebRtc_UWord8 pictureId,
+                                  const void* colorKey, const float left,
+                                  const float top, const float right,
+                                  const float bottom);
+  static JavaVM* g_jvm;
+
+ protected:
+  virtual AndroidStream* CreateAndroidRenderChannel(
+      WebRtc_Word32 streamId,
+      WebRtc_Word32 zOrder,
+      const float left,
+      const float top,
+      const float right,
+      const float bottom,
+      VideoRenderAndroid& renderer) = 0;
+
+  WebRtc_Word32 _id;
+  CriticalSectionWrapper& _critSect;
+  VideoRenderType _renderType;
+  jobject _ptrWindow;
+
+ private:
+  static bool JavaRenderThreadFun(void* obj);
+  bool JavaRenderThreadProcess();
+
+  // Map with streams to render.
+  MapWrapper _streamsMap;
+  // True if the _javaRenderThread thread shall be detached from the JVM.
+  bool _javaShutDownFlag;
+  EventWrapper& _javaShutdownEvent;
+  EventWrapper& _javaRenderEvent;
+  WebRtc_Word64 _lastJavaRenderEvent;
+  JNIEnv* _javaRenderJniEnv; // JNIEnv for the java render thread.
+  ThreadWrapper* _javaRenderThread;
+};
+
+} //namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_IMPL_H_
diff --git a/src/modules/video_render/main/source/android/video_render_android_native_opengl2.cc b/src/modules/video_render/main/source/android/video_render_android_native_opengl2.cc
new file mode 100644
index 0000000..0bcf676
--- /dev/null
+++ b/src/modules/video_render/main/source/android/video_render_android_native_opengl2.cc
@@ -0,0 +1,448 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_render_android_native_opengl2.h"
+#include "critical_section_wrapper.h"
+#include "tick_util.h"
+
+#ifdef ANDROID_LOG
+#include <stdio.h>
+#include <android/log.h>
+
+#undef WEBRTC_TRACE
+#define WEBRTC_TRACE(a,b,c,...)  __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__)
+#else
+#include "trace.h"
+#endif
+
+namespace webrtc {
+
+AndroidNativeOpenGl2Renderer::AndroidNativeOpenGl2Renderer(
+    const WebRtc_Word32 id,
+    const VideoRenderType videoRenderType,
+    void* window,
+    const bool fullscreen) :
+    VideoRenderAndroid(id, videoRenderType, window, fullscreen),
+    _javaRenderObj(NULL),
+    _javaRenderClass(NULL) {
+}
+
+bool AndroidNativeOpenGl2Renderer::UseOpenGL2(void* window) {
+  if (!g_jvm) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                 "RendererAndroid():UseOpenGL No JVM set.");
+    return false;
+  }
+  bool isAttached = false;
+  JNIEnv* env = NULL;
+  if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
+    // try to attach the thread and get the env
+    // Attach this thread to JVM
+    jint res = g_jvm->AttachCurrentThread(&env, NULL);
+
+    // Get the JNI env for this thread
+    if ((res < 0) || !env) {
+      WEBRTC_TRACE(
+          kTraceError,
+          kTraceVideoRenderer,
+          -1,
+          "RendererAndroid(): Could not attach thread to JVM (%d, %p)",
+          res, env);
+      return false;
+    }
+    isAttached = true;
+  }
+
+  // get the renderer class
+  jclass javaRenderClassLocal =
+      env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
+  if (!javaRenderClassLocal) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                 "%s: could not find ViEAndroidRenderer class",
+                 __FUNCTION__);
+    return false;
+  }
+
+  // get the method ID for UseOpenGL
+  jmethodID cidUseOpenGL = env->GetStaticMethodID(javaRenderClassLocal,
+                                                  "UseOpenGL2",
+                                                  "(Ljava/lang/Object;)Z");
+  if (cidUseOpenGL == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                 "%s: could not get UseOpenGL ID", __FUNCTION__);
+    return false;
+  }
+  jboolean res = env->CallStaticBooleanMethod(javaRenderClassLocal,
+                                              cidUseOpenGL, (jobject) window);
+
+  // Detach this thread if it was attached
+  if (isAttached) {
+    if (g_jvm->DetachCurrentThread() < 0) {
+      WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
+                   "%s: Could not detach thread from JVM", __FUNCTION__);
+    }
+  }
+  return res;
+}
+
+AndroidNativeOpenGl2Renderer::~AndroidNativeOpenGl2Renderer() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+               "AndroidNativeOpenGl2Renderer dtor");
+  if (g_jvm) {
+    // get the JNI env for this thread
+    bool isAttached = false;
+    JNIEnv* env = NULL;
+    if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
+      // try to attach the thread and get the env
+      // Attach this thread to JVM
+      jint res = g_jvm->AttachCurrentThread(&env, NULL);
+
+      // Get the JNI env for this thread
+      if ((res < 0) || !env) {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: Could not attach thread to JVM (%d, %p)",
+                     __FUNCTION__, res, env);
+        env = NULL;
+      }
+      else {
+        isAttached = true;
+      }
+    }
+
+    env->DeleteGlobalRef(_javaRenderObj);
+    env->DeleteGlobalRef(_javaRenderClass);
+
+    if (isAttached) {
+      if (g_jvm->DetachCurrentThread() < 0) {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                     "%s: Could not detach thread from JVM",
+                     __FUNCTION__);
+      }
+    }
+  }
+}
+
+WebRtc_Word32 AndroidNativeOpenGl2Renderer::Init() {
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
+  if (!g_jvm) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "(%s): Not a valid Java VM pointer.", __FUNCTION__);
+    return -1;
+  }
+  if (!_ptrWindow) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                 "(%s): No window have been provided.", __FUNCTION__);
+    return -1;
+  }
+
+  // get the JNI env for this thread
+  bool isAttached = false;
+  JNIEnv* env = NULL;
+  if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
+    // try to attach the thread and get the env
+    // Attach this thread to JVM
+    jint res = g_jvm->AttachCurrentThread(&env, NULL);
+
+    // Get the JNI env for this thread
+    if ((res < 0) || !env) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                   "%s: Could not attach thread to JVM (%d, %p)",
+                   __FUNCTION__, res, env);
+      return -1;
+    }
+    isAttached = true;
+  }
+
+  // get the ViEAndroidGLES20 class
+  jclass javaRenderClassLocal =
+      env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
+  if (!javaRenderClassLocal) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: could not find ViEAndroidGLES20", __FUNCTION__);
+    return -1;
+  }
+
+  // create a global reference to the class (to tell JNI that
+  // we are referencing it after this function has returned)
+  _javaRenderClass =
+      reinterpret_cast<jclass> (env->NewGlobalRef(javaRenderClassLocal));
+  if (!_javaRenderClass) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: could not create Java SurfaceHolder class reference",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  // Delete local class ref, we only use the global ref
+  env->DeleteLocalRef(javaRenderClassLocal);
+
+  // create a reference to the object (to tell JNI that we are referencing it
+  // after this function has returned)
+  _javaRenderObj = env->NewGlobalRef(_ptrWindow);
+  if (!_javaRenderObj) {
+    WEBRTC_TRACE(
+        kTraceError,
+        kTraceVideoRenderer,
+        _id,
+        "%s: could not create Java SurfaceRender object reference",
+        __FUNCTION__);
+    return -1;
+  }
+
+  // Detach this thread if it was attached
+  if (isAttached) {
+    if (g_jvm->DetachCurrentThread() < 0) {
+      WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                   "%s: Could not detach thread from JVM", __FUNCTION__);
+    }
+  }
+
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done",
+               __FUNCTION__);
+  return 0;
+
+}
+AndroidStream*
+AndroidNativeOpenGl2Renderer::CreateAndroidRenderChannel(
+    WebRtc_Word32 streamId,
+    WebRtc_Word32 zOrder,
+    const float left,
+    const float top,
+    const float right,
+    const float bottom,
+    VideoRenderAndroid& renderer) {
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: Id %d",
+               __FUNCTION__, streamId);
+  AndroidNativeOpenGl2Channel* stream =
+      new AndroidNativeOpenGl2Channel(streamId, g_jvm, renderer,
+                                      _javaRenderObj);
+  if (stream && stream->Init(zOrder, left, top, right, bottom) == 0)
+    return stream;
+  else {
+    delete stream;
+  }
+  return NULL;
+}
+
+AndroidNativeOpenGl2Channel::AndroidNativeOpenGl2Channel(
+    WebRtc_UWord32 streamId,
+    JavaVM* jvm,
+    VideoRenderAndroid& renderer,jobject javaRenderObj):
+    _id(streamId),
+    _renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _renderer(renderer), _jvm(jvm), _javaRenderObj(javaRenderObj),
+    _registerNativeCID(NULL), _deRegisterNativeCID(NULL),
+    _openGLRenderer(streamId) {
+
+}
+AndroidNativeOpenGl2Channel::~AndroidNativeOpenGl2Channel() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+               "AndroidNativeOpenGl2Channel dtor");
+  delete &_renderCritSect;
+  if (_jvm) {
+    // get the JNI env for this thread
+    bool isAttached = false;
+    JNIEnv* env = NULL;
+    if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
+      // try to attach the thread and get the env
+      // Attach this thread to JVM
+      jint res = _jvm->AttachCurrentThread(&env, NULL);
+
+      // Get the JNI env for this thread
+      if ((res < 0) || !env) {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: Could not attach thread to JVM (%d, %p)",
+                     __FUNCTION__, res, env);
+        env = NULL;
+      } else {
+        isAttached = true;
+      }
+    }
+    if (env && _deRegisterNativeCID) {
+      env->CallVoidMethod(_javaRenderObj, _deRegisterNativeCID);
+    }
+
+    if (isAttached) {
+      if (_jvm->DetachCurrentThread() < 0) {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                     "%s: Could not detach thread from JVM",
+                     __FUNCTION__);
+      }
+    }
+  }
+}
+
+WebRtc_Word32 AndroidNativeOpenGl2Channel::Init(WebRtc_Word32 zOrder,
+                                                const float left,
+                                                const float top,
+                                                const float right,
+                                                const float bottom)
+{
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
+               "%s: AndroidNativeOpenGl2Channel", __FUNCTION__);
+  if (!_jvm) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: Not a valid Java VM pointer", __FUNCTION__);
+    return -1;
+  }
+
+  // get the JNI env for this thread
+  bool isAttached = false;
+  JNIEnv* env = NULL;
+  if (_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
+    // try to attach the thread and get the env
+    // Attach this thread to JVM
+    jint res = _jvm->AttachCurrentThread(&env, NULL);
+
+    // Get the JNI env for this thread
+    if ((res < 0) || !env) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                   "%s: Could not attach thread to JVM (%d, %p)",
+                   __FUNCTION__, res, env);
+      return -1;
+    }
+    isAttached = true;
+  }
+
+  jclass javaRenderClass =
+      env->FindClass("org/webrtc/videoengine/ViEAndroidGLES20");
+  if (!javaRenderClass) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: could not find ViESurfaceRenderer", __FUNCTION__);
+    return -1;
+  }
+
+  // get the method ID for the ReDraw function
+  _redrawCid = env->GetMethodID(javaRenderClass, "ReDraw", "()V");
+  if (_redrawCid == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: could not get ReDraw ID", __FUNCTION__);
+    return -1;
+  }
+
+  _registerNativeCID = env->GetMethodID(javaRenderClass,
+                                        "RegisterNativeObject", "(J)V");
+  if (_registerNativeCID == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: could not get RegisterNativeObject ID", __FUNCTION__);
+    return -1;
+  }
+
+  _deRegisterNativeCID = env->GetMethodID(javaRenderClass,
+                                          "DeRegisterNativeObject", "()V");
+  if (_deRegisterNativeCID == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: could not get DeRegisterNativeObject ID",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  JNINativeMethod nativeFunctions[2] = {
+    { "DrawNative",
+      "(J)V",
+      (void*) &AndroidNativeOpenGl2Channel::DrawNativeStatic, },
+    { "CreateOpenGLNative",
+      "(JII)I",
+      (void*) &AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic },
+  };
+  if (env->RegisterNatives(javaRenderClass, nativeFunctions, 2) == 0) {
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, -1,
+                 "%s: Registered native functions", __FUNCTION__);
+  }
+  else {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                 "%s: Failed to register native functions", __FUNCTION__);
+    return -1;
+  }
+
+  env->CallVoidMethod(_javaRenderObj, _registerNativeCID, (jlong) this);
+
+  // Detach this thread if it was attached
+  if (isAttached) {
+    if (_jvm->DetachCurrentThread() < 0) {
+      WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
+                   "%s: Could not detach thread from JVM", __FUNCTION__);
+    }
+  }
+
+  if (_openGLRenderer.SetCoordinates(zOrder, left, top, right, bottom) != 0) {
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
+               "%s: AndroidNativeOpenGl2Channel done", __FUNCTION__);
+  return 0;
+}
+
+WebRtc_Word32 AndroidNativeOpenGl2Channel::RenderFrame(
+    const WebRtc_UWord32 /*streamId*/,
+    VideoFrame& videoFrame) {
+  //   WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
+  _renderCritSect.Enter();
+  _bufferToRender.SwapFrame(videoFrame);
+  _renderCritSect.Leave();
+  _renderer.ReDraw();
+  return 0;
+}
+
+/*Implements AndroidStream
+ * Calls the Java object and render the buffer in _bufferToRender
+ */
+void AndroidNativeOpenGl2Channel::DeliverFrame(JNIEnv* jniEnv) {
+  //TickTime timeNow=TickTime::Now();
+
+  //Draw the Surface
+  jniEnv->CallVoidMethod(_javaRenderObj, _redrawCid);
+
+  // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id,
+  // "%s: time to deliver %lld" ,__FUNCTION__,
+  // (TickTime::Now()-timeNow).Milliseconds());
+}
+
+/*
+ * JNI callback from Java class. Called when the render
+ * want to render a frame. Called from the GLRenderThread
+ * Method:    DrawNative
+ * Signature: (J)V
+ */
+void JNICALL AndroidNativeOpenGl2Channel::DrawNativeStatic(
+    JNIEnv * env, jobject, jlong context) {
+  AndroidNativeOpenGl2Channel* renderChannel =
+      reinterpret_cast<AndroidNativeOpenGl2Channel*>(context);
+  renderChannel->DrawNative();
+}
+
+void AndroidNativeOpenGl2Channel::DrawNative() {
+  _openGLRenderer.Render(_bufferToRender);
+}
+
+/*
+ * JNI callback from Java class. Called when the GLSurfaceview
+ * have created a surface. Called from the GLRenderThread
+ * Method:    CreateOpenGLNativeStatic
+ * Signature: (JII)I
+ */
+jint JNICALL AndroidNativeOpenGl2Channel::CreateOpenGLNativeStatic(
+    JNIEnv * env,
+    jobject,
+    jlong context,
+    jint width,
+    jint height) {
+  AndroidNativeOpenGl2Channel* renderChannel =
+      reinterpret_cast<AndroidNativeOpenGl2Channel*> (context);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1, "%s:", __FUNCTION__);
+  return renderChannel->CreateOpenGLNative(width, height);
+}
+
+jint AndroidNativeOpenGl2Channel::CreateOpenGLNative(
+    int width, int height) {
+  return _openGLRenderer.Setup(width, height);
+}
+
+}  //namespace webrtc
diff --git a/src/modules/video_render/main/source/android/video_render_android_native_opengl2.h b/src/modules/video_render/main/source/android/video_render_android_native_opengl2.h
new file mode 100644
index 0000000..c69f17d
--- /dev/null
+++ b/src/modules/video_render/main/source/android/video_render_android_native_opengl2.h
@@ -0,0 +1,99 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
+
+#include <jni.h>
+
+#include "video_render_defines.h"
+#include "video_render_android_impl.h"
+#include "video_render_opengles20.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+
+class AndroidNativeOpenGl2Channel: public AndroidStream {
+ public:
+  AndroidNativeOpenGl2Channel(
+      WebRtc_UWord32 streamId,
+      JavaVM* jvm,
+      VideoRenderAndroid& renderer,jobject javaRenderObj);
+  ~AndroidNativeOpenGl2Channel();
+
+  WebRtc_Word32 Init(WebRtc_Word32 zOrder,
+                     const float left,
+                     const float top,
+                     const float right,
+                     const float bottom);
+
+  //Implement VideoRenderCallback
+  virtual WebRtc_Word32 RenderFrame(
+      const WebRtc_UWord32 streamId,
+      VideoFrame& videoFrame);
+
+  //Implements AndroidStream
+  virtual void DeliverFrame(JNIEnv* jniEnv);
+
+ private:
+  static jint CreateOpenGLNativeStatic(
+      JNIEnv * env,
+      jobject,
+      jlong context,
+      jint width,
+      jint height);
+  jint CreateOpenGLNative(int width, int height);
+
+  static void DrawNativeStatic(JNIEnv * env,jobject, jlong context);
+  void DrawNative();
+  WebRtc_UWord32 _id;
+  CriticalSectionWrapper& _renderCritSect;
+
+  VideoFrame _bufferToRender;
+  VideoRenderAndroid& _renderer;
+  JavaVM*     _jvm;
+  jobject     _javaRenderObj;
+
+  jmethodID      _redrawCid;
+  jmethodID      _registerNativeCID;
+  jmethodID      _deRegisterNativeCID;
+  VideoRenderOpenGles20 _openGLRenderer;
+};
+
+
+class AndroidNativeOpenGl2Renderer: private VideoRenderAndroid {
+ public:
+  AndroidNativeOpenGl2Renderer(const WebRtc_Word32 id,
+                               const VideoRenderType videoRenderType,
+                               void* window,
+                               const bool fullscreen);
+
+  ~AndroidNativeOpenGl2Renderer();
+  static bool UseOpenGL2(void* window);
+
+  WebRtc_Word32 Init();
+  virtual AndroidStream* CreateAndroidRenderChannel(
+      WebRtc_Word32 streamId,
+      WebRtc_Word32 zOrder,
+      const float left,
+      const float top,
+      const float right,
+      const float bottom,
+      VideoRenderAndroid& renderer);
+
+ private:
+  jobject _javaRenderObj;
+  jclass _javaRenderClass;
+};
+
+} //namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_NATIVE_OPENGL2_H_
diff --git a/src/modules/video_render/main/source/android/video_render_android_surface_view.cc b/src/modules/video_render/main/source/android/video_render_android_surface_view.cc
new file mode 100644
index 0000000..889a6e7
--- /dev/null
+++ b/src/modules/video_render/main/source/android/video_render_android_surface_view.cc
@@ -0,0 +1,477 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_render_android_surface_view.h"
+#include "critical_section_wrapper.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "tick_util.h"
+
+#ifdef ANDROID_LOG
+#include <stdio.h>
+#include <android/log.h>
+
+#undef WEBRTC_TRACE
+#define WEBRTC_TRACE(a,b,c,...)  __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTC*", __VA_ARGS__)
+#else
+#include "trace.h"
+#endif
+
+namespace webrtc {
+
+AndroidSurfaceViewRenderer::AndroidSurfaceViewRenderer(
+    const WebRtc_Word32 id,
+    const VideoRenderType videoRenderType,
+    void* window,
+    const bool fullscreen) :
+    VideoRenderAndroid(id,videoRenderType,window,fullscreen),
+    _javaRenderObj(NULL),
+    _javaRenderClass(NULL) {
+}
+
+AndroidSurfaceViewRenderer::~AndroidSurfaceViewRenderer() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
+               "AndroidSurfaceViewRenderer dtor");
+  if(g_jvm) {
+    // get the JNI env for this thread
+    bool isAttached = false;
+    JNIEnv* env = NULL;
+    if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
+      // try to attach the thread and get the env
+      // Attach this thread to JVM
+      jint res = g_jvm->AttachCurrentThread(&env, NULL);
+
+      // Get the JNI env for this thread
+      if ((res < 0) || !env) {
+        WEBRTC_TRACE(kTraceError,
+                     kTraceVideoRenderer,
+                     _id,
+                     "%s: Could not attach thread to JVM (%d, %p)",
+                     __FUNCTION__,
+                     res,
+                     env);
+        env=NULL;
+      }
+      else {
+        isAttached = true;
+      }
+    }
+    env->DeleteGlobalRef(_javaRenderObj);
+    env->DeleteGlobalRef(_javaRenderClass);
+
+    if (isAttached) {
+      if (g_jvm->DetachCurrentThread() < 0) {
+        WEBRTC_TRACE(kTraceWarning,
+                     kTraceVideoRenderer,
+                     _id,
+                     "%s: Could not detach thread from JVM",
+                     __FUNCTION__);
+      }
+    }
+  }
+}
+
+WebRtc_Word32 AndroidSurfaceViewRenderer::Init() {
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
+  if (!g_jvm) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "(%s): Not a valid Java VM pointer.",
+                 __FUNCTION__);
+    return -1;
+  }
+  if(!_ptrWindow) {
+    WEBRTC_TRACE(kTraceWarning,
+                 kTraceVideoRenderer,
+                 _id,
+                 "(%s): No window have been provided.",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  // get the JNI env for this thread
+  bool isAttached = false;
+  JNIEnv* env = NULL;
+  if (g_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
+    // try to attach the thread and get the env
+    // Attach this thread to JVM
+    jint res = g_jvm->AttachCurrentThread(&env, NULL);
+
+    // Get the JNI env for this thread
+    if ((res < 0) || !env) {
+      WEBRTC_TRACE(kTraceError,
+                   kTraceVideoRenderer,
+                   _id,
+                   "%s: Could not attach thread to JVM (%d, %p)",
+                   __FUNCTION__,
+                   res,
+                   env);
+      return -1;
+    }
+    isAttached = true;
+  }
+
+  // get the ViESurfaceRender class
+  jclass javaRenderClassLocal =
+      env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
+  if (!javaRenderClassLocal) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: could not find ViESurfaceRenderer",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  // create a global reference to the class (to tell JNI that
+  // we are referencing it after this function has returned)
+  _javaRenderClass =
+      reinterpret_cast<jclass>(env->NewGlobalRef(javaRenderClassLocal));
+  if (!_javaRenderClass) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: could not create Java ViESurfaceRenderer class reference",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  // Delete local class ref, we only use the global ref
+  env->DeleteLocalRef(javaRenderClassLocal);
+
+  // get the method ID for the constructor
+  jmethodID cid = env->GetMethodID(_javaRenderClass,
+                                   "<init>",
+                                   "(Landroid/view/SurfaceView;)V");
+  if (cid == NULL) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: could not get constructor ID",
+                 __FUNCTION__);
+    return -1; /* exception thrown */
+  }
+
+  // construct the object
+  jobject javaRenderObjLocal = env->NewObject(_javaRenderClass,
+                                              cid,
+                                              _ptrWindow);
+  if (!javaRenderObjLocal) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: could not create Java Render",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  // create a reference to the object (to tell JNI that we are referencing it
+  // after this function has returned)
+  _javaRenderObj = env->NewGlobalRef(javaRenderObjLocal);
+  if (!_javaRenderObj) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: could not create Java SurfaceRender object reference",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  // Detach this thread if it was attached
+  if (isAttached) {
+    if (g_jvm->DetachCurrentThread() < 0) {
+      WEBRTC_TRACE(kTraceWarning,
+                   kTraceVideoRenderer,
+                   _id,
+                   "%s: Could not detach thread from JVM", __FUNCTION__);
+    }
+  }
+
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s done", __FUNCTION__);
+  return 0;
+}
+
+AndroidStream*
+AndroidSurfaceViewRenderer::CreateAndroidRenderChannel(
+    WebRtc_Word32 streamId,
+    WebRtc_Word32 zOrder,
+    const float left,
+    const float top,
+    const float right,
+    const float bottom,
+    VideoRenderAndroid& renderer) {
+  WEBRTC_TRACE(kTraceDebug,
+               kTraceVideoRenderer,
+               _id,
+               "%s: Id %d",
+               __FUNCTION__,
+               streamId);
+  AndroidSurfaceViewChannel* stream =
+      new AndroidSurfaceViewChannel(streamId, g_jvm, renderer, _javaRenderObj);
+  if(stream && stream->Init(zOrder, left, top, right, bottom) == 0)
+    return stream;
+  else
+    delete stream;
+  return NULL;
+}
+
+AndroidSurfaceViewChannel::AndroidSurfaceViewChannel(
+    WebRtc_UWord32 streamId,
+    JavaVM* jvm,
+    VideoRenderAndroid& renderer,
+    jobject javaRenderObj) :
+    _id(streamId),
+    _renderCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _renderer(renderer),
+    _jvm(jvm),
+    _javaRenderObj(javaRenderObj),
+#ifndef ANDROID_NDK_8_OR_ABOVE
+    _javaByteBufferObj(NULL),
+    _directBuffer(NULL),
+#endif
+    _bitmapWidth(0),
+    _bitmapHeight(0) {
+}
+
+AndroidSurfaceViewChannel::~AndroidSurfaceViewChannel() {
+  WEBRTC_TRACE(kTraceInfo,
+               kTraceVideoRenderer,
+               _id,
+               "AndroidSurfaceViewChannel dtor");
+  delete &_renderCritSect;
+  if(_jvm) {
+    // get the JNI env for this thread
+    bool isAttached = false;
+    JNIEnv* env = NULL;
+    if ( _jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
+      // try to attach the thread and get the env
+      // Attach this thread to JVM
+      jint res = _jvm->AttachCurrentThread(&env, NULL);
+
+      // Get the JNI env for this thread
+      if ((res < 0) || !env) {
+        WEBRTC_TRACE(kTraceError,
+                     kTraceVideoRenderer,
+                     _id,
+                     "%s: Could not attach thread to JVM (%d, %p)",
+                     __FUNCTION__,
+                     res,
+                     env);
+        env=NULL;
+      }
+      else {
+        isAttached = true;
+      }
+    }
+
+    env->DeleteGlobalRef(_javaByteBufferObj);
+    if (isAttached) {
+      if (_jvm->DetachCurrentThread() < 0) {
+        WEBRTC_TRACE(kTraceWarning,
+                     kTraceVideoRenderer,
+                     _id,
+                     "%s: Could not detach thread from JVM",
+                     __FUNCTION__);
+      }
+    }
+  }
+}
+
+WebRtc_Word32 AndroidSurfaceViewChannel::Init(
+    WebRtc_Word32 /*zOrder*/,
+    const float left,
+    const float top,
+    const float right,
+    const float bottom) {
+
+  WEBRTC_TRACE(kTraceDebug,
+               kTraceVideoRenderer,
+               _id,
+               "%s: AndroidSurfaceViewChannel",
+               __FUNCTION__);
+  if (!_jvm) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: Not a valid Java VM pointer",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  if( (top > 1 || top < 0) ||
+      (right > 1 || right < 0) ||
+      (bottom > 1 || bottom < 0) ||
+      (left > 1 || left < 0)) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: Wrong coordinates", __FUNCTION__);
+    return -1;
+  }
+
+  // get the JNI env for this thread
+  bool isAttached = false;
+  JNIEnv* env = NULL;
+  if (_jvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
+    // try to attach the thread and get the env
+    // Attach this thread to JVM
+    jint res = _jvm->AttachCurrentThread(&env, NULL);
+
+    // Get the JNI env for this thread
+    if ((res < 0) || !env) {
+      WEBRTC_TRACE(kTraceError,
+                   kTraceVideoRenderer,
+                   _id,
+                   "%s: Could not attach thread to JVM (%d, %p)",
+                   __FUNCTION__,
+                   res,
+                   env);
+      return -1;
+    }
+    isAttached = true;
+  }
+
+  jclass javaRenderClass =
+      env->FindClass("org/webrtc/videoengine/ViESurfaceRenderer");
+  if (!javaRenderClass) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: could not find ViESurfaceRenderer",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  // get the method ID for the CreateIntArray
+  _createByteBufferCid =
+      env->GetMethodID(javaRenderClass,
+                       "CreateByteBuffer",
+                       "(II)Ljava/nio/ByteBuffer;");
+  if (_createByteBufferCid == NULL) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: could not get CreateByteBuffer ID",
+                 __FUNCTION__);
+    return -1; /* exception thrown */
+  }
+
+  // get the method ID for the DrawByteBuffer function
+  _drawByteBufferCid = env->GetMethodID(javaRenderClass,
+                                        "DrawByteBuffer",
+                                        "()V");
+  if (_drawByteBufferCid == NULL) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: could not get DrawByteBuffer ID",
+                 __FUNCTION__);
+    return -1; /* exception thrown */
+  }
+
+  // get the method ID for the SetCoordinates function
+  _setCoordinatesCid = env->GetMethodID(javaRenderClass,
+                                        "SetCoordinates",
+                                        "(FFFF)V");
+  if (_setCoordinatesCid == NULL) {
+    WEBRTC_TRACE(kTraceError,
+                 kTraceVideoRenderer,
+                 _id,
+                 "%s: could not get SetCoordinates ID",
+                 __FUNCTION__);
+    return -1; /* exception thrown */
+  }
+
+  env->CallVoidMethod(_javaRenderObj, _setCoordinatesCid,
+                      left, top, right, bottom);
+
+  // Detach this thread if it was attached
+  if (isAttached) {
+    if (_jvm->DetachCurrentThread() < 0) {
+      WEBRTC_TRACE(kTraceWarning,
+                   kTraceVideoRenderer,
+                   _id,
+                   "%s: Could not detach thread from JVM",
+                   __FUNCTION__);
+    }
+  }
+
+  WEBRTC_TRACE(kTraceDebug,
+               kTraceVideoRenderer,
+               _id,
+               "%s: AndroidSurfaceViewChannel done",
+               __FUNCTION__);
+  return 0;
+}
+
+
+WebRtc_Word32 AndroidSurfaceViewChannel::RenderFrame(
+    const WebRtc_UWord32 /*streamId*/,
+    VideoFrame& videoFrame) {
+  // WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer,_id, "%s:" ,__FUNCTION__);
+  _renderCritSect.Enter();
+  _bufferToRender.SwapFrame(videoFrame);
+  _renderCritSect.Leave();
+  _renderer.ReDraw();
+  return 0;
+}
+
+
+/*Implements AndroidStream
+ * Calls the Java object and render the buffer in _bufferToRender
+ */
+void AndroidSurfaceViewChannel::DeliverFrame(JNIEnv* jniEnv) {
+  _renderCritSect.Enter();
+
+  if (_bitmapWidth != _bufferToRender.Width() ||
+      _bitmapHeight != _bufferToRender.Height()) {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s: New render size %d "
+                 "%d",__FUNCTION__,
+                 _bufferToRender.Width(), _bufferToRender.Height());
+    if (_javaByteBufferObj) {
+      jniEnv->DeleteGlobalRef(_javaByteBufferObj);
+      _javaByteBufferObj = NULL;
+      _directBuffer = NULL;
+    }
+
+    jobject javaByteBufferObj =
+        jniEnv->CallObjectMethod(_javaRenderObj, _createByteBufferCid,
+                                 _bufferToRender.Width(),
+                                 _bufferToRender.Height());
+    _javaByteBufferObj = jniEnv->NewGlobalRef(javaByteBufferObj);
+    if (!_javaByteBufferObj) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,  "%s: could not "
+                   "create Java ByteBuffer object reference", __FUNCTION__);
+      _renderCritSect.Leave();
+      return;
+    } else {
+      _directBuffer = static_cast<unsigned char*>
+          (jniEnv->GetDirectBufferAddress(_javaByteBufferObj));
+      _bitmapWidth = _bufferToRender.Width();
+      _bitmapHeight = _bufferToRender.Height();
+    }
+  }
+
+  if(_javaByteBufferObj && _bitmapWidth && _bitmapHeight) {
+    const int conversionResult =
+        ConvertFromI420((unsigned char* )_bufferToRender.Buffer(), _bitmapWidth,
+                        kRGB565, 0, _bitmapWidth, _bitmapHeight, _directBuffer);
+
+    if (conversionResult < 0)  {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s: Color conversion"
+                   " failed.", __FUNCTION__);
+      _renderCritSect.Leave();
+      return;
+    }
+  }
+  _renderCritSect.Leave();
+  // Draw the Surface
+  jniEnv->CallVoidMethod(_javaRenderObj, _drawByteBufferCid);
+}
+
+}  // namespace webrtc
diff --git a/src/modules/video_render/main/source/android/video_render_android_surface_view.h b/src/modules/video_render/main/source/android/video_render_android_surface_view.h
new file mode 100644
index 0000000..1122a79
--- /dev/null
+++ b/src/modules/video_render/main/source/android/video_render_android_surface_view.h
@@ -0,0 +1,86 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
+
+#include <jni.h>
+
+#include "video_render_defines.h"
+#include "video_render_android_impl.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+
+class AndroidSurfaceViewChannel : public AndroidStream {
+ public:
+  AndroidSurfaceViewChannel(WebRtc_UWord32 streamId,
+                            JavaVM* jvm,
+                            VideoRenderAndroid& renderer,
+                            jobject javaRenderObj);
+  ~AndroidSurfaceViewChannel();
+
+  WebRtc_Word32 Init(WebRtc_Word32 zOrder,
+                     const float left,
+                     const float top,
+                     const float right,
+                     const float bottom);
+
+  //Implement VideoRenderCallback
+  virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
+                                    VideoFrame& videoFrame);
+
+  //Implements AndroidStream
+  virtual void DeliverFrame(JNIEnv* jniEnv);
+
+ private:
+  WebRtc_UWord32 _id;
+  CriticalSectionWrapper& _renderCritSect;
+
+  VideoFrame _bufferToRender;
+  VideoRenderAndroid& _renderer;
+  JavaVM* _jvm;
+  jobject _javaRenderObj;
+
+  jobject _javaByteBufferObj;
+  unsigned char* _directBuffer;
+  jmethodID _createByteBufferCid;
+  jmethodID _drawByteBufferCid;
+
+  jmethodID _setCoordinatesCid;
+  unsigned int _bitmapWidth;
+  unsigned int _bitmapHeight;
+};
+
+class AndroidSurfaceViewRenderer : private VideoRenderAndroid {
+ public:
+  AndroidSurfaceViewRenderer(const WebRtc_Word32 id,
+                             const VideoRenderType videoRenderType,
+                             void* window,
+                             const bool fullscreen);
+  ~AndroidSurfaceViewRenderer();
+  WebRtc_Word32 Init();
+  virtual AndroidStream* CreateAndroidRenderChannel(
+      WebRtc_Word32 streamId,
+      WebRtc_Word32 zOrder,
+      const float left,
+      const float top,
+      const float right,
+      const float bottom,
+      VideoRenderAndroid& renderer);
+ private:
+  jobject _javaRenderObj;
+  jclass _javaRenderClass;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_ANDROID_SURFACE_VIEW_H_
diff --git a/src/modules/video_render/main/source/android/video_render_opengles20.cc b/src/modules/video_render/main/source/android/video_render_opengles20.cc
new file mode 100644
index 0000000..28bf9ae
--- /dev/null
+++ b/src/modules/video_render/main/source/android/video_render_opengles20.cc
@@ -0,0 +1,413 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+
+#include <stdio.h>
+#include <stdlib.h>
+
+#include "video_render_opengles20.h"
+
+//#define ANDROID_LOG
+
+#ifdef ANDROID_LOG
+#include <stdio.h>
+#include <android/log.h>
+
+#undef WEBRTC_TRACE
+#define WEBRTC_TRACE(a,b,c,...)  __android_log_print(ANDROID_LOG_DEBUG, "*WEBRTCN*", __VA_ARGS__)
+#else
+#include "trace.h"
+#endif
+
+namespace webrtc {
+
+const char VideoRenderOpenGles20::g_indices[] = { 0, 3, 2, 0, 2, 1 };
+
+const char VideoRenderOpenGles20::g_vertextShader[] = {
+  "attribute vec4 aPosition;\n"
+  "attribute vec2 aTextureCoord;\n"
+  "varying vec2 vTextureCoord;\n"
+  "void main() {\n"
+  "  gl_Position = aPosition;\n"
+  "  vTextureCoord = aTextureCoord;\n"
+  "}\n" };
+
+// The fragment shader.
+// Do YUV to RGB565 conversion.
+const char VideoRenderOpenGles20::g_fragmentShader[] = {
+  "precision mediump float;\n"
+  "uniform sampler2D Ytex;\n"
+  "uniform sampler2D Utex,Vtex;\n"
+  "varying vec2 vTextureCoord;\n"
+  "void main(void) {\n"
+  "  float nx,ny,r,g,b,y,u,v;\n"
+  "  mediump vec4 txl,ux,vx;"
+  "  nx=vTextureCoord[0];\n"
+  "  ny=vTextureCoord[1];\n"
+  "  y=texture2D(Ytex,vec2(nx,ny)).r;\n"
+  "  u=texture2D(Utex,vec2(nx,ny)).r;\n"
+  "  v=texture2D(Vtex,vec2(nx,ny)).r;\n"
+
+  //"  y = v;\n"+
+  "  y=1.1643*(y-0.0625);\n"
+  "  u=u-0.5;\n"
+  "  v=v-0.5;\n"
+
+  "  r=y+1.5958*v;\n"
+  "  g=y-0.39173*u-0.81290*v;\n"
+  "  b=y+2.017*u;\n"
+  "  gl_FragColor=vec4(r,g,b,1.0);\n"
+  "}\n" };
+
+VideoRenderOpenGles20::VideoRenderOpenGles20(WebRtc_Word32 id) :
+    _id(id),
+    _textureWidth(-1),
+    _textureHeight(-1) {
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d",
+               __FUNCTION__, (int) _id);
+
+  const GLfloat vertices[20] = {
+    // X, Y, Z, U, V
+    -1, -1, 0, 0, 1, // Bottom Left
+    1, -1, 0, 1, 1, //Bottom Right
+    1, 1, 0, 1, 0, //Top Right
+    -1, 1, 0, 0, 0 }; //Top Left
+
+  memcpy(_vertices, vertices, sizeof(_vertices));
+}
+
+VideoRenderOpenGles20::~VideoRenderOpenGles20() {
+}
+
+WebRtc_Word32 VideoRenderOpenGles20::Setup(WebRtc_Word32 width,
+                                           WebRtc_Word32 height) {
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
+               "%s: width %d, height %d", __FUNCTION__, (int) width,
+               (int) height);
+
+  printGLString("Version", GL_VERSION);
+  printGLString("Vendor", GL_VENDOR);
+  printGLString("Renderer", GL_RENDERER);
+  printGLString("Extensions", GL_EXTENSIONS);
+
+  int maxTextureImageUnits[2];
+  int maxTextureSize[2];
+  glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, maxTextureImageUnits);
+  glGetIntegerv(GL_MAX_TEXTURE_SIZE, maxTextureSize);
+
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
+               "%s: number of textures %d, size %d", __FUNCTION__,
+               (int) maxTextureImageUnits[0], (int) maxTextureSize[0]);
+
+  _program = createProgram(g_vertextShader, g_fragmentShader);
+  if (!_program) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: Could not create program", __FUNCTION__);
+    return -1;
+  }
+
+  int positionHandle = glGetAttribLocation(_program, "aPosition");
+  checkGlError("glGetAttribLocation aPosition");
+  if (positionHandle == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: Could not get aPosition handle", __FUNCTION__);
+    return -1;
+  }
+
+  int textureHandle = glGetAttribLocation(_program, "aTextureCoord");
+  checkGlError("glGetAttribLocation aTextureCoord");
+  if (textureHandle == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: Could not get aTextureCoord handle", __FUNCTION__);
+    return -1;
+  }
+
+  // set the vertices array in the shader
+  // _vertices contains 4 vertices with 5 coordinates.
+  // 3 for (xyz) for the vertices and 2 for the texture
+  glVertexAttribPointer(positionHandle, 3, GL_FLOAT, false,
+                        5 * sizeof(GLfloat), _vertices);
+  checkGlError("glVertexAttribPointer aPosition");
+
+  glEnableVertexAttribArray(positionHandle);
+  checkGlError("glEnableVertexAttribArray positionHandle");
+
+  // set the texture coordinate array in the shader
+  // _vertices contains 4 vertices with 5 coordinates.
+  // 3 for (xyz) for the vertices and 2 for the texture
+  glVertexAttribPointer(textureHandle, 2, GL_FLOAT, false, 5
+                        * sizeof(GLfloat), &_vertices[3]);
+  checkGlError("glVertexAttribPointer maTextureHandle");
+  glEnableVertexAttribArray(textureHandle);
+  checkGlError("glEnableVertexAttribArray textureHandle");
+
+  glUseProgram(_program);
+  int i = glGetUniformLocation(_program, "Ytex");
+  checkGlError("glGetUniformLocation");
+  glUniform1i(i, 0); /* Bind Ytex to texture unit 0 */
+  checkGlError("glUniform1i Ytex");
+
+  i = glGetUniformLocation(_program, "Utex");
+  checkGlError("glGetUniformLocation Utex");
+  glUniform1i(i, 1); /* Bind Utex to texture unit 1 */
+  checkGlError("glUniform1i Utex");
+
+  i = glGetUniformLocation(_program, "Vtex");
+  checkGlError("glGetUniformLocation");
+  glUniform1i(i, 2); /* Bind Vtex to texture unit 2 */
+  checkGlError("glUniform1i");
+
+  glViewport(0, 0, width, height);
+  checkGlError("glViewport");
+  return 0;
+}
+
+// SetCoordinates
+// Sets the coordinates where the stream shall be rendered.
+// Values must be between 0 and 1.
+WebRtc_Word32 VideoRenderOpenGles20::SetCoordinates(WebRtc_Word32 zOrder,
+                                                    const float left,
+                                                    const float top,
+                                                    const float right,
+                                                    const float bottom) {
+  if ((top > 1 || top < 0) || (right > 1 || right < 0) ||
+      (bottom > 1 || bottom < 0) || (left > 1 || left < 0)) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s: Wrong coordinates", __FUNCTION__);
+    return -1;
+  }
+
+  //  X, Y, Z, U, V
+  // -1, -1, 0, 0, 1, // Bottom Left
+  //  1, -1, 0, 1, 1, //Bottom Right
+  //  1,  1, 0, 1, 0, //Top Right
+  // -1,  1, 0, 0, 0  //Top Left
+
+  // Bottom Left
+  _vertices[0] = (left * 2) - 1;
+  _vertices[1] = -1 * (2 * bottom) + 1;
+  _vertices[2] = zOrder;
+
+  //Bottom Right
+  _vertices[5] = (right * 2) - 1;
+  _vertices[6] = -1 * (2 * bottom) + 1;
+  _vertices[7] = zOrder;
+
+  //Top Right
+  _vertices[10] = (right * 2) - 1;
+  _vertices[11] = -1 * (2 * top) + 1;
+  _vertices[12] = zOrder;
+
+  //Top Left
+  _vertices[15] = (left * 2) - 1;
+  _vertices[16] = -1 * (2 * top) + 1;
+  _vertices[17] = zOrder;
+
+  return 0;
+}
+
+WebRtc_Word32 VideoRenderOpenGles20::Render(const VideoFrame& frameToRender) {
+
+  if (frameToRender.Length() == 0) {
+    return -1;
+  }
+
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s: id %d",
+               __FUNCTION__, (int) _id);
+
+  glUseProgram(_program);
+  checkGlError("glUseProgram");
+
+  if (_textureWidth != (GLsizei) frameToRender.Width() ||
+      _textureHeight != (GLsizei) frameToRender.Height()) {
+    SetupTextures(frameToRender);
+  }
+  else {
+    UpdateTextures(frameToRender);
+  }
+
+  glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, g_indices);
+  checkGlError("glDrawArrays");
+
+  return 0;
+}
+
+GLuint VideoRenderOpenGles20::loadShader(GLenum shaderType,
+                                         const char* pSource) {
+  GLuint shader = glCreateShader(shaderType);
+  if (shader) {
+    glShaderSource(shader, 1, &pSource, NULL);
+    glCompileShader(shader);
+    GLint compiled = 0;
+    glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
+    if (!compiled) {
+      GLint infoLen = 0;
+      glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
+      if (infoLen) {
+        char* buf = (char*) malloc(infoLen);
+        if (buf) {
+          glGetShaderInfoLog(shader, infoLen, NULL, buf);
+          WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                       "%s: Could not compile shader %d: %s",
+                       __FUNCTION__, shaderType, buf);
+          free(buf);
+        }
+        glDeleteShader(shader);
+        shader = 0;
+      }
+    }
+  }
+  return shader;
+}
+
+GLuint VideoRenderOpenGles20::createProgram(const char* pVertexSource,
+                                            const char* pFragmentSource) {
+  GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);
+  if (!vertexShader) {
+    return 0;
+  }
+
+  GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
+  if (!pixelShader) {
+    return 0;
+  }
+
+  GLuint program = glCreateProgram();
+  if (program) {
+    glAttachShader(program, vertexShader);
+    checkGlError("glAttachShader");
+    glAttachShader(program, pixelShader);
+    checkGlError("glAttachShader");
+    glLinkProgram(program);
+    GLint linkStatus = GL_FALSE;
+    glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
+    if (linkStatus != GL_TRUE) {
+      GLint bufLength = 0;
+      glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
+      if (bufLength) {
+        char* buf = (char*) malloc(bufLength);
+        if (buf) {
+          glGetProgramInfoLog(program, bufLength, NULL, buf);
+          WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                       "%s: Could not link program: %s",
+                       __FUNCTION__, buf);
+          free(buf);
+        }
+      }
+      glDeleteProgram(program);
+      program = 0;
+    }
+  }
+  return program;
+}
+
+void VideoRenderOpenGles20::printGLString(const char *name, GLenum s) {
+  const char *v = (const char *) glGetString(s);
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "GL %s = %s\n",
+               name, v);
+}
+
+void VideoRenderOpenGles20::checkGlError(const char* op) {
+#ifdef ANDROID_LOG
+  for (GLint error = glGetError(); error; error = glGetError()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "after %s() glError (0x%x)\n", op, error);
+  }
+#else
+  return;
+#endif
+}
+
+void VideoRenderOpenGles20::SetupTextures(const VideoFrame& frameToRender) {
+  WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id,
+               "%s: width %d, height %d length %u", __FUNCTION__,
+               frameToRender.Width(), frameToRender.Height(),
+               frameToRender.Length());
+
+  const GLsizei width = frameToRender.Width();
+  const GLsizei height = frameToRender.Height();
+
+  glGenTextures(3, _textureIds); //Generate  the Y, U and V texture
+  GLuint currentTextureId = _textureIds[0]; // Y
+  glActiveTexture( GL_TEXTURE0);
+  glBindTexture(GL_TEXTURE_2D, currentTextureId);
+
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+
+  glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0,
+               GL_LUMINANCE, GL_UNSIGNED_BYTE,
+               (const GLvoid*) frameToRender.Buffer());
+
+  currentTextureId = _textureIds[1]; // U
+  glActiveTexture( GL_TEXTURE1);
+  glBindTexture(GL_TEXTURE_2D, currentTextureId);
+
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+
+  const WebRtc_UWord8* uComponent = frameToRender.Buffer() + width * height;
+  glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0,
+               GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) uComponent);
+
+  currentTextureId = _textureIds[2]; // V
+  glActiveTexture( GL_TEXTURE2);
+  glBindTexture(GL_TEXTURE_2D, currentTextureId);
+
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+  glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+
+  const WebRtc_UWord8* vComponent = uComponent + (width * height) / 4;
+  glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, width / 2, height / 2, 0,
+               GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) vComponent);
+  checkGlError("SetupTextures");
+
+  _textureWidth = width;
+  _textureHeight = height;
+}
+
+void VideoRenderOpenGles20::UpdateTextures(const VideoFrame& frameToRender) {
+  const GLsizei width = frameToRender.Width();
+  const GLsizei height = frameToRender.Height();
+
+  GLuint currentTextureId = _textureIds[0]; // Y
+  glActiveTexture( GL_TEXTURE0);
+  glBindTexture(GL_TEXTURE_2D, currentTextureId);
+  glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width, height, GL_LUMINANCE,
+                  GL_UNSIGNED_BYTE, (const GLvoid*) frameToRender.Buffer());
+
+  currentTextureId = _textureIds[1]; // U
+  glActiveTexture( GL_TEXTURE1);
+  glBindTexture(GL_TEXTURE_2D, currentTextureId);
+  const WebRtc_UWord8* uComponent = frameToRender.Buffer() + width * height;
+  glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2,
+                  GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) uComponent);
+
+  currentTextureId = _textureIds[2]; // V
+  glActiveTexture( GL_TEXTURE2);
+  glBindTexture(GL_TEXTURE_2D, currentTextureId);
+  const WebRtc_UWord8* vComponent = uComponent + (width * height) / 4;
+  glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, width / 2, height / 2,
+                  GL_LUMINANCE, GL_UNSIGNED_BYTE, (const GLvoid*) vComponent);
+  checkGlError("UpdateTextures");
+
+}
+
+}  // namespace webrtc
diff --git a/src/modules/video_render/main/source/android/video_render_opengles20.h b/src/modules/video_render/main/source/android/video_render_opengles20.h
new file mode 100644
index 0000000..8f1743e
--- /dev/null
+++ b/src/modules/video_render/main/source/android/video_render_opengles20.h
@@ -0,0 +1,61 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
+
+#include "video_render_defines.h"
+
+#include <GLES2/gl2.h>
+#include <GLES2/gl2ext.h>
+
+namespace webrtc
+{
+
+class VideoRenderOpenGles20 {
+ public:
+  VideoRenderOpenGles20(WebRtc_Word32 id);
+  ~VideoRenderOpenGles20();
+
+  WebRtc_Word32 Setup(WebRtc_Word32 widht, WebRtc_Word32 height);
+  WebRtc_Word32 Render(const VideoFrame& frameToRender);
+  WebRtc_Word32 SetCoordinates(WebRtc_Word32 zOrder,
+                               const float left,
+                               const float top,
+                               const float right,
+                               const float bottom);
+
+ private:
+  void printGLString(const char *name, GLenum s);
+  void checkGlError(const char* op);
+  GLuint loadShader(GLenum shaderType, const char* pSource);
+  GLuint createProgram(const char* pVertexSource,
+                       const char* pFragmentSource);
+  void SetupTextures(const VideoFrame& frameToRender);
+  void UpdateTextures(const VideoFrame& frameToRender);
+
+  WebRtc_Word32 _id;
+  GLuint _textureIds[3]; // Texture id of Y,U and V texture.
+  GLuint _program;
+  GLuint _vPositionHandle;
+  GLsizei _textureWidth;
+  GLsizei _textureHeight;
+
+  GLfloat _vertices[20];
+  static const char g_indices[];
+
+  static const char g_vertextShader[];
+  static const char g_fragmentShader[];
+
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_ANDROID_VIDEO_RENDER_OPENGLES20_H_
diff --git a/src/modules/video_render/main/source/external/video_render_external_impl.cc b/src/modules/video_render/main/source/external/video_render_external_impl.cc
new file mode 100644
index 0000000..a460033
--- /dev/null
+++ b/src/modules/video_render/main/source/external/video_render_external_impl.cc
@@ -0,0 +1,205 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_render_external_impl.h"
+
+namespace webrtc {
+
+VideoRenderExternalImpl::VideoRenderExternalImpl(
+                                                 const WebRtc_Word32 id,
+                                                 const VideoRenderType videoRenderType,
+                                                 void* window,
+                                                 const bool fullscreen) :
+    _id(id), _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+            _fullscreen(fullscreen)
+{
+}
+
+VideoRenderExternalImpl::~VideoRenderExternalImpl()
+{
+    delete &_critSect;
+}
+
+WebRtc_Word32 VideoRenderExternalImpl::Init()
+{
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderExternalImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    CriticalSectionScoped cs(&_critSect);
+    _id = id;
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderExternalImpl::ChangeWindow(void* window)
+{
+    CriticalSectionScoped cs(&_critSect);
+    return 0;
+}
+
+VideoRenderCallback*
+VideoRenderExternalImpl::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+                                                 const WebRtc_UWord32 zOrder,
+                                                 const float left,
+                                                 const float top,
+                                                 const float right,
+                                                 const float bottom)
+{
+    CriticalSectionScoped cs(&_critSect);
+    return this;
+}
+
+WebRtc_Word32 VideoRenderExternalImpl::DeleteIncomingRenderStream(
+                                                                  const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(&_critSect);
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderExternalImpl::GetIncomingRenderStreamProperties(
+                                                                         const WebRtc_UWord32 streamId,
+                                                                         WebRtc_UWord32& zOrder,
+                                                                         float& left,
+                                                                         float& top,
+                                                                         float& right,
+                                                                         float& bottom) const
+{
+    CriticalSectionScoped cs(&_critSect);
+
+    zOrder = 0;
+    left = 0;
+    top = 0;
+    right = 0;
+    bottom = 0;
+
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderExternalImpl::StartRender()
+{
+    CriticalSectionScoped cs(&_critSect);
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderExternalImpl::StopRender()
+{
+    CriticalSectionScoped cs(&_critSect);
+    return 0;
+}
+
+VideoRenderType VideoRenderExternalImpl::RenderType()
+{
+    return kRenderExternal;
+}
+
+RawVideoType VideoRenderExternalImpl::PerferedVideoType()
+{
+    return kVideoI420;
+}
+
+bool VideoRenderExternalImpl::FullScreen()
+{
+    CriticalSectionScoped cs(&_critSect);
+    return _fullscreen;
+}
+
+WebRtc_Word32 VideoRenderExternalImpl::GetGraphicsMemory(
+                                                         WebRtc_UWord64& totalGraphicsMemory,
+                                                         WebRtc_UWord64& availableGraphicsMemory) const
+{
+    totalGraphicsMemory = 0;
+    availableGraphicsMemory = 0;
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderExternalImpl::GetScreenResolution(
+                                                           WebRtc_UWord32& screenWidth,
+                                                           WebRtc_UWord32& screenHeight) const
+{
+    CriticalSectionScoped cs(&_critSect);
+    screenWidth = 0;
+    screenHeight = 0;
+    return 0;
+}
+
+WebRtc_UWord32 VideoRenderExternalImpl::RenderFrameRate(
+                                                        const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(&_critSect);
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderExternalImpl::SetStreamCropping(
+                                                         const WebRtc_UWord32 streamId,
+                                                         const float left,
+                                                         const float top,
+                                                         const float right,
+                                                         const float bottom)
+{
+    CriticalSectionScoped cs(&_critSect);
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderExternalImpl::ConfigureRenderer(
+                                                         const WebRtc_UWord32 streamId,
+                                                         const unsigned int zOrder,
+                                                         const float left,
+                                                         const float top,
+                                                         const float right,
+                                                         const float bottom)
+{
+    CriticalSectionScoped cs(&_critSect);
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderExternalImpl::SetTransparentBackground(
+                                                                const bool enable)
+{
+    CriticalSectionScoped cs(&_critSect);
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderExternalImpl::SetText(
+                                               const WebRtc_UWord8 textId,
+                                               const WebRtc_UWord8* text,
+                                               const WebRtc_Word32 textLength,
+                                               const WebRtc_UWord32 textColorRef,
+                                               const WebRtc_UWord32 backgroundColorRef,
+                                               const float left,
+                                               const float top,
+                                               const float right,
+                                               const float bottom)
+{
+    CriticalSectionScoped cs(&_critSect);
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderExternalImpl::SetBitmap(const void* bitMap,
+                                                 const WebRtc_UWord8 pictureId,
+                                                 const void* colorKey,
+                                                 const float left,
+                                                 const float top,
+                                                 const float right,
+                                                 const float bottom)
+{
+    CriticalSectionScoped cs(&_critSect);
+    return 0;
+}
+
+// VideoRenderCallback
+WebRtc_Word32 VideoRenderExternalImpl::RenderFrame(
+                                                   const WebRtc_UWord32 streamId,
+                                                   VideoFrame& videoFrame)
+{
+    return 0;
+}
+} //namespace webrtc
+
diff --git a/src/modules/video_render/main/source/external/video_render_external_impl.h b/src/modules/video_render/main/source/external/video_render_external_impl.h
new file mode 100644
index 0000000..547c410
--- /dev/null
+++ b/src/modules/video_render/main/source/external/video_render_external_impl.h
@@ -0,0 +1,133 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_
+
+#include "i_video_render.h"
+#include "critical_section_wrapper.h"
+#include "module_common_types.h"
+
+namespace webrtc {
+
+// Class definitions
+class VideoRenderExternalImpl: IVideoRender, public VideoRenderCallback
+{
+public:
+    /*
+     *   Constructor/destructor
+     */
+
+    VideoRenderExternalImpl(const WebRtc_Word32 id,
+                            const VideoRenderType videoRenderType,
+                            void* window, const bool fullscreen);
+
+    virtual ~VideoRenderExternalImpl();
+
+    virtual WebRtc_Word32 Init();
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual WebRtc_Word32 ChangeWindow(void* window);
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderCallback
+            * AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+                                      const WebRtc_UWord32 zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual WebRtc_Word32
+            DeleteIncomingRenderStream(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32
+            GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
+                                              WebRtc_UWord32& zOrder,
+                                              float& left, float& top,
+                                              float& right, float& bottom) const;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual WebRtc_Word32 StartRender();
+
+    virtual WebRtc_Word32 StopRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderType RenderType();
+
+    virtual RawVideoType PerferedVideoType();
+
+    virtual bool FullScreen();
+
+    virtual WebRtc_Word32
+            GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
+                              WebRtc_UWord64& availableGraphicsMemory) const;
+
+    virtual WebRtc_Word32
+            GetScreenResolution(WebRtc_UWord32& screenWidth,
+                                WebRtc_UWord32& screenHeight) const;
+
+    virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom);
+
+    virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
+                                            const unsigned int zOrder,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom);
+
+    virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
+
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+                                  const WebRtc_UWord8* text,
+                                  const WebRtc_Word32 textLength,
+                                  const WebRtc_UWord32 textColorRef,
+                                  const WebRtc_UWord32 backgroundColorRef,
+                                  const float left, const float top,
+                                  const float right, const float bottom);
+
+    virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+                                    const WebRtc_UWord8 pictureId,
+                                    const void* colorKey, const float left,
+                                    const float top, const float right,
+                                    const float bottom);
+
+    // VideoRenderCallback
+    virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
+                                      VideoFrame& videoFrame);
+
+private:
+    WebRtc_Word32 _id;
+    CriticalSectionWrapper& _critSect;
+    bool _fullscreen;
+};
+
+} //namespace webrtc
+
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_EXTERNAL_VIDEO_RENDER_EXTERNAL_IMPL_H_
diff --git a/src/modules/video_render/main/source/i_video_render.h b/src/modules/video_render/main/source/i_video_render.h
new file mode 100644
index 0000000..2799a79
--- /dev/null
+++ b/src/modules/video_render/main/source/i_video_render.h
@@ -0,0 +1,133 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_
+
+#include "video_render.h"
+
+namespace webrtc {
+
+// Class definitions
+class IVideoRender
+{
+public:
+    /*
+     *   Constructor/destructor
+     */
+
+    virtual ~IVideoRender()
+    {
+    };
+
+    virtual WebRtc_Word32 Init() = 0;
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id) = 0;
+
+    virtual WebRtc_Word32 ChangeWindow(void* window) = 0;
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderCallback
+            * AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+                                      const WebRtc_UWord32 zOrder,
+                                      const float left,
+                                      const float top,
+                                      const float right,
+                                      const float bottom) = 0;
+
+    virtual WebRtc_Word32
+            DeleteIncomingRenderStream(const WebRtc_UWord32 streamId) = 0;
+
+    virtual WebRtc_Word32
+            GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
+                                              WebRtc_UWord32& zOrder,
+                                              float& left,
+                                              float& top,
+                                              float& right,
+                                              float& bottom) const = 0;
+    // Implemented in common code?
+    //virtual WebRtc_UWord32 GetNumIncomingRenderStreams() const = 0;
+    //virtual bool HasIncomingRenderStream(const WebRtc_UWord16 stramId) const = 0;
+
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual WebRtc_Word32 StartRender() = 0;
+
+    virtual WebRtc_Word32 StopRender() = 0;
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+    virtual VideoRenderType RenderType() = 0;
+
+    virtual RawVideoType PerferedVideoType() = 0;
+
+    virtual bool FullScreen() = 0;
+
+    // TODO: This should be treated in platform specific code only
+    virtual WebRtc_Word32
+            GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
+                              WebRtc_UWord64& availableGraphicsMemory) const = 0;
+
+    virtual WebRtc_Word32
+            GetScreenResolution(WebRtc_UWord32& screenWidth,
+                                WebRtc_UWord32& screenHeight) const = 0;
+
+    virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId) = 0;
+
+    virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
+                                            const float left,
+                                            const float top,
+                                            const float right,
+                                            const float bottom) = 0;
+
+    virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
+                                            const unsigned int zOrder,
+                                            const float left,
+                                            const float top,
+                                            const float right,
+                                            const float bottom) = 0;
+
+    virtual WebRtc_Word32 SetTransparentBackground(const bool enable) = 0;
+
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+                                  const WebRtc_UWord8* text,
+                                  const WebRtc_Word32 textLength,
+                                  const WebRtc_UWord32 textColorRef,
+                                  const WebRtc_UWord32 backgroundColorRef,
+                                  const float left,
+                                  const float top,
+                                  const float rigth,
+                                  const float bottom) = 0;
+
+    virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+                                    const WebRtc_UWord8 pictureId,
+                                    const void* colorKey,
+                                    const float left,
+                                    const float top,
+                                    const float right,
+                                    const float bottom) = 0;
+
+};
+} //namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_I_VIDEO_RENDER_H_
diff --git a/src/modules/video_render/main/source/incoming_video_stream.cc b/src/modules/video_render/main/source/incoming_video_stream.cc
new file mode 100644
index 0000000..e44862a
--- /dev/null
+++ b/src/modules/video_render/main/source/incoming_video_stream.cc
@@ -0,0 +1,362 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_render/main/source/incoming_video_stream.h"
+
+#include <cassert>
+
+#if defined(_WIN32)
+#include <windows.h>
+#elif defined(WEBRTC_LINUX)
+#include <ctime>
+#include <sys/time.h>
+#else
+#include <sys/time.h>
+#endif
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "modules/video_render/main/source/video_render_frames.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/event_wrapper.h"
+#include "system_wrappers/interface/map_wrapper.h"
+#include "system_wrappers/interface/thread_wrapper.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "system_wrappers/interface/trace.h"
+
+namespace webrtc {
+
+IncomingVideoStream::IncomingVideoStream(const WebRtc_Word32 module_id,
+                                         const WebRtc_UWord32 stream_id)
+    : module_id_(module_id),
+      stream_id_(stream_id),
+      stream_critsect_(*CriticalSectionWrapper::CreateCriticalSection()),
+      thread_critsect_(*CriticalSectionWrapper::CreateCriticalSection()),
+      buffer_critsect_(*CriticalSectionWrapper::CreateCriticalSection()),
+      incoming_render_thread_(),
+      deliver_buffer_event_(*EventWrapper::Create()),
+      running_(false),
+      external_callback_(NULL),
+      render_callback_(NULL),
+      render_buffers_(*(new VideoRenderFrames)),
+      callbackVideoType_(kVideoI420),
+      callbackWidth_(0),
+      callbackHeight_(0),
+      incoming_rate_(0),
+      last_rate_calculation_time_ms_(0),
+      num_frames_since_last_calculation_(0),
+      last_rendered_frame_(),
+      temp_frame_(),
+      start_image_(),
+      timeout_image_(),
+      timeout_time_(),
+      mirror_frames_enabled_(false),
+      mirroring_(),
+      transformed_video_frame_() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, module_id_,
+               "%s created for stream %d", __FUNCTION__, stream_id);
+}
+
+IncomingVideoStream::~IncomingVideoStream() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, module_id_,
+               "%s deleted for stream %d", __FUNCTION__, stream_id_);
+
+  Stop();
+
+  // incoming_render_thread_ - Delete in stop
+  delete &render_buffers_;
+  delete &stream_critsect_;
+  delete &buffer_critsect_;
+  delete &thread_critsect_;
+  delete &deliver_buffer_event_;
+}
+
+WebRtc_Word32 IncomingVideoStream::ChangeModuleId(const WebRtc_Word32 id) {
+  CriticalSectionScoped cs(&stream_critsect_);
+  module_id_ = id;
+  return 0;
+}
+
+VideoRenderCallback* IncomingVideoStream::ModuleCallback() {
+  CriticalSectionScoped cs(&stream_critsect_);
+  return this;
+}
+
+WebRtc_Word32 IncomingVideoStream::RenderFrame(const WebRtc_UWord32 stream_id,
+                                               VideoFrame& video_frame) {
+  CriticalSectionScoped csS(&stream_critsect_);
+  WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
+               "%s for stream %d, render time: %u", __FUNCTION__, stream_id_,
+               video_frame.RenderTimeMs());
+
+  if (!running_) {
+    WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
+                 "%s: Not running", __FUNCTION__);
+    return -1;
+  }
+
+  if (true == mirror_frames_enabled_) {
+    transformed_video_frame_.VerifyAndAllocate(video_frame.Length());
+    if (mirroring_.mirror_x_axis) {
+      MirrorI420UpDown(video_frame.Buffer(),
+                       transformed_video_frame_.Buffer(),
+                       video_frame.Width(), video_frame.Height());
+      transformed_video_frame_.SetLength(video_frame.Length());
+      transformed_video_frame_.SetWidth(video_frame.Width());
+      transformed_video_frame_.SetHeight(video_frame.Height());
+      video_frame.SwapFrame(transformed_video_frame_);
+    }
+    if (mirroring_.mirror_y_axis) {
+      MirrorI420LeftRight(video_frame.Buffer(),
+                          transformed_video_frame_.Buffer(),
+                          video_frame.Width(), video_frame.Height());
+      transformed_video_frame_.SetLength(video_frame.Length());
+      transformed_video_frame_.SetWidth(video_frame.Width());
+      transformed_video_frame_.SetHeight(video_frame.Height());
+      video_frame.SwapFrame(transformed_video_frame_);
+    }
+  }
+
+  // Rate statistics.
+  num_frames_since_last_calculation_++;
+  WebRtc_Word64 now_ms = TickTime::MillisecondTimestamp();
+  if (now_ms >= last_rate_calculation_time_ms_ + KFrameRatePeriodMs) {
+    incoming_rate_ =
+        static_cast<WebRtc_UWord32>(1000 * num_frames_since_last_calculation_ /
+                                    (now_ms - last_rate_calculation_time_ms_));
+    num_frames_since_last_calculation_ = 0;
+    last_rate_calculation_time_ms_ = now_ms;
+  }
+
+  // Insert frame.
+  CriticalSectionScoped csB(&buffer_critsect_);
+  if (render_buffers_.AddFrame(&video_frame) == 1)
+    deliver_buffer_event_.Set();
+
+  return 0;
+}
+
+WebRtc_Word32 IncomingVideoStream::SetStartImage(
+    const VideoFrame& video_frame) {
+  CriticalSectionScoped csS(&thread_critsect_);
+  return start_image_.CopyFrame(video_frame);
+}
+
+WebRtc_Word32 IncomingVideoStream::SetTimeoutImage(
+    const VideoFrame& video_frame, const WebRtc_UWord32 timeout) {
+  CriticalSectionScoped csS(&thread_critsect_);
+  timeout_time_ = timeout;
+  return timeout_image_.CopyFrame(video_frame);
+}
+
+WebRtc_Word32 IncomingVideoStream::SetRenderCallback(
+    VideoRenderCallback* render_callback) {
+  CriticalSectionScoped cs(&stream_critsect_);
+
+  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_,
+               "%s(%x) for stream %d", __FUNCTION__, render_callback,
+               stream_id_);
+  render_callback_ = render_callback;
+  return 0;
+}
+
+WebRtc_Word32 IncomingVideoStream::EnableMirroring(const bool enable,
+                                                   const bool mirror_x_axis,
+                                                   const bool mirror_y_axis) {
+  CriticalSectionScoped cs(&stream_critsect_);
+  mirror_frames_enabled_ = enable;
+  mirroring_.mirror_x_axis = mirror_x_axis;
+  mirroring_.mirror_y_axis = mirror_y_axis;
+
+  return 0;
+}
+
+WebRtc_Word32 IncomingVideoStream::SetExternalCallback(
+    VideoRenderCallback* external_callback) {
+  CriticalSectionScoped cs(&stream_critsect_);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_,
+               "%s(%x) for stream %d", __FUNCTION__, external_callback,
+               stream_id_);
+  external_callback_ = external_callback;
+  callbackVideoType_ = kVideoI420;
+  callbackWidth_ = 0;
+  callbackHeight_ = 0;
+  return 0;
+}
+
+WebRtc_Word32 IncomingVideoStream::Start() {
+  CriticalSectionScoped csS(&stream_critsect_);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_,
+               "%s for stream %d", __FUNCTION__, stream_id_);
+  if (running_) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, module_id_,
+                 "%s: Already running", __FUNCTION__);
+    return 0;
+  }
+
+  CriticalSectionScoped csT(&thread_critsect_);
+  assert(incoming_render_thread_ == NULL);
+
+  incoming_render_thread_ = ThreadWrapper::CreateThread(
+      IncomingVideoStreamThreadFun, this, kRealtimePriority,
+      "IncomingVideoStreamThread");
+  if (!incoming_render_thread_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, module_id_,
+                 "%s: No thread", __FUNCTION__);
+    return -1;
+  }
+
+  unsigned int t_id = 0;
+  if (incoming_render_thread_->Start(t_id)) {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_,
+                 "%s: thread started: %u", __FUNCTION__, t_id);
+  } else {
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, module_id_,
+                 "%s: Could not start send thread", __FUNCTION__);
+    return -1;
+  }
+  deliver_buffer_event_.StartTimer(false, KEventStartupTimeMS);
+
+  running_ = true;
+  return 0;
+}
+
+WebRtc_Word32 IncomingVideoStream::Stop() {
+  CriticalSectionScoped cs_stream(&stream_critsect_);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, module_id_,
+               "%s for stream %d", __FUNCTION__, stream_id_);
+
+  if (!running_) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, module_id_,
+                 "%s: Not running", __FUNCTION__);
+    return 0;
+  }
+
+  thread_critsect_.Enter();
+  if (incoming_render_thread_) {
+    ThreadWrapper* thread = incoming_render_thread_;
+    incoming_render_thread_ = NULL;
+    thread->SetNotAlive();
+#ifndef WIN32_
+    deliver_buffer_event_.StopTimer();
+#endif
+    thread_critsect_.Leave();
+    if (thread->Stop()) {
+      delete thread;
+    } else {
+      assert(false);
+      WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, module_id_,
+                   "%s: Not able to stop thread, leaking", __FUNCTION__);
+    }
+  } else {
+    thread_critsect_.Leave();
+  }
+  running_ = false;
+  return 0;
+}
+
+WebRtc_Word32 IncomingVideoStream::Reset() {
+  CriticalSectionScoped cs_stream(&stream_critsect_);
+  CriticalSectionScoped cs_buffer(&buffer_critsect_);
+  render_buffers_.ReleaseAllFrames();
+  return 0;
+}
+
+WebRtc_UWord32 IncomingVideoStream::StreamId() const {
+  CriticalSectionScoped cs_stream(&stream_critsect_);
+  return stream_id_;
+}
+
+WebRtc_UWord32 IncomingVideoStream::IncomingRate() const {
+  CriticalSectionScoped cs(&stream_critsect_);
+  return incoming_rate_;
+}
+
+bool IncomingVideoStream::IncomingVideoStreamThreadFun(void* obj) {
+  return static_cast<IncomingVideoStream*>(obj)->IncomingVideoStreamProcess();
+}
+
+bool IncomingVideoStream::IncomingVideoStreamProcess() {
+  if (kEventError != deliver_buffer_event_.Wait(KEventMaxWaitTimeMs)) {
+    if (incoming_render_thread_ == NULL) {
+      // Terminating
+      return false;
+    }
+
+    thread_critsect_.Enter();
+    VideoFrame* frame_to_render = NULL;
+
+    // Get a new frame to render and the time for the frame after this one.
+    buffer_critsect_.Enter();
+    frame_to_render = render_buffers_.FrameToRender();
+    WebRtc_UWord32 wait_time = render_buffers_.TimeToNextFrameRelease();
+    buffer_critsect_.Leave();
+
+    // Set timer for next frame to render.
+    if (wait_time > KEventMaxWaitTimeMs) {
+      wait_time = KEventMaxWaitTimeMs;
+    }
+    deliver_buffer_event_.StartTimer(false, wait_time);
+
+    if (!frame_to_render) {
+      if (render_callback_) {
+        if (last_rendered_frame_.RenderTimeMs() == 0 &&
+            start_image_.Size()) {
+          // We have not rendered anything and have a start image.
+          temp_frame_.CopyFrame(start_image_);
+          render_callback_->RenderFrame(stream_id_, temp_frame_);
+        } else if (timeout_image_.Size() &&
+                   last_rendered_frame_.RenderTimeMs() + timeout_time_ <
+                       TickTime::MillisecondTimestamp()) {
+          // Render a timeout image.
+          temp_frame_.CopyFrame(timeout_image_);
+          render_callback_->RenderFrame(stream_id_, temp_frame_);
+        }
+      }
+
+      // No frame.
+      thread_critsect_.Leave();
+      return true;
+    }
+
+    // Send frame for rendering.
+    if (external_callback_) {
+      WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
+                   "%s: executing external renderer callback to deliver frame",
+                   __FUNCTION__, frame_to_render->RenderTimeMs());
+      external_callback_->RenderFrame(stream_id_, *frame_to_render);
+    } else {
+      if (render_callback_) {
+        WEBRTC_TRACE(kTraceStream, kTraceVideoRenderer, module_id_,
+                     "%s: Render frame, time: ", __FUNCTION__,
+                     frame_to_render->RenderTimeMs());
+        render_callback_->RenderFrame(stream_id_, *frame_to_render);
+      }
+    }
+
+    // Release critsect before calling the module user.
+    thread_critsect_.Leave();
+
+    // We're done with this frame, delete it.
+    if (frame_to_render) {
+      CriticalSectionScoped cs(&buffer_critsect_);
+      last_rendered_frame_.SwapFrame(*frame_to_render);
+      render_buffers_.ReturnFrame(frame_to_render);
+    }
+  }
+  return true;
+}
+
+WebRtc_Word32 IncomingVideoStream::GetLastRenderedFrame(
+    VideoFrame& video_frame) const {
+  CriticalSectionScoped cs(&buffer_critsect_);
+  return video_frame.CopyFrame(last_rendered_frame_);
+}
+
+}  // namespace webrtc
diff --git a/src/modules/video_render/main/source/incoming_video_stream.h b/src/modules/video_render/main/source/incoming_video_stream.h
new file mode 100644
index 0000000..11bc115
--- /dev/null
+++ b/src/modules/video_render/main/source/incoming_video_stream.h
@@ -0,0 +1,114 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_INCOMING_VIDEO_STREAM_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_INCOMING_VIDEO_STREAM_H_
+
+#include "modules/video_render/main/interface/video_render.h"
+#include "system_wrappers/interface/map_wrapper.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+class VideoRenderCallback;
+class VideoRenderFrames;
+
+struct VideoMirroring {
+  VideoMirroring() : mirror_x_axis(false), mirror_y_axis(false) {}
+  bool mirror_x_axis;
+  bool mirror_y_axis;
+};
+
+class IncomingVideoStream : public VideoRenderCallback {
+ public:
+  IncomingVideoStream(const WebRtc_Word32 module_id,
+                      const WebRtc_UWord32 stream_id);
+  ~IncomingVideoStream();
+
+  WebRtc_Word32 ChangeModuleId(const WebRtc_Word32 id);
+
+  // Get callback to deliver frames to the module.
+  VideoRenderCallback* ModuleCallback();
+  virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 stream_id,
+                                    VideoFrame& video_frame);
+
+  // Set callback to the platform dependent code.
+  WebRtc_Word32 SetRenderCallback(VideoRenderCallback* render_callback);
+
+  // Callback for file recording, snapshot, ...
+  WebRtc_Word32 SetExternalCallback(VideoRenderCallback* render_object);
+
+  // Start/Stop.
+  WebRtc_Word32 Start();
+  WebRtc_Word32 Stop();
+
+  // Clear all buffers.
+  WebRtc_Word32 Reset();
+
+  // Properties.
+  WebRtc_UWord32 StreamId() const;
+  WebRtc_UWord32 IncomingRate() const;
+
+  WebRtc_Word32 GetLastRenderedFrame(VideoFrame& video_frame) const;
+
+  WebRtc_Word32 SetStartImage(const VideoFrame& video_frame);
+
+  WebRtc_Word32 SetTimeoutImage(const VideoFrame& video_frame,
+                                const WebRtc_UWord32 timeout);
+
+  WebRtc_Word32 EnableMirroring(const bool enable,
+                                const bool mirror_xaxis,
+                                const bool mirror_yaxis);
+
+ protected:
+  static bool IncomingVideoStreamThreadFun(void* obj);
+  bool IncomingVideoStreamProcess();
+
+ private:
+  enum { KEventStartupTimeMS = 10 };
+  enum { KEventMaxWaitTimeMs = 100 };
+  enum { KFrameRatePeriodMs = 1000 };
+
+  WebRtc_Word32 module_id_;
+  WebRtc_UWord32 stream_id_;
+  // Critsects in allowed to enter order.
+  CriticalSectionWrapper& stream_critsect_;
+  CriticalSectionWrapper& thread_critsect_;
+  CriticalSectionWrapper& buffer_critsect_;
+  ThreadWrapper* incoming_render_thread_;
+  EventWrapper& deliver_buffer_event_;
+  bool running_;
+
+  VideoRenderCallback* external_callback_;
+  VideoRenderCallback* render_callback_;
+  VideoRenderFrames& render_buffers_;
+
+  RawVideoType callbackVideoType_;
+  WebRtc_UWord32 callbackWidth_;
+  WebRtc_UWord32 callbackHeight_;
+
+  WebRtc_UWord32 incoming_rate_;
+  WebRtc_Word64 last_rate_calculation_time_ms_;
+  WebRtc_UWord16 num_frames_since_last_calculation_;
+  VideoFrame last_rendered_frame_;
+  VideoFrame temp_frame_;
+  VideoFrame start_image_;
+  VideoFrame timeout_image_;
+  WebRtc_UWord32 timeout_time_;
+
+  bool mirror_frames_enabled_;
+  VideoMirroring mirroring_;
+  VideoFrame transformed_video_frame_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_INCOMING_VIDEO_STREAM_H_
diff --git a/src/modules/video_render/main/source/linux/video_render_linux_impl.cc b/src/modules/video_render/main/source/linux/video_render_linux_impl.cc
new file mode 100644
index 0000000..a9e0fe5
--- /dev/null
+++ b/src/modules/video_render/main/source/linux/video_render_linux_impl.cc
@@ -0,0 +1,270 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_render_linux_impl.h"
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+#include "video_x11_render.h"
+
+#include <X11/Xlib.h>
+
+namespace webrtc {
+
+VideoRenderLinuxImpl::VideoRenderLinuxImpl(
+                                           const WebRtc_Word32 id,
+                                           const VideoRenderType videoRenderType,
+                                           void* window, const bool fullscreen) :
+            _id(id),
+            _renderLinuxCritsect(
+                                 *CriticalSectionWrapper::CreateCriticalSection()),
+            _ptrWindow(window), _ptrX11Render(NULL)
+{
+}
+
+VideoRenderLinuxImpl::~VideoRenderLinuxImpl()
+{
+    if (_ptrX11Render)
+        delete _ptrX11Render;
+
+    delete &_renderLinuxCritsect;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::Init()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+
+    CriticalSectionScoped cs(&_renderLinuxCritsect);
+    _ptrX11Render = new VideoX11Render((Window) _ptrWindow);
+    if (!_ptrX11Render)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s",
+                     "Failed to create instance of VideoX11Render object");
+        return -1;
+    }
+    int retVal = _ptrX11Render->Init();
+    if (retVal == -1)
+    {
+        return -1;
+    }
+
+    return 0;
+
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    CriticalSectionScoped cs(&_renderLinuxCritsect);
+
+    _id = id;
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::ChangeWindow(void* window)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+
+    CriticalSectionScoped cs(&_renderLinuxCritsect);
+    _ptrWindow = window;
+
+    if (_ptrX11Render)
+    {
+        return _ptrX11Render->ChangeWindow((Window) window);
+    }
+
+    return -1;
+}
+
+VideoRenderCallback* VideoRenderLinuxImpl::AddIncomingRenderStream(
+                                                                       const WebRtc_UWord32 streamId,
+                                                                       const WebRtc_UWord32 zOrder,
+                                                                       const float left,
+                                                                       const float top,
+                                                                       const float right,
+                                                                       const float bottom)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(&_renderLinuxCritsect);
+
+    VideoRenderCallback* renderCallback = NULL;
+    if (_ptrX11Render)
+    {
+        VideoX11Channel* renderChannel =
+                _ptrX11Render->CreateX11RenderChannel(streamId, zOrder, left,
+                                                      top, right, bottom);
+        if (!renderChannel)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                         "Render channel creation failed for stream id: %d",
+                         streamId);
+            return NULL;
+        }
+        renderCallback = (VideoRenderCallback *) renderChannel;
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "_ptrX11Render is NULL");
+        return NULL;
+    }
+    return renderCallback;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::DeleteIncomingRenderStream(
+                                                               const WebRtc_UWord32 streamId)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(&_renderLinuxCritsect);
+
+    if (_ptrX11Render)
+    {
+        return _ptrX11Render->DeleteX11RenderChannel(streamId);
+    }
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::GetIncomingRenderStreamProperties(
+                                                                      const WebRtc_UWord32 streamId,
+                                                                      WebRtc_UWord32& zOrder,
+                                                                      float& left,
+                                                                      float& top,
+                                                                      float& right,
+                                                                      float& bottom) const
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(&_renderLinuxCritsect);
+
+    if (_ptrX11Render)
+    {
+        return _ptrX11Render->GetIncomingStreamProperties(streamId, zOrder,
+                                                          left, top, right,
+                                                          bottom);
+    }
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::StartRender()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::StopRender()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s",
+                 __FUNCTION__);
+    return 0;
+}
+
+VideoRenderType VideoRenderLinuxImpl::RenderType()
+{
+    return kRenderX11;
+}
+
+RawVideoType VideoRenderLinuxImpl::PerferedVideoType()
+{
+    return kVideoI420;
+}
+
+bool VideoRenderLinuxImpl::FullScreen()
+{
+    return false;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::GetGraphicsMemory(
+                                                      WebRtc_UWord64& /*totalGraphicsMemory*/,
+                                                      WebRtc_UWord64& /*availableGraphicsMemory*/) const
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Linux", __FUNCTION__);
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::GetScreenResolution(
+                                                        WebRtc_UWord32& /*screenWidth*/,
+                                                        WebRtc_UWord32& /*screenHeight*/) const
+{
+    return -1;
+}
+
+WebRtc_UWord32 VideoRenderLinuxImpl::RenderFrameRate(const WebRtc_UWord32 /*streamId*/)
+{
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::SetStreamCropping(
+                                                      const WebRtc_UWord32 /*streamId*/,
+                                                      const float /*left*/,
+                                                      const float /*top*/,
+                                                      const float /*right*/,
+                                                      const float /*bottom*/)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Linux", __FUNCTION__);
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::SetTransparentBackground(const bool /*enable*/)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Linux", __FUNCTION__);
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::ConfigureRenderer(
+                                                      const WebRtc_UWord32 streamId,
+                                                      const unsigned int zOrder,
+                                                      const float left,
+                                                      const float top,
+                                                      const float right,
+                                                      const float bottom)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Linux", __FUNCTION__);
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::SetText(
+                                            const WebRtc_UWord8 textId,
+                                            const WebRtc_UWord8* text,
+                                            const WebRtc_Word32 textLength,
+                                            const WebRtc_UWord32 textColorRef,
+                                            const WebRtc_UWord32 backgroundColorRef,
+                                            const float left, const float top,
+                                            const float rigth,
+                                            const float bottom)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Linux", __FUNCTION__);
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderLinuxImpl::SetBitmap(const void* bitMap,
+                                              const WebRtc_UWord8 pictureId,
+                                              const void* colorKey,
+                                              const float left,
+                                              const float top,
+                                              const float right,
+                                              const float bottom)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                 "%s - not supported on Linux", __FUNCTION__);
+    return -1;
+}
+
+} //namespace webrtc
+
diff --git a/src/modules/video_render/main/source/linux/video_render_linux_impl.h b/src/modules/video_render/main/source/linux/video_render_linux_impl.h
new file mode 100644
index 0000000..063ce2a
--- /dev/null
+++ b/src/modules/video_render/main/source/linux/video_render_linux_impl.h
@@ -0,0 +1,132 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_
+
+#include "i_video_render.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+class VideoX11Render;
+
+// Class definitions
+class VideoRenderLinuxImpl: IVideoRender
+{
+public:
+    /*
+     *   Constructor/destructor
+     */
+
+    VideoRenderLinuxImpl(const WebRtc_Word32 id,
+                         const VideoRenderType videoRenderType,
+                         void* window, const bool fullscreen);
+
+    virtual ~VideoRenderLinuxImpl();
+
+    virtual WebRtc_Word32 Init();
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual WebRtc_Word32 ChangeWindow(void* window);
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderCallback
+            * AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+                                      const WebRtc_UWord32 zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual WebRtc_Word32
+            DeleteIncomingRenderStream(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32
+            GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
+                                              WebRtc_UWord32& zOrder,
+                                              float& left, float& top,
+                                              float& right, float& bottom) const;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual WebRtc_Word32 StartRender();
+
+    virtual WebRtc_Word32 StopRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderType RenderType();
+
+    virtual RawVideoType PerferedVideoType();
+
+    virtual bool FullScreen();
+
+    virtual WebRtc_Word32
+            GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
+                              WebRtc_UWord64& availableGraphicsMemory) const;
+
+    virtual WebRtc_Word32
+            GetScreenResolution(WebRtc_UWord32& screenWidth,
+                                WebRtc_UWord32& screenHeight) const;
+
+    virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom);
+
+    virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
+
+    virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
+                                            const unsigned int zOrder,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom);
+
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+                                  const WebRtc_UWord8* text,
+                                  const WebRtc_Word32 textLength,
+                                  const WebRtc_UWord32 textColorRef,
+                                  const WebRtc_UWord32 backgroundColorRef,
+                                  const float left, const float top,
+                                  const float rigth, const float bottom);
+
+    virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+                                    const WebRtc_UWord8 pictureId,
+                                    const void* colorKey, const float left,
+                                    const float top, const float right,
+                                    const float bottom);
+
+private:
+    WebRtc_Word32 _id;
+    CriticalSectionWrapper& _renderLinuxCritsect;
+
+    void* _ptrWindow;
+
+    // X11 Render
+    VideoX11Render* _ptrX11Render;
+};
+
+} //namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_RENDER_LINUX_IMPL_H_
diff --git a/src/modules/video_render/main/source/linux/video_x11_channel.cc b/src/modules/video_render/main/source/linux/video_x11_channel.cc
new file mode 100644
index 0000000..4d574e3
--- /dev/null
+++ b/src/modules/video_render/main/source/linux/video_x11_channel.cc
@@ -0,0 +1,332 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_x11_channel.h"
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+namespace webrtc {
+
+#define DISP_MAX 128
+
+static Display *dispArray[DISP_MAX];
+static int dispCount = 0;
+
+
+VideoX11Channel::VideoX11Channel(WebRtc_Word32 id) :
+    _crit(*CriticalSectionWrapper::CreateCriticalSection()), _display(NULL),
+          _shminfo(), _image(NULL), _window(0L), _gc(NULL),
+          _width(DEFAULT_RENDER_FRAME_WIDTH),
+          _height(DEFAULT_RENDER_FRAME_HEIGHT), _outWidth(0), _outHeight(0),
+          _xPos(0), _yPos(0), _prepared(false), _dispCount(0), _buffer(NULL),
+          _top(0.0), _left(0.0), _right(0.0), _bottom(0.0),
+          _Id(id)
+{
+}
+
+VideoX11Channel::~VideoX11Channel()
+{
+    if (_prepared)
+    {
+        _crit.Enter();
+        ReleaseWindow();
+        _crit.Leave();
+    }
+    delete &_crit;
+}
+
+WebRtc_Word32 VideoX11Channel::RenderFrame(const WebRtc_UWord32 streamId,
+                                               VideoFrame& videoFrame)
+{
+    CriticalSectionScoped cs(&_crit);
+    if (_width != (WebRtc_Word32) videoFrame.Width() || _height
+            != (WebRtc_Word32) videoFrame.Height())
+    {
+        if (FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1)
+        {
+            return -1;
+        }
+    }
+    return DeliverFrame(videoFrame.Buffer(), videoFrame.Length(),
+                        videoFrame.TimeStamp());
+}
+
+WebRtc_Word32 VideoX11Channel::FrameSizeChange(WebRtc_Word32 width,
+                                                   WebRtc_Word32 height,
+                                                   WebRtc_Word32 /*numberOfStreams */)
+{
+    CriticalSectionScoped cs(&_crit);
+    if (_prepared)
+    {
+        RemoveRenderer();
+    }
+    if (CreateLocalRenderer(width, height) == -1)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 VideoX11Channel::DeliverFrame(unsigned char* buffer,
+                                                WebRtc_Word32 bufferSize,
+                                                unsigned WebRtc_Word32 /*timeStamp90kHz*/)
+{
+    CriticalSectionScoped cs(&_crit);
+    if (!_prepared)
+    {
+        return 0;
+    }
+
+    if (!dispArray[_dispCount])
+    {
+        return -1;
+    }
+
+    unsigned char *pBuf = buffer;
+    // convert to RGB32, setting stride = width.
+    ConvertFromI420(pBuf, _width, kARGB, 0, _width, _height, _buffer);
+
+    // put image in window
+    XShmPutImage(_display, _window, _gc, _image, 0, 0, _xPos, _yPos, _width,
+                 _height, True);
+
+    // very important for the image to update properly!
+    XSync(_display, False);
+    return 0;
+
+}
+
+WebRtc_Word32 VideoX11Channel::GetFrameSize(WebRtc_Word32& width,
+                                                WebRtc_Word32& height)
+{
+    width = _width;
+    height = _height;
+
+    return 0;
+}
+
+WebRtc_Word32 VideoX11Channel::Init(Window window, float left, float top,
+                                        float right, float bottom)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(&_crit);
+
+    _window = window;
+    _left = left;
+    _right = right;
+    _top = top;
+    _bottom = bottom;
+
+    _display = XOpenDisplay(NULL); // Use default display
+    if (!_window || !_display)
+    {
+        return -1;
+    }
+
+    if (dispCount < DISP_MAX)
+    {
+        dispArray[dispCount] = _display;
+        _dispCount = dispCount;
+        dispCount++;
+    }
+    else
+    {
+        return -1;
+    }
+
+    if ((1 < left || left < 0) || (1 < top || top < 0) || (1 < right || right
+            < 0) || (1 < bottom || bottom < 0))
+    {
+        return -1;
+    }
+
+    // calculate position and size of rendered video
+    int x, y;
+    unsigned int winWidth, winHeight, borderwidth, depth;
+    Window rootret;
+    if (XGetGeometry(_display, _window, &rootret, &x, &y, &winWidth,
+                     &winHeight, &borderwidth, &depth) == 0)
+    {
+        return -1;
+    }
+
+    _xPos = (WebRtc_Word32) (winWidth * left);
+    _yPos = (WebRtc_Word32) (winHeight * top);
+    _outWidth = (WebRtc_Word32) (winWidth * (right - left));
+    _outHeight = (WebRtc_Word32) (winHeight * (bottom - top));
+    if (_outWidth % 2)
+        _outWidth++; // the renderer want's sizes that are multiples of two
+    if (_outHeight % 2)
+        _outHeight++;
+
+    _gc = XCreateGC(_display, _window, 0, 0);
+    if (!_gc) {
+      // Failed to create the graphics context.
+      assert(false);
+      return -1;
+    }
+
+    if (CreateLocalRenderer(winWidth, winHeight) == -1)
+    {
+        return -1;
+    }
+    return 0;
+
+}
+
+WebRtc_Word32 VideoX11Channel::ChangeWindow(Window window)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(&_crit);
+
+    // Stop the rendering, if we are rendering...
+    RemoveRenderer();
+    _window = window;
+
+    // calculate position and size of rendered video
+    int x, y;
+    unsigned int winWidth, winHeight, borderwidth, depth;
+    Window rootret;
+    if (XGetGeometry(_display, _window, &rootret, &x, &y, &winWidth,
+                     &winHeight, &borderwidth, &depth) == -1)
+    {
+        return -1;
+    }
+    _xPos = (int) (winWidth * _left);
+    _yPos = (int) (winHeight * _top);
+    _outWidth = (int) (winWidth * (_right - _left));
+    _outHeight = (int) (winHeight * (_bottom - _top));
+    if (_outWidth % 2)
+        _outWidth++; // the renderer want's sizes that are multiples of two
+    if (_outHeight % 2)
+        _outHeight++;
+
+    // Prepare rendering using the
+    if (CreateLocalRenderer(_width, _height) == -1)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 VideoX11Channel::ReleaseWindow()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(&_crit);
+
+    RemoveRenderer();
+    if (_gc) {
+      XFreeGC(_display, _gc);
+      _gc = NULL;
+    }
+    if (_display)
+    {
+        XCloseDisplay(_display);
+        _display = NULL;
+    }
+    return 0;
+}
+
+WebRtc_Word32 VideoX11Channel::CreateLocalRenderer(WebRtc_Word32 width,
+                                                       WebRtc_Word32 height)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
+                 __FUNCTION__);
+    CriticalSectionScoped cs(&_crit);
+
+    if (!_window || !_display)
+    {
+        return -1;
+    }
+
+    if (_prepared)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _Id,
+                     "Renderer already prepared, exits.");
+        return -1;
+    }
+
+    _width = width;
+    _height = height;
+
+    // create shared memory image
+    _image = XShmCreateImage(_display, CopyFromParent, 24, ZPixmap, NULL,
+                             &_shminfo, _width, _height); // this parameter needs to be the same for some reason.
+    _shminfo.shmid = shmget(IPC_PRIVATE, (_image->bytes_per_line
+            * _image->height), IPC_CREAT | 0777);
+    _shminfo.shmaddr = _image->data = (char*) shmat(_shminfo.shmid, 0, 0);
+    if (_image->data == reinterpret_cast<char*>(-1))
+    {
+        return -1;
+    }
+    _buffer = (unsigned char*) _image->data;
+    _shminfo.readOnly = False;
+
+    // attach image to display
+    if (!XShmAttach(_display, &_shminfo))
+    {
+        //printf("XShmAttach failed !\n");
+        return -1;
+    }
+    XSync(_display, False);
+
+    _prepared = true;
+    return 0;
+}
+
+WebRtc_Word32 VideoX11Channel::RemoveRenderer()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
+                 __FUNCTION__);
+
+    if (!_prepared)
+    {
+        return 0;
+    }
+    _prepared = false;
+
+    // Free the memory.
+    XShmDetach(_display, &_shminfo);
+    XDestroyImage( _image );
+    _image = NULL;
+    shmdt(_shminfo.shmaddr);
+    _shminfo.shmaddr = NULL;
+    _buffer = NULL;
+    shmctl(_shminfo.shmid, IPC_RMID, 0);
+    _shminfo.shmid = 0;
+    return 0;
+}
+
+WebRtc_Word32 VideoX11Channel::GetStreamProperties(WebRtc_UWord32& zOrder,
+                                                       float& left, float& top,
+                                                       float& right,
+                                                       float& bottom) const
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _Id, "%s",
+                 __FUNCTION__);
+
+    zOrder = 0; // no z-order support yet
+    left = _left;
+    top = _top;
+    right = _right;
+    bottom = _bottom;
+
+    return 0;
+}
+
+
+} //namespace webrtc
+
+
diff --git a/src/modules/video_render/main/source/linux/video_x11_channel.h b/src/modules/video_render/main/source/linux/video_x11_channel.h
new file mode 100644
index 0000000..d713422
--- /dev/null
+++ b/src/modules/video_render/main/source/linux/video_x11_channel.h
@@ -0,0 +1,97 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
+
+#include "video_render_defines.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include <sys/shm.h>
+
+#include <X11/Xlib.h>
+#include <X11/Xutil.h>
+#include <X11/extensions/XShm.h>
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+#define DEFAULT_RENDER_FRAME_WIDTH 352
+#define DEFAULT_RENDER_FRAME_HEIGHT 288
+
+
+class VideoX11Channel: public VideoRenderCallback
+{
+public:
+    VideoX11Channel(WebRtc_Word32 id);
+
+    virtual ~VideoX11Channel();
+
+    virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
+                                      VideoFrame& videoFrame);
+
+    WebRtc_Word32 FrameSizeChange(WebRtc_Word32 width, WebRtc_Word32 height,
+                                  WebRtc_Word32 numberOfStreams);
+    WebRtc_Word32 DeliverFrame(unsigned char* buffer, WebRtc_Word32 bufferSize,
+                               unsigned WebRtc_Word32 /*timeStamp90kHz*/);
+    WebRtc_Word32 GetFrameSize(WebRtc_Word32& width, WebRtc_Word32& height);
+    WebRtc_Word32 Init(Window window, float left, float top, float right,
+                       float bottom);
+    WebRtc_Word32 ChangeWindow(Window window);
+    WebRtc_Word32
+            GetStreamProperties(WebRtc_UWord32& zOrder, float& left,
+                                float& top, float& right, float& bottom) const;
+    WebRtc_Word32 ReleaseWindow();
+
+    bool IsPrepared()
+    {
+        return _prepared;
+    }
+
+private:
+
+    WebRtc_Word32
+            CreateLocalRenderer(WebRtc_Word32 width, WebRtc_Word32 height);
+    WebRtc_Word32 RemoveRenderer();
+
+    //FIXME a better place for this method? the GetWidthHeight no longer
+    // supported by common_video.
+    int GetWidthHeight(VideoType type, int bufferSize, int& width,
+                       int& height);
+
+    CriticalSectionWrapper& _crit;
+
+    Display* _display;
+    XShmSegmentInfo _shminfo;
+    XImage* _image;
+    Window _window;
+    GC _gc;
+    WebRtc_Word32 _width; // incoming frame width
+    WebRtc_Word32 _height; // incoming frame height
+    WebRtc_Word32 _outWidth; // render frame width
+    WebRtc_Word32 _outHeight; // render frame height
+    WebRtc_Word32 _xPos; // position within window
+    WebRtc_Word32 _yPos;
+    bool _prepared; // true if ready to use
+    WebRtc_Word32 _dispCount;
+
+    unsigned char* _buffer;
+    float _top;
+    float _left;
+    float _right;
+    float _bottom;
+
+    WebRtc_Word32 _Id;
+
+};
+
+
+} //namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_CHANNEL_H_
diff --git a/src/modules/video_render/main/source/linux/video_x11_render.cc b/src/modules/video_render/main/source/linux/video_x11_render.cc
new file mode 100644
index 0000000..9e29fe3
--- /dev/null
+++ b/src/modules/video_render/main/source/linux/video_x11_render.cc
@@ -0,0 +1,154 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_x11_render.h"
+#include "video_x11_channel.h"
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+
+namespace webrtc {
+
+VideoX11Render::VideoX11Render(Window window) :
+    _window(window),
+            _critSect(*CriticalSectionWrapper::CreateCriticalSection())
+{
+}
+
+VideoX11Render::~VideoX11Render()
+{
+    delete &_critSect;
+}
+
+WebRtc_Word32 VideoX11Render::Init()
+{
+    CriticalSectionScoped cs(&_critSect);
+
+    _streamIdToX11ChannelMap.clear();
+
+    return 0;
+}
+
+WebRtc_Word32 VideoX11Render::ChangeWindow(Window window)
+{
+    CriticalSectionScoped cs(&_critSect);
+    VideoX11Channel* renderChannel = NULL;
+
+    std::map<int, VideoX11Channel*>::iterator iter =
+            _streamIdToX11ChannelMap.begin();
+
+    while (iter != _streamIdToX11ChannelMap.end())
+    {
+        renderChannel = iter->second;
+        if (renderChannel)
+        {
+            renderChannel->ChangeWindow(window);
+        }
+        iter++;
+    }
+
+    _window = window;
+
+    return 0;
+}
+
+VideoX11Channel* VideoX11Render::CreateX11RenderChannel(
+                                                                WebRtc_Word32 streamId,
+                                                                WebRtc_Word32 zOrder,
+                                                                const float left,
+                                                                const float top,
+                                                                const float right,
+                                                                const float bottom)
+{
+    CriticalSectionScoped cs(&_critSect);
+    VideoX11Channel* renderChannel = NULL;
+
+    std::map<int, VideoX11Channel*>::iterator iter =
+            _streamIdToX11ChannelMap.find(streamId);
+
+    if (iter == _streamIdToX11ChannelMap.end())
+    {
+        renderChannel = new VideoX11Channel(streamId);
+        if (!renderChannel)
+        {
+            WEBRTC_TRACE(
+                         kTraceError,
+                         kTraceVideoRenderer,
+                         -1,
+                         "Failed to create VideoX11Channel for streamId : %d",
+                         streamId);
+            return NULL;
+        }
+        renderChannel->Init(_window, left, top, right, bottom);
+        _streamIdToX11ChannelMap[streamId] = renderChannel;
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1,
+                     "Render Channel already exists for streamId: %d", streamId);
+        renderChannel = iter->second;
+    }
+
+    return renderChannel;
+}
+
+WebRtc_Word32 VideoX11Render::DeleteX11RenderChannel(WebRtc_Word32 streamId)
+{
+    CriticalSectionScoped cs(&_critSect);
+
+    std::map<int, VideoX11Channel*>::iterator iter =
+            _streamIdToX11ChannelMap.find(streamId);
+    if (iter != _streamIdToX11ChannelMap.end())
+    {
+        VideoX11Channel *renderChannel = iter->second;
+        if (renderChannel)
+        {
+            renderChannel->ReleaseWindow();
+            delete renderChannel;
+            renderChannel = NULL;
+        }
+        _streamIdToX11ChannelMap.erase(iter);
+    }
+
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                 "No VideoX11Channel object exists for stream id: %d",
+                 streamId);
+    return -1;
+}
+
+WebRtc_Word32 VideoX11Render::GetIncomingStreamProperties(
+                                                              WebRtc_Word32 streamId,
+                                                              WebRtc_UWord32& zOrder,
+                                                              float& left,
+                                                              float& top,
+                                                              float& right,
+                                                              float& bottom)
+{
+    CriticalSectionScoped cs(&_critSect);
+
+    std::map<int, VideoX11Channel*>::iterator iter =
+            _streamIdToX11ChannelMap.find(streamId);
+    if (iter != _streamIdToX11ChannelMap.end())
+    {
+        VideoX11Channel *renderChannel = iter->second;
+        if (renderChannel)
+        {
+            renderChannel->GetStreamProperties(zOrder, left, top, right, bottom);
+        }
+    }
+
+    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                 "No VideoX11Channel object exists for stream id: %d",
+                 streamId);
+    return -1;
+}
+
+} //namespace webrtc
+
diff --git a/src/modules/video_render/main/source/linux/video_x11_render.h b/src/modules/video_render/main/source/linux/video_x11_render.h
new file mode 100644
index 0000000..9b140ef
--- /dev/null
+++ b/src/modules/video_render/main/source/linux/video_x11_render.h
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
+
+#include "video_render_defines.h"
+
+#include <X11/Xlib.h>
+#include <map>
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+class VideoX11Channel;
+
+class VideoX11Render
+{
+
+public:
+    VideoX11Render(Window window);
+    ~VideoX11Render();
+
+    WebRtc_Word32 Init();
+    WebRtc_Word32 ChangeWindow(Window window);
+
+    VideoX11Channel* CreateX11RenderChannel(WebRtc_Word32 streamId,
+                                                WebRtc_Word32 zOrder,
+                                                const float left,
+                                                const float top,
+                                                const float right,
+                                                const float bottom);
+
+    WebRtc_Word32 DeleteX11RenderChannel(WebRtc_Word32 streamId);
+
+    WebRtc_Word32 GetIncomingStreamProperties(WebRtc_Word32 streamId,
+                                              WebRtc_UWord32& zOrder,
+                                              float& left, float& top,
+                                              float& right, float& bottom);
+
+private:
+    Window _window;
+    CriticalSectionWrapper& _critSect;
+    std::map<int, VideoX11Channel*> _streamIdToX11ChannelMap;
+
+};
+
+
+} //namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_LINUX_VIDEO_X11_RENDER_H_
diff --git a/src/modules/video_render/main/source/mac/cocoa_full_screen_window.h b/src/modules/video_render/main/source/mac/cocoa_full_screen_window.h
new file mode 100644
index 0000000..c8e98bb
--- /dev/null
+++ b/src/modules/video_render/main/source/mac/cocoa_full_screen_window.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+//  cocoa_full_screen_window.h
+//
+//
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_
+
+#import <Cocoa/Cocoa.h>
+//#define GRAB_ALL_SCREENS 1
+
+@interface CocoaFullScreenWindow : NSObject {
+	NSWindow*			_window;
+}
+
+-(id)init;
+-(void)grabFullScreen;
+-(void)releaseFullScreen;
+-(NSWindow*)window;
+
+@end
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_FULL_SCREEN_WINDOW_H_
diff --git a/src/modules/video_render/main/source/mac/cocoa_full_screen_window.mm b/src/modules/video_render/main/source/mac/cocoa_full_screen_window.mm
new file mode 100644
index 0000000..e86bab1
--- /dev/null
+++ b/src/modules/video_render/main/source/mac/cocoa_full_screen_window.mm
@@ -0,0 +1,87 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "cocoa_full_screen_window.h"
+#include "trace.h"
+
+using namespace webrtc;
+
+@implementation CocoaFullScreenWindow
+
+-(id)init{	
+	
+	self = [super init];
+	if(!self){
+		WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d COULD NOT CREATE INSTANCE", __FUNCTION__, __LINE__); 
+		return nil;
+	}
+	
+	
+	WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, 0, "%s:%d Created instance", __FUNCTION__, __LINE__); 
+	return self;
+}
+
+-(void)grabFullScreen{
+	
+#ifdef GRAB_ALL_SCREENS
+	if(CGCaptureAllDisplays() != kCGErrorSuccess)
+#else
+	if(CGDisplayCapture(kCGDirectMainDisplay) != kCGErrorSuccess)
+#endif
+	{
+		WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not capture main level", __FUNCTION__, __LINE__); 
+	}
+	
+	// get the shielding window level
+	int windowLevel = CGShieldingWindowLevel();
+	
+	// get the screen rect of main display
+	NSRect screenRect = [[NSScreen mainScreen]frame];
+	
+	_window = [[NSWindow alloc]initWithContentRect:screenRect 
+										   styleMask:NSBorderlessWindowMask
+											 backing:NSBackingStoreBuffered
+											   defer:NO
+											  screen:[NSScreen mainScreen]];
+	
+	[_window setLevel:windowLevel];
+	[_window setBackgroundColor:[NSColor blackColor]];
+	[_window makeKeyAndOrderFront:nil];
+
+}
+ 
+-(void)releaseFullScreen
+{
+	[_window orderOut:self];
+	
+#ifdef GRAB_ALL_SCREENS
+	if(CGReleaseAllDisplays() != kCGErrorSuccess)
+#else
+	if(CGDisplayRelease(kCGDirectMainDisplay) != kCGErrorSuccess)
+#endif
+	{
+		WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not release the displays", __FUNCTION__, __LINE__); 
+	}		
+}
+
+- (NSWindow*)window
+{
+  return _window;
+}
+
+- (void) dealloc
+{
+	[self releaseFullScreen];
+	[super dealloc];
+}	
+
+
+	
+@end
diff --git a/src/modules/video_render/main/source/mac/cocoa_render_view.h b/src/modules/video_render/main/source/mac/cocoa_render_view.h
new file mode 100644
index 0000000..15a8108
--- /dev/null
+++ b/src/modules/video_render/main/source/mac/cocoa_render_view.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+//  cocoa_render_view.h
+//
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_
+
+#import <Cocoa/Cocoa.h>
+#import <OpenGL/gl.h>
+#import <OpenGL/glu.h>
+#import <OpenGL/OpenGL.h>
+
+@interface CocoaRenderView : NSOpenGLView {
+  NSOpenGLContext* _nsOpenGLContext;
+}
+
+-(void)initCocoaRenderView:(NSOpenGLPixelFormat*)fmt;
+-(void)initCocoaRenderViewFullScreen:(NSOpenGLPixelFormat*)fmt;
+-(NSOpenGLContext*)nsOpenGLContext;
+@end
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_COCOA_RENDER_VIEW_H_
diff --git a/src/modules/video_render/main/source/mac/cocoa_render_view.mm b/src/modules/video_render/main/source/mac/cocoa_render_view.mm
new file mode 100644
index 0000000..567d171
--- /dev/null
+++ b/src/modules/video_render/main/source/mac/cocoa_render_view.mm
@@ -0,0 +1,54 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import <Cocoa/Cocoa.h>
+#import <AppKit/AppKit.h>
+#import "cocoa_render_view.h"
+#include "trace.h"
+
+using namespace webrtc;
+
+@implementation CocoaRenderView
+
+-(void)initCocoaRenderView:(NSOpenGLPixelFormat*)fmt{
+	
+	self = [super initWithFrame:[self frame] pixelFormat:[fmt autorelease]];
+	if (self == nil){
+		
+		WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not create instance", __FUNCTION__, __LINE__); 
+	}
+	
+	
+	_nsOpenGLContext = [self openGLContext];
+
+}
+
+-(NSOpenGLContext*)nsOpenGLContext {
+    return _nsOpenGLContext;
+}
+
+-(void)initCocoaRenderViewFullScreen:(NSOpenGLPixelFormat*)fmt{
+	
+	NSRect screenRect = [[NSScreen mainScreen]frame];
+//	[_windowRef setFrame:screenRect];
+//	[_windowRef setBounds:screenRect];
+	self = [super initWithFrame:screenRect	pixelFormat:[fmt autorelease]];
+	if (self == nil){
+		
+		WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, 0, "%s:%d Could not create instance", __FUNCTION__, __LINE__); 
+	}
+	
+	_nsOpenGLContext = [self openGLContext];
+
+}
+
+@end
+
+
diff --git a/src/modules/video_render/main/source/mac/video_render_agl.cc b/src/modules/video_render/main/source/mac/video_render_agl.cc
new file mode 100644
index 0000000..b431cce
--- /dev/null
+++ b/src/modules/video_render/main/source/mac/video_render_agl.cc
@@ -0,0 +1,2007 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+
+#if defined(CARBON_RENDERING)
+
+#include "video_render_agl.h"
+
+//  includes
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "trace.h"
+#include "thread_wrapper.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+
+namespace webrtc {
+
+/*
+ *
+ *    VideoChannelAGL
+ *
+ */
+
+#pragma mark VideoChannelAGL constructor
+
+VideoChannelAGL::VideoChannelAGL(AGLContext& aglContext, int iId, VideoRenderAGL* owner) :
+    _aglContext( aglContext),
+    _id( iId),
+    _owner( owner),
+    _width( 0),
+    _height( 0),
+    _stretchedWidth( 0),
+    _stretchedHeight( 0),
+    _startWidth( 0.0f),
+    _startHeight( 0.0f),
+    _stopWidth( 0.0f),
+    _stopHeight( 0.0f),
+    _xOldWidth( 0),
+    _yOldHeight( 0),
+    _oldStretchedHeight(0),
+    _oldStretchedWidth( 0),
+    _buffer( 0),
+    _bufferSize( 0),
+    _incommingBufferSize(0),
+    _bufferIsUpdated( false),
+    _sizeInitialized( false),
+    _numberOfStreams( 0),
+    _bVideoSizeStartedChanging(false),
+    _pixelFormat( GL_RGBA),
+    _pixelDataType( GL_UNSIGNED_INT_8_8_8_8),
+    _texture( 0)
+
+{
+    //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Constructor", __FUNCTION__, __LINE__);
+}
+
+VideoChannelAGL::~VideoChannelAGL()
+{
+    //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Destructor", __FUNCTION__, __LINE__);
+    if (_buffer)
+    {
+        delete [] _buffer;
+        _buffer = NULL;
+    }
+
+    aglSetCurrentContext(_aglContext);
+
+    if (_texture != 0)
+    {
+        glDeleteTextures(1, (const GLuint*) &_texture);
+        _texture = 0;
+    }
+}
+
+WebRtc_Word32 VideoChannelAGL::RenderFrame(const WebRtc_UWord32 streamId, VideoFrame& videoFrame)
+{
+    _owner->LockAGLCntx();
+    if(_width != videoFrame.Width() ||
+            _height != videoFrame.Height())
+    {
+        if(FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1)
+        { //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d FrameSizeChange returned an error", __FUNCTION__, __LINE__);
+            _owner->UnlockAGLCntx();
+            return -1;
+        }
+    }
+
+    _owner->UnlockAGLCntx();
+    return DeliverFrame(videoFrame.Buffer(), videoFrame.Length(), videoFrame.TimeStamp());
+}
+
+int VideoChannelAGL::UpdateSize(int /*width*/, int /*height*/)
+{
+    _owner->LockAGLCntx();
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelAGL::UpdateStretchSize(int stretchHeight, int stretchWidth)
+{
+
+    _owner->LockAGLCntx();
+    _stretchedHeight = stretchHeight;
+    _stretchedWidth = stretchWidth;
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelAGL::FrameSizeChange(int width, int height, int numberOfStreams)
+{
+    //  We'll get a new frame size from VideoAPI, prepare the buffer
+
+    _owner->LockAGLCntx();
+
+    if (width == _width && _height == height)
+    {
+        // We already have a correct buffer size
+        _numberOfStreams = numberOfStreams;
+        _owner->UnlockAGLCntx();
+        return 0;
+    }
+
+    _width = width;
+    _height = height;
+
+    // Delete the old buffer, create a new one with correct size.
+    if (_buffer)
+    {
+        delete [] _buffer;
+        _bufferSize = 0;
+    }
+
+    _incommingBufferSize = CalcBufferSize(kI420, _width, _height);
+    _bufferSize = CalcBufferSize(kARGB, _width, _height);//_width * _height * bytesPerPixel;
+    _buffer = new unsigned char [_bufferSize];
+    memset(_buffer, 0, _bufferSize * sizeof(unsigned char));
+
+    if (aglSetCurrentContext(_aglContext) == false)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    // Delete a possible old texture
+    if (_texture != 0)
+    {
+        glDeleteTextures(1, (const GLuint*) &_texture);
+        _texture = 0;
+    }
+
+    // Create a new texture
+    glGenTextures(1, (GLuint *) &_texture);
+
+    GLenum glErr = glGetError();
+
+    if (glErr != GL_NO_ERROR)
+    {
+    }
+
+    // Do the setup for both textures
+    // Note: we setup two textures even if we're not running full screen
+    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
+
+    // Set texture parameters
+    glTexParameterf(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_PRIORITY, 1.0);
+
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+    //glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
+    //glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
+
+    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
+
+    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
+
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_STORAGE_HINT_APPLE, GL_STORAGE_SHARED_APPLE);
+
+    // Maximum width/height for a texture
+    GLint texSize;
+    glGetIntegerv(GL_MAX_TEXTURE_SIZE, &texSize);
+
+    if (texSize < _width || texSize < _height)
+    {
+        // Image too big for memory
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    // Set up th texture type and size
+    glTexImage2D(GL_TEXTURE_RECTANGLE_EXT, // target
+            0, // level
+            GL_RGBA, // internal format
+            _width, // width
+            _height, // height
+            0, // border 0/1 = off/on
+            _pixelFormat, // format, GL_BGRA
+            _pixelDataType, // data type, GL_UNSIGNED_INT_8_8_8_8
+            _buffer); // pixel data
+
+    glErr = glGetError();
+    if (glErr != GL_NO_ERROR)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+// Called from video engine when a new frame should be rendered.
+int VideoChannelAGL::DeliverFrame(unsigned char* buffer, int bufferSize, unsigned int /*timeStamp90kHz*/)
+{
+    _owner->LockAGLCntx();
+
+    if (_texture == 0)
+    {
+        _owner->UnlockAGLCntx();
+        return 0;
+    }
+
+    if (bufferSize != _incommingBufferSize)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    // Setting stride = width.
+    int rgbret = ConvertFromYV12(buffer, _width, kBGRA, 0, _width, _height,
+                                 _buffer);
+    if (rgbret < 0)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    aglSetCurrentContext(_aglContext);
+
+    // Put the new frame into the graphic card texture.
+    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture); // Make sure this texture is the active one
+    GLenum glErr = glGetError();
+    if (glErr != GL_NO_ERROR)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    // Copy buffer to texture
+    glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
+            0, // Level, not use
+            0, // start point x, (low left of pic)
+            0, // start point y,
+            _width, // width
+            _height, // height
+            _pixelFormat, // pictue format for _buffer
+            _pixelDataType, // data type of _buffer
+            (const GLvoid*) _buffer); // the pixel data
+
+    if (glGetError() != GL_NO_ERROR)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    _bufferIsUpdated = true;
+    _owner->UnlockAGLCntx();
+
+    return 0;
+}
+
+int VideoChannelAGL::RenderOffScreenBuffer()
+{
+
+    _owner->LockAGLCntx();
+
+    if (_texture == 0)
+    {
+        _owner->UnlockAGLCntx();
+        return 0;
+    }
+
+    GLfloat xStart = 2.0f * _startWidth - 1.0f;
+    GLfloat xStop = 2.0f * _stopWidth - 1.0f;
+    GLfloat yStart = 1.0f - 2.0f * _stopHeight;
+    GLfloat yStop = 1.0f - 2.0f * _startHeight;
+
+    aglSetCurrentContext(_aglContext);
+    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
+
+    if(_stretchedWidth != _oldStretchedWidth || _stretchedHeight != _oldStretchedHeight)
+    {
+        glViewport(0, 0, _stretchedWidth, _stretchedHeight);
+    }
+    _oldStretchedHeight = _stretchedHeight;
+    _oldStretchedWidth = _stretchedWidth;
+
+    // Now really put the texture into the framebuffer
+    glLoadIdentity();
+
+    glEnable(GL_TEXTURE_RECTANGLE_EXT);
+
+    glBegin(GL_POLYGON);
+    {
+        glTexCoord2f(0.0, 0.0); glVertex2f(xStart, yStop);
+        glTexCoord2f(_width, 0.0); glVertex2f(xStop, yStop);
+        glTexCoord2f(_width, _height); glVertex2f(xStop, yStart);
+        glTexCoord2f(0.0, _height); glVertex2f(xStart, yStart);
+    }
+    glEnd();
+
+    glDisable(GL_TEXTURE_RECTANGLE_EXT);
+
+    _bufferIsUpdated = false;
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelAGL::IsUpdated(bool& isUpdated)
+{
+    _owner->LockAGLCntx();
+    isUpdated = _bufferIsUpdated;
+    _owner->UnlockAGLCntx();
+
+    return 0;
+}
+
+int VideoChannelAGL::SetStreamSettings(int /*streamId*/, float startWidth, float startHeight, float stopWidth, float stopHeight)
+{
+
+    _owner->LockAGLCntx();
+
+    _startWidth = startWidth;
+    _stopWidth = stopWidth;
+    _startHeight = startHeight;
+    _stopHeight = stopHeight;
+
+    int oldWidth = _width;
+    int oldHeight = _height;
+    int oldNumberOfStreams = _numberOfStreams;
+
+    _width = 0;
+    _height = 0;
+
+    int retVal = FrameSizeChange(oldWidth, oldHeight, oldNumberOfStreams);
+
+    _owner->UnlockAGLCntx();
+
+    return retVal;
+}
+
+int VideoChannelAGL::SetStreamCropSettings(int /*streamId*/, float /*startWidth*/, float /*startHeight*/, float /*stopWidth*/, float /*stopHeight*/)
+{
+    return -1;
+}
+
+#pragma mark VideoRenderAGL WindowRef constructor
+
+VideoRenderAGL::VideoRenderAGL(WindowRef windowRef, bool fullscreen, int iId) :
+_hiviewRef( 0),
+_windowRef( windowRef),
+_fullScreen( fullscreen),
+_id( iId),
+_renderCritSec(*CriticalSectionWrapper::CreateCriticalSection()),
+_screenUpdateThread( 0),
+_screenUpdateEvent( 0),
+_isHIViewRef( false),
+_aglContext( 0),
+_windowWidth( 0),
+_windowHeight( 0),
+_lastWindowWidth( -1),
+_lastWindowHeight( -1),
+_lastHiViewWidth( -1),
+_lastHiViewHeight( -1),
+_currentParentWindowHeight( 0),
+_currentParentWindowWidth( 0),
+_currentParentWindowBounds( ),
+_windowHasResized( false),
+_lastParentWindowBounds( ),
+_currentHIViewBounds( ),
+_lastHIViewBounds( ),
+_windowRect( ),
+_aglChannels( ),
+_zOrderToChannel( ),
+_hiviewEventHandlerRef( NULL),
+_windowEventHandlerRef( NULL),
+_currentViewBounds( ),
+_lastViewBounds( ),
+_renderingIsPaused( false),
+_threadID( )
+
+{
+    //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s");
+
+    _screenUpdateThread = ThreadWrapper::CreateThread(ScreenUpdateThreadProc, this, kRealtimePriority);
+    _screenUpdateEvent = EventWrapper::Create();
+
+    if(!IsValidWindowPtr(_windowRef))
+    {
+        //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Invalid WindowRef:0x%x", __FUNCTION__, __LINE__, _windowRef);
+    }
+    else
+    {
+        //WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d WindowRef 0x%x is valid", __FUNCTION__, __LINE__, _windowRef);
+    }
+
+    GetWindowRect(_windowRect);
+
+    _lastViewBounds.origin.x = 0;
+    _lastViewBounds.origin.y = 0;
+    _lastViewBounds.size.width = 0;
+    _lastViewBounds.size.height = 0;
+
+}
+
+// this is a static function. It has been registered (in class constructor) to be called on various window redrawing or resizing.
+// Since it is a static method, I have passed in "this" as the userData (one and only allowed) parameter, then calling member methods on it.
+#pragma mark WindowRef Event Handler
+pascal OSStatus VideoRenderAGL::sHandleWindowResized (EventHandlerCallRef /*nextHandler*/,
+        EventRef theEvent,
+        void* userData)
+{
+    WindowRef windowRef = NULL;
+
+    int eventType = GetEventKind(theEvent);
+
+    // see https://dcs.sourcerepo.com/dcs/tox_view/trunk/tox/libraries/i686-win32/include/quicktime/CarbonEvents.h for a list of codes
+    GetEventParameter (theEvent,
+            kEventParamDirectObject,
+            typeWindowRef,
+            NULL,
+            sizeof (WindowRef),
+            NULL,
+            &windowRef);
+
+    VideoRenderAGL* obj = (VideoRenderAGL*)(userData);
+
+    bool updateUI = true;
+    if(kEventWindowBoundsChanged == eventType)
+    {
+    }
+    else if(kEventWindowBoundsChanging == eventType)
+    {
+    }
+    else if(kEventWindowZoomed == eventType)
+    {
+    }
+    else if(kEventWindowExpanding == eventType)
+    {
+    }
+    else if(kEventWindowExpanded == eventType)
+    {
+    }
+    else if(kEventWindowClickResizeRgn == eventType)
+    {
+    }
+    else if(kEventWindowClickDragRgn == eventType)
+    {
+    }
+    else
+    {
+        updateUI = false;
+    }
+
+    if(true == updateUI)
+    {
+        obj->ParentWindowResized(windowRef);
+        obj->UpdateClipping();
+        obj->RenderOffScreenBuffers();
+    }
+
+    return noErr;
+}
+
+#pragma mark VideoRenderAGL HIViewRef constructor
+
+VideoRenderAGL::VideoRenderAGL(HIViewRef windowRef, bool fullscreen, int iId) :
+_hiviewRef( windowRef),
+_windowRef( 0),
+_fullScreen( fullscreen),
+_id( iId),
+_renderCritSec(*CriticalSectionWrapper::CreateCriticalSection()),
+_screenUpdateThread( 0),
+_screenUpdateEvent( 0),
+_isHIViewRef( false),
+_aglContext( 0),
+_windowWidth( 0),
+_windowHeight( 0),
+_lastWindowWidth( -1),
+_lastWindowHeight( -1),
+_lastHiViewWidth( -1),
+_lastHiViewHeight( -1),
+_currentParentWindowHeight( 0),
+_currentParentWindowWidth( 0),
+_currentParentWindowBounds( ),
+_windowHasResized( false),
+_lastParentWindowBounds( ),
+_currentHIViewBounds( ),
+_lastHIViewBounds( ),
+_windowRect( ),
+_aglChannels( ),
+_zOrderToChannel( ),
+_hiviewEventHandlerRef( NULL),
+_windowEventHandlerRef( NULL),
+_currentViewBounds( ),
+_lastViewBounds( ),
+_renderingIsPaused( false),
+_threadID( )
+{
+    //WEBRTC_TRACE(kTraceDebug, "%s:%d Constructor", __FUNCTION__, __LINE__);
+    //    _renderCritSec = CriticalSectionWrapper::CreateCriticalSection();
+
+    _screenUpdateThread = ThreadWrapper::CreateThread(ScreenUpdateThreadProc, this, kRealtimePriority);
+    _screenUpdateEvent = EventWrapper::Create();
+
+    GetWindowRect(_windowRect);
+
+    _lastViewBounds.origin.x = 0;
+    _lastViewBounds.origin.y = 0;
+    _lastViewBounds.size.width = 0;
+    _lastViewBounds.size.height = 0;
+
+#ifdef NEW_HIVIEW_PARENT_EVENT_HANDLER
+    // This gets the parent window of the HIViewRef that's passed in and installs a WindowRef event handler on it
+    // The event handler looks for window resize events and adjusts the offset of the controls.
+
+    //WEBRTC_TRACE(kTraceDebug, "%s:%d Installing Eventhandler for hiviewRef's parent window", __FUNCTION__, __LINE__);
+
+
+    static const EventTypeSpec windowEventTypes[] =
+    {
+        kEventClassWindow, kEventWindowBoundsChanged,
+        kEventClassWindow, kEventWindowBoundsChanging,
+        kEventClassWindow, kEventWindowZoomed,
+        kEventClassWindow, kEventWindowExpanded,
+        kEventClassWindow, kEventWindowClickResizeRgn,
+        kEventClassWindow, kEventWindowClickDragRgn
+    };
+
+    WindowRef parentWindow = HIViewGetWindow(windowRef);
+
+    InstallWindowEventHandler (parentWindow,
+            NewEventHandlerUPP (sHandleWindowResized),
+            GetEventTypeCount(windowEventTypes),
+            windowEventTypes,
+            (void *) this, // this is an arbitrary parameter that will be passed on to your event handler when it is called later
+            &_windowEventHandlerRef);
+
+#endif
+
+#ifdef NEW_HIVIEW_EVENT_HANDLER	
+    //WEBRTC_TRACE(kTraceDebug, "%s:%d Installing Eventhandler for hiviewRef", __FUNCTION__, __LINE__);
+
+    static const EventTypeSpec hiviewEventTypes[] =
+    {
+        kEventClassControl, kEventControlBoundsChanged,
+        kEventClassControl, kEventControlDraw
+        //			kEventControlDragLeave
+        //			kEventControlDragReceive
+        //			kEventControlGetFocusPart
+        //			kEventControlApplyBackground
+        //			kEventControlDraw
+        //			kEventControlHit
+
+    };
+
+    HIViewInstallEventHandler(_hiviewRef,
+            NewEventHandlerUPP(sHandleHiViewResized),
+            GetEventTypeCount(hiviewEventTypes),
+            hiviewEventTypes,
+            (void *) this,
+            &_hiviewEventHandlerRef);
+
+#endif
+}
+
+// this is a static function. It has been registered (in constructor) to be called on various window redrawing or resizing.
+// Since it is a static method, I have passed in "this" as the userData (one and only allowed) parameter, then calling member methods on it.
+#pragma mark HIViewRef Event Handler
+pascal OSStatus VideoRenderAGL::sHandleHiViewResized (EventHandlerCallRef nextHandler, EventRef theEvent, void* userData)
+{
+    //static int      callbackCounter = 1;
+    HIViewRef hiviewRef = NULL;
+
+    // see https://dcs.sourcerepo.com/dcs/tox_view/trunk/tox/libraries/i686-win32/include/quicktime/CarbonEvents.h for a list of codes
+    int eventType = GetEventKind(theEvent);
+    OSStatus status = noErr;
+    status = GetEventParameter (theEvent,
+            kEventParamDirectObject,
+            typeControlRef,
+            NULL,
+            sizeof (ControlRef),
+            NULL,
+            &hiviewRef);
+
+    VideoRenderAGL* obj = (VideoRenderAGL*)(userData);
+    WindowRef parentWindow = HIViewGetWindow(hiviewRef);
+    bool updateUI = true;
+
+    if(kEventControlBoundsChanged == eventType)
+    {
+    }
+    else if(kEventControlDraw == eventType)
+    {
+    }
+    else
+    {
+        updateUI = false;
+    }
+
+    if(true == updateUI)
+    {
+        obj->ParentWindowResized(parentWindow);
+        obj->UpdateClipping();
+        obj->RenderOffScreenBuffers();
+    }
+
+    return status;
+}
+
+VideoRenderAGL::~VideoRenderAGL()
+{
+
+    //WEBRTC_TRACE(kTraceDebug, "%s:%d Destructor", __FUNCTION__, __LINE__);
+
+
+#ifdef USE_EVENT_HANDLERS
+    // remove event handlers
+    OSStatus status;
+    if(_isHIViewRef)
+    {
+        status = RemoveEventHandler(_hiviewEventHandlerRef);
+    }
+    else
+    {
+        status = RemoveEventHandler(_windowEventHandlerRef);
+    }
+    if(noErr != status)
+    {
+        if(_isHIViewRef)
+        {
+
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove hiview event handler: %d", __FUNCTION__, __LINE__, (int)_hiviewEventHandlerRef);
+        }
+        else
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove window event handler %d", __FUNCTION__, __LINE__, (int)_windowEventHandlerRef);
+        }
+    }
+
+#endif
+
+    OSStatus status;
+#ifdef NEW_HIVIEW_PARENT_EVENT_HANDLER
+    if(_windowEventHandlerRef)
+    {
+        status = RemoveEventHandler(_windowEventHandlerRef);
+        if(status != noErr)
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d failed to remove window event handler %d", __FUNCTION__, __LINE__, (int)_windowEventHandlerRef);
+        }
+    }
+#endif
+
+#ifdef NEW_HIVIEW_EVENT_HANDLER	
+    if(_hiviewEventHandlerRef)
+    {
+        status = RemoveEventHandler(_hiviewEventHandlerRef);
+        if(status != noErr)
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Failed to remove hiview event handler: %d", __FUNCTION__, __LINE__, (int)_hiviewEventHandlerRef);
+        }
+    }
+#endif
+
+    // Signal event to exit thread, then delete it
+    ThreadWrapper* tmpPtr = _screenUpdateThread;
+    _screenUpdateThread = NULL;
+
+    if (tmpPtr)
+    {
+        tmpPtr->SetNotAlive();
+        _screenUpdateEvent->Set();
+        _screenUpdateEvent->StopTimer();
+
+        if (tmpPtr->Stop())
+        {
+            delete tmpPtr;
+        }
+        delete _screenUpdateEvent;
+        _screenUpdateEvent = NULL;
+    }
+
+    if (_aglContext != 0)
+    {
+        aglSetCurrentContext(_aglContext);
+        aglDestroyContext(_aglContext);
+        _aglContext = 0;
+    }
+
+    // Delete all channels
+    std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
+    while (it!= _aglChannels.end())
+    {
+        delete it->second;
+        _aglChannels.erase(it);
+        it = _aglChannels.begin();
+    }
+    _aglChannels.clear();
+
+    // Clean the zOrder map
+    std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
+    while(zIt != _zOrderToChannel.end())
+    {
+        _zOrderToChannel.erase(zIt);
+        zIt = _zOrderToChannel.begin();
+    }
+    _zOrderToChannel.clear();
+
+    //delete _renderCritSec;
+
+
+}
+
+int VideoRenderAGL::GetOpenGLVersion(int& aglMajor, int& aglMinor)
+{
+    aglGetVersion((GLint *) &aglMajor, (GLint *) &aglMinor);
+    return 0;
+}
+
+int VideoRenderAGL::Init()
+{
+    LockAGLCntx();
+
+    // Start rendering thread...
+    if (!_screenUpdateThread)
+    {
+        UnlockAGLCntx();
+        //WEBRTC_TRACE(kTraceError, "%s:%d Thread not created", __FUNCTION__, __LINE__);
+        return -1;
+    }
+    unsigned int threadId;
+    _screenUpdateThread->Start(threadId);
+
+    // Start the event triggering the render process
+    unsigned int monitorFreq = 60;
+    _screenUpdateEvent->StartTimer(true, 1000/monitorFreq);
+
+    // Create mixing textures
+    if (CreateMixingContext() == -1)
+    {
+        //WEBRTC_TRACE(kTraceError, "%s:%d Could not create a mixing context", __FUNCTION__, __LINE__);
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+VideoChannelAGL* VideoRenderAGL::CreateAGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
+{
+
+    LockAGLCntx();
+
+    //WEBRTC_TRACE(kTraceInfo, "%s:%d Creating AGL channel: %d", __FUNCTION__, __LINE__, channel);
+
+    if (HasChannel(channel))
+    {
+        //WEBRTC_TRACE(kTraceError, "%s:%d Channel already exists", __FUNCTION__, __LINE__);
+        UnlockAGLCntx();k
+        return NULL;
+    }
+
+    if (_zOrderToChannel.find(zOrder) != _zOrderToChannel.end())
+    {
+        // There are already one channel using this zOrder
+        // TODO: Allow multiple channels with same zOrder
+    }
+
+    VideoChannelAGL* newAGLChannel = new VideoChannelAGL(_aglContext, _id, this);
+
+    if (newAGLChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
+    {
+        if (newAGLChannel)
+        {
+            delete newAGLChannel;
+            newAGLChannel = NULL;
+        }
+        //WEBRTC_LOG(kTraceError, "Could not create AGL channel");
+        //WEBRTC_TRACE(kTraceError, "%s:%d Could not create AGL channel", __FUNCTION__, __LINE__);
+        UnlockAGLCntx();
+        return NULL;
+    }
+k
+    _aglChannels[channel] = newAGLChannel;
+    _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
+
+    UnlockAGLCntx();
+    return newAGLChannel;
+}
+
+int VideoRenderAGL::DeleteAllAGLChannels()
+{
+    CriticalSectionScoped cs(&_renderCritSec);
+
+    //WEBRTC_TRACE(kTraceInfo, "%s:%d Deleting all AGL channels", __FUNCTION__, __LINE__);
+    //int i = 0 ;
+    std::map<int, VideoChannelAGL*>::iterator it;
+    it = _aglChannels.begin();
+
+    while (it != _aglChannels.end())
+    {
+        VideoChannelAGL* channel = it->second;
+        if (channel)
+        delete channel;
+
+        _aglChannels.erase(it);
+        it = _aglChannels.begin();
+    }
+    _aglChannels.clear();
+    return 0;
+}
+
+int VideoRenderAGL::DeleteAGLChannel(int channel)
+{
+    CriticalSectionScoped cs(&_renderCritSec);
+    //WEBRTC_TRACE(kTraceDebug, "%s:%d Deleting AGL channel %d", __FUNCTION__, __LINE__, channel);
+
+    std::map<int, VideoChannelAGL*>::iterator it;
+    it = _aglChannels.find(channel);
+    if (it != _aglChannels.end())
+    {
+        delete it->second;
+        _aglChannels.erase(it);
+    }
+    else
+    {
+        //WEBRTC_TRACE(kTraceWarning, "%s:%d Channel not found", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
+    while( zIt != _zOrderToChannel.end())
+    {
+        if (zIt->second == channel)
+        {
+            _zOrderToChannel.erase(zIt);
+            break;
+        }
+        zIt++;// = _zOrderToChannel.begin();
+    }
+
+    return 0;
+}
+
+int VideoRenderAGL::StopThread()
+{
+    CriticalSectionScoped cs(&_renderCritSec);
+    ThreadWrapper* tmpPtr = _screenUpdateThread;
+    //_screenUpdateThread = NULL;
+
+    if (tmpPtr)
+    {
+        tmpPtr->SetNotAlive();
+        _screenUpdateEvent->Set();
+        if (tmpPtr->Stop())
+        {
+            delete tmpPtr;
+        }
+    }
+
+    delete _screenUpdateEvent;
+    _screenUpdateEvent = NULL;
+
+    return 0;
+}
+
+bool VideoRenderAGL::IsFullScreen()
+{
+    CriticalSectionScoped cs(&_renderCritSec);
+    return _fullScreen;
+}
+
+bool VideoRenderAGL::HasChannels()
+{
+
+    CriticalSectionScoped cs(&_renderCritSec);
+
+    if (_aglChannels.begin() != _aglChannels.end())
+    {
+        return true;
+    }
+
+    return false;
+}
+
+bool VideoRenderAGL::HasChannel(int channel)
+{
+    CriticalSectionScoped cs(&_renderCritSec);
+
+    std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.find(channel);
+    if (it != _aglChannels.end())
+    {
+        return true;
+    }
+
+    return false;
+}
+
+int VideoRenderAGL::GetChannels(std::list<int>& channelList)
+{
+
+    CriticalSectionScoped cs(&_renderCritSec);
+    std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
+
+    while (it != _aglChannels.end())
+    {
+        channelList.push_back(it->first);
+        it++;
+    }
+
+    return 0;
+}
+
+VideoChannelAGL* VideoRenderAGL::ConfigureAGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
+{
+
+    CriticalSectionScoped cs(&_renderCritSec);
+
+    std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.find(channel);
+
+    if (it != _aglChannels.end())
+    {
+        VideoChannelAGL* aglChannel = it->second;
+        if (aglChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
+        {
+            return NULL;
+        }
+
+        std::multimap<int, int>::iterator it = _zOrderToChannel.begin();
+        while(it != _zOrderToChannel.end())
+        {
+            if (it->second == channel)
+            {
+                if (it->first != zOrder)
+                {
+                    _zOrderToChannel.erase(it);
+                    _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
+                }
+                break;
+            }
+            it++;
+        }
+        return aglChannel;
+    }
+
+    return NULL;
+}
+
+bool VideoRenderAGL::ScreenUpdateThreadProc(void* obj)
+{
+    return static_cast<VideoRenderAGL*>(obj)->ScreenUpdateProcess();
+}
+
+bool VideoRenderAGL::ScreenUpdateProcess()
+{
+    _screenUpdateEvent->Wait(100);
+
+    LockAGLCntx();
+
+    if (!_screenUpdateThread)
+    {
+        UnlockAGLCntx();
+        return false;
+    }
+
+    if (aglSetCurrentContext(_aglContext) == GL_FALSE)
+    {
+        UnlockAGLCntx();
+        return true;
+    }
+
+    if (GetWindowRect(_windowRect) == -1)
+    {
+        UnlockAGLCntx();
+        return true;
+    }
+
+    if (_windowWidth != (_windowRect.right - _windowRect.left)
+            || _windowHeight != (_windowRect.bottom - _windowRect.top))
+    {
+        // We have a new window size, update the context.
+        if (aglUpdateContext(_aglContext) == GL_FALSE)
+        {
+            UnlockAGLCntx();
+            return true;
+        }
+        _windowWidth = _windowRect.right - _windowRect.left;
+        _windowHeight = _windowRect.bottom - _windowRect.top;
+    }
+
+    // this section will poll to see if the window size has changed
+    // this is causing problem w/invalid windowRef
+    // this code has been modified and exists now in the window event handler
+#ifndef NEW_HIVIEW_PARENT_EVENT_HANDLER
+    if (_isHIViewRef)
+    {
+
+        if(FALSE == HIViewIsValid(_hiviewRef))
+        {
+
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Invalid windowRef", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return true;
+        }
+        WindowRef window = HIViewGetWindow(_hiviewRef);
+
+        if(FALSE == IsValidWindowPtr(window))
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Invalide hiviewRef", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return true;
+        }
+        if (window == NULL)
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d WindowRef = NULL", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return true;
+        }
+
+        if(FALSE == MacIsWindowVisible(window))
+        {
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d MacIsWindowVisible == FALSE. Returning early", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return true;
+        }
+
+        HIRect viewBounds; // Placement and size for HIView
+        int windowWidth = 0; // Parent window width
+        int windowHeight = 0; // Parent window height
+
+        // NOTE: Calling GetWindowBounds with kWindowStructureRgn will crash intermittentaly if the OS decides it needs to push it into the back for a moment.
+        // To counter this, we get the titlebar height on class construction and then add it to the content region here. Content regions seems not to crash
+        Rect contentBounds =
+        {   0, 0, 0, 0}; // The bounds for the parent window
+
+#if		defined(USE_CONTENT_RGN)
+        GetWindowBounds(window, kWindowContentRgn, &contentBounds);
+#elif	defined(USE_STRUCT_RGN)
+        GetWindowBounds(window, kWindowStructureRgn, &contentBounds);
+#endif
+
+        Rect globalBounds =
+        {   0, 0, 0, 0}; // The bounds for the parent window
+        globalBounds.top = contentBounds.top;
+        globalBounds.right = contentBounds.right;
+        globalBounds.bottom = contentBounds.bottom;
+        globalBounds.left = contentBounds.left;
+
+        windowHeight = globalBounds.bottom - globalBounds.top;
+        windowWidth = globalBounds.right - globalBounds.left;
+
+        // Get the size of the HIViewRef
+        HIViewGetBounds(_hiviewRef, &viewBounds);
+        HIViewConvertRect(&viewBounds, _hiviewRef, NULL);
+
+        // Check if this is the first call..
+        if (_lastWindowHeight == -1 &&
+                _lastWindowWidth == -1)
+        {
+            _lastWindowWidth = windowWidth;
+            _lastWindowHeight = windowHeight;
+
+            _lastViewBounds.origin.x = viewBounds.origin.x;
+            _lastViewBounds.origin.y = viewBounds.origin.y;
+            _lastViewBounds.size.width = viewBounds.size.width;
+            _lastViewBounds.size.height = viewBounds.size.height;
+        }
+        sfasdfasdf
+
+        bool resized = false;
+
+        // Check if parent window size has changed
+        if (windowHeight != _lastWindowHeight ||
+                windowWidth != _lastWindowWidth)
+        {
+            resized = true;
+        }
+
+        // Check if the HIView has new size or is moved in the parent window
+        if (_lastViewBounds.origin.x != viewBounds.origin.x ||
+                _lastViewBounds.origin.y != viewBounds.origin.y ||
+                _lastViewBounds.size.width != viewBounds.size.width ||
+                _lastViewBounds.size.height != viewBounds.size.height)
+        {
+            // The HiView is resized or has moved.
+            resized = true;
+        }
+
+        if (resized)
+        {
+
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d Window has resized", __FUNCTION__, __LINE__);
+
+            // Calculate offset between the windows
+            // {x, y, widht, height}, x,y = lower left corner
+            const GLint offs[4] =
+            {   (int)(0.5f + viewBounds.origin.x),
+                (int)(0.5f + windowHeight - (viewBounds.origin.y + viewBounds.size.height)),
+                viewBounds.size.width, viewBounds.size.height};
+
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d contentBounds	t:%d r:%d b:%d l:%d", __FUNCTION__, __LINE__,
+            contentBounds.top, contentBounds.right, contentBounds.bottom, contentBounds.left);
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d windowHeight=%d", __FUNCTION__, __LINE__, windowHeight);
+            //WEBRTC_TRACE(kTraceDebug, "%s:%d offs[4] = %d, %d, %d, %d", __FUNCTION__, __LINE__, offs[0], offs[1], offs[2], offs[3]);
+
+            aglSetDrawable (_aglContext, GetWindowPort(window));
+            aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs);
+            aglEnable(_aglContext, AGL_BUFFER_RECT);
+
+            // We need to change the viewport too if the HIView size has changed
+            glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height);
+
+        }
+        _lastWindowWidth = windowWidth;
+        _lastWindowHeight = windowHeight;
+
+        _lastViewBounds.origin.x = viewBounds.origin.x;
+        _lastViewBounds.origin.y = viewBounds.origin.y;
+        _lastViewBounds.size.width = viewBounds.size.width;
+        _lastViewBounds.size.height = viewBounds.size.height;
+
+    }
+#endif
+    if (_fullScreen)
+    {
+        // TODO
+        // We use double buffers, must always update
+        //RenderOffScreenBuffersToBackBuffer();
+    }
+    else
+    {
+        // Check if there are any updated buffers
+        bool updated = false;
+
+        // TODO: check if window size is updated!
+        // TODO Improvement: Walk through the zOrder Map to only render the ones in need of update
+        std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.begin();
+        while (it != _aglChannels.end())
+        {
+
+            VideoChannelAGL* aglChannel = it->second;
+            aglChannel->UpdateStretchSize(_windowHeight, _windowWidth);
+            aglChannel->IsUpdated(updated);
+            if (updated)
+            {
+                break;
+            }
+            it++;
+        }
+
+        if (updated)
+        {
+            // At least on buffers is updated, we need to repaint the texture
+            if (RenderOffScreenBuffers() != -1)
+            {
+                // MF
+                //SwapAndDisplayBuffers();
+            }
+            else
+            {
+                // Error updating the mixing texture, don't swap.
+            }
+        }
+    }
+
+    UnlockAGLCntx();
+
+    //WEBRTC_LOG(kTraceDebug, "Leaving ScreenUpdateProcess()");
+    return true;
+}
+
+void VideoRenderAGL::ParentWindowResized(WindowRef window)
+{
+    //WEBRTC_LOG(kTraceDebug, "%s HIViewRef:%d owner window has resized", __FUNCTION__, (int)_hiviewRef);
+
+    LockAGLCntx();
+k
+    // set flag
+    _windowHasResized = false;
+
+    if(FALSE == HIViewIsValid(_hiviewRef))
+    {
+        //WEBRTC_LOG(kTraceDebug, "invalid windowRef");
+        UnlockAGLCntx();
+        return;
+    }
+
+    if(FALSE == IsValidWindowPtr(window))
+    {
+        //WEBRTC_LOG(kTraceError, "invalid windowRef");
+        UnlockAGLCntx();
+        return;
+    }
+
+    if (window == NULL)
+    {
+        //WEBRTC_LOG(kTraceError, "windowRef = NULL");
+        UnlockAGLCntx();
+        return;
+    }
+
+    if(FALSE == MacIsWindowVisible(window))
+    {
+        //WEBRTC_LOG(kTraceDebug, "MacIsWindowVisible = FALSE. Returning early.");
+        UnlockAGLCntx();
+        return;
+    }
+
+    Rect contentBounds =
+    {   0, 0, 0, 0};
+
+#if		defined(USE_CONTENT_RGN)
+    GetWindowBounds(window, kWindowContentRgn, &contentBounds);
+#elif	defined(USE_STRUCT_RGN)
+    GetWindowBounds(window, kWindowStructureRgn, &contentBounds);
+#endif
+
+    //WEBRTC_LOG(kTraceDebug, "%s contentBounds	t:%d r:%d b:%d l:%d", __FUNCTION__, contentBounds.top, contentBounds.right, contentBounds.bottom, contentBounds.left);
+
+    // update global vars
+    _currentParentWindowBounds.top = contentBounds.top;
+    _currentParentWindowBounds.left = contentBounds.left;
+    _currentParentWindowBounds.bottom = contentBounds.bottom;
+    _currentParentWindowBounds.right = contentBounds.right;
+
+    _currentParentWindowWidth = _currentParentWindowBounds.right - _currentParentWindowBounds.left;
+    _currentParentWindowHeight = _currentParentWindowBounds.bottom - _currentParentWindowBounds.top;
+
+    _windowHasResized = true;
+
+    // ********* update AGL offsets
+    HIRect viewBounds;
+    HIViewGetBounds(_hiviewRef, &viewBounds);
+    HIViewConvertRect(&viewBounds, _hiviewRef, NULL);
+
+    const GLint offs[4] =
+    {   (int)(0.5f + viewBounds.origin.x),
+        (int)(0.5f + _currentParentWindowHeight - (viewBounds.origin.y + viewBounds.size.height)),
+        viewBounds.size.width, viewBounds.size.height};
+    //WEBRTC_LOG(kTraceDebug, "%s _currentParentWindowHeight=%d", __FUNCTION__, _currentParentWindowHeight);
+    //WEBRTC_LOG(kTraceDebug, "%s offs[4] = %d, %d, %d, %d", __FUNCTION__, offs[0], offs[1], offs[2], offs[3]);
+
+    aglSetCurrentContext(_aglContext);
+    aglSetDrawable (_aglContext, GetWindowPort(window));
+    aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs);
+    aglEnable(_aglContext, AGL_BUFFER_RECT);
+
+    // We need to change the viewport too if the HIView size has changed
+    glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height);
+
+    UnlockAGLCntx();
+
+    return;
+}
+
+int VideoRenderAGL::CreateMixingContext()
+{
+
+    LockAGLCntx();
+
+    //WEBRTC_LOG(kTraceDebug, "Entering CreateMixingContext()");
+
+    // Use both AGL_ACCELERATED and AGL_NO_RECOVERY to make sure 
+    // a hardware renderer is used and not a software renderer.
+
+    GLint attributes[] =
+    {
+        AGL_DOUBLEBUFFER,
+        AGL_WINDOW,
+        AGL_RGBA,
+        AGL_NO_RECOVERY,
+        AGL_ACCELERATED,
+        AGL_RED_SIZE, 8,
+        AGL_GREEN_SIZE, 8,
+        AGL_BLUE_SIZE, 8,
+        AGL_ALPHA_SIZE, 8,
+        AGL_DEPTH_SIZE, 24,
+        AGL_NONE,
+    };
+
+    AGLPixelFormat aglPixelFormat;
+
+    // ***** Set up the OpenGL Context *****
+
+    // Get a pixel format for the attributes above
+    aglPixelFormat = aglChoosePixelFormat(NULL, 0, attributes);
+    if (NULL == aglPixelFormat)
+    {
+        //WEBRTC_LOG(kTraceError, "Could not create pixel format");
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    // Create an AGL context
+    _aglContext = aglCreateContext(aglPixelFormat, NULL);
+    if (_aglContext == NULL)
+    {
+        //WEBRTC_LOG(kTraceError, "Could no create AGL context");
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    // Release the pixel format memory
+    aglDestroyPixelFormat(aglPixelFormat);
+
+    // Set the current AGL context for the rest of the settings
+    if (aglSetCurrentContext(_aglContext) == false)
+    {
+        //WEBRTC_LOG(kTraceError, "Could not set current context: %d", aglGetError());
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    if (_isHIViewRef)
+    {
+        //---------------------------
+        // BEGIN: new test code
+#if 0
+        // Don't use this one!
+        // There seems to be an OS X bug that can't handle
+        // movements and resizing of the parent window
+        // and or the HIView
+        if (aglSetHIViewRef(_aglContext,_hiviewRef) == false)
+        {
+            //WEBRTC_LOG(kTraceError, "Could not set WindowRef: %d", aglGetError());
+            UnlockAGLCntx();
+            return -1;
+        }
+#else
+
+        // Get the parent window for this control
+        WindowRef window = GetControlOwner(_hiviewRef);
+
+        Rect globalBounds =
+        {   0,0,0,0}; // The bounds for the parent window
+        HIRect viewBounds; // Placemnt in the parent window and size.
+        int windowHeight = 0;
+
+        //		Rect titleBounds = {0,0,0,0};
+        //		GetWindowBounds(window, kWindowTitleBarRgn, &titleBounds);
+        //		_titleBarHeight = titleBounds.top - titleBounds.bottom;
+        //		if(0 == _titleBarHeight)
+        //		{
+        //            //WEBRTC_LOG(kTraceError, "Titlebar height = 0");
+        //            //return -1;
+        //		}
+
+
+        // Get the bounds for the parent window
+#if		defined(USE_CONTENT_RGN)
+        GetWindowBounds(window, kWindowContentRgn, &globalBounds);
+#elif	defined(USE_STRUCT_RGN)
+        GetWindowBounds(window, kWindowStructureRgn, &globalBounds);
+#endif
+        windowHeight = globalBounds.bottom - globalBounds.top;
+
+        // Get the bounds for the HIView
+        HIViewGetBounds(_hiviewRef, &viewBounds);
+
+        HIViewConvertRect(&viewBounds, _hiviewRef, NULL);
+
+        const GLint offs[4] =
+        {   (int)(0.5f + viewBounds.origin.x),
+            (int)(0.5f + windowHeight - (viewBounds.origin.y + viewBounds.size.height)),
+            viewBounds.size.width, viewBounds.size.height};
+
+        //WEBRTC_LOG(kTraceDebug, "%s offs[4] = %d, %d, %d, %d", __FUNCTION__, offs[0], offs[1], offs[2], offs[3]);
+
+
+        aglSetDrawable (_aglContext, GetWindowPort(window));
+        aglSetInteger(_aglContext, AGL_BUFFER_RECT, offs);
+        aglEnable(_aglContext, AGL_BUFFER_RECT);
+
+        GLint surfaceOrder = 1; // 1: above window, -1 below.
+        //OSStatus status = aglSetInteger(_aglContext, AGL_SURFACE_ORDER, &surfaceOrder);
+        aglSetInteger(_aglContext, AGL_SURFACE_ORDER, &surfaceOrder);
+
+        glViewport(0.0f, 0.0f, (GLsizei) viewBounds.size.width, (GLsizei) viewBounds.size.height);
+#endif
+
+    }
+    else
+    {
+        if(GL_FALSE == aglSetDrawable (_aglContext, GetWindowPort(_windowRef)))
+        {
+            //WEBRTC_LOG(kTraceError, "Could not set WindowRef: %d", aglGetError());
+            UnlockAGLCntx();
+            return -1;
+        }
+    }
+
+    _windowWidth = _windowRect.right - _windowRect.left;
+    _windowHeight = _windowRect.bottom - _windowRect.top;
+
+    // opaque surface
+    int surfaceOpacity = 1;
+    if (aglSetInteger(_aglContext, AGL_SURFACE_OPACITY, (const GLint *) &surfaceOpacity) == false)
+    {
+        //WEBRTC_LOG(kTraceError, "Could not set surface opacity: %d", aglGetError());
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    // 1 -> sync to screen rat, slow...
+    //int swapInterval = 0;  // 0 don't sync with vertical trace
+    int swapInterval = 0; // 1 sync with vertical trace
+    if (aglSetInteger(_aglContext, AGL_SWAP_INTERVAL, (const GLint *) &swapInterval) == false)
+    {
+        //WEBRTC_LOG(kTraceError, "Could not set swap interval: %d", aglGetError());
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    // Update the rect with the current size
+    if (GetWindowRect(_windowRect) == -1)
+    {
+        //WEBRTC_LOG(kTraceError, "Could not get window size");
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    // Disable not needed functionality to increase performance
+    glDisable(GL_DITHER);
+    glDisable(GL_ALPHA_TEST);
+    glDisable(GL_STENCIL_TEST);
+    glDisable(GL_FOG);
+    glDisable(GL_TEXTURE_2D);
+    glPixelZoom(1.0, 1.0);
+
+    glDisable(GL_BLEND);
+    glDisable(GL_DEPTH_TEST);
+    glDepthMask(GL_FALSE);
+    glDisable(GL_CULL_FACE);
+
+    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    GLenum glErr = glGetError();
+
+    if (glErr)
+    {
+    }
+
+    UpdateClipping();
+
+    //WEBRTC_LOG(kTraceDebug, "Leaving CreateMixingContext()");
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+int VideoRenderAGL::RenderOffScreenBuffers()
+{
+    LockAGLCntx();
+
+    // Get the current window size, it might have changed since last render.
+    if (GetWindowRect(_windowRect) == -1)
+    {
+        //WEBRTC_LOG(kTraceError, "Could not get window rect");
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    if (aglSetCurrentContext(_aglContext) == false)
+    {
+        //WEBRTC_LOG(kTraceError, "Could not set current context for rendering");
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    // HERE - onl if updated!
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    // Loop through all channels starting highest zOrder ending with lowest.
+    for (std::multimap<int, int>::reverse_iterator rIt = _zOrderToChannel.rbegin();
+    rIt != _zOrderToChannel.rend();
+    rIt++)
+    {
+        int channelId = rIt->second;
+        std::map<int, VideoChannelAGL*>::iterator it = _aglChannels.find(channelId);
+
+        VideoChannelAGL* aglChannel = it->second;
+
+        aglChannel->RenderOffScreenBuffer();
+    }
+
+    SwapAndDisplayBuffers();
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+int VideoRenderAGL::SwapAndDisplayBuffers()
+{
+
+    LockAGLCntx();
+    if (_fullScreen)
+    {
+        // TODO:
+        // Swap front and back buffers, rendering taking care of in the same call
+        //aglSwapBuffers(_aglContext);
+        // Update buffer index to the idx for the next rendering!
+        //_textureIdx = (_textureIdx + 1) & 1;
+    }
+    else
+    {
+        // Single buffer rendering, only update context.
+        glFlush();
+        aglSwapBuffers(_aglContext);
+        HIViewSetNeedsDisplay(_hiviewRef, true);
+    }
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+int VideoRenderAGL::GetWindowRect(Rect& rect)
+{
+
+    LockAGLCntx();
+
+    if (_isHIViewRef)
+    {
+        if (_hiviewRef)
+        {
+            HIRect HIViewRect1;
+            if(FALSE == HIViewIsValid(_hiviewRef))
+            {
+                rect.top = 0;
+                rect.left = 0;
+                rect.right = 0;
+                rect.bottom = 0;
+                //WEBRTC_LOG(kTraceError,"GetWindowRect() HIViewIsValid() returned false");
+                UnlockAGLCntx();
+            }
+            HIViewGetBounds(_hiviewRef,&HIViewRect1);
+            HIRectConvert(&HIViewRect1, 1, NULL, 2, NULL);
+            if(HIViewRect1.origin.x < 0)
+            {
+                rect.top = 0;
+                //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.top = 0");
+            }
+            else
+            {
+                rect.top = HIViewRect1.origin.x;
+            }
+
+            if(HIViewRect1.origin.y < 0)
+            {
+                rect.left = 0;
+                //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.left = 0");
+            }
+            else
+            {
+                rect.left = HIViewRect1.origin.y;
+            }
+
+            if(HIViewRect1.size.width < 0)
+            {
+                rect.right = 0;
+                //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.right = 0");
+            }
+            else
+            {
+                rect.right = HIViewRect1.size.width;
+            }
+
+            if(HIViewRect1.size.height < 0)
+            {
+                rect.bottom = 0;
+                //WEBRTC_LOG(kTraceDebug, "GetWindowRect() rect.bottom = 0");
+            }
+            else
+            {
+                rect.bottom = HIViewRect1.size.height;
+            }
+
+            ////WEBRTC_LOG(kTraceDebug,"GetWindowRect() HIViewRef: rect.top = %d, rect.left = %d, rect.right = %d, rect.bottom =%d in GetWindowRect", rect.top,rect.left,rect.right,rect.bottom);
+            UnlockAGLCntx();
+        }
+        else
+        {
+            //WEBRTC_LOG(kTraceError, "invalid HIViewRef");
+            UnlockAGLCntx();
+        }
+    }
+    else
+    {
+        if (_windowRef)
+        {
+            GetWindowBounds(_windowRef, kWindowContentRgn, &rect);
+            UnlockAGLCntx();
+        }
+        else
+        {
+            //WEBRTC_LOG(kTraceError, "No WindowRef");
+            UnlockAGLCntx();
+        }
+    }
+}
+
+int VideoRenderAGL::UpdateClipping()
+{
+    //WEBRTC_LOG(kTraceDebug, "Entering UpdateClipping()");
+    LockAGLCntx();
+
+    if(_isHIViewRef)
+    {
+        if(FALSE == HIViewIsValid(_hiviewRef))
+        {
+            //WEBRTC_LOG(kTraceError, "UpdateClipping() _isHIViewRef is invalid. Returning -1");
+            UnlockAGLCntx();
+            return -1;
+        }
+
+        RgnHandle visibleRgn = NewRgn();
+        SetEmptyRgn (visibleRgn);
+
+        if(-1 == CalculateVisibleRegion((ControlRef)_hiviewRef, visibleRgn, true))
+        {
+        }
+
+        if(GL_FALSE == aglSetCurrentContext(_aglContext))
+        {
+            GLenum glErr = aglGetError();
+            //WEBRTC_LOG(kTraceError, "aglSetCurrentContext returned FALSE with error code %d at line %d", glErr, __LINE__);
+        }
+
+        if(GL_FALSE == aglEnable(_aglContext, AGL_CLIP_REGION))
+        {
+            GLenum glErr = aglGetError();
+            //WEBRTC_LOG(kTraceError, "aglEnable returned FALSE with error code %d at line %d\n", glErr, __LINE__);
+        }
+
+        if(GL_FALSE == aglSetInteger(_aglContext, AGL_CLIP_REGION, (const GLint*)visibleRgn))
+        {
+            GLenum glErr = aglGetError();
+            //WEBRTC_LOG(kTraceError, "aglSetInteger returned FALSE with error code %d at line %d\n", glErr, __LINE__);
+        }
+
+        DisposeRgn(visibleRgn);
+    }
+    else
+    {
+        //WEBRTC_LOG(kTraceDebug, "Not using a hiviewref!\n");
+    }
+
+    //WEBRTC_LOG(kTraceDebug, "Leaving UpdateClipping()");
+    UnlockAGLCntx();
+    return true;
+}
+
+int VideoRenderAGL::CalculateVisibleRegion(ControlRef control, RgnHandle &visibleRgn, bool clipChildren)
+{
+
+    //	LockAGLCntx();
+
+    //WEBRTC_LOG(kTraceDebug, "Entering CalculateVisibleRegion()");
+    OSStatus osStatus = 0;
+    OSErr osErr = 0;
+
+    RgnHandle tempRgn = NewRgn();
+    if (IsControlVisible(control))
+    {
+        RgnHandle childRgn = NewRgn();
+        WindowRef window = GetControlOwner(control);
+        ControlRef rootControl;
+        GetRootControl(window, &rootControl); // 'wvnc'
+        ControlRef masterControl;
+        osStatus = GetSuperControl(rootControl, &masterControl);
+        // //WEBRTC_LOG(kTraceDebug, "IBM GetSuperControl=%d", osStatus);
+
+        if (masterControl != NULL)
+        {
+            CheckValidRegion(visibleRgn);
+            // init visibleRgn with region of 'wvnc'
+            osStatus = GetControlRegion(rootControl, kControlStructureMetaPart, visibleRgn);
+            // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d : %d", osStatus, __LINE__);
+            //GetSuperControl(rootControl, &rootControl);
+            ControlRef tempControl = control, lastControl = 0;
+            while (tempControl != masterControl) // current control != master
+
+            {
+                CheckValidRegion(tempRgn);
+
+                // //WEBRTC_LOG(kTraceDebug, "IBM tempControl=%d masterControl=%d", tempControl, masterControl);
+                ControlRef subControl;
+
+                osStatus = GetControlRegion(tempControl, kControlStructureMetaPart, tempRgn); // intersect the region of the current control with visibleRgn
+                // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d : %d", osStatus, __LINE__);
+                CheckValidRegion(tempRgn);
+
+                osErr = HIViewConvertRegion(tempRgn, tempControl, rootControl);
+                // //WEBRTC_LOG(kTraceDebug, "IBM HIViewConvertRegion=%d : %d", osErr, __LINE__);
+                CheckValidRegion(tempRgn);
+
+                SectRgn(tempRgn, visibleRgn, visibleRgn);
+                CheckValidRegion(tempRgn);
+                CheckValidRegion(visibleRgn);
+                if (EmptyRgn(visibleRgn)) // if the region is empty, bail
+                break;
+
+                if (clipChildren || tempControl != control) // clip children if true, cut out the tempControl if it's not one passed to this function
+
+                {
+                    UInt16 numChildren;
+                    osStatus = CountSubControls(tempControl, &numChildren); // count the subcontrols
+                    // //WEBRTC_LOG(kTraceDebug, "IBM CountSubControls=%d : %d", osStatus, __LINE__);
+
+                    // //WEBRTC_LOG(kTraceDebug, "IBM numChildren=%d", numChildren);
+                    for (int i = 0; i < numChildren; i++)
+                    {
+                        osErr = GetIndexedSubControl(tempControl, numChildren - i, &subControl); // retrieve the subcontrol in order by zorder
+                        // //WEBRTC_LOG(kTraceDebug, "IBM GetIndexedSubControls=%d : %d", osErr, __LINE__);
+                        if ( subControl == lastControl ) // break because of zorder
+
+                        {
+                            // //WEBRTC_LOG(kTraceDebug, "IBM breaking because of zorder %d", __LINE__);
+                            break;
+                        }
+
+                        if (!IsControlVisible(subControl)) // dont' clip invisible controls
+
+                        {
+                            // //WEBRTC_LOG(kTraceDebug, "IBM continue. Control is not visible %d", __LINE__);
+                            continue;
+                        }
+
+                        if(!subControl) continue;
+
+                        osStatus = GetControlRegion(subControl, kControlStructureMetaPart, tempRgn); //get the region of the current control and union to childrg
+                        // //WEBRTC_LOG(kTraceDebug, "IBM GetControlRegion=%d %d", osStatus, __LINE__);
+                        CheckValidRegion(tempRgn);
+                        if(osStatus != 0)
+                        {
+                            // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! osStatus=%d. Continuing. %d", osStatus, __LINE__);
+                            continue;
+                        }
+                        if(!tempRgn)
+                        {
+                            // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !tempRgn %d", osStatus, __LINE__);
+                            continue;
+                        }
+
+                        osStatus = HIViewConvertRegion(tempRgn, subControl, rootControl);
+                        CheckValidRegion(tempRgn);
+                        // //WEBRTC_LOG(kTraceDebug, "IBM HIViewConvertRegion=%d %d", osStatus, __LINE__);
+                        if(osStatus != 0)
+                        {
+                            // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! osStatus=%d. Continuing. %d", osStatus, __LINE__);
+                            continue;
+                        }
+                        if(!rootControl)
+                        {
+                            // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !rootControl %d", osStatus, __LINE__);
+                            continue;
+                        }
+
+                        UnionRgn(tempRgn, childRgn, childRgn);
+                        CheckValidRegion(tempRgn);
+                        CheckValidRegion(childRgn);
+                        CheckValidRegion(visibleRgn);
+                        if(!childRgn)
+                        {
+                            // //WEBRTC_LOG(kTraceDebug, "IBM ERROR! !childRgn %d", osStatus, __LINE__);
+                            continue;
+                        }
+
+                    } // next child control
+                }
+                lastControl = tempControl;
+                GetSuperControl(tempControl, &subControl);
+                tempControl = subControl;
+            }
+
+            DiffRgn(visibleRgn, childRgn, visibleRgn);
+            CheckValidRegion(visibleRgn);
+            CheckValidRegion(childRgn);
+            DisposeRgn(childRgn);
+        }
+        else
+        {
+            CopyRgn(tempRgn, visibleRgn);
+            CheckValidRegion(tempRgn);
+            CheckValidRegion(visibleRgn);
+        }
+        DisposeRgn(tempRgn);
+    }
+
+    //WEBRTC_LOG(kTraceDebug, "Leaving CalculateVisibleRegion()");
+    //_aglCritPtr->Leave();
+    return 0;
+}
+
+bool VideoRenderAGL::CheckValidRegion(RgnHandle rHandle)
+{
+
+    Handle hndSize = (Handle)rHandle;
+    long size = GetHandleSize(hndSize);
+    if(0 == size)
+    {
+
+        OSErr memErr = MemError();
+        if(noErr != memErr)
+        {
+            // //WEBRTC_LOG(kTraceError, "IBM ERROR Could not get size of handle. MemError() returned %d", memErr);
+        }
+        else
+        {
+            // //WEBRTC_LOG(kTraceError, "IBM ERROR Could not get size of handle yet MemError() returned noErr");
+        }
+
+    }
+    else
+    {
+        // //WEBRTC_LOG(kTraceDebug, "IBM handleSize = %d", size);
+    }
+
+    if(false == IsValidRgnHandle(rHandle))
+    {
+        // //WEBRTC_LOG(kTraceError, "IBM ERROR Invalid Region found : $%d", rHandle);
+        assert(false);
+    }
+
+    int err = QDError();
+    switch(err)
+    {
+        case 0:
+        break;
+        case -147:
+        //WEBRTC_LOG(kTraceError, "ERROR region too big");
+        assert(false);
+        break;
+
+        case -149:
+        //WEBRTC_LOG(kTraceError, "ERROR not enough stack");
+        assert(false);
+        break;
+
+        default:
+        //WEBRTC_LOG(kTraceError, "ERROR Unknown QDError %d", err);
+        assert(false);
+        break;
+    }
+
+    return true;
+}
+
+int VideoRenderAGL::ChangeWindow(void* newWindowRef)
+{
+
+    LockAGLCntx();
+
+    UnlockAGLCntx();
+    return -1;
+}
+WebRtc_Word32 VideoRenderAGL::ChangeUniqueID(WebRtc_Word32 id)
+{
+    LockAGLCntx();
+
+    UnlockAGLCntx();
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderAGL::StartRender()
+{
+
+    LockAGLCntx();
+    const unsigned int MONITOR_FREQ = 60;
+    if(TRUE == _renderingIsPaused)
+    {
+        //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Rendering is paused. Restarting now", __FUNCTION__, __LINE__);
+
+        // we already have the thread. Most likely StopRender() was called and they were paused
+        if(FALSE == _screenUpdateThread->Start(_threadID))
+        {
+            //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateThread", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return -1;
+        }
+        if(FALSE == _screenUpdateEvent->StartTimer(true, 1000/MONITOR_FREQ))
+        {
+            //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateEvent", __FUNCTION__, __LINE__);
+            UnlockAGLCntx();
+            return -1;
+        }
+
+        return 0;
+    }
+
+    _screenUpdateThread = ThreadWrapper::CreateThread(ScreenUpdateThreadProc, this, kRealtimePriority);
+    _screenUpdateEvent = EventWrapper::Create();
+
+    if (!_screenUpdateThread)
+    {
+        //WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to start screenUpdateThread", __FUNCTION__, __LINE__);
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    _screenUpdateThread->Start(_threadID);
+    _screenUpdateEvent->StartTimer(true, 1000/MONITOR_FREQ);
+
+    //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Started screenUpdateThread", __FUNCTION__, __LINE__);
+
+    UnlockAGLCntx();
+    return 0;
+
+}
+
+WebRtc_Word32 VideoRenderAGL::StopRender()
+{
+    LockAGLCntx();
+
+    if(!_screenUpdateThread || !_screenUpdateEvent)
+    {
+        _renderingIsPaused = TRUE;
+        UnlockAGLCntx();
+        return 0;
+    }
+
+    if(FALSE == _screenUpdateThread->Stop() || FALSE == _screenUpdateEvent->StopTimer())
+    {
+        _renderingIsPaused = FALSE;
+        //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Could not stop either: screenUpdateThread or screenUpdateEvent", __FUNCTION__, __LINE__);
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    _renderingIsPaused = TRUE;
+
+    //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Stopped screenUpdateThread", __FUNCTION__, __LINE__);
+    UnlockAGLCntx();
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderAGL::DeleteAGLChannel(const WebRtc_UWord32 streamID)
+{
+
+    LockAGLCntx();
+
+    std::map<int, VideoChannelAGL*>::iterator it;
+    it = _aglChannels.begin();
+
+    while (it != _aglChannels.end())
+    {
+        VideoChannelAGL* channel = it->second;
+        //WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Deleting channel %d", __FUNCTION__, __LINE__, streamID);
+        delete channel;
+        it++;
+    }
+    _aglChannels.clear();
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderAGL::GetChannelProperties(const WebRtc_UWord16 streamId,
+WebRtc_UWord32& zOrder,
+float& left,
+float& top,
+float& right,
+float& bottom)
+{
+
+    LockAGLCntx();
+    UnlockAGLCntx();
+    return -1;
+
+}
+
+void VideoRenderAGL::LockAGLCntx()
+{
+    _renderCritSec.Enter();
+}
+void VideoRenderAGL::UnlockAGLCntx()
+{
+    _renderCritSec.Leave();
+}
+
+} //namespace webrtc
+
+#endif   // CARBON_RENDERING
+
diff --git a/src/modules/video_render/main/source/mac/video_render_agl.h b/src/modules/video_render/main/source/mac/video_render_agl.h
new file mode 100644
index 0000000..bdee619
--- /dev/null
+++ b/src/modules/video_render/main/source/mac/video_render_agl.h
@@ -0,0 +1,185 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+
+#if defined(CARBON_RENDERING)
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_
+
+
+#include "video_render_defines.h"
+
+
+#define NEW_HIVIEW_PARENT_EVENT_HANDLER 1
+#define NEW_HIVIEW_EVENT_HANDLER		1
+#define USE_STRUCT_RGN
+
+#include <AGL/agl.h>
+#include <Carbon/Carbon.h>
+#include <OpenGL/OpenGL.h>
+#include <OpenGL/glu.h>
+#include <OpenGL/glext.h>
+#include <list>
+#include <map>
+
+class VideoRenderAGL;
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+
+class VideoChannelAGL : public VideoRenderCallback
+{
+public:
+
+	VideoChannelAGL(AGLContext& aglContext, int iId, VideoRenderAGL* owner);
+    virtual ~VideoChannelAGL();
+    virtual int FrameSizeChange(int width, int height, int numberOfStreams);
+    virtual int DeliverFrame(unsigned char* buffer, int bufferSize, unsigned int timeStame90kHz);    
+    virtual int UpdateSize(int width, int height);
+    int SetStreamSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
+    int SetStreamCropSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
+    int RenderOffScreenBuffer();
+    int IsUpdated(bool& isUpdated);
+	virtual int UpdateStretchSize(int stretchHeight, int stretchWidth);
+	virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, VideoFrame& videoFrame);
+    
+    
+private:
+	
+	AGLContext      _aglContext;
+	int				_id;
+    VideoRenderAGL* _owner;
+    int             _width;
+    int             _height;
+	int				_stretchedWidth;
+	int				_stretchedHeight;
+    float           _startHeight;
+    float           _startWidth;
+    float           _stopWidth;
+    float           _stopHeight;
+    int				_xOldWidth;
+	int				_yOldHeight;
+	int				_oldStretchedHeight;
+	int				_oldStretchedWidth;
+	unsigned char*  _buffer;
+    int             _bufferSize;
+    int             _incommingBufferSize;
+    bool            _bufferIsUpdated;
+	bool			_sizeInitialized;
+    int             _numberOfStreams;
+	bool			_bVideoSizeStartedChanging;
+	GLenum          _pixelFormat;
+    GLenum          _pixelDataType;
+    unsigned int    _texture;	
+};
+
+
+
+
+class VideoRenderAGL
+{
+public:
+    VideoRenderAGL(WindowRef windowRef, bool fullscreen, int iId);
+    VideoRenderAGL(HIViewRef windowRef, bool fullscreen, int iId);
+    ~VideoRenderAGL();
+
+    int		Init();
+    VideoChannelAGL* CreateAGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight);
+    VideoChannelAGL* ConfigureAGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight);
+    int		DeleteAGLChannel(int channel);
+    int		DeleteAllAGLChannels();
+    int		StopThread();
+    bool	IsFullScreen();
+    bool	HasChannels();
+    bool	HasChannel(int channel);
+    int		GetChannels(std::list<int>& channelList);
+    void	LockAGLCntx();    
+    void	UnlockAGLCntx();
+	
+	static int GetOpenGLVersion(int& aglMajor, int& aglMinor);
+	
+	// ********** new module functions ************ //
+	int ChangeWindow(void* newWindowRef);
+	WebRtc_Word32 ChangeUniqueID(WebRtc_Word32 id);
+	WebRtc_Word32 StartRender();
+	WebRtc_Word32 StopRender();
+	WebRtc_Word32 DeleteAGLChannel(const WebRtc_UWord32 streamID);
+	WebRtc_Word32 GetChannelProperties(const WebRtc_UWord16 streamId,
+									 WebRtc_UWord32& zOrder,
+									 float& left,
+									 float& top,
+									 float& right,
+									 float& bottom);
+
+protected:
+    static bool ScreenUpdateThreadProc(void* obj);
+    bool ScreenUpdateProcess();
+    int GetWindowRect(Rect& rect);
+	
+private:
+    int		CreateMixingContext();
+    int		RenderOffScreenBuffers();
+    int		SwapAndDisplayBuffers();
+	int		UpdateClipping();
+    int		CalculateVisibleRegion(ControlRef control, RgnHandle &visibleRgn, bool clipChildren);
+    bool	CheckValidRegion(RgnHandle rHandle);
+    void	ParentWindowResized(WindowRef window);
+
+    // Carbon GUI event handlers
+    static pascal OSStatus sHandleWindowResized (EventHandlerCallRef nextHandler, EventRef theEvent, void* userData);
+    static pascal OSStatus sHandleHiViewResized (EventHandlerCallRef nextHandler, EventRef theEvent, void* userData);
+
+    HIViewRef                       _hiviewRef;
+	WindowRef                       _windowRef;
+    bool                            _fullScreen;
+	int								_id;
+    webrtc::CriticalSectionWrapper&            _renderCritSec;
+    webrtc::ThreadWrapper*                     _screenUpdateThread;
+    webrtc::EventWrapper*                      _screenUpdateEvent;
+	bool                            _isHIViewRef; 
+    AGLContext                      _aglContext;
+    int                             _windowWidth;
+    int                             _windowHeight;
+    int                             _lastWindowWidth;
+    int                             _lastWindowHeight;
+    int                             _lastHiViewWidth;
+    int                             _lastHiViewHeight;
+	int								_currentParentWindowHeight;
+	int								_currentParentWindowWidth;
+	Rect							_currentParentWindowBounds;
+	bool							_windowHasResized;
+	Rect							_lastParentWindowBounds;
+	Rect							_currentHIViewBounds;
+	Rect							_lastHIViewBounds;
+    Rect                            _windowRect;
+    std::map<int, VideoChannelAGL*> _aglChannels;
+    std::multimap<int, int>			_zOrderToChannel;
+	EventHandlerRef					_hiviewEventHandlerRef;
+	EventHandlerRef					_windowEventHandlerRef;
+	HIRect							_currentViewBounds;
+	HIRect							_lastViewBounds;
+	bool							_renderingIsPaused;
+	unsigned int					_threadID;
+	
+	
+	
+    
+};
+
+} //namespace webrtc
+
+
+#endif   // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_AGL_H_
+
+#endif // CARBON_RENDERING
diff --git a/src/modules/video_render/main/source/mac/video_render_mac_carbon_impl.cc b/src/modules/video_render/main/source/mac/video_render_mac_carbon_impl.cc
new file mode 100644
index 0000000..aec73bc
--- /dev/null
+++ b/src/modules/video_render/main/source/mac/video_render_mac_carbon_impl.cc
@@ -0,0 +1,297 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+#if defined(CARBON_RENDERING)
+
+#include "video_render_mac_carbon_impl.h"
+#include "critical_section_wrapper.h"
+#include "video_render_agl.h"
+#include "trace.h"
+#include <AGL/agl.h>
+
+namespace webrtc {
+
+VideoRenderMacCarbonImpl::VideoRenderMacCarbonImpl(const WebRtc_Word32 id,
+        const VideoRenderType videoRenderType,
+        void* window,
+        const bool fullscreen) :
+_id(id),
+_renderMacCarbonCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
+_fullScreen(fullscreen),
+_ptrWindow(window)
+{
+
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
+
+}
+
+VideoRenderMacCarbonImpl::~VideoRenderMacCarbonImpl()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Destructor %s:%d", __FUNCTION__, __LINE__);
+    delete &_renderMacCarbonCritsect;
+}
+
+WebRtc_Word32
+VideoRenderMacCarbonImpl::Init()
+{
+    CriticalSectionScoped cs(&_renderMacCarbonCritsect);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
+
+    if (!_ptrWindow)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    // We don't know if the user passed us a WindowRef or a HIViewRef, so test.
+    bool referenceIsValid = false;
+
+    // Check if it's a valid WindowRef
+    //WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d _ptrWindowRef before WindowRef cast: %x", __FUNCTION__, __LINE__, _ptrWindowRef);
+    WindowRef* windowRef = static_cast<WindowRef*>(_ptrWindow);
+    //WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d _ptrWindowRef after cast: %x", __FUNCTION__, __LINE__, _ptrWindowRef);
+    if (IsValidWindowPtr(*windowRef))
+    {
+        _ptrCarbonRender = new VideoRenderAGL(*windowRef, _fullScreen, _id);
+        referenceIsValid = true;
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Successfully initialized CarbonRenderer with WindowRef:%x", __FUNCTION__, __LINE__, *windowRef);
+    }
+    else
+    {
+        HIViewRef* hiviewRef = static_cast<HIViewRef*>(_ptrWindow);
+        if (HIViewIsValid(*hiviewRef))
+        {
+            _ptrCarbonRender = new VideoRenderAGL(*hiviewRef, _fullScreen, _id);
+            referenceIsValid = true;
+            WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d Successfully initialized CarbonRenderer with HIViewRef:%x", __FUNCTION__, __LINE__, hiviewRef);
+        }
+    }
+
+    if(!referenceIsValid)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Invalid WindowRef/HIViewRef Returning -1", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    if(!_ptrCarbonRender)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to create an instance of VideoRenderAGL. Returning -1", __FUNCTION__, __LINE__);
+    }
+
+    int retVal = _ptrCarbonRender->Init();
+    if (retVal == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s:%d Failed to init CarbonRenderer", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32
+VideoRenderMacCarbonImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    return -1;
+
+    CriticalSectionScoped cs(&_renderMacCarbonCritsect);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
+    _id = id;
+
+    if(_ptrCarbonRender)
+    {
+        _ptrCarbonRender->ChangeUniqueID(_id);
+    }
+
+    return 0;
+}
+
+WebRtc_Word32
+VideoRenderMacCarbonImpl::ChangeWindow(void* window)
+{
+    return -1;
+    CriticalSectionScoped cs(&_renderMacCarbonCritsect);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s changing ID to ", __FUNCTION__, window);
+
+    if (window == NULL)
+    {
+        return -1;
+    }
+    _ptrWindow = window;
+
+
+    _ptrWindow = window;
+
+    return 0;
+}
+
+VideoRenderCallback*
+VideoRenderMacCarbonImpl::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+        const WebRtc_UWord32 zOrder,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+
+    CriticalSectionScoped cs(&_renderMacCarbonCritsect);
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
+    VideoChannelAGL* AGLChannel = NULL;
+
+    if(!_ptrWindow)
+    {
+    }
+
+    if(!AGLChannel)
+    {
+        AGLChannel = _ptrCocoaRender->CreateNSGLChannel(streamId, zOrder, left, top, right, bottom);
+    }
+
+    return AGLChannel;
+
+}
+
+WebRtc_Word32
+VideoRenderMacCarbonImpl::DeleteIncomingRenderStream(const WebRtc_UWord32 streamId)
+{
+
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
+    CriticalSectionScoped cs(&_renderMacCarbonCritsect);
+    _ptrCarbonRender->DeleteAGLChannel(streamId);
+
+    return 0;
+}
+
+WebRtc_Word32
+VideoRenderMacCarbonImpl::GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
+        WebRtc_UWord32& zOrder,
+        float& left,
+        float& top,
+        float& right,
+        float& bottom) const
+{
+    return -1;
+    return _ptrCarbonRender->GetChannelProperties(streamId, zOrder, left, top, right, bottom);
+}
+
+WebRtc_Word32
+VideoRenderMacCarbonImpl::StartRender()
+{
+    return _ptrCarbonRender->StartRender();
+}
+
+WebRtc_Word32
+VideoRenderMacCarbonImpl::StopRender()
+{
+    return _ptrCarbonRender->StopRender();
+}
+
+VideoRenderType
+VideoRenderMacCarbonImpl::RenderType()
+{
+    return kRenderCarbon;
+}
+
+RawVideoType
+VideoRenderMacCarbonImpl::PerferedVideoType()
+{
+    return kVideoI420;
+}
+
+bool
+VideoRenderMacCarbonImpl::FullScreen()
+{
+    return false;
+}
+
+WebRtc_Word32
+VideoRenderMacCarbonImpl::GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
+        WebRtc_UWord64& availableGraphicsMemory) const
+{
+    totalGraphicsMemory = 0;
+    availableGraphicsMemory = 0;
+    return 0;
+}
+
+WebRtc_Word32
+VideoRenderMacCarbonImpl::GetScreenResolution(WebRtc_UWord32& screenWidth,
+        WebRtc_UWord32& screenHeight) const
+{
+    CriticalSectionScoped cs(&_renderMacCarbonCritsect);
+    //NSScreen* mainScreen = [NSScreen mainScreen];
+
+    //NSRect frame = [mainScreen frame];
+
+    //screenWidth = frame.size.width;
+    //screenHeight = frame.size.height;
+    return 0;
+}
+
+WebRtc_UWord32
+VideoRenderMacCarbonImpl::RenderFrameRate(const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(&_renderMacCarbonCritsect);
+    return 0;
+}
+
+WebRtc_Word32
+VideoRenderMacCarbonImpl::SetStreamCropping(const WebRtc_UWord32 streamId,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderMacCarbonImpl::ConfigureRenderer(const WebRtc_UWord32 streamId,
+        const unsigned int zOrder,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+    return 0;
+}
+
+WebRtc_Word32
+VideoRenderMacCarbonImpl::SetTransparentBackground(const bool enable)
+{
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderMacCarbonImpl::SetText(const WebRtc_UWord8 textId,
+        const WebRtc_UWord8* text,
+        const WebRtc_Word32 textLength,
+        const WebRtc_UWord32 textColorRef,
+        const WebRtc_UWord32 backgroundColorRef,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderMacCarbonImpl::SetBitmap(const void* bitMap,
+        const WebRtc_UWord8 pictureId,
+        const void* colorKey,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+    return 0;
+}
+
+
+} //namespace webrtc
+
+#endif // CARBON_RENDERING
diff --git a/src/modules/video_render/main/source/mac/video_render_mac_carbon_impl.h b/src/modules/video_render/main/source/mac/video_render_mac_carbon_impl.h
new file mode 100644
index 0000000..3ff3d26
--- /dev/null
+++ b/src/modules/video_render/main/source/mac/video_render_mac_carbon_impl.h
@@ -0,0 +1,148 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+#if defined(CARBON_RENDERING)
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_
+
+#include "i_video_render.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class VideoRenderAGL;
+
+// Class definitions
+class VideoRenderMacCarbonImpl : IVideoRender
+{
+public:
+    /*
+     *   Constructor/destructor
+     */
+
+    VideoRenderMacCarbonImpl(const WebRtc_Word32 id,
+            const VideoRenderType videoRenderType,
+            void* window,
+            const bool fullscreen);
+
+    virtual ~VideoRenderMacCarbonImpl();
+
+    virtual WebRtc_Word32 Init();
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual WebRtc_Word32 ChangeWindow(void* window);
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+    virtual VideoRenderCallback* AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+            const WebRtc_UWord32 zOrder,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual WebRtc_Word32 DeleteIncomingRenderStream(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32 GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
+            WebRtc_UWord32& zOrder,
+            float& left,
+            float& top,
+            float& right,
+            float& bottom) const;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual WebRtc_Word32 StartRender();
+
+    virtual WebRtc_Word32 StopRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderType RenderType();
+
+    virtual RawVideoType PerferedVideoType();
+
+    virtual bool FullScreen();
+
+    virtual WebRtc_Word32 GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
+            WebRtc_UWord64& availableGraphicsMemory) const;
+
+    virtual WebRtc_Word32 GetScreenResolution(WebRtc_UWord32& screenWidth,
+            WebRtc_UWord32& screenHeight) const;
+
+    virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
+            const unsigned int zOrder,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
+
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+            const WebRtc_UWord8* text,
+            const WebRtc_Word32 textLength,
+            const WebRtc_UWord32 textColorRef,
+            const WebRtc_UWord32 backgroundColorRef,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+            const WebRtc_UWord8 pictureId,
+            const void* colorKey,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual WebRtc_Word32 FullScreenRender(void* window, const bool enable)
+    {
+        // not supported in Carbon at this time
+        return -1;
+    }
+
+private:
+    WebRtc_Word32 _id;
+    CriticalSectionWrapper& _renderMacCarbonCritsect;
+    bool _fullScreen;
+    void* _ptrWindow;
+    VideoRenderAGL* _ptrCarbonRender;
+
+};
+
+
+} //namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_CARBON_IMPL_H_
+#endif // CARBON_RENDERING
diff --git a/src/modules/video_render/main/source/mac/video_render_mac_cocoa_impl.h b/src/modules/video_render/main/source/mac/video_render_mac_cocoa_impl.h
new file mode 100644
index 0000000..f9216bb
--- /dev/null
+++ b/src/modules/video_render/main/source/mac/video_render_mac_cocoa_impl.h
@@ -0,0 +1,143 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+
+#if defined(COCOA_RENDERING)
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_
+
+#include "i_video_render.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class VideoRenderNSOpenGL;
+
+// Class definitions
+class VideoRenderMacCocoaImpl : IVideoRender
+{
+public:
+    /*
+     *   Constructor/destructor
+     */
+
+    VideoRenderMacCocoaImpl(const WebRtc_Word32 id,
+            const VideoRenderType videoRenderType,
+            void* window,
+            const bool fullscreen);
+
+    virtual ~VideoRenderMacCocoaImpl();
+
+    virtual WebRtc_Word32 Init();
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual WebRtc_Word32 ChangeWindow(void* window);
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+    virtual VideoRenderCallback* AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+            const WebRtc_UWord32 zOrder,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual WebRtc_Word32 DeleteIncomingRenderStream(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32 GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
+            WebRtc_UWord32& zOrder,
+            float& left,
+            float& top,
+            float& right,
+            float& bottom) const;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual WebRtc_Word32 StartRender();
+
+    virtual WebRtc_Word32 StopRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderType RenderType();
+
+    virtual RawVideoType PerferedVideoType();
+
+    virtual bool FullScreen();
+
+    virtual WebRtc_Word32 GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
+            WebRtc_UWord64& availableGraphicsMemory) const;
+
+    virtual WebRtc_Word32 GetScreenResolution(WebRtc_UWord32& screenWidth,
+            WebRtc_UWord32& screenHeight) const;
+
+    virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
+            const unsigned int zOrder,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
+
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+            const WebRtc_UWord8* text,
+            const WebRtc_Word32 textLength,
+            const WebRtc_UWord32 textColorRef,
+            const WebRtc_UWord32 backgroundColorRef,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+            const WebRtc_UWord8 pictureId,
+            const void* colorKey,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    virtual WebRtc_Word32 FullScreenRender(void* window, const bool enable);
+
+private:
+    WebRtc_Word32 _id;
+    CriticalSectionWrapper& _renderMacCocoaCritsect;
+    bool _fullScreen;
+    void* _ptrWindow;
+    VideoRenderNSOpenGL* _ptrCocoaRender;
+
+};
+
+} //namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_MAC_COCOA_IMPL_H_
+#endif	// COCOA_RENDERING
diff --git a/src/modules/video_render/main/source/mac/video_render_mac_cocoa_impl.mm b/src/modules/video_render/main/source/mac/video_render_mac_cocoa_impl.mm
new file mode 100644
index 0000000..bc5057f
--- /dev/null
+++ b/src/modules/video_render/main/source/mac/video_render_mac_cocoa_impl.mm
@@ -0,0 +1,269 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+#if defined(COCOA_RENDERING)
+
+#import "cocoa_render_view.h"
+
+#include "video_render_mac_cocoa_impl.h"
+#include "critical_section_wrapper.h"
+#include "video_render_nsopengl.h"
+#include "trace.h"
+
+namespace webrtc {
+
+VideoRenderMacCocoaImpl::VideoRenderMacCocoaImpl(const WebRtc_Word32 id,
+        const VideoRenderType videoRenderType,
+        void* window,
+        const bool fullscreen) :
+_id(id),
+_renderMacCocoaCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
+_fullScreen(fullscreen),
+_ptrWindow(window)
+{
+
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
+}
+
+VideoRenderMacCocoaImpl::~VideoRenderMacCocoaImpl()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Destructor %s:%d", __FUNCTION__, __LINE__);
+    delete &_renderMacCocoaCritsect;
+    if (_ptrCocoaRender)
+    {
+        delete _ptrCocoaRender;
+        _ptrCocoaRender = NULL;
+    }
+}
+
+WebRtc_Word32
+VideoRenderMacCocoaImpl::Init()
+{
+
+    CriticalSectionScoped cs(&_renderMacCocoaCritsect);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s:%d", __FUNCTION__, __LINE__);
+
+    // cast ptrWindow from void* to CocoaRenderer. Void* was once NSOpenGLView, and CocoaRenderer is NSOpenGLView.
+    _ptrCocoaRender = new VideoRenderNSOpenGL((CocoaRenderView*)_ptrWindow, _fullScreen, _id);
+    if (!_ptrWindow)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
+        return -1;
+    }
+    int retVal = _ptrCocoaRender->Init();
+    if (retVal == -1)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "Failed to init %s:%d", __FUNCTION__, __LINE__);
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32
+VideoRenderMacCocoaImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    CriticalSectionScoped cs(&_renderMacCocoaCritsect);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
+    _id = id;
+
+    if(_ptrCocoaRender)
+    {
+        _ptrCocoaRender->ChangeUniqueID(_id);
+    }
+
+    return 0;
+}
+
+WebRtc_Word32
+VideoRenderMacCocoaImpl::ChangeWindow(void* window)
+{
+
+    CriticalSectionScoped cs(&_renderMacCocoaCritsect);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s changing ID to ", __FUNCTION__, window);
+
+    if (window == NULL)
+    {
+        return -1;
+    }
+    _ptrWindow = window;
+
+
+    _ptrWindow = window;
+    _ptrCocoaRender->ChangeWindow((CocoaRenderView*)_ptrWindow);
+
+    return 0;
+}
+
+VideoRenderCallback*
+VideoRenderMacCocoaImpl::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+        const WebRtc_UWord32 zOrder,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+    CriticalSectionScoped cs(&_renderMacCocoaCritsect);
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s", __FUNCTION__);
+    VideoChannelNSOpenGL* nsOpenGLChannel = NULL;
+
+    if(!_ptrWindow)
+    {
+    }
+
+    if(!nsOpenGLChannel)
+    {
+        nsOpenGLChannel = _ptrCocoaRender->CreateNSGLChannel(streamId, zOrder, left, top, right, bottom);
+    }
+
+    return nsOpenGLChannel;
+
+}
+
+WebRtc_Word32
+VideoRenderMacCocoaImpl::DeleteIncomingRenderStream(const WebRtc_UWord32 streamId)
+{
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "Constructor %s:%d", __FUNCTION__, __LINE__);
+    CriticalSectionScoped cs(&_renderMacCocoaCritsect);
+    _ptrCocoaRender->DeleteNSGLChannel(streamId);
+
+    return 0;
+}
+
+WebRtc_Word32
+VideoRenderMacCocoaImpl::GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
+        WebRtc_UWord32& zOrder,
+        float& left,
+        float& top,
+        float& right,
+        float& bottom) const
+{
+    return _ptrCocoaRender->GetChannelProperties(streamId, zOrder, left, top, right, bottom);
+}
+
+WebRtc_Word32
+VideoRenderMacCocoaImpl::StartRender()
+{
+    return _ptrCocoaRender->StartRender();
+}
+
+WebRtc_Word32
+VideoRenderMacCocoaImpl::StopRender()
+{
+    return _ptrCocoaRender->StopRender();
+}
+
+VideoRenderType
+VideoRenderMacCocoaImpl::RenderType()
+{
+    return kRenderCocoa;
+}
+
+RawVideoType
+VideoRenderMacCocoaImpl::PerferedVideoType()
+{
+    return kVideoI420;
+}
+
+bool
+VideoRenderMacCocoaImpl::FullScreen()
+{
+    return false;
+}
+
+WebRtc_Word32
+VideoRenderMacCocoaImpl::GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
+        WebRtc_UWord64& availableGraphicsMemory) const
+{
+    totalGraphicsMemory = 0;
+    availableGraphicsMemory = 0;
+    return 0;
+}
+
+WebRtc_Word32
+VideoRenderMacCocoaImpl::GetScreenResolution(WebRtc_UWord32& screenWidth,
+        WebRtc_UWord32& screenHeight) const
+{
+    CriticalSectionScoped cs(&_renderMacCocoaCritsect);
+    NSScreen* mainScreen = [NSScreen mainScreen];
+
+    NSRect frame = [mainScreen frame];
+
+    screenWidth = frame.size.width;
+    screenHeight = frame.size.height;
+    return 0;
+}
+
+WebRtc_UWord32
+VideoRenderMacCocoaImpl::RenderFrameRate(const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(&_renderMacCocoaCritsect);
+    return 0;
+}
+
+WebRtc_Word32
+VideoRenderMacCocoaImpl::SetStreamCropping(const WebRtc_UWord32 streamId,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderMacCocoaImpl::ConfigureRenderer(const WebRtc_UWord32 streamId,
+        const unsigned int zOrder,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+    return 0;
+}
+
+WebRtc_Word32
+VideoRenderMacCocoaImpl::SetTransparentBackground(const bool enable)
+{
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderMacCocoaImpl::SetText(const WebRtc_UWord8 textId,
+        const WebRtc_UWord8* text,
+        const WebRtc_Word32 textLength,
+        const WebRtc_UWord32 textColorRef,
+        const WebRtc_UWord32 backgroundColorRef,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+    return _ptrCocoaRender->SetText(textId, text, textLength, textColorRef, backgroundColorRef, left, top, right, bottom);
+}
+
+WebRtc_Word32 VideoRenderMacCocoaImpl::SetBitmap(const void* bitMap,
+        const WebRtc_UWord8 pictureId,
+        const void* colorKey,
+        const float left,
+        const float top,
+        const float right,
+        const float bottom)
+{
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderMacCocoaImpl::FullScreenRender(void* window, const bool enable)
+{
+    return -1;
+}
+
+} //namespace webrtc
+
+#endif // COCOA_RENDERING
diff --git a/src/modules/video_render/main/source/mac/video_render_nsopengl.h b/src/modules/video_render/main/source/mac/video_render_nsopengl.h
new file mode 100644
index 0000000..56058dc
--- /dev/null
+++ b/src/modules/video_render/main/source/mac/video_render_nsopengl.h
@@ -0,0 +1,190 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+#if defined(COCOA_RENDERING)
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_
+
+#import <Cocoa/Cocoa.h>
+#import <OpenGL/OpenGL.h>
+#import <OpenGL/glu.h>
+#import <OpenGL/glext.h>
+#include <QuickTime/QuickTime.h>
+#include <list>
+#include <map>
+
+#include "video_render_defines.h"
+
+#import "cocoa_render_view.h"
+#import "cocoa_full_screen_window.h"
+
+class Trace;
+
+namespace webrtc {
+class EventWrapper;
+class ThreadWrapper;
+class VideoRenderNSOpenGL;
+class CriticalSectionWrapper;
+
+class VideoChannelNSOpenGL : public VideoRenderCallback
+{
+
+public:
+
+    VideoChannelNSOpenGL(NSOpenGLContext *nsglContext, int iId, VideoRenderNSOpenGL* owner);
+    virtual ~VideoChannelNSOpenGL();
+
+    // A new frame is delivered
+    virtual int DeliverFrame(unsigned char* buffer, int bufferSize, unsigned int timeStame90kHz);
+
+    // Called when the incomming frame size and/or number of streams in mix changes
+    virtual int FrameSizeChange(int width, int height, int numberOfStreams);
+
+    virtual int UpdateSize(int width, int height);
+
+    // Setup
+    int SetStreamSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
+    int SetStreamCropSettings(int streamId, float startWidth, float startHeight, float stopWidth, float stopHeight);
+
+    // Called when it's time to render the last frame for the channel
+    int RenderOffScreenBuffer();
+
+    // Returns true if a new buffer has been delivered to the texture
+    int IsUpdated(bool& isUpdated);
+    virtual int UpdateStretchSize(int stretchHeight, int stretchWidth);
+
+    // ********** new module functions ************ //
+    virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId, VideoFrame& videoFrame);
+
+    // ********** new module helper functions ***** //
+    int ChangeContext(NSOpenGLContext *nsglContext);
+    WebRtc_Word32 GetChannelProperties(float& left,
+            float& top,
+            float& right,
+            float& bottom);
+
+private:
+
+    NSOpenGLContext* _nsglContext;
+    int _id;
+    VideoRenderNSOpenGL* _owner;
+    WebRtc_Word32 _width;
+    WebRtc_Word32 _height;
+    float _startWidth;
+    float _startHeight;
+    float _stopWidth;
+    float _stopHeight;
+    int _stretchedWidth;
+    int _stretchedHeight;
+    int _oldStretchedHeight;
+    int _oldStretchedWidth;
+    unsigned char* _buffer;
+    int _bufferSize;
+    int _incommingBufferSize;
+    bool _bufferIsUpdated;
+    int _numberOfStreams;
+    GLenum _pixelFormat;
+    GLenum _pixelDataType;
+    unsigned int _texture;
+};
+
+class VideoRenderNSOpenGL
+{
+
+public: // methods
+    VideoRenderNSOpenGL(CocoaRenderView *windowRef, bool fullScreen, int iId);
+    ~VideoRenderNSOpenGL();
+
+    static int GetOpenGLVersion(int& nsglMajor, int& nsglMinor);
+
+    // Allocates textures
+    int Init();
+    VideoChannelNSOpenGL* CreateNSGLChannel(int streamID, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight);
+    VideoChannelNSOpenGL* ConfigureNSGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight);
+    int DeleteNSGLChannel(int channel);
+    int DeleteAllNSGLChannels();
+    int StopThread();
+    bool IsFullScreen();
+    bool HasChannels();
+    bool HasChannel(int channel);
+    int GetChannels(std::list<int>& channelList);
+    void LockAGLCntx();
+    void UnlockAGLCntx();
+
+    // ********** new module functions ************ //
+    int ChangeWindow(CocoaRenderView* newWindowRef);
+    WebRtc_Word32 ChangeUniqueID(WebRtc_Word32 id);
+    WebRtc_Word32 StartRender();
+    WebRtc_Word32 StopRender();
+    WebRtc_Word32 DeleteNSGLChannel(const WebRtc_UWord32 streamID);
+    WebRtc_Word32 GetChannelProperties(const WebRtc_UWord16 streamId,
+            WebRtc_UWord32& zOrder,
+            float& left,
+            float& top,
+            float& right,
+            float& bottom);
+
+    WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+            const WebRtc_UWord8* text,
+            const WebRtc_Word32 textLength,
+            const WebRtc_UWord32 textColorRef,
+            const WebRtc_UWord32 backgroundColorRef,
+            const float left,
+            const float top,
+            const float right,
+            const float bottom);
+
+    // ********** new module helper functions ***** //
+    int configureNSOpenGLEngine();
+    int configureNSOpenGLView();
+    int setRenderTargetWindow();
+    int setRenderTargetFullScreen();
+
+protected: // methods
+    static bool ScreenUpdateThreadProc(void* obj);
+    bool ScreenUpdateProcess();
+    int GetWindowRect(Rect& rect);
+
+private: // methods
+
+    int CreateMixingContext();
+    int RenderOffScreenBuffers();
+    int DisplayBuffers();
+
+private: // variables
+
+
+    CocoaRenderView* _windowRef;
+    bool _fullScreen;
+    int _id;
+    CriticalSectionWrapper& _nsglContextCritSec;
+    ThreadWrapper* _screenUpdateThread;
+    EventWrapper* _screenUpdateEvent;
+    NSOpenGLContext* _nsglContext;
+    NSOpenGLContext* _nsglFullScreenContext;
+    CocoaFullScreenWindow* _fullScreenWindow;
+    Rect _windowRect; // The size of the window
+    int _windowWidth;
+    int _windowHeight;
+    std::map<int, VideoChannelNSOpenGL*> _nsglChannels;
+    std::multimap<int, int> _zOrderToChannel;
+    unsigned int _threadID;
+    bool _renderingIsPaused;
+    NSView* _windowRefSuperView;
+    NSRect _windowRefSuperViewFrame;
+};
+
+} //namespace webrtc
+
+#endif   // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_MAC_VIDEO_RENDER_NSOPENGL_H_
+#endif	 // COCOA_RENDERING
+
diff --git a/src/modules/video_render/main/source/mac/video_render_nsopengl.mm b/src/modules/video_render/main/source/mac/video_render_nsopengl.mm
new file mode 100644
index 0000000..65b2e48
--- /dev/null
+++ b/src/modules/video_render/main/source/mac/video_render_nsopengl.mm
@@ -0,0 +1,1271 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+#if defined(COCOA_RENDERING)
+
+#include "video_render_nsopengl.h"
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "trace.h"
+#include "thread_wrapper.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+
+namespace webrtc {
+
+VideoChannelNSOpenGL::VideoChannelNSOpenGL(NSOpenGLContext *nsglContext, int iId, VideoRenderNSOpenGL* owner) :
+_nsglContext( nsglContext),
+_id( iId),
+_owner( owner),
+_width( 0),
+_height( 0),
+_startWidth( 0.0f),
+_startHeight( 0.0f),
+_stopWidth( 0.0f),
+_stopHeight( 0.0f),
+_stretchedWidth( 0),
+_stretchedHeight( 0),
+_oldStretchedHeight( 0),
+_oldStretchedWidth( 0),
+_buffer( 0),
+_bufferSize( 0),
+_incommingBufferSize( 0),
+_bufferIsUpdated( false),
+_numberOfStreams( 0),
+_pixelFormat( GL_RGBA),
+_pixelDataType( GL_UNSIGNED_INT_8_8_8_8),
+_texture( 0)
+{
+
+}
+
+VideoChannelNSOpenGL::~VideoChannelNSOpenGL()
+{
+    if (_buffer)
+    {
+        delete [] _buffer;
+        _buffer = NULL;
+    }
+
+    if (_texture != 0)
+    {
+        [_nsglContext makeCurrentContext];
+        glDeleteTextures(1, (const GLuint*) &_texture);
+        _texture = 0;
+    }
+}
+
+int VideoChannelNSOpenGL::ChangeContext(NSOpenGLContext *nsglContext)
+{
+    _owner->UnlockAGLCntx();
+
+    _nsglContext = nsglContext;
+    [_nsglContext makeCurrentContext];
+
+    _owner->UnlockAGLCntx();
+    return 0;
+
+}
+
+WebRtc_Word32 VideoChannelNSOpenGL::GetChannelProperties(float& left,
+        float& top,
+        float& right,
+        float& bottom)
+{
+
+    _owner->LockAGLCntx();
+
+    left = _startWidth;
+    top = _startHeight;
+    right = _stopWidth;
+    bottom = _stopHeight;
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+WebRtc_Word32 VideoChannelNSOpenGL::RenderFrame(const WebRtc_UWord32 /*streamId*/, VideoFrame& videoFrame)
+{
+
+    _owner->LockAGLCntx();
+
+    if(_width != (int)videoFrame.Width() ||
+            _height != (int)videoFrame.Height())
+    {
+        if(FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1)
+        {
+            _owner->UnlockAGLCntx();
+            return -1;
+        }
+    }
+
+    int ret = DeliverFrame(videoFrame.Buffer(), videoFrame.Length(), videoFrame.TimeStamp());
+
+    _owner->UnlockAGLCntx();
+    return ret;
+}
+
+int VideoChannelNSOpenGL::UpdateSize(int width, int height)
+{
+    _owner->LockAGLCntx();
+    _width = width;
+    _height = height;
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelNSOpenGL::UpdateStretchSize(int stretchHeight, int stretchWidth)
+{
+
+    _owner->LockAGLCntx();
+    _stretchedHeight = stretchHeight;
+    _stretchedWidth = stretchWidth;
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelNSOpenGL::FrameSizeChange(int width, int height, int numberOfStreams)
+{
+    //  We got a new frame size from VideoAPI, prepare the buffer
+
+    _owner->LockAGLCntx();
+
+    if (width == _width && _height == height)
+    {
+        // We already have a correct buffer size
+        _numberOfStreams = numberOfStreams;
+        _owner->UnlockAGLCntx();
+        return 0;
+    }
+
+    _width = width;
+    _height = height;
+
+    // Delete the old buffer, create a new one with correct size.
+    if (_buffer)
+    {
+        delete [] _buffer;
+        _bufferSize = 0;
+    }
+
+    _incommingBufferSize = CalcBufferSize(kI420, _width, _height);
+    _bufferSize = CalcBufferSize(kARGB, _width, _height);//_width * _height * bytesPerPixel;
+    _buffer = new unsigned char [_bufferSize];
+    memset(_buffer, 0, _bufferSize * sizeof(unsigned char));
+
+    [_nsglContext makeCurrentContext];
+
+    if(glIsTexture(_texture))
+    {
+        glDeleteTextures(1, (const GLuint*) &_texture);
+        _texture = 0;
+    }
+
+    // Create a new texture
+    glGenTextures(1, (GLuint *) &_texture);
+
+    GLenum glErr = glGetError();
+
+    if (glErr != GL_NO_ERROR)
+    {
+
+    }
+
+    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
+
+    GLint texSize;
+    glGetIntegerv(GL_MAX_TEXTURE_SIZE, &texSize);
+
+    if (texSize < _width || texSize < _height)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    // Set up th texture type and size
+    glTexImage2D(GL_TEXTURE_RECTANGLE_EXT, // target
+            0, // level
+            GL_RGBA, // internal format
+            _width, // width
+            _height, // height
+            0, // border 0/1 = off/on
+            _pixelFormat, // format, GL_RGBA
+            _pixelDataType, // data type, GL_UNSIGNED_INT_8_8_8_8
+            _buffer); // pixel data
+
+    glErr = glGetError();
+    if (glErr != GL_NO_ERROR)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelNSOpenGL::DeliverFrame(unsigned char* buffer, int bufferSize, unsigned int /*timeStamp90kHz*/)
+{
+
+    _owner->LockAGLCntx();
+
+    if (_texture == 0)
+    {
+        _owner->UnlockAGLCntx();
+        return 0;
+    }
+
+    if (bufferSize != _incommingBufferSize)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    int rgbRet = ConvertFromYV12(buffer, _width,
+                                 kBGRA, 0, _width, _height,
+                                 _buffer);
+    if (rgbRet < 0)
+    {
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    [_nsglContext makeCurrentContext];
+
+
+    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture); // Make sure this texture is the active one
+    GLenum glErr = glGetError();
+    if (glErr != GL_NO_ERROR)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "ERROR %d while calling glBindTexture", glErr);
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
+            0, // Level, not use
+            0, // start point x, (low left of pic)
+            0, // start point y,
+            _width, // width
+            _height, // height
+            _pixelFormat, // pictue format for _buffer
+            _pixelDataType, // data type of _buffer
+            (const GLvoid*) _buffer); // the pixel data
+
+    glErr = glGetError();
+    if (glErr != GL_NO_ERROR)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "ERROR %d while calling glTexSubImage2d", glErr);
+        _owner->UnlockAGLCntx();
+        return -1;
+    }
+
+    _bufferIsUpdated = true;
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelNSOpenGL::RenderOffScreenBuffer()
+{
+
+    _owner->LockAGLCntx();
+
+    if (_texture == 0)
+    {
+        _owner->UnlockAGLCntx();
+        return 0;
+    }
+
+    //	if(_fullscreen)
+    //	{
+    // NSRect mainDisplayRect = [[NSScreen mainScreen] frame];
+    //		_width = mainDisplayRect.size.width;
+    //		_height = mainDisplayRect.size.height;
+    //		glViewport(0, 0, mainDisplayRect.size.width, mainDisplayRect.size.height);
+    //		float newX = mainDisplayRect.size.width/_width;
+    //		float newY = mainDisplayRect.size.height/_height;
+
+    // convert from 0.0 <= size <= 1.0 to
+    // open gl world -1.0 < size < 1.0
+    GLfloat xStart = 2.0f * _startWidth - 1.0f;
+    GLfloat xStop = 2.0f * _stopWidth - 1.0f;
+    GLfloat yStart = 1.0f - 2.0f * _stopHeight;
+    GLfloat yStop = 1.0f - 2.0f * _startHeight;
+
+    [_nsglContext makeCurrentContext];
+
+    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
+    _oldStretchedHeight = _stretchedHeight;
+    _oldStretchedWidth = _stretchedWidth;
+
+    glLoadIdentity();
+    glEnable(GL_TEXTURE_RECTANGLE_EXT);
+    glBegin(GL_POLYGON);
+    {
+        glTexCoord2f(0.0, 0.0); glVertex2f(xStart, yStop);
+        glTexCoord2f(_width, 0.0); glVertex2f(xStop, yStop);
+        glTexCoord2f(_width, _height); glVertex2f(xStop, yStart);
+        glTexCoord2f(0.0, _height); glVertex2f(xStart, yStart);
+    }
+    glEnd();
+
+    glDisable(GL_TEXTURE_RECTANGLE_EXT);
+
+    _bufferIsUpdated = false;
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelNSOpenGL::IsUpdated(bool& isUpdated)
+{
+    _owner->LockAGLCntx();
+
+    isUpdated = _bufferIsUpdated;
+
+    _owner->UnlockAGLCntx();
+    return 0;
+}
+
+int VideoChannelNSOpenGL::SetStreamSettings(int /*streamId*/, float startWidth, float startHeight, float stopWidth, float stopHeight)
+{
+    _owner->LockAGLCntx();
+
+    _startWidth = startWidth;
+    _stopWidth = stopWidth;
+    _startHeight = startHeight;
+    _stopHeight = stopHeight;
+
+    int oldWidth = _width;
+    int oldHeight = _height;
+    int oldNumberOfStreams = _numberOfStreams;
+
+    _width = 0;
+    _height = 0;
+
+    int retVal = FrameSizeChange(oldWidth, oldHeight, oldNumberOfStreams);
+
+    _owner->UnlockAGLCntx();
+    return retVal;
+}
+
+int VideoChannelNSOpenGL::SetStreamCropSettings(int /*streamId*/, float /*startWidth*/, float /*startHeight*/, float /*stopWidth*/, float /*stopHeight*/)
+{
+    return -1;
+}
+
+/*
+ *
+ *    VideoRenderNSOpenGL
+ *
+ */
+
+VideoRenderNSOpenGL::VideoRenderNSOpenGL(CocoaRenderView *windowRef, bool fullScreen, int iId) :
+_windowRef( (CocoaRenderView*)windowRef),
+_fullScreen( fullScreen),
+_id( iId),
+_nsglContextCritSec( *CriticalSectionWrapper::CreateCriticalSection()),
+_screenUpdateThread( 0),
+_screenUpdateEvent( 0),
+_nsglContext( 0),
+_nsglFullScreenContext( 0),
+_fullScreenWindow( nil),
+_windowRect( ),
+_windowWidth( 0),
+_windowHeight( 0),
+_nsglChannels( ),
+_zOrderToChannel( ),
+_threadID (0),
+_renderingIsPaused (FALSE),
+_windowRefSuperView(NULL),
+_windowRefSuperViewFrame(NSMakeRect(0,0,0,0))
+{
+    _screenUpdateThread = ThreadWrapper::CreateThread(ScreenUpdateThreadProc, this, kRealtimePriority);
+    _screenUpdateEvent = EventWrapper::Create();
+}
+
+int VideoRenderNSOpenGL::ChangeWindow(CocoaRenderView* newWindowRef)
+{
+
+    LockAGLCntx();
+
+    _windowRef = newWindowRef;
+
+    if(CreateMixingContext() == -1)
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    int error = 0;
+    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
+    while (it!= _nsglChannels.end())
+    {
+        error |= (it->second)->ChangeContext(_nsglContext);
+        it++;
+    }
+    if(error != 0)
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+/* Check if the thread and event already exist.
+ * If so then they will simply be restarted
+ * If not then create them and continue
+ */
+WebRtc_Word32 VideoRenderNSOpenGL::StartRender()
+{
+
+    LockAGLCntx();
+
+    const unsigned int MONITOR_FREQ = 60;
+    if(TRUE == _renderingIsPaused)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "Restarting screenUpdateThread");
+
+        // we already have the thread. Most likely StopRender() was called and they were paused
+        if(FALSE == _screenUpdateThread->Start(_threadID) ||
+                FALSE == _screenUpdateEvent->StartTimer(true, 1000/MONITOR_FREQ))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "Failed to restart screenUpdateThread or screenUpdateEvent");
+            UnlockAGLCntx();
+            return -1;
+        }
+
+        UnlockAGLCntx();
+        return 0;
+    }
+
+
+    if (!_screenUpdateThread)
+    {
+        WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "failed start screenUpdateThread");
+        UnlockAGLCntx();
+        return -1;
+    }
+
+
+    UnlockAGLCntx();
+    return 0;
+}
+WebRtc_Word32 VideoRenderNSOpenGL::StopRender()
+{
+
+    LockAGLCntx();
+
+    /* The code below is functional
+     * but it pauses for several seconds
+     */
+
+    // pause the update thread and the event timer
+    if(!_screenUpdateThread || !_screenUpdateEvent)
+    {
+        _renderingIsPaused = TRUE;
+
+        UnlockAGLCntx();
+        return 0;
+    }
+
+    if(FALSE == _screenUpdateThread->Stop() || FALSE == _screenUpdateEvent->StopTimer())
+    {
+        _renderingIsPaused = FALSE;
+
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    _renderingIsPaused = TRUE;
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+int VideoRenderNSOpenGL::configureNSOpenGLView()
+{
+    return 0;
+
+}
+
+int VideoRenderNSOpenGL::configureNSOpenGLEngine()
+{
+
+    LockAGLCntx();
+
+    // Disable not needed functionality to increase performance
+    glDisable(GL_DITHER);
+    glDisable(GL_ALPHA_TEST);
+    glDisable(GL_STENCIL_TEST);
+    glDisable(GL_FOG);
+    glDisable(GL_TEXTURE_2D);
+    glPixelZoom(1.0, 1.0);
+    glDisable(GL_BLEND);
+    glDisable(GL_DEPTH_TEST);
+    glDepthMask(GL_FALSE);
+    glDisable(GL_CULL_FACE);
+
+    // Set texture parameters
+    glTexParameterf(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_PRIORITY, 1.0);
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
+    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
+    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_STORAGE_HINT_APPLE, GL_STORAGE_SHARED_APPLE);
+
+    if (GetWindowRect(_windowRect) == -1)
+    {
+        UnlockAGLCntx();
+        return true;
+    }
+
+    if (_windowWidth != (_windowRect.right - _windowRect.left)
+            || _windowHeight != (_windowRect.bottom - _windowRect.top))
+    {
+        _windowWidth = _windowRect.right - _windowRect.left;
+        _windowHeight = _windowRect.bottom - _windowRect.top;
+    }
+    glViewport(0, 0, _windowWidth, _windowHeight);
+
+    // Synchronize buffer swaps with vertical refresh rate
+    GLint swapInt = 1;
+    [_nsglContext setValues:&swapInt forParameter:NSOpenGLCPSwapInterval];
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+int VideoRenderNSOpenGL::setRenderTargetWindow()
+{
+    LockAGLCntx();
+
+
+    GLuint attribs[] =
+    {
+        NSOpenGLPFAColorSize, 24,
+        NSOpenGLPFAAlphaSize, 8,
+        NSOpenGLPFADepthSize, 16,
+        NSOpenGLPFAAccelerated,
+        0
+    };
+
+    NSOpenGLPixelFormat* fmt = [[NSOpenGLPixelFormat alloc] initWithAttributes: (NSOpenGLPixelFormatAttribute*) attribs];
+
+    if(_windowRef)
+    {
+        [_windowRef initCocoaRenderView:fmt];
+    }
+    else
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    [fmt release];
+
+    _nsglContext = [_windowRef nsOpenGLContext];
+    [_nsglContext makeCurrentContext];
+
+    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+    glClear(GL_COLOR_BUFFER_BIT);
+
+
+    DisplayBuffers();
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+int VideoRenderNSOpenGL::setRenderTargetFullScreen()
+{
+    LockAGLCntx();
+
+
+    GLuint attribs[] =
+    {
+        NSOpenGLPFAColorSize, 24,
+        NSOpenGLPFAAlphaSize, 8,
+        NSOpenGLPFADepthSize, 16,
+        NSOpenGLPFAAccelerated,
+        0
+    };
+
+    NSOpenGLPixelFormat* fmt = [[NSOpenGLPixelFormat alloc] initWithAttributes: (NSOpenGLPixelFormatAttribute*) attribs];
+
+    // Store original superview and frame for use when exiting full screens
+    _windowRefSuperViewFrame = [_windowRef frame];
+    _windowRefSuperView = [_windowRef superview];
+
+
+    // create new fullscreen window
+    NSRect screenRect = [[NSScreen mainScreen]frame];
+    [_windowRef setFrame:screenRect];
+    [_windowRef setBounds:screenRect];
+
+
+    _fullScreenWindow = [[CocoaFullScreenWindow alloc]init];
+    [_fullScreenWindow grabFullScreen];
+    [[[_fullScreenWindow window] contentView] addSubview:_windowRef];
+
+    if(_windowRef)
+    {
+        [_windowRef initCocoaRenderViewFullScreen:fmt];
+    }
+    else
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    [fmt release];
+
+    _nsglContext = [_windowRef nsOpenGLContext];
+    [_nsglContext makeCurrentContext];
+
+    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    DisplayBuffers();
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+VideoRenderNSOpenGL::~VideoRenderNSOpenGL()
+{
+
+    if(_fullScreen)
+    {
+        if(_fullScreenWindow)
+        {
+            // Detach CocoaRenderView from full screen view back to
+            // it's original parent.
+            [_windowRef removeFromSuperview];
+            if(_windowRefSuperView)
+            {
+              [_windowRefSuperView addSubview:_windowRef];
+              [_windowRef setFrame:_windowRefSuperViewFrame];
+            }
+
+            WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, 0, "%s:%d Attempting to release fullscreen window", __FUNCTION__, __LINE__);
+            [_fullScreenWindow releaseFullScreen];
+
+        }
+    }
+
+    // Signal event to exit thread, then delete it
+    ThreadWrapper* tmpPtr = _screenUpdateThread;
+    _screenUpdateThread = NULL;
+
+    if (tmpPtr)
+    {
+        tmpPtr->SetNotAlive();
+        _screenUpdateEvent->Set();
+        _screenUpdateEvent->StopTimer();
+
+        if (tmpPtr->Stop())
+        {
+            delete tmpPtr;
+        }
+        delete _screenUpdateEvent;
+        _screenUpdateEvent = NULL;
+    }
+
+    if (_nsglContext != 0)
+    {
+        [_nsglContext makeCurrentContext];
+        _nsglContext = nil;
+    }
+
+    // Delete all channels
+    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
+    while (it!= _nsglChannels.end())
+    {
+        delete it->second;
+        _nsglChannels.erase(it);
+        it = _nsglChannels.begin();
+    }
+    _nsglChannels.clear();
+
+    // Clean the zOrder map
+    std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
+    while(zIt != _zOrderToChannel.end())
+    {
+        _zOrderToChannel.erase(zIt);
+        zIt = _zOrderToChannel.begin();
+    }
+    _zOrderToChannel.clear();
+
+}
+
+/* static */
+int VideoRenderNSOpenGL::GetOpenGLVersion(int& /*nsglMajor*/, int& /*nsglMinor*/)
+{
+    return -1;
+}
+
+int VideoRenderNSOpenGL::Init()
+{
+
+    LockAGLCntx();
+    if (!_screenUpdateThread)
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    _screenUpdateThread->Start(_threadID);
+
+    // Start the event triggering the render process
+    unsigned int monitorFreq = 60;
+    _screenUpdateEvent->StartTimer(true, 1000/monitorFreq);
+
+    if (CreateMixingContext() == -1)
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+VideoChannelNSOpenGL* VideoRenderNSOpenGL::CreateNSGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
+{
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    if (HasChannel(channel))
+    {
+        return NULL;
+    }
+
+    if (_zOrderToChannel.find(zOrder) != _zOrderToChannel.end())
+    {
+
+    }
+
+    VideoChannelNSOpenGL* newAGLChannel = new VideoChannelNSOpenGL(_nsglContext, _id, this);
+    if (newAGLChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
+    {
+        if (newAGLChannel)
+        {
+            delete newAGLChannel;
+            newAGLChannel = NULL;
+        }
+
+        return NULL;
+    }
+
+    _nsglChannels[channel] = newAGLChannel;
+    _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
+
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s successfully created NSGL channel number %d", __FUNCTION__, channel);
+
+    return newAGLChannel;
+}
+
+int VideoRenderNSOpenGL::DeleteAllNSGLChannels()
+{
+
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    std::map<int, VideoChannelNSOpenGL*>::iterator it;
+    it = _nsglChannels.begin();
+
+    while (it != _nsglChannels.end())
+    {
+        VideoChannelNSOpenGL* channel = it->second;
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s Deleting channel %d", __FUNCTION__, channel);
+        delete channel;
+        it++;
+    }
+    _nsglChannels.clear();
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderNSOpenGL::DeleteNSGLChannel(const WebRtc_UWord32 channel)
+{
+
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    std::map<int, VideoChannelNSOpenGL*>::iterator it;
+    it = _nsglChannels.find(channel);
+    if (it != _nsglChannels.end())
+    {
+        delete it->second;
+        _nsglChannels.erase(it);
+    }
+    else
+    {
+        return -1;
+    }
+
+    std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
+    while( zIt != _zOrderToChannel.end())
+    {
+        if (zIt->second == (int)channel)
+        {
+            _zOrderToChannel.erase(zIt);
+            break;
+        }
+        zIt++;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderNSOpenGL::GetChannelProperties(const WebRtc_UWord16 streamId,
+        WebRtc_UWord32& zOrder,
+        float& left,
+        float& top,
+        float& right,
+        float& bottom)
+{
+
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    bool channelFound = false;
+
+    // Loop through all channels until we find a match.
+    // From that, get zorder.
+    // From that, get T, L, R, B
+    for (std::multimap<int, int>::reverse_iterator rIt = _zOrderToChannel.rbegin();
+            rIt != _zOrderToChannel.rend();
+            rIt++)
+    {
+        if(streamId == rIt->second)
+        {
+            channelFound = true;
+
+            zOrder = rIt->second;
+
+            std::map<int, VideoChannelNSOpenGL*>::iterator rIt = _nsglChannels.find(streamId);
+            VideoChannelNSOpenGL* tempChannel = rIt->second;
+
+            if(-1 == tempChannel->GetChannelProperties(left, top, right, bottom) )
+            {
+                return -1;
+            }
+            break;
+        }
+    }
+
+    if(false == channelFound)
+    {
+
+        return -1;
+    }
+
+    return 0;
+}
+
+int VideoRenderNSOpenGL::StopThread()
+{
+
+    ThreadWrapper* tmpPtr = _screenUpdateThread;
+    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s Stopping thread ", __FUNCTION__, _screenUpdateThread);
+    _screenUpdateThread = NULL;
+
+    if (tmpPtr)
+    {
+        tmpPtr->SetNotAlive();
+        _screenUpdateEvent->Set();
+        if (tmpPtr->Stop())
+        {
+            delete tmpPtr;
+        }
+    }
+
+    delete _screenUpdateEvent;
+    _screenUpdateEvent = NULL;
+
+    return 0;
+}
+
+bool VideoRenderNSOpenGL::IsFullScreen()
+{
+
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+    return _fullScreen;
+}
+
+bool VideoRenderNSOpenGL::HasChannels()
+{
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    if (_nsglChannels.begin() != _nsglChannels.end())
+    {
+        return true;
+    }
+    return false;
+}
+
+bool VideoRenderNSOpenGL::HasChannel(int channel)
+{
+
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.find(channel);
+
+    if (it != _nsglChannels.end())
+    {
+        return true;
+    }
+    return false;
+}
+
+int VideoRenderNSOpenGL::GetChannels(std::list<int>& channelList)
+{
+
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
+
+    while (it != _nsglChannels.end())
+    {
+        channelList.push_back(it->first);
+        it++;
+    }
+
+    return 0;
+}
+
+VideoChannelNSOpenGL* VideoRenderNSOpenGL::ConfigureNSGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
+{
+
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.find(channel);
+
+    if (it != _nsglChannels.end())
+    {
+        VideoChannelNSOpenGL* aglChannel = it->second;
+        if (aglChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s failed to set stream settings: channel %d. channel=%d zOrder=%d startWidth=%d startHeight=%d stopWidth=%d stopHeight=%d",
+                    __FUNCTION__, channel, zOrder, startWidth, startHeight, stopWidth, stopHeight);
+            return NULL;
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s Configuring channel %d. channel=%d zOrder=%d startWidth=%d startHeight=%d stopWidth=%d stopHeight=%d",
+                __FUNCTION__, channel, zOrder, startWidth, startHeight, stopWidth, stopHeight);
+
+        std::multimap<int, int>::iterator it = _zOrderToChannel.begin();
+        while(it != _zOrderToChannel.end())
+        {
+            if (it->second == channel)
+            {
+                if (it->first != zOrder)
+                {
+                    _zOrderToChannel.erase(it);
+                    _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
+                }
+                break;
+            }
+            it++;
+        }
+        return aglChannel;
+    }
+
+    return NULL;
+}
+
+/*
+ *
+ *    Rendering process
+ *
+ */
+
+bool VideoRenderNSOpenGL::ScreenUpdateThreadProc(void* obj)
+{
+    return static_cast<VideoRenderNSOpenGL*>(obj)->ScreenUpdateProcess();
+}
+
+bool VideoRenderNSOpenGL::ScreenUpdateProcess()
+{
+
+    _screenUpdateEvent->Wait(10);
+    LockAGLCntx();
+
+    if (!_screenUpdateThread)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s no screen update thread", __FUNCTION__);
+        UnlockAGLCntx();
+        return false;
+    }
+
+    [_nsglContext makeCurrentContext];
+
+    if (GetWindowRect(_windowRect) == -1)
+    {
+        UnlockAGLCntx();
+        return true;
+    }
+
+    if (_windowWidth != (_windowRect.right - _windowRect.left)
+            || _windowHeight != (_windowRect.bottom - _windowRect.top))
+    {
+        _windowWidth = _windowRect.right - _windowRect.left;
+        _windowHeight = _windowRect.bottom - _windowRect.top;
+        glViewport(0, 0, _windowWidth, _windowHeight);
+    }
+
+    // Check if there are any updated buffers
+    bool updated = false;
+    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
+    while (it != _nsglChannels.end())
+    {
+
+        VideoChannelNSOpenGL* aglChannel = it->second;
+        aglChannel->UpdateStretchSize(_windowHeight, _windowWidth);
+        aglChannel->IsUpdated(updated);
+        if (updated)
+        {
+            break;
+        }
+        it++;
+    }
+
+    if (updated)
+    {
+
+        // At least on buffers is updated, we need to repaint the texture
+        if (RenderOffScreenBuffers() != -1)
+        {
+            UnlockAGLCntx();
+            return true;
+        }
+    }
+    //    }
+    UnlockAGLCntx();
+    return true;
+}
+
+/*
+ *
+ *    Functions for creating mixing buffers and screen settings
+ *
+ */
+
+int VideoRenderNSOpenGL::CreateMixingContext()
+{
+
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    if(_fullScreen)
+    {
+        if(-1 == setRenderTargetFullScreen())
+        {
+            return -1;
+        }
+    }
+    else
+    {
+
+        if(-1 == setRenderTargetWindow())
+        {
+            return -1;
+        }
+    }
+
+    configureNSOpenGLEngine();
+
+    DisplayBuffers();
+
+    GLenum glErr = glGetError();
+    if (glErr)
+    {
+    }
+
+    return 0;
+}
+
+/*
+ *
+ *    Rendering functions
+ *
+ */
+
+int VideoRenderNSOpenGL::RenderOffScreenBuffers()
+{
+    LockAGLCntx();
+
+    // Get the current window size, it might have changed since last render.
+    if (GetWindowRect(_windowRect) == -1)
+    {
+        UnlockAGLCntx();
+        return -1;
+    }
+
+    [_nsglContext makeCurrentContext];
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    // Loop through all channels starting highest zOrder ending with lowest.
+    for (std::multimap<int, int>::reverse_iterator rIt = _zOrderToChannel.rbegin();
+            rIt != _zOrderToChannel.rend();
+            rIt++)
+    {
+        int channelId = rIt->second;
+        std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.find(channelId);
+
+        VideoChannelNSOpenGL* aglChannel = it->second;
+
+        aglChannel->RenderOffScreenBuffer();
+    }
+
+    DisplayBuffers();
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+/*
+ *
+ * Help functions
+ *
+ * All help functions assumes external protections
+ *
+ */
+
+int VideoRenderNSOpenGL::DisplayBuffers()
+{
+
+    LockAGLCntx();
+
+    glFinish();
+    [_nsglContext flushBuffer];
+
+    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s glFinish and [_nsglContext flushBuffer]", __FUNCTION__);
+
+    UnlockAGLCntx();
+    return 0;
+}
+
+int VideoRenderNSOpenGL::GetWindowRect(Rect& rect)
+{
+
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+
+    if (_windowRef)
+    {
+        if(_fullScreen)
+        {
+            NSRect mainDisplayRect = [[NSScreen mainScreen] frame];
+            rect.bottom = 0;
+            rect.left = 0;
+            rect.right = mainDisplayRect.size.width;
+            rect.top = mainDisplayRect.size.height;
+        }
+        else
+        {
+            rect.top = [_windowRef frame].origin.y;
+            rect.left = [_windowRef frame].origin.x;
+            rect.bottom = [_windowRef frame].origin.y + [_windowRef frame].size.height;
+            rect.right = [_windowRef frame].origin.x + [_windowRef frame].size.width;
+        }
+
+        return 0;
+    }
+    else
+    {
+        return -1;
+    }
+}
+
+WebRtc_Word32 VideoRenderNSOpenGL::ChangeUniqueID(WebRtc_Word32 id)
+{
+
+    CriticalSectionScoped cs(&_nsglContextCritSec);
+    _id = id;
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderNSOpenGL::SetText(const WebRtc_UWord8 /*textId*/,
+        const WebRtc_UWord8* /*text*/,
+        const WebRtc_Word32 /*textLength*/,
+        const WebRtc_UWord32 /*textColorRef*/,
+        const WebRtc_UWord32 /*backgroundColorRef*/,
+        const float /*left*/,
+        const float /*top*/,
+        const float /*right*/,
+        const float /*bottom*/)
+{
+
+    return 0;
+
+}
+
+void VideoRenderNSOpenGL::LockAGLCntx()
+{
+    _nsglContextCritSec.Enter();
+}
+void VideoRenderNSOpenGL::UnlockAGLCntx()
+{
+    _nsglContextCritSec.Leave();
+}
+
+/*
+
+ bool VideoRenderNSOpenGL::SetFullScreen(bool fullscreen)
+ {
+ NSRect mainDisplayRect, viewRect;
+
+ // Create a screen-sized window on the display you want to take over
+ // Note, mainDisplayRect has a non-zero origin if the key window is on a secondary display
+ mainDisplayRect = [[NSScreen mainScreen] frame];
+ fullScreenWindow = [[NSWindow alloc] initWithContentRect:mainDisplayRect styleMask:NSBorderlessWindowMask
+ backing:NSBackingStoreBuffered defer:YES];
+
+ // Set the window level to be above the menu bar
+ [fullScreenWindow setLevel:NSMainMenuWindowLevel+1];
+
+ // Perform any other window configuration you desire
+ [fullScreenWindow setOpaque:YES];
+ [fullScreenWindow setHidesOnDeactivate:YES];
+
+ // Create a view with a double-buffered OpenGL context and attach it to the window
+ // By specifying the non-fullscreen context as the shareContext, we automatically inherit the OpenGL objects (textures, etc) it has defined
+ viewRect = NSMakeRect(0.0, 0.0, mainDisplayRect.size.width, mainDisplayRect.size.height);
+ fullScreenView = [[MyOpenGLView alloc] initWithFrame:viewRect shareContext:[openGLView openGLContext]];
+ [fullScreenWindow setContentView:fullScreenView];
+
+ // Show the window
+ [fullScreenWindow makeKeyAndOrderFront:self];
+
+ // Set the scene with the full-screen viewport and viewing transformation
+ [scene setViewportRect:viewRect];
+
+ // Assign the view's MainController to self
+ [fullScreenView setMainController:self];
+
+ if (!isAnimating) {
+ // Mark the view as needing drawing to initalize its contents
+ [fullScreenView setNeedsDisplay:YES];
+ }
+ else {
+ // Start playing the animation
+ [fullScreenView startAnimation];
+ }
+
+ }
+
+
+
+ */
+
+
+} //namespace webrtc
+
+#endif // COCOA_RENDERING
diff --git a/src/modules/video_render/main/source/video_render.gypi b/src/modules/video_render/main/source/video_render.gypi
new file mode 100644
index 0000000..ea1eefa
--- /dev/null
+++ b/src/modules/video_render/main/source/video_render.gypi
@@ -0,0 +1,219 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'video_render_module',
+      'type': '<(library)',
+      'dependencies': [
+        'webrtc_utility',
+        '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        '.',
+        '../interface',
+        '../../../interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '../interface',
+          '../../../interface',
+        ],
+      },
+      'sources': [
+        # interfaces
+        '../interface/video_render.h',
+        '../interface/video_render_defines.h',
+
+        # headers
+        'incoming_video_stream.h',
+        'video_render_frames.h',
+        'video_render_impl.h',
+        'i_video_render.h',
+        # Android
+        'android/video_render_android_impl.h',
+        'android/video_render_android_native_opengl2.h',
+        'android/video_render_android_surface_view.h',
+        'android/video_render_opengles20.h',
+        # Linux
+        'linux/video_render_linux_impl.h',
+        'linux/video_x11_channel.h',
+        'linux/video_x11_render.h',
+        # Mac
+        'mac/cocoa_full_screen_window.h',
+        'mac/cocoa_render_view.h',
+        'mac/video_render_agl.h',
+        'mac/video_render_mac_carbon_impl.h',
+        'mac/video_render_mac_cocoa_impl.h',
+        'mac/video_render_nsopengl.h',
+        # Windows
+        'windows/i_video_render_win.h',
+        'windows/video_render_direct3d9.h',
+        'windows/video_render_directdraw.h',
+        'windows/video_render_windows_impl.h',
+        # External
+        'external/video_render_external_impl.h',
+
+        # PLATFORM INDEPENDENT SOURCE FILES
+        'incoming_video_stream.cc',
+        'video_render_frames.cc',
+        'video_render_impl.cc',
+        # PLATFORM SPECIFIC SOURCE FILES - Will be filtered below
+        # Android
+        'android/video_render_android_impl.cc',
+        'android/video_render_android_native_opengl2.cc',
+        'android/video_render_android_surface_view.cc',
+        'android/video_render_opengles20.cc',
+        # Linux
+        'linux/video_render_linux_impl.cc',
+        'linux/video_x11_channel.cc',
+        'linux/video_x11_render.cc',
+        # Mac
+        'mac/video_render_nsopengl.mm',
+        'mac/video_render_mac_cocoa_impl.mm',
+        'mac/video_render_agl.cc',
+        'mac/video_render_mac_carbon_impl.cc',
+        'mac/cocoa_render_view.mm',
+        'mac/cocoa_full_screen_window.mm',
+        # Windows
+        'windows/video_render_direct3d9.cc',
+        'windows/video_render_directdraw.cc',
+        'windows/video_render_windows_impl.cc',
+        # External
+        'external/video_render_external_impl.cc',
+      ],
+      # TODO(andrew): with the proper suffix, these files will be excluded
+      # automatically.
+      'conditions': [
+        ['include_internal_video_render==1', {
+          'defines': [
+            'WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER',
+          ],
+        }],
+        ['OS!="android" or include_internal_video_render==0', {
+          'sources!': [
+            # Android
+            'android/video_render_android_impl.h',
+            'android/video_render_android_native_opengl2.h',
+            'android/video_render_android_surface_view.h',
+            'android/video_render_opengles20.h',
+            'android/video_render_android_impl.cc',
+            'android/video_render_android_native_opengl2.cc',
+            'android/video_render_android_surface_view.cc',
+            'android/video_render_opengles20.cc',
+          ],
+        }],
+        ['OS!="linux" or include_internal_video_render==0', {
+          'sources!': [
+            'linux/video_render_linux_impl.h',
+            'linux/video_x11_channel.h',
+            'linux/video_x11_render.h',
+            'linux/video_render_linux_impl.cc',
+            'linux/video_x11_channel.cc',
+            'linux/video_x11_render.cc',
+          ],
+        }],
+        ['OS!="mac" or include_internal_video_render==0', {
+          'sources!': [
+            'mac/cocoa_full_screen_window.h',
+            'mac/cocoa_render_view.h',
+            'mac/video_render_agl.h',
+            'mac/video_render_mac_carbon_impl.h',
+            'mac/video_render_mac_cocoa_impl.h',
+            'mac/video_render_nsopengl.h',
+            'mac/video_render_nsopengl.mm',
+            'mac/video_render_mac_cocoa_impl.mm',
+            'mac/video_render_agl.cc',
+            'mac/video_render_mac_carbon_impl.cc',
+            'mac/cocoa_render_view.mm',
+            'mac/cocoa_full_screen_window.mm',
+          ],
+        }],
+        ['OS=="mac"', {
+          'direct_dependent_settings': {
+            'include_dirs': [
+              'mac',
+            ],
+          },
+        }],
+        ['OS!="win" or include_internal_video_render==0', {
+          'sources!': [
+            'windows/i_video_render_win.h',
+            'windows/video_render_direct3d9.h',
+            'windows/video_render_directdraw.h',
+            'windows/video_render_windows_impl.h',
+            'windows/video_render_direct3d9.cc',
+            'windows/video_render_directdraw.cc',
+            'windows/video_render_windows_impl.cc',
+          ],
+        }],
+      ] # conditions
+    }, # video_render_module
+  ], # targets
+
+  # Exclude the test target when building with chromium.
+  'conditions': [
+    ['include_internal_video_render==1', {
+      'defines': [
+        'WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER',
+      ],
+    }],
+    ['include_tests==1', {
+      'targets': [
+        {
+          'target_name': 'video_render_module_test',
+          'type': 'executable',
+          'dependencies': [
+            'video_render_module',
+            'webrtc_utility',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+            '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
+          ],
+          'sources': [
+            '../test/testAPI/testAPI.cc',
+            '../test/testAPI/testAPI.h',
+            '../test/testAPI/testAPI_android.cc',
+            '../test/testAPI/testAPI_mac.mm',
+          ],
+          'conditions': [
+            ['OS=="mac" or OS=="linux"', {
+              'cflags': [
+                '-Wno-write-strings',
+              ],
+              'ldflags': [
+                '-lpthread -lm',
+              ],
+            }],
+            ['OS=="linux"', {
+              'libraries': [
+                '-lrt',
+                '-lXext',
+                '-lX11',
+              ],
+            }],
+            ['OS=="mac"', {
+              'xcode_settings': {
+                'OTHER_LDFLAGS': [
+                  '-framework Foundation -framework AppKit -framework Cocoa -framework OpenGL',
+                ],
+              },
+            }],
+          ] # conditions
+        }, # video_render_module_test
+      ], # targets
+    }], # include_tests==0
+  ], # conditions
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/modules/video_render/main/source/video_render_frames.cc b/src/modules/video_render/main/source/video_render_frames.cc
new file mode 100644
index 0000000..259e4ed
--- /dev/null
+++ b/src/modules/video_render/main/source/video_render_frames.cc
@@ -0,0 +1,174 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_render/main/source/video_render_frames.h"
+
+#include <cassert>
+
+#include "modules/interface/module_common_types.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "system_wrappers/interface/trace.h"
+
+namespace webrtc {
+
+WebRtc_Word32 KEventMaxWaitTimeMs = 200;
+
+VideoRenderFrames::VideoRenderFrames()
+    : incoming_frames_(),
+      render_delay_ms_(10) {
+}
+
+VideoRenderFrames::~VideoRenderFrames() {
+  ReleaseAllFrames();
+}
+
+WebRtc_Word32 VideoRenderFrames::AddFrame(VideoFrame* new_frame) {
+  const WebRtc_Word64 time_now = TickTime::MillisecondTimestamp();
+
+  if (new_frame->RenderTimeMs() + KOldRenderTimestampMS < time_now) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
+                 "%s: too old frame.", __FUNCTION__);
+    return -1;
+  }
+  if (new_frame->RenderTimeMs() > time_now + KFutureRenderTimestampMS) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, -1,
+                 "%s: frame too long into the future.", __FUNCTION__);
+    return -1;
+  }
+
+  // Get an empty frame
+  VideoFrame* frame_to_add = NULL;
+  if (!empty_frames_.Empty()) {
+    ListItem* item = empty_frames_.First();
+    if (item) {
+      frame_to_add = static_cast<VideoFrame*>(item->GetItem());
+      empty_frames_.Erase(item);
+    }
+  }
+  if (!frame_to_add) {
+    if (empty_frames_.GetSize() + incoming_frames_.GetSize() >
+        KMaxNumberOfFrames) {
+      // Already allocated too many frames.
+      WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer,
+                   -1, "%s: too many frames, limit: %d", __FUNCTION__,
+                   KMaxNumberOfFrames);
+      return -1;
+    }
+
+    // Allocate new memory.
+    WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer, -1,
+                 "%s: allocating buffer %d", __FUNCTION__,
+                 empty_frames_.GetSize() + incoming_frames_.GetSize());
+
+    frame_to_add = new VideoFrame();
+    if (!frame_to_add) {
+      WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                   "%s: could not create new frame for", __FUNCTION__);
+      return -1;
+    }
+  }
+
+  frame_to_add->VerifyAndAllocate(new_frame->Length());
+  // TODO(mflodman) Change this!
+  // Remove const ness. Copying will be costly.
+  frame_to_add->SwapFrame(const_cast<VideoFrame&>(*new_frame));
+  incoming_frames_.PushBack(frame_to_add);
+
+  return incoming_frames_.GetSize();
+}
+
+VideoFrame* VideoRenderFrames::FrameToRender() {
+  VideoFrame* render_frame = NULL;
+  while (!incoming_frames_.Empty()) {
+    ListItem* item = incoming_frames_.First();
+    if (item) {
+      VideoFrame* oldest_frame_in_list =
+          static_cast<VideoFrame*>(item->GetItem());
+      if (oldest_frame_in_list->RenderTimeMs() <=
+          TickTime::MillisecondTimestamp() + render_delay_ms_) {
+        // This is the oldest one so far and it's OK to render.
+        if (render_frame) {
+          // This one is older than the newly found frame, remove this one.
+          render_frame->SetWidth(0);
+          render_frame->SetHeight(0);
+          render_frame->SetLength(0);
+          render_frame->SetRenderTime(0);
+          render_frame->SetTimeStamp(0);
+          empty_frames_.PushFront(render_frame);
+        }
+        render_frame = oldest_frame_in_list;
+        incoming_frames_.Erase(item);
+      } else {
+        // We can't release this one yet, we're done here.
+        break;
+      }
+    } else {
+      assert(false);
+    }
+  }
+  return render_frame;
+}
+
+WebRtc_Word32 VideoRenderFrames::ReturnFrame(VideoFrame* old_frame) {
+  old_frame->SetWidth(0);
+  old_frame->SetHeight(0);
+  old_frame->SetRenderTime(0);
+  old_frame->SetLength(0);
+  empty_frames_.PushBack(old_frame);
+  return 0;
+}
+
+WebRtc_Word32 VideoRenderFrames::ReleaseAllFrames() {
+  while (!incoming_frames_.Empty()) {
+    ListItem* item = incoming_frames_.First();
+    if (item) {
+      VideoFrame* frame = static_cast<VideoFrame*>(item->GetItem());
+      assert(frame != NULL);
+      frame->Free();
+      delete frame;
+    }
+    incoming_frames_.Erase(item);
+  }
+  while (!empty_frames_.Empty()) {
+    ListItem* item = empty_frames_.First();
+    if (item) {
+      VideoFrame* frame = static_cast<VideoFrame*>(item->GetItem());
+      assert(frame != NULL);
+      frame->Free();
+      delete frame;
+    }
+    empty_frames_.Erase(item);
+  }
+  return 0;
+}
+
+WebRtc_UWord32 VideoRenderFrames::TimeToNextFrameRelease() {
+  WebRtc_Word64 time_to_release = 0;
+  ListItem* item = incoming_frames_.First();
+  if (item) {
+    VideoFrame* oldest_frame = static_cast<VideoFrame*>(item->GetItem());
+    time_to_release = oldest_frame->RenderTimeMs() - render_delay_ms_
+                      - TickTime::MillisecondTimestamp();
+    if (time_to_release < 0) {
+      time_to_release = 0;
+    }
+  } else {
+    time_to_release = KEventMaxWaitTimeMs;
+  }
+  return static_cast<WebRtc_UWord32>(time_to_release);
+}
+
+WebRtc_Word32 VideoRenderFrames::SetRenderDelay(
+    const WebRtc_UWord32 render_delay) {
+  render_delay_ms_ = render_delay;
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/src/modules/video_render/main/source/video_render_frames.h b/src/modules/video_render/main/source/video_render_frames.h
new file mode 100644
index 0000000..bdacc36
--- /dev/null
+++ b/src/modules/video_render/main/source/video_render_frames.h
@@ -0,0 +1,62 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_  // NOLINT
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_  // NOLINT
+
+#include "modules/video_render/main/interface/video_render.h"
+#include "system_wrappers/interface/list_wrapper.h"
+
+namespace webrtc {
+
+// Class definitions
+class VideoRenderFrames {
+ public:
+  VideoRenderFrames();
+  ~VideoRenderFrames();
+
+  // Add a frame to the render queue
+  WebRtc_Word32 AddFrame(VideoFrame* new_frame);
+
+  // Get a frame for rendering, if it's time to render.
+  VideoFrame* FrameToRender();
+
+  // Return an old frame
+  WebRtc_Word32 ReturnFrame(VideoFrame* old_frame);
+
+  // Releases all frames
+  WebRtc_Word32 ReleaseAllFrames();
+
+  // Returns the number of ms to next frame to render
+  WebRtc_UWord32 TimeToNextFrameRelease();
+
+  // Sets estimates delay in renderer
+  WebRtc_Word32 SetRenderDelay(const WebRtc_UWord32 render_delay);
+
+ private:
+  // 10 seconds for 30 fps.
+  enum { KMaxNumberOfFrames = 300 };
+  // Don't render frames with timestamp older than 500ms from now.
+  enum { KOldRenderTimestampMS = 500 };
+  // Don't render frames with timestamp more than 10s into the future.
+  enum { KFutureRenderTimestampMS = 10000 };
+
+  // Sorted list with framed to be rendered, oldest first.
+  ListWrapper incoming_frames_;
+  // Empty frames.
+  ListWrapper empty_frames_;
+
+  // Estimated delay from a frame is released until it's rendered.
+  WebRtc_UWord32 render_delay_ms_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_FRAMES_H_  // NOLINT
diff --git a/src/modules/video_render/main/source/video_render_impl.cc b/src/modules/video_render/main/source/video_render_impl.cc
new file mode 100644
index 0000000..6291791
--- /dev/null
+++ b/src/modules/video_render/main/source/video_render_impl.cc
@@ -0,0 +1,971 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_render_impl.h"
+#include "engine_configurations.h"
+#include "critical_section_wrapper.h"
+#include "video_render_defines.h"
+#include "trace.h"
+#include "incoming_video_stream.h"
+#include "i_video_render.h"
+
+#include <cassert>
+
+#ifdef WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
+
+#if defined (_WIN32)
+#include "windows/video_render_windows_impl.h"
+#define STANDARD_RENDERING kRenderWindows
+
+// MAC_IPHONE should go before WEBRTC_MAC_INTEL because WEBRTC_MAC_INTEL
+// gets defined if MAC_IPHONE is defined
+#elif defined(MAC_IPHONE)
+#if defined(IPHONE_GLES_RENDERING)
+#define STANDARD_RENDERING kRenderiPhone
+#include "iPhone/video_render_iphone_impl.h"
+#endif
+
+#elif defined(WEBRTC_MAC) || defined(WEBRTC_MAC_INTEL)
+#if defined(COCOA_RENDERING)
+#define STANDARD_RENDERING kRenderCocoa
+#include "mac/video_render_mac_cocoa_impl.h"
+#elif defined(CARBON_RENDERING)
+#define STANDARD_RENDERING kRenderCarbon
+#include "mac/video_render_mac_carbon_impl.h"
+#endif
+
+#elif defined(WEBRTC_ANDROID)
+#include "android/video_render_android_impl.h"
+#include "android/video_render_android_surface_view.h"
+#include "android/video_render_android_native_opengl2.h"
+#define STANDARD_RENDERING	kRenderAndroid
+
+#elif defined(WEBRTC_LINUX)
+#include "linux/video_render_linux_impl.h"
+#define STANDARD_RENDERING kRenderX11
+
+#else
+//Other platforms
+#endif
+
+#endif  // WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
+
+// For external rendering
+#include "external/video_render_external_impl.h"
+#ifndef STANDARD_RENDERING
+#define STANDARD_RENDERING kRenderExternal
+#endif  // STANDARD_RENDERING
+
+namespace webrtc {
+
+VideoRender*
+VideoRender::CreateVideoRender(const WebRtc_Word32 id,
+                               void* window,
+                               const bool fullscreen,
+                               const VideoRenderType videoRenderType/*=kRenderDefault*/)
+{
+    VideoRenderType resultVideoRenderType = videoRenderType;
+    if (videoRenderType == kRenderDefault)
+    {
+        resultVideoRenderType = STANDARD_RENDERING;
+    }
+    return new ModuleVideoRenderImpl(id, resultVideoRenderType, window,
+                                     fullscreen);
+}
+
+void VideoRender::DestroyVideoRender(
+                                                         VideoRender* module)
+{
+    if (module)
+    {
+        delete module;
+    }
+}
+
+ModuleVideoRenderImpl::ModuleVideoRenderImpl(
+                                             const WebRtc_Word32 id,
+                                             const VideoRenderType videoRenderType,
+                                             void* window,
+                                             const bool fullscreen) :
+    _id(id), _moduleCrit(*CriticalSectionWrapper::CreateCriticalSection()),
+    _ptrWindow(window), _fullScreen(fullscreen), _ptrRenderer(NULL),
+    _streamRenderMap(*(new MapWrapper()))
+{
+
+    // Create platform specific renderer
+    switch (videoRenderType)
+    {
+#ifdef WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
+
+#if defined(_WIN32)
+        case kRenderWindows:
+        {
+            VideoRenderWindowsImpl* ptrRenderer;
+            ptrRenderer = new VideoRenderWindowsImpl(_id, videoRenderType, window, _fullScreen);
+            if (ptrRenderer)
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+            }
+        }
+        break;
+
+#elif defined(MAC_IPHONE)
+        case kRenderiPhone:
+        {
+            VideoRenderIPhoneImpl* ptrRenderer = new VideoRenderIPhoneImpl(_id, videoRenderType, window, _fullScreen);
+            if(ptrRenderer)
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+            }
+        }
+        break;
+
+#elif defined(WEBRTC_MAC) || defined(WEBRTC_MAC_INTEL)
+
+#if defined(COCOA_RENDERING)
+        case kRenderCocoa:
+        {
+            VideoRenderMacCocoaImpl* ptrRenderer = new VideoRenderMacCocoaImpl(_id, videoRenderType, window, _fullScreen);
+            if(ptrRenderer)
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+            }
+        }
+
+        break;
+#elif defined(CARBON_RENDERING)
+        case kRenderCarbon:
+        {
+            VideoRenderMacCarbonImpl* ptrRenderer = new VideoRenderMacCarbonImpl(_id, videoRenderType, window, _fullScreen);
+            if(ptrRenderer)
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+            }
+        }
+        break;
+#endif
+
+#elif defined(WEBRTC_ANDROID)
+        case kRenderAndroid:
+        {
+            if(AndroidNativeOpenGl2Renderer::UseOpenGL2(window))
+            {
+                AndroidNativeOpenGl2Renderer* ptrRenderer = NULL;
+                ptrRenderer = new AndroidNativeOpenGl2Renderer(_id, videoRenderType, window, _fullScreen);
+                if (ptrRenderer)
+                {
+                    _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
+                }
+            }
+            else
+            {
+                AndroidSurfaceViewRenderer* ptrRenderer = NULL;
+                ptrRenderer = new AndroidSurfaceViewRenderer(_id, videoRenderType, window, _fullScreen);
+                if (ptrRenderer)
+                {
+                    _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
+                }
+            }
+
+        }
+        break;
+#elif defined(WEBRTC_LINUX)
+        case kRenderX11:
+        {
+            VideoRenderLinuxImpl* ptrRenderer = NULL;
+            ptrRenderer = new VideoRenderLinuxImpl(_id, videoRenderType, window, _fullScreen);
+            if ( ptrRenderer )
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
+            }
+        }
+        break;
+
+#else
+        // Other platforms
+#endif
+
+#endif  // WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
+        case kRenderExternal:
+        {
+            VideoRenderExternalImpl* ptrRenderer(NULL);
+            ptrRenderer = new VideoRenderExternalImpl(_id, videoRenderType,
+                                                      window, _fullScreen);
+            if (ptrRenderer)
+            {
+                _ptrRenderer = reinterpret_cast<IVideoRender*> (ptrRenderer);
+            }
+        }
+            break;
+        default:
+            // Error...
+            break;
+    }
+    if (_ptrRenderer)
+    {
+        if (_ptrRenderer->Init() == -1)
+        {
+        }
+    }
+}
+
+ModuleVideoRenderImpl::~ModuleVideoRenderImpl()
+{
+    delete &_moduleCrit;
+
+    while (_streamRenderMap.Size() > 0)
+    {
+        MapItem* item = _streamRenderMap.First();
+        IncomingVideoStream* ptrIncomingStream =
+                static_cast<IncomingVideoStream*> (item->GetItem());
+        assert(ptrIncomingStream != NULL);
+        delete ptrIncomingStream;
+        _streamRenderMap.Erase(item);
+    }
+    delete &_streamRenderMap;
+
+    // Delete platform specific renderer
+    if (_ptrRenderer)
+    {
+        VideoRenderType videoRenderType = _ptrRenderer->RenderType();
+        switch (videoRenderType)
+        {
+            case kRenderExternal:
+            {
+                VideoRenderExternalImpl
+                        * ptrRenderer =
+                                reinterpret_cast<VideoRenderExternalImpl*> (_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+#ifdef WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
+
+#if defined(_WIN32)
+            case kRenderWindows:
+            {
+                VideoRenderWindowsImpl* ptrRenderer = reinterpret_cast<VideoRenderWindowsImpl*>(_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+#elif defined(WEBRTC_MAC) || defined(WEBRTC_MAC_INTEL)
+
+#if defined(COCOA_RENDERING)
+            case kRenderCocoa:
+            {
+                VideoRenderMacCocoaImpl* ptrRenderer = reinterpret_cast<VideoRenderMacCocoaImpl*> (_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+#elif defined(CARBON_RENDERING)
+            case kRenderCarbon:
+            {
+                VideoRenderMacCarbonImpl* ptrRenderer = reinterpret_cast<VideoRenderMacCarbonImpl*> (_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+#endif
+
+#elif defined(MAC_IPHONE)
+            case kRenderiPhone:
+            break;
+
+#elif defined(WEBRTC_ANDROID)
+            case kRenderAndroid:
+            {
+                VideoRenderAndroid* ptrRenderer = reinterpret_cast<VideoRenderAndroid*> (_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+
+#elif defined(WEBRTC_LINUX)
+            case kRenderX11:
+            {
+                VideoRenderLinuxImpl* ptrRenderer = reinterpret_cast<VideoRenderLinuxImpl*> (_ptrRenderer);
+                _ptrRenderer = NULL;
+                delete ptrRenderer;
+            }
+            break;
+#else
+            //other platforms
+#endif
+
+#endif  // WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
+
+            default:
+                // Error...
+                break;
+        }
+    }
+}
+
+WebRtc_Word32 ModuleVideoRenderImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    _id = id;
+
+    if (_ptrRenderer)
+    {
+        _ptrRenderer->ChangeUniqueId(_id);
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 ModuleVideoRenderImpl::TimeUntilNextProcess()
+{
+    // Not used
+    return 50;
+}
+WebRtc_Word32 ModuleVideoRenderImpl::Process()
+{
+    // Not used
+    return 0;
+}
+
+void*
+ModuleVideoRenderImpl::Window()
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+    return _ptrWindow;
+}
+
+WebRtc_Word32 ModuleVideoRenderImpl::ChangeWindow(void* window)
+{
+
+    CriticalSectionScoped cs(&_moduleCrit);
+
+#ifdef WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
+
+#if defined(MAC_IPHONE) // MAC_IPHONE must go before WEBRTC_MAC or WEBRTC_MAC_INTEL
+    _ptrRenderer = NULL;
+    delete _ptrRenderer;
+
+    VideoRenderIPhoneImpl* ptrRenderer;
+    ptrRenderer = new VideoRenderIPhoneImpl(_id, kRenderiPhone, window, _fullScreen);
+    if (!ptrRenderer)
+    {
+        return -1;
+    }
+    _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+    return _ptrRenderer->ChangeWindow(window);
+
+#elif defined(WEBRTC_MAC) | defined(WEBRTC_MAC_INTEL)
+
+    _ptrRenderer = NULL;
+    delete _ptrRenderer;
+
+#if defined(COCOA_RENDERING)
+    VideoRenderMacCocoaImpl* ptrRenderer;
+    ptrRenderer = new VideoRenderMacCocoaImpl(_id, kRenderCocoa, window, _fullScreen);
+#elif defined(CARBON_RENDERING)
+    VideoRenderMacCarbonImpl* ptrRenderer;
+    ptrRenderer = new VideoRenderMacCarbonImpl(_id, kRenderCarbon, window, _fullScreen);
+#endif
+    if (!ptrRenderer)
+    {
+        return -1;
+    }
+    _ptrRenderer = reinterpret_cast<IVideoRender*>(ptrRenderer);
+    return _ptrRenderer->ChangeWindow(window);
+
+#else
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->ChangeWindow(window);
+
+#endif
+
+#else  // WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
+    return -1;
+#endif
+}
+
+WebRtc_Word32 ModuleVideoRenderImpl::Id()
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+    return _id;
+}
+
+WebRtc_UWord32 ModuleVideoRenderImpl::GetIncomingFrameRate(
+                                                           const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    MapItem* mapItem = _streamRenderMap.Find(streamId);
+    if (mapItem == NULL)
+    {
+        // This stream doesn't exist
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream doesn't exist", __FUNCTION__);
+        return 0;
+    }
+    IncomingVideoStream* incomingStream =
+            static_cast<IncomingVideoStream*> (mapItem->GetItem());
+    if (incomingStream == NULL)
+    {
+        // This should never happen
+        assert(false);
+        _streamRenderMap.Erase(mapItem);
+        return 0;
+    }
+    return incomingStream->IncomingRate();
+}
+
+VideoRenderCallback*
+ModuleVideoRenderImpl::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+                                               const WebRtc_UWord32 zOrder,
+                                               const float left,
+                                               const float top,
+                                               const float right,
+                                               const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return NULL;
+    }
+
+    if (_streamRenderMap.Find(streamId) != NULL)
+    {
+        // The stream already exists...
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream already exists", __FUNCTION__);
+        return NULL;
+    }
+
+    VideoRenderCallback* ptrRenderCallback =
+            _ptrRenderer->AddIncomingRenderStream(streamId, zOrder, left, top,
+                                                  right, bottom);
+    if (ptrRenderCallback == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: Can't create incoming stream in renderer",
+                     __FUNCTION__);
+        return NULL;
+    }
+
+    // Create platform independant code
+    IncomingVideoStream* ptrIncomingStream = new IncomingVideoStream(_id,
+                                                                     streamId);
+    if (ptrIncomingStream == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: Can't create incoming stream", __FUNCTION__);
+        return NULL;
+    }
+
+
+    if (ptrIncomingStream->SetRenderCallback(ptrRenderCallback) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: Can't set render callback", __FUNCTION__);
+        delete ptrIncomingStream;
+        _ptrRenderer->DeleteIncomingRenderStream(streamId);
+        return NULL;
+    }
+
+    VideoRenderCallback* moduleCallback =
+            ptrIncomingStream->ModuleCallback();
+
+    // Store the stream
+    _streamRenderMap.Insert(streamId, ptrIncomingStream);
+
+    return moduleCallback;
+}
+
+WebRtc_Word32 ModuleVideoRenderImpl::DeleteIncomingRenderStream(
+                                                                const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    MapItem* mapItem = _streamRenderMap.Find(streamId);
+    if (!mapItem)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream doesn't exist", __FUNCTION__);
+        return -1;
+    }
+
+    IncomingVideoStream* ptrIncomingStream =
+            static_cast<IncomingVideoStream*> (mapItem->GetItem());
+    delete ptrIncomingStream;
+    ptrIncomingStream = NULL;
+    _ptrRenderer->DeleteIncomingRenderStream(streamId);
+    _streamRenderMap.Erase(mapItem);
+
+    return 0;
+}
+
+WebRtc_Word32 ModuleVideoRenderImpl::AddExternalRenderCallback(
+                                                               const WebRtc_UWord32 streamId,
+                                                               VideoRenderCallback* renderObject)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    MapItem* mapItem = _streamRenderMap.Find(streamId);
+    if (!mapItem)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream doesn't exist", __FUNCTION__);
+        return -1;
+    }
+
+    IncomingVideoStream* ptrIncomingStream =
+            static_cast<IncomingVideoStream*> (mapItem->GetItem());
+    if (!ptrIncomingStream) {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: could not get stream", __FUNCTION__);
+        return -1;
+    }
+    return ptrIncomingStream->SetExternalCallback(renderObject);
+}
+
+WebRtc_Word32 ModuleVideoRenderImpl::GetIncomingRenderStreamProperties(
+                                                                       const WebRtc_UWord32 streamId,
+                                                                       WebRtc_UWord32& zOrder,
+                                                                       float& left,
+                                                                       float& top,
+                                                                       float& right,
+                                                                       float& bottom) const
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    return _ptrRenderer->GetIncomingRenderStreamProperties(streamId, zOrder,
+                                                           left, top, right,
+                                                           bottom);
+}
+
+WebRtc_UWord32 ModuleVideoRenderImpl::GetNumIncomingRenderStreams() const
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    return (WebRtc_UWord32) _streamRenderMap.Size();
+}
+
+bool ModuleVideoRenderImpl::HasIncomingRenderStream(
+                                                    const WebRtc_UWord32 streamId) const
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    bool hasStream = false;
+    if (_streamRenderMap.Find(streamId) != NULL)
+    {
+        hasStream = true;
+    }
+    return hasStream;
+}
+
+WebRtc_Word32 ModuleVideoRenderImpl::RegisterRawFrameCallback(
+                                                              const WebRtc_UWord32 streamId,
+                                                              VideoRenderCallback* callbackObj)
+{
+    return -1;
+}
+
+WebRtc_Word32 ModuleVideoRenderImpl::StartRender(const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    // Start the stream
+    MapItem* item = _streamRenderMap.Find(streamId);
+    if (item == NULL)
+    {
+        return -1;
+    }
+
+    IncomingVideoStream* incomingStream =
+            static_cast<IncomingVideoStream*> (item->GetItem());
+    if (incomingStream->Start() == -1)
+    {
+        return -1;
+    }
+
+    // Start the HW renderer
+    if (_ptrRenderer->StartRender() == -1)
+    {
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 ModuleVideoRenderImpl::StopRender(const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s(%d): No renderer", __FUNCTION__, streamId);
+        return -1;
+    }
+
+    // Stop the incoming stream
+    MapItem* item = _streamRenderMap.Find(streamId);
+    if (item == NULL)
+    {
+        return -1;
+    }
+
+    IncomingVideoStream* incomingStream =
+            static_cast<IncomingVideoStream*> (item->GetItem());
+    if (incomingStream->Stop() == -1)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 ModuleVideoRenderImpl::ResetRender()
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    WebRtc_Word32 error = 0;
+
+    // Loop through all incoming streams and stop them
+    MapItem* item = _streamRenderMap.First();
+    while (item)
+    {
+        IncomingVideoStream* incomingStream =
+                static_cast<IncomingVideoStream*> (item->GetItem());
+        if (incomingStream->Reset() == -1)
+        {
+            error = -1;
+        }
+        item = _streamRenderMap.Next(item);
+    }
+    return error;
+}
+
+RawVideoType ModuleVideoRenderImpl::PreferredVideoType() const
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (_ptrRenderer == NULL)
+    {
+        return kVideoI420;
+    }
+
+    return _ptrRenderer->PerferedVideoType();
+}
+
+bool ModuleVideoRenderImpl::IsFullScreen()
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return false;
+    }
+    return _ptrRenderer->FullScreen();
+}
+
+WebRtc_Word32 ModuleVideoRenderImpl::GetScreenResolution(
+                                                         WebRtc_UWord32& screenWidth,
+                                                         WebRtc_UWord32& screenHeight) const
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return false;
+    }
+    return _ptrRenderer->GetScreenResolution(screenWidth, screenHeight);
+}
+
+WebRtc_UWord32 ModuleVideoRenderImpl::RenderFrameRate(
+                                                      const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return false;
+    }
+    return _ptrRenderer->RenderFrameRate(streamId);
+}
+
+WebRtc_Word32 ModuleVideoRenderImpl::SetStreamCropping(
+                                                       const WebRtc_UWord32 streamId,
+                                                       const float left,
+                                                       const float top,
+                                                       const float right,
+                                                       const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return false;
+    }
+    return _ptrRenderer->SetStreamCropping(streamId, left, top, right, bottom);
+}
+
+WebRtc_Word32 ModuleVideoRenderImpl::SetTransparentBackground(const bool enable)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return false;
+    }
+    return _ptrRenderer->SetTransparentBackground(enable);
+}
+
+WebRtc_Word32 ModuleVideoRenderImpl::FullScreenRender(void* window,
+                                                      const bool enable)
+{
+    return -1;
+}
+
+WebRtc_Word32 ModuleVideoRenderImpl::SetText(
+                                             const WebRtc_UWord8 textId,
+                                             const WebRtc_UWord8* text,
+                                             const WebRtc_Word32 textLength,
+                                             const WebRtc_UWord32 textColorRef,
+                                             const WebRtc_UWord32 backgroundColorRef,
+                                             const float left, const float top,
+                                             const float right,
+                                             const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->SetText(textId, text, textLength, textColorRef,
+                                 backgroundColorRef, left, top, right, bottom);
+}
+
+WebRtc_Word32 ModuleVideoRenderImpl::SetBitmap(const void* bitMap,
+                                               const WebRtc_UWord8 pictureId,
+                                               const void* colorKey,
+                                               const float left,
+                                               const float top,
+                                               const float right,
+                                               const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+    return _ptrRenderer->SetBitmap(bitMap, pictureId, colorKey, left, top,
+                                   right, bottom);
+}
+
+WebRtc_Word32 ModuleVideoRenderImpl::GetLastRenderedFrame(
+                                                          const WebRtc_UWord32 streamId,
+                                                          VideoFrame &frame) const
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    MapItem *item = _streamRenderMap.Find(streamId);
+    if (item == NULL)
+    {
+        // This stream doesn't exist
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream doesn't exist", __FUNCTION__);
+        return 0;
+    }
+    IncomingVideoStream* incomingStream =
+            static_cast<IncomingVideoStream*> (item->GetItem());
+    if (incomingStream == NULL)
+    {
+        // This should never happen
+        assert(false);
+        _streamRenderMap.Erase(item);
+        return 0;
+    }
+    return incomingStream->GetLastRenderedFrame(frame);
+}
+
+WebRtc_Word32 ModuleVideoRenderImpl::ConfigureRenderer(
+                                                       const WebRtc_UWord32 streamId,
+                                                       const unsigned int zOrder,
+                                                       const float left,
+                                                       const float top,
+                                                       const float right,
+                                                       const float bottom)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return false;
+    }
+    return _ptrRenderer->ConfigureRenderer(streamId, zOrder, left, top, right,
+                                           bottom);
+}
+
+WebRtc_Word32 ModuleVideoRenderImpl::SetStartImage(
+                                                   const WebRtc_UWord32 streamId,
+                                                   const VideoFrame& videoFrame)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    MapItem *item = _streamRenderMap.Find(streamId);
+    if (item == NULL)
+    {
+        // This stream doesn't exist
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream doesn't exist", __FUNCTION__);
+        return -1;
+    }
+    IncomingVideoStream* incomingStream =
+            static_cast<IncomingVideoStream*> (item->GetItem());
+    if (incomingStream == NULL)
+    {
+        // This should never happen
+        assert(false);
+        _streamRenderMap.Erase(item);
+        return 0;
+    }
+    return incomingStream->SetStartImage(videoFrame);
+
+}
+
+WebRtc_Word32 ModuleVideoRenderImpl::SetTimeoutImage(
+                                                     const WebRtc_UWord32 streamId,
+                                                     const VideoFrame& videoFrame,
+                                                     const WebRtc_UWord32 timeout)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    MapItem *item = _streamRenderMap.Find(streamId);
+    if (item == NULL)
+    {
+        // This stream doesn't exist
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream doesn't exist", __FUNCTION__);
+        return -1;
+    }
+    IncomingVideoStream* incomingStream =
+            static_cast<IncomingVideoStream*> (item->GetItem());
+    if (incomingStream == NULL)
+    {
+        // This should never happen
+        assert(false);
+        _streamRenderMap.Erase(item);
+        return 0;
+    }
+    return incomingStream->SetTimeoutImage(videoFrame, timeout);
+}
+
+WebRtc_Word32 ModuleVideoRenderImpl::MirrorRenderStream(const int renderId,
+                                                        const bool enable,
+                                                        const bool mirrorXAxis,
+                                                        const bool mirrorYAxis)
+{
+    CriticalSectionScoped cs(&_moduleCrit);
+
+    if (!_ptrRenderer)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: No renderer", __FUNCTION__);
+        return -1;
+    }
+
+    MapItem *item = _streamRenderMap.Find(renderId);
+    if (item == NULL)
+    {
+        // This stream doesn't exist
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
+                     "%s: stream doesn't exist", __FUNCTION__);
+        return 0;
+    }
+    IncomingVideoStream* incomingStream =
+            static_cast<IncomingVideoStream*> (item->GetItem());
+    if (incomingStream == NULL)
+    {
+        // This should never happen
+        assert(false);
+        _streamRenderMap.Erase(item);
+        return 0;
+    }
+
+    return incomingStream->EnableMirroring(enable, mirrorXAxis, mirrorYAxis);
+}
+
+} //namespace webrtc
diff --git a/src/modules/video_render/main/source/video_render_impl.h b/src/modules/video_render/main/source/video_render_impl.h
new file mode 100644
index 0000000..ac50092
--- /dev/null
+++ b/src/modules/video_render/main/source/video_render_impl.h
@@ -0,0 +1,228 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
+
+#include "engine_configurations.h"
+#include "video_render.h"
+#include "map_wrapper.h"
+
+//#include "video_render_defines.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class IncomingVideoStream;
+class IVideoRender;
+class MapWrapper;
+
+// Class definitions
+class ModuleVideoRenderImpl: public VideoRender
+{
+public:
+    /*
+     *   VideoRenderer constructor/destructor
+     */
+    ModuleVideoRenderImpl(const WebRtc_Word32 id,
+                          const VideoRenderType videoRenderType,
+                          void* window, const bool fullscreen);
+
+    virtual ~ModuleVideoRenderImpl();
+
+    /*
+     *   Change the unique identifier of this object
+     */
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual WebRtc_Word32 TimeUntilNextProcess();
+    virtual WebRtc_Word32 Process();
+
+    /*
+     *   Returns the render window
+     */
+    virtual void* Window();
+
+    /*
+     *   Change render window
+     */
+    virtual WebRtc_Word32 ChangeWindow(void* window);
+
+    /*
+     *   Returns module id
+     */
+    WebRtc_Word32 Id();
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
+    /*
+     *   Add incoming render stream
+     */
+    virtual VideoRenderCallback
+            * AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+                                      const WebRtc_UWord32 zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+    /*
+     *   Delete incoming render stream
+     */
+    virtual WebRtc_Word32
+            DeleteIncomingRenderStream(const WebRtc_UWord32 streamId);
+
+    /*
+     *   Add incoming render callback, used for external rendering
+     */
+    virtual WebRtc_Word32
+            AddExternalRenderCallback(const WebRtc_UWord32 streamId,
+                                      VideoRenderCallback* renderObject);
+
+    /*
+     *   Get the porperties for an incoming render stream
+     */
+    virtual WebRtc_Word32
+            GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
+                                              WebRtc_UWord32& zOrder,
+                                              float& left, float& top,
+                                              float& right, float& bottom) const;
+    /*
+     *   Incoming frame rate for the specified stream.
+     */
+    virtual WebRtc_UWord32 GetIncomingFrameRate(const WebRtc_UWord32 streamId);
+
+    /*
+     *   Returns the number of incoming streams added to this render module
+     */
+    virtual WebRtc_UWord32 GetNumIncomingRenderStreams() const;
+
+    /*
+     *   Returns true if this render module has the streamId added, false otherwise.
+     */
+    virtual bool HasIncomingRenderStream(const WebRtc_UWord32 streamId) const;
+
+    /*
+     *
+     */
+    virtual WebRtc_Word32
+            RegisterRawFrameCallback(const WebRtc_UWord32 streamId,
+                                     VideoRenderCallback* callbackObj);
+
+    virtual WebRtc_Word32 GetLastRenderedFrame(const WebRtc_UWord32 streamId,
+                                               VideoFrame &frame) const;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    /*
+     *   Starts rendering the specified stream
+     */
+    virtual WebRtc_Word32 StartRender(const WebRtc_UWord32 streamId);
+
+    /*
+     *   Stops the renderer
+     */
+    virtual WebRtc_Word32 StopRender(const WebRtc_UWord32 streamId);
+
+    /*
+     *   Sets the renderer in start state, no streams removed.
+     */
+    virtual WebRtc_Word32 ResetRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    /*
+     *   Returns the prefered render video type
+     */
+    virtual RawVideoType PreferredVideoType() const;
+
+    /*
+     *   Returns true if the renderer is in fullscreen mode, otherwise false.
+     */
+    virtual bool IsFullScreen();
+
+    /*
+     *   Gets screen resolution in pixels
+     */
+    virtual WebRtc_Word32
+            GetScreenResolution(WebRtc_UWord32& screenWidth,
+                                WebRtc_UWord32& screenHeight) const;
+
+    /*
+     *   Get the actual render rate for this stream. I.e rendered frame rate,
+     *   not frames delivered to the renderer.
+     */
+    virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
+
+    /*
+     *   Set cropping of incoming stream
+     */
+    virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom);
+
+    virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
+                                            const unsigned int zOrder,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom);
+
+    virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
+
+    virtual WebRtc_Word32 FullScreenRender(void* window, const bool enable);
+
+    virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+                                    const WebRtc_UWord8 pictureId,
+                                    const void* colorKey, const float left,
+                                    const float top, const float right,
+                                    const float bottom);
+
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+                                  const WebRtc_UWord8* text,
+                                  const WebRtc_Word32 textLength,
+                                  const WebRtc_UWord32 textColorRef,
+                                  const WebRtc_UWord32 backgroundColorRef,
+                                  const float left, const float top,
+                                  const float right, const float bottom);
+
+    virtual WebRtc_Word32 SetStartImage(const WebRtc_UWord32 streamId,
+                                        const VideoFrame& videoFrame);
+
+    virtual WebRtc_Word32 SetTimeoutImage(const WebRtc_UWord32 streamId,
+                                          const VideoFrame& videoFrame,
+                                          const WebRtc_UWord32 timeout);
+
+    virtual WebRtc_Word32 MirrorRenderStream(const int renderId,
+                                             const bool enable,
+                                             const bool mirrorXAxis,
+                                             const bool mirrorYAxis);
+
+private:
+    WebRtc_Word32 _id;
+    CriticalSectionWrapper& _moduleCrit;
+    void* _ptrWindow;
+    bool _fullScreen;
+
+    IVideoRender* _ptrRenderer;
+    MapWrapper& _streamRenderMap;
+};
+
+} //namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_VIDEO_RENDER_IMPL_H_
diff --git a/src/modules/video_render/main/source/windows/i_video_render_win.h b/src/modules/video_render/main/source/windows/i_video_render_win.h
new file mode 100644
index 0000000..a765134
--- /dev/null
+++ b/src/modules/video_render/main/source/windows/i_video_render_win.h
@@ -0,0 +1,118 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
+
+#include "video_render.h"
+
+namespace webrtc {
+
+// Class definitions
+class IVideoRenderWin
+{
+public:
+    /**************************************************************************
+     *
+     *   Constructor/destructor
+     *
+     ***************************************************************************/
+    virtual ~IVideoRenderWin()
+    {
+    };
+
+    virtual WebRtc_Word32 Init() = 0;
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderCallback
+            * CreateChannel(const WebRtc_UWord32 streamId,
+                            const WebRtc_UWord32 zOrder,
+                            const float left,
+                            const float top,
+                            const float right,
+                            const float bottom) = 0;
+
+    virtual WebRtc_Word32 DeleteChannel(const WebRtc_UWord32 streamId) = 0;
+
+    virtual WebRtc_Word32 GetStreamSettings(const WebRtc_UWord32 channel,
+                                            const WebRtc_UWord16 streamId,
+                                            WebRtc_UWord32& zOrder,
+                                            float& left,
+                                            float& top,
+                                            float& right,
+                                            float& bottom) = 0;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual WebRtc_Word32 StartRender() = 0;
+
+    virtual WebRtc_Word32 StopRender() = 0;
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual bool IsFullScreen() = 0;
+
+    virtual WebRtc_Word32 SetCropping(const WebRtc_UWord32 channel,
+                                      const WebRtc_UWord16 streamId,
+                                      const float left,
+                                      const float top,
+                                      const float right,
+                                      const float bottom) = 0;
+
+    virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 channel,
+                                            const WebRtc_UWord16 streamId,
+                                            const unsigned int zOrder,
+                                            const float left,
+                                            const float top,
+                                            const float right,
+                                            const float bottom) = 0;
+
+    virtual WebRtc_Word32 SetTransparentBackground(const bool enable) = 0;
+
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+                                  const WebRtc_UWord8* text,
+                                  const WebRtc_Word32 textLength,
+                                  const WebRtc_UWord32 colorText,
+                                  const WebRtc_UWord32 colorBg,
+                                  const float left,
+                                  const float top,
+                                  const float rigth,
+                                  const float bottom) = 0;
+
+    virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+                                    const WebRtc_UWord8 pictureId,
+                                    const void* colorKey,
+                                    const float left,
+                                    const float top,
+                                    const float right,
+                                    const float bottom) = 0;
+
+    virtual WebRtc_Word32 ChangeWindow(void* window) = 0;
+
+    virtual WebRtc_Word32 GetGraphicsMemory(WebRtc_UWord64& totalMemory,
+                                            WebRtc_UWord64& availableMemory) = 0;
+
+};
+
+} //namespace webrtc
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_I_VIDEO_RENDER_WIN_H_
diff --git a/src/modules/video_render/main/source/windows/video_render_direct3d9.cc b/src/modules/video_render/main/source/windows/video_render_direct3d9.cc
new file mode 100644
index 0000000..309ccb3
--- /dev/null
+++ b/src/modules/video_render/main/source/windows/video_render_direct3d9.cc
@@ -0,0 +1,1191 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Own include file
+#include "video_render_direct3d9.h"
+
+// System include files
+#include <windows.h>
+
+// WebRtc include files
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "trace.h"
+#include "thread_wrapper.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+
+namespace webrtc {
+
+// A structure for our custom vertex type
+struct CUSTOMVERTEX
+{
+    FLOAT x, y, z;
+    DWORD color; // The vertex color
+    FLOAT u, v;
+};
+
+// Our custom FVF, which describes our custom vertex structure
+#define D3DFVF_CUSTOMVERTEX (D3DFVF_XYZ|D3DFVF_DIFFUSE|D3DFVF_TEX1)
+
+/*
+ *
+ *    D3D9Channel
+ *
+ */
+D3D9Channel::D3D9Channel(LPDIRECT3DDEVICE9 pd3DDevice,
+                                 CriticalSectionWrapper* critSect,
+                                 Trace* trace) :
+    _width(0),
+    _height(0),
+    _pd3dDevice(pd3DDevice),
+    _pTexture(NULL),
+    _bufferIsUpdated(false),
+    _critSect(critSect),
+    _streamId(0),
+    _zOrder(0),
+    _startWidth(0),
+    _startHeight(0),
+    _stopWidth(0),
+    _stopHeight(0)
+{
+
+}
+
+D3D9Channel::~D3D9Channel()
+{
+    //release the texture
+    if (_pTexture != NULL)
+    {
+        _pTexture->Release();
+        _pTexture = NULL;
+    }
+}
+
+void D3D9Channel::SetStreamSettings(WebRtc_UWord16 streamId,
+                                        WebRtc_UWord32 zOrder,
+                                        float startWidth,
+                                        float startHeight,
+                                        float stopWidth,
+                                        float stopHeight)
+{
+    _streamId = streamId;
+    _zOrder = zOrder;
+    _startWidth = startWidth;
+    _startHeight = startHeight;
+    _stopWidth = stopWidth;
+    _stopHeight = stopHeight;
+}
+
+int D3D9Channel::GetStreamSettings(WebRtc_UWord16 streamId,
+                                       WebRtc_UWord32& zOrder,
+                                       float& startWidth,
+                                       float& startHeight,
+                                       float& stopWidth,
+                                       float& stopHeight)
+{
+    streamId = _streamId;
+    zOrder = _zOrder;
+    startWidth = _startWidth;
+    startHeight = _startHeight;
+    stopWidth = _stopWidth;
+    stopHeight = _stopHeight;
+    return 0;
+}
+
+int D3D9Channel::GetTextureWidth()
+{
+    return _width;
+}
+
+int D3D9Channel::GetTextureHeight()
+{
+    return _height;
+}
+
+// Called from video engine when a the frame size changed
+int D3D9Channel::FrameSizeChange(int width, int height, int numberOfStreams)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                 "FrameSizeChange, wifth: %d, height: %d, streams: %d", width,
+                 height, numberOfStreams);
+
+    CriticalSectionScoped cs(_critSect);
+    _width = width;
+    _height = height;
+
+    //clean the previous texture
+    if (_pTexture != NULL)
+    {
+        _pTexture->Release();
+        _pTexture = NULL;
+    }
+
+    HRESULT ret = E_POINTER;
+
+    if (_pd3dDevice)
+      ret = _pd3dDevice->CreateTexture(_width, _height, 1, 0, D3DFMT_A8R8G8B8,
+                                       D3DPOOL_MANAGED, &_pTexture, NULL);
+
+    if (FAILED(ret))
+    {
+        _pTexture = NULL;
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 D3D9Channel::RenderFrame(const WebRtc_UWord32 streamId,
+                                           VideoFrame& videoFrame)
+{
+    CriticalSectionScoped cs(_critSect);
+    if (_width != videoFrame.Width() || _height != videoFrame.Height())
+    {
+        if (FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1)
+        {
+            return -1;
+        }
+    }
+    return DeliverFrame(videoFrame.Buffer(), videoFrame.Length(),
+                        videoFrame.TimeStamp());
+}
+
+// Called from video engine when a new frame should be rendered.
+int D3D9Channel::DeliverFrame(unsigned char* buffer,
+                                  int bufferSize,
+                                  unsigned int timeStamp90kHz)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
+                 "DeliverFrame to D3D9Channel");
+
+    CriticalSectionScoped cs(_critSect);
+
+    //FIXME if _bufferIsUpdated is still true (not be renderred), do we what to update the texture?)
+    //probably not
+    if (_bufferIsUpdated)
+    {
+        WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
+                     "Last frame hasn't been rendered yet. Drop this frame.");
+        return -1;
+    }
+
+    if (!_pd3dDevice)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "D3D for rendering not initialized.");
+        return -1;
+    }
+
+    if (!_pTexture)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Texture for rendering not initialized.");
+        return -1;
+    }
+
+    D3DLOCKED_RECT lr;
+
+    if (FAILED(_pTexture->LockRect(0, &lr, NULL, 0)))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Failed to lock a texture in D3D9 Channel.");
+        return -1;
+    }
+    UCHAR* pRect = (UCHAR*) lr.pBits;
+
+    ConvertFromI420(buffer, _width, kARGB, 0, _width, _height, pRect);
+
+    if (FAILED(_pTexture->UnlockRect(0)))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Failed to unlock a texture in D3D9 Channel.");
+        return -1;
+    }
+
+    _bufferIsUpdated = true;
+
+    return 0;
+}
+
+// Called by d3d channel owner to indicate the frame/texture has been rendered off
+int D3D9Channel::RenderOffFrame()
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
+                 "Frame has been rendered to the screen.");
+    CriticalSectionScoped cs(_critSect);
+    _bufferIsUpdated = false;
+    return 0;
+}
+
+// Called by d3d channel owner to check if the texture is updated
+int D3D9Channel::IsUpdated(bool& isUpdated)
+{
+    CriticalSectionScoped cs(_critSect);
+    isUpdated = _bufferIsUpdated;
+    return 0;
+}
+
+// Called by d3d channel owner to get the texture
+LPDIRECT3DTEXTURE9 D3D9Channel::GetTexture()
+{
+    CriticalSectionScoped cs(_critSect);
+    return _pTexture;
+}
+
+int D3D9Channel::ReleaseTexture()
+{
+    CriticalSectionScoped cs(_critSect);
+
+    //release the texture
+    if (_pTexture != NULL)
+    {
+        _pTexture->Release();
+        _pTexture = NULL;
+    }
+    _pd3dDevice = NULL;
+    return 0;
+}
+
+int D3D9Channel::RecreateTexture(LPDIRECT3DDEVICE9 pd3DDevice)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    _pd3dDevice = pd3DDevice;
+
+    if (_pTexture != NULL)
+    {
+        _pTexture->Release();
+        _pTexture = NULL;
+    }
+
+    HRESULT ret;
+
+    ret = _pd3dDevice->CreateTexture(_width, _height, 1, 0, D3DFMT_A8R8G8B8,
+                                     D3DPOOL_MANAGED, &_pTexture, NULL);
+
+    if (FAILED(ret))
+    {
+        _pTexture = NULL;
+        return -1;
+    }
+
+    return 0;
+}
+
+/*
+ *
+ *    VideoRenderDirect3D9
+ *
+ */
+VideoRenderDirect3D9::VideoRenderDirect3D9(Trace* trace,
+                                                   HWND hWnd,
+                                                   bool fullScreen) :
+    _refD3DCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _trace(trace),
+    _hWnd(hWnd),
+    _fullScreen(fullScreen),
+    _pTextureLogo(NULL),
+    _pVB(NULL),
+    _pd3dDevice(NULL),
+    _pD3D(NULL),
+    _d3dChannels(),
+    _d3dZorder(),
+    _screenUpdateThread(NULL),
+    _screenUpdateEvent(NULL),
+    _logoLeft(0),
+    _logoTop(0),
+    _logoRight(0),
+    _logoBottom(0),
+    _pd3dSurface(NULL),
+    _totalMemory(-1),
+    _availableMemory(-1)
+{
+    _screenUpdateThread = ThreadWrapper::CreateThread(ScreenUpdateThreadProc,
+                                                      this, kRealtimePriority);
+    _screenUpdateEvent = EventWrapper::Create();
+    SetRect(&_originalHwndRect, 0, 0, 0, 0);
+}
+
+VideoRenderDirect3D9::~VideoRenderDirect3D9()
+{
+    //NOTE: we should not enter CriticalSection in here!
+
+    // Signal event to exit thread, then delete it
+    ThreadWrapper* tmpPtr = _screenUpdateThread;
+    _screenUpdateThread = NULL;
+    if (tmpPtr)
+    {
+        tmpPtr->SetNotAlive();
+        _screenUpdateEvent->Set();
+        _screenUpdateEvent->StopTimer();
+
+        if (tmpPtr->Stop())
+        {
+            delete tmpPtr;
+        }
+    }
+    delete _screenUpdateEvent;
+
+    //close d3d device
+    CloseDevice();
+
+    // Delete all channels
+    std::map<int, D3D9Channel*>::iterator it = _d3dChannels.begin();
+    while (it != _d3dChannels.end())
+    {
+        delete it->second;
+        it = _d3dChannels.erase(it);
+    }
+    // Clean the zOrder map
+    _d3dZorder.clear();
+
+    if (_fullScreen)
+    {
+        // restore hwnd to original size and position
+        ::SetWindowPos(_hWnd, HWND_NOTOPMOST, _originalHwndRect.left,
+                       _originalHwndRect.top, _originalHwndRect.right
+                               - _originalHwndRect.left,
+                       _originalHwndRect.bottom - _originalHwndRect.top,
+                       SWP_FRAMECHANGED);
+        ::RedrawWindow(_hWnd, NULL, NULL, RDW_INVALIDATE | RDW_UPDATENOW
+                | RDW_ERASE);
+        ::RedrawWindow(NULL, NULL, NULL, RDW_INVALIDATE | RDW_UPDATENOW
+                | RDW_ERASE);
+    }
+
+    delete &_refD3DCritsect;
+}
+
+DWORD VideoRenderDirect3D9::GetVertexProcessingCaps()
+{
+    D3DCAPS9 caps;
+    DWORD dwVertexProcessing = D3DCREATE_SOFTWARE_VERTEXPROCESSING;
+    if (SUCCEEDED(_pD3D->GetDeviceCaps(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL,
+                                       &caps)))
+    {
+        if ((caps.DevCaps & D3DDEVCAPS_HWTRANSFORMANDLIGHT)
+                == D3DDEVCAPS_HWTRANSFORMANDLIGHT)
+        {
+            dwVertexProcessing = D3DCREATE_HARDWARE_VERTEXPROCESSING;
+        }
+    }
+    return dwVertexProcessing;
+}
+
+int VideoRenderDirect3D9::InitializeD3D(HWND hWnd,
+                                            D3DPRESENT_PARAMETERS* pd3dpp)
+{
+    // initialize Direct3D
+    if (NULL == (_pD3D = Direct3DCreate9(D3D_SDK_VERSION)))
+    {
+        return -1;
+    }
+
+    // determine what type of vertex processing to use based on the device capabilities
+    DWORD dwVertexProcessing = GetVertexProcessingCaps();
+
+    // get the display mode
+    D3DDISPLAYMODE d3ddm;
+    _pD3D->GetAdapterDisplayMode(D3DADAPTER_DEFAULT, &d3ddm);
+    pd3dpp->BackBufferFormat = d3ddm.Format;
+
+    // create the D3D device
+    if (FAILED(_pD3D->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_HAL, hWnd,
+                                   dwVertexProcessing | D3DCREATE_MULTITHREADED
+                                           | D3DCREATE_FPU_PRESERVE, pd3dpp,
+                                   &_pd3dDevice)))
+    {
+        //try the ref device
+        if (FAILED(_pD3D->CreateDevice(D3DADAPTER_DEFAULT, D3DDEVTYPE_REF,
+                                       hWnd, dwVertexProcessing
+                                               | D3DCREATE_MULTITHREADED
+                                               | D3DCREATE_FPU_PRESERVE,
+                                       pd3dpp, &_pd3dDevice)))
+        {
+            return -1;
+        }
+    }
+
+    return 0;
+}
+
+int VideoRenderDirect3D9::ResetDevice()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                 "VideoRenderDirect3D9::ResetDevice");
+
+    CriticalSectionScoped cs(&_refD3DCritsect);
+
+    //release the channel texture
+    std::map<int, D3D9Channel*>::iterator it;
+    it = _d3dChannels.begin();
+    while (it != _d3dChannels.end())
+    {
+        if (it->second)
+        {
+            it->second->ReleaseTexture();
+        }
+        it++;
+    }
+
+    //close d3d device
+    if (CloseDevice() != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "VideoRenderDirect3D9::ResetDevice failed to CloseDevice");
+        return -1;
+    }
+
+    //reinit d3d device
+    if (InitDevice() != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "VideoRenderDirect3D9::ResetDevice failed to InitDevice");
+        return -1;
+    }
+
+    //recreate channel texture
+    it = _d3dChannels.begin();
+    while (it != _d3dChannels.end())
+    {
+        if (it->second)
+        {
+            it->second->RecreateTexture(_pd3dDevice);
+        }
+        it++;
+    }
+
+    return 0;
+}
+
+int VideoRenderDirect3D9::InitDevice()
+{
+    // Set up the structure used to create the D3DDevice
+    ZeroMemory(&_d3dpp, sizeof(_d3dpp));
+    _d3dpp.SwapEffect = D3DSWAPEFFECT_DISCARD;
+    _d3dpp.BackBufferFormat = D3DFMT_A8R8G8B8;
+    if (GetWindowRect(_hWnd, &_originalHwndRect) == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "VideoRenderDirect3D9::InitDevice Could not get window size");
+        return -1;
+    }
+    if (!_fullScreen)
+    {
+        _winWidth = _originalHwndRect.right - _originalHwndRect.left;
+        _winHeight = _originalHwndRect.bottom - _originalHwndRect.top;
+        _d3dpp.Windowed = TRUE;
+        _d3dpp.BackBufferHeight = 0;
+        _d3dpp.BackBufferWidth = 0;
+    }
+    else
+    {
+        _winWidth = (LONG) ::GetSystemMetrics(SM_CXSCREEN);
+        _winHeight = (LONG) ::GetSystemMetrics(SM_CYSCREEN);
+        _d3dpp.Windowed = FALSE;
+        _d3dpp.BackBufferWidth = _winWidth;
+        _d3dpp.BackBufferHeight = _winHeight;
+        _d3dpp.PresentationInterval = D3DPRESENT_INTERVAL_IMMEDIATE;
+    }
+
+    if (InitializeD3D(_hWnd, &_d3dpp) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "VideoRenderDirect3D9::InitDevice failed in InitializeD3D");
+        return -1;
+    }
+
+    // Turn off culling, so we see the front and back of the triangle
+    _pd3dDevice->SetRenderState(D3DRS_CULLMODE, D3DCULL_NONE);
+
+    // Turn off D3D lighting, since we are providing our own vertex colors
+    _pd3dDevice->SetRenderState(D3DRS_LIGHTING, FALSE);
+
+    // Settings for alpha blending
+    _pd3dDevice->SetRenderState(D3DRS_ALPHABLENDENABLE, TRUE);
+    _pd3dDevice->SetRenderState(D3DRS_SRCBLEND, D3DBLEND_SRCALPHA);
+    _pd3dDevice->SetRenderState(D3DRS_DESTBLEND, D3DBLEND_INVSRCALPHA);
+
+    _pd3dDevice->SetSamplerState( 0, D3DSAMP_MINFILTER, D3DTEXF_LINEAR );
+    _pd3dDevice->SetSamplerState( 0, D3DSAMP_MAGFILTER, D3DTEXF_LINEAR );
+    _pd3dDevice->SetSamplerState( 0, D3DSAMP_MIPFILTER, D3DTEXF_LINEAR );
+
+    // Initialize Vertices
+    CUSTOMVERTEX Vertices[] = {
+            //front
+            { -1.0f, -1.0f, 0.0f, 0xffffffff, 0, 1 }, { -1.0f, 1.0f, 0.0f,
+                    0xffffffff, 0, 0 },
+            { 1.0f, -1.0f, 0.0f, 0xffffffff, 1, 1 }, { 1.0f, 1.0f, 0.0f,
+                    0xffffffff, 1, 0 } };
+
+    // Create the vertex buffer. 
+    if (FAILED(_pd3dDevice->CreateVertexBuffer(sizeof(Vertices), 0,
+                                               D3DFVF_CUSTOMVERTEX,
+                                               D3DPOOL_DEFAULT, &_pVB, NULL )))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Failed to create the vertex buffer.");
+        return -1;
+    }
+
+    // Now we fill the vertex buffer.
+    VOID* pVertices;
+    if (FAILED(_pVB->Lock(0, sizeof(Vertices), (void**) &pVertices, 0)))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Failed to lock the vertex buffer.");
+        return -1;
+    }
+    memcpy(pVertices, Vertices, sizeof(Vertices));
+    _pVB->Unlock();
+
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::Init()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                 "VideoRenderDirect3D9::Init");
+
+    CriticalSectionScoped cs(&_refD3DCritsect);
+
+    // Start rendering thread...
+    if (!_screenUpdateThread)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Thread not created");
+        return -1;
+    }
+    unsigned int threadId;
+    _screenUpdateThread->Start(threadId);
+
+    // Start the event triggering the render process
+    unsigned int monitorFreq = 60;
+    DEVMODE dm;
+    // initialize the DEVMODE structure
+    ZeroMemory(&dm, sizeof(dm));
+    dm.dmSize = sizeof(dm);
+    if (0 != EnumDisplaySettings(NULL, ENUM_CURRENT_SETTINGS, &dm))
+    {
+        monitorFreq = dm.dmDisplayFrequency;
+    }
+    _screenUpdateEvent->StartTimer(true, 1000 / monitorFreq);
+
+    return InitDevice();
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::ChangeWindow(void* window)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return -1;
+}
+
+int VideoRenderDirect3D9::UpdateRenderSurface()
+{
+    CriticalSectionScoped cs(&_refD3DCritsect);
+
+    // Check if there are any updated buffers
+    bool updated = false;
+    std::map<int, D3D9Channel*>::iterator it;
+    it = _d3dChannels.begin();
+    while (it != _d3dChannels.end())
+    {
+
+        D3D9Channel* channel = it->second;
+        channel->IsUpdated(updated);
+        if (updated)
+        {
+            break;
+        }
+        it++;
+    }
+    //nothing is updated, continue
+    if (!updated)
+        return -1;
+
+    // Clear the backbuffer to a black color
+    _pd3dDevice->Clear(0, NULL, D3DCLEAR_TARGET, D3DCOLOR_XRGB(0, 0, 0), 1.0f,
+                       0);
+
+    // Begin the scene
+    if (SUCCEEDED(_pd3dDevice->BeginScene()))
+    {
+        _pd3dDevice->SetStreamSource(0, _pVB, 0, sizeof(CUSTOMVERTEX));
+        _pd3dDevice->SetFVF(D3DFVF_CUSTOMVERTEX);
+
+        D3DXMATRIX matWorld;
+        D3DXMATRIX matWorldTemp;
+
+        //draw all the channels
+        //get texture from the channels
+        LPDIRECT3DTEXTURE9 textureFromChannel = NULL;
+        DWORD textureWidth, textureHeight;
+
+        std::multimap<int, unsigned int>::reverse_iterator it;
+        it = _d3dZorder.rbegin();
+        while (it != _d3dZorder.rend())
+        {
+            // loop through all channels and streams in Z order
+            int channel = it->second & 0x0000ffff;
+
+            std::map<int, D3D9Channel*>::iterator ddIt;
+            ddIt = _d3dChannels.find(channel);
+            if (ddIt != _d3dChannels.end())
+            {
+                // found the channel
+                D3D9Channel* channelObj = ddIt->second;
+                if (channelObj)
+                {
+                    textureFromChannel = channelObj->GetTexture();
+                    textureWidth = channelObj->GetTextureWidth();
+                    textureHeight = channelObj->GetTextureHeight();
+
+                    WebRtc_UWord32 zOrder;
+                    float startWidth, startHeight, stopWidth, stopHeight;
+                    channelObj->GetStreamSettings(0, zOrder, startWidth,
+                                                  startHeight, stopWidth,
+                                                  stopHeight);
+
+                    //draw the video stream
+                    UpdateVerticeBuffer(_pVB, 0, startWidth, startHeight,
+                                        stopWidth, stopHeight);
+                    _pd3dDevice->SetTexture(0, textureFromChannel);
+                    _pd3dDevice->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);
+
+                    //Notice channel that this frame as been rendered
+                    channelObj->RenderOffFrame();
+                }
+            }
+            it++;
+        }
+
+        //draw the logo
+        if (_pTextureLogo)
+        {
+            UpdateVerticeBuffer(_pVB, 0, _logoLeft, _logoTop, _logoRight,
+                                _logoBottom);
+            _pd3dDevice->SetTexture(0, _pTextureLogo);
+            _pd3dDevice->DrawPrimitive(D3DPT_TRIANGLESTRIP, 0, 2);
+        }
+
+        // End the scene
+        _pd3dDevice->EndScene();
+    }
+
+    // Present the backbuffer contents to the display
+    _pd3dDevice->Present(NULL, NULL, NULL, NULL );
+
+    return 0;
+}
+
+//set the  alpha value of the pixal with a particular colorkey as 0
+int VideoRenderDirect3D9::SetTransparentColor(LPDIRECT3DTEXTURE9 pTexture,
+                                                  DDCOLORKEY* transparentColorKey,
+                                                  DWORD width,
+                                                  DWORD height)
+{
+    D3DLOCKED_RECT lr;
+    if (!pTexture)
+        return -1;
+
+    CriticalSectionScoped cs(&_refD3DCritsect);
+    if (SUCCEEDED(pTexture->LockRect(0, &lr, NULL, D3DLOCK_DISCARD)))
+    {
+        for (DWORD y = 0; y < height; y++)
+        {
+            DWORD dwOffset = y * width;
+
+            for (DWORD x = 0; x < width; x)
+            {
+                DWORD temp = ((DWORD*) lr.pBits)[dwOffset + x];
+                if ((temp & 0x00FFFFFF)
+                        == transparentColorKey->dwColorSpaceLowValue)
+                {
+                    temp &= 0x00FFFFFF;
+                }
+                else
+                {
+                    temp |= 0xFF000000;
+                }
+                ((DWORD*) lr.pBits)[dwOffset + x] = temp;
+                x++;
+            }
+        }
+        pTexture->UnlockRect(0);
+        return 0;
+    }
+    return -1;
+}
+
+/*
+ *
+ *    Rendering process
+ *
+ */
+bool VideoRenderDirect3D9::ScreenUpdateThreadProc(void* obj)
+{
+    return static_cast<VideoRenderDirect3D9*> (obj)->ScreenUpdateProcess();
+}
+
+bool VideoRenderDirect3D9::ScreenUpdateProcess()
+{
+    _screenUpdateEvent->Wait(100);
+
+    if (!_screenUpdateThread)
+    {
+        //stop the thread
+        return false;
+    }
+    if (!_pd3dDevice)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "d3dDevice not created.");
+        return true;
+    }
+
+    HRESULT hr = _pd3dDevice->TestCooperativeLevel();
+
+    if (SUCCEEDED(hr))
+    {
+        UpdateRenderSurface();
+    }
+
+    if (hr == D3DERR_DEVICELOST)
+    {
+        //Device is lost and cannot be reset yet
+
+    }
+    else if (hr == D3DERR_DEVICENOTRESET)
+    {
+        //Lost but we can reset it now
+        //Note: the standard way is to call Reset, however for some reason doesn't work here.
+        //so we will release the device and create it again.
+        ResetDevice();
+    }
+
+    return true;
+}
+
+int VideoRenderDirect3D9::CloseDevice()
+{
+    CriticalSectionScoped cs(&_refD3DCritsect);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                 "VideoRenderDirect3D9::CloseDevice");
+
+    if (_pTextureLogo != NULL)
+    {
+        _pTextureLogo->Release();
+        _pTextureLogo = NULL;
+    }
+
+    if (_pVB != NULL)
+    {
+        _pVB->Release();
+        _pVB = NULL;
+    }
+
+    if (_pd3dDevice != NULL)
+    {
+        _pd3dDevice->Release();
+        _pd3dDevice = NULL;
+    }
+
+    if (_pD3D != NULL)
+    {
+        _pD3D->Release();
+        _pD3D = NULL;
+    }
+
+    if (_pd3dSurface != NULL)
+        _pd3dSurface->Release();
+    return 0;
+}
+
+D3D9Channel* VideoRenderDirect3D9::GetD3DChannel(int channel)
+{
+    std::map<int, D3D9Channel*>::iterator ddIt;
+    ddIt = _d3dChannels.find(channel & 0x0000ffff);
+    D3D9Channel* ddobj = NULL;
+    if (ddIt != _d3dChannels.end())
+    {
+        ddobj = ddIt->second;
+    }
+    if (ddobj == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D render failed to find channel");
+        return NULL;
+    }
+    return ddobj;
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::DeleteChannel(const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(&_refD3DCritsect);
+
+
+    std::multimap<int, unsigned int>::iterator it;
+    it = _d3dZorder.begin();
+    while (it != _d3dZorder.end())
+    {
+        if ((streamId & 0x0000ffff) == (it->second & 0x0000ffff))
+        {
+            it = _d3dZorder.erase(it);
+            break;
+        }
+        it++;
+    }
+
+    std::map<int, D3D9Channel*>::iterator ddIt;
+    ddIt = _d3dChannels.find(streamId & 0x0000ffff);
+    if (ddIt != _d3dChannels.end())
+    {
+        delete ddIt->second;
+        _d3dChannels.erase(ddIt);        
+        return 0;
+    }
+    return -1;
+}
+
+VideoRenderCallback* VideoRenderDirect3D9::CreateChannel(const WebRtc_UWord32 channel,
+                                                                 const WebRtc_UWord32 zOrder,
+                                                                 const float left,
+                                                                 const float top,
+                                                                 const float right,
+                                                                 const float bottom)
+{
+    CriticalSectionScoped cs(&_refD3DCritsect);
+
+    //FIXME this should be done in VideoAPIWindows? stop the frame deliver first
+    //remove the old channel	
+    DeleteChannel(channel);
+
+    D3D9Channel* d3dChannel = new D3D9Channel(_pd3dDevice,
+                                                      &_refD3DCritsect, _trace);
+    d3dChannel->SetStreamSettings(0, zOrder, left, top, right, bottom);
+
+    // store channel
+    _d3dChannels[channel & 0x0000ffff] = d3dChannel;
+
+    // store Z order
+    // default streamID is 0
+    _d3dZorder.insert(
+                      std::pair<int, unsigned int>(zOrder, channel & 0x0000ffff));
+
+    return d3dChannel;
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::GetStreamSettings(const WebRtc_UWord32 channel,
+                                                          const WebRtc_UWord16 streamId,
+                                                          WebRtc_UWord32& zOrder,
+                                                          float& left,
+                                                          float& top,
+                                                          float& right,
+                                                          float& bottom)
+{
+    std::map<int, D3D9Channel*>::iterator ddIt;
+    ddIt = _d3dChannels.find(channel & 0x0000ffff);
+    D3D9Channel* ddobj = NULL;
+    if (ddIt != _d3dChannels.end())
+    {
+        ddobj = ddIt->second;
+    }
+    if (ddobj == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D render failed to find channel");
+        return -1;
+    }
+    // Only allow one stream per channel, demuxing is 
+    return ddobj->GetStreamSettings(0, zOrder, left, top, right, bottom);
+    //return ddobj->GetStreamSettings(streamId, zOrder, left, top, right, bottom);    
+}
+
+int VideoRenderDirect3D9::UpdateVerticeBuffer(LPDIRECT3DVERTEXBUFFER9 pVB,
+                                                  int offset,
+                                                  float startWidth,
+                                                  float startHeight,
+                                                  float stopWidth,
+                                                  float stopHeight)
+{
+    if (pVB == NULL)
+        return -1;
+
+    float left, right, top, bottom;
+
+    //update the vertice buffer
+    //0,1 => -1,1
+    left = startWidth * 2 - 1;
+    right = stopWidth * 2 - 1;
+
+    //0,1 => 1,-1
+    top = 1 - startHeight * 2;
+    bottom = 1 - stopHeight * 2;
+
+    CUSTOMVERTEX newVertices[] = {
+            //logo
+            { left, bottom, 0.0f, 0xffffffff, 0, 1 }, { left, top, 0.0f,
+                    0xffffffff, 0, 0 },
+            { right, bottom, 0.0f, 0xffffffff, 1, 1 }, { right, top, 0.0f,
+                    0xffffffff, 1, 0 }, };
+    // Now we fill the vertex buffer.
+    VOID* pVertices;
+    if (FAILED(pVB->Lock(sizeof(CUSTOMVERTEX) * offset, sizeof(newVertices),
+                         (void**) &pVertices, 0)))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Failed to lock the vertex buffer.");
+        return -1;
+    }
+    memcpy(pVertices, newVertices, sizeof(newVertices));
+    pVB->Unlock();
+
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::StartRender()
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::StopRender()
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+bool VideoRenderDirect3D9::IsFullScreen()
+{
+    return _fullScreen;
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::SetCropping(const WebRtc_UWord32 channel,
+                                                    const WebRtc_UWord16 streamId,
+                                                    const float left,
+                                                    const float top,
+                                                    const float right,
+                                                    const float bottom)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::SetTransparentBackground(
+                                                                 const bool enable)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::SetText(const WebRtc_UWord8 textId,
+                                                const WebRtc_UWord8* text,
+                                                const WebRtc_Word32 textLength,
+                                                const WebRtc_UWord32 colorText,
+                                                const WebRtc_UWord32 colorBg,
+                                                const float left,
+                                                const float top,
+                                                const float rigth,
+                                                const float bottom)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::SetBitmap(const void* bitMap,
+                                                  const WebRtc_UWord8 pictureId,
+                                                  const void* colorKey,
+                                                  const float left,
+                                                  const float top,
+                                                  const float right,
+                                                  const float bottom)
+{
+    if (!bitMap)
+    {
+        if (_pTextureLogo != NULL)
+        {
+            _pTextureLogo->Release();
+            _pTextureLogo = NULL;
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1, "Remove bitmap.");
+        return 0;
+    }
+
+    // sanity
+    if (left > 1.0f || left < 0.0f ||
+        top > 1.0f || top < 0.0f ||
+        right > 1.0f || right < 0.0f ||
+        bottom > 1.0f || bottom < 0.0f)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D SetBitmap invalid parameter");
+        return -1;
+    }
+
+    if ((bottom <= top) || (right <= left))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D SetBitmap invalid parameter");
+        return -1;
+    }
+
+    CriticalSectionScoped cs(&_refD3DCritsect);
+
+    unsigned char* srcPtr;
+    HGDIOBJ oldhand;
+    BITMAPINFO pbi;
+    BITMAP bmap;
+    HDC hdcNew;
+    hdcNew = CreateCompatibleDC(0);
+    // Fill out the BITMAP structure.
+    GetObject((HBITMAP)bitMap, sizeof(bmap), &bmap);
+    //Select the bitmap handle into the new device context.
+    oldhand = SelectObject(hdcNew, (HGDIOBJ) bitMap);
+    // we are done with this object
+    DeleteObject(oldhand);
+    pbi.bmiHeader.biSize = 40;
+    pbi.bmiHeader.biWidth = bmap.bmWidth;
+    pbi.bmiHeader.biHeight = bmap.bmHeight;
+    pbi.bmiHeader.biPlanes = 1;
+    pbi.bmiHeader.biBitCount = bmap.bmBitsPixel;
+    pbi.bmiHeader.biCompression = BI_RGB;
+    pbi.bmiHeader.biSizeImage = bmap.bmWidth * bmap.bmHeight * 3;
+    srcPtr = new unsigned char[bmap.bmWidth * bmap.bmHeight * 4];
+    // the original un-stretched image in RGB24
+    int pixelHeight = GetDIBits(hdcNew, (HBITMAP)bitMap, 0, bmap.bmHeight, srcPtr, &pbi,
+                                DIB_RGB_COLORS);
+    if (pixelHeight == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D failed to GetDIBits in SetBitmap");
+        return -1;
+    }
+    DeleteDC(hdcNew);
+    if (pbi.bmiHeader.biBitCount != 24 && pbi.bmiHeader.biBitCount != 32)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D failed to SetBitmap invalid bit depth");
+        return -1;
+    }
+
+    HRESULT ret;
+    //release the previous logo texture
+    if (_pTextureLogo != NULL)
+    {
+        _pTextureLogo->Release();
+        _pTextureLogo = NULL;
+    }
+    ret = _pd3dDevice->CreateTexture(bmap.bmWidth, bmap.bmHeight, 1, 0,
+                                     D3DFMT_A8R8G8B8, D3DPOOL_MANAGED,
+                                     &_pTextureLogo, NULL);
+    if (FAILED(ret))
+    {
+        _pTextureLogo = NULL;
+        return -1;
+    }
+    if (!_pTextureLogo)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Texture for rendering not initialized.");
+        return -1;
+    }
+
+    D3DLOCKED_RECT lr;
+    if (FAILED(_pTextureLogo->LockRect(0, &lr, NULL, 0)))
+    {
+        return -1;
+    }
+    unsigned char* dstPtr = (UCHAR*) lr.pBits;
+    int pitch = bmap.bmWidth * 4;
+
+    if (pbi.bmiHeader.biBitCount == 24)
+    {       
+        ConvertRGB24ToARGB(srcPtr, dstPtr, bmap.bmWidth, bmap.bmHeight, 0);
+    }
+    else
+    {
+        unsigned char* srcTmp = srcPtr + (bmap.bmWidth * 4) * (bmap.bmHeight - 1);
+        for (int i = 0; i < bmap.bmHeight; ++i)
+        {
+            memcpy(dstPtr, srcTmp, bmap.bmWidth * 4);
+            srcTmp -= bmap.bmWidth * 4;
+            dstPtr += pitch;
+        }
+    }
+
+    delete srcPtr;
+    if (FAILED(_pTextureLogo->UnlockRect(0)))
+    {
+        return -1;
+    }
+
+    if (colorKey)
+    {
+        DDCOLORKEY* ddColorKey =
+                static_cast<DDCOLORKEY*> (const_cast<void*> (colorKey));
+        SetTransparentColor(_pTextureLogo, ddColorKey, bmap.bmWidth,
+                            bmap.bmHeight);
+    }
+
+    //update the vertice buffer
+    //0,1 => -1,1
+    _logoLeft = left;
+    _logoRight = right;
+
+    //0,1 => 1,-1
+    _logoTop = top;
+    _logoBottom = bottom;
+
+    return 0;
+
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::GetGraphicsMemory(WebRtc_UWord64& totalMemory,
+                                                          WebRtc_UWord64& availableMemory)
+{
+    if (_totalMemory == -1 || _availableMemory == -1)
+    {
+        totalMemory = 0;
+        availableMemory = 0;
+        return -1;
+    }
+    totalMemory = _totalMemory;
+    availableMemory = _availableMemory;
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderDirect3D9::ConfigureRenderer(const WebRtc_UWord32 channel,
+                                                          const WebRtc_UWord16 streamId,
+                                                          const unsigned int zOrder,
+                                                          const float left,
+                                                          const float top,
+                                                          const float right,
+                                                          const float bottom)
+{
+    std::map<int, D3D9Channel*>::iterator ddIt;
+    ddIt = _d3dChannels.find(channel & 0x0000ffff);
+    D3D9Channel* ddobj = NULL;
+    if (ddIt != _d3dChannels.end())
+    {
+        ddobj = ddIt->second;
+    }
+    if (ddobj == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct3D render failed to find channel");
+        return -1;
+    }
+    // Only allow one stream per channel, demuxing is 
+    ddobj->SetStreamSettings(0, zOrder, left, top, right, bottom);
+
+    return 0;
+}
+
+} //namespace webrtc
+
diff --git a/src/modules/video_render/main/source/windows/video_render_direct3d9.h b/src/modules/video_render/main/source/windows/video_render_direct3d9.h
new file mode 100644
index 0000000..6d6fef3
--- /dev/null
+++ b/src/modules/video_render/main/source/windows/video_render_direct3d9.h
@@ -0,0 +1,267 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
+
+// WebRtc includes
+#include "i_video_render_win.h"
+
+#include <d3d9.h>
+#include <d3dx9.h>
+#include "ddraw.h"
+
+#include <Map>
+
+// Added
+#include "video_render_defines.h"
+
+#pragma comment(lib, "d3d9.lib")       // located in DirectX SDK
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class EventWrapper;
+class Trace;
+class ThreadWrapper;
+
+class D3D9Channel: public VideoRenderCallback
+{
+public:
+    D3D9Channel(LPDIRECT3DDEVICE9 pd3DDevice,
+                    CriticalSectionWrapper* critSect, Trace* trace);
+
+    virtual ~D3D9Channel();
+
+    // Inherited from VideoRencerCallback, called from VideoAPI class.
+    // Called when the incomming frame size and/or number of streams in mix changes
+    virtual int FrameSizeChange(int width, int height, int numberOfStreams);
+
+    // A new frame is delivered
+    virtual int DeliverFrame(unsigned char* buffer,
+                             int bufferSize,
+                             unsigned int timeStame90kHz);
+    virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
+                                      VideoFrame& videoFrame);
+
+    // Called to check if the video frame is updated.
+    int IsUpdated(bool& isUpdated);
+    // Called after the video frame has been render to the screen
+    int RenderOffFrame();
+    // Called to get the texture that contains the video frame
+    LPDIRECT3DTEXTURE9 GetTexture();
+    // Called to get the texture(video frame) size
+    int GetTextureWidth();
+    int GetTextureHeight();
+    //
+    void SetStreamSettings(WebRtc_UWord16 streamId,
+                           WebRtc_UWord32 zOrder,
+                           float startWidth,
+                           float startHeight,
+                           float stopWidth,
+                           float stopHeight);
+    int GetStreamSettings(WebRtc_UWord16 streamId,
+                          WebRtc_UWord32& zOrder,
+                          float& startWidth,
+                          float& startHeight,
+                          float& stopWidth,
+                          float& stopHeight);
+
+    int ReleaseTexture();
+    int RecreateTexture(LPDIRECT3DDEVICE9 pd3DDevice);
+
+protected:
+
+private:
+    //critical section passed from the owner
+    CriticalSectionWrapper* _critSect;
+    LPDIRECT3DDEVICE9 _pd3dDevice;
+    LPDIRECT3DTEXTURE9 _pTexture;
+
+    bool _bufferIsUpdated;
+    // the frame size
+    int _width;
+    int _height;
+    //sream settings
+    //TODO support multiple streams in one channel
+    WebRtc_UWord16 _streamId;
+    WebRtc_UWord32 _zOrder;
+    float _startWidth;
+    float _startHeight;
+    float _stopWidth;
+    float _stopHeight;
+};
+
+class VideoRenderDirect3D9: IVideoRenderWin
+{
+public:
+    VideoRenderDirect3D9(Trace* trace, HWND hWnd, bool fullScreen);
+    ~VideoRenderDirect3D9();
+
+public:
+    //IVideoRenderWin
+
+    /**************************************************************************
+     *
+     *   Init
+     *
+     ***************************************************************************/
+    virtual WebRtc_Word32 Init();
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+    virtual VideoRenderCallback
+            * CreateChannel(const WebRtc_UWord32 streamId,
+                            const WebRtc_UWord32 zOrder,
+                            const float left,
+                            const float top,
+                            const float right,
+                            const float bottom);
+
+    virtual WebRtc_Word32 DeleteChannel(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32 GetStreamSettings(const WebRtc_UWord32 channel,
+                                            const WebRtc_UWord16 streamId,
+                                            WebRtc_UWord32& zOrder,
+                                            float& left,
+                                            float& top,
+                                            float& right,
+                                            float& bottom);
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual WebRtc_Word32 StartRender();
+    virtual WebRtc_Word32 StopRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual bool IsFullScreen();
+
+    virtual WebRtc_Word32 SetCropping(const WebRtc_UWord32 channel,
+                                      const WebRtc_UWord16 streamId,
+                                      const float left,
+                                      const float top,
+                                      const float right,
+                                      const float bottom);
+
+    virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 channel,
+                                            const WebRtc_UWord16 streamId,
+                                            const unsigned int zOrder,
+                                            const float left,
+                                            const float top,
+                                            const float right,
+                                            const float bottom);
+
+    virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
+
+    virtual WebRtc_Word32 ChangeWindow(void* window);
+
+    virtual WebRtc_Word32 GetGraphicsMemory(WebRtc_UWord64& totalMemory,
+                                            WebRtc_UWord64& availableMemory);
+
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+                                  const WebRtc_UWord8* text,
+                                  const WebRtc_Word32 textLength,
+                                  const WebRtc_UWord32 colorText,
+                                  const WebRtc_UWord32 colorBg,
+                                  const float left,
+                                  const float top,
+                                  const float rigth,
+                                  const float bottom);
+
+    virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+                                    const WebRtc_UWord8 pictureId,
+                                    const void* colorKey,
+                                    const float left,
+                                    const float top,
+                                    const float right,
+                                    const float bottom);
+
+public:
+    // Get a channel by channel id
+    D3D9Channel* GetD3DChannel(int channel);
+    int UpdateRenderSurface();
+
+protected:
+    // The thread rendering the screen
+    static bool ScreenUpdateThreadProc(void* obj);
+    bool ScreenUpdateProcess();
+
+private:
+    // Init/close the d3d device
+    int InitDevice();
+    int CloseDevice();
+
+    // Transparent related functions
+    int SetTransparentColor(LPDIRECT3DTEXTURE9 pTexture,
+                            DDCOLORKEY* transparentColorKey,
+                            DWORD width,
+                            DWORD height);
+
+    CriticalSectionWrapper& _refD3DCritsect;
+    Trace* _trace;
+    ThreadWrapper* _screenUpdateThread;
+    EventWrapper* _screenUpdateEvent;
+
+    HWND _hWnd;
+    bool _fullScreen;
+    RECT _originalHwndRect;
+    //FIXME we probably don't need this since all the information can be get from _d3dChannels
+    int _channel;
+    //Window size
+    UINT _winWidth;
+    UINT _winHeight;
+
+    // Device
+    LPDIRECT3D9 _pD3D; // Used to create the D3DDevice
+    LPDIRECT3DDEVICE9 _pd3dDevice; // Our rendering device
+    LPDIRECT3DVERTEXBUFFER9 _pVB; // Buffer to hold Vertices
+    LPDIRECT3DTEXTURE9 _pTextureLogo;
+
+    std::map<int, D3D9Channel*> _d3dChannels;
+    std::multimap<int, unsigned int> _d3dZorder;
+
+    // The position where the logo will be placed
+    float _logoLeft;
+    float _logoTop;
+    float _logoRight;
+    float _logoBottom;
+
+    typedef HRESULT (WINAPI *DIRECT3DCREATE9EX)(UINT SDKVersion, IDirect3D9Ex**);
+    LPDIRECT3DSURFACE9 _pd3dSurface;
+
+    DWORD GetVertexProcessingCaps();
+    int InitializeD3D(HWND hWnd, D3DPRESENT_PARAMETERS* pd3dpp);
+
+    D3DPRESENT_PARAMETERS _d3dpp;
+    int ResetDevice();
+
+    int UpdateVerticeBuffer(LPDIRECT3DVERTEXBUFFER9 pVB, int offset,
+                            float startWidth, float startHeight,
+                            float stopWidth, float stopHeight);
+
+    //code for providing graphics settings
+    DWORD _totalMemory;
+    DWORD _availableMemory;
+};
+
+} //namespace webrtc
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECT3D9_H_
diff --git a/src/modules/video_render/main/source/windows/video_render_directdraw.cc b/src/modules/video_render/main/source/windows/video_render_directdraw.cc
new file mode 100644
index 0000000..53bd6a0
--- /dev/null
+++ b/src/modules/video_render/main/source/windows/video_render_directdraw.cc
@@ -0,0 +1,3944 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_render_directdraw.h"
+#include "video_render_windows_impl.h"
+#include "Windows.h"
+#include <ddraw.h>
+#include <assert.h>
+#include <initguid.h>
+#include <MMSystem.h> // timeGetTime
+DEFINE_GUID( IID_IDirectDraw7,0x15e65ec0,0x3b9c,0x11d2,0xb9,0x2f,0x00,0x60,0x97,0x97,0xea,0x5b );
+
+#include "thread_wrapper.h"
+#include "event_wrapper.h"
+#include "trace.h"
+#include "critical_section_wrapper.h"
+//#include "VideoErrors.h"
+
+// Added
+#include "module_common_types.h"
+
+#pragma warning(disable: 4355) // 'this' : used in base member initializer list
+// picture in picture do we need overlay? answer no we can blit directly
+// conference is easy since we can blt the quadrants seperatly
+
+// To determine if the driver supports DMA, retrieve the driver capabilities by calling the IDirectDraw::GetCaps method, 
+// then look for DDBLTCAPS_READSYSMEM and/or DDBLTCAPS_WRITESYSMEM. If either of these flags is set, the device supports DMA.
+// Blt with SRCCOPY should do this can we use it?
+// investigate DDLOCK_NOSYSLOCK 
+
+namespace webrtc {
+
+#define EXTRACT_BITS_RL(the_val, bits_start, bits_len) ((the_val >> (bits_start - 1)) & ((1 << bits_len) - 1)) 
+
+WindowsThreadCpuUsage::WindowsThreadCpuUsage() :
+    _lastGetCpuUsageTime(0),
+    _lastCpuUsageTime(0),
+    _hThread(::GetCurrentThread()),
+    _cores(0),
+    _lastCpuUsage(0)
+{
+
+    DWORD_PTR pmask, smask;
+    DWORD access = PROCESS_QUERY_INFORMATION;
+    if (GetProcessAffinityMask(
+                               OpenProcess(access, false, GetCurrentProcessId()),
+                               &pmask, &smask) != 0)
+    {
+
+        for (int i = 1; i < 33; i++)
+        {
+            if (EXTRACT_BITS_RL(pmask,i,1) == 0)
+            {
+                break;
+            }
+            _cores++;
+        }
+        //sanity
+        if (_cores > 32)
+        {
+            _cores = 32;
+        }
+        if (_cores < 1)
+        {
+            _cores = 1;
+        }
+    }
+    else
+    {
+        _cores = 1;
+    }
+    GetCpuUsage();
+}
+
+//in % since last call
+int WindowsThreadCpuUsage::GetCpuUsage()
+{
+    DWORD now = timeGetTime();
+
+    _int64 newTime = 0;
+    FILETIME creationTime;
+    FILETIME exitTime;
+    _int64 kernelTime = 0;
+    _int64 userTime = 0;
+    if (GetThreadTimes(_hThread, (FILETIME*) &creationTime, &exitTime,
+                       (FILETIME*) &kernelTime, (FILETIME*) &userTime) != 0)
+    {
+        newTime = (kernelTime + userTime);
+    }
+    if (newTime == 0)
+    {
+        _lastGetCpuUsageTime = now;
+        return _lastCpuUsage;
+    }
+
+    // calculate the time difference since last call
+    const DWORD diffTime = (now - _lastGetCpuUsageTime);
+    _lastGetCpuUsageTime = now;
+
+    if (newTime < _lastCpuUsageTime)
+    {
+        _lastCpuUsageTime = newTime;
+        return _lastCpuUsage;
+    }
+    const int cpuDiff = (int) (newTime - _lastCpuUsageTime) / 10000;
+    _lastCpuUsageTime = newTime;
+
+    // calculate the CPU usage
+
+    _lastCpuUsage = (int) (float((cpuDiff * 100)) / (diffTime * _cores) + 0.5f);
+
+    if (_lastCpuUsage > 100)
+    {
+        _lastCpuUsage = 100;
+    }
+    return _lastCpuUsage;
+
+}
+
+DirectDrawStreamSettings::DirectDrawStreamSettings() :
+    _startWidth(0.0F),
+    _stopWidth(1.0F),
+    _startHeight(0.0F),
+    _stopHeight(1.0F),
+    _cropStartWidth(0.0F),
+    _cropStopWidth(1.0F),
+    _cropStartHeight(0.0F),
+    _cropStopHeight(1.0F)
+{
+}
+;
+
+DirectDrawBitmapSettings::DirectDrawBitmapSettings() :
+    _transparentBitMap(NULL),
+    _transparentBitmapLeft(0.0f),
+    _transparentBitmapRight(1.0f),
+    _transparentBitmapTop(0.0f),
+    _transparentBitmapBottom(1.0f),
+    _transparentBitmapWidth(0),
+    _transparentBitmapHeight(0),
+    _transparentBitmapColorKey(NULL),
+    _transparentBitmapSurface(NULL)
+{
+}
+;
+
+DirectDrawBitmapSettings::~DirectDrawBitmapSettings()
+{
+    if (_transparentBitmapColorKey)
+    {
+        delete _transparentBitmapColorKey;
+    }
+    if (_transparentBitmapSurface)
+    {
+        _transparentBitmapSurface->Release();
+    }
+    _transparentBitmapColorKey = NULL;
+    _transparentBitmapSurface = NULL;
+}
+;
+
+int DirectDrawBitmapSettings::SetBitmap(Trace* _trace,
+                                            DirectDraw* directDraw)
+{
+    VideoFrame tempVideoBuffer;
+    HGDIOBJ oldhand;
+    BITMAPINFO pbi;
+    BITMAP bmap;
+    HDC hdcNew;
+
+    hdcNew = CreateCompatibleDC(0);
+
+    // Fill out the BITMAP structure.
+    GetObject(_transparentBitMap, sizeof(bmap), &bmap);
+
+    //Select the bitmap handle into the new device context.
+    oldhand = SelectObject(hdcNew, (HGDIOBJ) _transparentBitMap);
+
+    // we are done with this object
+    DeleteObject(oldhand);
+
+    pbi.bmiHeader.biSize = 40;
+    pbi.bmiHeader.biWidth = bmap.bmWidth;
+    pbi.bmiHeader.biHeight = bmap.bmHeight;
+    pbi.bmiHeader.biPlanes = 1;
+    pbi.bmiHeader.biBitCount = bmap.bmBitsPixel;
+    pbi.bmiHeader.biCompression = BI_RGB;
+    pbi.bmiHeader.biSizeImage = bmap.bmWidth * bmap.bmHeight * 3;
+
+    tempVideoBuffer.VerifyAndAllocate(bmap.bmWidth * bmap.bmHeight * 4);
+
+    // the original un-stretched image in RGB24
+    // todo is there another struct for pbi purify reports read of 24 bytes larger than size
+    int pixelHeight = GetDIBits(hdcNew, _transparentBitMap, 0, bmap.bmHeight,
+                                tempVideoBuffer.Buffer(), &pbi, DIB_RGB_COLORS);
+    if (pixelHeight == 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw failed to GetDIBits in SetBitmap.");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+
+    DeleteDC(hdcNew);
+
+    if (pbi.bmiHeader.biBitCount != 24 && pbi.bmiHeader.biBitCount != 32)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw failed to SetBitmap invalid bit depth");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+
+    DirectDrawSurfaceDesc ddsd;
+    memset(&ddsd, 0, sizeof(ddsd));
+    ddsd.dwSize = sizeof(ddsd);
+    ddsd.dwFlags = DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH | DDSD_PIXELFORMAT;
+    ddsd.ddsCaps.dwCaps = DDSCAPS_VIDEOMEMORY;
+    ddsd.dwHeight = bmap.bmHeight;
+    ddsd.dwWidth = bmap.bmWidth;
+
+    ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
+    ddsd.ddpfPixelFormat.dwFlags = DDPF_RGB;
+
+    _transparentBitmapWidth = bmap.bmWidth;
+    _transparentBitmapHeight = bmap.bmHeight;
+
+    ddsd.ddpfPixelFormat.dwRGBBitCount = 32;
+    ddsd.ddpfPixelFormat.dwRBitMask = 0xff0000;
+    ddsd.ddpfPixelFormat.dwGBitMask = 0xff00;
+    ddsd.ddpfPixelFormat.dwBBitMask = 0xff;
+    ddsd.ddpfPixelFormat.dwRGBAlphaBitMask = 0;
+
+    if (_transparentBitmapSurface)
+    {
+        _transparentBitmapSurface->Release();
+        _transparentBitmapSurface = NULL;
+    }
+
+    HRESULT ddrval =
+            directDraw->CreateSurface(&ddsd, &_transparentBitmapSurface, NULL);
+    if (FAILED(ddrval))
+    {
+        WEBRTC_TRACE(
+                     kTraceError,
+                     kTraceVideo,
+                     -1,
+                     "DirectDraw failed to CreateSurface _transparentBitmapSurface: 0x%x",
+                     ddrval);
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+
+    memset(&ddsd, 0, sizeof(DDSURFACEDESC));
+    ddsd.dwSize = sizeof(DDSURFACEDESC);
+    ddrval = _transparentBitmapSurface->Lock(NULL, &ddsd, DDLOCK_WAIT, NULL);
+    if (ddrval == DDERR_SURFACELOST)
+    {
+        ddrval = _transparentBitmapSurface->Restore();
+        if (ddrval != DD_OK)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVideo, -1,
+                         "DirectDraw failed to restore lost _transparentBitmapSurface");
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw restored lost _transparentBitmapSurface");
+
+        ddrval
+                = _transparentBitmapSurface->Lock(NULL, &ddsd, DDLOCK_WAIT,
+                                                  NULL);
+        if (ddrval != DD_OK)
+        {
+            WEBRTC_TRACE(
+                         kTraceInfo,
+                         kTraceVideo,
+                         -1,
+                         "DirectDraw lock error 0x%x _transparentBitmapSurface",
+                         ddrval);
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+    }
+    unsigned char* dstPtr = (unsigned char*) ddsd.lpSurface;
+    unsigned char* srcPtr = (unsigned char*) tempVideoBuffer.Buffer();
+
+    int pitch = bmap.bmWidth * 4;
+    if (ddsd.dwFlags & DDSD_PITCH)
+    {
+        pitch = ddsd.lPitch;
+    }
+
+    if (pbi.bmiHeader.biBitCount == 24)
+    {
+        ConvertRGB24ToARGB(srcPtr, dstPtr, bmap.bmWidth, bmap.bmHeight,
+                                   0);
+    }
+    else
+    {
+        srcPtr += (bmap.bmWidth * 4) * (bmap.bmHeight - 1);
+
+        for (int i = 0; i < bmap.bmHeight; ++i)
+        {
+            memcpy(dstPtr, srcPtr, bmap.bmWidth * 4);
+            srcPtr -= bmap.bmWidth * 4;
+            dstPtr += pitch;
+        }
+    }
+
+    _transparentBitmapSurface->Unlock(NULL);
+    return 0;
+}
+/**
+ *
+ *   DirectDrawTextSettings
+ *
+ */
+DirectDrawTextSettings::DirectDrawTextSettings() :
+    _ptrText(NULL),
+    _textLength(0),
+    _colorRefText(RGB(255, 255, 255)), // white
+    _colorRefBackground(RGB(0, 0, 0)), // black
+    _textLeft(0.0f),
+    _textRight(0.0f),
+    _textTop(0.0f),
+    _textBottom(0.0f),
+    _transparent(true)
+{
+}
+
+DirectDrawTextSettings::~DirectDrawTextSettings()
+{
+    if (_ptrText)
+    {
+        delete[] _ptrText;
+    }
+}
+
+int DirectDrawTextSettings::SetText(const char* text, int textLength,
+                                        COLORREF colorText, COLORREF colorBg,
+                                        float left, float top, float right,
+                                        float bottom)
+{
+    if (_ptrText)
+    {
+        delete[] _ptrText;
+    }
+    _ptrText = new char[textLength];
+    memcpy(_ptrText, text, textLength);
+    _textLength = textLength;
+    _colorRefText = colorText;
+    _colorRefBackground = colorBg;
+    //_transparent = transparent;
+    _textLeft = left;
+    _textRight = right;
+    _textTop = top;
+    _textBottom = bottom;
+    return 0;
+}
+
+/**
+ *
+ *	DirectDrawChannel
+ *
+ *
+ */
+
+// this need to have a refcount dueto multiple HWNDS demux
+DirectDrawChannel::DirectDrawChannel(DirectDraw* directDraw,
+                                             VideoType blitVideoType,
+                                             VideoType incomingVideoType,
+                                             VideoType screenVideoType,
+                                             VideoRenderDirectDraw* owner) :
+
+    _critSect(CriticalSectionWrapper::CreateCriticalSection()), _refCount(1),
+            _width(0), _height(0), _numberOfStreams(0), _doubleBuffer(false),
+            _directDraw(directDraw), _offScreenSurface(NULL),
+            _offScreenSurfaceNext(NULL), _incomingVideoType(incomingVideoType),
+            _blitVideoType(blitVideoType),
+            _originalBlitVideoType(blitVideoType),
+            _screenVideoType(screenVideoType), _deliverInScreenType(false),
+            _owner(owner)
+{
+    _directDraw->AddRef();
+}
+
+DirectDrawChannel::~DirectDrawChannel()
+{
+    if (_directDraw)
+    {
+        _directDraw->Release();
+    }
+    if (_offScreenSurface)
+    {
+        _offScreenSurface->Release();
+    }
+    if (_offScreenSurfaceNext)
+    {
+        _offScreenSurfaceNext->Release();
+    }
+    std::map<unsigned long long, DirectDrawStreamSettings*>::iterator it =
+            _streamIdToSettings.begin();
+    while (it != _streamIdToSettings.end())
+    {
+        DirectDrawStreamSettings* streamSettings = it->second;
+        if (streamSettings)
+        {
+            delete streamSettings;
+        }
+        it = _streamIdToSettings.erase(it);
+    }
+    delete _critSect;
+}
+
+void DirectDrawChannel::AddRef()
+{
+    CriticalSectionScoped cs(_critSect);
+    _refCount++;
+}
+
+void DirectDrawChannel::Release()
+{
+    bool deleteObj = false;
+    _critSect->Enter();
+    _refCount--;
+    if (_refCount == 0)
+    {
+        deleteObj = true;
+    }
+    _critSect->Leave();
+
+    if (deleteObj)
+    {
+        delete this;
+    }
+}
+
+void DirectDrawChannel::SetStreamSettings(VideoRenderDirectDraw* DDobj,
+                                              short streamId, float startWidth,
+                                              float startHeight,
+                                              float stopWidth, float stopHeight)
+{
+    // we can save 5 bits due to 16 byte alignment of the pointer
+    unsigned long long lookupID = reinterpret_cast<unsigned long long> (DDobj);
+    lookupID &= 0xffffffffffffffe0;
+    lookupID <<= 11;
+    lookupID += streamId;
+
+    CriticalSectionScoped cs(_critSect);
+
+    DirectDrawStreamSettings* streamSettings = NULL;
+
+    std::map<unsigned long long, DirectDrawStreamSettings*>::iterator it =
+            _streamIdToSettings.find(lookupID);
+    if (it == _streamIdToSettings.end())
+    {
+        streamSettings = new DirectDrawStreamSettings();
+        _streamIdToSettings[lookupID] = streamSettings;
+    }
+    else
+    {
+        streamSettings = it->second;
+    }
+
+    streamSettings->_startHeight = startHeight;
+    streamSettings->_startWidth = startWidth;
+    streamSettings->_stopWidth = stopWidth;
+    streamSettings->_stopHeight = stopHeight;
+
+    _offScreenSurfaceUpdated = false;
+}
+
+void DirectDrawChannel::SetStreamCropSettings(VideoRenderDirectDraw* DDObj,
+                                                  short streamId,
+                                                  float startWidth,
+                                                  float startHeight,
+                                                  float stopWidth,
+                                                  float stopHeight)
+{
+    unsigned long long lookupID = reinterpret_cast<unsigned long long> (DDObj);
+    lookupID &= 0xffffffffffffffe0;
+    lookupID <<= 11;
+    lookupID += streamId;
+
+    CriticalSectionScoped cs(_critSect);
+
+    DirectDrawStreamSettings* streamSettings = NULL;
+    std::map<unsigned long long, DirectDrawStreamSettings*>::iterator it =
+            _streamIdToSettings.find(lookupID);
+    if (it == _streamIdToSettings.end())
+    {
+        streamSettings = new DirectDrawStreamSettings();
+        _streamIdToSettings[streamId] = streamSettings;
+    }
+    else
+    {
+        streamSettings = it->second;
+    }
+    streamSettings->_cropStartWidth = startWidth;
+    streamSettings->_cropStopWidth = stopWidth;
+    streamSettings->_cropStartHeight = startHeight;
+    streamSettings->_cropStopHeight = stopHeight;
+}
+
+int DirectDrawChannel::GetStreamSettings(VideoRenderDirectDraw* DDObj,
+                                             short streamId, float& startWidth,
+                                             float& startHeight,
+                                             float& stopWidth,
+                                             float& stopHeight)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    unsigned long long lookupID = reinterpret_cast<unsigned long long> (DDObj);
+    lookupID &= 0xffffffffffffffe0;
+    lookupID <<= 11;
+    lookupID += streamId;
+
+    DirectDrawStreamSettings* streamSettings = NULL;
+    std::map<unsigned long long, DirectDrawStreamSettings*>::iterator it =
+            _streamIdToSettings.find(lookupID);
+    if (it == _streamIdToSettings.end())
+    {
+        // Didn't find this stream...
+        return -1;
+    }
+    streamSettings = it->second;
+    startWidth = streamSettings->_startWidth;
+    startHeight = streamSettings->_startHeight;
+    stopWidth = streamSettings->_stopWidth;
+    stopHeight = streamSettings->_stopHeight;
+
+    return 0;
+}
+
+bool DirectDrawChannel::IsOffScreenSurfaceUpdated(VideoRenderDirectDraw* DDobj)
+{
+    CriticalSectionScoped cs(_critSect);
+    return _offScreenSurfaceUpdated;
+}
+
+void DirectDrawChannel::GetLargestSize(RECT* mixingRect)
+{
+    CriticalSectionScoped cs(_critSect);
+    if (mixingRect)
+    {
+        if (mixingRect->bottom < _height)
+        {
+            mixingRect->bottom = _height;
+        }
+        if (mixingRect->right < _width)
+        {
+            mixingRect->right = _width;
+        }
+    }
+}
+
+int DirectDrawChannel::ChangeDeliverColorFormat(bool useScreenType)
+{
+    _deliverInScreenType = useScreenType;
+    return FrameSizeChange(0, 0, 0);
+}
+
+WebRtc_Word32 DirectDrawChannel::RenderFrame(const WebRtc_UWord32 streamId,
+                                                 VideoFrame& videoFrame)
+{
+    CriticalSectionScoped cs(_critSect);
+    if (_width != videoFrame.Width() || _height != videoFrame.Height())
+    {
+        if (FrameSizeChange(videoFrame.Width(), videoFrame.Height(), 1) == -1)
+        {
+            return -1;
+        }
+    }
+    return DeliverFrame(videoFrame.Buffer(), videoFrame.Length(),
+                        videoFrame.TimeStamp());
+}
+
+int DirectDrawChannel::FrameSizeChange(int width, int height,
+                                           int numberOfStreams)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (_directDraw == NULL)
+    {
+        return -1; // signal that we are not ready for the change
+    }
+    if (_width == width && _height == height && _offScreenSurface
+            && _offScreenSurfaceNext)
+    {
+        _numberOfStreams = numberOfStreams;
+        return 0;
+    }
+    if (_offScreenSurface)
+    {
+        _offScreenSurface->Release();
+        _offScreenSurface = NULL;
+    }
+    if (_offScreenSurfaceNext)
+    {
+        _offScreenSurfaceNext->Release();
+        _offScreenSurfaceNext = NULL;
+    }
+    if (width && height)
+    {
+        _width = width;
+        _height = height;
+        _numberOfStreams = numberOfStreams;
+    }
+
+    // create this channels offscreen buffer
+    DirectDrawSurfaceDesc ddsd;
+    HRESULT ddrval = DD_OK;
+    memset(&ddsd, 0, sizeof(ddsd));
+    ddsd.dwSize = sizeof(ddsd);
+    ddsd.dwFlags = DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH | DDSD_PIXELFORMAT;
+    ddsd.ddsCaps.dwCaps = DDSCAPS_VIDEOMEMORY;
+    ddsd.dwHeight = _height;
+    ddsd.dwWidth = _width;
+    /*
+     char logStr[256];
+     _snprintf(logStr,256, "offscreen H:%d W:%d \n",_height, _width);
+     OutputDebugString(logStr);
+     */
+    //Fix for bad video driver on HP Mini. If it takes to long time to deliver a frame - try to blit using the same pixel format as used by the screen.
+    if (_deliverInScreenType && _screenVideoType != kUnknown)
+    {
+        //The HP mini netbook, which this fix for, uses the VIA processor.
+        //The measuring shows that this fix will impact systems with Intel processor, including Atom.
+        //So let's disable it here. If we really need this for VIA processor, we should have additional logic to detect
+        //the processor model.
+        //WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1, "DirectDrawChannel changing to screen video type");
+        //_blitVideoType=_screenVideoType;
+    }
+    else
+    {
+        WEBRTC_TRACE(
+                     kTraceInfo,
+                     kTraceVideo,
+                     -1,
+                     "DirectDrawChannel changing to originial blit video type %d",
+                     _originalBlitVideoType);
+        _blitVideoType = _originalBlitVideoType;
+    }
+
+    WEBRTC_TRACE(
+                 kTraceInfo,
+                 kTraceVideo,
+                 -1,
+                 "DirectDrawChannel::FrameSizeChange height %d, width %d, _blitVideoType %d",
+                 ddsd.dwHeight, ddsd.dwWidth, _blitVideoType);
+    switch (_blitVideoType)
+    {
+        case kYV12:
+        {
+            ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
+            ddsd.ddpfPixelFormat.dwFlags = DDPF_FOURCC;
+            ddsd.ddpfPixelFormat.dwFourCC = MAKEFOURCC('Y', 'V', '1', '2');
+        }
+            break;
+        case kYUY2:
+        {
+            ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
+            ddsd.ddpfPixelFormat.dwFlags = DDPF_FOURCC;
+            ddsd.ddpfPixelFormat.dwFourCC = MAKEFOURCC('Y', 'U', 'Y', '2');
+        }
+            break;
+        case kUYVY:
+        {
+            ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
+            ddsd.ddpfPixelFormat.dwFlags = DDPF_FOURCC;
+            ddsd.ddpfPixelFormat.dwFourCC = MAKEFOURCC('U', 'Y', 'V', 'Y');
+        }
+            break;
+        case kIYUV:
+        {
+            ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
+            ddsd.ddpfPixelFormat.dwFlags = DDPF_FOURCC;
+            ddsd.ddpfPixelFormat.dwFourCC = MAKEFOURCC('I', 'Y', 'U', 'V');
+        }
+            break;
+        case kARGB:
+        {
+            ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
+            ddsd.ddpfPixelFormat.dwFlags = DDPF_RGB;
+            ddsd.ddpfPixelFormat.dwRGBBitCount = 32;
+            ddsd.ddpfPixelFormat.dwRBitMask = 0xff0000;
+            ddsd.ddpfPixelFormat.dwGBitMask = 0xff00;
+            ddsd.ddpfPixelFormat.dwBBitMask = 0xff;
+            ddsd.ddpfPixelFormat.dwRGBAlphaBitMask = 0;
+        }
+            break;
+        case kRGB24:
+        {
+            ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
+            ddsd.ddpfPixelFormat.dwFlags = DDPF_RGB;
+            ddsd.ddpfPixelFormat.dwRGBBitCount = 24;
+            ddsd.ddpfPixelFormat.dwRBitMask = 0xff0000;
+            ddsd.ddpfPixelFormat.dwGBitMask = 0xff00;
+            ddsd.ddpfPixelFormat.dwBBitMask = 0xff;
+            ddsd.ddpfPixelFormat.dwRGBAlphaBitMask = 0;
+        }
+            break;
+        case kRGB565:
+        {
+            ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
+            ddsd.ddpfPixelFormat.dwFlags = DDPF_RGB;
+            ddsd.ddpfPixelFormat.dwRGBBitCount = 16;
+            ddsd.ddpfPixelFormat.dwRBitMask = 0x0000F800;
+            ddsd.ddpfPixelFormat.dwGBitMask = 0x000007e0;
+            ddsd.ddpfPixelFormat.dwBBitMask = 0x0000001F;
+            ddsd.ddpfPixelFormat.dwRGBAlphaBitMask = 0;
+        }
+            break;
+        case kARGB4444:
+        {
+            ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
+            ddsd.ddpfPixelFormat.dwFlags = DDPF_RGB;
+            ddsd.ddpfPixelFormat.dwRGBBitCount = 16;
+            ddsd.ddpfPixelFormat.dwRBitMask = 0x00000f00;
+            ddsd.ddpfPixelFormat.dwGBitMask = 0x000000f0;
+            ddsd.ddpfPixelFormat.dwBBitMask = 0x0000000f;
+            ddsd.ddpfPixelFormat.dwRGBAlphaBitMask = 0;
+            break;
+        }
+        case kARGB1555:
+        {
+            ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
+            ddsd.ddpfPixelFormat.dwFlags = DDPF_RGB;
+            ddsd.ddpfPixelFormat.dwRGBBitCount = 16;
+            ddsd.ddpfPixelFormat.dwRBitMask = 0x00007C00;
+            ddsd.ddpfPixelFormat.dwGBitMask = 0x3E0;
+            ddsd.ddpfPixelFormat.dwBBitMask = 0x1F;
+            ddsd.ddpfPixelFormat.dwRGBAlphaBitMask = 0;
+            break;
+        }
+        case kI420:
+        {
+            ddsd.ddpfPixelFormat.dwSize = sizeof(DDPIXELFORMAT);
+            ddsd.ddpfPixelFormat.dwFlags = DDPF_FOURCC;
+            ddsd.ddpfPixelFormat.dwFourCC = MAKEFOURCC('I', '4', '2', '0');
+        }
+            break;
+        default:
+            ddrval = S_FALSE;
+    }
+
+    if (ddrval == DD_OK)
+    {
+        if (!_owner->IsPrimaryOrMixingSurfaceOnSystem())
+        {
+            ddrval
+                    = _directDraw->CreateSurface(&ddsd, &_offScreenSurface,
+                                                 NULL);
+            if (FAILED(ddrval))
+            {
+                WEBRTC_TRACE(
+                             kTraceInfo,
+                             kTraceVideo,
+                             -1,
+                             "CreateSurface failed for _offScreenSurface on VideoMemory, trying on System Memory");
+
+                memset(&ddsd, 0, sizeof(ddsd));
+                ddsd.dwSize = sizeof(ddsd);
+                ddsd.dwFlags = DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH;
+
+                ddsd.dwHeight = _height;
+                ddsd.dwWidth = _width;
+
+                ddsd.ddsCaps.dwCaps = DDSCAPS_SYSTEMMEMORY;
+                _blitVideoType = kARGB;
+
+                ddrval = _directDraw->CreateSurface(&ddsd, &_offScreenSurface,
+                                                    NULL);
+                if (FAILED(ddrval))
+                {
+                    WEBRTC_TRACE(
+                                 kTraceError,
+                                 kTraceVideo,
+                                 -1,
+                                 "DirectDraw failed to CreateSurface _offScreenSurface using SystemMemory: 0x%x",
+                                 ddrval);
+                }
+                ddrval = _directDraw->CreateSurface(&ddsd,
+                                                    &_offScreenSurfaceNext,
+                                                    NULL);
+                if (FAILED(ddrval))
+                {
+                    WEBRTC_TRACE(
+                                 kTraceError,
+                                 kTraceVideo,
+                                 -1,
+                                 "DirectDraw failed to CreateSurface _offScreenSurfaceNext using SystemMemory: 0x%x",
+                                 ddrval);
+                }
+            }
+            else
+            {
+                ddrval = _directDraw->CreateSurface(&ddsd,
+                                                    &_offScreenSurfaceNext,
+                                                    NULL);
+                if (ddrval == DDERR_OUTOFVIDEOMEMORY)
+                {
+                    WEBRTC_TRACE(
+                                 kTraceInfo,
+                                 kTraceVideo,
+                                 -1,
+                                 "CreateSurface failed for _offScreenSurfaceNext on VideoMemory, trying on System Memory");
+
+                    memset(&ddsd, 0, sizeof(ddsd));
+                    ddsd.dwSize = sizeof(ddsd);
+                    ddsd.dwFlags = DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH;
+
+                    ddsd.dwHeight = _height;
+                    ddsd.dwWidth = _width;
+
+                    ddsd.ddsCaps.dwCaps = DDSCAPS_SYSTEMMEMORY;
+                    _blitVideoType = kARGB;
+
+                    ddrval = _directDraw->CreateSurface(&ddsd,
+                                                        &_offScreenSurfaceNext,
+                                                        NULL);
+                    if (FAILED(ddrval))
+                    {
+                        WEBRTC_TRACE(
+                                     kTraceError,
+                                     kTraceVideo,
+                                     -1,
+                                     "DirectDraw failed to CreateSurface _offScreenSurfaceNext using SystemMemory: 0x%x",
+                                     ddrval);
+                    }
+                }
+            }
+        }
+        else
+        {
+            memset(&ddsd, 0, sizeof(ddsd));
+            ddsd.dwSize = sizeof(ddsd);
+            ddsd.dwFlags = DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH;
+
+            ddsd.dwHeight = _height;
+            ddsd.dwWidth = _width;
+
+            ddsd.ddsCaps.dwCaps = DDSCAPS_SYSTEMMEMORY;
+            if (_owner->CanBltFourCC())
+            {
+                _blitVideoType = kARGB;
+            }
+            else
+            {
+                _blitVideoType = _originalBlitVideoType;
+            }
+
+            ddrval
+                    = _directDraw->CreateSurface(&ddsd, &_offScreenSurface,
+                                                 NULL);
+            if (FAILED(ddrval))
+            {
+                WEBRTC_TRACE(
+                             kTraceError,
+                             kTraceVideo,
+                             -1,
+                             "DirectDraw failed to CreateSurface _offScreenSurface using SystemMemory: 0x%x",
+                             ddrval);
+            }
+
+            ddrval = _directDraw->CreateSurface(&ddsd, &_offScreenSurfaceNext,
+                                                NULL);
+            if (FAILED(ddrval))
+            {
+                WEBRTC_TRACE(
+                             kTraceError,
+                             kTraceVideo,
+                             -1,
+                             "DirectDraw failed to CreateSurface _offScreenSurfaceNext using SystemMemory: 0x%x",
+                             ddrval);
+            }
+        }
+    }
+
+    if (FAILED(ddrval))
+    {
+        // failed to change size
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw failed to CreateSurface : 0x%x", ddrval);
+        return -1;
+    }
+
+    return 0;
+}
+
+int DirectDrawChannel::DeliverFrame(unsigned char* buffer, int bufferSize,
+                                        unsigned int /*timeStamp90KHz*/)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    if (CalcBufferSize(_incomingVideoType, _width, _height)
+            != bufferSize)
+    {
+        // sanity
+        return -1;
+    }
+    if (!_offScreenSurface || !_offScreenSurfaceNext)
+    {
+        if (_width && _height && _numberOfStreams)
+        {
+            // our surface was lost recreate it
+            FrameSizeChange(_width, _height, _numberOfStreams);
+        }
+        return -1;
+    }
+    if (_offScreenSurface->IsLost() == DDERR_SURFACELOST)
+    {
+        HRESULT ddrval = _offScreenSurface->Restore();
+        if (ddrval != DD_OK)
+        {
+            // failed to restore our surface remove it and it will be re-created in next frame
+            _offScreenSurface->Release();
+            _offScreenSurface = NULL;
+            _offScreenSurfaceNext->Release();
+            _offScreenSurfaceNext = NULL;
+            return -1;
+        }
+        ddrval = _offScreenSurfaceNext->Restore();
+        if (ddrval != DD_OK)
+        {
+            // failed to restore our surface remove it and it will be re-created in next frame
+            _offScreenSurface->Release();
+            _offScreenSurface = NULL;
+            _offScreenSurfaceNext->Release();
+            _offScreenSurfaceNext = NULL;
+            return -1;
+        }
+    }
+    _doubleBuffer = false;
+
+    // check if _offScreenSurfaceUpdated is true
+    DirectDrawSurface* offScreenSurface = _offScreenSurface;
+    {
+
+        if (_offScreenSurfaceUpdated)
+        {
+            // this frame is not yet rendered
+            offScreenSurface = _offScreenSurfaceNext;
+            _doubleBuffer = true;
+        }
+    }
+
+    DirectDrawSurfaceDesc ddsd;
+    memset(&ddsd, 0, sizeof(ddsd));
+    ddsd.dwSize = sizeof(ddsd);
+    HRESULT ddrval = offScreenSurface->Lock(NULL, &ddsd, DDLOCK_WAIT, NULL);
+    if (ddrval == DDERR_SURFACELOST)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDrawChannel::DeliverFrame offScreenSurface lost");
+        ddrval = offScreenSurface->Restore();
+        if (ddrval != DD_OK)
+        {
+            // failed to restore our surface remove it and it will be re-created in next frame
+            _offScreenSurface->Release();
+            _offScreenSurface = NULL;
+            _offScreenSurfaceNext->Release();
+            _offScreenSurfaceNext = NULL;
+            return -1;
+        }
+        return 0;
+    }
+    if (ddrval != DD_OK)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDrawChannel::DeliverFrame failed to lock");
+        // failed to lock our surface remove it and it will be re-created in next frame
+        _offScreenSurface->Release();
+        _offScreenSurface = NULL;
+        _offScreenSurfaceNext->Release();
+        _offScreenSurfaceNext = NULL;
+        return -1;
+    }
+
+    int ret = 0;
+    if (_incomingVideoType == kI420) {
+      unsigned char* ptr = static_cast<unsigned char*>(ddsd.lpSurface);
+      ret = ConvertFromI420(buffer, ddsd.lPitch, _blitVideoType, 0,
+                            _width, _height, ptr);
+    } else {
+      assert(false &&
+             "DirectDrawChannel::DeliverFrame wrong incoming video type");
+             WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+             "%s wrong incoming video type:%d",
+             __FUNCTION__, _incomingVideoType);
+      ret = -1;
+    }
+    _offScreenSurfaceUpdated = true;
+    offScreenSurface->Unlock(NULL);
+    return ret;
+}
+
+int DirectDrawChannel::BlitFromOffscreenBufferToMixingBuffer(
+                                                                 VideoRenderDirectDraw* DDobj,
+                                                                 short streamID,
+                                                                 DirectDrawSurface* mixingSurface,
+                                                                 RECT &hwndRect,
+                                                                 bool demuxing)
+{
+    HRESULT ddrval;
+    RECT srcRect;
+    RECT dstRect;
+    DirectDrawStreamSettings* streamSettings = NULL;
+    unsigned long long lookupID = reinterpret_cast<unsigned long long> (DDobj);
+    lookupID &= 0xffffffffffffffe0;
+    lookupID <<= 11;
+    lookupID += streamID;
+
+    CriticalSectionScoped cs(_critSect);
+
+    if (_offScreenSurface == NULL)
+    {
+        // The offscreen surface has been deleted but not restored yet
+        return 0;
+    }
+    if (mixingSurface == NULL)
+    {
+        // Not a valid input argument
+        return 0;
+    }
+
+    std::map<unsigned long long, DirectDrawStreamSettings*>::iterator it =
+            _streamIdToSettings.find(lookupID);
+    if (it == _streamIdToSettings.end())
+    {
+        // ignore this stream id
+        return 0;
+    }
+    streamSettings = it->second;
+
+    int numberOfStreams = _numberOfStreams;
+    if (!demuxing)
+    {
+        numberOfStreams = 1; // treat as one stream if we only have one config
+    }
+
+    switch (numberOfStreams)
+    {
+        case 0:
+            return 0;
+        case 1:
+        {
+            // no demux
+            if (streamID > 0)
+                return 0;
+
+            ::SetRect(&srcRect, int(_width * streamSettings->_cropStartWidth),
+                      int(_height * streamSettings->_cropStartHeight),
+                      int(_width * streamSettings->_cropStopWidth), int(_height
+                              * streamSettings->_cropStopHeight));
+
+            ::SetRect(&dstRect, int(hwndRect.right
+                    * streamSettings->_startWidth), int(hwndRect.bottom
+                    * streamSettings->_startHeight), int(hwndRect.right
+                    * streamSettings->_stopWidth), int(hwndRect.bottom
+                    * streamSettings->_stopHeight));
+        }
+            break;
+        case 2:
+        case 3:
+        case 4:
+            // classic quadrant demux
+        {
+            int width = _width >> 1;
+            int height = _height >> 1;
+            ::SetRect(&srcRect, int(width * streamSettings->_cropStartWidth),
+                      int(height * streamSettings->_cropStartHeight), int(width
+                              * streamSettings->_cropStopWidth), int(height
+                              * streamSettings->_cropStopHeight));
+
+            ::SetRect(&dstRect, int(hwndRect.right
+                    * streamSettings->_startWidth), int(hwndRect.bottom
+                    * streamSettings->_startHeight), int(hwndRect.right
+                    * streamSettings->_stopWidth), int(hwndRect.bottom
+                    * streamSettings->_stopHeight));
+
+            // stream id to select quadrant
+            if (streamID == 1)
+            {
+                ::OffsetRect(&srcRect, width, 0);
+            }
+            if (streamID == 2)
+            {
+                ::OffsetRect(&srcRect, 0, height);
+            }
+            if (streamID == 3)
+            {
+                ::OffsetRect(&srcRect, width, height);
+            }
+        }
+            break;
+        case 5:
+        case 6:
+        {
+            const int width = (_width / (3 * 16)) * 16;
+            const int widthMidCol = width + ((_width % (16 * 3)) / 16) * 16;
+            const int height = _height / (2 * 16) * 16;
+            if (streamID == 1 || streamID == 4)
+            {
+                ::SetRect(&srcRect, int(widthMidCol
+                        * streamSettings->_cropStartWidth), int(height
+                        * streamSettings->_cropStartHeight), int(widthMidCol
+                        * streamSettings->_cropStopWidth), int(height
+                        * streamSettings->_cropStopHeight));
+            }
+            else
+            {
+                ::SetRect(&srcRect,
+                          int(width * streamSettings->_cropStartWidth),
+                          int(height * streamSettings->_cropStartHeight),
+                          int(width * streamSettings->_cropStopWidth),
+                          int(height * streamSettings->_cropStopHeight));
+            }
+            ::SetRect(&dstRect, int(hwndRect.right
+                    * streamSettings->_startWidth), int(hwndRect.bottom
+                    * streamSettings->_startHeight), int(hwndRect.right
+                    * streamSettings->_stopWidth), int(hwndRect.bottom
+                    * streamSettings->_stopHeight));
+
+            // stream id to select quadrant
+            switch (streamID)
+            {
+                case 1:
+                    ::OffsetRect(&srcRect, width, 0);
+                    break;
+                case 2:
+                    ::OffsetRect(&srcRect, width + widthMidCol, 0);
+                    break;
+                case 3:
+                    ::OffsetRect(&srcRect, 0, height);
+                    break;
+                case 4:
+                    ::OffsetRect(&srcRect, width, height);
+                    break;
+                case 5:
+                    ::OffsetRect(&srcRect, width + widthMidCol, height);
+                    break;
+            }
+        }
+            break;
+        case 7:
+        case 8:
+        case 9:
+
+        {
+            const int width = (_width / (3 * 16)) * 16;
+            const int widthMidCol = width + ((_width % (16 * 3)) / 16) * 16;
+            const int height = _height / (3 * 16) * 16;
+            const int heightMidRow = height + ((_height % (16 * 3)) / 16) * 16;
+
+            ::SetRect(&dstRect, int(hwndRect.right
+                    * streamSettings->_startWidth), int(hwndRect.bottom
+                    * streamSettings->_startHeight), int(hwndRect.right
+                    * streamSettings->_stopWidth), int(hwndRect.bottom
+                    * streamSettings->_stopHeight));
+
+            switch (streamID)
+            {
+                case 0:
+                    //Size
+                    ::SetRect(&srcRect, int(width
+                            * streamSettings->_cropStartWidth), int(height
+                            * streamSettings->_cropStartHeight), int(width
+                            * streamSettings->_cropStopWidth), int(height
+                            * streamSettings->_cropStopHeight));
+                    //Position
+                    ::OffsetRect(&srcRect, 0, 0);
+                    break;
+                case 1:
+                    ::SetRect(
+                              &srcRect,
+                              int(widthMidCol * streamSettings->_cropStartWidth),
+                              int(height * streamSettings->_cropStartHeight),
+                              int(widthMidCol * streamSettings->_cropStopWidth),
+                              int(height * streamSettings->_cropStopHeight));
+                    ::OffsetRect(&srcRect, width, 0);
+                    break;
+                case 2:
+                    ::SetRect(&srcRect, int(width
+                            * streamSettings->_cropStartWidth), int(height
+                            * streamSettings->_cropStartHeight), int(width
+                            * streamSettings->_cropStopWidth), int(height
+                            * streamSettings->_cropStopHeight));
+                    ::OffsetRect(&srcRect, width + widthMidCol, 0);
+                    break;
+                case 3:
+                    ::SetRect(&srcRect, int(width
+                            * streamSettings->_cropStartWidth),
+                              int(heightMidRow
+                                      * streamSettings->_cropStartHeight),
+                              int(width * streamSettings->_cropStopWidth),
+                              int(heightMidRow
+                                      * streamSettings->_cropStopHeight));
+                    ::OffsetRect(&srcRect, 0, height);
+                    break;
+                case 4:
+                    ::SetRect(
+                              &srcRect,
+                              int(widthMidCol * streamSettings->_cropStartWidth),
+                              int(heightMidRow
+                                      * streamSettings->_cropStartHeight),
+                              int(widthMidCol * streamSettings->_cropStopWidth),
+                              int(heightMidRow
+                                      * streamSettings->_cropStopHeight));
+                    ::OffsetRect(&srcRect, width, height);
+
+                    break;
+                case 5:
+                    ::SetRect(&srcRect, int(width
+                            * streamSettings->_cropStartWidth),
+                              int(heightMidRow
+                                      * streamSettings->_cropStartHeight),
+                              int(width * streamSettings->_cropStopWidth),
+                              int(heightMidRow
+                                      * streamSettings->_cropStopHeight));
+                    ::OffsetRect(&srcRect, width + widthMidCol, height);
+                    break;
+                case 6:
+                    ::SetRect(&srcRect, int(width
+                            * streamSettings->_cropStartWidth), int(height
+                            * streamSettings->_cropStartHeight), int(width
+                            * streamSettings->_cropStopWidth), int(height
+                            * streamSettings->_cropStopHeight));
+                    ::OffsetRect(&srcRect, 0, height + heightMidRow);
+                    break;
+                case 7:
+                    ::SetRect(
+                              &srcRect,
+                              int(widthMidCol * streamSettings->_cropStartWidth),
+                              int(height * streamSettings->_cropStartHeight),
+                              int(widthMidCol * streamSettings->_cropStopWidth),
+                              int(height * streamSettings->_cropStopHeight));
+                    ::OffsetRect(&srcRect, width, height + heightMidRow);
+                    break;
+                case 8:
+                    ::SetRect(&srcRect, int(width
+                            * streamSettings->_cropStartWidth), int(height
+                            * streamSettings->_cropStartHeight), int(width
+                            * streamSettings->_cropStopWidth), int(height
+                            * streamSettings->_cropStopHeight));
+                    ::OffsetRect(&srcRect, width + widthMidCol, height
+                            + heightMidRow);
+                    break;
+            }
+        }
+            break;
+        case 10:
+        case 11:
+        case 12:
+        case 13:
+        case 14:
+        case 15:
+        case 16:
+        default:
+        {
+            ::SetRect(&srcRect, int(_width * streamSettings->_cropStartWidth),
+                      int(_height * streamSettings->_cropStartHeight),
+                      int(_width * streamSettings->_cropStopWidth), int(_height
+                              * streamSettings->_cropStopHeight));
+
+            ::SetRect(&dstRect, int(hwndRect.right
+                    * streamSettings->_startWidth), int(hwndRect.bottom
+                    * streamSettings->_startHeight), int(hwndRect.right
+                    * streamSettings->_stopWidth), int(hwndRect.bottom
+                    * streamSettings->_stopHeight));
+        }
+    }
+
+    if (dstRect.right > hwndRect.right)
+    {
+        srcRect.right -= (int) ((float) (srcRect.right - srcRect.left)
+                * ((float) (dstRect.right - hwndRect.right)
+                        / (float) (dstRect.right - dstRect.left)));
+        dstRect.right = hwndRect.right;
+    }
+    if (dstRect.left < hwndRect.left)
+    {
+        srcRect.left += (int) ((float) (srcRect.right - srcRect.left)
+                * ((float) (hwndRect.left - dstRect.left)
+                        / (float) (dstRect.right - dstRect.left)));
+        dstRect.left = hwndRect.left;
+    }
+    if (dstRect.bottom > hwndRect.bottom)
+    {
+        srcRect.bottom -= (int) ((float) (srcRect.bottom - srcRect.top)
+                * ((float) (dstRect.bottom - hwndRect.bottom)
+                        / (float) (dstRect.bottom - dstRect.top)));
+        dstRect.bottom = hwndRect.bottom;
+    }
+    if (dstRect.top < hwndRect.top)
+    {
+        srcRect.top += (int) ((float) (srcRect.bottom - srcRect.top)
+                * ((float) (hwndRect.top - dstRect.top)
+                        / (float) (dstRect.bottom - dstRect.top)));
+        dstRect.top = hwndRect.top;
+    }
+
+    DDBLTFX ddbltfx;
+    ZeroMemory(&ddbltfx, sizeof(ddbltfx));
+    ddbltfx.dwSize = sizeof(ddbltfx);
+    ddbltfx.dwDDFX = DDBLTFX_NOTEARING;
+
+    // wait for the _mixingSurface to be available
+    ddrval = mixingSurface->Blt(&dstRect, _offScreenSurface, &srcRect,
+                                DDBLT_WAIT | DDBLT_DDFX, &ddbltfx);
+    if (ddrval == DDERR_SURFACELOST)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "mixingSurface->Blt surface lost");
+        ddrval = mixingSurface->Restore();
+        if (ddrval != DD_OK)
+        {
+            // we dont own the surface just report the error
+            return -1;
+        }
+    }
+    else if (ddrval == DDERR_INVALIDRECT)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "mixingSurface->Blt DDERR_INVALIDRECT");
+        WEBRTC_TRACE(
+                     kTraceError,
+                     kTraceVideo,
+                     -1,
+                     "dstRect co-ordinates - top: %d left: %d bottom: %d right: %d",
+                     dstRect.top, dstRect.left, dstRect.bottom, dstRect.right);
+        WEBRTC_TRACE(
+                     kTraceError,
+                     kTraceVideo,
+                     -1,
+                     "srcRect co-ordinates - top: %d left: %d bottom: %d right: %d",
+                     srcRect.top, srcRect.left, srcRect.bottom, srcRect.right);
+
+        // ignore
+    }
+    else if (ddrval != DD_OK)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "mixingSurface->Blt !DD_OK");
+        WEBRTC_TRACE(
+                     kTraceError,
+                     kTraceVideo,
+                     -1,
+                     "DirectDraw blt mixingSurface BlitFromOffscreenBufferToMixingBuffer error 0x%x  ",
+                     ddrval);
+
+        //logging the co-ordinates and hwnd
+        WEBRTC_TRACE(
+                     kTraceError,
+                     kTraceVideo,
+                     -1,
+                     "dstRect co-ordinates - top: %d left: %d bottom: %d right: %d",
+                     dstRect.top, dstRect.left, dstRect.bottom, dstRect.right);
+        WEBRTC_TRACE(
+                     kTraceError,
+                     kTraceVideo,
+                     -1,
+                     "srcRect co-ordinates - top: %d left: %d bottom: %d right: %d",
+                     srcRect.top, srcRect.left, srcRect.bottom, srcRect.right);
+
+        /*      char logStr[256];
+         _snprintf(logStr,256, "srcRect T:%d L:%d B:%d R:%d\n",srcRect.top, srcRect.left, srcRect.bottom, srcRect.right);
+         OutputDebugString(logStr);
+         char logStr1[256];
+         _snprintf(logStr1,256, "dstRect T:%d L:%d B:%d R:%d\n",dstRect.top, dstRect.left, dstRect.bottom, dstRect.right);
+         OutputDebugString(logStr1);
+         char logStr2[256];
+         _snprintf(logStr2,256, "error 0x:%x \n",ddrval);
+         OutputDebugString(logStr2);
+         */
+        // we dont own the surface just report the error
+        return -1;
+    }
+    if (_doubleBuffer)
+    {
+        DirectDrawSurface* oldOffScreenSurface = _offScreenSurface;
+        _offScreenSurface = _offScreenSurfaceNext;
+        _offScreenSurfaceNext = oldOffScreenSurface;
+        _doubleBuffer = false;
+    }
+    else
+    {
+        _offScreenSurfaceUpdated = false;
+    }
+    return 0;
+}
+
+/**
+ *
+ *	VideoRenderDirectDraw
+ *
+ *
+ */
+
+VideoRenderDirectDraw::VideoRenderDirectDraw(Trace* trace,
+                                                     HWND hWnd, bool fullscreen) :
+            _trace(trace),
+            _confCritSect(CriticalSectionWrapper::CreateCriticalSection()),
+            _fullscreen(fullscreen),
+            _demuxing(false),
+            _transparentBackground(false),
+            _supportTransparency(false),
+            _canStretch(false),
+            _canMirrorLeftRight(false),
+            _clearMixingSurface(false),
+            _deliverInScreenType(false),
+            _renderModeWaitForCorrectScanLine(false),
+            _deliverInHalfFrameRate(false),
+            _deliverInQuarterFrameRate(false),
+            _bCanBltFourcc(true),
+            _frameChanged(false),
+            _processCount(0),
+            _hWnd(hWnd),
+            _screenRect(),
+            _mixingRect(),
+
+            _incomingVideoType(kUnknown),
+            _blitVideoType(kUnknown),
+            _rgbVideoType(kUnknown),
+
+            _directDraw(NULL),
+            _primarySurface(NULL),
+            _backSurface(NULL),
+            _mixingSurface(NULL),
+            _bitmapSettings(),
+            _textSettings(),
+            _directDrawChannels(),
+            _directDrawZorder(),
+
+            _fullScreenWaitEvent(EventWrapper::Create()),
+            _screenEvent(EventWrapper::Create()),
+            _screenRenderThread(
+                                ThreadWrapper::CreateThread(
+                                                            RemoteRenderingThreadProc,
+                                                            this,
+                                                            kRealtimePriority,
+                                                            "Video_directdraw_thread")),
+            _blit(true), _lastRenderModeCpuUsage(-1), _totalMemory(-1),
+            _availableMemory(-1), _systemCPUUsage(0), _maxAllowedRenderTime(0),
+            _nrOfTooLongRenderTimes(0),
+            _isPrimaryOrMixingSurfaceOnSystem(false)
+{
+    SetRect(&_screenRect, 0, 0, 0, 0);
+    SetRect(&_mixingRect, 0, 0, 0, 0);
+    SetRect(&_originalHwndRect, 0, 0, 0, 0);
+    ::GetClientRect(_hWnd, &_hwndRect);
+}
+
+VideoRenderDirectDraw::~VideoRenderDirectDraw()
+{
+    ThreadWrapper* temp = _screenRenderThread;
+    _screenRenderThread = NULL;
+    if (temp)
+    {
+        temp->SetNotAlive();
+        _screenEvent->Set();
+        _screenEvent->StopTimer();
+        _fullScreenWaitEvent->StopTimer();
+
+        if (temp->Stop())
+        {
+            delete temp;
+        }
+    }
+    delete _screenEvent;
+    delete _fullScreenWaitEvent;
+
+    std::map<int, DirectDrawChannel*>::iterator it;
+    it = _directDrawChannels.begin();
+    while (it != _directDrawChannels.end())
+    {
+        it->second->Release();
+        it = _directDrawChannels.erase(it);
+    }
+    if (_primarySurface)
+    {
+        _primarySurface->Release();
+    }
+    if (_mixingSurface)
+    {
+        _mixingSurface->Release();
+    }
+
+    std::map<unsigned char, DirectDrawBitmapSettings*>::iterator bitIt;
+
+    bitIt = _bitmapSettings.begin();
+    while (_bitmapSettings.end() != bitIt)
+    {
+        delete bitIt->second;
+        bitIt = _bitmapSettings.erase(bitIt);
+    }
+
+    std::map<unsigned char, DirectDrawTextSettings*>::iterator textIt;
+    textIt = _textSettings.begin();
+    while (_textSettings.end() != textIt)
+    {
+        delete textIt->second;
+        textIt = _textSettings.erase(textIt);
+    }
+    if (_directDraw)
+    {
+        _directDraw->Release();
+        if (_fullscreen)
+        {
+            // restore hwnd to original size and position
+            ::SetWindowPos(_hWnd, HWND_NOTOPMOST, _originalHwndRect.left,
+                           _originalHwndRect.top, _originalHwndRect.right
+                                   - _originalHwndRect.left,
+                           _originalHwndRect.bottom - _originalHwndRect.top,
+                           SWP_FRAMECHANGED);
+            ::RedrawWindow(_hWnd, NULL, NULL, RDW_INVALIDATE | RDW_UPDATENOW
+                    | RDW_ERASE);
+            ::RedrawWindow(NULL, NULL, NULL, RDW_INVALIDATE | RDW_UPDATENOW
+                    | RDW_ERASE);
+        }
+    }
+    delete _confCritSect;
+}
+
+WebRtc_Word32 VideoRenderDirectDraw::Init()
+{
+    int retVal = 0;
+    HRESULT ddrval = DirectDrawCreateEx(NULL, (void**) &_directDraw,
+                                        IID_IDirectDraw7, NULL);
+    if (FAILED(ddrval) || NULL == _directDraw)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Failed to created DirectDraw7 object");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+    retVal = CheckCapabilities();
+    if (retVal != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw CheckCapabilities failed");
+        return retVal;
+    }
+    if (_hWnd)
+    {
+        retVal = CreatePrimarySurface();
+        if (retVal != 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                         "DirectDraw failed to CreatePrimarySurface");
+            return retVal;
+        }
+        retVal = CreateMixingSurface();
+        if (retVal != 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                         "DirectDraw failed to CreateMixingSurface");
+            return retVal;
+        }
+        if (_screenRenderThread)
+        {
+            unsigned int tid;
+            _screenRenderThread->Start(tid);
+            WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                         "Screen Render thread started, thread id: %d", tid);
+        }
+        DWORD freq = 0;
+        _directDraw->GetMonitorFrequency(&freq);
+        if (freq == 0)
+        {
+            freq = 60;
+        }
+        // Do this now to not do it in each render process loop
+        _maxAllowedRenderTime = (int) (1000 / freq * 0.8F);
+        _nrOfTooLongRenderTimes = 0;
+
+        _screenEvent->StartTimer(true, 1000 / freq);
+
+        _deliverInScreenType = false;
+        _renderModeWaitForCorrectScanLine = false;
+        _deliverInHalfFrameRate = false;
+        _deliverInQuarterFrameRate = false;
+
+        _lastRenderModeCpuUsage = -1;
+        if (_fullscreen)
+        {
+            _fullScreenWaitEvent->StartTimer(true, 1);
+        }
+
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "Screen freq %d", freq);
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                 "Created DirectDraw object");
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderDirectDraw::GetGraphicsMemory(
+                                                           WebRtc_UWord64& totalMemory,
+                                                           WebRtc_UWord64& availableMemory)
+{
+    CriticalSectionScoped cs(_confCritSect);
+
+    if (_totalMemory == -1 || _availableMemory == -1)
+    {
+        totalMemory = 0;
+        availableMemory = 0;
+        return -1;
+    }
+    totalMemory = _totalMemory;
+    availableMemory = _availableMemory;
+    return 0;
+}
+
+int VideoRenderDirectDraw::GetScreenResolution(int& screenWidth,
+                                                   int& screenHeight)
+{
+    CriticalSectionScoped cs(_confCritSect);
+
+    screenWidth = _screenRect.right - _screenRect.left;
+    screenHeight = _screenRect.bottom - _screenRect.top;
+    return 0;
+}
+
+int VideoRenderDirectDraw::UpdateSystemCPUUsage(int systemCPU)
+{
+    CriticalSectionScoped cs(_confCritSect);
+    if (systemCPU <= 100 && systemCPU >= 0)
+    {
+        _systemCPUUsage = systemCPU;
+    }
+    return 0;
+}
+
+int VideoRenderDirectDraw::CheckCapabilities()
+{
+    HRESULT ddrval = DD_OK;
+    DDCAPS ddcaps;
+    DDCAPS ddcapsEmul;
+    memset(&ddcaps, 0, sizeof(ddcaps));
+    memset(&ddcapsEmul, 0, sizeof(ddcapsEmul));
+    ddcaps.dwSize = sizeof(ddcaps);
+    ddcapsEmul.dwSize = sizeof(ddcapsEmul);
+    if (_directDraw == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw object not created");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+    if (IsRectEmpty(&_screenRect))
+    {
+        ::GetWindowRect(GetDesktopWindow(), &_screenRect);
+    }
+    // Log Screen resolution
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                 "ScreenRect. Top: %d, left: %d, bottom: %d, right: %d",
+                 _screenRect.top, _screenRect.left, _screenRect.bottom,
+                 _screenRect.right);
+
+    bool fullAccelerationEnabled = false;
+    bool badDriver = false;
+    VideoRenderWindowsImpl::CheckHWDriver(badDriver, fullAccelerationEnabled);
+    if (!fullAccelerationEnabled)
+    {
+
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "Direct draw Hardware acceleration is not enabled.");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_HWACC_NOT_ENABLED;
+
+    }
+
+    // ddcaps supported by the HW
+    // ddcapsEmul supported by the OS emulating the HW
+    ddrval = _directDraw->GetCaps(&ddcaps, &ddcapsEmul);
+    if (ddrval != DD_OK)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw HW: could not get capabilities: %x", ddrval);
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+
+    unsigned int minVideoMemory = 3 * 4 * (_screenRect.right
+            * _screenRect.bottom); // assuming ARGB size (4 bytes)
+
+    // Store the memory for possible calls to GetMemory()
+    _totalMemory = ddcaps.dwVidMemTotal;
+    _availableMemory = ddcaps.dwVidMemFree;
+
+    if (ddcaps.dwVidMemFree < minVideoMemory)
+    {
+        WEBRTC_TRACE(
+                     kTraceError,
+                     kTraceVideo,
+                     -1,
+                     "DirectDraw HW does not have enough memory, freeMem:%d, requiredMem:%d",
+                     ddcaps.dwVidMemFree, minVideoMemory);
+        // If memory is not available on the Video Card...allocate it on RAM
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw video memory, freeMem:%d, totalMem:%d",
+                     ddcaps.dwVidMemFree, ddcaps.dwVidMemTotal);
+    }
+
+    /*
+     DirectDrawCaps       ddsCaps ;
+     ZeroMemory(&ddsCaps, sizeof(ddsCaps)) ;
+     ddsCaps.dwCaps  = DDSCAPS_VIDEOMEMORY;
+     DWORD memTotal=0;
+     DWORD memFree=0;
+     ddrval = _directDraw->GetAvailableVidMem(&ddsCaps, &memTotal, &memFree);
+     if(ddrval == DD_OK)
+     {
+     WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1, "DirectDraw video memory, freeMem:%d, totalMem:%d", memFree, memTotal);
+     }
+     */
+    // Determine if the hardware supports overlay deinterlacing
+    //	bCanDeinterlace = (ddcaps.dwCaps2 & DDCAPS2_CANFLIPODDEVEN) ? 1 : 0;
+
+    // this fail since we check before we set the mode
+    //	bool bCanFlip =(ddcaps.dwCaps & DDSCAPS_FLIP) ? 1 : 0;
+
+    // Determine if the hardware supports colorkeying
+    _supportTransparency = (ddcaps.dwCaps & DDCAPS_COLORKEY) ? 1 : 0;
+    if (_supportTransparency)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw support colorkey");
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideo, -1,
+                     "DirectDraw don't support colorkey");
+    }
+
+    if (ddcaps.dwCaps2 & DDCAPS2_CANRENDERWINDOWED)
+    {
+        //	required for _directDraw->FlipToGDISurface();
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw support CANRENDERWINDOWED");
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw don't support CANRENDERWINDOWED");
+    }
+
+    // Determine if the hardware supports scaling during a blit
+    _canStretch = (ddcaps.dwCaps & DDCAPS_BLTSTRETCH) ? 1 : 0;
+    if (_canStretch)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw blit can stretch");
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideo, -1,
+                     "DirectDraw blit can't stretch");
+    }
+
+    _canMirrorLeftRight = (ddcaps.dwFXAlphaCaps & DDBLTFX_MIRRORLEFTRIGHT) ? 1
+            : 0;
+    if (_canMirrorLeftRight)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw mirroring is supported");
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw mirroring is not supported");
+    }
+
+    // Determine if the hardware supports color conversion during a blit
+    _bCanBltFourcc = (ddcaps.dwCaps & DDCAPS_BLTFOURCC) ? 1 : 0;
+    if (_bCanBltFourcc)
+        _bCanBltFourcc = (ddcaps.dwCKeyCaps & DDCKEYCAPS_DESTBLT) ? 1 : 0;
+
+    if (_bCanBltFourcc)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw can blit Fourcc");
+        DWORD i_codes;
+        ddrval = _directDraw->GetFourCCCodes(&i_codes, NULL);
+
+        if (i_codes > 0)
+        {
+            DWORD* pi_codes = new DWORD[i_codes];
+
+            ddrval = _directDraw->GetFourCCCodes(&i_codes, pi_codes);
+            for (unsigned int i = 0; i < i_codes && _blitVideoType
+                    != kI420; i++)
+            {
+                DWORD w = pi_codes[i];
+                switch (w)
+                {
+                    case MAKEFOURCC('I', '4', '2', '0'):
+                        //					_blitVideoType = kI420;
+                        // not enabled since its not tested
+                        WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                                     -1, "DirectDraw support I420");
+                        break;
+                    case MAKEFOURCC('I', 'Y', 'U', 'V'): // same as YV12
+                    //					_blitVideoType = kIYUV;
+                        // not enabled since its not tested
+                        WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                                     -1, "DirectDraw support IYUV");
+                        break;
+                    case MAKEFOURCC('U', 'Y', 'N', 'V'): // same shit different name
+                        WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                                     -1, "DirectDraw support UYNV");
+                        // not enabled since its not tested
+                        break;
+                    case MAKEFOURCC('Y', '4', '2', '2'): // same shit different name
+                        WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                                     -1, "DirectDraw support Y422");
+                        // not enabled since its not tested
+                        break;
+                    case MAKEFOURCC('Y', 'U', 'N', 'V'): // same shit different name
+                        WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                                     -1, "DirectDraw support YUNV");
+                        // not enabled since its not tested
+                        break;
+                    case MAKEFOURCC('Y', 'V', '1', '2'):
+                        _blitVideoType = kYV12;
+                        WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                                     -1, "DirectDraw support YV12");
+                        break;
+                    case MAKEFOURCC('Y', 'U', 'Y', '2'):
+                        if (_blitVideoType != kYV12)
+                        {
+                            _blitVideoType = kYUY2;
+                        }
+                        WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                                     -1, "DirectDraw support YUY2");
+                        break;
+                    case MAKEFOURCC('U', 'Y', 'V', 'Y'):
+                        if (_blitVideoType != kYV12)
+                        {
+                            _blitVideoType = kUYVY;
+                        }
+                        WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                                     -1, "DirectDraw support UYVY");
+                        break;
+                    default:
+                        WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                                     -1, "DirectDraw unknown blit type %x", w);
+                        break;
+                }
+            }
+            delete[] pi_codes;
+        }
+    }
+    return 0;
+}
+
+int VideoRenderDirectDraw::Stop()
+{
+    _confCritSect->Enter();
+
+    _blit = false;
+
+    _confCritSect->Leave();
+    return 0;
+}
+
+bool VideoRenderDirectDraw::IsPrimaryOrMixingSurfaceOnSystem()
+{
+    return _isPrimaryOrMixingSurfaceOnSystem;
+}
+
+int VideoRenderDirectDraw::CreatePrimarySurface()
+{
+    // Create the primary surface
+    DirectDrawSurfaceDesc ddsd;
+    ZeroMemory(&ddsd, sizeof(ddsd));
+    ddsd.dwSize = sizeof(ddsd);
+    HRESULT ddrval = DD_OK;
+
+    if (_directDraw == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw object not created");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+    if (_primarySurface)
+    {
+        _primarySurface->Release();
+        _primarySurface = NULL;
+    }
+
+    if (!_fullscreen)
+    {
+        // create a normal window
+        ddrval = _directDraw->SetCooperativeLevel(_hWnd, DDSCL_NORMAL);
+        if (FAILED(ddrval))
+        {
+            //******** Potential workaround for D#4608 *************** Ignore error.
+            WEBRTC_TRACE(kTraceWarning, kTraceVideo, -1,
+                         "DirectDraw failed to set SetCooperativeLevel %x, ddrval");
+        }
+        // we cant size the primary surface based on _hwndRect
+        ddsd.dwFlags = DDSD_CAPS;
+        ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_VIDEOMEMORY;
+
+#ifndef NOGRAPHICSCARD_MEMORY
+        ddrval = _directDraw->CreateSurface(&ddsd, &_primarySurface, NULL);
+        if (FAILED(ddrval))
+        {
+            WEBRTC_TRACE(
+                         kTraceError,
+                         kTraceVideo,
+                         -1,
+                         "DirectDraw failed to CreateSurface _primarySurface using VideoMemory: 0x%x",
+                         ddrval);
+            WEBRTC_TRACE(
+                         kTraceError,
+                         kTraceVideo,
+                         -1,
+                         "\t HWND: 0x%x, top: %d, left: %d, bottom: %d, right: %d, dwFlags: %d. Line : %d",
+                         _hWnd, _hwndRect.top, _hwndRect.left,
+                         _hwndRect.bottom, _hwndRect.right, ddsd.dwFlags,
+                         __LINE__);
+
+#endif
+            //allocate using System memory
+            ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_SYSTEMMEMORY;
+            ddrval = _directDraw->CreateSurface(&ddsd, &_primarySurface, NULL);
+            if (FAILED(ddrval))
+            {
+                WEBRTC_TRACE(
+                             kTraceError,
+                             kTraceVideo,
+                             -1,
+                             "DirectDraw failed to CreateSurface _primarySurface using SystemMemory: 0x%x",
+                             ddrval);
+                if (ddrval != 0x887600E1)
+                {
+                    _directDraw->Release();
+                    _directDraw = 0;
+                }
+                return -1;
+                //return VIDEO_DIRECT_DRAW_FAILURE;
+            }
+            _isPrimaryOrMixingSurfaceOnSystem = true;
+            WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                         "DirectDraw _primarySurface on SystemMemory");
+
+#ifndef NOGRAPHICSCARD_MEMORY
+        }
+#endif
+
+        // Create a clipper to ensure that our drawing stays inside our window
+        LPDIRECTDRAWCLIPPER directDrawClipper;
+        ddrval = _directDraw->CreateClipper(0, &directDrawClipper, NULL );
+        if (ddrval != DD_OK)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                         "DirectDraw failed to CreateClipper");
+            _primarySurface->Release();
+            _directDraw->Release();
+            _primarySurface = 0;
+            _directDraw = 0;
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+        // setting it to our hwnd gives the clipper the coordinates from our window
+        // when using cliplist we run into problem with transparent HWNDs (such as REX)
+        ddrval = directDrawClipper->SetHWnd(0, _hWnd);
+        if (ddrval != DD_OK)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                         "DirectDraw failed to SetHWnd");
+            _primarySurface->Release();
+            _directDraw->Release();
+            _primarySurface = 0;
+            _directDraw = 0;
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+        // attach the clipper to the primary surface
+        ddrval = _primarySurface->SetClipper(directDrawClipper);
+        directDrawClipper->Release(); // no need to keep the clipper around
+        if (ddrval != DD_OK)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                         "DirectDraw failed to SetClipper");
+            _primarySurface->Release();
+            _directDraw->Release();
+            _primarySurface = 0;
+            _directDraw = 0;
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+    }
+    else
+    {
+        /* The cooperative level determines how much control we have over the
+         * screen. This must at least be either DDSCL_EXCLUSIVE or DDSCL_NORMAL
+         *
+         * DDSCL_EXCLUSIVE allows us to change video modes, and requires
+         * the DDSCL_FULLSCREEN flag, which will cause the window to take over
+         * the fullscreen. This is the preferred DirectDraw mode because it allows
+         * us to have control of the whole screen without regard for GDI.
+         *
+         * DDSCL_NORMAL is used to allow the DirectDraw app to run windowed.
+         */
+
+        // Note: debuging in fullscreen mode does not work, thanks MS...
+        ::GetWindowRect(_hWnd, &_originalHwndRect);
+
+        // DDSCL_NOWINDOWCHANGES prevents DD to change the window but it give us trouble too, not using it
+        ddrval = _directDraw->SetCooperativeLevel(_hWnd, DDSCL_EXCLUSIVE
+                | DDSCL_FULLSCREEN | DDSCL_ALLOWREBOOT);
+
+        if (FAILED(ddrval))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                         "DirectDraw failed to SetCooperativeLevel DDSCL_EXCLUSIVE");
+            WEBRTC_TRACE(
+                         kTraceError,
+                         kTraceVideo,
+                         -1,
+                         "\t HWND: 0x%x, top: %d, left: %d, bottom: %d, right: %d, dwFlags: %d. Line : %d",
+                         _hWnd, _hwndRect.top, _hwndRect.left,
+                         _hwndRect.bottom, _hwndRect.right, ddsd.dwFlags,
+                         __LINE__);
+
+            _directDraw->Release();
+            _directDraw = 0;
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+        ddsd.dwFlags = DDSD_CAPS | DDSD_BACKBUFFERCOUNT;
+        ddsd.ddsCaps.dwCaps = DDSCAPS_PRIMARYSURFACE | DDSCAPS_FLIP
+                | DDSCAPS_COMPLEX | DDSCAPS_VIDEOMEMORY;
+        ddsd.dwBackBufferCount = 1;
+
+        ddrval = _directDraw->CreateSurface(&ddsd, &_primarySurface, NULL);
+        if (FAILED(ddrval))
+        {
+            WEBRTC_TRACE(
+                         kTraceError,
+                         kTraceVideo,
+                         -1,
+                         "DirectDraw failed to CreateSurface _primarySurface, fullscreen mode: 0x%x",
+                         ddrval);
+            WEBRTC_TRACE(
+                         kTraceError,
+                         kTraceVideo,
+                         -1,
+                         "\t HWND: 0x%x, top: %d, left: %d, bottom: %d, right: %d, dwFlags: %d. Line : %d",
+                         _hWnd, _hwndRect.top, _hwndRect.left,
+                         _hwndRect.bottom, _hwndRect.right, ddsd.dwFlags,
+                         __LINE__);
+
+            _directDraw->Release();
+            _directDraw = 0;
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+        // Get a pointer to the back buffer
+        DirectDrawCaps ddsCaps;
+        ZeroMemory(&ddsCaps, sizeof(ddsCaps));
+        ddsCaps.dwCaps = DDSCAPS_BACKBUFFER | DDSCAPS_VIDEOMEMORY;
+
+        ddrval = _primarySurface->GetAttachedSurface(&ddsCaps, &_backSurface);
+        if (FAILED(ddrval))
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                         "DirectDraw failed to GetAttachedSurface, fullscreen mode ");
+            WEBRTC_TRACE(
+                         kTraceError,
+                         kTraceVideo,
+                         -1,
+                         "\t HWND: 0x%x, top: %d, left: %d, bottom: %d, right: %d, dwFlags: %d. Line : %d",
+                         _hWnd, _hwndRect.top, _hwndRect.left,
+                         _hwndRect.bottom, _hwndRect.right, ddsd.dwFlags,
+                         __LINE__);
+
+            _primarySurface->Release();
+            _directDraw->Release();
+            _primarySurface = 0;
+            _directDraw = 0;
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+        // Get the screen size and save it as a rect
+        ZeroMemory(&ddsd, sizeof(ddsd));
+        ddsd.dwSize = sizeof(ddsd);
+    }
+
+    ZeroMemory(&ddsd, sizeof(ddsd));
+    ddsd.dwSize = sizeof(ddsd);
+
+    // get our prinmary surface description
+    ddrval = _primarySurface->GetSurfaceDesc(&ddsd);
+    if (!(SUCCEEDED(ddrval) && (ddsd.dwFlags & DDSD_WIDTH) && (ddsd.dwFlags
+            & DDSD_HEIGHT)))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw failed to GetSurfaceDesc _primarySurface");
+        WEBRTC_TRACE(
+                     kTraceError,
+                     kTraceVideo,
+                     -1,
+                     "\t HWND: 0x%x, top: %d, left: %d, bottom: %d, right: %d, dwFlags: %d. Line : %d",
+                     _hWnd, _hwndRect.top, _hwndRect.left, _hwndRect.bottom,
+                     _hwndRect.right, ddsd.dwFlags, __LINE__);
+
+        _primarySurface->Release();
+        _directDraw->Release();
+        _primarySurface = 0;
+        _directDraw = 0;
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+    // first we need to figure out the size of the primary surface
+
+    // store screen size
+    ::SetRect(&_screenRect, 0, 0, ddsd.dwWidth, ddsd.dwHeight);
+
+    // store RGB type
+    if (ddsd.ddpfPixelFormat.dwFlags & DDPF_RGB)
+    {
+        // RGB surface
+        switch (ddsd.ddpfPixelFormat.dwRGBBitCount)
+        {
+            case 16:
+                switch (ddsd.ddpfPixelFormat.dwGBitMask)
+                {
+                    case 0x00e0:
+                        _rgbVideoType = kARGB4444;
+                        break;
+                    case 0x03e0:
+                        _rgbVideoType = kARGB1555;
+                        break;
+                    case 0x07e0:
+                        _rgbVideoType = kRGB565;
+                        break;
+                }
+                break;
+            case 24:
+                _rgbVideoType = kRGB24;
+                break;
+            case 32:
+                _rgbVideoType = kARGB;
+                break;
+        }
+    }
+    switch (_blitVideoType)
+    {
+        case kI420:
+        case kIYUV:
+        case kYUY2:
+        case kYV12:
+        case kUYVY:
+            _incomingVideoType = kI420;
+            break;
+        case kUnknown:
+            _blitVideoType = _rgbVideoType;
+            _incomingVideoType = kI420;
+            break;
+        default:
+            _blitVideoType = _rgbVideoType;
+            _incomingVideoType = kI420;
+            break;
+    }
+    WEBRTC_TRACE(
+                 kTraceInfo,
+                 kTraceVideo,
+                 -1,
+                 "DirectDraw created _primarySurface, _blitVideoType %d, _rgbvideoType %d",
+                 _blitVideoType, _rgbVideoType);
+    return 0;
+}
+
+int VideoRenderDirectDraw::CreateMixingSurface()
+{
+    if (_directDraw == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw object not created");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+
+    if (_fullscreen)
+    {
+        ::CopyRect(&_hwndRect, &_screenRect);
+    }
+    else
+    {
+        // update our _hWnd size
+        ::GetClientRect(_hWnd, &_hwndRect);
+    }
+
+    if (_mixingSurface)
+    {
+        _mixingSurface->Release();
+        _mixingSurface = NULL;
+    }
+    // create mixing surface
+    DirectDrawSurfaceDesc ddsd;
+    memset(&ddsd, 0, sizeof(ddsd));
+    ddsd.dwSize = sizeof(ddsd);
+    ddsd.dwFlags = DDSD_CAPS | DDSD_HEIGHT | DDSD_WIDTH;
+    ddsd.ddsCaps.dwCaps = DDSCAPS_VIDEOMEMORY;
+    ddsd.dwHeight = _hwndRect.bottom;
+    ddsd.dwWidth = _hwndRect.right;
+
+    /*    char logStr[256];
+     _snprintf(logStr,256, "CreateMixingSurface H:%d W:%d \n",_hwndRect.bottom, _hwndRect.right);
+     OutputDebugString(logStr);
+     */
+
+#ifndef NOGRAPHICSCARD_MEMORY
+    HRESULT ddrval = _directDraw->CreateSurface(&ddsd, &_mixingSurface, NULL);
+    if (FAILED(ddrval))
+    {
+        WEBRTC_TRACE(
+                     kTraceError,
+                     kTraceVideo,
+                     -1,
+                     "DirectDraw failed to CreateSurface _mixingSurface using VideoMemory: 0x%x",
+                     ddrval);
+        WEBRTC_TRACE(
+                     kTraceError,
+                     kTraceVideo,
+                     -1,
+                     "\t HWND: 0x%x, top: %d, left: %d, bottom: %d, right: %d, dwFlags: %d",
+                     _hWnd, _hwndRect.top, _hwndRect.left, _hwndRect.bottom,
+                     _hwndRect.right, ddsd.dwFlags);
+#endif
+
+        ddsd.ddsCaps.dwCaps = DDSCAPS_SYSTEMMEMORY;
+        HRESULT ddrval = _directDraw->CreateSurface(&ddsd, &_mixingSurface,
+                                                    NULL);
+        if (FAILED(ddrval))
+        {
+            WEBRTC_TRACE(
+                         kTraceError,
+                         kTraceVideo,
+                         -1,
+                         "DirectDraw failed to CreateSurface _mixingSurface on System Memory: 0x%x",
+                         ddrval);
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+        _isPrimaryOrMixingSurfaceOnSystem = true;
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw CreateSurface _mixingSurface on SystemMemory");
+
+#ifndef NOGRAPHICSCARD_MEMORY        
+    }
+#endif
+
+    _clearMixingSurface = true;
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                 "DirectDraw _mixingSurface created");
+    return 0;
+}
+
+VideoRenderCallback* VideoRenderDirectDraw::CreateChannel(WebRtc_UWord32 channel,
+                                                                  WebRtc_UWord32 zOrder,
+                                                                  float startWidth,
+                                                                  float startHeight,
+                                                                  float stopWidth,
+                                                                  float stopHeight)
+{
+    if (!_canStretch)
+    {
+        if (startWidth != 0.0f || startHeight != 0.0f || stopWidth != 1.0f
+                || stopHeight != 1.0f)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                         "DirectDraw failed to CreateChannel HW don't support stretch");
+            return NULL;
+        }
+    }
+    DirectDrawChannel* ddobj =
+            new DirectDrawChannel(_directDraw, _blitVideoType,
+                                      _incomingVideoType, _rgbVideoType, this);
+    ddobj->SetStreamSettings(this, 0, startWidth, startHeight, stopWidth,
+                             stopHeight);
+
+    // store channel
+    _directDrawChannels[channel & 0x0000ffff] = ddobj;
+
+    // store Z order
+    // default streamID is 0
+    _directDrawZorder.insert(ZorderPair(zOrder, channel & 0x0000ffff));
+    return ddobj;
+}
+
+int VideoRenderDirectDraw::AddDirectDrawChannel(int channel,
+                                                    unsigned char streamID,
+                                                    int zOrder,
+                                                    DirectDrawChannel* ddObj)
+{
+    // Only allow one stream per channel, demuxing is done outside of DirectDraw...
+    streamID = 0;
+    unsigned int streamChannel = (streamID << 16) + (channel & 0x0000ffff);
+
+    // store channel
+    _directDrawChannels[channel & 0x0000ffff] = ddObj;
+
+    _demuxing = true; // with this function it's always demux
+
+    // store Z order
+    _directDrawZorder.insert(ZorderPair(zOrder, streamChannel));
+    return 0;
+}
+
+DirectDrawChannel* VideoRenderDirectDraw::ShareDirectDrawChannel(
+                                                                         int channel)
+{
+    CriticalSectionScoped cs(_confCritSect);
+
+    DirectDrawChannel* obj = NULL;
+
+    std::map<int, DirectDrawChannel*>::iterator ddIt;
+    ddIt = _directDrawChannels.find(channel & 0x0000ffff);
+    if (ddIt != _directDrawChannels.end())
+    {
+        obj = ddIt->second;
+        obj->AddRef();
+    }
+    return obj;
+}
+
+WebRtc_Word32 VideoRenderDirectDraw::DeleteChannel(const WebRtc_UWord32 channel)
+{
+    CriticalSectionScoped cs(_confCritSect);
+
+    // Remove the old z order
+
+    //unsigned int streamChannel = (streamID << 16) + (channel & 0x0000ffff);	
+    std::multimap<int, unsigned int>::iterator it;
+    it = _directDrawZorder.begin();
+    while (it != _directDrawZorder.end())
+    {
+        //if(streamChannel == it->second )
+        if ((channel & 0x0000ffff) == (it->second & 0x0000ffff))
+        {
+            it = _directDrawZorder.erase(it);
+            break;
+        }
+        it++;
+    }
+
+    std::map<int, DirectDrawChannel*>::iterator ddIt;
+    ddIt = _directDrawChannels.find(channel & 0x0000ffff);
+    if (ddIt != _directDrawChannels.end())
+    {
+        ddIt->second->Release();
+        _directDrawChannels.erase(ddIt);
+        _clearMixingSurface = true;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderDirectDraw::GetStreamSettings(const WebRtc_UWord32 channel,
+                                                           const WebRtc_UWord16 streamId,
+                                                           WebRtc_UWord32& zOrder,
+                                                           float& startWidth,
+                                                           float& startHeight,
+                                                           float& stopWidth,
+                                                           float& stopHeight)
+{
+    CriticalSectionScoped cs(_confCritSect);
+
+    std::map<int, DirectDrawChannel*>::iterator ddIt;
+    ddIt = _directDrawChannels.find(channel & 0x0000ffff);
+    if (ddIt == _directDrawChannels.end())
+    {
+        // This channel doesn't exist.
+        return -1;
+    }
+
+    DirectDrawChannel* ptrChannel = ddIt->second;
+    // Only support one stream per channel, is demuxing done outside if DD.
+    //if (ptrChannel->GetStreamSettings(this, streamId, startWidth, startHeight, stopWidth, stopHeight) == -1)
+    if (ptrChannel->GetStreamSettings(this, 0, startWidth, startHeight,
+                                      stopWidth, stopHeight) == -1)
+    {
+        // Error for this stream
+        return -1;
+    }
+
+    // Get the zOrder
+    std::multimap<int, unsigned int>::iterator it;
+    it = _directDrawZorder.begin();
+    while (it != _directDrawZorder.end())
+    {
+        if ((channel & 0x0000ffff) == (it->second & 0x0000ffff))
+        {
+            // We found our channel zOrder
+            zOrder = (unsigned int) (it->first);
+            break;
+        }
+        it++;
+    }
+
+    return 0;
+}
+
+int VideoRenderDirectDraw::GetChannels(std::list<int>& channelList)
+{
+    CriticalSectionScoped cs(_confCritSect);
+
+    std::map<int, DirectDrawChannel*>::iterator ddIt;
+    ddIt = _directDrawChannels.begin();
+
+    while (ddIt != _directDrawChannels.end())
+    {
+        int channel = ddIt->first;
+        if (channel == 0x0000ffff)
+        {
+            channel = -1;
+        }
+        channelList.push_back(channel);
+        ddIt++;
+    }
+    return 0;
+}
+
+bool VideoRenderDirectDraw::HasChannel(int channel)
+{
+    CriticalSectionScoped cs(_confCritSect);
+
+    std::map<int, DirectDrawChannel*>::iterator ddIt;
+    ddIt = _directDrawChannels.find(channel & 0x0000ffff);
+    if (ddIt != _directDrawChannels.end())
+    {
+        return true;
+    }
+    return false;
+}
+
+bool VideoRenderDirectDraw::HasChannels()
+{
+    CriticalSectionScoped cs(_confCritSect);
+
+    if (_directDrawChannels.begin() != _directDrawChannels.end())
+    {
+        return true;
+    }
+    return false;
+}
+
+bool VideoRenderDirectDraw::IsFullScreen()
+{
+    return _fullscreen;
+}
+
+VideoType VideoRenderDirectDraw::GetPerferedVideoFormat()
+{
+    return _incomingVideoType;
+}
+
+// this can be called rutime from another thread
+DirectDrawChannel* VideoRenderDirectDraw::ConfigureDirectDrawChannel(int channel,
+                                                                             unsigned char streamID,
+                                                                             int zOrder,
+                                                                             float left,
+                                                                             float top,
+                                                                             float right,
+                                                                             float bottom)
+{
+    // Only support one stream per channel, is demuxing done outside if DD.
+    streamID = 0;
+
+    CriticalSectionScoped cs(_confCritSect);
+
+    if (!_canStretch)
+    {
+        if (left != 0.0f || top != 0.0f || right != 1.0f || bottom != 1.0f)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                         "DirectDraw failed to ConfigureDirectDrawChannel HW don't support stretch");
+            return NULL;
+        }
+    }
+    std::map<int, DirectDrawChannel*>::iterator ddIt;
+    ddIt = _directDrawChannels.find(channel & 0x0000ffff);
+    DirectDrawChannel* ddobj = NULL;
+    if (ddIt != _directDrawChannels.end())
+    {
+        ddobj = ddIt->second;
+    }
+    if (ddobj == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                     "DirectDraw failed to find channel");
+        return NULL;
+    }
+    unsigned int streamChannel = (streamID << 16) + (channel & 0x0000ffff);
+    // remove the old z order
+    std::multimap<int, unsigned int>::iterator it;
+    it = _directDrawZorder.begin();
+    while (it != _directDrawZorder.end())
+    {
+        if (streamChannel == it->second)
+        {
+            it = _directDrawZorder.erase(it);
+            break;
+        }
+        it++;
+    }
+    // if this channel already are in the zOrder map it's demux
+    it = _directDrawZorder.begin();
+    while (it != _directDrawZorder.end())
+    {
+        if (channel == (it->second & 0x0000ffff))
+        {
+            _demuxing = true;
+            break;
+        }
+        it++;
+    }
+    if (it == _directDrawZorder.end())
+    {
+        _demuxing = false;
+    }
+
+    _clearMixingSurface = true;
+
+    if (left == 0.0f && top == 0.0f && right == 0.0f && bottom == 0.0f)
+    {
+        // remove
+        _directDrawChannels.erase(ddIt);
+        ddobj->Release();
+        return NULL;
+    }
+    ddobj->SetStreamSettings(this, streamID, left, top, right, bottom);
+
+    _directDrawZorder.insert(ZorderPair(zOrder, streamChannel));
+    return ddobj;
+}
+
+WebRtc_Word32 VideoRenderDirectDraw::SetCropping(const WebRtc_UWord32 channel,
+                                                     const WebRtc_UWord16 streamID,
+                                                     float left, float top,
+                                                     float right, float bottom)
+{
+    CriticalSectionScoped cs(_confCritSect);
+    if (!_canStretch)
+    {
+        if (left != 0.0f || top != 0.0f || right != 1.0f || bottom != 1.0f)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, -1,
+                         "DirectDraw failed to SetCropping HW don't support stretch");
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+    }
+
+    std::map<int, DirectDrawChannel*>::iterator ddIt;
+    ddIt = _directDrawChannels.find(channel & 0x0000ffff);
+    if (ddIt != _directDrawChannels.end())
+    {
+        DirectDrawChannel* ddobj = ddIt->second;
+        if (ddobj)
+        {
+            // Only support one stream per channel, is demuxing done outside if DD.
+            ddobj->SetStreamCropSettings(this, 0, left, top, right, bottom);
+            //ddobj->SetStreamCropSettings(this, streamID, left, top, right, bottom);
+        }
+    }
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderDirectDraw::ConfigureRenderer(const WebRtc_UWord32 channel,
+                                                           const WebRtc_UWord16 streamId,
+                                                           const unsigned int zOrder,
+                                                           const float left,
+                                                           const float top,
+                                                           const float right,
+                                                           const float bottom)
+{
+    if (ConfigureDirectDrawChannel(channel, (unsigned char) streamId, zOrder,
+                                   left, top, right, bottom) == NULL)
+    {
+        if (left == 0.0f && top == 0.0f && right == 0.0f && bottom == 0.0f)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, -1,
+                         "ConfigureRender, removed channel:%d streamId:%d",
+                         channel, streamId);
+        }
+        else
+        {
+            WEBRTC_TRACE(
+                         kTraceError,
+                         kTraceVideoRenderer,
+                         -1,
+                         "DirectDraw failed to ConfigureRenderer for channel: %d",
+                         channel);
+            return -1;
+        }
+    }
+    return 0;
+}
+
+// this can be called runtime from another thread
+WebRtc_Word32 VideoRenderDirectDraw::SetText(const WebRtc_UWord8 textId,
+                                                 const WebRtc_UWord8* text,
+                                                 const WebRtc_Word32 textLength,
+                                                 const WebRtc_UWord32 colorText,
+                                                 const WebRtc_UWord32 colorBg,
+                                                 const float left,
+                                                 const float top,
+                                                 const float right,
+                                                 const float bottom)
+{
+    DirectDrawTextSettings* textSetting = NULL;
+
+    CriticalSectionScoped cs(_confCritSect);
+
+    _frameChanged = true;
+
+    std::map<unsigned char, DirectDrawTextSettings*>::iterator it;
+    it = _textSettings.find(textId);
+    if (it != _textSettings.end())
+    {
+        if (it->second)
+        {
+            textSetting = it->second;
+        }
+    }
+    _clearMixingSurface = true;
+
+    if (text == NULL || textLength == 0)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw remove text textId:%d", textId);
+        if (textSetting)
+        {
+            delete textSetting;
+            _textSettings.erase(it);
+        }
+        return 0;
+    }
+
+    // sanity
+    if (left > 1.0f || left < 0.0f)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw SetText invalid parameter");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_INVALID_ARG;
+    }
+    if (top > 1.0f || top < 0.0f)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw SetText invalid parameter");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_INVALID_ARG;
+    }
+    if (right > 1.0f || right < 0.0f)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw SetText invalid parameter");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_INVALID_ARG;
+    }
+    if (bottom > 1.0f || bottom < 0.0f)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw SetText invalid parameter");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_INVALID_ARG;
+    }
+    if (textSetting == NULL)
+    {
+        textSetting = new DirectDrawTextSettings();
+    }
+    int retVal = textSetting->SetText((const char*) text, textLength,
+                                      (COLORREF) colorText, (COLORREF) colorBg,
+                                      left, top, right, bottom);
+    if (retVal != 0)
+    {
+        delete textSetting;
+        textSetting = NULL;
+        _textSettings.erase(textId);
+        return retVal;
+    }
+    if (textSetting)
+    {
+        _textSettings[textId] = textSetting;
+    }
+    return retVal;
+}
+
+// this can be called runtime from another thread
+WebRtc_Word32 VideoRenderDirectDraw::SetBitmap(const void* bitMap,
+                                                   const WebRtc_UWord8 pictureId,
+                                                   const void* colorKey,
+                                                   const float left,
+                                                   const float top,
+                                                   const float right,
+                                                   const float bottom)
+{
+    DirectDrawBitmapSettings* bitmapSetting = NULL;
+
+    CriticalSectionScoped cs(_confCritSect);
+
+    _frameChanged = true;
+    std::map<unsigned char, DirectDrawBitmapSettings*>::iterator it;
+    it = _bitmapSettings.find(pictureId);
+    if (it != _bitmapSettings.end())
+    {
+        if (it->second)
+        {
+            bitmapSetting = it->second;
+        }
+    }
+    _clearMixingSurface = true;
+
+    if (bitMap == NULL)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw remove bitmap pictureId:%d", pictureId);
+        if (bitmapSetting)
+        {
+            delete bitmapSetting;
+            _bitmapSettings.erase(it);
+        }
+        return 0;
+    }
+
+    // sanity
+    if (left > 1.0f || left < 0.0f)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw SetBitmap invalid parameter");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_INVALID_ARG;
+    }
+    if (top > 1.0f || top < 0.0f)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw SetBitmap invalid parameter");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_INVALID_ARG;
+    }
+    if (right > 1.0f || right < 0.0f)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw SetBitmap invalid parameter");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_INVALID_ARG;
+    }
+    if (bottom > 1.0f || bottom < 0.0f)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw SetBitmap invalid parameter");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_INVALID_ARG;
+    }
+    if (!_canStretch)
+    {
+        if (left != 0.0f || top != 0.0f || right != 1.0f || bottom != 1.0f)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                         "DirectDraw failed to SetBitmap HW don't support stretch");
+            return -1;
+            //return VIDEO_DIRECT_DRAW_INVALID_ARG;
+        }
+    }
+    if (bitmapSetting == NULL)
+    {
+        bitmapSetting = new DirectDrawBitmapSettings();
+    }
+
+    bitmapSetting->_transparentBitMap = (HBITMAP) bitMap;
+    bitmapSetting->_transparentBitmapLeft = left;
+    bitmapSetting->_transparentBitmapRight = right;
+    bitmapSetting->_transparentBitmapTop = top;
+    bitmapSetting->_transparentBitmapBottom = bottom;
+
+    // colorKey == NULL equals no transparency
+    if (colorKey)
+    {
+        // first remove constness
+        DDCOLORKEY* ddColorKey =
+                static_cast<DDCOLORKEY*> (const_cast<void*> (colorKey));
+        if (!_supportTransparency)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                         "DirectDraw failed to SetBitmap HW don't support transparency");
+            return -1;
+            //return VIDEO_DIRECT_DRAW_INVALID_ARG;
+        }
+        if (bitmapSetting->_transparentBitmapColorKey == NULL)
+        {
+            bitmapSetting->_transparentBitmapColorKey = new DDCOLORKEY();
+        }
+
+        if (ddColorKey)
+        {
+            bitmapSetting->_transparentBitmapColorKey->dwColorSpaceLowValue
+                    = ddColorKey->dwColorSpaceLowValue;
+            bitmapSetting->_transparentBitmapColorKey->dwColorSpaceHighValue
+                    = ddColorKey->dwColorSpaceHighValue;
+        }
+    }
+    int retval = bitmapSetting->SetBitmap(_trace, _directDraw);
+    if (retval != 0)
+    {
+        delete bitmapSetting;
+        bitmapSetting = NULL;
+        _bitmapSettings.erase(pictureId);
+        return retval;
+    }
+    if (bitmapSetting)
+    {
+        _bitmapSettings[pictureId] = bitmapSetting;
+    }
+    return retval;
+}
+
+// this can be called rutime from another thread
+WebRtc_Word32 VideoRenderDirectDraw::SetTransparentBackground(
+                                                                  const bool enable)
+{
+    CriticalSectionScoped cs(_confCritSect);
+
+    if (_supportTransparency)
+    {
+        _transparentBackground = enable;
+        if (enable)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                         "DirectDraw enabled TransparentBackground");
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                         "DirectDraw disabled TransparentBackground");
+        }
+        return 0;
+    }
+    WEBRTC_TRACE(
+                 kTraceError,
+                 kTraceVideo,
+                 -1,
+                 "DirectDraw failed to EnableTransparentBackground HW don't support transparency");
+    return -1;
+    //return VIDEO_DIRECT_DRAW_INVALID_ARG;
+}
+
+int VideoRenderDirectDraw::FillSurface(DirectDrawSurface *pDDSurface,
+                                           RECT* rect)
+{
+    // sanity checks
+    if (NULL == pDDSurface)
+    {
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+    if (NULL == rect)
+    {
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+
+    // Repaint the whole specified surface
+    HRESULT ddrval;
+    DDBLTFX ddFX;
+
+    ZeroMemory(&ddFX, sizeof(ddFX));
+    ddFX.dwSize = sizeof(ddFX);
+    ddFX.dwFillColor = RGB(0, 0, 0);
+
+    // Draw color key on the video area of given surface
+    ddrval = pDDSurface->Blt(rect, NULL, NULL, DDBLT_COLORFILL | DDBLT_WAIT,
+                             &ddFX);
+    if (FAILED(ddrval))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw failed to fill surface");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+    return 0;
+}
+
+// the real rendering thread
+bool VideoRenderDirectDraw::RemoteRenderingThreadProc(void *obj)
+{
+    return static_cast<VideoRenderDirectDraw*> (obj)->RemoteRenderingProcess();
+}
+
+bool VideoRenderDirectDraw::RemoteRenderingProcess()
+{
+    bool hwndChanged = false;
+    int waitTime = 0;
+
+    _screenEvent->Wait(100);
+
+    _confCritSect->Enter();
+
+    if (_blit == false)
+    {
+        _confCritSect->Leave();
+        return true;
+    }
+
+    if (!::GetForegroundWindow())
+    {
+        //no window, i.e the user have clicked CTRL+ALT+DEL, return true and wait
+        _confCritSect->Leave();
+        return true;
+    }
+
+    // Skip to blit if last render to primare surface took too long time.
+    _processCount++;
+    if (_deliverInQuarterFrameRate)
+    {
+        if (_processCount % 4 != 0)
+        {
+            _confCritSect->Leave();
+            return true;
+        }
+    }
+    else if (_deliverInHalfFrameRate)
+    {
+        if (_processCount % 2 != 0)
+        {
+            _confCritSect->Leave();
+            return true;
+        }
+    }
+
+    // Calculate th erender process time
+    unsigned int startProcessTime = timeGetTime();
+
+    hwndChanged = HasHWNDChanged();
+    if (hwndChanged)
+    {
+        _clearMixingSurface = true;
+    }
+
+    std::map<int, DirectDrawChannel*>::iterator it;
+    it = _directDrawChannels.begin();
+    while (it != _directDrawChannels.end() && !_frameChanged)
+    {
+        if (it->second)
+        {
+            _frameChanged = it->second->IsOffScreenSurfaceUpdated(this);
+        }
+        it++;
+    }
+    if (_backSurface)
+    {
+        if (hwndChanged || _frameChanged)
+        {
+            BlitFromOffscreenBuffersToMixingBuffer();
+            BlitFromBitmapBuffersToMixingBuffer();
+            BlitFromTextToMixingBuffer();
+        }
+        BlitFromMixingBufferToBackBuffer();
+        WaitAndFlip(waitTime);
+    }
+    else
+    {
+        if (hwndChanged || _frameChanged)
+        {
+            BlitFromOffscreenBuffersToMixingBuffer();
+            BlitFromBitmapBuffersToMixingBuffer();
+            BlitFromTextToMixingBuffer();
+        }
+        BlitFromMixingBufferToFrontBuffer(hwndChanged, waitTime);
+
+    }
+    // Check the total time it took processing all rendering. Don't consider waitTime.
+    //const int totalRenderTime=GET_TIME_IN_MS()- startProcessTime-waitTime;            
+    const int totalRenderTime = ::timeGetTime() - startProcessTime - waitTime;
+    DecideBestRenderingMode(hwndChanged, totalRenderTime);
+    _frameChanged = false;
+    _confCritSect->Leave();
+
+    return true;
+}
+void VideoRenderDirectDraw::DecideBestRenderingMode(bool hwndChanged,
+                                                        int totalRenderTime)
+{
+    /* Apply variuos fixes for bad graphic drivers.
+     1. If cpu to high- test wait fix
+     2. If cpu still too high render in 1/2 display update period.
+     3. If RemoteRenderingProcess take to long time reduce the blit period to 1/2 display update period.
+     4. If RemoteRenderingProcess still take to long time try color conversion fix. It do color conversion in VieoRenderDirectDrawChannel::DeliverFrame
+     5. If RemoteRenderingProcess still take to long time reduce the blit period to 1/4 display update period and disable color conversion fix.
+     6  if  RemoteRenderingProcess still take to long time reduce the blit period to 1/4 display update period and enable color conversion fix again.
+     */
+
+    const int timesSinceLastCPUCheck = timeGetTime()
+            - _screenRenderCpuUsage.LastGetCpuTime();
+    int cpu = 0;
+
+    if (hwndChanged) // Render window changed.
+    {
+        cpu = _screenRenderCpuUsage.GetCpuUsage(); // Get CPU usage for this thread. (Called if hwndCanged just to reset the GET CPU Usage function)
+        _nrOfTooLongRenderTimes = 0; // Reset count of too long render times.
+        return; // Return - nothing more to do since the window has changed.
+    }
+    // Check total rendering times
+    if (_maxAllowedRenderTime > 0 && totalRenderTime > _maxAllowedRenderTime)
+    {
+        if (!_deliverInHalfFrameRate || totalRenderTime > 2
+                * _maxAllowedRenderTime)
+        {
+            _nrOfTooLongRenderTimes += totalRenderTime / _maxAllowedRenderTime; //Weighted with the number of to long render times
+        }
+    }
+
+    // If we are not using back surface (ie full screen rendering) we might try to switch BlitFromMixingBufferToFrontBuffer mode. 
+    if (timesSinceLastCPUCheck > WindowsThreadCpuUsage::CPU_CHECK_INTERVAL)
+    {
+        cpu = _screenRenderCpuUsage.GetCpuUsage(); // Get CPU usage for this thread. (Called if hwndCanged just to reset the GET CPU Usage function)
+        WEBRTC_TRACE(
+                     kTraceStream,
+                     kTraceVideo,
+                     -1,
+                     "Screen render thread cpu usage. (Tid %d), cpu usage %d processTime %d, no of too long render times %d",
+                     GetCurrentThreadId(), cpu, totalRenderTime,
+                     _nrOfTooLongRenderTimes);
+
+        // If this screen render thread uses more than 5% of the total CPU time and the 
+        // 1. try waitFix     
+        if (cpu >= 5 && _renderModeWaitForCorrectScanLine == false
+                && !_backSurface)
+        {
+            WEBRTC_TRACE(
+                         kTraceWarning,
+                         kTraceVideo,
+                         -1,
+                         "HIGH screen render thread cpu usage. (Tid %d), cpu usage %d, applying wait for scan line",
+                         GetCurrentThreadId(), cpu);
+            _renderModeWaitForCorrectScanLine = true;
+            _fullScreenWaitEvent->StartTimer(true, 1);
+        }
+        else if (cpu >= 10 && _deliverInHalfFrameRate == false)
+        {
+            WEBRTC_TRACE(
+                         kTraceWarning,
+                         kTraceVideo,
+                         -1,
+                         "HIGH screen render thread cpu usage. (Tid %d), cpu usage %d, Render half rate",
+                         GetCurrentThreadId(), cpu);
+            _deliverInHalfFrameRate = true;
+        }
+        else
+        {
+            // Check if rendering takes too long time
+            if (_nrOfTooLongRenderTimes > 15 || totalRenderTime
+                    >= WindowsThreadCpuUsage::CPU_CHECK_INTERVAL)
+            {
+
+                // The rendering is taking too long time
+                if (_deliverInHalfFrameRate == false)
+                {
+                    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                                 "Render half rate, tid: %d",
+                                 GetCurrentThreadId());
+                    _deliverInHalfFrameRate = true;
+                }
+                else if (_deliverInScreenType == false
+                        && !_deliverInQuarterFrameRate)
+                {
+                    WEBRTC_TRACE(
+                                 kTraceInfo,
+                                 kTraceVideo,
+                                 -1,
+                                 "Applying deliver in screen type format, tid: %d",
+                                 GetCurrentThreadId());
+                    // 2. try RGB fix
+                    std::map<int, DirectDrawChannel*>::iterator it;
+                    it = _directDrawChannels.begin();
+                    while (it != _directDrawChannels.end())
+                    {
+                        it->second->ChangeDeliverColorFormat(true);
+                        it++;
+                    }
+                    _deliverInScreenType = true;
+                }
+                else if (_deliverInQuarterFrameRate == false)
+                {
+                    WEBRTC_TRACE(
+                                 kTraceInfo,
+                                 kTraceVideo,
+                                 -1,
+                                 "Render quarter rate and disable deliver in screen type format, tid: %d",
+                                 GetCurrentThreadId());
+                    _deliverInQuarterFrameRate = true;
+                    if (_deliverInScreenType)
+                    {
+                        //Disable  RGB fix
+                        std::map<int, DirectDrawChannel*>::iterator it;
+                        it = _directDrawChannels.begin();
+                        while (it != _directDrawChannels.end())
+                        {
+                            it->second->ChangeDeliverColorFormat(false);
+                            it++;
+                        }
+                        _deliverInScreenType = false;
+                    }
+                }
+                else if (_deliverInQuarterFrameRate == true
+                        && !_deliverInScreenType)
+                {
+                    WEBRTC_TRACE(
+                                 kTraceInfo,
+                                 kTraceVideo,
+                                 -1,
+                                 "Render quarter rate and enable RGB fix, tid: %d",
+                                 GetCurrentThreadId());
+                    _deliverInQuarterFrameRate = true;
+
+                    //Enabe  RGB fix
+                    std::map<int, DirectDrawChannel*>::iterator it;
+                    it = _directDrawChannels.begin();
+                    while (it != _directDrawChannels.end())
+                    {
+                        it->second->ChangeDeliverColorFormat(true);
+                        it++;
+                    }
+                    _deliverInScreenType = true;
+                }
+            }
+        }
+        _nrOfTooLongRenderTimes = 0; // Reset count of too long render times.
+    }
+}
+
+/*
+ *	Internal help functions for blitting
+ */
+
+bool VideoRenderDirectDraw::HasHWNDChanged()
+{
+    //	we check if the HWND has changed 
+    if (!_fullscreen)
+    {
+        RECT currentRect;
+        ::GetClientRect(_hWnd, &currentRect);
+        if (!EqualRect(&currentRect, &_hwndRect))
+        {
+            int retVal = CreateMixingSurface(); // this will delete the old mixing surface
+            if (retVal != 0)
+            {
+                return false;
+            }
+            return true;
+        }
+    }
+    return false;
+}
+
+int VideoRenderDirectDraw::BlitFromOffscreenBuffersToMixingBuffer()
+{
+    bool updateAll = false; // used to minimize the number of blt
+
+    DDBLTFX ddbltfx;
+    ZeroMemory(&ddbltfx, sizeof(ddbltfx));
+    ddbltfx.dwSize = sizeof(ddbltfx);
+    ddbltfx.dwDDFX = DDBLTFX_NOTEARING;
+
+    if (_mixingSurface == NULL)
+    {
+        int retVal = CreateMixingSurface();
+        if (retVal != 0)
+        {
+            // trace done
+            return retVal;
+        }
+    }
+    RECT mixingRect;
+    ::SetRectEmpty(&mixingRect);
+
+    if (_fullscreen)
+    {
+        ::CopyRect(&mixingRect, &_screenRect);
+    }
+    else
+    {
+        ::CopyRect(&mixingRect, &_hwndRect);
+        // what if largest size is larger than screen
+        if (mixingRect.right > _screenRect.right)
+        {
+            mixingRect.right = _screenRect.right;
+        }
+        if (mixingRect.bottom > _screenRect.bottom)
+        {
+            mixingRect.bottom = _screenRect.bottom;
+        }
+    }
+    if (!EqualRect(&_mixingRect, &mixingRect))
+    {
+        // size changed
+        CopyRect(&_mixingRect, &mixingRect);
+        FillSurface(_mixingSurface, &mixingRect);
+        updateAll = true;
+    }
+
+    if (_clearMixingSurface)
+    {
+        FillSurface(_mixingSurface, &_mixingRect);
+        _clearMixingSurface = false;
+        updateAll = true;
+    }
+
+    std::multimap<int, unsigned int>::reverse_iterator it;
+    it = _directDrawZorder.rbegin();
+    while (it != _directDrawZorder.rend())
+    {
+        // loop through all channels and streams in Z order
+        short streamID = (it->second >> 16);
+        int channel = it->second & 0x0000ffff;
+
+        std::map<int, DirectDrawChannel*>::iterator ddIt;
+        ddIt = _directDrawChannels.find(channel);
+        if (ddIt != _directDrawChannels.end())
+        {
+            // found the channel
+            DirectDrawChannel* channelObj = ddIt->second;
+            if (channelObj && _mixingSurface)
+            {
+                if (updateAll || channelObj->IsOffScreenSurfaceUpdated(this))
+                {
+                    updateAll = true;
+                    if (channelObj->BlitFromOffscreenBufferToMixingBuffer(
+                                                                          this,
+                                                                          streamID,
+                                                                          _mixingSurface,
+                                                                          _mixingRect,
+                                                                          _demuxing)
+                            != 0)
+                    {
+                        WEBRTC_TRACE(kTraceError, kTraceVideo,
+                                     -1,
+                                     "DirectDraw error BlitFromOffscreenBufferToMixingBuffer ");
+                        _mixingSurface->Release();
+                        _mixingSurface = NULL;
+                    }
+                }
+            }
+        }
+        it++;
+    }
+    return 0;
+}
+
+int VideoRenderDirectDraw::BlitFromTextToMixingBuffer()
+{
+    if (_directDraw == NULL)
+    {
+        return -1;
+    }
+    if (!_mixingSurface)
+    {
+        return -1;
+    }
+    if (_textSettings.empty())
+    {
+        return 0;
+    }
+
+    HDC hdcDDSurface;
+    HRESULT res = _mixingSurface->GetDC(&hdcDDSurface);
+    if (res != S_OK)
+    {
+        return -1;
+    }
+    //        
+    std::map<unsigned char, DirectDrawTextSettings*>::reverse_iterator it;
+    it = _textSettings.rbegin();
+
+    while (it != _textSettings.rend())
+    {
+        DirectDrawTextSettings* settings = it->second;
+        it++;
+        if (settings == NULL)
+        {
+            continue;
+        }
+        SetTextColor(hdcDDSurface, settings->_colorRefText);
+        SetBkColor(hdcDDSurface, settings->_colorRefBackground);
+
+        if (settings->_transparent)
+        {
+            SetBkMode(hdcDDSurface, TRANSPARENT); // do we need to call this all the time?
+        }
+        else
+        {
+            SetBkMode(hdcDDSurface, OPAQUE); // do we need to call this all the time?
+        }
+        RECT textRect;
+        textRect.left = int(_mixingRect.right * settings->_textLeft);
+        textRect.right = int(_mixingRect.right * settings->_textRight);
+        textRect.top = int(_mixingRect.bottom * settings->_textTop);
+        textRect.bottom = int(_mixingRect.bottom * settings->_textBottom);
+
+        DrawTextA(hdcDDSurface, settings->_ptrText, settings->_textLength,
+                  &textRect, DT_LEFT);
+    }
+    _mixingSurface->ReleaseDC(hdcDDSurface);
+    return 0;
+}
+
+int VideoRenderDirectDraw::BlitFromBitmapBuffersToMixingBuffer()
+{
+    HRESULT ddrval;
+    DDBLTFX ddbltfx;
+    ZeroMemory(&ddbltfx, sizeof(ddbltfx));
+    ddbltfx.dwSize = sizeof(ddbltfx);
+    ddbltfx.dwDDFX = DDBLTFX_NOTEARING;
+
+    if (_directDraw == NULL)
+    {
+        return -1; // signal that we are not ready for the change
+    }
+
+    std::map<unsigned char, DirectDrawBitmapSettings*>::reverse_iterator it;
+    it = _bitmapSettings.rbegin();
+
+    while (it != _bitmapSettings.rend())
+    {
+        DirectDrawBitmapSettings* settings = it->second;
+        it++;
+        if (settings == NULL)
+        {
+            continue;
+        }
+
+        // Color keying lets you set colors on a surface to be completely transparent.
+        // always blit _transparentBitmapSurface last
+        if (_mixingSurface && settings->_transparentBitmapSurface
+                && settings->_transparentBitmapWidth
+                && settings->_transparentBitmapHeight)
+        {
+            DWORD signal = DDBLT_WAIT | DDBLT_DDFX;
+            // Set transparent color
+            if (settings->_transparentBitmapColorKey)
+            {
+                signal |= DDBLT_KEYSRC;
+                settings->_transparentBitmapSurface->SetColorKey(
+                                                                 DDCKEY_SRCBLT,
+                                                                 settings->_transparentBitmapColorKey);
+            }
+
+            // Now we can blt the transparent surface to another surface
+            RECT srcRect;
+            SetRect(&srcRect, 0, 0, settings->_transparentBitmapWidth,
+                    settings->_transparentBitmapHeight);
+
+            RECT dstRect;
+            if (settings->_transparentBitmapLeft
+                    != settings->_transparentBitmapRight
+                    && settings->_transparentBitmapTop
+                            != settings->_transparentBitmapBottom)
+            {
+                CopyRect(&dstRect, &_mixingRect);
+                dstRect.left = (int) (dstRect.right
+                        * settings->_transparentBitmapLeft);
+                dstRect.right = (int) (dstRect.right
+                        * settings->_transparentBitmapRight);
+                dstRect.top = (int) (dstRect.bottom
+                        * settings->_transparentBitmapTop);
+                dstRect.bottom = (int) (dstRect.bottom
+                        * settings->_transparentBitmapBottom);
+            }
+            else
+            {
+
+                // if left, right, top and bottom are describing one point use the original size
+                CopyRect(&dstRect, &srcRect);
+                POINT startp;
+                startp.x = (int) (_mixingRect.right
+                        * settings->_transparentBitmapLeft);
+                startp.y = (int) (_mixingRect.bottom
+                        * settings->_transparentBitmapTop);
+                OffsetRect(&dstRect, startp.x, startp.y);
+
+                // make sure that we blit inside our surface
+                if (dstRect.bottom > _mixingRect.bottom)
+                {
+                    srcRect.bottom -= dstRect.bottom - _mixingRect.bottom;
+                    // sanity
+                    if (srcRect.bottom < 0)
+                    {
+                        srcRect.bottom = 0;
+                    }
+                    dstRect.bottom = _mixingRect.bottom;
+                }
+                if (dstRect.right > _mixingRect.right)
+                {
+                    srcRect.right -= dstRect.right - _mixingRect.right;
+                    // sanity
+                    if (srcRect.right < 0)
+                    {
+                        srcRect.right = 0;
+                    }
+                    dstRect.right = _mixingRect.right;
+                }
+            }
+            // ddbltfx.dwDDFX |= DDBLTFX_MIRRORUPDOWN; //only for test requires hw support
+
+            // wait for the  _mixingSurface to be available
+            ddrval = _mixingSurface->Blt(&dstRect,
+                                         settings->_transparentBitmapSurface,
+                                         &srcRect, signal, &ddbltfx);
+            if (ddrval == DDERR_SURFACELOST)
+            {
+                if (!::GetForegroundWindow())
+                {
+                    // no window, i.e the user have clicked CTRL+ALT+DEL
+                    return 0;
+                }
+                // always re-creted via the SetBitmap call
+                settings->_transparentBitmapSurface->Release();
+                settings->_transparentBitmapSurface = NULL;
+
+                _clearMixingSurface = true;
+
+                if (settings->_transparentBitMap)
+                {
+                    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                                 "DirectDraw re-set transparent bitmap");
+                    settings->SetBitmap(_trace, _directDraw);
+                }
+            }
+            else if (ddrval != DD_OK)
+            {
+                settings->_transparentBitmapSurface->Release();
+                settings->_transparentBitmapSurface = NULL;
+                WEBRTC_TRACE(
+                             kTraceInfo,
+                             kTraceVideo,
+                             -1,
+                             "DirectDraw blt error 0x%x _transparentBitmapSurface",
+                             ddrval);
+                return -1;
+                //return VIDEO_DIRECT_DRAW_FAILURE;
+            }
+        }
+    }
+    return 0;
+}
+
+/**
+ *	normal blitting
+ */
+int VideoRenderDirectDraw::BlitFromMixingBufferToFrontBuffer(
+                                                                 bool hwndChanged,
+                                                                 int& waitTime)
+{
+    DDBLTFX ddbltfx;
+    ZeroMemory(&ddbltfx, sizeof(ddbltfx));
+    ddbltfx.dwSize = sizeof(ddbltfx);
+    ddbltfx.dwDDFX = DDBLTFX_NOTEARING;
+    RECT rcRectDest;
+
+    // test for changing mode
+    /*    for(int i= 0; i< 6000000; i ++)
+     {
+     rcRectDest.left = i;
+     }
+     */
+
+    if (IsRectEmpty(&_mixingRect))
+    {
+        // no error just nothing to blit
+        return 0;
+    }
+    if (_mixingSurface == NULL)
+    {
+        // The mixing surface has probably been deleted
+        // and we haven't had time to restore it yet. Wait...
+        return 0;
+    }
+    if (_primarySurface == NULL)
+    {
+        int retVal = CreatePrimarySurface();
+        if (retVal != 0)
+        {
+            // tracing done
+            return retVal;
+        }
+    }
+
+    // first we need to figure out where on the primary surface our window lives
+    ::GetWindowRect(_hWnd, &rcRectDest);
+
+    DWORD signal = DDBLT_WAIT | DDBLT_DDFX;
+
+    // Set transparent color
+    if (_transparentBackground)
+    {
+        signal |= DDBLT_KEYSRC;
+        DDCOLORKEY ColorKey;
+        ColorKey.dwColorSpaceLowValue = RGB(0, 0, 0);
+        ColorKey.dwColorSpaceHighValue = RGB(0, 0, 0);
+        _mixingSurface->SetColorKey(DDCKEY_SRCBLT, &ColorKey);
+    }
+
+    if (_renderModeWaitForCorrectScanLine)
+    {
+        // wait for previus draw to complete
+        DWORD scanLines = 0;
+        DWORD screenLines = _screenRect.bottom - 1; // scanlines start on 0
+        DWORD screenLines90 = (screenLines * 9) / 10; //  % of the screen is rendered
+        //waitTime=GET_TIME_IN_MS();
+        waitTime = ::timeGetTime();
+        HRESULT hr = _directDraw->GetScanLine(&scanLines);
+        while (screenLines90 > scanLines && hr == DD_OK)
+        {
+            _confCritSect->Leave();
+            _fullScreenWaitEvent->Wait(3);
+            _confCritSect->Enter();
+            if (_directDraw == NULL)
+            {
+                return -1;
+                //return VIDEO_DIRECT_DRAW_FAILURE;
+            }
+            hr = _directDraw->GetScanLine(&scanLines);
+        }
+        //waitTime=GET_TIME_IN_MS()-waitTime;
+        waitTime = ::timeGetTime() - waitTime;
+    }
+
+    HRESULT ddrval = _primarySurface->Blt(&rcRectDest, _mixingSurface,
+                                          &_mixingRect, signal, &ddbltfx);
+    if (ddrval == DDERR_SURFACELOST)
+    {
+        if (!::GetForegroundWindow())
+        {
+            // no window, i.e the user have clicked CTRL+ALT+DEL
+            return 0;
+        }
+        ddrval = _primarySurface->Restore();
+        if (ddrval == DD_OK) // Try again
+        {
+            ddrval = _primarySurface->Blt(&rcRectDest, _mixingSurface,
+                                          &_mixingRect, signal, &ddbltfx);
+        }
+        if (ddrval != DD_OK) // If restore failed or second time blt failed. Delete the surface. It will be recreated next time.
+        {
+            WEBRTC_TRACE(
+                         kTraceWarning,
+                         kTraceVideo,
+                         -1,
+                         "DirectDraw failed to restore lost _primarySurface  0x%x",
+                         ddrval);
+            _primarySurface->Release();
+            _primarySurface = NULL;
+            if (_mixingSurface)
+            {
+                _mixingSurface->Release();
+                _mixingSurface = NULL;
+            }
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw restored lost _primarySurface");
+    }
+    else if (ddrval == DDERR_EXCEPTION)
+    {
+        _primarySurface->Release();
+        _primarySurface = NULL;
+        if (_mixingSurface)
+        {
+            _mixingSurface->Release();
+            _mixingSurface = NULL;
+        }
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw exception in _primarySurface");
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+    if (ddrval != DD_OK)
+    {
+        if (ddrval != 0x80004005) // Undefined error. Ignore
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                         "DirectDraw blt error 0x%x _primarySurface", ddrval);
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+    }
+    return 0;
+}
+
+/**
+ *	fullscreen mode blitting
+ */
+
+int VideoRenderDirectDraw::WaitAndFlip(int& waitTime)
+{
+    if (_primarySurface == NULL)
+    {
+        // no trace, too much in the file
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+    if (_directDraw == NULL)
+    {
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+    // wait for previus draw to complete
+    DWORD scanLines = 0;
+    DWORD screenLines = _screenRect.bottom - 1; // scanlines start on 0
+    DWORD screenLines90 = (screenLines * 9) / 10; //  % of the screen is rendered
+
+    //waitTime=GET_TIME_IN_MS();
+    waitTime = ::timeGetTime();
+    HRESULT hr = _directDraw->GetScanLine(&scanLines);
+    while (screenLines90 > scanLines && hr == DD_OK)
+    {
+        _confCritSect->Leave();
+        _fullScreenWaitEvent->Wait(3);
+        _confCritSect->Enter();
+        if (_directDraw == NULL)
+        {
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+        hr = _directDraw->GetScanLine(&scanLines);
+    }
+    //waitTime=GET_TIME_IN_MS()-waitTime;    
+    waitTime = ::timeGetTime() - waitTime;
+    if (screenLines > scanLines)
+    {
+        // this function sucks a lot of the CPU... but it's worth it
+        _directDraw->WaitForVerticalBlank(DDWAITVB_BLOCKBEGIN, NULL);
+    }
+
+    // schedule a flip
+    HRESULT ddrval = _primarySurface->Flip(NULL, DDFLIP_WAIT); // schedule flip DDFLIP_WAIT
+    if (ddrval == DDERR_SURFACELOST)
+    {
+        if (!::GetForegroundWindow())
+        {
+            // no window, i.e the user have clicked CTRL+ALT+DEL
+            return 0;
+        }
+        //if(::IsIconic(_hWnd))
+        //{
+        // need to do this before Restore
+        //WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1, "DirectDraw our window is an icon maximize it ");
+        // When the full screen window is switched out by ALT-TAB or ALT-CTRL-DEL-TASKMANAGER,
+        // this call will hang the app. Remove it to fix the problem.
+        // FIXME:
+        // 1) Why we want to active and max the window when it was minimized?
+        // 2) Why this is needed before restore? We didn't do that in non full screen mode.
+        //::ShowWindow(_hWnd, SW_SHOWMAXIMIZED);
+        //}
+        ddrval = _primarySurface->Restore();
+        if (ddrval != DD_OK)
+        {
+            WEBRTC_TRACE(
+                         kTraceWarning,
+                         kTraceVideo,
+                         -1,
+                         "DirectDraw failed to restore _primarySurface, in flip, 0x%x",
+                         ddrval);
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw restore _primarySurface in flip");
+
+    }
+    else if (ddrval != DD_OK)
+    {
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+    return 0;
+}
+
+int VideoRenderDirectDraw::BlitFromMixingBufferToBackBuffer()
+{
+    if (_backSurface == NULL)
+    {
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+    if (IsRectEmpty(&_mixingRect))
+    {
+        // nothing to blit
+        return 0;
+    }
+    DDBLTFX ddbltfx;
+    ZeroMemory(&ddbltfx, sizeof(ddbltfx));
+    ddbltfx.dwSize = sizeof(ddbltfx);
+    ddbltfx.dwDDFX = DDBLTFX_NOTEARING;
+
+    // wait for the _backSurface to be available
+    HRESULT ddrval = _backSurface->Blt(&_screenRect, _mixingSurface,
+                                       &_mixingRect, DDBLT_WAIT | DDBLT_DDFX,
+                                       &ddbltfx);
+    if (ddrval == DDERR_SURFACELOST)
+    {
+        if (!::GetForegroundWindow())
+        {
+            // no window, i.e the user have clicked CTRL+ALT+DEL
+            return 0;
+        }
+        //if(::IsIconic(_hWnd))
+        //{
+        // need to do this before Restore
+        //WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1, "DirectDraw our window is an icon maximize it ");
+        //WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1, "DirectDraw show our window is an icon maximize it ");
+        // When the full screen window is switch out by ALT-TAB or ALT-CTRL-DEL-TASKMANAGER,
+        // this call will hang the app. Remove it to fix the problem.
+        // FIXME:
+        // 1) Why we want to active and max the window when it was minimized?
+        // 2) Why this is needed before restore? We didn't do that in non full screen mode.
+        //::ShowWindow(_hWnd, SW_SHOWMAXIMIZED);
+        //}
+        ddrval = _primarySurface->Restore();
+        if (ddrval != DD_OK)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVideo, -1,
+                         "DirectDraw failed to restore _primarySurface");
+            return -1;
+            //return VIDEO_DIRECT_DRAW_FAILURE;
+        }
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "DirectDraw restored _primarySurface");
+
+        _clearMixingSurface = true;
+
+    }
+    else if (ddrval != DD_OK)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                     "DirectDraw blt error 0x%x _backSurface", ddrval);
+        return -1;
+        //return VIDEO_DIRECT_DRAW_FAILURE;
+    }
+    return 0;
+}
+
+/*
+ Saving the code for using a clip list instead of HWND, problem was that other transparent
+ HWNDs caused us not to update an area or that we painted in other HWNDs area.
+
+ RECT hWndRect;
+ ::GetWindowRect(_hWnd, &hWndRect);
+
+ LPRGNDATA lpClipList = (LPRGNDATA)malloc(sizeof(RGNDATAHEADER) + sizeof(RECT));
+
+ // now fill out all the structure fields
+ memcpy(lpClipList->Buffer, &hWndRect, sizeof(RECT));
+
+ ::CopyRect(&(lpClipList->rdh.rcBound), &hWndRect);
+ lpClipList->rdh.dwSize = sizeof(RGNDATAHEADER);
+ lpClipList->rdh.iType = RDH_RECTANGLES;
+ lpClipList->rdh.nCount = 1;
+ lpClipList->rdh.nRgnSize = sizeof(RECT) * lpClipList->rdh.nCount;
+ ddrval= _directDrawClipper->SetClipList(lpClipList, 0);
+
+ void Visible(HWND hwnd, HRGN &hRgn)
+ {
+ if (!IsWindowVisible(hwnd))      // If the window is visible
+ {
+ if(CombineRgn(hRgn, hRgn, hRgn, RGN_XOR) == NULLREGION)
+ {
+ return;
+ }
+ }
+ // Gets the topmost window
+ HWND hWnd=GetTopWindow(NULL);
+ while (hWnd != NULL && hWnd != hwnd)  // If the window is above in Z-order
+ {
+ if (IsWindowVisible(hWnd))      // If the window is visible
+ {
+ RECT Rect;
+ // Gets window dimension
+ GetWindowRect(hWnd, &Rect);
+ // Creates a region corresponding to the window
+ if(Rect.left > 0) // test fo rnow
+ {
+ HRGN hrgnWnd = CreateRectRgn(Rect.left, Rect.top, Rect.right, Rect.bottom);
+ //                int err = GetUpdateRgn(hWnd, hrgnWnd, FALSE);
+ // Creates a region corresponding to region not overlapped
+ if(CombineRgn(hRgn, hRgn, hrgnWnd, RGN_DIFF) == COMPLEXREGION)
+ {
+ int a = 0;
+ }
+ DeleteObject(hrgnWnd);
+ }
+ }
+ // Loops through all windows till the specified window
+ hWnd = GetWindow(hWnd, GW_HWNDNEXT);
+ }
+
+ HRGN region;
+ region = CreateRectRgn(0, 0, 500, 500);
+
+ // Get the affected region
+ //    if (GetUpdateRgn(_hWnd, region, FALSE) != ERROR)
+ HDC dc = GetDC(_hWnd);
+ if(GetClipRgn(dc, region) > 0)
+ {
+ int buffsize;
+ UINT x;
+ RGNDATA *buff;
+ POINT TopLeft;
+
+ // Get the top-left point of the client area
+ TopLeft.x = 0;
+ TopLeft.y = 0;
+ if (!ClientToScreen(_hWnd, &TopLeft))
+ {
+ int a = 0;
+ }
+
+
+ // Get the size of buffer required
+ buffsize = GetRegionData(region, 0, 0);
+ if (buffsize != 0)
+ {
+ buff = (RGNDATA *) new BYTE [buffsize];
+ if (buff == NULL)
+ {
+ int a = 0;
+ }
+
+ // Now get the region data
+ if(GetRegionData(region, buffsize, buff))
+ {
+ if(buff->rdh.nCount > 0)
+ {
+ ::OffsetRect(&(buff->rdh.rcBound), TopLeft.x, TopLeft.y);
+ for (x=0; x<(buff->rdh.nCount); x++)
+ {
+ RECT *urect = (RECT *) (((BYTE *) buff) + sizeof(RGNDATAHEADER) + (x * sizeof(RECT)));
+ ::OffsetRect(urect, TopLeft.x, TopLeft.y);
+ char logStr[256];
+ _snprintf(logStr,256, "rect T:%d L:%d B:%d R:%d\n",urect->top, urect->left, urect->bottom, urect->right);
+ OutputDebugString(logStr);
+
+ }
+ OutputDebugString("\n");
+ _directDrawClipper->SetClipList(buff, 0);
+ }
+ LPRGNDATA lpClipList = (LPRGNDATA)malloc(sizeof(RGNDATAHEADER) + sizeof(RECT) * buff->rdh.nCount);
+ if(buff->rdh.nCount > 0)
+ {
+ _directDrawClipper->SetClipList(lpClipList, 0);
+
+ lpClipList->
+ DWORD size = sizeof(RGNDATAHEADER) + sizeof(RECT)* buff->rdh.nCount;
+ lpClipList->rdh.dwSize = sizeof(RGNDATAHEADER);
+ lpClipList->rdh.iType = RDH_RECTANGLES;
+ lpClipList->rdh.nCount = 1;
+
+ HRESULT ddrval1 = _directDrawClipper->GetClipList(NULL, lpClipList, &size);
+ memcpy(lpClipList->Buffer, &rcRectDest, sizeof(RECT));
+ ::CopyRect(&(lpClipList->rdh.rcBound), &rcRectDest);
+ _directDrawClipper->SetClipList(lpClipList, 0);
+ }                    }
+
+ for (x=0; x<(buff->rdh.nCount); x++)
+ {
+ // Obtain the rectangles from the list
+ RECT *urect = (RECT *) (((BYTE *) buff) + sizeof(RGNDATAHEADER) + (x * sizeof(RECT)));
+ int a = 0;
+
+ }
+ delete lpClipList;
+ }
+ delete buff;
+ }
+ }
+ */
+/*
+ void VideoRenderDirectDraw::Wait()
+ {
+ // wait for previus draw to complete
+ int count = 0;
+ DWORD scanLines = 0;
+ DWORD screenLines = _screenRect.bottom -1; // scanlines start on 0
+ DWORD screenLines75 = (screenLines*3)/4; //  % of the screen is rendered
+ HRESULT hr = DD_OK;
+ if(_directDraw == NULL)
+ {
+ return;
+ }
+ hr =_directDraw->GetScanLine(&scanLines);
+ while ( screenLines75 > scanLines && hr == DD_OK)
+ {
+ //   		_confCritSect->Leave();
+ _screenEvent->Wait(10);
+ //      _confCritSect->Enter();
+ if(_directDraw == NULL)
+ {
+ return;
+ }
+ hr = _directDraw->GetScanLine(&scanLines);
+ }
+ }
+ */
+
+WebRtc_Word32 VideoRenderDirectDraw::StartRender()
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderDirectDraw::StopRender()
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderDirectDraw::ChangeWindow(void* window)
+{
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Not supported.");
+    return -1;
+}
+
+} //namespace webrtc
+
diff --git a/src/modules/video_render/main/source/windows/video_render_directdraw.h b/src/modules/video_render/main/source/windows/video_render_directdraw.h
new file mode 100644
index 0000000..19fc5c3
--- /dev/null
+++ b/src/modules/video_render/main/source/windows/video_render_directdraw.h
@@ -0,0 +1,399 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECTDRAW_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECTDRAW_H_
+
+#include "typedefs.h"
+#include "i_video_render_win.h"
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+
+#include "ddraw.h"
+#include <Map>
+#include <List>
+
+// Added
+#include "video_render_defines.h"
+
+#pragma comment(lib, "ddraw.lib")       // located in DirectX SDK
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+class Trace;
+
+class VideoRenderDirectDraw;
+
+// some typedefs to make it easy to test different versions
+typedef IDirectDraw7 DirectDraw;
+typedef IDirectDrawSurface7 DirectDrawSurface;
+typedef DDSURFACEDESC2 DirectDrawSurfaceDesc;
+typedef DDSCAPS2 DirectDrawCaps;
+typedef std::pair<int, unsigned int> ZorderPair;
+
+class WindowsThreadCpuUsage
+{
+public:
+    WindowsThreadCpuUsage();
+    int GetCpuUsage(); //in % since last call
+    DWORD LastGetCpuTime()
+    {
+        return _lastGetCpuUsageTime;
+    }
+    const enum
+    {
+        CPU_CHECK_INTERVAL = 1000
+    };
+private:
+    _int64 _lastCpuUsageTime;
+    DWORD _lastGetCpuUsageTime;
+    int _lastCpuUsage;
+    HANDLE _hThread;
+    int _cores;
+};
+
+class DirectDrawStreamSettings
+{
+public:
+    DirectDrawStreamSettings();
+
+    float _startWidth;
+    float _stopWidth;
+    float _startHeight;
+    float _stopHeight;
+
+    float _cropStartWidth;
+    float _cropStopWidth;
+    float _cropStartHeight;
+    float _cropStopHeight;
+};
+
+class DirectDrawBitmapSettings
+{
+public:
+    DirectDrawBitmapSettings();
+    ~DirectDrawBitmapSettings();
+
+    int SetBitmap(Trace* trace, DirectDraw* directDraw);
+
+    HBITMAP _transparentBitMap;
+    float _transparentBitmapLeft;
+    float _transparentBitmapRight;
+    float _transparentBitmapTop;
+    float _transparentBitmapBottom;
+    int _transparentBitmapWidth;
+    int _transparentBitmapHeight;
+    DDCOLORKEY* _transparentBitmapColorKey;
+    DirectDrawSurface* _transparentBitmapSurface; // size of bitmap image
+};
+
+class DirectDrawTextSettings
+{
+public:
+    DirectDrawTextSettings();
+    ~DirectDrawTextSettings();
+
+    int SetText(const char* text, int textLength, COLORREF colorText,
+                COLORREF colorBg, float left, float top, float right,
+                float bottom);
+
+    char* _ptrText;
+    WebRtc_UWord32 _textLength;
+    COLORREF _colorRefText;
+    COLORREF _colorRefBackground;
+    float _textLeft;
+    float _textRight;
+    float _textTop;
+    float _textBottom;
+    bool _transparent;
+};
+
+class DirectDrawChannel: public VideoRenderCallback
+{
+public:
+    DirectDrawChannel(DirectDraw* directDraw,
+                          VideoType blitVideoType,
+                          VideoType incomingVideoType,
+                          VideoType screenVideoType,
+                          VideoRenderDirectDraw* owner);
+
+    int FrameSizeChange(int width, int height, int numberOfStreams);
+    int DeliverFrame(unsigned char* buffer, int buffeSize,
+                     unsigned int timeStamp90KHz);
+    virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
+                                      VideoFrame& videoFrame);
+
+    int ChangeDeliverColorFormat(bool useScreenType);
+
+    void AddRef();
+    void Release();
+
+    void SetStreamSettings(VideoRenderDirectDraw* DDObj, short streamId,
+                           float startWidth, float startHeight,
+                           float stopWidth, float stopHeight);
+    void SetStreamCropSettings(VideoRenderDirectDraw* DDObj,
+                               short streamId, float startWidth,
+                               float startHeight, float stopWidth,
+                               float stopHeight);
+
+    int GetStreamSettings(VideoRenderDirectDraw* DDObj, short streamId,
+                          float& startWidth, float& startHeight,
+                          float& stopWidth, float& stopHeight);
+
+    void GetLargestSize(RECT* mixingRect);
+    int
+            BlitFromOffscreenBufferToMixingBuffer(
+                                                  VideoRenderDirectDraw* DDObj,
+                                                  short streamID,
+                                                  DirectDrawSurface* mixingSurface,
+                                                  RECT &dstRect, bool demuxing);
+    bool IsOffScreenSurfaceUpdated(VideoRenderDirectDraw* DDobj);
+
+protected:
+    virtual ~DirectDrawChannel();
+
+private:
+    CriticalSectionWrapper* _critSect; // protect members from change while using them
+    int _refCount;
+    int _width;
+    int _height;
+    int _numberOfStreams;
+    bool _deliverInScreenType;
+    bool _doubleBuffer;
+    DirectDraw* _directDraw;
+    DirectDrawSurface* _offScreenSurface; // size of incoming stream
+    DirectDrawSurface* _offScreenSurfaceNext; // size of incoming stream
+    VideoType _blitVideoType;
+    VideoType _originalBlitVideoType;
+    VideoType _incomingVideoType;
+    VideoType _screenVideoType;
+    enum
+    {
+        MAX_FRAMEDELIVER_TIME = 20
+    }; //Maximum time it might take to deliver a frame (process time in DeliverFrame)
+    enum
+    {
+        MAX_NO_OF_LATE_FRAMEDELIVER_TIME = 10
+    }; //No of times we allow DeliverFrame process time to exceed MAX_FRAMEDELIVER_TIME before we take action.
+    VideoFrame _tempRenderBuffer;
+
+    std::map<unsigned long long, DirectDrawStreamSettings*>
+            _streamIdToSettings;
+    bool _offScreenSurfaceUpdated;
+    VideoRenderDirectDraw* _owner;
+};
+
+class VideoRenderDirectDraw: IVideoRenderWin
+{
+public:
+    VideoRenderDirectDraw(Trace* trace, HWND hWnd, bool fullscreen);
+    ~VideoRenderDirectDraw();
+public:
+    //IVideoRenderWin
+
+    /**************************************************************************
+     *
+     *   Init
+     *
+     ***************************************************************************/
+    virtual WebRtc_Word32 Init();
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+    virtual VideoRenderCallback
+            * CreateChannel(const WebRtc_UWord32 streamId,
+                            const WebRtc_UWord32 zOrder, const float left,
+                            const float top, const float right,
+                            const float bottom);
+
+    virtual WebRtc_Word32 DeleteChannel(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32 GetStreamSettings(const WebRtc_UWord32 channel,
+                                            const WebRtc_UWord16 streamId,
+                                            WebRtc_UWord32& zOrder,
+                                            float& left, float& top,
+                                            float& right, float& bottom);
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual WebRtc_Word32 StartRender();
+    virtual WebRtc_Word32 StopRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual bool IsFullScreen();
+
+    virtual WebRtc_Word32 SetCropping(const WebRtc_UWord32 channel,
+                                      const WebRtc_UWord16 streamId,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
+
+    virtual WebRtc_Word32 ChangeWindow(void* window);
+
+    virtual WebRtc_Word32 GetGraphicsMemory(WebRtc_UWord64& totalMemory,
+                                            WebRtc_UWord64& availableMemory);
+
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+                                  const WebRtc_UWord8* text,
+                                  const WebRtc_Word32 textLength,
+                                  const WebRtc_UWord32 colorText,
+                                  const WebRtc_UWord32 colorBg,
+                                  const float left, const float top,
+                                  const float rigth, const float bottom);
+
+    virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+                                    const WebRtc_UWord8 pictureId,
+                                    const void* colorKey, const float left,
+                                    const float top, const float right,
+                                    const float bottom);
+
+    virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 channel,
+                                            const WebRtc_UWord16 streamId,
+                                            const unsigned int zOrder,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom);
+public:
+
+    // Used for emergency stops...
+    int Stop();
+
+    DirectDrawChannel* ShareDirectDrawChannel(int channel);
+    DirectDrawChannel* ConfigureDirectDrawChannel(int channel,
+                                                      unsigned char streamID,
+                                                      int zOrder, float left,
+                                                      float top, float right,
+                                                      float bottom);
+
+    int AddDirectDrawChannel(int channel, unsigned char streamID, int zOrder,
+                             DirectDrawChannel*);
+
+    VideoType GetPerferedVideoFormat();
+    bool HasChannels();
+    bool HasChannel(int channel);
+    bool DeliverInScreenType();
+    int GetChannels(std::list<int>& channelList);
+
+    // code for getting graphics settings
+    int GetScreenResolution(int& screenWidth, int& screenHeight);
+    int UpdateSystemCPUUsage(int systemCPU);
+
+    int SetBitmap(HBITMAP bitMap, unsigned char pictureId,
+                  DDCOLORKEY* colorKey, float left, float top, float rigth,
+                  float bottom);
+
+    bool IsPrimaryOrMixingSurfaceOnSystem();
+    bool CanBltFourCC()
+    {
+        return _bCanBltFourcc;
+    }
+
+protected:
+    static bool RemoteRenderingThreadProc(void* obj);
+    bool RemoteRenderingProcess();
+
+private:
+    int CheckCapabilities();
+    int CreateMixingSurface();
+    int CreatePrimarySurface();
+
+    int FillSurface(DirectDrawSurface *pDDSurface, RECT* rect);
+    int DrawOnSurface(unsigned char* buffer, int buffeSize);
+    int BlitFromOffscreenBuffersToMixingBuffer();
+    int BlitFromBitmapBuffersToMixingBuffer();
+    int BlitFromTextToMixingBuffer();
+
+    bool HasHWNDChanged();
+    void DecideBestRenderingMode(bool hwndChanged, int totalRenderTime);
+
+    // in fullscreen flip mode
+    int WaitAndFlip(int& waitTime);
+    int BlitFromMixingBufferToBackBuffer();
+
+    // in normal window mode
+    int BlitFromMixingBufferToFrontBuffer(bool hwndChanged, int& waitTime);
+
+    // private members
+    Trace* _trace;
+    CriticalSectionWrapper* _confCritSect; // protect members from change while using them
+
+    bool _fullscreen;
+    bool _demuxing;
+    bool _transparentBackground;
+    bool _supportTransparency;
+    bool _canStretch;
+    bool _canMirrorLeftRight;
+    bool _clearMixingSurface;
+    bool _deliverInScreenType;
+    bool _renderModeWaitForCorrectScanLine;
+    bool _deliverInHalfFrameRate;
+    bool _deliverInQuarterFrameRate;
+    bool _bCanBltFourcc;
+    bool _frameChanged; // True if a frame has changed or bitmap or text has changed.
+    int _processCount;
+    HWND _hWnd;
+    RECT _screenRect; // whole screen as a rect
+    RECT _mixingRect;
+    RECT _originalHwndRect;
+    RECT _hwndRect;
+
+    VideoType _incomingVideoType;
+    VideoType _blitVideoType;
+    VideoType _rgbVideoType;
+
+    DirectDraw* _directDraw;
+    DirectDrawSurface* _primarySurface; // size of screen
+    DirectDrawSurface* _backSurface; // size of screen
+    DirectDrawSurface* _mixingSurface; // size of screen
+
+    std::map<unsigned char, DirectDrawBitmapSettings*> _bitmapSettings;
+    std::map<unsigned char, DirectDrawTextSettings*> _textSettings;
+    std::map<int, DirectDrawChannel*> _directDrawChannels;
+    std::multimap<int, unsigned int> _directDrawZorder;
+
+    EventWrapper* _fullScreenWaitEvent;
+    EventWrapper* _screenEvent;
+    ThreadWrapper* _screenRenderThread;
+    WindowsThreadCpuUsage _screenRenderCpuUsage;
+
+    int _lastRenderModeCpuUsage;
+
+    // Used for emergency stop caused by OnDisplayChange
+    bool _blit;
+
+    //code for providing graphics settings
+    DWORD _totalMemory;
+    DWORD _availableMemory;
+    int _systemCPUUsage;
+
+    // Variables used for checking render time
+    int _maxAllowedRenderTime;
+    int _nrOfTooLongRenderTimes;
+    bool _isPrimaryOrMixingSurfaceOnSystem;
+};
+
+} //namespace webrtc
+
+
+#endif // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_DIRECTDRAW_H_
diff --git a/src/modules/video_render/main/source/windows/video_render_windows_impl.cc b/src/modules/video_render/main/source/windows/video_render_windows_impl.cc
new file mode 100644
index 0000000..7a7bd80
--- /dev/null
+++ b/src/modules/video_render/main/source/windows/video_render_windows_impl.cc
@@ -0,0 +1,986 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+#include "video_render_windows_impl.h"
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+#ifdef DIRECTDRAW_RENDERING
+#include "video_render_directdraw.h"
+#endif
+#ifdef DIRECT3D9_RENDERING
+#include "video_render_direct3d9.h"
+#endif
+
+#include <tchar.h>
+
+namespace webrtc {
+
+VideoRenderWindowsImpl::VideoRenderWindowsImpl(
+                                               const WebRtc_Word32 id,
+                                               const VideoRenderType videoRenderType,
+                                               void* window,
+                                               const bool fullscreen) :
+            _id(id),
+            _renderWindowsCritsect(
+                                   *CriticalSectionWrapper::CreateCriticalSection()),
+            _prtWindow(window), _fullscreen(fullscreen), _ptrRendererWin(NULL)
+{
+}
+
+VideoRenderWindowsImpl::~VideoRenderWindowsImpl()
+{
+    delete &_renderWindowsCritsect;
+    if (_ptrRendererWin)
+    {
+        delete _ptrRendererWin;
+        _ptrRendererWin = NULL;
+    }
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::Init()
+{
+    //LogOSAndHardwareDetails();
+    CheckHWAcceleration();
+
+    _renderMethod = kVideoRenderWinD3D9;
+
+    // Create the win renderer
+    switch (_renderMethod)
+    {
+        case kVideoRenderWinDd:
+        {
+#ifdef DIRECTDRAW_RENDERING
+            VideoRenderDirectDraw* ptrRenderer;
+            ptrRenderer = new VideoRenderDirectDraw(NULL, (HWND) _prtWindow, _fullscreen);
+            if (ptrRenderer == NULL)
+            {
+                break;
+            }
+            _ptrRendererWin = reinterpret_cast<IVideoRenderWin*>(ptrRenderer);
+#else
+            return NULL;
+#endif  //DIRECTDRAW_RENDERING
+        }
+            break;
+        case kVideoRenderWinD3D9:
+        {
+#ifdef DIRECT3D9_RENDERING
+            VideoRenderDirect3D9* ptrRenderer;
+            ptrRenderer = new VideoRenderDirect3D9(NULL, (HWND) _prtWindow, _fullscreen);
+            if (ptrRenderer == NULL)
+            {
+                break;
+            }
+            _ptrRendererWin = reinterpret_cast<IVideoRenderWin*>(ptrRenderer);
+#else
+            return NULL;
+#endif  //DIRECT3D9_RENDERING
+        }
+            break;
+        default:
+            break;
+    }
+
+    //Init renderer
+    if (_ptrRendererWin)
+        return _ptrRendererWin->Init();
+    else
+        return -1;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    _id = id;
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::ChangeWindow(void* window)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    if (!_ptrRendererWin)
+    {
+        return -1;
+    }
+    else
+    {
+        return _ptrRendererWin->ChangeWindow(window);
+    }
+}
+
+VideoRenderCallback*
+VideoRenderWindowsImpl::AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+                                                const WebRtc_UWord32 zOrder,
+                                                const float left,
+                                                const float top,
+                                                const float right,
+                                                const float bottom)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    VideoRenderCallback* renderCallback = NULL;
+
+    if (!_ptrRendererWin)
+    {
+    }
+    else
+    {
+        renderCallback = _ptrRendererWin->CreateChannel(streamId, zOrder, left,
+                                                        top, right, bottom);
+    }
+
+    return renderCallback;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::DeleteIncomingRenderStream(
+                                                                 const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    WebRtc_Word32 error = -1;
+    if (!_ptrRendererWin)
+    {
+    }
+    else
+    {
+        error = _ptrRendererWin->DeleteChannel(streamId);
+    }
+    return error;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::GetIncomingRenderStreamProperties(
+                                                                        const WebRtc_UWord32 streamId,
+                                                                        WebRtc_UWord32& zOrder,
+                                                                        float& left,
+                                                                        float& top,
+                                                                        float& right,
+                                                                        float& bottom) const
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    zOrder = 0;
+    left = 0;
+    top = 0;
+    right = 0;
+    bottom = 0;
+
+    WebRtc_Word32 error = -1;
+    if (!_ptrRendererWin)
+    {
+    }
+    else
+    {
+        error = _ptrRendererWin->GetStreamSettings(streamId, 0, zOrder, left,
+                                                   top, right, bottom);
+    }
+    return error;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::StartRender()
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    WebRtc_Word32 error = -1;
+    if (!_ptrRendererWin)
+    {
+    }
+    else
+    {
+        error = _ptrRendererWin->StartRender();
+    }
+    return error;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::StopRender()
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    WebRtc_Word32 error = -1;
+    if (!_ptrRendererWin)
+    {
+    }
+    else
+    {
+        error = _ptrRendererWin->StopRender();
+    }
+    return error;
+}
+
+VideoRenderType VideoRenderWindowsImpl::RenderType()
+{
+    return kRenderWindows;
+}
+
+RawVideoType VideoRenderWindowsImpl::PerferedVideoType()
+{
+    return kVideoI420;
+}
+
+bool VideoRenderWindowsImpl::FullScreen()
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    bool fullscreen = false;
+    if (!_ptrRendererWin)
+    {
+    }
+    else
+    {
+        fullscreen = _ptrRendererWin->IsFullScreen();
+    }
+    return fullscreen;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::GetGraphicsMemory(
+                                                        WebRtc_UWord64& totalGraphicsMemory,
+                                                        WebRtc_UWord64& availableGraphicsMemory) const
+{
+    if (_ptrRendererWin)
+    {
+        return _ptrRendererWin->GetGraphicsMemory(totalGraphicsMemory,
+                                                  availableGraphicsMemory);
+    }
+
+    totalGraphicsMemory = 0;
+    availableGraphicsMemory = 0;
+    return -1;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::GetScreenResolution(
+                                                          WebRtc_UWord32& screenWidth,
+                                                          WebRtc_UWord32& screenHeight) const
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    screenWidth = 0;
+    screenHeight = 0;
+    return 0;
+}
+
+WebRtc_UWord32 VideoRenderWindowsImpl::RenderFrameRate(
+                                                       const WebRtc_UWord32 streamId)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    return 0;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::SetStreamCropping(
+                                                        const WebRtc_UWord32 streamId,
+                                                        const float left,
+                                                        const float top,
+                                                        const float right,
+                                                        const float bottom)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    WebRtc_Word32 error = -1;
+    if (!_ptrRendererWin)
+    {
+    }
+    else
+    {
+        error = _ptrRendererWin->SetCropping(streamId, 0, left, top, right,
+                                             bottom);
+    }
+    return error;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::ConfigureRenderer(
+                                                        const WebRtc_UWord32 streamId,
+                                                        const unsigned int zOrder,
+                                                        const float left,
+                                                        const float top,
+                                                        const float right,
+                                                        const float bottom)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    WebRtc_Word32 error = -1;
+    if (!_ptrRendererWin)
+    {
+    }
+    else
+    {
+        error = _ptrRendererWin->ConfigureRenderer(streamId, 0, zOrder, left,
+                                                   top, right, bottom);
+    }
+
+    return error;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::SetTransparentBackground(
+                                                               const bool enable)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    WebRtc_Word32 error = -1;
+    if (!_ptrRendererWin)
+    {
+    }
+    else
+    {
+        error = _ptrRendererWin->SetTransparentBackground(enable);
+    }
+    return error;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::SetText(
+                                              const WebRtc_UWord8 textId,
+                                              const WebRtc_UWord8* text,
+                                              const WebRtc_Word32 textLength,
+                                              const WebRtc_UWord32 textColorRef,
+                                              const WebRtc_UWord32 backgroundColorRef,
+                                              const float left,
+                                              const float top,
+                                              const float right,
+                                              const float bottom)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    WebRtc_Word32 error = -1;
+    if (!_ptrRendererWin)
+    {
+    }
+    else
+    {
+        error = _ptrRendererWin->SetText(textId, text, textLength,
+                                         textColorRef, backgroundColorRef,
+                                         left, top, right, bottom);
+    }
+    return error;
+}
+
+WebRtc_Word32 VideoRenderWindowsImpl::SetBitmap(const void* bitMap,
+                                                const WebRtc_UWord8 pictureId,
+                                                const void* colorKey,
+                                                const float left,
+                                                const float top,
+                                                const float right,
+                                                const float bottom)
+{
+    CriticalSectionScoped cs(&_renderWindowsCritsect);
+    WebRtc_Word32 error = -1;
+    if (!_ptrRendererWin)
+    {
+    }
+    else
+    {
+        error = _ptrRendererWin->SetBitmap(bitMap, pictureId, colorKey, left,
+                                           top, right, bottom);
+    }
+    return error;
+}
+
+void VideoRenderWindowsImpl::LogOSAndHardwareDetails()
+{
+    HRESULT hr;
+    IDxDiagProvider* m_pDxDiagProvider = NULL;
+    IDxDiagContainer* m_pDxDiagRoot = NULL;
+
+    hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
+    bool coUninitializeIsRequired = true;
+    if (FAILED(hr))
+    {
+        // Avoid calling CoUninitialize() since CoInitializeEx() failed.
+        coUninitializeIsRequired = false;
+        if (hr == RPC_E_CHANGED_MODE)
+        {
+            // Calling thread has already initialized COM to be used in a single-threaded
+            // apartment (STA). We are then prevented from using STA.
+            // Details: hr = 0x80010106 <=> "Cannot change thread mode after it is set".
+            //
+            WEBRTC_TRACE(
+                         kTraceWarning,
+                         kTraceVideoRenderer,
+                         _id,
+                         "VideoRenderWindowsImpl::LogOSAndHardwareDetails() CoInitializeEx(NULL, COINIT_APARTMENTTHREADED) => RPC_E_CHANGED_MODE, error 0x%x",
+                         hr);
+        }
+    }
+
+    hr = CoCreateInstance(CLSID_DxDiagProvider, NULL, CLSCTX_INPROC_SERVER,
+                          IID_IDxDiagProvider, (LPVOID*) &m_pDxDiagProvider);
+
+    if (FAILED(hr) || m_pDxDiagProvider == NULL)
+    {
+        if (coUninitializeIsRequired)
+            CoUninitialize();
+        return;
+    }
+
+    // Fill out a DXDIAG_INIT_PARAMS struct and pass it to IDxDiagContainer::Initialize
+    // Passing in TRUE for bAllowWHQLChecks, allows dxdiag to check if drivers are 
+    // digital signed as logo'd by WHQL which may connect via internet to update 
+    // WHQL certificates.    
+    DXDIAG_INIT_PARAMS dxDiagInitParam;
+    ZeroMemory(&dxDiagInitParam, sizeof(DXDIAG_INIT_PARAMS));
+
+    dxDiagInitParam.dwSize = sizeof(DXDIAG_INIT_PARAMS);
+    dxDiagInitParam.dwDxDiagHeaderVersion = DXDIAG_DX9_SDK_VERSION;
+    dxDiagInitParam.bAllowWHQLChecks = TRUE;
+    dxDiagInitParam.pReserved = NULL;
+
+    hr = m_pDxDiagProvider->Initialize(&dxDiagInitParam);
+    if (FAILED(hr))
+    {
+        m_pDxDiagProvider->Release();
+        if (coUninitializeIsRequired)
+            CoUninitialize();
+        return;
+    }
+
+    hr = m_pDxDiagProvider->GetRootContainer(&m_pDxDiagRoot);
+    if (FAILED(hr) || m_pDxDiagRoot == NULL)
+    {
+        m_pDxDiagProvider->Release();
+        if (coUninitializeIsRequired)
+            CoUninitialize();
+        return;
+    }
+
+    IDxDiagContainer* pObject = NULL;
+
+    hr = m_pDxDiagRoot->GetChildContainer(L"DxDiag_SystemInfo", &pObject);
+    if (FAILED(hr) || pObject == NULL)
+    {
+        m_pDxDiagRoot->Release();
+        m_pDxDiagProvider->Release();
+        if (coUninitializeIsRequired)
+            CoUninitialize();
+        return;
+    }
+
+    TCHAR m_szDirectXVersionLongEnglish[100];
+    TCHAR m_szOSLocalized[100];
+    TCHAR m_szProcessorEnglish[200];
+    TCHAR m_szSystemManufacturerEnglish[200];
+
+    ZeroMemory(m_szDirectXVersionLongEnglish, sizeof(TCHAR) * 100);
+    ZeroMemory(m_szOSLocalized, sizeof(TCHAR) * 100);
+    ZeroMemory(m_szProcessorEnglish, sizeof(TCHAR) * 200);
+    ZeroMemory(m_szSystemManufacturerEnglish, sizeof(TCHAR) * 200);
+
+    GetStringValue( pObject, L"szDirectXVersionLongEnglish",
+                   EXPAND(m_szDirectXVersionLongEnglish) );
+    GetStringValue(pObject, L"szOSLocalized", EXPAND(m_szOSLocalized) );
+    GetStringValue(pObject, L"szProcessorEnglish", EXPAND(m_szProcessorEnglish) );
+    GetStringValue( pObject, L"szSystemManufacturerEnglish",
+                   EXPAND(m_szSystemManufacturerEnglish) );
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "System Manufacturer             --- %s",
+                 m_szSystemManufacturerEnglish);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "Processor                       --- %s", m_szProcessorEnglish);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "Operating System                --- %s", m_szOSLocalized);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "DirectX Version                 --- %s",
+                 m_szDirectXVersionLongEnglish);
+
+    if (pObject)
+        pObject->Release();
+
+    struct DisplayInfo
+    {
+        TCHAR m_szDescription[200];
+        TCHAR m_szManufacturer[200];
+        TCHAR m_szChipType[100];
+        TCHAR m_szDisplayMemoryEnglish[100];
+        TCHAR m_szDisplayModeEnglish[100];
+        TCHAR m_szDriverName[100];
+        TCHAR m_szDriverVersion[100];
+        TCHAR m_szDDStatusEnglish[100];
+        TCHAR m_szD3DStatusEnglish[100];
+        BOOL m_bDDAccelerationEnabled;
+        BOOL m_bNoHardware;
+        BOOL m_b3DAccelerationExists;
+        BOOL m_b3DAccelerationEnabled;
+    };
+
+    WCHAR wszContainer[256];
+    IDxDiagContainer* pContainer = NULL;
+
+    DWORD nInstanceCount = 0;
+    DWORD nItem = 0;
+
+    // Get the IDxDiagContainer object called "DxDiag_DisplayDevices".
+    // This call may take some time while dxdiag gathers the info.
+    if (FAILED(hr = m_pDxDiagRoot->GetChildContainer(L"DxDiag_DisplayDevices",
+                                                     &pContainer)))
+    {
+        m_pDxDiagRoot->Release();
+        m_pDxDiagProvider->Release();
+        if (coUninitializeIsRequired)
+            CoUninitialize();
+        return;
+    }
+
+    if (FAILED(hr = pContainer->GetNumberOfChildContainers(&nInstanceCount)))
+    {
+        pContainer->Release();
+        m_pDxDiagRoot->Release();
+        m_pDxDiagProvider->Release();
+        if (coUninitializeIsRequired)
+            CoUninitialize();
+        return;
+    }
+
+    DisplayInfo *pDisplayInfo = new DisplayInfo;
+    if (pDisplayInfo == NULL)
+        return;
+    ZeroMemory(pDisplayInfo, sizeof(DisplayInfo));
+
+    hr = pContainer->EnumChildContainerNames(nItem, wszContainer, 256);
+    if (FAILED(hr))
+    {
+        delete pDisplayInfo;
+        pContainer->Release();
+        m_pDxDiagRoot->Release();
+        m_pDxDiagProvider->Release();
+        if (coUninitializeIsRequired)
+            CoUninitialize();
+        return;
+    }
+
+    hr = pContainer->GetChildContainer(wszContainer, &pObject);
+    if (FAILED(hr) || pObject == NULL)
+    {
+        delete pDisplayInfo;
+        pContainer->Release();
+        m_pDxDiagRoot->Release();
+        m_pDxDiagProvider->Release();
+        if (coUninitializeIsRequired)
+            CoUninitialize();
+        return;
+    }
+
+    GetStringValue( pObject, L"szDescription",
+                   EXPAND(pDisplayInfo->m_szDescription) );
+    GetStringValue( pObject, L"szManufacturer",
+                   EXPAND(pDisplayInfo->m_szManufacturer) );
+    GetStringValue(pObject, L"szChipType", EXPAND(pDisplayInfo->m_szChipType) );
+    GetStringValue( pObject, L"szDisplayMemoryEnglish",
+                   EXPAND(pDisplayInfo->m_szDisplayMemoryEnglish) );
+    GetStringValue( pObject, L"szDisplayModeEnglish",
+                   EXPAND(pDisplayInfo->m_szDisplayModeEnglish) );
+    GetStringValue( pObject, L"szDriverName",
+                   EXPAND(pDisplayInfo->m_szDriverName) );
+    GetStringValue( pObject, L"szDriverVersion",
+                   EXPAND(pDisplayInfo->m_szDriverVersion) );
+    GetBoolValue(pObject, L"bDDAccelerationEnabled",
+                 &pDisplayInfo->m_bDDAccelerationEnabled);
+    GetBoolValue(pObject, L"bNoHardware", &pDisplayInfo->m_bNoHardware);
+    GetBoolValue(pObject, L"bDDAccelerationEnabled",
+                 &pDisplayInfo->m_bDDAccelerationEnabled);
+    GetBoolValue(pObject, L"b3DAccelerationExists",
+                 &pDisplayInfo->m_b3DAccelerationExists);
+    GetBoolValue(pObject, L"b3DAccelerationEnabled",
+                 &pDisplayInfo->m_b3DAccelerationEnabled);
+    GetStringValue( pObject, L"szDDStatusEnglish",
+                   EXPAND(pDisplayInfo->m_szDDStatusEnglish));
+    GetStringValue( pObject, L"szD3DStatusEnglish",
+                   EXPAND(pDisplayInfo->m_szD3DStatusEnglish));
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "Device Name                     --- %s",
+                 pDisplayInfo->m_szDescription);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "Device Manufacturer             --- %s",
+                 pDisplayInfo->m_szManufacturer);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "Device ChipType                 --- %s",
+                 pDisplayInfo->m_szChipType);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "Approx. Total Device Memory     --- %s",
+                 pDisplayInfo->m_szDisplayMemoryEnglish);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "Current Display Mode            --- %s",
+                 pDisplayInfo->m_szDisplayModeEnglish);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "Device Driver Name              --- %s",
+                 pDisplayInfo->m_szDriverName);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "Device Driver Version           --- %s",
+                 pDisplayInfo->m_szDriverVersion);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "DirectDraw Acceleration Enabled --- %s",
+                 pDisplayInfo->m_szDescription ? "Enabled" : "Disabled");
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "bNoHardware                     --- %s",
+                 pDisplayInfo->m_bNoHardware ? "Enabled" : "Disabled");
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "b3DAccelerationExists Enabled   --- %s",
+                 pDisplayInfo->m_b3DAccelerationExists ? "Enabled" : "Disabled");
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "b3DAccelerationEnabled Enabled  --- %s",
+                 pDisplayInfo->m_b3DAccelerationEnabled ? "Enabled"
+                         : "Disabled");
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "DDraw Status                    --- %s",
+                 pDisplayInfo->m_szDDStatusEnglish);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+                 "D3D Status                      --- %s",
+                 pDisplayInfo->m_szD3DStatusEnglish);
+
+    // Get OS version
+    OSVERSIONINFOEX osvie;
+    osvie.dwOSVersionInfoSize = sizeof(OSVERSIONINFOEX);
+    GetVersionEx((LPOSVERSIONINFO) & osvie);
+    /*
+     Operating system	    Version number	dwMajorVersion	dwMinorVersion
+     Windows 7	            6.1	            6	            1
+     Windows Server 2008 R2	6.1	            6	            1
+     Windows Server 2008	    6.0	            6           	0
+     Windows Vista	        6.0	            6	            0
+     Windows Server 2003 R2	5.2	            5	            2
+     Windows Server 2003	    5.2	            5           	2
+     Windows XP	            5.1	            5           	1
+     Windows 2000	        5.0         	5	            0
+     */
+    //RDP problem exists only when XP is involved
+    if (osvie.dwMajorVersion < 6)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVideoRenderer, _id,
+                     "Checking for RDP driver");
+        if (_tcsncmp(pDisplayInfo->m_szDriverName, _T("RDPDD.dll"), 9) == 0)
+        {
+            //
+        }
+    }
+
+    if (pObject)
+    {
+        pObject->Release();
+        pObject = NULL;
+    }
+
+    if (pContainer)
+        pContainer->Release();
+
+    if (m_pDxDiagProvider)
+        m_pDxDiagProvider->Release();
+
+    if (m_pDxDiagRoot)
+        m_pDxDiagRoot->Release();
+
+    if (pDisplayInfo)
+        delete pDisplayInfo;
+
+    if (coUninitializeIsRequired)
+        CoUninitialize();
+
+    return;
+}
+
+//-----------------------------------------------------------------------------
+// Name: GetStringValue()
+// Desc: Get a string value from a IDxDiagContainer object
+//-----------------------------------------------------------------------------
+HRESULT VideoRenderWindowsImpl::GetStringValue(IDxDiagContainer* pObject,
+                                               WCHAR* wstrName,
+                                               TCHAR* strValue, int nStrLen)
+{
+    HRESULT hr;
+    VARIANT var;
+    VariantInit(&var);
+
+    if (FAILED(hr = pObject->GetProp(wstrName, &var)))
+        return hr;
+
+    if (var.vt != VT_BSTR)
+        return E_INVALIDARG;
+
+#ifdef _UNICODE
+    wcsncpy( strValue, var.bstrVal, nStrLen-1 );
+#else
+    wcstombs(strValue, var.bstrVal, nStrLen);
+#endif
+    strValue[nStrLen - 1] = TEXT('\0');
+    VariantClear(&var);
+
+    return S_OK;
+}
+
+//-----------------------------------------------------------------------------
+// Name: GetBoolValue()
+// Desc: Get a BOOL value from a IDxDiagContainer object
+//-----------------------------------------------------------------------------
+HRESULT VideoRenderWindowsImpl::GetBoolValue(IDxDiagContainer* pObject,
+                                             WCHAR* wstrName, BOOL* pbValue)
+{
+    HRESULT hr;
+    VARIANT var;
+    VariantInit(&var);
+
+    if (FAILED(hr = pObject->GetProp(wstrName, &var)))
+        return hr;
+
+    if (var.vt != VT_BOOL)
+        return E_INVALIDARG;
+
+    *pbValue = (var.boolVal != 0);
+    VariantClear(&var);
+
+    return S_OK;
+}
+
+int VideoRenderWindowsImpl::CheckHWAcceleration()
+{
+    // Read the registry to check if HW acceleration is enabled or not.
+    HKEY regKey;
+    DWORD value = 0;
+    DWORD valueLength = 4;
+
+    bool directDraw = true;
+    bool direct3D = true;
+    bool dci = true;
+
+    // DirectDraw
+    if (RegOpenKeyEx(HKEY_LOCAL_MACHINE, _T("SOFTWARE\\Microsoft\\DirectDraw"),
+                     0, KEY_QUERY_VALUE, &regKey) == ERROR_SUCCESS)
+    {
+        // We have the registry key
+        value = 0;
+        if (RegQueryValueEx(regKey, _T("EmulationOnly"), NULL, NULL,
+                            (BYTE*) &value, &valueLength) == ERROR_SUCCESS)
+        {
+            if (value == 1)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVideo, -1,
+                             "DirectDraw acceleration is disabled");
+                directDraw = false;
+            }
+            else
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVideo, -1,
+                             "DirectDraw acceleration is enabled");
+            }
+        }
+        else
+        {
+            // Could not get the value for this one.
+            WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                         "Could not find EmulationOnly key, DirectDraw acceleration is probably enabled");
+        }
+        RegCloseKey(regKey);
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "Could not open DirectDraw settings");
+    }
+
+    // Direct3D
+    if (RegOpenKeyEx(HKEY_LOCAL_MACHINE,
+                     _T("SOFTWARE\\Microsoft\\Direct3D\\Drivers"), 0,
+                     KEY_QUERY_VALUE, &regKey) == ERROR_SUCCESS)
+    {
+        // We have the registry key
+        value = 0;
+        if (RegQueryValueEx(regKey, _T("SoftwareOnly"), NULL, NULL,
+                            (BYTE*) &value, &valueLength) == ERROR_SUCCESS)
+        {
+            if (value == 1)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVideo, -1,
+                             "Direct3D acceleration is disabled");
+                direct3D = false;
+            }
+            else
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVideo, -1,
+                             "Direct3D acceleration is enabled");
+            }
+        }
+        else
+        {
+            // Could not get the value for this one.
+            WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                         "Could not find SoftwarOnly key, Direct3D acceleration is probably enabled");
+        }
+        RegCloseKey(regKey);
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "Could not open Direct3D settings");
+    }
+
+    // DCI
+    if (RegOpenKeyEx(
+                     HKEY_LOCAL_MACHINE,
+                     _T(
+                        "SYSTEM\\CurrentControlSet\\Control\\GraphicsDrivers\\DCI"),
+                     0, KEY_QUERY_VALUE, &regKey) == ERROR_SUCCESS)
+    {
+        // We have found the registry key
+        value = 0;
+        if (RegQueryValueEx(regKey, _T("Timeout"), NULL, NULL, (BYTE*) &value,
+                            &valueLength) == ERROR_SUCCESS)
+        {
+            if (value == 0)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVideo, -1,
+                             "DCI - DirectDraw acceleration is disabled");
+                dci = false;
+            }
+            else if (value == 7)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVideo, -1,
+                             "DCI is fully enabled");
+            }
+            else
+            {
+                WEBRTC_TRACE(
+                             kTraceWarning,
+                             kTraceVideo,
+                             -1,
+                             "DCI - DirectDraw acceleration is enabled, but short timeout: %d",
+                             value);
+            }
+        }
+        else
+        {
+            // Could not get the value for this one.
+            WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                         "Could not find Timeout key");
+        }
+        RegCloseKey(regKey);
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                     "Could not open DCI settings");
+    }
+
+    // We don't care about Direct3D right now...
+    if (dci == false || directDraw == false)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+void VideoRenderWindowsImpl::CheckHWDriver(bool& badDriver,
+                                           bool& fullAccelerationEnabled)
+{
+    // Read the registry to check if HW acceleration is enabled or not.
+    HKEY regKey;
+    DWORD value = 0;
+
+    //Assume the best
+    badDriver = false;
+    fullAccelerationEnabled = true;
+
+    // Check the path to the currently used driver
+    if (RegOpenKeyEx(HKEY_LOCAL_MACHINE, _T("HARDWARE\\DEVICEMAP\\VIDEO"), 0,
+                     KEY_QUERY_VALUE, &regKey) == ERROR_SUCCESS)
+    {
+        // We have found the registry key containing the driver location
+        value = 0;
+        DWORD driverPathLen = 512;
+        TCHAR driverPath[512];
+        memset(driverPath, 0, driverPathLen * sizeof(TCHAR));
+
+        long retVal = RegQueryValueEx(regKey, _T("\\Device\\Video0"), NULL,
+                                      NULL, (BYTE*) driverPath, &driverPathLen);
+
+        // Close the key...
+        RegCloseKey(regKey);
+
+        if (retVal == ERROR_SUCCESS)
+        {
+            // We have the path to the currently used video card
+
+            // trueDriverPath = modified nameStr, from above, that works
+            // for RegOpenKeyEx
+            TCHAR trueDriverPath[512];
+            memset(trueDriverPath, 0, 512 * sizeof(TCHAR));
+
+            // Convert the path to correct format.
+            //      - Remove \Registry\Machine\
+            //      - Replace '\' with '\\'
+            // Should be something like this: System\\CurrentControlSet\\Control\\Video\\{F6987E15-F12C-4B15-8C84-0F635F3F09EA}\\0000"
+            int idx = 0;
+            for (DWORD i = 18; i < (driverPathLen / sizeof(TCHAR)); i++)
+            {
+                trueDriverPath[idx++] = driverPath[i];
+                if (driverPath[i] == _T('\\'))
+                {
+                    trueDriverPath[idx++] = driverPath[i];
+                }
+            }
+
+            // Open the driver key
+            if (RegOpenKeyEx(HKEY_LOCAL_MACHINE, trueDriverPath, 0,
+                             KEY_QUERY_VALUE, &regKey) == ERROR_SUCCESS)
+            {
+                TCHAR driverName[64];
+                memset(driverName, 0, 64 * sizeof(TCHAR));
+                DWORD driverNameLength = 64;
+                retVal = RegQueryValueEx(regKey, _T("drv"), NULL, NULL,
+                                         (BYTE*) driverName, &driverNameLength);
+                if (retVal == ERROR_SUCCESS)
+                {
+                    WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1,
+                                 "Graphics card driver name: %s", driverName);
+                }
+                DWORD accLevel = 0;
+                DWORD accLevelS = sizeof(accLevel);
+
+                RegQueryValueEx(regKey, _T("Acceleration.Level"), NULL, NULL,
+                                (LPBYTE) & accLevel, &accLevelS);
+                //Don't care if the key is not found. It probably means that acceleration is enabled
+                if (accLevel != 0)
+                {
+                    // Close the key...
+                    RegCloseKey(regKey);
+
+                    if (RegOpenKeyEx(HKEY_LOCAL_MACHINE, trueDriverPath, 0,
+                                     KEY_SET_VALUE, &regKey) == ERROR_SUCCESS)
+                    {
+                        // try setting it to full
+                        accLevel = 0;
+                        LONG retVal;
+                        retVal = RegSetValueEx(regKey,
+                                               _T("Acceleration.Level"), NULL,
+                                               REG_DWORD, (PBYTE) & accLevel,
+                                               sizeof(DWORD));
+                        if (retVal != ERROR_SUCCESS)
+                        {
+                            fullAccelerationEnabled = false;
+                        }
+                        else
+                        {
+                            RegQueryValueEx(regKey, _T("Acceleration.Level"),
+                                            NULL, NULL, (LPBYTE) & accLevel,
+                                            &accLevelS);
+                            if (accLevel != 0)
+                            {
+                                fullAccelerationEnabled = false;
+                            }
+                            else
+                            {
+                                fullAccelerationEnabled = true;
+                            }
+                        }
+                    }
+                    else
+                    {
+                        fullAccelerationEnabled = false;
+                    }
+                }
+                else
+                {
+                    fullAccelerationEnabled = true;
+                }
+
+                // Close the key...
+                RegCloseKey(regKey);
+            }
+        }
+    }
+}
+
+} //namespace webrtc
+
diff --git a/src/modules/video_render/main/source/windows/video_render_windows_impl.h b/src/modules/video_render/main/source/windows/video_render_windows_impl.h
new file mode 100644
index 0000000..bdc0a0a
--- /dev/null
+++ b/src/modules/video_render/main/source/windows/video_render_windows_impl.h
@@ -0,0 +1,155 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_
+
+#include <Winerror.h>
+#include <dxdiag.h>
+
+#include "i_video_render.h"
+#include "i_video_render_win.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+#define EXPAND(x)            x, sizeof(x)/sizeof(TCHAR)
+
+#pragma comment(lib, "dxguid.lib")
+
+enum VideoRenderWinMethod
+{
+    kVideoRenderWinDd = 0, kVideoRenderWinD3D9 = 1
+};
+
+// Class definitions
+class VideoRenderWindowsImpl: IVideoRender
+{
+public:
+    /*
+     *   Constructor/destructor
+     */
+
+    VideoRenderWindowsImpl(const WebRtc_Word32 id,
+                           const VideoRenderType videoRenderType,
+                           void* window, const bool fullscreen);
+
+    virtual ~VideoRenderWindowsImpl();
+
+    virtual WebRtc_Word32 Init();
+
+    virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    virtual WebRtc_Word32 ChangeWindow(void* window);
+
+    /**************************************************************************
+     *
+     *   Incoming Streams
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderCallback
+            * AddIncomingRenderStream(const WebRtc_UWord32 streamId,
+                                      const WebRtc_UWord32 zOrder,
+                                      const float left, const float top,
+                                      const float right, const float bottom);
+
+    virtual WebRtc_Word32
+            DeleteIncomingRenderStream(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32
+            GetIncomingRenderStreamProperties(const WebRtc_UWord32 streamId,
+                                              WebRtc_UWord32& zOrder,
+                                              float& left, float& top,
+                                              float& right, float& bottom) const;
+
+    /**************************************************************************
+     *
+     *   Start/Stop
+     *
+     ***************************************************************************/
+
+    virtual WebRtc_Word32 StartRender();
+
+    virtual WebRtc_Word32 StopRender();
+
+    /**************************************************************************
+     *
+     *   Properties
+     *
+     ***************************************************************************/
+
+    virtual VideoRenderType RenderType();
+
+    virtual RawVideoType PerferedVideoType();
+
+    virtual bool FullScreen();
+
+    virtual WebRtc_Word32
+            GetGraphicsMemory(WebRtc_UWord64& totalGraphicsMemory,
+                              WebRtc_UWord64& availableGraphicsMemory) const;
+
+    virtual WebRtc_Word32
+            GetScreenResolution(WebRtc_UWord32& screenWidth,
+                                WebRtc_UWord32& screenHeight) const;
+
+    virtual WebRtc_UWord32 RenderFrameRate(const WebRtc_UWord32 streamId);
+
+    virtual WebRtc_Word32 SetStreamCropping(const WebRtc_UWord32 streamId,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom);
+
+    virtual WebRtc_Word32 ConfigureRenderer(const WebRtc_UWord32 streamId,
+                                            const unsigned int zOrder,
+                                            const float left, const float top,
+                                            const float right,
+                                            const float bottom);
+
+    virtual WebRtc_Word32 SetTransparentBackground(const bool enable);
+
+    virtual WebRtc_Word32 SetText(const WebRtc_UWord8 textId,
+                                  const WebRtc_UWord8* text,
+                                  const WebRtc_Word32 textLength,
+                                  const WebRtc_UWord32 textColorRef,
+                                  const WebRtc_UWord32 backgroundColorRef,
+                                  const float left, const float top,
+                                  const float right, const float bottom);
+
+    virtual WebRtc_Word32 SetBitmap(const void* bitMap,
+                                    const WebRtc_UWord8 pictureId,
+                                    const void* colorKey, const float left,
+                                    const float top, const float right,
+                                    const float bottom);
+
+    static int CheckHWAcceleration();
+    static void CheckHWDriver(bool& badDriver, bool& fullAccelerationEnabled);
+
+private:
+
+    void LogOSAndHardwareDetails();
+    HRESULT GetBoolValue(IDxDiagContainer* pObject, WCHAR* wstrName,
+                         BOOL* pbValue);
+    HRESULT GetStringValue(IDxDiagContainer* pObject, WCHAR* wstrName,
+                           TCHAR* strValue, int nStrLen);
+
+    WebRtc_Word32 _id;
+    CriticalSectionWrapper& _renderWindowsCritsect;
+
+    void* _prtWindow;
+    bool _fullscreen;
+
+    VideoRenderWinMethod _renderMethod;
+    IVideoRenderWin* _ptrRendererWin;
+};
+
+} //namespace webrtc
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_SOURCE_WINDOWS_VIDEO_RENDER_WINDOWS_IMPL_H_
diff --git a/src/modules/video_render/main/test/testAPI/renderStartImage.bmp b/src/modules/video_render/main/test/testAPI/renderStartImage.bmp
new file mode 100644
index 0000000..c443a58
--- /dev/null
+++ b/src/modules/video_render/main/test/testAPI/renderStartImage.bmp
Binary files differ
diff --git a/src/modules/video_render/main/test/testAPI/testAPI.cc b/src/modules/video_render/main/test/testAPI/testAPI.cc
new file mode 100644
index 0000000..96defb5
--- /dev/null
+++ b/src/modules/video_render/main/test/testAPI/testAPI.cc
@@ -0,0 +1,664 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testAPI.h"
+
+#include <stdio.h>
+
+#if defined(_WIN32)
+#include <tchar.h>
+#include <windows.h>
+#include <cassert>
+#include <fstream>
+#include <iostream>
+#include <string>
+#include <windows.h>
+#include <ddraw.h>
+
+#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+
+#include <iostream>
+#include <X11/Xlib.h>
+#include <X11/Xutil.h>
+#include <sys/time.h>
+
+#endif
+
+#include "common_types.h"
+#include "process_thread.h"
+#include "module_common_types.h"
+#include "video_render_defines.h"
+#include "video_render.h"
+#include "tick_util.h"
+#include "trace.h"
+#include "system_wrappers/interface/sleep.h"
+
+using namespace webrtc;
+
+void GetTestVideoFrame(WebRtc_UWord8* frame,
+                       WebRtc_Word32 width,
+                       WebRtc_Word32 height,
+                       WebRtc_UWord8 startColor);
+int TestSingleStream(VideoRender* renderModule);
+int TestFullscreenStream(VideoRender* &renderModule,
+                         void* window,
+                         const VideoRenderType videoRenderType);
+int TestBitmapText(VideoRender* renderModule);
+int TestMultipleStreams(VideoRender* renderModule);
+int TestExternalRender(VideoRender* renderModule);
+
+#define TEST_FRAME_RATE 30
+#define TEST_TIME_SECOND 5
+#define TEST_FRAME_NUM (TEST_FRAME_RATE*TEST_TIME_SECOND)
+#define TEST_STREAM0_START_COLOR 0
+#define TEST_STREAM1_START_COLOR 64
+#define TEST_STREAM2_START_COLOR 128
+#define TEST_STREAM3_START_COLOR 192
+
+#if defined(WEBRTC_LINUX)
+
+#define GET_TIME_IN_MS timeGetTime()
+
+unsigned long timeGetTime()
+{
+    struct timeval tv;
+    struct timezone tz;
+    unsigned long val;
+
+    gettimeofday(&tv, &tz);
+    val= tv.tv_sec*1000+ tv.tv_usec/1000;
+    return(val);
+}
+
+#elif defined(WEBRTC_MAC_INTEL)
+
+#include <unistd.h>
+
+#define GET_TIME_IN_MS timeGetTime()
+
+unsigned long timeGetTime()
+{
+    return 0;
+}
+
+#else
+
+#define GET_TIME_IN_MS ::timeGetTime()
+
+#endif
+
+using namespace std;
+
+#if defined(_WIN32)
+LRESULT CALLBACK WebRtcWinProc( HWND hWnd,UINT uMsg,WPARAM wParam,LPARAM lParam)
+{
+    switch(uMsg)
+    {
+        case WM_DESTROY:
+        break;
+        case WM_COMMAND:
+        break;
+    }
+    return DefWindowProc(hWnd,uMsg,wParam,lParam);
+}
+
+int WebRtcCreateWindow(HWND &hwndMain,int winNum, int width, int height)
+{
+    HINSTANCE hinst = GetModuleHandle(0);
+    WNDCLASSEX wcx;
+    wcx.hInstance = hinst;
+    wcx.lpszClassName = TEXT("VideoRenderTest");
+    wcx.lpfnWndProc = (WNDPROC)WebRtcWinProc;
+    wcx.style = CS_DBLCLKS;
+    wcx.hIcon = LoadIcon (NULL, IDI_APPLICATION);
+    wcx.hIconSm = LoadIcon (NULL, IDI_APPLICATION);
+    wcx.hCursor = LoadCursor (NULL, IDC_ARROW);
+    wcx.lpszMenuName = NULL;
+    wcx.cbSize = sizeof (WNDCLASSEX);
+    wcx.cbClsExtra = 0;
+    wcx.cbWndExtra = 0;
+    wcx.hbrBackground = GetSysColorBrush(COLOR_3DFACE);
+
+    // Register our window class with the operating system.
+    // If there is an error, exit program.
+    if ( !RegisterClassEx (&wcx) )
+    {
+        MessageBox( 0, TEXT("Failed to register window class!"),TEXT("Error!"), MB_OK|MB_ICONERROR );
+        return 0;
+    }
+
+    // Create the main window.
+    hwndMain = CreateWindowEx(
+            0, // no extended styles
+            TEXT("VideoRenderTest"), // class name
+            TEXT("VideoRenderTest Window"), // window name
+            WS_OVERLAPPED |WS_THICKFRAME, // overlapped window
+            800, // horizontal position
+            0, // vertical position
+            width, // width
+            height, // height
+            (HWND) NULL, // no parent or owner window
+            (HMENU) NULL, // class menu used
+            hinst, // instance handle
+            NULL); // no window creation data
+
+    if (!hwndMain)
+        return -1;
+
+    // Show the window using the flag specified by the program
+    // that started the application, and send the application
+    // a WM_PAINT message.
+
+    ShowWindow(hwndMain, SW_SHOWDEFAULT);
+    UpdateWindow(hwndMain);
+    return 0;
+}
+
+#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+
+int WebRtcCreateWindow(Window *outWindow, Display **outDisplay, int winNum, int width, int height) // unsigned char* title, int titleLength)
+
+{
+    int screen, xpos = 10, ypos = 10;
+    XEvent evnt;
+    XSetWindowAttributes xswa; // window attribute struct
+    XVisualInfo vinfo; // screen visual info struct
+    unsigned long mask; // attribute mask
+
+    // get connection handle to xserver
+    Display* _display = XOpenDisplay( NULL );
+
+    // get screen number
+    screen = DefaultScreen(_display);
+
+    // put desired visual info for the screen in vinfo
+    if( XMatchVisualInfo(_display, screen, 24, TrueColor, &vinfo) != 0 )
+    {
+        //printf( "Screen visual info match!\n" );
+    }
+
+    // set window attributes
+    xswa.colormap = XCreateColormap(_display, DefaultRootWindow(_display), vinfo.visual, AllocNone);
+    xswa.event_mask = StructureNotifyMask | ExposureMask;
+    xswa.background_pixel = 0;
+    xswa.border_pixel = 0;
+
+    // value mask for attributes
+    mask = CWBackPixel | CWBorderPixel | CWColormap | CWEventMask;
+
+    switch( winNum )
+    {
+        case 0:
+        xpos = 200;
+        ypos = 200;
+        break;
+        case 1:
+        xpos = 300;
+        ypos = 200;
+        break;
+        default:
+        break;
+    }
+
+    // create a subwindow for parent (defroot)
+    Window _window = XCreateWindow(_display, DefaultRootWindow(_display),
+            xpos, ypos,
+            width,
+            height,
+            0, vinfo.depth,
+            InputOutput,
+            vinfo.visual,
+            mask, &xswa);
+
+    // Set window name
+    if( winNum == 0 )
+    {
+        XStoreName(_display, _window, "VE MM Local Window");
+        XSetIconName(_display, _window, "VE MM Local Window");
+    }
+    else if( winNum == 1 )
+    {
+        XStoreName(_display, _window, "VE MM Remote Window");
+        XSetIconName(_display, _window, "VE MM Remote Window");
+    }
+
+    // make x report events for mask
+    XSelectInput(_display, _window, StructureNotifyMask);
+
+    // map the window to the display
+    XMapWindow(_display, _window);
+
+    // wait for map event
+    do
+    {
+        XNextEvent(_display, &evnt);
+    }
+    while (evnt.type != MapNotify || evnt.xmap.event != _window);
+
+    *outWindow = _window;
+    *outDisplay = _display;
+
+    return 0;
+}
+#endif  // LINUX
+
+// Note: Mac code is in testApi_mac.mm.
+
+class MyRenderCallback: public VideoRenderCallback
+{
+public:
+    MyRenderCallback() :
+        _cnt(0)
+    {
+    }
+    ;
+    ~MyRenderCallback()
+    {
+    }
+    ;
+    virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 streamId,
+                                      VideoFrame& videoFrame)
+    {
+        _cnt++;
+        if (_cnt % 100 == 0)
+        {
+            printf("Render callback %d \n",_cnt);
+        }
+        return 0;
+    }
+    WebRtc_Word32 _cnt;
+};
+
+void GetTestVideoFrame(WebRtc_UWord8* frame,
+                       WebRtc_Word32 width,
+                       WebRtc_Word32 height,
+                       WebRtc_UWord8 startColor) {
+    // changing color
+    static WebRtc_UWord8 color = startColor;
+
+    WebRtc_UWord8* destY = frame;
+    WebRtc_UWord8* destU = &frame[width*height];
+    WebRtc_UWord8* destV = &frame[width*height*5/4];
+    //Y
+    for (WebRtc_Word32 y=0; y<(width*height); y++)
+    {
+      destY[y] = color;
+    }
+    //U
+    for (WebRtc_Word32 u=0; u<(width*height/4); u++)
+    {
+      destU[u] = color;
+    }
+    //V
+    for (WebRtc_Word32 v=0; v<(width*height/4); v++)
+    {
+      destV[v] = color;
+    }
+
+    color++;
+}
+
+int TestSingleStream(VideoRender* renderModule) {
+    int error = 0;
+    // Add settings for a stream to render
+    printf("Add stream 0 to entire window\n");
+    const int streamId0 = 0;
+    VideoRenderCallback* renderCallback0 = renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f, 1.0f, 1.0f);
+    assert(renderCallback0 != NULL);
+
+#ifndef WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
+    MyRenderCallback externalRender;
+    renderModule->AddExternalRenderCallback(streamId0, &externalRender);
+#endif
+
+    printf("Start render\n");
+    error = renderModule->StartRender(streamId0);
+    if (error != 0) {
+      // TODO(phoglund): This test will not work if compiled in release mode.
+      // This rather silly construct here is to avoid compilation errors when
+      // compiling in release. Release => no asserts => unused 'error' variable.
+      assert(false);
+    }
+
+    // Loop through an I420 file and render each frame
+    const WebRtc_UWord32 width = 352;
+    const WebRtc_UWord32 height = 288;
+    const WebRtc_UWord32 numBytes = (WebRtc_UWord32)(1.5 * width * height);
+
+    VideoFrame videoFrame0;
+    videoFrame0.VerifyAndAllocate(numBytes);
+
+    const WebRtc_UWord32 renderDelayMs = 500;
+
+    for (int i=0; i<TEST_FRAME_NUM; i++) {
+        GetTestVideoFrame(videoFrame0.Buffer(), width, height, TEST_STREAM0_START_COLOR);
+        videoFrame0.SetRenderTime(TickTime::MillisecondTimestamp() + renderDelayMs); // Render this frame with the specified delay
+        videoFrame0.SetWidth(width);
+        videoFrame0.SetHeight(height);
+        videoFrame0.SetLength(numBytes);
+        renderCallback0->RenderFrame(streamId0, videoFrame0);
+        SleepMs(1000/TEST_FRAME_RATE);
+    }
+
+    videoFrame0.Free();
+
+    // Shut down
+    printf("Closing...\n");
+    error = renderModule->StopRender(streamId0);
+    assert(error == 0);
+
+    error = renderModule->DeleteIncomingRenderStream(streamId0);
+    assert(error == 0);
+
+    return 0;
+}
+
+int TestFullscreenStream(VideoRender* &renderModule,
+                         void* window,
+                         const VideoRenderType videoRenderType) {
+    VideoRender::DestroyVideoRender(renderModule);
+    renderModule = VideoRender::CreateVideoRender(12345, window, true, videoRenderType);
+
+    TestSingleStream(renderModule);
+
+    VideoRender::DestroyVideoRender(renderModule);
+    renderModule = VideoRender::CreateVideoRender(12345, window, false, videoRenderType);
+
+    return 0;
+}
+
+int TestBitmapText(VideoRender* renderModule) {
+#if defined(WIN32)
+
+    int error = 0;
+    // Add settings for a stream to render
+    printf("Add stream 0 to entire window\n");
+    const int streamId0 = 0;
+    VideoRenderCallback* renderCallback0 = renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f, 1.0f, 1.0f);
+    assert(renderCallback0 != NULL);
+
+    printf("Adding Bitmap\n");
+    DDCOLORKEY ColorKey; // black
+    ColorKey.dwColorSpaceHighValue = RGB(0, 0, 0);
+    ColorKey.dwColorSpaceLowValue = RGB(0, 0, 0);
+    HBITMAP hbm = (HBITMAP)LoadImage(NULL,
+                                     (LPCTSTR)_T("renderStartImage.bmp"),
+                                     IMAGE_BITMAP, 0, 0, LR_LOADFROMFILE);
+    renderModule->SetBitmap(hbm, 0, &ColorKey, 0.0f, 0.0f, 0.3f,
+                             0.3f);
+
+    printf("Adding Text\n");
+    renderModule->SetText(1, (WebRtc_UWord8*) "WebRtc Render Demo App", 20,
+                           RGB(255, 0, 0), RGB(0, 0, 0), 0.25f, 0.1f, 1.0f,
+                           1.0f);
+
+    printf("Start render\n");
+    error = renderModule->StartRender(streamId0);
+    assert(error == 0);
+
+        // Loop through an I420 file and render each frame
+    const WebRtc_UWord32 width = 352;
+    const WebRtc_UWord32 height = 288;
+    const WebRtc_UWord32 numBytes = (WebRtc_UWord32)(1.5 * width * height);
+
+    VideoFrame videoFrame0;
+    videoFrame0.VerifyAndAllocate(numBytes);
+
+    const WebRtc_UWord32 renderDelayMs = 500;
+
+    for (int i=0; i<TEST_FRAME_NUM; i++) {
+        GetTestVideoFrame(videoFrame0.Buffer(), width, height, TEST_STREAM0_START_COLOR);
+        videoFrame0.SetRenderTime(TickTime::MillisecondTimestamp() + renderDelayMs); // Render this frame with the specified delay
+        videoFrame0.SetWidth(width);
+        videoFrame0.SetHeight(height);
+        videoFrame0.SetLength(numBytes);
+        renderCallback0->RenderFrame(streamId0, videoFrame0);
+        SleepMs(1000/TEST_FRAME_RATE);
+    }
+    videoFrame0.Free();
+    // Sleep and let all frames be rendered before closing
+    SleepMs(renderDelayMs*2);
+
+
+    // Shut down
+    printf("Closing...\n");
+    ColorKey.dwColorSpaceHighValue = RGB(0,0,0);
+    ColorKey.dwColorSpaceLowValue = RGB(0,0,0);
+    renderModule->SetBitmap(NULL, 0, &ColorKey, 0.0f, 0.0f, 0.0f, 0.0f);
+    renderModule->SetText(1, NULL, 20, RGB(255,255,255),
+                    RGB(0,0,0), 0.0f, 0.0f, 0.0f, 0.0f);
+
+    error = renderModule->StopRender(streamId0);
+    assert(error == 0);
+
+    error = renderModule->DeleteIncomingRenderStream(streamId0);
+    assert(error == 0);
+#endif
+
+    return 0;
+}
+
+int TestMultipleStreams(VideoRender* renderModule) {
+    // Add settings for a stream to render
+    printf("Add stream 0\n");
+    const int streamId0 = 0;
+    VideoRenderCallback* renderCallback0 =
+        renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f, 0.45f, 0.45f);
+    assert(renderCallback0 != NULL);
+    printf("Add stream 1\n");
+    const int streamId1 = 1;
+    VideoRenderCallback* renderCallback1 =
+        renderModule->AddIncomingRenderStream(streamId1, 0, 0.55f, 0.0f, 1.0f, 0.45f);
+    assert(renderCallback1 != NULL);
+    printf("Add stream 2\n");
+    const int streamId2 = 2;
+    VideoRenderCallback* renderCallback2 =
+        renderModule->AddIncomingRenderStream(streamId2, 0, 0.0f, 0.55f, 0.45f, 1.0f);
+    assert(renderCallback2 != NULL);
+    printf("Add stream 3\n");
+    const int streamId3 = 3;
+    VideoRenderCallback* renderCallback3 =
+        renderModule->AddIncomingRenderStream(streamId3, 0, 0.55f, 0.55f, 1.0f, 1.0f);
+    assert(renderCallback3 != NULL);
+    assert(renderModule->StartRender(streamId0) == 0);
+    assert(renderModule->StartRender(streamId1) == 0);
+    assert(renderModule->StartRender(streamId2) == 0);
+    assert(renderModule->StartRender(streamId3) == 0);
+
+    // Loop through an I420 file and render each frame
+    const WebRtc_UWord32 width = 352;
+    const WebRtc_UWord32 height = 288;
+    const WebRtc_UWord32 numBytes = (WebRtc_UWord32)(1.5 * width * height);
+
+    VideoFrame videoFrame0;
+    videoFrame0.VerifyAndAllocate(numBytes);
+    VideoFrame videoFrame1;
+    videoFrame1.VerifyAndAllocate(numBytes);
+    VideoFrame videoFrame2;
+    videoFrame2.VerifyAndAllocate(numBytes);
+    VideoFrame videoFrame3;
+    videoFrame3.VerifyAndAllocate(numBytes);
+
+    const WebRtc_UWord32 renderDelayMs = 500;
+
+    for (int i=0; i<TEST_FRAME_NUM; i++) {
+        GetTestVideoFrame(videoFrame0.Buffer(), width, height, TEST_STREAM0_START_COLOR);
+        videoFrame0.SetRenderTime(TickTime::MillisecondTimestamp() + renderDelayMs); // Render this frame with the specified delay
+        videoFrame0.SetWidth(width);
+        videoFrame0.SetHeight(height);
+        videoFrame0.SetLength(numBytes);
+        renderCallback0->RenderFrame(streamId0, videoFrame0);
+
+        GetTestVideoFrame(videoFrame1.Buffer(), width, height, TEST_STREAM1_START_COLOR);
+        videoFrame1.SetRenderTime(TickTime::MillisecondTimestamp() + renderDelayMs); // Render this frame with the specified delay
+        videoFrame1.SetWidth(width);
+        videoFrame1.SetHeight(height);
+        videoFrame1.SetLength(numBytes);
+        renderCallback1->RenderFrame(streamId1, videoFrame1);
+
+        GetTestVideoFrame(videoFrame2.Buffer(), width, height, TEST_STREAM2_START_COLOR);
+        videoFrame2.SetRenderTime(TickTime::MillisecondTimestamp() + renderDelayMs); // Render this frame with the specified delay
+        videoFrame2.SetWidth(width);
+        videoFrame2.SetHeight(height);
+        videoFrame2.SetLength(numBytes);
+        renderCallback2->RenderFrame(streamId2, videoFrame2);
+
+        GetTestVideoFrame(videoFrame3.Buffer(), width, height, TEST_STREAM3_START_COLOR);
+        videoFrame3.SetRenderTime(TickTime::MillisecondTimestamp() + renderDelayMs); // Render this frame with the specified delay
+        videoFrame3.SetWidth(width);
+        videoFrame3.SetHeight(height);
+        videoFrame3.SetLength(numBytes);
+        renderCallback3->RenderFrame(streamId3, videoFrame3);
+
+        SleepMs(1000/TEST_FRAME_RATE);
+    }
+
+    videoFrame0.Free();
+    videoFrame1.Free();
+    videoFrame2.Free();
+    videoFrame3.Free();
+
+    // Shut down
+    printf("Closing...\n");
+    assert(renderModule->StopRender(streamId0) == 0);
+    assert(renderModule->DeleteIncomingRenderStream(streamId0) == 0);
+    assert(renderModule->StopRender(streamId1) == 0);
+    assert(renderModule->DeleteIncomingRenderStream(streamId1) == 0);
+    assert(renderModule->StopRender(streamId2) == 0);
+    assert(renderModule->DeleteIncomingRenderStream(streamId2) == 0);
+    assert(renderModule->StopRender(streamId3) == 0);
+    assert(renderModule->DeleteIncomingRenderStream(streamId3) == 0);
+
+    return 0;
+}
+
+int TestExternalRender(VideoRender* renderModule) {
+    MyRenderCallback *externalRender = new MyRenderCallback();
+
+    const int streamId0 = 0;
+    VideoRenderCallback* renderCallback0 =
+        renderModule->AddIncomingRenderStream(streamId0, 0, 0.0f, 0.0f,
+                                                   1.0f, 1.0f);
+    assert(renderCallback0 != NULL);
+    assert(renderModule->AddExternalRenderCallback(streamId0,
+                                                   externalRender) == 0);
+
+    assert(renderModule->StartRender(streamId0) == 0);
+
+    const WebRtc_UWord32 width = 352;
+    const WebRtc_UWord32 height = 288;
+    const WebRtc_UWord32 numBytes = (WebRtc_UWord32) (1.5 * width * height);
+    VideoFrame videoFrame0;
+    videoFrame0.VerifyAndAllocate(numBytes);
+
+    const WebRtc_UWord32 renderDelayMs = 500;
+    int frameCount = TEST_FRAME_NUM;
+    for (int i=0; i<frameCount; i++) {
+        videoFrame0.SetRenderTime(TickTime::MillisecondTimestamp() + renderDelayMs);
+        videoFrame0.SetWidth(width);
+        videoFrame0.SetHeight(height);
+        renderCallback0->RenderFrame(streamId0, videoFrame0);
+        SleepMs(33);
+    }
+
+    // Sleep and let all frames be rendered before closing
+    SleepMs(2*renderDelayMs);
+    videoFrame0.Free();
+
+    assert(renderModule->StopRender(streamId0) == 0);
+    assert(renderModule->DeleteIncomingRenderStream(streamId0) == 0);
+    assert(frameCount == externalRender->_cnt);
+
+    delete externalRender;
+    externalRender = NULL;
+
+    return 0;
+}
+
+void RunVideoRenderTests(void* window, VideoRenderType windowType) {
+#ifndef WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
+    windowType = kRenderExternal;
+#endif
+
+    int myId = 12345;
+
+    // Create the render module
+    printf("Create render module\n");
+    VideoRender* renderModule = NULL;
+    renderModule = VideoRender::CreateVideoRender(myId,
+                                                  window,
+                                                  false,
+                                                  windowType);
+    assert(renderModule != NULL);
+
+
+    // ##### Test single stream rendering ####
+    printf("#### TestSingleStream ####\n");
+    if (TestSingleStream(renderModule) != 0) {
+        printf ("TestSingleStream failed\n");
+    }
+
+    // ##### Test fullscreen rendering ####
+    printf("#### TestFullscreenStream ####\n");
+    if (TestFullscreenStream(renderModule, window, windowType) != 0) {
+        printf ("TestFullscreenStream failed\n");
+    }
+
+    // ##### Test bitmap and text ####
+    printf("#### TestBitmapText ####\n");
+    if (TestBitmapText(renderModule) != 0) {
+        printf ("TestBitmapText failed\n");
+    }
+
+    // ##### Test multiple streams ####
+    printf("#### TestMultipleStreams ####\n");
+    if (TestMultipleStreams(renderModule) != 0) {
+        printf ("TestMultipleStreams failed\n");
+    }
+
+    // ##### Test multiple streams ####
+    printf("#### TestExternalRender ####\n");
+    if (TestExternalRender(renderModule) != 0) {
+        printf ("TestExternalRender failed\n");
+    }
+
+    delete renderModule;
+    renderModule = NULL;
+
+    printf("VideoRender unit tests passed.\n");
+}
+
+// Note: The Mac main is implemented in testApi_mac.mm.
+#if defined(_WIN32)
+int _tmain(int argc, _TCHAR* argv[])
+#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+int main(int argc, char* argv[])
+#endif
+#if !defined(WEBRTC_MAC) && !defined(WEBRTC_ANDROID)
+{
+    // Create a window for testing.
+    void* window = NULL;
+#if defined (_WIN32)
+    HWND testHwnd;
+    WebRtcCreateWindow(testHwnd, 0, 352, 288);
+    window = (void*)testHwnd;
+    VideoRenderType windowType = kRenderWindows;
+#elif defined(WEBRTC_LINUX)
+    Window testWindow;
+    Display* display;
+    WebRtcCreateWindow(&testWindow, &display, 0, 352, 288);
+    VideoRenderType windowType = kRenderX11;
+    window = (void*)testWindow;
+#endif // WEBRTC_LINUX
+
+    RunVideoRenderTests(window, windowType);
+    return 0;
+}
+#endif  // !WEBRTC_MAC
diff --git a/src/modules/video_render/main/test/testAPI/testAPI.h b/src/modules/video_render/main/test/testAPI/testAPI.h
new file mode 100644
index 0000000..e0e0631
--- /dev/null
+++ b/src/modules/video_render/main/test/testAPI/testAPI.h
@@ -0,0 +1,18 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_MODULES_VIDEO_RENDER_MAIN_TEST_TESTAPI_TESTAPI_H
+#define WEBRTC_MODULES_VIDEO_RENDER_MAIN_TEST_TESTAPI_TESTAPI_H
+
+#include "video_render_defines.h"
+
+void RunVideoRenderTests(void* window, webrtc::VideoRenderType windowType);
+
+#endif  // WEBRTC_MODULES_VIDEO_RENDER_MAIN_TEST_TESTAPI_TESTAPI_H
diff --git a/src/modules/video_render/main/test/testAPI/testAPI_android.cc b/src/modules/video_render/main/test/testAPI/testAPI_android.cc
new file mode 100644
index 0000000..c62a62f
--- /dev/null
+++ b/src/modules/video_render/main/test/testAPI/testAPI_android.cc
@@ -0,0 +1,15 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+int main(int argc, char* argv[]) {
+  // TODO(leozwang): Video render test app is not ready on android,
+  // make it dummy test now, will add android specific tests
+  return 0;
+}
diff --git a/src/modules/video_render/main/test/testAPI/testAPI_mac.mm b/src/modules/video_render/main/test/testAPI/testAPI_mac.mm
new file mode 100644
index 0000000..2f836ab
--- /dev/null
+++ b/src/modules/video_render/main/test/testAPI/testAPI_mac.mm
@@ -0,0 +1,69 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testAPI.h"
+
+#include <iostream>
+
+#import <Foundation/Foundation.h>
+#import <Cocoa/Cocoa.h>
+#import <AppKit/AppKit.h>
+#import <QTKit/QTKit.h>
+#include <sys/time.h>
+
+#include "common_types.h"
+#import "modules/video_render/main/source/mac/cocoa_render_view.h"
+#include "module_common_types.h"
+#include "process_thread.h"
+#include "tick_util.h"
+#include "trace.h"
+#include "video_render_defines.h"
+#include "video_render.h"
+
+using namespace webrtc;
+
+int WebRtcCreateWindow(CocoaRenderView*& cocoaRenderer, int winNum, int width, int height)
+{
+    // In Cocoa, rendering is not done directly to a window like in Windows and Linux.
+    // It is rendererd to a Subclass of NSOpenGLView
+
+    // create cocoa container window
+    NSRect outWindowFrame = NSMakeRect(200, 800, width + 20, height + 20);
+    NSWindow* outWindow = [[NSWindow alloc] initWithContentRect:outWindowFrame 
+                                                      styleMask:NSTitledWindowMask 
+                                                        backing:NSBackingStoreBuffered 
+                                                          defer:NO];
+    [outWindow orderOut:nil];
+    [outWindow setTitle:@"Cocoa Renderer"];
+    [outWindow setBackgroundColor:[NSColor blueColor]];
+
+    // create renderer and attach to window
+    NSRect cocoaRendererFrame = NSMakeRect(10, 10, width, height);
+    cocoaRenderer = [[CocoaRenderView alloc] initWithFrame:cocoaRendererFrame];
+    [[outWindow contentView] addSubview:(NSView*)cocoaRenderer];
+
+    [outWindow makeKeyAndOrderFront:NSApp];
+
+    return 0;
+}
+
+int main (int argc, const char * argv[]) {
+    NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
+    [NSApplication sharedApplication];
+
+    CocoaRenderView* testWindow;
+    WebRtcCreateWindow(testWindow, 0, 352, 288);
+    VideoRenderType windowType = kRenderCocoa;
+    void* window = (void*)testWindow;
+
+    RunVideoRenderTests(window, windowType);
+
+    [pool release];
+}
diff --git a/src/system_wrappers/test/TestSort/TestSort.cpp b/src/system_wrappers/test/TestSort/TestSort.cc
similarity index 100%
rename from src/system_wrappers/test/TestSort/TestSort.cpp
rename to src/system_wrappers/test/TestSort/TestSort.cc
diff --git a/src/test/OWNERS b/src/test/OWNERS
new file mode 100644
index 0000000..fec9caa
--- /dev/null
+++ b/src/test/OWNERS
@@ -0,0 +1,4 @@
+phoglund@webrtc.org

+kjellander@webrtc.org

+ivinnichenko@webrtc.org

+

diff --git a/src/test/fuzz/corpus/template.html b/src/test/fuzz/corpus/template.html
new file mode 100644
index 0000000..fc71799
--- /dev/null
+++ b/src/test/fuzz/corpus/template.html
@@ -0,0 +1,45 @@
+<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
+<!--
+  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+
+  Use of this source code is governed by a BSD-style license
+  that can be found in the LICENSE file in the root of the source
+  tree. An additional intellectual property rights grant can be found
+  in the file PATENTS.  All contributing project authors may
+  be found in the AUTHORS file in the root of the source tree.
+-->
+<html>
+<head>
+  <title>WebRTC Fuzz Test Template</title>
+  <script type="text/javascript">
+  function requestVideo() {
+    BEFORE_GET_USER_MEDIA_CALL
+    navigator.webkitGetUserMedia(FUZZ_USER_MEDIA_INPUT,
+                                 FUZZ_OK_CALLBACK,
+                                 FUZZ_FAIL_CALLBACK);
+    AFTER_GET_USER_MEDIA_CALL
+  }
+
+  function getUserMediaFailedCallback(error) {
+    console.log(error.code)
+  }
+
+  function getUserMediaOkCallback(stream) {
+    var streamUrl = webkitURL.createObjectURL(stream);
+    document.getElementById("view1").src = streamUrl;
+    stream.stop()
+  }
+  </script>
+</head>
+<body onload="requestVideo();">
+  <table border="0">
+    <tr>
+      <td>Local Preview</td>
+    </tr>
+    <tr>
+      <td><video width="320" height="240" id="view1"
+          autoplay="autoplay"></video></td>
+    </tr>
+  </table>
+</body>
+</html>
\ No newline at end of file
diff --git a/src/test/fuzz/fuzz_main_run.py b/src/test/fuzz/fuzz_main_run.py
new file mode 100755
index 0000000..1aab256
--- /dev/null
+++ b/src/test/fuzz/fuzz_main_run.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+# Based on the ClusterFuzz simple fuzzer template.
+
+import getopt
+import os
+import sys
+import tempfile
+import time
+
+import get_user_media_fuzz
+
+
+def GenerateData(input_dir):
+  template = open(os.path.join(input_dir, 'template.html'))
+  file_data = template.read()
+  template.close()
+  file_extension = 'html'
+
+  file_data = get_user_media_fuzz.Fuzz(file_data)
+
+  return file_data, file_extension
+
+
+if __name__ == '__main__':
+  start_time = time.time()
+
+  no_of_files = None
+  input_dir = None
+  output_dir = None
+  optlist, args = getopt.getopt(sys.argv[1:], '', \
+      ['no_of_files=', 'output_dir=', 'input_dir='])
+  for option, value in optlist:
+    if option == '--no_of_files':     no_of_files = int(value)
+    elif option == '--output_dir':    output_dir = value
+    elif option == '--input_dir':     input_dir = value
+  assert no_of_files is not None, 'Missing "--no_of_files" argument'
+  assert output_dir is not None, 'Missing "--output_dir" argument'
+  assert input_dir is not None, 'Missing "--input_dir" argument'
+
+  for file_no in range(no_of_files):
+    file_data, file_extension = GenerateData(input_dir)
+    file_descriptor, file_path = tempfile.mkstemp(
+        prefix='fuzz-%d-%d' % (start_time, file_no),
+        suffix='.' + file_extension,
+        dir=output_dir)
+    file = os.fdopen(file_descriptor, 'wb')
+    print 'Writing %d bytes to "%s"' % (len(file_data), file_path)
+    print file_data
+    file.write(file_data)
+    file.close()
\ No newline at end of file
diff --git a/src/test/fuzz/get_user_media_fuzz.py b/src/test/fuzz/get_user_media_fuzz.py
new file mode 100644
index 0000000..c35ef1b
--- /dev/null
+++ b/src/test/fuzz/get_user_media_fuzz.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+import random
+import string
+
+
+class MissingParameterException(Exception):
+  pass
+
+
+def FillInParameter(parameter, value, template):
+  if parameter not in template:
+    raise MissingParameterException('Did not find parameter %s in template.' %
+                                    parameter)
+
+  return template.replace(parameter, value)
+
+
+def RandomIdentifier():
+  length = random.randint(1, 25)
+  return (random.choice(string.letters) +
+          ''.join(random.choice(string.letters + string.digits)
+                  for i in xrange(length)))
+
+
+def GenerateRandomJavascriptAttributes(num_attributes):
+  return ['%s: %s' % (RandomIdentifier(), GenerateRandomJavascriptValue())
+          for i in xrange(num_attributes)]
+
+
+def MakeJavascriptObject(attributes):
+  return '{ ' + ', '.join(attributes) + ' }'
+
+
+def GenerateRandomJavascriptFunction():
+  num_parameters = random.randint(0, 10)
+  parameter_list = ', '.join(RandomIdentifier() for i in xrange(num_parameters))
+  return 'function ' + RandomIdentifier() + '(' + parameter_list + ')' + '{ }'
+
+
+def GenerateRandomJavascriptValue():
+  roll = random.random()
+  if roll < 0.3:
+    return '"' + RandomIdentifier() + '"'
+  elif roll < 0.6:
+    return str(random.randint(-10000000, 10000000))
+  elif roll < 0.9:
+    # Functions are first-class objects.
+    return GenerateRandomJavascriptFunction()
+  else:
+    return 'true' if random.random() < 0.5 else 'false'
+
+
+def Fuzz(template):
+  """Generates a single random HTML page which tries to mess with getUserMedia.
+
+  We require a template which has certain placeholders defined in it (such
+  as FUZZ_USER_MEDIA_INPUT). We then replace these placeholders with random
+  identifiers and data in certain patterns. For instance, since the getUserMedia
+  function accepts an object, we try to pass in everything from {video:true,
+  audio:false} (which is a correct value) to {sdjkjsjh34sd:455, video:'yxuhsd'}
+  and other strange things.
+
+  See the template at corpus/template.html for an example of how a template
+  looks like.
+  """
+  random.seed()
+  attributes = GenerateRandomJavascriptAttributes(random.randint(0, 10))
+  if (random.random() < 0.8):
+    attributes.append('video: %s' % GenerateRandomJavascriptValue())
+  if (random.random() < 0.8):
+    attributes.append('audio: %s' % GenerateRandomJavascriptValue())
+  input_object = MakeJavascriptObject(attributes)
+  template = FillInParameter('FUZZ_USER_MEDIA_INPUT', input_object, template)
+
+  ok_callback = (GenerateRandomJavascriptValue()
+                 if random.random() < 0.5 else 'getUserMediaOkCallback')
+  template = FillInParameter('FUZZ_OK_CALLBACK', ok_callback, template)
+
+  fail_callback = (GenerateRandomJavascriptValue()
+                   if random.random() < 0.5 else 'getUserMediaFailedCallback')
+  template = FillInParameter('FUZZ_FAIL_CALLBACK', fail_callback, template)
+
+  before_call = 'location.reload();' if random.random() < 0.1 else ''
+  template = FillInParameter('BEFORE_GET_USER_MEDIA_CALL', before_call,
+                             template)
+
+  after_call = 'location.reload();' if random.random() < 0.3 else ''
+  template = FillInParameter('AFTER_GET_USER_MEDIA_CALL', after_call,
+                             template)
+
+  return template
diff --git a/src/test/libtest/helpers/bit_flip_encryption.cc b/src/test/libtest/helpers/bit_flip_encryption.cc
new file mode 100644
index 0000000..e6c4152
--- /dev/null
+++ b/src/test/libtest/helpers/bit_flip_encryption.cc
@@ -0,0 +1,40 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/libtest/include/bit_flip_encryption.h"
+
+#include <cstdlib>
+
+float NormalizedRand() {
+  return static_cast<float>(rand()) /
+         static_cast<float>(RAND_MAX);
+}
+
+BitFlipEncryption::BitFlipEncryption(unsigned int rand_seed,
+                                     float flip_probability)
+    : flip_probability_(flip_probability),
+      flip_count_(0) {
+  srand(rand_seed);
+}
+
+void BitFlipEncryption::FlipSomeBitsInData(const unsigned char* in_data,
+                                           unsigned char* out_data,
+                                           int bytes_in, int* bytes_out) {
+  for (int i = 0; i < bytes_in; i++) {
+    out_data[i] = in_data[i];
+
+    if (NormalizedRand() < flip_probability_) {
+      int bit_to_flip = rand() % 8;
+      out_data[i] ^= 1 << bit_to_flip;
+      flip_count_++;
+    }
+  }
+  *bytes_out = bytes_in;
+}
diff --git a/src/test/libtest/helpers/random_encryption.cc b/src/test/libtest/helpers/random_encryption.cc
new file mode 100644
index 0000000..4798d13
--- /dev/null
+++ b/src/test/libtest/helpers/random_encryption.cc
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/libtest/include/random_encryption.h"
+
+#include <algorithm>
+#include <cstdlib>
+#include <cmath>
+
+#include "video_engine/vie_defines.h"
+
+static int Saturate(int value, int min, int max) {
+  return std::min(std::max(value, min), max);
+}
+
+RandomEncryption::RandomEncryption(unsigned int rand_seed) {
+  srand(rand_seed);
+}
+
+// Generates some completely random data with roughly the right length.
+void RandomEncryption::GenerateRandomData(unsigned char* out_data, int bytes_in,
+                                          int* bytes_out) {
+  int out_length = MakeUpSimilarLength(bytes_in);
+  for (int i = 0; i < out_length; i++) {
+    // The modulo will skew the random distribution a bit, but I think it
+    // will be random enough.
+    out_data[i] = static_cast<unsigned char>(rand() % 256);
+  }
+  *bytes_out = out_length;
+}
+
+// Makes up a length within +- 50 of the original length, without
+// overstepping the contract for encrypt / decrypt.
+int RandomEncryption::MakeUpSimilarLength(int original_length) {
+  int sign = rand() - RAND_MAX / 2;
+  int length = original_length + sign * rand() % 50;
+
+  return Saturate(length, 0, static_cast<int>(webrtc::kViEMaxMtu));
+}
diff --git a/src/test/libtest/include/bit_flip_encryption.h b/src/test/libtest/include/bit_flip_encryption.h
new file mode 100644
index 0000000..7e14c88
--- /dev/null
+++ b/src/test/libtest/include/bit_flip_encryption.h
@@ -0,0 +1,61 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VIDEO_ENGINE_TEST_AUTO_TEST_HELPERS_BIT_FLIP_ENCRYPTION_H_
+#define SRC_VIDEO_ENGINE_TEST_AUTO_TEST_HELPERS_BIT_FLIP_ENCRYPTION_H_
+
+#include "common_types.h"
+
+// This encryption scheme will randomly flip bits every now and then in the
+// input data.
+class BitFlipEncryption : public webrtc::Encryption {
+ public:
+  // Args:
+  //   rand_seed: the seed to initialize the test's random generator with.
+  //   flip_probability: A number [0, 1] which is the percentage chance a bit
+  //       gets flipped in a particular byte.
+  BitFlipEncryption(unsigned int rand_seed, float flip_probability);
+
+  virtual void encrypt(int channel_no, unsigned char* in_data,
+                       unsigned char* out_data, int bytes_in, int* bytes_out) {
+    FlipSomeBitsInData(in_data, out_data, bytes_in, bytes_out);
+  }
+
+  virtual void decrypt(int channel_no, unsigned char* in_data,
+                       unsigned char* out_data, int bytes_in, int* bytes_out) {
+    FlipSomeBitsInData(in_data, out_data, bytes_in, bytes_out);
+  }
+
+  virtual void encrypt_rtcp(int channel_no, unsigned char* in_data,
+                            unsigned char* out_data, int bytes_in,
+                            int* bytes_out) {
+    FlipSomeBitsInData(in_data, out_data, bytes_in, bytes_out);
+  }
+
+  virtual void decrypt_rtcp(int channel_no, unsigned char* in_data,
+                            unsigned char* out_data, int bytes_in,
+                            int* bytes_out) {
+    FlipSomeBitsInData(in_data, out_data, bytes_in, bytes_out);
+  }
+
+  int64_t flip_count() const { return flip_count_; }
+
+ private:
+  // The flip probability ([0, 1]).
+  float flip_probability_;
+  // The number of bits we've flipped so far.
+  int64_t flip_count_;
+
+  // Flips some bits in the data at random.
+  void FlipSomeBitsInData(const unsigned char *in_data, unsigned char* out_data,
+                          int bytes_in, int* bytes_out);
+};
+
+#endif  // SRC_VIDEO_ENGINE_TEST_AUTO_TEST_HELPERS_BIT_FLIP_ENCRYPTION_H_
diff --git a/src/test/libtest/include/random_encryption.h b/src/test/libtest/include/random_encryption.h
new file mode 100644
index 0000000..beaae6c
--- /dev/null
+++ b/src/test/libtest/include/random_encryption.h
@@ -0,0 +1,54 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VIDEO_ENGINE_TEST_AUTO_TEST_HELPERS_RANDOM_ENCRYPTION_H_
+#define SRC_VIDEO_ENGINE_TEST_AUTO_TEST_HELPERS_RANDOM_ENCRYPTION_H_
+
+#include "common_types.h"
+
+// These algorithms attempt to create an uncrackable encryption
+// scheme by completely disregarding the input data.
+class RandomEncryption : public webrtc::Encryption {
+ public:
+  explicit RandomEncryption(unsigned int rand_seed);
+
+  virtual void encrypt(int channel_no, unsigned char* in_data,
+                       unsigned char* out_data, int bytes_in, int* bytes_out) {
+    GenerateRandomData(out_data, bytes_in, bytes_out);
+  }
+
+  virtual void decrypt(int channel_no, unsigned char* in_data,
+                       unsigned char* out_data, int bytes_in, int* bytes_out) {
+    GenerateRandomData(out_data, bytes_in, bytes_out);
+  }
+
+  virtual void encrypt_rtcp(int channel_no, unsigned char* in_data,
+                            unsigned char* out_data, int bytes_in,
+                            int* bytes_out) {
+    GenerateRandomData(out_data, bytes_in, bytes_out);
+  }
+
+  virtual void decrypt_rtcp(int channel_no, unsigned char* in_data,
+                            unsigned char* out_data, int bytes_in,
+                            int* bytes_out) {
+    GenerateRandomData(out_data, bytes_in, bytes_out);
+  }
+
+ private:
+  // Generates some completely random data with roughly the right length.
+  void GenerateRandomData(unsigned char* out_data, int bytes_in,
+                          int* bytes_out);
+
+  // Makes up a length within +- 50 of the original length, without
+  // overstepping the contract for encrypt / decrypt.
+  int MakeUpSimilarLength(int original_length);
+};
+
+#endif  // SRC_VIDEO_ENGINE_TEST_AUTO_TEST_HELPERS_RANDOM_ENCRYPTION_H_
diff --git a/src/test/libtest/libtest.gyp b/src/test/libtest/libtest.gyp
new file mode 100644
index 0000000..ea1ecc7
--- /dev/null
+++ b/src/test/libtest/libtest.gyp
@@ -0,0 +1,26 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+  'includes': [
+    '../../build/common.gypi'
+  ],
+  'targets': [
+    {
+      'target_name': 'libtest',
+      'type': '<(library)',
+      'sources': [
+        # Helper classes
+        'include/bit_flip_encryption.h',
+        'include/random_encryption.h',
+
+        'helpers/bit_flip_encryption.cc',
+        'helpers/random_encryption.cc',
+      ],
+    },
+  ],
+}
diff --git a/src/test/manual/README b/src/test/manual/README
new file mode 100644
index 0000000..0fc0b75
--- /dev/null
+++ b/src/test/manual/README
@@ -0,0 +1,8 @@
+================================================================
+WEBRTC MANUAL TESTS
+================================================================
+
+You will need to serve these files off some kind of web server. Currently,
+GetUserMedia does not work when run off a file:// URL.
+
+Contact person: phoglund@webrtc.org
\ No newline at end of file
diff --git a/src/test/manual/audio-and-video.html b/src/test/manual/audio-and-video.html
new file mode 100644
index 0000000..1e54424
--- /dev/null
+++ b/src/test/manual/audio-and-video.html
@@ -0,0 +1,44 @@
+<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
+<!--
+  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+
+  Use of this source code is governed by a BSD-style license
+  that can be found in the LICENSE file in the root of the source
+  tree. An additional intellectual property rights grant can be found
+  in the file PATENTS.  All contributing project authors may
+  be found in the AUTHORS file in the root of the source tree.
+-->
+<html>
+<head>
+  <title>Single Local Preview (Video and Audio)</title>
+  <script type="text/javascript">
+  function requestVideoAndAudio() {
+    navigator.webkitGetUserMedia({video: true, audio: true},
+                                 getUserMediaOkCallback,
+                                 getUserMediaFailedCallback);
+  }
+
+  function getUserMediaFailedCallback(error) {
+    alert("User media request denied with error code " + error.code);
+  }
+
+  function getUserMediaOkCallback(stream) {
+    var streamUrl = webkitURL.createObjectURL(stream);
+    document.getElementById("view1").src = streamUrl;
+    //document.getElementById("audio1").src = streamUrl;
+  }
+  </script>
+</head>
+<body onload="requestVideoAndAudio();">
+  <table border="0">
+    <tr>
+      <td>Local Preview</td>
+    </tr>
+    <tr>
+      <td><video width="320" height="240" id="view1"
+          autoplay="autoplay"></video></td>
+      <td><audio id="audio1" autoplay="autoplay"></audio></td>
+    </tr>
+  </table>
+</body>
+</html>
\ No newline at end of file
diff --git a/src/test/manual/iframe-video.html b/src/test/manual/iframe-video.html
new file mode 100644
index 0000000..dffdbef
--- /dev/null
+++ b/src/test/manual/iframe-video.html
@@ -0,0 +1,18 @@
+<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
+<!--
+  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+
+  Use of this source code is governed by a BSD-style license
+  that can be found in the LICENSE file in the root of the source
+  tree. An additional intellectual property rights grant can be found
+  in the file PATENTS.  All contributing project authors may
+  be found in the AUTHORS file in the root of the source tree.
+-->
+<html>
+<head>
+  <title>IFRAME Single Local Preview (Video Only)</title>
+</head>
+<body>
+  <iframe width="100%" height="100%" src="single-video.html"></iframe>
+</body>
+</html>
\ No newline at end of file
diff --git a/src/test/manual/multiple-audio.html b/src/test/manual/multiple-audio.html
new file mode 100644
index 0000000..1575c7d
--- /dev/null
+++ b/src/test/manual/multiple-audio.html
@@ -0,0 +1,52 @@
+<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
+<!--
+  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+
+  Use of this source code is governed by a BSD-style license
+  that can be found in the LICENSE file in the root of the source
+  tree. An additional intellectual property rights grant can be found
+  in the file PATENTS.  All contributing project authors may
+  be found in the AUTHORS file in the root of the source tree.
+-->
+<html>
+<head>
+  <title>Multiple Local Preview (Audio Only)</title>
+  <script type="text/javascript">
+  function requestAudio() {
+    navigator.webkitGetUserMedia({video: false, audio: true},
+                                 getUserMediaOkCallback,
+                                 getUserMediaFailedCallback);
+  }
+
+  function getUserMediaFailedCallback(error) {
+    alert("User media request denied with error code " + error.code);
+  }
+
+  function getUserMediaOkCallback(stream) {
+    var streamUrl = webkitURL.createObjectURL(stream);
+    for (var i = 1; i <= 10; i++) {
+      document.getElementById("audio" + i).src = streamUrl;
+    }
+  }
+  </script>
+</head>
+<body onload="requestAudio();">
+  <table border="0">
+    <tr>
+      <td>Sound test</td>
+    </tr>
+    <tr>
+      <td><audio id="audio1" autoplay="autoplay"></audio></td>
+      <td><audio id="audio2" autoplay="autoplay"></audio></td>
+      <td><audio id="audio3" autoplay="autoplay"></audio></td>
+      <td><audio id="audio4" autoplay="autoplay"></audio></td>
+      <td><audio id="audio5" autoplay="autoplay"></audio></td>
+      <td><audio id="audio6" autoplay="autoplay"></audio></td>
+      <td><audio id="audio7" autoplay="autoplay"></audio></td>
+      <td><audio id="audio8" autoplay="autoplay"></audio></td>
+      <td><audio id="audio9" autoplay="autoplay"></audio></td>
+      <td><audio id="audio10" autoplay="autoplay"></audio></td>
+    </tr>
+  </table>
+</body>
+</html>
\ No newline at end of file
diff --git a/src/test/manual/multiple-video.html b/src/test/manual/multiple-video.html
new file mode 100644
index 0000000..1ba46c1
--- /dev/null
+++ b/src/test/manual/multiple-video.html
@@ -0,0 +1,68 @@
+<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
+<!--
+  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+
+  Use of this source code is governed by a BSD-style license
+  that can be found in the LICENSE file in the root of the source
+  tree. An additional intellectual property rights grant can be found
+  in the file PATENTS.  All contributing project authors may
+  be found in the AUTHORS file in the root of the source tree.
+-->
+<html>
+<head>
+  <title>Multiple Local Preview (Video Only)</title>
+  <script type="text/javascript">
+  function requestVideo() {
+    navigator.webkitGetUserMedia({video: true, audio: false},
+                                 getUserMediaOkCallback,
+                                 getUserMediaFailedCallback);
+  }
+
+  function getUserMediaFailedCallback(error) {
+    alert("User media request denied with error code " + error.code);
+  }
+
+  function getUserMediaOkCallback(stream) {
+    var streamUrl = webkitURL.createObjectURL(stream);
+    for (var i = 1; i <= 10; i++) {
+      document.getElementById("view" + i).src = streamUrl;
+    }
+  }
+  </script>
+</head>
+<body onload="requestVideo();">
+  <table border="0">
+    <tr>
+      <td>Local Preview</td>
+    </tr>
+    <tr>
+      <td><video width="320" height="240" id="view1"
+          autoplay="autoplay"></video></td>
+      <td><video width="320" height="240" id="view2"
+          autoplay="autoplay"></video></td>
+      <td><video width="320" height="240" id="view3"
+          autoplay="autoplay"></video></td>
+    </tr>
+    <tr>
+      <td><video width="320" height="240" id="view4"
+          autoplay="autoplay"></video></td>
+      <td><video width="320" height="240" id="view5"
+          autoplay="autoplay"></video></td>
+      <td><video width="320" height="240" id="view6"
+          autoplay="autoplay"></video></td>
+    </tr>
+    <tr>
+      <td><video width="320" height="240" id="view7"
+          autoplay="autoplay"></video></td>
+      <td><video width="320" height="240" id="view8"
+          autoplay="autoplay"></video></td>
+      <td><video width="320" height="240" id="view9"
+          autoplay="autoplay"></video></td>
+    </tr>
+    <tr>
+      <td><video width="320" height="240" id="view10"
+          autoplay="autoplay"></video></td>
+    </tr>
+  </table>
+</body>
+</html>
\ No newline at end of file
diff --git a/src/test/manual/single-audio.html b/src/test/manual/single-audio.html
new file mode 100644
index 0000000..e003a62
--- /dev/null
+++ b/src/test/manual/single-audio.html
@@ -0,0 +1,41 @@
+<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
+<!--
+  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+
+  Use of this source code is governed by a BSD-style license
+  that can be found in the LICENSE file in the root of the source
+  tree. An additional intellectual property rights grant can be found
+  in the file PATENTS.  All contributing project authors may
+  be found in the AUTHORS file in the root of the source tree.
+-->
+<html>
+<head>
+  <title>Single Local Preview (Audio Only)</title>
+  <script type="text/javascript">
+  function requestAudio() {
+    navigator.webkitGetUserMedia({video: false, audio: true},
+                                 getUserMediaOkCallback,
+                                 getUserMediaFailedCallback);
+  }
+
+  function getUserMediaFailedCallback(error) {
+    alert("User media request denied with error code " + error.code);
+  }
+
+  function getUserMediaOkCallback(stream) {
+    var streamUrl = webkitURL.createObjectURL(stream);
+    document.getElementById("audio1").src = streamUrl;
+  }
+  </script>
+</head>
+<body onload="requestAudio();">
+  <table border="0">
+    <tr>
+      <td>Sound test</td>
+    </tr>
+    <tr>
+      <td><audio id="audio1" autoplay="autoplay"></audio></td>
+    </tr>
+  </table>
+</body>
+</html>
\ No newline at end of file
diff --git a/src/test/manual/single-video.html b/src/test/manual/single-video.html
new file mode 100644
index 0000000..6b3283c
--- /dev/null
+++ b/src/test/manual/single-video.html
@@ -0,0 +1,42 @@
+<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
+<!--
+  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+
+  Use of this source code is governed by a BSD-style license
+  that can be found in the LICENSE file in the root of the source
+  tree. An additional intellectual property rights grant can be found
+  in the file PATENTS.  All contributing project authors may
+  be found in the AUTHORS file in the root of the source tree.
+-->
+<html>
+<head>
+  <title>Single Local Preview (Video Only)</title>
+  <script type="text/javascript">
+  function requestVideo() {
+    navigator.webkitGetUserMedia({video: true, audio: false},
+                                 getUserMediaOkCallback,
+                                 getUserMediaFailedCallback);
+  }
+
+  function getUserMediaFailedCallback(error) {
+    alert("User media request denied with error code " + error.code);
+  }
+
+  function getUserMediaOkCallback(stream) {
+    var streamUrl = webkitURL.createObjectURL(stream);
+    document.getElementById("view1").src = streamUrl;
+  }
+  </script>
+</head>
+<body onload="requestVideo();">
+  <table border="0">
+    <tr>
+      <td>Local Preview</td>
+    </tr>
+    <tr>
+      <td><video width="320" height="240" id="view1"
+          autoplay="autoplay"></video></td>
+    </tr>
+  </table>
+</body>
+</html>
\ No newline at end of file
diff --git a/src/test/manual/two-video-devices.html b/src/test/manual/two-video-devices.html
new file mode 100644
index 0000000..bdd197c
--- /dev/null
+++ b/src/test/manual/two-video-devices.html
@@ -0,0 +1,46 @@
+<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
+<!--
+  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+
+  Use of this source code is governed by a BSD-style license
+  that can be found in the LICENSE file in the root of the source
+  tree. An additional intellectual property rights grant can be found
+  in the file PATENTS.  All contributing project authors may
+  be found in the AUTHORS file in the root of the source tree.
+-->
+<html>
+<head>
+  <title>Single Local Preview (Video Only)</title>
+  <script type="text/javascript">
+  function requestVideo(target) {
+    navigator.webkitGetUserMedia({video: true, audio: false},
+                                 function(stream) {
+                                   getUserMediaOkCallback(stream, target);
+                                 },
+                                 getUserMediaFailedCallback);
+  }
+
+  function getUserMediaFailedCallback(error) {
+    alert("User media request denied with error code " + error.code);
+  }
+
+  function getUserMediaOkCallback(stream, target) {
+    var streamUrl = webkitURL.createObjectURL(stream);
+    document.getElementById(target).src = streamUrl;
+  }
+  </script>
+</head>
+<body onload="requestVideo('view1'); requestVideo('view2');">
+  <table border="0">
+    <tr>
+      <td>Local Preview</td>
+    </tr>
+    <tr>
+      <td><video width="320" height="240" id="view1"
+          autoplay="autoplay"></video></td>
+      <td><video width="320" height="240" id="view2"
+          autoplay="autoplay"></video></td>
+    </tr>
+  </table>
+</body>
+</html>
\ No newline at end of file
diff --git a/src/test/metrics.gyp b/src/test/metrics.gyp
new file mode 100644
index 0000000..11573ae
--- /dev/null
+++ b/src/test/metrics.gyp
@@ -0,0 +1,46 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'includes': [
+    '../build/common.gypi',
+  ],
+  'targets': [
+    {
+      # The metrics code must be kept in its own GYP file in order to
+      # avoid a circular dependency error due to the dependency on libyuv.
+      # If the code would be put in test.gyp a circular dependency error during
+      # GYP generation would occur, because the libyuv.gypi unittest target
+      # depends on test_support_main. See issue #160 for more info.
+      'target_name': 'metrics',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
+      ],
+      'include_dirs': [
+        '.',
+      ],
+      'sources': [
+        'testsupport/metrics/video_metrics.h',
+        'testsupport/metrics/video_metrics.cc',
+      ],
+    },
+    {
+      'target_name': 'metrics_unittests',
+      'type': 'executable',
+      'dependencies': [
+        'metrics',
+        '<(webrtc_root)/test/test.gyp:test_support_main',
+        '<(DEPTH)/testing/gtest.gyp:gtest',
+      ],
+      'sources': [
+        'testsupport/metrics/video_metrics_unittest.cc',
+      ],
+    },
+  ],
+}
diff --git a/test/run_all_unittests.cc b/src/test/run_all_unittests.cc
similarity index 100%
rename from test/run_all_unittests.cc
rename to src/test/run_all_unittests.cc
diff --git a/src/test/test.gyp b/src/test/test.gyp
new file mode 100644
index 0000000..5fe5f16
--- /dev/null
+++ b/src/test/test.gyp
@@ -0,0 +1,96 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# TODO(andrew): consider moving test_support to src/base/test.
+{
+  'includes': [
+    '../build/common.gypi',
+  ],
+  'targets': [
+    {
+      'target_name': 'test_support',
+      'type': 'static_library',
+      'include_dirs': [
+        '.',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '.', # Some includes are hierarchical
+        ],
+      },
+      'dependencies': [
+        '<(DEPTH)/testing/gtest.gyp:gtest',
+        '<(DEPTH)/testing/gmock.gyp:gmock',
+      ],
+      'all_dependent_settings': {
+        'include_dirs': [
+          '.',
+        ],
+      },
+      'sources': [
+        'test_suite.cc',
+        'test_suite.h',
+        'testsupport/fileutils.h',
+        'testsupport/fileutils.cc',
+        'testsupport/frame_reader.h',
+        'testsupport/frame_reader.cc',
+        'testsupport/frame_writer.h',
+        'testsupport/frame_writer.cc',
+        'testsupport/gtest_prod_util.h',
+        'testsupport/packet_reader.h',
+        'testsupport/packet_reader.cc',
+        'testsupport/mock/mock_frame_reader.h',
+        'testsupport/mock/mock_frame_writer.h',
+      ],
+    },
+    {
+      # Depend on this target when you want to have test_support but also the
+      # main method needed for gtest to execute!
+      'target_name': 'test_support_main',
+      'type': 'static_library',
+      'dependencies': [
+        'test_support',
+      ],
+      'sources': [
+        'run_all_unittests.cc',
+      ],
+    },
+    {
+      # Depend on this target when you want to have test_support and a special
+      # main for mac which will run your test on a worker thread and consume
+      # events on the main thread. Useful if you want to access a webcam.
+      # This main will provide all the scaffolding and objective-c black magic
+      # for you. All you need to do is to implement a function in the
+      # run_threaded_main_mac.h file (ImplementThisToRunYourTest).
+      'target_name': 'test_support_main_threaded_mac',
+      'type': 'static_library',
+      'dependencies': [
+        'test_support',
+      ],
+      'sources': [
+        'testsupport/mac/run_threaded_main_mac.h',
+        'testsupport/mac/run_threaded_main_mac.mm',
+      ],
+    },
+    {
+      'target_name': 'test_support_unittests',
+      'type': 'executable',
+      'dependencies': [
+        'test_support_main',
+        '<(DEPTH)/testing/gtest.gyp:gtest',
+      ],
+      'sources': [
+        'testsupport/unittest_utils.h',
+        'testsupport/fileutils_unittest.cc',
+        'testsupport/frame_reader_unittest.cc',
+        'testsupport/frame_writer_unittest.cc',
+        'testsupport/packet_reader_unittest.cc',
+      ],
+    },
+  ],
+}
diff --git a/test/test_suite.cc b/src/test/test_suite.cc
similarity index 100%
rename from test/test_suite.cc
rename to src/test/test_suite.cc
diff --git a/test/test_suite.h b/src/test/test_suite.h
similarity index 100%
rename from test/test_suite.h
rename to src/test/test_suite.h
diff --git a/src/test/testsupport/fileutils.cc b/src/test/testsupport/fileutils.cc
new file mode 100644
index 0000000..0679b1f
--- /dev/null
+++ b/src/test/testsupport/fileutils.cc
@@ -0,0 +1,187 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "test/testsupport/fileutils.h"
+
+#ifdef WIN32
+#include <direct.h>
+#define GET_CURRENT_DIR _getcwd
+#else
+#include <unistd.h>
+#define GET_CURRENT_DIR getcwd
+#endif
+
+#include <sys/stat.h>  // To check for directory existence.
+#ifndef S_ISDIR  // Not defined in stat.h on Windows.
+#define S_ISDIR(mode) (((mode) & S_IFMT) == S_IFDIR)
+#endif
+
+#include <cstdio>
+
+#include "typedefs.h"  // For architecture defines
+
+namespace webrtc {
+namespace test {
+
+#ifdef WIN32
+static const char* kPathDelimiter = "\\";
+#else
+static const char* kPathDelimiter = "/";
+#endif
+// The file we're looking for to identify the project root dir.
+static const char* kProjectRootFileName = "DEPS";
+static const char* kOutputDirName = "out";
+static const char* kFallbackPath = "./";
+#ifdef WEBRTC_ANDROID
+static const char* kResourcesDirName = "/sdcard/";
+#else
+static const char* kResourcesDirName = "resources";
+#endif
+const char* kCannotFindProjectRootDir = "ERROR_CANNOT_FIND_PROJECT_ROOT_DIR";
+
+std::string ProjectRootPath() {
+  std::string working_dir = WorkingDir();
+  if (working_dir == kFallbackPath) {
+    return kCannotFindProjectRootDir;
+  }
+  // Check for our file that verifies the root dir.
+  std::string current_path(working_dir);
+  FILE* file = NULL;
+  int path_delimiter_index = current_path.find_last_of(kPathDelimiter);
+  while (path_delimiter_index > -1) {
+    std::string root_filename = current_path + kPathDelimiter +
+        kProjectRootFileName;
+    file = fopen(root_filename.c_str(), "r");
+    if (file != NULL) {
+      fclose(file);
+      return current_path + kPathDelimiter;
+    }
+    // Move up one directory in the directory tree.
+    current_path = current_path.substr(0, path_delimiter_index);
+    path_delimiter_index = current_path.find_last_of(kPathDelimiter);
+  }
+  // Reached the root directory.
+  fprintf(stderr, "Cannot find project root directory!\n");
+  return kCannotFindProjectRootDir;
+}
+
+#ifdef WEBRTC_ANDROID
+
+std::string OutputPath() {
+  // We need to touch this variable so it doesn't get flagged as unused.
+  (void)kOutputDirName;
+  return "/sdcard/";
+}
+
+#else  // WEBRTC_ANDROID
+
+std::string OutputPath() {
+  std::string path = ProjectRootPath();
+  if (path == kCannotFindProjectRootDir) {
+    return kFallbackPath;
+  }
+  path += kOutputDirName;
+  if (!CreateDirectory(path)) {
+    return kFallbackPath;
+  }
+  return path + kPathDelimiter;
+}
+
+#endif  // !WEBRTC_ANDROID
+
+std::string WorkingDir() {
+  char path_buffer[FILENAME_MAX];
+  if (!GET_CURRENT_DIR(path_buffer, sizeof(path_buffer))) {
+    fprintf(stderr, "Cannot get current directory!\n");
+    return kFallbackPath;
+  } else {
+    return std::string(path_buffer);
+  }
+}
+
+bool CreateDirectory(std::string directory_name) {
+  struct stat path_info = {0};
+  // Check if the path exists already:
+  if (stat(directory_name.c_str(), &path_info) == 0) {
+    if (!S_ISDIR(path_info.st_mode)) {
+      fprintf(stderr, "Path %s exists but is not a directory! Remove this "
+              "file and re-run to create the directory.\n",
+              directory_name.c_str());
+      return false;
+    }
+  } else {
+#ifdef WIN32
+    return _mkdir(directory_name.c_str()) == 0;
+#else
+    return mkdir(directory_name.c_str(),  S_IRWXU | S_IRWXG | S_IRWXO) == 0;
+#endif
+  }
+  return true;
+}
+
+bool FileExists(std::string file_name) {
+  struct stat file_info = {0};
+  return stat(file_name.c_str(), &file_info) == 0;
+}
+
+std::string ResourcePath(std::string name, std::string extension) {
+  std::string platform = "win";
+#ifdef WEBRTC_LINUX
+  platform = "linux";
+#endif  // WEBRTC_LINUX
+#ifdef WEBRTC_MAC
+  platform = "mac";
+#endif  // WEBRTC_MAC
+
+#ifdef WEBRTC_ARCH_64_BITS
+  std::string architecture = "64";
+#else
+  std::string architecture = "32";
+#endif  // WEBRTC_ARCH_64_BITS
+
+#ifdef WEBRTC_ANDROID
+  std::string resources_path = kResourcesDirName;
+#else
+  std::string resources_path = ProjectRootPath() + kResourcesDirName +
+      kPathDelimiter;
+  std::string resource_file = resources_path + name + "_" + platform + "_" +
+      architecture + "." + extension;
+  if (FileExists(resource_file)) {
+    return resource_file;
+  }
+  // Try without architecture.
+  resource_file = resources_path + name + "_" + platform + "." + extension;
+  if (FileExists(resource_file)) {
+    return resource_file;
+  }
+  // Try without platform.
+  resource_file = resources_path + name + "_" + architecture + "." + extension;
+  if (FileExists(resource_file)) {
+    return resource_file;
+  }
+#endif
+  // Fall back on name without architecture or platform.
+  return resources_path + name + "." + extension;
+}
+
+size_t GetFileSize(std::string filename) {
+  FILE* f = fopen(filename.c_str(), "rb");
+  size_t size = 0;
+  if (f != NULL) {
+    if (fseek(f, 0, SEEK_END) == 0) {
+      size = ftell(f);
+    }
+    fclose(f);
+  }
+  return size;
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/src/test/testsupport/fileutils.h b/src/test/testsupport/fileutils.h
new file mode 100644
index 0000000..b6c1346
--- /dev/null
+++ b/src/test/testsupport/fileutils.h
@@ -0,0 +1,144 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cstdio>
+
+// File utilities for testing purposes.
+//
+// The ProjectRootPath() method is a convenient way of getting an absolute
+// path to the project source tree root directory. Using this, it is easy to
+// refer to test resource files in a portable way.
+//
+// Notice that even if Windows platforms use backslash as path delimiter, it is
+// also supported to use slash, so there's no need for #ifdef checks in test
+// code for setting up the paths to the resource files.
+//
+// Example use:
+// Assume we have the following code being used in a test source file:
+// const std::string kInputFile = webrtc::test::ProjectRootPath() +
+//     "test/data/voice_engine/audio_long16.wav";
+// // Use the kInputFile for the tests...
+//
+// Then here's some example outputs for different platforms:
+// Linux:
+// * Source tree located in /home/user/webrtc/trunk
+// * Test project located in /home/user/webrtc/trunk/src/testproject
+// * Test binary compiled as:
+//   /home/user/webrtc/trunk/out/Debug/testproject_unittests
+// Then ProjectRootPath() will return /home/user/webrtc/trunk/ no matter if
+// the test binary is executed from standing in either of:
+// /home/user/webrtc/trunk
+// or
+// /home/user/webrtc/trunk/out/Debug
+// (or any other directory below the trunk for that matter).
+//
+// Windows:
+// * Source tree located in C:\Users\user\webrtc\trunk
+// * Test project located in C:\Users\user\webrtc\trunk\src\testproject
+// * Test binary compiled as:
+//   C:\Users\user\webrtc\trunk\src\testproject\Debug\testproject_unittests.exe
+// Then ProjectRootPath() will return C:\Users\user\webrtc\trunk\ when the
+// test binary is executed from inside Visual Studio.
+// It will also return the same path if the test is executed from a command
+// prompt standing in C:\Users\user\webrtc\trunk\src\testproject\Debug
+//
+// Mac:
+// * Source tree located in /Users/user/webrtc/trunk
+// * Test project located in /Users/user/webrtc/trunk/src/testproject
+// * Test binary compiled as:
+//   /Users/user/webrtc/trunk/xcodebuild/Debug/testproject_unittests
+// Then ProjectRootPath() will return /Users/user/webrtc/trunk/ no matter if
+// the test binary is executed from standing in either of:
+// /Users/user/webrtc/trunk
+// or
+// /Users/user/webrtc/trunk/out/Debug
+// (or any other directory below the trunk for that matter).
+
+#ifndef WEBRTC_TEST_TESTSUPPORT_FILEUTILS_H_
+#define WEBRTC_TEST_TESTSUPPORT_FILEUTILS_H_
+
+#include <string>
+
+namespace webrtc {
+namespace test {
+
+// This is the "directory" returned if the ProjectPath() function fails
+// to find the project root.
+extern const char* kCannotFindProjectRootDir;
+
+// Finds the root dir of the project, to be able to set correct paths to
+// resource files used by tests.
+// The implementation is simple: it just looks for the file defined by
+// kProjectRootFileName, starting in the current directory (the working
+// directory) and then steps upward until it is found (or it is at the root of
+// the file system).
+// If the current working directory is above the project root dir, it will not
+// be found.
+//
+// If symbolic links occur in the path they will be resolved and the actual
+// directory will be returned.
+//
+// Returns the absolute path to the project root dir (usually the trunk dir)
+// WITH a trailing path delimiter.
+// If the project root is not found, the string specified by
+// kCannotFindProjectRootDir is returned.
+std::string ProjectRootPath();
+
+// Creates and returns the absolute path to the output directory where log files
+// and other test artifacts should be put. The output directory is generally a
+// directory named "out" at the top-level of the project, i.e. a subfolder to
+// the path returned by ProjectRootPath(). The exception is Android where we use
+// /sdcard/ instead.
+//
+// Details described for ProjectRootPath() apply here too.
+//
+// Returns the path WITH a trailing path delimiter. If the project root is not
+// found, the current working directory ("./") is returned as a fallback.
+std::string OutputPath();
+
+// Returns a path to a resource file for the currently executing platform.
+// Adapts to what filenames are currently present in the
+// [project-root]/resources/ dir.
+// Returns an absolute path according to this priority list (the directory
+// part of the path is left out for readability):
+// 1. [name]_[platform]_[architecture].[extension]
+// 2. [name]_[platform].[extension]
+// 3. [name]_[architecture].[extension]
+// 4. [name].[extension]
+// Where
+// * platform is either of "win", "mac" or "linux".
+// * architecture is either of "32" or "64".
+//
+// Arguments:
+//    name - Name of the resource file. If a plain filename (no directory path)
+//           is supplied, the file is assumed to be located in resources/
+//           If a directory path is prepended to the filename, a subdirectory
+//           hierarchy reflecting that path is assumed to be present.
+//    extension - File extension, without the dot, i.e. "bmp" or "yuv".
+std::string ResourcePath(std::string name, std::string extension);
+
+// Gets the current working directory for the executing program.
+// Returns "./" if for some reason it is not possible to find the working
+// directory.
+std::string WorkingDir();
+
+// Creates a directory if it not already exists.
+// Returns true if successful. Will print an error message to stderr and return
+// false if a file with the same name already exists.
+bool CreateDirectory(std::string directory_name);
+
+// File size of the supplied file in bytes. Will return 0 if the file is
+// empty or if the file does not exist/is readable.
+size_t GetFileSize(std::string filename);
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // WEBRTC_TEST_TESTSUPPORT_FILEUTILS_H_
diff --git a/src/test/testsupport/fileutils_unittest.cc b/src/test/testsupport/fileutils_unittest.cc
new file mode 100644
index 0000000..1b76b3c
--- /dev/null
+++ b/src/test/testsupport/fileutils_unittest.cc
@@ -0,0 +1,192 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testsupport/fileutils.h"
+
+#include <cstdio>
+#include <list>
+#include <string>
+
+#include "gtest/gtest.h"
+
+#ifdef WIN32
+#define chdir _chdir
+static const char* kPathDelimiter = "\\";
+#else
+static const char* kPathDelimiter = "/";
+#endif
+
+static const std::string kDummyDir = "file_utils_unittest_dummy_dir";
+static const std::string kResourcesDir = "resources";
+static const std::string kTestName = "fileutils_unittest";
+static const std::string kExtension = "tmp";
+
+typedef std::list<std::string> FileList;
+
+namespace webrtc {
+
+// Test fixture to restore the working directory between each test, since some
+// of them change it with chdir during execution (not restored by the
+// gtest framework).
+class FileUtilsTest : public testing::Test {
+ protected:
+  FileUtilsTest() {
+  }
+  virtual ~FileUtilsTest() {}
+  // Runs before the first test
+  static void SetUpTestCase() {
+    original_working_dir_ = webrtc::test::WorkingDir();
+    std::string resources_path = original_working_dir_ + kPathDelimiter +
+        kResourcesDir + kPathDelimiter;
+    webrtc::test::CreateDirectory(resources_path);
+
+    files_.push_back(resources_path + kTestName + "." + kExtension);
+    files_.push_back(resources_path + kTestName + "_32." + kExtension);
+    files_.push_back(resources_path + kTestName + "_64." + kExtension);
+    files_.push_back(resources_path + kTestName + "_linux." + kExtension);
+    files_.push_back(resources_path + kTestName + "_mac." + kExtension);
+    files_.push_back(resources_path + kTestName + "_win." + kExtension);
+    files_.push_back(resources_path + kTestName + "_linux_32." + kExtension);
+    files_.push_back(resources_path + kTestName + "_mac_32." + kExtension);
+    files_.push_back(resources_path + kTestName + "_win_32." + kExtension);
+    files_.push_back(resources_path + kTestName + "_linux_64." + kExtension);
+    files_.push_back(resources_path + kTestName + "_mac_64." + kExtension);
+    files_.push_back(resources_path + kTestName + "_win_64." + kExtension);
+
+    // Now that the resources dir exists, write some empty test files into it.
+    for (FileList::iterator file_it = files_.begin();
+        file_it != files_.end(); ++file_it) {
+      FILE* file = fopen(file_it->c_str(), "wb");
+      ASSERT_TRUE(file != NULL) << "Failed to write file: " << file_it->c_str();
+      ASSERT_GT(fprintf(file, "%s",  "Dummy data"), 0);
+      fclose(file);
+    }
+    // Create a dummy subdir that can be chdir'ed into for testing purposes.
+    empty_dummy_dir_ = original_working_dir_ + kPathDelimiter + kDummyDir;
+    webrtc::test::CreateDirectory(empty_dummy_dir_);
+  }
+  static void TearDownTestCase() {
+    // Clean up all resource files written
+    for (FileList::iterator file_it = files_.begin();
+            file_it != files_.end(); ++file_it) {
+      remove(file_it->c_str());
+    }
+    std::remove(empty_dummy_dir_.c_str());
+  }
+  void SetUp() {
+    ASSERT_EQ(chdir(original_working_dir_.c_str()), 0);
+  }
+  void TearDown() {
+    ASSERT_EQ(chdir(original_working_dir_.c_str()), 0);
+  }
+ protected:
+  static FileList files_;
+  static std::string empty_dummy_dir_;
+ private:
+  static std::string original_working_dir_;
+};
+
+FileList FileUtilsTest::files_;
+std::string FileUtilsTest::original_working_dir_ = "";
+std::string FileUtilsTest::empty_dummy_dir_ = "";
+
+// Tests that the project root path is returned for the default working
+// directory that is automatically set when the test executable is launched.
+// The test is not fully testing the implementation, since we cannot be sure
+// of where the executable was launched from.
+// The test will fail if the top level directory is not named "trunk".
+TEST_F(FileUtilsTest, ProjectRootPathFromUnchangedWorkingDir) {
+  std::string path = webrtc::test::ProjectRootPath();
+  std::string expected_end = "trunk";
+  expected_end = kPathDelimiter + expected_end + kPathDelimiter;
+  ASSERT_EQ(path.length() - expected_end.length(), path.find(expected_end));
+}
+
+// Similar to the above test, but for the output dir
+TEST_F(FileUtilsTest, OutputPathFromUnchangedWorkingDir) {
+  std::string path = webrtc::test::OutputPath();
+  std::string expected_end = "out";
+  expected_end = kPathDelimiter + expected_end + kPathDelimiter;
+  ASSERT_EQ(path.length() - expected_end.length(), path.find(expected_end));
+}
+
+// Tests setting the current working directory to a directory three levels
+// deeper from the current one. Then testing that the project path returned
+// is still the same, when the function under test is called again.
+TEST_F(FileUtilsTest, ProjectRootPathFromDeeperWorkingDir) {
+  std::string path = webrtc::test::ProjectRootPath();
+  std::string original_working_dir = path;  // This is the correct project root
+  // Change to a subdirectory path.
+  ASSERT_EQ(0, chdir(empty_dummy_dir_.c_str()));
+  ASSERT_EQ(original_working_dir, webrtc::test::ProjectRootPath());
+}
+
+// Similar to the above test, but for the output dir
+TEST_F(FileUtilsTest, OutputPathFromDeeperWorkingDir) {
+  std::string path = webrtc::test::OutputPath();
+  std::string original_working_dir = path;
+  ASSERT_EQ(0, chdir(empty_dummy_dir_.c_str()));
+  ASSERT_EQ(original_working_dir, webrtc::test::OutputPath());
+}
+
+// Tests with current working directory set to a directory higher up in the
+// directory tree than the project root dir. This case shall return a specified
+// error string as a directory (which will be an invalid path).
+TEST_F(FileUtilsTest, ProjectRootPathFromRootWorkingDir) {
+  // Change current working dir to the root of the current file system
+  // (this will always be "above" our project root dir).
+  ASSERT_EQ(0, chdir(kPathDelimiter));
+  ASSERT_EQ(webrtc::test::kCannotFindProjectRootDir,
+            webrtc::test::ProjectRootPath());
+}
+
+// Similar to the above test, but for the output dir
+TEST_F(FileUtilsTest, OutputPathFromRootWorkingDir) {
+  ASSERT_EQ(0, chdir(kPathDelimiter));
+  ASSERT_EQ("./", webrtc::test::OutputPath());
+}
+
+// Only tests that the code executes
+TEST_F(FileUtilsTest, CreateDirectory) {
+  std::string directory = "fileutils-unittest-empty-dir";
+  // Make sure it's removed if a previous test has failed:
+  std::remove(directory.c_str());
+  ASSERT_TRUE(webrtc::test::CreateDirectory(directory));
+  std::remove(directory.c_str());
+}
+
+TEST_F(FileUtilsTest, WorkingDirReturnsValue) {
+  // Hard to cover all platforms. Just test that it returns something without
+  // crashing:
+  std::string working_dir = webrtc::test::WorkingDir();
+  ASSERT_GT(working_dir.length(), 0u);
+}
+
+// Due to multiple platforms, it is hard to make a complete test for
+// ResourcePath. Manual testing has been performed by removing files and
+// verified the result confirms with the specified documentation for the
+// function.
+TEST_F(FileUtilsTest, ResourcePathReturnsValue) {
+  std::string resource = webrtc::test::ResourcePath(kTestName, kExtension);
+  ASSERT_GT(resource.find(kTestName), 0u);
+  ASSERT_GT(resource.find(kExtension), 0u);
+  ASSERT_EQ(0, chdir(kPathDelimiter));
+  ASSERT_EQ("./", webrtc::test::OutputPath());
+}
+
+TEST_F(FileUtilsTest, GetFileSizeExistingFile) {
+  ASSERT_GT(webrtc::test::GetFileSize(files_.front()), 0u);
+}
+
+TEST_F(FileUtilsTest, GetFileSizeNonExistingFile) {
+  ASSERT_EQ(0u, webrtc::test::GetFileSize("non-existing-file.tmp"));
+}
+
+}  // namespace webrtc
diff --git a/test/testsupport/frame_reader.cc b/src/test/testsupport/frame_reader.cc
similarity index 100%
rename from test/testsupport/frame_reader.cc
rename to src/test/testsupport/frame_reader.cc
diff --git a/test/testsupport/frame_reader.h b/src/test/testsupport/frame_reader.h
similarity index 100%
rename from test/testsupport/frame_reader.h
rename to src/test/testsupport/frame_reader.h
diff --git a/test/testsupport/frame_reader_unittest.cc b/src/test/testsupport/frame_reader_unittest.cc
similarity index 100%
rename from test/testsupport/frame_reader_unittest.cc
rename to src/test/testsupport/frame_reader_unittest.cc
diff --git a/test/testsupport/frame_writer.cc b/src/test/testsupport/frame_writer.cc
similarity index 100%
rename from test/testsupport/frame_writer.cc
rename to src/test/testsupport/frame_writer.cc
diff --git a/src/test/testsupport/frame_writer.h b/src/test/testsupport/frame_writer.h
new file mode 100644
index 0000000..e91a299
--- /dev/null
+++ b/src/test/testsupport/frame_writer.h
@@ -0,0 +1,69 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_TEST_TESTSUPPORT_FRAME_WRITER_H_
+#define WEBRTC_TEST_TESTSUPPORT_FRAME_WRITER_H_
+
+#include <cstdio>
+#include <string>
+
+#include "typedefs.h"
+
+namespace webrtc {
+namespace test {
+
+// Handles writing of video files.
+class FrameWriter {
+ public:
+  virtual ~FrameWriter() {}
+
+  // Initializes the file handler, i.e. opens the input and output files etc.
+  // This must be called before reading or writing frames has started.
+  // Returns false if an error has occurred, in addition to printing to stderr.
+  virtual bool Init() = 0;
+
+  // Writes a frame of the configured frame length to the output file.
+  // Returns true if the write was successful, false otherwise.
+  virtual bool WriteFrame(WebRtc_UWord8* frame_buffer) = 0;
+
+  // Closes the output file if open. Essentially makes this class impossible
+  // to use anymore. Will also be invoked by the destructor.
+  virtual void Close() = 0;
+
+  // Frame length in bytes of a single frame image.
+  virtual int FrameLength() = 0;
+};
+
+class FrameWriterImpl : public FrameWriter {
+ public:
+  // Creates a file handler. The input file is assumed to exist and be readable
+  // and the output file must be writable.
+  // Parameters:
+  //   output_filename         The file to write. Will be overwritten if already
+  //                           existing.
+  //   frame_length_in_bytes   The size of each frame.
+  //                           For YUV: 3*width*height/2
+  FrameWriterImpl(std::string output_filename, int frame_length_in_bytes);
+  virtual ~FrameWriterImpl();
+  bool Init();
+  bool WriteFrame(WebRtc_UWord8* frame_buffer);
+  void Close();
+  int FrameLength() { return frame_length_in_bytes_; }
+
+ private:
+  std::string output_filename_;
+  int frame_length_in_bytes_;
+  FILE* output_file_;
+};
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // WEBRTC_TEST_TESTSUPPORT_FRAME_WRITER_H_
diff --git a/test/testsupport/frame_writer_unittest.cc b/src/test/testsupport/frame_writer_unittest.cc
similarity index 100%
rename from test/testsupport/frame_writer_unittest.cc
rename to src/test/testsupport/frame_writer_unittest.cc
diff --git a/src/test/testsupport/gtest_prod_util.h b/src/test/testsupport/gtest_prod_util.h
new file mode 100644
index 0000000..7d123a8
--- /dev/null
+++ b/src/test/testsupport/gtest_prod_util.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_TEST_TESTSUPPORT_GTEST_PROD_UTIL_H_
+#define WEBRTC_TEST_TESTSUPPORT_GTEST_PROD_UTIL_H_
+#pragma once
+
+#include "gtest/gtest_prod.h"
+
+// This file is a plain copy of Chromium's base/gtest_prod_util.h.
+//
+// This is a wrapper for gtest's FRIEND_TEST macro that friends
+// test with all possible prefixes. This is very helpful when changing the test
+// prefix, because the friend declarations don't need to be updated.
+//
+// Example usage:
+//
+// class MyClass {
+//  private:
+//   void MyMethod();
+//   FRIEND_TEST_ALL_PREFIXES(MyClassTest, MyMethod);
+// };
+#define FRIEND_TEST_ALL_PREFIXES(test_case_name, test_name) \
+  FRIEND_TEST(test_case_name, test_name); \
+  FRIEND_TEST(test_case_name, DISABLED_##test_name); \
+  FRIEND_TEST(test_case_name, FLAKY_##test_name); \
+  FRIEND_TEST(test_case_name, FAILS_##test_name)
+
+#endif  // WEBRTC_TEST_TESTSUPPORT_GTEST_PROD_UTIL_H_
diff --git a/src/test/testsupport/mac/run_threaded_main_mac.h b/src/test/testsupport/mac/run_threaded_main_mac.h
new file mode 100644
index 0000000..c8cc4bb
--- /dev/null
+++ b/src/test/testsupport/mac/run_threaded_main_mac.h
@@ -0,0 +1,22 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/**
+ * This file and its corresponding .mm file implement a main function on Mac.
+ * It's useful if you need to access a webcam in your Mac application. The code
+ * forks a worker thread which runs the below ImplementThisToRunYourTest
+ * function, and uses the main thread to pump messages. That way we can run our
+ * code in a regular sequential fashion and still pump events, which are
+ * necessary to access the webcam for instance.
+ */
+
+// Implement this method to do whatever you want to do in the worker thread.
+// The argc and argv variables are the unmodified command line from main.
+int ImplementThisToRunYourTest(int argc, char** argv);
diff --git a/src/test/testsupport/mac/run_threaded_main_mac.mm b/src/test/testsupport/mac/run_threaded_main_mac.mm
new file mode 100644
index 0000000..b0c07ed
--- /dev/null
+++ b/src/test/testsupport/mac/run_threaded_main_mac.mm
@@ -0,0 +1,90 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#import "run_threaded_main_mac.h"
+
+#import <Cocoa/Cocoa.h>
+
+// This class passes parameter from main to the worked thread and back.
+@interface AutoTestInWorkerThread : NSObject {
+  int    argc_;
+  char** argv_;
+  int    result_;
+  bool   done_;
+}
+
+- (void)setDone:(bool)done;
+- (bool)done;
+- (void)setArgc:(int)argc argv:(char**)argv;
+- (int) result;
+- (void)runTest:(NSObject*)ignored;
+
+@end
+
+@implementation AutoTestInWorkerThread
+
+- (void)setDone:(bool)done {
+  done_ = done;
+}
+
+- (bool)done {
+  return done_;
+}
+
+- (void)setArgc:(int)argc argv:(char**)argv {
+  argc_ = argc;
+  argv_ = argv;
+}
+
+- (void)runTest:(NSObject*)ignored {
+    NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];
+
+    result_ = ImplementThisToRunYourTest(argc_, argv_);
+    done_ = true;
+
+    [pool release];
+    return;
+}
+
+- (int)result {
+  return result_;
+}
+
+@end
+
+int main(int argc, char * argv[]) {
+    NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];
+
+    [NSApplication sharedApplication];
+
+    int result = 0;
+    AutoTestInWorkerThread* tests = [[AutoTestInWorkerThread alloc] init];
+
+    [tests setArgc:argc argv:argv];
+    [tests setDone:false];
+
+    [NSThread detachNewThreadSelector:@selector(runTest:)
+                             toTarget:tests
+                           withObject:nil];
+
+    NSRunLoop* main_run_loop = [NSRunLoop mainRunLoop];
+    NSDate *loop_until = [NSDate dateWithTimeIntervalSinceNow:0.1];
+    bool runloop_ok = true;
+    while (![tests done] && runloop_ok) {
+      runloop_ok = [main_run_loop runMode:NSDefaultRunLoopMode
+                               beforeDate:loop_until];
+      loop_until = [NSDate dateWithTimeIntervalSinceNow:0.1];
+    }
+
+    result = [tests result];
+
+    [pool release];
+    return result;
+}
diff --git a/src/test/testsupport/metrics/video_metrics.cc b/src/test/testsupport/metrics/video_metrics.cc
new file mode 100644
index 0000000..87dc33b
--- /dev/null
+++ b/src/test/testsupport/metrics/video_metrics.cc
@@ -0,0 +1,185 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testsupport/metrics/video_metrics.h"
+
+#include <algorithm> // min_element, max_element
+#include <cassert>
+#include <cstdio>
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+
+namespace webrtc {
+namespace test {
+
+// Used for calculating min and max values
+static bool LessForFrameResultValue (const FrameResult& s1,
+                                     const FrameResult& s2) {
+    return s1.value < s2.value;
+}
+
+enum VideoMetricsType { kPSNR, kSSIM, kBoth };
+
+// Calculates metrics for a frame and adds statistics to the result for it.
+void CalculateFrame(VideoMetricsType video_metrics_type,
+                    uint8_t* ref,
+                    uint8_t* test,
+                    int width,
+                    int height,
+                    int frame_number,
+                    QualityMetricsResult* result) {
+  FrameResult frame_result = {0, 0};
+  frame_result.frame_number = frame_number;
+  switch (video_metrics_type) {
+    case kPSNR:
+      frame_result.value = I420PSNR(ref, test, width, height);
+      break;
+    case kSSIM:
+      frame_result.value = I420SSIM(ref, test, width, height);
+      break;
+    default:
+      assert(false);
+  }
+  result->frames.push_back(frame_result);
+}
+
+// Calculates average, min and max values for the supplied struct, if non-NULL.
+void CalculateStats(QualityMetricsResult* result) {
+  if (result == NULL || result->frames.size() == 0) {
+    return;
+  }
+  // Calculate average
+  std::vector<FrameResult>::iterator iter;
+  double metrics_values_sum = 0.0;
+  for (iter = result->frames.begin(); iter != result->frames.end(); ++iter) {
+    metrics_values_sum += iter->value;
+  }
+  result->average = metrics_values_sum / result->frames.size();
+
+  // Calculate min/max statistics
+  iter = std::min_element(result->frames.begin(), result->frames.end(),
+                     LessForFrameResultValue);
+  result->min = iter->value;
+  result->min_frame_number = iter->frame_number;
+  iter = std::max_element(result->frames.begin(), result->frames.end(),
+                     LessForFrameResultValue);
+  result->max = iter->value;
+  result->max_frame_number = iter->frame_number;
+}
+
+// Single method that handles all combinations of video metrics calculation, to
+// minimize code duplication. Either psnr_result or ssim_result may be NULL,
+// depending on which VideoMetricsType is targeted.
+int CalculateMetrics(VideoMetricsType video_metrics_type,
+                     const char* ref_filename,
+                     const char* test_filename,
+                     int width,
+                     int height,
+                     QualityMetricsResult* psnr_result,
+                     QualityMetricsResult* ssim_result) {
+  assert(ref_filename != NULL);
+  assert(test_filename != NULL);
+  assert(width > 0);
+  assert(height > 0);
+
+  FILE* ref_fp = fopen(ref_filename, "rb");
+  if (ref_fp == NULL) {
+    // cannot open reference file
+    fprintf(stderr, "Cannot open file %s\n", ref_filename);
+    return -1;
+  }
+  FILE* test_fp = fopen(test_filename, "rb");
+  if (test_fp == NULL) {
+    // cannot open test file
+    fprintf(stderr, "Cannot open file %s\n", test_filename);
+    fclose(ref_fp);
+    return -2;
+  }
+  int frame_number = 0;
+
+  // Allocating size for one I420 frame.
+  const int frame_length = 3 * width * height >> 1;
+  uint8_t* ref = new uint8_t[frame_length];
+  uint8_t* test = new uint8_t[frame_length];
+
+  int ref_bytes = fread(ref, 1, frame_length, ref_fp);
+  int test_bytes = fread(test, 1, frame_length, test_fp);
+  while (ref_bytes == frame_length && test_bytes == frame_length) {
+    switch (video_metrics_type) {
+      case kPSNR:
+        CalculateFrame(kPSNR, ref, test, width, height, frame_number,
+                       psnr_result);
+        break;
+      case kSSIM:
+        CalculateFrame(kSSIM, ref, test, width, height, frame_number,
+                       ssim_result);
+        break;
+      case kBoth:
+        CalculateFrame(kPSNR, ref, test, width, height, frame_number,
+                       psnr_result);
+        CalculateFrame(kSSIM, ref, test, width, height, frame_number,
+                       ssim_result);
+        break;
+    }
+    frame_number++;
+    ref_bytes = fread(ref, 1, frame_length, ref_fp);
+    test_bytes = fread(test, 1, frame_length, test_fp);
+  }
+  int return_code = 0;
+  if (frame_number == 0) {
+    fprintf(stderr, "Tried to measure video metrics from empty files "
+            "(reference file: %s  test file: %s)\n", ref_filename,
+            test_filename);
+    return_code = -3;
+  } else {
+    CalculateStats(psnr_result);
+    CalculateStats(ssim_result);
+  }
+  delete [] ref;
+  delete [] test;
+  fclose(ref_fp);
+  fclose(test_fp);
+  return return_code;
+}
+
+int I420MetricsFromFiles(const char* ref_filename,
+                         const char* test_filename,
+                         int width,
+                         int height,
+                         QualityMetricsResult* psnr_result,
+                         QualityMetricsResult* ssim_result) {
+  assert(psnr_result != NULL);
+  assert(ssim_result != NULL);
+  return CalculateMetrics(kBoth, ref_filename, test_filename, width, height,
+                          psnr_result, ssim_result);
+}
+
+int I420PSNRFromFiles(const char* ref_filename,
+                      const char* test_filename,
+                      int width,
+                      int height,
+                      QualityMetricsResult* result) {
+  assert(result != NULL);
+  return CalculateMetrics(kPSNR, ref_filename, test_filename, width, height,
+                          result, NULL);
+}
+
+int I420SSIMFromFiles(const char* ref_filename,
+                      const char* test_filename,
+                      int width,
+                      int height,
+                      QualityMetricsResult* result) {
+  assert(result != NULL);
+  return CalculateMetrics(kSSIM, ref_filename, test_filename, width, height,
+                          NULL, result);
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/test/testsupport/metrics/video_metrics.h b/src/test/testsupport/metrics/video_metrics.h
similarity index 100%
rename from test/testsupport/metrics/video_metrics.h
rename to src/test/testsupport/metrics/video_metrics.h
diff --git a/test/testsupport/metrics/video_metrics_unittest.cc b/src/test/testsupport/metrics/video_metrics_unittest.cc
similarity index 100%
rename from test/testsupport/metrics/video_metrics_unittest.cc
rename to src/test/testsupport/metrics/video_metrics_unittest.cc
diff --git a/test/testsupport/mock/mock_frame_reader.h b/src/test/testsupport/mock/mock_frame_reader.h
similarity index 100%
rename from test/testsupport/mock/mock_frame_reader.h
rename to src/test/testsupport/mock/mock_frame_reader.h
diff --git a/test/testsupport/mock/mock_frame_writer.h b/src/test/testsupport/mock/mock_frame_writer.h
similarity index 100%
rename from test/testsupport/mock/mock_frame_writer.h
rename to src/test/testsupport/mock/mock_frame_writer.h
diff --git a/test/testsupport/packet_reader.cc b/src/test/testsupport/packet_reader.cc
similarity index 100%
rename from test/testsupport/packet_reader.cc
rename to src/test/testsupport/packet_reader.cc
diff --git a/test/testsupport/packet_reader.h b/src/test/testsupport/packet_reader.h
similarity index 100%
rename from test/testsupport/packet_reader.h
rename to src/test/testsupport/packet_reader.h
diff --git a/src/test/testsupport/packet_reader_unittest.cc b/src/test/testsupport/packet_reader_unittest.cc
new file mode 100644
index 0000000..9c7fc3b
--- /dev/null
+++ b/src/test/testsupport/packet_reader_unittest.cc
@@ -0,0 +1,124 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testsupport/packet_reader.h"
+
+#include "gtest/gtest.h"
+#include "testsupport/unittest_utils.h"
+
+namespace webrtc {
+namespace test {
+
+class PacketReaderTest: public PacketRelatedTest {
+ protected:
+  PacketReaderTest() {}
+  virtual ~PacketReaderTest() {}
+  void SetUp() {
+    reader_ = new PacketReader();
+  }
+  void TearDown() {
+    delete reader_;
+  }
+  void VerifyPacketData(int expected_length,
+                        int actual_length,
+                        WebRtc_UWord8* original_data_pointer,
+                        WebRtc_UWord8* new_data_pointer) {
+    EXPECT_EQ(expected_length, actual_length);
+    EXPECT_EQ(*original_data_pointer, *new_data_pointer);
+    EXPECT_EQ(0, memcmp(original_data_pointer, new_data_pointer,
+                        actual_length));
+  }
+  PacketReader* reader_;
+};
+
+// Test lack of initialization
+TEST_F(PacketReaderTest, Uninitialized) {
+  WebRtc_UWord8* data_pointer = NULL;
+  EXPECT_EQ(-1, reader_->NextPacket(&data_pointer));
+  EXPECT_EQ(NULL, data_pointer);
+}
+
+TEST_F(PacketReaderTest, InitializeZeroLengthArgument) {
+  reader_->InitializeReading(packet_data_, 0, kPacketSizeInBytes);
+  ASSERT_EQ(0, reader_->NextPacket(&packet_data_pointer_));
+}
+
+// Test with something smaller than one packet
+TEST_F(PacketReaderTest, NormalSmallData) {
+  const int kDataLengthInBytes = 1499;
+  WebRtc_UWord8 data[kDataLengthInBytes];
+  WebRtc_UWord8* data_pointer = data;
+  memset(data, 1, kDataLengthInBytes);
+
+  reader_->InitializeReading(data, kDataLengthInBytes, kPacketSizeInBytes);
+  int length_to_read = reader_->NextPacket(&data_pointer);
+  VerifyPacketData(kDataLengthInBytes, length_to_read, data, data_pointer);
+  EXPECT_EQ(0, data_pointer - data);  // pointer hasn't moved
+
+  // Reading another one shall result in 0 bytes:
+  length_to_read = reader_->NextPacket(&data_pointer);
+  EXPECT_EQ(0, length_to_read);
+  EXPECT_EQ(kDataLengthInBytes, data_pointer - data);
+}
+
+// Test with data length that exactly matches one packet
+TEST_F(PacketReaderTest, NormalOnePacketData) {
+  WebRtc_UWord8 data[kPacketSizeInBytes];
+  WebRtc_UWord8* data_pointer = data;
+  memset(data, 1, kPacketSizeInBytes);
+
+  reader_->InitializeReading(data, kPacketSizeInBytes, kPacketSizeInBytes);
+  int length_to_read = reader_->NextPacket(&data_pointer);
+  VerifyPacketData(kPacketSizeInBytes, length_to_read, data, data_pointer);
+  EXPECT_EQ(0, data_pointer - data);  // pointer hasn't moved
+
+  // Reading another one shall result in 0 bytes:
+  length_to_read = reader_->NextPacket(&data_pointer);
+  EXPECT_EQ(0, length_to_read);
+  EXPECT_EQ(kPacketSizeInBytes, data_pointer - data);
+}
+
+// Test with data length that will result in 3 packets
+TEST_F(PacketReaderTest, NormalLargeData) {
+  reader_->InitializeReading(packet_data_, kPacketDataLength,
+                             kPacketSizeInBytes);
+
+  int length_to_read = reader_->NextPacket(&packet_data_pointer_);
+  VerifyPacketData(kPacketSizeInBytes, length_to_read,
+                   packet1_, packet_data_pointer_);
+
+  length_to_read = reader_->NextPacket(&packet_data_pointer_);
+  VerifyPacketData(kPacketSizeInBytes, length_to_read,
+                   packet2_, packet_data_pointer_);
+
+  length_to_read = reader_->NextPacket(&packet_data_pointer_);
+  VerifyPacketData(1u, length_to_read,
+                   packet3_, packet_data_pointer_);
+
+  // Reading another one shall result in 0 bytes:
+  length_to_read = reader_->NextPacket(&packet_data_pointer_);
+  EXPECT_EQ(0, length_to_read);
+  EXPECT_EQ(kPacketDataLength, packet_data_pointer_ - packet_data_);
+}
+
+// Test with empty data.
+TEST_F(PacketReaderTest, EmptyData) {
+  const int kDataLengthInBytes = 0;
+  // But don't really try to allocate a zero-length array...
+  WebRtc_UWord8 data[kPacketSizeInBytes];
+  WebRtc_UWord8* data_pointer = data;
+  reader_->InitializeReading(data, kDataLengthInBytes, kPacketSizeInBytes);
+  EXPECT_EQ(kDataLengthInBytes, reader_->NextPacket(&data_pointer));
+  // Do it again to make sure nothing changes
+  EXPECT_EQ(kDataLengthInBytes, reader_->NextPacket(&data_pointer));
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/src/test/testsupport/unittest_utils.h b/src/test/testsupport/unittest_utils.h
new file mode 100644
index 0000000..30464de
--- /dev/null
+++ b/src/test/testsupport/unittest_utils.h
@@ -0,0 +1,56 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_TEST_TESTSUPPORT_UNITTEST_UTILS_H_
+#define WEBRTC_TEST_TESTSUPPORT_UNITTEST_UTILS_H_
+
+namespace webrtc {
+namespace test {
+
+const int kPacketSizeInBytes = 1500;
+const int kPacketDataLength = kPacketSizeInBytes * 2 + 1;
+const int kPacketDataNumberOfPackets = 3;
+
+// A base test fixture for packet related tests. Contains
+// two full prepared packets with 1s, 2s in their data and a third packet with
+// a single 3 in it (size=1).
+// A packet data structure is also available, that contains these three packets
+// in order.
+class PacketRelatedTest: public testing::Test {
+ protected:
+  // Tree packet byte arrays with data used for verification:
+  WebRtc_UWord8 packet1_[kPacketSizeInBytes];
+  WebRtc_UWord8 packet2_[kPacketSizeInBytes];
+  WebRtc_UWord8 packet3_[1];
+  // Construct a data structure containing these packets
+  WebRtc_UWord8 packet_data_[kPacketDataLength];
+  WebRtc_UWord8* packet_data_pointer_;
+
+  PacketRelatedTest() {
+    packet_data_pointer_ = packet_data_;
+
+    memset(packet1_, 1, kPacketSizeInBytes);
+    memset(packet2_, 2, kPacketSizeInBytes);
+    memset(packet3_, 3, 1);
+    // Fill the packet_data:
+    memcpy(packet_data_pointer_, packet1_, kPacketSizeInBytes);
+    memcpy(packet_data_pointer_ + kPacketSizeInBytes, packet2_,
+           kPacketSizeInBytes);
+    memcpy(packet_data_pointer_ + kPacketSizeInBytes * 2, packet3_, 1);
+  }
+  virtual ~PacketRelatedTest() {}
+  void SetUp() {}
+  void TearDown() {}
+};
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // WEBRTC_TEST_TESTSUPPORT_UNITTEST_UTILS_H_
diff --git a/src/tools/OWNERS b/src/tools/OWNERS
new file mode 100644
index 0000000..7b4acbb
--- /dev/null
+++ b/src/tools/OWNERS
@@ -0,0 +1,2 @@
+phoglund@webrtc.org
+kjellander@webrtc.org
\ No newline at end of file
diff --git a/src/tools/converter/converter.cc b/src/tools/converter/converter.cc
new file mode 100644
index 0000000..3b5f27b
--- /dev/null
+++ b/src/tools/converter/converter.cc
@@ -0,0 +1,174 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#include <stdlib.h>
+#include <sys/stat.h>
+
+#include <cstdio>
+#include <iomanip>
+#include <sstream>
+
+#include "tools/converter/converter.h"
+
+#ifdef WIN32
+#define SEPARATOR '\\'
+#define STAT _stat
+#else
+#define SEPARATOR '/'
+#define STAT stat
+#endif
+
+namespace webrtc {
+namespace test {
+
+Converter::Converter(int width, int height)
+    : width_(width),
+      height_(height) {
+}
+
+bool Converter::ConvertRGBAToI420Video(std::string frames_dir,
+                                       std::string output_file_name,
+                                       bool delete_frames) {
+  FILE* output_file = fopen(output_file_name.c_str(), "wb");
+
+  // Open output file in append mode.
+  if (output_file == NULL) {
+    fprintf(stderr, "Couldn't open input file for reading: %s\n",
+            output_file_name.c_str());
+    return false;
+  }
+
+  int input_frame_size = InputFrameSize();
+  uint8* rgba_buffer = new uint8[input_frame_size];
+  int y_plane_size = YPlaneSize();
+  uint8* dst_y = new uint8[y_plane_size];
+  int u_plane_size = UPlaneSize();
+  uint8* dst_u = new uint8[u_plane_size];
+  int v_plane_size = VPlaneSize();
+  uint8* dst_v = new uint8[v_plane_size];
+
+  int counter = 0;  // Counter to form frame names.
+  bool success = false;  // Is conversion successful.
+
+  while (true) {
+    std::string file_name = FormFrameName(4, counter);
+    // Get full path file name.
+    std::string input_file_name = FindFullFileName(frames_dir, file_name);
+
+    if (FileExists(input_file_name)) {
+      ++counter;  // Update counter for the next round.
+    } else {
+      fprintf(stdout, "Reached end of frames list\n");
+      break;
+    }
+
+    // Read the RGBA frame into rgba_buffer.
+    ReadRGBAFrame(input_file_name.c_str(), input_frame_size, rgba_buffer);
+
+    // Delete the input frame.
+    if (delete_frames) {
+      if (remove(input_file_name.c_str()) != 0) {
+        fprintf(stderr, "Cannot delete file %s\n", input_file_name.c_str());
+      }
+    }
+
+    // Convert to I420 frame.
+    libyuv::ABGRToI420(rgba_buffer, SrcStrideFrame(),
+                       dst_y, DstStrideY(),
+                       dst_u, DstStrideU(),
+                       dst_v, DstStrideV(),
+                       width_, height_);
+
+    // Add the I420 frame to the YUV video file.
+    success = AddYUVToFile(dst_y, y_plane_size, dst_u, u_plane_size,
+                           dst_v, v_plane_size, output_file);
+
+
+    if (!success) {
+      fprintf(stderr, "LibYUV error during RGBA to I420 frame conversion\n");
+      break;
+    }
+  }
+
+  delete[] rgba_buffer;
+  delete[] dst_y;
+  delete[] dst_u;
+  delete[] dst_v;
+
+  fclose(output_file);
+
+  return success;
+}
+
+bool Converter::AddYUVToFile(uint8* y_plane, int y_plane_size,
+                             uint8* u_plane, int u_plane_size,
+                             uint8* v_plane, int v_plane_size,
+                             FILE* output_file) {
+  bool success = AddYUVPlaneToFile(y_plane, y_plane_size, output_file) &&
+                 AddYUVPlaneToFile(u_plane, u_plane_size, output_file) &&
+                 AddYUVPlaneToFile(v_plane, v_plane_size, output_file);
+  return success;
+}
+
+bool Converter::AddYUVPlaneToFile(uint8* yuv_plane, int yuv_plane_size,
+                                  FILE* file) {
+  size_t bytes_written = fwrite(yuv_plane, 1, yuv_plane_size, file);
+
+  if (bytes_written != static_cast<size_t>(yuv_plane_size)) {
+    fprintf(stderr, "Number of bytes written (%d) doesn't match size of y plane"
+            " (%d)\n", static_cast<int>(bytes_written), yuv_plane_size);
+    return false;
+  }
+  return true;
+}
+
+bool Converter::ReadRGBAFrame(const char* input_file_name, int input_frame_size,
+                              unsigned char* buffer) {
+  FILE* input_file = fopen(input_file_name, "rb");
+  if (input_file == NULL) {
+    fprintf(stderr, "Couldn't open input file for reading: %s\n",
+            input_file_name);
+    return false;
+  }
+
+  size_t nbr_read = fread(buffer, 1, input_frame_size, input_file);
+  fclose(input_file);
+
+  if (nbr_read != static_cast<size_t>(input_frame_size)) {
+    fprintf(stderr, "Error reading from input file: %s\n", input_file_name);
+    return false;
+  }
+
+  return true;
+}
+
+std::string Converter::FindFullFileName(std::string dir_name,
+                                        std::string file_name) {
+  return dir_name + SEPARATOR + file_name;
+}
+
+bool Converter:: FileExists(std::string file_name_to_check) {
+  struct STAT file_info;
+  int result = STAT(file_name_to_check.c_str(), &file_info);
+  return (result == 0);
+}
+
+std::string Converter::FormFrameName(int width, int number) {
+  std::stringstream tmp;
+
+  // Zero-pad number to a string.
+  tmp << std::setfill('0') << std::setw(width) << number;
+
+  return "frame_" + tmp.str();
+}
+
+}  // namespace test
+}  // namespace webrtc
+
+
diff --git a/src/tools/converter/converter.h b/src/tools/converter/converter.h
new file mode 100644
index 0000000..1d8afd1
--- /dev/null
+++ b/src/tools/converter/converter.h
@@ -0,0 +1,106 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_TOOLS_CONVERTER_CONVERTER_H_
+#define WEBRTC_TOOLS_CONVERTER_CONVERTER_H_
+
+#include <string>
+
+#include "libyuv/convert.h"
+#include "libyuv/compare.h"
+
+namespace webrtc {
+namespace test {
+
+// Handles a conversion between a set of RGBA frames to a YUV (I420) video.
+class Converter {
+ public:
+  Converter(int width, int height);
+
+  // Converts RGBA to YUV video. If the delete_frames argument is true, the
+  // method will delete the input frames after conversion.
+  bool ConvertRGBAToI420Video(std::string frames_dir,
+                              std::string output_file_name, bool delete_frames);
+
+ private:
+  int width_;  // Width of the video (respectively of the RGBA frames).
+  int height_;  // Height of the video (respectively of the RGBA frames).
+
+  // Returns the size of the Y plane in bytes.
+  int YPlaneSize() const {
+    return width_*height_;
+  }
+
+  // Returns the size of the U plane in bytes.
+  int UPlaneSize() const {
+    return ((width_+1)/2)*((height_)/2);
+  }
+
+  // Returns the size of the V plane in bytes.
+  int VPlaneSize() const {
+    return ((width_+1)/2)*((height_)/2);
+  }
+
+  // Returns the number of bytes per row in the RGBA frame.
+  int SrcStrideFrame() const {
+    return width_*4;
+  }
+
+  // Returns the number of bytes in the Y plane.
+  int DstStrideY() const {
+    return width_;
+  }
+
+  // Returns the number of bytes in the U plane.
+  int DstStrideU() const {
+    return (width_+1)/2;
+  }
+
+  // Returns the number of bytes in the V plane.
+  int DstStrideV() const {
+    return (width_+1)/2;
+  }
+
+  // Returns the size in bytes of the input RGBA frames.
+  int InputFrameSize() const {
+    return width_*height_*4;
+  }
+
+  // Writes the Y, U and V (in this order) planes to the file, thus adding a
+  // raw YUV frame to the file.
+  bool AddYUVToFile(uint8* y_plane, int y_plane_size,
+                    uint8* u_plane, int u_plane_size,
+                    uint8* v_plane, int v_plane_size,
+                    FILE* output_file);
+
+  // Adds the Y, U or V plane to the file.
+  bool AddYUVPlaneToFile(uint8* yuv_plane, int yuv_plane_size, FILE* file);
+
+  // Reads a RGBA frame from input_file_name with input_frame_size size in bytes
+  // into the buffer.
+  bool ReadRGBAFrame(const char* input_file_name, int input_frame_size,
+                     unsigned char* buffer);
+
+  // Finds the full path name of the file - concatenates the directory and file
+  // names.
+  std::string FindFullFileName(std::string dir_name, std::string file_name);
+
+  // Checks if a file exists.
+  bool FileExists(std::string file_name_to_check);
+
+  // Returns the name of the file in the form frame_<number>, where <number> is
+    // 4 zero padded (i.e. frame_0000, frame_0001, etc.).
+  std::string FormFrameName(int width, int number);
+};
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // WEBRTC_TOOLS_CONVERTER_CONVERTER_H_
diff --git a/src/tools/converter/rgba_to_i420_converter.cc b/src/tools/converter/rgba_to_i420_converter.cc
new file mode 100644
index 0000000..ee15f80
--- /dev/null
+++ b/src/tools/converter/rgba_to_i420_converter.cc
@@ -0,0 +1,89 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cstdio>
+#include <cstdlib>
+#include <map>
+#include <string>
+#include <vector>
+
+#include "tools/converter/converter.h"
+#include "tools/simple_command_line_parser.h"
+
+/*
+ * A command-line tool based on libyuv to convert a set of RGBA files to a YUV
+ * video.
+ * Usage:
+ * rgba_to_i420_converter --frames_dir=<directory_to_rgba_frames>
+ * --output_file=<output_yuv_file> --width=<width_of_input_frames>
+ * --height=<height_of_input_frames>
+ */
+int main(int argc, char** argv) {
+  std::string program_name = argv[0];
+  std::string usage = "Converts RGBA raw image files to I420 frames for YUV.\n"
+    "Example usage:\n" + program_name +
+    " --frames_dir=. --output_file=output.yuv --width=320 --height=240\n"
+    "IMPORTANT: If you pass the --delete_frames command line parameter, the "
+    "tool will delete the input frames after conversion.\n"
+    "Command line flags:\n"
+    "  - width(int): Width in pixels of the frames in the input file."
+    " Default: -1\n"
+    "  - height(int): Height in pixels of the frames in the input file."
+    " Default: -1\n"
+    "  - frames_dir(string): The path to the directory where the frames reside."
+    " Default: .\n"
+    "  - output_file(string): The output file to which frames are written."
+    " Default: output.yuv\n"
+    "  - delete_frames(bool): Whether or not to delete the input frames after"
+    " the conversion. Default: false.\n";
+
+  webrtc::test::CommandLineParser parser;
+
+  // Init the parser and set the usage message
+  parser.Init(argc, argv);
+  parser.SetUsageMessage(usage);
+
+  parser.SetFlag("width", "-1");
+  parser.SetFlag("height", "-1");
+  parser.SetFlag("frames_dir", ".");
+  parser.SetFlag("output_file", "output.yuv");
+  parser.SetFlag("delete_frames", "false");
+  parser.SetFlag("help", "false");
+
+  parser.ProcessFlags();
+  if (parser.GetFlag("help") == "true") {
+    parser.PrintUsageMessage();
+  }
+  parser.PrintEnteredFlags();
+
+  int width = strtol((parser.GetFlag("width")).c_str(), NULL, 10);
+  int height = strtol((parser.GetFlag("height")).c_str(), NULL, 10);
+
+  if (width <= 0 || height <= 0) {
+    fprintf(stderr, "Error: width or height cannot be <= 0!\n");
+    return -1;
+  }
+
+  bool del_frames = (parser.GetFlag("delete_frames") == "true") ? true : false;
+
+  webrtc::test::Converter converter(width, height);
+  bool success = converter.ConvertRGBAToI420Video(parser.GetFlag("frames_dir"),
+                                                  parser.GetFlag("output_file"),
+                                                  del_frames);
+
+  if (success) {
+    fprintf(stdout, "Successful conversion of RGBA frames to YUV video!\n");
+    return 0;
+  } else {
+    fprintf(stdout, "Unsuccessful conversion of RGBA frames to YUV video!\n");
+    return -1;
+  }
+}
+
diff --git a/src/tools/frame_analyzer/frame_analyzer.cc b/src/tools/frame_analyzer/frame_analyzer.cc
new file mode 100644
index 0000000..2e9154c
--- /dev/null
+++ b/src/tools/frame_analyzer/frame_analyzer.cc
@@ -0,0 +1,98 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cstdio>
+#include <cstdlib>
+#include <string>
+#include <map>
+#include <vector>
+
+#include "tools/frame_analyzer/video_quality_analysis.h"
+#include "tools/simple_command_line_parser.h"
+
+/*
+ * A command line tool running PSNR and SSIM on a reference video and a test
+ * video. The test video is a record of the reference video which can start at
+ * an arbitrary point. It is possible that there will be repeated frames or
+ * skipped frames as well. In order to have a way to compare corresponding
+ * frames from the two videos, a stats file should be provided. The stats file
+ * is a text file assumed to be in the format:
+ * frame_xxxx yyyy
+ * where xxxx is the frame number in the test video and yyyy is the
+ * corresponding frame number in the original video.
+ * The video files should be 1420 YUV videos.
+ * The tool prints the result to the standard output in the following format:
+ * BSTATS
+ * <psnr_value> <ssim_value>; <psnr_value> <ssim_value>; ....
+ * ESTATS
+ * Unique_frames_count:<value>
+ * Max_repeated:<value>
+ * Max_skipped<value>
+ *
+ * The max value for PSNR is 48.0 (between equal frames), as for SSIM it is 1.0.
+ *
+ * Usage:
+ * frame_analyzer --reference_file=<name_of_file> --test_file=<name_of_file>
+ * --stats_file=<name_of_file> --width=<frame_width> --height=<frame_height>
+ */
+int main(int argc, char** argv) {
+  std::string program_name = argv[0];
+  std::string usage = "Compares the output video with the initially sent video."
+      "\nExample usage:\n" + program_name + " --stats_file=stats.txt "
+      "--reference_file=ref.yuv --test_file=test.yuv --width=320 --height=240\n"
+      "Command line flags:\n"
+      "  - width(int): The width of the reference and test files. Default: -1\n"
+      "  - height(int): The height of the reference and test files. "
+      " Default: -1\n"
+      "  - stats_file(string): The full name of the file containing the stats"
+      " after decoding of the received YUV video. Default: stats.txt\n"
+      "  - reference_file(string): The reference YUV file to compare against."
+      " Default: ref.yuv\n"
+      "  - test_file(string): The test YUV file to run the analysis for."
+      " Default: test_file.yuv\n";
+
+  webrtc::test::CommandLineParser parser;
+
+  // Init the parser and set the usage message
+  parser.Init(argc, argv);
+  parser.SetUsageMessage(usage);
+
+  parser.SetFlag("width", "-1");
+  parser.SetFlag("height", "-1");
+  parser.SetFlag("stats_file", "stats.txt");
+  parser.SetFlag("reference_file", "ref.yuv");
+  parser.SetFlag("test_file", "test.yuv");
+  parser.SetFlag("help", "false");
+
+  parser.ProcessFlags();
+  if (parser.GetFlag("help") == "true") {
+    parser.PrintUsageMessage();
+  }
+  parser.PrintEnteredFlags();
+
+  int width = strtol((parser.GetFlag("width")).c_str(), NULL, 10);
+  int height = strtol((parser.GetFlag("height")).c_str(), NULL, 10);
+
+  if (width <= 0 || height <= 0) {
+    fprintf(stderr, "Error: width or height cannot be <= 0!\n");
+    return -1;
+  }
+
+  webrtc::test::ResultsContainer results;
+
+  webrtc::test::RunAnalysis(parser.GetFlag("reference_file").c_str(),
+                            parser.GetFlag("test_file").c_str(),
+                            parser.GetFlag("stats_file").c_str(), width, height,
+                            &results);
+
+  webrtc::test::PrintAnalysisResults(&results);
+  webrtc::test::PrintMaxRepeatedAndSkippedFrames(
+      parser.GetFlag("stats_file").c_str());
+}
diff --git a/src/tools/frame_analyzer/video_quality_analysis.cc b/src/tools/frame_analyzer/video_quality_analysis.cc
new file mode 100644
index 0000000..8314e91
--- /dev/null
+++ b/src/tools/frame_analyzer/video_quality_analysis.cc
@@ -0,0 +1,287 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "tools/frame_analyzer/video_quality_analysis.h"
+
+#include <cassert>
+#include <cstdio>
+#include <cstdlib>
+#include <string>
+
+#define STATS_LINE_LENGTH 32
+
+namespace webrtc {
+namespace test {
+
+int GetI420FrameSize(int width, int height) {
+  int half_width = (width + 1) >> 1;
+  int half_height = (height + 1) >> 1;
+
+  int y_plane = width * height;  // I420 Y plane.
+  int u_plane = half_width * half_height;  // I420 U plane.
+  int v_plane = half_width * half_height;  // I420 V plane.
+
+  return y_plane + u_plane + v_plane;
+}
+
+int ExtractFrameSequenceNumber(std::string line) {
+  int space_position = line.find(' ');
+  if (space_position == -1) {
+    return -1;
+  }
+  std::string frame = line.substr(0, space_position);
+
+  int underscore_position = frame.find('_');
+  if (underscore_position == -1) {
+    return -1;
+  }
+  std::string frame_number = frame.substr(underscore_position + 1);
+
+  return strtol(frame_number.c_str(), NULL, 10);
+}
+
+int ExtractDecodedFrameNumber(std::string line) {
+  int space_position = line.find(' ');
+  if (space_position == -1) {
+    return -1;
+  }
+  std::string decoded_number = line.substr(space_position + 1);
+
+  return strtol(decoded_number.c_str(), NULL, 10);
+}
+
+bool IsThereBarcodeError(std::string line) {
+  int barcode_error_position = line.find("Barcode error");
+  if (barcode_error_position != -1) {
+    return true;
+  }
+  return false;
+}
+
+bool GetNextStatsLine(FILE* stats_file, char* line) {
+  int chars = 0;
+  char buf = 0;
+
+  while (buf != '\n') {
+    size_t chars_read = fread(&buf, 1, 1, stats_file);
+    if (chars_read != 1 || feof(stats_file)) {
+      return false;
+    }
+    line[chars] = buf;
+    ++chars;
+  }
+  line[chars-1] = '\0';  // Strip the trailing \n and put end of string.
+  return true;
+}
+
+bool GetNextI420Frame(FILE* input_file, int width, int height,
+                      uint8* result_frame) {
+  int frame_size = GetI420FrameSize(width, height);
+  bool errors = false;
+
+  size_t bytes_read = fread(result_frame, 1, frame_size, input_file);
+  if (bytes_read != static_cast<size_t>(frame_size)) {
+    // If end-of-file is reached, don't print an error.
+    if (feof(input_file)) {
+      return false;
+    }
+    fprintf(stdout, "Error while reading frame from file\n");
+    errors = true;
+  }
+  return !errors;
+}
+
+bool ExtractFrameFromI420(const char* i420_file_name, int width, int height,
+                          int frame_number, uint8* result_frame) {
+  int frame_size = GetI420FrameSize(width, height);
+  int offset = frame_number * frame_size;  // Calculate offset for the frame.
+  bool errors = false;
+
+  FILE* input_file = fopen(i420_file_name, "rb");
+  if (input_file == NULL) {
+    fprintf(stderr, "Couldn't open input file for reading: %s\n",
+            i420_file_name);
+    return false;
+  }
+
+  // Change stream pointer to new offset.
+  fseek(input_file, offset, SEEK_SET);
+
+  size_t bytes_read = fread(result_frame, 1, frame_size, input_file);
+  if (bytes_read != static_cast<size_t>(frame_size) &&
+      ferror(input_file)) {
+    fprintf(stdout, "Error while reading frame no %d from file %s\n",
+            frame_number, i420_file_name);
+    errors = true;
+  }
+  fclose(input_file);
+  return !errors;
+}
+
+double CalculateMetrics(VideoAnalysisMetricsType video_metrics_type,
+                        const uint8* ref_frame,  const uint8* test_frame,
+                        int width, int height) {
+  if (!ref_frame || !test_frame)
+    return -1;
+  else if (height < 0 || width < 0)
+    return -1;
+  int half_width = (width + 1) >> 1;
+  int half_height = (height + 1) >> 1;
+  const uint8* src_y_a = ref_frame;
+  const uint8* src_u_a = src_y_a + width * height;
+  const uint8* src_v_a = src_u_a + half_width * half_height;
+  const uint8* src_y_b = test_frame;
+  const uint8* src_u_b = src_y_b + width * height;
+  const uint8* src_v_b = src_u_b + half_width * half_height;
+
+  int stride_y = width;
+  int stride_uv = half_width;
+
+  double result = 0.0;
+
+  switch (video_metrics_type) {
+    case kPSNR:
+      // In the following: stride is determined by width.
+      result = libyuv::I420Psnr(src_y_a, width, src_u_a, half_width,
+                                src_v_a, half_width, src_y_b, width,
+                                src_u_b, half_width, src_v_b, half_width,
+                                width, height);
+      // LibYuv sets the max psnr value to 128, we restrict it to 48.
+      // In case of 0 mse in one frame, 128 can skew the results significantly.
+      result = (result > 48.0) ? 48.0 : result;
+      break;
+    case kSSIM:
+      result = libyuv::I420Ssim(src_y_a, stride_y, src_u_a, stride_uv,
+                                src_v_a, stride_uv, src_y_b, stride_y,
+                                src_u_b, stride_uv, src_v_b, stride_uv,
+                                width, height);
+      break;
+    default:
+      assert(false);
+  }
+
+  return result;
+}
+
+void RunAnalysis(const char* reference_file_name, const char* test_file_name,
+                 const char* stats_file_name, int width, int height,
+                 ResultsContainer* results) {
+  int size = GetI420FrameSize(width, height);
+  FILE* stats_file = fopen(stats_file_name, "r");
+
+  // String buffer for the lines in the stats file.
+  char line[STATS_LINE_LENGTH];
+
+  // Allocate buffers for test and reference frames.
+  uint8* test_frame = new uint8[size];
+  uint8* reference_frame = new uint8[size];
+  int previous_frame_number = -1;
+
+  // While there are entries in the stats file.
+  while (GetNextStatsLine(stats_file, line)) {
+    int extracted_test_frame = ExtractFrameSequenceNumber(line);
+    int decoded_frame_number = ExtractDecodedFrameNumber(line);
+
+    // If there was problem decoding the barcode in this frame or the frame has
+    // been duplicated, continue.
+    if (IsThereBarcodeError(line) ||
+        decoded_frame_number == previous_frame_number) {
+      continue;
+    }
+
+    assert(extracted_test_frame != -1);
+    assert(decoded_frame_number != -1);
+
+    ExtractFrameFromI420(test_file_name, width, height, extracted_test_frame,
+                         test_frame);
+    ExtractFrameFromI420(reference_file_name, width, height,
+                         decoded_frame_number, reference_frame);
+
+    // Calculate the PSNR and SSIM.
+    double result_psnr = CalculateMetrics(kPSNR, reference_frame, test_frame,
+                                          width, height);
+    double result_ssim = CalculateMetrics(kSSIM, reference_frame, test_frame,
+                                          width, height);
+
+    previous_frame_number = decoded_frame_number;
+
+    // Fill in the result struct.
+    AnalysisResult result;
+    result.frame_number = decoded_frame_number;
+    result.psnr_value = result_psnr;
+    result.ssim_value = result_ssim;
+
+    results->frames.push_back(result);
+  }
+
+  // Cleanup.
+  fclose(stats_file);
+  delete[] test_frame;
+  delete[] reference_frame;
+}
+
+void PrintMaxRepeatedAndSkippedFrames(const char* stats_file_name) {
+  FILE* stats_file = fopen(stats_file_name, "r");
+  char line[STATS_LINE_LENGTH];
+
+  int repeated_frames = 1;
+  int max_repeated_frames = 1;
+  int max_skipped_frames = 1;
+  int previous_frame_number = -1;
+
+  while (GetNextStatsLine(stats_file, line)) {
+    int decoded_frame_number = ExtractDecodedFrameNumber(line);
+
+    if (decoded_frame_number == -1) {
+      continue;
+    }
+
+    // Calculate how many frames a cluster of repeated frames contains.
+    if (decoded_frame_number == previous_frame_number) {
+      ++repeated_frames;
+      if (repeated_frames > max_repeated_frames) {
+        max_repeated_frames = repeated_frames;
+      }
+    } else {
+      repeated_frames = 1;
+    }
+
+    // Calculate how much frames have been skipped.
+    if (decoded_frame_number != 0 && previous_frame_number != -1) {
+      int skipped_frames = decoded_frame_number - previous_frame_number - 1;
+      if (skipped_frames > max_skipped_frames) {
+        max_skipped_frames = skipped_frames;
+      }
+    }
+    previous_frame_number = decoded_frame_number;
+  }
+  fprintf(stdout, "Max_repeated:%d Max_skipped:%d\n", max_repeated_frames,
+          max_skipped_frames);
+}
+
+void PrintAnalysisResults(ResultsContainer* results) {
+  std::vector<AnalysisResult>::iterator iter;
+  int frames_counter = 0;
+
+  fprintf(stdout, "BSTATS\n");
+  for (iter = results->frames.begin(); iter != results->frames.end(); ++iter) {
+    ++frames_counter;
+    fprintf(stdout, "%f %f;", iter->psnr_value, iter->ssim_value);
+  }
+  fprintf(stdout, "ESTATS\n");
+  if (frames_counter > 0) {
+    fprintf(stdout, "Unique_frames_count:%d\n", frames_counter);
+  } else {
+    fprintf(stdout, "Unique_frames_count:undef\n");
+  }
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/src/tools/frame_analyzer/video_quality_analysis.h b/src/tools/frame_analyzer/video_quality_analysis.h
new file mode 100644
index 0000000..a385b77
--- /dev/null
+++ b/src/tools/frame_analyzer/video_quality_analysis.h
@@ -0,0 +1,95 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_TOOLS_FRAME_ANALYZER_VIDEO_QUALITY_ANALYSIS_H_
+#define WEBRTC_TOOLS_FRAME_ANALYZER_VIDEO_QUALITY_ANALYSIS_H_
+
+#include <string>
+#include <vector>
+
+#include "libyuv/convert.h"
+#include "libyuv/compare.h"
+
+namespace webrtc {
+namespace test {
+
+struct AnalysisResult {
+  int frame_number;
+  double psnr_value;
+  double ssim_value;
+};
+
+struct ResultsContainer {
+  std::vector<AnalysisResult> frames;
+};
+
+enum VideoAnalysisMetricsType {kPSNR, kSSIM};
+
+// A function to run the PSNR and SSIM analysis on the test file. The test file
+// comprises the frames that were captured during the quality measurement test.
+// There may be missing or duplicate frames. Also the frames start at a random
+// position in the original video. We should provide a statistics file along
+// with the test video. The stats file contains the connection between the
+// actual frames in the test file and their position in the reference video, so
+// that the analysis could run with the right frames from both videos. The stats
+// file should be in the form 'frame_xxxx yyyy', where xxxx is the consecutive
+// number of the frame in the test video, and yyyy is the equivalent frame in
+// the reference video. The stats file could be produced by
+// tools/barcode_tools/barcode_decoder.py. This script decodes the barcodes
+// integrated in every video and generates the stats file. If three was some
+// problem with the decoding there would be 'Barcode error' instead of yyyy.
+void RunAnalysis(const char* reference_file_name, const char* test_file_name,
+                 const char* stats_file_name, int width, int height,
+                 ResultsContainer* results);
+
+// Compute PSNR or SSIM for an I420 frame (all planes). When we are calculating
+// PSNR values, the max return value (in the case where the test and reference
+// frames are exactly the same) will be 48. In the case of SSIM the max return
+// value will be 1.
+double CalculateMetrics(VideoAnalysisMetricsType video_metrics_type,
+                        const uint8* ref_frame,  const uint8* test_frame,
+                        int width, int height);
+
+// Function to print the result from the analysis.
+void PrintAnalysisResults(ResultsContainer* results);
+
+// Calculates max repeated and skipped frames.
+void PrintMaxRepeatedAndSkippedFrames(const char* stats_file_name);
+
+// Gets the next line from an open stats file.
+bool GetNextStatsLine(FILE* stats_file, char* line);
+
+// Calculates the size of a I420 frame if given the width and height.
+int GetI420FrameSize(int width, int height);
+
+// Extract the sequence of the frame in the video. I.e. if line is
+// frame_0023 0284, we will get 23.
+int ExtractFrameSequenceNumber(std::string line);
+
+// Checks if there is 'Barcode error' for the given line.
+bool IsThereBarcodeError(std::string line);
+
+// Extract the frame number in the reference video. I.e. if line is
+// frame_0023 0284, we will get 284.
+int ExtractDecodedFrameNumber(std::string line);
+
+// Gets the next frame from an open I420 file.
+bool GetNextI420Frame(FILE* input_file, int width, int height,
+                      uint8* result_frame);
+
+// Extracts an I420 frame at position frame_number from the file.
+bool ExtractFrameFromI420(const char* i420_file_name, int width, int height,
+                          int frame_number, uint8* result_frame);
+
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // WEBRTC_TOOLS_FRAME_ANALYZER_VIDEO_QUALITY_ANALYSIS_H_
diff --git a/src/tools/psnr_ssim_analyzer/psnr_ssim_analyzer.cc b/src/tools/psnr_ssim_analyzer/psnr_ssim_analyzer.cc
new file mode 100644
index 0000000..00fdf63
--- /dev/null
+++ b/src/tools/psnr_ssim_analyzer/psnr_ssim_analyzer.cc
@@ -0,0 +1,118 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cstdio>
+#include <cstdlib>
+#include <map>
+#include <string>
+#include <vector>
+
+#include "tools/frame_analyzer/video_quality_analysis.h"
+#include "tools/simple_command_line_parser.h"
+
+void CompareFiles(const char* reference_file_name, const char* test_file_name,
+                  const char* results_file_name, int width, int height) {
+  FILE* ref_file = fopen(reference_file_name, "rb");
+  FILE* test_file = fopen(test_file_name, "rb");
+  FILE* results_file = fopen(results_file_name, "w");
+
+  int size = webrtc::test::GetI420FrameSize(width, height);
+
+  // Allocate buffers for test and reference frames.
+  uint8* test_frame = new uint8[size];
+  uint8* ref_frame = new uint8[size];
+
+  int frame_counter = 0;
+
+  while (webrtc::test::GetNextI420Frame(ref_file, width, height, ref_frame) &&
+         webrtc::test::GetNextI420Frame(test_file, width, height, test_frame)) {
+    // Calculate the PSNR and SSIM.
+    double result_psnr = webrtc::test::CalculateMetrics(
+        webrtc::test::kPSNR, ref_frame, test_frame, width, height);
+    double result_ssim = webrtc::test::CalculateMetrics(
+        webrtc::test::kSSIM, ref_frame, test_frame, width, height);
+    fprintf(results_file, "Frame: %d, PSNR: %f, SSIM: %f\n", frame_counter,
+            result_psnr, result_ssim);
+    ++frame_counter;
+  }
+  delete[] test_frame;
+  delete[] ref_frame;
+
+  fclose(ref_file);
+  fclose(test_file);
+  fclose(results_file);
+}
+
+/*
+ * A tool running PSNR and SSIM analysis on two videos - a reference video and a
+ * test video. The two videos should be I420 YUV videos.
+ * The tool just runs PSNR and SSIM on the corresponding frames in the test and
+ * the reference videos until either the first or the second video runs out of
+ * frames. The result is written in a results text file in the format:
+ * Frame: <frame_number>, PSNR: <psnr_value>, SSIM: <ssim_value>
+ * Frame: <frame_number>, ........
+ *
+ * The max value for PSNR is 48.0 (between equal frames), as for SSIM it is 1.0.
+ *
+ * Usage:
+ * psnr_ssim_analyzer --reference_file=<name_of_file> --test_file=<name_of_file>
+ * --results_file=<name_of_file> --width=<width_of_frames>
+ * --height=<height_of_frames>
+ */
+int main(int argc, char** argv) {
+  std::string program_name = argv[0];
+  std::string usage = "Runs PSNR and SSIM on two I420 videos and write the"
+      "results in a file.\n"
+      "Example usage:\n" + program_name + " --reference_file=ref.yuv "
+      "--test_file=test.yuv --results_file=results.txt --width=320 "
+      "--height=240\n"
+      "Command line flags:\n"
+      "  - width(int): The width of the reference and test files. Default: -1\n"
+      "  - height(int): The height of the reference and test files. "
+      " Default: -1\n"
+      "  - reference_file(string): The reference YUV file to compare against."
+      " Default: ref.yuv\n"
+      "  - test_file(string): The test YUV file to run the analysis for."
+      " Default: test_file.yuv\n"
+      "  - results_file(string): The full name of the file where the results "
+      "will be written. Default: results.txt\n";
+
+  webrtc::test::CommandLineParser parser;
+
+  // Init the parser and set the usage message
+  parser.Init(argc, argv);
+  parser.SetUsageMessage(usage);
+
+  parser.SetFlag("width", "-1");
+  parser.SetFlag("height", "-1");
+  parser.SetFlag("results_file", "results.txt");
+  parser.SetFlag("reference_file", "ref.yuv");
+  parser.SetFlag("test_file", "test.yuv");
+  parser.SetFlag("results_file", "results.txt");
+  parser.SetFlag("help", "false");
+
+  parser.ProcessFlags();
+  if (parser.GetFlag("help") == "true") {
+    parser.PrintUsageMessage();
+  }
+  parser.PrintEnteredFlags();
+
+  int width = strtol((parser.GetFlag("width")).c_str(), NULL, 10);
+  int height = strtol((parser.GetFlag("height")).c_str(), NULL, 10);
+
+  if (width <= 0 || height <= 0) {
+    fprintf(stderr, "Error: width or height cannot be <= 0!\n");
+    return -1;
+  }
+
+  CompareFiles(parser.GetFlag("reference_file").c_str(),
+               parser.GetFlag("test_file").c_str(),
+               parser.GetFlag("results_file").c_str(), width, height);
+}
diff --git a/src/tools/simple_command_line_parser.cc b/src/tools/simple_command_line_parser.cc
new file mode 100644
index 0000000..0ad5e60
--- /dev/null
+++ b/src/tools/simple_command_line_parser.cc
@@ -0,0 +1,130 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "tools/simple_command_line_parser.h"
+
+#include <cstdio>
+#include <cstdlib>
+
+namespace webrtc {
+namespace test {
+
+void CommandLineParser::Init(int argc, char** argv) {
+  args_ = std::vector<std::string> (argv + 1, argv + argc);
+}
+
+bool CommandLineParser::IsStandaloneFlag(std::string flag) {
+  int equal_pos = flag.find("=");
+
+  if (equal_pos < 0) {
+    return true;
+  }
+  return false;
+}
+
+bool CommandLineParser::IsFlagWellFormed(std::string flag) {
+  int dash_pos = flag.find("--");
+  int equal_pos = flag.find("=");
+
+  if (dash_pos != 0) {
+    fprintf(stderr, "Wrong switch format: %s\n", flag.c_str());
+    fprintf(stderr, "Flag doesn't start with --\n");
+    return false;
+  }
+
+  int flag_length = flag.length() - 1;
+
+  // We use 3 here because we assume that the flags are in the format
+  // --flag_name=flag_value, thus -- are at positions 0 and 1 and we should have
+  // at least one symbor for the flag name.
+  if (equal_pos >= 0 && (equal_pos < 3 || equal_pos == flag_length)) {
+    fprintf(stderr, "Wrong switch format: %s\n", flag.c_str());
+    fprintf(stderr, "Wrong placement of =\n");
+    return false;
+  }
+  return true;
+}
+
+std::string CommandLineParser::GetCommandLineFlagName(std::string flag) {
+  int dash_pos = flag.find("--");
+  int equal_pos = flag.find("=");
+
+  if (equal_pos < 0) {
+    return flag.substr(dash_pos+2);
+  } else {
+    return flag.substr(dash_pos+2, equal_pos-2);
+  }
+}
+
+std::string CommandLineParser::GetCommandLineFlagValue(std::string flag) {
+  int equal_pos = flag.find("=");
+
+  return flag.substr(equal_pos+1);
+}
+
+void CommandLineParser::PrintEnteredFlags() {
+  std::map<std::string, std::string>::iterator flag_iter;
+  fprintf(stdout, "You have entered:\n");
+  for (flag_iter = flags_.begin(); flag_iter != flags_.end(); ++flag_iter) {
+    if (flag_iter->first != "help") {
+      fprintf(stdout, "%s=%s, ", flag_iter->first.c_str(),
+              flag_iter->second.c_str());
+    }
+  }
+  fprintf(stdout, "\n");
+}
+
+void CommandLineParser::ProcessFlags() {
+  std::map<std::string, std::string>::iterator flag_iter;
+  std::vector<std::string>::iterator iter;
+  for (iter = args_.begin(); iter != args_.end(); ++iter) {
+    if (!IsFlagWellFormed(*iter)) {
+      // Ignore badly formated flags.
+      continue;
+    }
+    std::string flag_name = GetCommandLineFlagName(*iter);
+    flag_iter = flags_.find(flag_name);
+    if (flag_iter == flags_.end()) {
+      // Ignore unknown flags.
+      fprintf(stdout, "Flag '%s' is not recognized\n", flag_name.c_str());
+      continue;
+    }
+    if (IsStandaloneFlag(*iter)) {
+      flags_[flag_name] = "true";
+    } else {
+      flags_[flag_name] = GetCommandLineFlagValue(*iter);
+    }
+  }
+}
+
+void CommandLineParser::SetUsageMessage(std::string usage_message) {
+  usage_message_ = usage_message;
+}
+
+void CommandLineParser::PrintUsageMessage() {
+  fprintf(stdout, "%s", usage_message_.c_str());
+}
+
+void CommandLineParser::SetFlag(std::string flag_name, std::string flag_value) {
+  flags_[flag_name] = flag_value;
+}
+
+std::string CommandLineParser::GetFlag(std::string flag_name) {
+  std::map<std::string, std::string>::iterator flag_iter;
+  flag_iter = flags_.find(flag_name);
+  // If no such file.
+  if (flag_iter == flags_.end()) {
+    return "";
+  }
+  return flag_iter->second;
+}
+
+}  // namespace test
+}  // namespace webrtc
diff --git a/src/tools/simple_command_line_parser.h b/src/tools/simple_command_line_parser.h
new file mode 100644
index 0000000..31cac65
--- /dev/null
+++ b/src/tools/simple_command_line_parser.h
@@ -0,0 +1,77 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_TOOLS_SIMPLE_COMMAND_LINE_PARSER_H_
+#define WEBRTC_TOOLS_SIMPLE_COMMAND_LINE_PARSER_H_
+
+#include <string>
+#include <map>
+#include <vector>
+
+// This is a very basic command line parsing class. We pass the command line
+// arguments and their number and the class forms a vector out of these. Than we
+// should set up the flags - we provide a name and a string value and map these.
+
+namespace webrtc {
+namespace test {
+
+class CommandLineParser {
+ public:
+  CommandLineParser() {}
+  ~CommandLineParser() {}
+
+  void Init(int argc, char** argv);
+
+  // Prints the entered flags and their values (without --help).
+  void PrintEnteredFlags();
+
+  // Processes the vector of command line arguments and puts the value of each
+  // flag in the corresponding map entry for this flag's name. We don't process
+  // flags which haven't been defined in the map.
+  void ProcessFlags();
+
+  // Sets the usage message to be shown if we pass --help.
+  void SetUsageMessage(std::string usage_message);
+
+  // prints the usage message.
+  void PrintUsageMessage();
+
+  // Set a flag into the map of flag names/values.
+  void SetFlag(std::string flag_name, std::string flag_value);
+
+  // Gets a flag when provided a flag name. Returns "" if the flag is unknown.
+  std::string GetFlag(std::string flag_name);
+
+ private:
+  // The vector of passed command line arguments.
+  std::vector<std::string> args_;
+  // The map of the flag names/values.
+  std::map<std::string, std::string> flags_;
+  // The usage message.
+  std::string usage_message_;
+
+  // Returns whether the passed flag is standalone or not. By standalone we
+  // understand e.g. --standalone (in contrast to --non_standalone=1).
+  bool IsStandaloneFlag(std::string flag);
+
+  // Checks weather the flag is in the format --flag_name=flag_value.
+  bool IsFlagWellFormed(std::string flag);
+
+  // Extracts the flag name from the flag.
+  std::string GetCommandLineFlagName(std::string flag);
+
+  // Extracts the falg value from the flag.
+  std::string GetCommandLineFlagValue(std::string flag);
+};
+
+}  // namespace test
+}  // namespace webrtc
+
+#endif  // WEBRTC_TOOLS_SIMPLE_COMMAND_LINE_PARSER_H_
diff --git a/src/tools/tools.gyp b/src/tools/tools.gyp
new file mode 100644
index 0000000..bff24a5
--- /dev/null
+++ b/src/tools/tools.gyp
@@ -0,0 +1,88 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'includes': [
+    '../build/common.gypi',
+  ],
+  'targets': [
+    {
+      'target_name': 'command_line_parser',
+      'type': '<(library)',
+      'include_dirs': [
+        '.',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '.',
+        ],
+      },
+      'sources': [
+        'simple_command_line_parser.h',
+        'simple_command_line_parser.cc',
+      ],
+    }, # command_line_parser
+    {
+      'target_name': 'video_quality_analysis',
+      'type': '<(library)',
+      'dependencies': [
+        '<(DEPTH)/third_party/libyuv/libyuv.gyp:libyuv',
+      ],
+      'include_dirs': [
+        'frame_analyzer',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'frame_analyzer',
+        ],
+      },
+      'export_dependent_settings': [
+        '<(DEPTH)/third_party/libyuv/libyuv.gyp:libyuv',
+      ],
+      'sources': [
+        'frame_analyzer/video_quality_analysis.h',
+        'frame_analyzer/video_quality_analysis.cc',
+      ],
+    }, # video_quality_analysis
+    {
+      'target_name': 'frame_analyzer',
+      'type': 'executable',
+      'dependencies': [
+        'command_line_parser',
+        'video_quality_analysis',
+      ],
+      'sources': [
+        'frame_analyzer/frame_analyzer.cc',
+      ],
+    }, # frame_analyzer
+    {
+      'target_name': 'psnr_ssim_analyzer',
+      'type': 'executable',
+      'dependencies': [
+        'command_line_parser',
+        'video_quality_analysis',
+      ],
+      'sources': [
+        'psnr_ssim_analyzer/psnr_ssim_analyzer.cc',
+      ],
+    }, # psnr_ssim_analyzer
+    {
+      'target_name': 'rgba_to_i420_converter',
+      'type': 'executable',
+      'dependencies': [
+        'command_line_parser',
+        '<(DEPTH)/third_party/libyuv/libyuv.gyp:libyuv',
+      ],
+      'sources': [
+        'converter/converter.h',
+        'converter/converter.cc',
+        'converter/rgba_to_i420_converter.cc',
+      ],
+    }, # rgba_to_i420_converter
+  ],
+}
diff --git a/src/typedefs.h b/src/typedefs.h
index 0546262..e8fcd65 100644
--- a/src/typedefs.h
+++ b/src/typedefs.h
@@ -1,5 +1,5 @@
 /*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
@@ -9,63 +9,39 @@
  */
 
 // This file contains platform-specific typedefs and defines.
+// Much of it is derived from Chromium's build/build_config.h.
 
 #ifndef WEBRTC_TYPEDEFS_H_
 #define WEBRTC_TYPEDEFS_H_
 
 // Reserved words definitions
-// TODO(andrew): Look at removing these.
-#define WEBRTC_EXTERN extern
+// TODO(andrew): Remove this.
 #define G_CONST const
-#define WEBRTC_INLINE extern __inline
 
-// Define WebRTC preprocessor identifiers based on the current build platform.
-// TODO(andrew): Clean these up. We can probably remove everything in this
-// block.
-//   - TARGET_MAC_INTEL and TARGET_MAC aren't used anywhere.
-//   - In the few places where TARGET_PC is used, it should be replaced by
-//     something more specific.
-//   - Do we really support PowerPC? Probably not. Remove WEBRTC_MAC_INTEL
-//     from build/common.gypi as well.
-#if defined(WIN32)
-    // Windows & Windows Mobile.
-    #if !defined(WEBRTC_TARGET_PC)
-        #define WEBRTC_TARGET_PC
-    #endif
-#elif defined(__APPLE__)
-    // Mac OS X.
-    #if defined(__LITTLE_ENDIAN__ )
-        #if !defined(WEBRTC_TARGET_MAC_INTEL)
-            #define WEBRTC_TARGET_MAC_INTEL
-        #endif
-    #else
-        #if !defined(WEBRTC_TARGET_MAC)
-            #define WEBRTC_TARGET_MAC
-        #endif
-    #endif
-#else
-    // Linux etc.
-    #if !defined(WEBRTC_TARGET_PC)
-        #define WEBRTC_TARGET_PC
-    #endif
+// For access to standard POSIXish features, use WEBRTC_POSIX instead of a
+// more specific macro.
+#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) || \
+    defined(WEBRTC_ANDROID)
+#define WEBRTC_POSIX
 #endif
 
-// Derived from Chromium's build/build_config.h
 // Processor architecture detection.  For more info on what's defined, see:
 //   http://msdn.microsoft.com/en-us/library/b0084kay.aspx
 //   http://www.agner.org/optimize/calling_conventions.pdf
 //   or with gcc, run: "echo | gcc -E -dM -"
-// TODO(andrew): replace WEBRTC_LITTLE_ENDIAN with WEBRTC_ARCH_LITTLE_ENDIAN?
+// TODO(andrew): replace WEBRTC_LITTLE_ENDIAN with WEBRTC_ARCH_LITTLE_ENDIAN.
 #if defined(_M_X64) || defined(__x86_64__)
 #define WEBRTC_ARCH_X86_FAMILY
 #define WEBRTC_ARCH_X86_64
 #define WEBRTC_ARCH_64_BITS
 #define WEBRTC_ARCH_LITTLE_ENDIAN
+#define WEBRTC_LITTLE_ENDIAN
 #elif defined(_M_IX86) || defined(__i386__)
 #define WEBRTC_ARCH_X86_FAMILY
 #define WEBRTC_ARCH_X86
 #define WEBRTC_ARCH_32_BITS
 #define WEBRTC_ARCH_LITTLE_ENDIAN
+#define WEBRTC_LITTLE_ENDIAN
 #elif defined(__ARMEL__)
 // TODO(andrew): We'd prefer to control platform defines here, but this is
 // currently provided by the Android makefiles. Commented to avoid duplicate
@@ -76,73 +52,41 @@
 //#define WEBRTC_ARCH_ARMEL
 #define WEBRTC_ARCH_32_BITS
 #define WEBRTC_ARCH_LITTLE_ENDIAN
+#define WEBRTC_LITTLE_ENDIAN
+#elif defined(__MIPSEL__)
+#define WEBRTC_ARCH_32_BITS
+#define WEBRTC_ARCH_LITTLE_ENDIAN
+#define WEBRTC_LITTLE_ENDIAN
 #elif defined(__aarch64__)
 #define WEBRTC_ARCH_64_BITS
 #define WEBRTC_ARCH_LITTLE_ENDIAN
-#elif defined(__mips__)
-#define WEBRTC_ARCH_32_BITS
-#define WEBRTC_ARCH_LITTLE_ENDIAN
 #else
 #error Please add support for your architecture in typedefs.h
 #endif
 
-#if defined(__SSE2__) || defined(_MSC_VER)
-#define WEBRTC_USE_SSE2
-#endif
-
-#if defined(WEBRTC_TARGET_PC)
-
 #if !defined(_MSC_VER)
-  #include <stdint.h>
+#include <stdint.h>
 #else
-    // Define C99 equivalent types.
-    // Since MSVC doesn't include these headers, we have to write our own
-    // version to provide a compatibility layer between MSVC and the WebRTC
-    // headers.
-    typedef signed char         int8_t;
-    typedef signed short        int16_t;
-    typedef signed int          int32_t;
-    typedef signed long long    int64_t;
-    typedef unsigned char       uint8_t;
-    typedef unsigned short      uint16_t;
-    typedef unsigned int        uint32_t;
-    typedef unsigned long long  uint64_t;
+// Define C99 equivalent types, since MSVC doesn't provide stdint.h.
+typedef signed char         int8_t;
+typedef signed short        int16_t;
+typedef signed int          int32_t;
+typedef __int64             int64_t;
+typedef unsigned char       uint8_t;
+typedef unsigned short      uint16_t;
+typedef unsigned int        uint32_t;
+typedef unsigned __int64    uint64_t;
 #endif
 
-#if defined(WIN32)
-    typedef __int64             WebRtc_Word64;
-    typedef unsigned __int64    WebRtc_UWord64;
-#else
-    typedef int64_t             WebRtc_Word64;
-    typedef uint64_t            WebRtc_UWord64;
-#endif
-    typedef int32_t             WebRtc_Word32;
-    typedef uint32_t            WebRtc_UWord32;
-    typedef int16_t             WebRtc_Word16;
-    typedef uint16_t            WebRtc_UWord16;
-    typedef char                WebRtc_Word8;
-    typedef uint8_t             WebRtc_UWord8;
-
-    // Define endian for the platform
-    #define WEBRTC_LITTLE_ENDIAN
-
-#elif defined(WEBRTC_TARGET_MAC_INTEL)
-    #include <stdint.h>
-
-    typedef int64_t             WebRtc_Word64;
-    typedef uint64_t            WebRtc_UWord64;
-    typedef int32_t             WebRtc_Word32;
-    typedef uint32_t            WebRtc_UWord32;
-    typedef int16_t             WebRtc_Word16;
-    typedef char                WebRtc_Word8;
-    typedef uint16_t            WebRtc_UWord16;
-    typedef uint8_t             WebRtc_UWord8;
-
-    // Define endian for the platform
-    #define WEBRTC_LITTLE_ENDIAN
-
-#else
-    #error "No platform defined for WebRTC type definitions (typedefs.h)"
-#endif
+// TODO(andrew): remove WebRtc_ types:
+// http://code.google.com/p/webrtc/issues/detail?id=314
+typedef int8_t              WebRtc_Word8;
+typedef int16_t             WebRtc_Word16;
+typedef int32_t             WebRtc_Word32;
+typedef int64_t             WebRtc_Word64;
+typedef uint8_t             WebRtc_UWord8;
+typedef uint16_t            WebRtc_UWord16;
+typedef uint32_t            WebRtc_UWord32;
+typedef uint64_t            WebRtc_UWord64;
 
 #endif  // WEBRTC_TYPEDEFS_H_
diff --git a/src/video_engine/OWNERS b/src/video_engine/OWNERS
new file mode 100644
index 0000000..ac607bd
--- /dev/null
+++ b/src/video_engine/OWNERS
@@ -0,0 +1,4 @@
+mflodman@webrtc.org
+perkj@webrtc.org
+wu@webrtc.org
+mallinath@webrtc.org
diff --git a/src/video_engine/include/vie_base.h b/src/video_engine/include/vie_base.h
new file mode 100644
index 0000000..5c7759d
--- /dev/null
+++ b/src/video_engine/include/vie_base.h
@@ -0,0 +1,149 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - Creating and deleting VideoEngine instances.
+//  - Creating and deleting channels.
+//  - Connect a video channel with a corresponding voice channel for audio/video
+//    synchronization.
+//  - Start and stop sending and receiving.
+
+#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_BASE_H_
+#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_BASE_H_
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+// Class used for all callbacks from ViEBase.
+class WEBRTC_DLLEXPORT ViEBaseObserver {
+ public:
+  // This method will be called periodically if the average system CPU usage
+  // exceeds 75%.
+  virtual void PerformanceAlarm(const unsigned int cpu_load) = 0;
+
+ protected:
+  virtual ~ViEBaseObserver() {}
+};
+
+class WEBRTC_DLLEXPORT VideoEngine {
+ public:
+  // Creates a VideoEngine object, which can then be used to acquire sub‐APIs.
+  static VideoEngine* Create();
+
+  // Deletes a VideoEngine instance.
+  static bool Delete(VideoEngine*& video_engine);
+
+  // Specifies the amount and type of trace information, which will be created
+  // by the VideoEngine.
+  static int SetTraceFilter(const unsigned int filter);
+
+  // Sets the name of the trace file and enables non‐encrypted trace messages.
+  static int SetTraceFile(const char* file_nameUTF8,
+                          const bool add_file_counter = false);
+
+  // Installs the TraceCallback implementation to ensure that the VideoEngine
+  // user receives callbacks for generated trace messages.
+  static int SetTraceCallback(TraceCallback* callback);
+
+  // Android specific.
+  // Provides VideoEngine with pointers to objects supplied by the Java
+  // applications JNI interface.
+  static int SetAndroidObjects(void* java_vm, void* java_context);
+
+ protected:
+  VideoEngine() {}
+  virtual ~VideoEngine() {}
+};
+
+class WEBRTC_DLLEXPORT ViEBase {
+ public:
+  // Factory for the ViEBase sub‐API and increases an internal reference
+  // counter if successful. Returns NULL if the API is not supported or if
+  // construction fails.
+  static ViEBase* GetInterface(VideoEngine* video_engine);
+
+  // Releases the ViEBase sub-API and decreases an internal reference counter.
+  // Returns the new reference count. This value should be zero
+  // for all sub-API:s before the VideoEngine object can be safely deleted.
+  virtual int Release() = 0;
+
+  // Initiates all common parts of the VideoEngine.
+  virtual int Init() = 0;
+
+  // Connects a VideoEngine instance to a VoiceEngine instance for audio video
+  // synchronization.
+  virtual int SetVoiceEngine(VoiceEngine* voice_engine) = 0;
+
+  // Creates a new channel.
+  virtual int CreateChannel(int& video_channel) = 0;
+
+  // Creates a new channel grouped together with |original_channel|. The channel
+  // can both send and receive video. It is assumed the channel is sending
+  // and/or receiving video to the same end-point.
+  // Note: |CreateReceiveChannel| will give better performance and network
+  // properties for receive only channels.
+  virtual int CreateChannel(int& video_channel,
+                            int original_channel) = 0;
+
+  // Creates a new channel grouped together with |original_channel|. The channel
+  // can only receive video and it is assumed the remote end-point is the same
+  // as for |original_channel|.
+  virtual int CreateReceiveChannel(int& video_channel,
+                                   int original_channel) = 0;
+
+  // Deletes an existing channel and releases the utilized resources.
+  virtual int DeleteChannel(const int video_channel) = 0;
+
+  // Specifies the VoiceEngine and VideoEngine channel pair to use for
+  // audio/video synchronization.
+  virtual int ConnectAudioChannel(const int video_channel,
+                                  const int audio_channel) = 0;
+
+  // Disconnects a previously paired VideoEngine and VoiceEngine channel pair.
+  virtual int DisconnectAudioChannel(const int video_channel) = 0;
+
+  // Starts sending packets to an already specified IP address and port number
+  // for a specified channel.
+  virtual int StartSend(const int video_channel) = 0;
+
+  // Stops packets from being sent for a specified channel.
+  virtual int StopSend(const int video_channel) = 0;
+
+  // Prepares VideoEngine for receiving packets on the specified channel.
+  virtual int StartReceive(const int video_channel) = 0;
+
+  // Stops receiving incoming RTP and RTCP packets on the specified channel.
+  virtual int StopReceive(const int video_channel) = 0;
+
+  // Registers an instance of a user implementation of the ViEBase
+  // observer.
+  virtual int RegisterObserver(ViEBaseObserver& observer) = 0;
+
+  // Removes an already registered instance of ViEBaseObserver.
+  virtual int DeregisterObserver() = 0;
+
+  // Retrieves the version information for VideoEngine and its components.
+  virtual int GetVersion(char version[1024]) = 0;
+
+  // Returns the last VideoEngine error code.
+  virtual int LastError() = 0;
+
+ protected:
+  ViEBase() {}
+  virtual ~ViEBase() {}
+};
+
+}  // namespace webrtc
+
+#endif  // #define WEBRTC_VIDEO_ENGINE_MAIN_INTERFACE_VIE_BASE_H_
diff --git a/src/video_engine/include/vie_capture.h b/src/video_engine/include/vie_capture.h
new file mode 100644
index 0000000..45d62e5
--- /dev/null
+++ b/src/video_engine/include/vie_capture.h
@@ -0,0 +1,255 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - Allocating capture devices.
+//  - Connect a capture device with one or more channels.
+//  - Start and stop capture devices.
+//  - Getting capture device capabilities.
+
+#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_CAPTURE_H_
+#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_CAPTURE_H_
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VideoEngine;
+class VideoCaptureModule;
+
+// This structure describes one set of the supported capabilities for a capture
+// device.
+struct CaptureCapability {
+  unsigned int width;
+  unsigned int height;
+  unsigned int maxFPS;
+  RawVideoType rawType;
+  VideoCodecType codecType;
+  unsigned int expectedCaptureDelay;
+  bool interlaced;
+  CaptureCapability() {
+    width = 0;
+    height = 0;
+    maxFPS = 0;
+    rawType = kVideoI420;
+    codecType = kVideoCodecUnknown;
+    expectedCaptureDelay = 0;
+    interlaced = false;
+  }
+};
+
+// This enumerator tells the current brightness alarm mode.
+enum Brightness {
+  Normal = 0,
+  Bright = 1,
+  Dark = 2
+};
+
+// This enumerator describes the capture alarm mode.
+enum CaptureAlarm {
+  AlarmRaised = 0,
+  AlarmCleared = 1
+};
+
+enum RotateCapturedFrame {
+  RotateCapturedFrame_0 = 0,
+  RotateCapturedFrame_90 = 90,
+  RotateCapturedFrame_180 = 180,
+  RotateCapturedFrame_270 = 270
+};
+
+struct ViEVideoFrameI420 {
+  ViEVideoFrameI420() {
+    y_plane = NULL;
+    u_plane = NULL;
+    v_plane = NULL;
+    y_pitch = 0;
+    u_pitch = 0;
+    v_pitch = 0;
+    width = 0;
+    height = 0;
+  }
+
+  unsigned char* y_plane;
+  unsigned char* u_plane;
+  unsigned char* v_plane;
+
+  int y_pitch;
+  int u_pitch;
+  int v_pitch;
+
+  unsigned short width;
+  unsigned short height;
+};
+
+// This class declares an abstract interface to be used when implementing
+// a user-defined capture device. This interface is not meant to be
+// implemented by the user. Instead, the user should call AllocateCaptureDevice
+// in the ViECapture interface, which will create a suitable implementation.
+// The user should then call IncomingFrame in this interface to deliver
+// captured frames to the system.
+class WEBRTC_DLLEXPORT ViEExternalCapture {
+ public:
+  ViEExternalCapture() {}
+  virtual ~ViEExternalCapture() {}
+
+  // This method is called by the user to deliver a new captured frame to
+  // VideoEngine.
+  virtual int IncomingFrame(unsigned char* video_frame,
+                            unsigned int video_frame_length,
+                            unsigned short width,
+                            unsigned short height,
+                            RawVideoType video_type,
+                            unsigned long long capture_time = 0) = 0;
+
+  // This method is specifically for delivering a new captured I420 frame to
+  // VideoEngine.
+  virtual int IncomingFrameI420(
+      const ViEVideoFrameI420& video_frame,
+      unsigned long long capture_time = 0) = 0;
+};
+
+// This class declares an abstract interface for a user defined observer. It is
+// up to the VideoEngine user to implement a derived class which implements the
+// observer class. The observer is registered using RegisterObserver() and
+// deregistered using DeregisterObserver().
+class WEBRTC_DLLEXPORT ViECaptureObserver {
+ public:
+  // This method is called if a bright or dark captured image is detected.
+  virtual void BrightnessAlarm(const int capture_id,
+                               const Brightness brightness) = 0;
+
+  // This method is called periodically telling the capture device frame rate.
+  virtual void CapturedFrameRate(const int capture_id,
+                                 const unsigned char frame_rate) = 0;
+
+  // This method is called if the capture device stops delivering images to
+  // VideoEngine.
+  virtual void NoPictureAlarm(const int capture_id,
+                              const CaptureAlarm alarm) = 0;
+
+ protected:
+  virtual ~ViECaptureObserver() {}
+};
+
+class WEBRTC_DLLEXPORT ViECapture {
+ public:
+  // Factory for the ViECapture sub‐API and increases an internal reference
+  // counter if successful. Returns NULL if the API is not supported or if
+  // construction fails.
+  static ViECapture* GetInterface(VideoEngine* video_engine);
+
+  // Releases the ViECapture sub-API and decreases an internal reference
+  // counter.
+  // Returns the new reference count. This value should be zero
+  // for all sub-API:s before the VideoEngine object can be safely deleted.
+  virtual int Release() = 0;
+
+  // Gets the number of available capture devices.
+  virtual int NumberOfCaptureDevices() = 0;
+
+  // Gets the name and unique id of a capture device.
+  virtual int GetCaptureDevice(unsigned int list_number,
+                               char* device_nameUTF8,
+                               const unsigned int device_nameUTF8Length,
+                               char* unique_idUTF8,
+                               const unsigned int unique_idUTF8Length) = 0;
+
+  // Allocates a capture device to be used in VideoEngine.
+  virtual int AllocateCaptureDevice(const char* unique_idUTF8,
+                                    const unsigned int unique_idUTF8Length,
+                                    int& capture_id) = 0;
+
+  // Registers an external capture device to be used in VideoEngine
+  virtual int AllocateExternalCaptureDevice(
+      int& capture_id,
+      ViEExternalCapture *&external_capture) = 0;
+
+  // Use capture device using external capture module.
+  virtual int AllocateCaptureDevice(VideoCaptureModule& capture_module,
+                                    int& capture_id) = 0;
+
+  // Releases a capture device and makes it available for other applications.
+  virtual int ReleaseCaptureDevice(const int capture_id) = 0;
+
+  // This function connects a capture device with a channel. Multiple channels
+  // can be connected to the same capture device.
+  virtual int ConnectCaptureDevice(const int capture_id,
+                                   const int video_channel) = 0;
+
+  // Disconnects a capture device as input for a specified channel.
+  virtual int DisconnectCaptureDevice(const int video_channel) = 0;
+
+  // Makes a capture device start capturing video frames.
+  virtual int StartCapture(
+      const int capture_id,
+      const CaptureCapability& capture_capability = CaptureCapability()) = 0;
+
+  // Stops a started capture device from capturing video frames.
+  virtual int StopCapture(const int capture_id) = 0;
+
+  // Rotates captured frames before encoding and sending.
+  // Used on mobile devices with rotates cameras.
+  virtual int SetRotateCapturedFrames(const int capture_id,
+                                      const RotateCapturedFrame rotation) = 0;
+
+  // This function sets the expected delay from when a video frame is captured
+  // to when that frame is delivered to VideoEngine.
+  virtual int SetCaptureDelay(const int capture_id,
+                              const unsigned int capture_delay_ms) = 0;
+
+  // Returns the number of sets of capture capabilities the capture device
+  // supports.
+  virtual int NumberOfCapabilities(
+      const char* unique_id_utf8,
+      const unsigned int unique_id_utf8_length) = 0;
+
+  // Gets a set of capture capabilities for a specified capture device.
+  virtual int GetCaptureCapability(const char* unique_id_utf8,
+                                   const unsigned int unique_id_utf8_length,
+                                   const unsigned int capability_number,
+                                   CaptureCapability& capability) = 0;
+
+  // Displays the capture device property dialog box for the specified capture
+  // device. Windows only.
+  virtual int ShowCaptureSettingsDialogBox(
+      const char* unique_idUTF8,
+      const unsigned int unique_id_utf8_length,
+      const char* dialog_title,
+      void* parent_window = NULL,
+      const unsigned int x = 200,
+      const unsigned int y = 200) = 0;
+
+  // Gets the clockwise angle the frames from the camera must be rotated in
+  // order to display the frames correctly if the display is rotated in its
+  // natural orientation.
+  virtual int GetOrientation(const char* unique_id_utf8,
+                             RotateCapturedFrame& orientation) = 0;
+
+  // Enables brightness alarm detection and the brightness alarm callback.
+  virtual int EnableBrightnessAlarm(const int capture_id,
+                                    const bool enable) = 0;
+
+  // Registers an instance of a user implementation of the ViECaptureObserver.
+  virtual int RegisterObserver(const int capture_id,
+                               ViECaptureObserver& observer) = 0;
+
+  // Removes an already registered instance of ViECaptureObserver.
+  virtual int DeregisterObserver(const int capture_id) = 0;
+
+ protected:
+  ViECapture() {}
+  virtual ~ViECapture() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_CAPTURE_H_
diff --git a/src/video_engine/include/vie_codec.h b/src/video_engine/include/vie_codec.h
new file mode 100644
index 0000000..0535038
--- /dev/null
+++ b/src/video_engine/include/vie_codec.h
@@ -0,0 +1,176 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//  - Setting send and receive codecs.
+//  - Codec specific settings.
+//  - Key frame signaling.
+//  - Stream management settings.
+
+#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_CODEC_H_
+#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_CODEC_H_
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VideoEngine;
+struct VideoCodec;
+
+// This class declares an abstract interface for a user defined observer. It is
+// up to the VideoEngine user to implement a derived class which implements the
+// observer class. The observer is registered using RegisterEncoderObserver()
+// and deregistered using DeregisterEncoderObserver().
+class WEBRTC_DLLEXPORT ViEEncoderObserver {
+ public:
+  // This method is called once per second with the current encoded frame rate
+  // and bit rate.
+  virtual void OutgoingRate(const int video_channel,
+                            const unsigned int framerate,
+                            const unsigned int bitrate) = 0;
+ protected:
+  virtual ~ViEEncoderObserver() {}
+};
+
+// This class declares an abstract interface for a user defined observer. It is
+// up to the VideoEngine user to implement a derived class which implements the
+// observer class. The observer is registered using RegisterDecoderObserver()
+// and deregistered using DeregisterDecoderObserver().
+class WEBRTC_DLLEXPORT ViEDecoderObserver {
+ public:
+  // This method is called when a new incoming stream is detected, normally
+  // triggered by a new incoming SSRC or payload type.
+  virtual void IncomingCodecChanged(const int video_channel,
+                                    const VideoCodec& video_codec) = 0;
+
+  // This method is called once per second containing the frame rate and bit
+  // rate for the incoming stream
+  virtual void IncomingRate(const int video_channel,
+                            const unsigned int framerate,
+                            const unsigned int bitrate) = 0;
+
+  // This method is called when the decoder needs a new key frame from encoder
+  // on the sender.
+  virtual void RequestNewKeyFrame(const int video_channel) = 0;
+
+ protected:
+  virtual ~ViEDecoderObserver() {}
+};
+
+class WEBRTC_DLLEXPORT ViECodec {
+ public:
+  // Factory for the ViECodec sub‐API and increases an internal reference
+  // counter if successful. Returns NULL if the API is not supported or if
+  // construction fails.
+  static ViECodec* GetInterface(VideoEngine* video_engine);
+
+  // Releases the ViECodec sub-API and decreases an internal reference
+  // counter.
+  // Returns the new reference count. This value should be zero
+  // for all sub-API:s before the VideoEngine object can be safely deleted.
+  virtual int Release() = 0;
+
+  // Gets the number of available codecs for the VideoEngine build.
+  virtual int NumberOfCodecs() const = 0;
+
+  // Gets a VideoCodec struct for a codec containing the default configuration
+  // for that codec type.
+  virtual int GetCodec(const unsigned char list_number,
+                       VideoCodec& video_codec) const = 0;
+
+  // Sets the send codec to use for a specified channel.
+  virtual int SetSendCodec(const int video_channel,
+                           const VideoCodec& video_codec) = 0;
+
+  // Gets the current send codec settings.
+  virtual int GetSendCodec(const int video_channel,
+                           VideoCodec& video_codec) const = 0;
+
+  // Prepares VideoEngine to receive a certain codec type and setting for a
+  // specified payload type.
+  virtual int SetReceiveCodec(const int video_channel,
+                              const VideoCodec& video_codec) = 0;
+
+  // Gets the current receive codec.
+  virtual int GetReceiveCodec(const int video_channel,
+                              VideoCodec& video_codec) const = 0;
+
+  // This function is used to get codec configuration parameters to be
+  // signaled from the encoder to the decoder in the call setup.
+  virtual int GetCodecConfigParameters(
+      const int video_channel,
+      unsigned char config_parameters[kConfigParameterSize],
+      unsigned char& config_parameters_size) const = 0;
+
+  // Enables advanced scaling of the captured video stream if the stream
+  // differs from the send codec settings.
+  virtual int SetImageScaleStatus(const int video_channel,
+                                  const bool enable) = 0;
+
+  // Gets the number of sent key frames and number of sent delta frames.
+  virtual int GetSendCodecStastistics(const int video_channel,
+                                      unsigned int& key_frames,
+                                      unsigned int& delta_frames) const = 0;
+
+  // Gets the number of decoded key frames and number of decoded delta frames.
+  virtual int GetReceiveCodecStastistics(const int video_channel,
+                                         unsigned int& key_frames,
+                                         unsigned int& delta_frames) const = 0;
+
+  // Gets the bitrate targeted by the video codec rate control in kbit/s.
+  virtual int GetCodecTargetBitrate(const int video_channel,
+                                    unsigned int* bitrate) const = 0;
+
+  // Gets the number of packets discarded by the jitter buffer because they
+  // arrived too late.
+  virtual unsigned int GetDiscardedPackets(const int video_channel) const = 0;
+
+  // Enables key frame request callback in ViEDecoderObserver.
+  virtual int SetKeyFrameRequestCallbackStatus(const int video_channel,
+                                               const bool enable) = 0;
+
+  // Enables key frame requests for detected lost packets.
+  virtual int SetSignalKeyPacketLossStatus(
+      const int video_channel,
+      const bool enable,
+      const bool only_key_frames = false) = 0;
+
+  // Registers an instance of a user implementation of the ViEEncoderObserver.
+  virtual int RegisterEncoderObserver(const int video_channel,
+                                      ViEEncoderObserver& observer) = 0;
+
+  // Removes an already registered instance of ViEEncoderObserver.
+  virtual int DeregisterEncoderObserver(const int video_channel) = 0;
+
+  // Registers an instance of a user implementation of the ViEDecoderObserver.
+  virtual int RegisterDecoderObserver(const int video_channel,
+                                      ViEDecoderObserver& observer) = 0;
+
+  // Removes an already registered instance of ViEDecoderObserver.
+  virtual int DeregisterDecoderObserver(const int video_channel) = 0;
+
+  // This function forces the next encoded frame to be a key frame. This is
+  // normally used when the remote endpoint only supports out‐band key frame
+  // request.
+  virtual int SendKeyFrame(const int video_channel) = 0;
+
+  // This function makes the decoder wait for a key frame before starting to
+  // decode the incoming video stream.
+  virtual int WaitForFirstKeyFrame(const int video_channel,
+                                   const bool wait) = 0;
+
+ protected:
+  ViECodec() {}
+  virtual ~ViECodec() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_CODEC_H_
diff --git a/src/video_engine/include/vie_encryption.h b/src/video_engine/include/vie_encryption.h
new file mode 100644
index 0000000..8e7c955
--- /dev/null
+++ b/src/video_engine/include/vie_encryption.h
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//  - External encryption and decryption.
+
+#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_ENCRYPTION_H_
+#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_ENCRYPTION_H_
+
+#include "common_types.h"
+
+namespace webrtc {
+class VideoEngine;
+
+class WEBRTC_DLLEXPORT ViEEncryption {
+ public:
+  // Factory for the ViEEncryption sub‐API and increases an internal reference
+  // counter if successful. Returns NULL if the API is not supported or if
+  // construction fails.
+  static ViEEncryption* GetInterface(VideoEngine* video_engine);
+
+  // Releases the ViEEncryption sub-API and decreases an internal reference
+  // counter.
+  // Returns the new reference count. This value should be zero
+  // for all sub-API:s before the VideoEngine object can be safely deleted.
+  virtual int Release() = 0;
+
+  // This function registers a encryption derived instance and enables
+  // external encryption for the specified channel.
+  virtual int RegisterExternalEncryption(const int video_channel,
+                                         Encryption& encryption) = 0;
+
+  // This function deregisters a registered encryption derived instance
+  // and disables external encryption.
+  virtual int DeregisterExternalEncryption(const int video_channel) = 0;
+
+ protected:
+  ViEEncryption() {}
+  virtual ~ViEEncryption() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_ENCRYPTION_H_
diff --git a/src/video_engine/include/vie_errors.h b/src/video_engine/include/vie_errors.h
new file mode 100644
index 0000000..16c9299
--- /dev/null
+++ b/src/video_engine/include/vie_errors.h
@@ -0,0 +1,121 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_ERRORS_H_
+#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_ERRORS_H_
+
+enum ViEErrors {
+  // ViEBase.
+  kViENotInitialized = 12000,        // Init has not been called successfully.
+  kViEBaseVoEFailure,                // SetVoiceEngine. ViE failed to use VE instance. Check VE instance pointer.ConnectAudioChannel failed to set voice channel. Have SetVoiceEngine been called? Is the voice channel correct.
+  kViEBaseChannelCreationFailed,     // CreateChannel.
+  kViEBaseInvalidChannelId,          // The channel does not exist.
+  kViEAPIDoesNotExist,               // Release called on Interface that has not been created.
+  kViEBaseInvalidArgument,
+  kViEBaseAlreadySending,            // StartSend called on channel that is already sending.
+  kViEBaseNotSending,                // StopSend called on channel that is not sending.
+  kViEBaseReceiveOnlyChannel,        // Can't send on a receive only channel.
+  kViEBaseAlreadyReceiving,          // StartReceive called on channel that is already receiving.
+  kViEBaseObserverAlreadyRegistered,  // RegisterObserver- an observer has already been set.
+  kViEBaseObserverNotRegistered,     // DeregisterObserver - no observer has been registered.
+  kViEBaseUnknownError,              // An unknown error has occurred. Check the log file.
+
+  // ViECodec.
+  kViECodecInvalidArgument  = 12100,    // Wrong input parameter to function.
+  kViECodecObserverAlreadyRegistered,   // RegisterEncoderObserver, RegisterDecoderObserver.
+  kViECodecObserverNotRegistered,       // DeregisterEncoderObserver, DeregisterDecoderObserver.
+  kViECodecInvalidCodec,                // SetSendCodec,SetReceiveCodec- The codec structure is invalid.
+  kViECodecInvalidChannelId,            // The channel does not exist.
+  kViECodecInUse,                       // SetSendCodec- Can't change codec size or type when multiple channels use the same encoder.
+  kViECodecReceiveOnlyChannel,          // SetSendCodec, can't change receive only channel.
+  kViECodecUnknownError,                // An unknown error has occurred. Check the log file.
+
+  // ViERender.
+  kViERenderInvalidRenderId = 12200,  // No renderer with the ID exist. In AddRenderer - The render ID is invalid. No capture device, channel or file is allocated with that id.
+  kViERenderAlreadyExists,            // AddRenderer: the renderer already exist.
+  kViERenderInvalidFrameFormat,       // AddRender (external renderer). The user has requested a frame format that we don't support.
+  kViERenderUnknownError,             // An unknown error has occurred. Check the log file.
+
+  // ViECapture.
+  kViECaptureDeviceAlreadyConnected = 12300,  // ConnectCaptureDevice - A capture device has already been connected to this video channel.
+  kViECaptureDeviceDoesNotExist,              // No capture device exist with the provided capture id or unique name.
+  kViECaptureDeviceInvalidChannelId,          // ConnectCaptureDevice, DisconnectCaptureDevice- No Channel exist with the provided channel id.
+  kViECaptureDeviceNotConnected,              // DisconnectCaptureDevice- No capture device is connected to the channel.
+  kViECaptureDeviceNotStarted,                // Stop- The capture device is not started.
+  kViECaptureDeviceAlreadyStarted,            // Start- The capture device is already started.
+  kViECaptureDeviceAlreadyAllocated,          // AllocateCaptureDevice The device is already allocated.
+  kViECaptureDeviceMaxNoDevicesAllocated,     // AllocateCaptureDevice Max number of devices already allocated.
+  kViECaptureObserverAlreadyRegistered,       // RegisterObserver- An observer is already registered. Need to deregister first.
+  kViECaptureDeviceObserverNotRegistered,     // DeregisterObserver- No observer is registered.
+  kViECaptureDeviceUnknownError,              // An unknown error has occurred. Check the log file.
+  kViECaptureDeviceMacQtkitNotSupported,      // QTKit handles the capture devices automatically. Thus querying capture capabilities is not supported.
+
+  // ViEFile.
+  kViEFileInvalidChannelId  = 12400,  // No Channel exist with the provided channel id.
+  kViEFileInvalidArgument,            // Incorrect input argument
+  kViEFileAlreadyRecording,           // StartRecordOutgoingVideo - already recording channel
+  kViEFileVoENotSet,                  // StartRecordOutgoingVideo. Failed to access voice engine. Has SetVoiceEngine been called?
+  kViEFileNotRecording,               // StopRecordOutgoingVideo
+  kViEFileMaxNoOfFilesOpened,         // StartPlayFile
+  kViEFileNotPlaying,                 // StopPlayFile. The file with the provided id is not playing.
+  kViEFileObserverAlreadyRegistered,  // RegisterObserver
+  kViEFileObserverNotRegistered,      // DeregisterObserver
+  kViEFileInputAlreadyConnected,      // SendFileOnChannel- the video channel already have a connected input.
+  kViEFileNotConnected,               // StopSendFileOnChannel- No file is being sent on the channel.
+  kViEFileVoEFailure,                 // SendFileOnChannel,StartPlayAudioLocally - failed to play audio stream
+  kViEFileInvalidRenderId,            // SetRenderTimeoutImage and SetRenderStartImage: Renderer with the provided render id does not exist.
+  kViEFileInvalidFile,                // Can't open the file with provided filename. Is the path and file format correct?
+  kViEFileInvalidCapture,             // Can't use ViEPicture. Is the object correct?
+  kViEFileSetRenderTimeoutError,      // SetRenderTimeoutImage- Please see log file.
+  kViEFileInvalidCaptureId,           // SetCaptureDeviceImage capture id does not exist.
+  kViEFileSetCaptureImageError,       // SetCaptureDeviceImage error. Please see log file.
+  kViEFileSetStartImageError,         // SetRenderStartImage error. Please see log file.
+  kViEFileUnknownError,               // An unknown error has occurred. Check the log file.
+
+  // ViENetwork.
+  kViENetworkInvalidChannelId = 12500,   // No Channel exist with the provided channel id.
+  kViENetworkAlreadyReceiving,           // SetLocalReceiver: Can not change ports while receiving.
+  kViENetworkLocalReceiverNotSet,        // GetLocalReceiver: SetLocalReceiver not called.
+  kViENetworkAlreadySending,             // SetSendDestination
+  kViENetworkDestinationNotSet,          // GetSendDestination
+  kViENetworkInvalidArgument,            // GetLocalIP- Check function  arguments.
+  kViENetworkSendCodecNotSet,            // SetSendGQoS- Need to set the send codec first.
+  kViENetworkServiceTypeNotSupported,    // SetSendGQoS
+  kViENetworkNotSupported,               // SetSendGQoS Not supported on this OS.
+  kViENetworkObserverAlreadyRegistered,  // RegisterObserver
+  kViENetworkObserverNotRegistered,      // SetPeriodicDeadOrAliveStatus - Need to call RegisterObserver first, DeregisterObserver if no observer is registered.
+  kViENetworkUnknownError,               // An unknown error has occurred. Check the log file.
+
+  // ViERTP_RTCP.
+  kViERtpRtcpInvalidChannelId = 12600,   // No Channel exist with the provided channel id.
+  kViERtpRtcpAlreadySending,             // The channel is already sending. Need to stop send before calling this API.
+  kViERtpRtcpNotSending,                 // The channel needs to be sending in order for this function to work.
+  kViERtpRtcpRtcpDisabled,               // Functions failed because RTCP is disabled.
+  kViERtpRtcpObserverAlreadyRegistered,  // An observer is already registered. Need to deregister the old first.
+  kViERtpRtcpObserverNotRegistered,      // No observer registered.
+  kViERtpRtcpUnknownError,               // An unknown error has occurred. Check the log file.
+
+  // ViEEncryption.
+  kViEEncryptionInvalidChannelId = 12700,  // Channel id does not exist.
+  kViEEncryptionInvalidSrtpParameter,      // EnableSRTPSend, EnableSRTPReceive-  Check the SRTP parameters.
+  kViEEncryptionSrtpNotSupported,          // This build does not support SRTP.
+  kViEEncryptionUnknownError,              // An unknown error has occurred. Check the log file.
+
+  // ViEImageProcess.
+  kViEImageProcessInvalidChannelId  = 12800,  // No Channel exist with the provided channel id.
+  kViEImageProcessInvalidCaptureId,          // No capture device exist with the provided capture id.
+  kViEImageProcessFilterExists,              // RegisterCaptureEffectFilter,RegisterSendEffectFilter,RegisterRenderEffectFilter - Effect filter already registered.
+  kViEImageProcessFilterDoesNotExist,        // DeRegisterCaptureEffectFilter,DeRegisterSendEffectFilter,DeRegisterRenderEffectFilter - Effect filter not registered.
+  kViEImageProcessAlreadyEnabled,            // EnableDeflickering,EnableDenoising,EnableColorEnhancement- Function already enabled.
+  kViEImageProcessAlreadyDisabled,           // EnableDeflickering,EnableDenoising,EnableColorEnhancement- Function already disabled.
+  kViEImageProcessUnknownError               // An unknown error has occurred. Check the log file.
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_ERRORS_H_
diff --git a/src/video_engine/include/vie_external_codec.h b/src/video_engine/include/vie_external_codec.h
new file mode 100644
index 0000000..a2e686a
--- /dev/null
+++ b/src/video_engine/include/vie_external_codec.h
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_EXTERNAL_CODEC_H_
+#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_EXTERNAL_CODEC_H_
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VideoDecoder;
+class VideoEncoder;
+class VideoEngine;
+
+class WEBRTC_DLLEXPORT ViEExternalCodec {
+ public:
+  static ViEExternalCodec* GetInterface(VideoEngine* video_engine);
+
+  virtual int Release() = 0;
+
+  virtual int RegisterExternalSendCodec(const int video_channel,
+                                        const unsigned char pl_type,
+                                        VideoEncoder* encoder) = 0;
+
+  virtual int DeRegisterExternalSendCodec(const int video_channel,
+                                          const unsigned char pl_type) = 0;
+
+  virtual int RegisterExternalReceiveCodec(const int video_channel,
+                                           const unsigned int pl_type,
+                                           VideoDecoder* decoder,
+                                           bool decoder_render = false,
+                                           int render_delay = 0) = 0;
+
+  virtual int DeRegisterExternalReceiveCodec(const int video_channel,
+                                             const unsigned char pl_type) = 0;
+
+ protected:
+  ViEExternalCodec() {}
+  virtual ~ViEExternalCodec() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_EXTERNAL_CODEC_H_
diff --git a/src/video_engine/include/vie_file.h b/src/video_engine/include/vie_file.h
new file mode 100644
index 0000000..6a521cf
--- /dev/null
+++ b/src/video_engine/include/vie_file.h
@@ -0,0 +1,228 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//  - File recording and playing.
+//  - Snapshots.
+//  - Background images.
+
+#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_FILE_H_
+#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_FILE_H_
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VideoEngine;
+struct VideoCodec;
+
+// This structure contains picture data and describes the picture type.
+struct ViEPicture {
+  unsigned char* data;
+  unsigned int size;
+  unsigned int width;
+  unsigned int height;
+  RawVideoType type;
+
+  ViEPicture() {
+    data = NULL;
+    size = 0;
+    width = 0;
+    height = 0;
+    type = kVideoI420;
+  }
+
+  // Call FreePicture to free data.
+  ~ViEPicture() {
+    data = NULL;
+    size = 0;
+    width = 0;
+    height = 0;
+    type = kVideoUnknown;
+  }
+};
+
+// This enumerator tells which audio source to use for media files.
+enum AudioSource {
+  NO_AUDIO,
+  MICROPHONE,
+  PLAYOUT,
+  VOICECALL
+};
+
+// This class declares an abstract interface for a user defined observer. It is
+// up to the VideoEngine user to implement a derived class which implements the
+// observer class. The observer is registered using RegisterObserver() and
+// deregistered using DeregisterObserver().
+class WEBRTC_DLLEXPORT ViEFileObserver {
+ public:
+  // This method is called when the end is reached of a played file.
+  virtual void PlayFileEnded(const WebRtc_Word32 file_id) = 0;
+
+ protected:
+  virtual ~ViEFileObserver() {}
+};
+
+class WEBRTC_DLLEXPORT ViEFile {
+ public:
+  // Factory for the ViEFile sub‐API and increases an internal reference
+  // counter if successful. Returns NULL if the API is not supported or if
+  // construction fails.
+  static ViEFile* GetInterface(VideoEngine* video_engine);
+
+  // Releases the ViEFile sub-API and decreases an internal reference counter.
+  // Returns the new reference count. This value should be zero
+  // for all sub-API:s before the VideoEngine object can be safely deleted.
+  virtual int Release() = 0;
+
+  // Starts playing a video file.
+  virtual int StartPlayFile(
+      const char* file_name_utf8,
+      int& file_id,
+      const bool loop = false,
+      const FileFormats file_format = kFileFormatAviFile) = 0;
+
+  // Stops a file from being played.
+  virtual int StopPlayFile(const int file_id) = 0;
+
+  // Registers an instance of a user implementation of the ViEFileObserver.
+  virtual int RegisterObserver(int file_id, ViEFileObserver& observer) = 0;
+
+  // Removes an already registered instance of ViEFileObserver.
+  virtual int DeregisterObserver(int file_id, ViEFileObserver& observer) = 0;
+
+  // This function tells which channel, if any, the file should be sent on.
+  virtual int SendFileOnChannel(const int file_id, const int video_channel) = 0;
+
+  // Stops a file from being sent on a a channel.
+  virtual int StopSendFileOnChannel(const int video_channel) = 0;
+
+  // Starts playing the file audio as microphone input for the specified voice
+  // channel.
+  virtual int StartPlayFileAsMicrophone(const int file_id,
+                                        const int audio_channel,
+                                        bool mix_microphone = false,
+                                        float volume_scaling = 1) = 0;
+
+  // The function stop the audio from being played on a VoiceEngine channel.
+  virtual int StopPlayFileAsMicrophone(const int file_id,
+                                       const int audio_channel) = 0;
+
+  // The function plays and mixes the file audio with the local speaker signal
+  // for playout.
+  virtual int StartPlayAudioLocally(const int file_id, const int audio_channel,
+                                    float volume_scaling = 1) = 0;
+
+  // Stops the audio from a file from being played locally.
+  virtual int StopPlayAudioLocally(const int file_id,
+                                   const int audio_channel) = 0;
+
+  // This function starts recording the video transmitted to another endpoint.
+  virtual int StartRecordOutgoingVideo(
+      const int video_channel,
+      const char* file_name_utf8,
+      AudioSource audio_source,
+      const CodecInst& audio_codec,
+      const VideoCodec& video_codec,
+      const FileFormats file_format = kFileFormatAviFile) = 0;
+
+  // This function starts recording the incoming video stream on a channel.
+  virtual int StartRecordIncomingVideo(
+      const int video_channel,
+      const char* file_name_utf8,
+      AudioSource audio_source,
+      const CodecInst& audio_codec,
+      const VideoCodec& video_codec,
+      const FileFormats file_format = kFileFormatAviFile) = 0;
+
+  // Stops the file recording of the outgoing stream.
+  virtual int StopRecordOutgoingVideo(const int video_channel) = 0;
+
+  // Stops the file recording of the incoming stream.
+  virtual int StopRecordIncomingVideo(const int video_channel) = 0;
+
+  // Gets the audio codec, video codec and file format of a recorded file.
+  virtual int GetFileInformation(
+      const char* file_name,
+      VideoCodec& video_codec,
+      CodecInst& audio_codec,
+      const FileFormats file_format = kFileFormatAviFile) = 0;
+
+  // The function takes a snapshot of the last rendered image for a video
+  // channel.
+  virtual int GetRenderSnapshot(const int video_channel,
+                                const char* file_name_utf8) = 0;
+
+  // The function takes a snapshot of the last rendered image for a video
+  // channel
+  virtual int GetRenderSnapshot(const int video_channel,
+                                ViEPicture& picture) = 0;
+
+  // The function takes a snapshot of the last captured image by a specified
+  // capture device.
+  virtual int GetCaptureDeviceSnapshot(const int capture_id,
+                                       const char* file_name_utf8) = 0;
+
+  // The function takes a snapshot of the last captured image by a specified
+  // capture device.
+  virtual int GetCaptureDeviceSnapshot(const int capture_id,
+                                       ViEPicture& picture) = 0;
+
+  // This function sets a jpg image to show before the first frame is captured
+  // by the capture device. This frame will be encoded and transmitted to a
+  // possible receiver
+  virtual int SetCaptureDeviceImage(const int capture_id,
+                                    const char* file_name_utf8) = 0;
+
+  // This function sets an image to show before the first frame is captured by
+  // the capture device. This frame will be encoded and transmitted to a
+  // possible receiver
+  virtual int SetCaptureDeviceImage(const int capture_id,
+                                    const ViEPicture& picture) = 0;
+
+  virtual int FreePicture(ViEPicture& picture) = 0;
+
+  // This function sets a jpg image to render before the first received video
+  // frame is decoded for a specified channel.
+  virtual int SetRenderStartImage(const int video_channel,
+                                  const char* file_name_utf8) = 0;
+
+  // This function sets an image to render before the first received video
+  // frame is decoded for a specified channel.
+  virtual int SetRenderStartImage(const int video_channel,
+                                  const ViEPicture& picture) = 0;
+
+  // This function sets a jpg image to render if no frame is decoded for a
+  // specified time interval.
+  virtual int SetRenderTimeoutImage(const int video_channel,
+                                    const char* file_name_utf8,
+                                    const unsigned int timeout_ms = 1000) = 0;
+
+  // This function sets an image to render if no frame is decoded for a
+  // specified time interval.
+  virtual int SetRenderTimeoutImage(const int video_channel,
+                                    const ViEPicture& picture,
+                                    const unsigned int timeout_ms) = 0;
+
+  // Enables recording of debugging information.
+  virtual int StartDebugRecording(int video_channel,
+                                  const char* file_name_utf8) = 0;
+  // Disables recording of debugging information.
+  virtual int StopDebugRecording(int video_channel) = 0;
+
+
+ protected:
+  ViEFile() {}
+  virtual ~ViEFile() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_FILE_H_
diff --git a/src/video_engine/include/vie_image_process.h b/src/video_engine/include/vie_image_process.h
new file mode 100644
index 0000000..dfad08d
--- /dev/null
+++ b/src/video_engine/include/vie_image_process.h
@@ -0,0 +1,101 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//  - Effect filters
+//  - Deflickering
+//  - Denoising
+//  - Color enhancement
+
+#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_IMAGE_PROCESS_H_
+#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_IMAGE_PROCESS_H_
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VideoEngine;
+
+// This class declares an abstract interface for a user defined effect filter.
+// The effect filter is registered using RegisterCaptureEffectFilter(),
+// RegisterSendEffectFilter() or RegisterRenderEffectFilter() and deregistered
+// with the corresponding deregister function.
+class WEBRTC_DLLEXPORT ViEEffectFilter {
+ public:
+  // This method is called with an I420 video frame allowing the user to
+  // modify the video frame.
+  virtual int Transform(int size,
+                        unsigned char* frame_buffer,
+                        unsigned int time_stamp90KHz,
+                        unsigned int width,
+                        unsigned int height) = 0;
+ protected:
+  ViEEffectFilter() {}
+  virtual ~ViEEffectFilter() {}
+};
+
+class WEBRTC_DLLEXPORT ViEImageProcess {
+ public:
+  // Factory for the ViEImageProcess sub‐API and increases an internal
+  // reference counter if successful. Returns NULL if the API is not supported
+  // or if construction fails.
+  static ViEImageProcess* GetInterface(VideoEngine* video_engine);
+
+  // Releases the ViEImageProcess sub-API and decreases an internal reference
+  // counter. Returns the new reference count. This value should be zero
+  // for all sub-API:s before the VideoEngine object can be safely deleted.
+  virtual int Release() = 0;
+
+  // This function registers a EffectFilter to use for a specified capture
+  // device.
+  virtual int RegisterCaptureEffectFilter(const int capture_id,
+                                          ViEEffectFilter& capture_filter) = 0;
+
+  // This function deregisters a EffectFilter for a specified capture device.
+  virtual int DeregisterCaptureEffectFilter(const int capture_id) = 0;
+
+  // This function registers an EffectFilter to use for a specified channel.
+  virtual int RegisterSendEffectFilter(const int video_channel,
+                                       ViEEffectFilter& send_filter) = 0;
+
+  // This function deregisters a send effect filter for a specified channel.
+  virtual int DeregisterSendEffectFilter(const int video_channel) = 0;
+
+  // This function registers a EffectFilter to use for the rendered video
+  // stream on an incoming channel.
+  virtual int RegisterRenderEffectFilter(const int video_channel,
+                                         ViEEffectFilter& render_filter) = 0;
+
+  // This function deregisters a render effect filter for a specified channel.
+  virtual int DeregisterRenderEffectFilter(const int video_channel) = 0;
+
+  // All cameras run the risk of getting in almost perfect sync with
+  // florescent lamps, which will result in a very annoying flickering of the
+  // image. Most cameras have some type of filter to protect against this but
+  // not all of them succeed. Enabling this function will remove the flicker.
+  virtual int EnableDeflickering(const int capture_id, const bool enable) = 0;
+
+  // Some cameras produce very noisy captured images, especially in low‐light
+  // conditions. This functionality will reduce the camera noise.
+  virtual int EnableDenoising(const int capture_id, const bool enable) = 0;
+
+  // This function enhances the colors on the decoded video stream, enabled by
+  // default.
+  virtual int EnableColorEnhancement(const int video_channel,
+                                     const bool enable) = 0;
+
+ protected:
+  ViEImageProcess() {}
+  virtual ~ViEImageProcess() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_IMAGE_PROCESS_H_
diff --git a/src/video_engine/include/vie_network.h b/src/video_engine/include/vie_network.h
new file mode 100644
index 0000000..9752008
--- /dev/null
+++ b/src/video_engine/include/vie_network.h
@@ -0,0 +1,215 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_NETWORK_H_
+#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_NETWORK_H_
+
+// This sub-API supports the following functionalities:
+//  - Configuring send and receive addresses.
+//  - External transport support.
+//  - Port and address filters.
+//  - Windows GQoS functions and ToS functions.
+//  - Packet timeout notification.
+//  - Dead‐or‐Alive connection observations.
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class Transport;
+class VideoEngine;
+
+// This enumerator describes VideoEngine packet timeout states.
+enum ViEPacketTimeout {
+  NoPacket = 0,
+  PacketReceived = 1
+};
+
+// This class declares an abstract interface for a user defined observer. It is
+// up to the VideoEngine user to implement a derived class which implements the
+// observer class. The observer is registered using RegisterObserver() and
+// deregistered using DeregisterObserver().
+class WEBRTC_DLLEXPORT ViENetworkObserver {
+ public:
+  // This method will be called periodically delivering a dead‐or‐alive
+  // decision for a specified channel.
+  virtual void OnPeriodicDeadOrAlive(const int video_channel,
+                                     const bool alive) = 0;
+
+  // This method is called once if a packet timeout occurred.
+  virtual void PacketTimeout(const int video_channel,
+                             const ViEPacketTimeout timeout) = 0;
+ protected:
+  virtual ~ViENetworkObserver() {}
+};
+
+class WEBRTC_DLLEXPORT ViENetwork {
+ public:
+  // Default values.
+  enum { KDefaultSampleTimeSeconds = 2 };
+
+  // Factory for the ViENetwork sub‐API and increases an internal reference
+  // counter if successful. Returns NULL if the API is not supported or if
+  // construction fails.
+  static ViENetwork* GetInterface(VideoEngine* video_engine);
+
+  // Releases the ViENetwork sub-API and decreases an internal reference
+  // counter.Returns the new reference count. This value should be zero
+  // for all sub-API:s before the VideoEngine object can be safely deleted.
+  virtual int Release() = 0;
+
+  // Specifies the ports to receive RTP packets on. It is also possible to set
+  // port for RTCP and local IP address.
+  virtual int SetLocalReceiver(const int video_channel,
+                               const unsigned short rtp_port,
+                               const unsigned short rtcp_port = 0,
+                               const char* ip_address = NULL) = 0;
+
+  // Gets the local receiver ports and address for a specified channel.
+  virtual int GetLocalReceiver(const int video_channel,
+                               unsigned short& rtp_port,
+                               unsigned short& rtcp_port, char* ip_address) = 0;
+
+  // Specifies the destination port and IP address for a specified channel.
+  virtual int SetSendDestination(const int video_channel,
+                                 const char* ip_address,
+                                 const unsigned short rtp_port,
+                                 const unsigned short rtcp_port = 0,
+                                 const unsigned short source_rtp_port = 0,
+                                 const unsigned short source_rtcp_port = 0) = 0;
+
+  // Get the destination port and address for a specified channel.
+  virtual int GetSendDestination(const int video_channel,
+                                 char* ip_address,
+                                 unsigned short& rtp_port,
+                                 unsigned short& rtcp_port,
+                                 unsigned short& source_rtp_port,
+                                 unsigned short& source_rtcp_port) = 0;
+
+  // This function registers a user implementation of Transport to use for
+  // sending RTP and RTCP packets on this channel.
+  virtual int RegisterSendTransport(const int video_channel,
+                                    Transport& transport) = 0;
+
+  // This function deregisters a used Transport for a specified channel.
+  virtual int DeregisterSendTransport(const int video_channel) = 0;
+
+  // When using external transport for a channel, received RTP packets should
+  // be passed to VideoEngine using this function. The input should contain
+  // the RTP header and payload.
+  virtual int ReceivedRTPPacket(const int video_channel,
+                                const void* data,
+                                const int length) = 0;
+
+  // When using external transport for a channel, received RTCP packets should
+  // be passed to VideoEngine using this function.
+  virtual int ReceivedRTCPPacket(const int video_channel,
+                                 const void* data,
+                                 const int length) = 0;
+
+  // Gets the source ports and IP address of the incoming stream for a
+  // specified channel.
+  virtual int GetSourceInfo(const int video_channel,
+                            unsigned short& rtp_port,
+                            unsigned short& rtcp_port,
+                            char* ip_address,
+                            unsigned int ip_address_length) = 0;
+
+  // Gets the local IP address, in string format.
+  virtual int GetLocalIP(char ip_address[64], bool ipv6 = false) = 0;
+
+  // Enables IPv6, instead of IPv4, for a specified channel.
+  virtual int EnableIPv6(int video_channel) = 0;
+
+  // The function returns true if IPv6 is enabled, false otherwise.
+  virtual bool IsIPv6Enabled(int video_channel) = 0;
+
+  // Enables a port and IP address filtering for incoming packets on a
+  // specific channel.
+  virtual int SetSourceFilter(const int video_channel,
+                              const unsigned short rtp_port,
+                              const unsigned short rtcp_port = 0,
+                              const char* ip_address = NULL) = 0;
+
+  // Gets current port and IP address filter for a specified channel.
+  virtual int GetSourceFilter(const int video_channel,
+                              unsigned short& rtp_port,
+                              unsigned short& rtcp_port,
+                              char* ip_address) = 0;
+
+  // This function sets the six‐bit Differentiated Services Code Point (DSCP)
+  // in the IP header of the outgoing stream for a specific channel.
+  // Windows and Linux only.
+  virtual int SetSendToS(const int video_channel,
+                         const int DSCP,
+                         const bool use_set_sockOpt = false) = 0;
+
+  // Retrieves the six‐bit Differentiated Services Code Point (DSCP) in the IP
+  // header of the outgoing stream for a specific channel.
+  virtual int GetSendToS(const int video_channel,
+                         int& DSCP,
+                         bool& use_set_sockOpt) = 0;
+
+  // This function sets the Generic Quality of Service (GQoS) service level.
+  // The Windows operating system then maps to a Differentiated Services Code
+  // Point (DSCP) and to an 802.1p setting. Windows only.
+  virtual int SetSendGQoS(const int video_channel, const bool enable,
+                          const int service_type,
+                          const int overrideDSCP = 0) = 0;
+
+  // This function retrieves the currently set GQoS service level for a
+  // specific channel.
+  virtual int GetSendGQoS(const int video_channel,
+                          bool& enabled,
+                          int& service_type,
+                          int& overrideDSCP) = 0;
+
+  // This function sets the Maximum Transition Unit (MTU) for a channel. The
+  // RTP packet will be packetized based on this MTU to optimize performance
+  // over the network.
+  virtual int SetMTU(int video_channel, unsigned int mtu) = 0;
+
+  // This function enables or disables warning reports if packets have not
+  // been received for a specified time interval.
+  virtual int SetPacketTimeoutNotification(const int video_channel,
+                                           bool enable,
+                                           int timeout_seconds) = 0;
+
+  // Registers an instance of a user implementation of the ViENetwork
+  // observer.
+  virtual int RegisterObserver(const int video_channel,
+                               ViENetworkObserver& observer) = 0;
+
+  // Removes a registered instance of ViENetworkObserver.
+  virtual int DeregisterObserver(const int video_channel) = 0;
+
+  // This function enables or disables the periodic dead‐or‐alive callback
+  // functionality for a specified channel.
+  virtual int SetPeriodicDeadOrAliveStatus(
+      const int video_channel,
+      const bool enable,
+      const unsigned int sample_time_seconds = KDefaultSampleTimeSeconds) = 0;
+
+  // This function handles sending a raw UDP data packet over an existing RTP
+  // or RTCP socket.
+  virtual int SendUDPPacket(const int video_channel,
+                            const void* data,
+                            const unsigned int length,
+                            int& transmitted_bytes,
+                            bool use_rtcp_socket = false) = 0;
+
+ protected:
+  ViENetwork() {}
+  virtual ~ViENetwork() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_NETWORK_H_
diff --git a/src/video_engine/include/vie_render.h b/src/video_engine/include/vie_render.h
new file mode 100644
index 0000000..0b8328e
--- /dev/null
+++ b/src/video_engine/include/vie_render.h
@@ -0,0 +1,110 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//  - Specify render destinations for incoming video streams, capture devices
+//    and files.
+//  - Configuring render streams.
+
+#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_RENDER_H_
+#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_RENDER_H_
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VideoEngine;
+class VideoRender;
+
+// This class declares an abstract interface to be used for external renderers.
+// The user implemented derived class is registered using AddRenderer().
+class WEBRTC_DLLEXPORT ExternalRenderer {
+ public:
+  // This method will be called when the stream to be rendered changes in
+  // resolution or number of streams mixed in the image.
+  virtual int FrameSizeChange(unsigned int width,
+                              unsigned int height,
+                              unsigned int number_of_streams) = 0;
+
+  // This method is called when a new frame should be rendered.
+  virtual int DeliverFrame(unsigned char* buffer,
+                           int buffer_size,
+                           // RTP timestamp in 90kHz.
+                           uint32_t time_stamp,
+                           // Wallclock render time in miliseconds
+                           int64_t render_time) = 0;
+
+ protected:
+  virtual ~ExternalRenderer() {}
+};
+
+class WEBRTC_DLLEXPORT ViERender {
+ public:
+  // Factory for the ViERender sub‐API and increases an internal reference
+  // counter if successful. Returns NULL if the API is not supported or if
+  // construction fails.
+  static ViERender* GetInterface(VideoEngine* video_engine);
+
+  // Releases the ViERender sub-API and decreases an internal reference
+  // counter. Returns the new reference count. This value should be zero
+  // for all sub-API:s before the VideoEngine object can be safely deleted.
+  virtual int Release() = 0;
+
+  // Registers render module.
+  virtual int RegisterVideoRenderModule(VideoRender& render_module) = 0;
+
+  // Deregisters render module.
+  virtual int DeRegisterVideoRenderModule(VideoRender& render_module) = 0;
+
+  // Sets the render destination for a given render ID.
+  virtual int AddRenderer(const int render_id,
+                          void* window,
+                          const unsigned int z_order,
+                          const float left,
+                          const float top,
+                          const float right,
+                          const float bottom) = 0;
+
+  // Removes the renderer for a stream.
+  virtual int RemoveRenderer(const int render_id) = 0;
+
+  // Starts rendering a render stream.
+  virtual int StartRender(const int render_id) = 0;
+
+  // Stops rendering a render stream.
+  virtual int StopRender(const int render_id) = 0;
+
+  // Configures an already added render stream.
+  virtual int ConfigureRender(int render_id,
+                              const unsigned int z_order,
+                              const float left,
+                              const float top,
+                              const float right,
+                              const float bottom) = 0;
+
+  // This function mirrors the rendered stream left and right or up and down.
+  virtual int MirrorRenderStream(const int render_id,
+                                 const bool enable,
+                                 const bool mirror_xaxis,
+                                 const bool mirror_yaxis) = 0;
+
+  // External render.
+  virtual int AddRenderer(const int render_id,
+                          RawVideoType video_input_format,
+                          ExternalRenderer* renderer) = 0;
+
+ protected:
+  ViERender() {}
+  virtual ~ViERender() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_RENDER_H_
diff --git a/src/video_engine/include/vie_rtp_rtcp.h b/src/video_engine/include/vie_rtp_rtcp.h
new file mode 100644
index 0000000..15eef35
--- /dev/null
+++ b/src/video_engine/include/vie_rtp_rtcp.h
@@ -0,0 +1,313 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//  - Callbacks for RTP and RTCP events such as modified SSRC or CSRC.
+//  - SSRC handling.
+//  - Transmission of RTCP reports.
+//  - Obtaining RTCP data from incoming RTCP sender reports.
+//  - RTP and RTCP statistics (jitter, packet loss, RTT etc.).
+//  - Forward Error Correction (FEC).
+//  - Writing RTP and RTCP packets to binary files for off‐line analysis of the
+//    call quality.
+//  - Inserting extra RTP packets into active audio stream.
+
+#ifndef WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_RTP_RTCP_H_
+#define WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_RTP_RTCP_H_
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VideoEngine;
+
+// This enumerator sets the RTCP mode.
+enum ViERTCPMode {
+  kRtcpNone = 0,
+  kRtcpCompound_RFC4585 = 1,
+  kRtcpNonCompound_RFC5506 = 2
+};
+
+// This enumerator describes the key frame request mode.
+enum ViEKeyFrameRequestMethod {
+  kViEKeyFrameRequestNone = 0,
+  kViEKeyFrameRequestPliRtcp = 1,
+  kViEKeyFrameRequestFirRtp = 2,
+  kViEKeyFrameRequestFirRtcp = 3
+};
+
+enum StreamType {
+  kViEStreamTypeNormal = 0,  // Normal media stream
+  kViEStreamTypeRtx = 1  // Retransmission media stream
+};
+
+// This class declares an abstract interface for a user defined observer. It is
+// up to the VideoEngine user to implement a derived class which implements the
+// observer class. The observer is registered using RegisterRTPObserver() and
+// deregistered using DeregisterRTPObserver().
+class WEBRTC_DLLEXPORT ViERTPObserver {
+ public:
+  // This method is called if SSRC of the incoming stream is changed.
+  virtual void IncomingSSRCChanged(const int video_channel,
+                                   const unsigned int SSRC) = 0;
+
+  // This method is called if a field in CSRC changes or if the number of
+  // CSRCs changes.
+  virtual void IncomingCSRCChanged(const int video_channel,
+                                   const unsigned int CSRC,
+                                   const bool added) = 0;
+ protected:
+  virtual ~ViERTPObserver() {}
+};
+
+// This class declares an abstract interface for a user defined observer. It is
+// up to the VideoEngine user to implement a derived class which implements the
+// observer class. The observer is registered using RegisterRTCPObserver() and
+// deregistered using DeregisterRTCPObserver().
+
+class WEBRTC_DLLEXPORT ViERTCPObserver {
+ public:
+  // This method is called if a application-defined RTCP packet has been
+  // received.
+  virtual void OnApplicationDataReceived(
+      const int video_channel,
+      const unsigned char sub_type,
+      const unsigned int name,
+      const char* data,
+      const unsigned short data_length_in_bytes) = 0;
+ protected:
+  virtual ~ViERTCPObserver() {}
+};
+
+class WEBRTC_DLLEXPORT ViERTP_RTCP {
+ public:
+  enum { KDefaultDeltaTransmitTimeSeconds = 15 };
+  enum { KMaxRTCPCNameLength = 256 };
+
+  // Factory for the ViERTP_RTCP sub‐API and increases an internal reference
+  // counter if successful. Returns NULL if the API is not supported or if
+  // construction fails.
+  static ViERTP_RTCP* GetInterface(VideoEngine* video_engine);
+
+  // Releases the ViERTP_RTCP sub-API and decreases an internal reference
+  // counter. Returns the new reference count. This value should be zero
+  // for all sub-API:s before the VideoEngine object can be safely deleted.
+  virtual int Release() = 0;
+
+  // This function enables you to specify the RTP synchronization source
+  // identifier (SSRC) explicitly.
+  virtual int SetLocalSSRC(const int video_channel,
+                           const unsigned int SSRC,
+                           const StreamType usage = kViEStreamTypeNormal,
+                           const unsigned char simulcast_idx = 0) = 0;
+
+  // This function gets the SSRC for the outgoing RTP stream for the specified
+  // channel.
+  virtual int GetLocalSSRC(const int video_channel,
+                           unsigned int& SSRC) const = 0;
+
+  // This function map a incoming SSRC to a StreamType so that the engine
+  // can know which is the normal stream and which is the RTX
+  virtual int SetRemoteSSRCType(const int video_channel,
+                                const StreamType usage,
+                                const unsigned int SSRC) const = 0;
+
+  // This function gets the SSRC for the incoming RTP stream for the specified
+  // channel.
+  virtual int GetRemoteSSRC(const int video_channel,
+                            unsigned int& SSRC) const = 0;
+
+  // This function returns the CSRCs of the incoming RTP packets.
+  virtual int GetRemoteCSRCs(const int video_channel,
+                             unsigned int CSRCs[kRtpCsrcSize]) const = 0;
+
+  // This function enables manual initialization of the sequence number. The
+  // start sequence number is normally a random number.
+  virtual int SetStartSequenceNumber(const int video_channel,
+                                     unsigned short sequence_number) = 0;
+
+  // This function sets the RTCP status for the specified channel.
+  // Default mode is kRtcpCompound_RFC4585.
+  virtual int SetRTCPStatus(const int video_channel,
+                            const ViERTCPMode rtcp_mode) = 0;
+
+  // This function gets the RTCP status for the specified channel.
+  virtual int GetRTCPStatus(const int video_channel,
+                            ViERTCPMode& rtcp_mode) const = 0;
+
+  // This function sets the RTCP canonical name (CNAME) for the RTCP reports
+  // on a specific channel.
+  virtual int SetRTCPCName(const int video_channel,
+                           const char rtcp_cname[KMaxRTCPCNameLength]) = 0;
+
+  // This function gets the RTCP canonical name (CNAME) for the RTCP reports
+  // sent the specified channel.
+  virtual int GetRTCPCName(const int video_channel,
+                           char rtcp_cname[KMaxRTCPCNameLength]) const = 0;
+
+  // This function gets the RTCP canonical name (CNAME) for the RTCP reports
+  // received on the specified channel.
+  virtual int GetRemoteRTCPCName(
+      const int video_channel,
+      char rtcp_cname[KMaxRTCPCNameLength]) const = 0;
+
+  // This function sends an RTCP APP packet on a specific channel.
+  virtual int SendApplicationDefinedRTCPPacket(
+      const int video_channel,
+      const unsigned char sub_type,
+      unsigned int name,
+      const char* data,
+      unsigned short data_length_in_bytes) = 0;
+
+  // This function enables Negative Acknowledgment (NACK) using RTCP,
+  // implemented based on RFC 4585. NACK retransmits RTP packets if lost on
+  // the network. This creates a lossless transport at the expense of delay.
+  // If using NACK, NACK should be enabled on both endpoints in a call.
+  virtual int SetNACKStatus(const int video_channel, const bool enable) = 0;
+
+  // This function enables Forward Error Correction (FEC) using RTCP,
+  // implemented based on RFC 5109, to improve packet loss robustness. Extra
+  // FEC packets are sent together with the usual media packets, hence
+  // part of the bitrate will be used for FEC packets.
+  virtual int SetFECStatus(const int video_channel,
+                           const bool enable,
+                           const unsigned char payload_typeRED,
+                           const unsigned char payload_typeFEC) = 0;
+
+  // This function enables hybrid Negative Acknowledgment using RTCP
+  // and Forward Error Correction (FEC) implemented based on RFC 5109,
+  // to improve packet loss robustness. Extra
+  // FEC packets are sent together with the usual media packets, hence will
+  // part of the bitrate be used for FEC packets.
+  // The hybrid mode will choose between nack only, fec only and both based on
+  // network conditions. When both are applied, only packets that were not
+  // recovered by the FEC will be nacked.
+  virtual int SetHybridNACKFECStatus(const int video_channel,
+                                     const bool enable,
+                                     const unsigned char payload_typeRED,
+                                     const unsigned char payload_typeFEC) = 0;
+
+  // This function enables RTCP key frame requests.
+  virtual int SetKeyFrameRequestMethod(
+    const int video_channel, const ViEKeyFrameRequestMethod method) = 0;
+
+  // This function enables signaling of temporary bitrate constraints using
+  // RTCP, implemented based on RFC4585.
+  virtual int SetTMMBRStatus(const int video_channel, const bool enable) = 0;
+
+  // Enables and disables REMB packets for this channel. |sender| indicates
+  // this channel is encoding, |receiver| tells the bitrate estimate for
+  // this channel should be included in the REMB packet.
+  virtual int SetRembStatus(int video_channel,
+                            bool sender,
+                            bool receiver) = 0;
+
+  // Enables RTP timestamp extension offset described in RFC 5450. This call
+  // must be done before ViECodec::SetSendCodec is called.
+  virtual int SetSendTimestampOffsetStatus(int video_channel,
+                                           bool enable,
+                                           int id) = 0;
+
+  virtual int SetReceiveTimestampOffsetStatus(int video_channel,
+                                              bool enable,
+                                              int id) = 0;
+
+  // This function returns our locally created statistics of the received RTP
+  // stream.
+  virtual int GetReceivedRTCPStatistics(
+      const int video_channel,
+      unsigned short& fraction_lost,
+      unsigned int& cumulative_lost,
+      unsigned int& extended_max,
+      unsigned int& jitter,
+      int& rtt_ms) const = 0;
+
+  // This function returns statistics reported by the remote client in a RTCP
+  // packet.
+  virtual int GetSentRTCPStatistics(const int video_channel,
+                                    unsigned short& fraction_lost,
+                                    unsigned int& cumulative_lost,
+                                    unsigned int& extended_max,
+                                    unsigned int& jitter,
+                                    int& rtt_ms) const = 0;
+
+  // The function gets statistics from the sent and received RTP streams.
+  virtual int GetRTPStatistics(const int video_channel,
+                               unsigned int& bytes_sent,
+                               unsigned int& packets_sent,
+                               unsigned int& bytes_received,
+                               unsigned int& packets_received) const = 0;
+
+  // The function gets bandwidth usage statistics from the sent RTP streams in
+  // bits/s.
+  virtual int GetBandwidthUsage(const int video_channel,
+                                unsigned int& total_bitrate_sent,
+                                unsigned int& video_bitrate_sent,
+                                unsigned int& fec_bitrate_sent,
+                                unsigned int& nackBitrateSent) const = 0;
+
+  // This function gets the send-side estimated bandwidth available for video,
+  // including overhead, in bits/s.
+  virtual int GetEstimatedSendBandwidth(
+      const int video_channel,
+      unsigned int* estimated_bandwidth) const = 0;
+
+  // This function gets the receive-side estimated bandwidth available for
+  // video, including overhead, in bits/s.
+  // Returns -1 when no valid estimate is available.
+  virtual int GetEstimatedReceiveBandwidth(
+      const int video_channel,
+      unsigned int* estimated_bandwidth) const = 0;
+
+  // This function sets various options for the bandwidth estimator
+  // code.  The options are applied to new channels only.  For a given
+  // channel, the options that are active at the time when the channel
+  // is created are immutable for that channel.  See
+  // http://tools.ietf.org/html/draft-alvestrand-rtcweb-congestion-02
+  // (or later, updated documentation) and common_types.h to get a
+  // feel for what the options do.
+  virtual int SetOverUseDetectorOptions(
+      const OverUseDetectorOptions& options) const = 0;
+
+  // This function enables capturing of RTP packets to a binary file on a
+  // specific channel and for a given direction. The file can later be
+  // replayed using e.g. RTP Tools rtpplay since the binary file format is
+  // compatible with the rtpdump format.
+  virtual int StartRTPDump(const int video_channel,
+                           const char file_nameUTF8[1024],
+                           RTPDirections direction) = 0;
+
+  // This function disables capturing of RTP packets to a binary file on a
+  // specific channel and for a given direction.
+  virtual int StopRTPDump(const int video_channel,
+                          RTPDirections direction) = 0;
+
+  // Registers an instance of a user implementation of the ViERTPObserver.
+  virtual int RegisterRTPObserver(const int video_channel,
+                                  ViERTPObserver& observer) = 0;
+
+  // Removes a registered instance of ViERTPObserver.
+  virtual int DeregisterRTPObserver(const int video_channel) = 0;
+
+  // Registers an instance of a user implementation of the ViERTCPObserver.
+  virtual int RegisterRTCPObserver(const int video_channel,
+                                   ViERTCPObserver& observer) = 0;
+
+  // Removes a registered instance of ViERTCPObserver.
+  virtual int DeregisterRTCPObserver(const int video_channel) = 0;
+
+ protected:
+  ViERTP_RTCP() {}
+  virtual ~ViERTP_RTCP() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_INCLUDE_VIE_RTP_RTCP_H_
diff --git a/src/video_engine/main/test/SimpleCocoaGUI/GUI_Defines.h b/src/video_engine/main/test/SimpleCocoaGUI/GUI_Defines.h
new file mode 100644
index 0000000..8382844
--- /dev/null
+++ b/src/video_engine/main/test/SimpleCocoaGUI/GUI_Defines.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  GUI_Defines.h
+ *
+ */
+
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_SIMPLECOCOAGUI_GUI_DEFINES_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_SIMPLECOCOAGUI_GUI_DEFINES_H_
+
+#define		ViE_TEST(x) if(-1 == x){ \
+int errNum = _ptrViEBase->LastError();	\
+NSLog(@"ERROR: %d at %s:%d", errNum, __FUNCTION__, __LINE__); \
+} 
+
+
+// Video Engine Related
+#define	V_CAPTURE_DEVICE_INDEX		0
+#define V_VIE_CAPTURE_ID			747
+#define V_DEVICE_NAME_LENGTH		256
+#define V_CODEC_INDEX		2
+#define V_IP_ADDRESS		"127.0.0.1"
+#define V_RTP_PORT			8000
+
+
+
+#endif	// WEBRTC_VIDEO_ENGINE_MAIN_TEST_SIMPLECOCOAGUI_GUI_DEFINES_H_
diff --git a/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUI-Info.plist b/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUI-Info.plist
new file mode 100644
index 0000000..d0d3a18
--- /dev/null
+++ b/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUI-Info.plist
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="UTF-8"?>

+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">

+<plist version="1.0">

+<dict>

+	<key>CFBundleDevelopmentRegion</key>

+	<string>English</string>

+	<key>CFBundleExecutable</key>

+	<string>${EXECUTABLE_NAME}</string>

+	<key>CFBundleIconFile</key>

+	<string></string>

+	<key>CFBundleIdentifier</key>

+	<string>com.yourcompany.${PRODUCT_NAME:rfc1034identifier}</string>

+	<key>CFBundleInfoDictionaryVersion</key>

+	<string>6.0</string>

+	<key>CFBundleName</key>

+	<string>${PRODUCT_NAME}</string>

+	<key>CFBundlePackageType</key>

+	<string>APPL</string>

+	<key>CFBundleShortVersionString</key>

+	<string>1.0</string>

+	<key>CFBundleSignature</key>

+	<string>????</string>

+	<key>CFBundleVersion</key>

+	<string>1</string>

+	<key>LSMinimumSystemVersion</key>

+	<string>${MACOSX_DEPLOYMENT_TARGET}</string>

+	<key>NSMainNibFile</key>

+	<string>SimpleCocoaGUI</string>

+	<key>NSPrincipalClass</key>

+	<string>NSApplication</string>

+</dict>

+</plist>

diff --git a/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUIAppDelegate.h b/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUIAppDelegate.h
new file mode 100644
index 0000000..10d52fc
--- /dev/null
+++ b/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUIAppDelegate.h
@@ -0,0 +1,77 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+//  SimpleCocoaGUIAppDelegate.h
+//
+
+#import <Cocoa/Cocoa.h>
+#include <iostream>
+using namespace std;
+
+@class ViECocoaRenderView;
+
+#include "GUI_Defines.h"
+
+#include "common_types.h"
+#include "voe_base.h"
+
+#include "vie_base.h"
+#include "vie_capture.h"
+#include "vie_codec.h"
+#include "vie_file.h"
+#include "vie_network.h"
+#include "vie_render.h"
+#include "vie_rtp_rtcp.h"
+#include "vie_errors.h"
+
+
+
+@interface SimpleCocoaGUIAppDelegate : NSObject <NSApplicationDelegate> {
+    NSWindow*						_window;
+	IBOutlet NSOpenGLView*			_vieCocoaRenderView1;
+	IBOutlet NSOpenGLView*			_vieCocoaRenderView2;
+	IBOutlet NSButton*				_butRestartLoopback;
+	VideoEngine*				_ptrViE;
+	ViEBase*					_ptrViEBase;
+	ViECapture*					_ptrViECapture;
+	ViERender*					_ptrViERender;
+	ViECodec*					_ptrViECodec;
+	ViENetwork*					_ptrViENetwork;
+	
+	bool							_fullScreen;
+	int								_videoChannel;
+	
+	int _captureId;
+	
+	VideoEngine* ptrViE;
+	ViEBase* ptrViEBase;
+	ViECapture* ptrViECapture;
+	ViERTP_RTCP* ptrViERtpRtcp;
+	ViERender* ptrViERender;
+	ViECodec* ptrViECodec;
+	ViENetwork* ptrViENetwork;
+}
+
+@property (assign) IBOutlet NSWindow* window;
+-(void)createUI:(bool)fullScreen;
+-(void)initViECocoaTest;
+-(void)initializeVariables;
+-(void)NSLogVideoCodecs;
+-(void)startViECocoaTest;
+-(int)initLoopback;
+-(int)ioLooback;
+-(int)startLoopback;
+-(int)stopLooback;
+
+-(IBAction)handleRestart:(id)sender;
+
+
+@end
diff --git a/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUIAppDelegate.mm b/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUIAppDelegate.mm
new file mode 100644
index 0000000..d594cfe
--- /dev/null
+++ b/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUIAppDelegate.mm
@@ -0,0 +1,1075 @@
+//
+//  SimpleCocoaGUIAppDelegate.m
+//
+
+#import "SimpleCocoaGUIAppDelegate.h"
+
+@implementation SimpleCocoaGUIAppDelegate
+
+@synthesize window = _window;
+
+- (void)applicationDidFinishLaunching:(NSNotification *)aNotification {
+
+//	[self initializeVariables];
+	[self createUI];
+//	[self initViECocoaTest];
+//	[self NSLogVideoCodecs];
+//	[self startViECocoaTest];
+	
+//	[self startLoopback];
+	
+	[self ioLooback];
+}
+
+-(void)createUI{
+	
+	NSRect outWindow1Frame = NSMakeRect(200, 200, 200, 200);
+	NSWindow* outWindow1 = [[NSWindow alloc] initWithContentRect:outWindow1Frame styleMask:NSTitledWindowMask backing:NSBackingStoreBuffered defer:NO];
+	[outWindow1 orderOut:nil];
+	NSRect vieAutotestCocoaRenderView1Frame = NSMakeRect(0, 0, 200, 200);
+	_vieCocoaRenderView1 = [[ViECocoaRenderView alloc] initWithFrame:vieAutotestCocoaRenderView1Frame];
+	[[outWindow1 contentView] addSubview:_vieCocoaRenderView1];
+	[outWindow1 setTitle:[NSString stringWithFormat:@"window1"]];
+	[outWindow1 makeKeyAndOrderFront:NSApp];	
+
+	
+	NSRect outWindow2Frame = NSMakeRect(400, 200, 200, 200);
+	NSWindow* outWindow2 = [[NSWindow alloc] initWithContentRect:outWindow2Frame styleMask:NSTitledWindowMask backing:NSBackingStoreBuffered defer:NO];
+	[outWindow2 orderOut:nil];
+	NSRect vieAutotestCocoaRenderView2Frame = NSMakeRect(0, 0, 200, 200);
+	_vieCocoaRenderView2 = [[ViECocoaRenderView alloc] initWithFrame:vieAutotestCocoaRenderView2Frame];
+	[[outWindow2 contentView] addSubview:_vieCocoaRenderView2];
+	[outWindow2 setTitle:[NSString stringWithFormat:@"window2"]];
+	[outWindow2 makeKeyAndOrderFront:NSApp];	
+	
+	
+	
+
+
+
+
+}
+
+-(void)initViECocoaTest{
+	
+	int _error = 0;
+    _ptrViE = VideoEngine::Create();
+	_ptrViEBase = ViEBase::GetInterface(_ptrViE);
+	_error = _ptrViEBase->Init();
+		
+	_ptrViECapture = ViECapture::GetInterface(_ptrViE);
+	_ptrViERender = ViERender::GetInterface(_ptrViE);
+	_ptrViECodec = ViECodec::GetInterface(_ptrViE);	
+	_ptrViENetwork = ViENetwork::GetInterface(_ptrViE);
+	
+
+	_error = _ptrViE->SetTraceFile("ViEBaseStandardTest.txt");
+    _error = _ptrViE->SetEncryptedTraceFile("ViEBaseStandardTestEncrypted.txt");
+
+	
+}
+
+
+-(void)initializeVariables{
+	_fullScreen = YES;
+}
+
+-(void)NSLogVideoCodecs{
+	NSLog(@"Searching for video codecs.....");
+
+	VideoCodec videoCodec;
+    memset(&videoCodec, 0, sizeof(VideoCodec));
+    for(int index = 0; index < _ptrViECodec->NumberOfCodecs(); index++)
+    {
+        ViE_TEST(_ptrViECodec->GetCodec(index, videoCodec));
+		NSLog(@"Video codec found: %s", videoCodec.plName);
+    }	
+	
+}
+-(void)startViECocoaTest{
+
+
+
+
+    int error=0;
+
+    char deviceName[128];
+    char deviceUniqueName[512];
+    int captureId = 0;
+    int dummy = 0;
+
+	//ViE_TEST(_ptrViEBase->CreateChannel(_videoChannel));
+    //ViE_TEST(_ptrViECapture->GetCaptureDevice(0,deviceName,sizeof(deviceName),deviceUniqueName,sizeof(deviceUniqueName)));
+    //ViE_TEST(_ptrViECapture->AllocateCaptureDevice(deviceUniqueName,sizeof(deviceUniqueName),captureId));
+    //ViE_TEST(_ptrViECapture->AllocateCaptureDevice("dummydevicethatdoesnotexist",sizeof(deviceUniqueName),dummy));
+
+    char	captureDeviceName[V_DEVICE_NAME_LENGTH] = "";
+    char	captureDeviceUniqueId[V_DEVICE_NAME_LENGTH] = "";
+	int		captureDeviceId = 0;
+	
+	
+	
+	ViE_TEST(_ptrViE->SetTraceFilter(webrtc::TR_ALL));
+    ViE_TEST(_ptrViE->SetTraceFile("ViECocoaTrace.txt"));
+    ViE_TEST(_ptrViE->SetEncryptedTraceFile("ViECocoaEncryptedTrace.txt"));
+
+	
+	
+	
+	// base
+    ViE_TEST(_ptrViEBase->CreateChannel(_videoChannel));
+    
+	// capture device
+    ViE_TEST(_ptrViECapture->GetCaptureDevice(V_CAPTURE_DEVICE_INDEX, captureDeviceName, V_DEVICE_NAME_LENGTH, captureDeviceUniqueId, V_DEVICE_NAME_LENGTH));    
+	ViE_TEST(_ptrViECapture->AllocateCaptureDevice(captureDeviceUniqueId, V_DEVICE_NAME_LENGTH, captureDeviceId));
+    ViE_TEST(_ptrViECapture->ConnectCaptureDevice(captureDeviceId, _videoChannel));
+    ViE_TEST(_ptrViECapture->StartCapture(captureDeviceId));
+	
+	// renderer
+    ViE_TEST(_ptrViERender->AddRenderer(captureDeviceId,  (void*)_vieCocoaRenderView1, 0, 0.0, 0.0, 1.0, 1.0));
+    ViE_TEST(_ptrViERender->StartRender(captureDeviceId));
+//	usleep(3 * 1000);
+//	ViE_TEST(_ptrViERender->RemoveRenderer(captureDeviceId));
+	//exit(0);
+
+	
+//	// codec
+//	[self NSLogVideoCodecs];
+//	VideoCodec videoCodec;
+//    memset(&videoCodec, 0, sizeof(VideoCodec));
+//	ViE_TEST(_ptrViECodec->GetCodec(V_CODEC_INDEX, videoCodec));
+//	ViE_TEST(_ptrViECodec->SetReceiveCodec(_videoChannel, videoCodec));
+//	ViE_TEST(_ptrViECodec->SetSendCodec(_videoChannel, videoCodec));
+//	
+//	// network + base
+//	ViE_TEST(_ptrViENetwork->SetLocalReceiver(_videoChannel, V_RTP_PORT)); 
+//	ViE_TEST(_ptrViEBase->StartReceive(_videoChannel));
+//    ViE_TEST(_ptrViENetwork->SetSendDestination(_videoChannel, V_IP_ADDRESS, V_RTP_PORT));	
+//    ViE_TEST(_ptrViEBase->StartSend(_videoChannel));
+//	ViE_TEST(_ptrViERender->MirrorRenderStream(captureDeviceId, true, false, true));
+	
+	
+}
+
+-(int)initLoopback
+{
+	
+}
+-(int)startLoopback
+{
+	//********************************************************
+    //  Begin create/initialize  Video Engine for testing
+    //********************************************************	
+	
+    int error = 0;
+    bool succeeded = true;
+    int numberOfErrors = 0;
+    std::string str;
+    
+	//
+    // Create a  VideoEngine instance
+    //
+//    VideoEngine* ptrViE = NULL;
+    ptrViE = VideoEngine::Create();
+    if (ptrViE == NULL)
+    {
+        printf("ERROR in VideoEngine::Create\n");
+        return -1;
+    }
+	
+	error = ptrViE->SetTraceFilter(webrtc::TR_ALL);
+	if (error == -1)
+    {
+        printf("ERROR in VideoEngine::SetTraceLevel\n");
+        return -1;
+    }
+	
+	
+    error = ptrViE->SetTraceFile("ViETrace.txt");
+    if (error == -1)
+    {
+        printf("ERROR in VideoEngine::SetTraceFile\n");
+        return -1;
+    }
+	
+    error = ptrViE->SetEncryptedTraceFile("ViEEncryptedTrace.txt");
+    if (error == -1)
+    {
+        printf("ERROR in VideoEngine::SetEncryptedTraceFile\n");
+        return -1;
+    }
+	
+    //
+    // Init  VideoEngine and create a channel
+    //
+    ptrViEBase = ViEBase::GetInterface(ptrViE);
+    if (ptrViEBase == NULL)
+    {
+        printf("ERROR in ViEBase::GetInterface\n");
+        return -1;
+    }
+	
+    error = ptrViEBase->Init();
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::Init\n");
+        return -1;
+    }
+	
+    int videoChannel = -1;
+    error = ptrViEBase->CreateChannel(_videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::CreateChannel\n");
+        return -1;
+    }
+	
+    //
+    // List available capture devices, allocate and connect.
+    //
+	ptrViECapture = ViECapture::GetInterface(ptrViE);
+    if (ptrViEBase == NULL)
+    {
+        printf("ERROR in ViECapture::GetInterface\n");
+        return -1;
+    }
+	
+    const unsigned int KMaxDeviceNameLength = 128;
+    const unsigned int KMaxUniqueIdLength = 256;
+    char deviceName[KMaxDeviceNameLength];
+    memset(deviceName, 0, KMaxDeviceNameLength);
+    char uniqueId[KMaxUniqueIdLength];
+    memset(uniqueId, 0, KMaxUniqueIdLength);
+
+    std::cout << std::endl;
+    std::cout << "Available capture devices:" << std::endl;
+    unsigned int captureIdx = 0;
+    for (captureIdx = 0;
+         captureIdx < ptrViECapture->NumberOfCaptureDevices();
+         captureIdx++)
+    {
+        memset(deviceName, 0, KMaxDeviceNameLength);
+        memset(uniqueId, 0, KMaxUniqueIdLength);
+		
+        error = ptrViECapture->GetCaptureDevice(captureIdx,
+														deviceName, KMaxDeviceNameLength, uniqueId, KMaxUniqueIdLength);
+        if (error == -1)
+        {
+            printf("ERROR in ViECapture::GetCaptureDevice\n");
+            return -1;
+        }
+        std::cout << "   " << captureIdx+1 << ". " << deviceName
+		<< std::endl;
+    }
+    std::cout << std::endl;
+    std::cout << "Choose capture devices: ";
+//    std::getline(std::cin, str);
+//    captureIdx = atoi(str.c_str()) - 1;
+	captureIdx = 0;
+    error = ptrViECapture->GetCaptureDevice(captureIdx, deviceName,
+													KMaxDeviceNameLength, uniqueId, KMaxUniqueIdLength);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::GetCaptureDevice\n");
+        return -1;
+    }
+	
+    _captureId = 0;
+    error = ptrViECapture->AllocateCaptureDevice(uniqueId,
+														 KMaxUniqueIdLength, _captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::AllocateCaptureDevice\n");
+        return -1;
+    }
+	
+    error = ptrViECapture->ConnectCaptureDevice(_captureId,
+														_videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::ConnectCaptureDevice\n");
+        return -1;
+    }
+	
+    error = ptrViECapture->StartCapture(_captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::StartCapture\n");
+        return -1;
+    }
+	
+    //
+    // RTP/RTCP settings
+    //
+	ptrViERtpRtcp = ViERTP_RTCP::GetInterface(ptrViE);
+    if (ptrViERtpRtcp == NULL)
+    {
+        printf("ERROR in ViERTP_RTCP::GetInterface\n");
+        return -1;
+    }
+	
+    error = ptrViERtpRtcp->SetRTCPStatus(_videoChannel,
+												 kRtcpCompound_RFC4585);
+    if (error == -1)
+    {
+        printf("ERROR in ViERTP_RTCP::SetRTCPStatus\n");
+        return -1;
+    }
+	
+    error = ptrViERtpRtcp->SetKeyFrameRequestMethod(_videoChannel,
+															kViEKeyFrameRequestPliRtcp);
+    if (error == -1)
+    {
+        printf("ERROR in ViERTP_RTCP::SetKeyFrameRequestMethod\n");
+        return -1;
+    }
+	
+    error = ptrViERtpRtcp->SetTMMBRStatus(_videoChannel, true);
+    if (error == -1)
+    {
+        printf("ERROR in ViERTP_RTCP::SetTMMBRStatus\n");
+        return -1;
+    }
+	
+    //
+    // Set up rendering
+    //
+    ptrViERender = ViERender::GetInterface(ptrViE);
+	if (ptrViERender == NULL)
+    {
+        printf("ERROR in ViERender::GetInterface\n");
+        return -1;
+    }
+	
+    error = ptrViERender->AddRenderer(_captureId, _vieCocoaRenderView1,
+											  0, 0.0, 0.0, 1.0, 1.0);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::AddRenderer\n");
+        return -1;
+    }
+	
+    error = ptrViERender->StartRender(_captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StartRender\n");
+        return -1;
+    }
+	
+    error = ptrViERender->AddRenderer(_videoChannel, _vieCocoaRenderView2,
+											  1, 0.0, 0.0, 1.0, 1.0);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::AddRenderer\n");
+        return -1;
+    }
+	
+    error = ptrViERender->StartRender(_videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StartRender\n");
+        return -1;
+    }
+	
+    //
+    // Setup codecs
+    //
+    ptrViECodec = ViECodec::GetInterface(ptrViE);
+    if (ptrViECodec == NULL)
+    {
+        printf("ERROR in ViECodec::GetInterface\n");
+        return -1;
+    }    
+	
+    std::cout << std::endl;
+    std::cout << "Available codecs:" << std::endl;
+	
+    // Check available codecs and prepare receive codecs
+    VideoCodec videoCodec;
+    memset(&videoCodec, 0, sizeof(VideoCodec));
+    unsigned int codecIdx = 0;
+    for (codecIdx = 0;
+         codecIdx < ptrViECodec->NumberOfCodecs();
+         codecIdx++)
+    {
+        error = ptrViECodec->GetCodec(codecIdx, videoCodec);
+        if (error == -1)
+        {
+            printf("ERROR in ViECodec::GetCodec\n");
+            return -1;
+        }
+		
+        error = ptrViECodec->SetReceiveCodec(_videoChannel,
+													 videoCodec);
+        if (error == -1)
+        {
+            printf("ERROR in ViECodec::SetReceiveCodec\n");
+            return -1;
+        }
+        if (videoCodec.codecType != kVideoCodecRED &&
+            videoCodec.codecType != kVideoCodecULPFEC)
+        {
+            std::cout << "   " << codecIdx+1 << ". " << videoCodec.plName
+			<< std::endl;
+        }
+    }
+//    std::cout << std::endl;
+//    std::cout << "Choose codec: ";
+//    std::getline(std::cin, str);
+//    codecIdx = atoi(str.c_str()) - 1;
+	codecIdx = 0;
+	
+    error = ptrViECodec->GetCodec(codecIdx, videoCodec);
+    if (error == -1)
+    {
+        printf("ERROR in ViECodec::GetCodec\n");
+        return -1;
+    }
+	
+    error = ptrViECodec->SetSendCodec(_videoChannel, videoCodec);
+    if (error == -1)
+    {
+        printf("ERROR in ViECodec::SetSendCodec\n");
+        return -1;
+    }
+	
+    //
+    // Address settings
+    //
+    ptrViENetwork = ViENetwork::GetInterface(ptrViE);
+    if (ptrViENetwork == NULL)
+    {
+        printf("ERROR in ViENetwork::GetInterface\n");
+        return -1;
+    }
+	
+    const char* ipAddress = "127.0.0.1";
+    const unsigned short rtpPort = 6000;
+    error = ptrViENetwork->SetLocalReceiver(_videoChannel, rtpPort);
+    if (error == -1)
+    {
+        printf("ERROR in ViENetwork::SetLocalReceiver\n");
+        return -1;
+    }
+    
+    error = ptrViEBase->StartReceive(_videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViENetwork::StartReceive\n");
+        return -1;
+    }
+	
+    error = ptrViENetwork->SetSendDestination(_videoChannel,
+													  ipAddress, rtpPort);
+    if (error == -1)
+    {
+        printf("ERROR in ViENetwork::SetSendDestination\n");
+        return -1;
+    }
+	
+    error = ptrViEBase->StartSend(_videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViENetwork::StartSend\n");
+        return -1;
+    }
+	
+	
+    //********************************************************
+    //  Engine started
+    //********************************************************
+	
+	
+    // Call started
+    std::cout << std::endl;
+    std::cout << "Loopback call started" << std::endl;
+//    std::cout << std::endl << std::endl;
+//    std::cout << "Press enter to stop...";
+//    std::getline(std::cin, str);
+}
+
+-(int)stopLooback
+{
+	int error = 0;
+	
+	
+	
+    //********************************************************
+    //  Testing finished. Tear down Video Engine
+    //********************************************************
+	
+    error = ptrViEBase->StopReceive(_videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::StopReceive\n");
+        return -1;
+    }
+	
+    error = ptrViEBase->StopSend(_videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::StopSend\n");
+        return -1;
+    }
+	
+    error = ptrViERender->StopRender(_captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StopRender\n");
+        return -1;
+    }
+	
+    error = ptrViERender->RemoveRenderer(_captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::RemoveRenderer\n");
+        return -1; 
+    }
+	
+    error = ptrViERender->StopRender(_videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StopRender\n");
+        return -1;
+    }
+	
+    error = ptrViERender->RemoveRenderer(_videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::RemoveRenderer\n");
+        return -1;
+    }
+	
+    error = ptrViECapture->StopCapture(_captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::StopCapture\n");
+        return -1; 
+    }
+	
+    error = ptrViECapture->DisconnectCaptureDevice(_videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::DisconnectCaptureDevice\n");
+        return -1;
+    }
+	
+    error = ptrViECapture->ReleaseCaptureDevice(_captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::ReleaseCaptureDevice\n");
+        return -1;
+    }
+	
+    error = ptrViEBase->DeleteChannel(_videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::DeleteChannel\n");
+        return -1;
+    }
+	
+    int remainingInterfaces = 0;
+    remainingInterfaces = ptrViECodec->Release();
+    remainingInterfaces += ptrViECapture->Release();
+    remainingInterfaces += ptrViERtpRtcp->Release();
+    remainingInterfaces += ptrViERender->Release();
+    remainingInterfaces += ptrViENetwork->Release();
+    remainingInterfaces += ptrViEBase->Release();
+    if (remainingInterfaces > 0)
+    {
+        printf("ERROR: Could not release all interfaces\n");
+        return -1;
+    }
+	
+    bool deleted = VideoEngine::Delete(ptrViE);
+    if (deleted == false)
+    {
+        printf("ERROR in VideoEngine::Delete\n");
+        return -1;
+    }
+	
+    return 0;
+	
+	// ===================================================================
+    //
+    // END:  VideoEngine 3.0 Sample Code
+    //
+    // ===================================================================
+	
+	
+}
+
+-(int)ioLooback
+{
+    //********************************************************
+    //  Begin create/initialize  Video Engine for testing
+    //********************************************************	
+	
+    int error = 0;
+    bool succeeded = true;
+    int numberOfErrors = 0;
+    std::string str;
+    
+	//
+    // Create a  VideoEngine instance
+    //
+    VideoEngine* ptrViE = NULL;
+    ptrViE = VideoEngine::Create();
+    if (ptrViE == NULL)
+    {
+        printf("ERROR in VideoEngine::Create\n");
+        return -1;
+    }
+	
+	error = ptrViE->SetTraceFilter(webrtc::TR_ALL);
+	if (error == -1)
+    {
+        printf("ERROR in VideoEngine::SetTraceLevel\n");
+        return -1;
+    }
+	
+	
+    error = ptrViE->SetTraceFile("ViETrace.txt");
+    if (error == -1)
+    {
+        printf("ERROR in VideoEngine::SetTraceFile\n");
+        return -1;
+    }
+	
+    error = ptrViE->SetEncryptedTraceFile("ViEEncryptedTrace.txt");
+    if (error == -1)
+    {
+        printf("ERROR in VideoEngine::SetEncryptedTraceFile\n");
+        return -1;
+    }
+	
+    //
+    // Init  VideoEngine and create a channel
+    //
+    ViEBase* ptrViEBase = ViEBase::GetInterface(ptrViE);
+    if (ptrViEBase == NULL)
+    {
+        printf("ERROR in ViEBase::GetInterface\n");
+        return -1;
+    }
+	
+    error = ptrViEBase->Init();
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::Init\n");
+        return -1;
+    }
+	
+    int videoChannel = -1;
+    error = ptrViEBase->CreateChannel(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::CreateChannel\n");
+        return -1;
+    }
+	
+    //
+    // List available capture devices, allocate and connect.
+    //
+    ViECapture* ptrViECapture = 
+	ViECapture::GetInterface(ptrViE);
+    if (ptrViEBase == NULL)
+    {
+        printf("ERROR in ViECapture::GetInterface\n");
+        return -1;
+    }
+	
+    const unsigned int KMaxDeviceNameLength = 128;
+    const unsigned int KMaxUniqueIdLength = 256;
+    char deviceName[KMaxDeviceNameLength];
+    memset(deviceName, 0, KMaxDeviceNameLength);
+    char uniqueId[KMaxUniqueIdLength];
+    memset(uniqueId, 0, KMaxUniqueIdLength);
+	
+    std::cout << std::endl;
+    std::cout << "Available capture devices:" << std::endl;
+    unsigned int captureIdx = 0;
+    for (captureIdx = 0;
+         captureIdx < ptrViECapture->NumberOfCaptureDevices();
+         captureIdx++)
+    {
+        memset(deviceName, 0, KMaxDeviceNameLength);
+        memset(uniqueId, 0, KMaxUniqueIdLength);
+		
+        error = ptrViECapture->GetCaptureDevice(captureIdx,
+														deviceName, KMaxDeviceNameLength, uniqueId, KMaxUniqueIdLength);
+        if (error == -1)
+        {
+            printf("ERROR in ViECapture::GetCaptureDevice\n");
+            return -1;
+        }
+        std::cout << "   " << captureIdx+1 << ". " << deviceName
+		<< std::endl;
+    }
+    std::cout << std::endl;
+    std::cout << "Choose capture devices: ";
+//    std::getline(std::cin, str);
+//    captureIdx = atoi(str.c_str()) - 1;
+	captureIdx = 0;
+    error = ptrViECapture->GetCaptureDevice(captureIdx, deviceName,
+													KMaxDeviceNameLength, uniqueId, KMaxUniqueIdLength);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::GetCaptureDevice\n");
+        return -1;
+    }
+	
+    int captureId = 0;
+    error = ptrViECapture->AllocateCaptureDevice(uniqueId,
+														 KMaxUniqueIdLength, captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::AllocateCaptureDevice\n");
+        return -1;
+    }
+	
+    error = ptrViECapture->ConnectCaptureDevice(captureId,
+														videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::ConnectCaptureDevice\n");
+        return -1;
+    }
+	
+    error = ptrViECapture->StartCapture(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::StartCapture\n");
+        return -1;
+    }
+	
+    //
+    // RTP/RTCP settings
+    //
+    ViERTP_RTCP* ptrViERtpRtcp =
+	ViERTP_RTCP::GetInterface(ptrViE);
+    if (ptrViERtpRtcp == NULL)
+    {
+        printf("ERROR in ViERTP_RTCP::GetInterface\n");
+        return -1;
+    }
+	
+    error = ptrViERtpRtcp->SetRTCPStatus(videoChannel,
+												 kRtcpCompound_RFC4585);
+    if (error == -1)
+    {
+        printf("ERROR in ViERTP_RTCP::SetRTCPStatus\n");
+        return -1;
+    }
+	
+    error = ptrViERtpRtcp->SetKeyFrameRequestMethod(videoChannel,
+															kViEKeyFrameRequestPliRtcp);
+    if (error == -1)
+    {
+        printf("ERROR in ViERTP_RTCP::SetKeyFrameRequestMethod\n");
+        return -1;
+    }
+	
+    error = ptrViERtpRtcp->SetTMMBRStatus(videoChannel, true);
+    if (error == -1)
+    {
+        printf("ERROR in ViERTP_RTCP::SetTMMBRStatus\n");
+        return -1;
+    }
+	
+    //
+    // Set up rendering
+    //
+    ViERender* ptrViERender =
+	ViERender::GetInterface(ptrViE);
+	if (ptrViERender == NULL)
+    {
+        printf("ERROR in ViERender::GetInterface\n");
+        return -1;
+    }
+	
+//    error = ptrViERender->EnableFullScreenRender(_vieCocoaRenderView1);
+//    if (error == -1)
+//    {
+//        printf("ERROR in ViERender::AddRenderer\n");
+//        return -1;
+//    }	
+	
+	
+    error = ptrViERender->AddRenderer(captureId, _vieCocoaRenderView1,
+											0, 0.5, 0.5, 1.0, 1.0);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::AddRenderer\n");
+        return -1;
+    }
+	
+    error = ptrViERender->StartRender(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StartRender\n");
+        return -1;
+    }
+	
+    error = ptrViERender->AddRenderer(videoChannel, _vieCocoaRenderView2,
+											  1, 0.0, 0.0, 1.0, 1.0);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::AddRenderer\n");
+        return -1;
+    }
+	
+    error = ptrViERender->StartRender(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StartRender\n");
+        return -1;
+    }
+	
+    //
+    // Setup codecs
+    //
+    ViECodec* ptrViECodec = ViECodec::GetInterface(ptrViE);
+    if (ptrViECodec == NULL)
+    {
+        printf("ERROR in ViECodec::GetInterface\n");
+        return -1;
+    }    
+	
+    std::cout << std::endl;
+    std::cout << "Available codecs:" << std::endl;
+	
+    // Check available codecs and prepare receive codecs
+    VideoCodec videoCodec;
+    memset(&videoCodec, 0, sizeof(VideoCodec));
+    unsigned int codecIdx = 0;
+    for (codecIdx = 0;
+         codecIdx < ptrViECodec->NumberOfCodecs();
+         codecIdx++)
+    {
+        error = ptrViECodec->GetCodec(codecIdx, videoCodec);
+        if (error == -1)
+        {
+            printf("ERROR in ViECodec::GetCodec\n");
+            return -1;
+        }
+		
+        error = ptrViECodec->SetReceiveCodec(videoChannel,
+													 videoCodec);
+        if (error == -1)
+        {
+            printf("ERROR in ViECodec::SetReceiveCodec\n");
+            return -1;
+        }
+        if (videoCodec.codecType != kVideoCodecRED &&
+            videoCodec.codecType != kVideoCodecULPFEC)
+        {
+            std::cout << "   " << codecIdx+1 << ". " << videoCodec.plName
+			<< std::endl;
+        }
+    }
+    std::cout << std::endl;
+    std::cout << "Choose codec: ";
+//    std::getline(std::cin, str);
+//    codecIdx = atoi(str.c_str()) - 1;
+	
+	
+	error = ptrViECapture->ShowCaptureSettingsDialogBox("unique",10, "mytitle");
+	codecIdx = 0;
+    error = ptrViECodec->GetCodec(codecIdx, videoCodec);
+    if (error == -1)
+    {
+        printf("ERROR in ViECodec::GetCodec\n");
+        return -1;
+    }
+	
+    error = ptrViECodec->SetSendCodec(videoChannel, videoCodec);
+    if (error == -1)
+    {
+        printf("ERROR in ViECodec::SetSendCodec\n");
+        return -1;
+    }
+	
+    //
+    // Address settings
+    //
+    ViENetwork* ptrViENetwork =
+	ViENetwork::GetInterface(ptrViE);
+    if (ptrViENetwork == NULL)
+    {
+        printf("ERROR in ViENetwork::GetInterface\n");
+        return -1;
+    }
+	
+    const char* ipAddress = "127.0.0.1";
+    const unsigned short rtpPort = 6000;
+    error = ptrViENetwork->SetLocalReceiver(videoChannel, rtpPort);
+    if (error == -1)
+    {
+        printf("ERROR in ViENetwork::SetLocalReceiver\n");
+        return -1;
+    }
+    
+    error = ptrViEBase->StartReceive(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViENetwork::StartReceive\n");
+        return -1;
+    }
+	
+    error = ptrViENetwork->SetSendDestination(videoChannel,
+													  ipAddress, rtpPort);
+    if (error == -1)
+    {
+        printf("ERROR in ViENetwork::SetSendDestination\n");
+        return -1;
+    }
+	
+    error = ptrViEBase->StartSend(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViENetwork::StartSend\n");
+        return -1;
+    }
+	
+	
+    //********************************************************
+    //  Engine started
+    //********************************************************
+	
+	
+    // Call started
+    std::cout << std::endl;
+    std::cout << "Loopback call started" << std::endl;
+    std::cout << std::endl << std::endl;
+    std::cout << "Press enter to stop...";
+//	[[NSRunLoop currentRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:1]];
+//    std::getline(std::cin, str);
+	usleep(5 * 1000 * 1000);
+	
+	//int i = 0;
+//	while(1)
+//	{
+//		NSLog(@"app iteration %d", i);
+//		i++;
+//		[[NSRunLoop currentRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:1]];
+//		std::getline(std::cin, str);
+//		if(i > 3)
+//		{
+//			break;
+//		}
+//	}
+	
+    //********************************************************
+    //  Testing finished. Tear down Video Engine
+    //********************************************************
+	
+    error = ptrViEBase->StopReceive(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::StopReceive\n");
+        return -1;
+    }
+	
+    error = ptrViEBase->StopSend(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::StopSend\n");
+        return -1;
+    }
+	
+    error = ptrViERender->StopRender(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StopRender\n");
+        return -1;
+    }
+	
+    error = ptrViERender->RemoveRenderer(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::RemoveRenderer\n");
+        return -1; 
+    }
+	
+    error = ptrViERender->StopRender(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StopRender\n");
+        return -1;
+    }
+	
+    error = ptrViERender->RemoveRenderer(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::RemoveRenderer\n");
+        return -1;
+    }
+	
+    error = ptrViECapture->StopCapture(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::StopCapture\n");
+        return -1; 
+    }
+	
+    error = ptrViECapture->DisconnectCaptureDevice(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::DisconnectCaptureDevice\n");
+        return -1;
+    }
+	
+    error = ptrViECapture->ReleaseCaptureDevice(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::ReleaseCaptureDevice\n");
+        return -1;
+    }
+	
+    error = ptrViEBase->DeleteChannel(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::DeleteChannel\n");
+        return -1;
+    }
+	
+    int remainingInterfaces = 0;
+    remainingInterfaces = ptrViECodec->Release();
+    remainingInterfaces += ptrViECapture->Release();
+    remainingInterfaces += ptrViERtpRtcp->Release();
+    remainingInterfaces += ptrViERender->Release();
+    remainingInterfaces += ptrViENetwork->Release();
+    remainingInterfaces += ptrViEBase->Release();
+    if (remainingInterfaces > 0)
+    {
+        printf("ERROR: Could not release all interfaces\n");
+        return -1;
+    }
+	
+    bool deleted = VideoEngine::Delete(ptrViE);
+    if (deleted == false)
+    {
+        printf("ERROR in VideoEngine::Delete\n");
+        return -1;
+    }
+	
+	NSLog(@"Finished function");
+    return 0;
+	
+    //
+    // END:  VideoEngine 3.0 Sample Code
+    //
+    // ===================================================================
+}
+
+
+
+
+-(IBAction)handleRestart:(id)sender
+{
+//	[self stopLooback];
+//	[self startLoopback];
+	[self ioLooback];
+}
+@end
diff --git a/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUI_Prefix.pch b/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUI_Prefix.pch
new file mode 100644
index 0000000..72b5870
--- /dev/null
+++ b/src/video_engine/main/test/SimpleCocoaGUI/SimpleCocoaGUI_Prefix.pch
@@ -0,0 +1,7 @@
+//

+// Prefix header for all source files of the 'SimpleCocoaGUI' target in the 'SimpleCocoaGUI' project

+//

+

+#ifdef __OBJC__

+    #import <Cocoa/Cocoa.h>

+#endif

diff --git a/src/video_engine/main/test/SimpleCocoaGUI/main.m b/src/video_engine/main/test/SimpleCocoaGUI/main.m
new file mode 100644
index 0000000..9d52a1c
--- /dev/null
+++ b/src/video_engine/main/test/SimpleCocoaGUI/main.m
@@ -0,0 +1,12 @@
+//
+//  main.m
+//  SimpleCocoaGUI
+//
+//
+
+#import <Cocoa/Cocoa.h>
+
+int main(int argc, char *argv[])
+{
+    return NSApplicationMain(argc,  (const char **) argv);
+}
diff --git a/src/video_engine/main/test/WindowsTest/Capture.rc b/src/video_engine/main/test/WindowsTest/Capture.rc
new file mode 100644
index 0000000..962256c
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/Capture.rc
@@ -0,0 +1,255 @@
+// Microsoft Visual C++ generated resource script.

+//

+#include "resource.h"

+

+#define APSTUDIO_READONLY_SYMBOLS

+/////////////////////////////////////////////////////////////////////////////

+//

+// Generated from the TEXTINCLUDE 2 resource.

+//

+#include "afxres.h"

+

+/////////////////////////////////////////////////////////////////////////////

+#undef APSTUDIO_READONLY_SYMBOLS

+

+/////////////////////////////////////////////////////////////////////////////

+// Korean resources

+

+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_KOR)

+#ifdef _WIN32

+LANGUAGE LANG_KOREAN, SUBLANG_DEFAULT

+#pragma code_page(949)

+#endif //_WIN32

+

+#ifdef APSTUDIO_INVOKED

+/////////////////////////////////////////////////////////////////////////////

+//

+// TEXTINCLUDE

+//

+

+1 TEXTINCLUDE 

+BEGIN

+    "resource.h\0"

+END

+

+2 TEXTINCLUDE 

+BEGIN

+    "#include ""afxres.h""\r\n"

+    "\0"

+END

+

+3 TEXTINCLUDE 

+BEGIN

+    "#define _AFX_NO_SPLITTER_RESOURCES\r\n"

+    "#define _AFX_NO_OLE_RESOURCES\r\n"

+    "#define _AFX_NO_TRACKER_RESOURCES\r\n"

+    "#define _AFX_NO_PROPERTY_RESOURCES\r\n"

+    "\r\n"

+    "#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_KOR)\r\n"

+    "#ifdef _WIN32\r\n"

+    "LANGUAGE 18, 1\r\n"

+    "#pragma code_page(949)\r\n"

+    "#endif //_WIN32\r\n"

+    "#include ""res\\Capture.rc2""  // non-Microsoft Visual C++ edited resources\r\n"

+    "#include ""l.kor\\afxres.rc""          // Standard components\r\n"

+    "#endif\r\n"

+    "\0"

+END

+

+#endif    // APSTUDIO_INVOKED

+

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// Version

+//

+

+VS_VERSION_INFO VERSIONINFO

+ FILEVERSION 1,0,0,1

+ PRODUCTVERSION 1,0,0,1

+ FILEFLAGSMASK 0x3fL

+#ifdef _DEBUG

+ FILEFLAGS 0x1L

+#else

+ FILEFLAGS 0x0L

+#endif

+ FILEOS 0x4L

+ FILETYPE 0x1L

+ FILESUBTYPE 0x0L

+BEGIN

+END

+

+#endif    // Korean resources

+/////////////////////////////////////////////////////////////////////////////

+

+

+/////////////////////////////////////////////////////////////////////////////

+// Swedish resources

+

+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_SVE)

+#ifdef _WIN32

+LANGUAGE LANG_SWEDISH, SUBLANG_DEFAULT

+#pragma code_page(1252)

+#endif //_WIN32

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// Dialog

+//

+

+IDD_SLAVE_CHANNEL DIALOGEX 0, 0, 677, 358

+STYLE DS_SETFONT | DS_MODALFRAME | DS_FIXEDSYS | WS_POPUP | WS_CAPTION | WS_SYSMENU

+CAPTION "Slave channel"

+FONT 8, "MS Shell Dlg", 400, 0, 0x1

+BEGIN

+    CONTROL         "",IDC_IPADDRESS1,"SysIPAddress32",WS_TABSTOP,485,18,105,15

+    EDITTEXT        IDC_LOCAL_PORT1,631,18,36,16,ES_AUTOHSCROLL

+    LTEXT           "IP-address",IDC_STATIC,495,7,42,9

+    LTEXT           "Local Port",IDC_STATIC,633,7,37,9

+    EDITTEXT        IDC_REMOTE_PORT1,593,18,36,16,ES_AUTOHSCROLL

+    LTEXT           "Port",IDC_STATIC,595,7,17,9

+    CONTROL         "Ext",IDC_EXTTRANSPORT,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,489,41,29,12

+    LTEXT           "delay",IDC_STATIC,589,41,21,9

+    COMBOBOX        IDC_PACKETLOSS,535,40,45,82,CBS_DROPDOWN | WS_DISABLED | WS_VSCROLL | WS_TABSTOP

+    COMBOBOX        IDC_DELAY,611,40,45,82,CBS_DROPDOWN | WS_DISABLED | WS_VSCROLL | WS_TABSTOP

+END

+

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// DESIGNINFO

+//

+

+#ifdef APSTUDIO_INVOKED

+GUIDELINES DESIGNINFO 

+BEGIN

+    IDD_SLAVE_CHANNEL, DIALOG

+    BEGIN

+        LEFTMARGIN, 7

+        RIGHTMARGIN, 670

+        TOPMARGIN, 7

+        BOTTOMMARGIN, 351

+    END

+END

+#endif    // APSTUDIO_INVOKED

+

+#endif    // Swedish resources

+/////////////////////////////////////////////////////////////////////////////

+

+

+/////////////////////////////////////////////////////////////////////////////

+// Neutral (Sys. Default) resources

+

+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_NEUSD)

+#ifdef _WIN32

+LANGUAGE LANG_NEUTRAL, SUBLANG_SYS_DEFAULT

+#pragma code_page(1252)

+#endif //_WIN32

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// Dialog

+//

+

+IDD_DXQUALITY_DIALOG DIALOGEX 0, 0, 699, 385

+STYLE DS_ABSALIGN | DS_SETFONT | DS_MODALFRAME | DS_3DLOOK | WS_MINIMIZEBOX | WS_POPUP | WS_VISIBLE | WS_CAPTION | WS_SYSMENU

+EXSTYLE WS_EX_WINDOWEDGE | WS_EX_STATICEDGE | WS_EX_APPWINDOW | WS_EX_NOINHERITLAYOUT

+CAPTION "webrtc ViE test program"

+FONT 9, "Arial", 400, 0, 0x0

+BEGIN

+    PUSHBUTTON      "Start Send",IDC_STARTSEND,589,270,50,19

+    PUSHBUTTON      "Stop Send",IDC_STOPSend,639,270,50,19

+    PUSHBUTTON      "Start Listen",IDC_STARTLISTEN,589,291,50,19

+    PUSHBUTTON      "StopListen",IDC_STOPLISTEN,639,291,50,19

+    CONTROL         "",IDC_LIVEVIDEO,"Static",SS_BITMAP | SS_CENTERIMAGE | SS_SUNKEN,450,179,139,101

+    COMBOBOX        IDC_DEVICE,487,14,185,30,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP

+    CTEXT           "Select Capture Device",IDC_STATIC,485,7,78,8

+    COMBOBOX        IDC_CODEC_LIST,490,90,58,30,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP

+    LTEXT           "Codec",IDC_STATIC,490,82,21,8

+    COMBOBOX        IDC_CODEC_SIZE,627,90,62,30,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP

+    LTEXT           "Codec Size",IDC_STATIC,611,82,36,8

+    CONTROL         "",IDC_IPADDRESS1,"SysIPAddress32",WS_TABSTOP,490,46,90,13

+    EDITTEXT        IDC_LOCAL_PORT1,615,46,31,14,ES_AUTOHSCROLL

+    LTEXT           "IP-address",IDC_STATIC,498,37,36,8

+    LTEXT           "Local Port",IDC_STATIC,616,36,32,8

+    LTEXT           "Start Bitrate",IDC_STATIC,553,80,37,8

+    COMBOBOX        IDC_BITRATE,558,90,49,30,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP

+    EDITTEXT        IDC_REMOTE_PORT1,582,46,31,14,ES_AUTOHSCROLL

+    LTEXT           "Port",IDC_STATIC,584,37,14,8

+    GROUPBOX        "Remote client 1",IDC_STATIC,487,27,203,50

+    LTEXT           "Max FrameRate",IDC_STATIC,488,106,50,8

+    COMBOBOX        IDC_MIN_FRAME_RATE,488,115,48,82,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP

+    CONTROL         "",IDC_CAPTURE,"Static",SS_BITMAP | SS_CENTERIMAGE | SS_REALSIZEIMAGE,7,7,418,276

+    CONTROL         "TMMBR",IDC_TMMBR,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,486,138,40,10

+    GROUPBOX        "Standard Protection",IDC_STATIC,607,138,72,55

+    CONTROL         "None",IDC_PROT_NONE,"Button",BS_AUTORADIOBUTTON | WS_GROUP,615,146,33,10

+    CONTROL         "NACK",IDC_PROT_NACK,"Button",BS_AUTORADIOBUTTON,615,165,35,10

+    CONTROL         "FEC",IDC_PROT_FEC,"Button",BS_AUTORADIOBUTTON,615,155,30,10

+    CONTROL         "NACK & FEC",IDC_PROT_NACKFEC,"Button",BS_AUTORADIOBUTTON,615,174,52,10

+    COMBOBOX        IDC_RTCPMODE,571,119,80,57,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP

+    LTEXT           "RTCP Mode",IDC_STATIC,571,110,39,8

+    LISTBOX         IDC_INFORMATION,476,309,214,63,LBS_SORT | LBS_NOINTEGRALHEIGHT | WS_VSCROLL | WS_TABSTOP

+    COMBOBOX        IDC_PACKETBURST,653,118,36,57,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP

+    LTEXT           "Packet Burst",IDC_STATIC,649,109,40,8

+    CONTROL         "Stop Log",IDC_FREEZELOG,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,465,292,44,10

+    PUSHBUTTON      "Version",IDC_VERSION,530,291,55,16

+    CONTROL         "Ext",IDC_EXTTRANSPORT,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,493,66,25,10

+    LTEXT           "loss",IDC_STATIC,519,66,15,8

+    LTEXT           "delay",IDC_STATIC,578,66,18,8

+    COMBOBOX        IDC_PACKETLOSS,533,65,38,82,CBS_DROPDOWN | WS_DISABLED | WS_VSCROLL | WS_TABSTOP

+    COMBOBOX        IDC_DELAY,598,65,38,82,CBS_DROPDOWN | WS_DISABLED | WS_VSCROLL | WS_TABSTOP

+    PUSHBUTTON      "Record Incoming",IDC_BTN_RECORD_INCOMING,587,198,69,14

+    PUSHBUTTON      "Record outgoing",IDC_BTN_RECORD_OUTGOING,587,212,69,14

+    PUSHBUTTON      "Create Slave",IDC_BTN_CREATE_SLAVE,586,231,50,14

+END

+

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// DESIGNINFO

+//

+

+#ifdef APSTUDIO_INVOKED

+GUIDELINES DESIGNINFO 

+BEGIN

+    IDD_DXQUALITY_DIALOG, DIALOG

+    BEGIN

+        LEFTMARGIN, 7

+        RIGHTMARGIN, 690

+        VERTGUIDE, 321

+        VERTGUIDE, 372

+        VERTGUIDE, 425

+        VERTGUIDE, 465

+        TOPMARGIN, 7

+        BOTTOMMARGIN, 372

+    END

+END

+#endif    // APSTUDIO_INVOKED

+

+#endif    // Neutral (Sys. Default) resources

+/////////////////////////////////////////////////////////////////////////////

+

+

+

+#ifndef APSTUDIO_INVOKED

+/////////////////////////////////////////////////////////////////////////////

+//

+// Generated from the TEXTINCLUDE 3 resource.

+//

+#define _AFX_NO_SPLITTER_RESOURCES

+#define _AFX_NO_OLE_RESOURCES

+#define _AFX_NO_TRACKER_RESOURCES

+#define _AFX_NO_PROPERTY_RESOURCES

+

+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_KOR)

+#ifdef _WIN32

+LANGUAGE 18, 1

+#pragma code_page(949)

+#endif //_WIN32

+#include "res\Capture.rc2"  // non-Microsoft Visual C++ edited resources

+#include "l.kor\afxres.rc"          // Standard components

+#endif

+

+/////////////////////////////////////////////////////////////////////////////

+#endif    // not APSTUDIO_INVOKED

+

diff --git a/src/video_engine/main/test/WindowsTest/CaptureDevicePool.cc b/src/video_engine/main/test/WindowsTest/CaptureDevicePool.cc
new file mode 100644
index 0000000..d666aa7
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/CaptureDevicePool.cc
@@ -0,0 +1,93 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "CaptureDevicePool.h"
+#include "map_wrapper.h"
+#include <string.h>
+#include <assert.h>
+#include "critical_section_wrapper.h"
+#include "vie_file.h"
+
+CaptureDevicePool::CaptureDevicePool(VideoEngine* videoEngine):
+_critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+_vieCapture(ViECapture::GetInterface(videoEngine)),
+_vieFile(ViEFile::GetInterface(videoEngine))
+{
+}
+
+CaptureDevicePool::~CaptureDevicePool(void)
+{
+    assert(_deviceMap.Size()==0);
+    _vieCapture->Release();
+    _vieFile->Release();
+    delete &_critSect;
+}
+
+WebRtc_Word32 CaptureDevicePool::GetCaptureDevice(int& captureId, const char* uniqeDeviceName)
+{
+    CriticalSectionScoped cs(_critSect);
+    DeviceItem* device=NULL;
+    
+    for(MapItem* item=_deviceMap.First();
+        item!=NULL;
+        item=_deviceMap.Next(item))
+    {
+        //Found the device?
+        if(strcmp(uniqeDeviceName,(static_cast<DeviceItem*>( item->GetItem()))->uniqeDeviceName)==0)
+        {
+            device=static_cast<DeviceItem*>( item->GetItem());
+            device->refCount++;
+            captureId=device->captureId;
+            return 0;
+        }
+    }
+    device = new DeviceItem;
+    strncpy(device->uniqeDeviceName,uniqeDeviceName,255);
+
+
+    // Device does not exist. Create it.
+    WebRtc_Word32 result=_vieCapture->AllocateCaptureDevice(device->uniqeDeviceName,strlen(device->uniqeDeviceName),device->captureId);
+    if(result==0)
+    {
+        result=_vieFile->SetCaptureDeviceImage(device->captureId,
+                            "./main/test/WindowsTest/captureDeviceImage.jpg");
+    }
+    captureId=device->captureId;
+    _deviceMap.Insert(captureId,device);
+    device->refCount++;
+    
+    return result;
+
+
+}
+WebRtc_Word32 CaptureDevicePool::ReturnCaptureDevice(int captureId)
+{
+    CriticalSectionScoped cs(_critSect);
+
+    MapItem* mapItem=_deviceMap.Find(captureId);
+    if(!mapItem)
+        return -1;
+
+    DeviceItem* item=static_cast<DeviceItem*> (mapItem->GetItem());
+    if(!item)
+        return 0;
+    item->refCount--;
+    WebRtc_Word32 result=0;
+
+    if(item->refCount==0)
+    {
+        result=_vieCapture->ReleaseCaptureDevice(captureId);
+        
+        _deviceMap.Erase(mapItem);
+        delete item;
+
+    }
+    return result;
+}
diff --git a/src/video_engine/main/test/WindowsTest/CaptureDevicePool.h b/src/video_engine/main/test/WindowsTest/CaptureDevicePool.h
new file mode 100644
index 0000000..104b84f
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/CaptureDevicePool.h
@@ -0,0 +1,49 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#pragma once
+
+#include "common_types.h"
+
+#include "vie_base.h"
+#include "vie_capture.h"
+#include "vie_file.h"
+#include "map_wrapper.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+}
+using namespace webrtc;
+class CaptureDevicePool
+{
+public:
+    CaptureDevicePool(VideoEngine* videoEngine);
+    ~CaptureDevicePool(void);
+    WebRtc_Word32 GetCaptureDevice(int& captureId, const char uniqeDeviceName[256]);
+    WebRtc_Word32 ReturnCaptureDevice(int captureId);
+
+    private: 
+        struct DeviceItem
+        {
+            int captureId;
+            WebRtc_Word32 refCount;
+            char uniqeDeviceName[256];
+            DeviceItem()
+            {
+                captureId=-1;
+                refCount=0;
+            }
+        };
+        CriticalSectionWrapper& _critSect;
+        ViECapture* _vieCapture;
+        ViEFile*    _vieFile;
+        MapWrapper _deviceMap;
+
+};
diff --git a/src/video_engine/main/test/WindowsTest/ChannelDlg.cc b/src/video_engine/main/test/WindowsTest/ChannelDlg.cc
new file mode 100644
index 0000000..50ae26f
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/ChannelDlg.cc
@@ -0,0 +1,1271 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "ChannelDlg.h"
+#include "VideoSize.h"
+#include "CaptureDevicePool.h"
+#include "ChannelPool.h"
+
+#include <Mmsystem.h>
+#include <dbt.h>
+
+
+#include "assert.h"
+
+
+#include <process.h> // threads.
+
+#if defined _WIN32
+    #define SLEEP_10_SEC ::Sleep(10000)
+    #define GET_TIME_IN_MS timeGetTime
+#endif
+
+// Hack to convert char to TCHAR, using two buffers to be able to
+// call twice in the same statement
+TCHAR convertTemp1[256] = {0};
+TCHAR convertTemp2[256] = {0};
+bool convertBufferSwitch(false);
+TCHAR* CharToTchar(const char* str, int len)
+{
+#ifdef _UNICODE
+  TCHAR* temp = convertBufferSwitch ? convertTemp1 : convertTemp2;
+  convertBufferSwitch = !convertBufferSwitch;
+  memset(temp, 0, sizeof(convertTemp1));
+  MultiByteToWideChar(CP_UTF8, 0, str, len, temp, 256);
+  return temp;
+#else
+  return str;
+#endif
+}
+
+// Hack to convert TCHAR to char
+char convertTemp3[256] = {0};
+char* TcharToChar(TCHAR* str, int len)
+{
+#ifdef _UNICODE
+  memset(convertTemp3, 0, sizeof(convertTemp3));
+  WideCharToMultiByte(CP_UTF8, 0, str, len, convertTemp3, 256, 0, 0);
+  return convertTemp3;
+#else
+  return str;
+#endif
+}
+
+/////////////////////////////////////////////////////////////////////////////
+// CDXChannelDlg dialog
+
+CDXChannelDlg::CDXChannelDlg(VideoEngine* videoEngine,
+                             CaptureDevicePool& captureDevicePool,
+                             ChannelPool& channelPool,
+    void* voiceEngine
+    ,CWnd* pParent,CDXChannelDlgObserver* observer,
+    int parentChannel/*=-1*/)
+  : CDialog(CDXChannelDlg::IDD, pParent),
+    _canAddLog(true),
+    _dialogObserver(observer),
+    _videoEngine(videoEngine),
+    _captureDevicePool(captureDevicePool),
+    _channelPool(channelPool),
+    _parentChannel(parentChannel),
+#ifndef NO_VOICE_ENGINE
+    _voiceEngine((VoiceEngine*) voiceEngine),
+#endif
+    _callbackEvent(::CreateEvent( NULL, FALSE, FALSE, NULL)),
+    _externalTransport(NULL)
+{
+    strcpy(_logMsg,"");
+    _channelId = -1;
+    _audioChannel=-1;
+    _captureId=-1;
+
+    //{{AFX_DATA_INIT(CDXChannelDlg)
+    //}}AFX_DATA_INIT
+    // Note that LoadIcon does not require a subsequent DestroyIcon in Win32
+
+    InitializeCriticalSection(&_critCallback);
+    unsigned int threadID;
+    _callbackThread=(HANDLE)_beginthreadex(NULL,1024*1024,CallbackThread,(void*)this,0, &threadID);
+}
+
+void CDXChannelDlg::DoDataExchange(CDataExchange* pDX)
+{
+  CDialog::DoDataExchange(pDX);
+  //{{AFX_DATA_MAP(CDXChannelDlg)
+  DDX_Control(pDX, IDC_DEVICE, m_ctrlDevice);
+  DDX_Control(pDX, IDC_CODEC_LIST, m_ctrlCodec);
+  DDX_Control(pDX, IDC_CAPTURE, m_ctrlLiveRemoteVideo);
+  DDX_Control(pDX, IDC_LIVEVIDEO, m_ctrlLiveVideo);
+  DDX_Control(pDX, IDC_LOCAL_PORT1, m_localPort1);
+  DDX_Control(pDX, IDC_REMOTE_PORT1, m_remotePort1);
+  DDX_Control(pDX, IDC_IPADDRESS1, m_remoteIp1);
+  DDX_Control(pDX, IDC_CODEC_SIZE, m_ctrlCodecSize);
+  DDX_Control(pDX, IDC_RTCPMODE, m_ctrlRtcpMode);
+  DDX_Control(pDX, IDC_PACKETBURST, m_ctrlPacketBurst);
+  DDX_Control(pDX, IDC_BITRATE, m_ctrlBitrate);
+  DDX_Control(pDX, IDC_MIN_FRAME_RATE, m_ctrlMinFrameRate);
+  DDX_Control(pDX, IDC_TMMBR,m_cbTmmbr);
+  DDX_Control(pDX, IDC_EXTTRANSPORT,m_cbExternalTransport);
+  DDX_Control(pDX, IDC_PACKETLOSS,m_ctrlPacketLoss);
+  DDX_Control(pDX, IDC_DELAY,m_ctrlDelay);
+  DDX_Control(pDX, IDC_FREEZELOG,m_cbFreezeLog);
+  DDX_Control(pDX,IDC_INFORMATION,m_ctrlInfo);
+  //}}AFX_DATA_MAP
+}
+
+// ON_WM_SYSKEYDOWN      ALT+key
+
+BEGIN_MESSAGE_MAP(CDXChannelDlg, CDialog)
+  //{{AFX_MSG_MAP(CDXChannelDlg)
+  ON_WM_SYSCOMMAND()
+  ON_WM_RBUTTONUP()
+  //ON_WM_DEVICECHANGE()
+  ON_WM_PAINT()
+  ON_WM_QUERYDRAGICON()
+  ON_BN_CLICKED(IDC_STARTSEND, OnStartSend)
+  ON_BN_CLICKED(IDC_STOPSend, OnStopSend)
+  //ON_WM_TIMER()
+  ON_WM_DESTROY()
+  //}}AFX_MSG_MAP
+  ON_CBN_SELCHANGE(IDC_CODEC_LIST, OnCbnSelchangeCodecList)
+  ON_CBN_SELCHANGE(IDC_DEVICE, OnCbnSelchangeDevice)
+  ON_CBN_SELCHANGE(IDC_CODEC_SIZE, OnCbnSelchangeSize)
+  ON_CBN_SELCHANGE(IDC_BITRATE, OnCbnSelchangeBitrate)
+  //ON_MESSAGE(WM_DISPLAYCHANGE, OnDisplayChange)
+  ON_CBN_SELCHANGE(IDC_MIN_FRAME_RATE, OnCbnSelchangeMinFrameRate)
+  ON_BN_CLICKED(IDC_STARTLISTEN, OnBnClickedStartlisten)
+  ON_BN_CLICKED(IDC_STOPLISTEN, OnBnClickedStoplisten)
+  ON_BN_CLICKED(IDC_TMMBR, &CDXChannelDlg::OnBnClickedTmmbr)
+  ON_CBN_SELCHANGE(IDC_RTCPMODE, &CDXChannelDlg::OnCbnSelchangeRtcpmode)
+  ON_BN_CLICKED(IDC_PROT_NACK, &CDXChannelDlg::OnBnClickedProtNack)
+  ON_BN_CLICKED(IDC_PROT_NONE, &CDXChannelDlg::OnBnClickedProtNone)
+  ON_BN_CLICKED(IDC_PROT_FEC, &CDXChannelDlg::OnBnClickedProtFec)
+  ON_BN_CLICKED(IDC_FREEZELOG, &CDXChannelDlg::OnBnClickedFreezelog)
+  ON_BN_CLICKED(IDC_VERSION, &CDXChannelDlg::OnBnClickedVersion)
+  ON_BN_CLICKED(IDC_EXTTRANSPORT, &CDXChannelDlg::OnBnClickedExttransport)
+  ON_CBN_SELCHANGE(IDC_PACKETLOSS, &CDXChannelDlg::OnCbnSelchangePacketloss)
+  ON_CBN_SELCHANGE(IDC_DELAY, &CDXChannelDlg::OnCbnSelchangeDelay)
+  ON_BN_CLICKED(IDC_BTN_RECORD_INCOMING, &CDXChannelDlg::OnBnClickedBtnRecordIncoming)
+  ON_BN_CLICKED(IDC_BTN_RECORD_OUTGOING, &CDXChannelDlg::OnBnClickedBtnRecordOutgoing)
+  ON_BN_CLICKED(IDC_BTN_CREATE_SLAVE, &CDXChannelDlg::OnBnClickedBtnCreateSlave)
+  ON_BN_CLICKED(IDC_PROT_NACKFEC, &CDXChannelDlg::OnBnClickedProtNackFec)
+END_MESSAGE_MAP()
+
+/////////////////////////////////////////////////////////////////////////////
+// CDXChannelDlg message handlers
+
+
+BOOL CDXChannelDlg::OnInitDialog()
+{
+  CDialog::OnInitDialog();
+
+  // Set the icon for this dialog.  The framework does this automatically
+  //  when the application's main window is not a dialog
+  SetIcon(m_hIcon, TRUE);      // Set big icon
+  SetIcon(m_hIcon, FALSE);    // Set small icon
+
+
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("5"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("6"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("7"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("8"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("9"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("10"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("11"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("12"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("13"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("14"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("15"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("16"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("17"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("18"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("19"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("20"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("21"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("22"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("23"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("24"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("25"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("26"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("27"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("28"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("29"));
+  ::SendMessage(m_ctrlMinFrameRate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("30"));
+  m_ctrlMinFrameRate.SetCurSel(25);
+
+  // Codec sizes
+  for(VideoSize i=UNDEFINED;i<NUMBER_OF_VIDEO_SIZE;i=VideoSize(i+1))
+  {
+    char sizeStr[64];
+    int width=0;
+    int height=0;
+    GetWidthHeight(i,width,height);
+    sprintf(sizeStr,"%d x %d",width,height);
+    ::SendMessage(m_ctrlCodecSize.m_hWnd, CB_ADDSTRING, 0,(LPARAM) CharToTchar(sizeStr,-1));
+  }
+  m_ctrlCodecSize.SetCurSel(8);
+
+  // RTCP mode
+  /*
+  kRtcpNone     = 0,
+  kRtcpCompound_RFC4585     = 1,
+  kRtcpNonCompound_RFC5506 = 2 */
+  ::SendMessage(m_ctrlRtcpMode.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("RTCP_NONE"));
+  ::SendMessage(m_ctrlRtcpMode.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("RTCP_COMPOUND_RFC4585"));
+  ::SendMessage(m_ctrlRtcpMode.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("RTCP_NON_COMPOUND_RFC5506"));
+  m_ctrlRtcpMode.SetCurSel(2);
+
+
+  //Packet Burst
+  ::SendMessage(m_ctrlPacketBurst.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("0"));
+  ::SendMessage(m_ctrlPacketBurst.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("10"));
+  ::SendMessage(m_ctrlPacketBurst.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("20"));
+  ::SendMessage(m_ctrlPacketBurst.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("30"));
+  m_ctrlPacketBurst.SetCurSel(0);
+
+
+  //Send Bitrate
+  ::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("50"));
+  ::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("100"));
+  ::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("200"));
+  ::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("300"));
+  ::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("500"));
+  ::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("1000"));
+  ::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("2000"));
+  ::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("3000"));
+  ::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("4000"));
+  ::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("5000"));
+  ::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("6000"));
+  ::SendMessage(m_ctrlBitrate.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("7000"));
+
+  m_ctrlBitrate.SetCurSel(3);
+
+  // External transport packet loss
+  ::SendMessage(m_ctrlPacketLoss.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("0"));
+  ::SendMessage(m_ctrlPacketLoss.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("2"));
+  ::SendMessage(m_ctrlPacketLoss.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("4"));
+  ::SendMessage(m_ctrlPacketLoss.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("6"));
+  ::SendMessage(m_ctrlPacketLoss.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("8"));
+  ::SendMessage(m_ctrlPacketLoss.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("10"));
+  ::SendMessage(m_ctrlPacketLoss.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("12"));
+  ::SendMessage(m_ctrlPacketLoss.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("14"));
+  ::SendMessage(m_ctrlPacketLoss.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("16"));
+  ::SendMessage(m_ctrlPacketLoss.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("18"));
+  ::SendMessage(m_ctrlPacketLoss.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("20"));
+  m_ctrlPacketLoss.SetCurSel(0);
+
+  // External transport delay
+  ::SendMessage(m_ctrlDelay.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("0"));
+  ::SendMessage(m_ctrlDelay.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("30"));
+  ::SendMessage(m_ctrlDelay.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("60"));
+  ::SendMessage(m_ctrlDelay.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("90"));
+  ::SendMessage(m_ctrlDelay.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("120"));
+  ::SendMessage(m_ctrlDelay.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("150"));
+  ::SendMessage(m_ctrlDelay.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("180"));
+  ::SendMessage(m_ctrlDelay.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("210"));
+  m_ctrlDelay.SetCurSel(0);
+
+  _vieBase=ViEBase::GetInterface(_videoEngine);
+  TEST_MUSTPASS(_vieBase==0,-5);
+
+  _vieCapture=ViECapture::GetInterface(_videoEngine);
+  TEST_MUSTPASS(_vieCapture==0,-5);
+
+  _vieRTPRTCP=ViERTP_RTCP::GetInterface(_videoEngine);
+  TEST_MUSTPASS(_vieRTPRTCP==0,-5);
+
+  _vieRender=ViERender::GetInterface(_videoEngine);
+  TEST_MUSTPASS(_vieRender==0,-5);
+
+  _vieCodec=ViECodec::GetInterface(_videoEngine);
+  TEST_MUSTPASS(_vieCodec==0,-5);
+  _vieNetwork=ViENetwork::GetInterface(_videoEngine);
+  TEST_MUSTPASS(_vieNetwork==0,-5);
+
+  _vieFile=ViEFile::GetInterface(_videoEngine);
+  TEST_MUSTPASS(_vieFile==0,-5);
+
+#ifndef NO_VOICE_ENGINE
+
+  _veBase = VoEBase::GetInterface(_voiceEngine);
+  _veNetwork = VoENetwork::GetInterface(_voiceEngine);
+  _veCodec = VoECodec::GetInterface(_voiceEngine);
+  _veRTCP = VoERTP_RTCP::GetInterface(_voiceEngine);
+  TEST_MUSTPASS(_vieBase->SetVoiceEngine(_voiceEngine),-5);
+#endif
+
+  char str[64];
+  bool found = false;
+
+  int captureIdx = 0;
+  while (-1 !=_vieCapture->GetCaptureDevice(captureIdx,str,sizeof(str),NULL,0))
+  {
+    char* tmp = strstr(str,"(VFW)");
+    if (!tmp)
+    {
+      ::SendMessage(m_ctrlDevice.m_hWnd, CB_ADDSTRING, 0,(LPARAM)CharToTchar(str,-1));
+      found = true;
+    }
+    captureIdx++;
+    memset(str, 0, 64);
+  }
+  WIN32_FIND_DATA FindFileData;
+  HANDLE hFind;
+  //char fileSearch[256];
+  //strcpy(fileSearch,_T("*.avi"));
+  hFind = FindFirstFile(_T("*.avi"), &FindFileData);
+  if (hFind != INVALID_HANDLE_VALUE)
+  {
+    ::SendMessage(m_ctrlDevice.m_hWnd, CB_ADDSTRING, 0,(LPARAM)(FindFileData.cFileName));
+    while(FindNextFile(hFind,&FindFileData))
+    {
+      ::SendMessage(m_ctrlDevice.m_hWnd, CB_ADDSTRING, 0,(LPARAM)(FindFileData.cFileName));
+    }
+    FindClose(hFind);
+  }
+
+  ::SendMessage(m_ctrlDevice.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("Conference"));
+  ::SendMessage(m_ctrlDevice.m_hWnd, CB_ADDSTRING, 0,(LPARAM)_T("None"));
+
+  if (!found)
+  {
+    strncpy(str,"N/A",64);
+    ::SendMessage(m_ctrlDevice.m_hWnd, CB_ADDSTRING, 0,(LPARAM)CharToTchar(str,-1));
+  }
+  m_ctrlDevice.SetCurSel(0);
+
+  //Codecs
+  int numOfCodecs = _vieCodec->NumberOfCodecs();
+  for(int i=0; i<numOfCodecs;++i)
+  {
+    VideoCodec codec;
+    if(-1 !=_vieCodec->GetCodec(i,codec))
+    {
+      ::SendMessage(m_ctrlCodec.m_hWnd, CB_ADDSTRING, 0,(LPARAM)CharToTchar(codec.plName,-1));
+    }
+  }
+  m_ctrlCodec.SetCurSel(0);
+
+#ifndef NO_VOICE_ENGINE
+  CodecInst voiceCodec;
+  int numOfVeCodecs = _veCodec->NumOfCodecs();
+  for(int i=0; i<numOfVeCodecs;++i)
+  {
+    if(_veCodec->GetCodec(i,voiceCodec)!=-1)
+    {
+      if(strncmp(voiceCodec.plname,"ISAC",4)==0)
+        break;
+    }
+  }
+
+  _audioChannel = _veBase->CreateChannel();
+
+  TEST_MUSTPASS(_veRTCP->SetRTCPStatus(_audioChannel, true),-5);
+  TEST_MUSTPASS(_veCodec->SetSendCodec(_audioChannel, voiceCodec),-5);
+  TEST_MUSTPASS(_veBase->StartPlayout(_audioChannel),-5);
+#endif  //NO_VOICE_ENGINE
+
+  if(_parentChannel==-1)
+  {
+    TEST_MUSTPASS(_vieBase->CreateChannel(_channelId),-5);
+  }
+  else // This is a slave channel
+  {
+    TEST_MUSTPASS(_vieBase->CreateChannel(_channelId,_parentChannel),-5);
+  }
+#ifndef NO_VOICE_ENGINE
+  TEST_MUSTPASS(_vieBase->ConnectAudioChannel(_channelId,_audioChannel),-5);
+#endif
+
+  _channelPool.AddChannel(_channelId);
+
+  //Set Receive codec
+  {
+    VideoCodec codec;
+    int numOfCodecs = _vieCodec->NumberOfCodecs();;
+    for(int i=0; i<numOfCodecs;++i)
+    {
+      if(-1 !=_vieCodec->GetCodec(i,codec))
+      {
+        if(codec.codecType == webrtc::kVideoCodecVP8)
+        {
+          codec.codecSpecific.VP8.feedbackModeOn = true;
+          codec.codecSpecific.VP8.pictureLossIndicationOn = true;
+        }
+        TEST_MUSTPASS(_vieCodec->SetReceiveCodec(_channelId,codec),-5);
+      }
+    }
+  }
+
+  //TMMBR
+  m_cbTmmbr.SetCheck(BST_CHECKED);
+  OnBnClickedTmmbr();
+
+  //Packet Burst
+  m_ctrlPacketBurst.SetCurSel(0);
+
+
+  //Protection method none
+  CButton *opProtection = (CButton *) GetDlgItem(IDC_PROT_NONE);
+  opProtection->SetCheck(BST_CHECKED);
+  OnBnClickedProtNone();
+
+
+  // Configure the renderer
+  ConfigureRender();
+
+  TEST_MUSTPASS(_vieCodec->RegisterEncoderObserver(_channelId,*this),kViECodecObserverAlreadyRegistered);
+  TEST_MUSTPASS(_vieCodec->RegisterDecoderObserver(_channelId,*this),-5);
+
+  TEST_MUSTPASS(_vieBase->RegisterObserver(*this),kViEBaseObserverAlreadyRegistered);
+
+
+
+
+  //Set captions based on channel id
+  m_remoteIp1.SetAddress(127,0,0,1);
+  CString port;
+  port.AppendFormat(_T("%d"),11111+_channelId*4);
+  m_remotePort1.SetWindowText(port);
+  m_localPort1.SetWindowText(port);
+
+  CString title;
+  this->GetWindowText(title);
+  if(_parentChannel==-1)
+  {
+    title.AppendFormat(_T("%s - channel %d"),title,_channelId);
+  }
+  else
+  {
+    title.AppendFormat(_T("%s - slave channel %d - parent %d"),title,_channelId,_parentChannel);
+  }
+  this->SetWindowText(title);
+
+  if(_parentChannel!=-1)
+    m_ctrlDevice.EnableWindow(FALSE); //Prevent from changing capture device
+
+  return TRUE;  // return TRUE  unless you set the focus to a control
+}
+
+
+
+void CDXChannelDlg::OnTimer(UINT nIDEvent)
+{
+  CDialog::OnTimer(nIDEvent);
+}
+
+void CDXChannelDlg::SetSendCodec()
+{
+    // Get the codec stucture
+    int codecSel= m_ctrlCodec.GetCurSel();
+    VideoCodec codec;
+    TEST_MUSTPASS(_vieCodec->GetCodec(codecSel,codec),-5);
+
+
+    // Set Codec Size
+    VideoSize sizeSel=VideoSize(m_ctrlCodecSize.GetCurSel());
+    int width, height;
+    GetWidthHeight(sizeSel, width, height);
+    codec.width=width;
+    codec.height=height;
+
+    //Set the codec bitrate
+    CString bitrateStr;
+  m_ctrlBitrate.GetLBText(m_ctrlBitrate.GetCurSel(), bitrateStr);
+    int bitrate = _ttoi(bitrateStr.GetBuffer(0));
+    if(codec.codecType!=kVideoCodecI420)
+    {
+        codec.startBitrate=bitrate;
+        codec.maxBitrate=bitrate*4;
+    }
+
+
+    //Set the codec frame rate
+    codec.maxFramerate = m_ctrlMinFrameRate.GetCurSel() +5;
+
+    if(strncmp(codec.plName, "VP8", 5) == 0)
+    {
+    codec.codecSpecific.VP8.feedbackModeOn = true;
+    codec.codecSpecific.VP8.pictureLossIndicationOn = true;
+        TEST_MUSTPASS(_vieRTPRTCP->SetKeyFrameRequestMethod(_channelId, kViEKeyFrameRequestPliRtcp),-5);
+    }else
+    {
+        TEST_MUSTPASS(_vieRTPRTCP->SetKeyFrameRequestMethod(_channelId, kViEKeyFrameRequestPliRtcp),-5);
+    }
+    TEST_MUSTPASS(_vieCodec->SetSendCodec(_channelId, codec),-5);
+
+    if (codec.codecType == webrtc::kVideoCodecI420)
+    {        // Need to set the receive codec size
+        _vieCodec->SetReceiveCodec(_channelId, codec);
+    }
+}
+
+void CDXChannelDlg::SetSendDestination()
+{
+    if(_externalTransport)
+        return;
+
+    BYTE part1, part2, part3, part4;
+  char sendIP1[16];
+  m_remoteIp1.GetAddress(part1, part2, part3, part4);
+  sprintf(sendIP1,"%d.%d.%d.%d",part1,part2,part3,part4);
+
+    CString strPort;
+    m_remotePort1.GetWindowText(strPort);
+  int remotePort1 = _ttoi(strPort.GetString());
+
+  TEST_MUSTPASS(_vieNetwork->SetSendDestination(_channelId,sendIP1,remotePort1),kViENetworkAlreadySending);
+
+#ifndef NO_VOICE_ENGINE
+  m_localPort1.GetWindowText(strPort);
+  int localPort1 = _ttoi(strPort.GetString());
+  _veBase->SetLocalReceiver(_audioChannel,localPort1+2);
+  TEST_MUSTPASS(_veBase->SetSendDestination(_audioChannel, remotePort1+2, sendIP1),-5)
+#endif
+}
+
+void CDXChannelDlg::SetLocalReceiver()
+{
+    if(_externalTransport)
+        return;
+
+    CString strPort;
+  m_localPort1.GetWindowText(strPort);
+  int localPort1 = _ttoi(strPort.GetString());
+
+
+
+    // May fail because we are sending
+    TEST_MUSTPASS(_vieNetwork->SetLocalReceiver(_channelId, localPort1),-5);
+
+#ifndef NO_VOICE_ENGINE
+    _veBase->SetLocalReceiver(_audioChannel,localPort1+2);
+#endif
+}
+
+void CDXChannelDlg::SetCaptureDevice()
+{
+    if(_parentChannel!=-1) // don't accept changing input on slave channels.
+        return;
+
+    int camSel=-1;
+    camSel=m_ctrlDevice.GetCurSel();
+
+  CString captureStr;
+    //captureStr.Compare
+  m_ctrlDevice.GetLBText(camSel, captureStr);
+  if(captureStr!=_T("N/A") != 0)
+  {
+
+        TEST_MUSTPASS(_vieFile->StopPlayFile(_captureId),kViEFileNotPlaying);
+        TEST_MUSTPASS(_vieCapture->DisconnectCaptureDevice(_channelId),kViECaptureDeviceNotConnected);
+        TEST_MUSTPASS(_vieRender->RemoveRenderer(_captureId),kViERenderInvalidRenderId);
+
+        if(_captureId>=0x1001 && _captureId<0x10FF)// ID is a capture device
+        {
+            TEST_MUSTPASS(_captureDevicePool.ReturnCaptureDevice(_captureId),-5);
+        }
+
+        if(captureStr!=_T("None")==0)
+        {
+            _captureId=-1;
+        }
+        else if(_tcsstr(captureStr,_T(".avi"))!=NULL ) // Selected an AVI file
+        {
+            TEST_MUSTPASS(_vieFile->StartPlayFile(TcharToChar(captureStr.GetBuffer(),-1),_captureId,false,webrtc::kFileFormatAviFile),-5);
+            TEST_MUSTPASS(_vieRender->AddRenderer(_captureId,m_ctrlLiveVideo.m_hWnd, 0, 0.0f, 0.0f,1.0f,1.0f),-5);
+            TEST_MUSTPASS(_vieRender->StartRender(_captureId),-5);
+            TEST_MUSTPASS(_vieFile->SendFileOnChannel(_captureId,_channelId),-5);
+            TEST_MUSTPASS(_vieFile->StartPlayFileAsMicrophone(_captureId,_channelId,true),-5);
+            //TEST_MUSTPASS(_vieFile->StartPlayAudioLocally(_captureId,_channelId),-5);
+        }
+        else
+        {
+
+            char captureName[256];
+            char uniqueCaptureName[256];
+
+            TEST_MUSTPASS(_vieCapture->GetCaptureDevice(camSel,captureName,256,uniqueCaptureName,256),-5);
+
+            TEST_MUSTPASS(_captureDevicePool.GetCaptureDevice(_captureId,uniqueCaptureName),-5);
+            TEST_MUSTPASS(_vieCapture->StartCapture(_captureId),kViECaptureDeviceAlreadyStarted);
+            TEST_MUSTPASS(_vieCapture->RegisterObserver(_captureId,*this),kViECaptureObserverAlreadyRegistered);
+
+            TEST_MUSTPASS(_vieRender->AddRenderer(_captureId,m_ctrlLiveVideo.m_hWnd, 0, 0.0f, 0.0f,1.0f,1.0f),-5);
+            TEST_MUSTPASS(_vieCapture->ConnectCaptureDevice(_captureId,_channelId),-5);
+            TEST_MUSTPASS(_vieRender->StartRender(_captureId),-5);            
+        }
+    }
+
+}
+
+
+
+void CDXChannelDlg::OnBnClickedStartlisten()
+{
+
+
+    // Configure the local ports
+    SetLocalReceiver();
+
+    //Configure the remote destination- needed in order to be able to respond to RTCP messages
+    SetSendDestination();
+
+
+    #ifndef NO_VOICE_ENGINE
+        TEST_MUSTPASS(_veBase->StartReceive(_audioChannel),-5);
+    #endif
+    TEST_MUSTPASS(_vieBase->StartReceive(_channelId),-5);
+
+
+}
+
+void CDXChannelDlg::OnStartSend()
+{
+
+    // Set the send destination
+    SetSendDestination();
+
+    // Configure the local ports (Needed to be able to receive RTCP
+    //SetLocalReceiver();
+
+
+    // Set the send codec
+    SetSendCodec();
+
+    if(_captureId==-1) // If no capture device has been set.
+      SetCaptureDevice(); //Set the capture device
+
+
+
+    //TEST_MUSTPASS(_vieRTPRTCP->SetStartSequenceNumber(_channelId,1),-5);
+
+    // Start sending
+    TEST_MUSTPASS(_vieBase->StartSend(_channelId),-5);
+
+
+    #ifndef NO_VOICE_ENGINE
+        TEST_MUSTPASS(_veBase->StartSend(_audioChannel),-5);
+    #endif
+
+
+}
+
+void CDXChannelDlg::ConfigureRender()
+{
+    TEST_MUSTPASS(_vieRender->AddRenderer(_channelId,m_ctrlLiveRemoteVideo.m_hWnd, 0, 0.0f, 0.0f,1.0f,1.0f),-5);
+
+    TEST_MUSTPASS(_vieFile->SetRenderStartImage(_channelId,
+                           "./main/test/WindowsTest/renderStartImage.jpg"),-5);
+    TEST_MUSTPASS(_vieRender->StartRender(_channelId),-5);
+    TEST_MUSTPASS(_vieFile->SetRenderTimeoutImage(_channelId,
+                         "./main/test/WindowsTest/renderTimeoutImage.jpg"),-5);
+
+
+}
+
+
+void CDXChannelDlg::OnStopSend()
+{
+
+    #ifndef NO_VOICE_ENGINE
+        TEST_MUSTPASS(_veBase->StopSend(_audioChannel),-5);
+    #endif
+
+
+    TEST_MUSTPASS(_vieBase->StopSend(_channelId),kViEBaseNotSending);   // Accept error Not sending
+
+
+}
+void CDXChannelDlg::OnBnClickedStoplisten()
+{
+
+
+    #ifndef NO_VOICE_ENGINE
+        TEST_MUSTPASS(_veBase->StopReceive(_audioChannel),-5);
+    #endif
+    TEST_MUSTPASS(_vieBase->StopReceive(_channelId),-5);
+}
+
+
+void CDXChannelDlg::OnDestroy()
+{
+
+    OnStopSend();
+    OnBnClickedStoplisten();
+
+    if(_vieCapture && _parentChannel==-1)
+    {
+        _vieCapture->DisconnectCaptureDevice(_channelId);
+        _captureDevicePool.ReturnCaptureDevice(_captureId);
+    }
+    if(_vieFile && _parentChannel!=-1)
+    {
+        TEST_MUSTPASS(_vieFile->StopPlayFile(_captureId),kViEFileNotPlaying);
+    }
+
+
+
+
+    if(_videoEngine)
+  {
+        if(_parentChannel==-1)
+        {
+            _vieCodec->DeregisterEncoderObserver(_channelId);
+        }
+        _vieBase->DeleteChannel(_channelId);
+        _channelPool.RemoveChannel(_channelId);
+  }
+
+  _videoEngine = NULL;
+#ifndef NO_VOICE_ENGINE
+
+    if (_voiceEngine)
+  {
+        _veBase->DeleteChannel(_audioChannel);
+        _veBase->Release();
+        _veNetwork->Release();
+        _veCodec->Release();
+        _veRTCP->Release();
+  }
+#endif
+
+
+    strcpy(_logMsg,"");
+    SetEvent(_callbackEvent);
+    MSG msg; // Wait until the callback thread exits. Need to handle messages since the callback thread can call SendMessage when updating UI
+    while(WaitForSingleObject(_callbackThread,10)==WAIT_TIMEOUT)
+    {
+        DWORD ret = PeekMessage( &msg, NULL, 0, 0,PM_REMOVE );
+        if (ret >0)
+        {
+            TranslateMessage(&msg);
+            DispatchMessage(&msg);
+        }
+    }
+
+    CloseHandle(_callbackThread);
+    CloseHandle(_callbackEvent);
+    DeleteCriticalSection(&_critCallback);
+
+    TEST_MUSTPASS(_vieCapture->Release()<0,-5);
+    TEST_MUSTPASS(_vieRTPRTCP->Release()<0,-5);
+    TEST_MUSTPASS(_vieRender->Release()<0,-5);
+    TEST_MUSTPASS(_vieCodec->Release()<0,-5);
+    TEST_MUSTPASS(_vieNetwork->Release()<0,-5);
+    TEST_MUSTPASS(_vieFile->Release()<0,-5);
+    TEST_MUSTPASS(_vieBase->Release()<0,-5);
+
+
+
+#ifdef TEST_EXTERNAL_TRANSPORT
+  if(_transport)
+    delete _transport;
+  _transport = NULL;
+#endif
+
+    delete _externalTransport;
+
+  CDialog::OnDestroy();
+    if(_dialogObserver)
+    {
+        _dialogObserver->ChannelDialogEnded(this);
+    }
+}
+
+void CDXChannelDlg::OnCancel()
+{
+    DestroyWindow();
+}
+// If you add a minimize button to your dialog, you will need the code below
+//  to draw the icon.  For MFC applications using the document/view model,
+//  this is automatically done for you by the framework.
+
+void CDXChannelDlg::OnPaint()
+{
+    if (IsIconic())
+  {
+    CPaintDC dc(this); // device context for painting
+
+    SendMessage(WM_ICONERASEBKGND, (WPARAM) dc.GetSafeHdc(), 0);
+
+    // Center icon in client rectangle
+    int cxIcon = GetSystemMetrics(SM_CXICON);
+    int cyIcon = GetSystemMetrics(SM_CYICON);
+    CRect rect;
+    GetClientRect(&rect);
+    int x = (rect.Width() - cxIcon + 1) / 2;
+    int y = (rect.Height() - cyIcon + 1) / 2;
+
+    // Draw the icon
+    dc.DrawIcon(x, y, m_hIcon);
+  }
+  else
+  {
+    CDialog::OnPaint();
+  }
+}
+
+BOOL CDXChannelDlg::OnDeviceChange( UINT nID, DWORD lParam)
+{
+  if(nID ==  DBT_DEVNODES_CHANGED)
+  {
+    //  SetCaptureDevice();
+  }
+  return CDialog::OnDeviceChange(nID, lParam);
+}
+
+
+void CDXChannelDlg::OnSysCommand(UINT nID, LPARAM lParam)
+{
+  if(SC_MAXIMIZE == nID)
+  {}
+  CDialog::OnSysCommand(nID, lParam);
+}
+
+
+static bool fullScreen = false;
+void CDXChannelDlg::OnRButtonUp( UINT nFlags, CPoint point)
+{
+  CDialog::OnRButtonUp( nFlags,  point);
+}
+
+// The system calls this to obtain the cursor to display while the user drags
+//  the minimized window.
+HCURSOR CDXChannelDlg::OnQueryDragIcon()
+{
+  return (HCURSOR) m_hIcon;
+}
+
+void CDXChannelDlg::OnCbnSelchangeCodecList()
+{
+    SetSendCodec();
+}
+
+
+void CDXChannelDlg::OnCbnSelchangeSize()
+{
+    SetSendCodec();
+}
+
+void CDXChannelDlg::OnCbnSelchangeDevice()
+{
+
+
+    SetCaptureDevice();
+
+}
+
+
+void CDXChannelDlg::OnCbnSelchangeBitrate()
+{
+
+    SetSendCodec();
+
+}
+
+void CDXChannelDlg::OnCbnSelchangeMinFrameRate()
+{
+
+    SetSendCodec();
+
+}
+
+
+void CDXChannelDlg::OnBnClickedTmmbr()
+{
+
+    TEST_MUSTPASS(_vieRTPRTCP->SetTMMBRStatus(_channelId,m_cbTmmbr.GetCheck()==BST_CHECKED),-5);
+
+}
+
+void CDXChannelDlg::OnCbnSelchangeRtcpmode()
+{
+
+ /*
+ kRtcpNone     = 0,
+ kRtcpCompound_RFC4585     = 1,
+ kRtcpNonCompound_RFC5506 = 2 */
+    ViERTCPMode mode=ViERTCPMode(m_ctrlRtcpMode.GetCurSel());
+    TEST_MUSTPASS(_vieRTPRTCP->SetRTCPStatus(_channelId,mode),-5);
+
+}
+
+void CDXChannelDlg::OnBnClickedFreezelog()
+{
+    _canAddLog=m_cbFreezeLog.GetCheck()!=BST_CHECKED;
+}
+
+void CDXChannelDlg::OnBnClickedProtNack()
+{
+
+    TEST_MUSTPASS(_vieRTPRTCP->SetNACKStatus(_channelId,true),-5);
+
+}
+
+void CDXChannelDlg::OnBnClickedProtNone()
+{
+
+    TEST_MUSTPASS(_vieRTPRTCP->SetNACKStatus(_channelId,false),-5);
+    TEST_MUSTPASS(_vieRTPRTCP->SetFECStatus(_channelId,false,0,0),-5);
+    TEST_MUSTPASS(_vieRTPRTCP->SetHybridNACKFECStatus(_channelId,false,0,0),-5);
+}
+
+void CDXChannelDlg::OnBnClickedProtFec()
+{
+    int noCodec=_vieCodec->NumberOfCodecs();
+    int redPayloadType=0;
+    int fecPayloadType=0;
+    for(unsigned char i=0;i<noCodec;++i)
+    {
+        VideoCodec codec;
+        _vieCodec->GetCodec(i,codec);
+        if(codec.codecType==webrtc::kVideoCodecRED)
+        {
+            redPayloadType=codec.plType;
+        }
+        if(codec.codecType==webrtc::kVideoCodecULPFEC)
+        {
+            fecPayloadType=codec.plType;
+        }
+    }
+    TEST_MUSTPASS(_vieRTPRTCP->SetFECStatus(_channelId,true,redPayloadType,fecPayloadType),-5);
+}
+
+void CDXChannelDlg::OnBnClickedProtNackFec()
+{
+    int noCodec=_vieCodec->NumberOfCodecs();
+    int redPayloadType=0;
+    int fecPayloadType=0;
+    for(unsigned char i=0;i<noCodec;++i)
+    {
+        VideoCodec codec;
+        _vieCodec->GetCodec(i,codec);
+        if(codec.codecType==webrtc::kVideoCodecRED)
+        {
+            redPayloadType=codec.plType;
+        }
+        if(codec.codecType==webrtc::kVideoCodecULPFEC)
+        {
+            fecPayloadType=codec.plType;
+        }
+    }
+    TEST_MUSTPASS(_vieRTPRTCP->SetHybridNACKFECStatus(_channelId,true,
+                                                      redPayloadType,
+                                                      fecPayloadType),-5);
+
+}
+
+void CDXChannelDlg::OnBnClickedVersion()
+{
+    char version[1024];
+    _vieBase->GetVersion(version);
+    MessageBox(CharToTchar(version,-1));
+#ifndef NO_VOICE_ENGINE
+    _veBase->GetVersion(version);
+    MessageBox(CharToTchar(version,-1));
+#endif
+}
+
+unsigned int WINAPI CDXChannelDlg::CallbackThread(LPVOID lpParameter)
+{
+    static_cast<CDXChannelDlg*>(lpParameter)->CallbackThreadProcess();
+    return 0;
+}
+
+void CDXChannelDlg::CallbackThreadProcess()
+{
+    while(1)
+    {
+        if(WAIT_OBJECT_0==WaitForSingleObject(_callbackEvent,INFINITE))
+        {
+            char smsg[512];
+            EnterCriticalSection(&_critCallback);
+            strncpy(smsg,_logMsg,strlen(_logMsg)+1);
+            strcpy(_logMsg,"");
+
+
+            LeaveCriticalSection(&_critCallback);
+            if(strstr(smsg,"Send")!=NULL)
+            {
+                unsigned short fractionLost=0;
+                unsigned int cumulativeLost=0;
+                unsigned int extendedMax=0;
+                unsigned int jitter=0;
+                int rttMs=0;
+
+
+
+                _vieRTPRTCP->GetReceivedRTCPStatistics(_channelId,
+                                                  fractionLost,
+                                                  cumulativeLost,
+                                                  extendedMax,
+                                                  jitter,
+                                                  rttMs);
+
+                //int bw=0;
+                //if(_vieCodec->GetAvailableBandwidth(_channelId,bw)==0)
+                //{
+                //    sprintf(smsg,"%s, rtt %d, loss %d,bw %d", smsg,rttMs,fractionLost,bw);
+                //}
+                //else
+                //{
+                //    _vieBase->LastError(); // Reset last error.
+                //}
+
+
+
+            }
+            if(strlen(smsg))
+            {
+                m_ctrlInfo.InsertString(0,(LPCTSTR) CharToTchar(smsg,-1));
+                while(m_ctrlInfo.GetCount()==151)
+                    m_ctrlInfo.DeleteString(150);
+            }
+            else
+            {
+                break; // End the callback thread
+            }
+        }
+    }
+
+}
+void CDXChannelDlg::AddToInfo(const char* msg)
+{
+    if(!_canAddLog)
+        return;
+    EnterCriticalSection(&_critCallback);
+
+    SYSTEMTIME systemTime;
+    GetSystemTime(&systemTime);
+
+    if(strlen(_logMsg)==0)
+    {
+        SetEvent(_callbackEvent); // Notify of new
+    }
+
+    sprintf (_logMsg, "(%2u:%2u:%2u:%3u) %s", systemTime.wHour,
+                                                           systemTime.wMinute,
+                                                           systemTime.wSecond,
+                                                           systemTime.wMilliseconds,
+                                                           msg
+                                                           );
+
+
+
+    LeaveCriticalSection(&_critCallback);
+
+
+}
+
+void CDXChannelDlg::IncomingRate(const int videoChannel,
+                              unsigned int framerate,
+                              unsigned int bitrate)
+{
+  char str[64];
+  sprintf(str,"Incoming Fr:%d br %d\n", framerate, bitrate);
+    AddToInfo(str);
+}
+
+void CDXChannelDlg::RequestNewKeyFrame(int channel)
+{
+    assert(false && "(RequestNewKeyFrame why is it called");
+}
+void CDXChannelDlg::PerformanceAlarm(unsigned int cpuLoad)
+{
+    char str[64];
+    sprintf(str,"Performance alarm %d",cpuLoad);    
+    AddToInfo(str);
+}
+void CDXChannelDlg::OutgoingRate(const int videoChannel,
+                              unsigned int framerate,
+                              unsigned int bitrate)
+  {
+    char str[64];
+        sprintf(str,"Send Fr:%d br %d", framerate, bitrate);
+        AddToInfo(str);
+  }
+void CDXChannelDlg::IncomingCodecChanged(const int  videoChannel,
+                                      const VideoCodec& videoCodec)
+  {
+    char str[128];
+        sprintf(str,"Incoming codec channel:%d pltype:%d width:%d height:%d\n", videoChannel, videoCodec.plType, videoCodec.width,videoCodec.height);        
+        AddToInfo(str);
+  }
+void CDXChannelDlg::BrightnessAlarm(const int captureId,
+                                 const Brightness brightness)
+{
+
+    switch(brightness)
+    {
+    case Normal:        
+        AddToInfo("BrightnessAlarm - image ok.\n");
+        break;
+    case Bright:        
+        AddToInfo("BrightnessAlarm - light image.\n");
+        break;
+    case Dark:        
+        AddToInfo("BrightnessAlarm - dark image.\n");
+        break;
+    }
+}
+
+void CDXChannelDlg::CapturedFrameRate(const int captureId,
+                                   const unsigned char frameRate)
+{
+   char str[64];
+   sprintf(str,"Local Camera Frame rate:%d \n", frameRate);
+   AddToInfo(str);
+}
+
+void CDXChannelDlg::NoPictureAlarm(const int captureId,
+                                const CaptureAlarm alarm)
+{
+   char str[64];
+   sprintf(str,"No Picture alarm\n");   
+   AddToInfo(str);
+
+}
+
+
+void CDXChannelDlg::OnBnClickedExttransport()
+{
+    if(m_cbExternalTransport.GetCheck()==BST_CHECKED)
+    {
+        m_localPort1.EnableWindow(FALSE);
+        m_remotePort1.EnableWindow(FALSE);
+        m_remoteIp1.EnableWindow(FALSE);
+        m_ctrlPacketLoss.EnableWindow(TRUE);
+        m_ctrlDelay.EnableWindow(TRUE);
+        _externalTransport= new TbExternalTransport(*_vieNetwork);
+        _vieNetwork->RegisterSendTransport(_channelId,*_externalTransport);
+    }
+    else
+    {
+        _vieNetwork->DeregisterSendTransport(_channelId);
+
+        delete _externalTransport;
+        _externalTransport=NULL;
+        m_localPort1.EnableWindow(TRUE);
+        m_remotePort1.EnableWindow(TRUE);
+        m_remoteIp1.EnableWindow(TRUE);
+        m_ctrlPacketLoss.EnableWindow(FALSE);
+        m_ctrlDelay.EnableWindow(FALSE);
+    }
+}
+
+
+void CDXChannelDlg::OnCbnSelchangePacketloss()
+{
+    if(_externalTransport)
+    {
+        _externalTransport->SetPacketLoss(m_ctrlPacketLoss.GetCurSel()*2);
+    }
+}
+
+
+void CDXChannelDlg::OnCbnSelchangeDelay()
+{
+    if(_externalTransport)
+    {
+        _externalTransport->SetNetworkDelay(m_ctrlDelay.GetCurSel()*30);
+    }
+
+}
+
+void CDXChannelDlg::OnBnClickedBtnRecordIncoming()
+{
+
+    CButton *recordBtn = (CButton *) GetDlgItem(IDC_BTN_RECORD_INCOMING);
+    
+    CString text;
+    recordBtn->GetWindowText(text);
+    if(text!=_T("Stop Rec Inc")!=0)
+    {
+        recordBtn->SetWindowText(_T("Stop Rec Inc"));
+        SYSTEMTIME time;
+        GetSystemTime(&time);
+        sprintf(_fileName,"IncomingChannel%d_%4d%2d%2d%2d%2d.avi",_channelId,time.wYear,time.wMonth,time.wDay,time.wHour,time.wMinute);
+
+        AudioSource audioSource=PLAYOUT;
+        webrtc::CodecInst audioCodec;
+        strcpy(audioCodec.plname,"L16");
+        audioCodec.rate     = 256000;
+        audioCodec.plfreq   = 16000;
+        audioCodec.pacsize  = 160;
+
+        webrtc::VideoCodec videoCodec;
+        memset(&videoCodec,0,sizeof(videoCodec));
+
+        strcpy(videoCodec.plName,"VP8");
+        videoCodec.maxBitrate=1000;
+        videoCodec.startBitrate=1000;
+        videoCodec.width=352;
+        videoCodec.height=288;
+        videoCodec.codecType=webrtc::kVideoCodecVP8;
+        videoCodec.maxFramerate=30;        
+        TEST_MUSTPASS(_vieFile->StartRecordIncomingVideo(_channelId,_fileName,audioSource,audioCodec, videoCodec),-5);
+    }
+    else
+    {
+        recordBtn->SetWindowText(_T("Record Incoming"));
+        TEST_MUSTPASS(_vieFile->StopRecordIncomingVideo(_channelId),-5);
+        CString msg;
+        msg.AppendFormat(_T("Recorded file %s"),_fileName);
+        MessageBox(msg);
+    }
+}
+
+void CDXChannelDlg::OnBnClickedBtnRecordOutgoing()
+{
+
+    CButton *recordBtn = (CButton *) GetDlgItem(IDC_BTN_RECORD_OUTGOING);
+    CString text;
+    recordBtn->GetWindowText(text);
+    if(text!=_T("Stop Rec Out"))
+    {
+        recordBtn->SetWindowText(_T("Stop Rec Out"));
+        SYSTEMTIME time;
+        GetSystemTime(&time);
+        sprintf(_fileName,"OutgoingChannel%d_%4d%2d%2d%2d%2d.avi",_channelId,time.wYear,time.wMonth,time.wDay,time.wHour,time.wMinute);
+
+        AudioSource audioSource=MICROPHONE;
+        webrtc::CodecInst audioCodec;
+        strcpy(audioCodec.plname,"L16");
+        audioCodec.rate     = 256000;
+        audioCodec.plfreq   = 16000;
+        audioCodec.pacsize  = 160;
+
+        webrtc::VideoCodec videoCodec;
+        memset(&videoCodec,0,sizeof(videoCodec));
+
+        strcpy(videoCodec.plName,"VP8");
+        videoCodec.maxBitrate=1000;
+        videoCodec.startBitrate=1000;
+        videoCodec.width=352;
+        videoCodec.height=288;
+        videoCodec.codecType=webrtc::kVideoCodecVP8;
+        videoCodec.maxFramerate=30;        
+        TEST_MUSTPASS(_vieFile->StartRecordOutgoingVideo(_channelId,_fileName,audioSource,audioCodec,videoCodec),-5);
+    }
+    else
+    {
+        recordBtn->SetWindowText(_T("Record Outgoing"));
+        TEST_MUSTPASS(_vieFile->StopRecordOutgoingVideo(_channelId),-5);
+        CString msg;
+        msg.AppendFormat(_T("Recorded file %s"),_fileName);
+        MessageBox(msg);
+    }
+}
+
+void CDXChannelDlg::OnBnClickedBtnCreateSlave()
+{
+    CDXChannelDlg* newSlave =new CDXChannelDlg(_videoEngine,_captureDevicePool,_channelPool,_voiceEngine,NULL,_dialogObserver,_channelId);
+    newSlave->Create(CDXChannelDlg::IDD,NULL);
+}
diff --git a/src/video_engine/main/test/WindowsTest/ChannelDlg.h b/src/video_engine/main/test/WindowsTest/ChannelDlg.h
new file mode 100644
index 0000000..43aeb09
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/ChannelDlg.h
@@ -0,0 +1,273 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_CHANNELDLG_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_CHANNELDLG_H_
+
+#include "StdAfx.h"
+//#define NO_VOICE_ENGINE
+
+/////////////////////////////////////////////////////////////////////////////
+// CDXChannelDlg dialog
+// Include ViE headers
+
+#include "common_types.h"
+
+#include "vie_base.h"
+#include "vie_capture.h"
+#include "vie_codec.h"
+#include "vie_network.h"
+#include "vie_render.h"
+#include "vie_rtp_rtcp.h"
+#include "vie_errors.h"
+#include "vie_file.h"
+#include "tbExternalTransport.h"
+
+#include "resource.h"		// main symbols
+
+
+#ifndef NO_VOICE_ENGINE
+
+#include "voe_base.h"
+#include "voe_errors.h"
+#include "voe_base.h"
+#include "voe_network.h"
+#include "voe_codec.h"
+#include "voe_rtp_rtcp.h"
+#endif
+
+using namespace webrtc;
+class CDXChannelDlg;
+class CaptureDevicePool;
+class ChannelPool;
+
+#define TEST_MUSTPASS(expr,oklasterror)                                         \
+    {                                                               \
+        if ((expr))                                                 \
+        {                                                           \
+            CString r_msg;                                        \
+            int r_lastError=_vieBase->LastError();    \
+            CString exp;    \
+            exp=#expr;\
+            r_msg.Format(_T("\nError at line:%i, %s \nError code: %i\n"),__LINE__, exp,r_lastError);      \
+            if(r_lastError!=oklasterror) \
+            ::MessageBox (NULL, (LPCTSTR)r_msg, TEXT("Error Message"),  MB_OK | MB_ICONINFORMATION);                                   \
+        }                                                           \
+    }
+
+class CDXChannelDlgObserver
+{
+public:
+    virtual void ChannelDialogEnded(CDXChannelDlg* context)=0;
+
+protected:
+    virtual ~CDXChannelDlgObserver(){};
+
+};
+
+class CDXChannelDlg : public CDialog , public ViEEncoderObserver, public ViEDecoderObserver, public ViEBaseObserver, public ViECaptureObserver
+{
+// Construction
+public:
+	CDXChannelDlg(VideoEngine* videoEngine,
+        CaptureDevicePool& captureDevicePool,
+        ChannelPool& channelPool,
+        void* voiceEngine=NULL
+    ,CWnd* pParent = NULL,CDXChannelDlgObserver* observer=NULL,int parentChannel=-1);	// standard constructor
+
+// Dialog Data
+	//{{AFX_DATA(CDXChannelDlg)
+	enum { IDD = IDD_DXQUALITY_DIALOG };
+	CComboBox	m_ctrlDevice;
+	CComboBox	m_ctrlCodec;	
+	CComboBox	m_ctrlBitrate;
+	CComboBox	m_ctrlCodecSize;
+    CComboBox	m_ctrlRtcpMode;    
+    CComboBox	m_ctrlPacketBurst;    
+	CComboBox	m_ctrlMinFrameRate;	
+    
+    CListBox 	m_ctrlInfo;
+	
+	CStatic		m_ctrlLiveRemoteVideo;
+	CStatic		m_ctrlLiveVideo;
+	CEdit		m_localPort1;
+	CEdit		m_remotePort1;	
+	CIPAddressCtrl	m_remoteIp1;
+    CButton     m_cbTmmbr;
+    CButton     m_cbExternalTransport;
+    CButton     m_cbFreezeLog;
+    CButton     m_cbDefaultSendChannel;
+    CComboBox   m_ctrlPacketLoss;
+    CComboBox   m_ctrlDelay;
+    
+	
+	//}}AFX_DATA
+
+	// ClassWizard generated virtual function overrides
+	//{{AFX_VIRTUAL(CDXChannelDlg)
+	protected:
+	virtual void DoDataExchange(CDataExchange* pDX);	// DDX/DDV support
+	//}}AFX_VIRTUAL
+
+
+
+public : 
+    // Callback
+
+    //Capture observer
+    virtual void BrightnessAlarm(const int captureId,
+                                 const Brightness brightness);
+
+    virtual void CapturedFrameRate(const int captureId,
+                                   const unsigned char frameRate);
+
+    virtual void NoPictureAlarm(const int captureId,
+                                const CaptureAlarm alarm);
+
+
+    // same callback method is being used to raise also to clear.
+    // true - raise, false - clear
+    // virtual void NoPictureAlarm(bool active = true);
+
+    // Encoder observer
+    virtual void OutgoingRate(const int videoChannel,
+                              const unsigned int framerate,
+                              const unsigned int bitrate) ;
+
+    //Decoder observer
+    virtual void IncomingCodecChanged(const int  videoChannel,
+                                      const VideoCodec& videoCodec);
+
+    virtual void IncomingRate(const int videoChannel,
+                              const unsigned int framerate,
+                              const unsigned int bitrate);
+    
+    virtual void RequestNewKeyFrame(const int videoChannel);
+
+    // Base observer
+    virtual void PerformanceAlarm(const unsigned int cpuLoad);
+
+    
+    //virtual void IncomingCSRCChanged(int channel, unsigned int csrc, bool added);    
+    
+
+
+// Implementation
+protected:
+	HICON m_hIcon;
+    int _channelId;
+    int _parentChannel;
+    int _audioChannel;
+	bool _canAddLog;
+
+    // Thread and function for callbacks
+    CRITICAL_SECTION _critCallback;
+    HANDLE _callbackThread;
+    HANDLE _callbackEvent;
+    char _logMsg[512];
+    static  unsigned int WINAPI CallbackThread(LPVOID lpParameter);    
+    void CallbackThreadProcess();
+
+
+
+	//void GetSize(int sizeSel, int &width, int &height);
+	virtual void ConfigureRender();
+
+    virtual void SetCaptureDevice();
+    virtual void SetLocalReceiver();
+    virtual void SetSendDestination();
+    virtual void SetSendCodec();
+
+    
+    void AddToInfo(const char* msg);
+
+	//	afx_msg void Command(UINT nID, LPARAM lParam);
+
+	// Generated message map functions
+	//{{AFX_MSG(CDXChannelDlg)
+	virtual BOOL OnInitDialog();
+	afx_msg void OnSysCommand(UINT nID, LPARAM lParam);
+	afx_msg void OnRButtonUp( UINT nFlags, CPoint point);	
+	afx_msg BOOL OnDeviceChange( UINT, DWORD );
+	afx_msg void OnPaint();
+	//afx_msg LRESULT OnDisplayChange(WPARAM, LPARAM);
+	afx_msg HCURSOR OnQueryDragIcon();
+	virtual afx_msg void OnStartSend();	
+	virtual afx_msg void OnDestroy();
+	virtual afx_msg void OnStopSend();    
+    virtual afx_msg void OnCancel();
+	afx_msg void OnTimer(UINT nIDEvent);
+
+	//}}AFX_MSG
+	DECLARE_MESSAGE_MAP()
+
+private:
+	CDXChannelDlgObserver*  _dialogObserver;
+	
+	VideoEngine* _videoEngine;
+    ViEBase*     _vieBase;
+    ViECapture*  _vieCapture;
+    ViERTP_RTCP* _vieRTPRTCP;
+    ViERender*   _vieRender;
+    ViECodec*    _vieCodec;
+    ViENetwork*  _vieNetwork;
+    ViEFile*      _vieFile;
+    TbExternalTransport* _externalTransport;
+    char             _fileName[256];
+
+
+#ifndef NO_VOICE_ENGINE
+    VoiceEngine*		_voiceEngine;
+    VoEBase*             _veBase;
+    VoENetwork*          _veNetwork;
+    VoECodec*            _veCodec;
+    VoERTP_RTCP*         _veRTCP;
+#else
+    void*                   _voiceEngine;
+
+#endif
+
+    VideoCodec     _sendCodec;
+    int _captureId;
+    CaptureDevicePool& _captureDevicePool;
+    ChannelPool& _channelPool;
+
+
+	afx_msg void OnCbnSelchangeCodecList();
+	afx_msg void OnCbnSelchangeDevice();
+	afx_msg void OnCbnSelchangeSize();
+	afx_msg void OnCbnSelchangeBitrate();    
+	afx_msg void OnCbnSelchangeWindowSize();	
+	afx_msg void OnBnClickedversion();
+	afx_msg void OnCbnSelchangeMinFrameRate();	
+    afx_msg void OnBnClickedStartlisten();
+    afx_msg void OnBnClickedStoplisten();
+    afx_msg void OnBnClickedStopsend();
+    afx_msg void OnBnClickedTmmbr();
+    afx_msg void OnCbnSelchangeRtcpmode();
+    afx_msg void OnBnClickedProtNack();
+    afx_msg void OnBnClickedProtNone();
+    afx_msg void OnBnClickedProtFec();
+    afx_msg void OnBnClickedProtNackFec();  
+    afx_msg void OnBnClickedFreezelog();
+public:
+    afx_msg void OnBnClickedExttransport();    
+    afx_msg void OnCbnSelchangePacketloss();
+    afx_msg void OnCbnSelchangeDelay();
+    afx_msg void OnBnClickedBtnRecordIncoming();
+    afx_msg void OnBnClickedBtnRecordOutgoing();
+    afx_msg void OnBnClickedBtnCreateSlave();
+    afx_msg void OnBnClickedVersion();
+};
+
+//{{AFX_INSERT_LOCATION}}
+// Microsoft Visual C++ will insert additional declarations immediately before the previous line.
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_CHANNELDLG_H_
diff --git a/src/video_engine/main/test/WindowsTest/ChannelPool.cc b/src/video_engine/main/test/WindowsTest/ChannelPool.cc
new file mode 100644
index 0000000..dbd1644
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/ChannelPool.cc
@@ -0,0 +1,40 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "ChannelPool.h"
+#include "map_wrapper.h"
+#include <string.h>
+#include <assert.h>
+#include "critical_section_wrapper.h"
+
+ChannelPool::ChannelPool():
+_critSect(*webrtc::CriticalSectionWrapper::CreateCriticalSection())
+{
+}
+
+ChannelPool::~ChannelPool(void)
+{
+    assert(_channelMap.Size()==0);    
+    delete &_critSect;
+}
+
+WebRtc_Word32 ChannelPool::AddChannel(int channel)
+{
+    return _channelMap.Insert(channel,(void*) channel);
+}
+WebRtc_Word32 ChannelPool::RemoveChannel(int channel)
+{
+    return _channelMap.Erase(channel);
+}
+
+webrtc::MapWrapper& ChannelPool::ChannelMap()
+{
+    return _channelMap;
+}
diff --git a/src/video_engine/main/test/WindowsTest/ChannelPool.h b/src/video_engine/main/test/WindowsTest/ChannelPool.h
new file mode 100644
index 0000000..374c676
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/ChannelPool.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#pragma once
+#include "StdAfx.h"
+#include "common_types.h"
+
+#include "vie_base.h"
+#include "map_wrapper.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+}
+
+class ChannelPool
+{
+public:
+    ChannelPool();
+    ~ChannelPool(void);
+    WebRtc_Word32 AddChannel(int channel);
+    WebRtc_Word32 RemoveChannel(int channel);    
+
+    webrtc::MapWrapper& ChannelMap();
+
+    private:     
+        webrtc::CriticalSectionWrapper& _critSect;        
+        webrtc::MapWrapper _channelMap;
+
+};
diff --git a/src/video_engine/main/test/WindowsTest/StdAfx.h b/src/video_engine/main/test/WindowsTest/StdAfx.h
new file mode 100644
index 0000000..78b1fbd
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/StdAfx.h
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// stdafx.h : include file for standard system include files,
+//  or project specific include files that are used frequently, but
+//      are changed infrequently
+//
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_STDAFX_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_STDAFX_H_
+
+#if _MSC_VER > 1000
+#pragma once
+#endif // _MSC_VER > 1000
+
+#define VC_EXTRALEAN		// Exclude rarely-used stuff from Windows headers
+
+#include <afxwin.h>         // MFC core and standard components
+#include <afxext.h>         // MFC extensions
+#include <afxdisp.h>        // MFC Automation classes
+#include <afxdtctl.h>		// MFC support for Internet Explorer 4 Common Controls
+#ifndef _AFX_NO_AFXCMN_SUPPORT
+#include <afxcmn.h>			// MFC support for Windows Common Controls
+#endif // _AFX_NO_AFXCMN_SUPPORT
+
+
+//{{AFX_INSERT_LOCATION}}
+// Microsoft Visual C++ will insert additional declarations immediately before the previous line.
+
+#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_STDAFX_H_
diff --git a/src/video_engine/main/test/WindowsTest/VideoSize.h b/src/video_engine/main/test/WindowsTest/VideoSize.h
new file mode 100644
index 0000000..60e2bdd
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/VideoSize.h
@@ -0,0 +1,53 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_VIDEOSIZE_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_VIDEOSIZE_H_
+#include "StdAfx.h"
+enum VideoSize
+	{
+		UNDEFINED, 
+		SQCIF,     // 128*96       = 12 288
+		QQVGA,     // 160*120      = 19 200
+		QCIF,      // 176*144      = 25 344
+        CGA,       // 320*200      = 64 000
+		QVGA,      // 320*240      = 76 800
+        SIF,       // 352*240      = 84 480
+		WQVGA,     // 400*240      = 96 000
+		CIF,       // 352*288      = 101 376
+        W288P,     // 512*288      = 147 456 (WCIF)
+        W368P,     // 640*368      = 235 520
+        S_448P,      // 576*448      = 281 088
+		VGA,       // 640*480      = 307 200
+        S_432P,      // 720*432      = 311 040
+        W432P,     // 768*432      = 331 776 (a.k.a WVGA 16:9)
+        S_4SIF,      // 704*480      = 337 920
+        W448P,     // 768*448      = 344 064
+		NTSC,		// 720*480      = 345 600
+        FW448P,    // 800*448      = 358 400
+        S_768x480P,  // 768*480      = 368 640 (a.k.a WVGA 16:10)
+		WVGA,      // 800*480      = 384 000
+		S_4CIF,      // 704576      = 405 504
+		SVGA,      // 800*600      = 480 000
+        W544P,     // 960*544      = 522 240
+        W576P,     // 1024*576     = 589 824 (W4CIF)
+		HD,        // 960*720      = 691 200
+		XGA,       // 1024*768     = 786 432
+		WHD,       // 1280*720     = 921 600
+		FULL_HD,   // 1440*1080    = 1 555 200
+        UXGA,      // 1600*1200    = 1 920 000
+		WFULL_HD,  // 1920*1080    = 2 073 600
+		NUMBER_OF_VIDEO_SIZE
+	};
+
+int GetWidthHeight(VideoSize size, int& width, int& height);
+
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_VIDEOSIZE_H_
diff --git a/src/video_engine/main/test/WindowsTest/WindowsTest.cc b/src/video_engine/main/test/WindowsTest/WindowsTest.cc
new file mode 100644
index 0000000..ff8159c
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/WindowsTest.cc
@@ -0,0 +1,114 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "WindowsTest.h"
+#include "ChannelDlg.h"
+#include "WindowsTestMainDlg.h"
+#include "engine_configurations.h"
+
+#ifdef _DEBUG
+#define new DEBUG_NEW
+#undef THIS_FILE
+static char THIS_FILE[] = __FILE__;
+#endif
+
+// Check memory leaks id running debug
+#if (defined(_DEBUG) && defined(_WIN32))
+//    #include "vld.h"
+#endif
+/////////////////////////////////////////////////////////////////////////////
+// CDXWindowsTestApp
+
+BEGIN_MESSAGE_MAP(CDXWindowsTestApp, CWinApp)
+	//{{AFX_MSG_MAP(CDXWindowsTestApp)
+		// NOTE - the ClassWizard will add and remove mapping macros here.
+		//    DO NOT EDIT what you see in these blocks of generated code!
+	//}}AFX_MSG
+	ON_COMMAND(ID_HELP, CWinApp::OnHelp)
+END_MESSAGE_MAP()
+
+/////////////////////////////////////////////////////////////////////////////
+// CDXWindowsTestApp construction
+
+CDXWindowsTestApp::CDXWindowsTestApp()
+{
+    
+}
+
+/////////////////////////////////////////////////////////////////////////////
+// The one and only object
+
+CDXWindowsTestApp theApp;
+
+/////////////////////////////////////////////////////////////////////////////
+// CDXWindowsTestApp initialization
+
+BOOL CDXWindowsTestApp::InitInstance()
+{
+    int result=0;
+    #ifndef NO_VOICE_ENGINE
+        _voiceEngine = VoiceEngine::Create();
+        _veBase = VoEBase::GetInterface(_voiceEngine);
+         result+=_veBase->Init();
+     #else
+        _voiceEngine=NULL;
+    #endif
+
+    _videoEngine = VideoEngine::Create();
+
+    _videoEngine->SetTraceFilter(webrtc::kTraceDefault);//webrtc::kTraceDebug | webrtc::kTraceError | webrtc::kTraceApiCall | webrtc::kTraceWarning | webrtc::kTraceCritical | webrtc::kTraceStateInfo | webrtc::kTraceInfo | webrtc::kTraceStream);
+    _videoEngine->SetTraceFile("trace.txt");
+    
+    ViEBase* vieBase=ViEBase::GetInterface(_videoEngine);
+    result+=vieBase->Init();
+    if(result!=0)
+    {
+        ::MessageBox (NULL, (LPCTSTR)("failed to init VideoEngine"), TEXT("Error Message"),  MB_OK | MB_ICONINFORMATION);                
+    }
+    
+    {
+      WindowsTestMainDlg dlg(_videoEngine,_voiceEngine);
+
+      m_pMainWnd = &dlg;
+      dlg.DoModal();
+    }
+    
+    vieBase->Release();
+
+    if(!VideoEngine::Delete(_videoEngine))
+    {
+        char errorMsg[255];
+        sprintf(errorMsg,"All VideoEngine interfaces are not released properly!");
+        ::MessageBox (NULL, (LPCTSTR)errorMsg, TEXT("Error Message"),  MB_OK | MB_ICONINFORMATION);
+    }
+
+  #ifndef NO_VOICE_ENGINE
+    
+    _veBase->Terminate();
+    if(_veBase->Release()!=0)        
+    {
+        // ensure that no interface is still referenced
+        char errorMsg[256];
+        sprintf(errorMsg,"All VoiceEngine interfaces are not released properly!");
+        ::MessageBox (NULL, (LPCTSTR)errorMsg, TEXT("Error Message"),  MB_OK | MB_ICONINFORMATION);
+    }
+
+    if (false == VoiceEngine::Delete(_voiceEngine))
+    {
+        char errorMsg[256];
+        sprintf(errorMsg,"VoiceEngine::Delete() failed!");
+        ::MessageBox (NULL, (LPCTSTR)errorMsg, TEXT("Error Message"),  MB_OK | MB_ICONINFORMATION);
+    }
+   #endif
+
+	// Since the dialog has been closed, return FALSE so that we exit the
+	//  application, rather than start the application's message pump.
+	return FALSE;
+}
diff --git a/src/video_engine/main/test/WindowsTest/WindowsTest.h b/src/video_engine/main/test/WindowsTest/WindowsTest.h
new file mode 100644
index 0000000..dc3ee9d
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/WindowsTest.h
@@ -0,0 +1,61 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_WINDOWSTEST_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_WINDOWSTEST_H_
+
+
+#include "StdAfx.h"
+#include "resource.h"		// main symbols
+
+
+
+/////////////////////////////////////////////////////////////////////////////
+
+//Forward declarations
+namespace webrtc {
+    class VoiceEngine;
+    class VoEBase;
+    class VideoEngine;
+}
+using namespace webrtc;
+
+class CDXWindowsTestApp : public CWinApp
+{
+public:
+	CDXWindowsTestApp();
+
+// Overrides
+	// ClassWizard generated virtual function overrides
+	//{{AFX_VIRTUAL(CDXWindowsTestApp)
+	public:
+	virtual BOOL InitInstance();
+	//}}AFX_VIRTUAL
+
+// Implementation
+
+	//{{AFX_MSG(CDXWindowsTestApp)
+		// NOTE - the ClassWizard will add and remove member functions here.
+		//    DO NOT EDIT what you see in these blocks of generated code !
+	//}}AFX_MSG
+	DECLARE_MESSAGE_MAP()
+
+	VideoEngine*  _videoEngine;
+    VoiceEngine*  _voiceEngine;
+    VoEBase*       _veBase;
+};
+
+
+/////////////////////////////////////////////////////////////////////////////
+
+//{{AFX_INSERT_LOCATION}}
+// Microsoft Visual C++ will insert additional declarations immediately before the previous line.
+
+#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_WINDOWSTEST_H_
diff --git a/src/video_engine/main/test/WindowsTest/WindowsTestMainDlg.cc b/src/video_engine/main/test/WindowsTest/WindowsTestMainDlg.cc
new file mode 100644
index 0000000..fcc490d
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/WindowsTestMainDlg.cc
@@ -0,0 +1,142 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// WindowsTestMainDlg.cpp : implementation file
+//
+#include "WindowsTestMainDlg.h"
+#include "WindowsTest.h"
+#include "ChannelDlg.h"
+
+#include "voe_base.h"
+
+// WindowsTestMainDlg dialog
+
+IMPLEMENT_DYNAMIC(WindowsTestMainDlg, CDialog)
+
+WindowsTestMainDlg::WindowsTestMainDlg(VideoEngine* videoEngine,void* voiceEngine,CWnd* pParent /*=NULL*/)
+	: CDialog(WindowsTestMainDlg::IDD, pParent),
+        _videoEngine(videoEngine),
+        _voiceEngine((VoiceEngine*) voiceEngine),
+        _testDlg1(NULL),
+        _testDlg2(NULL),
+        _testDlg3(NULL),
+        _testDlg4(NULL),    
+        _externalInWidth(0),   
+        _externalInHeight(0),    
+        _externalInVideoType(0),
+        _captureDevicePool(videoEngine)
+{
+    
+}
+
+WindowsTestMainDlg::~WindowsTestMainDlg()
+{        
+}
+
+void WindowsTestMainDlg::DoDataExchange(CDataExchange* pDX)
+{
+	CDialog::DoDataExchange(pDX);
+}
+
+
+BEGIN_MESSAGE_MAP(WindowsTestMainDlg, CDialog)
+        ON_BN_CLICKED(IDC_CHANNEL1, &WindowsTestMainDlg::OnBnClickedChannel1)
+        ON_BN_CLICKED(IDC_CHANNEL2, &WindowsTestMainDlg::OnBnClickedChannel2)
+        ON_BN_CLICKED(IDC_CHANNEL3, &WindowsTestMainDlg::OnBnClickedChannel3)
+        ON_BN_CLICKED(IDC_CHANNEL4, &WindowsTestMainDlg::OnBnClickedChannel4)
+END_MESSAGE_MAP()
+
+
+
+void WindowsTestMainDlg::OnBnClickedChannel1()
+{
+    if(!_testDlg1)
+    {
+        _testDlg1=new CDXChannelDlg(_videoEngine,_captureDevicePool,_channelPool,_voiceEngine,NULL,this);
+        _testDlg1->Create(CDXChannelDlg::IDD,this);
+    }
+    else
+    {
+        _testDlg1->SetActiveWindow();
+    }    
+}
+
+void WindowsTestMainDlg::OnBnClickedChannel2()
+{
+    if(!_testDlg2)
+    {
+        _testDlg2=new CDXChannelDlg(_videoEngine,_captureDevicePool,_channelPool,_voiceEngine,NULL,this);
+        _testDlg2->Create(CDXChannelDlg::IDD,this);
+
+    }
+    else
+    {
+        _testDlg2->SetActiveWindow();
+    }    
+}
+
+void WindowsTestMainDlg::ChannelDialogEnded(CDXChannelDlg* context)
+{
+    if(context==_testDlg4)
+    {
+        delete _testDlg4;
+        _testDlg4=NULL;
+    }
+    else if(context==_testDlg3)
+    {
+        delete _testDlg3;
+        _testDlg3=NULL;
+    }
+    else if(context==_testDlg2)
+    {
+        delete _testDlg2;
+        _testDlg2=NULL;
+    }
+    else if(context==_testDlg1)
+    {
+        delete _testDlg1;
+        _testDlg1=NULL;
+    }
+    else // Slave channel
+    {
+        delete context;
+    }
+
+}
+
+
+
+void WindowsTestMainDlg::OnBnClickedChannel3()
+{
+    if(!_testDlg3)
+    {
+        _testDlg3=new CDXChannelDlg(_videoEngine,_captureDevicePool,_channelPool,_voiceEngine,NULL,this);
+        _testDlg3->Create(CDXChannelDlg::IDD,this);
+
+    }
+    else
+    {
+        _testDlg3->SetActiveWindow();
+    }    
+}
+
+void WindowsTestMainDlg::OnBnClickedChannel4()
+{
+    if(!_testDlg4)
+    {
+        _testDlg4=new CDXChannelDlg(_videoEngine,_captureDevicePool,_channelPool,_voiceEngine,NULL,this);
+        _testDlg4->Create(CDXChannelDlg::IDD,this);
+
+    }
+    else
+    {
+        _testDlg4->SetActiveWindow();
+    }                
+}
diff --git a/src/video_engine/main/test/WindowsTest/WindowsTestMainDlg.h b/src/video_engine/main/test/WindowsTest/WindowsTestMainDlg.h
new file mode 100644
index 0000000..8aae99a
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/WindowsTestMainDlg.h
@@ -0,0 +1,72 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#pragma once
+#include "StdAfx.h"
+#include "WindowsTestResource.h"
+
+#include "ChannelDlg.h"
+#include "CaptureDevicePool.h"
+#include "ChannelPool.h"
+
+//Forward declarations
+namespace webrtc {
+    class VideoEngine;
+    class VoiceEngine;
+}
+using namespace webrtc;
+class CDXCaptureDlg;
+
+
+class WindowsTestMainDlg : public CDialog, private CDXChannelDlgObserver
+{
+	DECLARE_DYNAMIC(WindowsTestMainDlg)
+
+public:
+	WindowsTestMainDlg(VideoEngine* videoEngine,void* voiceEngine=NULL,CWnd* pParent = NULL);   // standard constructor
+	virtual ~WindowsTestMainDlg();
+
+// Dialog Data
+	enum { IDD = IDD_WINDOWSTEST_MAIN };
+
+protected:
+	virtual void DoDataExchange(CDataExchange* pDX);    // DDX/DDV support
+
+	DECLARE_MESSAGE_MAP()
+public:
+     afx_msg void OnBnClickedChannel1();
+     afx_msg void OnBnClickedChannel2();
+     afx_msg void OnBnClickedChannel3();
+     afx_msg void OnBnClickedChannel4();
+
+
+     VideoEngine* _videoEngine;
+    VoiceEngine*		_voiceEngine;
+    VoEBase* _veBase;
+
+    CDXChannelDlg* _testDlg1;
+    CDXChannelDlg* _testDlg2;
+    CDXChannelDlg* _testDlg3;
+    CDXChannelDlg* _testDlg4;
+
+    int _externalInWidth;   
+    int _externalInHeight;
+    int _externalInVideoType;
+
+    CaptureDevicePool _captureDevicePool;
+    ChannelPool       _channelPool;
+
+
+private:
+    virtual void ChannelDialogEnded(CDXChannelDlg* context);
+
+public:
+
+};
diff --git a/src/video_engine/main/test/WindowsTest/WindowsTestResouce.rc b/src/video_engine/main/test/WindowsTest/WindowsTestResouce.rc
new file mode 100644
index 0000000..5e866ad
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/WindowsTestResouce.rc
@@ -0,0 +1,101 @@
+// Microsoft Visual C++ generated resource script.

+//

+#include "WindowsTestResource.h"

+

+#define APSTUDIO_READONLY_SYMBOLS

+/////////////////////////////////////////////////////////////////////////////

+//

+// Generated from the TEXTINCLUDE 2 resource.

+//

+#include "afxres.h"

+

+/////////////////////////////////////////////////////////////////////////////

+#undef APSTUDIO_READONLY_SYMBOLS

+

+/////////////////////////////////////////////////////////////////////////////

+// Swedish resources

+

+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_SVE)

+#ifdef _WIN32

+LANGUAGE LANG_SWEDISH, SUBLANG_DEFAULT

+#pragma code_page(1252)

+#endif //_WIN32

+

+#ifdef APSTUDIO_INVOKED

+/////////////////////////////////////////////////////////////////////////////

+//

+// TEXTINCLUDE

+//

+

+1 TEXTINCLUDE 

+BEGIN

+    "WindowsTestResource.h\0"

+END

+

+2 TEXTINCLUDE 

+BEGIN

+    "#include ""afxres.h""\r\n"

+    "\0"

+END

+

+3 TEXTINCLUDE 

+BEGIN

+    "\r\n"

+    "\0"

+END

+

+#endif    // APSTUDIO_INVOKED

+

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// Dialog

+//

+

+IDD_WINDOWSTEST_MAIN DIALOGEX 0, 0, 186, 156

+STYLE DS_SETFONT | DS_MODALFRAME | DS_FIXEDSYS | WS_POPUP | WS_CAPTION | WS_SYSMENU

+CAPTION "Windows ViE Test"

+FONT 8, "MS Shell Dlg", 400, 0, 0x1

+BEGIN

+    DEFPUSHBUTTON   "OK",IDOK,129,7,50,14

+    PUSHBUTTON      "Cancel",IDCANCEL,129,24,50,14

+    PUSHBUTTON      "Channel 1",IDC_CHANNEL1,129,45,50,14

+    PUSHBUTTON      "Channel 2",IDC_CHANNEL2,129,62,50,14

+    PUSHBUTTON      "Channel 3",IDC_CHANNEL3,129,79,50,14

+    PUSHBUTTON      "Channel 4",IDC_CHANNEL4,129,96,50,14

+END

+

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// DESIGNINFO

+//

+

+#ifdef APSTUDIO_INVOKED

+GUIDELINES DESIGNINFO 

+BEGIN

+    IDD_WINDOWSTEST_MAIN, DIALOG

+    BEGIN

+        LEFTMARGIN, 7

+        RIGHTMARGIN, 179

+        TOPMARGIN, 7

+        BOTTOMMARGIN, 149

+    END

+END

+#endif    // APSTUDIO_INVOKED

+

+#endif    // Swedish resources

+/////////////////////////////////////////////////////////////////////////////

+

+

+

+#ifndef APSTUDIO_INVOKED

+/////////////////////////////////////////////////////////////////////////////

+//

+// Generated from the TEXTINCLUDE 3 resource.

+//

+

+

+/////////////////////////////////////////////////////////////////////////////

+#endif    // not APSTUDIO_INVOKED

+

diff --git a/src/video_engine/main/test/WindowsTest/WindowsTestResource.h b/src/video_engine/main/test/WindowsTest/WindowsTestResource.h
new file mode 100644
index 0000000..2d49c28
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/WindowsTestResource.h
@@ -0,0 +1,28 @@
+//{{NO_DEPENDENCIES}}
+// Microsoft Visual C++ generated include file.
+// Used by WindowsTestResouce.rc
+//
+#define IDD_WINDOWSTEST_MAIN            101
+#define IDC_CHANNEL1                    1001
+#define IDC_CHANNEL2                    1002
+#define IDC_CHANNEL3                    1004
+#define IDC_CHANNEL4                    1005
+#define IDC_POSITION                    1009
+#define IDC_INFORMATION                 1050
+#define IDC_CHECK_CHANNEL1              1070
+#define IDC_CHECK_CHANNEL2              1071
+#define IDC_CHECK_CHANNEL3              1072
+#define IDC_CHECK_CHANNEL4              1073
+#define IDC_COMBO1                      1074
+#define IDC_BTN_CREATE2                 1076
+
+// Next default values for new objects
+// 
+#ifdef APSTUDIO_INVOKED
+#ifndef APSTUDIO_READONLY_SYMBOLS
+#define _APS_NEXT_RESOURCE_VALUE        106
+#define _APS_NEXT_COMMAND_VALUE         40001
+#define _APS_NEXT_CONTROL_VALUE         1076
+#define _APS_NEXT_SYMED_VALUE           107
+#endif
+#endif
diff --git a/src/video_engine/main/test/WindowsTest/captureDeviceImage.jpg b/src/video_engine/main/test/WindowsTest/captureDeviceImage.jpg
new file mode 100644
index 0000000..3bb3ba4
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/captureDeviceImage.jpg
Binary files differ
diff --git a/src/video_engine/main/test/WindowsTest/renderStartImage.jpg b/src/video_engine/main/test/WindowsTest/renderStartImage.jpg
new file mode 100644
index 0000000..b10a842
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/renderStartImage.jpg
Binary files differ
diff --git a/src/video_engine/main/test/WindowsTest/renderTimeoutImage.jpg b/src/video_engine/main/test/WindowsTest/renderTimeoutImage.jpg
new file mode 100644
index 0000000..cb34d67
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/renderTimeoutImage.jpg
Binary files differ
diff --git a/src/video_engine/main/test/WindowsTest/res/Capture.rc2 b/src/video_engine/main/test/WindowsTest/res/Capture.rc2
new file mode 100644
index 0000000..d9acfd2
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/res/Capture.rc2
@@ -0,0 +1,13 @@
+//

+// DXCAPTURE.RC2 - resources Microsoft Visual C++ does not edit directly

+//

+

+#ifdef APSTUDIO_INVOKED

+	#error this file is not editable by Microsoft Visual C++

+#endif //APSTUDIO_INVOKED

+

+

+/////////////////////////////////////////////////////////////////////////////

+// Add manually edited resources here...

+

+/////////////////////////////////////////////////////////////////////////////

diff --git a/src/video_engine/main/test/WindowsTest/resource.h b/src/video_engine/main/test/WindowsTest/resource.h
new file mode 100644
index 0000000..34e90eb
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/resource.h
@@ -0,0 +1,58 @@
+//{{NO_DEPENDENCIES}}
+// Microsoft Visual C++ generated include file.
+// Used by Capture.rc
+//
+#define IDM_ABOUTBOX                    0x0010
+#define IDD_DXQUALITY_DIALOG            102
+#define IDI_ICON1                       130
+#define IDD_SLAVE_CHANNEL               132
+#define IDC_LIVEVIDEO                   1000
+#define IDC_CAPTURE                     1001
+#define IDC_DEVICE                      1003
+#define IDC_STARTSEND                   1004
+#define IDC_STARTLISTEN                 1006
+#define IDC_STOPLISTEN                  1007
+#define IDC_STOPSend                    1008
+#define IDC_CODEC_LIST                  1010
+#define IDC_CODEC_SIZE                  1011
+#define IDC_IPADDRESS1                  1012
+#define IDC_LOCAL_PORT1                 1013
+#define IDC_CHANGE_SIZE                 1017
+#define IDC_QUALITY                     1018
+#define IDC_BITRATE                     1019
+#define IDC_WINDOW_SIZE                 1022
+#define IDC_REMOTE_PORT1                1025
+#define IDC_START_REC                   1030
+#define IDC_STOP_REC                    1031
+#define IDC_CAM_PROPERTY                1033
+#define IDC_ONMODE                      1034
+#define IDC_CAPTURECAP                  1038
+#define IDC_RADIO1                      1039
+#define IDC_MIN_FRAME_RATE              1040
+#define IDC_RTCPMODE                    1042
+#define IDC_TMMBR                       1043
+#define IDC_PACKETBURST                 1044
+#define IDC_PROT_NONE                   1045
+#define IDC_PROT_NACK                   1046
+#define IDC_PROT_FEC                    1047
+#define IDC_PROT_NACKFEC                1048
+#define IDC_INFORMATION                 1050
+#define IDC_PACKETLOSS                  1051
+#define IDC_FREEZELOG                   1052
+#define IDC_VERSION                     1053
+#define IDC_EXTTRANSPORT                1054
+#define IDC_DELAY                       1055
+#define IDC_BTN_RECORD_INCOMING         1056
+#define IDC_BTN_RECORD_OUTGOING         1057
+#define IDC_BTN_CREATE_SLAVE            1058
+
+// Next default values for new objects
+// 
+#ifdef APSTUDIO_INVOKED
+#ifndef APSTUDIO_READONLY_SYMBOLS
+#define _APS_NEXT_RESOURCE_VALUE        133
+#define _APS_NEXT_COMMAND_VALUE         32771
+#define _APS_NEXT_CONTROL_VALUE         1059
+#define _APS_NEXT_SYMED_VALUE           101
+#endif
+#endif
diff --git a/src/video_engine/main/test/WindowsTest/tbExternalTransport.cc b/src/video_engine/main/test/WindowsTest/tbExternalTransport.cc
new file mode 100644
index 0000000..df490fa
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/tbExternalTransport.cc
@@ -0,0 +1,313 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// tbExternalTransport.cpp
+//
+
+#include "tbExternalTransport.h"
+
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "thread_wrapper.h"
+#include "tick_util.h"
+#include "vie_network.h"
+#include "tick_util.h"
+
+using namespace webrtc;
+
+TbExternalTransport::TbExternalTransport(ViENetwork& vieNetwork)
+    :
+    _vieNetwork(vieNetwork),
+    _thread(*ThreadWrapper::CreateThread(ViEExternalTransportRun, this, kHighPriority, "AutotestTransport")), 
+    _event(*EventWrapper::Create()),
+    _crit(*CriticalSectionWrapper::CreateCriticalSection()),
+    _statCrit(*CriticalSectionWrapper::CreateCriticalSection()),
+    _lossRate(0),
+    _networkDelayMs(0),
+    _rtpCount(0),
+    _dropCount(0),
+    _rtcpCount(0),
+    _rtpPackets(),
+    _rtcpPackets(),
+    _checkSSRC(false),
+    _lastSSRC(0),
+    _checkSequenceNumber(0),
+    _firstSequenceNumber(0),
+    _lastSeq(0)
+{
+    srand((int)TickTime::MicrosecondTimestamp());
+    unsigned int tId = 0;
+    _thread.Start(tId);
+}
+
+
+TbExternalTransport::~TbExternalTransport()
+{
+    // TODO: stop thread
+    _thread.SetNotAlive();
+    _event.Set();
+    if (_thread.Stop())
+    {
+        delete &_thread;
+        delete &_event;
+    }
+    delete &_crit;
+    delete &_statCrit;
+}
+
+
+
+
+    
+int TbExternalTransport::SendPacket(int channel, const void *data, int len)
+{
+    _statCrit.Enter();
+    _rtpCount++;
+    _statCrit.Leave();
+
+
+    unsigned short sequenceNumber =  (((unsigned char*) data)[2]) << 8;
+    sequenceNumber +=  (((unsigned char*) data)[3]);
+
+            
+        // Packet loss
+    int dropThis = rand() % 100;
+    bool nacked=false;
+    if(sequenceNumber<_lastSeq)
+    {
+        nacked=true;
+    }
+    else
+    {
+        _lastSeq=sequenceNumber;
+    }
+
+    if (dropThis < _lossRate)
+    {
+        _statCrit.Enter();
+        _dropCount++;
+        _statCrit.Leave();
+
+      
+      /*  char str[256];
+        sprintf(str,"Dropping seq %d length %d m %d, ts %u\n", sequenceNumber,len,marker,timestamp) ;
+        OutputDebugString(str);*/
+        
+        return len;
+    }
+    else
+    {
+        if(nacked)
+        {
+            /*char str[256];
+            sprintf(str,"Resending seq %d length %d m %d, ts %u\n", sequenceNumber,len,marker,timestamp) ;
+            OutputDebugString(str);*/
+        }    
+        else
+        {
+            /*char str[256];
+            sprintf(str,"Sending seq %d length %d m %d, ts %u\n", sequenceNumber,len,marker,timestamp) ;
+            OutputDebugString(str);*/
+         
+        }
+    }    
+    
+
+    VideoPacket* newPacket = new VideoPacket();
+    memcpy(newPacket->packetBuffer, data, len);
+    newPacket->length = len;
+    newPacket->channel = channel;
+
+    _crit.Enter();
+    newPacket->receiveTime = NowMs() + _networkDelayMs;
+    _rtpPackets.push(newPacket);
+    _event.Set();
+    _crit.Leave();
+    return len;
+}
+
+int TbExternalTransport::SendRTCPPacket(int channel, const void *data, int len)
+{
+    _statCrit.Enter();
+    _rtcpCount++;
+    _statCrit.Leave();
+
+    VideoPacket* newPacket = new VideoPacket();
+    memcpy(newPacket->packetBuffer, data, len);
+    newPacket->length = len;
+    newPacket->channel = channel;
+
+    _crit.Enter();
+    newPacket->receiveTime = NowMs() + _networkDelayMs;
+    _rtcpPackets.push(newPacket);
+    _event.Set();
+    _crit.Leave();
+    return len;
+}
+
+WebRtc_Word32 TbExternalTransport::SetPacketLoss(WebRtc_Word32 lossRate)
+{
+    CriticalSectionScoped cs(_statCrit);
+    _lossRate = lossRate;
+    return 0;
+}
+
+void TbExternalTransport::SetNetworkDelay(WebRtc_Word64 delayMs)
+{
+    CriticalSectionScoped cs(_crit);
+    _networkDelayMs = delayMs;
+    return;
+}
+
+void TbExternalTransport::ClearStats()
+{
+    CriticalSectionScoped cs(_statCrit);
+    _rtpCount = 0;
+    _dropCount = 0;
+    _rtcpCount = 0;
+    return;
+}
+
+void TbExternalTransport::GetStats(WebRtc_Word32& numRtpPackets, WebRtc_Word32& numDroppedPackets, WebRtc_Word32& numRtcpPackets)
+{
+    CriticalSectionScoped cs(_statCrit);
+    numRtpPackets = _rtpCount;
+    numDroppedPackets = _dropCount;
+    numRtcpPackets = _rtcpCount;
+    return;
+}
+
+void TbExternalTransport::EnableSSRCCheck()
+{
+    CriticalSectionScoped cs(_statCrit);
+    _checkSSRC = true;
+}
+unsigned int TbExternalTransport::ReceivedSSRC()
+{
+    CriticalSectionScoped cs(_statCrit);
+    return _lastSSRC;
+}
+
+void TbExternalTransport::EnableSequenceNumberCheck()
+{
+    CriticalSectionScoped cs(_statCrit);
+    _checkSequenceNumber = true;
+}
+
+unsigned short TbExternalTransport::GetFirstSequenceNumber()
+{
+    CriticalSectionScoped cs(_statCrit);
+    return _firstSequenceNumber;
+}
+
+
+bool TbExternalTransport::ViEExternalTransportRun(void* object)
+{
+    return static_cast<TbExternalTransport*>(object)->ViEExternalTransportProcess();
+}
+bool TbExternalTransport::ViEExternalTransportProcess()
+{
+    unsigned int waitTime = KMaxWaitTimeMs;
+
+    VideoPacket* packet = NULL;
+
+    while (!_rtpPackets.empty())
+    {
+        // Take first packet in queue
+        _crit.Enter();
+        packet = _rtpPackets.front();
+        WebRtc_Word64 timeToReceive = packet->receiveTime - NowMs();
+        if (timeToReceive > 0)
+        {
+            // No packets to receive yet
+            if (timeToReceive < waitTime &&
+                timeToReceive > 0)
+            {
+                waitTime = (unsigned int) timeToReceive;
+            }
+            _crit.Leave();
+            break;
+        }
+        _rtpPackets.pop();
+        _crit.Leave();
+
+        // Send to ViE
+        if (packet)
+        {
+            {
+                CriticalSectionScoped cs(_statCrit);
+                if (_checkSSRC)
+                {
+                    _lastSSRC  = ((packet->packetBuffer[8]) << 24);
+                    _lastSSRC += (packet->packetBuffer[9] << 16);
+                    _lastSSRC += (packet->packetBuffer[10] << 8);
+                    _lastSSRC += packet->packetBuffer[11];
+                    _checkSSRC = false;
+                }
+                if (_checkSequenceNumber)
+                {
+                    _firstSequenceNumber = (unsigned char)packet->packetBuffer[2] << 8;
+                    _firstSequenceNumber += (unsigned char)packet->packetBuffer[3];
+                    _checkSequenceNumber = false;
+                }
+            }
+            /*
+            unsigned short sequenceNumber =  (unsigned char)packet->packetBuffer[2] << 8;
+            sequenceNumber +=  (unsigned char)packet->packetBuffer[3];
+            
+            int marker=packet->packetBuffer[1] & 0x80;
+            unsigned int timestamp=((((unsigned char*)packet->packetBuffer)[4]) << 24) + ((((unsigned char*)packet->packetBuffer)[5])<<16) +((((unsigned char*)packet->packetBuffer)[6])<<8)+(((unsigned char*)packet->packetBuffer)[7]);
+            char str[256];
+            sprintf(str,"Receiving seq %u length %d m %d, ts %u\n", sequenceNumber,packet->length,marker,timestamp) ;
+            OutputDebugString(str);*/
+
+            _vieNetwork.ReceivedRTPPacket(packet->channel, packet->packetBuffer, packet->length);
+            delete packet;
+            packet = NULL;
+        }
+    }
+    while (!_rtcpPackets.empty())
+    {
+        // Take first packet in queue
+        _crit.Enter();
+        packet = _rtcpPackets.front();
+        WebRtc_Word64 timeToReceive = packet->receiveTime - NowMs();
+        if (timeToReceive > 0)
+        {
+            // No packets to receive yet
+            if (timeToReceive < waitTime &&
+                timeToReceive > 0)
+            {
+                waitTime = (unsigned int) timeToReceive;
+            }
+            _crit.Leave();
+            break;
+        }
+        packet = _rtcpPackets.front();
+        _rtcpPackets.pop();
+        _crit.Leave();
+
+        // Send to ViE
+        if (packet)
+        {
+            _vieNetwork.ReceivedRTCPPacket(packet->channel, packet->packetBuffer, packet->length);
+            delete packet;
+            packet = NULL;
+        }
+    }
+    _event.Wait(waitTime + 1); // Add 1 ms to not call to early...
+    return true;
+}
+
+WebRtc_Word64 TbExternalTransport::NowMs()
+{
+    return TickTime::MillisecondTimestamp();
+}
diff --git a/src/video_engine/main/test/WindowsTest/tbExternalTransport.h b/src/video_engine/main/test/WindowsTest/tbExternalTransport.h
new file mode 100644
index 0000000..53226c6
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/tbExternalTransport.h
@@ -0,0 +1,106 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// tbExternalTransport.h
+//
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_TBEXTERNALTRANSPORT_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_TBEXTERNALTRANSPORT_H_
+
+#include "common_types.h"
+#include <queue>
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+class ViENetwork;
+}
+
+class TbExternalTransport :  public webrtc::Transport
+{
+public:
+    TbExternalTransport(webrtc::ViENetwork& vieNetwork);
+    ~TbExternalTransport(void);
+
+    virtual int SendPacket(int channel, const void *data, int len);
+    virtual int SendRTCPPacket(int channel, const void *data, int len);
+
+    WebRtc_Word32 SetPacketLoss(WebRtc_Word32 lossRate);  // Rate in %
+    void SetNetworkDelay(WebRtc_Word64 delayMs);
+
+    void ClearStats();
+    void GetStats(WebRtc_Word32& numRtpPackets, WebRtc_Word32& numDroppedPackets, WebRtc_Word32& numRtcpPackets);
+
+    void EnableSSRCCheck();
+    unsigned int ReceivedSSRC();
+
+    void EnableSequenceNumberCheck();
+    unsigned short GetFirstSequenceNumber();
+
+    
+protected:
+    static bool ViEExternalTransportRun(void* object);
+    bool ViEExternalTransportProcess();
+private:
+    WebRtc_Word64 NowMs();
+
+    enum { KMaxPacketSize = 1650};
+    enum { KMaxWaitTimeMs = 100};
+    typedef struct
+    {
+        WebRtc_Word8  packetBuffer[KMaxPacketSize];
+        WebRtc_Word32 length;
+        WebRtc_Word32 channel;
+        WebRtc_Word64 receiveTime;
+    } VideoPacket;
+
+    typedef std::queue<VideoPacket*>  VideoPacketQueue;
+
+
+    webrtc::ViENetwork&      _vieNetwork;
+    webrtc::ThreadWrapper&   _thread;
+    webrtc::EventWrapper&           _event;
+    webrtc::CriticalSectionWrapper& _crit;
+    webrtc::CriticalSectionWrapper& _statCrit;
+
+    WebRtc_Word32          _lossRate;
+    WebRtc_Word64          _networkDelayMs;
+    WebRtc_Word32          _rtpCount;
+    WebRtc_Word32          _rtcpCount;
+    WebRtc_Word32          _dropCount;
+
+    VideoPacketQueue     _rtpPackets;
+    VideoPacketQueue     _rtcpPackets;
+
+    bool                 _checkSSRC;
+    WebRtc_UWord32         _lastSSRC;
+    bool                 _checkSequenceNumber;
+    WebRtc_UWord16         _firstSequenceNumber;
+    WebRtc_Word32          _lastSeq;
+
+    //int& numberOfErrors;
+
+    //int _bits;
+    //int _lastTicks;            
+    //int _dropCnt;
+    //int _sentCount;
+    //int _frameCount;
+    //int _packetLoss;
+
+    //VideoEngine* _video;
+
+    //ReceiveBufferQueue _videoBufferQueue;
+    //ReceiveBufferQueue _rtcpBufferQueue;
+};
+
+#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_WINDOWSTEST_TBEXTERNALTRANSPORT_H_
diff --git a/src/video_engine/main/test/WindowsTest/videosize.cc b/src/video_engine/main/test/WindowsTest/videosize.cc
new file mode 100644
index 0000000..a675ec5
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/videosize.cc
@@ -0,0 +1,140 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "VideoSize.h"
+int GetWidthHeight( VideoSize size, int& width, int& height)
+{
+	switch(size)
+	{
+	case SQCIF:
+		width = 128;
+		height = 96;
+		return 0;
+	case QQVGA:
+		width = 160;
+		height = 120;
+		return 0;
+	case QCIF:
+		width = 176;
+		height = 144;
+		return 0;
+	case CGA:
+		width = 320;
+		height = 200;
+		return 0;
+	case QVGA:
+		width = 320;
+		height = 240;
+		return 0;
+	case SIF:
+		width = 352;
+		height = 240;
+		return 0;
+    case WQVGA:
+		width = 400;
+		height = 240;
+		return 0;
+	case CIF:
+		width = 352;
+		height = 288;
+		return 0;
+	case W288P:
+		width = 512;
+		height = 288;
+		return 0;
+    case W368P:
+        width = 640;
+        height = 368;
+        return 0;
+	case S_448P:
+		width = 576;
+		height = 448;
+		return 0;
+	case VGA:
+		width = 640;
+		height = 480;
+		return 0;
+	case S_432P:
+		width = 720;
+		height = 432;
+		return 0;
+	case W432P:
+		width = 768;
+		height = 432;
+		return 0;
+	case S_4SIF:
+		width = 704;
+		height = 480;
+		return 0;
+	case W448P:
+		width = 768;
+		height = 448;
+		return 0;
+	case NTSC:
+		width = 720;
+		height = 480;
+		return 0;
+    case FW448P:
+        width = 800;
+        height = 448;
+        return 0;
+    case S_768x480P:
+        width = 768;
+        height = 480;
+        return 0;
+    case WVGA:
+		width = 800;
+		height = 480;
+		return 0;
+	case S_4CIF:
+		width = 704;
+		height = 576;
+		return 0;
+	case SVGA:
+		width = 800;
+		height = 600; 
+		return 0;
+    case W544P:
+        width = 960;
+        height = 544;
+        return 0;
+	case W576P:
+		width = 1024;
+		height = 576;
+		return 0;
+	case HD:
+		width = 960;
+		height = 720;
+		return 0;
+	case XGA:
+		width = 1024;
+		height = 768;
+		return 0;
+	case FULL_HD:
+		width = 1440;
+		height = 1080;
+		return 0;	
+	case WHD:
+		width = 1280;
+		height = 720;
+		return 0;
+    case UXGA:
+        width = 1600;
+        height = 1200;
+        return 0;
+	case WFULL_HD:
+		width = 1920;
+		height = 1080;
+		return 0;
+	default:
+		return -1;
+	}
+	return -1;
+}
\ No newline at end of file
diff --git a/src/video_engine/main/test/WindowsTest/windowstest.gypi b/src/video_engine/main/test/WindowsTest/windowstest.gypi
new file mode 100644
index 0000000..83d0515
--- /dev/null
+++ b/src/video_engine/main/test/WindowsTest/windowstest.gypi
@@ -0,0 +1,86 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'conditions': [
+    # TODO(kjellander): Support UseoFMFC on VS2010.
+    # http://code.google.com/p/webrtc/issues/detail?id=709
+    ['OS=="win" and MSVS_VERSION < "2010"', {
+      'targets': [
+        # WinTest - GUI test for Windows
+        {
+          'target_name': 'vie_win_test',
+          'type': 'executable',
+          'dependencies': [
+            '<(webrtc_root)/modules/modules.gyp:video_render_module',
+            '<(webrtc_root)/modules/modules.gyp:video_capture_module',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+            ## VoiceEngine
+            '<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine_core',
+            ## VideoEngine
+            'video_engine_core',
+          ],
+          'include_dirs': [
+            './interface',
+            '../../../../', # common_types.h and typedefs.h
+            '../commonTestClasses/'
+          ],
+          'sources': [
+            'Capture.rc',
+            'captureDeviceImage.jpg',
+            'ChannelDlg.cc',
+            'ChannelDlg.h',
+            'ChannelPool.cc',
+            'ChannelPool.h',
+            'renderStartImage.jpg',
+            'renderTimeoutImage.jpg',
+            'res\Capture.rc2',
+            'resource.h',
+            'StdAfx.h',
+            'videosize.cc',
+            'VideoSize.h',
+            'WindowsTest.cc',
+            'WindowsTest.h',
+            'WindowsTestMainDlg.cc',
+            'WindowsTestMainDlg.h',
+            'WindowsTestResouce.rc',
+            'WindowsTestResource.h',
+            'tbExternalTransport.cc',
+            'CaptureDevicePool.cc',
+            'tbExternalTransport.h',
+            'CaptureDevicePool.h',
+
+          ],
+           'configurations': {
+            'Common_Base': {
+              'msvs_configuration_attributes': {
+                'conditions': [
+                  ['component=="shared_library"', {
+                    'UseOfMFC': '2',  # Shared DLL
+                  },{
+                    'UseOfMFC': '1',  # Static
+                  }],
+                ],
+              },
+            },
+          },
+          'msvs_settings': {
+            'VCLinkerTool': {
+              'SubSystem': '2',   # Windows
+            },
+          },
+        },
+      ],
+    }],
+  ],
+}
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/video_engine/main/test/android_test/AndroidManifest.xml b/src/video_engine/main/test/android_test/AndroidManifest.xml
new file mode 100644
index 0000000..74e5a38
--- /dev/null
+++ b/src/video_engine/main/test/android_test/AndroidManifest.xml
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+      android:versionCode="1" package="org.webrtc.videoengineapp" android:versionName="1.07">
+    <application android:icon="@drawable/logo"
+		 android:label="@string/app_name"
+		 android:debuggable="true">
+        <activity android:name=".WebRTCDemo"
+                  android:label="@string/app_name" 
+                  android:configChanges="keyboardHidden|orientation"                  
+                  >
+                  <!--android:configChanges="keyboardHidden|orientation"  -->
+                  <!-- android:screenOrientation="landscape" -->
+            <intent-filter>
+                <action android:name="android.intent.action.MAIN" />
+                <category android:name="android.intent.category.LAUNCHER" />
+            </intent-filter>
+        </activity>
+    </application>
+    <uses-sdk android:minSdkVersion="10" />
+    <uses-permission android:name="android.permission.CAMERA"></uses-permission>
+    <uses-feature android:name="android.hardware.camera" />
+    <uses-feature android:name="android.hardware.camera.autofocus" />
+    <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
+    <uses-permission android:name="android.permission.RECORD_AUDIO" />
+    <uses-permission android:name="android.permission.INTERNET" />
+    <uses-permission android:name="android.permission.WAKE_LOCK" />
+    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+</manifest> 
diff --git a/src/video_engine/main/test/android_test/build.xml b/src/video_engine/main/test/android_test/build.xml
new file mode 100644
index 0000000..cd46e4d
--- /dev/null
+++ b/src/video_engine/main/test/android_test/build.xml
@@ -0,0 +1,1664 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project name="WebRTCDemo" default="help">
+
+    <!--
+Notice for all the files in this folder.
+
+   Copyright (c) 2005-2008, The Android Open Source Project
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+    -->
+
+    <!--
+        This build file is imported by the project build file. It contains
+        all the targets and tasks necessary to build Android projects, be they
+        regular projects, library projects, or test projects.
+
+        At the beginning of the file is a list of properties that can be overridden
+        by adding them to your ant.properties (properties are immutable, so their
+        first definition sticks and is never changed).
+
+        Follows:
+        - custom task definitions,
+        - more properties (do not override those unless the whole build system is modified).
+        - macros used throughout the build,
+        - base build targets,
+        - debug-specific build targets,
+        - release-specific build targets,
+        - instrument-specific build targets,
+        - test project-specific build targets,
+        - install targets,
+        - help target
+    -->
+
+    <!-- Set global properties for this build -->
+    <property environment="env"/>
+    <property name="sdk.dir" location="${env.ANDROID_SDK_ROOT}"/>
+    <property name="sdk.version" value="${env.ANDROID_SDK_VERSION}"/>
+
+    <property name="target" value="android-10"/>
+
+    <!-- ******************************************************* -->
+    <!-- **************** Overridable Properties *************** -->
+    <!-- ******************************************************* -->
+
+    <!-- You can override these values in your build.xml or ant.properties.
+         Overriding any other properties may result in broken build. -->
+
+    <!-- Tells adb which device to target. You can change this from the command line
+         by invoking "ant -Dadb.device.arg=-d" for device "ant -Dadb.device.arg=-e" for
+         the emulator. -->
+    <property name="adb.device.arg" value="" />
+
+    <!-- fileset exclude patterns (space separated) to prevent
+         files inside src/ from being packaged. -->
+    <property name="android.package.excludes" value="" />
+
+    <!-- set some properties used for filtering/override. If those weren't defined
+         before, then this will create them with empty values, which are then ignored
+         by the custom tasks receiving them. -->
+    <property name="version.code" value="" />
+    <property name="version.name" value="" />
+    <property name="aapt.resource.filter" value="" />
+    <!-- 'aapt.ignore.assets' is the list of file patterns to ignore under /res and /assets.
+         Default is "!.svn:!.git:.*:<dir>_*:!CVS:!thumbs.db:!picasa.ini:!*.scc:*~"
+
+         Overall patterns syntax is:
+           [!][<dir>|<file>][*suffix-match|prefix-match*|full-match]:more:patterns...
+
+         - The first character flag ! avoids printing a warning.
+         - Pattern can have the flag "<dir>" to match only directories
+           or "<file>" to match only files. Default is to match both.
+         - Match is not case-sensitive.
+    -->
+    <property name="aapt.ignore.assets" value="" />
+
+    <!-- compilation options -->
+    <property name="java.encoding" value="UTF-8" />
+    <property name="java.target" value="1.5" />
+    <property name="java.source" value="1.5" />
+    <property name="java.compilerargs" value="" />
+
+    <!-- Renderscript options -->
+    <property name="renderscript.debug.opt.level" value="O0" />
+    <property name="renderscript.release.opt.level" value="O3" />
+
+    <!-- manifest merger default value -->
+    <property name="manifestmerger.enabled" value="false" />
+
+    <!-- instrumentation options -->
+    <property name="emma.filter" value="" />
+
+    <!-- Verbosity -->
+    <property name="verbose" value="false" />
+
+    <!-- ******************************************************* -->
+    <!-- ********************* Custom Tasks ******************** -->
+    <!-- ******************************************************* -->
+
+    <!-- jar file from where the tasks are loaded -->
+    <path id="android.antlibs">
+        <pathelement path="${sdk.dir}/tools/lib/anttasks.jar" />
+    </path>
+
+    <!-- Custom tasks -->
+    <taskdef resource="anttasks.properties" classpathref="android.antlibs" />
+
+    <!-- Emma configuration -->
+    <property name="emma.dir" value="${sdk.dir}/tools/lib" />
+    <path id="emma.lib">
+        <pathelement location="${emma.dir}/emma.jar" />
+        <pathelement location="${emma.dir}/emma_ant.jar" />
+    </path>
+    <taskdef resource="emma_ant.properties" classpathref="emma.lib" />
+    <!-- End of emma configuration -->
+
+
+    <!-- ******************************************************* -->
+    <!-- ******************* Other Properties ****************** -->
+    <!-- ******************************************************* -->
+    <!-- overriding these properties may break the build
+         unless the whole file is updated -->
+
+    <!-- Input directories -->
+    <property name="source.dir" value="src" />
+    <property name="source.absolute.dir" location="${source.dir}" />
+    <property name="gen.absolute.dir" location="gen" />
+    <property name="resource.absolute.dir" location="res" />
+    <property name="asset.dir" value="assets" />
+    <property name="asset.absolute.dir" location="${asset.dir}" />
+    <property name="jar.libs.dir" value="libs" />
+    <property name="jar.libs.absolute.dir" location="${jar.libs.dir}" />
+    <property name="native.libs.absolute.dir" location="libs" />
+
+    <property name="manifest.file" value="AndroidManifest.xml" />
+    <property name="manifest.abs.file" location="${manifest.file}" />
+
+    <!-- Output directories -->
+    <property name="out.dir" value="bin" />
+    <property name="out.absolute.dir" location="${out.dir}" />
+    <property name="out.classes.absolute.dir" location="${out.dir}/classes" />
+    <property name="out.res.absolute.dir" location="${out.dir}/res" />
+    <property name="out.aidl.absolute.dir" location="${out.dir}/aidl" />
+    <property name="out.manifest.abs.file" location="${out.dir}/AndroidManifest.xml" />
+
+    <!-- tools location -->
+    <property name="android.tools.dir" location="${sdk.dir}/tools" />
+    <property name="android.platform.tools.dir" location="${sdk.dir}/platform-tools" />
+    <condition property="exe" value=".exe" else=""><os family="windows" /></condition>
+    <condition property="bat" value=".bat" else=""><os family="windows" /></condition>
+    <property name="adb" location="${android.platform.tools.dir}/adb${exe}" />
+    <property name="zipalign" location="${android.tools.dir}/zipalign${exe}" />
+    <property name="aidl" location="${android.platform.tools.dir}/aidl${exe}" />
+    <property name="aapt" location="${android.platform.tools.dir}/aapt${exe}" />
+    <property name="dx" location="${android.platform.tools.dir}/dx${bat}" />
+    <property name="renderscript" location="${android.platform.tools.dir}/llvm-rs-cc${exe}"/>
+
+    <!-- Renderscript include Path -->
+    <path id="android.renderscript.include.path">
+        <pathelement location="${android.platform.tools.dir}/renderscript/include" />
+        <pathelement location="${android.platform.tools.dir}/renderscript/clang-include" />
+    </path>
+
+    <!-- Intermediate files -->
+    <property name="dex.file.name" value="classes.dex" />
+    <property name="intermediate.dex.file" location="${out.absolute.dir}/${dex.file.name}" />
+    <property name="resource.package.file.name" value="${ant.project.name}.ap_" />
+
+    <!-- Build property file -->
+    <property name="out.build.prop.file" location="${out.absolute.dir}/build.prop" />
+
+
+    <!-- This is needed by emma as it uses multilevel verbosity instead of simple 'true' or 'false'
+         The property 'verbosity' is not user configurable and depends exclusively on 'verbose'
+         value.-->
+    <condition property="verbosity" value="verbose" else="quiet">
+        <istrue value="${verbose}" />
+    </condition>
+
+    <!-- properties for signing in release mode -->
+    <condition property="has.keystore">
+        <and>
+            <isset property="key.store" />
+            <length string="${key.store}" when="greater" length="0" />
+            <isset property="key.alias" />
+        </and>
+    </condition>
+    <condition property="has.password">
+        <and>
+            <isset property="has.keystore" />
+            <isset property="key.store.password" />
+            <isset property="key.alias.password" />
+        </and>
+    </condition>
+
+    <!-- properties for packaging -->
+    <property name="build.packaging.nocrunch" value="true" />
+
+    <!-- whether we need to fork javac.
+         This is only needed on Windows when running Java < 7 -->
+    <condition else="false" property="need.javac.fork">
+        <and>
+            <matches pattern="1\.[56]" string="${java.specification.version}"/>
+            <not>
+                <os family="unix"/>
+            </not>
+        </and>
+    </condition>
+
+    <!-- ******************************************************* -->
+    <!-- ************************ Macros *********************** -->
+    <!-- ******************************************************* -->
+
+    <!-- macro to do a task on if project.is.library is false.
+         elseText attribute is displayed otherwise -->
+    <macrodef name="do-only-if-not-library">
+        <attribute name="elseText" />
+        <element name="task-to-do" implicit="yes" />
+        <sequential>
+        <if condition="${project.is.library}">
+            <else>
+                <task-to-do />
+            </else>
+            <then>
+                <echo level="info">@{elseText}</echo>
+            </then>
+        </if>
+        </sequential>
+    </macrodef>
+
+    <!-- macro to do a task on if manifest.hasCode is true.
+         elseText attribute is displayed otherwise -->
+    <macrodef name="do-only-if-manifest-hasCode">
+        <attribute name="elseText" default=""/>
+        <element name="task-to-do" implicit="yes" />
+        <sequential>
+        <if condition="${manifest.hasCode}">
+            <then>
+                <task-to-do />
+            </then>
+            <else>
+                <if>
+                    <condition>
+                        <length string="@{elseText}" trim="true" when="greater" length="0" />
+                    </condition>
+                    <then>
+                        <echo level="info">@{elseText}</echo>
+                    </then>
+                </if>
+            </else>
+        </if>
+        </sequential>
+    </macrodef>
+
+
+    <!-- Configurable macro, which allows to pass as parameters output directory,
+         output dex filename and external libraries to dex (optional) -->
+    <macrodef name="dex-helper">
+        <element name="external-libs" optional="yes" />
+        <attribute name="nolocals" default="false" />
+        <sequential>
+            <!-- sets the primary input for dex. If a pre-dex task sets it to
+                 something else this has no effect -->
+            <property name="out.dex.input.absolute.dir" value="${out.classes.absolute.dir}" />
+
+            <!-- set the secondary dx input: the project (and library) jar files
+                 If a pre-dex task sets it to something else this has no effect -->
+            <if>
+                <condition>
+                    <isreference refid="out.dex.jar.input.ref" />
+                </condition>
+                <else>
+                    <path id="out.dex.jar.input.ref">
+                        <path refid="project.all.jars.path" />
+                    </path>
+                </else>
+            </if>
+
+            <dex executable="${dx}"
+                    output="${intermediate.dex.file}"
+                    nolocals="@{nolocals}"
+                    verbose="${verbose}">
+                <path path="${out.dex.input.absolute.dir}"/>
+                <path refid="out.dex.jar.input.ref" />
+                <external-libs />
+            </dex>
+        </sequential>
+    </macrodef>
+
+    <!-- This is macro that enable passing variable list of external jar files to ApkBuilder
+         Example of use:
+         <package-helper>
+             <extra-jars>
+                <jarfolder path="my_jars" />
+                <jarfile path="foo/bar.jar" />
+                <jarfolder path="your_jars" />
+             </extra-jars>
+         </package-helper> -->
+    <macrodef name="package-helper">
+        <element name="extra-jars" optional="yes" />
+        <sequential>
+            <apkbuilder
+                    outfolder="${out.absolute.dir}"
+                    resourcefile="${resource.package.file.name}"
+                    apkfilepath="${out.packaged.file}"
+                    debugpackaging="${build.is.packaging.debug}"
+                    debugsigning="${build.is.signing.debug}"
+                    verbose="${verbose}"
+                    hascode="${manifest.hasCode}"
+                    previousBuildType="${build.last.is.packaging.debug}/${build.last.is.signing.debug}"
+                    buildType="${build.is.packaging.debug}/${build.is.signing.debug}">
+                <dex path="${intermediate.dex.file}"/>
+                <sourcefolder path="${source.absolute.dir}"/>
+                <jarfile refid="project.all.jars.path" />
+                <nativefolder path="${native.libs.absolute.dir}" />
+                <nativefolder refid="project.library.native.folder.path" />
+                <extra-jars/>
+            </apkbuilder>
+        </sequential>
+    </macrodef>
+
+    <!-- This is macro which zipaligns in.package and outputs it to out.package. Used by targets
+         debug, -debug-with-emma and release.-->
+    <macrodef name="zipalign-helper">
+        <attribute name="in.package" />
+        <attribute name="out.package" />
+        <sequential>
+            <zipalign
+                    executable="${zipalign}"
+                    input="@{in.package}"
+                    output="@{out.package}"
+                    verbose="${verbose}" />
+        </sequential>
+    </macrodef>
+
+    <macrodef name="run-tests-helper">
+        <attribute name="emma.enabled" default="false" />
+        <element name="extra-instrument-args" optional="yes" />
+        <sequential>
+            <echo level="info">Running tests ...</echo>
+            <exec executable="${adb}" failonerror="true">
+                <arg line="${adb.device.arg}" />
+                <arg value="shell" />
+                <arg value="am" />
+                <arg value="instrument" />
+                <arg value="-w" />
+                <arg value="-e" />
+                <arg value="coverage" />
+                <arg value="@{emma.enabled}" />
+                <extra-instrument-args />
+                <arg value="${project.app.package}/${test.runner}" />
+            </exec>
+        </sequential>
+    </macrodef>
+
+    <macrodef name="record-build-key">
+        <attribute name="key" default="false" />
+        <attribute name="value" default="false" />
+        <sequential>
+            <propertyfile file="${out.build.prop.file}" comment="Last build type">
+                <entry key="@{key}" value="@{value}"/>
+            </propertyfile>
+        </sequential>
+    </macrodef>
+
+    <macrodef name="record-build-info">
+        <sequential>
+            <record-build-key key="build.last.target" value="${build.target}" />
+            <record-build-key key="build.last.is.instrumented" value="${build.is.instrumented}" />
+            <record-build-key key="build.last.is.packaging.debug" value="${build.is.packaging.debug}" />
+            <record-build-key key="build.last.is.signing.debug" value="${build.is.signing.debug}" />
+        </sequential>
+    </macrodef>
+
+    <macrodef name="uninstall-helper">
+        <attribute name="app.package" default="false" />
+        <sequential>
+            <echo level="info">Uninstalling @{app.package} from the default emulator or device...</echo>
+            <exec executable="${adb}" failonerror="true">
+                <arg line="${adb.device.arg}" />
+                <arg value="uninstall" />
+                <arg value="@{app.package}" />
+            </exec>
+        </sequential>
+    </macrodef>
+
+    <!-- ******************************************************* -->
+    <!-- ******************** Build Targets ******************** -->
+    <!-- ******************************************************* -->
+
+    <!-- Basic Ant + SDK check -->
+    <target name="-check-env">
+        <checkenv />
+    </target>
+
+    <!-- target to disable building dependencies -->
+    <target name="nodeps">
+        <property name="dont.do.deps" value="true" />
+    </target>
+
+    <!-- generic setup -->
+    <target name="-setup" depends="-check-env">
+        <echo level="info">Project Name: ${ant.project.name}</echo>
+        <gettype projectTypeOut="project.type" />
+
+        <!-- sets a few boolean based on project.type
+             to make the if task easier -->
+        <condition property="project.is.library" value="true" else="false">
+            <equals arg1="${project.type}" arg2="library" />
+        </condition>
+        <condition property="project.is.test" value="true" else="false">
+            <equals arg1="${project.type}" arg2="test" />
+        </condition>
+        <condition property="project.is.testapp" value="true" else="false">
+            <equals arg1="${project.type}" arg2="test-app" />
+        </condition>
+
+        <!-- If a test project, resolve absolute path to tested project. -->
+        <if condition="${project.is.test}">
+            <then>
+                <property name="tested.project.absolute.dir" location="${tested.project.dir}" />
+            </then>
+        </if>
+
+        <!-- If the "debug" build type changed, clear out the compiled code.
+             This is to make sure the new BuildConfig.DEBUG value is picked up
+             as javac can't deal with this type of change in its dependency computation. -->
+        <if>
+            <condition>
+                <and>
+                    <length string="${build.last.is.packaging.debug}" trim="true" when="greater" length="0" />
+                    <not><equals
+                            arg1="${build.is.packaging.debug}"
+                            arg2="${build.last.is.packaging.debug}" /></not>
+                </and>
+            </condition>
+            <then>
+                <echo level="info">Switching between debug and non debug build: Deleting previous compilation output...</echo>
+                <delete dir="${out.classes.absolute.dir}" verbose="${verbose}" />
+            </then>
+            <else>
+                <!-- Else, we may still need to clean the code, for another reason.
+                     special case for instrumented: if the previous build was
+                     instrumented but not this one, clear out the compiled code -->
+                <if>
+                    <condition>
+                        <and>
+                            <istrue value="${build.last.is.instrumented}" />
+                            <isfalse value="${build.is.instrumented}" />
+                        </and>
+                    </condition>
+                    <then>
+                        <echo level="info">Switching from instrumented to non-instrumented build: Deleting previous compilation output...</echo>
+                        <delete dir="${out.classes.absolute.dir}" verbose="${verbose}" />
+                    </then>
+                </if>
+            </else>
+        </if>
+
+
+        <!-- get the project manifest package -->
+        <xpath input="${manifest.abs.file}"
+                expression="/manifest/@package" output="project.app.package" />
+
+    </target>
+
+    <!-- empty default pre-clean target. Create a similar target in
+         your build.xml and it'll be called instead of this one. -->
+    <target name="-pre-clean"/>
+
+    <!-- clean target -->
+    <target name="clean" depends="-setup, -pre-clean"
+            description="Removes output files created by other targets.">
+        <delete dir="${out.absolute.dir}" verbose="${verbose}" />
+        <delete dir="${gen.absolute.dir}" verbose="${verbose}" />
+
+        <!-- if we know about a tested project or libraries, we clean them too. -->
+        <if condition="${project.is.test}">
+            <then>
+                <property name="tested.project.absolute.dir" location="${tested.project.dir}" />
+                <subant failonerror="true">
+                    <fileset dir="${tested.project.absolute.dir}" includes="build.xml" />
+                    <target name="clean" />
+                </subant>
+            </then>
+        </if>
+
+        <!-- get all the libraries -->
+        <if>
+            <condition><not><isset property="dont.do.deps" /></not></condition>
+            <then>
+                <getlibpath libraryFolderPathOut="project.library.folder.path" />
+                <if>
+                    <condition>
+                        <isreference refid="project.library.folder.path" />
+                    </condition>
+                    <then>
+                        <!-- clean the libraries with nodeps since we already
+                             know about all the libraries even the indirect one -->
+                        <subant
+                                buildpathref="project.library.folder.path"
+                                antfile="build.xml"
+                                failonerror="true">
+                            <target name="nodeps" />
+                            <target name="clean" />
+                        </subant>
+                    </then>
+                </if>
+            </then>
+        </if>
+    </target>
+
+    <!-- Pre build setup -->
+    <target name="-build-setup" depends="-setup">
+
+        <!-- read the previous build mode -->
+        <property file="${out.build.prop.file}" />
+        <!-- if empty the props won't be set, meaning it's a new build.
+             To force a build, set the prop to empty values. -->
+        <property name="build.last.target" value="" />
+        <property name="build.last.is.instrumented" value="" />
+        <property name="build.last.is.packaging.debug" value="" />
+        <property name="build.last.is.signing.debug" value="" />
+
+        <echo level="info">Resolving Build Target for ${ant.project.name}...</echo>
+        <!-- load project properties, resolve Android target, library dependencies
+             and set some properties with the results.
+             All property names are passed as parameters ending in -Out -->
+        <gettarget
+                androidJarFileOut="project.target.android.jar"
+                androidAidlFileOut="project.target.framework.aidl"
+                bootClassPathOut="project.target.class.path"
+                targetApiOut="project.target.apilevel"
+                minSdkVersionOut="project.minSdkVersion" />
+
+        <!-- Value of the hasCode attribute (Application node) extracted from manifest file -->
+        <xpath input="${manifest.abs.file}" expression="/manifest/application/@android:hasCode"
+                    output="manifest.hasCode" default="true"/>
+
+        <echo level="info">----------</echo>
+        <echo level="info">Creating output directories if needed...</echo>
+        <mkdir dir="${resource.absolute.dir}" />
+        <mkdir dir="${jar.libs.absolute.dir}" />
+        <mkdir dir="${out.absolute.dir}" />
+        <mkdir dir="${out.res.absolute.dir}" />
+        <do-only-if-manifest-hasCode>
+            <mkdir dir="${gen.absolute.dir}" />
+            <mkdir dir="${out.classes.absolute.dir}" />
+        </do-only-if-manifest-hasCode>
+
+        <echo level="info">----------</echo>
+        <echo level="info">Resolving Dependencies for ${ant.project.name}...</echo>
+        <dependency
+                libraryFolderPathOut="project.library.folder.path"
+                libraryPackagesOut="project.library.packages"
+                libraryManifestFilePathOut="project.library.manifest.file.path"
+                libraryResFolderPathOut="project.library.res.folder.path"
+                libraryBinAidlFolderPathOut="project.library.bin.aidl.folder.path"
+                libraryNativeFolderPathOut="project.library.native.folder.path"
+                jarLibraryPathOut="project.all.jars.path"
+                targetApi="${project.target.apilevel}"
+                verbose="${verbose}" />
+
+        <!-- compile the libraries if any -->
+        <if>
+            <condition>
+                <and>
+                    <isreference refid="project.library.folder.path" />
+                    <not><isset property="dont.do.deps" /></not>
+                </and>
+            </condition>
+            <then>
+                <!-- figure out which target must be used to build the library projects.
+                     If emma is enabled, then use 'instrument' otherwise, use 'debug' -->
+                <condition property="project.libraries.target" value="instrument" else="${build.target}">
+                    <istrue value="${build.is.instrumented}" />
+                </condition>
+
+                <echo level="info">----------</echo>
+                <echo level="info">Building Libraries with '${project.libraries.target}'...</echo>
+
+                <!-- no need to build the deps as we have already
+                     the full list of libraries -->
+                <subant failonerror="true"
+                        buildpathref="project.library.folder.path"
+                        antfile="build.xml">
+                    <target name="nodeps" />
+                    <target name="${project.libraries.target}" />
+                    <property name="emma.coverage.absolute.file" location="${out.absolute.dir}/coverage.em" />
+                </subant>
+            </then>
+        </if>
+
+        <!-- compile the main project if this is a test project -->
+        <if condition="${project.is.test}">
+            <then>
+                <!-- figure out which target must be used to build the tested project.
+                     If emma is enabled, then use 'instrument' otherwise, use 'debug' -->
+                <condition property="tested.project.target" value="instrument" else="debug">
+                    <isset property="emma.enabled" />
+                </condition>
+
+                <echo level="info">----------</echo>
+                <echo level="info">Building tested project at ${tested.project.absolute.dir} with '${tested.project.target}'...</echo>
+                <subant target="${tested.project.target}" failonerror="true">
+                    <fileset dir="${tested.project.absolute.dir}" includes="build.xml" />
+                </subant>
+
+                <!-- get the tested project full classpath to be able to build
+                     the test project -->
+                <testedprojectclasspath
+                        projectLocation="${tested.project.absolute.dir}"
+                        projectClassPathOut="tested.project.classpath"/>
+            </then>
+            <else>
+                <!-- no tested project, make an empty Path object so that javac doesn't
+                     complain -->
+                <path id="tested.project.classpath" />
+            </else>
+        </if>
+    </target>
+
+    <!-- empty default pre-build target. Create a similar target in
+         your build.xml and it'll be called instead of this one. -->
+    <target name="-pre-build"/>
+
+    <!-- Code Generation: compile resources (aapt -> R.java), aidl, renderscript -->
+    <target name="-code-gen">
+        <!-- always merge manifest -->
+        <mergemanifest
+                appManifest="${manifest.abs.file}"
+                outManifest="${out.manifest.abs.file}"
+                enabled="${manifestmerger.enabled}">
+            <library refid="project.library.manifest.file.path" />
+        </mergemanifest>
+
+        <do-only-if-manifest-hasCode
+                elseText="hasCode = false. Skipping aidl/renderscript/R.java">
+            <echo level="info">Handling aidl files...</echo>
+            <aidl executable="${aidl}"
+                    framework="${project.target.framework.aidl}"
+                    libraryBinAidlFolderPathRefid="project.library.bin.aidl.folder.path"
+                    genFolder="${gen.absolute.dir}"
+                    aidlOutFolder="${out.aidl.absolute.dir}">
+                <source path="${source.absolute.dir}"/>
+            </aidl>
+
+            <!-- renderscript generates resources so it must be called before aapt -->
+            <echo level="info">----------</echo>
+            <echo level="info">Handling RenderScript files...</echo>
+            <renderscript executable="${renderscript}"
+                    includePath="${android.renderscript.include.path}"
+                    genFolder="${gen.absolute.dir}"
+                    resFolder="${out.res.absolute.dir}/raw"
+                    targetApi="${project.minSdkVersion}"
+                    optLevel="${renderscript.opt.level}"
+                    buildType="${build.is.packaging.debug}"
+                    previousBuildType="${build.last.is.packaging.debug}">
+                <source path="${source.absolute.dir}"/>
+            </renderscript>
+
+            <echo level="info">----------</echo>
+            <echo level="info">Handling Resources...</echo>
+            <aapt executable="${aapt}"
+                    command="package"
+                    verbose="${verbose}"
+                    manifest="${out.manifest.abs.file}"
+                    androidjar="${project.target.android.jar}"
+                    rfolder="${gen.absolute.dir}"
+                    nonConstantId="${android.library}"
+                    libraryResFolderPathRefid="project.library.res.folder.path"
+                    libraryPackagesRefid="project.library.packages"
+                    ignoreAssets="${aapt.ignore.assets}"
+                    proguardFile="${out.absolute.dir}/proguard.txt">
+                <res path="${out.res.absolute.dir}" />
+                <res path="${resource.absolute.dir}" />
+            </aapt>
+
+            <echo level="info">----------</echo>
+            <echo level="info">Handling BuildConfig class...</echo>
+            <buildconfig
+                    genFolder="${gen.absolute.dir}"
+                    package="${project.app.package}"
+                    buildType="${build.is.packaging.debug}"
+                    previousBuildType="${build.last.is.packaging.debug}"/>
+
+        </do-only-if-manifest-hasCode>
+    </target>
+
+    <!-- empty default pre-compile target. Create a similar target in
+         your build.xml and it'll be called instead of this one. -->
+    <target name="-pre-compile"/>
+
+    <!-- Compiles this project's .java files into .class files. -->
+    <target name="-compile" depends="-build-setup, -pre-build, -code-gen, -pre-compile">
+        <do-only-if-manifest-hasCode elseText="hasCode = false. Skipping...">
+            <!-- merge the project's own classpath and the tested project's classpath -->
+            <path id="project.javac.classpath">
+                <path refid="project.all.jars.path" />
+                <path refid="tested.project.classpath" />
+            </path>
+            <javac encoding="${java.encoding}"
+                    source="${java.source}" target="${java.target}"
+                    debug="true" extdirs="" includeantruntime="false"
+                    destdir="${out.classes.absolute.dir}"
+                    bootclasspathref="project.target.class.path"
+                    verbose="${verbose}"
+                    classpathref="project.javac.classpath"
+                    fork="${need.javac.fork}">
+                <src path="${source.absolute.dir}:../../../../modules/video_capture/main/source/android/java:../../../../modules/video_render/main/source/android/java" />
+                <src path="${gen.absolute.dir}" />
+                <compilerarg line="${java.compilerargs}" />
+            </javac>
+
+            <!-- if the project is instrumented, intrument the classes -->
+            <if condition="${build.is.instrumented}">
+                <then>
+                    <echo level="info">Instrumenting classes from ${out.absolute.dir}/classes...</echo>
+
+                    <!-- build the filter to remove R, Manifest, BuildConfig -->
+                    <getemmafilter
+                            appPackage="${project.app.package}"
+                            libraryPackagesRefId="project.library.packages"
+                            filterOut="emma.default.filter"/>
+
+                    <!-- define where the .em file is going. This may have been
+                         setup already if this is a library -->
+                    <property name="emma.coverage.absolute.file" location="${out.absolute.dir}/coverage.em" />
+
+                    <!-- It only instruments class files, not any external libs -->
+                    <emma enabled="true">
+                        <instr verbosity="${verbosity}"
+                               mode="overwrite"
+                               instrpath="${out.absolute.dir}/classes"
+                               outdir="${out.absolute.dir}/classes"
+                               metadatafile="${emma.coverage.absolute.file}">
+                            <filter excludes="${emma.default.filter}" />
+                            <filter value="${emma.filter}" />
+                        </instr>
+                    </emma>
+                </then>
+            </if>
+
+            <!-- if the project is a library then we generate a jar file -->
+            <if condition="${project.is.library}">
+                <then>
+                    <echo level="info">Creating library output jar file...</echo>
+                    <property name="out.library.jar.file" location="${out.absolute.dir}/classes.jar" />
+                    <if>
+                        <condition>
+                            <length string="${android.package.excludes}" trim="true" when="greater" length="0" />
+                        </condition>
+                        <then>
+                            <echo level="info">Custom jar packaging exclusion: ${android.package.excludes}</echo>
+                        </then>
+                    </if>
+
+                    <propertybyreplace name="project.app.package.path" input="${project.app.package}" replace="." with="/" />
+
+                    <jar destfile="${out.library.jar.file}">
+                        <fileset dir="${out.classes.absolute.dir}"
+                                includes="**/*.class"
+                                excludes="${project.app.package.path}/R.class ${project.app.package.path}/R$*.class ${project.app.package.path}/Manifest.class ${project.app.package.path}/Manifest$*.class ${project.app.package.path}/BuildConfig.class"/>
+                        <fileset dir="${source.absolute.dir}" excludes="**/*.java ${android.package.excludes}" />
+                    </jar>
+                </then>
+            </if>
+
+        </do-only-if-manifest-hasCode>
+    </target>
+
+    <!-- empty default post-compile target. Create a similar target in
+         your build.xml and it'll be called instead of this one. -->
+    <target name="-post-compile"/>
+
+    <!-- Obfuscate target
+        This is only active in release builds when proguard.config is defined
+        in default.properties.
+
+        To replace Proguard with a different obfuscation engine:
+        Override the following targets in your build.xml, before the call to <setup>
+            -release-obfuscation-check
+                Check whether obfuscation should happen, and put the result in a property.
+            -debug-obfuscation-check
+                Obfuscation should not happen. Set the same property to false.
+            -obfuscate
+                check if the property set in -debug/release-obfuscation-check is set to true.
+                If true:
+                    Perform obfuscation
+                    Set property out.dex.input.absolute.dir to be the output of the obfuscation
+    -->
+    <target name="-obfuscate">
+        <if condition="${proguard.enabled}">
+            <then>
+                <property name="obfuscate.absolute.dir" location="${out.absolute.dir}/proguard" />
+                <property name="preobfuscate.jar.file" value="${obfuscate.absolute.dir}/original.jar" />
+                <property name="obfuscated.jar.file" value="${obfuscate.absolute.dir}/obfuscated.jar" />
+                <!-- input for dex will be proguard's output -->
+                <property name="out.dex.input.absolute.dir" value="${obfuscated.jar.file}" />
+
+                <!-- Add Proguard Tasks -->
+                <property name="proguard.jar" location="${android.tools.dir}/proguard/lib/proguard.jar" />
+                <taskdef name="proguard" classname="proguard.ant.ProGuardTask" classpath="${proguard.jar}" />
+
+                <!-- Set the android classpath Path object into a single property. It'll be
+                     all the jar files separated by a platform path-separator.
+                     Each path must be quoted if it contains spaces.
+                -->
+                <pathconvert property="project.target.classpath.value" refid="project.target.class.path">
+                    <firstmatchmapper>
+                        <regexpmapper from='^([^ ]*)( .*)$$' to='"\1\2"'/>
+                        <identitymapper/>
+                    </firstmatchmapper>
+                </pathconvert>
+
+                <!-- Build a path object with all the jar files that must be obfuscated.
+                     This include the project compiled source code and any 3rd party jar
+                     files. -->
+                <path id="project.all.classes.path">
+                    <pathelement location="${preobfuscate.jar.file}" />
+                    <path refid="project.all.jars.path" />
+                </path>
+                <!-- Set the project jar files Path object into a single property. It'll be
+                     all the jar files separated by a platform path-separator.
+                     Each path must be quoted if it contains spaces.
+                -->
+                <pathconvert property="project.all.classes.value" refid="project.all.classes.path">
+                    <firstmatchmapper>
+                        <regexpmapper from='^([^ ]*)( .*)$$' to='"\1\2"'/>
+                        <identitymapper/>
+                    </firstmatchmapper>
+                </pathconvert>
+
+                <!-- Turn the path property ${proguard.config} from an A:B:C property
+                     into a series of includes: -include A -include B -include C
+                     suitable for processing by the ProGuard task. Note - this does
+                     not include the leading '-include "' or the closing '"'; those
+                     are added under the <proguard> call below.
+                -->
+                <path id="proguard.configpath">
+                    <pathelement path="${proguard.config}"/>
+                </path>
+                <pathconvert pathsep='" -include "' property="proguard.configcmd" refid="proguard.configpath"/>
+
+                <mkdir   dir="${obfuscate.absolute.dir}" />
+                <delete file="${preobfuscate.jar.file}"/>
+                <delete file="${obfuscated.jar.file}"/>
+                <jar basedir="${out.classes.absolute.dir}"
+                    destfile="${preobfuscate.jar.file}" />
+                <proguard>
+                    -include      "${proguard.configcmd}"
+                    -include      "${out.absolute.dir}/proguard.txt"
+                    -injars       ${project.all.classes.value}
+                    -outjars      "${obfuscated.jar.file}"
+                    -libraryjars  ${project.target.classpath.value}
+                    -dump         "${obfuscate.absolute.dir}/dump.txt"
+                    -printseeds   "${obfuscate.absolute.dir}/seeds.txt"
+                    -printusage   "${obfuscate.absolute.dir}/usage.txt"
+                    -printmapping "${obfuscate.absolute.dir}/mapping.txt"
+                </proguard>
+            </then>
+        </if>
+    </target>
+
+    <!-- Converts this project's .class files into .dex files -->
+    <target name="-dex" depends="-compile, -post-compile, -obfuscate">
+        <do-only-if-manifest-hasCode elseText="hasCode = false. Skipping...">
+            <!-- only convert to dalvik bytecode is *not* a library -->
+            <do-only-if-not-library elseText="Library project: do not convert bytecode..." >
+                <!-- special case for instrumented builds: need to use no-locals and need
+                     to pass in the emma jar. -->
+                <if condition="${build.is.instrumented}">
+                    <then>
+                        <dex-helper nolocals="true">
+                            <external-libs>
+                                <fileset file="${emma.dir}/emma_device.jar" />
+                            </external-libs>
+                        </dex-helper>
+                    </then>
+                    <else>
+                        <dex-helper />
+                    </else>
+                </if>
+            </do-only-if-not-library>
+        </do-only-if-manifest-hasCode>
+    </target>
+
+<!-- Updates the pre-processed PNG cache -->
+    <target name="-crunch">
+        <exec executable="${aapt}" taskName="crunch">
+            <arg value="crunch" />
+            <arg value="-v" />
+            <arg value="-S" />
+            <arg path="${resource.absolute.dir}" />
+            <arg value="-C" />
+            <arg path="${out.res.absolute.dir}" />
+        </exec>
+    </target>
+
+    <!-- Puts the project's resources into the output package file
+         This actually can create multiple resource package in case
+         Some custom apk with specific configuration have been
+         declared in default.properties.
+         -->
+    <target name="-package-resources" depends="-crunch">
+        <!-- only package resources if *not* a library project -->
+        <do-only-if-not-library elseText="Library project: do not package resources..." >
+            <aapt executable="${aapt}"
+                    command="package"
+                    versioncode="${version.code}"
+                    versionname="${version.name}"
+                    debug="${build.is.packaging.debug}"
+                    manifest="${out.manifest.abs.file}"
+                    assets="${asset.absolute.dir}"
+                    androidjar="${project.target.android.jar}"
+                    apkfolder="${out.absolute.dir}"
+                    nocrunch="${build.packaging.nocrunch}"
+                    resourcefilename="${resource.package.file.name}"
+                    resourcefilter="${aapt.resource.filter}"
+                    libraryResFolderPathRefid="project.library.res.folder.path"
+                    libraryPackagesRefid="project.library.packages"
+                    previousBuildType="${build.last.target}"
+                    buildType="${build.target}"
+                    ignoreAssets="${aapt.ignore.assets}">
+                <res path="${out.res.absolute.dir}" />
+                <res path="${resource.absolute.dir}" />
+                <!-- <nocompress /> forces no compression on any files in assets or res/raw -->
+                <!-- <nocompress extension="xml" /> forces no compression on specific file extensions in assets and res/raw -->
+            </aapt>
+        </do-only-if-not-library>
+    </target>
+
+    <!-- Packages the application. -->
+    <target name="-package" depends="-dex, -package-resources">
+        <!-- only package apk if *not* a library project -->
+        <do-only-if-not-library elseText="Library project: do not package apk..." >
+            <if condition="${build.is.instrumented}">
+                <then>
+                    <package-helper>
+                        <extra-jars>
+                            <!-- Injected from external file -->
+                            <jarfile path="${emma.dir}/emma_device.jar" />
+                        </extra-jars>
+                    </package-helper>
+                </then>
+                <else>
+                    <package-helper />
+                </else>
+            </if>
+        </do-only-if-not-library>
+    </target>
+
+    <target name="-post-package" />
+    <target name="-post-build" />
+
+    <target name="-set-mode-check">
+        <fail if="build.is.mode.set"
+                message="Cannot run two different modes at the same time. If you are running more than one debug/release/instrument type targets, call them from different Ant calls." />
+    </target>
+
+    <!-- ******************************************************* -->
+    <!-- **************** Debug specific targets *************** -->
+    <!-- ******************************************************* -->
+
+    <target name="-set-debug-files" depends="-set-mode-check">
+
+        <property name="out.packaged.file" location="${out.absolute.dir}/${ant.project.name}-debug-unaligned.apk" />
+        <property name="out.final.file" location="${out.absolute.dir}/${ant.project.name}-debug.apk" />
+        <property name="build.is.mode.set" value="true" />
+    </target>
+
+
+    <target name="-set-debug-mode" depends="-setup">
+        <!-- record the current build target -->
+        <property name="build.target" value="debug" />
+
+        <if>
+            <condition>
+                <and>
+                    <istrue value="${project.is.testapp}" />
+                    <istrue value="${emma.enabled}" />
+                </and>
+            </condition>
+            <then>
+                <property name="build.is.instrumented" value="true" />
+            </then>
+            <else>
+                <property name="build.is.instrumented" value="false" />
+            </else>
+        </if>
+
+        <!-- whether the build is a debug build. always set. -->
+        <property name="build.is.packaging.debug" value="true" />
+
+        <!-- signing mode: debug -->
+        <property name="build.is.signing.debug" value="true" />
+
+        <!-- Renderscript optimization level: none -->
+        <property name="renderscript.opt.level" value="${renderscript.debug.opt.level}" />
+
+    </target>
+
+    <target name="-debug-obfuscation-check">
+        <!-- proguard is never enabled in debug mode -->
+        <property name="proguard.enabled" value="false"/>
+    </target>
+
+    <!-- Builds debug output package -->
+    <target name="-do-debug" depends="-set-debug-mode, -debug-obfuscation-check, -package, -post-package">
+        <!-- only create apk if *not* a library project -->
+        <do-only-if-not-library elseText="Library project: do not create apk..." >
+            <sequential>
+                <zipalign-helper in.package="${out.packaged.file}" out.package="${out.final.file}" />
+                <echo level="info">Debug Package: ${out.final.file}</echo>
+            </sequential>
+        </do-only-if-not-library>
+        <record-build-info />
+    </target>
+
+    <!-- Builds debug output package -->
+    <target name="debug" depends="-set-debug-files, -do-debug, -post-build"
+                description="Builds the application and signs it with a debug key.">
+    </target>
+
+
+    <!-- ******************************************************* -->
+    <!-- *************** Release specific targets ************** -->
+    <!-- ******************************************************* -->
+
+    <!-- called through target 'release'. Only executed if the keystore and
+         key alias are known but not their password. -->
+    <target name="-release-prompt-for-password" if="has.keystore" unless="has.password">
+        <!-- Gets passwords -->
+        <input
+                message="Please enter keystore password (store:${key.store}):"
+                addproperty="key.store.password" />
+        <input
+                message="Please enter password for alias '${key.alias}':"
+                addproperty="key.alias.password" />
+    </target>
+
+    <!-- called through target 'release'. Only executed if there's no
+         keystore/key alias set -->
+    <target name="-release-nosign" unless="has.keystore">
+        <!-- no release builds for library project -->
+        <do-only-if-not-library elseText="" >
+            <sequential>
+                <echo level="info">No key.store and key.alias properties found in build.properties.</echo>
+                <echo level="info">Please sign ${out.packaged.file} manually</echo>
+                <echo level="info">and run zipalign from the Android SDK tools.</echo>
+            </sequential>
+        </do-only-if-not-library>
+        <record-build-info />
+    </target>
+
+    <target name="-release-obfuscation-check">
+        <echo level="info">proguard.config is ${proguard.config}</echo>
+        <condition property="proguard.enabled" value="true" else="false">
+            <and>
+                <isset property="build.is.mode.release" />
+                <isset property="proguard.config" />
+            </and>
+        </condition>
+        <if condition="${proguard.enabled}">
+            <then>
+                <echo level="info">Proguard.config is enabled</echo>
+                <!-- Secondary dx input (jar files) is empty since all the
+                     jar files will be in the obfuscated jar -->
+                <path id="out.dex.jar.input.ref" />
+            </then>
+        </if>
+    </target>
+
+    <target name="-set-release-mode" depends="-set-mode-check">
+        <property name="out.packaged.file" location="${out.absolute.dir}/${ant.project.name}-release-unsigned.apk" />
+        <property name="out.final.file" location="${out.absolute.dir}/${ant.project.name}-release.apk" />
+        <property name="build.is.mode.set" value="true" />
+
+        <!-- record the current build target -->
+        <property name="build.target" value="release" />
+
+        <property name="build.is.instrumented" value="false" />
+
+        <!-- release mode is only valid if the manifest does not explicitly
+             set debuggable to true. default is false. -->
+        <xpath input="${manifest.abs.file}" expression="/manifest/application/@android:debuggable"
+                output="build.is.packaging.debug" default="false"/>
+
+        <!-- signing mode: release -->
+        <property name="build.is.signing.debug" value="false" />
+
+        <!-- Renderscript optimization level: aggressive -->
+        <property name="renderscript.opt.level" value="${renderscript.release.opt.level}" />
+
+        <if condition="${build.is.packaging.debug}">
+            <then>
+                <echo>*************************************************</echo>
+                <echo>****  Android Manifest has debuggable=true   ****</echo>
+                <echo>**** Doing DEBUG packaging with RELEASE keys ****</echo>
+                <echo>*************************************************</echo>
+            </then>
+            <else>
+                <!-- property only set in release mode.
+                     Useful for if/unless attributes in target node
+                     when using Ant before 1.8 -->
+                <property name="build.is.mode.release" value="true"/>
+            </else>
+        </if>
+    </target>
+
+    <target name="-release-sign" if="has.keystore" >
+        <!-- only create apk if *not* a library project -->
+        <do-only-if-not-library elseText="Library project: do not create apk..." >
+            <sequential>
+                <property name="out.unaligned.file" location="${out.absolute.dir}/${ant.project.name}-release-unaligned.apk" />
+
+                <!-- Signs the APK -->
+                <echo level="info">Signing final apk...</echo>
+                <signapk
+                        input="${out.packaged.file}"
+                        output="${out.unaligned.file}"
+                        keystore="${key.store}"
+                        storepass="${key.store.password}"
+                        alias="${key.alias}"
+                        keypass="${key.alias.password}"/>
+
+                <!-- Zip aligns the APK -->
+                <zipalign-helper
+                        in.package="${out.unaligned.file}"
+                        out.package="${out.final.file}" />
+                <echo level="info">Release Package: ${out.final.file}</echo>
+            </sequential>
+        </do-only-if-not-library>
+        <record-build-info />
+    </target>
+
+    <!-- This runs -package-release and -release-nosign first and then runs
+         only if release-sign is true (set in -release-check,
+         called by -release-no-sign)-->
+    <target name="release"
+                depends="-set-release-mode, -release-obfuscation-check, -package, -post-package, -release-prompt-for-password, -release-nosign, -release-sign, -post-build"
+                description="Builds the application in release mode.">
+    </target>
+
+    <!-- ******************************************************* -->
+    <!-- ************ Instrumented specific targets ************ -->
+    <!-- ******************************************************* -->
+
+    <!-- These targets are specific for the project under test when it
+         gets compiled by the test projects in a way that will make it
+         support emma code coverage -->
+
+    <target name="-set-instrumented-mode" depends="-set-mode-check">
+        <property name="out.packaged.file" location="${out.absolute.dir}/${ant.project.name}-instrumented-unaligned.apk" />
+        <property name="out.final.file" location="${out.absolute.dir}/${ant.project.name}-instrumented.apk" />
+        <property name="build.is.mode.set" value="true" />
+
+        <!-- whether the build is an instrumented build. -->
+        <property name="build.is.instrumented" value="true" />
+    </target>
+
+    <!-- Builds instrumented output package -->
+    <target name="instrument" depends="-set-instrumented-mode, -do-debug"
+                description="Builds an instrumented packaged.">
+        <!-- only create apk if *not* a library project -->
+        <do-only-if-not-library elseText="Library project: do not create apk..." >
+            <sequential>
+                <zipalign-helper in.package="${out.packaged.file}" out.package="${out.final.file}" />
+                <echo level="info">Instrumented Package: ${out.final.file}</echo>
+            </sequential>
+        </do-only-if-not-library>
+        <record-build-info />
+    </target>
+
+    <!-- ******************************************************* -->
+    <!-- ************ Test project specific targets ************ -->
+    <!-- ******************************************************* -->
+
+    <!-- enable code coverage -->
+    <target name="emma">
+        <property name="emma.enabled" value="true" />
+    </target>
+
+    <!-- fails if the project is not a test project -->
+    <target name="-test-project-check" depends="-setup">
+        <if>
+            <condition>
+                <and>
+                    <isfalse value="${project.is.test}" />
+                    <isfalse value="${project.is.testapp}" />
+                </and>
+            </condition>
+            <then>
+                <fail message="Project is not a test project." />
+            </then>
+        </if>
+    </target>
+
+    <target name="test" depends="-test-project-check"
+                description="Runs tests from the package defined in test.package property">
+        <property name="test.runner" value="android.test.InstrumentationTestRunner" />
+
+        <if condition="${project.is.test}">
+        <then>
+            <property name="tested.project.absolute.dir" location="${tested.project.dir}" />
+
+            <!-- Application package of the tested project extracted from its manifest file -->
+            <xpath input="${tested.project.absolute.dir}/AndroidManifest.xml"
+                    expression="/manifest/@package" output="tested.project.app.package" />
+
+            <if condition="${emma.enabled}">
+                <then>
+                    <getprojectpaths projectPath="${tested.project.absolute.dir}"
+                            binOut="tested.project.out.absolute.dir"
+                            srcOut="tested.project.source.absolute.dir" />
+
+                    <getlibpath projectPath="${tested.project.absolute.dir}"
+                            libraryFolderPathOut="tested.project.lib.source.path"
+                            leaf="@{source.dir}" />
+
+                </then>
+            </if>
+
+        </then>
+        <else>
+            <!-- this is a test app, the tested package is the app's own package -->
+            <property name="tested.project.app.package" value="${project.app.package}" />
+
+            <if condition="${emma.enabled}">
+                <then>
+                    <property name="tested.project.out.absolute.dir" value="${out.absolute.dir}" />
+                    <property name="tested.project.source.absolute.dir" value="${source.absolute.dir}" />
+
+                    <getlibpath
+                            libraryFolderPathOut="tested.project.lib.source.path"
+                            leaf="@{source.dir}" />
+
+                </then>
+            </if>
+
+        </else>
+        </if>
+
+        <property name="emma.dump.file"
+                value="/data/data/${tested.project.app.package}/coverage.ec" />
+
+        <if condition="${emma.enabled}">
+            <then>
+                <echo>WARNING: Code Coverage is currently only supported on the emulator and rooted devices.</echo>
+                <run-tests-helper emma.enabled="true">
+                    <extra-instrument-args>
+                        <arg value="-e" />
+                           <arg value="coverageFile" />
+                           <arg value="${emma.dump.file}" />
+                    </extra-instrument-args>
+                </run-tests-helper>
+                <echo level="info">Downloading coverage file into project directory...</echo>
+                <exec executable="${adb}" failonerror="true">
+                    <arg line="${adb.device.arg}" />
+                    <arg value="pull" />
+                    <arg value="${emma.dump.file}" />
+                    <arg path="${out.absolute.dir}/coverage.ec" />
+                </exec>
+                <echo level="info">Extracting coverage report...</echo>
+
+                <pathconvert property="tested.project.lib.source.path.value" refid="tested.project.lib.source.path">
+                    <firstmatchmapper>
+                        <regexpmapper from='^([^ ]*)( .*)$$' to='"\1\2"'/>
+                        <identitymapper/>
+                    </firstmatchmapper>
+                </pathconvert>
+
+
+                <emma>
+                    <report sourcepath="${tested.project.source.absolute.dir}:${tested.project.lib.source.path.value}"
+                            verbosity="${verbosity}">
+                        <!-- TODO: report.dir or something like should be introduced if necessary -->
+                        <infileset file="${out.absolute.dir}/coverage.ec" />
+                        <infileset file="${tested.project.out.absolute.dir}/coverage.em" />
+                        <!-- TODO: reports in other, indicated by user formats -->
+                        <html outfile="${out.absolute.dir}/coverage.html" />
+                   </report>
+                </emma>
+                <echo level="info">Cleaning up temporary files...</echo>
+                <delete file="${out.absolute.dir}/coverage.ec" />
+                <delete file="${out.absolute.dir}/coverage.em" />
+                <echo level="info">Saving the report file in ${out.absolute.dir}/coverage.html</echo>
+            </then>
+            <else>
+                <run-tests-helper />
+            </else>
+        </if>
+    </target>
+
+
+    <!-- ******************************************************* -->
+    <!-- ********** Install/uninstall specific targets ********* -->
+    <!-- ******************************************************* -->
+
+    <target name="install"
+                description="Installs the newly build package. Must be used in conjunction with a build target
+                            (debug/release/instrument). If the application was previously installed, the application
+                            is reinstalled if the signature matches." >
+        <!-- only do install if *not* a library project -->
+        <do-only-if-not-library elseText="Library project: nothing to install!" >
+            <if>
+                <condition>
+                    <isset property="out.final.file" />
+                </condition>
+                <then>
+                    <if>
+                        <condition>
+                            <resourceexists>
+                                <file file="${out.final.file}"/>
+                            </resourceexists>
+                        </condition>
+                        <then>
+                            <echo level="info">Installing ${out.final.file} onto default emulator or device...</echo>
+                            <exec executable="${adb}" failonerror="true">
+                                <arg line="${adb.device.arg}" />
+                                <arg value="install" />
+                                <arg value="-r" />
+                                <arg path="${out.final.file}" />
+                            </exec>
+
+                            <!-- now install the tested project if applicable -->
+                            <!-- can't use project.is.test since the setup target might not have run -->
+                            <if>
+                                <condition>
+                                    <and>
+                                        <isset property="tested.project.dir" />
+                                        <not>
+                                            <isset property="dont.do.deps" />
+                                        </not>
+                                    </and>
+                                </condition>
+                                <then>
+                                    <property name="tested.project.absolute.dir" location="${tested.project.dir}" />
+
+                                    <!-- figure out which tested package to install based on emma.enabled -->
+                                    <condition property="tested.project.install.target" value="installi" else="installd">
+                                        <isset property="emma.enabled" />
+                                    </condition>
+                                    <subant target="${tested.project.install.target}" failonerror="true">
+                                        <fileset dir="${tested.project.absolute.dir}" includes="build.xml" />
+                                    </subant>
+                                </then>
+                            </if>
+                        </then>
+                        <else>
+                            <fail message="File ${out.final.file} does not exist." />
+                        </else>
+                    </if>
+                </then>
+                <else>
+                    <echo>Install file not specified.</echo>
+                    <echo></echo>
+                    <echo>'ant install' now requires the build target to be specified as well.</echo>
+                    <echo></echo>
+                    <echo></echo>
+                    <echo>    ant debug install</echo>
+                    <echo>    ant release install</echo>
+                    <echo>    ant instrument install</echo>
+                    <echo>This will build the given package and install it.</echo>
+                    <echo></echo>
+                    <echo>Alternatively, you can use</echo>
+                    <echo>    ant installd</echo>
+                    <echo>    ant installr</echo>
+                    <echo>    ant installi</echo>
+                    <echo>    ant installt</echo>
+                    <echo>to only install an existing package (this will not rebuild the package.)</echo>
+                    <fail />
+                </else>
+            </if>
+        </do-only-if-not-library>
+    </target>
+
+    <target name="installd" depends="-set-debug-files, install"
+            description="Installs (only) the debug package." />
+    <target name="installr" depends="-set-release-mode, install"
+            description="Installs (only) the release package." />
+    <target name="installi" depends="-set-instrumented-mode, install"
+            description="Installs (only) the instrumented package." />
+    <target name="installt" depends="-test-project-check, installd"
+            description="Installs (only) the test and tested packages." />
+
+
+    <!-- Uninstalls the package from the default emulator/device -->
+    <target name="uninstall" depends="-setup"
+                description="Uninstalls the application from a running emulator or device.">
+        <if>
+            <condition>
+                <isset property="project.app.package" />
+            </condition>
+            <then>
+                <uninstall-helper app.package="${project.app.package}" />
+            </then>
+            <else>
+                <fail message="Could not find application package in manifest. Cannot run 'adb uninstall'." />
+            </else>
+        </if>
+
+        <!-- Now uninstall the tested project, if applicable -->
+        <if>
+            <condition>
+                <and>
+                    <istrue value="${project.is.test}" />
+                    <not>
+                        <isset property="dont.do.deps" />
+                    </not>
+                </and>
+            </condition>
+            <then>
+                <property name="tested.project.absolute.dir" location="${tested.project.dir}" />
+
+                <!-- Application package of the tested project extracted from its manifest file -->
+                <xpath input="${tested.project.absolute.dir}/AndroidManifest.xml"
+                    expression="/manifest/@package" output="tested.project.app.package" />
+                <if>
+                    <condition>
+                        <isset property="tested.project.app.package" />
+                    </condition>
+                    <then>
+                        <uninstall-helper app.package="${tested.project.app.package}" />
+                    </then>
+                    <else>
+                        <fail message="Could not find tested application package in manifest. Cannot run 'adb uninstall'." />
+                    </else>
+                </if>
+            </then>
+        </if>
+
+    </target>
+
+
+    <!-- ******************************************************* -->
+    <!-- ************************* Help ************************ -->
+    <!-- ******************************************************* -->
+
+    <target name="help">
+        <!-- displays starts at col 13
+              |13                                                              80| -->
+        <echo>Android Ant Build. Available targets:</echo>
+        <echo>   help:      Displays this help.</echo>
+        <echo>   clean:     Removes output files created by other targets.</echo>
+        <echo>              The 'all' target can be used to clean dependencies</echo>
+        <echo>              (tested projects and libraries)at the same time</echo>
+        <echo>              using: 'ant all clean'</echo>
+        <echo>   debug:     Builds the application and signs it with a debug key.</echo>
+        <echo>              The 'nodeps' target can be used to only build the</echo>
+        <echo>              current project and ignore the libraries using:</echo>
+        <echo>              'ant nodeps debug'</echo>
+        <echo>   release:   Builds the application. The generated apk file must be</echo>
+        <echo>              signed before it is published.</echo>
+        <echo>              The 'nodeps' target can be used to only build the</echo>
+        <echo>              current project and ignore the libraries using:</echo>
+        <echo>              'ant nodeps release'</echo>
+        <echo>   instrument:Builds an instrumented package and signs it with a</echo>
+        <echo>              debug key.</echo>
+        <echo>   test:      Runs the tests. Project must be a test project and</echo>
+        <echo>              must have been built. Typical usage would be:</echo>
+        <echo>                  ant [emma] debug install test</echo>
+        <echo>   emma:      Transiently enables code coverage for subsequent</echo>
+        <echo>              targets.</echo>
+        <echo>   install:   Installs the newly build package. Must either be used</echo>
+        <echo>              in conjunction with a build target (debug/release/</echo>
+        <echo>              instrument) or with the proper suffix indicating</echo>
+        <echo>              which package to install (see below).</echo>
+        <echo>              If the application was previously installed, the</echo>
+        <echo>              application is reinstalled if the signature matches.</echo>
+        <echo>   installd:  Installs (only) the debug package.</echo>
+        <echo>   installr:  Installs (only) the release package.</echo>
+        <echo>   installi:  Installs (only) the instrumented package.</echo>
+        <echo>   installt:  Installs (only) the test and tested packages (unless</echo>
+        <echo>              nodeps is used as well.</echo>
+        <echo>   uninstall: Uninstalls the application from a running emulator or</echo>
+        <echo>              device. Also uninstall tested package if applicable</echo>
+        <echo>              unless 'nodeps' is used as well.</echo>
+    </target>
+</project>
diff --git a/src/video_engine/main/test/android_test/jni/Application.mk b/src/video_engine/main/test/android_test/jni/Application.mk
new file mode 100644
index 0000000..647560e
--- /dev/null
+++ b/src/video_engine/main/test/android_test/jni/Application.mk
@@ -0,0 +1,11 @@
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+# Build both ARMv5TE and ARMv7-A machine code.
+APP_ABI := armeabi-v7a #armeabi armeabi-v7a x86
+APP_STL := stlport_static
diff --git a/src/video_engine/main/test/android_test/jni/org_webrtc_videoengineapp_vie_android_java_api.h b/src/video_engine/main/test/android_test/jni/org_webrtc_videoengineapp_vie_android_java_api.h
new file mode 100644
index 0000000..f2beb40
--- /dev/null
+++ b/src/video_engine/main/test/android_test/jni/org_webrtc_videoengineapp_vie_android_java_api.h
@@ -0,0 +1,483 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class org_webrtc_videoengineapp_ViEAndroidJavaAPI */
+
+#ifndef _Included_org_webrtc_videoengineapp_ViEAndroidJavaAPI
+#define _Included_org_webrtc_videoengineapp_ViEAndroidJavaAPI
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    NativeInit
+ * Signature: (Landroid/content/Context;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_NativeInit
+  (JNIEnv *, jobject, jobject);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    GetVideoEngine
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetVideoEngine
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    Init
+ * Signature: (Z)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Init
+  (JNIEnv *, jobject, jboolean);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    Terminate
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Terminate
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StartSend
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartSend
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StopRender
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopRender
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StopSend
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopSend
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StartReceive
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartReceive
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StopReceive
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopReceive
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    CreateChannel
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_CreateChannel
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    SetLocalReceiver
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetLocalReceiver
+  (JNIEnv *, jobject, jint, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    SetSendDestination
+ * Signature: (IILjava/lang/String)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetSendDestination
+  (JNIEnv *, jobject, jint, jint, jstring);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    GetCodecs(
+ * Signature: ()I
+ */
+JNIEXPORT jobjectArray JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetCodecs(
+    JNIEnv *env,
+    jobject);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    SetReceiveCodec
+ * Signature: (IIIIII)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetReceiveCodec
+  (JNIEnv *, jobject, jint, jint, jint, jint, jint, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    SetSendCodec
+ * Signature: (IIIIII)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetSendCodec
+  (JNIEnv *, jobject, jint, jint, jint, jint, jint, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    AddRemoteRenderer
+ * Signature: (ILjava/lang/Object;)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_AddRemoteRenderer
+  (JNIEnv *, jobject, jint, jobject);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    RemoveRemoteRenderer
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_RemoveRemoteRenderer
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StartRender
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartRender
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StartCamera
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartCamera
+  (JNIEnv *, jobject, jint, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StopCamera
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopCamera
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    GetCameraOrientation
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetCameraOrientation
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    SetRotation
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetRotation
+  (JNIEnv *, jobject, jint, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    EnableNACK
+ * Signature: (IZ)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_EnableNACK
+  (JNIEnv *, jobject, jint, jboolean);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    EnablePLI
+ * Signature: (IZ)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_EnablePLI
+  (JNIEnv *, jobject, jint, jboolean);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    SetCallback
+ * Signature: (ILorg/webrtc/videoengineapp/IViEAndroidCallback;)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetCallback
+  (JNIEnv *, jobject, jint, jobject);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StartIncomingRTPDump
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartIncomingRTPDump
+  (JNIEnv *, jobject, jint, jstring);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StopIncomingRTPDump
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopIncomingRTPDump
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_Create
+ * Signature: ()Z
+ */
+JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Create
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_Delete
+ * Signature: ()Z
+ */
+JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Delete
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_Init
+ * Signature: (Z)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Init
+  (JNIEnv *, jobject, jboolean);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_Terminate
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Terminate
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_CreateChannel
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1CreateChannel
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_DeleteChannel
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1DeleteChannel
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_SetLocalReceiver
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetLocalReceiver
+  (JNIEnv *, jobject, jint, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_SetSendDestination
+ * Signature: (IILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSendDestination
+  (JNIEnv *, jobject, jint, jint, jstring);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StartListen
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartListen
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StartPlayout
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayout
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StartSend
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartSend
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StopListen
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopListen
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StopPlayout
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayout
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StopSend
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopSend
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_SetSpeakerVolume
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSpeakerVolume
+  (JNIEnv *, jobject, jint);
+
+
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_SetLoudspeakerStatus
+ * Signature: (Z)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetLoudspeakerStatus
+  (JNIEnv *, jobject, jboolean);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StartPlayingFileLocally
+ * Signature: (ILjava/lang/String;Z)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayingFileLocally
+  (JNIEnv *, jobject, jint, jstring, jboolean);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StopPlayingFileLocally
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayingFileLocally
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StartPlayingFileAsMicrophone
+ * Signature: (ILjava/lang/String;Z)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayingFileAsMicrophone
+  (JNIEnv *, jobject, jint, jstring, jboolean);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StopPlayingFileAsMicrophone
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayingFileAsMicrophone
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_NumOfCodecs
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1NumOfCodecs
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_NumOfCodecs
+ * Signature: ()Z
+ */
+JNIEXPORT jobjectArray JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1GetCodecs
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_SetSendCodec
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSendCodec
+  (JNIEnv *, jobject, jint, jint);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_SetECStatus
+ * Signature: (Z)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetECStatus
+  (JNIEnv *, jobject, jboolean);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_SetAGCStatus
+ * Signature: (Z)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetAGCStatus
+  (JNIEnv *, jobject, jboolean);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_SetNSStatus
+ * Signature: (ZI)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetNSStatus
+  (JNIEnv *, jobject, jboolean);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StartDebugRecording
+ * Signature: (Ljava/lang/String)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartDebugRecording
+  (JNIEnv *, jobject, jstring);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StopDebugRecording
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopDebugRecording
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StartIncomingRTPDump
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartIncomingRTPDump
+   (JNIEnv *, jobject, jint, jstring);
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StopIncomingRTPDump
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopIncomingRTPDump
+   (JNIEnv *, jobject, jint);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/src/video_engine/main/test/android_test/jni/vie_android_java_api.cc b/src/video_engine/main/test/android_test/jni/vie_android_java_api.cc
new file mode 100644
index 0000000..4b44aa3
--- /dev/null
+++ b/src/video_engine/main/test/android_test/jni/vie_android_java_api.cc
@@ -0,0 +1,1893 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <android/log.h>
+
+#include "org_webrtc_videoengineapp_vie_android_java_api.h"
+
+#include "voe_base.h"
+#include "voe_codec.h"
+#include "voe_file.h"
+#include "voe_network.h"
+#include "voe_audio_processing.h"
+#include "voe_volume_control.h"
+#include "voe_hardware.h"
+#include "voe_rtp_rtcp.h"
+
+#include "vie_base.h"
+#include "vie_codec.h"
+#include "vie_capture.h"
+#include "vie_network.h"
+#include "vie_render.h"
+#include "vie_rtp_rtcp.h"
+
+#include "common_types.h"
+
+#define WEBRTC_LOG_TAG "*WEBRTCN*"
+#define VALIDATE_BASE_POINTER                                           \
+  if (!voeData.base)                                                    \
+  {                                                                     \
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,              \
+                        "Base pointer doesn't exist");                  \
+    return -1;                                                          \
+  }
+#define VALIDATE_CODEC_POINTER                                          \
+  if (!voeData.codec)                                                   \
+  {                                                                     \
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,              \
+                        "Codec pointer doesn't exist");                 \
+    return -1;                                                          \
+  }
+#define VALIDATE_FILE_POINTER                                           \
+  if (!voeData.file)                                                    \
+  {                                                                     \
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,              \
+                        "File pointer doesn't exist");                  \
+    return -1;                                                          \
+  }
+#define VALIDATE_APM_POINTER                                            \
+  if (!voeData.codec)                                                   \
+  {                                                                     \
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,              \
+                        "Apm pointer doesn't exist");                   \
+    return -1;                                                          \
+  }
+#define VALIDATE_HARDWARE_POINTER                                       \
+  if (!voeData.hardware)                                                \
+  {                                                                     \
+    __android_log_write(                                                \
+        ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,                              \
+        "Hardware pointer doesn't exist");                              \
+    return -1;                                                          \
+  }
+#define VALIDATE_VOLUME_POINTER                                         \
+  if (!voeData.volume)                                                  \
+  {                                                                     \
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,              \
+                        "Volume pointer doesn't exist");                \
+    return -1;                                                          \
+  }
+
+#define VALIDATE_RTP_POINTER                                            \
+  if (!voeData.rtp)                                                     \
+  {                                                                     \
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,              \
+                        "rtp pointer doesn't exist");                   \
+    return -1;                                                          \
+  }
+
+using namespace webrtc;
+
+//Forward declaration.
+class VideoCallbackAndroid;
+
+// VoiceEngine data struct
+typedef struct
+{
+  // VoiceEngine
+  VoiceEngine* ve;
+  // Sub-APIs
+  VoEBase* base;
+  VoECodec* codec;
+  VoEFile* file;
+  VoENetwork* netw;
+  VoEAudioProcessing* apm;
+  VoEVolumeControl* volume;
+  VoEHardware* hardware;
+  VoERTP_RTCP* rtp;
+  JavaVM* jvm;
+} VoiceEngineData;
+
+class AndroidVideoRenderCallback;
+// VideoEngine data struct
+typedef struct
+{
+  VideoEngine* vie;
+  ViEBase* base;
+  ViECodec* codec;
+  ViENetwork* netw;
+  ViERTP_RTCP* rtp;
+  ViERender* render;
+  ViECapture* capture;
+  VideoCallbackAndroid* callback;
+
+} VideoEngineData;
+
+// Global variables
+JavaVM* webrtcGlobalVM;
+
+// Global variables visible in this file
+static VoiceEngineData voeData;
+static VideoEngineData vieData;
+
+// "Local" functions (i.e. not Java accessible)
+#define WEBRTC_TRACE_MAX_MESSAGE_SIZE 1024
+static bool VE_GetSubApis();
+static bool VE_ReleaseSubApis();
+
+#define CHECK_API_RETURN(ret)                                           \
+  if (ret!=0)                                                           \
+  {                                                                     \
+    __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,              \
+                        "Return error %d",ret);                         \
+    break;                                                              \
+  }
+
+class VideoCallbackAndroid: public ViEDecoderObserver,
+                            public ViEEncoderObserver
+{
+
+    // Implements ViEDecoderObserver
+    virtual void IncomingRate(const int videoChannel,
+                              const unsigned int framerate,
+                              const unsigned int bitrate)
+    {
+        // Let's print out the network statistics from this call back as well
+        unsigned short fraction_lost;
+        unsigned int dummy;
+        int intdummy;
+        _vieData.rtp->GetReceivedRTCPStatistics(videoChannel, fraction_lost,
+                                                dummy, dummy, dummy, intdummy);
+        unsigned short packetLossRate = 0;
+        if (fraction_lost > 0)
+        {
+            // Change from frac to %
+            packetLossRate = (fraction_lost * 100) >> 8;
+        }
+
+        JNIEnv* threadEnv = NULL;
+        int ret = webrtcGlobalVM->AttachCurrentThread(&threadEnv, NULL);
+        // Get the JNI env for this thread
+        if ((ret < 0) || !threadEnv)
+        {
+            __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                                "Could not attach thread to JVM (%d, %p)", ret,
+                                threadEnv);
+            return;
+        }
+        threadEnv->CallIntMethod(_callbackObj, _callbackId, framerate, bitrate,
+                                 packetLossRate, _frameRateO, _bitRateO);
+        webrtcGlobalVM->DetachCurrentThread();
+    }
+    ;
+
+    virtual void IncomingCodecChanged(const int videoChannel,
+                                      const webrtc::VideoCodec& videoCodec)
+    {
+    }
+    ;
+
+    virtual void RequestNewKeyFrame(const int videoChannel)
+    {
+    }
+    ;
+
+    virtual void OutgoingRate(const int videoChannel,
+                              const unsigned int framerate,
+                              const unsigned int bitrate)
+    {
+        _frameRateO = framerate;
+        _bitRateO = bitrate;
+        //__android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+        // "SendRate frameRate %d bitrate %d\n",frameRate,bitrate);
+    }
+    ;
+
+public:
+  VideoEngineData& _vieData;
+  JNIEnv * _env;
+  jobject _callbackObj;
+  jclass _callbackCls;
+  jmethodID _callbackId;
+  int _frameRateO, _bitRateO;
+  VideoCallbackAndroid(VideoEngineData& vieData, JNIEnv * env,
+                       jobject callback) :
+      _vieData(vieData), _env(env), _callbackObj(callback),
+      _frameRateO(0), _bitRateO(0) {
+    _callbackCls = _env->GetObjectClass(_callbackObj);
+    _callbackId
+        = _env->GetMethodID(_callbackCls, "UpdateStats", "(IIIII)I");
+    if (_callbackId == NULL) {
+      __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Failed to get jid");
+    }
+    _callbackObj = _env->NewGlobalRef(_callbackObj);
+  }
+};
+
+// JNI_OnLoad
+jint JNI_OnLoad(JavaVM* vm, void* /*reserved*/) {
+  webrtcGlobalVM = vm;
+  if (!webrtcGlobalVM)
+  {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "JNI_OnLoad did not receive a valid VM pointer");
+    return -1;
+  }
+
+  // Get JNI
+  JNIEnv* env;
+  if (JNI_OK != vm->GetEnv(reinterpret_cast<void**> (&env),
+                           JNI_VERSION_1_4)) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "JNI_OnLoad could not get JNI env");
+    return -1;
+  }
+
+  // Init VoiceEngine data
+  memset(&voeData, 0, sizeof(voeData));
+  // Store the JVM
+  voeData.jvm = vm;
+
+  // Init VideoEngineData data
+  memset(&vieData, 0, sizeof(vieData));
+
+  return JNI_VERSION_1_4;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    NativeInit
+ * Signature: (Landroid/content/Context;)Z
+ */
+JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_NativeInit(
+    JNIEnv * env,
+    jobject,
+    jobject context)
+{
+  return true;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    GetVideoEngine
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetVideoEngine(
+    JNIEnv *,
+    jobject context) {
+
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "GetVideoEngine");
+
+  VideoEngine::SetAndroidObjects(webrtcGlobalVM, context);
+
+  // Check if already got
+  if (vieData.vie) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "ViE already got");
+    return -1;
+  }
+
+  // Create
+  vieData.vie = VideoEngine::Create();
+  if (!vieData.vie) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, "Get ViE failed");
+    return -1;
+  }
+  vieData.base = ViEBase::GetInterface(vieData.vie);
+  if (!vieData.base) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get base sub-API failed");
+    return -1;
+  }
+
+  vieData.codec = ViECodec::GetInterface(vieData.vie);
+  if (!vieData.codec) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get codec sub-API failed");
+    return -1;
+  }
+
+  vieData.netw = ViENetwork::GetInterface(vieData.vie);
+  if (!vieData.netw) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get network sub-API failed");
+    return -1;
+  }
+
+  vieData.rtp = ViERTP_RTCP::GetInterface(vieData.vie);
+  if (!vieData.rtp) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get RTP sub-API failed");
+    return -1;
+  }
+
+  vieData.render = ViERender::GetInterface(vieData.vie);
+  if (!vieData.render) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get Render sub-API failed");
+    return -1;
+  }
+
+  vieData.capture = ViECapture::GetInterface(vieData.vie);
+  if (!vieData.capture) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get Capture sub-API failed");
+    return -1;
+  }
+
+  return 0;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    Init
+ * Signature: (Z)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Init(
+    JNIEnv *,
+    jobject,
+    jboolean enableTrace)
+{
+    if (vieData.vie) {
+      __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "Init");
+
+      int ret = vieData.base->Init();
+      __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                            "Init return %d", ret);
+        if (enableTrace)
+        {
+            __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                                "SetTraceFile");
+            if (0 != vieData.vie->SetTraceFile(("/sdcard/trace.txt"), false))
+            {
+                __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                    "Video Engine could not enable trace");
+            }
+
+            __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                                "SetTraceFilter");
+            if (0 != vieData.vie->SetTraceFilter(webrtc::kTraceDefault))
+            {
+                __android_log_write(ANDROID_LOG_WARN, WEBRTC_LOG_TAG,
+                                    "Could not set trace filter");
+            }
+        }
+        else
+        {
+            if (0 != vieData.vie->SetTraceFilter(webrtc::kTraceNone))
+            {
+                __android_log_write(ANDROID_LOG_WARN, WEBRTC_LOG_TAG,
+                                    "Could not set trace filter");
+            }
+        }
+        if (voeData.ve) // VoiceEngine is enabled
+        {
+            __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                                "SetVoiceEngine");
+            if (0 != vieData.base->SetVoiceEngine(voeData.ve))
+            {
+                __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                                    "SetVoiceEngine failed");
+            }
+        }
+        return ret;
+    }
+    else
+    {
+        return -1;
+    }
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    Terminate
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_Terminate(
+    JNIEnv *,
+    jobject)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "Terminate");
+
+  if (vieData.vie) {
+    if (!vieData.rtp || vieData.rtp->Release() != 0) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Failed to release RTP sub-API");
+    }
+
+    if (!vieData.netw || vieData.netw->Release() != 0) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Failed to release Network sub-API");
+    }
+
+    if (!vieData.codec || vieData.codec->Release() != 0) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Failed to release Codec sub-API");
+    }
+
+    if (!vieData.render || vieData.render->Release()) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Failed to release Render sub-API");
+    }
+
+    if (!vieData.capture || vieData.capture->Release()) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Failed to release Capture sub-API");
+    }
+
+    if (!vieData.base || vieData.base->Release() != 0) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Failed to release Base sub-API");
+    }
+
+    // Delete Vie
+    if (!VideoEngine::Delete(vieData.vie)) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Failed to delete ViE ");
+      return -1;
+    }
+    memset(&vieData, 0, sizeof(vieData));
+    VideoEngine::SetAndroidObjects(NULL, NULL);
+    return 0;
+  }
+  else {
+    return -1;
+  }
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StartSend
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartSend(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartSend");
+
+  if (vieData.base) {
+    int ret = vieData.base->StartSend(channel);
+    return ret;
+  }
+  else {
+    return -1;
+  }
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StopRender
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopRender(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+    __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StopRender");
+
+    if (vieData.render) {
+        return vieData.render->StopRender(channel);
+    }
+    else {
+        return -1;
+    }
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StopSend
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopSend(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StopSend");
+
+  if (vieData.base) {
+    return vieData.base->StopSend(channel);
+  }
+  else {
+    return -1;
+  }
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StartReceive
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartReceive(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartReceive");
+
+  if (vieData.base) {
+    return vieData.base->StartReceive(channel);
+  }
+  else {
+    return -1;
+  }
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StopReceive
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopReceive(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StopReceive");
+  if (vieData.base) {
+    return vieData.base->StopReceive(channel);
+  }
+  else {
+    return -1;
+  }
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    CreateChannel
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_CreateChannel(
+    JNIEnv *,
+    jobject,
+    jint voiceChannel)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "CreateChannel");
+
+  if (vieData.vie) {
+    int channel = 0;
+    if (vieData.base->CreateChannel(channel) != 0) {
+      return -1;
+    }
+    if (voiceChannel >= 0) {
+      vieData.base->ConnectAudioChannel(channel, voiceChannel);
+    }
+
+    return channel;
+  }
+  else {
+    return -1;
+  }
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    SetLocalReceiver
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetLocalReceiver(
+    JNIEnv *,
+    jobject,
+    jint channel,
+    jint port)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "SetLocalReceiver");
+
+  if (vieData.vie) {
+    int ret = vieData.netw->SetLocalReceiver(channel, port);
+    return ret;
+  }
+  else {
+    return -1;
+  }
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    SetSendDestination
+ * Signature: (IILjava/lang/String)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetSendDestination(
+    JNIEnv * env,
+    jobject,
+    jint channel,
+    jint port,
+    jstring ipaddr)
+{
+
+  if (NULL == vieData.vie)
+    return -1;
+
+  const char* ip = env->GetStringUTFChars(ipaddr, NULL);
+  if (!ip) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Could not get UTF string");
+    return -1;
+  }
+
+  __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                      "SetSendDestination: channel=%d, port=%d, ip=%s\n",
+                      channel, port, ip);
+
+  return vieData.netw->SetSendDestination(channel, ip, port);
+}
+
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    SetReceiveCodec
+ * Signature: (IIIIII)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetReceiveCodec(
+    JNIEnv *,
+    jobject,
+    jint channel,
+    jint codecNum,
+    jint intbitRate,
+    jint width,
+    jint height,
+    jint frameRate)
+{
+  if (NULL == vieData.codec)
+    return -1;
+
+  //Create codec
+  webrtc::VideoCodec codec;
+  vieData.codec->GetCodec(codecNum, codec);
+
+  __android_log_print(
+      ANDROID_LOG_DEBUG,
+      WEBRTC_LOG_TAG,
+      "SetReceiveCodec %s, pltype=%d, bitRate=%d, maxBitRate=%d,"
+      " width=%d, height=%d, frameRate=%d, codecSpecific=%d \n",
+      codec.plName, codec.plType, codec.startBitrate,
+      codec.maxBitrate, codec.width, codec.height,
+      codec.maxFramerate, codec.codecSpecific);
+  int ret = vieData.codec->SetReceiveCodec(channel, codec);
+  __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                      "SetReceiveCodec return %d", ret);
+  return ret;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    SetSendCodec
+ * Signature: (IIIIII)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetSendCodec(
+    JNIEnv *,
+    jobject,
+    jint channel,
+    jint codecNum,
+    jint intbitRate,
+    jint width,
+    jint height,
+    jint frameRate)
+{
+  if (NULL == vieData.codec)
+    return -1;
+
+  //Create codec
+  webrtc::VideoCodec codec;
+  vieData.codec->GetCodec(codecNum, codec);
+  codec.startBitrate = intbitRate;
+  codec.maxBitrate = 600;
+  codec.width = width;
+  codec.height = height;
+  codec.maxFramerate = frameRate;
+
+  for (int i = 0; i < vieData.codec->NumberOfCodecs(); ++i) {
+    webrtc::VideoCodec codecToList;
+    vieData.codec->GetCodec(i, codecToList);
+    __android_log_print(
+        ANDROID_LOG_DEBUG,
+        WEBRTC_LOG_TAG,
+        "Codec list %s, pltype=%d, bitRate=%d, maxBitRate=%d,"
+        " width=%d, height=%d, frameRate=%d\n",
+        codecToList.plName, codecToList.plType,
+        codecToList.startBitrate, codecToList.maxBitrate,
+        codecToList.width, codecToList.height,
+        codecToList.maxFramerate);
+  }
+  __android_log_print(
+      ANDROID_LOG_DEBUG,
+      WEBRTC_LOG_TAG,
+      "SetSendCodec %s, pltype=%d, bitRate=%d, maxBitRate=%d, "
+      "width=%d, height=%d, frameRate=%d\n",
+      codec.plName, codec.plType, codec.startBitrate,
+      codec.maxBitrate, codec.width, codec.height,
+      codec.maxFramerate);
+
+  return vieData.codec->SetSendCodec(channel, codec);
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    SetSendCodec
+ * Signature: ()Z
+ */
+JNIEXPORT jobjectArray JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetCodecs(
+    JNIEnv *env,
+    jobject)
+{
+  if (NULL == vieData.codec) {
+    return NULL;
+  }
+
+  jobjectArray ret;
+  int i;
+  int num = vieData.codec->NumberOfCodecs();
+  char info[32];
+
+  ret = (jobjectArray)env->NewObjectArray(
+      num,
+      env->FindClass("java/lang/String"),
+      env->NewStringUTF(""));
+
+  for (int i = 0; i < num; ++i) {
+    webrtc::VideoCodec codecToList;
+    vieData.codec->GetCodec(i, codecToList);
+    sprintf(info, "%s pltype:%d", codecToList.plName, codecToList.plType);
+    env->SetObjectArrayElement(ret, i, env->NewStringUTF( info ));
+
+    __android_log_print(
+        ANDROID_LOG_DEBUG,
+        WEBRTC_LOG_TAG,
+        "Codec[%d] %s, pltype=%d, bitRate=%d, maxBitRate=%d,"
+        " width=%d, height=%d, frameRate=%d\n",
+        i, codecToList.plName, codecToList.plType,
+        codecToList.startBitrate, codecToList.maxBitrate,
+        codecToList.width, codecToList.height,
+        codecToList.maxFramerate);
+  }
+
+  return ret;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    AddRemoteRenderer
+ * Signature: (ILjava/lang/Object;)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_AddRemoteRenderer(
+    JNIEnv *,
+    jobject,
+    jint channel,
+    jobject glSurface)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "AddRemoteRenderer");
+  if (vieData.vie) {
+    return vieData.render->AddRenderer(channel, glSurface, 0, 0, 0, 1, 1);
+  }
+  else {
+    return -1;
+  }
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    RemoveRemoteRenderer
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_RemoveRemoteRenderer(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "RemoveRemoteRenderer");
+
+  if (vieData.vie) {
+    return vieData.render->RemoveRenderer(channel);
+  }
+  else {
+    return -1;
+  }
+  return 0;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StartRender
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartRender(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartRender");
+
+  if (vieData.render) {
+    return vieData.render->StartRender(channel);
+  }
+  else {
+    return -1;
+  }
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StartCamera
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartCamera(
+    JNIEnv * env,
+    jobject,
+    jint channel,
+    jint cameraNum)
+{
+  if (NULL == vieData.vie)
+    return -1;
+
+  int i = 0;
+  char deviceName[64];
+  char deviceUniqueName[64];
+  int re;
+  do {
+      re = vieData.capture->GetCaptureDevice(i, deviceName,
+                                             sizeof(deviceName),
+                                             deviceUniqueName,
+                                             sizeof(deviceUniqueName));
+      __android_log_print(
+          ANDROID_LOG_DEBUG,
+          WEBRTC_LOG_TAG,
+          "GetCaptureDevice ret %d devicenum %d deviceUniqueName %s",
+          re, i, deviceUniqueName);
+      i++;
+  } while (re == 0);
+
+  int ret;
+  int cameraId;
+  vieData.capture->GetCaptureDevice(cameraNum, deviceName,
+                                    sizeof(deviceName), deviceUniqueName,
+                                    sizeof(deviceUniqueName));
+  vieData.capture->AllocateCaptureDevice(deviceUniqueName,
+                                         sizeof(deviceUniqueName), cameraId);
+
+  if (cameraId >= 0) { //Connect the
+    ret = vieData.capture->ConnectCaptureDevice(cameraId, channel);
+    __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                        "ConnectCaptureDevice ret %d ", ret);
+
+    ret = vieData.capture->StartCapture(cameraId);
+    __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                            "StartCapture ret %d ", ret);
+  }
+
+  return cameraId;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StopCamera
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopCamera(
+    JNIEnv *,
+    jobject,
+    jint cameraId)
+{
+  if (NULL == vieData.capture)
+    return -1;
+
+  int ret = vieData.capture->StopCapture(cameraId);
+  __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                      "StopCapture  ret %d ", ret);
+  ret = vieData.capture->ReleaseCaptureDevice(cameraId);
+  __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                      "ReleaseCaptureDevice  ret %d ", ret);
+
+  return ret;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    GetCameraOrientation
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_GetCameraOrientation(
+    JNIEnv *,
+    jobject,
+    jint cameraNum)
+{
+  char deviceName[64];
+  char deviceUniqueName[64];
+  int ret;
+
+  ret = vieData.capture->GetCaptureDevice(cameraNum, deviceName,
+                                          sizeof(deviceName),
+                                          deviceUniqueName,
+                                          sizeof(deviceUniqueName));
+  if (ret != 0) {
+    return -1;
+  }
+
+  RotateCapturedFrame orientation;
+  ret = vieData.capture->GetOrientation(deviceUniqueName, orientation);
+  __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                      "GetOrientation  ret %d orientation %d", ret,
+                      orientation);
+
+  return (jint) orientation;
+
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    SetRotation
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetRotation(
+    JNIEnv *,
+    jobject,
+    jint captureId,
+    jint degrees)
+{
+
+  if (NULL == vieData.capture)
+    return -1;
+  RotateCapturedFrame rotation = RotateCapturedFrame_0;
+  if (degrees == 90)
+    rotation = RotateCapturedFrame_90;
+  else if (degrees == 180)
+    rotation = RotateCapturedFrame_180;
+  else if (degrees == 270)
+    rotation = RotateCapturedFrame_270;
+
+  int ret = vieData.capture->SetRotateCapturedFrames(captureId, rotation);
+  return ret;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    EnableNACK
+ * Signature: (IZ)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_EnableNACK(
+    JNIEnv *,
+    jobject,
+    jint channel,
+    jboolean enable)
+{
+  if (NULL == vieData.rtp)
+    return -1;
+
+  if (enable)
+    __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                            "EnableNACK enable");
+  else
+    __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                            "EnableNACK disable");
+
+  int ret = vieData.rtp->SetNACKStatus(channel, enable);
+  return ret;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    EnablePLI
+ * Signature: (IZ)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_EnablePLI(
+    JNIEnv *,
+    jobject,
+    jint channel,
+    jboolean enable)
+{
+  if (NULL == vieData.rtp)
+    return -1;
+
+  if (enable)
+    __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                        "EnablePLI enable");
+  else
+    __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                        "EnablePLI disable");
+
+  int ret = vieData.rtp->SetKeyFrameRequestMethod(channel,
+                                                  kViEKeyFrameRequestPliRtcp);
+  return ret;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    SetCallback
+ * Signature: (ILorg/webrtc/videoengineapp/IViEAndroidCallback;)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_SetCallback(
+    JNIEnv * env,
+    jobject,
+    jint channel,
+    jobject callback)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "SetCallback");
+
+  if (NULL == vieData.codec)
+    return -1;
+  if (vieData.callback == NULL) {
+    vieData.callback = new VideoCallbackAndroid(vieData, env, callback);
+  }
+  else if (vieData.codec) {
+    vieData.codec->DeregisterDecoderObserver(channel); // Wrong channel?
+    vieData.codec->DeregisterEncoderObserver(channel);
+  }
+
+  vieData.codec->RegisterDecoderObserver(channel, *vieData.callback);
+  vieData.codec->RegisterEncoderObserver(channel, *vieData.callback);
+
+  return 0;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StartIncomingRTPDump
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StartIncomingRTPDump(
+    JNIEnv* env,
+    jobject,
+    jint channel,
+    jstring filename) {
+  if (NULL == vieData.rtp) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "video RTP_RTCP interface is null");
+    return -1;
+  }
+  const char* file = env->GetStringUTFChars(filename, NULL);
+  if (!file) {
+    __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Video StartRTPDump file name error");
+    return -1;
+  }
+  if (vieData.rtp->StartRTPDump(channel, file, kRtpIncoming) != 0) {
+    __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Video StartRTPDump error");
+    return -1;
+  }
+  return 0;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    StopIncomingRTPDump
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_StopIncomingRTPDump(
+    JNIEnv *,
+    jobject,
+    jint channel) {
+  if (NULL == vieData.rtp) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "video RTP_RTCP interface is null");
+    return -1;
+  }
+  if (vieData.rtp->StopRTPDump(channel, kRtpIncoming) != 0) {
+    __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Video StopRTPDump error");
+    return -1;
+  }
+  return 0;
+}
+
+//
+// VoiceEngine API wrapper functions
+//
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_Create
+ * Signature: ()Z
+ */
+JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Create(
+    JNIEnv *env,
+    jobject)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "Create");
+
+  // Check if already created
+  if (voeData.ve) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "VoE already created");
+    return false;
+  }
+
+  // Create
+  voeData.ve = VoiceEngine::Create();
+  if (!voeData.ve) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Create VoE failed");
+    return false;
+  }
+
+  // Get sub-APIs
+  if (!VE_GetSubApis()) {
+    // If not OK, release all sub-APIs and delete VoE
+    VE_ReleaseSubApis();
+    if (!VoiceEngine::Delete(voeData.ve)) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Delete VoE failed");
+    }
+    return false;
+  }
+
+  return true;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_Delete
+ * Signature: ()Z
+ */
+JNIEXPORT jboolean JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Delete(
+    JNIEnv *,
+    jobject)
+{
+  // Check if exists
+  if (!voeData.ve) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "VoE does not exist");
+    return false;
+  }
+
+  // Release sub-APIs
+  VE_ReleaseSubApis();
+
+  // Delete
+  if (!VoiceEngine::Delete(voeData.ve)) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Delete VoE failed");
+    return false;
+  }
+
+  voeData.ve = NULL;
+
+  // Clear instance independent Java objects
+  VoiceEngine::SetAndroidObjects(NULL, NULL, NULL);
+
+  return true;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_Init
+ * Signature: (Z)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Init(
+    JNIEnv *,
+    jobject,
+    jboolean enableTrace)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "VE_Init");
+
+  VALIDATE_BASE_POINTER;
+
+  return voeData.base->Init();
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_Terminate
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1Terminate(
+    JNIEnv *,
+    jobject)
+{
+  VALIDATE_BASE_POINTER;
+
+  jint retVal = voeData.base->Terminate();
+  return retVal;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_CreateChannel
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1CreateChannel(
+    JNIEnv *,
+    jobject)
+{
+  VALIDATE_BASE_POINTER;
+
+  webrtc::CodecInst voiceCodec;
+  int numOfVeCodecs = voeData.codec->NumOfCodecs();
+
+  //enum all the supported codec
+  __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                      "Supported Voice Codec:\n");
+  for (int i = 0; i < numOfVeCodecs; ++i) {
+    if (voeData.codec->GetCodec(i, voiceCodec) != -1) {
+      __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                          "num: %d name: %s\n", i, voiceCodec.plname);
+    }
+  }
+
+  jint channel = voeData.base->CreateChannel();
+
+  return channel;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_DeleteChannel
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1DeleteChannel(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  VALIDATE_BASE_POINTER;
+  return voeData.base->DeleteChannel(channel);
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_SetLocalReceiver
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetLocalReceiver(
+    JNIEnv *,
+    jobject,
+    jint channel,
+    jint port)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "SetLocalReceiver");
+  VALIDATE_BASE_POINTER;
+  return voeData.base->SetLocalReceiver(channel, port);
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_SetSendDestination
+ * Signature: (IILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSendDestination(
+    JNIEnv *env,
+    jobject,
+    jint channel,
+    jint port,
+    jstring ipaddr)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "SetSendDestination");
+  VALIDATE_BASE_POINTER;
+
+  const char* ipaddrNative = env->GetStringUTFChars(ipaddr, NULL);
+  if (!ipaddrNative) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Could not get UTF string");
+    return -1;
+  }
+  jint retVal = voeData.base->SetSendDestination(channel, port, ipaddrNative);
+  env->ReleaseStringUTFChars(ipaddr, ipaddrNative);
+  return retVal;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StartListen
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartListen(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartListen");
+  VALIDATE_BASE_POINTER;
+  return voeData.base->StartReceive(channel);
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StartPlayout
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayout(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartPlayout");
+  VALIDATE_BASE_POINTER;
+  return voeData.base->StartPlayout(channel);
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StartSend
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartSend(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  __android_log_write(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "StartSend");
+  VALIDATE_BASE_POINTER;
+  return voeData.base->StartSend(channel);
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StopListen
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopListen(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  VALIDATE_BASE_POINTER;
+  return voeData.base->StartReceive(channel);
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StopPlayout
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayout(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  VALIDATE_BASE_POINTER;
+  return voeData.base->StopPlayout(channel);
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StopSend
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopSend(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  VALIDATE_BASE_POINTER;
+  return voeData.base->StopSend(channel);
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_SetSpeakerVolume
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSpeakerVolume(
+    JNIEnv *,
+    jobject,
+    jint level)
+{
+  VALIDATE_VOLUME_POINTER;
+
+  if (voeData.volume->SetSpeakerVolume(level) != 0) {
+    return -1;
+  }
+  return 0;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_SetLoudspeakerStatus
+ * Signature: (Z)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetLoudspeakerStatus(
+    JNIEnv *,
+    jobject,
+    jboolean enable)
+{
+  VALIDATE_HARDWARE_POINTER;
+
+  if (voeData.hardware->SetLoudspeakerStatus(enable) != 0) {
+    return -1;
+  }
+
+  return 0;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StartPlayingFileLocally
+ * Signature: (ILjava/lang/String;Z)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayingFileLocally(
+    JNIEnv * env,
+    jobject,
+    jint channel,
+    jstring fileName,
+    jboolean loop)
+{
+  VALIDATE_FILE_POINTER;
+
+  const char* fileNameNative = env->GetStringUTFChars(fileName, NULL);
+  if (!fileNameNative) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Could not get UTF string");
+    return -1;
+  }
+
+  jint retVal = voeData.file->StartPlayingFileLocally(channel,
+                                                     fileNameNative,
+                                                     loop);
+
+  env->ReleaseStringUTFChars(fileName, fileNameNative);
+
+  return retVal;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StopPlayingFileLocally
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayingFileLocally(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  VALIDATE_FILE_POINTER;
+  return voeData.file->StopPlayingFileLocally(channel);
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StartPlayingFileAsMicrophone
+ * Signature: (ILjava/lang/String;Z)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartPlayingFileAsMicrophone(
+    JNIEnv *env,
+    jobject,
+    jint channel,
+    jstring fileName,
+    jboolean loop)
+{
+  VALIDATE_FILE_POINTER;
+
+  const char* fileNameNative = env->GetStringUTFChars(fileName, NULL);
+  if (!fileNameNative) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Could not get UTF string");
+    return -1;
+  }
+
+  jint retVal = voeData.file->StartPlayingFileAsMicrophone(channel,
+                                                          fileNameNative,
+                                                          loop);
+
+  env->ReleaseStringUTFChars(fileName, fileNameNative);
+
+  return retVal;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StopPlayingFileAsMicrophone
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopPlayingFileAsMicrophone(
+    JNIEnv *,
+    jobject,
+    jint channel)
+{
+  VALIDATE_FILE_POINTER;
+  return voeData.file->StopPlayingFileAsMicrophone(channel);
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_NumOfCodecs
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1NumOfCodecs(
+    JNIEnv *,
+    jobject)
+{
+  VALIDATE_CODEC_POINTER;
+  return voeData.codec->NumOfCodecs();
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_NumOfCodecs
+ * Signature: ()I
+ */
+JNIEXPORT jobjectArray JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1GetCodecs(
+    JNIEnv *env,
+    jobject)
+{
+  if (!voeData.codec) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Codec pointer doesn't exist");
+    return NULL;
+  }
+
+  jobjectArray ret;
+  int i;
+  int num = voeData.codec->NumOfCodecs();
+  char info[32];
+
+  ret = (jobjectArray)env->NewObjectArray(
+      num,
+      env->FindClass("java/lang/String"),
+      env->NewStringUTF(""));
+
+  for(i = 0; i < num; i++) {
+    webrtc::CodecInst codecToList;
+    voeData.codec->GetCodec(i, codecToList);
+    __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                        "VoiceEgnine Codec[%d] %s, pltype=%d\n",
+                        i, codecToList.plname, codecToList.pltype);
+    sprintf(info, "%s pltype:%d", codecToList.plname, codecToList.pltype);
+    env->SetObjectArrayElement(ret, i, env->NewStringUTF( info ));
+  }
+
+  return ret;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_SetSendCodec
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetSendCodec(
+    JNIEnv *,
+    jobject,
+    jint channel,
+    jint index)
+{
+  VALIDATE_CODEC_POINTER;
+
+  webrtc::CodecInst codec;
+
+  for (int i = 0; i < voeData.codec->NumOfCodecs(); ++i) {
+    webrtc::CodecInst codecToList;
+    voeData.codec->GetCodec(i, codecToList);
+    __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                        "VE Codec list %s, pltype=%d\n",
+                        codecToList.plname, codecToList.pltype);
+  }
+
+  if (voeData.codec->GetCodec(index, codec) != 0) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Failed to get codec");
+    return -1;
+  }
+  __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG, "SetSendCodec %s\n",
+                      codec.plname);
+
+  return voeData.codec->SetSendCodec(channel, codec);
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_SetECStatus
+ * Signature: (Z)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetECStatus(
+    JNIEnv *,
+    jobject,
+    jboolean enable) {
+  VALIDATE_APM_POINTER;
+  if (voeData.apm->SetEcStatus(enable, kEcAecm) < 0)
+    return -1;
+  return 0;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_SetAGCStatus
+ * Signature: (Z)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetAGCStatus(
+    JNIEnv *,
+    jobject,
+    jboolean enable) {
+  VALIDATE_APM_POINTER;
+  if (voeData.apm->SetAgcStatus(enable, kAgcFixedDigital) < 0)
+    return -1;
+  return 0;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_SetNSStatus
+ * Signature: (Z)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1SetNSStatus(
+    JNIEnv *,
+    jobject,
+    jboolean enable) {
+  VALIDATE_APM_POINTER;
+  if (voeData.apm->SetNsStatus(enable) < 0) {
+    return -1;
+  }
+  return 0;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StartDebugRecording
+ * Signature: (Ljava/lang/String)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartDebugRecording(
+    JNIEnv* env,
+    jobject,
+    jstring filename) {
+  VALIDATE_APM_POINTER;
+
+  const char* file = env->GetStringUTFChars(filename, NULL);
+  if (!file) {
+    __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Voice StartDebugRecording file error");
+    return -1;
+  }
+  if (voeData.apm->StartDebugRecording(file) != 0) {
+    __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Voice StartDebugRecording error");
+    return -1;
+  }
+  return 0;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StopDebugRecording
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopDebugRecording(
+    JNIEnv *,
+    jobject) {
+  VALIDATE_APM_POINTER;
+  if (voeData.apm->StopDebugRecording() < 0) {
+    __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Voice StopDebugRecording error");
+    return -1;
+  }
+  return 0;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StartIncomingRTPDump
+ * Signature: (ILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StartIncomingRTPDump(
+    JNIEnv* env,
+    jobject,
+    jint channel,
+    jstring filename) {
+  VALIDATE_RTP_POINTER;
+  const char* file = env->GetStringUTFChars(filename, NULL);
+  if (!file) {
+    __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Voice StartRTPDump file error");
+    return -1;
+  }
+  if (voeData.rtp->StartRTPDump(channel, file, kRtpIncoming) != 0) {
+    __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Voice StartRTPDump error");
+    return -1;
+  }
+  return 0;
+}
+
+/*
+ * Class:     org_webrtc_videoengineapp_ViEAndroidJavaAPI
+ * Method:    VoE_StopRTPDump
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_videoengineapp_ViEAndroidJavaAPI_VoE_1StopRTPDump(
+    JNIEnv *,
+    jobject,
+    jint channel) {
+  VALIDATE_RTP_POINTER;
+  if (voeData.rtp->StopRTPDump(channel) < 0) {
+    __android_log_print(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Voice StopRTPDump error");
+    return -1;
+  }
+  return 0;
+}
+
+//
+// local function
+//
+
+// Get all sub-APIs
+bool VE_GetSubApis() {
+  bool getOK = true;
+
+  // Base
+  voeData.base = VoEBase::GetInterface(voeData.ve);
+  if (!voeData.base) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get base sub-API failed");
+    getOK = false;
+  }
+
+  // Codec
+  voeData.codec = VoECodec::GetInterface(voeData.ve);
+  if (!voeData.codec) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get codec sub-API failed");
+    getOK = false;
+  }
+
+  // File
+  voeData.file = VoEFile::GetInterface(voeData.ve);
+  if (!voeData.file) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get file sub-API failed");
+    getOK = false;
+  }
+
+  // Network
+  voeData.netw = VoENetwork::GetInterface(voeData.ve);
+  if (!voeData.netw) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get network sub-API failed");
+    getOK = false;
+  }
+
+  // audioprocessing
+  voeData.apm = VoEAudioProcessing::GetInterface(voeData.ve);
+  if (!voeData.apm) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get VoEAudioProcessing sub-API failed");
+    getOK = false;
+  }
+
+  // Volume
+  voeData.volume = VoEVolumeControl::GetInterface(voeData.ve);
+  if (!voeData.volume) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get volume sub-API failed");
+    getOK = false;
+  }
+
+  // Hardware
+  voeData.hardware = VoEHardware::GetInterface(voeData.ve);
+  if (!voeData.hardware) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get hardware sub-API failed");
+    getOK = false;
+  }
+
+  // RTP
+  voeData.rtp = VoERTP_RTCP::GetInterface(voeData.ve);
+  if (!voeData.rtp) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Get rtp sub-API failed");
+    getOK = false;
+  }
+
+  return getOK;
+}
+
+// Release all sub-APIs
+bool VE_ReleaseSubApis() {
+  bool releaseOK = true;
+
+  // Base
+  if (voeData.base) {
+    if (0 != voeData.base->Release()) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Release base sub-API failed");
+      releaseOK = false;
+    }
+    else {
+      voeData.base = NULL;
+    }
+  }
+
+  // Codec
+  if (voeData.codec) {
+    if (0 != voeData.codec->Release()) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Release codec sub-API failed");
+      releaseOK = false;
+    }
+    else {
+      voeData.codec = NULL;
+    }
+  }
+
+  // File
+  if (voeData.file) {
+    if (0 != voeData.file->Release()) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Release file sub-API failed");
+      releaseOK = false;
+    }
+    else {
+      voeData.file = NULL;
+    }
+  }
+
+  // Network
+  if (voeData.netw) {
+    if (0 != voeData.netw->Release()) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Release network sub-API failed");
+      releaseOK = false;
+    }
+    else {
+      voeData.netw = NULL;
+    }
+  }
+
+  // apm
+  if (voeData.apm) {
+    if (0 != voeData.apm->Release()) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Release apm sub-API failed");
+      releaseOK = false;
+    }
+    else {
+      voeData.apm = NULL;
+    }
+  }
+
+  // Volume
+  if (voeData.volume) {
+    if (0 != voeData.volume->Release()) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Release volume sub-API failed");
+      releaseOK = false;
+    }
+    else {
+      voeData.volume = NULL;
+    }
+  }
+
+  // Hardware
+  if (voeData.hardware) {
+    if (0 != voeData.hardware->Release()) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Release hardware sub-API failed");
+      releaseOK = false;
+    }
+    else {
+      voeData.hardware = NULL;
+    }
+  }
+
+  if (voeData.rtp) {
+    if (0 != voeData.rtp->Release()) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Release rtp sub-API failed");
+      releaseOK = false;
+    }
+    else {
+      voeData.rtp = NULL;
+    }
+  }
+
+  return releaseOK;
+}
diff --git a/src/video_engine/main/test/android_test/project.properties b/src/video_engine/main/test/android_test/project.properties
new file mode 100644
index 0000000..ddd0fc4
--- /dev/null
+++ b/src/video_engine/main/test/android_test/project.properties
@@ -0,0 +1,13 @@
+# This file is automatically generated by Android Tools.
+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
+#
+# This file must be checked in Version Control Systems.
+#
+# To customize properties used by the Ant build system use,
+# "ant.properties", and override values to adapt the script to your
+# project structure.
+
+# Indicates whether an apk should be generated for each density.
+split.density=false
+# Project target.
+target=android-10
diff --git a/src/video_engine/main/test/android_test/res/drawable/logo.png b/src/video_engine/main/test/android_test/res/drawable/logo.png
new file mode 100644
index 0000000..a07c69f
--- /dev/null
+++ b/src/video_engine/main/test/android_test/res/drawable/logo.png
Binary files differ
diff --git a/src/video_engine/main/test/android_test/res/layout/aconfig.xml b/src/video_engine/main/test/android_test/res/layout/aconfig.xml
new file mode 100644
index 0000000..5f995c6
--- /dev/null
+++ b/src/video_engine/main/test/android_test/res/layout/aconfig.xml
@@ -0,0 +1,87 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout android:layout_width="fill_parent"
+              android:layout_height="fill_parent" android:orientation="vertical"
+              xmlns:android="http://schemas.android.com/apk/res/android">
+  <TextView android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:textStyle="bold"
+            android:textSize="24dip"
+            android:text="Audio Settings"></TextView>
+  <TextView android:id="@+id/TextView03"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content">
+  </TextView>
+  <TextView android:id="@+id/TextView01"
+            android:layout_height="wrap_content"
+            android:layout_gravity="bottom"
+            android:layout_width="wrap_content"
+            android:text="@string/codecType">
+  </TextView>
+  <Spinner android:layout_height="wrap_content"
+           android:layout_width="fill_parent"
+           android:id="@+id/spVoiceCodecType">
+  </Spinner>
+
+  <LinearLayout android:id="@+id/LinearLayout02"
+                android:layout_height="wrap_content"
+                android:layout_width="fill_parent">
+    <TextView android:layout_width="wrap_content"
+              android:layout_height="wrap_content"
+              android:text="@string/aTxPort">
+    </TextView>
+    <EditText android:layout_height="wrap_content"
+              android:layout_width="wrap_content"
+              android:id="@+id/etATxPort">
+    </EditText>
+    <TextView android:layout_width="wrap_content"
+              android:layout_height="wrap_content"
+              android:text="@string/aRxPort">
+    </TextView>
+    <EditText android:layout_height="wrap_content"
+              android:layout_width="wrap_content"
+              android:id="@+id/etARxPort" >
+    </EditText>
+  </LinearLayout>
+	
+  <LinearLayout android:id="@+id/LinearLayout02"
+                android:layout_height="wrap_content"
+                android:layout_width="fill_parent">
+    <CheckBox android:layout_width="wrap_content"
+              android:layout_height="wrap_content"
+              android:id="@+id/cbAECM"
+              android:text="@string/AECM">
+    </CheckBox>
+    <CheckBox android:layout_width="wrap_content"
+              android:layout_height="wrap_content"
+              android:id="@+id/cbNoiseSuppression"
+              android:text="@string/NoiseSuppression">
+    </CheckBox>
+    <CheckBox android:layout_width="wrap_content"
+              android:layout_height="wrap_content"
+              android:id="@+id/cbAutoGainControl"
+              android:text="@string/AutoGainControl">
+    </CheckBox>
+  </LinearLayout>
+
+  <LinearLayout android:id="@+id/LinearLayout02"
+                android:layout_height="wrap_content"
+                android:layout_width="fill_parent">
+    <CheckBox android:layout_width="wrap_content"
+              android:layout_height="wrap_content"
+              android:id="@+id/cbSpeaker"
+              android:text="@string/speaker">
+    </CheckBox>
+    <CheckBox android:layout_width="wrap_content"
+              android:layout_height="wrap_content"
+              android:id="@+id/cbDebugRecording"
+              android:text="@string/debugrecording">
+    </CheckBox>
+    <CheckBox android:layout_width="wrap_content"
+              android:layout_height="wrap_content"
+              android:id="@+id/cbVoiceRTPDump"
+              android:text="@string/rtpdump">
+    </CheckBox>
+
+  </LinearLayout>
+
+</LinearLayout>
diff --git a/src/video_engine/main/test/android_test/res/layout/both.xml b/src/video_engine/main/test/android_test/res/layout/both.xml
new file mode 100644
index 0000000..d29d906
--- /dev/null
+++ b/src/video_engine/main/test/android_test/res/layout/both.xml
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout 
+    	xmlns:android="http://schemas.android.com/apk/res/android"    		    		
+    		android:orientation="horizontal"
+            android:layout_width="fill_parent"
+            android:layout_height="fill_parent">
+          <LinearLayout 
+                android:orientation="vertical"
+                android:layout_width="120dip"
+                android:layout_height="fill_parent">
+           <LinearLayout android:id="@+id/llLocalView" 
+            	android:layout_width="fill_parent" 
+            	android:layout_height="80dip">
+            </LinearLayout>
+            <TextView
+                android:layout_width="fill_parent"
+                android:layout_height="fill_parent"
+                android:layout_weight="1"
+                android:text="" />
+            <Button android:id="@+id/btSwitchCamera"
+                    android:layout_width="fill_parent"
+                    android:layout_height="wrap_content"
+                    android:text="@string/frontCamera"
+                    android:layout_gravity="bottom"/>
+            <Button android:id="@+id/btStartStopCall"
+                    android:layout_width="fill_parent"
+                    android:layout_height="wrap_content"
+                    android:text="@string/startCall"
+                    android:layout_gravity="bottom"/>
+			<Button android:id="@+id/btExit"
+                    android:layout_width="fill_parent"
+                    android:layout_height="wrap_content"
+                    android:layout_gravity="bottom"
+                    android:text="@string/exit"/>
+        </LinearLayout>
+        <LinearLayout 
+            android:id="@+id/llRemoteView"
+            android:layout_width="fill_parent"
+            android:layout_height="fill_parent"
+            android:layout_weight="1">
+        </LinearLayout>
+    </LinearLayout >   
+
diff --git a/src/video_engine/main/test/android_test/res/layout/main.xml b/src/video_engine/main/test/android_test/res/layout/main.xml
new file mode 100644
index 0000000..aa6bb88
--- /dev/null
+++ b/src/video_engine/main/test/android_test/res/layout/main.xml
@@ -0,0 +1,87 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout android:layout_width="fill_parent"
+	android:layout_height="fill_parent" android:layout_gravity="right"
+	android:orientation="vertical"
+	      xmlns:android="http://schemas.android.com/apk/res/android">
+
+	<TextView android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:id="@+id/tvTitle"
+		  android:textStyle="bold"
+		  android:textSize="24dip"
+		  android:text="@string/gSettings">
+	</TextView>
+	<TextView android:id="@+id/TextView03"
+		  android:layout_width="wrap_content"
+		  android:layout_height="wrap_content">
+	</TextView>
+
+        <LinearLayout android:orientation="horizontal"
+                      android:id="@+id/LinearLayout02"
+		      android:layout_height="wrap_content"
+		      android:layout_width="fill_parent">
+	<CheckBox android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:id="@+id/cbVideoReceive"
+		  android:text="@string/enableVideoReceive">
+	</CheckBox>
+	<CheckBox android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:id="@+id/cbVideoSend"
+		  android:text="@string/enableVideoSend">
+	</CheckBox>
+	<CheckBox android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:id="@+id/cbVoice"
+		  android:text="@string/enableVoice">
+	</CheckBox>
+	<CheckBox android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:id="@+id/cbLoopback"
+		  android:text="@string/loopback">
+	</CheckBox>
+	<CheckBox android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:id="@+id/cbStats"
+		  android:text="@string/stats">
+	</CheckBox>
+        </LinearLayout>
+
+	<TextView android:id="@+id/TextView02"
+		  android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:text="@string/remoteIp">
+	</TextView>
+	<EditText android:layout_height="wrap_content"
+		  android:layout_width="fill_parent"
+		  android:id="@+id/etRemoteIp" >
+	</EditText>
+	
+	<LinearLayout android:orientation="horizontal"
+                      android:id="@+id/LinearLayout03"
+		      android:layout_height="wrap_content"
+		      android:layout_width="fill_parent">
+        <RadioGroup
+                  android:layout_width="fill_parent"
+                  android:layout_height="wrap_content"
+                  android:orientation="vertical"
+                  android:id="@+id/radio_group1">
+                  <RadioButton
+                          android:id="@+id/radio_opengl"
+                          android:onClick="onClick"
+                          android:layout_width="wrap_content"
+                          android:layout_height="wrap_content"
+                          android:text="@string/opengl"
+                          android:checked="true"
+                          android:textColor="#fff"/>
+                 <RadioButton
+                          android:id="@+id/radio_surface"
+                          android:onClick="onClick"
+                          android:layout_width="wrap_content"
+                          android:layout_height="wrap_content"
+                          android:text="@string/surfaceview"
+                          android:textColor="#fff" />
+        </RadioGroup>
+	</LinearLayout>
+
+</LinearLayout>
diff --git a/src/video_engine/main/test/android_test/res/layout/row.xml b/src/video_engine/main/test/android_test/res/layout/row.xml
new file mode 100644
index 0000000..aa4f0ca
--- /dev/null
+++ b/src/video_engine/main/test/android_test/res/layout/row.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="utf-8"?>
+<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
+                android:layout_width="fill_parent"
+                android:layout_height="wrap_content"
+                android:orientation="vertical"
+                android:padding="3dip">
+  <TextView
+     android:layout_toRightOf="@+id/image"
+     android:padding="3dip"
+     android:layout_marginTop="2dip"
+     android:textColor="#000"
+     android:textStyle="bold"
+     android:id="@+id/spinner_row"
+     android:text="description"
+     android:layout_marginLeft="5dip"
+     android:layout_width="wrap_content"
+     android:layout_height="wrap_content"/>
+</RelativeLayout>
diff --git a/src/video_engine/main/test/android_test/res/layout/send.xml b/src/video_engine/main/test/android_test/res/layout/send.xml
new file mode 100644
index 0000000..ee230f5
--- /dev/null
+++ b/src/video_engine/main/test/android_test/res/layout/send.xml
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+	      android:orientation="vertical"
+	      android:layout_width="fill_parent"
+	      android:layout_height="fill_parent">
+
+
+  <SurfaceView android:id="@+id/svLocal"
+	       android:layout_width="wrap_content"
+	       android:layout_height="wrap_content">
+  </SurfaceView>
+  <ImageView android:id="@+id/ivPreview"
+	     android:layout_height="fill_parent"
+	     android:layout_width="fill_parent">
+  </ImageView>
+
+</LinearLayout>
diff --git a/src/video_engine/main/test/android_test/res/layout/tabhost.xml b/src/video_engine/main/test/android_test/res/layout/tabhost.xml
new file mode 100644
index 0000000..42383fd
--- /dev/null
+++ b/src/video_engine/main/test/android_test/res/layout/tabhost.xml
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="utf-8"?>
+<TabHost xmlns:android="http://schemas.android.com/apk/res/android"
+	 android:id="@android:id/tabhost"
+	 android:layout_width="fill_parent"
+	 android:layout_height="fill_parent">
+  <LinearLayout
+     android:orientation="vertical"
+     android:layout_width="fill_parent"
+     android:layout_height="fill_parent">
+    <TabWidget
+       android:id="@android:id/tabs"
+       android:layout_width="fill_parent"
+       android:layout_height="wrap_content" />
+    <FrameLayout
+       android:id="@android:id/tabcontent"
+       android:layout_width="fill_parent"
+       android:layout_height="fill_parent">            
+      <include android:id="@+id/tab_video" layout="@layout/both" />
+      <include android:id="@+id/tab_config" layout="@layout/main" />
+      <include android:id="@+id/tab_vconfig" layout="@layout/vconfig" />
+      <include android:id="@+id/tab_aconfig" layout="@layout/aconfig" />
+    </FrameLayout>
+  </LinearLayout>
+</TabHost>
diff --git a/src/video_engine/main/test/android_test/res/layout/vconfig.xml b/src/video_engine/main/test/android_test/res/layout/vconfig.xml
new file mode 100644
index 0000000..d72f257
--- /dev/null
+++ b/src/video_engine/main/test/android_test/res/layout/vconfig.xml
@@ -0,0 +1,73 @@
+<?xml version="1.0" encoding="utf-8"?>
+<LinearLayout android:layout_width="fill_parent"
+	android:layout_height="fill_parent" android:orientation="vertical" 
+	xmlns:android="http://schemas.android.com/apk/res/android">
+	<TextView android:layout_width="wrap_content" 
+	android:layout_height="wrap_content"
+	android:textStyle="bold" 
+	android:textSize="24dip" 
+	android:text="@string/vSettings">
+	</TextView>
+	
+	<TextView android:id="@+id/TextView03"
+		  android:layout_width="wrap_content"
+		  android:layout_height="wrap_content">
+	</TextView>	
+	
+	<TextView android:id="@+id/TextView01"
+		  android:layout_height="wrap_content"
+		  android:layout_width="wrap_content"
+		  android:text="@string/codecType">
+	</TextView>
+	<Spinner android:layout_height="wrap_content"
+		 android:layout_width="fill_parent"
+		 android:id="@+id/spCodecType">
+	</Spinner>
+	
+	<TextView android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:id="@+id/tvCodecSize"
+		  android:text="@string/codecSize">
+	</TextView>
+	<Spinner android:layout_height="wrap_content"
+		 android:layout_width="fill_parent"
+		 android:id="@+id/spCodecSize">
+	</Spinner>
+	
+	<LinearLayout android:id="@+id/LinearLayout02"
+		      android:layout_height="wrap_content"
+		      android:layout_width="fill_parent">
+	<TextView android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:text="@string/vTxPort">
+	</TextView>
+	<EditText android:layout_height="wrap_content"
+		  android:layout_width="wrap_content"
+		  android:id="@+id/etVTxPort" >
+	</EditText>
+	<TextView android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:text="@string/vRxPort">
+	</TextView>
+	<EditText android:layout_height="wrap_content"
+		  android:layout_width="wrap_content"
+		  android:id="@+id/etVRxPort" >
+	</EditText>
+	</LinearLayout>
+	
+	<LinearLayout android:id="@+id/LinearLayout03"
+		      android:layout_height="wrap_content"
+		      android:layout_width="fill_parent">
+	<CheckBox android:layout_width="wrap_content"
+		  android:layout_height="wrap_content"
+		  android:id="@+id/cbNack"
+		  android:text="@string/nack">
+	</CheckBox>
+        <CheckBox android:layout_width="wrap_content"
+                  android:layout_height="wrap_content"
+                  android:id="@+id/cbVideoRTPDump"
+                  android:text="@string/rtpdump">
+        </CheckBox>
+
+	</LinearLayout>
+</LinearLayout>
diff --git a/src/video_engine/main/test/android_test/res/values/strings.xml b/src/video_engine/main/test/android_test/res/values/strings.xml
new file mode 100644
index 0000000..82760b0
--- /dev/null
+++ b/src/video_engine/main/test/android_test/res/values/strings.xml
@@ -0,0 +1,40 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+  <string name="app_name">WebRTC</string>
+  <string name="error">Error</string>
+  <string name="errorCamera">Camera Error</string>
+<string name="codectype_prompt">Choose a codec type</string>
+<string name="demoTitle">Video Engine Android Demo</string>
+<string name="codecType">Codec Type</string>
+<string name="codecSize">Codec Size</string>
+<string name="remoteIp">Remote IP address</string>
+<string name="loopback">Loopback</string>
+<string name="stats">Stats</string>
+<string name="startListen">Start Listen</string>
+<string name="startSend">Start Send</string>
+<string name="startBoth">Start Both</string>
+<string name="enableVoice">Voice</string>
+<string name="enableVideoReceive">Video Receive</string>
+<string name="enableVideoSend">Video Send</string>
+<string name="gSettings">Global Settings</string>
+<string name="vSettings">Video Settings</string>
+<string name="vTxPort">Video Tx Port</string>
+<string name="vRxPort">Video Rx Port</string>
+<string name="aTxPort">Audio Tx Port</string>
+<string name="aRxPort">Audio Rx Port</string>
+<string name="AutoGainControl">AGC</string>
+<string name="VoiceActivityDetection">VAD</string>
+<string name="AECM">AECM</string>
+<string name="NoiseSuppression">NS</string>
+<string name="nack">NACK</string>
+<string name="frontCamera">SwitchToFront</string>
+<string name="backCamera">SwitchToBack</string>
+<string name="startCall">StartCall</string>
+<string name="stopCall">StopCall</string>
+<string name="exit">Exit</string>
+<string name="speaker">Speaker</string>
+<string name="debugrecording">APMRecord</string>
+<string name="rtpdump">rtpdump</string>
+<string name="surfaceview">SurfaceView</string>
+<string name="opengl">OpenGL</string>
+</resources>
diff --git a/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/IViEAndroidCallback.java b/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/IViEAndroidCallback.java
new file mode 100644
index 0000000..5a26190
--- /dev/null
+++ b/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/IViEAndroidCallback.java
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengineapp;
+
+public interface IViEAndroidCallback {
+    public int UpdateStats(int frameRateI, int bitRateI,
+        int packetLoss, int frameRateO,
+        int bitRateO);
+}
diff --git a/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/ViEAndroidJavaAPI.java b/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/ViEAndroidJavaAPI.java
new file mode 100644
index 0000000..9fd060e
--- /dev/null
+++ b/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/ViEAndroidJavaAPI.java
@@ -0,0 +1,146 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengineapp;
+
+import android.app.Activity;
+import android.content.Context;
+import android.util.Log;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+
+public class ViEAndroidJavaAPI {
+
+    public ViEAndroidJavaAPI(Context context) {
+        Log.d("*WEBRTCJ*", "Loading ViEAndroidJavaAPI...");
+        System.loadLibrary("webrtc-video-demo-jni");
+
+        Log.d("*WEBRTCJ*", "Calling native init...");
+        if (!NativeInit(context)) {
+            Log.e("*WEBRTCJ*", "Native init failed");
+            throw new RuntimeException("Native init failed");
+        }
+        else {
+            Log.d("*WEBRTCJ*", "Native init successful");
+        }
+        String a = "";
+        a.getBytes();
+    }
+
+    // API Native
+    private native boolean NativeInit(Context context);
+
+    // Video Engine API
+    // Initialization and Termination functions
+    public native int GetVideoEngine();
+    public native int Init(boolean enableTrace);
+    public native int Terminate();
+
+    public native int StartSend(int channel);
+    public native int StopRender(int channel);
+    public native int StopSend(int channel);
+    public native int StartReceive(int channel);
+    public native int StopReceive(int channel);
+    // Channel functions
+    public native int CreateChannel(int voiceChannel);
+    // Receiver & Destination functions
+    public native int SetLocalReceiver(int channel, int port);
+    public native int SetSendDestination(int channel, int port, String ipaddr);
+    // Codec
+    public native String[] GetCodecs();
+    public native int SetReceiveCodec(int channel, int codecNum,
+            int intbitRate, int width,
+            int height, int frameRate);
+    public native int SetSendCodec(int channel, int codecNum,
+            int intbitRate, int width,
+            int height, int frameRate);
+    // Rendering
+    public native int AddRemoteRenderer(int channel, Object glSurface);
+    public native int RemoveRemoteRenderer(int channel);
+    public native int StartRender(int channel);
+
+    // Capture
+    public native int StartCamera(int channel, int cameraNum);
+    public native int StopCamera(int cameraId);
+    public native int GetCameraOrientation(int cameraNum);
+    public native int SetRotation(int cameraId,int degrees);
+
+    // NACK
+    public native int EnableNACK(int channel, boolean enable);
+
+    // PLI
+    public native int EnablePLI(int channel, boolean enable);
+
+    // Enable stats callback
+    public native int SetCallback(int channel, IViEAndroidCallback callback);
+
+    public native int StartIncomingRTPDump(int channel, String file);
+    public native int StopIncomingRTPDump(int channel);
+
+    // Voice Engine API
+    // Create and Delete functions
+    public native boolean VoE_Create();
+    public native boolean VoE_Delete();
+
+    // Initialization and Termination functions
+    public native int VoE_Init(boolean enableTrace);
+    public native int VoE_Terminate();
+
+    // Channel functions
+    public native int VoE_CreateChannel();
+    public native int VoE_DeleteChannel(int channel);
+
+    // Receiver & Destination functions
+    public native int VoE_SetLocalReceiver(int channel, int port);
+    public native int VoE_SetSendDestination(int channel, int port,
+                                             String ipaddr);
+
+    // Media functions
+    public native int VoE_StartListen(int channel);
+    public native int VoE_StartPlayout(int channel);
+    public native int VoE_StartSend(int channel);
+    public native int VoE_StopListen(int channel);
+    public native int VoE_StopPlayout(int channel);
+    public native int VoE_StopSend(int channel);
+
+    // Volume
+    public native int VoE_SetSpeakerVolume(int volume);
+
+    // Hardware
+    public native int VoE_SetLoudspeakerStatus(boolean enable);
+
+    // Playout file locally
+    public native int VoE_StartPlayingFileLocally(
+        int channel,
+        String fileName,
+        boolean loop);
+    public native int VoE_StopPlayingFileLocally(int channel);
+
+    // Play file as microphone
+    public native int VoE_StartPlayingFileAsMicrophone(
+        int channel,
+        String fileName,
+        boolean loop);
+    public native int VoE_StopPlayingFileAsMicrophone(int channel);
+
+    // Codec-setting functions
+    public native int VoE_NumOfCodecs();
+    public native String[] VoE_GetCodecs();
+    public native int VoE_SetSendCodec(int channel, int index);
+
+    //VoiceEngine funtions
+    public native int VoE_SetECStatus(boolean enable);
+    public native int VoE_SetAGCStatus(boolean enable);
+    public native int VoE_SetNSStatus(boolean enable);
+    public native int VoE_StartDebugRecording(String file);
+    public native int VoE_StopDebugRecording();
+    public native int VoE_StartIncomingRTPDump(int channel, String file);
+    public native int VoE_StopIncomingRTPDump(int channel);
+}
diff --git a/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/WebRTCDemo.java b/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/WebRTCDemo.java
new file mode 100644
index 0000000..b5bbc93
--- /dev/null
+++ b/src/video_engine/main/test/android_test/src/org/webrtc/videoengineapp/WebRTCDemo.java
@@ -0,0 +1,996 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.videoengineapp;
+
+import java.io.File;
+import java.net.InetAddress;
+import java.net.NetworkInterface;
+import java.net.SocketException;
+import java.util.Enumeration;
+
+import org.webrtc.videoengine.ViERenderer;
+
+import android.app.TabActivity;
+import android.app.AlertDialog;
+import android.app.Dialog;
+import android.content.Context;
+import android.content.DialogInterface;
+import android.content.res.Configuration;
+import android.content.pm.ActivityInfo;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.DashPathEffect;
+import android.graphics.Paint;
+import android.graphics.Path;
+import android.graphics.PixelFormat;
+import android.graphics.Rect;
+import android.hardware.SensorManager;
+import android.media.AudioManager;
+import android.os.Bundle;
+import android.os.Environment;
+import android.os.PowerManager;
+import android.os.PowerManager.WakeLock;
+
+import android.util.Log;
+import android.view.Gravity;
+import android.view.KeyEvent;
+import android.view.LayoutInflater;
+import android.view.Surface;
+import android.view.SurfaceView;
+import android.view.View;
+import android.view.ViewGroup;
+import android.view.Display;
+import android.view.Window;
+import android.view.WindowManager;
+import android.view.WindowManager.LayoutParams;
+
+import android.widget.AdapterView;
+import android.widget.ArrayAdapter;
+import android.widget.Button;
+import android.widget.CheckBox;
+
+import android.widget.EditText;
+import android.widget.LinearLayout;
+import android.widget.RadioGroup;
+import android.widget.Spinner;
+import android.widget.TabHost;
+import android.widget.TextView;
+import android.widget.AdapterView.OnItemSelectedListener;
+import android.widget.TabHost.TabSpec;
+import android.view.OrientationEventListener;
+
+public class WebRTCDemo extends TabActivity implements IViEAndroidCallback,
+                                                View.OnClickListener,
+                                                OnItemSelectedListener {
+    private ViEAndroidJavaAPI ViEAndroidAPI = null;
+
+    // remote renderer
+    private SurfaceView remoteSurfaceView = null;
+
+    // local renderer and camera
+    private SurfaceView svLocal = null;
+
+    // channel number
+    private int channel;
+    private int cameraId;
+    private int voiceChannel = -1;
+
+    // flags
+    private boolean viERunning = false;
+    private boolean voERunning = false;
+
+    // debug
+    private boolean enableTrace = false;
+
+    // Constant
+    private static final String TAG = "WEBRTC";
+    private static final int RECEIVE_CODEC_FRAMERATE = 15;
+    private static final int SEND_CODEC_FRAMERATE = 15;
+    private static final int INIT_BITRATE = 500;
+    private static final String LOOPBACK_IP = "127.0.0.1";
+
+    private int volumeLevel = 204;
+
+    private TabHost mTabHost = null;
+
+    private TabSpec mTabSpecConfig;
+    private TabSpec mTabSpecVideo;
+
+    private LinearLayout mLlRemoteSurface = null;
+    private LinearLayout mLlLocalSurface = null;
+
+    private Button btStartStopCall;
+    private Button btSwitchCamera;
+
+    // Global Settings
+    private CheckBox cbVideoSend;
+    private boolean enableVideoSend = true;
+    private CheckBox cbVideoReceive;
+    private boolean enableVideoReceive = true;
+    private boolean enableVideo = true;
+    private CheckBox cbVoice;
+    private boolean enableVoice = true;
+    private EditText etRemoteIp;
+    private String remoteIp = "";
+    private CheckBox cbLoopback;
+    private boolean loopbackMode = true;
+    private CheckBox cbStats;
+    private boolean isStatsOn = true;
+    private boolean useOpenGLRender = true;
+
+    // Video settings
+    private Spinner spCodecType;
+    private int codecType = 0;
+    private Spinner spCodecSize;
+    private int codecSizeWidth = 0;
+    private int codecSizeHeight = 0;
+    private TextView etVRxPort;
+    private int receivePortVideo = 11111;
+    private TextView etVTxPort;
+    private int destinationPortVideo = 11111;
+    private CheckBox cbEnableNack;
+    private boolean enableNack = false;
+    private CheckBox cbEnableVideoRTPDump;
+
+    // Audio settings
+    private Spinner spVoiceCodecType;
+    private int voiceCodecType = 0;
+    private TextView etARxPort;
+    private int receivePortVoice = 11113;
+    private TextView etATxPort;
+    private int destinationPortVoice = 11113;
+    private CheckBox cbEnableSpeaker;
+    private boolean enableSpeaker = false;
+    private CheckBox cbEnableAGC;
+    private boolean enableAGC = false;
+    private CheckBox cbEnableAECM;
+    private boolean enableAECM = false;
+    private CheckBox cbEnableNS;
+    private boolean enableNS = false;
+    private CheckBox cbEnableDebugAPM;
+    private CheckBox cbEnableVoiceRTPDump;
+
+    // Stats variables
+    private int frameRateI;
+    private int bitRateI;
+    private int packetLoss;
+    private int frameRateO;
+    private int bitRateO;
+
+    // Variable for storing variables
+    private String webrtcName = "/webrtc";
+    private String webrtcDebugDir = null;
+
+    private WakeLock wakeLock;
+
+    private boolean usingFrontCamera = true;
+
+    private String[] mVideoCodecsStrings = null;
+    private String[] mVideoCodecsSizeStrings = { "176x144", "320x240",
+                                                 "352x288", "640x480" };
+    private String[] mVoiceCodecsStrings = null;
+
+    private OrientationEventListener orientationListener;
+    int currentOrientation = OrientationEventListener.ORIENTATION_UNKNOWN;
+    int currentCameraOrientation = 0;
+
+    private StatsView statsView = null;
+
+    public int GetCameraOrientation(int cameraOrientation) {
+        Display display = this.getWindowManager().getDefaultDisplay();
+        int displatyRotation = display.getRotation();
+        int degrees = 0;
+        switch (displatyRotation) {
+            case Surface.ROTATION_0: degrees = 0; break;
+            case Surface.ROTATION_90: degrees = 90; break;
+            case Surface.ROTATION_180: degrees = 180; break;
+            case Surface.ROTATION_270: degrees = 270; break;
+        }
+        int result=0;
+        if(cameraOrientation>180) {
+            result=(cameraOrientation + degrees) % 360;
+        }
+        else {
+            result=(cameraOrientation - degrees+360) % 360;
+        }
+        return result;
+    }
+
+    public void onConfigurationChanged(Configuration newConfig) {
+        super.onConfigurationChanged(newConfig);
+        int newRotation = GetCameraOrientation(currentCameraOrientation);
+        if (viERunning){
+            ViEAndroidAPI.SetRotation(cameraId,newRotation);
+        }
+    }
+
+    // Called when the activity is first created.
+    @Override
+    public void onCreate(Bundle savedInstanceState) {
+        Log.d(TAG, "onCreate");
+
+        super.onCreate(savedInstanceState);
+        requestWindowFeature(Window.FEATURE_NO_TITLE);
+        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
+                WindowManager.LayoutParams.FLAG_FULLSCREEN);
+        // Set screen orientation
+        setRequestedOrientation (ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
+
+        PowerManager pm = (PowerManager)this.getSystemService(
+            Context.POWER_SERVICE);
+        wakeLock = pm.newWakeLock(
+            PowerManager.SCREEN_DIM_WAKE_LOCK, TAG);
+
+        setContentView(R.layout.tabhost);
+        mTabHost = getTabHost();
+
+        // Main tab
+        mTabSpecVideo = mTabHost.newTabSpec("tab_video");
+        mTabSpecVideo.setIndicator("Main");
+        mTabSpecVideo.setContent(R.id.tab_video);
+        mTabHost.addTab(mTabSpecVideo);
+
+        // Shared config tab
+        mTabHost = getTabHost();
+        mTabSpecConfig = mTabHost.newTabSpec("tab_config");
+        mTabSpecConfig.setIndicator("Settings");
+        mTabSpecConfig.setContent(R.id.tab_config);
+        mTabHost.addTab(mTabSpecConfig);
+
+        TabSpec mTabv;
+        mTabv = mTabHost.newTabSpec("tab_vconfig");
+        mTabv.setIndicator("Video");
+        mTabv.setContent(R.id.tab_vconfig);
+        mTabHost.addTab(mTabv);
+        TabSpec mTaba;
+        mTaba = mTabHost.newTabSpec("tab_aconfig");
+        mTaba.setIndicator("Audio");
+        mTaba.setContent(R.id.tab_aconfig);
+        mTabHost.addTab(mTaba);
+
+        int childCount = mTabHost.getTabWidget().getChildCount();
+        for (int i=0; i<childCount; i++)
+            mTabHost.getTabWidget().getChildAt(i).getLayoutParams().height = 50;
+
+        orientationListener =
+                new OrientationEventListener(this,SensorManager.SENSOR_DELAY_UI) {
+                    public void onOrientationChanged (int orientation) {
+                        if (orientation != ORIENTATION_UNKNOWN) {
+                            currentOrientation = orientation;
+                        }
+                    }
+                };
+        orientationListener.enable ();
+
+        // Create a folder named webrtc in /scard for debugging
+        webrtcDebugDir = Environment.getExternalStorageDirectory().toString() +
+                webrtcName;
+        File webrtcDir = new File(webrtcDebugDir);
+        if (!webrtcDir.exists() && webrtcDir.mkdir() == false) {
+            Log.v(TAG, "Failed to create " + webrtcDebugDir);
+        }
+        else if (!webrtcDir.isDirectory()) {
+            Log.v(TAG, webrtcDebugDir + " exists but not a folder");
+            webrtcDebugDir = null;
+        }
+
+        StartMain();
+        return;
+    }
+
+    private class StatsView extends View{
+        public StatsView(Context context){
+            super(context);
+        }
+
+        @Override protected void onDraw(Canvas canvas) {
+            super.onDraw(canvas);
+
+            Paint mLoadPaint = new Paint();
+            mLoadPaint.setAntiAlias(true);
+            mLoadPaint.setTextSize(16);
+            mLoadPaint.setARGB(255, 255, 255, 255);
+
+            String mLoadText;
+            mLoadText = "> " + frameRateI + " fps/" + bitRateI + "k bps/ " + packetLoss;
+            canvas.drawText(mLoadText, 4, 172, mLoadPaint);
+            mLoadText = "< " + frameRateO + " fps/ " + bitRateO + "k bps";
+            canvas.drawText(mLoadText, 4, 192, mLoadPaint);
+
+            updateDisplay();
+        }
+
+        void updateDisplay() {
+            invalidate();
+        }
+    }
+
+    private String GetLocalIpAddress() {
+        String localIPs = "";
+        try {
+            for (Enumeration<NetworkInterface> en = NetworkInterface
+                         .getNetworkInterfaces(); en.hasMoreElements();) {
+                NetworkInterface intf = en.nextElement();
+                for (Enumeration<InetAddress> enumIpAddr =
+                             intf.getInetAddresses();
+                     enumIpAddr.hasMoreElements(); ) {
+                    InetAddress inetAddress = enumIpAddr.nextElement();
+                    if (!inetAddress.isLoopbackAddress()) {
+                        localIPs +=
+                                inetAddress.getHostAddress().toString() + " ";
+                        // Set the remote ip address the same as
+                        // the local ip address of the last netif
+                        remoteIp = inetAddress.getHostAddress().toString();
+                    }
+                }
+            }
+        } catch (SocketException ex) {
+            Log.e(TAG, ex.toString());
+        }
+        return localIPs;
+    }
+
+    @Override
+    public boolean onKeyDown(int keyCode, KeyEvent event) {
+        if (keyCode == KeyEvent.KEYCODE_BACK) {
+            if (viERunning) {
+                StopAll();
+                StartMain();
+            }
+            finish();
+            return true;
+        }
+        return super.onKeyDown(keyCode, event);
+    }
+
+    private void StopAll() {
+        Log.d(TAG, "StopAll");
+
+        if (ViEAndroidAPI != null) {
+            if (voERunning) {
+                voERunning = false;
+                StopVoiceEngine();
+            }
+
+            if (viERunning) {
+                viERunning = false;
+                ViEAndroidAPI.StopRender(channel);
+                ViEAndroidAPI.StopReceive(channel);
+                ViEAndroidAPI.StopSend(channel);
+                ViEAndroidAPI.RemoveRemoteRenderer(channel);
+                ViEAndroidAPI.StopCamera(cameraId);
+                ViEAndroidAPI.Terminate();
+                mLlRemoteSurface.removeView(remoteSurfaceView);
+                mLlLocalSurface.removeView(svLocal);
+                remoteSurfaceView = null;
+                svLocal = null;
+            }
+        }
+    }
+
+    public class SpinnerAdapter extends ArrayAdapter<String> {
+        private String[] mCodecString = null;
+        public SpinnerAdapter(Context context, int textViewResourceId, String[] objects) {
+            super(context, textViewResourceId, objects);
+            mCodecString = objects;
+        }
+
+        @Override public View getDropDownView(int position, View convertView, ViewGroup parent) {
+            return getCustomView(position, convertView, parent);
+        }
+
+        @Override public View getView(int position, View convertView, ViewGroup parent) {
+            return getCustomView(position, convertView, parent);
+        }
+
+        public View getCustomView(int position, View convertView, ViewGroup parent) {
+            LayoutInflater inflater = getLayoutInflater();
+            View row = inflater.inflate(R.layout.row, parent, false);
+            TextView label = (TextView)row.findViewById(R.id.spinner_row);
+            label.setText(mCodecString[position]);
+            return row;
+        }
+    }
+
+    private void StartMain() {
+        mTabHost.setCurrentTab(0);
+
+        mLlRemoteSurface = (LinearLayout) findViewById(R.id.llRemoteView);
+        mLlLocalSurface = (LinearLayout) findViewById(R.id.llLocalView);
+
+        if (null == ViEAndroidAPI)
+            ViEAndroidAPI = new ViEAndroidJavaAPI(this);
+
+        if (0 > SetupVoE() || 0 > ViEAndroidAPI.GetVideoEngine() ||
+                0 > ViEAndroidAPI.Init(enableTrace) ) {
+            // Show dialog
+            AlertDialog alertDialog = new AlertDialog.Builder(this).create();
+            alertDialog.setTitle("WebRTC Error");
+            alertDialog.setMessage("Can not init video engine.");
+            alertDialog.setButton("OK", new DialogInterface.OnClickListener() {
+                    public void onClick(DialogInterface dialog, int which) {
+                        return;
+                    } });
+            alertDialog.show();
+        }
+
+        btSwitchCamera = (Button)findViewById(R.id.btSwitchCamera);
+        btSwitchCamera.setOnClickListener(this);
+        btStartStopCall = (Button)findViewById(R.id.btStartStopCall);
+        btStartStopCall.setOnClickListener(this);
+        findViewById(R.id.btExit).setOnClickListener(this);
+
+        // cleaning
+        remoteSurfaceView = null;
+        svLocal = null;
+
+        // Video codec
+        mVideoCodecsStrings = ViEAndroidAPI.GetCodecs();
+        spCodecType = (Spinner)findViewById(R.id.spCodecType);
+        spCodecType.setOnItemSelectedListener(this);
+        spCodecType.setAdapter(new SpinnerAdapter(this,
+                        R.layout.row,
+                        mVideoCodecsStrings));
+        spCodecType.setSelection(0);
+
+        // Video Codec size
+        spCodecSize = (Spinner) findViewById(R.id.spCodecSize);
+        spCodecSize.setOnItemSelectedListener(this);
+        spCodecSize.setAdapter(new SpinnerAdapter(this,
+                        R.layout.row,
+                        mVideoCodecsSizeStrings));
+        spCodecSize.setSelection(0);
+
+        // Voice codec
+        mVoiceCodecsStrings = ViEAndroidAPI.VoE_GetCodecs();
+        spVoiceCodecType = (Spinner)findViewById(R.id.spVoiceCodecType);
+        spVoiceCodecType.setOnItemSelectedListener(this);
+        spVoiceCodecType.setAdapter(new SpinnerAdapter(this,
+                        R.layout.row,
+                        mVoiceCodecsStrings));
+        spVoiceCodecType.setSelection(0);
+        // Find PCMU and use it
+        for (int i=0; i<mVoiceCodecsStrings.length; ++i) {
+            if (mVoiceCodecsStrings[i].contains("PCMU")) {
+                spVoiceCodecType.setSelection(i);
+                break;
+            }
+        }
+
+        RadioGroup radioGroup = (RadioGroup)findViewById(R.id.radio_group1);
+        radioGroup.clearCheck();
+        if (useOpenGLRender == true) {
+            radioGroup.check(R.id.radio_opengl);
+        }
+        else {
+            radioGroup.check(R.id.radio_surface);
+        }
+
+        etRemoteIp = (EditText) findViewById(R.id.etRemoteIp);
+        etRemoteIp.setText(remoteIp);
+
+        cbLoopback = (CheckBox) findViewById(R.id.cbLoopback);
+        cbLoopback.setChecked(loopbackMode);
+
+        cbStats = (CheckBox) findViewById(R.id.cbStats);
+        cbStats.setChecked(isStatsOn);
+
+        cbVoice = (CheckBox) findViewById(R.id.cbVoice);
+        cbVoice.setChecked(enableVoice);
+
+        cbVideoSend = (CheckBox) findViewById(R.id.cbVideoSend);
+        cbVideoSend.setChecked(enableVideoSend);
+        cbVideoReceive = (CheckBox) findViewById(R.id.cbVideoReceive);
+        cbVideoReceive.setChecked(enableVideoReceive);
+
+        etVTxPort = (EditText) findViewById(R.id.etVTxPort);
+        etVTxPort.setText(Integer.toString(destinationPortVideo));
+
+        etVRxPort = (EditText) findViewById(R.id.etVRxPort);
+        etVRxPort.setText(Integer.toString(receivePortVideo));
+
+        etATxPort = (EditText) findViewById(R.id.etATxPort);
+        etATxPort.setText(Integer.toString(destinationPortVoice));
+
+        etARxPort = (EditText) findViewById(R.id.etARxPort);
+        etARxPort.setText(Integer.toString(receivePortVoice));
+
+        cbEnableNack = (CheckBox) findViewById(R.id.cbNack);
+        cbEnableNack.setChecked(enableNack);
+
+        cbEnableSpeaker = (CheckBox) findViewById(R.id.cbSpeaker);
+        cbEnableSpeaker.setChecked(enableSpeaker);
+        cbEnableAGC = (CheckBox) findViewById(R.id.cbAutoGainControl);
+        cbEnableAGC.setChecked(enableAGC);
+        cbEnableAECM = (CheckBox) findViewById(R.id.cbAECM);
+        cbEnableAECM.setChecked(enableAECM);
+        cbEnableNS = (CheckBox) findViewById(R.id.cbNoiseSuppression);
+        cbEnableNS.setChecked(enableNS);
+
+        cbEnableDebugAPM = (CheckBox) findViewById(R.id.cbDebugRecording);
+        cbEnableDebugAPM.setChecked(false);  // Disable APM debugging by default
+
+        cbEnableVideoRTPDump = (CheckBox) findViewById(R.id.cbVideoRTPDump);
+        cbEnableVideoRTPDump.setChecked(false);  // Disable Video RTP Dump
+
+        cbEnableVoiceRTPDump = (CheckBox) findViewById(R.id.cbVoiceRTPDump);
+        cbEnableVoiceRTPDump.setChecked(false);  // Disable Voice RTP Dump
+
+        etRemoteIp.setOnClickListener(this);
+        cbLoopback.setOnClickListener(this);
+        cbStats.setOnClickListener(this);
+        cbEnableNack.setOnClickListener(this);
+        cbEnableSpeaker.setOnClickListener(this);
+        cbEnableAECM.setOnClickListener(this);
+        cbEnableAGC.setOnClickListener(this);
+        cbEnableNS.setOnClickListener(this);
+        cbEnableDebugAPM.setOnClickListener(this);
+        cbEnableVideoRTPDump.setOnClickListener(this);
+        cbEnableVoiceRTPDump.setOnClickListener(this);
+
+        if (loopbackMode) {
+            remoteIp = LOOPBACK_IP;
+            etRemoteIp.setText(remoteIp);
+        }
+        else {
+            GetLocalIpAddress();
+            etRemoteIp.setText(remoteIp);
+        }
+
+        // Read settings to refresh each configuration
+        ReadSettings();
+    }
+
+    private String GetRemoteIPString() {
+        return etRemoteIp.getText().toString();
+    }
+
+    private void StartCall() {
+        int ret = 0;
+
+        if (enableVoice) {
+            StartVoiceEngine();
+        }
+
+        if (enableVideo) {
+            if (enableVideoSend) {
+                // camera and preview surface
+                svLocal = ViERenderer.CreateLocalRenderer(this);
+            }
+
+            channel = ViEAndroidAPI.CreateChannel(voiceChannel);
+            ret = ViEAndroidAPI.SetLocalReceiver(channel,
+                                                 receivePortVideo);
+            ret = ViEAndroidAPI.SetSendDestination(channel,
+                                                   destinationPortVideo,
+                                                   GetRemoteIPString());
+
+            if (enableVideoReceive) {
+                if(useOpenGLRender) {
+                    Log.v(TAG, "Create OpenGL Render");
+                    remoteSurfaceView = ViERenderer.CreateRenderer(this, true);
+                    ret = ViEAndroidAPI.AddRemoteRenderer(channel, remoteSurfaceView);
+                }
+                else {
+                    Log.v(TAG, "Create SurfaceView Render");
+                    remoteSurfaceView = ViERenderer.CreateRenderer(this, false);
+                    ret = ViEAndroidAPI.AddRemoteRenderer(channel, remoteSurfaceView);
+                }
+
+                ret = ViEAndroidAPI.SetReceiveCodec(channel,
+                        codecType,
+                        INIT_BITRATE,
+                        codecSizeWidth,
+                        codecSizeHeight,
+                        RECEIVE_CODEC_FRAMERATE);
+                ret = ViEAndroidAPI.StartRender(channel);
+                ret = ViEAndroidAPI.StartReceive(channel);
+            }
+
+            if (enableVideoSend) {
+                currentCameraOrientation =
+                        ViEAndroidAPI.GetCameraOrientation(usingFrontCamera?1:0);
+                ret = ViEAndroidAPI.SetSendCodec(channel, codecType, INIT_BITRATE,
+                        codecSizeWidth, codecSizeHeight, SEND_CODEC_FRAMERATE);
+                int camId = ViEAndroidAPI.StartCamera(channel, usingFrontCamera?1:0);
+
+                if(camId > 0) {
+                    cameraId = camId;
+                    int neededRotation = GetCameraOrientation(currentCameraOrientation);
+                    ViEAndroidAPI.SetRotation(cameraId, neededRotation);
+                }
+                else {
+                    ret = camId;
+                }
+                ret = ViEAndroidAPI.StartSend(channel);
+            }
+
+            // TODO(leozwang): Add more options besides PLI, currently use pli
+            // as the default. Also check return value.
+            ret = ViEAndroidAPI.EnablePLI(channel, true);
+            ret = ViEAndroidAPI.SetCallback(channel, this);
+
+            if (enableVideoSend) {
+                if (mLlLocalSurface != null)
+                    mLlLocalSurface.addView(svLocal);
+            }
+
+            if (enableVideoReceive) {
+                if (mLlRemoteSurface != null) {
+                    mLlRemoteSurface.addView(remoteSurfaceView);
+                }
+            }
+
+            isStatsOn = cbStats.isChecked();
+            if (isStatsOn) {
+                AddStatsView();
+            }
+            else {
+                RemoveSatsView();
+            }
+
+            viERunning = true;
+        }
+    }
+
+    private void StopVoiceEngine() {
+        // Stop send
+        if (0 != ViEAndroidAPI.VoE_StopSend(voiceChannel)) {
+            Log.d(TAG, "VoE stop send failed");
+        }
+
+        // Stop listen
+        if (0 != ViEAndroidAPI.VoE_StopListen(voiceChannel)) {
+            Log.d(TAG, "VoE stop listen failed");
+        }
+
+        // Stop playout
+        if (0 != ViEAndroidAPI.VoE_StopPlayout(voiceChannel)) {
+            Log.d(TAG, "VoE stop playout failed");
+        }
+
+        if (0 != ViEAndroidAPI.VoE_DeleteChannel(voiceChannel)) {
+            Log.d(TAG, "VoE delete channel failed");
+        }
+        voiceChannel=-1;
+
+        // Terminate
+        if (0 != ViEAndroidAPI.VoE_Terminate()) {
+            Log.d(TAG, "VoE terminate failed");
+        }
+    }
+
+    private int SetupVoE() {
+        // Create VoiceEngine
+        // Error logging is done in native API wrapper
+        ViEAndroidAPI.VoE_Create();
+
+        // Initialize
+        if (0 != ViEAndroidAPI.VoE_Init(enableTrace)) {
+            Log.d(TAG, "VoE init failed");
+            return -1;
+        }
+
+        // Create channel
+        voiceChannel = ViEAndroidAPI.VoE_CreateChannel();
+        if (0 != voiceChannel) {
+            Log.d(TAG, "VoE create channel failed");
+            return -1;
+        }
+
+        // Suggest to use the voice call audio stream for hardware volume controls
+        setVolumeControlStream(AudioManager.STREAM_VOICE_CALL);
+        return 0;
+    }
+
+    private int StartVoiceEngine() {
+        // Set local receiver
+        if (0 != ViEAndroidAPI.VoE_SetLocalReceiver(voiceChannel,
+                        receivePortVoice)) {
+            Log.d(TAG, "VoE set local receiver failed");
+        }
+
+        if (0 != ViEAndroidAPI.VoE_StartListen(voiceChannel)) {
+            Log.d(TAG, "VoE start listen failed");
+        }
+
+        // Route audio
+        RouteAudio(enableSpeaker);
+
+        // set volume to default value
+        if (0 != ViEAndroidAPI.VoE_SetSpeakerVolume(volumeLevel)) {
+            Log.d(TAG, "VoE set speaker volume failed");
+        }
+
+        // Start playout
+        if (0 != ViEAndroidAPI.VoE_StartPlayout(voiceChannel)) {
+            Log.d(TAG, "VoE start playout failed");
+        }
+
+        if (0 != ViEAndroidAPI.VoE_SetSendDestination(voiceChannel,
+                                                      destinationPortVoice,
+                                                      GetRemoteIPString())) {
+            Log.d(TAG, "VoE set send  destination failed");
+        }
+
+        if (0 != ViEAndroidAPI.VoE_SetSendCodec(voiceChannel, voiceCodecType)) {
+            Log.d(TAG, "VoE set send codec failed");
+        }
+
+        if (0 != ViEAndroidAPI.VoE_SetECStatus(enableAECM)) {
+            Log.d(TAG, "VoE set EC Status failed");
+        }
+
+        if (0 != ViEAndroidAPI.VoE_SetAGCStatus(enableAGC)) {
+            Log.d(TAG, "VoE set AGC Status failed");
+        }
+
+        if (0 != ViEAndroidAPI.VoE_SetNSStatus(enableNS)) {
+            Log.d(TAG, "VoE set NS Status failed");
+        }
+
+        if (0 != ViEAndroidAPI.VoE_StartSend(voiceChannel)) {
+            Log.d(TAG, "VoE start send failed");
+        }
+
+        voERunning = true;
+        return 0;
+    }
+
+    private void RouteAudio(boolean enableSpeaker) {
+        int sdkVersion = Integer.parseInt(android.os.Build.VERSION.SDK);
+        if (sdkVersion >= 5) {
+            AudioManager am =
+                    (AudioManager) this.getSystemService(Context.AUDIO_SERVICE);
+            am.setSpeakerphoneOn(enableSpeaker);
+        }
+        else {
+            if (0 != ViEAndroidAPI.VoE_SetLoudspeakerStatus(enableSpeaker)) {
+                Log.d(TAG, "VoE set louspeaker status failed");
+            }
+        }
+    }
+
+    public void onClick(View arg0) {
+        switch (arg0.getId()) {
+            case R.id.btSwitchCamera:
+                if (usingFrontCamera ){
+                    btSwitchCamera.setText(R.string.frontCamera);
+                }
+                else {
+                    btSwitchCamera.setText(R.string.backCamera);
+                }
+                usingFrontCamera = !usingFrontCamera;
+
+                if (viERunning) {
+                    currentCameraOrientation =
+                            ViEAndroidAPI.GetCameraOrientation(usingFrontCamera?1:0);
+                    ViEAndroidAPI.StopCamera(cameraId);
+                    mLlLocalSurface.removeView(svLocal);
+
+                    ViEAndroidAPI.StartCamera(channel,usingFrontCamera?1:0);
+                    mLlLocalSurface.addView(svLocal);
+                    int neededRotation = GetCameraOrientation(currentCameraOrientation);
+                    ViEAndroidAPI.SetRotation(cameraId, neededRotation);
+                }
+                break;
+            case R.id.btStartStopCall:
+                ReadSettings();
+                if (viERunning || voERunning) {
+                    StopAll();
+                    wakeLock.release(); // release the wake lock
+                    btStartStopCall.setText(R.string.startCall);
+                }
+                else if (enableVoice || enableVideo){
+                    StartCall();
+                    wakeLock.acquire(); // screen stay on during the call
+                    btStartStopCall.setText(R.string.stopCall);
+                }
+                break;
+            case R.id.btExit:
+                StopAll();
+                finish();
+                break;
+            case R.id.cbLoopback:
+                loopbackMode  = cbLoopback.isChecked();
+                if (loopbackMode) {
+                    remoteIp = LOOPBACK_IP;
+                    etRemoteIp.setText(LOOPBACK_IP);
+                }
+                else {
+                    GetLocalIpAddress();
+                    etRemoteIp.setText(remoteIp);
+                }
+                break;
+            case R.id.etRemoteIp:
+                remoteIp = etRemoteIp.getText().toString();
+                break;
+            case R.id.cbStats:
+                isStatsOn = cbStats.isChecked();
+                if (isStatsOn) {
+                    AddStatsView();
+                }
+                else {
+                    RemoveSatsView();
+                }
+                break;
+            case R.id.radio_surface:
+                useOpenGLRender = false;
+                break;
+            case R.id.radio_opengl:
+                useOpenGLRender = true;
+                break;
+            case R.id.cbNack:
+                enableNack  = cbEnableNack.isChecked();
+                if (viERunning) {
+                    ViEAndroidAPI.EnableNACK(channel, enableNack);
+                }
+                break;
+            case R.id.cbSpeaker:
+                enableSpeaker = cbEnableSpeaker.isChecked();
+                if (voERunning){
+                    RouteAudio(enableSpeaker);
+                }
+                break;
+            case R.id.cbDebugRecording:
+                if(voERunning && webrtcDebugDir != null) {
+                    if (cbEnableDebugAPM.isChecked() ) {
+                        ViEAndroidAPI.VoE_StartDebugRecording(
+                            webrtcDebugDir + String.format("/apm_%d.dat",
+                                    System.currentTimeMillis()));
+                    }
+                    else {
+                        ViEAndroidAPI.VoE_StopDebugRecording();
+                    }
+                }
+                break;
+            case R.id.cbVoiceRTPDump:
+                if(voERunning && webrtcDebugDir != null) {
+                    if (cbEnableVoiceRTPDump.isChecked() ) {
+                        ViEAndroidAPI.VoE_StartIncomingRTPDump(channel,
+                                webrtcDebugDir + String.format("/voe_%d.rtp",
+                                        System.currentTimeMillis()));
+                    }
+                    else {
+                        ViEAndroidAPI.VoE_StopIncomingRTPDump(channel);
+                    }
+                }
+                break;
+            case R.id.cbVideoRTPDump:
+                if(viERunning && webrtcDebugDir != null) {
+                    if (cbEnableVideoRTPDump.isChecked() ) {
+                        ViEAndroidAPI.StartIncomingRTPDump(channel,
+                                webrtcDebugDir + String.format("/vie_%d.rtp",
+                                        System.currentTimeMillis()));
+                    }
+                    else {
+                        ViEAndroidAPI.StopIncomingRTPDump(channel);
+                    }
+                }
+                break;
+            case R.id.cbAutoGainControl:
+                enableAGC=cbEnableAGC.isChecked();
+                if(voERunning) {
+                    ViEAndroidAPI.VoE_SetAGCStatus(enableAGC);
+                }
+                break;
+            case R.id.cbNoiseSuppression:
+                enableNS=cbEnableNS.isChecked();
+                if(voERunning) {
+                    ViEAndroidAPI.VoE_SetNSStatus(enableNS);
+                }
+                break;
+            case R.id.cbAECM:
+                enableAECM = cbEnableAECM.isChecked();
+                if (voERunning) {
+                    ViEAndroidAPI.VoE_SetECStatus(enableAECM);
+                }
+                break;
+        }
+    }
+
+    private void ReadSettings() {
+        codecType = spCodecType.getSelectedItemPosition();
+        voiceCodecType = spVoiceCodecType.getSelectedItemPosition();
+
+        String sCodecSize = spCodecSize.getSelectedItem().toString();
+        String[] aCodecSize = sCodecSize.split("x");
+        codecSizeWidth = Integer.parseInt(aCodecSize[0]);
+        codecSizeHeight = Integer.parseInt(aCodecSize[1]);
+
+        loopbackMode  = cbLoopback.isChecked();
+        enableVoice  = cbVoice.isChecked();
+        enableVideoSend = cbVideoSend.isChecked();
+        enableVideoReceive = cbVideoReceive.isChecked();
+        enableVideo = enableVideoSend || enableVideoReceive;
+
+        destinationPortVideo =
+                Integer.parseInt(etVTxPort.getText().toString());
+        receivePortVideo =
+                Integer.parseInt(etVRxPort.getText().toString());
+        destinationPortVoice =
+                Integer.parseInt(etATxPort.getText().toString());
+        receivePortVoice =
+                Integer.parseInt(etARxPort.getText().toString());
+
+        enableNack  = cbEnableNack.isChecked();
+        enableSpeaker  = cbEnableSpeaker.isChecked();
+        enableAGC  = cbEnableAGC.isChecked();
+        enableAECM  = cbEnableAECM.isChecked();
+        enableNS  = cbEnableNS.isChecked();
+    }
+
+    public void onItemSelected(AdapterView<?> adapterView, View view,
+            int position, long id) {
+        if ((adapterView == spCodecType || adapterView == spCodecSize) &&
+                viERunning) {
+            ReadSettings();
+            // change the codectype
+            if (enableVideoReceive) {
+                if (0 != ViEAndroidAPI.SetReceiveCodec(channel, codecType,
+                                INIT_BITRATE, codecSizeWidth,
+                                codecSizeHeight,
+                                RECEIVE_CODEC_FRAMERATE))
+                    Log.d(TAG, "ViE set receive codec failed");
+            }
+            if (enableVideoSend) {
+                if (0 != ViEAndroidAPI.SetSendCodec(channel, codecType,
+                                INIT_BITRATE, codecSizeWidth, codecSizeHeight,
+                                SEND_CODEC_FRAMERATE))
+                    Log.d(TAG, "ViE set send codec failed");
+            }
+        }
+        else if ((adapterView == spVoiceCodecType) && voERunning) {
+            // change voice engine codec
+            ReadSettings();
+            if (0 != ViEAndroidAPI.VoE_SetSendCodec(voiceChannel, voiceCodecType)) {
+                Log.d(TAG, "VoE set send codec failed");
+            }
+        }
+    }
+
+    public void onNothingSelected(AdapterView<?> arg0) {
+        Log.d(TAG, "No setting selected");
+    }
+
+    public int UpdateStats(int in_frameRateI, int in_bitRateI, int in_packetLoss,
+            int in_frameRateO, int in_bitRateO) {
+        frameRateI = in_frameRateI;
+        bitRateI = in_bitRateI;
+        packetLoss = in_packetLoss;
+        frameRateO = in_frameRateO;
+        bitRateO = in_bitRateO;
+        return 0;
+    }
+
+    private void AddStatsView() {
+        if (statsView != null) {
+            return;
+        }
+        statsView = new StatsView(this);
+        WindowManager.LayoutParams params = new WindowManager.LayoutParams(
+            WindowManager.LayoutParams.MATCH_PARENT,
+            WindowManager.LayoutParams.WRAP_CONTENT,
+            WindowManager.LayoutParams.TYPE_SYSTEM_OVERLAY,
+            WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE |
+            WindowManager.LayoutParams.FLAG_NOT_TOUCHABLE,
+            PixelFormat.TRANSLUCENT);
+        params.gravity = Gravity.RIGHT | Gravity.TOP;
+        params.setTitle("Load Average");
+        mTabHost.addView(statsView, params);
+        statsView.setBackgroundColor(0);
+    }
+
+    private void RemoveSatsView() {
+        mTabHost.removeView(statsView);
+        statsView = null;
+    }
+}
diff --git a/src/video_engine/stream_synchronization.cc b/src/video_engine/stream_synchronization.cc
new file mode 100644
index 0000000..1ba1f09
--- /dev/null
+++ b/src/video_engine/stream_synchronization.cc
@@ -0,0 +1,240 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/stream_synchronization.h"
+#include "system_wrappers/interface/trace.h"
+
+namespace webrtc {
+
+enum { kMaxVideoDiffMs = 80 };
+enum { kMaxAudioDiffMs = 80 };
+enum { kMaxDelay = 1500 };
+
+const float FracMS = 4.294967296E6f;
+
+struct ViESyncDelay {
+  ViESyncDelay() {
+    extra_video_delay_ms = 0;
+    last_video_delay_ms = 0;
+    extra_audio_delay_ms = 0;
+    last_sync_delay = 0;
+    network_delay = 120;
+  }
+
+  int extra_video_delay_ms;
+  int last_video_delay_ms;
+  int extra_audio_delay_ms;
+  int last_sync_delay;
+  int network_delay;
+};
+
+StreamSynchronization::StreamSynchronization(int audio_channel_id,
+                                             int video_channel_id)
+    : channel_delay_(new ViESyncDelay),
+      audio_channel_id_(audio_channel_id),
+      video_channel_id_(video_channel_id) {}
+
+StreamSynchronization::~StreamSynchronization() {
+  delete channel_delay_;
+}
+
+int StreamSynchronization::ComputeDelays(const Measurements& audio,
+                                         int current_audio_delay_ms,
+                                         int* extra_audio_delay_ms,
+                                         const Measurements& video,
+                                         int* total_video_delay_target_ms) {
+  // ReceivedNTPxxx is NTP at sender side when sent.
+  // RTCPArrivalTimexxx is NTP at receiver side when received.
+  // can't use ConvertNTPTimeToMS since calculation can be
+  //  negative
+  int NTPdiff = (audio.received_ntp_secs - video.received_ntp_secs)
+                * 1000;  // ms
+  float ntp_diff_frac = audio.received_ntp_frac / FracMS -
+        video.received_ntp_frac / FracMS;
+  if (ntp_diff_frac > 0.0f)
+    NTPdiff += static_cast<int>(ntp_diff_frac + 0.5f);
+  else
+    NTPdiff += static_cast<int>(ntp_diff_frac - 0.5f);
+
+  int RTCPdiff = (audio.rtcp_arrivaltime_secs - video.rtcp_arrivaltime_secs)
+                 * 1000;  // ms
+  float rtcp_diff_frac = audio.rtcp_arrivaltime_frac / FracMS -
+        video.rtcp_arrivaltime_frac / FracMS;
+  if (rtcp_diff_frac > 0.0f)
+    RTCPdiff += static_cast<int>(rtcp_diff_frac + 0.5f);
+  else
+    RTCPdiff += static_cast<int>(rtcp_diff_frac - 0.5f);
+
+  int diff = NTPdiff - RTCPdiff;
+  // if diff is + video is behind
+  if (diff < -1000 || diff > 1000) {
+    // unresonable ignore value.
+    return -1;
+  }
+  channel_delay_->network_delay = diff;
+
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, video_channel_id_,
+               "Audio delay is: %d for voice channel: %d",
+               current_audio_delay_ms, audio_channel_id_);
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, video_channel_id_,
+               "Network delay diff is: %d for voice channel: %d",
+               channel_delay_->network_delay, audio_channel_id_);
+  // Calculate the difference between the lowest possible video delay and
+  // the current audio delay.
+  int current_diff_ms = *total_video_delay_target_ms - current_audio_delay_ms +
+      channel_delay_->network_delay;
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, video_channel_id_,
+               "Current diff is: %d for audio channel: %d",
+               current_diff_ms, audio_channel_id_);
+
+  int video_delay_ms = 0;
+  if (current_diff_ms > 0) {
+    // The minimum video delay is longer than the current audio delay.
+    // We need to decrease extra video delay, if we have added extra delay
+    // earlier, or add extra audio delay.
+    if (channel_delay_->extra_video_delay_ms > 0) {
+      // We have extra delay added to ViE. Reduce this delay before adding
+      // extra delay to VoE.
+
+      // This is the desired delay, we can't reduce more than this.
+      video_delay_ms = *total_video_delay_target_ms;
+
+      // Check that we don't reduce the delay more than what is allowed.
+      if (video_delay_ms <
+          channel_delay_->last_video_delay_ms - kMaxVideoDiffMs) {
+        video_delay_ms =
+            channel_delay_->last_video_delay_ms - kMaxVideoDiffMs;
+        channel_delay_->extra_video_delay_ms =
+            video_delay_ms - *total_video_delay_target_ms;
+      } else {
+        channel_delay_->extra_video_delay_ms = 0;
+      }
+      channel_delay_->last_video_delay_ms = video_delay_ms;
+      channel_delay_->last_sync_delay = -1;
+      channel_delay_->extra_audio_delay_ms = 0;
+    } else {  // channel_delay_->extra_video_delay_ms > 0
+      // We have no extra video delay to remove, increase the audio delay.
+      if (channel_delay_->last_sync_delay >= 0) {
+        // We have increased the audio delay earlier, increase it even more.
+        int audio_diff_ms = current_diff_ms / 2;
+        if (audio_diff_ms > kMaxAudioDiffMs) {
+          // We only allow a maximum change of KMaxAudioDiffMS for audio
+          // due to NetEQ maximum changes.
+          audio_diff_ms = kMaxAudioDiffMs;
+        }
+        // Increase the audio delay
+        channel_delay_->extra_audio_delay_ms += audio_diff_ms;
+
+        // Don't set a too high delay.
+        if (channel_delay_->extra_audio_delay_ms > kMaxDelay) {
+          channel_delay_->extra_audio_delay_ms = kMaxDelay;
+        }
+
+        // Don't add any extra video delay.
+        video_delay_ms = *total_video_delay_target_ms;
+        channel_delay_->extra_video_delay_ms = 0;
+        channel_delay_->last_video_delay_ms = video_delay_ms;
+        channel_delay_->last_sync_delay = 1;
+      } else {  // channel_delay_->last_sync_delay >= 0
+        // First time after a delay change, don't add any extra delay.
+        // This is to not toggle back and forth too much.
+        channel_delay_->extra_audio_delay_ms = 0;
+        // Set minimum video delay
+        video_delay_ms = *total_video_delay_target_ms;
+        channel_delay_->extra_video_delay_ms = 0;
+        channel_delay_->last_video_delay_ms = video_delay_ms;
+        channel_delay_->last_sync_delay = 0;
+      }
+    }
+  } else {  // if (current_diffMS > 0)
+    // The minimum video delay is lower than the current audio delay.
+    // We need to decrease possible extra audio delay, or
+    // add extra video delay.
+
+    if (channel_delay_->extra_audio_delay_ms > 0) {
+      // We have extra delay in VoiceEngine
+      // Start with decreasing the voice delay
+      int audio_diff_ms = current_diff_ms / 2;
+      if (audio_diff_ms < -1 * kMaxAudioDiffMs) {
+        // Don't change the delay too much at once.
+        audio_diff_ms = -1 * kMaxAudioDiffMs;
+      }
+      // Add the negative difference.
+      channel_delay_->extra_audio_delay_ms += audio_diff_ms;
+
+      if (channel_delay_->extra_audio_delay_ms < 0) {
+        // Negative values not allowed.
+        channel_delay_->extra_audio_delay_ms = 0;
+        channel_delay_->last_sync_delay = 0;
+      } else {
+        // There is more audio delay to use for the next round.
+        channel_delay_->last_sync_delay = 1;
+      }
+
+      // Keep the video delay at the minimum values.
+      video_delay_ms = *total_video_delay_target_ms;
+      channel_delay_->extra_video_delay_ms = 0;
+      channel_delay_->last_video_delay_ms = video_delay_ms;
+    } else {  // channel_delay_->extra_audio_delay_ms > 0
+      // We have no extra delay in VoiceEngine, increase the video delay.
+      channel_delay_->extra_audio_delay_ms = 0;
+
+      // Make the difference positive.
+      int video_diff_ms = -1 * current_diff_ms;
+
+      // This is the desired delay.
+      video_delay_ms = *total_video_delay_target_ms + video_diff_ms;
+      if (video_delay_ms > channel_delay_->last_video_delay_ms) {
+        if (video_delay_ms >
+            channel_delay_->last_video_delay_ms + kMaxVideoDiffMs) {
+          // Don't increase the delay too much at once
+          video_delay_ms =
+              channel_delay_->last_video_delay_ms + kMaxVideoDiffMs;
+        }
+        // Verify we don't go above the maximum allowed delay
+        if (video_delay_ms > kMaxDelay) {
+          video_delay_ms = kMaxDelay;
+        }
+      } else {
+        if (video_delay_ms <
+            channel_delay_->last_video_delay_ms - kMaxVideoDiffMs) {
+          // Don't decrease the delay too much at once
+          video_delay_ms =
+              channel_delay_->last_video_delay_ms - kMaxVideoDiffMs;
+        }
+        // Verify we don't go below the minimum delay
+        if (video_delay_ms < *total_video_delay_target_ms) {
+          video_delay_ms = *total_video_delay_target_ms;
+        }
+      }
+      // Store the values
+      channel_delay_->extra_video_delay_ms =
+          video_delay_ms - *total_video_delay_target_ms;
+      channel_delay_->last_video_delay_ms = video_delay_ms;
+      channel_delay_->last_sync_delay = -1;
+    }
+  }
+
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, video_channel_id_,
+      "Sync video delay %d ms for video channel and audio delay %d for audio "
+      "channel %d",
+      video_delay_ms, channel_delay_->extra_audio_delay_ms, audio_channel_id_);
+
+  *extra_audio_delay_ms = channel_delay_->extra_audio_delay_ms;
+
+  if (video_delay_ms < 0) {
+    video_delay_ms = 0;
+  }
+  *total_video_delay_target_ms =
+      (*total_video_delay_target_ms  >  video_delay_ms) ?
+      *total_video_delay_target_ms : video_delay_ms;
+  return 0;
+}
+}  // namespace webrtc
diff --git a/src/video_engine/stream_synchronization.h b/src/video_engine/stream_synchronization.h
new file mode 100644
index 0000000..6da5921
--- /dev/null
+++ b/src/video_engine/stream_synchronization.h
@@ -0,0 +1,51 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_STREAM_SYNCHRONIZATION_H_
+#define WEBRTC_VIDEO_ENGINE_STREAM_SYNCHRONIZATION_H_
+
+#include "typedefs.h"  // NOLINT
+
+namespace webrtc {
+
+struct ViESyncDelay;
+
+class StreamSynchronization {
+ public:
+  struct Measurements {
+    Measurements()
+        : received_ntp_secs(0),
+          received_ntp_frac(0),
+          rtcp_arrivaltime_secs(0),
+          rtcp_arrivaltime_frac(0) {}
+    uint32_t received_ntp_secs;
+    uint32_t received_ntp_frac;
+    uint32_t rtcp_arrivaltime_secs;
+    uint32_t rtcp_arrivaltime_frac;
+  };
+
+  StreamSynchronization(int audio_channel_id, int video_channel_id);
+  ~StreamSynchronization();
+
+  int ComputeDelays(const Measurements& audio,
+                    int current_audio_delay_ms,
+                    int* extra_audio_delay_ms,
+                    const Measurements& video,
+                    int* total_video_delay_target_ms);
+
+ private:
+  ViESyncDelay* channel_delay_;
+  int audio_channel_id_;
+  int video_channel_id_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_STREAM_SYNCHRONIZATION_H_
diff --git a/src/video_engine/stream_synchronization_unittest.cc b/src/video_engine/stream_synchronization_unittest.cc
new file mode 100644
index 0000000..e0a7494
--- /dev/null
+++ b/src/video_engine/stream_synchronization_unittest.cc
@@ -0,0 +1,429 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <math.h>
+#include <algorithm>
+
+#include "gtest/gtest.h"
+#include "video_engine/stream_synchronization.h"
+
+namespace webrtc {
+
+// These correspond to the same constants defined in vie_sync_module.cc.
+enum { kMaxVideoDiffMs = 80 };
+enum { kMaxAudioDiffMs = 80 };
+enum { kMaxDelay = 1500 };
+
+class Time {
+ public:
+  explicit Time(int64_t offset)
+      : kNtpJan1970(2208988800UL),
+        time_now_ms_(offset) {}
+
+  void NowNtp(uint32_t* ntp_secs, uint32_t* ntp_frac) const {
+    *ntp_secs = time_now_ms_ / 1000 + kNtpJan1970;
+    int64_t remainder = time_now_ms_ % 1000;
+    *ntp_frac = static_cast<uint32_t>(
+        static_cast<double>(remainder) / 1000.0 * pow(2.0, 32.0) + 0.5);
+  }
+
+  void IncreaseTimeMs(int64_t inc) {
+    time_now_ms_ += inc;
+  }
+
+  int64_t time_now_ms() const {
+    return time_now_ms_;
+  }
+ private:
+  // January 1970, in NTP seconds.
+  const uint32_t kNtpJan1970;
+  int64_t time_now_ms_;
+};
+
+class StreamSynchronizationTest : public ::testing::Test {
+ protected:
+  virtual void SetUp() {
+    sync_ = new StreamSynchronization(0, 0);
+    send_time_ = new Time(kSendTimeOffsetMs);
+    receive_time_ = new Time(kReceiveTimeOffsetMs);
+  }
+
+  virtual void TearDown() {
+    delete sync_;
+    delete send_time_;
+    delete receive_time_;
+  }
+
+  int DelayedAudio(int delay_ms,
+                   int current_audio_delay_ms,
+                   int* extra_audio_delay_ms,
+                   int* total_video_delay_ms) {
+    StreamSynchronization::Measurements audio;
+    StreamSynchronization::Measurements video;
+    send_time_->NowNtp(&audio.received_ntp_secs, &audio.received_ntp_frac);
+    send_time_->NowNtp(&video.received_ntp_secs, &video.received_ntp_frac);
+    receive_time_->NowNtp(&video.rtcp_arrivaltime_secs,
+                          &video.rtcp_arrivaltime_frac);
+    // Audio later than video.
+    receive_time_->IncreaseTimeMs(delay_ms);
+    receive_time_->NowNtp(&audio.rtcp_arrivaltime_secs,
+                          &audio.rtcp_arrivaltime_frac);
+    return sync_->ComputeDelays(audio,
+                                current_audio_delay_ms,
+                                extra_audio_delay_ms,
+                                video,
+                                total_video_delay_ms);
+  }
+
+  int DelayedVideo(int delay_ms,
+                   int current_audio_delay_ms,
+                   int* extra_audio_delay_ms,
+                   int* total_video_delay_ms) {
+    StreamSynchronization::Measurements audio;
+    StreamSynchronization::Measurements video;
+    send_time_->NowNtp(&audio.received_ntp_secs, &audio.received_ntp_frac);
+    send_time_->NowNtp(&video.received_ntp_secs, &video.received_ntp_frac);
+    receive_time_->NowNtp(&audio.rtcp_arrivaltime_secs,
+                          &audio.rtcp_arrivaltime_frac);
+    // Video later than audio.
+    receive_time_->IncreaseTimeMs(delay_ms);
+    receive_time_->NowNtp(&video.rtcp_arrivaltime_secs,
+                          &video.rtcp_arrivaltime_frac);
+    return sync_->ComputeDelays(audio,
+                                current_audio_delay_ms,
+                                extra_audio_delay_ms,
+                                video,
+                                total_video_delay_ms);
+  }
+
+  int DelayedAudioAndVideo(int audio_delay_ms,
+                           int video_delay_ms,
+                           int current_audio_delay_ms,
+                           int* extra_audio_delay_ms,
+                           int* total_video_delay_ms) {
+    StreamSynchronization::Measurements audio;
+    StreamSynchronization::Measurements video;
+    send_time_->NowNtp(&audio.received_ntp_secs, &audio.received_ntp_frac);
+    send_time_->NowNtp(&video.received_ntp_secs, &video.received_ntp_frac);
+
+    if (audio_delay_ms > video_delay_ms) {
+      // Audio later than video.
+      receive_time_->IncreaseTimeMs(video_delay_ms);
+      receive_time_->NowNtp(&video.rtcp_arrivaltime_secs,
+                            &video.rtcp_arrivaltime_frac);
+      receive_time_->IncreaseTimeMs(audio_delay_ms - video_delay_ms);
+      receive_time_->NowNtp(&audio.rtcp_arrivaltime_secs,
+                            &audio.rtcp_arrivaltime_frac);
+    } else {
+      // Video later than audio.
+      receive_time_->IncreaseTimeMs(audio_delay_ms);
+      receive_time_->NowNtp(&audio.rtcp_arrivaltime_secs,
+                            &audio.rtcp_arrivaltime_frac);
+      receive_time_->IncreaseTimeMs(video_delay_ms - audio_delay_ms);
+      receive_time_->NowNtp(&video.rtcp_arrivaltime_secs,
+                            &video.rtcp_arrivaltime_frac);
+    }
+    return sync_->ComputeDelays(audio,
+                                current_audio_delay_ms,
+                                extra_audio_delay_ms,
+                                video,
+                                total_video_delay_ms);
+  }
+
+  int MaxAudioDelayIncrease(int current_audio_delay_ms, int delay_ms) {
+    return std::min((delay_ms - current_audio_delay_ms) / 2,
+                    static_cast<int>(kMaxAudioDiffMs));
+  }
+
+  int MaxAudioDelayDecrease(int current_audio_delay_ms, int delay_ms) {
+    return std::max((delay_ms - current_audio_delay_ms) / 2, -kMaxAudioDiffMs);
+  }
+
+  enum { kSendTimeOffsetMs = 0 };
+  enum { kReceiveTimeOffsetMs = 123456 };
+
+  StreamSynchronization* sync_;
+  Time* send_time_;
+  Time* receive_time_;
+};
+
+TEST_F(StreamSynchronizationTest, NoDelay) {
+  uint32_t current_audio_delay_ms = 0;
+  int delay_ms = 0;
+  int extra_audio_delay_ms = 0;
+  int total_video_delay_ms = 0;
+
+  EXPECT_EQ(0, DelayedAudio(delay_ms, current_audio_delay_ms,
+                            &extra_audio_delay_ms, &total_video_delay_ms));
+  EXPECT_EQ(0, extra_audio_delay_ms);
+  EXPECT_EQ(0, total_video_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest, VideoDelay) {
+  uint32_t current_audio_delay_ms = 0;
+  int delay_ms = 200;
+  int extra_audio_delay_ms = 0;
+  int total_video_delay_ms = 0;
+
+  EXPECT_EQ(0, DelayedAudio(delay_ms, current_audio_delay_ms,
+                            &extra_audio_delay_ms, &total_video_delay_ms));
+  EXPECT_EQ(0, extra_audio_delay_ms);
+  // The video delay is not allowed to change more than this in 1 second.
+  EXPECT_EQ(kMaxVideoDiffMs, total_video_delay_ms);
+
+  send_time_->IncreaseTimeMs(1000);
+  receive_time_->IncreaseTimeMs(800);
+  // Simulate 0 minimum delay in the VCM.
+  total_video_delay_ms = 0;
+  EXPECT_EQ(0, DelayedAudio(delay_ms, current_audio_delay_ms,
+                            &extra_audio_delay_ms, &total_video_delay_ms));
+  EXPECT_EQ(0, extra_audio_delay_ms);
+  // The video delay is not allowed to change more than this in 1 second.
+  EXPECT_EQ(2*kMaxVideoDiffMs, total_video_delay_ms);
+
+  send_time_->IncreaseTimeMs(1000);
+  receive_time_->IncreaseTimeMs(800);
+  // Simulate 0 minimum delay in the VCM.
+  total_video_delay_ms = 0;
+  EXPECT_EQ(0, DelayedAudio(delay_ms, current_audio_delay_ms,
+                            &extra_audio_delay_ms, &total_video_delay_ms));
+  EXPECT_EQ(0, extra_audio_delay_ms);
+  // The video delay is not allowed to change more than this in 1 second.
+  EXPECT_EQ(delay_ms, total_video_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest, AudioDelay) {
+  int current_audio_delay_ms = 0;
+  int delay_ms = 200;
+  int extra_audio_delay_ms = 0;
+  int total_video_delay_ms = 0;
+
+  EXPECT_EQ(0, DelayedVideo(delay_ms, current_audio_delay_ms,
+                            &extra_audio_delay_ms, &total_video_delay_ms));
+  EXPECT_EQ(0, total_video_delay_ms);
+  // The audio delay is not allowed to change more than this in 1 second.
+  EXPECT_EQ(kMaxAudioDiffMs, extra_audio_delay_ms);
+  current_audio_delay_ms = extra_audio_delay_ms;
+  int current_extra_delay_ms = extra_audio_delay_ms;
+
+  send_time_->IncreaseTimeMs(1000);
+  receive_time_->IncreaseTimeMs(800);
+  EXPECT_EQ(0, DelayedVideo(delay_ms, current_audio_delay_ms,
+                            &extra_audio_delay_ms, &total_video_delay_ms));
+  EXPECT_EQ(0, total_video_delay_ms);
+  // The audio delay is not allowed to change more than the half of the required
+  // change in delay.
+  EXPECT_EQ(current_extra_delay_ms +
+            MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms),
+            extra_audio_delay_ms);
+  current_audio_delay_ms = extra_audio_delay_ms;
+  current_extra_delay_ms = extra_audio_delay_ms;
+
+  send_time_->IncreaseTimeMs(1000);
+  receive_time_->IncreaseTimeMs(800);
+  EXPECT_EQ(0, DelayedVideo(delay_ms, current_audio_delay_ms,
+                            &extra_audio_delay_ms, &total_video_delay_ms));
+  EXPECT_EQ(0, total_video_delay_ms);
+  // The audio delay is not allowed to change more than the half of the required
+  // change in delay.
+  EXPECT_EQ(current_extra_delay_ms +
+            MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms),
+            extra_audio_delay_ms);
+  current_extra_delay_ms = extra_audio_delay_ms;
+
+  // Simulate that NetEQ for some reason reduced the delay.
+  current_audio_delay_ms = 170;
+  send_time_->IncreaseTimeMs(1000);
+  receive_time_->IncreaseTimeMs(800);
+  EXPECT_EQ(0, DelayedVideo(delay_ms, current_audio_delay_ms,
+                            &extra_audio_delay_ms, &total_video_delay_ms));
+  EXPECT_EQ(0, total_video_delay_ms);
+  // Since we only can ask NetEQ for a certain amount of extra delay, and
+  // we only measure the total NetEQ delay, we will ask for additional delay
+  // here to try to
+  EXPECT_EQ(current_extra_delay_ms +
+            MaxAudioDelayIncrease(current_audio_delay_ms, delay_ms),
+            extra_audio_delay_ms);
+  current_extra_delay_ms = extra_audio_delay_ms;
+
+  // Simulate that NetEQ for some reason significantly increased the delay.
+  current_audio_delay_ms = 250;
+  send_time_->IncreaseTimeMs(1000);
+  receive_time_->IncreaseTimeMs(800);
+  EXPECT_EQ(0, DelayedVideo(delay_ms, current_audio_delay_ms,
+                            &extra_audio_delay_ms, &total_video_delay_ms));
+  EXPECT_EQ(0, total_video_delay_ms);
+  // The audio delay is not allowed to change more than the half of the required
+  // change in delay.
+  EXPECT_EQ(current_extra_delay_ms +
+            MaxAudioDelayDecrease(current_audio_delay_ms, delay_ms),
+            extra_audio_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedVideoLater) {
+  int current_audio_delay_ms = 0;
+  int audio_delay_ms = 100;
+  int video_delay_ms = 300;
+  int extra_audio_delay_ms = 0;
+  int total_video_delay_ms = 0;
+
+  EXPECT_EQ(0, DelayedAudioAndVideo(audio_delay_ms,
+                                    video_delay_ms,
+                                    current_audio_delay_ms,
+                                    &extra_audio_delay_ms,
+                                    &total_video_delay_ms));
+  EXPECT_EQ(0, total_video_delay_ms);
+  // The audio delay is not allowed to change more than this in 1 second.
+  EXPECT_EQ(kMaxAudioDiffMs, extra_audio_delay_ms);
+  current_audio_delay_ms = extra_audio_delay_ms;
+  int current_extra_delay_ms = extra_audio_delay_ms;
+
+  send_time_->IncreaseTimeMs(1000);
+  receive_time_->IncreaseTimeMs(800);
+  EXPECT_EQ(0, DelayedAudioAndVideo(audio_delay_ms,
+                                    video_delay_ms,
+                                    current_audio_delay_ms,
+                                    &extra_audio_delay_ms,
+                                    &total_video_delay_ms));
+  EXPECT_EQ(0, total_video_delay_ms);
+  // The audio delay is not allowed to change more than the half of the required
+  // change in delay.
+  EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
+      current_audio_delay_ms, video_delay_ms - audio_delay_ms),
+      extra_audio_delay_ms);
+  current_audio_delay_ms = extra_audio_delay_ms;
+  current_extra_delay_ms = extra_audio_delay_ms;
+
+  send_time_->IncreaseTimeMs(1000);
+  receive_time_->IncreaseTimeMs(800);
+  EXPECT_EQ(0, DelayedAudioAndVideo(audio_delay_ms,
+                                    video_delay_ms,
+                                    current_audio_delay_ms,
+                                    &extra_audio_delay_ms,
+                                    &total_video_delay_ms));
+  EXPECT_EQ(0, total_video_delay_ms);
+  // The audio delay is not allowed to change more than the half of the required
+  // change in delay.
+  EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
+      current_audio_delay_ms, video_delay_ms - audio_delay_ms),
+      extra_audio_delay_ms);
+  current_extra_delay_ms = extra_audio_delay_ms;
+
+  // Simulate that NetEQ for some reason reduced the delay.
+  current_audio_delay_ms = 170;
+  send_time_->IncreaseTimeMs(1000);
+  receive_time_->IncreaseTimeMs(800);
+  EXPECT_EQ(0, DelayedAudioAndVideo(audio_delay_ms,
+                                    video_delay_ms,
+                                    current_audio_delay_ms,
+                                    &extra_audio_delay_ms,
+                                    &total_video_delay_ms));
+  EXPECT_EQ(0, total_video_delay_ms);
+  // Since we only can ask NetEQ for a certain amount of extra delay, and
+  // we only measure the total NetEQ delay, we will ask for additional delay
+  // here to try to stay in sync.
+  EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
+      current_audio_delay_ms, video_delay_ms - audio_delay_ms),
+      extra_audio_delay_ms);
+  current_extra_delay_ms = extra_audio_delay_ms;
+
+  // Simulate that NetEQ for some reason significantly increased the delay.
+  current_audio_delay_ms = 250;
+  send_time_->IncreaseTimeMs(1000);
+  receive_time_->IncreaseTimeMs(800);
+  EXPECT_EQ(0, DelayedAudioAndVideo(audio_delay_ms,
+                                    video_delay_ms,
+                                    current_audio_delay_ms,
+                                    &extra_audio_delay_ms,
+                                    &total_video_delay_ms));
+  EXPECT_EQ(0, total_video_delay_ms);
+  // The audio delay is not allowed to change more than the half of the required
+  // change in delay.
+  EXPECT_EQ(current_extra_delay_ms + MaxAudioDelayIncrease(
+      current_audio_delay_ms, video_delay_ms - audio_delay_ms),
+      extra_audio_delay_ms);
+}
+
+TEST_F(StreamSynchronizationTest, BothDelayedAudioLater) {
+  int current_audio_delay_ms = 0;
+  int audio_delay_ms = 300;
+  int video_delay_ms = 100;
+  int extra_audio_delay_ms = 0;
+  int total_video_delay_ms = 0;
+
+  EXPECT_EQ(0, DelayedAudioAndVideo(audio_delay_ms,
+                                    video_delay_ms,
+                                    current_audio_delay_ms,
+                                    &extra_audio_delay_ms,
+                                    &total_video_delay_ms));
+  EXPECT_EQ(kMaxVideoDiffMs, total_video_delay_ms);
+  EXPECT_EQ(0, extra_audio_delay_ms);
+  current_audio_delay_ms = extra_audio_delay_ms;
+
+  send_time_->IncreaseTimeMs(1000);
+  receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
+                                                video_delay_ms));
+  // Simulate 0 minimum delay in the VCM.
+  total_video_delay_ms = 0;
+  EXPECT_EQ(0, DelayedAudioAndVideo(audio_delay_ms,
+                                    video_delay_ms,
+                                    current_audio_delay_ms,
+                                    &extra_audio_delay_ms,
+                                    &total_video_delay_ms));
+  EXPECT_EQ(2 * kMaxVideoDiffMs, total_video_delay_ms);
+  EXPECT_EQ(0, extra_audio_delay_ms);
+  current_audio_delay_ms = extra_audio_delay_ms;
+
+  send_time_->IncreaseTimeMs(1000);
+  receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
+                                                video_delay_ms));
+  // Simulate 0 minimum delay in the VCM.
+  total_video_delay_ms = 0;
+  EXPECT_EQ(0, DelayedAudioAndVideo(audio_delay_ms,
+                                    video_delay_ms,
+                                    current_audio_delay_ms,
+                                    &extra_audio_delay_ms,
+                                    &total_video_delay_ms));
+  EXPECT_EQ(audio_delay_ms - video_delay_ms, total_video_delay_ms);
+  EXPECT_EQ(0, extra_audio_delay_ms);
+
+  // Simulate that NetEQ introduces some audio delay.
+  current_audio_delay_ms = 50;
+  send_time_->IncreaseTimeMs(1000);
+  receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
+                                                video_delay_ms));
+  // Simulate 0 minimum delay in the VCM.
+  total_video_delay_ms = 0;
+  EXPECT_EQ(0, DelayedAudioAndVideo(audio_delay_ms,
+                                    video_delay_ms,
+                                    current_audio_delay_ms,
+                                    &extra_audio_delay_ms,
+                                    &total_video_delay_ms));
+  EXPECT_EQ(audio_delay_ms - video_delay_ms + current_audio_delay_ms,
+            total_video_delay_ms);
+  EXPECT_EQ(0, extra_audio_delay_ms);
+
+  // Simulate that NetEQ reduces its delay.
+  current_audio_delay_ms = 10;
+  send_time_->IncreaseTimeMs(1000);
+  receive_time_->IncreaseTimeMs(1000 - std::max(audio_delay_ms,
+                                                video_delay_ms));
+  // Simulate 0 minimum delay in the VCM.
+  total_video_delay_ms = 0;
+  EXPECT_EQ(0, DelayedAudioAndVideo(audio_delay_ms,
+                                    video_delay_ms,
+                                    current_audio_delay_ms,
+                                    &extra_audio_delay_ms,
+                                    &total_video_delay_ms));
+  EXPECT_EQ(audio_delay_ms - video_delay_ms + current_audio_delay_ms,
+            total_video_delay_ms);
+  EXPECT_EQ(0, extra_audio_delay_ms);
+}
+}  // namespace webrtc
diff --git a/src/video_engine/test/auto_test/android/.classpath b/src/video_engine/test/auto_test/android/.classpath
new file mode 100644
index 0000000..f2adf55
--- /dev/null
+++ b/src/video_engine/test/auto_test/android/.classpath
@@ -0,0 +1,9 @@
+<?xml version="1.0" encoding="UTF-8"?>

+<classpath>

+	<classpathentry kind="src" path="src"/>

+	<classpathentry kind="src" path="gen"/>

+	<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>

+	<classpathentry kind="lib" path="libs/VideoEngine_android_java.jar"/>

+	<classpathentry kind="lib" path="libs/VoiceEngine_android_java.jar"/>

+	<classpathentry kind="output" path="bin"/>

+</classpath>

diff --git a/src/video_engine/test/auto_test/android/.project b/src/video_engine/test/auto_test/android/.project
new file mode 100644
index 0000000..aca793b
--- /dev/null
+++ b/src/video_engine/test/auto_test/android/.project
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>

+<projectDescription>

+	<name>ViEAutotest</name>

+	<comment></comment>

+	<projects>

+	</projects>

+	<buildSpec>

+		<buildCommand>

+			<name>com.android.ide.eclipse.adt.ResourceManagerBuilder</name>

+			<arguments>

+			</arguments>

+		</buildCommand>

+		<buildCommand>

+			<name>com.android.ide.eclipse.adt.PreCompilerBuilder</name>

+			<arguments>

+			</arguments>

+		</buildCommand>

+		<buildCommand>

+			<name>org.eclipse.jdt.core.javabuilder</name>

+			<arguments>

+			</arguments>

+		</buildCommand>

+		<buildCommand>

+			<name>com.android.ide.eclipse.adt.ApkBuilder</name>

+			<arguments>

+			</arguments>

+		</buildCommand>

+	</buildSpec>

+	<natures>

+		<nature>com.android.ide.eclipse.adt.AndroidNature</nature>

+		<nature>org.eclipse.jdt.core.javanature</nature>

+	</natures>

+</projectDescription>

diff --git a/src/video_engine/test/auto_test/android/AndroidManifest.xml b/src/video_engine/test/auto_test/android/AndroidManifest.xml
new file mode 100644
index 0000000..11b3e27
--- /dev/null
+++ b/src/video_engine/test/auto_test/android/AndroidManifest.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+	  android:versionCode="1"
+	  android:versionName="1.0" package="org.webrtc.vieautotest">
+  <application android:label="@string/app_name"
+	       android:debuggable="true"
+	       android:icon="@drawable/logo">
+    <activity android:label="@string/app_name"
+	      android:name="ViEAutotest">
+      <intent-filter>
+        <action android:name="android.intent.action.MAIN" />
+        <category android:name="android.intent.category.LAUNCHER" />
+      </intent-filter>
+    </activity>
+
+  </application>
+  <uses-sdk android:minSdkVersion="3" android:targetSdkVersion="8" />
+  <uses-permission android:name="android.permission.CAMERA"></uses-permission>
+  <uses-feature android:name="android.hardware.camera" />
+  <uses-feature android:name="android.hardware.camera.autofocus" />
+  <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
+  <uses-permission android:name="android.permission.RECORD_AUDIO" />
+  <uses-permission android:name="android.permission.INTERNET" />
+  <uses-permission android:name="android.permission.WAKE_LOCK" />
+  <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+</manifest> 
diff --git a/src/video_engine/test/auto_test/android/default.properties b/src/video_engine/test/auto_test/android/default.properties
new file mode 100644
index 0000000..2ad44a4
--- /dev/null
+++ b/src/video_engine/test/auto_test/android/default.properties
@@ -0,0 +1,11 @@
+# This file is automatically generated by Android Tools.

+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!

+# 

+# This file must be checked in Version Control Systems.

+# 

+# To customize properties used by the Ant build system use,

+# "build.properties", and override values to adapt the script to your

+# project structure.

+

+# Project target.

+target=android-9

diff --git a/src/video_engine/test/auto_test/android/gen/org/webrtc/vieautotest/R.java b/src/video_engine/test/auto_test/android/gen/org/webrtc/vieautotest/R.java
new file mode 100644
index 0000000..4b46020
--- /dev/null
+++ b/src/video_engine/test/auto_test/android/gen/org/webrtc/vieautotest/R.java
@@ -0,0 +1,37 @@
+/* AUTO-GENERATED FILE.  DO NOT MODIFY.

+ *

+ * This class was automatically generated by the

+ * aapt tool from the resource data it found.  It

+ * should not be modified by hand.

+ */

+

+package org.webrtc.vieautotest;

+

+public final class R {

+    public static final class array {

+        public static final int subtest_array=0x7f050001;

+        public static final int test_array=0x7f050000;

+    }

+    public static final class attr {

+    }

+    public static final class drawable {

+        public static final int logo=0x7f020000;

+    }

+    public static final class id {

+        public static final int Button01=0x7f060004;

+        public static final int LocalView=0x7f060001;

+        public static final int RemoteView=0x7f060000;

+        public static final int subtestSpinner=0x7f060003;

+        public static final int testSpinner=0x7f060002;

+    }

+    public static final class layout {

+        public static final int main=0x7f030000;

+    }

+    public static final class string {

+        public static final int SpinnerSubtest=0x7f040004;

+        public static final int SpinnerTitle=0x7f040003;

+        public static final int TitleName=0x7f040001;

+        public static final int app_name=0x7f040000;

+        public static final int run_button=0x7f040002;

+    }

+}

diff --git a/src/video_engine/test/auto_test/android/jni/org_webrtc_vieautotest_vie_autotest.h b/src/video_engine/test/auto_test/android/jni/org_webrtc_vieautotest_vie_autotest.h
new file mode 100644
index 0000000..68ec601
--- /dev/null
+++ b/src/video_engine/test/auto_test/android/jni/org_webrtc_vieautotest_vie_autotest.h
@@ -0,0 +1,34 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class org_webrtc_vieautotest_ViEAutotest */
+
+#ifndef _Included_org_webrtc_vieautotest_ViEAutotest
+#define _Included_org_webrtc_vieautotest_ViEAutotest
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/*
+ * Class:     org_webrtc_vieautotest_ViEAutotest
+ * Method:    RunTest
+ * Signature: (IILandroid/view/SurfaceView;Landroid/view/SurfaceView;)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_vieautotest_ViEAutotest_RunTest__IILandroid_view_SurfaceView_2Landroid_view_SurfaceView_2
+(JNIEnv *, jobject, jint, jint, jobject, jobject);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/src/video_engine/test/auto_test/android/jni/vie_autotest_jni.cc b/src/video_engine/test/auto_test/android/jni/vie_autotest_jni.cc
new file mode 100644
index 0000000..2e55283
--- /dev/null
+++ b/src/video_engine/test/auto_test/android/jni/vie_autotest_jni.cc
@@ -0,0 +1,137 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <android/log.h>
+#include <pthread.h>
+#include <unistd.h>
+
+#include "org_webrtc_vieautotest_vie_autotest.h"
+
+#include "vie_autotest_android.h"
+
+#define WEBRTC_LOG_TAG "*WEBRTCN*"
+
+// VideoEngine data struct
+typedef struct
+{
+    JavaVM* jvm;
+} VideoEngineData;
+
+// Global variables
+JavaVM* webrtcGlobalVM;
+
+// Global variables visible in this file
+static VideoEngineData vieData;
+
+// "Local" functions (i.e. not Java accessible)
+#define WEBRTC_TRACE_MAX_MESSAGE_SIZE 1024
+
+static bool GetSubAPIs(VideoEngineData& vieData);
+static bool ReleaseSubAPIs(VideoEngineData& vieData);
+
+//
+// General functions
+//
+
+// JNI_OnLoad
+jint JNI_OnLoad(JavaVM* vm, void* /*reserved*/) {
+  if (!vm) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "JNI_OnLoad did not receive a valid VM pointer");
+    return -1;
+  }
+
+  JNIEnv* env;
+  if (JNI_OK != vm->GetEnv(reinterpret_cast<void**> (&env),
+                           JNI_VERSION_1_4)) {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "JNI_OnLoad could not get JNI env");
+    return -1;
+  }
+
+  // Init ViE data
+  vieData.jvm = vm;
+
+  return JNI_VERSION_1_4;
+}
+
+// Class:     org_webrtc_vieautotest_ViEAutotest
+// Method:    RunTest
+// Signature: (IILandroid/opengl/GLSurfaceView;Landroid/opengl/GLSurfaceView;)I
+JNIEXPORT jint JNICALL
+Java_org_webrtc_vieautotest_ViEAutotest_RunTest__IILandroid_opengl_GLSurfaceView_2Landroid_opengl_GLSurfaceView_2(
+    JNIEnv* env,
+    jobject context,
+    jint testType,
+    jint subtestType,
+    jobject glView1,
+    jobject glView2)
+{
+  int numErrors = -1;
+  numErrors = ViEAutoTestAndroid::RunAutotest(testType, subtestType, glView1,
+                                              glView2, vieData.jvm, env,
+                                              context);
+  return numErrors;
+}
+
+// Class:     org_webrtc_vieautotest_ViEAutotest
+// Method:    RunTest
+// Signature: (IILandroid/view/SurfaceView;Landroid/view/SurfaceView;)I
+JNIEXPORT jint JNICALL
+Java_org_webrtc_vieautotest_ViEAutotest_RunTest__IILandroid_view_SurfaceView_2Landroid_view_SurfaceView_2(
+    JNIEnv* env,
+    jobject context,
+    jint testType,
+    jint subtestType,
+    jobject surfaceHolder1,
+    jobject surfaceHolder2)
+{
+  int numErrors = -1;
+  numErrors = ViEAutoTestAndroid::RunAutotest(testType, subtestType,
+                                              surfaceHolder1, surfaceHolder2,
+                                              vieData.jvm, env, context);
+  return numErrors;
+}
+
+//
+//local function
+//
+
+bool GetSubAPIs(VideoEngineData& vieData) {
+  bool retVal = true;
+  //vieData.base = ViEBase::GetInterface(vieData.vie);
+  //if (vieData.base == NULL)
+  {
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                        "Could not get Base API");
+    retVal = false;
+  }
+  return retVal;
+}
+
+bool ReleaseSubAPIs(VideoEngineData& vieData) {
+  bool releaseOk = true;
+  //if (vieData.base)
+  {
+    //if (vieData.base->Release() != 0)
+    if (false) {
+      __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                          "Release base sub-API failed");
+      releaseOk = false;
+    }
+    else {
+      //vieData.base = NULL;
+    }
+  }
+
+  return releaseOk;
+}
diff --git a/src/video_engine/test/auto_test/android/res/drawable/logo.png b/src/video_engine/test/auto_test/android/res/drawable/logo.png
new file mode 100644
index 0000000..c3e0a12
--- /dev/null
+++ b/src/video_engine/test/auto_test/android/res/drawable/logo.png
Binary files differ
diff --git a/src/video_engine/test/auto_test/android/res/layout/main.xml b/src/video_engine/test/auto_test/android/res/layout/main.xml
new file mode 100644
index 0000000..1f2aaf9
--- /dev/null
+++ b/src/video_engine/test/auto_test/android/res/layout/main.xml
@@ -0,0 +1,64 @@
+<?xml version="1.0" encoding="utf-8"?>
+<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
+	     android:layout_width="fill_parent"
+	     android:layout_height="fill_parent">
+  <RelativeLayout
+     android:layout_width="fill_parent"
+     android:layout_height="fill_parent">
+    <LinearLayout 
+       android:id="@+id/RemoteView"
+       android:layout_width="fill_parent"
+       android:layout_height="fill_parent"
+       android:layout_weight="1">
+      <!-- log instead of video
+           <ImageView
+              android:layout_width="fill_parent"
+              android:layout_height="fill_parent" 
+              android:scaleType="fitXY"
+              android:src="@drawable/logo" /> -->
+
+    </LinearLayout>
+    <LinearLayout 
+       android:id="@+id/LocalView"
+       android:layout_width="120dip"
+       android:layout_height="120dip"
+       android:layout_weight="1">
+      <!-- <ImageView
+       	      android:layout_width="fill_parent"
+       	      android:layout_height="fill_parent" 
+       	      android:scaleType="fitXY"
+       	      android:src="@drawable/logo" /> -->
+    </LinearLayout>
+    <LinearLayout
+       android:orientation="horizontal"
+       android:layout_width="fill_parent"
+       android:layout_height="wrap_content"
+       android:layout_alignParentBottom="true">
+      <LinearLayout
+	 android:orientation="vertical"
+	 android:layout_width="fill_parent"
+	 android:layout_height="wrap_content"
+	 android:layout_alignParentBottom="true">
+	<Spinner
+	   android:id="@+id/testSpinner"
+	   android:layout_width="wrap_content"
+	   android:layout_height="wrap_content"
+	   android:prompt="@string/SpinnerTitle"
+	   />
+	<Spinner
+	   android:id="@+id/subtestSpinner"
+	   android:layout_width="wrap_content"
+	   android:layout_height="wrap_content"
+	   android:prompt="@string/SpinnerSubtest"
+	   />
+	<Button
+	   android:text="@string/run_button"
+	   android:id="@+id/Button01"
+	   android:layout_width="wrap_content"
+	   android:layout_height="wrap_content">
+	</Button>
+      </LinearLayout>
+
+    </LinearLayout>
+  </RelativeLayout>
+</FrameLayout>
diff --git a/src/video_engine/test/auto_test/android/res/values/strings.xml b/src/video_engine/test/auto_test/android/res/values/strings.xml
new file mode 100644
index 0000000..ba59c5e
--- /dev/null
+++ b/src/video_engine/test/auto_test/android/res/values/strings.xml
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    
+<string name="app_name">ViEAutotest</string>
+<string name="TitleName">ViEAutotest</string>
+<string name="run_button">Run Test</string>
+<string name="SpinnerTitle">Test type...</string>
+<string-array name="test_array">
+	<item>Standard</item>
+	<item>API</item>
+	<item>Extended</item>
+	<item>Loopback</item>
+	<item>Custom</item>
+</string-array>
+<string name="SpinnerSubtest">Run...</string>
+<string-array name="subtest_array">
+	<item>All</item>
+	<item>Base</item>
+	<item>Capture</item>
+	<item>Codec</item>
+	<item>Mix</item>
+	<item>Encryption</item>
+	<item>External Codec</item>
+	<item>File</item>
+	<item>Image Process</item>
+	<item>Network</item>
+	<item>Render</item>
+	<item>RTP/RTCP</item>
+</string-array>
+
+</resources>
diff --git a/src/video_engine/test/auto_test/android/src/org/webrtc/vieautotest/ViEAutotest.java b/src/video_engine/test/auto_test/android/src/org/webrtc/vieautotest/ViEAutotest.java
new file mode 100644
index 0000000..de228a8
--- /dev/null
+++ b/src/video_engine/test/auto_test/android/src/org/webrtc/vieautotest/ViEAutotest.java
@@ -0,0 +1,162 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+package org.webrtc.vieautotest;
+
+import org.webrtc.vieautotest.R;
+
+import android.app.Activity;
+import android.os.Bundle;
+import android.util.Log;
+import android.widget.Button;
+import android.view.SurfaceView;
+import android.view.View;
+import android.view.SurfaceHolder;
+import android.widget.LinearLayout;
+import android.opengl.GLSurfaceView;
+import android.widget.Spinner;
+import android.widget.ArrayAdapter;
+import android.widget.AdapterView;
+
+public class ViEAutotest extends Activity
+    implements
+      AdapterView.OnItemSelectedListener,
+      View.OnClickListener {
+
+  private Thread testThread;
+  private Spinner testSpinner;
+  private Spinner subtestSpinner;
+  private int testSelection;
+  private int subTestSelection;
+
+  // View for remote video
+  private LinearLayout remoteSurface = null;
+  private GLSurfaceView glSurfaceView = null;
+  private SurfaceView surfaceView = null;
+
+  private LinearLayout localSurface = null;
+  private GLSurfaceView glLocalSurfaceView = null;
+  private SurfaceView localSurfaceView = null;
+
+  /** Called when the activity is first created. */
+  @Override
+  public void onCreate(Bundle savedInstanceState) {
+
+    Log.d("*WEBRTC*", "onCreate called");
+
+    super.onCreate(savedInstanceState);
+    setContentView(R.layout.main);
+
+    // Set the Start button action
+    final Button buttonStart = (Button) findViewById(R.id.Button01);
+    buttonStart.setOnClickListener(this);
+
+    // Set test spinner
+    testSpinner = (Spinner) findViewById(R.id.testSpinner);
+    ArrayAdapter<CharSequence> adapter =
+        ArrayAdapter.createFromResource(this, R.array.test_array,
+                                        android.R.layout.simple_spinner_item);
+
+    int resource = android.R.layout.simple_spinner_dropdown_item;
+    adapter.setDropDownViewResource(resource);
+    testSpinner.setAdapter(adapter);
+    testSpinner.setOnItemSelectedListener(this);
+
+    // Set sub test spinner
+    subtestSpinner = (Spinner) findViewById(R.id.subtestSpinner);
+    ArrayAdapter<CharSequence> subtestAdapter =
+        ArrayAdapter.createFromResource(this, R.array.subtest_array,
+                                        android.R.layout.simple_spinner_item);
+
+    subtestAdapter.setDropDownViewResource(resource);
+    subtestSpinner.setAdapter(subtestAdapter);
+    subtestSpinner.setOnItemSelectedListener(this);
+
+    remoteSurface = (LinearLayout) findViewById(R.id.RemoteView);
+    surfaceView = new SurfaceView(this);
+    remoteSurface.addView(surfaceView);
+
+    localSurface = (LinearLayout) findViewById(R.id.LocalView);
+    localSurfaceView = new SurfaceView(this);
+    localSurfaceView.setZOrderMediaOverlay(true);
+    localSurface.addView(localSurfaceView);
+
+    // Set members
+    testSelection = 0;
+    subTestSelection = 0;
+  }
+
+  public void onClick(View v) {
+    Log.d("*WEBRTC*", "Button clicked...");
+    switch (v.getId()) {
+      case R.id.Button01:
+        new Thread(new Runnable() {
+            public void run() {
+              Log.d("*WEBRTC*", "Calling RunTest...");
+              RunTest(testSelection, subTestSelection,
+                      localSurfaceView, surfaceView);
+              Log.d("*WEBRTC*", "RunTest done");
+            }
+          }).start();
+    }
+  }
+
+  public void onItemSelected(AdapterView<?> parent, View v,
+                             int position, long id) {
+
+    if (parent == (Spinner) findViewById(R.id.testSpinner)) {
+      testSelection = position;
+    } else {
+      subTestSelection = position;
+    }
+  }
+
+  public void onNothingSelected(AdapterView<?> parent) {
+  }
+
+  @Override
+  protected void onStart() {
+    super.onStart();
+  }
+
+  @Override
+  protected void onResume() {
+    super.onResume();
+  }
+
+  @Override
+  protected void onPause() {
+    super.onPause();
+  }
+
+  @Override
+  protected void onStop() {
+    super.onStop();
+  }
+
+  @Override
+  protected void onDestroy() {
+
+    super.onDestroy();
+  }
+
+  // C++ function performing the chosen test
+  // private native int RunTest(int testSelection, int subtestSelection,
+  // GLSurfaceView window1, GLSurfaceView window2);
+  private native int RunTest(int testSelection, int subtestSelection,
+                             SurfaceView window1, SurfaceView window2);
+
+  // this is used to load the 'ViEAutotestJNIAPI' library on application
+  // startup.
+  static {
+    Log.d("*WEBRTC*", "Loading ViEAutotest...");
+    System.loadLibrary("webrtc-video-autotest-jni");
+  }
+}
diff --git a/src/video_engine/test/auto_test/automated/legacy_fixture.cc b/src/video_engine/test/auto_test/automated/legacy_fixture.cc
new file mode 100644
index 0000000..591a567
--- /dev/null
+++ b/src/video_engine/test/auto_test/automated/legacy_fixture.cc
@@ -0,0 +1,28 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/test/auto_test/automated/legacy_fixture.h"
+
+#include "video_engine/test/auto_test/interface/vie_autotest.h"
+
+void LegacyFixture::SetUpTestCase() {
+  TwoWindowsFixture::SetUpTestCase();
+
+  // Create the test cases
+  tests_ = new ViEAutoTest(window_1_, window_2_);
+}
+
+void LegacyFixture::TearDownTestCase() {
+  delete tests_;
+
+  TwoWindowsFixture::TearDownTestCase();
+}
+
+ViEAutoTest* LegacyFixture::tests_ = NULL;
diff --git a/src/video_engine/test/auto_test/automated/legacy_fixture.h b/src/video_engine/test/auto_test/automated/legacy_fixture.h
new file mode 100644
index 0000000..b452766
--- /dev/null
+++ b/src/video_engine/test/auto_test/automated/legacy_fixture.h
@@ -0,0 +1,29 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VIDEO_ENGINE_TEST_AUTO_TEST_AUTOMATED_VIE_LEGACY_FIXTURE_H_
+#define SRC_VIDEO_ENGINE_TEST_AUTO_TEST_AUTOMATED_VIE_LEGACY_FIXTURE_H_
+
+#include "video_engine/test/auto_test/automated/two_windows_fixture.h"
+
+// Inherited by old-style standard integration tests based on ViEAutoTest.
+class LegacyFixture : public TwoWindowsFixture {
+ public:
+  // Initializes ViEAutoTest in addition to the work done by ViEIntegrationTest.
+  static void SetUpTestCase();
+
+  // Releases anything allocated by SetupTestCase.
+  static void TearDownTestCase();
+
+ protected:
+  static ViEAutoTest* tests_;
+};
+
+#endif  // SRC_VIDEO_ENGINE_TEST_AUTO_TEST_AUTOMATED_VIE_LEGACY_FIXTURE_H_
diff --git a/src/video_engine/test/auto_test/automated/two_windows_fixture.cc b/src/video_engine/test/auto_test/automated/two_windows_fixture.cc
new file mode 100644
index 0000000..a4a551d
--- /dev/null
+++ b/src/video_engine/test/auto_test/automated/two_windows_fixture.cc
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/test/auto_test/automated/two_windows_fixture.h"
+
+#include "video_engine/test/auto_test/interface/vie_window_creator.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_window_manager_interface.h"
+
+void TwoWindowsFixture::SetUpTestCase() {
+  window_creator_ = new ViEWindowCreator();
+
+  ViEAutoTestWindowManagerInterface* window_manager =
+      window_creator_->CreateTwoWindows();
+
+  window_1_ = window_manager->GetWindow1();
+  window_2_ = window_manager->GetWindow2();
+}
+
+void TwoWindowsFixture::TearDownTestCase() {
+  window_creator_->TerminateWindows();
+  delete window_creator_;
+}
+
+ViEWindowCreator* TwoWindowsFixture::window_creator_ = NULL;
+void* TwoWindowsFixture::window_1_ = NULL;
+void* TwoWindowsFixture::window_2_ = NULL;
diff --git a/src/video_engine/test/auto_test/automated/two_windows_fixture.h b/src/video_engine/test/auto_test/automated/two_windows_fixture.h
new file mode 100644
index 0000000..175a42d
--- /dev/null
+++ b/src/video_engine/test/auto_test/automated/two_windows_fixture.h
@@ -0,0 +1,35 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_AUTOMATED_TWO_WINDOWS_FIXTURE_H_
+#define SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_AUTOMATED_TWO_WINDOWS_FIXTURE_H_
+
+#include "gtest/gtest.h"
+
+class ViEWindowCreator;
+class ViEAutoTest;
+
+// Meant to be inherited by all standard test who require two windows.
+class TwoWindowsFixture : public testing::Test {
+ public:
+  // Launches two windows in a platform-dependent manner and stores the handles
+  // in the window_1_ and window_2_ fields.
+  static void SetUpTestCase();
+
+  // Releases anything allocated by SetupTestCase.
+  static void TearDownTestCase();
+
+ protected:
+  static void* window_1_;
+  static void* window_2_;
+  static ViEWindowCreator* window_creator_;
+};
+
+#endif  // SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_AUTOMATED_TWO_WINDOWS_FIXTURE_H_
diff --git a/src/video_engine/test/auto_test/automated/vie_api_integration_test.cc b/src/video_engine/test/auto_test/automated/vie_api_integration_test.cc
new file mode 100644
index 0000000..82a205b
--- /dev/null
+++ b/src/video_engine/test/auto_test/automated/vie_api_integration_test.cc
@@ -0,0 +1,61 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/**
+ * Runs "extended" integration tests.
+ */
+
+#include "gtest/gtest.h"
+#include "legacy_fixture.h"
+#include "vie_autotest.h"
+
+namespace {
+
+class ViEApiIntegrationTest : public LegacyFixture {
+};
+
+TEST_F(ViEApiIntegrationTest, RunsBaseTestWithoutErrors) {
+  tests_->ViEBaseAPITest();
+}
+
+// TODO(phoglund): Crashes on the v4l2loopback camera.
+TEST_F(ViEApiIntegrationTest, DISABLED_RunsCaptureTestWithoutErrors) {
+  tests_->ViECaptureAPITest();
+}
+
+TEST_F(ViEApiIntegrationTest, RunsCodecTestWithoutErrors) {
+  tests_->ViECodecAPITest();
+}
+
+TEST_F(ViEApiIntegrationTest, RunsEncryptionTestWithoutErrors) {
+  tests_->ViEEncryptionAPITest();
+}
+
+TEST_F(ViEApiIntegrationTest, RunsFileTestWithoutErrors) {
+  tests_->ViEFileAPITest();
+}
+
+TEST_F(ViEApiIntegrationTest, RunsImageProcessTestWithoutErrors) {
+  tests_->ViEImageProcessAPITest();
+}
+
+TEST_F(ViEApiIntegrationTest, RunsNetworkTestWithoutErrors) {
+  tests_->ViENetworkAPITest();
+}
+
+TEST_F(ViEApiIntegrationTest, RunsRenderTestWithoutErrors) {
+  tests_->ViERenderAPITest();
+}
+
+TEST_F(ViEApiIntegrationTest, RunsRtpRtcpTestWithoutErrors) {
+  tests_->ViERtpRtcpAPITest();
+}
+
+} // namespace
diff --git a/src/video_engine/test/auto_test/automated/vie_extended_integration_test.cc b/src/video_engine/test/auto_test/automated/vie_extended_integration_test.cc
new file mode 100644
index 0000000..7d16db0
--- /dev/null
+++ b/src/video_engine/test/auto_test/automated/vie_extended_integration_test.cc
@@ -0,0 +1,61 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/**
+ * Runs "extended" integration tests.
+ */
+
+#include "gtest/gtest.h"
+#include "legacy_fixture.h"
+#include "vie_autotest.h"
+
+namespace {
+
+class ViEExtendedIntegrationTest : public LegacyFixture {
+};
+
+TEST_F(ViEExtendedIntegrationTest, RunsBaseTestWithoutErrors) {
+  tests_->ViEBaseExtendedTest();
+}
+
+// TODO(phoglund): Crashes on the v4l2loopback camera.
+TEST_F(ViEExtendedIntegrationTest, DISABLED_RunsCaptureTestWithoutErrors) {
+  tests_->ViECaptureExtendedTest();
+}
+
+TEST_F(ViEExtendedIntegrationTest, RunsCodecTestWithoutErrors) {
+  tests_->ViECodecExtendedTest();
+}
+
+TEST_F(ViEExtendedIntegrationTest, RunsEncryptionTestWithoutErrors) {
+  tests_->ViEEncryptionExtendedTest();
+}
+
+TEST_F(ViEExtendedIntegrationTest, RunsFileTestWithoutErrors) {
+  tests_->ViEFileExtendedTest();
+}
+
+TEST_F(ViEExtendedIntegrationTest, RunsImageProcessTestWithoutErrors) {
+  tests_->ViEImageProcessExtendedTest();
+}
+
+TEST_F(ViEExtendedIntegrationTest, RunsNetworkTestWithoutErrors) {
+  tests_->ViENetworkExtendedTest();
+}
+
+TEST_F(ViEExtendedIntegrationTest, RunsRenderTestWithoutErrors) {
+  tests_->ViERenderExtendedTest();
+}
+
+TEST_F(ViEExtendedIntegrationTest, RunsRtpRtcpTestWithoutErrors) {
+  tests_->ViERtpRtcpExtendedTest();
+}
+
+} // namespace
diff --git a/src/video_engine/test/auto_test/automated/vie_rtp_fuzz_test.cc b/src/video_engine/test/auto_test/automated/vie_rtp_fuzz_test.cc
new file mode 100644
index 0000000..c86233e
--- /dev/null
+++ b/src/video_engine/test/auto_test/automated/vie_rtp_fuzz_test.cc
@@ -0,0 +1,150 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <ctime>
+
+#include "gtest/gtest.h"
+#include "gflags/gflags.h"
+#include "test/libtest/include/bit_flip_encryption.h"
+#include "test/libtest/include/random_encryption.h"
+#include "video_engine/test/auto_test/automated/two_windows_fixture.h"
+#include "video_engine/test/auto_test/interface/vie_window_creator.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_window_manager_interface.h"
+#include "video_engine/test/auto_test/primitives/general_primitives.h"
+#include "video_engine/test/libvietest/include/tb_capture_device.h"
+#include "video_engine/test/libvietest/include/tb_interfaces.h"
+#include "video_engine/test/libvietest/include/tb_video_channel.h"
+
+namespace {
+
+DEFINE_int32(rtp_fuzz_test_rand_seed, 0, "The rand seed to use for "
+    "the RTP fuzz test. Defaults to time(). 0 cannot be specified.");
+
+class ViERtpFuzzTest : public TwoWindowsFixture {
+ protected:
+  TbVideoChannel* video_channel_;
+  TbInterfaces* video_engine_;
+  TbCaptureDevice* capture_device_;
+
+  void SetUp() {
+    video_engine_ = new TbInterfaces(
+        "ViERtpTryInjectingRandomPacketsIntoRtpStream");
+    video_channel_ = new TbVideoChannel(
+        *video_engine_, webrtc::kVideoCodecVP8);
+    capture_device_ = new TbCaptureDevice(*video_engine_);
+
+    capture_device_->ConnectTo(video_channel_->videoChannel);
+
+    // Enable PLI RTCP, which will allow the video engine to recover better.
+    video_engine_->rtp_rtcp->SetKeyFrameRequestMethod(
+        video_channel_->videoChannel, webrtc::kViEKeyFrameRequestPliRtcp);
+
+    video_channel_->StartReceive();
+    video_channel_->StartSend();
+
+    RenderInWindow(
+        video_engine_->render, capture_device_->captureId, window_1_, 0);
+    RenderInWindow(
+        video_engine_->render, video_channel_->videoChannel, window_2_, 1);
+  }
+
+  void TearDown() {
+    delete capture_device_;
+    delete video_channel_;
+    delete video_engine_;
+  }
+
+  unsigned int FetchRandSeed() {
+    if (FLAGS_rtp_fuzz_test_rand_seed != 0) {
+      return FLAGS_rtp_fuzz_test_rand_seed;
+    }
+    return std::time(NULL);
+  }
+
+  // Pass in a number [0, 1] which will be the bit flip probability per byte.
+  void BitFlipFuzzTest(float flip_probability) {
+    unsigned int rand_seed = FetchRandSeed();
+    ViETest::Log("Running test with rand seed %d.", rand_seed);
+
+    ViETest::Log("Running as usual. You should see video output.");
+    AutoTestSleep(2000);
+
+    ViETest::Log("Starting to flip bits in packets (%f%% chance per byte).",
+                 flip_probability * 100);
+    BitFlipEncryption bit_flip_encryption(rand_seed, flip_probability);
+    video_engine_->encryption->RegisterExternalEncryption(
+        video_channel_->videoChannel, bit_flip_encryption);
+
+    AutoTestSleep(5000);
+
+    ViETest::Log("Back to normal. Flipped %d bits.",
+                 bit_flip_encryption.flip_count());
+    video_engine_->encryption->DeregisterExternalEncryption(
+        video_channel_->videoChannel);
+
+    AutoTestSleep(5000);
+  }
+};
+
+TEST_F(ViERtpFuzzTest, VideoEngineDealsWithASmallNumberOfTamperedPackets) {
+  // Try 0.005% bit flip chance per byte.
+  BitFlipFuzzTest(0.00005f);
+}
+
+TEST_F(ViERtpFuzzTest, VideoEngineDealsWithAMediumNumberOfTamperedPackets) {
+  // Try 0.05% bit flip chance per byte.
+  BitFlipFuzzTest(0.0005f);
+}
+
+TEST_F(ViERtpFuzzTest, VideoEngineDealsWithALargeNumberOfTamperedPackets) {
+  // Try 0.5% bit flip chance per byte.
+  BitFlipFuzzTest(0.005f);
+}
+
+TEST_F(ViERtpFuzzTest, VideoEngineDealsWithAVeryLargeNumberOfTamperedPackets) {
+  // Try 5% bit flip chance per byte.
+  BitFlipFuzzTest(0.05f);
+}
+
+TEST_F(ViERtpFuzzTest,
+       VideoEngineDealsWithAExtremelyLargeNumberOfTamperedPackets) {
+  // Try 25% bit flip chance per byte (madness!)
+  BitFlipFuzzTest(0.25f);
+}
+
+TEST_F(ViERtpFuzzTest, VideoEngineDealsWithSeveralPeriodsOfTamperedPackets) {
+  // Try 0.05% bit flip chance per byte.
+  BitFlipFuzzTest(0.0005f);
+  BitFlipFuzzTest(0.0005f);
+  BitFlipFuzzTest(0.0005f);
+}
+
+TEST_F(ViERtpFuzzTest, VideoEngineRecoversAfterSomeCompletelyRandomPackets) {
+  unsigned int rand_seed = FetchRandSeed();
+  ViETest::Log("Running test with rand seed %d.", rand_seed);
+
+  ViETest::Log("Running as usual. You should see video output.");
+  AutoTestSleep(2000);
+
+  ViETest::Log("Injecting completely random packets...");
+  RandomEncryption random_encryption(rand_seed);
+  video_engine_->encryption->RegisterExternalEncryption(
+      video_channel_->videoChannel, random_encryption);
+
+  AutoTestSleep(5000);
+
+  ViETest::Log("Back to normal.");
+  video_engine_->encryption->DeregisterExternalEncryption(
+      video_channel_->videoChannel);
+
+  AutoTestSleep(5000);
+}
+
+}
diff --git a/src/video_engine/test/auto_test/automated/vie_standard_integration_test.cc b/src/video_engine/test/auto_test/automated/vie_standard_integration_test.cc
new file mode 100644
index 0000000..e85b6e4
--- /dev/null
+++ b/src/video_engine/test/auto_test/automated/vie_standard_integration_test.cc
@@ -0,0 +1,71 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the "standard" suite of integration tests, implemented
+ * as a GUnit test. This file is a part of the effort to try to automate all
+ * tests in this section of the code. Currently, this code makes no attempt
+ * to verify any video output - it only checks for direct errors.
+ */
+
+#include <cstdio>
+
+#include "gflags/gflags.h"
+#include "gtest/gtest.h"
+#include "legacy_fixture.h"
+#include "testsupport/metrics/video_metrics.h"
+#include "vie_autotest.h"
+#include "vie_autotest_window_manager_interface.h"
+#include "vie_to_file_renderer.h"
+#include "vie_window_creator.h"
+#include "testsupport/metrics/video_metrics.h"
+
+namespace {
+
+class ViEStandardIntegrationTest : public LegacyFixture {
+};
+
+TEST_F(ViEStandardIntegrationTest, RunsBaseTestWithoutErrors)  {
+  tests_->ViEBaseStandardTest();
+}
+
+TEST_F(ViEStandardIntegrationTest, RunsCodecTestWithoutErrors)  {
+  tests_->ViECodecStandardTest();
+}
+
+TEST_F(ViEStandardIntegrationTest, RunsCaptureTestWithoutErrors)  {
+  tests_->ViECaptureStandardTest();
+}
+
+TEST_F(ViEStandardIntegrationTest, RunsEncryptionTestWithoutErrors)  {
+  tests_->ViEEncryptionStandardTest();
+}
+
+TEST_F(ViEStandardIntegrationTest, RunsFileTestWithoutErrors)  {
+  tests_->ViEFileStandardTest();
+}
+
+TEST_F(ViEStandardIntegrationTest, RunsImageProcessTestWithoutErrors)  {
+  tests_->ViEImageProcessStandardTest();
+}
+
+TEST_F(ViEStandardIntegrationTest, RunsNetworkTestWithoutErrors)  {
+  tests_->ViENetworkStandardTest();
+}
+
+TEST_F(ViEStandardIntegrationTest, RunsRenderTestWithoutErrors)  {
+  tests_->ViERenderStandardTest();
+}
+
+TEST_F(ViEStandardIntegrationTest, RunsRtpRtcpTestWithoutErrors)  {
+  tests_->ViERtpRtcpStandardTest();
+}
+
+} // namespace
diff --git a/src/video_engine/test/auto_test/automated/vie_video_verification_test.cc b/src/video_engine/test/auto_test/automated/vie_video_verification_test.cc
new file mode 100644
index 0000000..8ea8bd7
--- /dev/null
+++ b/src/video_engine/test/auto_test/automated/vie_video_verification_test.cc
@@ -0,0 +1,254 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <vector>
+
+#include "gtest/gtest.h"
+#include "testsupport/fileutils.h"
+#include "testsupport/metrics/video_metrics.h"
+#include "video_engine/test/auto_test/interface/vie_autotest.h"
+#include "video_engine/test/auto_test/interface/vie_file_based_comparison_tests.h"
+#include "video_engine/test/auto_test/primitives/framedrop_primitives.h"
+#include "video_engine/test/libvietest/include/vie_to_file_renderer.h"
+
+namespace {
+
+// The input file must be QCIF since I420 gets scaled to that in the tests
+// (it is so bandwidth-heavy we have no choice). Our comparison algorithms
+// wouldn't like scaling, so this will work when we compare with the original.
+const int kInputWidth = 176;
+const int kInputHeight = 144;
+const int kVerifyingTestMaxNumAttempts = 3;
+
+class ViEVideoVerificationTest : public testing::Test {
+ protected:
+  void SetUp() {
+    input_file_ = webrtc::test::ResourcePath("paris_qcif", "yuv");
+  }
+
+  void TearDown() {
+    TearDownFileRenderer(local_file_renderer_);
+    TearDownFileRenderer(remote_file_renderer_);
+  }
+
+  void InitializeFileRenderers() {
+    local_file_renderer_ = new ViEToFileRenderer();
+    remote_file_renderer_ = new ViEToFileRenderer();
+    SetUpLocalFileRenderer(local_file_renderer_);
+    SetUpRemoteFileRenderer(remote_file_renderer_);
+  }
+
+  void SetUpLocalFileRenderer(ViEToFileRenderer* file_renderer) {
+    SetUpFileRenderer(file_renderer, "-local-preview.yuv");
+  }
+
+  void SetUpRemoteFileRenderer(ViEToFileRenderer* file_renderer) {
+    SetUpFileRenderer(file_renderer, "-remote.yuv");
+  }
+
+  // Must be called manually inside the tests.
+  void StopRenderers() {
+    local_file_renderer_->StopRendering();
+    remote_file_renderer_->StopRendering();
+  }
+
+  void TearDownFileRenderer(ViEToFileRenderer* file_renderer) {
+    assert(file_renderer);
+    bool test_failed = ::testing::UnitTest::GetInstance()->
+        current_test_info()->result()->Failed();
+    if (test_failed) {
+      // Leave the files for analysis if the test failed.
+      file_renderer->SaveOutputFile("failed-");
+    } else {
+      // No reason to keep the files if we succeeded.
+      file_renderer->DeleteOutputFile();
+    }
+    delete file_renderer;
+  }
+
+  void CompareFiles(const std::string& reference_file,
+                    const std::string& test_file,
+                    double* psnr_result, double *ssim_result) {
+    webrtc::test::QualityMetricsResult psnr;
+    int error = I420PSNRFromFiles(reference_file.c_str(), test_file.c_str(),
+                                  kInputWidth, kInputHeight, &psnr);
+
+    EXPECT_EQ(0, error) << "PSNR routine failed - output files missing?";
+    *psnr_result = psnr.average;
+
+    webrtc::test::QualityMetricsResult ssim;
+    error = I420SSIMFromFiles(reference_file.c_str(), test_file.c_str(),
+                              kInputWidth, kInputHeight, &ssim);
+    EXPECT_EQ(0, error) << "SSIM routine failed - output files missing?";
+    *ssim_result = ssim.average;
+
+    ViETest::Log("Results: PSNR is %f (dB), SSIM is %f (1 is perfect)",
+                 psnr.average, ssim.average);
+  }
+
+  std::string input_file_;
+  ViEToFileRenderer* local_file_renderer_;
+  ViEToFileRenderer* remote_file_renderer_;
+  ViEFileBasedComparisonTests tests_;
+
+ private:
+  void SetUpFileRenderer(ViEToFileRenderer* file_renderer,
+                         const std::string& suffix) {
+    std::string test_case_name =
+        ::testing::UnitTest::GetInstance()->current_test_info()->name();
+
+    std::string output_path = ViETest::GetResultOutputPath();
+    std::string filename = test_case_name + suffix;
+
+    if (!file_renderer->PrepareForRendering(output_path, filename)) {
+      FAIL() << "Could not open output file " << filename <<
+          " for writing.";
+    }
+  }
+};
+
+TEST_F(ViEVideoVerificationTest, RunsBaseStandardTestWithoutErrors)  {
+  // The I420 test should give pretty good values since it's a lossless codec
+  // running on the default bitrate. It should average about 30 dB but there
+  // may be cases where it dips as low as 26 under adverse conditions. That's
+  // why we have a retrying mechanism in place for this test.
+  const double kExpectedMinimumPSNR = 30;
+  const double kExpectedMinimumSSIM = 0.95;
+
+  for (int attempt = 0; attempt < kVerifyingTestMaxNumAttempts; attempt++) {
+    InitializeFileRenderers();
+    ASSERT_TRUE(tests_.TestCallSetup(input_file_, kInputWidth, kInputHeight,
+                                     local_file_renderer_,
+                                     remote_file_renderer_));
+    std::string output_file = remote_file_renderer_->GetFullOutputPath();
+    StopRenderers();
+
+    double actual_psnr = 0;
+    double actual_ssim = 0;
+    CompareFiles(input_file_, output_file, &actual_psnr, &actual_ssim);
+
+    if (actual_psnr >= kExpectedMinimumPSNR &&
+        actual_ssim >= kExpectedMinimumSSIM) {
+      // Test succeeded!
+      return;
+    }
+  }
+
+  ADD_FAILURE() << "Failed to achieve PSNR " << kExpectedMinimumPSNR <<
+      " and SSIM " << kExpectedMinimumSSIM << " after " <<
+      kVerifyingTestMaxNumAttempts << " attempts.";
+}
+
+TEST_F(ViEVideoVerificationTest, RunsCodecTestWithoutErrors)  {
+  // We compare the local and remote here instead of with the original.
+  // The reason is that it is hard to say when the three consecutive tests
+  // switch over into each other, at which point we would have to restart the
+  // original to get a fair comparison.
+  //
+  // The PSNR and SSIM values are quite low here, and they have to be since
+  // the codec switches will lead to lag in the output. This is considered
+  // acceptable, but it probably shouldn't get worse than this.
+  const double kExpectedMinimumPSNR = 20;
+  const double kExpectedMinimumSSIM = 0.7;
+
+  for (int attempt = 0; attempt < kVerifyingTestMaxNumAttempts; attempt++) {
+    InitializeFileRenderers();
+    ASSERT_TRUE(tests_.TestCodecs(input_file_, kInputWidth, kInputHeight,
+                                  local_file_renderer_,
+                                  remote_file_renderer_));
+    std::string reference_file = local_file_renderer_->GetFullOutputPath();
+    std::string output_file = remote_file_renderer_->GetFullOutputPath();
+    StopRenderers();
+
+    double actual_psnr = 0;
+    double actual_ssim = 0;
+    CompareFiles(reference_file, output_file, &actual_psnr, &actual_ssim);
+
+    if (actual_psnr >= kExpectedMinimumPSNR &&
+        actual_ssim >= kExpectedMinimumSSIM) {
+      // Test succeeded!
+      return;
+    }
+  }
+}
+
+// Runs a whole stack processing with tracking of which frames are dropped
+// in the encoder. The local and remote file will not be of equal size because
+// of unknown reasons. Tests show that they start at the same frame, which is
+// the important thing when doing frame-to-frame comparison with PSNR/SSIM.
+// TODO(phoglund): This is flaky and a bit incomplete - enable again when it has
+// been made more deterministic.
+TEST_F(ViEVideoVerificationTest, DISABLED_RunsFullStackWithoutErrors)  {
+  FrameDropDetector detector;
+  local_file_renderer_ = new ViEToFileRenderer();
+  remote_file_renderer_ = new FrameDropMonitoringRemoteFileRenderer(&detector);
+  SetUpLocalFileRenderer(local_file_renderer_);
+  SetUpRemoteFileRenderer(remote_file_renderer_);
+
+  // Set a low bit rate so the encoder budget will be tight, causing it to drop
+  // frames every now and then.
+  const int kBitRateKbps = 50;
+  const int kPacketLossPercent = 5;
+  const int kNetworkDelayMs = 100;
+  ViETest::Log("Bit rate     : %5d kbps", kBitRateKbps);
+  ViETest::Log("Packet loss  : %5d %%", kPacketLossPercent);
+  ViETest::Log("Network delay: %5d ms", kNetworkDelayMs);
+  tests_.TestFullStack(input_file_, kInputWidth, kInputHeight, kBitRateKbps,
+                       kPacketLossPercent, kNetworkDelayMs,
+                       local_file_renderer_, remote_file_renderer_, &detector);
+  const std::string reference_file = local_file_renderer_->GetFullOutputPath();
+  const std::string output_file = remote_file_renderer_->GetFullOutputPath();
+  StopRenderers();
+
+  detector.CalculateResults();
+  detector.PrintReport();
+
+  if (detector.GetNumberOfFramesDroppedAt(FrameDropDetector::kRendered) !=
+      detector.GetNumberOfFramesDroppedAt(FrameDropDetector::kDecoded)) {
+    detector.PrintDebugDump();
+  }
+
+  ASSERT_EQ(detector.GetNumberOfFramesDroppedAt(FrameDropDetector::kRendered),
+      detector.GetNumberOfFramesDroppedAt(FrameDropDetector::kDecoded))
+      << "The number of dropped frames on the decode and render steps are not "
+      "equal. This may be because we have a major problem in the buffers of "
+      "the ViEToFileRenderer?";
+
+  // We may have dropped frames during the processing, which means the output
+  // file does not contain all the frames that are present in the input file.
+  // To make the quality measurement correct, we must adjust the output file to
+  // that by copying the last successful frame into the place where the dropped
+  // frame would be, for all dropped frames.
+  const int frame_length_in_bytes = 3 * kInputHeight * kInputWidth / 2;
+  ViETest::Log("Frame length: %d bytes", frame_length_in_bytes);
+  std::vector<Frame*> all_frames = detector.GetAllFrames();
+  FixOutputFileForComparison(output_file, frame_length_in_bytes, all_frames);
+
+  // Verify all sent frames are present in the output file.
+  size_t output_file_size = webrtc::test::GetFileSize(output_file);
+  EXPECT_EQ(all_frames.size(), output_file_size / frame_length_in_bytes)
+      << "The output file size is incorrect. It should be equal to the number "
+      "of frames multiplied by the frame size. This will likely affect "
+      "PSNR/SSIM calculations in a bad way.";
+
+  // We are running on a lower bitrate here so we need to settle for somewhat
+  // lower PSNR and SSIM values.
+  double actual_psnr = 0;
+  double actual_ssim = 0;
+  CompareFiles(reference_file, output_file, &actual_psnr, &actual_ssim);
+
+  const double kExpectedMinimumPSNR = 24;
+  const double kExpectedMinimumSSIM = 0.7;
+
+  EXPECT_GE(actual_psnr, kExpectedMinimumPSNR);
+  EXPECT_GE(actual_ssim, kExpectedMinimumSSIM);
+}
+
+}  // namespace
diff --git a/src/video_engine/test/auto_test/interface/vie_autotest.h b/src/video_engine/test/auto_test/interface/vie_autotest.h
new file mode 100644
index 0000000..0608701
--- /dev/null
+++ b/src/video_engine/test/auto_test/interface/vie_autotest.h
@@ -0,0 +1,138 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// vie_autotest.h
+//
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_H_
+
+#include "common_types.h"
+
+#include "voe_base.h"
+#include "voe_codec.h"
+#include "voe_hardware.h"
+#include "voe_audio_processing.h"
+
+#include "vie_base.h"
+#include "vie_capture.h"
+#include "vie_codec.h"
+#include "vie_file.h"
+#include "vie_network.h"
+#include "vie_render.h"
+#include "vie_rtp_rtcp.h"
+#include "vie_defines.h"
+#include "vie_errors.h"
+#include "video_render_defines.h"
+
+#include "vie_autotest_defines.h"
+
+#ifndef WEBRTC_ANDROID
+#include <string>
+#endif
+
+class TbCaptureDevice;
+class TbInterfaces;
+class TbVideoChannel;
+class ViEToFileRenderer;
+
+// This class provides a bunch of methods, implemented across several .cc
+// files, which runs tests on the video engine. All methods will report
+// errors using standard googletest macros, except when marked otherwise.
+class ViEAutoTest
+{
+public:
+    ViEAutoTest(void* window1, void* window2);
+    ~ViEAutoTest();
+
+    // These three are special and should not be run in a googletest harness.
+    // They keep track of their errors by themselves and return the number
+    // of errors.
+    int ViELoopbackCall();
+    int ViESimulcastCall();
+    int ViECustomCall();
+    int ViERecordCall();
+
+    // All functions except the three above are meant to run in a
+    // googletest harness.
+    void ViEStandardTest();
+    void ViEExtendedTest();
+    void ViEAPITest();
+
+    // vie_autotest_base.cc
+    void ViEBaseStandardTest();
+    void ViEBaseExtendedTest();
+    void ViEBaseAPITest();
+
+    // vie_autotest_capture.cc
+    void ViECaptureStandardTest();
+    void ViECaptureExtendedTest();
+    void ViECaptureAPITest();
+    void ViECaptureExternalCaptureTest();
+
+    // vie_autotest_codec.cc
+    void ViECodecStandardTest();
+    void ViECodecExtendedTest();
+    void ViECodecExternalCodecTest();
+    void ViECodecAPITest();
+
+    // vie_autotest_encryption.cc
+    void ViEEncryptionStandardTest();
+    void ViEEncryptionExtendedTest();
+    void ViEEncryptionAPITest();
+
+    // vie_autotest_file.ccs
+    void ViEFileStandardTest();
+    void ViEFileExtendedTest();
+    void ViEFileAPITest();
+
+    // vie_autotest_image_process.cc
+    void ViEImageProcessStandardTest();
+    void ViEImageProcessExtendedTest();
+    void ViEImageProcessAPITest();
+
+    // vie_autotest_network.cc
+    void ViENetworkStandardTest();
+    void ViENetworkExtendedTest();
+    void ViENetworkAPITest();
+
+    // vie_autotest_render.cc
+    void ViERenderStandardTest();
+    void ViERenderExtendedTest();
+    void ViERenderAPITest();
+
+    // vie_autotest_rtp_rtcp.cc
+    void ViERtpRtcpStandardTest();
+    void ViERtpRtcpExtendedTest();
+    void ViERtpRtcpAPITest();
+
+    // vie_autotest_rtp_fuzz.cc
+    void ViERtpTryInjectingRandomPacketsIntoRtpStream(long rand_seed);
+
+private:
+    void PrintAudioCodec(const webrtc::CodecInst audioCodec);
+    void PrintVideoCodec(const webrtc::VideoCodec videoCodec);
+
+    // Sets up rendering so the capture device output goes to window 1 and
+    // the video engine output goes to window 2.
+    void RenderCaptureDeviceAndOutputStream(TbInterfaces* video_engine,
+                                            TbVideoChannel* video_channel,
+                                            TbCaptureDevice* capture_device);
+
+    void* _window1;
+    void* _window2;
+
+    webrtc::VideoRenderType _renderType;
+    webrtc::VideoRender* _vrm1;
+    webrtc::VideoRender* _vrm2;
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_H_
diff --git a/src/video_engine/test/auto_test/interface/vie_autotest_android.h b/src/video_engine/test/auto_test/interface/vie_autotest_android.h
new file mode 100644
index 0000000..53b8cc3
--- /dev/null
+++ b/src/video_engine/test/auto_test/interface/vie_autotest_android.h
@@ -0,0 +1,26 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_ANDROID_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_ANDROID_H_
+
+class ViEAutoTestAndroid
+{
+public:
+	static int RunAutotest(int testSelection,
+						   int subTestSelection,
+						   void* window1,
+						   void* window2,
+						   void* javaVM,
+						   void* env,
+						   void* context);
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_ANDROID_H_
diff --git a/src/video_engine/test/auto_test/interface/vie_autotest_defines.h b/src/video_engine/test/auto_test/interface/vie_autotest_defines.h
new file mode 100644
index 0000000..5367471
--- /dev/null
+++ b/src/video_engine/test/auto_test/interface/vie_autotest_defines.h
@@ -0,0 +1,209 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// vie_autotest_defines.h
+//
+
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_DEFINES_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_DEFINES_H_
+
+#include <cassert>
+#include <stdarg.h>
+#include <stdio.h>
+#include <string>
+
+#include "engine_configurations.h"
+#include "gtest/gtest.h"
+
+#if defined(_WIN32)
+#include <windows.h>
+#elif defined (WEBRTC_ANDROID)
+#include <android/log.h>
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#include <string.h>
+#include <time.h>
+#include <sys/time.h>
+#include <stdlib.h>
+#endif
+
+// Choose how to log
+//#define VIE_LOG_TO_FILE
+#define VIE_LOG_TO_STDOUT
+
+// Choose one way to test error
+#define VIE_ASSERT_ERROR
+
+#define VIE_LOG_FILE_NAME "ViEAutotestLog.txt"
+
+#undef RGB
+#define RGB(r,g,b) r|g<<8|b<<16
+
+enum {
+  KAutoTestSleepTimeMs = 5000
+};
+
+struct AutoTestSize {
+  unsigned int width;
+  unsigned int height;
+  AutoTestSize() :
+    width(0), height(0) {
+  }
+  AutoTestSize(unsigned int iWidth, unsigned int iHeight) :
+    width(iWidth), height(iHeight) {
+  }
+};
+
+struct AutoTestOrigin {
+  unsigned int x;
+  unsigned int y;
+  AutoTestOrigin() :
+    x(0), y(0) {
+  }
+  AutoTestOrigin(unsigned int iX, unsigned int iY) :
+    x(iX), y(iY) {
+  }
+};
+
+struct AutoTestRect {
+  AutoTestSize size;
+  AutoTestOrigin origin;
+  AutoTestRect() :
+    size(), origin() {
+  }
+
+  AutoTestRect(unsigned int iX, unsigned int iY, unsigned int iWidth, unsigned int iHeight) :
+    size(iX, iY), origin(iWidth, iHeight) {
+  }
+
+  void Copy(AutoTestRect iRect) {
+    origin.x = iRect.origin.x;
+    origin.y = iRect.origin.y;
+    size.width = iRect.size.width;
+    size.height = iRect.size.height;
+  }
+};
+
+// ============================================
+
+class ViETest {
+ public:
+  static int Init() {
+#ifdef VIE_LOG_TO_FILE
+    log_file_ = fopen(VIE_LOG_FILE_NAME, "w+t");
+#else
+    log_file_ = NULL;
+#endif
+    log_str_ = new char[kMaxLogSize];
+    memset(log_str_, 0, kMaxLogSize);
+    return 0;
+  }
+
+  static int Terminate() {
+    if (log_file_) {
+      fclose(log_file_);
+      log_file_ = NULL;
+    }
+    if (log_str_) {
+      delete[] log_str_;
+      log_str_ = NULL;
+    }
+    return 0;
+  }
+
+  static void Log(const char* fmt, ...) {
+    va_list va;
+    va_start(va, fmt);
+    memset(log_str_, 0, kMaxLogSize);
+    vsprintf(log_str_, fmt, va);
+    va_end(va);
+
+    WriteToSuitableOutput(log_str_);
+  }
+
+  // Writes to a suitable output, depending on platform and log mode.
+  static void WriteToSuitableOutput(const char* message) {
+#ifdef VIE_LOG_TO_FILE
+    if (log_file_)
+    {
+      fwrite(log_str_, 1, strlen(log_str_), log_file_);
+      fwrite("\n", 1, 1, log_file_);
+      fflush(log_file_);
+    }
+#endif
+#ifdef VIE_LOG_TO_STDOUT
+#if WEBRTC_ANDROID
+    __android_log_write(ANDROID_LOG_DEBUG, "*WebRTCN*", log_str_);
+#else
+    printf("%s\n", log_str_);
+#endif
+#endif
+  }
+
+  // Deprecated(phoglund): Prefer to use googletest macros in all cases
+  // except the custom call case.
+  static int TestError(bool expr, const char* fmt, ...) {
+    if (!expr) {
+      va_list va;
+      va_start(va, fmt);
+      memset(log_str_, 0, kMaxLogSize);
+      vsprintf(log_str_, fmt, va);
+#ifdef WEBRTC_ANDROID
+      __android_log_write(ANDROID_LOG_ERROR, "*WebRTCN*", log_str_);
+#endif
+      WriteToSuitableOutput(log_str_);
+      va_end(va);
+
+      AssertError(log_str_);
+      return 1;
+    }
+    return 0;
+  }
+
+  // Returns a suitable path to write trace and result files to.
+  // You should always use this when you want to write output files.
+  // The returned path is guaranteed to end with a path separator.
+  // This function may be run at any time during the program's execution.
+  // Implemented in vie_autotest.cc
+  static std::string GetResultOutputPath();
+
+private:
+  static void AssertError(const char* message) {
+#ifdef VIE_ASSERT_ERROR
+    assert(false);
+#endif
+  }
+
+  static FILE* log_file_;
+  enum {
+    kMaxLogSize = 512
+  };
+  static char* log_str_;
+};
+
+// milliseconds
+#if defined(_WIN32)
+#define AutoTestSleep ::Sleep
+#elif defined(WEBRTC_MAC_INTEL)
+#define AutoTestSleep(x) usleep(x * 1000)
+#elif defined(WEBRTC_LINUX)
+namespace {
+  void Sleep(unsigned long x) {
+    timespec t;
+    t.tv_sec = x/1000;
+    t.tv_nsec = (x-(x/1000)*1000)*1000000;
+    nanosleep(&t,NULL);
+  }
+}
+#define AutoTestSleep ::Sleep
+#endif
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_DEFINES_H_
diff --git a/src/video_engine/test/auto_test/interface/vie_autotest_linux.h b/src/video_engine/test/auto_test/interface/vie_autotest_linux.h
new file mode 100644
index 0000000..fb1d319
--- /dev/null
+++ b/src/video_engine/test/auto_test/interface/vie_autotest_linux.h
@@ -0,0 +1,44 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_LINUX_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_LINUX_H_
+
+#include "vie_autotest_window_manager_interface.h"
+#include <X11/Xlib.h>
+#include <X11/Xutil.h>
+
+// Forward declaration
+
+class ViEAutoTestWindowManager: public ViEAutoTestWindowManagerInterface
+{
+public:
+    ViEAutoTestWindowManager();
+    virtual ~ViEAutoTestWindowManager();
+    virtual void* GetWindow1();
+    virtual void* GetWindow2();
+    virtual int TerminateWindows();
+    virtual int CreateWindows(AutoTestRect window1Size,
+                              AutoTestRect window2Size, void* window1Title,
+                              void* window2Title);
+    virtual bool SetTopmostWindow();
+
+private:
+    int ViECreateWindow(Window *outWindow, Display **outDisplay, int xpos,
+                        int ypos, int width, int height, char* title);
+    int ViEDestroyWindow(Window *window, Display *display);
+
+    Window _hwnd1;
+    Window _hwnd2;
+    Display* _hdsp1;
+    Display* _hdsp2;
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_LINUX_H_
diff --git a/src/video_engine/test/auto_test/interface/vie_autotest_mac_cocoa.h b/src/video_engine/test/auto_test/interface/vie_autotest_mac_cocoa.h
new file mode 100644
index 0000000..62ce92a
--- /dev/null
+++ b/src/video_engine/test/auto_test/interface/vie_autotest_mac_cocoa.h
@@ -0,0 +1,70 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+
+#if defined(COCOA_RENDERING)
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAC_COCOA_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAC_COCOA_H_
+
+#include "vie_autotest_window_manager_interface.h"
+
+@class CocoaRenderView;
+
+#import <Cocoa/Cocoa.h>
+
+@interface TestCocoaUi : NSObject {
+  CocoaRenderView* cocoaRenderView1_;
+  CocoaRenderView* cocoaRenderView2_;
+  NSWindow* window1_;
+  NSWindow* window2_;
+
+  AutoTestRect window1Size_;
+  AutoTestRect window2Size_;
+  void* window1Title_;
+  void* window2Title_;
+}
+
+// Must be called as a selector in the main thread.
+- (void)createWindows:(NSObject*)ignored;
+
+// Used to transfer parameters from background thread.
+- (void)prepareToCreateWindowsWithSize:(AutoTestRect)window1Size
+                               andSize:(AutoTestRect)window2Size
+                             withTitle:(void*)window1Title
+                              andTitle:(void*)window2Title;
+
+- (NSWindow*)window1;
+- (NSWindow*)window2;
+- (CocoaRenderView*)cocoaRenderView1;
+- (CocoaRenderView*)cocoaRenderView2;
+
+@end
+
+class ViEAutoTestWindowManager: public ViEAutoTestWindowManagerInterface {
+ public:
+  ViEAutoTestWindowManager();
+  virtual ~ViEAutoTestWindowManager();
+  virtual void* GetWindow1();
+  virtual void* GetWindow2();
+  virtual int CreateWindows(AutoTestRect window1Size,
+                            AutoTestRect window2Size,
+                            void* window1Title,
+                            void* window2Title);
+  virtual int TerminateWindows();
+  virtual bool SetTopmostWindow();
+
+ private:
+  TestCocoaUi* cocoa_ui_;
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAC_COCOA_H_
+#endif  // COCOA_RENDERING
diff --git a/src/video_engine/test/auto_test/interface/vie_autotest_main.h b/src/video_engine/test/auto_test/interface/vie_autotest_main.h
new file mode 100644
index 0000000..68d0079
--- /dev/null
+++ b/src/video_engine/test/auto_test/interface/vie_autotest_main.h
@@ -0,0 +1,49 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAIN_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAIN_H_
+
+#include <string>
+#include <map>
+
+class ViEAutoTestMain {
+ public:
+  ViEAutoTestMain();
+
+  // Runs the test according to the specified arguments.
+  // Pass in --automated to run in automated mode; interactive
+  // mode is default. All usual googletest flags also apply.
+  int RunTests(int argc, char** argv);
+
+ private:
+  std::map<int, std::string> index_to_test_method_map_;
+
+  static const int kInvalidChoice = -1;
+
+  // Starts interactive mode.
+  int RunInteractiveMode();
+  // Prompts the user for a specific test method in the provided test case.
+  // Returns 0 on success, nonzero otherwise.
+  int RunSpecificTestCaseIn(const std::string test_case_name);
+  // Asks the user for a particular test case to run.
+  int AskUserForTestCase();
+  // Retrieves a number from the user in the interval
+  // [min_allowed, max_allowed]. Returns kInvalidChoice on failure.
+  int AskUserForNumber(int min_allowed, int max_allowed);
+  // Runs all tests matching the provided filter. * are wildcards.
+  // Returns the test runner result (0 == OK).
+  int RunTestMatching(const std::string test_case,
+                      const std::string test_method);
+  // Runs a non-gtest test case. Choice must be [7,9]. Returns 0 on success.
+  int RunSpecialTestCase(int choice);
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_MAIN_H_
diff --git a/src/video_engine/test/auto_test/interface/vie_autotest_window_manager_interface.h b/src/video_engine/test/auto_test/interface/vie_autotest_window_manager_interface.h
new file mode 100644
index 0000000..6dd043e
--- /dev/null
+++ b/src/video_engine/test/auto_test/interface/vie_autotest_window_manager_interface.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  vie_autotest_window_manager_interface.h
+ */
+
+#include "vie_autotest_defines.h"
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOW_MANAGER_INTERFACE_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOW_MANAGER_INTERFACE_H_
+
+class ViEAutoTestWindowManagerInterface
+{
+public:
+    virtual int CreateWindows(AutoTestRect window1Size,
+                              AutoTestRect window2Size, void* window1Title,
+                              void* window2Title) = 0;
+    virtual int TerminateWindows() = 0;
+    virtual void* GetWindow1() = 0;
+    virtual void* GetWindow2() = 0;
+    virtual bool SetTopmostWindow() = 0;
+    virtual ~ViEAutoTestWindowManagerInterface() {}
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOW_MANAGER_INTERFACE_H_
diff --git a/src/video_engine/test/auto_test/interface/vie_autotest_windows.h b/src/video_engine/test/auto_test/interface/vie_autotest_windows.h
new file mode 100644
index 0000000..314a121
--- /dev/null
+++ b/src/video_engine/test/auto_test/interface/vie_autotest_windows.h
@@ -0,0 +1,64 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOWS_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOWS_H_
+
+#include "vie_autotest_window_manager_interface.h"
+#include "engine_configurations.h"
+
+#include <windows.h>
+#define TITLE_LENGTH 1024
+
+// Forward declaration
+namespace webrtc {
+class ThreadWrapper;
+class CriticalSectionWrapper;
+}
+
+class ViEAutoTestWindowManager: public ViEAutoTestWindowManagerInterface
+{
+public:
+    ViEAutoTestWindowManager();
+    virtual ~ViEAutoTestWindowManager();
+    virtual void* GetWindow1();
+    virtual void* GetWindow2();
+    virtual int CreateWindows(AutoTestRect window1Size,
+                              AutoTestRect window2Size, void* window1Title,
+                              void* window2Title);
+    virtual int TerminateWindows();
+    virtual bool SetTopmostWindow();
+protected:
+    static bool EventProcess(void* obj);
+    bool EventLoop();
+
+private:
+    int ViECreateWindow(HWND &hwndMain, int xPos, int yPos, int width,
+                        int height, TCHAR* className);
+    int ViEDestroyWindow(HWND& hwnd);
+
+    void* _window1;
+    void* _window2;
+
+    bool _terminate;
+    webrtc::ThreadWrapper& _eventThread;
+    webrtc::CriticalSectionWrapper& _crit;
+    HWND _hwndMain;
+    HWND _hwnd1;
+    HWND _hwnd2;
+
+    AutoTestRect _hwnd1Size;
+    AutoTestRect _hwnd2Size;
+    TCHAR _hwnd1Title[TITLE_LENGTH];
+    TCHAR _hwnd2Title[TITLE_LENGTH];
+
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_AUTOTEST_WINDOWS_H_
diff --git a/src/video_engine/test/auto_test/interface/vie_file_based_comparison_tests.h b/src/video_engine/test/auto_test/interface/vie_file_based_comparison_tests.h
new file mode 100644
index 0000000..a9560bc
--- /dev/null
+++ b/src/video_engine/test/auto_test/interface/vie_file_based_comparison_tests.h
@@ -0,0 +1,68 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VIDEO_ENGINE_TEST_AUTO_TEST_INTERFACE_VIE_COMPARISON_TESTS_H_
+#define SRC_VIDEO_ENGINE_TEST_AUTO_TEST_INTERFACE_VIE_COMPARISON_TESTS_H_
+
+#include <string>
+
+class FrameDropDetector;
+class ViEToFileRenderer;
+
+// This class contains comparison tests, which will exercise video engine
+// functionality and then run comparison tests on the result using PSNR and
+// SSIM algorithms. These tests are intended mostly as sanity checks so that
+// we know we are outputting roughly the right thing and not random noise or
+// black screens.
+//
+// We will set up a fake ExternalCapture device which will pose as a webcam
+// and read the input from the provided raw YUV file. Output will be written
+// as a local preview in the local file renderer; the remote side output gets
+// written to the provided remote file renderer.
+//
+// The local preview is a straight, unaltered copy of the input. This can be
+// useful for comparisons if the test method contains several stages where the
+// input is restarted between stages.
+class ViEFileBasedComparisonTests {
+ public:
+  // Test a typical simple call setup. Returns false if the input file
+  // could not be opened; reports errors using googletest macros otherwise.
+  bool TestCallSetup(
+      const std::string& i420_test_video_path,
+      int width,
+      int height,
+      ViEToFileRenderer* local_file_renderer,
+      ViEToFileRenderer* remote_file_renderer);
+
+  // Tries testing the I420 and VP8 codecs in turn. Returns false if the
+  // input file could not be opened; reports errors using googletest macros
+  // otherwise.
+  bool TestCodecs(
+      const std::string& i420_video_file,
+      int width,
+      int height,
+      ViEToFileRenderer* local_file_renderer,
+      ViEToFileRenderer* remote_file_renderer);
+
+  // Runs a full stack test using the VP8 codec. Tests the full stack and uses
+  // RTP timestamps to sync frames between the endpoints.
+  void TestFullStack(
+      const std::string& i420_video_file,
+      int width,
+      int height,
+      int bit_rate_kbps,
+      int packet_loss_percent,
+      int network_delay_ms,
+      ViEToFileRenderer* local_file_renderer,
+      ViEToFileRenderer* remote_file_renderer,
+      FrameDropDetector* frame_drop_detector);
+};
+
+#endif  // SRC_VIDEO_ENGINE_TEST_AUTO_TEST_INTERFACE_VIE_COMPARISON_TESTS_H_
diff --git a/src/video_engine/test/auto_test/interface/vie_window_creator.h b/src/video_engine/test/auto_test/interface/vie_window_creator.h
new file mode 100644
index 0000000..c13a888
--- /dev/null
+++ b/src/video_engine/test/auto_test/interface/vie_window_creator.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_HELPERS_VIE_WINDOW_CREATOR_H_
+#define SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_HELPERS_VIE_WINDOW_CREATOR_H_
+
+class ViEAutoTestWindowManagerInterface;
+
+class ViEWindowCreator {
+ public:
+  ViEWindowCreator();
+  virtual ~ViEWindowCreator();
+
+  // The pointer returned here will still be owned by this object.
+  // Only use it to retrieve the created windows.
+  ViEAutoTestWindowManagerInterface* CreateTwoWindows();
+
+  // Terminates windows opened by CreateTwoWindows, which must
+  // have been called before this method.
+  void TerminateWindows();
+ private:
+  ViEAutoTestWindowManagerInterface* window_manager_;
+};
+
+#endif  // SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_HELPERS_VIE_WINDOW_CREATOR_H_
diff --git a/src/video_engine/test/auto_test/interface/vie_window_manager_factory.h b/src/video_engine/test/auto_test/interface/vie_window_manager_factory.h
new file mode 100644
index 0000000..a85280d
--- /dev/null
+++ b/src/video_engine/test/auto_test/interface/vie_window_manager_factory.h
@@ -0,0 +1,25 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_WINDOW_MANAGER_FACTORY_H_
+#define SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_WINDOW_MANAGER_FACTORY_H_
+
+class ViEAutoTestWindowManagerInterface;
+
+class ViEWindowManagerFactory {
+ public:
+  // This method is implemented in different files depending on platform.
+  // The caller is responsible for freeing the resulting object using
+  // the delete operator.
+  static ViEAutoTestWindowManagerInterface*
+  CreateWindowManagerForCurrentPlatform();
+};
+
+#endif  // SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_VIE_WINDOW_MANAGER_FACTORY_H_
diff --git a/src/video_engine/test/auto_test/media/captureDeviceImage.bmp b/src/video_engine/test/auto_test/media/captureDeviceImage.bmp
new file mode 100644
index 0000000..6cd34ba
--- /dev/null
+++ b/src/video_engine/test/auto_test/media/captureDeviceImage.bmp
Binary files differ
diff --git a/src/video_engine/test/auto_test/media/captureDeviceImage.jpg b/src/video_engine/test/auto_test/media/captureDeviceImage.jpg
new file mode 100644
index 0000000..3bb3ba4
--- /dev/null
+++ b/src/video_engine/test/auto_test/media/captureDeviceImage.jpg
Binary files differ
diff --git a/src/video_engine/test/auto_test/media/renderStartImage.bmp b/src/video_engine/test/auto_test/media/renderStartImage.bmp
new file mode 100644
index 0000000..c443a58
--- /dev/null
+++ b/src/video_engine/test/auto_test/media/renderStartImage.bmp
Binary files differ
diff --git a/src/video_engine/test/auto_test/media/renderStartImage.jpg b/src/video_engine/test/auto_test/media/renderStartImage.jpg
new file mode 100644
index 0000000..b10a842
--- /dev/null
+++ b/src/video_engine/test/auto_test/media/renderStartImage.jpg
Binary files differ
diff --git a/src/video_engine/test/auto_test/media/renderTimeoutImage.bmp b/src/video_engine/test/auto_test/media/renderTimeoutImage.bmp
new file mode 100644
index 0000000..8159bad
--- /dev/null
+++ b/src/video_engine/test/auto_test/media/renderTimeoutImage.bmp
Binary files differ
diff --git a/src/video_engine/test/auto_test/media/renderTimeoutImage.jpg b/src/video_engine/test/auto_test/media/renderTimeoutImage.jpg
new file mode 100644
index 0000000..cb34d67
--- /dev/null
+++ b/src/video_engine/test/auto_test/media/renderTimeoutImage.jpg
Binary files differ
diff --git a/src/video_engine/test/auto_test/primitives/base_primitives.cc b/src/video_engine/test/auto_test/primitives/base_primitives.cc
new file mode 100644
index 0000000..2bc42fd
--- /dev/null
+++ b/src/video_engine/test/auto_test/primitives/base_primitives.cc
@@ -0,0 +1,67 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "base_primitives.h"
+
+#include "vie_autotest.h"
+#include "vie_autotest_defines.h"
+#include "video_capture_factory.h"
+
+void TestI420CallSetup(webrtc::ViECodec* codec_interface,
+                       webrtc::VideoEngine* video_engine,
+                       webrtc::ViEBase* base_interface,
+                       webrtc::ViENetwork* network_interface,
+                       int video_channel,
+                       const char* device_name) {
+  webrtc::VideoCodec video_codec;
+  memset(&video_codec, 0, sizeof(webrtc::VideoCodec));
+
+  // Set up the codec interface with all known receive codecs and with
+  // I420 as the send codec.
+  for (int i = 0; i < codec_interface->NumberOfCodecs(); i++) {
+    EXPECT_EQ(0, codec_interface->GetCodec(i, video_codec));
+
+    // Try to keep the test frame size small when I420.
+    if (video_codec.codecType == webrtc::kVideoCodecI420) {
+      video_codec.width = 176;
+      video_codec.height = 144;
+      EXPECT_EQ(0, codec_interface->SetSendCodec(video_channel, video_codec));
+    }
+
+    EXPECT_EQ(0, codec_interface->SetReceiveCodec(video_channel, video_codec));
+  }
+
+  // Verify that we really found the I420 codec.
+  EXPECT_EQ(0, codec_interface->GetSendCodec(video_channel, video_codec));
+  EXPECT_EQ(webrtc::kVideoCodecI420, video_codec.codecType);
+
+  // Set up senders and receivers.
+  char version[1024] = "";
+  EXPECT_EQ(0, base_interface->GetVersion(version));
+  ViETest::Log("\nUsing WebRTC Video Engine version: %s", version);
+
+  const char *ipAddress = "127.0.0.1";
+  WebRtc_UWord16 rtpPortListen = 6100;
+  WebRtc_UWord16 rtpPortSend = 6100;
+  EXPECT_EQ(0, network_interface->SetLocalReceiver(video_channel,
+                                                   rtpPortListen));
+  EXPECT_EQ(0, base_interface->StartReceive(video_channel));
+  EXPECT_EQ(0, network_interface->SetSendDestination(video_channel, ipAddress,
+                                                     rtpPortSend));
+  EXPECT_EQ(0, base_interface->StartSend(video_channel));
+
+  // Call started.
+  ViETest::Log("Call started");
+
+  AutoTestSleep(KAutoTestSleepTimeMs);
+
+  // Done.
+  EXPECT_EQ(0, base_interface->StopSend(video_channel));
+}
diff --git a/src/video_engine/test/auto_test/primitives/base_primitives.h b/src/video_engine/test/auto_test/primitives/base_primitives.h
new file mode 100644
index 0000000..13f7928
--- /dev/null
+++ b/src/video_engine/test/auto_test/primitives/base_primitives.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_SOURCE_BASE_PRIMITIVES_H_
+#define SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_SOURCE_BASE_PRIMITIVES_H_
+
+namespace webrtc {
+class VideoEngine;
+class ViEBase;
+class ViECodec;
+class ViENetwork;
+}
+
+// Tests a I420-to-I420 call. This test exercises the most basic WebRTC
+// functionality by training the codec interface to recognize the most common
+// codecs, and the initiating a I420 call. A video channel with a capture device
+// must be set up prior to this call.
+void TestI420CallSetup(webrtc::ViECodec* codec_interface,
+                       webrtc::VideoEngine* video_engine,
+                       webrtc::ViEBase* base_interface,
+                       webrtc::ViENetwork* network_interface,
+                       int video_channel,
+                       const char* device_name);
+
+#endif  // SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_SOURCE_BASE_PRIMITIVES_H_
diff --git a/src/video_engine/test/auto_test/primitives/codec_primitives.cc b/src/video_engine/test/auto_test/primitives/codec_primitives.cc
new file mode 100644
index 0000000..2d43440
--- /dev/null
+++ b/src/video_engine/test/auto_test/primitives/codec_primitives.cc
@@ -0,0 +1,184 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "codec_primitives.h"
+
+#include "general_primitives.h"
+#include "vie_autotest.h"
+#include "vie_autotest_defines.h"
+#include "vie_to_file_renderer.h"
+#include "video_capture_factory.h"
+#include "tb_interfaces.h"
+
+// Helper functions.
+
+void TestCodecImageProcess(webrtc::VideoCodec video_codec,
+                           webrtc::ViECodec* codec_interface,
+                           int video_channel,
+                           webrtc::ViEImageProcess* image_process) {
+
+  EXPECT_EQ(0, codec_interface->SetSendCodec(video_channel, video_codec));
+  FrameCounterEffectFilter frame_counter;
+  EXPECT_EQ(0, image_process->RegisterRenderEffectFilter(video_channel,
+                                                         frame_counter));
+  AutoTestSleep (KAutoTestSleepTimeMs);
+
+  int max_number_of_rendered_frames = video_codec.maxFramerate *
+      KAutoTestSleepTimeMs / 1000;
+
+  if (video_codec.codecType == webrtc::kVideoCodecI420) {
+    // Due to that I420 needs a huge bandwidth, rate control can set
+    // frame rate very low. This happen since we use the same channel
+    // as we just tested with vp8.
+    EXPECT_GT(frame_counter.numFrames, 0);
+  } else {
+#ifdef WEBRTC_ANDROID
+    // Special case to get the autotest to pass on some slow devices
+    EXPECT_GT(frame_counter.numFrames, max_number_of_rendered_frames / 6);
+#else
+    EXPECT_GT(frame_counter.numFrames, max_number_of_rendered_frames / 4);
+#endif
+  }
+  EXPECT_EQ(0, image_process->DeregisterRenderEffectFilter(video_channel));
+}
+
+// Test switching from i420 to VP8 as send codec and make sure that
+// the codec observer gets called after the switch.
+void TestCodecCallbacks(webrtc::ViEBase *& base_interface,
+                        webrtc::ViECodec *codec_interface,
+                        int video_channel,
+                        int forced_codec_width,
+                        int forced_codec_height) {
+
+  // Set I420 as send codec so we don't make any assumptions about what
+  // we currently have as send codec:
+  SetSendCodec(webrtc::kVideoCodecI420, codec_interface, video_channel,
+               forced_codec_width, forced_codec_height);
+
+  // Register the observer:
+  ViEAutotestCodecObserver codec_observer;
+  EXPECT_EQ(0, codec_interface->RegisterEncoderObserver(video_channel,
+                                                        codec_observer));
+  EXPECT_EQ(0, codec_interface->RegisterDecoderObserver(video_channel,
+                                                   codec_observer));
+
+  // Make the switch.
+  ViETest::Log("Testing codec callbacks...");
+
+  SetSendCodec(webrtc::kVideoCodecVP8, codec_interface, video_channel,
+               forced_codec_width, forced_codec_height);
+
+  AutoTestSleep (KAutoTestSleepTimeMs);
+
+  // Verify that we got the right codec.
+  EXPECT_EQ(webrtc::kVideoCodecVP8, codec_observer.incomingCodec.codecType);
+
+  // Clean up.
+  EXPECT_EQ(0, codec_interface->DeregisterEncoderObserver(video_channel));
+  EXPECT_EQ(0, codec_interface->DeregisterDecoderObserver(video_channel));
+
+  // Verify results.
+  EXPECT_GT(codec_observer.incomingCodecCalled, 0);
+  EXPECT_GT(codec_observer.incomingRatecalled, 0);
+  EXPECT_GT(codec_observer.outgoingRatecalled, 0);
+}
+
+void TestCodecs(const TbInterfaces& interfaces,
+                int capture_id,
+                int video_channel,
+                int forced_codec_width,
+                int forced_codec_height) {
+  webrtc::VideoEngine *video_engine_interface = interfaces.video_engine;
+  webrtc::ViEBase *base_interface = interfaces.base;
+  webrtc::ViECapture *capture_interface = interfaces.capture;
+  webrtc::ViERender *render_interface = interfaces.render;
+  webrtc::ViECodec *codec_interface = interfaces.codec;
+  webrtc::ViENetwork *network_interface = interfaces.network;
+
+  // ***************************************************************
+  // Engine ready. Begin testing class
+  // ***************************************************************
+  webrtc::VideoCodec video_codec;
+  memset(&video_codec, 0, sizeof (webrtc::VideoCodec));
+
+  // Set up all receive codecs. This basically trains the codec interface
+  // to be able to recognize all receive codecs based on payload type.
+  for (int idx = 0; idx < codec_interface->NumberOfCodecs(); idx++) {
+    EXPECT_EQ(0, codec_interface->GetCodec(idx, video_codec));
+    SetSuitableResolution(&video_codec,
+                          forced_codec_width,
+                          forced_codec_height);
+
+    EXPECT_EQ(0, codec_interface->SetReceiveCodec(video_channel, video_codec));
+  }
+  const char *ip_address = "127.0.0.1";
+  const unsigned short rtp_port = 6000;
+  EXPECT_EQ(0, network_interface->SetLocalReceiver(video_channel, rtp_port));
+  EXPECT_EQ(0, base_interface->StartReceive(video_channel));
+  EXPECT_EQ(0, network_interface->SetSendDestination(video_channel, ip_address,
+                                                     rtp_port));
+  EXPECT_EQ(0, base_interface->StartSend(video_channel));
+
+  // Run all found codecs
+  webrtc::ViEImageProcess *image_process =
+      webrtc::ViEImageProcess::GetInterface(video_engine_interface);
+  EXPECT_TRUE(image_process != NULL);
+
+  ViETest::Log("Loop through all codecs for %d seconds",
+               KAutoTestSleepTimeMs / 1000);
+  for (int i = 0; i < codec_interface->NumberOfCodecs(); i++) {
+    EXPECT_EQ(0, codec_interface->GetCodec(i, video_codec));
+
+    if (video_codec.codecType == webrtc::kVideoCodecRED ||
+        video_codec.codecType == webrtc::kVideoCodecULPFEC) {
+      ViETest::Log("\t %d. %s not tested", i, video_codec.plName);
+    } else {
+      ViETest::Log("\t %d. %s", i, video_codec.plName);
+      SetSuitableResolution(&video_codec, forced_codec_width,
+                            forced_codec_height);
+      TestCodecImageProcess(video_codec, codec_interface, video_channel,
+                            image_process);
+    }
+  }
+  image_process->Release();
+
+  TestCodecCallbacks(base_interface, codec_interface, video_channel,
+                     forced_codec_width, forced_codec_height);
+
+  ViETest::Log("Done!");
+
+  // ***************************************************************
+  // Testing finished. Tear down Video Engine
+  // ***************************************************************
+  EXPECT_EQ(0, base_interface->StopSend(video_channel));
+  EXPECT_EQ(0, base_interface->StopReceive(video_channel));
+  EXPECT_EQ(0, render_interface->StopRender(capture_id));
+  EXPECT_EQ(0, render_interface->StopRender(video_channel));
+  EXPECT_EQ(0, render_interface->RemoveRenderer(capture_id));
+  EXPECT_EQ(0, render_interface->RemoveRenderer(video_channel));
+  EXPECT_EQ(0, capture_interface->DisconnectCaptureDevice(video_channel));
+  EXPECT_EQ(0, base_interface->DeleteChannel(video_channel));
+}
+
+void SetSendCodec(webrtc::VideoCodecType of_type,
+                  webrtc::ViECodec* codec_interface,
+                  int video_channel,
+                  int forced_codec_width,
+                  int forced_codec_height) {
+  webrtc::VideoCodec codec;
+  bool ok;
+  EXPECT_TRUE(ok = FindSpecificCodec(of_type, codec_interface, &codec));
+  if (!ok) {
+    return;
+  }
+
+  SetSuitableResolution(&codec, forced_codec_width, forced_codec_height);
+  EXPECT_EQ(0, codec_interface->SetSendCodec(video_channel, codec));
+}
diff --git a/src/video_engine/test/auto_test/primitives/codec_primitives.h b/src/video_engine/test/auto_test/primitives/codec_primitives.h
new file mode 100644
index 0000000..e778044
--- /dev/null
+++ b/src/video_engine/test/auto_test/primitives/codec_primitives.h
@@ -0,0 +1,126 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_PRIMITIVES_CODEC_PRIMITIVES_H_
+#define WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_PRIMITIVES_CODEC_PRIMITIVES_H_
+
+#include "video_engine/include/vie_codec.h"
+#include "video_engine/include/vie_image_process.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_defines.h"
+#include "video_engine/test/auto_test/primitives/general_primitives.h"
+
+class TbInterfaces;
+
+// Tests that a codec actually renders frames by registering a basic
+// render effect filter on the codec and then running it. This test is
+// quite lenient on the number of frames that get rendered, so it should not
+// be seen as a end-user-visible quality measure - it is more a sanity check
+// that the codec at least gets some frames through.
+
+// The codec resolution can be forced by specifying the forced* variables
+// (pass in kDoNotForceResolution if you don't care).
+void TestCodecs(const TbInterfaces& interfaces,
+                int capture_id,
+                int video_channel,
+                int forced_codec_width,
+                int forced_codec_height);
+
+// This helper function will set the send codec in the codec interface to a
+// codec of the specified type. It will generate a test failure if we do not
+// support the provided codec type.
+
+// The codec resolution can be forced by specifying the forced* variables
+// (pass in kDoNotForceResolution if you don't care).
+void SetSendCodec(webrtc::VideoCodecType of_type,
+                  webrtc::ViECodec* codec_interface,
+                  int video_channel,
+                  int forced_codec_width,
+                  int forced_codec_height);
+
+class ViEAutotestCodecObserver: public webrtc::ViEEncoderObserver,
+                                public webrtc::ViEDecoderObserver {
+ public:
+  int incomingCodecCalled;
+  int incomingRatecalled;
+  int outgoingRatecalled;
+
+  unsigned char lastPayloadType;
+  unsigned short lastWidth;
+  unsigned short lastHeight;
+
+  unsigned int lastOutgoingFramerate;
+  unsigned int lastOutgoingBitrate;
+  unsigned int lastIncomingFramerate;
+  unsigned int lastIncomingBitrate;
+
+  webrtc::VideoCodec incomingCodec;
+
+  ViEAutotestCodecObserver() {
+    incomingCodecCalled = 0;
+    incomingRatecalled = 0;
+    outgoingRatecalled = 0;
+    lastPayloadType = 0;
+    lastWidth = 0;
+    lastHeight = 0;
+    lastOutgoingFramerate = 0;
+    lastOutgoingBitrate = 0;
+    lastIncomingFramerate = 0;
+    lastIncomingBitrate = 0;
+    memset(&incomingCodec, 0, sizeof(incomingCodec));
+  }
+  virtual void IncomingCodecChanged(const int videoChannel,
+                                    const webrtc::VideoCodec& videoCodec) {
+    incomingCodecCalled++;
+    lastPayloadType = videoCodec.plType;
+    lastWidth = videoCodec.width;
+    lastHeight = videoCodec.height;
+
+    memcpy(&incomingCodec, &videoCodec, sizeof(videoCodec));
+  }
+
+  virtual void IncomingRate(const int videoChannel,
+                            const unsigned int framerate,
+                            const unsigned int bitrate) {
+    incomingRatecalled++;
+    lastIncomingFramerate += framerate;
+    lastIncomingBitrate += bitrate;
+  }
+
+  virtual void OutgoingRate(const int videoChannel,
+                            const unsigned int framerate,
+                            const unsigned int bitrate) {
+    outgoingRatecalled++;
+    lastOutgoingFramerate += framerate;
+    lastOutgoingBitrate += bitrate;
+  }
+
+  virtual void RequestNewKeyFrame(const int videoChannel) {
+  }
+};
+
+class FrameCounterEffectFilter : public webrtc::ViEEffectFilter
+{
+ public:
+  int numFrames;
+  FrameCounterEffectFilter() {
+    numFrames = 0;
+  }
+  virtual ~FrameCounterEffectFilter() {
+  }
+
+  virtual int Transform(int size, unsigned char* frameBuffer,
+                        unsigned int timeStamp90KHz, unsigned int width,
+                        unsigned int height) {
+    numFrames++;
+    return 0;
+  }
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_PRIMITIVES_CODEC_PRIMITIVES_H_
diff --git a/src/video_engine/test/auto_test/primitives/framedrop_primitives.cc b/src/video_engine/test/auto_test/primitives/framedrop_primitives.cc
new file mode 100644
index 0000000..fc7d87b
--- /dev/null
+++ b/src/video_engine/test/auto_test/primitives/framedrop_primitives.cc
@@ -0,0 +1,513 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+#include <string>
+
+#include "modules/video_capture/main/interface/video_capture_factory.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "testsupport/fileutils.h"
+#include "testsupport/frame_reader.h"
+#include "testsupport/frame_writer.h"
+#include "video_engine/test/auto_test/interface/vie_autotest.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_defines.h"
+#include "video_engine/test/auto_test/primitives/framedrop_primitives.h"
+#include "video_engine/test/auto_test/primitives/general_primitives.h"
+#include "video_engine/test/libvietest/include/tb_interfaces.h"
+#include "video_engine/test/libvietest/include/tb_external_transport.h"
+#include "video_engine/test/libvietest/include/vie_to_file_renderer.h"
+
+// Tracks which frames are created on the local side and reports them to the
+// FrameDropDetector class.
+class CreatedTimestampEffectFilter : public webrtc::ViEEffectFilter {
+ public:
+  explicit CreatedTimestampEffectFilter(FrameDropDetector* frame_drop_detector)
+      : frame_drop_detector_(frame_drop_detector) {}
+  virtual ~CreatedTimestampEffectFilter() {}
+  virtual int Transform(int size, unsigned char* frameBuffer,
+                        unsigned int timeStamp90KHz, unsigned int width,
+                        unsigned int height) {
+    frame_drop_detector_->ReportFrameState(FrameDropDetector::kCreated,
+                                           timeStamp90KHz);
+    return 0;
+  }
+
+ private:
+  FrameDropDetector* frame_drop_detector_;
+};
+
+// Tracks which frames are sent in external transport on the local side
+// and reports them to the FrameDropDetector class.
+class FrameSentCallback : public SendFrameCallback {
+ public:
+  explicit FrameSentCallback(FrameDropDetector* frame_drop_detector)
+      : frame_drop_detector_(frame_drop_detector) {}
+  virtual ~FrameSentCallback() {}
+  virtual void FrameSent(unsigned int rtp_timestamp) {
+    frame_drop_detector_->ReportFrameState(FrameDropDetector::kSent,
+                                           rtp_timestamp);
+  }
+
+ private:
+  FrameDropDetector* frame_drop_detector_;
+};
+
+// Tracks which frames are received in external transport on the remote side
+// and reports them to the FrameDropDetector class.
+class FrameReceivedCallback : public ReceiveFrameCallback {
+ public:
+  explicit FrameReceivedCallback(FrameDropDetector* frame_drop_detector)
+      : frame_drop_detector_(frame_drop_detector) {}
+  virtual ~FrameReceivedCallback() {}
+  virtual void FrameReceived(unsigned int rtp_timestamp) {
+    frame_drop_detector_->ReportFrameState(FrameDropDetector::kReceived,
+                                           rtp_timestamp);
+  }
+
+ private:
+  FrameDropDetector* frame_drop_detector_;
+};
+
+// Tracks when frames are decoded on the remote side (received from the
+// jitter buffer) and reports them to the FrameDropDetector class.
+class DecodedTimestampEffectFilter : public webrtc::ViEEffectFilter {
+ public:
+  explicit DecodedTimestampEffectFilter(FrameDropDetector* frame_drop_detector)
+      : frame_drop_detector_(frame_drop_detector) {}
+  virtual ~DecodedTimestampEffectFilter() {}
+  virtual int Transform(int size, unsigned char* frameBuffer,
+                        unsigned int timeStamp90KHz, unsigned int width,
+                        unsigned int height) {
+    frame_drop_detector_->ReportFrameState(FrameDropDetector::kDecoded,
+                                           timeStamp90KHz);
+    return 0;
+  }
+
+ private:
+  FrameDropDetector* frame_drop_detector_;
+};
+
+void TestFullStack(const TbInterfaces& interfaces,
+                   int capture_id,
+                   int video_channel,
+                   int width,
+                   int height,
+                   int bit_rate_kbps,
+                   int packet_loss_percent,
+                   int network_delay_ms,
+                   FrameDropDetector* frame_drop_detector) {
+  webrtc::VideoEngine *video_engine_interface = interfaces.video_engine;
+  webrtc::ViEBase *base_interface = interfaces.base;
+  webrtc::ViECapture *capture_interface = interfaces.capture;
+  webrtc::ViERender *render_interface = interfaces.render;
+  webrtc::ViECodec *codec_interface = interfaces.codec;
+  webrtc::ViENetwork *network_interface = interfaces.network;
+
+  // ***************************************************************
+  // Engine ready. Begin testing class
+  // ***************************************************************
+  webrtc::VideoCodec video_codec;
+  memset(&video_codec, 0, sizeof (webrtc::VideoCodec));
+
+  // Set up all receive codecs. This basically setup the codec interface
+  // to be able to recognize all receive codecs based on payload type.
+  for (int idx = 0; idx < codec_interface->NumberOfCodecs(); idx++) {
+    EXPECT_EQ(0, codec_interface->GetCodec(idx, video_codec));
+    SetSuitableResolution(&video_codec, width, height);
+
+    EXPECT_EQ(0, codec_interface->SetReceiveCodec(video_channel, video_codec));
+  }
+
+  // Configure External transport to simulate network interference:
+  TbExternalTransport external_transport(*interfaces.network);
+  external_transport.SetPacketLoss(packet_loss_percent);
+  external_transport.SetNetworkDelay(network_delay_ms);
+
+  FrameSentCallback frame_sent_callback(frame_drop_detector);
+  FrameReceivedCallback frame_received_callback(frame_drop_detector);
+  external_transport.RegisterSendFrameCallback(&frame_sent_callback);
+  external_transport.RegisterReceiveFrameCallback(&frame_received_callback);
+  EXPECT_EQ(0, network_interface->RegisterSendTransport(video_channel,
+                                                        external_transport));
+  EXPECT_EQ(0, base_interface->StartReceive(video_channel));
+
+  // Setup only the VP8 codec, which is what we'll use.
+  webrtc::VideoCodec codec;
+  EXPECT_TRUE(FindSpecificCodec(webrtc::kVideoCodecVP8, codec_interface,
+                                &codec));
+  codec.startBitrate = bit_rate_kbps;
+  codec.maxBitrate = bit_rate_kbps;
+  codec.width = width;
+  codec.height = height;
+  EXPECT_EQ(0, codec_interface->SetSendCodec(video_channel, codec));
+
+  webrtc::ViEImageProcess *image_process =
+      webrtc::ViEImageProcess::GetInterface(video_engine_interface);
+  EXPECT_TRUE(image_process);
+
+  // Setup the effect filters
+  CreatedTimestampEffectFilter create_filter(frame_drop_detector);
+  EXPECT_EQ(0, image_process->RegisterSendEffectFilter(video_channel,
+                                                       create_filter));
+  DecodedTimestampEffectFilter decode_filter(frame_drop_detector);
+  EXPECT_EQ(0, image_process->RegisterRenderEffectFilter(video_channel,
+                                                         decode_filter));
+  // Send video.
+  EXPECT_EQ(0, base_interface->StartSend(video_channel));
+  AutoTestSleep(KAutoTestSleepTimeMs);
+
+  // Cleanup.
+  EXPECT_EQ(0, image_process->DeregisterSendEffectFilter(video_channel));
+  EXPECT_EQ(0, image_process->DeregisterRenderEffectFilter(video_channel));
+  image_process->Release();
+  ViETest::Log("Done!");
+
+  WebRtc_Word32 num_rtp_packets = 0;
+  WebRtc_Word32 num_dropped_packets = 0;
+  WebRtc_Word32 num_rtcp_packets = 0;
+  external_transport.GetStats(num_rtp_packets, num_dropped_packets,
+                              num_rtcp_packets);
+  ViETest::Log("RTP packets    : %5d", num_rtp_packets);
+  ViETest::Log("Dropped packets: %5d", num_dropped_packets);
+  ViETest::Log("RTCP packets   : %5d", num_rtcp_packets);
+
+  // ***************************************************************
+  // Testing finished. Tear down Video Engine
+  // ***************************************************************
+  EXPECT_EQ(0, base_interface->StopSend(video_channel));
+  EXPECT_EQ(0, base_interface->StopReceive(video_channel));
+  EXPECT_EQ(0, network_interface->DeregisterSendTransport(video_channel));
+  EXPECT_EQ(0, render_interface->StopRender(capture_id));
+  EXPECT_EQ(0, render_interface->StopRender(video_channel));
+  EXPECT_EQ(0, render_interface->RemoveRenderer(capture_id));
+  EXPECT_EQ(0, render_interface->RemoveRenderer(video_channel));
+  EXPECT_EQ(0, capture_interface->DisconnectCaptureDevice(video_channel));
+  EXPECT_EQ(0, base_interface->DeleteChannel(video_channel));
+}
+
+void FixOutputFileForComparison(const std::string& output_file,
+                                int frame_length_in_bytes,
+                                const std::vector<Frame*>& frames) {
+  webrtc::test::FrameReaderImpl frame_reader(output_file,
+                                             frame_length_in_bytes);
+  const std::string temp_file = output_file + ".fixed";
+  webrtc::test::FrameWriterImpl frame_writer(temp_file, frame_length_in_bytes);
+  frame_reader.Init();
+  frame_writer.Init();
+
+  ASSERT_FALSE(frames.front()->dropped_at_render) << "It should not be "
+      "possible to drop the first frame. Both because we don't have anything "
+      "useful to fill that gap with and it is impossible to detect it without "
+      "any previous timestamps to compare with.";
+
+  WebRtc_UWord8* last_frame_data = new WebRtc_UWord8[frame_length_in_bytes];
+
+  // Process the file and write frame duplicates for all dropped frames.
+  for (std::vector<Frame*>::const_iterator it = frames.begin();
+       it != frames.end(); ++it) {
+    if ((*it)->dropped_at_render) {
+      // Write the previous frame to the output file:
+      EXPECT_TRUE(frame_writer.WriteFrame(last_frame_data));
+    } else {
+      EXPECT_TRUE(frame_reader.ReadFrame(last_frame_data));
+      EXPECT_TRUE(frame_writer.WriteFrame(last_frame_data));
+    }
+  }
+  delete[] last_frame_data;
+  frame_reader.Close();
+  frame_writer.Close();
+  ASSERT_EQ(0, std::remove(output_file.c_str()));
+  ASSERT_EQ(0, std::rename(temp_file.c_str(), output_file.c_str()));
+}
+
+void FrameDropDetector::ReportFrameState(State state, unsigned int timestamp) {
+  dirty_ = true;
+  switch (state) {
+    case kCreated: {
+      int number = created_frames_vector_.size();
+      Frame* frame = new Frame(number, timestamp);
+      frame->created_timestamp_in_us_ =
+          webrtc::TickTime::MicrosecondTimestamp();
+      created_frames_vector_.push_back(frame);
+      created_frames_[timestamp] = frame;
+      num_created_frames_++;
+      break;
+    }
+    case kSent:
+      sent_frames_[timestamp] = webrtc::TickTime::MicrosecondTimestamp();
+      if (timestamp_diff_ == 0) {
+        // When the first created frame arrives we calculate the fixed
+        // difference between the timestamps of the frames entering and leaving
+        // the encoder. This diff is used to identify the frames from the
+        // created_frames_ map.
+        timestamp_diff_ =
+            timestamp - created_frames_vector_.front()->frame_timestamp_;
+      }
+      num_sent_frames_++;
+      break;
+    case kReceived:
+      received_frames_[timestamp] = webrtc::TickTime::MicrosecondTimestamp();
+      num_received_frames_++;
+      break;
+    case kDecoded:
+      decoded_frames_[timestamp] = webrtc::TickTime::MicrosecondTimestamp();
+      num_decoded_frames_++;
+      break;
+    case kRendered:
+      rendered_frames_[timestamp] = webrtc::TickTime::MicrosecondTimestamp();
+      num_rendered_frames_++;
+      break;
+  }
+}
+
+void FrameDropDetector::CalculateResults() {
+  // Fill in all fields of the Frame objects in the created_frames_ map.
+  // Iterate over the maps from converted timestamps to the arrival timestamps.
+  std::map<unsigned int, int64_t>::const_iterator it;
+  for (it = sent_frames_.begin(); it != sent_frames_.end(); ++it) {
+    int created_timestamp = it->first - timestamp_diff_;
+    created_frames_[created_timestamp]->sent_timestamp_in_us_ = it->second;
+  }
+  for (it = received_frames_.begin(); it != received_frames_.end(); ++it) {
+    int created_timestamp = it->first - timestamp_diff_;
+    created_frames_[created_timestamp]->received_timestamp_in_us_ = it->second;
+  }
+  for (it = decoded_frames_.begin(); it != decoded_frames_.end(); ++it) {
+    int created_timestamp = it->first - timestamp_diff_;
+    created_frames_[created_timestamp]->decoded_timestamp_in_us_ =it->second;
+  }
+  for (it = rendered_frames_.begin(); it != rendered_frames_.end(); ++it) {
+    int created_timestamp = it->first - timestamp_diff_;
+    created_frames_[created_timestamp]->rendered_timestamp_in_us_ = it->second;
+  }
+  // Find out where the frames were not present in the different states.
+  dropped_frames_at_send_ = 0;
+  dropped_frames_at_receive_ = 0;
+  dropped_frames_at_decode_ = 0;
+  dropped_frames_at_render_ = 0;
+  for (std::vector<Frame*>::const_iterator it = created_frames_vector_.begin();
+       it != created_frames_vector_.end(); ++it) {
+    int encoded_timestamp = (*it)->frame_timestamp_ + timestamp_diff_;
+    if (sent_frames_.find(encoded_timestamp) == sent_frames_.end()) {
+      (*it)->dropped_at_send = true;
+      dropped_frames_at_send_++;
+    }
+    if (received_frames_.find(encoded_timestamp) == received_frames_.end()) {
+      (*it)->dropped_at_receive = true;
+      dropped_frames_at_receive_++;
+    }
+    if (decoded_frames_.find(encoded_timestamp) == decoded_frames_.end()) {
+      (*it)->dropped_at_decode = true;
+      dropped_frames_at_decode_++;
+    }
+    if (rendered_frames_.find(encoded_timestamp) == rendered_frames_.end()) {
+      (*it)->dropped_at_render = true;
+      dropped_frames_at_render_++;
+    }
+  }
+  dirty_ = false;
+}
+
+void FrameDropDetector::PrintReport() {
+  assert(!dirty_);
+  ViETest::Log("Frame Drop Detector report:");
+  ViETest::Log("  Created  frames: %ld", created_frames_.size());
+  ViETest::Log("  Sent     frames: %ld", sent_frames_.size());
+  ViETest::Log("  Received frames: %ld", received_frames_.size());
+  ViETest::Log("  Decoded  frames: %ld", decoded_frames_.size());
+  ViETest::Log("  Rendered frames: %ld", rendered_frames_.size());
+
+  // Display all frames and stats for them:
+  long last_created = 0;
+  long last_sent = 0;
+  long last_received = 0;
+  long last_decoded = 0;
+  long last_rendered = 0;
+  ViETest::Log("\nDeltas between sent frames and drop status:");
+  ViETest::Log("Unit: Microseconds");
+  ViETest::Log("Frame  Created    Sent    Received Decoded Rendered "
+      "Dropped at  Dropped at  Dropped at  Dropped at");
+  ViETest::Log(" nbr    delta     delta    delta    delta   delta   "
+      " Send?       Receive?    Decode?     Render?");
+  for (std::vector<Frame*>::const_iterator it = created_frames_vector_.begin();
+       it != created_frames_vector_.end(); ++it) {
+    int created_delta =
+        static_cast<int>((*it)->created_timestamp_in_us_ - last_created);
+    int sent_delta = (*it)->dropped_at_send ? -1 :
+        static_cast<int>((*it)->sent_timestamp_in_us_ - last_sent);
+    int received_delta = (*it)->dropped_at_receive ? -1 :
+        static_cast<int>((*it)->received_timestamp_in_us_ - last_received);
+    int decoded_delta = (*it)->dropped_at_decode ? -1 :
+        static_cast<int>((*it)->decoded_timestamp_in_us_ - last_decoded);
+    int rendered_delta = (*it)->dropped_at_render ? -1 :
+        static_cast<int>((*it)->rendered_timestamp_in_us_ - last_rendered);
+
+    // Set values to -1 for the first frame:
+    if ((*it)->number_ == 0) {
+      created_delta = -1;
+      sent_delta = -1;
+      received_delta = -1;
+      decoded_delta = -1;
+      rendered_delta = -1;
+    }
+    ViETest::Log("%5d %8d %8d %8d %8d %8d %10s %10s %10s %10s",
+                 (*it)->number_,
+                 created_delta,
+                 sent_delta,
+                 received_delta,
+                 decoded_delta,
+                 rendered_delta,
+                 (*it)->dropped_at_send ? "DROPPED" : "      ",
+                 (*it)->dropped_at_receive ? "DROPPED" : "      ",
+                 (*it)->dropped_at_decode ? "DROPPED" : "      ",
+                 (*it)->dropped_at_render ? "DROPPED" : "      ");
+    last_created = (*it)->created_timestamp_in_us_;
+    if (!(*it)->dropped_at_send) {
+      last_sent = (*it)->sent_timestamp_in_us_;
+    }
+     if (!(*it)->dropped_at_receive) {
+      last_received = (*it)->received_timestamp_in_us_;
+    }
+    if (!(*it)->dropped_at_decode) {
+      last_decoded = (*it)->decoded_timestamp_in_us_;
+    }
+    if (!(*it)->dropped_at_render) {
+      last_rendered = (*it)->rendered_timestamp_in_us_;
+    }
+  }
+  ViETest::Log("\nLatency between states (-1 means N/A because of drop):");
+  ViETest::Log("Unit: Microseconds");
+  ViETest::Log("Frame  Created    Sent      Received   Decoded      Total    "
+      "   Total");
+  ViETest::Log(" nbr   ->Sent  ->Received  ->Decoded ->Rendered    latency   "
+      "  latency");
+  ViETest::Log("                                               (incl network)"
+      "(excl network)");
+  for (std::vector<Frame*>::const_iterator it = created_frames_vector_.begin();
+       it != created_frames_vector_.end(); ++it) {
+    int created_to_sent = (*it)->dropped_at_send ? -1 :
+        static_cast<int>((*it)->sent_timestamp_in_us_ -
+                         (*it)->created_timestamp_in_us_);
+    int sent_to_received = (*it)->dropped_at_receive ? -1 :
+        static_cast<int>((*it)->received_timestamp_in_us_ -
+                         (*it)->sent_timestamp_in_us_);
+    int received_to_decoded = (*it)->dropped_at_decode ? -1 :
+        static_cast<int>((*it)->decoded_timestamp_in_us_ -
+                         (*it)->received_timestamp_in_us_);
+    int decoded_to_render = (*it)->dropped_at_render ? -1 :
+        static_cast<int>((*it)->rendered_timestamp_in_us_ -
+                         (*it)->decoded_timestamp_in_us_);
+    int total_latency_incl_network = (*it)->dropped_at_render ? -1 :
+        static_cast<int>((*it)->rendered_timestamp_in_us_ -
+                         (*it)->created_timestamp_in_us_);
+    int total_latency_excl_network = (*it)->dropped_at_render ? -1 :
+        static_cast<int>((*it)->rendered_timestamp_in_us_ -
+                         (*it)->created_timestamp_in_us_ - sent_to_received);
+    ViETest::Log("%5d %9d %9d %9d %9d %12d %12d",
+                 (*it)->number_,
+                 created_to_sent,
+                 sent_to_received,
+                 received_to_decoded,
+                 decoded_to_render,
+                 total_latency_incl_network,
+                 total_latency_excl_network);
+  }
+  // Find and print the dropped frames.
+  ViETest::Log("\nTotal # dropped frames at:");
+  ViETest::Log("  Send   : %d", dropped_frames_at_send_);
+  ViETest::Log("  Receive: %d", dropped_frames_at_receive_);
+  ViETest::Log("  Decode : %d", dropped_frames_at_decode_);
+  ViETest::Log("  Render : %d", dropped_frames_at_render_);
+}
+
+void FrameDropDetector::PrintDebugDump() {
+  assert(!dirty_);
+  ViETest::Log("\nPrintDebugDump: Frame objects:");
+  ViETest::Log("Frame FrTimeStamp Created       Sent      Received    Decoded"
+      "    Rendered ");
+  for (std::vector<Frame*>::const_iterator it = created_frames_vector_.begin();
+       it != created_frames_vector_.end(); ++it) {
+    ViETest::Log("%5d %11d %11d %11d %11d %11d %11d",
+                 (*it)->number_,
+                 (*it)->frame_timestamp_,
+                 (*it)->created_timestamp_in_us_,
+                 (*it)->sent_timestamp_in_us_,
+                 (*it)->received_timestamp_in_us_,
+                 (*it)->decoded_timestamp_in_us_,
+                 (*it)->rendered_timestamp_in_us_);
+  }
+  std::vector<int> mismatch_frame_num_list;
+  for (std::vector<Frame*>::const_iterator it = created_frames_vector_.begin();
+       it != created_frames_vector_.end(); ++it) {
+    if ((*it)->dropped_at_render != (*it)->dropped_at_decode) {
+      mismatch_frame_num_list.push_back((*it)->number_);
+    }
+  }
+  if (mismatch_frame_num_list.size() > 0) {
+    ViETest::Log("\nDecoded/Rendered mismatches:");
+    ViETest::Log("Frame FrTimeStamp    Created       Sent      Received    "
+        "Decoded    Rendered ");
+    for (std::vector<int>::const_iterator it = mismatch_frame_num_list.begin();
+         it != mismatch_frame_num_list.end(); ++it) {
+      Frame* frame = created_frames_vector_[*it];
+      ViETest::Log("%5d %11d %11d %11d %11d %11d %11d",
+                 frame->number_,
+                 frame->frame_timestamp_,
+                 frame->created_timestamp_in_us_,
+                 frame->sent_timestamp_in_us_,
+                 frame->received_timestamp_in_us_,
+                 frame->decoded_timestamp_in_us_,
+                 frame->rendered_timestamp_in_us_);
+    }
+  }
+
+  ViETest::Log("\nReportFrameState method invocations:");
+  ViETest::Log("  Created : %d", num_created_frames_);
+  ViETest::Log("  Send    : %d", num_sent_frames_);
+  ViETest::Log("  Received: %d", num_received_frames_);
+  ViETest::Log("  Decoded : %d", num_decoded_frames_);
+  ViETest::Log("  Rendered: %d", num_rendered_frames_);
+}
+
+const std::vector<Frame*>& FrameDropDetector::GetAllFrames() {
+  assert(!dirty_);
+  return created_frames_vector_;
+}
+
+int FrameDropDetector::GetNumberOfFramesDroppedAt(State state) {
+  assert(!dirty_);
+  switch (state) {
+    case kSent:
+      return dropped_frames_at_send_;
+    case kReceived:
+      return dropped_frames_at_receive_;
+    case kDecoded:
+      return dropped_frames_at_decode_;
+    case kRendered:
+      return dropped_frames_at_render_;
+    default:
+      return 0;
+  }
+}
+
+int FrameDropMonitoringRemoteFileRenderer::DeliverFrame(
+    unsigned char *buffer, int buffer_size, uint32_t time_stamp,
+    int64_t render_time) {
+  // Register that this frame has been rendered:
+  frame_drop_detector_->ReportFrameState(FrameDropDetector::kRendered,
+                                         time_stamp);
+  return ViEToFileRenderer::DeliverFrame(buffer, buffer_size,
+                                         time_stamp, render_time);
+}
+
+int FrameDropMonitoringRemoteFileRenderer::FrameSizeChange(
+    unsigned int width, unsigned int height, unsigned int number_of_streams) {
+  return ViEToFileRenderer::FrameSizeChange(width, height, number_of_streams);
+}
diff --git a/src/video_engine/test/auto_test/primitives/framedrop_primitives.h b/src/video_engine/test/auto_test/primitives/framedrop_primitives.h
new file mode 100644
index 0000000..cf3c1de
--- /dev/null
+++ b/src/video_engine/test/auto_test/primitives/framedrop_primitives.h
@@ -0,0 +1,229 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_SOURCE_FRAMEDROP_PRIMITIVES_H_
+#define WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_SOURCE_FRAMEDROP_PRIMITIVES_H_
+
+#include <map>
+#include <vector>
+
+#include "video_engine/include/vie_codec.h"
+#include "video_engine/include/vie_image_process.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_defines.h"
+#include "video_engine/test/libvietest/include/vie_to_file_renderer.h"
+
+class FrameDropDetector;
+class TbInterfaces;
+
+// Initializes the Video engine and its components, runs video playback using
+// for KAutoTestSleepTimeMs milliseconds, then shuts down everything.
+// The bit rate and packet loss parameters should be configured so that
+// frames are dropped, in order to test the frame drop detection that is
+// performed by the FrameDropDetector class.
+void TestFullStack(const TbInterfaces& interfaces,
+                   int capture_id,
+                   int video_channel,
+                   int width,
+                   int height,
+                   int bit_rate_kbps,
+                   int packet_loss_percent,
+                   int network_delay_ms,
+                   FrameDropDetector* frame_drop_detector);
+
+// A frame in a video file. The four different points in the stack when
+// register the frame state are (in time order): created, transmitted, decoded,
+// rendered.
+class Frame {
+ public:
+  Frame(int number, unsigned int timestamp)
+    : number_(number),
+      frame_timestamp_(timestamp),
+      created_timestamp_in_us_(-1),
+      sent_timestamp_in_us_(-1),
+      received_timestamp_in_us_(-1),
+      decoded_timestamp_in_us_(-1),
+      rendered_timestamp_in_us_(-1),
+      dropped_at_send(false),
+      dropped_at_receive(false),
+      dropped_at_decode(false),
+      dropped_at_render(false) {}
+
+  // Frame number, starting at 0.
+  int number_;
+
+  // Frame timestamp, that is used by Video Engine and RTP headers and set when
+  // the frame is sent into the stack.
+  unsigned int frame_timestamp_;
+
+  // Timestamps for our measurements of when the frame is in different states.
+  int64_t created_timestamp_in_us_;
+  int64_t sent_timestamp_in_us_;
+  int64_t received_timestamp_in_us_;
+  int64_t decoded_timestamp_in_us_;
+  int64_t rendered_timestamp_in_us_;
+
+  // Where the frame was dropped (more than one may be true).
+  bool dropped_at_send;
+  bool dropped_at_receive;
+  bool dropped_at_decode;
+  bool dropped_at_render;
+};
+
+// Fixes the output file by copying the last successful frame into the place
+// where the dropped frame would be, for all dropped frames (if any).
+// This method will not be able to fix data for the first frame if that is
+// dropped, since there'll be no previous frame to copy. This case should never
+// happen because of encoder frame dropping at least.
+// Parameters:
+//    output_file            The output file to modify (pad with frame copies
+//                           for all dropped frames)
+//    frame_length_in_bytes  Byte length of each frame.
+//    frames                 A vector of all Frame objects. Must be sorted by
+//                           frame number. If empty this method will do nothing.
+void FixOutputFileForComparison(const std::string& output_file,
+                                int frame_length_in_bytes,
+                                const std::vector<Frame*>& frames);
+
+// Handles statistics about dropped frames. Frames travel through the stack
+// with different timestamps. The frames created and sent to the encoder have
+// one timestamp on the sending side while the decoded/rendered frames have
+// another timestamp on the receiving side. The difference between these
+// timestamps is fixed, which we can use to identify the frames when they
+// arrive, since the FrameDropDetector class gets data reported from both sides.
+// The four different points in the stack when this class examines the frame
+// states are (in time order): created, sent, received, decoded, rendered.
+//
+// The flow can be visualized like this:
+//
+//         Created        Sent        Received               Decoded   Rendered
+// +-------+  |  +-------+ | +---------+ | +------+  +-------+  |  +--------+
+// |Capture|  |  |Encoder| | |  Ext.   | | |Jitter|  |Decoder|  |  |  Ext.  |
+// | device|---->|       |-->|transport|-->|buffer|->|       |---->|renderer|
+// +-------+     +-------+   +---------+   +------+  +-------+     +--------+
+//
+// This class has no intention of being thread-safe.
+class FrameDropDetector {
+ public:
+  enum State {
+    // A frame being created, i.e. sent to the encoder; the first step of
+    // a frame's life cycle. This timestamp becomes the frame timestamp in the
+    // Frame objects.
+    kCreated,
+    // A frame being sent in external transport (to the simulated network). This
+    // timestamp differs from the one in the Created state by a constant diff.
+    kSent,
+    // A frame being received in external transport (from the simulated
+    // network). This timestamp differs from the one in the Created state by a
+    // constant diff.
+    kReceived,
+    // A frame that has been decoded in the decoder. This timestamp differs
+    // from the one in the Created state by a constant diff.
+    kDecoded,
+    // A frame that has been rendered; the last step of a frame's life cycle.
+    // This timestamp differs from the one in the Created state by a constant
+    // diff.
+    kRendered
+  };
+
+  FrameDropDetector()
+      : dirty_(true),
+        dropped_frames_at_send_(0),
+        dropped_frames_at_receive_(0),
+        dropped_frames_at_decode_(0),
+        dropped_frames_at_render_(0),
+        num_created_frames_(0),
+        num_sent_frames_(0),
+        num_received_frames_(0),
+        num_decoded_frames_(0),
+        num_rendered_frames_(0),
+        timestamp_diff_(0) {}
+
+  // Reports a frame has reached a state in the frame life cycle.
+  void ReportFrameState(State state, unsigned int timestamp);
+
+  // Uses all the gathered timestamp information to calculate which frames have
+  // been dropped during the test and where they were dropped. Not until
+  // this method has been executed, the Frame objects will have all fields
+  // filled with the proper timestamp information.
+  void CalculateResults();
+
+  // Calculates the number of frames have been registered as dropped at the
+  // specified state of the frame life cycle.
+  // CalculateResults() must be called before calling this method.
+  int GetNumberOfFramesDroppedAt(State state);
+
+  // Gets a vector of all the created frames.
+  // CalculateResults() must be called before calling this method to have all
+  // fields of the Frame objects to represent the current state.
+  const std::vector<Frame*>& GetAllFrames();
+
+  // Prints a detailed report about all the different frame states and which
+  // ones are detected as dropped, using ViETest::Log.
+  // CalculateResults() must be called before calling this method.
+  void PrintReport();
+
+  // Prints all the timestamp maps. Mainly used for debugging purposes to find
+  // missing timestamps.
+  void PrintDebugDump();
+ private:
+  // Will be false until CalculateResults() is called. Switches to true
+  // as soon as new timestamps are reported using ReportFrameState().
+  bool dirty_;
+
+  // Map of frame creation timestamps to all Frame objects.
+  std::map<unsigned int, Frame*> created_frames_;
+
+  // Maps converted frame timestamps (differ from creation timestamp) to the
+  // time they arrived in the different states of the frame's life cycle.
+  std::map<unsigned int, int64_t> sent_frames_;
+  std::map<unsigned int, int64_t> received_frames_;
+  std::map<unsigned int, int64_t> decoded_frames_;
+  std::map<unsigned int, int64_t> rendered_frames_;
+
+  // A vector with the frames sorted in their created order.
+  std::vector<Frame*> created_frames_vector_;
+
+  // Statistics.
+  int dropped_frames_at_send_;
+  int dropped_frames_at_receive_;
+  int dropped_frames_at_decode_;
+  int dropped_frames_at_render_;
+
+  int num_created_frames_;
+  int num_sent_frames_;
+  int num_received_frames_;
+  int num_decoded_frames_;
+  int num_rendered_frames_;
+
+  // The constant diff between the created and transmitted frames, since their
+  // timestamps are converted.
+  unsigned int timestamp_diff_;
+};
+
+// Tracks which frames are received on the remote side and reports back to the
+// FrameDropDetector class when they are rendered.
+class FrameDropMonitoringRemoteFileRenderer : public ViEToFileRenderer {
+ public:
+  explicit FrameDropMonitoringRemoteFileRenderer(
+      FrameDropDetector* frame_drop_detector)
+      : frame_drop_detector_(frame_drop_detector) {}
+  virtual ~FrameDropMonitoringRemoteFileRenderer() {}
+
+  // Implementation of ExternalRenderer:
+  int FrameSizeChange(unsigned int width, unsigned int height,
+                      unsigned int number_of_streams);
+  int DeliverFrame(unsigned char* buffer, int buffer_size,
+                   uint32_t time_stamp,
+                   int64_t render_time);
+ private:
+  FrameDropDetector* frame_drop_detector_;
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_SOURCE_FRAMEDROP_PRIMITIVES_H_
diff --git a/src/video_engine/test/auto_test/primitives/framedrop_primitives_unittest.cc b/src/video_engine/test/auto_test/primitives/framedrop_primitives_unittest.cc
new file mode 100644
index 0000000..f4cc390
--- /dev/null
+++ b/src/video_engine/test/auto_test/primitives/framedrop_primitives_unittest.cc
@@ -0,0 +1,96 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "framedrop_primitives.h"
+
+#include <cstdio>
+#include <vector>
+
+#include "gtest/gtest.h"
+#include "testsupport/fileutils.h"
+#include "testsupport/frame_reader.h"
+#include "testsupport/frame_writer.h"
+
+namespace webrtc {
+
+const std::string kOutputFilename = "temp_outputfile.tmp";
+const int kFrameLength = 1000;
+
+class FrameDropPrimitivesTest: public testing::Test {
+ protected:
+  FrameDropPrimitivesTest() {}
+  virtual ~FrameDropPrimitivesTest() {}
+  void SetUp() {
+    // Cleanup any previous output file.
+    std::remove(kOutputFilename.c_str());
+  }
+  void TearDown() {
+    // Cleanup the temporary file.
+    std::remove(kOutputFilename.c_str());
+  }
+};
+
+TEST_F(FrameDropPrimitivesTest, FixOutputFileForComparison) {
+  // Create test frame objects, where the second and fourth frame is marked
+  // as dropped at rendering.
+  std::vector<Frame*> frames;
+  Frame first_frame(0, kFrameLength);
+  Frame second_frame(0, kFrameLength);
+  Frame third_frame(0, kFrameLength);
+  Frame fourth_frame(0, kFrameLength);
+
+  second_frame.dropped_at_render = true;
+  fourth_frame.dropped_at_render = true;
+
+  frames.push_back(&first_frame);
+  frames.push_back(&second_frame);
+  frames.push_back(&third_frame);
+  frames.push_back(&fourth_frame);
+
+  // Prepare data for the first and third frames:
+  WebRtc_UWord8 first_frame_data[kFrameLength];
+  memset(first_frame_data, 5, kFrameLength);  // Fill it with 5's to identify.
+  WebRtc_UWord8 third_frame_data[kFrameLength];
+  memset(third_frame_data, 7, kFrameLength);  // Fill it with 7's to identify.
+
+  // Write the first and third frames to the temporary file. This means the fix
+  // method should add two frames of data by filling the file with data from
+  // the first and third frames after executing.
+  webrtc::test::FrameWriterImpl frame_writer(kOutputFilename, kFrameLength);
+  EXPECT_TRUE(frame_writer.Init());
+  EXPECT_TRUE(frame_writer.WriteFrame(first_frame_data));
+  EXPECT_TRUE(frame_writer.WriteFrame(third_frame_data));
+  frame_writer.Close();
+  EXPECT_EQ(2 * kFrameLength,
+            static_cast<int>(webrtc::test::GetFileSize(kOutputFilename)));
+
+  FixOutputFileForComparison(kOutputFilename, kFrameLength, frames);
+
+  // Verify that the output file has correct size.
+  EXPECT_EQ(4 * kFrameLength,
+            static_cast<int>(webrtc::test::GetFileSize(kOutputFilename)));
+
+  webrtc::test::FrameReaderImpl frame_reader(kOutputFilename, kFrameLength);
+  frame_reader.Init();
+  WebRtc_UWord8 read_buffer[kFrameLength];
+  EXPECT_TRUE(frame_reader.ReadFrame(read_buffer));
+  EXPECT_EQ(0, memcmp(read_buffer, first_frame_data, kFrameLength));
+  EXPECT_TRUE(frame_reader.ReadFrame(read_buffer));
+  EXPECT_EQ(0, memcmp(read_buffer, first_frame_data, kFrameLength));
+
+  EXPECT_TRUE(frame_reader.ReadFrame(read_buffer));
+  EXPECT_EQ(0, memcmp(read_buffer, third_frame_data, kFrameLength));
+  EXPECT_TRUE(frame_reader.ReadFrame(read_buffer));
+  EXPECT_EQ(0, memcmp(read_buffer, third_frame_data, kFrameLength));
+
+  frame_reader.Close();
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/test/auto_test/primitives/general_primitives.cc b/src/video_engine/test/auto_test/primitives/general_primitives.cc
new file mode 100644
index 0000000..906f710
--- /dev/null
+++ b/src/video_engine/test/auto_test/primitives/general_primitives.cc
@@ -0,0 +1,130 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "general_primitives.h"
+
+#include "video_capture_factory.h"
+#include "vie_autotest.h"
+#include "vie_autotest_defines.h"
+#include "vie_to_file_renderer.h"
+
+void FindCaptureDeviceOnSystem(webrtc::ViECapture* capture,
+                               char* device_name,
+                               unsigned int device_name_length,
+                               int* device_id,
+                               webrtc::VideoCaptureModule** device_video) {
+
+  bool capture_device_set = false;
+  webrtc::VideoCaptureModule::DeviceInfo *dev_info =
+      webrtc::VideoCaptureFactory::CreateDeviceInfo(0);
+
+  const unsigned int kMaxUniqueIdLength = 256;
+  char unique_id[kMaxUniqueIdLength];
+  memset(unique_id, 0, kMaxUniqueIdLength);
+
+  for (unsigned int i = 0; i < dev_info->NumberOfDevices(); i++) {
+    EXPECT_EQ(0, dev_info->GetDeviceName(i, device_name, device_name_length,
+                                         unique_id, kMaxUniqueIdLength));
+
+    *device_video = webrtc::VideoCaptureFactory::Create(4571, unique_id);
+    EXPECT_TRUE(*device_video != NULL);
+
+    (*device_video)->AddRef();
+
+    int error = capture->AllocateCaptureDevice(**device_video, *device_id);
+    if (error == 0) {
+      ViETest::Log("Using capture device: %s, captureId: %d.",
+                   device_name, *device_id);
+      capture_device_set = true;
+      break;
+    } else {
+      (*device_video)->Release();
+      (*device_video) = NULL;
+    }
+  }
+  delete dev_info;
+  EXPECT_TRUE(capture_device_set) << "Found no suitable camera on your system.";
+}
+
+void RenderInWindow(webrtc::ViERender* video_render_interface,
+                    int frame_provider_id,
+                    void* os_window,
+                    float z_index) {
+  EXPECT_EQ(0,
+            video_render_interface->AddRenderer(frame_provider_id, os_window,
+                                                z_index, 0.0, 0.0, 1.0, 1.0));
+  EXPECT_EQ(0, video_render_interface->StartRender(frame_provider_id));
+}
+
+void RenderToFile(webrtc::ViERender* renderer_interface,
+                  int frame_provider_id,
+                  ViEToFileRenderer *to_file_renderer) {
+  EXPECT_EQ(0, renderer_interface->AddRenderer(
+      frame_provider_id, webrtc::kVideoI420, to_file_renderer));
+  EXPECT_EQ(0, renderer_interface->StartRender(frame_provider_id));
+}
+
+void StopAndRemoveRenderers(webrtc::ViEBase* base_interface,
+                            webrtc::ViERender* render_interface,
+                            int channel_id,
+                            int capture_id) {
+  EXPECT_EQ(0, render_interface->StopRender(channel_id));
+  EXPECT_EQ(0, render_interface->RemoveRenderer(channel_id));
+  EXPECT_EQ(0, render_interface->RemoveRenderer(capture_id));
+}
+
+void ConfigureRtpRtcp(webrtc::ViERTP_RTCP* rtcp_interface,
+                      int video_channel) {
+  EXPECT_EQ(0, rtcp_interface->SetRTCPStatus(video_channel,
+                                             webrtc::kRtcpCompound_RFC4585));
+  EXPECT_EQ(0, rtcp_interface->SetKeyFrameRequestMethod(
+      video_channel, webrtc::kViEKeyFrameRequestPliRtcp));
+  EXPECT_EQ(0, rtcp_interface->SetTMMBRStatus(video_channel, true));
+}
+
+bool FindSpecificCodec(webrtc::VideoCodecType of_type,
+                       webrtc::ViECodec* codec_interface,
+                       webrtc::VideoCodec* result) {
+
+  memset(result, 0, sizeof(webrtc::VideoCodec));
+
+  for (int i = 0; i < codec_interface->NumberOfCodecs(); i++) {
+    webrtc::VideoCodec codec;
+    memset(&codec, 0, sizeof(webrtc::VideoCodec));
+    if (codec_interface->GetCodec(i, codec) != 0) {
+      return false;
+    }
+    if (codec.codecType == of_type) {
+      // Done
+      *result = codec;
+      return true;
+    }
+  }
+  // Didn't find it
+  return false;
+}
+
+void SetSuitableResolution(webrtc::VideoCodec* video_codec,
+                           int forced_codec_width,
+                           int forced_codec_height) {
+  if (forced_codec_width != kDoNotForceResolution &&
+      forced_codec_height != kDoNotForceResolution) {
+    video_codec->width = forced_codec_width;
+    video_codec->height = forced_codec_height;
+  } else if (video_codec->codecType == webrtc::kVideoCodecI420) {
+    // I420 is very bandwidth heavy, so limit it here.
+    video_codec->width = 176;
+    video_codec->height = 144;
+  } else {
+    // Otherwise go with 640x480.
+    video_codec->width = 640;
+    video_codec->height = 480;
+  }
+}
diff --git a/src/video_engine/test/auto_test/primitives/general_primitives.h b/src/video_engine/test/auto_test/primitives/general_primitives.h
new file mode 100644
index 0000000..7c1740d
--- /dev/null
+++ b/src/video_engine/test/auto_test/primitives/general_primitives.h
@@ -0,0 +1,83 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_PRIMITIVES_GENERAL_PRIMITIVES_H_
+#define WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_PRIMITIVES_GENERAL_PRIMITIVES_H_
+
+class ViEToFileRenderer;
+
+#include "common_types.h"
+
+namespace webrtc {
+class VideoCaptureModule;
+class ViEBase;
+class ViECapture;
+class ViECodec;
+class ViERender;
+class ViERTP_RTCP;
+struct VideoCodec;
+}
+
+// This constant can be used as input to various functions to not force the
+// codec resolution.
+const int kDoNotForceResolution = 0;
+
+// Finds a suitable capture device (e.g. camera) on the current system
+// and allocates it. Details about the found device are filled into the out
+// parameters. If this operation fails, device_id is assigned a negative value
+// and number_of_errors is incremented.
+void FindCaptureDeviceOnSystem(webrtc::ViECapture* capture,
+                               char* device_name,
+                               const unsigned int kDeviceNameLength,
+                               int* device_id,
+                               webrtc::VideoCaptureModule** device_video);
+
+// Sets up rendering in a window previously created using a Window Manager
+// (See vie_window_manager_factory.h for more details on how to make one of
+// those). The frame provider id is a source of video frames, for instance
+// a capture device or a video channel.
+void RenderInWindow(webrtc::ViERender* video_render_interface,
+                    int  frame_provider_id,
+                    void* os_window,
+                    float z_index);
+
+// Similar in function to RenderInWindow, this function instead renders to
+// a file using a to-file-renderer. The frame provider id is a source of
+// video frames, for instance a capture device or a video channel.
+void RenderToFile(webrtc::ViERender* renderer_interface,
+                  int frame_provider_id,
+                  ViEToFileRenderer* to_file_renderer);
+
+// Stops all rendering given the normal case that we have a capture device
+// and a video channel set up for rendering.
+void StopAndRemoveRenderers(webrtc::ViEBase* base_interface,
+                            webrtc::ViERender* render_interface,
+                            int channel_id,
+                            int capture_id);
+
+// Configures RTP-RTCP.
+void ConfigureRtpRtcp(webrtc::ViERTP_RTCP* rtcp_interface,
+                      int video_channel);
+
+// Finds a codec in the codec list. Returns true on success, false otherwise.
+// The resulting codec is filled into result on success but is zeroed out
+// on failure.
+bool FindSpecificCodec(webrtc::VideoCodecType of_type,
+                       webrtc::ViECodec* codec_interface,
+                       webrtc::VideoCodec* result);
+
+// Sets up the provided codec with a resolution that takes individual codec
+// quirks into account (except if the forced* variables are
+// != kDoNotForceResolution)
+void SetSuitableResolution(webrtc::VideoCodec* video_codec,
+                           int forced_codec_width,
+                           int forced_codec_height);
+
+#endif  // WEBRTC_VIDEO_ENGINE_TEST_AUTO_TEST_PRIMITIVES_GENERAL_PRIMITIVES_H_
diff --git a/src/video_engine/test/auto_test/source/vie_autotest.cc b/src/video_engine/test/auto_test/source/vie_autotest.cc
new file mode 100644
index 0000000..28bedc9
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_autotest.cc
@@ -0,0 +1,155 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// vie_autotest.cc
+//
+
+#include "video_engine/test/auto_test/interface/vie_autotest.h"
+
+#include <stdio.h>
+
+#include "engine_configurations.h"
+#include "modules/video_render/main/interface/video_render.h"
+#include "testsupport/fileutils.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_defines.h"
+#include "video_engine/test/auto_test/primitives/general_primitives.h"
+#include "video_engine/test/libvietest/include/tb_capture_device.h"
+#include "video_engine/test/libvietest/include/tb_interfaces.h"
+#include "video_engine/test/libvietest/include/tb_video_channel.h"
+
+// ViETest implementation
+FILE* ViETest::log_file_ = NULL;
+char* ViETest::log_str_ = NULL;
+
+std::string ViETest::GetResultOutputPath() {
+  return webrtc::test::OutputPath();
+}
+
+// ViEAutoTest implementation
+ViEAutoTest::ViEAutoTest(void* window1, void* window2) :
+    _window1(window1),
+    _window2(window2),
+    _renderType(webrtc::kRenderDefault),
+    _vrm1(webrtc::VideoRender::CreateVideoRender(
+        4561, window1, false, _renderType)),
+    _vrm2(webrtc::VideoRender::CreateVideoRender(
+        4562, window2, false, _renderType))
+{
+    assert(_vrm1);
+    assert(_vrm2);
+}
+
+ViEAutoTest::~ViEAutoTest()
+{
+    webrtc::VideoRender::DestroyVideoRender(_vrm1);
+    _vrm1 = NULL;
+    webrtc::VideoRender::DestroyVideoRender(_vrm2);
+    _vrm2 = NULL;
+}
+
+void ViEAutoTest::ViEStandardTest()
+{
+    ViEBaseStandardTest();
+    ViECaptureStandardTest();
+    ViECodecStandardTest();
+    ViEEncryptionStandardTest();
+    ViEFileStandardTest();
+    ViEImageProcessStandardTest();
+    ViENetworkStandardTest();
+    ViERenderStandardTest();
+    ViERtpRtcpStandardTest();
+}
+
+void ViEAutoTest::ViEExtendedTest()
+{
+    ViEBaseExtendedTest();
+    ViECaptureExtendedTest();
+    ViECodecExtendedTest();
+    ViEEncryptionExtendedTest();
+    ViEFileExtendedTest();
+    ViEImageProcessExtendedTest();
+    ViENetworkExtendedTest();
+    ViERenderExtendedTest();
+    ViERtpRtcpExtendedTest();
+}
+
+void ViEAutoTest::ViEAPITest()
+{
+    ViEBaseAPITest();
+    ViECaptureAPITest();
+    ViECodecAPITest();
+    ViEEncryptionAPITest();
+    ViEFileAPITest();
+    ViEImageProcessAPITest();
+    ViENetworkAPITest();
+    ViERenderAPITest();
+    ViERtpRtcpAPITest();
+}
+
+void ViEAutoTest::PrintVideoCodec(const webrtc::VideoCodec videoCodec)
+{
+    ViETest::Log("Video Codec Information:");
+
+    switch (videoCodec.codecType)
+    {
+        case webrtc::kVideoCodecVP8:
+            ViETest::Log("\tcodecType: VP8");
+            break;
+            // TODO(sh): keep or remove MPEG4?
+            //    case webrtc::kVideoCodecMPEG4:
+            //        ViETest::Log("\tcodecType: MPEG4");
+            //        break;
+        case webrtc::kVideoCodecI420:
+            ViETest::Log("\tcodecType: I420");
+            break;
+        case webrtc::kVideoCodecRED:
+            ViETest::Log("\tcodecType: RED");
+            break;
+        case webrtc::kVideoCodecULPFEC:
+            ViETest::Log("\tcodecType: ULPFEC");
+            break;
+        case webrtc::kVideoCodecUnknown:
+            ViETest::Log("\tcodecType: ????");
+            break;
+    }
+
+    ViETest::Log("\theight: %u", videoCodec.height);
+    ViETest::Log("\tmaxBitrate: %u", videoCodec.maxBitrate);
+    ViETest::Log("\tmaxFramerate: %u", videoCodec.maxFramerate);
+    ViETest::Log("\tminBitrate: %u", videoCodec.minBitrate);
+    ViETest::Log("\tplName: %s", videoCodec.plName);
+    ViETest::Log("\tplType: %u", videoCodec.plType);
+    ViETest::Log("\tstartBitrate: %u", videoCodec.startBitrate);
+    ViETest::Log("\twidth: %u", videoCodec.width);
+    ViETest::Log("");
+}
+
+void ViEAutoTest::PrintAudioCodec(const webrtc::CodecInst audioCodec)
+{
+    ViETest::Log("Audio Codec Information:");
+    ViETest::Log("\tchannels: %u", audioCodec.channels);
+    ViETest::Log("\t: %u", audioCodec.pacsize);
+    ViETest::Log("\t: %u", audioCodec.plfreq);
+    ViETest::Log("\t: %s", audioCodec.plname);
+    ViETest::Log("\t: %u", audioCodec.pltype);
+    ViETest::Log("\t: %u", audioCodec.rate);
+    ViETest::Log("");
+}
+
+void ViEAutoTest::RenderCaptureDeviceAndOutputStream(
+    TbInterfaces* video_engine,
+    TbVideoChannel* video_channel,
+    TbCaptureDevice* capture_device) {
+  RenderInWindow(
+      video_engine->render, capture_device->captureId, _window1, 0);
+  RenderInWindow(
+      video_engine->render, video_channel->videoChannel, _window2, 1);
+}
diff --git a/src/video_engine/test/auto_test/source/vie_autotest_android.cc b/src/video_engine/test/auto_test/source/vie_autotest_android.cc
new file mode 100644
index 0000000..2b1412c
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_autotest_android.cc
@@ -0,0 +1,199 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "../interface/vie_autotest_android.h"
+
+#include <android/log.h>
+#include <stdio.h>
+
+#include "vie_autotest.h"
+#include "vie_autotest_defines.h"
+
+int ViEAutoTestAndroid::RunAutotest(int testSelection, int subTestSelection,
+                                    void* window1, void* window2,
+                                    void* javaVM, void* env, void* context) {
+  ViEAutoTest vieAutoTest(window1, window2);
+  ViETest::Log("RunAutoTest(%d, %d)", testSelection, subTestSelection);
+  webrtc::VideoEngine::SetAndroidObjects(javaVM, context);
+#ifndef WEBRTC_ANDROID_OPENSLES
+  // voice engine calls into ADM directly
+  webrtc::VoiceEngine::SetAndroidAudioDeviceObjects(javaVM, env, context);
+#endif
+
+  if (subTestSelection == 0) {
+    // Run all selected test
+    switch (testSelection) {
+      case 0:
+        vieAutoTest.ViEStandardTest();
+        break;
+      case 1:
+        vieAutoTest.ViEAPITest();
+        break;
+      case 2:
+        vieAutoTest.ViEExtendedTest();
+        break;
+      case 3:
+        vieAutoTest.ViELoopbackCall();
+        break;
+      default:
+        break;
+    }
+  }
+
+  switch (testSelection) {
+    case 0: // Specific standard test
+      switch (subTestSelection) {
+        case 1: // base
+          vieAutoTest.ViEBaseStandardTest();
+          break;
+
+        case 2: // capture
+          vieAutoTest.ViECaptureStandardTest();
+          break;
+
+        case 3: // codec
+          vieAutoTest.ViECodecStandardTest();
+          break;
+
+        case 5: //encryption
+          vieAutoTest.ViEEncryptionStandardTest();
+          break;
+
+        case 6: // file
+          vieAutoTest.ViEFileStandardTest();
+          break;
+
+        case 7: // image process
+          vieAutoTest.ViEImageProcessStandardTest();
+          break;
+
+        case 8: // network
+          vieAutoTest.ViENetworkStandardTest();
+          break;
+
+        case 9: // Render
+          vieAutoTest.ViERenderStandardTest();
+          break;
+
+        case 10: // RTP/RTCP
+          vieAutoTest.ViERtpRtcpStandardTest();
+          break;
+
+        default:
+          break;
+      }
+      break;
+
+    case 1:// specific API
+      switch (subTestSelection) {
+        case 1: // base
+          vieAutoTest.ViEBaseAPITest();
+          break;
+
+        case 2: // capture
+          vieAutoTest.ViECaptureAPITest();
+          break;
+
+        case 3: // codec
+          vieAutoTest.ViECodecAPITest();
+          break;
+
+        case 5: //encryption
+          vieAutoTest.ViEEncryptionAPITest();
+          break;
+
+        case 6: // file
+          vieAutoTest.ViEFileAPITest();
+          break;
+
+        case 7: // image process
+          vieAutoTest.ViEImageProcessAPITest();
+          break;
+
+        case 8: // network
+          vieAutoTest.ViENetworkAPITest();
+          break;
+
+        case 9: // Render
+          vieAutoTest.ViERenderAPITest();
+          break;
+
+        case 10: // RTP/RTCP
+          vieAutoTest.ViERtpRtcpAPITest();
+          break;
+        case 11:
+          break;
+
+        default:
+          break;
+      }
+      break;
+
+    case 2:// specific extended
+      switch (subTestSelection) {
+        case 1: // base
+          vieAutoTest.ViEBaseExtendedTest();
+          break;
+
+        case 2: // capture
+          vieAutoTest.ViECaptureExtendedTest();
+          break;
+
+        case 3: // codec
+          vieAutoTest.ViECodecExtendedTest();
+          break;
+
+        case 5: //encryption
+          vieAutoTest.ViEEncryptionExtendedTest();
+          break;
+
+        case 6: // file
+          vieAutoTest.ViEFileExtendedTest();
+          break;
+
+        case 7: // image process
+          vieAutoTest.ViEImageProcessExtendedTest();
+          break;
+
+        case 8: // network
+          vieAutoTest.ViENetworkExtendedTest();
+          break;
+
+        case 9: // Render
+          vieAutoTest.ViERenderExtendedTest();
+          break;
+
+        case 10: // RTP/RTCP
+          vieAutoTest.ViERtpRtcpExtendedTest();
+          break;
+
+        case 11:
+          break;
+
+        default:
+          break;
+      }
+      break;
+
+    case 3:
+      vieAutoTest.ViELoopbackCall();
+      break;
+
+    default:
+      break;
+    }
+
+  return 0;
+}
+
+int main(int argc, char** argv) {
+  // TODO(leozwang): Add real tests here
+  return 0;
+}
diff --git a/src/video_engine/test/auto_test/source/vie_autotest_base.cc b/src/video_engine/test/auto_test/source/vie_autotest_base.cc
new file mode 100644
index 0000000..0225cb6
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_autotest_base.cc
@@ -0,0 +1,235 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/video_capture/main/interface/video_capture_factory.h"
+#include "video_engine/test/auto_test/interface/vie_autotest.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_defines.h"
+#include "video_engine/test/auto_test/primitives/base_primitives.h"
+#include "video_engine/test/auto_test/primitives/general_primitives.h"
+#include "video_engine/test/libvietest/include/tb_interfaces.h"
+
+class BaseObserver : public webrtc::ViEBaseObserver {
+ public:
+  BaseObserver()
+      : cpu_load_(0) {}
+
+  virtual void PerformanceAlarm(const unsigned int cpu_load) {
+    cpu_load_ = cpu_load;
+  }
+  unsigned int cpu_load_;
+};
+
+void ViEAutoTest::ViEBaseStandardTest() {
+  // ***************************************************************
+  // Begin create/initialize WebRTC Video Engine for testing
+  // ***************************************************************
+
+  TbInterfaces interfaces("ViEBaseStandardTest");
+
+  // ***************************************************************
+  // Engine ready. Set up the test case:
+  // ***************************************************************
+  int video_channel = -1;
+  EXPECT_EQ(0, interfaces.base->CreateChannel(video_channel));
+
+  webrtc::VideoCaptureModule* video_capture_module(NULL);
+  const unsigned int kMaxDeviceNameLength = 128;
+  char device_name[kMaxDeviceNameLength];
+  memset(device_name, 0, kMaxDeviceNameLength);
+  int capture_id;
+
+  webrtc::ViEBase* base_interface = interfaces.base;
+  webrtc::ViERender* render_interface = interfaces.render;
+  webrtc::ViECapture* capture_interface = interfaces.capture;
+
+  FindCaptureDeviceOnSystem(capture_interface,
+                            device_name,
+                            kMaxDeviceNameLength,
+                            &capture_id,
+                            &video_capture_module);
+
+  EXPECT_EQ(0, capture_interface->ConnectCaptureDevice(capture_id,
+                                                       video_channel));
+  EXPECT_EQ(0, capture_interface->StartCapture(capture_id));
+
+  ConfigureRtpRtcp(interfaces.rtp_rtcp, video_channel);
+
+  EXPECT_EQ(0, render_interface->RegisterVideoRenderModule(*_vrm1));
+  EXPECT_EQ(0, render_interface->RegisterVideoRenderModule(*_vrm2));
+
+  RenderInWindow(render_interface, capture_id, _window1, 0);
+  RenderInWindow(render_interface, video_channel, _window2, 1);
+
+  // ***************************************************************
+  // Run the actual test:
+  // ***************************************************************
+  ViETest::Log("You should shortly see a local preview from camera %s"
+               " in window 1 and the remote video in window 2.", device_name);
+  ::TestI420CallSetup(interfaces.codec, interfaces.video_engine,
+                      base_interface, interfaces.network, video_channel,
+                      device_name);
+
+  // ***************************************************************
+  // Testing finished. Tear down Video Engine
+  // ***************************************************************
+  EXPECT_EQ(0, capture_interface->StopCapture(capture_id));
+  EXPECT_EQ(0, base_interface->StopReceive(video_channel));
+
+  StopAndRemoveRenderers(base_interface, render_interface, video_channel,
+                         capture_id);
+
+  EXPECT_EQ(0, render_interface->DeRegisterVideoRenderModule(*_vrm1));
+  EXPECT_EQ(0, render_interface->DeRegisterVideoRenderModule(*_vrm2));
+
+  EXPECT_EQ(0, capture_interface->ReleaseCaptureDevice(capture_id));
+
+  video_capture_module->Release();
+  video_capture_module = NULL;
+
+  EXPECT_EQ(0, base_interface->DeleteChannel(video_channel));
+}
+
+void ViEAutoTest::ViEBaseExtendedTest() {
+  // Start with standard test
+  ViEBaseAPITest();
+  ViEBaseStandardTest();
+
+  // ***************************************************************
+  // Test BaseObserver
+  // ***************************************************************
+  // TODO(mflodman) Add test for base observer. Cpu load must be over 75%.
+//    BaseObserver base_observer;
+//    EXPECT_EQ(vie_base->RegisterObserver(base_observer), 0);
+//
+//    AutoTestSleep(KAutoTestSleepTimeMs);
+//
+//    EXPECT_EQ(vie_base->DeregisterObserver(), 0);
+//    EXPECT_GT(base_observer.cpu_load, 0);
+}
+
+void ViEAutoTest::ViEBaseAPITest() {
+  // ***************************************************************
+  // Begin create/initialize WebRTC Video Engine for testing
+  // ***************************************************************
+  // Get the ViEBase API
+  webrtc::ViEBase* vie_base = webrtc::ViEBase::GetInterface(NULL);
+  EXPECT_EQ(NULL, vie_base) << "Should return null for a bad ViE pointer";
+
+  webrtc::VideoEngine* video_engine = webrtc::VideoEngine::Create();
+  EXPECT_TRUE(NULL != video_engine);
+
+  std::string trace_file_path =
+    ViETest::GetResultOutputPath() + "ViEBaseAPI_trace.txt";
+  EXPECT_EQ(0, video_engine->SetTraceFile(trace_file_path.c_str()));
+
+  vie_base = webrtc::ViEBase::GetInterface(video_engine);
+  EXPECT_TRUE(NULL != vie_base);
+
+  webrtc::ViENetwork* vie_network =
+      webrtc::ViENetwork::GetInterface(video_engine);
+  EXPECT_TRUE(vie_network != NULL);
+
+  // ***************************************************************
+  // Engine ready. Begin testing class
+  // ***************************************************************
+  char version[1024] = "";
+  EXPECT_EQ(0, vie_base->GetVersion(version));
+  EXPECT_EQ(0, vie_base->LastError());
+
+  // Create without init
+  int video_channel = -1;
+  EXPECT_NE(0, vie_base->CreateChannel(video_channel)) <<
+      "Should fail since Init has not been called yet";
+  EXPECT_EQ(0, vie_base->Init());
+  EXPECT_EQ(0, vie_base->CreateChannel(video_channel));
+
+  int video_channel2 = -1;
+  int video_channel3 = -1;
+  EXPECT_EQ(0, vie_base->CreateChannel(video_channel2));
+  EXPECT_NE(video_channel, video_channel2) <<
+      "Should allocate new number for independent channel";
+
+  EXPECT_EQ(0, vie_base->DeleteChannel(video_channel2));
+
+  EXPECT_EQ(-1, vie_base->CreateChannel(video_channel2, video_channel + 1))
+      << "Should fail since neither channel exists (the second must)";
+
+  // Create a receive only channel and a send channel. Verify we can't send on
+  // the receive only channel.
+  EXPECT_EQ(0, vie_base->CreateReceiveChannel(video_channel2,
+                                                  video_channel));
+  EXPECT_EQ(0, vie_base->CreateChannel(video_channel3, video_channel));
+
+  const char* ip_address = "127.0.0.1\0";
+  const int send_port = 1234;
+  EXPECT_EQ(0, vie_network->SetSendDestination(video_channel, ip_address,
+                                                   send_port));
+  EXPECT_EQ(0, vie_network->SetSendDestination(video_channel2, ip_address,
+                                                   send_port + 2));
+  EXPECT_EQ(0, vie_network->SetSendDestination(video_channel3, ip_address,
+                                                   send_port + 4));
+
+  EXPECT_EQ(0, vie_base->StartSend(video_channel));
+  EXPECT_EQ(-1, vie_base->StartSend(video_channel2));
+  EXPECT_EQ(0, vie_base->StartSend(video_channel3));
+  EXPECT_EQ(0, vie_base->StopSend(video_channel));
+  EXPECT_EQ(0, vie_base->StopSend(video_channel3));
+
+  // Test Voice Engine integration with Video Engine.
+  webrtc::VoiceEngine* voice_engine = NULL;
+  webrtc::VoEBase* voe_base = NULL;
+  int audio_channel = -1;
+
+  voice_engine = webrtc::VoiceEngine::Create();
+  EXPECT_TRUE(NULL != voice_engine);
+
+  voe_base = webrtc::VoEBase::GetInterface(voice_engine);
+  EXPECT_TRUE(NULL != voe_base);
+  EXPECT_EQ(0, voe_base->Init());
+
+  audio_channel = voe_base->CreateChannel();
+  EXPECT_NE(-1, audio_channel);
+
+  // Connect before setting VoE.
+  EXPECT_NE(0, vie_base->ConnectAudioChannel(video_channel, audio_channel))
+      << "Should fail since Voice Engine is not set yet.";
+
+  // Then do it right.
+  EXPECT_EQ(0, vie_base->SetVoiceEngine(voice_engine));
+  EXPECT_EQ(0, vie_base->ConnectAudioChannel(video_channel, audio_channel));
+
+  // ***************************************************************
+  // Testing finished. Tear down Video Engine
+  // ***************************************************************
+  EXPECT_NE(0, vie_base->DisconnectAudioChannel(video_channel + 5)) <<
+      "Should fail: disconnecting bogus channel";
+
+  EXPECT_EQ(0, vie_base->DisconnectAudioChannel(video_channel));
+
+  // Clean up voice engine
+  EXPECT_EQ(0, vie_network->Release());
+  EXPECT_EQ(0, vie_base->SetVoiceEngine(NULL));
+  // VoiceEngine reference counting is per object, not per interface, so
+  // Release should return != 0.
+  EXPECT_NE(0, voe_base->Release());
+  EXPECT_TRUE(webrtc::VoiceEngine::Delete(voice_engine));
+
+  webrtc::ViEBase* vie_base2 = webrtc::ViEBase::GetInterface(video_engine);
+  EXPECT_TRUE(NULL != vie_base2);
+
+  EXPECT_EQ(1, vie_base->Release()) <<
+      "There should be one interface left.";
+
+  EXPECT_FALSE(webrtc::VideoEngine::Delete(video_engine)) <<
+      "Should fail since there are interfaces left.";
+
+  EXPECT_EQ(0, vie_base->Release());
+  EXPECT_TRUE(webrtc::VideoEngine::Delete(video_engine));
+}
diff --git a/src/video_engine/test/auto_test/source/vie_autotest_capture.cc b/src/video_engine/test/auto_test/source/vie_autotest_capture.cc
new file mode 100644
index 0000000..174d1db
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_autotest_capture.cc
@@ -0,0 +1,534 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "common_types.h"  // NOLINT
+#include "engine_configurations.h"  // NOLINT
+#include "gflags/gflags.h"
+#include "modules/video_capture/main/interface/video_capture_factory.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "video_engine/include/vie_base.h"
+#include "video_engine/include/vie_capture.h"
+#include "video_engine/include/vie_codec.h"
+#include "video_engine/include/vie_network.h"
+#include "video_engine/include/vie_render.h"
+#include "video_engine/include/vie_rtp_rtcp.h"
+#include "video_engine/test/auto_test/interface/vie_autotest.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_defines.h"
+#include "video_engine/test/libvietest/include/tb_interfaces.h"
+#include "video_engine/test/libvietest/include/tb_video_channel.h"
+#include "voice_engine/include/voe_base.h"
+
+DEFINE_bool(capture_test_ensure_resolution_alignment_in_capture_device, true,
+            "If true, we will give resolutions slightly below a reasonable "
+            "value to test the camera's ability to choose a good resolution. "
+            "If false, we will provide reasonable resolutions instead.");
+
+class CaptureObserver : public webrtc::ViECaptureObserver {
+ public:
+  CaptureObserver()
+      : brightness_(webrtc::Normal),
+        alarm_(webrtc::AlarmCleared),
+        frame_rate_(0) {}
+
+  virtual void BrightnessAlarm(const int capture_id,
+                               const webrtc::Brightness brightness) {
+    brightness_ = brightness;
+    switch (brightness) {
+      case webrtc::Normal:
+        ViETest::Log("  BrightnessAlarm Normal");
+        break;
+      case webrtc::Bright:
+        ViETest::Log("  BrightnessAlarm Bright");
+        break;
+      case webrtc::Dark:
+        ViETest::Log("  BrightnessAlarm Dark");
+        break;
+    }
+  }
+
+  virtual void CapturedFrameRate(const int capture_id,
+                                 const unsigned char frame_rate) {
+    ViETest::Log("  CapturedFrameRate %u", frame_rate);
+    frame_rate_ = frame_rate;
+  }
+
+  virtual void NoPictureAlarm(const int capture_id,
+                              const webrtc::CaptureAlarm alarm) {
+    alarm_ = alarm;
+    if (alarm == webrtc::AlarmRaised) {
+      ViETest::Log("NoPictureAlarm CARaised.");
+    } else {
+      ViETest::Log("NoPictureAlarm CACleared.");
+    }
+  }
+
+  webrtc::Brightness brightness_;
+  webrtc::CaptureAlarm alarm_;
+  unsigned char frame_rate_;
+};
+
+class CaptureEffectFilter : public webrtc::ViEEffectFilter {
+ public:
+  CaptureEffectFilter(unsigned int expected_width, unsigned int expected_height)
+      : number_of_captured_frames_(0),
+        expected_width_(expected_width),
+        expected_height_(expected_height) {
+  }
+
+  // Implements video_engineEffectFilter.
+  virtual int Transform(int size, unsigned char* frame_buffer,
+                        unsigned int time_stamp90KHz, unsigned int width,
+                        unsigned int height) {
+    EXPECT_TRUE(frame_buffer != NULL);
+    EXPECT_EQ(expected_width_, width);
+    EXPECT_EQ(expected_height_, height);
+    ++number_of_captured_frames_;
+    return 0;
+  }
+
+  int number_of_captured_frames_;
+
+ protected:
+  unsigned int expected_width_;
+  unsigned int expected_height_;
+};
+
+void ViEAutoTest::ViECaptureStandardTest() {
+  /// **************************************************************
+  //  Begin create/initialize WebRTC Video Engine for testing
+  /// **************************************************************
+
+  /// **************************************************************
+  //  Engine ready. Begin testing class
+  /// **************************************************************
+
+  TbInterfaces video_engine("video_engineCaptureStandardTest");
+
+  webrtc::VideoCaptureModule::DeviceInfo* dev_info =
+      webrtc::VideoCaptureFactory::CreateDeviceInfo(0);
+  ASSERT_TRUE(dev_info != NULL);
+
+  int number_of_capture_devices = dev_info->NumberOfDevices();
+  ViETest::Log("Number of capture devices %d",
+                        number_of_capture_devices);
+  ASSERT_GT(number_of_capture_devices, 0)
+      << "This test requires a capture device (i.e. a webcam)";
+
+  int capture_device_id[10];
+  memset(capture_device_id, 0, sizeof(capture_device_id));
+  webrtc::VideoCaptureModule* vcpms[10];
+  memset(vcpms, 0, sizeof(vcpms));
+
+  // Check capabilities
+  for (int device_index = 0; device_index < number_of_capture_devices;
+       ++device_index) {
+    char device_name[128];
+    char device_unique_name[512];
+
+    EXPECT_EQ(0, dev_info->GetDeviceName(device_index,
+                                         device_name,
+                                         sizeof(device_name),
+                                         device_unique_name,
+                                         sizeof(device_unique_name)));
+    ViETest::Log("Found capture device %s\nUnique name %s",
+                          device_name, device_unique_name);
+
+#if !defined(WEBRTC_MAC_INTEL)  // these functions will return -1
+    int number_of_capabilities =
+        dev_info->NumberOfCapabilities(device_unique_name);
+    EXPECT_GT(number_of_capabilities, 0);
+
+    for (int cap_index = 0; cap_index < number_of_capabilities; ++cap_index) {
+      webrtc::VideoCaptureCapability capability;
+      EXPECT_EQ(0, dev_info->GetCapability(device_unique_name, cap_index,
+                                           capability));
+      ViETest::Log("Capture capability %d (of %u)", cap_index + 1,
+                   number_of_capabilities);
+      ViETest::Log("witdh %d, height %d, frame rate %d",
+                   capability.width, capability.height, capability.maxFPS);
+      ViETest::Log("expected delay %d, color type %d, encoding %d",
+                   capability.expectedCaptureDelay, capability.rawType,
+                   capability.codecType);
+      EXPECT_GT(capability.width, 0);
+      EXPECT_GT(capability.height, 0);
+      EXPECT_GT(capability.maxFPS, -1);  // >= 0
+      EXPECT_GT(capability.expectedCaptureDelay, 0);
+    }
+#endif
+  }
+  // Capture Capability Functions are not supported on WEBRTC_MAC_INTEL.
+#if !defined(WEBRTC_MAC_INTEL)
+
+  // Check allocation. Try to allocate them all after each other.
+  for (int device_index = 0; device_index < number_of_capture_devices;
+       ++device_index) {
+    char device_name[128];
+    char device_unique_name[512];
+    EXPECT_EQ(0, dev_info->GetDeviceName(device_index,
+                                         device_name,
+                                         sizeof(device_name),
+                                         device_unique_name,
+                                         sizeof(device_unique_name)));
+    webrtc::VideoCaptureModule* vcpm =
+        webrtc::VideoCaptureFactory::Create(device_index, device_unique_name);
+    EXPECT_TRUE(vcpm != NULL);
+    vcpm->AddRef();
+    vcpms[device_index] = vcpm;
+
+    EXPECT_EQ(0, video_engine.capture->AllocateCaptureDevice(
+        *vcpm, capture_device_id[device_index]));
+
+    webrtc::VideoCaptureCapability capability;
+    EXPECT_EQ(0, dev_info->GetCapability(device_unique_name, 0, capability));
+
+    // Test that the camera select the closest capability to the selected
+    // width and height.
+    CaptureEffectFilter filter(capability.width, capability.height);
+    EXPECT_EQ(0, video_engine.image_process->RegisterCaptureEffectFilter(
+        capture_device_id[device_index], filter));
+
+    ViETest::Log("Testing Device %s capability width %d  height %d",
+                 device_unique_name, capability.width, capability.height);
+
+    if (FLAGS_capture_test_ensure_resolution_alignment_in_capture_device) {
+      // This tests that the capture device properly aligns to a
+      // multiple of 16 (or at least 8).
+      capability.height = capability.height - 2;
+      capability.width  = capability.width  - 2;
+    }
+
+    webrtc::CaptureCapability vie_capability;
+    vie_capability.width = capability.width;
+    vie_capability.height = capability.height;
+    vie_capability.codecType = capability.codecType;
+    vie_capability.maxFPS = capability.maxFPS;
+    vie_capability.rawType = capability.rawType;
+
+    EXPECT_EQ(0, video_engine.capture->StartCapture(
+        capture_device_id[device_index], vie_capability));
+    webrtc::TickTime start_time = webrtc::TickTime::Now();
+
+    while (filter.number_of_captured_frames_ < 10 &&
+           (webrtc::TickTime::Now() - start_time).Milliseconds() < 10000) {
+      AutoTestSleep(100);
+    }
+
+    EXPECT_GT(filter.number_of_captured_frames_, 9)
+        << "Should capture at least some frames";
+
+    EXPECT_EQ(0, video_engine.image_process->DeregisterCaptureEffectFilter(
+        capture_device_id[device_index]));
+
+#ifdef WEBRTC_ANDROID  // Can only allocate one camera at the time on Android.
+    EXPECT_EQ(0, video_engine.capture->StopCapture(
+        capture_device_id[device_index]));
+    EXPECT_EQ(0, video_engine.capture->ReleaseCaptureDevice(
+        capture_device_id[device_index]));
+#endif
+  }
+
+  /// **************************************************************
+  //  Testing finished. Tear down Video Engine
+  /// **************************************************************
+
+  // Stop all started capture devices.
+  for (int device_index = 0; device_index < number_of_capture_devices;
+       ++device_index) {
+#if !defined(WEBRTC_ANDROID)
+    // Don't stop on Android since we can only allocate one camera.
+    EXPECT_EQ(0, video_engine.capture->StopCapture(
+        capture_device_id[device_index]));
+    EXPECT_EQ(0, video_engine.capture->ReleaseCaptureDevice(
+        capture_device_id[device_index]));
+#endif  // !WEBRTC_ANDROID
+    vcpms[device_index]->Release();
+  }
+#endif  // !WEBRTC_MAC_INTEL
+}
+
+void ViEAutoTest::ViECaptureExtendedTest() {
+  ViECaptureExternalCaptureTest();
+}
+
+void ViEAutoTest::ViECaptureAPITest() {
+  /// **************************************************************
+  //  Begin create/initialize WebRTC Video Engine for testing
+  /// **************************************************************
+
+  /// **************************************************************
+  //  Engine ready. Begin testing class
+  /// **************************************************************
+  TbInterfaces video_engine("video_engineCaptureAPITest");
+
+  video_engine.capture->NumberOfCaptureDevices();
+
+  char device_name[128];
+  char device_unique_name[512];
+  int capture_id = 0;
+
+  webrtc::VideoCaptureModule::DeviceInfo* dev_info =
+      webrtc::VideoCaptureFactory::CreateDeviceInfo(0);
+  ASSERT_TRUE(dev_info != NULL);
+  ASSERT_GT(dev_info->NumberOfDevices(), 0u)
+      << "This test requires a capture device (i.e. a webcam)";
+
+  // Get the first capture device
+  EXPECT_EQ(0, dev_info->GetDeviceName(0, device_name,
+                                       sizeof(device_name),
+                                       device_unique_name,
+                                       sizeof(device_unique_name)));
+
+  webrtc::VideoCaptureModule* vcpm =
+      webrtc::VideoCaptureFactory::Create(0, device_unique_name);
+  vcpm->AddRef();
+  EXPECT_TRUE(vcpm != NULL);
+
+  // Allocate capture device.
+  EXPECT_EQ(0, video_engine.capture->AllocateCaptureDevice(*vcpm, capture_id));
+
+  // Start the capture device.
+  EXPECT_EQ(0, video_engine.capture->StartCapture(capture_id));
+
+  // Start again. Should fail.
+  EXPECT_NE(0, video_engine.capture->StartCapture(capture_id));
+  EXPECT_EQ(kViECaptureDeviceAlreadyStarted, video_engine.LastError());
+
+  // Start invalid capture device.
+  EXPECT_NE(0, video_engine.capture->StartCapture(capture_id + 1));
+  EXPECT_EQ(kViECaptureDeviceDoesNotExist, video_engine.LastError());
+
+  // Stop invalid capture device.
+  EXPECT_NE(0, video_engine.capture->StopCapture(capture_id + 1));
+  EXPECT_EQ(kViECaptureDeviceDoesNotExist, video_engine.LastError());
+
+  // Stop the capture device.
+  EXPECT_EQ(0, video_engine.capture->StopCapture(capture_id));
+
+  // Stop the capture device again.
+  EXPECT_NE(0, video_engine.capture->StopCapture(capture_id));
+  EXPECT_EQ(kViECaptureDeviceNotStarted, video_engine.LastError());
+
+  // Connect to invalid channel.
+  EXPECT_NE(0, video_engine.capture->ConnectCaptureDevice(capture_id, 0));
+  EXPECT_EQ(kViECaptureDeviceInvalidChannelId,
+            video_engine.LastError());
+
+  TbVideoChannel channel(video_engine);
+
+  // Connect invalid capture_id.
+  EXPECT_NE(0, video_engine.capture->ConnectCaptureDevice(capture_id + 1,
+                                                 channel.videoChannel));
+  EXPECT_EQ(kViECaptureDeviceDoesNotExist, video_engine.LastError());
+
+  // Connect the capture device to the channel.
+  EXPECT_EQ(0, video_engine.capture->ConnectCaptureDevice(capture_id,
+                                                 channel.videoChannel));
+
+  // Connect the channel again.
+  EXPECT_NE(0, video_engine.capture->ConnectCaptureDevice(capture_id,
+                                                 channel.videoChannel));
+  EXPECT_EQ(kViECaptureDeviceAlreadyConnected,
+            video_engine.LastError());
+
+  // Start the capture device.
+  EXPECT_EQ(0, video_engine.capture->StartCapture(capture_id));
+
+  // Release invalid capture device.
+  EXPECT_NE(0, video_engine.capture->ReleaseCaptureDevice(capture_id + 1));
+  EXPECT_EQ(kViECaptureDeviceDoesNotExist, video_engine.LastError());
+
+  // Release the capture device.
+  EXPECT_EQ(0, video_engine.capture->ReleaseCaptureDevice(capture_id));
+
+  // Release the capture device again.
+  EXPECT_NE(0, video_engine.capture->ReleaseCaptureDevice(capture_id));
+  EXPECT_EQ(kViECaptureDeviceDoesNotExist, video_engine.LastError());
+
+  // Test GetOrientation.
+  webrtc::VideoCaptureRotation orientation;
+  char dummy_name[5];
+  EXPECT_NE(0, dev_info->GetOrientation(dummy_name, orientation));
+
+  // Test SetRotation.
+  EXPECT_NE(0, video_engine.capture->SetRotateCapturedFrames(
+      capture_id, webrtc::RotateCapturedFrame_90));
+  EXPECT_EQ(kViECaptureDeviceDoesNotExist, video_engine.LastError());
+
+  // Allocate capture device.
+  EXPECT_EQ(0, video_engine.capture->AllocateCaptureDevice(*vcpm, capture_id));
+
+  EXPECT_EQ(0, video_engine.capture->SetRotateCapturedFrames(
+      capture_id, webrtc::RotateCapturedFrame_0));
+  EXPECT_EQ(0, video_engine.capture->SetRotateCapturedFrames(
+      capture_id, webrtc::RotateCapturedFrame_90));
+  EXPECT_EQ(0, video_engine.capture->SetRotateCapturedFrames(
+      capture_id, webrtc::RotateCapturedFrame_180));
+  EXPECT_EQ(0, video_engine.capture->SetRotateCapturedFrames(
+      capture_id, webrtc::RotateCapturedFrame_270));
+
+  // Release the capture device
+  EXPECT_EQ(0, video_engine.capture->ReleaseCaptureDevice(capture_id));
+
+  /// **************************************************************
+  //  Testing finished. Tear down Video Engine
+  /// **************************************************************
+  delete dev_info;
+  vcpm->Release();
+}
+
+void ViEAutoTest::ViECaptureExternalCaptureTest() {
+  /// **************************************************************
+  //  Begin create/initialize WebRTC Video Engine for testing
+  /// **************************************************************
+
+  TbInterfaces video_engine("video_engineCaptureExternalCaptureTest");
+  TbVideoChannel channel(video_engine);
+  channel.StartReceive();
+  channel.StartSend();
+
+  webrtc::VideoCaptureExternal* external_capture = NULL;
+  int capture_id = 0;
+
+  // Allocate the external capture device.
+  webrtc::VideoCaptureModule* vcpm =
+      webrtc::VideoCaptureFactory::Create(0, external_capture);
+  EXPECT_TRUE(vcpm != NULL);
+  EXPECT_TRUE(external_capture != NULL);
+  vcpm->AddRef();
+
+  EXPECT_EQ(0, video_engine.capture->AllocateCaptureDevice(*vcpm, capture_id));
+
+  // Connect the capture device to the channel.
+  EXPECT_EQ(0, video_engine.capture->ConnectCaptureDevice(capture_id,
+                                                 channel.videoChannel));
+
+  // Render the local capture.
+  EXPECT_EQ(0, video_engine.render->AddRenderer(capture_id, _window1, 1, 0.0,
+                                                0.0, 1.0, 1.0));
+
+  // Render the remote capture.
+  EXPECT_EQ(0, video_engine.render->AddRenderer(channel.videoChannel, _window2,
+                                                1, 0.0, 0.0, 1.0, 1.0));
+  EXPECT_EQ(0, video_engine.render->StartRender(capture_id));
+  EXPECT_EQ(0, video_engine.render->StartRender(channel.videoChannel));
+
+  // Register observer.
+  CaptureObserver observer;
+  EXPECT_EQ(0, video_engine.capture->RegisterObserver(capture_id, observer));
+
+  // Enable brightness alarm.
+  EXPECT_EQ(0, video_engine.capture->EnableBrightnessAlarm(capture_id, true));
+
+  CaptureEffectFilter effect_filter(176, 144);
+  EXPECT_EQ(0, video_engine.image_process->RegisterCaptureEffectFilter(
+      capture_id, effect_filter));
+
+  // Call started.
+  ViETest::Log("You should see local preview from external capture\n"
+               "in window 1 and the remote video in window 2.\n");
+
+  /// **************************************************************
+  //  Engine ready. Begin testing class
+  /// **************************************************************
+  const unsigned int video_frame_length = (176 * 144 * 3) / 2;
+  unsigned char* video_frame = new unsigned char[video_frame_length];
+  memset(video_frame, 128, 176 * 144);
+
+  int frame_count = 0;
+  webrtc::VideoCaptureCapability capability;
+  capability.width = 176;
+  capability.height = 144;
+  capability.rawType = webrtc::kVideoI420;
+
+  ViETest::Log("Testing external capturing and frame rate callbacks.");
+  // TODO(mflodman) Change when using a real file!
+  // while (fread(video_frame, video_frame_length, 1, foreman) == 1)
+  while (frame_count < 120) {
+    external_capture->IncomingFrame(
+        video_frame, video_frame_length, capability,
+        webrtc::TickTime::Now().MillisecondTimestamp());
+    AutoTestSleep(33);
+
+    if (effect_filter.number_of_captured_frames_ > 2) {
+      EXPECT_EQ(webrtc::Normal, observer.brightness_) <<
+          "Brightness or picture alarm should not have been called yet.";
+      EXPECT_EQ(webrtc::AlarmCleared, observer.alarm_) <<
+          "Brightness or picture alarm should not have been called yet.";
+    }
+    frame_count++;
+  }
+
+  // Test brightness alarm.
+  // Test bright image.
+  for (int i = 0; i < 176 * 144; ++i) {
+    if (video_frame[i] <= 155)
+      video_frame[i] = video_frame[i] + 100;
+    else
+      video_frame[i] = 255;
+  }
+  ViETest::Log("Testing Brighness alarm");
+  for (int frame = 0; frame < 30; ++frame) {
+    external_capture->IncomingFrame(
+        video_frame, video_frame_length, capability,
+        webrtc::TickTime::Now().MillisecondTimestamp());
+    AutoTestSleep(33);
+  }
+  EXPECT_EQ(webrtc::Bright, observer.brightness_) <<
+      "Should be bright at this point since we are using a bright image.";
+
+  // Test Dark image
+  for (int i = 0; i < 176 * 144; ++i) {
+    video_frame[i] = video_frame[i] > 200 ? video_frame[i] - 200 : 0;
+  }
+  for (int frame = 0; frame < 30; ++frame) {
+    external_capture->IncomingFrame(
+        video_frame, video_frame_length, capability,
+        webrtc::TickTime::Now().MillisecondTimestamp());
+    AutoTestSleep(33);
+  }
+  EXPECT_EQ(webrtc::Dark, observer.brightness_) <<
+      "Should be dark at this point since we are using a dark image.";
+  EXPECT_GT(effect_filter.number_of_captured_frames_, 150) <<
+      "Frames should have been played.";
+
+  EXPECT_GE(observer.frame_rate_, 29) <<
+      "Frame rate callback should be approximately correct.";
+  EXPECT_LE(observer.frame_rate_, 30) <<
+      "Frame rate callback should be approximately correct.";
+
+  // Test no picture alarm
+  ViETest::Log("Testing NoPictureAlarm.");
+  AutoTestSleep(1050);
+
+  EXPECT_EQ(webrtc::AlarmRaised, observer.alarm_) <<
+      "No picture alarm should be raised.";
+  for (int frame = 0; frame < 10; ++frame) {
+    external_capture->IncomingFrame(
+        video_frame, video_frame_length, capability,
+        webrtc::TickTime::Now().MillisecondTimestamp());
+    AutoTestSleep(33);
+  }
+  EXPECT_EQ(webrtc::AlarmCleared, observer.alarm_) <<
+  "Alarm should be cleared since ge just got some data.";
+
+  delete video_frame;
+
+  // Release the capture device
+  EXPECT_EQ(0, video_engine.capture->ReleaseCaptureDevice(capture_id));
+
+  // Release the capture device again
+  EXPECT_NE(0, video_engine.capture->ReleaseCaptureDevice(capture_id));
+  EXPECT_EQ(kViECaptureDeviceDoesNotExist, video_engine.LastError());
+  vcpm->Release();
+
+  /// **************************************************************
+  //  Testing finished. Tear down Video Engine
+  /// **************************************************************
+}
diff --git a/src/video_engine/test/auto_test/source/vie_autotest_cocoa_mac.mm b/src/video_engine/test/auto_test/source/vie_autotest_cocoa_mac.mm
new file mode 100644
index 0000000..f30fe0e
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_autotest_cocoa_mac.mm
@@ -0,0 +1,141 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "engine_configurations.h"
+
+#import "cocoa_render_view.h"
+#import "testsupport/mac/run_threaded_main_mac.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_main.h"
+#include "vie_autotest_mac_cocoa.h"
+#include "vie_autotest_defines.h"
+#include "vie_autotest.h"
+#include "vie_autotest_main.h"
+
+@implementation TestCocoaUi
+
+// TODO(phoglund): This file probably leaks memory like crazy. Find someone
+// who understands objective-c memory management and fix it.
+
+- (void)prepareToCreateWindowsWithSize:(AutoTestRect)window1Size
+                               andSize:(AutoTestRect)window2Size
+                             withTitle:(void*)window1_title
+                              andTitle:(void*)window2_title {
+  window1Size_ = window1Size;
+  window2Size_ = window2Size;
+  window1Title_ = window1_title;
+  window2Title_ = window2_title;
+}
+
+- (void)createWindows:(NSObject*)ignored {
+  NSRect window1Frame = NSMakeRect(
+      window1Size_.origin.x, window1Size_.origin.y,
+      window1Size_.size.width, window1Size_.size.height);
+
+  window1_ = [[NSWindow alloc]
+               initWithContentRect:window1Frame
+                         styleMask:NSTitledWindowMask
+                           backing:NSBackingStoreBuffered
+                             defer:NO];
+  [window1_ orderOut:nil];
+
+  NSRect render_view1_frame = NSMakeRect(
+      0, 0, window1Size_.size.width, window1Size_.size.height);
+  cocoaRenderView1_ =
+      [[CocoaRenderView alloc] initWithFrame:render_view1_frame];
+
+  [[window1_ contentView] addSubview:(NSView*)cocoaRenderView1_];
+  [window1_ setTitle:[NSString stringWithFormat:@"%s", window1Title_]];
+  [window1_ makeKeyAndOrderFront:NSApp];
+
+  NSRect window2_frame = NSMakeRect(
+      window2Size_.origin.x, window2Size_.origin.y,
+      window2Size_.size.width, window2Size_.size.height);
+
+  window2_ = [[NSWindow alloc]
+               initWithContentRect:window2_frame
+                         styleMask:NSTitledWindowMask
+                           backing:NSBackingStoreBuffered
+                             defer:NO];
+  [window2_ orderOut:nil];
+
+  NSRect render_view2_frame = NSMakeRect(
+      0, 0, window1Size_.size.width, window1Size_.size.height);
+  cocoaRenderView2_ =
+      [[CocoaRenderView alloc] initWithFrame:render_view2_frame];
+  [[window2_ contentView] addSubview:(NSView*)cocoaRenderView2_];
+  [window2_ setTitle:[NSString stringWithFormat:@"%s", window2Title_]];
+  [window2_ makeKeyAndOrderFront:NSApp];
+}
+
+- (NSWindow*)window1 {
+  return window1_;
+}
+
+- (NSWindow*)window2 {
+  return window2_;
+}
+
+- (CocoaRenderView*)cocoaRenderView1 {
+  return cocoaRenderView1_;
+}
+
+- (CocoaRenderView*)cocoaRenderView2 {
+  return cocoaRenderView2_;
+}
+
+@end
+
+ViEAutoTestWindowManager::ViEAutoTestWindowManager() {
+  cocoa_ui_ = [[TestCocoaUi alloc] init];
+}
+
+ViEAutoTestWindowManager::~ViEAutoTestWindowManager() {
+  [cocoa_ui_ release];
+}
+
+int ViEAutoTestWindowManager::CreateWindows(AutoTestRect window1Size,
+                                            AutoTestRect window2Size,
+                                            void* window1_title,
+                                            void* window2_title) {
+    [cocoa_ui_ prepareToCreateWindowsWithSize:window1Size
+                                      andSize:window2Size
+                                    withTitle:window1_title
+                                     andTitle:window2_title];
+    [cocoa_ui_ performSelectorOnMainThread:@selector(createWindows:)
+                                withObject:nil
+                             waitUntilDone:YES];
+    return 0;
+}
+
+int ViEAutoTestWindowManager::TerminateWindows() {
+    [[cocoa_ui_ window1] close];
+    [[cocoa_ui_ window2] close];
+    return 0;
+}
+
+void* ViEAutoTestWindowManager::GetWindow1() {
+    return [cocoa_ui_ cocoaRenderView1];
+}
+
+void* ViEAutoTestWindowManager::GetWindow2() {
+    return [cocoa_ui_ cocoaRenderView2];
+}
+
+bool ViEAutoTestWindowManager::SetTopmostWindow() {
+    return true;
+}
+
+// This is acts as our "main" for mac. The actual (reusable) main is defined in
+// testsupport/mac/run_threaded_main_mac.mm.
+int ImplementThisToRunYourTest(int argc, char** argv) {
+  ViEAutoTestMain auto_test;
+  return auto_test.RunTests(argc, argv);
+}
+
diff --git a/src/video_engine/test/auto_test/source/vie_autotest_codec.cc b/src/video_engine/test/auto_test/source/vie_autotest_codec.cc
new file mode 100644
index 0000000..b1cb960
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_autotest_codec.cc
@@ -0,0 +1,749 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "common_types.h"  // NOLINT
+#include "engine_configurations.h"  // NOLINT
+#include "video_engine/test/auto_test/interface/vie_autotest_defines.h"
+#include "video_engine/test/auto_test/interface/vie_autotest.h"
+#include "video_engine/test/libvietest/include/tb_capture_device.h"
+#include "video_engine/test/libvietest/include/tb_I420_codec.h"
+#include "video_engine/test/libvietest/include/tb_interfaces.h"
+#include "video_engine/test/libvietest/include/tb_video_channel.h"
+#include "video_engine/include/vie_base.h"
+#include "video_engine/include/vie_capture.h"
+#include "video_engine/include/vie_codec.h"
+#include "video_engine/include/vie_network.h"
+#include "video_engine/include/vie_render.h"
+#include "video_engine/include/vie_rtp_rtcp.h"
+#include "voice_engine/include/voe_base.h"
+
+class TestCodecObserver
+    : public webrtc::ViEEncoderObserver,
+    public webrtc::ViEDecoderObserver {
+     public:
+  int incoming_codec_called_;
+  int incoming_rate_called_;
+  int outgoing_rate_called_;
+
+  unsigned char last_payload_type_;
+  uint16_t last_width_;
+  uint16_t last_height_;
+
+  unsigned int last_outgoing_framerate_;
+  unsigned int last_outgoing_bitrate_;
+  unsigned int last_incoming_framerate_;
+  unsigned int last_incoming_bitrate_;
+
+  webrtc::VideoCodec incoming_codec_;
+
+  TestCodecObserver()
+      : incoming_codec_called_(0),
+        incoming_rate_called_(0),
+        outgoing_rate_called_(0),
+        last_payload_type_(0),
+        last_width_(0),
+        last_height_(0),
+        last_outgoing_framerate_(0),
+        last_outgoing_bitrate_(0),
+        last_incoming_framerate_(0),
+        last_incoming_bitrate_(0) {
+    memset(&incoming_codec_, 0, sizeof(incoming_codec_));
+  }
+  virtual void IncomingCodecChanged(const int video_channel,
+                                    const webrtc::VideoCodec& video_codec) {
+    incoming_codec_called_++;
+    last_payload_type_ = video_codec.plType;
+    last_width_ = video_codec.width;
+    last_height_ = video_codec.height;
+
+    memcpy(&incoming_codec_, &video_codec, sizeof(video_codec));
+  }
+
+  virtual void IncomingRate(const int video_channel,
+                            const unsigned int framerate,
+                            const unsigned int bitrate) {
+    incoming_rate_called_++;
+    last_incoming_framerate_ += framerate;
+    last_incoming_bitrate_ += bitrate;
+  }
+
+  virtual void OutgoingRate(const int video_channel,
+                            const unsigned int framerate,
+                            const unsigned int bitrate) {
+    outgoing_rate_called_++;
+    last_outgoing_framerate_ += framerate;
+    last_outgoing_bitrate_ += bitrate;
+  }
+
+  virtual void RequestNewKeyFrame(const int video_channel) {
+  }
+};
+
+class RenderFilter : public webrtc::ViEEffectFilter {
+ public:
+  int num_frames_;
+  unsigned int last_render_width_;
+  unsigned int last_render_height_;
+
+  RenderFilter()
+      : num_frames_(0),
+        last_render_width_(0),
+        last_render_height_(0) {
+  }
+
+  virtual ~RenderFilter() {
+  }
+
+  virtual int Transform(int size,
+                        unsigned char* frame_buffer,
+                        unsigned int time_stamp90KHz,
+                        unsigned int width,
+                        unsigned int height) {
+    num_frames_++;
+    last_render_width_ = width;
+    last_render_height_ = height;
+    return 0;
+  }
+};
+
+void ViEAutoTest::ViECodecStandardTest() {
+  TbInterfaces interfaces("ViECodecStandardTest");
+
+  TbCaptureDevice capture_device = TbCaptureDevice(interfaces);
+  int capture_id = capture_device.captureId;
+
+  webrtc::VideoEngine* video_engine = interfaces.video_engine;
+  webrtc::ViEBase* base = interfaces.base;
+  webrtc::ViECapture* capture = interfaces.capture;
+  webrtc::ViERender* render = interfaces.render;
+  webrtc::ViECodec* codec = interfaces.codec;
+  webrtc::ViERTP_RTCP* rtp_rtcp = interfaces.rtp_rtcp;
+  webrtc::ViENetwork* network = interfaces.network;
+
+  int video_channel = -1;
+  EXPECT_EQ(0, base->CreateChannel(video_channel));
+  EXPECT_EQ(0, capture->ConnectCaptureDevice(capture_id, video_channel));
+  EXPECT_EQ(0, rtp_rtcp->SetRTCPStatus(
+      video_channel, webrtc::kRtcpCompound_RFC4585));
+
+  EXPECT_EQ(0, rtp_rtcp->SetKeyFrameRequestMethod(
+      video_channel, webrtc::kViEKeyFrameRequestPliRtcp));
+  EXPECT_EQ(0, rtp_rtcp->SetTMMBRStatus(video_channel, true));
+  EXPECT_EQ(0, render->AddRenderer(capture_id, _window1, 0, 0.0, 0.0, 1.0,
+                                   1.0));
+  EXPECT_EQ(0, render->AddRenderer(video_channel, _window2, 1, 0.0, 0.0, 1.0,
+                                   1.0));
+  EXPECT_EQ(0, render->StartRender(capture_id));
+  EXPECT_EQ(0, render->StartRender(video_channel));
+
+  webrtc::VideoCodec video_codec;
+  memset(&video_codec, 0, sizeof(webrtc::VideoCodec));
+  for (int idx = 0; idx < codec->NumberOfCodecs(); idx++) {
+    EXPECT_EQ(0, codec->GetCodec(idx, video_codec));
+    if (video_codec.codecType != webrtc::kVideoCodecI420) {
+      video_codec.width = 640;
+      video_codec.height = 480;
+    }
+    if (video_codec.codecType == webrtc::kVideoCodecI420) {
+      video_codec.width = 176;
+      video_codec.height = 144;
+    }
+    EXPECT_EQ(0, codec->SetReceiveCodec(video_channel, video_codec));
+  }
+
+  for (int idx = 0; idx < codec->NumberOfCodecs(); idx++) {
+    EXPECT_EQ(0, codec->GetCodec(idx, video_codec));
+    if (video_codec.codecType == webrtc::kVideoCodecVP8) {
+      EXPECT_EQ(0, codec->SetSendCodec(video_channel, video_codec));
+      break;
+    }
+  }
+
+  const char* ip_address = "127.0.0.1";
+  const uint16_t rtp_port = 6000;
+  EXPECT_EQ(0, network->SetLocalReceiver(video_channel, rtp_port));
+  EXPECT_EQ(0, base->StartReceive(video_channel));
+  EXPECT_EQ(0, network->SetSendDestination(
+      video_channel, ip_address, rtp_port));
+  EXPECT_EQ(0, base->StartSend(video_channel));
+
+  // Make sure all codecs runs
+  {
+    webrtc::ViEImageProcess* image_process =
+        webrtc::ViEImageProcess::GetInterface(video_engine);
+    TestCodecObserver codec_observer;
+    EXPECT_EQ(0, codec->RegisterDecoderObserver(video_channel, codec_observer));
+    ViETest::Log("Loop through all codecs for %d seconds",
+                 KAutoTestSleepTimeMs / 1000);
+
+    for (int i = 0; i < codec->NumberOfCodecs() - 2; i++) {
+      EXPECT_EQ(0, codec->GetCodec(i, video_codec));
+      if (video_codec.codecType == webrtc::kVideoCodecI420) {
+        // Lower resolution to sockets keep up.
+        video_codec.width = 176;
+        video_codec.height = 144;
+        video_codec.maxFramerate = 15;
+      }
+      EXPECT_EQ(0, codec->SetSendCodec(video_channel, video_codec));
+      ViETest::Log("\t %d. %s", i, video_codec.plName);
+
+      RenderFilter frame_counter;
+      EXPECT_EQ(0, image_process->RegisterRenderEffectFilter(video_channel,
+                                                             frame_counter));
+      AutoTestSleep(KAutoTestSleepTimeMs);
+
+      // Verify we've received and decoded correct payload.
+      EXPECT_EQ(video_codec.codecType,
+                codec_observer.incoming_codec_.codecType);
+
+      int max_number_of_possible_frames = video_codec.maxFramerate
+          * KAutoTestSleepTimeMs / 1000;
+
+      if (video_codec.codecType == webrtc::kVideoCodecI420) {
+        // Don't expect too much from I420, it requires a lot of bandwidth.
+        EXPECT_GT(frame_counter.num_frames_, 0);
+      } else {
+#ifdef WEBRTC_ANDROID
+        // To get the autotest to pass on some slow devices
+        EXPECT_GT(frame_counter.num_frames_, max_number_of_possible_frames / 6);
+#else
+        EXPECT_GT(frame_counter.num_frames_, max_number_of_possible_frames / 4);
+#endif
+      }
+
+      EXPECT_EQ(0, image_process->DeregisterRenderEffectFilter(
+          video_channel));
+    }
+    image_process->Release();
+    EXPECT_EQ(0, codec->DeregisterDecoderObserver(video_channel));
+    ViETest::Log("Done!");
+  }
+
+  // Test Callbacks
+  TestCodecObserver codec_observer;
+  EXPECT_EQ(0, codec->RegisterEncoderObserver(video_channel, codec_observer));
+  EXPECT_EQ(0, codec->RegisterDecoderObserver(video_channel, codec_observer));
+
+  ViETest::Log("\nTesting codec callbacks...");
+
+  for (int idx = 0; idx < codec->NumberOfCodecs(); idx++) {
+    EXPECT_EQ(0, codec->GetCodec(idx, video_codec));
+    if (video_codec.codecType == webrtc::kVideoCodecVP8) {
+      EXPECT_EQ(0, codec->SetSendCodec(video_channel, video_codec));
+      break;
+    }
+  }
+  AutoTestSleep(KAutoTestSleepTimeMs);
+
+  EXPECT_EQ(0, base->StopSend(video_channel));
+  EXPECT_EQ(0, codec->DeregisterEncoderObserver(video_channel));
+  EXPECT_EQ(0, codec->DeregisterDecoderObserver(video_channel));
+
+  EXPECT_GT(codec_observer.incoming_codec_called_, 0);
+  EXPECT_GT(codec_observer.incoming_rate_called_, 0);
+  EXPECT_GT(codec_observer.outgoing_rate_called_, 0);
+
+  EXPECT_EQ(0, base->StopReceive(video_channel));
+  EXPECT_EQ(0, render->StopRender(video_channel));
+  EXPECT_EQ(0, render->RemoveRenderer(capture_id));
+  EXPECT_EQ(0, render->RemoveRenderer(video_channel));
+  EXPECT_EQ(0, capture->DisconnectCaptureDevice(video_channel));
+  EXPECT_EQ(0, base->DeleteChannel(video_channel));
+}
+
+void ViEAutoTest::ViECodecExtendedTest() {
+  {
+    ViETest::Log(" ");
+    ViETest::Log("========================================");
+    ViETest::Log(" ViECodec Extended Test\n");
+
+    ViECodecExternalCodecTest();
+
+    TbInterfaces interfaces("ViECodecExtendedTest");
+    webrtc::ViEBase* base = interfaces.base;
+    webrtc::ViECapture* capture = interfaces.capture;
+    webrtc::ViERender* render = interfaces.render;
+    webrtc::ViECodec* codec = interfaces.codec;
+    webrtc::ViERTP_RTCP* rtp_rtcp = interfaces.rtp_rtcp;
+    webrtc::ViENetwork* network = interfaces.network;
+
+    TbCaptureDevice capture_device = TbCaptureDevice(interfaces);
+    int capture_id = capture_device.captureId;
+
+    int video_channel = -1;
+    EXPECT_EQ(0, base->CreateChannel(video_channel));
+    EXPECT_EQ(0, capture->ConnectCaptureDevice(capture_id, video_channel));
+    EXPECT_EQ(0, rtp_rtcp->SetRTCPStatus(
+                video_channel, webrtc::kRtcpCompound_RFC4585));
+    EXPECT_EQ(0, rtp_rtcp->SetKeyFrameRequestMethod(
+                video_channel, webrtc::kViEKeyFrameRequestPliRtcp));
+    EXPECT_EQ(0, rtp_rtcp->SetTMMBRStatus(video_channel, true));
+    EXPECT_EQ(0, render->AddRenderer(capture_id, _window1, 0, 0.0, 0.0, 1.0,
+                                     1.0));
+
+    EXPECT_EQ(0, render->AddRenderer(video_channel, _window2, 1, 0.0, 0.0, 1.0,
+                                     1.0));
+    EXPECT_EQ(0, render->StartRender(capture_id));
+    EXPECT_EQ(0, render->StartRender(video_channel));
+
+    webrtc::VideoCodec video_codec;
+    memset(&video_codec, 0, sizeof(webrtc::VideoCodec));
+    for (int idx = 0; idx < codec->NumberOfCodecs(); idx++) {
+      EXPECT_EQ(0, codec->GetCodec(idx, video_codec));
+      if (video_codec.codecType != webrtc::kVideoCodecI420) {
+        video_codec.width = 640;
+        video_codec.height = 480;
+      }
+      EXPECT_EQ(0, codec->SetReceiveCodec(video_channel, video_codec));
+    }
+
+    const char* ip_address = "127.0.0.1";
+    const uint16_t rtp_port = 6000;
+    EXPECT_EQ(0, network->SetLocalReceiver(video_channel, rtp_port));
+    EXPECT_EQ(0, base->StartReceive(video_channel));
+    EXPECT_EQ(0, network->SetSendDestination(
+        video_channel, ip_address, rtp_port));
+    EXPECT_EQ(0, base->StartSend(video_channel));
+
+    // Codec specific tests
+    memset(&video_codec, 0, sizeof(webrtc::VideoCodec));
+    EXPECT_EQ(0, base->StopSend(video_channel));
+
+    TestCodecObserver codec_observer;
+    EXPECT_EQ(0, codec->RegisterEncoderObserver(video_channel, codec_observer));
+    EXPECT_EQ(0, codec->RegisterDecoderObserver(video_channel, codec_observer));
+    EXPECT_EQ(0, base->StopReceive(video_channel));
+
+    EXPECT_EQ(0, render->StopRender(video_channel));
+    EXPECT_EQ(0, render->RemoveRenderer(capture_id));
+    EXPECT_EQ(0, render->RemoveRenderer(video_channel));
+    EXPECT_EQ(0, capture->DisconnectCaptureDevice(video_channel));
+    EXPECT_EQ(0, base->DeleteChannel(video_channel));
+  }
+
+  // Multiple send channels.
+  {
+    // Create two channels, where the second channel is created from the
+    // first channel. Send different resolutions on the channels and verify
+    // the received streams.
+    TbInterfaces video_engine("ViECodecExtendedTest2");
+    TbCaptureDevice tb_capture(video_engine);
+
+    // Create channel 1.
+    int video_channel_1 = -1;
+    EXPECT_EQ(0, video_engine.base->CreateChannel(video_channel_1));
+
+    // Create channel 2 based on the first channel.
+    int video_channel_2 = -1;
+    EXPECT_EQ(0, video_engine.base->CreateChannel(
+        video_channel_2, video_channel_1));
+    EXPECT_NE(video_channel_1, video_channel_2)
+        << "Channel 2 should be unique.";
+
+    uint16_t rtp_port_1 = 12000;
+    uint16_t rtp_port_2 = 13000;
+    EXPECT_EQ(0, video_engine.network->SetLocalReceiver(
+        video_channel_1, rtp_port_1));
+    EXPECT_EQ(0, video_engine.network->SetSendDestination(
+        video_channel_1, "127.0.0.1", rtp_port_1));
+    EXPECT_EQ(0, video_engine.network->SetLocalReceiver(
+        video_channel_2, rtp_port_2));
+    EXPECT_EQ(0, video_engine.network->SetSendDestination(
+        video_channel_2, "127.0.0.1", rtp_port_2));
+    tb_capture.ConnectTo(video_channel_1);
+    tb_capture.ConnectTo(video_channel_2);
+    EXPECT_EQ(0, video_engine.rtp_rtcp->SetKeyFrameRequestMethod(
+        video_channel_1, webrtc::kViEKeyFrameRequestPliRtcp));
+    EXPECT_EQ(0, video_engine.rtp_rtcp->SetKeyFrameRequestMethod(
+        video_channel_2, webrtc::kViEKeyFrameRequestPliRtcp));
+    EXPECT_EQ(0, video_engine.render->AddRenderer(video_channel_1, _window1, 0,
+                                                  0.0, 0.0, 1.0, 1.0));
+    EXPECT_EQ(0, video_engine.render->StartRender(video_channel_1));
+    EXPECT_EQ(0, video_engine.render->AddRenderer(video_channel_2, _window2, 0,
+                                                  0.0, 0.0, 1.0, 1.0));
+    EXPECT_EQ(0, video_engine.render->StartRender(video_channel_2));
+
+    // Set Send codec.
+    uint16_t codec_width = 320;
+    uint16_t codec_height = 240;
+    bool codec_set = false;
+    webrtc::VideoCodec video_codec;
+    webrtc::VideoCodec send_codec1;
+    webrtc::VideoCodec send_codec2;
+    for (int idx = 0; idx < video_engine.codec->NumberOfCodecs(); idx++) {
+      EXPECT_EQ(0, video_engine.codec->GetCodec(idx, video_codec));
+      EXPECT_EQ(0, video_engine.codec->SetReceiveCodec(video_channel_1,
+                                                       video_codec));
+      if (video_codec.codecType == webrtc::kVideoCodecVP8) {
+        memcpy(&send_codec1, &video_codec, sizeof(video_codec));
+        send_codec1.width = codec_width;
+        send_codec1.height = codec_height;
+        EXPECT_EQ(0, video_engine.codec->SetSendCodec(
+                    video_channel_1, send_codec1));
+        memcpy(&send_codec2, &video_codec, sizeof(video_codec));
+        send_codec2.width = 2 * codec_width;
+        send_codec2.height = 2 * codec_height;
+        EXPECT_EQ(0, video_engine.codec->SetSendCodec(
+                    video_channel_2, send_codec2));
+        codec_set = true;
+        break;
+      }
+    }
+    EXPECT_TRUE(codec_set);
+
+    // We need to verify using render effect filter since we won't trigger
+    // a decode reset in loopback (due to using the same SSRC).
+    RenderFilter filter1;
+    RenderFilter filter2;
+    EXPECT_EQ(0, video_engine.image_process->RegisterRenderEffectFilter(
+        video_channel_1, filter1));
+    EXPECT_EQ(0, video_engine.image_process->RegisterRenderEffectFilter(
+        video_channel_2, filter2));
+
+    EXPECT_EQ(0, video_engine.base->StartReceive(video_channel_1));
+    EXPECT_EQ(0, video_engine.base->StartSend(video_channel_1));
+    EXPECT_EQ(0, video_engine.base->StartReceive(video_channel_2));
+    EXPECT_EQ(0, video_engine.base->StartSend(video_channel_2));
+
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, video_engine.base->StopReceive(video_channel_1));
+    EXPECT_EQ(0, video_engine.base->StopSend(video_channel_1));
+    EXPECT_EQ(0, video_engine.base->StopReceive(video_channel_2));
+    EXPECT_EQ(0, video_engine.base->StopSend(video_channel_2));
+
+    EXPECT_EQ(0, video_engine.image_process->DeregisterRenderEffectFilter(
+        video_channel_1));
+    EXPECT_EQ(0, video_engine.image_process->DeregisterRenderEffectFilter(
+        video_channel_2));
+    EXPECT_EQ(send_codec1.width, filter1.last_render_width_);
+    EXPECT_EQ(send_codec1.height, filter1.last_render_height_);
+    EXPECT_EQ(send_codec2.width, filter2.last_render_width_);
+    EXPECT_EQ(send_codec2.height, filter2.last_render_height_);
+
+    EXPECT_EQ(0, video_engine.base->DeleteChannel(video_channel_1));
+    EXPECT_EQ(0, video_engine.base->DeleteChannel(video_channel_2));
+  }
+}
+
+void ViEAutoTest::ViECodecAPITest() {
+  webrtc::VideoEngine* video_engine = NULL;
+  video_engine = webrtc::VideoEngine::Create();
+  EXPECT_TRUE(video_engine != NULL);
+
+  webrtc::ViEBase* base = webrtc::ViEBase::GetInterface(video_engine);
+  EXPECT_EQ(0, base->Init());
+
+  int video_channel = -1;
+  EXPECT_EQ(0, base->CreateChannel(video_channel));
+
+  webrtc::ViECodec* codec = webrtc::ViECodec::GetInterface(video_engine);
+  EXPECT_TRUE(codec != NULL);
+
+  webrtc::VideoCodec video_codec;
+  memset(&video_codec, 0, sizeof(webrtc::VideoCodec));
+
+  const int number_of_codecs = codec->NumberOfCodecs();
+
+  for (int i = 0; i < number_of_codecs; i++) {
+    EXPECT_EQ(0, codec->GetCodec(i, video_codec));
+    if (video_codec.codecType == webrtc::kVideoCodecVP8) {
+      video_codec.codecSpecific.VP8.automaticResizeOn = true;
+      video_codec.codecSpecific.VP8.frameDroppingOn = true;
+      EXPECT_EQ(0, codec->SetSendCodec(video_channel, video_codec));
+      break;
+    }
+  }
+  memset(&video_codec, 0, sizeof(video_codec));
+  EXPECT_EQ(0, codec->GetSendCodec(video_channel, video_codec));
+  EXPECT_EQ(webrtc::kVideoCodecVP8, video_codec.codecType);
+  EXPECT_TRUE(video_codec.codecSpecific.VP8.automaticResizeOn);
+  EXPECT_TRUE(video_codec.codecSpecific.VP8.frameDroppingOn);
+
+  for (int i = 0; i < number_of_codecs; i++) {
+    EXPECT_EQ(0, codec->GetCodec(i, video_codec));
+    if (video_codec.codecType == webrtc::kVideoCodecI420) {
+      video_codec.codecSpecific.VP8.automaticResizeOn = false;
+      video_codec.codecSpecific.VP8.frameDroppingOn = false;
+      EXPECT_EQ(0, codec->SetSendCodec(video_channel, video_codec));
+      break;
+    }
+  }
+  memset(&video_codec, 0, sizeof(video_codec));
+  EXPECT_EQ(0, codec->GetSendCodec(video_channel, video_codec));
+  EXPECT_EQ(webrtc::kVideoCodecI420, video_codec.codecType);
+  EXPECT_FALSE(video_codec.codecSpecific.VP8.automaticResizeOn);
+  EXPECT_FALSE(video_codec.codecSpecific.VP8.frameDroppingOn);
+
+  EXPECT_EQ(0, base->DeleteChannel(video_channel));
+
+  EXPECT_EQ(0, codec->Release());
+  EXPECT_EQ(0, base->Release());
+  EXPECT_TRUE(webrtc::VideoEngine::Delete(video_engine));
+}
+
+#ifdef WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
+#include "video_engine/include/vie_external_codec.h"
+#endif
+void ViEAutoTest::ViECodecExternalCodecTest() {
+  ViETest::Log(" ");
+  ViETest::Log("========================================");
+  ViETest::Log(" ViEExternalCodec Test\n");
+
+  /// **************************************************************
+  //  Begin create/initialize WebRTC Video Engine for testing
+  /// **************************************************************
+
+  /// **************************************************************
+  //  Engine ready. Begin testing class
+  /// **************************************************************
+
+#ifdef WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
+  int number_of_errors = 0;
+  {
+    int error = 0;
+    TbInterfaces ViE("ViEExternalCodec");
+    TbCaptureDevice capture_device(ViE);
+    TbVideoChannel channel(ViE, webrtc::kVideoCodecI420, 352, 288, 30,
+                           (352 * 288 * 3 * 8 * 30) / (2 * 1000));
+    capture_device.ConnectTo(channel.videoChannel);
+
+    error = ViE.render->AddRenderer(channel.videoChannel, _window1, 0, 0.0, 0.0,
+                                    1.0, 1.0);
+    number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+    error = ViE.render->StartRender(channel.videoChannel);
+    number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    channel.StartReceive();
+    channel.StartSend();
+
+    ViETest::Log("Using internal I420 codec");
+    AutoTestSleep(KAutoTestSleepTimeMs / 2);
+
+    webrtc::ViEExternalCodec* vie_external_codec =
+        webrtc::ViEExternalCodec::GetInterface(ViE.video_engine);
+    number_of_errors += ViETest::TestError(vie_external_codec != NULL,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+    webrtc::VideoCodec codec_struct;
+    error = ViE.codec->GetSendCodec(channel.videoChannel, codecStruct);
+    number_of_errors += ViETest::TestError(vie_external_codec != NULL,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    // Use external encoder instead.
+    {
+      TbI420Encoder ext_encoder;
+
+      // Test to register on wrong channel.
+      error = vie_external_codec->RegisterExternalSendCodec(
+          channel.videoChannel + 5, codecStruct.plType, &ext_encoder);
+      number_of_errors += ViETest::TestError(error == -1,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      number_of_errors += ViETest::TestError(
+          ViE.LastError() == kViECodecInvalidArgument,
+          "ERROR: %s at line %d", __FUNCTION__, __LINE__);
+
+      error = vie_external_codec->RegisterExternalSendCodec(
+                channel.videoChannel, codecStruct.plType, &ext_encoder);
+      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+
+      // Use new external encoder
+      error = ViE.codec->SetSendCodec(channel.videoChannel, codecStruct);
+      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+
+      TbI420Decoder ext_decoder;
+      error = vie_external_codec->RegisterExternalReceiveCodec(
+          channel.videoChannel, codecStruct.plType, &ext_decoder);
+      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+
+      error = ViE.codec->SetReceiveCodec(channel.videoChannel, codec_struct);
+      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+
+      ViETest::Log("Using external I420 codec");
+      AutoTestSleep(KAutoTestSleepTimeMs);
+
+      // Test to deregister on wrong channel
+      error = vie_external_codec->DeRegisterExternalSendCodec(
+          channel.videoChannel + 5, codecStruct.plType);
+      number_of_errors += ViETest::TestError(error == -1,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      number_of_errors += ViETest::TestError(
+          ViE.LastError() == kViECodecInvalidArgument, "ERROR: %s at line %d",
+          __FUNCTION__, __LINE__);
+
+      // Test to deregister wrong payload type.
+      error = vie_external_codec->DeRegisterExternalSendCodec(
+          channel.videoChannel, codecStruct.plType - 1);
+      number_of_errors += ViETest::TestError(error == -1,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+
+      // Deregister external send codec
+      error = vie_external_codec->DeRegisterExternalSendCodec(
+          channel.videoChannel, codecStruct.plType);
+      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+
+      error = vie_external_codec->DeRegisterExternalReceiveCodec(
+          channel.videoChannel, codecStruct.plType);
+      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+
+      // Verify that the encoder and decoder has been used
+      TbI420Encoder::FunctionCalls encode_calls =
+          ext_encoder.GetFunctionCalls();
+      number_of_errors += ViETest::TestError(encode_calls.InitEncode == 1,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      number_of_errors += ViETest::TestError(encode_calls.Release == 1,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      number_of_errors += ViETest::TestError(encode_calls.Encode > 30,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      number_of_errors += ViETest::TestError(
+          encode_calls.RegisterEncodeCompleteCallback == 1,
+          "ERROR: %s at line %d", __FUNCTION__, __LINE__);
+      number_of_errors += ViETest::TestError(encode_calls.SetRates > 1,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      number_of_errors += ViETest::TestError(encode_calls.SetPacketLoss > 1,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+
+      TbI420Decoder::FunctionCalls decode_calls =
+          ext_decoder.GetFunctionCalls();
+      number_of_errors += ViETest::TestError(decode_calls.InitDecode == 1,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      number_of_errors += ViETest::TestError(decode_calls.Release == 1,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      number_of_errors += ViETest::TestError(decode_calls.Decode > 30,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      number_of_errors += ViETest::TestError(
+          decode_calls.RegisterDecodeCompleteCallback == 1,
+          "ERROR: %s at line %d", __FUNCTION__, __LINE__);
+
+      ViETest::Log("Changing payload type Using external I420 codec");
+
+      codec_struct.plType = codecStruct.plType - 1;
+      error = vie_external_codec->RegisterExternalReceiveCodec(
+          channel.videoChannel, codec_struct.plType, &ext_decoder);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+
+      error = ViE.codec->SetReceiveCodec(channel.videoChannel,
+                                         codec_struct);
+      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+
+      error = vie_external_codec->RegisterExternalSendCodec(
+                channel.videoChannel, codec_struct.plType, &ext_encoder);
+      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+
+      // Use new external encoder
+      error = ViE.codec->SetSendCodec(channel.videoChannel,
+                                      codec_struct);
+      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+
+      AutoTestSleep(KAutoTestSleepTimeMs / 2);
+
+      /// **************************************************************
+      //  Testing finished. Tear down Video Engine
+      /// **************************************************************
+
+      error = vie_external_codec->DeRegisterExternalSendCodec(
+                channel.videoChannel, codecStruct.plType);
+      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      error = vie_external_codec->DeRegisterExternalReceiveCodec(
+                channel.videoChannel, codecStruct.plType);
+      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+
+      // Verify that the encoder and decoder has been used
+      encode_calls = ext_encoder.GetFunctionCalls();
+      number_of_errors += ViETest::TestError(encode_calls.InitEncode == 2,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      number_of_errors += ViETest::TestError(encode_calls.Release == 2,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      number_of_errors += ViETest::TestError(encode_calls.Encode > 30,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      number_of_errors += ViETest::TestError(
+          encode_calls.RegisterEncodeCompleteCallback == 2,
+          "ERROR: %s at line %d", __FUNCTION__, __LINE__);
+      number_of_errors += ViETest::TestError(encode_calls.SetRates > 1,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      number_of_errors += ViETest::TestError(encode_calls.SetPacketLoss > 1,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+
+      decode_calls = ext_decoder.GetFunctionCalls();
+      number_of_errors += ViETest::TestError(decode_calls.InitDecode == 2,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      number_of_errors += ViETest::TestError(decode_calls.Release == 2,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      number_of_errors += ViETest::TestError(decode_calls.Decode > 30,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      number_of_errors += ViETest::TestError(
+          decode_calls.RegisterDecodeCompleteCallback == 2,
+          "ERROR: %s at line %d", __FUNCTION__, __LINE__);
+
+      int remaining_interfaces = vie_external_codec->Release();
+      number_of_errors += ViETest::TestError(remaining_interfaces == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+    }  // tbI420Encoder and ext_decoder goes out of scope.
+
+    ViETest::Log("Using internal I420 codec");
+    AutoTestSleep(KAutoTestSleepTimeMs / 2);
+  }
+  if (number_of_errors > 0) {
+    // Test failed
+    ViETest::Log(" ");
+    ViETest::Log(" ERROR ViEExternalCodec Test FAILED!");
+    ViETest::Log(" Number of errors: %d", number_of_errors);
+    ViETest::Log("========================================");
+    ViETest::Log(" ");
+    return;
+  }
+
+  ViETest::Log(" ");
+  ViETest::Log(" ViEExternalCodec Test PASSED!");
+  ViETest::Log("========================================");
+  ViETest::Log(" ");
+  return;
+
+#else
+  ViETest::Log(" ViEExternalCodec not enabled\n");
+  return;
+#endif
+}
diff --git a/src/video_engine/test/auto_test/source/vie_autotest_custom_call.cc b/src/video_engine/test/auto_test/source/vie_autotest_custom_call.cc
new file mode 100644
index 0000000..a0e991e
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_autotest_custom_call.cc
@@ -0,0 +1,2064 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/test/auto_test/interface/vie_autotest.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_defines.h"
+
+#define VCM_RED_PAYLOAD_TYPE                            96
+#define VCM_ULPFEC_PAYLOAD_TYPE                         97
+#define DEFAULT_SEND_IP                                 "127.0.0.1"
+#define DEFAULT_VIDEO_PORT                              11111
+#define DEFAULT_VIDEO_CODEC                             "vp8"
+#define DEFAULT_VIDEO_CODEC_WIDTH                       640
+#define DEFAULT_VIDEO_CODEC_HEIGHT                      480
+#define DEFAULT_VIDEO_CODEC_BITRATE                     300
+#define DEFAULT_VIDEO_CODEC_MIN_BITRATE                 100
+#define DEFAULT_VIDEO_CODEC_MAX_BITRATE                 1000
+#define DEFAULT_AUDIO_PORT                              11113
+#define DEFAULT_AUDIO_CODEC                             "ISAC"
+#define DEFAULT_INCOMING_FILE_NAME                      "IncomingFile.avi"
+#define DEFAULT_OUTGOING_FILE_NAME                      "OutgoingFile.avi"
+#define DEFAULT_VIDEO_CODEC_MAX_FRAMERATE               30
+#define DEFAULT_VIDEO_PROTECTION_METHOD                 0
+#define DEFAULT_TEMPORAL_LAYER                          0
+
+enum StatisticsType {
+  kSendStatistic,
+  kReceivedStatistic
+};
+
+class ViEAutotestFileObserver : public webrtc::ViEFileObserver {
+ public:
+  ViEAutotestFileObserver() {}
+  ~ViEAutotestFileObserver() {}
+
+  void PlayFileEnded(const WebRtc_Word32 file_id) {
+    ViETest::Log("PlayFile ended");
+  }
+};
+
+class ViEAutotestEncoderObserver : public webrtc::ViEEncoderObserver {
+ public:
+  ViEAutotestEncoderObserver() {}
+  ~ViEAutotestEncoderObserver() {}
+
+  void OutgoingRate(const int video_channel,
+                    const unsigned int framerate,
+                    const unsigned int bitrate) {
+    std::cout << "Send FR: " << framerate
+              << " BR: " << bitrate << std::endl;
+  }
+};
+
+class ViEAutotestDecoderObserver : public webrtc::ViEDecoderObserver {
+ public:
+  ViEAutotestDecoderObserver() {}
+  ~ViEAutotestDecoderObserver() {}
+
+  void IncomingRate(const int video_channel,
+                    const unsigned int framerate,
+                    const unsigned int bitrate) {
+    std::cout << "Received FR: " << framerate
+              << " BR: " << bitrate << std::endl;
+  }
+  void IncomingCodecChanged(const int video_channel,
+                            const webrtc::VideoCodec& codec) {}
+  void RequestNewKeyFrame(const int video_channel) {
+    std::cout << "Decoder requesting a new key frame." << std::endl;
+  }
+};
+
+// The following are general helper functions.
+bool GetVideoDevice(webrtc::ViEBase* vie_base,
+                    webrtc::ViECapture* vie_capture,
+                    char* capture_device_name, char* capture_device_unique_id);
+bool GetIPAddress(char* IP);
+bool ValidateIP(std::string i_str);
+
+// The following are Print to stdout functions.
+void PrintCallInformation(char* IP,
+                          char* video_capture_device_name,
+                          char* video_capture_unique_id,
+                          webrtc::VideoCodec video_codec,
+                          int video_tx_port,
+                          int video_rx_port,
+                          char* audio_capture_device_name,
+                          char* audio_playbackDeviceName,
+                          webrtc::CodecInst audio_codec,
+                          int audio_tx_port,
+                          int audio_rx_port,
+                          int protection_method);
+void PrintRTCCPStatistics(webrtc::ViERTP_RTCP* vie_rtp_rtcp,
+                          int video_channel,
+                          StatisticsType stat_type);
+void PrintRTPStatistics(webrtc::ViERTP_RTCP* vie_rtp_rtcp,
+                        int video_channel);
+void PrintBandwidthUsage(webrtc::ViERTP_RTCP* vie_rtp_rtcp,
+                         int video_channel);
+void PrintCodecStatistics(webrtc::ViECodec* vie_codec,
+                          int video_channel,
+                          StatisticsType stat_type);
+void PrintGetDiscardedPackets(webrtc::ViECodec* vie_codec,
+                              int video_channel);
+void PrintVideoStreamInformation(webrtc::ViECodec* vie_codec,
+                                 int video_channel);
+void PrintVideoCodec(webrtc::VideoCodec video_codec);
+
+// The following are video functions.
+// TODO(amyfong): change to pointers as input arguments
+// instead of references
+bool SetVideoPorts(int* tx_port, int* rx_port);
+bool SetVideoCodecType(webrtc::ViECodec* vie_codec,
+                       webrtc::VideoCodec* video_codec);
+bool SetVideoCodecResolution(webrtc::ViECodec* vie_codec,
+                             webrtc::VideoCodec* video_codec);
+bool SetVideoCodecSize(webrtc::ViECodec* vie_codec,
+                       webrtc::VideoCodec* video_codec);
+bool SetVideoCodecBitrate(webrtc::ViECodec* vie_codec,
+                          webrtc::VideoCodec* video_codec);
+bool SetVideoCodecMinBitrate(webrtc::ViECodec* vie_codec,
+                             webrtc::VideoCodec* video_codec);
+bool SetVideoCodecMaxBitrate(webrtc::ViECodec* vie_codec,
+                             webrtc::VideoCodec* video_codec);
+bool SetVideoCodecMaxFramerate(webrtc::ViECodec* vie_codec,
+                               webrtc::VideoCodec* video_codec);
+bool SetVideoCodecTemporalLayer(webrtc::VideoCodec* video_codec);
+int GetVideoProtection();
+bool SetVideoProtection(webrtc::ViECodec* vie_codec,
+                        webrtc::ViERTP_RTCP* vie_rtp_rtcp,
+                        int video_channel, int protection_method);
+bool GetBitrateSignaling();
+
+// The following are audio helper functions.
+bool GetAudioDevices(webrtc::VoEBase* voe_base,
+                     webrtc::VoEHardware* voe_hardware,
+                     char* recording_device_name, int& recording_device_index,
+                     char* playbackDeviceName, int& playback_device_index);
+bool GetAudioDevices(webrtc::VoEBase* voe_base,
+                     webrtc::VoEHardware* voe_hardware,
+                     int& recording_device_index, int& playback_device_index);
+bool GetAudioPorts(int* tx_port, int* rx_port);
+bool GetAudioCodec(webrtc::VoECodec* voe_codec,
+                   webrtc::CodecInst& audio_codec);
+
+int ViEAutoTest::ViECustomCall() {
+  ViETest::Log(" ");
+  ViETest::Log("========================================");
+  ViETest::Log(" Enter values to use custom settings\n");
+
+  int error = 0;
+  int number_of_errors = 0;
+  std::string str;
+
+  // Create the VoE and get the VoE interfaces.
+  webrtc::VoiceEngine* voe = webrtc::VoiceEngine::Create();
+  number_of_errors += ViETest::TestError(voe != NULL, "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+  webrtc::VoEBase* voe_base = webrtc::VoEBase::GetInterface(voe);
+  number_of_errors += ViETest::TestError(voe_base != NULL,
+                                         "ERROR: %s at line %d", __FUNCTION__,
+                                         __LINE__);
+
+  error = voe_base->Init();
+  number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+  webrtc::VoECodec* voe_codec = webrtc::VoECodec::GetInterface(voe);
+  number_of_errors += ViETest::TestError(voe_codec != NULL,
+                                         "ERROR: %s at line %d", __FUNCTION__,
+                                         __LINE__);
+
+  webrtc::VoEHardware* voe_hardware =
+      webrtc::VoEHardware::GetInterface(voe);
+  number_of_errors += ViETest::TestError(voe_hardware != NULL,
+                                         "ERROR: %s at line %d", __FUNCTION__,
+                                         __LINE__);
+
+  webrtc::VoEAudioProcessing* voe_apm =
+      webrtc::VoEAudioProcessing::GetInterface(voe);
+  number_of_errors += ViETest::TestError(voe_apm != NULL,
+                                         "ERROR: %s at line %d", __FUNCTION__,
+                                         __LINE__);
+
+  // Create the ViE and get the ViE Interfaces.
+  webrtc::VideoEngine* vie = webrtc::VideoEngine::Create();
+  number_of_errors += ViETest::TestError(vie != NULL,
+                                         "ERROR: %s at line %d", __FUNCTION__,
+                                         __LINE__);
+
+  webrtc::ViEBase* vie_base = webrtc::ViEBase::GetInterface(vie);
+  number_of_errors += ViETest::TestError(vie_base != NULL,
+                                         "ERROR: %s at line %d", __FUNCTION__,
+                                         __LINE__);
+
+  error = vie_base->Init();
+  number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+  webrtc::ViECapture* vie_capture =
+      webrtc::ViECapture::GetInterface(vie);
+  number_of_errors += ViETest::TestError(vie_capture != NULL,
+                                         "ERROR: %s at line %d", __FUNCTION__,
+                                         __LINE__);
+
+  webrtc::ViERender* vie_renderer = webrtc::ViERender::GetInterface(vie);
+  number_of_errors += ViETest::TestError(vie_renderer != NULL,
+                                         "ERROR: %s at line %d", __FUNCTION__,
+                                         __LINE__);
+
+  webrtc::ViECodec* vie_codec = webrtc::ViECodec::GetInterface(vie);
+  number_of_errors += ViETest::TestError(vie_codec != NULL,
+                                         "ERROR: %s at line %d", __FUNCTION__,
+                                         __LINE__);
+
+  webrtc::ViENetwork* vie_network = webrtc::ViENetwork::GetInterface(vie);
+  number_of_errors += ViETest::TestError(vie_network != NULL,
+                                         "ERROR: %s at line %d", __FUNCTION__,
+                                         __LINE__);
+
+  webrtc::ViEFile* vie_file = webrtc::ViEFile::GetInterface(vie);
+  number_of_errors += ViETest::TestError(vie_file != NULL,
+                                         "ERROR: %s at line %d", __FUNCTION__,
+                                         __LINE__);
+
+  bool start_call = false;
+  const unsigned int kMaxIPLength = 16;
+  char ip_address[kMaxIPLength] = "";
+  const unsigned int KMaxUniqueIdLength = 256;
+  char unique_id[KMaxUniqueIdLength] = "";
+  char device_name[KMaxUniqueIdLength] = "";
+  int video_tx_port = 0;
+  int video_rx_port = 0;
+  int video_channel = -1;
+  webrtc::VideoCodec video_send_codec;
+  char audio_capture_device_name[KMaxUniqueIdLength] = "";
+  char audio_playbackDeviceName[KMaxUniqueIdLength] = "";
+  int audio_capture_device_index = -1;
+  int audio_playback_device_index = -1;
+  int audio_tx_port = 0;
+  int audio_rx_port = 0;
+  webrtc::CodecInst audio_codec;
+  int audio_channel = -1;
+  bool is_image_scale_enabled = false;
+  int protection_method = DEFAULT_VIDEO_PROTECTION_METHOD;
+  bool remb = true;
+
+  while (!start_call) {
+    // Get the IP address to use from call.
+    memset(ip_address, 0, kMaxIPLength);
+    GetIPAddress(ip_address);
+
+    // Get the video device to use for call.
+    memset(device_name, 0, KMaxUniqueIdLength);
+    memset(unique_id, 0, KMaxUniqueIdLength);
+    GetVideoDevice(vie_base, vie_capture, device_name, unique_id);
+
+    // Get and set the video ports for the call.
+    video_tx_port = 0;
+    video_rx_port = 0;
+    SetVideoPorts(&video_tx_port, &video_rx_port);
+
+    // Get and set the video codec parameters for the call.
+    memset(&video_send_codec, 0, sizeof(webrtc::VideoCodec));
+    SetVideoCodecType(vie_codec, &video_send_codec);
+    SetVideoCodecSize(vie_codec, &video_send_codec);
+    SetVideoCodecBitrate(vie_codec, &video_send_codec);
+    SetVideoCodecMinBitrate(vie_codec, &video_send_codec);
+    SetVideoCodecMaxBitrate(vie_codec, &video_send_codec);
+    SetVideoCodecMaxFramerate(vie_codec, &video_send_codec);
+    SetVideoCodecTemporalLayer(&video_send_codec);
+    remb = GetBitrateSignaling();
+
+    // Get the video protection method for the call.
+    protection_method = GetVideoProtection();
+
+    // Get the audio device for the call.
+    memset(audio_capture_device_name, 0, KMaxUniqueIdLength);
+    memset(audio_playbackDeviceName, 0, KMaxUniqueIdLength);
+    GetAudioDevices(voe_base, voe_hardware, audio_capture_device_name,
+                    audio_capture_device_index, audio_playbackDeviceName,
+                    audio_playback_device_index);
+
+    // Get the audio port for the call.
+    audio_tx_port = 0;
+    audio_rx_port = 0;
+    GetAudioPorts(&audio_tx_port, &audio_rx_port);
+
+    // Get the audio codec for the call.
+    memset(static_cast<void*>(&audio_codec), 0, sizeof(audio_codec));
+    GetAudioCodec(voe_codec, audio_codec);
+
+    // Now ready to start the call.  Check user wants to continue.
+    PrintCallInformation(ip_address, device_name, unique_id, video_send_codec,
+                         video_tx_port, video_rx_port,
+                         audio_capture_device_name, audio_playbackDeviceName,
+                         audio_codec, audio_tx_port, audio_rx_port,
+                         protection_method);
+
+    std::cout << std::endl;
+    std::cout << "1. Start the call" << std::endl;
+    std::cout << "2. Reconfigure call settings" << std::endl;
+    std::cout << "What do you want to do? Press enter for default "
+              << "(Start the call): ";
+
+    std::getline(std::cin, str);
+    int selection = 0;
+    selection = atoi(str.c_str());
+
+    switch (selection) {
+      case 0:
+        start_call = true;
+        break;
+      case 1:
+        start_call = true;
+        break;
+      case 2:
+        start_call = false;
+        break;
+      default:
+        // Invalid selection gets error mesage.
+        std::cout << "ERROR: Code=" << error
+                  << " Invalid selection" << std::endl;
+        continue;
+    }
+  }
+  /// **************************************************************
+  // Begin create/initialize WebRTC Video Engine for testing.
+  /// **************************************************************
+  if (start_call == true) {
+    // Configure audio channel first.
+    audio_channel = voe_base->CreateChannel();
+    error = voe_base->SetSendDestination(audio_channel, audio_tx_port,
+                                         ip_address);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = voe_base->SetLocalReceiver(audio_channel, audio_rx_port);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = voe_hardware->SetRecordingDevice(audio_capture_device_index);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = voe_hardware->SetPlayoutDevice(audio_playback_device_index);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = voe_codec->SetSendCodec(audio_channel, audio_codec);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = voe_apm->SetAgcStatus(true, webrtc::kAgcDefault);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = voe_apm->SetNsStatus(true, webrtc::kNsHighSuppression);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+    // Now configure the video channel.
+    error = vie->SetTraceFilter(webrtc::kTraceAll);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    std::string trace_file =
+        ViETest::GetResultOutputPath() + "ViECustomCall_trace.txt";
+    error = vie->SetTraceFile(trace_file.c_str());
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = vie_base->SetVoiceEngine(voe);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = vie_base->CreateChannel(video_channel);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = vie_base->ConnectAudioChannel(video_channel, audio_channel);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    int capture_id = 0;
+    error = vie_capture->AllocateCaptureDevice(unique_id,
+                                               KMaxUniqueIdLength,
+                                               capture_id);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = vie_capture->ConnectCaptureDevice(capture_id, video_channel);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = vie_capture->StartCapture(capture_id);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    webrtc::ViERTP_RTCP* vie_rtp_rtcp =
+        webrtc::ViERTP_RTCP::GetInterface(vie);
+    number_of_errors += ViETest::TestError(vie != NULL,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = vie_rtp_rtcp->SetRTCPStatus(video_channel,
+                                        webrtc::kRtcpCompound_RFC4585);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = vie_rtp_rtcp->SetKeyFrameRequestMethod(
+        video_channel, webrtc::kViEKeyFrameRequestPliRtcp);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    if (remb) {
+      error = vie_rtp_rtcp->SetRembStatus(video_channel, true, true);
+      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+    } else  {
+      error = vie_rtp_rtcp->SetTMMBRStatus(video_channel, true);
+      number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+    }
+
+    error = vie_renderer->AddRenderer(capture_id, _window1, 0, 0.0, 0.0, 1.0,
+                                      1.0);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = vie_renderer->AddRenderer(video_channel, _window2, 1, 0.0, 0.0, 1.0,
+                                      1.0);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+    error = vie_network->SetSendDestination(video_channel, ip_address,
+                                                video_tx_port);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = vie_network->SetLocalReceiver(video_channel, video_rx_port);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = vie_codec->SetSendCodec(video_channel, video_send_codec);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = vie_codec->SetReceiveCodec(video_channel, video_send_codec);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+    // Set the Video Protection before start send and receive.
+    SetVideoProtection(vie_codec, vie_rtp_rtcp,
+                       video_channel, protection_method);
+
+    // Start Voice Playout and Receive.
+    error = voe_base->StartReceive(audio_channel);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = voe_base->StartPlayout(audio_channel);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = voe_base->StartSend(audio_channel);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    // Now start the Video Send & Receive.
+    error = vie_base->StartSend(video_channel);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = vie_base->StartReceive(video_channel);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = vie_renderer->StartRender(capture_id);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = vie_renderer->StartRender(video_channel);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+    ViEAutotestFileObserver file_observer;
+    int file_id;
+
+    ViEAutotestEncoderObserver* codec_encoder_observer = NULL;
+    ViEAutotestDecoderObserver* codec_decoder_observer = NULL;
+
+    //  Engine ready, wait for input.
+
+    // Call started.
+    std::cout << std::endl;
+    std::cout << "Custom call started" << std::endl;
+    std::cout << std::endl << std::endl;
+
+    // Modify call or stop call.
+
+    std::cout << "Custom call in progress, would you like do?" << std::endl;
+    std::cout << "  0. Stop the call" << std::endl;
+    std::cout << "  1. Modify the call" << std::endl;
+    std::cout << "What do you want to do? "
+              << "Press enter for default (Stop the call): ";
+
+    std::getline(std::cin, str);
+    int selection = 0;
+    selection = atoi(str.c_str());
+
+    // Keep on modifying the call until user selects finish modify call.
+    bool modify_call = false;
+
+    while (selection == 1) {
+      std::cout << "Modify Custom Call" << std::endl;
+      std::cout << "  0. Finished modifying custom call" << std::endl;
+      std::cout << "  1. Change Video Send Codec" << std::endl;
+      std::cout << "  2. Change Video Send Size by Common Resolutions"
+                << std::endl;
+      std::cout << "  3. Change Video Send Size by Width & Height" << std::endl;
+      std::cout << "  4. Change Video Capture Device" << std::endl;
+      std::cout << "  5. Record Incoming Call" << std::endl;
+      std::cout << "  6. Record Outgoing Call" << std::endl;
+      std::cout << "  7. Play File on Video Channel"
+                << "(Assumes you recorded incoming & outgoing call)"
+                << std::endl;
+      std::cout << "  8. Change Video Protection Method" << std::endl;
+      std::cout << "  9. Toggle Encoder Observer" << std::endl;
+      std::cout << " 10. Toggle Decoder Observer" << std::endl;
+      std::cout << " 11. Print Call Information" << std::endl;
+      std::cout << " 12. Print Call Statistics" << std::endl;
+      std::cout << " 13. Toggle Image Scaling "
+                << "(Warning high CPU usage when enabled)"
+                << std::endl;
+      std::cout << "What do you want to do? ";
+      std::cout << "Press enter for default "
+                << "(Finished modifying custom call): ";
+
+      std::getline(std::cin, str);
+      int modify_selection = 0;
+      int file_selection = 0;
+
+      modify_selection = atoi(str.c_str());
+
+      switch (modify_selection) {
+        case 0:
+          std::cout << "Finished modifying custom call." << std::endl;
+          modify_call = false;
+          break;
+        case 1:
+          // Change video codec.
+          SetVideoCodecType(vie_codec, &video_send_codec);
+          SetVideoCodecSize(vie_codec, &video_send_codec);
+          SetVideoCodecBitrate(vie_codec, &video_send_codec);
+          SetVideoCodecMinBitrate(vie_codec, &video_send_codec);
+          SetVideoCodecMaxBitrate(vie_codec, &video_send_codec);
+          SetVideoCodecMaxFramerate(vie_codec, &video_send_codec);
+          SetVideoCodecTemporalLayer(&video_send_codec);
+          PrintCallInformation(ip_address, device_name,
+                               unique_id, video_send_codec,
+                               video_tx_port, video_rx_port,
+                               audio_capture_device_name,
+                               audio_playbackDeviceName, audio_codec,
+                               audio_tx_port, audio_rx_port, protection_method);
+          error = vie_codec->SetSendCodec(video_channel, video_send_codec);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR: %s at line %d",
+                                                 __FUNCTION__, __LINE__);
+          error = vie_codec->SetReceiveCodec(video_channel, video_send_codec);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR: %s at line %d",
+                                                 __FUNCTION__, __LINE__);
+          modify_call = true;
+          break;
+        case 2:
+          // Change Video codec size by common resolution.
+          SetVideoCodecResolution(vie_codec, &video_send_codec);
+          PrintCallInformation(ip_address, device_name,
+                               unique_id, video_send_codec,
+                               video_tx_port, video_rx_port,
+                               audio_capture_device_name,
+                               audio_playbackDeviceName, audio_codec,
+                               audio_tx_port, audio_rx_port, protection_method);
+          error = vie_codec->SetSendCodec(video_channel, video_send_codec);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR: %s at line %d",
+                                                 __FUNCTION__, __LINE__);
+          error = vie_codec->SetReceiveCodec(video_channel, video_send_codec);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR: %s at line %d",
+                                                 __FUNCTION__, __LINE__);
+          modify_call = true;
+          break;
+        case 3:
+          // Change video codec by size height and width.
+          SetVideoCodecSize(vie_codec, &video_send_codec);
+          PrintCallInformation(ip_address, device_name,
+                               unique_id, video_send_codec,
+                               video_tx_port, video_rx_port,
+                               audio_capture_device_name,
+                               audio_playbackDeviceName, audio_codec,
+                               audio_tx_port, audio_rx_port, protection_method);
+          error = vie_codec->SetSendCodec(video_channel, video_send_codec);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR: %s at line %d",
+                                                 __FUNCTION__, __LINE__);
+          error = vie_codec->SetReceiveCodec(video_channel, video_send_codec);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR: %s at line %d",
+                                                 __FUNCTION__, __LINE__);
+          modify_call = true;
+          break;
+        case 4:
+          error = vie_renderer->StopRender(capture_id);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR: %s at line %d",
+                                                 __FUNCTION__, __LINE__);
+          error = vie_renderer->RemoveRenderer(capture_id);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR: %s at line %d",
+                                                 __FUNCTION__, __LINE__);
+          error = vie_capture->StopCapture(capture_id);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR: %s at line %d",
+                                                 __FUNCTION__, __LINE__);
+          error = vie_capture->DisconnectCaptureDevice(video_channel);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR: %s at line %d",
+                                                 __FUNCTION__, __LINE__);
+          error = vie_capture->ReleaseCaptureDevice(capture_id);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR: %s at line %d",
+                                                 __FUNCTION__, __LINE__);
+          memset(device_name, 0, KMaxUniqueIdLength);
+          memset(unique_id, 0, KMaxUniqueIdLength);
+          GetVideoDevice(vie_base, vie_capture, device_name, unique_id);
+          capture_id = 0;
+          error = vie_capture->AllocateCaptureDevice(unique_id,
+                                                     KMaxUniqueIdLength,
+                                                     capture_id);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR: %s at line %d",
+                                                 __FUNCTION__, __LINE__);
+          error = vie_capture->ConnectCaptureDevice(capture_id,
+                                                    video_channel);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR: %s at line %d",
+                                                 __FUNCTION__, __LINE__);
+          error = vie_capture->StartCapture(capture_id);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR: %s at line %d",
+                                                 __FUNCTION__, __LINE__);
+          error = vie_renderer->AddRenderer(capture_id, _window1, 0, 0.0, 0.0,
+                                            1.0, 1.0);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR: %s at line %d",
+                                                 __FUNCTION__, __LINE__);
+          error = vie_renderer->StartRender(capture_id);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR: %s at line %d",
+                                                 __FUNCTION__, __LINE__);
+          modify_call = true;
+          break;
+        case 5:
+          // Record the incoming call.
+          std::cout << "Start Recording Incoming Video "
+                    << DEFAULT_INCOMING_FILE_NAME <<  std::endl;
+          error = vie_file->StartRecordIncomingVideo(
+                    video_channel, DEFAULT_INCOMING_FILE_NAME,
+                    webrtc::NO_AUDIO, audio_codec, video_send_codec);
+          std::cout << "Press enter to stop...";
+          std::getline(std::cin, str);
+          error = vie_file->StopRecordIncomingVideo(video_channel);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR:%d %s at line %d",
+                                                 vie_base->LastError(),
+                                                 __FUNCTION__, __LINE__);
+          modify_call = true;
+          break;
+        case 6:
+          // Record the outgoing call.
+          std::cout << "Start Recording Outgoing Video "
+                    << DEFAULT_OUTGOING_FILE_NAME <<  std::endl;
+          error = vie_file->StartRecordOutgoingVideo(
+                    video_channel, DEFAULT_OUTGOING_FILE_NAME,
+                    webrtc::NO_AUDIO, audio_codec, video_send_codec);
+          std::cout << "Press enter to stop...";
+          std::getline(std::cin, str);
+          error = vie_file->StopRecordOutgoingVideo(video_channel);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR:%d %s at line %d",
+                                                 vie_base->LastError(),
+                                                 __FUNCTION__, __LINE__);
+          modify_call = true;
+          break;
+        case 7:
+          // Send the file on the video_channel.
+          file_selection = 0;
+          std::cout << "Available files to play" << std::endl;
+          std::cout << "  0. " << DEFAULT_INCOMING_FILE_NAME <<  std::endl;
+          std::cout << "  1. " << DEFAULT_OUTGOING_FILE_NAME <<  std::endl;
+          std::cout << "Press enter for default ("
+                    << DEFAULT_INCOMING_FILE_NAME << "): ";
+          std::getline(std::cin, str);
+          file_selection = atoi(str.c_str());
+          // Disconnect the camera first.
+          error = vie_capture->DisconnectCaptureDevice(video_channel);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR:%d %s at line %d",
+                                                 vie_base->LastError(),
+                                                 __FUNCTION__, __LINE__);
+          if (file_selection == 1)
+            error = vie_file->StartPlayFile(DEFAULT_OUTGOING_FILE_NAME,
+                                            file_id, true);
+          else
+            error = vie_file->StartPlayFile(DEFAULT_INCOMING_FILE_NAME,
+                                            file_id, true);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR:%d %s at line %d",
+                                                 vie_base->LastError(),
+                                                 __FUNCTION__, __LINE__);
+          ViETest::Log("Registering file observer");
+          error = vie_file->RegisterObserver(file_id, file_observer);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR:%d %s at line %d",
+                                                 vie_base->LastError(),
+                                                 __FUNCTION__, __LINE__);
+          std::cout << std::endl;
+          std::cout << "Start sending the file that is played in a loop "
+                    << std::endl;
+          error = vie_file->SendFileOnChannel(file_id, video_channel);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR:%d %s at line %d",
+                                                 vie_base->LastError(),
+                                                 __FUNCTION__, __LINE__);
+          std::cout << "Press enter to stop...";
+          std::getline(std::cin, str);
+          ViETest::Log("Stopped sending video on channel");
+          error = vie_file->StopSendFileOnChannel(video_channel);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR:%d %s at line %d",
+                                                 vie_base->LastError(),
+                                                 __FUNCTION__, __LINE__);
+          ViETest::Log("Stop playing the file.");
+          error = vie_file->StopPlayFile(file_id);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR:%d %s at line %d",
+                                                 vie_base->LastError(),
+                                                 __FUNCTION__, __LINE__);
+          error = vie_capture->ConnectCaptureDevice(capture_id,
+                                                        video_channel);
+          number_of_errors += ViETest::TestError(error == 0,
+                                                 "ERROR:%d %s at line %d",
+                                                 vie_base->LastError(),
+                                                 __FUNCTION__, __LINE__);
+          error = vie_file->DeregisterObserver(file_id, file_observer);
+          number_of_errors += ViETest::TestError(error == -1,
+                                                 "ERROR:%d %s at line %d",
+                                                 vie_base->LastError(),
+                                                 __FUNCTION__, __LINE__);
+          modify_call = true;
+          break;
+        case 8:
+          // Change the Video Protection.
+          protection_method = GetVideoProtection();
+          SetVideoProtection(vie_codec, vie_rtp_rtcp,
+                             video_channel, protection_method);
+          modify_call = true;
+          break;
+        case 9:
+          // Toggle Encoder Observer.
+          if (!codec_encoder_observer) {
+            std::cout << "Registering Encoder Observer" << std::endl;
+            codec_encoder_observer = new ViEAutotestEncoderObserver();
+            error = vie_codec->RegisterEncoderObserver(video_channel,
+                                                       *codec_encoder_observer);
+            number_of_errors += ViETest::TestError(error == 0,
+                                                   "ERROR: %s at line %d",
+                                                   __FUNCTION__, __LINE__);
+          } else {
+            std::cout << "Deregistering Encoder Observer" << std::endl;
+            error = vie_codec->DeregisterEncoderObserver(video_channel);
+            delete codec_encoder_observer;
+            codec_encoder_observer = NULL;
+            number_of_errors += ViETest::TestError(error == 0,
+                                                   "ERROR: %s at line %d",
+                                                   __FUNCTION__, __LINE__);
+          }
+          modify_call = true;
+          break;
+        case 10:
+          // Toggle Decoder Observer.
+          if (!codec_decoder_observer) {
+            std::cout << "Registering Decoder Observer" << std::endl;
+            codec_decoder_observer = new ViEAutotestDecoderObserver();
+            error = vie_codec->RegisterDecoderObserver(video_channel,
+                                                       *codec_decoder_observer);
+            number_of_errors += ViETest::TestError(error == 0,
+                                                   "ERROR: %s at line %d",
+                                                   __FUNCTION__, __LINE__);
+          } else {
+            std::cout << "Deregistering Decoder Observer" << std::endl;
+            error = vie_codec->DeregisterDecoderObserver(video_channel);
+            delete codec_decoder_observer;
+            codec_decoder_observer = NULL;
+            number_of_errors += ViETest::TestError(error == 0,
+                                                   "ERROR: %s at line %d",
+                                                   __FUNCTION__, __LINE__);
+          }
+          modify_call = true;
+          break;
+        case 11:
+          // Print Call information..
+          PrintCallInformation(ip_address, device_name,
+                               unique_id, video_send_codec,
+                               video_tx_port, video_rx_port,
+                               audio_capture_device_name,
+                               audio_playbackDeviceName,
+                               audio_codec, audio_tx_port,
+                               audio_rx_port, protection_method);
+          PrintVideoStreamInformation(vie_codec,
+                                      video_channel);
+          modify_call = true;
+          break;
+        case 12:
+          // Print Call statistics.
+          PrintRTCCPStatistics(vie_rtp_rtcp, video_channel,
+                               kSendStatistic);
+          PrintRTCCPStatistics(vie_rtp_rtcp, video_channel,
+                               kReceivedStatistic);
+          PrintRTPStatistics(vie_rtp_rtcp, video_channel);
+          PrintBandwidthUsage(vie_rtp_rtcp, video_channel);
+          PrintCodecStatistics(vie_codec, video_channel,
+                               kSendStatistic);
+          PrintCodecStatistics(vie_codec, video_channel,
+                               kReceivedStatistic);
+          PrintGetDiscardedPackets(vie_codec, video_channel);
+          modify_call = true;
+          break;
+        case 13:
+          is_image_scale_enabled = !is_image_scale_enabled;
+          vie_codec->SetImageScaleStatus(video_channel, is_image_scale_enabled);
+          if (is_image_scale_enabled) {
+            std::cout << "Image Scale is now enabled" << std::endl;
+          } else {
+            std::cout << "Image Scale is now disabled" << std::endl;
+          }
+          modify_call = true;
+          break;
+        default:
+          // Invalid selection, shows options menu again.
+          std::cout << "Invalid selection. Select Again." << std::endl;
+          break;
+      }
+      // Modify_call is false if user does not select one of the modify options.
+      if (modify_call == false) {
+        selection = 0;
+      }
+    }
+
+    // Stop the Call
+    std::cout << "Press enter to stop...";
+    std::getline(std::cin, str);
+
+    // Testing finished. Tear down Voice and Video Engine.
+    // Tear down the VoE first.
+    error = voe_base->StopReceive(audio_channel);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = voe_base->StopPlayout(audio_channel);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = voe_base->DeleteChannel(audio_channel);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+    // Now tear down the ViE engine.
+    error = vie_base->DisconnectAudioChannel(video_channel);
+
+    // If Encoder/Decoder Observer is running, delete them.
+    if (codec_encoder_observer) {
+      error = vie_codec->DeregisterEncoderObserver(video_channel);
+      delete codec_encoder_observer;
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+    }
+    if (codec_decoder_observer) {
+      error = vie_codec->DeregisterDecoderObserver(video_channel);
+      delete codec_decoder_observer;
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+    }
+
+    error = vie_base->StopReceive(video_channel);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = vie_base->StopSend(video_channel);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = vie_renderer->StopRender(capture_id);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = vie_renderer->StopRender(video_channel);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = vie_renderer->RemoveRenderer(capture_id);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = vie_renderer->RemoveRenderer(video_channel);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = vie_capture->StopCapture(capture_id);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = vie_capture->DisconnectCaptureDevice(video_channel);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = vie_capture->ReleaseCaptureDevice(capture_id);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    error = vie_base->DeleteChannel(video_channel);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    int remaining_interfaces = 0;
+    remaining_interfaces = vie_file->Release();
+    number_of_errors += ViETest::TestError(remaining_interfaces == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+    remaining_interfaces = vie_codec->Release();
+    number_of_errors += ViETest::TestError(remaining_interfaces == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    remaining_interfaces = vie_capture->Release();
+    number_of_errors += ViETest::TestError(remaining_interfaces == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    remaining_interfaces = vie_rtp_rtcp->Release();
+    number_of_errors += ViETest::TestError(remaining_interfaces == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    remaining_interfaces = vie_renderer->Release();
+    number_of_errors += ViETest::TestError(remaining_interfaces == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    remaining_interfaces = vie_network->Release();
+    number_of_errors += ViETest::TestError(remaining_interfaces == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    remaining_interfaces = vie_base->Release();
+    number_of_errors += ViETest::TestError(remaining_interfaces == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    bool deleted = webrtc::VideoEngine::Delete(vie);
+    number_of_errors += ViETest::TestError(deleted == true,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    ViETest::Log(" ");
+    ViETest::Log(" ViE Autotest Custom Call Started");
+    ViETest::Log("========================================");
+    ViETest::Log(" ");
+  }
+  return number_of_errors;
+}
+
+bool GetVideoDevice(webrtc::ViEBase* vie_base,
+                    webrtc::ViECapture* vie_capture,
+                    char* capture_device_name,
+                    char* capture_device_unique_id) {
+  int error = 0;
+  int number_of_errors = 0;
+  int capture_device_index = 0;
+  std::string str;
+
+  const unsigned int KMaxDeviceNameLength = 128;
+  const unsigned int KMaxUniqueIdLength = 256;
+  char device_name[KMaxDeviceNameLength];
+  char unique_id[KMaxUniqueIdLength];
+
+  while (1) {
+    memset(device_name, 0, KMaxDeviceNameLength);
+    memset(unique_id, 0, KMaxUniqueIdLength);
+
+    std::cout << std::endl;
+    std::cout << "Available video capture devices:" << std::endl;
+    int capture_idx = 0;
+    for (capture_idx = 0;
+         capture_idx < vie_capture->NumberOfCaptureDevices();
+         capture_idx++) {
+      memset(device_name, 0, KMaxDeviceNameLength);
+      memset(unique_id, 0, KMaxUniqueIdLength);
+
+      error = vie_capture->GetCaptureDevice(capture_idx, device_name,
+                                            KMaxDeviceNameLength,
+                                            unique_id,
+                                            KMaxUniqueIdLength);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      std::cout << "   " << capture_idx + 1 << ". " << device_name
+                << "/" << unique_id
+                << std::endl;
+    }
+    //  Get the dev_name of the default (or first) camera for display.
+    error = vie_capture->GetCaptureDevice(0, device_name,
+                                          KMaxDeviceNameLength,
+                                          unique_id,
+                                          KMaxUniqueIdLength);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+
+    std::cout << "Choose a video capture device. Press enter for default ("
+              << device_name << "/" << unique_id << "): ";
+    std::getline(std::cin, str);
+    capture_device_index = atoi(str.c_str());
+
+    if (capture_device_index == 0) {
+      // Use the default (or first) camera.
+      error = vie_capture->GetCaptureDevice(0, device_name,
+                                            KMaxDeviceNameLength,
+                                            unique_id,
+                                            KMaxUniqueIdLength);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      strcpy(capture_device_unique_id, unique_id);
+      strcpy(capture_device_name, device_name);
+      return true;
+    } else if (
+        capture_device_index < 0 ||
+        (capture_device_index >
+            static_cast<int>(vie_capture->NumberOfCaptureDevices()))) {
+      // invalid selection
+      continue;
+    } else {
+      error = vie_capture->GetCaptureDevice(capture_device_index - 1,
+                                            device_name,
+                                            KMaxDeviceNameLength,
+                                            unique_id,
+                                            KMaxUniqueIdLength);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      strcpy(capture_device_unique_id, unique_id);
+      strcpy(capture_device_name, device_name);
+      return true;
+    }
+  }
+}
+
+bool GetAudioDevices(webrtc::VoEBase* voe_base,
+                     webrtc::VoEHardware* voe_hardware,
+                     char* recording_device_name,
+                     int& recording_device_index,
+                     char* playbackDeviceName,
+                     int& playback_device_index) {
+  int error = 0;
+  int number_of_errors = 0;
+  std::string str;
+
+  const unsigned int KMaxDeviceNameLength = 128;
+  const unsigned int KMaxUniqueIdLength = 128;
+  char recording_device_unique_name[KMaxDeviceNameLength];
+  char playbackDeviceUniqueName[KMaxUniqueIdLength];
+
+  int number_of_recording_devices = -1;
+  error = voe_hardware->GetNumOfRecordingDevices(number_of_recording_devices);
+  number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+  while (1) {
+    recording_device_index = -1;
+    std::cout << std::endl;
+    std::cout << "Available audio capture devices:" << std::endl;
+    int capture_idx = 0;
+
+    for (capture_idx = 0; capture_idx < number_of_recording_devices;
+         capture_idx++) {
+      memset(recording_device_name, 0, KMaxDeviceNameLength);
+      memset(recording_device_unique_name, 0, KMaxDeviceNameLength);
+      error = voe_hardware->GetRecordingDeviceName(
+          capture_idx, recording_device_name, recording_device_unique_name);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      std::cout << "   " << capture_idx + 1 << ". " << recording_device_name
+                << std::endl;
+    }
+
+    std::cout << "Choose an audio capture device. Press enter for default("
+              << recording_device_name << "): ";
+    std::getline(std::cin, str);
+    int capture_device_index = atoi(str.c_str());
+
+    if (capture_device_index == 0) {
+      // Use the default (or first) recording device.
+      recording_device_index = 0;
+      error = voe_hardware->GetRecordingDeviceName(
+          recording_device_index, recording_device_name,
+          recording_device_unique_name);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      break;
+    } else if (capture_device_index < 0 ||
+               capture_device_index > number_of_recording_devices) {
+      // Invalid selection.
+      continue;
+    } else {
+      recording_device_index = capture_device_index - 1;
+      error = voe_hardware->GetRecordingDeviceName(
+          recording_device_index, recording_device_name,
+          recording_device_unique_name);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      break;
+    }
+  }
+
+  int number_of_playbackDevices = -1;
+  error = voe_hardware->GetNumOfPlayoutDevices(number_of_playbackDevices);
+  number_of_errors += ViETest::TestError(error == 0, "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+
+  while (1) {
+    playback_device_index = -1;
+    std::cout << std::endl;
+    std::cout << "Available audio playout devices:" << std::endl;
+    int capture_idx = 0;
+
+    for (capture_idx = 0; capture_idx < number_of_playbackDevices;
+         capture_idx++) {
+      memset(playbackDeviceName, 0, KMaxDeviceNameLength);
+      memset(playbackDeviceUniqueName, 0, KMaxDeviceNameLength);
+      error = voe_hardware->GetPlayoutDeviceName(capture_idx,
+                                                 playbackDeviceName,
+                                                 playbackDeviceUniqueName);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      std::cout << "   " << capture_idx + 1 << ". " << playbackDeviceName
+                << std::endl;
+    }
+
+    std::cout << "Choose an audio playback device. Press enter for default ("
+              << playbackDeviceName << "): ";
+    std::getline(std::cin, str);
+    int capture_device_index = atoi(str.c_str());
+
+    if (capture_device_index == 0) {
+      // Use the default (or first) playout device.
+      playback_device_index = 0;
+      error = voe_hardware->GetPlayoutDeviceName(
+                playback_device_index, playbackDeviceName,
+                playbackDeviceUniqueName);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      return true;
+    } else if (capture_device_index < 0
+               || capture_device_index > number_of_playbackDevices) {
+      // Invalid selection.
+      continue;
+    } else {
+      playback_device_index = capture_device_index - 1;
+      error = voe_hardware->GetPlayoutDeviceName(playback_device_index,
+                                                 playbackDeviceName,
+                                                 playbackDeviceUniqueName);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      return true;
+    }
+  }
+}
+
+// General helper functions.
+
+bool GetIPAddress(char* i_ip) {
+  char o_ip[16] = DEFAULT_SEND_IP;
+  std::string str;
+
+  while (1) {
+    std::cout << std::endl;
+    std::cout << "Enter destination IP. Press enter for default ("
+              << o_ip << "): ";
+    std::getline(std::cin, str);
+
+    if (str.compare("") == 0) {
+      // use default value;
+      strcpy(i_ip, o_ip);
+      return true;
+    }
+    if (ValidateIP(str) == false) {
+      std::cout << "Invalid entry. Try again." << std::endl;
+      continue;
+    }
+    // Done, copy std::string to c_string and return.
+    strcpy(i_ip, str.c_str());
+    return true;
+  }
+  assert(false);
+  return false;
+}
+
+bool ValidateIP(std::string i_str) {
+  if (0 == i_str.compare("")) {
+    return false;
+  }
+  return true;
+}
+
+// Video settings functions.
+
+bool SetVideoPorts(int* tx_port, int* rx_port) {
+  std::string str;
+  int port = 0;
+
+  // Set to default values.
+  *tx_port = DEFAULT_VIDEO_PORT;
+  *rx_port = DEFAULT_VIDEO_PORT;
+
+  while (1) {
+    std::cout << "Enter video send port. Press enter for default ("
+              << *tx_port << "):  ";
+    std::getline(std::cin, str);
+    port = atoi(str.c_str());
+
+    if (port == 0) {
+      // Default value.
+      break;
+    } else {
+      // User selection.
+      if (port <= 0 || port > 63556) {
+        // Invalid selection.
+        continue;
+      } else {
+        *tx_port = port;
+        break;  // Move on to rx_port.
+      }
+    }
+  }
+
+  while (1) {
+    std::cout << "Enter video receive port. Press enter for default ("
+              << *rx_port << "):  ";
+    std::getline(std::cin, str);
+    port = atoi(str.c_str());
+
+    if (port == 0) {
+      // Default value
+      return true;
+    } else {
+      // User selection.
+      if (port <= 0 || port > 63556) {
+        // Invalid selection.
+        continue;
+      } else {
+        *rx_port = port;
+        return true;
+      }
+    }
+  }
+  assert(false);
+  return false;
+}
+
+// Audio settings functions.
+
+bool GetAudioPorts(int* tx_port, int* rx_port) {
+  int port = 0;
+  std::string str;
+
+  // set to default values.
+  *tx_port = DEFAULT_AUDIO_PORT;
+  *rx_port = DEFAULT_AUDIO_PORT;
+
+  while (1) {
+    std::cout << "Enter audio send port. Press enter for default ("
+              << *tx_port << "):  ";
+    std::getline(std::cin, str);
+    port = atoi(str.c_str());
+
+    if (port == 0) {
+      // Default value.
+      break;
+    } else {
+      // User selection.
+      if (port <= 0 || port > 63556) {
+        // Invalid selection.
+        continue;
+      } else {
+        *tx_port = port;
+        break;  // Move on to rx_port.
+      }
+    }
+  }
+
+  while (1) {
+    std::cout << "Enter audio receive port. Press enter for default ("
+              << *rx_port << "):  ";
+    std::getline(std::cin, str);
+    port = atoi(str.c_str());
+
+    if (port == 0) {
+      // Default value.
+      return true;
+    } else {
+      // User selection.
+      if (port <= 0 || port > 63556) {
+        // Invalid selection.
+        continue;
+      } else {
+        *rx_port = port;
+        return true;
+      }
+    }
+  }
+  assert(false);
+  return false;
+}
+
+bool GetAudioCodec(webrtc::VoECodec* voe_codec,
+                   webrtc::CodecInst& audio_codec) {
+  int error = 0;
+  int number_of_errors = 0;
+  int codec_selection = 0;
+  std::string str;
+  memset(&audio_codec, 0, sizeof(webrtc::CodecInst));
+
+  while (1) {
+    std::cout << std::endl;
+    std::cout << "Available audio codecs:" << std::endl;
+    int codec_idx = 0;
+    int default_codec_idx = 0;
+    for (codec_idx = 0; codec_idx < voe_codec->NumOfCodecs(); codec_idx++) {
+      error = voe_codec->GetCodec(codec_idx, audio_codec);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+
+      // Test for default codec index.
+      if (strcmp(audio_codec.plname, DEFAULT_AUDIO_CODEC) == 0) {
+        default_codec_idx = codec_idx;
+      }
+      std::cout << "   " << codec_idx + 1 << ". " << audio_codec.plname
+                << " type:" << audio_codec.pltype
+                << " freq:" << audio_codec.plfreq
+                << " chan:" << audio_codec.channels
+                << std::endl;
+    }
+    std::cout << std::endl;
+    std::cout << "Choose audio codec. Press enter for default ("
+              << DEFAULT_AUDIO_CODEC << "):  ";
+    std::getline(std::cin, str);
+    codec_selection = atoi(str.c_str());
+
+    if (codec_selection == 0) {
+      // Use default.
+      error = voe_codec->GetCodec(default_codec_idx, audio_codec);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      return true;
+    } else {
+      // User selection.
+      codec_selection = atoi(str.c_str()) - 1;
+      error = voe_codec->GetCodec(codec_selection, audio_codec);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      if (error != 0) {
+        std::cout << "ERROR: Code = " << error << " Invalid selection"
+                  << std::endl;
+        continue;
+      }
+      return true;
+    }
+  }
+  assert(false);
+  return false;
+}
+
+void PrintCallInformation(char* IP, char* video_capture_device_name,
+                          char* video_capture_unique_id,
+                          webrtc::VideoCodec video_codec,
+                          int video_tx_port, int video_rx_port,
+                          char* audio_capture_device_name,
+                          char* audio_playbackDeviceName,
+                          webrtc::CodecInst audio_codec,
+                          int audio_tx_port, int audio_rx_port,
+                          int protection_method) {
+  std::string str;
+
+  std::cout << "************************************************"
+            << std::endl;
+  std::cout << "The call has the following settings: " << std::endl;
+  std::cout << "\tIP: " << IP << std::endl;
+  std::cout << "\tVideo Capture Device: " << video_capture_device_name
+            << std::endl;
+  std::cout << "\t\tName: " << video_capture_device_name << std::endl;
+  std::cout << "\t\tUniqueId: " << video_capture_unique_id << std::endl;
+  PrintVideoCodec(video_codec);
+  std::cout << "\t Video Tx Port: " << video_tx_port << std::endl;
+  std::cout << "\t Video Rx Port: " << video_rx_port << std::endl;
+  std::cout << "\t Video Protection Method: " << protection_method
+            << std::endl;
+  std::cout << "\tAudio Capture Device: " << audio_capture_device_name
+            << std::endl;
+  std::cout << "\tAudio Playback Device: " << audio_playbackDeviceName
+            << std::endl;
+  std::cout << "\tAudio Codec: " << std::endl;
+  std::cout << "\t\tplname: " << audio_codec.plname << std::endl;
+  std::cout << "\t\tpltype: " << static_cast<int>(audio_codec.pltype)
+            << std::endl;
+  std::cout << "\t Audio Tx Port: " << audio_tx_port << std::endl;
+  std::cout << "\t Audio Rx Port: " << audio_rx_port << std::endl;
+  std::cout << "************************************************"
+            << std::endl;
+}
+
+bool SetVideoCodecType(webrtc::ViECodec* vie_codec,
+                       webrtc::VideoCodec* video_codec) {
+  int error = 0;
+  int number_of_errors = 0;
+  int codec_selection = 0;
+  std::string str;
+  memset(video_codec, 0, sizeof(webrtc::VideoCodec));
+
+  bool exit_loop = false;
+  while (!exit_loop) {
+    std::cout << std::endl;
+    std::cout << "Available video codecs:" << std::endl;
+    int codec_idx = 0;
+    int default_codec_idx = 0;
+    // Print out all the codecs available to set Codec to.
+    for (codec_idx = 0; codec_idx < vie_codec->NumberOfCodecs(); codec_idx++) {
+      error = vie_codec->GetCodec(codec_idx, *video_codec);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      // Test for default codec index.
+      if (strcmp(video_codec->plName, DEFAULT_VIDEO_CODEC) == 0) {
+        default_codec_idx = codec_idx;
+      }
+      std::cout << "   " << codec_idx + 1 << ". " << video_codec->plName
+                << std::endl;
+    }
+    std::cout << std::endl;
+    std::cout << "Choose video codec. Press enter for default ("
+              << DEFAULT_VIDEO_CODEC << "):  ";
+    std::getline(std::cin, str);
+    codec_selection = atoi(str.c_str());
+    if (codec_selection == 0) {
+      // Use default.
+      error = vie_codec->GetCodec(default_codec_idx, *video_codec);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      exit_loop = true;
+    } else {
+      // User selection.
+      codec_selection = atoi(str.c_str()) - 1;
+      error = vie_codec->GetCodec(codec_selection, *video_codec);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      if (error != 0) {
+        std::cout << "ERROR: Code=" << error << " Invalid selection"
+                  << std::endl;
+        continue;
+      }
+      exit_loop = true;
+    }
+  }
+  if (video_codec->codecType == webrtc::kVideoCodecI420) {
+    video_codec->width = 176;
+    video_codec->height = 144;
+  }
+  return true;
+}
+
+bool SetVideoCodecResolution(webrtc::ViECodec* vie_codec,
+                             webrtc::VideoCodec* video_codec) {
+  std::string str;
+  int size_option = 5;
+
+  if (video_codec->codecType == webrtc::kVideoCodecVP8) {
+    std::cout << std::endl;
+    std::cout << "Available Common Resolutions : " << std::endl;
+    std::cout << "  1. SQCIF (128X96) " << std::endl;
+    std::cout << "  2. QQVGA (160X120) " << std::endl;
+    std::cout << "  3. QCIF (176X144) " << std::endl;
+    std::cout << "  4. CIF  (352X288) " << std::endl;
+    std::cout << "  5. VGA  (640X480) " << std::endl;
+    std::cout << "  6. WVGA (800x480) " << std::endl;
+    std::cout << "  7. 4CIF (704X576) " << std::endl;
+    std::cout << "  8. SVGA (800X600) " << std::endl;
+    std::cout << "  9. HD   (1280X720) " << std::endl;
+    std::cout << " 10. XGA  (1024x768) " << std::endl;
+    std::cout << "Enter frame size option: " << std::endl;
+
+    std::getline(std::cin, str);
+    size_option = atoi(str.c_str());
+
+    switch (size_option) {
+      case 1:
+        video_codec->width = 128;
+        video_codec->height = 96;
+        break;
+      case 2:
+        video_codec->width = 160;
+        video_codec->height = 120;
+        break;
+      case 3:
+        video_codec->width = 176;
+        video_codec->height = 144;
+        break;
+      case 4:
+        video_codec->width = 352;
+        video_codec->height = 288;
+        break;
+      case 5:
+        video_codec->width = 640;
+        video_codec->height = 480;
+        break;
+      case 6:
+        video_codec->width = 800;
+        video_codec->height = 480;
+        break;
+      case 7:
+        video_codec->width = 704;
+        video_codec->height = 576;
+        break;
+      case 8:
+        video_codec->width = 800;
+        video_codec->height = 600;
+        break;
+      case 9:
+        video_codec->width = 1280;
+        video_codec->height = 720;
+        break;
+      case 10:
+        video_codec->width = 1024;
+        video_codec->height = 768;
+        break;
+    }
+  } else {
+    std::cout << "Can Only change codec size if it's VP8" << std::endl;
+  }
+  return true;
+}
+
+bool SetVideoCodecSize(webrtc::ViECodec* vie_codec,
+                       webrtc::VideoCodec* video_codec) {
+  if (video_codec->codecType == webrtc::kVideoCodecVP8) {
+    std::string str;
+    video_codec->width = DEFAULT_VIDEO_CODEC_WIDTH;
+    video_codec->height = DEFAULT_VIDEO_CODEC_HEIGHT;
+    std::cout << "Choose video width. Press enter for default ("
+              << DEFAULT_VIDEO_CODEC_WIDTH << "):  ";
+    std::getline(std::cin, str);
+    int size_selection = atoi(str.c_str());
+    if (size_selection != 0) {
+      video_codec->width = size_selection;
+    }
+    std::cout << "Choose video height. Press enter for default ("
+              << DEFAULT_VIDEO_CODEC_HEIGHT << "):  ";
+    std::getline(std::cin, str);
+    size_selection = atoi(str.c_str());
+    if (size_selection != 0) {
+      video_codec->height = size_selection;
+    }
+  } else {
+    std::cout << "Can Only change codec size if it's VP8" << std::endl;
+  }
+  return true;
+}
+
+bool SetVideoCodecBitrate(webrtc::ViECodec* vie_codec,
+                          webrtc::VideoCodec* video_codec) {
+  std::string str;
+  std::cout << std::endl;
+  std::cout << "Choose start rate (in kbps). Press enter for default ("
+            << DEFAULT_VIDEO_CODEC_BITRATE << "):  ";
+  std::getline(std::cin, str);
+  int start_rate = atoi(str.c_str());
+  video_codec->startBitrate = DEFAULT_VIDEO_CODEC_BITRATE;
+  if (start_rate != 0) {
+    video_codec->startBitrate = start_rate;
+  }
+  return true;
+}
+
+bool SetVideoCodecMaxBitrate(webrtc::ViECodec* vie_codec,
+                             webrtc::VideoCodec* video_codec) {
+  std::string str;
+  std::cout << std::endl;
+  std::cout << "Choose max bitrate (in kbps). Press enter for default ("
+            << DEFAULT_VIDEO_CODEC_MAX_BITRATE << "):  ";
+  std::getline(std::cin, str);
+  int max_rate = atoi(str.c_str());
+  video_codec->maxBitrate = DEFAULT_VIDEO_CODEC_MAX_BITRATE;
+  if (max_rate != 0) {
+    video_codec->maxBitrate = max_rate;
+  }
+  return true;
+}
+
+bool SetVideoCodecMinBitrate(webrtc::ViECodec* vie_codec,
+                             webrtc::VideoCodec* video_codec) {
+  std::string str;
+  std::cout << std::endl;
+  std::cout << "Choose min bitrate (in fps). Press enter for default ("
+            << DEFAULT_VIDEO_CODEC_MIN_BITRATE << "):  ";
+  std::getline(std::cin, str);
+  char min_bit_rate = atoi(str.c_str());
+  video_codec->minBitrate = DEFAULT_VIDEO_CODEC_MIN_BITRATE;
+  if (min_bit_rate != 0) {
+    video_codec->minBitrate = min_bit_rate;
+  }
+  return true;
+}
+
+bool SetVideoCodecMaxFramerate(webrtc::ViECodec* vie_codec,
+                               webrtc::VideoCodec* video_codec) {
+  std::string str;
+  std::cout << std::endl;
+  std::cout << "Choose max framerate (in fps). Press enter for default ("
+            << DEFAULT_VIDEO_CODEC_MAX_FRAMERATE << "):  ";
+  std::getline(std::cin, str);
+  char max_frame_rate = atoi(str.c_str());
+  video_codec->maxFramerate = DEFAULT_VIDEO_CODEC_MAX_FRAMERATE;
+  if (max_frame_rate != 0) {
+    video_codec->maxFramerate = max_frame_rate;
+  }
+  return true;
+}
+
+bool SetVideoCodecTemporalLayer(webrtc::VideoCodec* video_codec) {
+  if (video_codec->codecType == webrtc::kVideoCodecVP8) {
+    std::string str;
+    std::cout << std::endl;
+    std::cout << "Choose number of temporal layers (1 to 4). "
+              << "Press enter for default ("
+              << DEFAULT_TEMPORAL_LAYER << "):  ";
+    std::getline(std::cin, str);
+    char num_temporal_layers = atoi(str.c_str());
+    video_codec->codecSpecific.VP8.numberOfTemporalLayers =
+        DEFAULT_TEMPORAL_LAYER;
+    if (num_temporal_layers != 0) {
+      video_codec->codecSpecific.VP8.numberOfTemporalLayers =
+          num_temporal_layers;
+    }
+  }
+  return true;
+}
+
+// GetVideoProtection only prints the prompt to get a number
+// that SetVideoProtection method uses
+// 0 = None
+// 1 = FEC
+// 2 = NACK
+// 3 = NACK + FEC (aka Hybrid)
+// Default = DEFAULT_VIDEO_PROTECTION METHOD
+int GetVideoProtection() {
+  int protection_method = DEFAULT_VIDEO_PROTECTION_METHOD;
+
+  std::cout << "Available Video Protection Method." << std::endl;
+  std::cout << "  0. None" << std::endl;
+  std::cout << "  1. FEC" << std::endl;
+  std::cout << "  2. NACK" << std::endl;
+  std::cout << "  3. NACK+FEC" << std::endl;
+  std::cout << "Enter Video Protection Method. "
+            << "Press enter for default (" << protection_method << "):"
+            << std::endl;
+  std::string method;
+  std::getline(std::cin, method);
+  protection_method = atoi(method.c_str());
+
+  return protection_method;
+}
+
+bool SetVideoProtection(webrtc::ViECodec* vie_codec,
+                        webrtc::ViERTP_RTCP* vie_rtp_rtcp,
+                        int video_channel, int protection_method) {
+  int error = 0;
+  int number_of_errors = 0;
+  webrtc::VideoCodec video_codec;
+
+  memset(&video_codec, 0, sizeof(webrtc::VideoCodec));
+
+  // Set all video protection to false initially
+  error = vie_rtp_rtcp->SetHybridNACKFECStatus(video_channel, false,
+                                                   VCM_RED_PAYLOAD_TYPE,
+                                                   VCM_ULPFEC_PAYLOAD_TYPE);
+  number_of_errors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+  error = vie_rtp_rtcp->SetFECStatus(video_channel, false,
+                                     VCM_RED_PAYLOAD_TYPE,
+                                     VCM_ULPFEC_PAYLOAD_TYPE);
+  number_of_errors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+  error = vie_rtp_rtcp->SetNACKStatus(video_channel, false);
+  number_of_errors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+  // Set video protection for FEC, NACK or Hybrid.
+  switch (protection_method) {
+    case 0:  // None.
+      // No protection selected, all protection already at false.
+      std::cout << "Call using None protection Method"
+                << std::endl;
+      break;
+    case 1:  // FEC only.
+      std::cout << "Call using FEC protection Method"
+                << std::endl;
+      error = vie_rtp_rtcp->SetFECStatus(video_channel, true,
+                                         VCM_RED_PAYLOAD_TYPE,
+                                         VCM_ULPFEC_PAYLOAD_TYPE);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      break;
+    case 2:  // NACK only.
+      std::cout << "Call using NACK protection Method"
+                << std::endl;
+      error = vie_rtp_rtcp->SetNACKStatus(video_channel, true);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      break;
+    case 3:  // Hybrid NACK and FEC.
+      std::cout << "Call using Hybrid NACK and FEC protection Method"
+                << std::endl;
+      error = vie_rtp_rtcp->SetHybridNACKFECStatus(video_channel, true,
+                                                   VCM_RED_PAYLOAD_TYPE,
+                                                   VCM_ULPFEC_PAYLOAD_TYPE);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      break;
+  }
+
+  // Set receive codecs for FEC and hybrid NACK/FEC.
+  if (protection_method == 1 || protection_method == 3) {
+    // RED.
+    error = vie_codec->GetCodec(vie_codec->NumberOfCodecs() - 2,
+                                video_codec);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+    video_codec.plType = VCM_RED_PAYLOAD_TYPE;
+    error = vie_codec->SetReceiveCodec(video_channel, video_codec);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+    std::cout << "RED Codec Information:" << std::endl;
+    PrintVideoCodec(video_codec);
+    // ULPFEC.
+    error = vie_codec->GetCodec(vie_codec->NumberOfCodecs() - 1,
+                                video_codec);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+    video_codec.plType = VCM_ULPFEC_PAYLOAD_TYPE;
+    error = vie_codec->SetReceiveCodec(video_channel, video_codec);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+    std::cout << "ULPFEC Codec Information:" << std::endl;
+    PrintVideoCodec(video_codec);
+  }
+
+  return true;
+}
+
+bool GetBitrateSignaling() {
+  std::cout << std::endl;
+  std::cout << "Available bitrate signaling methods." << std::endl;
+  std::cout << "  0. REMB" << std::endl;
+  std::cout << "  1. TMMBR" << std::endl;
+  std::cout << "Enter bitrate signaling methods. "
+            << "Press enter for default (REMB): " << std::endl;
+  std::string method;
+  std::getline(std::cin, method);
+  if (atoi(method.c_str()) == 1) {
+    return false;
+  }
+  return true;
+}
+
+void PrintRTCCPStatistics(webrtc::ViERTP_RTCP* vie_rtp_rtcp,
+                          int video_channel,
+                          StatisticsType stat_type) {
+  int error = 0;
+  int number_of_errors = 0;
+  uint16_t fraction_lost = 0;
+  unsigned int cumulative_lost = 0;
+  unsigned int extended_max = 0;
+  unsigned int jitter = 0;
+  int rtt_ms = 0;
+
+  switch (stat_type) {
+    case kReceivedStatistic:
+      std::cout << "RTCP Received statistics"
+                << std::endl;
+      // Get and print the Received RTCP Statistics
+      error = vie_rtp_rtcp->GetReceivedRTCPStatistics(video_channel,
+                                                      fraction_lost,
+                                                      cumulative_lost,
+                                                      extended_max,
+                                                      jitter, rtt_ms);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      break;
+    case kSendStatistic:
+      std::cout << "RTCP Sent statistics"
+                << std::endl;
+      // Get and print the Sent RTCP Statistics
+      error = vie_rtp_rtcp->GetSentRTCPStatistics(video_channel, fraction_lost,
+                                                  cumulative_lost, extended_max,
+                                                  jitter, rtt_ms);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      break;
+  }
+  std::cout << "\tRTCP fraction of lost packets: "
+            << fraction_lost << std::endl;
+  std::cout << "\tRTCP cumulative number of lost packets: "
+            << cumulative_lost << std::endl;
+  std::cout << "\tRTCP max received sequence number "
+            << extended_max << std::endl;
+  std::cout << "\tRTCP jitter: "
+            << jitter << std::endl;
+  std::cout << "\tRTCP round trip (ms): "
+            << rtt_ms << std::endl;
+}
+
+void PrintRTPStatistics(webrtc::ViERTP_RTCP* vie_rtp_rtcp,
+                        int video_channel) {
+  int error = 0;
+  int number_of_errors = 0;
+  unsigned int bytes_sent = 0;
+  unsigned int packets_sent = 0;
+  unsigned int bytes_received = 0;
+  unsigned int packets_received = 0;
+
+  std::cout << "RTP statistics"
+            << std::endl;
+
+  // Get and print the RTP Statistics
+  error = vie_rtp_rtcp->GetRTPStatistics(video_channel, bytes_sent,
+                                         packets_sent, bytes_received,
+                                         packets_received);
+  number_of_errors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+  std::cout << "\tRTP bytes sent: "
+            << bytes_sent << std::endl;
+  std::cout << "\tRTP packets sent: "
+            << packets_sent << std::endl;
+  std::cout << "\tRTP bytes received: "
+            << bytes_received << std::endl;
+  std::cout << "\tRTP packets received: "
+            << packets_received << std::endl;
+}
+
+void PrintBandwidthUsage(webrtc::ViERTP_RTCP* vie_rtp_rtcp,
+                         int video_channel) {
+  int error = 0;
+  int number_of_errors = 0;
+  unsigned int total_bitrate_sent = 0;
+  unsigned int video_bitrate_sent = 0;
+  unsigned int fec_bitrate_sent = 0;
+  unsigned int nack_bitrate_sent = 0;
+  double percentage_fec = 0;
+  double percentage_nack = 0;
+
+  std::cout << "Bandwidth Usage" << std::endl;
+
+  // Get and print Bandwidth usage
+  error = vie_rtp_rtcp->GetBandwidthUsage(video_channel, total_bitrate_sent,
+                                          video_bitrate_sent, fec_bitrate_sent,
+                                          nack_bitrate_sent);
+  number_of_errors += ViETest::TestError(error == 0,
+                                         "ERROR: %s at line %d",
+                                         __FUNCTION__, __LINE__);
+  std::cout << "\tTotal bitrate sent (Kbit/s): "
+            << total_bitrate_sent << std::endl;
+  std::cout << "\tVideo bitrate sent (Kbit/s): "
+            << video_bitrate_sent << std::endl;
+  std::cout << "\tFEC bitrate sent (Kbit/s): "
+            << fec_bitrate_sent << std::endl;
+  percentage_fec =
+      (static_cast<double>(fec_bitrate_sent) /
+      static_cast<double>(total_bitrate_sent)) * 100;
+  std::cout << "\tPercentage FEC bitrate sent from total bitrate: "
+            << percentage_fec << std::endl;
+  std::cout << "\tNACK bitrate sent (Kbit/s): "
+            << nack_bitrate_sent << std::endl;
+  percentage_nack =
+      (static_cast<double>(nack_bitrate_sent) /
+      static_cast<double>(total_bitrate_sent)) * 100;
+  std::cout << "\tPercentage NACK bitrate sent from total bitrate: "
+            << percentage_nack << std::endl;
+}
+
+void PrintCodecStatistics(webrtc::ViECodec* vie_codec,
+                          int video_channel,
+                          StatisticsType stat_type) {
+  int error = 0;
+  int number_of_errors = 0;
+  unsigned int key_frames = 0;
+  unsigned int delta_frames = 0;
+  switch (stat_type) {
+    case kReceivedStatistic:
+      std::cout << "Codec Receive statistics"
+                << std::endl;
+      // Get and print the Receive Codec Statistics
+      error = vie_codec->GetReceiveCodecStastistics(video_channel, key_frames,
+                                                    delta_frames);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      break;
+    case kSendStatistic:
+      std::cout << "Codec Send statistics"
+                << std::endl;
+      // Get and print the Send Codec Statistics
+      error = vie_codec->GetSendCodecStastistics(video_channel, key_frames,
+                                                 delta_frames);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+      break;
+  }
+  std::cout << "\tNumber of encoded key frames: "
+            << key_frames << std::endl;
+  std::cout << "\tNumber of encoded delta frames: "
+            << delta_frames << std::endl;
+}
+
+void PrintGetDiscardedPackets(webrtc::ViECodec* vie_codec, int video_channel) {
+  std::cout << "Discarded Packets" << std::endl;
+  int discarded_packets = 0;
+  discarded_packets = vie_codec->GetDiscardedPackets(video_channel);
+  std::cout << "\tNumber of discarded packets: "
+            << discarded_packets << std::endl;
+}
+
+void PrintVideoStreamInformation(webrtc::ViECodec* vie_codec,
+                                 int video_channel) {
+  webrtc::VideoCodec outgoing_codec;
+  webrtc::VideoCodec incoming_codec;
+
+  memset(&outgoing_codec, 0, sizeof(webrtc::VideoCodec));
+  memset(&incoming_codec, 0, sizeof(webrtc::VideoCodec));
+
+  vie_codec->GetSendCodec(video_channel, outgoing_codec);
+  vie_codec->GetReceiveCodec(video_channel, incoming_codec);
+
+  std::cout << "************************************************"
+            << std::endl;
+  std::cout << "ChannelId: " << video_channel << std::endl;
+  std::cout << "Outgoing Stream information:" << std::endl;
+  PrintVideoCodec(outgoing_codec);
+  std::cout << "Incoming Stream information:" << std::endl;
+  PrintVideoCodec(incoming_codec);
+  std::cout << "************************************************"
+            << std::endl;
+}
+
+void PrintVideoCodec(webrtc::VideoCodec video_codec) {
+  std::cout << "\t\tplName: " << video_codec.plName << std::endl;
+  std::cout << "\t\tplType: " << static_cast<int>(video_codec.plType)
+            << std::endl;
+  std::cout << "\t\twidth: " << video_codec.width << std::endl;
+  std::cout << "\t\theight: " << video_codec.height << std::endl;
+  std::cout << "\t\tstartBitrate: " << video_codec.startBitrate
+            << std::endl;
+  std::cout << "\t\tminBitrate: " << video_codec.minBitrate
+            << std::endl;
+  std::cout << "\t\tmaxBitrate: " << video_codec.maxBitrate
+            << std::endl;
+  std::cout << "\t\tmaxFramerate: "
+            << static_cast<int>(video_codec.maxFramerate) << std::endl;
+  if (video_codec.codecType == webrtc::kVideoCodecVP8) {
+    int number_of_layers =
+        static_cast<int>(video_codec.codecSpecific.VP8.numberOfTemporalLayers);
+    std::cout << "\t\tVP8 Temporal Layer: " << number_of_layers << std::endl;
+  }
+}
diff --git a/src/video_engine/test/auto_test/source/vie_autotest_encryption.cc b/src/video_engine/test/auto_test/source/vie_autotest_encryption.cc
new file mode 100644
index 0000000..0c06f32
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_autotest_encryption.cc
@@ -0,0 +1,567 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// vie_autotest_encryption.cc
+//
+
+#include "vie_autotest_defines.h"
+#include "vie_autotest.h"
+#include "engine_configurations.h"
+
+#include "tb_capture_device.h"
+#include "tb_external_transport.h"
+#include "tb_interfaces.h"
+#include "tb_video_channel.h"
+
+class ViEAutotestEncryption: public webrtc::Encryption
+{
+public:
+    ViEAutotestEncryption()
+    {
+    }
+    ~ViEAutotestEncryption()
+    {
+    }
+
+    virtual void encrypt(int channel_no, unsigned char * in_data,
+                         unsigned char * out_data, int bytes_in, int* bytes_out)
+    {
+        for (int i = 0; i < bytes_in; i++)
+        {
+            out_data[i] = ~in_data[i];
+        }
+        *bytes_out = bytes_in + 2;
+    }
+
+    virtual void decrypt(int channel_no, unsigned char * in_data,
+                         unsigned char * out_data, int bytes_in, int* bytes_out)
+    {
+        for (int i = 0; i < bytes_in - 2; i++)
+        {
+            out_data[i] = ~in_data[i];
+        }
+        *bytes_out = bytes_in - 2;
+    }
+
+    virtual void encrypt_rtcp(int channel_no, unsigned char * in_data,
+                              unsigned char * out_data, int bytes_in,
+                              int* bytes_out)
+    {
+        for (int i = 0; i < bytes_in; i++)
+        {
+            out_data[i] = ~in_data[i];
+        }
+        *bytes_out = bytes_in + 2;
+    }
+
+    virtual void decrypt_rtcp(int channel_no, unsigned char * in_data,
+                              unsigned char * out_data, int bytes_in,
+                              int* bytes_out)
+    {
+        for (int i = 0; i < bytes_in - 2; i++)
+        {
+            out_data[i] = ~in_data[i];
+        }
+        *bytes_out = bytes_in - 2;
+    }
+};
+
+void ViEAutoTest::ViEEncryptionStandardTest()
+{
+    //***************************************************************
+    //	Begin create/initialize WebRTC Video Engine for testing
+    //***************************************************************
+
+    // Create VIE
+    TbInterfaces ViE("ViEEncryptionStandardTest");
+    // Create a video channel
+    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+
+    // Create a capture device
+    TbCaptureDevice tbCapture(ViE);
+    tbCapture.ConnectTo(tbChannel.videoChannel);
+
+    tbChannel.StartReceive();
+
+    tbChannel.StartSend();
+
+    RenderCaptureDeviceAndOutputStream(&ViE, &tbChannel, &tbCapture);
+
+#ifdef WEBRTC_SRTP
+    //***************************************************************
+    //	Engine ready. Begin testing class
+    //***************************************************************
+
+    //
+    // SRTP
+    //
+    unsigned char srtpKey1[30] =
+    {   0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3,
+        4, 5, 6, 7, 8, 9};
+
+    // Encryption only
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthNull, 0, 0, webrtc::kEncryption, srtpKey1));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthNull, 0, 0, webrtc::kEncryption, srtpKey1));
+    ViETest::Log("SRTP encryption only");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    // Authentication only
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthHmacSha1,
+        20, 4, webrtc::kAuthentication, srtpKey1));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthHmacSha1,
+        20, 4, webrtc::kAuthentication, srtpKey1));
+
+    ViETest::Log("SRTP authentication only");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    // Full protection
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey1));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey1));
+
+    ViETest::Log("SRTP full protection");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+#endif  // WEBRTC_SRTP
+
+    //
+    // External encryption
+    //
+    ViEAutotestEncryption testEncryption;
+    // Note(qhogpat): StartSend fails, not sure if this is intentional.
+    EXPECT_NE(0, ViE.base->StartSend(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->RegisterExternalEncryption(
+        tbChannel.videoChannel, testEncryption));
+    ViETest::Log(
+        "External encryption/decryption added, you should still see video");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.encryption->DeregisterExternalEncryption(
+        tbChannel.videoChannel));
+
+    //***************************************************************
+    //	Testing finished. Tear down Video Engine
+    //***************************************************************
+}
+
+void ViEAutoTest::ViEEncryptionExtendedTest()
+{
+    //***************************************************************
+    //	Begin create/initialize WebRTC Video Engine for testing
+    //***************************************************************
+
+    // Create VIE
+    TbInterfaces ViE("ViEEncryptionExtendedTest");
+    // Create a video channel
+    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+
+    // Create a capture device
+    TbCaptureDevice tbCapture(ViE);
+    tbCapture.ConnectTo(tbChannel.videoChannel);
+
+    tbChannel.StartReceive();
+    tbChannel.StartSend();
+
+    RenderCaptureDeviceAndOutputStream(&ViE, &tbChannel, &tbCapture);
+
+    //***************************************************************
+    //	Engine ready. Begin testing class
+    //***************************************************************
+
+#ifdef WEBRTC_SRTP
+
+    //
+    // SRTP
+    //
+    unsigned char srtpKey1[30] =
+    {   0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3,
+        4, 5, 6, 7, 8, 9};
+    unsigned char srtpKey2[30] =
+    {   9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0, 9, 8, 7, 6,
+        5, 4, 3, 2, 1, 0};
+    // NULL
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthNull, 0, 0,
+        webrtc::kNoProtection, srtpKey1));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthNull, 0, 0,
+        webrtc::kNoProtection, srtpKey1));
+
+    ViETest::Log("SRTP NULL encryption/authentication");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    // Encryption only
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthNull, 0, 0, webrtc::kEncryption, srtpKey1));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthNull, 0, 0, webrtc::kEncryption, srtpKey1));
+
+    ViETest::Log("SRTP encryption only");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    // Authentication only
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthHmacSha1,
+        20, 4, webrtc::kAuthentication, srtpKey1));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthHmacSha1,
+        20, 4, webrtc::kAuthentication, srtpKey1));
+
+    ViETest::Log("SRTP authentication only");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    // Full protection
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey1));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey1));
+
+    ViETest::Log("SRTP full protection");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    // Change receive key, but not send key...
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey2));
+
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey1));
+
+    ViETest::Log(
+        "\nSRTP receive key changed, you should not see any remote images");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    // Change send key too
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey2));
+
+    ViETest::Log("\nSRTP send key changed too, you should see remote video "
+                 "again with some decoding artefacts at start");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+
+    // Disable receive, keep send
+    ViETest::Log("SRTP receive disabled , you shouldn't see any video");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+#endif //WEBRTC_SRTP
+    //
+    // External encryption
+    //
+    ViEAutotestEncryption testEncryption;
+    EXPECT_EQ(0, ViE.encryption->RegisterExternalEncryption(
+        tbChannel.videoChannel, testEncryption));
+    ViETest::Log(
+        "External encryption/decryption added, you should still see video");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.encryption->DeregisterExternalEncryption(
+        tbChannel.videoChannel));
+
+    //***************************************************************
+    //	Testing finished. Tear down Video Engine
+    //***************************************************************
+}
+
+void ViEAutoTest::ViEEncryptionAPITest()
+{
+    //***************************************************************
+    //	Begin create/initialize WebRTC Video Engine for testing
+    //***************************************************************
+
+    //***************************************************************
+    //	Engine ready. Begin testing class
+    //***************************************************************
+
+    // Create VIE
+    TbInterfaces ViE("ViEEncryptionAPITest");
+    // Create a video channel
+    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+
+    // Create a capture device
+    TbCaptureDevice tbCapture(ViE);
+    // Connect to channel
+    tbCapture.ConnectTo(tbChannel.videoChannel);
+
+#ifdef WEBRTC_SRTP
+    unsigned char srtpKey[30] =
+    {   0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3,
+        4, 5, 6, 7, 8, 9};
+
+    //
+    // EnableSRTPSend and DisableSRTPSend
+    //
+
+    // Incorrect input argument, complete protection not enabled
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kNoProtection, srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryption, srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kAuthentication, srtpKey));
+
+    // Incorrect cipher key length
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 15,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 257,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherNull, 15, webrtc::kAuthHmacSha1,
+        20, 4, webrtc::kEncryptionAndAuthentication, srtpKey));
+
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherNull, 257, webrtc::kAuthHmacSha1,
+        20, 4, webrtc::kEncryptionAndAuthentication, srtpKey));
+
+    // Incorrect auth key length
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode,
+        30, webrtc::kAuthHmacSha1, 21, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthNull, 257, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 21, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthNull, 20, 13, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+
+    // NULL input
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        NULL));
+
+    // Double enable and disable
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+
+    // Note(qhogpat): the second check is likely incorrect.
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    // No protection
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthNull, 0, 0,
+        webrtc::kNoProtection, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    // Authentication only
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthHmacSha1,
+        20, 4, webrtc::kAuthentication, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthHmacSha1,
+        1, 4, webrtc::kAuthentication, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthHmacSha1,
+        20, 20, webrtc::kAuthentication, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthHmacSha1,
+        1, 1, webrtc::kAuthentication, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    // Encryption only
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthNull, 0, 0, webrtc::kEncryption, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 16,
+        webrtc::kAuthNull, 0, 0, webrtc::kEncryption, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    // Full protection
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    //
+    // EnableSRTPReceive and DisableSRTPReceive
+    //
+
+    // Incorrect input argument, complete protection not enabled
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kNoProtection, srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryption, srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kAuthentication, srtpKey));
+
+    // Incorrect cipher key length
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 15,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 257,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherNull, 15, webrtc::kAuthHmacSha1,
+        20, 4, webrtc::kEncryptionAndAuthentication, srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherNull, 257, webrtc::kAuthHmacSha1,
+        20, 4, webrtc::kEncryptionAndAuthentication, srtpKey));
+
+    // Incorrect auth key length
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 21, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthNull, 257, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 21, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthNull, 20, 13, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+
+    // NULL input
+    EXPECT_NE(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        NULL));
+
+    // Double enable and disable
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_NE(0, ViE.encryption->EnableSRTPSend(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPSend(tbChannel.videoChannel));
+
+    // No protection
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthNull, 0, 0,
+        webrtc::kNoProtection, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+
+    // Authentication only
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthHmacSha1,
+        1, 4, webrtc::kAuthentication, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0,
+        webrtc::kAuthHmacSha1, 20, 20, webrtc::kAuthentication, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherNull, 0, webrtc::kAuthHmacSha1,
+        1, 1, webrtc::kAuthentication, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+
+    // Encryption only
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthNull, 0, 0, webrtc::kEncryption, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 16,
+        webrtc::kAuthNull, 0, 0, webrtc::kEncryption, srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+
+    // Full protection
+    EXPECT_EQ(0, ViE.encryption->EnableSRTPReceive(
+        tbChannel.videoChannel, webrtc::kCipherAes128CounterMode, 30,
+        webrtc::kAuthHmacSha1, 20, 4, webrtc::kEncryptionAndAuthentication,
+        srtpKey));
+    EXPECT_EQ(0, ViE.encryption->DisableSRTPReceive(tbChannel.videoChannel));
+#endif //WEBRTC_SRTP
+    //
+    // External encryption
+    //
+
+    ViEAutotestEncryption testEncryption;
+    EXPECT_EQ(0, ViE.encryption->RegisterExternalEncryption(
+        tbChannel.videoChannel, testEncryption));
+    EXPECT_NE(0, ViE.encryption->RegisterExternalEncryption(
+        tbChannel.videoChannel, testEncryption));
+    EXPECT_EQ(0, ViE.encryption->DeregisterExternalEncryption(
+        tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.encryption->DeregisterExternalEncryption(
+        tbChannel.videoChannel));
+
+    //***************************************************************
+    //	Testing finished. Tear down Video Engine
+    //***************************************************************
+}
diff --git a/src/video_engine/test/auto_test/source/vie_autotest_file.cc b/src/video_engine/test/auto_test/source/vie_autotest_file.cc
new file mode 100644
index 0000000..606149c
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_autotest_file.cc
@@ -0,0 +1,501 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vie_autotest_defines.h"
+#include "vie_autotest.h"
+#include "engine_configurations.h"
+
+#include "testsupport/fileutils.h"
+#include "tb_interfaces.h"
+#include "tb_capture_device.h"
+
+#include "voe_codec.h"
+
+class ViEAutotestFileObserver: public webrtc::ViEFileObserver
+{
+public:
+    ViEAutotestFileObserver() {};
+    ~ViEAutotestFileObserver() {};
+
+    void PlayFileEnded(const WebRtc_Word32 fileId)
+    {
+        ViETest::Log("PlayFile ended");
+    }
+};
+
+void ViEAutoTest::ViEFileStandardTest()
+{
+#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
+    //***************************************************************
+    //	Begin create/initialize WebRTC Video Engine for testing
+    //***************************************************************
+    {
+        ViETest::Log("Starting a loopback call...");
+
+        TbInterfaces interfaces("ViEFileStandardTest");
+
+        webrtc::VideoEngine* ptrViE = interfaces.video_engine;
+        webrtc::ViEBase* ptrViEBase = interfaces.base;
+        webrtc::ViECapture* ptrViECapture = interfaces.capture;
+        webrtc::ViERender* ptrViERender = interfaces.render;
+        webrtc::ViECodec* ptrViECodec = interfaces.codec;
+        webrtc::ViERTP_RTCP* ptrViERtpRtcp = interfaces.rtp_rtcp;
+        webrtc::ViENetwork* ptrViENetwork = interfaces.network;
+
+        TbCaptureDevice captureDevice = TbCaptureDevice(interfaces);
+        int captureId = captureDevice.captureId;
+
+        int videoChannel = -1;
+        EXPECT_EQ(0, ptrViEBase->CreateChannel(videoChannel));
+        EXPECT_EQ(0, ptrViECapture->ConnectCaptureDevice(
+            captureId, videoChannel));
+
+        EXPECT_EQ(0, ptrViERtpRtcp->SetRTCPStatus(
+            videoChannel, webrtc::kRtcpCompound_RFC4585));
+        EXPECT_EQ(0, ptrViERtpRtcp->SetKeyFrameRequestMethod(
+            videoChannel, webrtc::kViEKeyFrameRequestPliRtcp));
+        EXPECT_EQ(0, ptrViERtpRtcp->SetTMMBRStatus(videoChannel, true));
+
+        EXPECT_EQ(0, ptrViERender->AddRenderer(
+            captureId, _window1, 0, 0.0, 0.0, 1.0, 1.0));
+        EXPECT_EQ(0, ptrViERender->AddRenderer(
+            videoChannel, _window2, 1, 0.0, 0.0, 1.0, 1.0));
+        EXPECT_EQ(0, ptrViERender->StartRender(captureId));
+        EXPECT_EQ(0, ptrViERender->StartRender(videoChannel));
+
+        webrtc::VideoCodec videoCodec;
+        memset(&videoCodec, 0, sizeof(webrtc::VideoCodec));
+        for (int idx = 0; idx < ptrViECodec->NumberOfCodecs(); idx++)
+        {
+            EXPECT_EQ(0, ptrViECodec->GetCodec(idx, videoCodec));
+            EXPECT_EQ(0, ptrViECodec->SetReceiveCodec(videoChannel,
+                                                      videoCodec));
+        }
+
+        // Find the codec used for encoding the channel
+        for (int idx = 0; idx < ptrViECodec->NumberOfCodecs(); idx++)
+        {
+            EXPECT_EQ(0, ptrViECodec->GetCodec(idx, videoCodec));
+            if (videoCodec.codecType == webrtc::kVideoCodecVP8)
+            {
+                EXPECT_EQ(0, ptrViECodec->SetSendCodec(videoChannel, videoCodec));
+                break;
+            }
+        }
+        // Find the codec used for recording.
+        for (int idx = 0; idx < ptrViECodec->NumberOfCodecs(); idx++)
+        {
+            EXPECT_EQ(0, ptrViECodec->GetCodec(idx, videoCodec));
+            if (videoCodec.codecType == webrtc::kVideoCodecI420)
+            {
+                break;
+            }
+        }
+
+
+        const char* ipAddress = "127.0.0.1";
+        const unsigned short rtpPort = 6000;
+        EXPECT_EQ(0, ptrViENetwork->SetLocalReceiver(videoChannel, rtpPort));
+        EXPECT_EQ(0, ptrViEBase->StartReceive(videoChannel));
+        EXPECT_EQ(0, ptrViENetwork->SetSendDestination(
+            videoChannel, ipAddress, rtpPort));
+        EXPECT_EQ(0, ptrViEBase->StartSend(videoChannel));
+        webrtc::ViEFile* ptrViEFile = webrtc::ViEFile::GetInterface(ptrViE);
+        EXPECT_TRUE(ptrViEFile != NULL);
+
+        webrtc::VoiceEngine* ptrVEEngine = webrtc::VoiceEngine::Create();
+        webrtc::VoEBase* ptrVEBase = webrtc::VoEBase::GetInterface(ptrVEEngine);
+        ptrVEBase->Init();
+
+        int audioChannel = ptrVEBase->CreateChannel();
+        ptrViEBase->SetVoiceEngine(ptrVEEngine);
+        ptrViEBase->ConnectAudioChannel(videoChannel, audioChannel);
+
+        webrtc::CodecInst audioCodec;
+        webrtc::VoECodec* ptrVECodec =
+            webrtc::VoECodec::GetInterface(ptrVEEngine);
+        for (int index = 0; index < ptrVECodec->NumOfCodecs(); index++)
+        {
+            ptrVECodec->GetCodec(index, audioCodec);
+            if (0 == strcmp(audioCodec.plname, "PCMU") || 0
+                == strcmp(audioCodec.plname, "PCMA"))
+            {
+                break; // these two types are allowed as avi recording formats
+            }
+        }
+
+        webrtc::CodecInst audioCodec2;
+
+        //***************************************************************
+        //	Engine ready. Begin testing class
+        //***************************************************************
+
+        // Call started
+        ViETest::Log("Call started\nYou should see local preview from camera\n"
+                     "in window 1 and the remote video in window 2.");
+        AutoTestSleep(2000);
+
+        const int RENDER_TIMEOUT = 1000;
+        const int TEST_SPACING = 1000;
+        const int VIDEO_LENGTH = 5000;
+
+        const std::string root = webrtc::test::ProjectRootPath() +
+            "src/video_engine/test/auto_test/media/";
+        const std::string renderStartImage = root + "renderStartImage.jpg";
+        const std::string captureDeviceImage = root + "captureDeviceImage.jpg";
+        const std::string renderTimeoutFile = root + "renderTimeoutImage.jpg";
+
+        const std::string output = webrtc::test::OutputPath();
+        const std::string snapshotCaptureDeviceFileName =
+            output + "snapshotCaptureDevice.jpg";
+        const std::string incomingVideo = output + "incomingVideo.avi";
+        const std::string outgoingVideo = output + "outgoingVideo.avi";
+        const std::string snapshotRenderFileName =
+            output + "snapshotRenderer.jpg";
+
+        webrtc::ViEPicture capturePicture;
+        webrtc::ViEPicture renderPicture;
+        webrtc::ViEPicture renderTimeoutPicture; // TODO: init with and image
+
+        ViEAutotestFileObserver fileObserver;
+        int fileId;
+
+        AutoTestSleep(TEST_SPACING);
+
+        // Test debug information recording.
+        EXPECT_EQ(0, ptrViEFile->StartDebugRecording(videoChannel,
+                     "vie_autotest_debug.yuv"));
+
+        // testing StartRecordIncomingVideo and StopRecordIncomingVideo
+        {
+            ViETest::Log("Recording incoming video (currently no audio) for %d "
+                         "seconds", VIDEO_LENGTH);
+
+            EXPECT_EQ(0, ptrViEFile->StartRecordIncomingVideo(
+                videoChannel, incomingVideo.c_str(), webrtc::NO_AUDIO,
+                audioCodec2, videoCodec));
+
+            AutoTestSleep(VIDEO_LENGTH);
+            ViETest::Log("Stop recording incoming video");
+
+            EXPECT_EQ(0, ptrViEFile->StopRecordIncomingVideo(videoChannel));
+            ViETest::Log("Done\n");
+        }
+
+        AutoTestSleep(TEST_SPACING);
+
+        // testing GetFileInformation
+        {
+            webrtc::VideoCodec fileVideoCodec;
+            webrtc::CodecInst fileAudioCodec;
+            ViETest::Log("Reading video file information");
+
+            EXPECT_EQ(0, ptrViEFile->GetFileInformation(
+                incomingVideo.c_str(), fileVideoCodec, fileAudioCodec));
+            PrintAudioCodec(fileAudioCodec);
+            PrintVideoCodec(fileVideoCodec);
+        }
+
+        // testing StartPlayFile and RegisterObserver
+        {
+            ViETest::Log("Start playing file: %s with observer",
+                         incomingVideo.c_str());
+            EXPECT_EQ(0, ptrViEFile->StartPlayFile(incomingVideo.c_str(),
+                                                   fileId));
+
+            ViETest::Log("Registering file observer");
+            EXPECT_EQ(0, ptrViEFile->RegisterObserver(fileId, fileObserver));
+            ViETest::Log("Done\n");
+        }
+
+        // testing SendFileOnChannel and StopSendFileOnChannel
+        {
+            ViETest::Log("Sending video on channel");
+            // should fail since we are sending the capture device.
+            EXPECT_NE(0, ptrViEFile->SendFileOnChannel(fileId, videoChannel));
+
+            // Disconnect the camera
+            EXPECT_EQ(0, ptrViECapture->DisconnectCaptureDevice(videoChannel));
+
+            // And try playing the file again.
+            EXPECT_EQ(0, ptrViEFile->SendFileOnChannel(fileId, videoChannel));
+
+            AutoTestSleep(VIDEO_LENGTH);
+            ViETest::Log("Stopped sending video on channel");
+            EXPECT_EQ(0, ptrViEFile->StopSendFileOnChannel(videoChannel));
+            ViETest::Log("Done\n");
+        }
+
+        AutoTestSleep(TEST_SPACING);
+
+        // stop playing the file
+        {
+            ViETest::Log("Stop playing the file.");
+            EXPECT_EQ(0, ptrViEFile->StopPlayFile(fileId));
+            ViETest::Log("Done\n");
+        }
+
+        // testing StartRecordOutgoingVideo and StopRecordOutgoingVideo
+        {
+            // connect the camera to the output.
+            EXPECT_EQ(0, ptrViECapture->ConnectCaptureDevice(
+              captureId, videoChannel));
+
+            ViETest::Log("Recording outgoing video (currently no audio) for %d "
+                         "seconds", VIDEO_LENGTH);
+            EXPECT_EQ(0, ptrViEFile->StartRecordOutgoingVideo(
+                videoChannel, outgoingVideo.c_str(), webrtc::NO_AUDIO,
+                audioCodec2, videoCodec));
+
+            AutoTestSleep(VIDEO_LENGTH);
+            ViETest::Log("Stop recording outgoing video");
+            EXPECT_EQ(0, ptrViEFile->StopRecordOutgoingVideo(videoChannel));
+            ViETest::Log("Done\n");
+        }
+
+        // again testing GetFileInformation
+        {
+            EXPECT_EQ(0, ptrViEFile->GetFileInformation(
+                incomingVideo.c_str(), videoCodec, audioCodec2));
+            PrintAudioCodec(audioCodec2);
+            PrintVideoCodec(videoCodec);
+        }
+
+        AutoTestSleep(TEST_SPACING);
+
+        // GetCaptureDeviceSnapshot
+        {
+            ViETest::Log("Testing GetCaptureDeviceSnapshot(int, ViEPicture)");
+            ViETest::Log("Taking a picture to use for displaying ViEPictures "
+                         "for the rest of file test");
+            ViETest::Log("Hold an object to the camera. Ready?...");
+            AutoTestSleep(1000);
+            ViETest::Log("3");
+            AutoTestSleep(1000);
+            ViETest::Log("...2");
+            AutoTestSleep(1000);
+            ViETest::Log("...1");
+            AutoTestSleep(1000);
+            ViETest::Log("...Taking picture!");
+            EXPECT_EQ(0, ptrViEFile->GetCaptureDeviceSnapshot(
+                captureId, capturePicture));
+            ViETest::Log("Picture has been taken.");
+            AutoTestSleep(TEST_SPACING);
+
+            ViETest::Log("Done\n");
+        }
+
+        AutoTestSleep(TEST_SPACING);
+
+        // GetRenderSnapshot
+        {
+            ViETest::Log("Testing GetRenderSnapshot(int, char*)");
+
+            ViETest::Log("Taking snapshot of videoChannel %d", captureId);
+            EXPECT_EQ(0, ptrViEFile->GetRenderSnapshot(
+                captureId, snapshotRenderFileName.c_str()));
+            ViETest::Log("Wrote image to file %s",
+                         snapshotRenderFileName.c_str());
+            ViETest::Log("Done\n");
+            AutoTestSleep(TEST_SPACING);
+        }
+
+        // GetRenderSnapshot
+        {
+            ViETest::Log("Testing GetRenderSnapshot(int, ViEPicture)");
+            EXPECT_EQ(0, ptrViEFile->GetRenderSnapshot(
+                captureId, renderPicture));
+            ViETest::Log("Done\n");
+        }
+
+        AutoTestSleep(TEST_SPACING);
+
+        // GetCaptureDeviceSnapshot
+        {
+            ViETest::Log("Testing GetCaptureDeviceSnapshot(int, char*)");
+            ViETest::Log("Taking snapshot from capture device %d", captureId);
+            EXPECT_EQ(0, ptrViEFile->GetCaptureDeviceSnapshot(
+                captureId, snapshotCaptureDeviceFileName.c_str()));
+            ViETest::Log("Wrote image to file %s",
+                         snapshotCaptureDeviceFileName.c_str());
+            ViETest::Log("Done\n");
+        }
+
+        AutoTestSleep(TEST_SPACING);
+
+        // Testing: SetCaptureDeviceImage
+        {
+            ViETest::Log("Testing SetCaptureDeviceImage(int, char*)");
+            EXPECT_EQ(0, ptrViECapture->StopCapture(captureId));
+            EXPECT_EQ(0, ptrViEFile->SetCaptureDeviceImage(
+                captureId, captureDeviceImage.c_str()));
+
+            ViETest::Log("you should see the capture device image now");
+            AutoTestSleep(2 * RENDER_TIMEOUT);
+            EXPECT_EQ(0, ptrViECapture->StartCapture(captureId));
+            ViETest::Log("Done\n");
+        }
+
+        AutoTestSleep(TEST_SPACING);
+
+        // Testing: SetCaptureDeviceImage
+        {
+            ViETest::Log("Testing SetCaptureDeviceImage(int, ViEPicture)");
+            EXPECT_EQ(0, ptrViECapture->StopCapture(captureId));
+            EXPECT_EQ(0, ptrViEFile->SetCaptureDeviceImage(
+                captureId, capturePicture));
+
+            ViETest::Log("you should see the capture device image now");
+            AutoTestSleep(2 * RENDER_TIMEOUT);
+            EXPECT_EQ(0, ptrViECapture->StartCapture(captureId));
+            ViETest::Log("Done\n");
+        }
+
+        AutoTestSleep(TEST_SPACING);
+
+        // testing SetRenderStartImage(videoChannel, renderStartImage);
+        {
+            ViETest::Log("Testing SetRenderStartImage(int, char*)");
+            // set render image, then stop capture and stop render to display it
+            ViETest::Log("Stoping renderer, setting start image, then "
+                         "restarting");
+            EXPECT_EQ(0, ptrViEFile->SetRenderStartImage(
+                videoChannel, renderStartImage.c_str()));
+            EXPECT_EQ(0, ptrViECapture->StopCapture(captureId));
+            EXPECT_EQ(0, ptrViERender->StopRender(videoChannel));
+
+            ViETest::Log("Render start image should be displayed.");
+            AutoTestSleep(RENDER_TIMEOUT);
+
+            // restarting capture and render
+            EXPECT_EQ(0, ptrViECapture->StartCapture(captureId));
+            EXPECT_EQ(0, ptrViERender->StartRender(videoChannel));
+            ViETest::Log("Done\n");
+        }
+
+        AutoTestSleep(TEST_SPACING);
+
+        // testing SetRenderStartImage(videoChannel, renderStartImage);
+        {
+            ViETest::Log("Testing SetRenderStartImage(int, ViEPicture)");
+            // set render image, then stop capture and stop render to display it
+            ViETest::Log("Stoping renderer, setting start image, then "
+                         "restarting");
+            EXPECT_EQ(0, ptrViEFile->SetRenderStartImage(
+                videoChannel, capturePicture));
+            EXPECT_EQ(0, ptrViECapture->StopCapture(captureId));
+            EXPECT_EQ(0, ptrViERender->StopRender(videoChannel));
+
+            ViETest::Log("Render start image should be displayed.");
+            AutoTestSleep(RENDER_TIMEOUT);
+
+            // restarting capture and render
+            EXPECT_EQ(0, ptrViECapture->StartCapture(captureId));
+            EXPECT_EQ(0, ptrViERender->StartRender(videoChannel));
+            ViETest::Log("Done\n");
+        }
+
+        AutoTestSleep(TEST_SPACING);
+
+        // testing SetRenderTimeoutImage(videoChannel, renderTimeoutFile,
+        // RENDER_TIMEOUT);
+        {
+            ViETest::Log("Testing SetRenderTimeoutImage(int, char*)");
+            ViETest::Log("Stopping capture device to induce timeout of %d ms",
+                         RENDER_TIMEOUT);
+            EXPECT_EQ(0, ptrViEFile->SetRenderTimeoutImage(
+                videoChannel, renderTimeoutFile.c_str(), RENDER_TIMEOUT));
+
+            // now stop sending frames to the remote renderer and wait for
+            // timeout
+            EXPECT_EQ(0, ptrViECapture->StopCapture(captureId));
+            AutoTestSleep(RENDER_TIMEOUT);
+            ViETest::Log("Timeout image should be displayed now for %d ms",
+                         RENDER_TIMEOUT * 2);
+            AutoTestSleep(RENDER_TIMEOUT * 2);
+
+            // restart the capture device to undo the timeout
+            EXPECT_EQ(0, ptrViECapture->StartCapture(captureId));
+            ViETest::Log("Restarting capture device");
+            AutoTestSleep(RENDER_TIMEOUT);
+            ViETest::Log("Done\n");
+        }
+
+        AutoTestSleep(TEST_SPACING);
+
+        // Need to create a ViEPicture object to pass into this function.
+        // SetRenderTimeoutImage(videoChannel, renderTimeoutFile,
+        // RENDER_TIMEOUT);
+        {
+            ViETest::Log("Testing SetRenderTimeoutImage(int, ViEPicture)");
+            ViETest::Log("Stopping capture device to induce timeout of %d",
+                         RENDER_TIMEOUT);
+            EXPECT_EQ(0, ptrViEFile->SetRenderTimeoutImage(
+                videoChannel, capturePicture, RENDER_TIMEOUT));
+
+            // now stop sending frames to the remote renderer and wait for
+            // timeout
+            EXPECT_EQ(0, ptrViECapture->StopCapture(captureId));
+            AutoTestSleep(RENDER_TIMEOUT);
+            ViETest::Log("Timeout image should be displayed now for %d",
+                         RENDER_TIMEOUT * 2);
+            AutoTestSleep(RENDER_TIMEOUT * 2);
+
+            // restart the capture device to undo the timeout
+            EXPECT_EQ(0, ptrViECapture->StartCapture(captureId));
+            ViETest::Log("Restarting capture device");
+            ViETest::Log("Done\n");
+        }
+
+        // testing DeregisterObserver
+        {
+            ViETest::Log("Deregistering file observer");
+            // Should fail since we don't observe this file.
+            EXPECT_NE(0, ptrViEFile->DeregisterObserver(fileId, fileObserver));
+        }
+
+        // Stop debug record.
+        EXPECT_EQ(0, ptrViEFile->StopDebugRecording(videoChannel));
+
+        //***************************************************************
+        //	Testing finished. Tear down Video Engine
+        //***************************************************************
+
+        EXPECT_EQ(0, ptrViEBase->DisconnectAudioChannel(videoChannel));
+        EXPECT_EQ(0, ptrViEBase->SetVoiceEngine(NULL));
+        EXPECT_EQ(0, ptrVEBase->DeleteChannel(audioChannel));
+        // VoE reference counting is per-object, so we use EXPECT_NE
+        EXPECT_NE(0, ptrVEBase->Release());
+        EXPECT_NE(0, ptrVECodec->Release());
+        EXPECT_TRUE(webrtc::VoiceEngine::Delete(ptrVEEngine));
+
+        EXPECT_EQ(0, ptrViEBase->StopReceive(videoChannel));
+        EXPECT_EQ(0, ptrViEBase->StopSend(videoChannel));
+        EXPECT_EQ(0, ptrViERender->StopRender(videoChannel));
+        EXPECT_EQ(0, ptrViERender->RemoveRenderer(captureId));
+        EXPECT_EQ(0, ptrViERender->RemoveRenderer(videoChannel));
+        EXPECT_EQ(0, ptrViECapture->DisconnectCaptureDevice(videoChannel));
+        EXPECT_EQ(0, ptrViEFile->FreePicture(capturePicture));
+        EXPECT_EQ(0, ptrViEFile->FreePicture(renderPicture));
+        EXPECT_EQ(0, ptrViEFile->FreePicture(renderTimeoutPicture));
+        EXPECT_EQ(0, ptrViEBase->DeleteChannel(videoChannel));
+
+        EXPECT_EQ(0, ptrViEFile->Release());
+    }
+#endif
+}
+
+void ViEAutoTest::ViEFileExtendedTest()
+{
+}
+
+void ViEAutoTest::ViEFileAPITest()
+{
+}
diff --git a/src/video_engine/test/auto_test/source/vie_autotest_image_process.cc b/src/video_engine/test/auto_test/source/vie_autotest_image_process.cc
new file mode 100644
index 0000000..c29d97d
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_autotest_image_process.cc
@@ -0,0 +1,237 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// vie_autotest_image_process.cc
+//
+
+// Settings
+#include "vie_autotest_defines.h"
+#include "vie_autotest.h"
+#include "engine_configurations.h"
+
+#include "tb_interfaces.h"
+#include "tb_video_channel.h"
+#include "tb_capture_device.h"
+
+class MyEffectFilter: public webrtc::ViEEffectFilter
+{
+public:
+    MyEffectFilter() {}
+
+    ~MyEffectFilter() {}
+
+    virtual int Transform(int size, unsigned char* frameBuffer,
+                          unsigned int timeStamp90KHz, unsigned int width,
+                          unsigned int height)
+    {
+        // Black and white
+        memset(frameBuffer + (2 * size) / 3, 0x7f, size / 3);
+        return 0;
+    }
+};
+
+void ViEAutoTest::ViEImageProcessStandardTest()
+{
+    //***************************************************************
+    //	Begin create/initialize WebRTC Video Engine for testing
+    //***************************************************************
+    int rtpPort = 6000;
+    // Create VIE
+    TbInterfaces ViE("ViEImageProcessAPITest");
+    // Create a video channel
+    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+    // Create a capture device
+    TbCaptureDevice tbCapture(ViE);
+
+    tbCapture.ConnectTo(tbChannel.videoChannel);
+    tbChannel.StartReceive(rtpPort);
+    tbChannel.StartSend(rtpPort);
+
+    MyEffectFilter effectFilter;
+
+    RenderCaptureDeviceAndOutputStream(&ViE, &tbChannel, &tbCapture);
+
+    ViETest::Log("Capture device is renderered in Window 1");
+    ViETest::Log("Remote stream is renderered in Window 2");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    //***************************************************************
+    //	Engine ready. Begin testing class
+    //***************************************************************
+
+
+    EXPECT_EQ(0, ViE.image_process->RegisterCaptureEffectFilter(
+        tbCapture.captureId, effectFilter));
+
+    ViETest::Log("Black and white filter registered for capture device, "
+                 "affects both windows");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, ViE.image_process->DeregisterCaptureEffectFilter(
+        tbCapture.captureId));
+
+    EXPECT_EQ(0, ViE.image_process->RegisterRenderEffectFilter(
+        tbChannel.videoChannel, effectFilter));
+
+    ViETest::Log("Remove capture effect filter, adding filter for incoming "
+                 "stream");
+    ViETest::Log("Only Window 2 should be black and white");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, ViE.render->StopRender(tbCapture.captureId));
+    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbCapture.captureId));
+
+    int rtpPort2 = rtpPort + 100;
+    // Create a video channel
+    TbVideoChannel tbChannel2(ViE, webrtc::kVideoCodecVP8);
+
+    tbCapture.ConnectTo(tbChannel2.videoChannel);
+    tbChannel2.StartReceive(rtpPort2);
+    tbChannel2.StartSend(rtpPort2);
+
+    EXPECT_EQ(0, ViE.render->AddRenderer(
+        tbChannel2.videoChannel, _window1, 1, 0.0, 0.0, 1.0, 1.0));
+    EXPECT_EQ(0, ViE.render->StartRender(tbChannel2.videoChannel));
+    EXPECT_EQ(0, ViE.image_process->DeregisterRenderEffectFilter(
+        tbChannel.videoChannel));
+
+    ViETest::Log("Local renderer removed, added new channel and rendering in "
+                 "Window1.");
+
+    EXPECT_EQ(0, ViE.image_process->RegisterCaptureEffectFilter(
+        tbCapture.captureId, effectFilter));
+
+    ViETest::Log("Black and white filter registered for capture device, "
+                 "affects both windows");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, ViE.image_process->DeregisterCaptureEffectFilter(
+        tbCapture.captureId));
+
+    EXPECT_EQ(0, ViE.image_process->RegisterSendEffectFilter(
+        tbChannel.videoChannel, effectFilter));
+
+    ViETest::Log("Capture filter removed.");
+    ViETest::Log("Black and white filter registered for one channel, Window2 "
+                 "should be black and white");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, ViE.image_process->DeregisterSendEffectFilter(
+        tbChannel.videoChannel));
+
+    //***************************************************************
+    //	Testing finished. Tear down Video Engine
+    //***************************************************************
+}
+
+void ViEAutoTest::ViEImageProcessExtendedTest()
+{
+}
+
+void ViEAutoTest::ViEImageProcessAPITest()
+{
+    TbInterfaces ViE("ViEImageProcessAPITest");
+    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+    TbCaptureDevice tbCapture(ViE);
+
+    tbCapture.ConnectTo(tbChannel.videoChannel);
+
+    MyEffectFilter effectFilter;
+
+    //
+    // Capture effect filter
+    //
+    // Add effect filter
+    EXPECT_EQ(0, ViE.image_process->RegisterCaptureEffectFilter(
+        tbCapture.captureId, effectFilter));
+    // Add again -> error
+    EXPECT_NE(0, ViE.image_process->RegisterCaptureEffectFilter(
+        tbCapture.captureId, effectFilter));
+    EXPECT_EQ(0, ViE.image_process->DeregisterCaptureEffectFilter(
+        tbCapture.captureId));
+
+    // Double deregister
+    EXPECT_NE(0, ViE.image_process->DeregisterCaptureEffectFilter(
+        tbCapture.captureId));
+    // Non-existing capture device
+    EXPECT_NE(0, ViE.image_process->RegisterCaptureEffectFilter(
+        tbChannel.videoChannel, effectFilter));
+
+    //
+    // Render effect filter
+    //
+    EXPECT_EQ(0, ViE.image_process->RegisterRenderEffectFilter(
+        tbChannel.videoChannel, effectFilter));
+    EXPECT_NE(0, ViE.image_process->RegisterRenderEffectFilter(
+        tbChannel.videoChannel, effectFilter));
+    EXPECT_EQ(0, ViE.image_process->DeregisterRenderEffectFilter(
+        tbChannel.videoChannel));
+    EXPECT_NE(0, ViE.image_process->DeregisterRenderEffectFilter(
+        tbChannel.videoChannel));
+
+    // Non-existing channel id
+    EXPECT_NE(0, ViE.image_process->RegisterRenderEffectFilter(
+        tbCapture.captureId, effectFilter));
+
+    //
+    // Send effect filter
+    //
+    EXPECT_EQ(0, ViE.image_process->RegisterSendEffectFilter(
+        tbChannel.videoChannel, effectFilter));
+    EXPECT_NE(0, ViE.image_process->RegisterSendEffectFilter(
+        tbChannel.videoChannel, effectFilter));
+    EXPECT_EQ(0, ViE.image_process->DeregisterSendEffectFilter(
+        tbChannel.videoChannel));
+    EXPECT_NE(0, ViE.image_process->DeregisterSendEffectFilter(
+        tbChannel.videoChannel));
+    EXPECT_NE(0, ViE.image_process->RegisterSendEffectFilter(
+        tbCapture.captureId, effectFilter));
+
+    //
+    // Denoising
+    //
+    EXPECT_EQ(0, ViE.image_process->EnableDenoising(tbCapture.captureId, true));
+    // If the denoising is already enabled, it will just reuturn 0.
+    EXPECT_EQ(0, ViE.image_process->EnableDenoising(tbCapture.captureId, true));
+    EXPECT_EQ(0, ViE.image_process->EnableDenoising(
+        tbCapture.captureId, false));
+    // If the denoising is already disabled, it will just reuturn 0.
+    EXPECT_EQ(0, ViE.image_process->EnableDenoising(
+        tbCapture.captureId, false));
+    EXPECT_NE(0, ViE.image_process->EnableDenoising(
+        tbChannel.videoChannel, true));
+
+    //
+    // Deflickering
+    //
+    EXPECT_EQ(0, ViE.image_process->EnableDeflickering(
+        tbCapture.captureId, true));
+    EXPECT_NE(0, ViE.image_process->EnableDeflickering(
+        tbCapture.captureId, true));
+    EXPECT_EQ(0, ViE.image_process->EnableDeflickering(
+        tbCapture.captureId, false));
+    EXPECT_NE(0, ViE.image_process->EnableDeflickering(
+        tbCapture.captureId, false));
+    EXPECT_NE(0, ViE.image_process->EnableDeflickering(
+        tbChannel.videoChannel, true));
+
+    //
+    // Color enhancement
+    //
+    EXPECT_EQ(0, ViE.image_process->EnableColorEnhancement(
+        tbChannel.videoChannel, false));
+    EXPECT_EQ(0, ViE.image_process->EnableColorEnhancement(
+        tbChannel.videoChannel, true));
+    EXPECT_EQ(0, ViE.image_process->EnableColorEnhancement(
+        tbChannel.videoChannel, false));
+    EXPECT_NE(0, ViE.image_process->EnableColorEnhancement(
+        tbCapture.captureId, true));
+}
diff --git a/src/video_engine/test/auto_test/source/vie_autotest_linux.cc b/src/video_engine/test/auto_test/source/vie_autotest_linux.cc
new file mode 100644
index 0000000..8a99c03
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_autotest_linux.cc
@@ -0,0 +1,143 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// vie_autotest_linux.cc
+//
+#include "vie_autotest_linux.h"
+
+#include <string>
+
+#include "vie_autotest_defines.h"
+#include "vie_autotest_main.h"
+#include "engine_configurations.h"
+#include "critical_section_wrapper.h"
+#include "thread_wrapper.h"
+
+ViEAutoTestWindowManager::ViEAutoTestWindowManager()
+    : _hdsp1(NULL),
+      _hdsp2(NULL) {
+}
+
+ViEAutoTestWindowManager::~ViEAutoTestWindowManager() {
+  TerminateWindows();
+}
+
+void* ViEAutoTestWindowManager::GetWindow1() {
+  return reinterpret_cast<void*>(_hwnd1);
+}
+
+void* ViEAutoTestWindowManager::GetWindow2() {
+  return reinterpret_cast<void*>(_hwnd2);
+}
+
+int ViEAutoTestWindowManager::TerminateWindows() {
+  if (_hdsp1) {
+    ViEDestroyWindow(&_hwnd1, _hdsp1);
+    _hdsp1 = NULL;
+  }
+  if (_hdsp2) {
+    ViEDestroyWindow(&_hwnd2, _hdsp2);
+    _hdsp2 = NULL;
+  }
+  return 0;
+}
+
+int ViEAutoTestWindowManager::CreateWindows(AutoTestRect window1Size,
+                                            AutoTestRect window2Size,
+                                            void* window1Title,
+                                            void* window2Title) {
+  ViECreateWindow(&_hwnd1, &_hdsp1, window1Size.origin.x,
+                  window1Size.origin.y, window1Size.size.width,
+                  window1Size.size.height,
+                  reinterpret_cast<char*>(window1Title));
+  ViECreateWindow(&_hwnd2, &_hdsp2, window2Size.origin.x,
+                  window2Size.origin.y, window2Size.size.width,
+                  window2Size.size.height,
+                  reinterpret_cast<char*>(window2Title));
+
+  return 0;
+}
+
+int ViEAutoTestWindowManager::ViECreateWindow(Window *out_window,
+                                              Display **out_display, int x_pos,
+                                              int y_pos, int width, int height,
+                                              char* title) {
+  Display* display = XOpenDisplay(NULL);
+  if (display == NULL) {
+    // There's no point to continue if this happens: nothing will work anyway.
+    printf("Failed to connect to X server: X environment likely broken\n");
+    exit(-1);
+  }
+
+  int screen = DefaultScreen(display);
+
+  // Try to establish a 24-bit TrueColor display
+  // (our environment must allow this).
+  XVisualInfo visual_info;
+  if (XMatchVisualInfo(display, screen, 24, TrueColor, &visual_info) == 0) {
+    printf("Failed to establish 24-bit TrueColor in X environment.\n");
+    exit(-1);
+  }
+
+  // Create suitable window attributes.
+  XSetWindowAttributes window_attributes;
+  window_attributes.colormap = XCreateColormap(
+      display, DefaultRootWindow(display), visual_info.visual, AllocNone);
+  window_attributes.event_mask = StructureNotifyMask | ExposureMask;
+  window_attributes.background_pixel = 0;
+  window_attributes.border_pixel = 0;
+
+  unsigned long attribute_mask = CWBackPixel | CWBorderPixel | CWColormap |
+                                 CWEventMask;
+
+  Window _window = XCreateWindow(display, DefaultRootWindow(display), x_pos,
+                                 y_pos, width, height, 0, visual_info.depth,
+                                 InputOutput, visual_info.visual,
+                                 attribute_mask, &window_attributes);
+
+  // Set window name.
+  XStoreName(display, _window, title);
+  XSetIconName(display, _window, title);
+
+  // Make x report events for mask.
+  XSelectInput(display, _window, StructureNotifyMask);
+
+  // Map the window to the display.
+  XMapWindow(display, _window);
+
+  // Wait for map event.
+  XEvent event;
+  do {
+    XNextEvent(display, &event);
+  } while (event.type != MapNotify || event.xmap.event != _window);
+
+  *out_window = _window;
+  *out_display = display;
+  return 0;
+}
+
+int ViEAutoTestWindowManager::ViEDestroyWindow(Window *window,
+                                               Display *display) {
+  XUnmapWindow(display, *window);
+  XDestroyWindow(display, *window);
+  XSync(display, false);
+  XCloseDisplay(display);
+  return 0;
+}
+
+bool ViEAutoTestWindowManager::SetTopmostWindow() {
+  return 0;
+}
+
+int main(int argc, char** argv) {
+  ViEAutoTestMain auto_test;
+  return auto_test.RunTests(argc, argv);
+}
diff --git a/src/video_engine/test/auto_test/source/vie_autotest_loopback.cc b/src/video_engine/test/auto_test/source/vie_autotest_loopback.cc
new file mode 100644
index 0000000..d660745
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_autotest_loopback.cc
@@ -0,0 +1,663 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// vie_autotest_loopback.cc
+//
+// This code is also used as sample code for ViE 3.0
+//
+
+// ===================================================================
+//
+// BEGIN: VideoEngine 3.0 Sample Code
+//
+
+#include <iostream>
+
+#include "common_types.h"
+#include "tb_external_transport.h"
+#include "voe_base.h"
+#include "vie_autotest_defines.h"
+#include "vie_autotest.h"
+#include "vie_base.h"
+#include "vie_capture.h"
+#include "vie_codec.h"
+#include "vie_network.h"
+#include "vie_render.h"
+#include "vie_rtp_rtcp.h"
+
+#define VCM_RED_PAYLOAD_TYPE        96
+#define VCM_ULPFEC_PAYLOAD_TYPE     97
+
+int VideoEngineSampleCode(void* window1, void* window2)
+{
+    //********************************************************
+    //  Begin create/initialize Video Engine for testing
+    //********************************************************
+
+    int error = 0;
+
+    //
+    // Create a VideoEngine instance
+    //
+    webrtc::VideoEngine* ptrViE = NULL;
+    ptrViE = webrtc::VideoEngine::Create();
+    if (ptrViE == NULL)
+    {
+        printf("ERROR in VideoEngine::Create\n");
+        return -1;
+    }
+
+    error = ptrViE->SetTraceFilter(webrtc::kTraceAll);
+    if (error == -1)
+    {
+        printf("ERROR in VideoEngine::SetTraceLevel\n");
+        return -1;
+    }
+
+    std::string trace_file =
+        ViETest::GetResultOutputPath() + "ViELoopbackCall_trace.txt";
+    error = ptrViE->SetTraceFile(trace_file.c_str());
+    if (error == -1)
+    {
+        printf("ERROR in VideoEngine::SetTraceFile\n");
+        return -1;
+    }
+
+    //
+    // Init VideoEngine and create a channel
+    //
+    webrtc::ViEBase* ptrViEBase = webrtc::ViEBase::GetInterface(ptrViE);
+    if (ptrViEBase == NULL)
+    {
+        printf("ERROR in ViEBase::GetInterface\n");
+        return -1;
+    }
+
+    error = ptrViEBase->Init();
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::Init\n");
+        return -1;
+    }
+
+    int videoChannel = -1;
+    error = ptrViEBase->CreateChannel(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::CreateChannel\n");
+        return -1;
+    }
+
+    //
+    // List available capture devices, allocate and connect.
+    //
+    webrtc::ViECapture* ptrViECapture =
+        webrtc::ViECapture::GetInterface(ptrViE);
+    if (ptrViEBase == NULL)
+    {
+        printf("ERROR in ViECapture::GetInterface\n");
+        return -1;
+    }
+
+    const unsigned int KMaxDeviceNameLength = 128;
+    const unsigned int KMaxUniqueIdLength = 256;
+    char deviceName[KMaxDeviceNameLength];
+    memset(deviceName, 0, KMaxDeviceNameLength);
+    char uniqueId[KMaxUniqueIdLength];
+    memset(uniqueId, 0, KMaxUniqueIdLength);
+
+    printf("Available capture devices:\n");
+    int captureIdx = 0;
+    for (captureIdx = 0;
+         captureIdx < ptrViECapture->NumberOfCaptureDevices();
+         captureIdx++)
+    {
+        memset(deviceName, 0, KMaxDeviceNameLength);
+        memset(uniqueId, 0, KMaxUniqueIdLength);
+
+        error = ptrViECapture->GetCaptureDevice(captureIdx, deviceName,
+                                                KMaxDeviceNameLength, uniqueId,
+                                                KMaxUniqueIdLength);
+        if (error == -1)
+        {
+            printf("ERROR in ViECapture::GetCaptureDevice\n");
+            return -1;
+        }
+        printf("\t %d. %s\n", captureIdx + 1, deviceName);
+    }
+    printf("\nChoose capture device: ");
+#ifdef WEBRTC_ANDROID
+    captureIdx = 0;
+    printf("0\n");
+#else
+    if (scanf("%d", &captureIdx) != 1)
+    {
+        printf("Error in scanf()\n");
+        return -1;
+    }
+    getchar();
+    captureIdx = captureIdx - 1; // Compensate for idx start at 1.
+#endif
+    error = ptrViECapture->GetCaptureDevice(captureIdx, deviceName,
+                                            KMaxDeviceNameLength, uniqueId,
+                                            KMaxUniqueIdLength);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::GetCaptureDevice\n");
+        return -1;
+    }
+
+    int captureId = 0;
+    error = ptrViECapture->AllocateCaptureDevice(uniqueId, KMaxUniqueIdLength,
+                                                 captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::AllocateCaptureDevice\n");
+        return -1;
+    }
+
+    error = ptrViECapture->ConnectCaptureDevice(captureId, videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::ConnectCaptureDevice\n");
+        return -1;
+    }
+
+    error = ptrViECapture->StartCapture(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::StartCapture\n");
+        return -1;
+    }
+
+    //
+    // RTP/RTCP settings
+    //
+    webrtc::ViERTP_RTCP* ptrViERtpRtcp =
+        webrtc::ViERTP_RTCP::GetInterface(ptrViE);
+    if (ptrViERtpRtcp == NULL)
+    {
+        printf("ERROR in ViERTP_RTCP::GetInterface\n");
+        return -1;
+    }
+
+    error = ptrViERtpRtcp->SetRTCPStatus(videoChannel,
+                                         webrtc::kRtcpCompound_RFC4585);
+    if (error == -1)
+    {
+        printf("ERROR in ViERTP_RTCP::SetRTCPStatus\n");
+        return -1;
+    }
+
+    error = ptrViERtpRtcp->SetKeyFrameRequestMethod(
+        videoChannel, webrtc::kViEKeyFrameRequestPliRtcp);
+    if (error == -1)
+    {
+        printf("ERROR in ViERTP_RTCP::SetKeyFrameRequestMethod\n");
+        return -1;
+    }
+
+    error = ptrViERtpRtcp->SetRembStatus(videoChannel, true, true);
+    if (error == -1)
+    {
+        printf("ERROR in ViERTP_RTCP::SetTMMBRStatus\n");
+        return -1;
+    }
+
+    //
+    // Set up rendering
+    //
+    webrtc::ViERender* ptrViERender = webrtc::ViERender::GetInterface(ptrViE);
+    if (ptrViERender == NULL)
+    {
+        printf("ERROR in ViERender::GetInterface\n");
+        return -1;
+    }
+
+    error
+        = ptrViERender->AddRenderer(captureId, window1, 0, 0.0, 0.0, 1.0, 1.0);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::AddRenderer\n");
+        return -1;
+    }
+
+    error = ptrViERender->StartRender(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StartRender\n");
+        return -1;
+    }
+
+    error = ptrViERender->AddRenderer(videoChannel, window2, 1, 0.0, 0.0, 1.0,
+                                      1.0);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::AddRenderer\n");
+        return -1;
+    }
+
+    error = ptrViERender->StartRender(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StartRender\n");
+        return -1;
+    }
+
+    //
+    // Setup codecs
+    //
+    webrtc::ViECodec* ptrViECodec = webrtc::ViECodec::GetInterface(ptrViE);
+    if (ptrViECodec == NULL)
+    {
+        printf("ERROR in ViECodec::GetInterface\n");
+        return -1;
+    }
+
+    // Check available codecs and prepare receive codecs
+    printf("\nAvailable codecs:\n");
+    webrtc::VideoCodec videoCodec;
+    memset(&videoCodec, 0, sizeof(webrtc::VideoCodec));
+    int codecIdx = 0;
+    for (codecIdx = 0; codecIdx < ptrViECodec->NumberOfCodecs(); codecIdx++)
+    {
+        error = ptrViECodec->GetCodec(codecIdx, videoCodec);
+        if (error == -1)
+        {
+            printf("ERROR in ViECodec::GetCodec\n");
+            return -1;
+        }
+
+        // try to keep the test frame size small when I420
+        if (videoCodec.codecType == webrtc::kVideoCodecI420)
+        {
+            videoCodec.width = 176;
+            videoCodec.height = 144;
+        }
+
+        error = ptrViECodec->SetReceiveCodec(videoChannel, videoCodec);
+        if (error == -1)
+        {
+            printf("ERROR in ViECodec::SetReceiveCodec\n");
+            return -1;
+        }
+        if (videoCodec.codecType != webrtc::kVideoCodecRED
+            && videoCodec.codecType != webrtc::kVideoCodecULPFEC)
+        {
+            printf("\t %d. %s\n", codecIdx + 1, videoCodec.plName);
+        }
+    }
+    printf("Choose codec: ");
+#ifdef WEBRTC_ANDROID
+    codecIdx = 0;
+    printf("0\n");
+#else
+    if (scanf("%d", &codecIdx) != 1)
+    {
+        printf("Error in scanf()\n");
+        return -1;
+    }
+    getchar();
+    codecIdx = codecIdx - 1; // Compensate for idx start at 1.
+#endif
+
+    error = ptrViECodec->GetCodec(codecIdx, videoCodec);
+    if (error == -1)
+    {
+        printf("ERROR in ViECodec::GetCodec\n");
+        return -1;
+    }
+
+    // Set spatial resolution option
+    std::string str;
+    std::cout << std::endl;
+    std::cout << "Enter frame size option (default is CIF):" << std::endl;
+    std::cout << "1. QCIF (176X144) " << std::endl;
+    std::cout << "2. CIF  (352X288) " << std::endl;
+    std::cout << "3. VGA  (640X480) " << std::endl;
+    std::cout << "4. 4CIF (704X576) " << std::endl;
+    std::cout << "5. WHD  (1280X720) " << std::endl;
+    std::getline(std::cin, str);
+    int resolnOption = atoi(str.c_str());
+    // Try to keep the test frame size small when I420
+    if (videoCodec.codecType == webrtc::kVideoCodecI420)
+    {
+       resolnOption = 1;
+    }
+    switch (resolnOption)
+    {
+        case 1:
+            videoCodec.width = 176;
+            videoCodec.height = 144;
+            break;
+        case 2:
+            videoCodec.width = 352;
+            videoCodec.height = 288;
+            break;
+        case 3:
+            videoCodec.width = 640;
+            videoCodec.height = 480;
+            break;
+        case 4:
+            videoCodec.width = 704;
+            videoCodec.height = 576;
+            break;
+        case 5:
+            videoCodec.width = 1280;
+            videoCodec.height = 720;
+            break;
+    }
+
+    // Set number of temporal layers.
+    std::cout << std::endl;
+    std::cout << "Choose number of temporal layers (1 to 4).";
+    std::cout << "Press enter for default: \n";
+    std::getline(std::cin, str);
+    int numTemporalLayers = atoi(str.c_str());
+    if(numTemporalLayers != 0)
+    {
+        videoCodec.codecSpecific.VP8.numberOfTemporalLayers = numTemporalLayers;
+    }
+
+    // Set start bit rate
+    std::cout << std::endl;
+    std::cout << "Choose start rate (in kbps). Press enter for default:  ";
+    std::getline(std::cin, str);
+    int startRate = atoi(str.c_str());
+    if(startRate != 0)
+    {
+        videoCodec.startBitrate=startRate;
+    }
+
+    error = ptrViECodec->SetSendCodec(videoChannel, videoCodec);
+    if (error == -1)
+    {
+        printf("ERROR in ViECodec::SetSendCodec\n");
+        return -1;
+    }
+
+    //
+    // Choose Protection Mode
+    //
+    std::cout << std::endl;
+    std::cout << "Enter Protection Method:" << std::endl;
+    std::cout << "0. None" << std::endl;
+    std::cout << "1. FEC" << std::endl;
+    std::cout << "2. NACK" << std::endl;
+    std::cout << "3. NACK+FEC" << std::endl;
+    std::getline(std::cin, str);
+    int protectionMethod = atoi(str.c_str());
+    error = 0;
+    bool temporalToggling = true;
+    switch (protectionMethod)
+    {
+        case 0: // None: default is no protection
+            break;
+
+        case 1: // FEC only
+            error = ptrViERtpRtcp->SetFECStatus(videoChannel,
+                                                true,
+                                                VCM_RED_PAYLOAD_TYPE,
+                                                VCM_ULPFEC_PAYLOAD_TYPE);
+            temporalToggling = false;
+            break;
+
+        case 2: // Nack only
+            error = ptrViERtpRtcp->SetNACKStatus(videoChannel, true);
+
+            break;
+
+        case 3: // Hybrid NAck and FEC
+            error = ptrViERtpRtcp->SetHybridNACKFECStatus(
+                videoChannel,
+                true,
+                VCM_RED_PAYLOAD_TYPE,
+                VCM_ULPFEC_PAYLOAD_TYPE);
+            temporalToggling = false;
+            break;
+     }
+
+    if (error < 0)
+    {
+        printf("ERROR in ViERTP_RTCP::SetProtectionStatus\n");
+    }
+
+
+    //
+    // Address settings
+    //
+    webrtc::ViENetwork* ptrViENetwork =
+        webrtc::ViENetwork::GetInterface(ptrViE);
+    if (ptrViENetwork == NULL)
+    {
+        printf("ERROR in ViENetwork::GetInterface\n");
+        return -1;
+    }
+
+    // Setting External transport
+    TbExternalTransport extTransport(*(ptrViENetwork));
+
+    int testMode = 0;
+    std::cout << std::endl;
+    std::cout << "Enter 1 for testing packet loss and delay with "
+        "external transport: ";
+    std::string test_str;
+    std::getline(std::cin, test_str);
+    testMode = atoi(test_str.c_str());
+    if (testMode == 1)
+    {
+        // Avoid changing SSRC due to collision.
+        error = ptrViERtpRtcp->SetLocalSSRC(videoChannel, 1);
+
+        error = ptrViENetwork->RegisterSendTransport(videoChannel,
+                                                     extTransport);
+        if (error == -1)
+        {
+            printf("ERROR in ViECodec::RegisterSendTransport \n");
+            return -1;
+        }
+
+        // Set up packet loss value
+        std::cout << "Enter Packet Loss Percentage" << std::endl;
+        std::string rate_str;
+        std::getline(std::cin, rate_str);
+        int rate = atoi(rate_str.c_str());
+        extTransport.SetPacketLoss(rate);
+        if (rate) {
+          temporalToggling = false;
+        }
+
+        // Set network delay value
+        std::cout << "Enter network delay value [mS]" << std::endl;
+        std::string delay_str;
+        std::getline(std::cin, delay_str);
+        int delayMs = atoi(delay_str.c_str());
+        extTransport.SetNetworkDelay(delayMs);
+
+        if (numTemporalLayers > 1 && temporalToggling) {
+          extTransport.SetTemporalToggle(numTemporalLayers);
+        } else {
+          // Disabled
+          extTransport.SetTemporalToggle(0);
+        }
+    }
+    else
+    {
+        const char* ipAddress = "127.0.0.1";
+        const unsigned short rtpPort = 6000;
+        std::cout << std::endl;
+        std::cout << "Using rtp port: " << rtpPort << std::endl;
+        std::cout << std::endl;
+        error = ptrViENetwork->SetLocalReceiver(videoChannel, rtpPort);
+        if (error == -1)
+        {
+            printf("ERROR in ViENetwork::SetLocalReceiver\n");
+            return -1;
+        }
+        error = ptrViENetwork->SetSendDestination(videoChannel,
+                                                  ipAddress, rtpPort);
+        if (error == -1)
+        {
+            printf("ERROR in ViENetwork::SetSendDestination\n");
+            return -1;
+        }
+    }
+
+    error = ptrViEBase->StartReceive(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViENetwork::StartReceive\n");
+        return -1;
+    }
+
+    error = ptrViEBase->StartSend(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViENetwork::StartSend\n");
+        return -1;
+    }
+
+    //********************************************************
+    //  Engine started
+    //********************************************************
+
+
+    // Call started
+    printf("\nLoopback call started\n\n");
+    printf("Press enter to stop...");
+    while ((getchar()) != '\n')
+        ;
+
+    //********************************************************
+    //  Testing finished. Tear down Video Engine
+    //********************************************************
+
+    error = ptrViEBase->StopReceive(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::StopReceive\n");
+        return -1;
+    }
+
+    error = ptrViEBase->StopSend(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::StopSend\n");
+        return -1;
+    }
+
+    error = ptrViERender->StopRender(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StopRender\n");
+        return -1;
+    }
+
+    error = ptrViERender->RemoveRenderer(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::RemoveRenderer\n");
+        return -1;
+    }
+
+    error = ptrViERender->StopRender(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StopRender\n");
+        return -1;
+    }
+
+    error = ptrViERender->RemoveRenderer(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::RemoveRenderer\n");
+        return -1;
+    }
+
+    error = ptrViECapture->StopCapture(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::StopCapture\n");
+        return -1;
+    }
+
+    error = ptrViECapture->DisconnectCaptureDevice(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::DisconnectCaptureDevice\n");
+        return -1;
+    }
+
+    error = ptrViECapture->ReleaseCaptureDevice(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::ReleaseCaptureDevice\n");
+        return -1;
+    }
+
+    error = ptrViEBase->DeleteChannel(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::DeleteChannel\n");
+        return -1;
+    }
+
+    int remainingInterfaces = 0;
+    remainingInterfaces = ptrViECodec->Release();
+    remainingInterfaces += ptrViECapture->Release();
+    remainingInterfaces += ptrViERtpRtcp->Release();
+    remainingInterfaces += ptrViERender->Release();
+    remainingInterfaces += ptrViENetwork->Release();
+    remainingInterfaces += ptrViEBase->Release();
+    if (remainingInterfaces > 0)
+    {
+        printf("ERROR: Could not release all interfaces\n");
+        return -1;
+    }
+
+    bool deleted = webrtc::VideoEngine::Delete(ptrViE);
+    if (deleted == false)
+    {
+        printf("ERROR in VideoEngine::Delete\n");
+        return -1;
+    }
+
+    return 0;
+
+    //
+    // END:  VideoEngine 3.0 Sample Code
+    //
+    // ===================================================================
+}
+
+int ViEAutoTest::ViELoopbackCall()
+{
+    ViETest::Log(" ");
+    ViETest::Log("========================================");
+    ViETest::Log(" ViE Autotest Loopback Call\n");
+
+    if (VideoEngineSampleCode(_window1, _window2) == 0)
+    {
+        ViETest::Log(" ");
+        ViETest::Log(" ViE Autotest Loopback Call Done");
+        ViETest::Log("========================================");
+        ViETest::Log(" ");
+
+        return 0;
+    }
+
+    ViETest::Log(" ");
+    ViETest::Log(" ViE Autotest Loopback Call Failed");
+    ViETest::Log("========================================");
+    ViETest::Log(" ");
+    return 1;
+
+}
diff --git a/src/video_engine/test/auto_test/source/vie_autotest_main.cc b/src/video_engine/test/auto_test/source/vie_autotest_main.cc
new file mode 100644
index 0000000..e71cb88
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_autotest_main.cc
@@ -0,0 +1,191 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vie_autotest_main.h"
+
+#include "gflags/gflags.h"
+#include "gtest/gtest.h"
+#include "vie_autotest.h"
+#include "vie_autotest_window_manager_interface.h"
+#include "vie_window_creator.h"
+
+DEFINE_bool(automated, false, "Run Video engine tests in noninteractive mode.");
+
+static const std::string kStandardTest = "ViEStandardIntegrationTest";
+static const std::string kExtendedTest = "ViEExtendedIntegrationTest";
+static const std::string kApiTest = "ViEApiIntegrationTest";
+
+ViEAutoTestMain::ViEAutoTestMain() {
+  index_to_test_method_map_[1] = "RunsBaseTestWithoutErrors";
+  index_to_test_method_map_[2] = "RunsCaptureTestWithoutErrors";
+  index_to_test_method_map_[3] = "RunsCodecTestWithoutErrors";
+  index_to_test_method_map_[4] = "RunsEncryptionTestWithoutErrors";
+  index_to_test_method_map_[5] = "RunsFileTestWithoutErrors";
+  index_to_test_method_map_[6] = "RunsImageProcessTestWithoutErrors";
+  index_to_test_method_map_[7] = "RunsNetworkTestWithoutErrors";
+  index_to_test_method_map_[8] = "RunsRenderTestWithoutErrors";
+  index_to_test_method_map_[9] = "RunsRtpRtcpTestWithoutErrors";
+}
+
+int ViEAutoTestMain::RunTests(int argc, char** argv) {
+  // Initialize logging.
+  ViETest::Init();
+  // Initialize the testing framework.
+  testing::InitGoogleTest(&argc, argv);
+  // Parse remaining flags:
+  google::ParseCommandLineFlags(&argc, &argv, true);
+
+  int result;
+  if (FLAGS_automated) {
+    // Run in automated mode.
+    result = RUN_ALL_TESTS();
+  } else {
+    // Run in interactive mode.
+    result = RunInteractiveMode();
+  }
+
+  ViETest::Terminate();
+  return result;
+}
+
+int ViEAutoTestMain::AskUserForTestCase() {
+  int choice;
+  std::string answer;
+
+  do {
+    ViETest::Log("\nSpecific tests:");
+    ViETest::Log("\t 0. Go back to previous menu.");
+
+    // Print all test method choices. Assumes that map sorts on its key.
+    int last_valid_choice = 0;
+    std::map<int, std::string>::const_iterator iterator;
+    for (iterator = index_to_test_method_map_.begin();
+        iterator != index_to_test_method_map_.end();
+        ++iterator) {
+      ViETest::Log("\t %d. %s", iterator->first, iterator->second.c_str());
+      last_valid_choice = iterator->first;
+    }
+
+    ViETest::Log("Choose specific test:");
+    choice = AskUserForNumber(0, last_valid_choice);
+  } while (choice == kInvalidChoice);
+
+  return choice;
+}
+
+int ViEAutoTestMain::AskUserForNumber(int min_allowed, int max_allowed) {
+  int result;
+  if (scanf("%d", &result) <= 0) {
+    ViETest::Log("\nPlease enter a number instead, then hit enter.");
+    getchar();
+    return kInvalidChoice;
+  }
+  getchar();  // Consume enter key.
+
+  if (result < min_allowed || result > max_allowed) {
+    ViETest::Log("%d-%d are valid choices. Please try again.", min_allowed,
+                 max_allowed);
+    return kInvalidChoice;
+  }
+
+  return result;
+}
+
+int ViEAutoTestMain::RunTestMatching(const std::string test_case,
+                                     const std::string test_method) {
+  testing::FLAGS_gtest_filter = test_case + "." + test_method;
+  return RUN_ALL_TESTS();
+}
+
+int ViEAutoTestMain::RunSpecificTestCaseIn(const std::string test_case_name)
+{
+  // If user says 0, it means don't run anything.
+  int specific_choice = AskUserForTestCase();
+  if (specific_choice != 0){
+    return RunTestMatching(test_case_name,
+                           index_to_test_method_map_[specific_choice]);
+  }
+  return 0;
+}
+
+int ViEAutoTestMain::RunSpecialTestCase(int choice) {
+  // 7-9 don't run in GTest and need to initialize by themselves.
+  assert(choice >= 7 && choice <= 10);
+
+  // Create the windows
+  ViEWindowCreator windowCreator;
+  ViEAutoTestWindowManagerInterface* windowManager =
+      windowCreator.CreateTwoWindows();
+
+  // Create the test cases
+  ViEAutoTest vieAutoTest(windowManager->GetWindow1(),
+                          windowManager->GetWindow2());
+
+  int errors = 0;
+  switch (choice) {
+    case 7: errors = vieAutoTest.ViELoopbackCall();  break;
+    case 8: errors = vieAutoTest.ViECustomCall();    break;
+    case 9: errors = vieAutoTest.ViESimulcastCall(); break;
+    case 10: errors = vieAutoTest.ViERecordCall(); break;
+  }
+
+  windowCreator.TerminateWindows();
+  return errors;
+}
+
+int ViEAutoTestMain::RunInteractiveMode() {
+  ViETest::Log(" ============================== ");
+  ViETest::Log("    WebRTC ViE 3.x Autotest     ");
+  ViETest::Log(" ============================== \n");
+
+  int choice = 0;
+  int errors = 0;
+  do {
+    ViETest::Log("Test types: ");
+    ViETest::Log("\t 0. Quit");
+    ViETest::Log("\t 1. All standard tests (delivery test)");
+    ViETest::Log("\t 2. All API tests");
+    ViETest::Log("\t 3. All extended test");
+    ViETest::Log("\t 4. Specific standard test");
+    ViETest::Log("\t 5. Specific API test");
+    ViETest::Log("\t 6. Specific extended test");
+    ViETest::Log("\t 7. Simple loopback call");
+    ViETest::Log("\t 8. Custom configure a call");
+    ViETest::Log("\t 9. Simulcast in loopback");
+    ViETest::Log("\t 10. Record");
+    ViETest::Log("Select type of test:");
+
+    choice = AskUserForNumber(0, 10);
+    if (choice == kInvalidChoice) {
+      continue;
+    }
+    switch (choice) {
+      case 0:                                                 break;
+      case 1:  errors = RunTestMatching(kStandardTest, "*");  break;
+      case 2:  errors = RunTestMatching(kApiTest,      "*");  break;
+      case 3:  errors = RunTestMatching(kExtendedTest, "*");  break;
+      case 4:  errors = RunSpecificTestCaseIn(kStandardTest); break;
+      case 5:  errors = RunSpecificTestCaseIn(kApiTest);      break;
+      case 6:  errors = RunSpecificTestCaseIn(kExtendedTest); break;
+      default: errors = RunSpecialTestCase(choice);           break;
+    }
+  } while (choice != 0);
+
+  if (errors) {
+    ViETest::Log("Test done with errors, see ViEAutotestLog.txt for test "
+        "result.\n");
+    return 1;
+  } else {
+    ViETest::Log("Test done without errors, see ViEAutotestLog.txt for "
+        "test result.\n");
+    return 0;
+  }
+}
+
diff --git a/src/video_engine/test/auto_test/source/vie_autotest_network.cc b/src/video_engine/test/auto_test/source/vie_autotest_network.cc
new file mode 100644
index 0000000..eba8a6c
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_autotest_network.cc
@@ -0,0 +1,568 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// vie_autotest_network.cc
+//
+
+#include "vie_autotest_defines.h"
+#include "vie_autotest.h"
+#include "engine_configurations.h"
+
+#include "tb_capture_device.h"
+#include "tb_external_transport.h"
+#include "tb_interfaces.h"
+#include "tb_video_channel.h"
+
+#if defined(_WIN32)
+#include <qos.h>
+#elif defined(WEBRTC_MAC_INTEL)
+
+#endif
+
+class ViEAutoTestNetworkObserver: public webrtc::ViENetworkObserver
+{
+public:
+    ViEAutoTestNetworkObserver()
+    {
+    }
+    virtual ~ViEAutoTestNetworkObserver()
+    {
+    }
+    virtual void OnPeriodicDeadOrAlive(const int videoChannel, const bool alive)
+    {
+    }
+    virtual void PacketTimeout(const int videoChannel,
+                               const webrtc::ViEPacketTimeout timeout)
+    {
+    }
+};
+
+void ViEAutoTest::ViENetworkStandardTest()
+{
+    TbInterfaces ViE("ViENetworkStandardTest"); // Create VIE
+    TbCaptureDevice tbCapture(ViE);
+    {
+        // Create a video channel
+        TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+        tbCapture.ConnectTo(tbChannel.videoChannel);
+
+        RenderCaptureDeviceAndOutputStream(&ViE, &tbChannel, &tbCapture);
+
+        // ***************************************************************
+        // Engine ready. Begin testing class
+        // ***************************************************************
+
+        //
+        // Transport
+        //
+        TbExternalTransport testTransport(*ViE.network);
+        EXPECT_EQ(0, ViE.network->RegisterSendTransport(
+            tbChannel.videoChannel, testTransport));
+        EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
+        EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+        EXPECT_EQ(0, ViE.rtp_rtcp->SetKeyFrameRequestMethod(
+            tbChannel.videoChannel, webrtc::kViEKeyFrameRequestPliRtcp));
+
+        ViETest::Log("Call started using external transport, video should "
+            "see video in both windows\n");
+        AutoTestSleep(KAutoTestSleepTimeMs);
+
+        EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
+        EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+        EXPECT_EQ(0, ViE.network->DeregisterSendTransport(
+            tbChannel.videoChannel));
+
+        char myIpAddress[64];
+        memset(myIpAddress, 0, 64);
+        unsigned short rtpPort = 1234;
+        memcpy(myIpAddress, "127.0.0.1", sizeof("127.0.0.1"));
+        EXPECT_EQ(0, ViE.network->SetLocalReceiver(
+            tbChannel.videoChannel, rtpPort, rtpPort + 1, myIpAddress));
+        EXPECT_EQ(0, ViE.network->SetSendDestination(
+            tbChannel.videoChannel, myIpAddress, rtpPort,
+            rtpPort + 1, rtpPort));
+        EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
+        EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+
+        ViETest::Log("Changed to WebRTC SocketTransport, you should still see "
+                     "video in both windows\n");
+        AutoTestSleep(KAutoTestSleepTimeMs);
+
+        EXPECT_EQ(0, ViE.network->SetSourceFilter(
+            tbChannel.videoChannel, rtpPort + 10, rtpPort + 11, myIpAddress));
+        ViETest::Log("Added UDP port filter for incorrect ports, you should "
+                     "not see video in Window2");
+        AutoTestSleep(2000);
+        EXPECT_EQ(0, ViE.network->SetSourceFilter(
+            tbChannel.videoChannel, rtpPort, rtpPort + 1, "123.1.1.0"));
+        ViETest::Log("Added IP filter for incorrect IP address, you should not "
+                     "see video in Window2");
+        AutoTestSleep(2000);
+        EXPECT_EQ(0, ViE.network->SetSourceFilter(
+            tbChannel.videoChannel, rtpPort, rtpPort + 1, myIpAddress));
+        ViETest::Log("Added IP filter for this computer, you should see video "
+                     "in Window2 again\n");
+        AutoTestSleep(KAutoTestSleepTimeMs);
+
+        tbCapture.Disconnect(tbChannel.videoChannel);
+    }
+}
+
+void ViEAutoTest::ViENetworkExtendedTest()
+{
+    //***************************************************************
+    //	Begin create/initialize WebRTC Video Engine for testing
+    //***************************************************************
+
+    TbInterfaces ViE("ViENetworkExtendedTest"); // Create VIE
+    TbCaptureDevice tbCapture(ViE);
+    EXPECT_EQ(0, ViE.render->AddRenderer(
+        tbCapture.captureId, _window1, 0, 0.0, 0.0, 1.0, 1.0));
+    EXPECT_EQ(0, ViE.render->StartRender(tbCapture.captureId));
+
+    {
+        //
+        // ToS
+        //
+        // Create a video channel
+        TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+        tbCapture.ConnectTo(tbChannel.videoChannel);
+        const char* remoteIp = "192.168.200.1";
+        int DSCP = 0;
+        bool useSetSockOpt = false;
+
+        webrtc::VideoCodec videoCodec;
+        EXPECT_EQ(0, ViE.codec->GetSendCodec(
+            tbChannel.videoChannel, videoCodec));
+        videoCodec.maxFramerate = 5;
+        EXPECT_EQ(0, ViE.codec->SetSendCodec(
+            tbChannel.videoChannel, videoCodec));
+
+        //***************************************************************
+        //	Engine ready. Begin testing class
+        //***************************************************************
+
+        char myIpAddress[64];
+        memset(myIpAddress, 0, 64);
+        unsigned short rtpPort = 9000;
+        EXPECT_EQ(0, ViE.network->GetLocalIP(myIpAddress, false));
+        EXPECT_EQ(0, ViE.network->SetLocalReceiver(
+            tbChannel.videoChannel, rtpPort, rtpPort + 1, myIpAddress));
+        EXPECT_EQ(0, ViE.network->SetSendDestination(
+            tbChannel.videoChannel, remoteIp, rtpPort, rtpPort + 1, rtpPort));
+
+        // ToS
+        int tos_result = ViE.network->SetSendToS(tbChannel.videoChannel, 2);
+        EXPECT_EQ(0, tos_result);
+        if (tos_result != 0)
+        {
+            ViETest::Log("ViESetSendToS error!.");
+            ViETest::Log("You must be admin to run these tests.");
+            ViETest::Log("On Win7 and late Vista, you need to right click the "
+                         "exe and choose");
+            ViETest::Log("\"Run as administrator\"\n");
+            getchar();
+        }
+        EXPECT_EQ(0, ViE.network->GetSendToS(
+            tbChannel.videoChannel, DSCP, useSetSockOpt));  // No ToS set
+
+        EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
+        EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+
+        ViETest::Log("Use Wireshark to capture the outgoing video stream and "
+                     "verify ToS settings\n");
+        ViETest::Log(" DSCP set to 0x%x\n", DSCP);
+        AutoTestSleep(1000);
+
+        EXPECT_EQ(0, ViE.network->SetSendToS(tbChannel.videoChannel, 63));
+        EXPECT_EQ(0, ViE.network->GetSendToS(
+            tbChannel.videoChannel, DSCP, useSetSockOpt));  // No ToS set
+        ViETest::Log(" DSCP set to 0x%x\n", DSCP);
+        AutoTestSleep(1000);
+
+        EXPECT_EQ(0, ViE.network->SetSendToS(tbChannel.videoChannel, 0));
+        EXPECT_EQ(0, ViE.network->SetSendToS(tbChannel.videoChannel, 2, true));
+        EXPECT_EQ(0, ViE.network->GetSendToS(
+            tbChannel.videoChannel, DSCP, useSetSockOpt));  // No ToS set
+        ViETest::Log(" DSCP set to 0x%x\n", DSCP);
+        AutoTestSleep(1000);
+
+        EXPECT_EQ(0, ViE.network->SetSendToS(tbChannel.videoChannel, 63, true));
+        EXPECT_EQ(0, ViE.network->GetSendToS(
+            tbChannel.videoChannel, DSCP, useSetSockOpt));  // No ToS set
+        ViETest::Log(" DSCP set to 0x%x\n", DSCP);
+        AutoTestSleep(1000);
+
+        tbCapture.Disconnect(tbChannel.videoChannel);
+    }
+
+    //***************************************************************
+    //	Testing finished. Tear down Video Engine
+    //***************************************************************
+}
+
+void ViEAutoTest::ViENetworkAPITest()
+{
+    //***************************************************************
+    //	Begin create/initialize WebRTC Video Engine for testing
+    //***************************************************************
+
+    TbInterfaces ViE("ViENetworkAPITest"); // Create VIE
+    {
+        // Create a video channel
+        TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecI420);
+
+        //***************************************************************
+        //	Engine ready. Begin testing class
+        //***************************************************************
+
+        //
+        // External transport
+        //
+        TbExternalTransport testTransport(*ViE.network);
+        EXPECT_EQ(0, ViE.network->RegisterSendTransport(
+            tbChannel.videoChannel, testTransport));
+        EXPECT_NE(0, ViE.network->RegisterSendTransport(
+            tbChannel.videoChannel, testTransport));
+
+        // Create a empty RTP packet.
+        unsigned char packet[3000];
+        memset(packet, 0, sizeof(packet));
+        packet[0] = 0x80; // V=2, P=0, X=0, CC=0
+        packet[1] = 0x7C; // M=0, PT = 124 (I420)
+
+        // Create a empty RTCP app packet.
+        unsigned char rtcpacket[3000];
+        memset(rtcpacket,0, sizeof(rtcpacket));
+        rtcpacket[0] = 0x80; // V=2, P=0, X=0, CC=0
+        rtcpacket[1] = 0xCC; // M=0, PT = 204 (RTCP app)
+        rtcpacket[2] = 0x0;
+        rtcpacket[3] = 0x03; // 3 Octets long.
+
+        EXPECT_NE(0, ViE.network->ReceivedRTPPacket(
+            tbChannel.videoChannel, packet, 1500));
+        EXPECT_NE(0, ViE.network->ReceivedRTCPPacket(
+            tbChannel.videoChannel, rtcpacket, 1500));
+        EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
+        EXPECT_EQ(0, ViE.network->ReceivedRTPPacket(
+            tbChannel.videoChannel, packet, 1500));
+        EXPECT_EQ(0, ViE.network->ReceivedRTCPPacket(
+            tbChannel.videoChannel, rtcpacket, 1500));
+        EXPECT_NE(0, ViE.network->ReceivedRTPPacket(
+            tbChannel.videoChannel, packet, 11));
+        EXPECT_NE(0, ViE.network->ReceivedRTPPacket(
+            tbChannel.videoChannel, packet, 11));
+        EXPECT_EQ(0, ViE.network->ReceivedRTPPacket(
+            tbChannel.videoChannel, packet, 3000));
+        EXPECT_EQ(0, ViE.network->ReceivedRTPPacket(
+            tbChannel.videoChannel, packet, 3000));
+        EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
+        EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+        EXPECT_NE(0, ViE.network->DeregisterSendTransport(
+            tbChannel.videoChannel));  // Sending
+        EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+        EXPECT_EQ(0, ViE.network->DeregisterSendTransport(
+            tbChannel.videoChannel));
+        EXPECT_NE(0, ViE.network->DeregisterSendTransport(
+            tbChannel.videoChannel));  // Already deregistered
+
+        //
+        // Local receiver
+        //
+        EXPECT_EQ(0, ViE.network->SetLocalReceiver(
+            tbChannel.videoChannel, 1234, 1235, "127.0.0.1"));
+        EXPECT_EQ(0, ViE.network->SetLocalReceiver(
+            tbChannel.videoChannel, 1234, 1235, "127.0.0.1"));
+        EXPECT_EQ(0, ViE.network->SetLocalReceiver(
+            tbChannel.videoChannel, 1236, 1237, "127.0.0.1"));
+
+        unsigned short rtpPort = 0;
+        unsigned short rtcpPort = 0;
+        char ipAddress[64];
+        memset(ipAddress, 0, 64);
+        EXPECT_EQ(0, ViE.network->GetLocalReceiver(
+            tbChannel.videoChannel, rtpPort, rtcpPort, ipAddress));
+        EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
+        EXPECT_NE(0, ViE.network->SetLocalReceiver(
+            tbChannel.videoChannel, 1234, 1235, "127.0.0.1"));
+        EXPECT_EQ(0, ViE.network->GetLocalReceiver(
+            tbChannel.videoChannel, rtpPort, rtcpPort, ipAddress));
+        EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
+
+        //
+        // Send destination
+        //
+        EXPECT_EQ(0, ViE.network->SetSendDestination(
+            tbChannel.videoChannel, "127.0.0.1", 1234, 1235, 1234, 1235));
+        EXPECT_EQ(0, ViE.network->SetSendDestination(
+            tbChannel.videoChannel, "127.0.0.1", 1236, 1237, 1234, 1235));
+
+        unsigned short sourceRtpPort = 0;
+        unsigned short sourceRtcpPort = 0;
+        EXPECT_EQ(0, ViE.network->GetSendDestination(
+            tbChannel.videoChannel, ipAddress, rtpPort, rtcpPort,
+            sourceRtpPort, sourceRtcpPort));
+        EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+
+        // Not allowed while sending
+        EXPECT_NE(0, ViE.network->SetSendDestination(
+            tbChannel.videoChannel, "127.0.0.1", 1234, 1235, 1234, 1235));
+        EXPECT_EQ(kViENetworkAlreadySending, ViE.base->LastError());
+
+        EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+        EXPECT_EQ(0, ViE.network->SetSendDestination(
+            tbChannel.videoChannel, "127.0.0.1", 1234, 1235, 1234, 1235));
+        EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+        EXPECT_EQ(0, ViE.network->GetSendDestination(
+            tbChannel.videoChannel, ipAddress, rtpPort, rtcpPort,
+            sourceRtpPort, sourceRtcpPort));
+        EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+
+        //
+        // Address information
+        //
+
+        // GetSourceInfo: Tested in functional test
+        EXPECT_EQ(0, ViE.network->GetLocalIP(ipAddress, false));
+
+        // TODO(unknown): IPv6
+
+        //
+        // Filter
+        //
+        EXPECT_NE(0, ViE.network->GetSourceFilter(
+            tbChannel.videoChannel, rtpPort, rtcpPort, ipAddress));
+        EXPECT_EQ(0, ViE.network->SetSourceFilter(
+            tbChannel.videoChannel, 1234, 1235, "10.10.10.10"));
+        EXPECT_EQ(0, ViE.network->SetSourceFilter(
+            tbChannel.videoChannel, 1236, 1237, "127.0.0.1"));
+        EXPECT_EQ(0, ViE.network->GetSourceFilter(
+            tbChannel.videoChannel, rtpPort, rtcpPort, ipAddress));
+        EXPECT_EQ(0, ViE.network->SetSourceFilter(
+            tbChannel.videoChannel, 0, 0, NULL));
+        EXPECT_NE(0, ViE.network->GetSourceFilter(
+            tbChannel.videoChannel, rtpPort, rtcpPort, ipAddress));
+    }
+    {
+        TbVideoChannel tbChannel(ViE);  // Create a video channel
+        EXPECT_EQ(0, ViE.network->SetLocalReceiver(
+            tbChannel.videoChannel, 1234));
+
+        int DSCP = 0;
+        bool useSetSockOpt = false;
+        // SetSockOpt should work without a locally bind socket
+        EXPECT_EQ(0, ViE.network->GetSendToS(
+            tbChannel.videoChannel, DSCP, useSetSockOpt));  // No ToS set
+        EXPECT_EQ(0, DSCP);
+
+        // Invalid input
+        EXPECT_NE(0, ViE.network->SetSendToS(tbChannel.videoChannel, -1, true));
+
+        // Invalid input
+        EXPECT_NE(0, ViE.network->SetSendToS(tbChannel.videoChannel, 64, true));
+
+        // Valid
+        EXPECT_EQ(0, ViE.network->SetSendToS(tbChannel.videoChannel, 20, true));
+        EXPECT_EQ(0, ViE.network->GetSendToS(
+            tbChannel.videoChannel, DSCP, useSetSockOpt));
+
+        EXPECT_EQ(20, DSCP);
+        EXPECT_TRUE(useSetSockOpt);
+
+        // Disable
+        EXPECT_EQ(0, ViE.network->SetSendToS(tbChannel.videoChannel, 0, true));
+        EXPECT_EQ(0, ViE.network->GetSendToS(
+            tbChannel.videoChannel, DSCP, useSetSockOpt));
+        EXPECT_EQ(0, DSCP);
+
+        char myIpAddress[64];
+        memset(myIpAddress, 0, 64);
+        // Get local ip to be able to set ToS withtou setSockOpt
+        EXPECT_EQ(0, ViE.network->GetLocalIP(myIpAddress, false));
+        EXPECT_EQ(0, ViE.network->SetLocalReceiver(
+            tbChannel.videoChannel, 1234, 1235, myIpAddress));
+
+        // Invalid input
+        EXPECT_NE(0, ViE.network->SetSendToS(
+            tbChannel.videoChannel, -1, false));
+        EXPECT_NE(0, ViE.network->SetSendToS(
+            tbChannel.videoChannel, 64, false));  // Invalid input
+        EXPECT_EQ(0, ViE.network->GetSendToS(
+            tbChannel.videoChannel, DSCP, useSetSockOpt));  // No ToS set
+        EXPECT_EQ(0, DSCP);
+        int tos_result = ViE.network->SetSendToS(
+            tbChannel.videoChannel, 20, false);  // Valid
+        EXPECT_EQ(0, tos_result);
+        if (tos_result != 0)
+        {
+            ViETest::Log("ViESetSendToS error!.");
+            ViETest::Log("You must be admin to run these tests.");
+            ViETest::Log("On Win7 and late Vista, you need to right click the "
+                         "exe and choose");
+            ViETest::Log("\"Run as administrator\"\n");
+            getchar();
+        }
+        EXPECT_EQ(0, ViE.network->GetSendToS(
+            tbChannel.videoChannel, DSCP, useSetSockOpt));
+        EXPECT_EQ(20, DSCP);
+#ifdef _WIN32
+        EXPECT_FALSE(useSetSockOpt);
+#else // useSetSockOpt is true on Linux and Mac
+        EXPECT_TRUE(useSetSockOpt);
+#endif
+        EXPECT_EQ(0, ViE.network->SetSendToS(tbChannel.videoChannel, 0, false));
+        EXPECT_EQ(0, ViE.network->GetSendToS(
+            tbChannel.videoChannel, DSCP, useSetSockOpt));
+        EXPECT_EQ(0, DSCP);
+    }
+    {
+        // From qos.h. (*) -> supported by ViE
+        //
+        //  #define SERVICETYPE_NOTRAFFIC               0x00000000
+        //  #define SERVICETYPE_BESTEFFORT              0x00000001 (*)
+        //  #define SERVICETYPE_CONTROLLEDLOAD          0x00000002 (*)
+        //  #define SERVICETYPE_GUARANTEED              0x00000003 (*)
+        //  #define SERVICETYPE_NETWORK_UNAVAILABLE     0x00000004
+        //  #define SERVICETYPE_GENERAL_INFORMATION     0x00000005
+        //  #define SERVICETYPE_NOCHANGE                0x00000006
+        //  #define SERVICETYPE_NONCONFORMING           0x00000009
+        //  #define SERVICETYPE_NETWORK_CONTROL         0x0000000A
+        //  #define SERVICETYPE_QUALITATIVE             0x0000000D (*)
+        //
+        //  #define SERVICE_BESTEFFORT                  0x80010000
+        //  #define SERVICE_CONTROLLEDLOAD              0x80020000
+        //  #define SERVICE_GUARANTEED                  0x80040000
+        //  #define SERVICE_QUALITATIVE                 0x80200000
+
+        TbVideoChannel tbChannel(ViE);  // Create a video channel
+
+
+#if defined(_WIN32)
+        // No socket
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_BESTEFFORT));
+
+        EXPECT_EQ(0, ViE.network->SetLocalReceiver(
+            tbChannel.videoChannel, 1234));
+
+        // Sender not initialized
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_BESTEFFORT));
+        EXPECT_EQ(0, ViE.network->SetSendDestination(
+            tbChannel.videoChannel, "127.0.0.1", 12345));
+
+        // Try to set all non-supported service types
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_NOTRAFFIC));
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_NETWORK_UNAVAILABLE));
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_GENERAL_INFORMATION));
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_NOCHANGE));
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_NONCONFORMING));
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_NOTRAFFIC));
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_NETWORK_CONTROL));
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICE_BESTEFFORT));
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICE_CONTROLLEDLOAD));
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICE_GUARANTEED));
+        EXPECT_NE(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICE_QUALITATIVE));
+
+        // Loop through valid service settings
+        bool enabled = false;
+        int serviceType = 0;
+        int overrideDSCP = 0;
+
+        EXPECT_EQ(0, ViE.network->GetSendGQoS(
+            tbChannel.videoChannel, enabled, serviceType, overrideDSCP));
+        EXPECT_FALSE(enabled);
+        EXPECT_EQ(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_BESTEFFORT));
+        EXPECT_EQ(0, ViE.network->GetSendGQoS(
+            tbChannel.videoChannel, enabled, serviceType, overrideDSCP));
+        EXPECT_TRUE(enabled);
+        EXPECT_EQ(SERVICETYPE_BESTEFFORT, serviceType);
+        EXPECT_FALSE(overrideDSCP);
+
+        EXPECT_EQ(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_CONTROLLEDLOAD));
+        EXPECT_EQ(0, ViE.network->GetSendGQoS(
+            tbChannel.videoChannel, enabled, serviceType, overrideDSCP));
+        EXPECT_TRUE(enabled);
+        EXPECT_EQ(SERVICETYPE_CONTROLLEDLOAD, serviceType);
+        EXPECT_FALSE(overrideDSCP);
+
+        EXPECT_EQ(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_GUARANTEED));
+        EXPECT_EQ(0, ViE.network->GetSendGQoS(
+            tbChannel.videoChannel, enabled, serviceType, overrideDSCP));
+        EXPECT_TRUE(enabled);
+        EXPECT_EQ(SERVICETYPE_GUARANTEED, serviceType);
+        EXPECT_FALSE(overrideDSCP);
+
+        EXPECT_EQ(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, true, SERVICETYPE_QUALITATIVE));
+        EXPECT_EQ(0, ViE.network->GetSendGQoS(
+            tbChannel.videoChannel, enabled, serviceType, overrideDSCP));
+        EXPECT_TRUE(enabled);
+        EXPECT_EQ(SERVICETYPE_QUALITATIVE, serviceType);
+        EXPECT_FALSE(overrideDSCP);
+
+        EXPECT_EQ(0, ViE.network->SetSendGQoS(
+            tbChannel.videoChannel, false, SERVICETYPE_QUALITATIVE));
+        EXPECT_EQ(0, ViE.network->GetSendGQoS(
+            tbChannel.videoChannel, enabled, serviceType, overrideDSCP));
+        EXPECT_FALSE(enabled);
+#endif
+    }
+    {
+        //
+        // MTU and packet burst
+        //
+        // Create a video channel
+        TbVideoChannel tbChannel(ViE);
+        // Invalid input
+        EXPECT_NE(0, ViE.network->SetMTU(tbChannel.videoChannel, 1600));
+        // Valid input
+        EXPECT_EQ(0, ViE.network->SetMTU(tbChannel.videoChannel, 800));
+
+        //
+        // Observer and timeout
+        //
+        ViEAutoTestNetworkObserver vieTestObserver;
+        EXPECT_EQ(0, ViE.network->RegisterObserver(
+            tbChannel.videoChannel, vieTestObserver));
+        EXPECT_NE(0, ViE.network->RegisterObserver(
+            tbChannel.videoChannel, vieTestObserver));
+        EXPECT_EQ(0, ViE.network->SetPeriodicDeadOrAliveStatus(
+            tbChannel.videoChannel, true)); // No observer
+        EXPECT_EQ(0, ViE.network->DeregisterObserver(tbChannel.videoChannel));
+
+        EXPECT_NE(0, ViE.network->DeregisterObserver(tbChannel.videoChannel));
+        EXPECT_NE(0, ViE.network->SetPeriodicDeadOrAliveStatus(
+            tbChannel.videoChannel, true)); // No observer
+
+        // Packet timout notification
+        EXPECT_EQ(0, ViE.network->SetPacketTimeoutNotification(
+            tbChannel.videoChannel, true, 10));
+    }
+
+    //***************************************************************
+    //	Testing finished. Tear down Video Engine
+    //***************************************************************
+}
diff --git a/src/video_engine/test/auto_test/source/vie_autotest_record.cc b/src/video_engine/test/auto_test/source/vie_autotest_record.cc
new file mode 100644
index 0000000..9c423a7
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_autotest_record.cc
@@ -0,0 +1,594 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// vie_autotest_record.cc
+//
+// This code is also used as sample code for ViE 3.0
+//
+
+#include <stdio.h>
+#include <fstream>
+
+#include "common_types.h"
+#include "video_engine/test/libvietest/include/tb_external_transport.h"
+#include "voice_engine/include/voe_base.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_defines.h"
+#include "video_engine/test/auto_test/interface/vie_autotest.h"
+#include "video_engine/include/vie_base.h"
+#include "video_engine/include/vie_capture.h"
+#include "video_engine/include/vie_codec.h"
+#include "video_engine/include/vie_file.h"
+#include "video_engine/include/vie_network.h"
+#include "video_engine/include/vie_render.h"
+#include "video_engine/include/vie_rtp_rtcp.h"
+#include "voice_engine/include/voe_rtp_rtcp.h"
+#include "system_wrappers/interface/tick_util.h"
+
+#define VCM_RED_PAYLOAD_TYPE            96
+#define VCM_ULPFEC_PAYLOAD_TYPE         97
+#define DEFAULT_AUDIO_PORT              11113
+#define DEFAULT_AUDIO_CODEC             "ISAC"
+#define DEFAULT_VIDEO_CODEC_WIDTH       640
+#define DEFAULT_VIDEO_CODEC_HEIGHT      480
+#define DEFAULT_VIDEO_CODEC_START_RATE  1000
+#define DEFAULT_RECORDING_FOLDER        "RECORDING"
+#define DEFAULT_RECORDING_AUDIO         "/audio_debug.aec"
+#define DEFAULT_RECORDING_VIDEO         "/video_debug.yuv"
+#define DEFAULT_RECORDING_AUDIO_RTP     "/audio_rtpdump.rtp"
+#define DEFAULT_RECORDING_VIDEO_RTP     "/video_rtpdump.rtp"
+
+bool GetAudioDevices(webrtc::VoEBase* voe_base,
+                     webrtc::VoEHardware* voe_hardware,
+                     char* recording_device_name,
+                     int& recording_device_index,
+                     char* playbackDeviceName,
+                     int& playback_device_index);
+bool GetAudioCodecRecord(webrtc::VoECodec* voe_codec,
+                         webrtc::CodecInst& audio_codec);
+
+int VideoEngineSampleRecordCode(void* window1, void* window2) {
+  int error = 0;
+  // Audio settings.
+  int audio_tx_port = DEFAULT_AUDIO_PORT;
+  int audio_rx_port = DEFAULT_AUDIO_PORT;
+  webrtc::CodecInst audio_codec;
+  int audio_channel = -1;
+  int audio_capture_device_index = -1;
+  int audio_playback_device_index = -1;
+  const unsigned int KMaxDeviceNameLength = 128;
+  const unsigned int KMaxUniqueIdLength = 256;
+  char deviceName[KMaxDeviceNameLength];
+  char audio_capture_device_name[KMaxUniqueIdLength] = "";
+  char audio_playbackDeviceName[KMaxUniqueIdLength] = "";
+
+  // Network settings.
+  const char* ipAddress = "127.0.0.1";
+  const int rtpPort = 6000;
+
+  //
+  // Create a VideoEngine instance
+  //
+  webrtc::VideoEngine* ptrViE = NULL;
+  ptrViE = webrtc::VideoEngine::Create();
+  if (ptrViE == NULL) {
+    printf("ERROR in VideoEngine::Create\n");
+    return -1;
+  }
+
+  error = ptrViE->SetTraceFilter(webrtc::kTraceAll);
+  if (error == -1) {
+    printf("ERROR in VideoEngine::SetTraceLevel\n");
+    return -1;
+  }
+
+  std::string trace_file =
+    ViETest::GetResultOutputPath() + "ViERecordCall_trace.txt";
+  error = ptrViE->SetTraceFile(trace_file.c_str());
+  if (error == -1) {
+    printf("ERROR in VideoEngine::SetTraceFile\n");
+    return -1;
+  }
+
+  //
+  // Create a VoE instance
+  //
+  webrtc::VoiceEngine* voe = webrtc::VoiceEngine::Create();
+  //
+  // Init VideoEngine and create a channel
+  //
+  webrtc::ViEBase* ptrViEBase = webrtc::ViEBase::GetInterface(ptrViE);
+  if (ptrViEBase == NULL) {
+    printf("ERROR in ViEBase::GetInterface\n");
+    return -1;
+  }
+
+  error = ptrViEBase->Init();
+  if (error == -1) {
+    printf("ERROR in ViEBase::Init\n");
+    return -1;
+  }
+
+  webrtc::VoEBase* voe_base = webrtc::VoEBase::GetInterface(voe);
+  if (voe_base == NULL) {
+    printf("ERROR in VoEBase::GetInterface\n");
+    return -1;
+  }
+  error = voe_base->Init();
+  if (error == -1) {
+    printf("ERROR in VoEBase::Init\n");
+    return -1;
+  }
+
+  int videoChannel = -1;
+  error = ptrViEBase->CreateChannel(videoChannel);
+  if (error == -1) {
+    printf("ERROR in ViEBase::CreateChannel\n");
+    return -1;
+  }
+
+  webrtc::VoEHardware* voe_hardware =
+    webrtc::VoEHardware::GetInterface(voe);
+  webrtc::VoECodec* voe_codec = webrtc::VoECodec::GetInterface(voe);
+  webrtc::VoEAudioProcessing* voe_apm =
+       webrtc::VoEAudioProcessing::GetInterface(voe);
+
+  // Get the audio device for the call.
+  memset(audio_capture_device_name, 0, KMaxUniqueIdLength);
+  memset(audio_playbackDeviceName, 0, KMaxUniqueIdLength);
+  GetAudioDevices(voe_base, voe_hardware, audio_capture_device_name,
+                  audio_capture_device_index, audio_playbackDeviceName,
+                  audio_playback_device_index);
+
+
+  // Get the audio codec for the call.
+  memset(static_cast<void*>(&audio_codec), 0, sizeof(audio_codec));
+  GetAudioCodecRecord(voe_codec, audio_codec);
+
+  audio_channel = voe_base->CreateChannel();
+  error = voe_base->SetSendDestination(audio_channel, audio_tx_port,
+                                        ipAddress);
+  error = voe_base->SetLocalReceiver(audio_channel, audio_rx_port);
+  error = voe_hardware->SetRecordingDevice(audio_capture_device_index);
+  error = voe_hardware->SetPlayoutDevice(audio_playback_device_index);
+  error = voe_codec->SetSendCodec(audio_channel, audio_codec);
+  error = voe_apm->SetAgcStatus(true, webrtc::kAgcDefault);
+  error = voe_apm->SetNsStatus(true, webrtc::kNsHighSuppression);
+
+  //
+  // List available capture devices, allocate and connect.
+  //
+  webrtc::ViECapture* ptrViECapture =
+      webrtc::ViECapture::GetInterface(ptrViE);
+  if (ptrViECapture == NULL) {
+    printf("ERROR in ViECapture::GetInterface\n");
+    return -1;
+  }
+
+  webrtc::VoERTP_RTCP* ptrVoERtpRtcp =
+    webrtc::VoERTP_RTCP::GetInterface(voe);
+  if (ptrVoERtpRtcp == NULL) {
+    printf("ERROR in VoERTP_RTCP::GetInterface\n");
+    return -1;
+  }
+
+  memset(deviceName, 0, KMaxDeviceNameLength);
+  char uniqueId[KMaxUniqueIdLength];
+  memset(uniqueId, 0, KMaxUniqueIdLength);
+
+  printf("Available capture devices:\n");
+  int captureIdx = 0;
+  for (captureIdx = 0;
+       captureIdx < ptrViECapture->NumberOfCaptureDevices();
+       captureIdx++) {
+    memset(deviceName, 0, KMaxDeviceNameLength);
+    memset(uniqueId, 0, KMaxUniqueIdLength);
+
+    error = ptrViECapture->GetCaptureDevice(captureIdx, deviceName,
+                                            KMaxDeviceNameLength, uniqueId,
+                                            KMaxUniqueIdLength);
+    if (error == -1) {
+      printf("ERROR in ViECapture::GetCaptureDevice\n");
+      return -1;
+    }
+    printf("\t %d. %s\n", captureIdx + 1, deviceName);
+  }
+  printf("\nChoose capture device: ");
+#ifdef WEBRTC_ANDROID
+  captureIdx = 0;
+  printf("0\n");
+#else
+  if (scanf("%d", &captureIdx) != 1) {
+    printf("Error in scanf()\n");
+    return -1;
+  }
+  getchar();
+  captureIdx = captureIdx - 1;  // Compensate for idx start at 1.
+#endif
+  error = ptrViECapture->GetCaptureDevice(captureIdx, deviceName,
+                                          KMaxDeviceNameLength, uniqueId,
+                                          KMaxUniqueIdLength);
+  if (error == -1) {
+    printf("ERROR in ViECapture::GetCaptureDevice\n");
+    return -1;
+  }
+
+  int captureId = 0;
+  error = ptrViECapture->AllocateCaptureDevice(uniqueId, KMaxUniqueIdLength,
+                                               captureId);
+  if (error == -1) {
+    printf("ERROR in ViECapture::AllocateCaptureDevice\n");
+    return -1;
+  }
+
+  error = ptrViECapture->ConnectCaptureDevice(captureId, videoChannel);
+  if (error == -1) {
+    printf("ERROR in ViECapture::ConnectCaptureDevice\n");
+    return -1;
+  }
+
+  error = ptrViECapture->StartCapture(captureId);
+  if (error == -1) {
+    printf("ERROR in ViECapture::StartCapture\n");
+    return -1;
+  }
+
+  //
+  // RTP/RTCP settings
+  //
+  webrtc::ViERTP_RTCP* ptrViERtpRtcp =
+      webrtc::ViERTP_RTCP::GetInterface(ptrViE);
+  if (ptrViERtpRtcp == NULL) {
+    printf("ERROR in ViERTP_RTCP::GetInterface\n");
+    return -1;
+  }
+
+  error = ptrViERtpRtcp->SetRTCPStatus(videoChannel,
+                                       webrtc::kRtcpCompound_RFC4585);
+  if (error == -1) {
+    printf("ERROR in ViERTP_RTCP::SetRTCPStatus\n");
+    return -1;
+  }
+
+  error = ptrViERtpRtcp->SetKeyFrameRequestMethod(
+      videoChannel, webrtc::kViEKeyFrameRequestPliRtcp);
+  if (error == -1) {
+    printf("ERROR in ViERTP_RTCP::SetKeyFrameRequestMethod\n");
+    return -1;
+  }
+
+  error = ptrViERtpRtcp->SetRembStatus(videoChannel, true, true);
+  if (error == -1) {
+    printf("ERROR in ViERTP_RTCP::SetTMMBRStatus\n");
+    return -1;
+  }
+
+  //
+  // Set up rendering
+  //
+  webrtc::ViERender* ptrViERender = webrtc::ViERender::GetInterface(ptrViE);
+  if (ptrViERender == NULL) {
+    printf("ERROR in ViERender::GetInterface\n");
+    return -1;
+  }
+
+  error = ptrViERender->AddRenderer(captureId, window1, 0, 0.0, 0.0, 1.0, 1.0);
+  if (error == -1) {
+    printf("ERROR in ViERender::AddRenderer\n");
+    return -1;
+  }
+
+  error = ptrViERender->StartRender(captureId);
+  if (error == -1) {
+    printf("ERROR in ViERender::StartRender\n");
+    return -1;
+  }
+
+  error = ptrViERender->AddRenderer(videoChannel, window2, 1, 0.0, 0.0, 1.0,
+                                    1.0);
+  if (error == -1) {
+    printf("ERROR in ViERender::AddRenderer\n");
+    return -1;
+  }
+
+  error = ptrViERender->StartRender(videoChannel);
+  if (error == -1) {
+    printf("ERROR in ViERender::StartRender\n");
+    return -1;
+  }
+
+  //
+  // Setup codecs
+  //
+  webrtc::ViECodec* ptrViECodec = webrtc::ViECodec::GetInterface(ptrViE);
+  if (ptrViECodec == NULL) {
+    printf("ERROR in ViECodec::GetInterface\n");
+    return -1;
+  }
+
+  webrtc::VideoCodec videoCodec;
+  memset(&videoCodec, 0, sizeof(webrtc::VideoCodec));
+  int codecIdx = 0;
+
+#ifdef WEBRTC_ANDROID
+  codecIdx = 0;
+  printf("0\n");
+#else
+  codecIdx = 0;  // Compensate for idx start at 1.
+#endif
+
+  error = ptrViECodec->GetCodec(codecIdx, videoCodec);
+  if (error == -1) {
+     printf("ERROR in ViECodec::GetCodec\n");
+     return -1;
+  }
+
+  // Set spatial resolution option
+  videoCodec.width = DEFAULT_VIDEO_CODEC_WIDTH;
+  videoCodec.height = DEFAULT_VIDEO_CODEC_HEIGHT;
+
+  // Set start bit rate
+  videoCodec.startBitrate = DEFAULT_VIDEO_CODEC_START_RATE;
+
+  error = ptrViECodec->SetSendCodec(videoChannel, videoCodec);
+  if (error == -1) {
+    printf("ERROR in ViECodec::SetSendCodec\n");
+    return -1;
+  }
+
+  //
+  // Address settings
+  //
+  webrtc::ViENetwork* ptrViENetwork =
+      webrtc::ViENetwork::GetInterface(ptrViE);
+  if (ptrViENetwork == NULL) {
+    printf("ERROR in ViENetwork::GetInterface\n");
+    return -1;
+  }
+
+  // Setting External transport
+  TbExternalTransport extTransport(*(ptrViENetwork));
+  error = ptrViENetwork->SetLocalReceiver(videoChannel, rtpPort);
+  if (error == -1) {
+    printf("ERROR in ViENetwork::SetLocalReceiver\n");
+    return -1;
+  }
+  error = ptrViENetwork->SetSendDestination(videoChannel,
+                                            ipAddress, rtpPort);
+  if (error == -1) {
+    printf("ERROR in ViENetwork::SetSendDestination\n");
+    return -1;
+  }
+
+  std::string str;
+  int enable_labeling = 0;
+  std::cout << std::endl;
+  std::cout << "Do you want to label this recording?" << std::endl;
+  std::cout << "0. No (default)." << std::endl;
+  std::cout << "1. This call will be labeled on the fly." << std::endl;
+  std::getline(std::cin, str);
+  enable_labeling = atoi(str.c_str());
+
+  uint32_t folder_time = static_cast<uint32_t>
+    (webrtc::TickTime::MillisecondTimestamp());
+  std::stringstream folder_time_str;
+  folder_time_str <<  folder_time;
+  const std::string folder_name = "recording" + folder_time_str.str();
+  printf("recording name = %s\n", folder_name.c_str());
+  // TODO(mikhal): use file_utils.
+#ifdef WIN32
+  _mkdir(folder_name.c_str());
+#else
+  mkdir(folder_name.c_str(), 0777);
+#endif
+  const std::string audio_filename =  folder_name + DEFAULT_RECORDING_AUDIO;
+  const std::string video_filename =  folder_name + DEFAULT_RECORDING_VIDEO;
+  const std::string audio_rtp_filename = folder_name +
+    DEFAULT_RECORDING_AUDIO_RTP;
+  const std::string video_rtp_filename = folder_name +
+    DEFAULT_RECORDING_VIDEO_RTP;
+  std::fstream timing;
+  if (enable_labeling == 1) {
+    std::cout << "Press enter to stamp current time."<< std::endl;
+    std::string timing_file = folder_name + "/labeling.txt";
+    timing.open(timing_file.c_str(), std::fstream::out | std::fstream::app);
+  }
+  printf("\nPress enter to start recording\n");
+  std::getline(std::cin, str);
+  printf("\nRecording started\n\n");
+
+  error = ptrViEBase->StartReceive(videoChannel);
+  if (error == -1) {
+    printf("ERROR in ViENetwork::StartReceive\n");
+    return -1;
+  }
+
+  error = ptrViEBase->StartSend(videoChannel);
+  if (error == -1) {
+    printf("ERROR in ViENetwork::StartSend\n");
+    return -1;
+  }
+  error = voe_base->StartSend(audio_channel);
+  if (error == -1) {
+    printf("ERROR in VoENetwork::StartSend\n");
+    return -1;
+  }
+
+  // Get file interface (video recording)
+  webrtc::ViEFile* vie_file = webrtc::ViEFile::GetInterface(ptrViE);
+  //  Engine started
+
+  voe_apm->StartDebugRecording(audio_filename.c_str());
+  vie_file->StartDebugRecording(videoChannel, video_filename.c_str());
+  ptrViERtpRtcp->StartRTPDump(videoChannel,
+                              video_rtp_filename.c_str(), webrtc::kRtpOutgoing);
+  ptrVoERtpRtcp->StartRTPDump(audio_channel,
+                              audio_rtp_filename.c_str(), webrtc::kRtpOutgoing);
+  printf("Press s + enter to stop...");
+  int64_t clock_time;
+  if (enable_labeling == 1) {
+    clock_time = webrtc::TickTime::MillisecondTimestamp();
+    timing << clock_time << std::endl;
+  }
+  char c = getchar();
+  fflush(stdin);
+  while (c != 's') {
+    if (c == '\n' && enable_labeling == 1) {
+      clock_time = webrtc::TickTime::MillisecondTimestamp();
+      timing << clock_time << std::endl;
+    }
+    c = getchar();
+  }
+  if (enable_labeling == 1) {
+    clock_time = webrtc::TickTime::MillisecondTimestamp();
+    timing << clock_time << std::endl;
+  }
+
+  ptrViERtpRtcp->StopRTPDump(videoChannel, webrtc::kRtpOutgoing);
+  ptrVoERtpRtcp->StopRTPDump(audio_channel, webrtc::kRtpOutgoing);
+  voe_apm->StopDebugRecording();
+  vie_file->StopDebugRecording(videoChannel);
+  if (enable_labeling == 1)
+    timing.close();
+
+  //  Recording finished. Tear down Video Engine.
+
+  error = ptrViEBase->StopReceive(videoChannel);
+  if (error == -1) {
+    printf("ERROR in ViEBase::StopReceive\n");
+    return -1;
+  }
+
+  error = ptrViEBase->StopSend(videoChannel);
+  if (error == -1) {
+    printf("ERROR in ViEBase::StopSend\n");
+    return -1;
+  }
+  error = voe_base->StopSend(audio_channel);
+
+  error = ptrViERender->StopRender(captureId);
+  if (error == -1) {
+    printf("ERROR in ViERender::StopRender\n");
+    return -1;
+  }
+
+  error = ptrViERender->RemoveRenderer(captureId);
+  if (error == -1) {
+    printf("ERROR in ViERender::RemoveRenderer\n");
+    return -1;
+  }
+
+  error = ptrViERender->StopRender(videoChannel);
+  if (error == -1) {
+    printf("ERROR in ViERender::StopRender\n");
+    return -1;
+  }
+
+  error = ptrViERender->RemoveRenderer(videoChannel);
+  if (error == -1) {
+    printf("ERROR in ViERender::RemoveRenderer\n");
+    return -1;
+  }
+
+  error = ptrViECapture->StopCapture(captureId);
+  if (error == -1) {
+    printf("ERROR in ViECapture::StopCapture\n");
+    return -1;
+  }
+
+  error = ptrViECapture->DisconnectCaptureDevice(videoChannel);
+  if (error == -1) {
+    printf("ERROR in ViECapture::DisconnectCaptureDevice\n");
+    return -1;
+  }
+
+  error = ptrViECapture->ReleaseCaptureDevice(captureId);
+  if (error == -1) {
+    printf("ERROR in ViECapture::ReleaseCaptureDevice\n");
+    return -1;
+  }
+
+  error = ptrViEBase->DeleteChannel(videoChannel);
+  if (error == -1) {
+    printf("ERROR in ViEBase::DeleteChannel\n");
+    return -1;
+  }
+
+  int remainingInterfaces = 0;
+  remainingInterfaces = ptrViECodec->Release();
+  remainingInterfaces += ptrViECapture->Release();
+  remainingInterfaces += ptrViERtpRtcp->Release();
+  remainingInterfaces += ptrViERender->Release();
+  remainingInterfaces += ptrViENetwork->Release();
+  remainingInterfaces += ptrViEBase->Release();
+  if (remainingInterfaces > 0) {
+    printf("ERROR: Could not release all interfaces\n");
+    return -1;
+  }
+
+  bool deleted = webrtc::VideoEngine::Delete(ptrViE);
+  if (deleted == false) {
+    printf("ERROR in VideoEngine::Delete\n");
+    return -1;
+  }
+  return 0;
+}
+
+
+// TODO(mikhal): Place above functionality under this class.
+int ViEAutoTest::ViERecordCall() {
+  ViETest::Log(" ");
+  ViETest::Log("========================================");
+  ViETest::Log(" ViE Record Call\n");
+
+  if (VideoEngineSampleRecordCode(_window1, _window2) == 0) {
+    ViETest::Log(" ");
+    ViETest::Log(" ViE Autotest Record Call Done");
+    ViETest::Log("========================================");
+    ViETest::Log(" ");
+    return 0;
+  }
+
+  ViETest::Log(" ");
+  ViETest::Log(" ViE Autotest Record Call Failed");
+  ViETest::Log("========================================");
+  ViETest::Log(" ");
+  return 1;
+}
+
+bool GetAudioCodecRecord(webrtc::VoECodec* voe_codec,
+                       webrtc::CodecInst& audio_codec) {
+  int error = 0;
+  int number_of_errors = 0;
+  memset(&audio_codec, 0, sizeof(webrtc::CodecInst));
+
+  while (1) {
+    int codec_idx = 0;
+    int default_codec_idx = 0;
+    for (codec_idx = 0; codec_idx < voe_codec->NumOfCodecs(); codec_idx++) {
+      error = voe_codec->GetCodec(codec_idx, audio_codec);
+      number_of_errors += ViETest::TestError(error == 0,
+                                             "ERROR: %s at line %d",
+                                             __FUNCTION__, __LINE__);
+
+      // Test for default codec index.
+      if (strcmp(audio_codec.plname, DEFAULT_AUDIO_CODEC) == 0) {
+        default_codec_idx = codec_idx;
+      }
+    }
+    error = voe_codec->GetCodec(default_codec_idx, audio_codec);
+    number_of_errors += ViETest::TestError(error == 0,
+                                           "ERROR: %s at line %d",
+                                           __FUNCTION__, __LINE__);
+    return true;
+  }
+  assert(false);
+  return false;
+}
diff --git a/src/video_engine/test/auto_test/source/vie_autotest_render.cc b/src/video_engine/test/auto_test/source/vie_autotest_render.cc
new file mode 100644
index 0000000..0c8ca22
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_autotest_render.cc
@@ -0,0 +1,295 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// vie_autotest_render.cc
+//
+
+#include "vie_autotest_defines.h"
+#include "vie_autotest.h"
+#include "engine_configurations.h"
+
+#include "video_render.h"
+
+#include "tb_interfaces.h"
+#include "tb_video_channel.h"
+#include "tb_capture_device.h"
+
+#if defined(WIN32)
+#include <windows.h>
+#include <ddraw.h>
+#include <tchar.h>
+#elif defined(WEBRTC_LINUX)
+    //From windgi.h
+    #undef RGB
+    #define RGB(r,g,b)          ((unsigned long)(((unsigned char)(r)|((unsigned short)((unsigned char)(g))<<8))|(((unsigned long)(unsigned char)(b))<<16)))
+    //From ddraw.h
+/*    typedef struct _DDCOLORKEY
+ {
+ DWORD       dwColorSpaceLowValue;   // low boundary of color space that is to
+ DWORD       dwColorSpaceHighValue;  // high boundary of color space that is
+ } DDCOLORKEY;*/
+#elif defined(WEBRTC_MAC)
+#endif
+
+class ViEAutoTestExternalRenderer: public webrtc::ExternalRenderer
+{
+public:
+    ViEAutoTestExternalRenderer() :
+        _width(0),
+        _height(0)
+    {
+    }
+    virtual int FrameSizeChange(unsigned int width, unsigned int height,
+                                unsigned int numberOfStreams)
+    {
+        _width = width;
+        _height = height;
+        return 0;
+    }
+
+    virtual int DeliverFrame(unsigned char* buffer, int bufferSize,
+                             uint32_t time_stamp,
+                             int64_t render_time)
+    {
+        if (bufferSize != _width * _height * 3 / 2)
+        {
+            ViETest::Log("incorrect render buffer received, of length = %d\n",
+                         bufferSize);
+            return 0;
+        }
+        return 0;
+    }
+
+public:
+    virtual ~ViEAutoTestExternalRenderer()
+    {
+    }
+private:
+    int _width, _height;
+};
+
+void ViEAutoTest::ViERenderStandardTest()
+{
+    //***************************************************************
+    //	Begin create/initialize WebRTC Video Engine for testing
+    //***************************************************************
+    int rtpPort = 6000;
+
+    TbInterfaces ViE("ViERenderStandardTest");
+
+    // Create a video channel
+    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+    TbCaptureDevice tbCapture(ViE); // Create a capture device
+    tbCapture.ConnectTo(tbChannel.videoChannel);
+    tbChannel.StartReceive(rtpPort);
+    tbChannel.StartSend(rtpPort);
+
+    EXPECT_EQ(0, ViE.render->RegisterVideoRenderModule(*_vrm1));
+    EXPECT_EQ(0, ViE.render->AddRenderer(
+        tbCapture.captureId, _window1, 0, 0.0, 0.0, 1.0, 1.0));
+    EXPECT_EQ(0, ViE.render->StartRender(tbCapture.captureId));
+    EXPECT_EQ(0, ViE.render->RegisterVideoRenderModule(*_vrm2));
+    EXPECT_EQ(0, ViE.render->AddRenderer(
+        tbChannel.videoChannel, _window2, 1, 0.0, 0.0, 1.0, 1.0));
+    EXPECT_EQ(0, ViE.render->StartRender(tbChannel.videoChannel));
+
+    ViETest::Log("\nCapture device is renderered in Window 1");
+    ViETest::Log("Remote stream is renderered in Window 2");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, ViE.render->StopRender(tbCapture.captureId));
+    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbCapture.captureId));
+
+    // PIP and full screen rendering is not supported on Android
+#ifndef WEBRTC_ANDROID
+    EXPECT_EQ(0, ViE.render->DeRegisterVideoRenderModule(*_vrm1));
+    EXPECT_EQ(0, ViE.render->AddRenderer(
+        tbCapture.captureId, _window2, 0, 0.75, 0.75, 1.0, 1.0));
+    EXPECT_EQ(0, ViE.render->StartRender(tbCapture.captureId));
+
+    ViETest::Log("\nCapture device is now rendered in Window 2, PiP.");
+    ViETest::Log("Switching to full screen rendering in %d seconds.\n",
+                 KAutoTestSleepTimeMs / 1000);
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbCapture.captureId));
+    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.render->DeRegisterVideoRenderModule(*_vrm2));
+
+    // Destroy render module and create new in full screen mode
+    webrtc::VideoRender::DestroyVideoRender(_vrm1);
+    _vrm1 = NULL;
+    _vrm1 = webrtc::VideoRender::CreateVideoRender(
+        4563, _window1, true, _renderType);
+    EXPECT_TRUE(_vrm1 != NULL);
+
+    EXPECT_EQ(0, ViE.render->RegisterVideoRenderModule(*_vrm1));
+    EXPECT_EQ(0, ViE.render->AddRenderer(
+        tbCapture.captureId, _window1, 0, 0.75f, 0.75f, 1.0f, 1.0f));
+    EXPECT_EQ(0, ViE.render->StartRender(tbCapture.captureId));
+    EXPECT_EQ(0, ViE.render->AddRenderer(
+        tbChannel.videoChannel, _window1, 1, 0.0, 0.0, 1.0, 1.0));
+    EXPECT_EQ(0, ViE.render->StartRender(tbChannel.videoChannel));
+
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbCapture.captureId));
+
+    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.render->DeRegisterVideoRenderModule(*_vrm1));
+
+    // Destroy full screen render module and create new in normal mode
+    webrtc::VideoRender::DestroyVideoRender(_vrm1);
+    _vrm1 = NULL;
+    _vrm1 = webrtc::VideoRender::CreateVideoRender(
+        4561, _window1, false, _renderType);
+    EXPECT_TRUE(_vrm1 != NULL);
+#endif
+
+    //***************************************************************
+    //	Engine ready. Begin testing class
+    //***************************************************************
+
+
+    //***************************************************************
+    //	Testing finished. Tear down Video Engine
+    //***************************************************************
+    tbCapture.Disconnect(tbChannel.videoChannel);
+}
+
+void ViEAutoTest::ViERenderExtendedTest()
+{
+    int rtpPort = 6000;
+
+    TbInterfaces ViE("ViERenderExtendedTest");
+
+    // Create a video channel
+    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+    TbCaptureDevice tbCapture(ViE); // Create a capture device
+    tbCapture.ConnectTo(tbChannel.videoChannel);
+    tbChannel.StartReceive(rtpPort);
+    tbChannel.StartSend(rtpPort);
+
+    EXPECT_EQ(0, ViE.render->RegisterVideoRenderModule(*_vrm1));
+    EXPECT_EQ(0, ViE.render->AddRenderer(
+        tbCapture.captureId, _window1, 0, 0.0, 0.0, 1.0, 1.0));
+    EXPECT_EQ(0, ViE.render->StartRender(tbCapture.captureId));
+    EXPECT_EQ(0, ViE.render->RegisterVideoRenderModule(*_vrm2));
+    EXPECT_EQ(0, ViE.render->AddRenderer(
+        tbChannel.videoChannel, _window2, 1, 0.0, 0.0, 1.0, 1.0));
+    EXPECT_EQ(0, ViE.render->StartRender(tbChannel.videoChannel));
+
+    ViETest::Log("\nCapture device is renderered in Window 1");
+    ViETest::Log("Remote stream is renderered in Window 2");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+#ifdef _WIN32
+    ViETest::Log("\nConfiguring Window2");
+    ViETest::Log("you will see video only in first quadrant");
+    EXPECT_EQ(0, ViE.render->ConfigureRender(
+        tbChannel.videoChannel, 0, 0.0f, 0.0f, 0.5f, 0.5f));
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    ViETest::Log("you will see video only in fourth quadrant");
+    EXPECT_EQ(0, ViE.render->ConfigureRender(
+        tbChannel.videoChannel, 0, 0.5f, 0.5f, 1.0f, 1.0f));
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    ViETest::Log("normal video on Window2");
+    EXPECT_EQ(0, ViE.render->ConfigureRender(
+        tbChannel.videoChannel, 0, 0.0f, 0.0f, 1.0f, 1.0f));
+    AutoTestSleep(KAutoTestSleepTimeMs);
+#endif
+
+    ViETest::Log("Mirroring Local Preview (Window1) Left-Right");
+    EXPECT_EQ(0, ViE.render->MirrorRenderStream(
+        tbCapture.captureId, true, false, true));
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    ViETest::Log("\nMirroring Local Preview (Window1) Left-Right and Up-Down");
+    EXPECT_EQ(0, ViE.render->MirrorRenderStream(
+        tbCapture.captureId, true, true, true));
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    ViETest::Log("\nMirroring Remote Window(Window2) Up-Down");
+    EXPECT_EQ(0, ViE.render->MirrorRenderStream(
+        tbChannel.videoChannel, true, true, false));
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    ViETest::Log("Disabling Mirroing on Window1 and Window2");
+    EXPECT_EQ(0, ViE.render->MirrorRenderStream(
+        tbCapture.captureId, false, false, false));
+    AutoTestSleep(KAutoTestSleepTimeMs);
+    EXPECT_EQ(0, ViE.render->MirrorRenderStream(
+        tbChannel.videoChannel, false, false, false));
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    ViETest::Log("\nEnabling Full Screen render in 5 sec");
+
+    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbCapture.captureId));
+    EXPECT_EQ(0, ViE.render->DeRegisterVideoRenderModule(*_vrm1));
+    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.render->DeRegisterVideoRenderModule(*_vrm2));
+
+    // Destroy render module and create new in full screen mode
+    webrtc::VideoRender::DestroyVideoRender(_vrm1);
+    _vrm1 = NULL;
+    _vrm1 = webrtc::VideoRender::CreateVideoRender(
+        4563, _window1, true, _renderType);
+    EXPECT_TRUE(_vrm1 != NULL);
+
+    EXPECT_EQ(0, ViE.render->RegisterVideoRenderModule(*_vrm1));
+    EXPECT_EQ(0, ViE.render->AddRenderer(
+        tbCapture.captureId, _window1, 0, 0.0f, 0.0f, 1.0f, 1.0f));
+    EXPECT_EQ(0, ViE.render->StartRender(tbCapture.captureId));
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    ViETest::Log("\nStop renderer");
+    EXPECT_EQ(0, ViE.render->StopRender(tbCapture.captureId));
+    ViETest::Log("\nRemove renderer");
+    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbCapture.captureId));
+
+    EXPECT_EQ(0, ViE.render->DeRegisterVideoRenderModule(*_vrm1));
+
+    // Destroy full screen render module and create new for external rendering
+    webrtc::VideoRender::DestroyVideoRender(_vrm1);
+    _vrm1 = NULL;
+    _vrm1 = webrtc::VideoRender::CreateVideoRender(4564, NULL, false,
+                                                   _renderType);
+    EXPECT_TRUE(_vrm1 != NULL);
+
+    EXPECT_EQ(0, ViE.render->RegisterVideoRenderModule(*_vrm1));
+
+    ViETest::Log("\nExternal Render Test");
+    ViEAutoTestExternalRenderer externalRenderObj;
+    EXPECT_EQ(0, ViE.render->AddRenderer(
+        tbCapture.captureId, webrtc::kVideoI420, &externalRenderObj));
+    EXPECT_EQ(0, ViE.render->StartRender(tbCapture.captureId));
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, ViE.render->StopRender(tbCapture.captureId));
+    EXPECT_EQ(0, ViE.render->RemoveRenderer(tbCapture.captureId));
+    EXPECT_EQ(0, ViE.render->DeRegisterVideoRenderModule(*_vrm1));
+
+    // Destroy render module for external rendering and create new in normal
+    // mode
+    webrtc::VideoRender::DestroyVideoRender(_vrm1);
+    _vrm1 = NULL;
+    _vrm1 = webrtc::VideoRender::CreateVideoRender(
+        4561, _window1, false, _renderType);
+    EXPECT_TRUE(_vrm1 != NULL);
+    tbCapture.Disconnect(tbChannel.videoChannel);
+}
+
+void ViEAutoTest::ViERenderAPITest()
+{
+    // TODO(unknown): add the real tests cases
+}
diff --git a/src/video_engine/test/auto_test/source/vie_autotest_rtp_rtcp.cc b/src/video_engine/test/auto_test/source/vie_autotest_rtp_rtcp.cc
new file mode 100644
index 0000000..573a58a
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_autotest_rtp_rtcp.cc
@@ -0,0 +1,654 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// vie_autotest_rtp_rtcp.cc
+//
+#include <iostream>
+
+#include "engine_configurations.h"
+#include "tb_capture_device.h"
+#include "tb_external_transport.h"
+#include "tb_interfaces.h"
+#include "tb_video_channel.h"
+#include "testsupport/fileutils.h"
+#include "vie_autotest.h"
+#include "vie_autotest_defines.h"
+
+class ViERtpObserver: public webrtc::ViERTPObserver
+{
+public:
+    ViERtpObserver()
+    {
+    }
+    virtual ~ViERtpObserver()
+    {
+    }
+
+    virtual void IncomingSSRCChanged(const int videoChannel,
+                                     const unsigned int SSRC)
+    {
+    }
+    virtual void IncomingCSRCChanged(const int videoChannel,
+                                     const unsigned int CSRC, const bool added)
+    {
+    }
+};
+
+class ViERtcpObserver: public webrtc::ViERTCPObserver
+{
+public:
+    int _channel;
+    unsigned char _subType;
+    unsigned int _name;
+    char* _data;
+    unsigned short _dataLength;
+
+    ViERtcpObserver() :
+        _channel(-1),
+        _subType(0),
+        _name(-1),
+        _data(NULL),
+        _dataLength(0)
+    {
+    }
+    ~ViERtcpObserver()
+    {
+        if (_data)
+        {
+            delete[] _data;
+        }
+    }
+    virtual void OnApplicationDataReceived(
+        const int videoChannel, const unsigned char subType,
+        const unsigned int name, const char* data,
+        const unsigned short dataLengthInBytes)
+    {
+        _channel = videoChannel;
+        _subType = subType;
+        _name = name;
+        if (dataLengthInBytes > _dataLength)
+        {
+            delete[] _data;
+            _data = NULL;
+        }
+        if (_data == NULL)
+        {
+            _data = new char[dataLengthInBytes];
+        }
+        memcpy(_data, data, dataLengthInBytes);
+        _dataLength = dataLengthInBytes;
+    }
+};
+
+void ViEAutoTest::ViERtpRtcpStandardTest()
+{
+    // ***************************************************************
+    // Begin create/initialize WebRTC Video Engine for testing
+    // ***************************************************************
+
+    // Create VIE
+    TbInterfaces ViE("ViERtpRtcpStandardTest");
+    // Create a video channel
+    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+
+    // Create a capture device
+    TbCaptureDevice tbCapture(ViE);
+    tbCapture.ConnectTo(tbChannel.videoChannel);
+
+    ViETest::Log("\n");
+    TbExternalTransport myTransport(*(ViE.network));
+
+    EXPECT_EQ(0, ViE.network->RegisterSendTransport(
+        tbChannel.videoChannel, myTransport));
+
+    // ***************************************************************
+    // Engine ready. Begin testing class
+    // ***************************************************************
+    unsigned short startSequenceNumber = 12345;
+    ViETest::Log("Set start sequence number: %u", startSequenceNumber);
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetStartSequenceNumber(
+        tbChannel.videoChannel, startSequenceNumber));
+
+    myTransport.EnableSequenceNumberCheck();
+
+    EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+
+    AutoTestSleep(2000);
+
+    unsigned short receivedSequenceNumber =
+        myTransport.GetFirstSequenceNumber();
+    ViETest::Log("First received sequence number: %u\n",
+                 receivedSequenceNumber);
+    EXPECT_EQ(startSequenceNumber, receivedSequenceNumber);
+
+    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+
+    //
+    // RTCP CName
+    //
+    ViETest::Log("Testing CName\n");
+    const char* sendCName = "ViEAutoTestCName\0";
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetRTCPCName(tbChannel.videoChannel, sendCName));
+
+    char returnCName[webrtc::ViERTP_RTCP::KMaxRTCPCNameLength];
+    memset(returnCName, 0, webrtc::ViERTP_RTCP::KMaxRTCPCNameLength);
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetRTCPCName(
+        tbChannel.videoChannel, returnCName));
+    EXPECT_STRCASEEQ(sendCName, returnCName);
+
+    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+
+    AutoTestSleep(1000);
+
+    char remoteCName[webrtc::ViERTP_RTCP::KMaxRTCPCNameLength];
+    memset(remoteCName, 0, webrtc::ViERTP_RTCP::KMaxRTCPCNameLength);
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetRemoteRTCPCName(
+        tbChannel.videoChannel, remoteCName));
+    EXPECT_STRCASEEQ(sendCName, remoteCName);
+
+    //
+    //  Statistics
+    //
+    // Stop and restart to clear stats
+    ViETest::Log("Testing statistics\n");
+    EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+
+    myTransport.ClearStats();
+    int rate = 20;
+    myTransport.SetPacketLoss(rate);
+
+    // Start send to verify sending stats
+
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetStartSequenceNumber(
+        tbChannel.videoChannel, startSequenceNumber));
+    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
+
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    unsigned short sentFractionsLost = 0;
+    unsigned int sentCumulativeLost = 0;
+    unsigned int sentExtendedMax = 0;
+    unsigned int sentJitter = 0;
+    int sentRttMs = 0;
+    unsigned short recFractionsLost = 0;
+    unsigned int recCumulativeLost = 0;
+    unsigned int recExtendedMax = 0;
+    unsigned int recJitter = 0;
+    int recRttMs = 0;
+
+    unsigned int sentTotalBitrate = 0;
+    unsigned int sentVideoBitrate = 0;
+    unsigned int sentFecBitrate = 0;
+    unsigned int sentNackBitrate = 0;
+
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetBandwidthUsage(
+        tbChannel.videoChannel, sentTotalBitrate, sentVideoBitrate,
+        sentFecBitrate, sentNackBitrate));
+
+    EXPECT_GT(sentTotalBitrate, 0u);
+    EXPECT_EQ(sentFecBitrate, 0u);
+    EXPECT_EQ(sentNackBitrate, 0u);
+
+    EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
+
+    AutoTestSleep(2000);
+
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetSentRTCPStatistics(
+        tbChannel.videoChannel, sentFractionsLost, sentCumulativeLost,
+        sentExtendedMax, sentJitter, sentRttMs));
+    EXPECT_GT(sentCumulativeLost, 0u);
+    EXPECT_GT(sentExtendedMax, startSequenceNumber);
+    EXPECT_GT(sentJitter, 0u);
+    EXPECT_GT(sentRttMs, 0);
+
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetReceivedRTCPStatistics(
+        tbChannel.videoChannel, recFractionsLost, recCumulativeLost,
+        recExtendedMax, recJitter, recRttMs));
+
+    EXPECT_GT(recCumulativeLost, 0u);
+    EXPECT_GT(recExtendedMax, startSequenceNumber);
+    EXPECT_GT(recJitter, 0u);
+    EXPECT_GT(recRttMs, 0);
+
+    unsigned int estimated_bandwidth = 0;
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetEstimatedSendBandwidth(
+        tbChannel.videoChannel,
+        &estimated_bandwidth));
+    EXPECT_GT(estimated_bandwidth, 0u);
+
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetEstimatedReceiveBandwidth(
+        tbChannel.videoChannel,
+        &estimated_bandwidth));
+    EXPECT_GT(estimated_bandwidth, 0u);
+
+    // Check that rec stats extended max is greater than what we've sent.
+    EXPECT_GE(recExtendedMax, sentExtendedMax);
+    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+
+    //
+    // Test bandwidth statistics with NACK and FEC separately
+    //
+
+    myTransport.ClearStats();
+    myTransport.SetPacketLoss(rate);
+
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetFECStatus(
+        tbChannel.videoChannel, true, 96, 97));
+    EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetBandwidthUsage(
+        tbChannel.videoChannel, sentTotalBitrate, sentVideoBitrate,
+         sentFecBitrate, sentNackBitrate));
+
+    EXPECT_GT(sentTotalBitrate, 0u);
+    EXPECT_GE(sentFecBitrate, 10u);
+    EXPECT_EQ(sentNackBitrate, 0u);
+
+    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetFECStatus(
+        tbChannel.videoChannel, false, 96, 97));
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetNACKStatus(tbChannel.videoChannel, true));
+    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetBandwidthUsage(
+        tbChannel.videoChannel, sentTotalBitrate, sentVideoBitrate,
+        sentFecBitrate, sentNackBitrate));
+
+    // TODO(holmer): Write a non-flaky verification of this API.
+    // numberOfErrors += ViETest::TestError(sentTotalBitrate > 0 &&
+    //                                      sentFecBitrate == 0 &&
+    //                                      sentNackBitrate > 0,
+    //                                      "ERROR: %s at line %d",
+    //                                      __FUNCTION__, __LINE__);
+
+    EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetNACKStatus(tbChannel.videoChannel, false));
+
+
+    // Test to set SSRC
+    myTransport.SetPacketLoss(0);
+    myTransport.ClearStats();
+
+    unsigned int setSSRC = 0x01234567;
+    ViETest::Log("Set SSRC %u", setSSRC);
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetLocalSSRC(tbChannel.videoChannel, setSSRC));
+    EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+
+    myTransport.EnableSSRCCheck();
+
+    AutoTestSleep(2000);
+    unsigned int receivedSSRC = myTransport.ReceivedSSRC();
+    ViETest::Log("Received SSRC %u\n", receivedSSRC);
+    EXPECT_EQ(setSSRC, receivedSSRC);
+
+    unsigned int localSSRC = 0;
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetLocalSSRC(tbChannel.videoChannel, localSSRC));
+    EXPECT_EQ(setSSRC, localSSRC);
+
+    unsigned int remoteSSRC = 0;
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetRemoteSSRC(
+        tbChannel.videoChannel, remoteSSRC));
+    EXPECT_EQ(setSSRC, remoteSSRC);
+
+    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+
+    ViETest::Log("Testing RTP dump...\n");
+
+    std::string inDumpName =
+        ViETest::GetResultOutputPath() + "IncomingRTPDump.rtp";
+    std::string outDumpName =
+        ViETest::GetResultOutputPath() + "OutgoingRTPDump.rtp";
+    EXPECT_EQ(0, ViE.rtp_rtcp->StartRTPDump(
+        tbChannel.videoChannel, inDumpName.c_str(), webrtc::kRtpIncoming));
+    EXPECT_EQ(0, ViE.rtp_rtcp->StartRTPDump(
+        tbChannel.videoChannel, outDumpName.c_str(), webrtc::kRtpOutgoing));
+
+    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+
+    AutoTestSleep(1000);
+
+    EXPECT_EQ(0, ViE.rtp_rtcp->StopRTPDump(
+        tbChannel.videoChannel, webrtc::kRtpIncoming));
+    EXPECT_EQ(0, ViE.rtp_rtcp->StopRTPDump(
+        tbChannel.videoChannel, webrtc::kRtpOutgoing));
+
+    // Make sure data was actually saved to the file and we stored the same
+    // amount of data in both files
+    FILE* inDump = fopen(inDumpName.c_str(), "r");
+    fseek(inDump, 0L, SEEK_END);
+    long inEndPos = ftell(inDump);
+    fclose(inDump);
+    FILE* outDump = fopen(outDumpName.c_str(), "r");
+    fseek(outDump, 0L, SEEK_END);
+    // long outEndPos = ftell(outDump);
+    fclose(outDump);
+
+    EXPECT_GT(inEndPos, 0);
+
+    // TODO(phoglund): This is flaky for some reason. Are the sleeps too
+    // short above?
+    // EXPECT_LT(inEndPos, outEndPos + 100);
+
+    // Deregister external transport
+    EXPECT_EQ(0, ViE.network->DeregisterSendTransport(tbChannel.videoChannel));
+
+
+    //***************************************************************
+    //  Testing finished. Tear down Video Engine
+    //***************************************************************
+}
+
+void ViEAutoTest::ViERtpRtcpExtendedTest()
+{
+    //***************************************************************
+    //  Begin create/initialize WebRTC Video Engine for testing
+    //***************************************************************
+    // Create VIE
+    TbInterfaces ViE("ViERtpRtcpExtendedTest");
+    // Create a video channel
+    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+    // Create a capture device
+    TbCaptureDevice tbCapture(ViE);
+    tbCapture.ConnectTo(tbChannel.videoChannel);
+
+    //tbChannel.StartReceive(rtpPort);
+    //tbChannel.StartSend(rtpPort);
+    TbExternalTransport myTransport(*(ViE.network));
+
+    EXPECT_EQ(0, ViE.network->RegisterSendTransport(
+        tbChannel.videoChannel, myTransport));
+    EXPECT_EQ(0, ViE.base->StartReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.base->StartSend(tbChannel.videoChannel));
+
+    //***************************************************************
+    //  Engine ready. Begin testing class
+    //***************************************************************
+
+    //
+    // Application specific RTCP
+    //
+    //
+
+    ViERtcpObserver rtcpObserver;
+    EXPECT_EQ(0, ViE.rtp_rtcp->RegisterRTCPObserver(
+        tbChannel.videoChannel, rtcpObserver));
+
+    unsigned char subType = 3;
+    unsigned int name = static_cast<unsigned int> (0x41424344); // 'ABCD';
+    const char* data = "ViEAutoTest Data of length 32 -\0";
+    const unsigned short numBytes = 32;
+
+    EXPECT_EQ(0, ViE.rtp_rtcp->SendApplicationDefinedRTCPPacket(
+        tbChannel.videoChannel, subType, name, data, numBytes));
+
+    ViETest::Log("Sending RTCP application data...\n");
+    AutoTestSleep(KAutoTestSleepTimeMs);
+
+    EXPECT_EQ(subType, rtcpObserver._subType);
+    EXPECT_STRCASEEQ(data, rtcpObserver._data);
+    EXPECT_EQ(name, rtcpObserver._name);
+    EXPECT_EQ(numBytes, rtcpObserver._dataLength);
+
+    ViETest::Log("\t RTCP application data received\n");
+
+    //***************************************************************
+    //  Testing finished. Tear down Video Engine
+    //***************************************************************
+    EXPECT_EQ(0, ViE.base->StopReceive(tbChannel.videoChannel));
+    EXPECT_EQ(0, ViE.base->StopSend(tbChannel.videoChannel));
+
+    EXPECT_EQ(0, ViE.network->DeregisterSendTransport(tbChannel.videoChannel));
+}
+
+void ViEAutoTest::ViERtpRtcpAPITest()
+{
+    //***************************************************************
+    //  Begin create/initialize WebRTC Video Engine for testing
+    //***************************************************************
+    // Create VIE
+    TbInterfaces ViE("ViERtpRtcpAPITest");
+    // Create a video channel
+    TbVideoChannel tbChannel(ViE, webrtc::kVideoCodecVP8);
+    // Create a capture device
+    TbCaptureDevice tbCapture(ViE);
+    tbCapture.ConnectTo(tbChannel.videoChannel);
+
+    //***************************************************************
+    //  Engine ready. Begin testing class
+    //***************************************************************
+
+    //
+    // Check different RTCP modes
+    //
+    webrtc::ViERTCPMode rtcpMode = webrtc::kRtcpNone;
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetRTCPStatus(
+        tbChannel.videoChannel, rtcpMode));
+    EXPECT_EQ(webrtc::kRtcpCompound_RFC4585, rtcpMode);
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetRTCPStatus(
+        tbChannel.videoChannel, webrtc::kRtcpCompound_RFC4585));
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetRTCPStatus(
+        tbChannel.videoChannel, rtcpMode));
+    EXPECT_EQ(webrtc::kRtcpCompound_RFC4585, rtcpMode);
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetRTCPStatus(
+        tbChannel.videoChannel, webrtc::kRtcpNonCompound_RFC5506));
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetRTCPStatus(
+        tbChannel.videoChannel, rtcpMode));
+    EXPECT_EQ(webrtc::kRtcpNonCompound_RFC5506, rtcpMode);
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetRTCPStatus(
+        tbChannel.videoChannel, webrtc::kRtcpNone));
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetRTCPStatus(
+        tbChannel.videoChannel, rtcpMode));
+    EXPECT_EQ(webrtc::kRtcpNone, rtcpMode);
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetRTCPStatus(
+        tbChannel.videoChannel, webrtc::kRtcpCompound_RFC4585));
+
+    //
+    // CName is testedn in SimpleTest
+    // Start sequence number is tested in SimplTEst
+    //
+    const char* testCName = "ViEAutotestCName";
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetRTCPCName(
+        tbChannel.videoChannel, testCName));
+
+    char returnCName[256];
+    memset(returnCName, 0, 256);
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetRTCPCName(
+        tbChannel.videoChannel, returnCName));
+    EXPECT_STRCASEEQ(testCName, returnCName);
+
+    //
+    // SSRC
+    //
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetLocalSSRC(
+        tbChannel.videoChannel, 0x01234567));
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetLocalSSRC(
+        tbChannel.videoChannel, 0x76543210));
+
+    unsigned int ssrc = 0;
+    EXPECT_EQ(0, ViE.rtp_rtcp->GetLocalSSRC(tbChannel.videoChannel, ssrc));
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetStartSequenceNumber(
+        tbChannel.videoChannel, 1000));
+    tbChannel.StartSend();
+    EXPECT_NE(0, ViE.rtp_rtcp->SetStartSequenceNumber(
+        tbChannel.videoChannel, 12345));
+    tbChannel.StopSend();
+
+    //
+    // Start sequence number
+    //
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetStartSequenceNumber(
+        tbChannel.videoChannel, 12345));
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetStartSequenceNumber(
+        tbChannel.videoChannel, 1000));
+    tbChannel.StartSend();
+    EXPECT_NE(0, ViE.rtp_rtcp->SetStartSequenceNumber(
+        tbChannel.videoChannel, 12345));
+    tbChannel.StopSend();
+
+    //
+    // Application specific RTCP
+    //
+    {
+        unsigned char subType = 3;
+        unsigned int name = static_cast<unsigned int> (0x41424344); // 'ABCD';
+        const char* data = "ViEAutoTest Data of length 32 --";
+        const unsigned short numBytes = 32;
+
+        tbChannel.StartSend();
+        EXPECT_EQ(0, ViE.rtp_rtcp->SendApplicationDefinedRTCPPacket(
+            tbChannel.videoChannel, subType, name, data, numBytes));
+        EXPECT_NE(0, ViE.rtp_rtcp->SendApplicationDefinedRTCPPacket(
+            tbChannel.videoChannel, subType, name, NULL, numBytes)) <<
+                "Should fail on NULL input.";
+        EXPECT_NE(0, ViE.rtp_rtcp->SendApplicationDefinedRTCPPacket(
+            tbChannel.videoChannel, subType, name, data, numBytes - 1)) <<
+                "Should fail on incorrect length.";
+
+        EXPECT_EQ(0, ViE.rtp_rtcp->GetRTCPStatus(
+            tbChannel.videoChannel, rtcpMode));
+        EXPECT_EQ(0, ViE.rtp_rtcp->SendApplicationDefinedRTCPPacket(
+            tbChannel.videoChannel, subType, name, data, numBytes));
+        EXPECT_EQ(0, ViE.rtp_rtcp->SetRTCPStatus(
+            tbChannel.videoChannel, webrtc::kRtcpCompound_RFC4585));
+        tbChannel.StopSend();
+        EXPECT_NE(0, ViE.rtp_rtcp->SendApplicationDefinedRTCPPacket(
+            tbChannel.videoChannel, subType, name, data, numBytes));
+    }
+
+    //
+    // Statistics
+    //
+    // Tested in SimpleTest(), we'll get errors if we haven't received a RTCP
+    // packet.
+
+    //
+    // RTP Dump
+    //
+    {
+        std::string output_file = webrtc::test::OutputPath() +
+            "DumpFileName.rtp";
+        const char* dumpName = output_file.c_str();
+
+        EXPECT_EQ(0, ViE.rtp_rtcp->StartRTPDump(
+            tbChannel.videoChannel, dumpName, webrtc::kRtpIncoming));
+        EXPECT_EQ(0, ViE.rtp_rtcp->StopRTPDump(
+            tbChannel.videoChannel, webrtc::kRtpIncoming));
+        EXPECT_NE(0, ViE.rtp_rtcp->StopRTPDump(
+            tbChannel.videoChannel, webrtc::kRtpIncoming));
+        EXPECT_EQ(0, ViE.rtp_rtcp->StartRTPDump(
+            tbChannel.videoChannel, dumpName, webrtc::kRtpOutgoing));
+        EXPECT_EQ(0, ViE.rtp_rtcp->StopRTPDump(
+            tbChannel.videoChannel, webrtc::kRtpOutgoing));
+        EXPECT_NE(0, ViE.rtp_rtcp->StopRTPDump(
+            tbChannel.videoChannel, webrtc::kRtpOutgoing));
+        EXPECT_NE(0, ViE.rtp_rtcp->StartRTPDump(
+            tbChannel.videoChannel, dumpName, (webrtc::RTPDirections) 3));
+    }
+    //
+    // RTP/RTCP Observers
+    //
+    {
+        ViERtpObserver rtpObserver;
+        EXPECT_EQ(0, ViE.rtp_rtcp->RegisterRTPObserver(
+            tbChannel.videoChannel, rtpObserver));
+        EXPECT_NE(0, ViE.rtp_rtcp->RegisterRTPObserver(
+            tbChannel.videoChannel, rtpObserver));
+        EXPECT_EQ(0, ViE.rtp_rtcp->DeregisterRTPObserver(
+            tbChannel.videoChannel));
+        EXPECT_NE(0, ViE.rtp_rtcp->DeregisterRTPObserver(
+            tbChannel.videoChannel));
+
+        ViERtcpObserver rtcpObserver;
+        EXPECT_EQ(0, ViE.rtp_rtcp->RegisterRTCPObserver(
+            tbChannel.videoChannel, rtcpObserver));
+        EXPECT_NE(0, ViE.rtp_rtcp->RegisterRTCPObserver(
+            tbChannel.videoChannel, rtcpObserver));
+        EXPECT_EQ(0, ViE.rtp_rtcp->DeregisterRTCPObserver(
+            tbChannel.videoChannel));
+        EXPECT_NE(0, ViE.rtp_rtcp->DeregisterRTCPObserver(
+            tbChannel.videoChannel));
+    }
+    //
+    // PLI
+    //
+    {
+        EXPECT_EQ(0, ViE.rtp_rtcp->SetKeyFrameRequestMethod(
+            tbChannel.videoChannel, webrtc::kViEKeyFrameRequestPliRtcp));
+        EXPECT_EQ(0, ViE.rtp_rtcp->SetKeyFrameRequestMethod(
+            tbChannel.videoChannel, webrtc::kViEKeyFrameRequestPliRtcp));
+        EXPECT_EQ(0, ViE.rtp_rtcp->SetKeyFrameRequestMethod(
+            tbChannel.videoChannel, webrtc::kViEKeyFrameRequestNone));
+        EXPECT_EQ(0, ViE.rtp_rtcp->SetKeyFrameRequestMethod(
+            tbChannel.videoChannel, webrtc::kViEKeyFrameRequestNone));
+    }
+    //
+    // NACK
+    //
+    {
+      EXPECT_EQ(0, ViE.rtp_rtcp->SetNACKStatus(tbChannel.videoChannel, true));
+    }
+
+    // Timsetamp offset extension.
+    // Valid range is 1 to 14 inclusive.
+    EXPECT_EQ(-1, ViE.rtp_rtcp->SetSendTimestampOffsetStatus(
+        tbChannel.videoChannel, true, 0));
+    EXPECT_EQ(-1, ViE.rtp_rtcp->SetSendTimestampOffsetStatus(
+        tbChannel.videoChannel, true, 15));
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetSendTimestampOffsetStatus(
+        tbChannel.videoChannel, true, 3));
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetSendTimestampOffsetStatus(
+        tbChannel.videoChannel, true, 3));
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetSendTimestampOffsetStatus(
+            tbChannel.videoChannel, false, 3));
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetSendTimestampOffsetStatus(
+        tbChannel.videoChannel, true, 3));
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetSendTimestampOffsetStatus(
+              tbChannel.videoChannel, false, 3));
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetSendTimestampOffsetStatus(
+            tbChannel.videoChannel, false, 3));
+
+    EXPECT_EQ(-1, ViE.rtp_rtcp->SetReceiveTimestampOffsetStatus(
+        tbChannel.videoChannel, true, 0));
+    EXPECT_EQ(-1, ViE.rtp_rtcp->SetReceiveTimestampOffsetStatus(
+        tbChannel.videoChannel, true, 15));
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetReceiveTimestampOffsetStatus(
+        tbChannel.videoChannel, true, 3));
+    EXPECT_EQ(-1, ViE.rtp_rtcp->SetReceiveTimestampOffsetStatus(
+        tbChannel.videoChannel, true, 3));
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetReceiveTimestampOffsetStatus(
+            tbChannel.videoChannel, false, 3));
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetReceiveTimestampOffsetStatus(
+        tbChannel.videoChannel, true, 3));
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetReceiveTimestampOffsetStatus(
+              tbChannel.videoChannel, false, 3));
+    EXPECT_EQ(0, ViE.rtp_rtcp->SetReceiveTimestampOffsetStatus(
+            tbChannel.videoChannel, false, 3));
+
+
+
+    //***************************************************************
+    //  Testing finished. Tear down Video Engine
+    //***************************************************************
+}
diff --git a/src/video_engine/test/auto_test/source/vie_autotest_simulcast.cc b/src/video_engine/test/auto_test/source/vie_autotest_simulcast.cc
new file mode 100644
index 0000000..f26340b
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_autotest_simulcast.cc
@@ -0,0 +1,608 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <iostream>
+
+#include "common_types.h"
+#include "tb_external_transport.h"
+#include "voe_base.h"
+#include "vie_autotest_defines.h"
+#include "vie_autotest.h"
+#include "vie_base.h"
+#include "vie_capture.h"
+#include "vie_codec.h"
+#include "vie_network.h"
+#include "vie_render.h"
+#include "vie_rtp_rtcp.h"
+
+#define VCM_RED_PAYLOAD_TYPE        96
+#define VCM_ULPFEC_PAYLOAD_TYPE     97
+
+void InitialSingleStreamSettings(webrtc::VideoCodec* video_codec) {
+  video_codec->numberOfSimulcastStreams = 0;
+  video_codec->width = 1200;
+  video_codec->height = 800;
+}
+
+void SetSimulcastSettings(webrtc::VideoCodec* video_codec) {
+  video_codec->width = 1280;
+  video_codec->height = 720;
+  // simulcast settings
+  video_codec->numberOfSimulcastStreams = 3;
+  video_codec->simulcastStream[0].width = 320;
+  video_codec->simulcastStream[0].height = 180;
+  video_codec->simulcastStream[0].numberOfTemporalLayers = 0;
+  video_codec->simulcastStream[0].maxBitrate = 100;
+  video_codec->simulcastStream[0].qpMax = video_codec->qpMax;
+
+  video_codec->simulcastStream[1].width = 640;
+  video_codec->simulcastStream[1].height = 360;
+  video_codec->simulcastStream[1].numberOfTemporalLayers = 0;
+  video_codec->simulcastStream[1].maxBitrate = 500;
+  video_codec->simulcastStream[1].qpMax = video_codec->qpMax;
+
+  video_codec->simulcastStream[2].width = 1280;
+  video_codec->simulcastStream[2].height = 720;
+  video_codec->simulcastStream[2].numberOfTemporalLayers = 0;
+  video_codec->simulcastStream[2].maxBitrate = 1200;
+  video_codec->simulcastStream[2].qpMax = video_codec->qpMax;
+}
+
+void RuntimeSingleStreamSettings(webrtc::VideoCodec* video_codec) {
+  SetSimulcastSettings(video_codec);
+  video_codec->width = 1200;
+  video_codec->height = 800;
+  video_codec->numberOfSimulcastStreams = 3;
+  video_codec->simulcastStream[0].maxBitrate = 0;
+  video_codec->simulcastStream[1].maxBitrate = 0;
+  video_codec->simulcastStream[2].maxBitrate = 0;
+}
+
+int VideoEngineSimulcastTest(void* window1, void* window2)
+{
+    //********************************************************
+    //  Begin create/initialize Video Engine for testing
+    //********************************************************
+
+    int error = 0;
+
+    //
+    // Create a VideoEngine instance
+    //
+    webrtc::VideoEngine* ptrViE = NULL;
+    ptrViE = webrtc::VideoEngine::Create();
+    if (ptrViE == NULL)
+    {
+        printf("ERROR in VideoEngine::Create\n");
+        return -1;
+    }
+
+    error = ptrViE->SetTraceFilter(webrtc::kTraceAll);
+    if (error == -1)
+    {
+        printf("ERROR in VideoEngine::SetTraceLevel\n");
+        return -1;
+    }
+
+
+    std::string trace_file =
+        ViETest::GetResultOutputPath() + "ViESimulcast_trace.txt";
+    error = ptrViE->SetTraceFile(trace_file.c_str());
+    if (error == -1)
+    {
+        printf("ERROR in VideoEngine::SetTraceFile\n");
+        return -1;
+    }
+
+    //
+    // Init VideoEngine and create a channel
+    //
+    webrtc::ViEBase* ptrViEBase = webrtc::ViEBase::GetInterface(ptrViE);
+    if (ptrViEBase == NULL)
+    {
+        printf("ERROR in ViEBase::GetInterface\n");
+        return -1;
+    }
+
+    error = ptrViEBase->Init();
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::Init\n");
+        return -1;
+    }
+
+    int videoChannel = -1;
+    error = ptrViEBase->CreateChannel(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::CreateChannel\n");
+        return -1;
+    }
+
+    //
+    // List available capture devices, allocate and connect.
+    //
+    webrtc::ViECapture* ptrViECapture =
+        webrtc::ViECapture::GetInterface(ptrViE);
+    if (ptrViEBase == NULL)
+    {
+        printf("ERROR in ViECapture::GetInterface\n");
+        return -1;
+    }
+
+    const unsigned int KMaxDeviceNameLength = 128;
+    const unsigned int KMaxUniqueIdLength = 256;
+    char deviceName[KMaxDeviceNameLength];
+    memset(deviceName, 0, KMaxDeviceNameLength);
+    char uniqueId[KMaxUniqueIdLength];
+    memset(uniqueId, 0, KMaxUniqueIdLength);
+
+    printf("Available capture devices:\n");
+    int captureIdx = 0;
+    for (captureIdx = 0;
+         captureIdx < ptrViECapture->NumberOfCaptureDevices();
+         captureIdx++)
+    {
+        memset(deviceName, 0, KMaxDeviceNameLength);
+        memset(uniqueId, 0, KMaxUniqueIdLength);
+
+        error = ptrViECapture->GetCaptureDevice(captureIdx, deviceName,
+                                                KMaxDeviceNameLength, uniqueId,
+                                                KMaxUniqueIdLength);
+        if (error == -1)
+        {
+            printf("ERROR in ViECapture::GetCaptureDevice\n");
+            return -1;
+        }
+        printf("\t %d. %s\n", captureIdx + 1, deviceName);
+    }
+    printf("\nChoose capture device: ");
+#ifdef WEBRTC_ANDROID
+    captureIdx = 0;
+    printf("0\n");
+#else
+    if (scanf("%d", &captureIdx) != 1)
+    {
+        printf("Error in scanf()\n");
+        return -1;
+    }
+    getchar();
+    captureIdx = captureIdx - 1; // Compensate for idx start at 1.
+#endif
+    error = ptrViECapture->GetCaptureDevice(captureIdx, deviceName,
+                                            KMaxDeviceNameLength, uniqueId,
+                                            KMaxUniqueIdLength);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::GetCaptureDevice\n");
+        return -1;
+    }
+
+    int captureId = 0;
+    error = ptrViECapture->AllocateCaptureDevice(uniqueId, KMaxUniqueIdLength,
+                                                 captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::AllocateCaptureDevice\n");
+        return -1;
+    }
+
+    error = ptrViECapture->ConnectCaptureDevice(captureId, videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::ConnectCaptureDevice\n");
+        return -1;
+    }
+
+    error = ptrViECapture->StartCapture(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::StartCapture\n");
+        return -1;
+    }
+
+    //
+    // RTP/RTCP settings
+    //
+    webrtc::ViERTP_RTCP* ptrViERtpRtcp =
+        webrtc::ViERTP_RTCP::GetInterface(ptrViE);
+    if (ptrViERtpRtcp == NULL)
+    {
+        printf("ERROR in ViERTP_RTCP::GetInterface\n");
+        return -1;
+    }
+
+    error = ptrViERtpRtcp->SetRTCPStatus(videoChannel,
+                                         webrtc::kRtcpCompound_RFC4585);
+    if (error == -1)
+    {
+        printf("ERROR in ViERTP_RTCP::SetRTCPStatus\n");
+        return -1;
+    }
+
+    error = ptrViERtpRtcp->SetKeyFrameRequestMethod(
+        videoChannel, webrtc::kViEKeyFrameRequestPliRtcp);
+    if (error == -1)
+    {
+        printf("ERROR in ViERTP_RTCP::SetKeyFrameRequestMethod\n");
+        return -1;
+    }
+
+    //
+    // Set up rendering
+    //
+    webrtc::ViERender* ptrViERender = webrtc::ViERender::GetInterface(ptrViE);
+    if (ptrViERender == NULL)
+    {
+        printf("ERROR in ViERender::GetInterface\n");
+        return -1;
+    }
+
+    error
+        = ptrViERender->AddRenderer(captureId, window1, 0, 0.0, 0.0, 1.0, 1.0);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::AddRenderer\n");
+        return -1;
+    }
+
+    error = ptrViERender->StartRender(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StartRender\n");
+        return -1;
+    }
+
+    error = ptrViERender->AddRenderer(videoChannel, window2, 1, 0.0, 0.0, 1.0,
+                                      1.0);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::AddRenderer\n");
+        return -1;
+    }
+
+    error = ptrViERender->StartRender(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StartRender\n");
+        return -1;
+    }
+
+    //
+    // Setup codecs
+    //
+    webrtc::ViECodec* ptrViECodec = webrtc::ViECodec::GetInterface(ptrViE);
+    if (ptrViECodec == NULL)
+    {
+        printf("ERROR in ViECodec::GetInterface\n");
+        return -1;
+    }
+
+    // Check available codecs and prepare receive codecs
+    printf("\nAvailable codecs:\n");
+    webrtc::VideoCodec videoCodec;
+    memset(&videoCodec, 0, sizeof(webrtc::VideoCodec));
+    int codecIdx = 0;
+    for (codecIdx = 0; codecIdx < ptrViECodec->NumberOfCodecs(); codecIdx++)
+    {
+        error = ptrViECodec->GetCodec(codecIdx, videoCodec);
+        if (error == -1)
+        {
+            printf("ERROR in ViECodec::GetCodec\n");
+            return -1;
+        }
+        // try to keep the test frame size small when I420
+        if (videoCodec.codecType != webrtc::kVideoCodecVP8)
+        {
+            continue;
+        }
+        error = ptrViECodec->SetReceiveCodec(videoChannel, videoCodec);
+        if (error == -1)
+        {
+            printf("ERROR in ViECodec::SetReceiveCodec\n");
+            return -1;
+        }
+        if (videoCodec.codecType != webrtc::kVideoCodecRED
+            && videoCodec.codecType != webrtc::kVideoCodecULPFEC)
+        {
+            printf("\t %d. %s\n", codecIdx + 1, videoCodec.plName);
+        }
+        break;
+    }
+    error = ptrViECodec->GetCodec(codecIdx, videoCodec);
+    if (error == -1)
+    {
+        printf("ERROR in ViECodec::GetCodec\n");
+        return -1;
+    }
+
+    bool simulcast_mode = true;
+    int num_streams = 1;
+    // Set spatial resolution option
+    if (simulcast_mode) {
+      SetSimulcastSettings(&videoCodec);
+      num_streams = videoCodec.numberOfSimulcastStreams;
+    } else {
+      InitialSingleStreamSettings(&videoCodec);
+      num_streams = 1;
+    }
+
+    // Set start bit rate
+    std::string str;
+    std::cout << std::endl;
+    std::cout << "Choose start rate (in kbps). Press enter for default:  ";
+    std::getline(std::cin, str);
+    int startRate = atoi(str.c_str());
+    if(startRate != 0)
+    {
+        videoCodec.startBitrate=startRate;
+    }
+
+    error = ptrViECodec->SetSendCodec(videoChannel, videoCodec);
+    if (error == -1)
+    {
+        printf("ERROR in ViECodec::SetSendCodec\n");
+        return -1;
+    }
+    //
+    // Address settings
+    //
+    webrtc::ViENetwork* ptrViENetwork =
+        webrtc::ViENetwork::GetInterface(ptrViE);
+    if (ptrViENetwork == NULL)
+    {
+        printf("ERROR in ViENetwork::GetInterface\n");
+        return -1;
+    }
+
+    // Setting External transport
+    TbExternalTransport extTransport(*(ptrViENetwork));
+
+    error = ptrViENetwork->RegisterSendTransport(videoChannel,
+                                                 extTransport);
+    if (error == -1)
+    {
+        printf("ERROR in ViECodec::RegisterSendTransport \n");
+        return -1;
+    }
+
+    extTransport.SetPacketLoss(0);
+
+    // Set network delay value
+    extTransport.SetNetworkDelay(10);
+
+    for (int idx = 0; idx < num_streams; idx++)
+    {
+        error = ptrViERtpRtcp->SetLocalSSRC(
+            videoChannel,
+            idx+1, // SSRC
+            webrtc::kViEStreamTypeNormal,
+            idx);
+        if (error == -1)
+        {
+            printf("ERROR in ViERTP_RTCP::SetLocalSSRC(idx:%d)\n",
+                   idx);
+            return -1;
+        }
+    }
+    extTransport.SetSSRCFilter(num_streams);
+
+    error = ptrViEBase->StartReceive(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViENetwork::StartReceive\n");
+        return -1;
+    }
+
+    error = ptrViEBase->StartSend(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViENetwork::StartSend\n");
+        return -1;
+    }
+
+    // Create a receive channel to verify that it doesn't mess up toggling
+    // between single stream and simulcast.
+    int videoChannel2 = -1;
+    error = ptrViEBase->CreateReceiveChannel(videoChannel2, videoChannel);
+    if (error == -1) {
+      printf("ERROR in ViEBase::CreateReceiveChannel\n");
+      return -1;
+    }
+
+    //********************************************************
+    //  Engine started
+    //********************************************************
+
+    printf("\nSimulcast call started\n\n");
+    do
+    {
+        printf("Enter new SSRC filter 1,2 or 3\n");
+        printf("... or 0 to switch between simulcast and a single stream\n");
+        printf("Press enter to stop...");
+        str.clear();
+        std::getline(std::cin, str);
+        if (!str.empty())
+        {
+            int ssrc = atoi(str.c_str());
+            if (ssrc == 0) {
+              // Toggle between simulcast and a single stream with different
+              // resolution.
+              if (simulcast_mode) {
+                RuntimeSingleStreamSettings(&videoCodec);
+                num_streams = 1;
+                printf("Disabling simulcast\n");
+              } else {
+                SetSimulcastSettings(&videoCodec);
+                num_streams = videoCodec.numberOfSimulcastStreams;
+                printf("Enabling simulcast\n");
+              }
+              simulcast_mode = !simulcast_mode;
+              if (ptrViECodec->SetSendCodec(videoChannel, videoCodec) != 0) {
+                printf("ERROR switching between simulcast and single stream\n");
+                return -1;
+              }
+              for (int idx = 0; idx < num_streams; idx++)
+              {
+                  error = ptrViERtpRtcp->SetLocalSSRC(
+                      videoChannel,
+                      idx+1, // SSRC
+                      webrtc::kViEStreamTypeNormal,
+                      idx);
+                  if (error == -1)
+                  {
+                      printf("ERROR in ViERTP_RTCP::SetLocalSSRC(idx:%d)\n",
+                             idx);
+                      return -1;
+                  }
+              }
+              extTransport.SetSSRCFilter(num_streams);
+            } else if (ssrc > 0 && ssrc < 4)
+            {
+                extTransport.SetSSRCFilter(ssrc);
+            } else
+            {
+                printf("Invalid SSRC\n");
+            }
+        } else
+        {
+            break;
+        }
+    } while (true);
+
+    //********************************************************
+    //  Testing finished. Tear down Video Engine
+    //********************************************************
+
+    error = ptrViEBase->DeleteChannel(videoChannel2);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::DeleteChannel\n");
+        return -1;
+    }
+
+    error = ptrViEBase->StopReceive(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::StopReceive\n");
+        return -1;
+    }
+
+    error = ptrViEBase->StopSend(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::StopSend\n");
+        return -1;
+    }
+
+    error = ptrViERender->StopRender(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StopRender\n");
+        return -1;
+    }
+
+    error = ptrViERender->RemoveRenderer(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::RemoveRenderer\n");
+        return -1;
+    }
+
+    error = ptrViERender->StopRender(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::StopRender\n");
+        return -1;
+    }
+
+    error = ptrViERender->RemoveRenderer(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViERender::RemoveRenderer\n");
+        return -1;
+    }
+
+    error = ptrViECapture->StopCapture(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::StopCapture\n");
+        return -1;
+    }
+
+    error = ptrViECapture->DisconnectCaptureDevice(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::DisconnectCaptureDevice\n");
+        return -1;
+    }
+
+    error = ptrViECapture->ReleaseCaptureDevice(captureId);
+    if (error == -1)
+    {
+        printf("ERROR in ViECapture::ReleaseCaptureDevice\n");
+        return -1;
+    }
+
+    error = ptrViEBase->DeleteChannel(videoChannel);
+    if (error == -1)
+    {
+        printf("ERROR in ViEBase::DeleteChannel\n");
+        return -1;
+    }
+
+    int remainingInterfaces = 0;
+    remainingInterfaces = ptrViECodec->Release();
+    remainingInterfaces += ptrViECapture->Release();
+    remainingInterfaces += ptrViERtpRtcp->Release();
+    remainingInterfaces += ptrViERender->Release();
+    remainingInterfaces += ptrViENetwork->Release();
+    remainingInterfaces += ptrViEBase->Release();
+    if (remainingInterfaces > 0)
+    {
+        printf("ERROR: Could not release all interfaces\n");
+        return -1;
+    }
+
+    bool deleted = webrtc::VideoEngine::Delete(ptrViE);
+    if (deleted == false)
+    {
+        printf("ERROR in VideoEngine::Delete\n");
+        return -1;
+    }
+    return 0;
+
+    //
+    // END:  VideoEngine 3.0 Sample Code
+    //
+    // ===================================================================
+}
+
+int ViEAutoTest::ViESimulcastCall()
+{
+    ViETest::Log(" ");
+    ViETest::Log("========================================");
+    ViETest::Log(" ViE Autotest Simulcast Call\n");
+
+    if (VideoEngineSimulcastTest(_window1, _window2) == 0)
+    {
+        ViETest::Log(" ");
+        ViETest::Log(" ViE Autotest Simulcast Call Done");
+        ViETest::Log("========================================");
+        ViETest::Log(" ");
+
+        return 0;
+    }
+    ViETest::Log(" ");
+    ViETest::Log(" ViE Autotest Simulcast Call Failed");
+    ViETest::Log("========================================");
+    ViETest::Log(" ");
+    return 1;
+}
diff --git a/src/video_engine/test/auto_test/source/vie_autotest_win.cc b/src/video_engine/test/auto_test/source/vie_autotest_win.cc
new file mode 100755
index 0000000..b722d3b
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_autotest_win.cc
@@ -0,0 +1,210 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// vie_autotest_windows.cc
+//
+
+#include "vie_autotest_windows.h"
+
+#include "vie_autotest_defines.h"
+#include "vie_autotest_main.h"
+
+#include "engine_configurations.h"
+#include "critical_section_wrapper.h"
+#include "thread_wrapper.h"
+
+#include <windows.h>
+
+#ifdef _DEBUG
+//#include "vld.h"
+#endif
+
+// Disable Visual studio warnings
+// 'this' : used in base member initializer list
+#pragma warning(disable: 4355)
+
+LRESULT CALLBACK ViEAutoTestWinProc(HWND hWnd, UINT uMsg, WPARAM wParam,
+                                    LPARAM lParam) {
+  switch (uMsg) {
+    case WM_DESTROY:
+      PostQuitMessage( WM_QUIT);
+      break;
+    case WM_COMMAND:
+      break;
+  }
+  return DefWindowProc(hWnd, uMsg, wParam, lParam);
+}
+
+ViEAutoTestWindowManager::ViEAutoTestWindowManager()
+    : _window1(NULL),
+      _window2(NULL),
+      _terminate(false),
+      _eventThread(*webrtc::ThreadWrapper::CreateThread(
+          EventProcess, this, webrtc::kNormalPriority,
+          "ViEAutotestEventThread")),
+      _crit(*webrtc::CriticalSectionWrapper::CreateCriticalSection()),
+      _hwnd1(NULL),
+      _hwnd2(NULL),
+      _hwnd1Size(),
+      _hwnd2Size(),
+      _hwnd1Title(),
+      _hwnd2Title() {
+}
+
+ViEAutoTestWindowManager::~ViEAutoTestWindowManager() {
+  if (_hwnd1) {
+    ViEDestroyWindow(_hwnd1);
+  }
+  if (_hwnd2) {
+    ViEDestroyWindow(_hwnd1);
+  }
+  delete &_crit;
+}
+
+void* ViEAutoTestWindowManager::GetWindow1() {
+  return _window1;
+}
+
+void* ViEAutoTestWindowManager::GetWindow2() {
+  return _window2;
+}
+
+int ViEAutoTestWindowManager::CreateWindows(AutoTestRect window1Size,
+                                            AutoTestRect window2Size,
+                                            void* window1Title,
+                                            void* window2Title) {
+  _hwnd1Size.Copy(window1Size);
+  _hwnd2Size.Copy(window2Size);
+  memcpy(_hwnd1Title, window1Title, TITLE_LENGTH);
+  memcpy(_hwnd2Title, window2Title, TITLE_LENGTH);
+
+  unsigned int tId = 0;
+  _eventThread.Start(tId);
+
+  do {
+    _crit.Enter();
+    if (_window1 != NULL) {
+      break;
+    }
+    _crit.Leave();
+    AutoTestSleep(10);
+  } while (true);
+  _crit.Leave();
+  return 0;
+}
+
+int ViEAutoTestWindowManager::TerminateWindows() {
+  _eventThread.SetNotAlive();
+
+  _terminate = true;
+  if (_eventThread.Stop()) {
+    _crit.Enter();
+    delete &_eventThread;
+    _crit.Leave();
+  }
+
+  return 0;
+}
+
+bool ViEAutoTestWindowManager::EventProcess(void* obj) {
+  return static_cast<ViEAutoTestWindowManager*> (obj)->EventLoop();
+}
+
+bool ViEAutoTestWindowManager::EventLoop() {
+  _crit.Enter();
+
+  ViECreateWindow(_hwnd1, _hwnd1Size.origin.x, _hwnd1Size.origin.y,
+                  _hwnd1Size.size.width, _hwnd1Size.size.height, _hwnd1Title);
+  ViECreateWindow(_hwnd2, _hwnd2Size.origin.x, _hwnd2Size.origin.y,
+                  _hwnd2Size.size.width, _hwnd2Size.size.height, _hwnd2Title);
+
+  _window1 = (void*) _hwnd1;
+  _window2 = (void*) _hwnd2;
+  MSG msg;
+  while (!_terminate) {
+    if (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) {
+      TranslateMessage(&msg);
+      DispatchMessage(&msg);
+    }
+    _crit.Leave();
+    AutoTestSleep(10);
+    _crit.Enter();
+  }
+  ViEDestroyWindow(_hwnd1);
+  ViEDestroyWindow(_hwnd2);
+  _crit.Leave();
+
+  return false;
+}
+
+int ViEAutoTestWindowManager::ViECreateWindow(HWND &hwndMain, int xPos,
+                                              int yPos, int width, int height,
+                                              TCHAR* className) {
+  HINSTANCE hinst = GetModuleHandle(0);
+  WNDCLASSEX wcx;
+  wcx.hInstance = hinst;
+  wcx.lpszClassName = className;
+  wcx.lpfnWndProc = (WNDPROC) ViEAutoTestWinProc;
+  wcx.style = CS_DBLCLKS;
+  wcx.hIcon = LoadIcon(NULL, IDI_APPLICATION);
+  wcx.hIconSm = LoadIcon(NULL, IDI_APPLICATION);
+  wcx.hCursor = LoadCursor(NULL, IDC_ARROW);
+  wcx.lpszMenuName = NULL;
+  wcx.cbSize = sizeof(WNDCLASSEX);
+  wcx.cbClsExtra = 0;
+  wcx.cbWndExtra = 0;
+  wcx.hbrBackground = GetSysColorBrush(COLOR_3DFACE);
+
+  RegisterClassEx(&wcx);
+
+  // Create the main window.
+  hwndMain = CreateWindowEx(0,          // no extended styles
+                            className,  // class name
+                            className,  // window name
+                            WS_OVERLAPPED | WS_THICKFRAME,  // overlapped window
+                            xPos,    // horizontal position
+                            yPos,    // vertical position
+                            width,   // width
+                            height,  // height
+                            (HWND) NULL,   // no parent or owner window
+                            (HMENU) NULL,  // class menu used
+                            hinst,  // instance handle
+                            NULL);  // no window creation data
+
+  if (!hwndMain)
+    return -1;
+
+  // Show the window using the flag specified by the program
+  // that started the application, and send the application
+  // a WM_PAINT message.
+  ShowWindow(hwndMain, SW_SHOWDEFAULT);
+  UpdateWindow(hwndMain);
+
+  ::SetWindowPos(hwndMain, HWND_TOP, xPos, yPos, width, height,
+                 SWP_FRAMECHANGED);
+
+  return 0;
+}
+
+int ViEAutoTestWindowManager::ViEDestroyWindow(HWND& hwnd) {
+  ::DestroyWindow(hwnd);
+  return 0;
+}
+
+bool ViEAutoTestWindowManager::SetTopmostWindow() {
+  // Meant to put terminal window on top
+  return true;
+}
+
+int main(int argc, char* argv[]) {
+  ViEAutoTestMain auto_test;
+  return auto_test.RunTests(argc, argv);
+}
diff --git a/src/video_engine/test/auto_test/source/vie_file_based_comparison_tests.cc b/src/video_engine/test/auto_test/source/vie_file_based_comparison_tests.cc
new file mode 100644
index 0000000..2f01381
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_file_based_comparison_tests.cc
@@ -0,0 +1,159 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/test/auto_test/interface/vie_file_based_comparison_tests.h"
+
+#include "video_engine/test/auto_test/interface/vie_autotest_defines.h"
+#include "video_engine/test/auto_test/primitives/base_primitives.h"
+#include "video_engine/test/auto_test/primitives/codec_primitives.h"
+#include "video_engine/test/auto_test/primitives/framedrop_primitives.h"
+#include "video_engine/test/auto_test/primitives/general_primitives.h"
+#include "video_engine/test/libvietest/include/tb_interfaces.h"
+#include "video_engine/test/libvietest/include/vie_fake_camera.h"
+#include "video_engine/test/libvietest/include/vie_to_file_renderer.h"
+
+bool ViEFileBasedComparisonTests::TestCallSetup(
+    const std::string& i420_video_file,
+    int width,
+    int height,
+    ViEToFileRenderer* local_file_renderer,
+    ViEToFileRenderer* remote_file_renderer) {
+
+  TbInterfaces interfaces("TestCallSetup");
+
+  int video_channel = -1;
+  EXPECT_EQ(0, interfaces.base->CreateChannel(video_channel));
+
+  ViEFakeCamera fake_camera(interfaces.capture);
+  if (!fake_camera.StartCameraInNewThread(i420_video_file,
+                                          width,
+                                          height)) {
+    // No point in continuing if we have no proper video source
+    ADD_FAILURE() << "Could not open input video " << i420_video_file <<
+        ": aborting test...";
+    return false;
+  }
+  int capture_id = fake_camera.capture_id();
+
+  // Apparently, we need to connect external capture devices, but we should
+  // not start them since the external device is not a proper device.
+  EXPECT_EQ(0, interfaces.capture->ConnectCaptureDevice(
+      capture_id, video_channel));
+
+  ConfigureRtpRtcp(interfaces.rtp_rtcp, video_channel);
+
+  webrtc::ViERender *render_interface = interfaces.render;
+
+  RenderToFile(render_interface, capture_id, local_file_renderer);
+  RenderToFile(render_interface, video_channel, remote_file_renderer);
+
+  // Run the test itself:
+  const char* device_name = "Fake Capture Device";
+
+  ::TestI420CallSetup(interfaces.codec, interfaces.video_engine,
+                      interfaces.base, interfaces.network, video_channel,
+                      device_name);
+
+  AutoTestSleep(KAutoTestSleepTimeMs);
+
+  EXPECT_EQ(0, interfaces.base->StopReceive(video_channel));
+
+  StopAndRemoveRenderers(interfaces.base, render_interface, video_channel,
+                         capture_id);
+
+  interfaces.capture->DisconnectCaptureDevice(video_channel);
+
+  // Stop sending data, clean up the camera thread and release the capture
+  // device. Note that this all happens after StopEverything, so this
+  // tests that the system doesn't mind that the external capture device sends
+  // data after rendering has been stopped.
+  fake_camera.StopCamera();
+
+  EXPECT_EQ(0, interfaces.base->DeleteChannel(video_channel));
+  return true;
+}
+
+bool ViEFileBasedComparisonTests::TestCodecs(
+    const std::string& i420_video_file,
+    int width,
+    int height,
+    ViEToFileRenderer* local_file_renderer,
+    ViEToFileRenderer* remote_file_renderer) {
+
+  TbInterfaces interfaces("TestCodecs");
+
+  ViEFakeCamera fake_camera(interfaces.capture);
+  if (!fake_camera.StartCameraInNewThread(i420_video_file, width, height)) {
+    // No point in continuing if we have no proper video source
+    ADD_FAILURE() << "Could not open input video " << i420_video_file <<
+        ": aborting test...";
+    return false;
+  }
+
+  int video_channel = -1;
+  int capture_id = fake_camera.capture_id();
+
+  EXPECT_EQ(0, interfaces.base->CreateChannel(video_channel));
+  EXPECT_EQ(0, interfaces.capture->ConnectCaptureDevice(
+      capture_id, video_channel));
+
+  ConfigureRtpRtcp(interfaces.rtp_rtcp, video_channel);
+
+  RenderToFile(interfaces.render, capture_id, local_file_renderer);
+  RenderToFile(interfaces.render, video_channel, remote_file_renderer);
+
+  // Force the codec resolution to what our input video is so we can make
+  // comparisons later. Our comparison algorithms wouldn't like scaling.
+  ::TestCodecs(interfaces, capture_id, video_channel, width, height);
+
+  fake_camera.StopCamera();
+  return true;
+}
+
+void ViEFileBasedComparisonTests::TestFullStack(
+    const std::string& i420_video_file,
+    int width,
+    int height,
+    int bit_rate_kbps,
+    int packet_loss_percent,
+    int network_delay_ms,
+    ViEToFileRenderer* local_file_renderer,
+    ViEToFileRenderer* remote_file_renderer,
+    FrameDropDetector* frame_drop_detector) {
+  TbInterfaces interfaces("TestFullStack");
+
+  // Setup camera capturing from file.
+  ViEFakeCamera fake_camera(interfaces.capture);
+  if (!fake_camera.StartCameraInNewThread(i420_video_file, width, height)) {
+    // No point in continuing if we have no proper video source
+    ADD_FAILURE() << "Could not open input video " << i420_video_file <<
+        ": aborting test...";
+    return;
+  }
+  int video_channel = -1;
+  int capture_id = fake_camera.capture_id();
+  EXPECT_EQ(0, interfaces.base->CreateChannel(video_channel));
+
+  // Must set SSRC to avoid SSRC collision detection since we're sending and
+  // receiving from the same machine (that would cause frames being discarded
+  // and decoder reset).
+  EXPECT_EQ(0, interfaces.rtp_rtcp->SetLocalSSRC(video_channel, 12345));
+
+  EXPECT_EQ(0, interfaces.capture->ConnectCaptureDevice(
+      capture_id, video_channel));
+  ConfigureRtpRtcp(interfaces.rtp_rtcp, video_channel);
+  RenderToFile(interfaces.render, capture_id, local_file_renderer);
+  RenderToFile(interfaces.render, video_channel, remote_file_renderer);
+
+  ::TestFullStack(interfaces, capture_id, video_channel, width, height,
+                  bit_rate_kbps, packet_loss_percent, network_delay_ms,
+                  frame_drop_detector);
+  EXPECT_TRUE(fake_camera.StopCamera());
+}
diff --git a/src/video_engine/test/auto_test/source/vie_window_creator.cc b/src/video_engine/test/auto_test/source/vie_window_creator.cc
new file mode 100644
index 0000000..2205266
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_window_creator.cc
@@ -0,0 +1,55 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/test/auto_test/interface/vie_window_creator.h"
+
+#include "video_engine/include/vie_codec.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_main.h"
+#include "video_engine/test/auto_test/interface/vie_autotest_window_manager_interface.h"
+#include "video_engine/test/auto_test/interface/vie_window_manager_factory.h"
+#include "voice_engine/include/voe_codec.h"
+
+#if defined(WIN32)
+#include <tchar.h>
+#endif
+
+ViEWindowCreator::ViEWindowCreator() {
+#ifndef WEBRTC_ANDROID
+  window_manager_ =
+      ViEWindowManagerFactory::CreateWindowManagerForCurrentPlatform();
+#endif
+}
+
+ViEWindowCreator::~ViEWindowCreator() {
+  delete window_manager_;
+}
+
+ViEAutoTestWindowManagerInterface*
+  ViEWindowCreator::CreateTwoWindows() {
+#if defined(WIN32)
+  TCHAR window1Title[1024] = _T("ViE Autotest Window 1");
+  TCHAR window2Title[1024] = _T("ViE Autotest Window 2");
+#else
+  char window1Title[1024] = "ViE Autotest Window 1";
+  char window2Title[1024] = "ViE Autotest Window 2";
+#endif
+
+  AutoTestRect window1Size(352, 288, 600, 100);
+  AutoTestRect window2Size(352, 288, 1000, 100);
+  window_manager_->CreateWindows(window1Size, window2Size, window1Title,
+                                 window2Title);
+  window_manager_->SetTopmostWindow();
+
+  return window_manager_;
+}
+
+void ViEWindowCreator::TerminateWindows() {
+  window_manager_->TerminateWindows();
+}
diff --git a/src/video_engine/test/auto_test/source/vie_window_manager_factory_linux.cc b/src/video_engine/test/auto_test/source/vie_window_manager_factory_linux.cc
new file mode 100644
index 0000000..1b16878
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_window_manager_factory_linux.cc
@@ -0,0 +1,18 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vie_window_manager_factory.h"
+
+#include "vie_autotest_linux.h"
+
+ViEAutoTestWindowManagerInterface*
+ViEWindowManagerFactory::CreateWindowManagerForCurrentPlatform() {
+  return new ViEAutoTestWindowManager();
+}
diff --git a/src/video_engine/test/auto_test/source/vie_window_manager_factory_mac.mm b/src/video_engine/test/auto_test/source/vie_window_manager_factory_mac.mm
new file mode 100644
index 0000000..806d10f
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_window_manager_factory_mac.mm
@@ -0,0 +1,23 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "vie_window_manager_factory.h"
+
+#include "engine_configurations.h"
+#if defined(COCOA_RENDERING)
+#include "vie_autotest_mac_cocoa.h"
+#elif defined(CARBON_RENDERING)
+#include "vie_autotest_mac_carbon.h"
+#endif
+
+ViEAutoTestWindowManagerInterface*
+ViEWindowManagerFactory::CreateWindowManagerForCurrentPlatform() {
+  return new ViEAutoTestWindowManager();
+}
diff --git a/src/video_engine/test/auto_test/source/vie_window_manager_factory_win.cc b/src/video_engine/test/auto_test/source/vie_window_manager_factory_win.cc
new file mode 100644
index 0000000..11114fd
--- /dev/null
+++ b/src/video_engine/test/auto_test/source/vie_window_manager_factory_win.cc
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#include "vie_window_manager_factory.h"
+
+#include "vie_autotest_windows.h"
+
+ViEAutoTestWindowManagerInterface*
+ViEWindowManagerFactory::CreateWindowManagerForCurrentPlatform() {
+  return new ViEAutoTestWindowManager();
+}
diff --git a/src/video_engine/test/auto_test/vie_auto_test.gypi b/src/video_engine/test/auto_test/vie_auto_test.gypi
new file mode 100644
index 0000000..9caef6b
--- /dev/null
+++ b/src/video_engine/test/auto_test/vie_auto_test.gypi
@@ -0,0 +1,135 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    {
+      'target_name': 'vie_auto_test',
+      'type': 'executable',
+      'dependencies': [
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+        '<(webrtc_root)/modules/modules.gyp:video_render_module',
+        '<(webrtc_root)/modules/modules.gyp:video_capture_module',
+        '<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine_core',
+        '<(DEPTH)/testing/gtest.gyp:gtest',
+        '<(DEPTH)/third_party/google-gflags/google-gflags.gyp:google-gflags',
+        '<(webrtc_root)/test/metrics.gyp:metrics',
+        '<(webrtc_root)/test/test.gyp:test_support',
+        '<(webrtc_root)/test/libtest/libtest.gyp:libtest',
+        'video_engine_core',
+        'libvietest',
+      ],
+      'include_dirs': [
+        'interface/',
+        'helpers/',
+        'primitives',
+        '../../include',
+        '../..',
+        '../../../modules/video_coding/codecs/interface',
+        '../../../common_video/interface',
+      ],
+      'sources': [
+        'interface/vie_autotest.h',
+        'interface/vie_autotest_defines.h',
+        'interface/vie_autotest_linux.h',
+        'interface/vie_autotest_mac_cocoa.h',
+        'interface/vie_autotest_main.h',
+        'interface/vie_autotest_window_manager_interface.h',
+        'interface/vie_autotest_windows.h',
+        'interface/vie_file_based_comparison_tests.h',
+        'interface/vie_window_manager_factory.h',
+        'interface/vie_window_creator.h',
+
+        # New, fully automated tests
+        'automated/legacy_fixture.cc',
+        'automated/two_windows_fixture.cc',
+        'automated/vie_api_integration_test.cc',
+        'automated/vie_extended_integration_test.cc',
+        'automated/vie_rtp_fuzz_test.cc',
+        'automated/vie_standard_integration_test.cc',
+        'automated/vie_video_verification_test.cc',
+
+        # Test primitives
+        'primitives/base_primitives.cc',
+        'primitives/base_primitives.h',
+        'primitives/codec_primitives.cc',
+        'primitives/codec_primitives.h',
+        'primitives/framedrop_primitives.h',
+        'primitives/framedrop_primitives.cc',
+        'primitives/framedrop_primitives_unittest.cc',
+        'primitives/general_primitives.cc',
+        'primitives/general_primitives.h',
+
+        # Platform independent
+        'source/vie_autotest.cc',
+        'source/vie_autotest_base.cc',
+        'source/vie_autotest_capture.cc',
+        'source/vie_autotest_codec.cc',
+        'source/vie_autotest_encryption.cc',
+        'source/vie_autotest_file.cc',
+        'source/vie_autotest_image_process.cc',
+        'source/vie_autotest_loopback.cc',
+        'source/vie_autotest_main.cc',
+        'source/vie_autotest_network.cc',
+        'source/vie_autotest_render.cc',
+        'source/vie_autotest_record.cc',
+        'source/vie_autotest_rtp_rtcp.cc',
+        'source/vie_autotest_custom_call.cc',
+        'source/vie_autotest_simulcast.cc',
+        'source/vie_file_based_comparison_tests.cc',
+        'source/vie_window_creator.cc',
+
+        # Platform dependent
+        # Android
+        'source/vie_autotest_android.cc',
+        # Linux
+        'source/vie_autotest_linux.cc',
+        'source/vie_window_manager_factory_linux.cc',
+        # Mac
+        'source/vie_autotest_cocoa_mac.mm',
+        'source/vie_window_manager_factory_mac.mm',
+        # Windows
+        'source/vie_autotest_win.cc',
+        'source/vie_window_manager_factory_win.cc',
+      ],
+      'conditions': [
+        ['OS=="android"', {
+          'libraries': [
+            '-lGLESv2',
+            '-llog',
+          ],
+        }],
+        ['OS=="linux"', {
+          # TODO(andrew): These should be provided directly by the projects
+          #               which require them instead.
+          'libraries': [
+            '-lXext',
+            '-lX11',
+          ],
+        }],
+        ['OS=="mac"', {
+          'dependencies': [
+            # Use a special main for mac so we can access the webcam.
+            '<(webrtc_root)/test/test.gyp:test_support_main_threaded_mac',
+          ],
+          'xcode_settings': {
+            'OTHER_LDFLAGS': [
+              '-framework Foundation -framework AppKit -framework Cocoa -framework OpenGL -framework CoreVideo -framework CoreAudio -framework AudioToolbox',
+            ],
+          },
+        }],
+      ], # conditions
+    },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/video_engine/test/libvietest/helpers/vie_fake_camera.cc b/src/video_engine/test/libvietest/helpers/vie_fake_camera.cc
new file mode 100644
index 0000000..b9b7281
--- /dev/null
+++ b/src/video_engine/test/libvietest/helpers/vie_fake_camera.cc
@@ -0,0 +1,87 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#include "video_engine/test/libvietest/include/vie_fake_camera.h"
+
+#include <assert.h>
+
+#include "system_wrappers/interface/thread_wrapper.h"
+#include "video_engine/include/vie_capture.h"
+#include "video_engine/test/libvietest/include/vie_file_capture_device.h"
+
+// This callback runs the camera thread:
+bool StreamVideoFileRepeatedlyIntoCaptureDevice(void* data) {
+  ViEFileCaptureDevice* file_capture_device =
+      reinterpret_cast<ViEFileCaptureDevice*>(data);
+
+  // We want to interrupt the camera feeding thread every now and then in order
+  // to follow the contract for the system_wrappers thread library. 1.5 seconds
+  // seems about right here.
+  uint64_t time_slice_ms = 1500;
+  uint32_t max_fps = 30;
+
+  file_capture_device->ReadFileFor(time_slice_ms, max_fps);
+
+  return true;
+}
+
+ViEFakeCamera::ViEFakeCamera(webrtc::ViECapture* capture_interface)
+    : capture_interface_(capture_interface),
+      capture_id_(-1),
+      camera_thread_(NULL),
+      file_capture_device_(NULL) {
+}
+
+ViEFakeCamera::~ViEFakeCamera() {
+}
+
+bool ViEFakeCamera::StartCameraInNewThread(
+    const std::string& i420_test_video_path, int width, int height) {
+
+  assert(file_capture_device_ == NULL && camera_thread_ == NULL);
+
+  webrtc::ViEExternalCapture* externalCapture;
+  int result = capture_interface_->
+      AllocateExternalCaptureDevice(capture_id_, externalCapture);
+  if (result != 0) {
+    return false;
+  }
+
+  file_capture_device_ = new ViEFileCaptureDevice(externalCapture);
+  if (!file_capture_device_->OpenI420File(i420_test_video_path,
+                                          width,
+                                          height)) {
+    return false;
+  }
+
+  // Set up a thread which runs the fake camera. The capturer object is
+  // thread-safe.
+  camera_thread_ = webrtc::ThreadWrapper::CreateThread(
+      StreamVideoFileRepeatedlyIntoCaptureDevice, file_capture_device_);
+  unsigned int id;
+  camera_thread_->Start(id);
+
+  return true;
+}
+
+bool ViEFakeCamera::StopCamera() {
+  assert(file_capture_device_ != NULL && camera_thread_ != NULL);
+
+  camera_thread_->Stop();
+  file_capture_device_->CloseFile();
+
+  int result = capture_interface_->ReleaseCaptureDevice(capture_id_);
+
+  delete camera_thread_;
+  delete file_capture_device_;
+  camera_thread_ = NULL;
+  file_capture_device_ = NULL;
+
+  return result == 0;
+}
diff --git a/src/video_engine/test/libvietest/helpers/vie_file_capture_device.cc b/src/video_engine/test/libvietest/helpers/vie_file_capture_device.cc
new file mode 100644
index 0000000..5d6abfa
--- /dev/null
+++ b/src/video_engine/test/libvietest/helpers/vie_file_capture_device.cc
@@ -0,0 +1,105 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#include "video_engine/test/libvietest/include/vie_file_capture_device.h"
+
+#include <assert.h>
+
+#include "common_types.h"
+#include "modules/interface/module_common_types.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/event_wrapper.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "video_engine/include/vie_capture.h"
+
+// This class ensures we are not exceeding the max FPS.
+class FramePacemaker {
+ public:
+  explicit FramePacemaker(uint32_t max_fps)
+      : time_per_frame_ms_(1000 / max_fps) {
+    frame_start_ = webrtc::TickTime::MillisecondTimestamp();
+  }
+
+  void SleepIfNecessary(webrtc::EventWrapper* sleeper) {
+    uint64_t now = webrtc::TickTime::MillisecondTimestamp();
+    if (now - frame_start_ < time_per_frame_ms_) {
+      sleeper->Wait(time_per_frame_ms_ - (now - frame_start_));
+    }
+  }
+
+ private:
+  uint64_t frame_start_;
+  uint64_t time_per_frame_ms_;
+};
+
+ViEFileCaptureDevice::ViEFileCaptureDevice(
+    webrtc::ViEExternalCapture* input_sink)
+    : input_sink_(input_sink),
+      input_file_(NULL) {
+  mutex_ = webrtc::CriticalSectionWrapper::CreateCriticalSection();
+}
+
+ViEFileCaptureDevice::~ViEFileCaptureDevice() {
+  delete mutex_;
+}
+
+bool ViEFileCaptureDevice::OpenI420File(const std::string& path,
+                                        int width,
+                                        int height) {
+  webrtc::CriticalSectionScoped cs(*mutex_);
+  assert(input_file_ == NULL);
+
+  input_file_ = std::fopen(path.c_str(), "rb");
+  if (input_file_ == NULL) {
+    return false;
+  }
+
+  frame_length_ = 3 * width * height / 2;
+  width_  = width;
+  height_ = height;
+  return true;
+}
+
+void ViEFileCaptureDevice::ReadFileFor(uint64_t time_slice_ms,
+                                       uint32_t max_fps) {
+  webrtc::CriticalSectionScoped cs(*mutex_);
+  assert(input_file_ != NULL);
+
+  unsigned char* frame_buffer = new unsigned char[frame_length_];
+
+  webrtc::EventWrapper* sleeper = webrtc::EventWrapper::Create();
+
+  uint64_t start_time_ms = webrtc::TickTime::MillisecondTimestamp();
+  uint64_t elapsed_ms = 0;
+
+  while (elapsed_ms < time_slice_ms) {
+    FramePacemaker pacemaker(max_fps);
+    int read = std::fread(frame_buffer, 1, frame_length_, input_file_);
+
+    if (std::feof(input_file_)) {
+      std::rewind(input_file_);
+    }
+    input_sink_->IncomingFrame(frame_buffer, read, width_, height_,
+                               webrtc::kVideoI420,
+                               webrtc::TickTime::MillisecondTimestamp());
+
+    pacemaker.SleepIfNecessary(sleeper);
+    elapsed_ms = webrtc::TickTime::MillisecondTimestamp() - start_time_ms;
+  }
+
+  delete sleeper;
+  delete[] frame_buffer;
+}
+
+void ViEFileCaptureDevice::CloseFile() {
+  webrtc::CriticalSectionScoped cs(*mutex_);
+  assert(input_file_ != NULL);
+
+  std::fclose(input_file_);
+}
diff --git a/src/video_engine/test/libvietest/helpers/vie_to_file_renderer.cc b/src/video_engine/test/libvietest/helpers/vie_to_file_renderer.cc
new file mode 100644
index 0000000..f186aad
--- /dev/null
+++ b/src/video_engine/test/libvietest/helpers/vie_to_file_renderer.cc
@@ -0,0 +1,94 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/test/libvietest/include/vie_to_file_renderer.h"
+
+#include <assert.h>
+
+ViEToFileRenderer::ViEToFileRenderer()
+    : output_file_(NULL) {
+}
+
+ViEToFileRenderer::~ViEToFileRenderer() {
+}
+
+bool ViEToFileRenderer::PrepareForRendering(
+    const std::string& output_path,
+    const std::string& output_filename) {
+
+  assert(output_file_ == NULL);
+
+  output_file_ = std::fopen((output_path + output_filename).c_str(), "wb");
+  if (output_file_ == NULL) {
+    return false;
+  }
+
+  output_filename_ = output_filename;
+  output_path_ = output_path;
+  return true;
+}
+
+void ViEToFileRenderer::StopRendering() {
+  assert(output_file_ != NULL);
+  std::fclose(output_file_);
+  output_file_ = NULL;
+}
+
+bool ViEToFileRenderer::SaveOutputFile(const std::string& prefix) {
+  assert(output_file_ == NULL && output_filename_ != "");
+  if (std::rename((output_path_ + output_filename_).c_str(),
+                  (output_path_ + prefix + output_filename_).c_str()) != 0) {
+    std::perror("Failed to rename output file");
+    return false;
+  }
+  ForgetOutputFile();
+  return true;
+}
+
+bool ViEToFileRenderer::DeleteOutputFile() {
+  assert(output_file_ == NULL && output_filename_ != "");
+  if (std::remove((output_path_ + output_filename_).c_str()) != 0) {
+    std::perror("Failed to delete output file");
+    return false;
+  }
+  ForgetOutputFile();
+  return true;
+}
+
+const std::string ViEToFileRenderer::GetFullOutputPath() const {
+  return output_path_ + output_filename_;
+}
+
+void ViEToFileRenderer::ForgetOutputFile() {
+  output_filename_ = "";
+  output_path_ = "";
+}
+
+int ViEToFileRenderer::DeliverFrame(unsigned char *buffer,
+                                    int buffer_size,
+                                    uint32_t time_stamp,
+                                    int64_t render_time) {
+  assert(output_file_ != NULL);
+
+  int written = std::fwrite(buffer, sizeof(unsigned char),
+                            buffer_size, output_file_);
+
+  if (written == buffer_size) {
+    return 0;
+  } else {
+    return -1;
+  }
+}
+
+int ViEToFileRenderer::FrameSizeChange(unsigned int width,
+                                       unsigned int height,
+                                       unsigned int number_of_streams) {
+  return 0;
+}
diff --git a/src/video_engine/test/libvietest/include/tb_I420_codec.h b/src/video_engine/test/libvietest/include/tb_I420_codec.h
new file mode 100644
index 0000000..e1c9b79
--- /dev/null
+++ b/src/video_engine/test/libvietest/include/tb_I420_codec.h
@@ -0,0 +1,131 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ * This file contains the interface to I420 "codec"
+ * This is a dummy wrapper to allow VCM deal with raw I420 sequences
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_I420_CODEC_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_I420_CODEC_H_
+
+#include "modules/video_coding/codecs/interface/video_codec_interface.h"
+
+class TbI420Encoder: public webrtc::VideoEncoder
+{
+public:
+    TbI420Encoder();
+    virtual ~TbI420Encoder();
+
+    static WebRtc_Word32 VersionStatic(char* version,
+                                       WebRtc_Word32 length);
+    virtual WebRtc_Word32  Version(char* version,
+                                   WebRtc_Word32 length) const;
+
+    virtual WebRtc_Word32 InitEncode(const webrtc::VideoCodec* codecSettings,
+                                     WebRtc_Word32 numberOfCores,
+                                     WebRtc_UWord32 maxPayloadSize);
+
+    virtual WebRtc_Word32 Encode(
+        const webrtc::VideoFrame& inputImage,
+        const webrtc::CodecSpecificInfo* codecSpecificInfo,
+        const webrtc::VideoFrameType frameType);
+
+    virtual WebRtc_Word32 RegisterEncodeCompleteCallback(
+        webrtc::EncodedImageCallback* callback);
+
+    virtual WebRtc_Word32 Release();
+
+    virtual WebRtc_Word32 Reset();
+
+    virtual WebRtc_Word32 SetChannelParameters(WebRtc_UWord32 packetLoss,
+                                               int rtt);
+
+    virtual WebRtc_Word32 SetPacketLoss(WebRtc_UWord32 packetLoss);
+
+    virtual WebRtc_Word32 SetRates(WebRtc_UWord32 newBitRate,
+                                   WebRtc_UWord32 frameRate);
+
+    virtual WebRtc_Word32 SetPeriodicKeyFrames(bool enable);
+
+    virtual WebRtc_Word32 CodecConfigParameters(WebRtc_UWord8* /*buffer*/,
+                                                WebRtc_Word32 /*size*/);
+
+    struct FunctionCalls
+    {
+        WebRtc_Word32 InitEncode;
+        WebRtc_Word32 Encode;
+        WebRtc_Word32 RegisterEncodeCompleteCallback;
+        WebRtc_Word32 Release;
+        WebRtc_Word32 Reset;
+        WebRtc_Word32 SetRates;
+        WebRtc_Word32 SetPacketLoss;
+        WebRtc_Word32 SetPeriodicKeyFrames;
+        WebRtc_Word32 CodecConfigParameters;
+
+    };
+
+    FunctionCalls GetFunctionCalls();
+private:
+    bool _inited;
+    webrtc::EncodedImage _encodedImage;
+    FunctionCalls _functionCalls;
+    webrtc::EncodedImageCallback* _encodedCompleteCallback;
+
+}; // end of tbI420Encoder class
+
+
+/***************************/
+/* tbI420Decoder class */
+/***************************/
+
+class TbI420Decoder: public webrtc::VideoDecoder
+{
+public:
+    TbI420Decoder();
+    virtual ~TbI420Decoder();
+
+    virtual WebRtc_Word32 InitDecode(const webrtc::VideoCodec* inst,
+                                     WebRtc_Word32 numberOfCores);
+    virtual WebRtc_Word32 Decode(
+        const webrtc::EncodedImage& inputImage,
+        bool missingFrames,
+        const webrtc::RTPFragmentationHeader* fragmentation,
+        const webrtc::CodecSpecificInfo* codecSpecificInfo = NULL,
+        WebRtc_Word64 renderTimeMs = -1);
+
+    virtual WebRtc_Word32
+        RegisterDecodeCompleteCallback(webrtc::DecodedImageCallback* callback);
+    virtual WebRtc_Word32 Release();
+    virtual WebRtc_Word32 Reset();
+
+    struct FunctionCalls
+    {
+        WebRtc_Word32 InitDecode;
+        WebRtc_Word32 Decode;
+        WebRtc_Word32 RegisterDecodeCompleteCallback;
+        WebRtc_Word32 Release;
+        WebRtc_Word32 Reset;
+    };
+
+    FunctionCalls GetFunctionCalls();
+
+private:
+
+    webrtc::VideoFrame _decodedImage;
+    WebRtc_Word32 _width;
+    WebRtc_Word32 _height;
+    bool _inited;
+    FunctionCalls _functionCalls;
+    webrtc::DecodedImageCallback* _decodeCompleteCallback;
+
+}; // end of tbI420Decoder class
+
+#endif // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_I420_CODEC_H_
diff --git a/src/video_engine/test/libvietest/include/tb_capture_device.h b/src/video_engine/test/libvietest/include/tb_capture_device.h
new file mode 100644
index 0000000..10f8db8
--- /dev/null
+++ b/src/video_engine/test/libvietest/include/tb_capture_device.h
@@ -0,0 +1,37 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_CAPTURE_DEVICE_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_CAPTURE_DEVICE_H_
+
+#include <string>
+
+#include "modules/video_capture/main/interface/video_capture_factory.h"
+
+class TbInterfaces;
+
+class TbCaptureDevice
+{
+public:
+    TbCaptureDevice(TbInterfaces& Engine);
+    ~TbCaptureDevice(void);
+
+    int captureId;
+    void ConnectTo(int videoChannel);
+    void Disconnect(int videoChannel);
+    std::string device_name() const;
+
+private:
+    TbInterfaces& ViE;
+    webrtc::VideoCaptureModule* vcpm_;
+    std::string device_name_;
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_CAPTURE_DEVICE_H_
diff --git a/src/video_engine/test/libvietest/include/tb_external_transport.h b/src/video_engine/test/libvietest/include/tb_external_transport.h
new file mode 100644
index 0000000..6578811
--- /dev/null
+++ b/src/video_engine/test/libvietest/include/tb_external_transport.h
@@ -0,0 +1,159 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//
+// tb_external_transport.h
+//
+
+#ifndef WEBRTC_VIDEO_ENGINE_TEST_AUTOTEST_INTERFACE_TB_EXTERNAL_TRANSPORT_H_
+#define WEBRTC_VIDEO_ENGINE_TEST_AUTOTEST_INTERFACE_TB_EXTERNAL_TRANSPORT_H_
+
+#include <list>
+
+#include "common_types.h"
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+class ViENetwork;
+}
+
+// Allows to subscribe for callback when a frame is started being sent.
+class SendFrameCallback
+{
+public:
+    // Called once per frame (when a new RTP timestamp is detected) when the
+    // first data packet of the frame is being sent using the
+    // TbExternalTransport.SendPacket method.
+    virtual void FrameSent(unsigned int rtp_timestamp) = 0;
+protected:
+    SendFrameCallback() {}
+    virtual ~SendFrameCallback() {}
+};
+
+// Allows to subscribe for callback when the first packet of a frame is
+// received.
+class ReceiveFrameCallback
+{
+public:
+    // Called once per frame (when a new RTP timestamp is detected)
+    // during the processing of the RTP packet queue in
+    // TbExternalTransport::ViEExternalTransportProcess.
+    virtual void FrameReceived(unsigned int rtp_timestamp) = 0;
+protected:
+    ReceiveFrameCallback() {}
+    virtual ~ReceiveFrameCallback() {}
+};
+
+// External transport implementation for testing purposes.
+// A packet loss probability must be set in order to drop packets from the data
+// being sent to this class.
+// Will never drop packets from the first frame of a video sequence.
+class TbExternalTransport : public webrtc::Transport
+{
+public:
+    TbExternalTransport(webrtc::ViENetwork& vieNetwork);
+    ~TbExternalTransport(void);
+
+    virtual int SendPacket(int channel, const void *data, int len);
+    virtual int SendRTCPPacket(int channel, const void *data, int len);
+
+    // Should only be called before/after traffic is being processed.
+    // Only one observer can be set (multiple calls will overwrite each other).
+    virtual void RegisterSendFrameCallback(SendFrameCallback* callback);
+
+    // Should only be called before/after traffic is being processed.
+    // Only one observer can be set (multiple calls will overwrite each other).
+    virtual void RegisterReceiveFrameCallback(ReceiveFrameCallback* callback);
+
+    // The probability of a packet of being dropped. Packets belonging to the
+    // first packet (same RTP timestamp) will never be dropped.
+    WebRtc_Word32 SetPacketLoss(WebRtc_Word32 lossRate);  // Rate in %
+    void SetNetworkDelay(WebRtc_Word64 delayMs);
+    void SetSSRCFilter(WebRtc_UWord32 SSRC);
+
+    void ClearStats();
+    void GetStats(WebRtc_Word32& numRtpPackets,
+                  WebRtc_Word32& numDroppedPackets,
+                  WebRtc_Word32& numRtcpPackets);
+
+    void SetTemporalToggle(unsigned char layers);
+    void EnableSSRCCheck();
+    unsigned int ReceivedSSRC();
+
+    void EnableSequenceNumberCheck();
+    unsigned short GetFirstSequenceNumber();
+
+protected:
+    static bool ViEExternalTransportRun(void* object);
+    bool ViEExternalTransportProcess();
+private:
+    WebRtc_Word64 NowMs();
+
+    enum
+    {
+        KMaxPacketSize = 1650
+    };
+    enum
+    {
+        KMaxWaitTimeMs = 100
+    };
+    typedef struct
+    {
+        WebRtc_Word8 packetBuffer[KMaxPacketSize];
+        WebRtc_Word32 length;
+        WebRtc_Word32 channel;
+        WebRtc_Word64 receiveTime;
+    } VideoPacket;
+
+    webrtc::ViENetwork& _vieNetwork;
+    webrtc::ThreadWrapper& _thread;
+    webrtc::EventWrapper& _event;
+    webrtc::CriticalSectionWrapper& _crit;
+    webrtc::CriticalSectionWrapper& _statCrit;
+
+    WebRtc_Word32 _lossRate;
+    WebRtc_Word64 _networkDelayMs;
+    WebRtc_Word32 _rtpCount;
+    WebRtc_Word32 _rtcpCount;
+    WebRtc_Word32 _dropCount;
+
+    std::list<VideoPacket*> _rtpPackets;
+    std::list<VideoPacket*> _rtcpPackets;
+
+    SendFrameCallback* _send_frame_callback;
+    ReceiveFrameCallback* _receive_frame_callback;
+
+    unsigned char _temporalLayers;
+    unsigned short _seqNum;
+    unsigned short _sendPID;
+    unsigned char _receivedPID;
+    bool _switchLayer;
+    unsigned char _currentRelayLayer;
+    unsigned int _lastTimeMs;
+
+    bool _checkSSRC;
+    WebRtc_UWord32 _lastSSRC;
+    bool _filterSSRC;
+    WebRtc_UWord32 _SSRC;
+    bool _checkSequenceNumber;
+    WebRtc_UWord16 _firstSequenceNumber;
+
+    // Keep track of the first RTP timestamp so we don't do packet loss on
+    // the first frame.
+    WebRtc_UWord32 _firstRTPTimestamp;
+    // Track RTP timestamps so we invoke callbacks properly (if registered).
+    WebRtc_UWord32 _lastSendRTPTimestamp;
+    WebRtc_UWord32 _lastReceiveRTPTimestamp;
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_TEST_AUTOTEST_INTERFACE_TB_EXTERNAL_TRANSPORT_H_
diff --git a/src/video_engine/test/libvietest/include/tb_interfaces.h b/src/video_engine/test/libvietest/include/tb_interfaces.h
new file mode 100644
index 0000000..5060abb
--- /dev/null
+++ b/src/video_engine/test/libvietest/include/tb_interfaces.h
@@ -0,0 +1,56 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_INTERFACES_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_INTERFACES_H_
+
+#include <string>
+
+#include "constructor_magic.h"
+#include "common_types.h"
+#include "video_engine/include/vie_base.h"
+#include "video_engine/include/vie_capture.h"
+#include "video_engine/include/vie_codec.h"
+#include "video_engine/include/vie_image_process.h"
+#include "video_engine/include/vie_network.h"
+#include "video_engine/include/vie_render.h"
+#include "video_engine/include/vie_rtp_rtcp.h"
+#include "video_engine/include/vie_encryption.h"
+#include "video_engine/vie_defines.h"
+
+// This class deals with all the tedium of setting up video engine interfaces.
+// It does its work in constructor and destructor, so keeping it in scope is
+// enough. It also sets up tracing.
+class TbInterfaces
+{
+public:
+    // Sets up all interfaces and creates a trace file
+    TbInterfaces(const std::string& test_name);
+    ~TbInterfaces(void);
+
+    webrtc::VideoEngine* video_engine;
+    webrtc::ViEBase* base;
+    webrtc::ViECapture* capture;
+    webrtc::ViERender* render;
+    webrtc::ViERTP_RTCP* rtp_rtcp;
+    webrtc::ViECodec* codec;
+    webrtc::ViENetwork* network;
+    webrtc::ViEImageProcess* image_process;
+    webrtc::ViEEncryption* encryption;
+
+    int LastError() {
+        return base->LastError();
+    }
+
+private:
+    DISALLOW_COPY_AND_ASSIGN(TbInterfaces);
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_INTERFACES_H_
diff --git a/src/video_engine/test/libvietest/include/tb_video_channel.h b/src/video_engine/test/libvietest/include/tb_video_channel.h
new file mode 100644
index 0000000..5c7e6f8
--- /dev/null
+++ b/src/video_engine/test/libvietest/include/tb_video_channel.h
@@ -0,0 +1,42 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_VIDEO_CHANNEL_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_VIDEO_CHANNEL_H_
+
+#include "video_engine/test/libvietest/include/tb_interfaces.h"
+
+class TbVideoChannel
+{
+public:
+    TbVideoChannel(TbInterfaces& Engine,
+                   webrtc::VideoCodecType sendCodec = webrtc::kVideoCodecVP8,
+                   int width = 352, int height = 288, int frameRate = 30,
+                   int startBitrate = 300);
+
+    ~TbVideoChannel(void);
+
+    void SetFrameSettings(int width, int height, int frameRate);
+
+    void StartSend(const unsigned short rtpPort = 11000,
+                   const char* ipAddress = "127.0.0.1");
+
+    void StopSend();
+
+    void StartReceive(const unsigned short rtpPort = 11000);
+
+    void StopReceive();
+
+    int videoChannel;
+private:
+    TbInterfaces& ViE;
+};
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_INTERFACE_TB_VIDEO_CHANNEL_H_
diff --git a/src/video_engine/test/libvietest/include/vie_fake_camera.h b/src/video_engine/test/libvietest/include/vie_fake_camera.h
new file mode 100644
index 0000000..abc5d40
--- /dev/null
+++ b/src/video_engine/test/libvietest/include/vie_fake_camera.h
@@ -0,0 +1,50 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_HELPERS_VIE_FAKE_CAMERA_H_
+#define SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_HELPERS_VIE_FAKE_CAMERA_H_
+
+#include <string>
+
+namespace webrtc {
+class ViECapture;
+class ThreadWrapper;
+}
+
+class ViEFileCaptureDevice;
+
+// Registers an external capture device with the provided capture interface
+// and starts running a fake camera by reading frames from a file. The frame-
+// reading code runs in a separate thread which makes it possible to run tests
+// while the fake camera feeds data into the system. This class is not thread-
+// safe in itself (but handles its own thread in a safe manner).
+class ViEFakeCamera {
+ public:
+  // The argument is the capture interface to register with.
+  explicit ViEFakeCamera(webrtc::ViECapture* capture_interface);
+  virtual ~ViEFakeCamera();
+
+  // Runs the scenario in the class comments.
+  bool StartCameraInNewThread(const std::string& i420_test_video_path,
+                              int width,
+                              int height);
+  // Stops the camera and cleans up everything allocated by the start method.
+  bool StopCamera();
+
+  int capture_id() const { return capture_id_; }
+
+ private:
+  webrtc::ViECapture* capture_interface_;
+
+  int capture_id_;
+  webrtc::ThreadWrapper* camera_thread_;
+  ViEFileCaptureDevice* file_capture_device_;
+};
+
+#endif  // SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_HELPERS_VIE_FAKE_CAMERA_H_
diff --git a/src/video_engine/test/libvietest/include/vie_file_capture_device.h b/src/video_engine/test/libvietest/include/vie_file_capture_device.h
new file mode 100644
index 0000000..6348c5a
--- /dev/null
+++ b/src/video_engine/test/libvietest/include/vie_file_capture_device.h
@@ -0,0 +1,57 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_HELPERS_VIE_FILE_CAPTURE_DEVICE_H_
+#define SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_HELPERS_VIE_FILE_CAPTURE_DEVICE_H_
+
+#include <cstdio>
+
+#include <string>
+
+#include "typedefs.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class EventWrapper;
+class ViEExternalCapture;
+}
+
+// This class opens a i420 file and feeds it into a ExternalCapture instance,
+// thereby acting as a faked capture device with deterministic input.
+class ViEFileCaptureDevice {
+ public:
+  // The input sink is where to send the I420 video frames.
+  explicit ViEFileCaptureDevice(webrtc::ViEExternalCapture* input_sink);
+  virtual ~ViEFileCaptureDevice();
+
+  // Opens the provided I420 file and interprets it according to the provided
+  // width and height. Returns false if the file doesn't exist.
+  bool OpenI420File(const std::string& path, int width, int height);
+
+  // Reads the previously opened file for at most time_slice_ms milliseconds,
+  // after which it will return. It will make sure to sleep accordingly so we
+  // do not send more than max_fps cap (we may send less, though).
+  void ReadFileFor(uint64_t time_slice_ms, uint32_t max_fps);
+
+  // Closes the opened input file.
+  void CloseFile();
+
+ private:
+  webrtc::ViEExternalCapture* input_sink_;
+
+  std::FILE* input_file_;
+  webrtc::CriticalSectionWrapper* mutex_;
+
+  WebRtc_UWord32 frame_length_;
+  WebRtc_UWord32 width_;
+  WebRtc_UWord32 height_;
+};
+
+#endif  // SRC_VIDEO_ENGINE_MAIN_TEST_AUTOTEST_HELPERS_VIE_FILE_CAPTURE_DEVICE_H_
diff --git a/src/video_engine/test/libvietest/include/vie_to_file_renderer.h b/src/video_engine/test/libvietest/include/vie_to_file_renderer.h
new file mode 100644
index 0000000..08559af
--- /dev/null
+++ b/src/video_engine/test/libvietest/include/vie_to_file_renderer.h
@@ -0,0 +1,59 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VIDEO_ENGINE_TEST_AUTO_TEST_HELPERS_VIE_TO_FILE_RENDERER_H_
+#define SRC_VIDEO_ENGINE_TEST_AUTO_TEST_HELPERS_VIE_TO_FILE_RENDERER_H_
+
+#include <cstdio>
+#include <string>
+
+#include "video_engine/include/vie_render.h"
+
+class ViEToFileRenderer: public webrtc::ExternalRenderer {
+ public:
+  ViEToFileRenderer();
+  virtual ~ViEToFileRenderer();
+
+  // Returns false if we fail opening the output filename for writing.
+  bool PrepareForRendering(const std::string& output_path,
+                           const std::string& output_filename);
+
+  // Closes the output file.
+  void StopRendering();
+
+  // Deletes the closed output file from the file system. This is one option
+  // after calling StopRendering, the other being KeepOutputFile. This file
+  // renderer will forget about the file after this call and can be used again.
+  bool DeleteOutputFile();
+
+  // Renames the closed output file to its previous name with the provided
+  // prefix prepended. This file renderer will forget about the file after this
+  // call and can be used again.
+  bool SaveOutputFile(const std::string& prefix);
+
+  // Implementation of ExternalRenderer:
+  int FrameSizeChange(unsigned int width, unsigned int height,
+                      unsigned int number_of_streams);
+
+  int DeliverFrame(unsigned char* buffer, int buffer_size,
+                   uint32_t time_stamp,
+                   int64_t render_time);
+
+  const std::string GetFullOutputPath() const;
+
+ private:
+  void ForgetOutputFile();
+
+  std::FILE* output_file_;
+  std::string output_path_;
+  std::string output_filename_;
+};
+
+#endif  // SRC_VIDEO_ENGINE_TEST_AUTO_TEST_HELPERS_VIE_TO_FILE_RENDERER_H_
diff --git a/src/video_engine/test/libvietest/libvietest.gypi b/src/video_engine/test/libvietest/libvietest.gypi
new file mode 100644
index 0000000..562bdbe
--- /dev/null
+++ b/src/video_engine/test/libvietest/libvietest.gypi
@@ -0,0 +1,53 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+{
+  'targets': [
+    {
+      'target_name': 'libvietest',
+      'type': '<(library)',
+      'dependencies': [
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+        '<(DEPTH)/testing/gtest.gyp:gtest',
+        '<(webrtc_root)/test/test.gyp:test_support',
+        'video_engine_core',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include/',
+        ]
+      },
+      'include_dirs': [
+        'include/',
+        'helpers/',
+      ],
+      'sources': [
+        # Helper classes
+        'include/vie_fake_camera.h',
+        'include/vie_file_capture_device.h',
+        'include/vie_to_file_renderer.h',
+
+        'helpers/vie_fake_camera.cc',
+        'helpers/vie_file_capture_device.cc',
+        'helpers/vie_to_file_renderer.cc',
+
+        # Testbed classes
+        'include/tb_capture_device.h',
+        'include/tb_external_transport.h',
+        'include/tb_I420_codec.h',
+        'include/tb_interfaces.h',
+        'include/tb_video_channel.h',
+
+        'testbed/tb_capture_device.cc',
+        'testbed/tb_external_transport.cc',
+        'testbed/tb_I420_codec.cc',
+        'testbed/tb_interfaces.cc',
+        'testbed/tb_video_channel.cc',
+      ],
+    },
+  ],
+}
diff --git a/src/video_engine/test/libvietest/testbed/tb_I420_codec.cc b/src/video_engine/test/libvietest/testbed/tb_I420_codec.cc
new file mode 100644
index 0000000..af30307
--- /dev/null
+++ b/src/video_engine/test/libvietest/testbed/tb_I420_codec.cc
@@ -0,0 +1,299 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/test/libvietest/include/tb_I420_codec.h"
+
+#include <string.h>
+#include <stdio.h>
+#include <assert.h>
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+
+TbI420Encoder::TbI420Encoder() :
+    _inited(false), _encodedImage(), _encodedCompleteCallback(NULL)
+{
+    //
+    memset(&_functionCalls, 0, sizeof(_functionCalls));
+}
+
+TbI420Encoder::~TbI420Encoder()
+{
+    _inited = false;
+    if (_encodedImage._buffer != NULL)
+    {
+        delete[] _encodedImage._buffer;
+        _encodedImage._buffer = NULL;
+    }
+}
+
+WebRtc_Word32 TbI420Encoder::VersionStatic(char* version,
+                                           WebRtc_Word32 length)
+{
+    const char* str = "I420 version 1.0.0\n";
+    WebRtc_Word32 verLen = (WebRtc_Word32) strlen(str);
+    if (verLen > length)
+    {
+        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+    strncpy(version, str, length);
+    return verLen;
+}
+
+WebRtc_Word32 TbI420Encoder::Version(char* version,
+                                     WebRtc_Word32 length) const
+{
+    return VersionStatic(version, length);
+}
+
+WebRtc_Word32 TbI420Encoder::Release()
+{
+    _functionCalls.Release++;
+    // should allocate an encoded frame and then release it here, for that we
+    // actaully need an init flag
+    if (_encodedImage._buffer != NULL)
+    {
+        delete[] _encodedImage._buffer;
+        _encodedImage._buffer = NULL;
+    }
+    _inited = false;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+WebRtc_Word32 TbI420Encoder::Reset()
+{
+    _functionCalls.Reset++;
+    if (!_inited)
+    {
+        return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+    }
+    return WEBRTC_VIDEO_CODEC_OK;
+
+}
+
+WebRtc_Word32 TbI420Encoder::SetChannelParameters(WebRtc_UWord32 packetLoss,
+                                                  int rtt) {
+  return 0;
+}
+
+WebRtc_Word32 TbI420Encoder::InitEncode(const webrtc::VideoCodec* inst,
+                                        WebRtc_Word32 /*numberOfCores*/,
+                                        WebRtc_UWord32 /*maxPayloadSize */)
+{
+    _functionCalls.InitEncode++;
+    if (inst == NULL)
+    {
+        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+    if (inst->width < 1 || inst->height < 1)
+    {
+        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+
+    // allocating encoded memory
+    if (_encodedImage._buffer != NULL)
+    {
+        delete[] _encodedImage._buffer;
+        _encodedImage._buffer = NULL;
+        _encodedImage._size = 0;
+    }
+    const WebRtc_UWord32 newSize = (3 * inst->width * inst->height) >> 1;
+    WebRtc_UWord8* newBuffer = new WebRtc_UWord8[newSize];
+    if (newBuffer == NULL)
+    {
+        return WEBRTC_VIDEO_CODEC_MEMORY;
+    }
+    _encodedImage._size = newSize;
+    _encodedImage._buffer = newBuffer;
+
+    // if no memeory allocation, no point to init
+    _inited = true;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+WebRtc_Word32 TbI420Encoder::Encode(
+    const webrtc::VideoFrame& inputImage,
+    const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/,
+    const webrtc::VideoFrameType /*frameType*/)
+{
+    _functionCalls.Encode++;
+    if (!_inited)
+    {
+        return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+    }
+    if (_encodedCompleteCallback == NULL)
+    {
+        return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+    }
+
+    _encodedImage._frameType = webrtc::kKeyFrame; // no coding
+    _encodedImage._timeStamp = inputImage.TimeStamp();
+    _encodedImage._encodedHeight = inputImage.Height();
+    _encodedImage._encodedWidth = inputImage.Width();
+    if (inputImage.Length() > _encodedImage._size)
+    {
+
+        // allocating encoded memory
+        if (_encodedImage._buffer != NULL)
+        {
+            delete[] _encodedImage._buffer;
+            _encodedImage._buffer = NULL;
+            _encodedImage._size = 0;
+        }
+        const WebRtc_UWord32 newSize = (3 * _encodedImage._encodedWidth
+            * _encodedImage._encodedHeight) >> 1;
+        WebRtc_UWord8* newBuffer = new WebRtc_UWord8[newSize];
+        if (newBuffer == NULL)
+        {
+            return WEBRTC_VIDEO_CODEC_MEMORY;
+        }
+        _encodedImage._size = newSize;
+        _encodedImage._buffer = newBuffer;
+    }
+    assert(_encodedImage._size >= inputImage.Length());
+    memcpy(_encodedImage._buffer, inputImage.Buffer(), inputImage.Length());
+    _encodedImage._length = inputImage.Length();
+    _encodedCompleteCallback->Encoded(_encodedImage);
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+WebRtc_Word32 TbI420Encoder::RegisterEncodeCompleteCallback(
+    webrtc::EncodedImageCallback* callback)
+{
+    _functionCalls.RegisterEncodeCompleteCallback++;
+    _encodedCompleteCallback = callback;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+WebRtc_Word32 TbI420Encoder::SetPacketLoss(WebRtc_UWord32 packetLoss)
+{
+    _functionCalls.SetPacketLoss++;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+WebRtc_Word32 TbI420Encoder::SetRates(WebRtc_UWord32 newBitRate,
+                                      WebRtc_UWord32 frameRate)
+{
+    _functionCalls.SetRates++;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+WebRtc_Word32 TbI420Encoder::SetPeriodicKeyFrames(bool enable)
+{
+    _functionCalls.SetPeriodicKeyFrames++;
+    return WEBRTC_VIDEO_CODEC_ERROR;
+}
+
+WebRtc_Word32 TbI420Encoder::CodecConfigParameters(WebRtc_UWord8* /*buffer*/,
+                                                   WebRtc_Word32 /*size*/)
+{
+    _functionCalls.CodecConfigParameters++;
+    return WEBRTC_VIDEO_CODEC_ERROR;
+}
+TbI420Encoder::FunctionCalls TbI420Encoder::GetFunctionCalls()
+{
+    return _functionCalls;
+}
+
+TbI420Decoder::TbI420Decoder():
+    _decodedImage(), _width(0), _height(0), _inited(false),
+        _decodeCompleteCallback(NULL)
+{
+    memset(&_functionCalls, 0, sizeof(_functionCalls));
+}
+
+TbI420Decoder::~TbI420Decoder()
+{
+    Release();
+}
+
+WebRtc_Word32 TbI420Decoder::Reset()
+{
+    _functionCalls.Reset++;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+WebRtc_Word32 TbI420Decoder::InitDecode(const webrtc::VideoCodec* inst,
+                                        WebRtc_Word32 /*numberOfCores */)
+{
+    _functionCalls.InitDecode++;
+    if (inst == NULL)
+    {
+        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+    else if (inst->width < 1 || inst->height < 1)
+    {
+        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+    _width = inst->width;
+    _height = inst->height;
+    _inited = true;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+WebRtc_Word32 TbI420Decoder::Decode(
+    const webrtc::EncodedImage& inputImage,
+    bool /*missingFrames*/,
+    const webrtc::RTPFragmentationHeader* /*fragmentation*/,
+    const webrtc::CodecSpecificInfo* /*codecSpecificInfo*/,
+    WebRtc_Word64 /*renderTimeMs*/)
+{
+    _functionCalls.Decode++;
+    if (inputImage._buffer == NULL)
+    {
+        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+    if (_decodeCompleteCallback == NULL)
+    {
+        return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+    }
+    if (inputImage._length <= 0)
+    {
+        return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+    }
+    if (!_inited)
+    {
+        return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+    }
+
+    // Allocate memory for decoded image.
+    const WebRtc_UWord32 newSize = webrtc::CalcBufferSize(webrtc::kI420,
+                                                          _width, _height);
+    _decodedImage.VerifyAndAllocate(newSize);
+
+    // Set decoded image parameters.
+    _decodedImage.SetHeight(_height);
+    _decodedImage.SetWidth(_width);
+    _decodedImage.SetTimeStamp(inputImage._timeStamp);
+    _decodedImage.CopyFrame(inputImage._length, inputImage._buffer);
+
+    _decodeCompleteCallback->Decoded(_decodedImage);
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+WebRtc_Word32 TbI420Decoder::RegisterDecodeCompleteCallback(
+    webrtc::DecodedImageCallback* callback)
+{
+    _functionCalls.RegisterDecodeCompleteCallback++;
+    _decodeCompleteCallback = callback;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+WebRtc_Word32 TbI420Decoder::Release()
+{
+    _functionCalls.Release++;
+    _decodedImage.Free();
+    _inited = false;
+    return WEBRTC_VIDEO_CODEC_OK;
+}
+
+TbI420Decoder::FunctionCalls TbI420Decoder::GetFunctionCalls()
+{
+    return _functionCalls;
+}
diff --git a/src/video_engine/test/libvietest/testbed/tb_capture_device.cc b/src/video_engine/test/libvietest/testbed/tb_capture_device.cc
new file mode 100644
index 0000000..684bff9
--- /dev/null
+++ b/src/video_engine/test/libvietest/testbed/tb_capture_device.cc
@@ -0,0 +1,84 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/test/libvietest/include/tb_capture_device.h"
+
+#include "gtest/gtest.h"
+#include "video_engine/test/libvietest/include/tb_interfaces.h"
+
+TbCaptureDevice::TbCaptureDevice(TbInterfaces& Engine) :
+    captureId(-1),
+    ViE(Engine),
+    vcpm_(NULL)
+{
+    const unsigned int KMaxDeviceNameLength = 128;
+    const unsigned int KMaxUniqueIdLength = 256;
+    char deviceName[KMaxDeviceNameLength];
+    memset(deviceName, 0, KMaxDeviceNameLength);
+    char uniqueId[KMaxUniqueIdLength];
+    memset(uniqueId, 0, KMaxUniqueIdLength);
+
+    bool captureDeviceSet = false;
+
+    webrtc::VideoCaptureModule::DeviceInfo* devInfo =
+        webrtc::VideoCaptureFactory::CreateDeviceInfo(0);
+    for (size_t captureIdx = 0;
+        captureIdx < devInfo->NumberOfDevices();
+        captureIdx++)
+    {
+        EXPECT_EQ(0, devInfo->GetDeviceName(captureIdx, deviceName,
+                                            KMaxDeviceNameLength, uniqueId,
+                                            KMaxUniqueIdLength));
+
+        vcpm_ = webrtc::VideoCaptureFactory::Create(
+            captureIdx, uniqueId);
+        if (vcpm_ == NULL)  // Failed to open this device. Try next.
+        {
+            continue;
+        }
+        vcpm_->AddRef();
+
+        int error = ViE.capture->AllocateCaptureDevice(*vcpm_, captureId);
+        if (error == 0)
+        {
+            captureDeviceSet = true;
+            break;
+        }
+    }
+    delete devInfo;
+    EXPECT_TRUE(captureDeviceSet);
+    if (!captureDeviceSet) {
+        return;
+    }
+
+    device_name_ = deviceName;
+    EXPECT_EQ(0, ViE.capture->StartCapture(captureId));
+}
+
+TbCaptureDevice::~TbCaptureDevice(void)
+{
+    EXPECT_EQ(0, ViE.capture->StopCapture(captureId));
+    EXPECT_EQ(0, ViE.capture->ReleaseCaptureDevice(captureId));
+    vcpm_->Release();
+}
+
+void TbCaptureDevice::ConnectTo(int videoChannel)
+{
+    EXPECT_EQ(0, ViE.capture->ConnectCaptureDevice(captureId, videoChannel));
+}
+
+void TbCaptureDevice::Disconnect(int videoChannel)
+{
+    EXPECT_EQ(0, ViE.capture->DisconnectCaptureDevice(videoChannel));
+}
+
+std::string TbCaptureDevice::device_name() const {
+  return device_name_;
+}
diff --git a/src/video_engine/test/libvietest/testbed/tb_external_transport.cc b/src/video_engine/test/libvietest/testbed/tb_external_transport.cc
new file mode 100644
index 0000000..e1e30be
--- /dev/null
+++ b/src/video_engine/test/libvietest/testbed/tb_external_transport.cc
@@ -0,0 +1,445 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/test/libvietest/include/tb_external_transport.h"
+
+#include <stdio.h> // printf
+#include <stdlib.h> // rand
+#include <cassert>
+
+#if defined(WEBRTC_LINUX) || defined(__linux__)
+#include <string.h>
+#endif
+#if defined(WEBRTC_MAC)
+#include <cstring>
+#endif
+
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/event_wrapper.h"
+#include "system_wrappers/interface/thread_wrapper.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "video_engine/include/vie_network.h"
+
+#if defined(_WIN32)
+#pragma warning(disable: 4355) // 'this' : used in base member initializer list
+#endif
+
+TbExternalTransport::TbExternalTransport(webrtc::ViENetwork& vieNetwork) :
+        _vieNetwork(vieNetwork),
+        _thread(*webrtc::ThreadWrapper::CreateThread(
+            ViEExternalTransportRun, this, webrtc::kHighPriority,
+            "AutotestTransport")),
+        _event(*webrtc::EventWrapper::Create()),
+        _crit(*webrtc::CriticalSectionWrapper::CreateCriticalSection()),
+        _statCrit(*webrtc::CriticalSectionWrapper::CreateCriticalSection()),
+        _lossRate(0),
+        _networkDelayMs(0),
+        _rtpCount(0),
+        _rtcpCount(0),
+        _dropCount(0),
+        _rtpPackets(),
+        _rtcpPackets(),
+        _send_frame_callback(NULL),
+        _receive_frame_callback(NULL),
+        _temporalLayers(0),
+        _seqNum(0),
+        _sendPID(0),
+        _receivedPID(0),
+        _switchLayer(false),
+        _currentRelayLayer(0),
+        _lastTimeMs(webrtc::TickTime::MillisecondTimestamp()),
+        _checkSSRC(false),
+        _lastSSRC(0),
+        _filterSSRC(false),
+        _SSRC(0),
+        _checkSequenceNumber(0),
+        _firstSequenceNumber(0),
+        _firstRTPTimestamp(0),
+        _lastSendRTPTimestamp(0),
+        _lastReceiveRTPTimestamp(0)
+{
+    srand((int) webrtc::TickTime::MicrosecondTimestamp());
+    unsigned int tId = 0;
+    _thread.Start(tId);
+}
+
+TbExternalTransport::~TbExternalTransport()
+{
+    _thread.SetNotAlive();
+    _event.Set();
+    if (_thread.Stop())
+    {
+        delete &_thread;
+        delete &_event;
+    }
+    delete &_crit;
+    delete &_statCrit;
+}
+
+int TbExternalTransport::SendPacket(int channel, const void *data, int len)
+{
+  // Parse timestamp from RTP header according to RFC 3550, section 5.1.
+    WebRtc_UWord8* ptr = (WebRtc_UWord8*)data;
+    WebRtc_UWord32 rtp_timestamp = ptr[4] << 24;
+    rtp_timestamp += ptr[5] << 16;
+    rtp_timestamp += ptr[6] << 8;
+    rtp_timestamp += ptr[7];
+    _crit.Enter();
+    if (_firstRTPTimestamp == 0) {
+      _firstRTPTimestamp = rtp_timestamp;
+    }
+    _crit.Leave();
+    if (_send_frame_callback != NULL &&
+        _lastSendRTPTimestamp != rtp_timestamp) {
+      _send_frame_callback->FrameSent(rtp_timestamp);
+    }
+    _lastSendRTPTimestamp = rtp_timestamp;
+
+    if (_filterSSRC)
+    {
+        WebRtc_UWord8* ptr = (WebRtc_UWord8*)data;
+        WebRtc_UWord32 ssrc = ptr[8] << 24;
+        ssrc += ptr[9] << 16;
+        ssrc += ptr[10] << 8;
+        ssrc += ptr[11];
+        if (ssrc != _SSRC)
+        {
+            return len; // return len to avoid error in trace file
+        }
+    }
+    if (_temporalLayers) {
+        // parse out vp8 temporal layers
+        // 12 bytes RTP
+        WebRtc_UWord8* ptr = (WebRtc_UWord8*)data;
+
+        if (ptr[12] & 0x80 &&  // X-bit
+            ptr[13] & 0x20)  // T-bit
+        {
+            int offset = 1;
+            if (ptr[13] & 0x80) // PID-bit
+            {
+                offset++;
+                if (ptr[14] & 0x80) // 2 byte PID
+                {
+                    offset++;
+                }
+            }
+            if (ptr[13] & 0x40)
+            {
+                offset++;
+            }
+            unsigned char TID = (ptr[13 + offset] >> 5);
+            unsigned int timeMs = NowMs();
+
+            // Every 5 second switch layer
+            if (_lastTimeMs + 5000 < timeMs)
+            {
+                _lastTimeMs = timeMs;
+                _switchLayer = true;
+            }
+            // Switch at the non ref frame
+            if (_switchLayer && (ptr[12] & 0x20))
+            {   // N-bit
+              _currentRelayLayer++;
+                if (_currentRelayLayer >= _temporalLayers)
+                  _currentRelayLayer = 0;
+
+                _switchLayer = false;
+                printf("\t Switching to layer:%d\n", _currentRelayLayer);
+            }
+            if (_currentRelayLayer < TID)
+            {
+                return len; // return len to avoid error in trace file
+            }
+            if (ptr[14] & 0x80) // 2 byte PID
+            {
+                if(_receivedPID != ptr[15])
+                {
+                    _sendPID++;
+                    _receivedPID = ptr[15];
+                }
+            } else
+            {
+              if(_receivedPID != ptr[14])
+              {
+                _sendPID++;
+                _receivedPID = ptr[14];
+              }
+            }
+        }
+    }
+    _statCrit.Enter();
+    _rtpCount++;
+    _statCrit.Leave();
+
+    // Packet loss. Never drop packets from the first RTP timestamp, i.e. the
+    // first frame being transmitted.
+    int dropThis = rand() % 100;
+    if (dropThis < _lossRate && _firstRTPTimestamp != rtp_timestamp)
+    {
+        _statCrit.Enter();
+        _dropCount++;
+        _statCrit.Leave();
+        return 0;
+    }
+
+    VideoPacket* newPacket = new VideoPacket();
+    memcpy(newPacket->packetBuffer, data, len);
+
+    if (_temporalLayers)
+    {
+        // rewrite seqNum
+        newPacket->packetBuffer[2] = _seqNum >> 8;
+        newPacket->packetBuffer[3] = _seqNum;
+        _seqNum++;
+
+        // rewrite PID
+        if (newPacket->packetBuffer[14] & 0x80) // 2 byte PID
+        {
+            newPacket->packetBuffer[14] = (_sendPID >> 8) | 0x80;
+            newPacket->packetBuffer[15] = _sendPID;
+        } else
+        {
+            newPacket->packetBuffer[14] = (_sendPID & 0x7f);
+        }
+    }
+    newPacket->length = len;
+    newPacket->channel = channel;
+
+    _crit.Enter();
+    newPacket->receiveTime = NowMs() + _networkDelayMs;
+    _rtpPackets.push_back(newPacket);
+    _event.Set();
+    _crit.Leave();
+    return len;
+}
+
+void TbExternalTransport::RegisterSendFrameCallback(
+    SendFrameCallback* callback) {
+  _send_frame_callback = callback;
+}
+
+void TbExternalTransport::RegisterReceiveFrameCallback(
+    ReceiveFrameCallback* callback) {
+  _receive_frame_callback = callback;
+}
+
+// Set to 0 to disable.
+void TbExternalTransport::SetTemporalToggle(unsigned char layers)
+{
+    _temporalLayers = layers;
+}
+
+int TbExternalTransport::SendRTCPPacket(int channel, const void *data, int len)
+{
+    _statCrit.Enter();
+    _rtcpCount++;
+    _statCrit.Leave();
+
+    VideoPacket* newPacket = new VideoPacket();
+    memcpy(newPacket->packetBuffer, data, len);
+    newPacket->length = len;
+    newPacket->channel = channel;
+
+    _crit.Enter();
+    newPacket->receiveTime = NowMs() + _networkDelayMs;
+    _rtcpPackets.push_back(newPacket);
+    _event.Set();
+    _crit.Leave();
+    return len;
+}
+
+WebRtc_Word32 TbExternalTransport::SetPacketLoss(WebRtc_Word32 lossRate)
+{
+    webrtc::CriticalSectionScoped cs(_statCrit);
+    _lossRate = lossRate;
+    return 0;
+}
+
+void TbExternalTransport::SetNetworkDelay(WebRtc_Word64 delayMs)
+{
+    webrtc::CriticalSectionScoped cs(_crit);
+    _networkDelayMs = delayMs;
+}
+
+void TbExternalTransport::SetSSRCFilter(WebRtc_UWord32 ssrc)
+{
+    webrtc::CriticalSectionScoped cs(_crit);
+    _filterSSRC = true;
+    _SSRC = ssrc;
+}
+
+void TbExternalTransport::ClearStats()
+{
+    webrtc::CriticalSectionScoped cs(_statCrit);
+    _rtpCount = 0;
+    _dropCount = 0;
+    _rtcpCount = 0;
+}
+
+void TbExternalTransport::GetStats(WebRtc_Word32& numRtpPackets,
+                                   WebRtc_Word32& numDroppedPackets,
+                                   WebRtc_Word32& numRtcpPackets)
+{
+    webrtc::CriticalSectionScoped cs(_statCrit);
+    numRtpPackets = _rtpCount;
+    numDroppedPackets = _dropCount;
+    numRtcpPackets = _rtcpCount;
+}
+
+void TbExternalTransport::EnableSSRCCheck()
+{
+    webrtc::CriticalSectionScoped cs(_statCrit);
+    _checkSSRC = true;
+}
+
+unsigned int TbExternalTransport::ReceivedSSRC()
+{
+    webrtc::CriticalSectionScoped cs(_statCrit);
+    return _lastSSRC;
+}
+
+void TbExternalTransport::EnableSequenceNumberCheck()
+{
+    webrtc::CriticalSectionScoped cs(_statCrit);
+    _checkSequenceNumber = true;
+}
+
+unsigned short TbExternalTransport::GetFirstSequenceNumber()
+{
+    webrtc::CriticalSectionScoped cs(_statCrit);
+    return _firstSequenceNumber;
+}
+
+bool TbExternalTransport::ViEExternalTransportRun(void* object)
+{
+    return static_cast<TbExternalTransport*>
+        (object)->ViEExternalTransportProcess();
+}
+bool TbExternalTransport::ViEExternalTransportProcess()
+{
+    unsigned int waitTime = KMaxWaitTimeMs;
+
+    VideoPacket* packet = NULL;
+
+    while (!_rtpPackets.empty())
+    {
+        // Take first packet in queue
+        _crit.Enter();
+        packet = _rtpPackets.front();
+        WebRtc_Word64 timeToReceive = 0;
+        if (packet)
+        {
+          timeToReceive = packet->receiveTime - NowMs();
+        }
+        else
+        {
+          // There should never be any empty packets in the list.
+          assert(false);
+        }
+        if (timeToReceive > 0)
+        {
+            // No packets to receive yet
+            if (timeToReceive < waitTime && timeToReceive > 0)
+            {
+                waitTime = (unsigned int) timeToReceive;
+            }
+            _crit.Leave();
+            break;
+        }
+        _rtpPackets.pop_front();
+        _crit.Leave();
+
+        // Send to ViE
+        if (packet)
+        {
+            {
+                webrtc::CriticalSectionScoped cs(_statCrit);
+                if (_checkSSRC)
+                {
+                    _lastSSRC = ((packet->packetBuffer[8]) << 24);
+                    _lastSSRC += (packet->packetBuffer[9] << 16);
+                    _lastSSRC += (packet->packetBuffer[10] << 8);
+                    _lastSSRC += packet->packetBuffer[11];
+                    _checkSSRC = false;
+                }
+                if (_checkSequenceNumber)
+                {
+                    _firstSequenceNumber
+                        = (unsigned char) packet->packetBuffer[2] << 8;
+                    _firstSequenceNumber
+                        += (unsigned char) packet->packetBuffer[3];
+                    _checkSequenceNumber = false;
+                }
+            }
+            // Signal received packet of frame
+            WebRtc_UWord8* ptr = (WebRtc_UWord8*)packet->packetBuffer;
+            WebRtc_UWord32 rtp_timestamp = ptr[4] << 24;
+            rtp_timestamp += ptr[5] << 16;
+            rtp_timestamp += ptr[6] << 8;
+            rtp_timestamp += ptr[7];
+            if (_receive_frame_callback != NULL &&
+                _lastReceiveRTPTimestamp != rtp_timestamp) {
+              _receive_frame_callback->FrameReceived(rtp_timestamp);
+            }
+            _lastReceiveRTPTimestamp = rtp_timestamp;
+
+            _vieNetwork.ReceivedRTPPacket(packet->channel,
+                                          packet->packetBuffer, packet->length);
+            delete packet;
+            packet = NULL;
+        }
+    }
+    while (!_rtcpPackets.empty())
+    {
+        // Take first packet in queue
+        _crit.Enter();
+        packet = _rtcpPackets.front();
+        WebRtc_Word64 timeToReceive = 0;
+        if (packet)
+        {
+          timeToReceive = packet->receiveTime - NowMs();
+        }
+        else
+        {
+            // There should never be any empty packets in the list.
+            assert(false);
+        }
+        if (timeToReceive > 0)
+        {
+            // No packets to receive yet
+            if (timeToReceive < waitTime && timeToReceive > 0)
+            {
+                waitTime = (unsigned int) timeToReceive;
+            }
+            _crit.Leave();
+            break;
+        }
+        _rtcpPackets.pop_front();
+        _crit.Leave();
+
+        // Send to ViE
+        if (packet)
+        {
+            _vieNetwork.ReceivedRTCPPacket(
+                 packet->channel,
+                 packet->packetBuffer, packet->length);
+            delete packet;
+            packet = NULL;
+        }
+    }
+    _event.Wait(waitTime + 1); // Add 1 ms to not call to early...
+    return true;
+}
+
+WebRtc_Word64 TbExternalTransport::NowMs()
+{
+    return webrtc::TickTime::MillisecondTimestamp();
+}
diff --git a/src/video_engine/test/libvietest/testbed/tb_interfaces.cc b/src/video_engine/test/libvietest/testbed/tb_interfaces.cc
new file mode 100644
index 0000000..16d8902
--- /dev/null
+++ b/src/video_engine/test/libvietest/testbed/tb_interfaces.cc
@@ -0,0 +1,85 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/test/libvietest/include/tb_interfaces.h"
+
+#include "gtest/gtest.h"
+#include "testsupport/fileutils.h"
+
+TbInterfaces::TbInterfaces(const std::string& test_name) :
+    video_engine(NULL),
+    base(NULL),
+    capture(NULL),
+    render(NULL),
+    rtp_rtcp(NULL),
+    codec(NULL),
+    network(NULL),
+    image_process(NULL),
+    encryption(NULL)
+{
+    std::string complete_path =
+        webrtc::test::OutputPath() + test_name + "_trace.txt";
+
+    video_engine = webrtc::VideoEngine::Create();
+    EXPECT_TRUE(video_engine != NULL);
+
+    EXPECT_EQ(0, video_engine->SetTraceFile(complete_path.c_str()));
+    EXPECT_EQ(0, video_engine->SetTraceFilter(webrtc::kTraceAll));
+
+    base = webrtc::ViEBase::GetInterface(video_engine);
+    EXPECT_TRUE(base != NULL);
+
+    EXPECT_EQ(0, base->Init());
+
+    capture = webrtc::ViECapture::GetInterface(video_engine);
+    EXPECT_TRUE(capture != NULL);
+
+    rtp_rtcp = webrtc::ViERTP_RTCP::GetInterface(video_engine);
+    EXPECT_TRUE(rtp_rtcp != NULL);
+
+    render = webrtc::ViERender::GetInterface(video_engine);
+    EXPECT_TRUE(render != NULL);
+
+    codec = webrtc::ViECodec::GetInterface(video_engine);
+    EXPECT_TRUE(codec != NULL);
+
+    network = webrtc::ViENetwork::GetInterface(video_engine);
+    EXPECT_TRUE(network != NULL);
+
+    image_process = webrtc::ViEImageProcess::GetInterface(video_engine);
+    EXPECT_TRUE(image_process != NULL);
+
+    encryption = webrtc::ViEEncryption::GetInterface(video_engine);
+    EXPECT_TRUE(encryption != NULL);
+}
+
+TbInterfaces::~TbInterfaces(void)
+{
+    EXPECT_EQ(0, encryption->Release());
+    encryption = NULL;
+    EXPECT_EQ(0, image_process->Release());
+    image_process = NULL;
+    EXPECT_EQ(0, codec->Release());
+    codec = NULL;
+    EXPECT_EQ(0, capture->Release());
+    capture = NULL;
+    EXPECT_EQ(0, render->Release());
+    render = NULL;
+    EXPECT_EQ(0, rtp_rtcp->Release());
+    rtp_rtcp = NULL;
+    EXPECT_EQ(0, network->Release());
+    network = NULL;
+    EXPECT_EQ(0, base->Release());
+    base = NULL;
+    EXPECT_TRUE(webrtc::VideoEngine::Delete(video_engine)) <<
+        "Since we have released all interfaces at this point, deletion "
+        "should be successful.";
+    video_engine = NULL;
+}
diff --git a/src/video_engine/test/libvietest/testbed/tb_video_channel.cc b/src/video_engine/test/libvietest/testbed/tb_video_channel.cc
new file mode 100644
index 0000000..3359f18
--- /dev/null
+++ b/src/video_engine/test/libvietest/testbed/tb_video_channel.cc
@@ -0,0 +1,92 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/test/libvietest/include/tb_video_channel.h"
+
+#include "gtest/gtest.h"
+
+TbVideoChannel::TbVideoChannel(TbInterfaces& Engine,
+                               webrtc::VideoCodecType sendCodec, int width,
+                               int height, int frameRate, int startBitrate) :
+    videoChannel(-1),  ViE(Engine)
+{
+    EXPECT_EQ(0, ViE.base->CreateChannel(videoChannel));
+
+    webrtc::VideoCodec videoCodec;
+    memset(&videoCodec, 0, sizeof(webrtc::VideoCodec));
+    bool sendCodecSet = false;
+    for (int idx = 0; idx < ViE.codec->NumberOfCodecs(); idx++)
+    {
+        EXPECT_EQ(0, ViE.codec->GetCodec(idx, videoCodec));
+        videoCodec.width = width;
+        videoCodec.height = height;
+        videoCodec.maxFramerate = frameRate;
+
+        if (videoCodec.codecType == sendCodec && sendCodecSet == false)
+        {
+            if(videoCodec.codecType != webrtc::kVideoCodecI420 )
+            {
+                videoCodec.startBitrate = startBitrate;
+                videoCodec.maxBitrate = startBitrate * 3;
+            }
+            EXPECT_EQ(0, ViE.codec->SetSendCodec(videoChannel, videoCodec));
+            sendCodecSet = true;
+        }
+        if (videoCodec.codecType == webrtc::kVideoCodecVP8)
+        {
+            videoCodec.width = 352;
+            videoCodec.height = 288;
+        }
+        EXPECT_EQ(0, ViE.codec->SetReceiveCodec(videoChannel, videoCodec));
+    }
+    EXPECT_TRUE(sendCodecSet);
+}
+
+TbVideoChannel::~TbVideoChannel(void)
+{
+    EXPECT_EQ(0, ViE.base->DeleteChannel(videoChannel));
+}
+
+void TbVideoChannel::StartSend(const unsigned short rtpPort /*= 11000*/,
+                               const char* ipAddress /*= "127.0.0.1"*/)
+{
+    EXPECT_EQ(0, ViE.network->SetSendDestination(videoChannel, ipAddress,
+                                                 rtpPort));
+
+    EXPECT_EQ(0, ViE.base->StartSend(videoChannel));
+}
+
+void TbVideoChannel::SetFrameSettings(int width, int height, int frameRate)
+{
+    webrtc::VideoCodec videoCodec;
+    EXPECT_EQ(0, ViE.codec->GetSendCodec(videoChannel, videoCodec));
+    videoCodec.width = width;
+    videoCodec.height = height;
+    videoCodec.maxFramerate = frameRate;
+
+    EXPECT_EQ(0, ViE.codec->SetSendCodec(videoChannel, videoCodec));
+    EXPECT_EQ(0, ViE.codec->SetReceiveCodec(videoChannel, videoCodec));
+}
+
+void TbVideoChannel::StopSend()
+{
+    EXPECT_EQ(0, ViE.base->StopSend(videoChannel));
+}
+
+void TbVideoChannel::StartReceive(const unsigned short rtpPort /*= 11000*/)
+{
+    EXPECT_EQ(0, ViE.network->SetLocalReceiver(videoChannel, rtpPort));
+    EXPECT_EQ(0, ViE.base->StartReceive(videoChannel));
+}
+
+void TbVideoChannel::StopReceive()
+{
+    EXPECT_EQ(0, ViE.base->StopReceive(videoChannel));
+}
diff --git a/src/video_engine/video_engine.gyp b/src/video_engine/video_engine.gyp
new file mode 100644
index 0000000..c02deae
--- /dev/null
+++ b/src/video_engine/video_engine.gyp
@@ -0,0 +1,25 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'includes': [
+    '../build/common.gypi',
+    './video_engine_core.gypi',
+  ],
+
+  'conditions': [
+    ['include_tests==1', {
+      'includes': [
+        'test/libvietest/libvietest.gypi',
+        'test/auto_test/vie_auto_test.gypi',
+        'main/test/WindowsTest/windowstest.gypi',
+      ],
+    }],
+  ],
+}
+
diff --git a/src/video_engine/video_engine_core.gypi b/src/video_engine/video_engine_core.gypi
new file mode 100644
index 0000000..e24d339
--- /dev/null
+++ b/src/video_engine/video_engine_core.gypi
@@ -0,0 +1,167 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'target_defaults': {
+    'conditions': [
+      ['include_video_engine_file_api==1', {
+        'defines': [ 'WEBRTC_VIDEO_ENGINE_FILE_API', ],
+      }],
+    ],
+  },
+  'targets': [
+    {
+      'target_name': 'video_engine_core',
+      'type': '<(library)',
+      'dependencies': [
+
+        # common_video
+       '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
+       '<(webrtc_root)/common_video/common_video.gyp:webrtc_jpeg',
+
+        # ModulesShared
+        '<(webrtc_root)/modules/modules.gyp:media_file',
+        '<(webrtc_root)/modules/modules.gyp:rtp_rtcp',
+        '<(webrtc_root)/modules/modules.gyp:udp_transport',
+        '<(webrtc_root)/modules/modules.gyp:webrtc_utility',
+
+        # ModulesVideo
+        '<(webrtc_root)/modules/modules.gyp:bitrate_controller',
+        '<(webrtc_root)/modules/modules.gyp:video_capture_module',
+        '<(webrtc_root)/modules/modules.gyp:webrtc_video_coding',
+        '<(webrtc_root)/modules/modules.gyp:video_processing',
+        '<(webrtc_root)/modules/modules.gyp:video_render_module',
+
+        # VoiceEngine
+        '<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine_core',
+
+        # system_wrappers
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'include_dirs': [
+        'include',
+        '../common_video/interface',
+        '../modules/video_capture/main/interface',
+        '../modules/video_render/main/interface',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include',
+        ],
+      },
+      'sources': [
+        # interface
+        'include/vie_base.h',
+        'include/vie_capture.h',
+        'include/vie_codec.h',
+        'include/vie_encryption.h',
+        'include/vie_errors.h',
+        'include/vie_external_codec.h',
+        'include/vie_file.h',
+        'include/vie_image_process.h',
+        'include/vie_network.h',
+        'include/vie_render.h',
+        'include/vie_rtp_rtcp.h',
+
+        # headers
+        'stream_synchronization.h',
+        'vie_base_impl.h',
+        'vie_capture_impl.h',
+        'vie_codec_impl.h',
+        'vie_defines.h',
+        'vie_encryption_impl.h',
+        'vie_external_codec_impl.h',
+        'vie_file_impl.h',
+        'vie_image_process_impl.h',
+        'vie_impl.h',
+        'vie_network_impl.h',
+        'vie_ref_count.h',
+        'vie_remb.h',
+        'vie_render_impl.h',
+        'vie_rtp_rtcp_impl.h',
+        'vie_shared_data.h',
+        'vie_capturer.h',
+        'vie_channel.h',
+        'vie_channel_group.h',
+        'vie_channel_manager.h',
+        'vie_encoder.h',
+        'vie_file_image.h',
+        'vie_file_player.h',
+        'vie_file_recorder.h',
+        'vie_frame_provider_base.h',
+        'vie_input_manager.h',
+        'vie_manager_base.h',
+        'vie_performance_monitor.h',
+        'vie_receiver.h',
+        'vie_renderer.h',
+        'vie_render_manager.h',
+        'vie_sender.h',
+        'vie_sync_module.h',
+
+        # ViE
+        'stream_synchronization.cc',
+        'vie_base_impl.cc',
+        'vie_capture_impl.cc',
+        'vie_codec_impl.cc',
+        'vie_encryption_impl.cc',
+        'vie_external_codec_impl.cc',
+        'vie_file_impl.cc',
+        'vie_image_process_impl.cc',
+        'vie_impl.cc',
+        'vie_network_impl.cc',
+        'vie_ref_count.cc',
+        'vie_render_impl.cc',
+        'vie_rtp_rtcp_impl.cc',
+        'vie_shared_data.cc',
+        'vie_capturer.cc',
+        'vie_channel.cc',
+        'vie_channel_group.cc',
+        'vie_channel_manager.cc',
+        'vie_encoder.cc',
+        'vie_file_image.cc',
+        'vie_file_player.cc',
+        'vie_file_recorder.cc',
+        'vie_frame_provider_base.cc',
+        'vie_input_manager.cc',
+        'vie_manager_base.cc',
+        'vie_performance_monitor.cc',
+        'vie_receiver.cc',
+        'vie_remb.cc',
+        'vie_renderer.cc',
+        'vie_render_manager.cc',
+        'vie_sender.cc',
+        'vie_sync_module.cc',
+      ], # source
+    },
+  ], # targets
+  'conditions': [
+    ['include_tests==1', {
+      'targets': [
+        {
+          'target_name': 'video_engine_core_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'video_engine_core',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+            '<(DEPTH)/testing/gmock.gyp:gmock',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+          ],
+          'include_dirs': [
+            '..',
+            '../modules/interface',
+            '../modules/rtp_rtcp/interface',
+          ],
+          'sources': [
+            'stream_synchronization_unittest.cc',
+            'vie_remb_unittest.cc',
+          ],
+        },
+      ], # targets
+    }], # include_tests
+  ], # conditions
+}
diff --git a/src/video_engine/vie_base_impl.cc b/src/video_engine/vie_base_impl.cc
new file mode 100644
index 0000000..13a4195
--- /dev/null
+++ b/src/video_engine/vie_base_impl.cc
@@ -0,0 +1,457 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_base_impl.h"
+
+#include <sstream>
+#include <string>
+
+#include "engine_configurations.h"  // NOLINT
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "modules/video_processing/main/interface/video_processing.h"
+#include "modules/video_render/main/interface/video_render.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/vie_channel.h"
+#include "video_engine/vie_channel_manager.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_encoder.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/vie_impl.h"
+#include "video_engine/vie_input_manager.h"
+#include "video_engine/vie_performance_monitor.h"
+#include "video_engine/vie_shared_data.h"
+
+namespace webrtc {
+
+ViEBase* ViEBase::GetInterface(VideoEngine* video_engine) {
+  if (!video_engine) {
+    return NULL;
+  }
+  VideoEngineImpl* vie_impl = reinterpret_cast<VideoEngineImpl*>(video_engine);
+  ViEBaseImpl* vie_base_impl = vie_impl;
+  (*vie_base_impl)++;  // Increase ref count.
+
+  return vie_base_impl;
+}
+
+int ViEBaseImpl::Release() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_.instance_id(),
+               "ViEBase::Release()");
+  (*this)--;  // Decrease ref count.
+
+  WebRtc_Word32 ref_count = GetCount();
+  if (ref_count < 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_.instance_id(),
+                 "ViEBase release too many times");
+    shared_data_.SetLastError(kViEAPIDoesNotExist);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_.instance_id(),
+               "ViEBase reference count: %d", ref_count);
+  return ref_count;
+}
+
+ViEBaseImpl::ViEBaseImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_.instance_id(),
+               "ViEBaseImpl::ViEBaseImpl() Ctor");
+}
+
+ViEBaseImpl::~ViEBaseImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_.instance_id(),
+               "ViEBaseImpl::ViEBaseImpl() Dtor");
+}
+
+int ViEBaseImpl::Init() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_.instance_id(),
+               "Init");
+  if (shared_data_.Initialized()) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_.instance_id(),
+                 "Init called twice");
+    return 0;
+  }
+
+  shared_data_.SetInitialized();
+  return 0;
+}
+
+int ViEBaseImpl::SetVoiceEngine(VoiceEngine* voice_engine) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
+               "%s", __FUNCTION__);
+  if (!(shared_data_.Initialized())) {
+    shared_data_.SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_.instance_id());
+    return -1;
+  }
+
+  if (shared_data_.channel_manager()->SetVoiceEngine(voice_engine) != 0) {
+    shared_data_.SetLastError(kViEBaseVoEFailure);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEBaseImpl::CreateChannel(int& video_channel) {  // NOLINT
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
+               "%s", __FUNCTION__);
+
+  if (!(shared_data_.Initialized())) {
+    shared_data_.SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_.instance_id());
+    return -1;
+  }
+
+  if (shared_data_.channel_manager()->CreateChannel(&video_channel) == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s: Could not create channel", __FUNCTION__);
+    video_channel = -1;
+    shared_data_.SetLastError(kViEBaseChannelCreationFailed);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(shared_data_.instance_id()),
+               "%s: channel created: %d", __FUNCTION__, video_channel);
+  return 0;
+}
+
+int ViEBaseImpl::CreateChannel(int& video_channel,  // NOLINT
+                               int original_channel) {
+  return CreateChannel(video_channel, original_channel, true);
+}
+
+int ViEBaseImpl::CreateReceiveChannel(int& video_channel,  // NOLINT
+                                      int original_channel) {
+  return CreateChannel(video_channel, original_channel, false);
+}
+
+int ViEBaseImpl::DeleteChannel(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
+               "%s(%d)", __FUNCTION__, video_channel);
+
+  if (!(shared_data_.Initialized())) {
+    shared_data_.SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_.instance_id());
+    return -1;
+  }
+
+  {
+    ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
+    ViEChannel* vie_channel = cs.Channel(video_channel);
+    if (!vie_channel) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo,
+                   ViEId(shared_data_.instance_id()),
+                   "%s: channel %d doesn't exist", __FUNCTION__, video_channel);
+      shared_data_.SetLastError(kViEBaseInvalidChannelId);
+      return -1;
+    }
+
+    // Deregister the ViEEncoder if no other channel is using it.
+    ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+    if (cs.ChannelUsingViEEncoder(video_channel) == false) {
+      ViEInputManagerScoped is(*(shared_data_.input_manager()));
+      ViEFrameProviderBase* provider = is.FrameProvider(vie_encoder);
+      if (provider) {
+        provider->DeregisterFrameCallback(vie_encoder);
+      }
+    }
+  }
+
+  if (shared_data_.channel_manager()->DeleteChannel(video_channel) == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s: Could not delete channel %d", __FUNCTION__,
+                 video_channel);
+    shared_data_.SetLastError(kViEBaseUnknownError);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(shared_data_.instance_id()),
+               "%s: channel deleted: %d", __FUNCTION__, video_channel);
+  return 0;
+}
+
+int ViEBaseImpl::ConnectAudioChannel(const int video_channel,
+                                     const int audio_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
+               "%s(%d)", __FUNCTION__, video_channel);
+
+  if (!(shared_data_.Initialized())) {
+    shared_data_.SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_.instance_id());
+    return -1;
+  }
+
+  ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
+  if (!cs.Channel(video_channel)) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s: channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_.SetLastError(kViEBaseInvalidChannelId);
+    return -1;
+  }
+
+  if (shared_data_.channel_manager()->ConnectVoiceChannel(video_channel,
+                                                          audio_channel) != 0) {
+    shared_data_.SetLastError(kViEBaseVoEFailure);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEBaseImpl::DisconnectAudioChannel(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
+               "%s(%d)", __FUNCTION__, video_channel);
+  if (!(shared_data_.Initialized())) {
+    shared_data_.SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_.instance_id());
+    return -1;
+  }
+  ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
+  if (!cs.Channel(video_channel)) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s: channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_.SetLastError(kViEBaseInvalidChannelId);
+    return -1;
+  }
+
+  if (shared_data_.channel_manager()->DisconnectVoiceChannel(
+      video_channel) != 0) {
+    shared_data_.SetLastError(kViEBaseVoEFailure);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEBaseImpl::StartSend(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_.instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_.instance_id(), video_channel),
+                 "%s: Channel %d does not exist", __FUNCTION__, video_channel);
+    shared_data_.SetLastError(kViEBaseInvalidChannelId);
+    return -1;
+  }
+
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  assert(vie_encoder != NULL);
+  if (vie_encoder->Owner() != video_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_.instance_id(), video_channel),
+                 "Can't start ssend on a receive only channel.");
+    shared_data_.SetLastError(kViEBaseReceiveOnlyChannel);
+    return -1;
+  }
+
+  // Pause and trigger a key frame.
+  vie_encoder->Pause();
+  WebRtc_Word32 error = vie_channel->StartSend();
+  if (error != 0) {
+    vie_encoder->Restart();
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_.instance_id(), video_channel),
+                 "%s: Could not start sending on channel %d", __FUNCTION__,
+                 video_channel);
+    if (error == kViEBaseAlreadySending) {
+      shared_data_.SetLastError(kViEBaseAlreadySending);
+    }
+    shared_data_.SetLastError(kViEBaseUnknownError);
+    return -1;
+  }
+  vie_encoder->SendKeyFrame();
+  vie_encoder->Restart();
+  return 0;
+}
+
+int ViEBaseImpl::StopSend(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_.instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_.instance_id(), video_channel),
+                 "%s: Channel %d does not exist", __FUNCTION__, video_channel);
+    shared_data_.SetLastError(kViEBaseInvalidChannelId);
+    return -1;
+  }
+
+  WebRtc_Word32 error = vie_channel->StopSend();
+  if (error != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_.instance_id(), video_channel),
+                 "%s: Could not stop sending on channel %d", __FUNCTION__,
+                 video_channel);
+    if (error == kViEBaseNotSending) {
+      shared_data_.SetLastError(kViEBaseNotSending);
+    } else {
+      shared_data_.SetLastError(kViEBaseUnknownError);
+    }
+    return -1;
+  }
+  return 0;
+}
+
+int ViEBaseImpl::StartReceive(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_.instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_.instance_id(), video_channel),
+                 "%s: Channel %d does not exist", __FUNCTION__, video_channel);
+    shared_data_.SetLastError(kViEBaseInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->Receiving()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_.instance_id(), video_channel),
+                 "%s: Channel %d already receive.", __FUNCTION__,
+                 video_channel);
+    shared_data_.SetLastError(kViEBaseAlreadyReceiving);
+    return -1;
+  }
+  if (vie_channel->StartReceive() != 0) {
+    shared_data_.SetLastError(kViEBaseUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEBaseImpl::StopReceive(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_.instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_.instance_id(), video_channel),
+                 "%s: Channel %d does not exist", __FUNCTION__, video_channel);
+    shared_data_.SetLastError(kViEBaseInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->StopReceive() != 0) {
+    shared_data_.SetLastError(kViEBaseUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEBaseImpl::RegisterObserver(ViEBaseObserver& observer) {  // NOLINT
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
+               "%s", __FUNCTION__);
+  if (shared_data_.vie_performance_monitor()->ViEBaseObserverRegistered()) {
+    shared_data_.SetLastError(kViEBaseObserverAlreadyRegistered);
+    return -1;
+  }
+  return shared_data_.vie_performance_monitor()->Init(&observer);
+}
+
+int ViEBaseImpl::DeregisterObserver() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
+               "%s", __FUNCTION__);
+
+  if (!shared_data_.vie_performance_monitor()->ViEBaseObserverRegistered()) {
+    shared_data_.SetLastError(kViEBaseObserverNotRegistered);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, shared_data_.instance_id(),
+                 "%s No observer registered.", __FUNCTION__);
+    return -1;
+  }
+  shared_data_.vie_performance_monitor()->Terminate();
+  return 0;
+}
+
+int ViEBaseImpl::GetVersion(char version[1024]) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_.instance_id()),
+               "GetVersion(version=?)");
+  assert(kViEVersionMaxMessageSize == 1024);
+  if (!version) {
+    shared_data_.SetLastError(kViEBaseInvalidArgument);
+    return -1;
+  }
+
+  // Add WebRTC Version.
+  std::stringstream version_stream;
+  version_stream << "VideoEngine 3.11.0" << std::endl;
+
+  // Add build info.
+  version_stream << "Build: svn:" << WEBRTC_SVNREVISION << " " << BUILDINFO
+                 << std::endl;
+
+#ifdef WEBRTC_EXTERNAL_TRANSPORT
+  version_stream << "External transport build" << std::endl;
+#endif
+  int version_length = version_stream.tellp();
+  assert(version_length < 1024);
+  memcpy(version, version_stream.str().c_str(), version_length);
+  version[version_length] = '\0';
+
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo,
+               ViEId(shared_data_.instance_id()), "GetVersion() => %s",
+               version);
+  return 0;
+}
+
+int ViEBaseImpl::LastError() {
+  return shared_data_.LastErrorInternal();
+}
+
+int ViEBaseImpl::CreateChannel(int& video_channel,  // NOLINT
+                               int original_channel, bool sender) {
+  if (!(shared_data_.Initialized())) {
+    shared_data_.SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_.instance_id());
+    return -1;
+  }
+
+  ViEChannelManagerScoped cs(*(shared_data_.channel_manager()));
+  if (!cs.Channel(original_channel)) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s - original_channel does not exist.", __FUNCTION__,
+                 shared_data_.instance_id());
+    shared_data_.SetLastError(kViEBaseInvalidChannelId);
+    return -1;
+  }
+
+  if (shared_data_.channel_manager()->CreateChannel(&video_channel,
+                                                    original_channel,
+                                                    sender) == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_.instance_id()),
+                 "%s: Could not create channel", __FUNCTION__);
+    video_channel = -1;
+    shared_data_.SetLastError(kViEBaseChannelCreationFailed);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(shared_data_.instance_id()),
+               "%s: channel created: %d", __FUNCTION__, video_channel);
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_base_impl.h b/src/video_engine/vie_base_impl.h
new file mode 100644
index 0000000..3d914ec
--- /dev/null
+++ b/src/video_engine/vie_base_impl.h
@@ -0,0 +1,72 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_BASE_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_BASE_IMPL_H_
+
+#include "video_engine/include/vie_base.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_ref_count.h"
+#include "video_engine/vie_shared_data.h"
+
+namespace webrtc {
+
+class Module;
+class VoiceEngine;
+
+class ViEBaseImpl
+    : public ViEBase,
+      public ViERefCount {
+ public:
+  virtual int Release();
+
+  // Implements ViEBase.
+  virtual int Init();
+  virtual int SetVoiceEngine(VoiceEngine* voice_engine);
+  virtual int CreateChannel(int& video_channel);  // NOLINT
+  virtual int CreateChannel(int& video_channel,  // NOLINT
+                            int original_channel);
+  virtual int CreateReceiveChannel(int& video_channel,  // NOLINT
+                                   int original_channel);
+  virtual int DeleteChannel(const int video_channel);
+  virtual int ConnectAudioChannel(const int video_channel,
+                                  const int audio_channel);
+  virtual int DisconnectAudioChannel(const int video_channel);
+  virtual int StartSend(const int video_channel);
+  virtual int StopSend(const int video_channel);
+  virtual int StartReceive(const int video_channel);
+  virtual int StopReceive(const int video_channel);
+  virtual int RegisterObserver(ViEBaseObserver& observer);  // NOLINT
+  virtual int DeregisterObserver();
+  virtual int GetVersion(char version[1024]);
+  virtual int LastError();
+
+ protected:
+  ViEBaseImpl();
+  virtual ~ViEBaseImpl();
+
+  ViESharedData* shared_data() { return &shared_data_; }
+
+ private:
+  // Version functions.
+  WebRtc_Word32 AddViEVersion(char* str) const;
+  WebRtc_Word32 AddBuildInfo(char* str) const;
+  WebRtc_Word32 AddExternalTransportBuild(char* str) const;
+
+  int CreateChannel(int& video_channel, int original_channel,  // NOLINT
+                    bool sender);
+
+  // ViEBaseImpl owns ViESharedData used by all interface implementations.
+  ViESharedData shared_data_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_BASE_IMPL_H_
diff --git a/src/video_engine/vie_capture_impl.cc b/src/video_engine/vie_capture_impl.cc
new file mode 100644
index 0000000..410720d
--- /dev/null
+++ b/src/video_engine/vie_capture_impl.cc
@@ -0,0 +1,596 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_capture_impl.h"
+
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/vie_capturer.h"
+#include "video_engine/vie_channel.h"
+#include "video_engine/vie_channel_manager.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_encoder.h"
+#include "video_engine/vie_impl.h"
+#include "video_engine/vie_input_manager.h"
+#include "video_engine/vie_shared_data.h"
+
+namespace webrtc {
+
+ViECapture* ViECapture::GetInterface(VideoEngine* video_engine) {
+#ifdef WEBRTC_VIDEO_ENGINE_CAPTURE_API
+  if (!video_engine) {
+    return NULL;
+  }
+  VideoEngineImpl* vie_impl = reinterpret_cast<VideoEngineImpl*>(video_engine);
+  ViECaptureImpl* vie_capture_impl = vie_impl;
+  // Increase ref count.
+  (*vie_capture_impl)++;
+  return vie_capture_impl;
+#else
+  return NULL;
+#endif
+}
+
+int ViECaptureImpl::Release() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
+               "ViECapture::Release()");
+  // Decrease ref count
+  (*this)--;
+
+  WebRtc_Word32 ref_count = GetCount();
+  if (ref_count < 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
+                 "ViECapture release too many times");
+    shared_data_->SetLastError(kViEAPIDoesNotExist);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
+               "ViECapture reference count: %d", ref_count);
+  return ref_count;
+}
+
+ViECaptureImpl::ViECaptureImpl(ViESharedData* shared_data)
+    : shared_data_(shared_data) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViECaptureImpl::ViECaptureImpl() Ctor");
+}
+
+ViECaptureImpl::~ViECaptureImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViECaptureImpl::~ViECaptureImpl() Dtor");
+}
+
+int ViECaptureImpl::NumberOfCaptureDevices() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s", __FUNCTION__);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  return  shared_data_->input_manager()->NumberOfCaptureDevices();
+}
+
+
+int ViECaptureImpl::GetCaptureDevice(unsigned int list_number,
+                                     char* device_nameUTF8,
+                                     unsigned int device_nameUTF8Length,
+                                     char* unique_idUTF8,
+                                     unsigned int unique_idUTF8Length) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(list_number: %d)", __FUNCTION__, list_number);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  return shared_data_->input_manager()->GetDeviceName(
+      list_number,
+      device_nameUTF8, device_nameUTF8Length,
+      unique_idUTF8, unique_idUTF8Length);
+}
+
+int ViECaptureImpl::AllocateCaptureDevice(
+  const char* unique_idUTF8,
+  const unsigned int unique_idUTF8Length,
+  int& capture_id) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(unique_idUTF8: %s)", __FUNCTION__, unique_idUTF8);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  const WebRtc_Word32 result =
+      shared_data_->input_manager()->CreateCaptureDevice(
+          unique_idUTF8,
+          static_cast<const WebRtc_UWord32>(unique_idUTF8Length),
+          capture_id);
+  if (result != 0) {
+    shared_data_->SetLastError(result);
+    return -1;
+  }
+  return 0;
+}
+
+int ViECaptureImpl::AllocateExternalCaptureDevice(
+  int& capture_id, ViEExternalCapture*& external_capture) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s", __FUNCTION__);
+
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  const WebRtc_Word32 result =
+      shared_data_->input_manager()->CreateExternalCaptureDevice(
+          external_capture, capture_id);
+
+  if (result != 0) {
+    shared_data_->SetLastError(result);
+    return -1;
+  }
+  return 0;
+}
+
+int ViECaptureImpl::AllocateCaptureDevice(
+    VideoCaptureModule& capture_module, int& capture_id) {  // NOLINT
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s", __FUNCTION__);
+
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  const WebRtc_Word32 result =
+      shared_data_->input_manager()->CreateCaptureDevice(&capture_module,
+                                                         capture_id);
+  if (result != 0) {
+    shared_data_->SetLastError(result);
+    return -1;
+  }
+  return 0;
+}
+
+
+int ViECaptureImpl::ReleaseCaptureDevice(const int capture_id) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(capture_id: %d)", __FUNCTION__, capture_id);
+  {
+    ViEInputManagerScoped is((*(shared_data_->input_manager())));
+    ViECapturer* vie_capture = is.Capture(capture_id);
+    if (!vie_capture) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                   "%s: Capture device %d doesn't exist", __FUNCTION__,
+                   capture_id);
+      shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
+      return -1;
+    }
+  }
+
+  // Destroy the capture device.
+  return shared_data_->input_manager()->DestroyCaptureDevice(capture_id);
+}
+
+int ViECaptureImpl::ConnectCaptureDevice(const int capture_id,
+                                         const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(capture_id: %d, video_channel: %d)", __FUNCTION__,
+               capture_id, video_channel);
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* vie_capture = is.Capture(capture_id);
+  if (!vie_capture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
+    return -1;
+  }
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViECaptureDeviceInvalidChannelId);
+    return -1;
+  }
+  if (vie_encoder->Owner() != video_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Can't connect capture device to a receive only channel.");
+    shared_data_->SetLastError(kViECaptureDeviceInvalidChannelId);
+    return -1;
+  }
+  //  Check if the encoder already has a connected frame provider
+  if (is.FrameProvider(vie_encoder) != NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d already connected to a capture device.",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECaptureDeviceAlreadyConnected);
+    return -1;
+  }
+  VideoCodec codec;
+  bool use_hardware_encoder = false;
+  if (vie_encoder->GetEncoder(&codec) == 0) {
+    // Try to provide the encoder with pre-encoded frames if possible.
+    if (vie_capture->PreEncodeToViEEncoder(codec, *vie_encoder,
+                                           video_channel) == 0) {
+      use_hardware_encoder = true;
+    }
+  }
+  // If we don't use the camera as hardware encoder, we register the vie_encoder
+  // for callbacks.
+  if (!use_hardware_encoder &&
+      vie_capture->RegisterFrameCallback(video_channel, vie_encoder) != 0) {
+    shared_data_->SetLastError(kViECaptureDeviceUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+
+int ViECaptureImpl::DisconnectCaptureDevice(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id()),
+                 "%s: Channel %d doesn't exist", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViECaptureDeviceInvalidChannelId);
+    return -1;
+  }
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViEFrameProviderBase* frame_provider = is.FrameProvider(vie_encoder);
+  if (!frame_provider) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo,
+                 ViEId(shared_data_->instance_id()),
+                 "%s: No capture device connected to channel %d",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECaptureDeviceNotConnected);
+    return -1;
+  }
+  if (frame_provider->Id() < kViECaptureIdBase ||
+      frame_provider->Id() > kViECaptureIdMax) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo,
+                 ViEId(shared_data_->instance_id()),
+                 "%s: No capture device connected to channel %d",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECaptureDeviceNotConnected);
+    return -1;
+  }
+
+  if (frame_provider->DeregisterFrameCallback(vie_encoder) != 0) {
+    shared_data_->SetLastError(kViECaptureDeviceUnknownError);
+    return -1;
+  }
+
+  return 0;
+}
+
+int ViECaptureImpl::StartCapture(const int capture_id,
+                                 const CaptureCapability& capture_capability) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(capture_id: %d)", __FUNCTION__, capture_id);
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* vie_capture = is.Capture(capture_id);
+  if (!vie_capture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), capture_id),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
+    return -1;
+  }
+  if (vie_capture->Started()) {
+    shared_data_->SetLastError(kViECaptureDeviceAlreadyStarted);
+    return -1;
+  }
+  if (vie_capture->Start(capture_capability) != 0) {
+    shared_data_->SetLastError(kViECaptureDeviceUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViECaptureImpl::StopCapture(const int capture_id) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(capture_id: %d)", __FUNCTION__, capture_id);
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* vie_capture = is.Capture(capture_id);
+  if (!vie_capture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), capture_id),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
+    return -1;
+  }
+  if (!vie_capture->Started()) {
+    shared_data_->SetLastError(kViECaptureDeviceNotStarted);
+    return -1;
+  }
+  if (vie_capture->Stop() != 0) {
+    shared_data_->SetLastError(kViECaptureDeviceUnknownError);
+    return -1;
+  }
+
+  return 0;
+}
+
+int ViECaptureImpl::SetRotateCapturedFrames(
+    const int capture_id,
+    const RotateCapturedFrame rotation) {
+  int i_rotation = -1;
+  switch (rotation) {
+    case RotateCapturedFrame_0:
+      i_rotation = 0;
+      break;
+    case RotateCapturedFrame_90:
+      i_rotation = 90;
+      break;
+    case RotateCapturedFrame_180:
+      i_rotation = 180;
+      break;
+    case RotateCapturedFrame_270:
+      i_rotation = 270;
+      break;
+  }
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(rotation: %d)", __FUNCTION__, i_rotation);
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* vie_capture = is.Capture(capture_id);
+  if (!vie_capture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), capture_id),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
+    return -1;
+  }
+  if (vie_capture->SetRotateCapturedFrames(rotation) != 0) {
+    shared_data_->SetLastError(kViECaptureDeviceUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViECaptureImpl::SetCaptureDelay(const int capture_id,
+                                    const unsigned int capture_delay_ms) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(capture_id: %d, capture_delay_ms %u)", __FUNCTION__,
+               capture_id, capture_delay_ms);
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* vie_capture = is.Capture(capture_id);
+  if (!vie_capture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), capture_id),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
+    return -1;
+  }
+
+  if (vie_capture->SetCaptureDelay(capture_delay_ms) != 0) {
+    shared_data_->SetLastError(kViECaptureDeviceUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViECaptureImpl::NumberOfCapabilities(
+    const char* unique_idUTF8,
+    const unsigned int unique_idUTF8Length) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(capture_device_name: %s)", __FUNCTION__, unique_idUTF8);
+
+#if defined(WEBRTC_MAC_INTEL)
+  // TODO(mflodman) Move to capture module!
+  // QTKit framework handles all capabilities and capture settings
+  // automatically (mandatory).
+  // Thus this function cannot be supported on the Mac platform.
+  shared_data_->SetLastError(kViECaptureDeviceMacQtkitNotSupported);
+  WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s This API is not supported on Mac OS", __FUNCTION__,
+               shared_data_->instance_id());
+  return -1;
+#endif
+
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  return shared_data_->input_manager()->NumberOfCaptureCapabilities(
+      unique_idUTF8);
+}
+
+
+int ViECaptureImpl::GetCaptureCapability(const char* unique_idUTF8,
+                                         const unsigned int unique_idUTF8Length,
+                                         const unsigned int capability_number,
+                                         CaptureCapability& capability) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(capture_device_name: %s)", __FUNCTION__, unique_idUTF8);
+
+#if defined(WEBRTC_MAC_INTEL)
+  // TODO(mflodman) Move to capture module!
+  // QTKit framework handles all capabilities and capture settings
+  // automatically (mandatory).
+  // Thus this function cannot be supported on the Mac platform.
+  shared_data_->SetLastError(kViECaptureDeviceMacQtkitNotSupported);
+  WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s This API is not supported on Mac OS", __FUNCTION__,
+               shared_data_->instance_id());
+  return -1;
+#endif
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  if (shared_data_->input_manager()->GetCaptureCapability(
+          unique_idUTF8, capability_number, capability) != 0) {
+    shared_data_->SetLastError(kViECaptureDeviceUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViECaptureImpl::ShowCaptureSettingsDialogBox(
+    const char* unique_idUTF8,
+    const unsigned int unique_idUTF8Length,
+    const char* dialog_title,
+    void* parent_window,
+    const unsigned int x,
+    const unsigned int y) {
+#if defined(WEBRTC_MAC_INTEL)
+  // TODO(mflodman) Move to capture module
+  // QTKit framework handles all capabilities and capture settings
+  // automatically (mandatory).
+  // Thus this function cannot be supported on the Mac platform.
+  shared_data_->SetLastError(kViECaptureDeviceMacQtkitNotSupported);
+  WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s This API is not supported on Mac OS", __FUNCTION__,
+               shared_data_->instance_id());
+  return -1;
+#endif
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s capture_id (capture_device_name: %s)", __FUNCTION__,
+               unique_idUTF8);
+
+  return shared_data_->input_manager()->DisplayCaptureSettingsDialogBox(
+           unique_idUTF8, dialog_title,
+           parent_window, x, y);
+}
+
+int ViECaptureImpl::GetOrientation(const char* unique_idUTF8,
+                                   RotateCapturedFrame& orientation) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s (capture_device_name: %s)", __FUNCTION__, unique_idUTF8);
+
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  if (shared_data_->input_manager()->GetOrientation(
+      unique_idUTF8,
+      orientation) != 0) {
+    shared_data_->SetLastError(kViECaptureDeviceUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+
+int ViECaptureImpl::EnableBrightnessAlarm(const int capture_id,
+                                          const bool enable) {
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* vie_capture = is.Capture(capture_id);
+  if (!vie_capture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), capture_id),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
+    return -1;
+  }
+  if (vie_capture->EnableBrightnessAlarm(enable) != 0) {
+    shared_data_->SetLastError(kViECaptureDeviceUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViECaptureImpl::RegisterObserver(const int capture_id,
+                                     ViECaptureObserver& observer) {
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* vie_capture = is.Capture(capture_id);
+  if (!vie_capture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), capture_id),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
+    return -1;
+  }
+  if (vie_capture->IsObserverRegistered()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), capture_id),
+                 "%s: Observer already registered", __FUNCTION__);
+    shared_data_->SetLastError(kViECaptureObserverAlreadyRegistered);
+    return -1;
+  }
+  if (vie_capture->RegisterObserver(&observer) != 0) {
+    shared_data_->SetLastError(kViECaptureDeviceUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViECaptureImpl::DeregisterObserver(const int capture_id) {
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* vie_capture = is.Capture(capture_id);
+  if (!vie_capture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), capture_id),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViECaptureDeviceDoesNotExist);
+    return -1;
+  }
+  if (!vie_capture->IsObserverRegistered()) {
+    shared_data_->SetLastError(kViECaptureDeviceObserverNotRegistered);
+    return -1;
+  }
+
+  if (vie_capture->DeRegisterObserver() != 0) {
+    shared_data_->SetLastError(kViECaptureDeviceUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_capture_impl.h b/src/video_engine/vie_capture_impl.h
new file mode 100644
index 0000000..fd6589d
--- /dev/null
+++ b/src/video_engine/vie_capture_impl.h
@@ -0,0 +1,81 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_CAPTURE_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_CAPTURE_IMPL_H_
+
+#include "typedefs.h"  // NOLINT
+#include "video_engine/include/vie_capture.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_ref_count.h"
+
+namespace webrtc {
+
+class ViESharedData;
+
+class ViECaptureImpl
+    : public ViECapture,
+      public ViERefCount {
+ public:
+  // Implements ViECapture.
+  virtual int Release();
+  virtual int NumberOfCaptureDevices();
+  virtual int GetCaptureDevice(unsigned int list_number, char* device_nameUTF8,
+                               const unsigned int device_nameUTF8Length,
+                               char* unique_idUTF8,
+                               const unsigned int unique_idUTF8Length);
+  virtual int AllocateCaptureDevice(const char* unique_idUTF8,
+                                    const unsigned int unique_idUTF8Length,
+                                    int& capture_id);
+  virtual int AllocateCaptureDevice(
+      VideoCaptureModule& capture_module, int& capture_id);  // NOLINT
+  virtual int AllocateExternalCaptureDevice(
+      int& capture_id, ViEExternalCapture *&external_capture);
+  virtual int ReleaseCaptureDevice(const int capture_id);
+
+  virtual int ConnectCaptureDevice(const int capture_id,
+                                   const int video_channel);
+  virtual int DisconnectCaptureDevice(const int video_channel);
+  virtual int StartCapture(
+      const int capture_id,
+      const CaptureCapability& capture_capability = CaptureCapability());
+  virtual int StopCapture(const int capture_id);
+  virtual int SetRotateCapturedFrames(const int capture_id,
+                                      const RotateCapturedFrame rotation);
+  virtual int SetCaptureDelay(const int capture_id,
+                              const unsigned int capture_delay_ms);
+  virtual int NumberOfCapabilities(const char* unique_idUTF8,
+                                   const unsigned int unique_idUTF8Length);
+  virtual int GetCaptureCapability(const char* unique_idUTF8,
+                                   const unsigned int unique_idUTF8Length,
+                                   const unsigned int capability_number,
+                                   CaptureCapability& capability);
+  virtual int ShowCaptureSettingsDialogBox(
+    const char* unique_idUTF8, const unsigned int unique_idUTF8Length,
+    const char* dialog_title, void* parent_window = NULL,
+    const unsigned int x = 200, const unsigned int y = 200);
+  virtual int GetOrientation(const char* unique_idUTF8,
+                             RotateCapturedFrame& orientation);
+  virtual int EnableBrightnessAlarm(const int capture_id, const bool enable);
+  virtual int RegisterObserver(const int capture_id,
+                               ViECaptureObserver& observer);
+  virtual int DeregisterObserver(const int capture_id);
+
+ protected:
+  explicit ViECaptureImpl(ViESharedData* shared_data);
+  virtual ~ViECaptureImpl();
+
+ private:
+  ViESharedData* shared_data_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_CAPTURE_IMPL_H_
diff --git a/src/video_engine/vie_capturer.cc b/src/video_engine/vie_capturer.cc
new file mode 100644
index 0000000..498e8ea
--- /dev/null
+++ b/src/video_engine/vie_capturer.cc
@@ -0,0 +1,901 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_capturer.h"
+
+#include "modules/interface/module_common_types.h"
+#include "modules/utility/interface/process_thread.h"
+#include "modules/video_capture/main/interface/video_capture_factory.h"
+#include "modules/video_processing/main/interface/video_processing.h"
+#include "modules/video_render/main/interface/video_render_defines.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/event_wrapper.h"
+#include "system_wrappers/interface/thread_wrapper.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_image_process.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_encoder.h"
+
+namespace webrtc {
+
+const int kThreadWaitTimeMs = 100;
+const int kMaxDeliverWaitTime = 500;
+
+ViECapturer::ViECapturer(int capture_id,
+                         int engine_id,
+                         ProcessThread& module_process_thread)
+    : ViEFrameProviderBase(capture_id, engine_id),
+      capture_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      deliver_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      capture_module_(NULL),
+      external_capture_module_(NULL),
+      module_process_thread_(module_process_thread),
+      capture_id_(capture_id),
+      capture_thread_(*ThreadWrapper::CreateThread(ViECaptureThreadFunction,
+                                                   this, kHighPriority,
+                                                   "ViECaptureThread")),
+      capture_event_(*EventWrapper::Create()),
+      deliver_event_(*EventWrapper::Create()),
+      effect_filter_(NULL),
+      image_proc_module_(NULL),
+      image_proc_module_ref_counter_(0),
+      deflicker_frame_stats_(NULL),
+      brightness_frame_stats_(NULL),
+      current_brightness_level_(Normal),
+      reported_brightness_level_(Normal),
+      denoising_enabled_(false),
+      observer_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      observer_(NULL),
+      encoding_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      capture_encoder_(NULL),
+      encode_complete_callback_(NULL),
+      vie_encoder_(NULL),
+      vcm_(NULL),
+      decoder_initialized_(false) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id, capture_id),
+               "ViECapturer::ViECapturer(capture_id: %d, engine_id: %d)",
+               capture_id, engine_id);
+  unsigned int t_id = 0;
+  if (capture_thread_.Start(t_id)) {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id, capture_id),
+                 "%s: thread started: %u", __FUNCTION__, t_id);
+  } else {
+    assert(false);
+  }
+}
+
+ViECapturer::~ViECapturer() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "ViECapturer::~ViECapturer() - capture_id: %d, engine_id: %d",
+               capture_id_, engine_id_);
+
+  // Stop the thread.
+  deliver_cs_->Enter();
+  capture_cs_->Enter();
+  capture_thread_.SetNotAlive();
+  capture_event_.Set();
+  capture_cs_->Leave();
+  deliver_cs_->Leave();
+
+  provider_cs_->Enter();
+  if (vie_encoder_) {
+    vie_encoder_->DeRegisterExternalEncoder(codec_.plType);
+  }
+  provider_cs_->Leave();
+
+  // Stop the camera input.
+  if (capture_module_) {
+    module_process_thread_.DeRegisterModule(capture_module_);
+    capture_module_->DeRegisterCaptureDataCallback();
+    capture_module_->Release();
+    capture_module_ = NULL;
+  }
+  if (capture_thread_.Stop()) {
+    // Thread stopped.
+    delete &capture_thread_;
+    delete &capture_event_;
+    delete &deliver_event_;
+  } else {
+    assert(false);
+    WEBRTC_TRACE(kTraceMemory, kTraceVideoRenderer,
+                 ViEId(engine_id_, capture_id_),
+                 "%s: Not able to stop capture thread for device %d, leaking",
+                 __FUNCTION__, capture_id_);
+  }
+
+  if (image_proc_module_) {
+    VideoProcessingModule::Destroy(image_proc_module_);
+  }
+  if (deflicker_frame_stats_) {
+    delete deflicker_frame_stats_;
+    deflicker_frame_stats_ = NULL;
+  }
+  delete brightness_frame_stats_;
+  if (vcm_) {
+    delete vcm_;
+  }
+}
+
+ViECapturer* ViECapturer::CreateViECapture(
+    int capture_id,
+    int engine_id,
+    VideoCaptureModule* capture_module,
+    ProcessThread& module_process_thread) {
+  ViECapturer* capture = new ViECapturer(capture_id, engine_id,
+                                         module_process_thread);
+  if (!capture || capture->Init(capture_module) != 0) {
+    delete capture;
+    capture = NULL;
+  }
+  return capture;
+}
+
+WebRtc_Word32 ViECapturer::Init(VideoCaptureModule* capture_module) {
+  assert(capture_module_ == NULL);
+  capture_module_ = capture_module;
+  capture_module_->RegisterCaptureDataCallback(*this);
+  capture_module_->AddRef();
+  if (module_process_thread_.RegisterModule(capture_module_) != 0) {
+    return -1;
+  }
+
+  return 0;
+}
+
+ViECapturer* ViECapturer::CreateViECapture(
+    int capture_id,
+    int engine_id,
+    const char* device_unique_idUTF8,
+    const WebRtc_UWord32 device_unique_idUTF8Length,
+    ProcessThread& module_process_thread) {
+  ViECapturer* capture = new ViECapturer(capture_id, engine_id,
+                                         module_process_thread);
+  if (!capture ||
+      capture->Init(device_unique_idUTF8, device_unique_idUTF8Length) != 0) {
+    delete capture;
+    capture = NULL;
+  }
+  return capture;
+}
+
+WebRtc_Word32 ViECapturer::Init(
+    const char* device_unique_idUTF8,
+    const WebRtc_UWord32 device_unique_idUTF8Length) {
+  assert(capture_module_ == NULL);
+  if (device_unique_idUTF8 == NULL) {
+    capture_module_  = VideoCaptureFactory::Create(
+        ViEModuleId(engine_id_, capture_id_), external_capture_module_);
+  } else {
+    capture_module_ = VideoCaptureFactory::Create(
+        ViEModuleId(engine_id_, capture_id_), device_unique_idUTF8);
+  }
+  if (!capture_module_) {
+    return -1;
+  }
+  capture_module_->AddRef();
+  capture_module_->RegisterCaptureDataCallback(*this);
+  if (module_process_thread_.RegisterModule(capture_module_) != 0) {
+    return -1;
+  }
+
+  return 0;
+}
+
+int ViECapturer::FrameCallbackChanged() {
+  if (Started() && !EncoderActive() && !CaptureCapabilityFixed()) {
+    // Reconfigure the camera if a new size is required and the capture device
+    // does not provide encoded frames.
+    int best_width;
+    int best_height;
+    int best_frame_rate;
+    VideoCaptureCapability capture_settings;
+    capture_module_->CaptureSettings(capture_settings);
+    GetBestFormat(&best_width, &best_height, &best_frame_rate);
+    if (best_width != 0 && best_height != 0 && best_frame_rate != 0) {
+      if (best_width != capture_settings.width ||
+          best_height != capture_settings.height ||
+          best_frame_rate != capture_settings.maxFPS ||
+          capture_settings.codecType != kVideoCodecUnknown) {
+        Stop();
+        Start(requested_capability_);
+      }
+    }
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViECapturer::Start(const CaptureCapability& capture_capability) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
+               __FUNCTION__);
+  int width;
+  int height;
+  int frame_rate;
+  VideoCaptureCapability capability;
+  requested_capability_ = capture_capability;
+  if (EncoderActive()) {
+    CriticalSectionScoped cs(encoding_cs_.get());
+    capability.width = codec_.width;
+    capability.height = codec_.height;
+    capability.maxFPS = codec_.maxFramerate;
+    capability.codecType = codec_.codecType;
+    capability.rawType = kVideoI420;
+
+  } else if (!CaptureCapabilityFixed()) {
+    // Ask the observers for best size.
+    GetBestFormat(&width, &height, &frame_rate);
+    if (width == 0) {
+      width = kViECaptureDefaultWidth;
+    }
+    if (height == 0) {
+      height = kViECaptureDefaultHeight;
+    }
+    if (frame_rate == 0) {
+      frame_rate = kViECaptureDefaultFramerate;
+    }
+    capability.height = height;
+    capability.width = width;
+    capability.maxFPS = frame_rate;
+    capability.rawType = kVideoI420;
+    capability.codecType = kVideoCodecUnknown;
+  } else {
+    // Width, height and type specified with call to Start, not set by
+    // observers.
+    capability.width = requested_capability_.width;
+    capability.height = requested_capability_.height;
+    capability.maxFPS = requested_capability_.maxFPS;
+    capability.rawType = requested_capability_.rawType;
+    capability.interlaced = requested_capability_.interlaced;
+  }
+  return capture_module_->StartCapture(capability);
+}
+
+WebRtc_Word32 ViECapturer::Stop() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
+               __FUNCTION__);
+  requested_capability_ = CaptureCapability();
+  return capture_module_->StopCapture();
+}
+
+bool ViECapturer::Started() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_), "%s",
+               __FUNCTION__);
+  return capture_module_->CaptureStarted();
+}
+
+const char* ViECapturer::CurrentDeviceName() const {
+  return capture_module_->CurrentDeviceName();
+}
+
+WebRtc_Word32 ViECapturer::SetCaptureDelay(WebRtc_Word32 delay_ms) {
+  return capture_module_->SetCaptureDelay(delay_ms);
+}
+
+WebRtc_Word32 ViECapturer::SetRotateCapturedFrames(
+  const RotateCapturedFrame rotation) {
+  VideoCaptureRotation converted_rotation = kCameraRotate0;
+  switch (rotation) {
+    case RotateCapturedFrame_0:
+      converted_rotation = kCameraRotate0;
+      break;
+    case RotateCapturedFrame_90:
+      converted_rotation = kCameraRotate90;
+      break;
+    case RotateCapturedFrame_180:
+      converted_rotation = kCameraRotate180;
+      break;
+    case RotateCapturedFrame_270:
+      converted_rotation = kCameraRotate270;
+      break;
+  }
+  return capture_module_->SetCaptureRotation(converted_rotation);
+}
+
+int ViECapturer::IncomingFrame(unsigned char* video_frame,
+                               unsigned int video_frame_length,
+                               uint16_t width,
+                               uint16_t height,
+                               RawVideoType video_type,
+                               unsigned long long capture_time) {  // NOLINT
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "ExternalCapture::IncomingFrame width %d, height %d, "
+               "capture_time %u", width, height, capture_time);
+
+  if (!external_capture_module_) {
+    return -1;
+  }
+  VideoCaptureCapability capability;
+  capability.width = width;
+  capability.height = height;
+  capability.rawType = video_type;
+  return external_capture_module_->IncomingFrame(video_frame,
+                                                 video_frame_length,
+                                                 capability, capture_time);
+}
+
+int ViECapturer::IncomingFrameI420(const ViEVideoFrameI420& video_frame,
+                                   unsigned long long capture_time) {  // NOLINT
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "ExternalCapture::IncomingFrame width %d, height %d, "
+               " capture_time %u", video_frame.width, video_frame.height,
+               capture_time);
+
+  if (!external_capture_module_) {
+    return -1;
+  }
+
+  VideoFrameI420 frame;
+  frame.width = video_frame.width;
+  frame.height = video_frame.height;
+  frame.y_plane = video_frame.y_plane;
+  frame.u_plane = video_frame.u_plane;
+  frame.v_plane = video_frame.v_plane;
+  frame.y_pitch = video_frame.y_pitch;
+  frame.u_pitch = video_frame.u_pitch;
+  frame.v_pitch = video_frame.v_pitch;
+
+  return external_capture_module_->IncomingFrameI420(frame, capture_time);
+}
+
+void ViECapturer::OnIncomingCapturedFrame(const WebRtc_Word32 capture_id,
+                                          VideoFrame& video_frame,
+                                          VideoCodecType codec_type) {
+  WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_id: %d)", __FUNCTION__, capture_id);
+
+  CriticalSectionScoped cs(capture_cs_.get());
+  if (codec_type != kVideoCodecUnknown) {
+    if (encoded_frame_.Length() != 0) {
+      // The last encoded frame has not been sent yet. Need to wait.
+      deliver_event_.Reset();
+      WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, capture_id_),
+                   "%s(capture_id: %d) Last encoded frame not yet delivered.",
+                   __FUNCTION__, capture_id);
+      capture_cs_->Leave();
+      // Wait for the coded frame to be sent before unblocking this.
+      deliver_event_.Wait(kMaxDeliverWaitTime);
+      assert(encoded_frame_.Length() == 0);
+      capture_cs_->Enter();
+    }
+    encoded_frame_.SwapFrame(video_frame);
+  } else {
+    captured_frame_.SwapFrame(video_frame);
+  }
+  capture_event_.Set();
+  return;
+}
+
+void ViECapturer::OnCaptureDelayChanged(const WebRtc_Word32 id,
+                                        const WebRtc_Word32 delay) {
+  WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_id: %d) delay %d", __FUNCTION__, capture_id_,
+               delay);
+
+  // Deliver the network delay to all registered callbacks.
+  ViEFrameProviderBase::SetFrameDelay(delay);
+  CriticalSectionScoped cs(encoding_cs_.get());
+  if (vie_encoder_) {
+    vie_encoder_->DelayChanged(id, delay);
+  }
+}
+
+WebRtc_Word32 ViECapturer::RegisterEffectFilter(
+    ViEEffectFilter* effect_filter) {
+  CriticalSectionScoped cs(deliver_cs_.get());
+
+  if (!effect_filter) {
+    if (!effect_filter_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
+                   "%s: no effect filter added for capture device %d",
+                   __FUNCTION__, capture_id_);
+      return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+                 "%s: deregister effect filter for device %d", __FUNCTION__,
+                 capture_id_);
+  } else {
+    if (effect_filter_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
+                   "%s: effect filter already added for capture device %d",
+                   __FUNCTION__, capture_id_);
+      return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+                 "%s: register effect filter for device %d", __FUNCTION__,
+                 capture_id_);
+  }
+  effect_filter_ = effect_filter;
+  return 0;
+}
+
+WebRtc_Word32 ViECapturer::IncImageProcRefCount() {
+  if (!image_proc_module_) {
+    assert(image_proc_module_ref_counter_ == 0);
+    image_proc_module_ = VideoProcessingModule::Create(
+        ViEModuleId(engine_id_, capture_id_));
+    if (!image_proc_module_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
+                   "%s: could not create video processing module",
+                   __FUNCTION__);
+      return -1;
+    }
+  }
+  image_proc_module_ref_counter_++;
+  return 0;
+}
+
+WebRtc_Word32 ViECapturer::DecImageProcRefCount() {
+  image_proc_module_ref_counter_--;
+  if (image_proc_module_ref_counter_ == 0) {
+    // Destroy module.
+    VideoProcessingModule::Destroy(image_proc_module_);
+    image_proc_module_ = NULL;
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViECapturer::EnableDenoising(bool enable) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
+               capture_id_, enable);
+
+  CriticalSectionScoped cs(deliver_cs_.get());
+  if (enable) {
+    if (denoising_enabled_) {
+      // Already enabled, nothing need to be done.
+      return 0;
+    }
+    denoising_enabled_ = true;
+    if (IncImageProcRefCount() != 0) {
+      return -1;
+    }
+  } else {
+    if (denoising_enabled_ == false) {
+      // Already disabled, nothing need to be done.
+      return 0;
+    }
+    denoising_enabled_ = false;
+    DecImageProcRefCount();
+  }
+
+  return 0;
+}
+
+WebRtc_Word32 ViECapturer::EnableDeflickering(bool enable) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
+               capture_id_, enable);
+
+  CriticalSectionScoped cs(deliver_cs_.get());
+  if (enable) {
+    if (deflicker_frame_stats_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
+                   "%s: deflickering already enabled", __FUNCTION__);
+      return -1;
+    }
+    if (IncImageProcRefCount() != 0) {
+      return -1;
+    }
+    deflicker_frame_stats_ = new VideoProcessingModule::FrameStats();
+  } else {
+    if (deflicker_frame_stats_ == NULL) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
+                   "%s: deflickering not enabled", __FUNCTION__);
+      return -1;
+    }
+    DecImageProcRefCount();
+    delete deflicker_frame_stats_;
+    deflicker_frame_stats_ = NULL;
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViECapturer::EnableBrightnessAlarm(bool enable) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_device_id: %d, enable: %d)", __FUNCTION__,
+               capture_id_, enable);
+
+  CriticalSectionScoped cs(deliver_cs_.get());
+  if (enable) {
+    if (brightness_frame_stats_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
+                   "%s: BrightnessAlarm already enabled", __FUNCTION__);
+      return -1;
+    }
+    if (IncImageProcRefCount() != 0) {
+      return -1;
+    }
+    brightness_frame_stats_ = new VideoProcessingModule::FrameStats();
+  } else {
+    DecImageProcRefCount();
+    if (brightness_frame_stats_ == NULL) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
+                   "%s: deflickering not enabled", __FUNCTION__);
+      return -1;
+    }
+    delete brightness_frame_stats_;
+    brightness_frame_stats_ = NULL;
+  }
+  return 0;
+}
+
+bool ViECapturer::ViECaptureThreadFunction(void* obj) {
+  return static_cast<ViECapturer*>(obj)->ViECaptureProcess();
+}
+
+bool ViECapturer::ViECaptureProcess() {
+  if (capture_event_.Wait(kThreadWaitTimeMs) == kEventSignaled) {
+    deliver_cs_->Enter();
+    if (captured_frame_.Length() > 0) {
+      // New I420 frame.
+      capture_cs_->Enter();
+      deliver_frame_.SwapFrame(captured_frame_);
+      captured_frame_.SetLength(0);
+      capture_cs_->Leave();
+      DeliverI420Frame(&deliver_frame_);
+    }
+    if (encoded_frame_.Length() > 0) {
+      capture_cs_->Enter();
+      deliver_frame_.SwapFrame(encoded_frame_);
+      encoded_frame_.SetLength(0);
+      deliver_event_.Set();
+      capture_cs_->Leave();
+      DeliverCodedFrame(&deliver_frame_);
+    }
+    deliver_cs_->Leave();
+    if (current_brightness_level_ != reported_brightness_level_) {
+      CriticalSectionScoped cs(observer_cs_.get());
+      if (observer_) {
+        observer_->BrightnessAlarm(id_, current_brightness_level_);
+        reported_brightness_level_ = current_brightness_level_;
+      }
+    }
+  }
+  // We're done!
+  return true;
+}
+
+void ViECapturer::DeliverI420Frame(VideoFrame* video_frame) {
+  // Apply image enhancement and effect filter.
+  if (deflicker_frame_stats_) {
+    if (image_proc_module_->GetFrameStats(*deflicker_frame_stats_,
+                                          *video_frame) == 0) {
+      image_proc_module_->Deflickering(*video_frame, *deflicker_frame_stats_);
+    } else {
+      WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
+                   "%s: could not get frame stats for captured frame",
+                   __FUNCTION__);
+    }
+  }
+  if (denoising_enabled_) {
+    image_proc_module_->Denoising(*video_frame);
+  }
+  if (brightness_frame_stats_) {
+    if (image_proc_module_->GetFrameStats(*brightness_frame_stats_,
+                                          *video_frame) == 0) {
+      WebRtc_Word32 brightness = image_proc_module_->BrightnessDetection(
+          *video_frame, *brightness_frame_stats_);
+
+      switch (brightness) {
+      case VideoProcessingModule::kNoWarning:
+        current_brightness_level_ = Normal;
+        break;
+      case VideoProcessingModule::kDarkWarning:
+        current_brightness_level_ = Dark;
+        break;
+      case VideoProcessingModule::kBrightWarning:
+        current_brightness_level_ = Bright;
+        break;
+      default:
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+                     "%s: Brightness detection failed", __FUNCTION__);
+      }
+    }
+  }
+  if (effect_filter_) {
+    effect_filter_->Transform(video_frame->Length(), video_frame->Buffer(),
+                              video_frame->TimeStamp(), video_frame->Width(),
+                              video_frame->Height());
+  }
+  // Deliver the captured frame to all observers (channels, renderer or file).
+  ViEFrameProviderBase::DeliverFrame(video_frame);
+}
+
+void ViECapturer::DeliverCodedFrame(VideoFrame* video_frame) {
+  if (encode_complete_callback_) {
+    EncodedImage encoded_image(video_frame->Buffer(), video_frame->Length(),
+                               video_frame->Size());
+    encoded_image._timeStamp =
+        90 * static_cast<WebRtc_UWord32>(video_frame->RenderTimeMs());
+    encode_complete_callback_->Encoded(encoded_image);
+  }
+
+  if (NumberOfRegisteredFrameCallbacks() > 0 && decoder_initialized_) {
+    video_frame->Swap(decode_buffer_.payloadData, decode_buffer_.bufferSize,
+                     decode_buffer_.payloadSize);
+    decode_buffer_.encodedHeight = video_frame->Height();
+    decode_buffer_.encodedWidth = video_frame->Width();
+    decode_buffer_.renderTimeMs = video_frame->RenderTimeMs();
+    const int kMsToRtpTimestamp = 90;
+    decode_buffer_.timeStamp = kMsToRtpTimestamp *
+        static_cast<WebRtc_UWord32>(video_frame->RenderTimeMs());
+    decode_buffer_.payloadType = codec_.plType;
+    vcm_->DecodeFromStorage(decode_buffer_);
+  }
+}
+
+int ViECapturer::DeregisterFrameCallback(
+    const ViEFrameCallback* callbackObject) {
+  provider_cs_->Enter();
+  if (callbackObject == vie_encoder_) {
+    // Don't use this camera as encoder anymore. Need to tell the ViEEncoder.
+    ViEEncoder* vie_encoder = NULL;
+    vie_encoder = vie_encoder_;
+    vie_encoder_ = NULL;
+    provider_cs_->Leave();
+
+    // Need to take this here in order to avoid deadlock with VCM. The reason is
+    // that VCM will call ::Release and a deadlock can occur.
+    deliver_cs_->Enter();
+    vie_encoder->DeRegisterExternalEncoder(codec_.plType);
+    deliver_cs_->Leave();
+    return 0;
+  }
+  provider_cs_->Leave();
+  return ViEFrameProviderBase::DeregisterFrameCallback(callbackObject);
+}
+
+bool ViECapturer::IsFrameCallbackRegistered(
+    const ViEFrameCallback* callbackObject) {
+  CriticalSectionScoped cs(provider_cs_.get());
+  if (callbackObject == vie_encoder_) {
+    return true;
+  }
+  return ViEFrameProviderBase::IsFrameCallbackRegistered(callbackObject);
+}
+
+WebRtc_Word32 ViECapturer::PreEncodeToViEEncoder(const VideoCodec& codec,
+                                                 ViEEncoder& vie_encoder,
+                                                 WebRtc_Word32 vie_encoder_id) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
+  if (vie_encoder_ && &vie_encoder != vie_encoder_) {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+                 "%s(capture_device_id: %d Capture device already encoding)",
+                 __FUNCTION__, capture_id_);
+    return -1;
+  }
+
+  CriticalSectionScoped cs(encoding_cs_.get());
+  VideoCaptureModule::VideoCaptureEncodeInterface* capture_encoder =
+    capture_module_->GetEncodeInterface(codec);
+  if (!capture_encoder) {
+    // Encoding not supported?
+    return -1;
+  }
+  capture_encoder_ = capture_encoder;
+
+  // Create VCM module used for decoding frames if needed.
+  if (!vcm_) {
+    vcm_ = VideoCodingModule::Create(capture_id_);
+  }
+
+  if (vie_encoder.RegisterExternalEncoder(this, codec.plType) != 0) {
+    return -1;
+  }
+  if (vie_encoder.SetEncoder(codec) != 0) {
+    vie_encoder.DeRegisterExternalEncoder(codec.plType);
+    return -1;
+  }
+
+  // Make sure the encoder is not an I420 observer.
+  ViEFrameProviderBase::DeregisterFrameCallback(&vie_encoder);
+  // Store the vie_encoder using this capture device.
+  vie_encoder_ = &vie_encoder;
+  vie_encoder_id_ = vie_encoder_id;
+  memcpy(&codec_, &codec, sizeof(VideoCodec));
+  return 0;
+}
+
+bool ViECapturer::EncoderActive() {
+  return vie_encoder_ != NULL;
+}
+
+bool ViECapturer::CaptureCapabilityFixed() {
+  return requested_capability_.width != 0 &&
+      requested_capability_.height != 0 &&
+      requested_capability_.maxFPS != 0;
+}
+
+WebRtc_Word32 ViECapturer::Version(char* version,
+                                   WebRtc_Word32 length) const {
+  return 0;
+}
+
+WebRtc_Word32 ViECapturer::InitEncode(const VideoCodec* codec_settings,
+                                      WebRtc_Word32 number_of_cores,
+                                      WebRtc_UWord32 max_payload_size) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
+
+  CriticalSectionScoped cs(encoding_cs_.get());
+  if (!capture_encoder_ || !codec_settings) {
+    return WEBRTC_VIDEO_CODEC_ERROR;
+  }
+
+  if (vcm_) {
+    // Initialize VCM to be able to decode frames if needed.
+    if (vcm_->InitializeReceiver() == 0) {
+      if (vcm_->RegisterReceiveCallback(this) == 0) {
+        if (vcm_->RegisterReceiveCodec(codec_settings, number_of_cores,
+                                       false) == 0) {
+          decoder_initialized_ = true;
+          WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+                       "%s(capture_device_id: %d) VCM Decoder initialized",
+                       __FUNCTION__, capture_id_);
+        }
+      }
+    }
+  }
+  return capture_encoder_->ConfigureEncoder(*codec_settings, max_payload_size);
+}
+
+WebRtc_Word32 ViECapturer::Encode(const VideoFrame& input_image,
+                                  const CodecSpecificInfo* codec_specific_info,
+                                  const VideoFrameType frame_type) {
+  CriticalSectionScoped cs(encoding_cs_.get());
+  if (!capture_encoder_) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  if (frame_type == kKeyFrame) {
+    return capture_encoder_->EncodeFrameType(kVideoFrameKey);
+  }
+  if (frame_type == kSkipFrame) {
+    return capture_encoder_->EncodeFrameType(kFrameEmpty);
+  }
+  return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
+}
+
+WebRtc_Word32 ViECapturer::RegisterEncodeCompleteCallback(
+    EncodedImageCallback* callback) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
+
+  CriticalSectionScoped cs(deliver_cs_.get());
+  if (!capture_encoder_) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  encode_complete_callback_ = callback;
+  return 0;
+}
+
+WebRtc_Word32 ViECapturer::Release() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
+  {
+    CriticalSectionScoped cs(deliver_cs_.get());
+    encode_complete_callback_ = NULL;
+  }
+
+  {
+    CriticalSectionScoped cs(encoding_cs_.get());
+
+    decoder_initialized_ = false;
+    codec_.codecType = kVideoCodecUnknown;
+    // Reset the camera to output I420.
+    capture_encoder_->ConfigureEncoder(codec_, 0);
+
+    if (vie_encoder_) {
+      // Need to add the encoder as an observer of I420.
+      ViEFrameProviderBase::RegisterFrameCallback(vie_encoder_id_,
+                                                  vie_encoder_);
+    }
+    vie_encoder_ = NULL;
+  }
+  return 0;
+}
+
+// Should reset the capture device to the state it was in after the InitEncode
+// function. Current implementation do nothing.
+WebRtc_Word32 ViECapturer::Reset() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
+  return 0;
+}
+
+WebRtc_Word32 ViECapturer::SetChannelParameters(WebRtc_UWord32 packet_loss,
+                                                int rtt) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
+
+  CriticalSectionScoped cs(encoding_cs_.get());
+  if (!capture_encoder_) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  return capture_encoder_->SetChannelParameters(packet_loss, rtt);
+}
+
+WebRtc_Word32 ViECapturer::SetRates(WebRtc_UWord32 new_bit_rate,
+                                    WebRtc_UWord32 frame_rate) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "%s(capture_device_id: %d)", __FUNCTION__, capture_id_);
+
+  CriticalSectionScoped cs(encoding_cs_.get());
+  if (!capture_encoder_) {
+    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
+  }
+  return capture_encoder_->SetRates(new_bit_rate, frame_rate);
+}
+
+WebRtc_Word32 ViECapturer::FrameToRender(VideoFrame& video_frame) {  // NOLINT
+  deliver_cs_->Enter();
+  DeliverI420Frame(&video_frame);
+  deliver_cs_->Leave();
+  return 0;
+}
+
+WebRtc_Word32 ViECapturer::RegisterObserver(ViECaptureObserver* observer) {
+  if (observer_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
+                 "%s Observer already registered", __FUNCTION__, capture_id_);
+    return -1;
+  }
+  if (capture_module_->RegisterCaptureCallback(*this) != 0) {
+    return -1;
+  }
+  capture_module_->EnableFrameRateCallback(true);
+  capture_module_->EnableNoPictureAlarm(true);
+  observer_ = observer;
+  return 0;
+}
+
+WebRtc_Word32 ViECapturer::DeRegisterObserver() {
+  CriticalSectionScoped cs(observer_cs_.get());
+  if (!observer_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, capture_id_),
+                 "%s No observer registered", __FUNCTION__, capture_id_);
+    return -1;
+  }
+  capture_module_->EnableFrameRateCallback(false);
+  capture_module_->EnableNoPictureAlarm(false);
+  capture_module_->DeRegisterCaptureCallback();
+  observer_ = NULL;
+  return 0;
+}
+
+bool ViECapturer::IsObserverRegistered() {
+  CriticalSectionScoped cs(observer_cs_.get());
+  return observer_ != NULL;
+}
+
+void ViECapturer::OnCaptureFrameRate(const WebRtc_Word32 id,
+                                     const WebRtc_UWord32 frame_rate) {
+  WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "OnCaptureFrameRate %d", frame_rate);
+
+  CriticalSectionScoped cs(observer_cs_.get());
+  observer_->CapturedFrameRate(id_, (WebRtc_UWord8) frame_rate);
+}
+
+void ViECapturer::OnNoPictureAlarm(const WebRtc_Word32 id,
+                                   const VideoCaptureAlarm alarm) {
+  WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, capture_id_),
+               "OnNoPictureAlarm %d", alarm);
+
+  CriticalSectionScoped cs(observer_cs_.get());
+  CaptureAlarm vie_alarm = (alarm == Raised) ? AlarmRaised : AlarmCleared;
+  observer_->NoPictureAlarm(id, vie_alarm);
+}
+
+WebRtc_Word32 ViECapturer::SetCaptureDeviceImage(
+    const VideoFrame& capture_device_image) {
+  return capture_module_->StartSendImage(capture_device_image, 10);
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_capturer.h b/src/video_engine/vie_capturer.h
new file mode 100644
index 0000000..6b054fe
--- /dev/null
+++ b/src/video_engine/vie_capturer.h
@@ -0,0 +1,224 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_CAPTURER_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_CAPTURER_H_
+
+#include "common_types.h"  // NOLINT
+#include "engine_configurations.h"  // NOLINT
+#include "modules/video_capture/main/interface/video_capture.h"
+#include "modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "modules/video_processing/main/interface/video_processing.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "typedefs.h" // NOLINT
+#include "video_engine/include/vie_capture.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_frame_provider_base.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class EventWrapper;
+class ProcessThread;
+class ThreadWrapper;
+class ViEEffectFilter;
+class ViEEncoder;
+struct ViEPicture;
+
+class ViECapturer
+    : public ViEFrameProviderBase,
+      public ViEExternalCapture,
+      protected VCMReceiveCallback,
+      protected VideoCaptureDataCallback,
+      protected VideoCaptureFeedBack,
+      protected VideoEncoder {
+ public:
+  static ViECapturer* CreateViECapture(int capture_id,
+                                       int engine_id,
+                                       VideoCaptureModule* capture_module,
+                                       ProcessThread& module_process_thread);
+
+  static ViECapturer* CreateViECapture(
+      int capture_id,
+      int engine_id,
+      const char* device_unique_idUTF8,
+      WebRtc_UWord32 device_unique_idUTF8Length,
+      ProcessThread& module_process_thread);
+
+  ~ViECapturer();
+
+  // Implements ViEFrameProviderBase.
+  int FrameCallbackChanged();
+  virtual int DeregisterFrameCallback(const ViEFrameCallback* callbackObject);
+  bool IsFrameCallbackRegistered(const ViEFrameCallback* callbackObject);
+
+  // Implements ExternalCapture.
+  virtual int IncomingFrame(unsigned char* video_frame,
+                            unsigned int video_frame_length,
+                            uint16_t width,
+                            uint16_t height,
+                            RawVideoType video_type,
+                            unsigned long long capture_time = 0);  // NOLINT
+
+  virtual int IncomingFrameI420(const ViEVideoFrameI420& video_frame,
+                                unsigned long long capture_time = 0);  // NOLINT
+
+  // Use this capture device as encoder.
+  // Returns 0 if the codec is supported by this capture device.
+  virtual WebRtc_Word32 PreEncodeToViEEncoder(const VideoCodec& codec,
+                                              ViEEncoder& vie_encoder,
+                                              WebRtc_Word32 vie_encoder_id);
+
+  // Start/Stop.
+  WebRtc_Word32 Start(
+      const CaptureCapability& capture_capability = CaptureCapability());
+  WebRtc_Word32 Stop();
+  bool Started();
+
+  // Overrides the capture delay.
+  WebRtc_Word32 SetCaptureDelay(WebRtc_Word32 delay_ms);
+
+  // Sets rotation of the incoming captured frame.
+  WebRtc_Word32 SetRotateCapturedFrames(const RotateCapturedFrame rotation);
+
+  // Effect filter.
+  WebRtc_Word32 RegisterEffectFilter(ViEEffectFilter* effect_filter);
+  WebRtc_Word32 EnableDenoising(bool enable);
+  WebRtc_Word32 EnableDeflickering(bool enable);
+  WebRtc_Word32 EnableBrightnessAlarm(bool enable);
+
+  // Statistics observer.
+  WebRtc_Word32 RegisterObserver(ViECaptureObserver* observer);
+  WebRtc_Word32 DeRegisterObserver();
+  bool IsObserverRegistered();
+
+  // Information.
+  const char* CurrentDeviceName() const;
+
+  // Set device image.
+  WebRtc_Word32 SetCaptureDeviceImage(const VideoFrame& capture_device_image);
+
+ protected:
+  ViECapturer(int capture_id,
+              int engine_id,
+              ProcessThread& module_process_thread);
+
+  WebRtc_Word32 Init(VideoCaptureModule* capture_module);
+  WebRtc_Word32 Init(const char* device_unique_idUTF8,
+                     const WebRtc_UWord32 device_unique_idUTF8Length);
+
+  // Implements VideoCaptureDataCallback.
+  virtual void OnIncomingCapturedFrame(const WebRtc_Word32 id,
+                                       VideoFrame& video_frame,
+                                       VideoCodecType codec_type);
+  virtual void OnCaptureDelayChanged(const WebRtc_Word32 id,
+                                     const WebRtc_Word32 delay);
+
+  bool EncoderActive();
+
+  // Returns true if the capture capability has been set in |StartCapture|
+  // function and may not be changed.
+  bool CaptureCapabilityFixed();
+
+  // Help function used for keeping track of VideoImageProcesingModule.
+  // Creates the module if it is needed, returns 0 on success and guarantees
+  // that the image proc module exist.
+  WebRtc_Word32 IncImageProcRefCount();
+  WebRtc_Word32 DecImageProcRefCount();
+
+  // Implements VideoEncoder.
+  virtual WebRtc_Word32 Version(char* version,
+                                WebRtc_Word32 length) const;
+  virtual WebRtc_Word32 InitEncode(const VideoCodec* codec_settings,
+                                   WebRtc_Word32 number_of_cores,
+                                   WebRtc_UWord32 max_payload_size);
+  virtual WebRtc_Word32 Encode(const VideoFrame& input_image,
+                               const CodecSpecificInfo* codec_specific_info,
+                               const VideoFrameType frame_type);
+  virtual WebRtc_Word32 RegisterEncodeCompleteCallback(
+      EncodedImageCallback* callback);
+  virtual WebRtc_Word32 Release();
+  virtual WebRtc_Word32 Reset();
+  virtual WebRtc_Word32 SetChannelParameters(WebRtc_UWord32 packet_loss,
+                                             int rtt);
+  virtual WebRtc_Word32 SetRates(WebRtc_UWord32 new_bit_rate,
+                                 WebRtc_UWord32 frame_rate);
+
+  // Implements  VCMReceiveCallback.
+  // TODO(mflodman) Change input argument to pointer.
+  virtual WebRtc_Word32 FrameToRender(VideoFrame& video_frame);  // NOLINT
+
+  // Implements VideoCaptureFeedBack
+  virtual void OnCaptureFrameRate(const WebRtc_Word32 id,
+                                  const WebRtc_UWord32 frame_rate);
+  virtual void OnNoPictureAlarm(const WebRtc_Word32 id,
+                                const VideoCaptureAlarm alarm);
+
+  // Thread functions for deliver captured frames to receivers.
+  static bool ViECaptureThreadFunction(void* obj);
+  bool ViECaptureProcess();
+
+  void DeliverI420Frame(VideoFrame* video_frame);
+  void DeliverCodedFrame(VideoFrame* video_frame);
+
+ private:
+  // Never take capture_cs_ before deliver_cs_!
+  scoped_ptr<CriticalSectionWrapper> capture_cs_;
+  scoped_ptr<CriticalSectionWrapper> deliver_cs_;
+  VideoCaptureModule* capture_module_;
+  VideoCaptureExternal* external_capture_module_;
+  ProcessThread& module_process_thread_;
+  const int capture_id_;
+
+  // Capture thread.
+  ThreadWrapper& capture_thread_;
+  EventWrapper& capture_event_;
+  EventWrapper& deliver_event_;
+
+  VideoFrame captured_frame_;
+  VideoFrame deliver_frame_;
+  VideoFrame encoded_frame_;
+
+  // Image processing.
+  ViEEffectFilter* effect_filter_;
+  VideoProcessingModule* image_proc_module_;
+  int image_proc_module_ref_counter_;
+  VideoProcessingModule::FrameStats* deflicker_frame_stats_;
+  VideoProcessingModule::FrameStats* brightness_frame_stats_;
+  Brightness current_brightness_level_;
+  Brightness reported_brightness_level_;
+  bool denoising_enabled_;
+
+  // Statistics observer.
+  scoped_ptr<CriticalSectionWrapper> observer_cs_;
+  ViECaptureObserver* observer_;
+
+  // Encoding using encoding capable cameras.
+  scoped_ptr<CriticalSectionWrapper> encoding_cs_;
+  VideoCaptureModule::VideoCaptureEncodeInterface* capture_encoder_;
+  EncodedImageCallback* encode_complete_callback_;
+  VideoCodec codec_;
+  // The ViEEncoder we are encoding for.
+  ViEEncoder* vie_encoder_;
+  // ViEEncoder id we are encoding for.
+  WebRtc_Word32 vie_encoder_id_;
+  // Used for decoding preencoded frames.
+  VideoCodingModule* vcm_;
+  EncodedVideoData decode_buffer_;
+  bool decoder_initialized_;
+  CaptureCapability requested_capability_;
+
+  VideoFrame capture_device_image_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_CAPTURER_H_
diff --git a/src/video_engine/vie_channel.cc b/src/video_engine/vie_channel.cc
new file mode 100644
index 0000000..f909043
--- /dev/null
+++ b/src/video_engine/vie_channel.cc
@@ -0,0 +1,2442 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_channel.h"
+
+#include <algorithm>
+#include <vector>
+
+#include "modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "modules/udp_transport/interface/udp_transport.h"
+#include "modules/utility/interface/process_thread.h"
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "modules/video_processing/main/interface/video_processing.h"
+#include "modules/video_render/main/interface/video_render_defines.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/thread_wrapper.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_codec.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/include/vie_image_process.h"
+#include "video_engine/include/vie_rtp_rtcp.h"
+#include "video_engine/vie_defines.h"
+
+namespace webrtc {
+
+const int kMaxDecodeWaitTimeMs = 50;
+const int kInvalidRtpExtensionId = 0;
+
+ViEChannel::ViEChannel(WebRtc_Word32 channel_id,
+                       WebRtc_Word32 engine_id,
+                       WebRtc_UWord32 number_of_cores,
+                       ProcessThread& module_process_thread,
+                       RtcpIntraFrameObserver* intra_frame_observer,
+                       RtcpBandwidthObserver* bandwidth_observer,
+                       RemoteBitrateEstimator* remote_bitrate_estimator,
+                       RtpRtcp* default_rtp_rtcp,
+                       bool sender)
+    : ViEFrameProviderBase(channel_id, engine_id),
+      channel_id_(channel_id),
+      engine_id_(engine_id),
+      number_of_cores_(number_of_cores),
+      num_socket_threads_(kViESocketThreads),
+      callback_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      rtp_rtcp_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      default_rtp_rtcp_(default_rtp_rtcp),
+      rtp_rtcp_(NULL),
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+      socket_transport_(*UdpTransport::Create(
+          ViEModuleId(engine_id, channel_id), num_socket_threads_)),
+#endif
+      vcm_(*VideoCodingModule::Create(ViEModuleId(engine_id, channel_id))),
+      vie_receiver_(channel_id, &vcm_),
+      vie_sender_(channel_id),
+      vie_sync_(channel_id, &vcm_),
+      module_process_thread_(module_process_thread),
+      codec_observer_(NULL),
+      do_key_frame_callbackRequest_(false),
+      rtp_observer_(NULL),
+      rtcp_observer_(NULL),
+      networkObserver_(NULL),
+      intra_frame_observer_(intra_frame_observer),
+      bandwidth_observer_(bandwidth_observer),
+      rtp_packet_timeout_(false),
+      send_timestamp_extension_id_(kInvalidRtpExtensionId),
+      using_packet_spread_(false),
+      external_transport_(NULL),
+      decoder_reset_(true),
+      wait_for_key_frame_(false),
+      decode_thread_(NULL),
+      external_encryption_(NULL),
+      effect_filter_(NULL),
+      color_enhancement_(false),
+      vcm_rttreported_(TickTime::Now()),
+      file_recorder_(channel_id),
+      mtu_(0),
+      sender_(sender) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id, channel_id),
+               "ViEChannel::ViEChannel(channel_id: %d, engine_id: %d)",
+               channel_id, engine_id);
+
+  RtpRtcp::Configuration configuration;
+  configuration.id = ViEModuleId(engine_id, channel_id);
+  configuration.audio = false;
+  configuration.default_module = default_rtp_rtcp;
+  configuration.incoming_data = &vie_receiver_;
+  configuration.incoming_messages = this;
+  configuration.outgoing_transport = &vie_sender_;
+  configuration.rtcp_feedback = this;
+  configuration.intra_frame_callback = intra_frame_observer;
+  configuration.bandwidth_callback = bandwidth_observer;
+  configuration.remote_bitrate_estimator = remote_bitrate_estimator;
+
+  rtp_rtcp_.reset(RtpRtcp::CreateRtpRtcp(configuration));
+  vie_receiver_.SetRtpRtcpModule(rtp_rtcp_.get());
+}
+
+WebRtc_Word32 ViEChannel::Init() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: channel_id: %d, engine_id: %d)", __FUNCTION__, channel_id_,
+               engine_id_);
+
+  // RTP/RTCP initialization.
+  if (rtp_rtcp_->SetSendingMediaStatus(false) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: RTP::SetSendingMediaStatus failure", __FUNCTION__);
+    return -1;
+  }
+  if (module_process_thread_.RegisterModule(rtp_rtcp_.get()) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: RTP::RegisterModule failure", __FUNCTION__);
+    return -1;
+  }
+  if (rtp_rtcp_->SetKeyFrameRequestMethod(kKeyFrameReqFirRtp) != 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: RTP::SetKeyFrameRequestMethod failure", __FUNCTION__);
+  }
+  if (rtp_rtcp_->SetRTCPStatus(kRtcpCompound) != 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: RTP::SetRTCPStatus failure", __FUNCTION__);
+  }
+
+  // VCM initialization
+  if (vcm_.InitializeReceiver() != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "%s: VCM::InitializeReceiver failure", __FUNCTION__);
+    return -1;
+  }
+  if (vcm_.RegisterReceiveCallback(this) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: VCM::RegisterReceiveCallback failure", __FUNCTION__);
+    return -1;
+  }
+  if (vcm_.RegisterFrameTypeCallback(this) != 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: VCM::RegisterFrameTypeCallback failure", __FUNCTION__);
+  }
+  if (vcm_.RegisterReceiveStatisticsCallback(this) != 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: VCM::RegisterReceiveStatisticsCallback failure",
+                 __FUNCTION__);
+  }
+  if (vcm_.SetRenderDelay(kViEDefaultRenderDelayMs) != 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: VCM::SetRenderDelay failure", __FUNCTION__);
+  }
+  if (module_process_thread_.RegisterModule(&vcm_) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: VCM::RegisterModule(vcm) failure", __FUNCTION__);
+    return -1;
+  }
+#ifdef VIDEOCODEC_VP8
+  VideoCodec video_codec;
+  if (vcm_.Codec(kVideoCodecVP8, &video_codec) == VCM_OK) {
+    rtp_rtcp_->RegisterSendPayload(video_codec);
+    rtp_rtcp_->RegisterReceivePayload(video_codec);
+    vcm_.RegisterReceiveCodec(&video_codec, number_of_cores_);
+    vcm_.RegisterSendCodec(&video_codec, number_of_cores_,
+                           rtp_rtcp_->MaxDataPayloadLength());
+  } else {
+    assert(false);
+  }
+#endif
+
+  return 0;
+}
+
+ViEChannel::~ViEChannel() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "ViEChannel Destructor, channel_id: %d, engine_id: %d",
+               channel_id_, engine_id_);
+
+  // Make sure we don't get more callbacks from the RTP module.
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  socket_transport_.StopReceiving();
+#endif
+  module_process_thread_.DeRegisterModule(rtp_rtcp_.get());
+  module_process_thread_.DeRegisterModule(&vcm_);
+  module_process_thread_.DeRegisterModule(&vie_sync_);
+  while (simulcast_rtp_rtcp_.size() > 0) {
+    std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
+    RtpRtcp* rtp_rtcp = *it;
+    module_process_thread_.DeRegisterModule(rtp_rtcp);
+    delete rtp_rtcp;
+    simulcast_rtp_rtcp_.erase(it);
+  }
+  if (decode_thread_) {
+    StopDecodeThread();
+  }
+  // Release modules.
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  UdpTransport::Destroy(&socket_transport_);
+#endif
+  VideoCodingModule::Destroy(&vcm_);
+}
+
+WebRtc_Word32 ViEChannel::SetSendCodec(const VideoCodec& video_codec,
+                                       bool new_stream) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: codec_type: %d", __FUNCTION__, video_codec.codecType);
+
+  if (!sender_) {
+    return 0;
+  }
+  if (video_codec.codecType == kVideoCodecRED ||
+      video_codec.codecType == kVideoCodecULPFEC) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: codec_type: %d is not a valid send codec.", __FUNCTION__,
+                 video_codec.codecType);
+    return -1;
+  }
+  if (kMaxSimulcastStreams < video_codec.numberOfSimulcastStreams) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Too many simulcast streams", __FUNCTION__);
+    return -1;
+  }
+  // Update the RTP module with the settings.
+  // Stop and Start the RTP module -> trigger new SSRC, if an SSRC hasn't been
+  // set explicitly.
+  bool restart_rtp = false;
+  if (rtp_rtcp_->Sending() && new_stream) {
+    restart_rtp = true;
+    rtp_rtcp_->SetSendingStatus(false);
+  }
+  NACKMethod nack_method = rtp_rtcp_->NACK();
+
+  CriticalSectionScoped cs(rtp_rtcp_cs_.get());
+
+  if (video_codec.numberOfSimulcastStreams > 0) {
+    // Set correct bitrate to base layer.
+    // Create our simulcast RTP modules.
+
+    for (int i = simulcast_rtp_rtcp_.size();
+         i < video_codec.numberOfSimulcastStreams - 1;
+         i++) {
+      RtpRtcp::Configuration configuration;
+      configuration.id = ViEModuleId(engine_id_, channel_id_);
+      configuration.audio = false;  // Video.
+      configuration.default_module = default_rtp_rtcp_;
+      configuration.outgoing_transport = &vie_sender_;
+      configuration.intra_frame_callback = intra_frame_observer_;
+      configuration.bandwidth_callback = bandwidth_observer_.get();
+
+      RtpRtcp* rtp_rtcp = RtpRtcp::CreateRtpRtcp(configuration);
+
+      // Silently ignore error.
+      module_process_thread_.RegisterModule(rtp_rtcp);
+      if (rtp_rtcp->SetRTCPStatus(rtp_rtcp_->RTCP()) != 0) {
+        WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                     "%s: RTP::SetRTCPStatus failure", __FUNCTION__);
+      }
+      if (nack_method != kNackOff) {
+        rtp_rtcp->SetStorePacketsStatus(true, kNackHistorySize);
+        rtp_rtcp->SetNACKStatus(nack_method);
+      }
+      rtp_rtcp->SetSendingMediaStatus(rtp_rtcp_->SendingMedia());
+      simulcast_rtp_rtcp_.push_back(rtp_rtcp);
+    }
+    // Remove last in list if we have too many.
+    std::list<RtpRtcp*> modules_to_delete;
+    for (int j = simulcast_rtp_rtcp_.size();
+         j > (video_codec.numberOfSimulcastStreams - 1);
+         j--) {
+      RtpRtcp* rtp_rtcp = simulcast_rtp_rtcp_.back();
+      module_process_thread_.DeRegisterModule(rtp_rtcp);
+      simulcast_rtp_rtcp_.pop_back();
+      // We need to deregister the module before deleting.
+      modules_to_delete.push_back(rtp_rtcp);
+    }
+    WebRtc_UWord8 idx = 0;
+    // Configure all simulcast modules.
+    for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
+         it != simulcast_rtp_rtcp_.end();
+         it++) {
+      idx++;
+      RtpRtcp* rtp_rtcp = *it;
+      rtp_rtcp->DeRegisterSendPayload(video_codec.plType);
+      if (rtp_rtcp->RegisterSendPayload(video_codec) != 0) {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                     "%s: could not register payload type", __FUNCTION__);
+        return -1;
+      }
+      if (mtu_ != 0) {
+        rtp_rtcp->SetMaxTransferUnit(mtu_);
+      }
+      if (restart_rtp) {
+        rtp_rtcp->SetSendingStatus(true);
+      }
+      if (send_timestamp_extension_id_ != kInvalidRtpExtensionId) {
+        // Deregister in case the extension was previously enabled.
+        rtp_rtcp->DeregisterSendRtpHeaderExtension(
+            kRtpExtensionTransmissionTimeOffset);
+        if (rtp_rtcp->RegisterSendRtpHeaderExtension(
+            kRtpExtensionTransmissionTimeOffset,
+            send_timestamp_extension_id_) != 0) {
+          WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                       "%s: could not register transmission time extension",
+                       __FUNCTION__);
+        }
+      } else {
+        rtp_rtcp->DeregisterSendRtpHeaderExtension(
+            kRtpExtensionTransmissionTimeOffset);
+      }
+    }
+    // |RegisterSimulcastRtpRtcpModules| resets all old weak pointers and old
+    // modules can be deleted after this step.
+    vie_receiver_.RegisterSimulcastRtpRtcpModules(simulcast_rtp_rtcp_);
+    for (std::list<RtpRtcp*>::iterator it = modules_to_delete.begin();
+         it != modules_to_delete.end(); ++it) {
+      delete *it;
+    }
+    modules_to_delete.clear();
+  } else {
+    if (!simulcast_rtp_rtcp_.empty()) {
+      // Delete all simulcast rtp modules.
+      while (!simulcast_rtp_rtcp_.empty()) {
+        RtpRtcp* rtp_rtcp = simulcast_rtp_rtcp_.back();
+        module_process_thread_.DeRegisterModule(rtp_rtcp);
+        delete rtp_rtcp;
+        simulcast_rtp_rtcp_.pop_back();
+      }
+    }
+    // Clear any previous modules.
+    vie_receiver_.RegisterSimulcastRtpRtcpModules(simulcast_rtp_rtcp_);
+  }
+  // Enable this if H264 is available.
+  // This sets the wanted packetization mode.
+  // if (video_codec.plType == kVideoCodecH264) {
+  //   if (video_codec.codecSpecific.H264.packetization ==  kH264SingleMode) {
+  //     rtp_rtcp_->SetH264PacketizationMode(H264_SINGLE_NAL_MODE);
+  //   } else {
+  //     rtp_rtcp_->SetH264PacketizationMode(H264_NON_INTERLEAVED_MODE);
+  //   }
+  //   if (video_codec.codecSpecific.H264.configParametersSize > 0) {
+  //     rtp_rtcp_->SetH264SendModeNALU_PPS_SPS(true);
+  //   }
+  // }
+
+  // Don't log this error, no way to check in advance if this pl_type is
+  // registered or not...
+  rtp_rtcp_->DeRegisterSendPayload(video_codec.plType);
+  if (rtp_rtcp_->RegisterSendPayload(video_codec) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: could not register payload type", __FUNCTION__);
+    return -1;
+  }
+  if (restart_rtp) {
+    rtp_rtcp_->SetSendingStatus(true);
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::SetReceiveCodec(const VideoCodec& video_codec) {
+  // We will not receive simulcast streams, so no need to handle that use case.
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+
+  WebRtc_Word8 old_pltype = -1;
+  if (rtp_rtcp_->ReceivePayloadType(video_codec, &old_pltype) != -1) {
+    rtp_rtcp_->DeRegisterReceivePayload(old_pltype);
+  }
+
+  if (rtp_rtcp_->RegisterReceivePayload(video_codec) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not register receive payload type", __FUNCTION__);
+    return -1;
+  }
+
+  if (video_codec.codecType != kVideoCodecRED &&
+      video_codec.codecType != kVideoCodecULPFEC) {
+    // Register codec type with VCM, but do not register RED or ULPFEC.
+    if (vcm_.RegisterReceiveCodec(&video_codec, number_of_cores_,
+                                  wait_for_key_frame_) != VCM_OK) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: Could not register decoder", __FUNCTION__);
+      return -1;
+    }
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::GetReceiveCodec(VideoCodec* video_codec) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+
+  if (vcm_.ReceiveCodec(video_codec) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not get receive codec", __FUNCTION__);
+    return -1;
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::RegisterCodecObserver(ViEDecoderObserver* observer) {
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (observer) {
+    if (codec_observer_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: already added", __FUNCTION__);
+      return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: observer added", __FUNCTION__);
+    codec_observer_ = observer;
+  } else {
+    if (!codec_observer_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: no observer added", __FUNCTION__);
+      return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: observer removed", __FUNCTION__);
+    codec_observer_ = NULL;
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::RegisterExternalDecoder(const WebRtc_UWord8 pl_type,
+                                                  VideoDecoder* decoder,
+                                                  bool decoder_render,
+                                                  WebRtc_Word32 render_delay) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+
+  WebRtc_Word32 result = 0;
+  result = vcm_.RegisterExternalDecoder(decoder, pl_type, decoder_render);
+  if (decoder_render && result == 0) {
+    // Let VCM know how long before the actual render time the decoder needs
+    // to get a frame for decoding.
+    result = vcm_.SetRenderDelay(render_delay);
+  }
+  return result;
+}
+
+WebRtc_Word32 ViEChannel::DeRegisterExternalDecoder(
+    const WebRtc_UWord8 pl_type) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s pl_type", __FUNCTION__, pl_type);
+
+  VideoCodec current_receive_codec;
+  WebRtc_Word32 result = 0;
+  result = vcm_.ReceiveCodec(&current_receive_codec);
+  if (vcm_.RegisterExternalDecoder(NULL, pl_type, false) != VCM_OK) {
+    return -1;
+  }
+
+  if (result == 0 && current_receive_codec.plType == pl_type) {
+    result = vcm_.RegisterReceiveCodec(&current_receive_codec, number_of_cores_,
+                                       wait_for_key_frame_);
+  }
+  return result;
+}
+
+WebRtc_Word32 ViEChannel::ReceiveCodecStatistics(
+    WebRtc_UWord32* num_key_frames, WebRtc_UWord32* num_delta_frames) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+
+  VCMFrameCount received_frames;
+  if (vcm_.ReceivedFrameCount(received_frames) != VCM_OK) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not get received frame information", __FUNCTION__);
+    return -1;
+  }
+  *num_key_frames = received_frames.numKeyFrames;
+  *num_delta_frames = received_frames.numDeltaFrames;
+  return 0;
+}
+
+WebRtc_UWord32 ViEChannel::DiscardedPackets() const {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+  return vcm_.DiscardedPackets();
+}
+
+WebRtc_Word32 ViEChannel::WaitForKeyFrame(bool wait) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s(wait: %d)", __FUNCTION__, wait);
+  wait_for_key_frame_ = wait;
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::SetSignalPacketLossStatus(bool enable,
+                                                    bool only_key_frames) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s(enable: %d)", __FUNCTION__, enable);
+  if (enable) {
+    if (only_key_frames) {
+      vcm_.SetVideoProtection(kProtectionKeyOnLoss, false);
+      if (vcm_.SetVideoProtection(kProtectionKeyOnKeyLoss, true) != VCM_OK) {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                     "%s failed %d", __FUNCTION__, enable);
+        return -1;
+      }
+    } else {
+      vcm_.SetVideoProtection(kProtectionKeyOnKeyLoss, false);
+      if (vcm_.SetVideoProtection(kProtectionKeyOnLoss, true) != VCM_OK) {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                     "%s failed %d", __FUNCTION__, enable);
+        return -1;
+      }
+    }
+  } else {
+    vcm_.SetVideoProtection(kProtectionKeyOnLoss, false);
+    vcm_.SetVideoProtection(kProtectionKeyOnKeyLoss, false);
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::SetRTCPMode(const RTCPMethod rtcp_mode) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: %d", __FUNCTION__, rtcp_mode);
+
+  CriticalSectionScoped cs(rtp_rtcp_cs_.get());
+  for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
+       it != simulcast_rtp_rtcp_.end();
+       it++) {
+    RtpRtcp* rtp_rtcp = *it;
+    rtp_rtcp->SetRTCPStatus(rtcp_mode);
+  }
+  return rtp_rtcp_->SetRTCPStatus(rtcp_mode);
+}
+
+WebRtc_Word32 ViEChannel::GetRTCPMode(RTCPMethod* rtcp_mode) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+  *rtcp_mode = rtp_rtcp_->RTCP();
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::SetNACKStatus(const bool enable) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s(enable: %d)", __FUNCTION__, enable);
+
+  // Update the decoding VCM.
+  if (vcm_.SetVideoProtection(kProtectionNack, enable) != VCM_OK) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not set VCM NACK protection: %d", __FUNCTION__,
+                 enable);
+    return -1;
+  }
+  if (enable) {
+    // Disable possible FEC.
+    SetFECStatus(false, 0, 0);
+  }
+  // Update the decoding VCM.
+  if (vcm_.SetVideoProtection(kProtectionNack, enable) != VCM_OK) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not set VCM NACK protection: %d", __FUNCTION__,
+                 enable);
+    return -1;
+  }
+  return ProcessNACKRequest(enable);
+}
+
+WebRtc_Word32 ViEChannel::ProcessNACKRequest(const bool enable) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s(enable: %d)", __FUNCTION__, enable);
+
+  if (enable) {
+    // Turn on NACK.
+    NACKMethod nackMethod = kNackRtcp;
+    if (rtp_rtcp_->RTCP() == kRtcpOff) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: Could not enable NACK, RTPC not on ", __FUNCTION__);
+      return -1;
+    }
+    if (rtp_rtcp_->SetNACKStatus(nackMethod) != 0) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: Could not set NACK method %d", __FUNCTION__,
+                   nackMethod);
+      return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Using NACK method %d", __FUNCTION__, nackMethod);
+    rtp_rtcp_->SetStorePacketsStatus(true, kNackHistorySize);
+
+    vcm_.RegisterPacketRequestCallback(this);
+
+    CriticalSectionScoped cs(rtp_rtcp_cs_.get());
+
+    for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
+         it != simulcast_rtp_rtcp_.end();
+         it++) {
+      RtpRtcp* rtp_rtcp = *it;
+      rtp_rtcp->SetStorePacketsStatus(true, kNackHistorySize);
+      rtp_rtcp->SetNACKStatus(nackMethod);
+    }
+  } else {
+    CriticalSectionScoped cs(rtp_rtcp_cs_.get());
+    for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
+         it != simulcast_rtp_rtcp_.end();
+         it++) {
+      RtpRtcp* rtp_rtcp = *it;
+      rtp_rtcp->SetStorePacketsStatus(false);
+      rtp_rtcp->SetNACKStatus(kNackOff);
+    }
+    rtp_rtcp_->SetStorePacketsStatus(false);
+    vcm_.RegisterPacketRequestCallback(NULL);
+    if (rtp_rtcp_->SetNACKStatus(kNackOff) != 0) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: Could not turn off NACK", __FUNCTION__);
+      return -1;
+    }
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::SetFECStatus(const bool enable,
+                                       const unsigned char payload_typeRED,
+                                       const unsigned char payload_typeFEC) {
+  // Disable possible NACK.
+  if (enable) {
+    SetNACKStatus(false);
+  }
+
+  return ProcessFECRequest(enable, payload_typeRED, payload_typeFEC);
+}
+
+WebRtc_Word32 ViEChannel::ProcessFECRequest(
+    const bool enable,
+    const unsigned char payload_typeRED,
+    const unsigned char payload_typeFEC) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s(enable: %d, payload_typeRED: %u, payload_typeFEC: %u)",
+               __FUNCTION__, enable, payload_typeRED, payload_typeFEC);
+
+  if (rtp_rtcp_->SetGenericFECStatus(enable, payload_typeRED,
+                                    payload_typeFEC) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not change FEC status to %d", __FUNCTION__,
+                 enable);
+    return -1;
+  }
+  CriticalSectionScoped cs(rtp_rtcp_cs_.get());
+  for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
+       it != simulcast_rtp_rtcp_.end();
+       it++) {
+    RtpRtcp* rtp_rtcp = *it;
+    rtp_rtcp->SetGenericFECStatus(enable, payload_typeRED, payload_typeFEC);
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::SetHybridNACKFECStatus(
+    const bool enable,
+    const unsigned char payload_typeRED,
+    const unsigned char payload_typeFEC) {
+  // Update the decoding VCM with hybrid mode.
+  if (vcm_.SetVideoProtection(kProtectionNackFEC, enable) != VCM_OK) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not set VCM NACK protection: %d", __FUNCTION__,
+                 enable);
+    return -1;
+  }
+
+  WebRtc_Word32 ret_val = 0;
+  ret_val = ProcessNACKRequest(enable);
+  if (ret_val < 0) {
+    return ret_val;
+  }
+  return ProcessFECRequest(enable, payload_typeRED, payload_typeFEC);
+}
+
+WebRtc_Word32 ViEChannel::SetKeyFrameRequestMethod(
+    const KeyFrameRequestMethod method) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: %d", __FUNCTION__, method);
+  return rtp_rtcp_->SetKeyFrameRequestMethod(method);
+}
+
+bool ViEChannel::EnableRemb(bool enable) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "ViEChannel::EnableRemb: %d", enable);
+  if (rtp_rtcp_->SetREMBStatus(enable) != 0)
+    return false;
+  return true;
+}
+
+int ViEChannel::SetSendTimestampOffsetStatus(bool enable, int id) {
+  int error = 0;
+  if (enable) {
+    // Enable the extension, but disable possible old id to avoid errors.
+    send_timestamp_extension_id_ = id;
+    rtp_rtcp_->DeregisterSendRtpHeaderExtension(
+        kRtpExtensionTransmissionTimeOffset);
+    error = rtp_rtcp_->RegisterSendRtpHeaderExtension(
+        kRtpExtensionTransmissionTimeOffset, id);
+    for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
+         it != simulcast_rtp_rtcp_.end(); it++) {
+      (*it)->DeregisterSendRtpHeaderExtension(
+          kRtpExtensionTransmissionTimeOffset);
+      error |= (*it)->RegisterSendRtpHeaderExtension(
+          kRtpExtensionTransmissionTimeOffset, id);
+    }
+  } else {
+    // Disable the extension.
+    send_timestamp_extension_id_ = kInvalidRtpExtensionId;
+    rtp_rtcp_->DeregisterSendRtpHeaderExtension(
+        kRtpExtensionTransmissionTimeOffset);
+    for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
+         it != simulcast_rtp_rtcp_.end(); it++) {
+      (*it)->DeregisterSendRtpHeaderExtension(
+          kRtpExtensionTransmissionTimeOffset);
+    }
+  }
+  return error;
+}
+
+int ViEChannel::SetReceiveTimestampOffsetStatus(bool enable, int id) {
+  if (enable) {
+    return rtp_rtcp_->RegisterReceiveRtpHeaderExtension(
+        kRtpExtensionTransmissionTimeOffset, id);
+  } else {
+    return rtp_rtcp_->DeregisterReceiveRtpHeaderExtension(
+        kRtpExtensionTransmissionTimeOffset);
+  }
+}
+
+WebRtc_Word32 ViEChannel::EnableTMMBR(const bool enable) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: %d", __FUNCTION__, enable);
+  return rtp_rtcp_->SetTMMBRStatus(enable);
+}
+
+WebRtc_Word32 ViEChannel::EnableKeyFrameRequestCallback(const bool enable) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: %d", __FUNCTION__, enable);
+
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (enable && !codec_observer_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: No ViECodecObserver set", __FUNCTION__, enable);
+    return -1;
+  }
+  do_key_frame_callbackRequest_ = enable;
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::SetSSRC(const WebRtc_UWord32 SSRC,
+                                  const StreamType usage,
+                                  const uint8_t simulcast_idx) {
+  WEBRTC_TRACE(webrtc::kTraceInfo,
+               webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_),
+               "%s(usage:%d, SSRC: 0x%x, idx:%u)",
+               __FUNCTION__, usage, SSRC, simulcast_idx);
+  if (simulcast_idx == 0) {
+    return rtp_rtcp_->SetSSRC(SSRC);
+  }
+  CriticalSectionScoped cs(rtp_rtcp_cs_.get());
+  if (simulcast_idx > simulcast_rtp_rtcp_.size()) {
+      return -1;
+  }
+  std::list<RtpRtcp*>::const_iterator it = simulcast_rtp_rtcp_.begin();
+  for (int i = 1; i < simulcast_idx; ++i, ++it) {
+    if (it ==  simulcast_rtp_rtcp_.end()) {
+      return -1;
+    }
+  }
+  RtpRtcp* rtp_rtcp = *it;
+  if (usage == kViEStreamTypeRtx) {
+    return rtp_rtcp->SetRTXSendStatus(true, true, SSRC);
+  }
+  return rtp_rtcp->SetSSRC(SSRC);
+}
+
+WebRtc_Word32 ViEChannel::SetRemoteSSRCType(const StreamType usage,
+                                            const uint32_t SSRC) const {
+  WEBRTC_TRACE(webrtc::kTraceInfo,
+               webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_),
+               "%s(usage:%d, SSRC: 0x%x)",
+               __FUNCTION__, usage, SSRC);
+
+  return rtp_rtcp_->SetRTXReceiveStatus(true, SSRC);
+}
+
+WebRtc_Word32 ViEChannel::GetLocalSSRC(uint32_t* ssrc) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+  *ssrc = rtp_rtcp_->SSRC();
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::GetRemoteSSRC(uint32_t* ssrc) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  *ssrc = rtp_rtcp_->RemoteSSRC();
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::GetRemoteCSRC(uint32_t CSRCs[kRtpCsrcSize]) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  uint32_t arrayCSRC[kRtpCsrcSize];
+  memset(arrayCSRC, 0, sizeof(arrayCSRC));
+
+  int num_csrcs = rtp_rtcp_->RemoteCSRCs(arrayCSRC);
+  if (num_csrcs > 0) {
+    memcpy(CSRCs, arrayCSRC, num_csrcs * sizeof(WebRtc_UWord32));
+    for (int idx = 0; idx < num_csrcs; idx++) {
+      WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "\tCSRC[%d] = %lu", idx, CSRCs[idx]);
+    }
+  } else {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: CSRC list is empty", __FUNCTION__);
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::SetStartSequenceNumber(
+    WebRtc_UWord16 sequence_number) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  if (rtp_rtcp_->Sending()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: already sending", __FUNCTION__);
+    return -1;
+  }
+  return rtp_rtcp_->SetSequenceNumber(sequence_number);
+}
+
+WebRtc_Word32 ViEChannel::SetRTCPCName(const char rtcp_cname[]) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+  if (rtp_rtcp_->Sending()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: already sending", __FUNCTION__);
+    return -1;
+  }
+  return rtp_rtcp_->SetCNAME(rtcp_cname);
+}
+
+WebRtc_Word32 ViEChannel::GetRTCPCName(char rtcp_cname[]) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+  return rtp_rtcp_->CNAME(rtcp_cname);
+}
+
+WebRtc_Word32 ViEChannel::GetRemoteRTCPCName(char rtcp_cname[]) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  WebRtc_UWord32 remoteSSRC = rtp_rtcp_->RemoteSSRC();
+  return rtp_rtcp_->RemoteCNAME(remoteSSRC, rtcp_cname);
+}
+
+WebRtc_Word32 ViEChannel::RegisterRtpObserver(ViERTPObserver* observer) {
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (observer) {
+    if (rtp_observer_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: observer alread added", __FUNCTION__);
+      return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: observer added", __FUNCTION__);
+    rtp_observer_ = observer;
+  } else {
+    if (!rtp_observer_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: no observer added", __FUNCTION__);
+      return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: observer removed", __FUNCTION__);
+    rtp_observer_ = NULL;
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::RegisterRtcpObserver(ViERTCPObserver* observer) {
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (observer) {
+    if (rtcp_observer_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: observer alread added", __FUNCTION__);
+      return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: observer added", __FUNCTION__);
+    rtcp_observer_ = observer;
+  } else {
+    if (!rtcp_observer_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: no observer added", __FUNCTION__);
+      return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: observer removed", __FUNCTION__);
+    rtcp_observer_ = NULL;
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::SendApplicationDefinedRTCPPacket(
+    const WebRtc_UWord8 sub_type,
+    WebRtc_UWord32 name,
+    const WebRtc_UWord8* data,
+    WebRtc_UWord16 data_length_in_bytes) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+  if (!rtp_rtcp_->Sending()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: not sending", __FUNCTION__);
+    return -1;
+  }
+  if (!data) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: no input argument", __FUNCTION__);
+    return -1;
+  }
+  if (data_length_in_bytes % 4 != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: input length error", __FUNCTION__);
+    return -1;
+  }
+  RTCPMethod rtcp_method = rtp_rtcp_->RTCP();
+  if (rtcp_method == kRtcpOff) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: RTCP not enabled", __FUNCTION__);
+    return -1;
+  }
+  // Create and send packet.
+  if (rtp_rtcp_->SetRTCPApplicationSpecificData(sub_type, name, data,
+                                               data_length_in_bytes) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not send RTCP application data", __FUNCTION__);
+    return -1;
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::GetSendRtcpStatistics(uint16_t* fraction_lost,
+                                                uint32_t* cumulative_lost,
+                                                uint32_t* extended_max,
+                                                uint32_t* jitter_samples,
+                                                int32_t* rtt_ms) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  // TODO(pwestin) how do we do this for simulcast ? average for all
+  // except cumulative_lost that is the sum ?
+  // CriticalSectionScoped cs(rtp_rtcp_cs_.get());
+
+  // for (std::list<RtpRtcp*>::const_iterator it = simulcast_rtp_rtcp_.begin();
+  //      it != simulcast_rtp_rtcp_.end();
+  //      it++) {
+  //   RtpRtcp* rtp_rtcp = *it;
+  // }
+  uint32_t remote_ssrc = rtp_rtcp_->RemoteSSRC();
+
+  // Get all RTCP receiver report blocks that have been received on this
+  // channel. If we receive RTP packets from a remote source we know the
+  // remote SSRC and use the report block from him.
+  // Otherwise use the first report block.
+  std::vector<RTCPReportBlock> remote_stats;
+  if (rtp_rtcp_->RemoteRTCPStat(&remote_stats) != 0 || remote_stats.empty()) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not get remote stats", __FUNCTION__);
+    return -1;
+  }
+  std::vector<RTCPReportBlock>::const_iterator statistics =
+      remote_stats.begin();
+  for (; statistics != remote_stats.end(); ++statistics) {
+    if (statistics->remoteSSRC == remote_ssrc)
+      break;
+  }
+
+  if (statistics == remote_stats.end()) {
+    // If we have not received any RTCP packets from this SSRC it probably means
+    // we have not received any RTP packets.
+    // Use the first received report block instead.
+    statistics = remote_stats.begin();
+    remote_ssrc = statistics->remoteSSRC;
+  }
+
+  *fraction_lost = statistics->fractionLost;
+  *cumulative_lost = statistics->cumulativeLost;
+  *extended_max = statistics->extendedHighSeqNum;
+  *jitter_samples = statistics->jitter;
+
+  WebRtc_UWord16 dummy;
+  WebRtc_UWord16 rtt = 0;
+  if (rtp_rtcp_->RTT(remote_ssrc, &rtt, &dummy, &dummy, &dummy) != 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not get RTT", __FUNCTION__);
+    return -1;
+  }
+  *rtt_ms = rtt;
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::GetReceivedRtcpStatistics(uint16_t* fraction_lost,
+                                                    uint32_t* cumulative_lost,
+                                                    uint32_t* extended_max,
+                                                    uint32_t* jitter_samples,
+                                                    int32_t* rtt_ms) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+
+  WebRtc_UWord8 frac_lost = 0;
+  if (rtp_rtcp_->StatisticsRTP(&frac_lost, cumulative_lost, extended_max,
+                              jitter_samples) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not get received RTP statistics", __FUNCTION__);
+    return -1;
+  }
+  *fraction_lost = frac_lost;
+
+  uint32_t remote_ssrc = rtp_rtcp_->RemoteSSRC();
+  uint16_t dummy = 0;
+  uint16_t rtt = 0;
+  if (rtp_rtcp_->RTT(remote_ssrc, &rtt, &dummy, &dummy, &dummy) != 0) {
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not get RTT", __FUNCTION__);
+  }
+  *rtt_ms = rtt;
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::GetRtpStatistics(uint32_t* bytes_sent,
+                                           uint32_t* packets_sent,
+                                           uint32_t* bytes_received,
+                                           uint32_t* packets_received) const {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  if (rtp_rtcp_->DataCountersRTP(bytes_sent,
+                                 packets_sent,
+                                 bytes_received,
+                                 packets_received) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not get counters", __FUNCTION__);
+    return -1;
+  }
+  CriticalSectionScoped cs(rtp_rtcp_cs_.get());
+  for (std::list<RtpRtcp*>::const_iterator it = simulcast_rtp_rtcp_.begin();
+       it != simulcast_rtp_rtcp_.end();
+       it++) {
+    uint32_t bytes_sent_temp = 0;
+    uint32_t packets_sent_temp = 0;
+    RtpRtcp* rtp_rtcp = *it;
+    rtp_rtcp->DataCountersRTP(&bytes_sent_temp, &packets_sent_temp, NULL, NULL);
+    bytes_sent += bytes_sent_temp;
+    packets_sent += packets_sent_temp;
+  }
+  return 0;
+}
+
+void ViEChannel::GetBandwidthUsage(uint32_t* total_bitrate_sent,
+                                   uint32_t* video_bitrate_sent,
+                                   uint32_t* fec_bitrate_sent,
+                                   uint32_t* nackBitrateSent) const {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  rtp_rtcp_->BitrateSent(total_bitrate_sent, video_bitrate_sent,
+                         fec_bitrate_sent, nackBitrateSent);
+  CriticalSectionScoped cs(rtp_rtcp_cs_.get());
+  for (std::list<RtpRtcp*>::const_iterator it = simulcast_rtp_rtcp_.begin();
+       it != simulcast_rtp_rtcp_.end(); it++) {
+    uint32_t stream_rate = 0;
+    uint32_t video_rate = 0;
+    uint32_t fec_rate = 0;
+    uint32_t nackRate = 0;
+    RtpRtcp* rtp_rtcp = *it;
+    rtp_rtcp->BitrateSent(&stream_rate, &video_rate, &fec_rate, &nackRate);
+    *total_bitrate_sent += stream_rate;
+    *fec_bitrate_sent += fec_rate;
+    *nackBitrateSent += nackRate;
+  }
+}
+
+int ViEChannel::GetEstimatedReceiveBandwidth(
+    uint32_t* estimated_bandwidth) const {
+  return rtp_rtcp_->EstimatedReceiveBandwidth(estimated_bandwidth);
+}
+
+WebRtc_Word32 ViEChannel::StartRTPDump(const char file_nameUTF8[1024],
+                                       RTPDirections direction) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  if (direction != kRtpIncoming && direction != kRtpOutgoing) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: invalid input", __FUNCTION__);
+    return -1;
+  }
+
+  if (direction == kRtpIncoming) {
+    return vie_receiver_.StartRTPDump(file_nameUTF8);
+  } else {
+    return vie_sender_.StartRTPDump(file_nameUTF8);
+  }
+}
+
+WebRtc_Word32 ViEChannel::StopRTPDump(RTPDirections direction) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+
+  if (direction != kRtpIncoming && direction != kRtpOutgoing) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: invalid input", __FUNCTION__);
+    return -1;
+  }
+
+  if (direction == kRtpIncoming) {
+    return vie_receiver_.StopRTPDump();
+  } else {
+    return vie_sender_.StopRTPDump();
+  }
+}
+
+WebRtc_Word32 ViEChannel::SetLocalReceiver(const WebRtc_UWord16 rtp_port,
+                                           const WebRtc_UWord16 rtcp_port,
+                                           const char* ip_address) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  callback_cs_->Enter();
+  if (external_transport_) {
+    callback_cs_->Leave();
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: external transport registered", __FUNCTION__);
+    return -1;
+  }
+  callback_cs_->Leave();
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  if (socket_transport_.Receiving()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: already receiving", __FUNCTION__);
+    return -1;
+  }
+
+  const char* multicast_ip_address = NULL;
+  if (socket_transport_.InitializeReceiveSockets(&vie_receiver_, rtp_port,
+                                                 ip_address,
+                                                 multicast_ip_address,
+                                                 rtcp_port) != 0) {
+    WebRtc_Word32 socket_error = socket_transport_.LastError();
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: could not initialize receive sockets. Socket error: %d",
+                 __FUNCTION__, socket_error);
+    return -1;
+  }
+  return 0;
+#else
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: not available for external transport", __FUNCTION__);
+  return -1;
+#endif
+}
+
+WebRtc_Word32 ViEChannel::GetLocalReceiver(WebRtc_UWord16* rtp_port,
+                                           WebRtc_UWord16* rtcp_port,
+                                           char* ip_address) const {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  callback_cs_->Enter();
+  if (external_transport_) {
+    callback_cs_->Leave();
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: external transport registered", __FUNCTION__);
+    return -1;
+  }
+  callback_cs_->Leave();
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  if (socket_transport_.ReceiveSocketsInitialized() == false) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: receive sockets not initialized", __FUNCTION__);
+    return -1;
+  }
+
+  char multicast_ip_address[UdpTransport::kIpAddressVersion6Length];
+  if (socket_transport_.ReceiveSocketInformation(ip_address, *rtp_port,
+                                                 *rtcp_port,
+                                                 multicast_ip_address) != 0) {
+    WebRtc_Word32 socket_error = socket_transport_.LastError();
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+      "%s: could not get receive socket information. Socket error: %d",
+      __FUNCTION__, socket_error);
+    return -1;
+  }
+  return 0;
+#else
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: not available for external transport", __FUNCTION__);
+  return -1;
+#endif
+}
+
+WebRtc_Word32 ViEChannel::SetSendDestination(
+    const char* ip_address,
+    const WebRtc_UWord16 rtp_port,
+    const WebRtc_UWord16 rtcp_port,
+    const WebRtc_UWord16 source_rtp_port,
+    const WebRtc_UWord16 source_rtcp_port) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  callback_cs_->Enter();
+  if (external_transport_) {
+    callback_cs_->Leave();
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: external transport registered", __FUNCTION__);
+    return -1;
+  }
+  callback_cs_->Leave();
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  const bool is_ipv6 = socket_transport_.IpV6Enabled();
+  if (UdpTransport::IsIpAddressValid(ip_address, is_ipv6) == false) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Not a valid IP address: %s", __FUNCTION__, ip_address);
+    return -1;
+  }
+  if (socket_transport_.InitializeSendSockets(ip_address, rtp_port,
+                                              rtcp_port)!= 0) {
+    WebRtc_Word32 socket_error = socket_transport_.LastError();
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: could not initialize send socket. Socket error: %d",
+                 __FUNCTION__, socket_error);
+    return -1;
+  }
+
+  if (source_rtp_port != 0) {
+    WebRtc_UWord16 receive_rtp_port = 0;
+    WebRtc_UWord16 receive_rtcp_port = 0;
+    if (socket_transport_.ReceiveSocketInformation(NULL, receive_rtp_port,
+                                                   receive_rtcp_port,
+                                                   NULL) != 0) {
+      WebRtc_Word32 socket_error = socket_transport_.LastError();
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+        "%s: could not get receive port information. Socket error: %d",
+        __FUNCTION__, socket_error);
+      return -1;
+    }
+    // Initialize an extra socket only if send port differs from receive
+    // port.
+    if (source_rtp_port != receive_rtp_port) {
+      if (socket_transport_.InitializeSourcePorts(source_rtp_port,
+                                                  source_rtcp_port) != 0) {
+        WebRtc_Word32 socket_error = socket_transport_.LastError();
+        WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                     "%s: could not set source ports. Socket error: %d",
+                     __FUNCTION__, socket_error);
+        return -1;
+      }
+    }
+  }
+  vie_sender_.RegisterSendTransport(&socket_transport_);
+
+  // Workaround to avoid SSRC colision detection in loppback tests.
+  if (!is_ipv6) {
+    WebRtc_UWord32 local_host_address = 0;
+    const WebRtc_UWord32 current_ip_address =
+        UdpTransport::InetAddrIPV4(ip_address);
+
+    if ((UdpTransport::LocalHostAddress(local_host_address) == 0 &&
+        local_host_address == current_ip_address) ||
+        strncmp("127.0.0.1", ip_address, 9) == 0) {
+      rtp_rtcp_->SetSSRC(0xFFFFFFFF);
+      WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "Running in loopback. Forcing fixed SSRC");
+    }
+  } else {
+    char local_host_address[16];
+    char current_ip_address[16];
+
+    WebRtc_Word32 conv_result =
+      UdpTransport::LocalHostAddressIPV6(local_host_address);
+    conv_result += socket_transport_.InetPresentationToNumeric(
+        23, ip_address, current_ip_address);
+    if (conv_result == 0) {
+      bool local_host = true;
+      for (WebRtc_Word32 i = 0; i < 16; i++) {
+        if (local_host_address[i] != current_ip_address[i]) {
+          local_host = false;
+          break;
+        }
+      }
+      if (!local_host) {
+        local_host = true;
+        for (WebRtc_Word32 i = 0; i < 15; i++) {
+          if (current_ip_address[i] != 0) {
+            local_host = false;
+            break;
+          }
+        }
+        if (local_host == true && current_ip_address[15] != 1) {
+          local_host = false;
+        }
+      }
+      if (local_host) {
+        rtp_rtcp_->SetSSRC(0xFFFFFFFF);
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVideo,
+                     ViEId(engine_id_, channel_id_),
+                     "Running in loopback. Forcing fixed SSRC");
+      }
+    }
+  }
+  return 0;
+#else
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo,
+               ViEId(engine_id_, channel_id_),
+               "%s: not available for external transport", __FUNCTION__);
+  return -1;
+#endif
+}
+
+WebRtc_Word32 ViEChannel::GetSendDestination(
+    char* ip_address,
+    WebRtc_UWord16* rtp_port,
+    WebRtc_UWord16* rtcp_port,
+    WebRtc_UWord16* source_rtp_port,
+    WebRtc_UWord16* source_rtcp_port) const {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  callback_cs_->Enter();
+  if (external_transport_) {
+    callback_cs_->Leave();
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: external transport registered", __FUNCTION__);
+    return -1;
+  }
+  callback_cs_->Leave();
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  if (socket_transport_.SendSocketsInitialized() == false) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: send sockets not initialized", __FUNCTION__);
+    return -1;
+  }
+  if (socket_transport_.SendSocketInformation(ip_address, *rtp_port,
+                                              *rtcp_port) != 0) {
+    WebRtc_Word32 socket_error = socket_transport_.LastError();
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+      "%s: could not get send socket information. Socket error: %d",
+      __FUNCTION__, socket_error);
+    return -1;
+  }
+  *source_rtp_port = 0;
+  *source_rtcp_port = 0;
+  if (socket_transport_.SourcePortsInitialized()) {
+    socket_transport_.SourcePorts(*source_rtp_port, *source_rtcp_port);
+  }
+  return 0;
+#else
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+      "%s: not available for external transport", __FUNCTION__);
+  return -1;
+#endif
+}
+
+WebRtc_Word32 ViEChannel::StartSend() {
+  CriticalSectionScoped cs(callback_cs_.get());
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  if (!external_transport_) {
+    if (socket_transport_.SendSocketsInitialized() == false) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: send sockets not initialized", __FUNCTION__);
+      return -1;
+    }
+  }
+#endif
+  rtp_rtcp_->SetSendingMediaStatus(true);
+
+  if (rtp_rtcp_->Sending()) {
+    // Already sending.
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Already sending", __FUNCTION__);
+    return kViEBaseAlreadySending;
+  }
+  if (rtp_rtcp_->SetSendingStatus(true) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not start sending RTP", __FUNCTION__);
+    return -1;
+  }
+  CriticalSectionScoped cs_rtp(rtp_rtcp_cs_.get());
+  for (std::list<RtpRtcp*>::const_iterator it = simulcast_rtp_rtcp_.begin();
+       it != simulcast_rtp_rtcp_.end();
+       it++) {
+    RtpRtcp* rtp_rtcp = *it;
+    rtp_rtcp->SetSendingMediaStatus(true);
+    rtp_rtcp->SetSendingStatus(true);
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::StopSend() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  CriticalSectionScoped cs(rtp_rtcp_cs_.get());
+  rtp_rtcp_->SetSendingMediaStatus(false);
+  for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
+       it != simulcast_rtp_rtcp_.end();
+       it++) {
+    RtpRtcp* rtp_rtcp = *it;
+    rtp_rtcp->SetSendingMediaStatus(false);
+  }
+  if (!rtp_rtcp_->Sending()) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Not sending", __FUNCTION__);
+    return kViEBaseNotSending;
+  }
+
+  // Reset.
+  rtp_rtcp_->ResetSendDataCountersRTP();
+  if (rtp_rtcp_->SetSendingStatus(false) != 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: could not stop RTP sending", __FUNCTION__);
+    return -1;
+  }
+  for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
+       it != simulcast_rtp_rtcp_.end();
+       it++) {
+    RtpRtcp* rtp_rtcp = *it;
+    rtp_rtcp->ResetSendDataCountersRTP();
+    rtp_rtcp->SetSendingStatus(false);
+  }
+  return 0;
+}
+
+bool ViEChannel::Sending() {
+  return rtp_rtcp_->Sending();
+}
+
+WebRtc_Word32 ViEChannel::StartReceive() {
+  CriticalSectionScoped cs(callback_cs_.get());
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  if (!external_transport_) {
+    if (socket_transport_.Receiving()) {
+      // Warning, don't return error.
+      WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: already receiving", __FUNCTION__);
+      return 0;
+    }
+    if (socket_transport_.ReceiveSocketsInitialized() == false) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: receive sockets not initialized", __FUNCTION__);
+      return -1;
+    }
+    if (socket_transport_.StartReceiving(kViENumReceiveSocketBuffers) != 0) {
+      WebRtc_Word32 socket_error = socket_transport_.LastError();
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+        "%s: could not get receive socket information. Socket error:%d",
+        __FUNCTION__, socket_error);
+      return -1;
+    }
+  }
+#endif
+  if (StartDecodeThread() != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: could not start decoder thread", __FUNCTION__);
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    socket_transport_.StopReceiving();
+#endif
+    vie_receiver_.StopReceive();
+    return -1;
+  }
+  vie_receiver_.StartReceive();
+
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::StopReceive() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  vie_receiver_.StopReceive();
+  StopDecodeThread();
+  vcm_.ResetDecoder();
+  {
+    CriticalSectionScoped cs(callback_cs_.get());
+    if (external_transport_) {
+      return 0;
+    }
+  }
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  if (socket_transport_.Receiving() == false) {
+    // Warning, don't return error
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo,
+                 ViEId(engine_id_, channel_id_), "%s: not receiving",
+                 __FUNCTION__);
+    return 0;
+  }
+  if (socket_transport_.StopReceiving() != 0) {
+    WebRtc_Word32 socket_error = socket_transport_.LastError();
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Socket error: %d", __FUNCTION__, socket_error);
+    return -1;
+  }
+#endif
+
+  return 0;
+}
+
+bool ViEChannel::Receiving() {
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  return socket_transport_.Receiving();
+#else
+  return false;
+#endif
+}
+
+WebRtc_Word32 ViEChannel::GetSourceInfo(WebRtc_UWord16* rtp_port,
+                                        WebRtc_UWord16* rtcp_port,
+                                        char* ip_address,
+                                        WebRtc_UWord32 ip_address_length) {
+  {
+    CriticalSectionScoped cs(callback_cs_.get());
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+                 __FUNCTION__);
+    if (external_transport_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: external transport registered", __FUNCTION__);
+      return -1;
+    }
+  }
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  if (socket_transport_.IpV6Enabled() &&
+      ip_address_length < UdpTransport::kIpAddressVersion6Length) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: IP address length is too small for IPv6", __FUNCTION__);
+    return -1;
+  } else if (ip_address_length < UdpTransport::kIpAddressVersion4Length) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: IP address length is too small for IPv4", __FUNCTION__);
+    return -1;
+  }
+
+  if (socket_transport_.RemoteSocketInformation(ip_address, *rtp_port,
+                                                *rtcp_port) != 0) {
+    WebRtc_Word32 socket_error = socket_transport_.LastError();
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Error getting source ports. Socket error: %d",
+                 __FUNCTION__, socket_error);
+    return -1;
+  }
+  return 0;
+#else
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: not available for external transport", __FUNCTION__);
+  return -1;
+#endif
+}
+
+WebRtc_Word32 ViEChannel::RegisterSendTransport(Transport* transport) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  if (socket_transport_.SendSocketsInitialized() ||
+      socket_transport_.ReceiveSocketsInitialized()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s:  socket transport already initialized", __FUNCTION__);
+    return -1;
+  }
+#endif
+  if (rtp_rtcp_->Sending()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Sending", __FUNCTION__);
+    return -1;
+  }
+
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (external_transport_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: transport already registered", __FUNCTION__);
+    return -1;
+  }
+  external_transport_ = transport;
+  vie_sender_.RegisterSendTransport(transport);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: Transport registered: 0x%p", __FUNCTION__,
+               &external_transport_);
+
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::DeregisterSendTransport() {
+  CriticalSectionScoped cs(callback_cs_.get());
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  if (!external_transport_) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: no transport registered", __FUNCTION__);
+    return -1;
+  }
+  if (rtp_rtcp_->Sending()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Sending", __FUNCTION__);
+    return -1;
+  }
+  external_transport_ = NULL;
+  vie_sender_.DeregisterSendTransport();
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::ReceivedRTPPacket(
+    const void* rtp_packet, const WebRtc_Word32 rtp_packet_length) {
+  {
+    CriticalSectionScoped cs(callback_cs_.get());
+    if (!external_transport_) {
+      return -1;
+    }
+  }
+  return vie_receiver_.ReceivedRTPPacket(rtp_packet, rtp_packet_length);
+}
+
+WebRtc_Word32 ViEChannel::ReceivedRTCPPacket(
+  const void* rtcp_packet, const WebRtc_Word32 rtcp_packet_length) {
+  {
+    CriticalSectionScoped cs(callback_cs_.get());
+    if (!external_transport_) {
+      return -1;
+    }
+  }
+  return vie_receiver_.ReceivedRTCPPacket(rtcp_packet, rtcp_packet_length);
+}
+
+WebRtc_Word32 ViEChannel::EnableIPv6() {
+  callback_cs_->Enter();
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+
+  if (external_transport_) {
+    callback_cs_->Leave();
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "%s: External transport registered", __FUNCTION__);
+    return -1;
+  }
+  callback_cs_->Leave();
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  if (socket_transport_.IpV6Enabled()) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: IPv6 already enabled", __FUNCTION__);
+    return -1;
+  }
+
+  if (socket_transport_.EnableIpV6() != 0) {
+    WebRtc_Word32 socket_error = socket_transport_.LastError();
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: could not enable IPv6. Socket error: %d", __FUNCTION__,
+                 socket_error);
+    return -1;
+  }
+  return 0;
+#else
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: not available for external transport", __FUNCTION__);
+  return -1;
+#endif
+}
+
+bool ViEChannel::IsIPv6Enabled() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+  {
+    CriticalSectionScoped cs(callback_cs_.get());
+    if (external_transport_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: External transport registered", __FUNCTION__);
+      return false;
+    }
+  }
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  return socket_transport_.IpV6Enabled();
+#else
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: not available for external transport", __FUNCTION__);
+  return false;
+#endif
+}
+
+WebRtc_Word32 ViEChannel::SetSourceFilter(const WebRtc_UWord16 rtp_port,
+                                          const WebRtc_UWord16 rtcp_port,
+                                          const char* ip_address) {
+  callback_cs_->Enter();
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  if (external_transport_) {
+    callback_cs_->Leave();
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: External transport registered", __FUNCTION__);
+    return -1;
+  }
+  callback_cs_->Leave();
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  if (socket_transport_.SetFilterIP(ip_address) != 0) {
+    // Logging done in module.
+    return -1;
+  }
+  if (socket_transport_.SetFilterPorts(rtp_port, rtcp_port) != 0) {
+    // Logging done.
+    return -1;
+  }
+  return 0;
+#else
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: not available for external transport", __FUNCTION__);
+  return -1;
+#endif
+}
+
+WebRtc_Word32 ViEChannel::GetSourceFilter(WebRtc_UWord16* rtp_port,
+                                          WebRtc_UWord16* rtcp_port,
+                                          char* ip_address) const {
+  callback_cs_->Enter();
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  if (external_transport_) {
+    callback_cs_->Leave();
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: External transport registered", __FUNCTION__);
+    return -1;
+  }
+  callback_cs_->Leave();
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  if (socket_transport_.FilterIP(ip_address) != 0) {
+    // Logging done in module.
+    return -1;
+  }
+  if (socket_transport_.FilterPorts(*rtp_port, *rtcp_port) != 0) {
+    // Logging done in module.
+    return -1;
+  }
+  return 0;
+#else
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: not available for external transport", __FUNCTION__);
+  return -1;
+#endif
+}
+
+WebRtc_Word32 ViEChannel::SetToS(const WebRtc_Word32 DSCP,
+                                 const bool use_set_sockOpt) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+  {
+    CriticalSectionScoped cs(callback_cs_.get());
+    if (external_transport_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: External transport registered", __FUNCTION__);
+      return -1;
+    }
+  }
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  if (socket_transport_.SetToS(DSCP, use_set_sockOpt) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Socket error: %d", __FUNCTION__,
+                 socket_transport_.LastError());
+    return -1;
+  }
+  return 0;
+#else
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: not available for external transport", __FUNCTION__);
+  return -1;
+#endif
+}
+
+WebRtc_Word32 ViEChannel::GetToS(WebRtc_Word32* DSCP,
+                                 bool* use_set_sockOpt) const {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+  {
+    CriticalSectionScoped cs(callback_cs_.get());
+    if (external_transport_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: External transport registered", __FUNCTION__);
+      return -1;
+    }
+  }
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  if (socket_transport_.ToS(*DSCP, *use_set_sockOpt) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Socket error: %d", __FUNCTION__,
+                 socket_transport_.LastError());
+    return -1;
+  }
+  return 0;
+#else
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: not available for external transport", __FUNCTION__);
+  return -1;
+#endif
+}
+
+WebRtc_Word32 ViEChannel::SetSendGQoS(const bool enable,
+                                      const WebRtc_Word32 service_type,
+                                      const WebRtc_UWord32 max_bitrate,
+                                      const WebRtc_Word32 overrideDSCP) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+  {
+    CriticalSectionScoped cs(callback_cs_.get());
+    if (external_transport_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: External transport registered", __FUNCTION__);
+      return -1;
+    }
+  }
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  if (socket_transport_.SetQoS(enable, service_type, max_bitrate, overrideDSCP,
+                               false) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Socket error: %d", __FUNCTION__,
+                 socket_transport_.LastError());
+    return -1;
+  }
+  return 0;
+#else
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: not available for external transport", __FUNCTION__);
+  return -1;
+#endif
+}
+
+WebRtc_Word32 ViEChannel::GetSendGQoS(bool* enabled,
+                                      WebRtc_Word32* service_type,
+                                      WebRtc_Word32* overrideDSCP) const {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+  {
+    CriticalSectionScoped cs(callback_cs_.get());
+    if (external_transport_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: External transport registered", __FUNCTION__);
+      return -1;
+    }
+  }
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  if (socket_transport_.QoS(*enabled, *service_type, *overrideDSCP) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Socket error: %d", __FUNCTION__,
+                 socket_transport_.LastError());
+    return -1;
+  }
+  return 0;
+#else
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: not available for external transport", __FUNCTION__);
+  return -1;
+#endif
+}
+
+WebRtc_Word32 ViEChannel::SetMTU(WebRtc_UWord16 mtu) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+  if (rtp_rtcp_->SetMaxTransferUnit(mtu) != 0) {
+    // Logging done.
+    return -1;
+  }
+  CriticalSectionScoped cs(rtp_rtcp_cs_.get());
+  for (std::list<RtpRtcp*>::iterator it = simulcast_rtp_rtcp_.begin();
+       it != simulcast_rtp_rtcp_.end();
+       it++) {
+    RtpRtcp* rtp_rtcp = *it;
+    rtp_rtcp->SetMaxTransferUnit(mtu);
+  }
+  mtu_ = mtu;
+  return 0;
+}
+
+WebRtc_UWord16 ViEChannel::MaxDataPayloadLength() const {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+  return rtp_rtcp_->MaxDataPayloadLength();
+}
+
+WebRtc_Word32 ViEChannel::SetPacketTimeoutNotification(
+    bool enable, WebRtc_UWord32 timeout_seconds) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+  if (enable) {
+    WebRtc_UWord32 timeout_ms = 1000 * timeout_seconds;
+    if (rtp_rtcp_->SetPacketTimeout(timeout_ms, 0) != 0) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s", __FUNCTION__);
+      return -1;
+    }
+  } else {
+    if (rtp_rtcp_->SetPacketTimeout(0, 0) != 0) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s", __FUNCTION__);
+      return -1;
+    }
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::RegisterNetworkObserver(
+    ViENetworkObserver* observer) {
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (observer) {
+    if (networkObserver_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: observer alread added", __FUNCTION__);
+      return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: observer added", __FUNCTION__);
+    networkObserver_ = observer;
+  } else {
+    if (!networkObserver_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: no observer added", __FUNCTION__);
+      return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: observer removed", __FUNCTION__);
+    networkObserver_ = NULL;
+  }
+  return 0;
+}
+
+bool ViEChannel::NetworkObserverRegistered() {
+  CriticalSectionScoped cs(callback_cs_.get());
+  return networkObserver_ != NULL;
+}
+
+WebRtc_Word32 ViEChannel::SetPeriodicDeadOrAliveStatus(
+  const bool enable, const WebRtc_UWord32 sample_time_seconds) {
+  WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (!networkObserver_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: no observer added", __FUNCTION__);
+    return -1;
+  }
+
+  bool enabled = false;
+  WebRtc_UWord8 current_sampletime_seconds = 0;
+
+  // Get old settings.
+  rtp_rtcp_->PeriodicDeadOrAliveStatus(enabled, current_sampletime_seconds);
+  // Set new settings.
+  if (rtp_rtcp_->SetPeriodicDeadOrAliveStatus(
+        enable, static_cast<WebRtc_UWord8>(sample_time_seconds)) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: Could not set periodic dead-or-alive status",
+                 __FUNCTION__);
+    return -1;
+  }
+  if (!enable) {
+    // Restore last utilized sample time.
+    // Without this trick, the sample time would always be reset to default
+    // (2 sec), each time dead-or-alive was disabled without sample-time
+    // parameter.
+    rtp_rtcp_->SetPeriodicDeadOrAliveStatus(enable, current_sampletime_seconds);
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::SendUDPPacket(const WebRtc_Word8* data,
+                                        const WebRtc_UWord32 length,
+                                        WebRtc_Word32& transmitted_bytes,
+                                        bool use_rtcp_socket) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+  {
+    CriticalSectionScoped cs(callback_cs_.get());
+    if (external_transport_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: External transport registered", __FUNCTION__);
+      return -1;
+    }
+  }
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  transmitted_bytes = socket_transport_.SendRaw(data, length, use_rtcp_socket);
+  if (transmitted_bytes == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+                 __FUNCTION__);
+    return -1;
+  }
+  return 0;
+#else
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: not available for external transport", __FUNCTION__);
+  return -1;
+#endif
+}
+
+WebRtc_Word32 ViEChannel::EnableColorEnhancement(bool enable) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s(enable: %d)", __FUNCTION__, enable);
+
+  CriticalSectionScoped cs(callback_cs_.get());
+  color_enhancement_ = enable;
+  return 0;
+}
+
+RtpRtcp* ViEChannel::rtp_rtcp() {
+  return rtp_rtcp_.get();
+}
+
+WebRtc_Word32 ViEChannel::FrameToRender(VideoFrame& video_frame) {  // NOLINT
+  CriticalSectionScoped cs(callback_cs_.get());
+
+  if (decoder_reset_) {
+    // Trigger a callback to the user if the incoming codec has changed.
+    if (codec_observer_) {
+      VideoCodec decoder;
+      memset(&decoder, 0, sizeof(decoder));
+      if (vcm_.ReceiveCodec(&decoder) == VCM_OK) {
+        // VCM::ReceiveCodec returns the codec set by
+        // RegisterReceiveCodec, which might not be the size we're
+        // actually decoding.
+        decoder.width = static_cast<uint16_t>(video_frame.Width());
+        decoder.height = static_cast<uint16_t>(video_frame.Height());
+        codec_observer_->IncomingCodecChanged(channel_id_, decoder);
+      } else {
+        assert(false);
+        WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                     "%s: Could not get receive codec", __FUNCTION__);
+      }
+    }
+    decoder_reset_ = false;
+  }
+  if (effect_filter_) {
+    effect_filter_->Transform(video_frame.Length(), video_frame.Buffer(),
+                              video_frame.TimeStamp(), video_frame.Width(),
+                              video_frame.Height());
+  }
+  if (color_enhancement_) {
+    VideoProcessingModule::ColorEnhancement(video_frame);
+  }
+
+  // Record videoframe.
+  file_recorder_.RecordVideoFrame(video_frame);
+
+  WebRtc_UWord32 arr_ofCSRC[kRtpCsrcSize];
+  WebRtc_Word32 no_of_csrcs = rtp_rtcp_->RemoteCSRCs(arr_ofCSRC);
+  if (no_of_csrcs <= 0) {
+    arr_ofCSRC[0] = rtp_rtcp_->RemoteSSRC();
+    no_of_csrcs = 1;
+  }
+  WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s(timestamp:%u)", __FUNCTION__, video_frame.TimeStamp());
+  DeliverFrame(&video_frame, no_of_csrcs, arr_ofCSRC);
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::ReceivedDecodedReferenceFrame(
+  const WebRtc_UWord64 picture_id) {
+  return rtp_rtcp_->SendRTCPReferencePictureSelection(picture_id);
+}
+
+WebRtc_Word32 ViEChannel::StoreReceivedFrame(
+  const EncodedVideoData& frame_to_store) {
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::ReceiveStatistics(const WebRtc_UWord32 bit_rate,
+                                            const WebRtc_UWord32 frame_rate) {
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (codec_observer_) {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: bitrate %u, framerate %u", __FUNCTION__, bit_rate,
+                 frame_rate);
+    codec_observer_->IncomingRate(channel_id_, frame_rate, bit_rate);
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::RequestKeyFrame() {
+  WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+  {
+    CriticalSectionScoped cs(callback_cs_.get());
+    if (codec_observer_ && do_key_frame_callbackRequest_) {
+      codec_observer_->RequestNewKeyFrame(channel_id_);
+    }
+  }
+  return rtp_rtcp_->RequestKeyFrame();
+}
+
+WebRtc_Word32 ViEChannel::SliceLossIndicationRequest(
+  const WebRtc_UWord64 picture_id) {
+  return rtp_rtcp_->SendRTCPSliceLossIndication((WebRtc_UWord8) picture_id);
+}
+
+WebRtc_Word32 ViEChannel::ResendPackets(const WebRtc_UWord16* sequence_numbers,
+                                        WebRtc_UWord16 length) {
+  WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s(length: %d)", __FUNCTION__, length);
+  return rtp_rtcp_->SendNACK(sequence_numbers, length);
+}
+
+bool ViEChannel::ChannelDecodeThreadFunction(void* obj) {
+  return static_cast<ViEChannel*>(obj)->ChannelDecodeProcess();
+}
+
+bool ViEChannel::ChannelDecodeProcess() {
+  // Decode is blocking, but sleep some time anyway to not get a spin.
+  vcm_.Decode(kMaxDecodeWaitTimeMs);
+
+  if ((TickTime::Now() - vcm_rttreported_).Milliseconds() > 1000) {
+    WebRtc_UWord16 RTT;
+    WebRtc_UWord16 avgRTT;
+    WebRtc_UWord16 minRTT;
+    WebRtc_UWord16 maxRTT;
+
+    if (rtp_rtcp_->RTT(rtp_rtcp_->RemoteSSRC(), &RTT, &avgRTT, &minRTT, &maxRTT)
+        == 0) {
+      vcm_.SetReceiveChannelParameters(RTT);
+      vcm_rttreported_ = TickTime::Now();
+    } else if (!rtp_rtcp_->Sending() &&
+               (TickTime::Now() - vcm_rttreported_).Milliseconds() > 5000) {
+      // Wait at least 5 seconds before faking a 200 ms RTT. This is to
+      // make sure we have a chance to start sending before we decide to fake.
+      vcm_.SetReceiveChannelParameters(200);
+      vcm_rttreported_ = TickTime::Now();
+    }
+  }
+  return true;
+}
+
+WebRtc_Word32 ViEChannel::StartDecodeThread() {
+  // Start the decode thread
+  if (decode_thread_) {
+    // Already started.
+    return 0;
+  }
+  decode_thread_ = ThreadWrapper::CreateThread(ChannelDecodeThreadFunction,
+                                                   this, kHighestPriority,
+                                                   "DecodingThread");
+  if (!decode_thread_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: could not create decode thread", __FUNCTION__);
+    return -1;
+  }
+
+  unsigned int thread_id;
+  if (decode_thread_->Start(thread_id) == false) {
+    delete decode_thread_;
+    decode_thread_ = NULL;
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: could not start decode thread", __FUNCTION__);
+    return -1;
+  }
+
+  // Used to make sure that we don't give the VCM a faked RTT
+  // too early.
+  vcm_rttreported_ = TickTime::Now();
+
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: decode thread with id %u started", __FUNCTION__);
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::StopDecodeThread() {
+  if (!decode_thread_) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: decode thread not running", __FUNCTION__);
+    return 0;
+  }
+
+  decode_thread_->SetNotAlive();
+  if (decode_thread_->Stop()) {
+    delete decode_thread_;
+  } else {
+    // Couldn't stop the thread, leak instead of crash.
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: could not stop decode thread", __FUNCTION__);
+    assert(false && "could not stop decode thread");
+  }
+  decode_thread_ = NULL;
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::RegisterExternalEncryption(Encryption* encryption) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (external_encryption_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: external encryption already registered", __FUNCTION__);
+    return -1;
+  }
+
+  external_encryption_ = encryption;
+
+  vie_receiver_.RegisterExternalDecryption(encryption);
+  vie_sender_.RegisterExternalEncryption(encryption);
+
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s", "external encryption object registerd with channel=%d",
+               channel_id_);
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::DeRegisterExternalEncryption() {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (!external_encryption_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: external encryption is not registered", __FUNCTION__);
+    return -1;
+  }
+
+  external_transport_ = NULL;
+  vie_receiver_.DeregisterExternalDecryption();
+  vie_sender_.DeregisterExternalEncryption();
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s external encryption object de-registerd with channel=%d",
+               __FUNCTION__, channel_id_);
+  return 0;
+}
+
+WebRtc_Word32 ViEChannel::SetVoiceChannel(WebRtc_Word32 ve_channel_id,
+                                          VoEVideoSync* ve_sync_interface) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s, audio channel %d, video channel %d", __FUNCTION__,
+               ve_channel_id, channel_id_);
+
+  if (ve_sync_interface) {
+    // Register lip sync
+    module_process_thread_.RegisterModule(&vie_sync_);
+  } else {
+    module_process_thread_.DeRegisterModule(&vie_sync_);
+  }
+  return vie_sync_.ConfigureSync(ve_channel_id, ve_sync_interface,
+                                 rtp_rtcp_.get());
+}
+
+WebRtc_Word32 ViEChannel::VoiceChannel() {
+  return vie_sync_.VoiceChannel();
+}
+
+WebRtc_Word32 ViEChannel::RegisterEffectFilter(ViEEffectFilter* effect_filter) {
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (!effect_filter) {
+    if (!effect_filter_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: no effect filter added for channel %d",
+                   __FUNCTION__, channel_id_);
+      return -1;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: deregister effect filter for device %d", __FUNCTION__,
+                 channel_id_);
+  } else {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s: register effect filter for device %d", __FUNCTION__,
+                 channel_id_);
+    if (effect_filter_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id_),
+                   "%s: effect filter already added for channel %d",
+                   __FUNCTION__, channel_id_);
+      return -1;
+    }
+  }
+  effect_filter_ = effect_filter;
+  return 0;
+}
+
+ViEFileRecorder& ViEChannel::GetIncomingFileRecorder() {
+  // Start getting callback of all frames before they are decoded.
+  vcm_.RegisterFrameStorageCallback(this);
+  return file_recorder_;
+}
+
+void ViEChannel::ReleaseIncomingFileRecorder() {
+  // Stop getting callback of all frames before they are decoded.
+  vcm_.RegisterFrameStorageCallback(NULL);
+}
+
+void ViEChannel::OnApplicationDataReceived(const WebRtc_Word32 id,
+                                           const WebRtc_UWord8 sub_type,
+                                           const WebRtc_UWord32 name,
+                                           const WebRtc_UWord16 length,
+                                           const WebRtc_UWord8* data) {
+  if (channel_id_ != ChannelId(id)) {
+    WEBRTC_TRACE(kTraceStream, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s, incorrect id", __FUNCTION__, id);
+    return;
+  }
+  CriticalSectionScoped cs(callback_cs_.get());
+  {
+    if (rtcp_observer_) {
+      rtcp_observer_->OnApplicationDataReceived(
+          channel_id_, sub_type, name, reinterpret_cast<const char*>(data),
+          length);
+    }
+  }
+}
+
+WebRtc_Word32 ViEChannel::OnInitializeDecoder(
+    const WebRtc_Word32 id,
+    const WebRtc_Word8 payload_type,
+    const char payload_name[RTP_PAYLOAD_NAME_SIZE],
+    const int frequency,
+    const WebRtc_UWord8 channels,
+    const WebRtc_UWord32 rate) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: payload_type %d, payload_name %s", __FUNCTION__,
+               payload_type, payload_name);
+  vcm_.ResetDecoder();
+
+  callback_cs_->Enter();
+  decoder_reset_ = true;
+  callback_cs_->Leave();
+  return 0;
+}
+
+void ViEChannel::OnPacketTimeout(const WebRtc_Word32 id) {
+  assert(ChannelId(id) == channel_id_);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (networkObserver_) {
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (socket_transport_.Receiving() || external_transport_) {
+#else
+    if (external_transport_) {
+#endif
+      networkObserver_->PacketTimeout(channel_id_, NoPacket);
+      rtp_packet_timeout_ = true;
+    }
+  }
+}
+
+void ViEChannel::OnReceivedPacket(const WebRtc_Word32 id,
+                                  const RtpRtcpPacketType packet_type) {
+  assert(ChannelId(id) == channel_id_);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+  if (rtp_packet_timeout_ && packet_type == kPacketRtp) {
+    CriticalSectionScoped cs(callback_cs_.get());
+    if (networkObserver_) {
+      networkObserver_->PacketTimeout(channel_id_, PacketReceived);
+    }
+
+    // Reset even if no observer set, might have been removed during timeout.
+    rtp_packet_timeout_ = false;
+  }
+}
+
+void ViEChannel::OnPeriodicDeadOrAlive(const WebRtc_Word32 id,
+                                       const RTPAliveType alive) {
+  assert(ChannelId(id) == channel_id_);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s(id=%d, alive=%d)", __FUNCTION__, id, alive);
+
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (!networkObserver_) {
+    return;
+  }
+  bool is_alive = true;
+  if (alive == kRtpDead) {
+    is_alive = false;
+  }
+  networkObserver_->OnPeriodicDeadOrAlive(channel_id_, is_alive);
+  return;
+}
+
+void ViEChannel::OnIncomingSSRCChanged(const WebRtc_Word32 id,
+                                       const WebRtc_UWord32 SSRC) {
+  if (channel_id_ != ChannelId(id)) {
+    assert(false);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s, incorrect id", __FUNCTION__, id);
+    return;
+  }
+
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: %u", __FUNCTION__, SSRC);
+
+  CriticalSectionScoped cs(callback_cs_.get());
+  {
+    if (rtp_observer_) {
+      rtp_observer_->IncomingSSRCChanged(channel_id_, SSRC);
+    }
+  }
+}
+
+void ViEChannel::OnIncomingCSRCChanged(const WebRtc_Word32 id,
+                                       const WebRtc_UWord32 CSRC,
+                                       const bool added) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: %u added: %d", __FUNCTION__, CSRC, added);
+
+  if (channel_id_ != ChannelId(id)) {
+    assert(false);
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+                 "%s, incorrect id", __FUNCTION__, id);
+    return;
+  }
+
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_),
+               "%s: %u", __FUNCTION__, CSRC);
+
+  CriticalSectionScoped cs(callback_cs_.get());
+  {
+    if (rtp_observer_) {
+      rtp_observer_->IncomingCSRCChanged(channel_id_, CSRC, added);
+    }
+  }
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_channel.h b/src/video_engine/vie_channel.h
new file mode 100644
index 0000000..f9c7b8d
--- /dev/null
+++ b/src/video_engine/vie_channel.h
@@ -0,0 +1,405 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_CHANNEL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_CHANNEL_H_
+
+#include <list>
+
+#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "modules/udp_transport/interface/udp_transport.h"
+#include "modules/video_coding/main/interface/video_coding_defines.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "typedefs.h"  // NOLINT
+#include "video_engine/include/vie_network.h"
+#include "video_engine/include/vie_rtp_rtcp.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_file_recorder.h"
+#include "video_engine/vie_frame_provider_base.h"
+#include "video_engine/vie_receiver.h"
+#include "video_engine/vie_sender.h"
+#include "video_engine/vie_sync_module.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class Encryption;
+class ProcessThread;
+class RtpRtcp;
+class ThreadWrapper;
+class VideoCodingModule;
+class VideoDecoder;
+class VideoRenderCallback;
+class ViEDecoderObserver;
+class ViEEffectFilter;
+class ViENetworkObserver;
+class ViERTCPObserver;
+class ViERTPObserver;
+class VoEVideoSync;
+
+class ViEChannel
+    : public VCMFrameTypeCallback,
+      public VCMReceiveCallback,
+      public VCMReceiveStatisticsCallback,
+      public VCMPacketRequestCallback,
+      public VCMFrameStorageCallback,
+      public RtcpFeedback,
+      public RtpFeedback,
+      public ViEFrameProviderBase {
+ public:
+  ViEChannel(WebRtc_Word32 channel_id,
+             WebRtc_Word32 engine_id,
+             WebRtc_UWord32 number_of_cores,
+             ProcessThread& module_process_thread,
+             RtcpIntraFrameObserver* intra_frame_observer,
+             RtcpBandwidthObserver* bandwidth_observer,
+             RemoteBitrateEstimator* remote_bitrate_estimator,
+             RtpRtcp* default_rtp_rtcp,
+             bool sender);
+  ~ViEChannel();
+
+  WebRtc_Word32 Init();
+
+  // Sets the encoder to use for the channel. |new_stream| indicates the encoder
+  // type has changed and we should start a new RTP stream.
+  WebRtc_Word32 SetSendCodec(const VideoCodec& video_codec,
+                             bool new_stream = true);
+  WebRtc_Word32 SetReceiveCodec(const VideoCodec& video_codec);
+  WebRtc_Word32 GetReceiveCodec(VideoCodec* video_codec);
+  WebRtc_Word32 RegisterCodecObserver(ViEDecoderObserver* observer);
+  // Registers an external decoder. |decoder_render| is set to true if the
+  // decoder will do the rendering. If |decoder_render| is set,|render_delay|
+  // indicates the time needed to decode and render a frame.
+  WebRtc_Word32 RegisterExternalDecoder(const WebRtc_UWord8 pl_type,
+                                        VideoDecoder* decoder,
+                                        bool decoder_render,
+                                        WebRtc_Word32 render_delay);
+  WebRtc_Word32 DeRegisterExternalDecoder(const WebRtc_UWord8 pl_type);
+  WebRtc_Word32 ReceiveCodecStatistics(WebRtc_UWord32* num_key_frames,
+                                       WebRtc_UWord32* num_delta_frames);
+  WebRtc_UWord32 DiscardedPackets() const;
+
+  // Only affects calls to SetReceiveCodec done after this call.
+  WebRtc_Word32 WaitForKeyFrame(bool wait);
+
+  // If enabled, a key frame request will be sent as soon as there are lost
+  // packets. If |only_key_frames| are set, requests are only sent for loss in
+  // key frames.
+  WebRtc_Word32 SetSignalPacketLossStatus(bool enable, bool only_key_frames);
+
+  WebRtc_Word32 SetRTCPMode(const RTCPMethod rtcp_mode);
+  WebRtc_Word32 GetRTCPMode(RTCPMethod* rtcp_mode);
+  WebRtc_Word32 SetNACKStatus(const bool enable);
+  WebRtc_Word32 SetFECStatus(const bool enable,
+                             const unsigned char payload_typeRED,
+                             const unsigned char payload_typeFEC);
+  WebRtc_Word32 SetHybridNACKFECStatus(const bool enable,
+                                       const unsigned char payload_typeRED,
+                                       const unsigned char payload_typeFEC);
+  WebRtc_Word32 SetKeyFrameRequestMethod(const KeyFrameRequestMethod method);
+  bool EnableRemb(bool enable);
+  int SetSendTimestampOffsetStatus(bool enable, int id);
+  int SetReceiveTimestampOffsetStatus(bool enable, int id);
+  WebRtc_Word32 EnableTMMBR(const bool enable);
+  WebRtc_Word32 EnableKeyFrameRequestCallback(const bool enable);
+
+  // Sets SSRC for outgoing stream.
+  WebRtc_Word32 SetSSRC(const uint32_t SSRC,
+                        const StreamType usage,
+                        const unsigned char simulcast_idx);
+
+  // Gets SSRC for outgoing stream.
+  WebRtc_Word32 GetLocalSSRC(uint32_t* ssrc);
+
+  // Gets SSRC for the incoming stream.
+  WebRtc_Word32 GetRemoteSSRC(uint32_t* ssrc);
+
+  // Gets the CSRC for the incoming stream.
+  WebRtc_Word32 GetRemoteCSRC(uint32_t CSRCs[kRtpCsrcSize]);
+
+  // Sets the starting sequence number, must be called before StartSend.
+  WebRtc_Word32 SetStartSequenceNumber(WebRtc_UWord16 sequence_number);
+
+  // Sets the CName for the outgoing stream on the channel.
+  WebRtc_Word32 SetRTCPCName(const char rtcp_cname[]);
+
+  // Gets the CName for the outgoing stream on the channel.
+  WebRtc_Word32 GetRTCPCName(char rtcp_cname[]);
+
+  // Gets the CName of the incoming stream.
+  WebRtc_Word32 GetRemoteRTCPCName(char rtcp_cname[]);
+  WebRtc_Word32 RegisterRtpObserver(ViERTPObserver* observer);
+  WebRtc_Word32 RegisterRtcpObserver(ViERTCPObserver* observer);
+  WebRtc_Word32 SendApplicationDefinedRTCPPacket(
+      const WebRtc_UWord8 sub_type,
+      WebRtc_UWord32 name,
+      const WebRtc_UWord8* data,
+      WebRtc_UWord16 data_length_in_bytes);
+
+  // Returns statistics reported by the remote client in an RTCP packet.
+  WebRtc_Word32 GetSendRtcpStatistics(uint16_t* fraction_lost,
+                                      uint32_t* cumulative_lost,
+                                      uint32_t* extended_max,
+                                      uint32_t* jitter_samples,
+                                      int32_t* rtt_ms);
+
+  // Returns our localy created statistics of the received RTP stream.
+  WebRtc_Word32 GetReceivedRtcpStatistics(uint16_t* fraction_lost,
+                                          uint32_t* cumulative_lost,
+                                          uint32_t* extended_max,
+                                          uint32_t* jitter_samples,
+                                          int32_t* rtt_ms);
+
+  // Gets sent/received packets statistics.
+  WebRtc_Word32 GetRtpStatistics(uint32_t* bytes_sent,
+                                 uint32_t* packets_sent,
+                                 uint32_t* bytes_received,
+                                 uint32_t* packets_received) const;
+  void GetBandwidthUsage(uint32_t* total_bitrate_sent,
+                         uint32_t* video_bitrate_sent,
+                         uint32_t* fec_bitrate_sent,
+                         uint32_t* nackBitrateSent) const;
+  int GetEstimatedReceiveBandwidth(uint32_t* estimated_bandwidth) const;
+
+  WebRtc_Word32 StartRTPDump(const char file_nameUTF8[1024],
+                             RTPDirections direction);
+  WebRtc_Word32 StopRTPDump(RTPDirections direction);
+
+  // Implements RtcpFeedback.
+  // TODO(pwestin) Depricate this functionality.
+  virtual void OnApplicationDataReceived(const WebRtc_Word32 id,
+                                         const WebRtc_UWord8 sub_type,
+                                         const WebRtc_UWord32 name,
+                                         const WebRtc_UWord16 length,
+                                         const WebRtc_UWord8* data);
+  // Implements RtpFeedback.
+  virtual WebRtc_Word32 OnInitializeDecoder(
+      const WebRtc_Word32 id,
+      const WebRtc_Word8 payload_type,
+      const char payload_name[RTP_PAYLOAD_NAME_SIZE],
+      const int frequency,
+      const WebRtc_UWord8 channels,
+      const WebRtc_UWord32 rate);
+  virtual void OnPacketTimeout(const WebRtc_Word32 id);
+  virtual void OnReceivedPacket(const WebRtc_Word32 id,
+                                const RtpRtcpPacketType packet_type);
+  virtual void OnPeriodicDeadOrAlive(const WebRtc_Word32 id,
+                                     const RTPAliveType alive);
+  virtual void OnIncomingSSRCChanged(const WebRtc_Word32 id,
+                                     const WebRtc_UWord32 SSRC);
+  virtual void OnIncomingCSRCChanged(const WebRtc_Word32 id,
+                                     const WebRtc_UWord32 CSRC,
+                                     const bool added);
+
+  WebRtc_Word32 SetLocalReceiver(const WebRtc_UWord16 rtp_port,
+                                 const WebRtc_UWord16 rtcp_port,
+                                 const char* ip_address);
+  WebRtc_Word32 GetLocalReceiver(WebRtc_UWord16* rtp_port,
+                                 WebRtc_UWord16* rtcp_port,
+                                 char* ip_address) const;
+  WebRtc_Word32 SetSendDestination(const char* ip_address,
+                                   const WebRtc_UWord16 rtp_port,
+                                   const WebRtc_UWord16 rtcp_port,
+                                   const WebRtc_UWord16 source_rtp_port,
+                                   const WebRtc_UWord16 source_rtcp_port);
+  WebRtc_Word32 GetSendDestination(char* ip_address,
+                                   WebRtc_UWord16* rtp_port,
+                                   WebRtc_UWord16* rtcp_port,
+                                   WebRtc_UWord16* source_rtp_port,
+                                   WebRtc_UWord16* source_rtcp_port) const;
+  WebRtc_Word32 GetSourceInfo(WebRtc_UWord16* rtp_port,
+                              WebRtc_UWord16* rtcp_port,
+                              char* ip_address,
+                              WebRtc_UWord32 ip_address_length);
+
+  WebRtc_Word32 SetRemoteSSRCType(const StreamType usage,
+                                  const uint32_t SSRC) const;
+
+  WebRtc_Word32 StartSend();
+  WebRtc_Word32 StopSend();
+  bool Sending();
+  WebRtc_Word32 StartReceive();
+  WebRtc_Word32 StopReceive();
+  bool Receiving();
+
+  WebRtc_Word32 RegisterSendTransport(Transport* transport);
+  WebRtc_Word32 DeregisterSendTransport();
+
+  // Incoming packet from external transport.
+  WebRtc_Word32 ReceivedRTPPacket(const void* rtp_packet,
+                                  const WebRtc_Word32 rtp_packet_length);
+
+  // Incoming packet from external transport.
+  WebRtc_Word32 ReceivedRTCPPacket(const void* rtcp_packet,
+                                   const WebRtc_Word32 rtcp_packet_length);
+
+  WebRtc_Word32 EnableIPv6();
+  bool IsIPv6Enabled();
+  WebRtc_Word32 SetSourceFilter(const WebRtc_UWord16 rtp_port,
+                                const WebRtc_UWord16 rtcp_port,
+                                const char* ip_address);
+  WebRtc_Word32 GetSourceFilter(WebRtc_UWord16* rtp_port,
+                                WebRtc_UWord16* rtcp_port,
+                                char* ip_address) const;
+
+  WebRtc_Word32 SetToS(const WebRtc_Word32 DSCP, const bool use_set_sockOpt);
+  WebRtc_Word32 GetToS(WebRtc_Word32* DSCP, bool* use_set_sockOpt) const;
+  WebRtc_Word32 SetSendGQoS(const bool enable,
+                            const WebRtc_Word32 service_type,
+                            const WebRtc_UWord32 max_bitrate,
+                            const WebRtc_Word32 overrideDSCP);
+  WebRtc_Word32 GetSendGQoS(bool* enabled,
+                            WebRtc_Word32* service_type,
+                            WebRtc_Word32* overrideDSCP) const;
+
+  // Sets the maximum transfer unit size for the network link, i.e. including
+  // IP, UDP and RTP headers.
+  WebRtc_Word32 SetMTU(WebRtc_UWord16 mtu);
+
+  // Returns maximum allowed payload size, i.e. the maximum allowed size of
+  // encoded data in each packet.
+  WebRtc_UWord16 MaxDataPayloadLength() const;
+  WebRtc_Word32 SetMaxPacketBurstSize(WebRtc_UWord16 max_number_of_packets);
+  WebRtc_Word32 SetPacketBurstSpreadState(bool enable,
+                                          const WebRtc_UWord16 frame_periodMS);
+
+  WebRtc_Word32 SetPacketTimeoutNotification(bool enable,
+                                             WebRtc_UWord32 timeout_seconds);
+  WebRtc_Word32 RegisterNetworkObserver(ViENetworkObserver* observer);
+  bool NetworkObserverRegistered();
+  WebRtc_Word32 SetPeriodicDeadOrAliveStatus(
+      const bool enable, const WebRtc_UWord32 sample_time_seconds);
+
+  WebRtc_Word32 SendUDPPacket(const WebRtc_Word8* data,
+                              const WebRtc_UWord32 length,
+                              WebRtc_Word32& transmitted_bytes,
+                              bool use_rtcp_socket);
+
+  WebRtc_Word32 EnableColorEnhancement(bool enable);
+
+  // Gets the modules used by the channel.
+  RtpRtcp* rtp_rtcp();
+
+  // Implements VCMReceiveCallback.
+  virtual WebRtc_Word32 FrameToRender(VideoFrame& video_frame);  // NOLINT
+
+  // Implements VCMReceiveCallback.
+  virtual WebRtc_Word32 ReceivedDecodedReferenceFrame(
+      const WebRtc_UWord64 picture_id);
+
+  // Implements VCM.
+  virtual WebRtc_Word32 StoreReceivedFrame(
+      const EncodedVideoData& frame_to_store);
+
+  // Implements VideoReceiveStatisticsCallback.
+  virtual WebRtc_Word32 ReceiveStatistics(const WebRtc_UWord32 bit_rate,
+                                          const WebRtc_UWord32 frame_rate);
+
+  // Implements VideoFrameTypeCallback.
+  virtual WebRtc_Word32 RequestKeyFrame();
+
+  // Implements VideoFrameTypeCallback.
+  virtual WebRtc_Word32 SliceLossIndicationRequest(
+      const WebRtc_UWord64 picture_id);
+
+  // Implements VideoPacketRequestCallback.
+  virtual WebRtc_Word32 ResendPackets(const WebRtc_UWord16* sequence_numbers,
+                                      WebRtc_UWord16 length);
+
+  WebRtc_Word32 RegisterExternalEncryption(Encryption* encryption);
+  WebRtc_Word32 DeRegisterExternalEncryption();
+
+  WebRtc_Word32 SetVoiceChannel(WebRtc_Word32 ve_channel_id,
+                                VoEVideoSync* ve_sync_interface);
+  WebRtc_Word32 VoiceChannel();
+
+  // Implements ViEFrameProviderBase.
+  virtual int FrameCallbackChanged() {return -1;}
+
+  WebRtc_Word32 RegisterEffectFilter(ViEEffectFilter* effect_filter);
+
+  ViEFileRecorder& GetIncomingFileRecorder();
+  void ReleaseIncomingFileRecorder();
+
+ protected:
+  static bool ChannelDecodeThreadFunction(void* obj);
+  bool ChannelDecodeProcess();
+
+ private:
+  // Assumed to be protected.
+  WebRtc_Word32 StartDecodeThread();
+  WebRtc_Word32 StopDecodeThread();
+
+  WebRtc_Word32 ProcessNACKRequest(const bool enable);
+  WebRtc_Word32 ProcessFECRequest(const bool enable,
+                                  const unsigned char payload_typeRED,
+                                  const unsigned char payload_typeFEC);
+
+  WebRtc_Word32 channel_id_;
+  WebRtc_Word32 engine_id_;
+  WebRtc_UWord32 number_of_cores_;
+  WebRtc_UWord8 num_socket_threads_;
+
+  // Used for all registered callbacks except rendering.
+  scoped_ptr<CriticalSectionWrapper> callback_cs_;
+  scoped_ptr<CriticalSectionWrapper> rtp_rtcp_cs_;
+
+  RtpRtcp* default_rtp_rtcp_;
+
+  // Owned modules/classes.
+  scoped_ptr<RtpRtcp> rtp_rtcp_;
+  std::list<RtpRtcp*> simulcast_rtp_rtcp_;
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  UdpTransport& socket_transport_;
+#endif
+  VideoCodingModule& vcm_;
+  ViEReceiver vie_receiver_;
+  ViESender vie_sender_;
+  ViESyncModule vie_sync_;
+
+  // Not owned.
+  ProcessThread& module_process_thread_;
+  ViEDecoderObserver* codec_observer_;
+  bool do_key_frame_callbackRequest_;
+  ViERTPObserver* rtp_observer_;
+  ViERTCPObserver* rtcp_observer_;
+  ViENetworkObserver* networkObserver_;
+  RtcpIntraFrameObserver* intra_frame_observer_;
+  scoped_ptr<RtcpBandwidthObserver> bandwidth_observer_;
+  bool rtp_packet_timeout_;
+  int send_timestamp_extension_id_;
+  bool using_packet_spread_;
+
+  Transport* external_transport_;
+
+  bool decoder_reset_;
+  bool wait_for_key_frame_;
+  ThreadWrapper* decode_thread_;
+
+  Encryption* external_encryption_;
+
+  ViEEffectFilter* effect_filter_;
+  bool color_enhancement_;
+
+  // Time when RTT time was last reported to VCM JB.
+  TickTime vcm_rttreported_;
+
+  ViEFileRecorder file_recorder_;
+
+  // User set MTU, -1 if not set.
+  uint16_t mtu_;
+  const bool sender_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_CHANNEL_H_
diff --git a/src/video_engine/vie_channel_group.cc b/src/video_engine/vie_channel_group.cc
new file mode 100644
index 0000000..8d0e429
--- /dev/null
+++ b/src/video_engine/vie_channel_group.cc
@@ -0,0 +1,87 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_channel_group.h"
+
+#include "modules/bitrate_controller/include/bitrate_controller.h"
+#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "video_engine/vie_channel.h"
+#include "video_engine/vie_encoder.h"
+#include "video_engine/vie_remb.h"
+
+namespace webrtc {
+
+ChannelGroup::ChannelGroup(ProcessThread* process_thread,
+                           const OverUseDetectorOptions& options)
+    : remb_(new VieRemb(process_thread)),
+      bitrate_controller_(BitrateController::CreateBitrateController()),
+      remote_bitrate_estimator_(new RemoteBitrateEstimator(remb_.get(),
+                                                           options)) {
+}
+
+ChannelGroup::~ChannelGroup() {
+  assert(channels_.empty());
+  assert(!remb_->InUse());
+}
+void ChannelGroup::AddChannel(int channel_id) {
+  channels_.insert(channel_id);
+}
+
+void ChannelGroup::RemoveChannel(int channel_id, unsigned int ssrc) {
+  channels_.erase(channel_id);
+  remote_bitrate_estimator_->RemoveStream(ssrc);
+}
+
+bool ChannelGroup::HasChannel(int channel_id) {
+  return channels_.find(channel_id) != channels_.end();
+}
+
+bool ChannelGroup::Empty() {
+  return channels_.empty();
+}
+
+BitrateController* ChannelGroup::GetBitrateController() {
+  return bitrate_controller_.get();
+}
+
+RemoteBitrateEstimator* ChannelGroup::GetRemoteBitrateEstimator() {
+  return remote_bitrate_estimator_.get();
+}
+
+bool ChannelGroup::SetChannelRembStatus(int channel_id,
+                                        bool sender,
+                                        bool receiver,
+                                        ViEChannel* channel,
+                                        ViEEncoder* encoder) {
+  // Update the channel state.
+  if (sender || receiver) {
+    if (!channel->EnableRemb(true)) {
+      return false;
+    }
+  } else if (channel) {
+    channel->EnableRemb(false);
+  }
+  // Update the REMB instance with necessary RTP modules.
+  RtpRtcp* rtp_module = channel->rtp_rtcp();
+  if (sender) {
+    remb_->AddRembSender(rtp_module);
+  } else {
+    remb_->RemoveRembSender(rtp_module);
+  }
+  if (receiver) {
+    remb_->AddReceiveChannel(rtp_module);
+  } else {
+    remb_->RemoveReceiveChannel(rtp_module);
+  }
+  return true;
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_channel_group.h b/src/video_engine/vie_channel_group.h
new file mode 100644
index 0000000..bcd58b2
--- /dev/null
+++ b/src/video_engine/vie_channel_group.h
@@ -0,0 +1,62 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_CHANNEL_GROUP_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_CHANNEL_GROUP_H_
+
+#include <set>
+
+#include "system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class BitrateController;
+struct OverUseDetectorOptions;
+class ProcessThread;
+class RemoteBitrateEstimator;
+class RemoteBitrateObserver;
+class ViEChannel;
+class ViEEncoder;
+class VieRemb;
+
+// Channel group contains data common for several channels. All channels in the
+// group are assumed to send/receive data to the same end-point.
+class ChannelGroup {
+ public:
+  ChannelGroup(ProcessThread* process_thread,
+               const OverUseDetectorOptions& options);
+  ~ChannelGroup();
+
+  void AddChannel(int channel_id);
+  void RemoveChannel(int channel_id, unsigned int ssrc);
+  bool HasChannel(int channel_id);
+  bool Empty();
+
+  bool SetChannelRembStatus(int channel_id,
+                            bool sender,
+                            bool receiver,
+                            ViEChannel* channel,
+                            ViEEncoder* encoder);
+
+  BitrateController* GetBitrateController();
+  RemoteBitrateEstimator* GetRemoteBitrateEstimator();
+
+ private:
+  typedef std::set<int> ChannelSet;
+
+  scoped_ptr<VieRemb> remb_;
+  scoped_ptr<BitrateController> bitrate_controller_;
+  scoped_ptr<RemoteBitrateEstimator> remote_bitrate_estimator_;
+  ChannelSet channels_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_CHANNEL_GROUP_H_
diff --git a/src/video_engine/vie_channel_manager.cc b/src/video_engine/vie_channel_manager.cc
new file mode 100644
index 0000000..50c7fdd
--- /dev/null
+++ b/src/video_engine/vie_channel_manager.cc
@@ -0,0 +1,485 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_channel_manager.h"
+
+#include "engine_configurations.h"  // NOLINT
+#include "modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "modules/utility/interface/process_thread.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/map_wrapper.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/vie_channel.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_encoder.h"
+#include "video_engine/vie_remb.h"
+#include "voice_engine/include/voe_video_sync.h"
+
+namespace webrtc {
+
+ViEChannelManager::ViEChannelManager(
+    int engine_id,
+    int number_of_cores,
+    ViEPerformanceMonitor* vie_performance_monitor,
+    const OverUseDetectorOptions& options)
+    : channel_id_critsect_(CriticalSectionWrapper::CreateCriticalSection()),
+      engine_id_(engine_id),
+      number_of_cores_(number_of_cores),
+      free_channel_ids_(new bool[kViEMaxNumberOfChannels]),
+      free_channel_ids_size_(kViEMaxNumberOfChannels),
+      voice_sync_interface_(NULL),
+      voice_engine_(NULL),
+      module_process_thread_(NULL),
+      over_use_detector_options_(options) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id),
+               "ViEChannelManager::ViEChannelManager(engine_id: %d)",
+               engine_id);
+  for (int idx = 0; idx < free_channel_ids_size_; idx++) {
+    free_channel_ids_[idx] = true;
+  }
+}
+
+ViEChannelManager::~ViEChannelManager() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, ViEId(engine_id_),
+               "ViEChannelManager Destructor, engine_id: %d", engine_id_);
+
+  while (channel_map_.size() > 0) {
+    ChannelMap::iterator it = channel_map_.begin();
+    // DeleteChannel will erase this channel from the map and invalidate |it|.
+    DeleteChannel(it->first);
+  }
+
+  if (voice_sync_interface_) {
+    voice_sync_interface_->Release();
+  }
+  if (channel_id_critsect_) {
+    delete channel_id_critsect_;
+    channel_id_critsect_ = NULL;
+  }
+  if (free_channel_ids_) {
+    delete[] free_channel_ids_;
+    free_channel_ids_ = NULL;
+    free_channel_ids_size_ = 0;
+  }
+  assert(channel_groups_.empty());
+  assert(channel_map_.empty());
+  assert(vie_encoder_map_.empty());
+}
+
+void ViEChannelManager::SetModuleProcessThread(
+    ProcessThread* module_process_thread) {
+  assert(!module_process_thread_);
+  module_process_thread_ = module_process_thread;
+}
+
+int ViEChannelManager::CreateChannel(int* channel_id) {
+  CriticalSectionScoped cs(*channel_id_critsect_);
+
+  // Get a new channel id.
+  int new_channel_id = FreeChannelId();
+  if (new_channel_id == -1) {
+    return -1;
+  }
+
+  // Create a new channel group and add this channel.
+  ChannelGroup* group = new ChannelGroup(module_process_thread_,
+                                         over_use_detector_options_);
+  BitrateController* bitrate_controller = group->GetBitrateController();
+  ViEEncoder* vie_encoder = new ViEEncoder(engine_id_, new_channel_id,
+                                           number_of_cores_,
+                                           *module_process_thread_,
+                                           bitrate_controller);
+
+  RtcpBandwidthObserver* bandwidth_observer =
+      bitrate_controller->CreateRtcpBandwidthObserver();
+  RemoteBitrateEstimator* remote_bitrate_estimator =
+      group->GetRemoteBitrateEstimator();
+
+  if (!(vie_encoder->Init() &&
+        CreateChannelObject(new_channel_id, vie_encoder, bandwidth_observer,
+                            remote_bitrate_estimator, true))) {
+    delete vie_encoder;
+    vie_encoder = NULL;
+    ReturnChannelId(new_channel_id);
+    delete group;
+    return -1;
+  }
+
+  *channel_id = new_channel_id;
+  group->AddChannel(*channel_id);
+  channel_groups_.push_back(group);
+  return 0;
+}
+
+int ViEChannelManager::CreateChannel(int* channel_id,
+                                     int original_channel,
+                                     bool sender) {
+  CriticalSectionScoped cs(*channel_id_critsect_);
+
+  ChannelGroup* channel_group = FindGroup(original_channel);
+  if (!channel_group) {
+    return -1;
+  }
+
+  int new_channel_id = FreeChannelId();
+  if (new_channel_id == -1) {
+    return -1;
+  }
+
+  BitrateController* bitrate_controller = channel_group->GetBitrateController();
+
+  RtcpBandwidthObserver* bandwidth_observer =
+      bitrate_controller->CreateRtcpBandwidthObserver();
+  RemoteBitrateEstimator* remote_bitrate_estimator =
+      channel_group->GetRemoteBitrateEstimator();
+
+  ViEEncoder* vie_encoder = NULL;
+  if (sender) {
+    // We need to create a new ViEEncoder.
+    vie_encoder = new ViEEncoder(engine_id_, new_channel_id, number_of_cores_,
+                                 *module_process_thread_,
+                                 bitrate_controller);
+    if (!(vie_encoder->Init() &&
+          CreateChannelObject(new_channel_id, vie_encoder,
+                              bandwidth_observer,
+                              remote_bitrate_estimator,
+                              sender))) {
+      delete vie_encoder;
+      vie_encoder = NULL;
+    }
+  } else {
+    vie_encoder = ViEEncoderPtr(original_channel);
+    assert(vie_encoder);
+    if (!CreateChannelObject(new_channel_id, vie_encoder, bandwidth_observer,
+                             remote_bitrate_estimator, sender)) {
+      vie_encoder = NULL;
+    }
+  }
+
+  if (!vie_encoder) {
+    ReturnChannelId(new_channel_id);
+    return -1;
+  }
+
+  *channel_id = new_channel_id;
+  channel_group->AddChannel(*channel_id);
+  return 0;
+}
+
+int ViEChannelManager::DeleteChannel(int channel_id) {
+  ViEChannel* vie_channel = NULL;
+  ViEEncoder* vie_encoder = NULL;
+  ChannelGroup* group = NULL;
+  {
+    // Write lock to make sure no one is using the channel.
+    ViEManagerWriteScoped wl(this);
+
+    // Protect the maps.
+    CriticalSectionScoped cs(*channel_id_critsect_);
+
+    ChannelMap::iterator c_it = channel_map_.find(channel_id);
+    if (c_it == channel_map_.end()) {
+      // No such channel.
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
+                   "%s Channel doesn't exist: %d", __FUNCTION__, channel_id);
+      return -1;
+    }
+    vie_channel = c_it->second;
+    channel_map_.erase(c_it);
+
+    ReturnChannelId(channel_id);
+
+    // Find the encoder object.
+    EncoderMap::iterator e_it = vie_encoder_map_.find(channel_id);
+    assert(e_it != vie_encoder_map_.end());
+    vie_encoder = e_it->second;
+
+    group = FindGroup(channel_id);
+    group->SetChannelRembStatus(channel_id, false, false, vie_channel,
+                                vie_encoder);
+    unsigned int ssrc = 0;
+    vie_channel->GetRemoteSSRC(&ssrc);
+    group->RemoveChannel(channel_id, ssrc);
+
+    // Check if other channels are using the same encoder.
+    if (ChannelUsingViEEncoder(channel_id)) {
+      vie_encoder = NULL;
+    } else {
+      // Delete later when we've released the critsect.
+    }
+
+    // We can't erase the item before we've checked for other channels using
+    // same ViEEncoder.
+    vie_encoder_map_.erase(e_it);
+
+    if (group->Empty()) {
+      channel_groups_.remove(group);
+    } else {
+      group = NULL;  // Prevent group from being deleted.
+    }
+  }
+  delete vie_channel;
+  // Leave the write critsect before deleting the objects.
+  // Deleting a channel can cause other objects, such as renderers, to be
+  // deleted, which might take time.
+  // If statment just to show that this object is not always deleted.
+  if (vie_encoder) {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_),
+                 "%s ViEEncoder deleted for channel %d", __FUNCTION__,
+                 channel_id);
+    delete vie_encoder;
+  }
+  // If statment just to show that this object is not always deleted.
+  if (group) {
+    // Delete the group if empty last since the encoder holds a pointer to the
+    // BitrateController object that the group owns.
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_),
+                 "%s ChannelGroup deleted for channel %d", __FUNCTION__,
+                 channel_id);
+    delete group;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_),
+               "%s Channel %d deleted", __FUNCTION__, channel_id);
+  return 0;
+}
+
+int ViEChannelManager::SetVoiceEngine(VoiceEngine* voice_engine) {
+  // Write lock to make sure no one is using the channel.
+  ViEManagerWriteScoped wl(this);
+
+  CriticalSectionScoped cs(*channel_id_critsect_);
+
+  VoEVideoSync* sync_interface = NULL;
+  if (voice_engine) {
+    // Get new sync interface.
+    sync_interface = VoEVideoSync::GetInterface(voice_engine);
+    if (!sync_interface) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
+                   "%s Can't get audio sync interface from VoiceEngine.",
+                   __FUNCTION__);
+      return -1;
+    }
+  }
+
+  for (ChannelMap::iterator it = channel_map_.begin(); it != channel_map_.end();
+       ++it) {
+    it->second->SetVoiceChannel(-1, sync_interface);
+  }
+  if (voice_sync_interface_) {
+    voice_sync_interface_->Release();
+  }
+  voice_engine_ = voice_engine;
+  voice_sync_interface_ = sync_interface;
+  return 0;
+}
+
+int ViEChannelManager::ConnectVoiceChannel(int channel_id,
+                                           int audio_channel_id) {
+  CriticalSectionScoped cs(*channel_id_critsect_);
+  if (!voice_sync_interface_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id),
+                 "No VoE set");
+    return -1;
+  }
+  ViEChannel* channel = ViEChannelPtr(channel_id);
+  if (!channel) {
+    return -1;
+  }
+  return channel->SetVoiceChannel(audio_channel_id, voice_sync_interface_);
+}
+
+int ViEChannelManager::DisconnectVoiceChannel(int channel_id) {
+  CriticalSectionScoped cs(*channel_id_critsect_);
+  ViEChannel* channel = ViEChannelPtr(channel_id);
+  if (channel) {
+    channel->SetVoiceChannel(-1, NULL);
+    return 0;
+  }
+  return -1;
+}
+
+VoiceEngine* ViEChannelManager::GetVoiceEngine() {
+  CriticalSectionScoped cs(*channel_id_critsect_);
+  return voice_engine_;
+}
+
+bool ViEChannelManager::SetRembStatus(int channel_id, bool sender,
+                                      bool receiver) {
+  CriticalSectionScoped cs(*channel_id_critsect_);
+  ChannelGroup* group = FindGroup(channel_id);
+  if (!group) {
+    return false;
+  }
+  ViEChannel* channel = ViEChannelPtr(channel_id);
+  assert(channel);
+  ViEEncoder* encoder = ViEEncoderPtr(channel_id);
+  assert(encoder);
+
+  return group->SetChannelRembStatus(channel_id, sender, receiver, channel,
+                                     encoder);
+}
+
+bool ViEChannelManager::CreateChannelObject(
+    int channel_id,
+    ViEEncoder* vie_encoder,
+    RtcpBandwidthObserver* bandwidth_observer,
+    RemoteBitrateEstimator* remote_bitrate_estimator,
+    bool sender) {
+  // Register the channel at the encoder.
+  RtpRtcp* send_rtp_rtcp_module = vie_encoder->SendRtpRtcpModule();
+
+  ViEChannel* vie_channel = new ViEChannel(channel_id, engine_id_,
+                                           number_of_cores_,
+                                           *module_process_thread_,
+                                           vie_encoder,
+                                           bandwidth_observer,
+                                           remote_bitrate_estimator,
+                                           send_rtp_rtcp_module,
+                                           sender);
+  if (vie_channel->Init() != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
+                 "%s could not init channel", __FUNCTION__, channel_id);
+    delete vie_channel;
+    return false;
+  }
+  VideoCodec encoder;
+  if (vie_encoder->GetEncoder(&encoder) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id),
+                 "%s: Could not GetEncoder.", __FUNCTION__);
+    delete vie_channel;
+    return false;
+  }
+  if (sender && vie_channel->SetSendCodec(encoder) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, channel_id),
+                 "%s: Could not SetSendCodec.", __FUNCTION__);
+    delete vie_channel;
+    return false;
+  }
+  // Store the channel, add it to the channel group and save the vie_encoder.
+  channel_map_[channel_id] = vie_channel;
+  vie_encoder_map_[channel_id] = vie_encoder;
+  return true;
+}
+
+ViEChannel* ViEChannelManager::ViEChannelPtr(int channel_id) const {
+  CriticalSectionScoped cs(*channel_id_critsect_);
+  ChannelMap::const_iterator it = channel_map_.find(channel_id);
+  if (it == channel_map_.end()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
+                 "%s Channel doesn't exist: %d", __FUNCTION__, channel_id);
+    return NULL;
+  }
+  return it->second;
+}
+
+ViEEncoder* ViEChannelManager::ViEEncoderPtr(int video_channel_id) const {
+  CriticalSectionScoped cs(*channel_id_critsect_);
+  EncoderMap::const_iterator it = vie_encoder_map_.find(video_channel_id);
+  if (it == vie_encoder_map_.end()) {
+    return NULL;
+  }
+  return it->second;
+}
+
+int ViEChannelManager::FreeChannelId() {
+  int idx = 0;
+  while (idx < free_channel_ids_size_) {
+    if (free_channel_ids_[idx] == true) {
+      // We've found a free id, allocate it and return.
+      free_channel_ids_[idx] = false;
+      return idx + kViEChannelIdBase;
+    }
+    idx++;
+  }
+  WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_),
+               "Max number of channels reached: %d", channel_map_.size());
+  return -1;
+}
+
+void ViEChannelManager::ReturnChannelId(int channel_id) {
+  CriticalSectionScoped cs(*channel_id_critsect_);
+  assert(channel_id < kViEMaxNumberOfChannels + kViEChannelIdBase &&
+         channel_id >= kViEChannelIdBase);
+  free_channel_ids_[channel_id - kViEChannelIdBase] = true;
+}
+
+ChannelGroup* ViEChannelManager::FindGroup(int channel_id) {
+  for (ChannelGroups::iterator it = channel_groups_.begin();
+       it != channel_groups_.end(); ++it) {
+    if ((*it)->HasChannel(channel_id)) {
+      return *it;
+    }
+  }
+  return NULL;
+}
+
+bool ViEChannelManager::ChannelUsingViEEncoder(int channel_id) const {
+  CriticalSectionScoped cs(*channel_id_critsect_);
+  EncoderMap::const_iterator orig_it = vie_encoder_map_.find(channel_id);
+  if (orig_it == vie_encoder_map_.end()) {
+    // No ViEEncoder for this channel.
+    return false;
+  }
+
+  // Loop through all other channels to see if anyone points at the same
+  // ViEEncoder.
+  for (EncoderMap::const_iterator comp_it = vie_encoder_map_.begin();
+       comp_it != vie_encoder_map_.end(); ++comp_it) {
+    // Make sure we're not comparing the same channel with itself.
+    if (comp_it->first != channel_id) {
+      if (comp_it->second == orig_it->second) {
+        return true;
+      }
+    }
+  }
+  return false;
+}
+
+void ViEChannelManager::ChannelsUsingViEEncoder(int channel_id,
+                                                ChannelList* channels) const {
+  CriticalSectionScoped cs(*channel_id_critsect_);
+  EncoderMap::const_iterator orig_it = vie_encoder_map_.find(channel_id);
+
+  for (ChannelMap::const_iterator c_it = channel_map_.begin();
+       c_it != channel_map_.end(); ++c_it) {
+    EncoderMap::const_iterator comp_it = vie_encoder_map_.find(c_it->first);
+    assert(comp_it != vie_encoder_map_.end());
+    if (comp_it->second == orig_it->second) {
+      channels->push_back(c_it->second);
+    }
+  }
+}
+
+ViEChannelManagerScoped::ViEChannelManagerScoped(
+    const ViEChannelManager& vie_channel_manager)
+    : ViEManagerScopedBase(vie_channel_manager) {
+}
+
+ViEChannel* ViEChannelManagerScoped::Channel(int vie_channel_id) const {
+  return static_cast<const ViEChannelManager*>(vie_manager_)->ViEChannelPtr(
+      vie_channel_id);
+}
+ViEEncoder* ViEChannelManagerScoped::Encoder(int vie_channel_id) const {
+  return static_cast<const ViEChannelManager*>(vie_manager_)->ViEEncoderPtr(
+      vie_channel_id);
+}
+
+bool ViEChannelManagerScoped::ChannelUsingViEEncoder(int channel_id) const {
+  return (static_cast<const ViEChannelManager*>(vie_manager_))->
+      ChannelUsingViEEncoder(channel_id);
+}
+
+void ViEChannelManagerScoped::ChannelsUsingViEEncoder(
+    int channel_id, ChannelList* channels) const {
+  (static_cast<const ViEChannelManager*>(vie_manager_))->
+      ChannelsUsingViEEncoder(channel_id, channels);
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_channel_manager.h b/src/video_engine/vie_channel_manager.h
new file mode 100644
index 0000000..6294ab1
--- /dev/null
+++ b/src/video_engine/vie_channel_manager.h
@@ -0,0 +1,148 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_CHANNEL_MANAGER_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_CHANNEL_MANAGER_H_
+
+#include <list>
+#include <map>
+
+#include "engine_configurations.h"  // NOLINT
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "typedefs.h"  // NOLINT
+#include "video_engine/vie_channel_group.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_manager_base.h"
+#include "video_engine/vie_remb.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class MapWrapper;
+class ProcessThread;
+class ViEChannel;
+class ViEEncoder;
+class ViEPerformanceMonitor;
+class VoEVideoSync;
+class VoiceEngine;
+
+typedef std::list<ChannelGroup*> ChannelGroups;
+typedef std::list<ViEChannel*> ChannelList;
+typedef std::map<int, ViEChannel*> ChannelMap;
+typedef std::map<int, ViEEncoder*> EncoderMap;
+
+class ViEChannelManager: private ViEManagerBase {
+  friend class ViEChannelManagerScoped;
+ public:
+  ViEChannelManager(int engine_id,
+                    int number_of_cores,
+                    ViEPerformanceMonitor* vie_performance_monitor,
+                    const OverUseDetectorOptions& options);
+  ~ViEChannelManager();
+
+  void SetModuleProcessThread(ProcessThread* module_process_thread);
+
+  // Creates a new channel. 'channel_id' will be the id of the created channel.
+  int CreateChannel(int* channel_id);
+
+  // Creates a new channel grouped with |original_channel|. The new channel
+  // will get its own |ViEEncoder| if |sender| is set to true. It will be a
+  // receive only channel, without an own |ViEEncoder| if |sender| is false.
+  int CreateChannel(int* channel_id, int original_channel, bool sender);
+
+  // Deletes a channel.
+  int DeleteChannel(int channel_id);
+
+  // Set the voice engine instance to be used by all video channels.
+  int SetVoiceEngine(VoiceEngine* voice_engine);
+
+  // Enables lip sync of the channel.
+  int ConnectVoiceChannel(int channel_id, int audio_channel_id);
+
+  // Disables lip sync of the channel.
+  int DisconnectVoiceChannel(int channel_id);
+
+  VoiceEngine* GetVoiceEngine();
+
+  // Adds a channel to include when sending REMB.
+  bool SetRembStatus(int channel_id, bool sender, bool receiver);
+
+ private:
+  // Creates a channel object connected to |vie_encoder|. Assumed to be called
+  // protected.
+  bool CreateChannelObject(int channel_id, ViEEncoder* vie_encoder,
+                           RtcpBandwidthObserver* bandwidth_observer,
+                           RemoteBitrateEstimator* remote_bitrate_estimator,
+                           bool sender);
+
+  // Used by ViEChannelScoped, forcing a manager user to use scoped.
+  // Returns a pointer to the channel with id 'channel_id'.
+  ViEChannel* ViEChannelPtr(int channel_id) const;
+
+  // Methods used by ViECaptureScoped and ViEEncoderScoped.
+  // Gets the ViEEncoder used as input for video_channel_id
+  ViEEncoder* ViEEncoderPtr(int video_channel_id) const;
+
+  // Returns a free channel id, -1 if failing.
+  int FreeChannelId();
+
+  // Returns a previously allocated channel id.
+  void ReturnChannelId(int channel_id);
+
+  // Returns the iterator to the ChannelGroup containing |channel_id|.
+  ChannelGroup* FindGroup(int channel_id);
+
+  // Returns true if at least one other channels uses the same ViEEncoder as
+  // channel_id.
+  bool ChannelUsingViEEncoder(int channel_id) const;
+  void ChannelsUsingViEEncoder(int channel_id, ChannelList* channels) const;
+
+  // Protects channel_map_ and free_channel_ids_.
+  CriticalSectionWrapper* channel_id_critsect_;
+  int engine_id_;
+  int number_of_cores_;
+
+  // TODO(mflodman) Make part of channel group.
+  ChannelMap channel_map_;
+  bool* free_channel_ids_;
+  int free_channel_ids_size_;
+
+  // List with all channel groups.
+  std::list<ChannelGroup*> channel_groups_;
+
+  // TODO(mflodman) Make part of channel group.
+  // Maps Channel id -> ViEEncoder.
+  EncoderMap vie_encoder_map_;
+  VoEVideoSync* voice_sync_interface_;
+
+  VoiceEngine* voice_engine_;
+  ProcessThread* module_process_thread_;
+  const OverUseDetectorOptions& over_use_detector_options_;
+};
+
+class ViEChannelManagerScoped: private ViEManagerScopedBase {
+ public:
+  explicit ViEChannelManagerScoped(
+      const ViEChannelManager& vie_channel_manager);
+  ViEChannel* Channel(int vie_channel_id) const;
+  ViEEncoder* Encoder(int vie_channel_id) const;
+
+  // Returns true if at least one other channels uses the same ViEEncoder as
+  // channel_id.
+  bool ChannelUsingViEEncoder(int channel_id) const;
+
+  // Returns a list with pointers to all channels using the same encoder as the
+  // channel with |channel_id|, including the one with the specified id.
+  void ChannelsUsingViEEncoder(int channel_id, ChannelList* channels) const;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_CHANNEL_MANAGER_H_
diff --git a/src/video_engine/vie_codec_impl.cc b/src/video_engine/vie_codec_impl.cc
new file mode 100644
index 0000000..dd4d37d
--- /dev/null
+++ b/src/video_engine/vie_codec_impl.cc
@@ -0,0 +1,721 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_codec_impl.h"
+
+#include "engine_configurations.h"  // NOLINT
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/vie_capturer.h"
+#include "video_engine/vie_channel.h"
+#include "video_engine/vie_channel_manager.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_encoder.h"
+#include "video_engine/vie_impl.h"
+#include "video_engine/vie_input_manager.h"
+#include "video_engine/vie_shared_data.h"
+
+namespace webrtc {
+
+ViECodec* ViECodec::GetInterface(VideoEngine* video_engine) {
+#ifdef WEBRTC_VIDEO_ENGINE_CODEC_API
+  if (!video_engine) {
+    return NULL;
+  }
+  VideoEngineImpl* vie_impl = reinterpret_cast<VideoEngineImpl*>(video_engine);
+  ViECodecImpl* vie_codec_impl = vie_impl;
+  // Increase ref count.
+  (*vie_codec_impl)++;
+  return vie_codec_impl;
+#else
+  return NULL;
+#endif
+}
+
+int ViECodecImpl::Release() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
+               "ViECodecImpl::Release()");
+  // Decrease ref count.
+  (*this)--;
+
+  WebRtc_Word32 ref_count = GetCount();
+  if (ref_count < 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
+                 "ViECodec released too many times");
+    shared_data_->SetLastError(kViEAPIDoesNotExist);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
+               "ViECodec reference count: %d", ref_count);
+  return ref_count;
+}
+
+ViECodecImpl::ViECodecImpl(ViESharedData* shared_data)
+    : shared_data_(shared_data) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViECodecImpl::ViECodecImpl() Ctor");
+}
+
+ViECodecImpl::~ViECodecImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViECodecImpl::~ViECodecImpl() Dtor");
+}
+
+int ViECodecImpl::NumberOfCodecs() const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s", __FUNCTION__);
+  // +2 because of FEC(RED and ULPFEC)
+  return static_cast<int>((VideoCodingModule::NumberOfCodecs() + 2));
+}
+
+int ViECodecImpl::GetCodec(const unsigned char list_number,
+                           VideoCodec& video_codec) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(list_number: %d, codec_type: %d)", __FUNCTION__,
+               list_number, video_codec.codecType);
+  if (list_number == VideoCodingModule::NumberOfCodecs()) {
+    memset(&video_codec, 0, sizeof(VideoCodec));
+    strncpy(video_codec.plName, "red", 3);
+    video_codec.codecType = kVideoCodecRED;
+    video_codec.plType = VCM_RED_PAYLOAD_TYPE;
+  } else if (list_number == VideoCodingModule::NumberOfCodecs() + 1) {
+    memset(&video_codec, 0, sizeof(VideoCodec));
+    strncpy(video_codec.plName, "ulpfec", 6);
+    video_codec.codecType = kVideoCodecULPFEC;
+    video_codec.plType = VCM_ULPFEC_PAYLOAD_TYPE;
+  } else if (VideoCodingModule::Codec(list_number, &video_codec) != VCM_OK) {
+    WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Could not get codec for list_number: %u", __FUNCTION__,
+                 list_number);
+    shared_data_->SetLastError(kViECodecInvalidArgument);
+    return -1;
+  }
+  return 0;
+}
+
+int ViECodecImpl::SetSendCodec(const int video_channel,
+                               const VideoCodec& video_codec) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d, codec_type: %d)", __FUNCTION__,
+               video_channel, video_codec.codecType);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s: codec: %d, pl_type: %d, width: %d, height: %d, bitrate: %d"
+               "maxBr: %d, min_br: %d, frame_rate: %d, qpMax: %u,"
+               "numberOfSimulcastStreams: %u )", __FUNCTION__,
+               video_codec.codecType, video_codec.plType, video_codec.width,
+               video_codec.height, video_codec.startBitrate,
+               video_codec.maxBitrate, video_codec.minBitrate,
+               video_codec.maxFramerate, video_codec.qpMax,
+               video_codec.numberOfSimulcastStreams);
+  if (video_codec.codecType == kVideoCodecVP8) {
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "pictureLossIndicationOn: %d, feedbackModeOn: %d, "
+                 "complexity: %d, resilience: %d, numberOfTemporalLayers: %u",
+                 video_codec.codecSpecific.VP8.pictureLossIndicationOn,
+                 video_codec.codecSpecific.VP8.feedbackModeOn,
+                 video_codec.codecSpecific.VP8.complexity,
+                 video_codec.codecSpecific.VP8.resilience,
+                 video_codec.codecSpecific.VP8.numberOfTemporalLayers);
+  }
+  if (!CodecValid(video_codec)) {
+    // Error logged.
+    shared_data_->SetLastError(kViECodecInvalidCodec);
+    return -1;
+  }
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  assert(vie_encoder);
+  if (vie_encoder->Owner() != video_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Receive only channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecReceiveOnlyChannel);
+    return -1;
+  }
+
+  // Set a max_bitrate if the user hasn't set one.
+  VideoCodec video_codec_internal;
+  memcpy(&video_codec_internal, &video_codec, sizeof(VideoCodec));
+  if (video_codec_internal.maxBitrate == 0) {
+    // Max is one bit per pixel.
+    video_codec_internal.maxBitrate = (video_codec_internal.width *
+                                       video_codec_internal.height *
+                                       video_codec_internal.maxFramerate)
+                                       / 1000;
+    if (video_codec_internal.startBitrate > video_codec_internal.maxBitrate) {
+      // Don't limit the set start bitrate.
+      video_codec_internal.maxBitrate = video_codec_internal.startBitrate;
+    }
+    WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: New max bitrate set to %d kbps", __FUNCTION__,
+                 video_codec_internal.maxBitrate);
+  }
+
+  VideoCodec encoder;
+  vie_encoder->GetEncoder(&encoder);
+
+  // Make sure to generate a new SSRC if the codec type and/or resolution has
+  // changed. This won't have any effect if the user has set an SSRC.
+  bool new_rtp_stream = false;
+  if (encoder.codecType != video_codec_internal.codecType) {
+    new_rtp_stream = true;
+  }
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViEFrameProviderBase* frame_provider = NULL;
+
+  // Stop the media flow while reconfiguring.
+  vie_encoder->Pause();
+
+  // Check if we have a frame provider that is a camera and can provide this
+  // codec for us.
+  bool use_capture_device_as_encoder = false;
+  frame_provider = is.FrameProvider(vie_encoder);
+  if (frame_provider) {
+    if (frame_provider->Id() >= kViECaptureIdBase &&
+        frame_provider->Id() <= kViECaptureIdMax) {
+      ViECapturer* vie_capture = static_cast<ViECapturer*>(frame_provider);
+      // Try to get preencoded. Nothing to do if it is not supported.
+      if (vie_capture && vie_capture->PreEncodeToViEEncoder(
+          video_codec_internal,
+          *vie_encoder,
+          video_channel) == 0) {
+        use_capture_device_as_encoder = true;
+      }
+    }
+  }
+
+  // Update the encoder settings if we are not using a capture device capable
+  // of this codec.
+  if (!use_capture_device_as_encoder &&
+      vie_encoder->SetEncoder(video_codec_internal) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Could not change encoder for channel %d", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+
+  // Give the channel(s) the new information.
+  ChannelList channels;
+  cs.ChannelsUsingViEEncoder(video_channel, &channels);
+  for (ChannelList::iterator it = channels.begin(); it != channels.end();
+       ++it) {
+    bool ret = true;
+    if ((*it)->SetSendCodec(video_codec_internal, new_rtp_stream) != 0) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo,
+                   ViEId(shared_data_->instance_id(), video_channel),
+                   "%s: Could not set send codec for channel %d", __FUNCTION__,
+                   video_channel);
+      ret = false;
+    }
+    if (!ret) {
+      shared_data_->SetLastError(kViECodecUnknownError);
+      return -1;
+    }
+  }
+
+  // Update the protection mode, we might be switching NACK/FEC.
+  vie_encoder->UpdateProtectionMethod();
+
+  // Get new best format for frame provider.
+  if (frame_provider) {
+    frame_provider->FrameCallbackChanged();
+  }
+  // Restart the media flow
+  if (new_rtp_stream) {
+    // Stream settings changed, make sure we get a key frame.
+    vie_encoder->SendKeyFrame();
+  }
+  vie_encoder->Restart();
+  return 0;
+}
+
+int ViECodecImpl::GetSendCodec(const int video_channel,
+                               VideoCodec& video_codec) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No encoder for channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+  return vie_encoder->GetEncoder(&video_codec);
+}
+
+int ViECodecImpl::SetReceiveCodec(const int video_channel,
+                                  const VideoCodec& video_codec) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d, codec_type: %d)", __FUNCTION__,
+               video_channel, video_codec.codecType);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s: codec: %d, pl_type: %d, width: %d, height: %d, bitrate: %d,"
+               "maxBr: %d, min_br: %d, frame_rate: %d", __FUNCTION__,
+               video_codec.codecType, video_codec.plType, video_codec.width,
+               video_codec.height, video_codec.startBitrate,
+               video_codec.maxBitrate, video_codec.minBitrate,
+               video_codec.maxFramerate);
+
+  if (CodecValid(video_codec) == false) {
+    // Error logged.
+    shared_data_->SetLastError(kViECodecInvalidCodec);
+    return -1;
+  }
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+
+  if (vie_channel->SetReceiveCodec(video_codec) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Could not set receive codec for channel %d",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViECodecImpl::GetReceiveCodec(const int video_channel,
+                                  VideoCodec& video_codec) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d, codec_type: %d)", __FUNCTION__,
+               video_channel, video_codec.codecType);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+
+  if (vie_channel->GetReceiveCodec(&video_codec) != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViECodecImpl::GetCodecConfigParameters(
+  const int video_channel,
+  unsigned char config_parameters[kConfigParameterSize],
+  unsigned char& config_parameters_size) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No encoder for channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+
+  if (vie_encoder->GetCodecConfigParameters(config_parameters,
+                                            config_parameters_size) != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViECodecImpl::SetImageScaleStatus(const int video_channel,
+                                      const bool enable) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d, enable: %d)", __FUNCTION__, video_channel,
+               enable);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+
+  if (vie_encoder->ScaleInputImage(enable) != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViECodecImpl::GetSendCodecStastistics(const int video_channel,
+                                          unsigned int& key_frames,
+                                          unsigned int& delta_frames) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No send codec for channel %d", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+
+  if (vie_encoder->SendCodecStatistics(&key_frames, &delta_frames) != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViECodecImpl::GetReceiveCodecStastistics(const int video_channel,
+                                             unsigned int& key_frames,
+                                             unsigned int& delta_frames) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d, codec_type: %d)", __FUNCTION__,
+               video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->ReceiveCodecStatistics(&key_frames, &delta_frames) != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViECodecImpl::GetCodecTargetBitrate(const int video_channel,
+                                        unsigned int* bitrate) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d, codec_type: %d)", __FUNCTION__,
+               video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No send codec for channel %d", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+  return vie_encoder->CodecTargetBitrate(static_cast<WebRtc_UWord32*>(bitrate));
+}
+
+unsigned int ViECodecImpl::GetDiscardedPackets(const int video_channel) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d, codec_type: %d)", __FUNCTION__,
+               video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+  return vie_channel->DiscardedPackets();
+}
+
+int ViECodecImpl::SetKeyFrameRequestCallbackStatus(const int video_channel,
+                                                   const bool enable) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->EnableKeyFrameRequestCallback(enable) != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViECodecImpl::SetSignalKeyPacketLossStatus(const int video_channel,
+                                               const bool enable,
+                                               const bool only_key_frames) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d, enable: %d, only_key_frames: %d)",
+               __FUNCTION__, video_channel, enable);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->SetSignalPacketLossStatus(enable, only_key_frames) != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViECodecImpl::RegisterEncoderObserver(const int video_channel,
+                                          ViEEncoderObserver& observer) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s", __FUNCTION__);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No encoder for channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+  if (vie_encoder->RegisterCodecObserver(&observer) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Could not register codec observer at channel",
+                 __FUNCTION__);
+    shared_data_->SetLastError(kViECodecObserverAlreadyRegistered);
+    return -1;
+  }
+  return 0;
+}
+
+int ViECodecImpl::DeregisterEncoderObserver(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s", __FUNCTION__);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No encoder for channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+  if (vie_encoder->RegisterCodecObserver(NULL) != 0) {
+    shared_data_->SetLastError(kViECodecObserverNotRegistered);
+    return -1;
+  }
+  return 0;
+}
+
+int ViECodecImpl::RegisterDecoderObserver(const int video_channel,
+                                          ViEDecoderObserver& observer) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s", __FUNCTION__);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->RegisterCodecObserver(&observer) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Could not register codec observer at channel",
+                 __FUNCTION__);
+    shared_data_->SetLastError(kViECodecObserverAlreadyRegistered);
+    return -1;
+  }
+  return 0;
+}
+
+int ViECodecImpl::DeregisterDecoderObserver(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id()), "%s",
+               __FUNCTION__);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->RegisterCodecObserver(NULL) != 0) {
+    shared_data_->SetLastError(kViECodecObserverNotRegistered);
+    return -1;
+  }
+  return 0;
+}
+
+int ViECodecImpl::SendKeyFrame(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+  if (vie_encoder->SendKeyFrame() != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViECodecImpl::WaitForFirstKeyFrame(const int video_channel,
+                                       const bool wait) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id()),
+               "%s(video_channel: %d, wait: %d)", __FUNCTION__, video_channel,
+               wait);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->WaitForKeyFrame(wait) != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+bool ViECodecImpl::CodecValid(const VideoCodec& video_codec) {
+  // Check pl_name matches codec_type.
+  if (video_codec.codecType == kVideoCodecRED) {
+#if defined(WIN32)
+    if (_strnicmp(video_codec.plName, "red", 3) == 0) {
+#else
+    if (strncasecmp(video_codec.plName, "red", 3) == 0) {
+#endif
+      // We only care about the type and name for red.
+      return true;
+    }
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                 "Codec type doesn't match pl_name", video_codec.plType);
+    return false;
+  } else if (video_codec.codecType == kVideoCodecULPFEC) {
+#if defined(WIN32)
+    if (_strnicmp(video_codec.plName, "ULPFEC", 6) == 0) {
+#else
+    if (strncasecmp(video_codec.plName, "ULPFEC", 6) == 0) {
+#endif
+      // We only care about the type and name for ULPFEC.
+      return true;
+    }
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                 "Codec type doesn't match pl_name", video_codec.plType);
+    return false;
+  } else if ((video_codec.codecType == kVideoCodecVP8 &&
+                  strncmp(video_codec.plName, "VP8", 4) == 0) ||
+              (video_codec.codecType == kVideoCodecI420 &&
+                  strncmp(video_codec.plName, "I420", 4) == 0)) {
+    // OK.
+  } else {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                 "Codec type doesn't match pl_name", video_codec.plType);
+    return false;
+  }
+
+  if (video_codec.plType == 0 && video_codec.plType > 127) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1,
+                 "Invalid codec payload type: %d", video_codec.plType);
+    return false;
+  }
+
+  if (video_codec.width > kViEMaxCodecWidth ||
+      video_codec.height > kViEMaxCodecHeight) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Invalid codec size: %u x %u",
+                 video_codec.width, video_codec.height);
+    return false;
+  }
+
+  if (video_codec.startBitrate < kViEMinCodecBitrate) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Invalid start_bitrate: %u",
+                 video_codec.startBitrate);
+    return false;
+  }
+  if (video_codec.minBitrate < kViEMinCodecBitrate) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, -1, "Invalid min_bitrate: %u",
+                 video_codec.minBitrate);
+    return false;
+  }
+  return true;
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_codec_impl.h b/src/video_engine/vie_codec_impl.h
new file mode 100644
index 0000000..2d34540
--- /dev/null
+++ b/src/video_engine/vie_codec_impl.h
@@ -0,0 +1,81 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_CODEC_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_CODEC_IMPL_H_
+
+#include "typedefs.h"  // NOLINT
+#include "video_engine/include/vie_codec.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_ref_count.h"
+
+namespace webrtc {
+
+class ViESharedData;
+
+class ViECodecImpl
+    : public ViECodec,
+      public ViERefCount {
+ public:
+  virtual int Release();
+
+  // Implements ViECodec.
+  virtual int NumberOfCodecs() const;
+  virtual int GetCodec(const unsigned char list_number,
+                       VideoCodec& video_codec) const;
+  virtual int SetSendCodec(const int video_channel,
+                           const VideoCodec& video_codec);
+  virtual int GetSendCodec(const int video_channel,
+                           VideoCodec& video_codec) const;
+  virtual int SetReceiveCodec(const int video_channel,
+                              const VideoCodec& video_codec);
+  virtual int GetReceiveCodec(const int video_channel,
+                              VideoCodec& video_codec) const;
+  virtual int GetCodecConfigParameters(
+    const int video_channel,
+    unsigned char config_parameters[kConfigParameterSize],
+    unsigned char& config_parameters_size) const;
+  virtual int SetImageScaleStatus(const int video_channel, const bool enable);
+  virtual int GetSendCodecStastistics(const int video_channel,
+                                      unsigned int& key_frames,
+                                      unsigned int& delta_frames) const;
+  virtual int GetReceiveCodecStastistics(const int video_channel,
+                                         unsigned int& key_frames,
+                                         unsigned int& delta_frames) const;
+  virtual int GetCodecTargetBitrate(const int video_channel,
+                                    unsigned int* bitrate) const;
+  virtual unsigned int GetDiscardedPackets(const int video_channel) const;
+  virtual int SetKeyFrameRequestCallbackStatus(const int video_channel,
+                                               const bool enable);
+  virtual int SetSignalKeyPacketLossStatus(const int video_channel,
+                                           const bool enable,
+                                           const bool only_key_frames = false);
+  virtual int RegisterEncoderObserver(const int video_channel,
+                                      ViEEncoderObserver& observer);
+  virtual int DeregisterEncoderObserver(const int video_channel);
+  virtual int RegisterDecoderObserver(const int video_channel,
+                                      ViEDecoderObserver& observer);
+  virtual int DeregisterDecoderObserver(const int video_channel);
+  virtual int SendKeyFrame(const int video_channel);
+  virtual int WaitForFirstKeyFrame(const int video_channel, const bool wait);
+
+ protected:
+  explicit ViECodecImpl(ViESharedData* shared_data);
+  virtual ~ViECodecImpl();
+
+ private:
+  bool CodecValid(const VideoCodec& video_codec);
+
+  ViESharedData* shared_data_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_CODEC_IMPL_H_
diff --git a/src/video_engine/vie_defines.h b/src/video_engine/vie_defines.h
new file mode 100644
index 0000000..2cc18cc
--- /dev/null
+++ b/src/video_engine/vie_defines.h
@@ -0,0 +1,221 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_DEFINES_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_DEFINES_H_
+
+#include "engine_configurations.h"  // NOLINT
+
+// TODO(mflodman) Remove.
+#ifdef WEBRTC_ANDROID
+#include <arpa/inet.h>  // NOLINT
+#include <linux/net.h>  // NOLINT
+#include <netinet/in.h>  // NOLINT
+#include <pthread.h>  // NOLINT
+#include <stdio.h>  // NOLINT
+#include <stdlib.h>  // NOLINT
+#include <string.h>  // NOLINT
+#include <sys/types.h>  // NOLINT
+#include <sys/socket.h>  // NOLINT
+#include <sys/time.h>  // NOLINT
+#include <time.h>  // NOLINT
+#endif
+
+namespace webrtc {
+
+// General
+enum { kViEMinKeyRequestIntervalMs = 300 };
+
+// ViEBase
+enum { kViEMaxNumberOfChannels = 32 };
+enum { kViEVersionMaxMessageSize = 1024 };
+enum { kViEMaxModuleVersionSize = 960 };
+
+// ViECapture
+enum { kViEMaxCaptureDevices = 10 };
+enum { kViECaptureDefaultWidth = 352 };
+enum { kViECaptureDefaultHeight = 288 };
+enum { kViECaptureDefaultFramerate = 30 };
+enum { kViECaptureMaxSnapshotWaitTimeMs = 500 };
+
+// ViECodec
+enum { kViEMaxCodecWidth = 4048 };
+enum { kViEMaxCodecHeight = 3040 };
+enum { kViEMaxCodecFramerate = 60 };
+enum { kViEMinCodecBitrate = 30 };
+
+// ViEEncryption
+enum { kViEMaxSrtpKeyLength = 30 };
+enum { kViEMinSrtpEncryptLength = 16 };
+enum { kViEMaxSrtpEncryptLength = 256 };
+enum { kViEMaxSrtpAuthSh1Length = 20 };
+enum { kViEMaxSrtpTagAuthNullLength = 12 };
+enum { kViEMaxSrtpKeyAuthNullLength = 256 };
+
+// ViEFile
+enum { kViEMaxFilePlayers = 3 };
+
+// ViENetwork
+enum { kViEMaxMtu = 1500 };
+enum { kViESocketThreads = 1 };
+enum { kViENumReceiveSocketBuffers = 500 };
+
+// ViERender
+// Max valid time set in SetRenderTimeoutImage
+enum { kViEMaxRenderTimeoutTimeMs  = 10000 };
+// Min valid time set in SetRenderTimeoutImage
+enum { kViEMinRenderTimeoutTimeMs = 33 };
+enum { kViEDefaultRenderDelayMs = 10 };
+
+// ViERTP_RTCP
+enum { kNackHistorySize = 400 };
+
+// Id definitions
+enum {
+  kViEChannelIdBase = 0x0,
+  kViEChannelIdMax = 0xFF,
+  kViECaptureIdBase = 0x1001,
+  kViECaptureIdMax = 0x10FF,
+  kViEFileIdBase = 0x2000,
+  kViEFileIdMax = 0x200F,
+  kViEDummyChannelId = 0xFFFF
+};
+
+// Module id
+// Create a unique id based on the ViE instance id and the
+// channel id. ViE id > 0 and 0 <= channel id <= 255
+
+inline int ViEId(const int vieId, const int channelId = -1) {
+  if (channelId == -1) {
+    return static_cast<int>((vieId << 16) + kViEDummyChannelId);
+  }
+  return static_cast<int>((vieId << 16) + channelId);
+}
+
+inline int ViEModuleId(const int vieId, const int channelId = -1) {
+  if (channelId == -1) {
+    return static_cast<int>((vieId << 16) + kViEDummyChannelId);
+  }
+  return static_cast<int>((vieId << 16) + channelId);
+}
+
+inline int ChannelId(const int moduleId) {
+  return static_cast<int>(moduleId & 0xffff);
+}
+
+// Windows specific.
+#if defined(_WIN32)
+  //  Build information macros
+  #if defined(_DEBUG)
+  #define BUILDMODE TEXT("d")
+  #elif defined(DEBUG)
+  #define BUILDMODE TEXT("d")
+  #elif defined(NDEBUG)
+  #define BUILDMODE TEXT("r")
+  #else
+  #define BUILDMODE TEXT("?")
+  #endif
+
+  #define BUILDTIME TEXT(__TIME__)
+  #define BUILDDATE TEXT(__DATE__)
+
+  // Example: "Oct 10 2002 12:05:30 r".
+  #define BUILDINFO BUILDDATE TEXT(" ") BUILDTIME TEXT(" ") BUILDMODE
+  #define RENDER_MODULE_TYPE kRenderWindows
+
+  // Warning pragmas.
+  // new behavior: elements of array 'XXX' will be default initialized.
+  #pragma warning(disable: 4351)
+  // 'this' : used in base member initializer list.
+  #pragma warning(disable: 4355)
+  // Frame pointer register 'ebp' modified by inline assembly code.
+  #pragma warning(disable: 4731)
+
+  // Include libraries.
+  #pragma comment(lib, "winmm.lib")
+
+  #ifndef WEBRTC_EXTERNAL_TRANSPORT
+  #pragma comment(lib, "ws2_32.lib")
+  #pragma comment(lib, "Iphlpapi.lib")   // _GetAdaptersAddresses
+  #endif
+#endif
+
+// Mac specific.
+#ifdef WEBRTC_MAC_INTEL
+  #define SLEEP(x) usleep(x * 1000)
+
+  //  Build information macros.
+  #define TEXT(x) x
+  #if defined(_DEBUG)
+  #define BUILDMODE TEXT("d")
+  #elif defined(DEBUG)
+  #define BUILDMODE TEXT("d")
+  #elif defined(NDEBUG)
+  #define BUILDMODE TEXT("r")
+  #else
+  #define BUILDMODE TEXT("?")
+  #endif
+
+  #define BUILDTIME TEXT(__TIME__)
+  #define BUILDDATE TEXT(__DATE__)
+
+  // Example: "Oct 10 2002 12:05:30 r".
+  #define BUILDINFO BUILDDATE TEXT(" ") BUILDTIME TEXT(" ") BUILDMODE
+  #define RENDER_MODULE_TYPE kRenderWindows
+#endif
+
+// Linux specific.
+#ifndef WEBRTC_ANDROID
+#ifdef WEBRTC_LINUX
+  //  Build information macros.
+  #if defined(_DEBUG)
+  #define BUILDMODE "d"
+  #elif defined(DEBUG)
+  #define BUILDMODE "d"
+  #elif defined(NDEBUG)
+  #define BUILDMODE "r"
+  #else
+  #define BUILDMODE "?"
+  #endif
+
+  #define BUILDTIME __TIME__
+  #define BUILDDATE __DATE__
+
+  // Example: "Oct 10 2002 12:05:30 r".
+  #define BUILDINFO BUILDDATE " " BUILDTIME " " BUILDMODE
+#endif  // WEBRTC_LINUX
+#endif  // WEBRTC_ANDROID
+
+// Android specific.
+#ifdef WEBRTC_ANDROID
+  #define FAR
+  #define __cdecl
+
+  #if defined(_DEBUG)
+  #define BUILDMODE "d"
+  #elif defined(DEBUG)
+  #define BUILDMODE "d"
+  #elif defined(NDEBUG)
+  #define BUILDMODE "r"
+  #else
+  #define BUILDMODE "?"
+  #endif
+
+  #define BUILDTIME __TIME__
+  #define BUILDDATE __DATE__
+
+  // Example: "Oct 10 2002 12:05:30 r".
+  #define BUILDINFO BUILDDATE " " BUILDTIME " " BUILDMODE
+
+#endif  // WEBRTC_ANDROID
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_DEFINES_H_
diff --git a/src/video_engine/vie_encoder.cc b/src/video_engine/vie_encoder.cc
new file mode 100644
index 0000000..e0b0e39
--- /dev/null
+++ b/src/video_engine/vie_encoder.cc
@@ -0,0 +1,893 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_encoder.h"
+
+#include <cassert>
+
+#include "modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "modules/utility/interface/process_thread.h"
+#include "modules/video_coding/codecs/interface/video_codec_interface.h"
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "modules/video_coding/main/interface/video_coding_defines.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_codec.h"
+#include "video_engine/include/vie_image_process.h"
+#include "video_engine/vie_defines.h"
+
+namespace webrtc {
+
+class QMVideoSettingsCallback : public VCMQMSettingsCallback {
+ public:
+  explicit QMVideoSettingsCallback(VideoProcessingModule* vpm);
+  ~QMVideoSettingsCallback();
+
+  // Update VPM with QM (quality modes: frame size & frame rate) settings.
+  WebRtc_Word32 SetVideoQMSettings(const WebRtc_UWord32 frame_rate,
+                                   const WebRtc_UWord32 width,
+                                   const WebRtc_UWord32 height);
+
+ private:
+  VideoProcessingModule* vpm_;
+};
+
+class ViEBitrateObserver : public BitrateObserver {
+ public:
+  explicit ViEBitrateObserver(ViEEncoder* owner)
+      : owner_(owner) {
+  }
+  // Implements BitrateObserver.
+  virtual void OnNetworkChanged(const uint32_t bitrate_bps,
+                                const uint8_t fraction_lost,
+                                const uint32_t rtt) {
+    owner_->OnNetworkChanged(bitrate_bps, fraction_lost, rtt);
+  }
+ private:
+  ViEEncoder* owner_;
+};
+
+ViEEncoder::ViEEncoder(WebRtc_Word32 engine_id,
+                       WebRtc_Word32 channel_id,
+                       WebRtc_UWord32 number_of_cores,
+                       ProcessThread& module_process_thread,
+                       BitrateController* bitrate_controller)
+  : engine_id_(engine_id),
+    channel_id_(channel_id),
+    number_of_cores_(number_of_cores),
+    vcm_(*webrtc::VideoCodingModule::Create(ViEModuleId(engine_id,
+                                                        channel_id))),
+    vpm_(*webrtc::VideoProcessingModule::Create(ViEModuleId(engine_id,
+                                                            channel_id))),
+    default_rtp_rtcp_(NULL),
+    callback_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+    data_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+    bitrate_controller_(bitrate_controller),
+    paused_(false),
+    time_last_intra_request_ms_(0),
+    channels_dropping_delta_frames_(0),
+    drop_next_frame_(false),
+    fec_enabled_(false),
+    nack_enabled_(false),
+    codec_observer_(NULL),
+    effect_filter_(NULL),
+    module_process_thread_(module_process_thread),
+    has_received_sli_(false),
+    picture_id_sli_(0),
+    has_received_rpsi_(false),
+    picture_id_rpsi_(0),
+    file_recorder_(channel_id),
+    qm_callback_(NULL) {
+  WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo,
+               ViEId(engine_id, channel_id),
+               "%s(engine_id: %d) 0x%p - Constructor", __FUNCTION__, engine_id,
+               this);
+
+  RtpRtcp::Configuration configuration;
+  configuration.id = ViEModuleId(engine_id_, channel_id_);
+  configuration.audio = false;  // Video.
+
+  default_rtp_rtcp_.reset(RtpRtcp::CreateRtpRtcp(configuration));
+  bitrate_observer_.reset(new ViEBitrateObserver(this));
+}
+
+bool ViEEncoder::Init() {
+  if (vcm_.InitializeSender() != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "%s InitializeSender failure", __FUNCTION__);
+    return false;
+  }
+  vpm_.EnableTemporalDecimation(true);
+
+  // Enable/disable content analysis: off by default for now.
+  vpm_.EnableContentAnalysis(false);
+
+  if (module_process_thread_.RegisterModule(&vcm_) != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "%s RegisterModule failure", __FUNCTION__);
+    return false;
+  }
+  if (module_process_thread_.RegisterModule(default_rtp_rtcp_.get()) != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "%s RegisterModule failure", __FUNCTION__);
+    return false;
+  }
+
+  if (qm_callback_) {
+    delete qm_callback_;
+  }
+  qm_callback_ = new QMVideoSettingsCallback(&vpm_);
+
+#ifdef VIDEOCODEC_VP8
+  VideoCodec video_codec;
+  if (vcm_.Codec(webrtc::kVideoCodecVP8, &video_codec) != VCM_OK) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "%s Codec failure", __FUNCTION__);
+    return false;
+  }
+  if (vcm_.RegisterSendCodec(&video_codec, number_of_cores_,
+                             default_rtp_rtcp_->MaxDataPayloadLength()) != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "%s RegisterSendCodec failure", __FUNCTION__);
+    return false;
+  }
+  if (default_rtp_rtcp_->RegisterSendPayload(video_codec) != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "%s RegisterSendPayload failure", __FUNCTION__);
+    return false;
+  }
+  if (default_rtp_rtcp_->RegisterSendRtpHeaderExtension(
+      kRtpExtensionTransmissionTimeOffset, 1) != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "%s RegisterSendRtpHeaderExtension failure", __FUNCTION__);
+    return false;
+  }
+#else
+  VideoCodec video_codec;
+  if (vcm_.Codec(webrtc::kVideoCodecI420, &video_codec) == VCM_OK) {
+    vcm_.RegisterSendCodec(&video_codec, number_of_cores_,
+                           default_rtp_rtcp_->MaxDataPayloadLength());
+    default_rtp_rtcp_->RegisterSendPayload(video_codec);
+  } else {
+    return false;
+  }
+#endif
+
+  if (vcm_.RegisterTransportCallback(this) != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "ViEEncoder: VCM::RegisterTransportCallback failure");
+    return false;
+  }
+  if (vcm_.RegisterSendStatisticsCallback(this) != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "ViEEncoder: VCM::RegisterSendStatisticsCallback failure");
+    return false;
+  }
+  if (vcm_.RegisterVideoQMCallback(qm_callback_) != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "VCM::RegisterQMCallback failure");
+    return false;
+  }
+  return true;
+}
+
+ViEEncoder::~ViEEncoder() {
+  WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_),
+               "ViEEncoder Destructor 0x%p, engine_id: %d", this, engine_id_);
+  module_process_thread_.DeRegisterModule(&vcm_);
+  module_process_thread_.DeRegisterModule(&vpm_);
+  module_process_thread_.DeRegisterModule(default_rtp_rtcp_.get());
+  delete &vcm_;
+  delete &vpm_;
+  delete qm_callback_;
+}
+
+int ViEEncoder::Owner() const {
+  return channel_id_;
+}
+
+void ViEEncoder::Pause() {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+  CriticalSectionScoped cs(data_cs_.get());
+  paused_ = true;
+}
+
+void ViEEncoder::Restart() {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_),
+               "%s", __FUNCTION__);
+  CriticalSectionScoped cs(data_cs_.get());
+  paused_ = false;
+}
+
+WebRtc_Word32 ViEEncoder::DropDeltaAfterKey(bool enable) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_),
+               "%s(%d)", __FUNCTION__, enable);
+  CriticalSectionScoped cs(data_cs_.get());
+
+  if (enable) {
+    channels_dropping_delta_frames_++;
+  } else {
+    channels_dropping_delta_frames_--;
+    if (channels_dropping_delta_frames_ < 0) {
+      channels_dropping_delta_frames_ = 0;
+      WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "%s: Called too many times", __FUNCTION__);
+      return -1;
+    }
+  }
+  return 0;
+}
+
+WebRtc_UWord8 ViEEncoder::NumberOfCodecs() {
+  return vcm_.NumberOfCodecs();
+}
+
+WebRtc_Word32 ViEEncoder::GetCodec(WebRtc_UWord8 list_index,
+                                   VideoCodec* video_codec) {
+  if (vcm_.Codec(list_index, video_codec) != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_), "%s: Could not get codec",
+                 __FUNCTION__);
+    return -1;
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEEncoder::RegisterExternalEncoder(webrtc::VideoEncoder* encoder,
+                                                  WebRtc_UWord8 pl_type) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_), "%s: pltype %u", __FUNCTION__,
+               pl_type);
+
+  if (encoder == NULL)
+    return -1;
+
+  if (vcm_.RegisterExternalEncoder(encoder, pl_type) != VCM_OK) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "Could not register external encoder");
+    return -1;
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEEncoder::DeRegisterExternalEncoder(WebRtc_UWord8 pl_type) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_),
+               "%s: pltype %u", __FUNCTION__, pl_type);
+
+  webrtc::VideoCodec current_send_codec;
+  if (vcm_.SendCodec(&current_send_codec) == VCM_OK) {
+    if (vcm_.Bitrate(&current_send_codec.startBitrate) != 0) {
+      WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "Failed to get the current encoder target bitrate.");
+    }
+  }
+
+  if (vcm_.RegisterExternalEncoder(NULL, pl_type) != VCM_OK) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "Could not deregister external encoder");
+    return -1;
+  }
+
+  // If the external encoder is the current send codeci, use vcm internal
+  // encoder.
+  if (current_send_codec.plType == pl_type) {
+    WebRtc_UWord16 max_data_payload_length =
+        default_rtp_rtcp_->MaxDataPayloadLength();
+    if (vcm_.RegisterSendCodec(&current_send_codec, number_of_cores_,
+                               max_data_payload_length) != VCM_OK) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "Could not use internal encoder");
+      return -1;
+    }
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEEncoder::SetEncoder(const webrtc::VideoCodec& video_codec) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_),
+               "%s: CodecType: %d, width: %u, height: %u", __FUNCTION__,
+               video_codec.codecType, video_codec.width, video_codec.height);
+
+  // Setting target width and height for VPM.
+  if (vpm_.SetTargetResolution(video_codec.width, video_codec.height,
+                               video_codec.maxFramerate) != VPM_OK) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "Could not set VPM target dimensions");
+    return -1;
+  }
+
+  if (default_rtp_rtcp_->RegisterSendPayload(video_codec) != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "Could register RTP module video payload");
+    return -1;
+  }
+  // Convert from kbps to bps.
+  default_rtp_rtcp_->SetTargetSendBitrate(video_codec.startBitrate * 1000);
+
+  WebRtc_UWord16 max_data_payload_length =
+      default_rtp_rtcp_->MaxDataPayloadLength();
+
+  if (vcm_.RegisterSendCodec(&video_codec, number_of_cores_,
+                             max_data_payload_length) != VCM_OK) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "Could not register send codec");
+    return -1;
+  }
+
+  // Set this module as sending right away, let the slave module in the channel
+  // start and stop sending.
+  if (default_rtp_rtcp_->Sending() == false) {
+    if (default_rtp_rtcp_->SetSendingStatus(true) != 0) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "Could start RTP module sending");
+      return -1;
+    }
+  }
+  bitrate_controller_->SetBitrateObserver(bitrate_observer_.get(),
+                                          video_codec.startBitrate * 1000,
+                                          video_codec.minBitrate * 1000,
+                                          video_codec.maxBitrate * 1000);
+
+  return 0;
+}
+
+WebRtc_Word32 ViEEncoder::GetEncoder(VideoCodec* video_codec) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
+
+  if (vcm_.SendCodec(video_codec) != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "Could not get VCM send codec");
+    return -1;
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEEncoder::GetCodecConfigParameters(
+    unsigned char config_parameters[kConfigParameterSize],
+    unsigned char& config_parameters_size) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
+
+  WebRtc_Word32 num_parameters =
+      vcm_.CodecConfigParameters(config_parameters, kConfigParameterSize);
+  if (num_parameters <= 0) {
+    config_parameters_size = 0;
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "Could not get config parameters");
+    return -1;
+  }
+  config_parameters_size = static_cast<unsigned char>(num_parameters);
+  return 0;
+}
+
+WebRtc_Word32 ViEEncoder::ScaleInputImage(bool enable) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_), "%s(enable %d)", __FUNCTION__,
+               enable);
+
+  VideoFrameResampling resampling_mode = kFastRescaling;
+  if (enable == true) {
+    // kInterpolation is currently not supported.
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_), "%s not supported",
+                 __FUNCTION__, enable);
+    return -1;
+  }
+  vpm_.SetInputFrameResampleMode(resampling_mode);
+
+  return 0;
+}
+
+RtpRtcp* ViEEncoder::SendRtpRtcpModule() {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
+
+  return default_rtp_rtcp_.get();
+}
+
+void ViEEncoder::DeliverFrame(int id,
+                              VideoFrame* video_frame,
+                              int num_csrcs,
+                              const WebRtc_UWord32 CSRC[kRtpCsrcSize]) {
+  WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_), "%s: %llu", __FUNCTION__,
+               video_frame->TimeStamp());
+
+  {
+    CriticalSectionScoped cs(data_cs_.get());
+    if (paused_ || default_rtp_rtcp_->SendingMedia() == false) {
+      // We've paused or we have no channels attached, don't encode.
+      return;
+    }
+    if (drop_next_frame_) {
+      // Drop this frame.
+      WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "%s: Dropping frame %llu after a key fame", __FUNCTION__,
+                   video_frame->TimeStamp());
+      drop_next_frame_ = false;
+      return;
+    }
+  }
+
+  // Convert render time, in ms, to RTP timestamp.
+  const int kMsToRtpTimestamp = 90;
+  const WebRtc_UWord32 time_stamp =
+      kMsToRtpTimestamp *
+      static_cast<WebRtc_UWord32>(video_frame->RenderTimeMs());
+  video_frame->SetTimeStamp(time_stamp);
+  {
+    CriticalSectionScoped cs(callback_cs_.get());
+    if (effect_filter_) {
+      effect_filter_->Transform(video_frame->Length(), video_frame->Buffer(),
+                                video_frame->TimeStamp(),
+                                video_frame->Width(), video_frame->Height());
+    }
+  }
+  // Record raw frame.
+  file_recorder_.RecordVideoFrame(*video_frame);
+
+  // Make sure the CSRC list is correct.
+  if (num_csrcs > 0) {
+    WebRtc_UWord32 tempCSRC[kRtpCsrcSize];
+    for (int i = 0; i < num_csrcs; i++) {
+      if (CSRC[i] == 1) {
+        tempCSRC[i] = default_rtp_rtcp_->SSRC();
+      } else {
+        tempCSRC[i] = CSRC[i];
+      }
+    }
+    default_rtp_rtcp_->SetCSRCs(tempCSRC, (WebRtc_UWord8) num_csrcs);
+  }
+
+#ifdef VIDEOCODEC_VP8
+  if (vcm_.SendCodec() == webrtc::kVideoCodecVP8) {
+    webrtc::CodecSpecificInfo codec_specific_info;
+    codec_specific_info.codecType = webrtc::kVideoCodecVP8;
+    if (has_received_sli_ || has_received_rpsi_) {
+      {
+        codec_specific_info.codecSpecific.VP8.hasReceivedRPSI =
+          has_received_rpsi_;
+        codec_specific_info.codecSpecific.VP8.hasReceivedSLI =
+          has_received_sli_;
+        codec_specific_info.codecSpecific.VP8.pictureIdRPSI =
+          picture_id_rpsi_;
+        codec_specific_info.codecSpecific.VP8.pictureIdSLI  =
+          picture_id_sli_;
+      }
+      has_received_sli_ = false;
+      has_received_rpsi_ = false;
+    }
+    VideoFrame* decimated_frame = NULL;
+    const int ret = vpm_.PreprocessFrame(video_frame, &decimated_frame);
+    if (ret == 1) {
+      // Drop this frame.
+      return;
+    } else if (ret != VPM_OK) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "%s: Error preprocessing frame %u", __FUNCTION__,
+                   video_frame->TimeStamp());
+      return;
+    }
+
+    VideoContentMetrics* content_metrics = NULL;
+    content_metrics = vpm_.ContentMetrics();
+
+    // Frame was not re-sampled => use original.
+    if (decimated_frame == NULL)  {
+      decimated_frame = video_frame;
+    }
+
+    if (vcm_.AddVideoFrame(*decimated_frame, content_metrics,
+                           &codec_specific_info) != VCM_OK) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "%s: Error encoding frame %u", __FUNCTION__,
+                   video_frame->TimeStamp());
+    }
+    return;
+  }
+#endif
+  // TODO(mflodman) Rewrite this to use code common to VP8 case.
+  // Pass frame via preprocessor.
+  VideoFrame* decimated_frame = NULL;
+  const int ret = vpm_.PreprocessFrame(video_frame, &decimated_frame);
+  if (ret == 1) {
+    // Drop this frame.
+    return;
+  } else if (ret != VPM_OK) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "%s: Error preprocessing frame %u", __FUNCTION__,
+                 video_frame->TimeStamp());
+    return;
+  }
+
+  // Frame was not sampled => use original.
+  if (decimated_frame == NULL)  {
+    decimated_frame = video_frame;
+  }
+  if (vcm_.AddVideoFrame(*decimated_frame) != VCM_OK) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_), "%s: Error encoding frame %u",
+                 __FUNCTION__, video_frame->TimeStamp());
+  }
+}
+
+void ViEEncoder::DelayChanged(int id, int frame_delay) {
+  WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_), "%s: %u", __FUNCTION__,
+               frame_delay);
+
+  default_rtp_rtcp_->SetCameraDelay(frame_delay);
+  file_recorder_.SetFrameDelay(frame_delay);
+}
+
+int ViEEncoder::GetPreferedFrameSettings(int* width,
+                                         int* height,
+                                         int* frame_rate) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
+
+  webrtc::VideoCodec video_codec;
+  memset(&video_codec, 0, sizeof(video_codec));
+  if (vcm_.SendCodec(&video_codec) != VCM_OK) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "Could not get VCM send codec");
+    return -1;
+  }
+
+  *width = video_codec.width;
+  *height = video_codec.height;
+  *frame_rate = video_codec.maxFramerate;
+  return 0;
+}
+
+int ViEEncoder::SendKeyFrame() {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
+  return vcm_.IntraFrameRequest();
+}
+
+WebRtc_Word32 ViEEncoder::SendCodecStatistics(
+    WebRtc_UWord32* num_key_frames, WebRtc_UWord32* num_delta_frames) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
+
+  webrtc::VCMFrameCount sent_frames;
+  if (vcm_.SentFrameCount(sent_frames) != VCM_OK) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "%s: Could not get sent frame information", __FUNCTION__);
+    return -1;
+  }
+  *num_key_frames = sent_frames.numKeyFrames;
+  *num_delta_frames = sent_frames.numDeltaFrames;
+  return 0;
+}
+
+WebRtc_Word32 ViEEncoder::EstimatedSendBandwidth(
+    WebRtc_UWord32* available_bandwidth) const {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+
+  if (!bitrate_controller_->AvailableBandwidth(available_bandwidth)) {
+    return -1;
+  }
+  return 0;
+}
+
+int ViEEncoder::CodecTargetBitrate(WebRtc_UWord32* bitrate) const {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, channel_id_), "%s",
+               __FUNCTION__);
+  if (vcm_.Bitrate(bitrate) != 0)
+    return -1;
+  return 0;
+}
+
+WebRtc_Word32 ViEEncoder::UpdateProtectionMethod() {
+  bool fec_enabled = false;
+  WebRtc_UWord8 dummy_ptype_red = 0;
+  WebRtc_UWord8 dummy_ptypeFEC = 0;
+
+  // Updated protection method to VCM to get correct packetization sizes.
+  // FEC has larger overhead than NACK -> set FEC if used.
+  WebRtc_Word32 error = default_rtp_rtcp_->GenericFECStatus(fec_enabled,
+                                                           dummy_ptype_red,
+                                                           dummy_ptypeFEC);
+  if (error) {
+    return -1;
+  }
+
+  bool nack_enabled = (default_rtp_rtcp_->NACK() == kNackOff) ? false : true;
+  if (fec_enabled_ == fec_enabled && nack_enabled_ == nack_enabled) {
+    // No change needed, we're already in correct state.
+    return 0;
+  }
+  fec_enabled_ = fec_enabled;
+  nack_enabled_ = nack_enabled;
+
+  // Set Video Protection for VCM.
+  if (fec_enabled && nack_enabled) {
+    vcm_.SetVideoProtection(webrtc::kProtectionNackFEC, true);
+  } else {
+    vcm_.SetVideoProtection(webrtc::kProtectionFEC, fec_enabled_);
+    vcm_.SetVideoProtection(webrtc::kProtectionNack, nack_enabled_);
+    vcm_.SetVideoProtection(webrtc::kProtectionNackFEC, false);
+  }
+
+  if (fec_enabled || nack_enabled) {
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_), "%s: FEC status ",
+                 __FUNCTION__, fec_enabled);
+    vcm_.RegisterProtectionCallback(this);
+    // The send codec must be registered to set correct MTU.
+    webrtc::VideoCodec codec;
+    if (vcm_.SendCodec(&codec) == 0) {
+      WebRtc_UWord16 max_pay_load = default_rtp_rtcp_->MaxDataPayloadLength();
+      if (vcm_.Bitrate(&codec.startBitrate) != 0) {
+        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo,
+                     ViEId(engine_id_, channel_id_),
+                     "Failed to get the current encoder target bitrate.");
+      }
+      if (vcm_.RegisterSendCodec(&codec, number_of_cores_, max_pay_load) != 0) {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                     ViEId(engine_id_, channel_id_),
+                     "%s: Failed to update Sendcodec when enabling FEC",
+                     __FUNCTION__, fec_enabled);
+        return -1;
+      }
+    }
+    return 0;
+  } else {
+    // FEC and NACK are disabled.
+    vcm_.RegisterProtectionCallback(NULL);
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEEncoder::SendData(
+    const FrameType frame_type,
+    const WebRtc_UWord8 payload_type,
+    const WebRtc_UWord32 time_stamp,
+    int64_t capture_time_ms,
+    const WebRtc_UWord8* payload_data,
+    const WebRtc_UWord32 payload_size,
+    const webrtc::RTPFragmentationHeader& fragmentation_header,
+    const RTPVideoHeader* rtp_video_hdr) {
+  {
+    CriticalSectionScoped cs(data_cs_.get());
+    if (paused_) {
+      // Paused, don't send this packet.
+      return 0;
+    }
+    if (channels_dropping_delta_frames_ &&
+        frame_type == webrtc::kVideoFrameKey) {
+      WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "%s: Sending key frame, drop next frame", __FUNCTION__);
+      drop_next_frame_ = true;
+    }
+  }
+
+  // New encoded data, hand over to the rtp module.
+  return default_rtp_rtcp_->SendOutgoingData(frame_type,
+                                             payload_type,
+                                             time_stamp,
+                                             capture_time_ms,
+                                             payload_data,
+                                             payload_size,
+                                             &fragmentation_header,
+                                             rtp_video_hdr);
+}
+
+WebRtc_Word32 ViEEncoder::ProtectionRequest(
+    const FecProtectionParams* delta_fec_params,
+    const FecProtectionParams* key_fec_params,
+    WebRtc_UWord32* sent_video_rate_bps,
+    WebRtc_UWord32* sent_nack_rate_bps,
+    WebRtc_UWord32* sent_fec_rate_bps) {
+  WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_),
+               "%s, deltaFECRate: %u, key_fecrate: %u, "
+               "delta_use_uep_protection: %d, key_use_uep_protection: %d, "
+               "delta_max_fec_frames: %d, key_max_fec_frames: %d, "
+               "delta_mask_type: %d, key_mask_type: %d, ",
+               __FUNCTION__,
+               delta_fec_params->fec_rate,
+               key_fec_params->fec_rate,
+               delta_fec_params->use_uep_protection,
+               key_fec_params->use_uep_protection,
+               delta_fec_params->max_fec_frames,
+               key_fec_params->max_fec_frames,
+               delta_fec_params->fec_mask_type,
+               key_fec_params->fec_mask_type);
+  if (default_rtp_rtcp_->SetFecParameters(delta_fec_params,
+                                         key_fec_params) != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "%s: Could not update FEC parameters", __FUNCTION__);
+  }
+  default_rtp_rtcp_->BitrateSent(NULL,
+                                sent_video_rate_bps,
+                                sent_fec_rate_bps,
+                                sent_nack_rate_bps);
+  return 0;
+}
+
+WebRtc_Word32 ViEEncoder::SendStatistics(const WebRtc_UWord32 bit_rate,
+                                         const WebRtc_UWord32 frame_rate) {
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (codec_observer_) {
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_), "%s: bitrate %u, framerate %u",
+                 __FUNCTION__, bit_rate, frame_rate);
+    codec_observer_->OutgoingRate(channel_id_, frame_rate, bit_rate);
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEEncoder::RegisterCodecObserver(ViEEncoderObserver* observer) {
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (observer) {
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_), "%s: observer added",
+                 __FUNCTION__);
+    if (codec_observer_) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_), "%s: observer already set.",
+                   __FUNCTION__);
+      return -1;
+    }
+    codec_observer_ = observer;
+  } else {
+    if (codec_observer_ == NULL) {
+      WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "%s: observer does not exist.", __FUNCTION__);
+      return -1;
+    }
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_), "%s: observer removed",
+                 __FUNCTION__);
+    codec_observer_ = NULL;
+  }
+  return 0;
+}
+
+void ViEEncoder::OnReceivedSLI(const uint32_t /*ssrc*/,
+                               const uint8_t picture_id) {
+  picture_id_sli_ = picture_id;
+  has_received_sli_ = true;
+}
+
+void ViEEncoder::OnReceivedRPSI(const uint32_t /*ssrc*/,
+                                const uint64_t picture_id) {
+  picture_id_rpsi_ = picture_id;
+  has_received_rpsi_ = true;
+}
+
+void ViEEncoder::OnReceivedIntraFrameRequest(const uint32_t /*ssrc*/) {
+  // Key frame request from remote side, signal to VCM.
+  WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_), "%s", __FUNCTION__);
+
+  WebRtc_Word64 now = TickTime::MillisecondTimestamp();
+  if (time_last_intra_request_ms_ + kViEMinKeyRequestIntervalMs > now) {
+    WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_),
+                 "%s: Not not encoding new intra due to timing", __FUNCTION__);
+    return;
+  }
+  vcm_.IntraFrameRequest();
+  time_last_intra_request_ms_ = now;
+}
+
+// Called from ViEBitrateObserver.
+void ViEEncoder::OnNetworkChanged(const uint32_t bitrate_bps,
+                                  const uint8_t fraction_lost,
+                                  const uint32_t round_trip_time_ms) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+               ViEId(engine_id_, channel_id_),
+               "%s(bitrate_bps: %u, fraction_lost: %u, rtt_ms: %u",
+               __FUNCTION__, bitrate_bps, fraction_lost, round_trip_time_ms);
+
+  vcm_.SetChannelParameters(bitrate_bps / 1000, fraction_lost,
+                            round_trip_time_ms);
+
+  default_rtp_rtcp_->SetTargetSendBitrate(bitrate_bps);
+}
+
+WebRtc_Word32 ViEEncoder::RegisterEffectFilter(ViEEffectFilter* effect_filter) {
+  CriticalSectionScoped cs(callback_cs_.get());
+  if (effect_filter == NULL) {
+    if (effect_filter_ == NULL) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_), "%s: no effect filter added",
+                   __FUNCTION__);
+      return -1;
+    }
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_), "%s: deregister effect filter",
+                 __FUNCTION__);
+  } else {
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo,
+                 ViEId(engine_id_, channel_id_), "%s: register effect",
+                 __FUNCTION__);
+    if (effect_filter_) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                   ViEId(engine_id_, channel_id_),
+                   "%s: effect filter already added ", __FUNCTION__);
+      return -1;
+    }
+  }
+  effect_filter_ = effect_filter;
+  return 0;
+}
+
+ViEFileRecorder& ViEEncoder::GetOutgoingFileRecorder() {
+  return file_recorder_;
+}
+
+int ViEEncoder::StartDebugRecording(const char* fileNameUTF8) {
+  return vcm_.StartDebugRecording(fileNameUTF8);
+}
+
+int ViEEncoder::StopDebugRecording() {
+  return vcm_.StopDebugRecording();
+}
+
+QMVideoSettingsCallback::QMVideoSettingsCallback(VideoProcessingModule* vpm)
+    : vpm_(vpm) {
+}
+
+QMVideoSettingsCallback::~QMVideoSettingsCallback() {
+}
+
+WebRtc_Word32 QMVideoSettingsCallback::SetVideoQMSettings(
+    const WebRtc_UWord32 frame_rate,
+    const WebRtc_UWord32 width,
+    const WebRtc_UWord32 height) {
+  return vpm_->SetTargetResolution(width, height, frame_rate);
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_encoder.h b/src/video_engine/vie_encoder.h
new file mode 100644
index 0000000..031ebf6
--- /dev/null
+++ b/src/video_engine/vie_encoder.h
@@ -0,0 +1,197 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_ENCODER_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_ENCODER_H_
+
+#include "common_types.h"  // NOLINT
+#include "typedefs.h"  //NOLINT
+#include "modules/bitrate_controller/include/bitrate_controller.h"
+#include "modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "modules/video_coding/main/interface/video_coding_defines.h"
+#include "modules/video_processing/main/interface/video_processing.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_file_recorder.h"
+#include "video_engine/vie_frame_provider_base.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class ProcessThread;
+class QMVideoSettingsCallback;
+class RtpRtcp;
+class VideoCodingModule;
+class ViEBitrateObserver;
+class ViEEffectFilter;
+class ViEEncoderObserver;
+
+class ViEEncoder
+    : public RtcpIntraFrameObserver,
+      public VCMPacketizationCallback,
+      public VCMProtectionCallback,
+      public VCMSendStatisticsCallback,
+      public ViEFrameCallback {
+ public:
+  friend class ViEBitrateObserver;
+
+  ViEEncoder(WebRtc_Word32 engine_id,
+             WebRtc_Word32 channel_id,
+             WebRtc_UWord32 number_of_cores,
+             ProcessThread& module_process_thread,
+             BitrateController* bitrate_controller);
+  ~ViEEncoder();
+
+  bool Init();
+
+  // Returns the id of the owning channel.
+  int Owner() const;
+
+  // Drops incoming packets before they get to the encoder.
+  void Pause();
+  void Restart();
+
+  WebRtc_Word32 DropDeltaAfterKey(bool enable);
+
+  // Codec settings.
+  WebRtc_UWord8 NumberOfCodecs();
+  WebRtc_Word32 GetCodec(WebRtc_UWord8 list_index, VideoCodec* video_codec);
+  WebRtc_Word32 RegisterExternalEncoder(VideoEncoder* encoder,
+                                        WebRtc_UWord8 pl_type);
+  WebRtc_Word32 DeRegisterExternalEncoder(WebRtc_UWord8 pl_type);
+  WebRtc_Word32 SetEncoder(const VideoCodec& video_codec);
+  WebRtc_Word32 GetEncoder(VideoCodec* video_codec);
+
+  WebRtc_Word32 GetCodecConfigParameters(
+    unsigned char config_parameters[kConfigParameterSize],
+    unsigned char& config_parameters_size);
+
+  // Scale or crop/pad image.
+  WebRtc_Word32 ScaleInputImage(bool enable);
+
+  // RTP settings.
+  RtpRtcp* SendRtpRtcpModule();
+
+  // Implementing ViEFrameCallback.
+  virtual void DeliverFrame(int id,
+                            VideoFrame* video_frame,
+                            int num_csrcs = 0,
+                            const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
+  virtual void DelayChanged(int id, int frame_delay);
+  virtual int GetPreferedFrameSettings(int* width,
+                                       int* height,
+                                       int* frame_rate);
+
+  virtual void ProviderDestroyed(int id) {
+    return;
+  }
+
+  WebRtc_Word32 SendKeyFrame();
+  WebRtc_Word32 SendCodecStatistics(WebRtc_UWord32* num_key_frames,
+                                    WebRtc_UWord32* num_delta_frames);
+
+  WebRtc_Word32 EstimatedSendBandwidth(
+        WebRtc_UWord32* available_bandwidth) const;
+
+  int CodecTargetBitrate(WebRtc_UWord32* bitrate) const;
+  // Loss protection.
+  WebRtc_Word32 UpdateProtectionMethod();
+
+  // Implements VCMPacketizationCallback.
+  virtual WebRtc_Word32 SendData(
+    FrameType frame_type,
+    WebRtc_UWord8 payload_type,
+    WebRtc_UWord32 time_stamp,
+    int64_t capture_time_ms,
+    const WebRtc_UWord8* payload_data,
+    WebRtc_UWord32 payload_size,
+    const RTPFragmentationHeader& fragmentation_header,
+    const RTPVideoHeader* rtp_video_hdr);
+
+  // Implements VideoProtectionCallback.
+  virtual int ProtectionRequest(
+      const FecProtectionParams* delta_fec_params,
+      const FecProtectionParams* key_fec_params,
+      WebRtc_UWord32* sent_video_rate_bps,
+      WebRtc_UWord32* sent_nack_rate_bps,
+      WebRtc_UWord32* sent_fec_rate_bps);
+
+  // Implements VideoSendStatisticsCallback.
+  virtual WebRtc_Word32 SendStatistics(const WebRtc_UWord32 bit_rate,
+                                       const WebRtc_UWord32 frame_rate);
+  WebRtc_Word32 RegisterCodecObserver(ViEEncoderObserver* observer);
+
+  // Implements RtcpIntraFrameObserver.
+  virtual void OnReceivedIntraFrameRequest(const uint32_t ssrc);
+
+  virtual void OnReceivedSLI(const uint32_t ssrc,
+                             const uint8_t picture_id);
+
+  virtual void OnReceivedRPSI(const uint32_t ssrc,
+                              const uint64_t picture_id);
+
+  // Effect filter.
+  WebRtc_Word32 RegisterEffectFilter(ViEEffectFilter* effect_filter);
+
+  // Recording.
+  ViEFileRecorder& GetOutgoingFileRecorder();
+
+  // Enables recording of debugging information.
+  virtual int StartDebugRecording(const char* fileNameUTF8);
+
+  // Disables recording of debugging information.
+  virtual int StopDebugRecording();
+
+ protected:
+  // Called by BitrateObserver.
+  void OnNetworkChanged(const uint32_t bitrate_bps,
+                        const uint8_t fraction_lost,
+                        const uint32_t round_trip_time_ms);
+
+ private:
+  WebRtc_Word32 engine_id_;
+  const int channel_id_;
+  const WebRtc_UWord32 number_of_cores_;
+
+  VideoCodingModule& vcm_;
+  VideoProcessingModule& vpm_;
+  scoped_ptr<RtpRtcp> default_rtp_rtcp_;
+  scoped_ptr<CriticalSectionWrapper> callback_cs_;
+  scoped_ptr<CriticalSectionWrapper> data_cs_;
+  scoped_ptr<BitrateObserver> bitrate_observer_;
+
+  BitrateController* bitrate_controller_;
+
+  bool paused_;
+  WebRtc_Word64 time_last_intra_request_ms_;
+  WebRtc_Word32 channels_dropping_delta_frames_;
+  bool drop_next_frame_;
+
+  bool fec_enabled_;
+  bool nack_enabled_;
+
+  ViEEncoderObserver* codec_observer_;
+  ViEEffectFilter* effect_filter_;
+  ProcessThread& module_process_thread_;
+
+  bool has_received_sli_;
+  WebRtc_UWord8 picture_id_sli_;
+  bool has_received_rpsi_;
+  WebRtc_UWord64 picture_id_rpsi_;
+
+  ViEFileRecorder file_recorder_;
+
+  // Quality modes callback
+  QMVideoSettingsCallback* qm_callback_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_ENCODER_H_
diff --git a/src/video_engine/vie_encryption_impl.cc b/src/video_engine/vie_encryption_impl.cc
new file mode 100644
index 0000000..adbda42
--- /dev/null
+++ b/src/video_engine/vie_encryption_impl.cc
@@ -0,0 +1,111 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_encryption_impl.h"
+
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/vie_channel.h"
+#include "video_engine/vie_channel_manager.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_impl.h"
+#include "video_engine/vie_shared_data.h"
+
+namespace webrtc {
+
+ViEEncryption* ViEEncryption::GetInterface(VideoEngine* video_engine) {
+#ifdef WEBRTC_VIDEO_ENGINE_ENCRYPTION_API
+  if (video_engine == NULL) {
+    return NULL;
+  }
+  VideoEngineImpl* vie_impl = reinterpret_cast<VideoEngineImpl*>(video_engine);
+  ViEEncryptionImpl* vie_encryption_impl = vie_impl;
+  // Increase ref count.
+  (*vie_encryption_impl)++;
+  return vie_encryption_impl;
+#else
+  return NULL;
+#endif
+}
+
+int ViEEncryptionImpl::Release() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
+               "ViEEncryptionImpl::Release()");
+  // Decrease ref count.
+  (*this)--;
+
+  WebRtc_Word32 ref_count = GetCount();
+  if (ref_count < 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
+                 "ViEEncryptionImpl release too many times");
+    shared_data_->SetLastError(kViEAPIDoesNotExist);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
+               "ViEEncryptionImpl reference count: %d", ref_count);
+  return ref_count;
+}
+
+ViEEncryptionImpl::ViEEncryptionImpl(ViESharedData* shared_data)
+    : shared_data_(shared_data) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViEEncryptionImpl::ViEEncryptionImpl() Ctor");
+}
+
+ViEEncryptionImpl::~ViEEncryptionImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViEEncryptionImpl::~ViEEncryptionImpl() Dtor");
+}
+
+int ViEEncryptionImpl::RegisterExternalEncryption(const int video_channel,
+                                                  Encryption& encryption) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "RegisterExternalEncryption(video_channel=%d)", video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (vie_channel == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEEncryptionInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->RegisterExternalEncryption(&encryption) != 0) {
+    shared_data_->SetLastError(kViEEncryptionUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEEncryptionImpl::DeregisterExternalEncryption(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "RegisterExternalEncryption(video_channel=%d)", video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (vie_channel == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEEncryptionInvalidChannelId);
+    return -1;
+  }
+
+  if (vie_channel->DeRegisterExternalEncryption() != 0) {
+    shared_data_->SetLastError(kViEEncryptionUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_encryption_impl.h b/src/video_engine/vie_encryption_impl.h
new file mode 100644
index 0000000..608a5ab
--- /dev/null
+++ b/src/video_engine/vie_encryption_impl.h
@@ -0,0 +1,43 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_ENCRYPTION_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_ENCRYPTION_IMPL_H_
+
+#include "typedefs.h"  // NOLINT
+#include "video_engine/include/vie_encryption.h"
+#include "video_engine/vie_ref_count.h"
+
+namespace webrtc {
+
+class ViESharedData;
+
+class ViEEncryptionImpl
+    : public ViEEncryption,
+      public ViERefCount {
+ public:
+  virtual int Release();
+
+  // Implements ViEEncryption.
+  virtual int RegisterExternalEncryption(const int video_channel,
+                                         Encryption& encryption);
+  virtual int DeregisterExternalEncryption(const int video_channel);
+
+ protected:
+  explicit ViEEncryptionImpl(ViESharedData* shared_data);
+  virtual ~ViEEncryptionImpl();
+
+ private:
+  ViESharedData* shared_data_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_ENCRYPTION_IMPL_H_
diff --git a/src/video_engine/vie_external_codec_impl.cc b/src/video_engine/vie_external_codec_impl.cc
new file mode 100644
index 0000000..8c9be01
--- /dev/null
+++ b/src/video_engine/vie_external_codec_impl.cc
@@ -0,0 +1,184 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_external_codec_impl.h"
+
+#include "engine_configurations.h"  // NOLINT
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/vie_channel.h"
+#include "video_engine/vie_channel_manager.h"
+#include "video_engine/vie_encoder.h"
+#include "video_engine/vie_impl.h"
+#include "video_engine/vie_shared_data.h"
+
+namespace webrtc {
+
+ViEExternalCodec* ViEExternalCodec::GetInterface(VideoEngine* video_engine) {
+#ifdef WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
+  if (video_engine == NULL) {
+    return NULL;
+  }
+  VideoEngineImpl* vie_impl = reinterpret_cast<VideoEngineImpl*>(video_engine);
+  ViEExternalCodecImpl* vie_external_codec_impl = vie_impl;
+  // Increase ref count.
+  (*vie_external_codec_impl)++;
+  return vie_external_codec_impl;
+#else
+  return NULL;
+#endif
+}
+
+int ViEExternalCodecImpl::Release() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
+               "ViEExternalCodec::Release()");
+  // Decrease ref count.
+  (*this)--;
+
+  WebRtc_Word32 ref_count = GetCount();
+  if (ref_count < 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
+                 "ViEExternalCodec release too many times");
+    shared_data_->SetLastError(kViEAPIDoesNotExist);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
+               "ViEExternalCodec reference count: %d", ref_count);
+  return ref_count;
+}
+
+ViEExternalCodecImpl::ViEExternalCodecImpl(ViESharedData* shared_data)
+    : shared_data_(shared_data) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViEExternalCodecImpl::ViEExternalCodecImpl() Ctor");
+}
+
+ViEExternalCodecImpl::~ViEExternalCodecImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViEExternalCodecImpl::~ViEExternalCodecImpl() Dtor");
+}
+
+int ViEExternalCodecImpl::RegisterExternalSendCodec(const int video_channel,
+                                                    const unsigned char pl_type,
+                                                    VideoEncoder* encoder) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s channel %d pl_type %d encoder 0x%x", __FUNCTION__,
+               video_channel, pl_type, encoder);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Invalid argument video_channel %u. Does it exist?",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidArgument);
+    return -1;
+  }
+  if (!encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Invalid argument Encoder 0x%x.", __FUNCTION__, encoder);
+    shared_data_->SetLastError(kViECodecInvalidArgument);
+    return -1;
+  }
+
+  if (vie_encoder->RegisterExternalEncoder(encoder, pl_type) != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEExternalCodecImpl::DeRegisterExternalSendCodec(
+  const int video_channel, const unsigned char pl_type) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s channel %d pl_type %d", __FUNCTION__, video_channel,
+               pl_type);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Invalid argument video_channel %u. Does it exist?",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidArgument);
+    return -1;
+  }
+
+  if (vie_encoder->DeRegisterExternalEncoder(pl_type) != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEExternalCodecImpl::RegisterExternalReceiveCodec(
+    const int video_channel,
+    const unsigned int pl_type,
+    VideoDecoder* decoder,
+    bool decoder_render,
+    int render_delay) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s channel %d pl_type %d decoder 0x%x, decoder_render %d, "
+               "renderDelay %d", __FUNCTION__, video_channel, pl_type, decoder,
+               decoder_render, render_delay);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Invalid argument video_channel %u. Does it exist?",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidArgument);
+    return -1;
+  }
+  if (!decoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Invalid argument decoder 0x%x.", __FUNCTION__, decoder);
+    shared_data_->SetLastError(kViECodecInvalidArgument);
+    return -1;
+  }
+
+  if (vie_channel->RegisterExternalDecoder(pl_type, decoder, decoder_render,
+                                           render_delay) != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEExternalCodecImpl::DeRegisterExternalReceiveCodec(
+const int video_channel, const unsigned char pl_type) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s channel %d pl_type %u", __FUNCTION__, video_channel,
+               pl_type);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Invalid argument video_channel %u. Does it exist?",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViECodecInvalidArgument);
+    return -1;
+  }
+  if (vie_channel->DeRegisterExternalDecoder(pl_type) != 0) {
+    shared_data_->SetLastError(kViECodecUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_external_codec_impl.h b/src/video_engine/vie_external_codec_impl.h
new file mode 100644
index 0000000..f06613f
--- /dev/null
+++ b/src/video_engine/vie_external_codec_impl.h
@@ -0,0 +1,50 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_EXTERNAL_CODEC_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_EXTERNAL_CODEC_IMPL_H_
+
+#include "video_engine/include/vie_external_codec.h"
+#include "video_engine/vie_ref_count.h"
+
+namespace webrtc {
+
+class ViESharedData;
+
+class ViEExternalCodecImpl
+    : public ViEExternalCodec,
+      public ViERefCount {
+ public:
+  // Implements ViEExternalCodec.
+  virtual int Release();
+  virtual int RegisterExternalSendCodec(const int video_channel,
+                                        const unsigned char pl_type,
+                                        VideoEncoder* encoder);
+  virtual int DeRegisterExternalSendCodec(const int video_channel,
+                                          const unsigned char pl_type);
+  virtual int RegisterExternalReceiveCodec(const int video_channel,
+                                           const unsigned int pl_type,
+                                           VideoDecoder* decoder,
+                                           bool decoder_render = false,
+                                           int render_delay = 0);
+  virtual int DeRegisterExternalReceiveCodec(const int video_channel,
+                                             const unsigned char pl_type);
+
+ protected:
+  explicit ViEExternalCodecImpl(ViESharedData* shared_data);
+  virtual ~ViEExternalCodecImpl();
+
+ private:
+  ViESharedData* shared_data_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_EXTERNAL_CODEC_IMPL_H_
diff --git a/src/video_engine/vie_file_image.cc b/src/video_engine/vie_file_image.cc
new file mode 100644
index 0000000..772642c
--- /dev/null
+++ b/src/video_engine/vie_file_image.cc
@@ -0,0 +1,105 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Placed first to get WEBRTC_VIDEO_ENGINE_FILE_API.
+#include "engine_configurations.h"  // NOLINT
+
+#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
+
+#include "video_engine/vie_file_image.h"
+
+#include <stdio.h>  // NOLINT
+
+#include "common_video/interface/video_image.h"
+#include "common_video/jpeg/include/jpeg.h"
+#include "system_wrappers/interface/trace.h"
+
+namespace webrtc {
+
+int ViEFileImage::ConvertJPEGToVideoFrame(int engine_id,
+                                          const char* file_nameUTF8,
+                                          VideoFrame* video_frame) {
+  // Read jpeg file into temporary buffer.
+  EncodedImage image_buffer;
+
+  FILE* image_file = fopen(file_nameUTF8, "rb");
+  if (!image_file) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
+                 "%s could not open file %s", __FUNCTION__, file_nameUTF8);
+    return -1;
+  }
+  if (fseek(image_file, 0, SEEK_END) != 0) {
+    fclose(image_file);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
+                 "ConvertJPEGToVideoFrame fseek SEEK_END error for file %s",
+                 file_nameUTF8);
+    return -1;
+  }
+  int buffer_size = ftell(image_file);
+  if (buffer_size == -1) {
+    fclose(image_file);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
+                 "ConvertJPEGToVideoFrame could tell file size for file %s",
+                 file_nameUTF8);
+    return -1;
+  }
+  image_buffer._size = buffer_size;
+  if (fseek(image_file, 0, SEEK_SET) != 0) {
+    fclose(image_file);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
+                 "ConvertJPEGToVideoFrame fseek SEEK_SET error for file %s",
+                 file_nameUTF8);
+    return -1;
+  }
+  image_buffer._buffer = new WebRtc_UWord8[ image_buffer._size + 1];
+  if (image_buffer._size != fread(image_buffer._buffer, sizeof(WebRtc_UWord8),
+                                  image_buffer._size, image_file)) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
+                 "%s could not read file %s", __FUNCTION__, file_nameUTF8);
+    fclose(image_file);
+    delete [] image_buffer._buffer;
+    return -1;
+  }
+  fclose(image_file);
+
+  JpegDecoder decoder;
+  int ret = decoder.Decode(image_buffer, *video_frame);
+
+  delete [] image_buffer._buffer;
+  image_buffer._buffer = NULL;
+
+  if (ret == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
+                 "%s could decode file %s from jpeg format", __FUNCTION__,
+                 file_nameUTF8);
+    return -1;
+  } else if (ret == -3) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
+                 "%s could not convert jpeg's data to i420 format",
+                 __FUNCTION__, file_nameUTF8);
+  }
+  return 0;
+}
+
+int ViEFileImage::ConvertPictureToVideoFrame(int engine_id,
+                                             const ViEPicture& picture,
+                                             VideoFrame* video_frame) {
+  WebRtc_UWord32 picture_length = (WebRtc_UWord32)(picture.width *
+                                                   picture.height * 1.5);
+  video_frame->CopyFrame(picture_length, picture.data);
+  video_frame->SetWidth(picture.width);
+  video_frame->SetHeight(picture.height);
+  video_frame->SetLength(picture_length);
+  return 0;
+}
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_FILE_API
diff --git a/src/video_engine/vie_file_image.h b/src/video_engine/vie_file_image.h
new file mode 100644
index 0000000..e3f229a
--- /dev/null
+++ b/src/video_engine/vie_file_image.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_FILE_IMAGE_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_FILE_IMAGE_H_
+
+#include "modules/interface/module_common_types.h"
+#include "typedefs.h"  // NOLINT
+#include "video_engine/include/vie_file.h"
+
+namespace webrtc {
+
+class ViEFileImage {
+ public:
+  static int ConvertJPEGToVideoFrame(int engine_id,
+                                     const char* file_nameUTF8,
+                                     VideoFrame* video_frame);
+  static int ConvertPictureToVideoFrame(int engine_id,
+                                        const ViEPicture& picture,
+                                        VideoFrame* video_frame);
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_FILE_IMAGE_H_
diff --git a/src/video_engine/vie_file_impl.cc b/src/video_engine/vie_file_impl.cc
new file mode 100644
index 0000000..1e2753b
--- /dev/null
+++ b/src/video_engine/vie_file_impl.cc
@@ -0,0 +1,1021 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_file_impl.h"
+
+#include "engine_configurations.h"  // NOLINT
+
+#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
+#include "common_video/jpeg/include/jpeg.h"
+#include "system_wrappers/interface/condition_variable_wrapper.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/vie_capturer.h"
+#include "video_engine/vie_channel.h"
+#include "video_engine/vie_channel_manager.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_encoder.h"
+#include "video_engine/vie_file_image.h"
+#include "video_engine/vie_file_player.h"
+#include "video_engine/vie_file_recorder.h"
+#include "video_engine/vie_impl.h"
+#include "video_engine/vie_input_manager.h"
+#include "video_engine/vie_render_manager.h"
+#include "video_engine/vie_renderer.h"
+#endif
+
+namespace webrtc {
+
+ViEFile* ViEFile::GetInterface(VideoEngine* video_engine) {
+#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
+  if (!video_engine) {
+    return NULL;
+  }
+  VideoEngineImpl* vie_impl = reinterpret_cast<VideoEngineImpl*>(video_engine);
+  ViEFileImpl* vie_file_impl = vie_impl;
+  // Increase ref count.
+  (*vie_file_impl)++;
+  return vie_file_impl;
+#else
+  return NULL;
+#endif
+}
+
+#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
+
+int ViEFileImpl::Release() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
+               "ViEFile::Release()");
+  // Decrease ref count.
+  (*this)--;
+  WebRtc_Word32 ref_count = GetCount();
+  if (ref_count < 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
+                 "ViEFile release too many times");
+    shared_data_->SetLastError(kViEAPIDoesNotExist);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
+               "ViEFile reference count: %d", ref_count);
+  return ref_count;
+}
+
+ViEFileImpl::ViEFileImpl(ViESharedData* shared_data)
+    : shared_data_(shared_data) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViEFileImpl::ViEFileImpl() Ctor");
+}
+
+ViEFileImpl::~ViEFileImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViEFileImpl::~ViEFileImpl() Dtor");
+}
+
+int ViEFileImpl::StartPlayFile(const char* file_nameUTF8,
+                               int& file_id,
+                               const bool loop,
+                               const FileFormats file_format) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s", __FUNCTION__);
+
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+
+  VoiceEngine* voice = shared_data_->channel_manager()->GetVoiceEngine();
+  const WebRtc_Word32 result = shared_data_->input_manager()->CreateFilePlayer(
+      file_nameUTF8, loop, file_format, voice, file_id);
+  if (result != 0) {
+    shared_data_->SetLastError(result);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEFileImpl::StopPlayFile(const int file_id) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(file_id: %d)", __FUNCTION__, file_id);
+  {
+    ViEInputManagerScoped is(*(shared_data_->input_manager()));
+    ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
+    if (!vie_file_player) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                   "%s: File with id %d is not playing.", __FUNCTION__,
+                   file_id);
+      shared_data_->SetLastError(kViEFileNotPlaying);
+      return -1;
+    }
+  }
+  // Destroy the capture device.
+  return shared_data_->input_manager()->DestroyFilePlayer(file_id);
+}
+
+int ViEFileImpl::RegisterObserver(int file_id,
+                                  ViEFileObserver& observer) {  // NOLINT
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(file_id: %d)", __FUNCTION__, file_id);
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
+  if (!vie_file_player) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: File with id %d is not playing.", __FUNCTION__,
+                 file_id);
+    shared_data_->SetLastError(kViEFileNotPlaying);
+    return -1;
+  }
+  if (vie_file_player->IsObserverRegistered()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), file_id),
+                 "%s: Observer already registered", __FUNCTION__);
+    shared_data_->SetLastError(kViEFileObserverAlreadyRegistered);
+    return -1;
+  }
+  if (vie_file_player->RegisterObserver(&observer) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), file_id),
+                 "%s: Failed to register observer", __FUNCTION__, file_id);
+    shared_data_->SetLastError(kViEFileUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEFileImpl::DeregisterObserver(int file_id,
+                                    ViEFileObserver& observer) {  // NOLINT
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(file_id: %d)", __FUNCTION__, file_id);
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
+  if (!vie_file_player) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: File with id %d is not playing.", __FUNCTION__,
+                 file_id);
+    shared_data_->SetLastError(kViEFileNotPlaying);
+    return -1;
+  }
+  if (!vie_file_player->IsObserverRegistered()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), file_id),
+                 "%s: No Observer registered", __FUNCTION__);
+    shared_data_->SetLastError(kViEFileObserverNotRegistered);
+    return -1;
+  }
+  if (vie_file_player->DeRegisterObserver() != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), file_id),
+                 "%s: Failed to deregister observer", __FUNCTION__, file_id);
+    shared_data_->SetLastError(kViEFileUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEFileImpl::SendFileOnChannel(const int file_id, const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(file_id: %d)", __FUNCTION__, file_id);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileInvalidChannelId);
+    return -1;
+  }
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  if (is.FrameProvider(vie_encoder) != NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d already connected to a capture device or "
+                 "file.", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileInputAlreadyConnected);
+    return -1;
+  }
+
+  ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
+  if (!vie_file_player) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: File with id %d is not playing.", __FUNCTION__,
+                 file_id);
+    shared_data_->SetLastError(kViEFileNotPlaying);
+    return -1;
+  }
+
+  if (vie_file_player->RegisterFrameCallback(video_channel, vie_encoder)
+      != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Failed to register frame callback.", __FUNCTION__,
+                 file_id);
+    shared_data_->SetLastError(kViEFileUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEFileImpl::StopSendFileOnChannel(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileInvalidChannelId);
+    return -1;
+  }
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViEFrameProviderBase* frame_provider = is.FrameProvider(vie_encoder);
+  if (!frame_provider ||
+      frame_provider->Id() < kViEFileIdBase ||
+      frame_provider->Id() > kViEFileIdMax) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: No file connected to Channel %d", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViEFileNotConnected);
+    return -1;
+  }
+  if (frame_provider->DeregisterFrameCallback(vie_encoder) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Failed to deregister file from channel %d",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileUnknownError);
+  }
+  return 0;
+}
+
+int ViEFileImpl::StartPlayFileAsMicrophone(const int file_id,
+                                           const int audio_channel,
+                                           bool mix_microphone,
+                                           float volume_scaling) {
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+
+  ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
+  if (!vie_file_player) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: File with id %d is not playing.", __FUNCTION__,
+                 file_id);
+    shared_data_->SetLastError(kViEFileNotPlaying);
+    return -1;
+  }
+  if (vie_file_player->SendAudioOnChannel(audio_channel, mix_microphone,
+  volume_scaling) != 0) {
+    shared_data_->SetLastError(kViEFileVoEFailure);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEFileImpl::StopPlayFileAsMicrophone(const int file_id,
+                                          const int audio_channel) {
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+
+  ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
+  if (!vie_file_player) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: File with id %d is not playing.", __FUNCTION__,
+                 file_id);
+    shared_data_->SetLastError(kViEFileNotPlaying);
+    return -1;
+  }
+
+  if (vie_file_player->StopSendAudioOnChannel(audio_channel) != 0) {
+    shared_data_->SetLastError(kViEFileVoEFailure);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEFileImpl::StartPlayAudioLocally(const int file_id,
+                                       const int audio_channel,
+                                       float volume_scaling) {
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+
+  ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
+  if (!vie_file_player) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: File with id %d is not playing.", __FUNCTION__,
+                 file_id);
+    shared_data_->SetLastError(kViEFileNotPlaying);
+    return -1;
+  }
+  if (vie_file_player->PlayAudioLocally(audio_channel, volume_scaling) != 0) {
+    shared_data_->SetLastError(kViEFileVoEFailure);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEFileImpl::StopPlayAudioLocally(const int file_id,
+                                      const int audio_channel) {
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+
+  ViEFilePlayer* vie_file_player = is.FilePlayer(file_id);
+  if (!vie_file_player) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: File with id %d is not playing.", __FUNCTION__,
+                 file_id);
+    shared_data_->SetLastError(kViEFileNotPlaying);
+    return -1;
+  }
+  if (vie_file_player->StopPlayAudioLocally(audio_channel) != 0) {
+    shared_data_->SetLastError(kViEFileVoEFailure);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEFileImpl::StartRecordOutgoingVideo(const int video_channel,
+                                          const char* file_nameUTF8,
+                                          AudioSource audio_source,
+                                          const CodecInst& audio_codec,
+                                          const VideoCodec& video_codec,
+                                          const FileFormats file_format) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileInvalidChannelId);
+    return -1;
+  }
+  ViEFileRecorder& file_recorder = vie_encoder->GetOutgoingFileRecorder();
+  if (file_recorder.RecordingStarted()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Already recording outgoing video on channel %d",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileAlreadyRecording);
+    return -1;
+  }
+
+  WebRtc_Word32 ve_channel_id = -1;
+  VoiceEngine* ve_ptr = NULL;
+  if (audio_source != NO_AUDIO) {
+    ViEChannel* vie_channel = cs.Channel(video_channel);
+    if (!vie_channel) {
+      // Channel should exists since we have a ViEEncoder above.
+      assert(false);
+      return -1;
+    }
+    ve_channel_id = vie_channel->VoiceChannel();
+    ve_ptr = shared_data_->channel_manager()->GetVoiceEngine();
+    if (!ve_ptr) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo,
+                   ViEId(shared_data_->instance_id(), video_channel),
+                   "%s: Can't access voice engine. Have SetVoiceEngine "
+                   "been called?", __FUNCTION__);
+      shared_data_->SetLastError(kViEFileVoENotSet);
+      return -1;
+    }
+  }
+  if (file_recorder.StartRecording(file_nameUTF8, video_codec, audio_source,
+                                   ve_channel_id, audio_codec, ve_ptr,
+                                   file_format) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Failed to start recording. Check arguments.",
+                 __FUNCTION__);
+    shared_data_->SetLastError(kViEFileUnknownError);
+    return -1;
+  }
+
+  return 0;
+}
+
+int ViEFileImpl::StopRecordOutgoingVideo(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileInvalidChannelId);
+    return -1;
+  }
+  ViEFileRecorder& file_recorder = vie_encoder->GetOutgoingFileRecorder();
+  if (!file_recorder.RecordingStarted()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d is not recording.", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViEFileNotRecording);
+    return -1;
+  }
+  if (file_recorder.StopRecording() != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Failed to stop recording of channel %d.", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViEFileUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEFileImpl::StopRecordIncomingVideo(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileInvalidChannelId);
+    return -1;
+  }
+  ViEFileRecorder& file_recorder = vie_channel->GetIncomingFileRecorder();
+  if (!file_recorder.RecordingStarted()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d is not recording.", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViEFileNotRecording);
+    vie_channel->ReleaseIncomingFileRecorder();
+    return -1;
+  }
+  if (file_recorder.StopRecording() != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Failed to stop recording of channel %d.",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileUnknownError);
+    vie_channel->ReleaseIncomingFileRecorder();
+    return -1;
+  }
+  // Let the channel know we are no longer recording.
+  vie_channel->ReleaseIncomingFileRecorder();
+  return 0;
+}
+
+int ViEFileImpl::StartRecordIncomingVideo(const int video_channel,
+                                          const char* file_nameUTF8,
+                                          AudioSource audio_source,
+                                          const CodecInst& audio_codec,
+                                          const VideoCodec& video_codec,
+                                          const FileFormats file_format) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViEFileInvalidChannelId);
+    return -1;
+  }
+  ViEFileRecorder& file_recorder = vie_channel->GetIncomingFileRecorder();
+  if (file_recorder.RecordingStarted()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Already recording outgoing video on channel %d",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileAlreadyRecording);
+    return -1;
+  }
+
+  WebRtc_Word32 ve_channel_id = -1;
+  VoiceEngine* ve_ptr = NULL;
+  if (audio_source != NO_AUDIO) {
+    ve_channel_id = vie_channel->VoiceChannel();
+    ve_ptr = shared_data_->channel_manager()->GetVoiceEngine();
+
+    if (!ve_ptr) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo,
+                   ViEId(shared_data_->instance_id(), video_channel),
+                   "%s: Can't access voice engine. Have SetVoiceEngine "
+                   "been called?", __FUNCTION__);
+      shared_data_->SetLastError(kViEFileVoENotSet);
+      return -1;
+    }
+  }
+  if (file_recorder.StartRecording(file_nameUTF8, video_codec, audio_source,
+                                   ve_channel_id, audio_codec, ve_ptr,
+                                   file_format) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Failed to start recording. Check arguments.",
+                 __FUNCTION__);
+    shared_data_->SetLastError(kViEFileUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEFileImpl::GetFileInformation(const char* file_name,
+                                    VideoCodec& video_codec,
+                                    CodecInst& audio_codec,
+                                    const FileFormats file_format) {
+  return ViEFilePlayer::GetFileInformation(
+           shared_data_->instance_id(),
+           file_name, video_codec, audio_codec, file_format);
+}
+
+int ViEFileImpl::GetRenderSnapshot(const int video_channel,
+                                   const char* file_nameUTF8) {
+  // Gain access to the renderer for the specified channel and get it's
+  // current frame.
+  ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+  ViERenderer* renderer = rs.Renderer(video_channel);
+  if (!renderer) {
+    return -1;
+  }
+
+  VideoFrame video_frame;
+  if (renderer->GetLastRenderedFrame(video_channel, video_frame) == -1) {
+    return -1;
+  }
+
+  // JPEGEncoder writes the jpeg file for you (no control over it) and does
+  // not return you the buffer. Thus, we are not going to be writing to the
+  // disk here.
+  JpegEncoder jpeg_encoder;
+  if (jpeg_encoder.SetFileName(file_nameUTF8) == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, shared_data_->instance_id(),
+                 "\tCould not open output file '%s' for writing!",
+                 file_nameUTF8);
+    return -1;
+  }
+
+  if (jpeg_encoder.Encode(video_frame) == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, shared_data_->instance_id(),
+                 "\tCould not encode i420 -> jpeg file '%s' for writing!",
+                 file_nameUTF8);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEFileImpl::GetRenderSnapshot(const int video_channel,
+                                   ViEPicture& picture) {
+  // Gain access to the renderer for the specified channel and get it's
+  // current frame.
+  ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+  ViERenderer* renderer = rs.Renderer(video_channel);
+  if (!renderer) {
+    return -1;
+  }
+
+  VideoFrame video_frame;
+  if (renderer->GetLastRenderedFrame(video_channel, video_frame) == -1) {
+    return -1;
+  }
+
+  // Copy from VideoFrame class to ViEPicture struct.
+  int buffer_length =
+      static_cast<int>(video_frame.Width() * video_frame.Height() * 1.5);
+  picture.data =  static_cast<WebRtc_UWord8*>(malloc(
+      buffer_length * sizeof(WebRtc_UWord8)));
+  memcpy(picture.data, video_frame.Buffer(), buffer_length);
+  picture.size = buffer_length;
+  picture.width = video_frame.Width();
+  picture.height = video_frame.Height();
+  picture.type = kVideoI420;
+  return 0;
+}
+
+int ViEFileImpl::GetCaptureDeviceSnapshot(const int capture_id,
+                                          const char* file_nameUTF8) {
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* capturer = is.Capture(capture_id);
+  if (!capturer) {
+    return -1;
+  }
+
+  VideoFrame video_frame;
+  if (GetNextCapturedFrame(capture_id, &video_frame) == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, shared_data_->instance_id(),
+                 "Could not gain acces to capture device %d video frame "
+                 "%s:%d", capture_id, __FUNCTION__);
+    return -1;
+  }
+
+  // JPEGEncoder writes the jpeg file for you (no control over it) and does
+  // not return you the buffer Thusly, we are not going to be writing to the
+  // disk here.
+  JpegEncoder jpeg_encoder;
+
+  if (jpeg_encoder.SetFileName(file_nameUTF8) == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, shared_data_->instance_id(),
+                 "\tCould not open output file '%s' for writing!",
+                 file_nameUTF8);
+    return -1;
+  }
+  if (jpeg_encoder.Encode(video_frame) == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, shared_data_->instance_id(),
+                 "\tCould not encode i420 -> jpeg file '%s' for "
+                 "writing!", file_nameUTF8);
+
+    return -1;
+  }
+  return 0;
+}
+
+int ViEFileImpl::GetCaptureDeviceSnapshot(const int capture_id,
+                                          ViEPicture& picture) {
+  VideoFrame video_frame;
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* capturer = is.Capture(capture_id);
+  if (!capturer) {
+    return -1;
+  }
+  if (GetNextCapturedFrame(capture_id, &video_frame) == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, shared_data_->instance_id(),
+                 "Could not gain acces to capture device %d video frame "
+                 "%s:%d", capture_id, __FUNCTION__);
+    return -1;
+  }
+
+  // Copy from VideoFrame class to ViEPicture struct.
+  int buffer_length =
+      static_cast<int>(video_frame.Width() * video_frame.Height() * 1.5);
+  picture.data = static_cast<WebRtc_UWord8*>(malloc(
+      buffer_length * sizeof(WebRtc_UWord8)));
+  memcpy(picture.data, video_frame.Buffer(), buffer_length);
+  picture.size = buffer_length;
+  picture.width = video_frame.Width();
+  picture.height = video_frame.Height();
+  picture.type = kVideoI420;
+  return 0;
+}
+
+int ViEFileImpl::FreePicture(ViEPicture& picture) {  // NOLINT
+  if (picture.data) {
+    free(picture.data);
+  }
+
+  picture.data = NULL;
+  picture.size = 0;
+  picture.width = 0;
+  picture.height = 0;
+  picture.type = kVideoUnknown;
+  return 0;
+}
+int ViEFileImpl::SetCaptureDeviceImage(const int capture_id,
+                                       const char* file_nameUTF8) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
+               "%s(capture_id: %d)", __FUNCTION__, capture_id);
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* capturer = is.Capture(capture_id);
+  if (!capturer) {
+    shared_data_->SetLastError(kViEFileInvalidCaptureId);
+    return -1;
+  }
+
+  VideoFrame capture_image;
+  if (ViEFileImage::ConvertJPEGToVideoFrame(
+          ViEId(shared_data_->instance_id(), capture_id), file_nameUTF8,
+          &capture_image) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), capture_id),
+                 "%s(capture_id: %d) Failed to open file.", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViEFileInvalidFile);
+    return -1;
+  }
+  if (capturer->SetCaptureDeviceImage(capture_image)) {
+    shared_data_->SetLastError(kViEFileSetCaptureImageError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEFileImpl::SetCaptureDeviceImage(const int capture_id,
+                                       const ViEPicture& picture) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
+               "%s(capture_id: %d)", __FUNCTION__, capture_id);
+
+  if (picture.type != kVideoI420) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), capture_id),
+                 "%s(capture_id: %d) Not a valid picture type.",
+                 __FUNCTION__, capture_id);
+    shared_data_->SetLastError(kViEFileInvalidArgument);
+    return -1;
+  }
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* capturer = is.Capture(capture_id);
+  if (!capturer) {
+    shared_data_->SetLastError(kViEFileSetCaptureImageError);
+    return -1;
+  }
+
+  VideoFrame capture_image;
+  if (ViEFileImage::ConvertPictureToVideoFrame(
+      ViEId(shared_data_->instance_id(), capture_id), picture,
+          &capture_image) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), capture_id),
+                 "%s(capture_id: %d) Failed to use picture.", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViEFileInvalidFile);
+    return -1;
+  }
+  if (capturer->SetCaptureDeviceImage(capture_image)) {
+    shared_data_->SetLastError(kViEFileInvalidCapture);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEFileImpl::SetRenderStartImage(const int video_channel,
+                                     const char* file_nameUTF8) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+  ViERenderer* renderer = rs.Renderer(video_channel);
+  if (!renderer) {
+    shared_data_->SetLastError(kViEFileInvalidRenderId);
+    return -1;
+  }
+
+  VideoFrame start_image;
+  if (ViEFileImage::ConvertJPEGToVideoFrame(
+      ViEId(shared_data_->instance_id(), video_channel), file_nameUTF8,
+          &start_image) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s(video_channel: %d) Failed to open file.", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViEFileInvalidFile);
+    return -1;
+  }
+  if (renderer->SetRenderStartImage(start_image) != 0) {
+    shared_data_->SetLastError(kViEFileSetStartImageError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEFileImpl::SetRenderStartImage(const int video_channel,
+                                     const ViEPicture& picture) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+  if (picture.type != kVideoI420) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s(video_channel: %d) Not a valid picture type.",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileInvalidArgument);
+    return -1;
+  }
+
+  ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+  ViERenderer* renderer = rs.Renderer(video_channel);
+  if (!renderer) {
+    shared_data_->SetLastError(kViEFileInvalidRenderId);
+    return -1;
+  }
+
+  VideoFrame start_image;
+  if (ViEFileImage::ConvertPictureToVideoFrame(
+      ViEId(shared_data_->instance_id(), video_channel), picture,
+          &start_image) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s(video_channel: %d) Failed to use picture.",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileInvalidCapture);
+    return -1;
+  }
+  if (renderer->SetRenderStartImage(start_image) != 0) {
+    shared_data_->SetLastError(kViEFileSetStartImageError);
+    return -1;
+  }
+  return 0;
+}
+int ViEFileImpl::SetRenderTimeoutImage(const int video_channel,
+                                       const char* file_nameUTF8,
+                                       const unsigned int timeout_ms) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+  ViERenderer* renderer = rs.Renderer(video_channel);
+  if (!renderer) {
+    shared_data_->SetLastError(kViEFileInvalidRenderId);
+    return -1;
+  }
+  VideoFrame timeout_image;
+  if (ViEFileImage::ConvertJPEGToVideoFrame(
+          ViEId(shared_data_->instance_id(), video_channel), file_nameUTF8,
+          &timeout_image) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s(video_channel: %d) Failed to open file.", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViEFileInvalidFile);
+    return -1;
+  }
+  WebRtc_Word32 timeout_time = timeout_ms;
+  if (timeout_ms < kViEMinRenderTimeoutTimeMs) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s(video_channel: %d) Invalid timeout_ms, using %d.",
+                 __FUNCTION__, video_channel, kViEMinRenderTimeoutTimeMs);
+    timeout_time = kViEMinRenderTimeoutTimeMs;
+  }
+  if (timeout_ms > kViEMaxRenderTimeoutTimeMs) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s(video_channel: %d) Invalid timeout_ms, using %d.",
+                 __FUNCTION__, video_channel, kViEMaxRenderTimeoutTimeMs);
+    timeout_time = kViEMaxRenderTimeoutTimeMs;
+  }
+  if (renderer->SetTimeoutImage(timeout_image, timeout_time) != 0) {
+    shared_data_->SetLastError(kViEFileSetRenderTimeoutError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEFileImpl::SetRenderTimeoutImage(const int video_channel,
+                                       const ViEPicture& picture,
+const unsigned int timeout_ms) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  if (picture.type != kVideoI420) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s(video_channel: %d) Not a valid picture type.",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileInvalidArgument);
+    return -1;
+  }
+
+  ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+  ViERenderer* renderer = rs.Renderer(video_channel);
+  if (!renderer) {
+    shared_data_->SetLastError(kViEFileSetRenderTimeoutError);
+    return -1;
+  }
+  VideoFrame timeout_image;
+  if (ViEFileImage::ConvertPictureToVideoFrame(
+          ViEId(shared_data_->instance_id(), video_channel), picture,
+          &timeout_image) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s(video_channel: %d) Failed to use picture.",
+                 __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEFileInvalidCapture);
+    return -1;
+  }
+  WebRtc_Word32 timeout_time = timeout_ms;
+  if (timeout_ms < kViEMinRenderTimeoutTimeMs) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s(video_channel: %d) Invalid timeout_ms, using %d.",
+                 __FUNCTION__, video_channel, kViEMinRenderTimeoutTimeMs);
+    timeout_time = kViEMinRenderTimeoutTimeMs;
+  }
+  if (timeout_ms > kViEMaxRenderTimeoutTimeMs) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s(video_channel: %d) Invalid timeout_ms, using %d.",
+                 __FUNCTION__, video_channel, kViEMaxRenderTimeoutTimeMs);
+    timeout_time = kViEMaxRenderTimeoutTimeMs;
+  }
+  if (renderer->SetTimeoutImage(timeout_image, timeout_time) != 0) {
+    shared_data_->SetLastError(kViEFileSetRenderTimeoutError);
+    return -1;
+  }
+  return 0;
+}
+
+WebRtc_Word32 ViEFileImpl::GetNextCapturedFrame(WebRtc_Word32 capture_id,
+                                                VideoFrame* video_frame) {
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* capturer = is.Capture(capture_id);
+  if (!capturer) {
+    return -1;
+  }
+
+  ViECaptureSnapshot* snap_shot = new ViECaptureSnapshot();
+  capturer->RegisterFrameCallback(-1, snap_shot);
+  bool snapshot_taken = snap_shot->GetSnapshot(kViECaptureMaxSnapshotWaitTimeMs,
+                                               video_frame);
+
+  // Check once again if it has been destroyed.
+  capturer->DeregisterFrameCallback(snap_shot);
+  delete snap_shot;
+  snap_shot = NULL;
+
+  if (snapshot_taken) {
+    return 0;
+  }
+  return -1;
+}
+
+int ViEFileImpl::StartDebugRecording(int video_channel,
+                                     const char* file_name_utf8) {
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+     WEBRTC_TRACE(kTraceError, kTraceVideo,
+                  ViEId(shared_data_->instance_id(), video_channel),
+                  "%s: No encoder %d", __FUNCTION__, video_channel);
+    return -1;
+  }
+  return vie_encoder->StartDebugRecording(file_name_utf8);
+}
+
+int ViEFileImpl::StopDebugRecording(int video_channel) {
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo,
+                   ViEId(shared_data_->instance_id(), video_channel),
+                   "%s: No encoder %d", __FUNCTION__, video_channel);
+    return -1;
+  }
+  return vie_encoder->StopDebugRecording();
+}
+
+ViECaptureSnapshot::ViECaptureSnapshot()
+    : crit_(CriticalSectionWrapper::CreateCriticalSection()),
+      condition_varaible_(ConditionVariableWrapper::CreateConditionVariable()),
+      video_frame_(NULL) {
+}
+
+ViECaptureSnapshot::~ViECaptureSnapshot() {
+  if (video_frame_) {
+    delete video_frame_;
+    video_frame_ = NULL;
+  }
+}
+
+bool ViECaptureSnapshot::GetSnapshot(unsigned int max_wait_time,
+                                     VideoFrame* video_frame) {
+  crit_->Enter();
+  video_frame_ = new VideoFrame();
+  if (condition_varaible_->SleepCS(*(crit_.get()), max_wait_time)) {
+    // Snapshot taken.
+    video_frame->SwapFrame(*video_frame_);
+    delete video_frame_;
+    video_frame_ = NULL;
+    crit_->Leave();
+    return true;
+  }
+  crit_->Leave();
+  return false;
+}
+
+void ViECaptureSnapshot::DeliverFrame(int id,
+                                      VideoFrame* video_frame,
+                                      int num_csrcs,
+const WebRtc_UWord32 CSRC[kRtpCsrcSize]) {
+  CriticalSectionScoped cs(crit_.get());
+  if (!video_frame_) {
+    return;
+  }
+  video_frame_->SwapFrame(*video_frame);
+  condition_varaible_->WakeAll();
+  return;
+}
+
+#endif
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_file_impl.h b/src/video_engine/vie_file_impl.h
new file mode 100644
index 0000000..b90c92b
--- /dev/null
+++ b/src/video_engine/vie_file_impl.h
@@ -0,0 +1,140 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_FILE_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_FILE_IMPL_H_
+
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "typedefs.h"  // NOLINT
+#include "video_engine/include/vie_file.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_frame_provider_base.h"
+#include "video_engine/vie_ref_count.h"
+#include "video_engine/vie_shared_data.h"
+
+namespace webrtc {
+
+class ConditionVariableWrapper;
+class CriticalSectionWrapper;
+class ViESharedData;
+
+class ViECaptureSnapshot : public ViEFrameCallback {
+ public:
+  ViECaptureSnapshot();
+  ~ViECaptureSnapshot();
+
+  bool GetSnapshot(unsigned int max_wait_time, VideoFrame* video_frame);
+
+  // Implements ViEFrameCallback.
+  virtual void DeliverFrame(int id,
+                            VideoFrame* video_frame,
+                            int num_csrcs = 0,
+                            const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
+  virtual void DelayChanged(int id, int frame_delay) {}
+  virtual int GetPreferedFrameSettings(int* width,
+                                       int* height,
+                                       int* frame_rate) {
+    return -1;
+  }
+  virtual void ProviderDestroyed(int id) {}
+
+ private:
+  scoped_ptr<CriticalSectionWrapper> crit_;
+  scoped_ptr<ConditionVariableWrapper> condition_varaible_;
+  VideoFrame* video_frame_;
+};
+
+class ViEFileImpl
+    : public ViEFile,
+      public ViERefCount {
+ public:
+  // Implements ViEFile.
+  virtual int Release();
+  virtual int StartPlayFile(const char* file_nameUTF8, int& file_id,  // NOLINT
+                            const bool loop = false,
+                            const FileFormats file_format = kFileFormatAviFile);
+  virtual int StopPlayFile(const int file_id);
+  virtual int RegisterObserver(int file_id,
+                               ViEFileObserver& observer);  // NOLINT
+  virtual int DeregisterObserver(int file_id,
+                                 ViEFileObserver& observer);  // NOLINT
+  virtual int SendFileOnChannel(const int file_id, const int video_channel);
+  virtual int StopSendFileOnChannel(const int video_channel);
+  virtual int StartPlayFileAsMicrophone(const int file_id,
+                                        const int audio_channel,
+                                        bool mix_microphone = false,
+                                        float volume_scaling = 1);
+  virtual int StopPlayFileAsMicrophone(const int file_id,
+                                       const int audio_channel);
+  virtual int StartPlayAudioLocally(const int file_id, const int audio_channel,
+                                    float volume_scaling = 1);
+  virtual int StopPlayAudioLocally(const int file_id, const int audio_channel);
+  virtual int StartRecordOutgoingVideo(
+      const int video_channel,
+      const char* file_nameUTF8,
+      AudioSource audio_source,
+      const CodecInst& audio_codec,
+      const VideoCodec& video_codec,
+      const FileFormats file_format = kFileFormatAviFile);
+  virtual int StartRecordIncomingVideo(
+      const int video_channel,
+      const char* file_nameUTF8,
+      AudioSource audio_source,
+      const CodecInst& audio_codec,
+      const VideoCodec& video_codec,
+      const FileFormats file_format = kFileFormatAviFile);
+  virtual int StopRecordOutgoingVideo(const int video_channel);
+  virtual int StopRecordIncomingVideo(const int video_channel);
+  virtual int GetFileInformation(
+      const char* file_name,
+      VideoCodec& video_codec,
+      CodecInst& audio_codec,
+      const FileFormats file_format = kFileFormatAviFile);
+  virtual int GetRenderSnapshot(const int video_channel,
+                                const char* file_nameUTF8);
+  virtual int GetRenderSnapshot(const int video_channel,
+                                ViEPicture& picture);  // NOLINT
+  virtual int FreePicture(ViEPicture& picture);  // NOLINT
+  virtual int GetCaptureDeviceSnapshot(const int capture_id,
+                                       const char* file_nameUTF8);
+  virtual int GetCaptureDeviceSnapshot(const int capture_id,
+                                       ViEPicture& picture);
+  virtual int SetCaptureDeviceImage(const int capture_id,
+                                    const char* file_nameUTF8);
+  virtual int SetCaptureDeviceImage(const int capture_id,
+                                    const ViEPicture& picture);
+  virtual int SetRenderStartImage(const int video_channel,
+                                  const char* file_nameUTF8);
+  virtual int SetRenderStartImage(const int video_channel,
+                                  const ViEPicture& picture);
+  virtual int SetRenderTimeoutImage(const int video_channel,
+                                    const char* file_nameUTF8,
+                                    const unsigned int timeout_ms);
+  virtual int SetRenderTimeoutImage(const int video_channel,
+                                    const ViEPicture& picture,
+                                    const unsigned int timeout_ms);
+  virtual int StartDebugRecording(int video_channel,
+                                  const char* file_name_utf8);
+  virtual int StopDebugRecording(int video_channel);
+
+ protected:
+  explicit ViEFileImpl(ViESharedData* shared_data);
+  virtual ~ViEFileImpl();
+
+ private:
+  WebRtc_Word32 GetNextCapturedFrame(WebRtc_Word32 capture_id,
+                                     VideoFrame* video_frame);
+
+  ViESharedData* shared_data_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_FILE_IMPL_H_
diff --git a/src/video_engine/vie_file_player.cc b/src/video_engine/vie_file_player.cc
new file mode 100644
index 0000000..1f01cad
--- /dev/null
+++ b/src/video_engine/vie_file_player.cc
@@ -0,0 +1,505 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_file_player.h"
+
+#include "modules/utility/interface/file_player.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/event_wrapper.h"
+#include "system_wrappers/interface/thread_wrapper.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_file.h"
+#include "video_engine/vie_defines.h"
+#include "voice_engine/include/voe_base.h"
+#include "voice_engine/include/voe_file.h"
+#include "voice_engine/include/voe_video_sync.h"
+
+namespace webrtc {
+
+const int kThreadWaitTimeMs = 100;
+
+ViEFilePlayer* ViEFilePlayer::CreateViEFilePlayer(
+    int file_id,
+    int engine_id,
+    const char* file_nameUTF8,
+    const bool loop,
+    const FileFormats file_format,
+    VoiceEngine* voe_ptr) {
+  ViEFilePlayer* self = new ViEFilePlayer(file_id, engine_id);
+  if (!self || self->Init(file_nameUTF8, loop, file_format, voe_ptr) != 0) {
+    delete self;
+    self = NULL;
+  }
+  return self;
+}
+
+ViEFilePlayer::ViEFilePlayer(int Id,
+                             int engine_id)
+    : ViEFrameProviderBase(Id, engine_id),
+      play_back_started_(false),
+      feedback_cs_(NULL),
+      audio_cs_(NULL),
+      file_player_(NULL),
+      audio_stream_(false),
+      video_clients_(0),
+      audio_clients_(0),
+      local_audio_channel_(-1),
+      observer_(NULL),
+      voe_file_interface_(NULL),
+      voe_video_sync_(NULL),
+      decode_thread_(NULL),
+      decode_event_(NULL),
+      decoded_audio_length_(0) {
+  memset(file_name_, 0, FileWrapper::kMaxFileNameSize);
+  memset(decoded_audio_, 0, kMaxDecodedAudioLength);
+}
+
+ViEFilePlayer::~ViEFilePlayer() {
+  // StopPlay deletes decode_thread_.
+  StopPlay();
+  delete decode_event_;
+  delete audio_cs_;
+  delete feedback_cs_;
+}
+
+int ViEFilePlayer::Init(const char* file_nameUTF8,
+                        const bool loop,
+                        const FileFormats file_format,
+                        VoiceEngine* voice_engine) {
+  feedback_cs_ = CriticalSectionWrapper::CreateCriticalSection();
+  if (!feedback_cs_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "ViEFilePlayer::StartPlay() failed to allocate critsect");
+    return -1;
+  }
+
+  audio_cs_ = CriticalSectionWrapper::CreateCriticalSection();
+  if (!audio_cs_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "ViEFilePlayer::StartPlay() failed to allocate critsect");
+    return -1;
+  }
+
+  decode_event_ = EventWrapper::Create();
+  if (!decode_event_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "ViEFilePlayer::StartPlay() failed to allocate event");
+    return -1;
+  }
+  if (strlen(file_nameUTF8) > FileWrapper::kMaxFileNameSize) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "ViEFilePlayer::StartPlay() Too long filename");
+    return -1;
+  }
+  strncpy(file_name_, file_nameUTF8, strlen(file_nameUTF8) + 1);
+
+  file_player_ = FilePlayer::CreateFilePlayer(ViEId(engine_id_, id_),
+                                              file_format);
+  if (!file_player_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "ViEFilePlayer::StartPlay() failed to create file player");
+    return -1;
+  }
+  if (file_player_->RegisterModuleFileCallback(this) == -1) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "ViEFilePlayer::StartPlay() failed to "
+                 "RegisterModuleFileCallback");
+    file_player_ = NULL;
+    return -1;
+  }
+  decode_thread_ = ThreadWrapper::CreateThread(FilePlayDecodeThreadFunction,
+                                               this, kHighestPriority,
+                                               "ViEFilePlayThread");
+  if (!decode_thread_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "ViEFilePlayer::StartPlay() failed to start decode thread.");
+    file_player_ = NULL;
+    return -1;
+  }
+
+  // Always try to open with Audio since we don't know on what channels the
+  // audio should be played on.
+  WebRtc_Word32 error = file_player_->StartPlayingVideoFile(file_name_, loop,
+                                                            false);
+  if (error) {
+    // Failed to open the file with audio, try without.
+    error = file_player_->StartPlayingVideoFile(file_name_, loop, true);
+    audio_stream_ = false;
+    if (error) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                   "ViEFilePlayer::StartPlay() failed to Start play video "
+                   "file");
+      return -1;
+    }
+
+  } else {
+    audio_stream_ = true;
+  }
+
+  if (audio_stream_) {
+    if (voice_engine) {
+      // A VoiceEngine has been provided and we want to play audio on local
+      // a channel.
+      voe_file_interface_ = VoEFile::GetInterface(voice_engine);
+      if (!voe_file_interface_) {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                     "ViEFilePlayer::StartPlay() failed to get VEFile "
+                     "interface");
+        return -1;
+      }
+      voe_video_sync_ = VoEVideoSync::GetInterface(voice_engine);
+      if (!voe_video_sync_) {
+        WEBRTC_TRACE(kTraceError, kTraceVideo,
+                     ViEId(engine_id_, id_),
+                     "ViEFilePlayer::StartPlay() failed to get "
+                     "VoEVideoSync interface");
+        return -1;
+      }
+    }
+  }
+
+  // Read audio /(or just video) every 10ms.
+  decode_event_->StartTimer(true, 10);
+  return 0;
+}
+
+int ViEFilePlayer::FrameCallbackChanged() {
+  // Starts the decode thread when someone cares.
+  if (ViEFrameProviderBase::NumberOfRegisteredFrameCallbacks() >
+      video_clients_) {
+    if (!play_back_started_) {
+      play_back_started_ = true;
+      unsigned int thread_id;
+      if (decode_thread_->Start(thread_id)) {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(engine_id_, id_),
+                     "ViEFilePlayer::FrameCallbackChanged() Started file decode"
+                     " thread %u", thread_id);
+      } else {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                     "ViEFilePlayer::FrameCallbackChanged() Failed to start "
+                     "file decode thread.");
+      }
+    } else if (!file_player_->IsPlayingFile()) {
+      if (file_player_->StartPlayingVideoFile(file_name_, false,
+                                              !audio_stream_) != 0) {
+        WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                     "ViEFilePlayer::FrameCallbackChanged(), Failed to restart "
+                     "the file player.");
+      }
+    }
+  }
+  video_clients_ = ViEFrameProviderBase::NumberOfRegisteredFrameCallbacks();
+  return 0;
+}
+
+bool ViEFilePlayer::FilePlayDecodeThreadFunction(void* obj) {
+  return static_cast<ViEFilePlayer*>(obj)->FilePlayDecodeProcess();
+}
+
+bool ViEFilePlayer::FilePlayDecodeProcess() {
+  if (decode_event_->Wait(kThreadWaitTimeMs) == kEventSignaled) {
+    if (audio_stream_ && audio_clients_ == 0) {
+      // There is audio but no one cares, read the audio here.
+      Read(NULL, 0);
+    }
+    if (file_player_->TimeUntilNextVideoFrame() < 10) {
+      // Less than 10ms to next videoframe.
+      if (file_player_->GetVideoFromFile(decoded_video_) != 0) {
+      }
+    }
+    if (decoded_video_.Length() > 0) {
+      if (local_audio_channel_ != -1 && voe_video_sync_) {
+        // We are playing audio locally.
+        int audio_delay = 0;
+        if (voe_video_sync_->GetPlayoutBufferSize(audio_delay) == 0) {
+          decoded_video_.SetRenderTime(decoded_video_.RenderTimeMs() +
+                                       audio_delay);
+        }
+      }
+      DeliverFrame(&decoded_video_);
+      decoded_video_.SetLength(0);
+    }
+  }
+  return true;
+}
+
+int ViEFilePlayer::StopPlay() {
+  // Only called from destructor.
+  if (decode_thread_) {
+    decode_thread_->SetNotAlive();
+    if (decode_thread_->Stop()) {
+      delete decode_thread_;
+    } else {
+      assert(false && "ViEFilePlayer::StopPlay() Failed to stop decode thread");
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                   "ViEFilePlayer::StartPlay() Failed to stop file decode "
+                   "thread.");
+    }
+  }
+  decode_thread_ = NULL;
+  if (decode_event_) {
+    decode_event_->StopTimer();
+  }
+  StopPlayAudio();
+
+  if (voe_file_interface_) {
+    voe_file_interface_->Release();
+    voe_file_interface_ = NULL;
+  }
+  if (voe_video_sync_) {
+    voe_video_sync_->Release();
+    voe_video_sync_ = NULL;
+  }
+
+  if (file_player_) {
+    file_player_->StopPlayingFile();
+    FilePlayer::DestroyFilePlayer(file_player_);
+    file_player_ = NULL;
+  }
+  return 0;
+}
+
+int ViEFilePlayer::StopPlayAudio() {
+  // Stop sending audio.
+
+  std::set<int>::iterator it = audio_channels_sending_.begin();
+  while (it != audio_channels_sending_.end()) {
+    StopSendAudioOnChannel(*it);
+    // StopSendAudioOnChannel erases the item from the map.
+    it = audio_channels_sending_.begin();
+  }
+
+  // Stop local audio playback.
+  if (local_audio_channel_ != -1) {
+    StopPlayAudioLocally(local_audio_channel_);
+  }
+  local_audio_channel_ = -1;
+  audio_channel_buffers_.clear();
+  audio_clients_ = 0;
+  return 0;
+}
+
+int ViEFilePlayer::Read(void* buf, int len) {
+  // Protect from simultaneous reading from multiple channels.
+  CriticalSectionScoped lock(*audio_cs_);
+  if (NeedsAudioFromFile(buf)) {
+    // We will run the VoE in 16KHz.
+    if (file_player_->Get10msAudioFromFile(decoded_audio_,
+                                           decoded_audio_length_, 16000) != 0) {
+      // No data.
+      decoded_audio_length_ = 0;
+      return 0;
+    }
+    // 2 bytes per sample.
+    decoded_audio_length_ *= 2;
+    if (buf) {
+      audio_channel_buffers_.push_back(buf);
+    }
+  } else {
+    // No need for new audiobuffer from file, ie the buffer read from file has
+    // not been played on this channel.
+  }
+  if (buf) {
+    memcpy(buf, decoded_audio_, decoded_audio_length_);
+  }
+  return decoded_audio_length_;
+}
+
+bool ViEFilePlayer::NeedsAudioFromFile(void* buf) {
+  bool needs_new_audio = false;
+  if (audio_channel_buffers_.size() == 0) {
+    return true;
+  }
+
+  // Check if we the buf already have read the current audio.
+  for (std::list<void*>::iterator it = audio_channel_buffers_.begin();
+       it != audio_channel_buffers_.end(); ++it) {
+    if (*it == buf) {
+      needs_new_audio = true;
+      audio_channel_buffers_.erase(it);
+      break;
+    }
+  }
+  return needs_new_audio;
+}
+
+void ViEFilePlayer::PlayFileEnded(const WebRtc_Word32 id) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id),
+               "%s: file_id %d", __FUNCTION__, id_);
+  file_player_->StopPlayingFile();
+
+  CriticalSectionScoped lock(*feedback_cs_);
+  if (observer_) {
+    observer_->PlayFileEnded(id_);
+  }
+}
+
+bool ViEFilePlayer::IsObserverRegistered() {
+  CriticalSectionScoped lock(*feedback_cs_);
+  return observer_ != NULL;
+}
+
+int ViEFilePlayer::RegisterObserver(ViEFileObserver* observer) {
+  CriticalSectionScoped lock(*feedback_cs_);
+  if (observer_) {
+    return -1;
+  }
+  observer_ = observer;
+  return 0;
+}
+
+int ViEFilePlayer::DeRegisterObserver() {
+  CriticalSectionScoped lock(*feedback_cs_);
+  observer_ = NULL;
+  return 0;
+}
+
+int ViEFilePlayer::SendAudioOnChannel(const int audio_channel,
+                                      bool mix_microphone,
+                                      float volume_scaling) {
+  if (!voe_file_interface_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "%s No VEFile interface.", __FUNCTION__);
+    return -1;
+  }
+  if (voe_file_interface_->StartPlayingFileAsMicrophone(audio_channel, this,
+                                                       mix_microphone,
+                                                       kFileFormatPcm16kHzFile,
+                                                       volume_scaling) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "ViEFilePlayer::SendAudioOnChannel() "
+                 "VE_StartPlayingFileAsMicrophone failed. audio_channel %d, "
+                 " mix_microphone %d, volume_scaling %.2f",
+                 audio_channel, mix_microphone, volume_scaling);
+    return -1;
+  }
+  audio_channels_sending_.insert(audio_channel);
+
+  CriticalSectionScoped lock(*audio_cs_);
+  audio_clients_++;
+  return 0;
+}
+
+int ViEFilePlayer::StopSendAudioOnChannel(const int audio_channel) {
+  int result = 0;
+  if (!voe_file_interface_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "ViEFilePlayer::StopSendAudioOnChannel() - no VoE interface");
+    return -1;
+  }
+  std::set<int>::iterator it = audio_channels_sending_.find(audio_channel);
+  if (it == audio_channels_sending_.end()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "ViEFilePlayer::StopSendAudioOnChannel AudioChannel %d not "
+                 "sending", audio_channel);
+    return -1;
+  }
+  result = voe_file_interface_->StopPlayingFileAsMicrophone(audio_channel);
+  if (result != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "ViEFilePlayer::StopSendAudioOnChannel() "
+                 "VE_StopPlayingFileAsMicrophone failed. audio_channel %d",
+                 audio_channel);
+  }
+  audio_channels_sending_.erase(audio_channel);
+  CriticalSectionScoped lock(*audio_cs_);
+  audio_clients_--;
+  assert(audio_clients_ >= 0);
+  return 0;
+}
+
+int ViEFilePlayer::PlayAudioLocally(const int audio_channel,
+                                    float volume_scaling) {
+  if (!voe_file_interface_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "%s No VEFile interface.", __FUNCTION__);
+    return -1;
+  }
+  if (voe_file_interface_->StartPlayingFileLocally(audio_channel, this,
+                                                   kFileFormatPcm16kHzFile,
+                                                   volume_scaling) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "%s  VE_StartPlayingFileAsMicrophone failed. audio_channel %d,"
+                 " mix_microphone %d, volume_scaling %.2f",
+                 __FUNCTION__, audio_channel, volume_scaling);
+    return -1;
+  }
+
+  CriticalSectionScoped lock(*audio_cs_);
+  local_audio_channel_ = audio_channel;
+  audio_clients_++;
+  return 0;
+}
+
+int ViEFilePlayer::StopPlayAudioLocally(const int audio_channel) {
+  if (!voe_file_interface_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "%s No VEFile interface.", __FUNCTION__);
+    return -1;
+  }
+  if (voe_file_interface_->StopPlayingFileLocally(audio_channel) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(engine_id_, id_),
+                 "%s VE_StopPlayingFileLocally failed. audio_channel %d.",
+                 __FUNCTION__, audio_channel);
+    return -1;
+  }
+
+  CriticalSectionScoped lock(*audio_cs_);
+  local_audio_channel_ = -1;
+  audio_clients_--;
+  return 0;
+}
+
+int ViEFilePlayer::GetFileInformation(int engine_id,
+                                      const char* file_name,
+                                      VideoCodec& video_codec,
+                                      CodecInst& audio_codec,
+                                      const FileFormats file_format) {
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, engine_id, "%s ", __FUNCTION__);
+
+  FilePlayer* file_player = FilePlayer::CreateFilePlayer(engine_id,
+                                                         file_format);
+  if (!file_player) {
+    return -1;
+  }
+
+  bool video_only = false;
+
+  memset(&video_codec, 0, sizeof(video_codec));
+  memset(&audio_codec, 0, sizeof(audio_codec));
+
+  if (file_player->StartPlayingVideoFile(file_name, false, false) != 0) {
+    video_only = true;
+    if (file_player->StartPlayingVideoFile(file_name, false, true) != 0) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
+                   "%s Failed to open file.", __FUNCTION__);
+      FilePlayer::DestroyFilePlayer(file_player);
+      return -1;
+    }
+  }
+
+  if (!video_only && file_player->AudioCodec(audio_codec) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
+                 "%s Failed to get audio codec.", __FUNCTION__);
+    FilePlayer::DestroyFilePlayer(file_player);
+    return -1;
+  }
+  if (file_player->video_codec_info(video_codec) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, engine_id,
+                 "%s Failed to get video codec.", __FUNCTION__);
+    FilePlayer::DestroyFilePlayer(file_player);
+    return -1;
+  }
+  FilePlayer::DestroyFilePlayer(file_player);
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_file_player.h b/src/video_engine/vie_file_player.h
new file mode 100644
index 0000000..15ac35a
--- /dev/null
+++ b/src/video_engine/vie_file_player.h
@@ -0,0 +1,138 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_FILE_PLAYER_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_FILE_PLAYER_H_
+
+#include <list>
+#include <set>
+
+#include "common_types.h"  // NOLINT
+#include "modules/media_file/interface/media_file_defines.h"
+#include "system_wrappers/interface/file_wrapper.h"
+#include "typedefs.h"  // NOLINT
+#include "video_engine/vie_frame_provider_base.h"
+
+namespace webrtc {
+
+class EventWrapper;
+class FilePlayer;
+class ThreadWrapper;
+class ViEFileObserver;
+class VoEFile;
+class VoEVideoSync;
+class VoiceEngine;
+
+class ViEFilePlayer
+    : public ViEFrameProviderBase,
+      protected FileCallback,
+      protected InStream {
+ public:
+  static ViEFilePlayer* CreateViEFilePlayer(int file_id,
+                                            int engine_id,
+                                            const char* file_nameUTF8,
+                                            const bool loop,
+                                            const FileFormats file_format,
+                                            VoiceEngine* voe_ptr);
+
+  static int GetFileInformation(const int engine_id,
+                                const char* file_name,
+                                VideoCodec& video_codec,
+                                CodecInst& audio_codec,
+                                const FileFormats file_format);
+  ~ViEFilePlayer();
+
+  bool IsObserverRegistered();
+  int RegisterObserver(ViEFileObserver* observer);
+  int DeRegisterObserver();
+  int SendAudioOnChannel(const int audio_channel,
+                         bool mix_microphone,
+                         float volume_scaling);
+  int StopSendAudioOnChannel(const int audio_channel);
+  int PlayAudioLocally(const int audio_channel, float volume_scaling);
+  int StopPlayAudioLocally(const int audio_channel);
+
+  // Implements ViEFrameProviderBase.
+  virtual int FrameCallbackChanged();
+
+ protected:
+  ViEFilePlayer(int Id, int engine_id);
+  int Init(const char* file_nameUTF8,
+           const bool loop,
+           const FileFormats file_format,
+           VoiceEngine* voe_ptr);
+  int StopPlay();
+  int StopPlayAudio();
+
+  // File play decode function.
+  static bool FilePlayDecodeThreadFunction(void* obj);
+  bool FilePlayDecodeProcess();
+  bool NeedsAudioFromFile(void* buf);
+
+  // Implements webrtc::InStream.
+  virtual int Read(void* buf, int len);
+  virtual int Rewind() {
+    return 0;
+  }
+
+  // Implements FileCallback.
+  virtual void PlayNotification(const WebRtc_Word32 /*id*/,
+                                const WebRtc_UWord32 /*notification_ms*/) {}
+  virtual void RecordNotification(const WebRtc_Word32 /*id*/,
+                                  const WebRtc_UWord32 /*notification_ms*/) {}
+  virtual void PlayFileEnded(const WebRtc_Word32 id);
+  virtual void RecordFileEnded(const WebRtc_Word32 /*id*/) {}
+
+ private:
+  static const int kMaxDecodedAudioLength = 320;
+  bool play_back_started_;
+
+  CriticalSectionWrapper* feedback_cs_;
+  CriticalSectionWrapper* audio_cs_;
+
+  FilePlayer* file_player_;
+  bool audio_stream_;
+
+  // Number of active video clients.
+  int video_clients_;
+
+  // Number of audio channels sending this audio.
+  int audio_clients_;
+
+  // Local audio channel playing this video. Sync video against this.
+  int local_audio_channel_;
+
+  ViEFileObserver* observer_;
+  char file_name_[FileWrapper::kMaxFileNameSize];
+
+  // VoE Interface.
+  VoEFile* voe_file_interface_;
+  VoEVideoSync* voe_video_sync_;
+
+  // Thread for decoding video (and audio if no audio clients connected).
+  ThreadWrapper* decode_thread_;
+  EventWrapper* decode_event_;
+  WebRtc_Word16 decoded_audio_[kMaxDecodedAudioLength];
+  int decoded_audio_length_;
+
+  // Trick - list containing VoE buffer reading this file. Used if multiple
+  // audio channels are sending.
+  std::list<void*> audio_channel_buffers_;
+
+  // AudioChannels sending audio from this file.
+  std::set<int> audio_channels_sending_;
+
+  // Frame receiving decoded video from file.
+  VideoFrame decoded_video_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_FILE_PLAYER_H_
diff --git a/src/video_engine/vie_file_recorder.cc b/src/video_engine/vie_file_recorder.cc
new file mode 100644
index 0000000..6f377c4
--- /dev/null
+++ b/src/video_engine/vie_file_recorder.cc
@@ -0,0 +1,239 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_file_recorder.h"
+
+#include "modules/utility/interface/file_player.h"
+#include "modules/utility/interface/file_recorder.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/vie_defines.h"
+
+namespace webrtc {
+
+ViEFileRecorder::ViEFileRecorder(int instanceID)
+    : recorder_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      file_recorder_(NULL),
+      is_first_frame_recorded_(false),
+      is_out_stream_started_(false),
+      instance_id_(instanceID),
+      frame_delay_(0),
+      audio_channel_(-1),
+      audio_source_(NO_AUDIO),
+      voe_file_interface_(NULL) {
+}
+
+ViEFileRecorder::~ViEFileRecorder() {
+  StopRecording();
+  delete recorder_cs_;
+}
+
+int ViEFileRecorder::StartRecording(const char* file_nameUTF8,
+                                    const VideoCodec& codec_inst,
+                                    AudioSource audio_source,
+                                    int audio_channel,
+                                    const CodecInst& audio_codec_inst,
+                                    VoiceEngine* voe_ptr,
+                                    const FileFormats file_format) {
+  CriticalSectionScoped lock(*recorder_cs_);
+
+  if (file_recorder_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, instance_id_,
+                 "ViEFileRecorder::StartRecording() - already recording.");
+    return -1;
+  }
+  file_recorder_ = FileRecorder::CreateFileRecorder(instance_id_, file_format);
+  if (!file_recorder_) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, instance_id_,
+                 "ViEFileRecorder::StartRecording() failed to create recoder.");
+    return -1;
+  }
+
+  int error = file_recorder_->StartRecordingVideoFile(file_nameUTF8,
+                                                      audio_codec_inst,
+                                                      codec_inst,
+                                                      AMRFileStorage,
+                                                      audio_source == NO_AUDIO);
+  if (error) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, instance_id_,
+                 "ViEFileRecorder::StartRecording() failed to "
+                 "StartRecordingVideoFile.");
+    FileRecorder::DestroyFileRecorder(file_recorder_);
+    file_recorder_ = NULL;
+    return -1;
+  }
+
+  audio_source_ = audio_source;
+  if (voe_ptr && audio_source != NO_AUDIO) {
+    // VoE interface has been provided and we want to record audio.
+    voe_file_interface_ = VoEFile::GetInterface(voe_ptr);
+    if (!voe_file_interface_) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, instance_id_,
+                   "ViEFileRecorder::StartRecording() failed to get VEFile "
+                   "interface");
+      return -1;
+    }
+
+    // Always L16.
+    CodecInst engine_audio_codec_inst = {96, "L16", audio_codec_inst.plfreq,
+                                         audio_codec_inst.plfreq / 100, 1,
+                                         audio_codec_inst.plfreq * 16 };
+
+    switch (audio_source) {
+      // case NO_AUDIO is checked above.
+      case MICROPHONE:
+        error = voe_file_interface_->StartRecordingMicrophone(
+            this, &engine_audio_codec_inst);
+        break;
+      case PLAYOUT:
+        error = voe_file_interface_->StartRecordingPlayout(
+            audio_channel, this, &engine_audio_codec_inst);
+        break;
+      default:
+        assert(false && "Unknown audio_source");
+    }
+    if (error != 0) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, instance_id_,
+                   "ViEFileRecorder::StartRecording() failed to start recording"
+                   " audio");
+      FileRecorder::DestroyFileRecorder(file_recorder_);
+      file_recorder_ = NULL;
+      return -1;
+    }
+    is_out_stream_started_ = true;
+    audio_channel_ = audio_channel;
+  }
+  is_first_frame_recorded_ = false;
+  return 0;
+}
+
+int ViEFileRecorder::StopRecording() {
+  int error = 0;
+  // We can not hold the ptr_cs_ while accessing VoE functions. It might cause
+  // deadlock in Write.
+  if (voe_file_interface_) {
+    switch (audio_source_) {
+      case MICROPHONE:
+        error = voe_file_interface_->StopRecordingMicrophone();
+        break;
+      case PLAYOUT:
+        error = voe_file_interface_->StopRecordingPlayout(audio_channel_);
+        break;
+      case NO_AUDIO:
+        break;
+      default:
+        assert(false && "Unknown audio_source");
+    }
+    if (error != 0) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, instance_id_,
+                   "ViEFileRecorder::StopRecording() failed to stop recording "
+                   "audio");
+    }
+  }
+  CriticalSectionScoped lock(*recorder_cs_);
+  if (voe_file_interface_) {
+    voe_file_interface_->Release();
+    voe_file_interface_ = NULL;
+  }
+
+  if (file_recorder_) {
+    if (file_recorder_->IsRecording()) {
+      int error = file_recorder_->StopRecording();
+      if (error) {
+        return -1;
+      }
+    }
+    FileRecorder::DestroyFileRecorder(file_recorder_);
+    file_recorder_ = NULL;
+  }
+  is_first_frame_recorded_ = false;
+  is_out_stream_started_ = false;
+  return 0;
+}
+
+void ViEFileRecorder::SetFrameDelay(int frame_delay) {
+  CriticalSectionScoped lock(*recorder_cs_);
+  frame_delay_ = frame_delay;
+}
+
+bool ViEFileRecorder::RecordingStarted() {
+  CriticalSectionScoped lock(*recorder_cs_);
+  return file_recorder_ && file_recorder_->IsRecording();
+}
+
+bool ViEFileRecorder::FirstFrameRecorded() {
+  CriticalSectionScoped lock(*recorder_cs_);
+  return is_first_frame_recorded_;
+}
+
+bool ViEFileRecorder::IsRecordingFileFormat(const FileFormats file_format) {
+  CriticalSectionScoped lock(*recorder_cs_);
+  return (file_recorder_->RecordingFileFormat() == file_format) ? true : false;
+}
+
+void ViEFileRecorder::RecordVideoFrame(const VideoFrame& video_frame) {
+  CriticalSectionScoped lock(*recorder_cs_);
+
+  if (file_recorder_ && file_recorder_->IsRecording()) {
+    if (!IsRecordingFileFormat(kFileFormatAviFile))
+      return;
+
+    // Compensate for frame delay in order to get audio/video sync when
+    // recording local video.
+    const WebRtc_UWord32 time_stamp = video_frame.TimeStamp();
+    const WebRtc_Word64 render_time_stamp = video_frame.RenderTimeMs();
+    VideoFrame& unconst_video_frame = const_cast<VideoFrame&>(video_frame);
+    unconst_video_frame.SetTimeStamp(time_stamp - 90 * frame_delay_);
+    unconst_video_frame.SetRenderTime(render_time_stamp - frame_delay_);
+
+    file_recorder_->RecordVideoToFile(unconst_video_frame);
+
+    unconst_video_frame.SetRenderTime(render_time_stamp);
+    unconst_video_frame.SetTimeStamp(time_stamp);
+  }
+}
+
+bool ViEFileRecorder::Write(const void* buf, int len) {
+  if (!is_out_stream_started_)
+    return true;
+
+  // Always 10 ms L16 from VoE.
+  if (len % (2 * 80)) {
+    // Not 2 bytes 80 samples.
+    WEBRTC_TRACE(kTraceError, kTraceVideo, audio_channel_,
+                 "Audio length not supported: %d.", len);
+    return true;
+  }
+
+  AudioFrame audio_frame;
+  WebRtc_UWord16 length_in_samples = len / 2;
+  audio_frame.UpdateFrame(audio_channel_, 0,
+                          static_cast<const WebRtc_Word16*>(buf),
+                          length_in_samples, length_in_samples * 100,
+                          AudioFrame::kUndefined,
+                          AudioFrame::kVadUnknown);
+
+  CriticalSectionScoped lock(*recorder_cs_);
+  if (file_recorder_ && file_recorder_->IsRecording()) {
+    TickTime tick_time = TickTime::Now();
+    file_recorder_->RecordAudioToFile(audio_frame, &tick_time);
+  }
+
+  // Always return true to continue recording.
+  return true;
+}
+
+int ViEFileRecorder::Rewind() {
+  // Not supported!
+  return -1;
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_file_recorder.h b/src/video_engine/vie_file_recorder.h
new file mode 100644
index 0000000..cc964fa
--- /dev/null
+++ b/src/video_engine/vie_file_recorder.h
@@ -0,0 +1,65 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_FILE_RECORDER_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_FILE_RECORDER_H_
+
+#include "modules/utility/interface/file_recorder.h"
+#include "typedefs.h"  // NOLINT
+#include "video_engine/include/vie_file.h"
+#include "voice_engine/include/voe_file.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+
+class ViEFileRecorder : protected OutStream {
+ public:
+  explicit ViEFileRecorder(int channel_id);
+  ~ViEFileRecorder();
+
+  int StartRecording(const char* file_nameUTF8,
+                     const VideoCodec& codec_inst,
+                     AudioSource audio_source, int audio_channel,
+                     const CodecInst& audio_codec_inst,
+                     VoiceEngine* voe_ptr,
+                     const FileFormats file_format = kFileFormatAviFile);
+  int StopRecording();
+
+  void SetFrameDelay(int frame_delay);
+  bool RecordingStarted();
+
+  // Records incoming decoded video frame to file.
+  void RecordVideoFrame(const VideoFrame& video_frame);
+
+ protected:
+  bool FirstFrameRecorded();
+  bool IsRecordingFileFormat(const FileFormats file_format);
+
+  // Implements OutStream.
+  bool Write(const void* buf, int len);
+  int Rewind();
+
+ private:
+  CriticalSectionWrapper* recorder_cs_;
+
+  FileRecorder* file_recorder_;
+  bool is_first_frame_recorded_;
+  bool is_out_stream_started_;
+  int instance_id_;
+  int frame_delay_;
+  int audio_channel_;
+  AudioSource audio_source_;
+  VoEFile* voe_file_interface_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_FILE_RECORDER_H_
diff --git a/src/video_engine/vie_frame_provider_base.cc b/src/video_engine/vie_frame_provider_base.cc
new file mode 100644
index 0000000..134f5c3
--- /dev/null
+++ b/src/video_engine/vie_frame_provider_base.cc
@@ -0,0 +1,196 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_frame_provider_base.h"
+
+#include <algorithm>
+
+#include "modules/interface/module_common_types.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/vie_defines.h"
+
+namespace webrtc {
+
+ViEFrameProviderBase::ViEFrameProviderBase(int Id, int engine_id)
+    : id_(Id),
+      engine_id_(engine_id),
+      provider_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      frame_delay_(0) {
+}
+
+ViEFrameProviderBase::~ViEFrameProviderBase() {
+  if (frame_callbacks_.size() > 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
+                 "FrameCallbacks still exist when Provider deleted %d",
+                 frame_callbacks_.size());
+  }
+
+  for (FrameCallbacks::iterator it = frame_callbacks_.begin();
+       it != frame_callbacks_.end(); ++it) {
+    (*it)->ProviderDestroyed(id_);
+  }
+  frame_callbacks_.clear();
+}
+
+int ViEFrameProviderBase::Id() {
+  return id_;
+}
+
+void ViEFrameProviderBase::DeliverFrame(
+    VideoFrame* video_frame,
+    int num_csrcs,
+    const WebRtc_UWord32 CSRC[kRtpCsrcSize]) {
+#ifdef DEBUG_
+  const TickTime start_process_time = TickTime::Now();
+#endif
+  CriticalSectionScoped cs(provider_cs_.get());
+
+  // Deliver the frame to all registered callbacks.
+  if (frame_callbacks_.size() > 0) {
+    if (frame_callbacks_.size() == 1) {
+      // We don't have to copy the frame.
+      frame_callbacks_.front()->DeliverFrame(id_, video_frame, num_csrcs, CSRC);
+    } else {
+      // Make a copy of the frame for all callbacks.callback
+      for (FrameCallbacks::iterator it = frame_callbacks_.begin();
+           it != frame_callbacks_.end(); ++it) {
+        if (!extra_frame_.get()) {
+          extra_frame_.reset(new VideoFrame());
+        }
+        extra_frame_->CopyFrame(*video_frame);
+        (*it)->DeliverFrame(id_, extra_frame_.get(), num_csrcs, CSRC);
+      }
+    }
+  }
+#ifdef DEBUG_
+  const int process_time =
+      static_cast<int>((TickTime::Now() - start_process_time).Milliseconds());
+  if (process_time > 25) {
+    // Warn if the delivery time is too long.
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
+                 "%s Too long time: %ums", __FUNCTION__, process_time);
+  }
+#endif
+}
+
+void ViEFrameProviderBase::SetFrameDelay(int frame_delay) {
+  CriticalSectionScoped cs(provider_cs_.get());
+  frame_delay_ = frame_delay;
+
+  for (FrameCallbacks::iterator it = frame_callbacks_.begin();
+       it != frame_callbacks_.end(); ++it) {
+    (*it)->DelayChanged(id_, frame_delay);
+  }
+}
+
+int ViEFrameProviderBase::FrameDelay() {
+  return frame_delay_;
+}
+
+int ViEFrameProviderBase::GetBestFormat(int* best_width,
+                                        int* best_height,
+                                        int* best_frame_rate) {
+  int largest_width = 0;
+  int largest_height = 0;
+  int highest_frame_rate = 0;
+
+  CriticalSectionScoped cs(provider_cs_.get());
+  for (FrameCallbacks::iterator it = frame_callbacks_.begin();
+       it != frame_callbacks_.end(); ++it) {
+    int prefered_width = 0;
+    int prefered_height = 0;
+    int prefered_frame_rate = 0;
+    if ((*it)->GetPreferedFrameSettings(&prefered_width, &prefered_height,
+                                        &prefered_frame_rate) == 0) {
+      if (prefered_width > largest_width) {
+        largest_width = prefered_width;
+      }
+      if (prefered_height > largest_height) {
+        largest_height = prefered_height;
+      }
+      if (prefered_frame_rate > highest_frame_rate) {
+        highest_frame_rate = prefered_frame_rate;
+      }
+    }
+  }
+  *best_width = largest_width;
+  *best_height = largest_height;
+  *best_frame_rate = highest_frame_rate;
+  return 0;
+}
+
+int ViEFrameProviderBase::RegisterFrameCallback(
+    int observer_id, ViEFrameCallback* callback_object) {
+  assert(callback_object);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_), "%s(0x%p)",
+               __FUNCTION__, callback_object);
+  {
+    CriticalSectionScoped cs(provider_cs_.get());
+    if (std::find(frame_callbacks_.begin(), frame_callbacks_.end(),
+                  callback_object) != frame_callbacks_.end()) {
+      // This object is already registered.
+      WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
+                   "%s 0x%p already registered", __FUNCTION__,
+                   callback_object);
+      assert(false && "frameObserver already registered");
+      return -1;
+    }
+    frame_callbacks_.push_back(callback_object);
+  }
+  // Report current capture delay.
+  callback_object->DelayChanged(id_, frame_delay_);
+
+  // Notify implementer of this class that the callback list have changed.
+  FrameCallbackChanged();
+  return 0;
+}
+
+int ViEFrameProviderBase::DeregisterFrameCallback(
+    const ViEFrameCallback* callback_object) {
+  assert(callback_object);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_), "%s(0x%p)",
+               __FUNCTION__, callback_object);
+  CriticalSectionScoped cs(provider_cs_.get());
+
+  FrameCallbacks::iterator it = std::find(frame_callbacks_.begin(),
+                                          frame_callbacks_.end(),
+                                          callback_object);
+  if (it == frame_callbacks_.end()) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, ViEId(engine_id_, id_),
+                 "%s 0x%p not found", __FUNCTION__, callback_object);
+    return -1;
+  }
+  frame_callbacks_.erase(it);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_),
+               "%s 0x%p deregistered", __FUNCTION__, callback_object);
+
+  // Notify implementer of this class that the callback list have changed.
+  FrameCallbackChanged();
+  return 0;
+}
+
+bool ViEFrameProviderBase::IsFrameCallbackRegistered(
+    const ViEFrameCallback* callback_object) {
+  assert(callback_object);
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, ViEId(engine_id_, id_),
+               "%s(0x%p)", __FUNCTION__, callback_object);
+
+  CriticalSectionScoped cs(provider_cs_.get());
+  return std::find(frame_callbacks_.begin(), frame_callbacks_.end(),
+                   callback_object) != frame_callbacks_.end();
+}
+
+int ViEFrameProviderBase::NumberOfRegisteredFrameCallbacks() {
+  CriticalSectionScoped cs(provider_cs_.get());
+  return frame_callbacks_.size();
+}
+}  // namespac webrtc
diff --git a/src/video_engine/vie_frame_provider_base.h b/src/video_engine/vie_frame_provider_base.h
new file mode 100644
index 0000000..2f75adb
--- /dev/null
+++ b/src/video_engine/vie_frame_provider_base.h
@@ -0,0 +1,102 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_FRAME_PROVIDER_BASE_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_FRAME_PROVIDER_BASE_H_
+
+#include <vector>
+
+#include "common_types.h"  // NOLINT
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "typedefs.h"  // NOLINT
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class VideoEncoder;
+class VideoFrame;
+
+// ViEFrameCallback shall be implemented by all classes receiving frames from a
+// frame provider.
+class ViEFrameCallback {
+ public:
+  virtual void DeliverFrame(int id,
+                            VideoFrame* video_frame,
+                            int num_csrcs = 0,
+                            const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL) = 0;
+
+  // The capture delay has changed from the provider. |frame_delay| is given in
+  // ms.
+  virtual void DelayChanged(int id, int frame_delay) = 0;
+
+  // Get the width, height and frame rate preferred by this observer.
+  virtual int GetPreferedFrameSettings(int* width,
+                                       int* height,
+                                       int* frame_rate) = 0;
+
+  // ProviderDestroyed is called when the frame is about to be destroyed. There
+  // must not be any more calls to the frame provider after this.
+  virtual void ProviderDestroyed(int id) = 0;
+
+  virtual ~ViEFrameCallback() {}
+};
+
+// ViEFrameProviderBase is a base class that will deliver frames to all
+// registered ViEFrameCallbacks.
+class ViEFrameProviderBase {
+ public:
+  ViEFrameProviderBase(int Id, int engine_id);
+  virtual ~ViEFrameProviderBase();
+
+  // Returns the frame provider id.
+  int Id();
+
+  // Register frame callbacks, i.e. a receiver of the captured frame.
+  virtual int RegisterFrameCallback(int observer_id,
+                                    ViEFrameCallback* callback_object);
+
+  virtual int DeregisterFrameCallback(const ViEFrameCallback* callback_object);
+
+  virtual bool IsFrameCallbackRegistered(
+      const ViEFrameCallback* callback_object);
+
+  int NumberOfRegisteredFrameCallbacks();
+
+  // FrameCallbackChanged
+  // Inherited classes should check for new frame_settings and reconfigure
+  // output if possible.
+  virtual int FrameCallbackChanged() = 0;
+
+ protected:
+  void DeliverFrame(VideoFrame* video_frame,
+                    int num_csrcs = 0,
+                    const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
+  void SetFrameDelay(int frame_delay);
+  int FrameDelay();
+  int GetBestFormat(int* best_width,
+                    int* best_height,
+                    int* best_frame_rate);
+
+  int id_;
+  int engine_id_;
+
+  // Frame callbacks.
+  typedef std::vector<ViEFrameCallback*> FrameCallbacks;
+  FrameCallbacks frame_callbacks_;
+  scoped_ptr<CriticalSectionWrapper> provider_cs_;
+
+ private:
+  scoped_ptr<VideoFrame> extra_frame_;
+  int frame_delay_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_FRAME_PROVIDER_BASE_H_
diff --git a/src/video_engine/vie_image_process_impl.cc b/src/video_engine/vie_image_process_impl.cc
new file mode 100644
index 0000000..d9dfcd4
--- /dev/null
+++ b/src/video_engine/vie_image_process_impl.cc
@@ -0,0 +1,280 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_image_process_impl.h"
+
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/vie_capturer.h"
+#include "video_engine/vie_channel.h"
+#include "video_engine/vie_channel_manager.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_encoder.h"
+#include "video_engine/vie_impl.h"
+#include "video_engine/vie_input_manager.h"
+#include "video_engine/vie_shared_data.h"
+
+namespace webrtc {
+
+ViEImageProcess* ViEImageProcess::GetInterface(VideoEngine* video_engine) {
+#ifdef WEBRTC_VIDEO_ENGINE_IMAGE_PROCESS_API
+  if (!video_engine) {
+    return NULL;
+  }
+  VideoEngineImpl* vie_impl = reinterpret_cast<VideoEngineImpl*>(video_engine);
+  ViEImageProcessImpl* vie_image_process_impl = vie_impl;
+  // Increase ref count.
+  (*vie_image_process_impl)++;
+  return vie_image_process_impl;
+#else
+  return NULL;
+#endif
+}
+
+int ViEImageProcessImpl::Release() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
+               "ViEImageProcess::Release()");
+  // Decrease ref count.
+  (*this)--;
+
+  WebRtc_Word32 ref_count = GetCount();
+  if (ref_count < 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
+                 "ViEImageProcess release too many times");
+    shared_data_->SetLastError(kViEAPIDoesNotExist);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
+               "ViEImageProcess reference count: %d", ref_count);
+  return ref_count;
+}
+
+ViEImageProcessImpl::ViEImageProcessImpl(ViESharedData* shared_data)
+    : shared_data_(shared_data) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViEImageProcessImpl::ViEImageProcessImpl() Ctor");
+}
+
+ViEImageProcessImpl::~ViEImageProcessImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViEImageProcessImpl::~ViEImageProcessImpl() Dtor");
+}
+
+int ViEImageProcessImpl::RegisterCaptureEffectFilter(
+  const int capture_id,
+  ViEEffectFilter& capture_filter) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(capture_id: %d)", __FUNCTION__, capture_id);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* vie_capture = is.Capture(capture_id);
+  if (!vie_capture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViEImageProcessInvalidCaptureId);
+    return -1;
+  }
+  if (vie_capture->RegisterEffectFilter(&capture_filter) != 0) {
+    shared_data_->SetLastError(kViEImageProcessFilterExists);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEImageProcessImpl::DeregisterCaptureEffectFilter(const int capture_id) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(capture_id: %d)", __FUNCTION__, capture_id);
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* vie_capture = is.Capture(capture_id);
+  if (!vie_capture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViEImageProcessInvalidCaptureId);
+    return -1;
+  }
+  if (vie_capture->RegisterEffectFilter(NULL) != 0) {
+    shared_data_->SetLastError(kViEImageProcessFilterDoesNotExist);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEImageProcessImpl::RegisterSendEffectFilter(
+    const int video_channel,
+    ViEEffectFilter& send_filter) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (vie_encoder == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEImageProcessInvalidChannelId);
+    return -1;
+  }
+
+  if (vie_encoder->RegisterEffectFilter(&send_filter) != 0) {
+    shared_data_->SetLastError(kViEImageProcessFilterExists);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEImageProcessImpl::DeregisterSendEffectFilter(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (vie_encoder == NULL) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEImageProcessInvalidChannelId);
+    return -1;
+  }
+  if (vie_encoder->RegisterEffectFilter(NULL) != 0) {
+    shared_data_->SetLastError(kViEImageProcessFilterDoesNotExist);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEImageProcessImpl::RegisterRenderEffectFilter(
+  const int video_channel,
+  ViEEffectFilter& render_filter) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEImageProcessInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->RegisterEffectFilter(&render_filter) != 0) {
+    shared_data_->SetLastError(kViEImageProcessFilterExists);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEImageProcessImpl::DeregisterRenderEffectFilter(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(video_channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEImageProcessInvalidChannelId);
+    return -1;
+  }
+
+  if (vie_channel->RegisterEffectFilter(NULL) != 0) {
+    shared_data_->SetLastError(kViEImageProcessFilterDoesNotExist);
+    return -1;
+  }
+  return 0;
+}
+
+int ViEImageProcessImpl::EnableDeflickering(const int capture_id,
+                                            const bool enable) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(capture_id: %d, enable: %d)", __FUNCTION__, capture_id,
+               enable);
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* vie_capture = is.Capture(capture_id);
+  if (!vie_capture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViEImageProcessInvalidChannelId);
+    return -1;
+  }
+
+  if (vie_capture->EnableDeflickering(enable) != 0) {
+    if (enable) {
+      shared_data_->SetLastError(kViEImageProcessAlreadyEnabled);
+    } else {
+      shared_data_->SetLastError(kViEImageProcessAlreadyDisabled);
+    }
+    return -1;
+  }
+  return 0;
+}
+
+int ViEImageProcessImpl::EnableDenoising(const int capture_id,
+                                         const bool enable) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(capture_id: %d, enable: %d)", __FUNCTION__, capture_id,
+               enable);
+
+  ViEInputManagerScoped is(*(shared_data_->input_manager()));
+  ViECapturer* vie_capture = is.Capture(capture_id);
+  if (!vie_capture) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Capture device %d doesn't exist", __FUNCTION__,
+                 capture_id);
+    shared_data_->SetLastError(kViEImageProcessInvalidCaptureId);
+    return -1;
+  }
+
+  if (vie_capture->EnableDenoising(enable) != 0) {
+    if (enable) {
+      shared_data_->SetLastError(kViEImageProcessAlreadyEnabled);
+    } else {
+      shared_data_->SetLastError(kViEImageProcessAlreadyDisabled);
+    }
+    return -1;
+  }
+  return 0;
+}
+
+int ViEImageProcessImpl::EnableColorEnhancement(const int video_channel,
+                                                const bool enable) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(video_channel: %d, enable: %d)", __FUNCTION__, video_channel,
+               enable);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViEImageProcessInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->EnableColorEnhancement(enable) != 0) {
+    if (enable) {
+      shared_data_->SetLastError(kViEImageProcessAlreadyEnabled);
+    } else {
+      shared_data_->SetLastError(kViEImageProcessAlreadyDisabled);
+    }
+    return -1;
+  }
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_image_process_impl.h b/src/video_engine/vie_image_process_impl.h
new file mode 100644
index 0000000..4a8c5f9
--- /dev/null
+++ b/src/video_engine/vie_image_process_impl.h
@@ -0,0 +1,52 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_IMAGE_PROCESS_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_IMAGE_PROCESS_IMPL_H_
+
+#include "typedefs.h"  // NOLINT
+#include "video_engine/include/vie_image_process.h"
+#include "video_engine/vie_ref_count.h"
+
+namespace webrtc {
+
+class ViESharedData;
+
+class ViEImageProcessImpl
+    : public ViEImageProcess,
+      public ViERefCount {
+ public:
+  // Implements ViEImageProcess.
+  virtual int Release();
+  virtual int RegisterCaptureEffectFilter(const int capture_id,
+                                          ViEEffectFilter& capture_filter);
+  virtual int DeregisterCaptureEffectFilter(const int capture_id);
+  virtual int RegisterSendEffectFilter(const int video_channel,
+                                       ViEEffectFilter& send_filter);
+  virtual int DeregisterSendEffectFilter(const int video_channel);
+  virtual int RegisterRenderEffectFilter(const int video_channel,
+                                         ViEEffectFilter& render_filter);
+  virtual int DeregisterRenderEffectFilter(const int video_channel);
+  virtual int EnableDeflickering(const int capture_id, const bool enable);
+  virtual int EnableDenoising(const int capture_id, const bool enable);
+  virtual int EnableColorEnhancement(const int video_channel,
+                                     const bool enable);
+
+ protected:
+  explicit ViEImageProcessImpl(ViESharedData* shared_data);
+  virtual ~ViEImageProcessImpl();
+
+ private:
+  ViESharedData* shared_data_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_IMAGE_PROCESS_IMPL_H_
diff --git a/src/video_engine/vie_impl.cc b/src/video_engine/vie_impl.cc
new file mode 100644
index 0000000..8805cd6
--- /dev/null
+++ b/src/video_engine/vie_impl.cc
@@ -0,0 +1,232 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_impl.h"
+
+#if (defined(WIN32_) || defined(WIN64_))
+#include <Windows.h>  // For LoadLibrary.
+#include <tchar.h>    // For T_.
+#endif
+
+#include "system_wrappers/interface/trace.h"
+
+#ifdef WEBRTC_ANDROID
+#include "modules/video_capture/main/interface/video_capture_factory.h"
+#include "modules/video_render/main/interface/video_render.h"
+#endif
+
+// Global counter to get an id for each new ViE instance.
+static WebRtc_Word32 g_vie_active_instance_counter = 0;
+
+namespace webrtc {
+
+// extern "C" ensures that GetProcAddress() can find the function address.
+extern "C" {
+  VideoEngine* GetVideoEngine() {
+    VideoEngineImpl* self = new VideoEngineImpl();
+    if (!self) {
+      return NULL;
+    }
+    g_vie_active_instance_counter++;
+    VideoEngine* vie = reinterpret_cast<VideoEngine*>(self);
+    return vie;
+  }
+}
+
+VideoEngine* VideoEngine::Create() {
+#if (defined(WIN32_) || defined(WIN64_))
+  // Load a debug dll, if there is one.
+  HMODULE hmod_ = LoadLibrary(TEXT("VideoEngineTestingDLL.dll"));
+  if (hmod_) {
+    typedef VideoEngine* (*PFNGetVideoEngineLib)(void);
+    PFNGetVideoEngineLib pfn =
+      (PFNGetVideoEngineLib)GetProcAddress(hmod_, "GetVideoEngine");
+    if (pfn) {
+      VideoEngine* self = pfn();
+      return self;
+    } else {
+      assert(false && "Failed to open test dll VideoEngineTestingDLL.dll");
+      return NULL;
+    }
+  }
+#endif
+
+  return GetVideoEngine();
+}
+
+bool VideoEngine::Delete(VideoEngine*& video_engine) {
+  if (!video_engine) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "VideoEngine::Delete - No argument");
+    return false;
+  }
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, g_vie_active_instance_counter,
+               "VideoEngine::Delete(vie = 0x%p)", video_engine);
+  VideoEngineImpl* vie_impl = reinterpret_cast<VideoEngineImpl*>(video_engine);
+
+  // Check all reference counters.
+  ViEBaseImpl* vie_base = vie_impl;
+  if (vie_base->GetCount() > 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "ViEBase ref count: %d", vie_base->GetCount());
+    return false;
+  }
+#ifdef WEBRTC_VIDEO_ENGINE_CAPTURE_API
+  ViECaptureImpl* vie_capture = vie_impl;
+  if (vie_capture->GetCount() > 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "ViECapture ref count: %d", vie_capture->GetCount());
+    return false;
+  }
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_CODEC_API
+  ViECodecImpl* vie_codec = vie_impl;
+  if (vie_codec->GetCount() > 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "ViECodec ref count: %d", vie_codec->GetCount());
+    return false;
+  }
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_ENCRYPTION_API
+  ViEEncryptionImpl* vie_encryption = vie_impl;
+  if (vie_encryption->GetCount() > 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "ViEEncryption ref count: %d", vie_encryption->GetCount());
+    return false;
+  }
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
+  ViEExternalCodecImpl* vie_external_codec = vie_impl;
+  if (vie_external_codec->GetCount() > 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "ViEEncryption ref count: %d", vie_encryption->GetCount());
+    return false;
+  }
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
+  ViEFileImpl* vie_file = vie_impl;
+  if (vie_file->GetCount() > 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "ViEFile ref count: %d", vie_file->GetCount());
+    return false;
+  }
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_IMAGE_PROCESS_API
+  ViEImageProcessImpl* vie_image_process = vie_impl;
+  if (vie_image_process->GetCount() > 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "ViEImageProcess ref count: %d",
+                 vie_image_process->GetCount());
+    return false;
+  }
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_NETWORK_API
+  ViENetworkImpl* vie_network = vie_impl;
+  if (vie_network->GetCount() > 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "ViENetwork ref count: %d", vie_network->GetCount());
+    return false;
+  }
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_RENDER_API
+  ViERenderImpl* vie_render = vie_impl;
+  if (vie_render->GetCount() > 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "ViERender ref count: %d", vie_render->GetCount());
+    return false;
+  }
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_RTP_RTCP_API
+  ViERTP_RTCPImpl* vie_rtp_rtcp = vie_impl;
+  if (vie_rtp_rtcp->GetCount() > 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "ViERTP_RTCP ref count: %d", vie_rtp_rtcp->GetCount());
+    return false;
+  }
+#endif
+
+  delete vie_impl;
+  vie_impl = NULL;
+  video_engine = NULL;
+
+  // Decrease the number of instances.
+  g_vie_active_instance_counter--;
+
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, g_vie_active_instance_counter,
+               "%s: instance deleted. Remaining instances: %d", __FUNCTION__,
+               g_vie_active_instance_counter);
+  return true;
+}
+
+int VideoEngine::SetTraceFile(const char* file_nameUTF8,
+                              const bool add_file_counter) {
+  if (!file_nameUTF8) {
+    return -1;
+  }
+  if (Trace::SetTraceFile(file_nameUTF8, add_file_counter) == -1) {
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, g_vie_active_instance_counter,
+               "SetTraceFileName(file_nameUTF8 = %s, add_file_counter = %d",
+               file_nameUTF8, add_file_counter);
+  return 0;
+}
+
+int VideoEngine::SetTraceFilter(const unsigned int filter) {
+  WebRtc_UWord32 old_filter = 0;
+  Trace::LevelFilter(old_filter);
+
+  if (filter == kTraceNone && old_filter != kTraceNone) {
+    // Do the logging before turning it off.
+    WEBRTC_TRACE(kTraceApiCall, kTraceVideo, g_vie_active_instance_counter,
+                 "SetTraceFilter(filter = 0x%x)", filter);
+  }
+
+  WebRtc_Word32 error = Trace::SetLevelFilter(filter);
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, g_vie_active_instance_counter,
+               "SetTraceFilter(filter = 0x%x)", filter);
+  if (error != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "SetTraceFilter error: %d", error);
+    return -1;
+  }
+  return 0;
+}
+
+int VideoEngine::SetTraceCallback(TraceCallback* callback) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, g_vie_active_instance_counter,
+               "SetTraceCallback(TraceCallback = 0x%p)", callback);
+  return Trace::SetTraceCallback(callback);
+}
+
+int VideoEngine::SetAndroidObjects(void* javaVM, void* javaContext) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, g_vie_active_instance_counter,
+               "SetAndroidObjects()");
+
+#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
+  if (SetCaptureAndroidVM(javaVM, javaContext) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "Could not set capture Android VM");
+    return -1;
+  }
+  if (SetRenderAndroidVM(javaVM) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+                 "Could not set render Android VM");
+    return -1;
+  }
+  return 0;
+#else
+  WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
+               "WEBRTC_ANDROID not defined for VideoEngine::SetAndroidObjects");
+  return -1;
+#endif
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_impl.h b/src/video_engine/vie_impl.h
new file mode 100644
index 0000000..ca5d903
--- /dev/null
+++ b/src/video_engine/vie_impl.h
@@ -0,0 +1,115 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_IMPL_H_
+
+#include "engine_configurations.h"  // NOLINT
+#include "video_engine/vie_defines.h"
+
+#include "video_engine/vie_base_impl.h"
+
+#ifdef WEBRTC_VIDEO_ENGINE_CAPTURE_API
+#include "video_engine/vie_capture_impl.h"
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_CODEC_API
+#include "video_engine/vie_codec_impl.h"
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_ENCRYPTION_API
+#include "video_engine/vie_encryption_impl.h"
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
+#include "video_engine/vie_file_impl.h"
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_IMAGE_PROCESS_API
+#include "video_engine/vie_image_process_impl.h"
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_NETWORK_API
+#include "video_engine/vie_network_impl.h"
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_RENDER_API
+#include "video_engine/vie_render_impl.h"
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_RTP_RTCP_API
+#include "video_engine/vie_rtp_rtcp_impl.h"
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
+#include "video_engine/vie_external_codec_impl.h"
+#endif
+
+namespace webrtc {
+
+class VideoEngineImpl
+    : public ViEBaseImpl
+#ifdef WEBRTC_VIDEO_ENGINE_CODEC_API
+      , public ViECodecImpl
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_CAPTURE_API
+      , public ViECaptureImpl
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_ENCRYPTION_API
+      , public ViEEncryptionImpl
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
+      , public ViEFileImpl
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_IMAGE_PROCESS_API
+      , public ViEImageProcessImpl
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_NETWORK_API
+      , public ViENetworkImpl
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_RENDER_API
+      , public ViERenderImpl
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_RTP_RTCP_API
+      , public ViERTP_RTCPImpl
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
+      , public ViEExternalCodecImpl
+#endif
+{  // NOLINT
+ public:
+  VideoEngineImpl()
+      :
+#ifdef WEBRTC_VIDEO_ENGINE_CODEC_API
+        ViECodecImpl(ViEBaseImpl::shared_data())
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_CAPTURE_API
+        , ViECaptureImpl(ViEBaseImpl::shared_data())
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_ENCRYPTION_API
+        , ViEEncryptionImpl(ViEBaseImpl::shared_data())
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_FILE_API
+        , ViEFileImpl(ViEBaseImpl::shared_data())
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_IMAGE_PROCESS_API
+        , ViEImageProcessImpl(ViEBaseImpl::shared_data())
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_NETWORK_API
+        , ViENetworkImpl(ViEBaseImpl::shared_data())
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_RENDER_API
+        , ViERenderImpl(ViEBaseImpl::shared_data())
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_RTP_RTCP_API
+        , ViERTP_RTCPImpl(ViEBaseImpl::shared_data())
+#endif
+#ifdef WEBRTC_VIDEO_ENGINE_EXTERNAL_CODEC_API
+        , ViEExternalCodecImpl(ViEBaseImpl::shared_data())
+#endif
+  {}
+  virtual ~VideoEngineImpl() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_IMPL_H_
diff --git a/src/video_engine/vie_input_manager.cc b/src/video_engine/vie_input_manager.cc
new file mode 100644
index 0000000..8ff183c
--- /dev/null
+++ b/src/video_engine/vie_input_manager.cc
@@ -0,0 +1,585 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_input_manager.h"
+
+#include <cassert>
+
+#include "common_types.h"  // NOLINT
+#include "modules/video_capture/main/interface/video_capture_factory.h"
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "modules/video_coding/main/interface/video_coding_defines.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/rw_lock_wrapper.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/vie_capturer.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_file_player.h"
+
+namespace webrtc {
+
+ViEInputManager::ViEInputManager(const int engine_id)
+    : engine_id_(engine_id),
+      map_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      vie_frame_provider_map_(),
+      capture_device_info_(NULL),
+      module_process_thread_(NULL) {
+  WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s", __FUNCTION__);
+
+  for (int idx = 0; idx < kViEMaxCaptureDevices; idx++) {
+    free_capture_device_id_[idx] = true;
+  }
+  capture_device_info_ = VideoCaptureFactory::CreateDeviceInfo(
+      ViEModuleId(engine_id_));
+  for (int idx = 0; idx < kViEMaxFilePlayers; idx++) {
+    free_file_id_[idx] = true;
+  }
+}
+
+ViEInputManager::~ViEInputManager() {
+  WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s", __FUNCTION__);
+  while (vie_frame_provider_map_.Size() != 0) {
+    MapItem* item = vie_frame_provider_map_.First();
+    assert(item);
+    ViEFrameProviderBase* frame_provider =
+        static_cast<ViEFrameProviderBase*>(item->GetItem());
+    vie_frame_provider_map_.Erase(item);
+    delete frame_provider;
+  }
+
+  if (capture_device_info_) {
+    delete capture_device_info_;
+    capture_device_info_ = NULL;
+  }
+}
+void ViEInputManager::SetModuleProcessThread(
+    ProcessThread* module_process_thread) {
+  assert(!module_process_thread_);
+  module_process_thread_ = module_process_thread;
+}
+
+int ViEInputManager::NumberOfCaptureDevices() {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_), "%s",
+               __FUNCTION__);
+  assert(capture_device_info_);
+  return capture_device_info_->NumberOfDevices();
+}
+
+int ViEInputManager::GetDeviceName(WebRtc_UWord32 device_number,
+                                   char* device_nameUTF8,
+                                   WebRtc_UWord32 device_name_length,
+                                   char* device_unique_idUTF8,
+                                   WebRtc_UWord32 device_unique_idUTF8Length) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s(device_number: %d)", __FUNCTION__, device_number);
+  assert(capture_device_info_);
+  return capture_device_info_->GetDeviceName(device_number, device_nameUTF8,
+                                             device_name_length,
+                                             device_unique_idUTF8,
+                                             device_unique_idUTF8Length);
+}
+
+int ViEInputManager::NumberOfCaptureCapabilities(
+  const char* device_unique_idUTF8) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_), "%s",
+               __FUNCTION__);
+  assert(capture_device_info_);
+  return capture_device_info_->NumberOfCapabilities(device_unique_idUTF8);
+}
+
+int ViEInputManager::GetCaptureCapability(
+    const char* device_unique_idUTF8,
+    const WebRtc_UWord32 device_capability_number,
+    CaptureCapability& capability) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s(device_unique_idUTF8: %s, device_capability_number: %d)",
+               __FUNCTION__, device_unique_idUTF8, device_capability_number);
+  assert(capture_device_info_);
+  VideoCaptureCapability module_capability;
+  int result = capture_device_info_->GetCapability(device_unique_idUTF8,
+                                                   device_capability_number,
+                                                   module_capability);
+  if (result != 0)
+    return result;
+
+  // Copy from module type to public type.
+  capability.expectedCaptureDelay = module_capability.expectedCaptureDelay;
+  capability.height = module_capability.height;
+  capability.width = module_capability.width;
+  capability.interlaced = module_capability.interlaced;
+  capability.rawType = module_capability.rawType;
+  capability.codecType = module_capability.codecType;
+  capability.maxFPS = module_capability.maxFPS;
+  return result;
+}
+
+int ViEInputManager::GetOrientation(const char* device_unique_idUTF8,
+                                    RotateCapturedFrame& orientation) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s(device_unique_idUTF8: %s,)", __FUNCTION__,
+               device_unique_idUTF8);
+  assert(capture_device_info_);
+  VideoCaptureRotation module_orientation;
+  int result = capture_device_info_->GetOrientation(device_unique_idUTF8,
+                                                    module_orientation);
+  // Copy from module type to public type.
+  switch (module_orientation) {
+    case kCameraRotate0:
+      orientation = RotateCapturedFrame_0;
+      break;
+    case kCameraRotate90:
+      orientation = RotateCapturedFrame_90;
+      break;
+    case kCameraRotate180:
+      orientation = RotateCapturedFrame_180;
+      break;
+    case kCameraRotate270:
+      orientation = RotateCapturedFrame_270;
+      break;
+  }
+  return result;
+}
+
+int ViEInputManager::DisplayCaptureSettingsDialogBox(
+    const char* device_unique_idUTF8,
+    const char* dialog_titleUTF8,
+    void* parent_window,
+    WebRtc_UWord32 positionX,
+    WebRtc_UWord32 positionY) {
+  assert(capture_device_info_);
+  return capture_device_info_->DisplayCaptureSettingsDialogBox(
+           device_unique_idUTF8, dialog_titleUTF8, parent_window, positionX,
+           positionY);
+}
+
+int ViEInputManager::CreateCaptureDevice(
+    const char* device_unique_idUTF8,
+    const WebRtc_UWord32 device_unique_idUTF8Length,
+    int& capture_id) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s(device_unique_id: %s)", __FUNCTION__, device_unique_idUTF8);
+  CriticalSectionScoped cs(map_cs_.get());
+
+  // Make sure the device is not already allocated.
+  for (MapItem* item = vie_frame_provider_map_.First(); item != NULL;
+       item = vie_frame_provider_map_.Next(item)) {
+    // Make sure this is a capture device.
+    if (item->GetId() >= kViECaptureIdBase &&
+        item->GetId() <= kViECaptureIdMax) {
+      ViECapturer* vie_capture = static_cast<ViECapturer*>(item->GetItem());
+      assert(vie_capture);
+      // TODO(mflodman) Can we change input to avoid this cast?
+      const char* device_name =
+          reinterpret_cast<const char*>(vie_capture->CurrentDeviceName());
+      if (strncmp(device_name,
+                  reinterpret_cast<const char*>(device_unique_idUTF8),
+                  strlen(device_name)) == 0) {
+        return kViECaptureDeviceAlreadyAllocated;
+      }
+    }
+  }
+
+  // Make sure the device name is valid.
+  bool found_device = false;
+  for (WebRtc_UWord32 device_index = 0;
+       device_index < capture_device_info_->NumberOfDevices(); ++device_index) {
+    if (device_unique_idUTF8Length > kVideoCaptureUniqueNameLength) {
+      // User's string length is longer than the max.
+      return -1;
+    }
+
+    char found_name[kVideoCaptureDeviceNameLength] = "";
+    char found_unique_name[kVideoCaptureUniqueNameLength] = "";
+    capture_device_info_->GetDeviceName(device_index, found_name,
+                                        kVideoCaptureDeviceNameLength,
+                                        found_unique_name,
+                                        kVideoCaptureUniqueNameLength);
+
+    // TODO(mflodman) Can we change input to avoid this cast?
+    const char* cast_id = reinterpret_cast<const char*>(device_unique_idUTF8);
+    if (strncmp(cast_id, reinterpret_cast<const char*>(found_unique_name),
+                strlen(cast_id)) == 0) {
+      WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideo, ViEId(engine_id_),
+                   "%s:%d Capture device was found by unique ID: %s. Returning",
+                   __FUNCTION__, __LINE__, device_unique_idUTF8);
+      found_device = true;
+      break;
+    }
+  }
+  if (!found_device) {
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s:%d Capture device NOT found by unique ID: %s. Returning",
+                 __FUNCTION__, __LINE__, device_unique_idUTF8);
+    return kViECaptureDeviceDoesNotExist;
+  }
+
+  int newcapture_id = 0;
+  if (GetFreeCaptureId(&newcapture_id) == false) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Maximum supported number of capture devices already in "
+                 "use", __FUNCTION__);
+    return kViECaptureDeviceMaxNoDevicesAllocated;
+  }
+  ViECapturer* vie_capture = ViECapturer::CreateViECapture(
+      newcapture_id, engine_id_, device_unique_idUTF8,
+      device_unique_idUTF8Length, *module_process_thread_);
+  if (!vie_capture) {
+  ReturnCaptureId(newcapture_id);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Could not create capture module for %s", __FUNCTION__,
+                 device_unique_idUTF8);
+    return kViECaptureDeviceUnknownError;
+  }
+
+  if (vie_frame_provider_map_.Insert(newcapture_id, vie_capture) != 0) {
+  ReturnCaptureId(newcapture_id);
+  WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s: Could not insert capture module for %s", __FUNCTION__,
+               device_unique_idUTF8);
+    return kViECaptureDeviceUnknownError;
+  }
+  capture_id = newcapture_id;
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s(device_unique_id: %s, capture_id: %d)", __FUNCTION__,
+               device_unique_idUTF8, capture_id);
+  return 0;
+}
+
+int ViEInputManager::CreateCaptureDevice(VideoCaptureModule* capture_module,
+                                         int& capture_id) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_), "%s",
+               __FUNCTION__);
+
+  CriticalSectionScoped cs(map_cs_.get());
+  int newcapture_id = 0;
+  if (!GetFreeCaptureId(&newcapture_id)) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Maximum supported number of capture devices already in "
+                 "use", __FUNCTION__);
+    return kViECaptureDeviceMaxNoDevicesAllocated;
+  }
+
+  ViECapturer* vie_capture = ViECapturer::CreateViECapture(
+      newcapture_id, engine_id_, capture_module, *module_process_thread_);
+  if (!vie_capture) {
+  ReturnCaptureId(newcapture_id);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Could attach capture module.", __FUNCTION__);
+    return kViECaptureDeviceUnknownError;
+  }
+  if (vie_frame_provider_map_.Insert(newcapture_id, vie_capture) != 0) {
+    ReturnCaptureId(newcapture_id);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Could not insert capture module", __FUNCTION__);
+    return kViECaptureDeviceUnknownError;
+  }
+  capture_id = newcapture_id;
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s, capture_id: %d", __FUNCTION__, capture_id);
+  return 0;
+}
+
+int ViEInputManager::DestroyCaptureDevice(const int capture_id) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s(capture_id: %d)", __FUNCTION__, capture_id);
+  ViECapturer* vie_capture = NULL;
+  {
+    // We need exclusive access to the object to delete it.
+    // Take this write lock first since the read lock is taken before map_cs_.
+    ViEManagerWriteScoped wl(this);
+    CriticalSectionScoped cs(map_cs_.get());
+
+    vie_capture = ViECapturePtr(capture_id);
+    if (!vie_capture) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                   "%s(capture_id: %d) - No such capture device id",
+                   __FUNCTION__, capture_id);
+      return -1;
+    }
+    WebRtc_UWord32 num_callbacks =
+        vie_capture->NumberOfRegisteredFrameCallbacks();
+    if (num_callbacks > 0) {
+      WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo,
+                   ViEId(engine_id_), "%s(capture_id: %d) - %u registered "
+                   "callbacks when destroying capture device",
+                   __FUNCTION__, capture_id, num_callbacks);
+    }
+    vie_frame_provider_map_.Erase(capture_id);
+    ReturnCaptureId(capture_id);
+    // Leave cs before deleting the capture object. This is because deleting the
+    // object might cause deletions of renderers so we prefer to not have a lock
+    // at that time.
+  }
+  delete vie_capture;
+  return 0;
+}
+
+int ViEInputManager::CreateExternalCaptureDevice(
+    ViEExternalCapture*& external_capture,
+    int& capture_id) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_), "%s",
+               __FUNCTION__);
+  CriticalSectionScoped cs(map_cs_.get());
+
+  int newcapture_id = 0;
+  if (GetFreeCaptureId(&newcapture_id) == false) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Maximum supported number of capture devices already in "
+                 "use", __FUNCTION__);
+    return kViECaptureDeviceMaxNoDevicesAllocated;
+  }
+
+  ViECapturer* vie_capture = ViECapturer::CreateViECapture(
+      newcapture_id, engine_id_, NULL, 0, *module_process_thread_);
+  if (!vie_capture) {
+    ReturnCaptureId(newcapture_id);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Could not create capture module for external capture.",
+                 __FUNCTION__);
+    return kViECaptureDeviceUnknownError;
+  }
+
+  if (vie_frame_provider_map_.Insert(newcapture_id, vie_capture) != 0) {
+    ReturnCaptureId(newcapture_id);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Could not insert capture module for external capture.",
+                 __FUNCTION__);
+    return kViECaptureDeviceUnknownError;
+  }
+  capture_id = newcapture_id;
+  external_capture = vie_capture;
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s, capture_id: %d)", __FUNCTION__, capture_id);
+  return 0;
+}
+
+int ViEInputManager::CreateFilePlayer(const char* file_nameUTF8,
+                                      const bool loop,
+                                      const webrtc::FileFormats file_format,
+                                      VoiceEngine* voe_ptr, int& file_id) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s(device_unique_id: %s)", __FUNCTION__, file_nameUTF8);
+
+  CriticalSectionScoped cs(map_cs_.get());
+  int new_file_id = 0;
+  if (GetFreeFileId(&new_file_id) == false) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Maximum supported number of file players already in use",
+                 __FUNCTION__);
+    return kViEFileMaxNoOfFilesOpened;
+  }
+
+  ViEFilePlayer* vie_file_player = ViEFilePlayer::CreateViEFilePlayer(
+      new_file_id, engine_id_, file_nameUTF8, loop, file_format, voe_ptr);
+  if (!vie_file_player) {
+    ReturnFileId(new_file_id);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Could not open file %s for playback", __FUNCTION__,
+                 file_nameUTF8);
+    return kViEFileUnknownError;
+  }
+
+  if (vie_frame_provider_map_.Insert(new_file_id, vie_file_player) != 0) {
+    ReturnCaptureId(new_file_id);
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Could not insert file player for %s", __FUNCTION__,
+                 file_nameUTF8);
+    delete vie_file_player;
+    return kViEFileUnknownError;
+  }
+
+  file_id = new_file_id;
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s(filename: %s, file_id: %d)", __FUNCTION__, file_nameUTF8,
+               new_file_id);
+  return 0;
+}
+
+int ViEInputManager::DestroyFilePlayer(int file_id) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s(file_id: %d)", __FUNCTION__, file_id);
+
+  ViEFilePlayer* vie_file_player = NULL;
+  {
+    // We need exclusive access to the object to delete it.
+    // Take this write lock first since the read lock is taken before map_cs_.
+    ViEManagerWriteScoped wl(this);
+
+    CriticalSectionScoped cs(map_cs_.get());
+    vie_file_player = ViEFilePlayerPtr(file_id);
+    if (!vie_file_player) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                   "%s(file_id: %d) - No such file player", __FUNCTION__,
+                   file_id);
+      return -1;
+    }
+    int num_callbacks = vie_file_player->NumberOfRegisteredFrameCallbacks();
+    if (num_callbacks > 0) {
+      WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo,
+                   ViEId(engine_id_), "%s(file_id: %d) - %u registered "
+                   "callbacks when destroying file player", __FUNCTION__,
+                   file_id, num_callbacks);
+    }
+    vie_frame_provider_map_.Erase(file_id);
+    ReturnFileId(file_id);
+    // Leave cs before deleting the file object. This is because deleting the
+    // object might cause deletions of renderers so we prefer to not have a lock
+    // at that time.
+  }
+  delete vie_file_player;
+  return 0;
+}
+
+bool ViEInputManager::GetFreeCaptureId(int* freecapture_id) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_), "%s",
+               __FUNCTION__);
+  for (int id = 0; id < kViEMaxCaptureDevices; id++) {
+    if (free_capture_device_id_[id]) {
+      // We found a free capture device id.
+      free_capture_device_id_[id] = false;
+      *freecapture_id = id + kViECaptureIdBase;
+      WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+                   "%s: new id: %d", __FUNCTION__, *freecapture_id);
+      return true;
+    }
+  }
+  return false;
+}
+
+void ViEInputManager::ReturnCaptureId(int capture_id) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s(%d)", __FUNCTION__, capture_id);
+  CriticalSectionScoped cs(map_cs_.get());
+  if (capture_id >= kViECaptureIdBase &&
+      capture_id < kViEMaxCaptureDevices + kViECaptureIdBase) {
+    free_capture_device_id_[capture_id - kViECaptureIdBase] = true;
+  }
+  return;
+}
+
+bool ViEInputManager::GetFreeFileId(int* free_file_id) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_), "%s",
+               __FUNCTION__);
+
+  for (int id = 0; id < kViEMaxFilePlayers; id++) {
+    if (free_file_id_[id]) {
+      // We found a free capture device id.
+      free_file_id_[id] = false;
+      *free_file_id = id + kViEFileIdBase;
+      WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+                   "%s: new id: %d", __FUNCTION__, *free_file_id);
+      return true;
+    }
+  }
+  return false;
+}
+
+void ViEInputManager::ReturnFileId(int file_id) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s(%d)", __FUNCTION__, file_id);
+
+  CriticalSectionScoped cs(map_cs_.get());
+  if (file_id >= kViEFileIdBase &&
+      file_id < kViEMaxFilePlayers + kViEFileIdBase) {
+    free_file_id_[file_id - kViEFileIdBase] = true;
+  }
+  return;
+}
+
+ViEFrameProviderBase* ViEInputManager::ViEFrameProvider(
+    const ViEFrameCallback* capture_observer) const {
+  assert(capture_observer);
+  CriticalSectionScoped cs(map_cs_.get());
+
+  for (MapItem* provider_item = vie_frame_provider_map_.First(); provider_item
+  != NULL; provider_item = vie_frame_provider_map_.Next(provider_item)) {
+    ViEFrameProviderBase* vie_frame_provider =
+        static_cast<ViEFrameProviderBase*>(provider_item->GetItem());
+    assert(vie_frame_provider != NULL);
+
+    if (vie_frame_provider->IsFrameCallbackRegistered(capture_observer)) {
+      // We found it.
+      return vie_frame_provider;
+    }
+  }
+  // No capture device set for this channel.
+  return NULL;
+}
+
+ViEFrameProviderBase* ViEInputManager::ViEFrameProvider(int provider_id) const {
+  CriticalSectionScoped cs(map_cs_.get());
+  MapItem* map_item = vie_frame_provider_map_.Find(provider_id);
+  if (!map_item) {
+    return NULL;
+  }
+  ViEFrameProviderBase* vie_frame_provider =
+      static_cast<ViEFrameProviderBase*>(map_item->GetItem());
+  return vie_frame_provider;
+}
+
+ViECapturer* ViEInputManager::ViECapturePtr(int capture_id) const {
+  if (!(capture_id >= kViECaptureIdBase &&
+        capture_id <= kViECaptureIdBase + kViEMaxCaptureDevices))
+    return NULL;
+
+  CriticalSectionScoped cs(map_cs_.get());
+  MapItem* map_item = vie_frame_provider_map_.Find(capture_id);
+  if (!map_item) {
+    return NULL;
+  }
+  ViECapturer* vie_capture = static_cast<ViECapturer*>(map_item->GetItem());
+  return vie_capture;
+}
+
+ViEFilePlayer* ViEInputManager::ViEFilePlayerPtr(int file_id) const {
+  if (file_id < kViEFileIdBase || file_id > kViEFileIdMax) {
+    return NULL;
+  }
+  CriticalSectionScoped cs(map_cs_.get());
+  MapItem* map_item = vie_frame_provider_map_.Find(file_id);
+  if (!map_item) {
+    return NULL;
+  }
+  ViEFilePlayer* vie_file_player =
+      static_cast<ViEFilePlayer*>(map_item->GetItem());
+  return vie_file_player;
+}
+
+ViEInputManagerScoped::ViEInputManagerScoped(
+    const ViEInputManager& vie_input_manager)
+    : ViEManagerScopedBase(vie_input_manager) {
+}
+
+ViECapturer* ViEInputManagerScoped::Capture(int capture_id) const {
+  return static_cast<const ViEInputManager*>(vie_manager_)->ViECapturePtr(
+      capture_id);
+}
+
+ViEFrameProviderBase* ViEInputManagerScoped::FrameProvider(
+    const ViEFrameCallback* capture_observer) const {
+  return static_cast<const ViEInputManager*>(vie_manager_)->ViEFrameProvider(
+      capture_observer);
+}
+
+ViEFrameProviderBase* ViEInputManagerScoped::FrameProvider(
+    int provider_id) const {
+  return static_cast<const ViEInputManager*>(vie_manager_)->ViEFrameProvider(
+      provider_id);
+}
+
+ViEFilePlayer* ViEInputManagerScoped::FilePlayer(int file_id) const {
+  return static_cast<const ViEInputManager*>(vie_manager_)->ViEFilePlayerPtr(
+      file_id);
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_input_manager.h b/src/video_engine/vie_input_manager.h
new file mode 100644
index 0000000..465472d
--- /dev/null
+++ b/src/video_engine/vie_input_manager.h
@@ -0,0 +1,140 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_INPUT_MANAGER_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_INPUT_MANAGER_H_
+
+#include "modules/video_capture/main/interface/video_capture.h"
+#include "system_wrappers/interface/map_wrapper.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "typedefs.h"  // NOLINT
+#include "video_engine/include/vie_capture.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_frame_provider_base.h"
+#include "video_engine/vie_manager_base.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class ProcessThread;
+class RWLockWrapper;
+class ViECapturer;
+class ViEExternalCapture;
+class ViEFilePlayer;
+class VoiceEngine;
+
+class ViEInputManager : private ViEManagerBase {
+  friend class ViEInputManagerScoped;
+ public:
+  explicit ViEInputManager(int engine_id);
+  ~ViEInputManager();
+
+  void SetModuleProcessThread(ProcessThread* module_process_thread);
+
+  // Returns number of capture devices.
+  int NumberOfCaptureDevices();
+
+  // Gets name and id for a capture device.
+  int GetDeviceName(WebRtc_UWord32 device_number,
+                    char* device_nameUTF8,
+                    WebRtc_UWord32 device_name_length,
+                    char* device_unique_idUTF8,
+                    WebRtc_UWord32 device_unique_idUTF8Length);
+
+  // Returns the number of capture capabilities for a specified device.
+  int NumberOfCaptureCapabilities(const char* device_unique_idUTF8);
+
+  // Gets a specific capability for a capture device.
+  int GetCaptureCapability(const char* device_unique_idUTF8,
+                           const WebRtc_UWord32 device_capability_number,
+                           CaptureCapability& capability);
+
+  // Show OS specific Capture settings.
+  int DisplayCaptureSettingsDialogBox(const char* device_unique_idUTF8,
+                                      const char* dialog_titleUTF8,
+                                      void* parent_window,
+                                      WebRtc_UWord32 positionX,
+                                      WebRtc_UWord32 positionY);
+  int GetOrientation(const char* device_unique_idUTF8,
+                     RotateCapturedFrame& orientation);
+
+  // Creates a capture module for the specified capture device and assigns
+  // a capture device id for the device.
+  // Return zero on success, ViEError on failure.
+  int CreateCaptureDevice(const char* device_unique_idUTF8,
+                          const WebRtc_UWord32 device_unique_idUTF8Length,
+                          int& capture_id);
+  int CreateCaptureDevice(VideoCaptureModule* capture_module,
+                          int& capture_id);
+  int CreateExternalCaptureDevice(ViEExternalCapture*& external_capture,
+                                  int& capture_id);
+  int DestroyCaptureDevice(int capture_id);
+
+  int CreateFilePlayer(const char* file_nameUTF8, const bool loop,
+                       const FileFormats file_format,
+                       VoiceEngine* voe_ptr,
+                       int& file_id);
+  int DestroyFilePlayer(int file_id);
+
+ private:
+  // Gets and allocates a free capture device id. Assumed protected by caller.
+  bool GetFreeCaptureId(int* freecapture_id);
+
+  // Frees a capture id assigned in GetFreeCaptureId.
+  void ReturnCaptureId(int capture_id);
+
+  // Gets and allocates a free file id. Assumed protected by caller.
+  bool GetFreeFileId(int* free_file_id);
+
+  // Frees a file id assigned in GetFreeFileId.
+  void ReturnFileId(int file_id);
+
+  // Gets the ViEFrameProvider for this capture observer.
+  ViEFrameProviderBase* ViEFrameProvider(
+      const ViEFrameCallback* capture_observer) const;
+
+  // Gets the ViEFrameProvider for this capture observer.
+  ViEFrameProviderBase* ViEFrameProvider(int provider_id) const;
+
+  // Gets the ViECapturer for the capture device id.
+  ViECapturer* ViECapturePtr(int capture_id) const;
+
+  // Gets the ViEFilePlayer for this file_id.
+  ViEFilePlayer* ViEFilePlayerPtr(int file_id) const;
+
+  int engine_id_;
+  scoped_ptr<CriticalSectionWrapper> map_cs_;
+  MapWrapper vie_frame_provider_map_;
+
+  // Capture devices.
+  VideoCaptureModule::DeviceInfo* capture_device_info_;
+  int free_capture_device_id_[kViEMaxCaptureDevices];
+
+  // File Players.
+  int free_file_id_[kViEMaxFilePlayers];
+
+  ProcessThread* module_process_thread_;  // Weak.
+};
+
+// Provides protected access to ViEInputManater.
+class ViEInputManagerScoped: private ViEManagerScopedBase {
+ public:
+  explicit ViEInputManagerScoped(const ViEInputManager& vie_input_manager);
+
+  ViECapturer* Capture(int capture_id) const;
+  ViEFilePlayer* FilePlayer(int file_id) const;
+  ViEFrameProviderBase* FrameProvider(int provider_id) const;
+  ViEFrameProviderBase* FrameProvider(const ViEFrameCallback*
+                                      capture_observer) const;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_INPUT_MANAGER_H_
diff --git a/src/video_engine/vie_manager_base.cc b/src/video_engine/vie_manager_base.cc
new file mode 100644
index 0000000..7b13227
--- /dev/null
+++ b/src/video_engine/vie_manager_base.cc
@@ -0,0 +1,72 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <assert.h>
+
+#include "system_wrappers/interface/rw_lock_wrapper.h"
+#include "video_engine/vie_manager_base.h"
+
+namespace webrtc {
+
+ViEManagerBase::ViEManagerBase()
+    : instance_rwlock_(*RWLockWrapper::CreateRWLock()) {
+}
+
+ViEManagerBase::~ViEManagerBase() {
+  delete &instance_rwlock_;
+}
+
+void ViEManagerBase::ReadLockManager() const {
+  instance_rwlock_.AcquireLockShared();
+}
+
+void ViEManagerBase::ReleaseLockManager() const {
+  instance_rwlock_.ReleaseLockShared();
+}
+
+void ViEManagerBase::WriteLockManager() {
+  instance_rwlock_.AcquireLockExclusive();
+}
+
+void ViEManagerBase::ReleaseWriteLockManager() {
+  instance_rwlock_.ReleaseLockExclusive();
+}
+
+ViEManagerScopedBase::ViEManagerScopedBase(const ViEManagerBase& ViEManagerBase)
+    : vie_manager_(&ViEManagerBase),
+      ref_count_(0) {
+  vie_manager_->ReadLockManager();
+}
+
+ViEManagerScopedBase::~ViEManagerScopedBase() {
+  assert(ref_count_ == 0);
+  vie_manager_->ReleaseLockManager();
+}
+
+ViEManagerWriteScoped::ViEManagerWriteScoped(ViEManagerBase* vie_manager)
+    : vie_manager_(vie_manager) {
+  vie_manager_->WriteLockManager();
+}
+
+ViEManagerWriteScoped::~ViEManagerWriteScoped() {
+  vie_manager_->ReleaseWriteLockManager();
+}
+
+ViEManagedItemScopedBase::ViEManagedItemScopedBase(
+    ViEManagerScopedBase* vie_scoped_manager)
+    : vie_scoped_manager_(vie_scoped_manager) {
+  vie_scoped_manager_->ref_count_++;
+}
+
+ViEManagedItemScopedBase::~ViEManagedItemScopedBase() {
+  vie_scoped_manager_->ref_count_--;
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_manager_base.h b/src/video_engine/vie_manager_base.h
new file mode 100644
index 0000000..088a2b8
--- /dev/null
+++ b/src/video_engine/vie_manager_base.h
@@ -0,0 +1,75 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_MANAGER_BASE_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_MANAGER_BASE_H_
+
+namespace webrtc {
+
+class RWLockWrapper;
+
+class ViEManagerBase {
+  friend class ViEManagedItemScopedBase;
+  friend class ViEManagerScopedBase;
+  friend class ViEManagerWriteScoped;
+ public:
+  ViEManagerBase();
+  ~ViEManagerBase();
+
+ private:
+  // Exclusive lock, used by ViEManagerWriteScoped.
+  void WriteLockManager();
+
+  // Releases exclusive lock, used by ViEManagerWriteScoped.
+  void ReleaseWriteLockManager();
+
+  // Increases lock count, used by ViEManagerScopedBase.
+  void ReadLockManager() const;
+
+  // Releases the lock count, used by ViEManagerScopedBase.
+  void ReleaseLockManager() const;
+
+  RWLockWrapper& instance_rwlock_;
+};
+
+class ViEManagerWriteScoped {
+ public:
+  explicit ViEManagerWriteScoped(ViEManagerBase* vie_manager);
+  ~ViEManagerWriteScoped();
+
+ private:
+  ViEManagerBase* vie_manager_;
+};
+
+class ViEManagerScopedBase {
+  friend class ViEManagedItemScopedBase;
+ public:
+  explicit ViEManagerScopedBase(const ViEManagerBase& vie_manager);
+  ~ViEManagerScopedBase();
+
+ protected:
+  const ViEManagerBase* vie_manager_;
+
+ private:
+  int ref_count_;
+};
+
+class ViEManagedItemScopedBase {
+ public:
+  explicit ViEManagedItemScopedBase(ViEManagerScopedBase* vie_scoped_manager);
+  ~ViEManagedItemScopedBase();
+
+ protected:
+  ViEManagerScopedBase* vie_scoped_manager_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_MANAGER_BASE_H_
diff --git a/src/video_engine/vie_network_impl.cc b/src/video_engine/vie_network_impl.cc
new file mode 100644
index 0000000..378d08f
--- /dev/null
+++ b/src/video_engine/vie_network_impl.cc
@@ -0,0 +1,803 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_network_impl.h"
+
+#include <stdio.h>
+#if (defined(WIN32_) || defined(WIN64_))
+#include <qos.h>
+#endif
+
+#include "engine_configurations.h"  // NOLINT
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/vie_channel.h"
+#include "video_engine/vie_channel_manager.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_encoder.h"
+#include "video_engine/vie_impl.h"
+#include "video_engine/vie_shared_data.h"
+
+namespace webrtc {
+
+ViENetwork* ViENetwork::GetInterface(VideoEngine* video_engine) {
+#ifdef WEBRTC_VIDEO_ENGINE_NETWORK_API
+  if (!video_engine) {
+    return NULL;
+  }
+  VideoEngineImpl* vie_impl = reinterpret_cast<VideoEngineImpl*>(video_engine);
+  ViENetworkImpl* vie_networkImpl = vie_impl;
+  // Increase ref count.
+  (*vie_networkImpl)++;
+  return vie_networkImpl;
+#else
+  return NULL;
+#endif
+}
+
+int ViENetworkImpl::Release() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
+               "ViENetwork::Release()");
+  // Decrease ref count.
+  (*this)--;
+
+  WebRtc_Word32 ref_count = GetCount();
+  if (ref_count < 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
+                 "ViENetwork release too many times");
+    shared_data_->SetLastError(kViEAPIDoesNotExist);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
+               "ViENetwork reference count: %d", ref_count);
+  return ref_count;
+}
+
+ViENetworkImpl::ViENetworkImpl(ViESharedData* shared_data)
+    : shared_data_(shared_data) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViENetworkImpl::ViENetworkImpl() Ctor");
+}
+
+ViENetworkImpl::~ViENetworkImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViENetworkImpl::~ViENetworkImpl() Dtor");
+}
+
+int ViENetworkImpl::SetLocalReceiver(const int video_channel,
+                                     const uint16_t rtp_port,
+                                     const uint16_t rtcp_port,
+                                     const char* ip_address) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, rtp_port: %u, rtcp_port: %u, ip_address: %s)",
+               __FUNCTION__, video_channel, rtp_port, rtcp_port, ip_address);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    // The channel doesn't exists.
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+
+  if (vie_channel->Receiving()) {
+    shared_data_->SetLastError(kViENetworkAlreadyReceiving);
+    return -1;
+  }
+  if (vie_channel->SetLocalReceiver(rtp_port, rtcp_port, ip_address) != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::GetLocalReceiver(const int video_channel,
+                                     uint16_t& rtp_port,
+                                     uint16_t& rtcp_port,
+                                     char* ip_address) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->GetLocalReceiver(&rtp_port, &rtcp_port, ip_address) != 0) {
+    shared_data_->SetLastError(kViENetworkLocalReceiverNotSet);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::SetSendDestination(const int video_channel,
+                                       const char* ip_address,
+                                       const uint16_t rtp_port,
+                                       const uint16_t rtcp_port,
+                                       const uint16_t source_rtp_port,
+                                       const uint16_t source_rtcp_port) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, ip_address: %s, rtp_port: %u, rtcp_port: %u, "
+               "sourceRtpPort: %u, source_rtcp_port: %u)",
+               __FUNCTION__, video_channel, ip_address, rtp_port, rtcp_port,
+               source_rtp_port, source_rtcp_port);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s Channel doesn't exist", __FUNCTION__);
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->Sending()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s Channel already sending.", __FUNCTION__);
+    shared_data_->SetLastError(kViENetworkAlreadySending);
+    return -1;
+  }
+  if (vie_channel->SetSendDestination(ip_address, rtp_port, rtcp_port,
+                                          source_rtp_port,
+                                          source_rtcp_port) != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::GetSendDestination(const int video_channel,
+                                       char* ip_address,
+                                       uint16_t& rtp_port,
+                                       uint16_t& rtcp_port,
+                                       uint16_t& source_rtp_port,
+                                       uint16_t& source_rtcp_port) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->GetSendDestination(ip_address, &rtp_port, &rtcp_port,
+                                      &source_rtp_port,
+                                      &source_rtcp_port) != 0) {
+    shared_data_->SetLastError(kViENetworkDestinationNotSet);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::RegisterSendTransport(const int video_channel,
+                                          Transport& transport) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s Channel doesn't exist", __FUNCTION__);
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->Sending()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s Channel already sending.", __FUNCTION__);
+    shared_data_->SetLastError(kViENetworkAlreadySending);
+    return -1;
+  }
+  if (vie_channel->RegisterSendTransport(&transport) != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::DeregisterSendTransport(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s Channel doesn't exist", __FUNCTION__);
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->Sending()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s Channel already sending", __FUNCTION__);
+    shared_data_->SetLastError(kViENetworkAlreadySending);
+    return -1;
+  }
+  if (vie_channel->DeregisterSendTransport() != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::ReceivedRTPPacket(const int video_channel, const void* data,
+                                      const int length) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, data: -, length: %d)", __FUNCTION__,
+               video_channel, length);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    // The channel doesn't exists
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  return vie_channel->ReceivedRTPPacket(data, length);
+}
+
+int ViENetworkImpl::ReceivedRTCPPacket(const int video_channel,
+                                       const void* data, const int length) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, data: -, length: %d)", __FUNCTION__,
+               video_channel, length);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  return vie_channel->ReceivedRTCPPacket(data, length);
+}
+
+int ViENetworkImpl::GetSourceInfo(const int video_channel,
+                                  uint16_t& rtp_port,
+                                  uint16_t& rtcp_port, char* ip_address,
+                                  unsigned int ip_address_length) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->GetSourceInfo(&rtp_port, &rtcp_port, ip_address,
+                                 ip_address_length) != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::GetLocalIP(char ip_address[64], bool ipv6) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s( ip_address, ipV6: %d)", __FUNCTION__, ipv6);
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+
+  if (!ip_address) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: No argument", __FUNCTION__);
+    shared_data_->SetLastError(kViENetworkInvalidArgument);
+    return -1;
+  }
+
+  WebRtc_UWord8 num_socket_threads = 1;
+  UdpTransport* socket_transport = UdpTransport::Create(
+      ViEModuleId(shared_data_->instance_id(), -1), num_socket_threads);
+
+  if (!socket_transport) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s: Could not create socket module", __FUNCTION__);
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+
+  char local_ip_address[64];
+  if (ipv6) {
+    char local_ip[16];
+    if (socket_transport->LocalHostAddressIPV6(local_ip) != 0) {
+      UdpTransport::Destroy(socket_transport);
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                   "%s: Could not get local IP", __FUNCTION__);
+      shared_data_->SetLastError(kViENetworkUnknownError);
+      return -1;
+    }
+    // Convert 128-bit address to character string (a:b:c:d:e:f:g:h).
+    // TODO(mflodman) Change sprintf.
+    sprintf(local_ip_address,  // NOLINT
+            "%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x:"
+            "%.2x%.2x",
+            local_ip[0], local_ip[1], local_ip[2], local_ip[3], local_ip[4],
+            local_ip[5], local_ip[6], local_ip[7], local_ip[8], local_ip[9],
+            local_ip[10], local_ip[11], local_ip[12], local_ip[13],
+            local_ip[14], local_ip[15]);
+  } else {
+    WebRtc_UWord32 local_ip = 0;
+    if (socket_transport->LocalHostAddress(local_ip) != 0) {
+      UdpTransport::Destroy(socket_transport);
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                   "%s: Could not get local IP", __FUNCTION__);
+      shared_data_->SetLastError(kViENetworkUnknownError);
+      return -1;
+    }
+    // Convert 32-bit address to character string (x.y.z.w).
+    // TODO(mflodman) Change sprintf.
+    sprintf(local_ip_address, "%d.%d.%d.%d",  // NOLINT
+            static_cast<int>((local_ip >> 24) & 0x0ff),
+            static_cast<int>((local_ip >> 16) & 0x0ff),
+            static_cast<int>((local_ip >> 8) & 0x0ff),
+            static_cast<int>(local_ip & 0x0ff));
+  }
+  strncpy(ip_address, local_ip_address, sizeof(local_ip_address));
+  UdpTransport::Destroy(socket_transport);
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s: local ip = %s", __FUNCTION__, local_ip_address);
+  return 0;
+#else
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s: not available for external transport", __FUNCTION__);
+
+  return -1;
+#endif
+}
+
+int ViENetworkImpl::EnableIPv6(int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->EnableIPv6() != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+bool ViENetworkImpl::IsIPv6Enabled(int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return false;
+  }
+  return vie_channel->IsIPv6Enabled();
+}
+
+int ViENetworkImpl::SetSourceFilter(const int video_channel,
+                                    const uint16_t rtp_port,
+                                    const uint16_t rtcp_port,
+                                    const char* ip_address) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, rtp_port: %u, rtcp_port: %u, ip_address: %s)",
+               __FUNCTION__, video_channel, rtp_port, rtcp_port, ip_address);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->SetSourceFilter(rtp_port, rtcp_port, ip_address) != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::GetSourceFilter(const int video_channel,
+                                    uint16_t& rtp_port,
+                                    uint16_t& rtcp_port,
+                                    char* ip_address) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->GetSourceFilter(&rtp_port, &rtcp_port, ip_address) != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::SetSendToS(const int video_channel, const int DSCP,
+                               const bool use_set_sockOpt = false) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, DSCP: %d, use_set_sockOpt: %d)", __FUNCTION__,
+               video_channel, DSCP, use_set_sockOpt);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "   force use_set_sockopt=true since there is no alternative"
+               " implementation");
+  if (vie_channel->SetToS(DSCP, true) != 0) {
+#else
+  if (vie_channel->SetToS(DSCP, use_set_sockOpt) != 0) {
+#endif
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::GetSendToS(const int video_channel,
+                               int& DSCP,  // NOLINT
+                               bool& use_set_sockOpt) {  // NOLINT
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->GetToS(&DSCP, &use_set_sockOpt) != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::SetSendGQoS(const int video_channel, const bool enable,
+                                const int service_type,
+                                const int overrideDSCP) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, enable: %d, service_type: %d, "
+               "overrideDSCP: %d)", __FUNCTION__, video_channel, enable,
+               service_type, overrideDSCP);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+
+#if (defined(WIN32_) || defined(WIN64_))
+  // Sanity check. We might crash if testing and relying on an OS socket error.
+  if (enable &&
+      (service_type != SERVICETYPE_BESTEFFORT) &&
+      (service_type != SERVICETYPE_CONTROLLEDLOAD) &&
+      (service_type != SERVICETYPE_GUARANTEED) &&
+      (service_type != SERVICETYPE_QUALITATIVE)) {
+    WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: service type %d not supported", __FUNCTION__,
+                 video_channel, service_type);
+    shared_data_->SetLastError(kViENetworkServiceTypeNotSupported);
+    return -1;
+  }
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  VideoCodec video_codec;
+  if (vie_encoder->GetEncoder(video_codec) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Could not get max bitrate for the channel",
+                 __FUNCTION__);
+    shared_data_->SetLastError(kViENetworkSendCodecNotSet);
+    return -1;
+  }
+  if (vie_channel->SetSendGQoS(enable, service_type, video_codec.maxBitrate,
+                               overrideDSCP) != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+#else
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s: Not supported", __FUNCTION__);
+  shared_data_->SetLastError(kViENetworkNotSupported);
+  return -1;
+#endif
+}
+
+int ViENetworkImpl::GetSendGQoS(const int video_channel,
+                                bool& enabled,  // NOLINT
+                                int& service_type,  // NOLINT
+                                int& overrideDSCP) {  // NOLINT
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->GetSendGQoS(&enabled, &service_type, &overrideDSCP) != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::SetMTU(int video_channel, unsigned int mtu) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, mtu: %u)", __FUNCTION__, video_channel, mtu);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->SetMTU(mtu) != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::SetPacketTimeoutNotification(const int video_channel,
+                                                 bool enable,
+                                                 int timeout_seconds) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, enable: %d, timeout_seconds: %u)",
+               __FUNCTION__, video_channel, enable, timeout_seconds);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->SetPacketTimeoutNotification(enable,
+                                                timeout_seconds) != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::RegisterObserver(const int video_channel,
+                                     ViENetworkObserver& observer) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->RegisterNetworkObserver(&observer) != 0) {
+    shared_data_->SetLastError(kViENetworkObserverAlreadyRegistered);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::DeregisterObserver(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (!vie_channel->NetworkObserverRegistered()) {
+    shared_data_->SetLastError(kViENetworkObserverNotRegistered);
+    return -1;
+  }
+  return vie_channel->RegisterNetworkObserver(NULL);
+}
+
+int ViENetworkImpl::SetPeriodicDeadOrAliveStatus(
+    const int video_channel,
+    bool enable,
+    unsigned int sample_time_seconds) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, enable: %d, sample_time_seconds: %ul)",
+               __FUNCTION__, video_channel, enable, sample_time_seconds);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (!vie_channel->NetworkObserverRegistered()) {
+    shared_data_->SetLastError(kViENetworkObserverNotRegistered);
+    return -1;
+  }
+  if (vie_channel->SetPeriodicDeadOrAliveStatus(enable, sample_time_seconds)
+      != 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViENetworkImpl::SendUDPPacket(const int video_channel, const void* data,
+                                  const unsigned int length,
+                                  int& transmitted_bytes,
+                                  bool use_rtcp_socket = false) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, data: -, length: %d, transmitter_bytes: -, "
+               "useRtcpSocket: %d)", __FUNCTION__, video_channel, length,
+               use_rtcp_socket);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "Channel doesn't exist");
+    shared_data_->SetLastError(kViENetworkInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->SendUDPPacket((const WebRtc_Word8*) data, length,
+                                     (WebRtc_Word32&) transmitted_bytes,
+                                     use_rtcp_socket) < 0) {
+    shared_data_->SetLastError(kViENetworkUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_network_impl.h b/src/video_engine/vie_network_impl.h
new file mode 100644
index 0000000..56832df
--- /dev/null
+++ b/src/video_engine/vie_network_impl.h
@@ -0,0 +1,114 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_NETWORK_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_NETWORK_IMPL_H_
+
+#include "typedefs.h"  // NOLINT
+#include "video_engine/include/vie_network.h"
+#include "video_engine/vie_ref_count.h"
+
+namespace webrtc {
+
+class ViESharedData;
+
+class ViENetworkImpl
+    : public ViENetwork,
+      public ViERefCount {
+ public:
+  // Implements ViENetwork.
+  virtual int Release();
+  virtual int SetLocalReceiver(const int video_channel,
+                               const uint16_t rtp_port,
+                               const uint16_t rtcp_port,
+                               const char* ip_address);
+  virtual int GetLocalReceiver(const int video_channel,
+                               uint16_t& rtp_port,
+                               uint16_t& rtcp_port,
+                               char* ip_address);
+  virtual int SetSendDestination(const int video_channel,
+                                 const char* ip_address,
+                                 const uint16_t rtp_port,
+                                 const uint16_t rtcp_port,
+                                 const uint16_t source_rtp_port,
+                                 const uint16_t source_rtcp_port);
+  virtual int GetSendDestination(const int video_channel,
+                                 char* ip_address,
+                                 uint16_t& rtp_port,
+                                 uint16_t& rtcp_port,
+                                 uint16_t& source_rtp_port,
+                                 uint16_t& source_rtcp_port);
+  virtual int RegisterSendTransport(const int video_channel,
+                                    Transport& transport);
+  virtual int DeregisterSendTransport(const int video_channel);
+  virtual int ReceivedRTPPacket(const int video_channel,
+                                const void* data,
+                                const int length);
+  virtual int ReceivedRTCPPacket(const int video_channel,
+                                 const void* data,
+                                 const int length);
+  virtual int GetSourceInfo(const int video_channel,
+                            uint16_t& rtp_port,
+                            uint16_t& rtcp_port,
+                            char* ip_address,
+                            unsigned int ip_address_length);
+  virtual int GetLocalIP(char ip_address[64], bool ipv6);
+  virtual int EnableIPv6(int video_channel);
+  virtual bool IsIPv6Enabled(int video_channel);
+  virtual int SetSourceFilter(const int video_channel,
+                              const uint16_t rtp_port,
+                              const uint16_t rtcp_port,
+                              const char* ip_address);
+  virtual int GetSourceFilter(const int video_channel,
+                              uint16_t& rtp_port,
+                              uint16_t& rtcp_port,
+                              char* ip_address);
+  virtual int SetSendToS(const int video_channel,
+                         const int DSCP,
+                         const bool use_set_sockOpt);
+  virtual int GetSendToS(const int video_channel,
+                         int& DSCP,
+                         bool& use_set_sockOpt);
+  virtual int SetSendGQoS(const int video_channel,
+                          const bool enable,
+                          const int service_type,
+                          const int overrideDSCP);
+  virtual int GetSendGQoS(const int video_channel,
+                          bool& enabled,
+                          int& service_type,
+                          int& overrideDSCP);
+  virtual int SetMTU(int video_channel, unsigned int mtu);
+  virtual int SetPacketTimeoutNotification(const int video_channel,
+                                           bool enable,
+                                           int timeout_seconds);
+  virtual int RegisterObserver(const int video_channel,
+                               ViENetworkObserver& observer);
+  virtual int DeregisterObserver(const int video_channel);
+  virtual int SetPeriodicDeadOrAliveStatus(
+      const int video_channel,
+      const bool enable,
+      const unsigned int sample_time_seconds);
+  virtual int SendUDPPacket(const int video_channel,
+                            const void* data,
+                            const unsigned int length,
+                            int& transmitted_bytes,
+                            bool use_rtcp_socket);
+
+ protected:
+  explicit ViENetworkImpl(ViESharedData* shared_data);
+  virtual ~ViENetworkImpl();
+
+ private:
+  ViESharedData* shared_data_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_NETWORK_IMPL_H_
diff --git a/src/video_engine/vie_performance_monitor.cc b/src/video_engine/vie_performance_monitor.cc
new file mode 100644
index 0000000..4cab915
--- /dev/null
+++ b/src/video_engine/vie_performance_monitor.cc
@@ -0,0 +1,139 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_performance_monitor.h"
+
+#include "system_wrappers/interface/cpu_wrapper.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/event_wrapper.h"
+#include "system_wrappers/interface/thread_wrapper.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_base.h"
+#include "video_engine/vie_defines.h"
+
+namespace webrtc {
+
+enum { kVieMonitorPeriodMs = 975 };
+
+ViEPerformanceMonitor::ViEPerformanceMonitor(int engine_id)
+    : engine_id_(engine_id),
+      pointer_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      monitor_thread_(NULL),
+      monitor_event_(*EventWrapper::Create()),
+      cpu_(NULL),
+      vie_base_observer_(NULL) {
+}
+
+ViEPerformanceMonitor::~ViEPerformanceMonitor() {
+  Terminate();
+  delete pointer_cs_;
+  delete &monitor_event_;
+}
+
+int ViEPerformanceMonitor::Init(ViEBaseObserver* vie_base_observer) {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s", __FUNCTION__);
+
+  CriticalSectionScoped cs(pointer_cs_);
+  if (!vie_base_observer || vie_base_observer_) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "%s: Bad input argument or observer already set",
+                 __FUNCTION__);
+    return -1;
+  }
+
+  cpu_ = CpuWrapper::CreateCpu();
+  if (cpu_ == NULL) {
+    // Performance monitoring not supported
+    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo,
+                 ViEId(engine_id_), "%s: Not supported", __FUNCTION__);
+    return 0;
+  }
+
+  if (monitor_thread_ == NULL) {
+    monitor_event_.StartTimer(true, kVieMonitorPeriodMs);
+    monitor_thread_ = ThreadWrapper::CreateThread(ViEMonitorThreadFunction,
+                                                  this, kNormalPriority,
+                                                  "ViEPerformanceMonitor");
+    unsigned int t_id = 0;
+    if (monitor_thread_->Start(t_id)) {
+      WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+                   "%s: Performance monitor thread started %u",
+                   __FUNCTION__, t_id);
+    } else {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                   "%s: Could not start performance monitor", __FUNCTION__);
+      monitor_event_.StopTimer();
+      return -1;
+    }
+  }
+  vie_base_observer_ = vie_base_observer;
+  return 0;
+}
+
+void ViEPerformanceMonitor::Terminate() {
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, ViEId(engine_id_),
+               "%s", __FUNCTION__);
+
+  pointer_cs_->Enter();
+  if (!vie_base_observer_) {
+    pointer_cs_->Leave();
+    return;
+  }
+
+  vie_base_observer_ = NULL;
+  monitor_event_.StopTimer();
+  if (monitor_thread_) {
+    ThreadWrapper* tmp_thread = monitor_thread_;
+    monitor_thread_ = NULL;
+    monitor_event_.Set();
+    pointer_cs_->Leave();
+    if (tmp_thread->Stop()) {
+      pointer_cs_->Enter();
+      delete tmp_thread;
+      tmp_thread = NULL;
+      delete cpu_;
+    }
+    cpu_ = NULL;
+  }
+  pointer_cs_->Leave();
+}
+
+bool ViEPerformanceMonitor::ViEBaseObserverRegistered() {
+  CriticalSectionScoped cs(pointer_cs_);
+  return vie_base_observer_ != NULL;
+}
+
+bool ViEPerformanceMonitor::ViEMonitorThreadFunction(void* obj) {
+  return static_cast<ViEPerformanceMonitor*>(obj)->ViEMonitorProcess();
+}
+
+bool ViEPerformanceMonitor::ViEMonitorProcess() {
+  // Periodically triggered with time KViEMonitorPeriodMs
+  monitor_event_.Wait(kVieMonitorPeriodMs);
+  if (monitor_thread_ == NULL) {
+    // Thread removed, exit
+    return false;
+  }
+
+  CriticalSectionScoped cs(pointer_cs_);
+  if (cpu_) {
+    int cpu_load = cpu_->CpuUsage();
+    if (cpu_load > 75) {
+      if (vie_base_observer_) {
+        vie_base_observer_->PerformanceAlarm(cpu_load);
+      }
+    }
+  }
+  return true;
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_performance_monitor.h b/src/video_engine/vie_performance_monitor.h
new file mode 100644
index 0000000..1dc72ec
--- /dev/null
+++ b/src/video_engine/vie_performance_monitor.h
@@ -0,0 +1,54 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// ViEPerformanceMonitor is used to check the current CPU usage and triggers a
+// callback when getting over a specified threshold.
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_PERFORMANCE_MONITOR_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_PERFORMANCE_MONITOR_H_
+
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "typedefs.h"  // NOLINT
+#include "video_engine/vie_defines.h"
+
+namespace webrtc {
+
+class CpuWrapper;
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+class ViEBaseObserver;
+
+class ViEPerformanceMonitor {
+ public:
+  explicit ViEPerformanceMonitor(int engine_id);
+  ~ViEPerformanceMonitor();
+
+  int Init(ViEBaseObserver* vie_base_observer);
+  void Terminate();
+  bool ViEBaseObserverRegistered();
+
+ protected:
+  static bool ViEMonitorThreadFunction(void* obj);
+  bool ViEMonitorProcess();
+
+ private:
+  const int engine_id_;
+  // TODO(mfldoman) Make this one scoped_ptr.
+  CriticalSectionWrapper* pointer_cs_;
+  ThreadWrapper* monitor_thread_;
+  EventWrapper& monitor_event_;
+  CpuWrapper* cpu_;
+  ViEBaseObserver* vie_base_observer_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_PERFORMANCE_MONITOR_H_
diff --git a/src/video_engine/vie_receiver.cc b/src/video_engine/vie_receiver.cc
new file mode 100644
index 0000000..955d3dc
--- /dev/null
+++ b/src/video_engine/vie_receiver.cc
@@ -0,0 +1,265 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_receiver.h"
+
+#include "modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "modules/utility/interface/rtp_dump.h"
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/trace.h"
+
+namespace webrtc {
+
+ViEReceiver::ViEReceiver(const int32_t channel_id,
+                         VideoCodingModule* module_vcm)
+    : receive_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      channel_id_(channel_id),
+      rtp_rtcp_(NULL),
+      vcm_(module_vcm),
+      external_decryption_(NULL),
+      decryption_buffer_(NULL),
+      rtp_dump_(NULL),
+      receiving_(false) {
+}
+
+ViEReceiver::~ViEReceiver() {
+  if (decryption_buffer_) {
+    delete[] decryption_buffer_;
+    decryption_buffer_ = NULL;
+  }
+  if (rtp_dump_) {
+    rtp_dump_->Stop();
+    RtpDump::DestroyRtpDump(rtp_dump_);
+    rtp_dump_ = NULL;
+  }
+}
+
+int ViEReceiver::RegisterExternalDecryption(Encryption* decryption) {
+  CriticalSectionScoped cs(receive_cs_.get());
+  if (external_decryption_) {
+    return -1;
+  }
+  decryption_buffer_ = new WebRtc_UWord8[kViEMaxMtu];
+  if (decryption_buffer_ == NULL) {
+    return -1;
+  }
+  external_decryption_ = decryption;
+  return 0;
+}
+
+int ViEReceiver::DeregisterExternalDecryption() {
+  CriticalSectionScoped cs(receive_cs_.get());
+  if (external_decryption_ == NULL) {
+    return -1;
+  }
+  external_decryption_ = NULL;
+  return 0;
+}
+
+void ViEReceiver::SetRtpRtcpModule(RtpRtcp* module) {
+  rtp_rtcp_ = module;
+}
+
+void ViEReceiver::RegisterSimulcastRtpRtcpModules(
+    const std::list<RtpRtcp*>& rtp_modules) {
+  CriticalSectionScoped cs(receive_cs_.get());
+  rtp_rtcp_simulcast_.clear();
+
+  if (!rtp_modules.empty()) {
+    rtp_rtcp_simulcast_.insert(rtp_rtcp_simulcast_.begin(),
+                               rtp_modules.begin(),
+                               rtp_modules.end());
+  }
+}
+
+void ViEReceiver::IncomingRTPPacket(const WebRtc_Word8* rtp_packet,
+                                    const WebRtc_Word32 rtp_packet_length,
+                                    const char* from_ip,
+                                    const WebRtc_UWord16 from_port) {
+  InsertRTPPacket(rtp_packet, rtp_packet_length);
+}
+
+void ViEReceiver::IncomingRTCPPacket(const WebRtc_Word8* rtcp_packet,
+                                     const WebRtc_Word32 rtcp_packet_length,
+                                     const char* from_ip,
+                                     const WebRtc_UWord16 from_port) {
+  InsertRTCPPacket(rtcp_packet, rtcp_packet_length);
+}
+
+int ViEReceiver::ReceivedRTPPacket(const void* rtp_packet,
+                                   int rtp_packet_length) {
+  if (!receiving_) {
+    return -1;
+  }
+  return InsertRTPPacket((const WebRtc_Word8*) rtp_packet, rtp_packet_length);
+}
+
+int ViEReceiver::ReceivedRTCPPacket(const void* rtcp_packet,
+                                    int rtcp_packet_length) {
+  if (!receiving_) {
+    return -1;
+  }
+  return InsertRTCPPacket((const WebRtc_Word8*) rtcp_packet,
+                          rtcp_packet_length);
+}
+
+WebRtc_Word32 ViEReceiver::OnReceivedPayloadData(
+    const WebRtc_UWord8* payload_data, const WebRtc_UWord16 payload_size,
+    const WebRtcRTPHeader* rtp_header) {
+  if (rtp_header == NULL) {
+    return 0;
+  }
+
+  if (vcm_->IncomingPacket(payload_data, payload_size, *rtp_header) != 0) {
+    // Check this...
+    return -1;
+  }
+  return 0;
+}
+
+int ViEReceiver::InsertRTPPacket(const WebRtc_Word8* rtp_packet,
+                                 int rtp_packet_length) {
+  // TODO(mflodman) Change decrypt to get rid of this cast.
+  WebRtc_Word8* tmp_ptr = const_cast<WebRtc_Word8*>(rtp_packet);
+  unsigned char* received_packet = reinterpret_cast<unsigned char*>(tmp_ptr);
+  int received_packet_length = rtp_packet_length;
+
+  {
+    CriticalSectionScoped cs(receive_cs_.get());
+
+    if (external_decryption_) {
+      int decrypted_length = 0;
+      external_decryption_->decrypt(channel_id_, received_packet,
+                                    decryption_buffer_, received_packet_length,
+                                    &decrypted_length);
+      if (decrypted_length <= 0) {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, channel_id_,
+                     "RTP decryption failed");
+        return -1;
+      } else if (decrypted_length > kViEMaxMtu) {
+        WEBRTC_TRACE(webrtc::kTraceCritical, webrtc::kTraceVideo, channel_id_,
+                     "InsertRTPPacket: %d bytes is allocated as RTP decrytption"
+                     " output, external decryption used %d bytes. => memory is "
+                     " now corrupted", kViEMaxMtu, decrypted_length);
+        return -1;
+      }
+      received_packet = decryption_buffer_;
+      received_packet_length = decrypted_length;
+    }
+
+    if (rtp_dump_) {
+      rtp_dump_->DumpPacket(received_packet,
+                           static_cast<WebRtc_UWord16>(received_packet_length));
+    }
+  }
+  assert(rtp_rtcp_);  // Should be set by owner at construction time.
+  return rtp_rtcp_->IncomingPacket(received_packet, received_packet_length);
+}
+
+int ViEReceiver::InsertRTCPPacket(const WebRtc_Word8* rtcp_packet,
+                                  int rtcp_packet_length) {
+  // TODO(mflodman) Change decrypt to get rid of this cast.
+  WebRtc_Word8* tmp_ptr = const_cast<WebRtc_Word8*>(rtcp_packet);
+  unsigned char* received_packet = reinterpret_cast<unsigned char*>(tmp_ptr);
+  int received_packet_length = rtcp_packet_length;
+  {
+    CriticalSectionScoped cs(receive_cs_.get());
+
+    if (external_decryption_) {
+      int decrypted_length = 0;
+      external_decryption_->decrypt_rtcp(channel_id_, received_packet,
+                                         decryption_buffer_,
+                                         received_packet_length,
+                                         &decrypted_length);
+      if (decrypted_length <= 0) {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, channel_id_,
+                     "RTP decryption failed");
+        return -1;
+      } else if (decrypted_length > kViEMaxMtu) {
+        WEBRTC_TRACE(webrtc::kTraceCritical, webrtc::kTraceVideo, channel_id_,
+                     "InsertRTCPPacket: %d bytes is allocated as RTP "
+                     " decrytption output, external decryption used %d bytes. "
+                     " => memory is now corrupted",
+                     kViEMaxMtu, decrypted_length);
+        return -1;
+      }
+      received_packet = decryption_buffer_;
+      received_packet_length = decrypted_length;
+    }
+
+    if (rtp_dump_) {
+      rtp_dump_->DumpPacket(
+          received_packet, static_cast<WebRtc_UWord16>(received_packet_length));
+    }
+  }
+  {
+    CriticalSectionScoped cs(receive_cs_.get());
+    std::list<RtpRtcp*>::iterator it = rtp_rtcp_simulcast_.begin();
+    while (it != rtp_rtcp_simulcast_.end()) {
+      RtpRtcp* rtp_rtcp = *it++;
+      rtp_rtcp->IncomingPacket(received_packet, received_packet_length);
+    }
+  }
+  assert(rtp_rtcp_);  // Should be set by owner at construction time.
+  return rtp_rtcp_->IncomingPacket(received_packet, received_packet_length);
+}
+
+void ViEReceiver::StartReceive() {
+  receiving_ = true;
+}
+
+void ViEReceiver::StopReceive() {
+  receiving_ = false;
+}
+
+int ViEReceiver::StartRTPDump(const char file_nameUTF8[1024]) {
+  CriticalSectionScoped cs(receive_cs_.get());
+  if (rtp_dump_) {
+    // Restart it if it already exists and is started
+    rtp_dump_->Stop();
+  } else {
+    rtp_dump_ = RtpDump::CreateRtpDump();
+    if (rtp_dump_ == NULL) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, channel_id_,
+                   "StartRTPDump: Failed to create RTP dump");
+      return -1;
+    }
+  }
+  if (rtp_dump_->Start(file_nameUTF8) != 0) {
+    RtpDump::DestroyRtpDump(rtp_dump_);
+    rtp_dump_ = NULL;
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, channel_id_,
+                 "StartRTPDump: Failed to start RTP dump");
+    return -1;
+  }
+  return 0;
+}
+
+int ViEReceiver::StopRTPDump() {
+  CriticalSectionScoped cs(receive_cs_.get());
+  if (rtp_dump_) {
+    if (rtp_dump_->IsActive()) {
+      rtp_dump_->Stop();
+    } else {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, channel_id_,
+                   "StopRTPDump: Dump not active");
+    }
+    RtpDump::DestroyRtpDump(rtp_dump_);
+    rtp_dump_ = NULL;
+  } else {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, channel_id_,
+                 "StopRTPDump: RTP dump not started");
+    return -1;
+  }
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_receiver.h b/src/video_engine/vie_receiver.h
new file mode 100644
index 0000000..09fbc8f
--- /dev/null
+++ b/src/video_engine/vie_receiver.h
@@ -0,0 +1,87 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_RECEIVER_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_RECEIVER_H_
+
+#include <list>
+
+#include "engine_configurations.h"  // NOLINT
+#include "modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "modules/udp_transport/interface/udp_transport.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "typedefs.h"  // NOLINT
+#include "video_engine/vie_defines.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class Encryption;
+class RtpDump;
+class RtpRtcp;
+class VideoCodingModule;
+
+class ViEReceiver : public UdpTransportData, public RtpData {
+ public:
+  ViEReceiver(const int32_t channel_id, VideoCodingModule* module_vcm);
+  ~ViEReceiver();
+
+  int RegisterExternalDecryption(Encryption* decryption);
+  int DeregisterExternalDecryption();
+
+  void SetRtpRtcpModule(RtpRtcp* module);
+
+  void RegisterSimulcastRtpRtcpModules(const std::list<RtpRtcp*>& rtp_modules);
+
+  void StartReceive();
+  void StopReceive();
+
+  int StartRTPDump(const char file_nameUTF8[1024]);
+  int StopRTPDump();
+
+  // Implements UdpTransportData.
+  virtual void IncomingRTPPacket(const WebRtc_Word8* rtp_packet,
+                                 const WebRtc_Word32 rtp_packet_length,
+                                 const char* from_ip,
+                                 const WebRtc_UWord16 from_port);
+  virtual void IncomingRTCPPacket(const WebRtc_Word8* rtcp_packet,
+                                  const WebRtc_Word32 rtcp_packet_length,
+                                  const char* from_ip,
+                                  const WebRtc_UWord16 from_port);
+
+  // Receives packets from external transport.
+  int ReceivedRTPPacket(const void* rtp_packet, int rtp_packet_length);
+  int ReceivedRTCPPacket(const void* rtcp_packet, int rtcp_packet_length);
+
+  // Implements RtpData.
+  virtual WebRtc_Word32 OnReceivedPayloadData(
+      const WebRtc_UWord8* payload_data,
+      const WebRtc_UWord16 payload_size,
+      const WebRtcRTPHeader* rtp_header);
+
+ private:
+  int InsertRTPPacket(const WebRtc_Word8* rtp_packet, int rtp_packet_length);
+  int InsertRTCPPacket(const WebRtc_Word8* rtcp_packet, int rtcp_packet_length);
+
+  scoped_ptr<CriticalSectionWrapper> receive_cs_;
+  const int32_t channel_id_;
+  RtpRtcp* rtp_rtcp_;
+  std::list<RtpRtcp*> rtp_rtcp_simulcast_;
+  VideoCodingModule* vcm_;
+
+  Encryption* external_decryption_;
+  WebRtc_UWord8* decryption_buffer_;
+  RtpDump* rtp_dump_;
+  bool receiving_;
+};
+
+}  // namespace webrt
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_RECEIVER_H_
diff --git a/src/video_engine/vie_ref_count.cc b/src/video_engine/vie_ref_count.cc
new file mode 100644
index 0000000..3f7e45c
--- /dev/null
+++ b/src/video_engine/vie_ref_count.cc
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_ref_count.h"
+
+#include "system_wrappers/interface/critical_section_wrapper.h"
+
+namespace webrtc {
+
+ViERefCount::ViERefCount()
+    : count_(0),
+      crit_(CriticalSectionWrapper::CreateCriticalSection()) {
+}
+
+ViERefCount::~ViERefCount() {
+}
+
+ViERefCount& ViERefCount::operator++(int) {  // NOLINT
+  CriticalSectionScoped lock(crit_.get());
+  count_++;
+  return *this;
+}
+
+ViERefCount& ViERefCount::operator--(int) {  // NOLINT
+  CriticalSectionScoped lock(crit_.get());
+  count_--;
+  return *this;
+}
+
+void ViERefCount::Reset() {
+  CriticalSectionScoped lock(crit_.get());
+  count_ = 0;
+}
+
+int ViERefCount::GetCount() const {
+  return count_;
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_ref_count.h b/src/video_engine/vie_ref_count.h
new file mode 100644
index 0000000..5ac9a3e
--- /dev/null
+++ b/src/video_engine/vie_ref_count.h
@@ -0,0 +1,40 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// TODO(mflodman) Remove this class and use ref count class in system_wrappers.
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_REF_COUNT_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_REF_COUNT_H_
+
+#include "system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+
+class ViERefCount {
+ public:
+  ViERefCount();
+  ~ViERefCount();
+
+  ViERefCount& operator++(int);  // NOLINT
+  ViERefCount& operator--(int);  // NOLINT
+
+  void Reset();
+  int GetCount() const;
+
+ private:
+  volatile int count_;
+  scoped_ptr<CriticalSectionWrapper> crit_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_REF_COUNT_H_
diff --git a/src/video_engine/vie_remb.cc b/src/video_engine/vie_remb.cc
new file mode 100644
index 0000000..4e18f25
--- /dev/null
+++ b/src/video_engine/vie_remb.cc
@@ -0,0 +1,211 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_remb.h"
+
+#include <algorithm>
+#include <cassert>
+
+#include "modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "modules/utility/interface/process_thread.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/tick_util.h"
+#include "system_wrappers/interface/trace.h"
+
+namespace webrtc {
+
+const int kRembSendIntervallMs = 1000;
+const int kRembTimeOutThresholdMs = 2000;
+const unsigned int kRembMinimumBitrateKbps = 50;
+
+// % threshold for if we should send a new REMB asap.
+const unsigned int kSendThresholdPercent = 97;
+
+VieRemb::VieRemb(ProcessThread* process_thread)
+    : process_thread_(process_thread),
+      list_crit_(CriticalSectionWrapper::CreateCriticalSection()),
+      last_remb_time_(TickTime::MillisecondTimestamp()),
+      last_send_bitrate_(0) {
+  process_thread->RegisterModule(this);
+}
+
+VieRemb::~VieRemb() {
+  process_thread_->DeRegisterModule(this);
+}
+
+void VieRemb::AddReceiveChannel(RtpRtcp* rtp_rtcp) {
+  assert(rtp_rtcp);
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+               "VieRemb::AddReceiveChannel(%p)", rtp_rtcp);
+
+  CriticalSectionScoped cs(list_crit_.get());
+  if (std::find(receive_modules_.begin(), receive_modules_.end(), rtp_rtcp) !=
+      receive_modules_.end())
+    return;
+
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, -1, "AddRembChannel");
+  // The module probably doesn't have a remote SSRC yet, so don't add it to the
+  // map.
+  receive_modules_.push_back(rtp_rtcp);
+}
+
+void VieRemb::RemoveReceiveChannel(RtpRtcp* rtp_rtcp) {
+  assert(rtp_rtcp);
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+               "VieRemb::RemoveReceiveChannel(%p)", rtp_rtcp);
+
+  CriticalSectionScoped cs(list_crit_.get());
+  unsigned int ssrc = rtp_rtcp->RemoteSSRC();
+  for (RtpModules::iterator it = receive_modules_.begin();
+       it != receive_modules_.end(); ++it) {
+    if ((*it) == rtp_rtcp) {
+      receive_modules_.erase(it);
+      break;
+    }
+  }
+  update_time_bitrates_.erase(ssrc);
+}
+
+void VieRemb::AddRembSender(RtpRtcp* rtp_rtcp) {
+  assert(rtp_rtcp);
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+               "VieRemb::AddRembSender(%p)", rtp_rtcp);
+
+  CriticalSectionScoped cs(list_crit_.get());
+
+  // Verify this module hasn't been added earlier.
+  if (std::find(rtcp_sender_.begin(), rtcp_sender_.end(), rtp_rtcp) !=
+      rtcp_sender_.end())
+    return;
+  rtcp_sender_.push_back(rtp_rtcp);
+}
+
+void VieRemb::RemoveRembSender(RtpRtcp* rtp_rtcp) {
+  assert(rtp_rtcp);
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVideo, -1,
+               "VieRemb::RemoveRembSender(%p)", rtp_rtcp);
+
+  CriticalSectionScoped cs(list_crit_.get());
+  for (RtpModules::iterator it = rtcp_sender_.begin();
+       it != rtcp_sender_.end(); ++it) {
+    if ((*it) == rtp_rtcp) {
+      rtcp_sender_.erase(it);
+      return;
+    }
+  }
+}
+
+bool VieRemb::InUse() const {
+  CriticalSectionScoped cs(list_crit_.get());
+  if (receive_modules_.empty() && rtcp_sender_.empty())
+    return false;
+  else
+    return true;
+}
+
+void VieRemb::OnReceiveBitrateChanged(unsigned int ssrc, unsigned int bitrate) {
+  WEBRTC_TRACE(kTraceStream, kTraceVideo, -1,
+               "VieRemb::UpdateBitrateEstimate(ssrc: %u, bitrate: %u)",
+               ssrc, bitrate);
+  CriticalSectionScoped cs(list_crit_.get());
+
+  // Check if this is a new ssrc and add it to the map if it is.
+  if (update_time_bitrates_.find(ssrc) == update_time_bitrates_.end()) {
+    update_time_bitrates_[ssrc] = std::make_pair(
+        TickTime::MillisecondTimestamp(), bitrate);
+  }
+
+  // If we already have an estimate, check if the new total estimate is below
+  // kSendThresholdPercent of the previous estimate.
+  if (last_send_bitrate_ > 0) {
+    unsigned int new_remb_bitrate = last_send_bitrate_ -
+        update_time_bitrates_[ssrc].second + bitrate;
+
+    if (new_remb_bitrate < kSendThresholdPercent * last_send_bitrate_ / 100) {
+      // The new bitrate estimate is less than kSendThresholdPercent % of the
+      // last report. Send a REMB asap.
+      last_remb_time_ = TickTime::MillisecondTimestamp() - kRembSendIntervallMs;
+    }
+  }
+  update_time_bitrates_[ssrc] = std::make_pair(
+      TickTime::MillisecondTimestamp(), bitrate);
+}
+
+WebRtc_Word32 VieRemb::ChangeUniqueId(const WebRtc_Word32 id) {
+  return 0;
+}
+
+WebRtc_Word32 VieRemb::TimeUntilNextProcess() {
+  return kRembSendIntervallMs -
+      (TickTime::MillisecondTimestamp() - last_remb_time_);
+}
+
+WebRtc_Word32 VieRemb::Process() {
+  int64_t now = TickTime::MillisecondTimestamp();
+  if (now - last_remb_time_ < kRembSendIntervallMs)
+    return 0;
+
+  last_remb_time_ = now;
+
+  // Calculate total receive bitrate estimate.
+  list_crit_->Enter();
+
+  // Remove any timed out estimates.
+  SsrcTimeBitrate::iterator it = update_time_bitrates_.begin();
+  while (it != update_time_bitrates_.end()) {
+    if (TickTime::MillisecondTimestamp() - it->second.first >
+      kRembTimeOutThresholdMs) {
+      update_time_bitrates_.erase(it++);
+    } else {
+      ++it;
+    }
+  }
+
+  int num_bitrates = update_time_bitrates_.size();
+
+  if (num_bitrates == 0) {
+    list_crit_->Leave();
+    return 0;
+  }
+
+  // TODO(mflodman) Use std::vector and change RTP module API.
+  unsigned int* ssrcs = new unsigned int[num_bitrates];
+
+  unsigned int total_bitrate = 0;
+  int idx = 0;
+  for (it = update_time_bitrates_.begin(); it != update_time_bitrates_.end();
+      ++it, ++idx) {
+    total_bitrate += it->second.second;
+    ssrcs[idx] = it->first;
+  }
+
+  // Send a REMB packet.
+  RtpRtcp* sender = NULL;
+  if (!rtcp_sender_.empty()) {
+    sender = rtcp_sender_.front();
+  } else if (!receive_modules_.empty()) {
+    sender = receive_modules_.front();
+  }
+  last_send_bitrate_ = total_bitrate;
+
+  // Never send a REMB lower than last_send_bitrate_.
+  if (last_send_bitrate_ < kRembMinimumBitrateKbps) {
+    last_send_bitrate_ = kRembMinimumBitrateKbps;
+  }
+  list_crit_->Leave();
+
+  if (sender) {
+    sender->SetREMBData(total_bitrate, num_bitrates, ssrcs);
+  }
+  delete [] ssrcs;
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_remb.h b/src/video_engine/vie_remb.h
new file mode 100644
index 0000000..7fe12f8
--- /dev/null
+++ b/src/video_engine/vie_remb.h
@@ -0,0 +1,92 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// 1. Register a RtpRtcp module to include in the REMB packet.
+// 2. When UpdateBitrateEstimate is called for the first time for a SSRC, add it
+//    to the map.
+// 3. Send a new REMB every kRembSendIntervallMs or if a lower bitrate estimate
+//    for a specified SSRC.
+
+
+#ifndef WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_REMB_H_
+#define WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_REMB_H_
+
+#include <list>
+#include <map>
+#include <utility>
+
+#include "modules/interface/module.h"
+#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
+#include "modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class ProcessThread;
+class RtpRtcp;
+
+class VieRemb : public RemoteBitrateObserver, public Module {
+ public:
+  explicit VieRemb(ProcessThread* process_thread);
+  ~VieRemb();
+
+  // Called to add a receive channel to include in the REMB packet.
+  void AddReceiveChannel(RtpRtcp* rtp_rtcp);
+
+  // Removes the specified channel from REMB estimate.
+  void RemoveReceiveChannel(RtpRtcp* rtp_rtcp);
+
+  // Called to add a module that can generate and send REMB RTCP.
+  void AddRembSender(RtpRtcp* rtp_rtcp);
+
+  // Removes a REMB RTCP sender.
+  void RemoveRembSender(RtpRtcp* rtp_rtcp);
+
+  // Returns true if the instance is in use, false otherwise.
+  bool InUse() const;
+
+  // Called every time there is a new bitrate estimate for the received stream
+  // with given SSRC. This call will trigger a new RTCP REMB packet if the
+  // bitrate estimate has decreased or if no RTCP REMB packet has been sent for
+  // a certain time interval.
+  // Implements RtpReceiveBitrateUpdate.
+  virtual void OnReceiveBitrateChanged(unsigned int ssrc, unsigned int bitrate);
+
+  // Implements Module.
+  virtual WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+  virtual WebRtc_Word32 TimeUntilNextProcess();
+  virtual WebRtc_Word32 Process();
+
+ private:
+  typedef std::list<RtpRtcp*> RtpModules;
+  typedef std::map<unsigned int, std::pair<int64_t, unsigned int> >
+      SsrcTimeBitrate;
+
+  ProcessThread* process_thread_;
+  scoped_ptr<CriticalSectionWrapper> list_crit_;
+
+  // The last time a REMB was sent.
+  int64_t last_remb_time_;
+  unsigned int last_send_bitrate_;
+
+  // All RtpRtcp modules to include in the REMB packet.
+  RtpModules receive_modules_;
+
+  // All modules that can send REMB RTCP.
+  RtpModules rtcp_sender_;
+
+  // The last bitrate update for each SSRC.
+  SsrcTimeBitrate update_time_bitrates_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_MAIN_SOURCE_VIE_REMB_H_
diff --git a/src/video_engine/vie_remb_unittest.cc b/src/video_engine/vie_remb_unittest.cc
new file mode 100644
index 0000000..b024877
--- /dev/null
+++ b/src/video_engine/vie_remb_unittest.cc
@@ -0,0 +1,318 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+
+// This file includes unit tests for ViERemb.
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include "modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "modules/rtp_rtcp/mocks/mock_rtp_rtcp.h"
+#include "modules/utility/interface/process_thread.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "system_wrappers/interface/sleep.h"
+#include "video_engine/vie_remb.h"
+
+using ::testing::_;
+using ::testing::AnyNumber;
+using ::testing::Return;
+
+namespace webrtc {
+
+// TODO(mflodman) Make a trigger function for this class to fake a clock and
+// remove sleeps in the test.
+class TestProcessThread : public ProcessThread {
+ public:
+  explicit TestProcessThread() {}
+  ~TestProcessThread() {}
+  virtual WebRtc_Word32 Start() { return 0; }
+  virtual WebRtc_Word32 Stop() { return 0; }
+  virtual WebRtc_Word32 RegisterModule(const Module* module) { return 0; }
+  virtual WebRtc_Word32 DeRegisterModule(const Module* module) { return 0; }
+};
+
+class ViERembTest : public ::testing::Test {
+ protected:
+  virtual void SetUp() {
+    process_thread_.reset(new TestProcessThread);
+    vie_remb_.reset(new VieRemb(process_thread_.get()));
+  }
+  scoped_ptr<TestProcessThread> process_thread_;
+  scoped_ptr<VieRemb> vie_remb_;
+};
+
+TEST_F(ViERembTest, OneModuleTestForSendingRemb) {
+  MockRtpRtcp rtp;
+  vie_remb_->AddReceiveChannel(&rtp);
+  vie_remb_->AddRembSender(&rtp);
+
+  const unsigned int bitrate_estimate = 456;
+  unsigned int ssrc[] = { 1234 };
+
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate);
+  EXPECT_CALL(rtp, RemoteSSRC())
+      .WillRepeatedly(Return(ssrc[0]));
+
+  // TODO(mflodman) Add fake clock and remove the lowered bitrate below.
+  SleepMs(1010);
+  EXPECT_CALL(rtp, SetREMBData(bitrate_estimate, 1, _))
+      .Times(1);
+  vie_remb_->Process();
+
+  // Lower bitrate to send another REMB packet.
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate - 100);
+  EXPECT_CALL(rtp, SetREMBData(bitrate_estimate - 100, 1, _))
+        .Times(1);
+  vie_remb_->Process();
+
+  vie_remb_->RemoveReceiveChannel(&rtp);
+  vie_remb_->RemoveRembSender(&rtp);
+}
+
+TEST_F(ViERembTest, LowerEstimateToSendRemb) {
+  MockRtpRtcp rtp;
+  vie_remb_->AddReceiveChannel(&rtp);
+  vie_remb_->AddRembSender(&rtp);
+
+  unsigned int bitrate_estimate = 456;
+  unsigned int ssrc[] = { 1234 };
+
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate);
+  EXPECT_CALL(rtp, RemoteSSRC())
+      .WillRepeatedly(Return(ssrc[0]));
+  // Call process to get a first estimate.
+  SleepMs(1010);
+  EXPECT_CALL(rtp, SetREMBData(bitrate_estimate, 1, _))
+        .Times(1);
+  vie_remb_->Process();
+
+  // Lower the estimate with more than 3% to trigger a call to SetREMBData right
+  // away.
+  bitrate_estimate = bitrate_estimate - 100;
+  EXPECT_CALL(rtp, SetREMBData(bitrate_estimate, 1, _))
+      .Times(1);
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate);
+  vie_remb_->Process();
+}
+
+TEST_F(ViERembTest, VerifyCombinedBitrateEstimate) {
+  MockRtpRtcp rtp_0;
+  MockRtpRtcp rtp_1;
+  vie_remb_->AddReceiveChannel(&rtp_0);
+  vie_remb_->AddRembSender(&rtp_0);
+  vie_remb_->AddReceiveChannel(&rtp_1);
+
+  unsigned int bitrate_estimate[] = { 456, 789 };
+  unsigned int ssrc[] = { 1234, 5678 };
+
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate[0]);
+  EXPECT_CALL(rtp_0, RemoteSSRC())
+      .Times(AnyNumber())
+      .WillRepeatedly(Return(ssrc[0]));
+
+  // Call process to get a first estimate.
+  EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate[0], 1, _))
+        .Times(1);
+  SleepMs(1010);
+  vie_remb_->Process();
+
+  vie_remb_->OnReceiveBitrateChanged(ssrc[1], bitrate_estimate[1] + 100);
+  EXPECT_CALL(rtp_1, RemoteSSRC())
+      .Times(AnyNumber())
+      .WillRepeatedly(Return(ssrc[1]));
+
+  // Lower the estimate to trigger a callback.
+  int total_bitrate = bitrate_estimate[0] + bitrate_estimate[1];
+  EXPECT_CALL(rtp_0, SetREMBData(total_bitrate, 2, _))
+      .Times(1);
+  vie_remb_->OnReceiveBitrateChanged(ssrc[1], bitrate_estimate[1]);
+  vie_remb_->Process();
+
+  vie_remb_->RemoveReceiveChannel(&rtp_0);
+  vie_remb_->RemoveRembSender(&rtp_0);
+  vie_remb_->RemoveReceiveChannel(&rtp_1);
+}
+
+TEST_F(ViERembTest, NoRembForIncreasedBitrate) {
+  MockRtpRtcp rtp_0;
+  MockRtpRtcp rtp_1;
+  vie_remb_->AddReceiveChannel(&rtp_0);
+  vie_remb_->AddRembSender(&rtp_0);
+  vie_remb_->AddReceiveChannel(&rtp_1);
+
+  unsigned int bitrate_estimate[] = { 456, 789 };
+  unsigned int ssrc[] = { 1234, 5678 };
+
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate[0]);
+  EXPECT_CALL(rtp_0, RemoteSSRC())
+      .Times(AnyNumber())
+      .WillRepeatedly(Return(ssrc[0]));
+
+  vie_remb_->OnReceiveBitrateChanged(ssrc[1], bitrate_estimate[1]);
+  EXPECT_CALL(rtp_1, RemoteSSRC())
+      .Times(AnyNumber())
+      .WillRepeatedly(Return(ssrc[1]));
+
+  // Trigger a first call to have a running state.
+  // TODO(mflodman) Add fake clock.
+  SleepMs(1010);
+  EXPECT_CALL(rtp_0,
+              SetREMBData(bitrate_estimate[0] + bitrate_estimate[1], 2, _))
+      .Times(1);
+  vie_remb_->Process();
+
+  // Increased estimate shouldn't trigger a callback right away.
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate[0] + 1);
+  EXPECT_CALL(rtp_0, SetREMBData(_, _, _))
+      .Times(0);
+
+  // Decresing the estimate less than 3% shouldn't trigger a new callback.
+  int lower_estimate = bitrate_estimate[0] * 98 / 100;
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], lower_estimate);
+  EXPECT_CALL(rtp_0, SetREMBData(_, _, _))
+      .Times(0);
+
+  vie_remb_->Process();
+  vie_remb_->RemoveReceiveChannel(&rtp_1);
+  vie_remb_->RemoveReceiveChannel(&rtp_0);
+  vie_remb_->RemoveRembSender(&rtp_0);
+}
+
+TEST_F(ViERembTest, ChangeSendRtpModule) {
+  MockRtpRtcp rtp_0;
+  MockRtpRtcp rtp_1;
+  vie_remb_->AddReceiveChannel(&rtp_0);
+  vie_remb_->AddRembSender(&rtp_0);
+  vie_remb_->AddReceiveChannel(&rtp_1);
+
+  unsigned int bitrate_estimate[] = { 456, 789 };
+  unsigned int ssrc[] = { 1234, 5678 };
+
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate[0]);
+  EXPECT_CALL(rtp_0, RemoteSSRC())
+      .Times(AnyNumber())
+      .WillRepeatedly(Return(ssrc[0]));
+
+  vie_remb_->OnReceiveBitrateChanged(ssrc[1], bitrate_estimate[1]);
+  EXPECT_CALL(rtp_1, RemoteSSRC())
+      .Times(AnyNumber())
+      .WillRepeatedly(Return(ssrc[1]));
+
+  // Call process to get a first estimate.
+  SleepMs(1010);
+  EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate[0] + bitrate_estimate[1], 2,
+                                 _))
+      .Times(1);
+  vie_remb_->Process();
+
+  // Decrease estimate to trigger a REMB.
+  bitrate_estimate[0] = bitrate_estimate[0] - 100;
+  EXPECT_CALL(rtp_0, SetREMBData(bitrate_estimate[0] + bitrate_estimate[1], 2,
+                                 _))
+      .Times(1);
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate[0]);
+  vie_remb_->Process();
+
+  // Remove the sending module, add it again -> should get remb on the second
+  // module.
+  vie_remb_->RemoveRembSender(&rtp_0);
+  vie_remb_->AddRembSender(&rtp_1);
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate[0]);
+
+  bitrate_estimate[1] = bitrate_estimate[1] - 100;
+  EXPECT_CALL(rtp_1, SetREMBData(bitrate_estimate[0] + bitrate_estimate[1], 2,
+                                 _))
+        .Times(1);
+  vie_remb_->OnReceiveBitrateChanged(ssrc[1], bitrate_estimate[1]);
+  vie_remb_->Process();
+
+  vie_remb_->RemoveReceiveChannel(&rtp_0);
+  vie_remb_->RemoveReceiveChannel(&rtp_1);
+}
+
+TEST_F(ViERembTest, OnlyOneRembForDoubleProcess) {
+  MockRtpRtcp rtp;
+  unsigned int bitrate_estimate = 456;
+  unsigned int ssrc[] = { 1234 };
+
+  vie_remb_->AddReceiveChannel(&rtp);
+  vie_remb_->AddRembSender(&rtp);
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate);
+  EXPECT_CALL(rtp, RemoteSSRC())
+      .WillRepeatedly(Return(ssrc[0]));
+
+  // Call process to get a first estimate.
+  SleepMs(1010);
+  EXPECT_CALL(rtp, SetREMBData(_, _, _))
+        .Times(1);
+  vie_remb_->Process();
+
+  // Lower the estimate, should trigger a call to SetREMBData right away.
+  bitrate_estimate = bitrate_estimate - 100;
+  EXPECT_CALL(rtp, SetREMBData(bitrate_estimate, 1, _))
+      .Times(1);
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate);
+  vie_remb_->Process();
+
+  // Call Process again, this should not trigger a new callback.
+  EXPECT_CALL(rtp, SetREMBData(_, _, _))
+      .Times(0);
+  vie_remb_->Process();
+  vie_remb_->RemoveReceiveChannel(&rtp);
+  vie_remb_->RemoveRembSender(&rtp);
+}
+
+TEST_F(ViERembTest, NoOnReceivedBitrateChangedCall) {
+  MockRtpRtcp rtp;
+  EXPECT_CALL(rtp, RemoteSSRC())
+        .WillRepeatedly(Return(1234));
+
+  vie_remb_->AddReceiveChannel(&rtp);
+  vie_remb_->AddRembSender(&rtp);
+  // TODO(mflodman) Add fake clock.
+  SleepMs(1010);
+  // No bitrate estimate given, no callback expected.
+  EXPECT_CALL(rtp, SetREMBData(_, _, _))
+      .Times(0);
+  vie_remb_->Process();
+
+  vie_remb_->RemoveReceiveChannel(&rtp);
+  vie_remb_->RemoveRembSender(&rtp);
+}
+
+// Only register receiving modules and make sure we fallback to trigger a REMB
+// packet on this one.
+TEST_F(ViERembTest, NoSendingRtpModule) {
+  MockRtpRtcp rtp;
+  vie_remb_->AddReceiveChannel(&rtp);
+
+  unsigned int bitrate_estimate = 456;
+  unsigned int ssrc[] = { 1234 };
+
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate);
+  EXPECT_CALL(rtp, RemoteSSRC())
+      .WillRepeatedly(Return(ssrc[0]));
+
+  // Call process to get a first estimate.
+  SleepMs(1010);
+  EXPECT_CALL(rtp, SetREMBData(_, _, _))
+      .Times(1);
+  vie_remb_->Process();
+
+  // Lower the estimate to trigger a new packet REMB packet.
+  bitrate_estimate = bitrate_estimate - 100;
+  EXPECT_CALL(rtp, SetREMBData(_, _, _))
+      .Times(1);
+  vie_remb_->OnReceiveBitrateChanged(ssrc[0], bitrate_estimate);
+  vie_remb_->Process();
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_render_impl.cc b/src/video_engine/vie_render_impl.cc
new file mode 100644
index 0000000..c667d4d
--- /dev/null
+++ b/src/video_engine/vie_render_impl.cc
@@ -0,0 +1,398 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_render_impl.h"
+
+#include "engine_configurations.h"  // NOLINT
+#include "modules/video_render/main/interface/video_render.h"
+#include "modules/video_render/main/interface/video_render_defines.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/vie_capturer.h"
+#include "video_engine/vie_channel.h"
+#include "video_engine/vie_channel_manager.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_frame_provider_base.h"
+#include "video_engine/vie_impl.h"
+#include "video_engine/vie_input_manager.h"
+#include "video_engine/vie_render_manager.h"
+#include "video_engine/vie_renderer.h"
+#include "video_engine/vie_shared_data.h"
+
+namespace webrtc {
+
+ViERender* ViERender::GetInterface(VideoEngine* video_engine) {
+#ifdef WEBRTC_VIDEO_ENGINE_RENDER_API
+  if (!video_engine) {
+    return NULL;
+  }
+  VideoEngineImpl* vie_impl = reinterpret_cast<VideoEngineImpl*>(video_engine);
+  ViERenderImpl* vie_render_impl = vie_impl;
+  // Increase ref count.
+  (*vie_render_impl)++;
+  return vie_render_impl;
+#else
+  return NULL;
+#endif
+}
+
+int ViERenderImpl::Release() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
+               "ViERender::Release()");
+  // Decrease ref count
+  (*this)--;
+  WebRtc_Word32 ref_count = GetCount();
+  if (ref_count < 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
+                 "ViERender release too many times");
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
+               "ViERender reference count: %d", ref_count);
+  return ref_count;
+}
+
+ViERenderImpl::ViERenderImpl(ViESharedData* shared_data)
+    : shared_data_(shared_data) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViERenderImpl::ViERenderImpl() Ctor");
+}
+
+ViERenderImpl::~ViERenderImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViERenderImpl::~ViERenderImpl() Dtor");
+}
+
+int ViERenderImpl::RegisterVideoRenderModule(
+  VideoRender& render_module) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s (&render_module: %p)", __FUNCTION__, &render_module);
+  if (shared_data_->render_manager()->RegisterVideoRenderModule(
+      &render_module) != 0) {
+    shared_data_->SetLastError(kViERenderUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERenderImpl::DeRegisterVideoRenderModule(
+  VideoRender& render_module) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s (&render_module: %p)", __FUNCTION__, &render_module);
+  if (shared_data_->render_manager()->DeRegisterVideoRenderModule(
+      &render_module) != 0) {
+    // Error logging is done in ViERenderManager::DeRegisterVideoRenderModule.
+    shared_data_->SetLastError(kViERenderUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERenderImpl::AddRenderer(const int render_id, void* window,
+                               const unsigned int z_order, const float left,
+                               const float top, const float right,
+                               const float bottom) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s (render_id: %d,  window: 0x%p, z_order: %u, left: %f, "
+               "top: %f, right: %f, bottom: %f)",
+               __FUNCTION__, render_id, window, z_order, left, top, right,
+               bottom);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  {
+    ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+    if (rs.Renderer(render_id)) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                   "%s - Renderer already exist %d.", __FUNCTION__,
+                   render_id);
+      shared_data_->SetLastError(kViERenderAlreadyExists);
+      return -1;
+    }
+  }
+  if (render_id >= kViEChannelIdBase && render_id <= kViEChannelIdMax) {
+    // This is a channel.
+    ViEChannelManagerScoped cm(*(shared_data_->channel_manager()));
+    ViEFrameProviderBase* frame_provider = cm.Channel(render_id);
+    if (!frame_provider) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                   "%s: FrameProvider id %d doesn't exist", __FUNCTION__,
+                   render_id);
+      shared_data_->SetLastError(kViERenderInvalidRenderId);
+      return -1;
+    }
+    ViERenderer* renderer = shared_data_->render_manager()->AddRenderStream(
+        render_id, window, z_order, left, top, right, bottom);
+    if (!renderer) {
+      shared_data_->SetLastError(kViERenderUnknownError);
+      return -1;
+    }
+    return frame_provider->RegisterFrameCallback(render_id, renderer);
+  } else {
+    // Camera or file.
+    ViEInputManagerScoped is(*(shared_data_->input_manager()));
+    ViEFrameProviderBase* frame_provider = is.FrameProvider(render_id);
+    if (!frame_provider) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                   "%s: FrameProvider id %d doesn't exist", __FUNCTION__,
+                   render_id);
+      shared_data_->SetLastError(kViERenderInvalidRenderId);
+      return -1;
+    }
+    ViERenderer* renderer = shared_data_->render_manager()->AddRenderStream(
+        render_id, window, z_order, left, top, right, bottom);
+    if (!renderer) {
+      shared_data_->SetLastError(kViERenderUnknownError);
+      return -1;
+    }
+    return frame_provider->RegisterFrameCallback(render_id, renderer);
+  }
+}
+
+int ViERenderImpl::RemoveRenderer(const int render_id) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, ViEId(shared_data_->instance_id()),
+               "%s(render_id: %d)", __FUNCTION__, render_id);
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+
+  ViERenderer* renderer = NULL;
+  {
+    ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+    renderer = rs.Renderer(render_id);
+    if (!renderer) {
+      WEBRTC_TRACE(kTraceWarning, kTraceVideo,
+                   ViEId(shared_data_->instance_id()),
+                   "%s No render exist with render_id: %d", __FUNCTION__,
+                   render_id);
+      shared_data_->SetLastError(kViERenderInvalidRenderId);
+      return -1;
+    }
+    // Leave the scope lock since we don't want to lock two managers
+    // simultanousely.
+  }
+  if (render_id >= kViEChannelIdBase && render_id <= kViEChannelIdMax) {
+    // This is a channel.
+    ViEChannelManagerScoped cm(*(shared_data_->channel_manager()));
+    ViEChannel* channel = cm.Channel(render_id);
+    if (!channel) {
+      WEBRTC_TRACE(kTraceWarning, kTraceVideo,
+                   ViEId(shared_data_->instance_id()),
+                   "%s: no channel with id %d exists ", __FUNCTION__,
+                   render_id);
+      shared_data_->SetLastError(kViERenderInvalidRenderId);
+      return -1;
+    }
+    channel->DeregisterFrameCallback(renderer);
+  } else {
+    // Provider owned by inputmanager, i.e. file or capture device.
+    ViEInputManagerScoped is(*(shared_data_->input_manager()));
+    ViEFrameProviderBase* provider = is.FrameProvider(render_id);
+    if (!provider) {
+      WEBRTC_TRACE(kTraceWarning, kTraceVideo,
+                   ViEId(shared_data_->instance_id()),
+                   "%s: no provider with id %d exists ", __FUNCTION__,
+                   render_id);
+      shared_data_->SetLastError(kViERenderInvalidRenderId);
+      return -1;
+    }
+    provider->DeregisterFrameCallback(renderer);
+  }
+  if (shared_data_->render_manager()->RemoveRenderStream(render_id) != 0) {
+    shared_data_->SetLastError(kViERenderUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERenderImpl::StartRender(const int render_id) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), render_id),
+               "%s(channel: %d)", __FUNCTION__, render_id);
+  ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+  ViERenderer* renderer = rs.Renderer(render_id);
+  if (!renderer) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), render_id),
+                 "%s: No renderer with render Id %d exist.", __FUNCTION__,
+                 render_id);
+    shared_data_->SetLastError(kViERenderInvalidRenderId);
+    return -1;
+  }
+  if (renderer->StartRender() != 0) {
+    shared_data_->SetLastError(kViERenderUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERenderImpl::StopRender(const int render_id) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), render_id),
+               "%s(channel: %d)", __FUNCTION__, render_id);
+  ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+  ViERenderer* renderer = rs.Renderer(render_id);
+  if (!renderer) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), render_id),
+                 "%s: No renderer with render_id %d exist.", __FUNCTION__,
+                 render_id);
+    shared_data_->SetLastError(kViERenderInvalidRenderId);
+    return -1;
+  }
+  if (renderer->StopRender() != 0) {
+    shared_data_->SetLastError(kViERenderUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERenderImpl::ConfigureRender(int render_id, const unsigned int z_order,
+                                   const float left, const float top,
+                                   const float right, const float bottom) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), render_id),
+               "%s(channel: %d)", __FUNCTION__, render_id);
+  ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+  ViERenderer* renderer = rs.Renderer(render_id);
+  if (!renderer) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), render_id),
+                 "%s: No renderer with render_id %d exist.", __FUNCTION__,
+                 render_id);
+    shared_data_->SetLastError(kViERenderInvalidRenderId);
+    return -1;
+  }
+
+  if (renderer->ConfigureRenderer(z_order, left, top, right, bottom) != 0) {
+    shared_data_->SetLastError(kViERenderUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERenderImpl::MirrorRenderStream(const int render_id, const bool enable,
+                                      const bool mirror_xaxis,
+                                      const bool mirror_yaxis) {
+  ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+  ViERenderer* renderer = rs.Renderer(render_id);
+  if (!renderer) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), render_id),
+                 "%s: No renderer with render_id %d exist.", __FUNCTION__,
+                 render_id);
+    shared_data_->SetLastError(kViERenderInvalidRenderId);
+    return -1;
+  }
+  if (renderer->EnableMirroring(render_id, enable, mirror_xaxis, mirror_yaxis)
+      != 0) {
+    shared_data_->SetLastError(kViERenderUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERenderImpl::AddRenderer(const int render_id,
+                               RawVideoType video_input_format,
+                               ExternalRenderer* external_renderer) {
+  // Check if the client requested a format that we can convert the frames to.
+  if (video_input_format != kVideoI420 &&
+      video_input_format != kVideoYV12 &&
+      video_input_format != kVideoYUY2 &&
+      video_input_format != kVideoUYVY &&
+      video_input_format != kVideoARGB &&
+      video_input_format != kVideoRGB24 &&
+      video_input_format != kVideoRGB565 &&
+      video_input_format != kVideoARGB4444 &&
+      video_input_format != kVideoARGB1555) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), render_id),
+                 "%s: Unsupported video frame format requested",
+                 __FUNCTION__, render_id);
+    shared_data_->SetLastError(kViERenderInvalidFrameFormat);
+    return -1;
+  }
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  {
+    // Verify the renderer doesn't exist.
+    ViERenderManagerScoped rs(*(shared_data_->render_manager()));
+    if (rs.Renderer(render_id)) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                   "%s - Renderer already exist %d.", __FUNCTION__,
+                   render_id);
+      shared_data_->SetLastError(kViERenderAlreadyExists);
+      return -1;
+    }
+  }
+  if (render_id >= kViEChannelIdBase && render_id <= kViEChannelIdMax) {
+    // This is a channel.
+    ViEChannelManagerScoped cm(*(shared_data_->channel_manager()));
+    ViEFrameProviderBase* frame_provider = cm.Channel(render_id);
+    if (!frame_provider) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                   "%s: FrameProvider id %d doesn't exist", __FUNCTION__,
+                   render_id);
+      shared_data_->SetLastError(kViERenderInvalidRenderId);
+      return -1;
+    }
+    ViERenderer* renderer = shared_data_->render_manager()->AddRenderStream(
+        render_id, NULL, 0, 0.0f, 0.0f, 1.0f, 1.0f);
+    if (!renderer) {
+      shared_data_->SetLastError(kViERenderUnknownError);
+      return -1;
+    }
+    if (renderer->SetExternalRenderer(render_id, video_input_format,
+                                      external_renderer) == -1) {
+      shared_data_->SetLastError(kViERenderUnknownError);
+      return -1;
+    }
+
+    return frame_provider->RegisterFrameCallback(render_id, renderer);
+  } else {
+    // Camera or file.
+    ViEInputManagerScoped is(*(shared_data_->input_manager()));
+    ViEFrameProviderBase* frame_provider = is.FrameProvider(render_id);
+    if (!frame_provider) {
+      WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                   "%s: FrameProvider id %d doesn't exist", __FUNCTION__,
+                   render_id);
+      shared_data_->SetLastError(kViERenderInvalidRenderId);
+      return -1;
+    }
+    ViERenderer* renderer = shared_data_->render_manager()->AddRenderStream(
+        render_id, NULL, 0, 0.0f, 0.0f, 1.0f, 1.0f);
+    if (!renderer) {
+      shared_data_->SetLastError(kViERenderUnknownError);
+      return -1;
+    }
+    if (renderer->SetExternalRenderer(render_id, video_input_format,
+                                      external_renderer) == -1) {
+      shared_data_->SetLastError(kViERenderUnknownError);
+      return -1;
+    }
+    return frame_provider->RegisterFrameCallback(render_id, renderer);
+  }
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_render_impl.h b/src/video_engine/vie_render_impl.h
new file mode 100644
index 0000000..c0cf916
--- /dev/null
+++ b/src/video_engine/vie_render_impl.h
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_RENDER_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_RENDER_IMPL_H_
+
+#include "modules/video_render/main/interface/video_render_defines.h"
+#include "typedefs.h"  // NOLINT
+#include "video_engine/include/vie_render.h"
+#include "video_engine/vie_ref_count.h"
+
+namespace webrtc {
+
+class ViESharedData;
+
+class ViERenderImpl
+    : public ViERender,
+      public ViERefCount {
+ public:
+  // Implements ViERender
+  virtual int Release();
+  virtual int RegisterVideoRenderModule(VideoRender& render_module);  // NOLINT
+  virtual int DeRegisterVideoRenderModule(
+      VideoRender& render_module);  // NOLINT
+  virtual int AddRenderer(const int render_id, void* window,
+                          const unsigned int z_order, const float left,
+                          const float top, const float right,
+                          const float bottom);
+  virtual int RemoveRenderer(const int render_id);
+  virtual int StartRender(const int render_id);
+  virtual int StopRender(const int render_id);
+  virtual int ConfigureRender(int render_id, const unsigned int z_order,
+                              const float left, const float top,
+                              const float right, const float bottom);
+  virtual int MirrorRenderStream(const int render_id, const bool enable,
+                                 const bool mirror_xaxis,
+                                 const bool mirror_yaxis);
+  virtual int AddRenderer(const int render_id, RawVideoType video_input_format,
+                          ExternalRenderer* renderer);
+
+ protected:
+  explicit ViERenderImpl(ViESharedData* shared_data);
+  virtual ~ViERenderImpl();
+
+ private:
+  ViESharedData* shared_data_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_RENDER_IMPL_H_
diff --git a/src/video_engine/vie_render_manager.cc b/src/video_engine/vie_render_manager.cc
new file mode 100644
index 0000000..2ec17c3
--- /dev/null
+++ b/src/video_engine/vie_render_manager.cc
@@ -0,0 +1,230 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_render_manager.h"
+
+#include "engine_configurations.h"  // NOLINT
+#include "modules/video_render/main/interface/video_render.h"
+#include "modules/video_render/main/interface/video_render_defines.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/rw_lock_wrapper.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_renderer.h"
+
+namespace webrtc {
+
+ViERenderManagerScoped::ViERenderManagerScoped(
+    const ViERenderManager& vie_render_manager)
+    : ViEManagerScopedBase(vie_render_manager) {
+}
+
+ViERenderer* ViERenderManagerScoped::Renderer(WebRtc_Word32 render_id) const {
+  return static_cast<const ViERenderManager*>(vie_manager_)->ViERenderPtr(
+           render_id);
+}
+
+ViERenderManager::ViERenderManager(WebRtc_Word32 engine_id)
+    : list_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      engine_id_(engine_id),
+      use_external_render_module_(false) {
+  WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, ViEId(engine_id),
+               "ViERenderManager::ViERenderManager(engine_id: %d) - "
+               "Constructor", engine_id);
+}
+
+ViERenderManager::~ViERenderManager() {
+  WEBRTC_TRACE(webrtc::kTraceMemory, webrtc::kTraceVideo, ViEId(engine_id_),
+               "ViERenderManager Destructor, engine_id: %d", engine_id_);
+
+  while (stream_to_vie_renderer_.Size() != 0) {
+    MapItem* item = stream_to_vie_renderer_.First();
+    assert(item);
+    const WebRtc_Word32 render_id = item->GetId();
+    // The renderer is delete in RemoveRenderStream.
+    item = NULL;
+    RemoveRenderStream(render_id);
+  }
+}
+
+WebRtc_Word32 ViERenderManager::RegisterVideoRenderModule(
+    VideoRender* render_module) {
+  // See if there is already a render module registered for the window that
+  // the registrant render module is associated with.
+  VideoRender* current_module = FindRenderModule(render_module->Window());
+  if (current_module) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "A module is already registered for this window (window=%p, "
+                 "current module=%p, registrant module=%p.",
+                 render_module->Window(), current_module, render_module);
+    return -1;
+  }
+
+  // Register module.
+  render_list_.PushBack(static_cast<void*>(render_module));
+  use_external_render_module_ = true;
+  return 0;
+}
+
+WebRtc_Word32 ViERenderManager::DeRegisterVideoRenderModule(
+    VideoRender* render_module) {
+  // Check if there are streams in the module.
+  WebRtc_UWord32 n_streams = render_module->GetNumIncomingRenderStreams();
+  if (n_streams != 0) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "There are still %d streams in this module, cannot "
+                 "de-register", n_streams);
+    return -1;
+  }
+
+  // Erase the render module from the map.
+  ListItem* list_item = NULL;
+  bool found = false;
+  for (list_item = render_list_.First(); list_item != NULL;
+       list_item = render_list_.Next(list_item)) {
+    if (render_module == static_cast<VideoRender*>(list_item->GetItem())) {
+      // We've found our renderer.
+      render_list_.Erase(list_item);
+      found = true;
+      break;
+    }
+  }
+  if (!found) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "Module not registered");
+    return -1;
+  }
+  return 0;
+}
+
+ViERenderer* ViERenderManager::AddRenderStream(const WebRtc_Word32 render_id,
+                                               void* window,
+                                               const WebRtc_UWord32 z_order,
+                                               const float left,
+                                               const float top,
+                                               const float right,
+                                               const float bottom) {
+  CriticalSectionScoped cs(list_cs_.get());
+
+  if (stream_to_vie_renderer_.Find(render_id) != NULL) {
+    // This stream is already added to a renderer, not allowed!
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "Render stream already exists");
+    return NULL;
+  }
+
+  // Get the render module for this window.
+  VideoRender* render_module = FindRenderModule(window);
+  if (render_module == NULL) {
+    // No render module for this window, create a new one.
+    render_module = VideoRender::CreateVideoRender(ViEModuleId(engine_id_, -1),
+                                                  window, false);
+    if (!render_module) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, ViEId(engine_id_),
+                   "Could not create new render module");
+      return NULL;
+    }
+    render_list_.PushBack(static_cast<void*>(render_module));
+  }
+
+  ViERenderer* vie_renderer = ViERenderer::CreateViERenderer(render_id,
+                                                             engine_id_,
+                                                             *render_module,
+                                                             *this, z_order,
+                                                             left, top, right,
+                                                             bottom);
+  if (!vie_renderer) {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(engine_id_, render_id),
+                 "Could not create new render stream");
+    return NULL;
+  }
+  stream_to_vie_renderer_.Insert(render_id, vie_renderer);
+  return vie_renderer;
+}
+
+WebRtc_Word32 ViERenderManager::RemoveRenderStream(
+    const WebRtc_Word32 render_id) {
+  // We need exclusive right to the items in the render manager to delete a
+  // stream.
+  ViEManagerWriteScoped scope(this);
+
+  CriticalSectionScoped cs(list_cs_.get());
+  MapItem* map_item = stream_to_vie_renderer_.Find(render_id);
+  if (!map_item) {
+    // No such stream
+    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, ViEId(engine_id_),
+                 "No renderer for this stream found, channel_id");
+    return 0;
+  }
+
+  ViERenderer* vie_renderer = static_cast<ViERenderer*>(map_item->GetItem());
+  assert(vie_renderer);
+
+  // Get the render module pointer for this vie_render object.
+  VideoRender& renderer = vie_renderer->RenderModule();
+
+  // Delete the vie_render.
+  // This deletes the stream in the render module.
+  delete vie_renderer;
+
+  // Remove from the stream map.
+  stream_to_vie_renderer_.Erase(map_item);
+
+  // Check if there are other streams in the module.
+  if (!use_external_render_module_ &&
+      renderer.GetNumIncomingRenderStreams() == 0) {
+    // Erase the render module from the map.
+    ListItem* list_item = NULL;
+    for (list_item = render_list_.First(); list_item != NULL;
+         list_item = render_list_.Next(list_item)) {
+      if (&renderer == static_cast<VideoRender*>(list_item->GetItem())) {
+        // We've found our renderer.
+        render_list_.Erase(list_item);
+        break;
+      }
+    }
+    // Destroy the module.
+    VideoRender::DestroyVideoRender(&renderer);
+  }
+  return 0;
+}
+
+VideoRender* ViERenderManager::FindRenderModule(void* window) {
+  VideoRender* renderer = NULL;
+  ListItem* list_item = NULL;
+  for (list_item = render_list_.First(); list_item != NULL;
+       list_item = render_list_.Next(list_item)) {
+    renderer = static_cast<VideoRender*>(list_item->GetItem());
+    if (renderer == NULL) {
+      break;
+    }
+    if (renderer->Window() == window) {
+      // We've found the render module.
+      break;
+    }
+    renderer = NULL;
+  }
+  return renderer;
+}
+
+ViERenderer* ViERenderManager::ViERenderPtr(WebRtc_Word32 render_id) const {
+  ViERenderer* renderer = NULL;
+  MapItem* map_item = stream_to_vie_renderer_.Find(render_id);
+  if (!map_item) {
+    // No such stream in any renderer.
+    return NULL;
+  }
+  renderer = static_cast<ViERenderer*>(map_item->GetItem());
+
+  return renderer;
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_render_manager.h b/src/video_engine/vie_render_manager.h
new file mode 100644
index 0000000..ebdf862
--- /dev/null
+++ b/src/video_engine/vie_render_manager.h
@@ -0,0 +1,73 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_RENDER_MANAGER_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_RENDER_MANAGER_H_
+
+#include "system_wrappers/interface/list_wrapper.h"
+#include "system_wrappers/interface/map_wrapper.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "typedefs.h"  // NOLINT
+
+#include "video_engine/vie_manager_base.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class RWLockWrapper;
+class VideoRender;
+class VideoRenderCallback;
+class ViERenderer;
+
+class ViERenderManager : private ViEManagerBase {
+  friend class ViERenderManagerScoped;
+ public:
+  explicit ViERenderManager(WebRtc_Word32 engine_id);
+  ~ViERenderManager();
+
+  WebRtc_Word32 RegisterVideoRenderModule(VideoRender* render_module);
+  WebRtc_Word32 DeRegisterVideoRenderModule(VideoRender* render_module);
+
+  ViERenderer* AddRenderStream(const WebRtc_Word32 render_id,
+                               void* window,
+                               const WebRtc_UWord32 z_order,
+                               const float left,
+                               const float top,
+                               const float right,
+                               const float bottom);
+
+  WebRtc_Word32 RemoveRenderStream(WebRtc_Word32 render_id);
+
+ private:
+  // Returns a pointer to the render module if it exists in the render list.
+  // Assumed protected.
+  VideoRender* FindRenderModule(void* window);
+
+  // Methods used by ViERenderScoped.
+  ViERenderer* ViERenderPtr(WebRtc_Word32 render_id) const;
+
+  scoped_ptr<CriticalSectionWrapper> list_cs_;
+  WebRtc_Word32 engine_id_;
+  MapWrapper stream_to_vie_renderer_;  // Protected by ViEManagerBase.
+  ListWrapper render_list_;
+  bool use_external_render_module_;
+};
+
+class ViERenderManagerScoped: private ViEManagerScopedBase {
+ public:
+  explicit ViERenderManagerScoped(const ViERenderManager& vie_render_manager);
+
+  // Returns a pointer to the ViERender object.
+  ViERenderer* Renderer(WebRtc_Word32 render_id) const;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_RENDER_MANAGER_H_
diff --git a/src/video_engine/vie_renderer.cc b/src/video_engine/vie_renderer.cc
new file mode 100644
index 0000000..f1c6f8c
--- /dev/null
+++ b/src/video_engine/vie_renderer.cc
@@ -0,0 +1,225 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_renderer.h"
+
+#include "common_video/libyuv/include/webrtc_libyuv.h"
+#include "modules/video_render/main/interface/video_render.h"
+#include "modules/video_render/main/interface/video_render_defines.h"
+#include "video_engine/vie_render_manager.h"
+
+namespace webrtc {
+
+ViERenderer* ViERenderer::CreateViERenderer(const WebRtc_Word32 render_id,
+                                            const WebRtc_Word32 engine_id,
+                                            VideoRender& render_module,
+                                            ViERenderManager& render_manager,
+                                            const WebRtc_UWord32 z_order,
+                                            const float left,
+                                            const float top,
+                                            const float right,
+                                            const float bottom) {
+  ViERenderer* self = new ViERenderer(render_id, engine_id, render_module,
+                                      render_manager);
+  if (!self || self->Init(z_order, left, top, right, bottom) != 0) {
+    delete self;
+    self = NULL;
+  }
+  return self;
+}
+
+ViERenderer::~ViERenderer(void) {
+  if (render_callback_)
+    render_module_.DeleteIncomingRenderStream(render_id_);
+
+  if (incoming_external_callback_)
+    delete incoming_external_callback_;
+}
+
+WebRtc_Word32 ViERenderer::StartRender() {
+  return render_module_.StartRender(render_id_);
+}
+WebRtc_Word32 ViERenderer::StopRender() {
+  return render_module_.StopRender(render_id_);
+}
+
+WebRtc_Word32 ViERenderer::GetLastRenderedFrame(const WebRtc_Word32 renderID,
+                                                VideoFrame& video_frame) {
+  return render_module_.GetLastRenderedFrame(renderID, video_frame);
+}
+
+WebRtc_Word32 ViERenderer::ConfigureRenderer(const unsigned int z_order,
+                                             const float left,
+                                             const float top,
+                                             const float right,
+                                             const float bottom) {
+  return render_module_.ConfigureRenderer(render_id_, z_order, left, top, right,
+                                          bottom);
+}
+
+VideoRender& ViERenderer::RenderModule() {
+  return render_module_;
+}
+
+WebRtc_Word32 ViERenderer::EnableMirroring(const WebRtc_Word32 render_id,
+                                           const bool enable,
+                                           const bool mirror_xaxis,
+                                           const bool mirror_yaxis) {
+  return render_module_.MirrorRenderStream(render_id, enable, mirror_xaxis,
+                                           mirror_yaxis);
+}
+
+WebRtc_Word32 ViERenderer::SetTimeoutImage(const VideoFrame& timeout_image,
+                                           const WebRtc_Word32 timeout_value) {
+  return render_module_.SetTimeoutImage(render_id_, timeout_image,
+                                        timeout_value);
+}
+
+WebRtc_Word32  ViERenderer::SetRenderStartImage(const VideoFrame& start_image) {
+  return render_module_.SetStartImage(render_id_, start_image);
+}
+
+WebRtc_Word32 ViERenderer::SetExternalRenderer(
+    const WebRtc_Word32 render_id,
+    RawVideoType video_input_format,
+    ExternalRenderer* external_renderer) {
+  if (!incoming_external_callback_)
+    return -1;
+
+  incoming_external_callback_->SetViEExternalRenderer(external_renderer,
+                                                      video_input_format);
+  return render_module_.AddExternalRenderCallback(render_id,
+                                                  incoming_external_callback_);
+}
+
+ViERenderer::ViERenderer(const WebRtc_Word32 render_id,
+                         const WebRtc_Word32 engine_id,
+                         VideoRender& render_module,
+                         ViERenderManager& render_manager)
+    : render_id_(render_id),
+      render_module_(render_module),
+      render_manager_(render_manager),
+      render_callback_(NULL),
+      incoming_external_callback_(new ViEExternalRendererImpl()) {
+}
+
+WebRtc_Word32 ViERenderer::Init(const WebRtc_UWord32 z_order,
+                                const float left,
+                                const float top,
+                                const float right,
+                                const float bottom) {
+  render_callback_ =
+      static_cast<VideoRenderCallback*>(render_module_.AddIncomingRenderStream(
+          render_id_, z_order, left, top, right, bottom));
+  if (!render_callback_) {
+    // Logging done.
+    return -1;
+  }
+  return 0;
+}
+
+void ViERenderer::DeliverFrame(int id,
+                               VideoFrame* video_frame,
+                               int num_csrcs,
+                               const WebRtc_UWord32 CSRC[kRtpCsrcSize]) {
+  render_callback_->RenderFrame(render_id_, *video_frame);
+}
+
+void ViERenderer::DelayChanged(int id, int frame_delay) {}
+
+int ViERenderer::GetPreferedFrameSettings(int* width,
+                                          int* height,
+                                          int* frame_rate) {
+    return -1;
+}
+
+void ViERenderer::ProviderDestroyed(int id) {
+  // Remove the render stream since the provider is destroyed.
+  render_manager_.RemoveRenderStream(render_id_);
+}
+
+ViEExternalRendererImpl::ViEExternalRendererImpl()
+    : external_renderer_(NULL),
+      external_renderer_format_(kVideoUnknown),
+      external_renderer_width_(0),
+      external_renderer_height_(0),
+      converted_frame_(new VideoFrame()) {
+}
+
+int ViEExternalRendererImpl::SetViEExternalRenderer(
+    ExternalRenderer* external_renderer,
+    RawVideoType video_input_format) {
+  external_renderer_ = external_renderer;
+  external_renderer_format_ = video_input_format;
+  return 0;
+}
+
+WebRtc_Word32 ViEExternalRendererImpl::RenderFrame(
+    const WebRtc_UWord32 stream_id,
+    VideoFrame&   video_frame) {
+  VideoFrame* out_frame = converted_frame_.get();
+
+  // Convert to requested format.
+  VideoType type =
+      RawVideoTypeToCommonVideoVideoType(external_renderer_format_);
+  int buffer_size = CalcBufferSize(type, video_frame.Width(),
+                                   video_frame.Height());
+  if (buffer_size <= 0) {
+    // Unsupported video format.
+    assert(false);
+    return -1;
+  }
+  converted_frame_->VerifyAndAllocate(buffer_size);
+
+  switch (external_renderer_format_) {
+    case kVideoI420:
+      out_frame = &video_frame;
+      break;
+    case kVideoYV12:
+    case kVideoYUY2:
+    case kVideoUYVY:
+    case kVideoARGB:
+    case kVideoRGB24:
+    case kVideoRGB565:
+    case kVideoARGB4444:
+    case kVideoARGB1555 :
+      {
+        ConvertFromI420(video_frame.Buffer(), video_frame.Width(), type, 0,
+                        video_frame.Width(), video_frame.Height(),
+                        converted_frame_->Buffer());
+      }
+      break;
+    case kVideoIYUV:
+      // no conversion available
+      break;
+    default:
+      assert(false);
+      out_frame = NULL;
+      break;
+  }
+
+  if (external_renderer_width_ != video_frame.Width() ||
+      external_renderer_height_ != video_frame.Height()) {
+    external_renderer_width_ = video_frame.Width();
+    external_renderer_height_ = video_frame.Height();
+    external_renderer_->FrameSizeChange(external_renderer_width_,
+                                        external_renderer_height_, stream_id);
+  }
+
+  if (out_frame) {
+    external_renderer_->DeliverFrame(out_frame->Buffer(),
+                                     out_frame->Length(),
+                                     video_frame.TimeStamp(),
+                                     video_frame.RenderTimeMs());
+  }
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_renderer.h b/src/video_engine/vie_renderer.h
new file mode 100644
index 0000000..dd216ea
--- /dev/null
+++ b/src/video_engine/vie_renderer.h
@@ -0,0 +1,116 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_RENDERER_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_RENDERER_H_
+
+#include "modules/video_render/main/interface/video_render_defines.h"
+#include "system_wrappers/interface/map_wrapper.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "video_engine/include/vie_render.h"
+#include "video_engine/vie_frame_provider_base.h"
+
+namespace webrtc {
+
+class VideoRender;
+class VideoRenderCallback;
+class ViERenderManager;
+
+class ViEExternalRendererImpl : public VideoRenderCallback {
+ public:
+  ViEExternalRendererImpl();
+  virtual ~ViEExternalRendererImpl() {}
+
+  int SetViEExternalRenderer(ExternalRenderer* external_renderer,
+                             RawVideoType video_input_format);
+
+  // Implements VideoRenderCallback.
+  virtual WebRtc_Word32 RenderFrame(const WebRtc_UWord32 stream_id,
+                                    VideoFrame& video_frame);
+
+ private:
+  ExternalRenderer* external_renderer_;
+  RawVideoType external_renderer_format_;
+  WebRtc_UWord32 external_renderer_width_;
+  WebRtc_UWord32 external_renderer_height_;
+  scoped_ptr<VideoFrame> converted_frame_;
+};
+
+class ViERenderer: public ViEFrameCallback {
+ public:
+  static ViERenderer* CreateViERenderer(const WebRtc_Word32 render_id,
+                                        const WebRtc_Word32 engine_id,
+                                        VideoRender& render_module,
+                                        ViERenderManager& render_manager,
+                                        const WebRtc_UWord32 z_order,
+                                        const float left,
+                                        const float top,
+                                        const float right,
+                                        const float bottom);
+  ~ViERenderer(void);
+
+  WebRtc_Word32 StartRender();
+  WebRtc_Word32 StopRender();
+
+  WebRtc_Word32 GetLastRenderedFrame(const WebRtc_Word32 renderID,
+                                     VideoFrame& video_frame);
+
+  WebRtc_Word32 ConfigureRenderer(const unsigned int z_order,
+                                  const float left,
+                                  const float top,
+                                  const float right,
+                                  const float bottom);
+
+  VideoRender& RenderModule();
+
+  WebRtc_Word32 EnableMirroring(const WebRtc_Word32 render_id,
+                                const bool enable,
+                                const bool mirror_xaxis,
+                                const bool mirror_yaxis);
+
+  WebRtc_Word32 SetTimeoutImage(const VideoFrame& timeout_image,
+                                const WebRtc_Word32 timeout_value);
+  WebRtc_Word32 SetRenderStartImage(const VideoFrame& start_image);
+  WebRtc_Word32 SetExternalRenderer(const WebRtc_Word32 render_id,
+                                    RawVideoType video_input_format,
+                                    ExternalRenderer* external_renderer);
+
+ private:
+  ViERenderer(const WebRtc_Word32 render_id, const WebRtc_Word32 engine_id,
+                VideoRender& render_module,
+                ViERenderManager& render_manager);
+
+  WebRtc_Word32 Init(const WebRtc_UWord32 z_order,
+                     const float left,
+                     const float top,
+                     const float right,
+                     const float bottom);
+
+  // Implement ViEFrameCallback
+  virtual void DeliverFrame(int id,
+                            VideoFrame* video_frame,
+                            int num_csrcs = 0,
+                            const WebRtc_UWord32 CSRC[kRtpCsrcSize] = NULL);
+  virtual void DelayChanged(int id, int frame_delay);
+  virtual int GetPreferedFrameSettings(int* width,
+                                       int* height,
+                                       int* frame_rate);
+  virtual void ProviderDestroyed(int id);
+
+  WebRtc_UWord32 render_id_;
+  VideoRender& render_module_;
+  ViERenderManager& render_manager_;
+  VideoRenderCallback* render_callback_;
+  ViEExternalRendererImpl* incoming_external_callback_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_RENDERER_H_
diff --git a/src/video_engine/vie_rtp_rtcp_impl.cc b/src/video_engine/vie_rtp_rtcp_impl.cc
new file mode 100644
index 0000000..b4b4746
--- /dev/null
+++ b/src/video_engine/vie_rtp_rtcp_impl.cc
@@ -0,0 +1,955 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_rtp_rtcp_impl.h"
+
+#include "engine_configurations.h"  // NOLINT
+#include "system_wrappers/interface/file_wrapper.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/include/vie_errors.h"
+#include "video_engine/vie_channel.h"
+#include "video_engine/vie_channel_manager.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_encoder.h"
+#include "video_engine/vie_impl.h"
+#include "video_engine/vie_shared_data.h"
+
+namespace webrtc {
+
+// Helper methods for converting between module format and ViE API format.
+
+static RTCPMethod ViERTCPModeToRTCPMethod(ViERTCPMode api_mode) {
+  switch (api_mode) {
+    case kRtcpNone:
+      return kRtcpOff;
+
+    case kRtcpCompound_RFC4585:
+      return kRtcpCompound;
+
+    case kRtcpNonCompound_RFC5506:
+      return kRtcpNonCompound;
+  }
+  assert(false);
+  return kRtcpOff;
+}
+
+static ViERTCPMode RTCPMethodToViERTCPMode(RTCPMethod module_method) {
+  switch (module_method) {
+    case kRtcpOff:
+      return kRtcpNone;
+
+    case kRtcpCompound:
+      return kRtcpCompound_RFC4585;
+
+    case kRtcpNonCompound:
+      return kRtcpNonCompound_RFC5506;
+  }
+  assert(false);
+  return kRtcpNone;
+}
+
+static KeyFrameRequestMethod APIRequestToModuleRequest(
+  ViEKeyFrameRequestMethod api_method) {
+  switch (api_method) {
+    case kViEKeyFrameRequestNone:
+      return kKeyFrameReqFirRtp;
+
+    case kViEKeyFrameRequestPliRtcp:
+      return kKeyFrameReqPliRtcp;
+
+    case kViEKeyFrameRequestFirRtp:
+      return kKeyFrameReqFirRtp;
+
+    case kViEKeyFrameRequestFirRtcp:
+      return kKeyFrameReqFirRtcp;
+  }
+  assert(false);
+  return kKeyFrameReqFirRtp;
+}
+
+ViERTP_RTCP* ViERTP_RTCP::GetInterface(VideoEngine* video_engine) {
+#ifdef WEBRTC_VIDEO_ENGINE_RTP_RTCP_API
+  if (!video_engine) {
+    return NULL;
+  }
+  VideoEngineImpl* vie_impl = reinterpret_cast<VideoEngineImpl*>(video_engine);
+  ViERTP_RTCPImpl* vie_rtpimpl = vie_impl;
+  // Increase ref count.
+  (*vie_rtpimpl)++;
+  return vie_rtpimpl;
+#else
+  return NULL;
+#endif
+}
+
+int ViERTP_RTCPImpl::Release() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo, shared_data_->instance_id(),
+               "ViERTP_RTCP::Release()");
+  // Decrease ref count.
+  (*this)--;
+
+  WebRtc_Word32 ref_count = GetCount();
+  if (ref_count < 0) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVideo, shared_data_->instance_id(),
+                 "ViERTP_RTCP release too many times");
+    shared_data_->SetLastError(kViEAPIDoesNotExist);
+    return -1;
+  }
+  WEBRTC_TRACE(kTraceInfo, kTraceVideo, shared_data_->instance_id(),
+               "ViERTP_RTCP reference count: %d", ref_count);
+  return ref_count;
+}
+
+ViERTP_RTCPImpl::ViERTP_RTCPImpl(ViESharedData* shared_data)
+    : shared_data_(shared_data) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViERTP_RTCPImpl::ViERTP_RTCPImpl() Ctor");
+}
+
+ViERTP_RTCPImpl::~ViERTP_RTCPImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVideo, shared_data_->instance_id(),
+               "ViERTP_RTCPImpl::~ViERTP_RTCPImpl() Dtor");
+}
+
+int ViERTP_RTCPImpl::SetLocalSSRC(const int video_channel,
+                                  const unsigned int SSRC,
+                                  const StreamType usage,
+                                  const unsigned char simulcast_idx) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, SSRC: %d)", __FUNCTION__, video_channel, SSRC);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    // The channel doesn't exists
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->SetSSRC(SSRC, usage, simulcast_idx) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::SetRemoteSSRCType(const int videoChannel,
+                                       const StreamType usage,
+                                       const unsigned int SSRC) const {
+  WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideo,
+               ViEId(shared_data_->instance_id(), videoChannel),
+               "%s(channel: %d, usage:%d SSRC: 0x%x)",
+               __FUNCTION__, usage, videoChannel, SSRC);
+
+  // Get the channel
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* ptrViEChannel = cs.Channel(videoChannel);
+  if (ptrViEChannel == NULL) {
+    // The channel doesn't exists
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo,
+                 ViEId(shared_data_->instance_id(), videoChannel),
+                 "%s: Channel %d doesn't exist",
+                 __FUNCTION__, videoChannel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (ptrViEChannel->SetRemoteSSRCType(usage, SSRC) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::GetLocalSSRC(const int video_channel,
+                                  unsigned int& SSRC) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, SSRC: %d)", __FUNCTION__, video_channel, SSRC);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->GetLocalSSRC(&SSRC) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::GetRemoteSSRC(const int video_channel,
+                                   unsigned int& SSRC) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel, SSRC);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->GetRemoteSSRC(&SSRC) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::GetRemoteCSRCs(const int video_channel,
+                                    unsigned int CSRCs[kRtpCsrcSize]) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->GetRemoteCSRC(CSRCs) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::SetStartSequenceNumber(const int video_channel,
+                                            uint16_t sequence_number) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, sequence_number: %u)", __FUNCTION__,
+               video_channel, sequence_number);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->Sending()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d already sending.", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViERtpRtcpAlreadySending);
+    return -1;
+  }
+  if (vie_channel->SetStartSequenceNumber(sequence_number) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::SetRTCPStatus(const int video_channel,
+                                   const ViERTCPMode rtcp_mode) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, mode: %d)", __FUNCTION__, video_channel,
+               rtcp_mode);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+
+  RTCPMethod module_mode = ViERTCPModeToRTCPMethod(rtcp_mode);
+  if (vie_channel->SetRTCPMode(module_mode) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::GetRTCPStatus(const int video_channel,
+                                   ViERTCPMode& rtcp_mode) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel, rtcp_mode);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  RTCPMethod module_mode = kRtcpOff;
+  if (vie_channel->GetRTCPMode(&module_mode) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: could not get current RTCP mode", __FUNCTION__);
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  rtcp_mode = RTCPMethodToViERTCPMode(module_mode);
+  return 0;
+}
+
+int ViERTP_RTCPImpl::SetRTCPCName(const int video_channel,
+                                  const char rtcp_cname[KMaxRTCPCNameLength]) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, name: %s)", __FUNCTION__, video_channel,
+               rtcp_cname);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->Sending()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d already sending.", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViERtpRtcpAlreadySending);
+    return -1;
+  }
+  if (vie_channel->SetRTCPCName(rtcp_cname) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::GetRTCPCName(const int video_channel,
+                                  char rtcp_cname[KMaxRTCPCNameLength]) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->GetRTCPCName(rtcp_cname) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::GetRemoteRTCPCName(
+    const int video_channel,
+    char rtcp_cname[KMaxRTCPCNameLength]) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->GetRemoteRTCPCName(rtcp_cname) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::SendApplicationDefinedRTCPPacket(
+  const int video_channel,
+  const unsigned char sub_type,
+  unsigned int name,
+  const char* data,
+  uint16_t data_length_in_bytes) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, sub_type: %c, name: %d, data: x, length: %u)",
+               __FUNCTION__, video_channel, sub_type, name,
+               data_length_in_bytes);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (!vie_channel->Sending()) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d not sending", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpNotSending);
+    return -1;
+  }
+  RTCPMethod method;
+  if (vie_channel->GetRTCPMode(&method) != 0 || method == kRtcpOff) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: RTCP disabled on channel %d.", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViERtpRtcpRtcpDisabled);
+    return -1;
+  }
+  if (vie_channel->SendApplicationDefinedRTCPPacket(
+        sub_type, name, reinterpret_cast<const WebRtc_UWord8*>(data),
+        data_length_in_bytes) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::SetNACKStatus(const int video_channel, const bool enable) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, enable: %d)", __FUNCTION__, video_channel,
+               enable);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->SetNACKStatus(enable) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: failed for channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+
+  // Update the encoder
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Could not get encoder for channel %d", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  vie_encoder->UpdateProtectionMethod();
+  return 0;
+}
+
+int ViERTP_RTCPImpl::SetFECStatus(const int video_channel, const bool enable,
+                                  const unsigned char payload_typeRED,
+                                  const unsigned char payload_typeFEC) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, enable: %d, payload_typeRED: %u, "
+               "payloadTypeFEC: %u)",
+               __FUNCTION__, video_channel, enable, payload_typeRED,
+               payload_typeFEC);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->SetFECStatus(enable, payload_typeRED,
+                                payload_typeFEC) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: failed for channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  // Update the encoder.
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Could not get encoder for channel %d", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  vie_encoder->UpdateProtectionMethod();
+  return 0;
+}
+
+int ViERTP_RTCPImpl::SetHybridNACKFECStatus(
+    const int video_channel,
+    const bool enable,
+    const unsigned char payload_typeRED,
+    const unsigned char payload_typeFEC) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, enable: %d, payload_typeRED: %u, "
+               "payloadTypeFEC: %u)",
+               __FUNCTION__, video_channel, enable, payload_typeRED,
+               payload_typeFEC);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+
+  // Update the channel status with hybrid NACK FEC mode.
+  if (vie_channel->SetHybridNACKFECStatus(enable, payload_typeRED,
+                                          payload_typeFEC) != 0) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: failed for channel %d", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+
+  // Update the encoder.
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Could not get encoder for channel %d", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  vie_encoder->UpdateProtectionMethod();
+  return 0;
+}
+
+int ViERTP_RTCPImpl::SetKeyFrameRequestMethod(
+  const int video_channel,
+  const ViEKeyFrameRequestMethod method) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, method: %d)", __FUNCTION__, video_channel,
+               method);
+
+  // Get the channel.
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  KeyFrameRequestMethod module_method = APIRequestToModuleRequest(method);
+  if (vie_channel->SetKeyFrameRequestMethod(module_method) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::SetTMMBRStatus(const int video_channel,
+                                    const bool enable) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, enable: %d)", __FUNCTION__, video_channel,
+               enable);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->EnableTMMBR(enable) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::SetRembStatus(int video_channel, bool sender,
+                                   bool receiver) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "ViERTP_RTCPImpl::SetRembStatus(%d, %d, %d)", video_channel,
+               sender, receiver);
+  if (!shared_data_->channel_manager()->SetRembStatus(video_channel, sender,
+                                                      receiver)) {
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::SetSendTimestampOffsetStatus(int video_channel,
+                                                  bool enable,
+                                                  int id) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "ViERTP_RTCPImpl::SetSendTimestampOffsetStatus(%d, %d, %d)",
+               video_channel, enable, id);
+
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->SetSendTimestampOffsetStatus(enable, id) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::SetReceiveTimestampOffsetStatus(int video_channel,
+                                                     bool enable,
+                                                     int id) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "ViERTP_RTCPImpl::SetReceiveTimestampOffsetStatus(%d, %d, %d)",
+               video_channel, enable, id);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->SetReceiveTimestampOffsetStatus(enable, id) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::GetReceivedRTCPStatistics(const int video_channel,
+                                               uint16_t& fraction_lost,
+                                               unsigned int& cumulative_lost,
+                                               unsigned int& extended_max,
+                                               unsigned int& jitter,
+                                               int& rtt_ms) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->GetReceivedRtcpStatistics(&fraction_lost,
+                                             &cumulative_lost,
+                                             &extended_max,
+                                             &jitter,
+                                             &rtt_ms) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::GetSentRTCPStatistics(const int video_channel,
+                                           uint16_t& fraction_lost,
+                                           unsigned int& cumulative_lost,
+                                           unsigned int& extended_max,
+                                           unsigned int& jitter,
+                                           int& rtt_ms) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+
+  if (vie_channel->GetSendRtcpStatistics(&fraction_lost, &cumulative_lost,
+                                         &extended_max, &jitter,
+                                         &rtt_ms) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::GetRTPStatistics(const int video_channel,
+                                      unsigned int& bytes_sent,
+                                      unsigned int& packets_sent,
+                                      unsigned int& bytes_received,
+                                      unsigned int& packets_received) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->GetRtpStatistics(&bytes_sent,
+                                    &packets_sent,
+                                    &bytes_received,
+                                    &packets_received) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::GetBandwidthUsage(const int video_channel,
+                                       unsigned int& total_bitrate_sent,
+                                       unsigned int& video_bitrate_sent,
+                                       unsigned int& fec_bitrate_sent,
+                                       unsigned int& nackBitrateSent) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  vie_channel->GetBandwidthUsage(&total_bitrate_sent,
+                                 &video_bitrate_sent,
+                                 &fec_bitrate_sent,
+                                 &nackBitrateSent);
+  return 0;
+}
+
+int ViERTP_RTCPImpl::GetEstimatedSendBandwidth(
+    const int video_channel,
+    unsigned int* estimated_bandwidth) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEEncoder* vie_encoder = cs.Encoder(video_channel);
+  if (!vie_encoder) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Could not get encoder for channel %d", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  return vie_encoder->EstimatedSendBandwidth(
+      static_cast<WebRtc_UWord32*>(estimated_bandwidth));
+}
+
+int ViERTP_RTCPImpl::GetEstimatedReceiveBandwidth(
+    const int video_channel,
+    unsigned int* estimated_bandwidth) const {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Could not get channel %d", __FUNCTION__,
+                 video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  return vie_channel->GetEstimatedReceiveBandwidth(
+      static_cast<WebRtc_UWord32*>(estimated_bandwidth));
+}
+
+int ViERTP_RTCPImpl::SetOverUseDetectorOptions(
+    const OverUseDetectorOptions& options) const {
+  if (!shared_data_->Initialized()) {
+    shared_data_->SetLastError(kViENotInitialized);
+    WEBRTC_TRACE(kTraceError, kTraceVideo, ViEId(shared_data_->instance_id()),
+                 "%s - ViE instance %d not initialized", __FUNCTION__,
+                 shared_data_->instance_id());
+    return -1;
+  }
+  // Lock the channel manager to avoid creating a channel with
+  // "undefined" bwe settings (atomic copy).
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  shared_data_->SetOverUseDetectorOptions(options);
+  return 0;
+}
+
+int ViERTP_RTCPImpl::StartRTPDump(const int video_channel,
+                                  const char file_nameUTF8[1024],
+                                  RTPDirections direction) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, file_name: %s, direction: %d)", __FUNCTION__,
+               video_channel, file_nameUTF8, direction);
+  assert(FileWrapper::kMaxFileNameSize == 1024);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->StartRTPDump(file_nameUTF8, direction) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::StopRTPDump(const int video_channel,
+                                 RTPDirections direction) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d, direction: %d)", __FUNCTION__, video_channel,
+               direction);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->StopRTPDump(direction) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpUnknownError);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::RegisterRTPObserver(const int video_channel,
+                                         ViERTPObserver& observer) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->RegisterRtpObserver(&observer) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpObserverAlreadyRegistered);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::DeregisterRTPObserver(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->RegisterRtpObserver(NULL) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpObserverNotRegistered);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::RegisterRTCPObserver(const int video_channel,
+                                          ViERTCPObserver& observer) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->RegisterRtcpObserver(&observer) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpObserverAlreadyRegistered);
+    return -1;
+  }
+  return 0;
+}
+
+int ViERTP_RTCPImpl::DeregisterRTCPObserver(const int video_channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVideo,
+               ViEId(shared_data_->instance_id(), video_channel),
+               "%s(channel: %d)", __FUNCTION__, video_channel);
+  ViEChannelManagerScoped cs(*(shared_data_->channel_manager()));
+  ViEChannel* vie_channel = cs.Channel(video_channel);
+  if (!vie_channel) {
+    WEBRTC_TRACE(kTraceError, kTraceVideo,
+                 ViEId(shared_data_->instance_id(), video_channel),
+                 "%s: Channel %d doesn't exist", __FUNCTION__, video_channel);
+    shared_data_->SetLastError(kViERtpRtcpInvalidChannelId);
+    return -1;
+  }
+  if (vie_channel->RegisterRtcpObserver(NULL) != 0) {
+    shared_data_->SetLastError(kViERtpRtcpObserverNotRegistered);
+    return -1;
+  }
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_rtp_rtcp_impl.h b/src/video_engine/vie_rtp_rtcp_impl.h
new file mode 100644
index 0000000..7c1614b
--- /dev/null
+++ b/src/video_engine/vie_rtp_rtcp_impl.h
@@ -0,0 +1,127 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_RTP_RTCP_IMPL_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_RTP_RTCP_IMPL_H_
+
+#include "modules/rtp_rtcp/interface/rtp_rtcp_defines.h"
+#include "typedefs.h"  // NOLINT
+#include "video_engine/include/vie_rtp_rtcp.h"
+#include "video_engine/vie_ref_count.h"
+
+namespace webrtc {
+
+class ViESharedData;
+
+class ViERTP_RTCPImpl
+    : public ViERTP_RTCP,
+      public ViERefCount {
+ public:
+  // Implements ViERTP_RTCP.
+  virtual int Release();
+  virtual int SetLocalSSRC(const int video_channel,
+                           const unsigned int SSRC,
+                           const StreamType usage,
+                           const unsigned char simulcast_idx);
+  virtual int GetLocalSSRC(const int video_channel,
+                           unsigned int& SSRC) const;  // NOLINT
+  virtual int SetRemoteSSRCType(const int video_channel,
+                                const StreamType usage,
+                                const unsigned int SSRC) const;
+  virtual int GetRemoteSSRC(const int video_channel,
+                            unsigned int& SSRC) const;  // NOLINT
+  virtual int GetRemoteCSRCs(const int video_channel,
+                             unsigned int CSRCs[kRtpCsrcSize]) const;
+  virtual int SetStartSequenceNumber(const int video_channel,
+                                     uint16_t sequence_number);
+  virtual int SetRTCPStatus(const int video_channel,
+                            const ViERTCPMode rtcp_mode);
+  virtual int GetRTCPStatus(const int video_channel,
+                            ViERTCPMode& rtcp_mode) const;
+  virtual int SetRTCPCName(const int video_channel,
+                           const char rtcp_cname[KMaxRTCPCNameLength]);
+  virtual int GetRTCPCName(const int video_channel,
+                           char rtcp_cname[KMaxRTCPCNameLength]) const;
+  virtual int GetRemoteRTCPCName(const int video_channel,
+                                 char rtcp_cname[KMaxRTCPCNameLength]) const;
+  virtual int SendApplicationDefinedRTCPPacket(
+      const int video_channel,
+      const unsigned char sub_type,
+      unsigned int name,
+      const char* data,
+      uint16_t data_length_in_bytes);
+  virtual int SetNACKStatus(const int video_channel, const bool enable);
+  virtual int SetFECStatus(const int video_channel, const bool enable,
+                           const unsigned char payload_typeRED,
+                           const unsigned char payload_typeFEC);
+  virtual int SetHybridNACKFECStatus(const int video_channel, const bool enable,
+                                     const unsigned char payload_typeRED,
+                                     const unsigned char payload_typeFEC);
+  virtual int SetKeyFrameRequestMethod(const int video_channel,
+                                       const ViEKeyFrameRequestMethod method);
+  virtual int SetTMMBRStatus(const int video_channel, const bool enable);
+  virtual int SetRembStatus(int video_channel, bool sender, bool receiver);
+  virtual int SetSendTimestampOffsetStatus(int video_channel,
+                                           bool enable,
+                                           int id);
+  virtual int SetReceiveTimestampOffsetStatus(int video_channel,
+                                              bool enable,
+                                              int id);
+  virtual int GetReceivedRTCPStatistics(const int video_channel,
+                                        uint16_t& fraction_lost,
+                                        unsigned int& cumulative_lost,
+                                        unsigned int& extended_max,
+                                        unsigned int& jitter,
+                                        int& rtt_ms) const;
+  virtual int GetSentRTCPStatistics(const int video_channel,
+                                    uint16_t& fraction_lost,
+                                    unsigned int& cumulative_lost,
+                                    unsigned int& extended_max,
+                                    unsigned int& jitter, int& rtt_ms) const;
+  virtual int GetRTPStatistics(const int video_channel,
+                               unsigned int& bytes_sent,
+                               unsigned int& packets_sent,
+                               unsigned int& bytes_received,
+                               unsigned int& packets_received) const;
+  virtual int GetBandwidthUsage(const int video_channel,
+                                unsigned int& total_bitrate_sent,
+                                unsigned int& video_bitrate_sent,
+                                unsigned int& fec_bitrate_sent,
+                                unsigned int& nackBitrateSent) const;
+  virtual int GetEstimatedSendBandwidth(
+      const int video_channel,
+      unsigned int* estimated_bandwidth) const;
+  virtual int GetEstimatedReceiveBandwidth(
+      const int video_channel,
+      unsigned int* estimated_bandwidth) const;
+  virtual int SetOverUseDetectorOptions(
+      const OverUseDetectorOptions& options) const;
+  virtual int StartRTPDump(const int video_channel,
+                           const char file_nameUTF8[1024],
+                           RTPDirections direction);
+  virtual int StopRTPDump(const int video_channel, RTPDirections direction);
+  virtual int RegisterRTPObserver(const int video_channel,
+                                  ViERTPObserver& observer);
+  virtual int DeregisterRTPObserver(const int video_channel);
+  virtual int RegisterRTCPObserver(const int video_channel,
+                                   ViERTCPObserver& observer);
+  virtual int DeregisterRTCPObserver(const int video_channel);
+
+ protected:
+  explicit ViERTP_RTCPImpl(ViESharedData* shared_data);
+  virtual ~ViERTP_RTCPImpl();
+
+ private:
+  ViESharedData* shared_data_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_RTP_RTCP_IMPL_H_
diff --git a/src/video_engine/vie_sender.cc b/src/video_engine/vie_sender.cc
new file mode 100644
index 0000000..6f682c1
--- /dev/null
+++ b/src/video_engine/vie_sender.cc
@@ -0,0 +1,197 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_sender.h"
+
+#include <cassert>
+
+#include "modules/utility/interface/rtp_dump.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/trace.h"
+
+namespace webrtc {
+
+ViESender::ViESender(int channel_id)
+    : channel_id_(channel_id),
+      critsect_(CriticalSectionWrapper::CreateCriticalSection()),
+      external_encryption_(NULL),
+      encryption_buffer_(NULL),
+      transport_(NULL),
+      rtp_dump_(NULL) {
+}
+
+ViESender::~ViESender() {
+  if (encryption_buffer_) {
+    delete[] encryption_buffer_;
+    encryption_buffer_ = NULL;
+  }
+
+  if (rtp_dump_) {
+    rtp_dump_->Stop();
+    RtpDump::DestroyRtpDump(rtp_dump_);
+    rtp_dump_ = NULL;
+  }
+}
+
+int ViESender::RegisterExternalEncryption(Encryption* encryption) {
+  CriticalSectionScoped cs(critsect_.get());
+  if (external_encryption_) {
+    return -1;
+  }
+  encryption_buffer_ = new WebRtc_UWord8[kViEMaxMtu];
+  if (encryption_buffer_ == NULL) {
+    return -1;
+  }
+  external_encryption_ = encryption;
+  return 0;
+}
+
+int ViESender::DeregisterExternalEncryption() {
+  CriticalSectionScoped cs(critsect_.get());
+  if (external_encryption_ == NULL) {
+    return -1;
+  }
+  if (encryption_buffer_) {
+    delete[] encryption_buffer_;
+    encryption_buffer_ = NULL;
+  }
+  external_encryption_ = NULL;
+  return 0;
+}
+
+int ViESender::RegisterSendTransport(Transport* transport) {
+  CriticalSectionScoped cs(critsect_.get());
+  if (transport_) {
+    return -1;
+  }
+  transport_ = transport;
+  return 0;
+}
+
+int ViESender::DeregisterSendTransport() {
+  CriticalSectionScoped cs(critsect_.get());
+  if (transport_ == NULL) {
+    return -1;
+  }
+  transport_ = NULL;
+  return 0;
+}
+
+int ViESender::StartRTPDump(const char file_nameUTF8[1024]) {
+  CriticalSectionScoped cs(critsect_.get());
+  if (rtp_dump_) {
+    // Packet dump is already started, restart it.
+    rtp_dump_->Stop();
+  } else {
+    rtp_dump_ = RtpDump::CreateRtpDump();
+    if (rtp_dump_ == NULL) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, channel_id_,
+                   "StartSRTPDump: Failed to create RTP dump");
+      return -1;
+    }
+  }
+  if (rtp_dump_->Start(file_nameUTF8) != 0) {
+    RtpDump::DestroyRtpDump(rtp_dump_);
+    rtp_dump_ = NULL;
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, channel_id_,
+                 "StartRTPDump: Failed to start RTP dump");
+    return -1;
+  }
+  return 0;
+}
+
+int ViESender::StopRTPDump() {
+  CriticalSectionScoped cs(critsect_.get());
+  if (rtp_dump_) {
+    if (rtp_dump_->IsActive()) {
+      rtp_dump_->Stop();
+    } else {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, channel_id_,
+                   "StopRTPDump: Dump not active");
+    }
+    RtpDump::DestroyRtpDump(rtp_dump_);
+    rtp_dump_ = NULL;
+  } else {
+    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideo, channel_id_,
+                 "StopRTPDump: RTP dump not started");
+    return -1;
+  }
+  return 0;
+}
+
+int ViESender::SendPacket(int vie_id, const void* data, int len) {
+  CriticalSectionScoped cs(critsect_.get());
+  if (!transport_) {
+    // No transport
+    return -1;
+  }
+  assert(ChannelId(vie_id) == channel_id_);
+
+  // TODO(mflodman) Change decrypt to get rid of this cast.
+  void* tmp_ptr = const_cast<void*>(data);
+  unsigned char* send_packet = static_cast<unsigned char*>(tmp_ptr);
+  int send_packet_length = len;
+
+  if (rtp_dump_) {
+    rtp_dump_->DumpPacket(send_packet, send_packet_length);
+  }
+
+  if (external_encryption_) {
+    external_encryption_->encrypt(channel_id_, send_packet,
+                                  encryption_buffer_, send_packet_length,
+                                  static_cast<int*>(&send_packet_length));
+    send_packet = encryption_buffer_;
+  }
+  const int bytes_sent = transport_->SendPacket(channel_id_, send_packet,
+                                                send_packet_length);
+  if (bytes_sent != send_packet_length) {
+    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideo, channel_id_,
+                 "ViESender::SendPacket - Transport failed to send RTP packet");
+  }
+  return bytes_sent;
+}
+
+int ViESender::SendRTCPPacket(int vie_id, const void* data, int len) {
+  CriticalSectionScoped cs(critsect_.get());
+
+  if (!transport_) {
+    return -1;
+  }
+
+  assert(ChannelId(vie_id) == channel_id_);
+
+  // Prepare for possible encryption and sending.
+  // TODO(mflodman) Change decrypt to get rid of this cast.
+  void* tmp_ptr = const_cast<void*>(data);
+  unsigned char* send_packet = static_cast<unsigned char*>(tmp_ptr);
+  int send_packet_length = len;
+
+  if (rtp_dump_) {
+    rtp_dump_->DumpPacket(send_packet, send_packet_length);
+  }
+
+  if (external_encryption_) {
+    external_encryption_->encrypt_rtcp(
+        channel_id_, send_packet, encryption_buffer_, send_packet_length,
+        static_cast<int*>(&send_packet_length));
+    send_packet = encryption_buffer_;
+  }
+
+  const int bytes_sent = transport_->SendRTCPPacket(channel_id_, send_packet,
+                                                    send_packet_length);
+  if (bytes_sent != send_packet_length) {
+    WEBRTC_TRACE(
+        webrtc::kTraceWarning, webrtc::kTraceVideo, channel_id_,
+        "ViESender::SendRTCPPacket - Transport failed to send RTCP packet");
+  }
+  return bytes_sent;
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_sender.h b/src/video_engine/vie_sender.h
new file mode 100644
index 0000000..c9a1ef8
--- /dev/null
+++ b/src/video_engine/vie_sender.h
@@ -0,0 +1,64 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// ViESender is responsible for encrypting, if enabled, packets and send to
+// network.
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_SENDER_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_SENDER_H_
+
+#include "common_types.h"  // NOLINT
+#include "engine_configurations.h"  // NOLINT
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "typedefs.h"  // NOLINT
+#include "video_engine/vie_defines.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class RtpDump;
+class Transport;
+class VideoCodingModule;
+
+class ViESender: public Transport {
+ public:
+  explicit ViESender(const int32_t channel_id);
+  ~ViESender();
+
+  // Registers an encryption class to use before sending packets.
+  int RegisterExternalEncryption(Encryption* encryption);
+  int DeregisterExternalEncryption();
+
+  // Registers transport to use for sending RTP and RTCP.
+  int RegisterSendTransport(Transport* transport);
+  int DeregisterSendTransport();
+
+  // Stores all incoming packets to file.
+  int StartRTPDump(const char file_nameUTF8[1024]);
+  int StopRTPDump();
+
+  // Implements Transport.
+  virtual int SendPacket(int vie_id, const void* data, int len);
+  virtual int SendRTCPPacket(int vie_id, const void* data, int len);
+
+ private:
+  const int32_t channel_id_;
+
+  scoped_ptr<CriticalSectionWrapper> critsect_;
+
+  Encryption* external_encryption_;
+  WebRtc_UWord8* encryption_buffer_;
+  Transport* transport_;
+  RtpDump* rtp_dump_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_SENDER_H_
diff --git a/src/video_engine/vie_shared_data.cc b/src/video_engine/vie_shared_data.cc
new file mode 100644
index 0000000..b927144
--- /dev/null
+++ b/src/video_engine/vie_shared_data.cc
@@ -0,0 +1,87 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "modules/utility/interface/process_thread.h"
+#include "system_wrappers/interface/cpu_info.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/vie_channel_manager.h"
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_input_manager.h"
+#include "video_engine/vie_render_manager.h"
+#include "video_engine/vie_shared_data.h"
+
+namespace webrtc {
+
+// Active instance counter
+int ViESharedData::instance_counter_ = 0;
+
+ViESharedData::ViESharedData()
+    : instance_id_(++instance_counter_),
+      initialized_(false),
+      number_cores_(CpuInfo::DetectNumberOfCores()),
+      over_use_detector_options_(),
+      vie_performance_monitor_(ViEPerformanceMonitor(instance_id_)),
+      channel_manager_(*new ViEChannelManager(instance_id_, number_cores_,
+                                              &vie_performance_monitor_,
+                                              over_use_detector_options_)),
+      input_manager_(*new ViEInputManager(instance_id_)),
+      render_manager_(*new ViERenderManager(instance_id_)),
+      module_process_thread_(ProcessThread::CreateProcessThread()),
+      last_error_(0) {
+  Trace::CreateTrace();
+  channel_manager_.SetModuleProcessThread(module_process_thread_);
+  input_manager_.SetModuleProcessThread(module_process_thread_);
+  module_process_thread_->Start();
+}
+
+ViESharedData::~ViESharedData() {
+  delete &input_manager_;
+  delete &channel_manager_;
+  delete &render_manager_;
+
+  module_process_thread_->Stop();
+  ProcessThread::DestroyProcessThread(module_process_thread_);
+  Trace::ReturnTrace();
+}
+
+bool ViESharedData::Initialized() const {
+  return initialized_;
+}
+
+int ViESharedData::SetInitialized() {
+  initialized_ = true;
+  return 0;
+}
+
+int ViESharedData::SetUnInitialized() {
+  initialized_ = false;
+  return 0;
+}
+
+void ViESharedData::SetLastError(const int error) const {
+  last_error_ = error;
+}
+
+int ViESharedData::LastErrorInternal() const {
+  int error = last_error_;
+  last_error_ = 0;
+  return error;
+}
+
+void ViESharedData::SetOverUseDetectorOptions(
+    const OverUseDetectorOptions& options) {
+  over_use_detector_options_ = options;
+}
+
+int ViESharedData::NumberOfCores() const {
+  return number_cores_;
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_shared_data.h b/src/video_engine/vie_shared_data.h
new file mode 100644
index 0000000..062e5c5
--- /dev/null
+++ b/src/video_engine/vie_shared_data.h
@@ -0,0 +1,64 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// ViESharedData contains data and instances common to all interface
+// implementations.
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_SHARED_DATA_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_SHARED_DATA_H_
+
+#include "video_engine/vie_defines.h"
+#include "video_engine/vie_performance_monitor.h"
+
+namespace webrtc {
+
+class ProcessThread;
+class ViEChannelManager;
+class ViEInputManager;
+class ViERenderManager;
+
+class ViESharedData {
+ public:
+  ViESharedData();
+  ~ViESharedData();
+
+  bool Initialized() const;
+  int SetInitialized();
+  int SetUnInitialized();
+  void SetLastError(const int error) const;
+  int LastErrorInternal() const;
+  void SetOverUseDetectorOptions(const OverUseDetectorOptions& options);
+  int NumberOfCores() const;
+
+  int instance_id() { return instance_id_;}
+  ViEPerformanceMonitor* vie_performance_monitor() {
+    return &vie_performance_monitor_; }
+  ViEChannelManager* channel_manager() { return &channel_manager_; }
+  ViEInputManager* input_manager() { return &input_manager_; }
+  ViERenderManager* render_manager() { return &render_manager_; }
+
+ private:
+  static int instance_counter_;
+  const int instance_id_;
+  bool initialized_;
+  const int number_cores_;
+
+  OverUseDetectorOptions over_use_detector_options_;
+  ViEPerformanceMonitor vie_performance_monitor_;
+  ViEChannelManager& channel_manager_;
+  ViEInputManager& input_manager_;
+  ViERenderManager& render_manager_;
+  ProcessThread* module_process_thread_;
+  mutable int last_error_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_SHARED_DATA_H_
diff --git a/src/video_engine/vie_sync_module.cc b/src/video_engine/vie_sync_module.cc
new file mode 100644
index 0000000..325b69b
--- /dev/null
+++ b/src/video_engine/vie_sync_module.cc
@@ -0,0 +1,141 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video_engine/vie_sync_module.h"
+
+#include "modules/rtp_rtcp/interface/rtp_rtcp.h"
+#include "modules/video_coding/main/interface/video_coding.h"
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/trace.h"
+#include "video_engine/stream_synchronization.h"
+#include "voice_engine/include/voe_video_sync.h"
+
+namespace webrtc {
+
+enum { kSyncInterval = 1000};
+
+ViESyncModule::ViESyncModule(const int32_t channel_id, VideoCodingModule* vcm)
+    : data_cs_(CriticalSectionWrapper::CreateCriticalSection()),
+      channel_id_(channel_id),
+      vcm_(vcm),
+      video_rtcp_module_(NULL),
+      voe_channel_id_(-1),
+      voe_sync_interface_(NULL),
+      last_sync_time_(TickTime::Now()),
+      sync_() {
+}
+
+ViESyncModule::~ViESyncModule() {
+}
+
+int ViESyncModule::ConfigureSync(int voe_channel_id,
+                                 VoEVideoSync* voe_sync_interface,
+                                 RtpRtcp* video_rtcp_module) {
+  CriticalSectionScoped cs(data_cs_.get());
+  voe_channel_id_ = voe_channel_id;
+  voe_sync_interface_ = voe_sync_interface;
+  video_rtcp_module_ = video_rtcp_module;
+  sync_.reset(new StreamSynchronization(voe_channel_id, channel_id_));
+
+  if (!voe_sync_interface) {
+    voe_channel_id_ = -1;
+    if (voe_channel_id >= 0) {
+      // Trying to set a voice channel but no interface exist.
+      return -1;
+    }
+    return 0;
+  }
+  return 0;
+}
+
+int ViESyncModule::VoiceChannel() {
+  return voe_channel_id_;
+}
+
+WebRtc_Word32 ViESyncModule::TimeUntilNextProcess() {
+  return static_cast<WebRtc_Word32>(kSyncInterval -
+                         (TickTime::Now() - last_sync_time_).Milliseconds());
+}
+
+WebRtc_Word32 ViESyncModule::Process() {
+  CriticalSectionScoped cs(data_cs_.get());
+  last_sync_time_ = TickTime::Now();
+
+  int total_video_delay_target_ms = vcm_->Delay();
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, channel_id_,
+               "Video delay (JB + decoder) is %d ms",
+               total_video_delay_target_ms);
+
+  if (voe_channel_id_ == -1) {
+    return 0;
+  }
+  assert(video_rtcp_module_ && voe_sync_interface_);
+  assert(sync_.get());
+
+  int current_audio_delay_ms = 0;
+  if (voe_sync_interface_->GetDelayEstimate(voe_channel_id_,
+                                            current_audio_delay_ms) != 0) {
+    // Could not get VoE delay value, probably not a valid channel Id.
+    WEBRTC_TRACE(webrtc::kTraceStream, webrtc::kTraceVideo, channel_id_,
+                 "%s: VE_GetDelayEstimate error for voice_channel %d",
+                 __FUNCTION__, total_video_delay_target_ms, voe_channel_id_);
+    return 0;
+  }
+
+  // VoiceEngine report delay estimates even when not started, ignore if the
+  // reported value is lower than 40 ms.
+  if (current_audio_delay_ms < 40) {
+    WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, channel_id_,
+                 "A/V Sync: Audio delay < 40, skipping.");
+    return 0;
+  }
+
+  RtpRtcp* voice_rtcp_module = NULL;
+  if (0 != voe_sync_interface_->GetRtpRtcp(voe_channel_id_,
+                                           voice_rtcp_module)) {
+    return 0;
+  }
+  assert(voice_rtcp_module);
+
+  StreamSynchronization::Measurements video;
+  if (0 != video_rtcp_module_->RemoteNTP(&video.received_ntp_secs,
+                                         &video.received_ntp_frac,
+                                         &video.rtcp_arrivaltime_secs,
+                                         &video.rtcp_arrivaltime_frac)) {
+    // Failed to get video NTP.
+    return 0;
+  }
+
+  StreamSynchronization::Measurements audio;
+  if (0 != voice_rtcp_module->RemoteNTP(&audio.received_ntp_secs,
+                                        &audio.received_ntp_frac,
+                                        &audio.rtcp_arrivaltime_secs,
+                                        &audio.rtcp_arrivaltime_frac)) {
+    // Failed to get audio NTP.
+    return 0;
+  }
+  int extra_audio_delay_ms = 0;
+  if (sync_->ComputeDelays(audio, current_audio_delay_ms, &extra_audio_delay_ms,
+                          video, &total_video_delay_target_ms) != 0) {
+    return 0;
+  }
+  // Set the extra audio delay.synchronization
+  if (voe_sync_interface_->SetMinimumPlayoutDelay(
+      voe_channel_id_, extra_audio_delay_ms) == -1) {
+    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideo, channel_id_,
+                 "Error setting voice delay");
+  }
+  vcm_->SetMinimumPlayoutDelay(total_video_delay_target_ms);
+  WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideo, channel_id_,
+               "New Video delay target is: %d", total_video_delay_target_ms);
+  return 0;
+}
+
+}  // namespace webrtc
diff --git a/src/video_engine/vie_sync_module.h b/src/video_engine/vie_sync_module.h
new file mode 100644
index 0000000..c93d586
--- /dev/null
+++ b/src/video_engine/vie_sync_module.h
@@ -0,0 +1,57 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// ViESyncModule is responsible for synchronization audio and video for a given
+// VoE and ViE channel couple.
+
+#ifndef WEBRTC_VIDEO_ENGINE_VIE_SYNC_MODULE_H_
+#define WEBRTC_VIDEO_ENGINE_VIE_SYNC_MODULE_H_
+
+#include "modules/interface/module.h"
+#include "system_wrappers/interface/scoped_ptr.h"
+#include "system_wrappers/interface/tick_util.h"
+
+namespace webrtc {
+
+class CriticalSectionWrapper;
+class RtpRtcp;
+class StreamSynchronization;
+class VideoCodingModule;
+class VoEVideoSync;
+
+class ViESyncModule : public Module {
+ public:
+  ViESyncModule(const int32_t channel_id, VideoCodingModule* vcm);
+  ~ViESyncModule();
+
+  int ConfigureSync(int voe_channel_id,
+                    VoEVideoSync* voe_sync_interface,
+                    RtpRtcp* video_rtcp_module);
+
+  int VoiceChannel();
+
+  // Implements Module.
+  virtual WebRtc_Word32 TimeUntilNextProcess();
+  virtual WebRtc_Word32 Process();
+
+ private:
+  scoped_ptr<CriticalSectionWrapper> data_cs_;
+  const int32_t channel_id_;
+  VideoCodingModule* vcm_;
+  RtpRtcp* video_rtcp_module_;
+  int voe_channel_id_;
+  VoEVideoSync* voe_sync_interface_;
+  TickTime last_sync_time_;
+  scoped_ptr<StreamSynchronization> sync_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VIDEO_ENGINE_VIE_SYNC_MODULE_H_
diff --git a/src/voice_engine/OWNERS b/src/voice_engine/OWNERS
new file mode 100644
index 0000000..a07ced3
--- /dev/null
+++ b/src/voice_engine/OWNERS
@@ -0,0 +1,4 @@
+henrikg@webrtc.org
+henrika@webrtc.org
+niklas.enbom@webrtc.org
+xians@webrtc.org
diff --git a/src/voice_engine/channel.cc b/src/voice_engine/channel.cc
new file mode 100644
index 0000000..b4889e2
--- /dev/null
+++ b/src/voice_engine/channel.cc
@@ -0,0 +1,6640 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "channel.h"
+
+#include "audio_device.h"
+#include "audio_frame_operations.h"
+#include "audio_processing.h"
+#include "critical_section_wrapper.h"
+#include "output_mixer.h"
+#include "process_thread.h"
+#include "rtp_dump.h"
+#include "statistics.h"
+#include "trace.h"
+#include "transmit_mixer.h"
+#include "utility.h"
+#include "voe_base.h"
+#include "voe_external_media.h"
+#include "voe_rtp_rtcp.h"
+
+#if defined(_WIN32)
+#include <Qos.h>
+#endif
+
+namespace webrtc
+{
+
+namespace voe
+{
+
+WebRtc_Word32
+Channel::SendData(FrameType frameType,
+                  WebRtc_UWord8   payloadType,
+                  WebRtc_UWord32  timeStamp,
+                  const WebRtc_UWord8*  payloadData,
+                  WebRtc_UWord16  payloadSize,
+                  const RTPFragmentationHeader* fragmentation)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SendData(frameType=%u, payloadType=%u, timeStamp=%u,"
+                 " payloadSize=%u, fragmentation=0x%x)",
+                 frameType, payloadType, timeStamp, payloadSize, fragmentation);
+
+    if (_includeAudioLevelIndication)
+    {
+        assert(_rtpAudioProc.get() != NULL);
+        // Store current audio level in the RTP/RTCP module.
+        // The level will be used in combination with voice-activity state
+        // (frameType) to add an RTP header extension
+        _rtpRtcpModule->SetAudioLevel(_rtpAudioProc->level_estimator()->RMS());
+    }
+
+    // Push data from ACM to RTP/RTCP-module to deliver audio frame for
+    // packetization.
+    // This call will trigger Transport::SendPacket() from the RTP/RTCP module.
+    if (_rtpRtcpModule->SendOutgoingData((FrameType&)frameType,
+                                        payloadType,
+                                        timeStamp,
+                                        // Leaving the time when this frame was
+                                        // received from the capture device as
+                                        // undefined for voice for now.
+                                        -1,
+                                        payloadData,
+                                        payloadSize,
+                                        fragmentation) == -1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
+            "Channel::SendData() failed to send data to RTP/RTCP module");
+        return -1;
+    }
+
+    _lastLocalTimeStamp = timeStamp;
+    _lastPayloadType = payloadType;
+
+    return 0;
+}
+
+WebRtc_Word32
+Channel::InFrameType(WebRtc_Word16 frameType)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::InFrameType(frameType=%d)", frameType);
+
+    CriticalSectionScoped cs(&_callbackCritSect);
+    // 1 indicates speech
+    _sendFrameType = (frameType == 1) ? 1 : 0;
+    return 0;
+}
+
+#ifdef WEBRTC_DTMF_DETECTION
+int
+Channel::IncomingDtmf(const WebRtc_UWord8 digitDtmf, const bool end)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::IncomingDtmf(digitDtmf=%u, end=%d)",
+               digitDtmf, end);
+
+    if (digitDtmf != 999)
+    {
+        CriticalSectionScoped cs(&_callbackCritSect);
+        if (_telephoneEventDetectionPtr)
+        {
+            _telephoneEventDetectionPtr->OnReceivedTelephoneEventInband(
+                _channelId, digitDtmf, end);
+        }
+    }
+
+    return 0;
+}
+#endif
+
+WebRtc_Word32
+Channel::OnRxVadDetected(const int vadDecision)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+                 "Channel::OnRxVadDetected(vadDecision=%d)", vadDecision);
+
+    CriticalSectionScoped cs(&_callbackCritSect);
+    if (_rxVadObserverPtr)
+    {
+        _rxVadObserverPtr->OnRxVad(_channelId, vadDecision);
+    }
+
+    return 0;
+}
+
+int
+Channel::SendPacket(int channel, const void *data, int len)
+{
+    channel = VoEChannelId(channel);
+    assert(channel == _channelId);
+
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SendPacket(channel=%d, len=%d)", channel, len);
+
+    if (_transportPtr == NULL)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "Channel::SendPacket() failed to send RTP packet due to"
+                     " invalid transport object");
+        return -1;
+    }
+
+    // Insert extra RTP packet using if user has called the InsertExtraRTPPacket
+    // API
+    if (_insertExtraRTPPacket)
+    {
+        WebRtc_UWord8* rtpHdr = (WebRtc_UWord8*)data;
+        WebRtc_UWord8 M_PT(0);
+        if (_extraMarkerBit)
+        {
+            M_PT = 0x80;            // set the M-bit
+        }
+        M_PT += _extraPayloadType;  // set the payload type
+        *(++rtpHdr) = M_PT;     // modify the M|PT-byte within the RTP header
+        _insertExtraRTPPacket = false;  // insert one packet only
+    }
+
+    WebRtc_UWord8* bufferToSendPtr = (WebRtc_UWord8*)data;
+    WebRtc_Word32 bufferLength = len;
+
+    // Dump the RTP packet to a file (if RTP dump is enabled).
+    if (_rtpDumpOut.DumpPacket((const WebRtc_UWord8*)data, len) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "Channel::SendPacket() RTP dump to output file failed");
+    }
+
+    // SRTP or External encryption
+    if (_encrypting)
+    {
+        CriticalSectionScoped cs(&_callbackCritSect);
+
+        if (_encryptionPtr)
+        {
+            if (!_encryptionRTPBufferPtr)
+            {
+                // Allocate memory for encryption buffer one time only
+                _encryptionRTPBufferPtr =
+                    new WebRtc_UWord8[kVoiceEngineMaxIpPacketSizeBytes];
+            }
+
+            // Perform encryption (SRTP or external)
+            WebRtc_Word32 encryptedBufferLength = 0;
+            _encryptionPtr->encrypt(_channelId,
+                                    bufferToSendPtr,
+                                    _encryptionRTPBufferPtr,
+                                    bufferLength,
+                                    (int*)&encryptedBufferLength);
+            if (encryptedBufferLength <= 0)
+            {
+                _engineStatisticsPtr->SetLastError(
+                    VE_ENCRYPTION_FAILED,
+                    kTraceError, "Channel::SendPacket() encryption failed");
+                return -1;
+            }
+
+            // Replace default data buffer with encrypted buffer
+            bufferToSendPtr = _encryptionRTPBufferPtr;
+            bufferLength = encryptedBufferLength;
+        }
+    }
+
+    // Packet transmission using WebRtc socket transport
+    if (!_externalTransport)
+    {
+        int n = _transportPtr->SendPacket(channel, bufferToSendPtr,
+                                          bufferLength);
+        if (n < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                         VoEId(_instanceId,_channelId),
+                         "Channel::SendPacket() RTP transmission using WebRtc"
+                         " sockets failed");
+            return -1;
+        }
+        return n;
+    }
+
+    // Packet transmission using external transport transport
+    {
+        CriticalSectionScoped cs(&_callbackCritSect);
+
+        int n = _transportPtr->SendPacket(channel,
+                                          bufferToSendPtr,
+                                          bufferLength);
+        if (n < 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                         VoEId(_instanceId,_channelId),
+                         "Channel::SendPacket() RTP transmission using external"
+                         " transport failed");
+            return -1;
+        }
+        return n;
+    }
+}
+
+int
+Channel::SendRTCPPacket(int channel, const void *data, int len)
+{
+    channel = VoEChannelId(channel);
+    assert(channel == _channelId);
+
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SendRTCPPacket(channel=%d, len=%d)", channel, len);
+
+    {
+        CriticalSectionScoped cs(&_callbackCritSect);
+        if (_transportPtr == NULL)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                         VoEId(_instanceId,_channelId),
+                         "Channel::SendRTCPPacket() failed to send RTCP packet"
+                         " due to invalid transport object");
+            return -1;
+        }
+    }
+
+    WebRtc_UWord8* bufferToSendPtr = (WebRtc_UWord8*)data;
+    WebRtc_Word32 bufferLength = len;
+
+    // Dump the RTCP packet to a file (if RTP dump is enabled).
+    if (_rtpDumpOut.DumpPacket((const WebRtc_UWord8*)data, len) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "Channel::SendPacket() RTCP dump to output file failed");
+    }
+
+    // SRTP or External encryption
+    if (_encrypting)
+    {
+        CriticalSectionScoped cs(&_callbackCritSect);
+
+        if (_encryptionPtr)
+        {
+            if (!_encryptionRTCPBufferPtr)
+            {
+                // Allocate memory for encryption buffer one time only
+                _encryptionRTCPBufferPtr =
+                    new WebRtc_UWord8[kVoiceEngineMaxIpPacketSizeBytes];
+            }
+
+            // Perform encryption (SRTP or external).
+            WebRtc_Word32 encryptedBufferLength = 0;
+            _encryptionPtr->encrypt_rtcp(_channelId,
+                                         bufferToSendPtr,
+                                         _encryptionRTCPBufferPtr,
+                                         bufferLength,
+                                         (int*)&encryptedBufferLength);
+            if (encryptedBufferLength <= 0)
+            {
+                _engineStatisticsPtr->SetLastError(
+                    VE_ENCRYPTION_FAILED, kTraceError,
+                    "Channel::SendRTCPPacket() encryption failed");
+                return -1;
+            }
+
+            // Replace default data buffer with encrypted buffer
+            bufferToSendPtr = _encryptionRTCPBufferPtr;
+            bufferLength = encryptedBufferLength;
+        }
+    }
+
+    // Packet transmission using WebRtc socket transport
+    if (!_externalTransport)
+    {
+        int n = _transportPtr->SendRTCPPacket(channel,
+                                              bufferToSendPtr,
+                                              bufferLength);
+        if (n < 0)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
+                         VoEId(_instanceId,_channelId),
+                         "Channel::SendRTCPPacket() transmission using WebRtc"
+                         " sockets failed");
+            return -1;
+        }
+        return n;
+    }
+
+    // Packet transmission using external transport transport
+    {
+        CriticalSectionScoped cs(&_callbackCritSect);
+
+        int n = _transportPtr->SendRTCPPacket(channel,
+                                              bufferToSendPtr,
+                                              bufferLength);
+        if (n < 0)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
+                         VoEId(_instanceId,_channelId),
+                         "Channel::SendRTCPPacket() transmission using external"
+                         " transport failed");
+            return -1;
+        }
+        return n;
+    }
+
+    return len;
+}
+
+void
+Channel::IncomingRTPPacket(const WebRtc_Word8* incomingRtpPacket,
+                           const WebRtc_Word32 rtpPacketLength,
+                           const char* fromIP,
+                           const WebRtc_UWord16 fromPort)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::IncomingRTPPacket(rtpPacketLength=%d,"
+                 " fromIP=%s, fromPort=%u)",
+                 rtpPacketLength, fromIP, fromPort);
+
+    // Store playout timestamp for the received RTP packet
+    // to be used for upcoming delay estimations
+    WebRtc_UWord32 playoutTimestamp(0);
+    if (GetPlayoutTimeStamp(playoutTimestamp) == 0)
+    {
+        _playoutTimeStampRTP = playoutTimestamp;
+    }
+
+    WebRtc_UWord8* rtpBufferPtr = (WebRtc_UWord8*)incomingRtpPacket;
+    WebRtc_Word32 rtpBufferLength = rtpPacketLength;
+
+    // SRTP or External decryption
+    if (_decrypting)
+    {
+        CriticalSectionScoped cs(&_callbackCritSect);
+
+        if (_encryptionPtr)
+        {
+            if (!_decryptionRTPBufferPtr)
+            {
+                // Allocate memory for decryption buffer one time only
+                _decryptionRTPBufferPtr =
+                    new WebRtc_UWord8[kVoiceEngineMaxIpPacketSizeBytes];
+            }
+
+            // Perform decryption (SRTP or external)
+            WebRtc_Word32 decryptedBufferLength = 0;
+            _encryptionPtr->decrypt(_channelId,
+                                    rtpBufferPtr,
+                                    _decryptionRTPBufferPtr,
+                                    rtpBufferLength,
+                                    (int*)&decryptedBufferLength);
+            if (decryptedBufferLength <= 0)
+            {
+                _engineStatisticsPtr->SetLastError(
+                    VE_DECRYPTION_FAILED, kTraceError,
+                    "Channel::IncomingRTPPacket() decryption failed");
+                return;
+            }
+
+            // Replace default data buffer with decrypted buffer
+            rtpBufferPtr = _decryptionRTPBufferPtr;
+            rtpBufferLength = decryptedBufferLength;
+        }
+    }
+
+    // Dump the RTP packet to a file (if RTP dump is enabled).
+    if (_rtpDumpIn.DumpPacket(rtpBufferPtr,
+                              (WebRtc_UWord16)rtpBufferLength) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "Channel::SendPacket() RTP dump to input file failed");
+    }
+
+    // Deliver RTP packet to RTP/RTCP module for parsing
+    // The packet will be pushed back to the channel thru the
+    // OnReceivedPayloadData callback so we don't push it to the ACM here
+    if (_rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)rtpBufferPtr,
+                                      (WebRtc_UWord16)rtpBufferLength) == -1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
+            "Channel::IncomingRTPPacket() RTP packet is invalid");
+        return;
+    }
+}
+
+void
+Channel::IncomingRTCPPacket(const WebRtc_Word8* incomingRtcpPacket,
+                            const WebRtc_Word32 rtcpPacketLength,
+                            const char* fromIP,
+                            const WebRtc_UWord16 fromPort)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::IncomingRTCPPacket(rtcpPacketLength=%d, fromIP=%s,"
+                 " fromPort=%u)",
+                 rtcpPacketLength, fromIP, fromPort);
+
+    // Temporary buffer pointer and size for decryption
+    WebRtc_UWord8* rtcpBufferPtr = (WebRtc_UWord8*)incomingRtcpPacket;
+    WebRtc_Word32 rtcpBufferLength = rtcpPacketLength;
+
+    // Store playout timestamp for the received RTCP packet
+    // which will be read by the GetRemoteRTCPData API
+    WebRtc_UWord32 playoutTimestamp(0);
+    if (GetPlayoutTimeStamp(playoutTimestamp) == 0)
+    {
+        _playoutTimeStampRTCP = playoutTimestamp;
+    }
+
+    // SRTP or External decryption
+    if (_decrypting)
+    {
+        CriticalSectionScoped cs(&_callbackCritSect);
+
+        if (_encryptionPtr)
+        {
+            if (!_decryptionRTCPBufferPtr)
+            {
+                // Allocate memory for decryption buffer one time only
+                _decryptionRTCPBufferPtr =
+                    new WebRtc_UWord8[kVoiceEngineMaxIpPacketSizeBytes];
+            }
+
+            // Perform decryption (SRTP or external).
+            WebRtc_Word32 decryptedBufferLength = 0;
+            _encryptionPtr->decrypt_rtcp(_channelId,
+                                         rtcpBufferPtr,
+                                         _decryptionRTCPBufferPtr,
+                                         rtcpBufferLength,
+                                         (int*)&decryptedBufferLength);
+            if (decryptedBufferLength <= 0)
+            {
+                _engineStatisticsPtr->SetLastError(
+                    VE_DECRYPTION_FAILED, kTraceError,
+                    "Channel::IncomingRTCPPacket() decryption failed");
+                return;
+            }
+
+            // Replace default data buffer with decrypted buffer
+            rtcpBufferPtr = _decryptionRTCPBufferPtr;
+            rtcpBufferLength = decryptedBufferLength;
+        }
+    }
+
+    // Dump the RTCP packet to a file (if RTP dump is enabled).
+    if (_rtpDumpIn.DumpPacket(rtcpBufferPtr,
+                              (WebRtc_UWord16)rtcpBufferLength) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "Channel::SendPacket() RTCP dump to input file failed");
+    }
+
+    // Deliver RTCP packet to RTP/RTCP module for parsing
+    if (_rtpRtcpModule->IncomingPacket((const WebRtc_UWord8*)rtcpBufferPtr,
+                                      (WebRtc_UWord16)rtcpBufferLength) == -1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
+            "Channel::IncomingRTPPacket() RTCP packet is invalid");
+        return;
+    }
+}
+
+void
+Channel::OnReceivedTelephoneEvent(const WebRtc_Word32 id,
+                                  const WebRtc_UWord8 event,
+                                  const bool endOfEvent)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::OnReceivedTelephoneEvent(id=%d, event=%u,"
+                 " endOfEvent=%d)", id, event, endOfEvent);
+
+#ifdef WEBRTC_DTMF_DETECTION
+    if (_outOfBandTelephoneEventDetecion)
+    {
+        CriticalSectionScoped cs(&_callbackCritSect);
+
+        if (_telephoneEventDetectionPtr)
+        {
+            _telephoneEventDetectionPtr->OnReceivedTelephoneEventOutOfBand(
+                _channelId, event, endOfEvent);
+        }
+    }
+#endif
+}
+
+void
+Channel::OnPlayTelephoneEvent(const WebRtc_Word32 id,
+                              const WebRtc_UWord8 event,
+                              const WebRtc_UWord16 lengthMs,
+                              const WebRtc_UWord8 volume)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::OnPlayTelephoneEvent(id=%d, event=%u, lengthMs=%u,"
+                 " volume=%u)", id, event, lengthMs, volume);
+
+    if (!_playOutbandDtmfEvent || (event > 15))
+    {
+        // Ignore callback since feedback is disabled or event is not a
+        // Dtmf tone event.
+        return;
+    }
+
+    assert(_outputMixerPtr != NULL);
+
+    // Start playing out the Dtmf tone (if playout is enabled).
+    // Reduce length of tone with 80ms to the reduce risk of echo.
+    _outputMixerPtr->PlayDtmfTone(event, lengthMs - 80, volume);
+}
+
+void
+Channel::OnIncomingSSRCChanged(const WebRtc_Word32 id,
+                               const WebRtc_UWord32 SSRC)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::OnIncomingSSRCChanged(id=%d, SSRC=%d)",
+                 id, SSRC);
+
+    WebRtc_Word32 channel = VoEChannelId(id);
+    assert(channel == _channelId);
+
+    // Reset RTP-module counters since a new incoming RTP stream is detected
+    _rtpRtcpModule->ResetReceiveDataCountersRTP();
+    _rtpRtcpModule->ResetStatisticsRTP();
+
+    if (_rtpObserver)
+    {
+        CriticalSectionScoped cs(&_callbackCritSect);
+
+        if (_rtpObserverPtr)
+        {
+            // Send new SSRC to registered observer using callback
+            _rtpObserverPtr->OnIncomingSSRCChanged(channel, SSRC);
+        }
+    }
+}
+
+void Channel::OnIncomingCSRCChanged(const WebRtc_Word32 id,
+                                    const WebRtc_UWord32 CSRC,
+                                    const bool added)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::OnIncomingCSRCChanged(id=%d, CSRC=%d, added=%d)",
+                 id, CSRC, added);
+
+    WebRtc_Word32 channel = VoEChannelId(id);
+    assert(channel == _channelId);
+
+    if (_rtpObserver)
+    {
+        CriticalSectionScoped cs(&_callbackCritSect);
+
+        if (_rtpObserverPtr)
+        {
+            _rtpObserverPtr->OnIncomingCSRCChanged(channel, CSRC, added);
+        }
+    }
+}
+
+void
+Channel::OnApplicationDataReceived(const WebRtc_Word32 id,
+                                   const WebRtc_UWord8 subType,
+                                   const WebRtc_UWord32 name,
+                                   const WebRtc_UWord16 length,
+                                   const WebRtc_UWord8* data)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::OnApplicationDataReceived(id=%d, subType=%u,"
+                 " name=%u, length=%u)",
+                 id, subType, name, length);
+
+    WebRtc_Word32 channel = VoEChannelId(id);
+    assert(channel == _channelId);
+
+    if (_rtcpObserver)
+    {
+        CriticalSectionScoped cs(&_callbackCritSect);
+
+        if (_rtcpObserverPtr)
+        {
+            _rtcpObserverPtr->OnApplicationDataReceived(channel,
+                                                        subType,
+                                                        name,
+                                                        data,
+                                                        length);
+        }
+    }
+}
+
+WebRtc_Word32
+Channel::OnInitializeDecoder(
+    const WebRtc_Word32 id,
+    const WebRtc_Word8 payloadType,
+    const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+    const int frequency,
+    const WebRtc_UWord8 channels,
+    const WebRtc_UWord32 rate)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::OnInitializeDecoder(id=%d, payloadType=%d, "
+                 "payloadName=%s, frequency=%u, channels=%u, rate=%u)",
+                 id, payloadType, payloadName, frequency, channels, rate);
+
+    assert(VoEChannelId(id) == _channelId);
+
+    CodecInst receiveCodec = {0};
+    CodecInst dummyCodec = {0};
+
+    receiveCodec.pltype = payloadType;
+    receiveCodec.plfreq = frequency;
+    receiveCodec.channels = channels;
+    receiveCodec.rate = rate;
+    strncpy(receiveCodec.plname, payloadName, RTP_PAYLOAD_NAME_SIZE - 1);
+    
+    _audioCodingModule.Codec(payloadName, dummyCodec, frequency, channels);
+    receiveCodec.pacsize = dummyCodec.pacsize;
+
+    // Register the new codec to the ACM
+    if (_audioCodingModule.RegisterReceiveCodec(receiveCodec) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId, _channelId),
+                     "Channel::OnInitializeDecoder() invalid codec ("
+                     "pt=%d, name=%s) received - 1", payloadType, payloadName);
+        _engineStatisticsPtr->SetLastError(VE_AUDIO_CODING_MODULE_ERROR);
+        return -1;
+    }
+
+    return 0;
+}
+
+void
+Channel::OnPacketTimeout(const WebRtc_Word32 id)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::OnPacketTimeout(id=%d)", id);
+
+    CriticalSectionScoped cs(_callbackCritSectPtr);
+    if (_voiceEngineObserverPtr)
+    {
+        if (_receiving || _externalTransport)
+        {
+            WebRtc_Word32 channel = VoEChannelId(id);
+            assert(channel == _channelId);
+            // Ensure that next OnReceivedPacket() callback will trigger
+            // a VE_PACKET_RECEIPT_RESTARTED callback.
+            _rtpPacketTimedOut = true;
+            // Deliver callback to the observer
+            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
+                         VoEId(_instanceId,_channelId),
+                         "Channel::OnPacketTimeout() => "
+                         "CallbackOnError(VE_RECEIVE_PACKET_TIMEOUT)");
+            _voiceEngineObserverPtr->CallbackOnError(channel,
+                                                     VE_RECEIVE_PACKET_TIMEOUT);
+        }
+    }
+}
+
+void
+Channel::OnReceivedPacket(const WebRtc_Word32 id,
+                          const RtpRtcpPacketType packetType)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::OnReceivedPacket(id=%d, packetType=%d)",
+                 id, packetType);
+
+    assert(VoEChannelId(id) == _channelId);
+
+    // Notify only for the case when we have restarted an RTP session.
+    if (_rtpPacketTimedOut && (kPacketRtp == packetType))
+    {
+        CriticalSectionScoped cs(_callbackCritSectPtr);
+        if (_voiceEngineObserverPtr)
+        {
+            WebRtc_Word32 channel = VoEChannelId(id);
+            assert(channel == _channelId);
+            // Reset timeout mechanism
+            _rtpPacketTimedOut = false;
+            // Deliver callback to the observer
+            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
+                         VoEId(_instanceId,_channelId),
+                         "Channel::OnPacketTimeout() =>"
+                         " CallbackOnError(VE_PACKET_RECEIPT_RESTARTED)");
+            _voiceEngineObserverPtr->CallbackOnError(
+                channel,
+                VE_PACKET_RECEIPT_RESTARTED);
+        }
+    }
+}
+
+void
+Channel::OnPeriodicDeadOrAlive(const WebRtc_Word32 id,
+                               const RTPAliveType alive)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::OnPeriodicDeadOrAlive(id=%d, alive=%d)", id, alive);
+
+    if (!_connectionObserver)
+        return;
+
+    WebRtc_Word32 channel = VoEChannelId(id);
+    assert(channel == _channelId);
+
+    // Use Alive as default to limit risk of false Dead detections
+    bool isAlive(true);
+
+    // Always mark the connection as Dead when the module reports kRtpDead
+    if (kRtpDead == alive)
+    {
+        isAlive = false;
+    }
+
+    // It is possible that the connection is alive even if no RTP packet has
+    // been received for a long time since the other side might use VAD/DTX
+    // and a low SID-packet update rate.
+    if ((kRtpNoRtp == alive) && _playing)
+    {
+        // Detect Alive for all NetEQ states except for the case when we are
+        // in PLC_CNG state.
+        // PLC_CNG <=> background noise only due to long expand or error.
+        // Note that, the case where the other side stops sending during CNG
+        // state will be detected as Alive. Dead is is not set until after
+        // missing RTCP packets for at least twelve seconds (handled
+        // internally by the RTP/RTCP module).
+        isAlive = (_outputSpeechType != AudioFrame::kPLCCNG);
+    }
+
+    UpdateDeadOrAliveCounters(isAlive);
+
+    // Send callback to the registered observer
+    if (_connectionObserver)
+    {
+        CriticalSectionScoped cs(&_callbackCritSect);
+        if (_connectionObserverPtr)
+        {
+            _connectionObserverPtr->OnPeriodicDeadOrAlive(channel, isAlive);
+        }
+    }
+}
+
+WebRtc_Word32
+Channel::OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                               const WebRtc_UWord16 payloadSize,
+                               const WebRtcRTPHeader* rtpHeader)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::OnReceivedPayloadData(payloadSize=%d,"
+                 " payloadType=%u, audioChannel=%u)",
+                 payloadSize,
+                 rtpHeader->header.payloadType,
+                 rtpHeader->type.Audio.channel);
+
+    if (!_playing)
+    {
+        // Avoid inserting into NetEQ when we are not playing. Count the
+        // packet as discarded.
+        WEBRTC_TRACE(kTraceStream, kTraceVoice,
+                     VoEId(_instanceId, _channelId),
+                     "received packet is discarded since playing is not"
+                     " activated");
+        _numberOfDiscardedPackets++;
+        return 0;
+    }
+
+    // Push the incoming payload (parsed and ready for decoding) into the ACM
+    if (_audioCodingModule.IncomingPacket(payloadData,
+                                          payloadSize,
+                                          *rtpHeader) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
+            "Channel::OnReceivedPayloadData() unable to push data to the ACM");
+        return -1;
+    }
+
+    // Update the packet delay
+    UpdatePacketDelay(rtpHeader->header.timestamp,
+                      rtpHeader->header.sequenceNumber);
+
+    return 0;
+}
+
+WebRtc_Word32 Channel::GetAudioFrame(const WebRtc_Word32 id,
+                                     AudioFrame& audioFrame)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetAudioFrame(id=%d)", id);
+
+    // Get 10ms raw PCM data from the ACM (mixer limits output frequency)
+    if (_audioCodingModule.PlayoutData10Ms(audioFrame.sample_rate_hz_,
+                                           audioFrame) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "Channel::GetAudioFrame() PlayoutData10Ms() failed!");
+        // In all likelihood, the audio in this frame is garbage. We return an
+        // error so that the audio mixer module doesn't add it to the mix. As
+        // a result, it won't be played out and the actions skipped here are
+        // irrelevant.
+        return -1;
+    }
+
+    if (_RxVadDetection)
+    {
+        UpdateRxVadDetection(audioFrame);
+    }
+
+    // Convert module ID to internal VoE channel ID
+    audioFrame.id_ = VoEChannelId(audioFrame.id_);
+    // Store speech type for dead-or-alive detection
+    _outputSpeechType = audioFrame.speech_type_;
+
+    // Perform far-end AudioProcessing module processing on the received signal
+    if (_rxApmIsEnabled)
+    {
+        ApmProcessRx(audioFrame);
+    }
+
+    // Output volume scaling
+    if (_outputGain < 0.99f || _outputGain > 1.01f)
+    {
+        AudioFrameOperations::ScaleWithSat(_outputGain, audioFrame);
+    }
+
+    // Scale left and/or right channel(s) if stereo and master balance is
+    // active
+
+    if (_panLeft != 1.0f || _panRight != 1.0f)
+    {
+        if (audioFrame.num_channels_ == 1)
+        {
+            // Emulate stereo mode since panning is active.
+            // The mono signal is copied to both left and right channels here.
+            AudioFrameOperations::MonoToStereo(&audioFrame);
+        }
+        // For true stereo mode (when we are receiving a stereo signal), no
+        // action is needed.
+
+        // Do the panning operation (the audio frame contains stereo at this
+        // stage)
+        AudioFrameOperations::Scale(_panLeft, _panRight, audioFrame);
+    }
+
+    // Mix decoded PCM output with file if file mixing is enabled
+    if (_outputFilePlaying)
+    {
+        MixAudioWithFile(audioFrame, audioFrame.sample_rate_hz_);
+    }
+
+    // Place channel in on-hold state (~muted) if on-hold is activated
+    if (_outputIsOnHold)
+    {
+        AudioFrameOperations::Mute(audioFrame);
+    }
+
+    // External media
+    if (_outputExternalMedia)
+    {
+        CriticalSectionScoped cs(&_callbackCritSect);
+        const bool isStereo = (audioFrame.num_channels_ == 2);
+        if (_outputExternalMediaCallbackPtr)
+        {
+            _outputExternalMediaCallbackPtr->Process(
+                _channelId,
+                kPlaybackPerChannel,
+                (WebRtc_Word16*)audioFrame.data_,
+                audioFrame.samples_per_channel_,
+                audioFrame.sample_rate_hz_,
+                isStereo);
+        }
+    }
+
+    // Record playout if enabled
+    {
+        CriticalSectionScoped cs(&_fileCritSect);
+
+        if (_outputFileRecording && _outputFileRecorderPtr)
+        {
+            _outputFileRecorderPtr->RecordAudioToFile(audioFrame);
+        }
+    }
+
+    // Measure audio level (0-9)
+    _outputAudioLevel.ComputeLevel(audioFrame);
+
+    return 0;
+}
+
+WebRtc_Word32
+Channel::NeededFrequency(const WebRtc_Word32 id)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::NeededFrequency(id=%d)", id);
+
+    int highestNeeded = 0;
+
+    // Determine highest needed receive frequency
+    WebRtc_Word32 receiveFrequency = _audioCodingModule.ReceiveFrequency();
+
+    // Return the bigger of playout and receive frequency in the ACM.
+    if (_audioCodingModule.PlayoutFrequency() > receiveFrequency)
+    {
+        highestNeeded = _audioCodingModule.PlayoutFrequency();
+    }
+    else
+    {
+        highestNeeded = receiveFrequency;
+    }
+
+    // Special case, if we're playing a file on the playout side
+    // we take that frequency into consideration as well
+    // This is not needed on sending side, since the codec will
+    // limit the spectrum anyway.
+    if (_outputFilePlaying)
+    {
+        CriticalSectionScoped cs(&_fileCritSect);
+        if (_outputFilePlayerPtr && _outputFilePlaying)
+        {
+            if(_outputFilePlayerPtr->Frequency()>highestNeeded)
+            {
+                highestNeeded=_outputFilePlayerPtr->Frequency();
+            }
+        }
+    }
+
+    return(highestNeeded);
+}
+
+WebRtc_Word32
+Channel::CreateChannel(Channel*& channel,
+                       const WebRtc_Word32 channelId,
+                       const WebRtc_UWord32 instanceId)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId,channelId),
+                 "Channel::CreateChannel(channelId=%d, instanceId=%d)",
+        channelId, instanceId);
+
+    channel = new Channel(channelId, instanceId);
+    if (channel == NULL)
+    {
+        WEBRTC_TRACE(kTraceMemory, kTraceVoice,
+                     VoEId(instanceId,channelId),
+                     "Channel::CreateChannel() unable to allocate memory for"
+                     " channel");
+        return -1;
+    }
+    return 0;
+}
+
+void
+Channel::PlayNotification(const WebRtc_Word32 id,
+                          const WebRtc_UWord32 durationMs)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::PlayNotification(id=%d, durationMs=%d)",
+                 id, durationMs);
+
+    // Not implement yet
+}
+
+void
+Channel::RecordNotification(const WebRtc_Word32 id,
+                            const WebRtc_UWord32 durationMs)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::RecordNotification(id=%d, durationMs=%d)",
+                 id, durationMs);
+
+    // Not implement yet
+}
+
+void
+Channel::PlayFileEnded(const WebRtc_Word32 id)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::PlayFileEnded(id=%d)", id);
+
+    if (id == _inputFilePlayerId)
+    {
+        CriticalSectionScoped cs(&_fileCritSect);
+
+        _inputFilePlaying = false;
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "Channel::PlayFileEnded() => input file player module is"
+                     " shutdown");
+    }
+    else if (id == _outputFilePlayerId)
+    {
+        CriticalSectionScoped cs(&_fileCritSect);
+
+        _outputFilePlaying = false;
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "Channel::PlayFileEnded() => output file player module is"
+                     " shutdown");
+    }
+}
+
+void
+Channel::RecordFileEnded(const WebRtc_Word32 id)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::RecordFileEnded(id=%d)", id);
+
+    assert(id == _outputFileRecorderId);
+
+    CriticalSectionScoped cs(&_fileCritSect);
+
+    _outputFileRecording = false;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId,_channelId),
+                 "Channel::RecordFileEnded() => output file recorder module is"
+                 " shutdown");
+}
+
+Channel::Channel(const WebRtc_Word32 channelId,
+                 const WebRtc_UWord32 instanceId) :
+    _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _instanceId(instanceId),
+    _channelId(channelId),
+    _audioCodingModule(*AudioCodingModule::Create(
+        VoEModuleId(instanceId, channelId))),
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+     _numSocketThreads(KNumSocketThreads),
+    _socketTransportModule(*UdpTransport::Create(
+        VoEModuleId(instanceId, channelId), _numSocketThreads)),
+#endif
+#ifdef WEBRTC_SRTP
+    _srtpModule(*SrtpModule::CreateSrtpModule(VoEModuleId(instanceId,
+                                                          channelId))),
+#endif
+    _rtpDumpIn(*RtpDump::CreateRtpDump()),
+    _rtpDumpOut(*RtpDump::CreateRtpDump()),
+    _outputAudioLevel(),
+    _externalTransport(false),
+    _inputFilePlayerPtr(NULL),
+    _outputFilePlayerPtr(NULL),
+    _outputFileRecorderPtr(NULL),
+    // Avoid conflict with other channels by adding 1024 - 1026,
+    // won't use as much as 1024 channels.
+    _inputFilePlayerId(VoEModuleId(instanceId, channelId) + 1024),
+    _outputFilePlayerId(VoEModuleId(instanceId, channelId) + 1025),
+    _outputFileRecorderId(VoEModuleId(instanceId, channelId) + 1026),
+    _inputFilePlaying(false),
+    _outputFilePlaying(false),
+    _outputFileRecording(false),
+    _inbandDtmfQueue(VoEModuleId(instanceId, channelId)),
+    _inbandDtmfGenerator(VoEModuleId(instanceId, channelId)),
+    _inputExternalMedia(false),
+    _outputExternalMedia(false),
+    _inputExternalMediaCallbackPtr(NULL),
+    _outputExternalMediaCallbackPtr(NULL),
+    _encryptionRTPBufferPtr(NULL),
+    _decryptionRTPBufferPtr(NULL),
+    _encryptionRTCPBufferPtr(NULL),
+    _decryptionRTCPBufferPtr(NULL),
+    _timeStamp(0), // This is just an offset, RTP module will add it's own random offset
+    _sendTelephoneEventPayloadType(106),
+    _playoutTimeStampRTP(0),
+    _playoutTimeStampRTCP(0),
+    _numberOfDiscardedPackets(0),
+    _engineStatisticsPtr(NULL),
+    _outputMixerPtr(NULL),
+    _transmitMixerPtr(NULL),
+    _moduleProcessThreadPtr(NULL),
+    _audioDeviceModulePtr(NULL),
+    _voiceEngineObserverPtr(NULL),
+    _callbackCritSectPtr(NULL),
+    _transportPtr(NULL),
+    _encryptionPtr(NULL),
+    _rtpAudioProc(NULL),
+    _rxAudioProcessingModulePtr(NULL),
+#ifdef WEBRTC_DTMF_DETECTION
+    _telephoneEventDetectionPtr(NULL),
+#endif
+    _rxVadObserverPtr(NULL),
+    _oldVadDecision(-1),
+    _sendFrameType(0),
+    _rtpObserverPtr(NULL),
+    _rtcpObserverPtr(NULL),
+    _outputIsOnHold(false),
+    _externalPlayout(false),
+    _inputIsOnHold(false),
+    _playing(false),
+    _sending(false),
+    _receiving(false),
+    _mixFileWithMicrophone(false),
+    _rtpObserver(false),
+    _rtcpObserver(false),
+    _mute(false),
+    _panLeft(1.0f),
+    _panRight(1.0f),
+    _outputGain(1.0f),
+    _encrypting(false),
+    _decrypting(false),
+    _playOutbandDtmfEvent(false),
+    _playInbandDtmfEvent(false),
+    _inbandTelephoneEventDetection(false),
+    _outOfBandTelephoneEventDetecion(false),
+    _extraPayloadType(0),
+    _insertExtraRTPPacket(false),
+    _extraMarkerBit(false),
+    _lastLocalTimeStamp(0),
+    _lastPayloadType(0),
+    _includeAudioLevelIndication(false),
+    _rtpPacketTimedOut(false),
+    _rtpPacketTimeOutIsEnabled(false),
+    _rtpTimeOutSeconds(0),
+    _connectionObserver(false),
+    _connectionObserverPtr(NULL),
+    _countAliveDetections(0),
+    _countDeadDetections(0),
+    _outputSpeechType(AudioFrame::kNormalSpeech),
+    _averageDelayMs(0),
+    _previousSequenceNumber(0),
+    _previousTimestamp(0),
+    _recPacketDelayMs(20),
+    _RxVadDetection(false),
+    _rxApmIsEnabled(false),
+    _rxAgcIsEnabled(false),
+    _rxNsIsEnabled(false)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::Channel() - ctor");
+    _inbandDtmfQueue.ResetDtmf();
+    _inbandDtmfGenerator.Init();
+    _outputAudioLevel.Clear();
+
+    RtpRtcp::Configuration configuration;
+    configuration.id = VoEModuleId(instanceId, channelId);
+    configuration.audio = true;
+    configuration.incoming_data = this;
+    configuration.incoming_messages = this;
+    configuration.outgoing_transport = this;
+    configuration.rtcp_feedback = this;
+    configuration.audio_messages = this;
+
+    _rtpRtcpModule.reset(RtpRtcp::CreateRtpRtcp(configuration));
+
+    // Create far end AudioProcessing Module
+    _rxAudioProcessingModulePtr = AudioProcessing::Create(
+        VoEModuleId(instanceId, channelId));
+}
+
+Channel::~Channel()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::~Channel() - dtor");
+
+    if (_outputExternalMedia)
+    {
+        DeRegisterExternalMediaProcessing(kPlaybackPerChannel);
+    }
+    if (_inputExternalMedia)
+    {
+        DeRegisterExternalMediaProcessing(kRecordingPerChannel);
+    }
+    StopSend();
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    StopReceiving();
+    // De-register packet callback to ensure we're not in a callback when
+    // deleting channel state, avoids race condition and deadlock.
+    if (_socketTransportModule.InitializeReceiveSockets(NULL, 0, NULL, NULL, 0)
+            != 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId, _channelId),
+                     "~Channel() failed to de-register receive callback");
+    }
+#endif
+    StopPlayout();
+
+    {
+        CriticalSectionScoped cs(&_fileCritSect);
+        if (_inputFilePlayerPtr)
+        {
+            _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
+            _inputFilePlayerPtr->StopPlayingFile();
+            FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
+            _inputFilePlayerPtr = NULL;
+        }
+        if (_outputFilePlayerPtr)
+        {
+            _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
+            _outputFilePlayerPtr->StopPlayingFile();
+            FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
+            _outputFilePlayerPtr = NULL;
+        }
+        if (_outputFileRecorderPtr)
+        {
+            _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
+            _outputFileRecorderPtr->StopRecording();
+            FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
+            _outputFileRecorderPtr = NULL;
+        }
+    }
+
+    // The order to safely shutdown modules in a channel is:
+    // 1. De-register callbacks in modules
+    // 2. De-register modules in process thread
+    // 3. Destroy modules
+    if (_audioCodingModule.RegisterTransportCallback(NULL) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "~Channel() failed to de-register transport callback"
+                     " (Audio coding module)");
+    }
+    if (_audioCodingModule.RegisterVADCallback(NULL) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "~Channel() failed to de-register VAD callback"
+                     " (Audio coding module)");
+    }
+#ifdef WEBRTC_DTMF_DETECTION
+    if (_audioCodingModule.RegisterIncomingMessagesCallback(NULL) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "~Channel() failed to de-register incoming messages "
+                     "callback (Audio coding module)");
+    }
+#endif
+    // De-register modules in process thread
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (_moduleProcessThreadPtr->DeRegisterModule(&_socketTransportModule)
+            == -1)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "~Channel() failed to deregister socket module");
+    }
+#endif
+    if (_moduleProcessThreadPtr->DeRegisterModule(_rtpRtcpModule.get()) == -1)
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "~Channel() failed to deregister RTP/RTCP module");
+    }
+
+    // Destroy modules
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    UdpTransport::Destroy(
+        &_socketTransportModule);
+#endif
+    AudioCodingModule::Destroy(&_audioCodingModule);
+#ifdef WEBRTC_SRTP
+    SrtpModule::DestroySrtpModule(&_srtpModule);
+#endif
+    if (_rxAudioProcessingModulePtr != NULL)
+    {
+        AudioProcessing::Destroy(_rxAudioProcessingModulePtr); // far end APM
+        _rxAudioProcessingModulePtr = NULL;
+    }
+
+    // End of modules shutdown
+
+    // Delete other objects
+    RtpDump::DestroyRtpDump(&_rtpDumpIn);
+    RtpDump::DestroyRtpDump(&_rtpDumpOut);
+    delete [] _encryptionRTPBufferPtr;
+    delete [] _decryptionRTPBufferPtr;
+    delete [] _encryptionRTCPBufferPtr;
+    delete [] _decryptionRTCPBufferPtr;
+    delete &_callbackCritSect;
+    delete &_fileCritSect;
+}
+
+WebRtc_Word32
+Channel::Init()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::Init()");
+
+    // --- Initial sanity
+
+    if ((_engineStatisticsPtr == NULL) ||
+        (_moduleProcessThreadPtr == NULL))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice,
+                     VoEId(_instanceId,_channelId),
+                     "Channel::Init() must call SetEngineInformation() first");
+        return -1;
+    }
+
+    // --- Add modules to process thread (for periodic schedulation)
+
+    const bool processThreadFail =
+        ((_moduleProcessThreadPtr->RegisterModule(_rtpRtcpModule.get()) != 0) ||
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+        (_moduleProcessThreadPtr->RegisterModule(
+                &_socketTransportModule) != 0));
+#else
+        false);
+#endif
+    if (processThreadFail)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_CANNOT_INIT_CHANNEL, kTraceError,
+            "Channel::Init() modules not registered");
+        return -1;
+    }
+    // --- ACM initialization
+
+    if ((_audioCodingModule.InitializeReceiver() == -1) ||
+#ifdef WEBRTC_CODEC_AVT
+        // out-of-band Dtmf tones are played out by default
+        (_audioCodingModule.SetDtmfPlayoutStatus(true) == -1) ||
+#endif
+        (_audioCodingModule.InitializeSender() == -1))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+            "Channel::Init() unable to initialize the ACM - 1");
+        return -1;
+    }
+
+    // --- RTP/RTCP module initialization
+
+    // Ensure that RTCP is enabled by default for the created channel.
+    // Note that, the module will keep generating RTCP until it is explicitly
+    // disabled by the user.
+    // After StopListen (when no sockets exists), RTCP packets will no longer
+    // be transmitted since the Transport object will then be invalid.
+
+    const bool rtpRtcpFail =
+        ((_rtpRtcpModule->SetTelephoneEventStatus(false, true, true) == -1) ||
+        // RTCP is enabled by default
+        (_rtpRtcpModule->SetRTCPStatus(kRtcpCompound) == -1));
+    if (rtpRtcpFail)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+            "Channel::Init() RTP/RTCP module not initialized");
+        return -1;
+    }
+
+     // --- Register all permanent callbacks
+    const bool fail =
+        (_audioCodingModule.RegisterTransportCallback(this) == -1) ||
+        (_audioCodingModule.RegisterVADCallback(this) == -1);
+
+    if (fail)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_CANNOT_INIT_CHANNEL, kTraceError,
+            "Channel::Init() callbacks not registered");
+        return -1;
+    }
+
+    // --- Register all supported codecs to the receiving side of the
+    // RTP/RTCP module
+
+    CodecInst codec;
+    const WebRtc_UWord8 nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
+
+    for (int idx = 0; idx < nSupportedCodecs; idx++)
+    {
+        // Open up the RTP/RTCP receiver for all supported codecs
+        if ((_audioCodingModule.Codec(idx, codec) == -1) ||
+            (_rtpRtcpModule->RegisterReceivePayload(codec) == -1))
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                         VoEId(_instanceId,_channelId),
+                         "Channel::Init() unable to register %s (%d/%d/%d/%d) "
+                         "to RTP/RTCP receiver",
+                         codec.plname, codec.pltype, codec.plfreq,
+                         codec.channels, codec.rate);
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
+                         VoEId(_instanceId,_channelId),
+                         "Channel::Init() %s (%d/%d/%d/%d) has been added to "
+                         "the RTP/RTCP receiver",
+                         codec.plname, codec.pltype, codec.plfreq,
+                         codec.channels, codec.rate);
+        }
+
+        // Ensure that PCMU is used as default codec on the sending side
+        if (!STR_CASE_CMP(codec.plname, "PCMU") && (codec.channels == 1))
+        {
+            SetSendCodec(codec);
+        }
+
+        // Register default PT for outband 'telephone-event'
+        if (!STR_CASE_CMP(codec.plname, "telephone-event"))
+        {
+            if ((_rtpRtcpModule->RegisterSendPayload(codec) == -1) ||
+                (_audioCodingModule.RegisterReceiveCodec(codec) == -1))
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                             VoEId(_instanceId,_channelId),
+                             "Channel::Init() failed to register outband "
+                             "'telephone-event' (%d/%d) correctly",
+                             codec.pltype, codec.plfreq);
+            }
+        }
+
+        if (!STR_CASE_CMP(codec.plname, "CN"))
+        {
+            if ((_audioCodingModule.RegisterSendCodec(codec) == -1) ||
+                (_audioCodingModule.RegisterReceiveCodec(codec) == -1) ||
+                (_rtpRtcpModule->RegisterSendPayload(codec) == -1))
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                             VoEId(_instanceId,_channelId),
+                             "Channel::Init() failed to register CN (%d/%d) "
+                             "correctly - 1",
+                             codec.pltype, codec.plfreq);
+            }
+        }
+#ifdef WEBRTC_CODEC_RED
+        // Register RED to the receiving side of the ACM.
+        // We will not receive an OnInitializeDecoder() callback for RED.
+        if (!STR_CASE_CMP(codec.plname, "RED"))
+        {
+            if (_audioCodingModule.RegisterReceiveCodec(codec) == -1)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                             VoEId(_instanceId,_channelId),
+                             "Channel::Init() failed to register RED (%d/%d) "
+                             "correctly",
+                             codec.pltype, codec.plfreq);
+            }
+        }
+#endif
+    }
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    // Ensure that the WebRtcSocketTransport implementation is used as
+    // Transport on the sending side
+    {
+        // A lock is needed here since users can call
+        // RegisterExternalTransport() at the same time.
+        CriticalSectionScoped cs(&_callbackCritSect);
+        _transportPtr = &_socketTransportModule;
+    }
+#endif
+
+    // Initialize the far end AP module
+    // Using 8 kHz as initial Fs, the same as in transmission. Might be
+    // changed at the first receiving audio.
+    if (_rxAudioProcessingModulePtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_NO_MEMORY, kTraceCritical,
+            "Channel::Init() failed to create the far-end AudioProcessing"
+            " module");
+        return -1;
+    }
+
+    if (_rxAudioProcessingModulePtr->set_sample_rate_hz(8000))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceWarning,
+            "Channel::Init() failed to set the sample rate to 8K for"
+            " far-end AP module");
+    }
+
+    if (_rxAudioProcessingModulePtr->set_num_channels(1, 1) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SOUNDCARD_ERROR, kTraceWarning,
+            "Init() failed to set channels for the primary audio stream");
+    }
+
+    if (_rxAudioProcessingModulePtr->high_pass_filter()->Enable(
+        WEBRTC_VOICE_ENGINE_RX_HP_DEFAULT_STATE) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceWarning,
+            "Channel::Init() failed to set the high-pass filter for"
+            " far-end AP module");
+    }
+
+    if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(
+        (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceWarning,
+            "Init() failed to set noise reduction level for far-end"
+            " AP module");
+    }
+    if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(
+        WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_STATE) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceWarning,
+            "Init() failed to set noise reduction state for far-end"
+            " AP module");
+    }
+
+    if (_rxAudioProcessingModulePtr->gain_control()->set_mode(
+        (GainControl::Mode)WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_MODE) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceWarning,
+            "Init() failed to set AGC mode for far-end AP module");
+    }
+    if (_rxAudioProcessingModulePtr->gain_control()->Enable(
+        WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_STATE) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceWarning,
+            "Init() failed to set AGC state for far-end AP module");
+    }
+
+    return 0;
+}
+
+WebRtc_Word32
+Channel::SetEngineInformation(Statistics& engineStatistics,
+                              OutputMixer& outputMixer,
+                              voe::TransmitMixer& transmitMixer,
+                              ProcessThread& moduleProcessThread,
+                              AudioDeviceModule& audioDeviceModule,
+                              VoiceEngineObserver* voiceEngineObserver,
+                              CriticalSectionWrapper* callbackCritSect)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetEngineInformation()");
+    _engineStatisticsPtr = &engineStatistics;
+    _outputMixerPtr = &outputMixer;
+    _transmitMixerPtr = &transmitMixer,
+    _moduleProcessThreadPtr = &moduleProcessThread;
+    _audioDeviceModulePtr = &audioDeviceModule;
+    _voiceEngineObserverPtr = voiceEngineObserver;
+    _callbackCritSectPtr = callbackCritSect;
+    return 0;
+}
+
+WebRtc_Word32
+Channel::UpdateLocalTimeStamp()
+{
+
+    _timeStamp += _audioFrame.samples_per_channel_;
+    return 0;
+}
+
+WebRtc_Word32
+Channel::StartPlayout()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::StartPlayout()");
+    if (_playing)
+    {
+        return 0;
+    }
+    // Add participant as candidates for mixing.
+    if (_outputMixerPtr->SetMixabilityStatus(*this, true) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
+            "StartPlayout() failed to add participant to mixer");
+        return -1;
+    }
+
+    _playing = true;
+
+    if (RegisterFilePlayingToMixer() != 0)
+        return -1;
+
+    return 0;
+}
+
+WebRtc_Word32
+Channel::StopPlayout()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::StopPlayout()");
+    if (!_playing)
+    {
+        return 0;
+    }
+    // Remove participant as candidates for mixing
+    if (_outputMixerPtr->SetMixabilityStatus(*this, false) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
+            "StartPlayout() failed to remove participant from mixer");
+        return -1;
+    }
+
+    _playing = false;
+    _outputAudioLevel.Clear();
+
+    return 0;
+}
+
+WebRtc_Word32
+Channel::StartSend()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::StartSend()");
+    {
+        // A lock is needed because |_sending| can be accessed or modified by
+        // another thread at the same time.
+        CriticalSectionScoped cs(&_callbackCritSect);
+
+        if (_sending)
+        {
+            return 0;
+        }
+        _sending = true;
+    }
+
+    if (_rtpRtcpModule->SetSendingStatus(true) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+            "StartSend() RTP/RTCP failed to start sending");
+        CriticalSectionScoped cs(&_callbackCritSect);
+        _sending = false;
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32
+Channel::StopSend()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::StopSend()");
+    {
+        // A lock is needed because |_sending| can be accessed or modified by
+        // another thread at the same time.
+        CriticalSectionScoped cs(&_callbackCritSect);
+
+        if (!_sending)
+        {
+            return 0;
+        }
+        _sending = false;
+    }
+
+    // Reset sending SSRC and sequence number and triggers direct transmission
+    // of RTCP BYE
+    if (_rtpRtcpModule->SetSendingStatus(false) == -1 ||
+        _rtpRtcpModule->ResetSendDataCountersRTP() == -1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
+            "StartSend() RTP/RTCP failed to stop sending");
+    }
+
+    return 0;
+}
+
+WebRtc_Word32
+Channel::StartReceiving()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::StartReceiving()");
+    if (_receiving)
+    {
+        return 0;
+    }
+    // If external transport is used, we will only initialize/set the variables
+    // after this section, since we are not using the WebRtc transport but
+    // still need to keep track of e.g. if we are receiving.
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_externalTransport)
+    {
+        if (!_socketTransportModule.ReceiveSocketsInitialized())
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_SOCKETS_NOT_INITED, kTraceError,
+                "StartReceive() must set local receiver first");
+            return -1;
+        }
+        if (_socketTransportModule.StartReceiving(KNumberOfSocketBuffers) != 0)
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceError,
+                "StartReceiving() failed to start receiving");
+            return -1;
+        }
+    }
+#endif
+    _receiving = true;
+    _numberOfDiscardedPackets = 0;
+    return 0;
+}
+
+WebRtc_Word32
+Channel::StopReceiving()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::StopReceiving()");
+    if (!_receiving)
+    {
+        return 0;
+    }
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_externalTransport &&
+        _socketTransportModule.ReceiveSocketsInitialized())
+    {
+        if (_socketTransportModule.StopReceiving() != 0)
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceError,
+                "StopReceiving() failed to stop receiving.");
+            return -1;
+        }
+    }
+#endif
+    bool dtmfDetection = _rtpRtcpModule->TelephoneEvent();
+    // Recover DTMF detection status.
+    WebRtc_Word32 ret = _rtpRtcpModule->SetTelephoneEventStatus(dtmfDetection,
+                                                               true, true);
+    if (ret != 0) {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceWarning,
+            "StopReceiving() failed to restore telephone-event status.");
+    }
+    RegisterReceiveCodecsToRTPModule();
+    _receiving = false;
+    return 0;
+}
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+WebRtc_Word32
+Channel::SetLocalReceiver(const WebRtc_UWord16 rtpPort,
+                          const WebRtc_UWord16 rtcpPort,
+                          const char ipAddr[64],
+                          const char multicastIpAddr[64])
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetLocalReceiver()");
+
+    if (_externalTransport)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "SetLocalReceiver() conflict with external transport");
+        return -1;
+    }
+
+    if (_sending)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_ALREADY_SENDING, kTraceError,
+            "SetLocalReceiver() already sending");
+        return -1;
+    }
+    if (_receiving)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_ALREADY_LISTENING, kTraceError,
+            "SetLocalReceiver() already receiving");
+        return -1;
+    }
+
+    if (_socketTransportModule.InitializeReceiveSockets(this,
+                                                        rtpPort,
+                                                        ipAddr,
+                                                        multicastIpAddr,
+                                                        rtcpPort) != 0)
+    {
+        UdpTransport::ErrorCode lastSockError(
+            _socketTransportModule.LastError());
+        switch (lastSockError)
+        {
+        case UdpTransport::kIpAddressInvalid:
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_IP_ADDRESS, kTraceError,
+                "SetLocalReceiver() invalid IP address");
+            break;
+        case UdpTransport::kSocketInvalid:
+            _engineStatisticsPtr->SetLastError(
+                VE_SOCKET_ERROR, kTraceError,
+                "SetLocalReceiver() invalid socket");
+            break;
+        case UdpTransport::kPortInvalid:
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_PORT_NMBR, kTraceError,
+                "SetLocalReceiver() invalid port");
+            break;
+        case UdpTransport::kFailedToBindPort:
+            _engineStatisticsPtr->SetLastError(
+                VE_BINDING_SOCKET_TO_LOCAL_ADDRESS_FAILED, kTraceError,
+                "SetLocalReceiver() binding failed");
+            break;
+        default:
+            _engineStatisticsPtr->SetLastError(
+                VE_SOCKET_ERROR, kTraceError,
+                "SetLocalReceiver() undefined socket error");
+            break;
+        }
+        return -1;
+    }
+    return 0;
+}
+#endif
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+WebRtc_Word32
+Channel::GetLocalReceiver(int& port, int& RTCPport, char ipAddr[64])
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetLocalReceiver()");
+
+    if (_externalTransport)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "SetLocalReceiver() conflict with external transport");
+        return -1;
+    }
+
+    char ipAddrTmp[UdpTransport::kIpAddressVersion6Length] = {0};
+    WebRtc_UWord16 rtpPort(0);
+    WebRtc_UWord16 rtcpPort(0);
+    char multicastIpAddr[UdpTransport::kIpAddressVersion6Length] = {0};
+
+    // Acquire socket information from the socket module
+    if (_socketTransportModule.ReceiveSocketInformation(ipAddrTmp,
+                                                        rtpPort,
+                                                        rtcpPort,
+                                                        multicastIpAddr) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_CANNOT_GET_SOCKET_INFO, kTraceError,
+            "GetLocalReceiver() unable to retrieve socket information");
+        return -1;
+    }
+
+    // Deliver valid results to the user
+    port = static_cast<int> (rtpPort);
+    RTCPport = static_cast<int> (rtcpPort);
+    if (ipAddr != NULL)
+    {
+        strcpy(ipAddr, ipAddrTmp);
+    }
+    return 0;
+}
+#endif
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+WebRtc_Word32
+Channel::SetSendDestination(const WebRtc_UWord16 rtpPort,
+                            const char ipAddr[64],
+                            const int sourcePort,
+                            const WebRtc_UWord16 rtcpPort)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetSendDestination()");
+
+    if (_externalTransport)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "SetSendDestination() conflict with external transport");
+        return -1;
+    }
+
+    // Initialize ports and IP address for the remote (destination) side.
+    // By default, the sockets used for receiving are used for transmission as
+    // well, hence the source ports for outgoing packets are the same as the
+    // receiving ports specified in SetLocalReceiver.
+    // If an extra send socket has been created, it will be utilized until a
+    // new source port is specified or until the channel has been deleted and
+    // recreated. If no socket exists, sockets will be created when the first
+    // RTP and RTCP packets shall be transmitted (see e.g.
+    // UdpTransportImpl::SendPacket()).
+    //
+    // NOTE: this function does not require that sockets exists; all it does is
+    // to build send structures to be used with the sockets when they exist.
+    // It is therefore possible to call this method before SetLocalReceiver.
+    // However, sockets must exist if a multi-cast address is given as input.
+
+    // Build send structures and enable QoS (if enabled and supported)
+    if (_socketTransportModule.InitializeSendSockets(
+        ipAddr, rtpPort, rtcpPort) != UdpTransport::kNoSocketError)
+    {
+        UdpTransport::ErrorCode lastSockError(
+            _socketTransportModule.LastError());
+        switch (lastSockError)
+        {
+        case UdpTransport::kIpAddressInvalid:
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_IP_ADDRESS, kTraceError,
+                "SetSendDestination() invalid IP address 1");
+            break;
+        case UdpTransport::kSocketInvalid:
+            _engineStatisticsPtr->SetLastError(
+                VE_SOCKET_ERROR, kTraceError,
+                "SetSendDestination() invalid socket 1");
+            break;
+        case UdpTransport::kQosError:
+            _engineStatisticsPtr->SetLastError(
+                VE_GQOS_ERROR, kTraceError,
+                "SetSendDestination() failed to set QoS");
+            break;
+        case UdpTransport::kMulticastAddressInvalid:
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_MULTICAST_ADDRESS, kTraceError,
+                "SetSendDestination() invalid multicast address");
+            break;
+        default:
+            _engineStatisticsPtr->SetLastError(
+                VE_SOCKET_ERROR, kTraceError,
+                "SetSendDestination() undefined socket error 1");
+            break;
+        }
+        return -1;
+    }
+
+    // Check if the user has specified a non-default source port different from
+    // the local receive port.
+    // If so, an extra local socket will be created unless the source port is
+    // not unique.
+    if (sourcePort != kVoEDefault)
+    {
+        WebRtc_UWord16 receiverRtpPort(0);
+        WebRtc_UWord16 rtcpNA(0);
+        if (_socketTransportModule.ReceiveSocketInformation(NULL,
+                                                            receiverRtpPort,
+                                                            rtcpNA,
+                                                            NULL) != 0)
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_CANNOT_GET_SOCKET_INFO, kTraceError,
+                "SetSendDestination() failed to retrieve socket information");
+            return -1;
+        }
+
+        WebRtc_UWord16 sourcePortUW16 =
+                static_cast<WebRtc_UWord16> (sourcePort);
+
+        // An extra socket will only be created if the specified source port
+        // differs from the local receive port.
+        if (sourcePortUW16 != receiverRtpPort)
+        {
+            // Initialize extra local socket to get a different source port
+            // than the local
+            // receiver port. Always use default source for RTCP.
+            // Note that, this calls UdpTransport::CloseSendSockets().
+            if (_socketTransportModule.InitializeSourcePorts(
+                sourcePortUW16,
+                sourcePortUW16+1) != 0)
+            {
+                UdpTransport::ErrorCode lastSockError(
+                    _socketTransportModule.LastError());
+                switch (lastSockError)
+                {
+                case UdpTransport::kIpAddressInvalid:
+                    _engineStatisticsPtr->SetLastError(
+                        VE_INVALID_IP_ADDRESS, kTraceError,
+                        "SetSendDestination() invalid IP address 2");
+                    break;
+                case UdpTransport::kSocketInvalid:
+                    _engineStatisticsPtr->SetLastError(
+                        VE_SOCKET_ERROR, kTraceError,
+                        "SetSendDestination() invalid socket 2");
+                    break;
+                default:
+                    _engineStatisticsPtr->SetLastError(
+                        VE_SOCKET_ERROR, kTraceError,
+                        "SetSendDestination() undefined socket error 2");
+                    break;
+                }
+                return -1;
+            }
+            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
+                         VoEId(_instanceId,_channelId),
+                         "SetSendDestination() extra local socket is created"
+                         " to facilitate unique source port");
+        }
+        else
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
+                         VoEId(_instanceId,_channelId),
+                         "SetSendDestination() sourcePort equals the local"
+                         " receive port => no extra socket is created");
+        }
+    }
+
+    return 0;
+}
+#endif
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+WebRtc_Word32
+Channel::GetSendDestination(int& port,
+                            char ipAddr[64],
+                            int& sourcePort,
+                            int& RTCPport)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetSendDestination()");
+
+    if (_externalTransport)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "GetSendDestination() conflict with external transport");
+        return -1;
+    }
+
+    char ipAddrTmp[UdpTransport::kIpAddressVersion6Length] = {0};
+    WebRtc_UWord16 rtpPort(0);
+    WebRtc_UWord16 rtcpPort(0);
+    WebRtc_UWord16 rtpSourcePort(0);
+    WebRtc_UWord16 rtcpSourcePort(0);
+
+    // Acquire sending socket information from the socket module
+    _socketTransportModule.SendSocketInformation(ipAddrTmp, rtpPort, rtcpPort);
+    _socketTransportModule.SourcePorts(rtpSourcePort, rtcpSourcePort);
+
+    // Deliver valid results to the user
+    port = static_cast<int> (rtpPort);
+    RTCPport = static_cast<int> (rtcpPort);
+    sourcePort = static_cast<int> (rtpSourcePort);
+    if (ipAddr != NULL)
+    {
+        strcpy(ipAddr, ipAddrTmp);
+    }
+
+    return 0;
+}
+#endif
+
+
+WebRtc_Word32
+Channel::SetNetEQPlayoutMode(NetEqModes mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetNetEQPlayoutMode()");
+    AudioPlayoutMode playoutMode(voice);
+    switch (mode)
+    {
+        case kNetEqDefault:
+            playoutMode = voice;
+            break;
+        case kNetEqStreaming:
+            playoutMode = streaming;
+            break;
+        case kNetEqFax:
+            playoutMode = fax;
+            break;
+    }
+    if (_audioCodingModule.SetPlayoutMode(playoutMode) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+            "SetNetEQPlayoutMode() failed to set playout mode");
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32
+Channel::GetNetEQPlayoutMode(NetEqModes& mode)
+{
+    const AudioPlayoutMode playoutMode = _audioCodingModule.PlayoutMode();
+    switch (playoutMode)
+    {
+        case voice:
+            mode = kNetEqDefault;
+            break;
+        case streaming:
+            mode = kNetEqStreaming;
+            break;
+        case fax:
+            mode = kNetEqFax;
+            break;
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId,_channelId),
+                 "Channel::GetNetEQPlayoutMode() => mode=%u", mode);
+    return 0;
+}
+
+WebRtc_Word32
+Channel::SetNetEQBGNMode(NetEqBgnModes mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetNetEQPlayoutMode()");
+    ACMBackgroundNoiseMode noiseMode(On);
+    switch (mode)
+    {
+        case kBgnOn:
+            noiseMode = On;
+            break;
+        case kBgnFade:
+            noiseMode = Fade;
+            break;
+        case kBgnOff:
+            noiseMode = Off;
+            break;
+    }
+    if (_audioCodingModule.SetBackgroundNoiseMode(noiseMode) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+            "SetBackgroundNoiseMode() failed to set noise mode");
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32
+Channel::SetOnHoldStatus(bool enable, OnHoldModes mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetOnHoldStatus()");
+    if (mode == kHoldSendAndPlay)
+    {
+        _outputIsOnHold = enable;
+        _inputIsOnHold = enable;
+    }
+    else if (mode == kHoldPlayOnly)
+    {
+        _outputIsOnHold = enable;
+    }
+    if (mode == kHoldSendOnly)
+    {
+        _inputIsOnHold = enable;
+    }
+    return 0;
+}
+
+WebRtc_Word32
+Channel::GetOnHoldStatus(bool& enabled, OnHoldModes& mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetOnHoldStatus()");
+    enabled = (_outputIsOnHold || _inputIsOnHold);
+    if (_outputIsOnHold && _inputIsOnHold)
+    {
+        mode = kHoldSendAndPlay;
+    }
+    else if (_outputIsOnHold && !_inputIsOnHold)
+    {
+        mode = kHoldPlayOnly;
+    }
+    else if (!_outputIsOnHold && _inputIsOnHold)
+    {
+        mode = kHoldSendOnly;
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetOnHoldStatus() => enabled=%d, mode=%d",
+                 enabled, mode);
+    return 0;
+}
+
+WebRtc_Word32
+Channel::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::RegisterVoiceEngineObserver()");
+    CriticalSectionScoped cs(&_callbackCritSect);
+
+    if (_voiceEngineObserverPtr)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "RegisterVoiceEngineObserver() observer already enabled");
+        return -1;
+    }
+    _voiceEngineObserverPtr = &observer;
+    return 0;
+}
+
+WebRtc_Word32
+Channel::DeRegisterVoiceEngineObserver()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::DeRegisterVoiceEngineObserver()");
+    CriticalSectionScoped cs(&_callbackCritSect);
+
+    if (!_voiceEngineObserverPtr)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceWarning,
+            "DeRegisterVoiceEngineObserver() observer already disabled");
+        return 0;
+    }
+    _voiceEngineObserverPtr = NULL;
+    return 0;
+}
+
+WebRtc_Word32
+Channel::GetNetEQBGNMode(NetEqBgnModes& mode)
+{
+  ACMBackgroundNoiseMode noiseMode(On);
+    _audioCodingModule.BackgroundNoiseMode(noiseMode);
+    switch (noiseMode)
+    {
+        case On:
+            mode = kBgnOn;
+            break;
+        case Fade:
+            mode = kBgnFade;
+            break;
+        case Off:
+            mode = kBgnOff;
+            break;
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetNetEQBGNMode() => mode=%u", mode);
+    return 0;
+}
+
+WebRtc_Word32
+Channel::GetSendCodec(CodecInst& codec)
+{
+    return (_audioCodingModule.SendCodec(codec));
+}
+
+WebRtc_Word32
+Channel::GetRecCodec(CodecInst& codec)
+{
+    return (_audioCodingModule.ReceiveCodec(codec));
+}
+
+WebRtc_Word32
+Channel::SetSendCodec(const CodecInst& codec)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetSendCodec()");
+
+    if (_audioCodingModule.RegisterSendCodec(codec) != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "SetSendCodec() failed to register codec to ACM");
+        return -1;
+    }
+
+    if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
+    {
+        _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
+        if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
+        {
+            WEBRTC_TRACE(
+                    kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
+                    "SetSendCodec() failed to register codec to"
+                    " RTP/RTCP module");
+            return -1;
+        }
+    }
+
+    if (_rtpRtcpModule->SetAudioPacketSize(codec.pacsize) != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "SetSendCodec() failed to set audio packet size");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32
+Channel::SetVADStatus(bool enableVAD, ACMVADMode mode, bool disableDTX)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetVADStatus(mode=%d)", mode);
+    // To disable VAD, DTX must be disabled too
+    disableDTX = ((enableVAD == false) ? true : disableDTX);
+    if (_audioCodingModule.SetVAD(!disableDTX, enableVAD, mode) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+            "SetVADStatus() failed to set VAD");
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32
+Channel::GetVADStatus(bool& enabledVAD, ACMVADMode& mode, bool& disabledDTX)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetVADStatus");
+    if (_audioCodingModule.VAD(disabledDTX, enabledVAD, mode) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+            "GetVADStatus() failed to get VAD status");
+        return -1;
+    }
+    disabledDTX = !disabledDTX;
+    return 0;
+}
+
+WebRtc_Word32
+Channel::SetRecPayloadType(const CodecInst& codec)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetRecPayloadType()");
+
+    if (_playing)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_ALREADY_PLAYING, kTraceError,
+            "SetRecPayloadType() unable to set PT while playing");
+        return -1;
+    }
+    if (_receiving)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_ALREADY_LISTENING, kTraceError,
+            "SetRecPayloadType() unable to set PT while listening");
+        return -1;
+    }
+
+    if (codec.pltype == -1)
+    {
+        // De-register the selected codec (RTP/RTCP module and ACM)
+
+        WebRtc_Word8 pltype(-1);
+        CodecInst rxCodec = codec;
+
+        // Get payload type for the given codec
+        _rtpRtcpModule->ReceivePayloadType(rxCodec, &pltype);
+        rxCodec.pltype = pltype;
+
+        if (_rtpRtcpModule->DeRegisterReceivePayload(pltype) != 0)
+        {
+            _engineStatisticsPtr->SetLastError(
+                    VE_RTP_RTCP_MODULE_ERROR,
+                    kTraceError,
+                    "SetRecPayloadType() RTP/RTCP-module deregistration "
+                    "failed");
+            return -1;
+        }
+        if (_audioCodingModule.UnregisterReceiveCodec(rxCodec.pltype) != 0)
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+                "SetRecPayloadType() ACM deregistration failed - 1");
+            return -1;
+        }
+        return 0;
+    }
+
+    if (_rtpRtcpModule->RegisterReceivePayload(codec) != 0)
+    {
+        // First attempt to register failed => de-register and try again
+        _rtpRtcpModule->DeRegisterReceivePayload(codec.pltype);
+        if (_rtpRtcpModule->RegisterReceivePayload(codec) != 0)
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+                "SetRecPayloadType() RTP/RTCP-module registration failed");
+            return -1;
+        }
+    }
+    if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
+    {
+        _audioCodingModule.UnregisterReceiveCodec(codec.pltype);
+        if (_audioCodingModule.RegisterReceiveCodec(codec) != 0)
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+                "SetRecPayloadType() ACM registration failed - 1");
+            return -1;
+        }
+    }
+    return 0;
+}
+
+WebRtc_Word32
+Channel::GetRecPayloadType(CodecInst& codec)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetRecPayloadType()");
+    WebRtc_Word8 payloadType(-1);
+    if (_rtpRtcpModule->ReceivePayloadType(codec, &payloadType) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceWarning,
+            "GetRecPayloadType() failed to retrieve RX payload type");
+        return -1;
+    }
+    codec.pltype = payloadType;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetRecPayloadType() => pltype=%u", codec.pltype);
+    return 0;
+}
+
+WebRtc_Word32
+Channel::SetAMREncFormat(AmrMode mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetAMREncFormat()");
+
+    // ACM doesn't support AMR
+    return -1;
+}
+
+WebRtc_Word32
+Channel::SetAMRDecFormat(AmrMode mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetAMRDecFormat()");
+
+    // ACM doesn't support AMR
+    return -1;
+}
+
+WebRtc_Word32
+Channel::SetAMRWbEncFormat(AmrMode mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetAMRWbEncFormat()");
+
+    // ACM doesn't support AMR
+    return -1;
+
+}
+
+WebRtc_Word32
+Channel::SetAMRWbDecFormat(AmrMode mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetAMRWbDecFormat()");
+
+    // ACM doesn't support AMR
+    return -1;
+}
+
+WebRtc_Word32
+Channel::SetSendCNPayloadType(int type, PayloadFrequencies frequency)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetSendCNPayloadType()");
+
+    CodecInst codec;
+    WebRtc_Word32 samplingFreqHz(-1);
+    const int kMono = 1;
+    if (frequency == kFreq32000Hz)
+        samplingFreqHz = 32000;
+    else if (frequency == kFreq16000Hz)
+        samplingFreqHz = 16000;
+
+    if (_audioCodingModule.Codec("CN", codec, samplingFreqHz, kMono) == -1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+            "SetSendCNPayloadType() failed to retrieve default CN codec "
+            "settings");
+        return -1;
+    }
+
+    // Modify the payload type (must be set to dynamic range)
+    codec.pltype = type;
+
+    if (_audioCodingModule.RegisterSendCodec(codec) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+            "SetSendCNPayloadType() failed to register CN to ACM");
+        return -1;
+    }
+
+    if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
+    {
+        _rtpRtcpModule->DeRegisterSendPayload(codec.pltype);
+        if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+                "SetSendCNPayloadType() failed to register CN to RTP/RTCP "
+                "module");
+            return -1;
+        }
+    }
+    return 0;
+}
+
+WebRtc_Word32
+Channel::SetISACInitTargetRate(int rateBps, bool useFixedFrameSize)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetISACInitTargetRate()");
+
+    CodecInst sendCodec;
+    if (_audioCodingModule.SendCodec(sendCodec) == -1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_CODEC_ERROR, kTraceError,
+            "SetISACInitTargetRate() failed to retrieve send codec");
+        return -1;
+    }
+    if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
+    {
+        // This API is only valid if iSAC is setup to run in channel-adaptive
+        // mode.
+        // We do not validate the adaptive mode here. It is done later in the
+        // ConfigISACBandwidthEstimator() API.
+        _engineStatisticsPtr->SetLastError(
+            VE_CODEC_ERROR, kTraceError,
+            "SetISACInitTargetRate() send codec is not iSAC");
+        return -1;
+    }
+
+    WebRtc_UWord8 initFrameSizeMsec(0);
+    if (16000 == sendCodec.plfreq)
+    {
+        // Note that 0 is a valid and corresponds to "use default
+        if ((rateBps != 0 &&
+            rateBps < kVoiceEngineMinIsacInitTargetRateBpsWb) ||
+            (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsWb))
+        {
+             _engineStatisticsPtr->SetLastError(
+                VE_INVALID_ARGUMENT, kTraceError,
+                "SetISACInitTargetRate() invalid target rate - 1");
+            return -1;
+        }
+        // 30 or 60ms
+        initFrameSizeMsec = (WebRtc_UWord8)(sendCodec.pacsize / 16);
+    }
+    else if (32000 == sendCodec.plfreq)
+    {
+        if ((rateBps != 0 &&
+            rateBps < kVoiceEngineMinIsacInitTargetRateBpsSwb) ||
+            (rateBps > kVoiceEngineMaxIsacInitTargetRateBpsSwb))
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_ARGUMENT, kTraceError,
+                "SetISACInitTargetRate() invalid target rate - 2");
+            return -1;
+        }
+        initFrameSizeMsec = (WebRtc_UWord8)(sendCodec.pacsize / 32); // 30ms
+    }
+
+    if (_audioCodingModule.ConfigISACBandwidthEstimator(
+        initFrameSizeMsec, rateBps, useFixedFrameSize) == -1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+            "SetISACInitTargetRate() iSAC BWE config failed");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32
+Channel::SetISACMaxRate(int rateBps)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetISACMaxRate()");
+
+    CodecInst sendCodec;
+    if (_audioCodingModule.SendCodec(sendCodec) == -1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_CODEC_ERROR, kTraceError,
+            "SetISACMaxRate() failed to retrieve send codec");
+        return -1;
+    }
+    if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
+    {
+        // This API is only valid if iSAC is selected as sending codec.
+        _engineStatisticsPtr->SetLastError(
+            VE_CODEC_ERROR, kTraceError,
+            "SetISACMaxRate() send codec is not iSAC");
+        return -1;
+    }
+    if (16000 == sendCodec.plfreq)
+    {
+        if ((rateBps < kVoiceEngineMinIsacMaxRateBpsWb) ||
+            (rateBps > kVoiceEngineMaxIsacMaxRateBpsWb))
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_ARGUMENT, kTraceError,
+                "SetISACMaxRate() invalid max rate - 1");
+            return -1;
+        }
+    }
+    else if (32000 == sendCodec.plfreq)
+    {
+        if ((rateBps < kVoiceEngineMinIsacMaxRateBpsSwb) ||
+            (rateBps > kVoiceEngineMaxIsacMaxRateBpsSwb))
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_ARGUMENT, kTraceError,
+                "SetISACMaxRate() invalid max rate - 2");
+            return -1;
+        }
+    }
+    if (_sending)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SENDING, kTraceError,
+            "SetISACMaxRate() unable to set max rate while sending");
+        return -1;
+    }
+
+    // Set the maximum instantaneous rate of iSAC (works for both adaptive
+    // and non-adaptive mode)
+    if (_audioCodingModule.SetISACMaxRate(rateBps) == -1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+            "SetISACMaxRate() failed to set max rate");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32
+Channel::SetISACMaxPayloadSize(int sizeBytes)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetISACMaxPayloadSize()");
+    CodecInst sendCodec;
+    if (_audioCodingModule.SendCodec(sendCodec) == -1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_CODEC_ERROR, kTraceError,
+            "SetISACMaxPayloadSize() failed to retrieve send codec");
+        return -1;
+    }
+    if (STR_CASE_CMP(sendCodec.plname, "ISAC") != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_CODEC_ERROR, kTraceError,
+            "SetISACMaxPayloadSize() send codec is not iSAC");
+        return -1;
+    }
+    if (16000 == sendCodec.plfreq)
+    {
+        if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesWb) ||
+            (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesWb))
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_ARGUMENT, kTraceError,
+                "SetISACMaxPayloadSize() invalid max payload - 1");
+            return -1;
+        }
+    }
+    else if (32000 == sendCodec.plfreq)
+    {
+        if ((sizeBytes < kVoiceEngineMinIsacMaxPayloadSizeBytesSwb) ||
+            (sizeBytes > kVoiceEngineMaxIsacMaxPayloadSizeBytesSwb))
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_ARGUMENT, kTraceError,
+                "SetISACMaxPayloadSize() invalid max payload - 2");
+            return -1;
+        }
+    }
+    if (_sending)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SENDING, kTraceError,
+            "SetISACMaxPayloadSize() unable to set max rate while sending");
+        return -1;
+    }
+
+    if (_audioCodingModule.SetISACMaxPayloadSize(sizeBytes) == -1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+            "SetISACMaxPayloadSize() failed to set max payload size");
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32 Channel::RegisterExternalTransport(Transport& transport)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+               "Channel::RegisterExternalTransport()");
+
+    CriticalSectionScoped cs(&_callbackCritSect);
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    // Sanity checks for default (non external transport) to avoid conflict with
+    // WebRtc sockets.
+    if (_socketTransportModule.SendSocketsInitialized())
+    {
+        _engineStatisticsPtr->SetLastError(VE_SEND_SOCKETS_CONFLICT,
+                                           kTraceError,
+                "RegisterExternalTransport() send sockets already initialized");
+        return -1;
+    }
+    if (_socketTransportModule.ReceiveSocketsInitialized())
+    {
+        _engineStatisticsPtr->SetLastError(VE_RECEIVE_SOCKETS_CONFLICT,
+                                           kTraceError,
+             "RegisterExternalTransport() receive sockets already initialized");
+        return -1;
+    }
+#endif
+    if (_externalTransport)
+    {
+        _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION,
+                                           kTraceError,
+              "RegisterExternalTransport() external transport already enabled");
+       return -1;
+    }
+    _externalTransport = true;
+    _transportPtr = &transport;
+    return 0;
+}
+
+WebRtc_Word32
+Channel::DeRegisterExternalTransport()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::DeRegisterExternalTransport()");
+
+    CriticalSectionScoped cs(&_callbackCritSect);
+
+    if (!_transportPtr)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceWarning,
+            "DeRegisterExternalTransport() external transport already "
+            "disabled");
+        return 0;
+    }
+    _externalTransport = false;
+#ifdef WEBRTC_EXTERNAL_TRANSPORT
+    _transportPtr = NULL;
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "DeRegisterExternalTransport() all transport is disabled");
+#else
+    _transportPtr = &_socketTransportModule;
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "DeRegisterExternalTransport() internal Transport is enabled");
+#endif
+    return 0;
+}
+
+WebRtc_Word32
+Channel::ReceivedRTPPacket(const WebRtc_Word8* data, WebRtc_Word32 length)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::ReceivedRTPPacket()");
+    const char dummyIP[] = "127.0.0.1";
+    IncomingRTPPacket(data, length, dummyIP, 0);
+    return 0;
+}
+
+WebRtc_Word32
+Channel::ReceivedRTCPPacket(const WebRtc_Word8* data, WebRtc_Word32 length)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::ReceivedRTCPPacket()");
+    const char dummyIP[] = "127.0.0.1";
+    IncomingRTCPPacket(data, length, dummyIP, 0);
+    return 0;
+}
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+WebRtc_Word32
+Channel::GetSourceInfo(int& rtpPort, int& rtcpPort, char ipAddr[64])
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetSourceInfo()");
+
+    WebRtc_UWord16 rtpPortModule;
+    WebRtc_UWord16 rtcpPortModule;
+    char ipaddr[UdpTransport::kIpAddressVersion6Length] = {0};
+
+    if (_socketTransportModule.RemoteSocketInformation(ipaddr,
+                                                       rtpPortModule,
+                                                       rtcpPortModule) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceError,
+            "GetSourceInfo() failed to retrieve remote socket information");
+        return -1;
+    }
+    strcpy(ipAddr, ipaddr);
+    rtpPort = rtpPortModule;
+    rtcpPort = rtcpPortModule;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+        "GetSourceInfo() => rtpPort=%d, rtcpPort=%d, ipAddr=%s",
+        rtpPort, rtcpPort, ipAddr);
+    return 0;
+}
+
+WebRtc_Word32
+Channel::EnableIPv6()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::EnableIPv6()");
+    if (_socketTransportModule.ReceiveSocketsInitialized() ||
+        _socketTransportModule.SendSocketsInitialized())
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "EnableIPv6() socket layer is already initialized");
+        return -1;
+    }
+    if (_socketTransportModule.EnableIpV6() != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SOCKET_ERROR, kTraceError,
+            "EnableIPv6() failed to enable IPv6");
+        const UdpTransport::ErrorCode lastError =
+            _socketTransportModule.LastError();
+        WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "UdpTransport::LastError() => %d", lastError);
+        return -1;
+    }
+    return 0;
+}
+
+bool
+Channel::IPv6IsEnabled() const
+{
+    bool isEnabled = _socketTransportModule.IpV6Enabled();
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "IPv6IsEnabled() => %d", isEnabled);
+    return isEnabled;
+}
+
+WebRtc_Word32
+Channel::SetSourceFilter(int rtpPort, int rtcpPort, const char ipAddr[64])
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetSourceFilter()");
+    if (_socketTransportModule.SetFilterPorts(
+        static_cast<WebRtc_UWord16>(rtpPort),
+        static_cast<WebRtc_UWord16>(rtcpPort)) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceError,
+            "SetSourceFilter() failed to set filter ports");
+        const UdpTransport::ErrorCode lastError =
+            _socketTransportModule.LastError();
+        WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "UdpTransport::LastError() => %d",
+                     lastError);
+        return -1;
+    }
+    const char* filterIpAddress = ipAddr;
+    if (_socketTransportModule.SetFilterIP(filterIpAddress) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_IP_ADDRESS, kTraceError,
+            "SetSourceFilter() failed to set filter IP address");
+        const UdpTransport::ErrorCode lastError =
+           _socketTransportModule.LastError();
+        WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "UdpTransport::LastError() => %d", lastError);
+        return -1;
+    }
+    return 0;
+}
+
+WebRtc_Word32
+Channel::GetSourceFilter(int& rtpPort, int& rtcpPort, char ipAddr[64])
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetSourceFilter()");
+    WebRtc_UWord16 rtpFilterPort(0);
+    WebRtc_UWord16 rtcpFilterPort(0);
+    if (_socketTransportModule.FilterPorts(rtpFilterPort, rtcpFilterPort) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceWarning,
+            "GetSourceFilter() failed to retrieve filter ports");
+    }
+    char ipAddrTmp[UdpTransport::kIpAddressVersion6Length] = {0};
+    if (_socketTransportModule.FilterIP(ipAddrTmp) != 0)
+    {
+        // no filter has been configured (not seen as an error)
+        memset(ipAddrTmp,
+               0, UdpTransport::kIpAddressVersion6Length);
+    }
+    rtpPort = static_cast<int> (rtpFilterPort);
+    rtcpPort = static_cast<int> (rtcpFilterPort);
+    strcpy(ipAddr, ipAddrTmp);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+        "GetSourceFilter() => rtpPort=%d, rtcpPort=%d, ipAddr=%s",
+        rtpPort, rtcpPort, ipAddr);
+    return 0;
+}
+
+WebRtc_Word32
+Channel::SetSendTOS(int DSCP, int priority, bool useSetSockopt)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetSendTOS(DSCP=%d, useSetSockopt=%d)",
+                 DSCP, (int)useSetSockopt);
+
+    // Set TOS value and possibly try to force usage of setsockopt()
+    if (_socketTransportModule.SetToS(DSCP, useSetSockopt) != 0)
+    {
+        UdpTransport::ErrorCode lastSockError(
+            _socketTransportModule.LastError());
+        switch (lastSockError)
+        {
+        case UdpTransport::kTosError:
+            _engineStatisticsPtr->SetLastError(VE_TOS_ERROR, kTraceError,
+                                               "SetSendTOS() TOS error");
+            break;
+        case UdpTransport::kQosError:
+            _engineStatisticsPtr->SetLastError(
+                    VE_TOS_GQOS_CONFLICT, kTraceError,
+                    "SetSendTOS() GQOS error");
+            break;
+        case UdpTransport::kTosInvalid:
+            // can't switch SetSockOpt method without disabling TOS first, or
+            // SetSockopt() call failed
+            _engineStatisticsPtr->SetLastError(VE_TOS_INVALID, kTraceError,
+                                               "SetSendTOS() invalid TOS");
+            break;
+        case UdpTransport::kSocketInvalid:
+            _engineStatisticsPtr->SetLastError(VE_SOCKET_ERROR, kTraceError,
+                                               "SetSendTOS() invalid Socket");
+            break;
+        default:
+            _engineStatisticsPtr->SetLastError(VE_TOS_ERROR, kTraceError,
+                                               "SetSendTOS() TOS error");
+            break;
+        }
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "UdpTransport =>  lastError = %d",
+                     lastSockError);
+        return -1;
+    }
+
+    // Set priority (PCP) value, -1 means don't change
+    if (-1 != priority)
+    {
+        if (_socketTransportModule.SetPCP(priority) != 0)
+        {
+            UdpTransport::ErrorCode lastSockError(
+                _socketTransportModule.LastError());
+            switch (lastSockError)
+            {
+            case UdpTransport::kPcpError:
+                _engineStatisticsPtr->SetLastError(VE_TOS_ERROR, kTraceError,
+                                                   "SetSendTOS() PCP error");
+                break;
+            case UdpTransport::kQosError:
+                _engineStatisticsPtr->SetLastError(
+                        VE_TOS_GQOS_CONFLICT, kTraceError,
+                        "SetSendTOS() GQOS conflict");
+                break;
+            case UdpTransport::kSocketInvalid:
+                _engineStatisticsPtr->SetLastError(
+                        VE_SOCKET_ERROR, kTraceError,
+                        "SetSendTOS() invalid Socket");
+                break;
+            default:
+                _engineStatisticsPtr->SetLastError(VE_TOS_ERROR, kTraceError,
+                                                   "SetSendTOS() PCP error");
+                break;
+            }
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                         VoEId(_instanceId,_channelId),
+                         "UdpTransport =>  lastError = %d",
+                         lastSockError);
+            return -1;
+        }
+    }
+
+    return 0;
+}
+
+WebRtc_Word32
+Channel::GetSendTOS(int &DSCP, int& priority, bool &useSetSockopt)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetSendTOS(DSCP=?, useSetSockopt=?)");
+    WebRtc_Word32 dscp(0), prio(0);
+    bool setSockopt(false);
+    if (_socketTransportModule.ToS(dscp, setSockopt) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceError,
+            "GetSendTOS() failed to get TOS info");
+        return -1;
+    }
+    if (_socketTransportModule.PCP(prio) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceError,
+            "GetSendTOS() failed to get PCP info");
+        return -1;
+    }
+    DSCP = static_cast<int> (dscp);
+    priority = static_cast<int> (prio);
+    useSetSockopt = setSockopt;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetSendTOS() => DSCP=%d, priority=%d, useSetSockopt=%d",
+        DSCP, priority, (int)useSetSockopt);
+    return 0;
+}
+
+#if defined(_WIN32)
+WebRtc_Word32
+Channel::SetSendGQoS(bool enable, int serviceType, int overrideDSCP)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetSendGQoS(enable=%d, serviceType=%d, "
+                 "overrideDSCP=%d)",
+                 (int)enable, serviceType, overrideDSCP);
+    if(!_socketTransportModule.ReceiveSocketsInitialized())
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SOCKETS_NOT_INITED, kTraceError,
+            "SetSendGQoS() GQoS state must be set after sockets are created");
+        return -1;
+    }
+    if(!_socketTransportModule.SendSocketsInitialized())
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_DESTINATION_NOT_INITED, kTraceError,
+            "SetSendGQoS() GQoS state must be set after sending side is "
+            "initialized");
+        return -1;
+    }
+    if (enable &&
+       (serviceType != SERVICETYPE_BESTEFFORT) &&
+       (serviceType != SERVICETYPE_CONTROLLEDLOAD) &&
+       (serviceType != SERVICETYPE_GUARANTEED) &&
+       (serviceType != SERVICETYPE_QUALITATIVE))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "SetSendGQoS() Invalid service type");
+        return -1;
+    }
+    if (enable && ((overrideDSCP <  0) || (overrideDSCP > 63)))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "SetSendGQoS() Invalid overrideDSCP value");
+        return -1;
+    }
+
+    // Avoid GQoS/ToS conflict when user wants to override the default DSCP
+    // mapping
+    bool QoS(false);
+    WebRtc_Word32 sType(0);
+    WebRtc_Word32 ovrDSCP(0);
+    if (_socketTransportModule.QoS(QoS, sType, ovrDSCP))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceError,
+            "SetSendGQoS() failed to get QOS info");
+        return -1;
+    }
+    if (QoS && ovrDSCP == 0 && overrideDSCP != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_TOS_GQOS_CONFLICT, kTraceError,
+            "SetSendGQoS() QOS is already enabled and overrideDSCP differs,"
+            " not allowed");
+        return -1;
+    }
+    const WebRtc_Word32 maxBitrate(0);
+    if (_socketTransportModule.SetQoS(enable,
+                                      static_cast<WebRtc_Word32>(serviceType),
+                                      maxBitrate,
+                                      static_cast<WebRtc_Word32>(overrideDSCP),
+                                      true))
+    {
+        UdpTransport::ErrorCode lastSockError(
+            _socketTransportModule.LastError());
+        switch (lastSockError)
+        {
+        case UdpTransport::kQosError:
+            _engineStatisticsPtr->SetLastError(VE_GQOS_ERROR, kTraceError,
+                                               "SetSendGQoS() QOS error");
+            break;
+        default:
+            _engineStatisticsPtr->SetLastError(VE_SOCKET_ERROR, kTraceError,
+                                               "SetSendGQoS() Socket error");
+            break;
+        }
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "UdpTransport() => lastError = %d",
+                     lastSockError);
+        return -1;
+    }
+    return 0;
+}
+#endif
+
+#if defined(_WIN32)
+WebRtc_Word32
+Channel::GetSendGQoS(bool &enabled, int &serviceType, int &overrideDSCP)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetSendGQoS(enable=?, serviceType=?, "
+                 "overrideDSCP=?)");
+
+    bool QoS(false);
+    WebRtc_Word32 serviceTypeModule(0);
+    WebRtc_Word32 overrideDSCPModule(0);
+    _socketTransportModule.QoS(QoS, serviceTypeModule, overrideDSCPModule);
+
+    enabled = QoS;
+    serviceType = static_cast<int> (serviceTypeModule);
+    overrideDSCP = static_cast<int> (overrideDSCPModule);
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "GetSendGQoS() => enabled=%d, serviceType=%d, overrideDSCP=%d",
+                 (int)enabled, serviceType, overrideDSCP);
+    return 0;
+}
+#endif
+#endif
+
+WebRtc_Word32
+Channel::SetPacketTimeoutNotification(bool enable, int timeoutSeconds)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetPacketTimeoutNotification()");
+    if (enable)
+    {
+        const WebRtc_UWord32 RTPtimeoutMS = 1000*timeoutSeconds;
+        const WebRtc_UWord32 RTCPtimeoutMS = 0;
+        _rtpRtcpModule->SetPacketTimeout(RTPtimeoutMS, RTCPtimeoutMS);
+        _rtpPacketTimeOutIsEnabled = true;
+        _rtpTimeOutSeconds = timeoutSeconds;
+    }
+    else
+    {
+        _rtpRtcpModule->SetPacketTimeout(0, 0);
+        _rtpPacketTimeOutIsEnabled = false;
+        _rtpTimeOutSeconds = 0;
+    }
+    return 0;
+}
+
+WebRtc_Word32
+Channel::GetPacketTimeoutNotification(bool& enabled, int& timeoutSeconds)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetPacketTimeoutNotification()");
+    enabled = _rtpPacketTimeOutIsEnabled;
+    if (enabled)
+    {
+        timeoutSeconds = _rtpTimeOutSeconds;
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetPacketTimeoutNotification() => enabled=%d,"
+                 " timeoutSeconds=%d",
+                 enabled, timeoutSeconds);
+    return 0;
+}
+
+WebRtc_Word32
+Channel::RegisterDeadOrAliveObserver(VoEConnectionObserver& observer)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::RegisterDeadOrAliveObserver()");
+    CriticalSectionScoped cs(&_callbackCritSect);
+
+    if (_connectionObserverPtr)
+    {
+        _engineStatisticsPtr->SetLastError(VE_INVALID_OPERATION, kTraceError,
+            "RegisterDeadOrAliveObserver() observer already enabled");
+        return -1;
+    }
+
+    _connectionObserverPtr = &observer;
+    _connectionObserver = true;
+
+    return 0;
+}
+
+WebRtc_Word32
+Channel::DeRegisterDeadOrAliveObserver()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::DeRegisterDeadOrAliveObserver()");
+    CriticalSectionScoped cs(&_callbackCritSect);
+
+    if (!_connectionObserverPtr)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceWarning,
+            "DeRegisterDeadOrAliveObserver() observer already disabled");
+        return 0;
+    }
+
+    _connectionObserver = false;
+    _connectionObserverPtr = NULL;
+
+    return 0;
+}
+
+WebRtc_Word32
+Channel::SetPeriodicDeadOrAliveStatus(bool enable, int sampleTimeSeconds)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetPeriodicDeadOrAliveStatus()");
+    if (!_connectionObserverPtr)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "SetPeriodicDeadOrAliveStatus() connection observer has"
+                     " not been registered");
+    }
+    if (enable)
+    {
+        ResetDeadOrAliveCounters();
+    }
+    bool enabled(false);
+    WebRtc_UWord8 currentSampleTimeSec(0);
+    // Store last state (will be used later if dead-or-alive is disabled).
+    _rtpRtcpModule->PeriodicDeadOrAliveStatus(enabled, currentSampleTimeSec);
+    // Update the dead-or-alive state.
+    if (_rtpRtcpModule->SetPeriodicDeadOrAliveStatus(
+        enable, (WebRtc_UWord8)sampleTimeSeconds) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+                VE_RTP_RTCP_MODULE_ERROR,
+                kTraceError,
+                "SetPeriodicDeadOrAliveStatus() failed to set dead-or-alive "
+                "status");
+        return -1;
+    }
+    if (!enable)
+    {
+        // Restore last utilized sample time.
+        // Without this, the sample time would always be reset to default
+        // (2 sec), each time dead-or-alived was disabled without sample-time
+        // parameter.
+        _rtpRtcpModule->SetPeriodicDeadOrAliveStatus(enable,
+                                                    currentSampleTimeSec);
+    }
+    return 0;
+}
+
+WebRtc_Word32
+Channel::GetPeriodicDeadOrAliveStatus(bool& enabled, int& sampleTimeSeconds)
+{
+    _rtpRtcpModule->PeriodicDeadOrAliveStatus(
+        enabled,
+        (WebRtc_UWord8&)sampleTimeSeconds);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetPeriodicDeadOrAliveStatus() => enabled=%d,"
+                 " sampleTimeSeconds=%d",
+                 enabled, sampleTimeSeconds);
+    return 0;
+}
+
+WebRtc_Word32
+Channel::SendUDPPacket(const void* data,
+                       unsigned int length,
+                       int& transmittedBytes,
+                       bool useRtcpSocket)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SendUDPPacket()");
+    if (_externalTransport)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "SendUDPPacket() external transport is enabled");
+        return -1;
+    }
+    if (useRtcpSocket && !_rtpRtcpModule->RTCP())
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTCP_ERROR, kTraceError,
+            "SendUDPPacket() RTCP is disabled");
+        return -1;
+    }
+    if (!_sending)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_NOT_SENDING, kTraceError,
+            "SendUDPPacket() not sending");
+        return -1;
+    }
+
+    char* dataC = new char[length];
+    if (NULL == dataC)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_NO_MEMORY, kTraceError,
+            "SendUDPPacket() memory allocation failed");
+        return -1;
+    }
+    memcpy(dataC, data, length);
+
+    transmittedBytes = SendPacketRaw(dataC, length, useRtcpSocket);
+
+    delete [] dataC;
+    dataC = NULL;
+
+    if (transmittedBytes <= 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+                VE_SEND_ERROR, kTraceError,
+                "SendUDPPacket() transmission failed");
+        transmittedBytes = 0;
+        return -1;
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "SendUDPPacket() => transmittedBytes=%d", transmittedBytes);
+    return 0;
+}
+
+
+int Channel::StartPlayingFileLocally(const char* fileName,
+                                     const bool loop,
+                                     const FileFormats format,
+                                     const int startPosition,
+                                     const float volumeScaling,
+                                     const int stopPosition,
+                                     const CodecInst* codecInst)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::StartPlayingFileLocally(fileNameUTF8[]=%s, loop=%d,"
+                 " format=%d, volumeScaling=%5.3f, startPosition=%d, "
+                 "stopPosition=%d)", fileName, loop, format, volumeScaling,
+                 startPosition, stopPosition);
+
+    if (_outputFilePlaying)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_ALREADY_PLAYING, kTraceError,
+            "StartPlayingFileLocally() is already playing");
+        return -1;
+    }
+
+    {
+        CriticalSectionScoped cs(&_fileCritSect);
+
+        if (_outputFilePlayerPtr)
+        {
+            _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
+            FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
+            _outputFilePlayerPtr = NULL;
+        }
+
+        _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
+            _outputFilePlayerId, (const FileFormats)format);
+
+        if (_outputFilePlayerPtr == NULL)
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_ARGUMENT, kTraceError,
+                "StartPlayingFileLocally() filePlayer format is not correct");
+            return -1;
+        }
+
+        const WebRtc_UWord32 notificationTime(0);
+
+        if (_outputFilePlayerPtr->StartPlayingFile(
+                fileName,
+                loop,
+                startPosition,
+                volumeScaling,
+                notificationTime,
+                stopPosition,
+                (const CodecInst*)codecInst) != 0)
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_BAD_FILE, kTraceError,
+                "StartPlayingFile() failed to start file playout");
+            _outputFilePlayerPtr->StopPlayingFile();
+            FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
+            _outputFilePlayerPtr = NULL;
+            return -1;
+        }
+        _outputFilePlayerPtr->RegisterModuleFileCallback(this);
+        _outputFilePlaying = true;
+    }
+
+    if (RegisterFilePlayingToMixer() != 0)
+        return -1;
+
+    return 0;
+}
+
+int Channel::StartPlayingFileLocally(InStream* stream,
+                                     const FileFormats format,
+                                     const int startPosition,
+                                     const float volumeScaling,
+                                     const int stopPosition,
+                                     const CodecInst* codecInst)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::StartPlayingFileLocally(format=%d,"
+                 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
+                 format, volumeScaling, startPosition, stopPosition);
+
+    if(stream == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "StartPlayingFileLocally() NULL as input stream");
+        return -1;
+    }
+
+
+    if (_outputFilePlaying)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_ALREADY_PLAYING, kTraceError,
+            "StartPlayingFileLocally() is already playing");
+        return -1;
+    }
+
+    {
+      CriticalSectionScoped cs(&_fileCritSect);
+
+      // Destroy the old instance
+      if (_outputFilePlayerPtr)
+      {
+          _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
+          FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
+          _outputFilePlayerPtr = NULL;
+      }
+
+      // Create the instance
+      _outputFilePlayerPtr = FilePlayer::CreateFilePlayer(
+          _outputFilePlayerId,
+          (const FileFormats)format);
+
+      if (_outputFilePlayerPtr == NULL)
+      {
+          _engineStatisticsPtr->SetLastError(
+              VE_INVALID_ARGUMENT, kTraceError,
+              "StartPlayingFileLocally() filePlayer format isnot correct");
+          return -1;
+      }
+
+      const WebRtc_UWord32 notificationTime(0);
+
+      if (_outputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
+                                                 volumeScaling,
+                                                 notificationTime,
+                                                 stopPosition, codecInst) != 0)
+      {
+          _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
+                                             "StartPlayingFile() failed to "
+                                             "start file playout");
+          _outputFilePlayerPtr->StopPlayingFile();
+          FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
+          _outputFilePlayerPtr = NULL;
+          return -1;
+      }
+      _outputFilePlayerPtr->RegisterModuleFileCallback(this);
+      _outputFilePlaying = true;
+    }
+
+    if (RegisterFilePlayingToMixer() != 0)
+        return -1;
+
+    return 0;
+}
+
+int Channel::StopPlayingFileLocally()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::StopPlayingFileLocally()");
+
+    if (!_outputFilePlaying)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceWarning,
+            "StopPlayingFileLocally() isnot playing");
+        return 0;
+    }
+
+    {
+        CriticalSectionScoped cs(&_fileCritSect);
+
+        if (_outputFilePlayerPtr->StopPlayingFile() != 0)
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_STOP_RECORDING_FAILED, kTraceError,
+                "StopPlayingFile() could not stop playing");
+            return -1;
+        }
+        _outputFilePlayerPtr->RegisterModuleFileCallback(NULL);
+        FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
+        _outputFilePlayerPtr = NULL;
+        _outputFilePlaying = false;
+    }
+    // _fileCritSect cannot be taken while calling
+    // SetAnonymousMixibilityStatus. Refer to comments in
+    // StartPlayingFileLocally(const char* ...) for more details.
+    if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, false) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
+            "StopPlayingFile() failed to stop participant from playing as"
+            "file in the mixer");
+        return -1;
+    }
+
+    return 0;
+}
+
+int Channel::IsPlayingFileLocally() const
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::IsPlayingFileLocally()");
+
+    return (WebRtc_Word32)_outputFilePlaying;
+}
+
+int Channel::RegisterFilePlayingToMixer()
+{
+    // Return success for not registering for file playing to mixer if:
+    // 1. playing file before playout is started on that channel.
+    // 2. starting playout without file playing on that channel.
+    if (!_playing || !_outputFilePlaying)
+    {
+        return 0;
+    }
+
+    // |_fileCritSect| cannot be taken while calling
+    // SetAnonymousMixabilityStatus() since as soon as the participant is added
+    // frames can be pulled by the mixer. Since the frames are generated from
+    // the file, _fileCritSect will be taken. This would result in a deadlock.
+    if (_outputMixerPtr->SetAnonymousMixabilityStatus(*this, true) != 0)
+    {
+        CriticalSectionScoped cs(&_fileCritSect);
+        _outputFilePlaying = false;
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CONF_MIX_MODULE_ERROR, kTraceError,
+            "StartPlayingFile() failed to add participant as file to mixer");
+        _outputFilePlayerPtr->StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(_outputFilePlayerPtr);
+        _outputFilePlayerPtr = NULL;
+        return -1;
+    }
+
+    return 0;
+}
+
+int Channel::ScaleLocalFilePlayout(const float scale)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::ScaleLocalFilePlayout(scale=%5.3f)", scale);
+
+    CriticalSectionScoped cs(&_fileCritSect);
+
+    if (!_outputFilePlaying)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "ScaleLocalFilePlayout() isnot playing");
+        return -1;
+    }
+    if ((_outputFilePlayerPtr == NULL) ||
+        (_outputFilePlayerPtr->SetAudioScaling(scale) != 0))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_ARGUMENT, kTraceError,
+            "SetAudioScaling() failed to scale the playout");
+        return -1;
+    }
+
+    return 0;
+}
+
+int Channel::GetLocalPlayoutPosition(int& positionMs)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetLocalPlayoutPosition(position=?)");
+
+    WebRtc_UWord32 position;
+
+    CriticalSectionScoped cs(&_fileCritSect);
+
+    if (_outputFilePlayerPtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "GetLocalPlayoutPosition() filePlayer instance doesnot exist");
+        return -1;
+    }
+
+    if (_outputFilePlayerPtr->GetPlayoutPosition(position) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "GetLocalPlayoutPosition() failed");
+        return -1;
+    }
+    positionMs = position;
+
+    return 0;
+}
+
+int Channel::StartPlayingFileAsMicrophone(const char* fileName,
+                                          const bool loop,
+                                          const FileFormats format,
+                                          const int startPosition,
+                                          const float volumeScaling,
+                                          const int stopPosition,
+                                          const CodecInst* codecInst)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::StartPlayingFileAsMicrophone(fileNameUTF8[]=%s, "
+                 "loop=%d, format=%d, volumeScaling=%5.3f, startPosition=%d, "
+                 "stopPosition=%d)", fileName, loop, format, volumeScaling,
+                 startPosition, stopPosition);
+
+    if (_inputFilePlaying)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_ALREADY_PLAYING, kTraceWarning,
+            "StartPlayingFileAsMicrophone() filePlayer is playing");
+        return 0;
+    }
+
+    CriticalSectionScoped cs(&_fileCritSect);
+
+    // Destroy the old instance
+    if (_inputFilePlayerPtr)
+    {
+        _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
+        FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
+        _inputFilePlayerPtr = NULL;
+    }
+
+    // Create the instance
+    _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
+        _inputFilePlayerId, (const FileFormats)format);
+
+    if (_inputFilePlayerPtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "StartPlayingFileAsMicrophone() filePlayer format isnot correct");
+        return -1;
+    }
+
+    const WebRtc_UWord32 notificationTime(0);
+
+    if (_inputFilePlayerPtr->StartPlayingFile(
+        fileName,
+        loop,
+        startPosition,
+        volumeScaling,
+        notificationTime,
+        stopPosition,
+        (const CodecInst*)codecInst) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "StartPlayingFile() failed to start file playout");
+        _inputFilePlayerPtr->StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
+        _inputFilePlayerPtr = NULL;
+        return -1;
+    }
+    _inputFilePlayerPtr->RegisterModuleFileCallback(this);
+    _inputFilePlaying = true;
+
+    return 0;
+}
+
+int Channel::StartPlayingFileAsMicrophone(InStream* stream,
+                                          const FileFormats format,
+                                          const int startPosition,
+                                          const float volumeScaling,
+                                          const int stopPosition,
+                                          const CodecInst* codecInst)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::StartPlayingFileAsMicrophone(format=%d, "
+                 "volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
+                 format, volumeScaling, startPosition, stopPosition);
+
+    if(stream == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "StartPlayingFileAsMicrophone NULL as input stream");
+        return -1;
+    }
+
+    if (_inputFilePlaying)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_ALREADY_PLAYING, kTraceWarning,
+            "StartPlayingFileAsMicrophone() is playing");
+        return 0;
+    }
+
+    CriticalSectionScoped cs(&_fileCritSect);
+
+    // Destroy the old instance
+    if (_inputFilePlayerPtr)
+    {
+        _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
+        FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
+        _inputFilePlayerPtr = NULL;
+    }
+
+    // Create the instance
+    _inputFilePlayerPtr = FilePlayer::CreateFilePlayer(
+        _inputFilePlayerId, (const FileFormats)format);
+
+    if (_inputFilePlayerPtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "StartPlayingInputFile() filePlayer format isnot correct");
+        return -1;
+    }
+
+    const WebRtc_UWord32 notificationTime(0);
+
+    if (_inputFilePlayerPtr->StartPlayingFile(*stream, startPosition,
+                                              volumeScaling, notificationTime,
+                                              stopPosition, codecInst) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
+                                           "StartPlayingFile() failed to start "
+                                           "file playout");
+        _inputFilePlayerPtr->StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
+        _inputFilePlayerPtr = NULL;
+        return -1;
+    }
+    
+    _inputFilePlayerPtr->RegisterModuleFileCallback(this);
+    _inputFilePlaying = true;
+
+    return 0;
+}
+
+int Channel::StopPlayingFileAsMicrophone()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::StopPlayingFileAsMicrophone()");
+
+    if (!_inputFilePlaying)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceWarning,
+            "StopPlayingFileAsMicrophone() isnot playing");
+        return 0;
+    }
+
+    CriticalSectionScoped cs(&_fileCritSect);
+    if (_inputFilePlayerPtr->StopPlayingFile() != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_STOP_RECORDING_FAILED, kTraceError,
+            "StopPlayingFile() could not stop playing");
+        return -1;
+    }
+    _inputFilePlayerPtr->RegisterModuleFileCallback(NULL);
+    FilePlayer::DestroyFilePlayer(_inputFilePlayerPtr);
+    _inputFilePlayerPtr = NULL;
+    _inputFilePlaying = false;
+
+    return 0;
+}
+
+int Channel::IsPlayingFileAsMicrophone() const
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::IsPlayingFileAsMicrophone()");
+
+    return _inputFilePlaying;
+}
+
+int Channel::ScaleFileAsMicrophonePlayout(const float scale)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::ScaleFileAsMicrophonePlayout(scale=%5.3f)", scale);
+
+    CriticalSectionScoped cs(&_fileCritSect);
+
+    if (!_inputFilePlaying)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "ScaleFileAsMicrophonePlayout() isnot playing");
+        return -1;
+    }
+
+    if ((_inputFilePlayerPtr == NULL) ||
+        (_inputFilePlayerPtr->SetAudioScaling(scale) != 0))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_ARGUMENT, kTraceError,
+            "SetAudioScaling() failed to scale playout");
+        return -1;
+    }
+
+    return 0;
+}
+
+int Channel::StartRecordingPlayout(const char* fileName,
+                                   const CodecInst* codecInst)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::StartRecordingPlayout(fileName=%s)", fileName);
+
+    if (_outputFileRecording)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
+                     "StartRecordingPlayout() is already recording");
+        return 0;
+    }
+
+    FileFormats format;
+    const WebRtc_UWord32 notificationTime(0); // Not supported in VoE
+    CodecInst dummyCodec={100,"L16",16000,320,1,320000};
+
+    if ((codecInst != NULL) &&
+      ((codecInst->channels < 1) || (codecInst->channels > 2)))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_ARGUMENT, kTraceError,
+            "StartRecordingPlayout() invalid compression");
+        return(-1);
+    }
+    if(codecInst == NULL)
+    {
+        format = kFileFormatPcm16kHzFile;
+        codecInst=&dummyCodec;
+    }
+    else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
+    {
+        format = kFileFormatWavFile;
+    }
+    else
+    {
+        format = kFileFormatCompressedFile;
+    }
+
+    CriticalSectionScoped cs(&_fileCritSect);
+
+    // Destroy the old instance
+    if (_outputFileRecorderPtr)
+    {
+        _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
+        FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
+        _outputFileRecorderPtr = NULL;
+    }
+
+    _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
+        _outputFileRecorderId, (const FileFormats)format);
+    if (_outputFileRecorderPtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "StartRecordingPlayout() fileRecorder format isnot correct");
+        return -1;
+    }
+
+    if (_outputFileRecorderPtr->StartRecordingAudioFile(
+        fileName, (const CodecInst&)*codecInst, notificationTime) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "StartRecordingAudioFile() failed to start file recording");
+        _outputFileRecorderPtr->StopRecording();
+        FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
+        _outputFileRecorderPtr = NULL;
+        return -1;
+    }
+    _outputFileRecorderPtr->RegisterModuleFileCallback(this);
+    _outputFileRecording = true;
+
+    return 0;
+}
+
+int Channel::StartRecordingPlayout(OutStream* stream,
+                                   const CodecInst* codecInst)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::StartRecordingPlayout()");
+
+    if (_outputFileRecording)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
+                     "StartRecordingPlayout() is already recording");
+        return 0;
+    }
+
+    FileFormats format;
+    const WebRtc_UWord32 notificationTime(0); // Not supported in VoE
+    CodecInst dummyCodec={100,"L16",16000,320,1,320000};
+
+    if (codecInst != NULL && codecInst->channels != 1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_ARGUMENT, kTraceError,
+            "StartRecordingPlayout() invalid compression");
+        return(-1);
+    }
+    if(codecInst == NULL)
+    {
+        format = kFileFormatPcm16kHzFile;
+        codecInst=&dummyCodec;
+    }
+    else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
+    {
+        format = kFileFormatWavFile;
+    }
+    else
+    {
+        format = kFileFormatCompressedFile;
+    }
+
+    CriticalSectionScoped cs(&_fileCritSect);
+
+    // Destroy the old instance
+    if (_outputFileRecorderPtr)
+    {
+        _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
+        FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
+        _outputFileRecorderPtr = NULL;
+    }
+
+    _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
+        _outputFileRecorderId, (const FileFormats)format);
+    if (_outputFileRecorderPtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "StartRecordingPlayout() fileRecorder format isnot correct");
+        return -1;
+    }
+
+    if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream, *codecInst,
+                                                        notificationTime) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
+                                           "StartRecordingPlayout() failed to "
+                                           "start file recording");
+        _outputFileRecorderPtr->StopRecording();
+        FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
+        _outputFileRecorderPtr = NULL;
+        return -1;
+    }
+    
+    _outputFileRecorderPtr->RegisterModuleFileCallback(this);
+    _outputFileRecording = true;
+
+    return 0;
+}
+
+int Channel::StopRecordingPlayout()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "Channel::StopRecordingPlayout()");
+
+    if (!_outputFileRecording)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                     "StopRecordingPlayout() isnot recording");
+        return -1;
+    }
+
+
+    CriticalSectionScoped cs(&_fileCritSect);
+
+    if (_outputFileRecorderPtr->StopRecording() != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_STOP_RECORDING_FAILED, kTraceError,
+            "StopRecording() could not stop recording");
+        return(-1);
+    }
+    _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
+    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
+    _outputFileRecorderPtr = NULL;
+    _outputFileRecording = false;
+
+    return 0;
+}
+
+void
+Channel::SetMixWithMicStatus(bool mix)
+{
+    _mixFileWithMicrophone=mix;
+}
+
+int
+Channel::GetSpeechOutputLevel(WebRtc_UWord32& level) const
+{
+    WebRtc_Word8 currentLevel = _outputAudioLevel.Level();
+    level = static_cast<WebRtc_Word32> (currentLevel);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+               VoEId(_instanceId,_channelId),
+               "GetSpeechOutputLevel() => level=%u", level);
+    return 0;
+}
+
+int
+Channel::GetSpeechOutputLevelFullRange(WebRtc_UWord32& level) const
+{
+    WebRtc_Word16 currentLevel = _outputAudioLevel.LevelFullRange();
+    level = static_cast<WebRtc_Word32> (currentLevel);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+               VoEId(_instanceId,_channelId),
+               "GetSpeechOutputLevelFullRange() => level=%u", level);
+    return 0;
+}
+
+int
+Channel::SetMute(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::SetMute(enable=%d)", enable);
+    _mute = enable;
+    return 0;
+}
+
+bool
+Channel::Mute() const
+{
+    return _mute;
+}
+
+int
+Channel::SetOutputVolumePan(float left, float right)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::SetOutputVolumePan()");
+    _panLeft = left;
+    _panRight = right;
+    return 0;
+}
+
+int
+Channel::GetOutputVolumePan(float& left, float& right) const
+{
+    left = _panLeft;
+    right = _panRight;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+               VoEId(_instanceId,_channelId),
+               "GetOutputVolumePan() => left=%3.2f, right=%3.2f", left, right);
+    return 0;
+}
+
+int
+Channel::SetChannelOutputVolumeScaling(float scaling)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::SetChannelOutputVolumeScaling()");
+    _outputGain = scaling;
+    return 0;
+}
+
+int
+Channel::GetChannelOutputVolumeScaling(float& scaling) const
+{
+    scaling = _outputGain;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+               VoEId(_instanceId,_channelId),
+               "GetChannelOutputVolumeScaling() => scaling=%3.2f", scaling);
+    return 0;
+}
+
+#ifdef WEBRTC_SRTP
+
+int
+Channel::EnableSRTPSend(
+    CipherTypes cipherType,
+    int cipherKeyLength,
+    AuthenticationTypes authType,
+    int authKeyLength,
+    int authTagLength,
+    SecurityLevels level,
+    const unsigned char key[kVoiceEngineMaxSrtpKeyLength],
+    bool useForRTCP)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::EnableSRTPSend()");
+
+    CriticalSectionScoped cs(&_callbackCritSect);
+
+    if (_encrypting)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceWarning,
+            "EnableSRTPSend() encryption already enabled");
+        return -1;
+    }
+
+    if (key == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceWarning,
+            "EnableSRTPSend() invalid key string");
+        return -1;
+    }
+
+    if (((kEncryption == level ||
+            kEncryptionAndAuthentication == level) &&
+            (cipherKeyLength < kVoiceEngineMinSrtpEncryptLength ||
+            cipherKeyLength > kVoiceEngineMaxSrtpEncryptLength)) ||
+        ((kAuthentication == level ||
+            kEncryptionAndAuthentication == level) &&
+            kAuthHmacSha1 == authType &&
+            (authKeyLength > kVoiceEngineMaxSrtpAuthSha1Length ||
+            authTagLength > kVoiceEngineMaxSrtpAuthSha1Length)) ||
+        ((kAuthentication == level ||
+            kEncryptionAndAuthentication == level) &&
+            kAuthNull == authType &&
+            (authKeyLength > kVoiceEngineMaxSrtpKeyAuthNullLength ||
+            authTagLength > kVoiceEngineMaxSrtpTagAuthNullLength)))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "EnableSRTPSend() invalid key length(s)");
+        return -1;
+    }
+
+
+    if (_srtpModule.EnableSRTPEncrypt(
+        !useForRTCP,
+        (SrtpModule::CipherTypes)cipherType,
+        cipherKeyLength,
+        (SrtpModule::AuthenticationTypes)authType,
+        authKeyLength, authTagLength,
+        (SrtpModule::SecurityLevels)level,
+        key) == -1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SRTP_ERROR, kTraceError,
+            "EnableSRTPSend() failed to enable SRTP encryption");
+        return -1;
+    }
+
+    if (_encryptionPtr == NULL)
+    {
+        _encryptionPtr = &_srtpModule;
+    }
+    _encrypting = true;
+
+    return 0;
+}
+
+int
+Channel::DisableSRTPSend()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::DisableSRTPSend()");
+
+    CriticalSectionScoped cs(&_callbackCritSect);
+
+    if (!_encrypting)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceWarning,
+            "DisableSRTPSend() SRTP encryption already disabled");
+        return 0;
+    }
+
+    _encrypting = false;
+
+    if (_srtpModule.DisableSRTPEncrypt() == -1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SRTP_ERROR, kTraceError,
+            "DisableSRTPSend() failed to disable SRTP encryption");
+        return -1;
+    }
+
+    if (!_srtpModule.SRTPDecrypt() && !_srtpModule.SRTPEncrypt())
+    {
+        // Both directions are disabled
+        _encryptionPtr = NULL;
+    }
+
+    return 0;
+}
+
+int
+Channel::EnableSRTPReceive(
+    CipherTypes  cipherType,
+    int cipherKeyLength,
+    AuthenticationTypes authType,
+    int authKeyLength,
+    int authTagLength,
+    SecurityLevels level,
+    const unsigned char key[kVoiceEngineMaxSrtpKeyLength],
+    bool useForRTCP)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::EnableSRTPReceive()");
+
+    CriticalSectionScoped cs(&_callbackCritSect);
+
+    if (_decrypting)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceWarning,
+            "EnableSRTPReceive() SRTP decryption already enabled");
+        return -1;
+    }
+
+    if (key == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceWarning,
+            "EnableSRTPReceive() invalid key string");
+        return -1;
+    }
+
+    if ((((kEncryption == level) ||
+            (kEncryptionAndAuthentication == level)) &&
+            ((cipherKeyLength < kVoiceEngineMinSrtpEncryptLength) ||
+            (cipherKeyLength > kVoiceEngineMaxSrtpEncryptLength))) ||
+        (((kAuthentication == level) ||
+            (kEncryptionAndAuthentication == level)) &&
+            (kAuthHmacSha1 == authType) &&
+            ((authKeyLength > kVoiceEngineMaxSrtpAuthSha1Length) ||
+            (authTagLength > kVoiceEngineMaxSrtpAuthSha1Length))) ||
+        (((kAuthentication == level) ||
+            (kEncryptionAndAuthentication == level)) &&
+            (kAuthNull == authType) &&
+            ((authKeyLength > kVoiceEngineMaxSrtpKeyAuthNullLength) ||
+            (authTagLength > kVoiceEngineMaxSrtpTagAuthNullLength))))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "EnableSRTPReceive() invalid key length(s)");
+        return -1;
+    }
+
+    if (_srtpModule.EnableSRTPDecrypt(
+        !useForRTCP,
+        (SrtpModule::CipherTypes)cipherType,
+        cipherKeyLength,
+        (SrtpModule::AuthenticationTypes)authType,
+        authKeyLength,
+        authTagLength,
+        (SrtpModule::SecurityLevels)level,
+        key) == -1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SRTP_ERROR, kTraceError,
+            "EnableSRTPReceive() failed to enable SRTP decryption");
+        return -1;
+    }
+
+    if (_encryptionPtr == NULL)
+    {
+        _encryptionPtr = &_srtpModule;
+    }
+
+    _decrypting = true;
+
+    return 0;
+}
+
+int
+Channel::DisableSRTPReceive()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::DisableSRTPReceive()");
+
+    CriticalSectionScoped cs(&_callbackCritSect);
+
+    if (!_decrypting)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceWarning,
+            "DisableSRTPReceive() SRTP decryption already disabled");
+        return 0;
+    }
+
+    _decrypting = false;
+
+    if (_srtpModule.DisableSRTPDecrypt() == -1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SRTP_ERROR, kTraceError,
+            "DisableSRTPReceive() failed to disable SRTP decryption");
+        return -1;
+    }
+
+    if (!_srtpModule.SRTPDecrypt() && !_srtpModule.SRTPEncrypt())
+    {
+        _encryptionPtr = NULL;
+    }
+
+    return 0;
+}
+
+#endif
+
+int
+Channel::RegisterExternalEncryption(Encryption& encryption)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::RegisterExternalEncryption()");
+
+    CriticalSectionScoped cs(&_callbackCritSect);
+
+    if (_encryptionPtr)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "RegisterExternalEncryption() encryption already enabled");
+        return -1;
+    }
+
+    _encryptionPtr = &encryption;
+
+    _decrypting = true;
+    _encrypting = true;
+
+    return 0;
+}
+
+int
+Channel::DeRegisterExternalEncryption()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::DeRegisterExternalEncryption()");
+
+    CriticalSectionScoped cs(&_callbackCritSect);
+
+    if (!_encryptionPtr)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceWarning,
+            "DeRegisterExternalEncryption() encryption already disabled");
+        return 0;
+    }
+
+    _decrypting = false;
+    _encrypting = false;
+
+    _encryptionPtr = NULL;
+
+    return 0;
+}
+
+int Channel::SendTelephoneEventOutband(unsigned char eventCode,
+                                          int lengthMs, int attenuationDb,
+                                          bool playDtmfEvent)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+               "Channel::SendTelephoneEventOutband(..., playDtmfEvent=%d)",
+               playDtmfEvent);
+
+    _playOutbandDtmfEvent = playDtmfEvent;
+
+    if (_rtpRtcpModule->SendTelephoneEventOutband(eventCode, lengthMs,
+                                                 attenuationDb) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SEND_DTMF_FAILED,
+            kTraceWarning,
+            "SendTelephoneEventOutband() failed to send event");
+        return -1;
+    }
+    return 0;
+}
+
+int Channel::SendTelephoneEventInband(unsigned char eventCode,
+                                         int lengthMs,
+                                         int attenuationDb,
+                                         bool playDtmfEvent)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+               "Channel::SendTelephoneEventInband(..., playDtmfEvent=%d)",
+               playDtmfEvent);
+
+    _playInbandDtmfEvent = playDtmfEvent;
+    _inbandDtmfQueue.AddDtmf(eventCode, lengthMs, attenuationDb);
+
+    return 0;
+}
+
+int
+Channel::SetDtmfPlayoutStatus(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::SetDtmfPlayoutStatus()");
+    if (_audioCodingModule.SetDtmfPlayoutStatus(enable) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceWarning,
+            "SetDtmfPlayoutStatus() failed to set Dtmf playout");
+        return -1;
+    }
+    return 0;
+}
+
+bool
+Channel::DtmfPlayoutStatus() const
+{
+    return _audioCodingModule.DtmfPlayoutStatus();
+}
+
+int
+Channel::SetSendTelephoneEventPayloadType(unsigned char type)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::SetSendTelephoneEventPayloadType()");
+    if (type > 127)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "SetSendTelephoneEventPayloadType() invalid type");
+        return -1;
+    }
+    CodecInst codec;
+    codec.plfreq = 8000;
+    codec.pltype = type;
+    memcpy(codec.plname, "telephone-event", 16);
+    if (_rtpRtcpModule->RegisterSendPayload(codec) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+            "SetSendTelephoneEventPayloadType() failed to register send"
+            "payload type");
+        return -1;
+    }
+    _sendTelephoneEventPayloadType = type;
+    return 0;
+}
+
+int
+Channel::GetSendTelephoneEventPayloadType(unsigned char& type)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetSendTelephoneEventPayloadType()");
+    type = _sendTelephoneEventPayloadType;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+               VoEId(_instanceId,_channelId),
+               "GetSendTelephoneEventPayloadType() => type=%u", type);
+    return 0;
+}
+
+#ifdef WEBRTC_DTMF_DETECTION
+
+WebRtc_Word32
+Channel::RegisterTelephoneEventDetection(
+    TelephoneEventDetectionMethods detectionMethod,
+    VoETelephoneEventObserver& observer)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::RegisterTelephoneEventDetection()");
+    CriticalSectionScoped cs(&_callbackCritSect);
+
+    if (_telephoneEventDetectionPtr)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "RegisterTelephoneEventDetection() detection already enabled");
+        return -1;
+    }
+
+    _telephoneEventDetectionPtr = &observer;
+
+    switch (detectionMethod)
+    {
+        case kInBand:
+            _inbandTelephoneEventDetection = true;
+            _outOfBandTelephoneEventDetecion = false;
+            break;
+        case kOutOfBand:
+            _inbandTelephoneEventDetection = false;
+            _outOfBandTelephoneEventDetecion = true;
+            break;
+        case kInAndOutOfBand:
+            _inbandTelephoneEventDetection = true;
+            _outOfBandTelephoneEventDetecion = true;
+            break;
+        default:
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_ARGUMENT, kTraceError,
+                "RegisterTelephoneEventDetection() invalid detection method");
+            return -1;
+    }
+
+    if (_inbandTelephoneEventDetection)
+    {
+        // Enable in-band Dtmf detectin in the ACM.
+        if (_audioCodingModule.RegisterIncomingMessagesCallback(this) != 0)
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+                "RegisterTelephoneEventDetection() failed to enable Dtmf "
+                "detection");
+        }
+    }
+
+    // Enable/disable out-of-band detection of received telephone-events.
+    // When enabled, RtpAudioFeedback::OnReceivedTelephoneEvent() will be
+    // called two times by the RTP/RTCP module (start & end).
+    const bool forwardToDecoder =
+        _rtpRtcpModule->TelephoneEventForwardToDecoder();
+    const bool detectEndOfTone = true;
+    _rtpRtcpModule->SetTelephoneEventStatus(_outOfBandTelephoneEventDetecion,
+                                           forwardToDecoder,
+                                           detectEndOfTone);
+
+    return 0;
+}
+
+int
+Channel::DeRegisterTelephoneEventDetection()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+                 "Channel::DeRegisterTelephoneEventDetection()");
+
+    CriticalSectionScoped cs(&_callbackCritSect);
+
+    if (!_telephoneEventDetectionPtr)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION,
+            kTraceWarning,
+            "DeRegisterTelephoneEventDetection() detection already disabled");
+        return 0;
+    }
+
+    // Disable out-of-band event detection
+    const bool forwardToDecoder =
+        _rtpRtcpModule->TelephoneEventForwardToDecoder();
+    _rtpRtcpModule->SetTelephoneEventStatus(false, forwardToDecoder);
+
+    // Disable in-band Dtmf detection
+    _audioCodingModule.RegisterIncomingMessagesCallback(NULL);
+
+    _inbandTelephoneEventDetection = false;
+    _outOfBandTelephoneEventDetecion = false;
+    _telephoneEventDetectionPtr = NULL;
+
+    return 0;
+}
+
+int
+Channel::GetTelephoneEventDetectionStatus(
+    bool& enabled,
+    TelephoneEventDetectionMethods& detectionMethod)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+                 "Channel::GetTelephoneEventDetectionStatus()");
+
+    {
+        CriticalSectionScoped cs(&_callbackCritSect);
+        enabled = (_telephoneEventDetectionPtr != NULL);
+    }
+
+    if (enabled)
+    {
+        if (_inbandTelephoneEventDetection && !_outOfBandTelephoneEventDetecion)
+            detectionMethod = kInBand;
+        else if (!_inbandTelephoneEventDetection
+            && _outOfBandTelephoneEventDetecion)
+            detectionMethod = kOutOfBand;
+        else if (_inbandTelephoneEventDetection
+            && _outOfBandTelephoneEventDetecion)
+            detectionMethod = kInAndOutOfBand;
+        else
+        {
+            assert(false);
+            return -1;
+        }
+    }
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+               VoEId(_instanceId, _channelId),
+               "GetTelephoneEventDetectionStatus() => enabled=%d,"
+               "detectionMethod=%d", enabled, detectionMethod);
+    return 0;
+}
+
+#endif  // #ifdef WEBRTC_DTMF_DETECTION
+
+int
+Channel::UpdateRxVadDetection(AudioFrame& audioFrame)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::UpdateRxVadDetection()");
+
+    int vadDecision = 1;
+
+    vadDecision = (audioFrame.vad_activity_ == AudioFrame::kVadActive)? 1 : 0;
+
+    if ((vadDecision != _oldVadDecision) && _rxVadObserverPtr)
+    {
+        OnRxVadDetected(vadDecision);
+        _oldVadDecision = vadDecision;
+    }
+
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::UpdateRxVadDetection() => vadDecision=%d",
+                 vadDecision);
+    return 0;
+}
+
+int
+Channel::RegisterRxVadObserver(VoERxVadCallback &observer)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::RegisterRxVadObserver()");
+    CriticalSectionScoped cs(&_callbackCritSect);
+
+    if (_rxVadObserverPtr)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "RegisterRxVadObserver() observer already enabled");
+        return -1;
+    }
+    _rxVadObserverPtr = &observer;
+    _RxVadDetection = true;
+    return 0;
+}
+
+int
+Channel::DeRegisterRxVadObserver()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::DeRegisterRxVadObserver()");
+    CriticalSectionScoped cs(&_callbackCritSect);
+
+    if (!_rxVadObserverPtr)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceWarning,
+            "DeRegisterRxVadObserver() observer already disabled");
+        return 0;
+    }
+    _rxVadObserverPtr = NULL;
+    _RxVadDetection = false;
+    return 0;
+}
+
+int
+Channel::VoiceActivityIndicator(int &activity)
+{
+    activity = _sendFrameType;
+
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::VoiceActivityIndicator(indicator=%d)", activity);
+    return 0;
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+
+int
+Channel::SetRxAgcStatus(const bool enable, const AgcModes mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetRxAgcStatus(enable=%d, mode=%d)",
+                 (int)enable, (int)mode);
+
+    GainControl::Mode agcMode(GainControl::kFixedDigital);
+    switch (mode)
+    {
+        case kAgcDefault:
+            agcMode = GainControl::kAdaptiveDigital;
+            break;
+        case kAgcUnchanged:
+            agcMode = _rxAudioProcessingModulePtr->gain_control()->mode();
+            break;
+        case kAgcFixedDigital:
+            agcMode = GainControl::kFixedDigital;
+            break;
+        case kAgcAdaptiveDigital:
+            agcMode =GainControl::kAdaptiveDigital;
+            break;
+        default:
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_ARGUMENT, kTraceError,
+                "SetRxAgcStatus() invalid Agc mode");
+            return -1;
+    }
+
+    if (_rxAudioProcessingModulePtr->gain_control()->set_mode(agcMode) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceError,
+            "SetRxAgcStatus() failed to set Agc mode");
+        return -1;
+    }
+    if (_rxAudioProcessingModulePtr->gain_control()->Enable(enable) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceError,
+            "SetRxAgcStatus() failed to set Agc state");
+        return -1;
+    }
+
+    _rxAgcIsEnabled = enable;
+
+    _rxApmIsEnabled = ((_rxAgcIsEnabled == true) || (_rxNsIsEnabled == true));
+
+    return 0;
+}
+
+int
+Channel::GetRxAgcStatus(bool& enabled, AgcModes& mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "Channel::GetRxAgcStatus(enable=?, mode=?)");
+
+    bool enable = _rxAudioProcessingModulePtr->gain_control()->is_enabled();
+    GainControl::Mode agcMode =
+        _rxAudioProcessingModulePtr->gain_control()->mode();
+
+    enabled = enable;
+
+    switch (agcMode)
+    {
+        case GainControl::kFixedDigital:
+            mode = kAgcFixedDigital;
+            break;
+        case GainControl::kAdaptiveDigital:
+            mode = kAgcAdaptiveDigital;
+            break;
+        default:
+            _engineStatisticsPtr->SetLastError(
+                VE_APM_ERROR, kTraceError,
+                "GetRxAgcStatus() invalid Agc mode");
+            return -1;
+    }
+
+    return 0;
+}
+
+int
+Channel::SetRxAgcConfig(const AgcConfig config)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetRxAgcConfig()");
+
+    if (_rxAudioProcessingModulePtr->gain_control()->set_target_level_dbfs(
+        config.targetLeveldBOv) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceError,
+            "SetRxAgcConfig() failed to set target peak |level|"
+            "(or envelope) of the Agc");
+        return -1;
+    }
+    if (_rxAudioProcessingModulePtr->gain_control()->set_compression_gain_db(
+        config.digitalCompressionGaindB) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceError,
+            "SetRxAgcConfig() failed to set the range in |gain| the"
+            " digital compression stage may apply");
+        return -1;
+    }
+    if (_rxAudioProcessingModulePtr->gain_control()->enable_limiter(
+        config.limiterEnable) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceError,
+            "SetRxAgcConfig() failed to set hard limiter to the signal");
+        return -1;
+    }
+
+    return 0;
+}
+
+int
+Channel::GetRxAgcConfig(AgcConfig& config)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetRxAgcConfig(config=%?)");
+
+    config.targetLeveldBOv =
+        _rxAudioProcessingModulePtr->gain_control()->target_level_dbfs();
+    config.digitalCompressionGaindB =
+        _rxAudioProcessingModulePtr->gain_control()->compression_gain_db();
+    config.limiterEnable =
+        _rxAudioProcessingModulePtr->gain_control()->is_limiter_enabled();
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+               VoEId(_instanceId,_channelId), "GetRxAgcConfig() => "
+                   "targetLeveldBOv=%u, digitalCompressionGaindB=%u,"
+                   " limiterEnable=%d",
+                   config.targetLeveldBOv,
+                   config.digitalCompressionGaindB,
+                   config.limiterEnable);
+
+    return 0;
+}
+
+#endif // #ifdef WEBRTC_VOICE_ENGINE_AGC
+
+#ifdef WEBRTC_VOICE_ENGINE_NR
+
+int
+Channel::SetRxNsStatus(const bool enable, const NsModes mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetRxNsStatus(enable=%d, mode=%d)",
+                 (int)enable, (int)mode);
+
+    NoiseSuppression::Level nsLevel(
+        (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE);
+    switch (mode)
+    {
+
+        case kNsDefault:
+            nsLevel = (NoiseSuppression::Level)
+                WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE;
+            break;
+        case kNsUnchanged:
+            nsLevel = _rxAudioProcessingModulePtr->noise_suppression()->level();
+            break;
+        case kNsConference:
+            nsLevel = NoiseSuppression::kHigh;
+            break;
+        case kNsLowSuppression:
+            nsLevel = NoiseSuppression::kLow;
+            break;
+        case kNsModerateSuppression:
+            nsLevel = NoiseSuppression::kModerate;
+            break;
+        case kNsHighSuppression:
+            nsLevel = NoiseSuppression::kHigh;
+            break;
+        case kNsVeryHighSuppression:
+            nsLevel = NoiseSuppression::kVeryHigh;
+            break;
+    }
+
+    if (_rxAudioProcessingModulePtr->noise_suppression()->set_level(nsLevel)
+        != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceError,
+            "SetRxAgcStatus() failed to set Ns level");
+        return -1;
+    }
+    if (_rxAudioProcessingModulePtr->noise_suppression()->Enable(enable) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_APM_ERROR, kTraceError,
+            "SetRxAgcStatus() failed to set Agc state");
+        return -1;
+    }
+
+    _rxNsIsEnabled = enable;
+    _rxApmIsEnabled = ((_rxAgcIsEnabled == true) || (_rxNsIsEnabled == true));
+
+    return 0;
+}
+
+int
+Channel::GetRxNsStatus(bool& enabled, NsModes& mode)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetRxNsStatus(enable=?, mode=?)");
+
+    bool enable =
+        _rxAudioProcessingModulePtr->noise_suppression()->is_enabled();
+    NoiseSuppression::Level ncLevel =
+        _rxAudioProcessingModulePtr->noise_suppression()->level();
+
+    enabled = enable;
+
+    switch (ncLevel)
+    {
+        case NoiseSuppression::kLow:
+            mode = kNsLowSuppression;
+            break;
+        case NoiseSuppression::kModerate:
+            mode = kNsModerateSuppression;
+            break;
+        case NoiseSuppression::kHigh:
+            mode = kNsHighSuppression;
+            break;
+        case NoiseSuppression::kVeryHigh:
+            mode = kNsVeryHighSuppression;
+            break;
+    }
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+               VoEId(_instanceId,_channelId),
+               "GetRxNsStatus() => enabled=%d, mode=%d", enabled, mode);
+    return 0;
+}
+
+#endif // #ifdef WEBRTC_VOICE_ENGINE_NR
+
+int
+Channel::RegisterRTPObserver(VoERTPObserver& observer)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+                 "Channel::RegisterRTPObserver()");
+    CriticalSectionScoped cs(&_callbackCritSect);
+
+    if (_rtpObserverPtr)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "RegisterRTPObserver() observer already enabled");
+        return -1;
+    }
+
+    _rtpObserverPtr = &observer;
+    _rtpObserver = true;
+
+    return 0;
+}
+
+int
+Channel::DeRegisterRTPObserver()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::DeRegisterRTPObserver()");
+    CriticalSectionScoped cs(&_callbackCritSect);
+
+    if (!_rtpObserverPtr)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceWarning,
+            "DeRegisterRTPObserver() observer already disabled");
+        return 0;
+    }
+
+    _rtpObserver = false;
+    _rtpObserverPtr = NULL;
+
+    return 0;
+}
+
+int
+Channel::RegisterRTCPObserver(VoERTCPObserver& observer)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::RegisterRTCPObserver()");
+    CriticalSectionScoped cs(&_callbackCritSect);
+
+    if (_rtcpObserverPtr)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "RegisterRTCPObserver() observer already enabled");
+        return -1;
+    }
+
+    _rtcpObserverPtr = &observer;
+    _rtcpObserver = true;
+
+    return 0;
+}
+
+int
+Channel::DeRegisterRTCPObserver()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+                 "Channel::DeRegisterRTCPObserver()");
+    CriticalSectionScoped cs(&_callbackCritSect);
+
+    if (!_rtcpObserverPtr)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceWarning,
+            "DeRegisterRTCPObserver() observer already disabled");
+        return 0;
+    }
+
+    _rtcpObserver = false;
+    _rtcpObserverPtr = NULL;
+
+    return 0;
+}
+
+int
+Channel::SetLocalSSRC(unsigned int ssrc)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+                 "Channel::SetLocalSSRC()");
+    if (_sending)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_ALREADY_SENDING, kTraceError,
+            "SetLocalSSRC() already sending");
+        return -1;
+    }
+    if (_rtpRtcpModule->SetSSRC(ssrc) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+            "SetLocalSSRC() failed to set SSRC");
+        return -1;
+    }
+    return 0;
+}
+
+int
+Channel::GetLocalSSRC(unsigned int& ssrc)
+{
+    ssrc = _rtpRtcpModule->SSRC();
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId,_channelId),
+                 "GetLocalSSRC() => ssrc=%lu", ssrc);
+    return 0;
+}
+
+int
+Channel::GetRemoteSSRC(unsigned int& ssrc)
+{
+    ssrc = _rtpRtcpModule->RemoteSSRC();
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId,_channelId),
+                 "GetRemoteSSRC() => ssrc=%lu", ssrc);
+    return 0;
+}
+
+int
+Channel::GetRemoteCSRCs(unsigned int arrCSRC[15])
+{
+    if (arrCSRC == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "GetRemoteCSRCs() invalid array argument");
+        return -1;
+    }
+    WebRtc_UWord32 arrOfCSRC[kRtpCsrcSize];
+    WebRtc_Word32 CSRCs(0);
+    CSRCs = _rtpRtcpModule->CSRCs(arrOfCSRC);
+    if (CSRCs > 0)
+    {
+        memcpy(arrCSRC, arrOfCSRC, CSRCs * sizeof(WebRtc_UWord32));
+        for (int i = 0; i < (int) CSRCs; i++)
+        {
+            WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                       VoEId(_instanceId, _channelId),
+                       "GetRemoteCSRCs() => arrCSRC[%d]=%lu", i, arrCSRC[i]);
+        }
+    } else
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                   VoEId(_instanceId, _channelId),
+                   "GetRemoteCSRCs() => list is empty!");
+    }
+    return CSRCs;
+}
+
+int
+Channel::SetRTPAudioLevelIndicationStatus(bool enable, unsigned char ID)
+{
+    if (_rtpAudioProc.get() == NULL)
+    {
+        _rtpAudioProc.reset(AudioProcessing::Create(VoEModuleId(_instanceId,
+                                                                _channelId)));
+        if (_rtpAudioProc.get() == NULL)
+        {
+            _engineStatisticsPtr->SetLastError(VE_NO_MEMORY, kTraceCritical,
+                "Failed to create AudioProcessing");
+            return -1;
+        }
+    }
+
+    if (_rtpAudioProc->level_estimator()->Enable(enable) !=
+        AudioProcessing::kNoError)
+    {
+        _engineStatisticsPtr->SetLastError(VE_APM_ERROR, kTraceWarning,
+            "Failed to enable AudioProcessing::level_estimator()");
+    }
+
+    _includeAudioLevelIndication = enable;
+    return _rtpRtcpModule->SetRTPAudioLevelIndicationStatus(enable, ID);
+}
+int
+Channel::GetRTPAudioLevelIndicationStatus(bool& enabled, unsigned char& ID)
+{
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId,_channelId),
+                 "GetRTPAudioLevelIndicationStatus() => enabled=%d, ID=%u",
+                 enabled, ID);
+    return _rtpRtcpModule->GetRTPAudioLevelIndicationStatus(enabled, ID);
+}
+
+int
+Channel::SetRTCPStatus(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetRTCPStatus()");
+    if (_rtpRtcpModule->SetRTCPStatus(enable ?
+        kRtcpCompound : kRtcpOff) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+            "SetRTCPStatus() failed to set RTCP status");
+        return -1;
+    }
+    return 0;
+}
+
+int
+Channel::GetRTCPStatus(bool& enabled)
+{
+    RTCPMethod method = _rtpRtcpModule->RTCP();
+    enabled = (method != kRtcpOff);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId,_channelId),
+                 "GetRTCPStatus() => enabled=%d", enabled);
+    return 0;
+}
+
+int
+Channel::SetRTCP_CNAME(const char cName[256])
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+                 "Channel::SetRTCP_CNAME()");
+    if (_rtpRtcpModule->SetCNAME(cName) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+            "SetRTCP_CNAME() failed to set RTCP CNAME");
+        return -1;
+    }
+    return 0;
+}
+
+int
+Channel::GetRTCP_CNAME(char cName[256])
+{
+    if (_rtpRtcpModule->CNAME(cName) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+            "GetRTCP_CNAME() failed to retrieve RTCP CNAME");
+        return -1;
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId, _channelId),
+                 "GetRTCP_CNAME() => cName=%s", cName);
+    return 0;
+}
+
+int
+Channel::GetRemoteRTCP_CNAME(char cName[256])
+{
+    if (cName == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "GetRemoteRTCP_CNAME() invalid CNAME input buffer");
+        return -1;
+    }
+    char cname[RTCP_CNAME_SIZE];
+    const WebRtc_UWord32 remoteSSRC = _rtpRtcpModule->RemoteSSRC();
+    if (_rtpRtcpModule->RemoteCNAME(remoteSSRC, cname) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_CANNOT_RETRIEVE_CNAME, kTraceError,
+            "GetRemoteRTCP_CNAME() failed to retrieve remote RTCP CNAME");
+        return -1;
+    }
+    strcpy(cName, cname);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId, _channelId),
+                 "GetRemoteRTCP_CNAME() => cName=%s", cName);
+    return 0;
+}
+
+int
+Channel::GetRemoteRTCPData(
+    unsigned int& NTPHigh,
+    unsigned int& NTPLow,
+    unsigned int& timestamp,
+    unsigned int& playoutTimestamp,
+    unsigned int* jitter,
+    unsigned short* fractionLost)
+{
+    // --- Information from sender info in received Sender Reports
+
+    RTCPSenderInfo senderInfo;
+    if (_rtpRtcpModule->RemoteRTCPStat(&senderInfo) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+            "GetRemoteRTCPData() failed to retrieve sender info for remote "
+            "side");
+        return -1;
+    }
+
+    // We only utilize 12 out of 20 bytes in the sender info (ignores packet
+    // and octet count)
+    NTPHigh = senderInfo.NTPseconds;
+    NTPLow = senderInfo.NTPfraction;
+    timestamp = senderInfo.RTPtimeStamp;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId, _channelId),
+                 "GetRemoteRTCPData() => NTPHigh=%lu, NTPLow=%lu, "
+                 "timestamp=%lu",
+                 NTPHigh, NTPLow, timestamp);
+
+    // --- Locally derived information
+
+    // This value is updated on each incoming RTCP packet (0 when no packet
+    // has been received)
+    playoutTimestamp = _playoutTimeStampRTCP;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId, _channelId),
+                 "GetRemoteRTCPData() => playoutTimestamp=%lu",
+                 _playoutTimeStampRTCP);
+
+    if (NULL != jitter || NULL != fractionLost)
+    {
+        // Get all RTCP receiver report blocks that have been received on this
+        // channel. If we receive RTP packets from a remote source we know the
+        // remote SSRC and use the report block from him.
+        // Otherwise use the first report block.
+        std::vector<RTCPReportBlock> remote_stats;
+        if (_rtpRtcpModule->RemoteRTCPStat(&remote_stats) != 0 ||
+            remote_stats.empty()) {
+          WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                       VoEId(_instanceId, _channelId),
+                       "GetRemoteRTCPData() failed to measure statistics due"
+                       " to lack of received RTP and/or RTCP packets");
+          return -1;
+        }
+
+        WebRtc_UWord32 remoteSSRC = _rtpRtcpModule->RemoteSSRC();
+        std::vector<RTCPReportBlock>::const_iterator it = remote_stats.begin();
+        for (; it != remote_stats.end(); ++it) {
+          if (it->remoteSSRC == remoteSSRC)
+            break;
+        }
+
+        if (it == remote_stats.end()) {
+          // If we have not received any RTCP packets from this SSRC it probably
+          // means that we have not received any RTP packets.
+          // Use the first received report block instead.
+          it = remote_stats.begin();
+          remoteSSRC = it->remoteSSRC;
+        }
+
+        if (jitter) {
+          *jitter = it->jitter;
+          WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                       VoEId(_instanceId, _channelId),
+                       "GetRemoteRTCPData() => jitter = %lu", *jitter);
+        }
+
+        if (fractionLost) {
+          *fractionLost = it->fractionLost;
+          WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                       VoEId(_instanceId, _channelId),
+                       "GetRemoteRTCPData() => fractionLost = %lu",
+                       *fractionLost);
+        }
+    }
+    return 0;
+}
+
+int
+Channel::SendApplicationDefinedRTCPPacket(const unsigned char subType,
+                                             unsigned int name,
+                                             const char* data,
+                                             unsigned short dataLengthInBytes)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+                 "Channel::SendApplicationDefinedRTCPPacket()");
+    if (!_sending)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_NOT_SENDING, kTraceError,
+            "SendApplicationDefinedRTCPPacket() not sending");
+        return -1;
+    }
+    if (NULL == data)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "SendApplicationDefinedRTCPPacket() invalid data value");
+        return -1;
+    }
+    if (dataLengthInBytes % 4 != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "SendApplicationDefinedRTCPPacket() invalid length value");
+        return -1;
+    }
+    RTCPMethod status = _rtpRtcpModule->RTCP();
+    if (status == kRtcpOff)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTCP_ERROR, kTraceError,
+            "SendApplicationDefinedRTCPPacket() RTCP is disabled");
+        return -1;
+    }
+
+    // Create and schedule the RTCP APP packet for transmission
+    if (_rtpRtcpModule->SetRTCPApplicationSpecificData(
+        subType,
+        name,
+        (const unsigned char*) data,
+        dataLengthInBytes) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SEND_ERROR, kTraceError,
+            "SendApplicationDefinedRTCPPacket() failed to send RTCP packet");
+        return -1;
+    }
+    return 0;
+}
+
+int
+Channel::GetRTPStatistics(
+        unsigned int& averageJitterMs,
+        unsigned int& maxJitterMs,
+        unsigned int& discardedPackets)
+{
+    WebRtc_UWord8 fraction_lost(0);
+    WebRtc_UWord32 cum_lost(0);
+    WebRtc_UWord32 ext_max(0);
+    WebRtc_UWord32 jitter(0);
+    WebRtc_UWord32 max_jitter(0);
+
+    // The jitter statistics is updated for each received RTP packet and is
+    // based on received packets.
+    if (_rtpRtcpModule->StatisticsRTP(&fraction_lost,
+                                     &cum_lost,
+                                     &ext_max,
+                                     &jitter,
+                                     &max_jitter) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
+            "GetRTPStatistics() failed to read RTP statistics from the "
+            "RTP/RTCP module");
+    }
+
+    const WebRtc_Word32 playoutFrequency =
+        _audioCodingModule.PlayoutFrequency();
+    if (playoutFrequency > 0)
+    {
+        // Scale RTP statistics given the current playout frequency
+        maxJitterMs = max_jitter / (playoutFrequency / 1000);
+        averageJitterMs = jitter / (playoutFrequency / 1000);
+    }
+
+    discardedPackets = _numberOfDiscardedPackets;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+               VoEId(_instanceId, _channelId),
+               "GetRTPStatistics() => averageJitterMs = %lu, maxJitterMs = %lu,"
+               " discardedPackets = %lu)",
+               averageJitterMs, maxJitterMs, discardedPackets);
+    return 0;
+}
+
+int Channel::GetRemoteRTCPSenderInfo(SenderInfo* sender_info) {
+  if (sender_info == NULL) {
+    _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+        "GetRemoteRTCPSenderInfo() invalid sender_info.");
+    return -1;
+  }
+
+  // Get the sender info from the latest received RTCP Sender Report.
+  RTCPSenderInfo rtcp_sender_info;
+  if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_sender_info) != 0) {
+    _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+        "GetRemoteRTCPSenderInfo() failed to read RTCP SR sender info.");
+    return -1;
+  }
+
+  sender_info->NTP_timestamp_high = rtcp_sender_info.NTPseconds;
+  sender_info->NTP_timestamp_low = rtcp_sender_info.NTPfraction;
+  sender_info->RTP_timestamp = rtcp_sender_info.RTPtimeStamp;
+  sender_info->sender_packet_count = rtcp_sender_info.sendPacketCount;
+  sender_info->sender_octet_count = rtcp_sender_info.sendOctetCount;
+  return 0;
+}
+
+int Channel::GetRemoteRTCPReportBlocks(
+    std::vector<ReportBlock>* report_blocks) {
+  if (report_blocks == NULL) {
+    _engineStatisticsPtr->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+      "GetRemoteRTCPReportBlock()s invalid report_blocks.");
+    return -1;
+  }
+
+  // Get the report blocks from the latest received RTCP Sender or Receiver
+  // Report. Each element in the vector contains the sender's SSRC and a
+  // report block according to RFC 3550.
+  std::vector<RTCPReportBlock> rtcp_report_blocks;
+  if (_rtpRtcpModule->RemoteRTCPStat(&rtcp_report_blocks) != 0) {
+    _engineStatisticsPtr->SetLastError(VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+        "GetRemoteRTCPReportBlocks() failed to read RTCP SR/RR report block.");
+    return -1;
+  }
+
+  if (rtcp_report_blocks.empty())
+    return 0;
+
+  std::vector<RTCPReportBlock>::const_iterator it = rtcp_report_blocks.begin();
+  for (; it != rtcp_report_blocks.end(); ++it) {
+    ReportBlock report_block;
+    report_block.sender_SSRC = it->remoteSSRC;
+    report_block.source_SSRC = it->sourceSSRC;
+    report_block.fraction_lost = it->fractionLost;
+    report_block.cumulative_num_packets_lost = it->cumulativeLost;
+    report_block.extended_highest_sequence_number = it->extendedHighSeqNum;
+    report_block.interarrival_jitter = it->jitter;
+    report_block.last_SR_timestamp = it->lastSR;
+    report_block.delay_since_last_SR = it->delaySinceLastSR;
+    report_blocks->push_back(report_block);
+  }
+  return 0;
+}
+
+int
+Channel::GetRTPStatistics(CallStatistics& stats)
+{
+    WebRtc_UWord8 fraction_lost(0);
+    WebRtc_UWord32 cum_lost(0);
+    WebRtc_UWord32 ext_max(0);
+    WebRtc_UWord32 jitter(0);
+    WebRtc_UWord32 max_jitter(0);
+
+    // --- Part one of the final structure (four values)
+
+    // The jitter statistics is updated for each received RTP packet and is
+    // based on received packets.
+    if (_rtpRtcpModule->StatisticsRTP(&fraction_lost,
+                                     &cum_lost,
+                                     &ext_max,
+                                     &jitter,
+                                     &max_jitter) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_CANNOT_RETRIEVE_RTP_STAT, kTraceWarning,
+            "GetRTPStatistics() failed to read RTP statistics from the "
+            "RTP/RTCP module");
+    }
+
+    stats.fractionLost = fraction_lost;
+    stats.cumulativeLost = cum_lost;
+    stats.extendedMax = ext_max;
+    stats.jitterSamples = jitter;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId, _channelId),
+                 "GetRTPStatistics() => fractionLost=%lu, cumulativeLost=%lu,"
+                 " extendedMax=%lu, jitterSamples=%li)",
+                 stats.fractionLost, stats.cumulativeLost, stats.extendedMax,
+                 stats.jitterSamples);
+
+    // --- Part two of the final structure (one value)
+
+    WebRtc_UWord16 RTT(0);
+    RTCPMethod method = _rtpRtcpModule->RTCP();
+    if (method == kRtcpOff)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId, _channelId),
+                     "GetRTPStatistics() RTCP is disabled => valid RTT "
+                     "measurements cannot be retrieved");
+    } else
+    {
+        // The remote SSRC will be zero if no RTP packet has been received.
+        WebRtc_UWord32 remoteSSRC = _rtpRtcpModule->RemoteSSRC();
+        if (remoteSSRC > 0)
+        {
+            WebRtc_UWord16 avgRTT(0);
+            WebRtc_UWord16 maxRTT(0);
+            WebRtc_UWord16 minRTT(0);
+
+            if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT, &maxRTT)
+                != 0)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                             VoEId(_instanceId, _channelId),
+                             "GetRTPStatistics() failed to retrieve RTT from "
+                             "the RTP/RTCP module");
+            }
+        } else
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                         VoEId(_instanceId, _channelId),
+                         "GetRTPStatistics() failed to measure RTT since no "
+                         "RTP packets have been received yet");
+        }
+    }
+
+    stats.rttMs = static_cast<int> (RTT);
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId, _channelId),
+                 "GetRTPStatistics() => rttMs=%d", stats.rttMs);
+
+    // --- Part three of the final structure (four values)
+
+    WebRtc_UWord32 bytesSent(0);
+    WebRtc_UWord32 packetsSent(0);
+    WebRtc_UWord32 bytesReceived(0);
+    WebRtc_UWord32 packetsReceived(0);
+
+    if (_rtpRtcpModule->DataCountersRTP(&bytesSent,
+                                       &packetsSent,
+                                       &bytesReceived,
+                                       &packetsReceived) != 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                     VoEId(_instanceId, _channelId),
+                     "GetRTPStatistics() failed to retrieve RTP datacounters =>"
+                     " output will not be complete");
+    }
+
+    stats.bytesSent = bytesSent;
+    stats.packetsSent = packetsSent;
+    stats.bytesReceived = bytesReceived;
+    stats.packetsReceived = packetsReceived;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId, _channelId),
+                 "GetRTPStatistics() => bytesSent=%d, packetsSent=%d,"
+                 " bytesReceived=%d, packetsReceived=%d)",
+                 stats.bytesSent, stats.packetsSent, stats.bytesReceived,
+                 stats.packetsReceived);
+
+    return 0;
+}
+
+int
+Channel::SetFECStatus(bool enable, int redPayloadtype)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+                 "Channel::SetFECStatus()");
+
+    CodecInst codec;
+
+    // Get default RED settings from the ACM database
+    bool foundRED(false);
+    const WebRtc_UWord8 nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
+    for (int idx = 0; (!foundRED && idx < nSupportedCodecs); idx++)
+    {
+        _audioCodingModule.Codec(idx, codec);
+        if (!STR_CASE_CMP(codec.plname, "RED"))
+        {
+            foundRED = true;
+        }
+    }
+    if (!foundRED)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_CODEC_ERROR, kTraceError,
+            "SetFECStatus() RED is not supported");
+        return -1;
+    }
+
+    if (redPayloadtype != -1)
+    {
+        codec.pltype = redPayloadtype;
+    }
+
+    if (_audioCodingModule.RegisterSendCodec(codec) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+            "SetFECStatus() RED registration in ACM module failed");
+        return -1;
+    }
+    if (_rtpRtcpModule->SetSendREDPayloadType(codec.pltype) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+            "SetFECStatus() RED registration in RTP/RTCP module failed");
+        return -1;
+    }
+    if (_audioCodingModule.SetFECStatus(enable) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+            "SetFECStatus() failed to set FEC state in the ACM");
+        return -1;
+    }
+    return 0;
+}
+
+int
+Channel::GetFECStatus(bool& enabled, int& redPayloadtype)
+{
+    enabled = _audioCodingModule.FECStatus();
+    if (enabled)
+    {
+        WebRtc_Word8 payloadType(0);
+        if (_rtpRtcpModule->SendREDPayloadType(payloadType) != 0)
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+                "GetFECStatus() failed to retrieve RED PT from RTP/RTCP "
+                "module");
+            return -1;
+        }
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                   VoEId(_instanceId, _channelId),
+                   "GetFECStatus() => enabled=%d, redPayloadtype=%d",
+                   enabled, redPayloadtype);
+        return 0;
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId, _channelId),
+                 "GetFECStatus() => enabled=%d", enabled);
+    return 0;
+}
+
+int
+Channel::StartRTPDump(const char fileNameUTF8[1024],
+                      RTPDirections direction)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+                 "Channel::StartRTPDump()");
+    if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "StartRTPDump() invalid RTP direction");
+        return -1;
+    }
+    RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
+        &_rtpDumpIn : &_rtpDumpOut;
+    if (rtpDumpPtr == NULL)
+    {
+        assert(false);
+        return -1;
+    }
+    if (rtpDumpPtr->IsActive())
+    {
+        rtpDumpPtr->Stop();
+    }
+    if (rtpDumpPtr->Start(fileNameUTF8) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "StartRTPDump() failed to create file");
+        return -1;
+    }
+    return 0;
+}
+
+int
+Channel::StopRTPDump(RTPDirections direction)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+                 "Channel::StopRTPDump()");
+    if ((direction != kRtpIncoming) && (direction != kRtpOutgoing))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "StopRTPDump() invalid RTP direction");
+        return -1;
+    }
+    RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
+        &_rtpDumpIn : &_rtpDumpOut;
+    if (rtpDumpPtr == NULL)
+    {
+        assert(false);
+        return -1;
+    }
+    if (!rtpDumpPtr->IsActive())
+    {
+        return 0;
+    }
+    return rtpDumpPtr->Stop();
+}
+
+bool
+Channel::RTPDumpIsActive(RTPDirections direction)
+{
+    if ((direction != kRtpIncoming) &&
+        (direction != kRtpOutgoing))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "RTPDumpIsActive() invalid RTP direction");
+        return false;
+    }
+    RtpDump* rtpDumpPtr = (direction == kRtpIncoming) ?
+        &_rtpDumpIn : &_rtpDumpOut;
+    return rtpDumpPtr->IsActive();
+}
+
+int
+Channel::InsertExtraRTPPacket(unsigned char payloadType,
+                              bool markerBit,
+                              const char* payloadData,
+                              unsigned short payloadSize)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, _channelId),
+               "Channel::InsertExtraRTPPacket()");
+    if (payloadType > 127)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_PLTYPE, kTraceError,
+            "InsertExtraRTPPacket() invalid payload type");
+        return -1;
+    }
+    if (payloadData == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "InsertExtraRTPPacket() invalid payload data");
+        return -1;
+    }
+    if (payloadSize > _rtpRtcpModule->MaxDataPayloadLength())
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "InsertExtraRTPPacket() invalid payload size");
+        return -1;
+    }
+    if (!_sending)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_NOT_SENDING, kTraceError,
+            "InsertExtraRTPPacket() not sending");
+        return -1;
+    }
+
+    // Create extra RTP packet by calling RtpRtcp::SendOutgoingData().
+    // Transport::SendPacket() will be called by the module when the RTP packet
+    // is created.
+    // The call to SendOutgoingData() does *not* modify the timestamp and
+    // payloadtype to ensure that the RTP module generates a valid RTP packet
+    // (user might utilize a non-registered payload type).
+    // The marker bit and payload type will be replaced just before the actual
+    // transmission, i.e., the actual modification is done *after* the RTP
+    // module has delivered its RTP packet back to the VoE.
+    // We will use the stored values above when the packet is modified
+    // (see Channel::SendPacket()).
+
+    _extraPayloadType = payloadType;
+    _extraMarkerBit = markerBit;
+    _insertExtraRTPPacket = true;
+
+    if (_rtpRtcpModule->SendOutgoingData(kAudioFrameSpeech,
+                                        _lastPayloadType,
+                                        _lastLocalTimeStamp,
+                                        // Leaving the time when this frame was
+                                        // received from the capture device as
+                                        // undefined for voice for now.
+                                        -1,
+                                        (const WebRtc_UWord8*) payloadData,
+                                        payloadSize) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+            "InsertExtraRTPPacket() failed to send extra RTP packet");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_UWord32
+Channel::Demultiplex(const AudioFrame& audioFrame)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::Demultiplex()");
+    _audioFrame = audioFrame;
+    _audioFrame.id_ = _channelId;
+    return 0;
+}
+
+WebRtc_UWord32
+Channel::PrepareEncodeAndSend(int mixingFrequency)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::PrepareEncodeAndSend()");
+
+    if (_audioFrame.samples_per_channel_ == 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "Channel::PrepareEncodeAndSend() invalid audio frame");
+        return -1;
+    }
+
+    if (_inputFilePlaying)
+    {
+        MixOrReplaceAudioWithFile(mixingFrequency);
+    }
+
+    if (_mute)
+    {
+        AudioFrameOperations::Mute(_audioFrame);
+    }
+
+    if (_inputExternalMedia)
+    {
+        CriticalSectionScoped cs(&_callbackCritSect);
+        const bool isStereo = (_audioFrame.num_channels_ == 2);
+        if (_inputExternalMediaCallbackPtr)
+        {
+            _inputExternalMediaCallbackPtr->Process(
+                _channelId,
+                kRecordingPerChannel,
+               (WebRtc_Word16*)_audioFrame.data_,
+                _audioFrame.samples_per_channel_,
+                _audioFrame.sample_rate_hz_,
+                isStereo);
+        }
+    }
+
+    InsertInbandDtmfTone();
+
+    if (_includeAudioLevelIndication)
+    {
+        assert(_rtpAudioProc.get() != NULL);
+
+        // Check if settings need to be updated.
+        if (_rtpAudioProc->sample_rate_hz() != _audioFrame.sample_rate_hz_)
+        {
+            if (_rtpAudioProc->set_sample_rate_hz(_audioFrame.sample_rate_hz_) !=
+                AudioProcessing::kNoError)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                             VoEId(_instanceId, _channelId),
+                             "Error setting AudioProcessing sample rate");
+                return -1;
+            }
+        }
+
+        if (_rtpAudioProc->num_input_channels() != _audioFrame.num_channels_)
+        {
+            if (_rtpAudioProc->set_num_channels(_audioFrame.num_channels_,
+                                                _audioFrame.num_channels_)
+                != AudioProcessing::kNoError)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                             VoEId(_instanceId, _channelId),
+                             "Error setting AudioProcessing channels");
+                return -1;
+            }
+        }
+
+        // Performs level analysis only; does not affect the signal.
+        _rtpAudioProc->ProcessStream(&_audioFrame);
+    }
+
+    return 0;
+}
+
+WebRtc_UWord32
+Channel::EncodeAndSend()
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::EncodeAndSend()");
+
+    assert(_audioFrame.num_channels_ <= 2);
+    if (_audioFrame.samples_per_channel_ == 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "Channel::EncodeAndSend() invalid audio frame");
+        return -1;
+    }
+
+    _audioFrame.id_ = _channelId;
+
+    // --- Add 10ms of raw (PCM) audio data to the encoder @ 32kHz.
+
+    // The ACM resamples internally.
+    _audioFrame.timestamp_ = _timeStamp;
+    if (_audioCodingModule.Add10MsData((AudioFrame&)_audioFrame) != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "Channel::EncodeAndSend() ACM encoding failed");
+        return -1;
+    }
+
+    _timeStamp += _audioFrame.samples_per_channel_;
+
+    // --- Encode if complete frame is ready
+
+    // This call will trigger AudioPacketizationCallback::SendData if encoding
+    // is done and payload is ready for packetization and transmission.
+    return _audioCodingModule.Process();
+}
+
+int Channel::RegisterExternalMediaProcessing(
+    ProcessingTypes type,
+    VoEMediaProcess& processObject)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::RegisterExternalMediaProcessing()");
+
+    CriticalSectionScoped cs(&_callbackCritSect);
+
+    if (kPlaybackPerChannel == type)
+    {
+        if (_outputExternalMediaCallbackPtr)
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_OPERATION, kTraceError,
+                "Channel::RegisterExternalMediaProcessing() "
+                "output external media already enabled");
+            return -1;
+        }
+        _outputExternalMediaCallbackPtr = &processObject;
+        _outputExternalMedia = true;
+    }
+    else if (kRecordingPerChannel == type)
+    {
+        if (_inputExternalMediaCallbackPtr)
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_OPERATION, kTraceError,
+                "Channel::RegisterExternalMediaProcessing() "
+                "output external media already enabled");
+            return -1;
+        }
+        _inputExternalMediaCallbackPtr = &processObject;
+        _inputExternalMedia = true;
+    }
+    return 0;
+}
+
+int Channel::DeRegisterExternalMediaProcessing(ProcessingTypes type)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::DeRegisterExternalMediaProcessing()");
+
+    CriticalSectionScoped cs(&_callbackCritSect);
+
+    if (kPlaybackPerChannel == type)
+    {
+        if (!_outputExternalMediaCallbackPtr)
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_OPERATION, kTraceWarning,
+                "Channel::DeRegisterExternalMediaProcessing() "
+                "output external media already disabled");
+            return 0;
+        }
+        _outputExternalMedia = false;
+        _outputExternalMediaCallbackPtr = NULL;
+    }
+    else if (kRecordingPerChannel == type)
+    {
+        if (!_inputExternalMediaCallbackPtr)
+        {
+            _engineStatisticsPtr->SetLastError(
+                VE_INVALID_OPERATION, kTraceWarning,
+                "Channel::DeRegisterExternalMediaProcessing() "
+                "input external media already disabled");
+            return 0;
+        }
+        _inputExternalMedia = false;
+        _inputExternalMediaCallbackPtr = NULL;
+    }
+
+    return 0;
+}
+
+int
+Channel::ResetRTCPStatistics()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::ResetRTCPStatistics()");
+    WebRtc_UWord32 remoteSSRC(0);
+    remoteSSRC = _rtpRtcpModule->RemoteSSRC();
+    return _rtpRtcpModule->ResetRTT(remoteSSRC);
+}
+
+int
+Channel::GetRoundTripTimeSummary(StatVal& delaysMs) const
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetRoundTripTimeSummary()");
+    // Override default module outputs for the case when RTCP is disabled.
+    // This is done to ensure that we are backward compatible with the
+    // VoiceEngine where we did not use RTP/RTCP module.
+    if (!_rtpRtcpModule->RTCP())
+    {
+        delaysMs.min = -1;
+        delaysMs.max = -1;
+        delaysMs.average = -1;
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "Channel::GetRoundTripTimeSummary() RTCP is disabled =>"
+                     " valid RTT measurements cannot be retrieved");
+        return 0;
+    }
+
+    WebRtc_UWord32 remoteSSRC;
+    WebRtc_UWord16 RTT;
+    WebRtc_UWord16 avgRTT;
+    WebRtc_UWord16 maxRTT;
+    WebRtc_UWord16 minRTT;
+    // The remote SSRC will be zero if no RTP packet has been received.
+    remoteSSRC = _rtpRtcpModule->RemoteSSRC();
+    if (remoteSSRC == 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "Channel::GetRoundTripTimeSummary() unable to measure RTT"
+                     " since no RTP packet has been received yet");
+    }
+
+    // Retrieve RTT statistics from the RTP/RTCP module for the specified
+    // channel and SSRC. The SSRC is required to parse out the correct source
+    // in conference scenarios.
+    if (_rtpRtcpModule->RTT(remoteSSRC, &RTT, &avgRTT, &minRTT,&maxRTT) != 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "GetRoundTripTimeSummary unable to retrieve RTT values"
+                     " from the RTCP layer");
+        delaysMs.min = -1; delaysMs.max = -1; delaysMs.average = -1;
+    }
+    else
+    {
+        delaysMs.min = minRTT;
+        delaysMs.max = maxRTT;
+        delaysMs.average = avgRTT;
+    }
+    return 0;
+}
+
+int
+Channel::GetNetworkStatistics(NetworkStatistics& stats)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetNetworkStatistics()");
+    return _audioCodingModule.NetworkStatistics(
+        (ACMNetworkStatistics &)stats);
+}
+
+int
+Channel::GetDelayEstimate(int& delayMs) const
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetDelayEstimate()");
+    delayMs = (_averageDelayMs + 5) / 10 + _recPacketDelayMs;
+    return 0;
+}
+
+int
+Channel::SetMinimumPlayoutDelay(int delayMs)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetMinimumPlayoutDelay()");
+    if ((delayMs < kVoiceEngineMinMinPlayoutDelayMs) ||
+        (delayMs > kVoiceEngineMaxMinPlayoutDelayMs))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "SetMinimumPlayoutDelay() invalid min delay");
+        return -1;
+    }
+    if (_audioCodingModule.SetMinimumPlayoutDelay(delayMs) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_AUDIO_CODING_MODULE_ERROR, kTraceError,
+            "SetMinimumPlayoutDelay() failed to set min playout delay");
+        return -1;
+    }
+    return 0;
+}
+
+int
+Channel::GetPlayoutTimestamp(unsigned int& timestamp)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetPlayoutTimestamp()");
+    WebRtc_UWord32 playoutTimestamp(0);
+    if (GetPlayoutTimeStamp(playoutTimestamp) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_CANNOT_RETRIEVE_VALUE, kTraceError,
+            "GetPlayoutTimestamp() failed to retrieve timestamp");
+        return -1;
+    }
+    timestamp = playoutTimestamp;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+                 VoEId(_instanceId,_channelId),
+                 "GetPlayoutTimestamp() => timestamp=%u", timestamp);
+    return 0;
+}
+
+int
+Channel::SetInitTimestamp(unsigned int timestamp)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+               "Channel::SetInitTimestamp()");
+    if (_sending)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SENDING, kTraceError, "SetInitTimestamp() already sending");
+        return -1;
+    }
+    if (_rtpRtcpModule->SetStartTimestamp(timestamp) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+            "SetInitTimestamp() failed to set timestamp");
+        return -1;
+    }
+    return 0;
+}
+
+int
+Channel::SetInitSequenceNumber(short sequenceNumber)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::SetInitSequenceNumber()");
+    if (_sending)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_SENDING, kTraceError,
+            "SetInitSequenceNumber() already sending");
+        return -1;
+    }
+    if (_rtpRtcpModule->SetSequenceNumber(sequenceNumber) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_RTP_RTCP_MODULE_ERROR, kTraceError,
+            "SetInitSequenceNumber() failed to set sequence number");
+        return -1;
+    }
+    return 0;
+}
+
+int
+Channel::GetRtpRtcp(RtpRtcp* &rtpRtcpModule) const
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetRtpRtcp()");
+    rtpRtcpModule = _rtpRtcpModule.get();
+    return 0;
+}
+
+// TODO(andrew): refactor Mix functions here and in transmit_mixer.cc to use
+// a shared helper.
+WebRtc_Word32
+Channel::MixOrReplaceAudioWithFile(const int mixingFrequency)
+{
+    scoped_array<WebRtc_Word16> fileBuffer(new WebRtc_Word16[640]);
+    int fileSamples(0);
+
+    {
+        CriticalSectionScoped cs(&_fileCritSect);
+
+        if (_inputFilePlayerPtr == NULL)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                         VoEId(_instanceId, _channelId),
+                         "Channel::MixOrReplaceAudioWithFile() fileplayer"
+                             " doesnt exist");
+            return -1;
+        }
+
+        if (_inputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
+                                                      fileSamples,
+                                                      mixingFrequency) == -1)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                         VoEId(_instanceId, _channelId),
+                         "Channel::MixOrReplaceAudioWithFile() file mixing "
+                         "failed");
+            return -1;
+        }
+        if (fileSamples == 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                         VoEId(_instanceId, _channelId),
+                         "Channel::MixOrReplaceAudioWithFile() file is ended");
+            return 0;
+        }
+    }
+
+    assert(_audioFrame.samples_per_channel_ == fileSamples);
+
+    if (_mixFileWithMicrophone)
+    {
+        // Currently file stream is always mono.
+        // TODO(xians): Change the code when FilePlayer supports real stereo.
+        Utility::MixWithSat(_audioFrame.data_,
+                            _audioFrame.num_channels_,
+                            fileBuffer.get(),
+                            1,
+                            fileSamples);
+    }
+    else
+    {
+        // Replace ACM audio with file.
+        // Currently file stream is always mono.
+        // TODO(xians): Change the code when FilePlayer supports real stereo.
+        _audioFrame.UpdateFrame(_channelId,
+                                -1,
+                                fileBuffer.get(),
+                                fileSamples,
+                                mixingFrequency,
+                                AudioFrame::kNormalSpeech,
+                                AudioFrame::kVadUnknown,
+                                1);
+
+    }
+    return 0;
+}
+
+WebRtc_Word32
+Channel::MixAudioWithFile(AudioFrame& audioFrame,
+                          const int mixingFrequency)
+{
+    assert(mixingFrequency <= 32000);
+
+    scoped_array<WebRtc_Word16> fileBuffer(new WebRtc_Word16[640]);
+    int fileSamples(0);
+
+    {
+        CriticalSectionScoped cs(&_fileCritSect);
+
+        if (_outputFilePlayerPtr == NULL)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                         VoEId(_instanceId, _channelId),
+                         "Channel::MixAudioWithFile() file mixing failed");
+            return -1;
+        }
+
+        // We should get the frequency we ask for.
+        if (_outputFilePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
+                                                       fileSamples,
+                                                       mixingFrequency) == -1)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                         VoEId(_instanceId, _channelId),
+                         "Channel::MixAudioWithFile() file mixing failed");
+            return -1;
+        }
+    }
+
+    if (audioFrame.samples_per_channel_ == fileSamples)
+    {
+        // Currently file stream is always mono.
+        // TODO(xians): Change the code when FilePlayer supports real stereo.
+        Utility::MixWithSat(audioFrame.data_,
+                            audioFrame.num_channels_,
+                            fileBuffer.get(),
+                            1,
+                            fileSamples);
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
+            "Channel::MixAudioWithFile() samples_per_channel_(%d) != "
+            "fileSamples(%d)",
+            audioFrame.samples_per_channel_, fileSamples);
+        return -1;
+    }
+
+    return 0;
+}
+
+int
+Channel::InsertInbandDtmfTone()
+{
+    // Check if we should start a new tone.
+    if (_inbandDtmfQueue.PendingDtmf() &&
+        !_inbandDtmfGenerator.IsAddingTone() &&
+        _inbandDtmfGenerator.DelaySinceLastTone() >
+        kMinTelephoneEventSeparationMs)
+    {
+        WebRtc_Word8 eventCode(0);
+        WebRtc_UWord16 lengthMs(0);
+        WebRtc_UWord8 attenuationDb(0);
+
+        eventCode = _inbandDtmfQueue.NextDtmf(&lengthMs, &attenuationDb);
+        _inbandDtmfGenerator.AddTone(eventCode, lengthMs, attenuationDb);
+        if (_playInbandDtmfEvent)
+        {
+            // Add tone to output mixer using a reduced length to minimize
+            // risk of echo.
+            _outputMixerPtr->PlayDtmfTone(eventCode, lengthMs - 80,
+                                          attenuationDb);
+        }
+    }
+
+    if (_inbandDtmfGenerator.IsAddingTone())
+    {
+        WebRtc_UWord16 frequency(0);
+        _inbandDtmfGenerator.GetSampleRate(frequency);
+
+        if (frequency != _audioFrame.sample_rate_hz_)
+        {
+            // Update sample rate of Dtmf tone since the mixing frequency
+            // has changed.
+            _inbandDtmfGenerator.SetSampleRate(
+                (WebRtc_UWord16) (_audioFrame.sample_rate_hz_));
+            // Reset the tone to be added taking the new sample rate into
+            // account.
+            _inbandDtmfGenerator.ResetTone();
+        }
+        
+        WebRtc_Word16 toneBuffer[320];
+        WebRtc_UWord16 toneSamples(0);
+        // Get 10ms tone segment and set time since last tone to zero
+        if (_inbandDtmfGenerator.Get10msTone(toneBuffer, toneSamples) == -1)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                       VoEId(_instanceId, _channelId),
+                       "Channel::EncodeAndSend() inserting Dtmf failed");
+            return -1;
+        }
+
+        // Replace mixed audio with DTMF tone.
+        for (int sample = 0; 
+            sample < _audioFrame.samples_per_channel_;
+            sample++)
+        {
+            for (int channel = 0; 
+                channel < _audioFrame.num_channels_; 
+                channel++)
+            {
+                _audioFrame.data_[sample * _audioFrame.num_channels_ + channel] = 
+                        toneBuffer[sample];
+            }
+        }
+        
+        assert(_audioFrame.samples_per_channel_ == toneSamples);
+    } else
+    {
+        // Add 10ms to "delay-since-last-tone" counter
+        _inbandDtmfGenerator.UpdateDelaySinceLastTone();
+    }
+    return 0;
+}
+
+WebRtc_Word32
+Channel::GetPlayoutTimeStamp(WebRtc_UWord32& playoutTimestamp)
+{
+    WebRtc_UWord32 timestamp(0);
+    CodecInst currRecCodec;
+
+    if (_audioCodingModule.PlayoutTimestamp(timestamp) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "Channel::GetPlayoutTimeStamp() failed to read playout"
+                     " timestamp from the ACM");
+        return -1;
+    }
+
+    WebRtc_UWord16 delayMS(0);
+    if (_audioDeviceModulePtr->PlayoutDelay(&delayMS) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,_channelId),
+                     "Channel::GetPlayoutTimeStamp() failed to read playout"
+                     " delay from the ADM");
+        return -1;
+    }
+
+    WebRtc_Word32 playoutFrequency = _audioCodingModule.PlayoutFrequency();
+    if (_audioCodingModule.ReceiveCodec(currRecCodec) == 0)
+    {
+        if (STR_CASE_CMP("G722", currRecCodec.plname) == 0)
+        {
+            playoutFrequency = 8000;
+        }
+    }
+    timestamp -= (delayMS * (playoutFrequency/1000));
+
+    playoutTimestamp = timestamp;
+
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::GetPlayoutTimeStamp() => playoutTimestamp = %lu",
+                 playoutTimestamp);
+    return 0;
+}
+
+void
+Channel::ResetDeadOrAliveCounters()
+{
+    _countDeadDetections = 0;
+    _countAliveDetections = 0;
+}
+
+void
+Channel::UpdateDeadOrAliveCounters(bool alive)
+{
+    if (alive)
+        _countAliveDetections++;
+    else
+        _countDeadDetections++;
+}
+
+int
+Channel::GetDeadOrAliveCounters(int& countDead, int& countAlive) const
+{
+    bool enabled;
+    WebRtc_UWord8 timeSec;
+
+    _rtpRtcpModule->PeriodicDeadOrAliveStatus(enabled, timeSec);
+    if (!enabled)
+        return (-1);
+
+    countDead = static_cast<int> (_countDeadDetections);
+    countAlive = static_cast<int> (_countAliveDetections);
+    return 0;
+}
+
+WebRtc_Word32
+Channel::SendPacketRaw(const void *data, int len, bool RTCP)
+{
+    if (_transportPtr == NULL)
+    {
+        return -1;
+    }
+    if (!RTCP)
+    {
+        return _transportPtr->SendPacket(_channelId, data, len);
+    }
+    else
+    {
+        return _transportPtr->SendRTCPPacket(_channelId, data, len);
+    }
+}
+
+WebRtc_Word32
+Channel::UpdatePacketDelay(const WebRtc_UWord32 timestamp,
+                           const WebRtc_UWord16 sequenceNumber)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::UpdatePacketDelay(timestamp=%lu, sequenceNumber=%u)",
+                 timestamp, sequenceNumber);
+
+    WebRtc_Word32 rtpReceiveFrequency(0);
+
+    // Get frequency of last received payload
+    rtpReceiveFrequency = _audioCodingModule.ReceiveFrequency();
+
+    CodecInst currRecCodec;
+    if (_audioCodingModule.ReceiveCodec(currRecCodec) == 0)
+    {
+        if (STR_CASE_CMP("G722", currRecCodec.plname) == 0)
+        {
+            // Even though the actual sampling rate for G.722 audio is
+            // 16,000 Hz, the RTP clock rate for the G722 payload format is
+            // 8,000 Hz because that value was erroneously assigned in
+            // RFC 1890 and must remain unchanged for backward compatibility.
+            rtpReceiveFrequency = 8000;
+        }
+    }
+
+    const WebRtc_UWord32 timeStampDiff = timestamp - _playoutTimeStampRTP;
+    WebRtc_UWord32 timeStampDiffMs(0);
+
+    if (timeStampDiff > 0)
+    {
+        switch (rtpReceiveFrequency)
+        {
+            case 8000:
+                timeStampDiffMs = timeStampDiff >> 3;
+                break;
+            case 16000:
+                timeStampDiffMs = timeStampDiff >> 4;
+                break;
+            case 32000:
+                timeStampDiffMs = timeStampDiff >> 5;
+                break;
+            default:
+                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                             VoEId(_instanceId, _channelId),
+                             "Channel::UpdatePacketDelay() invalid sample "
+                             "rate");
+                timeStampDiffMs = 0;
+                return -1;
+        }
+        if (timeStampDiffMs > 5000)
+        {
+            timeStampDiffMs = 0;
+        }
+
+        if (_averageDelayMs == 0)
+        {
+            _averageDelayMs = timeStampDiffMs;
+        }
+        else
+        {
+            // Filter average delay value using exponential filter (alpha is
+            // 7/8). We derive 10*_averageDelayMs here (reduces risk of
+            // rounding error) and compensate for it in GetDelayEstimate()
+            // later. Adding 4/8 results in correct rounding.
+            _averageDelayMs = ((_averageDelayMs*7 + 10*timeStampDiffMs + 4)>>3);
+        }
+
+        if (sequenceNumber - _previousSequenceNumber == 1)
+        {
+            WebRtc_UWord16 packetDelayMs = 0;
+            switch (rtpReceiveFrequency)
+            {
+            case 8000:
+                packetDelayMs = (WebRtc_UWord16)(
+                    (timestamp - _previousTimestamp) >> 3);
+                break;
+            case 16000:
+                packetDelayMs = (WebRtc_UWord16)(
+                    (timestamp - _previousTimestamp) >> 4);
+                break;
+            case 32000:
+                packetDelayMs = (WebRtc_UWord16)(
+                    (timestamp - _previousTimestamp) >> 5);
+                break;
+            }
+
+            if (packetDelayMs >= 10 && packetDelayMs <= 60)
+                _recPacketDelayMs = packetDelayMs;
+        }
+    }
+
+    _previousSequenceNumber = sequenceNumber;
+    _previousTimestamp = timestamp;
+
+    return 0;
+}
+
+void
+Channel::RegisterReceiveCodecsToRTPModule()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::RegisterReceiveCodecsToRTPModule()");
+
+
+    CodecInst codec;
+    const WebRtc_UWord8 nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
+
+    for (int idx = 0; idx < nSupportedCodecs; idx++)
+    {
+        // Open up the RTP/RTCP receiver for all supported codecs
+        if ((_audioCodingModule.Codec(idx, codec) == -1) ||
+            (_rtpRtcpModule->RegisterReceivePayload(codec) == -1))
+        {
+            WEBRTC_TRACE(
+                         kTraceWarning,
+                         kTraceVoice,
+                         VoEId(_instanceId, _channelId),
+                         "Channel::RegisterReceiveCodecsToRTPModule() unable"
+                         " to register %s (%d/%d/%d/%d) to RTP/RTCP receiver",
+                         codec.plname, codec.pltype, codec.plfreq,
+                         codec.channels, codec.rate);
+        }
+        else
+        {
+            WEBRTC_TRACE(
+                         kTraceInfo,
+                         kTraceVoice,
+                         VoEId(_instanceId, _channelId),
+                         "Channel::RegisterReceiveCodecsToRTPModule() %s "
+                         "(%d/%d/%d/%d) has been added to the RTP/RTCP "
+                         "receiver",
+                         codec.plname, codec.pltype, codec.plfreq,
+                         codec.channels, codec.rate);
+        }
+    }
+}
+
+int
+Channel::ApmProcessRx(AudioFrame& audioFrame)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,_channelId),
+                 "Channel::ApmProcessRx()");
+
+    // Reset the APM frequency if the frequency has changed
+    if (_rxAudioProcessingModulePtr->sample_rate_hz() !=
+        audioFrame.sample_rate_hz_)
+    {
+        if (_rxAudioProcessingModulePtr->set_sample_rate_hz(
+            audioFrame.sample_rate_hz_) != 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
+                         "AudioProcessingModule::set_sample_rate_hz("
+                         "sample_rate_hz_=%u) => error",
+                         _audioFrame.sample_rate_hz_);
+        }
+    }
+
+    if (_rxAudioProcessingModulePtr->ProcessStream(&audioFrame) != 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
+                   "AudioProcessingModule::ProcessStream() => error");
+    }
+
+    return 0;
+}
+
+} // namespace voe
+
+} // namespace webrtc
diff --git a/src/voice_engine/channel.h b/src/voice_engine/channel.h
new file mode 100644
index 0000000..8889bc2
--- /dev/null
+++ b/src/voice_engine/channel.h
@@ -0,0 +1,659 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_CHANNEL_H
+#define WEBRTC_VOICE_ENGINE_CHANNEL_H
+
+#include "audio_coding_module.h"
+#include "audio_conference_mixer_defines.h"
+#include "common_types.h"
+#include "dtmf_inband.h"
+#include "dtmf_inband_queue.h"
+#include "file_player.h"
+#include "file_recorder.h"
+#include "level_indicator.h"
+#include "resampler.h"
+#include "rtp_rtcp.h"
+#include "scoped_ptr.h"
+#include "shared_data.h"
+#include "voe_audio_processing.h"
+#include "voe_network.h"
+#include "voice_engine_defines.h"
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+#include "udp_transport.h"
+#endif
+#ifdef WEBRTC_SRTP
+#include "SrtpModule.h"
+#endif
+#ifdef WEBRTC_DTMF_DETECTION
+#include "voe_dtmf.h" // TelephoneEventDetectionMethods, TelephoneEventObserver
+#endif
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+class ProcessThread;
+class AudioDeviceModule;
+class RtpRtcp;
+class FileWrapper;
+class RtpDump;
+class VoiceEngineObserver;
+class VoEMediaProcess;
+class VoERTPObserver;
+class VoERTCPObserver;
+
+struct CallStatistics;
+struct ReportBlock;
+struct SenderInfo;
+
+namespace voe
+{
+class Statistics;
+class TransmitMixer;
+class OutputMixer;
+
+
+class Channel:
+    public RtpData,
+    public RtpFeedback,
+    public RtcpFeedback,
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    public UdpTransportData, // receiving packet from sockets
+#endif
+    public FileCallback, // receiving notification from file player & recorder
+    public Transport,
+    public RtpAudioFeedback,
+    public AudioPacketizationCallback, // receive encoded packets from the ACM
+    public ACMVADCallback, // receive voice activity from the ACM
+#ifdef WEBRTC_DTMF_DETECTION
+    public AudioCodingFeedback, // inband Dtmf detection in the ACM
+#endif
+    public MixerParticipant // supplies output mixer with audio frames
+{
+public:
+    enum {KNumSocketThreads = 1};
+    enum {KNumberOfSocketBuffers = 8};
+public:
+    virtual ~Channel();
+    static WebRtc_Word32 CreateChannel(Channel*& channel,
+                                       const WebRtc_Word32 channelId,
+                                       const WebRtc_UWord32 instanceId);
+    Channel(const WebRtc_Word32 channelId, const WebRtc_UWord32 instanceId);
+    WebRtc_Word32 Init();
+    WebRtc_Word32 SetEngineInformation(
+        Statistics& engineStatistics,
+        OutputMixer& outputMixer,
+        TransmitMixer& transmitMixer,
+        ProcessThread& moduleProcessThread,
+        AudioDeviceModule& audioDeviceModule,
+        VoiceEngineObserver* voiceEngineObserver,
+        CriticalSectionWrapper* callbackCritSect);
+    WebRtc_Word32 UpdateLocalTimeStamp();
+
+public:
+    // API methods
+
+    // VoEBase
+    WebRtc_Word32 StartPlayout();
+    WebRtc_Word32 StopPlayout();
+    WebRtc_Word32 StartSend();
+    WebRtc_Word32 StopSend();
+    WebRtc_Word32 StartReceiving();
+    WebRtc_Word32 StopReceiving();
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    WebRtc_Word32 SetLocalReceiver(const WebRtc_UWord16 rtpPort,
+                                   const WebRtc_UWord16 rtcpPort,
+                                   const char ipAddr[64],
+                                   const char multicastIpAddr[64]);
+    WebRtc_Word32 GetLocalReceiver(int& port, int& RTCPport, char ipAddr[]);
+    WebRtc_Word32 SetSendDestination(const WebRtc_UWord16 rtpPort,
+                                     const char ipAddr[64],
+                                     const int sourcePort,
+                                     const WebRtc_UWord16 rtcpPort);
+    WebRtc_Word32 GetSendDestination(int& port, char ipAddr[64],
+                                     int& sourcePort, int& RTCPport);
+#endif
+    WebRtc_Word32 SetNetEQPlayoutMode(NetEqModes mode);
+    WebRtc_Word32 GetNetEQPlayoutMode(NetEqModes& mode);
+    WebRtc_Word32 SetNetEQBGNMode(NetEqBgnModes mode);
+    WebRtc_Word32 GetNetEQBGNMode(NetEqBgnModes& mode);
+    WebRtc_Word32 SetOnHoldStatus(bool enable, OnHoldModes mode);
+    WebRtc_Word32 GetOnHoldStatus(bool& enabled, OnHoldModes& mode);
+    WebRtc_Word32 RegisterVoiceEngineObserver(VoiceEngineObserver& observer);
+    WebRtc_Word32 DeRegisterVoiceEngineObserver();
+
+    // VoECodec
+    WebRtc_Word32 GetSendCodec(CodecInst& codec);
+    WebRtc_Word32 GetRecCodec(CodecInst& codec);
+    WebRtc_Word32 SetSendCodec(const CodecInst& codec);
+    WebRtc_Word32 SetVADStatus(bool enableVAD, ACMVADMode mode,
+                               bool disableDTX);
+    WebRtc_Word32 GetVADStatus(bool& enabledVAD, ACMVADMode& mode,
+                               bool& disabledDTX);
+    WebRtc_Word32 SetRecPayloadType(const CodecInst& codec);
+    WebRtc_Word32 GetRecPayloadType(CodecInst& codec);
+    WebRtc_Word32 SetAMREncFormat(AmrMode mode);
+    WebRtc_Word32 SetAMRDecFormat(AmrMode mode);
+    WebRtc_Word32 SetAMRWbEncFormat(AmrMode mode);
+    WebRtc_Word32 SetAMRWbDecFormat(AmrMode mode);
+    WebRtc_Word32 SetSendCNPayloadType(int type, PayloadFrequencies frequency);
+    WebRtc_Word32 SetISACInitTargetRate(int rateBps, bool useFixedFrameSize);
+    WebRtc_Word32 SetISACMaxRate(int rateBps);
+    WebRtc_Word32 SetISACMaxPayloadSize(int sizeBytes);
+
+    // VoENetwork
+    WebRtc_Word32 RegisterExternalTransport(Transport& transport);
+    WebRtc_Word32 DeRegisterExternalTransport();
+    WebRtc_Word32 ReceivedRTPPacket(const WebRtc_Word8* data,
+                                    WebRtc_Word32 length);
+    WebRtc_Word32 ReceivedRTCPPacket(const WebRtc_Word8* data,
+                                     WebRtc_Word32 length);
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    WebRtc_Word32 GetSourceInfo(int& rtpPort, int& rtcpPort, char ipAddr[64]);
+    WebRtc_Word32 EnableIPv6();
+    bool IPv6IsEnabled() const;
+    WebRtc_Word32 SetSourceFilter(int rtpPort, int rtcpPort,
+                                  const char ipAddr[64]);
+    WebRtc_Word32 GetSourceFilter(int& rtpPort, int& rtcpPort, char ipAddr[64]);
+    WebRtc_Word32 SetSendTOS(int DSCP, int priority, bool useSetSockopt);
+    WebRtc_Word32 GetSendTOS(int &DSCP, int& priority, bool &useSetSockopt);
+#if defined(_WIN32)
+    WebRtc_Word32 SetSendGQoS(bool enable, int serviceType, int overrideDSCP);
+    WebRtc_Word32 GetSendGQoS(bool &enabled, int &serviceType,
+                              int &overrideDSCP);
+#endif
+#endif
+    WebRtc_Word32 SetPacketTimeoutNotification(bool enable, int timeoutSeconds);
+    WebRtc_Word32 GetPacketTimeoutNotification(bool& enabled,
+                                               int& timeoutSeconds);
+    WebRtc_Word32 RegisterDeadOrAliveObserver(VoEConnectionObserver& observer);
+    WebRtc_Word32 DeRegisterDeadOrAliveObserver();
+    WebRtc_Word32 SetPeriodicDeadOrAliveStatus(bool enable,
+                                               int sampleTimeSeconds);
+    WebRtc_Word32 GetPeriodicDeadOrAliveStatus(bool& enabled,
+                                               int& sampleTimeSeconds);
+    WebRtc_Word32 SendUDPPacket(const void* data, unsigned int length,
+                                int& transmittedBytes, bool useRtcpSocket);
+
+    // VoEFile
+    int StartPlayingFileLocally(const char* fileName, const bool loop,
+                                const FileFormats format,
+                                const int startPosition,
+                                const float volumeScaling,
+                                const int stopPosition,
+                                const CodecInst* codecInst);
+    int StartPlayingFileLocally(InStream* stream, const FileFormats format,
+                                const int startPosition,
+                                const float volumeScaling,
+                                const int stopPosition,
+                                const CodecInst* codecInst);
+    int StopPlayingFileLocally();
+    int IsPlayingFileLocally() const;
+    int RegisterFilePlayingToMixer();
+    int ScaleLocalFilePlayout(const float scale);
+    int GetLocalPlayoutPosition(int& positionMs);
+    int StartPlayingFileAsMicrophone(const char* fileName, const bool loop,
+                                     const FileFormats format,
+                                     const int startPosition,
+                                     const float volumeScaling,
+                                     const int stopPosition,
+                                     const CodecInst* codecInst);
+    int StartPlayingFileAsMicrophone(InStream* stream,
+                                     const FileFormats format,
+                                     const int startPosition,
+                                     const float volumeScaling,
+                                     const int stopPosition,
+                                     const CodecInst* codecInst);
+    int StopPlayingFileAsMicrophone();
+    int IsPlayingFileAsMicrophone() const;
+    int ScaleFileAsMicrophonePlayout(const float scale);
+    int StartRecordingPlayout(const char* fileName, const CodecInst* codecInst);
+    int StartRecordingPlayout(OutStream* stream, const CodecInst* codecInst);
+    int StopRecordingPlayout();
+
+    void SetMixWithMicStatus(bool mix);
+
+    // VoEExternalMediaProcessing
+    int RegisterExternalMediaProcessing(ProcessingTypes type,
+                                        VoEMediaProcess& processObject);
+    int DeRegisterExternalMediaProcessing(ProcessingTypes type);
+
+    // VoEVolumeControl
+    int GetSpeechOutputLevel(WebRtc_UWord32& level) const;
+    int GetSpeechOutputLevelFullRange(WebRtc_UWord32& level) const;
+    int SetMute(const bool enable);
+    bool Mute() const;
+    int SetOutputVolumePan(float left, float right);
+    int GetOutputVolumePan(float& left, float& right) const;
+    int SetChannelOutputVolumeScaling(float scaling);
+    int GetChannelOutputVolumeScaling(float& scaling) const;
+
+    // VoECallReport
+    void ResetDeadOrAliveCounters();
+    int ResetRTCPStatistics();
+    int GetRoundTripTimeSummary(StatVal& delaysMs) const;
+    int GetDeadOrAliveCounters(int& countDead, int& countAlive) const;
+
+    // VoENetEqStats
+    int GetNetworkStatistics(NetworkStatistics& stats);
+
+    // VoEVideoSync
+    int GetDelayEstimate(int& delayMs) const;
+    int SetMinimumPlayoutDelay(int delayMs);
+    int GetPlayoutTimestamp(unsigned int& timestamp);
+    int SetInitTimestamp(unsigned int timestamp);
+    int SetInitSequenceNumber(short sequenceNumber);
+
+    // VoEVideoSyncExtended
+    int GetRtpRtcp(RtpRtcp* &rtpRtcpModule) const;
+
+    // VoEEncryption
+#ifdef WEBRTC_SRTP
+    int EnableSRTPSend(
+            CipherTypes cipherType,
+            int cipherKeyLength,
+            AuthenticationTypes authType,
+            int authKeyLength,
+            int authTagLength,
+            SecurityLevels level,
+            const unsigned char key[kVoiceEngineMaxSrtpKeyLength],
+            bool useForRTCP);
+    int DisableSRTPSend();
+    int EnableSRTPReceive(
+            CipherTypes cipherType,
+            int cipherKeyLength,
+            AuthenticationTypes authType,
+            int authKeyLength,
+            int authTagLength,
+            SecurityLevels level,
+            const unsigned char key[kVoiceEngineMaxSrtpKeyLength],
+            bool useForRTCP);
+    int DisableSRTPReceive();
+#endif
+    int RegisterExternalEncryption(Encryption& encryption);
+    int DeRegisterExternalEncryption();
+
+    // VoEDtmf
+    int SendTelephoneEventOutband(unsigned char eventCode, int lengthMs,
+                                  int attenuationDb, bool playDtmfEvent);
+    int SendTelephoneEventInband(unsigned char eventCode, int lengthMs,
+                                 int attenuationDb, bool playDtmfEvent);
+    int SetDtmfPlayoutStatus(bool enable);
+    bool DtmfPlayoutStatus() const;
+    int SetSendTelephoneEventPayloadType(unsigned char type);
+    int GetSendTelephoneEventPayloadType(unsigned char& type);
+#ifdef WEBRTC_DTMF_DETECTION
+    int RegisterTelephoneEventDetection(
+            TelephoneEventDetectionMethods detectionMethod,
+            VoETelephoneEventObserver& observer);
+    int DeRegisterTelephoneEventDetection();
+    int GetTelephoneEventDetectionStatus(
+            bool& enabled,
+            TelephoneEventDetectionMethods& detectionMethod);
+#endif
+
+    // VoEAudioProcessingImpl
+    int UpdateRxVadDetection(AudioFrame& audioFrame);
+    int RegisterRxVadObserver(VoERxVadCallback &observer);
+    int DeRegisterRxVadObserver();
+    int VoiceActivityIndicator(int &activity);
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+    int SetRxAgcStatus(const bool enable, const AgcModes mode);
+    int GetRxAgcStatus(bool& enabled, AgcModes& mode);
+    int SetRxAgcConfig(const AgcConfig config);
+    int GetRxAgcConfig(AgcConfig& config);
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_NR
+    int SetRxNsStatus(const bool enable, const NsModes mode);
+    int GetRxNsStatus(bool& enabled, NsModes& mode);
+#endif
+
+    // VoERTP_RTCP
+    int RegisterRTPObserver(VoERTPObserver& observer);
+    int DeRegisterRTPObserver();
+    int RegisterRTCPObserver(VoERTCPObserver& observer);
+    int DeRegisterRTCPObserver();
+    int SetLocalSSRC(unsigned int ssrc);
+    int GetLocalSSRC(unsigned int& ssrc);
+    int GetRemoteSSRC(unsigned int& ssrc);
+    int GetRemoteCSRCs(unsigned int arrCSRC[15]);
+    int SetRTPAudioLevelIndicationStatus(bool enable, unsigned char ID);
+    int GetRTPAudioLevelIndicationStatus(bool& enable, unsigned char& ID);
+    int SetRTCPStatus(bool enable);
+    int GetRTCPStatus(bool& enabled);
+    int SetRTCP_CNAME(const char cName[256]);
+    int GetRTCP_CNAME(char cName[256]);
+    int GetRemoteRTCP_CNAME(char cName[256]);
+    int GetRemoteRTCPData(unsigned int& NTPHigh, unsigned int& NTPLow,
+                          unsigned int& timestamp,
+                          unsigned int& playoutTimestamp, unsigned int* jitter,
+                          unsigned short* fractionLost);
+    int SendApplicationDefinedRTCPPacket(const unsigned char subType,
+                                         unsigned int name, const char* data,
+                                         unsigned short dataLengthInBytes);
+    int GetRTPStatistics(unsigned int& averageJitterMs,
+                         unsigned int& maxJitterMs,
+                         unsigned int& discardedPackets);
+    int GetRemoteRTCPSenderInfo(SenderInfo* sender_info);
+    int GetRemoteRTCPReportBlocks(std::vector<ReportBlock>* report_blocks);
+    int GetRTPStatistics(CallStatistics& stats);
+    int SetFECStatus(bool enable, int redPayloadtype);
+    int GetFECStatus(bool& enabled, int& redPayloadtype);
+    int StartRTPDump(const char fileNameUTF8[1024], RTPDirections direction);
+    int StopRTPDump(RTPDirections direction);
+    bool RTPDumpIsActive(RTPDirections direction);
+    int InsertExtraRTPPacket(unsigned char payloadType, bool markerBit,
+                             const char* payloadData,
+                             unsigned short payloadSize);
+
+public:
+    // From AudioPacketizationCallback in the ACM
+    WebRtc_Word32 SendData(FrameType frameType,
+                           WebRtc_UWord8 payloadType,
+                           WebRtc_UWord32 timeStamp,
+                           const WebRtc_UWord8* payloadData,
+                           WebRtc_UWord16 payloadSize,
+                           const RTPFragmentationHeader* fragmentation);
+    // From ACMVADCallback in the ACM
+    WebRtc_Word32 InFrameType(WebRtc_Word16 frameType);
+
+#ifdef WEBRTC_DTMF_DETECTION
+public: // From AudioCodingFeedback in the ACM
+    int IncomingDtmf(const WebRtc_UWord8 digitDtmf, const bool end);
+#endif
+
+public:
+    WebRtc_Word32 OnRxVadDetected(const int vadDecision);
+
+public:
+    // From RtpData in the RTP/RTCP module
+    WebRtc_Word32 OnReceivedPayloadData(const WebRtc_UWord8* payloadData,
+                                        const WebRtc_UWord16 payloadSize,
+                                        const WebRtcRTPHeader* rtpHeader);
+
+public:
+    // From RtpFeedback in the RTP/RTCP module
+    WebRtc_Word32 OnInitializeDecoder(
+            const WebRtc_Word32 id,
+            const WebRtc_Word8 payloadType,
+            const char payloadName[RTP_PAYLOAD_NAME_SIZE],
+            const int frequency,
+            const WebRtc_UWord8 channels,
+            const WebRtc_UWord32 rate);
+
+    void OnPacketTimeout(const WebRtc_Word32 id);
+
+    void OnReceivedPacket(const WebRtc_Word32 id,
+                          const RtpRtcpPacketType packetType);
+
+    void OnPeriodicDeadOrAlive(const WebRtc_Word32 id,
+                               const RTPAliveType alive);
+
+    void OnIncomingSSRCChanged(const WebRtc_Word32 id,
+                               const WebRtc_UWord32 SSRC);
+
+    void OnIncomingCSRCChanged(const WebRtc_Word32 id,
+                               const WebRtc_UWord32 CSRC, const bool added);
+
+public:
+    // From RtcpFeedback in the RTP/RTCP module
+    void OnApplicationDataReceived(const WebRtc_Word32 id,
+                                   const WebRtc_UWord8 subType,
+                                   const WebRtc_UWord32 name,
+                                   const WebRtc_UWord16 length,
+                                   const WebRtc_UWord8* data);
+
+public:
+    // From RtpAudioFeedback in the RTP/RTCP module
+    void OnReceivedTelephoneEvent(const WebRtc_Word32 id,
+                                  const WebRtc_UWord8 event,
+                                  const bool endOfEvent);
+
+    void OnPlayTelephoneEvent(const WebRtc_Word32 id,
+                              const WebRtc_UWord8 event,
+                              const WebRtc_UWord16 lengthMs,
+                              const WebRtc_UWord8 volume);
+
+public:
+    // From UdpTransportData in the Socket Transport module
+    void IncomingRTPPacket(const WebRtc_Word8* incomingRtpPacket,
+                           const WebRtc_Word32 rtpPacketLength,
+                           const char* fromIP,
+                           const WebRtc_UWord16 fromPort);
+
+    void IncomingRTCPPacket(const WebRtc_Word8* incomingRtcpPacket,
+                            const WebRtc_Word32 rtcpPacketLength,
+                            const char* fromIP,
+                            const WebRtc_UWord16 fromPort);
+
+public:
+    // From Transport (called by the RTP/RTCP module)
+    int SendPacket(int /*channel*/, const void *data, int len);
+    int SendRTCPPacket(int /*channel*/, const void *data, int len);
+
+public:
+    // From MixerParticipant
+    WebRtc_Word32 GetAudioFrame(const WebRtc_Word32 id,
+                                AudioFrame& audioFrame);
+    WebRtc_Word32 NeededFrequency(const WebRtc_Word32 id);
+
+public:
+    // From MonitorObserver
+    void OnPeriodicProcess();
+
+public:
+    // From FileCallback
+    void PlayNotification(const WebRtc_Word32 id,
+                          const WebRtc_UWord32 durationMs);
+    void RecordNotification(const WebRtc_Word32 id,
+                            const WebRtc_UWord32 durationMs);
+    void PlayFileEnded(const WebRtc_Word32 id);
+    void RecordFileEnded(const WebRtc_Word32 id);
+
+public:
+    WebRtc_UWord32 InstanceId() const
+    {
+        return _instanceId;
+    }
+    WebRtc_Word32 ChannelId() const
+    {
+        return _channelId;
+    }
+    bool Playing() const
+    {
+        return _playing;
+    }
+    bool Sending() const
+    {
+        // A lock is needed because |_sending| is accessed by both
+        // TransmitMixer::PrepareDemux() and StartSend()/StopSend(), which
+        // are called by different threads.
+        CriticalSectionScoped cs(&_callbackCritSect);
+        return _sending;
+    }
+    bool Receiving() const
+    {
+        return _receiving;
+    }
+    bool ExternalTransport() const
+    {
+        return _externalTransport;
+    }
+    bool OutputIsOnHold() const
+    {
+        return _outputIsOnHold;
+    }
+    bool InputIsOnHold() const
+    {
+        return _inputIsOnHold;
+    }
+    RtpRtcp* RtpRtcpModulePtr() const
+    {
+        return _rtpRtcpModule.get();
+    }
+    WebRtc_Word8 OutputEnergyLevel() const
+    {
+        return _outputAudioLevel.Level();
+    }
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    bool SendSocketsInitialized() const
+    {
+        return _socketTransportModule.SendSocketsInitialized();
+    }
+    bool ReceiveSocketsInitialized() const
+    {
+        return _socketTransportModule.ReceiveSocketsInitialized();
+    }
+#endif
+    WebRtc_UWord32 Demultiplex(const AudioFrame& audioFrame);
+    WebRtc_UWord32 PrepareEncodeAndSend(int mixingFrequency);
+    WebRtc_UWord32 EncodeAndSend();
+
+private:
+    int InsertInbandDtmfTone();
+    WebRtc_Word32
+            MixOrReplaceAudioWithFile(const int mixingFrequency);
+    WebRtc_Word32 MixAudioWithFile(AudioFrame& audioFrame,
+                                   const int mixingFrequency);
+    WebRtc_Word32 GetPlayoutTimeStamp(WebRtc_UWord32& playoutTimestamp);
+    void UpdateDeadOrAliveCounters(bool alive);
+    WebRtc_Word32 SendPacketRaw(const void *data, int len, bool RTCP);
+    WebRtc_Word32 UpdatePacketDelay(const WebRtc_UWord32 timestamp,
+                                    const WebRtc_UWord16 sequenceNumber);
+    void RegisterReceiveCodecsToRTPModule();
+    int ApmProcessRx(AudioFrame& audioFrame);
+
+private:
+    CriticalSectionWrapper& _fileCritSect;
+    CriticalSectionWrapper& _callbackCritSect;
+    WebRtc_UWord32 _instanceId;
+    WebRtc_Word32 _channelId;
+
+private:
+    scoped_ptr<RtpRtcp> _rtpRtcpModule;
+    AudioCodingModule& _audioCodingModule;
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    WebRtc_UWord8 _numSocketThreads;
+    UdpTransport& _socketTransportModule;
+#endif
+#ifdef WEBRTC_SRTP
+    SrtpModule& _srtpModule;
+#endif
+    RtpDump& _rtpDumpIn;
+    RtpDump& _rtpDumpOut;
+private:
+    AudioLevel _outputAudioLevel;
+    bool _externalTransport;
+    AudioFrame _audioFrame;
+    WebRtc_UWord8 _audioLevel_dBov;
+    FilePlayer* _inputFilePlayerPtr;
+    FilePlayer* _outputFilePlayerPtr;
+    FileRecorder* _outputFileRecorderPtr;
+    int _inputFilePlayerId;
+    int _outputFilePlayerId;
+    int _outputFileRecorderId;
+    bool _inputFilePlaying;
+    bool _outputFilePlaying;
+    bool _outputFileRecording;
+    DtmfInbandQueue _inbandDtmfQueue;
+    DtmfInband _inbandDtmfGenerator;
+    bool _inputExternalMedia;
+    bool _outputExternalMedia;
+    VoEMediaProcess* _inputExternalMediaCallbackPtr;
+    VoEMediaProcess* _outputExternalMediaCallbackPtr;
+    WebRtc_UWord8* _encryptionRTPBufferPtr;
+    WebRtc_UWord8* _decryptionRTPBufferPtr;
+    WebRtc_UWord8* _encryptionRTCPBufferPtr;
+    WebRtc_UWord8* _decryptionRTCPBufferPtr;
+    WebRtc_UWord32 _timeStamp;
+    WebRtc_UWord8 _sendTelephoneEventPayloadType;
+    WebRtc_UWord32 _playoutTimeStampRTP;
+    WebRtc_UWord32 _playoutTimeStampRTCP;
+    WebRtc_UWord32 _numberOfDiscardedPackets;
+private:
+    // uses
+    Statistics* _engineStatisticsPtr;
+    OutputMixer* _outputMixerPtr;
+    TransmitMixer* _transmitMixerPtr;
+    ProcessThread* _moduleProcessThreadPtr;
+    AudioDeviceModule* _audioDeviceModulePtr;
+    VoiceEngineObserver* _voiceEngineObserverPtr; // owned by base
+    CriticalSectionWrapper* _callbackCritSectPtr; // owned by base
+    Transport* _transportPtr; // WebRtc socket or external transport
+    Encryption* _encryptionPtr; // WebRtc SRTP or external encryption
+    scoped_ptr<AudioProcessing> _rtpAudioProc;
+    AudioProcessing* _rxAudioProcessingModulePtr; // far end AudioProcessing
+#ifdef WEBRTC_DTMF_DETECTION
+    VoETelephoneEventObserver* _telephoneEventDetectionPtr;
+#endif
+    VoERxVadCallback* _rxVadObserverPtr;
+    WebRtc_Word32 _oldVadDecision;
+    WebRtc_Word32 _sendFrameType; // Send data is voice, 1-voice, 0-otherwise
+    VoERTPObserver* _rtpObserverPtr;
+    VoERTCPObserver* _rtcpObserverPtr;
+private:
+    // VoEBase
+    bool _outputIsOnHold;
+    bool _externalPlayout;
+    bool _inputIsOnHold;
+    bool _playing;
+    bool _sending;
+    bool _receiving;
+    bool _mixFileWithMicrophone;
+    bool _rtpObserver;
+    bool _rtcpObserver;
+    // VoEVolumeControl
+    bool _mute;
+    float _panLeft;
+    float _panRight;
+    float _outputGain;
+    // VoEEncryption
+    bool _encrypting;
+    bool _decrypting;
+    // VoEDtmf
+    bool _playOutbandDtmfEvent;
+    bool _playInbandDtmfEvent;
+    bool _inbandTelephoneEventDetection;
+    bool _outOfBandTelephoneEventDetecion;
+    // VoeRTP_RTCP
+    WebRtc_UWord8 _extraPayloadType;
+    bool _insertExtraRTPPacket;
+    bool _extraMarkerBit;
+    WebRtc_UWord32 _lastLocalTimeStamp;
+    WebRtc_Word8 _lastPayloadType;
+    bool _includeAudioLevelIndication;
+    // VoENetwork
+    bool _rtpPacketTimedOut;
+    bool _rtpPacketTimeOutIsEnabled;
+    WebRtc_UWord32 _rtpTimeOutSeconds;
+    bool _connectionObserver;
+    VoEConnectionObserver* _connectionObserverPtr;
+    WebRtc_UWord32 _countAliveDetections;
+    WebRtc_UWord32 _countDeadDetections;
+    AudioFrame::SpeechType _outputSpeechType;
+    // VoEVideoSync
+    WebRtc_UWord32 _averageDelayMs;
+    WebRtc_UWord16 _previousSequenceNumber;
+    WebRtc_UWord32 _previousTimestamp;
+    WebRtc_UWord16 _recPacketDelayMs;
+    // VoEAudioProcessing
+    bool _RxVadDetection;
+    bool _rxApmIsEnabled;
+    bool _rxAgcIsEnabled;
+    bool _rxNsIsEnabled;
+};
+
+} // namespace voe
+
+} // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_CHANNEL_H
diff --git a/src/voice_engine/channel_manager.cc b/src/voice_engine/channel_manager.cc
new file mode 100644
index 0000000..47cec4a
--- /dev/null
+++ b/src/voice_engine/channel_manager.cc
@@ -0,0 +1,161 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "channel.h"
+#include "channel_manager.h"
+
+namespace webrtc
+{
+
+namespace voe
+{
+
+ChannelManager::ChannelManager(const WebRtc_UWord32 instanceId) :
+    ChannelManagerBase(),
+    _instanceId(instanceId)
+{
+}
+
+ChannelManager::~ChannelManager()
+{
+    ChannelManagerBase::DestroyAllItems();
+}
+
+bool ChannelManager::CreateChannel(WebRtc_Word32& channelId)
+{
+    return ChannelManagerBase::CreateItem(channelId);
+}
+
+WebRtc_Word32 ChannelManager::DestroyChannel(const WebRtc_Word32 channelId)
+{
+    Channel* deleteChannel =
+        static_cast<Channel*> (ChannelManagerBase::RemoveItem(channelId));
+    if (!deleteChannel)
+    {
+        return -1;
+    }
+    delete deleteChannel;
+    return 0;
+}
+
+WebRtc_Word32 ChannelManager::NumOfChannels() const
+{
+    return ChannelManagerBase::NumOfItems();
+}
+
+WebRtc_Word32 ChannelManager::MaxNumOfChannels() const
+{
+    return ChannelManagerBase::MaxNumOfItems();
+}
+
+void* ChannelManager::NewItem(WebRtc_Word32 itemID)
+{
+    Channel* channel;
+    if (Channel::CreateChannel(channel, itemID, _instanceId) == -1)
+    {
+        return NULL;
+    }
+    return static_cast<void*> (channel);
+}
+
+void ChannelManager::DeleteItem(void* item)
+{
+    Channel* deleteItem = static_cast<Channel*> (item);
+    delete deleteItem;
+}
+
+Channel* ChannelManager::GetChannel(const WebRtc_Word32 channelId) const
+{
+    return static_cast<Channel*> (ChannelManagerBase::GetItem(channelId));
+}
+
+void ChannelManager::ReleaseChannel()
+{
+    ChannelManagerBase::ReleaseItem();
+}
+
+void ChannelManager::GetChannelIds(WebRtc_Word32* channelsArray,
+                                   WebRtc_Word32& numOfChannels) const
+{
+    ChannelManagerBase::GetItemIds(channelsArray, numOfChannels);
+}
+
+void ChannelManager::GetChannels(MapWrapper& channels) const
+{
+    ChannelManagerBase::GetChannels(channels);
+}
+
+ScopedChannel::ScopedChannel(ChannelManager& chManager) :
+    _chManager(chManager),
+    _channelPtr(NULL)
+{
+    // Copy all existing channels to the local map.
+    // It is not possible to utilize the ChannelPtr() API after
+    // this constructor. The intention is that this constructor
+    // is used in combination with the scoped iterator.
+    _chManager.GetChannels(_channels);
+}
+
+ScopedChannel::ScopedChannel(ChannelManager& chManager,
+                             WebRtc_Word32 channelId) :
+    _chManager(chManager),
+    _channelPtr(NULL)
+{
+    _channelPtr = _chManager.GetChannel(channelId);
+}
+
+ScopedChannel::~ScopedChannel()
+{
+    if (_channelPtr != NULL || _channels.Size() != 0)
+    {
+        _chManager.ReleaseChannel();
+    }
+
+    // Delete the map
+    while (_channels.Erase(_channels.First()) == 0)
+        ;
+}
+
+Channel* ScopedChannel::ChannelPtr()
+{
+    return _channelPtr;
+}
+
+Channel* ScopedChannel::GetFirstChannel(void*& iterator) const
+{
+    MapItem* it = _channels.First();
+    iterator = (void*) it;
+    if (!it)
+    {
+        return NULL;
+    }
+    return static_cast<Channel*> (it->GetItem());
+}
+
+Channel* ScopedChannel::GetNextChannel(void*& iterator) const
+{
+    MapItem* it = (MapItem*) iterator;
+    if (!it)
+    {
+        iterator = NULL;
+        return NULL;
+    }
+    it = _channels.Next(it);
+    iterator = (void*) it;
+    if (!it)
+    {
+        return NULL;
+    }
+    return static_cast<Channel*> (it->GetItem());
+}
+
+} // namespace voe
+
+} // namespace webrtc
diff --git a/src/voice_engine/channel_manager.h b/src/voice_engine/channel_manager.h
new file mode 100644
index 0000000..6c40ef1
--- /dev/null
+++ b/src/voice_engine/channel_manager.h
@@ -0,0 +1,89 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_CHANNEL_MANAGER_H
+#define WEBRTC_VOICE_ENGINE_CHANNEL_MANAGER_H
+
+#include "channel_manager_base.h"
+#include "typedefs.h"
+
+namespace webrtc
+{
+
+namespace voe
+{
+
+class ScopedChannel;
+class Channel;
+
+class ChannelManager: private ChannelManagerBase
+{
+    friend class ScopedChannel;
+
+public:
+    bool CreateChannel(WebRtc_Word32& channelId);
+
+    WebRtc_Word32 DestroyChannel(const WebRtc_Word32 channelId);
+
+    WebRtc_Word32 MaxNumOfChannels() const;
+
+    WebRtc_Word32 NumOfChannels() const;
+
+    void GetChannelIds(WebRtc_Word32* channelsArray,
+                       WebRtc_Word32& numOfChannels) const;
+
+    ChannelManager(const WebRtc_UWord32 instanceId);
+
+    ~ChannelManager();
+
+private:
+    ChannelManager(const ChannelManager&);
+
+    ChannelManager& operator=(const ChannelManager&);
+
+    Channel* GetChannel(const WebRtc_Word32 channelId) const;
+
+    void GetChannels(MapWrapper& channels) const;
+
+    void ReleaseChannel();
+
+    virtual void* NewItem(WebRtc_Word32 itemID);
+
+    virtual void DeleteItem(void* item);
+
+    WebRtc_UWord32 _instanceId;
+};
+
+class ScopedChannel
+{
+public:
+    // Can only be created by the channel manager
+    ScopedChannel(ChannelManager& chManager);
+
+    ScopedChannel(ChannelManager& chManager, WebRtc_Word32 channelId);
+
+    Channel* ChannelPtr();
+
+    Channel* GetFirstChannel(void*& iterator) const;
+
+    Channel* GetNextChannel(void*& iterator) const;
+
+    ~ScopedChannel();
+private:
+    ChannelManager& _chManager;
+    Channel* _channelPtr;
+    MapWrapper _channels;
+};
+
+} // namespace voe
+
+} // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_CHANNEL_MANAGER_H
diff --git a/src/voice_engine/channel_manager_base.cc b/src/voice_engine/channel_manager_base.cc
new file mode 100644
index 0000000..572720c
--- /dev/null
+++ b/src/voice_engine/channel_manager_base.cc
@@ -0,0 +1,227 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "channel_manager_base.h"
+
+#include "critical_section_wrapper.h"
+#include "rw_lock_wrapper.h"
+#include <cassert>
+
+namespace webrtc
+{
+
+namespace voe
+{
+
+ChannelManagerBase::ChannelManagerBase() :
+    _itemsCritSectPtr(CriticalSectionWrapper::CreateCriticalSection()),
+    _itemsRWLockPtr(RWLockWrapper::CreateRWLock())
+{
+    for (int i = 0; i < KMaxNumberOfItems; i++)
+    {
+        _freeItemIds[i] = true;
+    }
+}
+
+ChannelManagerBase::~ChannelManagerBase()
+{
+    if (_itemsRWLockPtr)
+    {
+        delete _itemsRWLockPtr;
+        _itemsRWLockPtr = NULL;
+    }
+    if (_itemsCritSectPtr)
+    {
+        delete _itemsCritSectPtr;
+        _itemsCritSectPtr = NULL;
+    }
+}
+
+bool ChannelManagerBase::GetFreeItemId(WebRtc_Word32& itemId)
+{
+    CriticalSectionScoped cs(_itemsCritSectPtr);
+    WebRtc_Word32 i(0);
+    while (i < KMaxNumberOfItems)
+    {
+        if (_freeItemIds[i])
+        {
+            itemId = i;
+            _freeItemIds[i] = false;
+            return true;
+        }
+        i++;
+    }
+    return false;
+}
+
+void ChannelManagerBase::AddFreeItemId(WebRtc_Word32 itemId)
+{
+    assert(itemId < KMaxNumberOfItems);
+    _freeItemIds[itemId] = true;
+}
+
+void ChannelManagerBase::RemoveFreeItemIds()
+{
+    for (int i = 0; i < KMaxNumberOfItems; i++)
+    {
+        _freeItemIds[i] = false;
+    }
+}
+
+bool ChannelManagerBase::CreateItem(WebRtc_Word32& itemId)
+{
+    _itemsCritSectPtr->Enter();
+    void* itemPtr;
+    itemId = -1;
+    const bool success = GetFreeItemId(itemId);
+    if (!success)
+    {
+        _itemsCritSectPtr->Leave();
+        return false;
+    }
+    itemPtr = NewItem(itemId);
+    if (!itemPtr)
+    {
+        _itemsCritSectPtr->Leave();
+        return false;
+    }
+    _itemsCritSectPtr->Leave();
+    InsertItem(itemId, itemPtr);
+
+    return true;
+}
+
+void ChannelManagerBase::InsertItem(WebRtc_Word32 itemId, void* item)
+{
+    CriticalSectionScoped cs(_itemsCritSectPtr);
+    assert(!_items.Find(itemId));
+    _items.Insert(itemId, item);
+}
+
+void*
+ChannelManagerBase::RemoveItem(WebRtc_Word32 itemId)
+{
+    CriticalSectionScoped cs(_itemsCritSectPtr);
+    WriteLockScoped wlock(*_itemsRWLockPtr);
+    MapItem* it = _items.Find(itemId);
+    if (!it)
+    {
+        return 0;
+    }
+    void* returnItem = it->GetItem();
+    _items.Erase(it);
+    AddFreeItemId(itemId);
+
+    return returnItem;
+}
+
+void ChannelManagerBase::DestroyAllItems()
+{
+    CriticalSectionScoped cs(_itemsCritSectPtr);
+    MapItem* it = _items.First();
+    while (it)
+    {
+        DeleteItem(it->GetItem());
+        _items.Erase(it);
+        it = _items.First();
+    }
+    RemoveFreeItemIds();
+}
+
+WebRtc_Word32 ChannelManagerBase::NumOfItems() const
+{
+    return _items.Size();
+}
+
+WebRtc_Word32 ChannelManagerBase::MaxNumOfItems() const
+{
+    return static_cast<WebRtc_Word32> (KMaxNumberOfItems);
+}
+
+void*
+ChannelManagerBase::GetItem(WebRtc_Word32 itemId) const
+{
+    CriticalSectionScoped cs(_itemsCritSectPtr);
+    MapItem* it = _items.Find(itemId);
+    if (!it)
+    {
+        return 0;
+    }
+    _itemsRWLockPtr->AcquireLockShared();
+    return it->GetItem();
+}
+
+void*
+ChannelManagerBase::GetFirstItem(void*& iterator) const
+{
+    CriticalSectionScoped cs(_itemsCritSectPtr);
+    MapItem* it = _items.First();
+    iterator = (void*) it;
+    if (!it)
+    {
+        return 0;
+    }
+    return it->GetItem();
+}
+
+void*
+ChannelManagerBase::GetNextItem(void*& iterator) const
+{
+    CriticalSectionScoped cs(_itemsCritSectPtr);
+    MapItem* it = (MapItem*) iterator;
+    if (!it)
+    {
+        iterator = 0;
+        return 0;
+    }
+    it = _items.Next(it);
+    iterator = (void*) it;
+    if (!it)
+    {
+        return 0;
+    }
+    return it->GetItem();
+}
+
+void ChannelManagerBase::ReleaseItem()
+{
+    _itemsRWLockPtr->ReleaseLockShared();
+}
+
+void ChannelManagerBase::GetItemIds(WebRtc_Word32* channelsArray,
+                                    WebRtc_Word32& numOfChannels) const
+{
+    MapItem* it = _items.First();
+    numOfChannels = (numOfChannels <= _items.Size()) ?
+        numOfChannels : _items.Size();
+    for (int i = 0; i < numOfChannels && it != NULL; i++)
+    {
+        channelsArray[i] = it->GetId();
+        it = _items.Next(it);
+    }
+}
+
+void ChannelManagerBase::GetChannels(MapWrapper& channels) const
+{
+    CriticalSectionScoped cs(_itemsCritSectPtr);
+    if (_items.Size() == 0)
+    {
+        return;
+    }
+    _itemsRWLockPtr->AcquireLockShared();
+    for (MapItem* it = _items.First(); it != NULL; it = _items.Next(it))
+    {
+        channels.Insert(it->GetId(), it->GetItem());
+    }
+}
+
+} // namespace voe
+
+} // namespace webrtc
diff --git a/src/voice_engine/channel_manager_base.h b/src/voice_engine/channel_manager_base.h
new file mode 100644
index 0000000..0831e43
--- /dev/null
+++ b/src/voice_engine/channel_manager_base.h
@@ -0,0 +1,90 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_CHANNEL_MANAGER_BASE_H
+#define WEBRTC_VOICE_ENGINE_CHANNEL_MANAGER_BASE_H
+
+#include "typedefs.h"
+#include "map_wrapper.h"
+#include "voice_engine_defines.h"
+
+namespace webrtc
+{
+class CriticalSectionWrapper;
+class RWLockWrapper;
+
+namespace voe
+{
+
+class ScopedChannel;
+class Channel;
+
+class ChannelManagerBase
+{
+public:
+    enum {KMaxNumberOfItems = kVoiceEngineMaxNumOfChannels};
+
+protected:
+    bool CreateItem(WebRtc_Word32& itemId);
+
+    void InsertItem(WebRtc_Word32 itemId, void* item);
+
+    void* RemoveItem(WebRtc_Word32 itemId);
+
+    void* GetItem(WebRtc_Word32 itemId) const;
+
+    void* GetFirstItem(void*& iterator) const ;
+
+    void* GetNextItem(void*& iterator) const;
+
+    void ReleaseItem();
+
+    void AddFreeItemId(WebRtc_Word32 itemId);
+
+    bool GetFreeItemId(WebRtc_Word32& itemId);
+
+    void RemoveFreeItemIds();
+
+    void DestroyAllItems();
+
+    WebRtc_Word32 NumOfItems() const;
+
+    WebRtc_Word32 MaxNumOfItems() const;
+
+    void GetItemIds(WebRtc_Word32* channelsArray,
+                    WebRtc_Word32& numOfChannels) const;
+
+    void GetChannels(MapWrapper& channels) const;
+
+    virtual void* NewItem(WebRtc_Word32 itemId) = 0;
+
+    virtual void DeleteItem(void* item) = 0;
+
+    ChannelManagerBase();
+
+    virtual ~ChannelManagerBase();
+
+private:
+    // Protects _items and _freeItemIds
+    CriticalSectionWrapper* _itemsCritSectPtr;
+
+    MapWrapper _items;
+
+    bool _freeItemIds[KMaxNumberOfItems];
+
+    // Protects channels from being destroyed while being used
+    RWLockWrapper* _itemsRWLockPtr;
+};
+
+} // namespace voe
+
+} // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_CHANNEL_MANAGER_BASE_H
diff --git a/src/voice_engine/channel_unittest.cc b/src/voice_engine/channel_unittest.cc
new file mode 100644
index 0000000..fc78679
--- /dev/null
+++ b/src/voice_engine/channel_unittest.cc
@@ -0,0 +1,15 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "channel.h"
+#include "gtest/gtest.h"
+
+// Empty test just to get coverage metrics.
+TEST(ChannelTest, EmptyTestToGetCodeCoverage) {}
diff --git a/src/voice_engine/dtmf_inband.cc b/src/voice_engine/dtmf_inband.cc
new file mode 100644
index 0000000..689bc54
--- /dev/null
+++ b/src/voice_engine/dtmf_inband.cc
@@ -0,0 +1,389 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "dtmf_inband.h"
+
+#include "critical_section_wrapper.h"
+#include "trace.h"
+#include <cassert>
+
+namespace webrtc {
+
+const WebRtc_Word16 Dtmf_a_times2Tab8Khz[8]=
+{
+	27978, 26956, 25701, 24219,
+	19073, 16325, 13085, 9314
+};
+
+const WebRtc_Word16 Dtmf_a_times2Tab16Khz[8]=
+{
+	31548, 31281, 30951, 30556,
+	29144, 28361, 27409, 26258
+};
+
+const WebRtc_Word16 Dtmf_a_times2Tab32Khz[8]=
+{
+	32462,32394, 32311, 32210, 31849, 31647, 31400, 31098
+};
+
+// Second table is sin(2*pi*f/fs) in Q14
+
+const WebRtc_Word16 Dtmf_ym2Tab8Khz[8]=
+{
+	8527, 9315, 10163, 11036,
+	13322, 14206, 15021, 15708
+};
+
+const WebRtc_Word16 Dtmf_ym2Tab16Khz[8]=
+{
+	4429, 4879, 5380, 5918,
+	7490, 8207, 8979, 9801
+};
+
+const WebRtc_Word16 Dtmf_ym2Tab32Khz[8]=
+{
+	2235, 2468, 2728, 3010, 3853, 4249, 4685, 5164
+};
+
+const WebRtc_Word16 Dtmf_dBm0kHz[37]=
+{
+       16141,      14386,      12821,      11427,      10184,       9077,
+        8090,       7210,       6426,       5727,       5104,       4549,
+        4054,       3614,       3221,       2870,       2558,       2280,
+        2032,       1811,       1614,       1439,       1282,       1143,
+        1018,        908,        809,        721,        643,        573,
+         510,        455,        405,        361,        322,        287,
+		 256
+};
+
+
+DtmfInband::DtmfInband(const WebRtc_Word32 id) :
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _id(id),
+    _outputFrequencyHz(8000),
+    _frameLengthSamples(0),
+    _remainingSamples(0),
+    _eventCode(0),
+    _attenuationDb(0),
+    _lengthMs(0),
+    _reinit(true),
+    _playing(false),
+    _delaySinceLastToneMS(1000)
+{
+    memset(_oldOutputLow, 0, sizeof(_oldOutputLow));
+    memset(_oldOutputHigh, 0, sizeof(_oldOutputHigh));
+}
+
+DtmfInband::~DtmfInband()
+{
+	delete &_critSect;
+}
+
+int
+DtmfInband::SetSampleRate(const WebRtc_UWord16 frequency)
+{
+    if (frequency != 8000 &&
+            frequency != 16000 &&
+            frequency != 32000)
+    {
+        // invalid sample rate
+        assert(false);
+        return -1;
+    }
+    _outputFrequencyHz = frequency;
+    return 0;
+}
+
+int
+DtmfInband::GetSampleRate(WebRtc_UWord16& frequency)
+{
+    frequency = _outputFrequencyHz;
+    return 0;
+}
+
+void 
+DtmfInband::Init()
+{
+    _remainingSamples = 0;
+    _frameLengthSamples = 0;
+    _eventCode = 0;
+    _attenuationDb = 0;
+    _lengthMs = 0;
+    _reinit = true;
+    _oldOutputLow[0] = 0;
+    _oldOutputLow[1] = 0;
+    _oldOutputHigh[0] = 0;
+    _oldOutputHigh[1] = 0;
+    _delaySinceLastToneMS = 1000;
+}
+
+int
+DtmfInband::AddTone(const WebRtc_UWord8 eventCode,
+                    WebRtc_Word32 lengthMs,
+                    WebRtc_Word32 attenuationDb)
+{
+    CriticalSectionScoped lock(&_critSect);
+
+    if (attenuationDb > 36 || eventCode > 15)
+    {
+        assert(false);
+        return -1;
+    }
+
+    if (IsAddingTone())
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_id,-1),
+                   "DtmfInband::AddTone() new tone interrupts ongoing tone");
+    }
+
+    ReInit();
+
+    _frameLengthSamples = static_cast<WebRtc_Word16> (_outputFrequencyHz / 100);
+    _eventCode = static_cast<WebRtc_Word16> (eventCode);
+    _attenuationDb = static_cast<WebRtc_Word16> (attenuationDb);
+    _remainingSamples = static_cast<WebRtc_Word32>
+        (lengthMs * (_outputFrequencyHz / 1000));
+    _lengthMs = lengthMs;
+
+    return 0;
+}
+
+int
+DtmfInband::ResetTone()
+{
+    CriticalSectionScoped lock(&_critSect);
+
+    ReInit();
+
+    _frameLengthSamples = static_cast<WebRtc_Word16> (_outputFrequencyHz / 100);
+    _remainingSamples = static_cast<WebRtc_Word32>
+        (_lengthMs * (_outputFrequencyHz / 1000));
+
+    return 0;
+}
+
+int
+DtmfInband::StartTone(const WebRtc_UWord8 eventCode,
+                      WebRtc_Word32 attenuationDb)
+{
+    CriticalSectionScoped lock(&_critSect);
+
+    if (attenuationDb > 36 || eventCode > 15)
+    {
+        assert(false);
+        return -1;
+    }
+
+    if (IsAddingTone())
+    {
+            return -1;
+    }
+
+    ReInit();
+
+    _frameLengthSamples = static_cast<WebRtc_Word16> (_outputFrequencyHz / 100);
+    _eventCode = static_cast<WebRtc_Word16> (eventCode);
+    _attenuationDb = static_cast<WebRtc_Word16> (attenuationDb);
+    _playing = true;
+
+    return 0;
+}
+
+int
+DtmfInband::StopTone()
+{
+    CriticalSectionScoped lock(&_critSect);
+
+    if (!_playing)
+    {
+        return 0;
+    }
+
+    _playing = false;
+
+    return 0;
+}
+
+// Shall be called between tones
+void 
+DtmfInband::ReInit()
+{
+    _reinit = true;
+}
+
+bool 
+DtmfInband::IsAddingTone()
+{
+    CriticalSectionScoped lock(&_critSect);
+    return (_remainingSamples > 0 || _playing);
+}
+
+int
+DtmfInband::Get10msTone(WebRtc_Word16 output[320],
+                        WebRtc_UWord16& outputSizeInSamples)
+{
+    CriticalSectionScoped lock(&_critSect);
+    if (DtmfFix_generate(output,
+                         _eventCode,
+                         _attenuationDb,
+                         _frameLengthSamples,
+                         _outputFrequencyHz) == -1)
+    {
+        return -1;
+    }
+    _remainingSamples -= _frameLengthSamples;
+    outputSizeInSamples = _frameLengthSamples;
+    _delaySinceLastToneMS = 0;
+    return 0;
+}
+
+void
+DtmfInband::UpdateDelaySinceLastTone()
+{
+    _delaySinceLastToneMS += kDtmfFrameSizeMs;
+    // avoid wraparound
+    if (_delaySinceLastToneMS > (1<<30))
+    {
+        _delaySinceLastToneMS = 1000;
+    }
+}
+
+WebRtc_UWord32
+DtmfInband::DelaySinceLastTone() const
+{
+    return _delaySinceLastToneMS;
+}
+
+WebRtc_Word16
+DtmfInband::DtmfFix_generate(WebRtc_Word16 *decoded,
+                             const WebRtc_Word16 value,
+                             const WebRtc_Word16 volume,
+                             const WebRtc_Word16 frameLen,
+                             const WebRtc_Word16 fs)
+{
+    const WebRtc_Word16 *a_times2Tbl;
+    const WebRtc_Word16 *y2_Table;
+    WebRtc_Word16 a1_times2 = 0, a2_times2 = 0;
+
+    if (fs==8000) {
+        a_times2Tbl=Dtmf_a_times2Tab8Khz;
+        y2_Table=Dtmf_ym2Tab8Khz;
+    } else if (fs==16000) {
+        a_times2Tbl=Dtmf_a_times2Tab16Khz;
+        y2_Table=Dtmf_ym2Tab16Khz;
+    } else if (fs==32000) {
+        a_times2Tbl=Dtmf_a_times2Tab32Khz;
+        y2_Table=Dtmf_ym2Tab32Khz;
+    } else {
+        return(-1);
+    }
+
+    if ((value==1)||(value==2)||(value==3)||(value==12)) {
+        a1_times2=a_times2Tbl[0];
+        if (_reinit) {
+            _oldOutputLow[0]=y2_Table[0];
+            _oldOutputLow[1]=0;
+        }
+    } else if ((value==4)||(value==5)||(value==6)||(value==13)) {
+        a1_times2=a_times2Tbl[1];
+        if (_reinit) {
+            _oldOutputLow[0]=y2_Table[1];
+            _oldOutputLow[1]=0;
+        }
+    } else if ((value==7)||(value==8)||(value==9)||(value==14)) {
+        a1_times2=a_times2Tbl[2];
+        if (_reinit) {
+            _oldOutputLow[0]=y2_Table[2];
+            _oldOutputLow[1]=0;
+        }
+    } else if ((value==10)||(value==0)||(value==11)||(value==15)) {
+        a1_times2=a_times2Tbl[3];
+        if (_reinit) {
+            _oldOutputLow[0]=y2_Table[3];
+            _oldOutputLow[1]=0;
+        }
+    }
+    if ((value==1)||(value==4)||(value==7)||(value==10)) {
+        a2_times2=a_times2Tbl[4];
+        if (_reinit) {
+            _oldOutputHigh[0]=y2_Table[4];
+            _oldOutputHigh[1]=0;
+            _reinit=false;
+        }
+    } else if ((value==2)||(value==5)||(value==8)||(value==0)) {
+        a2_times2=a_times2Tbl[5];
+        if (_reinit) {
+            _oldOutputHigh[0]=y2_Table[5];
+            _oldOutputHigh[1]=0;
+            _reinit=false;
+        }
+    } else if ((value==3)||(value==6)||(value==9)||(value==11)) {
+        a2_times2=a_times2Tbl[6];
+        if (_reinit) {
+            _oldOutputHigh[0]=y2_Table[6];
+            _oldOutputHigh[1]=0;
+            _reinit=false;
+        }
+    } else if ((value==12)||(value==13)||(value==14)||(value==15)) {
+        a2_times2=a_times2Tbl[7];
+        if (_reinit) {
+            _oldOutputHigh[0]=y2_Table[7];
+            _oldOutputHigh[1]=0;
+            _reinit=false;
+        }
+    }
+
+    return (DtmfFix_generateSignal(a1_times2,
+                                   a2_times2,
+                                   volume,
+                                   decoded,
+                                   frameLen));
+}
+
+WebRtc_Word16
+DtmfInband::DtmfFix_generateSignal(const WebRtc_Word16 a1_times2,
+                                   const WebRtc_Word16 a2_times2,
+                                   const WebRtc_Word16 volume,
+                                   WebRtc_Word16 *signal,
+                                   const WebRtc_Word16 length)
+{
+    int i;
+
+    /* Generate Signal */
+    for (i=0;i<length;i++) {
+        WebRtc_Word32 tempVal;
+        WebRtc_Word16 tempValLow, tempValHigh;
+
+        /* Use recursion formula y[n] = a*2*y[n-1] - y[n-2] */
+        tempValLow  = (WebRtc_Word16)(((( (WebRtc_Word32)(a1_times2 *
+            _oldOutputLow[1])) + 8192) >> 14) - _oldOutputLow[0]);
+        tempValHigh = (WebRtc_Word16)(((( (WebRtc_Word32)(a2_times2 *
+            _oldOutputHigh[1])) + 8192) >> 14) - _oldOutputHigh[0]);
+
+        /* Update memory */
+        _oldOutputLow[0]=_oldOutputLow[1];
+        _oldOutputLow[1]=tempValLow;
+        _oldOutputHigh[0]=_oldOutputHigh[1];
+        _oldOutputHigh[1]=tempValHigh;
+
+        tempVal = (WebRtc_Word32)(kDtmfAmpLow * tempValLow) +
+            (WebRtc_Word32)(kDtmfAmpHigh * tempValHigh);
+
+        /* Norm the signal to Q14 */
+        tempVal=(tempVal+16384)>>15;
+
+        /* Scale the signal to correct dbM0 value */
+        signal[i]=(WebRtc_Word16)((tempVal*Dtmf_dBm0kHz[volume]+8192)>>14);
+    }
+
+    return(0);
+}
+
+}  // namespace webrtc
diff --git a/src/voice_engine/dtmf_inband.h b/src/voice_engine/dtmf_inband.h
new file mode 100644
index 0000000..806fff0
--- /dev/null
+++ b/src/voice_engine/dtmf_inband.h
@@ -0,0 +1,93 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_DTMF_INBAND_H
+#define WEBRTC_VOICE_ENGINE_DTMF_INBAND_H
+
+#if _MSC_VER > 1000
+#pragma once
+#endif
+
+#include "typedefs.h"
+#include "voice_engine_defines.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+class DtmfInband
+{
+public:
+    DtmfInband(const WebRtc_Word32 id);
+
+    virtual ~DtmfInband();
+
+    void Init();
+
+    int SetSampleRate(const WebRtc_UWord16 frequency);
+
+    int GetSampleRate(WebRtc_UWord16& frequency);
+
+    int AddTone(const WebRtc_UWord8 eventCode,
+                WebRtc_Word32 lengthMs,
+                WebRtc_Word32 attenuationDb);
+
+    int ResetTone();
+    int StartTone(const WebRtc_UWord8 eventCode,
+                  WebRtc_Word32 attenuationDb);
+
+    int StopTone();
+
+    bool IsAddingTone();
+
+    int Get10msTone(WebRtc_Word16 output[320],
+                    WebRtc_UWord16& outputSizeInSamples);
+
+    WebRtc_UWord32 DelaySinceLastTone() const;
+
+    void UpdateDelaySinceLastTone();
+
+private:
+    void ReInit();
+    WebRtc_Word16 DtmfFix_generate(WebRtc_Word16* decoded,
+                                   const WebRtc_Word16 value,
+                                   const WebRtc_Word16 volume,
+                                   const WebRtc_Word16 frameLen,
+                                   const WebRtc_Word16 fs);
+
+private:
+    enum {kDtmfFrameSizeMs = 10};
+    enum {kDtmfAmpHigh = 32768};
+    enum {kDtmfAmpLow  = 23171};	// 3 dB lower than the high frequency
+
+    WebRtc_Word16 DtmfFix_generateSignal(const WebRtc_Word16 a1_times2,
+                                         const WebRtc_Word16 a2_times2,
+                                         const WebRtc_Word16 volume,
+                                         WebRtc_Word16* signal,
+                                         const WebRtc_Word16 length);
+
+private:
+    CriticalSectionWrapper& _critSect;
+    WebRtc_Word32 _id;
+    WebRtc_UWord16 _outputFrequencyHz;  // {8000, 16000, 32000}
+    WebRtc_Word16 _oldOutputLow[2];     // Data needed for oscillator model
+    WebRtc_Word16 _oldOutputHigh[2];    // Data needed for oscillator model
+    WebRtc_Word16 _frameLengthSamples;  // {80, 160, 320}
+    WebRtc_Word32 _remainingSamples;
+    WebRtc_Word16 _eventCode;           // [0, 15]
+    WebRtc_Word16 _attenuationDb;       // [0, 36]
+    WebRtc_Word32 _lengthMs;
+    bool _reinit;  // 'true' if the oscillator should be reinit for next event
+    bool _playing;
+    WebRtc_UWord32 _delaySinceLastToneMS; // time since last generated tone [ms]
+};
+
+}   // namespace webrtc
+
+#endif // #ifndef WEBRTC_VOICE_ENGINE_DTMF_INBAND_H
diff --git a/src/voice_engine/dtmf_inband_queue.cc b/src/voice_engine/dtmf_inband_queue.cc
new file mode 100644
index 0000000..b81d827
--- /dev/null
+++ b/src/voice_engine/dtmf_inband_queue.cc
@@ -0,0 +1,88 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "dtmf_inband_queue.h"
+#include "trace.h"
+
+namespace webrtc {
+
+DtmfInbandQueue::DtmfInbandQueue(const WebRtc_Word32 id):
+    _id(id),
+    _DtmfCritsect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _nextEmptyIndex(0)
+{
+    memset(_DtmfKey,0, sizeof(_DtmfKey));
+    memset(_DtmfLen,0, sizeof(_DtmfLen));
+    memset(_DtmfLevel,0, sizeof(_DtmfLevel));
+}
+
+DtmfInbandQueue::~DtmfInbandQueue()
+{
+    delete &_DtmfCritsect;
+}
+
+int
+DtmfInbandQueue::AddDtmf(WebRtc_UWord8 key,
+                         WebRtc_UWord16 len,
+                         WebRtc_UWord8 level)
+{
+    CriticalSectionScoped lock(&_DtmfCritsect);
+
+    if (_nextEmptyIndex >= kDtmfInbandMax)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_id,-1),
+                   "DtmfInbandQueue::AddDtmf() unable to add Dtmf tone");
+        return -1;
+    }
+    WebRtc_Word32 index = _nextEmptyIndex;
+    _DtmfKey[index] = key;
+    _DtmfLen[index] = len;
+    _DtmfLevel[index] = level;
+    _nextEmptyIndex++;
+    return 0;
+}
+
+WebRtc_Word8
+DtmfInbandQueue::NextDtmf(WebRtc_UWord16* len, WebRtc_UWord8* level)
+{
+    CriticalSectionScoped lock(&_DtmfCritsect);
+
+    if(!PendingDtmf())
+    {
+        return -1;
+    }
+    WebRtc_Word8 nextDtmf = _DtmfKey[0];
+    *len=_DtmfLen[0];
+    *level=_DtmfLevel[0];
+
+    memmove(&(_DtmfKey[0]), &(_DtmfKey[1]),
+            _nextEmptyIndex*sizeof(WebRtc_UWord8));
+    memmove(&(_DtmfLen[0]), &(_DtmfLen[1]),
+            _nextEmptyIndex*sizeof(WebRtc_UWord16));
+    memmove(&(_DtmfLevel[0]), &(_DtmfLevel[1]),
+            _nextEmptyIndex*sizeof(WebRtc_UWord8));
+
+    _nextEmptyIndex--;
+    return nextDtmf;
+}
+
+bool 
+DtmfInbandQueue::PendingDtmf()
+{
+    return(_nextEmptyIndex>0);        
+}
+
+void 
+DtmfInbandQueue::ResetDtmf()
+{
+    _nextEmptyIndex = 0;
+}
+
+}  // namespace webrtc
diff --git a/src/voice_engine/dtmf_inband_queue.h b/src/voice_engine/dtmf_inband_queue.h
new file mode 100644
index 0000000..b3bd39e
--- /dev/null
+++ b/src/voice_engine/dtmf_inband_queue.h
@@ -0,0 +1,52 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_DTMF_INBAND_QUEUE_H
+#define WEBRTC_VOICE_ENGINE_DTMF_INBAND_QUEUE_H
+
+#include "critical_section_wrapper.h"
+#include "typedefs.h"
+#include "voice_engine_defines.h"
+
+
+namespace webrtc {
+
+class DtmfInbandQueue
+{
+public:
+
+    DtmfInbandQueue(const WebRtc_Word32 id);
+
+    virtual ~DtmfInbandQueue();
+
+    int AddDtmf(WebRtc_UWord8 DtmfKey,
+                WebRtc_UWord16 len,
+                WebRtc_UWord8 level);
+
+    WebRtc_Word8 NextDtmf(WebRtc_UWord16* len, WebRtc_UWord8* level);
+
+    bool PendingDtmf();
+
+    void ResetDtmf();
+
+private:
+    enum {kDtmfInbandMax = 20};
+
+    WebRtc_Word32 _id;
+    CriticalSectionWrapper& _DtmfCritsect;
+    WebRtc_UWord8 _nextEmptyIndex;
+    WebRtc_UWord8 _DtmfKey[kDtmfInbandMax];
+    WebRtc_UWord16 _DtmfLen[kDtmfInbandMax];
+    WebRtc_UWord8 _DtmfLevel[kDtmfInbandMax];
+};
+
+}   // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_DTMF_INBAND_QUEUE_H
diff --git a/src/voice_engine/include/mock/mock_voe_connection_observer.h b/src/voice_engine/include/mock/mock_voe_connection_observer.h
new file mode 100644
index 0000000..62e572e
--- /dev/null
+++ b/src/voice_engine/include/mock/mock_voe_connection_observer.h
@@ -0,0 +1,26 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef MOCK_VOE_CONNECTION_OBSERVER_H_
+#define MOCK_VOE_CONNECTION_OBSERVER_H_
+
+#include "voice_engine/include/voe_network.h"
+
+namespace webrtc {
+
+class MockVoeConnectionObserver : public VoEConnectionObserver {
+ public:
+  MOCK_METHOD2(OnPeriodicDeadOrAlive, void(const int channel,
+                                           const bool alive));
+};
+
+}
+
+#endif  // MOCK_VOE_CONNECTION_OBSERVER_H_
diff --git a/src/voice_engine/include/mock/mock_voe_observer.h b/src/voice_engine/include/mock/mock_voe_observer.h
new file mode 100644
index 0000000..c01320d
--- /dev/null
+++ b/src/voice_engine/include/mock/mock_voe_observer.h
@@ -0,0 +1,29 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_MOCK_VOE_OBSERVER_H_
+#define WEBRTC_VOICE_ENGINE_MOCK_VOE_OBSERVER_H_
+
+#include "gmock/gmock.h"
+#include "voice_engine/include/voe_base.h"
+
+namespace webrtc {
+
+class MockVoEObserver: public VoiceEngineObserver {
+ public:
+  MockVoEObserver() {}
+  virtual ~MockVoEObserver() {}
+
+  MOCK_METHOD2(CallbackOnError, void(const int channel, const int error_code));
+};
+
+}
+
+#endif  // WEBRTC_VOICE_ENGINE_MOCK_VOE_OBSERVER_H_
diff --git a/src/voice_engine/include/voe_audio_processing.h b/src/voice_engine/include/voe_audio_processing.h
new file mode 100644
index 0000000..4965d33
--- /dev/null
+++ b/src/voice_engine/include/voe_audio_processing.h
@@ -0,0 +1,237 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - Noise Suppression (NS).
+//  - Automatic Gain Control (AGC).
+//  - Echo Control (EC).
+//  - Receiving side VAD, NS and AGC.
+//  - Measurements of instantaneous speech, noise and echo levels.
+//  - Generation of AP debug recordings.
+//  - Detection of keyboard typing which can disrupt a voice conversation.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface();
+//  VoEAudioProcessing* ap = VoEAudioProcessing::GetInterface(voe);
+//  base->Init();
+//  ap->SetEcStatus(true, kAgcAdaptiveAnalog);
+//  ...
+//  base->Terminate();
+//  base->Release();
+//  ap->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_AUDIO_PROCESSING_H
+#define WEBRTC_VOICE_ENGINE_VOE_AUDIO_PROCESSING_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+// VoERxVadCallback
+class WEBRTC_DLLEXPORT VoERxVadCallback
+{
+public:
+    virtual void OnRxVad(int channel, int vadDecision) = 0;
+
+protected:
+    virtual ~VoERxVadCallback() {}
+};
+
+// VoEAudioProcessing
+class WEBRTC_DLLEXPORT VoEAudioProcessing
+{
+public:
+    // Factory for the VoEAudioProcessing sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoEAudioProcessing* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoEAudioProcessing sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Sets Noise Suppression (NS) status and mode.
+    // The NS reduces noise in the microphone signal.
+    virtual int SetNsStatus(bool enable, NsModes mode = kNsUnchanged) = 0;
+
+    // Gets the NS status and mode.
+    virtual int GetNsStatus(bool& enabled, NsModes& mode) = 0;
+
+    // Sets the Automatic Gain Control (AGC) status and mode.
+    // The AGC adjusts the microphone signal to an appropriate level.
+    virtual int SetAgcStatus(bool enable, AgcModes mode = kAgcUnchanged) = 0;
+
+    // Gets the AGC status and mode.
+    virtual int GetAgcStatus(bool& enabled, AgcModes& mode) = 0;
+
+    // Sets the AGC configuration.
+    // Should only be used in situations where the working environment
+    // is well known.
+    virtual int SetAgcConfig(const AgcConfig config) = 0;
+
+    // Gets the AGC configuration.
+    virtual int GetAgcConfig(AgcConfig& config) = 0;
+
+    // Sets the Echo Control (EC) status and mode.
+    // The EC mitigates acoustic echo where a user can hear their own
+    // speech repeated back due to an acoustic coupling between the
+    // speaker and the microphone at the remote end.
+    virtual int SetEcStatus(bool enable, EcModes mode = kEcUnchanged) = 0;
+
+    // Gets the EC status and mode.
+    virtual int GetEcStatus(bool& enabled, EcModes& mode) = 0;
+
+    // Enables the compensation of clock drift between the capture and render
+    // streams by the echo canceller (i.e. only using EcMode==kEcAec). It will
+    // only be enabled if supported on the current platform; otherwise an error
+    // will be returned. Check if the platform is supported by calling
+    // |DriftCompensationSupported()|.
+    virtual int EnableDriftCompensation(bool enable) = 0;
+    virtual bool DriftCompensationEnabled() = 0;
+    static bool DriftCompensationSupported();
+
+    // Sets a delay |offset| in ms to add to the system delay reported by the
+    // OS, which is used by the AEC to synchronize far- and near-end streams.
+    // In some cases a system may introduce a delay which goes unreported by the
+    // OS, but which is known to the user. This method can be used to compensate
+    // for the unreported delay.
+    virtual void SetDelayOffsetMs(int offset) = 0;
+    virtual int DelayOffsetMs() = 0;
+
+    // Modifies settings for the AEC designed for mobile devices (AECM).
+    virtual int SetAecmMode(AecmModes mode = kAecmSpeakerphone,
+                            bool enableCNG = true) = 0;
+
+    // Gets settings for the AECM.
+    virtual int GetAecmMode(AecmModes& mode, bool& enabledCNG) = 0;
+
+    // Enables a high pass filter on the capture signal. This removes DC bias
+    // and low-frequency noise. Recommended to be enabled.
+    virtual int EnableHighPassFilter(bool enable) = 0;
+    virtual bool IsHighPassFilterEnabled() = 0;
+
+    // Sets status and mode of the receiving-side (Rx) NS.
+    // The Rx NS reduces noise in the received signal for the specified
+    // |channel|. Intended for advanced usage only.
+    virtual int SetRxNsStatus(int channel,
+                              bool enable,
+                              NsModes mode = kNsUnchanged) = 0;
+
+    // Gets status and mode of the receiving-side NS.
+    virtual int GetRxNsStatus(int channel,
+                              bool& enabled,
+                              NsModes& mode) = 0;
+
+    // Sets status and mode of the receiving-side (Rx) AGC.
+    // The Rx AGC adjusts the received signal to an appropriate level
+    // for the specified |channel|. Intended for advanced usage only.
+    virtual int SetRxAgcStatus(int channel,
+                               bool enable,
+                               AgcModes mode = kAgcUnchanged) = 0;
+
+    // Gets status and mode of the receiving-side AGC.
+    virtual int GetRxAgcStatus(int channel,
+                               bool& enabled,
+                               AgcModes& mode) = 0;
+
+    // Modifies the AGC configuration on the receiving side for the
+    // specified |channel|.
+    virtual int SetRxAgcConfig(int channel, const AgcConfig config) = 0;
+
+    // Gets the AGC configuration on the receiving side.
+    virtual int GetRxAgcConfig(int channel, AgcConfig& config) = 0;
+
+    // Registers a VoERxVadCallback |observer| instance and enables Rx VAD
+    // notifications for the specified |channel|.
+    virtual int RegisterRxVadObserver(int channel,
+                                      VoERxVadCallback &observer) = 0;
+
+    // Deregisters the VoERxVadCallback |observer| and disables Rx VAD
+    // notifications for the specified |channel|.
+    virtual int DeRegisterRxVadObserver(int channel) = 0;
+
+    // Gets the VAD/DTX activity for the specified |channel|.
+    // The returned value is 1 if frames of audio contains speech
+    // and 0 if silence. The output is always 1 if VAD is disabled.
+    virtual int VoiceActivityIndicator(int channel) = 0;
+
+    // Enables or disables the possibility to retrieve echo metrics and delay
+    // logging values during an active call. The metrics are only supported in
+    // AEC.
+    virtual int SetEcMetricsStatus(bool enable) = 0;
+
+    // Gets the current EC metric status.
+    virtual int GetEcMetricsStatus(bool& enabled) = 0;
+
+    // Gets the instantaneous echo level metrics.
+    virtual int GetEchoMetrics(int& ERL, int& ERLE, int& RERL, int& A_NLP) = 0;
+
+    // Gets the EC internal |delay_median| and |delay_std| in ms between
+    // near-end and far-end. The values are calculated over the time period
+    // since the last GetEcDelayMetrics() call.
+    virtual int GetEcDelayMetrics(int& delay_median, int& delay_std) = 0;
+
+    // Enables recording of Audio Processing (AP) debugging information.
+    // The file can later be used for off-line analysis of the AP performance.
+    virtual int StartDebugRecording(const char* fileNameUTF8) = 0;
+
+    // Disables recording of AP debugging information.
+    virtual int StopDebugRecording() = 0;
+
+    // Enables or disables detection of disturbing keyboard typing.
+    // An error notification will be given as a callback upon detection.
+    virtual int SetTypingDetectionStatus(bool enable) = 0;
+
+    // Gets the current typing detection status.
+    virtual int GetTypingDetectionStatus(bool& enabled) = 0;
+
+    // Reports the lower of:
+    // * Time in seconds since the last typing event.
+    // * Time in seconds since the typing detection was enabled.
+    // Returns error if typing detection is disabled.
+    virtual int TimeSinceLastTyping(int &seconds) = 0;
+
+    // Optional setting of typing detection parameters
+    // Parameter with value == 0 will be ignored
+    // and left with default config.
+    // TODO(niklase) Remove default argument as soon as libJingle is updated!
+    virtual int SetTypingDetectionParameters(int timeWindow,
+                                             int costPerTyping,
+                                             int reportingThreshold,
+                                             int penaltyDecay,
+                                             int typeEventDelay = 0) = 0;
+
+    // Swaps the capture-side left and right audio channels when enabled. It
+    // only has an effect when using a stereo send codec. The setting is
+    // persistent; it will be applied whenever a stereo send codec is enabled.
+    //
+    // The swap is applied only to the captured audio, and not mixed files. The
+    // swap will appear in file recordings and when accessing audio through the
+    // external media interface.
+    virtual void EnableStereoChannelSwapping(bool enable) = 0;
+    virtual bool IsStereoChannelSwappingEnabled() = 0;
+
+protected:
+    VoEAudioProcessing() {}
+    virtual ~VoEAudioProcessing() {}
+};
+
+}  //  namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_VOE_AUDIO_PROCESSING_H
diff --git a/src/voice_engine/include/voe_base.h b/src/voice_engine/include/voe_base.h
new file mode 100644
index 0000000..28f465e
--- /dev/null
+++ b/src/voice_engine/include/voe_base.h
@@ -0,0 +1,211 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - Enables full duplex VoIP sessions via RTP using G.711 (mu-Law or A-Law).
+//  - Initialization and termination.
+//  - Trace information on text files or via callbacks.
+//  - Multi-channel support (mixing, sending to multiple destinations etc.).
+//  - Call setup (port and address) for receiving and sending sides.
+//
+// To support other codecs than G.711, the VoECodec sub-API must be utilized.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface(voe);
+//  base->Init();
+//  int ch = base->CreateChannel();
+//  base->StartPlayout(ch);
+//  ...
+//  base->DeleteChannel(ch);
+//  base->Terminate();
+//  base->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_BASE_H
+#define WEBRTC_VOICE_ENGINE_VOE_BASE_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class AudioDeviceModule;
+
+const int kVoEDefault = -1;
+
+// VoiceEngineObserver
+class WEBRTC_DLLEXPORT VoiceEngineObserver
+{
+public:
+    // This method will be called after the occurrence of any runtime error
+    // code, or warning notification, when the observer interface has been
+    // installed using VoEBase::RegisterVoiceEngineObserver().
+    virtual void CallbackOnError(const int channel, const int errCode) = 0;
+
+protected:
+    virtual ~VoiceEngineObserver() {}
+};
+
+// VoiceEngine
+class WEBRTC_DLLEXPORT VoiceEngine
+{
+public:
+    // Creates a VoiceEngine object, which can then be used to acquire
+    // sub-APIs. Returns NULL on failure.
+    static VoiceEngine* Create();
+
+    // Deletes a created VoiceEngine object and releases the utilized resources.
+    // Note that if there are outstanding references held via other interfaces,
+    // the voice engine instance will not actually be deleted until those
+    // references have been released.
+    static bool Delete(VoiceEngine*& voiceEngine);
+
+    // Specifies the amount and type of trace information which will be
+    // created by the VoiceEngine.
+    static int SetTraceFilter(const unsigned int filter);
+
+    // Sets the name of the trace file and enables non-encrypted trace messages.
+    static int SetTraceFile(const char* fileNameUTF8,
+                            const bool addFileCounter = false);
+
+    // Installs the TraceCallback implementation to ensure that the user
+    // receives callbacks for generated trace messages.
+    static int SetTraceCallback(TraceCallback* callback);
+
+    static int SetAndroidObjects(void* javaVM, void* env, void* context);
+
+protected:
+    VoiceEngine() {}
+    virtual ~VoiceEngine() {}
+};
+
+// VoEBase
+class WEBRTC_DLLEXPORT VoEBase
+{
+public:
+    // Factory for the VoEBase sub-API. Increases an internal reference
+    // counter if successful. Returns NULL if the API is not supported or if
+    // construction fails.
+    static VoEBase* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoEBase sub-API and decreases an internal reference
+    // counter. Returns the new reference count. This value should be zero
+    // for all sub-API:s before the VoiceEngine object can be safely deleted.
+    virtual int Release() = 0;
+
+    // Installs the observer class to enable runtime error control and
+    // warning notifications.
+    virtual int RegisterVoiceEngineObserver(VoiceEngineObserver& observer) = 0;
+
+    // Removes and disables the observer class for runtime error control
+    // and warning notifications.
+    virtual int DeRegisterVoiceEngineObserver() = 0;
+
+    // Initiates all common parts of the VoiceEngine; e.g. all
+    // encoders/decoders, the sound card and core receiving components.
+    // This method also makes it possible to install a user-defined
+    // external Audio Device Module (ADM) which implements all the audio
+    // layer functionality in a separate (reference counted) module.
+    virtual int Init(AudioDeviceModule* external_adm = NULL) = 0;
+
+    // Terminates all VoiceEngine functions and releses allocated resources.
+    virtual int Terminate() = 0;
+
+    // Retrieves the maximum number of channels that can be created.
+    virtual int MaxNumOfChannels() = 0;
+
+    // Creates a new channel and allocates the required resources for it.
+    virtual int CreateChannel() = 0;
+
+    // Deletes an existing channel and releases the utilized resources.
+    virtual int DeleteChannel(int channel) = 0;
+
+    // Sets the local receiver port and address for a specified
+    // |channel| number.
+    virtual int SetLocalReceiver(int channel, int port,
+                                 int RTCPport = kVoEDefault,
+                                 const char ipAddr[64] = NULL,
+                                 const char multiCastAddr[64] = NULL) = 0;
+
+    // Gets the local receiver port and address for a specified
+    // |channel| number.
+    virtual int GetLocalReceiver(int channel, int& port, int& RTCPport,
+                                 char ipAddr[64]) = 0;
+
+    // Sets the destination port and address for a specified |channel| number.
+    virtual int SetSendDestination(int channel, int port,
+                                   const char ipAddr[64],
+                                   int sourcePort = kVoEDefault,
+                                   int RTCPport = kVoEDefault) = 0;
+
+    // Gets the destination port and address for a specified |channel| number.
+    virtual int GetSendDestination(int channel, int& port, char ipAddr[64],
+                                   int& sourcePort, int& RTCPport) = 0;
+
+    // Prepares and initiates the VoiceEngine for reception of
+    // incoming RTP/RTCP packets on the specified |channel|.
+    virtual int StartReceive(int channel) = 0;
+
+    // Stops receiving incoming RTP/RTCP packets on the specified |channel|.
+    virtual int StopReceive(int channel) = 0;
+
+    // Starts forwarding the packets to the mixer/soundcard for a
+    // specified |channel|.
+    virtual int StartPlayout(int channel) = 0;
+
+    // Stops forwarding the packets to the mixer/soundcard for a
+    // specified |channel|.
+    virtual int StopPlayout(int channel) = 0;
+
+    // Starts sending packets to an already specified IP address and
+    // port number for a specified |channel|.
+    virtual int StartSend(int channel) = 0;
+
+    // Stops sending packets from a specified |channel|.
+    virtual int StopSend(int channel) = 0;
+
+    // Gets the version information for VoiceEngine and its components.
+    virtual int GetVersion(char version[1024]) = 0;
+
+    // Gets the last VoiceEngine error code.
+    virtual int LastError() = 0;
+
+
+    // Stops or resumes playout and transmission on a temporary basis.
+    virtual int SetOnHoldStatus(int channel, bool enable,
+                                OnHoldModes mode = kHoldSendAndPlay) = 0;
+
+    // Gets the current playout and transmission status.
+    virtual int GetOnHoldStatus(int channel, bool& enabled,
+                                OnHoldModes& mode) = 0;
+
+    // Sets the NetEQ playout mode for a specified |channel| number.
+    virtual int SetNetEQPlayoutMode(int channel, NetEqModes mode) = 0;
+
+    // Gets the NetEQ playout mode for a specified |channel| number.
+    virtual int GetNetEQPlayoutMode(int channel, NetEqModes& mode) = 0;
+
+    // Sets the NetEQ background noise mode for a specified |channel| number.
+    virtual int SetNetEQBGNMode(int channel, NetEqBgnModes mode) = 0;
+
+    // Gets the NetEQ background noise mode for a specified |channel| number.
+    virtual int GetNetEQBGNMode(int channel, NetEqBgnModes& mode) = 0;
+
+protected:
+    VoEBase() {}
+    virtual ~VoEBase() {}
+};
+
+} // namespace webrtc
+
+#endif  //  WEBRTC_VOICE_ENGINE_VOE_BASE_H
diff --git a/src/voice_engine/include/voe_call_report.h b/src/voice_engine/include/voe_call_report.h
new file mode 100644
index 0000000..c4d3abd
--- /dev/null
+++ b/src/voice_engine/include/voe_call_report.h
@@ -0,0 +1,87 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - Long-term speech and noise level metrics.
+//  - Long-term echo metric statistics.
+//  - Round Trip Time (RTT) statistics.
+//  - Dead-or-Alive connection summary.
+//  - Generation of call reports to text files.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface(voe);
+//  VoECallReport report = VoECallReport::GetInterface(voe);
+//  base->Init();
+//  LevelStatistics stats;
+//  report->GetSpeechAndNoiseSummary(stats);
+//  ...
+//  base->Terminate();
+//  base->Release();
+//  report->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_CALL_REPORT_H
+#define WEBRTC_VOICE_ENGINE_VOE_CALL_REPORT_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+// VoECallReport
+class WEBRTC_DLLEXPORT VoECallReport
+{
+public:
+    // Factory for the VoECallReport sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoECallReport* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoECallReport sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Performs a combined reset of all components involved in generating
+    // the call report for a specified |channel|. Pass in -1 to reset
+    // all channels.
+    virtual int ResetCallReportStatistics(int channel) = 0;
+
+    // Gets minimum, maximum and average levels for long-term echo metrics.
+    virtual int GetEchoMetricSummary(EchoStatistics& stats) = 0;
+
+    // Gets minimum, maximum and average levels for Round Trip Time (RTT)
+    // measurements.
+    virtual int GetRoundTripTimeSummary(int channel,
+                                        StatVal& delaysMs) = 0;
+
+    // Gets the total amount of dead and alive connection detections
+    // during a VoIP session.
+    virtual int GetDeadOrAliveSummary(int channel, int& numOfDeadDetections,
+                                      int& numOfAliveDetections) = 0;
+
+    // Creates a text file in ASCII format, which contains a summary
+    // of all the statistics that can be obtained by the call report sub-API.
+    virtual int WriteReportToFile(const char* fileNameUTF8) = 0;
+
+protected:
+    VoECallReport() { }
+    virtual ~VoECallReport() { }
+};
+
+}  // namespace webrtc
+
+#endif  //  WEBRTC_VOICE_ENGINE_VOE_CALL_REPORT_H
diff --git a/src/voice_engine/include/voe_codec.h b/src/voice_engine/include/voe_codec.h
new file mode 100644
index 0000000..37f8f68
--- /dev/null
+++ b/src/voice_engine/include/voe_codec.h
@@ -0,0 +1,135 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - Support of non-default codecs (e.g. iLBC, iSAC, etc.).
+//  - Voice Activity Detection (VAD) on a per channel basis.
+//  - Possibility to specify how to map received payload types to codecs.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface(voe);
+//  VoECodec* codec = VoECodec::GetInterface(voe);
+//  base->Init();
+//  int num_of_codecs = codec->NumOfCodecs()
+//  ...
+//  base->Terminate();
+//  base->Release();
+//  codec->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_CODEC_H
+#define WEBRTC_VOICE_ENGINE_VOE_CODEC_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+class WEBRTC_DLLEXPORT VoECodec
+{
+public:
+    // Factory for the VoECodec sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoECodec* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoECodec sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Gets the number of supported codecs.
+    virtual int NumOfCodecs() = 0;
+
+    // Get the |codec| information for a specified list |index|.
+    virtual int GetCodec(int index, CodecInst& codec) = 0;
+
+    // Sets the |codec| for the |channel| to be used for sending.
+    virtual int SetSendCodec(int channel, const CodecInst& codec) = 0;
+
+    // Gets the |codec| parameters for the sending codec on a specified
+    // |channel|.
+    virtual int GetSendCodec(int channel, CodecInst& codec) = 0;
+
+    // Gets the currently received |codec| for a specific |channel|.
+    virtual int GetRecCodec(int channel, CodecInst& codec) = 0;
+
+    // Sets the initial values of target rate and frame size for iSAC
+    // for a specified |channel|. This API is only valid if iSAC is setup
+    // to run in channel-adaptive mode
+    virtual int SetISACInitTargetRate(int channel, int rateBps,
+                                      bool useFixedFrameSize = false) = 0;
+
+    // Sets the maximum allowed iSAC rate which the codec may not exceed
+    // for a single packet for the specified |channel|. The maximum rate is
+    // defined as payload size per frame size in bits per second.
+    virtual int SetISACMaxRate(int channel, int rateBps) = 0;
+
+    // Sets the maximum allowed iSAC payload size for a specified |channel|.
+    // The maximum value is set independently of the frame size, i.e.
+    // 30 ms and 60 ms packets have the same limit.
+    virtual int SetISACMaxPayloadSize(int channel, int sizeBytes) = 0;
+
+    // Sets the dynamic payload type number for a particular |codec| or
+    // disables (ignores) a codec for receiving. For instance, when receiving
+    // an invite from a SIP-based client, this function can be used to change
+    // the dynamic payload type number to match that in the INVITE SDP-
+    // message. The utilized parameters in the |codec| structure are:
+    // plname, plfreq, pltype and channels.
+    virtual int SetRecPayloadType(int channel, const CodecInst& codec) = 0;
+
+    // Gets the actual payload type that is set for receiving a |codec| on a
+    // |channel|. The value it retrieves will either be the default payload
+    // type, or a value earlier set with SetRecPayloadType().
+    virtual int GetRecPayloadType(int channel, CodecInst& codec) = 0;
+
+    // Sets the payload |type| for the sending of SID-frames with background
+    // noise estimation during silence periods detected by the VAD.
+    virtual int SetSendCNPayloadType(
+        int channel, int type, PayloadFrequencies frequency = kFreq16000Hz) = 0;
+
+
+    // Sets the VAD/DTX (silence suppression) status and |mode| for a
+    // specified |channel|. Disabling VAD (through |enable|) will also disable
+    // DTX; it is not necessary to explictly set |disableDTX| in this case.
+    virtual int SetVADStatus(int channel, bool enable,
+                             VadModes mode = kVadConventional,
+                             bool disableDTX = false) = 0;
+
+    // Gets the VAD/DTX status and |mode| for a specified |channel|.
+    virtual int GetVADStatus(int channel, bool& enabled, VadModes& mode,
+                             bool& disabledDTX) = 0;
+
+    // Not supported
+    virtual int SetAMREncFormat(int channel, AmrMode mode) = 0;
+
+    // Not supported
+    virtual int SetAMRDecFormat(int channel, AmrMode mode) = 0;
+
+    // Not supported
+    virtual int SetAMRWbEncFormat(int channel, AmrMode mode) = 0;
+
+    // Not supported
+    virtual int SetAMRWbDecFormat(int channel, AmrMode mode) = 0;
+
+protected:
+    VoECodec() {}
+    virtual ~VoECodec() {}
+};
+
+} // namespace webrtc
+
+#endif  //  WEBRTC_VOICE_ENGINE_VOE_CODEC_H
diff --git a/src/voice_engine/include/voe_dtmf.h b/src/voice_engine/include/voe_dtmf.h
new file mode 100644
index 0000000..3ed1749
--- /dev/null
+++ b/src/voice_engine/include/voe_dtmf.h
@@ -0,0 +1,148 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - Telephone event transmission.
+//  - DTMF tone generation.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface(voe);
+//  VoEDtmf* dtmf  = VoEDtmf::GetInterface(voe);
+//  base->Init();
+//  int ch = base->CreateChannel();
+//  ...
+//  dtmf->SendTelephoneEvent(ch, 7);
+//  ...
+//  base->DeleteChannel(ch);
+//  base->Terminate();
+//  base->Release();
+//  dtmf->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_DTMF_H
+#define WEBRTC_VOICE_ENGINE_VOE_DTMF_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+// VoETelephoneEventObserver
+class WEBRTC_DLLEXPORT VoETelephoneEventObserver
+{
+public:
+    // This method will be called after the detection of an inband
+    // telephone event. The event code is given as output in the
+    // |eventCode| parameter.
+    virtual void OnReceivedTelephoneEventInband(int channel,
+                                                int eventCode,
+                                                bool endOfEvent) = 0;
+
+    // This method will be called after the detection of an out-of-band
+    // telephone event. The event code is given as output in the
+    // |eventCode| parameter.
+    virtual void OnReceivedTelephoneEventOutOfBand(
+        int channel,
+        int eventCode,
+        bool endOfEvent) = 0;
+
+protected:
+    virtual ~VoETelephoneEventObserver() {}
+};
+
+// VoEDtmf
+class WEBRTC_DLLEXPORT VoEDtmf
+{
+public:
+    
+    // Factory for the VoEDtmf sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoEDtmf* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoEDtmf sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Sends telephone events either in-band or out-of-band.
+    virtual int SendTelephoneEvent(int channel, int eventCode,
+                                   bool outOfBand = true, int lengthMs = 160,
+                                   int attenuationDb = 10) = 0;
+
+   
+    // Sets the dynamic payload |type| that should be used for telephone
+    // events.
+    virtual int SetSendTelephoneEventPayloadType(int channel,
+                                                 unsigned char type) = 0;
+
+  
+    // Gets the currently set dynamic payload |type| for telephone events.
+    virtual int GetSendTelephoneEventPayloadType(int channel,
+                                                 unsigned char& type) = 0;
+
+    // Enables or disables local tone playout for received DTMF events
+    // out-of-band.
+    virtual int SetDtmfPlayoutStatus(int channel, bool enable) = 0;
+
+    // Gets the DTMF playout status.
+    virtual int GetDtmfPlayoutStatus(int channel, bool& enabled) = 0;
+
+    // Toogles DTMF feedback state: when a DTMF tone is sent, the same tone
+    // is played out on the speaker.
+    virtual int SetDtmfFeedbackStatus(bool enable,
+                                      bool directFeedback = false) = 0;
+
+    // Gets the DTMF feedback status.
+    virtual int GetDtmfFeedbackStatus(bool& enabled, bool& directFeedback) = 0;
+
+    // Plays a DTMF feedback tone (only locally).
+    virtual int PlayDtmfTone(int eventCode, int lengthMs = 200,
+                             int attenuationDb = 10) = 0;
+
+    // Starts playing out a DTMF feedback tone locally.
+    // The tone will be played out until the corresponding stop function
+    // is called.
+    virtual int StartPlayingDtmfTone(int eventCode,
+                                     int attenuationDb = 10) = 0;
+
+    // Stops playing out a DTMF feedback tone locally.
+    virtual int StopPlayingDtmfTone() = 0;
+
+    // Installs an instance of a VoETelephoneEventObserver derived class and
+    // activates detection of telephone events for the specified |channel|.
+    virtual int RegisterTelephoneEventDetection(
+        int channel, TelephoneEventDetectionMethods detectionMethod,
+        VoETelephoneEventObserver& observer) = 0;
+
+    // Removes an instance of a VoETelephoneEventObserver derived class and
+    // disables detection of telephone events for the specified |channel|.
+    virtual int DeRegisterTelephoneEventDetection(int channel) = 0;
+
+    // Gets the current telephone-event detection status for a specified
+    // |channel|.
+    virtual int GetTelephoneEventDetectionStatus(
+        int channel, bool& enabled,
+        TelephoneEventDetectionMethods& detectionMethod) = 0;
+
+protected:
+    VoEDtmf() {}
+    virtual ~VoEDtmf() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_VOE_DTMF_H
diff --git a/src/voice_engine/include/voe_encryption.h b/src/voice_engine/include/voe_encryption.h
new file mode 100644
index 0000000..ae3f373
--- /dev/null
+++ b/src/voice_engine/include/voe_encryption.h
@@ -0,0 +1,81 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - External encryption and decryption.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEEncryption* encrypt  = VoEEncryption::GetInterface(voe);
+//  ...
+//  encrypt->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_ENCRYPTION_H
+#define WEBRTC_VOICE_ENGINE_VOE_ENCRYPTION_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+class WEBRTC_DLLEXPORT VoEEncryption
+{
+public:
+    // Factory for the VoEEncryption sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoEEncryption* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoEEncryption sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Installs an Encryption instance and enables external encryption
+    // for the selected |channel|.
+    virtual int RegisterExternalEncryption(
+        int channel, Encryption& encryption) = 0;
+
+    // Removes an Encryption instance and disables external encryption
+    // for the selected |channel|.
+    virtual int DeRegisterExternalEncryption(int channel) = 0;
+
+    // Not supported
+    virtual int EnableSRTPSend(int channel, CipherTypes cipherType,
+        int cipherKeyLength, AuthenticationTypes authType, int authKeyLength,
+        int authTagLength, SecurityLevels level, const unsigned char key[30],
+        bool useForRTCP = false) = 0;
+
+    // Not supported
+    virtual int DisableSRTPSend(int channel) = 0;
+
+    // Not supported
+    virtual int EnableSRTPReceive(int channel, CipherTypes cipherType,
+        int cipherKeyLength, AuthenticationTypes authType, int authKeyLength,
+        int authTagLength, SecurityLevels level, const unsigned char key[30],
+        bool useForRTCP = false) = 0;
+
+    // Not supported
+    virtual int DisableSRTPReceive(int channel) = 0;
+
+protected:
+    VoEEncryption() {}
+    virtual ~VoEEncryption() {}
+};
+
+}  // namespace webrtc
+
+#endif  //  WEBRTC_VOICE_ENGINE_VOE_ENCRYPTION_H
diff --git a/src/voice_engine/include/voe_errors.h b/src/voice_engine/include/voe_errors.h
new file mode 100644
index 0000000..cc05970
--- /dev/null
+++ b/src/voice_engine/include/voe_errors.h
@@ -0,0 +1,162 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_ERRORS_H
+#define WEBRTC_VOICE_ENGINE_VOE_ERRORS_H
+
+// Warnings
+#define VE_PORT_NOT_DEFINED 8001
+#define VE_CHANNEL_NOT_VALID 8002
+#define VE_FUNC_NOT_SUPPORTED 8003
+#define VE_INVALID_LISTNR 8004
+#define VE_INVALID_ARGUMENT 8005
+#define VE_INVALID_PORT_NMBR 8006
+#define VE_INVALID_PLNAME 8007
+#define VE_INVALID_PLFREQ 8008
+#define VE_INVALID_PLTYPE 8009
+#define VE_INVALID_PACSIZE 8010
+#define VE_NOT_SUPPORTED 8011
+#define VE_ALREADY_LISTENING 8012
+#define VE_CHANNEL_NOT_CREATED 8013
+#define VE_MAX_ACTIVE_CHANNELS_REACHED 8014
+#define VE_REC_CANNOT_PREPARE_HEADER 8015
+#define VE_REC_CANNOT_ADD_BUFFER 8016
+#define VE_PLAY_CANNOT_PREPARE_HEADER 8017
+#define VE_ALREADY_SENDING 8018
+#define VE_INVALID_IP_ADDRESS 8019
+#define VE_ALREADY_PLAYING 8020
+#define VE_NOT_ALL_VERSION_INFO 8021
+#define VE_DTMF_OUTOF_RANGE 8022
+#define VE_INVALID_CHANNELS 8023
+#define VE_SET_PLTYPE_FAILED 8024
+#define VE_ENCRYPT_NOT_INITED 8025
+#define VE_NOT_INITED 8026
+#define VE_NOT_SENDING 8027
+#define VE_EXT_TRANSPORT_NOT_SUPPORTED 8028
+#define VE_EXTERNAL_TRANSPORT_ENABLED 8029
+#define VE_STOP_RECORDING_FAILED 8030
+#define VE_INVALID_RATE 8031
+#define VE_INVALID_PACKET 8032
+#define VE_NO_GQOS 8033
+#define VE_INVALID_TIMESTAMP 8034
+#define VE_RECEIVE_PACKET_TIMEOUT 8035
+#define VE_STILL_PLAYING_PREV_DTMF 8036
+#define VE_INIT_FAILED_WRONG_EXPIRY 8037
+#define VE_SENDING 8038
+#define VE_ENABLE_IPV6_FAILED 8039
+#define VE_FUNC_NO_STEREO 8040
+// Range 8041-8080 is not used
+#define VE_FW_TRAVERSAL_ALREADY_INITIALIZED 8081
+#define VE_PACKET_RECEIPT_RESTARTED 8082
+#define VE_NOT_ALL_INFO 8083
+#define VE_CANNOT_SET_SEND_CODEC 8084
+#define VE_CODEC_ERROR 8085
+#define VE_NETEQ_ERROR 8086
+#define VE_RTCP_ERROR 8087
+#define VE_INVALID_OPERATION 8088
+#define VE_CPU_INFO_ERROR 8089
+#define VE_SOUNDCARD_ERROR 8090
+#define VE_SPEECH_LEVEL_ERROR 8091
+#define VE_SEND_ERROR 8092
+#define VE_CANNOT_REMOVE_CONF_CHANNEL 8093
+#define VE_PLTYPE_ERROR 8094
+#define VE_SET_FEC_FAILED 8095
+#define VE_CANNOT_GET_PLAY_DATA 8096
+#define VE_APM_ERROR 8097
+#define VE_RUNTIME_PLAY_WARNING 8098
+#define VE_RUNTIME_REC_WARNING 8099
+#define VE_NOT_PLAYING 8100
+#define VE_SOCKETS_NOT_INITED 8101
+#define VE_CANNOT_GET_SOCKET_INFO 8102
+#define VE_INVALID_MULTICAST_ADDRESS 8103
+#define VE_DESTINATION_NOT_INITED 8104
+#define VE_RECEIVE_SOCKETS_CONFLICT 8105
+#define VE_SEND_SOCKETS_CONFLICT 8106
+#define VE_TYPING_NOISE_WARNING 8107
+#define VE_SATURATION_WARNING 8108
+#define VE_NOISE_WARNING 8109
+#define VE_CANNOT_GET_SEND_CODEC 8110
+#define VE_CANNOT_GET_REC_CODEC 8111
+#define VE_ALREADY_INITED 8112
+
+// Errors causing limited functionality
+#define VE_RTCP_SOCKET_ERROR 9001
+#define VE_MIC_VOL_ERROR 9002
+#define VE_SPEAKER_VOL_ERROR 9003
+#define VE_CANNOT_ACCESS_MIC_VOL 9004
+#define VE_CANNOT_ACCESS_SPEAKER_VOL 9005
+#define VE_GET_MIC_VOL_ERROR 9006
+#define VE_GET_SPEAKER_VOL_ERROR 9007
+#define VE_THREAD_RTCP_ERROR 9008
+#define VE_CANNOT_INIT_APM 9009
+#define VE_SEND_SOCKET_TOS_ERROR 9010
+#define VE_CANNOT_RETRIEVE_DEVICE_NAME 9013
+#define VE_SRTP_ERROR 9014
+// 9015 is not used
+#define VE_INTERFACE_NOT_FOUND 9016
+#define VE_TOS_GQOS_CONFLICT 9017
+#define VE_CANNOT_ADD_CONF_CHANNEL 9018
+#define VE_BUFFER_TOO_SMALL 9019
+#define VE_CANNOT_EXECUTE_SETTING 9020
+#define VE_CANNOT_RETRIEVE_SETTING 9021
+// 9022 is not used
+#define VE_RTP_KEEPALIVE_FAILED 9023
+#define VE_SEND_DTMF_FAILED 9024
+#define VE_CANNOT_RETRIEVE_CNAME 9025
+#define VE_DECRYPTION_FAILED 9026
+#define VE_ENCRYPTION_FAILED 9027
+#define VE_CANNOT_RETRIEVE_RTP_STAT 9028
+#define VE_GQOS_ERROR 9029
+#define VE_BINDING_SOCKET_TO_LOCAL_ADDRESS_FAILED 9030
+#define VE_TOS_INVALID 9031
+#define VE_TOS_ERROR 9032
+#define VE_CANNOT_RETRIEVE_VALUE 9033
+
+// Critical errors that stops voice functionality
+#define VE_PLAY_UNDEFINED_SC_ERR 10001
+#define VE_REC_CANNOT_OPEN_SC 10002
+#define VE_SOCKET_ERROR 10003
+#define VE_MMSYSERR_INVALHANDLE 10004
+#define VE_MMSYSERR_NODRIVER 10005
+#define VE_MMSYSERR_NOMEM 10006
+#define VE_WAVERR_UNPREPARED 10007
+#define VE_WAVERR_STILLPLAYING 10008
+#define VE_UNDEFINED_SC_ERR 10009
+#define VE_UNDEFINED_SC_REC_ERR 10010
+#define VE_THREAD_ERROR 10011
+#define VE_CANNOT_START_RECORDING 10012
+#define VE_PLAY_CANNOT_OPEN_SC 10013
+#define VE_NO_WINSOCK_2 10014
+#define VE_SEND_SOCKET_ERROR 10015
+#define VE_BAD_FILE 10016
+#define VE_EXPIRED_COPY 10017
+#define VE_NOT_AUTHORISED 10018
+#define VE_RUNTIME_PLAY_ERROR 10019
+#define VE_RUNTIME_REC_ERROR 10020
+#define VE_BAD_ARGUMENT 10021
+#define VE_LINUX_API_ONLY 10022
+#define VE_REC_DEVICE_REMOVED 10023
+#define VE_NO_MEMORY 10024
+#define VE_BAD_HANDLE 10025
+#define VE_RTP_RTCP_MODULE_ERROR 10026
+#define VE_AUDIO_CODING_MODULE_ERROR 10027
+#define VE_AUDIO_DEVICE_MODULE_ERROR 10028
+#define VE_CANNOT_START_PLAYOUT 10029
+#define VE_CANNOT_STOP_RECORDING 10030
+#define VE_CANNOT_STOP_PLAYOUT 10031
+#define VE_CANNOT_INIT_CHANNEL 10032
+#define VE_RECV_SOCKET_ERROR 10033
+#define VE_SOCKET_TRANSPORT_MODULE_ERROR 10034
+#define VE_AUDIO_CONF_MIX_MODULE_ERROR 10035
+
+// Warnings for other platforms (reserved range 8061-8080)
+#define VE_IGNORED_FUNCTION 8061
+
+#endif  //  WEBRTC_VOICE_ENGINE_VOE_ERRORS_H
diff --git a/src/voice_engine/include/voe_external_media.h b/src/voice_engine/include/voe_external_media.h
new file mode 100644
index 0000000..50d2d38
--- /dev/null
+++ b/src/voice_engine/include/voe_external_media.h
@@ -0,0 +1,114 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// In some cases it is desirable to use an audio source or sink which may
+// not be available to the VoiceEngine, such as a DV camera. This sub-API
+// contains functions that allow for the use of such external recording
+// sources and playout sinks. It also describes how recorded data, or data
+// to be played out, can be modified outside the VoiceEngine.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface(voe);
+//  VoEMediaProcess media = VoEMediaProcess::GetInterface(voe);
+//  base->Init();
+//  ...
+//  media->SetExternalRecordingStatus(true);
+//  ...
+//  base->Terminate();
+//  base->Release();
+//  media->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_EXTERNAL_MEDIA_H
+#define WEBRTC_VOICE_ENGINE_VOE_EXTERNAL_MEDIA_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+class WEBRTC_DLLEXPORT VoEMediaProcess
+{
+public:
+    // The VoiceEngine user should override the Process() method in a
+    // derived class. Process() will be called when audio is ready to
+    // be processed. The audio can be accessed in several different modes
+    // given by the |type| parameter. The function should modify the
+    // original data and ensure that it is copied back to the |audio10ms|
+    // array. The number of samples in the frame cannot be changed.
+    // The sampling frequency will depend upon the codec used. 
+    // If |isStereo| is true, audio10ms will contain 16-bit PCM data
+    // samples in interleaved stereo format (L0,R0,L1,R1,…):
+    virtual void Process(const int channel, const ProcessingTypes type,
+                         WebRtc_Word16 audio10ms[], const int length,
+                         const int samplingFreq, const bool isStereo) = 0;
+
+protected:
+    virtual ~VoEMediaProcess() {}
+};
+
+class WEBRTC_DLLEXPORT VoEExternalMedia
+{
+public:
+    // Factory for the VoEExternalMedia sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoEExternalMedia* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoEExternalMedia sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Installs a VoEMediaProcess derived instance and activates external
+    // media for the specified |channel| and |type|.
+    virtual int RegisterExternalMediaProcessing(
+        int channel, ProcessingTypes type, VoEMediaProcess& processObject) = 0;
+
+    // Removes the VoEMediaProcess derived instance and deactivates external
+    // media for the specified |channel| and |type|.
+    virtual int DeRegisterExternalMediaProcessing(
+        int channel, ProcessingTypes type) = 0;
+
+    // Toogles state of external recording.
+    virtual int SetExternalRecordingStatus(bool enable) = 0;
+
+    // Toogles state of external playout.
+    virtual int SetExternalPlayoutStatus(bool enable) = 0;
+
+    // This function accepts externally recorded audio. During transmission,
+    // this method should be called at as regular an interval as possible
+    // with frames of corresponding size.
+    virtual int ExternalRecordingInsertData(
+        const WebRtc_Word16 speechData10ms[], int lengthSamples,
+        int samplingFreqHz, int current_delay_ms) = 0;
+
+    // This function gets audio for an external playout sink.
+    // During transmission, this function should be called every ~10 ms
+    // to obtain a new 10 ms frame of audio. The length of the block will
+    // be 160, 320, 440 or 480 samples (for 16, 32, 44 or 48 kHz sampling
+    // rates respectively).
+    virtual int ExternalPlayoutGetData(
+        WebRtc_Word16 speechData10ms[], int samplingFreqHz,
+        int current_delay_ms, int& lengthSamples) = 0;
+
+protected:
+    VoEExternalMedia() {}
+    virtual ~VoEExternalMedia() {}
+};
+
+}  // namespace webrtc
+
+#endif  //  WEBRTC_VOICE_ENGINE_VOE_EXTERNAL_MEDIA_H
diff --git a/src/voice_engine/include/voe_file.h b/src/voice_engine/include/voe_file.h
new file mode 100644
index 0000000..d968dcf
--- /dev/null
+++ b/src/voice_engine/include/voe_file.h
@@ -0,0 +1,184 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - File playback.
+//  - File recording.
+//  - File conversion.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface(voe);
+//  VoEFile* file  = VoEFile::GetInterface(voe);
+//  base->Init();
+//  int ch = base->CreateChannel();
+//  ...
+//  base->StartPlayout(ch);
+//  file->StartPlayingFileAsMicrophone(ch, "data_file_16kHz.pcm", true);
+//  ...
+//  file->StopPlayingFileAsMicrophone(ch);
+//  base->StopPlayout(ch);
+//  ...
+//  base->DeleteChannel(ch);
+//  base->Terminate();
+//  base->Release();
+//  file->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_FILE_H
+#define WEBRTC_VOICE_ENGINE_VOE_FILE_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+class WEBRTC_DLLEXPORT VoEFile
+{
+public:
+    // Factory for the VoEFile sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoEFile* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoEFile sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Starts playing and mixing files with the local speaker signal for
+    // playout.
+    virtual int StartPlayingFileLocally(
+        int channel,
+        const char fileNameUTF8[1024],
+        bool loop = false,
+        FileFormats format = kFileFormatPcm16kHzFile,
+        float volumeScaling = 1.0,
+        int startPointMs = 0,
+        int stopPointMs = 0) = 0;
+
+    // Starts playing and mixing streams with the local speaker signal for
+    // playout.
+    virtual int StartPlayingFileLocally(
+        int channel,
+        InStream* stream,
+        FileFormats format = kFileFormatPcm16kHzFile,
+        float volumeScaling = 1.0,
+        int startPointMs = 0, int stopPointMs = 0) = 0;
+
+    // Stops playback of a file on a specific |channel|.
+    virtual int StopPlayingFileLocally(int channel) = 0;
+
+    // Returns the current file playing state for a specific |channel|.
+    virtual int IsPlayingFileLocally(int channel) = 0;
+
+    // Sets the volume scaling for a speaker file that is already playing.
+    virtual int ScaleLocalFilePlayout(int channel, float scale) = 0;
+
+    // Starts reading data from a file and transmits the data either
+    // mixed with or instead of the microphone signal.
+    virtual int StartPlayingFileAsMicrophone(
+        int channel,
+        const char fileNameUTF8[1024],
+        bool loop = false ,
+        bool mixWithMicrophone = false,
+        FileFormats format = kFileFormatPcm16kHzFile,
+        float volumeScaling = 1.0) = 0;
+
+    // Starts reading data from a stream and transmits the data either
+    // mixed with or instead of the microphone signal.
+    virtual int StartPlayingFileAsMicrophone(
+        int channel,
+        InStream* stream,
+        bool mixWithMicrophone = false,
+        FileFormats format = kFileFormatPcm16kHzFile,
+        float volumeScaling = 1.0) = 0;
+
+    // Stops playing of a file as microphone signal for a specific |channel|.
+    virtual int StopPlayingFileAsMicrophone(int channel) = 0;
+
+    // Returns whether the |channel| is currently playing a file as microphone.
+    virtual int IsPlayingFileAsMicrophone(int channel) = 0;
+
+    // Sets the volume scaling for a microphone file that is already playing.
+    virtual int ScaleFileAsMicrophonePlayout(int channel, float scale) = 0;
+
+    // Starts recording the mixed playout audio.
+    virtual int StartRecordingPlayout(int channel,
+                                      const char* fileNameUTF8,
+                                      CodecInst* compression = NULL,
+                                      int maxSizeBytes = -1) = 0;
+
+    // Stops recording the mixed playout audio.
+    virtual int StopRecordingPlayout(int channel) = 0;
+
+    virtual int StartRecordingPlayout(int channel,
+                                      OutStream* stream,
+                                      CodecInst* compression = NULL) = 0;
+
+    // Starts recording the microphone signal to a file.
+    virtual int StartRecordingMicrophone(const char* fileNameUTF8,
+                                         CodecInst* compression = NULL,
+                                         int maxSizeBytes = -1) = 0;
+
+    // Starts recording the microphone signal to a stream.
+    virtual int StartRecordingMicrophone(OutStream* stream,
+                                         CodecInst* compression = NULL) = 0;
+
+    // Stops recording the microphone signal.
+    virtual int StopRecordingMicrophone() = 0;
+
+
+    // Gets the duration of a file.
+    virtual int GetFileDuration(const char* fileNameUTF8, int& durationMs,
+        FileFormats format = kFileFormatPcm16kHzFile) = 0;
+
+    // Gets the current played position of a file on a specific |channel|.
+    virtual int GetPlaybackPosition(int channel, int& positionMs) = 0;
+
+    virtual int ConvertPCMToWAV(const char* fileNameInUTF8,
+                                const char* fileNameOutUTF8) = 0;
+
+    virtual int ConvertPCMToWAV(InStream* streamIn,
+                                OutStream* streamOut) = 0;
+
+    virtual int ConvertWAVToPCM(const char* fileNameInUTF8,
+                                const char* fileNameOutUTF8) = 0;
+
+    virtual int ConvertWAVToPCM(InStream* streamIn,
+                                OutStream* streamOut) = 0;
+
+    virtual int ConvertPCMToCompressed(const char* fileNameInUTF8,
+                                       const char* fileNameOutUTF8,
+                                       CodecInst* compression) = 0;
+
+    virtual int ConvertPCMToCompressed(InStream* streamIn,
+                                       OutStream* streamOut,
+                                       CodecInst* compression) = 0;
+
+    virtual int ConvertCompressedToPCM(const char* fileNameInUTF8,
+                                       const char* fileNameOutUTF8) = 0;
+
+    virtual int ConvertCompressedToPCM(InStream* streamIn,
+                                       OutStream* streamOut) = 0;
+
+protected:
+    VoEFile() {}
+    virtual ~VoEFile() {}
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_VOE_FILE_H
diff --git a/src/voice_engine/include/voe_hardware.h b/src/voice_engine/include/voe_hardware.h
new file mode 100644
index 0000000..24ed1ff
--- /dev/null
+++ b/src/voice_engine/include/voe_hardware.h
@@ -0,0 +1,142 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - Audio device handling.
+//  - Device information.
+//  - CPU load monitoring.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface(voe);
+//  VoEHardware* hardware  = VoEHardware::GetInterface(voe);
+//  base->Init();
+//  ...
+//  int n_devices = hardware->GetNumOfPlayoutDevices();
+//  ...
+//  base->Terminate();
+//  base->Release();
+//  hardware->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_HARDWARE_H
+#define WEBRTC_VOICE_ENGINE_VOE_HARDWARE_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+class WEBRTC_DLLEXPORT VoEHardware
+{
+public:
+    // Factory for the VoEHardware sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoEHardware* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoEHardware sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Gets the number of audio devices available for recording.
+    virtual int GetNumOfRecordingDevices(int& devices) = 0;
+
+    // Gets the number of audio devices available for playout.
+    virtual int GetNumOfPlayoutDevices(int& devices) = 0;
+
+    // Gets the name of a specific recording device given by an |index|.
+    // On Windows Vista/7, it also retrieves an additional unique ID
+    // (GUID) for the recording device.
+    virtual int GetRecordingDeviceName(int index, char strNameUTF8[128],
+                                       char strGuidUTF8[128]) = 0;
+
+    // Gets the name of a specific playout device given by an |index|.
+    // On Windows Vista/7, it also retrieves an additional unique ID
+    // (GUID) for the playout device.
+    virtual int GetPlayoutDeviceName(int index, char strNameUTF8[128],
+                                     char strGuidUTF8[128]) = 0;
+
+    // Checks if the sound card is available to be opened for recording.
+    virtual int GetRecordingDeviceStatus(bool& isAvailable) = 0;
+
+    // Checks if the sound card is available to be opened for playout.
+    virtual int GetPlayoutDeviceStatus(bool& isAvailable) = 0;
+
+    // Sets the audio device used for recording.
+    virtual int SetRecordingDevice(
+        int index, StereoChannel recordingChannel = kStereoBoth) = 0;
+
+    // Sets the audio device used for playout.
+    virtual int SetPlayoutDevice(int index) = 0;
+
+    // Sets the type of audio device layer to use.
+    virtual int SetAudioDeviceLayer(AudioLayers audioLayer) = 0;
+
+    // Gets the currently used (active) audio device layer.
+    virtual int GetAudioDeviceLayer(AudioLayers& audioLayer) = 0;
+
+    // Gets the VoiceEngine's current CPU consumption in terms of the percent
+    // of total CPU availability. [Windows only]
+    virtual int GetCPULoad(int& loadPercent) = 0;
+
+    // Gets the computer's current CPU consumption in terms of the percent
+    // of the total CPU availability. This method may fail a few times on
+    // Windows because it needs a certain warm-up time before reporting the
+    // result. You should check the return value and either try again or
+    // give up when it fails.
+    virtual int GetSystemCPULoad(int& loadPercent) = 0;
+
+    // Not supported
+    virtual int ResetAudioDevice() = 0;
+
+    // Not supported
+    virtual int AudioDeviceControl(
+        unsigned int par1, unsigned int par2, unsigned int par3) = 0;
+
+    // Not supported
+    virtual int SetLoudspeakerStatus(bool enable) = 0;
+
+    // Not supported
+    virtual int GetLoudspeakerStatus(bool& enabled) = 0;
+
+    // *Experimental - not recommended for use.*
+    // Enables the Windows Core Audio built-in AEC. Fails on other platforms.
+    //
+    // Currently incompatible with the standard VoE AEC and AGC; don't attempt
+    // to enable them while this is active.
+    //
+    // Must be called before VoEBase::StartSend(). When enabled:
+    // 1. VoEBase::StartPlayout() must be called before VoEBase::StartSend().
+    // 2. VoEBase::StopSend() should be called before VoEBase::StopPlayout().
+    //    The reverse order may cause garbage audio to be rendered or the
+    //    capture side to halt until StopSend() is called.
+    //
+    //    As a consequence, SetPlayoutDevice() should be used with caution
+    //    during a call. It will function, but may cause the above issues for
+    //    the duration it takes to complete. (In practice, it should complete
+    //    fast enough to avoid audible degradation).
+    virtual int EnableBuiltInAEC(bool enable) = 0;
+    virtual bool BuiltInAECIsEnabled() const = 0;
+
+protected:
+    VoEHardware() {}
+    virtual ~VoEHardware() {}
+};
+
+} // namespace webrtc
+
+#endif  //  WEBRTC_VOICE_ENGINE_VOE_HARDWARE_H
diff --git a/src/voice_engine/include/voe_neteq_stats.h b/src/voice_engine/include/voe_neteq_stats.h
new file mode 100644
index 0000000..4940bed
--- /dev/null
+++ b/src/voice_engine/include/voe_neteq_stats.h
@@ -0,0 +1,45 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_NETEQ_STATS_H
+#define WEBRTC_VOICE_ENGINE_VOE_NETEQ_STATS_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+class WEBRTC_DLLEXPORT VoENetEqStats
+{
+public:
+    // Factory for the VoENetEqStats sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoENetEqStats* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoENetEqStats sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Get the "in-call" statistics from NetEQ.
+    // The statistics are reset after the query.
+    virtual int GetNetworkStatistics(int channel, NetworkStatistics& stats) = 0;
+
+protected:
+    VoENetEqStats() {}
+    virtual ~VoENetEqStats() {}
+};
+
+}   // namespace webrtc
+
+#endif    // #ifndef WEBRTC_VOICE_ENGINE_VOE_NETEQ_STATS_H
diff --git a/src/voice_engine/include/voe_network.h b/src/voice_engine/include/voe_network.h
new file mode 100644
index 0000000..10acf1c
--- /dev/null
+++ b/src/voice_engine/include/voe_network.h
@@ -0,0 +1,177 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - External protocol support.
+//  - Extended port and address APIs.
+//  - Port and address filters.
+//  - Windows GQoS functions.
+//  - Packet timeout notification.
+//  - Dead-or-Alive connection observations.
+//  - Transmission of raw RTP/RTCP packets into existing channels.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface(voe);
+//  VoENetwork* netw  = VoENetwork::GetInterface(voe);
+//  base->Init();
+//  int ch = base->CreateChannel();
+//  ...
+//  netw->SetPeriodicDeadOrAliveStatus(ch, true);
+//  ...
+//  base->DeleteChannel(ch);
+//  base->Terminate();
+//  base->Release();
+//  netw->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_NETWORK_H
+#define WEBRTC_VOICE_ENGINE_VOE_NETWORK_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+// VoEConnectionObserver
+class WEBRTC_DLLEXPORT VoEConnectionObserver
+{
+public:
+    // This method will be called peridically and deliver dead-or-alive
+    // notifications for a specified |channel| when the observer interface
+    // has been installed and activated.
+    virtual void OnPeriodicDeadOrAlive(const int channel, const bool alive) = 0;
+
+protected:
+    virtual ~VoEConnectionObserver() {}
+};
+
+// VoENetwork
+class WEBRTC_DLLEXPORT VoENetwork
+{
+public:
+    // Factory for the VoENetwork sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoENetwork* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoENetwork sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Installs and enables a user-defined external transport protocol for a
+    // specified |channel|.
+    virtual int RegisterExternalTransport(
+        int channel, Transport& transport) = 0;
+
+    // Removes and disables a user-defined external transport protocol for a
+    // specified |channel|.
+    virtual int DeRegisterExternalTransport(int channel) = 0;
+
+    // The packets received from the network should be passed to this
+    // function when external transport is enabled. Note that the data
+    // including the RTP-header must also be given to the VoiceEngine.
+    virtual int ReceivedRTPPacket(
+        int channel, const void* data, unsigned int length) = 0;
+
+    // The packets received from the network should be passed to this
+    // function when external transport is enabled. Note that the data
+    // including the RTCP-header must also be given to the VoiceEngine.
+    virtual int ReceivedRTCPPacket(
+        int channel, const void* data, unsigned int length) = 0;
+
+    // Gets the source ports and IP address of incoming packets on a
+    // specific |channel|.
+    virtual int GetSourceInfo(
+        int channel, int& rtpPort, int& rtcpPort, char ipAddr[64]) = 0;
+
+    // Gets the local (host) IP address.
+    virtual int GetLocalIP(char ipAddr[64], bool ipv6 = false) = 0;
+
+    // Enables IPv6 for a specified |channel|.
+    virtual int EnableIPv6(int channel) = 0;
+
+    // Gets the current IPv6 staus for a specified |channel|.
+    virtual bool IPv6IsEnabled(int channel) = 0;
+
+    // Enables a port and IP address filter for incoming packets on a
+    // specific |channel|.
+    virtual int SetSourceFilter(int channel,
+        int rtpPort, int rtcpPort = 0, const char ipAddr[64] = 0) = 0;
+
+    // Gets the current port and IP-address filter for a specified |channel|.
+    virtual int GetSourceFilter(
+        int channel, int& rtpPort, int& rtcpPort, char ipAddr[64]) = 0;
+
+    // Sets the six-bit Differentiated Services Code Point (DSCP) in the
+    // IP header of the outgoing stream for a specific |channel|.
+    virtual int SetSendTOS(int channel,
+        int DSCP, int priority = -1, bool useSetSockopt = false) = 0;
+
+    // Gets the six-bit DSCP in the IP header of the outgoing stream for
+    // a specific channel.
+    virtual int GetSendTOS(
+        int channel, int& DSCP, int& priority, bool& useSetSockopt) = 0;
+
+    // Sets the Generic Quality of Service (GQoS) service level.
+    // The Windows operating system then maps to a Differentiated Services
+    // Code Point (DSCP) and to an 802.1p setting. [Windows only]
+    virtual int SetSendGQoS(
+        int channel, bool enable, int serviceType, int overrideDSCP = 0) = 0;
+
+    // Gets the Generic Quality of Service (GQoS) service level.
+    virtual int GetSendGQoS(
+        int channel, bool& enabled, int& serviceType, int& overrideDSCP) = 0;
+
+    // Enables or disables warnings that report if packets have not been
+    // received in |timeoutSeconds| seconds for a specific |channel|.
+    virtual int SetPacketTimeoutNotification(
+        int channel, bool enable, int timeoutSeconds = 2) = 0;
+
+    // Gets the current time-out notification status.
+    virtual int GetPacketTimeoutNotification(
+        int channel, bool& enabled, int& timeoutSeconds) = 0;
+
+    // Installs the observer class implementation for a specified |channel|.
+    virtual int RegisterDeadOrAliveObserver(
+        int channel, VoEConnectionObserver& observer) = 0;
+
+    // Removes the observer class implementation for a specified |channel|.
+    virtual int DeRegisterDeadOrAliveObserver(int channel) = 0;
+
+    // Enables or disables the periodic dead-or-alive callback functionality
+    // for a specified |channel|.
+    virtual int SetPeriodicDeadOrAliveStatus(
+        int channel, bool enable, int sampleTimeSeconds = 2) = 0;
+
+    // Gets the current dead-or-alive notification status.
+    virtual int GetPeriodicDeadOrAliveStatus(
+        int channel, bool& enabled, int& sampleTimeSeconds) = 0;
+
+    // Handles sending a raw UDP data packet over an existing RTP or RTCP
+    // socket.
+    virtual int SendUDPPacket(
+        int channel, const void* data, unsigned int length,
+        int& transmittedBytes, bool useRtcpSocket = false) = 0;
+
+protected:
+    VoENetwork() {}
+    virtual ~VoENetwork() {}
+};
+
+} // namespace webrtc
+
+#endif  //  WEBRTC_VOICE_ENGINE_VOE_NETWORK_H
diff --git a/src/voice_engine/include/voe_rtp_rtcp.h b/src/voice_engine/include/voe_rtp_rtcp.h
new file mode 100644
index 0000000..fd0ca0b
--- /dev/null
+++ b/src/voice_engine/include/voe_rtp_rtcp.h
@@ -0,0 +1,255 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - Callbacks for RTP and RTCP events such as modified SSRC or CSRC.
+//  - SSRC handling.
+//  - Transmission of RTCP sender reports.
+//  - Obtaining RTCP data from incoming RTCP sender reports.
+//  - RTP and RTCP statistics (jitter, packet loss, RTT etc.).
+//  - Forward Error Correction (FEC).
+//  - Writing RTP and RTCP packets to binary files for off-line analysis of
+//    the call quality.
+//  - Inserting extra RTP packets into active audio stream.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface(voe);
+//  VoERTP_RTCP* rtp_rtcp  = VoERTP_RTCP::GetInterface(voe);
+//  base->Init();
+//  int ch = base->CreateChannel();
+//  ...
+//  rtp_rtcp->SetLocalSSRC(ch, 12345);
+//  ...
+//  base->DeleteChannel(ch);
+//  base->Terminate();
+//  base->Release();
+//  rtp_rtcp->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_RTP_RTCP_H
+#define WEBRTC_VOICE_ENGINE_VOE_RTP_RTCP_H
+
+#include <vector>
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+// VoERTPObserver
+class WEBRTC_DLLEXPORT VoERTPObserver
+{
+public:
+    virtual void OnIncomingCSRCChanged(
+        const int channel, const unsigned int CSRC, const bool added) = 0;
+
+    virtual void OnIncomingSSRCChanged(
+        const int channel, const unsigned int SSRC) = 0;
+
+protected:
+    virtual ~VoERTPObserver() {}
+};
+
+// VoERTCPObserver
+class WEBRTC_DLLEXPORT VoERTCPObserver
+{
+public:
+    virtual void OnApplicationDataReceived(
+        const int channel, const unsigned char subType,
+        const unsigned int name, const unsigned char* data,
+        const unsigned short dataLengthInBytes) = 0;
+
+protected:
+    virtual ~VoERTCPObserver() {}
+};
+
+// CallStatistics
+struct CallStatistics
+{
+    unsigned short fractionLost;
+    unsigned int cumulativeLost;
+    unsigned int extendedMax;
+    unsigned int jitterSamples;
+    int rttMs;
+    int bytesSent;
+    int packetsSent;
+    int bytesReceived;
+    int packetsReceived;
+};
+
+// See section 6.4.1 in http://www.ietf.org/rfc/rfc3550.txt for details.
+struct SenderInfo {
+  uint32_t NTP_timestamp_high;
+  uint32_t NTP_timestamp_low;
+  uint32_t RTP_timestamp;
+  uint32_t sender_packet_count;
+  uint32_t sender_octet_count;
+};
+
+// See section 6.4.2 in http://www.ietf.org/rfc/rfc3550.txt for details.
+struct ReportBlock {
+  uint32_t sender_SSRC; // SSRC of sender
+  uint32_t source_SSRC;
+  uint8_t fraction_lost;
+  uint32_t cumulative_num_packets_lost;
+  uint32_t extended_highest_sequence_number;
+  uint32_t interarrival_jitter;
+  uint32_t last_SR_timestamp;
+  uint32_t delay_since_last_SR;
+};
+
+// VoERTP_RTCP
+class WEBRTC_DLLEXPORT VoERTP_RTCP
+{
+public:
+
+    // Factory for the VoERTP_RTCP sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoERTP_RTCP* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoERTP_RTCP sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Registers an instance of a VoERTPObserver derived class for a specified
+    // |channel|. It will allow the user to observe callbacks related to the
+    // RTP protocol such as changes in the incoming SSRC.
+    virtual int RegisterRTPObserver(int channel, VoERTPObserver& observer) = 0;
+
+    // Deregisters an instance of a VoERTPObserver derived class for a
+    // specified |channel|.
+    virtual int DeRegisterRTPObserver(int channel) = 0;
+
+    // Registers an instance of a VoERTCPObserver derived class for a specified
+    // |channel|.
+    virtual int RegisterRTCPObserver(
+        int channel, VoERTCPObserver& observer) = 0;
+
+    // Deregisters an instance of a VoERTCPObserver derived class for a
+    // specified |channel|.
+    virtual int DeRegisterRTCPObserver(int channel) = 0;
+
+    // Sets the local RTP synchronization source identifier (SSRC) explicitly.
+    virtual int SetLocalSSRC(int channel, unsigned int ssrc) = 0;
+
+    // Gets the local RTP SSRC of a specified |channel|.
+    virtual int GetLocalSSRC(int channel, unsigned int& ssrc) = 0;
+
+    // Gets the SSRC of the incoming RTP packets.
+    virtual int GetRemoteSSRC(int channel, unsigned int& ssrc) = 0;
+
+    // Sets the status of rtp-audio-level-indication on a specific |channel|.
+    virtual int SetRTPAudioLevelIndicationStatus(
+        int channel, bool enable, unsigned char ID = 1) = 0;
+
+    // Sets the status of rtp-audio-level-indication on a specific |channel|.
+    virtual int GetRTPAudioLevelIndicationStatus(
+        int channel, bool& enabled, unsigned char& ID) = 0;
+
+    // Gets the CSRCs of the incoming RTP packets.
+    virtual int GetRemoteCSRCs(int channel, unsigned int arrCSRC[15]) = 0;
+
+    // Sets the RTCP status on a specific |channel|.
+    virtual int SetRTCPStatus(int channel, bool enable) = 0;
+
+    // Gets the RTCP status on a specific |channel|.
+    virtual int GetRTCPStatus(int channel, bool& enabled) = 0;
+
+    // Sets the canonical name (CNAME) parameter for RTCP reports on a
+    // specific |channel|.
+    virtual int SetRTCP_CNAME(int channel, const char cName[256]) = 0;
+
+    // Gets the canonical name (CNAME) parameter for RTCP reports on a
+    // specific |channel|.
+    virtual int GetRTCP_CNAME(int channel, char cName[256]) = 0;
+
+    // Gets the canonical name (CNAME) parameter for incoming RTCP reports
+    // on a specific channel.
+    virtual int GetRemoteRTCP_CNAME(int channel, char cName[256]) = 0;
+
+    // Gets RTCP data from incoming RTCP Sender Reports.
+    virtual int GetRemoteRTCPData(
+        int channel, unsigned int& NTPHigh, unsigned int& NTPLow,
+        unsigned int& timestamp, unsigned int& playoutTimestamp,
+        unsigned int* jitter = NULL, unsigned short* fractionLost = NULL) = 0;
+
+    // Gets RTP statistics for a specific |channel|.
+    virtual int GetRTPStatistics(
+        int channel, unsigned int& averageJitterMs, unsigned int& maxJitterMs,
+        unsigned int& discardedPackets) = 0;
+
+    // Gets RTCP statistics for a specific |channel|.
+    virtual int GetRTCPStatistics(int channel, CallStatistics& stats) = 0;
+
+    // Gets the sender info part of the last received RTCP Sender Report (SR)
+    // on a specified |channel|.
+    virtual int GetRemoteRTCPSenderInfo(
+        int channel, SenderInfo* sender_info) = 0;
+
+    // Gets the report block parts of the last received RTCP Sender Report (SR),
+    // or RTCP Receiver Report (RR) on a specified |channel|. Each vector
+    // element also contains the SSRC of the sender in addition to a report
+    // block.
+    virtual int GetRemoteRTCPReportBlocks(
+        int channel, std::vector<ReportBlock>* receive_blocks) = 0;
+
+    // Sends an RTCP APP packet on a specific |channel|.
+    virtual int SendApplicationDefinedRTCPPacket(
+        int channel, const unsigned char subType, unsigned int name,
+        const char* data, unsigned short dataLengthInBytes) = 0;
+
+    // Sets the Forward Error Correction (FEC) status on a specific |channel|.
+    virtual int SetFECStatus(
+        int channel, bool enable, int redPayloadtype = -1) = 0;
+
+    // Gets the FEC status on a specific |channel|.
+    virtual int GetFECStatus(
+        int channel, bool& enabled, int& redPayloadtype) = 0;
+
+    // Enables capturing of RTP packets to a binary file on a specific
+    // |channel| and for a given |direction|. The file can later be replayed
+    // using e.g. RTP Tools rtpplay since the binary file format is
+    // compatible with the rtpdump format.
+    virtual int StartRTPDump(
+        int channel, const char fileNameUTF8[1024],
+        RTPDirections direction = kRtpIncoming) = 0;
+
+    // Disables capturing of RTP packets to a binary file on a specific
+    // |channel| and for a given |direction|.
+    virtual int StopRTPDump(
+        int channel, RTPDirections direction = kRtpIncoming) = 0;
+
+    // Gets the the current RTP capturing state for the specified
+    // |channel| and |direction|.
+    virtual int RTPDumpIsActive(
+        int channel, RTPDirections direction = kRtpIncoming) = 0;
+
+    // Sends an extra RTP packet using an existing/active RTP session.
+    // It is possible to set the payload type, marker bit and payload
+    // of the extra RTP
+    virtual int InsertExtraRTPPacket(
+        int channel, unsigned char payloadType, bool markerBit,
+        const char* payloadData, unsigned short payloadSize) = 0;
+
+protected:
+    VoERTP_RTCP() {}
+    virtual ~VoERTP_RTCP() {}
+};
+
+}  // namespace webrtc
+
+#endif  // #ifndef WEBRTC_VOICE_ENGINE_VOE_RTP_RTCP_H
diff --git a/src/voice_engine/include/voe_video_sync.h b/src/voice_engine/include/voe_video_sync.h
new file mode 100644
index 0000000..ac3b84a
--- /dev/null
+++ b/src/voice_engine/include/voe_video_sync.h
@@ -0,0 +1,85 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - RTP header modification (time stamp and sequence number fields).
+//  - Playout delay tuning to synchronize the voice with video.
+//  - Playout delay monitoring.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface(voe);
+//  VoEVideoSync* vsync  = VoEVideoSync::GetInterface(voe);
+//  base->Init();
+//  ...
+//  int buffer_ms(0);
+//  vsync->GetPlayoutBufferSize(buffer_ms);
+//  ...
+//  base->Terminate();
+//  base->Release();
+//  vsync->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_VIDEO_SYNC_H
+#define WEBRTC_VOICE_ENGINE_VOE_VIDEO_SYNC_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class RtpRtcp;
+class VoiceEngine;
+
+class WEBRTC_DLLEXPORT VoEVideoSync
+{
+public:
+    // Factory for the VoEVideoSync sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoEVideoSync* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoEVideoSync sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Gets the current sound card buffer size (playout delay).
+    virtual int GetPlayoutBufferSize(int& bufferMs) = 0;
+
+    // Sets an additional delay for the playout jitter buffer.
+    virtual int SetMinimumPlayoutDelay(int channel, int delayMs) = 0;
+
+    // Gets the sum of the algorithmic delay, jitter buffer delay, and the
+    // playout buffer delay for a specified |channel|.
+    virtual int GetDelayEstimate(int channel, int& delayMs) = 0;
+
+    // Manual initialization of the RTP timestamp.
+    virtual int SetInitTimestamp(int channel, unsigned int timestamp) = 0;
+
+    // Manual initialization of the RTP sequence number.
+    virtual int SetInitSequenceNumber(int channel, short sequenceNumber) = 0;
+
+    // Get the received RTP timestamp
+    virtual int GetPlayoutTimestamp(int channel, unsigned int& timestamp) = 0;
+
+    virtual int GetRtpRtcp (int channel, RtpRtcp* &rtpRtcpModule) = 0;
+
+protected:
+    VoEVideoSync() { }
+    virtual ~VoEVideoSync() { }
+};
+
+}   // namespace webrtc
+
+#endif  // #ifndef WEBRTC_VOICE_ENGINE_VOE_VIDEO_SYNC_H
diff --git a/src/voice_engine/include/voe_volume_control.h b/src/voice_engine/include/voe_volume_control.h
new file mode 100644
index 0000000..6d64e96
--- /dev/null
+++ b/src/voice_engine/include/voe_volume_control.h
@@ -0,0 +1,127 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// This sub-API supports the following functionalities:
+//
+//  - Speaker volume controls.
+//  - Microphone volume control.
+//  - Non-linear speech level control.
+//  - Mute functions.
+//  - Additional stereo scaling methods.
+//
+// Usage example, omitting error checking:
+//
+//  using namespace webrtc;
+//  VoiceEngine* voe = VoiceEngine::Create();
+//  VoEBase* base = VoEBase::GetInterface(voe);
+//  VoEVolumeControl* volume  = VoEVolumeControl::GetInterface(voe);
+//  base->Init();
+//  int ch = base->CreateChannel();
+//  ...
+//  volume->SetInputMute(ch, true);
+//  ...
+//  base->DeleteChannel(ch);
+//  base->Terminate();
+//  base->Release();
+//  volume->Release();
+//  VoiceEngine::Delete(voe);
+//
+#ifndef WEBRTC_VOICE_ENGINE_VOE_VOLUME_CONTROL_H
+#define WEBRTC_VOICE_ENGINE_VOE_VOLUME_CONTROL_H
+
+#include "common_types.h"
+
+namespace webrtc {
+
+class VoiceEngine;
+
+class WEBRTC_DLLEXPORT VoEVolumeControl
+{
+public:
+    // Factory for the VoEVolumeControl sub-API. Increases an internal
+    // reference counter if successful. Returns NULL if the API is not
+    // supported or if construction fails.
+    static VoEVolumeControl* GetInterface(VoiceEngine* voiceEngine);
+
+    // Releases the VoEVolumeControl sub-API and decreases an internal
+    // reference counter. Returns the new reference count. This value should
+    // be zero for all sub-API:s before the VoiceEngine object can be safely
+    // deleted.
+    virtual int Release() = 0;
+
+    // Sets the speaker |volume| level. Valid range is [0,255].
+    virtual int SetSpeakerVolume(unsigned int volume) = 0;
+
+    // Gets the speaker |volume| level.
+    virtual int GetSpeakerVolume(unsigned int& volume) = 0;
+
+    // Mutes the speaker device completely in the operating system.
+    virtual int SetSystemOutputMute(bool enable) = 0;
+
+    // Gets the output device mute state in the operating system.
+    virtual int GetSystemOutputMute(bool &enabled) = 0;
+
+    // Sets the microphone volume level. Valid range is [0,255].
+    virtual int SetMicVolume(unsigned int volume) = 0;
+
+    // Gets the microphone volume level.
+    virtual int GetMicVolume(unsigned int& volume) = 0;
+
+    // Mutes the microphone input signal completely without affecting
+    // the audio device volume.
+    virtual int SetInputMute(int channel, bool enable) = 0;
+
+    // Gets the current microphone input mute state.
+    virtual int GetInputMute(int channel, bool& enabled) = 0;
+
+    // Mutes the microphone device completely in the operating system.
+    virtual int SetSystemInputMute(bool enable) = 0;
+
+    // Gets the mute state of the input device in the operating system.
+    virtual int GetSystemInputMute(bool& enabled) = 0;
+
+    // Gets the microphone speech |level|, mapped non-linearly to the range
+    // [0,9].
+    virtual int GetSpeechInputLevel(unsigned int& level) = 0;
+
+    // Gets the speaker speech |level|, mapped non-linearly to the range
+    // [0,9].
+    virtual int GetSpeechOutputLevel(int channel, unsigned int& level) = 0;
+
+    // Gets the microphone speech |level|, mapped linearly to the range
+    // [0,32768].
+    virtual int GetSpeechInputLevelFullRange(unsigned int& level) = 0;
+
+    // Gets the speaker speech |level|, mapped linearly to the range [0,32768].
+    virtual int GetSpeechOutputLevelFullRange(
+        int channel, unsigned int& level) = 0;
+
+    // Sets a volume |scaling| applied to the outgoing signal of a specific
+    // channel. Valid scale range is [0.0, 10.0].
+    virtual int SetChannelOutputVolumeScaling(int channel, float scaling) = 0;
+
+    // Gets the current volume scaling for a specified |channel|.
+    virtual int GetChannelOutputVolumeScaling(int channel, float& scaling) = 0;
+
+    // Scales volume of the |left| and |right| channels independently.
+    // Valid scale range is [0.0, 1.0].
+    virtual int SetOutputVolumePan(int channel, float left, float right) = 0;
+
+    // Gets the current left and right scaling factors.
+    virtual int GetOutputVolumePan(int channel, float& left, float& right) = 0;
+
+protected:
+    VoEVolumeControl() {};
+    virtual ~VoEVolumeControl() {};
+};
+
+}  // namespace webrtc
+
+#endif  // #ifndef WEBRTC_VOICE_ENGINE_VOE_VOLUME_CONTROL_H
diff --git a/src/voice_engine/level_indicator.cc b/src/voice_engine/level_indicator.cc
new file mode 100644
index 0000000..1b5cba5
--- /dev/null
+++ b/src/voice_engine/level_indicator.cc
@@ -0,0 +1,99 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "level_indicator.h"
+#include "module_common_types.h"
+#include "signal_processing_library.h"
+
+namespace webrtc {
+
+namespace voe {
+
+
+// Number of bars on the indicator.
+// Note that the number of elements is specified because we are indexing it
+// in the range of 0-32
+const WebRtc_Word8 permutation[33] =
+    {0,1,2,3,4,4,5,5,5,5,6,6,6,6,6,7,7,7,7,8,8,8,9,9,9,9,9,9,9,9,9,9,9};
+
+
+AudioLevel::AudioLevel() :
+    _absMax(0),
+    _count(0),
+    _currentLevel(0),
+    _currentLevelFullRange(0)
+{
+}
+
+AudioLevel::~AudioLevel()
+{
+}
+
+void
+AudioLevel::Clear()
+{
+    _absMax = 0;
+    _count = 0;
+    _currentLevel = 0;
+    _currentLevelFullRange = 0;
+}
+
+void
+AudioLevel::ComputeLevel(const AudioFrame& audioFrame)
+{
+    WebRtc_Word16 absValue(0);
+
+    // Check speech level (works for 2 channels as well)
+    absValue = WebRtcSpl_MaxAbsValueW16(
+        audioFrame.data_,
+        audioFrame.samples_per_channel_*audioFrame.num_channels_);
+    if (absValue > _absMax)
+    _absMax = absValue;
+
+    // Update level approximately 10 times per second
+    if (_count++ == kUpdateFrequency)
+    {
+        _currentLevelFullRange = _absMax;
+
+        _count = 0;
+
+        // Highest value for a WebRtc_Word16 is 0x7fff = 32767
+        // Divide with 1000 to get in the range of 0-32 which is the range of
+        // the permutation vector
+        WebRtc_Word32 position = _absMax/1000;
+
+        // Make it less likely that the bar stays at position 0. I.e. only if
+        // its in the range 0-250 (instead of 0-1000)
+        if ((position == 0) && (_absMax > 250))
+        {
+            position = 1;
+        }
+        _currentLevel = permutation[position];
+
+        // Decay the absolute maximum (divide by 4)
+        _absMax >>= 2;
+    }
+}
+
+WebRtc_Word8
+AudioLevel::Level() const
+{
+    return _currentLevel;
+}
+
+WebRtc_Word16
+AudioLevel::LevelFullRange() const
+{
+    return _currentLevelFullRange;
+}
+
+}  // namespace voe
+
+}  //  namespace webrtc
diff --git a/src/voice_engine/level_indicator.h b/src/voice_engine/level_indicator.h
new file mode 100644
index 0000000..564b068
--- /dev/null
+++ b/src/voice_engine/level_indicator.h
@@ -0,0 +1,49 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_LEVEL_INDICATOR_H
+#define WEBRTC_VOICE_ENGINE_LEVEL_INDICATOR_H
+
+#include "typedefs.h"
+#include "voice_engine_defines.h"
+
+namespace webrtc {
+
+class AudioFrame;
+namespace voe {
+
+class AudioLevel
+{
+public:
+    AudioLevel();
+    virtual ~AudioLevel();
+
+    void ComputeLevel(const AudioFrame& audioFrame);
+
+    WebRtc_Word8 Level() const;
+
+    WebRtc_Word16 LevelFullRange() const;
+
+    void Clear();
+
+private:
+    enum { kUpdateFrequency = 10};
+
+    WebRtc_Word16 _absMax;
+    WebRtc_Word16 _count;
+    WebRtc_Word8 _currentLevel;
+    WebRtc_Word16 _currentLevelFullRange;
+};
+
+}  // namespace voe
+
+}  // namespace webrtc
+
+#endif // WEBRTC_VOICE_ENGINE_LEVEL_INDICATOR_H
diff --git a/src/voice_engine/monitor_module.cc b/src/voice_engine/monitor_module.cc
new file mode 100644
index 0000000..07b17fb
--- /dev/null
+++ b/src/voice_engine/monitor_module.cc
@@ -0,0 +1,91 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "critical_section_wrapper.h"
+#include "monitor_module.h"
+
+namespace webrtc  {
+
+namespace voe  {
+
+MonitorModule::MonitorModule() :
+    _observerPtr(NULL),
+    _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _lastProcessTime(GET_TIME_IN_MS())
+{
+}
+
+MonitorModule::~MonitorModule()
+{
+    delete &_callbackCritSect;
+}
+
+WebRtc_Word32 
+MonitorModule::RegisterObserver(MonitorObserver& observer)
+{
+    CriticalSectionScoped lock(&_callbackCritSect);
+    if (_observerPtr)
+    {
+        return -1;
+    }
+    _observerPtr = &observer;
+    return 0;
+}
+
+WebRtc_Word32 
+MonitorModule::DeRegisterObserver()
+{
+    CriticalSectionScoped lock(&_callbackCritSect);
+    if (!_observerPtr)
+    {
+        return 0;
+    }
+    _observerPtr = NULL;
+    return 0;
+}
+
+WebRtc_Word32 
+MonitorModule::Version(char* version,
+                       WebRtc_UWord32& remainingBufferInBytes,
+                       WebRtc_UWord32& position) const
+{
+    return 0;
+}
+   
+WebRtc_Word32 
+MonitorModule::ChangeUniqueId(const WebRtc_Word32 id)
+{
+    return 0;
+}
+
+WebRtc_Word32 
+MonitorModule::TimeUntilNextProcess()
+{
+    WebRtc_UWord32 now = GET_TIME_IN_MS();
+    WebRtc_Word32 timeToNext =
+        kAverageProcessUpdateTimeMs - (now - _lastProcessTime);
+    return (timeToNext); 
+}
+
+WebRtc_Word32 
+MonitorModule::Process()
+{
+    _lastProcessTime = GET_TIME_IN_MS();
+    if (_observerPtr)
+    {
+        CriticalSectionScoped lock(&_callbackCritSect);
+        _observerPtr->OnPeriodicProcess();
+    }
+    return 0;
+}
+
+}  //  namespace voe
+
+}  //  namespace webrtc
diff --git a/src/voice_engine/monitor_module.h b/src/voice_engine/monitor_module.h
new file mode 100644
index 0000000..7612c3c
--- /dev/null
+++ b/src/voice_engine/monitor_module.h
@@ -0,0 +1,63 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_MONITOR_MODULE_H
+#define WEBRTC_VOICE_ENGINE_MONITOR_MODULE_H
+
+#include "module.h"
+#include "typedefs.h"
+#include "voice_engine_defines.h"
+
+class MonitorObserver
+{
+public:
+    virtual void OnPeriodicProcess() = 0;
+protected:
+    virtual ~MonitorObserver() {}
+};
+
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+namespace voe {
+
+class MonitorModule : public Module
+{
+public:
+    WebRtc_Word32 RegisterObserver(MonitorObserver& observer);
+
+    WebRtc_Word32 DeRegisterObserver();
+
+    MonitorModule();
+
+    virtual ~MonitorModule();
+public:	// module
+    WebRtc_Word32 Version(char* version,
+                          WebRtc_UWord32& remainingBufferInBytes,
+                          WebRtc_UWord32& position) const;
+
+    WebRtc_Word32 ChangeUniqueId(const WebRtc_Word32 id);
+
+    WebRtc_Word32 TimeUntilNextProcess();
+
+    WebRtc_Word32 Process();
+private:
+    enum { kAverageProcessUpdateTimeMs = 1000 };
+    MonitorObserver* _observerPtr;
+    CriticalSectionWrapper&	_callbackCritSect;
+    WebRtc_Word32 _lastProcessTime;
+};
+
+}  //  namespace voe
+
+}  //  namespace webrtc
+
+#endif // VOICE_ENGINE_MONITOR_MODULE
diff --git a/src/voice_engine/output_mixer.cc b/src/voice_engine/output_mixer.cc
new file mode 100644
index 0000000..daf0d4a
--- /dev/null
+++ b/src/voice_engine/output_mixer.cc
@@ -0,0 +1,665 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "output_mixer.h"
+
+#include "audio_processing.h"
+#include "audio_frame_operations.h"
+#include "critical_section_wrapper.h"
+#include "file_wrapper.h"
+#include "output_mixer_internal.h"
+#include "statistics.h"
+#include "trace.h"
+#include "voe_external_media.h"
+
+namespace webrtc {
+
+namespace voe {
+
+void
+OutputMixer::NewMixedAudio(const WebRtc_Word32 id,
+                           const AudioFrame& generalAudioFrame,
+                           const AudioFrame** uniqueAudioFrames,
+                           const WebRtc_UWord32 size)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::NewMixedAudio(id=%d, size=%u)", id, size);
+
+    _audioFrame = generalAudioFrame;
+    _audioFrame.id_ = id;
+}
+
+void OutputMixer::MixedParticipants(
+    const WebRtc_Word32 id,
+    const ParticipantStatistics* participantStatistics,
+    const WebRtc_UWord32 size)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::MixedParticipants(id=%d, size=%u)", id, size);
+}
+
+void OutputMixer::VADPositiveParticipants(
+    const WebRtc_Word32 id,
+    const ParticipantStatistics* participantStatistics,
+    const WebRtc_UWord32 size)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::VADPositiveParticipants(id=%d, size=%u)",
+                 id, size);
+}
+
+void OutputMixer::MixedAudioLevel(const WebRtc_Word32  id,
+                                  const WebRtc_UWord32 level)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::MixedAudioLevel(id=%d, level=%u)", id, level);
+}
+
+void OutputMixer::PlayNotification(const WebRtc_Word32 id,
+                                   const WebRtc_UWord32 durationMs)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::PlayNotification(id=%d, durationMs=%d)",
+                 id, durationMs);
+    // Not implement yet
+}
+
+void OutputMixer::RecordNotification(const WebRtc_Word32 id,
+                                     const WebRtc_UWord32 durationMs)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::RecordNotification(id=%d, durationMs=%d)",
+                 id, durationMs);
+
+    // Not implement yet
+}
+
+void OutputMixer::PlayFileEnded(const WebRtc_Word32 id)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::PlayFileEnded(id=%d)", id);
+
+    // not needed
+}
+
+void OutputMixer::RecordFileEnded(const WebRtc_Word32 id)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::RecordFileEnded(id=%d)", id);
+    assert(id == _instanceId);
+
+    CriticalSectionScoped cs(&_fileCritSect);
+    _outputFileRecording = false;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::RecordFileEnded() =>"
+                 "output file recorder module is shutdown");
+}
+
+WebRtc_Word32
+OutputMixer::Create(OutputMixer*& mixer, const WebRtc_UWord32 instanceId)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, instanceId,
+                 "OutputMixer::Create(instanceId=%d)", instanceId);
+    mixer = new OutputMixer(instanceId);
+    if (mixer == NULL)
+    {
+        WEBRTC_TRACE(kTraceMemory, kTraceVoice, instanceId,
+                     "OutputMixer::Create() unable to allocate memory for"
+                     "mixer");
+        return -1;
+    }
+    return 0;
+}
+
+OutputMixer::OutputMixer(const WebRtc_UWord32 instanceId) :
+    _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _fileCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _mixerModule(*AudioConferenceMixer::Create(instanceId)),
+    _audioLevel(),
+    _dtmfGenerator(instanceId),
+    _instanceId(instanceId),
+    _externalMediaCallbackPtr(NULL),
+    _externalMedia(false),
+    _panLeft(1.0f),
+    _panRight(1.0f),
+    _mixingFrequencyHz(8000),
+    _outputFileRecorderPtr(NULL),
+    _outputFileRecording(false)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::OutputMixer() - ctor");
+	
+    if ((_mixerModule.RegisterMixedStreamCallback(*this) == -1) ||
+        (_mixerModule.RegisterMixerStatusCallback(*this, 100) == -1))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                     "OutputMixer::OutputMixer() failed to register mixer"
+                     "callbacks");
+    }
+	
+    _dtmfGenerator.Init();
+}
+
+void
+OutputMixer::Destroy(OutputMixer*& mixer)
+{
+    if (mixer)
+    {
+        delete mixer;
+        mixer = NULL;
+    }
+}
+	
+OutputMixer::~OutputMixer()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::~OutputMixer() - dtor");
+    if (_externalMedia)
+    {
+        DeRegisterExternalMediaProcessing();
+    }
+    {
+        CriticalSectionScoped cs(&_fileCritSect);
+        if (_outputFileRecorderPtr)
+        {
+            _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
+            _outputFileRecorderPtr->StopRecording();
+            FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
+            _outputFileRecorderPtr = NULL;
+        }
+    }
+    _mixerModule.UnRegisterMixerStatusCallback();
+    _mixerModule.UnRegisterMixedStreamCallback();
+    delete &_mixerModule;
+    delete &_callbackCritSect;
+    delete &_fileCritSect;
+}
+
+WebRtc_Word32
+OutputMixer::SetEngineInformation(voe::Statistics& engineStatistics)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::SetEngineInformation()");
+    _engineStatisticsPtr = &engineStatistics;
+    return 0;
+}
+
+WebRtc_Word32 
+OutputMixer::SetAudioProcessingModule(
+    AudioProcessing* audioProcessingModule)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::SetAudioProcessingModule("
+                 "audioProcessingModule=0x%x)", audioProcessingModule);
+    _audioProcessingModulePtr = audioProcessingModule;
+    return 0;
+}
+
+int OutputMixer::RegisterExternalMediaProcessing(
+    VoEMediaProcess& proccess_object)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+               "OutputMixer::RegisterExternalMediaProcessing()");
+
+    CriticalSectionScoped cs(&_callbackCritSect);
+    _externalMediaCallbackPtr = &proccess_object;
+    _externalMedia = true;
+
+    return 0;
+}
+
+int OutputMixer::DeRegisterExternalMediaProcessing()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::DeRegisterExternalMediaProcessing()");
+
+    CriticalSectionScoped cs(&_callbackCritSect);
+    _externalMedia = false;
+    _externalMediaCallbackPtr = NULL;
+
+    return 0;
+}
+
+int OutputMixer::PlayDtmfTone(WebRtc_UWord8 eventCode, int lengthMs,
+                              int attenuationDb)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "OutputMixer::PlayDtmfTone()");
+    if (_dtmfGenerator.AddTone(eventCode, lengthMs, attenuationDb) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(VE_STILL_PLAYING_PREV_DTMF,
+                                           kTraceError,
+                                           "OutputMixer::PlayDtmfTone()");
+        return -1;
+    }
+    return 0;
+}
+
+int OutputMixer::StartPlayingDtmfTone(WebRtc_UWord8 eventCode,
+                                      int attenuationDb)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "OutputMixer::StartPlayingDtmfTone()");
+    if (_dtmfGenerator.StartTone(eventCode, attenuationDb) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_STILL_PLAYING_PREV_DTMF,
+            kTraceError,
+            "OutputMixer::StartPlayingDtmfTone())");
+        return -1;
+    }
+    return 0;
+}
+
+int OutputMixer::StopPlayingDtmfTone()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "OutputMixer::StopPlayingDtmfTone()");
+    return (_dtmfGenerator.StopTone());
+}
+
+WebRtc_Word32
+OutputMixer::SetMixabilityStatus(MixerParticipant& participant,
+                                 const bool mixable)
+{
+    return _mixerModule.SetMixabilityStatus(participant, mixable);
+}
+
+WebRtc_Word32
+OutputMixer::SetAnonymousMixabilityStatus(MixerParticipant& participant,
+                                          const bool mixable)
+{
+    return _mixerModule.SetAnonymousMixabilityStatus(participant,mixable);
+}
+
+WebRtc_Word32
+OutputMixer::MixActiveChannels()
+{
+    return _mixerModule.Process();
+}
+
+int
+OutputMixer::GetSpeechOutputLevel(WebRtc_UWord32& level)
+{
+    WebRtc_Word8 currentLevel = _audioLevel.Level();
+    level = static_cast<WebRtc_UWord32> (currentLevel);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetSpeechOutputLevel() => level=%u", level);
+    return 0;
+}
+
+int
+OutputMixer::GetSpeechOutputLevelFullRange(WebRtc_UWord32& level)
+{
+    WebRtc_Word16 currentLevel = _audioLevel.LevelFullRange();
+    level = static_cast<WebRtc_UWord32> (currentLevel);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetSpeechOutputLevelFullRange() => level=%u", level);
+    return 0;
+}
+
+int
+OutputMixer::SetOutputVolumePan(float left, float right)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::SetOutputVolumePan()");
+    _panLeft = left;
+    _panRight = right;
+    return 0;
+}
+
+int
+OutputMixer::GetOutputVolumePan(float& left, float& right)
+{
+    left = _panLeft;
+    right = _panRight;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "GetOutputVolumePan() => left=%2.1f, right=%2.1f",
+                 left, right);
+    return 0;
+}
+
+int OutputMixer::StartRecordingPlayout(const char* fileName,
+                                       const CodecInst* codecInst)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::StartRecordingPlayout(fileName=%s)", fileName);
+
+    if (_outputFileRecording)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
+                     "StartRecordingPlayout() is already recording");
+        return 0;
+    }
+
+    FileFormats format;
+    const WebRtc_UWord32 notificationTime(0);
+    CodecInst dummyCodec={100,"L16",16000,320,1,320000};
+
+    if ((codecInst != NULL) &&
+      ((codecInst->channels < 1) || (codecInst->channels > 2)))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_ARGUMENT, kTraceError,
+            "StartRecordingPlayout() invalid compression");
+        return(-1);
+    }
+    if(codecInst == NULL)
+    {
+        format = kFileFormatPcm16kHzFile;
+        codecInst=&dummyCodec;
+    }
+    else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
+    {
+        format = kFileFormatWavFile;
+    }
+    else
+    {
+        format = kFileFormatCompressedFile;
+    }
+
+    CriticalSectionScoped cs(&_fileCritSect);
+    
+    // Destroy the old instance
+    if (_outputFileRecorderPtr)
+    {
+        _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
+        FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
+        _outputFileRecorderPtr = NULL;
+    }
+
+    _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
+        _instanceId,
+        (const FileFormats)format);
+    if (_outputFileRecorderPtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "StartRecordingPlayout() fileRecorder format isnot correct");
+        return -1;
+    }
+
+    if (_outputFileRecorderPtr->StartRecordingAudioFile(
+        fileName,
+        (const CodecInst&)*codecInst,
+        notificationTime) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "StartRecordingAudioFile() failed to start file recording");
+        _outputFileRecorderPtr->StopRecording();
+        FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
+        _outputFileRecorderPtr = NULL;
+        return -1;
+    }
+    _outputFileRecorderPtr->RegisterModuleFileCallback(this);
+    _outputFileRecording = true;
+
+    return 0;
+}
+
+int OutputMixer::StartRecordingPlayout(OutStream* stream,
+                                       const CodecInst* codecInst)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::StartRecordingPlayout()");
+
+    if (_outputFileRecording)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
+                     "StartRecordingPlayout() is already recording");
+        return 0;
+    }
+
+    FileFormats format;
+    const WebRtc_UWord32 notificationTime(0);
+    CodecInst dummyCodec={100,"L16",16000,320,1,320000};
+
+    if (codecInst != NULL && codecInst->channels != 1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_ARGUMENT, kTraceError,
+            "StartRecordingPlayout() invalid compression");
+        return(-1);
+    }
+    if(codecInst == NULL)
+    {
+        format = kFileFormatPcm16kHzFile;
+        codecInst=&dummyCodec;
+    }
+    else if((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
+    {
+        format = kFileFormatWavFile;
+    }
+    else
+    {
+        format = kFileFormatCompressedFile;
+    }
+
+    CriticalSectionScoped cs(&_fileCritSect);
+
+    // Destroy the old instance
+    if (_outputFileRecorderPtr)
+    {
+        _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
+        FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
+        _outputFileRecorderPtr = NULL;
+    }
+
+    _outputFileRecorderPtr = FileRecorder::CreateFileRecorder(
+        _instanceId,
+        (const FileFormats)format);
+    if (_outputFileRecorderPtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "StartRecordingPlayout() fileRecorder format isnot correct");
+        return -1;
+    }
+
+    if (_outputFileRecorderPtr->StartRecordingAudioFile(*stream,
+                                                        *codecInst,
+                                                        notificationTime) != 0)
+    {
+       _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
+           "StartRecordingAudioFile() failed to start file recording");
+        _outputFileRecorderPtr->StopRecording();
+        FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
+        _outputFileRecorderPtr = NULL;
+        return -1;
+    }
+
+    _outputFileRecorderPtr->RegisterModuleFileCallback(this);
+    _outputFileRecording = true;
+
+    return 0;
+}
+
+int OutputMixer::StopRecordingPlayout()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "OutputMixer::StopRecordingPlayout()");
+
+    if (!_outputFileRecording)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId,-1),
+                     "StopRecordingPlayout() file isnot recording");
+        return -1;
+    }
+
+    CriticalSectionScoped cs(&_fileCritSect);
+
+    if (_outputFileRecorderPtr->StopRecording() != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_STOP_RECORDING_FAILED, kTraceError,
+            "StopRecording(), could not stop recording");
+        return -1;
+    }
+    _outputFileRecorderPtr->RegisterModuleFileCallback(NULL);
+    FileRecorder::DestroyFileRecorder(_outputFileRecorderPtr);
+    _outputFileRecorderPtr = NULL;
+    _outputFileRecording = false;
+
+    return 0;
+}
+
+int OutputMixer::GetMixedAudio(int sample_rate_hz,
+                               int num_channels,
+                               AudioFrame* frame) {
+  WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+               "OutputMixer::GetMixedAudio(sample_rate_hz=%d, num_channels=%d)",
+               sample_rate_hz, num_channels);
+
+  // --- Record playout if enabled
+  {
+    CriticalSectionScoped cs(&_fileCritSect);
+    if (_outputFileRecording && _outputFileRecorderPtr)
+      _outputFileRecorderPtr->RecordAudioToFile(_audioFrame);
+  }
+
+  frame->num_channels_ = num_channels;
+  frame->sample_rate_hz_ = sample_rate_hz;
+  // TODO(andrew): Ideally the downmixing would occur much earlier, in
+  // AudioCodingModule.
+  return RemixAndResample(_audioFrame, &_resampler, frame);
+}
+
+WebRtc_Word32
+OutputMixer::DoOperationsOnCombinedSignal()
+{
+    if (_audioFrame.sample_rate_hz_ != _mixingFrequencyHz)
+    {
+        WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+                     "OutputMixer::DoOperationsOnCombinedSignal() => "
+                     "mixing frequency = %d", _audioFrame.sample_rate_hz_);
+        _mixingFrequencyHz = _audioFrame.sample_rate_hz_;
+    }
+
+    // --- Insert inband Dtmf tone
+    if (_dtmfGenerator.IsAddingTone())
+    {
+        InsertInbandDtmfTone();
+    }
+
+    // Scale left and/or right channel(s) if balance is active
+    if (_panLeft != 1.0 || _panRight != 1.0)
+    {
+        if (_audioFrame.num_channels_ == 1)
+        {
+            AudioFrameOperations::MonoToStereo(&_audioFrame);
+        }
+        else
+        {
+            // Pure stereo mode (we are receiving a stereo signal).
+        }
+
+        assert(_audioFrame.num_channels_ == 2);
+        AudioFrameOperations::Scale(_panLeft, _panRight, _audioFrame);
+    }
+
+    // --- Far-end Voice Quality Enhancement (AudioProcessing Module)
+
+    APMAnalyzeReverseStream();
+
+    // --- External media processing
+
+    if (_externalMedia)
+    {
+        CriticalSectionScoped cs(&_callbackCritSect);
+        const bool isStereo = (_audioFrame.num_channels_ == 2);
+        if (_externalMediaCallbackPtr)
+        {
+            _externalMediaCallbackPtr->Process(
+                -1,
+                kPlaybackAllChannelsMixed,
+                (WebRtc_Word16*)_audioFrame.data_,
+                _audioFrame.samples_per_channel_,
+                _audioFrame.sample_rate_hz_,
+                isStereo);
+        }
+    }
+
+    // --- Measure audio level (0-9) for the combined signal
+    _audioLevel.ComputeLevel(_audioFrame);
+
+    return 0;
+}
+
+// ----------------------------------------------------------------------------
+//                             Private methods
+// ----------------------------------------------------------------------------
+
+void OutputMixer::APMAnalyzeReverseStream() {
+  // Convert from mixing to AudioProcessing sample rate, determined by the send
+  // side. Downmix to mono.
+  AudioFrame frame;
+  frame.num_channels_ = 1;
+  frame.sample_rate_hz_ = _audioProcessingModulePtr->sample_rate_hz();
+  if (RemixAndResample(_audioFrame, &_apmResampler, &frame) == -1)
+    return;
+
+  if (_audioProcessingModulePtr->AnalyzeReverseStream(&frame) == -1) {
+    WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId,-1),
+                 "AudioProcessingModule::AnalyzeReverseStream() => error");
+  }
+}
+
+int
+OutputMixer::InsertInbandDtmfTone()
+{
+    WebRtc_UWord16 sampleRate(0);
+    _dtmfGenerator.GetSampleRate(sampleRate);
+    if (sampleRate != _audioFrame.sample_rate_hz_)
+    {
+        // Update sample rate of Dtmf tone since the mixing frequency changed.
+        _dtmfGenerator.SetSampleRate(
+            (WebRtc_UWord16)(_audioFrame.sample_rate_hz_));
+        // Reset the tone to be added taking the new sample rate into account.
+        _dtmfGenerator.ResetTone();
+    }
+
+    WebRtc_Word16 toneBuffer[320];
+    WebRtc_UWord16 toneSamples(0);
+    if (_dtmfGenerator.Get10msTone(toneBuffer, toneSamples) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                     "OutputMixer::InsertInbandDtmfTone() inserting Dtmf"
+                     "tone failed");
+        return -1;
+    }
+
+    // replace mixed audio with Dtmf tone
+    if (_audioFrame.num_channels_ == 1)
+    {
+        // mono
+        memcpy(_audioFrame.data_, toneBuffer, sizeof(WebRtc_Word16)
+            * toneSamples);
+    } else
+    {
+        // stereo
+        for (int i = 0; i < _audioFrame.samples_per_channel_; i++)
+        {
+            _audioFrame.data_[2 * i] = toneBuffer[i];
+            _audioFrame.data_[2 * i + 1] = 0;
+        }
+    }
+    assert(_audioFrame.samples_per_channel_ == toneSamples);
+
+    return 0;
+}
+
+}  //  namespace voe
+
+}  //  namespace webrtc
diff --git a/src/voice_engine/output_mixer.h b/src/voice_engine/output_mixer.h
new file mode 100644
index 0000000..29ca858
--- /dev/null
+++ b/src/voice_engine/output_mixer.h
@@ -0,0 +1,161 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_OUTPUT_MIXER_H_
+#define WEBRTC_VOICE_ENGINE_OUTPUT_MIXER_H_
+
+#include "audio_conference_mixer.h"
+#include "audio_conference_mixer_defines.h"
+#include "common_types.h"
+#include "dtmf_inband.h"
+#include "file_recorder.h"
+#include "level_indicator.h"
+#include "resampler.h"
+#include "voice_engine_defines.h"
+
+namespace webrtc {
+
+class AudioProcessing;
+class CriticalSectionWrapper;
+class FileWrapper;
+class VoEMediaProcess;
+
+namespace voe {
+
+class Statistics;
+
+class OutputMixer : public AudioMixerOutputReceiver,
+                    public AudioMixerStatusReceiver,
+                    public FileCallback
+{
+public:
+    static WebRtc_Word32 Create(OutputMixer*& mixer,
+                                const WebRtc_UWord32 instanceId);
+
+    static void Destroy(OutputMixer*& mixer);
+
+    WebRtc_Word32 SetEngineInformation(Statistics& engineStatistics);
+
+    WebRtc_Word32 SetAudioProcessingModule(
+        AudioProcessing* audioProcessingModule);
+
+    // VoEExternalMedia
+    int RegisterExternalMediaProcessing(
+        VoEMediaProcess& proccess_object);
+
+    int DeRegisterExternalMediaProcessing();
+
+    // VoEDtmf
+    int PlayDtmfTone(WebRtc_UWord8 eventCode,
+                     int lengthMs,
+                     int attenuationDb);
+
+    int StartPlayingDtmfTone(WebRtc_UWord8 eventCode,
+                             int attenuationDb);
+
+    int StopPlayingDtmfTone();
+
+    WebRtc_Word32 MixActiveChannels();
+
+    WebRtc_Word32 DoOperationsOnCombinedSignal();
+
+    WebRtc_Word32 SetMixabilityStatus(MixerParticipant& participant,
+                                      const bool mixable);
+
+    WebRtc_Word32 SetAnonymousMixabilityStatus(MixerParticipant& participant,
+                                               const bool mixable);
+
+    int GetMixedAudio(int sample_rate_hz, int num_channels,
+                      AudioFrame* audioFrame);
+
+    // VoEVolumeControl
+    int GetSpeechOutputLevel(WebRtc_UWord32& level);
+
+    int GetSpeechOutputLevelFullRange(WebRtc_UWord32& level);
+
+    int SetOutputVolumePan(float left, float right);
+
+    int GetOutputVolumePan(float& left, float& right);
+
+    // VoEFile
+    int StartRecordingPlayout(const char* fileName,
+                              const CodecInst* codecInst);
+
+    int StartRecordingPlayout(OutStream* stream,
+                              const CodecInst* codecInst);
+    int StopRecordingPlayout();
+
+    virtual ~OutputMixer();
+
+    // from AudioMixerOutputReceiver
+    virtual void NewMixedAudio(
+        const WebRtc_Word32 id,
+        const AudioFrame& generalAudioFrame,
+        const AudioFrame** uniqueAudioFrames,
+        const WebRtc_UWord32 size);
+
+    // from AudioMixerStatusReceiver
+    virtual void MixedParticipants(
+        const WebRtc_Word32 id,
+        const ParticipantStatistics* participantStatistics,
+        const WebRtc_UWord32 size);
+
+    virtual void VADPositiveParticipants(
+        const WebRtc_Word32 id,
+        const ParticipantStatistics* participantStatistics,
+        const WebRtc_UWord32 size);
+
+    virtual void MixedAudioLevel(const WebRtc_Word32  id,
+                                 const WebRtc_UWord32 level);
+
+    // For file recording
+    void PlayNotification(const WebRtc_Word32 id,
+                          const WebRtc_UWord32 durationMs);
+
+    void RecordNotification(const WebRtc_Word32 id,
+                            const WebRtc_UWord32 durationMs);
+
+    void PlayFileEnded(const WebRtc_Word32 id);
+    void RecordFileEnded(const WebRtc_Word32 id);
+
+private:
+    OutputMixer(const WebRtc_UWord32 instanceId);
+    void APMAnalyzeReverseStream();
+    int InsertInbandDtmfTone();
+
+    // uses
+    Statistics* _engineStatisticsPtr;
+    AudioProcessing* _audioProcessingModulePtr;
+
+    // owns
+    CriticalSectionWrapper& _callbackCritSect;
+    // protect the _outputFileRecorderPtr and _outputFileRecording
+    CriticalSectionWrapper& _fileCritSect;
+    AudioConferenceMixer& _mixerModule;
+    AudioFrame _audioFrame;
+    Resampler _resampler;        // converts mixed audio to fit ADM format
+    Resampler _apmResampler;    // converts mixed audio to fit APM rate
+    AudioLevel _audioLevel;    // measures audio level for the combined signal
+    DtmfInband _dtmfGenerator;
+    int _instanceId;
+    VoEMediaProcess* _externalMediaCallbackPtr;
+    bool _externalMedia;
+    float _panLeft;
+    float _panRight;
+    int _mixingFrequencyHz;
+    FileRecorder* _outputFileRecorderPtr;
+    bool _outputFileRecording;
+};
+
+}  //  namespace voe
+
+}  //  namespace werbtc
+
+#endif  // VOICE_ENGINE_OUTPUT_MIXER_H_
diff --git a/src/voice_engine/output_mixer_internal.cc b/src/voice_engine/output_mixer_internal.cc
new file mode 100644
index 0000000..b78d8cd
--- /dev/null
+++ b/src/voice_engine/output_mixer_internal.cc
@@ -0,0 +1,73 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "output_mixer_internal.h"
+
+#include "audio_frame_operations.h"
+#include "common_audio/resampler/include/resampler.h"
+#include "module_common_types.h"
+#include "trace.h"
+
+namespace webrtc {
+namespace voe {
+
+int RemixAndResample(const AudioFrame& src_frame,
+                     Resampler* resampler,
+                     AudioFrame* dst_frame) {
+  const int16_t* audio_ptr = src_frame.data_;
+  int audio_ptr_num_channels = src_frame.num_channels_;
+  int16_t mono_audio[AudioFrame::kMaxDataSizeSamples];
+
+  // Downmix before resampling.
+  if (src_frame.num_channels_ == 2 && dst_frame->num_channels_ == 1) {
+    AudioFrameOperations::StereoToMono(src_frame.data_,
+                                       src_frame.samples_per_channel_,
+                                       mono_audio);
+    audio_ptr = mono_audio;
+    audio_ptr_num_channels = 1;
+  }
+
+  const ResamplerType resampler_type = audio_ptr_num_channels == 1 ?
+      kResamplerSynchronous : kResamplerSynchronousStereo;
+  if (resampler->ResetIfNeeded(src_frame.sample_rate_hz_,
+                               dst_frame->sample_rate_hz_,
+                               resampler_type) == -1) {
+    *dst_frame = src_frame;
+    WEBRTC_TRACE(kTraceError, kTraceVoice, -1,
+                "%s ResetIfNeeded failed", __FUNCTION__);
+    return -1;
+  }
+
+  int out_length = 0;
+  if (resampler->Push(audio_ptr,
+                      src_frame.samples_per_channel_* audio_ptr_num_channels,
+                      dst_frame->data_,
+                      AudioFrame::kMaxDataSizeSamples,
+                      out_length) == 0) {
+    dst_frame->samples_per_channel_ = out_length / audio_ptr_num_channels;
+  } else {
+    *dst_frame = src_frame;
+    WEBRTC_TRACE(kTraceError, kTraceVoice, -1,
+                 "%s resampling failed", __FUNCTION__);
+    return -1;
+  }
+
+  // Upmix after resampling.
+  if (src_frame.num_channels_ == 1 && dst_frame->num_channels_ == 2) {
+    // The audio in dst_frame really is mono at this point; MonoToStereo will
+    // set this back to stereo.
+    dst_frame->num_channels_ = 1;
+    AudioFrameOperations::MonoToStereo(dst_frame);
+  }
+  return 0;
+}
+
+}  // namespace voe
+}  // namespace webrtc
diff --git a/src/voice_engine/output_mixer_internal.h b/src/voice_engine/output_mixer_internal.h
new file mode 100644
index 0000000..8d23a14
--- /dev/null
+++ b/src/voice_engine/output_mixer_internal.h
@@ -0,0 +1,33 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_OUTPUT_MIXER_INTERNAL_H_
+#define WEBRTC_VOICE_ENGINE_OUTPUT_MIXER_INTERNAL_H_
+
+namespace webrtc {
+
+class AudioFrame;
+class Resampler;
+
+namespace voe {
+
+// Upmix or downmix and resample the audio in |src_frame| to |dst_frame|.
+// Expects |dst_frame| to have its |num_channels_| and |sample_rate_hz_| set to
+// the desired values. Updates |samples_per_channel_| accordingly.
+//
+// On failure, returns -1 and copies |src_frame| to |dst_frame|.
+int RemixAndResample(const AudioFrame& src_frame,
+                     Resampler* resampler,
+                     AudioFrame* dst_frame);
+
+}  // namespace voe
+}  // namespace webrtc
+
+#endif  // VOICE_ENGINE_OUTPUT_MIXER_INTERNAL_H_
diff --git a/src/voice_engine/output_mixer_unittest.cc b/src/voice_engine/output_mixer_unittest.cc
new file mode 100644
index 0000000..fe678a0
--- /dev/null
+++ b/src/voice_engine/output_mixer_unittest.cc
@@ -0,0 +1,214 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <math.h>
+
+#include "gtest/gtest.h"
+
+#include "output_mixer.h"
+#include "output_mixer_internal.h"
+
+namespace webrtc {
+namespace voe {
+namespace {
+
+class OutputMixerTest : public ::testing::Test {
+ protected:
+  OutputMixerTest() {
+    src_frame_.sample_rate_hz_ = 16000;
+    src_frame_.samples_per_channel_ = src_frame_.sample_rate_hz_ / 100;
+    src_frame_.num_channels_ = 1;
+    dst_frame_ = src_frame_;
+    golden_frame_ = src_frame_;
+  }
+
+  void RunResampleTest(int src_channels, int src_sample_rate_hz,
+                       int dst_channels, int dst_sample_rate_hz);
+
+  Resampler resampler_;
+  AudioFrame src_frame_;
+  AudioFrame dst_frame_;
+  AudioFrame golden_frame_;
+};
+
+// Sets the signal value to increase by |data| with every sample. Floats are
+// used so non-integer values result in rounding error, but not an accumulating
+// error.
+void SetMonoFrame(AudioFrame* frame, float data, int sample_rate_hz) {
+  frame->num_channels_ = 1;
+  frame->sample_rate_hz_ = sample_rate_hz;
+  frame->samples_per_channel_ = sample_rate_hz / 100;
+  for (int i = 0; i < frame->samples_per_channel_; i++) {
+    frame->data_[i] = data * i;
+  }
+}
+
+// Keep the existing sample rate.
+void SetMonoFrame(AudioFrame* frame, float data) {
+  SetMonoFrame(frame, data, frame->sample_rate_hz_);
+}
+
+// Sets the signal value to increase by |left| and |right| with every sample in
+// each channel respectively.
+void SetStereoFrame(AudioFrame* frame, float left, float right,
+                    int sample_rate_hz) {
+  frame->num_channels_ = 2;
+  frame->sample_rate_hz_ = sample_rate_hz;
+  frame->samples_per_channel_ = sample_rate_hz / 100;
+  for (int i = 0; i < frame->samples_per_channel_; i++) {
+    frame->data_[i * 2] = left * i;
+    frame->data_[i * 2 + 1] = right * i;
+  }
+}
+
+// Keep the existing sample rate.
+void SetStereoFrame(AudioFrame* frame, float left, float right) {
+  SetStereoFrame(frame, left, right, frame->sample_rate_hz_);
+}
+
+void VerifyParams(const AudioFrame& ref_frame, const AudioFrame& test_frame) {
+  EXPECT_EQ(ref_frame.num_channels_, test_frame.num_channels_);
+  EXPECT_EQ(ref_frame.samples_per_channel_, test_frame.samples_per_channel_);
+  EXPECT_EQ(ref_frame.sample_rate_hz_, test_frame.sample_rate_hz_);
+}
+
+// Computes the best SNR based on the error between |ref_frame| and
+// |test_frame|. It allows for up to a 30 sample delay between the signals to
+// compensate for the resampling delay.
+float ComputeSNR(const AudioFrame& ref_frame, const AudioFrame& test_frame) {
+  VerifyParams(ref_frame, test_frame);
+  float best_snr = 0;
+  int best_delay = 0;
+  for (int delay = 0; delay < 30; delay++) {
+    float mse = 0;
+    float variance = 0;
+    for (int i = 0; i < ref_frame.samples_per_channel_ *
+        ref_frame.num_channels_ - delay; i++) {
+      int error = ref_frame.data_[i] - test_frame.data_[i + delay];
+      mse += error * error;
+      variance += ref_frame.data_[i] * ref_frame.data_[i];
+    }
+    float snr = 100;  // We assign 100 dB to the zero-error case.
+    if (mse > 0)
+      snr = 10 * log10(variance / mse);
+    if (snr > best_snr) {
+      best_snr = snr;
+      best_delay = delay;
+    }
+  }
+  printf("SNR=%.1f dB at delay=%d\n", best_snr, best_delay);
+  return best_snr;
+}
+
+void VerifyFramesAreEqual(const AudioFrame& ref_frame,
+                          const AudioFrame& test_frame) {
+  VerifyParams(ref_frame, test_frame);
+  for (int i = 0; i < ref_frame.samples_per_channel_ * ref_frame.num_channels_;
+      i++) {
+    EXPECT_EQ(ref_frame.data_[i], test_frame.data_[i]);
+  }
+}
+
+void OutputMixerTest::RunResampleTest(int src_channels,
+                                      int src_sample_rate_hz,
+                                      int dst_channels,
+                                      int dst_sample_rate_hz) {
+  Resampler resampler;  // Create a new one with every test.
+  const int16_t kSrcLeft = 60;  // Shouldn't overflow for any used sample rate.
+  const int16_t kSrcRight = 30;
+  const float kResamplingFactor = (1.0 * src_sample_rate_hz) /
+      dst_sample_rate_hz;
+  const float kDstLeft = kResamplingFactor * kSrcLeft;
+  const float kDstRight = kResamplingFactor * kSrcRight;
+  const float kDstMono = (kDstLeft + kDstRight) / 2;
+  if (src_channels == 1)
+    SetMonoFrame(&src_frame_, kSrcLeft, src_sample_rate_hz);
+  else
+    SetStereoFrame(&src_frame_, kSrcLeft, kSrcRight, src_sample_rate_hz);
+
+  if (dst_channels == 1) {
+    SetMonoFrame(&dst_frame_, 0, dst_sample_rate_hz);
+    if (src_channels == 1)
+      SetMonoFrame(&golden_frame_, kDstLeft, dst_sample_rate_hz);
+    else
+      SetMonoFrame(&golden_frame_, kDstMono, dst_sample_rate_hz);
+  } else {
+    SetStereoFrame(&dst_frame_, 0, 0, dst_sample_rate_hz);
+    if (src_channels == 1)
+      SetStereoFrame(&golden_frame_, kDstLeft, kDstLeft, dst_sample_rate_hz);
+    else
+      SetStereoFrame(&golden_frame_, kDstLeft, kDstRight, dst_sample_rate_hz);
+  }
+
+  printf("(%d, %d Hz) -> (%d, %d Hz) ",  // SNR reported on the same line later.
+      src_channels, src_sample_rate_hz, dst_channels, dst_sample_rate_hz);
+  EXPECT_EQ(0, RemixAndResample(src_frame_, &resampler, &dst_frame_));
+  EXPECT_GT(ComputeSNR(golden_frame_, dst_frame_), 40.0f);
+}
+
+TEST_F(OutputMixerTest, RemixAndResampleFailsWithBadSampleRate) {
+  SetMonoFrame(&dst_frame_, 10, 44100);
+  EXPECT_EQ(-1, RemixAndResample(src_frame_, &resampler_, &dst_frame_));
+  VerifyFramesAreEqual(src_frame_, dst_frame_);
+}
+
+TEST_F(OutputMixerTest, RemixAndResampleCopyFrameSucceeds) {
+  // Stereo -> stereo.
+  SetStereoFrame(&src_frame_, 10, 10);
+  SetStereoFrame(&dst_frame_, 0, 0);
+  EXPECT_EQ(0, RemixAndResample(src_frame_, &resampler_, &dst_frame_));
+  VerifyFramesAreEqual(src_frame_, dst_frame_);
+
+  // Mono -> mono.
+  SetMonoFrame(&src_frame_, 20);
+  SetMonoFrame(&dst_frame_, 0);
+  EXPECT_EQ(0, RemixAndResample(src_frame_, &resampler_, &dst_frame_));
+  VerifyFramesAreEqual(src_frame_, dst_frame_);
+}
+
+TEST_F(OutputMixerTest, RemixAndResampleMixingOnlySucceeds) {
+  // Stereo -> mono.
+  SetStereoFrame(&dst_frame_, 0, 0);
+  SetMonoFrame(&src_frame_, 10);
+  SetStereoFrame(&golden_frame_, 10, 10);
+  EXPECT_EQ(0, RemixAndResample(src_frame_, &resampler_, &dst_frame_));
+  VerifyFramesAreEqual(dst_frame_, golden_frame_);
+
+  // Mono -> stereo.
+  SetMonoFrame(&dst_frame_, 0);
+  SetStereoFrame(&src_frame_, 10, 20);
+  SetMonoFrame(&golden_frame_, 15);
+  EXPECT_EQ(0, RemixAndResample(src_frame_, &resampler_, &dst_frame_));
+  VerifyFramesAreEqual(golden_frame_, dst_frame_);
+}
+
+TEST_F(OutputMixerTest, RemixAndResampleSucceeds) {
+  // We don't attempt to be exhaustive here, but just get good coverage. Some
+  // combinations of rates will not be resampled, and some give an odd
+  // resampling factor which makes it more difficult to evaluate.
+  const int kSampleRates[] = {16000, 32000, 48000};
+  const int kSampleRatesSize = sizeof(kSampleRates) / sizeof(*kSampleRates);
+  const int kChannels[] = {1, 2};
+  const int kChannelsSize = sizeof(kChannels) / sizeof(*kChannels);
+  for (int src_rate = 0; src_rate < kSampleRatesSize; src_rate++) {
+    for (int dst_rate = 0; dst_rate < kSampleRatesSize; dst_rate++) {
+      for (int src_channel = 0; src_channel < kChannelsSize; src_channel++) {
+        for (int dst_channel = 0; dst_channel < kChannelsSize; dst_channel++) {
+          RunResampleTest(kChannels[src_channel], kSampleRates[src_rate],
+                          kChannels[dst_channel], kSampleRates[dst_rate]);
+        }
+      }
+    }
+  }
+}
+
+}  // namespace
+}  // namespace voe
+}  // namespace webrtc
diff --git a/src/voice_engine/shared_data.cc b/src/voice_engine/shared_data.cc
new file mode 100644
index 0000000..7bea1e0
--- /dev/null
+++ b/src/voice_engine/shared_data.cc
@@ -0,0 +1,125 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "shared_data.h"
+
+#include "audio_processing.h"
+#include "critical_section_wrapper.h"
+#include "channel.h"
+#include "output_mixer.h"
+#include "trace.h"
+#include "transmit_mixer.h"
+
+namespace webrtc {
+
+namespace voe {
+
+static WebRtc_Word32 _gInstanceCounter = 0;
+
+SharedData::SharedData() :
+    _instanceId(++_gInstanceCounter),
+    _apiCritPtr(CriticalSectionWrapper::CreateCriticalSection()),
+    _channelManager(_gInstanceCounter),
+    _engineStatistics(_gInstanceCounter),
+    _audioDevicePtr(NULL),
+    _audioProcessingModulePtr(NULL),
+    _moduleProcessThreadPtr(ProcessThread::CreateProcessThread()),
+    _externalRecording(false),
+    _externalPlayout(false)
+{
+    Trace::CreateTrace();
+    Trace::SetLevelFilter(WEBRTC_VOICE_ENGINE_DEFAULT_TRACE_FILTER);
+    if (OutputMixer::Create(_outputMixerPtr, _gInstanceCounter) == 0)
+    {
+        _outputMixerPtr->SetEngineInformation(_engineStatistics);
+    }
+    if (TransmitMixer::Create(_transmitMixerPtr, _gInstanceCounter) == 0)
+    {
+        _transmitMixerPtr->SetEngineInformation(*_moduleProcessThreadPtr,
+                                                _engineStatistics,
+                                                _channelManager);
+    }
+    _audioDeviceLayer = AudioDeviceModule::kPlatformDefaultAudio;
+}
+
+SharedData::~SharedData()
+{
+    OutputMixer::Destroy(_outputMixerPtr);
+    TransmitMixer::Destroy(_transmitMixerPtr);
+    if (_audioDevicePtr) {
+        _audioDevicePtr->Release();
+    }
+    AudioProcessing::Destroy(_audioProcessingModulePtr);
+    delete _apiCritPtr;
+    ProcessThread::DestroyProcessThread(_moduleProcessThreadPtr);
+    Trace::ReturnTrace();
+}
+
+void SharedData::set_audio_device(AudioDeviceModule* audio_device)
+{
+    // AddRef first in case the pointers are equal.
+    if (audio_device)
+      audio_device->AddRef();
+    if (_audioDevicePtr)
+      _audioDevicePtr->Release();
+    _audioDevicePtr = audio_device;
+}
+
+void SharedData::set_audio_processing(AudioProcessing* audio_processing) {
+    if (_audioProcessingModulePtr)
+      AudioProcessing::Destroy(_audioProcessingModulePtr);
+    _audioProcessingModulePtr = audio_processing;
+}
+
+WebRtc_UWord16 SharedData::NumOfSendingChannels()
+{
+    WebRtc_Word32 numOfChannels = _channelManager.NumOfChannels();
+    if (numOfChannels <= 0)
+    {
+        return 0;
+    }
+
+    WebRtc_UWord16 nChannelsSending(0);
+    WebRtc_Word32* channelsArray = new WebRtc_Word32[numOfChannels];
+
+    _channelManager.GetChannelIds(channelsArray, numOfChannels);
+    for (int i = 0; i < numOfChannels; i++)
+    {
+        voe::ScopedChannel sc(_channelManager, channelsArray[i]);
+        Channel* chPtr = sc.ChannelPtr();
+        if (chPtr)
+        {
+            if (chPtr->Sending())
+            {
+                nChannelsSending++;
+            }
+        }
+    }
+    delete [] channelsArray;
+    return nChannelsSending;
+}
+
+void SharedData::SetLastError(const WebRtc_Word32 error) const {
+  _engineStatistics.SetLastError(error);
+}
+
+void SharedData::SetLastError(const WebRtc_Word32 error,
+                              const TraceLevel level) const {
+  _engineStatistics.SetLastError(error, level);
+}
+
+void SharedData::SetLastError(const WebRtc_Word32 error, const TraceLevel level,
+                              const char* msg) const {
+  _engineStatistics.SetLastError(error, level, msg);
+}
+
+}  //  namespace voe
+
+}  //  namespace webrtc
diff --git a/src/voice_engine/shared_data.h b/src/voice_engine/shared_data.h
new file mode 100644
index 0000000..191e369
--- /dev/null
+++ b/src/voice_engine/shared_data.h
@@ -0,0 +1,90 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_SHARED_DATA_H
+#define WEBRTC_VOICE_ENGINE_SHARED_DATA_H
+
+#include "voice_engine_defines.h"
+
+#include "channel_manager.h"
+#include "statistics.h"
+#include "process_thread.h"
+
+#include "audio_device.h"
+#include "audio_processing.h"
+
+class ProcessThread;
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+namespace voe {
+
+class TransmitMixer;
+class OutputMixer;
+
+class SharedData
+{
+public:
+    // Public accessors.
+    WebRtc_UWord32 instance_id() const { return _instanceId; }
+    Statistics& statistics() { return _engineStatistics; }
+    ChannelManager& channel_manager() { return _channelManager; }
+    AudioDeviceModule* audio_device() { return _audioDevicePtr; }
+    void set_audio_device(AudioDeviceModule* audio_device);
+    AudioProcessing* audio_processing() { return _audioProcessingModulePtr; }
+    void set_audio_processing(AudioProcessing* audio_processing);
+    TransmitMixer* transmit_mixer() { return _transmitMixerPtr; }
+    OutputMixer* output_mixer() { return _outputMixerPtr; }
+    CriticalSectionWrapper* crit_sec() { return _apiCritPtr; }
+    bool ext_recording() const { return _externalRecording; }
+    void set_ext_recording(bool value) { _externalRecording = value; }
+    bool ext_playout() const { return _externalPlayout; }
+    void set_ext_playout(bool value) { _externalPlayout = value; }
+    ProcessThread* process_thread() { return _moduleProcessThreadPtr; }
+    AudioDeviceModule::AudioLayer audio_device_layer() const {
+      return _audioDeviceLayer;
+    }
+    void set_audio_device_layer(AudioDeviceModule::AudioLayer layer) {
+      _audioDeviceLayer = layer;
+    }
+
+    WebRtc_UWord16 NumOfSendingChannels();
+
+    // Convenience methods for calling statistics().SetLastError().
+    void SetLastError(const WebRtc_Word32 error) const;
+    void SetLastError(const WebRtc_Word32 error, const TraceLevel level) const;
+    void SetLastError(const WebRtc_Word32 error, const TraceLevel level,
+                      const char* msg) const;
+
+protected:
+    const WebRtc_UWord32 _instanceId;
+    CriticalSectionWrapper* _apiCritPtr;
+    ChannelManager _channelManager;
+    Statistics _engineStatistics;
+    AudioDeviceModule* _audioDevicePtr;
+    OutputMixer* _outputMixerPtr;
+    TransmitMixer* _transmitMixerPtr;
+    AudioProcessing* _audioProcessingModulePtr;
+    ProcessThread* _moduleProcessThreadPtr;
+
+    bool _externalRecording;
+    bool _externalPlayout;
+
+    AudioDeviceModule::AudioLayer _audioDeviceLayer;
+
+    SharedData();
+    virtual ~SharedData();
+};
+
+} //  namespace voe
+
+} //  namespace webrtc
+#endif // WEBRTC_VOICE_ENGINE_SHARED_DATA_H
diff --git a/src/voice_engine/statistics.cc b/src/voice_engine/statistics.cc
new file mode 100644
index 0000000..4f1bc79
--- /dev/null
+++ b/src/voice_engine/statistics.cc
@@ -0,0 +1,99 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cassert>
+#include <stdio.h>
+
+#include "statistics.h"
+
+#include "trace.h"
+#include "critical_section_wrapper.h"
+
+namespace webrtc {
+
+namespace voe {
+
+Statistics::Statistics(const WebRtc_UWord32 instanceId) :
+    _critPtr(CriticalSectionWrapper::CreateCriticalSection()),
+    _instanceId(instanceId),
+    _lastError(0),
+    _isInitialized(false)
+{
+}
+	
+Statistics::~Statistics()
+{
+    if (_critPtr)
+    {
+        delete _critPtr;
+        _critPtr = NULL;
+    }
+}
+
+WebRtc_Word32 Statistics::SetInitialized()
+{
+    _isInitialized = true;
+    return 0;
+}
+
+WebRtc_Word32 Statistics::SetUnInitialized()
+{
+    _isInitialized = false;
+    return 0;
+}
+
+bool Statistics::Initialized() const
+{
+    return _isInitialized;
+}
+
+WebRtc_Word32 Statistics::SetLastError(const WebRtc_Word32 error) const
+{
+    CriticalSectionScoped cs(_critPtr);
+    _lastError = error;
+    return 0;
+}
+
+WebRtc_Word32 Statistics::SetLastError(const WebRtc_Word32 error,
+                                       const TraceLevel level) const
+{
+    CriticalSectionScoped cs(_critPtr);
+    _lastError = error;
+    WEBRTC_TRACE(level, kTraceVoice, VoEId(_instanceId,-1),
+                 "error code is set to %d",
+                 _lastError);
+    return 0;
+}
+
+WebRtc_Word32 Statistics::SetLastError(
+    const WebRtc_Word32 error,
+    const TraceLevel level, const char* msg) const
+{
+    CriticalSectionScoped cs(_critPtr);
+    char traceMessage[KTraceMaxMessageSize];
+    assert(strlen(msg) < KTraceMaxMessageSize);
+    _lastError = error;
+    sprintf(traceMessage, "%s (error=%d)", msg, error);
+    WEBRTC_TRACE(level, kTraceVoice, VoEId(_instanceId,-1), "%s",
+                 traceMessage);
+    return 0;
+}
+
+WebRtc_Word32 Statistics::LastError() const
+{
+    CriticalSectionScoped cs(_critPtr);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId,-1),
+               "LastError() => %d", _lastError);
+    return _lastError;
+}
+
+}  //  namespace voe
+
+}  //  namespace webrtc
diff --git a/src/voice_engine/statistics.h b/src/voice_engine/statistics.h
new file mode 100644
index 0000000..fc0bf8c
--- /dev/null
+++ b/src/voice_engine/statistics.h
@@ -0,0 +1,54 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_STATISTICS_H
+#define WEBRTC_VOICE_ENGINE_STATISTICS_H
+
+#include "common_types.h"
+#include "typedefs.h"
+#include "voice_engine_defines.h"
+#include "voe_errors.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+
+namespace voe {
+
+class Statistics
+{
+ public:
+    enum {KTraceMaxMessageSize = 256};
+ public:
+    Statistics(const WebRtc_UWord32 instanceId);
+    ~Statistics();
+
+    WebRtc_Word32 SetInitialized();
+    WebRtc_Word32 SetUnInitialized();
+    bool Initialized() const;
+    WebRtc_Word32 SetLastError(const WebRtc_Word32 error) const;
+    WebRtc_Word32 SetLastError(const WebRtc_Word32 error,
+                               const TraceLevel level) const;
+    WebRtc_Word32 SetLastError(const WebRtc_Word32 error,
+                               const TraceLevel level,
+                               const char* msg) const;
+    WebRtc_Word32 LastError() const;
+
+ private:
+    CriticalSectionWrapper* _critPtr;
+    const WebRtc_UWord32 _instanceId;
+    mutable WebRtc_Word32 _lastError;
+    bool _isInitialized;
+};
+
+}  // namespace voe
+
+}  //  namespace webrtc
+
+#endif // WEBRTC_VOICE_ENGINE_STATISTICS_H
diff --git a/src/voice_engine/test/android/android_test/.classpath b/src/voice_engine/test/android/android_test/.classpath
new file mode 100644
index 0000000..86a15c9
--- /dev/null
+++ b/src/voice_engine/test/android/android_test/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>

+<classpath>

+	<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>

+	<classpathentry kind="src" path="src"/>

+	<classpathentry kind="src" path="gen"/>

+	<classpathentry kind="output" path="bin"/>

+</classpath>

diff --git a/src/voice_engine/test/android/android_test/.project b/src/voice_engine/test/android/android_test/.project
new file mode 100644
index 0000000..990e2f5
--- /dev/null
+++ b/src/voice_engine/test/android/android_test/.project
@@ -0,0 +1,33 @@
+<?xml version="1.0" encoding="UTF-8"?>

+<projectDescription>

+	<name>AndroidTest</name>

+	<comment></comment>

+	<projects>

+	</projects>

+	<buildSpec>

+		<buildCommand>

+			<name>com.android.ide.eclipse.adt.ResourceManagerBuilder</name>

+			<arguments>

+			</arguments>

+		</buildCommand>

+		<buildCommand>

+			<name>com.android.ide.eclipse.adt.PreCompilerBuilder</name>

+			<arguments>

+			</arguments>

+		</buildCommand>

+		<buildCommand>

+			<name>org.eclipse.jdt.core.javabuilder</name>

+			<arguments>

+			</arguments>

+		</buildCommand>

+		<buildCommand>

+			<name>com.android.ide.eclipse.adt.ApkBuilder</name>

+			<arguments>

+			</arguments>

+		</buildCommand>

+	</buildSpec>

+	<natures>

+		<nature>com.android.ide.eclipse.adt.AndroidNature</nature>

+		<nature>org.eclipse.jdt.core.javanature</nature>

+	</natures>

+</projectDescription>

diff --git a/src/voice_engine/test/android/android_test/AndroidManifest.xml b/src/voice_engine/test/android/android_test/AndroidManifest.xml
new file mode 100644
index 0000000..a614f8d
--- /dev/null
+++ b/src/voice_engine/test/android/android_test/AndroidManifest.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. -->
+<!--                                                                     -->
+<!-- Use of this source code is governed by a BSD-style license          -->
+<!-- that can be found in the LICENSE file in the root of the source     -->
+<!-- tree. An additional intellectual property rights grant can be found -->
+<!-- in the file PATENTS.  All contributing project authors may          -->
+<!-- be found in the AUTHORS file in the root of the source tree.        -->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+	  android:versionCode="1"
+	  android:versionName="1.0" package="org.webrtc.voiceengine.test">
+  <application android:icon="@drawable/icon"
+	       android:label="@string/app_name"
+	       android:debuggable="true">
+    <activity android:name=".AndroidTest"
+              android:label="@string/app_name"
+              android:screenOrientation="portrait">
+      <intent-filter>
+        <action android:name="android.intent.action.MAIN" />
+        <category android:name="android.intent.category.LAUNCHER" />
+      </intent-filter>
+    </activity>
+
+  </application>
+  <uses-sdk android:minSdkVersion="3" />
+
+  <uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
+  <uses-permission android:name="android.permission.RECORD_AUDIO" />
+  <uses-permission android:name="android.permission.INTERNET" />
+
+</manifest> 
diff --git a/src/voice_engine/test/android/android_test/default.properties b/src/voice_engine/test/android/android_test/default.properties
new file mode 100644
index 0000000..6ed608e
--- /dev/null
+++ b/src/voice_engine/test/android/android_test/default.properties
@@ -0,0 +1,11 @@
+# This file is automatically generated by Android Tools.

+# Do not modify this file -- YOUR CHANGES WILL BE ERASED!

+# 

+# This file must be checked in Version Control Systems.

+# 

+# To customize properties used by the Ant build system use,

+# "build.properties", and override values to adapt the script to your

+# project structure.

+

+# Project target, OpenSL ES requires API level 9 

+target=android-9

diff --git a/src/voice_engine/test/android/android_test/gen/org/webrtc/voiceengine/test/R.java b/src/voice_engine/test/android/android_test/gen/org/webrtc/voiceengine/test/R.java
new file mode 100644
index 0000000..ec8f5b4
--- /dev/null
+++ b/src/voice_engine/test/android/android_test/gen/org/webrtc/voiceengine/test/R.java
@@ -0,0 +1,30 @@
+/* AUTO-GENERATED FILE.  DO NOT MODIFY.

+ *

+ * This class was automatically generated by the

+ * aapt tool from the resource data it found.  It

+ * should not be modified by hand.

+ */

+

+package org.webrtc.voiceengine.test;

+

+public final class R {

+    public static final class attr {

+    }

+    public static final class drawable {

+        public static final int icon=0x7f020000;

+    }

+    public static final class id {

+        public static final int Button01=0x7f050002;

+        public static final int Button02=0x7f050005;

+        public static final int EditText01=0x7f050001;

+        public static final int Spinner01=0x7f050003;

+        public static final int Spinner02=0x7f050004;

+        public static final int TextView01=0x7f050000;

+    }

+    public static final class layout {

+        public static final int main=0x7f030000;

+    }

+    public static final class string {

+        public static final int app_name=0x7f040000;

+    }

+}

diff --git a/src/voice_engine/test/android/android_test/jni/Application.mk b/src/voice_engine/test/android/android_test/jni/Application.mk
new file mode 100644
index 0000000..03c35ac
--- /dev/null
+++ b/src/voice_engine/test/android/android_test/jni/Application.mk
@@ -0,0 +1,11 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# Build both ARMv5TE and ARMv7-A machine code.
+APP_ABI := armeabi armeabi-v7a x86  
+APP_STL := stlport_shared
diff --git a/src/voice_engine/test/android/android_test/jni/android_test.cc b/src/voice_engine/test/android/android_test/jni/android_test.cc
new file mode 100644
index 0000000..8c5fdff
--- /dev/null
+++ b/src/voice_engine/test/android/android_test/jni/android_test.cc
@@ -0,0 +1,1554 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <unistd.h>
+#include <android/log.h>
+
+#include "org_webrtc_voiceengine_test_AndroidTest.h"
+
+#include "thread_wrapper.h"
+
+#include "voe_base.h"
+#include "voe_codec.h"
+#include "voe_file.h"
+#include "voe_network.h"
+#include "voe_audio_processing.h"
+#include "voe_volume_control.h"
+#include "voe_hardware.h"
+#include "voe_rtp_rtcp.h"
+#include "voe_encryption.h"
+
+#include "voe_test_interface.h"
+
+//#define USE_SRTP
+//#define INIT_FROM_THREAD
+//#define START_CALL_FROM_THREAD
+
+#define WEBRTC_LOG_TAG "*WEBRTCN*" // As in WEBRTC Native...
+#define VALIDATE_BASE_POINTER \
+    if (!veData1.base) \
+    { \
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
+                            "Base pointer doesn't exist"); \
+        return -1; \
+    }
+#define VALIDATE_CODEC_POINTER \
+    if (!veData1.codec) \
+    { \
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
+                            "Codec pointer doesn't exist"); \
+        return -1; \
+    }
+#define VALIDATE_FILE_POINTER \
+    if (!veData1.file) \
+    { \
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
+                            "File pointer doesn't exist"); \
+        return -1; \
+    }
+#define VALIDATE_NETWORK_POINTER \
+    if (!veData1.netw) \
+    { \
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
+                            "Network pointer doesn't exist"); \
+        return -1; \
+    }
+#define VALIDATE_APM_POINTER \
+    if (!veData1.codec) \
+    { \
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
+                            "Apm pointer doesn't exist"); \
+        return -1; \
+    }
+#define VALIDATE_VOLUME_POINTER \
+    if (!veData1.volume) \
+    { \
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
+                            "Volume pointer doesn't exist"); \
+        return -1; \
+    }
+#define VALIDATE_HARDWARE_POINTER \
+    if (!veData1.hardware) \
+    { \
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
+                            "Hardware pointer doesn't exist"); \
+        return -1; \
+    }
+#define VALIDATE_RTP_RTCP_POINTER \
+    if (!veData1.rtp_rtcp) \
+    { \
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
+                            "RTP / RTCP pointer doesn't exist"); \
+        return -1; \
+    }
+#define VALIDATE_ENCRYPT_POINTER \
+    if (!veData1.encrypt) \
+    { \
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, \
+                            "Encrypt pointer doesn't exist"); \
+        return -1; \
+    }
+
+// Register functions in JNI_OnLoad()
+// How do we ensure that VoE is deleted? JNI_OnUnload?
+// What happens if class is unloaded? When loaded again, NativeInit will be
+// called again. Keep what we have?
+// Should we do something in JNI_OnUnload?
+// General design: create a class or keep global struct with "C" functions?
+// Otherwise make sure symbols are as unique as possible.
+
+// TestType enumerator
+enum TestType
+{
+  Invalid = -1,
+  Standard = 0,
+  Extended = 1,
+  Stress   = 2,
+  Unit     = 3,
+  CPU      = 4
+};
+
+// ExtendedSelection enumerator
+enum ExtendedSelection
+{
+   XSEL_Invalid = -1,
+   XSEL_None = 0,
+   XSEL_All,
+   XSEL_Base,
+   XSEL_CallReport,
+   XSEL_Codec,
+   XSEL_DTMF,
+   XSEL_Encryption,
+   XSEL_ExternalMedia,
+   XSEL_File,
+   XSEL_Hardware,
+   XSEL_NetEqStats,
+   XSEL_Network,
+   XSEL_PTT,
+   XSEL_RTP_RTCP,
+   XSEL_VideoSync,
+   XSEL_VideoSyncExtended,
+   XSEL_VolumeControl,
+   XSEL_VQE,
+   XSEL_APM,
+   XSEL_VQMon
+};
+
+using namespace webrtc;
+
+class my_transportation;
+
+// VoiceEngine data struct
+typedef struct
+{
+    // VoiceEngine
+    VoiceEngine* ve;
+    // Sub-APIs
+    VoEBase* base;
+    VoECodec* codec;
+    VoEFile* file;
+    VoENetwork* netw;
+    VoEAudioProcessing* apm;
+    VoEVolumeControl* volume;
+    VoEHardware* hardware;
+    VoERTP_RTCP* rtp_rtcp;
+    VoEEncryption* encrypt;
+    // Other
+    my_transportation* extTrans;
+    JavaVM* jvm;
+} VoiceEngineData;
+
+// my_transportation is used when useExtTrans is enabled
+class my_transportation : public Transport
+{
+ public:
+  my_transportation(VoENetwork * network) :
+      netw(network) {
+  }
+
+  int SendPacket(int channel,const void *data,int len);
+  int SendRTCPPacket(int channel, const void *data, int len);
+ private:
+  VoENetwork * netw;
+};
+
+int my_transportation::SendPacket(int channel,const void *data,int len)
+{
+  netw->ReceivedRTPPacket(channel, data, len);
+  return len;
+}
+
+int my_transportation::SendRTCPPacket(int channel, const void *data, int len)
+{
+  netw->ReceivedRTCPPacket(channel, data, len);
+  return len;
+}
+
+//Global variables visible in this file
+static VoiceEngineData veData1;
+static VoiceEngineData veData2;
+
+// "Local" functions (i.e. not Java accessible)
+static bool GetSubApis(VoiceEngineData &veData);
+static bool ReleaseSubApis(VoiceEngineData &veData);
+
+class ThreadTest
+{
+public:
+    ThreadTest();
+    ~ThreadTest();
+    int RunTest();
+    int CloseTest();
+private:
+    static bool Run(void* ptr);
+    bool Process();
+private:
+    ThreadWrapper* _thread;
+};
+
+ThreadTest::~ThreadTest()
+{
+    if (_thread)
+    {
+        _thread->SetNotAlive();
+        if (_thread->Stop())
+        {
+            delete _thread;
+            _thread = NULL;
+        }
+    }
+}
+
+ThreadTest::ThreadTest() :
+    _thread(NULL)
+{
+    _thread = ThreadWrapper::CreateThread(Run, this, kNormalPriority,
+                                          "ThreadTest thread");
+}
+
+bool ThreadTest::Run(void* ptr)
+{
+    return static_cast<ThreadTest*> (ptr)->Process();
+}
+
+bool ThreadTest::Process()
+{
+    // Attach this thread to JVM
+    /*JNIEnv* env = NULL;
+     jint res = veData1.jvm->AttachCurrentThread(&env, NULL);
+     char msg[32];
+     sprintf(msg, "res=%d, env=%d", res, env);
+     __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG, msg);*/
+
+#ifdef INIT_FROM_THREAD
+    VALIDATE_BASE_POINTER;
+    veData1.base->Init();
+#endif
+
+#ifdef START_CALL_FROM_THREAD
+    // receiving instance
+    veData2.ve = VoiceEngine::Create();
+    GetSubApis(veData2);
+    veData2.base->Init();
+    veData2.base->CreateChannel();
+    if(veData2.base->SetLocalReceiver(0, 1234) < 0)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                "set local receiver 2 failed");
+    }
+    veData2.hardware->SetLoudspeakerStatus(false);
+    veData2.volume->SetSpeakerVolume(204);
+    veData2.base->StartReceive(0);
+    if(veData2.base->StartPlayout(0) < 0)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                "start playout failed");
+    }
+
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+            "receiving instance started from thread");
+
+    // sending instance
+    veData1.ve = VoiceEngine::Create();
+    GetSubApis(veData1);
+    veData1.base->Init();
+    if(veData1.base->CreateChannel() < 0)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                "create channel failed");
+    }
+    if(veData1.base->SetLocalReceiver(0, 1256) < 0)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                "set local receiver failed");
+    }
+    if(veData1.base->SetSendDestination(0, 1234, "127.0.0.1") < 0)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                "set send destination failed");
+    }
+    if(veData1.base->StartSend(0) < 0)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                "start send failed");
+    }
+
+    __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+            "sending instance started from thread");
+#endif
+
+    _thread->SetNotAlive();
+    _thread->Stop();
+
+    //res = veData1.jvm->DetachCurrentThread();
+
+    return true;
+}
+
+int ThreadTest::RunTest()
+{
+    if (_thread)
+    {
+        unsigned int id;
+        _thread->Start(id);
+    }
+    return 0;
+}
+
+int ThreadTest::CloseTest()
+{
+    VALIDATE_BASE_POINTER
+
+    veData1.base->DeleteChannel(0);
+    veData2.base->DeleteChannel(0);
+    veData1.base->Terminate();
+    veData2.base->Terminate();
+
+    // Release sub-APIs
+    ReleaseSubApis(veData1);
+    ReleaseSubApis(veData2);
+
+    // Delete
+    VoiceEngine::Delete(veData1.ve);
+    VoiceEngine::Delete(veData2.ve);
+    veData2.ve = NULL;
+    veData2.ve = NULL;
+
+    return 0;
+}
+
+ThreadTest threadTest;
+
+//////////////////////////////////////////////////////////////////
+// General functions
+//////////////////////////////////////////////////////////////////
+
+/////////////////////////////////////////////
+// JNI_OnLoad
+//
+jint JNI_OnLoad(JavaVM* vm, void* /*reserved*/)
+{
+    if (!vm)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "JNI_OnLoad did not receive a valid VM pointer");
+        return -1;
+    }
+
+    // Get JNI
+    JNIEnv* env;
+    if (JNI_OK != vm->GetEnv(reinterpret_cast<void**> (&env),
+                             JNI_VERSION_1_4))
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "JNI_OnLoad could not get JNI env");
+        return -1;
+    }
+
+    // Get class to register the native functions with
+    // jclass regClass = env->FindClass("webrtc/android/AndroidTest");
+    // if (!regClass) {
+    // return -1; // Exception thrown
+    // }
+
+    // Register native functions
+    // JNINativeMethod methods[1];
+    // methods[0].name = NULL;
+    // methods[0].signature = NULL;
+    // methods[0].fnPtr = NULL;
+    // if (JNI_OK != env->RegisterNatives(regClass, methods, 1))
+    // {
+    // return -1;
+    // }
+
+    // Init VoiceEngine data
+    memset(&veData1, 0, sizeof(veData1));
+    memset(&veData2, 0, sizeof(veData2));
+
+    // Store the JVM
+    veData1.jvm = vm;
+    veData2.jvm = vm;
+
+    return JNI_VERSION_1_4;
+}
+
+/////////////////////////////////////////////
+// Native initialization
+//
+JNIEXPORT jboolean JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_NativeInit(
+        JNIEnv * env,
+        jclass)
+{
+    // Look up and cache any interesting class, field and method IDs for
+    // any used java class here
+
+    return true;
+}
+
+/////////////////////////////////////////////
+// Run auto standard test
+//
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_RunAutoTest(
+        JNIEnv *env,
+        jobject context,
+        jint testType,
+        jint extendedSel)
+{
+    TestType tType(Invalid);
+
+    switch (testType)
+    {
+        case 0:
+            return 0;
+        case 1:
+            tType = Standard;
+            break;
+        case 2:
+            tType = Extended;
+            break;
+        case 3:
+            tType = Stress;
+            break;
+        case 4:
+            tType = Unit;
+            break;
+        default:
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "RunAutoTest - Invalid TestType");
+            return -1;
+    }
+
+    ExtendedSelection xsel(XSEL_Invalid);
+
+    switch (extendedSel)
+    {
+        case 0:
+            xsel = XSEL_None;
+            break;
+        case 1:
+            xsel = XSEL_All;
+            break;
+        case 2:
+            xsel = XSEL_Base;
+            break;
+        case 3:
+            xsel = XSEL_CallReport;
+            break;
+        case 4:
+            xsel = XSEL_Codec;
+            break;
+        case 5:
+            xsel = XSEL_DTMF;
+            break;
+        case 6:
+            xsel = XSEL_Encryption;
+            break;
+        case 7:
+            xsel = XSEL_ExternalMedia;
+            break;
+        case 8:
+            xsel = XSEL_File;
+            break;
+        case 9:
+            xsel = XSEL_Hardware;
+            break;
+        case 10:
+            xsel = XSEL_NetEqStats;
+            break;
+        case 11:
+            xsel = XSEL_Network;
+            break;
+        case 12:
+            xsel = XSEL_PTT;
+            break;
+        case 13:
+            xsel = XSEL_RTP_RTCP;
+            break;
+        case 14:
+            xsel = XSEL_VideoSync;
+            break;
+        case 15:
+            xsel = XSEL_VideoSyncExtended;
+            break;
+        case 16:
+            xsel = XSEL_VolumeControl;
+            break;
+        case 17:
+            xsel = XSEL_APM;
+            break;
+        case 18:
+            xsel = XSEL_VQMon;
+            break;
+        default:
+            xsel = XSEL_Invalid;
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "RunAutoTest - Invalid extendedType");
+            return -1;
+    }
+
+    // Set instance independent Java objects
+    VoiceEngine::SetAndroidObjects(veData1.jvm, env, context);
+
+    // Call voe test interface function
+    // TODO(leozwang) add autotest setAndroidObjects(veData1.jvm, context);
+    // jint retVal = runAutoTest(tType, xsel);
+
+    // Clear instance independent Java objects
+    VoiceEngine::SetAndroidObjects(NULL, NULL, NULL);
+
+    return 0;
+}
+
+//////////////////////////////////////////////////////////////////
+// VoiceEngine API wrapper functions
+//////////////////////////////////////////////////////////////////
+
+/////////////////////////////////////////////
+// Create VoiceEngine instance
+//
+JNIEXPORT jboolean JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_Create(
+        JNIEnv *env,
+        jobject context)
+{
+    // Check if already created
+    if (veData1.ve)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "VoE already created");
+        return false;
+    }
+
+    // Set instance independent Java objects
+    VoiceEngine::SetAndroidObjects(veData1.jvm, env, context);
+
+#ifdef START_CALL_FROM_THREAD
+    threadTest.RunTest();
+#else
+    // Create
+    veData1.ve = VoiceEngine::Create();
+    if (!veData1.ve)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Create VoE failed");
+        return false;
+    }
+
+    // Get sub-APIs
+    if (!GetSubApis(veData1))
+    {
+        // If not OK, release all sub-APIs and delete VoE
+        ReleaseSubApis(veData1);
+        if (!VoiceEngine::Delete(veData1.ve))
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Delete VoE failed");
+        }
+        return false;
+    }
+#endif
+
+    return true;
+}
+
+/////////////////////////////////////////////
+// Delete VoiceEngine instance
+//
+JNIEXPORT jboolean JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_Delete(
+        JNIEnv *,
+        jobject)
+{
+#ifdef START_CALL_FROM_THREAD
+    threadTest.CloseTest();
+#else
+    // Check if exists
+    if (!veData1.ve)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "VoE does not exist");
+        return false;
+    }
+
+    // Release sub-APIs
+    ReleaseSubApis(veData1);
+
+    // Delete
+    if (!VoiceEngine::Delete(veData1.ve))
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Delete VoE failed");
+        return false;
+    }
+
+    veData1.ve = NULL;
+#endif
+
+    // Clear instance independent Java objects
+    VoiceEngine::SetAndroidObjects(NULL, NULL, NULL);
+
+    return true;
+}
+
+/////////////////////////////////////////////
+// [Base] Initialize VoiceEngine
+//
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_Init(
+        JNIEnv *,
+        jobject,
+        jboolean enableTrace,
+        jboolean useExtTrans)
+{
+    VALIDATE_BASE_POINTER;
+
+    if (enableTrace)
+    {
+        if (0 != VoiceEngine::SetTraceFile("/sdcard/trace.txt"))
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Could not enable trace");
+        }
+        if (0 != VoiceEngine::SetTraceFilter(kTraceAll))
+        {
+            __android_log_write(ANDROID_LOG_WARN, WEBRTC_LOG_TAG,
+                                "Could not set trace filter");
+        }
+    }
+
+    if (useExtTrans)
+    {
+        VALIDATE_NETWORK_POINTER;
+        veData1.extTrans = new my_transportation(veData1.netw);
+    }
+
+    int retVal = 0;
+#ifdef INIT_FROM_THREAD
+    threadTest.RunTest();
+    usleep(200000);
+#else
+    retVal = veData1.base->Init();
+#endif
+    return retVal;
+}
+
+/////////////////////////////////////////////
+// [Base] Terminate VoiceEngine
+//
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_Terminate(
+        JNIEnv *,
+        jobject)
+{
+    VALIDATE_BASE_POINTER;
+
+    jint retVal = veData1.base->Terminate();
+
+    delete veData1.extTrans;
+    veData1.extTrans = NULL;
+
+    return retVal;
+}
+
+/////////////////////////////////////////////
+// [Base] Create channel
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_CreateChannel(
+        JNIEnv *,
+        jobject)
+{
+    VALIDATE_BASE_POINTER;
+    jint channel = veData1.base->CreateChannel();
+
+    if (veData1.extTrans)
+    {
+        VALIDATE_NETWORK_POINTER;
+        __android_log_print(ANDROID_LOG_DEBUG, WEBRTC_LOG_TAG,
+                            "Enabling external transport on channel %d",
+                            channel);
+        if (veData1.netw->RegisterExternalTransport(channel, *veData1.extTrans)
+                < 0)
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Could not set external transport");
+            return -1;
+        }
+    }
+
+    return channel;
+}
+
+/////////////////////////////////////////////
+// [Base] Delete channel
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_DeleteChannel(
+        JNIEnv *,
+        jobject,
+        jint channel)
+{
+    VALIDATE_BASE_POINTER;
+    return veData1.base->DeleteChannel(channel);
+}
+
+/////////////////////////////////////////////
+// [Base] SetLocalReceiver
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_SetLocalReceiver(
+        JNIEnv *,
+        jobject,
+        jint channel,
+        jint port)
+{
+    VALIDATE_BASE_POINTER;
+    return veData1.base->SetLocalReceiver(channel, port);
+}
+
+/////////////////////////////////////////////
+// [Base] SetSendDestination
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_SetSendDestination(
+        JNIEnv *env,
+        jobject,
+        jint channel,
+        jint port,
+        jstring ipaddr)
+{
+    VALIDATE_BASE_POINTER;
+
+    const char* ipaddrNative = env->GetStringUTFChars(ipaddr, NULL);
+    if (!ipaddrNative)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Could not get UTF string");
+        return -1;
+    }
+
+    jint retVal = veData1.base->SetSendDestination(channel, port, ipaddrNative);
+
+    env->ReleaseStringUTFChars(ipaddr, ipaddrNative);
+
+    return retVal;
+}
+
+/////////////////////////////////////////////
+// [Base] StartListen
+//
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StartListen(
+        JNIEnv *,
+        jobject,
+        jint channel)
+{
+#ifdef USE_SRTP
+    VALIDATE_ENCRYPT_POINTER;
+    bool useForRTCP = false;
+    if (veData1.encrypt->EnableSRTPReceive(
+                    channel,CIPHER_AES_128_COUNTER_MODE,30,AUTH_HMAC_SHA1,
+                    16,4, ENCRYPTION_AND_AUTHENTICATION,
+                    (unsigned char*)nikkey, useForRTCP) != 0)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                "Failed to enable SRTP receive");
+        return -1;
+    }
+#endif
+
+    VALIDATE_BASE_POINTER;
+    int retVal = veData1.base->StartReceive(channel);
+
+    return retVal;
+}
+
+/////////////////////////////////////////////
+// [Base] Start playout
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_StartPlayout(
+        JNIEnv *,
+        jobject,
+        jint channel)
+{
+    VALIDATE_BASE_POINTER;
+    int retVal = veData1.base->StartPlayout(channel);
+
+    return retVal;
+}
+
+/////////////////////////////////////////////
+// [Base] Start send
+//
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StartSend(
+        JNIEnv *,
+        jobject,
+        jint channel)
+{
+    /*    int dscp(0), serviceType(-1), overrideDscp(0), res(0);
+     bool gqosEnabled(false), useSetSockOpt(false);
+
+     if (veData1.netw->SetSendTOS(channel, 13, useSetSockOpt) != 0)
+     {
+     __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+         "Failed to set TOS");
+     return -1;
+     }
+
+     res = veData1.netw->GetSendTOS(channel, dscp, useSetSockOpt);
+     if (res != 0 || dscp != 13 || useSetSockOpt != true)
+     {
+     __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+         "Failed to get TOS");
+     return -1;
+     } */
+
+    /* if (veData1.rtp_rtcp->SetFECStatus(channel, 1) != 0)
+     {
+     __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+         "Failed to enable FEC");
+     return -1;
+     } */
+#ifdef USE_SRTP
+    VALIDATE_ENCRYPT_POINTER;
+    bool useForRTCP = false;
+    if (veData1.encrypt->EnableSRTPSend(
+                    channel,CIPHER_AES_128_COUNTER_MODE,30,AUTH_HMAC_SHA1,
+                    16,4, ENCRYPTION_AND_AUTHENTICATION,
+                    (unsigned char*)nikkey, useForRTCP) != 0)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                "Failed to enable SRTP send");
+        return -1;
+    }
+#endif
+
+    VALIDATE_BASE_POINTER;
+    int retVal = veData1.base->StartSend(channel);
+
+    return retVal;
+}
+
+/////////////////////////////////////////////
+// [Base] Stop listen
+//
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StopListen(
+        JNIEnv *,
+        jobject,
+        jint channel)
+{
+#ifdef USE_SRTP
+    VALIDATE_ENCRYPT_POINTER;
+    if (veData1.encrypt->DisableSRTPReceive(channel) != 0)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                "Failed to disable SRTP receive");
+        return -1;
+    }
+#endif
+
+    VALIDATE_BASE_POINTER;
+    return veData1.base->StopReceive(channel);
+}
+
+/////////////////////////////////////////////
+// [Base] Stop playout
+//
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StopPlayout(
+        JNIEnv *,
+        jobject,
+        jint channel)
+{
+    VALIDATE_BASE_POINTER;
+    return veData1.base->StopPlayout(channel);
+}
+
+/////////////////////////////////////////////
+// [Base] Stop send
+//
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StopSend(
+        JNIEnv *,
+        jobject,
+        jint channel)
+{
+    /* if (veData1.rtp_rtcp->SetFECStatus(channel, 0) != 0)
+     {
+     __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+         "Failed to disable FEC");
+     return -1;
+     } */
+
+#ifdef USE_SRTP
+    VALIDATE_ENCRYPT_POINTER;
+    if (veData1.encrypt->DisableSRTPSend(channel) != 0)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                "Failed to disable SRTP send");
+        return -1;
+    }
+#endif
+
+    VALIDATE_BASE_POINTER;
+    return veData1.base->StopSend(channel);
+}
+
+/////////////////////////////////////////////
+// [codec] Number of codecs
+//
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_NumOfCodecs(
+        JNIEnv *,
+        jobject)
+{
+    VALIDATE_CODEC_POINTER;
+    return veData1.codec->NumOfCodecs();
+}
+
+/////////////////////////////////////////////
+// [codec] Set send codec
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_SetSendCodec(
+        JNIEnv *,
+        jobject,
+        jint channel,
+        jint index)
+{
+    VALIDATE_CODEC_POINTER;
+
+    CodecInst codec;
+
+    if (veData1.codec->GetCodec(index, codec) != 0)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Failed to get codec");
+        return -1;
+    }
+
+    return veData1.codec->SetSendCodec(channel, codec);
+}
+
+/////////////////////////////////////////////
+// [codec] Set VAD status
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_SetVADStatus(
+        JNIEnv *,
+        jobject,
+        jint channel,
+        jboolean enable,
+        jint mode)
+{
+    VALIDATE_CODEC_POINTER;
+
+    VadModes VADmode = kVadConventional;
+
+    switch (mode)
+    {
+        case 0:
+            break; // already set
+        case 1:
+            VADmode = kVadAggressiveLow;
+            break;
+        case 2:
+            VADmode = kVadAggressiveMid;
+            break;
+        case 3:
+            VADmode = kVadAggressiveHigh;
+            break;
+        default:
+            VADmode = (VadModes) 17; // force error
+            break;
+    }
+
+    return veData1.codec->SetVADStatus(channel, enable, VADmode);
+}
+
+/////////////////////////////////////////////
+// [apm] SetNSStatus
+//
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_SetNSStatus(
+        JNIEnv *,
+        jobject,
+        jboolean enable,
+        jint mode)
+{
+    VALIDATE_APM_POINTER;
+
+    NsModes NSmode = kNsDefault;
+
+    switch (mode)
+    {
+        case 0:
+            NSmode = kNsUnchanged;
+            break;
+        case 1:
+            break; // already set
+        case 2:
+            NSmode = kNsConference;
+            break;
+        case 3:
+            NSmode = kNsLowSuppression;
+            break;
+        case 4:
+            NSmode = kNsModerateSuppression;
+            break;
+        case 5:
+            NSmode = kNsHighSuppression;
+            break;
+        case 6:
+            NSmode = kNsVeryHighSuppression;
+            break;
+        default:
+            NSmode = (NsModes) 17; // force error
+            break;
+    }
+
+    return veData1.apm->SetNsStatus(enable, NSmode);
+}
+
+/////////////////////////////////////////////
+// [apm] SetAGCStatus
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_SetAGCStatus(
+        JNIEnv *,
+        jobject,
+        jboolean enable,
+        jint mode)
+{
+    VALIDATE_APM_POINTER;
+
+    AgcModes AGCmode = kAgcDefault;
+
+    switch (mode)
+    {
+        case 0:
+            AGCmode = kAgcUnchanged;
+            break;
+        case 1:
+            break; // already set
+        case 2:
+            AGCmode = kAgcAdaptiveAnalog;
+            break;
+        case 3:
+            AGCmode = kAgcAdaptiveDigital;
+            break;
+        case 4:
+            AGCmode = kAgcFixedDigital;
+            break;
+        default:
+            AGCmode = (AgcModes) 17; // force error
+            break;
+    }
+
+    /* AgcConfig agcConfig;
+     agcConfig.targetLeveldBOv = 3;
+     agcConfig.digitalCompressionGaindB = 50;
+     agcConfig.limiterEnable = 0;
+
+     if (veData1.apm->SetAGCConfig(agcConfig) != 0)
+     {
+     __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+         "Failed to set AGC config");
+     return -1;
+     } */
+
+    return veData1.apm->SetAgcStatus(enable, AGCmode);
+}
+
+/////////////////////////////////////////////
+// [apm] SetECStatus
+//
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_SetECStatus(
+        JNIEnv *,
+        jobject,
+        jboolean enable,
+        jint mode)
+{
+    VALIDATE_APM_POINTER;
+
+    EcModes ECmode = kEcDefault;
+
+    switch (mode)
+    {
+        case 0:
+            ECmode = kEcDefault;
+            break;
+        case 1:
+            break; // already set
+        case 2:
+            ECmode = kEcConference;
+            break;
+        case 3:
+            ECmode = kEcAec;
+            break;
+        case 4:
+            ECmode = kEcAecm;
+            break;
+        default:
+            ECmode = (EcModes) 17; // force error
+            break;
+    }
+
+    return veData1.apm->SetEcStatus(enable, ECmode);
+}
+
+/////////////////////////////////////////////
+// [File] Start play file locally
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_StartPlayingFileLocally(
+        JNIEnv * env,
+        jobject,
+        jint channel,
+        jstring fileName,
+        jboolean loop)
+{
+    VALIDATE_FILE_POINTER;
+
+    const char* fileNameNative = env->GetStringUTFChars(fileName, NULL);
+    if (!fileNameNative)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Could not get UTF string");
+        return -1;
+    }
+
+    jint retVal = veData1.file->StartPlayingFileLocally(channel,
+                                                        fileNameNative, loop);
+
+    env->ReleaseStringUTFChars(fileName, fileNameNative);
+
+    return retVal;
+}
+
+/////////////////////////////////////////////
+// [File] Stop play file locally
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_StopPlayingFileLocally(
+        JNIEnv *,
+        jobject,
+        jint channel)
+{
+    VALIDATE_FILE_POINTER;
+    return veData1.file->StopPlayingFileLocally(channel);
+}
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StartRecordingPlayout
+ * Signature: (ILjava/lang/String;Z)I
+ */
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_StartRecordingPlayout(
+        JNIEnv * env,
+        jobject,
+        jint channel,
+        jstring fileName,
+        jboolean)
+{
+    VALIDATE_FILE_POINTER;
+
+    const char* fileNameNative = env->GetStringUTFChars(fileName, NULL);
+    if (!fileNameNative)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Could not get UTF string");
+        return -1;
+    }
+
+    jint retVal = veData1.file->StartRecordingPlayout(channel, fileNameNative,
+                                                      0);
+
+    env->ReleaseStringUTFChars(fileName, fileNameNative);
+
+    return retVal;
+}
+
+/////////////////////////////////////////////
+// [File] Stop Recording Playout
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_StopRecordingPlayout(
+        JNIEnv *,
+        jobject,
+        jint channel)
+{
+    VALIDATE_FILE_POINTER;
+    return veData1.file->StopRecordingPlayout(channel);
+}
+
+/////////////////////////////////////////////
+// [File] Start playing file as microphone
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_StartPlayingFileAsMicrophone(
+        JNIEnv *env,
+        jobject,
+        jint channel,
+        jstring fileName,
+        jboolean loop)
+{
+    VALIDATE_FILE_POINTER;
+
+    const char* fileNameNative = env->GetStringUTFChars(fileName, NULL);
+    if (!fileNameNative)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Could not get UTF string");
+        return -1;
+    }
+
+    jint retVal = veData1.file->StartPlayingFileAsMicrophone(channel,
+                                                             fileNameNative,
+                                                             loop);
+
+    env->ReleaseStringUTFChars(fileName, fileNameNative);
+
+    return retVal;
+}
+
+/////////////////////////////////////////////
+// [File] Stop playing file as microphone
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_StopPlayingFileAsMicrophone(
+        JNIEnv *,
+        jobject,
+        jint channel)
+{
+    VALIDATE_FILE_POINTER;
+    return veData1.file->StopPlayingFileAsMicrophone(channel);
+}
+
+/////////////////////////////////////////////
+// [Volume] Set speaker volume
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_SetSpeakerVolume(
+        JNIEnv *,
+        jobject,
+        jint level)
+{
+    VALIDATE_VOLUME_POINTER;
+    if (veData1.volume->SetSpeakerVolume(level) != 0)
+    {
+        return -1;
+    }
+
+    unsigned int storedVolume = 0;
+    if (veData1.volume->GetSpeakerVolume(storedVolume) != 0)
+    {
+        return -1;
+    }
+
+    if (storedVolume != level)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+/////////////////////////////////////////////
+// [Hardware] Set loudspeaker status
+//
+JNIEXPORT jint JNICALL
+Java_org_webrtc_voiceengine_test_AndroidTest_SetLoudspeakerStatus(
+        JNIEnv *,
+        jobject,
+        jboolean enable)
+{
+    VALIDATE_HARDWARE_POINTER;
+    if (veData1.hardware->SetLoudspeakerStatus(enable) != 0)
+    {
+        return -1;
+    }
+
+    /*VALIDATE_RTP_RTCP_POINTER;
+
+     if (veData1.rtp_rtcp->SetFECStatus(0, enable, -1) != 0)
+     {
+     __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+         "Could not set FEC");
+     return -1;
+     }
+     else if(enable)
+     {
+     __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+         "Could enable FEC");
+     }
+     else
+     {
+     __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+         "Could disable FEC");
+     }*/
+
+    return 0;
+}
+
+//////////////////////////////////////////////////////////////////
+// "Local" functions (i.e. not Java accessible)
+//////////////////////////////////////////////////////////////////
+
+/////////////////////////////////////////////
+// Get all sub-APIs
+//
+bool GetSubApis(VoiceEngineData &veData)
+{
+    bool getOK = true;
+
+    // Base
+    veData.base = VoEBase::GetInterface(veData.ve);
+    if (!veData.base)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Get base sub-API failed");
+        getOK = false;
+    }
+
+    // Codec
+    veData.codec = VoECodec::GetInterface(veData.ve);
+    if (!veData.codec)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Get codec sub-API failed");
+        getOK = false;
+    }
+
+    // File
+    veData.file = VoEFile::GetInterface(veData.ve);
+    if (!veData.file)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Get file sub-API failed");
+        getOK = false;
+    }
+
+    // Network
+    veData.netw = VoENetwork::GetInterface(veData.ve);
+    if (!veData.netw)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Get network sub-API failed");
+        getOK = false;
+    }
+
+    // AudioProcessing module
+    veData.apm = VoEAudioProcessing::GetInterface(veData.ve);
+    if (!veData.apm)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Get apm sub-API failed");
+        getOK = false;
+    }
+
+    // Volume
+    veData.volume = VoEVolumeControl::GetInterface(veData.ve);
+    if (!veData.volume)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Get volume sub-API failed");
+        getOK = false;
+    }
+
+    // Hardware
+    veData.hardware = VoEHardware::GetInterface(veData.ve);
+    if (!veData.hardware)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Get hardware sub-API failed");
+        getOK = false;
+    }
+
+    // RTP / RTCP
+    veData.rtp_rtcp = VoERTP_RTCP::GetInterface(veData.ve);
+    if (!veData.rtp_rtcp)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Get rtp_rtcp sub-API failed");
+        getOK = false;
+    }
+
+    // Encrypt
+    veData.encrypt = VoEEncryption::GetInterface(veData.ve);
+    if (!veData.encrypt)
+    {
+        __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                            "Get encrypt sub-API failed");
+        getOK = false;
+    }
+
+    return getOK;
+}
+
+/////////////////////////////////////////////
+// Release all sub-APIs
+//
+bool ReleaseSubApis(VoiceEngineData &veData)
+{
+    bool releaseOK = true;
+
+    // Base
+    if (veData.base)
+    {
+        if (0 != veData.base->Release())
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Release base sub-API failed");
+            releaseOK = false;
+        }
+        else
+        {
+            veData.base = NULL;
+        }
+    }
+
+    // Codec
+    if (veData.codec)
+    {
+        if (0 != veData.codec->Release())
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Release codec sub-API failed");
+            releaseOK = false;
+        }
+        else
+        {
+            veData.codec = NULL;
+        }
+    }
+
+    // File
+    if (veData.file)
+    {
+        if (0 != veData.file->Release())
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Release file sub-API failed");
+            releaseOK = false;
+        }
+        else
+        {
+            veData.file = NULL;
+        }
+    }
+
+    // Network
+    if (veData.netw)
+    {
+        if (0 != veData.netw->Release())
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Release network sub-API failed");
+            releaseOK = false;
+        }
+        else
+        {
+            veData.netw = NULL;
+        }
+    }
+
+    // apm
+    if (veData.apm)
+    {
+        if (0 != veData.apm->Release())
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Release apm sub-API failed");
+            releaseOK = false;
+        }
+        else
+        {
+            veData.apm = NULL;
+        }
+    }
+
+    // Volume
+    if (veData.volume)
+    {
+        if (0 != veData.volume->Release())
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Release volume sub-API failed");
+            releaseOK = false;
+        }
+        else
+        {
+            veData.volume = NULL;
+        }
+    }
+
+    // Hardware
+    if (veData.hardware)
+    {
+        if (0 != veData.hardware->Release())
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Release hardware sub-API failed");
+            releaseOK = false;
+        }
+        else
+        {
+            veData.hardware = NULL;
+        }
+    }
+
+    // RTP RTCP
+    if (veData.rtp_rtcp)
+    {
+        if (0 != veData.rtp_rtcp->Release())
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Release rtp_rtcp sub-API failed");
+            releaseOK = false;
+        }
+        else
+        {
+            veData.rtp_rtcp = NULL;
+        }
+    }
+
+    // Encrypt
+    if (veData.encrypt)
+    {
+        if (0 != veData.encrypt->Release())
+        {
+            __android_log_write(ANDROID_LOG_ERROR, WEBRTC_LOG_TAG,
+                                "Release encrypt sub-API failed");
+            releaseOK = false;
+        }
+        else
+        {
+            veData.encrypt = NULL;
+        }
+    }
+
+    return releaseOK;
+}
diff --git a/src/voice_engine/test/android/android_test/jni/org_webrtc_voiceengine_test_AndroidTest.h b/src/voice_engine/test/android/android_test/jni/org_webrtc_voiceengine_test_AndroidTest.h
new file mode 100644
index 0000000..60fe839
--- /dev/null
+++ b/src/voice_engine/test/android/android_test/jni/org_webrtc_voiceengine_test_AndroidTest.h
@@ -0,0 +1,253 @@
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class org_webrtc_voiceengine_test_AndroidTest */
+
+#ifndef _Included_org_webrtc_voiceengine_test_AndroidTest
+#define _Included_org_webrtc_voiceengine_test_AndroidTest
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    NativeInit
+ * Signature: ()Z
+ */
+JNIEXPORT jboolean JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_NativeInit
+  (JNIEnv *, jclass);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    RunAutoTest
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_RunAutoTest
+  (JNIEnv *, jobject, jint, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    Create
+ * Signature: ()Z
+ */
+JNIEXPORT jboolean JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_Create
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    Delete
+ * Signature: ()Z
+ */
+JNIEXPORT jboolean JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_Delete
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    Init
+ * Signature: (IIIZZ)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_Init
+  (JNIEnv *, jobject, jboolean, jboolean);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    Terminate
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_Terminate
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    CreateChannel
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_CreateChannel
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    DeleteChannel
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_DeleteChannel
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    SetLocalReceiver
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_SetLocalReceiver
+  (JNIEnv *, jobject, jint, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    SetSendDestination
+ * Signature: (IILjava/lang/String;)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_SetSendDestination
+  (JNIEnv *, jobject, jint, jint, jstring);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StartListen
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StartListen
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StartPlayout
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StartPlayout
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StartSend
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StartSend
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StopListen
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StopListen
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StopPlayout
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StopPlayout
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StopSend
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StopSend
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StartPlayingFileLocally
+ * Signature: (ILjava/lang/String;Z)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StartPlayingFileLocally
+  (JNIEnv *, jobject, jint, jstring, jboolean);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StopPlayingFileLocally
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StopPlayingFileLocally
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StartRecordingPlayout
+ * Signature: (ILjava/lang/String;Z)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StartRecordingPlayout
+  (JNIEnv *, jobject, jint, jstring, jboolean);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StopRecordingPlayout
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StopRecordingPlayout
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StartPlayingFileAsMicrophone
+ * Signature: (ILjava/lang/String;Z)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StartPlayingFileAsMicrophone
+  (JNIEnv *, jobject, jint, jstring, jboolean);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    StopPlayingFileAsMicrophone
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_StopPlayingFileAsMicrophone
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    NumOfCodecs
+ * Signature: ()I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_NumOfCodecs
+  (JNIEnv *, jobject);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    SetSendCodec
+ * Signature: (II)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_SetSendCodec
+  (JNIEnv *, jobject, jint, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    SetVADStatus
+ * Signature: (IZI)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_SetVADStatus
+  (JNIEnv *, jobject, jint, jboolean, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    SetNSStatus
+ * Signature: (ZI)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_SetNSStatus
+  (JNIEnv *, jobject, jboolean, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    SetAGCStatus
+ * Signature: (ZI)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_SetAGCStatus
+  (JNIEnv *, jobject, jboolean, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    SetECStatus
+ * Signature: (ZI)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_SetECStatus
+  (JNIEnv *, jobject, jboolean, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    SetSpeakerVolume
+ * Signature: (I)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_SetSpeakerVolume
+  (JNIEnv *, jobject, jint);
+
+/*
+ * Class:     org_webrtc_voiceengine_test_AndroidTest
+ * Method:    SetLoudspeakerStatus
+ * Signature: (Z)I
+ */
+JNIEXPORT jint JNICALL Java_org_webrtc_voiceengine_test_AndroidTest_SetLoudspeakerStatus
+  (JNIEnv *, jobject, jboolean);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/src/voice_engine/test/android/android_test/res/drawable/icon.png b/src/voice_engine/test/android/android_test/res/drawable/icon.png
new file mode 100644
index 0000000..a07c69f
--- /dev/null
+++ b/src/voice_engine/test/android/android_test/res/drawable/icon.png
Binary files differ
diff --git a/src/voice_engine/test/android/android_test/res/layout/main.xml b/src/voice_engine/test/android/android_test/res/layout/main.xml
new file mode 100644
index 0000000..4165a07
--- /dev/null
+++ b/src/voice_engine/test/android/android_test/res/layout/main.xml
@@ -0,0 +1,43 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. -->
+<!--                                                                     -->
+<!-- Use of this source code is governed by a BSD-style license          -->
+<!-- that can be found in the LICENSE file in the root of the source     -->
+<!-- tree. An additional intellectual property rights grant can be found -->
+<!-- in the file PATENTS.  All contributing project authors may          -->
+<!-- be found in the AUTHORS file in the root of the source tree.        -->
+
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+	      android:orientation="vertical"
+	      android:layout_width="fill_parent"
+	      android:layout_height="fill_parent">
+
+  <TextView android:text="@+id/TextView01"
+	    android:id="@+id/TextView01"
+	    android:layout_width="wrap_content"
+	    android:layout_height="wrap_content">
+  </TextView>
+  <EditText android:text="@+id/EditText01"
+	    android:id="@+id/EditText01"
+	    android:layout_width="wrap_content"
+	    android:layout_height="wrap_content">
+  </EditText>
+  <Button android:text="@+id/Button01"
+	  android:id="@+id/Button01"
+	  android:layout_width="wrap_content"
+	  android:layout_height="wrap_content">
+  </Button>
+  <Spinner android:id="@+id/Spinner01"
+	   android:layout_width="wrap_content"
+	   android:layout_height="wrap_content">
+  </Spinner>
+  <Spinner android:id="@+id/Spinner02"
+	   android:layout_width="wrap_content"
+	   android:layout_height="wrap_content">
+  </Spinner>
+  <Button android:text="@+id/Button02"
+	  android:id="@+id/Button02"
+	  android:layout_width="wrap_content"
+	  android:layout_height="wrap_content">
+  </Button>
+</LinearLayout>
diff --git a/src/voice_engine/test/android/android_test/res/values/strings.xml b/src/voice_engine/test/android/android_test/res/values/strings.xml
new file mode 100644
index 0000000..29ec4ee
--- /dev/null
+++ b/src/voice_engine/test/android/android_test/res/values/strings.xml
@@ -0,0 +1,13 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. -->
+<!--                                                                     -->
+<!-- Use of this source code is governed by a BSD-style license          -->
+<!-- that can be found in the LICENSE file in the root of the source     -->
+<!-- tree. An additional intellectual property rights grant can be found -->
+<!-- in the file PATENTS.  All contributing project authors may          -->
+<!-- be found in the AUTHORS file in the root of the source tree.        -->
+
+<resources>
+    
+    <string name="app_name">WebRtc VoE</string>
+</resources>
diff --git a/src/voice_engine/test/android/android_test/src/org/webrtc/voiceengine/test/AndroidTest.java b/src/voice_engine/test/android/android_test/src/org/webrtc/voiceengine/test/AndroidTest.java
new file mode 100644
index 0000000..71b22b0
--- /dev/null
+++ b/src/voice_engine/test/android/android_test/src/org/webrtc/voiceengine/test/AndroidTest.java
@@ -0,0 +1,1190 @@
+/*

+ * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.

+ *

+ * Use of this source code is governed by a BSD-style license that can be found

+ * in the LICENSE file in the root of the source tree. An additional

+ * intellectual property rights grant can be found in the file PATENTS. All

+ * contributing project authors may be found in the AUTHORS file in the root of

+ * the source tree.

+ */

+

+/*

+ * VoiceEngine Android test application. It starts either auto test or acts like

+ * a GUI test.

+ */

+

+package org.webrtc.voiceengine.test;

+

+import java.io.File;

+import java.io.FileInputStream;

+import java.io.FileNotFoundException;

+import java.io.FileOutputStream;

+import java.io.FileReader;

+import java.io.IOException;

+

+import android.app.Activity;

+import android.content.Context;

+import android.media.AudioFormat;

+import android.media.AudioManager;

+import android.media.AudioRecord;

+import android.media.AudioTrack;

+import android.media.MediaRecorder;

+import android.os.Bundle;

+import android.util.Log;

+import android.view.View;

+import android.widget.AdapterView;

+import android.widget.ArrayAdapter;

+import android.widget.Button;

+import android.widget.EditText;

+import android.widget.Spinner;

+import android.widget.TextView;

+

+public class AndroidTest extends Activity {

+    private byte[] _playBuffer = null;

+    private short[] _circBuffer = new short[8000]; // can hold 50 frames

+

+    private int _recIndex = 0;

+    private int _playIndex = 0;

+    // private int _streamVolume = 4;

+    private int _maxVolume = 0; // Android max level (commonly 5)

+    // VoE level (0-255), corresponds to level 4 out of 5

+    private int _volumeLevel = 204;

+

+    private Thread _playThread;

+    private Thread _recThread;

+    private Thread _autotestThread;

+

+    private static AudioTrack _at;

+    private static AudioRecord _ar;

+

+    private File _fr = null;

+    private FileInputStream _in = null;

+

+    private boolean _isRunningPlay = false;

+    private boolean _isRunningRec = false;

+    private boolean _settingSet = true;

+    private boolean _isCallActive = false;

+    private boolean _runAutotest = false; // ENABLE AUTOTEST HERE!

+

+    private int _channel = -1;

+    private int _codecIndex = 0;

+    private int _ecIndex = 0;

+    private int _nsIndex = 0;

+    private int _agcIndex = 0;

+    private int _vadIndex = 0;

+    private int _audioIndex = 3;

+    private int _settingMenu = 0;

+    private int _receivePort = 1234;

+    private int _destinationPort = 1234;

+    private String _destinationIP = "127.0.0.1";

+

+    // "Build" settings

+    private final boolean _playFromFile = false;

+    // Set to true to send data to native code and back

+    private final boolean _runThroughNativeLayer = true;

+    private final boolean enableSend = true;

+    private final boolean enableReceive = true;

+    private final boolean useNativeThread = false;

+

+    /** Called when the activity is first created. */

+    public void onCreate(Bundle savedInstanceState) {

+        super.onCreate(savedInstanceState);

+        setContentView(R.layout.main);

+

+        TextView tv = (TextView) findViewById(R.id.TextView01);

+        tv.setText("");

+

+        final EditText ed = (EditText) findViewById(R.id.EditText01);

+        ed.setWidth(200);

+        ed.setText(_destinationIP);

+

+        final Button buttonStart = (Button) findViewById(R.id.Button01);

+        buttonStart.setWidth(200);

+        if (_runAutotest) {

+            buttonStart.setText("Run test");

+        } else {

+            buttonStart.setText("Start Call");

+        }

+        // button.layout(50, 50, 100, 40);

+        buttonStart.setOnClickListener(new View.OnClickListener() {

+            public void onClick(View v) {

+

+                if (_runAutotest) {

+                    startAutoTest();

+                } else {

+                    if (_isCallActive) {

+

+                        if (stopCall() != -1) {

+                            _isCallActive = false;

+                            buttonStart.setText("Start Call");

+                        }

+                    } else {

+

+                        _destinationIP = ed.getText().toString();

+                        if (startCall() != -1) {

+                            _isCallActive = true;

+                            buttonStart.setText("Stop Call");

+                        }

+                    }

+                }

+

+                // displayTextFromFile();

+                // recordAudioToFile();

+                // if(!_playFromFile)

+                // {

+                // recAudioInThread();

+                // }

+                // playAudioInThread();

+            }

+        });

+

+        final Button buttonStop = (Button) findViewById(R.id.Button02);

+        buttonStop.setWidth(200);

+        buttonStop.setText("Close app");

+        buttonStop.setOnClickListener(new View.OnClickListener() {

+            public void onClick(View v) {

+

+                if (!_runAutotest) {

+                    ShutdownVoE();

+                }

+

+                // This call terminates and should close the activity

+                finish();

+

+                // playAudioFromFile();

+                // if(!_playFromFile)

+                // {

+                // stopRecAudio();

+                // }

+                // stopPlayAudio();

+            }

+        });

+

+

+        String ap1[] = {"EC off", "AECM"};

+        final ArrayAdapter<String> adapterAp1 = new ArrayAdapter<String>(

+                        this,

+                        android.R.layout.simple_spinner_dropdown_item,

+                        ap1);

+        String ap2[] =

+                        {"NS off", "NS low", "NS moderate", "NS high",

+                                        "NS very high"};

+        final ArrayAdapter<String> adapterAp2 = new ArrayAdapter<String>(

+                        this,

+                        android.R.layout.simple_spinner_dropdown_item,

+                        ap2);

+        String ap3[] = {"AGC off", "AGC adaptive", "AGC fixed"};

+        final ArrayAdapter<String> adapterAp3 = new ArrayAdapter<String>(

+                        this,

+                        android.R.layout.simple_spinner_dropdown_item,

+                        ap3);

+        String ap4[] =

+                        {"VAD off", "VAD conventional", "VAD high rate",

+                                        "VAD mid rate", "VAD low rate"};

+        final ArrayAdapter<String> adapterAp4 = new ArrayAdapter<String>(

+                        this,

+                        android.R.layout.simple_spinner_dropdown_item,

+                        ap4);

+        String codecs[] = {"iSAC", "PCMU", "PCMA", "iLBC"};

+        final ArrayAdapter<String> adapterCodecs = new ArrayAdapter<String>(

+                        this,

+                        android.R.layout.simple_spinner_dropdown_item,

+                        codecs);

+

+        final Spinner spinnerSettings1 = (Spinner) findViewById(R.id.Spinner01);

+        final Spinner spinnerSettings2 = (Spinner) findViewById(R.id.Spinner02);

+        spinnerSettings1.setMinimumWidth(200);

+        String settings[] =

+                        {"Codec", "Echo Control", "Noise Suppression",

+                         "Automatic Gain Control",

+                         "Voice Activity Detection"};

+        ArrayAdapter<String> adapterSettings1 = new ArrayAdapter<String>(

+                        this,

+                        android.R.layout.simple_spinner_dropdown_item,

+                        settings);

+        spinnerSettings1.setAdapter(adapterSettings1);

+        spinnerSettings1.setOnItemSelectedListener(

+                        new AdapterView.OnItemSelectedListener() {

+            public void onItemSelected(AdapterView adapterView, View view,

+                            int position, long id) {

+

+                _settingMenu = position;

+                _settingSet = false;

+                if (position == 0) {

+                    spinnerSettings2.setAdapter(adapterCodecs);

+                    spinnerSettings2.setSelection(_codecIndex);

+                }

+                if (position == 1) {

+                    spinnerSettings2.setAdapter(adapterAp1);

+                    spinnerSettings2.setSelection(_ecIndex);

+                }

+                if (position == 2) {

+                    spinnerSettings2.setAdapter(adapterAp2);

+                    spinnerSettings2.setSelection(_nsIndex);

+                }

+                if (position == 3) {

+                    spinnerSettings2.setAdapter(adapterAp3);

+                    spinnerSettings2.setSelection(_agcIndex);

+                }

+                if (position == 4) {

+                    spinnerSettings2.setAdapter(adapterAp4);

+                    spinnerSettings2.setSelection(_vadIndex);

+                }

+            }

+

+            public void onNothingSelected(AdapterView adapterView) {

+                WebrtcLog("No setting1 selected");

+            }

+        });

+

+        spinnerSettings2.setMinimumWidth(200);

+        ArrayAdapter<String> adapterSettings2 = new ArrayAdapter<String>(

+                        this,

+                        android.R.layout.simple_spinner_dropdown_item,

+                        codecs);

+        spinnerSettings2.setAdapter(adapterSettings2);

+        spinnerSettings2.setOnItemSelectedListener(

+                        new AdapterView.OnItemSelectedListener() {

+            public void onItemSelected(AdapterView adapterView, View view,

+                            int position, long id) {

+

+                // avoid unintentional setting

+                if (_settingSet == false) {

+                    _settingSet = true;

+                    return;

+                }

+

+                // Change volume

+                if (_settingMenu == 0) {

+                    WebrtcLog("Selected audio " + position);

+                    setAudioProperties(position);

+                    spinnerSettings2.setSelection(_audioIndex);

+                }

+

+                // Change codec

+                if (_settingMenu == 1) {

+                    _codecIndex = position;

+                    WebrtcLog("Selected codec " + position);

+                    if (0 != SetSendCodec(_channel, _codecIndex)) {

+                        WebrtcLog("VoE set send codec failed");

+                    }

+                }

+

+                // Change EC

+                if (_settingMenu == 2) {

+                    boolean enable = true;

+                    int ECmode = 5; // AECM

+                    int AESmode = 0;

+

+                    _ecIndex = position;

+                    WebrtcLog("Selected EC " + position);

+

+                    if (position == 0) {

+                        enable = false;

+                    }

+                    if (position > 1) {

+                        ECmode = 4; // AES

+                        AESmode = position - 1;

+                    }

+

+                    if (0 != SetECStatus(enable, ECmode)) {

+                        WebrtcLog("VoE set EC status failed");

+                    }

+                }

+

+                // Change NS

+                if (_settingMenu == 3) {

+                    boolean enable = true;

+

+                    _nsIndex = position;

+                    WebrtcLog("Selected NS " + position);

+

+                    if (position == 0) {

+                        enable = false;

+                    }

+                    if (0 != SetNSStatus(enable, position + 2)) {

+                        WebrtcLog("VoE set NS status failed");

+                    }

+                }

+

+                // Change AGC

+                if (_settingMenu == 4) {

+                    boolean enable = true;

+

+                    _agcIndex = position;

+                    WebrtcLog("Selected AGC " + position);

+

+                    if (position == 0) {

+                        enable = false;

+                        position = 1; // default

+                    }

+                    if (0 != SetAGCStatus(enable, position + 2)) {

+                        WebrtcLog("VoE set AGC status failed");

+                    }

+                }

+

+                // Change VAD

+                if (_settingMenu == 5) {

+                    boolean enable = true;

+

+                    _vadIndex = position;

+                    WebrtcLog("Selected VAD " + position);

+

+                    if (position == 0) {

+                        enable = false;

+                        position++;

+                    }

+                    if (0 != SetVADStatus(_channel, enable, position - 1)) {

+                        WebrtcLog("VoE set VAD status failed");

+                    }

+                }

+            }

+

+            public void onNothingSelected(AdapterView adapterView) {

+            }

+        });

+

+        // Setup VoiceEngine

+        if (!_runAutotest && !useNativeThread) SetupVoE();

+

+        // Suggest to use the voice call audio stream for hardware volume

+        // controls

+        setVolumeControlStream(AudioManager.STREAM_VOICE_CALL);

+

+        // Get max Android volume and adjust default volume to map exactly to an

+        // Android level

+        AudioManager am =

+                        (AudioManager) getSystemService(Context.AUDIO_SERVICE);

+        _maxVolume = am.getStreamMaxVolume(AudioManager.STREAM_VOICE_CALL);

+        if (_maxVolume <= 0) {

+            WebrtcLog("Could not get max volume!");

+        } else {

+            int androidVolumeLevel = (_volumeLevel * _maxVolume) / 255;

+            _volumeLevel = (androidVolumeLevel * 255) / _maxVolume;

+        }

+

+        WebrtcLog("Started Webrtc Android Test");

+    }

+

+    // Will be called when activity is shutdown.

+    // NOTE: Activity may be killed without this function being called,

+    // but then we should not need to clean up.

+    protected void onDestroy() {

+        super.onDestroy();

+        // ShutdownVoE();

+    }

+

+    private void SetupVoE() {

+        // Create VoiceEngine

+        Create(); // Error logging is done in native API wrapper

+

+        // Initialize

+        if (0 != Init(false, false)) {

+            WebrtcLog("VoE init failed");

+        }

+

+        // Create channel

+        _channel = CreateChannel();

+        if (0 != _channel) {

+            WebrtcLog("VoE create channel failed");

+        }

+

+    }

+

+    private void ShutdownVoE() {

+        // Delete channel

+        if (0 != DeleteChannel(_channel)) {

+            WebrtcLog("VoE delete channel failed");

+        }

+

+        // Terminate

+        if (0 != Terminate()) {

+            WebrtcLog("VoE terminate failed");

+        }

+

+        // Delete VoiceEngine

+        Delete(); // Error logging is done in native API wrapper

+    }

+

+    int startCall() {

+

+        if (useNativeThread == true) {

+

+            Create();

+            return 0;

+        }

+

+        if (enableReceive == true) {

+            // Set local receiver

+            if (0 != SetLocalReceiver(_channel, _receivePort)) {

+                WebrtcLog("VoE set local receiver failed");

+            }

+

+            if (0 != StartListen(_channel)) {

+                WebrtcLog("VoE start listen failed");

+                return -1;

+            }

+

+            // Route audio to earpiece

+            if (0 != SetLoudspeakerStatus(false)) {

+                WebrtcLog("VoE set louspeaker status failed");

+                return -1;

+            }

+

+            /*

+             * WebrtcLog("VoE start record now"); if (0 !=

+             * StartRecordingPlayout(_channel, "/sdcard/singleUserDemoOut.pcm",

+             * false)) { WebrtcLog("VoE Recording Playout failed"); }

+             * WebrtcLog("VoE start Recording Playout end");

+             */

+            // Start playout

+            if (0 != StartPlayout(_channel)) {

+                WebrtcLog("VoE start playout failed");

+                return -1;

+            }

+

+            // Start playout file

+            // if (0 != StartPlayingFileLocally(_channel,

+            // "/sdcard/singleUserDemo.pcm", true)) {

+            // WebrtcLog("VoE start playout file failed");

+            // return -1;

+            // }

+        }

+

+        if (enableSend == true) {

+            if (0 != SetSendDestination(_channel, _destinationPort,

+                            _destinationIP)) {

+                WebrtcLog("VoE set send  destination failed");

+                return -1;

+            }

+

+            if (0 != SetSendCodec(_channel, _codecIndex)) {

+                WebrtcLog("VoE set send codec failed");

+                return -1;

+            }

+

+            /*

+             * if (0 != StartPlayingFileAsMicrophone(_channel,

+             * "/sdcard/singleUserDemo.pcm", true)) {

+             * WebrtcLog("VoE start playing file as microphone failed"); }

+             */

+            if (0 != StartSend(_channel)) {

+                WebrtcLog("VoE start send failed");

+                return -1;

+            }

+

+            // if (0 != StartPlayingFileAsMicrophone(_channel,

+            // "/sdcard/singleUserDemo.pcm", true)) {

+            // WebrtcLog("VoE start playing file as microphone failed");

+            // return -1;

+            // }

+        }

+

+        return 0;

+    }

+

+    int stopCall() {

+

+        if (useNativeThread == true) {

+

+            Delete();

+            return 0;

+        }

+

+        if (enableSend == true) {

+            // Stop playing file as microphone

+            /*

+             * if (0 != StopPlayingFileAsMicrophone(_channel)) {

+             * WebrtcLog("VoE stop playing file as microphone failed"); return

+             * -1; }

+             */

+            // Stop send

+            if (0 != StopSend(_channel)) {

+                WebrtcLog("VoE stop send failed");

+                return -1;

+            }

+        }

+

+        if (enableReceive == true) {

+            // if (0 != StopRecordingPlayout(_channel)) {

+            // WebrtcLog("VoE stop Recording Playout failed");

+            // }

+            // WebrtcLog("VoE stop Recording Playout ended");

+

+            // Stop listen

+            if (0 != StopListen(_channel)) {

+                WebrtcLog("VoE stop listen failed");

+                return -1;

+            }

+

+            // Stop playout file

+            // if (0 != StopPlayingFileLocally(_channel)) {

+            // WebrtcLog("VoE stop playout file failed");

+            // return -1;

+            // }

+

+            // Stop playout

+            if (0 != StopPlayout(_channel)) {

+                WebrtcLog("VoE stop playout failed");

+                return -1;

+            }

+

+            // Route audio to loudspeaker

+            if (0 != SetLoudspeakerStatus(true)) {

+                WebrtcLog("VoE set louspeaker status failed");

+                return -1;

+            }

+        }

+

+        return 0;

+    }

+

+    int startAutoTest() {

+

+        _autotestThread = new Thread(_autotestProc);

+        _autotestThread.start();

+

+        return 0;

+    }

+

+    private Runnable _autotestProc = new Runnable() {

+        public void run() {

+            // TODO(xians): choose test from GUI

+            // 1 = standard, not used

+            // 2 = extended, 2 = base

+            RunAutoTest(1, 2);

+        }

+    };

+

+    int setAudioProperties(int val) {

+

+        // AudioManager am = (AudioManager)

+        // getSystemService(Context.AUDIO_SERVICE);

+

+        if (val == 0) {

+            // _streamVolume =

+            // am.getStreamVolume(AudioManager.STREAM_VOICE_CALL);

+            // am.setStreamVolume(AudioManager.STREAM_VOICE_CALL,

+            // (_streamVolume+1), 0);

+

+            int androidVolumeLevel = (_volumeLevel * _maxVolume) / 255;

+            if (androidVolumeLevel < _maxVolume) {

+                _volumeLevel = ((androidVolumeLevel + 1) * 255) / _maxVolume;

+                if (0 != SetSpeakerVolume(_volumeLevel)) {

+                    WebrtcLog("VoE set speaker volume failed");

+                }

+            }

+        } else if (val == 1) {

+            // _streamVolume =

+            // am.getStreamVolume(AudioManager.STREAM_VOICE_CALL);

+            // am.setStreamVolume(AudioManager.STREAM_VOICE_CALL,

+            // (_streamVolume-1), 0);

+

+            int androidVolumeLevel = (_volumeLevel * _maxVolume) / 255;

+            if (androidVolumeLevel > 0) {

+                _volumeLevel = ((androidVolumeLevel - 1) * 255) / _maxVolume;

+                if (0 != SetSpeakerVolume(_volumeLevel)) {

+                    WebrtcLog("VoE set speaker volume failed");

+                }

+            }

+        } else if (val == 2) {

+            // route audio to back speaker

+            if (0 != SetLoudspeakerStatus(true)) {

+                WebrtcLog("VoE set loudspeaker status failed");

+            }

+            _audioIndex = 2;

+        } else if (val == 3) {

+            // route audio to earpiece

+            if (0 != SetLoudspeakerStatus(false)) {

+                WebrtcLog("VoE set loudspeaker status failed");

+            }

+            _audioIndex = 3;

+        }

+

+        return 0;

+    }

+

+    int displayTextFromFile() {

+

+        TextView tv = (TextView) findViewById(R.id.TextView01);

+        FileReader fr = null;

+        char[] fileBuffer = new char[64];

+

+        try {

+            fr = new FileReader("/sdcard/test.txt");

+        } catch (FileNotFoundException e) {

+            e.printStackTrace();

+            tv.setText("File not found!");

+        }

+

+        try {

+            fr.read(fileBuffer);

+        } catch (IOException e) {

+            e.printStackTrace();

+        }

+

+        String readString = new String(fileBuffer);

+        tv.setText(readString);

+        // setContentView(tv);

+

+        return 0;

+    }

+

+    int recordAudioToFile() {

+        File fr = null;

+        // final to be reachable within onPeriodicNotification

+        byte[] recBuffer = new byte[320];

+

+        int recBufSize =

+                        AudioRecord.getMinBufferSize(16000,

+                                        AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                                        AudioFormat.ENCODING_PCM_16BIT);

+        AudioRecord rec =

+                        new AudioRecord(MediaRecorder.AudioSource.MIC, 16000,

+                                        AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                                        AudioFormat.ENCODING_PCM_16BIT,

+                                        recBufSize);

+

+        fr = new File("/sdcard/record.pcm");

+        FileOutputStream out = null;

+        try {

+            out = new FileOutputStream(fr);

+        } catch (FileNotFoundException e1) {

+            e1.printStackTrace();

+        }

+

+        // start recording

+        try {

+            rec.startRecording();

+        } catch (IllegalStateException e) {

+            e.printStackTrace();

+        }

+

+        for (int i = 0; i < 550; i++) {

+            // note, there is a short version of write as well!

+            int wrBytes = rec.read(recBuffer, 0, 320);

+

+            try {

+                out.write(recBuffer);

+            } catch (IOException e) {

+                e.printStackTrace();

+            }

+        }

+

+        // stop playout

+        try {

+            rec.stop();

+        } catch (IllegalStateException e) {

+            e.printStackTrace();

+        }

+

+        return 0;

+    }

+

+    int playAudioFromFile() {

+

+        File fr = null;

+        // final to be reachable within onPeriodicNotification

+        // final byte[] playBuffer = new byte [320000];

+        // final to be reachable within onPeriodicNotification

+        final byte[] playBuffer = new byte[320];

+

+        final int playBufSize =

+                        AudioTrack.getMinBufferSize(16000,

+                                        AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                                        AudioFormat.ENCODING_PCM_16BIT);

+        // final int playBufSize = 1920; // 100 ms buffer

+        // byte[] playBuffer = new byte [playBufSize];

+        final AudioTrack play =

+                        new AudioTrack(AudioManager.STREAM_VOICE_CALL, 16000,

+                                        AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                                        AudioFormat.ENCODING_PCM_16BIT,

+                                        playBufSize, AudioTrack.MODE_STREAM);

+

+        // implementation of the playpos callback functions

+        play.setPlaybackPositionUpdateListener(

+                        new AudioTrack.OnPlaybackPositionUpdateListener() {

+

+            int count = 0;

+

+            public void onPeriodicNotification(AudioTrack track) {

+                // int wrBytes = play.write(playBuffer, count, 320);

+                count += 320;

+            }

+

+            public void onMarkerReached(AudioTrack track) {

+

+            }

+        });

+

+        // set the notification period = 160 samples

+        // int ret = play.setPositionNotificationPeriod(160);

+

+        fr = new File("/sdcard/record.pcm");

+        FileInputStream in = null;

+        try {

+            in = new FileInputStream(fr);

+        } catch (FileNotFoundException e1) {

+            e1.printStackTrace();

+        }

+

+        // try {

+        // in.read(playBuffer);

+        // } catch (IOException e) {

+        // e.printStackTrace();

+        // }

+

+        // play all at once

+        // int wrBytes = play.write(playBuffer, 0, 320000);

+

+

+        // start playout

+        try {

+            play.play();

+        } catch (IllegalStateException e) {

+            e.printStackTrace();

+        }

+

+        // returns the number of samples that has been written

+        // int headPos = play.getPlaybackHeadPosition();

+

+        // play with multiple writes

+        for (int i = 0; i < 500; i++) {

+            try {

+                in.read(playBuffer);

+            } catch (IOException e) {

+                e.printStackTrace();

+            }

+

+

+            // note, there is a short version of write as well!

+            int wrBytes = play.write(playBuffer, 0, 320);

+

+            Log.d("testWrite", "wrote");

+        }

+

+        // stop playout

+        try {

+            play.stop();

+        } catch (IllegalStateException e) {

+            e.printStackTrace();

+        }

+

+        return 0;

+    }

+

+    int playAudioInThread() {

+

+        if (_isRunningPlay) {

+            return 0;

+        }

+

+        // File fr = null;

+        // final byte[] playBuffer = new byte[320];

+        if (_playFromFile) {

+            _playBuffer = new byte[320];

+        } else {

+            // reset index

+            _playIndex = 0;

+        }

+        // within

+        // onPeriodicNotification

+

+        // Log some info (static)

+        WebrtcLog("Creating AudioTrack object");

+        final int minPlayBufSize =

+                        AudioTrack.getMinBufferSize(16000,

+                                        AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                                        AudioFormat.ENCODING_PCM_16BIT);

+        WebrtcLog("Min play buf size = " + minPlayBufSize);

+        WebrtcLog("Min volume = " + AudioTrack.getMinVolume());

+        WebrtcLog("Max volume = " + AudioTrack.getMaxVolume());

+        WebrtcLog("Native sample rate = "

+                        + AudioTrack.getNativeOutputSampleRate(

+                                        AudioManager.STREAM_VOICE_CALL));

+

+        final int playBufSize = minPlayBufSize; // 3200; // 100 ms buffer

+        // byte[] playBuffer = new byte [playBufSize];

+        try {

+            _at = new AudioTrack(

+                            AudioManager.STREAM_VOICE_CALL,

+                            16000,

+                            AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                            AudioFormat.ENCODING_PCM_16BIT,

+                            playBufSize, AudioTrack.MODE_STREAM);

+        } catch (Exception e) {

+            WebrtcLog(e.getMessage());

+        }

+

+        // Log some info (non-static)

+        WebrtcLog("Notification marker pos = "

+                        + _at.getNotificationMarkerPosition());

+        WebrtcLog("Play head pos = " + _at.getPlaybackHeadPosition());

+        WebrtcLog("Pos notification dt = "

+                        + _at.getPositionNotificationPeriod());

+        WebrtcLog("Playback rate = " + _at.getPlaybackRate());

+        WebrtcLog("Sample rate = " + _at.getSampleRate());

+

+        // implementation of the playpos callback functions

+        // _at.setPlaybackPositionUpdateListener(

+        // new AudioTrack.OnPlaybackPositionUpdateListener() {

+        //

+        // int count = 3200;

+        //

+        // public void onPeriodicNotification(AudioTrack track) {

+        // // int wrBytes = play.write(playBuffer, count, 320);

+        // count += 320;

+        // }

+        //

+        // public void onMarkerReached(AudioTrack track) {

+        // }

+        // });

+

+        // set the notification period = 160 samples

+        // int ret = _at.setPositionNotificationPeriod(160);

+

+        if (_playFromFile) {

+            _fr = new File("/sdcard/singleUserDemo.pcm");

+            try {

+                _in = new FileInputStream(_fr);

+            } catch (FileNotFoundException e1) {

+                e1.printStackTrace();

+            }

+        }

+

+        // try {

+        // in.read(playBuffer);

+        // } catch (IOException e) {

+        // e.printStackTrace();

+        // }

+

+        _isRunningPlay = true;

+

+        // buffer = new byte[3200];

+        _playThread = new Thread(_playProc);

+        // ar.startRecording();

+        // bytesRead = 3200;

+        // recording = true;

+        _playThread.start();

+

+        return 0;

+    }

+

+    int stopPlayAudio() {

+        if (!_isRunningPlay) {

+            return 0;

+        }

+

+        _isRunningPlay = false;

+

+        return 0;

+    }

+

+    private Runnable _playProc = new Runnable() {

+        public void run() {

+

+            // set high thread priority

+            android.os.Process.setThreadPriority(

+                            android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

+

+            // play all at once

+            // int wrBytes = play.write(playBuffer, 0, 320000);

+

+            // fill the buffer

+            // play.write(playBuffer, 0, 3200);

+

+            // play.flush();

+

+            // start playout

+            try {

+                _at.play();

+            } catch (IllegalStateException e) {

+                e.printStackTrace();

+            }

+

+            // play with multiple writes

+            int i = 0;

+            for (; i < 3000 && _isRunningPlay; i++) {

+

+                if (_playFromFile) {

+                    try {

+                        _in.read(_playBuffer);

+                    } catch (IOException e) {

+                        e.printStackTrace();

+                    }

+

+                    int wrBytes = _at.write(_playBuffer, 0 /* i * 320 */, 320);

+                } else {

+                    int wrSamples =

+                                    _at.write(_circBuffer, _playIndex * 160,

+                                                    160);

+

+                    // WebrtcLog("Played 10 ms from buffer, _playIndex = " +

+                    // _playIndex);

+                    // WebrtcLog("Diff = " + (_recIndex - _playIndex));

+

+                    if (_playIndex == 49) {

+                        _playIndex = 0;

+                    } else {

+                        _playIndex += 1;

+                    }

+                }

+

+                // WebrtcLog("Wrote 10 ms to buffer, head = "

+                // + _at.getPlaybackHeadPosition());

+            }

+

+            // stop playout

+            try {

+                _at.stop();

+            } catch (IllegalStateException e) {

+                e.printStackTrace();

+            }

+

+            // returns the number of samples that has been written

+            WebrtcLog("Test stopped, i = " + i + ", head = "

+                            + _at.getPlaybackHeadPosition());

+            int headPos = _at.getPlaybackHeadPosition();

+

+            // flush the buffers

+            _at.flush();

+

+            // release the object

+            _at.release();

+            _at = null;

+

+            // try {

+            // Thread.sleep() must be within a try - catch block

+            // Thread.sleep(3000);

+            // }catch (Exception e){

+            // System.out.println(e.getMessage());

+            // }

+

+            _isRunningPlay = false;

+

+        }

+    };

+

+    int recAudioInThread() {

+

+        if (_isRunningRec) {

+            return 0;

+        }

+

+        // within

+        // onPeriodicNotification

+

+        // reset index

+        _recIndex = 20;

+

+        // Log some info (static)

+        WebrtcLog("Creating AudioRecord object");

+        final int minRecBufSize = AudioRecord.getMinBufferSize(16000,

+                        AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                        AudioFormat.ENCODING_PCM_16BIT);

+        WebrtcLog("Min rec buf size = " + minRecBufSize);

+        // WebrtcLog("Min volume = " + AudioTrack.getMinVolume());

+        // WebrtcLog("Max volume = " + AudioTrack.getMaxVolume());

+        // WebrtcLog("Native sample rate = "

+        // + AudioRecord

+        // .getNativeInputSampleRate(AudioManager.STREAM_VOICE_CALL));

+

+        final int recBufSize = minRecBufSize; // 3200; // 100 ms buffer

+        try {

+            _ar = new AudioRecord(

+                            MediaRecorder.AudioSource.MIC,

+                            16000,

+                            AudioFormat.CHANNEL_CONFIGURATION_MONO,

+                            AudioFormat.ENCODING_PCM_16BIT,

+                            recBufSize);

+        } catch (Exception e) {

+            WebrtcLog(e.getMessage());

+        }

+

+        // Log some info (non-static)

+        WebrtcLog("Notification marker pos = "

+                        + _ar.getNotificationMarkerPosition());

+        // WebrtcLog("Play head pos = " + _ar.getRecordHeadPosition());

+        WebrtcLog("Pos notification dt rec= "

+                        + _ar.getPositionNotificationPeriod());

+        // WebrtcLog("Playback rate = " + _ar.getRecordRate());

+        // WebrtcLog("Playback rate = " + _ar.getPlaybackRate());

+        WebrtcLog("Sample rate = " + _ar.getSampleRate());

+        // WebrtcLog("Playback rate = " + _ar.getPlaybackRate());

+        // WebrtcLog("Playback rate = " + _ar.getPlaybackRate());

+

+        _isRunningRec = true;

+

+        _recThread = new Thread(_recProc);

+

+        _recThread.start();

+

+        return 0;

+    }

+

+    int stopRecAudio() {

+        if (!_isRunningRec) {

+            return 0;

+        }

+

+        _isRunningRec = false;

+

+        return 0;

+    }

+

+    private Runnable _recProc = new Runnable() {

+        public void run() {

+

+            // set high thread priority

+            android.os.Process.setThreadPriority(

+                            android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);

+

+            // start recording

+            try {

+                _ar.startRecording();

+            } catch (IllegalStateException e) {

+                e.printStackTrace();

+            }

+

+            // keep recording to circular buffer

+            // for a while

+            int i = 0;

+            int rdSamples = 0;

+            short[] tempBuffer = new short[160]; // Only used for native case

+

+            for (; i < 3000 && _isRunningRec; i++) {

+                if (_runThroughNativeLayer) {

+                    rdSamples = _ar.read(tempBuffer, 0, 160);

+                    // audioLoop(tempBuffer, 160); // Insert into native layer

+                } else {

+                    rdSamples = _ar.read(_circBuffer, _recIndex * 160, 160);

+

+                    // WebrtcLog("Recorded 10 ms to buffer, _recIndex = " +

+                    // _recIndex);

+                    // WebrtcLog("rdSamples = " + rdSamples);

+

+                    if (_recIndex == 49) {

+                        _recIndex = 0;

+                    } else {

+                        _recIndex += 1;

+                    }

+                }

+            }

+

+            // stop recording

+            try {

+                _ar.stop();

+            } catch (IllegalStateException e) {

+                e.printStackTrace();

+            }

+

+            // release the object

+            _ar.release();

+            _ar = null;

+

+            // try {

+            // Thread.sleep() must be within a try - catch block

+            // Thread.sleep(3000);

+            // }catch (Exception e){

+            // System.out.println(e.getMessage());

+            // }

+

+            _isRunningRec = false;

+

+            // returns the number of samples that has been written

+            // WebrtcLog("Test stopped, i = " + i + ", head = "

+            // + _at.getPlaybackHeadPosition());

+            // int headPos = _at.getPlaybackHeadPosition();

+        }

+    };

+

+    private void WebrtcLog(String msg) {

+        Log.d("*Webrtc*", msg);

+    }

+

+    // //////////////// Native function prototypes ////////////////////

+

+    private native static boolean NativeInit();

+

+    private native int RunAutoTest(int testType, int extendedSel);

+

+    private native boolean Create();

+

+    private native boolean Delete();

+

+    private native int Init(boolean enableTrace, boolean useExtTrans);

+

+    private native int Terminate();

+

+    private native int CreateChannel();

+

+    private native int DeleteChannel(int channel);

+

+    private native int SetLocalReceiver(int channel, int port);

+

+    private native int SetSendDestination(int channel, int port,

+                    String ipaddr);

+

+    private native int StartListen(int channel);

+

+    private native int StartPlayout(int channel);

+

+    private native int StartSend(int channel);

+

+    private native int StopListen(int channel);

+

+    private native int StopPlayout(int channel);

+

+    private native int StopSend(int channel);

+

+    private native int StartPlayingFileLocally(int channel, String fileName,

+                    boolean loop);

+

+    private native int StopPlayingFileLocally(int channel);

+

+    private native int StartRecordingPlayout(int channel, String fileName,

+                    boolean loop);

+

+    private native int StopRecordingPlayout(int channel);

+

+    private native int StartPlayingFileAsMicrophone(int channel,

+                    String fileName, boolean loop);

+

+    private native int StopPlayingFileAsMicrophone(int channel);

+

+    private native int NumOfCodecs();

+

+    private native int SetSendCodec(int channel, int index);

+

+    private native int SetVADStatus(int channel, boolean enable, int mode);

+

+    private native int SetNSStatus(boolean enable, int mode);

+

+    private native int SetAGCStatus(boolean enable, int mode);

+

+    private native int SetECStatus(boolean enable, int mode);

+

+    private native int SetSpeakerVolume(int volume);

+

+    private native int SetLoudspeakerStatus(boolean enable);

+

+    /*

+     * this is used to load the 'webrtc-voice-demo-jni'

+     * library on application startup.

+     * The library has already been unpacked into

+     * /data/data/webrtc.android.AndroidTest/lib/libwebrtc-voice-demo-jni.so

+     * at installation time by the package manager.

+     */

+    static {

+        Log.d("*Webrtc*", "Loading webrtc-voice-demo-jni...");

+        System.loadLibrary("webrtc-voice-demo-jni");

+

+        Log.d("*Webrtc*", "Calling native init...");

+        if (!NativeInit()) {

+            Log.e("*Webrtc*", "Native init failed");

+            throw new RuntimeException("Native init failed");

+        } else {

+            Log.d("*Webrtc*", "Native init successful");

+        }

+    }

+}

diff --git a/src/voice_engine/test/auto_test/automated_mode.cc b/src/voice_engine/test/auto_test/automated_mode.cc
new file mode 100644
index 0000000..13fa257
--- /dev/null
+++ b/src/voice_engine/test/auto_test/automated_mode.cc
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "gtest/gtest.h"
+
+int RunInAutomatedMode(int argc, char** argv) {
+  testing::InitGoogleTest(&argc, argv);
+
+  return RUN_ALL_TESTS();
+}
diff --git a/src/voice_engine/test/auto_test/automated_mode.h b/src/voice_engine/test/auto_test/automated_mode.h
new file mode 100644
index 0000000..cd7ab9e
--- /dev/null
+++ b/src/voice_engine/test/auto_test/automated_mode.h
@@ -0,0 +1,16 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_AUTOMATED_MODE_H_
+#define SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_AUTOMATED_MODE_H_
+
+int RunInAutomatedMode(int argc, char** argv);
+
+#endif  // SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_AUTOMATED_MODE_H_
diff --git a/src/voice_engine/test/auto_test/extended/agc_config_test.cc b/src/voice_engine/test/auto_test/extended/agc_config_test.cc
new file mode 100644
index 0000000..ee7e062
--- /dev/null
+++ b/src/voice_engine/test/auto_test/extended/agc_config_test.cc
@@ -0,0 +1,109 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voice_engine/test/auto_test/fixtures/after_streaming_fixture.h"
+
+class AgcConfigTest : public AfterStreamingFixture {
+ protected:
+  void SetUp() {
+    // These should be defaults for the AGC config.
+    default_agc_config_.digitalCompressionGaindB = 9;
+    default_agc_config_.limiterEnable = true;
+    default_agc_config_.targetLeveldBOv = 3;
+  }
+
+  webrtc::AgcConfig default_agc_config_;
+};
+
+TEST_F(AgcConfigTest, HasCorrectDefaultConfiguration) {
+  webrtc::AgcConfig agc_config;
+
+  EXPECT_EQ(0, voe_apm_->GetAgcConfig(agc_config));
+
+  EXPECT_EQ(default_agc_config_.targetLeveldBOv, agc_config.targetLeveldBOv);
+  EXPECT_EQ(default_agc_config_.digitalCompressionGaindB,
+            agc_config.digitalCompressionGaindB);
+  EXPECT_EQ(default_agc_config_.limiterEnable, agc_config.limiterEnable);
+}
+
+TEST_F(AgcConfigTest, DealsWithInvalidParameters) {
+  webrtc::AgcConfig agc_config = default_agc_config_;
+  agc_config.digitalCompressionGaindB = 91;
+  EXPECT_EQ(-1, voe_apm_->SetAgcConfig(agc_config)) << "Should not be able "
+      "to set gain to more than 90 dB.";
+  EXPECT_EQ(VE_APM_ERROR, voe_base_->LastError());
+
+  agc_config = default_agc_config_;
+  agc_config.targetLeveldBOv = 32;
+  EXPECT_EQ(-1, voe_apm_->SetAgcConfig(agc_config)) << "Should not be able "
+      "to set target level to more than 31.";
+  EXPECT_EQ(VE_APM_ERROR, voe_base_->LastError());
+}
+
+TEST_F(AgcConfigTest, CanGetAndSetAgcStatus) {
+  webrtc::AgcConfig agc_config;
+  agc_config.digitalCompressionGaindB = 17;
+  agc_config.targetLeveldBOv = 11;
+  agc_config.limiterEnable = false;
+
+  webrtc::AgcConfig actual_config;
+  EXPECT_EQ(0, voe_apm_->SetAgcConfig(agc_config));
+  EXPECT_EQ(0, voe_apm_->GetAgcConfig(actual_config));
+
+  EXPECT_EQ(agc_config.digitalCompressionGaindB,
+            actual_config.digitalCompressionGaindB);
+  EXPECT_EQ(agc_config.limiterEnable,
+            actual_config.limiterEnable);
+  EXPECT_EQ(agc_config.targetLeveldBOv,
+            actual_config.targetLeveldBOv);
+}
+
+TEST_F(AgcConfigTest, HasCorrectDefaultRxConfiguration) {
+  webrtc::AgcConfig agc_config;
+
+  EXPECT_EQ(0, voe_apm_->GetRxAgcConfig(channel_, agc_config));
+
+  EXPECT_EQ(default_agc_config_.targetLeveldBOv, agc_config.targetLeveldBOv);
+  EXPECT_EQ(default_agc_config_.digitalCompressionGaindB,
+      agc_config.digitalCompressionGaindB);
+  EXPECT_EQ(default_agc_config_.limiterEnable, agc_config.limiterEnable);
+}
+
+TEST_F(AgcConfigTest, DealsWithInvalidRxParameters) {
+  webrtc::AgcConfig agc_config = default_agc_config_;
+  agc_config.digitalCompressionGaindB = 91;
+  EXPECT_EQ(-1, voe_apm_->SetRxAgcConfig(channel_, agc_config)) <<
+      "Should not be able to set RX gain to more than 90 dB.";
+  EXPECT_EQ(VE_APM_ERROR, voe_base_->LastError());
+
+  agc_config = default_agc_config_;
+  agc_config.targetLeveldBOv = 32;
+  EXPECT_EQ(-1, voe_apm_->SetRxAgcConfig(channel_, agc_config)) <<
+      "Should not be able to set target level to more than 31.";
+  EXPECT_EQ(VE_APM_ERROR, voe_base_->LastError());
+}
+
+TEST_F(AgcConfigTest, CanGetAndSetRxAgcStatus) {
+  webrtc::AgcConfig agc_config;
+  agc_config.digitalCompressionGaindB = 17;
+  agc_config.targetLeveldBOv = 11;
+  agc_config.limiterEnable = false;
+
+  webrtc::AgcConfig actual_config;
+  EXPECT_EQ(0, voe_apm_->SetRxAgcConfig(channel_, agc_config));
+  EXPECT_EQ(0, voe_apm_->GetRxAgcConfig(channel_, actual_config));
+
+  EXPECT_EQ(agc_config.digitalCompressionGaindB,
+            actual_config.digitalCompressionGaindB);
+  EXPECT_EQ(agc_config.limiterEnable,
+            actual_config.limiterEnable);
+  EXPECT_EQ(agc_config.targetLeveldBOv,
+            actual_config.targetLeveldBOv);
+}
diff --git a/src/voice_engine/test/auto_test/extended/ec_metrics_test.cc b/src/voice_engine/test/auto_test/extended/ec_metrics_test.cc
new file mode 100644
index 0000000..2d60d0d
--- /dev/null
+++ b/src/voice_engine/test/auto_test/extended/ec_metrics_test.cc
@@ -0,0 +1,83 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voice_engine/test/auto_test/fixtures/after_streaming_fixture.h"
+
+class EcMetricsTest : public AfterStreamingFixture {
+};
+
+TEST_F(EcMetricsTest, EcMetricsAreOffByDefault) {
+  bool enabled = true;
+  EXPECT_EQ(0, voe_apm_->GetEcMetricsStatus(enabled));
+  EXPECT_FALSE(enabled);
+}
+
+TEST_F(EcMetricsTest, CanEnableAndDisableEcMetrics) {
+  EXPECT_EQ(0, voe_apm_->SetEcMetricsStatus(true));
+  bool ec_on = false;
+  EXPECT_EQ(0, voe_apm_->GetEcMetricsStatus(ec_on));
+  ASSERT_TRUE(ec_on);
+  EXPECT_EQ(0, voe_apm_->SetEcMetricsStatus(false));
+  EXPECT_EQ(0, voe_apm_->GetEcMetricsStatus(ec_on));
+  ASSERT_FALSE(ec_on);
+}
+
+TEST_F(EcMetricsTest, ManualTestEcMetrics) {
+  SwitchToManualMicrophone();
+
+  EXPECT_EQ(0, voe_apm_->SetEcMetricsStatus(true));
+
+  // Must enable AEC to get valid echo metrics.
+  EXPECT_EQ(0, voe_apm_->SetEcStatus(true, webrtc::kEcAec));
+
+  TEST_LOG("Speak into microphone and check metrics for 5 seconds...\n");
+  int erl, erle, rerl, a_nlp;
+  int delay_median = 0;
+  int delay_std = 0;
+
+  for (int i = 0; i < 5; i++) {
+    Sleep(1000);
+    EXPECT_EQ(0, voe_apm_->GetEchoMetrics(erl, erle, rerl, a_nlp));
+    EXPECT_EQ(0, voe_apm_->GetEcDelayMetrics(delay_median, delay_std));
+    TEST_LOG("    Echo  : ERL=%5d, ERLE=%5d, RERL=%5d, A_NLP=%5d [dB], "
+        " delay median=%3d, delay std=%3d [ms]\n", erl, erle, rerl, a_nlp,
+        delay_median, delay_std);
+  }
+
+  EXPECT_EQ(0, voe_apm_->SetEcMetricsStatus(false));
+}
+
+TEST_F(EcMetricsTest, GetEcMetricsFailsIfEcNotEnabled) {
+  int dummy = 0;
+  EXPECT_EQ(0, voe_apm_->SetEcMetricsStatus(true));
+  EXPECT_EQ(-1, voe_apm_->GetEchoMetrics(dummy, dummy, dummy, dummy));
+  EXPECT_EQ(VE_APM_ERROR, voe_base_->LastError());
+}
+
+TEST_F(EcMetricsTest, GetEcDelayMetricsFailsIfEcNotEnabled) {
+  int dummy = 0;
+  EXPECT_EQ(0, voe_apm_->SetEcMetricsStatus(true));
+  EXPECT_EQ(-1, voe_apm_->GetEcDelayMetrics(dummy, dummy));
+  EXPECT_EQ(VE_APM_ERROR, voe_base_->LastError());
+}
+
+TEST_F(EcMetricsTest, ManualVerifyEcDelayMetrics) {
+  SwitchToManualMicrophone();
+  TEST_LOG("Verify EC Delay metrics:");
+  EXPECT_EQ(0, voe_apm_->SetEcStatus(true));
+  EXPECT_EQ(0, voe_apm_->SetEcMetricsStatus(true));
+
+  for (int i = 0; i < 5; i++) {
+    int delay, delay_std;
+    EXPECT_EQ(0, voe_apm_->GetEcDelayMetrics(delay, delay_std));
+    TEST_LOG("Delay = %d, Delay Std = %d\n", delay, delay_std);
+    Sleep(1000);
+  }
+}
diff --git a/src/voice_engine/test/auto_test/fakes/fake_external_transport.cc b/src/voice_engine/test/auto_test/fakes/fake_external_transport.cc
new file mode 100644
index 0000000..1fd4a25
--- /dev/null
+++ b/src/voice_engine/test/auto_test/fakes/fake_external_transport.cc
@@ -0,0 +1,98 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/event_wrapper.h"
+#include "system_wrappers/interface/thread_wrapper.h"
+#include "voice_engine/include/voe_network.h"
+#include "voice_engine/voice_engine_defines.h"
+#include "voice_engine/test/auto_test/fakes/fake_external_transport.h"
+
+FakeExternalTransport::FakeExternalTransport(webrtc::VoENetwork* ptr)
+    : my_network_(ptr),
+      thread_(NULL),
+      lock_(NULL),
+      event_(NULL),
+      length_(0),
+      channel_(0),
+      delay_is_enabled_(0),
+      delay_time_in_ms_(0) {
+  const char* thread_name = "external_thread";
+  lock_ = webrtc::CriticalSectionWrapper::CreateCriticalSection();
+  event_ = webrtc::EventWrapper::Create();
+  thread_ = webrtc::ThreadWrapper::CreateThread(
+      Run, this, webrtc::kHighPriority, thread_name);
+  if (thread_) {
+    unsigned int id;
+    thread_->Start(id);
+  }
+}
+
+FakeExternalTransport::~FakeExternalTransport() {
+  if (thread_) {
+    thread_->SetNotAlive();
+    event_->Set();
+    if (thread_->Stop()) {
+      delete thread_;
+      thread_ = NULL;
+      delete event_;
+      event_ = NULL;
+      delete lock_;
+      lock_ = NULL;
+    }
+  }
+}
+
+bool FakeExternalTransport::Run(void* ptr) {
+  return static_cast<FakeExternalTransport*> (ptr)->Process();
+}
+
+bool FakeExternalTransport::Process() {
+  switch (event_->Wait(500)) {
+    case webrtc::kEventSignaled:
+      lock_->Enter();
+      my_network_->ReceivedRTPPacket(channel_, packet_buffer_, length_);
+      lock_->Leave();
+      return true;
+    case webrtc::kEventTimeout:
+      return true;
+    case webrtc::kEventError:
+      break;
+  }
+  return true;
+}
+
+int FakeExternalTransport::SendPacket(int channel, const void *data, int len) {
+  lock_->Enter();
+  if (len < 1612) {
+    memcpy(packet_buffer_, (const unsigned char*) data, len);
+    length_ = len;
+    channel_ = channel;
+  }
+  lock_->Leave();
+  event_->Set();  // Triggers ReceivedRTPPacket() from worker thread.
+  return len;
+}
+
+int FakeExternalTransport::SendRTCPPacket(int channel,
+                                          const void *data,
+                                          int len) {
+  if (delay_is_enabled_) {
+    Sleep(delay_time_in_ms_);
+  }
+  my_network_->ReceivedRTCPPacket(channel, data, len);
+  return len;
+}
+
+void FakeExternalTransport::SetDelayStatus(bool enable,
+                                           unsigned int delayInMs) {
+  delay_is_enabled_ = enable;
+  delay_time_in_ms_ = delayInMs;
+}
diff --git a/src/voice_engine/test/auto_test/fakes/fake_external_transport.h b/src/voice_engine/test/auto_test/fakes/fake_external_transport.h
new file mode 100644
index 0000000..25d34c7
--- /dev/null
+++ b/src/voice_engine/test/auto_test/fakes/fake_external_transport.h
@@ -0,0 +1,46 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef VOICE_ENGINE_MAIN_TEST_AUTO_TEST_FAKES_FAKE_EXTERNAL_TRANSPORT_H_
+#define VOICE_ENGINE_MAIN_TEST_AUTO_TEST_FAKES_FAKE_EXTERNAL_TRANSPORT_H_
+
+#include "common_types.h"
+
+namespace webrtc {
+class CriticalSectionWrapper;
+class EventWrapper;
+class ThreadWrapper;
+class VoENetwork;
+}
+
+class FakeExternalTransport : public webrtc::Transport {
+ public:
+  explicit FakeExternalTransport(webrtc::VoENetwork* ptr);
+  virtual ~FakeExternalTransport();
+  int SendPacket(int channel, const void *data, int len);
+  int SendRTCPPacket(int channel, const void *data, int len);
+  void SetDelayStatus(bool enabled, unsigned int delayInMs = 100);
+
+  webrtc::VoENetwork* my_network_;
+ private:
+  static bool Run(void* ptr);
+  bool Process();
+ private:
+  webrtc::ThreadWrapper* thread_;
+  webrtc::CriticalSectionWrapper* lock_;
+  webrtc::EventWrapper* event_;
+ private:
+  unsigned char packet_buffer_[1612];
+  int length_;
+  int channel_;
+  bool delay_is_enabled_;
+  int delay_time_in_ms_;
+};
+
+#endif  // VOICE_ENGINE_MAIN_TEST_AUTO_TEST_FAKES_FAKE_EXTERNAL_TRANSPORT_H_
diff --git a/src/voice_engine/test/auto_test/fakes/fake_media_process.h b/src/voice_engine/test/auto_test/fakes/fake_media_process.h
new file mode 100644
index 0000000..9c45129
--- /dev/null
+++ b/src/voice_engine/test/auto_test/fakes/fake_media_process.h
@@ -0,0 +1,44 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+#ifndef VOICE_ENGINE_MAIN_TEST_AUTO_TEST_FAKE_MEDIA_PROCESS_H_
+#define VOICE_ENGINE_MAIN_TEST_AUTO_TEST_FAKE_MEDIA_PROCESS_H_
+
+#include <cmath>
+
+class FakeMediaProcess : public webrtc::VoEMediaProcess {
+ public:
+  virtual void Process(const int channel,
+                       const webrtc::ProcessingTypes type,
+                       WebRtc_Word16 audio_10ms[],
+                       const int length,
+                       const int sampling_freq_hz,
+                       const bool stereo) {
+    for (int i = 0; i < length; i++) {
+      if (!stereo) {
+        audio_10ms[i] = static_cast<WebRtc_Word16>(audio_10ms[i] *
+            sin(2.0 * 3.14 * frequency * 400.0 / sampling_freq_hz));
+      } else {
+        // Interleaved stereo.
+        audio_10ms[2 * i] = static_cast<WebRtc_Word16> (
+            audio_10ms[2 * i] * sin(2.0 * 3.14 *
+                frequency * 400.0 / sampling_freq_hz));
+        audio_10ms[2 * i + 1] = static_cast<WebRtc_Word16> (
+            audio_10ms[2 * i + 1] * sin(2.0 * 3.14 *
+                frequency * 400.0 / sampling_freq_hz));
+      }
+      frequency++;
+    }
+  }
+
+ private:
+  int frequency;
+};
+
+#endif  // VOICE_ENGINE_MAIN_TEST_AUTO_TEST_FAKE_MEDIA_PROCESS_H_
diff --git a/src/voice_engine/test/auto_test/fixtures/after_initialization_fixture.cc b/src/voice_engine/test/auto_test/fixtures/after_initialization_fixture.cc
new file mode 100644
index 0000000..f0e665b
--- /dev/null
+++ b/src/voice_engine/test/auto_test/fixtures/after_initialization_fixture.cc
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_initialization_fixture.h"
+
+class TestErrorObserver : public webrtc::VoiceEngineObserver {
+ public:
+  TestErrorObserver() {}
+  virtual ~TestErrorObserver() {}
+  void CallbackOnError(const int channel, const int error_code) {
+    ADD_FAILURE() << "Unexpected error on channel " << channel <<
+        ": error code " << error_code;
+  }
+};
+
+AfterInitializationFixture::AfterInitializationFixture()
+    : error_observer_(new TestErrorObserver()) {
+  EXPECT_EQ(0, voe_base_->Init());
+
+#if defined(WEBRTC_ANDROID)
+  EXPECT_EQ(0, voe_hardware_->SetLoudspeakerStatus(false));
+#endif
+
+  EXPECT_EQ(0, voe_base_->RegisterVoiceEngineObserver(*error_observer_));
+}
+
+AfterInitializationFixture::~AfterInitializationFixture() {
+  EXPECT_EQ(0, voe_base_->DeRegisterVoiceEngineObserver());
+}
diff --git a/src/voice_engine/test/auto_test/fixtures/after_initialization_fixture.h b/src/voice_engine/test/auto_test/fixtures/after_initialization_fixture.h
new file mode 100644
index 0000000..bbdd64d
--- /dev/null
+++ b/src/voice_engine/test/auto_test/fixtures/after_initialization_fixture.h
@@ -0,0 +1,32 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_STANDARD_TEST_BASE_AFTER_INIT_H_
+#define SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_STANDARD_TEST_BASE_AFTER_INIT_H_
+
+#include "before_initialization_fixture.h"
+#include "scoped_ptr.h"
+
+class TestErrorObserver;
+
+// This fixture initializes the voice engine in addition to the work
+// done by the before-initialization fixture. It also registers an error
+// observer which will fail tests on error callbacks. This fixture is
+// useful to tests that want to run before we have started any form of
+// streaming through the voice engine.
+class AfterInitializationFixture : public BeforeInitializationFixture {
+ public:
+  AfterInitializationFixture();
+  virtual ~AfterInitializationFixture();
+ protected:
+  webrtc::scoped_ptr<TestErrorObserver> error_observer_;
+};
+
+#endif  // SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_STANDARD_TEST_BASE_AFTER_INIT_H_
diff --git a/src/voice_engine/test/auto_test/fixtures/after_streaming_fixture.cc b/src/voice_engine/test/auto_test/fixtures/after_streaming_fixture.cc
new file mode 100644
index 0000000..d1e6039
--- /dev/null
+++ b/src/voice_engine/test/auto_test/fixtures/after_streaming_fixture.cc
@@ -0,0 +1,74 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_streaming_fixture.h"
+
+#include <cstring>
+
+static const char* kLoopbackIp = "127.0.0.1";
+
+AfterStreamingFixture::AfterStreamingFixture()
+    : channel_(voe_base_->CreateChannel()) {
+  EXPECT_GE(channel_, 0);
+
+  fake_microphone_input_file_ = resource_manager_.long_audio_file_path();
+  EXPECT_FALSE(fake_microphone_input_file_.empty());
+
+  SetUpLocalPlayback();
+  ResumePlaying();
+  RestartFakeMicrophone();
+}
+
+AfterStreamingFixture::~AfterStreamingFixture() {
+  voe_file_->StopPlayingFileAsMicrophone(channel_);
+  PausePlaying();
+
+  voe_base_->DeleteChannel(channel_);
+}
+
+void AfterStreamingFixture::SwitchToManualMicrophone() {
+  EXPECT_EQ(0, voe_file_->StopPlayingFileAsMicrophone(channel_));
+
+  TEST_LOG("You need to speak manually into the microphone for this test.\n");
+  TEST_LOG("Please start speaking now.\n");
+  Sleep(1000);
+}
+
+void AfterStreamingFixture::RestartFakeMicrophone() {
+  EXPECT_EQ(0, voe_file_->StartPlayingFileAsMicrophone(
+        channel_, fake_microphone_input_file_.c_str(), true, true));
+}
+
+void AfterStreamingFixture::PausePlaying() {
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  EXPECT_EQ(0, voe_base_->StopPlayout(channel_));
+  EXPECT_EQ(0, voe_base_->StopReceive(channel_));
+}
+
+void AfterStreamingFixture::ResumePlaying() {
+  EXPECT_EQ(0, voe_base_->StartReceive(channel_));
+  EXPECT_EQ(0, voe_base_->StartPlayout(channel_));
+  EXPECT_EQ(0, voe_base_->StartSend(channel_));
+}
+
+void AfterStreamingFixture::SetUpLocalPlayback() {
+  EXPECT_EQ(0, voe_base_->SetSendDestination(channel_, 8000, kLoopbackIp));
+  EXPECT_EQ(0, voe_base_->SetLocalReceiver(0, 8000));
+
+  webrtc::CodecInst codec;
+  codec.channels = 1;
+  codec.pacsize = 160;
+  codec.plfreq = 8000;
+  codec.pltype = 0;
+  codec.rate = 64000;
+  strcpy(codec.plname, "PCMU");
+
+  voe_codec_->SetSendCodec(channel_, codec);
+}
diff --git a/src/voice_engine/test/auto_test/fixtures/after_streaming_fixture.h b/src/voice_engine/test/auto_test/fixtures/after_streaming_fixture.h
new file mode 100644
index 0000000..6b0a61f
--- /dev/null
+++ b/src/voice_engine/test/auto_test/fixtures/after_streaming_fixture.h
@@ -0,0 +1,48 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_STANDARD_AFTER_STREAMING_H_
+#define SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_STANDARD_AFTER_STREAMING_H_
+
+#include "after_initialization_fixture.h"
+#include "resource_manager.h"
+
+// This fixture will, in addition to the work done by its superclasses,
+// create a channel and start playing a file through the fake microphone
+// to simulate microphone input. The purpose is to make it convenient
+// to write tests that require microphone input.
+class AfterStreamingFixture : public AfterInitializationFixture {
+ public:
+  AfterStreamingFixture();
+  virtual ~AfterStreamingFixture();
+
+ protected:
+  int             channel_;
+  ResourceManager resource_manager_;
+  std::string     fake_microphone_input_file_;
+
+  // Shuts off the fake microphone for this test.
+  void SwitchToManualMicrophone();
+
+  // Restarts the fake microphone if it's been shut off earlier.
+  void RestartFakeMicrophone();
+
+  // Stops all sending and playout.
+  void PausePlaying();
+
+  // Resumes all sending and playout.
+  void ResumePlaying();
+
+ private:
+  void SetUpLocalPlayback();
+};
+
+
+#endif  // SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_STANDARD_AFTER_STREAMING_H_
diff --git a/src/voice_engine/test/auto_test/fixtures/before_initialization_fixture.cc b/src/voice_engine/test/auto_test/fixtures/before_initialization_fixture.cc
new file mode 100644
index 0000000..407e5b3
--- /dev/null
+++ b/src/voice_engine/test/auto_test/fixtures/before_initialization_fixture.cc
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "before_initialization_fixture.h"
+
+#include "voice_engine_defines.h"
+
+BeforeInitializationFixture::BeforeInitializationFixture()
+    : voice_engine_(webrtc::VoiceEngine::Create()) {
+  EXPECT_TRUE(voice_engine_ != NULL);
+
+  voe_base_ = webrtc::VoEBase::GetInterface(voice_engine_);
+  voe_codec_ = webrtc::VoECodec::GetInterface(voice_engine_);
+  voe_volume_control_ = webrtc::VoEVolumeControl::GetInterface(voice_engine_);
+  voe_dtmf_ = webrtc::VoEDtmf::GetInterface(voice_engine_);
+  voe_rtp_rtcp_ = webrtc::VoERTP_RTCP::GetInterface(voice_engine_);
+  voe_apm_ = webrtc::VoEAudioProcessing::GetInterface(voice_engine_);
+  voe_network_ = webrtc::VoENetwork::GetInterface(voice_engine_);
+  voe_file_ = webrtc::VoEFile::GetInterface(voice_engine_);
+  voe_vsync_ = webrtc::VoEVideoSync::GetInterface(voice_engine_);
+  voe_encrypt_ = webrtc::VoEEncryption::GetInterface(voice_engine_);
+  voe_hardware_ = webrtc::VoEHardware::GetInterface(voice_engine_);
+  voe_xmedia_ = webrtc::VoEExternalMedia::GetInterface(voice_engine_);
+  voe_call_report_ = webrtc::VoECallReport::GetInterface(voice_engine_);
+  voe_neteq_stats_ = webrtc::VoENetEqStats::GetInterface(voice_engine_);
+}
+
+BeforeInitializationFixture::~BeforeInitializationFixture() {
+  voe_base_->Release();
+  voe_codec_->Release();
+  voe_volume_control_->Release();
+  voe_dtmf_->Release();
+  voe_rtp_rtcp_->Release();
+  voe_apm_->Release();
+  voe_network_->Release();
+  voe_file_->Release();
+  voe_vsync_->Release();
+  voe_encrypt_->Release();
+  voe_hardware_->Release();
+  voe_xmedia_->Release();
+  voe_call_report_->Release();
+  voe_neteq_stats_->Release();
+
+  EXPECT_TRUE(webrtc::VoiceEngine::Delete(voice_engine_));
+}
+
+void BeforeInitializationFixture::Sleep(long milliseconds) {
+  // Implementation note: This method is used to reduce usage of the macro and
+  // avoid ugly errors in Eclipse (its parser can't deal with the sleep macro).
+  SLEEP(milliseconds);
+}
diff --git a/src/voice_engine/test/auto_test/fixtures/before_initialization_fixture.h b/src/voice_engine/test/auto_test/fixtures/before_initialization_fixture.h
new file mode 100644
index 0000000..ef1636b
--- /dev/null
+++ b/src/voice_engine/test/auto_test/fixtures/before_initialization_fixture.h
@@ -0,0 +1,79 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_STANDARD_TEST_BASE_H_
+#define SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_STANDARD_TEST_BASE_H_
+
+#include <assert.h>
+
+#include "common_types.h"
+#include "engine_configurations.h"
+#include "voe_audio_processing.h"
+#include "voe_base.h"
+#include "voe_call_report.h"
+#include "voe_codec.h"
+#include "voe_dtmf.h"
+#include "voe_encryption.h"
+#include "voe_errors.h"
+#include "voe_external_media.h"
+#include "voe_file.h"
+#include "voe_hardware.h"
+#include "voe_neteq_stats.h"
+#include "voe_network.h"
+#include "voe_rtp_rtcp.h"
+#include "voe_test_defines.h"
+#include "voe_video_sync.h"
+#include "voe_volume_control.h"
+
+// TODO(qhogpat): Remove these undefs once the clashing macros are gone.
+#undef TEST
+#undef ASSERT_TRUE
+#undef ASSERT_FALSE
+#include "gtest/gtest.h"
+#include "gmock/gmock.h"
+
+// This convenient fixture sets up all voice engine interfaces automatically for
+// use by testing subclasses. It allocates each interface and releases it once
+// which means that if a tests allocates additional interfaces from the voice
+// engine and forgets to release it, this test will fail in the destructor.
+// It will not call any init methods.
+//
+// Implementation note:
+// The interface fetching is done in the constructor and not SetUp() since
+// this relieves our subclasses from calling SetUp in the superclass if they
+// choose to override SetUp() themselves. This is fine as googletest will
+// construct new test objects for each method.
+class BeforeInitializationFixture : public testing::Test {
+ public:
+  BeforeInitializationFixture();
+  virtual ~BeforeInitializationFixture();
+
+ protected:
+  // Use this sleep function to sleep in test (avoid sleep macro).
+  void Sleep(long milliseconds);
+
+  webrtc::VoiceEngine*        voice_engine_;
+  webrtc::VoEBase*            voe_base_;
+  webrtc::VoECodec*           voe_codec_;
+  webrtc::VoEVolumeControl*   voe_volume_control_;
+  webrtc::VoEDtmf*            voe_dtmf_;
+  webrtc::VoERTP_RTCP*        voe_rtp_rtcp_;
+  webrtc::VoEAudioProcessing* voe_apm_;
+  webrtc::VoENetwork*         voe_network_;
+  webrtc::VoEFile*            voe_file_;
+  webrtc::VoEVideoSync*       voe_vsync_;
+  webrtc::VoEEncryption*      voe_encrypt_;
+  webrtc::VoEHardware*        voe_hardware_;
+  webrtc::VoEExternalMedia*   voe_xmedia_;
+  webrtc::VoECallReport*      voe_call_report_;
+  webrtc::VoENetEqStats*      voe_neteq_stats_;
+};
+
+#endif  // SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_STANDARD_TEST_BASE_H_
diff --git a/src/voice_engine/test/auto_test/fuzz/rtp_fuzz_test.cc b/src/voice_engine/test/auto_test/fuzz/rtp_fuzz_test.cc
new file mode 100644
index 0000000..f18d5e1
--- /dev/null
+++ b/src/voice_engine/test/auto_test/fuzz/rtp_fuzz_test.cc
@@ -0,0 +1,48 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <ctime>
+
+#include "test/libtest/include/bit_flip_encryption.h"
+#include "voice_engine/test/auto_test/fixtures/after_streaming_fixture.h"
+
+class RtpFuzzTest : public AfterStreamingFixture {
+ protected:
+  void BitFlipFuzzTest(float flip_probability) {
+    BitFlipEncryption bit_flip_encryption(std::time(NULL), flip_probability);
+
+    TEST_LOG("Starting to flip bits in RTP/RTCP packets.\n");
+    voe_encrypt_->RegisterExternalEncryption(channel_, bit_flip_encryption);
+
+    Sleep(5000);
+
+    voe_encrypt_->DeRegisterExternalEncryption(channel_);
+
+    TEST_LOG("Flipped %d bits. Back to normal.\n",
+             static_cast<int>(bit_flip_encryption.flip_count()));
+    Sleep(2000);
+  }
+};
+
+TEST_F(RtpFuzzTest, VoiceEngineDealsWithASmallNumberOfTamperedRtpPackets) {
+  BitFlipFuzzTest(0.00005f);
+}
+
+TEST_F(RtpFuzzTest, VoiceEngineDealsWithAMediumNumberOfTamperedRtpPackets) {
+  BitFlipFuzzTest(0.0005f);
+}
+
+TEST_F(RtpFuzzTest, VoiceEngineDealsWithALargeNumberOfTamperedRtpPackets) {
+  BitFlipFuzzTest(0.05f);
+}
+
+TEST_F(RtpFuzzTest, VoiceEngineDealsWithAHugeNumberOfTamperedRtpPackets) {
+  BitFlipFuzzTest(0.5f);
+}
diff --git a/src/voice_engine/test/auto_test/resource_manager.cc b/src/voice_engine/test/auto_test/resource_manager.cc
new file mode 100644
index 0000000..18213f9
--- /dev/null
+++ b/src/voice_engine/test/auto_test/resource_manager.cc
@@ -0,0 +1,29 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "resource_manager.h"
+
+#include "testsupport/fileutils.h"
+
+ResourceManager::ResourceManager() {
+  std::string filename = "audio_long16.pcm";
+#if defined(WEBRTC_ANDROID)
+  long_audio_file_path_ = "/sdcard/" + filename;
+#else
+  std::string resource_path = webrtc::test::ProjectRootPath();
+  if (resource_path == webrtc::test::kCannotFindProjectRootDir) {
+    long_audio_file_path_ = "";
+  } else {
+    long_audio_file_path_ =
+        resource_path + "data/voice_engine/" + filename;
+  }
+#endif
+}
+
diff --git a/src/voice_engine/test/auto_test/resource_manager.h b/src/voice_engine/test/auto_test/resource_manager.h
new file mode 100644
index 0000000..1bb91cf
--- /dev/null
+++ b/src/voice_engine/test/auto_test/resource_manager.h
@@ -0,0 +1,30 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_RESOURCE_MANAGER_H_
+#define SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_RESOURCE_MANAGER_H_
+
+#include <string>
+
+class ResourceManager {
+ public:
+  ResourceManager();
+
+  // Returns the full path to a long audio file.
+  // Returns the empty string on failure.
+  const std::string& long_audio_file_path() const {
+    return long_audio_file_path_;
+  }
+
+ private:
+  std::string long_audio_file_path_;
+};
+
+#endif // SRC_VOICE_ENGINE_MAIN_TEST_AUTO_TEST_RESOURCE_MANAGER_H_
diff --git a/src/voice_engine/test/auto_test/standard/audio_processing_test.cc b/src/voice_engine/test/auto_test/standard/audio_processing_test.cc
new file mode 100644
index 0000000..a7a5a07
--- /dev/null
+++ b/src/voice_engine/test/auto_test/standard/audio_processing_test.cc
@@ -0,0 +1,415 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "testsupport/fileutils.h"
+#include "voice_engine/test/auto_test/fixtures/after_streaming_fixture.h"
+#include "voice_engine/test/auto_test/voe_standard_test.h"
+
+class RxCallback : public webrtc::VoERxVadCallback {
+ public:
+  RxCallback() :
+    vad_decision(-1) {
+  }
+
+  virtual void OnRxVad(int, int vadDecision) {
+    char msg[128];
+    sprintf(msg, "RX VAD detected decision %d \n", vadDecision);
+    TEST_LOG("%s", msg);
+    vad_decision = vadDecision;
+  }
+
+  int vad_decision;
+};
+
+class AudioProcessingTest : public AfterStreamingFixture {
+ protected:
+  // Note: Be careful with this one, it is used in the
+  // Android / iPhone part too.
+  void TryEnablingAgcWithMode(webrtc::AgcModes agc_mode_to_set) {
+    EXPECT_EQ(0, voe_apm_->SetAgcStatus(true, agc_mode_to_set));
+
+    bool agc_enabled = false;
+    webrtc::AgcModes agc_mode = webrtc::kAgcDefault;
+
+    EXPECT_EQ(0, voe_apm_->GetAgcStatus(agc_enabled, agc_mode));
+    EXPECT_TRUE(agc_enabled);
+    EXPECT_EQ(agc_mode_to_set, agc_mode);
+  }
+
+  void TryEnablingRxAgcWithMode(webrtc::AgcModes agc_mode_to_set) {
+    EXPECT_EQ(0, voe_apm_->SetRxAgcStatus(channel_, true, agc_mode_to_set));
+
+    bool rx_agc_enabled = false;
+    webrtc::AgcModes agc_mode = webrtc::kAgcDefault;
+
+    EXPECT_EQ(0, voe_apm_->GetRxAgcStatus(channel_, rx_agc_enabled, agc_mode));
+    EXPECT_TRUE(rx_agc_enabled);
+    EXPECT_EQ(agc_mode_to_set, agc_mode);
+  }
+
+  // EC modes can map to other EC modes, so we have a separate parameter
+  // for what we expect the EC mode to be set to.
+  void TryEnablingEcWithMode(webrtc::EcModes ec_mode_to_set,
+                             webrtc::EcModes expected_mode) {
+    EXPECT_EQ(0, voe_apm_->SetEcStatus(true, ec_mode_to_set));
+
+    bool ec_enabled = true;
+    webrtc::EcModes ec_mode = webrtc::kEcDefault;
+
+    EXPECT_EQ(0, voe_apm_->GetEcStatus(ec_enabled, ec_mode));
+
+    EXPECT_EQ(expected_mode, ec_mode);
+  }
+
+  // Here, the CNG mode will be expected to be on or off depending on the mode.
+  void TryEnablingAecmWithMode(webrtc::AecmModes aecm_mode_to_set,
+                               bool cng_enabled_to_set) {
+    EXPECT_EQ(0, voe_apm_->SetAecmMode(aecm_mode_to_set, cng_enabled_to_set));
+
+    bool cng_enabled = false;
+    webrtc::AecmModes aecm_mode = webrtc::kAecmEarpiece;
+
+    voe_apm_->GetAecmMode(aecm_mode, cng_enabled);
+
+    EXPECT_EQ(cng_enabled_to_set, cng_enabled);
+    EXPECT_EQ(aecm_mode_to_set, aecm_mode);
+  }
+
+  void TryEnablingNsWithMode(webrtc::NsModes ns_mode_to_set,
+                             webrtc::NsModes expected_ns_mode) {
+    EXPECT_EQ(0, voe_apm_->SetNsStatus(true, ns_mode_to_set));
+
+    bool ns_status = true;
+    webrtc::NsModes ns_mode = webrtc::kNsDefault;
+    EXPECT_EQ(0, voe_apm_->GetNsStatus(ns_status, ns_mode));
+
+    EXPECT_TRUE(ns_status);
+    EXPECT_EQ(expected_ns_mode, ns_mode);
+  }
+
+  void TryEnablingRxNsWithMode(webrtc::NsModes ns_mode_to_set,
+                               webrtc::NsModes expected_ns_mode) {
+    EXPECT_EQ(0, voe_apm_->SetRxNsStatus(channel_, true, ns_mode_to_set));
+
+    bool ns_status = true;
+    webrtc::NsModes ns_mode = webrtc::kNsDefault;
+    EXPECT_EQ(0, voe_apm_->GetRxNsStatus(channel_, ns_status, ns_mode));
+
+    EXPECT_TRUE(ns_status);
+    EXPECT_EQ(expected_ns_mode, ns_mode);
+  }
+
+  void TryDetectingSilence() {
+    // Here, speech is running. Shut down speech.
+    EXPECT_EQ(0, voe_codec_->SetVADStatus(channel_, true));
+    EXPECT_EQ(0, voe_volume_control_->SetInputMute(channel_, true));
+    EXPECT_EQ(0, voe_file_->StopPlayingFileAsMicrophone(channel_));
+
+    // We should detect the silence after a short time.
+    Sleep(50);
+    for (int i = 0; i < 25; i++) {
+      EXPECT_EQ(0, voe_apm_->VoiceActivityIndicator(channel_));
+      Sleep(10);
+    }
+  }
+
+  void TryDetectingSpeechAfterSilence() {
+    // Re-enable speech.
+    RestartFakeMicrophone();
+    EXPECT_EQ(0, voe_codec_->SetVADStatus(channel_, false));
+    EXPECT_EQ(0, voe_volume_control_->SetInputMute(channel_, false));
+
+    // We should detect the speech after a short time.
+    for (int i = 0; i < 50; i++) {
+      if (voe_apm_->VoiceActivityIndicator(channel_) == 1) {
+        return;
+      }
+      Sleep(10);
+    }
+
+    ADD_FAILURE() << "Failed to detect speech within 500 ms.";
+  }
+};
+
+#if !defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID)
+
+TEST_F(AudioProcessingTest, AgcIsOnByDefault) {
+  bool agc_enabled = false;
+  webrtc::AgcModes agc_mode = webrtc::kAgcAdaptiveAnalog;
+
+  EXPECT_EQ(0, voe_apm_->GetAgcStatus(agc_enabled, agc_mode));
+  EXPECT_TRUE(agc_enabled);
+  EXPECT_EQ(webrtc::kAgcAdaptiveAnalog, agc_mode);
+}
+
+TEST_F(AudioProcessingTest, CanEnableAgcWithAllModes) {
+  TryEnablingAgcWithMode(webrtc::kAgcAdaptiveDigital);
+  TryEnablingAgcWithMode(webrtc::kAgcAdaptiveAnalog);
+  TryEnablingAgcWithMode(webrtc::kAgcFixedDigital);
+}
+
+TEST_F(AudioProcessingTest, EcIsDisabledAndAecIsDefaultEcMode) {
+  bool ec_enabled = true;
+  webrtc::EcModes ec_mode = webrtc::kEcDefault;
+
+  EXPECT_EQ(0, voe_apm_->GetEcStatus(ec_enabled, ec_mode));
+  EXPECT_FALSE(ec_enabled);
+  EXPECT_EQ(webrtc::kEcAec, ec_mode);
+}
+
+TEST_F(AudioProcessingTest, EnablingEcAecShouldEnableEcAec) {
+  TryEnablingEcWithMode(webrtc::kEcAec, webrtc::kEcAec);
+}
+
+TEST_F(AudioProcessingTest, EnablingEcConferenceShouldEnableEcAec) {
+  TryEnablingEcWithMode(webrtc::kEcConference, webrtc::kEcAec);
+}
+
+TEST_F(AudioProcessingTest, EcModeIsPreservedWhenEcIsTurnedOff) {
+  TryEnablingEcWithMode(webrtc::kEcConference, webrtc::kEcAec);
+
+  EXPECT_EQ(0, voe_apm_->SetEcStatus(false));
+
+  bool ec_enabled = true;
+  webrtc::EcModes ec_mode = webrtc::kEcDefault;
+  EXPECT_EQ(0, voe_apm_->GetEcStatus(ec_enabled, ec_mode));
+
+  EXPECT_FALSE(ec_enabled);
+  EXPECT_EQ(webrtc::kEcAec, ec_mode);
+}
+
+TEST_F(AudioProcessingTest, CanEnableAndDisableEcModeSeveralTimesInARow) {
+  for (int i = 0; i < 10; i++) {
+    EXPECT_EQ(0, voe_apm_->SetEcStatus(true));
+    EXPECT_EQ(0, voe_apm_->SetEcStatus(false));
+  }
+
+  bool ec_enabled = true;
+  webrtc::EcModes ec_mode = webrtc::kEcDefault;
+  EXPECT_EQ(0, voe_apm_->GetEcStatus(ec_enabled, ec_mode));
+
+  EXPECT_FALSE(ec_enabled);
+  EXPECT_EQ(webrtc::kEcAec, ec_mode);
+}
+
+// TODO(phoglund): Reenable below test when it's no longer flaky.
+TEST_F(AudioProcessingTest, DISABLED_TestVoiceActivityDetectionWithObserver) {
+  RxCallback rx_callback;
+  EXPECT_EQ(0, voe_apm_->RegisterRxVadObserver(channel_, rx_callback));
+
+  // The extra sleeps are to allow decisions some time to propagate to the
+  // observer.
+  TryDetectingSilence();
+  Sleep(100);
+
+  EXPECT_EQ(0, rx_callback.vad_decision);
+
+  TryDetectingSpeechAfterSilence();
+  Sleep(100);
+
+  EXPECT_EQ(1, rx_callback.vad_decision);
+
+  EXPECT_EQ(0, voe_apm_->DeRegisterRxVadObserver(channel_));
+}
+
+#endif   // !MAC_IPHONE && !WEBRTC_ANDROID
+
+TEST_F(AudioProcessingTest, EnablingEcAecmShouldEnableEcAecm) {
+  // This one apparently applies to Android and iPhone as well.
+  TryEnablingEcWithMode(webrtc::kEcAecm, webrtc::kEcAecm);
+}
+
+TEST_F(AudioProcessingTest, EcAecmModeIsEnabledAndSpeakerphoneByDefault) {
+  bool cng_enabled = false;
+  webrtc::AecmModes aecm_mode = webrtc::kAecmEarpiece;
+
+  voe_apm_->GetAecmMode(aecm_mode, cng_enabled);
+
+  EXPECT_TRUE(cng_enabled);
+  EXPECT_EQ(webrtc::kAecmSpeakerphone, aecm_mode);
+}
+
+TEST_F(AudioProcessingTest, CanSetAecmMode) {
+  EXPECT_EQ(0, voe_apm_->SetEcStatus(true, webrtc::kEcAecm));
+
+  // Try some AECM mode - CNG enabled combinations.
+  TryEnablingAecmWithMode(webrtc::kAecmEarpiece, true);
+  TryEnablingAecmWithMode(webrtc::kAecmEarpiece, false);
+  TryEnablingAecmWithMode(webrtc::kAecmLoudEarpiece, true);
+  TryEnablingAecmWithMode(webrtc::kAecmLoudSpeakerphone, false);
+  TryEnablingAecmWithMode(webrtc::kAecmQuietEarpieceOrHeadset, true);
+  TryEnablingAecmWithMode(webrtc::kAecmSpeakerphone, false);
+}
+
+TEST_F(AudioProcessingTest, RxAgcShouldBeOffByDefault) {
+  bool rx_agc_enabled = true;
+  webrtc::AgcModes agc_mode = webrtc::kAgcDefault;
+
+  EXPECT_EQ(0, voe_apm_->GetRxAgcStatus(channel_, rx_agc_enabled, agc_mode));
+  EXPECT_FALSE(rx_agc_enabled);
+  EXPECT_EQ(webrtc::kAgcAdaptiveDigital, agc_mode);
+}
+
+TEST_F(AudioProcessingTest, CanTurnOnDigitalRxAcg) {
+  TryEnablingRxAgcWithMode(webrtc::kAgcAdaptiveDigital);
+  TryEnablingRxAgcWithMode(webrtc::kAgcFixedDigital);
+}
+
+TEST_F(AudioProcessingTest, CannotTurnOnAdaptiveAnalogRxAgc) {
+  EXPECT_EQ(-1, voe_apm_->SetRxAgcStatus(
+      channel_, true, webrtc::kAgcAdaptiveAnalog));
+}
+
+TEST_F(AudioProcessingTest, NsIsOffWithModerateSuppressionByDefault) {
+  bool ns_status = true;
+  webrtc::NsModes ns_mode = webrtc::kNsDefault;
+  EXPECT_EQ(0, voe_apm_->GetNsStatus(ns_status, ns_mode));
+
+  EXPECT_FALSE(ns_status);
+  EXPECT_EQ(webrtc::kNsModerateSuppression, ns_mode);
+}
+
+TEST_F(AudioProcessingTest, CanSetNsMode) {
+  // Concrete suppression values map to themselves.
+  TryEnablingNsWithMode(webrtc::kNsHighSuppression,
+                        webrtc::kNsHighSuppression);
+  TryEnablingNsWithMode(webrtc::kNsLowSuppression,
+                        webrtc::kNsLowSuppression);
+  TryEnablingNsWithMode(webrtc::kNsModerateSuppression,
+                        webrtc::kNsModerateSuppression);
+  TryEnablingNsWithMode(webrtc::kNsVeryHighSuppression,
+                        webrtc::kNsVeryHighSuppression);
+
+  // Conference and Default map to concrete values.
+  TryEnablingNsWithMode(webrtc::kNsConference,
+                        webrtc::kNsHighSuppression);
+  TryEnablingNsWithMode(webrtc::kNsDefault,
+                        webrtc::kNsModerateSuppression);
+}
+
+TEST_F(AudioProcessingTest, RxNsIsOffWithModerateSuppressionByDefault) {
+  bool ns_status = true;
+  webrtc::NsModes ns_mode = webrtc::kNsDefault;
+  EXPECT_EQ(0, voe_apm_->GetRxNsStatus(channel_, ns_status, ns_mode));
+
+  EXPECT_FALSE(ns_status);
+  EXPECT_EQ(webrtc::kNsModerateSuppression, ns_mode);
+}
+
+TEST_F(AudioProcessingTest, CanSetRxNsMode) {
+  EXPECT_EQ(0, voe_apm_->SetRxNsStatus(channel_, true));
+
+  // See comments on the regular NS test above.
+  TryEnablingRxNsWithMode(webrtc::kNsHighSuppression,
+                          webrtc::kNsHighSuppression);
+  TryEnablingRxNsWithMode(webrtc::kNsLowSuppression,
+                          webrtc::kNsLowSuppression);
+  TryEnablingRxNsWithMode(webrtc::kNsModerateSuppression,
+                          webrtc::kNsModerateSuppression);
+  TryEnablingRxNsWithMode(webrtc::kNsVeryHighSuppression,
+                          webrtc::kNsVeryHighSuppression);
+  TryEnablingRxNsWithMode(webrtc::kNsConference,
+                          webrtc::kNsHighSuppression);
+  TryEnablingRxNsWithMode(webrtc::kNsDefault,
+                          webrtc::kNsModerateSuppression);
+}
+
+TEST_F(AudioProcessingTest, VadIsDisabledByDefault) {
+  bool vad_enabled;
+  bool disabled_dtx;
+  webrtc::VadModes vad_mode;
+
+  EXPECT_EQ(0, voe_codec_->GetVADStatus(
+      channel_, vad_enabled, vad_mode, disabled_dtx));
+
+  EXPECT_FALSE(vad_enabled);
+}
+
+TEST_F(AudioProcessingTest, VoiceActivityIndicatorReturns1WithSpeechOn) {
+  // This sleep is necessary since the voice detection algorithm needs some
+  // time to detect the speech from the fake microphone.
+  Sleep(500);
+  EXPECT_EQ(1, voe_apm_->VoiceActivityIndicator(channel_));
+}
+
+TEST_F(AudioProcessingTest, CanSetDelayOffset) {
+  voe_apm_->SetDelayOffsetMs(50);
+  EXPECT_EQ(50, voe_apm_->DelayOffsetMs());
+  voe_apm_->SetDelayOffsetMs(-50);
+  EXPECT_EQ(-50, voe_apm_->DelayOffsetMs());
+}
+
+TEST_F(AudioProcessingTest, HighPassFilterIsOnByDefault) {
+  EXPECT_TRUE(voe_apm_->IsHighPassFilterEnabled());
+}
+
+TEST_F(AudioProcessingTest, CanSetHighPassFilter) {
+  EXPECT_EQ(0, voe_apm_->EnableHighPassFilter(true));
+  EXPECT_TRUE(voe_apm_->IsHighPassFilterEnabled());
+  EXPECT_EQ(0, voe_apm_->EnableHighPassFilter(false));
+  EXPECT_FALSE(voe_apm_->IsHighPassFilterEnabled());
+}
+
+TEST_F(AudioProcessingTest, StereoChannelSwappingIsOffByDefault) {
+  EXPECT_FALSE(voe_apm_->IsStereoChannelSwappingEnabled());
+}
+
+TEST_F(AudioProcessingTest, CanSetStereoChannelSwapping) {
+  voe_apm_->EnableStereoChannelSwapping(true);
+  EXPECT_TRUE(voe_apm_->IsStereoChannelSwappingEnabled());
+  voe_apm_->EnableStereoChannelSwapping(false);
+  EXPECT_FALSE(voe_apm_->IsStereoChannelSwappingEnabled());
+}
+
+TEST_F(AudioProcessingTest, CanStartAndStopDebugRecording) {
+  std::string output_path = webrtc::test::OutputPath();
+  std::string output_file = output_path + "apm_debug.txt";
+
+  EXPECT_EQ(0, voe_apm_->StartDebugRecording(output_file.c_str()));
+  Sleep(1000);
+  EXPECT_EQ(0, voe_apm_->StopDebugRecording());
+}
+
+#if defined(MAC_IPHONE) || defined(WEBRTC_ANDROID)
+
+TEST_F(AudioProcessingTest, AgcIsOffByDefaultAndDigital) {
+  bool agc_enabled = true;
+  webrtc::AgcModes agc_mode = webrtc::kAgcAdaptiveAnalog;
+
+  EXPECT_EQ(0, voe_apm_->GetAgcStatus(agc_enabled, agc_mode));
+  EXPECT_FALSE(agc_enabled);
+  EXPECT_EQ(webrtc::kAgcAdaptiveDigital, agc_mode);
+}
+
+TEST_F(AudioProcessingTest, CanEnableAgcInAdaptiveDigitalMode) {
+  TryEnablingAgcWithMode(webrtc::kAgcAdaptiveDigital);
+}
+
+TEST_F(AudioProcessingTest, AgcIsPossibleExceptInAdaptiveAnalogMode) {
+  EXPECT_EQ(-1, voe_apm_->SetAgcStatus(true, webrtc::kAgcAdaptiveAnalog));
+  EXPECT_EQ(0, voe_apm_->SetAgcStatus(true, webrtc::kAgcFixedDigital));
+  EXPECT_EQ(0, voe_apm_->SetAgcStatus(true, webrtc::kAgcAdaptiveDigital));
+}
+
+TEST_F(AudioProcessingTest, EcIsDisabledAndAecmIsDefaultEcMode) {
+  bool ec_enabled = true;
+  webrtc::EcModes ec_mode = webrtc::kEcDefault;
+
+  EXPECT_EQ(0, voe_apm_->GetEcStatus(ec_enabled, ec_mode));
+  EXPECT_FALSE(ec_enabled);
+  EXPECT_EQ(webrtc::kEcAecm, ec_mode);
+}
+
+TEST_F(AudioProcessingTest, TestVoiceActivityDetection) {
+  TryDetectingSilence();
+  TryDetectingSpeechAfterSilence();
+}
+
+#endif  // MAC_IPHONE || WEBRTC_ANDROID
diff --git a/src/voice_engine/test/auto_test/standard/call_report_test.cc b/src/voice_engine/test/auto_test/standard/call_report_test.cc
new file mode 100644
index 0000000..c96b14d
--- /dev/null
+++ b/src/voice_engine/test/auto_test/standard/call_report_test.cc
@@ -0,0 +1,88 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_streaming_fixture.h"
+#include "testsupport/fileutils.h"
+
+class CallReportTest : public AfterStreamingFixture {
+};
+
+TEST_F(CallReportTest, ResetCallReportStatisticsFailsForBadInput) {
+  EXPECT_EQ(-1, voe_call_report_->ResetCallReportStatistics(-2));
+  EXPECT_EQ(-1, voe_call_report_->ResetCallReportStatistics(1));
+}
+
+TEST_F(CallReportTest, ResetCallReportStatisticsSucceedsWithCorrectInput) {
+  EXPECT_EQ(0, voe_call_report_->ResetCallReportStatistics(channel_));
+  EXPECT_EQ(0, voe_call_report_->ResetCallReportStatistics(-1));
+}
+
+TEST_F(CallReportTest, EchoMetricSummarySucceeds) {
+  EXPECT_EQ(0, voe_apm_->SetEcMetricsStatus(true));
+  Sleep(1000);
+
+  webrtc::EchoStatistics echo_statistics;
+  EXPECT_EQ(0, voe_call_report_->GetEchoMetricSummary(echo_statistics));
+}
+
+TEST_F(CallReportTest, GetRoundTripTimeSummaryReturnsAllMinusOnesIfRtcpIsOff) {
+  voe_rtp_rtcp_->SetRTCPStatus(channel_, false);
+
+  webrtc::StatVal delays;
+  EXPECT_EQ(0, voe_call_report_->GetRoundTripTimeSummary(channel_, delays));
+  EXPECT_EQ(-1, delays.average);
+  EXPECT_EQ(-1, delays.min);
+  EXPECT_EQ(-1, delays.max);
+}
+
+TEST_F(CallReportTest, GetRoundTripTimesReturnsValuesIfRtcpIsOn) {
+  voe_rtp_rtcp_->SetRTCPStatus(channel_, true);
+  Sleep(1000);
+
+  webrtc::StatVal delays;
+  EXPECT_EQ(0, voe_call_report_->GetRoundTripTimeSummary(channel_, delays));
+  EXPECT_NE(-1, delays.average);
+  EXPECT_NE(-1, delays.min);
+  EXPECT_NE(-1, delays.max);
+}
+
+TEST_F(CallReportTest, DeadOrAliveSummaryFailsIfDeadOrAliveTrackingNotActive) {
+  int count_the_dead;
+  int count_the_living;
+  EXPECT_EQ(-1, voe_call_report_->GetDeadOrAliveSummary(channel_,
+                                                        count_the_dead,
+                                                        count_the_living));
+}
+
+TEST_F(CallReportTest,
+       DeadOrAliveSummarySucceedsIfDeadOrAliveTrackingIsActive) {
+  EXPECT_EQ(0, voe_network_->SetPeriodicDeadOrAliveStatus(channel_, true, 1));
+  Sleep(1200);
+
+  int count_the_dead;
+  int count_the_living;
+  EXPECT_EQ(0, voe_call_report_->GetDeadOrAliveSummary(channel_,
+                                                       count_the_dead,
+                                                       count_the_living));
+
+  EXPECT_GE(count_the_dead, 0);
+  EXPECT_GE(count_the_living, 0);
+}
+
+TEST_F(CallReportTest, WriteReportToFileFailsOnBadInput) {
+  EXPECT_EQ(-1, voe_call_report_->WriteReportToFile(NULL));
+}
+
+TEST_F(CallReportTest, WriteReportToFileSucceedsWithCorrectFilename) {
+  std::string output_path = webrtc::test::OutputPath();
+  std::string report_filename = output_path + "call_report.txt";
+
+  EXPECT_EQ(0, voe_call_report_->WriteReportToFile(report_filename.c_str()));
+}
diff --git a/src/voice_engine/test/auto_test/standard/codec_before_streaming_test.cc b/src/voice_engine/test/auto_test/standard/codec_before_streaming_test.cc
new file mode 100644
index 0000000..6d902ef
--- /dev/null
+++ b/src/voice_engine/test/auto_test/standard/codec_before_streaming_test.cc
@@ -0,0 +1,82 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_initialization_fixture.h"
+
+class CodecBeforeStreamingTest : public AfterInitializationFixture {
+ protected:
+  void SetUp() {
+    memset(&codec_instance_, 0, sizeof(codec_instance_));
+    codec_instance_.channels = 1;
+    codec_instance_.plfreq = 16000;
+    codec_instance_.pacsize = 480;
+
+    channel_ = voe_base_->CreateChannel();
+  }
+
+  void TearDown() {
+    voe_base_->DeleteChannel(channel_);
+  }
+
+  webrtc::CodecInst codec_instance_;
+  int channel_;
+};
+
+// TODO(phoglund): add test which verifies default pltypes for various codecs.
+
+TEST_F(CodecBeforeStreamingTest, GetRecPayloadTypeFailsForInvalidCodecName) {
+  strcpy(codec_instance_.plname, "SomeInvalidCodecName");
+
+  // Should fail since the codec name is invalid.
+  EXPECT_NE(0, voe_codec_->GetRecPayloadType(channel_, codec_instance_));
+}
+
+TEST_F(CodecBeforeStreamingTest, GetRecPayloadTypeRecognizesISAC) {
+  strcpy(codec_instance_.plname, "iSAC");
+  EXPECT_EQ(0, voe_codec_->GetRecPayloadType(channel_, codec_instance_));
+  strcpy(codec_instance_.plname, "ISAC");
+  EXPECT_EQ(0, voe_codec_->GetRecPayloadType(channel_, codec_instance_));
+}
+
+TEST_F(CodecBeforeStreamingTest, SetRecPayloadTypeCanChangeISACPayloadType) {
+  strcpy(codec_instance_.plname, "ISAC");
+
+  codec_instance_.pltype = 123;
+  EXPECT_EQ(0, voe_codec_->SetRecPayloadType(channel_, codec_instance_));
+  EXPECT_EQ(0, voe_codec_->GetRecPayloadType(channel_, codec_instance_));
+  EXPECT_EQ(123, codec_instance_.pltype);
+
+  codec_instance_.pltype = 104;
+  EXPECT_EQ(0, voe_codec_->SetRecPayloadType(channel_, codec_instance_));
+  EXPECT_EQ(0, voe_codec_->GetRecPayloadType(channel_, codec_instance_));
+
+  EXPECT_EQ(104, codec_instance_.pltype);
+}
+
+TEST_F(CodecBeforeStreamingTest, SetRecPayloadTypeCanChangeILBCPayloadType) {
+  strcpy(codec_instance_.plname, "iLBC");
+  codec_instance_.plfreq = 8000;
+  codec_instance_.pacsize = 240;
+  codec_instance_.rate = 13300;
+
+  EXPECT_EQ(0, voe_codec_->GetRecPayloadType(channel_, codec_instance_));
+  int original_pltype = codec_instance_.pltype;
+  codec_instance_.pltype = 123;
+  EXPECT_EQ(0, voe_codec_->SetRecPayloadType(channel_, codec_instance_));
+  EXPECT_EQ(0, voe_codec_->GetRecPayloadType(channel_, codec_instance_));
+
+  EXPECT_EQ(123, codec_instance_.pltype);
+
+  codec_instance_.pltype = original_pltype;
+  EXPECT_EQ(0, voe_codec_->SetRecPayloadType(channel_, codec_instance_));
+  EXPECT_EQ(0, voe_codec_->GetRecPayloadType(channel_, codec_instance_));
+
+  EXPECT_EQ(original_pltype, codec_instance_.pltype);
+}
diff --git a/src/voice_engine/test/auto_test/standard/codec_test.cc b/src/voice_engine/test/auto_test/standard/codec_test.cc
new file mode 100644
index 0000000..d861452
--- /dev/null
+++ b/src/voice_engine/test/auto_test/standard/codec_test.cc
@@ -0,0 +1,232 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_streaming_fixture.h"
+#include "voe_test_defines.h"
+#include "voice_engine_defines.h"
+
+class CodecTest : public AfterStreamingFixture {
+ protected:
+  void SetUp() {
+    memset(&codec_instance_, 0, sizeof(codec_instance_));
+  }
+
+  void SetArbitrarySendCodec() {
+    // Just grab the first codec.
+    EXPECT_EQ(0, voe_codec_->GetCodec(0, codec_instance_));
+    EXPECT_EQ(0, voe_codec_->SetSendCodec(channel_, codec_instance_));
+  }
+
+  webrtc::CodecInst codec_instance_;
+};
+
+static void SetRateIfILBC(webrtc::CodecInst* codec_instance, int packet_size) {
+  if (!_stricmp(codec_instance->plname, "ilbc")) {
+    if (packet_size == 160 || packet_size == 320) {
+      codec_instance->rate = 15200;
+    } else {
+      codec_instance->rate = 13300;
+    }
+  }
+}
+
+static bool IsNotViableSendCodec(const char* codec_name) {
+  return !_stricmp(codec_name, "CN") ||
+         !_stricmp(codec_name, "telephone-event") ||
+         !_stricmp(codec_name, "red");
+}
+
+TEST_F(CodecTest, PcmuIsDefaultCodecAndHasTheRightValues) {
+  EXPECT_EQ(0, voe_codec_->GetSendCodec(channel_, codec_instance_));
+  EXPECT_EQ(1, codec_instance_.channels);
+  EXPECT_EQ(160, codec_instance_.pacsize);
+  EXPECT_EQ(8000, codec_instance_.plfreq);
+  EXPECT_EQ(0, codec_instance_.pltype);
+  EXPECT_EQ(64000, codec_instance_.rate);
+  EXPECT_STRCASEEQ("PCMU", codec_instance_.plname);
+}
+
+TEST_F(CodecTest, VoiceActivityDetectionIsOffByDefault) {
+  bool vad_enabled = false;
+  bool dtx_disabled = false;
+  webrtc::VadModes vad_mode = webrtc::kVadAggressiveMid;
+
+  voe_codec_->GetVADStatus(channel_, vad_enabled, vad_mode, dtx_disabled);
+
+  EXPECT_FALSE(vad_enabled);
+  EXPECT_TRUE(dtx_disabled);
+  EXPECT_EQ(webrtc::kVadConventional, vad_mode);
+}
+
+TEST_F(CodecTest, VoiceActivityDetectionCanBeEnabled) {
+  EXPECT_EQ(0, voe_codec_->SetVADStatus(channel_, true));
+
+  bool vad_enabled = false;
+  bool dtx_disabled = false;
+  webrtc::VadModes vad_mode = webrtc::kVadAggressiveMid;
+
+  voe_codec_->GetVADStatus(channel_, vad_enabled, vad_mode, dtx_disabled);
+
+  EXPECT_TRUE(vad_enabled);
+  EXPECT_EQ(webrtc::kVadConventional, vad_mode);
+  EXPECT_FALSE(dtx_disabled);
+}
+
+TEST_F(CodecTest, VoiceActivityDetectionTypeSettingsCanBeChanged) {
+  bool vad_enabled = false;
+  bool dtx_disabled = false;
+  webrtc::VadModes vad_mode = webrtc::kVadAggressiveMid;
+
+  EXPECT_EQ(0, voe_codec_->SetVADStatus(
+      channel_, true, webrtc::kVadAggressiveLow, false));
+  EXPECT_EQ(0, voe_codec_->GetVADStatus(
+      channel_, vad_enabled, vad_mode, dtx_disabled));
+  EXPECT_EQ(vad_mode, webrtc::kVadAggressiveLow);
+  EXPECT_FALSE(dtx_disabled);
+
+  EXPECT_EQ(0, voe_codec_->SetVADStatus(
+      channel_, true, webrtc::kVadAggressiveMid, false));
+  EXPECT_EQ(0, voe_codec_->GetVADStatus(
+      channel_, vad_enabled, vad_mode, dtx_disabled));
+  EXPECT_EQ(vad_mode, webrtc::kVadAggressiveMid);
+  EXPECT_FALSE(dtx_disabled);
+
+  // The fourth argument is the DTX disable flag.
+  EXPECT_EQ(0, voe_codec_->SetVADStatus(
+      channel_, true, webrtc::kVadAggressiveHigh, true));
+  EXPECT_EQ(0, voe_codec_->GetVADStatus(
+      channel_, vad_enabled, vad_mode, dtx_disabled));
+  EXPECT_EQ(vad_mode, webrtc::kVadAggressiveHigh);
+  EXPECT_TRUE(dtx_disabled);
+
+  EXPECT_EQ(0, voe_codec_->SetVADStatus(
+      channel_, true, webrtc::kVadConventional, true));
+  EXPECT_EQ(0, voe_codec_->GetVADStatus(
+      channel_, vad_enabled, vad_mode, dtx_disabled));
+  EXPECT_EQ(vad_mode, webrtc::kVadConventional);
+}
+
+TEST_F(CodecTest, VoiceActivityDetectionCanBeTurnedOff) {
+  EXPECT_EQ(0, voe_codec_->SetVADStatus(channel_, true));
+
+  // VAD is always on when DTX is on, so we need to turn off DTX too.
+  EXPECT_EQ(0, voe_codec_->SetVADStatus(
+      channel_, false, webrtc::kVadConventional, true));
+
+  bool vad_enabled = false;
+  bool dtx_disabled = false;
+  webrtc::VadModes vad_mode = webrtc::kVadAggressiveMid;
+
+  voe_codec_->GetVADStatus(channel_, vad_enabled, vad_mode, dtx_disabled);
+
+  EXPECT_FALSE(vad_enabled);
+  EXPECT_TRUE(dtx_disabled);
+  EXPECT_EQ(webrtc::kVadConventional, vad_mode);
+}
+
+// Tests requiring manual verification (although they do have some value
+// without the manual verification):
+TEST_F(CodecTest, ManualExtendedISACApisBehaveAsExpected) {
+   strcpy(codec_instance_.plname, "isac");
+   codec_instance_.pltype = 103;
+   codec_instance_.plfreq = 16000;
+   codec_instance_.channels = 1;
+   // -1 here means "adaptive rate".
+   codec_instance_.rate = -1;
+   codec_instance_.pacsize = 480;
+
+   EXPECT_EQ(0, voe_codec_->SetSendCodec(channel_, codec_instance_));
+
+   EXPECT_NE(0, voe_codec_->SetISACInitTargetRate(channel_, 5000)) <<
+       "iSAC should reject rate 5000.";
+   EXPECT_NE(0, voe_codec_->SetISACInitTargetRate(channel_, 33000)) <<
+       "iSAC should reject rate 33000.";
+   EXPECT_EQ(0, voe_codec_->SetISACInitTargetRate(channel_, 32000));
+
+   TEST_LOG("Ensure that the sound is good (iSAC, target = 32kbps)...\n");
+   Sleep(3000);
+
+   EXPECT_EQ(0, voe_codec_->SetISACInitTargetRate(channel_, 10000));
+   TEST_LOG("Ensure that the sound is good (iSAC, target = 10kbps)...\n");
+   Sleep(3000);
+
+   EXPECT_EQ(0, voe_codec_->SetISACInitTargetRate(channel_, 10000, true));
+   EXPECT_EQ(0, voe_codec_->SetISACInitTargetRate(channel_, 10000, false));
+   EXPECT_EQ(0, voe_codec_->SetISACInitTargetRate(channel_, 0));
+   TEST_LOG("Ensure that the sound is good (iSAC, target = default)...\n");
+   Sleep(3000);
+
+   TEST_LOG("  Testing SetISACMaxPayloadSize:\n");
+   EXPECT_EQ(0, voe_base_->StopSend(channel_));
+   EXPECT_NE(0, voe_codec_->SetISACMaxPayloadSize(channel_, 50));
+   EXPECT_NE(0, voe_codec_->SetISACMaxPayloadSize(channel_, 650));
+   EXPECT_EQ(0, voe_codec_->SetISACMaxPayloadSize(channel_, 120));
+   EXPECT_EQ(0, voe_base_->StartSend(channel_));
+   TEST_LOG("Ensure that the sound is good (iSAC, "
+            "max payload size = 100 bytes)...\n");
+   Sleep(3000);
+
+   TEST_LOG("  Testing SetISACMaxRate:\n");
+   EXPECT_EQ(0, voe_base_->StopSend(channel_));
+   EXPECT_EQ(0, voe_codec_->SetISACMaxPayloadSize(channel_, 400));
+   EXPECT_EQ(0, voe_base_->StartSend(channel_));
+
+   EXPECT_EQ(0, voe_base_->StopSend(channel_));
+   EXPECT_NE(0, voe_codec_->SetISACMaxRate(channel_, 31900));
+   EXPECT_NE(0, voe_codec_->SetISACMaxRate(channel_, 53500));
+   EXPECT_EQ(0, voe_codec_->SetISACMaxRate(channel_, 32000));
+   EXPECT_EQ(0, voe_base_->StartSend(channel_));
+   TEST_LOG("Ensure that the sound is good (iSAC, max rate = 32 kbps)...\n");
+   Sleep(3000);
+
+   EXPECT_EQ(0, voe_base_->StopSend(channel_));
+
+   // Restore "no limitation". No, no limit, we reach for the sky.
+   EXPECT_EQ(0, voe_codec_->SetISACMaxRate(channel_, 53400));
+   EXPECT_EQ(0, voe_base_->StartSend(channel_));
+}
+
+// TODO(xians, phoglund): Re-enable when issue 372 is resolved.
+TEST_F(CodecTest, DISABLED_ManualVerifySendCodecsForAllPacketSizes) {
+  for (int i = 0; i < voe_codec_->NumOfCodecs(); ++i) {
+    voe_codec_->GetCodec(i, codec_instance_);
+    if (IsNotViableSendCodec(codec_instance_.plname)) {
+      TEST_LOG("Skipping %s.\n", codec_instance_.plname);
+      continue;
+    }
+    EXPECT_NE(-1, codec_instance_.pltype) <<
+        "The codec database should suggest a payload type.";
+
+    // Test with default packet size:
+    TEST_LOG("%s (pt=%d): default packet size(%d), accepts sizes ",
+             codec_instance_.plname, codec_instance_.pltype,
+             codec_instance_.pacsize);
+    voe_codec_->SetSendCodec(channel_, codec_instance_);
+    Sleep(CODEC_TEST_TIME);
+
+    // Now test other reasonable packet sizes:
+    bool at_least_one_succeeded = false;
+    for (int packet_size = 80; packet_size < 1000; packet_size += 80) {
+      SetRateIfILBC(&codec_instance_, packet_size);
+      codec_instance_.pacsize = packet_size;
+
+      if (voe_codec_->SetSendCodec(channel_, codec_instance_) != -1) {
+        // Note that it's fine for SetSendCodec to fail - what packet sizes
+        // it accepts depends on the codec. It should accept one at minimum.
+        TEST_LOG("%d ", packet_size);
+        TEST_LOG_FLUSH;
+        at_least_one_succeeded = true;
+        Sleep(CODEC_TEST_TIME);
+      }
+    }
+    TEST_LOG("\n");
+    EXPECT_TRUE(at_least_one_succeeded);
+  }
+}
diff --git a/src/voice_engine/test/auto_test/standard/dtmf_test.cc b/src/voice_engine/test/auto_test/standard/dtmf_test.cc
new file mode 100644
index 0000000..6aa4625
--- /dev/null
+++ b/src/voice_engine/test/auto_test/standard/dtmf_test.cc
@@ -0,0 +1,101 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_streaming_fixture.h"
+#include "voice_engine_defines.h"
+
+class DtmfTest : public AfterStreamingFixture {
+ protected:
+  void RunSixteenDtmfEvents(bool out_of_band) {
+    TEST_LOG("Sending telephone events:\n");
+    EXPECT_EQ(0, voe_dtmf_->SetDtmfFeedbackStatus(false));
+
+    for (int i = 0; i < 16; i++) {
+      TEST_LOG("%d ", i);
+      TEST_LOG_FLUSH;
+      EXPECT_EQ(0, voe_dtmf_->SendTelephoneEvent(
+          channel_, i, out_of_band, 160, 10));
+      Sleep(500);
+    }
+    TEST_LOG("\n");
+  }
+};
+
+TEST_F(DtmfTest, DtmfFeedbackIsEnabledByDefaultButNotDirectFeedback) {
+  bool dtmf_feedback = false;
+  bool dtmf_direct_feedback = false;
+
+  EXPECT_EQ(0, voe_dtmf_->GetDtmfFeedbackStatus(dtmf_feedback,
+                                                dtmf_direct_feedback));
+
+  EXPECT_TRUE(dtmf_feedback);
+  EXPECT_FALSE(dtmf_direct_feedback);
+}
+
+TEST_F(DtmfTest, ManualSuccessfullySendsInBandTelephoneEvents) {
+  RunSixteenDtmfEvents(false);
+}
+
+TEST_F(DtmfTest, ManualSuccessfullySendsOutOfBandTelephoneEvents) {
+  RunSixteenDtmfEvents(true);
+}
+
+TEST_F(DtmfTest, TestTwoNonDtmfEvents) {
+  EXPECT_EQ(0, voe_dtmf_->SendTelephoneEvent(channel_, 32, true));
+  EXPECT_EQ(0, voe_dtmf_->SendTelephoneEvent(channel_, 110, true));
+}
+
+#ifndef MAC_IPHONE
+TEST_F(DtmfTest, ManualCanDisableDtmfPlayoutExceptOnIphone) {
+  TEST_LOG("Disabling DTMF playout (no tone should be heard) \n");
+  EXPECT_EQ(0, voe_dtmf_->SetDtmfPlayoutStatus(channel_, false));
+  EXPECT_EQ(0, voe_dtmf_->SendTelephoneEvent(channel_, 0, true));
+  Sleep(500);
+
+  TEST_LOG("Enabling DTMF playout (tone should be heard) \n");
+  EXPECT_EQ(0, voe_dtmf_->SetDtmfPlayoutStatus(channel_, true));
+  EXPECT_EQ(0, voe_dtmf_->SendTelephoneEvent(channel_, 0, true));
+  Sleep(500);
+}
+#endif
+
+// This test modifies the DTMF payload type from the default 106 to 88
+// and then runs through 16 DTMF out.of-band events.
+TEST_F(DtmfTest, ManualCanChangeDtmfPayloadType) {
+  webrtc::CodecInst codec_instance;
+
+  TEST_LOG("Changing DTMF payload type.\n");
+
+  // Start by modifying the receiving side.
+  for (int i = 0; i < voe_codec_->NumOfCodecs(); i++) {
+    EXPECT_EQ(0, voe_codec_->GetCodec(i, codec_instance));
+    if (!_stricmp("telephone-event", codec_instance.plname)) {
+      codec_instance.pltype = 88;  // Use 88 instead of default 106.
+      EXPECT_EQ(0, voe_base_->StopSend(channel_));
+      EXPECT_EQ(0, voe_base_->StopPlayout(channel_));
+      EXPECT_EQ(0, voe_base_->StopReceive(channel_));
+      EXPECT_EQ(0, voe_codec_->SetRecPayloadType(channel_, codec_instance));
+      EXPECT_EQ(0, voe_base_->StartReceive(channel_));
+      EXPECT_EQ(0, voe_base_->StartPlayout(channel_));
+      EXPECT_EQ(0, voe_base_->StartSend(channel_));
+      break;
+    }
+  }
+
+  Sleep(500);
+
+  // Next, we must modify the sending side as well.
+  EXPECT_EQ(0, voe_dtmf_->SetSendTelephoneEventPayloadType(
+      channel_, codec_instance.pltype));
+
+  RunSixteenDtmfEvents(true);
+
+  EXPECT_EQ(0, voe_dtmf_->SetDtmfFeedbackStatus(true, false));
+}
diff --git a/src/voice_engine/test/auto_test/standard/encryption_test.cc b/src/voice_engine/test/auto_test/standard/encryption_test.cc
new file mode 100644
index 0000000..acb1a0d
--- /dev/null
+++ b/src/voice_engine/test/auto_test/standard/encryption_test.cc
@@ -0,0 +1,74 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voice_engine/include/voe_encryption.h"
+#include "voice_engine/test/auto_test/fixtures/after_streaming_fixture.h"
+
+class BasicBitInverseEncryption : public webrtc::Encryption {
+  void encrypt(int channel_no, unsigned char* in_data,
+               unsigned char* out_data, int bytes_in, int* bytes_out);
+  void decrypt(int channel_no, unsigned char* in_data,
+               unsigned char* out_data, int bytes_in, int* bytes_out);
+  void encrypt_rtcp(int channel_no, unsigned char* in_data,
+                    unsigned char* out_data, int bytes_in, int* bytes_out);
+  void decrypt_rtcp(int channel_no, unsigned char* in_data,
+                    unsigned char* out_data, int bytes_in, int* bytes_out);
+};
+
+void BasicBitInverseEncryption::encrypt(int, unsigned char* in_data,
+                                        unsigned char* out_data,
+                                        int bytes_in, int* bytes_out) {
+  int i;
+  for (i = 0; i < bytes_in; i++)
+    out_data[i] = ~in_data[i];
+  *bytes_out = bytes_in + 2;
+}
+
+void BasicBitInverseEncryption::decrypt(int, unsigned char* in_data,
+                                        unsigned char* out_data,
+                                        int bytes_in, int* bytes_out) {
+  int i;
+  for (i = 0; i < bytes_in; i++)
+    out_data[i] = ~in_data[i];
+  *bytes_out = bytes_in - 2;
+}
+
+void BasicBitInverseEncryption::encrypt_rtcp(int, unsigned char* in_data,
+                                             unsigned char* out_data,
+                                             int bytes_in, int* bytes_out) {
+  int i;
+  for (i = 0; i < bytes_in; i++)
+    out_data[i] = ~in_data[i];
+  *bytes_out = bytes_in + 2;
+}
+
+void BasicBitInverseEncryption::decrypt_rtcp(int, unsigned char* in_data,
+                                             unsigned char* out_data,
+                                             int bytes_in, int* bytes_out) {
+  int i;
+  for (i = 0; i < bytes_in; i++)
+    out_data[i] = ~in_data[i];
+  *bytes_out = bytes_in + 2;
+}
+
+
+class EncryptionTest : public AfterStreamingFixture {
+};
+
+TEST_F(EncryptionTest, ManualBasicCorrectExternalEncryptionHasNoEffectOnVoice) {
+  BasicBitInverseEncryption basic_encryption;
+
+  voe_encrypt_->RegisterExternalEncryption(channel_, basic_encryption);
+
+  TEST_LOG("Registered external encryption, should still hear good audio.");
+  Sleep(3000);
+
+  voe_encrypt_->DeRegisterExternalEncryption(channel_);
+}
diff --git a/src/voice_engine/test/auto_test/standard/external_media_test.cc b/src/voice_engine/test/auto_test/standard/external_media_test.cc
new file mode 100644
index 0000000..5c641ba
--- /dev/null
+++ b/src/voice_engine/test/auto_test/standard/external_media_test.cc
@@ -0,0 +1,84 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voice_engine/include/voe_external_media.h"
+#include "voice_engine/test/auto_test/fakes/fake_media_process.h"
+#include "voice_engine/test/auto_test/fixtures/after_streaming_fixture.h"
+
+class ExternalMediaTest : public AfterStreamingFixture {
+ protected:
+  void TestRegisterExternalMedia(int channel, webrtc::ProcessingTypes type) {
+    FakeMediaProcess fake_media_process;
+    EXPECT_EQ(0, voe_xmedia_->RegisterExternalMediaProcessing(
+        channel, type, fake_media_process));
+    Sleep(2000);
+
+    TEST_LOG("Back to normal.\n");
+    EXPECT_EQ(0, voe_xmedia_->DeRegisterExternalMediaProcessing(
+        channel, type));
+    Sleep(2000);
+  }
+};
+
+TEST_F(ExternalMediaTest, ManualCanRecordAndPlaybackUsingExternalPlayout) {
+  SwitchToManualMicrophone();
+
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  EXPECT_EQ(0, voe_base_->StopPlayout(channel_));
+  EXPECT_EQ(0, voe_xmedia_->SetExternalPlayoutStatus(true));
+  EXPECT_EQ(0, voe_base_->StartPlayout(channel_));
+  EXPECT_EQ(0, voe_base_->StartSend(channel_));
+
+  TEST_LOG("Recording data for 2 seconds starting now: please speak.\n");
+  int16_t recording[32000];
+  for (int i = 0; i < 200; i++) {
+    int sample_length = 0;
+    EXPECT_EQ(0, voe_xmedia_->ExternalPlayoutGetData(
+        &(recording[i * 160]), 16000, 100, sample_length));
+    EXPECT_EQ(160, sample_length);
+    Sleep(10);
+  }
+
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  EXPECT_EQ(0, voe_base_->StopPlayout(channel_));
+  EXPECT_EQ(0, voe_xmedia_->SetExternalPlayoutStatus(false));
+  EXPECT_EQ(0, voe_base_->StartPlayout(channel_));
+  EXPECT_EQ(0, voe_xmedia_->SetExternalRecordingStatus(true));
+  EXPECT_EQ(0, voe_base_->StartSend(channel_));
+
+  TEST_LOG("Playing back recording, you should hear what you said earlier.\n");
+  for (int i = 0; i < 200; i++) {
+    EXPECT_EQ(0, voe_xmedia_->ExternalRecordingInsertData(
+        &(recording[i * 160]), 160, 16000, 20));
+    Sleep(10);
+  }
+
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  EXPECT_EQ(0, voe_xmedia_->SetExternalRecordingStatus(false));
+}
+
+TEST_F(ExternalMediaTest,
+    ManualRegisterExternalMediaProcessingOnAllChannelsAffectsPlayout) {
+  TEST_LOG("Enabling external media processing: audio should be affected.\n");
+  TestRegisterExternalMedia(-1, webrtc::kPlaybackAllChannelsMixed);
+}
+
+TEST_F(ExternalMediaTest,
+    ManualRegisterExternalMediaOnSingleChannelAffectsPlayout) {
+  TEST_LOG("Enabling external media processing: audio should be affected.\n");
+  TestRegisterExternalMedia(channel_, webrtc::kRecordingPerChannel);
+}
+
+TEST_F(ExternalMediaTest,
+    ManualRegisterExternalMediaOnAllChannelsMixedAffectsRecording) {
+  SwitchToManualMicrophone();
+  TEST_LOG("Speak and verify your voice is distorted.\n");
+  TestRegisterExternalMedia(-1, webrtc::kRecordingAllChannelsMixed);
+}
diff --git a/src/voice_engine/test/auto_test/standard/file_before_streaming_test.cc b/src/voice_engine/test/auto_test/standard/file_before_streaming_test.cc
new file mode 100644
index 0000000..5a10d72
--- /dev/null
+++ b/src/voice_engine/test/auto_test/standard/file_before_streaming_test.cc
@@ -0,0 +1,132 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_initialization_fixture.h"
+#include "test/testsupport/fileutils.h"
+
+namespace {
+
+const int kSampleRateHz = 16000;
+const int kTestDurationMs = 1000;
+const int kSkipOutputMs = 50;
+const int16_t kInputValue = 15000;
+const int16_t kSilenceValue = 0;
+
+}  // namespace
+
+class FileBeforeStreamingTest : public AfterInitializationFixture {
+ protected:
+  FileBeforeStreamingTest()
+      : input_filename_(webrtc::test::OutputPath() + "file_test_input.pcm"),
+        output_filename_(webrtc::test::OutputPath() + "file_test_output.pcm") {
+  }
+
+  void SetUp() {
+    channel_ = voe_base_->CreateChannel();
+  }
+
+  void TearDown() {
+    voe_base_->DeleteChannel(channel_);
+  }
+
+  // TODO(andrew): consolidate below methods in a shared place?
+
+  // Generate input file with constant values as |kInputValue|. The file
+  // will be one second longer than the duration of the test.
+  void GenerateInputFile() {
+    FILE* input_file = fopen(input_filename_.c_str(), "wb");
+    ASSERT_TRUE(input_file != NULL);
+    for (int i = 0; i < kSampleRateHz / 1000 * (kTestDurationMs + 1000); i++) {
+      ASSERT_EQ(1u, fwrite(&kInputValue, sizeof(kInputValue), 1, input_file));
+    }
+    ASSERT_EQ(0, fclose(input_file));
+  }
+
+  void RecordOutput() {
+    // Start recording the mixed output for |kTestDurationMs| long.
+    EXPECT_EQ(0, voe_file_->StartRecordingPlayout(-1,
+        output_filename_.c_str()));
+    Sleep(kTestDurationMs);
+    EXPECT_EQ(0, voe_file_->StopRecordingPlayout(-1));
+  }
+
+  void VerifyOutput(int16_t target_value) {
+    FILE* output_file = fopen(output_filename_.c_str(), "rb");
+    ASSERT_TRUE(output_file != NULL);
+    int16_t output_value = 0;
+    int samples_read = 0;
+
+    // Skip the first segment to avoid initialization and ramping-in effects.
+    EXPECT_EQ(0, fseek(output_file, sizeof(output_value) *
+                       kSampleRateHz / 1000 * kSkipOutputMs, SEEK_SET));
+    while (fread(&output_value, sizeof(output_value), 1, output_file) == 1) {
+      samples_read++;
+      EXPECT_EQ(output_value, target_value);
+    }
+
+    // Ensure that a reasonable amount was recorded. We use a loose
+    // tolerance to avoid flaky bot failures.
+    ASSERT_GE((samples_read * 1000.0) / kSampleRateHz, 0.4 * kTestDurationMs);
+
+    // Ensure we read the entire file.
+    ASSERT_NE(0, feof(output_file));
+    ASSERT_EQ(0, fclose(output_file));
+  }
+
+void VerifyEmptyOutput() {
+  FILE* output_file = fopen(output_filename_.c_str(), "rb");
+  ASSERT_TRUE(output_file != NULL);
+  ASSERT_EQ(0, fseek(output_file, 0, SEEK_END));
+  EXPECT_EQ(0, ftell(output_file));
+  ASSERT_EQ(0, fclose(output_file));
+}
+
+  int channel_;
+  const std::string input_filename_;
+  const std::string output_filename_;
+};
+
+// This test case is to ensure that StartPlayingFileLocally() and
+// StartPlayout() can be called in any order.
+// A DC signal is used as input. And the output of mixer is supposed to be:
+// 1. the same DC signal if file is played out,
+// 2. total silence if file is not played out,
+// 3. no output if playout is not started.
+TEST_F(FileBeforeStreamingTest, TestStartPlayingFileLocallyWithStartPlayout) {
+  GenerateInputFile();
+
+  TEST_LOG("Playout is not started. File will not be played out.\n");
+  EXPECT_EQ(0, voe_file_->StartPlayingFileLocally(
+      channel_, input_filename_.c_str(), true));
+  EXPECT_EQ(1, voe_file_->IsPlayingFileLocally(channel_));
+  RecordOutput();
+  VerifyEmptyOutput();
+
+  TEST_LOG("Playout is now started. File will be played out.\n");
+  EXPECT_EQ(0, voe_base_->StartPlayout(channel_));
+  RecordOutput();
+  VerifyOutput(kInputValue);
+
+  TEST_LOG("Stop playing file. Only silence will be played out.\n");
+  EXPECT_EQ(0, voe_file_->StopPlayingFileLocally(channel_));
+  EXPECT_EQ(0, voe_file_->IsPlayingFileLocally(channel_));
+  RecordOutput();
+  VerifyOutput(kSilenceValue);
+
+  TEST_LOG("Start playing file again. File will be played out.\n");
+  EXPECT_EQ(0, voe_file_->StartPlayingFileLocally(
+      channel_, input_filename_.c_str(), true));
+  EXPECT_EQ(1, voe_file_->IsPlayingFileLocally(channel_));
+  RecordOutput();
+  VerifyOutput(kInputValue);
+
+  EXPECT_EQ(0, voe_base_->StopPlayout(channel_));
+  EXPECT_EQ(0, voe_file_->StopPlayingFileLocally(channel_));
+}
diff --git a/src/voice_engine/test/auto_test/standard/file_test.cc b/src/voice_engine/test/auto_test/standard/file_test.cc
new file mode 100644
index 0000000..63b1600
--- /dev/null
+++ b/src/voice_engine/test/auto_test/standard/file_test.cc
@@ -0,0 +1,82 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_streaming_fixture.h"
+#include "testsupport/fileutils.h"
+
+class FileTest : public AfterStreamingFixture {
+ protected:
+  // Creates the string åäö.pcm.
+  std::string CreateTrickyFilenameInUtf8() {
+    char filename[16] = { (char)0xc3, (char)0xa5,
+                          (char)0xc3, (char)0xa4,
+                          (char)0xc3, (char)0xb6,
+                          static_cast<char>(0) };
+    return std::string(filename) + ".pcm";
+  }
+};
+
+TEST_F(FileTest, ManualRecordToFileForThreeSecondsAndPlayback) {
+  SwitchToManualMicrophone();
+
+  std::string recording_filename =
+      webrtc::test::OutputPath() + CreateTrickyFilenameInUtf8();
+
+  TEST_LOG("Recording to %s for 3 seconds.\n", recording_filename.c_str());
+  EXPECT_EQ(0, voe_file_->StartRecordingMicrophone(recording_filename.c_str()));
+  Sleep(3000);
+  EXPECT_EQ(0, voe_file_->StopRecordingMicrophone());
+
+  TEST_LOG("Playing back %s.\n", recording_filename.c_str());
+  EXPECT_EQ(0, voe_file_->StartPlayingFileLocally(
+      channel_, recording_filename.c_str()));
+
+  // Play the file to the user and ensure the is-playing-locally
+  // and scaling methods also work. The clip is 3 seconds long.
+  Sleep(250);
+  EXPECT_EQ(1, voe_file_->IsPlayingFileLocally(channel_));
+  Sleep(1500);
+  TEST_LOG("Decreasing level by 50%%.\n");
+  EXPECT_EQ(0, voe_file_->ScaleLocalFilePlayout(channel_, 0.5f));
+  Sleep(1500);
+  EXPECT_EQ(0, voe_file_->IsPlayingFileLocally(channel_));
+}
+
+TEST_F(FileTest, ManualRecordPlayoutToWavFileForThreeSecondsAndPlayback) {
+  webrtc::CodecInst send_codec;
+  voe_codec_->GetSendCodec(channel_, send_codec);
+
+  std::string recording_filename =
+      webrtc::test::OutputPath() + "playout.wav";
+
+  TEST_LOG("Recording playout to %s.\n", recording_filename.c_str());
+  EXPECT_EQ(0, voe_file_->StartRecordingPlayout(
+      channel_, recording_filename.c_str(), &send_codec));
+  Sleep(3000);
+  EXPECT_EQ(0, voe_file_->StopRecordingPlayout(channel_));
+
+  TEST_LOG("Playing back the recording in looping mode.\n");
+  EXPECT_EQ(0, voe_file_->StartPlayingFileAsMicrophone(
+      channel_, recording_filename.c_str(), true, false,
+      webrtc::kFileFormatWavFile));
+
+  Sleep(2000);
+  EXPECT_EQ(1, voe_file_->IsPlayingFileAsMicrophone(channel_));
+  Sleep(2000);
+  // We should still be playing since we're looping.
+  EXPECT_EQ(1, voe_file_->IsPlayingFileAsMicrophone(channel_));
+
+  // Try scaling as well.
+  TEST_LOG("Decreasing level by 50%%.\n");
+  EXPECT_EQ(0, voe_file_->ScaleFileAsMicrophonePlayout(channel_, 0.5f));
+  Sleep(1000);
+
+  EXPECT_EQ(0, voe_file_->StopPlayingFileAsMicrophone(channel_));
+}
diff --git a/src/voice_engine/test/auto_test/standard/hardware_before_initializing_test.cc b/src/voice_engine/test/auto_test/standard/hardware_before_initializing_test.cc
new file mode 100644
index 0000000..540614e
--- /dev/null
+++ b/src/voice_engine/test/auto_test/standard/hardware_before_initializing_test.cc
@@ -0,0 +1,27 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "common_types.h"
+#include "before_initialization_fixture.h"
+
+using namespace webrtc;
+
+class HardwareBeforeInitializingTest : public BeforeInitializationFixture {
+};
+
+TEST_F(HardwareBeforeInitializingTest,
+       SetAudioDeviceLayerAcceptsPlatformDefaultBeforeInitializing) {
+  AudioLayers wanted_layer = kAudioPlatformDefault;
+  AudioLayers given_layer;
+  EXPECT_EQ(0, voe_hardware_->SetAudioDeviceLayer(wanted_layer));
+  EXPECT_EQ(0, voe_hardware_->GetAudioDeviceLayer(given_layer));
+  EXPECT_EQ(wanted_layer, given_layer) <<
+      "These should be the same before initializing.";
+}
diff --git a/src/voice_engine/test/auto_test/standard/hardware_before_streaming_test.cc b/src/voice_engine/test/auto_test/standard/hardware_before_streaming_test.cc
new file mode 100644
index 0000000..6e56347
--- /dev/null
+++ b/src/voice_engine/test/auto_test/standard/hardware_before_streaming_test.cc
@@ -0,0 +1,166 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cstring>
+
+#include "after_initialization_fixture.h"
+
+using namespace webrtc;
+
+static const char* kNoDevicesErrorMessage =
+    "Either you have no recording / playout device "
+    "on your system, or the method failed.";
+
+class HardwareBeforeStreamingTest : public AfterInitializationFixture {
+};
+
+// Tests that apply to both mobile and desktop:
+
+TEST_F(HardwareBeforeStreamingTest,
+       SetAudioDeviceLayerFailsSinceTheVoiceEngineHasBeenInitialized) {
+  EXPECT_NE(0, voe_hardware_->SetAudioDeviceLayer(kAudioPlatformDefault));
+  EXPECT_EQ(VE_ALREADY_INITED, voe_base_->LastError());
+}
+
+TEST_F(HardwareBeforeStreamingTest,
+       GetCPULoadSucceedsOnWindowsButNotOtherPlatforms) {
+  int load_percent;
+#if defined(_WIN32)
+  EXPECT_EQ(0, voe_hardware_->GetCPULoad(load_percent));
+#else
+  EXPECT_NE(0, voe_hardware_->GetCPULoad(load_percent)) <<
+      "Should fail on non-Windows platforms.";
+#endif
+}
+
+// Tests that only apply to mobile:
+
+#ifdef MAC_IPHONE
+TEST_F(HardwareBeforeStreamingTest, ResetsAudioDeviceOnIphone) {
+  EXPECT_EQ(0, voe_hardware_->ResetAudioDevice());
+}
+#endif
+
+// Tests that only apply to desktop:
+#if !defined(MAC_IPHONE) & !defined(WEBRTC_ANDROID)
+
+TEST_F(HardwareBeforeStreamingTest, GetSystemCpuLoadSucceeds) {
+#ifdef _WIN32
+  // This method needs some warm-up time on Windows. We sleep a good amount
+  // of time instead of retrying to make the test simpler.
+  Sleep(2000);
+#endif
+
+  int load_percent;
+  EXPECT_EQ(0, voe_hardware_->GetSystemCPULoad(load_percent));
+}
+
+TEST_F(HardwareBeforeStreamingTest, GetPlayoutDeviceStatusReturnsTrue) {
+  bool play_available = false;
+  EXPECT_EQ(0, voe_hardware_->GetPlayoutDeviceStatus(play_available));
+  ASSERT_TRUE(play_available) <<
+      "Ensures that the method works and that hardware is in the right state.";
+}
+
+TEST_F(HardwareBeforeStreamingTest, GetRecordingDeviceStatusReturnsTrue) {
+  bool recording_available = false;
+  EXPECT_EQ(0, voe_hardware_->GetRecordingDeviceStatus(recording_available));
+  EXPECT_TRUE(recording_available) <<
+      "Ensures that the method works and that hardware is in the right state.";
+}
+
+  // Win, Mac and Linux sound device tests.
+TEST_F(HardwareBeforeStreamingTest,
+       GetRecordingDeviceNameRetrievesDeviceNames) {
+  char device_name[128] = {0};
+  char guid_name[128] = {0};
+
+#ifdef _WIN32
+  EXPECT_EQ(0, voe_hardware_->GetRecordingDeviceName(
+      -1, device_name, guid_name));
+  EXPECT_GT(strlen(device_name), 0u) << kNoDevicesErrorMessage;
+  device_name[0] = '\0';
+
+  EXPECT_EQ(0, voe_hardware_->GetPlayoutDeviceName(
+      -1, device_name, guid_name));
+  EXPECT_GT(strlen(device_name), 0u) << kNoDevicesErrorMessage;
+
+#else
+  EXPECT_EQ(0, voe_hardware_->GetRecordingDeviceName(
+      0, device_name, guid_name));
+  EXPECT_GT(strlen(device_name), 0u) << kNoDevicesErrorMessage;
+  device_name[0] = '\0';
+
+  EXPECT_EQ(0, voe_hardware_->GetPlayoutDeviceName(
+      0, device_name, guid_name));
+  EXPECT_GT(strlen(device_name), 0u) << kNoDevicesErrorMessage;
+#endif  // !WIN32
+}
+
+TEST_F(HardwareBeforeStreamingTest,
+       AllEnumeratedRecordingDevicesCanBeSetAsRecordingDevice) {
+  // Check recording side.
+  // Extended Win32 enumeration tests: unique GUID outputs on Vista and up:
+  // Win XP and below : device_name is copied to guid_name.
+  // Win Vista and up : device_name is the friendly name and GUID is a unique
+  //                    identifier.
+  // Other            : guid_name is left unchanged.
+  int num_of_recording_devices = 0;
+  EXPECT_EQ(0, voe_hardware_->GetNumOfRecordingDevices(
+      num_of_recording_devices));
+  EXPECT_GT(num_of_recording_devices, 0) << kNoDevicesErrorMessage;
+
+  char device_name[128] = {0};
+  char guid_name[128] = {0};
+
+  for (int i = 0; i < num_of_recording_devices; i++) {
+    EXPECT_EQ(0, voe_hardware_->GetRecordingDeviceName(
+        i, device_name, guid_name));
+    EXPECT_GT(strlen(device_name), 0u) <<
+        "There should be no empty device names "
+        "among the ones the system gives us.";
+    EXPECT_EQ(0, voe_hardware_->SetRecordingDevice(i));
+  }
+}
+
+TEST_F(HardwareBeforeStreamingTest,
+       AllEnumeratedPlayoutDevicesCanBeSetAsPlayoutDevice) {
+  // Check playout side (see recording side test for more info on GUIDs).
+  int num_of_playout_devices = 0;
+  EXPECT_EQ(0, voe_hardware_->GetNumOfPlayoutDevices(
+      num_of_playout_devices));
+  EXPECT_GT(num_of_playout_devices, 0) << kNoDevicesErrorMessage;
+
+  char device_name[128] = {0};
+  char guid_name[128] = {0};
+
+  for (int i = 0; i < num_of_playout_devices; ++i) {
+    EXPECT_EQ(0, voe_hardware_->GetPlayoutDeviceName(
+        i, device_name, guid_name));
+    EXPECT_GT(strlen(device_name), 0u) <<
+        "There should be no empty device names "
+        "among the ones the system gives us.";
+    EXPECT_EQ(0, voe_hardware_->SetPlayoutDevice(i));
+  }
+}
+
+TEST_F(HardwareBeforeStreamingTest,
+       SetDeviceWithMagicalArgumentsSetsDefaultSoundDevices) {
+#ifdef _WIN32
+  // -1 means "default device" on Windows.
+  EXPECT_EQ(0, voe_hardware_->SetRecordingDevice(-1));
+  EXPECT_EQ(0, voe_hardware_->SetPlayoutDevice(-1));
+#else
+  EXPECT_EQ(0, voe_hardware_->SetRecordingDevice(0));
+  EXPECT_EQ(0, voe_hardware_->SetPlayoutDevice(0));
+#endif
+}
+
+#endif // !defined(MAC_IPHONE) & !defined(WEBRTC_ANDROID)
diff --git a/src/voice_engine/test/auto_test/standard/hardware_test.cc b/src/voice_engine/test/auto_test/standard/hardware_test.cc
new file mode 100644
index 0000000..41145e1
--- /dev/null
+++ b/src/voice_engine/test/auto_test/standard/hardware_test.cc
@@ -0,0 +1,168 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_streaming_fixture.h"
+#include "audio_device.h"
+#include "voe_test_defines.h"
+
+class HardwareTest : public AfterStreamingFixture {
+};
+
+#if !defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID)
+TEST_F(HardwareTest, AbleToQueryForDevices) {
+  int num_recording_devices = 0;
+  int num_playout_devices = 0;
+  EXPECT_EQ(0, voe_hardware_->GetNumOfRecordingDevices(num_recording_devices));
+  EXPECT_EQ(0, voe_hardware_->GetNumOfPlayoutDevices(num_playout_devices));
+
+  ASSERT_GT(num_recording_devices, 0) <<
+      "There seem to be no recording devices on your system, "
+      "and this test really doesn't make sense then.";
+  ASSERT_GT(num_playout_devices, 0) <<
+      "There seem to be no playout devices on your system, "
+      "and this test really doesn't make sense then.";
+
+  // Recording devices are handled a bit differently on Windows - we can
+  // just tell it to set the 'default' communication device there.
+#ifdef _WIN32
+  // Should also work while already recording.
+  EXPECT_EQ(0, voe_hardware_->SetRecordingDevice(
+      webrtc::AudioDeviceModule::kDefaultCommunicationDevice));
+  // Should also work while already playing.
+  EXPECT_EQ(0, voe_hardware_->SetPlayoutDevice(
+      webrtc::AudioDeviceModule::kDefaultCommunicationDevice));
+#else
+  // For other platforms, just use the first device encountered.
+  EXPECT_EQ(0, voe_hardware_->SetRecordingDevice(0));
+  EXPECT_EQ(0, voe_hardware_->SetPlayoutDevice(0));
+#endif
+
+  // It's hard to know what names this will return (it's system-dependent),
+  // so just check that it's possible to do it.
+  char device_name[128] = {0};
+  char guid_name[128] = {0};
+  EXPECT_EQ(0, voe_hardware_->GetRecordingDeviceName(
+      0, device_name, guid_name));
+  EXPECT_EQ(0, voe_hardware_->GetPlayoutDeviceName(
+      0, device_name, guid_name));
+}
+#endif
+
+#ifdef _WIN32
+TEST_F(HardwareTest, GetCpuLoadWorksOnWindows) {
+  int load = -1;
+  EXPECT_EQ(0, voe_hardware_->GetCPULoad(load));
+  EXPECT_GE(0, load);
+  TEST_LOG("Voice engine CPU load = %d%%\n", load);
+}
+#else
+TEST_F(HardwareTest, GetCpuLoadReturnsErrorOnNonWindowsPlatform) {
+  int load = -1;
+  EXPECT_EQ(-1, voe_hardware_->GetCPULoad(load));
+}
+#endif
+
+#if !defined(WEBRTC_MAC) && !defined(WEBRTC_ANDROID)
+TEST_F(HardwareTest, GetSystemCpuLoadWorksExceptOnMacAndAndroid) {
+#ifdef _WIN32
+  // This method needs some warm-up time on Windows. We sleep a good amount
+  // of time instead of retrying to make the test simpler.
+  Sleep(2000);
+#endif
+  int load = -1;
+  EXPECT_EQ(0, voe_hardware_->GetSystemCPULoad(load));
+  EXPECT_GE(load, 0);
+  TEST_LOG("System CPU load = %d%%\n", load);
+}
+#endif
+
+TEST_F(HardwareTest, BuiltInWasapiAECWorksForAudioWindowsCoreAudioLayer) {
+#ifdef MAC_IPHONE
+  // Ensure the sound device is reset on iPhone.
+  EXPECT_EQ(0, voe_hardware_->ResetAudioDevice());
+  Sleep(2000);
+#endif
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  EXPECT_EQ(0, voe_base_->StopPlayout(channel_));
+
+  webrtc::AudioLayers given_layer;
+  EXPECT_EQ(0, voe_hardware_->GetAudioDeviceLayer(given_layer));
+  if (given_layer != webrtc::kAudioWindowsCore) {
+    // Not Windows Audio Core - then it shouldn't work.
+    EXPECT_EQ(-1, voe_hardware_->EnableBuiltInAEC(true));
+    EXPECT_EQ(-1, voe_hardware_->EnableBuiltInAEC(false));
+    return;
+  }
+
+  TEST_LOG("Testing AEC for Audio Windows Core.\n");
+  EXPECT_EQ(0, voe_base_->StartSend(channel_));
+
+  // Can't be set after StartSend().
+  EXPECT_EQ(-1, voe_hardware_->EnableBuiltInAEC(true));
+  EXPECT_EQ(-1, voe_hardware_->EnableBuiltInAEC(false));
+
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  EXPECT_EQ(0, voe_hardware_->EnableBuiltInAEC(true));
+
+  // Can't be called before StartPlayout().
+  EXPECT_EQ(-1, voe_base_->StartSend(channel_));
+
+  EXPECT_EQ(0, voe_base_->StartPlayout(channel_));
+  EXPECT_EQ(0, voe_base_->StartSend(channel_));
+  TEST_LOG("Processing capture data with built-in AEC...\n");
+  Sleep(2000);
+
+  TEST_LOG("Looping through capture devices...\n");
+  int num_devs = 0;
+  char dev_name[128] = { 0 };
+  char guid_name[128] = { 0 };
+  EXPECT_EQ(0, voe_hardware_->GetNumOfRecordingDevices(num_devs));
+  for (int dev_index = 0; dev_index < num_devs; ++dev_index) {
+    EXPECT_EQ(0, voe_hardware_->GetRecordingDeviceName(dev_index,
+                                                       dev_name,
+                                                       guid_name));
+    TEST_LOG("%d: %s\n", dev_index, dev_name);
+    EXPECT_EQ(0, voe_hardware_->SetRecordingDevice(dev_index));
+    Sleep(2000);
+  }
+
+  EXPECT_EQ(0, voe_hardware_->SetPlayoutDevice(-1));
+  EXPECT_EQ(0, voe_hardware_->SetRecordingDevice(-1));
+
+  TEST_LOG("Looping through render devices, restarting for each "
+      "device...\n");
+  EXPECT_EQ(0, voe_hardware_->GetNumOfPlayoutDevices(num_devs));
+  for (int dev_index = 0; dev_index < num_devs; ++dev_index) {
+    EXPECT_EQ(0, voe_hardware_->GetPlayoutDeviceName(dev_index,
+                                                     dev_name,
+                                                     guid_name));
+    TEST_LOG("%d: %s\n", dev_index, dev_name);
+    EXPECT_EQ(0, voe_hardware_->SetPlayoutDevice(dev_index));
+    Sleep(2000);
+  }
+
+  TEST_LOG("Using default devices...\n");
+  EXPECT_EQ(0, voe_hardware_->SetRecordingDevice(-1));
+  EXPECT_EQ(0, voe_hardware_->SetPlayoutDevice(-1));
+  Sleep(2000);
+
+  // Possible, but not recommended before StopSend().
+  EXPECT_EQ(0, voe_base_->StopPlayout(channel_));
+
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  EXPECT_EQ(0, voe_base_->StopPlayout(channel_));
+  Sleep(2000);  // To verify that there is no garbage audio.
+
+  TEST_LOG("Disabling built-in AEC.\n");
+  EXPECT_EQ(0, voe_hardware_->EnableBuiltInAEC(false));
+
+  EXPECT_EQ(0, voe_base_->StartSend(channel_));
+  EXPECT_EQ(0, voe_base_->StartPlayout(channel_));
+}
diff --git a/src/voice_engine/test/auto_test/standard/manual_hold_test.cc b/src/voice_engine/test/auto_test/standard/manual_hold_test.cc
new file mode 100644
index 0000000..68f28b4
--- /dev/null
+++ b/src/voice_engine/test/auto_test/standard/manual_hold_test.cc
@@ -0,0 +1,43 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_streaming_fixture.h"
+
+// Note: This class includes sleeps and requires manual verification.
+class ManualHoldTest : public AfterStreamingFixture {
+};
+
+TEST_F(ManualHoldTest, SetOnHoldStatusBlockAudio) {
+  TEST_LOG("Channel not on hold => should hear audio.\n");
+  Sleep(2000);
+  TEST_LOG("Put channel on hold => should *not* hear audio.\n");
+  EXPECT_EQ(0, voe_base_->SetOnHoldStatus(channel_, true));
+  Sleep(2000);
+  TEST_LOG("Remove on hold => should hear audio again.\n");
+  EXPECT_EQ(0, voe_base_->SetOnHoldStatus(channel_, false));
+  Sleep(2000);
+  TEST_LOG("Put sending on hold => should *not* hear audio.\n");
+  EXPECT_EQ(0, voe_base_->SetOnHoldStatus(channel_, true, webrtc::kHoldSendOnly));
+  Sleep(2000);
+}
+
+TEST_F(ManualHoldTest, SetOnHoldStatusBlocksLocalFileAudio) {
+  TEST_LOG("Start playing a file locally => "
+      "you should now hear this file being played out.\n");
+  voe_file_->StopPlayingFileAsMicrophone(channel_);
+  EXPECT_EQ(0, voe_file_->StartPlayingFileLocally(
+      channel_, resource_manager_.long_audio_file_path().c_str(), true));
+  Sleep(2000);
+
+  TEST_LOG("Put playing on hold => should *not* hear audio.\n");
+  EXPECT_EQ(0, voe_base_->SetOnHoldStatus(
+      channel_, true, webrtc::kHoldPlayOnly));
+  Sleep(2000);
+}
diff --git a/src/voice_engine/test/auto_test/standard/mixing_test.cc b/src/voice_engine/test/auto_test/standard/mixing_test.cc
new file mode 100644
index 0000000..565d8b6
--- /dev/null
+++ b/src/voice_engine/test/auto_test/standard/mixing_test.cc
@@ -0,0 +1,243 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+
+#include <string>
+
+#include "after_initialization_fixture.h"
+#include "test/testsupport/fileutils.h"
+
+namespace webrtc {
+namespace {
+
+const int16_t kLimiterHeadroom = 29204;  // == -1 dbFS
+const int16_t kInt16Max = 0x7fff;
+const int kSampleRateHz = 16000;
+const int kTestDurationMs = 3000;
+const int kSkipOutputMs = 500;
+
+}  // namespace
+
+class MixingTest : public AfterInitializationFixture {
+ protected:
+  MixingTest()
+    : input_filename_(test::OutputPath() + "mixing_test_input.pcm"),
+      output_filename_(test::OutputPath() + "mixing_test_output.pcm") {
+  }
+
+  // Creates and mixes |num_remote_streams| which play a file "as microphone"
+  // with |num_local_streams| which play a file "locally", using a constant
+  // amplitude of |input_value|. The local streams manifest as "anonymous"
+  // mixing participants, meaning they will be mixed regardless of the number
+  // of participants. (A stream is a VoiceEngine "channel").
+  //
+  // The mixed output is verified to always fall between |max_output_value| and
+  // |min_output_value|, after a startup phase.
+  //
+  // |num_remote_streams_using_mono| of the remote streams use mono, with the
+  // remainder using stereo.
+  void RunMixingTest(int num_remote_streams,
+                     int num_local_streams,
+                     int num_remote_streams_using_mono,
+                     int16_t input_value,
+                     int16_t max_output_value,
+                     int16_t min_output_value) {
+    ASSERT_LE(num_remote_streams_using_mono, num_remote_streams);
+
+    GenerateInputFile(input_value);
+
+    std::vector<int> local_streams(num_local_streams);
+    for (size_t i = 0; i < local_streams.size(); ++i) {
+      local_streams[i] = voe_base_->CreateChannel();
+      EXPECT_NE(-1, local_streams[i]);
+    }
+    StartLocalStreams(local_streams);
+    TEST_LOG("Playing %d local streams.\n", num_local_streams);
+
+    std::vector<int> remote_streams(num_remote_streams);
+    for (size_t i = 0; i < remote_streams.size(); ++i) {
+      remote_streams[i] = voe_base_->CreateChannel();
+      EXPECT_NE(-1, remote_streams[i]);
+    }
+    StartRemoteStreams(remote_streams, num_remote_streams_using_mono);
+    TEST_LOG("Playing %d remote streams.\n", num_remote_streams);
+
+    // Start recording the mixed output and wait.
+    EXPECT_EQ(0, voe_file_->StartRecordingPlayout(-1 /* record meeting */,
+        output_filename_.c_str()));
+    Sleep(kTestDurationMs);
+    EXPECT_EQ(0, voe_file_->StopRecordingPlayout(-1));
+
+    StopLocalStreams(local_streams);
+    StopRemoteStreams(remote_streams);
+
+    VerifyMixedOutput(max_output_value, min_output_value);
+  }
+
+ private:
+  // Generate input file with constant values equal to |input_value|. The file
+  // will be one second longer than the duration of the test.
+  void GenerateInputFile(int16_t input_value) {
+    FILE* input_file = fopen(input_filename_.c_str(), "wb");
+    ASSERT_TRUE(input_file != NULL);
+    for (int i = 0; i < kSampleRateHz / 1000 * (kTestDurationMs + 1000); i++) {
+      ASSERT_EQ(1u, fwrite(&input_value, sizeof(input_value), 1, input_file));
+    }
+    ASSERT_EQ(0, fclose(input_file));
+  }
+
+  void VerifyMixedOutput(int16_t max_output_value, int16_t min_output_value) {
+    // Verify the mixed output.
+    FILE* output_file = fopen(output_filename_.c_str(), "rb");
+    ASSERT_TRUE(output_file != NULL);
+    int16_t output_value = 0;
+    // Skip the first segment to avoid initialization and ramping-in effects.
+    EXPECT_EQ(0, fseek(output_file, sizeof(output_value) *
+                       kSampleRateHz / 1000 * kSkipOutputMs, SEEK_SET));
+    int samples_read = 0;
+    while (fread(&output_value, sizeof(output_value), 1, output_file) == 1) {
+      samples_read++;
+      std::ostringstream trace_stream;
+      trace_stream << samples_read << " samples read";
+      SCOPED_TRACE(trace_stream.str());
+      EXPECT_LE(output_value, max_output_value);
+      EXPECT_GE(output_value, min_output_value);
+    }
+    // Ensure the recording length is close to the duration of the test.
+    ASSERT_GE((samples_read * 1000.0) / kSampleRateHz,
+              0.9 * (kTestDurationMs - kSkipOutputMs));
+    // Ensure we read the entire file.
+    ASSERT_NE(0, feof(output_file));
+    ASSERT_EQ(0, fclose(output_file));
+  }
+
+  // Start up local streams ("anonymous" participants).
+  void StartLocalStreams(const std::vector<int>& streams) {
+    for (size_t i = 0; i < streams.size(); ++i) {
+      EXPECT_EQ(0, voe_base_->StartPlayout(streams[i]));
+      EXPECT_EQ(0, voe_file_->StartPlayingFileLocally(streams[i],
+          input_filename_.c_str(), true));
+    }
+  }
+
+  void StopLocalStreams(const std::vector<int>& streams) {
+    for (size_t i = 0; i < streams.size(); ++i) {
+      EXPECT_EQ(0, voe_base_->StopPlayout(streams[i]));
+      EXPECT_EQ(0, voe_base_->DeleteChannel(streams[i]));
+    }
+  }
+
+  // Start up remote streams ("normal" participants).
+  void StartRemoteStreams(const std::vector<int>& streams,
+                          int num_remote_streams_using_mono) {
+    // Use L16 at 16kHz to minimize distortion (file recording is 16kHz and
+    // resampling will cause distortion).
+    CodecInst codec_inst;
+    strcpy(codec_inst.plname, "L16");
+    codec_inst.channels = 1;
+    codec_inst.plfreq = kSampleRateHz;
+    codec_inst.pltype = 105;
+    codec_inst.pacsize = codec_inst.plfreq / 100;
+    codec_inst.rate = codec_inst.plfreq * sizeof(int16_t) * 8;  // 8 bits/byte.
+
+    for (int i = 0; i < num_remote_streams_using_mono; ++i) {
+      StartRemoteStream(streams[i], codec_inst, 1234 + 2 * i);
+    }
+
+    // The remainder of the streams will use stereo.
+    codec_inst.channels = 2;
+    codec_inst.pltype++;
+    for (size_t i = num_remote_streams_using_mono; i < streams.size(); ++i) {
+      StartRemoteStream(streams[i], codec_inst, 1234 + 2 * i);
+    }
+  }
+
+  // Start up a single remote stream.
+  void StartRemoteStream(int stream, const CodecInst& codec_inst, int port) {
+    EXPECT_EQ(0, voe_codec_->SetRecPayloadType(stream, codec_inst));
+    EXPECT_EQ(0, voe_base_->SetLocalReceiver(stream, port));
+    EXPECT_EQ(0, voe_base_->SetSendDestination(stream, port, "127.0.0.1"));
+    EXPECT_EQ(0, voe_base_->StartReceive(stream));
+    EXPECT_EQ(0, voe_base_->StartPlayout(stream));
+    EXPECT_EQ(0, voe_codec_->SetSendCodec(stream, codec_inst));
+    EXPECT_EQ(0, voe_base_->StartSend(stream));
+    EXPECT_EQ(0, voe_file_->StartPlayingFileAsMicrophone(stream,
+        input_filename_.c_str(), true));
+  }
+
+  void StopRemoteStreams(const std::vector<int>& streams) {
+    for (size_t i = 0; i < streams.size(); ++i) {
+      EXPECT_EQ(0, voe_base_->StopSend(streams[i]));
+      EXPECT_EQ(0, voe_base_->StopPlayout(streams[i]));
+      EXPECT_EQ(0, voe_base_->StopReceive(streams[i]));
+      EXPECT_EQ(0, voe_base_->DeleteChannel(streams[i]));
+    }
+  }
+
+  const std::string input_filename_;
+  const std::string output_filename_;
+};
+
+// These tests assume a maximum of three mixed participants. We typically allow
+// a +/- 10% range around the expected output level to account for distortion
+// from coding and processing in the loopback chain.
+TEST_F(MixingTest, FourChannelsWithOnlyThreeMixed) {
+  const int16_t kInputValue = 1000;
+  const int16_t kExpectedOutput = kInputValue * 3;
+  RunMixingTest(4, 0, 4, kInputValue, 1.1 * kExpectedOutput,
+                0.9 * kExpectedOutput);
+}
+
+// Ensure the mixing saturation protection is working. We can do this because
+// the mixing limiter is given some headroom, so the expected output is less
+// than full scale.
+TEST_F(MixingTest, VerifySaturationProtection) {
+  const int16_t kInputValue = 20000;
+  const int16_t kExpectedOutput = kLimiterHeadroom;
+  // If this isn't satisfied, we're not testing anything.
+  ASSERT_GT(kInputValue * 3, kInt16Max);
+  ASSERT_LT(1.1 * kExpectedOutput, kInt16Max);
+  RunMixingTest(3, 0, 3, kInputValue, 1.1 * kExpectedOutput,
+               0.9 * kExpectedOutput);
+}
+
+TEST_F(MixingTest, SaturationProtectionHasNoEffectOnOneChannel) {
+  const int16_t kInputValue = kInt16Max;
+  const int16_t kExpectedOutput = kInt16Max;
+  // If this isn't satisfied, we're not testing anything.
+  ASSERT_GT(0.95 * kExpectedOutput, kLimiterHeadroom);
+  // Tighter constraints are required here to properly test this.
+  RunMixingTest(1, 0, 1, kInputValue, kExpectedOutput,
+                0.95 * kExpectedOutput);
+}
+
+TEST_F(MixingTest, VerifyAnonymousAndNormalParticipantMixing) {
+  const int16_t kInputValue = 1000;
+  const int16_t kExpectedOutput = kInputValue * 2;
+  RunMixingTest(1, 1, 1, kInputValue, 1.1 * kExpectedOutput,
+                0.9 * kExpectedOutput);
+}
+
+TEST_F(MixingTest, AnonymousParticipantsAreAlwaysMixed) {
+  const int16_t kInputValue = 1000;
+  const int16_t kExpectedOutput = kInputValue * 4;
+  RunMixingTest(3, 1, 3, kInputValue, 1.1 * kExpectedOutput,
+                0.9 * kExpectedOutput);
+}
+
+TEST_F(MixingTest, VerifyStereoAndMonoMixing) {
+  const int16_t kInputValue = 1000;
+  const int16_t kExpectedOutput = kInputValue * 2;
+  RunMixingTest(2, 0, 1, kInputValue, 1.1 * kExpectedOutput,
+                0.9 * kExpectedOutput);
+}
+
+}  // namespace webrtc
diff --git a/src/voice_engine/test/auto_test/standard/neteq_stats_test.cc b/src/voice_engine/test/auto_test/standard/neteq_stats_test.cc
new file mode 100644
index 0000000..0cb4158
--- /dev/null
+++ b/src/voice_engine/test/auto_test/standard/neteq_stats_test.cc
@@ -0,0 +1,52 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voice_engine/test/auto_test/fixtures/after_streaming_fixture.h"
+
+class NetEQStatsTest : public AfterStreamingFixture {
+};
+
+TEST_F(NetEQStatsTest, ManualPrintStatisticsAfterRunningAWhile) {
+  Sleep(5000);
+
+  webrtc::NetworkStatistics network_statistics;
+
+  EXPECT_EQ(0, voe_neteq_stats_->GetNetworkStatistics(
+      channel_, network_statistics));
+
+  TEST_LOG("Inspect these statistics and ensure they make sense.\n");
+
+  TEST_LOG("    currentAccelerateRate     = %hu \n",
+      network_statistics.currentAccelerateRate);
+  TEST_LOG("    currentBufferSize         = %hu \n",
+      network_statistics.currentBufferSize);
+  TEST_LOG("    currentDiscardRate        = %hu \n",
+      network_statistics.currentDiscardRate);
+  TEST_LOG("    currentExpandRate         = %hu \n",
+      network_statistics.currentExpandRate);
+  TEST_LOG("    currentPacketLossRate     = %hu \n",
+      network_statistics.currentPacketLossRate);
+  TEST_LOG("    currentPreemptiveRate     = %hu \n",
+      network_statistics.currentPreemptiveRate);
+  TEST_LOG("    preferredBufferSize       = %hu \n",
+      network_statistics.preferredBufferSize);
+  TEST_LOG("    jitterPeaksFound          = %i \n",
+      network_statistics.jitterPeaksFound);
+  TEST_LOG("    clockDriftPPM             = %i \n",
+      network_statistics.clockDriftPPM);
+  TEST_LOG("    meanWaitingTimeMs         = %i \n",
+      network_statistics.meanWaitingTimeMs);
+  TEST_LOG("    medianWaitingTimeMs       = %i \n",
+      network_statistics.medianWaitingTimeMs);
+  TEST_LOG("    minWaitingTimeMs          = %i \n",
+      network_statistics.minWaitingTimeMs);
+  TEST_LOG("    maxWaitingTimeMs          = %i \n",
+      network_statistics.maxWaitingTimeMs);
+}
diff --git a/src/voice_engine/test/auto_test/standard/neteq_test.cc b/src/voice_engine/test/auto_test/standard/neteq_test.cc
new file mode 100644
index 0000000..8184535
--- /dev/null
+++ b/src/voice_engine/test/auto_test/standard/neteq_test.cc
@@ -0,0 +1,82 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_streaming_fixture.h"
+
+class NetEQTest : public AfterStreamingFixture {
+ protected:
+  void SetUp() {
+    additional_channel_ = voe_base_->CreateChannel();
+  }
+
+  void TearDown() {
+    voe_base_->DeleteChannel(additional_channel_);
+  }
+
+  int additional_channel_;
+};
+
+TEST_F(NetEQTest, GetNetEQPlayoutModeReturnsDefaultModeByDefault) {
+  webrtc::NetEqModes mode;
+  EXPECT_EQ(0, voe_base_->GetNetEQPlayoutMode(channel_, mode));
+  EXPECT_EQ(webrtc::kNetEqDefault, mode);
+}
+
+TEST_F(NetEQTest, SetNetEQPlayoutModeActuallySetsTheModeForTheChannel) {
+  webrtc::NetEqModes mode;
+  // Set for the first channel but leave the second.
+  EXPECT_EQ(0, voe_base_->SetNetEQPlayoutMode(channel_, webrtc::kNetEqFax));
+  EXPECT_EQ(0, voe_base_->GetNetEQPlayoutMode(channel_, mode));
+  EXPECT_EQ(webrtc::kNetEqFax, mode);
+
+  EXPECT_EQ(0, voe_base_->GetNetEQPlayoutMode(additional_channel_, mode));
+  EXPECT_EQ(webrtc::kNetEqDefault, mode);
+
+  // Set the second channel, leave the first.
+  EXPECT_EQ(0, voe_base_->SetNetEQPlayoutMode(
+      additional_channel_, webrtc::kNetEqStreaming));
+  EXPECT_EQ(0, voe_base_->GetNetEQPlayoutMode(additional_channel_, mode));
+  EXPECT_EQ(webrtc::kNetEqStreaming, mode);
+
+  EXPECT_EQ(0, voe_base_->GetNetEQPlayoutMode(channel_, mode));
+  EXPECT_EQ(webrtc::kNetEqFax, mode);
+}
+
+TEST_F(NetEQTest, GetNetEQBgnModeReturnsBgnOnByDefault) {
+  webrtc::NetEqBgnModes bgn_mode;
+  EXPECT_EQ(0, voe_base_->GetNetEQBGNMode(channel_, bgn_mode));
+  EXPECT_EQ(webrtc::kBgnOn, bgn_mode);
+}
+
+TEST_F(NetEQTest, SetNetEQBgnModeActuallySetsTheBgnMode) {
+  webrtc::NetEqBgnModes bgn_mode;
+  EXPECT_EQ(0, voe_base_->SetNetEQBGNMode(channel_, webrtc::kBgnOff));
+  EXPECT_EQ(0, voe_base_->GetNetEQBGNMode(channel_, bgn_mode));
+  EXPECT_EQ(webrtc::kBgnOff, bgn_mode);
+
+  EXPECT_EQ(0, voe_base_->SetNetEQBGNMode(channel_, webrtc::kBgnFade));
+  EXPECT_EQ(0, voe_base_->GetNetEQBGNMode(channel_, bgn_mode));
+  EXPECT_EQ(webrtc::kBgnFade, bgn_mode);
+}
+
+TEST_F(NetEQTest, ManualSetEQPlayoutModeStillProducesOkAudio) {
+  EXPECT_EQ(0, voe_base_->SetNetEQPlayoutMode(channel_, webrtc::kNetEqDefault));
+  TEST_LOG("NetEQ default playout mode enabled => should hear OK audio.\n");
+  Sleep(2000);
+
+  EXPECT_EQ(0, voe_base_->SetNetEQPlayoutMode(
+      channel_, webrtc::kNetEqStreaming));
+  TEST_LOG("NetEQ streaming playout mode enabled => should hear OK audio.\n");
+  Sleep(2000);
+
+  EXPECT_EQ(0, voe_base_->SetNetEQPlayoutMode(channel_, webrtc::kNetEqFax));
+  TEST_LOG("NetEQ fax playout mode enabled => should hear OK audio.\n");
+  Sleep(2000);
+}
diff --git a/src/voice_engine/test/auto_test/standard/network_before_streaming_test.cc b/src/voice_engine/test/auto_test/standard/network_before_streaming_test.cc
new file mode 100644
index 0000000..7a41e80
--- /dev/null
+++ b/src/voice_engine/test/auto_test/standard/network_before_streaming_test.cc
@@ -0,0 +1,55 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_initialization_fixture.h"
+
+class NetworkBeforeStreamingTest : public AfterInitializationFixture {
+ protected:
+  void SetUp() {
+    channel_ = voe_base_->CreateChannel();
+  }
+
+  void TearDown() {
+    voe_base_->DeleteChannel(channel_);
+  }
+
+  int channel_;
+};
+
+TEST_F(NetworkBeforeStreamingTest,
+    GetSourceInfoReturnsEmptyValuesForUnconfiguredChannel) {
+  char src_ip[32] = "0.0.0.0";
+  int src_rtp_port = 1234;
+  int src_rtcp_port = 1235;
+
+  EXPECT_EQ(0, voe_network_->GetSourceInfo(
+      channel_, src_rtp_port, src_rtcp_port, src_ip));
+  EXPECT_EQ(0, src_rtp_port);
+  EXPECT_EQ(0, src_rtcp_port);
+  EXPECT_STRCASEEQ("", src_ip);
+}
+
+TEST_F(NetworkBeforeStreamingTest,
+    GetSourceFilterReturnsEmptyValuesForUnconfiguredChannel) {
+  int filter_port = -1;
+  int filter_port_rtcp = -1;
+  char filter_ip[32] = "0.0.0.0";
+
+  EXPECT_EQ(0, voe_network_->GetSourceFilter(
+      channel_, filter_port, filter_port_rtcp, filter_ip));
+
+  EXPECT_EQ(0, filter_port);
+  EXPECT_EQ(0, filter_port_rtcp);
+  EXPECT_STRCASEEQ("", filter_ip);
+}
+
+TEST_F(NetworkBeforeStreamingTest, SetSourceFilterSucceeds) {
+  EXPECT_EQ(0, voe_network_->SetSourceFilter(channel_, 0));
+}
diff --git a/src/voice_engine/test/auto_test/standard/network_test.cc b/src/voice_engine/test/auto_test/standard/network_test.cc
new file mode 100644
index 0000000..e4aebd1
--- /dev/null
+++ b/src/voice_engine/test/auto_test/standard/network_test.cc
@@ -0,0 +1,200 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voice_engine/test/auto_test/fakes/fake_external_transport.h"
+#include "voice_engine/test/auto_test/fixtures/after_streaming_fixture.h"
+#include "voice_engine/test/auto_test/voe_test_interface.h"
+#include "voice_engine/include/mock/mock_voe_connection_observer.h"
+#include "voice_engine/include/mock/mock_voe_observer.h"
+
+static const int kDefaultRtpPort = 8000;
+static const int kDefaultRtcpPort = 8001;
+
+class NetworkTest : public AfterStreamingFixture {
+};
+
+using ::testing::Between;
+
+TEST_F(NetworkTest, GetSourceInfoReturnsPortsAndIpAfterReceivingPackets) {
+  // Give some time to send speech packets.
+  Sleep(200);
+
+  int rtp_port = 0;
+  int rtcp_port = 0;
+  char source_ip[32] = "127.0.0.1";
+
+  EXPECT_EQ(0, voe_network_->GetSourceInfo(channel_, rtp_port, rtcp_port,
+      source_ip));
+
+  EXPECT_EQ(kDefaultRtpPort, rtp_port);
+  EXPECT_EQ(kDefaultRtcpPort, rtcp_port);
+}
+
+TEST_F(NetworkTest, NoFilterIsEnabledByDefault) {
+  int filter_rtp_port = -1;
+  int filter_rtcp_port = -1;
+  char filter_ip[64] = { 0 };
+
+  EXPECT_EQ(0, voe_network_->GetSourceFilter(
+      channel_, filter_rtp_port, filter_rtcp_port, filter_ip));
+
+  EXPECT_EQ(0, filter_rtp_port);
+  EXPECT_EQ(0, filter_rtcp_port);
+  EXPECT_STREQ("", filter_ip);
+}
+
+TEST_F(NetworkTest, ManualCanFilterRtpPort) {
+  TEST_LOG("No filter, should hear audio.\n");
+  Sleep(1000);
+
+  int port_to_block = kDefaultRtpPort + 10;
+  EXPECT_EQ(0, voe_network_->SetSourceFilter(channel_, port_to_block));
+
+  // Changes should take effect immediately.
+  int filter_rtp_port = -1;
+  int filter_rtcp_port = -1;
+  char filter_ip[64] = { 0 };
+
+  EXPECT_EQ(0, voe_network_->GetSourceFilter(
+      channel_, filter_rtp_port, filter_rtcp_port, filter_ip));
+
+  EXPECT_EQ(port_to_block, filter_rtp_port);
+
+  TEST_LOG("Now filtering port %d, should not hear audio.\n", port_to_block);
+  Sleep(1000);
+
+  TEST_LOG("Removing filter, should hear audio.\n");
+  EXPECT_EQ(0, voe_network_->SetSourceFilter(channel_, 0));
+  Sleep(1000);
+}
+
+TEST_F(NetworkTest, ManualCanFilterIp) {
+  TEST_LOG("You should hear audio.\n");
+  Sleep(1000);
+
+  int rtcp_port_to_block = kDefaultRtcpPort + 10;
+  TEST_LOG("Filtering IP 10.10.10.10, should not hear audio.\n");
+  EXPECT_EQ(0, voe_network_->SetSourceFilter(
+      channel_, 0, rtcp_port_to_block, "10.10.10.10"));
+
+  int filter_rtp_port = -1;
+  int filter_rtcp_port = -1;
+  char filter_ip[64] = { 0 };
+  EXPECT_EQ(0, voe_network_->GetSourceFilter(
+      channel_, filter_rtp_port, filter_rtcp_port, filter_ip));
+
+  EXPECT_EQ(0, filter_rtp_port);
+  EXPECT_EQ(rtcp_port_to_block, filter_rtcp_port);
+  EXPECT_STREQ("10.10.10.10", filter_ip);
+}
+
+TEST_F(NetworkTest,
+    CallsObserverOnTimeoutAndRestartWhenPacketTimeoutNotificationIsEnabled) {
+  // First, get rid of the default, asserting observer and install our observer.
+  EXPECT_EQ(0, voe_base_->DeRegisterVoiceEngineObserver());
+  webrtc::MockVoEObserver mock_observer;
+  EXPECT_EQ(0, voe_base_->RegisterVoiceEngineObserver(mock_observer));
+
+  // Define expectations.
+  int expected_error = VE_RECEIVE_PACKET_TIMEOUT;
+  EXPECT_CALL(mock_observer, CallbackOnError(channel_, expected_error))
+      .Times(1);
+  expected_error = VE_PACKET_RECEIPT_RESTARTED;
+    EXPECT_CALL(mock_observer, CallbackOnError(channel_, expected_error))
+      .Times(1);
+
+  // Get some speech going.
+  Sleep(500);
+
+  // Enable packet timeout.
+  EXPECT_EQ(0, voe_network_->SetPacketTimeoutNotification(channel_, true, 1));
+
+  // Trigger a timeout.
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  Sleep(1500);
+
+  // Trigger a restart event.
+  EXPECT_EQ(0, voe_base_->StartSend(channel_));
+  Sleep(500);
+}
+
+TEST_F(NetworkTest, DoesNotCallDeRegisteredObserver) {
+  // De-register the default observer. This test will fail if the observer gets
+  // called for any reason, so if this de-register doesn't work the test will
+  // fail.
+  EXPECT_EQ(0, voe_base_->DeRegisterVoiceEngineObserver());
+
+  // Get some speech going.
+  Sleep(500);
+
+  // Enable packet timeout.
+  EXPECT_EQ(0, voe_network_->SetPacketTimeoutNotification(channel_, true, 1));
+
+  // Trigger a timeout.
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  Sleep(1500);
+}
+
+TEST_F(NetworkTest, DeadOrAliveObserverSeesAliveMessagesIfEnabled) {
+  webrtc::MockVoeConnectionObserver mock_observer;
+  EXPECT_EQ(0, voe_network_->RegisterDeadOrAliveObserver(
+      channel_, mock_observer));
+
+  // We should be called about 4 times in four seconds, but 3 is OK too.
+  EXPECT_CALL(mock_observer, OnPeriodicDeadOrAlive(channel_, true))
+      .Times(Between(3, 4));
+
+  EXPECT_EQ(0, voe_network_->SetPeriodicDeadOrAliveStatus(channel_, true, 1));
+  Sleep(4000);
+
+  EXPECT_EQ(0, voe_network_->DeRegisterDeadOrAliveObserver(channel_));
+}
+
+TEST_F(NetworkTest, DeadOrAliveObserverSeesDeadMessagesIfEnabled) {
+  // "When do you see them?" - "All the time!"
+  webrtc::MockVoeConnectionObserver mock_observer;
+  EXPECT_EQ(0, voe_network_->RegisterDeadOrAliveObserver(
+      channel_, mock_observer));
+
+  Sleep(500);
+
+  // We should be called about 4 times in four seconds, but 3 is OK too.
+  EXPECT_CALL(mock_observer, OnPeriodicDeadOrAlive(channel_, false))
+      .Times(Between(3, 4));
+
+  EXPECT_EQ(0, voe_network_->SetPeriodicDeadOrAliveStatus(channel_, true, 1));
+  EXPECT_EQ(0, voe_rtp_rtcp_->SetRTCPStatus(channel_, false));
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  Sleep(4000);
+
+  EXPECT_EQ(0, voe_network_->DeRegisterDeadOrAliveObserver(channel_));
+}
+
+TEST_F(NetworkTest, CanSwitchToExternalTransport) {
+  EXPECT_EQ(0, voe_base_->StopReceive(channel_));
+  EXPECT_EQ(0, voe_base_->DeleteChannel(channel_));
+  channel_ = voe_base_->CreateChannel();
+
+  FakeExternalTransport external_transport(voe_network_);
+  EXPECT_EQ(0, voe_network_->RegisterExternalTransport(
+      channel_, external_transport));
+
+  EXPECT_EQ(0, voe_base_->StartReceive(channel_));
+  EXPECT_EQ(0, voe_base_->StartSend(channel_));
+  EXPECT_EQ(0, voe_base_->StartPlayout(channel_));
+
+  Sleep(1000);
+
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  EXPECT_EQ(0, voe_base_->StopPlayout(channel_));
+  EXPECT_EQ(0, voe_base_->StopReceive(channel_));
+
+  EXPECT_EQ(0, voe_network_->DeRegisterExternalTransport(channel_));
+}
diff --git a/src/voice_engine/test/auto_test/standard/rtp_rtcp_before_streaming_test.cc b/src/voice_engine/test/auto_test/standard/rtp_rtcp_before_streaming_test.cc
new file mode 100644
index 0000000..93170f6
--- /dev/null
+++ b/src/voice_engine/test/auto_test/standard/rtp_rtcp_before_streaming_test.cc
@@ -0,0 +1,50 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_initialization_fixture.h"
+
+using namespace webrtc;
+using namespace testing;
+
+class RtpRtcpBeforeStreamingTest : public AfterInitializationFixture {
+ protected:
+  void SetUp();
+  void TearDown();
+
+  int channel_;
+};
+
+void RtpRtcpBeforeStreamingTest::SetUp() {
+  EXPECT_THAT(channel_ = voe_base_->CreateChannel(), Not(Lt(0)));
+}
+
+void RtpRtcpBeforeStreamingTest::TearDown() {
+  EXPECT_EQ(0, voe_base_->DeleteChannel(channel_));
+}
+
+TEST_F(RtpRtcpBeforeStreamingTest,
+       GetRtcpStatusReturnsTrueByDefaultAndObeysSetRtcpStatus) {
+  bool on = false;
+  EXPECT_EQ(0, voe_rtp_rtcp_->GetRTCPStatus(channel_, on));
+  EXPECT_TRUE(on);
+  EXPECT_EQ(0, voe_rtp_rtcp_->SetRTCPStatus(channel_, false));
+  EXPECT_EQ(0, voe_rtp_rtcp_->GetRTCPStatus(channel_, on));
+  EXPECT_FALSE(on);
+  EXPECT_EQ(0, voe_rtp_rtcp_->SetRTCPStatus(channel_, true));
+  EXPECT_EQ(0, voe_rtp_rtcp_->GetRTCPStatus(channel_, on));
+  EXPECT_TRUE(on);
+}
+
+TEST_F(RtpRtcpBeforeStreamingTest, GetLocalSsrcObeysSetLocalSsrc) {
+  EXPECT_EQ(0, voe_rtp_rtcp_->SetLocalSSRC(channel_, 1234));
+  unsigned int result = 0;
+  EXPECT_EQ(0, voe_rtp_rtcp_->GetLocalSSRC(channel_, result));
+  EXPECT_EQ(1234u, result);
+}
diff --git a/src/voice_engine/test/auto_test/standard/rtp_rtcp_test.cc b/src/voice_engine/test/auto_test/standard/rtp_rtcp_test.cc
new file mode 100644
index 0000000..8e20217
--- /dev/null
+++ b/src/voice_engine/test/auto_test/standard/rtp_rtcp_test.cc
@@ -0,0 +1,293 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_streaming_fixture.h"
+#include "voe_standard_test.h"
+#include "testsupport/fileutils.h"
+
+class TestRtpObserver : public webrtc::VoERTPObserver {
+ public:
+  TestRtpObserver();
+  virtual ~TestRtpObserver();
+  virtual void OnIncomingCSRCChanged(const int channel,
+                                     const unsigned int CSRC,
+                                     const bool added);
+  virtual void OnIncomingSSRCChanged(const int channel,
+                                     const unsigned int SSRC);
+  void Reset();
+ public:
+  unsigned int ssrc_[2];
+  unsigned int csrc_[2][2];  // Stores 2 CSRCs for each channel.
+  bool added_[2][2];
+  int size_[2];
+};
+
+TestRtpObserver::TestRtpObserver() {
+  Reset();
+}
+
+TestRtpObserver::~TestRtpObserver() {
+}
+
+void TestRtpObserver::Reset() {
+  for (int i = 0; i < 2; i++) {
+    ssrc_[i] = 0;
+    csrc_[i][0] = 0;
+    csrc_[i][1] = 0;
+    added_[i][0] = false;
+    added_[i][1] = false;
+    size_[i] = 0;
+  }
+}
+
+void TestRtpObserver::OnIncomingCSRCChanged(const int channel,
+                                            const unsigned int CSRC,
+                                            const bool added) {
+  char msg[128];
+  sprintf(msg, "=> OnIncomingCSRCChanged(channel=%d, CSRC=%u, added=%d)\n",
+          channel, CSRC, added);
+  TEST_LOG("%s", msg);
+
+  if (channel > 1)
+    return;  // Not enough memory.
+
+  csrc_[channel][size_[channel]] = CSRC;
+  added_[channel][size_[channel]] = added;
+
+  size_[channel]++;
+  if (size_[channel] == 2)
+    size_[channel] = 0;
+}
+
+void TestRtpObserver::OnIncomingSSRCChanged(const int channel,
+                                            const unsigned int SSRC) {
+  char msg[128];
+  sprintf(msg, "\n=> OnIncomingSSRCChanged(channel=%d, SSRC=%u)\n", channel,
+          SSRC);
+  TEST_LOG("%s", msg);
+
+  ssrc_[channel] = SSRC;
+}
+
+class RtcpAppHandler : public webrtc::VoERTCPObserver {
+ public:
+  void OnApplicationDataReceived(const int channel,
+                                 const unsigned char sub_type,
+                                 const unsigned int name,
+                                 const unsigned char* data,
+                                 const unsigned short length_in_bytes);
+  void Reset();
+  ~RtcpAppHandler() {}
+  unsigned short length_in_bytes_;
+  unsigned char data_[256];
+  unsigned char sub_type_;
+  unsigned int name_;
+};
+
+
+static const char* const RTCP_CNAME = "Whatever";
+
+class RtpRtcpTest : public AfterStreamingFixture {
+ protected:
+  void SetUp() {
+    // We need a second channel for this test, so set it up.
+    second_channel_ = voe_base_->CreateChannel();
+    EXPECT_GE(second_channel_, 0);
+
+    EXPECT_EQ(0, voe_base_->SetSendDestination(
+        second_channel_, 8002, "127.0.0.1"));
+    EXPECT_EQ(0, voe_base_->SetLocalReceiver(
+        second_channel_, 8002));
+    EXPECT_EQ(0, voe_base_->StartReceive(second_channel_));
+    EXPECT_EQ(0, voe_base_->StartPlayout(second_channel_));
+    EXPECT_EQ(0, voe_rtp_rtcp_->SetLocalSSRC(second_channel_, 5678));
+    EXPECT_EQ(0, voe_base_->StartSend(second_channel_));
+
+    // We'll set up the RTCP CNAME and SSRC to something arbitrary here.
+    voe_rtp_rtcp_->SetRTCP_CNAME(channel_, RTCP_CNAME);
+  }
+
+  void TearDown() {
+    voe_base_->DeleteChannel(second_channel_);
+  }
+
+  int second_channel_;
+};
+
+void RtcpAppHandler::OnApplicationDataReceived(
+    const int /*channel*/, const unsigned char sub_type,
+    const unsigned int name, const unsigned char* data,
+    const unsigned short length_in_bytes) {
+  length_in_bytes_ = length_in_bytes;
+  memcpy(data_, &data[0], length_in_bytes);
+  sub_type_ = sub_type;
+  name_ = name;
+}
+
+void RtcpAppHandler::Reset() {
+  length_in_bytes_ = 0;
+  memset(data_, 0, sizeof(data_));
+  sub_type_ = 0;
+  name_ = 0;
+}
+
+TEST_F(RtpRtcpTest, RemoteRtcpCnameHasPropagatedToRemoteSide) {
+  // We need to sleep a bit here for the name to propagate. For instance,
+  // 200 milliseconds is not enough, so we'll go with one second here.
+  Sleep(1000);
+
+  char char_buffer[256];
+  voe_rtp_rtcp_->GetRemoteRTCP_CNAME(channel_, char_buffer);
+  EXPECT_STREQ(RTCP_CNAME, char_buffer);
+}
+
+TEST_F(RtpRtcpTest, SSRCPropagatesCorrectly) {
+  unsigned int local_ssrc = 1234;
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  EXPECT_EQ(0, voe_rtp_rtcp_->SetLocalSSRC(channel_, local_ssrc));
+  EXPECT_EQ(0, voe_base_->StartSend(channel_));
+
+  Sleep(1000);
+
+  unsigned int ssrc;
+  EXPECT_EQ(0, voe_rtp_rtcp_->GetLocalSSRC(channel_, ssrc));
+  EXPECT_EQ(local_ssrc, ssrc);
+
+  EXPECT_EQ(0, voe_rtp_rtcp_->GetRemoteSSRC(channel_, ssrc));
+  EXPECT_EQ(local_ssrc, ssrc);
+}
+
+TEST_F(RtpRtcpTest, RtcpApplicationDefinedPacketsCanBeSentAndReceived) {
+  RtcpAppHandler rtcp_app_handler;
+  EXPECT_EQ(0, voe_rtp_rtcp_->RegisterRTCPObserver(
+      channel_, rtcp_app_handler));
+
+  // Send data aligned to 32 bytes.
+  const char* data = "application-dependent data------";
+  unsigned short data_length = strlen(data);
+  unsigned int data_name = 0x41424344;  // 'ABCD' in ascii
+  unsigned char data_subtype = 1;
+
+  EXPECT_EQ(0, voe_rtp_rtcp_->SendApplicationDefinedRTCPPacket(
+      channel_, data_subtype, data_name, data, data_length));
+
+  // Ensure the RTP-RTCP process gets scheduled.
+  Sleep(1000);
+
+  // Ensure we received the data in the callback.
+  EXPECT_EQ(data_length, rtcp_app_handler.length_in_bytes_);
+  EXPECT_EQ(0, memcmp(data, rtcp_app_handler.data_, data_length));
+  EXPECT_EQ(data_name, rtcp_app_handler.name_);
+  EXPECT_EQ(data_subtype, rtcp_app_handler.sub_type_);
+
+  EXPECT_EQ(0, voe_rtp_rtcp_->DeRegisterRTCPObserver(channel_));
+}
+
+TEST_F(RtpRtcpTest, DisabledRtcpObserverDoesNotReceiveData) {
+  RtcpAppHandler rtcp_app_handler;
+  EXPECT_EQ(0, voe_rtp_rtcp_->RegisterRTCPObserver(
+      channel_, rtcp_app_handler));
+
+  // Put observer in a known state before de-registering.
+  rtcp_app_handler.Reset();
+
+  EXPECT_EQ(0, voe_rtp_rtcp_->DeRegisterRTCPObserver(channel_));
+
+  const char* data = "whatever";
+  EXPECT_EQ(0, voe_rtp_rtcp_->SendApplicationDefinedRTCPPacket(
+      channel_, 1, 0x41424344, data, strlen(data)));
+
+  // Ensure the RTP-RTCP process gets scheduled.
+  Sleep(1000);
+
+  // Ensure we received no data.
+  EXPECT_EQ(0u, rtcp_app_handler.name_);
+  EXPECT_EQ(0u, rtcp_app_handler.sub_type_);
+}
+
+TEST_F(RtpRtcpTest, InsertExtraRTPPacketDealsWithInvalidArguments) {
+  const char payload_data[8] = { 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H' };
+
+  EXPECT_EQ(-1, voe_rtp_rtcp_->InsertExtraRTPPacket(
+      -1, 0, false, payload_data, 8)) <<
+          "Should reject: invalid channel.";
+  EXPECT_EQ(-1, voe_rtp_rtcp_->InsertExtraRTPPacket(
+      channel_, -1, false, payload_data, 8)) <<
+          "Should reject: invalid payload type.";
+  EXPECT_EQ(-1, voe_rtp_rtcp_->InsertExtraRTPPacket(
+      channel_, 128, false, payload_data, 8)) <<
+          "Should reject: invalid payload type.";
+  EXPECT_EQ(-1, voe_rtp_rtcp_->InsertExtraRTPPacket(
+        channel_, 99, false, NULL, 8)) <<
+            "Should reject: bad pointer.";
+  EXPECT_EQ(-1, voe_rtp_rtcp_->InsertExtraRTPPacket(
+        channel_, 99, false, payload_data, 1500 - 28 + 1)) <<
+            "Should reject: invalid size.";
+}
+
+TEST_F(RtpRtcpTest, CanTransmitExtraRtpPacketsWithoutError) {
+  const char payload_data[8] = { 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H' };
+
+  for (int i = 0; i < 128; ++i) {
+    // Try both with and without the marker bit set
+    EXPECT_EQ(0, voe_rtp_rtcp_->InsertExtraRTPPacket(
+        channel_, i, false, payload_data, 8));
+    EXPECT_EQ(0, voe_rtp_rtcp_->InsertExtraRTPPacket(
+        channel_, i, true, payload_data, 8));
+  }
+}
+
+// TODO(xians, phoglund): Re-enable when issue 372 is resolved.
+TEST_F(RtpRtcpTest, DISABLED_CanCreateRtpDumpFilesWithoutError) {
+  // Create two RTP dump files (3 seconds long). You can verify these after
+  // the test using rtpplay or NetEqRTPplay if you like.
+  std::string output_path = webrtc::test::OutputPath();
+  std::string incoming_filename = output_path + "dump_in_3sec.rtp";
+  std::string outgoing_filename = output_path + "dump_out_3sec.rtp";
+
+  EXPECT_EQ(0, voe_rtp_rtcp_->StartRTPDump(
+      channel_, incoming_filename.c_str(), webrtc::kRtpIncoming));
+  EXPECT_EQ(0, voe_rtp_rtcp_->StartRTPDump(
+      channel_, outgoing_filename.c_str(), webrtc::kRtpOutgoing));
+
+  Sleep(3000);
+
+  EXPECT_EQ(0, voe_rtp_rtcp_->StopRTPDump(channel_, webrtc::kRtpIncoming));
+  EXPECT_EQ(0, voe_rtp_rtcp_->StopRTPDump(channel_, webrtc::kRtpOutgoing));
+}
+
+TEST_F(RtpRtcpTest, ObserverGetsNotifiedOnSsrcChange) {
+  TestRtpObserver rtcp_observer;
+  EXPECT_EQ(0, voe_rtp_rtcp_->RegisterRTPObserver(
+      channel_, rtcp_observer));
+  rtcp_observer.Reset();
+
+  unsigned int new_ssrc = 7777;
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  EXPECT_EQ(0, voe_rtp_rtcp_->SetLocalSSRC(channel_, new_ssrc));
+  EXPECT_EQ(0, voe_base_->StartSend(channel_));
+
+  Sleep(500);
+
+  // Verify we got the new SSRC.
+  EXPECT_EQ(new_ssrc, rtcp_observer.ssrc_[0]);
+
+  // Now try another SSRC.
+  unsigned int newer_ssrc = 1717;
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  EXPECT_EQ(0, voe_rtp_rtcp_->SetLocalSSRC(channel_, newer_ssrc));
+  EXPECT_EQ(0, voe_base_->StartSend(channel_));
+
+  Sleep(500);
+
+  EXPECT_EQ(newer_ssrc, rtcp_observer.ssrc_[0]);
+
+  EXPECT_EQ(0, voe_rtp_rtcp_->DeRegisterRTPObserver(channel_));
+}
diff --git a/src/voice_engine/test/auto_test/standard/video_sync_test.cc b/src/voice_engine/test/auto_test/standard/video_sync_test.cc
new file mode 100644
index 0000000..65516a1
--- /dev/null
+++ b/src/voice_engine/test/auto_test/standard/video_sync_test.cc
@@ -0,0 +1,126 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <cmath>
+#include <numeric>
+#include <vector>
+
+#include "voice_engine/test/auto_test/fixtures/after_streaming_fixture.h"
+
+#ifdef MAC_IPHONE
+  const int kMinimumReasonableDelayEstimateMs = 30;
+#else
+  const int kMinimumReasonableDelayEstimateMs = 45;
+#endif  // !MAC_IPHONE
+
+class VideoSyncTest : public AfterStreamingFixture {
+ protected:
+  // This test will verify that delay estimates converge (e.g. the standard
+  // deviation for the last five seconds' estimates is less than 20) without
+  // manual observation. The test runs for 15 seconds, sampling once per second.
+  // All samples are checked so they are greater than |min_estimate|.
+  int CollectEstimatesDuring15Seconds(int min_estimate) {
+    Sleep(1000);
+
+    std::vector<int> all_delay_estimates;
+    for (int second = 0; second < 15; second++) {
+      int delay_estimate = 0;
+      EXPECT_EQ(0, voe_vsync_->GetDelayEstimate(channel_, delay_estimate));
+
+      EXPECT_GT(delay_estimate, min_estimate) <<
+          "The delay estimate can not conceivably get lower than " <<
+          min_estimate << " ms, it's unrealistic.";
+
+      all_delay_estimates.push_back(delay_estimate);
+      Sleep(1000);
+    }
+
+    return ComputeStandardDeviation(
+        all_delay_estimates.begin() + 10, all_delay_estimates.end());
+  }
+
+  void CheckEstimatesConvergeReasonablyWell(int min_estimate) {
+    float standard_deviation = CollectEstimatesDuring15Seconds(min_estimate);
+    EXPECT_LT(standard_deviation, 30.0f);
+  }
+
+  // Computes the standard deviation by first estimating the sample variance
+  // with an unbiased estimator.
+  float ComputeStandardDeviation(std::vector<int>::const_iterator start,
+                               std::vector<int>::const_iterator end) const {
+    int num_elements = end - start;
+    int mean = std::accumulate(start, end, 0) / num_elements;
+    assert(num_elements > 1);
+
+    float variance = 0;
+    for (; start != end; ++start) {
+      variance += (*start - mean) * (*start - mean) / (num_elements - 1);
+    }
+    return std::sqrt(variance);
+  }
+};
+
+TEST_F(VideoSyncTest, CanGetPlayoutTimestampWhilePlayingWithoutSettingItFirst) {
+  unsigned int ignored;
+  EXPECT_EQ(0, voe_vsync_->GetPlayoutTimestamp(channel_, ignored));
+}
+
+TEST_F(VideoSyncTest, CannotSetInitTimestampWhilePlaying) {
+  EXPECT_EQ(-1, voe_vsync_->SetInitTimestamp(channel_, 12345));
+}
+
+TEST_F(VideoSyncTest, CannotSetInitSequenceNumberWhilePlaying) {
+  EXPECT_EQ(-1, voe_vsync_->SetInitSequenceNumber(channel_, 123));
+}
+
+TEST_F(VideoSyncTest, CanSetInitTimestampWhileStopped) {
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  EXPECT_EQ(0, voe_vsync_->SetInitTimestamp(channel_, 12345));
+}
+
+TEST_F(VideoSyncTest, CanSetInitSequenceNumberWhileStopped) {
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  EXPECT_EQ(0, voe_vsync_->SetInitSequenceNumber(channel_, 123));
+}
+
+// TODO(phoglund): pending investigation in
+// http://code.google.com/p/webrtc/issues/detail?id=438
+TEST_F(VideoSyncTest,
+       DISABLED_DelayEstimatesStabilizeDuring15sAndAreNotTooLow) {
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  EXPECT_EQ(0, voe_vsync_->SetInitTimestamp(channel_, 12345));
+  EXPECT_EQ(0, voe_vsync_->SetInitSequenceNumber(channel_, 123));
+  EXPECT_EQ(0, voe_base_->StartSend(channel_));
+
+  CheckEstimatesConvergeReasonablyWell(kMinimumReasonableDelayEstimateMs);
+}
+
+// TODO(phoglund): pending investigation in
+// http://code.google.com/p/webrtc/issues/detail?id=438
+TEST_F(VideoSyncTest,
+       DISABLED_DelayEstimatesStabilizeAfterNetEqMinDelayChanges45s) {
+  EXPECT_EQ(0, voe_base_->StopSend(channel_));
+  EXPECT_EQ(0, voe_vsync_->SetInitTimestamp(channel_, 12345));
+  EXPECT_EQ(0, voe_vsync_->SetInitSequenceNumber(channel_, 123));
+  EXPECT_EQ(0, voe_base_->StartSend(channel_));
+
+  CheckEstimatesConvergeReasonablyWell(kMinimumReasonableDelayEstimateMs);
+  EXPECT_EQ(0, voe_vsync_->SetMinimumPlayoutDelay(channel_, 200));
+  CheckEstimatesConvergeReasonablyWell(kMinimumReasonableDelayEstimateMs);
+  EXPECT_EQ(0, voe_vsync_->SetMinimumPlayoutDelay(channel_, 0));
+  CheckEstimatesConvergeReasonablyWell(kMinimumReasonableDelayEstimateMs);
+}
+
+#if !defined(WEBRTC_ANDROID)
+TEST_F(VideoSyncTest, CanGetPlayoutBufferSize) {
+  int ignored;
+  EXPECT_EQ(0, voe_vsync_->GetPlayoutBufferSize(ignored));
+}
+#endif  // !ANDROID
diff --git a/src/voice_engine/test/auto_test/standard/voe_base_misc_test.cc b/src/voice_engine/test/auto_test/standard/voe_base_misc_test.cc
new file mode 100644
index 0000000..0388025
--- /dev/null
+++ b/src/voice_engine/test/auto_test/standard/voe_base_misc_test.cc
@@ -0,0 +1,26 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "before_initialization_fixture.h"
+
+class VoeBaseMiscTest : public BeforeInitializationFixture {
+};
+
+using namespace testing;
+
+TEST_F(VoeBaseMiscTest, MaxNumChannelsIs32) {
+  EXPECT_EQ(32, voe_base_->MaxNumOfChannels());
+}
+
+TEST_F(VoeBaseMiscTest, GetVersionPrintsSomeUsefulInformation) {
+  char char_buffer[1024];
+  EXPECT_EQ(0, voe_base_->GetVersion(char_buffer));
+  EXPECT_THAT(char_buffer, ContainsRegex("VoiceEngine"));
+}
diff --git a/src/voice_engine/test/auto_test/standard/volume_test.cc b/src/voice_engine/test/auto_test/standard/volume_test.cc
new file mode 100644
index 0000000..d930281
--- /dev/null
+++ b/src/voice_engine/test/auto_test/standard/volume_test.cc
@@ -0,0 +1,280 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "after_streaming_fixture.h"
+
+#ifdef WEBRTC_LINUX
+#define DISABLED_ON_LINUX(test) DISABLED_##test
+#else
+#define DISABLED_ON_LINUX(test) test
+#endif
+
+class VolumeTest : public AfterStreamingFixture {
+};
+
+// TODO(phoglund): a number of tests are disabled here on Linux, all pending
+// investigation in
+// http://code.google.com/p/webrtc/issues/detail?id=367
+
+TEST_F(VolumeTest, DefaultSpeakerVolumeIsAtMost255) {
+  unsigned int volume = 1000;
+  EXPECT_EQ(0, voe_volume_control_->GetSpeakerVolume(volume));
+  EXPECT_LE(volume, 255u);
+}
+
+TEST_F(VolumeTest, SetVolumeBeforePlayoutWorks) {
+  // This is a rather specialized test, intended to exercise some PulseAudio
+  // code. However, these conditions should be satisfied on any platform.
+  unsigned int original_volume = 0;
+  EXPECT_EQ(0, voe_volume_control_->GetSpeakerVolume(original_volume));
+  Sleep(1000);
+
+  EXPECT_EQ(0, voe_volume_control_->SetSpeakerVolume(200));
+  unsigned int volume;
+  EXPECT_EQ(0, voe_volume_control_->GetSpeakerVolume(volume));
+  EXPECT_EQ(200u, volume);
+
+  PausePlaying();
+  ResumePlaying();
+  EXPECT_EQ(0, voe_volume_control_->GetSpeakerVolume(volume));
+  // Ensure the volume has not changed after resuming playout.
+  EXPECT_EQ(200u, volume);
+
+  PausePlaying();
+  EXPECT_EQ(0, voe_volume_control_->SetSpeakerVolume(100));
+  ResumePlaying();
+  // Ensure the volume set while paused is retained.
+  EXPECT_EQ(0, voe_volume_control_->GetSpeakerVolume(volume));
+  EXPECT_EQ(100u, volume);
+
+  EXPECT_EQ(0, voe_volume_control_->SetSpeakerVolume(original_volume));
+}
+
+TEST_F(VolumeTest, ManualSetVolumeWorks) {
+  unsigned int original_volume = 0;
+  EXPECT_EQ(0, voe_volume_control_->GetSpeakerVolume(original_volume));
+  Sleep(1000);
+
+  TEST_LOG("Setting speaker volume to 0 out of 255.\n");
+  EXPECT_EQ(0, voe_volume_control_->SetSpeakerVolume(0));
+  unsigned int volume;
+  EXPECT_EQ(0, voe_volume_control_->GetSpeakerVolume(volume));
+  EXPECT_EQ(0u, volume);
+  Sleep(1000);
+
+  TEST_LOG("Setting speaker volume to 100 out of 255.\n");
+  EXPECT_EQ(0, voe_volume_control_->SetSpeakerVolume(100));
+  EXPECT_EQ(0, voe_volume_control_->GetSpeakerVolume(volume));
+  EXPECT_EQ(100u, volume);
+  Sleep(1000);
+
+  // Set the volume to 255 very briefly so we don't blast the poor user
+  // listening to this. This is just to test the call succeeds.
+  EXPECT_EQ(0, voe_volume_control_->SetSpeakerVolume(255));
+  EXPECT_EQ(0, voe_volume_control_->GetSpeakerVolume(volume));
+  EXPECT_EQ(255u, volume);
+
+  TEST_LOG("Setting speaker volume to the original %d out of 255.\n",
+      original_volume);
+  EXPECT_EQ(0, voe_volume_control_->SetSpeakerVolume(original_volume));
+  Sleep(1000);
+}
+
+#if !defined(MAC_IPHONE)
+
+TEST_F(VolumeTest, DISABLED_ON_LINUX(DefaultMicrophoneVolumeIsAtMost255)) {
+  unsigned int volume = 1000;
+  EXPECT_EQ(0, voe_volume_control_->GetMicVolume(volume));
+  EXPECT_LE(volume, 255u);
+}
+
+TEST_F(VolumeTest, DISABLED_ON_LINUX(
+          ManualRequiresMicrophoneCanSetMicrophoneVolumeWithAcgOff)) {
+  SwitchToManualMicrophone();
+  EXPECT_EQ(0, voe_apm_->SetAgcStatus(false));
+
+  unsigned int original_volume = 0;
+  EXPECT_EQ(0, voe_volume_control_->GetMicVolume(original_volume));
+
+  TEST_LOG("Setting microphone volume to 0.\n");
+  EXPECT_EQ(0, voe_volume_control_->SetMicVolume(channel_));
+  Sleep(1000);
+  TEST_LOG("Setting microphone volume to 255.\n");
+  EXPECT_EQ(0, voe_volume_control_->SetMicVolume(255));
+  Sleep(1000);
+  TEST_LOG("Setting microphone volume back to saved value.\n");
+  EXPECT_EQ(0, voe_volume_control_->SetMicVolume(original_volume));
+  Sleep(1000);
+}
+
+TEST_F(VolumeTest, ChannelScalingIsOneByDefault) {
+  float scaling = -1.0f;
+
+  EXPECT_EQ(0, voe_volume_control_->GetChannelOutputVolumeScaling(
+      channel_, scaling));
+  EXPECT_FLOAT_EQ(1.0f, scaling);
+}
+
+TEST_F(VolumeTest, ManualCanSetChannelScaling) {
+  EXPECT_EQ(0, voe_volume_control_->SetChannelOutputVolumeScaling(
+      channel_, 0.1f));
+
+  float scaling = 1.0f;
+  EXPECT_EQ(0, voe_volume_control_->GetChannelOutputVolumeScaling(
+      channel_, scaling));
+
+  EXPECT_FLOAT_EQ(0.1f, scaling);
+
+  TEST_LOG("Channel scaling set to 0.1: audio should be barely audible.\n");
+  Sleep(2000);
+}
+
+#endif  // !MAC_IPHONE
+
+#if !defined(WEBRTC_ANDROID) && !defined(MAC_IPHONE)
+
+TEST_F(VolumeTest, InputMutingIsNotEnabledByDefault) {
+  bool is_muted = true;
+  EXPECT_EQ(0, voe_volume_control_->GetInputMute(channel_, is_muted));
+  EXPECT_FALSE(is_muted);
+}
+
+TEST_F(VolumeTest, DISABLED_ON_LINUX(ManualInputMutingMutesMicrophone)) {
+  SwitchToManualMicrophone();
+
+  // Enable muting.
+  EXPECT_EQ(0, voe_volume_control_->SetInputMute(channel_, true));
+  bool is_muted = false;
+  EXPECT_EQ(0, voe_volume_control_->GetInputMute(channel_, is_muted));
+  EXPECT_TRUE(is_muted);
+
+  TEST_LOG("Muted: talk into microphone and verify you can't hear yourself.\n");
+  Sleep(2000);
+
+  // Test that we can disable muting.
+  EXPECT_EQ(0, voe_volume_control_->SetInputMute(channel_, false));
+  EXPECT_EQ(0, voe_volume_control_->GetInputMute(channel_, is_muted));
+  EXPECT_FALSE(is_muted);
+
+  TEST_LOG("Unmuted: talk into microphone and verify you can hear yourself.\n");
+  Sleep(2000);
+}
+
+TEST_F(VolumeTest, DISABLED_ON_LINUX(SystemInputMutingIsNotEnabledByDefault)) {
+  bool is_muted = true;
+  EXPECT_EQ(0, voe_volume_control_->GetSystemInputMute(is_muted));
+  EXPECT_FALSE(is_muted);
+}
+
+TEST_F(VolumeTest, DISABLED_ON_LINUX(ManualSystemInputMutingMutesMicrophone)) {
+  SwitchToManualMicrophone();
+
+  // Enable system input muting.
+  EXPECT_EQ(0, voe_volume_control_->SetSystemInputMute(true));
+  bool is_muted = false;
+  EXPECT_EQ(0, voe_volume_control_->GetSystemInputMute(is_muted));
+  EXPECT_TRUE(is_muted);
+
+  TEST_LOG("Muted: talk into microphone and verify you can't hear yourself.\n");
+  Sleep(2000);
+
+  // Test that we can disable system input muting.
+  EXPECT_EQ(0, voe_volume_control_->SetSystemInputMute(false));
+  EXPECT_EQ(0, voe_volume_control_->GetSystemInputMute(is_muted));
+  EXPECT_FALSE(is_muted);
+
+  TEST_LOG("Unmuted: talk into microphone and verify you can hear yourself.\n");
+  Sleep(2000);
+}
+
+TEST_F(VolumeTest, SystemOutputMutingIsNotEnabledByDefault) {
+  bool is_muted = true;
+  EXPECT_EQ(0, voe_volume_control_->GetSystemOutputMute(is_muted));
+  EXPECT_FALSE(is_muted);
+}
+
+TEST_F(VolumeTest, ManualSystemOutputMutingMutesOutput) {
+  // Enable muting.
+  EXPECT_EQ(0, voe_volume_control_->SetSystemOutputMute(true));
+  bool is_muted = false;
+  EXPECT_EQ(0, voe_volume_control_->GetSystemOutputMute(is_muted));
+  EXPECT_TRUE(is_muted);
+
+  TEST_LOG("Muted: you should hear no audio.\n");
+  Sleep(2000);
+
+  // Test that we can disable muting.
+  EXPECT_EQ(0, voe_volume_control_->SetSystemOutputMute(false));
+  EXPECT_EQ(0, voe_volume_control_->GetSystemOutputMute(is_muted));
+  EXPECT_FALSE(is_muted);
+
+  TEST_LOG("Unmuted: you should hear audio.\n");
+  Sleep(2000);
+}
+
+TEST_F(VolumeTest, ManualTestInputAndOutputLevels) {
+  SwitchToManualMicrophone();
+
+  TEST_LOG("Speak and verify that the following levels look right:\n");
+  for (int i = 0; i < 5; i++) {
+    Sleep(1000);
+    unsigned int input_level = 0;
+    unsigned int output_level = 0;
+    unsigned int input_level_full_range = 0;
+    unsigned int output_level_full_range = 0;
+
+    EXPECT_EQ(0, voe_volume_control_->GetSpeechInputLevel(
+        input_level));
+    EXPECT_EQ(0, voe_volume_control_->GetSpeechOutputLevel(
+        channel_, output_level));
+    EXPECT_EQ(0, voe_volume_control_->GetSpeechInputLevelFullRange(
+        input_level_full_range));
+    EXPECT_EQ(0, voe_volume_control_->GetSpeechOutputLevelFullRange(
+        channel_, output_level_full_range));
+
+    TEST_LOG("    warped levels (0-9)    : in=%5d, out=%5d\n",
+        input_level, output_level);
+    TEST_LOG("    linear levels (0-32768): in=%5d, out=%5d\n",
+        input_level_full_range, output_level_full_range);
+  }
+}
+
+TEST_F(VolumeTest, ChannelsAreNotPannedByDefault) {
+  float left = -1.0;
+  float right = -1.0;
+
+  EXPECT_EQ(0, voe_volume_control_->GetOutputVolumePan(channel_, left, right));
+  EXPECT_FLOAT_EQ(1.0, left);
+  EXPECT_FLOAT_EQ(1.0, right);
+}
+
+TEST_F(VolumeTest, ManualTestChannelPanning) {
+  TEST_LOG("Panning left.\n");
+  EXPECT_EQ(0, voe_volume_control_->SetOutputVolumePan(channel_, 0.8f, 0.1f));
+  Sleep(1000);
+
+  TEST_LOG("Back to center.\n");
+  EXPECT_EQ(0, voe_volume_control_->SetOutputVolumePan(channel_, 1.0f, 1.0f));
+  Sleep(1000);
+
+  TEST_LOG("Panning right.\n");
+  EXPECT_EQ(0, voe_volume_control_->SetOutputVolumePan(channel_, 0.1f, 0.8f));
+  Sleep(1000);
+
+  // To finish, verify that the getter works.
+  float left = 0.0f;
+  float right = 0.0f;
+
+  EXPECT_EQ(0, voe_volume_control_->GetOutputVolumePan(channel_, left, right));
+  EXPECT_FLOAT_EQ(0.1f, left);
+  EXPECT_FLOAT_EQ(0.8f, right);
+}
+
+#endif  // !WEBRTC_ANDROID && !MAC_IPHONE
diff --git a/src/voice_engine/test/auto_test/voe_cpu_test.cc b/src/voice_engine/test/auto_test/voe_cpu_test.cc
new file mode 100644
index 0000000..14e4a00
--- /dev/null
+++ b/src/voice_engine/test/auto_test/voe_cpu_test.cc
@@ -0,0 +1,93 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <time.h>
+#include <cassert>
+#if defined(_WIN32)
+#include <conio.h>
+#endif
+
+#include "voe_cpu_test.h"
+
+using namespace webrtc;
+
+namespace voetest {
+
+#define CHECK(expr)                                             \
+    if (expr)                                                   \
+    {                                                           \
+        printf("Error at line: %i, %s \n", __LINE__, #expr);    \
+        printf("Error code: %i \n", base->LastError());  \
+        PAUSE												    \
+        return -1;                                              \
+    }
+
+VoECpuTest::VoECpuTest(VoETestManager& mgr)
+    : _mgr(mgr) {
+
+}
+
+int VoECpuTest::DoTest() {
+  printf("------------------------------------------------\n");
+  printf(" CPU Reference Test\n");
+  printf("------------------------------------------------\n");
+
+  VoEBase* base = _mgr.BasePtr();
+  VoEFile* file = _mgr.FilePtr();
+  VoECodec* codec = _mgr.CodecPtr();
+  VoEAudioProcessing* apm = _mgr.APMPtr();
+
+  int channel(-1);
+  CodecInst isac;
+
+  isac.pltype = 104;
+  strcpy(isac.plname, "ISAC");
+  isac.pacsize = 960;
+  isac.plfreq = 32000;
+  isac.channels = 1;
+  isac.rate = -1;
+
+  CHECK(base->Init());
+  channel = base->CreateChannel();
+
+  CHECK(base->SetLocalReceiver(channel, 5566));
+  CHECK(base->SetSendDestination(channel, 5566, "127.0.0.1"));
+  CHECK(codec->SetRecPayloadType(channel, isac));
+  CHECK(codec->SetSendCodec(channel, isac));
+
+  CHECK(base->StartReceive(channel));
+  CHECK(base->StartPlayout(channel));
+  CHECK(base->StartSend(channel));
+  CHECK(file->StartPlayingFileAsMicrophone(channel, _mgr.AudioFilename(),
+          true, true));
+
+  CHECK(codec->SetVADStatus(channel, true));
+  CHECK(apm->SetAgcStatus(true, kAgcAdaptiveAnalog));
+  CHECK(apm->SetNsStatus(true, kNsModerateSuppression));
+  CHECK(apm->SetEcStatus(true, kEcAec));
+
+  TEST_LOG("\nMeasure CPU and memory while running a full-duplex"
+    " iSAC-swb call.\n\n");
+
+  PAUSE
+
+  CHECK(base->StopSend(channel));
+  CHECK(base->StopPlayout(channel));
+  CHECK(base->StopReceive(channel));
+
+  base->DeleteChannel(channel);
+  CHECK(base->Terminate());
+
+  return 0;
+}
+
+} //  namespace voetest
diff --git a/src/voice_engine/test/auto_test/voe_cpu_test.h b/src/voice_engine/test/auto_test/voe_cpu_test.h
new file mode 100644
index 0000000..f883075
--- /dev/null
+++ b/src/voice_engine/test/auto_test/voe_cpu_test.h
@@ -0,0 +1,31 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_CPU_TEST_H
+#define WEBRTC_VOICE_ENGINE_VOE_CPU_TEST_H
+
+#include "voe_standard_test.h"
+
+namespace voetest {
+
+class VoETestManager;
+
+class VoECpuTest {
+ public:
+  VoECpuTest(VoETestManager& mgr);
+  ~VoECpuTest() {}
+  int DoTest();
+ private:
+  VoETestManager& _mgr;
+};
+
+} // namespace voetest
+
+#endif // WEBRTC_VOICE_ENGINE_VOE_CPU_TEST_H
diff --git a/src/voice_engine/test/auto_test/voe_extended_test.cc b/src/voice_engine/test/auto_test/voe_extended_test.cc
new file mode 100644
index 0000000..b90e63c0
--- /dev/null
+++ b/src/voice_engine/test/auto_test/voe_extended_test.cc
@@ -0,0 +1,7524 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <vector>
+
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/event_wrapper.h"
+#include "system_wrappers/interface/ref_count.h"
+#include "system_wrappers/interface/thread_wrapper.h"
+#include "testsupport/fileutils.h"
+#include "voice_engine/voice_engine_defines.h"
+#include "voice_engine/test/auto_test/voe_extended_test.h"
+
+#if defined(_WIN32)
+#include <conio.h>
+#include <winsock2.h>
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#include <netdb.h>
+#endif
+
+using namespace webrtc;
+
+namespace voetest {
+
+// Set this flag to ensure that test packets are transmitted to
+// RemoteIP::RemotePort during tests of SetSendToS and SetSendGQos. Requires
+// receiver at the remote side and Wireshark with a proper ip.src filter.
+#define _SEND_TO_REMOTE_IP_
+
+#ifdef _SEND_TO_REMOTE_IP_
+const int RemotePort = 12345; // transmit to this UDP port
+const char* RemoteIP = "192.168.200.1"; // transmit to this IP address
+#endif
+
+#ifdef MAC_IPHONE
+#define SLEEP_IF_IPHONE(x) SLEEP(x)
+#else
+#define SLEEP_IF_IPHONE(x)
+#endif
+
+#ifdef WEBRTC_ANDROID
+// Global pointers
+extern void* globalJavaVM;
+extern void* globalContext;
+#endif
+
+// ----------------------------------------------------------------------------
+// External AudioDeviceModule implementation
+// ----------------------------------------------------------------------------
+
+// static
+AudioDeviceModuleImpl* AudioDeviceModuleImpl::Create() {
+  AudioDeviceModuleImpl* xADM = new AudioDeviceModuleImpl();
+  if (xADM)
+    xADM->AddRef();
+  return xADM;
+}
+
+// static
+bool AudioDeviceModuleImpl::Destroy(AudioDeviceModuleImpl* adm) {
+  if (!adm)
+    return false;
+  int32_t count = adm->Release();
+  if (count != 0) {
+    return false;
+  } else {
+    delete adm;
+    return true;
+  }
+}
+
+AudioDeviceModuleImpl::AudioDeviceModuleImpl() :
+  _ref_count(0) {
+}
+
+AudioDeviceModuleImpl::~AudioDeviceModuleImpl() {
+}
+
+int32_t AudioDeviceModuleImpl::AddRef() {
+  return ++_ref_count;
+}
+
+int32_t AudioDeviceModuleImpl::Release() {
+  // Avoid self destruction in this mock implementation.
+  // Ensures that we can always check the reference counter while alive.
+  return --_ref_count;
+}
+
+// ----------------------------------------------------------------------------
+//  External transport (Transport) implementations:
+// ----------------------------------------------------------------------------
+
+ExtendedTestTransport::ExtendedTestTransport(VoENetwork* ptr) :
+  myNetw(ptr), _thread(NULL), _lock(NULL), _event(NULL), _length(0),
+  _channel(0) {
+  const char* threadName = "voe_extended_test_external_thread";
+  _lock = CriticalSectionWrapper::CreateCriticalSection();
+  _event = EventWrapper::Create();
+  _thread = ThreadWrapper::CreateThread(Run, this, kHighPriority, threadName);
+  if (_thread) {
+    unsigned int id;
+    _thread->Start(id);
+  }
+}
+
+ExtendedTestTransport::~ExtendedTestTransport() {
+  if (_thread) {
+    _thread->SetNotAlive();
+    _event->Set();
+    if (_thread->Stop()) {
+      delete _thread;
+      _thread = NULL;
+      delete _event;
+      _event = NULL;
+      delete _lock;
+      _lock = NULL;
+    }
+  }
+}
+
+bool ExtendedTestTransport::Run(void* ptr) {
+  return static_cast<ExtendedTestTransport*> (ptr)->Process();
+}
+
+bool ExtendedTestTransport::Process() {
+  switch (_event->Wait(500)) {
+    case kEventSignaled:
+      _lock->Enter();
+      myNetw->ReceivedRTPPacket(_channel, _packetBuffer, _length);
+      _lock->Leave();
+      return true;
+    case kEventTimeout:
+      return true;
+    case kEventError:
+      break;
+  }
+  return true;
+}
+
+int ExtendedTestTransport::SendPacket(int channel, const void *data, int len) {
+  _lock->Enter();
+  if (len < 1612) {
+    memcpy(_packetBuffer, (const unsigned char*) data, len);
+    _length = len;
+    _channel = channel;
+  }
+  _lock->Leave();
+  _event->Set(); // triggers ReceivedRTPPacket() from worker thread
+  return len;
+}
+
+int ExtendedTestTransport::SendRTCPPacket(int channel, const void *data, int len) {
+  myNetw->ReceivedRTCPPacket(channel, data, len);
+  return len;
+}
+
+XTransport::XTransport(VoENetwork* netw, VoEFile* file) :
+  _netw(netw), _file(file) {
+}
+
+int XTransport::SendPacket(int channel, const void *data, int len) {
+  // loopback
+  // _netw->ReceivedRTPPacket(channel, data, len);
+
+  return 0;
+}
+
+int XTransport::SendRTCPPacket(int, const void *, int) {
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoERTPObserver
+// ----------------------------------------------------------------------------
+
+XRTPObserver::XRTPObserver() :
+  _SSRC(0) {
+}
+
+XRTPObserver::~XRTPObserver() {
+}
+
+void XRTPObserver::OnIncomingCSRCChanged(const int /*channel*/, const unsigned int /*CSRC*/,
+                                         const bool /*added*/) {
+}
+
+void XRTPObserver::OnIncomingSSRCChanged(const int /*channel*/, const unsigned int SSRC) {
+  // char msg[128];
+  // sprintf(msg, "OnIncomingSSRCChanged(channel=%d, SSRC=%lu)\n",
+  //        channel, SSRC);
+  // TEST_LOG(msg);
+
+  _SSRC = SSRC; // skip channel dependency for simplicty
+
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest
+// ----------------------------------------------------------------------------
+
+int VoEExtendedTest::PrepareTest(const char* str) const {
+  TEST_LOG("\n\n================================================\n");
+  TEST_LOG("\tExtended *%s* Test\n", str);
+  TEST_LOG("================================================\n\n");
+
+  return 0;
+}
+
+int VoEExtendedTest::TestPassed(const char* str) const {
+  TEST_LOG("\n\n------------------------------------------------\n");
+  TEST_LOG("\tExtended *%s* test passed!\n", str);
+  TEST_LOG("------------------------------------------------\n\n");
+
+  return 0;
+}
+
+void VoEExtendedTest::OnPeriodicDeadOrAlive(const int /*channel*/, const bool alive) {
+  _alive = alive;
+  if (alive) {
+    TEST_LOG("=> ALIVE ");
+  } else {
+    TEST_LOG("=> DEAD ");
+  }
+  fflush(NULL);
+}
+
+void VoEExtendedTest::CallbackOnError(const int errCode, int) {
+  _errCode = errCode;
+  TEST_LOG("\n************************\n");
+  TEST_LOG(" RUNTIME ERROR: %d \n", errCode);
+  TEST_LOG("************************\n");
+}
+
+VoEExtendedTest::VoEExtendedTest(VoETestManager& mgr) :
+  _mgr(mgr) {
+  for (int i = 0; i < 32; i++) {
+    _listening[i] = false;
+    _playing[i] = false;
+    _sending[i] = false;
+  }
+}
+
+VoEExtendedTest::~VoEExtendedTest() {
+}
+
+void VoEExtendedTest::StartMedia(int channel, int rtpPort, bool listen,
+                                 bool playout, bool send) {
+  VoEBase* voe_base_ = _mgr.BasePtr();
+
+  _listening[channel] = false;
+  _playing[channel] = false;
+  _sending[channel] = false;
+
+  voe_base_->SetLocalReceiver(channel, rtpPort);
+  voe_base_->SetSendDestination(channel, rtpPort, "127.0.0.1");
+  if (listen) {
+    _listening[channel] = true;
+    voe_base_->StartReceive(channel);
+  }
+  if (playout) {
+    _playing[channel] = true;
+    voe_base_->StartPlayout(channel);
+  }
+  if (send) {
+    _sending[channel] = true;
+    voe_base_->StartSend(channel);
+  }
+}
+
+void VoEExtendedTest::StopMedia(int channel) {
+  VoEBase* voe_base_ = _mgr.BasePtr();
+
+  if (_listening[channel]) {
+    _listening[channel] = false;
+    voe_base_->StopReceive(channel);
+  }
+  if (_playing[channel]) {
+    _playing[channel] = false;
+    voe_base_->StopPlayout(channel);
+  }
+  if (_sending[channel]) {
+    _sending[channel] = false;
+    voe_base_->StopSend(channel);
+  }
+}
+
+void VoEExtendedTest::Play(int channel, unsigned int timeMillisec, bool addFileAsMicrophone,
+                           bool addTimeMarker) {
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoEFile* file = _mgr.FilePtr();
+
+  voe_base_->StartPlayout(channel);
+  TEST_LOG("[playing]");
+  fflush(NULL);
+  if (addFileAsMicrophone) {
+    file->StartPlayingFileAsMicrophone(channel, _mgr.AudioFilename(), true, true);
+    TEST_LOG("[file as mic]");
+    fflush(NULL);
+  }
+  if (addTimeMarker) {
+    float dtSec = (float) ((float) timeMillisec / 1000.0);
+    TEST_LOG("[dT=%.1f]", dtSec);
+    fflush(NULL); // print sleep time in seconds
+  }
+  SLEEP(timeMillisec);
+  voe_base_->StopPlayout(channel);
+  file->StopPlayingFileAsMicrophone(channel);
+}
+
+void VoEExtendedTest::Sleep(unsigned int timeMillisec, bool addMarker) {
+  if (addMarker) {
+    float dtSec = (float) ((float) timeMillisec / 1000.0);
+    TEST_LOG("[dT=%.1f]", dtSec); // print sleep time in seconds
+  }
+  ::Sleep(timeMillisec);
+}
+
+int VoEExtendedTest::TestBase() {
+#ifndef _WIN32
+  // Sleep a bit instead when pause not supported
+#undef PAUSE
+#define PAUSE SLEEP(2000);
+#endif
+
+  PrepareTest("Base");
+
+  // TODO(phoglund): make this an actual instance variable. I think the
+  // macro black magic will make more sense then. This is named like an
+  // instance variable since it is required in order to appease the
+  // gods of darkness.
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoENetwork* netw = _mgr.NetworkPtr();
+#ifdef _TEST_RTP_RTCP_
+  VoERTP_RTCP* rtp = _mgr.RTP_RTCPPtr();
+#endif
+
+  //////////////////////////
+  // SetTraceFileName
+
+#ifdef _USE_EXTENDED_TRACE_
+  TEST(SetTraceFileName - SetDebugTraceFileName); ANL();
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(NULL)); MARK();
+  // don't use these files
+  std::string output_path = webrtc::test::OutputPath();
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(
+              (output_path + "VoEBase_trace_dont_use.txt").c_str())); MARK();
+  // use these instead
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(GetFilename(""
+              (output_path + "VoEBase_trace.txt").c_str())); MARK();
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceStream |
+          kTraceStateInfo |
+          kTraceWarning |
+          kTraceError |
+          kTraceCritical |
+          kTraceApiCall |
+          kTraceMemory |
+          kTraceInfo)); MARK();
+
+  ANL(); AOK(); ANL(); ANL();
+#endif
+
+  ///////////////////////////////////////
+  // RegisterVoiceEngineObserver
+  // DeRegisterVoiceEngineObserver
+  TEST(SetObserver);
+  ANL();
+
+  TEST_MUSTPASS(voe_base_->RegisterVoiceEngineObserver(*this));
+  MARK();
+  SLEEP(100);
+  TEST_MUSTPASS(voe_base_->DeRegisterVoiceEngineObserver());
+  MARK();
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  /////////////////////
+  // GetVersion
+  TEST(GetVersion);
+  ANL();
+
+  char version[1024];
+  // audio device module and AudioProcessing fail to getversion when they
+  // are not initiliazed
+  TEST_MUSTPASS(voe_base_->GetVersion(version));
+  MARK();
+  TEST_LOG("\n-----\n%s\n-----\n", version);
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  ///////////////
+  // Init
+  TEST(Init);
+  ANL();
+
+  TEST_MUSTPASS(voe_base_->Init());
+  MARK();
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  TEST_MUSTPASS(voe_base_->Init());
+  MARK();
+  // ensure that no new memory is allocated at the second call (check
+  // trace file)
+  TEST_MUSTPASS(voe_base_->Init());
+  MARK();
+  TEST_MUSTPASS(voe_base_->Terminate());
+#if (!defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID))
+  // verify AEC recording
+  TEST_MUSTPASS(voe_base_->Init());
+  MARK(); // verify output dat-files
+  TEST_MUSTPASS(voe_base_->Terminate());
+#endif
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  ////////////////////
+  // Terminate
+  TEST(Terminate);
+  ANL();
+  TEST_MUSTPASS(voe_base_->Terminate());
+  MARK(); // should be ignored
+  TEST_MUSTPASS(voe_base_->Init());
+  TEST_MUSTPASS(voe_base_->Terminate());
+  MARK(); // should terminate
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // ------------------------------------------------------------------------
+  // >> Init(AudioDeviceModule)
+  //
+  // Note that our mock implementation of the ADM also mocks the
+  // reference counting part. This approach enables us to keep track
+  // of the internal reference counter without checking return values
+  // from the ADM and we also avoid the built-in self destruction.
+  //
+  // TODO(henrika): this test does not verify that external ADM methods
+  // are called by the VoiceEngine once registered. We could extend
+  // the mock implementation and add counters for each ADM API to ensure
+  // that they are called in the correct sequence and the correct number
+  // of times.
+  TEST_LOG("\nTesting: Init in combination with an external ADM\n");
+
+  // Create the ADM and call AddRef within the factory method.
+  AudioDeviceModuleImpl* xADM = AudioDeviceModuleImpl::Create();
+  ASSERT_FALSE(xADM == NULL);
+  ASSERT_TRUE(xADM->ReferenceCounter() == 1);
+
+  // Verify default usage case for external ADM.
+  TEST_MUSTPASS(voe_base_->Init(xADM));MARK();
+  ASSERT_TRUE(xADM->ReferenceCounter() == 2);
+  TEST_MUSTPASS(voe_base_->Terminate());
+  ASSERT_TRUE(xADM->ReferenceCounter() == 1);
+
+  // Our reference-count implementation does not self destruct.
+  // We do it manually here instead by calling Release followed by delete.
+  ASSERT_TRUE(AudioDeviceModuleImpl::Destroy(xADM));
+  ANL();
+  AOK();ANL();
+
+  // >> end of Init(AudioDeviceModule)
+  // ------------------------------------------------------------------------
+
+  ///////////////////////////
+  // MaxNumOfChannels
+  TEST(MaxNumOfChannels);
+  ANL();
+  TEST_MUSTPASS(voe_base_->MaxNumOfChannels() < 0);
+  MARK();
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  ////////////////////////
+  // CreateChannel
+  // DeleteChannel
+
+  int i;
+  int channel;
+  int nChannels(voe_base_->MaxNumOfChannels());
+
+  TEST(CreateChannel);
+  ANL();
+  TEST(DeleteChannel);
+  ANL();
+
+  TEST_MUSTPASS(voe_base_->Init());
+
+  channel = voe_base_->CreateChannel();
+  MARK();
+  TEST_MUSTPASS(channel != 0);
+  channel = voe_base_->CreateChannel();
+  MARK();
+  TEST_MUSTPASS(channel != 1);
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  MARK();
+  TEST_MUSTPASS(voe_base_->DeleteChannel(1));
+  MARK();
+
+  // create and delete one channel many times
+  for (i = 0; i < 10; i++) {
+    channel = voe_base_->CreateChannel();
+    MARK();
+    TEST_MUSTPASS(channel != 0); // should be 0 each time
+    TEST_MUSTPASS(voe_base_->DeleteChannel(channel));
+    MARK();
+  }
+  // create max number of channels
+  for (i = 0; i < nChannels; i++) {
+    channel = voe_base_->CreateChannel();
+    MARK();
+    TEST_MUSTPASS(channel != i);
+  }
+  channel = voe_base_->CreateChannel();
+  MARK(); // should fail since no more channels can now be created
+  TEST_MUSTPASS(channel != -1);
+
+  int aChannel = (((nChannels - 17) > 0) ? (nChannels - 17) : 0);
+  TEST_MUSTPASS(voe_base_->DeleteChannel(aChannel));
+  MARK();
+  channel = voe_base_->CreateChannel();
+  MARK(); // should reuse channel
+  TEST_MUSTPASS(channel != aChannel);
+
+  // delete all created channels
+  for (i = 0; i < nChannels; i++) {
+    TEST_MUSTPASS(voe_base_->DeleteChannel(i));
+    MARK();
+  }
+
+  // try to delete a non-existing channel
+  TEST_MUSTPASS(-1 != voe_base_->DeleteChannel(aChannel));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // ------------------------------------------------------------------------
+  // >> SetLocalReceiver
+  //
+  // State: VE not initialized, no existing channels
+  TEST_MUSTPASS(voe_base_->Init());
+
+  int ch;
+
+  TEST(SetLocalReceiver);
+  ANL();
+
+  // no channel created yet => should fail
+  TEST_MUSTPASS(!voe_base_->SetLocalReceiver(0, 100));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  ch = voe_base_->CreateChannel();
+
+#ifdef MAC_IPHONE
+  printf("\nNOTE: Local IP must be set in source code (line %d) \n",
+      __LINE__ + 1);
+  char* localIp = "127.0.0.1";
+#else
+  char localIp[64] = { 0 };
+  TEST_MUSTPASS(netw->GetLocalIP(localIp));
+  MARK();
+  // NOTE: This API is supported on Win, Mac and Linux and may fail or not
+  // return local IP for other platforms.
+#endif
+
+  // trivial invalid function calls
+  TEST_MUSTPASS(!voe_base_->SetLocalReceiver(ch+1, 12345));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+  TEST_MUSTPASS(!voe_base_->SetLocalReceiver(ch, -1));
+  MARK();
+  TEST_ERROR(VE_INVALID_PORT_NMBR);
+
+  // check conflict with ongoing receiving
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345));
+  MARK();
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(!voe_base_->SetLocalReceiver(ch, 12345));
+  MARK();
+  TEST_ERROR(VE_ALREADY_LISTENING);
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  // check conflict with ongoing transmission
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 12345, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  TEST_MUSTPASS(!voe_base_->SetLocalReceiver(ch, 12345));
+  MARK();
+  TEST_ERROR(VE_ALREADY_SENDING);
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+
+  // valid function calls
+  // Need to sleep between, otherwise it may fail for unknown reason
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345));
+  MARK();
+  SLEEP(100);
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345));
+  MARK();
+  SLEEP(100);
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345, kVoEDefault, localIp));
+  MARK();
+  SLEEP(100);
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345, kVoEDefault, NULL,
+          "230.1.2.3"));
+  MARK();
+  SLEEP(100);
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345, kVoEDefault, localIp,
+          "230.1.2.3"));
+  MARK();
+  SLEEP(100);
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345, 5555, NULL));
+  MARK();
+  SLEEP(100);
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345));
+  MARK();
+  SLEEP(100);
+
+  // STATE: no media but sockets exists and are binded to 12345 and 12346
+  // respectively
+
+  // Add some dynamic tests as well:
+
+  // ensure that last setting is used (cancels old settings)
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345));
+  MARK();
+  SLEEP(100);
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 44444));
+  MARK();
+  SLEEP(100);
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 54321));
+  MARK();
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 54321, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  Play(ch, 1000, true, true);
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(ch));
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of SetLocalReceiver
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> GetLocalReceiver
+  //
+  // State: VE initialized, no existing channels
+  TEST(GetLocalReceiver);
+  ANL();
+
+  int port;
+  char ipaddr[64];
+  int RTCPport;
+
+  ch = voe_base_->CreateChannel();
+
+  // verify non-configured (blank) local receiver
+  TEST_MUSTPASS(voe_base_->GetLocalReceiver(ch, port, RTCPport, ipaddr));
+  MARK();
+  TEST_MUSTPASS(port != 0);
+  TEST_MUSTPASS(RTCPport != 0);
+  TEST_MUSTPASS(strcmp(ipaddr, "") != 0);
+
+  // check some trivial set/get combinations
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345))
+  TEST_MUSTPASS(voe_base_->GetLocalReceiver(ch, port, RTCPport, ipaddr));
+  MARK();
+  TEST_MUSTPASS(port != 12345);
+  TEST_MUSTPASS(RTCPport != 12346);
+  TEST_MUSTPASS(strcmp(ipaddr, "0.0.0.0") != 0); // now binded to "any" IP
+
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345, 55555))
+  TEST_MUSTPASS(voe_base_->GetLocalReceiver(ch, port, RTCPport, ipaddr));
+  MARK();
+  TEST_MUSTPASS(port != 12345);
+  TEST_MUSTPASS(RTCPport != 55555);
+  TEST_MUSTPASS(strcmp(ipaddr, "0.0.0.0") != 0);
+
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345, kVoEDefault, localIp))
+  TEST_MUSTPASS(voe_base_->GetLocalReceiver(ch, port, RTCPport, ipaddr));
+  MARK();
+  TEST_MUSTPASS(port != 12345);
+  TEST_MUSTPASS(RTCPport != 12346);
+  TEST_MUSTPASS(strcmp(ipaddr, localIp) != 0);
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(ch));
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of GetLocalReceiver
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> SetSendDestination
+  //
+  // State: VE initialized, no existing channels
+  TEST(SetSendDestination);
+  ANL();
+
+  // call without existing channel
+  TEST_MUSTPASS(!voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  ch = voe_base_->CreateChannel();
+
+  // trivial fail tests
+  TEST_MUSTPASS(!voe_base_->SetSendDestination(ch, 65536, "127.0.0.1"));
+  MARK();
+  TEST_ERROR(VE_INVALID_PORT_NMBR); // invalid RTP port
+  TEST_MUSTPASS(!voe_base_->SetSendDestination(ch, 12345, "127.0.0.1", 65536));
+  MARK();
+  TEST_ERROR(VE_INVALID_PORT_NMBR); // invalid source port
+  TEST_MUSTPASS(!voe_base_->SetSendDestination(ch, 12345, "127.0.0.1", kVoEDefault,
+          65536));
+  MARK();
+  TEST_ERROR(VE_INVALID_PORT_NMBR); // invalid RTCP port
+  TEST_MUSTPASS(!voe_base_->SetSendDestination(ch, 12345, "127.0.0.300"));
+  MARK();
+  TEST_ERROR(VE_INVALID_IP_ADDRESS); // invalid IP address
+
+  // sockets must be created first to support multi-cast (not required
+  // otherwise)
+  TEST_MUSTPASS(!voe_base_->SetSendDestination(ch, 55555, "230.0.0.1"));
+  MARK();
+  TEST_ERROR(VE_SOCKET_ERROR);
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 55555)); // create sockets
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 55555, "230.0.0.1"));
+  MARK(); // should work now
+
+  voe_base_->DeleteChannel(0);
+  ch = voe_base_->CreateChannel();
+
+  // STATE: one channel created, no sockets exist
+
+  // valid function calls
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 33333, "127.0.0.1"));
+  MARK();
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 33333, "127.0.0.1", 44444));
+  MARK();
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 33333, "127.0.0.1", kVoEDefault,
+          55555));
+  MARK();
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 33333, "127.0.0.1", 44444,
+          55555));
+  MARK();
+
+  voe_base_->DeleteChannel(0);
+  ch = voe_base_->CreateChannel();
+
+  // create receive sockets first and then an extra pair of send sockets
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 44444));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 44444, "127.0.0.1", 11111));
+  MARK(); // binds to 11111
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(ch));
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of SetSendDestination
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> GetSendDestination
+  //
+  // State: VE initialized, no existing channels
+  TEST(GetSendDestination);
+  ANL();
+
+  int sourcePort;
+
+  ch = voe_base_->CreateChannel();
+
+  // verify non-configured (blank) local receiver
+  TEST_MUSTPASS(voe_base_->GetSendDestination(ch, port, ipaddr, sourcePort,
+          RTCPport));
+  MARK();
+  TEST_MUSTPASS(port != 0);
+  TEST_MUSTPASS(sourcePort != 0);
+  TEST_MUSTPASS(RTCPport != 0);
+  TEST_MUSTPASS(strcmp(ipaddr, "") != 0);
+
+  // check some trivial set/get combinations
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 44444, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->GetSendDestination(ch, port, ipaddr, sourcePort,
+          RTCPport));
+  MARK();
+  TEST_MUSTPASS(port != 44444);
+  TEST_MUSTPASS(sourcePort != 0); // should be 0 since no local receiver has
+  // NOT been defined yet
+  TEST_MUSTPASS(RTCPport != 44445);
+  TEST_MUSTPASS(strcmp(ipaddr, "127.0.0.1") != 0);
+
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 55555));
+  TEST_MUSTPASS(voe_base_->GetSendDestination(ch, port, ipaddr, sourcePort,
+          RTCPport));
+  MARK();
+  TEST_MUSTPASS(port != 44444);
+  TEST_MUSTPASS(sourcePort != 55555); // should be equal to local port
+  TEST_MUSTPASS(RTCPport != 44445);
+  TEST_MUSTPASS(strcmp(ipaddr, "127.0.0.1") != 0);
+
+  voe_base_->DeleteChannel(0);
+  ch = voe_base_->CreateChannel();
+
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 44444, "127.0.0.1"));
+  // NULL as IP-address input should work as well
+  TEST_MUSTPASS(voe_base_->GetSendDestination(ch, port, NULL, sourcePort,
+          RTCPport));
+  MARK();
+  TEST_MUSTPASS(port != 44444);
+  TEST_MUSTPASS(sourcePort != 0);
+  TEST_MUSTPASS(RTCPport != 44445);
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(ch));
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of GetLocalReceiver
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> StartReceive
+  // >> StopReceive
+  //
+  // State: VE initialized, no existing channels
+  TEST(StartReceive);
+  ANL();
+  TEST(StopReceive);
+  ANL();
+
+  // call without existing channel
+  TEST_MUSTPASS(!voe_base_->StartReceive(0));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+  TEST_MUSTPASS(!voe_base_->StopReceive(0));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  ch = voe_base_->CreateChannel();
+
+  // sockets must be created first
+  TEST_MUSTPASS(!voe_base_->StartReceive(0));
+  MARK();
+  TEST_ERROR(VE_SOCKETS_NOT_INITED);
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 55555));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  MARK(); // should work this time
+
+  // enable again (should work)
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  MARK();
+
+  // Stop/Start (should work)
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  MARK();
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  MARK();
+
+  // Verify in loopback
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 55555, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  Play(ch, 1000, true, true);
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  MARK();
+
+  voe_base_->DeleteChannel(0);
+  ch = voe_base_->CreateChannel();
+
+  // Ensure that it is OK to add delay between SetLocalReceiver and StarListen
+  TEST_LOG("\nspeak after 2 seconds and ensure that no delay is added:\n");
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 55555));
+
+  Sleep(2000, true); // adding emulated delay here
+
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 55555, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  Play(ch, 2000, true, true);
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(ch));
+  ANL();
+
+  // Multi-channel tests
+
+  for (i = 0; i < voe_base_->MaxNumOfChannels(); i++) {
+    ch = voe_base_->CreateChannel();
+    TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 11111+2*i));
+    TEST_MUSTPASS(voe_base_->StartReceive(ch));
+    MARK();
+  }
+  for (i = 0; i < voe_base_->MaxNumOfChannels(); i++) {
+    TEST_MUSTPASS(voe_base_->StopReceive(i));
+    MARK();
+    voe_base_->DeleteChannel(i);
+  }
+  for (i = 0; i < voe_base_->MaxNumOfChannels(); i++) {
+    ch = voe_base_->CreateChannel();
+    TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 11111+2*i));
+    TEST_MUSTPASS(voe_base_->StartReceive(ch));
+    MARK();
+    TEST_MUSTPASS(voe_base_->StopReceive(ch));
+    MARK();
+    voe_base_->DeleteChannel(ch);
+  }
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of StartReceive/StopReceive
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> StartPlayout
+  // >> StopPlayout
+  //
+  // State: VE initialized, no existing channels
+  TEST(StartPlayout);
+  ANL();
+  TEST(StopPlayout);
+  ANL();
+
+  // call without existing channel
+  TEST_MUSTPASS(!voe_base_->StartPlayout(0));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+  TEST_MUSTPASS(!voe_base_->StopPlayout(0));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  ch = voe_base_->CreateChannel();
+
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+  MARK();
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+  MARK();
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  MARK();
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  MARK();
+
+  voe_base_->DeleteChannel(ch);
+
+  // Multi-channel tests
+  const int MaxNumberOfPlayingChannels(kVoiceEngineMaxNumOfActiveChannels);
+
+  for (i = 0; i < MaxNumberOfPlayingChannels; i++) {
+    ch = voe_base_->CreateChannel();
+    TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+    MARK();
+  }
+  for (i = 0; i < MaxNumberOfPlayingChannels; i++) {
+    TEST_MUSTPASS(voe_base_->StopPlayout(i));
+    MARK();
+    voe_base_->DeleteChannel(i);
+  }
+  for (i = 0; i < MaxNumberOfPlayingChannels; i++) {
+    ch = voe_base_->CreateChannel();
+    TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+    MARK();
+    TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+    MARK();
+    voe_base_->DeleteChannel(ch);
+  }
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of StartPlayout/StopPlayout
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> StartSend
+  // >> StopSend
+  //
+  // State: VE initialized, no existing channels
+  TEST(StartSend);
+  ANL();
+  TEST(StopSend);
+  ANL();
+
+  // call without existing channel
+  TEST_MUSTPASS(!voe_base_->StartSend(0));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+  TEST_MUSTPASS(!voe_base_->StopSend(0));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  ch = voe_base_->CreateChannel();
+
+  // call without initialized destination
+  TEST_MUSTPASS(!voe_base_->StartSend(ch));
+  MARK();
+  TEST_ERROR(VE_DESTINATION_NOT_INITED);
+
+  // initialize destination and try again (should work even without existing
+  // sockets)
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 33333, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  MARK();
+  SLEEP(100);
+
+  // STATE: sockets should now have been created automatically at the first
+  // transmitted packet should be binded to 33333 and "0.0.0.0"
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  MARK();
+
+  voe_base_->DeleteChannel(ch);
+  ch = voe_base_->CreateChannel();
+
+  // try loopback with unique send sockets (closed when channel is deleted or
+  // new source is set)
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 33333));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 33333, "127.0.0.1", 44444));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  MARK();
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  Play(ch, 2000, true, true);
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  MARK();
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  voe_base_->DeleteChannel(ch);
+  ANL();
+
+  // Multi-channel tests
+  for (i = 0; i < voe_base_->MaxNumOfChannels(); i++) {
+    ch = voe_base_->CreateChannel();
+    TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 33333 + 2*i));
+    TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 33333 + 2*i, "127.0.0.1"));
+    TEST_MUSTPASS(voe_base_->StartSend(ch));
+    MARK();
+  }
+  for (i = 0; i < voe_base_->MaxNumOfChannels(); i++) {
+    TEST_MUSTPASS(voe_base_->StopSend(i));
+    MARK();
+    voe_base_->DeleteChannel(i);
+  }
+  for (i = 0; i < voe_base_->MaxNumOfChannels(); i++) {
+    ch = voe_base_->CreateChannel();
+    TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 45633 + 2*i));
+    TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 45633 + 2*i, "127.0.0.1"));
+    TEST_MUSTPASS(voe_base_->StartSend(ch));
+    MARK();
+    TEST_MUSTPASS(voe_base_->StopSend(ch));
+    MARK();
+    voe_base_->DeleteChannel(ch);
+  }
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of StartSend/StopSend
+  // ------------------------------------------------------------------------
+
+  //////////////////////////////
+  // SetNetEQPlayoutMode
+  // GetNetEQPlayoutMode
+  TEST(SetNetEQPlayoutMode);
+  ANL();
+  TEST(GetNetEQPlayoutMode);
+  ANL();
+
+  NetEqModes mode;
+
+  ch = voe_base_->CreateChannel();
+
+  // invalid function calls (should fail)
+  TEST_MUSTPASS(!voe_base_->GetNetEQPlayoutMode(ch+1, mode));
+  MARK();
+  TEST_MUSTPASS(!voe_base_->SetNetEQPlayoutMode(ch+1, kNetEqDefault));
+  MARK();
+
+  // verify default mode (should be kNetEqDefault)
+  TEST_MUSTPASS(voe_base_->GetNetEQPlayoutMode(ch, mode));
+  MARK();
+  TEST_MUSTPASS(mode != kNetEqDefault);
+  TEST_MUSTPASS(voe_base_->SetNetEQPlayoutMode(ch, kNetEqStreaming));
+  MARK();
+  voe_base_->DeleteChannel(ch);
+
+  // ensure that default mode is set as soon as new channel is created
+  ch = voe_base_->CreateChannel();
+  TEST_MUSTPASS(voe_base_->GetNetEQPlayoutMode(ch, mode));
+  MARK();
+  TEST_MUSTPASS(mode != kNetEqDefault);
+  voe_base_->DeleteChannel(ch);
+
+  // verify Set/Get for all supported modes and max number of channels
+  for (i = 0; i < voe_base_->MaxNumOfChannels(); i++) {
+    ch = voe_base_->CreateChannel();
+
+    // verify Set/Get for all supported modes
+    TEST_MUSTPASS(voe_base_->SetNetEQPlayoutMode(i, kNetEqDefault));
+    MARK();
+    TEST_MUSTPASS(voe_base_->GetNetEQPlayoutMode(i, mode));
+    MARK();
+    TEST_MUSTPASS(mode != kNetEqDefault);
+    TEST_MUSTPASS(voe_base_->SetNetEQPlayoutMode(i, kNetEqStreaming));
+    MARK();
+    TEST_MUSTPASS(voe_base_->GetNetEQPlayoutMode(i, mode));
+    MARK();
+    TEST_MUSTPASS(mode != kNetEqStreaming);
+    TEST_MUSTPASS(voe_base_->SetNetEQPlayoutMode(i, kNetEqFax));
+    MARK();
+    TEST_MUSTPASS(voe_base_->GetNetEQPlayoutMode(i, mode));
+    MARK();
+    TEST_MUSTPASS(mode != kNetEqFax);
+    SLEEP(50);
+  }
+
+  for (i = 0; i < voe_base_->MaxNumOfChannels(); i++) {
+    voe_base_->DeleteChannel(i);
+  }
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  //////////////////////////////
+  // SetNetEQBGNMode
+  // GetNetEQBGNMode
+  TEST(SetNetEQBGNMode);
+  ANL();
+  TEST(GetNetEQBGNMode);
+  ANL();
+
+  NetEqBgnModes bgnMode;
+
+  ch = voe_base_->CreateChannel();
+
+  // invalid function calls (should fail)
+  TEST_MUSTPASS(!voe_base_->GetNetEQBGNMode(ch+1, bgnMode));
+  MARK();
+  TEST_MUSTPASS(!voe_base_->SetNetEQBGNMode(ch+1, kBgnOn));
+  MARK();
+
+  // verify default mode (should be kBgnOn)
+  TEST_MUSTPASS(voe_base_->GetNetEQBGNMode(ch, bgnMode));
+  MARK();
+  TEST_MUSTPASS(bgnMode != kBgnOn);
+  voe_base_->DeleteChannel(ch);
+
+  // ensure that default mode is set as soon as new channel is created
+  ch = voe_base_->CreateChannel();
+  TEST_MUSTPASS(voe_base_->GetNetEQBGNMode(ch, bgnMode));
+  MARK();
+  TEST_MUSTPASS(bgnMode != kBgnOn);
+  voe_base_->DeleteChannel(ch);
+
+  // verify Set/Get for all supported modes and max number of channels
+  for (i = 0; i < voe_base_->MaxNumOfChannels(); i++) {
+    ch = voe_base_->CreateChannel();
+
+    // verify Set/Get for all supported modes
+    TEST_MUSTPASS(voe_base_->SetNetEQBGNMode(i, kBgnOn));
+    MARK();
+    TEST_MUSTPASS(voe_base_->GetNetEQBGNMode(i, bgnMode));
+    MARK();
+    TEST_MUSTPASS(bgnMode != kBgnOn);
+    TEST_MUSTPASS(voe_base_->SetNetEQBGNMode(i, kBgnFade));
+    MARK();
+    TEST_MUSTPASS(voe_base_->GetNetEQBGNMode(i, bgnMode));
+    MARK();
+    TEST_MUSTPASS(bgnMode != kBgnFade);
+    TEST_MUSTPASS(voe_base_->SetNetEQBGNMode(i, kBgnOff));
+    MARK();
+    TEST_MUSTPASS(voe_base_->GetNetEQBGNMode(i, bgnMode));
+    MARK();
+    TEST_MUSTPASS(bgnMode != kBgnOff);
+    SLEEP(50);
+  }
+
+  for (i = 0; i < voe_base_->MaxNumOfChannels(); i++) {
+    voe_base_->DeleteChannel(i);
+  }
+
+  // Verify real-time performance for all playout modes in full duplex
+
+  ch = voe_base_->CreateChannel();
+
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch , 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 12345, "127.0.0.1"));
+
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+
+  TEST_MUSTPASS(voe_base_->SetNetEQPlayoutMode(ch, kNetEqDefault));
+  MARK();
+  TEST_LOG("\nenjoy full duplex using kNetEqDefault playout mode...\n");
+  PAUSE
+
+  TEST_MUSTPASS(voe_base_->SetNetEQPlayoutMode(ch, kNetEqStreaming));
+  MARK();
+  TEST_LOG("\nenjoy full duplex using kNetEqStreaming playout mode...\n");
+  PAUSE
+
+  TEST_MUSTPASS(voe_base_->SetNetEQPlayoutMode(ch, kNetEqFax));
+  MARK();
+  TEST_LOG("\nenjoy full duplex using kNetEqFax playout mode...\n");
+  PAUSE
+
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  voe_base_->DeleteChannel(ch);
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  /////////////////////
+  // Full duplex tests
+
+  ch = voe_base_->CreateChannel(); // We must delete this channel first to be able
+  // to reuse port 12345
+
+  // start with default case, also test non-default RTCP port
+#ifdef _TEST_RTP_RTCP_
+  TEST_MUSTPASS(rtp->SetRTCP_CNAME(ch, "Johnny"));
+#endif
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345, 12349));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 12345, "127.0.0.1", kVoEDefault,
+          12349));
+
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+
+  TEST_LOG("full duplex is now activated (1)\n");
+  TEST_LOG("waiting for RTCP packet...\n");
+
+  SLEEP(7000); // Make sure we get RTCP packet
+  PAUSE;
+
+  // Verify that we got RTCP packet from correct source port
+#ifdef _TEST_RTP_RTCP_
+  char tmpStr[64] = { 0 };
+  TEST_MUSTPASS(rtp->GetRemoteRTCP_CNAME(ch, tmpStr));
+  TEST_MUSTPASS(_stricmp("Johnny", tmpStr));
+#endif
+  int rtpPort(0), rtcpPort(0);
+  char ipAddr[64] = { 0 };
+  TEST_MUSTPASS(netw->GetSourceInfo(ch, rtpPort, rtcpPort, ipAddr));
+  TEST_MUSTPASS(12349 != rtcpPort);
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  // Call StartSend before StartReceive
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 12345, "127.0.0.1"));
+
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+
+  TEST_LOG("\nfull duplex is now activated (2)\n");
+
+  PAUSE
+
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  // Try again using same ports
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 12345, "127.0.0.1"));
+
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+
+  TEST_LOG("\nfull duplex is now activated (3)\n");
+  TEST_LOG("waiting for RTCP packet...\n");
+
+  SLEEP(7000); // Make sure we get RTCP packet
+  PAUSE
+
+  // Verify correct RTCP source port
+  TEST_MUSTPASS(netw->GetSourceInfo(ch, rtpPort, rtcpPort, ipAddr));
+  TEST_MUSTPASS(12345+1 != rtcpPort);
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  voe_base_->DeleteChannel(ch);
+  ch = voe_base_->CreateChannel();
+
+  // Try with extra send socket
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch , 22222));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 22222, "127.0.0.1", 11111));
+
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+
+  TEST_LOG("\nfull duplex is now activated (4)\n");
+
+  PAUSE
+
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  // repeat default case starting with a fresh channel
+
+  voe_base_->DeleteChannel(ch);
+  ch = voe_base_->CreateChannel();
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch , 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 12345, "127.0.0.1"));
+
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+
+  TEST_LOG("\nfull duplex is now activated (5)\n");
+
+  PAUSE
+
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  // restart call again
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 12345));
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+
+  TEST_LOG("\nfull duplex is now activated (6)\n");
+
+  PAUSE
+
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  // force sending from new socket
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch , 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 12345, "127.0.0.1", 12350,
+          12359));
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+  TEST_LOG("\nfull duplex is now activated (7)\n");
+
+  PAUSE
+
+  // Test getting send settings
+  TEST_MUSTPASS(voe_base_->GetSendDestination(ch, rtpPort, ipAddr, sourcePort,
+          rtcpPort));
+  TEST_MUSTPASS(12345 != rtpPort);
+  TEST_MUSTPASS(_stricmp("127.0.0.1", ipAddr));
+  TEST_MUSTPASS(12350 != sourcePort);
+  TEST_MUSTPASS(12359 != rtcpPort);
+
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  // new channel and new port
+  ch = voe_base_->CreateChannel();
+
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch , 33221));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 33221, "127.0.0.1"));
+
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+
+  TEST_LOG("\nfull duplex is now activated (8)\n");
+
+  PAUSE
+
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+
+  voe_base_->DeleteChannel(ch);
+  ch = voe_base_->CreateChannel();
+
+#ifndef MAC_IPHONE
+  // bind to local IP and try again
+  strcpy(localIp, "127.0.0.1");
+#else
+  localIp = "127.0.0.1";
+#endif
+
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch, 33221, 12349, localIp));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch, 33221, localIp));
+
+  TEST_MUSTPASS(voe_base_->StartReceive(ch));
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StartSend(ch));
+
+  TEST_LOG("\nfull duplex is now activated (9)\n");
+
+  PAUSE
+
+  TEST_MUSTPASS(voe_base_->GetLocalReceiver(ch, rtpPort, rtcpPort, ipAddr));
+  TEST_MUSTPASS(33221 != rtpPort);
+  TEST_MUSTPASS(_stricmp(localIp, ipAddr));
+  TEST_MUSTPASS(12349 != rtcpPort);
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  //////////////////////
+  // Trace filter tests
+
+#ifdef _USE_EXTENDED_TRACE_
+  TEST(SetTraceFilter); ANL();
+
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(GetFilename(""
+              "VoEBase_trace_filter.txt").c_str())); MARK();
+  SLEEP(100);
+
+  // Test a few different filters, verify in trace file
+  // Each SetTraceFilter calls should be seen once, no more, no less
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceNone)); MARK();
+  SLEEP(300);
+  // API call and info should NOT be seen in log
+  TEST_MUSTPASS(voe_base_->SetOnHoldStatus(0, true)); MARK();
+  // API call and error should NOT be seen in log
+  TEST_MUSTPASS(!voe_base_->SetOnHoldStatus(999, true)); MARK();
+
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceApiCall |
+          kTraceCritical |
+          kTraceError |
+          kTraceWarning)); MARK();
+  SLEEP(300);
+  // API call should and info should NOT be seen in log
+  TEST_MUSTPASS(voe_base_->SetOnHoldStatus(0, false)); MARK();
+  // API call and error should be seen in log
+  TEST_MUSTPASS(!voe_base_->SetOnHoldStatus(999, true)); MARK();
+
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceApiCall | kTraceInfo));
+  MARK();
+  SLEEP(300);
+  // API call and info should be seen in log
+  TEST_MUSTPASS(voe_base_->SetOnHoldStatus(0, true)); MARK();
+  // API call should and error should NOT be seen in log
+  TEST_MUSTPASS(!voe_base_->SetOnHoldStatus(999, true)); MARK();
+
+  // Back to default
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceAll)); MARK();
+  SLEEP(300);
+
+  AOK(); ANL();
+#endif
+
+  // ------------------------------------------------------------------------
+  // >> Multiple instance testing
+  //
+  // We should support 8 instances simultaneously
+  // and at least one should be able to have a call running
+
+  // One instance is already created
+  VoiceEngine* instVE[7];
+  VoEBase* baseVE[7];
+  for (int instNum = 0; instNum < 7; instNum++) {
+    instVE[instNum] = VoiceEngine::Create();
+    baseVE[instNum] = VoEBase::GetInterface(instVE[instNum]);
+    TEST_MUSTPASS(baseVE[instNum]->Init());
+    TEST_MUSTPASS(baseVE[instNum]->CreateChannel());
+  }
+
+  TEST_LOG("Created 7 more instances of VE, make sure audio is ok...\n\n");
+  PAUSE
+
+  for (int instNum = 0; instNum < 7; instNum++) {
+    TEST_MUSTPASS(baseVE[instNum]->DeleteChannel(0));
+    TEST_MUSTPASS(baseVE[instNum]->Terminate());
+    baseVE[instNum]->Release();
+    VoiceEngine::Delete(instVE[instNum]);
+  }
+
+  AOK();
+  ANL();
+
+  //////////////
+  // Close down
+  TEST_MUSTPASS(voe_base_->StopSend(ch));
+  TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  TEST_MUSTPASS(voe_base_->StopReceive(ch));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(ch));
+
+  voe_base_->DeleteChannel(0);
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestCallReport
+// ----------------------------------------------------------------------------
+
+int VoEExtendedTest::TestCallReport() {
+  // Get required sub-API pointers
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoECallReport* report = _mgr.CallReportPtr();
+  VoEFile* file = _mgr.FilePtr();
+  VoEAudioProcessing* apm = _mgr.APMPtr();
+  VoENetwork* netw = _mgr.NetworkPtr();
+
+  PrepareTest("CallReport");
+
+  // check if this interface is supported
+  if (!report) {
+    TEST_LOG("VoECallReport is not supported!");
+    return -1;
+  }
+
+#ifdef _USE_EXTENDED_TRACE_
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(
+      GetFilename("VoECallReport_trace.txt").c_str()));
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceStateInfo |
+          kTraceStateInfo |
+          kTraceWarning |
+          kTraceError |
+          kTraceCritical |
+          kTraceApiCall |
+          kTraceMemory |
+          kTraceInfo));
+#endif
+
+  TEST_MUSTPASS(voe_base_->Init());
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(0, _mgr.AudioFilename(),
+          true, true));
+
+  ///////////////////////////
+  // Actual test starts here
+  TEST(ResetCallReportStatistics);
+  ANL();
+  TEST_MUSTPASS(!report->ResetCallReportStatistics(-2));
+  MARK(); // not OK
+  TEST_MUSTPASS(!report->ResetCallReportStatistics(1));
+  MARK(); // not OK
+  TEST_MUSTPASS(report->ResetCallReportStatistics(0));
+  MARK(); // OK
+  TEST_MUSTPASS(report->ResetCallReportStatistics(-1));
+  MARK(); // OK
+  AOK();
+  ANL();
+
+  bool enabled = false;
+  EchoStatistics echo;
+  TEST(GetEchoMetricSummary);
+  ANL();
+  TEST_MUSTPASS(apm->GetEcMetricsStatus(enabled));
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(apm->SetEcMetricsStatus(true));
+  TEST_MUSTPASS(report->GetEchoMetricSummary(echo)); // all outputs will be
+  // -100 in loopback (skip further tests)
+  AOK();
+  ANL();
+
+  // TODO(xians): investigate the cause of test failure before enabling.
+  /*
+   StatVal delays;
+   TEST(GetRoundTripTimeSummary);
+   ANL();
+   // All values should be >=0 since RTCP is now on
+   TEST_MUSTPASS(report->GetRoundTripTimeSummary(0, delays));
+   MARK();
+   TEST_MUSTPASS(delays.min == -1);
+   TEST_MUSTPASS(delays.max == -1);
+   TEST_MUSTPASS(delays.average == -1);
+   rtp_rtcp->SetRTCPStatus(0, false);
+   // All values should be -1 since RTCP is off
+   TEST_MUSTPASS(report->GetRoundTripTimeSummary(0, delays));
+   MARK();
+   TEST_MUSTPASS(delays.min != -1);
+   TEST_MUSTPASS(delays.max != -1);
+   TEST_MUSTPASS(delays.average != -1);
+   rtp_rtcp->SetRTCPStatus(0, true);
+   AOK();
+   ANL();
+   */
+
+  int nDead = 0;
+  int nAlive = 0;
+  TEST(GetDeadOrAliveSummary);
+  ANL();
+  // All results should be -1 since dead-or-alive is not active
+  TEST_MUSTPASS(report->GetDeadOrAliveSummary(0, nDead, nAlive) != -1);
+  MARK();
+  TEST_MUSTPASS(netw->SetPeriodicDeadOrAliveStatus(0, true, 1));
+  SLEEP(2000);
+  // All results should be >= 0 since dead-or-alive is active
+  TEST_MUSTPASS(report->GetDeadOrAliveSummary(0, nDead, nAlive));
+  MARK();
+  TEST_MUSTPASS(nDead == -1);
+  TEST_MUSTPASS(nAlive == -1)
+  TEST_MUSTPASS(netw->SetPeriodicDeadOrAliveStatus(0, false));
+  AOK();
+  ANL();
+
+  TEST(WriteReportToFile);
+  ANL();
+
+  // Greek and Coptic (see http://www.utf8-chartable.de/unicode-utf8-table.pl)
+  char fileNameUTF8[64];
+
+  fileNameUTF8[0] = (char) 0xce;
+  fileNameUTF8[1] = (char) 0xba;
+  fileNameUTF8[2] = (char) 0xce;
+  fileNameUTF8[3] = (char) 0xbb;
+  fileNameUTF8[4] = (char) 0xce;
+  fileNameUTF8[5] = (char) 0xbd;
+  fileNameUTF8[6] = (char) 0xce;
+  fileNameUTF8[7] = (char) 0xbe;
+  fileNameUTF8[8] = '.';
+  fileNameUTF8[9] = 't';
+  fileNameUTF8[10] = 'x';
+  fileNameUTF8[11] = 't';
+  fileNameUTF8[12] = 0;
+
+  TEST_MUSTPASS(!report->WriteReportToFile(NULL));
+  MARK();
+  TEST_MUSTPASS(report->WriteReportToFile("call_report.txt"));
+  MARK();
+  TEST_MUSTPASS(report->WriteReportToFile(fileNameUTF8));
+  MARK(); // should work with UTF-8 as well (κλνξ.txt)
+  AOK();
+  ANL();
+
+  TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestCodec
+// ----------------------------------------------------------------------------
+
+int VoEExtendedTest::TestCodec() {
+  PrepareTest("Codec");
+
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoECodec* codec = _mgr.CodecPtr();
+  VoEFile* file = _mgr.FilePtr();
+
+#ifdef _USE_EXTENDED_TRACE_
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(
+      GetFilename("VoECodec_trace.txt").c_str()));
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceStateInfo |
+          kTraceStateInfo |
+          kTraceWarning |
+          kTraceError |
+          kTraceCritical |
+          kTraceApiCall |
+          kTraceMemory |
+          kTraceInfo));
+#endif
+
+  TEST_MUSTPASS(voe_base_->Init());
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+#ifdef WEBRTC_EXTERNAL_TRANSPORT
+  ExtendedTestTransport* ptrTransport(NULL);
+  ptrTransport = new ExtendedTestTransport(netw);
+  TEST_MUSTPASS(netw->RegisterExternalTransport(0, *ptrTransport));
+#else
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+#endif
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+
+  ///////////////////////////
+  // Actual test starts here
+
+  int i;
+  int err;
+
+  CodecInst cinst;
+
+  /////////////////////////
+  // GetNumOfCodecs
+
+  int nCodecs;
+
+  TEST(GetNumOfCodecs);
+  ANL();
+  // validate #codecs
+  nCodecs = codec->NumOfCodecs();
+  MARK();
+  TEST_MUSTPASS(nCodecs < 0);
+  AOK();
+  ANL();
+
+  ///////////////////
+  // GetCodec
+  TEST(GetCodec);
+  ANL();
+  // scan all supported codecs
+  nCodecs = codec->NumOfCodecs();
+  for (int index = 0; index < nCodecs; index++) {
+    TEST_MUSTPASS(codec->GetCodec(index, cinst));
+    TEST_LOG("[%2d] %16s: fs=%6d, pt=%4d, rate=%7d, ch=%2d, size=%5d", index, cinst.plname,
+             cinst.plfreq, cinst.pltype, cinst.rate, cinst.channels, cinst.pacsize);
+    if (cinst.pltype == -1) {
+      TEST_LOG(" <= NOTE pt=-1\n");
+    } else {
+      ANL();
+    }
+  }
+
+  // ensure that an invalid index parameter is detected
+  TEST_MUSTPASS(-1 != codec->GetCodec(-1, cinst));
+  nCodecs = codec->NumOfCodecs();
+  TEST_MUSTPASS(-1 != codec->GetCodec(nCodecs, cinst));
+  MARK();
+  // ensure that error code is VE_INVALID_LISTNR
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_LISTNR);
+  AOK();
+  ANL();
+
+  ///////////////////////
+  // GetSendCodec
+  TEST(GetSendCodec);
+  ANL();
+
+  CodecInst defaultCodec;
+
+  // check the channel parameter
+  int nMaxChannels(voe_base_->MaxNumOfChannels());
+  TEST_MUSTPASS(-1 != codec->GetSendCodec(nMaxChannels-1, cinst));
+  MARK(); // not created
+  TEST_MUSTPASS(-1 != codec->GetSendCodec(nMaxChannels, cinst));
+  MARK(); // out of range
+  TEST_MUSTPASS(-1 != codec->GetSendCodec(-1, cinst));
+  MARK(); // out of range
+  TEST_MUSTPASS(codec->GetSendCodec(0, cinst));
+  MARK(); // OK
+
+  nCodecs = codec->NumOfCodecs();
+  for (int index = 0; index < nCodecs; index++) {
+    TEST_MUSTPASS(codec->GetCodec(index, defaultCodec));
+    if (codec->SetSendCodec(0, defaultCodec) == 0) {
+      TEST_MUSTPASS(codec->GetSendCodec(0, cinst));
+      MARK();
+      //TEST_LOG("[%2d] %s: fs=%d, pt=%d, rate=%d, ch=%d, size=%d\n",
+      // index, cinst.plname, cinst.plfreq, cinst.pltype, cinst.rate,
+      // cinst.channels, cinst.pacsize);
+      TEST_MUSTPASS(cinst.pacsize != defaultCodec.pacsize);
+      TEST_MUSTPASS(cinst.plfreq != defaultCodec.plfreq);
+      TEST_MUSTPASS(cinst.pltype != defaultCodec.pltype);
+      TEST_MUSTPASS(cinst.rate != defaultCodec.rate);
+      TEST_MUSTPASS(cinst.channels != defaultCodec.channels);
+    }
+  }
+
+  ANL();
+  AOK();
+  ANL();
+
+  ///////////////////////
+  // SetSendCodec
+  TEST(SetSendCodec);
+  ANL();
+
+  // --- Scan all supported codecs and set default parameters
+
+  nCodecs = codec->NumOfCodecs();
+  for (int index = 0; index < nCodecs; index++) {
+    // Get default (ACM) settings
+    TEST_MUSTPASS(codec->GetCodec(index, cinst));
+    defaultCodec = cinst;
+    TEST_LOG("[%2d] %s (default): fs=%d, pt=%d, rate=%d, ch=%d, size=%d\n",
+             index, cinst.plname, cinst.plfreq, cinst.pltype, cinst.rate,
+             cinst.channels, cinst.pacsize);
+
+    // Verify invalid codec names
+    if (!_stricmp("CN", cinst.plname) || !_stricmp("telephone-event",
+                                                   cinst.plname)
+        || !_stricmp("red", cinst.plname)) {
+      // default settings for invalid payload names (should give
+      // VE_INVALID_PLNAME)
+      TEST_MUSTPASS(!codec->SetSendCodec(0, cinst));
+      err = voe_base_->LastError();
+      TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+      continue;
+    }
+
+    // If we build the ACM with more codecs than we have payload types,
+    // some codecs will be given -1 as default payload type. This is a fix
+    // to ensure that we can complete these tests also for this case.
+    if (cinst.pltype == -1) {
+      cinst.pltype = 97;
+    }
+
+    // --- Default settings
+    TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+    // --- Packet size
+    TEST_LOG("\npacsize : ");
+
+    for (int pacsize = 80; pacsize < 1440; pacsize += 80) {
+      cinst.pacsize = pacsize;
+      if (-1 != codec->SetSendCodec(0, cinst)) {
+        // log valid packet size
+        TEST_LOG("%d ", pacsize);
+      } else {
+        err = voe_base_->LastError();
+        TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+      }
+    }
+    cinst.pacsize = defaultCodec.pacsize;
+
+    // --- Audio channels (1/mono or 2/stereo)
+    TEST_LOG("\nchannels: ");
+    for (int channels = 1; channels < 4; channels++) {
+      cinst.channels = channels;
+      if (-1 != codec->SetSendCodec(0, cinst)) {
+        // Valid channels currently.
+        // 1 should always be OK for all codecs.
+        // 2 is OK for stereo codecs and some of mono codecs.
+        TEST_LOG("%d ", channels);
+      } else {
+        // Invalide channels. Currently there should be two cases:
+        // 2 would fail to some mono codecs with VE_CANNOT_SET_SEND_CODEC;
+        // 3(and higher) should always fail with VE_INVALID_ARGUMENT.
+        err = voe_base_->LastError();
+        ASSERT_TRUE((err == VE_INVALID_ARGUMENT)||
+                    (err == VE_CANNOT_SET_SEND_CODEC));
+      }
+    }
+    cinst.channels = defaultCodec.channels;
+
+    // --- Payload frequency
+    TEST_LOG("\nplfreq  : ");
+    cinst.plfreq = defaultCodec.plfreq;
+    TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+    TEST_LOG("%d ", cinst.plfreq);
+
+    // --- Payload name
+
+    strcpy(cinst.plname, "INVALID");
+    TEST_MUSTPASS(-1 != codec->SetSendCodec(0, cinst))
+    {
+      // ensure that error code is VE_INVALID_PLNAME
+      err = voe_base_->LastError();
+      TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+    }
+
+    // restore default plname
+    strcpy(cinst.plname, defaultCodec.plname);
+
+    // --- Payload type (dynamic range is 96-127)
+    TEST_LOG("\npltype  : ");
+    // All PT should be OK, test a few different
+    cinst.pltype = defaultCodec.pltype;
+    TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+    TEST_LOG("%d ", cinst.pltype);
+    cinst.pltype = defaultCodec.pltype + 1;
+    TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+    TEST_LOG("%d ", cinst.pltype);
+    const int valid_pltypes[4] = { 0, 96, 117, 127 };
+    for (i = 0; i < static_cast<int> (sizeof(valid_pltypes) / sizeof(int)); i++) {
+      cinst.pltype = valid_pltypes[i];
+      TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+      TEST_LOG("%d ", cinst.pltype);
+    }
+    // Restore default
+    cinst.pltype = defaultCodec.pltype;
+
+    // --- Codec rate
+    TEST_LOG("\nrate    : ");
+    if (_stricmp("isac", cinst.plname) == 0) {
+      // ISAC
+      if (cinst.plfreq == 16000) {
+        int valid_rates[3] = { -1, 10000, 32000 };
+        // failed in RegisterPayload when rate is 32000
+        for (i = 0; i < static_cast<int> (sizeof(valid_rates) / sizeof(int)); i++) {
+          cinst.rate = valid_rates[i];
+          TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+          TEST_LOG("%d ", cinst.rate);
+        }
+        cinst.rate = 0; // invalid
+        TEST_MUSTPASS(-1 != codec->SetSendCodec(0, cinst))
+        {
+          // ensure that error code is VE_CANNOT_SET_SEND_CODEC
+          err = voe_base_->LastError();
+          TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+        }
+        ANL();
+      } else //ISACSWB
+      {
+        // rate changing fails in RegisterPayload
+        int valid_rates[8] = { -1, 10000, 25000, 32000, 35000, 45000, 50000, 52000 };
+        for (i = 0; i < static_cast<int> (sizeof(valid_rates) / sizeof(int)); i++) {
+          cinst.rate = valid_rates[i];
+          TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+          TEST_LOG("%d ", cinst.rate);
+        }
+        int invalid_rates[3] = { 0, 5000, 57000 }; // invalid
+        for (i = 0; i < static_cast<int> (sizeof(invalid_rates) / sizeof(int)); i++) {
+          cinst.rate = invalid_rates[i];
+          TEST_MUSTPASS(-1 != codec->SetSendCodec(0, cinst))
+          {
+            // ensure that error code is VE_CANNOT_SET_SEND_CODEC
+            err = voe_base_->LastError();
+            TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+          }
+        }
+        ANL();
+      }
+    } else if (_stricmp("amr", cinst.plname) == 0) {
+      int valid_rates[8] = { 4750, 5150, 5900, 6700, 7400, 7950, 10200, 12200 };
+      for (i = 0;
+          i < static_cast<int> (sizeof(valid_rates) / sizeof(int));
+          i++) {
+        cinst.rate = valid_rates[i];
+        TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+        TEST_LOG("%d ", cinst.rate);
+      }
+      ANL();
+    } else if (_stricmp("g7291", cinst.plname) == 0) {
+      int valid_rates[12] = { 8000, 12000, 14000, 16000, 18000, 20000, 22000,
+                              24000, 26000, 28000, 30000, 32000 };
+      for (i = 0;
+          i < static_cast<int> (sizeof(valid_rates) / sizeof(int));
+          i++) {
+        cinst.rate = valid_rates[i];
+        TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+        TEST_LOG("%d ", cinst.rate);
+      }
+      ANL();
+    } else if (_stricmp("amr-wb", cinst.plname) == 0) {
+      int valid_rates[9] = { 7000, 9000, 12000, 14000, 16000, 18000, 20000,
+                             23000, 24000 };
+      for (i = 0;
+          i < static_cast<int> (sizeof(valid_rates) / sizeof(int));
+          i++) {
+        cinst.rate = valid_rates[i];
+        TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+        TEST_LOG("%d ", cinst.rate);
+      }
+      TEST_LOG(" <=> ");
+      ANL();
+    } else if (_stricmp("speex", cinst.plname) == 0) {
+      // Valid speex rates are > 2000, testing some of them here
+      int valid_rates[9] = { 2001, 4000, 7000, 11000, 15000, 20000, 25000,
+          33000, 46000 };
+      for (i = 0;
+          i < static_cast<int> (sizeof(valid_rates) / sizeof(int));
+          i++) {
+        cinst.rate = valid_rates[i];
+        TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+        TEST_LOG("%d ", cinst.rate);
+      }
+      cinst.rate = 2000; // invalid
+      TEST_MUSTPASS(-1 != codec->SetSendCodec(0, cinst))
+      {
+        err = voe_base_->LastError();
+        TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+      }
+      ANL();
+    } else if (_stricmp("silk", cinst.plname) == 0) {
+      // Valid Silk rates are 6000 - 40000, listing some of them here
+      int valid_rates[7] = { 6000, 10000, 15000, 20000, 25000, 32000, 40000 };
+      for (i = 0;
+          i < static_cast<int> (sizeof(valid_rates) / sizeof(int));
+          i++) {
+        cinst.rate = valid_rates[i];
+        TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+        TEST_LOG("%d ", cinst.rate);
+      }
+      cinst.rate = 5999; // invalid
+      TEST_MUSTPASS(-1 != codec->SetSendCodec(0, cinst))
+      {
+        err = voe_base_->LastError();
+        TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+      }
+      cinst.rate = 40001; // invalid
+      TEST_MUSTPASS(-1 != codec->SetSendCodec(0, cinst))
+      {
+        err = voe_base_->LastError();
+        TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+      }
+      ANL();
+    } else {
+      // Use default rate for all other codecs.
+      cinst.rate = defaultCodec.rate;
+      TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+      TEST_LOG("%d ", cinst.rate);
+      cinst.rate = defaultCodec.rate + 17;
+      TEST_MUSTPASS(!codec->SetSendCodec(0, cinst));
+      err = voe_base_->LastError();
+      TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+      ANL();
+    }
+    cinst.rate = defaultCodec.rate;
+
+    // run some extra tests for L16
+    if (_stricmp("l16", cinst.plname) == 0) {
+      if (8000 == cinst.plfreq) {
+        // valid pacsizes: 80, 160, 240, 320
+        cinst.pacsize = 480; // only supported in combination with 16kHz
+        TEST_MUSTPASS(-1 != codec->SetSendCodec(0, cinst));
+        err = voe_base_->LastError();
+        TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+        cinst.pacsize = 640; // only supported in combination with 16kHz
+        TEST_MUSTPASS(-1 != codec->SetSendCodec(0, cinst));
+        err = voe_base_->LastError();
+        TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+      } else {
+        // valid pacsizes: 160, 320, 480, 640
+        cinst.pacsize = 80; // only supported in combination with 8kHz
+        TEST_MUSTPASS(-1 != codec->SetSendCodec(0, cinst));
+        err = voe_base_->LastError();
+        TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+        cinst.pacsize = 240; // only supported in combination with 8kHz
+        TEST_MUSTPASS(-1 != codec->SetSendCodec(0, cinst));
+        err = voe_base_->LastError();
+        TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+      }
+    }
+    ANL();
+  } // for (int index = 0; index < nCodecs; index++)
+
+  // restore PCMU
+  const CodecInst tmp = { 0, "PCMU", 8000, 160, 1, 64000 };
+  TEST_MUSTPASS(codec->SetSendCodec(0, tmp));
+
+  ANL();
+  AOK();
+  ANL();
+
+  ///////
+  // VAD
+
+  const int VADSleep = 0;
+
+  bool disabledDTX;
+  VadModes mode;
+  bool enabled;
+
+  // verify default settings (should be OFF, kVadConventional and DTX enabled)
+  TEST_MUSTPASS(codec->GetVADStatus(0, enabled, mode, disabledDTX));
+  TEST_LOG("VAD: enabled=%d, mode=%d, disabledDTX=%d\n", enabled, mode,
+           disabledDTX);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(mode != kVadConventional);
+  TEST_MUSTPASS(disabledDTX != true);
+
+  // enable default VAD settings
+  TEST_MUSTPASS(codec->SetVADStatus(0, true));
+  TEST_MUSTPASS(codec->GetVADStatus(0, enabled, mode, disabledDTX));
+  TEST_LOG("VAD: enabled=%d, mode=%d, disabledDTX=%d\n", enabled, mode,
+           disabledDTX);
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(mode != kVadConventional);
+  TEST_MUSTPASS(disabledDTX != false);
+  SLEEP(VADSleep);
+
+  // set kVadConventional mode
+  TEST_MUSTPASS(codec->SetVADStatus(0, true, kVadConventional));
+  TEST_MUSTPASS(codec->GetVADStatus(0, enabled, mode, disabledDTX));
+  TEST_LOG("VAD: enabled=%d, mode=%d, disabledDTX=%d\n", enabled, mode,
+           disabledDTX);
+  TEST_MUSTPASS(mode != kVadConventional);
+  SLEEP(VADSleep);
+
+  // set kVadAggressiveLow mode
+  TEST_MUSTPASS(codec->SetVADStatus(0, true, kVadAggressiveLow));
+  TEST_MUSTPASS(codec->GetVADStatus(0, enabled, mode, disabledDTX));
+  TEST_LOG("VAD: enabled=%d, mode=%d, disabledDTX=%d\n", enabled, mode,
+           disabledDTX);
+  TEST_MUSTPASS(mode != kVadAggressiveLow);
+  SLEEP(VADSleep);
+
+  // set kVadAggressiveMid mode
+  TEST_MUSTPASS(codec->SetVADStatus(0, true, kVadAggressiveMid));
+  TEST_MUSTPASS(codec->GetVADStatus(0, enabled, mode, disabledDTX));
+  TEST_LOG("VAD: enabled=%d, mode=%d, disabledDTX=%d\n", enabled, mode,
+           disabledDTX);
+  TEST_MUSTPASS(mode != kVadAggressiveMid);
+  SLEEP(VADSleep);
+
+  // set kVadAggressiveMid mode
+  TEST_MUSTPASS(codec->SetVADStatus(0, true, kVadAggressiveHigh));
+  TEST_MUSTPASS(codec->GetVADStatus(0, enabled, mode, disabledDTX));
+  TEST_LOG("VAD: enabled=%d, mode=%d, disabledDTX=%d\n", enabled, mode,
+           disabledDTX);
+  TEST_MUSTPASS(mode != kVadAggressiveHigh);
+  SLEEP(VADSleep);
+
+  // turn DTX OFF (audio should not be affected by VAD decisions)
+  TEST_MUSTPASS(codec->SetVADStatus(0, true, kVadConventional, true));
+  TEST_MUSTPASS(codec->GetVADStatus(0, enabled, mode, disabledDTX));
+  TEST_LOG("VAD: enabled=%d, mode=%d, disabledDTX=%d\n", enabled, mode,
+           disabledDTX);
+  TEST_MUSTPASS(disabledDTX != true);
+  SLEEP(VADSleep);
+
+  // try to enable DTX again (should fail since VAD is disabled)
+  TEST_MUSTPASS(codec->SetVADStatus(0, false, kVadConventional, false));
+  TEST_MUSTPASS(codec->GetVADStatus(0, enabled, mode, disabledDTX));
+  TEST_LOG("VAD: enabled=%d, mode=%d, disabledDTX=%d\n", enabled, mode,
+           disabledDTX);
+  TEST_MUSTPASS(disabledDTX == false);
+  SLEEP(VADSleep);
+
+  // disable VAD
+  TEST_MUSTPASS(codec->SetVADStatus(0, false));
+  TEST_MUSTPASS(codec->GetVADStatus(0, enabled, mode, disabledDTX));
+  TEST_LOG("VAD: enabled=%d, mode=%d, disabledDTX=%d\n", enabled, mode,
+           disabledDTX);
+  TEST_MUSTPASS(enabled != false);
+  SLEEP(VADSleep);
+
+  // restore default VAD
+  TEST_MUSTPASS(codec->SetVADStatus(0, true));
+  TEST_MUSTPASS(codec->SetVADStatus(0, false));
+  TEST_MUSTPASS(codec->GetVADStatus(0, enabled, mode, disabledDTX));
+  TEST_LOG("VAD: enabled=%d, mode=%d, disabledDTX=%d\n", enabled, mode,
+           disabledDTX);
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(mode != kVadConventional);
+  TEST_MUSTPASS(disabledDTX != true);
+  SLEEP(VADSleep);
+
+  AOK();
+  ANL();
+  ANL();
+
+  //////////////////////
+  // GetRecCodec
+  TEST(GetRecCodec);
+  ANL();
+
+  // stop all streaming first
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+
+  // start loopback streaming (PCMU is default)
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0,8000,"127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0,8000));
+#endif
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(100); // ensure that at least one packets is received
+
+  // scan all supported and valid codecs
+  CodecInst newCodec;
+  for (i = 0; i < codec->NumOfCodecs(); i++) {
+    TEST_MUSTPASS(codec->GetCodec(i, newCodec));
+    // test all valid send codecs
+    if (!_stricmp("red", newCodec.plname) || !_stricmp("cn", newCodec.plname)
+        || !_stricmp("telephone-event", newCodec.plname)) {
+      continue; // Ignore these
+    }
+    if (-1 != codec->SetSendCodec(0, newCodec)) {
+      SLEEP(150);
+      // verify correct detection
+      TEST_MUSTPASS(codec->GetRecCodec(0, cinst));
+      TEST_LOG("%s %s ", newCodec.plname, cinst.plname);
+      TEST_MUSTPASS(_stricmp(newCodec.plname, cinst.plname) != 0);
+      TEST_MUSTPASS(cinst.pltype != newCodec.pltype);
+      TEST_MUSTPASS(cinst.plfreq != newCodec.plfreq);
+    }
+  }
+
+  // stop streaming
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+#ifdef WEBRTC_CODEC_GSMAMR
+  //////////////////////////
+  // SetAMREncFormat
+
+  // Fresh channel
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  TEST(SetAMREncFormat); ANL();
+
+  //set another codec which is not AMR
+  TEST_MUSTPASS(codec->GetCodec(0, cinst));
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  //try to change the encode format, tests should fail
+  TEST_MUSTPASS(-1 != codec->SetAMREncFormat(0)); MARK();
+  TEST_MUSTPASS(-1 != codec->SetAMREncFormat(0, kRfc3267BwEfficient));
+  MARK();
+  TEST_MUSTPASS(-1 != codec->SetAMREncFormat(0, kRfc3267OctetAligned));
+  MARK();
+  TEST_MUSTPASS(-1 != codec->SetAMREncFormat(0, kRfc3267FileStorage));
+  MARK();
+
+  //set AMR as encoder
+  strcpy(cinst.plname,"AMR");
+  cinst.channels=1; cinst.plfreq=8000; cinst.rate=12200; cinst.pltype=112;
+  cinst.pacsize=160;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  //try to change the encode format, tests should pass
+  TEST_MUSTPASS(codec->SetAMREncFormat(0)); MARK();
+  TEST_MUSTPASS(codec->SetAMREncFormat(0, kRfc3267BwEfficient)); MARK();
+  TEST_MUSTPASS(codec->SetAMREncFormat(0, kRfc3267OctetAligned)); MARK();
+  TEST_MUSTPASS(codec->SetAMREncFormat(0, kRfc3267FileStorage)); MARK();
+  TEST_MUSTPASS(-1 != codec->SetAMREncFormat(-1)); MARK();
+  TEST_MUSTPASS(codec->SetAMREncFormat(0)); MARK(); // restore default
+
+  ANL();
+  AOK();
+  ANL();
+
+  //////////////////////////
+  // SetAMRDecFormat
+
+  TEST(SetAMRDecFormat); ANL();
+
+  // It should not be possible to set AMR dec format before valid AMR decoder
+  // is registered
+  TEST_MUSTPASS(!codec->SetAMRDecFormat(0)); MARK();
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_AUDIO_CODING_MODULE_ERROR);
+
+  // Ensure that ACM::RegisterReceiveCodec(AMR) is called
+  TEST_MUSTPASS(codec->SetRecPayloadType(0, cinst));
+
+  // All these tests should now pass
+  TEST_MUSTPASS(codec->SetAMRDecFormat(0)); MARK();
+  TEST_MUSTPASS(codec->SetAMRDecFormat(0, kRfc3267BwEfficient)); MARK();
+  TEST_MUSTPASS(codec->SetAMRDecFormat(0, kRfc3267OctetAligned)); MARK();
+  TEST_MUSTPASS(codec->SetAMRDecFormat(0, kRfc3267FileStorage)); MARK();
+  TEST_MUSTPASS(-1 != codec->SetAMRDecFormat(-1)); MARK();
+  TEST_MUSTPASS(codec->SetAMRDecFormat(0)); MARK(); // restore default
+
+  ANL();
+  AOK();
+  ANL();
+#endif // #ifdef WEBRTC_CODEC_GSMAMR
+#ifdef WEBRTC_CODEC_GSMAMRWB
+  //////////////////////////
+  // SetAMRWbEncFormat
+
+  // Fresh channel
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  TEST(SetAMRWbEncFormat); ANL();
+
+  //set another codec which is not AMR-wb
+  TEST_MUSTPASS(codec->GetCodec(0, cinst));
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  //try to change the encode format, tests should fail
+  TEST_MUSTPASS(-1 != codec->SetAMRWbEncFormat(0)); MARK();
+  TEST_MUSTPASS(-1 != codec->SetAMRWbEncFormat(0, kRfc3267BwEfficient));
+  MARK();
+  TEST_MUSTPASS(-1 != codec->SetAMRWbEncFormat(0, kRfc3267OctetAligned));
+  MARK();
+  TEST_MUSTPASS(-1 != codec->SetAMRWbEncFormat(0, kRfc3267FileStorage));
+  MARK();
+
+  //set AMR-wb as encoder
+  strcpy(cinst.plname,"AMR-WB");
+  cinst.channels=1; cinst.plfreq=16000; cinst.rate=20000;
+  cinst.pltype=112; cinst.pacsize=320;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  //try to change the encode format, tests should pass
+  TEST_MUSTPASS(codec->SetAMRWbEncFormat(0)); MARK();
+  TEST_MUSTPASS(codec->SetAMRWbEncFormat(0, kRfc3267BwEfficient)); MARK();
+  TEST_MUSTPASS(codec->SetAMRWbEncFormat(0, kRfc3267OctetAligned)); MARK();
+  TEST_MUSTPASS(codec->SetAMRWbEncFormat(0, kRfc3267FileStorage)); MARK();
+  TEST_MUSTPASS(-1 != codec->SetAMRWbEncFormat(-1)); MARK();
+  TEST_MUSTPASS(codec->SetAMRWbEncFormat(0)); MARK(); // restore default
+
+  ANL();
+  AOK();
+  ANL();
+
+  //////////////////////////
+  // SetAMRDecFormat
+
+  TEST(SetAMRWbDecFormat); ANL();
+
+  // It should not be possible to set AMR dec format before valid AMR decoder
+  // is registered
+  TEST_MUSTPASS(!codec->SetAMRWbDecFormat(0)); MARK();
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_AUDIO_CODING_MODULE_ERROR);
+
+  // Ensure that ACM::RegisterReceiveCodec(AMR) is called
+  TEST_MUSTPASS(codec->SetRecPayloadType(0, cinst));
+
+  // All these tests should now pass
+  TEST_MUSTPASS(codec->SetAMRWbDecFormat(0)); MARK();
+  TEST_MUSTPASS(codec->SetAMRWbDecFormat(0, kRfc3267BwEfficient)); MARK();
+  TEST_MUSTPASS(codec->SetAMRWbDecFormat(0, kRfc3267OctetAligned)); MARK();
+  TEST_MUSTPASS(codec->SetAMRWbDecFormat(0, kRfc3267FileStorage)); MARK();
+  TEST_MUSTPASS(-1 != codec->SetAMRWbDecFormat(-1)); MARK();
+  TEST_MUSTPASS(codec->SetAMRWbDecFormat(0)); MARK(); // restore default
+
+  ANL();
+  AOK();
+  ANL();
+#endif // #ifdef WEBRTC_CODEC_GSMAMRWB
+  ///////////////////////////////
+  // SetSendCNPayloadType
+  TEST(SetSendCNPayloadType);
+  ANL();
+
+  TEST_MUSTPASS(-1 != codec->SetSendCNPayloadType(-1, 0));
+  MARK(); // invalid channel
+
+  // Invalid payload range (only dynamic range [96,127]
+  TEST_MUSTPASS(-1 != codec->SetSendCNPayloadType(0, 0));
+  MARK(); // invalid PT
+  TEST_MUSTPASS(-1 != codec->SetSendCNPayloadType(0, 95));
+  MARK(); // invalid PT
+  TEST_MUSTPASS(-1 != codec->SetSendCNPayloadType(0, 128));
+  MARK(); // invalid PT
+  TEST_MUSTPASS(-1 != codec->SetSendCNPayloadType(0, -1));
+  MARK(); // invalid PT
+
+  // Not possible to change PT for 8000
+  TEST_MUSTPASS(!codec->SetSendCNPayloadType(0, 96, kFreq8000Hz));
+  MARK();
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_PLFREQ);
+
+  // Try some dynamic for 16000 and 32000 as well
+  TEST_MUSTPASS(codec->SetSendCNPayloadType(0, 96, kFreq16000Hz));
+  MARK();
+  TEST_MUSTPASS(codec->SetSendCNPayloadType(0, 96, kFreq32000Hz));
+  MARK(); // same should work
+  TEST_MUSTPASS(codec->SetSendCNPayloadType(0, 127, kFreq16000Hz));
+  MARK();
+  TEST_MUSTPASS(codec->SetSendCNPayloadType(0, 127, kFreq32000Hz));
+  MARK();
+  TEST_MUSTPASS(codec->SetSendCNPayloadType(0, 100, kFreq32000Hz));
+  MARK();
+
+  ANL();
+  AOK();
+  ANL();
+
+  /////////////////////////////
+  // SetRecPayloadType
+  TEST(SetRecPayloadType);
+  ANL();
+
+  // scan all supported and valid codecs without changing payloads
+  nCodecs = codec->NumOfCodecs();
+  for (i = 0; i < nCodecs; i++) {
+    TEST_MUSTPASS(codec->GetCodec(i, newCodec));
+    // If no default payload type is defined, we use 127
+    if (-1 == newCodec.pltype) {
+      newCodec.pltype = 127;
+    }
+    TEST_MUSTPASS(codec->SetRecPayloadType(0, newCodec));
+    MARK(); // use default
+    newCodec.pltype = 99;
+    TEST_MUSTPASS(codec->SetRecPayloadType(0, newCodec));
+    MARK(); // use same PT on all
+    newCodec.pltype = -1;
+    TEST_MUSTPASS(codec->SetRecPayloadType(0, newCodec));
+    MARK(); // deregister all PTs
+  }
+
+  ANL();
+  AOK();
+  ANL();
+
+  /////////////////////////////
+  // GetRecPayloadType
+  TEST(GetRecPayloadType);
+  ANL();
+
+  CodecInst extraCodec;
+  for (i = 0; i < nCodecs; i++) {
+    // Set defaults
+    TEST_MUSTPASS(codec->GetCodec(i, newCodec));
+    // If no default payload type is defined, we use 127
+    if (-1 == newCodec.pltype) {
+      newCodec.pltype = 127;
+    }
+    TEST_MUSTPASS(codec->SetRecPayloadType(0, newCodec));
+    //TEST_LOG("[%2d] %s (SetRec): fs=%d, pt=%d, rate=%d, ch=%d, size=%d\n",
+    //  i, newCodec.plname, newCodec.plfreq, newCodec.pltype, newCodec.rate,
+    // newCodec.channels, newCodec.pacsize);
+    extraCodec.pltype = -1; // don't know this yet
+    extraCodec.plfreq = newCodec.plfreq;
+    extraCodec.rate = newCodec.rate;
+    extraCodec.channels = newCodec.channels;
+    strcpy(extraCodec.plname, newCodec.plname);
+    // Verfify that setting is OK
+    TEST_MUSTPASS(codec->GetRecPayloadType(0, extraCodec));
+    //TEST_LOG("[%2d] %s (GetRec): fs=%d, pt=%d, rate=%d, ch=%d, size=%d\n",
+    //  i, extraCodec.plname, extraCodec.plfreq, extraCodec.pltype,
+    // extraCodec.rate, extraCodec.channels, extraCodec.pacsize);
+    TEST_MUSTPASS(newCodec.pltype != extraCodec.pltype);
+    TEST_MUSTPASS(newCodec.plfreq != extraCodec.plfreq);
+    TEST_MUSTPASS(newCodec.channels != extraCodec.channels);
+  }
+
+  AOK();
+  ANL();
+
+  ////////////////////////////////////////////////////
+  // SetRecPayloadType - remove receive codecs
+  TEST(SetRecPayloadType - removing receive codecs);
+  ANL();
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 8000, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 8000));
+#endif
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  if (file) {
+    TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(0,
+            _mgr.AudioFilename(),
+            true,
+            true));
+  }
+
+  // Scan all supported and valid codecs and remove from receiving db, then
+  // restore
+  nCodecs = codec->NumOfCodecs();
+  for (i = 0; i < nCodecs; i++) {
+    TEST_MUSTPASS(codec->GetCodec(i, cinst));
+    if (!_stricmp("red", cinst.plname) || !_stricmp("cn", cinst.plname)
+        || !_stricmp("telephone-event", cinst.plname)) {
+      continue; // Ignore these
+    }
+    TEST_LOG("Testing codec: %s", cinst.plname);
+    fflush(NULL);
+
+    if (-1 == cinst.pltype) {
+      // If no default payload type is defined, we use 127,
+      // codec is not registered for receiving
+      cinst.pltype = 127;
+    } else {
+      // Remove codec
+      memcpy(&extraCodec, &cinst, sizeof(CodecInst));
+      extraCodec.pltype = -1;
+      TEST_MUSTPASS(codec->SetRecPayloadType(0, extraCodec));
+    }
+
+    // Set send codec
+    TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+    // Verify no audio
+    TEST_MUSTPASS(voe_base_->StartReceive(0));
+    TEST_MUSTPASS(voe_base_->StartPlayout(0));
+    TEST_LOG("  silence");
+    fflush(NULL);
+    SLEEP(800);
+    TEST_MUSTPASS(voe_base_->StopPlayout(0));
+    TEST_MUSTPASS(voe_base_->StopReceive(0));
+
+    // Restore codec
+    TEST_MUSTPASS(codec->SetRecPayloadType(0, cinst));
+
+    // Verify audio
+    TEST_MUSTPASS(voe_base_->StartReceive(0));
+    TEST_MUSTPASS(voe_base_->StartPlayout(0));
+    TEST_LOG("  audio");
+    fflush(NULL);
+    SLEEP(800);
+    TEST_MUSTPASS(voe_base_->StopPlayout(0));
+    TEST_MUSTPASS(voe_base_->StopReceive(0));
+
+    if (127 == cinst.pltype) {
+      // If no default payload type is defined, i.e. we have set pt to
+      //127 above,
+      // make sure we remove codec from receiving
+      cinst.pltype = -1;
+      TEST_MUSTPASS(codec->SetRecPayloadType(0, cinst));
+    }
+
+    ANL();
+  }
+
+  // Remove certain codecs
+  TEST_LOG("Removing receive codecs:");
+  for (i = 0; i < nCodecs; i++) {
+    TEST_MUSTPASS(codec->GetCodec(i, cinst));
+    if (!_stricmp("ipcmwb", cinst.plname) || !_stricmp("pcmu", cinst.plname)
+        || !_stricmp("eg711a", cinst.plname)) {
+      TEST_LOG(" %s", cinst.plname);
+      memcpy(&extraCodec, &cinst, sizeof(CodecInst));
+      extraCodec.pltype = -1;
+      TEST_MUSTPASS(codec->SetRecPayloadType(0, extraCodec));
+    }
+  }
+  ANL();
+
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+
+  // Test sending all codecs - verify audio/no audio depending on codec
+  TEST_LOG("Looping through send codecs \n");
+  TEST_LOG("Verify that removed codecs are not audible and the other are \n");
+  for (i = 0; i < nCodecs; i++) {
+    TEST_MUSTPASS(codec->GetCodec(i, cinst));
+    if (!_stricmp("red", cinst.plname) || !_stricmp("cn", cinst.plname)
+        || !_stricmp("telephone-event", cinst.plname)) {
+      continue; // Ignore these
+    }
+    TEST_LOG("Testing codec: %s \n", cinst.plname);
+
+    // If no default payload type is defined, we use 127 and set receive
+    // payload type
+    if (-1 == cinst.pltype) {
+      cinst.pltype = 127;
+      TEST_MUSTPASS(voe_base_->StopPlayout(0));
+      TEST_MUSTPASS(voe_base_->StopReceive(0));
+      TEST_MUSTPASS(codec->SetRecPayloadType(0, cinst));
+      TEST_MUSTPASS(voe_base_->StartReceive(0));
+      TEST_MUSTPASS(voe_base_->StartPlayout(0));
+    }
+
+    // Set send codec
+    TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+    // Verify audio/no audio
+    SLEEP(800);
+  }
+
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+
+  // Restore codecs
+  TEST_LOG("Restoring receive codecs:");
+  for (i = 0; i < nCodecs; i++) {
+    TEST_MUSTPASS(codec->GetCodec(i, cinst));
+    if (!_stricmp("ipcmwb", cinst.plname) || !_stricmp("pcmu", cinst.plname)
+        || !_stricmp("eg711a", cinst.plname)) {
+      TEST_LOG(" %s", cinst.plname);
+      memcpy(&extraCodec, &cinst, sizeof(CodecInst));
+      TEST_MUSTPASS(codec->SetRecPayloadType(0, cinst));
+    }
+  }
+  ANL();
+
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+
+  // Test sending all codecs - verify audio
+  TEST_LOG("Looping through send codecs \n");
+  TEST_LOG("Verify that all codecs are audible \n");
+  for (i = 0; i < nCodecs; i++) {
+    TEST_MUSTPASS(codec->GetCodec(i, cinst));
+    if (!_stricmp("red", cinst.plname) || !_stricmp("cn", cinst.plname)
+        || !_stricmp("telephone-event", cinst.plname)) {
+      continue; // Ignore these
+    }
+    TEST_LOG("Testing codec: %s \n", cinst.plname);
+
+    // If no default payload type is defined, we use 127 and set receive
+    // payload type
+    if (-1 == cinst.pltype) {
+      cinst.pltype = 127;
+      TEST_MUSTPASS(voe_base_->StopPlayout(0));
+      TEST_MUSTPASS(voe_base_->StopReceive(0));
+      TEST_MUSTPASS(codec->SetRecPayloadType(0, cinst));
+      TEST_MUSTPASS(voe_base_->StartReceive(0));
+      TEST_MUSTPASS(voe_base_->StartPlayout(0));
+    }
+
+    // Set send codec
+    TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+    // Verify audio/no audio
+    SLEEP(800);
+  }
+
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+
+  // Fresh channel
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+#if defined(WEBRTC_CODEC_ISAC)
+
+  /////////////////////////////////////
+  // SetISACInitTargetRate - wb
+  TEST(SetISACInitTargetRate);
+  ANL();
+
+  // set PCMU as sending codec
+  cinst.channels = 1;
+  cinst.pacsize = 160;
+  cinst.plfreq = 8000;
+  strcpy(cinst.plname, "PCMU");
+  cinst.pltype = 0;
+  cinst.rate = 64000;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+  TEST_MUSTPASS(!codec->SetISACInitTargetRate(0, 10000));
+  MARK(); // should fail since iSAC is not active
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_CODEC_ERROR);
+
+  // set iSAC as sending codec (16kHz)
+  cinst.channels = 1;
+  cinst.plfreq = 16000;
+  strcpy(cinst.plname, "ISAC");
+  cinst.pltype = 103;
+  cinst.rate = -1; // adaptive rate
+  cinst.pacsize = 480; // 30ms
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+  TEST_MUSTPASS(!codec->SetISACInitTargetRate(1, 10000));
+  MARK(); // invalid channel
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(!codec->SetISACInitTargetRate(0, 500));
+  MARK(); // invalid target rates (too small)
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(!codec->SetISACInitTargetRate(0, 33000));
+  MARK(); // invalid target rates (too large)
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 10000));
+  MARK(); // life is good now
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 0));
+  MARK(); // 0 is a valid rate
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 32000));
+  MARK(); // try max as well
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 32000, true));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 32000, false));
+  MARK();
+
+  cinst.pacsize = 960; // 60ms
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 32000, false));
+  MARK();
+
+  cinst.rate = 20000;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  TEST_MUSTPASS(!codec->SetISACInitTargetRate(0, 32000));
+  MARK(); // only works in adaptive mode
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_AUDIO_CODING_MODULE_ERROR);
+
+  cinst.rate = -1;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 32000));
+  MARK(); // back to adaptive mode
+
+  ANL();
+  AOK();
+  ANL();
+
+  /////////////////////////////////////
+  // SetISACInitTargetRate - swb
+  TEST(ISACSWB SetISACInitTargetRate);
+  ANL();
+
+  // set iSAC as sending codec
+  cinst.channels = 1;
+  cinst.plfreq = 32000;
+  strcpy(cinst.plname, "ISAC");
+  cinst.pltype = 104;
+  cinst.rate = -1; // default rate
+  cinst.pacsize = 960; // 30ms
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+  TEST_MUSTPASS(!codec->SetISACInitTargetRate(1, 10000));
+  MARK(); // invalid channel
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(!codec->SetISACInitTargetRate(0, -1));
+  MARK(); // invalid target rates (too small)
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!codec->SetISACInitTargetRate(0, -1));
+  MARK(); // invalid target rates (too small)
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(!codec->SetISACInitTargetRate(0, 500));
+  MARK(); // invalid target rates (too small)
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(!codec->SetISACInitTargetRate(0, 57000));
+  MARK(); // invalid target rates (valid range is [10000, 56000])
+
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 10000));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 0));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 56000));
+  MARK(); // try max as well
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 56000, true));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACInitTargetRate(0, 56000, false));
+  MARK();
+
+  ANL();
+  AOK();
+  ANL();
+
+  ////////////////////////////////
+  // SetISACMaxRate
+  TEST(SetISACMaxRate);
+  ANL();
+
+  // set PCMU as sending codec
+  cinst.channels = 1;
+  cinst.pacsize = 160;
+  cinst.plfreq = 8000;
+  strcpy(cinst.plname, "PCMU");
+  cinst.pltype = 0;
+  cinst.rate = 64000;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+  TEST_MUSTPASS(!codec->SetISACMaxRate(0, 48000));
+  MARK(); // should fail since iSAC is not active
+  TEST_MUSTPASS(voe_base_->LastError() != VE_CODEC_ERROR);
+
+  // set iSAC as sending codec
+  cinst.channels = 1;
+  cinst.plfreq = 16000;
+  strcpy(cinst.plname, "ISAC");
+  cinst.pltype = 103;
+  cinst.rate = -1; // adaptive rate
+  cinst.pacsize = 480; // 30ms
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+  TEST_MUSTPASS(!codec->SetISACMaxRate(1, 48000));
+  MARK(); // invalid channel
+  TEST_MUSTPASS(voe_base_->LastError() != VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(!codec->SetISACMaxRate(0, 31900));
+  MARK(); // invalid target rates (too small)
+  TEST_MUSTPASS(voe_base_->LastError() != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(!codec->SetISACMaxRate(0, 53500));
+  MARK(); // invalid target rates (too large)
+  TEST_MUSTPASS(voe_base_->LastError() != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 32000));
+  MARK(); // life is good now
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 40000));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 48000));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 53400));
+  MARK(); // try max as well (default)
+
+  cinst.pacsize = 960; // 60ms
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 48000));
+  MARK();
+
+  cinst.rate = 20000;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 40000));
+  MARK(); // also works in non-adaptive mode
+
+  ANL();
+  AOK();
+  ANL();
+
+  TEST(ISACSWB SetISACMaxRate);
+  ANL();
+  // set iSAC as sending codec
+  cinst.channels = 1;
+  cinst.plfreq = 32000;
+  strcpy(cinst.plname, "ISAC");
+  cinst.pltype = 104;
+  cinst.rate = 45000; // instantaneous mode
+  cinst.pacsize = 960; // 30ms
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+  TEST_MUSTPASS(!codec->SetISACMaxRate(1, 48000));
+  MARK(); // invalid channel
+  TEST_MUSTPASS(voe_base_->LastError() != VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(!codec->SetISACMaxRate(0, 31900));
+  MARK(); // invalid target rates (too small)
+  TEST_MUSTPASS(voe_base_->LastError() != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(!codec->SetISACMaxRate(0, 107500));
+  MARK(); // invalid target rates (too large)
+  TEST_MUSTPASS(voe_base_->LastError() != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 32000));
+  MARK(); // life is good now
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 40000));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 55000));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 80000));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 107000));
+  MARK(); // try max as well (default)
+
+
+  cinst.rate = -1; // adaptive mode
+  cinst.pacsize = 960; // 30ms
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+  TEST_MUSTPASS(!codec->SetISACMaxRate(1, 48000));
+  MARK(); // invalid channel
+  TEST_MUSTPASS(voe_base_->LastError() != VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(!codec->SetISACMaxRate(0, 31900));
+  MARK(); // invalid target rates (too small)
+  TEST_MUSTPASS(voe_base_->LastError() != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(!codec->SetISACMaxRate(0, 107500));
+  MARK(); // invalid target rates (too large)
+  TEST_MUSTPASS(voe_base_->LastError() != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 32000));
+  MARK(); // life is good now
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 40000));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 55000));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 80000));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACMaxRate(0, 107000));
+  MARK(); // try max as well (default)
+
+  ANL();
+  AOK();
+  ANL();
+
+  ////////////////////////////////
+  // SetISACMaxPayloadSize
+  TEST(SetISACMaxPayloadSize);
+  ANL();
+
+  // set PCMU as sending codec
+  cinst.channels = 1;
+  cinst.pacsize = 160;
+  cinst.plfreq = 8000;
+  strcpy(cinst.plname, "PCMU");
+  cinst.pltype = 0;
+  cinst.rate = 64000;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+  TEST_MUSTPASS(!codec->SetISACMaxPayloadSize(0, 120));
+  MARK(); // should fail since iSAC is not active
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_CODEC_ERROR);
+
+  // set iSAC as sending codec
+  cinst.channels = 1;
+  cinst.plfreq = 16000;
+  strcpy(cinst.plname, "ISAC");
+  cinst.pltype = 103;
+  cinst.rate = -1; // adaptive rate
+  cinst.pacsize = 480; // 30ms
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+  TEST_MUSTPASS(!codec->SetISACMaxPayloadSize(1, 120));
+  MARK(); // invalid channel
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(!codec->SetISACMaxPayloadSize(0, 100));
+  MARK(); // invalid size (too small)
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(!codec->SetISACMaxPayloadSize(0, 410));
+  MARK(); // invalid size (too large)
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(codec->SetISACMaxPayloadSize(0, 200));
+  MARK(); // life is good now
+  TEST_MUSTPASS(codec->SetISACMaxPayloadSize(0, 120));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACMaxPayloadSize(0, 400));
+  MARK();
+
+  ANL();
+  AOK();
+  ANL();
+
+  TEST(ISACSWB SetISACMaxPayloadSize);
+  ANL();
+  // set iSAC as sending codec
+  cinst.channels = 1;
+  cinst.plfreq = 32000;
+  strcpy(cinst.plname, "ISAC");
+  cinst.pltype = 104;
+  cinst.rate = 45000; // default rate
+  cinst.pacsize = 960; // 30ms
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+
+  TEST_MUSTPASS(!codec->SetISACMaxPayloadSize(1, 100));
+  MARK(); // invalid channel
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(!codec->SetISACMaxPayloadSize(0, 100));
+  MARK(); // invalid size (too small)
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(!codec->SetISACMaxPayloadSize(0, 610));
+  MARK(); // invalid size (too large)
+  err = voe_base_->LastError();
+  TEST_MUSTPASS(err != VE_INVALID_ARGUMENT);
+
+  TEST_MUSTPASS(codec->SetISACMaxPayloadSize(0, 200));
+  MARK(); // life is good now
+  TEST_MUSTPASS(codec->SetISACMaxPayloadSize(0, 120));
+  MARK();
+  TEST_MUSTPASS(codec->SetISACMaxPayloadSize(0, 600));
+  MARK();
+
+  ANL();
+  AOK();
+  ANL();
+
+  // set iSAC as sending codec
+  // set iSAC-wb as sending codec
+#ifdef WEBRTC_EXTERNAL_TRANSPORT
+  TEST_MUSTPASS(netw->RegisterExternalTransport(0, *ptrTransport));
+#else
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 8001, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 8001));
+#endif
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  std::string output_path = webrtc::test::OutputPath();
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(
+          0, (output_path + "audio_long16.pcm").c_str(), true , true));
+  cinst.channels = 1;
+  TEST_LOG("Testing codec: Switch between iSAC-wb and iSAC-swb \n");
+  TEST_LOG("Testing codec: iSAC wideband \n");
+  strcpy(cinst.plname, "ISAC");
+  cinst.pltype = 103;
+  cinst.rate = -1; // default rate
+  cinst.pacsize = 480; // 30ms
+  cinst.plfreq = 16000;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  SLEEP(2000);
+  TEST_LOG("             : iSAC superwideband \n");
+  cinst.pltype = 104;
+  cinst.rate = -1; // default rate
+  cinst.pacsize = 960; // 30ms
+  cinst.plfreq = 32000;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  SLEEP(2000);
+  TEST_LOG("             : iSAC wideband \n");
+  strcpy(cinst.plname, "ISAC");
+  cinst.pltype = 103;
+  cinst.rate = -1; // default rate
+  cinst.pacsize = 480; // 30ms
+  cinst.plfreq = 16000;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  SLEEP(2000);
+  TEST_LOG("             : iSAC superwideband \n");
+  cinst.pltype = 104;
+  cinst.rate = -1; // default rate
+  cinst.pacsize = 960; // 30ms
+  cinst.plfreq = 32000;
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  SLEEP(2000);
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+#else
+  TEST_LOG("Skipping extended iSAC API tests - "
+      "WEBRTC_CODEC_ISAC not defined\n");
+#endif // #if defined(WEBRTC_CODEC_ISAC)
+#ifdef WEBRTC_EXTERNAL_TRANSPORT
+  TEST_MUSTPASS(netw->DeRegisterExternalTransport(0));
+  delete ptrTransport;
+#endif
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestDtmf
+// ----------------------------------------------------------------------------
+
+int VoEExtendedTest::TestDtmf() {
+  PrepareTest("Dtmf");
+
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoEDtmf* dtmf = _mgr.DtmfPtr();
+  VoECodec* codec = _mgr.CodecPtr();
+  VoEVolumeControl* volume = _mgr.VolumeControlPtr();
+
+  std::string output_path = webrtc::test::OutputPath();
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(
+      (output_path + "VoEDtmf_trace.txt").c_str()));
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceStateInfo |
+          kTraceStateInfo |
+          kTraceWarning |
+          kTraceError |
+          kTraceCritical |
+          kTraceApiCall |
+          kTraceMemory |
+          kTraceInfo));
+  //#endif
+  TEST_MUSTPASS(voe_base_->Init());
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+
+  ///////////////////////////
+  // Actual test starts here
+
+  // SetDtmfFeedbackStatus
+  TEST(SetDtmfFeedbackStatus & GetDtmfFeedbackStatus);
+  ANL();
+  bool dtmfFeedback = false, dtmfDirectFeedback = true;
+  TEST_MUSTPASS(dtmf->GetDtmfFeedbackStatus(dtmfFeedback,
+          dtmfDirectFeedback));
+  TEST_MUSTPASS(!dtmfFeedback);
+  TEST_MUSTPASS(dtmfDirectFeedback);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0));
+  MARK();
+  SLEEP(500);
+
+  TEST_MUSTPASS(dtmf->SetDtmfFeedbackStatus(false, false));
+  TEST_MUSTPASS(dtmf->GetDtmfFeedbackStatus(dtmfFeedback,
+          dtmfDirectFeedback));
+  TEST_MUSTPASS(dtmfFeedback);
+  TEST_MUSTPASS(dtmfDirectFeedback);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0));
+  MARK();
+  SLEEP(500);
+
+  TEST_MUSTPASS(dtmf->SetDtmfFeedbackStatus(false, true));
+  TEST_MUSTPASS(dtmf->GetDtmfFeedbackStatus(dtmfFeedback,
+          dtmfDirectFeedback));
+  TEST_MUSTPASS(dtmfFeedback);
+  TEST_MUSTPASS(!dtmfDirectFeedback);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0));
+  MARK();
+  SLEEP(500);
+
+  TEST_MUSTPASS(dtmf->SetDtmfFeedbackStatus(true, false));
+  TEST_MUSTPASS(dtmf->GetDtmfFeedbackStatus(dtmfFeedback,
+          dtmfDirectFeedback));
+  TEST_MUSTPASS(!dtmfFeedback);
+  TEST_MUSTPASS(dtmfDirectFeedback);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0));
+  MARK();
+  SLEEP(500);
+
+  TEST_MUSTPASS(dtmf->SetDtmfFeedbackStatus(true, true));
+  TEST_MUSTPASS(dtmf->GetDtmfFeedbackStatus(dtmfFeedback,
+          dtmfDirectFeedback));
+  TEST_MUSTPASS(!dtmfFeedback);
+  TEST_MUSTPASS(!dtmfDirectFeedback);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->SetDtmfFeedbackStatus(false, false));
+
+  AOK();
+  ANL();
+
+  // SendDtmf
+  TEST(SendDtmf);
+  ANL();
+
+  // Fail tests
+  // Event
+  // the eventcode is changed to unsigned char, so -1 will be interpreted as
+  // 255, 256->0
+  TEST_MUSTPASS(!dtmf->SendTelephoneEvent(0, -1, false, 160, 10));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(!dtmf->SendTelephoneEvent(0, 16, false, 160, 10));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  // Length
+  TEST_MUSTPASS(!dtmf->SendTelephoneEvent(0, 0, true, 99, 10));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(!dtmf->SendTelephoneEvent(0, 0, true, 60001, 10));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(!dtmf->SendTelephoneEvent(0, 20, true, -1, 10));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  // Volume
+  TEST_MUSTPASS(!dtmf->SendTelephoneEvent(0, 0, true, 160, -1));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(!dtmf->SendTelephoneEvent(0, 0, true, 160, 37));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  // Without sending
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(!dtmf->SendTelephoneEvent(0, 0, true));
+  MARK();
+  TEST_MUSTPASS(VE_NOT_SENDING != voe_base_->LastError());
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+
+  // Testing Dtmf out-of-band: event, length and volume
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, true));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 16, true));
+  MARK();
+  SLEEP(500); // Flash, not audible
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, true, 100, 10));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, true, 400, 10));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, true, 160, 0));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, true, 160, 36));
+  MARK();
+  SLEEP(500);
+
+  // Testing Dtmf inband: event, length and volume
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, false));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 15, false));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, false, 100, 10));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, false, 400, 10));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, false, 160, 0));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, false, 160, 36));
+  MARK();
+  SLEEP(500);
+
+  // Testing other events out-of-band: event and length
+  // These are not audible
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 17, true, 100, 10));
+  MARK();
+  SLEEP(200);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 32, true, 100, 10));
+  MARK();
+  SLEEP(200);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 78, true, 100, 10));
+  MARK();
+  SLEEP(200);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 255, true, 100, 10));
+  MARK();
+  SLEEP(200);
+  // the minimum length is 100 for the telephoneevent
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 32, true, 100, 10));
+  MARK();
+  SLEEP(200);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 32, true, 1000, 10));
+  MARK();
+  SLEEP(1200);
+
+  AOK();
+  ANL();
+
+  // PlayDtmfTone
+  TEST(PlayDtmfTone);
+  ANL();
+  TEST_MUSTPASS(!dtmf->PlayDtmfTone(-1, 200, 10));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(!dtmf->PlayDtmfTone(16, 200, 10));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(!dtmf->PlayDtmfTone(0, 9, 10));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(!dtmf->PlayDtmfTone(0, 200, -1));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(!dtmf->PlayDtmfTone(0, 200, 37));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+
+  TEST_MUSTPASS(dtmf->PlayDtmfTone(0));
+  MARK();
+  SLEEP(500);
+  // the minimum length fo the DtmfTone is 100
+  TEST_MUSTPASS(dtmf->PlayDtmfTone(0, 100, 10));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->PlayDtmfTone(0, 2000, 10));
+  MARK();
+  SLEEP(2300);
+  TEST_MUSTPASS(dtmf->PlayDtmfTone(0, 200, 0));
+  MARK();
+  SLEEP(500);
+  TEST_MUSTPASS(dtmf->PlayDtmfTone(0, 200, 36));
+  MARK();
+  SLEEP(500);
+
+  AOK();
+  ANL();
+
+  // SetTelephoneEventDetection
+  TEST(SetTelephoneEventDetection);
+  ANL();
+  AOK();
+  ANL();
+
+  // Testing sending Dtmf under VAD/CN
+  TEST(SendDtmf - with VAD enabled);
+  ANL();
+  // Mute mic
+  TEST_MUSTPASS(volume->SetInputMute(0, true));
+  MARK();
+  // Enable VAD
+  TEST_MUSTPASS(codec->SetVADStatus(0, true));
+  MARK();
+  // Send Dtmf
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, true, 400));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 9, true, 400));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, true, 400));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 9, true, 400));
+  MARK();
+  SLEEP(1000);
+  // Switch codec
+  CodecInst ci;
+#if (!defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID))
+  ci.channels = 1;
+  ci.pacsize = 480;
+  ci.plfreq = 16000;
+  strcpy(ci.plname, "ISAC");
+  ci.pltype = 103;
+  ci.rate = -1;
+#else
+  ci.pltype = 119;
+  strcpy(ci.plname, "isaclc");
+  ci.plfreq = 16000;
+  ci.pacsize = 320;
+  ci.channels = 1;
+  ci.rate = 40000;
+#endif
+  TEST_MUSTPASS(codec->SetSendCodec(0, ci));
+  MARK();
+  // Send Dtmf
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, true, 400));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 9, true, 400));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 0, true, 400));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, 9, true, 400));
+  MARK();
+  SLEEP(1000);
+  SLEEP(4000);
+  // Disable VAD
+  TEST_MUSTPASS(codec->SetVADStatus(0, false));
+  MARK();
+  // Unmute
+  TEST_MUSTPASS(volume->SetInputMute(0, false));
+  MARK();
+
+  AOK();
+  ANL();
+
+  // SetSendTelephoneEventPayloadType
+  TEST(SetSendTelephoneEventPayloadType);
+  ANL();
+  TEST_MUSTPASS(!dtmf->SetSendTelephoneEventPayloadType(0, 128));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+
+  TEST_MUSTPASS(dtmf->SetSendTelephoneEventPayloadType(0, 96));
+  MARK();
+  TEST_MUSTPASS(dtmf->SetSendTelephoneEventPayloadType(0, 127));
+  MARK();
+  TEST_MUSTPASS(dtmf->SetSendTelephoneEventPayloadType(0, 106));
+  MARK(); // restore default
+
+  AOK();
+  ANL();
+
+#ifdef WEBRTC_DTMF_DETECTION
+  TEST(RegisterTelephoneEventDetection - several channels); ANL();
+
+  ci.channels = 1;
+  ci.pacsize = 160;
+  ci.plfreq = 8000;
+  ci.pltype = 0;
+  ci.rate = 64000;
+  strcpy(ci.plname, "PCMU");
+  TEST_MUSTPASS(codec->SetSendCodec(0, ci));
+
+  int ch2 = voe_base_->CreateChannel();
+  TEST_MUSTPASS(voe_base_->SetSendDestination(ch2, 8002, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(ch2, 8002));
+  TEST_MUSTPASS(voe_base_->StartReceive(ch2));
+  TEST_MUSTPASS(codec->SetSendCodec(ch2, ci));
+  TEST_MUSTPASS(voe_base_->StartPlayout(ch2));
+  TEST_MUSTPASS(voe_base_->StartSend(ch2));
+  MARK();
+
+  DtmfCallback *d = new DtmfCallback();
+  TEST_MUSTPASS(dtmf->SetDtmfFeedbackStatus(false));
+
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+
+  // In-band
+  TEST_MUSTPASS(dtmf->RegisterTelephoneEventDetection(0, kInBand, *d));
+  TEST_MUSTPASS(dtmf->RegisterTelephoneEventDetection(ch2, kInBand, *d));
+  TEST_LOG("\nSending in-band telephone events:");
+  for(int i = 0; i < 16; i++)
+  {
+    TEST_LOG("\n  %d ", i); fflush(NULL);
+    TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, i, false, 160, 10));
+    TEST_MUSTPASS(dtmf->SendTelephoneEvent(ch2, i, false, 160, 10));
+    SLEEP(500);
+  }
+  TEST_LOG("\nDetected %d events \n", d->counter);
+  TEST_MUSTPASS(d->counter != 32);
+  TEST_MUSTPASS(dtmf->DeRegisterTelephoneEventDetection(0));
+  TEST_MUSTPASS(dtmf->DeRegisterTelephoneEventDetection(ch2));
+
+  // Out-of-band
+  d->counter = 0;
+  TEST_MUSTPASS(dtmf->RegisterTelephoneEventDetection(0, kOutOfBand, *d));
+  TEST_MUSTPASS(dtmf->RegisterTelephoneEventDetection(ch2, kOutOfBand, *d));
+  TEST_LOG("\nSending out-band telephone events:");
+  for(int i = 0; i < 16; i++)
+  {
+    TEST_LOG("\n  %d ", i); fflush(NULL);
+    TEST_MUSTPASS(dtmf->SendTelephoneEvent(0, i, true, 160, 10));
+    TEST_MUSTPASS(dtmf->SendTelephoneEvent(ch2, i, true, 160, 10));
+    SLEEP(500);
+  }
+  TEST_LOG("\nDetected %d events \n", d->counter);
+  TEST_MUSTPASS(d->counter != 32);
+  TEST_MUSTPASS(dtmf->DeRegisterTelephoneEventDetection(0));
+  TEST_MUSTPASS(dtmf->DeRegisterTelephoneEventDetection(ch2));
+  delete d;
+
+  AOK(); ANL();
+#endif
+
+  TEST_MUSTPASS(dtmf->SetDtmfFeedbackStatus(true, false));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestEncryption
+// ----------------------------------------------------------------------------
+
+int VoEExtendedTest::TestEncryption() {
+  PrepareTest("Encryption");
+
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoEFile* file = _mgr.FilePtr();
+  VoEEncryption* encrypt = _mgr.EncryptionPtr();
+
+#ifdef _USE_EXTENDED_TRACE_
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(
+          GetFilename("VoEEncryption_trace.txt").c_str()));
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceStateInfo |
+          kTraceStateInfo |
+          kTraceWarning |
+          kTraceError |
+          kTraceCritical |
+          kTraceApiCall |
+          kTraceMemory |
+          kTraceInfo));
+#endif
+  TEST_MUSTPASS(voe_base_->Init());
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(0, _mgr.AudioFilename(),
+          true, true));
+
+    ///////////////////////////
+  // Actual test starts here
+
+  unsigned char key1[30] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6,
+      7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 };
+
+#ifdef WEBRTC_SRTP
+  unsigned char key2[30]; // Different than key1 in first position
+  memcpy(key2, key1, 30);
+  key2[0] = 99;
+  unsigned char key3[30]; // Different than key1 in last position
+  memcpy(key3, key1, 30);
+  key3[29] = 99;
+  unsigned char key4[29]; // Same as key1 but shorter
+  memcpy(key4, key1, 29);
+
+  TEST(SRTP - Fail tests); ANL();
+
+  // Send
+  // Incorrect parameters when not all protection is enabled
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherNull, 30, kAuthHmacSha1,
+          20, 4, kNoProtection, key1));
+  TEST_MUSTPASS(VE_SRTP_ERROR != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherNull, 30, kAuthHmacSha1,
+          20, 4, kEncryption key1));
+  TEST_MUSTPASS(VE_SRTP_ERROR != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherNull, 30, kAuthHmacSha1,
+          20, 4, kAuthentication, key1));
+  TEST_MUSTPASS(VE_SRTP_ERROR != voe_base_->LastError());
+  MARK();
+  // Incorrect cipher key length
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 15,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 257,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherNull, 15, kAuthHmacSha1,
+          20, 4, kEncryptionAndAuthentication,
+          key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherNull, 257, kAuthHmacSha1,
+          20, 4, kEncryptionAndAuthentication,
+          key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  // Incorrect auth key length
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 21, 4,
+          kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 257, 4,
+          kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  // Incorrect auth tag length
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 21,
+          kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 20, 13,
+          kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+
+  // key NULL pointer
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, NULL));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+
+  // Same for receive
+  // Incorrect parameters when not all protection is enabled
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherNull, 30, kAuthHmacSha1,
+          20, 4, kNoProtection, key1));
+  TEST_MUSTPASS(VE_SRTP_ERROR != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherNull, 30, kAuthHmacSha1,
+          20, 4, kEncryption key1));
+  TEST_MUSTPASS(VE_SRTP_ERROR != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherNull, 30, kAuthHmacSha1,
+          20, 4, kAuthentication, key1));
+  TEST_MUSTPASS(VE_SRTP_ERROR != voe_base_->LastError());
+  MARK();
+  // Incorrect cipher key length
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 15,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 257,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherNull, 15,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherNull, 257,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  // Incorrect auth key length
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode,
+          30, kAuthHmacSha1, 21, 4,
+          kEncryptionAndAuthentication,
+          key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  // it crashed the application
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 257, 4,
+          kEncryptionAndAuthentication,
+          key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  // Incorrect auth tag length
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 21,
+          kEncryptionAndAuthentication,
+          key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  // it crashed the application
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 20, 13,
+          kEncryptionAndAuthentication,
+          key1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  // key NULL pointer
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          NULL));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  ANL();
+
+  TEST(SRTP - Should hear audio at all time); ANL();
+
+  // Authentication only
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherNull, 0, kAuthHmacSha1, 20,
+          4, kAuthentication, key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherNull, 0, kAuthHmacSha1,
+          20, 4, kAuthentication, key1));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  MARK(); SLEEP(2000);
+  ANL();
+
+  // No protection
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherNull, 0, kAuthNull, 0, 0,
+          kNoProtection, key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherNull, 0, kAuthNull, 0, 0,
+          kNoProtection, key1));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  MARK(); SLEEP(2000);
+
+  // Encryption only
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 0, 0, kEncryption key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 0, 0,
+          kEncryption key1));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  MARK(); SLEEP(2000);
+
+  // Authentication only
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherNull, 0, kAuthHmacSha1, 20,
+          4, kAuthentication, key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherNull, 0, kAuthHmacSha1,
+          20, 4, kAuthentication, key1));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  MARK(); SLEEP(2000);
+  ANL();
+
+  // Switching between keys
+  TEST(SRTP - Different keys - should hear audio at all time); ANL();
+
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key2));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key2));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key1));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key1));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key2));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key2));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 8000));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 8000, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(0, _mgr.AudioFilename(),
+          true, true));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  MARK(); SLEEP(2000);
+  ANL();
+
+  // Testing different keys that should be silent
+  TEST(SRTP - Should be silent or garbage); ANL();
+
+  // key1 and key2
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key2));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key2));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key1));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 0, 0, kEncryption key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 0, 0,
+          kEncryption key2));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherNull, 0, kAuthHmacSha1,
+          20, 4, kAuthentication, key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherNull, 0, kAuthHmacSha1,
+          20, 4, kAuthentication, key2));
+  MARK(); SLEEP(2000);
+
+  // key1 and key3
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key3));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key3));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key1));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 0, 0, kEncryption key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 0, 0,
+          kEncryption key3));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherNull, 0, kAuthHmacSha1, 20,
+          4, kAuthentication, key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherNull, 0, kAuthHmacSha1,
+          20, 4, kAuthentication, key3));
+  MARK(); SLEEP(2000);
+
+  // key1 and key4
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key4));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication, key4));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1, 20, 4,
+          kEncryptionAndAuthentication,
+          key1));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 0, 0, kEncryption key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthNull, 0, 0,
+          kEncryption key4));
+  MARK(); SLEEP(2000);
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherNull, 0, kAuthHmacSha1, 20,
+          4, kAuthentication, key1));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherNull, 0, kAuthHmacSha1,
+          20, 4, kAuthentication, key4));
+  MARK(); SLEEP(2000);
+  ANL();
+
+  // Back to normal
+  TEST(SRTP - Back to normal - should hear audio); ANL();
+
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  MARK(); SLEEP(2000);
+  ANL();
+
+  // SRTCP tests
+  TEST(SRTCP - Ignore voice or not); ANL();
+  VoERTP_RTCP* rtp_rtcp = _mgr.RTP_RTCPPtr();
+  char tmpStr[32];
+
+  // First test that RTCP packet is received and OK without encryption
+
+  TEST_MUSTPASS(rtp_rtcp->SetRTCP_CNAME(0, "Henrik1"));
+  MARK(); SLEEP(8000);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteRTCP_CNAME(0, tmpStr));
+  TEST_MUSTPASS(_stricmp("Henrik1", tmpStr));
+
+  // Enable SRTP and SRTCP send and receive
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1,
+          20, 4, kEncryptionAndAuthentication, key1, true));
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1,
+          20, 4, kEncryptionAndAuthentication, key1, true));
+  TEST_MUSTPASS(rtp_rtcp->SetRTCP_CNAME(0, "Henrik2"));
+  MARK(); SLEEP(8000);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteRTCP_CNAME(0, tmpStr));
+  TEST_MUSTPASS(_stricmp("Henrik2", tmpStr));
+
+  // Disable SRTP and SRTCP send
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(rtp_rtcp->SetRTCP_CNAME(0, "Henrik3"));
+  MARK(); SLEEP(8000);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteRTCP_CNAME(0, tmpStr));
+  TEST_MUSTPASS(_stricmp("Henrik2", tmpStr)); // Should not have changed
+
+  // Enable SRTP send, but disable SRTCP send
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1,
+          20, 4, kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(rtp_rtcp->SetRTCP_CNAME(0, "Henrik4"));
+  MARK(); SLEEP(8000);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteRTCP_CNAME(0, tmpStr));
+  TEST_MUSTPASS(_stricmp("Henrik2", tmpStr)); // Should not have changed
+
+  // Enable SRTP and SRTCP send, disable SRTP and SRTCP receive
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->EnableSRTPSend(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1,
+          20, 4, kEncryptionAndAuthentication, key1, true));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(rtp_rtcp->SetRTCP_CNAME(0, "Henrik5"));
+  MARK(); SLEEP(8000);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteRTCP_CNAME(0, tmpStr));
+  TEST_MUSTPASS(_stricmp("Henrik2", tmpStr)); // Should not have changed
+
+  // Enable SRTP receive, but disable SRTCP receive
+  TEST_MUSTPASS(encrypt->EnableSRTPReceive(0, kCipherAes128CounterMode, 30,
+          kAuthHmacSha1,
+          20, 4, kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(rtp_rtcp->SetRTCP_CNAME(0, "Henrik6"));
+  MARK(); SLEEP(8000);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteRTCP_CNAME(0, tmpStr));
+  TEST_MUSTPASS(_stricmp("Henrik2", tmpStr)); // Should not have changed
+
+  // Disable all
+  TEST_MUSTPASS(encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(rtp_rtcp->SetRTCP_CNAME(0, "Henrik7"));
+  MARK(); SLEEP(8000);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteRTCP_CNAME(0, tmpStr));
+  TEST_MUSTPASS(_stricmp("Henrik7", tmpStr));
+  ANL();
+
+#else
+  TEST(SRTP disabled - Fail tests);
+  ANL();
+
+  TEST_MUSTPASS(!encrypt->EnableSRTPSend(0, kCipherNull, 30, kAuthHmacSha1,
+          20, 4, kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(VE_FUNC_NOT_SUPPORTED != voe_base_->LastError());
+  TEST_MUSTPASS(!encrypt->EnableSRTPReceive(0, kCipherNull, 30, kAuthHmacSha1,
+          20, 4, kEncryptionAndAuthentication, key1));
+  TEST_MUSTPASS(VE_FUNC_NOT_SUPPORTED != voe_base_->LastError());
+  TEST_MUSTPASS(!encrypt->DisableSRTPSend(0));
+  TEST_MUSTPASS(VE_FUNC_NOT_SUPPORTED != voe_base_->LastError());
+  TEST_MUSTPASS(!encrypt->DisableSRTPReceive(0));
+  TEST_MUSTPASS(VE_FUNC_NOT_SUPPORTED != voe_base_->LastError());
+  ANL();
+#endif
+  AOK();
+
+  TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestExternalMedia
+// ----------------------------------------------------------------------------
+
+int VoEExtendedTest::TestExternalMedia() {
+  PrepareTest("VoEExternalMedia");
+
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoEExternalMedia* xmedia = _mgr.ExternalMediaPtr();
+
+  // check if this interface is supported
+  if (!xmedia) {
+    TEST_LOG("VoEExternalMedia is not supported!");
+    return -1;
+  }
+
+#ifdef _USE_EXTENDED_TRACE_
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(
+          GetFilename("VoEExternalMedia_trace.txt").c_str()));
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(
+          kTraceStateInfo | kTraceStateInfo | kTraceWarning |
+          kTraceError | kTraceCritical | kTraceApiCall |
+          kTraceMemory | kTraceInfo));
+#endif
+  TEST_MUSTPASS(voe_base_->Init());
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+
+  int getLen = 0;
+  WebRtc_Word16 vector[32000];
+  memset(vector, 0, 32000 * sizeof(short));
+
+#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+
+  // ExternalPlayoutGetData
+  TEST(ExternalPlayoutGetData);
+  ANL();
+
+  TEST_MUSTPASS(!xmedia->SetExternalPlayoutStatus(true));
+  TEST_MUSTPASS(VE_ALREADY_SENDING != voe_base_->LastError());
+  TEST_MUSTPASS(!xmedia->ExternalPlayoutGetData(vector, 16000, 100, getLen));
+  TEST_MUSTPASS(VE_INVALID_OPERATION != voe_base_->LastError());
+
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(xmedia->SetExternalPlayoutStatus(true));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+
+  TEST_MUSTPASS(xmedia->ExternalPlayoutGetData(vector, 48000, 0, getLen));
+  TEST_MUSTPASS(480 != getLen);
+  SLEEP(10);
+  TEST_MUSTPASS(xmedia->ExternalPlayoutGetData(vector, 16000, 3000, getLen));
+  TEST_MUSTPASS(160 != getLen);
+  SLEEP(10);
+
+  TEST_MUSTPASS(!xmedia->ExternalPlayoutGetData(vector, 8000, 100, getLen));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(!xmedia->ExternalPlayoutGetData(vector, 16000, -1, getLen));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(xmedia->SetExternalPlayoutStatus(false));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+
+  // SetExternalRecording
+  TEST(SetExternalRecording);
+  ANL();
+
+  TEST_MUSTPASS(!xmedia->SetExternalRecordingStatus(true));
+  TEST_MUSTPASS(VE_ALREADY_SENDING != voe_base_->LastError());
+  TEST_MUSTPASS(!xmedia->ExternalRecordingInsertData(vector, 160, 16000, 20));
+  TEST_MUSTPASS(VE_INVALID_OPERATION != voe_base_->LastError());
+
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(xmedia->SetExternalRecordingStatus(true));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+
+  TEST_MUSTPASS(xmedia->ExternalRecordingInsertData(vector, 480, 48000, 0));
+  SLEEP(10);
+  TEST_MUSTPASS(xmedia->ExternalRecordingInsertData(vector, 640, 16000, 0));
+  SLEEP(40);
+
+  TEST_MUSTPASS(!xmedia->ExternalRecordingInsertData(vector, 160, 16000, -1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(!xmedia->ExternalRecordingInsertData(vector, 80, 8000, 20));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(!xmedia->ExternalRecordingInsertData(vector, 0, 16000, 20));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(!xmedia->ExternalRecordingInsertData(vector, 80, 16000, 20));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(!xmedia->ExternalRecordingInsertData(vector, 500, 16000, 20));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(xmedia->SetExternalRecordingStatus(false));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+
+#else // #ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+  TEST_MUSTPASS(!xmedia->SetExternalPlayoutStatus(true));
+  TEST_MUSTPASS(VE_FUNC_NOT_SUPPORTED != voe_base_->LastError());
+  TEST_MUSTPASS(!xmedia->ExternalPlayoutGetData(vector, 16000, 100, getLen));
+  TEST_MUSTPASS(VE_FUNC_NOT_SUPPORTED != voe_base_->LastError());
+  TEST_MUSTPASS(!xmedia->SetExternalRecordingStatus(true));
+  TEST_MUSTPASS(VE_FUNC_NOT_SUPPORTED != voe_base_->LastError());
+  TEST_MUSTPASS(!xmedia->ExternalRecordingInsertData(vector, 160, 16000, 20));
+  TEST_MUSTPASS(VE_FUNC_NOT_SUPPORTED != voe_base_->LastError());
+
+#endif // #ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  ANL();
+  AOK();
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestFile
+// ----------------------------------------------------------------------------
+
+int VoEExtendedTest::TestFile() {
+  PrepareTest("File");
+
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoEFile* file = _mgr.FilePtr();
+  VoECodec* codec = _mgr.CodecPtr();
+
+#ifdef _USE_EXTENDED_TRACE_
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(
+          GetFilename("VoEFile_trace.txt").c_str())); MARK();
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceStateInfo |
+          kTraceStateInfo |
+          kTraceWarning |
+          kTraceError |
+          kTraceCritical |
+          kTraceApiCall |
+          kTraceMemory |
+          kTraceInfo));
+#endif
+
+  TEST_MUSTPASS(voe_base_->Init());
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+
+  ///////////////////////////
+  // Actual test starts here
+
+  const int dT(100);
+
+  TEST(StartPlayingFileLocally);
+  ANL();
+  TEST(StopPlayingFileLocally);
+  ANL();
+
+  voe_base_->StopPlayout(0);
+  std::string output_path = webrtc::test::OutputPath();
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, (output_path + "audio_long16.pcm").c_str()));MARK();
+  voe_base_->StartPlayout(0);
+  MARK(); // file should be mixed in and played out
+  SLEEP(dT);
+  TEST_MUSTPASS(!file->StartPlayingFileLocally(
+          0, (output_path + "audio_long16.pcm").c_str()));
+  MARK(); // should fail (must stop first)
+  TEST_MUSTPASS(voe_base_->LastError() != VE_ALREADY_PLAYING);
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  MARK();
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, (output_path + "audio_long16.pcm").c_str()));
+  MARK(); // should work again (restarts file)
+  SLEEP(dT);
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  MARK();
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, (output_path + "audio_long16.pcm").c_str(),
+          false, kFileFormatPcm16kHzFile));
+  MARK();
+  SLEEP(dT);
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  MARK();
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, (output_path + "audio_long8.pcm").c_str(),
+          false, kFileFormatPcm8kHzFile));
+  MARK();
+  SLEEP(dT);
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  MARK();
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, (output_path + "audio_long16.wav").c_str(),
+          false, kFileFormatPcm8kHzFile));
+  MARK();
+  SLEEP(dT);
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  MARK();
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, (output_path + "audio_long8mulaw.wav").c_str(), false,
+          kFileFormatPcm8kHzFile));
+  MARK();
+  SLEEP(dT);
+
+  // add compressed tests here...
+
+  // TEST_MUSTPASS(file->StopPlayingFileLocally(0)); MARK();
+  // TEST_MUSTPASS(file->StartPlayingFileLocally(
+  //   0, (output_path + "audio_short16.pcm").c_str(), true,
+  //   kFileFormatPcm16kHzFile)); MARK(); // loop
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  MARK();
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, (output_path + "audio_short16.pcm").c_str(), false,
+          kFileFormatPcm16kHzFile, 1.0, 0, 2000));
+  MARK(); // play segment
+  SLEEP(2500);
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  MARK();
+  TEST_MUSTPASS(!file->StartPlayingFileLocally(
+          0, (output_path + "audio_short16.pcm").c_str(), false,
+          kFileFormatPcm16kHzFile, 1.0, 2000, 1000));
+  MARK(); // invalid segment
+  TEST_MUSTPASS(voe_base_->LastError() != VE_BAD_FILE);
+  TEST_MUSTPASS(!file->StartPlayingFileLocally(
+          0, (output_path + "audio_short16.pcm").c_str(), false,
+          kFileFormatPcm16kHzFile, 1.0, 21000, 30000));
+  MARK(); // start > file size
+  TEST_MUSTPASS(voe_base_->LastError() != VE_BAD_FILE);
+  TEST_MUSTPASS(!file->StartPlayingFileLocally(
+          0, (output_path + "audio_short16.pcm").c_str(), false,
+          kFileFormatPcm16kHzFile, 1.0, 100, 100));
+  MARK(); // invalid segment
+  TEST_MUSTPASS(voe_base_->LastError() != VE_BAD_FILE);
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, (output_path + "audio_long16.pcm").c_str()));
+  MARK(); // should work again (restarts file)
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  MARK();
+  TEST_MUSTPASS(!file->StartPlayingFileLocally(0, (InStream*)NULL));
+  MARK(); // just do it
+  TEST_MUSTPASS(voe_base_->LastError() != VE_BAD_FILE);
+
+  AOK();
+  ANL();
+
+  TEST(IsPlayingFileLocally);
+  ANL();
+
+  TEST_MUSTPASS(0 != file->IsPlayingFileLocally(0));
+  MARK(); // inactive
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, (output_path + "audio_long16.pcm").c_str()));
+  MARK();
+  TEST_MUSTPASS(1 != file->IsPlayingFileLocally(0));
+  MARK(); // active
+  AOK();
+  ANL();
+
+  TEST(ScaleLocalFilePlayout);
+  ANL();
+  TEST_MUSTPASS(file->ScaleLocalFilePlayout(0, 1.0));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(file->ScaleLocalFilePlayout(0, 0.0));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(file->ScaleLocalFilePlayout(0, 0.5));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(file->ScaleLocalFilePlayout(0, 0.25));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  MARK();
+  AOK();
+  ANL();
+
+  // Replace microphone with file and play out on remote side
+  // All channels, per channel
+  // Different mixing frequencies
+  TEST(StartPlayingFileAsMicrophone);
+  ANL();
+  TEST(IsPlayingFileAsMicrophone);
+  ANL();
+  TEST(ScaleFileAsMicrophonePlayout);
+  ANL();
+  CodecInst tempCodec;
+  for (int ch = -1; ch < 1; ++ch) // Channel -1 and 0
+  {
+    TEST_LOG("Testing channel = %d \n", ch);
+    for (int fs = 1; fs < 4; ++fs) // nb, wb and swb codecs
+    {
+      switch (fs) {
+        case 1: // nb
+          TEST_LOG("Testing with nb codec \n");
+          tempCodec.channels = 1;
+          tempCodec.pacsize = 160;
+          tempCodec.plfreq = 8000;
+          strcpy(tempCodec.plname, "PCMU");
+          tempCodec.pltype = 0;
+          tempCodec.rate = 64000;
+          break;
+        case 2: // wb
+#ifdef WEBRTC_CODEC_ISAC
+          TEST_LOG("Testing with wb codec \n");
+          tempCodec.channels = 1;
+          tempCodec.pacsize = 480;
+          tempCodec.plfreq = 16000;
+          strcpy(tempCodec.plname, "ISAC");
+          tempCodec.pltype = 103;
+          tempCodec.rate = 32000;
+          break;
+#else
+          TEST_LOG("NOT testing with wb codec - "
+              "WEBRTC_CODEC_ISAC not defined \n");
+          continue;
+#endif
+        case 3: // swb
+#ifdef WEBRTC_CODEC_PCM16
+          TEST_LOG("Testing with swb codec \n");
+          tempCodec.channels = 1;
+          tempCodec.pacsize = 640;
+          tempCodec.plfreq = 32000;
+          strcpy(tempCodec.plname, "L16");
+          tempCodec.pltype = 125;
+          tempCodec.rate = 512000;
+          break;
+#else
+          TEST_LOG("NOT testing with swb codec -"
+              " WEBRTC_CODEC_PCM16 not defined \n");
+          continue;
+#endif
+      }
+      TEST_MUSTPASS(voe_base_->StopSend(0));
+      TEST_MUSTPASS(voe_base_->StopPlayout(0));
+      TEST_MUSTPASS(voe_base_->StopReceive(0));
+      TEST_MUSTPASS(codec->SetRecPayloadType(0, tempCodec));
+      TEST_MUSTPASS(voe_base_->StartReceive(0));
+      TEST_MUSTPASS(voe_base_->StartPlayout(0));
+      TEST_MUSTPASS(voe_base_->StartSend(0));
+      TEST_MUSTPASS(codec->SetSendCodec(0, tempCodec));
+
+      TEST_LOG("File 1 in 16 kHz no mix, 2 in 16 kHz mix,"
+        " 3 in 8 kHz no mix, 4 in 8 kHz mix \n");
+
+      TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(
+              ch, (output_path + "audio_long16.pcm").c_str()));
+      MARK(); // don't mix
+      SLEEP(2000);
+      TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(ch));
+      MARK();
+      TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(
+              ch, (output_path + "audio_long16.wav").c_str(), false, true,
+              kFileFormatWavFile));
+      MARK(); // mix
+      SLEEP(2000);
+      TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(ch));
+      MARK();
+      TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(
+              ch, (output_path + "audio_long8.pcm").c_str(), false, false,
+              kFileFormatPcm8kHzFile));
+      MARK(); // don't mix
+      SLEEP(2000);
+      TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(ch));
+      MARK();
+      TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(
+              ch, (output_path + "audio_long8.pcm").c_str(), false, true,
+              kFileFormatPcm8kHzFile));
+      MARK(); // mix
+      SLEEP(2000);
+      TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(ch));
+      MARK();
+      TEST_MUSTPASS(!file->StartPlayingFileAsMicrophone(
+              ch, (InStream*)NULL));
+      MARK(); // force error
+      AOK();
+      ANL();
+
+      TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(
+              ch, (output_path + "audio_long16.pcm").c_str()));
+      TEST_MUSTPASS(1 != file->IsPlayingFileAsMicrophone(ch));
+      TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(ch));
+      TEST_MUSTPASS(0 != file->IsPlayingFileAsMicrophone(ch));
+      AOK();
+      ANL();
+
+      TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(
+              ch, (output_path + "audio_long16.pcm").c_str()));
+      TEST_MUSTPASS(file->ScaleFileAsMicrophonePlayout(ch, 1.0));
+      MARK();
+      SLEEP(1000);
+      TEST_MUSTPASS(file->ScaleFileAsMicrophonePlayout(ch, 0.5));
+      MARK();
+      SLEEP(1000);
+      TEST_MUSTPASS(file->ScaleFileAsMicrophonePlayout(ch, 0.25));
+      MARK();
+      SLEEP(1000);
+      TEST_MUSTPASS(file->ScaleFileAsMicrophonePlayout(ch, 0.0));
+      MARK();
+      SLEEP(1000);
+      TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(ch));
+      MARK();
+      AOK();
+      ANL();
+    }
+  }
+
+  // Record speaker signal to file
+
+  CodecInst fcomp = { 0, "L16", 8000, 80, 1, 128000 };
+
+  TEST(StartRecordingPlayout);
+  ANL();
+  TEST(StopRecordingPlayout);
+  ANL();
+
+  TEST_MUSTPASS(file->StartRecordingPlayout(0,
+          (output_path + "rec_play16.pcm").c_str()));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopRecordingPlayout(0));
+  MARK();
+
+  fcomp.plfreq = 8000;
+  strcpy(fcomp.plname, "L16");
+  TEST_MUSTPASS(file->StartRecordingPlayout(0,
+      (output_path + "rec_play8.wav").c_str(), &fcomp));
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopRecordingPlayout(0));
+  MARK();
+
+    fcomp.plfreq = 16000;
+  strcpy(fcomp.plname, "L16");
+  TEST_MUSTPASS(file->StartRecordingPlayout(0,
+      (output_path + "rec_play16.wav").c_str(), &fcomp));
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopRecordingPlayout(0));
+  MARK();
+
+  fcomp.pltype = 0;
+  fcomp.plfreq = 8000;
+  strcpy(fcomp.plname, "PCMU");
+  fcomp.rate = 64000;
+  fcomp.pacsize = 160;
+  fcomp.channels = 1;
+
+  TEST_MUSTPASS(file->StartRecordingPlayout(0,
+          (output_path + "rec_play_pcmu.wav").c_str(),
+          &fcomp));
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopRecordingPlayout(0));
+  MARK();
+
+  fcomp.pltype = 8;
+  fcomp.plfreq = 8000;
+  strcpy(fcomp.plname, "PCMA");
+  TEST_MUSTPASS(file->StartRecordingPlayout(0,
+          (output_path + "rec_play_pcma.wav").c_str(),
+          &fcomp));
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopRecordingPlayout(0));
+  MARK();
+
+  fcomp.pltype = 97;
+  fcomp.pacsize = 240;
+  fcomp.rate = 13300;
+  fcomp.plfreq = 8000;
+  strcpy(fcomp.plname, "ILBC");
+  TEST_MUSTPASS(file->StartRecordingPlayout(0,
+          (output_path + "rec_play.ilbc").c_str(),
+          &fcomp));
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopRecordingPlayout(0));
+  MARK();
+
+  TEST_MUSTPASS(file->StartRecordingPlayout(
+          -1, (output_path + "rec_play16_mixed.pcm").c_str()));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopRecordingPlayout(-1));
+  MARK();
+
+  // TEST_MUSTPASS(file->StopPlayingFileLocally(0)); // Why should this work?
+  TEST_LOG("\nplaying out...\n");
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, (output_path + "rec_play.ilbc").c_str(), false,
+          kFileFormatCompressedFile));
+  MARK();
+  SLEEP(2000);
+
+  AOK();
+  ANL();
+
+  // Record microphone signal to file
+  TEST(StartRecordingMicrophone);
+  ANL();
+  TEST(StopRecordingMicrophone);
+  ANL();
+
+  TEST_MUSTPASS(file->StartRecordingMicrophone(
+      (output_path + "rec_mic16.pcm").c_str()));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopRecordingMicrophone());
+  MARK();
+
+  voe_base_->StopSend(0);
+  TEST_MUSTPASS(file->StartRecordingMicrophone(
+      (output_path + "rec_mic16.pcm").c_str()));
+  MARK(); // record without sending as well
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopRecordingMicrophone());
+  MARK();
+  voe_base_->StartSend(0); // restore sending
+
+  fcomp.plfreq = 8000;
+  strcpy(fcomp.plname, "L16");
+  TEST_MUSTPASS(file->StartRecordingMicrophone(
+          (output_path + "rec_play8.wav").c_str(), &fcomp));
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopRecordingMicrophone());
+  MARK();
+
+  fcomp.plfreq = 16000;
+  strcpy(fcomp.plname, "L16");
+  TEST_MUSTPASS(file->StartRecordingMicrophone(
+          (output_path + "rec_play16.wav").c_str(), &fcomp));
+  SLEEP(1000);
+  TEST_MUSTPASS(file->StopRecordingMicrophone());
+  MARK();
+
+  // FT#1810, the following test is to make sure StartRecordingCall will
+  // record both mic and file
+  TEST_LOG("StartRecordingCall, record both mic and file in specific"
+    " channels \n");
+  TEST_LOG("Create maxnumofchannels \n");
+  for (int i = 1; i < voe_base_->MaxNumOfChannels(); i++) {
+    int ch = voe_base_->CreateChannel();
+    TEST_MUSTPASS(ch == -1);
+    TEST_MUSTPASS(voe_base_->StopPlayout(ch));
+  }
+
+  TEST_MUSTPASS(voe_base_->SetSendDestination(1, 12356, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(1, 12356));
+  TEST_MUSTPASS(voe_base_->StartReceive(1));
+  TEST_MUSTPASS(voe_base_->StopPlayout(1));
+  TEST_MUSTPASS(voe_base_->StartSend(1));
+  TEST_MUSTPASS(voe_base_->StartPlayout(1));
+
+  TEST_LOG("ALways playing audio_long16.pcm for "
+    "channel 0 in background \n");
+  fcomp.plfreq = 16000;
+  strcpy(fcomp.plname, "L16");
+  TEST_LOG("Recording microphone to L16, please speak \n");
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(
+          0, (output_path + "audio_long16.pcm").c_str(), true , true));
+  TEST_MUSTPASS(file->StartRecordingMicrophone(
+          (output_path + "rec_play_ch.wav").c_str(), &fcomp));
+  MARK();
+  SLEEP(3000);
+  TEST_MUSTPASS(file->StopRecordingMicrophone());
+  MARK();
+  TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(0));
+  TEST_LOG("Playing recording file, you should only hear what you said \n");
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, (output_path + "rec_play_ch.wav").c_str(),
+          false, kFileFormatWavFile));
+  SLEEP(2500);
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  TEST_LOG("Recording microphone 0 to L16, please speak \n");
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(
+          -1, (output_path + "audio_long16.pcm").c_str(), true , true));
+  TEST_MUSTPASS(file->StartRecordingMicrophone(
+          (output_path + "rec_play_ch_0.wav").c_str(), &fcomp));
+  MARK();
+  SLEEP(3000);
+  TEST_MUSTPASS(file->StopRecordingMicrophone());
+  MARK();
+  TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(-1));
+  TEST_LOG("Playing recording file, you should hear what you said and"
+    " audio_long16.pcm \n");
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, (output_path + "rec_play_ch_0.wav").c_str(),
+          false, kFileFormatWavFile));
+  SLEEP(2500);
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  TEST_LOG("Recording microphone to ilbc, please speak \n");
+  strcpy(fcomp.plname, "ilbc");
+  fcomp.plfreq = 8000;
+  fcomp.pacsize = 160;
+  fcomp.rate = 15200;
+  fcomp.channels = 1;
+  fcomp.pltype = 97;
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(
+          0, (output_path + "audio_long16.pcm").c_str(), true , true));
+  TEST_MUSTPASS(file->StartRecordingMicrophone(
+          (output_path + "rec_play_ch_0.ilbc").c_str(), &fcomp));
+  MARK();
+  SLEEP(3000);
+  TEST_MUSTPASS(file->StopRecordingMicrophone());
+  MARK();
+  TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(0));
+  TEST_LOG("Playing recording file, you should only hear what you said \n");
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, (output_path + "rec_play_ch_0.ilbc").c_str(), false,
+          kFileFormatCompressedFile));
+  SLEEP(2500);
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  for (int i = 1; i < voe_base_->MaxNumOfChannels(); i++) {
+    TEST_MUSTPASS(voe_base_->DeleteChannel(i));
+  }
+
+  AOK();
+  ANL();
+
+  // Record mixed (speaker + microphone) signal to file
+
+
+#if !defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID)
+  TEST(StartRecordingSpeakerStereo);
+  ANL();
+  TEST(StopRecordingSpeakerStereo);
+  ANL();
+
+  VoEHardware* hardware = _mgr.HardwarePtr();
+  TEST_MUSTPASS(NULL == hardware);
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+#if defined(_WIN32)
+  TEST_MUSTPASS(hardware->SetRecordingDevice(-1));
+  TEST_MUSTPASS(hardware->SetPlayoutDevice(-1));
+#else
+  TEST_MUSTPASS(hardware->SetRecordingDevice(0));
+  TEST_MUSTPASS(hardware->SetPlayoutDevice(0));
+#endif
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  MARK();
+
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+#if defined(_WIN32)
+  TEST_MUSTPASS(hardware->SetRecordingDevice(-1));
+  TEST_MUSTPASS(hardware->SetPlayoutDevice(-1));
+#else
+  TEST_MUSTPASS(hardware->SetRecordingDevice(0));
+  TEST_MUSTPASS(hardware->SetPlayoutDevice(0));
+#endif
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+
+  AOK();
+  ANL();
+#else
+  TEST_LOG("Skipping stereo record tests -"
+      " MAC_IPHONE or WEBRTC_ANDROID is defined \n");
+#endif // #if !defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID)
+  // Conversion between different file formats
+
+#if defined(MAC_IPHONE) || defined(WEBRTC_ANDROID)
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+#endif
+
+  TEST(ConvertPCMToWAV);
+  ANL();
+
+  TEST_MUSTPASS(file->ConvertPCMToWAV(
+          (output_path + "audio_long16.pcm").c_str(),
+          (output_path + "singleUserDemoConv.wav").c_str()));
+  MARK();
+  TEST_MUSTPASS(!file->ConvertPCMToWAV((InStream*)NULL,
+          (OutStream*)NULL));MARK(); // invalid stream handles
+  AOK();
+  ANL();
+
+  TEST(ConvertWAVToPCM);
+  ANL();
+
+  TEST_MUSTPASS(file->ConvertWAVToPCM(
+          (output_path + "audio_long16.wav").c_str(),
+          (output_path + "singleUserDemoConv.pcm").c_str()));
+  MARK();
+  TEST_MUSTPASS(!file->ConvertWAVToPCM((InStream*)NULL, (OutStream*)NULL));
+  MARK(); // invalid stream handles
+  AOK();
+  ANL();
+
+  TEST(ConvertPCMToCompressed);
+  ANL();
+
+  fcomp.plfreq = 16000;
+  strcpy(fcomp.plname, "L16");
+  TEST_MUSTPASS(!file->ConvertPCMToCompressed(
+          (output_path + "audio_long16.pcm").c_str(),
+          (output_path + "singleUserDemoConv16_dummy.wav").c_str(), &fcomp));
+  MARK(); // should not be supported
+
+  fcomp.plfreq = 8000;
+  strcpy(fcomp.plname, "ilbc");
+  fcomp.pacsize = 160;
+  fcomp.rate = 15200;
+  fcomp.pltype = 97;
+  fcomp.channels = 1;
+  TEST_MUSTPASS(file->ConvertPCMToCompressed(
+          (output_path + "audio_long16.pcm").c_str(),
+          (output_path + "singleUserDemoConv.ilbc").c_str(), &fcomp));MARK();
+  AOK();ANL();
+
+  TEST(ConvertCompressedToPCM);
+  ANL();
+
+  TEST_MUSTPASS(file->ConvertCompressedToPCM(
+          (output_path + "singleUserDemoConv.ilbc").c_str(),
+          (output_path + "singleUserDemoConv_ilbc.pcm").c_str()));MARK();
+  TEST_MUSTPASS(!file->ConvertCompressedToPCM(
+          (output_path + "audio_long16.pcm").c_str(),
+          (output_path + "singleUserDemoConv_dummy.pcm").c_str()));MARK();
+  AOK();ANL();
+
+#if defined(MAC_IPHONE) || defined(WEBRTC_ANDROID)
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+#endif
+
+  // Misc file functions
+  TEST(GetFileDuration);
+  ANL();
+
+  int dur;
+
+  TEST_MUSTPASS(file->GetFileDuration(
+          (output_path + "audio_long16.pcm").c_str(), dur));
+  TEST_MUSTPASS(file->GetFileDuration(
+          (output_path + "audio_long8.pcm").c_str(),
+          dur, kFileFormatPcm8kHzFile));
+  TEST_MUSTPASS(file->GetFileDuration(
+          (output_path + "audio_long16.pcm").c_str(),
+          dur, kFileFormatPcm16kHzFile));
+  TEST_MUSTPASS(file->GetFileDuration(
+          (output_path + "audio_long16.wav").c_str(),
+          dur, kFileFormatPcm8kHzFile));
+  TEST_MUSTPASS(file->GetFileDuration(
+          (output_path + "singleUserDemoConv.ilbc").c_str(), dur,
+          kFileFormatCompressedFile));
+
+  AOK();
+  ANL();
+
+  TEST(GetPlaybackPosition);
+  ANL();
+
+  int pos;
+
+  TEST_MUSTPASS(file->StartPlayingFileLocally(
+          0, (output_path + "audio_long16.pcm").c_str()));
+  SLEEP(1000);
+  TEST_MUSTPASS(file->GetPlaybackPosition(0, pos));
+  MARK(); // position should be ~1000
+  SLEEP(1000);
+  TEST_MUSTPASS(file->GetPlaybackPosition(0, pos));
+  MARK(); // position should be ~2000
+  // SLEEP(70*1000);
+  // file is no longer playing
+  // TEST_MUSTPASS(file->GetPlaybackPosition(0, pos)); MARK();
+  TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  AOK();
+  ANL();
+
+  // These tests are related to defect 5136
+  // They play .wav files with different sample freq for 5s
+  char localFiles[7][50] = { "audio_tiny8.wav", "audio_tiny11.wav",
+      "audio_tiny16.wav", "audio_tiny22.wav", "audio_tiny32.wav",
+      "audio_tiny44.wav", "audio_tiny48.wav" };
+  char freq[7][5] = { "8", "11", "16", "22", "32", "44.1", "48" };
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  for (int i = 0; i < 7; i++) {
+    TEST_LOG("Playing file %s, in %s KHz \n", localFiles[i], freq[i]);
+    TEST_MUSTPASS(file->StartPlayingFileLocally(
+            0, (output_path + localFiles[i]).c_str(),
+            false, kFileFormatWavFile, 1));
+    SLEEP(4500); // The file should not end
+    TEST_MUSTPASS(file->StopPlayingFileLocally(0));
+  }
+
+  // TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(0)); // Should not work
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  AOK();
+  ANL();
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestHardware
+// ----------------------------------------------------------------------------
+
+int VoEExtendedTest::TestHardware() {
+  PrepareTest("Hardware");
+
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoEHardware* hardware = _mgr.HardwarePtr();
+
+#ifdef _USE_EXTENDED_TRACE_
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile((output_path +
+              "VoEHardware_trace.txt").c_str()));
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceStateInfo |
+          kTraceStateInfo |
+          kTraceWarning |
+          kTraceError |
+          kTraceCritical |
+          kTraceApiCall |
+          kTraceMemory |
+          kTraceInfo));
+#endif
+
+  // Set/GetAudioDeviceLayer
+  TEST(Set/GetAudioDeviceLayer);
+  ANL();
+  AudioLayers wantedLayer = kAudioPlatformDefault;
+  AudioLayers givenLayer;
+
+#if defined(_WIN32)
+  wantedLayer = kAudioWindowsCore;
+  hardware->SetAudioDeviceLayer(wantedLayer);
+  TEST_LOG("If you run on XP or below, CoreAudio "
+      "should not be able to set.\n");
+  TEST_LOG("If you run on Vista or above, CoreAudio "
+      "should be able to set.\n");
+  TEST_LOG("Verify that this is the case.\n");
+
+  TEST_MUSTPASS(voe_base_->Init());
+
+  TEST_MUSTPASS(hardware->GetAudioDeviceLayer(givenLayer));
+  if(givenLayer == kAudioWindowsCore)
+  {
+    TEST_LOG("CoreAudio was set\n");
+  }
+  else
+  {
+    TEST_LOG("CoreAudio was *not* set\n");
+  }
+
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  wantedLayer = kAudioWindowsWave;
+  TEST_MUSTPASS(hardware->SetAudioDeviceLayer(wantedLayer));
+  TEST_LOG("Wave audio should always be able to set.\n");
+
+  TEST_MUSTPASS(voe_base_->Init());
+
+  TEST_MUSTPASS(hardware->GetAudioDeviceLayer(givenLayer));
+  if(givenLayer == kAudioWindowsWave)
+  {
+    TEST_LOG("Wave audio was set\n");
+  }
+  else
+  {
+    TEST_LOG("Wave audio was not set\n");
+  }
+
+  TEST_MUSTPASS(voe_base_->Terminate());
+  // end _WIN32
+#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+  wantedLayer = kAudioLinuxPulse;
+  TEST_MUSTPASS(hardware->SetAudioDeviceLayer(wantedLayer));
+  TEST_LOG("If you run on Linux with no/unsupported PA version, PulseAudio "
+      "7should not be able to set.\n");
+  TEST_LOG("If you run on Linux with supported PA version running, PulseAudio"
+      " should be able to set.\n");
+  TEST_LOG("Verify that this is the case.\n");
+
+  TEST_MUSTPASS(voe_base_->Init());
+
+  TEST_MUSTPASS(hardware->GetAudioDeviceLayer(givenLayer));
+  if(givenLayer == kAudioLinuxPulse)
+  {
+    TEST_LOG("\nPulseAudio was set\n");
+  }
+  else
+  {
+    TEST_LOG("\nPulseAudio was not set\n");
+  }
+
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  wantedLayer = kAudioLinuxAlsa;
+  TEST_MUSTPASS(hardware->SetAudioDeviceLayer(wantedLayer));
+  TEST_LOG("ALSA audio should always be able to set.\n");
+
+  TEST_MUSTPASS(voe_base_->Init());
+
+  TEST_MUSTPASS(hardware->GetAudioDeviceLayer(givenLayer));
+  if(givenLayer == kAudioLinuxAlsa)
+  {
+    TEST_LOG("\nALSA audio was set\n");
+  }
+  else
+  {
+    TEST_LOG("\nALSA audio was not set\n");
+  }
+
+  TEST_MUSTPASS(voe_base_->Terminate());
+#endif // defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+  // Invalid arguments should be ignored.
+  wantedLayer = (AudioLayers) 17;
+  TEST_MUSTPASS(hardware->SetAudioDeviceLayer(wantedLayer));
+  TEST_MUSTPASS(hardware->GetAudioDeviceLayer(givenLayer));
+  ASSERT_TRUE(givenLayer == kAudioPlatformDefault);
+  MARK();
+
+  // Basic usage
+  wantedLayer = kAudioPlatformDefault;
+  TEST_MUSTPASS(hardware->SetAudioDeviceLayer(wantedLayer));
+  TEST_MUSTPASS(hardware->GetAudioDeviceLayer(givenLayer));
+  TEST_MUSTPASS(givenLayer != wantedLayer);
+  MARK();
+
+  TEST_MUSTPASS(voe_base_->Init());
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  wantedLayer = kAudioPlatformDefault;
+  TEST_MUSTPASS(-1 != hardware->SetAudioDeviceLayer(wantedLayer));
+  TEST_MUSTPASS(VE_ALREADY_INITED != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(hardware->GetAudioDeviceLayer(givenLayer));
+  MARK();
+  switch (givenLayer) {
+    case kAudioPlatformDefault:
+      // already set above
+      break;
+    case kAudioWindowsCore:
+      TEST_LOG("\nRunning kAudioWindowsCore\n");
+      break;
+    case kAudioWindowsWave:
+      TEST_LOG("\nRunning kAudioWindowsWave\n");
+      break;
+    case kAudioLinuxAlsa:
+      TEST_LOG("\nRunning kAudioLinuxAlsa\n");
+      break;
+    case kAudioLinuxPulse:
+      TEST_LOG("\nRunning kAudioLinuxPulse\n");
+      break;
+    default:
+      TEST_LOG("\nERROR: Running unknown audio layer!!\n");
+      return -1;
+  }
+  ANL();
+
+#if !defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID)
+  // GetRecording/PlayoutDeviceStatus
+  TEST(Getrecording/PlayoutDeviceStatus);
+  ANL();
+  bool isRecAvailable = false;
+  bool isPlayAvailable = false;
+  TEST_MUSTPASS(hardware->GetRecordingDeviceStatus(isRecAvailable));
+  TEST_MUSTPASS(!isRecAvailable);
+  MARK();
+  TEST_MUSTPASS(hardware->GetPlayoutDeviceStatus(isPlayAvailable));
+  TEST_MUSTPASS(!isPlayAvailable);
+  MARK();
+
+  ANL();
+
+  int nRec = 0, nPlay = 0;
+  char devName[128];
+  char guidName[128];
+  int idx;
+
+  TEST_MUSTPASS(hardware->GetNumOfPlayoutDevices(nPlay));
+
+  // GetPlayoutDeviceName
+  TEST(GetPlayoutDeviceName);
+  ANL();
+  TEST_MUSTPASS(-1 != hardware->GetPlayoutDeviceName(nPlay, devName,
+          guidName));
+  TEST_MUSTPASS(VE_CANNOT_RETRIEVE_DEVICE_NAME != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(-1 != hardware->GetPlayoutDeviceName(-2, devName, guidName));
+  TEST_MUSTPASS(VE_CANNOT_RETRIEVE_DEVICE_NAME != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(-1 != hardware->GetPlayoutDeviceName(nPlay+1, devName,
+          guidName));
+  TEST_MUSTPASS(VE_CANNOT_RETRIEVE_DEVICE_NAME != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(-1 != hardware->GetPlayoutDeviceName(0, NULL, guidName));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(hardware->GetPlayoutDeviceName(0, devName, NULL));
+
+  // default tests
+  for (idx = 0; idx < nPlay; idx++) {
+    TEST_MUSTPASS(hardware->GetPlayoutDeviceName(idx, devName, guidName));
+    MARK();
+    TEST_MUSTPASS(hardware->SetPlayoutDevice(idx));
+  }
+
+  ANL();
+
+  TEST_MUSTPASS(hardware->GetNumOfRecordingDevices(nRec));
+
+  // GetRecordingDeviceName
+  TEST(GetRecordingDeviceName);
+  ANL();
+  TEST_MUSTPASS(-1 != hardware->GetRecordingDeviceName(nRec, devName,
+          guidName));
+  TEST_MUSTPASS(VE_CANNOT_RETRIEVE_DEVICE_NAME != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(-1 != hardware->GetRecordingDeviceName(-2, devName, guidName));
+  TEST_MUSTPASS(VE_CANNOT_RETRIEVE_DEVICE_NAME != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(-1 != hardware->GetRecordingDeviceName(nRec+1, devName,
+          guidName));
+  TEST_MUSTPASS(VE_CANNOT_RETRIEVE_DEVICE_NAME != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(-1 != hardware->GetRecordingDeviceName(0, NULL, guidName));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(hardware->GetRecordingDeviceName(0, devName, NULL));
+
+  // default tests
+  for (idx = 0; idx < nRec; idx++) {
+    TEST_MUSTPASS(hardware->GetRecordingDeviceName(idx, devName, guidName));
+    MARK();
+    TEST_MUSTPASS(hardware->SetRecordingDevice(idx));
+  }
+  ANL();
+
+    // // SetRecordingDevice
+  TEST(SetRecordingDevice);
+  ANL();
+  TEST_MUSTPASS(hardware->SetRecordingDevice(0));
+  MARK();
+  TEST_MUSTPASS(hardware->SetRecordingDevice(0, kStereoLeft));
+  MARK();
+  TEST_MUSTPASS(hardware->SetRecordingDevice(0, kStereoRight));
+  MARK();
+  ANL();
+
+  // SetPlayoutDevice
+  TEST(SetPlayoutDevice);
+  ANL();
+#if defined(_WIN32)
+  TEST_MUSTPASS(hardware->SetPlayoutDevice(-1)); MARK();
+#else
+  TEST_MUSTPASS(hardware->SetPlayoutDevice(0));
+  MARK();
+#endif
+  ANL();
+#endif // #if !defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID)
+#if defined(MAC_IPHONE)
+  TEST(ResetSoundDevice); ANL();
+
+  for (int p=0; p<=60; p+=20)
+  {
+    TEST_LOG("Resetting sound device several times with pause %d ms\n", p);
+    for (int l=0; l<50; ++l)
+    {
+      TEST_MUSTPASS(hardware->ResetAudioDevice()); MARK();
+      SLEEP(p);
+    }
+    ANL();
+  }
+
+  TEST_LOG("Start streaming - verify the audio after each batch of resets \n");
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 8000, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0,8000));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(2000);
+
+  SLEEP(2000);
+  for (int p=0; p<=60; p+=20)
+  {
+    TEST_LOG("Resetting sound device several time with pause %d ms\n", p);
+    for (int l=0; l<20; ++l)
+    {
+      TEST_MUSTPASS(hardware->ResetAudioDevice()); MARK();
+      SLEEP(p);
+    }
+    ANL();
+    SLEEP(2000);
+  }
+
+  TEST_LOG("Stop streaming \n");
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+#endif // defined(MAC_IPHONE))
+#ifdef MAC_IPHONE
+  TEST_LOG("\nNOTE: Always run hardware tests also without extended tests "
+      "enabled,\nsince the extended tests are pre-streaming tests only.\n");
+#endif
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  ANL();
+  AOK();
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestNetEqStats
+// ----------------------------------------------------------------------------
+
+int VoEExtendedTest::TestNetEqStats() {
+  PrepareTest("NetEqStats (!EMPTY!)");
+
+  AOK();
+  ANL();
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestNetwork
+//
+// ----------------------------------------------------------------------------
+
+int VoEExtendedTest::TestNetwork() {
+  PrepareTest("Network");
+
+#ifdef WEBRTC_ANDROID
+  int sleepTime = 200;
+  int sleepTime2 = 250;
+#elif defined(MAC_IPHONE) // MAC_IPHONE needs more delay for getSourceInfo()
+  int sleepTime = 150;
+  int sleepTime2 = 200;
+#else
+  int sleepTime = 100;
+  int sleepTime2 = 200;
+#endif
+
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoEFile* file = _mgr.FilePtr();
+  VoENetwork* netw = _mgr.NetworkPtr();
+
+#ifdef _USE_EXTENDED_TRACE_
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile((output_path +
+              "VoENetwork_trace.txt").c_str()));
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceStateInfo |
+          kTraceStateInfo |
+          kTraceWarning |
+          kTraceError |
+          kTraceCritical |
+          kTraceApiCall |
+          kTraceMemory |
+          kTraceInfo));
+#endif
+
+  TEST_MUSTPASS(voe_base_->Init());
+
+  // ------------------------------------------------------------------------
+  // >> GetLocalIP
+  //
+  // State: VE initialized, no existing channels
+  TEST(GetLocalIP);
+  ANL();
+
+#ifdef MAC_IPHONE
+  // Should fail
+  TEST_MUSTPASS(!netw->GetLocalIP(NULL, 0)); MARK();
+  TEST_ERROR(VE_FUNC_NOT_SUPPORTED);
+
+  ANL();
+  printf("NOTE: Local IP must be set in source code (line %d) \n",
+      __LINE__ + 1);
+  const char* localIP = "192.168.1.4";
+
+#else
+  // Must be big enough so that we can print an IPv6 address.
+  char localIP[256] = {0};
+
+  // invalid parameter
+  TEST_MUSTPASS(!netw->GetLocalIP(NULL));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+
+  // default function calls (IPv4)
+  TEST_MUSTPASS(netw->GetLocalIP(localIP));
+  MARK();
+  TEST_LOG("[local IPv4: %s]\n", localIP);
+  TEST_MUSTPASS(netw->GetLocalIP(localIP));
+  MARK();
+
+#if !defined(WEBRTC_MAC) && !defined(WEBRTC_ANDROID)
+  // default function calls (IPv6)
+  TEST_MUSTPASS(netw->GetLocalIP(localIP, true));
+  MARK();
+  TEST_LOG("[local IPv6: %s]\n", localIP);
+  TEST_MUSTPASS(netw->GetLocalIP(localIP, true));
+  MARK();
+#endif
+
+  // one last call to ensure that local
+  TEST_MUSTPASS(netw->GetLocalIP(localIP));
+  MARK();
+#endif
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of GetLocalIP
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> GetSourceInfo
+  //
+  // - VE initialized
+  // - no existing channels
+  TEST(GetSourceInfo);
+  ANL();
+
+  int rtpPort(0);
+  int rtcpPort(0);
+  char ipaddr[64] = { 0 };
+  ExtendedTestTransport* ptrTransport(NULL);
+
+  // call without valid channel
+  TEST_MUSTPASS(!netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // NULL as input string
+  TEST_MUSTPASS(!netw->GetSourceInfo(0, rtpPort, rtcpPort, NULL));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+
+  // call when external transport is enabled
+  ptrTransport = new ExtendedTestTransport(netw);
+  TEST_MUSTPASS(netw->RegisterExternalTransport(0, *ptrTransport));
+  TEST_MUSTPASS(!netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_ERROR(VE_EXTERNAL_TRANSPORT_ENABLED);
+  delete ptrTransport;
+
+  // call when external transport is disabled (no packet received yet)
+  TEST_MUSTPASS(netw->DeRegisterExternalTransport(0));
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 0);
+  TEST_MUSTPASS(rtcpPort != 0);
+  TEST_MUSTPASS(strcmp(ipaddr, "") != 0);
+  // send and receive packets with default settings for a while
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 8000));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 8000, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime2); // does not guarantee RTCP
+
+  // verify remote parameters (exclude RTCP)
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 8000);
+  TEST_MUSTPASS(strcmp(ipaddr, "127.0.0.1") != 0);
+
+  // ensure that valid results are maintained after StopListen
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 8000);
+  TEST_MUSTPASS(strcmp(ipaddr, "127.0.0.1") != 0);
+
+  // verify that results are maintained after new call to SetLocalReceiver
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 8000));
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 8000);
+  TEST_MUSTPASS(strcmp(ipaddr, "127.0.0.1") != 0);
+
+  // STATE: not listening, not sending
+  // send and receive packets with other settings for a while
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 9005));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 9005, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+
+  // STATE: listening, sending
+
+  // verify new remote parameters
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 9005);
+  TEST_MUSTPASS(strcmp(ipaddr, "127.0.0.1") != 0);
+
+  // restart sending to and from local IP
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 9005, kVoEDefault, localIP));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 9005, localIP));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+
+  // verify new remote parameters
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 9005);
+  TEST_MUSTPASS(strcmp(ipaddr, localIP) != 0); // should not be "127.0.0.1"
+
+  // use non-default source port in outgoing packets
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 9005));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 9005, "127.0.0.1", 9010));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+
+  // verify new remote parameters
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 9010);
+  TEST_MUSTPASS(strcmp(ipaddr, "127.0.0.1") != 0);
+
+  // STATE: listening and sending using an extra local socket
+
+  // stop/start sending
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+
+  // verify that the unique source port is maintained for the extra socket
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 9010);
+  TEST_MUSTPASS(strcmp(ipaddr, "127.0.0.1") != 0);
+
+  // set new source port for outgoing packets (9010 -> 9020)
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 9005, "127.0.0.1", 9020));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+#ifdef MAC_IPHONE
+  SLEEP(500); // Need extra pause for some reason
+#endif
+
+  // verify that the unique source port is set for the new extra socket
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 9020);
+  // STATE: listening and sending using an extra local socket
+
+  // remove extra send socket and restart call again
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0)); // delete channel => destroys the
+  // extra socket
+  TEST_MUSTPASS(voe_base_->CreateChannel()); // new channel uses one socket only
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 8000)); // use new port as well
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 8000, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+
+  // verify that remote info is correct
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 8000);
+  TEST_MUSTPASS(strcmp(ipaddr, "127.0.0.1") != 0);
+
+  // STATE: listening and sending using shared socket
+
+  // use non-default source port in outgoing packets to create extra send
+  // socket
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 7000));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 7000, "127.0.0.1", 7010));
+  // RTP src is 7010 => RTCP src = 7011
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+  // verify new remote parameters
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 7010);
+  TEST_MUSTPASS(strcmp(ipaddr, "127.0.0.1") != 0);
+
+  // check RTCP port as well (should be 7010 + 1 = 7011)
+  Sleep(8000, true);
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 7010);
+  TEST_MUSTPASS(rtcpPort != 7011);
+  TEST_MUSTPASS(strcmp(ipaddr, "127.0.0.1") != 0);
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of GetSourceInfo
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> SetExternalTransport
+  //
+  // - VE initialized
+  // - no existing channels
+  // - no media
+  //
+  TEST(SetExternalTransport);
+  ANL();
+
+  ptrTransport = new ExtendedTestTransport(netw);
+
+  // call without valid channel
+  TEST_MUSTPASS(!netw->DeRegisterExternalTransport(0));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // different valid call combinations
+  TEST_MUSTPASS(netw->RegisterExternalTransport(0, *ptrTransport));
+  MARK();
+  TEST_MUSTPASS(netw->DeRegisterExternalTransport(0));
+  MARK();
+  TEST_MUSTPASS(netw->DeRegisterExternalTransport(0));
+  MARK();
+  TEST_MUSTPASS(netw->RegisterExternalTransport(0, *ptrTransport));
+  MARK();
+  TEST_MUSTPASS(!netw->RegisterExternalTransport(0, *ptrTransport));
+  MARK(); // must deregister first
+  TEST_MUSTPASS(netw->DeRegisterExternalTransport(0));
+  MARK();
+
+  // STATE: external transport is disabled
+
+  // initialize sending and ensure that external transport can't be enabled
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 1234, "127.0.0.2"));
+  TEST_MUSTPASS(!netw->RegisterExternalTransport(0, *ptrTransport));
+  MARK();
+  TEST_ERROR(VE_SEND_SOCKETS_CONFLICT);
+
+  // restart channel to ensure that "initialized sender" state is cleared
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // initialize receiving and ensure that external transport can't be enabled
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 5678));
+  TEST_MUSTPASS(!netw->RegisterExternalTransport(0, *ptrTransport));
+  MARK();
+  TEST_ERROR(VE_RECEIVE_SOCKETS_CONFLICT);
+
+  // restart channel to ensure that "initialized receiver" state is cleared
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // enable external transport and verify that "emulated loopback" works
+  TEST_MUSTPASS(netw->RegisterExternalTransport(0, *ptrTransport));
+  MARK();
+  TEST_MUSTPASS(voe_base_->StartSend(0)); // should only start recording
+  TEST_MUSTPASS(!netw->RegisterExternalTransport(0, *ptrTransport));
+  MARK(); // should fail
+  TEST_MUSTPASS(netw->DeRegisterExternalTransport(0));
+  MARK();
+  TEST_MUSTPASS(netw->RegisterExternalTransport(0, *ptrTransport));
+  MARK();
+  Play(0, 2000, true, true); // play file as mic and verify loopback audio
+  TEST_MUSTPASS(netw->DeRegisterExternalTransport(0));
+  MARK();
+
+  // STATE: external transport is disabled
+#if defined(WEBRTC_ANDROID) || defined(MAC_IPHONE)
+  int testError = VE_FUNC_NOT_SUPPORTED;
+#else
+  int testError = VE_EXTERNAL_TRANSPORT_ENABLED;
+#endif
+
+  // check all APIs that should fail when external transport is enabled
+  int DSCP, priority, serviceType, overrideDSCP, nBytes(0);
+  bool useSetSockopt, enabled;
+  TEST_MUSTPASS(netw->RegisterExternalTransport(0, *ptrTransport));
+  MARK();
+  TEST_MUSTPASS(!voe_base_->SetLocalReceiver(0, 12345));
+  TEST_ERROR(VE_EXTERNAL_TRANSPORT_ENABLED);
+  TEST_MUSTPASS(!voe_base_->GetLocalReceiver(0, rtpPort, rtcpPort, ipaddr));
+  TEST_ERROR(VE_EXTERNAL_TRANSPORT_ENABLED);
+  TEST_MUSTPASS(!voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  TEST_ERROR(VE_EXTERNAL_TRANSPORT_ENABLED);
+  TEST_MUSTPASS(!voe_base_->GetSendDestination(0, rtpPort, ipaddr, rtpPort,
+          rtcpPort));
+  TEST_ERROR(VE_EXTERNAL_TRANSPORT_ENABLED);
+  TEST_MUSTPASS(!netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  TEST_ERROR(VE_EXTERNAL_TRANSPORT_ENABLED);
+  TEST_MUSTPASS(!netw->EnableIPv6(0))
+  TEST_ERROR(testError);
+  TEST_MUSTPASS(netw->IPv6IsEnabled(0) != false)
+  TEST_ERROR(VE_EXTERNAL_TRANSPORT_ENABLED);
+  TEST_MUSTPASS(!netw->SetSourceFilter(0, 12345, 12346));
+  TEST_ERROR(VE_EXTERNAL_TRANSPORT_ENABLED);
+  TEST_MUSTPASS(!netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  TEST_ERROR(VE_EXTERNAL_TRANSPORT_ENABLED);
+
+  // modified i VoE 3.4 (can be called also for external transport)
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+
+#if (!defined(_WIN32) && !defined(WEBRTC_LINUX) && !defined(WEBRTC_MAC)) || \
+      defined(WEBRTC_EXTERNAL_TRANSPORT)
+  testError = VE_FUNC_NOT_SUPPORTED;
+#else
+  testError = VE_EXTERNAL_TRANSPORT_ENABLED;
+#endif
+  TEST_MUSTPASS(!netw->SetSendTOS(0, 0));
+  TEST_ERROR(testError);
+  TEST_MUSTPASS(!netw->GetSendTOS(0, DSCP, priority, useSetSockopt));
+  TEST_ERROR(testError);
+#if !defined(_WIN32) || defined(WEBRTC_EXTERNAL_TRANSPORT)
+  testError = VE_FUNC_NOT_SUPPORTED;
+#else
+  testError = VE_EXTERNAL_TRANSPORT_ENABLED;
+#endif
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, false, 0));
+  TEST_ERROR(testError);
+  TEST_MUSTPASS(!netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  TEST_ERROR(testError);
+  char dummy[1] = { 'a' };
+  TEST_MUSTPASS(!netw->SendUDPPacket(0, dummy, 1, nBytes));
+  TEST_ERROR(VE_EXTERNAL_TRANSPORT_ENABLED);
+
+  // always disable external transport before deleting the Transport object;
+  // will lead to crash for RTCP transmission otherwise
+  TEST_MUSTPASS(netw->DeRegisterExternalTransport(0));
+  MARK();
+  delete ptrTransport;
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of SetExternalTransport
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> EnableIPv6
+  //
+  // - VE initialized
+  // - no existing channels
+  // - no media
+  // - NOTE: set _ENABLE_IPV6_TESTS_ to include these tests
+  // - http://www.microsoft.com/resources/documentation/windows/xp/all/
+  //   proddocs/en-us/sag_ip_v6_pro_rt_enable.mspx?mfr=true
+  // >> ipv6 install
+  // >> ipv6 [-v] if [IfIndex]
+  // >> ping6 ::1
+  // >> ping6 fe80::1
+
+#ifdef _ENABLE_IPV6_TESTS_
+
+  TEST(EnableIPv6); ANL();
+
+  // call without valid channel
+  TEST_MUSTPASS(!netw->EnableIPv6(0)); MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // call with enabled external transport
+  ptrTransport = new ExtendedTestTransport(netw);
+  TEST_MUSTPASS(netw->RegisterExternalTransport(0, *ptrTransport));
+  TEST_MUSTPASS(!netw->EnableIPv6(0)); MARK();
+  TEST_ERROR(VE_EXTERNAL_TRANSPORT_ENABLED);
+  TEST_MUSTPASS(netw->DeRegisterExternalTransport(0));
+  delete ptrTransport;
+
+  // Test "locking" to IPv4
+  TEST_MUSTPASS(netw->IPv6IsEnabled(0)); MARK(); // After this call we cannot
+  // enable IPv6
+  TEST_MUSTPASS(!netw->EnableIPv6(0)); MARK(); // Should fail
+
+  // Check that IPv6 address is invalid
+  TEST_MUSTPASS(!voe_base_->SetSendDestination(0, 8000, "::1")); MARK(); // fail
+
+  // New channel
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // valid default call
+  TEST_MUSTPASS(netw->EnableIPv6(0)); MARK();
+  TEST_MUSTPASS(netw->GetLocalIP(localIP)); MARK(); // should still read IPv4
+  TEST_LOG("[local IPv4: %s]", localIP);
+
+  // ensure that Ipv6 is enabled
+  TEST_MUSTPASS(netw->IPv6IsEnabled(0) != true);
+
+  // check that IPv4 address is invalid
+  TEST_MUSTPASS(!voe_base_->SetSendDestination(0, 8000, "127.0.0.1"));
+  TEST_ERROR(VE_INVALID_IP_ADDRESS);
+
+  // verify usage of IPv6 loopback address
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 8000));
+  // IPv6 loopback address is 0:0:0:0:0:0:0:1
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 8000, "::1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(!netw->EnableIPv6(0)); MARK(); // Should fail
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  Play(0, 2000, true, true);
+  ANL();
+
+  // Restart channel
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  TEST_MUSTPASS(netw->EnableIPv6(0)); MARK();
+  // ensure that Ipv6 is enabled
+  TEST_MUSTPASS(netw->IPv6IsEnabled(0) != true);
+
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 8000));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  // IPv6 loopback address is 0:0:0:0:0:0:0:1
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 8000, "::1"));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  file->StartPlayingFileAsMicrophone(0, _mgr.AudioFilename(), true,
+      true);
+  SLEEP(500); // ensure that we receieve some packets
+
+  // SetSourceFilter and GetSourceFilter
+  TEST(SetSourceFilter and GetSourceFilter for IPv6); ANL();
+  char sourceIp[64] =
+  { 0};
+  char filterIp[64] =
+  { 0};
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, sourceIp));
+  TEST_LOG("Source port: %d \n", rtpPort);
+  TEST_LOG("Source RTCP port: %d \n", rtcpPort);
+  TEST_LOG("Source IP: %s \n", sourceIp);
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, filterIp));
+  TEST_LOG("Filter port RTP: %d \n", rtpPort);
+  TEST_LOG("Filter port RTCP: %d \n", rtcpPort);
+  TEST_LOG("Filter IP: %s \n", filterIp);
+  TEST_MUSTPASS(0 != rtpPort);
+  TEST_MUSTPASS(0 != rtcpPort);
+  TEST_MUSTPASS(filterIp[0] != '\0');
+  TEST_LOG("Set filter IP to %s => should hear audio\n", sourceIp);
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, sourceIp));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, filterIp));
+  TEST_MUSTPASS(0 != rtpPort);
+  TEST_MUSTPASS(0 != rtcpPort);
+  TEST_MUSTPASS(_stricmp(filterIp, sourceIp));
+  SLEEP(1500);
+  TEST_LOG("Set filter IP to ::10:10:10 => should *not* hear audio\n");
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, "::10:10:10"));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, filterIp));
+  TEST_MUSTPASS(_stricmp(filterIp, "::10:10:10"));
+  SLEEP(1500);
+  TEST_LOG("Disable IP filter => should hear audio again\n");
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, "::0"));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, filterIp));
+  TEST_MUSTPASS(_stricmp(filterIp, "::"));
+  SLEEP(1500);
+  TEST_LOG("Set filter IP to ::10:10:10 => should *not* hear audio\n");
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, "::10:10:10"));
+  SLEEP(1500);
+  TEST_LOG("Disable IP filter => should hear audio again\n");
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, NULL));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, filterIp));
+  TEST_MUSTPASS(filterIp[0] != '\0');
+  SLEEP(1500);
+  TEST_LOG("Set filter IP to ::10:10:10 => should *not* hear audio\n");
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, "::10:10:10"));
+  SLEEP(1500);
+  TEST_LOG("Disable IP filter => should hear audio again\n");
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, "::"));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, filterIp));
+  TEST_MUSTPASS(_stricmp(filterIp, "::"));
+  SLEEP(1500);
+
+  file->StopPlayingFileAsMicrophone(0);
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+
+#endif // #ifdef _ENABLE_IPV6_TESTS_
+  // >> end of EnableIPv6
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> SetSourceFilter
+  //
+  // - VE initialized
+  // - no existing channels
+  // - no media
+  TEST(SetSourceFilter);
+  ANL();
+
+  // call without valid channel
+  TEST_MUSTPASS(!netw->SetSourceFilter(0, 12345));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // invalid parameters
+  TEST_MUSTPASS(!netw->SetSourceFilter(0, 65536));
+  MARK();
+  TEST_ERROR(VE_INVALID_PORT_NMBR);
+  TEST_MUSTPASS(!netw->SetSourceFilter(0, 12345, 65536));
+  MARK();
+  TEST_ERROR(VE_INVALID_PORT_NMBR);
+  TEST_MUSTPASS(!netw->SetSourceFilter(0, 12345, 12346, "300.300.300.300"));
+  MARK();
+  TEST_ERROR(VE_INVALID_IP_ADDRESS);
+
+  // STATE: RTP filter port is 12345, RTCP filter port is 12346
+
+  // disable all filters and ensure that media is received
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, NULL));
+  MARK();
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 2000, kVoEDefault, localIP));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 2000, localIP));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  TEST_MUSTPASS(rtpPort != 2000);
+  TEST_MUSTPASS(rtcpPort != 2001);
+  TEST_MUSTPASS(strcmp(ipaddr, localIP) != 0);
+
+  // clear states and restart loopback session
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0)); // clear source info state
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // set RTP filter to port 2002 and verify that source 2000 is blocked
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 2002, 0, NULL));;
+  MARK();
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 2000, kVoEDefault, localIP));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 2000, localIP));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  TEST_MUSTPASS(rtpPort != 0);
+  TEST_MUSTPASS(strcmp(ipaddr, "") != 0);
+
+  // ensure that received packets originates from 2002 and that they now pass
+  // the filter
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  // RTP source is 2002
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 2002, kVoEDefault, localIP));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 2002, localIP));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  TEST_MUSTPASS(rtpPort != 2002);
+  TEST_MUSTPASS(strcmp(ipaddr, localIP) != 0);
+
+  // clear states and restart loopback session
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0)); // clear source info state
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // set IP filter to local IP and verify that default loopback stream is
+  // blocked
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, localIP));;
+  MARK();
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 2000));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 2000, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  TEST_MUSTPASS(rtpPort != 0);
+  TEST_MUSTPASS(strcmp(ipaddr, "") != 0);
+
+  // ensure that received packets originates from the local IP and that they
+  // now pass the filter
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  // should pass the filter
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 2000, kVoEDefault, localIP));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 2000, localIP));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+  TEST_MUSTPASS(netw->GetSourceInfo(0, rtpPort, rtcpPort, ipaddr));
+  TEST_MUSTPASS(rtpPort != 2000);
+  TEST_MUSTPASS(strcmp(ipaddr, localIP) != 0);
+
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+
+  // STATE: no active media, IP filter is active
+
+  // disable all filters
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, NULL));;
+  MARK();
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  TEST_MUSTPASS(rtpPort != 0);
+  TEST_MUSTPASS(rtcpPort != 0);
+  TEST_MUSTPASS(strcmp(ipaddr, "") != 0);
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of SetSourceFilter
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> GetSourceFilter
+  //
+  // - VE initialized
+  // - no existing channels
+  // - no media
+  TEST(GetSourceFilter);
+  ANL();
+
+  // call without valid channel
+  TEST_MUSTPASS(!netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // invalid input parameters
+  TEST_MUSTPASS(!netw->GetSourceFilter(0, rtpPort, rtcpPort, NULL));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+
+  // valid call without any filter set
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 0);
+  TEST_MUSTPASS(rtcpPort != 0);
+  TEST_MUSTPASS(strcmp(ipaddr, "") != 0);
+
+  // STATE: no active media and no enabled filters
+
+  // set different filters and verify that they "bite"
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 54321, 0, NULL));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 54321);
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, NULL));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtpPort != 0);
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 15425, NULL));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtcpPort != 15425);
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, NULL));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(rtcpPort != 0);
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, "192.168.199.19"));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(strcmp(ipaddr, "192.168.199.19") != 0);
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, NULL));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(strcmp(ipaddr, "") != 0);
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, "0.0.0.0"));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(strcmp(ipaddr, "0.0.0.0") != 0);
+  TEST_MUSTPASS(netw->SetSourceFilter(0, 0, 0, NULL));
+  TEST_MUSTPASS(netw->GetSourceFilter(0, rtpPort, rtcpPort, ipaddr));
+  MARK();
+  TEST_MUSTPASS(strcmp(ipaddr, "") != 0);
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of GetSourceFilter
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> RegisterDeadOrAliveObserver
+  // >> DeRegisterDeadOrAliveObserver
+  //
+  // - VE initialized
+  // - no existing channels
+  // - no media
+  TEST(RegisterDeadOrAliveObserver);
+  ANL();
+  TEST(DeRegisterDeadOrAliveObserver);
+  ANL();
+
+  // call without valid channel
+  TEST_MUSTPASS(!netw->RegisterDeadOrAliveObserver(0, *this));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  TEST_MUSTPASS(netw->RegisterDeadOrAliveObserver(0, *this));
+  MARK();
+  TEST_MUSTPASS(!netw->RegisterDeadOrAliveObserver(0, *this));
+  MARK(); // already registered
+  TEST_ERROR(VE_INVALID_OPERATION);
+  TEST_MUSTPASS(netw->DeRegisterDeadOrAliveObserver(0));
+  MARK();
+  TEST_MUSTPASS(netw->DeRegisterDeadOrAliveObserver(0));
+  MARK(); // OK to do it again
+  TEST_MUSTPASS(netw->RegisterDeadOrAliveObserver(0, *this));
+  MARK();
+  TEST_MUSTPASS(netw->DeRegisterDeadOrAliveObserver(0));
+  MARK();
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+
+  // STATE: dead-or-alive observer is disabled
+
+  // >> end of RegisterDeadOrAliveObserver
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> SetPeriodicDeadOrAliveStatus
+  // >> GetPeriodicDeadOrAliveStatus
+  //
+  // - VE initialized
+  // - no existing channels
+  // - no media
+
+  // call without valid channel
+  TEST_MUSTPASS(!netw->SetPeriodicDeadOrAliveStatus(0, false));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // Invalid paramters
+  TEST_MUSTPASS(!netw->SetPeriodicDeadOrAliveStatus(0, true, 0));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetPeriodicDeadOrAliveStatus(0, true, 151));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetPeriodicDeadOrAliveStatus(1, true, 10));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  int sampleTime(0);
+
+  // Valid parameters
+  TEST_MUSTPASS(netw->SetPeriodicDeadOrAliveStatus(0, true, 1));
+  MARK();
+  TEST_MUSTPASS(netw->GetPeriodicDeadOrAliveStatus(0, enabled, sampleTime));
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(sampleTime != 1);
+  TEST_MUSTPASS(netw->SetPeriodicDeadOrAliveStatus(0, true, 150));
+  MARK();
+  TEST_MUSTPASS(netw->GetPeriodicDeadOrAliveStatus(0, enabled, sampleTime));
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(sampleTime != 150);
+  TEST_MUSTPASS(netw->SetPeriodicDeadOrAliveStatus(0, false));
+  MARK();
+  TEST_MUSTPASS(netw->GetPeriodicDeadOrAliveStatus(0, enabled, sampleTime));
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(sampleTime != 150); // ensure last set time isnt modified
+
+  StartMedia(0, 2000, true, true, true);
+
+  // STATE: full duplex media is active
+
+  // test the dead-or-alive mechanism
+  TEST_MUSTPASS(netw->RegisterDeadOrAliveObserver(0, *this));
+  MARK();
+  TEST_LOG("\nVerify that Alive callbacks are received (dT=2sec): ");
+  fflush(NULL);
+  TEST_MUSTPASS(netw->SetPeriodicDeadOrAliveStatus(0, true, 2));
+  SLEEP(6000);
+  TEST_LOG("\nChange dT to 1 second: ");
+  fflush(NULL);
+  TEST_MUSTPASS(netw->SetPeriodicDeadOrAliveStatus(0, true, 1));
+  SLEEP(6000);
+  TEST_LOG("\nDisable dead-or-alive callbacks: ");
+  fflush(NULL);
+  TEST_MUSTPASS(netw->SetPeriodicDeadOrAliveStatus(0, false));
+  SLEEP(6000);
+  TEST_LOG("\nStop sending and enable callbacks again.\n");
+  TEST_LOG("Verify that Dead callbacks are received (dT=2sec): ");
+  fflush(NULL);
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(netw->SetPeriodicDeadOrAliveStatus(0, true, 2));
+  SLEEP(6000);
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_LOG("\nRestart sending.\n");
+  TEST_LOG("Verify that Alive callbacks are received again (dT=2sec): ");
+  fflush(NULL);
+  SLEEP(6000);
+  TEST_LOG("\nDisable dead-or-alive callbacks.");
+  fflush(NULL);
+  TEST_MUSTPASS(netw->SetPeriodicDeadOrAliveStatus(0, false));
+  TEST_MUSTPASS(netw->DeRegisterDeadOrAliveObserver(0));
+  MARK();
+
+  StopMedia(0);
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of SetPeriodicDeadOrAliveStatus
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> SetPacketTimeoutNotification
+  // >> GetPacketTimeoutNotification
+  //
+  // - VE initialized
+  // - no existing channels
+  // - no media
+  // - NOTE: dynamic tests are performed in standard test
+
+  int timeOut(0);
+
+  TEST(SetPacketTimeoutNotification);
+  ANL();
+  TEST(GetPacketTimeoutNotification);
+  ANL();
+
+  // call without existing valid channel
+  TEST_MUSTPASS(!netw->SetPacketTimeoutNotification(0, false));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // invalid function calls
+  TEST_MUSTPASS(!netw->SetPacketTimeoutNotification(0, true, 0));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetPacketTimeoutNotification(0, true, 151));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+
+  // valid function calls (no active media)
+  TEST_MUSTPASS(netw->SetPacketTimeoutNotification(0, true, 2));
+  MARK();
+  TEST_MUSTPASS(netw->GetPacketTimeoutNotification(0, enabled, timeOut));
+  MARK();
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(timeOut != 2);
+  TEST_MUSTPASS(netw->SetPacketTimeoutNotification(0, false));
+  MARK();
+  TEST_MUSTPASS(netw->GetPacketTimeoutNotification(0, enabled, timeOut));
+  MARK();
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(netw->SetPacketTimeoutNotification(0, true, 10));
+  MARK();
+  TEST_MUSTPASS(netw->GetPacketTimeoutNotification(0, enabled, timeOut));
+  MARK();
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(timeOut != 10);
+  TEST_MUSTPASS(netw->SetPacketTimeoutNotification(0, true, 2));
+  MARK();
+  TEST_MUSTPASS(netw->GetPacketTimeoutNotification(0, enabled, timeOut));
+  MARK();
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(timeOut != 2);
+  TEST_MUSTPASS(netw->SetPacketTimeoutNotification(0, false));
+  MARK();
+  TEST_MUSTPASS(netw->GetPacketTimeoutNotification(0, enabled, timeOut));
+  MARK();
+  TEST_MUSTPASS(enabled != false);
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  ANL();
+  AOK();
+  ANL();
+  ANL();
+
+  // >> end of SetPacketTimeoutNotification
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> SendUDPPacket
+  //
+  // - VE initialized
+  // - no existing channels
+  // - no media
+
+
+  // >> end of SendUDPPacket
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> SetSendTOS
+  //
+  // - VE initialized
+  // - no existing channels
+  // - no media
+  TEST(SetSendTOS);
+  ANL();
+#if defined(_WIN32) || defined(WEBRTC_MAC) || defined(WEBRTC_LINUX)
+
+  // call without existing valid channel
+
+  TEST_MUSTPASS(!netw->SetSendTOS(0, 0)); MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // trivial invalid function calls
+  TEST_MUSTPASS(!netw->SetSendTOS(0, -1)); MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendTOS(0, 64)); MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendTOS(0, 1, -2)); MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendTOS(0, 1, 8)); MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendTOS(0, 1)); MARK();
+  TEST_ERROR(VE_SOCKET_ERROR); // must create sockets first
+
+#ifdef _WIN32
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 3000));
+
+  // enable ToS using SetSockopt (should work without local binding)
+  TEST_MUSTPASS(netw->SetSendTOS(0, 1, -1, true)); MARK();
+  TEST_MUSTPASS(netw->GetSendTOS(0, DSCP, priority, useSetSockopt)); MARK();
+  TEST_MUSTPASS(DSCP != 1);
+  TEST_MUSTPASS(priority != 0);
+  TEST_MUSTPASS(useSetSockopt != true);
+
+  // try to disable SetSockopt while ToS is enabled (should fail)
+  TEST_MUSTPASS(!netw->SetSendTOS(0, 1, -1, false)); MARK();
+  TEST_ERROR(VE_TOS_INVALID); // must disable ToS before enabling SetSockopt
+
+  // disable ToS to be able to stop using SetSockopt
+  TEST_MUSTPASS(netw->SetSendTOS(0, 0, -1, true)); MARK(); // disable ToS
+  TEST_MUSTPASS(netw->GetSendTOS(0, DSCP, priority, useSetSockopt)); MARK();
+  TEST_MUSTPASS(DSCP != 0);
+  TEST_MUSTPASS(priority != 0);
+  TEST_MUSTPASS(useSetSockopt != true);
+
+  // to use the "non-SetSockopt" method, local binding is required,
+  // trying without it should fail
+  TEST_MUSTPASS(!netw->SetSendTOS(0, 1, -1, false)); MARK();
+  TEST_ERROR(VE_TOS_ERROR); // must bind to local IP first
+
+  // bind to local IP and try again (should work this time)
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345, kVoEDefault, localIP));
+  TEST_LOG("\nThis test needs to be run as administrator\n");
+  TEST_MUSTPASS(netw->SetSendTOS(0, 1, -1, false)); MARK();
+  TEST_MUSTPASS(netw->GetSendTOS(0, DSCP, priority, useSetSockopt)); MARK();
+  TEST_MUSTPASS(DSCP != 1);
+  TEST_MUSTPASS(priority != 0);
+  TEST_MUSTPASS(useSetSockopt != false);
+
+  // STATE: binded to local IP, local port is 12345 and DSCP is 1 (not using
+  // SetSockopt)
+
+  // verify loopback audio with the current settings
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, localIP));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  Play(0, 2000, true, true); // file should be played out here...
+
+#ifdef _SEND_TO_REMOTE_IP_
+  // Send to remote destination and verify the DSCP using Wireshark.
+  // Use filter ip.src == "RemoteIP".
+  TEST_LOG("\nUse Wireshark and verify a correctly received DSCP at the "
+      "remote side!\n");
+  TEST_LOG("Sending approx. 5 packets to %s:%d for each DSCP below:\n",
+      RemoteIP, RemotePort);
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, RemotePort, RemoteIP));
+  TEST_LOG("  DSCP is set to 0x%02x\n", 1);
+  SLEEP(100);
+
+  // Change the DSCP while sending is active and verify on remote side.
+  TEST_MUSTPASS(netw->SetSendTOS(0, 2));
+  TEST_MUSTPASS(netw->GetSendTOS(0, DSCP, priority, useSetSockopt));
+  TEST_LOG("  DSCP is set to 0x%02x\n", DSCP);
+  SLEEP(100);
+
+  // Change the DSCP while sending is active and verify on remote side.
+  TEST_MUSTPASS(netw->SetSendTOS(0, 63));
+  TEST_MUSTPASS(netw->GetSendTOS(0, DSCP, priority, useSetSockopt));
+  TEST_LOG("  DSCP is set to 0x%02x\n", DSCP);
+  SLEEP(100);
+
+  // stop and resume sending
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(netw->GetSendTOS(0, DSCP, priority, useSetSockopt));
+  TEST_LOG("  DSCP is set to 0x%02x\n", DSCP);
+  SLEEP(100);
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(netw->SetSendTOS(0, 0));
+#endif // _SEND_TO_REMOTE_IP_
+  // Windows priority tests (priority cannot be set using setsockopt on Win)
+  TEST_LOG("Testing priority\n");
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, localIP));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(!netw->SetSendTOS(0, 0, 3, true)); // Should fail
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(netw->SetSendTOS(0, 0, 3, false));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  Play(0, 2000, true, true); // file should be played out here...
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(netw->SetSendTOS(0, 1, 3, false));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  Play(0, 2000, true, true); // file should be played out here...
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+#endif // _WIN32
+  // STATE: no media, disabled ToS, no defined receiver
+
+  // Repeat tests above but using setsockopt() this time.
+  // Binding to local IP should not be required.
+
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345, kVoEDefault));
+  TEST_MUSTPASS(netw->SetSendTOS(0, 10, -1, true)); MARK();
+  TEST_MUSTPASS(netw->GetSendTOS(0, DSCP, priority, useSetSockopt)); MARK();
+  TEST_MUSTPASS(DSCP != 10);
+  TEST_MUSTPASS(priority != 0);
+  TEST_MUSTPASS(useSetSockopt != true);
+
+  // STATE: *not* binded to local IP, local port is 12345 and DSCP is 10
+  // (using SetSockopt)
+
+  // verify loopback audio with the current settings
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  Play(0, 2000, true, true); // file should be played out here...
+
+#ifdef _SEND_TO_REMOTE_IP_
+  // Send to remote destination and verify the DSCP using Wireshark.
+  // Use filter ip.src == "RemoteIP".
+  TEST_LOG("\nUse Wireshark and verify a correctly received DSCP at the"
+      " remote side!\n");
+  TEST_LOG("Sending approx. 5 packets to %s:%d for each DSCP below:\n",
+      RemoteIP, RemotePort);
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, RemotePort, RemoteIP));
+  TEST_MUSTPASS(netw->GetSendTOS(0, DSCP, priority, useSetSockopt));
+  TEST_LOG("  DSCP is set to 0x%02x (setsockopt)\n", DSCP);
+  SLEEP(100);
+
+  // Change the DSCP while sending is active and verify on remote side.
+  TEST_MUSTPASS(netw->SetSendTOS(0, 20, -1, true)); // use setsockopt()
+  TEST_MUSTPASS(netw->GetSendTOS(0, DSCP, priority, useSetSockopt));
+  TEST_LOG("  DSCP is set to 0x%02x (setsockopt)\n", DSCP);
+  SLEEP(100);
+
+  // Change the DSCP while sending is active and verify on remote side.
+  TEST_MUSTPASS(netw->SetSendTOS(0, 61, -1, true)); // use setsockopt()
+  TEST_MUSTPASS(netw->GetSendTOS(0, DSCP, priority, useSetSockopt));
+  TEST_LOG("  DSCP is set to 0x%02x (setsockopt)\n", DSCP);
+  SLEEP(100);
+
+  // stop and resume sending
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(netw->GetSendTOS(0, DSCP, priority, useSetSockopt));
+  TEST_LOG("  DSCP is set to 0x%02x (setsockopt)\n", DSCP);
+  SLEEP(100);
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(netw->SetSendTOS(0, 0, -1, true));
+#endif // _SEND_TO_REMOTE_IP_
+#if defined(WEBRTC_LINUX)
+  // Linux priority tests (using setsockopt)
+  TEST_LOG("Testing priority\n");
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, localIP));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(netw->SetSendTOS(0, 0, 3, true));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  Play(0, 2000, true, true); // file should be played out here...
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(netw->SetSendTOS(0, 1, 3, true));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  Play(0, 2000, true, true); // file should be played out here...
+#endif // #if defined(WEBRTC_LINUX)
+#if !defined(_WIN32) && !defined(WEBRTC_LINUX)
+  // Fail tests for other than Wind and Linux
+  TEST_MUSTPASS(!netw->SetSendTOS(0, 0, 3, false)); // Should fail
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+#endif // #if !defined(_WIN32) && !defined(WEBRTC_LINUX)
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  ANL(); AOK(); ANL(); ANL();
+
+  // END #if defined(_WIN32) || defined(WEBRTC_MAC) || defined(WEBRTC_LINUX)
+#else
+  TEST_LOG("Skipping ToS tests -  _WIN32, LINUX, MAC is not defined or "
+    "WEBRTC_ANDROID is defined");
+#endif
+
+  // >> end of SetSendTOS
+  // ------------------------------------------------------------------------
+
+  // ------------------------------------------------------------------------
+  // >> SetSendGQoS (Windows only)
+  //
+  // - VE initialized
+  // - no existing channels
+  // - no media
+  //
+  // From qos.h:
+  //
+  //  #define SERVICETYPE_NOTRAFFIC               0x00000000
+  //  #define SERVICETYPE_BESTEFFORT              0x00000001 (*)
+  //  #define SERVICETYPE_CONTROLLEDLOAD          0x00000002 (*)
+  //  #define SERVICETYPE_GUARANTEED              0x00000003 (*)
+  //  #define SERVICETYPE_NETWORK_UNAVAILABLE     0x00000004
+  //  #define SERVICETYPE_GENERAL_INFORMATION     0x00000005
+  //  #define SERVICETYPE_NOCHANGE                0x00000006
+  //  #define SERVICETYPE_NONCONFORMING           0x00000009
+  //  #define SERVICETYPE_NETWORK_CONTROL         0x0000000A
+  //  #define SERVICETYPE_QUALITATIVE             0x0000000D (*)
+  //
+  //  #define SERVICE_BESTEFFORT                  0x80010000
+  //  #define SERVICE_CONTROLLEDLOAD              0x80020000
+  //  #define SERVICE_GUARANTEED                  0x80040000
+  //  #define SERVICE_QUALITATIVE                 0x80200000
+  //
+  //  (*) supported in WEBRTC VoE
+  TEST(SetSendGQoS);
+  ANL();
+#ifdef _WIN32
+
+  // call without existing valid channel
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, false, 0)); MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+
+  // supported service type but no sockets
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT)); MARK();
+  TEST_ERROR(VE_SOCKETS_NOT_INITED);
+
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
+
+  // supported service type but sender is not initialized
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT)); MARK();
+  TEST_ERROR(VE_DESTINATION_NOT_INITED);
+
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+
+  // invalid service types
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICETYPE_NOTRAFFIC)); MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICETYPE_NETWORK_UNAVAILABLE));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICETYPE_GENERAL_INFORMATION));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICETYPE_NOCHANGE)); MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICETYPE_NONCONFORMING));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICETYPE_NETWORK_CONTROL));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICE_BESTEFFORT)); MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICE_CONTROLLEDLOAD)); MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICE_GUARANTEED)); MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICE_QUALITATIVE)); MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+
+  // Is ToS enabled here?
+
+  // Settings which don't require binding to local IP
+
+  // set SERVICETYPE_BESTEFFORT
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT)); MARK();
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  MARK();
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(serviceType != SERVICETYPE_BESTEFFORT);
+  TEST_MUSTPASS(overrideDSCP != false);
+
+  // set SERVICETYPE_CONTROLLEDLOAD
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_CONTROLLEDLOAD));
+  MARK();
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  MARK();
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(serviceType != SERVICETYPE_CONTROLLEDLOAD);
+  TEST_MUSTPASS(overrideDSCP != false);
+
+  // set SERVICETYPE_GUARANTEED
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_GUARANTEED)); MARK();
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  MARK();
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(serviceType != SERVICETYPE_GUARANTEED);
+  TEST_MUSTPASS(overrideDSCP != false);
+
+  // set SERVICETYPE_QUALITATIVE
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_QUALITATIVE)); MARK();
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  MARK();
+  TEST_MUSTPASS(enabled != true);
+  TEST_MUSTPASS(serviceType != SERVICETYPE_QUALITATIVE);
+  TEST_MUSTPASS(overrideDSCP != false);
+
+  // disable GQoS
+  TEST_MUSTPASS(netw->SetSendGQoS(0, false, 0)); MARK();
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  MARK();
+  TEST_MUSTPASS(enabled != false);
+  TEST_MUSTPASS(serviceType != SERVICETYPE_QUALITATIVE);
+  TEST_MUSTPASS(overrideDSCP != false);
+
+  // STATE: diabled QGoS, sockets exists, sending side is initialized, no media
+
+  // Loopback tests using the four different GQoS settings
+
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT)); MARK();
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  ANL();
+  TEST_LOG("[SERVICETYPE_BESTEFFORT]");
+  Play(0, 2000, true, true); // file should be played out here...
+
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_CONTROLLEDLOAD)); MARK();
+  ANL();
+  TEST_LOG("[SERVICETYPE_CONTROLLEDLOAD]");
+  Play(0, 2000, true, true); // file should be played out here...
+
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_GUARANTEED)); MARK();
+  ANL();
+  TEST_LOG("[SERVICETYPE_GUARANTEED]");
+  Play(0, 2000, true, true); // file should be played out here...
+
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_QUALITATIVE)); MARK();
+  ANL();
+  TEST_LOG("[SERVICETYPE_QUALITATIVE]");
+  Play(0, 2000, true, true); // file should be played out here...
+
+#ifdef _SEND_TO_REMOTE_IP_
+  // Send to remote destination and verify the DSCP mapping using Wireshark.
+  // Use filter ip.src == "RemoteIP".
+
+  // Modify the send destination on the fly
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, RemotePort, RemoteIP));
+
+  TEST_LOG("\nUse Wireshark and verify a correctly received DSCP mapping at"
+      " the remote side!\n");
+  TEST_LOG("Sending approx. 5 packets to %s:%d for each GQoS setting below:\n",
+      RemoteIP, RemotePort);
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT));
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  TEST_LOG("  serviceType is set to SERVICETYPE_BESTEFFORT (0x%02x), should "
+      "be mapped to DSCP = 0x00\n", serviceType);
+  SLEEP(100);
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_CONTROLLEDLOAD));
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  TEST_LOG("  serviceType is set to SERVICETYPE_CONTROLLEDLOAD (0x%02x), "
+      "should be mapped to DSCP = 0x18\n", serviceType);
+  SLEEP(100);
+  TEST_MUSTPASS(netw->SetSendGQoS(0, false, 0));
+  TEST_LOG("  QoS is disabled, should give DSCP = 0x%02x\n", 0);
+  SLEEP(100);
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_GUARANTEED));
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  TEST_LOG("  serviceType is set to SERVICETYPE_GUARANTEED (0x%02x), should "
+      "be mapped to DSCP = 0x28\n", serviceType);
+  SLEEP(100);
+  TEST_MUSTPASS(netw->SetSendGQoS(0, false, 0));
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_QUALITATIVE));
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  TEST_LOG("  serviceType is set to SERVICETYPE_QUALITATIVE (0x%02x), should"
+      " be mapped to DSCP = 0x00\n", serviceType);
+  SLEEP(100);
+#endif // _SEND_TO_REMOTE_IP_
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+
+  // STATE: sockets exists, sending side is initialized, no media
+
+  // Repeat test above but this time using overrideDSCP.
+
+  // Some initial loopack tests.
+  // NOTE - override DSCP requres binding to local IP.
+
+  // should not work since QoS is enabled
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT, 3));
+  MARK();
+  TEST_ERROR(VE_TOS_GQOS_CONFLICT);
+
+  // disble QoS and try to override again (should fail again since local
+  // binding is not done yet)
+  TEST_MUSTPASS(netw->SetSendGQoS(0, false, 0));
+  TEST_MUSTPASS(!netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT, 3));
+  MARK();
+  TEST_ERROR(VE_GQOS_ERROR);
+
+  // make proper settings and try again (should work this time)
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345, kVoEDefault, localIP));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, localIP));
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT, 3));
+  MARK();
+
+  // Now, let's try some loopback tests using override DSCP
+
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  ANL();
+  TEST_LOG("[overrideDSCP=3]");
+  Play(0, 2000, true, true); // file should be played out here...
+
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT, 17));
+  MARK();
+  ANL();
+  TEST_LOG("[overrideDSCP=17]");
+  Play(0, 2000, true, true); // file should be played out here...
+
+  // And finally, send to remote side as well to verify that the new mapping
+  // works as it should.
+
+#ifdef _SEND_TO_REMOTE_IP_
+  // Modify the send destination on the fly
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, RemotePort, RemoteIP));
+
+  TEST_LOG("\nUse Wireshark and verify a correctly received DSCP mapping at"
+      " the remote side!\n");
+  TEST_LOG("Sending approx. 5 packets to %s:%d for each GQoS setting below:\n",
+      RemoteIP, RemotePort);
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT, 18));
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  TEST_LOG("  serviceType is set to SERVICETYPE_BESTEFFORT, should be "
+      "overrided to DSCP = 0x%02x\n", overrideDSCP);
+  SLEEP(100);
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT, 62));
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  TEST_LOG("  serviceType is set to SERVICETYPE_BESTEFFORT, should be "
+      "overrided to DSCP = 0x%02x\n", overrideDSCP);
+  SLEEP(100);
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT, 32));
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  TEST_LOG("  serviceType is set to SERVICETYPE_BESTEFFORT, should be "
+      "overrided to DSCP = 0x%02x\n", overrideDSCP);
+  SLEEP(100);
+  TEST_MUSTPASS(netw->SetSendGQoS(0, true, SERVICETYPE_BESTEFFORT, 1));
+  TEST_MUSTPASS(netw->GetSendGQoS(0, enabled, serviceType, overrideDSCP));
+  TEST_LOG("  serviceType is set to SERVICETYPE_BESTEFFORT, should be "
+      "overrided to DSCP = 0x%02x\n", overrideDSCP);
+  SLEEP(100);
+  TEST_MUSTPASS(netw->SetSendGQoS(0, false, 0));
+  TEST_LOG("  QoS is disabled, should give DSCP = 0x%02x\n", 0);
+  SLEEP(100);
+#endif // _SEND_TO_REMOTE_IP_
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  ANL(); AOK(); ANL(); ANL();
+
+#else
+  TEST_LOG("Skipping GQoS tests - _WIN32 is not defined");
+#endif  // #ifdef _WIN32
+  // >> end of SetSendGQoS
+  // ------------------------------------------------------------------------
+
+    if (file) {
+    file->StopPlayingFileAsMicrophone(0);
+  }
+  voe_base_->StopSend(0);
+  voe_base_->StopPlayout(0);
+  voe_base_->StopReceive(0);
+  voe_base_->DeleteChannel(0);
+  voe_base_->Terminate();
+
+  ANL();
+  AOK();
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestRTP_RTCP
+// ----------------------------------------------------------------------------
+
+// Used to validate packets during the RTP audio level indication test.
+class RTPAudioTransport: public Transport {
+public:
+
+  RTPAudioTransport() :
+    mute_(false) {
+  }
+
+  virtual ~RTPAudioTransport() {
+  }
+
+  void set_mute(bool mute) {
+    mute_ = mute;
+  }
+  bool mute() const {
+    return mute_;
+  }
+
+  // TODO(andrew): use proper error checks here rather than asserts.
+  virtual int SendPacket(int channel, const void* data, int length) {
+    const uint8_t* packet = static_cast<const uint8_t*> (data);
+
+    // Extension bit.
+    assert(packet[0] & 0x10);
+    int index = 12; // Assume standard RTP header.
+    // Header extension ID
+    assert(packet[index++] == 0xBE);
+    assert(packet[index++] == 0xDE);
+    // Header extension length
+    assert(packet[index++] == 0x00);
+    assert(packet[index++] == 0x01);
+
+    // User-defined ID.
+    assert(((packet[index] & 0xf0) >> 4) == 1);
+    // Length
+    assert((packet[index++] & 0x0f) == 0);
+
+    int vad = packet[index] >> 7;
+    int level = packet[index] & 0x7f;
+    if (channel == 0) {
+      printf("%d    -%d\n", vad, level);
+    } else if (channel == 1) {
+      printf("             %d    -%d\n", vad, level);
+    } else {
+      assert(false);
+    }
+
+    if (mute_) {
+      assert(vad == 0);
+      assert(level == 127);
+    } else {
+      assert(vad == 0 || vad == 1);
+      assert(level >= 0 && level <= 127);
+    }
+
+    return 0;
+  }
+
+  virtual int SendRTCPPacket(int /*channel*/, const void* /*data*/,
+                             int /*length*/) {
+    return 0;
+  }
+
+private:
+  bool mute_;
+};
+
+int VoEExtendedTest::TestRTP_RTCP() {
+  PrepareTest("RTP_RTCP");
+
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoEFile* file = _mgr.FilePtr();
+  VoERTP_RTCP* rtp_rtcp = _mgr.RTP_RTCPPtr();
+  VoENetwork* network = _mgr.NetworkPtr();
+  VoEVolumeControl* volume = _mgr.VolumeControlPtr();
+  VoECodec* codec = _mgr.CodecPtr();
+
+  XRTPObserver rtpObserver;
+
+#ifdef WEBRTC_ANDROID
+  int sleepTime = 200;
+#else
+  int sleepTime = 100;
+#endif
+
+#ifdef _USE_EXTENDED_TRACE_
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile((output_path +
+              "VoERTP_RTCP_trace.txt").c_str()));
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceStateInfo |
+          kTraceStateInfo |
+          kTraceWarning |
+          kTraceError |
+          kTraceCritical |
+          kTraceApiCall |
+          kTraceMemory |
+          kTraceInfo));
+#endif
+
+  TEST_MUSTPASS(voe_base_->Init());
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+
+  ///////////////////////////
+  // Actual test starts here
+
+  // ------------------------------------------------------------------------
+  // >> Set/GetRTPAudioLevelIndicationStatus
+  TEST(SetRTPAudioLevelIndicationStatus);
+  ANL();
+  TEST(GetRTPAudioLevelIndicationStatus);
+
+  // test invalid input parameters
+  TEST_MUSTPASS(-1 != rtp_rtcp->SetRTPAudioLevelIndicationStatus(0, true, 0));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(-1 != rtp_rtcp->SetRTPAudioLevelIndicationStatus(0, true, 15));
+  MARK();
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(-1 != rtp_rtcp->SetRTPAudioLevelIndicationStatus(0, false, 15));
+  MARK();
+  TEST_MUSTPASS(-1 != rtp_rtcp->SetRTPAudioLevelIndicationStatus(1, true, 5));
+  MARK();
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+
+  // test complete valid input range [1,14]
+  bool audioLevelEnabled(false);
+  unsigned char ID(0);
+  for (int id = 1; id < 15; id++) {
+    TEST_MUSTPASS(rtp_rtcp->SetRTPAudioLevelIndicationStatus(0, true, id));
+    MARK();
+    TEST_MUSTPASS(rtp_rtcp->GetRTPAudioLevelIndicationStatus(
+            0, audioLevelEnabled, ID));
+    MARK();
+    TEST_MUSTPASS(audioLevelEnabled != true);
+    TEST_MUSTPASS(rtp_rtcp->SetRTPAudioLevelIndicationStatus(0, false, id));
+    MARK();
+    TEST_MUSTPASS(rtp_rtcp->GetRTPAudioLevelIndicationStatus(
+            0, audioLevelEnabled, ID));
+    MARK();
+    TEST_MUSTPASS(audioLevelEnabled != false);
+    TEST_MUSTPASS(ID != id);
+  }
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+
+  RTPAudioTransport rtpAudioTransport;
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+  TEST_MUSTPASS(network->RegisterExternalTransport(0, rtpAudioTransport));
+  TEST_MUSTPASS(rtp_rtcp->SetRTPAudioLevelIndicationStatus(0, true));
+  TEST_MUSTPASS(codec->SetVADStatus(0, true));
+
+  printf("\n\nReceving muted packets (expect VAD = 0, Level = -127)...\n");
+  printf("VAD  Level [dbFS]\n");
+  SLEEP(2000);
+  rtpAudioTransport.set_mute(true);
+  TEST_MUSTPASS(volume->SetInputMute(0, true));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(5000);
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  rtpAudioTransport.set_mute(false);
+  TEST_MUSTPASS(volume->SetInputMute(0, false));
+
+  printf("\nReceiving packets from mic (should respond to mic level)...\n");
+  printf("VAD  Level [dbFS]\n");
+  SLEEP(2000);
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(5000);
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+
+  printf("\nReceiving packets from file (expect mostly VAD = 1)...\n");
+  printf("VAD  Level [dbFS]\n");
+  SLEEP(2000);
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(0, _mgr.AudioFilename(),
+          true, true));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(5000);
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+
+  printf("\nMuted and mic on independent channels...\n");
+  printf("Muted        Mic\n");
+  SLEEP(2000);
+  ASSERT_TRUE(1 == voe_base_->CreateChannel());
+  TEST_MUSTPASS(network->RegisterExternalTransport(1, rtpAudioTransport));
+  TEST_MUSTPASS(rtp_rtcp->SetRTPAudioLevelIndicationStatus(1, true));
+  TEST_MUSTPASS(codec->SetVADStatus(1, true));
+  TEST_MUSTPASS(volume->SetInputMute(0, true));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartSend(1));
+  SLEEP(5000);
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopSend(1));
+
+  TEST_MUSTPASS(network->DeRegisterExternalTransport(0));
+  TEST_MUSTPASS(network->DeRegisterExternalTransport(1));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(1));
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+
+  MARK();
+  ANL();
+
+  // ------------------------------------------------------------------------
+  // >> InsertExtraRTPPacket
+
+  int i(0);
+
+  TEST(SetLocalSSRC);
+  TEST_MUSTPASS(!rtp_rtcp->SetLocalSSRC(0, 5678));
+  MARK();
+  TEST_MUSTPASS(VE_ALREADY_SENDING != voe_base_->LastError());
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(rtp_rtcp->SetLocalSSRC(0, 5678)); // force send SSRC to 5678
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  MARK();
+  ANL();
+
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(0, _mgr.AudioFilename(),
+          true, true));
+
+  // ------------------------------------------------------------------------
+  // >> InsertExtraRTPPacket
+  TEST(InsertExtraRTPPacket);
+  ANL();
+
+  const char payloadData[8] = { 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H' };
+
+  TEST_MUSTPASS(-1 != rtp_rtcp->InsertExtraRTPPacket(-1, 0, false,
+          payloadData, 8));
+  MARK(); // invalid channel
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+  TEST_MUSTPASS(-1 != rtp_rtcp->InsertExtraRTPPacket(0, -1, false,
+          payloadData, 8));
+  MARK(); // invalid payload type
+  TEST_ERROR(VE_INVALID_PLTYPE);
+  TEST_MUSTPASS(-1 != rtp_rtcp->InsertExtraRTPPacket(0, 128, false,
+          payloadData, 8));
+  MARK(); // invalid payload type
+  TEST_ERROR(VE_INVALID_PLTYPE);
+  TEST_MUSTPASS(-1 != rtp_rtcp->InsertExtraRTPPacket(0, 99, false,
+          NULL, 8));
+    MARK(); // invalid pointer
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(-1 != rtp_rtcp->InsertExtraRTPPacket(0, 99, false,
+          payloadData, 1500-28+1));
+  MARK(); // invalid size
+  TEST_ERROR(VE_INVALID_ARGUMENT);
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(-1 != rtp_rtcp->InsertExtraRTPPacket(0, 99, false,
+          payloadData, 8));
+  MARK(); // not sending
+  TEST_ERROR(VE_NOT_SENDING);
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(0, _mgr.AudioFilename(),
+          true, true));
+
+  SLEEP(1000);
+  for (int p = 0; p < 128; p++) {
+    TEST_MUSTPASS(rtp_rtcp->InsertExtraRTPPacket(0, p, false,
+            payloadData, 8));
+    MARK();
+    TEST_MUSTPASS(rtp_rtcp->InsertExtraRTPPacket(0, p, true,
+            payloadData, 8));
+    MARK();
+  }
+
+  // Ensure we have sent all extra packets before we move forward to avoid
+  //incorrect error code
+  SLEEP(1000);
+
+  ANL();
+
+  // ------------------------------------------------------------------------
+  // >> RTP dump APIs
+  TEST(Start/StopRtpDump);
+  ANL();
+  TEST(Start/RTPDumpIsActive);
+
+  TEST_MUSTPASS(-1 != rtp_rtcp->RTPDumpIsActive(-1, kRtpIncoming));
+  MARK(); // invalid channel
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+  TEST_MUSTPASS(false != rtp_rtcp->RTPDumpIsActive(0, kRtpIncoming));
+  MARK(); // should be off by default
+  TEST_MUSTPASS(false != rtp_rtcp->RTPDumpIsActive(0, kRtpOutgoing));
+  MARK(); // should be off by default
+
+  TEST_MUSTPASS(-1 != rtp_rtcp->StartRTPDump(-1, NULL));
+  MARK(); // invalid channel
+  TEST_ERROR(VE_CHANNEL_NOT_VALID);
+  TEST_MUSTPASS(-1 != rtp_rtcp->StartRTPDump(0, NULL));
+  MARK(); // invalid file name
+  TEST_ERROR(VE_BAD_FILE);
+
+  // Create two RTP dump files:
+
+  //  - dump_in_1sec.rtp <=> ~1 sec recording of input side
+  //  - dump_in_2sec.rtp <=> ~2 sec recording of output side
+  //
+  TEST_MUSTPASS(rtp_rtcp->StopRTPDump(0));
+  MARK();
+  TEST_MUSTPASS(rtp_rtcp->StopRTPDump(0, kRtpIncoming));
+  MARK();
+  TEST_MUSTPASS(rtp_rtcp->StopRTPDump(0, kRtpOutgoing));
+  MARK();
+  std::string output_path = webrtc::test::OutputPath();
+  TEST_MUSTPASS(rtp_rtcp->StartRTPDump(
+      0, (output_path + "dump_in_1sec.rtp").c_str(), kRtpIncoming));
+  MARK();
+  TEST_MUSTPASS(rtp_rtcp->StartRTPDump(
+      0, (output_path + "dump_out_2sec.rtp").c_str(), kRtpOutgoing));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(rtp_rtcp->StopRTPDump(0, kRtpIncoming));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(rtp_rtcp->StopRTPDump(0, kRtpOutgoing));
+  MARK();
+
+  // Start/Stop tests:
+  //
+  // - only one file (called dump_in_200ms.rtp) should exist after this test
+  //
+  for (i = 0; i < 10; i++) {
+    TEST_MUSTPASS(rtp_rtcp->StartRTPDump(0,
+            (output_path + "dump_in_200ms.rtp").c_str()));
+    MARK();
+    SLEEP(200);
+    TEST_MUSTPASS(rtp_rtcp->StopRTPDump(0));
+    MARK();
+  }
+
+  // >> end of RTP dump APIs
+  // ------------------------------------------------------------------------
+  ANL();
+
+  TEST(GetRTCPStatus);
+  bool enabled;
+  TEST_MUSTPASS(!rtp_rtcp->GetRTCPStatus(-1, enabled));
+  MARK();
+  TEST_MUSTPASS(rtp_rtcp->GetRTCPStatus(0, enabled));
+  MARK(); // should be on by default
+  TEST_MUSTPASS(enabled != true);
+  ANL();
+
+  TEST(SetRTCPStatus);
+  TEST_MUSTPASS(rtp_rtcp->SetRTCPStatus(0, false));
+  MARK();
+  TEST_MUSTPASS(rtp_rtcp->GetRTCPStatus(0, enabled));
+  TEST_MUSTPASS(enabled != false);
+  MARK();
+  SLEEP(2000);
+  TEST_MUSTPASS(rtp_rtcp->SetRTCPStatus(0, true));
+  MARK();
+  TEST_MUSTPASS(rtp_rtcp->GetRTCPStatus(0, enabled));
+  TEST_MUSTPASS(enabled != true);
+  MARK();
+  SLEEP(6000); // Make sure we get an RTCP packet
+  ANL();
+
+  TEST(CNAME);
+  TEST_MUSTPASS(!rtp_rtcp->SetRTCP_CNAME(0, NULL));
+  MARK();
+  TEST_MUSTPASS(VE_RTP_RTCP_MODULE_ERROR != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!rtp_rtcp->GetRemoteRTCP_CNAME(0, NULL));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  ANL();
+
+  TEST(GetRemoteSSRC);
+  unsigned int ssrc(0);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteSSRC(0, ssrc));
+  MARK();
+  TEST_MUSTPASS(ssrc != 5678);
+  ANL();
+
+  TEST(GetRemoteCSRC); // only trivial tests added
+  unsigned int csrcs[2];
+  int n(0);
+  TEST_MUSTPASS(!rtp_rtcp->GetRemoteCSRCs(1, csrcs));
+  MARK();
+  n = rtp_rtcp->GetRemoteCSRCs(0, csrcs);
+  MARK();
+  TEST_MUSTPASS(n != 0); // should be empty
+  ANL();
+
+  TEST(SetRTPObserver);
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(rtp_rtcp->RegisterRTPObserver(0, rtpObserver));
+  TEST_MUSTPASS(rtp_rtcp->DeRegisterRTPObserver(0));
+  TEST_MUSTPASS(rtp_rtcp->RegisterRTPObserver(0, rtpObserver));
+  TEST_MUSTPASS(rtp_rtcp->SetLocalSSRC(0, 7777)); // force send SSRC to 7777
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(sleepTime);
+  // verify that the new SSRC has been detected by the observer
+  TEST_MUSTPASS(rtpObserver._SSRC != 7777);
+  TEST_MUSTPASS(rtp_rtcp->DeRegisterRTPObserver(0));
+  ANL();
+
+  // Make fresh restart (ensures that SSRC is randomized)
+  TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+
+  SLEEP(100);
+
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(0, _mgr.AudioFilename(),
+          true, true));
+
+  SLEEP(8000);
+
+  TEST(GetRemoteRTCPData);
+  // Statistics based on received RTCP reports (i.e. statistics on the remote
+  // side sent to us).
+  unsigned int NTPHigh(0), NTPLow(0), timestamp(0), playoutTimestamp(0),
+      jitter(0);
+  unsigned short fractionLost(0);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteRTCPData(0, NTPHigh, NTPLow,
+          timestamp, playoutTimestamp));
+  TEST_LOG("\n    NTPHigh = %u \n    NTPLow = %u \n    timestamp = %u \n  "
+    "  playoutTimestamp = %u \n    jitter = %u \n    fractionLost = %hu \n",
+    NTPHigh, NTPLow, timestamp, playoutTimestamp, jitter, fractionLost);
+
+  unsigned int NTPHigh2(0), NTPLow2(0), timestamp2(0);
+  unsigned int playoutTimestamp2(0), jitter2(0);
+  unsigned short fractionLost2(0);
+
+  TEST_LOG("take a new sample and ensure that the playout timestamp is "
+    "maintained");
+  SLEEP(100);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteRTCPData(0, NTPHigh2, NTPLow2, timestamp2,
+          playoutTimestamp2, &jitter2,
+          &fractionLost2));
+  TEST_LOG("\n    NTPHigh = %u \n    NTPLow = %u \n    timestamp = %u \n  "
+    "  playoutTimestamp = %u \n    jitter = %u \n    fractionLost = %hu \n",
+    NTPHigh2, NTPLow2, timestamp2, playoutTimestamp2, jitter2, fractionLost2);
+  TEST_MUSTPASS(playoutTimestamp != playoutTimestamp2);
+
+  TEST_LOG("wait for 8 seconds and ensure that the RTCP statistics is"
+    " updated...");
+  SLEEP(8000);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteRTCPData(0, NTPHigh2, NTPLow2,
+          timestamp2, playoutTimestamp2,
+          &jitter2, &fractionLost2));
+  TEST_LOG("\n    NTPHigh = %u \n    NTPLow = %u \n    timestamp = %u \n  "
+    "  playoutTimestamp = %u \n    jitter = %u \n    fractionLost = %hu \n",
+    NTPHigh2, NTPLow2, timestamp2, playoutTimestamp2, jitter2, fractionLost2);
+  TEST_MUSTPASS((NTPHigh == NTPHigh2) && (NTPLow == NTPLow2));
+  TEST_MUSTPASS(timestamp == timestamp2);
+  TEST_MUSTPASS(playoutTimestamp == playoutTimestamp2);
+
+#ifdef WEBRTC_CODEC_RED
+  //The following test is related to defect 4985 and 4986
+  TEST_LOG("Turn FEC and VAD on and wait for 4 seconds and ensure that "
+    "the jitter is still small...");
+  CodecInst cinst;
+#if (!defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID))
+  cinst.pltype = 104;
+  strcpy(cinst.plname, "isac");
+  cinst.plfreq = 32000;
+  cinst.pacsize = 960;
+  cinst.channels = 1;
+  cinst.rate = 45000;
+#else
+  cinst.pltype = 119;
+  strcpy(cinst.plname, "isaclc");
+  cinst.plfreq = 16000;
+  cinst.pacsize = 320;
+  cinst.channels = 1;
+  cinst.rate = 40000;
+#endif
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(codec->SetRecPayloadType(0, cinst));
+  TEST_MUSTPASS(codec->SetSendCodec(0, cinst));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(rtp_rtcp->SetFECStatus(0, true, -1));
+  MARK();
+  TEST_MUSTPASS(codec->SetVADStatus(0,true));
+  SLEEP(4000);
+  TEST_MUSTPASS(rtp_rtcp->GetRemoteRTCPData(0, NTPHigh2, NTPLow2, timestamp2,
+          playoutTimestamp2, &jitter2,
+          &fractionLost2));
+  TEST_LOG("\n    NTPHigh = %u \n    NTPLow = %u \n    timestamp = %u \n "
+    "   playoutTimestamp = %u \n    jitter = %u \n   fractionLost = %hu \n",
+    NTPHigh2, NTPLow2, timestamp2, playoutTimestamp2, jitter2, fractionLost2);
+  TEST_MUSTPASS(jitter2 > 1000)
+  TEST_MUSTPASS(rtp_rtcp->SetFECStatus(0, false));
+  MARK();
+  //4985 and 4986 end
+#endif // #ifdef WEBRTC_CODEC_RED
+  TEST(GetRTPStatistics);
+  ANL();
+  // Statistics summarized on local side based on received RTP packets.
+  CallStatistics stats;
+  // Call GetRTPStatistics over a longer period than 7.5 seconds
+  // (=dT RTCP transmissions).
+  unsigned int averageJitterMs, maxJitterMs, discardedPackets;
+  SLEEP(1000);
+  for (i = 0; i < 8; i++) {
+    TEST_MUSTPASS(rtp_rtcp->GetRTPStatistics(0, averageJitterMs,
+            maxJitterMs,
+            discardedPackets));
+    TEST_LOG("    %i) averageJitterMs = %u \n    maxJitterMs = %u \n  "
+      "  discardedPackets = %u \n", i, averageJitterMs, maxJitterMs,
+      discardedPackets);
+    SLEEP(1000);
+  }
+
+  TEST(RTCPStatistics #1);
+  ANL();
+  unsigned int packetsSent(0);
+  unsigned int packetsReceived(0);
+  for (i = 0; i < 8; i++)
+  {
+    TEST_MUSTPASS(rtp_rtcp->GetRTCPStatistics(0, stats));
+    TEST_LOG("    %i) fractionLost = %hu \n    cumulativeLost = %u \n  "
+        "  extendedMax = %u \n    jitterSamples = %u \n    rttMs = %d \n",
+        i, stats.fractionLost, stats.cumulativeLost,
+        stats.extendedMax, stats.jitterSamples, stats.rttMs);
+    TEST_LOG( "    bytesSent = %d \n    packetsSent = %d \n   "
+        " bytesReceived = %d \n    packetsReceived = %d \n",
+        stats.bytesSent, stats.packetsSent, stats.bytesReceived,
+        stats.packetsReceived);
+    if (i > 0)
+    {
+      TEST_LOG("    diff sent packets    : %u (~50)\n",
+               stats.packetsSent - packetsSent);
+      TEST_LOG("    diff received packets: %u (~50)\n",
+               stats.packetsReceived - packetsReceived);
+    }
+    packetsSent = stats.packetsSent;
+    packetsReceived = stats.packetsReceived;
+    SLEEP(1000);
+  }
+
+  TEST(RTCPStatistics #2);
+  ANL();
+  TEST_LOG("restart sending and ensure that the statistics is reset");
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  SLEEP(50); // ensures approx. two received packets
+  TEST_MUSTPASS(rtp_rtcp->GetRTCPStatistics(0, stats));
+  TEST_LOG("\n    fractionLost = %hu \n    cumulativeLost = %u \n  "
+      "  extendedMax = %u \n    jitterSamples = %u \n    rttMs = %d \n",
+      stats.fractionLost, stats.cumulativeLost,
+      stats.extendedMax, stats.jitterSamples, stats.rttMs);
+  TEST_LOG( "    bytesSent = %d \n    packetsSent = %d \n   "
+      " bytesReceived = %d \n    packetsReceived = %d \n",
+      stats.bytesSent, stats.packetsSent, stats.bytesReceived,
+      stats.packetsReceived);
+
+  TEST(RTCPStatistics #3);
+  ANL();
+  TEST_LOG("disable RTCP and verify that statistics is not corrupt");
+  TEST_MUSTPASS(rtp_rtcp->SetRTCPStatus(0, false));
+  SLEEP(250);
+  TEST_MUSTPASS(rtp_rtcp->GetRTCPStatistics(0, stats));
+  TEST_LOG("\n    fractionLost = %hu \n    cumulativeLost = %u \n   "
+      " extendedMax = %u \n    jitterSamples = %u \n    rttMs = %d \n",
+      stats.fractionLost, stats.cumulativeLost,
+      stats.extendedMax, stats.jitterSamples, stats.rttMs);
+  TEST_LOG("    bytesSent = %d \n    packetsSent = %d \n    "
+      "bytesReceived = %d \n    packetsReceived = %d \n",
+      stats.bytesSent, stats.packetsSent,
+      stats.bytesReceived, stats.packetsReceived);
+  TEST_MUSTPASS(rtp_rtcp->SetRTCPStatus(0, true));
+
+  TEST(RTCPStatistics #4);
+  ANL();
+  TEST_LOG("restart receiving and check RX statistics");
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  SLEEP(50); // ensures approx. two received packets
+  TEST_MUSTPASS(rtp_rtcp->GetRTCPStatistics(0, stats));
+  TEST_LOG("\n    fractionLost = %hu \n    cumulativeLost = %u \n   "
+      " extendedMax = %u \n    jitterSamples = %u \n    rttMs = %d \n",
+      stats.fractionLost, stats.cumulativeLost,
+      stats.extendedMax, stats.jitterSamples,
+      stats.rttMs);
+  TEST_LOG("    bytesSent = %d \n    packetsSent = %d \n   "
+      " bytesReceived = %d \n    packetsReceived = %d \n",
+      stats.bytesSent, stats.packetsSent,
+      stats.bytesReceived, stats.packetsReceived);
+
+  TEST(SendApplicationDefinedRTCPPacket);
+  // just do some fail tests here
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  // should fail since sending is off
+  TEST_MUSTPASS(!rtp_rtcp->SendApplicationDefinedRTCPPacket(
+      0, 0, 0, "abcdabcdabcdabcdabcdabcdabcdabcd", 32));
+  MARK();
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_MUSTPASS(rtp_rtcp->SendApplicationDefinedRTCPPacket(
+      0, 0, 0, "abcdabcdabcdabcdabcdabcdabcdabcd", 32));
+  MARK();
+  TEST_MUSTPASS(rtp_rtcp->SetRTCPStatus(0, false));
+  // should fail since RTCP is off
+  TEST_MUSTPASS(!rtp_rtcp->SendApplicationDefinedRTCPPacket(
+      0, 0, 0, "abcdabcdabcdabcdabcdabcdabcdabcd", 32));
+  MARK();
+  TEST_MUSTPASS(rtp_rtcp->SetRTCPStatus(0, true));
+  TEST_MUSTPASS(rtp_rtcp->SendApplicationDefinedRTCPPacket(
+      0, 0, 0, "abcdabcdabcdabcdabcdabcdabcdabcd", 32));
+  MARK();
+  // invalid data length
+  TEST_MUSTPASS(!rtp_rtcp->SendApplicationDefinedRTCPPacket(
+      0, 0, 0, "abcdabcdabcdabcdabcdabcdabcdabc", 31));
+  MARK();
+  // invalid data vector
+  TEST_MUSTPASS(!rtp_rtcp->SendApplicationDefinedRTCPPacket(0, 0, 0, NULL, 0));
+  MARK();
+  ANL();
+
+#ifdef WEBRTC_CODEC_RED
+  TEST(SetFECStatus);
+  ANL();
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  cinst.pltype = 126;
+  strcpy(cinst.plname, "red");
+  cinst.plfreq = 8000;
+  cinst.pacsize = 0;
+  cinst.channels = 1;
+  cinst.rate = 0;
+  TEST_MUSTPASS(codec->SetRecPayloadType(0, cinst));
+#if (!defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID))
+  cinst.pltype = 104;
+  strcpy(cinst.plname, "isac");
+  cinst.plfreq = 32000;
+  cinst.pacsize = 960;
+  cinst.channels = 1;
+  cinst.rate = 45000;
+#else
+  cinst.pltype = 119;
+  strcpy(cinst.plname, "isaclc");
+  cinst.plfreq = 16000;
+  cinst.pacsize = 320;
+  cinst.channels = 1;
+  cinst.rate = 40000;
+#endif
+  // We have to re-register the audio codec payload type as stopReceive will
+  // clean the database
+  TEST_MUSTPASS(codec->SetRecPayloadType(0, cinst));
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 8000));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 8000, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  TEST_LOG("Start playing a file as microphone again \n");
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(0, _mgr.AudioFilename(),
+                                                   true, true));
+  TEST_MUSTPASS(rtp_rtcp->SetFECStatus(0, true, 126));
+  MARK();
+  TEST_LOG("Should sound OK with FEC enabled\n");
+  SLEEP(4000);
+  TEST_MUSTPASS(rtp_rtcp->SetFECStatus(0, false));
+  MARK();
+#endif // #ifdef WEBRTC_CODEC_RED
+  TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(0));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  ANL();
+  AOK();
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestVideoSync
+// ----------------------------------------------------------------------------
+
+int VoEExtendedTest::TestVideoSync()
+{
+  PrepareTest("VideoSync");
+
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoEVideoSync* vsync = _mgr.VideoSyncPtr();
+
+  // check if this interface is supported
+  if (!vsync)
+  {
+    TEST_LOG("VoEVideoSync is not supported!");
+    return -1;
+  }
+
+#ifdef _USE_EXTENDED_TRACE_
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile((output_path +
+      "VoEVideoSync_trace.txt").c_str()));
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceStateInfo |
+                                            kTraceStateInfo |
+                                            kTraceWarning |
+                                            kTraceError |
+                                            kTraceCritical |
+                                            kTraceApiCall |
+                                            kTraceMemory |
+                                            kTraceInfo));
+#endif
+
+  TEST_MUSTPASS(voe_base_->Init());
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+
+  ///////////////////////////
+  // Actual test starts here
+
+  TEST(SetInitTimestamp);
+  ANL();
+  TEST_MUSTPASS(!vsync->SetInitTimestamp(0, 12345));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(vsync->SetInitTimestamp(0, 12345));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  MARK();
+  SLEEP(1000);
+  AOK();
+  ANL();
+
+  TEST(SetInitSequenceNumber);
+  ANL();
+  TEST_MUSTPASS(!vsync->SetInitSequenceNumber(0, 123));
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  MARK();
+  SLEEP(1000);
+  TEST_MUSTPASS(vsync->SetInitSequenceNumber(0, 123));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+  MARK();
+  SLEEP(1000);
+  AOK();
+  ANL();
+
+  unsigned int timeStamp;
+  TEST(GetPlayoutTimestamp);
+  ANL();
+  TEST_MUSTPASS(vsync->GetPlayoutTimestamp(0, timeStamp));
+  TEST_LOG("GetPlayoutTimestamp: %u", timeStamp);
+  SLEEP(1000);
+  TEST_MUSTPASS(vsync->GetPlayoutTimestamp(0, timeStamp));
+  TEST_LOG(" %u", timeStamp);
+  SLEEP(1000);
+  TEST_MUSTPASS(vsync->GetPlayoutTimestamp(0, timeStamp));
+  TEST_LOG(" %u\n", timeStamp);
+  AOK();
+  ANL();
+
+  TEST(SetMinimumPlayoutDelay);
+  ANL();
+  TEST_MUSTPASS(!vsync->SetMinimumPlayoutDelay(0, -1));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+  TEST_MUSTPASS(!vsync->SetMinimumPlayoutDelay(0, 5000));
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  MARK();
+
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  AOK();
+  ANL();
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  VoEExtendedTest::TestVolumeControl
+// ----------------------------------------------------------------------------
+
+int VoEExtendedTest::TestVolumeControl()
+{
+  PrepareTest("TestVolumeControl");
+
+  VoEBase* voe_base_ = _mgr.BasePtr();
+  VoEVolumeControl* volume = _mgr.VolumeControlPtr();
+#ifdef _TEST_FILE_
+  VoEFile* file = _mgr.FilePtr();
+#endif
+#ifdef _TEST_HARDWARE_
+  VoEHardware* hardware = _mgr.HardwarePtr();
+#endif
+
+#ifdef _USE_EXTENDED_TRACE_
+  TEST_MUSTPASS(VoiceEngine::SetTraceFile(
+      (output_path + "VoEVolumeControl_trace.txt").c_str()));
+  TEST_MUSTPASS(VoiceEngine::SetTraceFilter(kTraceStateInfo |
+                                            kTraceStateInfo |
+                                            kTraceWarning |
+                                            kTraceError |
+                                            kTraceCritical |
+                                            kTraceApiCall |
+                                            kTraceMemory |
+                                            kTraceInfo));
+#endif
+
+  TEST_MUSTPASS(voe_base_->Init());
+  TEST_MUSTPASS(voe_base_->CreateChannel());
+#if (defined _TEST_HARDWARE_ && (!defined(MAC_IPHONE)))
+#if defined(_WIN32)
+  TEST_MUSTPASS(hardware->SetRecordingDevice(-1));
+  TEST_MUSTPASS(hardware->SetPlayoutDevice(-1));
+#else
+  TEST_MUSTPASS(hardware->SetRecordingDevice(0));
+  TEST_MUSTPASS(hardware->SetPlayoutDevice(0));
+#endif
+#endif
+  TEST_MUSTPASS(voe_base_->SetLocalReceiver(0, 12345));
+  TEST_MUSTPASS(voe_base_->SetSendDestination(0, 12345, "127.0.0.1"));
+  TEST_MUSTPASS(voe_base_->StartReceive(0));
+  TEST_MUSTPASS(voe_base_->StartPlayout(0));
+  TEST_MUSTPASS(voe_base_->StartSend(0));
+#ifdef _TEST_FILE_
+  TEST_MUSTPASS(file->StartPlayingFileAsMicrophone(0, _mgr.AudioFilename(),
+                                                   true, true));
+#endif
+
+  ////////////////////////////
+  // Actual test starts here
+
+#if !defined(MAC_IPHONE)
+  TEST(SetSpeakerVolume);
+  ANL();
+  TEST_MUSTPASS(-1 != volume->SetSpeakerVolume(256));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  ANL();
+#endif // #if !defined(MAC_IPHONE)
+
+#if (!defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID))
+  TEST(SetMicVolume); ANL();
+  TEST_MUSTPASS(-1 != volume->SetMicVolume(256)); MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  ANL();
+#endif // #if (!defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID))
+
+#if !defined(MAC_IPHONE)
+  TEST(SetChannelOutputVolumeScaling);
+  ANL();
+  TEST_MUSTPASS(-1 != volume->SetChannelOutputVolumeScaling(0, (float)-0.1));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(-1 != volume->SetChannelOutputVolumeScaling(0, (float)10.1));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  ANL();
+#endif // #if !defined(MAC_IPHONE)
+#if (!defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID))
+  TEST(SetOutputVolumePan);
+  ANL();
+  TEST_MUSTPASS(-1 != volume->SetOutputVolumePan(-1, (float)-0.1,
+                                                 (float)1.0));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(-1 != volume->SetOutputVolumePan(-1, (float)1.1,
+                                                 (float)1.0));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(-1 != volume->SetOutputVolumePan(-1, (float)1.0,
+                                                 (float)-0.1));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(-1 != volume->SetOutputVolumePan(-1, (float)1.0,
+                                                 (float)1.1));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  ANL();
+
+  TEST(SetChannelOutputVolumePan);
+  ANL();
+  TEST_MUSTPASS(-1 != volume->SetOutputVolumePan(0, (float)-0.1,
+                                                 (float)1.0));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(-1 != volume->SetOutputVolumePan(0, (float)1.1,
+                                                 (float)1.0));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(-1 != volume->SetOutputVolumePan(0, (float)1.0,
+                                                 (float)-0.1));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  TEST_MUSTPASS(-1 != volume->SetOutputVolumePan(0, (float)1.0,
+                                                 (float)1.1));
+  MARK();
+  TEST_MUSTPASS(VE_INVALID_ARGUMENT != voe_base_->LastError());
+  ANL();
+#endif // #if (!defined(MAC_IPHONE) && !defined(WEBRTC_ANDROID))
+#ifdef _TEST_FILE_
+  TEST_MUSTPASS(file->StopPlayingFileAsMicrophone(0));
+#endif
+  TEST_MUSTPASS(voe_base_->StopSend(0));
+  TEST_MUSTPASS(voe_base_->StopPlayout(0));
+  TEST_MUSTPASS(voe_base_->StopReceive(0));
+  TEST_MUSTPASS(voe_base_->DeleteChannel(0));
+  TEST_MUSTPASS(voe_base_->Terminate());
+
+  AOK();
+  ANL();
+  return 0;
+}
+
+} //  namespace voetest
diff --git a/src/voice_engine/test/auto_test/voe_extended_test.h b/src/voice_engine/test/auto_test/voe_extended_test.h
new file mode 100644
index 0000000..3552cf3
--- /dev/null
+++ b/src/voice_engine/test/auto_test/voe_extended_test.h
@@ -0,0 +1,462 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_EXTENDED_TEST_H
+#define WEBRTC_VOICE_ENGINE_VOE_EXTENDED_TEST_H
+
+#include "voe_standard_test.h"
+#include "audio_device.h"
+
+namespace voetest {
+
+class VoETestManager;
+
+// ----------------------------------------------------------------------------
+//  AudioDeviceModule
+//
+//  Implementation of the ADM to be used as external ADM in VoiceEngine.
+//  This implementation is only a mock class, i.e., it does not provide
+//  any real audio support.
+// ----------------------------------------------------------------------------
+
+class AudioDeviceModuleImpl : public AudioDeviceModule {
+ public:
+  // Factory methods
+  static AudioDeviceModuleImpl* Create();
+  static bool Destroy(AudioDeviceModuleImpl* adm);
+
+  // Helper methods which allows us to get some handy information about
+  // this mock implementation.
+  int32_t ReferenceCounter() const {
+    return _ref_count;
+  }
+
+  // RefCountedModule implementation (mocks default implementation)
+  virtual int32_t AddRef();
+  virtual int32_t Release();
+
+  // Module implementation
+  virtual int32_t Version(char* version,
+                          uint32_t& remaining_buffer_in_bytes,
+                          uint32_t& position) const {
+    return 0;
+  }
+  virtual int32_t ChangeUniqueId(const int32_t id) {
+    return 0;
+  }
+  virtual int32_t TimeUntilNextProcess() {
+    return -1;
+  }
+  virtual int32_t Process() {
+    return 0;
+  }
+
+  // AudioDeviceModule implementation
+  virtual int32_t ActiveAudioLayer(AudioLayer* audioLayer) const {
+    return 0;
+  }
+
+  virtual ErrorCode LastError() const {
+    return static_cast<ErrorCode> (0);
+  }
+  virtual int32_t RegisterEventObserver(AudioDeviceObserver* eventCallback) {
+    return 0;
+  }
+
+  virtual int32_t RegisterAudioCallback(AudioTransport* audioCallback) {
+    return 0;
+  }
+
+  virtual int32_t Init() {
+    return 0;
+  }
+  virtual int32_t Terminate() {
+    return 0;
+  }
+  virtual bool Initialized() const {
+    return true;
+  }
+
+  virtual int16_t PlayoutDevices() {
+    return -1;
+  }
+  virtual int16_t RecordingDevices() {
+    return -1;
+  }
+  virtual int32_t PlayoutDeviceName(uint16_t index,
+                                    char name[kAdmMaxDeviceNameSize],
+                                    char guid[kAdmMaxGuidSize]) {
+    return -1;
+  }
+  virtual int32_t RecordingDeviceName(uint16_t index,
+                                      char name[kAdmMaxDeviceNameSize],
+                                      char guid[kAdmMaxGuidSize]) {
+    return -1;
+  }
+
+  virtual int32_t SetPlayoutDevice(uint16_t index) {
+    return 0;
+  }
+  virtual int32_t SetPlayoutDevice(WindowsDeviceType device) {
+    return 0;
+  }
+  virtual int32_t SetRecordingDevice(uint16_t index) {
+    return 0;
+  }
+  virtual int32_t SetRecordingDevice(WindowsDeviceType device) {
+    return 0;
+  }
+
+  virtual int32_t PlayoutIsAvailable(bool* available) {
+    *available = true;
+    return 0;
+  }
+  virtual int32_t InitPlayout() {
+    return 0;
+  }
+  virtual bool PlayoutIsInitialized() const {
+    return true;
+  }
+  virtual int32_t RecordingIsAvailable(bool* available) {
+    *available = true;
+    return 0;
+  }
+  virtual int32_t InitRecording() {
+    return 0;
+  }
+  virtual bool RecordingIsInitialized() const {
+    return true;
+  }
+
+  virtual int32_t StartPlayout() {
+    return 0;
+  }
+  virtual int32_t StopPlayout() {
+    return 0;
+  }
+  virtual bool Playing() const {
+    return true;
+  }
+  virtual int32_t StartRecording() {
+    return 0;
+  }
+  virtual int32_t StopRecording() {
+    return 0;
+  }
+  virtual bool Recording() const {
+    return true;
+  }
+
+  virtual int32_t SetAGC(bool enable) {
+    return -1;
+  }
+  virtual bool AGC() const {
+    return false;
+  }
+
+  virtual int32_t SetWaveOutVolume(uint16_t volumeLeft,
+                                   uint16_t volumeRight) {
+    return -1;
+  }
+  virtual int32_t WaveOutVolume(uint16_t* volumeLeft,
+                                uint16_t* volumeRight) const {
+    return -1;
+  }
+
+  virtual int32_t SpeakerIsAvailable(bool* available) {
+    *available = true;
+    return 0;
+  }
+  virtual int32_t InitSpeaker() {
+    return 0;
+  }
+  virtual bool SpeakerIsInitialized() const {
+    return true;
+  }
+  virtual int32_t MicrophoneIsAvailable(bool* available) {
+    *available = true;
+    return 0;
+  }
+  virtual int32_t InitMicrophone() {
+    return 0;
+  }
+  virtual bool MicrophoneIsInitialized() const {
+    return true;
+  }
+
+  virtual int32_t SpeakerVolumeIsAvailable(bool* available) {
+    return -1;
+  }
+  virtual int32_t SetSpeakerVolume(uint32_t volume) {
+    return -1;
+  }
+  virtual int32_t SpeakerVolume(uint32_t* volume) const {
+    return -1;
+  }
+  virtual int32_t MaxSpeakerVolume(uint32_t* maxVolume) const {
+    return -1;
+  }
+  virtual int32_t MinSpeakerVolume(uint32_t* minVolume) const {
+    return -1;
+  }
+  virtual int32_t SpeakerVolumeStepSize(uint16_t* stepSize) const {
+    return -1;
+  }
+
+  virtual int32_t MicrophoneVolumeIsAvailable(bool* available) {
+    return -1;
+  }
+  virtual int32_t SetMicrophoneVolume(uint32_t volume) {
+    return -1;
+  }
+  virtual int32_t MicrophoneVolume(uint32_t* volume) const {
+    return -1;
+  }
+  virtual int32_t MaxMicrophoneVolume(uint32_t* maxVolume) const {
+    return -1;
+  }
+  virtual int32_t MinMicrophoneVolume(uint32_t* minVolume) const {
+    return -1;
+  }
+  virtual int32_t MicrophoneVolumeStepSize(uint16_t* stepSize) const {
+    return -1;
+  }
+
+  virtual int32_t SpeakerMuteIsAvailable(bool* available) {
+    return -1;
+  }
+  virtual int32_t SetSpeakerMute(bool enable) {
+    return -1;
+  }
+  virtual int32_t SpeakerMute(bool* enabled) const {
+    return -1;
+  }
+
+  virtual int32_t MicrophoneMuteIsAvailable(bool* available) {
+    return -1;
+  }
+  virtual int32_t SetMicrophoneMute(bool enable) {
+    return -1;
+  }
+  virtual int32_t MicrophoneMute(bool* enabled) const {
+    return -1;
+  }
+
+  virtual int32_t MicrophoneBoostIsAvailable(bool* available) {
+    return -1;
+  }
+  virtual int32_t SetMicrophoneBoost(bool enable) {
+    return -1;
+  }
+  virtual int32_t MicrophoneBoost(bool* enabled) const {
+    return -1;
+  }
+
+  virtual int32_t StereoPlayoutIsAvailable(bool* available) const {
+    return -1;
+  }
+  virtual int32_t SetStereoPlayout(bool enable) {
+    return -1;
+  }
+  virtual int32_t StereoPlayout(bool* enabled) const {
+    return -1;
+  }
+  virtual int32_t StereoRecordingIsAvailable(bool* available) const {
+    return -1;
+  }
+  virtual int32_t SetStereoRecording(bool enable) {
+    return -1;
+  }
+  virtual int32_t StereoRecording(bool* enabled) const {
+    return -1;
+  }
+  virtual int32_t SetRecordingChannel(const ChannelType channel) {
+    return -1;
+  }
+  virtual int32_t RecordingChannel(ChannelType* channel) const {
+    return -1;
+  }
+
+  virtual int32_t SetPlayoutBuffer(const BufferType type, uint16_t sizeMS = 0) {
+    return -1;
+  }
+  virtual int32_t PlayoutBuffer(BufferType* type, uint16_t* sizeMS) const {
+    return -1;
+  }
+  virtual int32_t PlayoutDelay(uint16_t* delayMS) const {
+    return -1;
+  }
+  virtual int32_t RecordingDelay(uint16_t* delayMS) const {
+    return -1;
+  }
+
+  virtual int32_t CPULoad(uint16_t* load) const {
+    return -1;
+  }
+
+  virtual int32_t StartRawOutputFileRecording(
+      const char pcmFileNameUTF8[kAdmMaxFileNameSize]) {
+    return -1;
+  }
+  virtual int32_t StopRawOutputFileRecording() {
+    return -1;
+  }
+  virtual int32_t StartRawInputFileRecording(
+      const char pcmFileNameUTF8[kAdmMaxFileNameSize]) {
+    return -1;
+  }
+  virtual int32_t StopRawInputFileRecording() {
+    return -1;
+  }
+
+  virtual int32_t SetRecordingSampleRate(const uint32_t samplesPerSec) {
+    return -1;
+  }
+  virtual int32_t RecordingSampleRate(uint32_t* samplesPerSec) const {
+    return -1;
+  }
+  virtual int32_t SetPlayoutSampleRate(const uint32_t samplesPerSec) {
+    return -1;
+  }
+  virtual int32_t PlayoutSampleRate(uint32_t* samplesPerSec) const {
+    return -1;
+  }
+
+  virtual int32_t ResetAudioDevice() {
+    return -1;
+  }
+  virtual int32_t SetLoudspeakerStatus(bool enable) {
+    return -1;
+  }
+  virtual int32_t GetLoudspeakerStatus(bool* enabled) const {
+    return -1;
+  }
+
+ protected:
+  AudioDeviceModuleImpl();
+  ~AudioDeviceModuleImpl();
+
+ private:
+  volatile int32_t _ref_count;
+};
+
+// ----------------------------------------------------------------------------
+//	Transport
+// ----------------------------------------------------------------------------
+
+class ExtendedTestTransport : public Transport {
+ public:
+  ExtendedTestTransport(VoENetwork* ptr);
+  ~ExtendedTestTransport();
+  VoENetwork* myNetw;
+
+ protected:
+  virtual int SendPacket(int channel, const void *data, int len);
+  virtual int SendRTCPPacket(int channel, const void *data, int len);
+
+ private:
+  static bool Run(void* ptr);
+  bool Process();
+
+ private:
+  ThreadWrapper* _thread;
+  CriticalSectionWrapper* _lock;
+  EventWrapper* _event;
+
+ private:
+  unsigned char _packetBuffer[1612];
+  int _length;
+  int _channel;
+};
+
+class XTransport : public Transport {
+ public:
+  XTransport(VoENetwork* netw, VoEFile* file);
+  VoENetwork* _netw;
+  VoEFile* _file;
+
+ public:
+  virtual int SendPacket(int channel, const void *data, int len);
+  virtual int SendRTCPPacket(int channel, const void *data, int len);
+};
+
+class XRTPObserver : public VoERTPObserver {
+ public:
+  XRTPObserver();
+  ~XRTPObserver();
+  virtual void OnIncomingCSRCChanged(const int channel,
+                                     const unsigned int CSRC,
+                                     const bool added);
+  virtual void OnIncomingSSRCChanged(const int channel,
+                                     const unsigned int SSRC);
+ public:
+  unsigned int _SSRC;
+};
+
+// ----------------------------------------------------------------------------
+//	VoEExtendedTest
+// ----------------------------------------------------------------------------
+
+class VoEExtendedTest : public VoiceEngineObserver,
+                        public VoEConnectionObserver {
+ public:
+  VoEExtendedTest(VoETestManager& mgr);
+  ~VoEExtendedTest();
+  int PrepareTest(const char* str) const;
+  int TestPassed(const char* str) const;
+  int TestBase();
+  int TestCallReport();
+  int TestCodec();
+  int TestDtmf();
+  int TestEncryption();
+  int TestExternalMedia();
+  int TestFile();
+  int TestMixing();
+  int TestHardware();
+  int TestNetEqStats();
+  int TestNetwork();
+  int TestRTP_RTCP();
+  int TestVideoSync();
+  int TestVolumeControl();
+ public:
+  int ErrorCode() const {
+    return _errCode;
+  }
+  void ClearErrorCode() {
+    _errCode = 0;
+  }
+ protected:
+  // from VoiceEngineObserver
+  void CallbackOnError(const int errCode, const int channel);
+  void CallbackOnTrace(const TraceLevel level, const char* message, const int length);
+ protected:
+  // from VoEConnectionObserver
+  void OnPeriodicDeadOrAlive(const int channel, const bool alive);
+ private:
+  void Play(int channel, unsigned int timeMillisec, bool addFileAsMicrophone = false,
+            bool addTimeMarker = false);
+  void Sleep(unsigned int timeMillisec, bool addMarker = false);
+  void StartMedia(int channel, int rtpPort, bool listen, bool playout, bool send);
+  void StopMedia(int channel);
+  int RunMixingTest(int num_remote_channels, int num_local_channels,
+                    int16_t input_value, int16_t max_output_value,
+                    int16_t min_output_value);
+ private:
+  VoETestManager& _mgr;
+ private:
+  int _errCode;
+  bool _alive;
+  bool _listening[32];
+  bool _playing[32];
+  bool _sending[32];
+};
+
+} //  namespace voetest
+#endif // WEBRTC_VOICE_ENGINE_VOE_EXTENDED_TEST_H
diff --git a/src/voice_engine/test/auto_test/voe_standard_test.cc b/src/voice_engine/test/auto_test/voe_standard_test.cc
new file mode 100644
index 0000000..69eb490
--- /dev/null
+++ b/src/voice_engine/test/auto_test/voe_standard_test.cc
@@ -0,0 +1,611 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <string.h>
+#include <math.h>
+
+#include "engine_configurations.h"
+#if defined(_WIN32)
+#include <conio.h>   // Exists only on windows.
+#include <tchar.h>
+#endif
+
+#include "voice_engine/test/auto_test/voe_standard_test.h"
+
+#if defined (_ENABLE_VISUAL_LEAK_DETECTOR_) && defined(_DEBUG) && \
+    defined(_WIN32) && !defined(_INSTRUMENTATION_TESTING_)
+#include "vld.h"
+#endif
+
+#include "system_wrappers/interface/critical_section_wrapper.h"
+#include "system_wrappers/interface/event_wrapper.h"
+#include "system_wrappers/interface/thread_wrapper.h"
+#include "voice_engine/voice_engine_defines.h"
+#include "voice_engine/test/auto_test/automated_mode.h"
+
+#ifdef _TEST_NETEQ_STATS_
+#include "voice_engine/include/voe_neteq_stats.h"
+#endif
+
+#include "voice_engine/test/auto_test/voe_cpu_test.h"
+#include "voice_engine/test/auto_test/voe_extended_test.h"
+#include "voice_engine/test/auto_test/voe_stress_test.h"
+#include "voice_engine/test/auto_test/voe_unit_test.h"
+
+using namespace webrtc;
+
+namespace voetest {
+
+int dummy = 0;  // Dummy used in different functions to avoid warnings
+
+void SubAPIManager::DisplayStatus() const {
+  TEST_LOG("Supported sub APIs:\n\n");
+  if (_base)
+    TEST_LOG("  Base\n");
+  if (_callReport)
+    TEST_LOG("  CallReport\n");
+  if (_codec)
+    TEST_LOG("  Codec\n");
+  if (_dtmf)
+    TEST_LOG("  Dtmf\n");
+  if (_encryption)
+    TEST_LOG("  Encryption\n");
+  if (_externalMedia)
+    TEST_LOG("  ExternalMedia\n");
+  if (_file)
+    TEST_LOG("  File\n");
+  if (_hardware)
+    TEST_LOG("  Hardware\n");
+  if (_netEqStats)
+    TEST_LOG("  NetEqStats\n");
+  if (_network)
+    TEST_LOG("  Network\n");
+  if (_rtp_rtcp)
+    TEST_LOG("  RTP_RTCP\n");
+  if (_videoSync)
+    TEST_LOG("  VideoSync\n");
+  if (_volumeControl)
+    TEST_LOG("  VolumeControl\n");
+  if (_apm)
+    TEST_LOG("  AudioProcessing\n");
+  ANL();
+  TEST_LOG("Excluded sub APIs:\n\n");
+  if (!_base)
+    TEST_LOG("  Base\n");
+  if (!_callReport)
+    TEST_LOG("  CallReport\n");
+  if (!_codec)
+    TEST_LOG("  Codec\n");
+  if (!_dtmf)
+    TEST_LOG("  Dtmf\n");
+  if (!_encryption)
+    TEST_LOG("  Encryption\n");
+  if (!_externalMedia)
+    TEST_LOG("  ExternamMedia\n");
+  if (!_file)
+    TEST_LOG("  File\n");
+  if (!_hardware)
+    TEST_LOG("  Hardware\n");
+  if (!_netEqStats)
+    TEST_LOG("  NetEqStats\n");
+  if (!_network)
+    TEST_LOG("  Network\n");
+  if (!_rtp_rtcp)
+    TEST_LOG("  RTP_RTCP\n");
+  if (!_videoSync)
+    TEST_LOG("  VideoSync\n");
+  if (!_volumeControl)
+    TEST_LOG("  VolumeControl\n");
+  if (!_apm)
+    TEST_LOG("  AudioProcessing\n");
+  ANL();
+}
+
+bool SubAPIManager::GetExtendedMenuSelection(ExtendedSelection& sel) {
+  printf("------------------------------------------------\n");
+  printf("Select extended test\n\n");
+  printf(" (0)  None\n");
+  printf("- - - - - - - - - - - - - - - - - - - - - - - - \n");
+  printf(" (1)  Base");
+  if (_base)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (2)  CallReport");
+  if (_callReport)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (3)  Codec");
+  if (_codec)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (4)  Dtmf");
+  if (_dtmf)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (5)  Encryption");
+  if (_encryption)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (6)  VoEExternalMedia");
+  if (_externalMedia)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (7)  File");
+  if (_file)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (8)  Hardware");
+  if (_hardware)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (9) NetEqStats");
+  if (_netEqStats)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (10) Network");
+  if (_network)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (11) RTP_RTCP");
+  if (_rtp_rtcp)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (12) VideoSync");
+  if (_videoSync)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf(" (13) VolumeControl");
+  if (_volumeControl)
+    printf("\n");
+  else
+    printf(" (NA)\n");
+  printf("\n: ");
+
+  ExtendedSelection xsel(XSEL_Invalid);
+  int selection(0);
+  dummy = scanf("%d", &selection);
+
+  switch (selection) {
+    case 0:
+      xsel = XSEL_None;
+      break;
+    case 1:
+      if (_base)
+        xsel = XSEL_Base;
+      break;
+    case 2:
+      if (_callReport)
+        xsel = XSEL_CallReport;
+      break;
+    case 3:
+      if (_codec)
+        xsel = XSEL_Codec;
+      break;
+    case 4:
+      if (_dtmf)
+        xsel = XSEL_DTMF;
+      break;
+    case 5:
+      if (_encryption)
+        xsel = XSEL_Encryption;
+      break;
+    case 6:
+      if (_externalMedia)
+        xsel = XSEL_ExternalMedia;
+      break;
+    case 7:
+      if (_file)
+        xsel = XSEL_File;
+      break;
+    case 8:
+      if (_hardware)
+        xsel = XSEL_Hardware;
+      break;
+    case 9:
+      if (_netEqStats)
+        xsel = XSEL_NetEqStats;
+      break;
+    case 10:
+      if (_network)
+        xsel = XSEL_Network;
+      break;
+    case 11:
+      if (_rtp_rtcp)
+        xsel = XSEL_RTP_RTCP;
+      break;
+    case 12:
+      if (_videoSync)
+        xsel = XSEL_VideoSync;
+      break;
+    case 13:
+      if (_volumeControl)
+        xsel = XSEL_VolumeControl;
+      break;
+    default:
+      xsel = XSEL_Invalid;
+      break;
+  }
+  if (xsel == XSEL_Invalid)
+    printf("Invalid selection!\n");
+
+  sel = xsel;
+  _xsel = xsel;
+
+  return (xsel != XSEL_Invalid);
+}
+
+VoETestManager::VoETestManager()
+    : initialized_(false),
+      voice_engine_(NULL),
+      voe_base_(0),
+      voe_call_report_(0),
+      voe_codec_(0),
+      voe_dtmf_(0),
+      voe_encrypt_(0),
+      voe_xmedia_(0),
+      voe_file_(0),
+      voe_hardware_(0),
+      voe_network_(0),
+#ifdef _TEST_NETEQ_STATS_
+      voe_neteq_stats_(NULL),
+#endif
+      voe_rtp_rtcp_(0),
+      voe_vsync_(0),
+      voe_volume_control_(0),
+      voe_apm_(0)
+{
+}
+
+VoETestManager::~VoETestManager() {
+}
+
+bool VoETestManager::Init() {
+  if (initialized_)
+    return true;
+
+  if (VoiceEngine::SetTraceFile(NULL) != -1) {
+    // should not be possible to call a Trace method before the VoE is
+    // created
+    TEST_LOG("\nError at line: %i (VoiceEngine::SetTraceFile()"
+      "should fail)!\n", __LINE__);
+    return false;
+  }
+
+  voice_engine_ = VoiceEngine::Create();
+  if (!voice_engine_) {
+    TEST_LOG("Failed to create VoiceEngine\n");
+    return false;
+  }
+
+  return true;
+}
+
+void VoETestManager::GetInterfaces() {
+  if (voice_engine_) {
+    voe_base_ = VoEBase::GetInterface(voice_engine_);
+    voe_codec_ = VoECodec::GetInterface(voice_engine_);
+    voe_volume_control_ = VoEVolumeControl::GetInterface(voice_engine_);
+    voe_dtmf_ = VoEDtmf::GetInterface(voice_engine_);
+    voe_rtp_rtcp_ = VoERTP_RTCP::GetInterface(voice_engine_);
+    voe_apm_ = VoEAudioProcessing::GetInterface(voice_engine_);
+    voe_network_ = VoENetwork::GetInterface(voice_engine_);
+    voe_file_ = VoEFile::GetInterface(voice_engine_);
+#ifdef _TEST_VIDEO_SYNC_
+    voe_vsync_ = VoEVideoSync::GetInterface(voice_engine_);
+#endif
+    voe_encrypt_ = VoEEncryption::GetInterface(voice_engine_);
+    voe_hardware_ = VoEHardware::GetInterface(voice_engine_);
+    // Set the audio layer to use in all tests
+    if (voe_hardware_) {
+      int res = voe_hardware_->SetAudioDeviceLayer(TESTED_AUDIO_LAYER);
+      if (res < 0) {
+        printf("\nERROR: failed to set audio layer to use in "
+          "testing\n");
+      } else {
+        printf("\nAudio layer %d will be used in testing\n",
+               TESTED_AUDIO_LAYER);
+      }
+    }
+#ifdef _TEST_XMEDIA_
+    voe_xmedia_ = VoEExternalMedia::GetInterface(voice_engine_);
+#endif
+#ifdef _TEST_CALL_REPORT_
+    voe_call_report_ = VoECallReport::GetInterface(voice_engine_);
+#endif
+#ifdef _TEST_NETEQ_STATS_
+    voe_neteq_stats_ = VoENetEqStats::GetInterface(voice_engine_);
+#endif
+  }
+}
+
+int VoETestManager::ReleaseInterfaces() {
+  bool releaseOK(true);
+
+  if (voe_base_) {
+    voe_base_->Release();
+    voe_base_ = NULL;
+  }
+  if (voe_codec_) {
+    voe_codec_->Release();
+    voe_codec_ = NULL;
+  }
+  if (voe_volume_control_) {
+    voe_volume_control_->Release();
+    voe_volume_control_ = NULL;
+  }
+  if (voe_dtmf_) {
+    voe_dtmf_->Release();
+    voe_dtmf_ = NULL;
+  }
+  if (voe_rtp_rtcp_) {
+    voe_rtp_rtcp_->Release();
+    voe_rtp_rtcp_ = NULL;
+  }
+  if (voe_apm_) {
+    voe_apm_->Release();
+    voe_apm_ = NULL;
+  }
+  if (voe_network_) {
+    voe_network_->Release();
+    voe_network_ = NULL;
+  }
+  if (voe_file_) {
+    voe_file_->Release();
+    voe_file_ = NULL;
+  }
+#ifdef _TEST_VIDEO_SYNC_
+  if (voe_vsync_) {
+    voe_vsync_->Release();
+    voe_vsync_ = NULL;
+  }
+#endif
+  if (voe_encrypt_) {
+    voe_encrypt_->Release();
+    voe_encrypt_ = NULL;
+  }
+  if (voe_hardware_) {
+    voe_hardware_->Release();
+    voe_hardware_ = NULL;
+  }
+#ifdef _TEST_XMEDIA_
+  if (voe_xmedia_) {
+    voe_xmedia_->Release();
+    voe_xmedia_ = NULL;
+  }
+#endif
+#ifdef _TEST_CALL_REPORT_
+  if (voe_call_report_) {
+    voe_call_report_->Release();
+    voe_call_report_ = NULL;
+  }
+#endif
+#ifdef _TEST_NETEQ_STATS_
+  if (voe_neteq_stats_) {
+    voe_neteq_stats_->Release();
+    voe_neteq_stats_ = NULL;
+  }
+#endif
+  if (false == VoiceEngine::Delete(voice_engine_)) {
+    TEST_LOG("\n\nVoiceEngine::Delete() failed. \n");
+    releaseOK = false;
+  }
+
+  if (VoiceEngine::SetTraceFile(NULL) != -1) {
+    TEST_LOG("\nError at line: %i (VoiceEngine::SetTraceFile()"
+      "should fail)!\n", __LINE__);
+  }
+
+  return (releaseOK == true) ? 0 : -1;
+}
+
+int run_auto_test(TestType test_type, ExtendedSelection ext_selection) {
+  assert(test_type != Standard);
+
+  SubAPIManager api_manager;
+  api_manager.DisplayStatus();
+
+  ////////////////////////////////////
+  // Create VoiceEngine and sub API:s
+
+  voetest::VoETestManager test_manager;
+  if (!test_manager.Init()) {
+    return -1;
+  }
+  test_manager.GetInterfaces();
+
+  int result(-1);
+  if (test_type == Extended) {
+    VoEExtendedTest xtend(test_manager);
+
+    result = 0;
+    while (ext_selection != XSEL_None) {
+      if (ext_selection == XSEL_Base || ext_selection == XSEL_All) {
+        if ((result = xtend.TestBase()) == -1)
+          break;
+        xtend.TestPassed("Base");
+      }
+      if (ext_selection == XSEL_CallReport || ext_selection == XSEL_All) {
+        if ((result = xtend.TestCallReport()) == -1)
+          break;
+        xtend.TestPassed("CallReport");
+      }
+      if (ext_selection == XSEL_Codec || ext_selection == XSEL_All) {
+        if ((result = xtend.TestCodec()) == -1)
+          break;
+        xtend.TestPassed("Codec");
+      }
+      if (ext_selection == XSEL_DTMF || ext_selection == XSEL_All) {
+        if ((result = xtend.TestDtmf()) == -1)
+          break;
+        xtend.TestPassed("Dtmf");
+      }
+      if (ext_selection == XSEL_Encryption || ext_selection == XSEL_All) {
+        if ((result = xtend.TestEncryption()) == -1)
+          break;
+        xtend.TestPassed("Encryption");
+      }
+      if (ext_selection == XSEL_ExternalMedia || ext_selection == XSEL_All) {
+        if ((result = xtend.TestExternalMedia()) == -1)
+          break;
+        xtend.TestPassed("ExternalMedia");
+      }
+      if (ext_selection == XSEL_File || ext_selection == XSEL_All) {
+        if ((result = xtend.TestFile()) == -1)
+          break;
+        xtend.TestPassed("File");
+      }
+      if (ext_selection == XSEL_Hardware || ext_selection == XSEL_All) {
+        if ((result = xtend.TestHardware()) == -1)
+          break;
+        xtend.TestPassed("Hardware");
+      }
+      if (ext_selection == XSEL_NetEqStats || ext_selection == XSEL_All) {
+        if ((result = xtend.TestNetEqStats()) == -1)
+          break;
+        xtend.TestPassed("NetEqStats");
+      }
+      if (ext_selection == XSEL_Network || ext_selection == XSEL_All) {
+        if ((result = xtend.TestNetwork()) == -1)
+          break;
+        xtend.TestPassed("Network");
+      }
+      if (ext_selection == XSEL_RTP_RTCP || ext_selection == XSEL_All) {
+        if ((result = xtend.TestRTP_RTCP()) == -1)
+          break;
+        xtend.TestPassed("RTP_RTCP");
+      }
+      if (ext_selection == XSEL_VideoSync || ext_selection == XSEL_All) {
+        if ((result = xtend.TestVideoSync()) == -1)
+          break;
+        xtend.TestPassed("VideoSync");
+      }
+      if (ext_selection == XSEL_VolumeControl || ext_selection == XSEL_All) {
+        if ((result = xtend.TestVolumeControl()) == -1)
+          break;
+        xtend.TestPassed("VolumeControl");
+      }
+      api_manager.GetExtendedMenuSelection(ext_selection);
+    } // while (extendedSel != XSEL_None)
+  } else if (test_type == Stress) {
+    VoEStressTest stressTest(test_manager);
+    result = stressTest.DoTest();
+  } else if (test_type == Unit) {
+    VoEUnitTest unitTest(test_manager);
+    result = unitTest.DoTest();
+  } else if (test_type == CPU) {
+    VoECpuTest cpuTest(test_manager);
+    result = cpuTest.DoTest();
+  } else {
+    // Should never end up here
+    assert(false);
+  }
+
+  //////////////////
+  // Release/Delete
+
+  int release_ok = test_manager.ReleaseInterfaces();
+
+  if ((0 == result) && (release_ok != -1)) {
+    TEST_LOG("\n\n*** All tests passed *** \n\n");
+  } else {
+    TEST_LOG("\n\n*** Test failed! *** \n");
+  }
+
+  return 0;
+}
+} // namespace voetest
+
+int RunInManualMode(int argc, char** argv) {
+  using namespace voetest;
+
+  SubAPIManager api_manager;
+  api_manager.DisplayStatus();
+
+  printf("----------------------------\n");
+  printf("Select type of test\n\n");
+  printf(" (0)  Quit\n");
+  printf(" (1)  Standard test\n");
+  printf(" (2)  Extended test(s)...\n");
+  printf(" (3)  Stress test(s)...\n");
+  printf(" (4)  Unit test(s)...\n");
+  printf(" (5)  CPU & memory reference test [Windows]...\n");
+  printf("\n: ");
+
+  int selection(0);
+
+  dummy = scanf("%d", &selection);
+
+  ExtendedSelection ext_selection = XSEL_Invalid;
+  TestType test_type = Invalid;
+
+  switch (selection) {
+    case 0:
+      return 0;
+    case 1:
+      test_type = Standard;
+      break;
+    case 2:
+      test_type = Extended;
+      while (!api_manager.GetExtendedMenuSelection(ext_selection))
+        continue;
+      break;
+    case 3:
+      test_type = Stress;
+      break;
+    case 4:
+      test_type = Unit;
+      break;
+    case 5:
+      test_type = CPU;
+      break;
+    default:
+      TEST_LOG("Invalid selection!\n");
+      return 0;
+  }
+
+  if (test_type == Standard) {
+    TEST_LOG("\n\n+++ Running standard tests +++\n\n");
+
+    // Currently, all googletest-rewritten tests are in the "automated" suite.
+    return RunInAutomatedMode(argc, argv);
+  }
+
+  // Function that can be called from other entry functions.
+  return run_auto_test(test_type, ext_selection);
+}
+
+// ----------------------------------------------------------------------------
+//                                       main
+// ----------------------------------------------------------------------------
+
+#if !defined(MAC_IPHONE)
+int main(int argc, char** argv) {
+  if (argc > 1 && std::string(argv[1]) == "--automated") {
+    // This function is defined in automated_mode.cc to avoid macro clashes
+    // with googletest (for instance the ASSERT_TRUE macro).
+    return RunInAutomatedMode(argc, argv);
+  }
+
+  return RunInManualMode(argc, argv);
+}
+#endif //#if !defined(MAC_IPHONE)
diff --git a/src/voice_engine/test/auto_test/voe_standard_test.h b/src/voice_engine/test/auto_test/voe_standard_test.h
new file mode 100644
index 0000000..bbb6d2e
--- /dev/null
+++ b/src/voice_engine/test/auto_test/voe_standard_test.h
@@ -0,0 +1,236 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_STANDARD_TEST_H
+#define WEBRTC_VOICE_ENGINE_VOE_STANDARD_TEST_H
+
+#include <stdio.h>
+#include <string>
+
+#include "resource_manager.h"
+#include "voe_audio_processing.h"
+#include "voe_base.h"
+#include "voe_dtmf.h"
+#include "voe_errors.h"
+#include "voe_file.h"
+#include "voe_rtp_rtcp.h"
+#include "voe_test_defines.h"
+#include "voe_test_interface.h"
+#ifdef WEBRTC_VOICE_ENGINE_CALL_REPORT_API
+#include "voe_call_report.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_CODEC_API
+#include "voe_codec.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
+#include "voe_encryption.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
+#include "voe_external_media.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_HARDWARE_API
+#include "voe_hardware.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_NETWORK_API
+#include "voe_network.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
+#include "voe_video_sync.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
+#include "voe_volume_control.h"
+#endif
+
+#ifdef _TEST_NETEQ_STATS_
+namespace webrtc {
+class CriticalSectionWrapper;
+class ThreadWrapper;
+class VoENetEqStats;
+}
+#endif
+
+#if defined(WEBRTC_ANDROID)
+extern char mobileLogMsg[640];
+#endif
+
+namespace voetest {
+
+class SubAPIManager {
+ public:
+  SubAPIManager()
+    : _base(true),
+      _callReport(false),
+      _codec(false),
+      _dtmf(false),
+      _encryption(false),
+      _externalMedia(false),
+      _file(false),
+      _hardware(false),
+      _netEqStats(false),
+      _network(false),
+      _rtp_rtcp(false),
+      _videoSync(false),
+      _volumeControl(false),
+      _apm(false),
+      _xsel(XSEL_Invalid) {
+#ifdef WEBRTC_VOICE_ENGINE_CALL_REPORT_API
+      _callReport = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_CODEC_API
+      _codec = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_DTMF_API
+      _dtmf = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
+      _encryption = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
+      _externalMedia = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_FILE_API
+      _file = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_HARDWARE_API
+      _hardware = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_NETEQ_STATS_API
+      _netEqStats = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_NETWORK_API
+      _network = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_RTP_RTCP_API
+      _rtp_rtcp = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
+      _videoSync = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
+      _volumeControl = true;
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
+      _apm = true;
+#endif
+  }
+
+  void DisplayStatus() const;
+  bool GetExtendedMenuSelection(ExtendedSelection& sel);
+
+ private:
+  bool _base, _callReport, _codec, _dtmf, _encryption;
+  bool _externalMedia, _file, _hardware;
+  bool _netEqStats, _network, _rtp_rtcp, _videoSync, _volumeControl, _apm;
+  ExtendedSelection _xsel;
+};
+
+class VoETestManager {
+ public:
+  VoETestManager();
+  ~VoETestManager();
+
+  // Must be called after construction.
+  bool Init();
+
+  void GetInterfaces();
+  int ReleaseInterfaces();
+
+  const char* AudioFilename() const {
+    const std::string& result = resource_manager_.long_audio_file_path();
+    if (result.length() == 0) {
+      TEST_LOG("ERROR: Failed to open input file!");
+    }
+    return result.c_str();
+  }
+
+  VoiceEngine* VoiceEnginePtr() const {
+    return voice_engine_;
+  }
+  VoEBase* BasePtr() const {
+    return voe_base_;
+  }
+  VoECodec* CodecPtr() const {
+    return voe_codec_;
+  }
+  VoEVolumeControl* VolumeControlPtr() const {
+    return voe_volume_control_;
+  }
+  VoEDtmf* DtmfPtr() const {
+    return voe_dtmf_;
+  }
+  VoERTP_RTCP* RTP_RTCPPtr() const {
+    return voe_rtp_rtcp_;
+  }
+  VoEAudioProcessing* APMPtr() const {
+    return voe_apm_;
+  }
+
+  VoENetwork* NetworkPtr() const {
+    return voe_network_;
+  }
+
+  VoEFile* FilePtr() const {
+    return voe_file_;
+  }
+
+  VoEHardware* HardwarePtr() const {
+    return voe_hardware_;
+  }
+
+  VoEVideoSync* VideoSyncPtr() const {
+    return voe_vsync_;
+  }
+
+  VoEEncryption* EncryptionPtr() const {
+    return voe_encrypt_;
+  }
+
+  VoEExternalMedia* ExternalMediaPtr() const {
+    return voe_xmedia_;
+  }
+
+  VoECallReport* CallReportPtr() const {
+    return voe_call_report_;
+  }
+
+#ifdef _TEST_NETEQ_STATS_
+  VoENetEqStats* NetEqStatsPtr() const {
+    return voe_neteq_stats_;
+  }
+
+#endif
+
+ private:
+  bool                   initialized_;
+
+  VoiceEngine*           voice_engine_;
+  VoEBase*               voe_base_;
+  VoECallReport*         voe_call_report_;
+  VoECodec*              voe_codec_;
+  VoEDtmf*               voe_dtmf_;
+  VoEEncryption*         voe_encrypt_;
+  VoEExternalMedia*      voe_xmedia_;
+  VoEFile*               voe_file_;
+  VoEHardware*           voe_hardware_;
+  VoENetwork*            voe_network_;
+#ifdef _TEST_NETEQ_STATS_
+  VoENetEqStats*         voe_neteq_stats_;
+#endif
+  VoERTP_RTCP*           voe_rtp_rtcp_;
+  VoEVideoSync*          voe_vsync_;
+  VoEVolumeControl*      voe_volume_control_;
+  VoEAudioProcessing*    voe_apm_;
+
+  ResourceManager        resource_manager_;
+};
+
+} // namespace voetest
+
+#endif // WEBRTC_VOICE_ENGINE_VOE_STANDARD_TEST_H
diff --git a/src/voice_engine/test/auto_test/voe_stress_test.cc b/src/voice_engine/test/auto_test/voe_stress_test.cc
new file mode 100644
index 0000000..5bae92e
--- /dev/null
+++ b/src/voice_engine/test/auto_test/voe_stress_test.cc
@@ -0,0 +1,409 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+//       Some ideas of improvements:
+//       Break out common init and maybe terminate to separate function(s).
+//       How much trace should we have enabled?
+//       API error counter, to print info and return -1 if any error.
+
+#include <stdlib.h>
+#include <stdio.h>
+#include <string.h>
+#include <time.h>
+#include <cassert>
+#if defined(_WIN32)
+#include <conio.h>
+#endif
+
+#include "voe_stress_test.h"
+#include "voe_standard_test.h"
+
+#include "voice_engine/voice_engine_defines.h"  // defines build macros
+#include "thread_wrapper.h"
+
+using namespace webrtc;
+
+namespace voetest {
+
+#define VALIDATE_STRESS(expr)                                   \
+    if (expr)                                                   \
+    {                                                           \
+        printf("Error at line: %i, %s \n", __LINE__, #expr);    \
+        printf("Error code: %i \n", base->LastError());  \
+    }
+
+#ifdef _WIN32
+// Pause if supported
+#define PAUSE_OR_SLEEP(x) PAUSE;
+#else
+// Sleep a bit instead if pause not supported
+#define PAUSE_OR_SLEEP(x) SLEEP(x);
+#endif
+
+const char* VoEStressTest::_key = "====YUtFWRAAAAADBtIHgAAAAAEAAAAcAAAAAQBHU0ds"
+  "b2JhbCBJUCBTb3VuZAAC\nAAAAIwAAAExpY2Vuc2VkIHRvIE5vcnRlbCBOZXR3cm9rcwAAAAA"
+  "xAAAAZxZ7/u0M\niFYyTwSwko5Uutf7mh8S0O4rYZYTFidbzQeuGonuL17F/2oD/2pfDp3jL4"
+  "Rf3z/A\nnlJsEJgEtASkDNFuwLILjGY0pzjjAYQp3pCl6z6k2MtE06AirdjGLYCjENpq/opX"
+  "\nOrs3sIuwdYK5va/aFcsjBDmlsGCUM48RDYG9s23bIHYafXUC4ofOaubbZPWiPTmL\nEVJ8WH"
+  "4F9pgNjALc14oJXfON7r/3\n=EsLx";
+
+int VoEStressTest::DoTest() {
+  int test(-1);
+  while (test != 0) {
+    test = MenuSelection();
+    switch (test) {
+      case 0:
+        // Quit stress test
+        break;
+      case 1:
+        // All tests
+        StartStopTest();
+        CreateDeleteChannelsTest();
+        MultipleThreadsTest();
+        break;
+      case 2:
+        StartStopTest();
+        break;
+      case 3:
+        CreateDeleteChannelsTest();
+        break;
+      case 4:
+        MultipleThreadsTest();
+        break;
+      default:
+        // Should not be possible
+        printf("Invalid selection! (Test code error)\n");
+        assert(false);
+    } // switch
+  } // while
+
+  return 0;
+}
+
+int VoEStressTest::MenuSelection() {
+  printf("------------------------------------------------\n");
+  printf("Select stress test\n\n");
+  printf(" (0)  Quit\n");
+  printf(" (1)  All\n");
+  printf("- - - - - - - - - - - - - - - - - - - - - - - - \n");
+  printf(" (2)  Start/stop\n");
+  printf(" (3)  Create/delete channels\n");
+  printf(" (4)  Multiple threads\n");
+
+  const int maxMenuSelection = 4;
+  int selection(-1);
+
+  while ((selection < 0) || (selection > maxMenuSelection)) {
+    printf("\n: ");
+    int retval = scanf("%d", &selection);
+    if ((retval != 1) || (selection < 0) || (selection > maxMenuSelection)) {
+      printf("Invalid selection!\n");
+    }
+  }
+
+  return selection;
+}
+
+int VoEStressTest::StartStopTest() {
+  printf("------------------------------------------------\n");
+  printf("Running start/stop test\n");
+  printf("------------------------------------------------\n");
+
+  printf("\nNOTE: this thest will fail after a while if Core audio is used\n");
+  printf("because MS returns AUDCLNT_E_CPUUSAGE_EXCEEDED (VoE Error 10013).\n");
+
+  // Get sub-API pointers
+  VoEBase* base = _mgr.BasePtr();
+
+  // Set trace
+  //     VALIDATE_STRESS(base->SetTraceFileName(
+  //         GetFilename("VoEStressTest_StartStop_trace.txt")));
+  //     VALIDATE_STRESS(base->SetDebugTraceFileName(
+  //         GetFilename("VoEStressTest_StartStop_trace_debug.txt")));
+  //     VALIDATE_STRESS(base->SetTraceFilter(kTraceStateInfo |
+  //         kTraceWarning | kTraceError |
+  //         kTraceCritical | kTraceApiCall |
+  //         kTraceMemory | kTraceInfo));
+  VALIDATE_STRESS(base->Init());
+  VALIDATE_STRESS(base->CreateChannel());
+
+  ///////////// Start test /////////////
+
+  int numberOfLoops(2000);
+  int loopSleep(200);
+  int i(0);
+  int markInterval(20);
+
+  printf("Running %d loops with %d ms sleep. Mark every %d loop. \n",
+         numberOfLoops, loopSleep, markInterval);
+  printf("Test will take approximately %d minutes. \n",
+         numberOfLoops * loopSleep / 1000 / 60 + 1);
+
+  for (i = 0; i < numberOfLoops; ++i) {
+    VALIDATE_STRESS(base->SetLocalReceiver(0, 4800));
+    VALIDATE_STRESS(base->SetSendDestination(0, 4800, "127.0.0.1"));
+    VALIDATE_STRESS(base->StartReceive(0));
+    VALIDATE_STRESS(base->StartPlayout(0));
+    VALIDATE_STRESS(base->StartSend(0));
+    if (!(i % markInterval))
+      MARK();
+    SLEEP(loopSleep);
+    VALIDATE_STRESS(base->StopSend(0));
+    VALIDATE_STRESS(base->StopPlayout(0));
+    VALIDATE_STRESS(base->StopReceive(0));
+  }
+  ANL();
+
+  VALIDATE_STRESS(base->SetLocalReceiver(0, 4800));
+  VALIDATE_STRESS(base->SetSendDestination(0, 4800, "127.0.0.1"));
+  VALIDATE_STRESS(base->StartReceive(0));
+  VALIDATE_STRESS(base->StartPlayout(0));
+  VALIDATE_STRESS(base->StartSend(0));
+  printf("Verify that audio is good. \n");
+  PAUSE_OR_SLEEP(20000);
+  VALIDATE_STRESS(base->StopSend(0));
+  VALIDATE_STRESS(base->StopPlayout(0));
+  VALIDATE_STRESS(base->StopReceive(0));
+
+  ///////////// End test /////////////
+
+
+  // Terminate
+  VALIDATE_STRESS(base->DeleteChannel(0));
+  VALIDATE_STRESS(base->Terminate());
+
+  printf("Test finished \n");
+
+  return 0;
+}
+
+int VoEStressTest::CreateDeleteChannelsTest() {
+  printf("------------------------------------------------\n");
+  printf("Running create/delete channels test\n");
+  printf("------------------------------------------------\n");
+
+  // Get sub-API pointers
+  VoEBase* base = _mgr.BasePtr();
+
+  // Set trace
+  //     VALIDATE_STRESS(base->SetTraceFileName(
+  //          GetFilename("VoEStressTest_CreateChannels_trace.txt")));
+  //     VALIDATE_STRESS(base->SetDebugTraceFileName(
+  //          GetFilename("VoEStressTest_CreateChannels_trace_debug.txt")));
+  //     VALIDATE_STRESS(base->SetTraceFilter(kTraceStateInfo |
+  //         kTraceWarning | kTraceError |
+  //         kTraceCritical | kTraceApiCall |
+  //         kTraceMemory | kTraceInfo));
+  VALIDATE_STRESS(base->Init());
+
+  ///////////// Start test /////////////
+
+  int numberOfLoops(10000);
+  int loopSleep(10);
+  int i(0);
+  int markInterval(200);
+
+  printf("Running %d loops with %d ms sleep. Mark every %d loop. \n",
+         numberOfLoops, loopSleep, markInterval);
+  printf("Test will take approximately %d minutes. \n",
+         numberOfLoops * loopSleep / 1000 / 60 + 1);
+
+  //       Some possible extensions include:
+  //       Different sleep times (fixed or random) or zero.
+  //       Start call on all or some channels.
+  //       Two parts: first have a slight overweight to creating channels,
+  //       then to deleting. (To ensure we hit max channels and go to zero.)
+  //       Make sure audio is OK after test has finished.
+
+  // Set up, start with maxChannels/2 channels
+  const int maxChannels = base->MaxNumOfChannels();
+  VALIDATE_STRESS(maxChannels < 1); // Should always have at least one channel
+  bool* channelState = new bool[maxChannels];
+  memset(channelState, 0, maxChannels * sizeof(bool));
+  int channel(0);
+  int noOfActiveChannels(0);
+  for (i = 0; i < (maxChannels / 2); ++i) {
+    channel = base->CreateChannel();
+    VALIDATE_STRESS(channel < 0);
+    if (channel >= 0) {
+      channelState[channel] = true;
+      ++noOfActiveChannels;
+    }
+  }
+  srand((unsigned int) time(NULL));
+  bool action(false);
+  double rnd(0.0);
+  int res(0);
+
+  // Create/delete channels with slight
+  for (i = 0; i < numberOfLoops; ++i) {
+    // Randomize action (create or delete channel)
+    action = rand() <= (RAND_MAX / 2);
+    if (action) {
+      if (noOfActiveChannels < maxChannels) {
+        // Create new channel
+        channel = base->CreateChannel();
+        VALIDATE_STRESS(channel < 0);
+        if (channel >= 0) {
+          channelState[channel] = true;
+          ++noOfActiveChannels;
+        }
+      }
+    } else {
+      if (noOfActiveChannels > 0) {
+        // Delete random channel that's created [0, maxChannels - 1]
+        do {
+          rnd = static_cast<double> (rand());
+          channel = static_cast<int> (rnd /
+                                      (static_cast<double> (RAND_MAX) + 1.0f) *
+                                      maxChannels);
+        } while (!channelState[channel]); // Must find a created channel
+
+        res = base->DeleteChannel(channel);
+        VALIDATE_STRESS(0 != res);
+        if (0 == res) {
+          channelState[channel] = false;
+          --noOfActiveChannels;
+        }
+      }
+    }
+
+    if (!(i % markInterval))
+      MARK();
+    SLEEP(loopSleep);
+  }
+  ANL();
+
+  delete[] channelState;
+
+  ///////////// End test /////////////
+
+
+  // Terminate
+  VALIDATE_STRESS(base->Terminate()); // Deletes all channels
+
+  printf("Test finished \n");
+
+  return 0;
+}
+
+int VoEStressTest::MultipleThreadsTest() {
+  printf("------------------------------------------------\n");
+  printf("Running multiple threads test\n");
+  printf("------------------------------------------------\n");
+
+  // Get sub-API pointers
+  VoEBase* base = _mgr.BasePtr();
+
+  // Set trace
+  //     VALIDATE_STRESS(base->SetTraceFileName(
+  //        GetFilename("VoEStressTest_MultipleThreads_trace.txt")));
+  //     VALIDATE_STRESS(base->SetDebugTraceFileName(
+  //        GetFilename("VoEStressTest_MultipleThreads_trace_debug.txt")));
+  //     VALIDATE_STRESS(base->SetTraceFilter(kTraceStateInfo |
+  //        kTraceWarning | kTraceError |
+  //        kTraceCritical | kTraceApiCall |
+  //        kTraceMemory | kTraceInfo));
+
+  // Init
+  VALIDATE_STRESS(base->Init());
+  VALIDATE_STRESS(base->CreateChannel());
+
+  ///////////// Start test /////////////
+
+  int numberOfLoops(10000);
+  int loopSleep(0);
+  int i(0);
+  int markInterval(1000);
+
+  printf("Running %d loops with %d ms sleep. Mark every %d loop. \n",
+         numberOfLoops, loopSleep, markInterval);
+  printf("Test will take approximately %d minutes. \n",
+         numberOfLoops * loopSleep / 1000 / 60 + 1);
+
+  srand((unsigned int) time(NULL));
+  int rnd(0);
+
+  // Start extra thread
+  const char* threadName = "StressTest Extra API Thread";
+  _ptrExtraApiThread = ThreadWrapper::CreateThread(RunExtraApi, this,
+                                                   kNormalPriority, threadName);
+  unsigned int id(0);
+  VALIDATE_STRESS(!_ptrExtraApiThread->Start(id));
+
+  //       Some possible extensions include:
+  //       Add more API calls to randomize
+  //       More threads
+  //       Different sleep times (fixed or random).
+  //       Make sure audio is OK after test has finished.
+
+  // Call random API functions here and in extra thread, ignore any error
+  for (i = 0; i < numberOfLoops; ++i) {
+    // This part should be equal to the marked part in the extra thread
+    // --- BEGIN ---
+    rnd = rand();
+    if (rnd < (RAND_MAX / 2)) {
+      // Start playout
+      base->StartPlayout(0);
+    } else {
+      // Stop playout
+      base->StopPlayout(0);
+    }
+    // --- END ---
+
+    if (!(i % markInterval))
+      MARK();
+    SLEEP(loopSleep);
+  }
+  ANL();
+
+  // Stop extra thread
+  VALIDATE_STRESS(!_ptrExtraApiThread->Stop());
+  delete _ptrExtraApiThread;
+
+  ///////////// End test /////////////
+
+  // Terminate
+  VALIDATE_STRESS(base->Terminate()); // Deletes all channels
+
+  printf("Test finished \n");
+
+  return 0;
+}
+
+// Thread functions
+
+bool VoEStressTest::RunExtraApi(void* ptr) {
+  return static_cast<VoEStressTest*> (ptr)->ProcessExtraApi();
+}
+
+bool VoEStressTest::ProcessExtraApi() {
+  // Prepare
+  VoEBase* base = _mgr.BasePtr();
+  int rnd(0);
+
+  // Call random API function, ignore any error
+
+  // This part should be equal to the marked part in the main thread
+  // --- BEGIN ---
+  rnd = rand();
+  if (rnd < (RAND_MAX / 2)) {
+    // Start playout
+    base->StartPlayout(0);
+  } else {
+    // Stop playout
+    base->StopPlayout(0);
+  }
+  // --- END ---
+
+  return true;
+}
+
+} //  namespace voetest
diff --git a/src/voice_engine/test/auto_test/voe_stress_test.h b/src/voice_engine/test/auto_test/voe_stress_test.h
new file mode 100644
index 0000000..b3a418c
--- /dev/null
+++ b/src/voice_engine/test/auto_test/voe_stress_test.h
@@ -0,0 +1,50 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_STRESS_TEST_H
+#define WEBRTC_VOICE_ENGINE_VOE_STRESS_TEST_H
+
+namespace webrtc {
+class ThreadWrapper;
+}
+
+namespace voetest {
+// TODO(andrew): using directives are not permitted.
+using namespace webrtc;
+
+class VoETestManager;
+
+class VoEStressTest {
+ public:
+  VoEStressTest(VoETestManager& mgr) :
+    _mgr(mgr), _ptrExtraApiThread(NULL) {
+  }
+  ~VoEStressTest() {
+  }
+  int DoTest();
+
+ private:
+  int MenuSelection();
+  int StartStopTest();
+  int CreateDeleteChannelsTest();
+  int MultipleThreadsTest();
+
+  static bool RunExtraApi(void* ptr);
+  bool ProcessExtraApi();
+
+  VoETestManager& _mgr;
+  static const char* _key;
+
+  ThreadWrapper* _ptrExtraApiThread;
+};
+
+} //  namespace voetest
+
+#endif // WEBRTC_VOICE_ENGINE_VOE_STRESS_TEST_H
diff --git a/src/voice_engine/test/auto_test/voe_test_defines.h b/src/voice_engine/test/auto_test/voe_test_defines.h
new file mode 100644
index 0000000..9fff35b
--- /dev/null
+++ b/src/voice_engine/test/auto_test/voe_test_defines.h
@@ -0,0 +1,189 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_TEST_DEFINES_H
+#define WEBRTC_VOICE_ENGINE_VOE_TEST_DEFINES_H
+
+// Read WEBRTC_VOICE_ENGINE_XXX_API compiler flags
+#include "engine_configurations.h"
+
+#ifdef WEBRTC_ANDROID
+#include <android/log.h>
+#define ANDROID_LOG_TAG "VoiceEngine Auto Test"
+#define TEST_LOG(...) \
+    __android_log_print(ANDROID_LOG_DEBUG, ANDROID_LOG_TAG, __VA_ARGS__)
+#define TEST_LOG_ERROR(...) \
+    __android_log_print(ANDROID_LOG_ERROR, ANDROID_LOG_TAG, __VA_ARGS__)
+#define TEST_LOG_FLUSH
+#else
+#define TEST_LOG printf
+#define TEST_LOG_ERROR printf
+#define TEST_LOG_FLUSH fflush(NULL)
+#endif
+
+// Select the tests to execute, list order below is same as they will be
+// executed. Note that, all settings below will be overriden by sub-API
+// settings in engine_configurations.h.
+#define _TEST_BASE_
+#define _TEST_RTP_RTCP_
+#define _TEST_HARDWARE_
+#define _TEST_CODEC_
+#define _TEST_DTMF_
+#define _TEST_VOLUME_
+#define _TEST_AUDIO_PROCESSING_
+#define _TEST_FILE_
+#define _TEST_NETWORK_
+#define _TEST_CALL_REPORT_
+#define _TEST_VIDEO_SYNC_
+#define _TEST_ENCRYPT_
+#define _TEST_NETEQ_STATS_
+#define _TEST_XMEDIA_
+
+#define TESTED_AUDIO_LAYER kAudioPlatformDefault
+//#define TESTED_AUDIO_LAYER kAudioLinuxPulse
+
+// #define _ENABLE_VISUAL_LEAK_DETECTOR_ // Enables VLD to find memory leaks
+// #define _ENABLE_IPV6_TESTS_      // Enables IPv6 tests in network xtest
+// #define _USE_EXTENDED_TRACE_     // Adds unique trace files for extended test
+// #define _MEMORY_TEST_
+
+// Enable this when running instrumentation of some kind to exclude tests
+// that will not pass due to slowed down execution.
+// #define _INSTRUMENTATION_TESTING_
+
+// Exclude (override) API tests given preprocessor settings in
+// engine_configurations.h
+#ifndef WEBRTC_VOICE_ENGINE_CODEC_API
+#undef _TEST_CODEC_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
+#undef _TEST_VOLUME_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_DTMF_API
+#undef _TEST_DTMF_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_RTP_RTCP_API
+#undef _TEST_RTP_RTCP_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
+#undef _TEST_AUDIO_PROCESSING_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_FILE_API
+#undef _TEST_FILE_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
+#undef _TEST_VIDEO_SYNC_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
+#undef _TEST_ENCRYPT_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_HARDWARE_API
+#undef _TEST_HARDWARE_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
+#undef _TEST_XMEDIA_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_NETWORK_API
+#undef _TEST_NETWORK_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_NETEQ_STATS_API
+#undef _TEST_NETEQ_STATS_
+#endif
+#ifndef WEBRTC_VOICE_ENGINE_CALL_REPORT_API
+#undef _TEST_CALL_REPORT_
+#endif
+
+// Some parts can cause problems while running Insure
+#ifdef __INSURE__
+#define _INSTRUMENTATION_TESTING_
+#undef WEBRTC_SRTP
+#endif
+
+// Time in ms to test each packet size for each codec
+#define CODEC_TEST_TIME 400
+
+#define MARK() TEST_LOG("."); fflush(NULL);             // Add test marker
+#define ANL() TEST_LOG("\n")                            // Add New Line
+#define AOK() TEST_LOG("[Test is OK]"); fflush(NULL);   // Add OK
+#if defined(_WIN32)
+#define PAUSE                                      \
+    {                                               \
+        TEST_LOG("Press any key to continue...");   \
+        _getch();                                   \
+        TEST_LOG("\n");                             \
+    }
+#else
+#define PAUSE                                          \
+    {                                                   \
+        TEST_LOG("Continuing (pause not supported)\n"); \
+    }
+#endif
+
+#define TEST(s)                         \
+    {                                   \
+        TEST_LOG("Testing: %s", #s);    \
+    }                                   \
+
+#ifdef _INSTRUMENTATION_TESTING_
+// Don't stop execution if error occurs
+#define TEST_MUSTPASS(expr)                                               \
+    {                                                                     \
+        if ((expr))                                                       \
+        {                                                                 \
+            TEST_LOG_ERROR("Error at line:%i, %s \n",__LINE__, #expr);    \
+            TEST_LOG_ERROR("Error code: %i\n",voe_base_->LastError());    \
+        }                                                                 \
+    }
+#define TEST_ERROR(code)                                                \
+    {                                                                   \
+        int err = voe_base_->LastError();                               \
+        if (err != code)                                                \
+        {                                                               \
+            TEST_LOG_ERROR("Invalid error code (%d, should be %d) at line %d\n",
+                           code, err, __LINE__);
+}
+}
+#else
+#define ASSERT_TRUE(expr) TEST_MUSTPASS(!(expr))
+#define ASSERT_FALSE(expr) TEST_MUSTPASS(expr)
+#define TEST_MUSTFAIL(expr) TEST_MUSTPASS(!((expr) == -1))
+#define TEST_MUSTPASS(expr)                                              \
+    {                                                                    \
+        if ((expr))                                                      \
+        {                                                                \
+            TEST_LOG_ERROR("\nError at line:%i, %s \n",__LINE__, #expr); \
+            TEST_LOG_ERROR("Error code: %i\n", voe_base_->LastError());  \
+            PAUSE                                                        \
+            return -1;                                                   \
+        }                                                                \
+    }
+#define TEST_ERROR(code) \
+    {																                                         \
+      int err = voe_base_->LastError();                                      \
+      if (err != code)                                                       \
+      {                                                                      \
+        TEST_LOG_ERROR("Invalid error code (%d, should be %d) at line %d\n", \
+                       err, code, __LINE__);                                 \
+        PAUSE                                                                \
+        return -1;                                                           \
+      }															                                         \
+    }
+#endif  // #ifdef _INSTRUMENTATION_TESTING_
+#define EXCLUDE()                                                   \
+    {                                                               \
+        TEST_LOG("\n>>> Excluding test at line: %i <<<\n\n",__LINE__);  \
+    }
+
+#define INCOMPLETE()                                                \
+    {                                                               \
+        TEST_LOG("\n>>> Incomplete test at line: %i <<<\n\n",__LINE__);  \
+    }
+
+#endif // WEBRTC_VOICE_ENGINE_VOE_TEST_DEFINES_H
diff --git a/src/voice_engine/test/auto_test/voe_test_interface.h b/src/voice_engine/test/auto_test/voe_test_interface.h
new file mode 100644
index 0000000..9926f1e
--- /dev/null
+++ b/src/voice_engine/test/auto_test/voe_test_interface.h
@@ -0,0 +1,53 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  Interface for starting test
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_TEST_INTERFACE_H
+#define WEBRTC_VOICE_ENGINE_VOE_TEST_INTERFACE_H
+
+#include "common_types.h"
+
+namespace voetest {
+// TODO(andrew): Using directives not permitted.
+using namespace webrtc;
+
+// TestType enumerator
+enum TestType {
+  Invalid = -1, Standard = 0, Extended = 1, Stress = 2, Unit = 3, CPU = 4
+};
+
+// ExtendedSelection enumerator
+enum ExtendedSelection {
+  XSEL_Invalid = -1,
+  XSEL_None = 0,
+  XSEL_All,
+  XSEL_Base,
+  XSEL_CallReport,
+  XSEL_Codec,
+  XSEL_DTMF,
+  XSEL_Encryption,
+  XSEL_ExternalMedia,
+  XSEL_File,
+  XSEL_Hardware,
+  XSEL_NetEqStats,
+  XSEL_Network,
+  XSEL_RTP_RTCP,
+  XSEL_VideoSync,
+  XSEL_VolumeControl,
+};
+
+// Main test function
+int runAutoTest(TestType testType, ExtendedSelection extendedSel);
+
+} //  namespace voetest
+#endif // WEBRTC_VOICE_ENGINE_VOE_TEST_INTERFACE_H
diff --git a/src/voice_engine/test/auto_test/voe_unit_test.cc b/src/voice_engine/test/auto_test/voe_unit_test.cc
new file mode 100644
index 0000000..d76c448
--- /dev/null
+++ b/src/voice_engine/test/auto_test/voe_unit_test.cc
@@ -0,0 +1,1079 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_unit_test.h"
+
+#include <stdio.h>
+#include <string.h>
+#include <time.h>
+#include <cassert>
+#if defined(_WIN32)
+#include <conio.h>
+#endif
+
+#include "system_wrappers/interface/thread_wrapper.h"
+#include "testsupport/fileutils.h"
+#include "voice_engine/voice_engine_defines.h"
+#include "voice_engine/test/auto_test/fakes/fake_media_process.h"
+
+using namespace webrtc;
+
+namespace voetest {
+
+#define CHECK(expr)                                             \
+    if (expr)                                                   \
+    {                                                           \
+        printf("Error at line: %i, %s \n", __LINE__, #expr);    \
+        printf("Error code: %i \n", base->LastError());  \
+		PAUSE												    \
+        return -1;                                              \
+    }
+
+// ----------------------------------------------------------------------------
+//                       >>>  R E A D M E  F I R S T <<<
+// ----------------------------------------------------------------------------
+
+// 1) The user must ensure that the following codecs are included in VoE:
+//
+// - L16
+// - G.729
+// - G.722.1C
+
+// 2) It is also possible to modify the simulation time for each individual test
+//
+const int dTBetweenEachTest = 4000;
+
+// ----------------------------------------------------------------------------
+//                                  Encrypt
+// ----------------------------------------------------------------------------
+
+void VoEUnitTest::encrypt(int channel_no, unsigned char * in_data,
+                          unsigned char * out_data, int bytes_in,
+                          int * bytes_out) {
+  int i;
+
+  if (!_extOnOff) {
+    // no stereo emulation <=> pure bypass
+    for (i = 0; i < bytes_in; i++)
+      out_data[i] = in_data[i];
+    *bytes_out = bytes_in;
+  } else if (_extOnOff && (_extBitsPerSample == 16)) {
+    // stereo emulation (sample based, 2 bytes per sample)
+
+    const int nBytesPayload = bytes_in - 12;
+
+    // RTP header (first 12 bytes)
+    memcpy(out_data, in_data, 12);
+
+    // skip RTP header
+    short* ptrIn = (short*) &in_data[12];
+    short* ptrOut = (short*) &out_data[12];
+
+    // network byte order
+    for (i = 0; i < nBytesPayload / 2; i++) {
+      // produce two output samples for each input sample
+      *ptrOut++ = *ptrIn; // left sample
+      *ptrOut++ = *ptrIn; // right sample
+      ptrIn++;
+    }
+
+    *bytes_out = 12 + 2 * nBytesPayload;
+  } else if (_extOnOff && (_extBitsPerSample == 8)) {
+    // stereo emulation (sample based, 1 bytes per sample)
+
+    const int nBytesPayload = bytes_in - 12;
+
+    // RTP header (first 12 bytes)
+    memcpy(out_data, in_data, 12);
+
+    // skip RTP header
+    unsigned char* ptrIn = (unsigned char*) &in_data[12];
+    unsigned char* ptrOut = (unsigned char*) &out_data[12];
+
+    // network byte order
+    for (i = 0; i < nBytesPayload; i++) {
+      // produce two output samples for each input sample
+      *ptrOut++ = *ptrIn; // left sample
+      *ptrOut++ = *ptrIn; // right sample
+      ptrIn++;
+    }
+
+    *bytes_out = 12 + 2 * nBytesPayload;
+  } else if (_extOnOff && (_extBitsPerSample == -1)) {
+    // stereo emulation (frame based)
+
+    const int nBytesPayload = bytes_in - 12;
+
+    // RTP header (first 12 bytes)
+    memcpy(out_data, in_data, 12);
+
+    // skip RTP header
+    unsigned char* ptrIn = (unsigned char*) &in_data[12];
+    unsigned char* ptrOut = (unsigned char*) &out_data[12];
+
+    // left channel
+    for (i = 0; i < nBytesPayload; i++) {
+      *ptrOut++ = *ptrIn++;
+    }
+
+    ptrIn = (unsigned char*) &in_data[12];
+
+    // right channel
+    for (i = 0; i < nBytesPayload; i++) {
+      *ptrOut++ = *ptrIn++;
+    }
+
+    *bytes_out = 12 + 2 * nBytesPayload;
+  }
+}
+
+void VoEUnitTest::decrypt(int channel_no, unsigned char * in_data,
+                          unsigned char * out_data, int bytes_in,
+                          int * bytes_out) {
+  int i;
+  for (i = 0; i < bytes_in; i++)
+    out_data[i] = in_data[i];
+  *bytes_out = bytes_in;
+}
+
+void VoEUnitTest::encrypt_rtcp(int channel_no, unsigned char * in_data,
+                               unsigned char * out_data, int bytes_in,
+                               int * bytes_out) {
+  int i;
+  for (i = 0; i < bytes_in; i++)
+    out_data[i] = in_data[i];
+  *bytes_out = bytes_in;
+}
+
+void VoEUnitTest::decrypt_rtcp(int channel_no, unsigned char * in_data,
+                               unsigned char * out_data, int bytes_in,
+                               int * bytes_out) {
+  int i;
+  for (i = 0; i < bytes_in; i++)
+    out_data[i] = in_data[i];
+  *bytes_out = bytes_in;
+}
+
+void VoEUnitTest::SetStereoExternalEncryption(int channel, bool onOff,
+                                              int bitsPerSample) {
+  _extOnOff = onOff;
+  _extChannel = channel;
+  _extBitsPerSample = bitsPerSample;
+}
+
+// VoEVEMediaProcess
+FakeMediaProcess mpobj;
+
+// ----------------------------------------------------------------------------
+//                               VoEUnitTest
+// ----------------------------------------------------------------------------
+
+VoEUnitTest::VoEUnitTest(VoETestManager& mgr) :
+  _mgr(mgr), _extOnOff(false), _extBitsPerSample(-1), _extChannel(0) {
+  for (int i = 0; i < 32; i++) {
+    _listening[i] = false;
+    _playing[i] = false;
+    _sending[i] = false;
+  }
+}
+
+// ----------------------------------------------------------------------------
+//  DoTest
+// ----------------------------------------------------------------------------
+
+int VoEUnitTest::DoTest() {
+  int test(-1);
+  int ret(0);
+  while ((test != 0) && (ret != -1)) {
+    test = MenuSelection();
+    switch (test) {
+      case 0:
+        // Quit stress test
+        break;
+      case 1:
+        ret = MixerTest();
+        break;
+      case 2:
+        ret = MixerTest();
+        break;
+      default:
+        // Should not be possible
+        printf("Invalid selection! (Test code error)\n");
+        assert(false);
+    } // switch
+  } // while
+
+  return ret;
+}
+
+// ----------------------------------------------------------------------------
+//  MenuSelection
+// ----------------------------------------------------------------------------
+
+int VoEUnitTest::MenuSelection() {
+  printf("------------------------------------------------\n");
+  printf("Select unit test\n\n");
+  printf(" (0)  Quit\n");
+  printf(" (1)  All\n");
+  printf("- - - - - - - - - - - - - - - - - - - - - - - - \n");
+  printf(" (2)  Mixer\n");
+
+  const int maxMenuSelection = 2;
+  int selection(-1);
+
+  while ((selection < 0) || (selection > maxMenuSelection)) {
+    printf("\n: ");
+    int retval = scanf("%d", &selection);
+    if ((retval != 1) || (selection < 0) || (selection > maxMenuSelection)) {
+      printf("Invalid selection!\n");
+    }
+  }
+
+  return selection;
+}
+
+// ----------------------------------------------------------------------------
+//  StartMedia
+// ----------------------------------------------------------------------------
+
+int VoEUnitTest::StartMedia(int channel, int rtpPort, bool listen, bool playout,
+                            bool send, bool fileAsMic, bool localFile) {
+  VoEBase* base = _mgr.BasePtr();
+  VoEFile* file = _mgr.FilePtr();
+
+  _listening[channel] = false;
+  _playing[channel] = false;
+  _sending[channel] = false;
+
+  CHECK(base->SetLocalReceiver(channel, rtpPort));
+  CHECK(base->SetSendDestination(channel, rtpPort, "127.0.0.1"));
+  if (listen) {
+    _listening[channel] = true;
+    CHECK(base->StartReceive(channel));
+  }
+  if (playout) {
+    _playing[channel] = true;
+    CHECK(base->StartPlayout(channel));
+  }
+  if (send) {
+    _sending[channel] = true;
+    CHECK(base->StartSend(channel));
+  }
+  if (fileAsMic) {
+    // play mic as file, mix with microphone to ensure that SWB can be
+    //tested as well
+    const bool mixWithMic(true);
+    CHECK(file->StartPlayingFileAsMicrophone(channel, _mgr.AudioFilename(),
+            true, mixWithMic));
+  }
+  if (localFile) {
+    std::string inputFile = webrtc::test::OutputPath() + "audio_short16.pcm";
+    CHECK(file->StartPlayingFileLocally(channel,
+            inputFile.c_str(),
+            false,
+            kFileFormatPcm16kHzFile));
+  }
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+//  StopMedia
+// ----------------------------------------------------------------------------
+
+int VoEUnitTest::StopMedia(int channel) {
+  VoEBase* base = _mgr.BasePtr();
+  VoEFile* file = _mgr.FilePtr();
+
+  if (file->IsPlayingFileAsMicrophone(channel)) {
+    CHECK(file->StopPlayingFileAsMicrophone(channel));
+  }
+  if (file->IsPlayingFileLocally(channel)) {
+    CHECK(file->StopPlayingFileLocally(channel));
+  }
+  if (_listening[channel]) {
+    _listening[channel] = false;
+    CHECK(base->StopReceive(channel));
+  }
+  if (_playing[channel]) {
+    _playing[channel] = false;
+    CHECK(base->StopPlayout(channel));
+  }
+  if (_sending[channel]) {
+    _sending[channel] = false;
+    CHECK(base->StopSend(channel));
+  }
+
+  return 0;
+}
+
+void VoEUnitTest::Sleep(unsigned int timeMillisec, bool addMarker) {
+  if (addMarker) {
+    float dtSec = (float) ((float) timeMillisec / 1000.0);
+    printf("[dT=%.1f]", dtSec);
+    fflush(NULL);
+  }
+  ::Sleep(timeMillisec);
+}
+
+void VoEUnitTest::Wait() {
+#if defined(_WIN32)
+  printf("\npress any key..."); fflush(NULL);
+  _getch();
+#endif
+}
+
+void VoEUnitTest::Test(const char* msg) {
+  printf("%s", msg);
+  fflush(NULL);
+  printf("\n");
+  fflush(NULL);
+}
+
+int VoEUnitTest::MixerTest() {
+  // Set up test parameters first
+  //
+  const int testTime(dTBetweenEachTest);
+
+  printf("\n\n================================================\n");
+  printf(" Mixer Unit Test\n");
+  printf("================================================\n\n");
+
+  // Get sub-API pointers
+  //
+  VoEBase* base = _mgr.BasePtr();
+  VoECodec* codec = _mgr.CodecPtr();
+  VoEFile* file = _mgr.FilePtr();
+  VoEVolumeControl* volume = _mgr.VolumeControlPtr();
+  VoEEncryption* encrypt = _mgr.EncryptionPtr();
+  VoEDtmf* dtmf = _mgr.DtmfPtr();
+  VoEExternalMedia* xmedia = _mgr.ExternalMediaPtr();
+
+  // Set trace
+  //
+  std::string outputDir = webrtc::test::OutputPath();
+  std::string traceFile = outputDir + "UnitTest_Mixer_trace.txt";
+  VoiceEngine::SetTraceFile(outputDir.c_str());
+  VoiceEngine::SetTraceFilter(kTraceStateInfo | kTraceWarning | kTraceError |
+                              kTraceCritical | kTraceApiCall | kTraceMemory |
+                              kTraceInfo);
+
+  // Init
+  //
+  CHECK(base->Init());
+
+  // 8 kHz
+  //    CodecInst l16_8 = { 123, "L16", 8000, 160, 1, 128000 };
+  CodecInst pcmu_8 = { 0, "pcmu", 8000, 160, 1, 64000 };
+  //    CodecInst g729_8 = { 18, "g729", 8000, 160, 1, 8000 };
+
+  // 16 kHz
+  CodecInst ipcmwb_16 = { 97, "ipcmwb", 16000, 320, 1, 80000 };
+  CodecInst l16_16 = { 124, "L16", 16000, 320, 1, 256000 };
+
+  // 32 kHz
+  CodecInst l16_32 = { 125, "L16", 32000, 320, 1, 512000 };
+  CodecInst g722_1c_32 = { 126, "G7221", 32000, 640, 1, 32000 };// 20ms@32kHz
+
+  // ------------------------
+  // Verify mixing frequency
+  // ------------------------
+
+  base->CreateChannel();
+
+  Test(">> Verify correct mixing frequency:\n");
+
+  Test("(ch 0) Sending file at 8kHz <=> mixing at 8kHz...");
+  CHECK(StartMedia(0, 12345, true, true, true, true, false));
+  Sleep(testTime);
+
+  Test("(ch 0) Sending file at 16kHz <=> mixing at 16kHz...");
+  CHECK(codec->SetSendCodec(0, ipcmwb_16));
+  Sleep(testTime);
+
+  Test("(ch 0) Sending speech at 32kHz <=> mixing at 32Hz...");
+  CHECK(codec->SetSendCodec(0, l16_32));
+  Sleep(testTime);
+
+  Test("(ch 0) Sending file at 8kHz <=> mixing at 8kHz...");
+  CHECK(codec->SetSendCodec(0, pcmu_8));
+  Sleep(testTime);
+
+  Test("(ch 0) Playing 16kHz file locally <=> mixing at 16kHz...");
+  std::string inputFile = outputDir + "audio_long16.pcm";
+  CHECK(file->StartPlayingFileLocally(0, inputFile.c_str(),
+          false, kFileFormatPcm16kHzFile));
+  Sleep(testTime);
+  CHECK(file->StopPlayingFileLocally(0));
+
+  base->CreateChannel();
+
+  Test("(ch 0) Sending file at 8kHz <=> mixing at 8kHz...");
+  CHECK(codec->SetSendCodec(0, pcmu_8));
+  Sleep(testTime);
+
+  Test("(ch 0) Sending speech at 32kHz <=> mixing at 32Hz...");
+  CHECK(codec->SetSendCodec(0, l16_32));
+  Sleep(testTime);
+
+  Test("(ch 1) Playing 16kHz file locally <=> mixing at 32kHz...");
+  CHECK(StartMedia(1, 54321, false, true, false, false, true));
+  Sleep(testTime);
+
+  CHECK(StopMedia(1));
+  CHECK(StopMedia(0));
+
+  base->DeleteChannel(1);
+  base->DeleteChannel(0);
+  ANL();
+
+  // -------------------------
+  // Verify stereo mode mixing
+  // -------------------------
+
+  base->CreateChannel();
+  base->CreateChannel();
+
+  // SetOutputVolumePan
+  //
+  // Ensure that all cases sound OK and that the mixer changes state between
+  // mono and stereo as it should. A debugger is required to trace the state
+  // transitions.
+
+  Test(">> Verify correct mixing in stereo using SetOutputVolumePan():\n");
+
+  Test("(ch 0) Playing 16kHz file locally <=> mixing in mono @ 16kHz...");
+  CHECK(StartMedia(0, 12345, false, true, false, false, true));
+  Sleep(testTime);
+  Test("Panning volume to the left <=> mixing in stereo @ 16kHz...");
+  CHECK(volume->SetOutputVolumePan(-1, 1.0, 0.0));
+  Sleep(testTime);
+  Test("Panning volume to the right <=> mixing in stereo @ 16kHz...");
+  CHECK(volume->SetOutputVolumePan(-1, 0.0, 1.0));
+  Sleep(testTime);
+  Test("Back to center volume again <=> mixing in mono @ 16kHz...");
+  CHECK(volume->SetOutputVolumePan(-1, 1.0, 1.0));
+  Sleep(testTime);
+  Test("(ch 1) Playing 16kHz file locally <=> mixing in mono @ 16kHz...");
+  CHECK(StartMedia(1, 54321, false, true, false, false, true));
+  Sleep(testTime);
+  Test("Panning volume to the left <=> mixing in stereo @ 16kHz...");
+  CHECK(volume->SetOutputVolumePan(-1, 1.0, 0.0));
+  Sleep(testTime);
+  Test("Back to center volume again <=> mixing in mono @ 16kHz...");
+  CHECK(volume->SetOutputVolumePan(-1, 1.0, 1.0));
+  Sleep(testTime);
+  Test("(ch 1) Stopped playing file <=> mixing in mono @ 16kHz...");
+  CHECK(StopMedia(1));
+  Sleep(testTime);
+  CHECK(StopMedia(0));
+  Test("(ch 0) Sending file at 8kHz <=> mixing at 8kHz...");
+  CHECK(StartMedia(0, 12345, true, true, true, true, false));
+  Sleep(testTime);
+  Test("(ch 0) Sending speech at 32kHz <=> mixing at 32kHz...");
+  CHECK(codec->SetSendCodec(0, l16_32));
+  Sleep(testTime);
+  Test("Panning volume to the right <=> mixing in stereo @ 32kHz...");
+  CHECK(volume->SetOutputVolumePan(-1, 0.0, 1.0));
+  Sleep(testTime);
+  Test("Back to center volume again <=> mixing in mono @ 32kHz...");
+  CHECK(volume->SetOutputVolumePan(-1, 1.0, 1.0));
+  Sleep(testTime);
+  CHECK(StopMedia(0));
+  ANL();
+
+  base->DeleteChannel(0);
+  base->DeleteChannel(1);
+
+  // SetChannelOutputVolumePan
+  //
+  // Ensure that all cases sound OK and that the mixer changes state between
+  // mono and stereo as it should. A debugger is required to trace the state
+  // transitions.
+
+  base->CreateChannel();
+  base->CreateChannel();
+
+  Test(">> Verify correct mixing in stereo using"
+    " SetChannelOutputVolumePan():\n");
+
+  Test("(ch 0) Playing 16kHz file locally <=> mixing in mono @ 16kHz...");
+  CHECK(StartMedia(0, 12345, false, true, false, false, true));
+  Sleep(testTime);
+  Test("(ch 0) Panning channel volume to the left <=> mixing in stereo @ "
+    "16kHz...");
+  CHECK(volume->SetOutputVolumePan(0, 1.0, 0.0));
+  Sleep(testTime);
+  Test("(ch 0) Panning channel volume to the right <=> mixing in stereo"
+    " @ 16kHz...");
+  CHECK(volume->SetOutputVolumePan(0, 0.0, 1.0));
+  Sleep(testTime);
+  Test("(ch 0) Back to center volume again <=> mixing in mono @"
+    " 16kHz...");
+  CHECK(volume->SetOutputVolumePan(0, 1.0, 1.0));
+  Sleep(testTime);
+  Test("(ch 1) Playing 16kHz file locally <=> mixing in mono @ 16kHz...");
+  CHECK(StartMedia(1, 54321, false, true, false, false, true));
+  Sleep(testTime);
+  Test("(ch 1) Panning channel volume to the left <=> mixing in stereo "
+    "@ 16kHz...");
+  CHECK(volume->SetOutputVolumePan(1, 1.0, 0.0));
+  Sleep(testTime);
+  Test("(ch 1) Back to center volume again <=> mixing in mono @ 16kHz...");
+  CHECK(volume->SetOutputVolumePan(1, 1.0, 1.0));
+  Sleep(testTime);
+  Test("(ch 1) Stopped playing file <=> mixing in mono @ 16kHz...");
+  CHECK(StopMedia(1));
+  Sleep(testTime);
+  CHECK(StopMedia(0));
+  ANL();
+
+  base->DeleteChannel(0);
+  base->DeleteChannel(1);
+
+  // Emulate stereo-encoding using Encryption
+  //
+  // Modify the transmitted RTP stream by using external encryption.
+  // Supports frame based and sample based "stereo-encoding schemes".
+
+  base->CreateChannel();
+
+  Test(">> Verify correct mixing in stereo using emulated stereo input:\n");
+
+  // enable external encryption
+  CHECK(encrypt->RegisterExternalEncryption(0, *this));
+  Test("(ch 0) External Encryption is now enabled:");
+
+  Test("(ch 0) Sending file at 8kHz <=> mixing in mono @ 8kHz...");
+  CHECK(StartMedia(0, 12345, true, true, true, true, false));
+  Sleep(testTime);
+
+  // switch to 16kHz (L16) sending codec
+  CHECK(codec->SetSendCodec(0, l16_16));
+  Test("(ch 0) Sending file at 16kHz (L16) <=> mixing in mono @ 16kHz...");
+  Sleep(testTime);
+
+  // register L16 as 2-channel codec on receiving side =>
+  // should sound bad since RTP module splits all received packets in half
+  // (sample based)
+  CHECK(base->StopPlayout(0));
+  CHECK(base->StopReceive(0));
+  l16_16.channels = 2;
+  CHECK(codec->SetRecPayloadType(0, l16_16));
+  CHECK(base->StartReceive(0));
+  CHECK(base->StartPlayout(0));
+  Test("(ch 0) 16kHz L16 is now registered as 2-channel codec on RX side => "
+    "should sound bad...");
+  Sleep(testTime);
+
+  // emulate sample-based stereo encoding
+  Test("(ch 0) Emulate sample-based stereo encoding on sending side => "
+    "should sound OK...");
+  SetStereoExternalEncryption(0, true, 16);
+  Sleep(testTime);
+  Test("(ch 0) Stop emulating sample-based stereo encoding on sending side =>"
+    " should sound bad...");
+  SetStereoExternalEncryption(0, false, 16);
+  Sleep(testTime);
+  Test("(ch 0) Emulate sample-based stereo encoding on sending side => "
+    "should sound OK...");
+  SetStereoExternalEncryption(0, true, 16);
+  Sleep(testTime);
+
+  // switch to 32kHz (L16) sending codec and disable stereo encoding
+  CHECK(codec->SetSendCodec(0, l16_32));
+  SetStereoExternalEncryption(0, false, 16);
+  Test("(ch 0) Sending file and spech at 32kHz (L16) <=> mixing in mono @ "
+    "32kHz...");
+  Sleep(testTime);
+
+  // register L16 32kHz as 2-channel codec on receiving side
+  CHECK(base->StopPlayout(0));
+  CHECK(base->StopReceive(0));
+  l16_32.channels = 2;
+  CHECK(codec->SetRecPayloadType(0, l16_32));
+  CHECK(base->StartReceive(0));
+  CHECK(base->StartPlayout(0));
+  Test("(ch 0) 32kHz L16 is now registered as 2-channel codec on RX side =>"
+    " should sound bad...");
+  Sleep(testTime);
+
+  // emulate sample-based stereo encoding
+  Test("(ch 0) Emulate sample-based stereo encoding on sending side =>"
+    " should sound OK...");
+  SetStereoExternalEncryption(0, true, 16);
+  Sleep(testTime);
+
+  StopMedia(0);
+  l16_32.channels = 1;
+
+  // disable external encryption
+  CHECK(encrypt->DeRegisterExternalEncryption(0));
+  ANL();
+
+  base->DeleteChannel(0);
+
+  // ------------------
+  // Verify put-on-hold
+  // ------------------
+
+  base->CreateChannel();
+  base->CreateChannel();
+
+  Test(">> Verify put-on-hold functionality:\n");
+
+  Test("(ch 0) Sending at 8kHz...");
+  CHECK(StartMedia(0, 12345, true, true, true, true, false));
+  Sleep(testTime);
+
+  CHECK(base->SetOnHoldStatus(0, true, kHoldPlayOnly));
+  Test("(ch 0) Playout is now on hold...");
+  Sleep(testTime);
+  CHECK(base->SetOnHoldStatus(0, false, kHoldPlayOnly));
+  Test("(ch 0) Playout is now enabled again...");
+  Sleep(testTime);
+
+  Test("(ch 0) Sending at 16kHz...");
+  l16_16.channels = 1;
+  CHECK(codec->SetSendCodec(0, l16_16));
+  Sleep(testTime);
+
+  CHECK(base->SetOnHoldStatus(0, true, kHoldPlayOnly));
+  Test("(ch 0) Playout is now on hold...");
+  Sleep(testTime);
+  CHECK(base->SetOnHoldStatus(0, false, kHoldPlayOnly));
+  Test("(ch 0) Playout is now enabled again...");
+  Sleep(testTime);
+
+  Test("(ch 0) Perform minor panning to the left to force mixing in"
+    " stereo...");
+  CHECK(volume->SetOutputVolumePan(0, (float)1.0, (float)0.7));
+  Sleep(testTime);
+
+  CHECK(base->SetOnHoldStatus(0, true, kHoldPlayOnly));
+  Test("(ch 0) Playout is now on hold...");
+  Sleep(testTime);
+  CHECK(base->SetOnHoldStatus(0, false, kHoldPlayOnly));
+  Test("(ch 0) Playout is now enabled again...");
+  Sleep(testTime);
+
+  Test("(ch 0) Back to center volume again...");
+  CHECK(volume->SetOutputVolumePan(0, 1.0, 1.0));
+  Sleep(testTime);
+
+  Test("(ch 1) Add 16kHz local file to the mixer...");
+  CHECK(StartMedia(1, 54321, false, true, false, false, true));
+  Sleep(testTime);
+
+  CHECK(base->SetOnHoldStatus(0, true, kHoldPlayOnly));
+  Test("(ch 0) Playout is now on hold...");
+  Sleep(testTime);
+  CHECK(base->SetOnHoldStatus(1, true, kHoldPlayOnly));
+  Test("(ch 1) Playout is now on hold => should be silent...");
+  Sleep(testTime);
+  CHECK(base->SetOnHoldStatus(0, false, kHoldPlayOnly));
+  Test("(ch 0) Playout is now enabled again...");
+  CHECK(base->SetOnHoldStatus(1, false, kHoldPlayOnly));
+  Test("(ch 1) Playout is now enabled again...");
+  Sleep(testTime);
+  StopMedia(1);
+  Test("(ch 1) Stopped playing file...");
+  Sleep(testTime);
+  StopMedia(0);
+  ANL();
+
+  base->DeleteChannel(0);
+  base->DeleteChannel(1);
+
+  // -----------------------------------
+  // Verify recording of playout to file
+  // -----------------------------------
+
+  // StartRecordingPlayout
+  //
+  // Verify that the correct set of signals is recorded in the mixer.
+  // Record each channel and all channels (-1) to ensure that post and pre
+  // mixing recording works.
+
+  base->CreateChannel();
+  base->CreateChannel();
+
+  Test(">> Verify file-recording functionality:\n");
+
+  Test("(ch 0) Sending at 8kHz...");
+  CHECK(StartMedia(0, 12345, true, true, true, true, false));
+  Sleep(testTime);
+
+  Test("(ch 0) Recording of playout to 16kHz PCM file...");
+
+  std::string recordedPlayoutFile = webrtc::test::OutputPath() +
+        "RecordedPlayout16kHz.pcm";
+  CHECK(file->StartRecordingPlayout(
+          0, recordedPlayoutFile.c_str(), NULL));
+  Sleep(testTime);
+  CHECK(file->StopRecordingPlayout(0));
+
+  Test("(ch 0) Playing out the recorded file...");
+  CHECK(volume->SetInputMute(0, true));
+  CHECK(file->StartPlayingFileLocally(
+          0, recordedPlayoutFile.c_str()));
+  Sleep(testTime);
+  CHECK(file->StopPlayingFileLocally(0));
+  CHECK(volume->SetInputMute(0, false));
+
+  CHECK(codec->SetSendCodec(0, l16_16));
+  Test("(ch 0) Sending at 16kHz (L16)...");
+  Sleep(testTime);
+
+  Test("(ch 0) Recording of playout to 16kHz PCM file...");
+  CHECK(file->StartRecordingPlayout(
+          0, recordedPlayoutFile.c_str(), NULL));
+  Sleep(testTime);
+  CHECK(file->StopRecordingPlayout(0));
+
+  Test("(ch 0) Playing out the recorded file...");
+  CHECK(volume->SetInputMute(0, true));
+  CHECK(file->StartPlayingFileLocally(
+          0, recordedPlayoutFile.c_str()));
+  Sleep(testTime);
+  CHECK(file->StopPlayingFileLocally(0));
+  CHECK(volume->SetInputMute(0, false));
+
+  CHECK(codec->SetSendCodec(0, l16_32));
+  Test("(ch 0) Sending at 32kHz (L16)...");
+  Sleep(testTime);
+
+  Test("(ch 0) Recording of playout to 16kHz PCM file...");
+  CHECK(file->StartRecordingPlayout(
+          0, recordedPlayoutFile.c_str(), NULL));
+  Sleep(testTime);
+  CHECK(file->StopRecordingPlayout(0));
+
+  Test("(ch 0) Playing out the recorded file...");
+  CHECK(volume->SetInputMute(0, true));
+  CHECK(file->StartPlayingFileLocally(
+          0, recordedPlayoutFile.c_str()));
+  Sleep(testTime);
+  CHECK(file->StopPlayingFileLocally(0));
+  CHECK(volume->SetInputMute(0, false));
+
+  Test("(ch 0) Sending at 16kHz without file as mic but file added on the"
+    " playout side instead...");
+  CHECK(StopMedia(0));
+  CHECK(StartMedia(0, 12345, false, true, false, false, true));
+  CHECK(codec->SetSendCodec(0, l16_16));
+  Sleep(testTime);
+
+  Test("(ch 0) Recording of playout to 16kHz PCM file...");
+  CHECK(file->StartRecordingPlayout(
+          0, recordedPlayoutFile.c_str(), NULL));
+  Sleep(testTime);
+  CHECK(file->StopRecordingPlayout(0));
+  CHECK(file->StopPlayingFileLocally(0));
+
+  Test("(ch 0) Playing out the recorded file...");
+  CHECK(file->StartPlayingFileLocally(
+          0, recordedPlayoutFile.c_str()));
+  Sleep(testTime);
+  CHECK(file->StopPlayingFileLocally(0));
+
+  CHECK(StopMedia(0));
+  CHECK(StopMedia(1));
+
+  Test("(ch 0) Sending at 16kHz...");
+  CHECK(StartMedia(0, 12345, true, true, true, false, false));
+  CHECK(codec->SetSendCodec(0, l16_16));
+  Test("(ch 1) Adding playout file...");
+  CHECK(StartMedia(1, 33333, false, true, false, false, true));
+  Sleep(testTime);
+
+  Test("(ch -1) Speak while recording all channels to add mixer input on "
+    "channel 0...");
+  CHECK(file->StartRecordingPlayout(
+          -1, recordedPlayoutFile.c_str(), NULL));
+  Sleep(testTime);
+  CHECK(file->StopRecordingPlayout(-1));
+  CHECK(file->StopPlayingFileLocally(1));
+
+  Test("(ch 0) Playing out the recorded file...");
+  CHECK(volume->SetInputMute(0, true));
+  CHECK(file->StartPlayingFileLocally(
+          0, recordedPlayoutFile.c_str()));
+  Sleep(testTime);
+  CHECK(file->StopPlayingFileLocally(0));
+  CHECK(volume->SetInputMute(0, false));
+
+  CHECK(StopMedia(0));
+  CHECK(StopMedia(1));
+  ANL();
+
+  // StartRecordingPlayoutStereo
+
+  Test(">> Verify recording of playout in stereo:\n");
+
+  Test("(ch 0) Sending at 32kHz...");
+  CHECK(codec->SetSendCodec(0, l16_16));
+  CHECK(StartMedia(0, 12345, true, true, true, true, false));
+  Sleep(testTime);
+
+  Test("Modified master balance (L=10%%, R=100%%) to force stereo mixing...");
+  CHECK(volume->SetOutputVolumePan(-1, (float)0.1, (float)1.0));
+  Sleep(testTime);
+
+  /*
+   Test("Recording of left and right channel playout to two 16kHz PCM "
+   "files...");
+   file->StartRecordingPlayoutStereo(
+   GetFilename("RecordedPlayout_Left_16kHz.pcm"),
+   GetFilename("RecordedPlayout_Right_16kHz.pcm"), StereoBoth);
+   Sleep(testTime);
+   Test("Back to center volume again...");
+   CHECK(volume->SetOutputVolumePan(-1, (float)1.0, (float)1.0));
+   */
+
+  Test("(ch 0) Playing out the recorded file for the left channel (10%%)...");
+  CHECK(volume->SetInputMute(0, true));
+  std::string leftFilename = outputDir + "RecordedPlayout_Left_16kHz.pcm";
+  CHECK(file->StartPlayingFileLocally(0, leftFilename.c_str()));
+  Sleep(testTime);
+  CHECK(file->StopPlayingFileLocally(0));
+
+  Test("(ch 0) Playing out the recorded file for the right channel (100%%) =>"
+    " should sound louder than the left channel...");
+  std::string rightFilename = outputDir + "RecordedPlayout_Right_16kHz.pcm";
+  CHECK(file->StartPlayingFileLocally(0, rightFilename.c_str()));
+  Sleep(testTime);
+  CHECK(file->StopPlayingFileLocally(0));
+  CHECK(volume->SetInputMute(0, false));
+
+  base->DeleteChannel(0);
+  base->DeleteChannel(1);
+  ANL();
+
+  // ---------------------------
+  // Verify inserted Dtmf tones
+  // ---------------------------
+
+  Test(">> Verify Dtmf feedback functionality:\n");
+
+  base->CreateChannel();
+
+  for (int i = 0; i < 2; i++) {
+    if (i == 0)
+      Test("Dtmf direct feedback is now enabled...");
+    else
+      Test("Dtmf direct feedback is now disabled...");
+
+    CHECK(dtmf->SetDtmfFeedbackStatus(true, (i==0)));
+
+    Test("(ch 0) Sending at 32kHz using G.722.1C...");
+    CHECK(codec->SetRecPayloadType(0, g722_1c_32));
+    CHECK(codec->SetSendCodec(0, g722_1c_32));
+    CHECK(StartMedia(0, 12345, true, true, true, false, false));
+    Sleep(500);
+
+    Test("(ch 0) Sending outband Dtmf events => ensure that they are added"
+      " to the mixer...");
+    // ensure that receiver will not play out outband Dtmf
+    CHECK(dtmf->SetSendTelephoneEventPayloadType(0, 118));
+    CHECK(dtmf->SendTelephoneEvent(0, 9, true, 390));
+    Sleep(500);
+    CHECK(dtmf->SendTelephoneEvent(0, 1, true, 390));
+    Sleep(500);
+    CHECK(dtmf->SendTelephoneEvent(0, 5, true, 390));
+    Sleep(500);
+    Sleep(testTime - 1500);
+
+    Test("(ch 0) Changing codec to 8kHz PCMU...");
+    CHECK(codec->SetSendCodec(0, pcmu_8));
+    Sleep(500);
+
+    Test("(ch 0) Sending outband Dtmf events => ensure that they are added"
+      " to the mixer...");
+    CHECK(dtmf->SendTelephoneEvent(0, 9, true, 390));
+    Sleep(500);
+    CHECK(dtmf->SendTelephoneEvent(0, 1, true, 390));
+    Sleep(500);
+    CHECK(dtmf->SendTelephoneEvent(0, 5, true, 390));
+    Sleep(500);
+    Sleep(testTime - 1500);
+
+    Test("(ch 0) Changing codec to 16kHz L16...");
+    CHECK(codec->SetSendCodec(0, l16_16));
+    Sleep(500);
+
+    Test("(ch 0) Sending outband Dtmf events => ensure that they are added"
+      " to the mixer...");
+    CHECK(dtmf->SendTelephoneEvent(0, 9, true, 390));
+    Sleep(500);
+    CHECK(dtmf->SendTelephoneEvent(0, 1, true, 390));
+    Sleep(500);
+    CHECK(dtmf->SendTelephoneEvent(0, 5, true, 390));
+    Sleep(500);
+    Sleep(testTime - 1500);
+
+    StopMedia(0);
+    ANL();
+  }
+
+  base->DeleteChannel(0);
+
+  // ---------------------------
+  // Verify external processing
+  // --------------------------
+
+  base->CreateChannel();
+
+  Test(">> Verify external media processing:\n");
+
+  Test("(ch 0) Playing 16kHz file locally <=> mixing in mono @ 16kHz...");
+  CHECK(StartMedia(0, 12345, false, true, false, false, true));
+  Sleep(testTime);
+  Test("Enabling playout external media processing => played audio should "
+    "now be affected");
+  CHECK(xmedia->RegisterExternalMediaProcessing(
+          0, kPlaybackAllChannelsMixed, mpobj));
+  Sleep(testTime);
+  Test("(ch 0) Sending speech at 32kHz <=> mixing at 32kHz...");
+  CHECK(codec->SetSendCodec(0, l16_32));
+  Sleep(testTime);
+  printf("Back to normal again\n");
+  CHECK(xmedia->DeRegisterExternalMediaProcessing(0,
+          kPlaybackAllChannelsMixed));
+  Sleep(testTime);
+  printf("Enabling playout external media processing on ch 0 => "
+    "played audio should now be affected\n");
+  CHECK(xmedia->RegisterExternalMediaProcessing(0, kPlaybackPerChannel,
+          mpobj));
+  Sleep(testTime);
+  Test("Panning volume to the right <=> mixing in stereo @ 32kHz...");
+  CHECK(volume->SetOutputVolumePan(-1, 0.0, 1.0));
+  Sleep(testTime);
+  Test("Back to center volume again <=> mixing in mono @ 32kHz...");
+  CHECK(volume->SetOutputVolumePan(-1, 1.0, 1.0));
+  Sleep(testTime);
+  printf("Back to normal again\n");
+  CHECK(xmedia->DeRegisterExternalMediaProcessing(0, kPlaybackPerChannel));
+  Sleep(testTime);
+  CHECK(StopMedia(0));
+  ANL();
+
+  base->DeleteChannel(0);
+
+  // --------------------------------------------------
+  // Extended tests of emulated stereo encoding schemes
+  // --------------------------------------------------
+
+  CodecInst PCMU;
+  CodecInst G729;
+  CodecInst L16_8;
+  CodecInst L16_16;
+  CodecInst L16_32;
+
+  base->CreateChannel();
+
+  Test(">> Verify emulated stereo encoding for differenct codecs:\n");
+
+  // enable external encryption
+  CHECK(encrypt->RegisterExternalEncryption(0, *this));
+  Test("(ch 0) External Encryption is now enabled:");
+
+  // register all codecs on the receiving side
+  strcpy(PCMU.plname, "PCMU");
+  PCMU.channels = 2;
+  PCMU.pacsize = 160;
+  PCMU.plfreq = 8000;
+  PCMU.pltype = 125;
+  PCMU.rate = 64000;
+  CHECK(codec->SetRecPayloadType(0, PCMU));
+
+  strcpy(G729.plname, "G729");
+  G729.channels = 2;
+  G729.pacsize = 160;
+  G729.plfreq = 8000;
+  G729.pltype = 18;
+  G729.rate = 8000;
+  CHECK(codec->SetRecPayloadType(0, G729));
+
+  strcpy(L16_8.plname, "L16");
+  L16_8.channels = 2;
+  L16_8.pacsize = 160;
+  L16_8.plfreq = 8000;
+  L16_8.pltype = 120;
+  L16_8.rate = 128000;
+  CHECK(codec->SetRecPayloadType(0, L16_8));
+
+  strcpy(L16_16.plname, "L16");
+  L16_16.channels = 2;
+  L16_16.pacsize = 320;
+  L16_16.plfreq = 16000;
+  L16_16.pltype = 121;
+  L16_16.rate = 256000;
+  CHECK(codec->SetRecPayloadType(0, L16_16));
+
+  // NOTE - we cannot send larger than 1500 bytes per RTP packet
+  strcpy(L16_32.plname, "L16");
+  L16_32.channels = 2;
+  L16_32.pacsize = 320;
+  L16_32.plfreq = 32000;
+  L16_32.pltype = 122;
+  L16_32.rate = 512000;
+  CHECK(codec->SetRecPayloadType(0, L16_32));
+
+  // sample-based, 8-bits per sample
+
+  Test("(ch 0) Sending using G.711 (sample based, 8 bits/sample)...");
+  PCMU.channels = 1;
+  CHECK(codec->SetSendCodec(0, PCMU));
+  SetStereoExternalEncryption(0, true, 8);
+  CHECK(StartMedia(0, 12345, true, true, true, true, false));
+  Sleep(testTime);
+
+  // sample-based, 16-bits per sample
+
+  Test("(ch 0) Sending using L16 8kHz (sample based, 16 bits/sample)...");
+  L16_8.channels = 1;
+  CHECK(codec->SetSendCodec(0, L16_8));
+  SetStereoExternalEncryption(0, true, 16);
+  Sleep(testTime);
+
+  Test("(ch 0) Sending using L16 16kHz (sample based, 16 bits/sample)...");
+  L16_16.channels = 1;
+  CHECK(codec->SetSendCodec(0, L16_16));
+  Sleep(testTime);
+
+  Test("(ch 0) Sending using L16 32kHz (sample based, 16 bits/sample)...");
+  L16_32.channels = 1;
+  CHECK(codec->SetSendCodec(0, L16_32));
+  Sleep(testTime);
+
+  Test("(ch 0) Sending using G.729 (frame based)...");
+  G729.channels = 1;
+  CHECK(codec->SetSendCodec(0, G729));
+  Sleep(testTime);
+
+  StopMedia(0);
+
+  // disable external encryption
+  CHECK(encrypt->DeRegisterExternalEncryption(0));
+
+  base->DeleteChannel(0);
+
+  // ------------------------------------------------------------------------
+  CHECK(base->Terminate());
+
+  printf("\n\n------------------------------------------------\n");
+  printf(" Test passed!\n");
+  printf("------------------------------------------------\n\n");
+
+  return 0;
+}
+
+} // namespace voetest
diff --git a/src/voice_engine/test/auto_test/voe_unit_test.h b/src/voice_engine/test/auto_test/voe_unit_test.h
new file mode 100644
index 0000000..346713a
--- /dev/null
+++ b/src/voice_engine/test/auto_test/voe_unit_test.h
@@ -0,0 +1,68 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_UNIT_TEST_H
+#define WEBRTC_VOICE_ENGINE_VOE_UNIT_TEST_H
+
+#include "voice_engine/test/auto_test/voe_standard_test.h"
+
+namespace voetest {
+
+class VoETestManager;
+
+class VoEUnitTest : public Encryption {
+ public:
+  VoEUnitTest(VoETestManager& mgr);
+  ~VoEUnitTest() {}
+  int DoTest();
+
+ protected:
+  // Encryption
+  void encrypt(int channel_no, unsigned char * in_data,
+               unsigned char * out_data, int bytes_in, int * bytes_out);
+  void decrypt(int channel_no, unsigned char * in_data,
+               unsigned char * out_data, int bytes_in, int * bytes_out);
+  void encrypt_rtcp(int channel_no, unsigned char * in_data,
+                    unsigned char * out_data, int bytes_in, int * bytes_out);
+  void decrypt_rtcp(int channel_no, unsigned char * in_data,
+                    unsigned char * out_data, int bytes_in, int * bytes_out);
+
+ private:
+  int MenuSelection();
+  int MixerTest();
+  void Sleep(unsigned int timeMillisec, bool addMarker = false);
+  void Wait();
+  int StartMedia(int channel,
+                 int rtpPort,
+                 bool listen,
+                 bool playout,
+                 bool send,
+                 bool fileAsMic,
+                 bool localFile);
+  int StopMedia(int channel);
+  void Test(const char* msg);
+  void SetStereoExternalEncryption(int channel, bool onOff, int bitsPerSample);
+
+ private:
+  VoETestManager& _mgr;
+
+ private:
+  bool _listening[32];
+  bool _playing[32];
+  bool _sending[32];
+
+ private:
+  bool _extOnOff;
+  int _extBitsPerSample;
+  int _extChannel;
+};
+
+} //  namespace voetest
+#endif // WEBRTC_VOICE_ENGINE_VOE_UNIT_TEST_H
diff --git a/src/voice_engine/test/cmd_test/voe_cmd_test.cc b/src/voice_engine/test/cmd_test/voe_cmd_test.cc
new file mode 100644
index 0000000..8753002
--- /dev/null
+++ b/src/voice_engine/test/cmd_test/voe_cmd_test.cc
@@ -0,0 +1,924 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#ifndef _WIN32
+#include <unistd.h>
+#endif
+
+#include <vector>
+
+#include "gtest/gtest.h"
+#include "test/testsupport/fileutils.h"
+
+#include "voe_errors.h"
+#include "voe_base.h"
+#include "voe_codec.h"
+#include "voe_volume_control.h"
+#include "voe_dtmf.h"
+#include "voe_rtp_rtcp.h"
+#include "voe_audio_processing.h"
+#include "voe_file.h"
+#include "voe_video_sync.h"
+#include "voe_encryption.h"
+#include "voe_hardware.h"
+#include "voe_external_media.h"
+#include "voe_network.h"
+#include "voe_neteq_stats.h"
+#include "engine_configurations.h"
+
+// Enable this this flag to run this test with hard coded
+// IP/Port/codec and start test automatically with key input
+// it could be useful in repeat tests.
+//#define DEBUG
+
+// #define EXTERNAL_TRANSPORT
+
+using namespace webrtc;
+
+#define VALIDATE                                                        \
+  if (res != 0)                                                         \
+  {                                                                     \
+    printf("*** Error at position %i / line %i \n", cnt, __LINE__);     \
+    printf("*** Error code = %i \n", base1->LastError());               \
+  }                                                                     \
+  cnt++;
+
+VoiceEngine* m_voe = NULL;
+VoEBase* base1 = NULL;
+VoECodec* codec = NULL;
+VoEVolumeControl* volume = NULL;
+VoEDtmf* dtmf = NULL;
+VoERTP_RTCP* rtp_rtcp = NULL;
+VoEAudioProcessing* apm = NULL;
+VoENetwork* netw = NULL;
+VoEFile* file = NULL;
+VoEVideoSync* vsync = NULL;
+VoEEncryption* encr = NULL;
+VoEHardware* hardware = NULL;
+VoEExternalMedia* xmedia = NULL;
+VoENetEqStats* neteqst = NULL;
+
+void RunTest(std::string out_path);
+
+#ifdef EXTERNAL_TRANSPORT
+
+class my_transportation : public Transport
+{
+  int SendPacket(int channel,const void *data,int len);
+  int SendRTCPPacket(int channel, const void *data, int len);
+};
+
+int my_transportation::SendPacket(int channel,const void *data,int len)
+{
+  netw->ReceivedRTPPacket(channel, data, len);
+  return 0;
+}
+
+int my_transportation::SendRTCPPacket(int channel, const void *data, int len)
+{
+  netw->ReceivedRTCPPacket(channel, data, len);
+  return 0;
+}
+
+my_transportation my_transport;
+#endif
+
+class MyObserver : public VoiceEngineObserver {
+ public:
+   virtual void CallbackOnError(const int channel, const int err_code);
+};
+
+void MyObserver::CallbackOnError(const int channel, const int err_code) {
+  // Add printf for other error codes here
+  if (err_code == VE_TYPING_NOISE_WARNING) {
+    printf("  TYPING NOISE DETECTED \n");
+  } else if (err_code == VE_RECEIVE_PACKET_TIMEOUT) {
+    printf("  RECEIVE PACKET TIMEOUT \n");
+  } else if (err_code == VE_PACKET_RECEIPT_RESTARTED) {
+    printf("  PACKET RECEIPT RESTARTED \n");
+  } else if (err_code == VE_RUNTIME_PLAY_WARNING) {
+    printf("  RUNTIME PLAY WARNING \n");
+  } else if (err_code == VE_RUNTIME_REC_WARNING) {
+    printf("  RUNTIME RECORD WARNING \n");
+  } else if (err_code == VE_SATURATION_WARNING) {
+    printf("  SATURATION WARNING \n");
+  } else if (err_code == VE_RUNTIME_PLAY_ERROR) {
+    printf("  RUNTIME PLAY ERROR \n");
+  } else if (err_code == VE_RUNTIME_REC_ERROR) {
+    printf("  RUNTIME RECORD ERROR \n");
+  } else if (err_code == VE_REC_DEVICE_REMOVED) {
+    printf("  RECORD DEVICE REMOVED \n");
+  }
+}
+
+int main() {
+  int res = 0;
+  int cnt = 0;
+
+  printf("Test started \n");
+
+  m_voe = VoiceEngine::Create();
+  base1 = VoEBase::GetInterface(m_voe);
+  codec = VoECodec::GetInterface(m_voe);
+  apm = VoEAudioProcessing::GetInterface(m_voe);
+  volume = VoEVolumeControl::GetInterface(m_voe);
+  dtmf = VoEDtmf::GetInterface(m_voe);
+  rtp_rtcp = VoERTP_RTCP::GetInterface(m_voe);
+  netw = VoENetwork::GetInterface(m_voe);
+  file = VoEFile::GetInterface(m_voe);
+  vsync = VoEVideoSync::GetInterface(m_voe);
+  encr = VoEEncryption::GetInterface(m_voe);
+  hardware = VoEHardware::GetInterface(m_voe);
+  xmedia = VoEExternalMedia::GetInterface(m_voe);
+  neteqst = VoENetEqStats::GetInterface(m_voe);
+
+  MyObserver my_observer;
+
+  const std::string out_path = webrtc::test::OutputPath();
+  const std::string trace_filename = out_path + "webrtc_trace.txt";
+
+  printf("Set trace filenames (enable trace)\n");
+  VoiceEngine::SetTraceFilter(kTraceAll);
+  res = VoiceEngine::SetTraceFile(trace_filename.c_str());
+  VALIDATE;
+
+  res = VoiceEngine::SetTraceCallback(NULL);
+  VALIDATE;
+
+  printf("Init\n");
+  res = base1->Init();
+  if (res != 0) {
+    printf("\nError calling Init: %d\n", base1->LastError());
+    fflush(NULL);
+    exit(1);
+  }
+
+  res = base1->RegisterVoiceEngineObserver(my_observer);
+  VALIDATE;
+
+  cnt++;
+  printf("Version\n");
+  char tmp[1024];
+  res = base1->GetVersion(tmp);
+  VALIDATE;
+  cnt++;
+  printf("%s\n", tmp);
+
+  RunTest(out_path);
+
+  printf("Terminate \n");
+
+  base1->DeRegisterVoiceEngineObserver();
+
+  res = base1->Terminate();
+  VALIDATE;
+
+  if (base1)
+    base1->Release();
+
+  if (codec)
+    codec->Release();
+
+  if (volume)
+    volume->Release();
+
+  if (dtmf)
+    dtmf->Release();
+
+  if (rtp_rtcp)
+    rtp_rtcp->Release();
+
+  if (apm)
+    apm->Release();
+
+  if (netw)
+    netw->Release();
+
+  if (file)
+    file->Release();
+
+  if (vsync)
+    vsync->Release();
+
+  if (encr)
+    encr->Release();
+
+  if (hardware)
+    hardware->Release();
+
+  if (xmedia)
+    xmedia->Release();
+
+  if (neteqst)
+    neteqst->Release();
+
+  VoiceEngine::Delete(m_voe);
+
+  return 0;
+}
+
+void RunTest(std::string out_path) {
+  int chan, cnt, res;
+  CodecInst cinst;
+  cnt = 0;
+  int i;
+  int codecinput;
+  bool AEC = false;
+  bool AGC = true;
+  bool AGC1 = false;
+  bool VAD = false;
+  bool NS = false;
+  bool NS1 = false;
+  bool typing_detection = false;
+  bool muted = false;
+  bool on_hold = false;
+
+#if defined(WEBRTC_ANDROID)
+  std::string resource_path = "/sdcard/";
+#else
+  std::string resource_path = webrtc::test::ProjectRootPath();
+  if (resource_path == webrtc::test::kCannotFindProjectRootDir) {
+    printf("*** Unable to get project root directory. "
+           "File playing may fail. ***\n");
+    // Fall back to the current directory.
+    resource_path = "./";
+  } else {
+    resource_path += "data/voice_engine/";
+  }
+#endif
+  const std::string audio_filename = resource_path + "audio_long16.pcm";
+
+  const std::string play_filename = out_path + "recorded_playout.pcm";
+  const std::string mic_filename = out_path + "recorded_mic.pcm";
+
+  chan = base1->CreateChannel();
+  if (chan < 0) {
+    printf("Error at position %i\n", cnt);
+    printf("************ Error code = %i\n", base1->LastError());
+    fflush(NULL);
+  }
+  cnt++;
+
+  int j = 0;
+#ifdef EXTERNAL_TRANSPORT
+  my_transportation ch0transport;
+  printf("Enabling external transport \n");
+  netw->RegisterExternalTransport(0, ch0transport);
+#else
+  char ip[64];
+#ifdef DEBUG
+  strcpy(ip, "127.0.0.1");
+#else
+  char localip[64];
+  netw->GetLocalIP(localip);
+  printf("local IP:%s\n", localip);
+
+  printf("1. 127.0.0.1 \n");
+  printf("2. Specify IP \n");
+  ASSERT_EQ(1, scanf("%i", &i));
+
+  if (1 == i)
+    strcpy(ip, "127.0.0.1");
+  else {
+    printf("Specify remote IP: ");
+    ASSERT_EQ(1, scanf("%s", ip));
+  }
+#endif
+
+  int colons(0);
+  while (ip[j] != '\0' && j < 64 && !(colons = (ip[j++] == ':')))
+    ;
+  if (colons) {
+    printf("Enabling IPv6\n");
+    res = netw->EnableIPv6(0);
+    VALIDATE;
+  }
+
+  int rPort;
+#ifdef DEBUG
+  rPort=8500;
+#else
+  printf("Specify remote port (1=1234): ");
+  ASSERT_EQ(1, scanf("%i", &rPort));
+  if (1 == rPort)
+    rPort = 1234;
+  printf("Set Send port \n");
+#endif
+
+  printf("Set Send IP \n");
+  res = base1->SetSendDestination(chan, rPort, ip);
+  VALIDATE;
+
+  int lPort;
+#ifdef DEBUG
+  lPort=8500;
+#else
+  printf("Specify local port (1=1234): ");
+  ASSERT_EQ(1, scanf("%i", &lPort));
+  if (1 == lPort)
+    lPort = 1234;
+  printf("Set Rec Port \n");
+#endif
+  res = base1->SetLocalReceiver(chan, lPort);
+  VALIDATE;
+#endif
+
+  printf("\n");
+  for (i = 0; i < codec->NumOfCodecs(); i++) {
+    res = codec->GetCodec(i, cinst);
+    VALIDATE;
+    if (strncmp(cinst.plname, "ISAC", 4) == 0 && cinst.plfreq == 32000) {
+      printf("%i. ISAC-swb pltype:%i plfreq:%i channels:%i\n", i, cinst.pltype,
+             cinst.plfreq, cinst.channels);
+    }
+    else {
+      printf("%i. %s pltype:%i plfreq:%i channels:%i\n", i, cinst.plname,
+             cinst.pltype, cinst.plfreq, cinst.channels);
+    }
+  }
+#ifdef DEBUG
+  codecinput=0;
+#else
+  printf("Select send codec: ");
+  ASSERT_EQ(1, scanf("%i", &codecinput));
+#endif
+  codec->GetCodec(codecinput, cinst);
+
+  printf("Set primary codec\n");
+  res = codec->SetSendCodec(chan, cinst);
+  VALIDATE;
+
+#ifndef WEBRTC_ANDROID
+  const int kMaxNumChannels = 8;
+#else
+  const int kMaxNumChannels = 1;
+#endif
+  int channel_index = 0;
+  std::vector<int> channels(kMaxNumChannels);
+  for (i = 0; i < kMaxNumChannels; ++i) {
+    channels[i] = base1->CreateChannel();
+    int port = rPort + (i + 1) * 2;
+    res = base1->SetSendDestination(channels[i], port, ip);
+    VALIDATE;
+    res = base1->SetLocalReceiver(channels[i], port);
+    VALIDATE;
+    res = codec->SetSendCodec(channels[i], cinst);
+    VALIDATE;
+  }
+
+  // Call loop
+  bool newcall = true;
+  while (newcall) {
+
+#if defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+    int rd(-1), pd(-1);
+    res = hardware->GetNumOfRecordingDevices(rd);
+    VALIDATE;
+    res = hardware->GetNumOfPlayoutDevices(pd);
+    VALIDATE;
+
+    char dn[128] = { 0 };
+    char guid[128] = { 0 };
+    printf("\nPlayout devices (%d): \n", pd);
+    for (j=0; j<pd; ++j) {
+      res = hardware->GetPlayoutDeviceName(j, dn, guid);
+      VALIDATE;
+      printf("  %d: %s \n", j, dn);
+    }
+
+    printf("Recording devices (%d): \n", rd);
+    for (j=0; j<rd; ++j) {
+      res = hardware->GetRecordingDeviceName(j, dn, guid);
+      VALIDATE;
+      printf("  %d: %s \n", j, dn);
+    }
+
+    printf("Select playout device: ");
+    ASSERT_EQ(1, scanf("%d", &pd));
+    res = hardware->SetPlayoutDevice(pd);
+    VALIDATE;
+    printf("Select recording device: ");
+    ASSERT_EQ(1, scanf("%d", &rd));
+    printf("Setting sound devices \n");
+    res = hardware->SetRecordingDevice(rd);
+    VALIDATE;
+
+#endif // WEBRTC_LINUX
+    res = codec->SetVADStatus(0, VAD);
+    VALIDATE;
+
+    res = apm->SetAgcStatus(AGC);
+    VALIDATE;
+
+    res = apm->SetEcStatus(AEC);
+    VALIDATE;
+
+    res = apm->SetNsStatus(NS);
+    VALIDATE;
+
+#ifdef DEBUG
+    i = 1;
+#else
+    printf("\n1. Send, listen and playout \n");
+    printf("2. Send only \n");
+    printf("3. Listen and playout only \n");
+    printf("Select transfer mode: ");
+    ASSERT_EQ(1, scanf("%i", &i));
+#endif
+    const bool send = !(3 == i);
+    const bool receive = !(2 == i);
+
+    if (receive) {
+#ifndef EXTERNAL_TRANSPORT
+      printf("Start Listen \n");
+      res = base1->StartReceive(chan);
+      VALIDATE;
+#endif
+
+      printf("Start Playout \n");
+      res = base1->StartPlayout(chan);
+      VALIDATE;
+    }
+
+    if (send) {
+      printf("Start Send \n");
+      res = base1->StartSend(chan);
+      VALIDATE;
+    }
+
+#ifndef WEBRTC_ANDROID
+    printf("Getting mic volume \n");
+    unsigned int vol = 999;
+    res = volume->GetMicVolume(vol);
+    VALIDATE;
+    if ((vol > 255) || (vol < 1)) {
+      printf("\n****ERROR in GetMicVolume");
+    }
+#endif
+
+    int forever = 1;
+    while (forever) {
+      printf("\nActions\n");
+
+      printf("Codec Changes\n");
+      for (i = 0; i < codec->NumOfCodecs(); i++) {
+        res = codec->GetCodec(i, cinst);
+        VALIDATE;
+        if (strncmp(cinst.plname, "ISAC", 4) == 0 && cinst.plfreq
+            == 32000) {
+          printf("\t%i. ISAC-swb pltype:%i plfreq:%i channels:%i\n", i,
+                 cinst.pltype, cinst.plfreq, cinst.channels);
+        }
+        else {
+          printf("\t%i. %s pltype:%i plfreq:%i channels:%i\n", i, cinst.plname,
+                 cinst.pltype, cinst.plfreq, cinst.channels);
+        }
+      }
+      printf("Other\n");
+      const int noCodecs = i - 1;
+      printf("\t%i. Toggle VAD\n", i);
+      i++;
+      printf("\t%i. Toggle AGC\n", i);
+      i++;
+      printf("\t%i. Toggle NS\n", i);
+      i++;
+      printf("\t%i. Toggle EC\n", i);
+      i++;
+      printf("\t%i. Select AEC\n", i);
+      i++;
+      printf("\t%i. Select AECM\n", i);
+      i++;
+      printf("\t%i. Get speaker volume\n", i);
+      i++;
+      printf("\t%i. Set speaker volume\n", i);
+      i++;
+      printf("\t%i. Get microphone volume\n", i);
+      i++;
+      printf("\t%i. Set microphone volume\n", i);
+      i++;
+      printf("\t%i. Play local file (audio_long16.pcm) \n", i);
+      i++;
+      printf("\t%i. Change playout device \n", i);
+      i++;
+      printf("\t%i. Change recording device \n", i);
+      i++;
+      printf("\t%i. Toggle receive-side AGC \n", i);
+      i++;
+      printf("\t%i. Toggle receive-side NS \n", i);
+      i++;
+      printf("\t%i. AGC status \n", i);
+      i++;
+      printf("\t%i. Toggle microphone mute \n", i);
+      i++;
+      printf("\t%i. Toggle on hold status \n", i);
+      i++;
+      printf("\t%i. Get last error code \n", i);
+      i++;
+      printf("\t%i. Toggle typing detection (for Mac/Windows only) \n", i);
+      i++;
+      printf("\t%i. Record a PCM file \n", i);
+      i++;
+      printf("\t%i. Play a previously recorded PCM file locally \n", i);
+      i++;
+      printf("\t%i. Play a previously recorded PCM file as microphone \n", i);
+      i++;
+      printf("\t%i. Add an additional file-playing channel \n", i);
+      i++;
+      printf("\t%i. Remove a file-playing channel \n", i);
+      i++;
+
+      printf("Select action or %i to stop the call: ", i);
+      ASSERT_EQ(1, scanf("%i", &codecinput));
+
+      if (codecinput < codec->NumOfCodecs()) {
+        res = codec->GetCodec(codecinput, cinst);
+        VALIDATE;
+
+        printf("Set primary codec\n");
+        res = codec->SetSendCodec(chan, cinst);
+        VALIDATE;
+      }
+      else if (codecinput == (noCodecs + 1)) {
+        VAD = !VAD;
+        res = codec->SetVADStatus(0, VAD);
+        VALIDATE;
+        if (VAD)
+          printf("\n VAD is now on! \n");
+        else
+          printf("\n VAD is now off! \n");
+      }
+      else if (codecinput == (noCodecs + 2)) {
+        AGC = !AGC;
+        res = apm->SetAgcStatus(AGC);
+        VALIDATE;
+        if (AGC)
+          printf("\n AGC is now on! \n");
+        else
+          printf("\n AGC is now off! \n");
+      }
+      else if (codecinput == (noCodecs + 3)) {
+        NS = !NS;
+        res = apm->SetNsStatus(NS);
+        VALIDATE;
+        if (NS)
+          printf("\n NS is now on! \n");
+        else
+          printf("\n NS is now off! \n");
+      }
+      else if (codecinput == (noCodecs + 4)) {
+        AEC = !AEC;
+        res = apm->SetEcStatus(AEC, kEcUnchanged);
+        VALIDATE;
+        if (AEC)
+          printf("\n Echo control is now on! \n");
+        else
+          printf("\n Echo control is now off! \n");
+      }
+      else if (codecinput == (noCodecs + 5)) {
+        res = apm->SetEcStatus(AEC, kEcAec);
+        VALIDATE;
+        printf("\n AEC selected! \n");
+        if (AEC)
+          printf(" (Echo control is on)\n");
+        else
+          printf(" (Echo control is off)\n");
+      }
+      else if (codecinput == (noCodecs + 6)) {
+        res = apm->SetEcStatus(AEC, kEcAecm);
+        VALIDATE;
+        printf("\n AECM selected! \n");
+        if (AEC)
+          printf(" (Echo control is on)\n");
+        else
+          printf(" (Echo control is off)\n");
+      }
+      else if (codecinput == (noCodecs + 7)) {
+        unsigned vol(0);
+        res = volume->GetSpeakerVolume(vol);
+        VALIDATE;
+        printf("\n Speaker Volume is %d \n", vol);
+      }
+      else if (codecinput == (noCodecs + 8)) {
+        printf("Level: ");
+        ASSERT_EQ(1, scanf("%i", &i));
+        res = volume->SetSpeakerVolume(i);
+        VALIDATE;
+      }
+      else if (codecinput == (noCodecs + 9)) {
+        unsigned vol(0);
+        res = volume->GetMicVolume(vol);
+        VALIDATE;
+        printf("\n Microphone Volume is %d \n", vol);
+      }
+      else if (codecinput == (noCodecs + 10)) {
+        printf("Level: ");
+        ASSERT_EQ(1, scanf("%i", &i));
+        res = volume->SetMicVolume(i);
+        VALIDATE;
+      }
+      else if (codecinput == (noCodecs + 11)) {
+        res = file->StartPlayingFileLocally(0, audio_filename.c_str());
+        VALIDATE;
+      }
+      else if (codecinput == (noCodecs + 12)) {
+        // change the playout device with current call
+        int num_pd(-1);
+        res = hardware->GetNumOfPlayoutDevices(num_pd);
+        VALIDATE;
+
+        char dn[128] = { 0 };
+        char guid[128] = { 0 };
+
+        printf("\nPlayout devices (%d): \n", num_pd);
+        for (j = 0; j < num_pd; ++j) {
+          res = hardware->GetPlayoutDeviceName(j, dn, guid);
+          VALIDATE;
+          printf("  %d: %s \n", j, dn);
+        }
+        printf("Select playout device: ");
+        ASSERT_EQ(1, scanf("%d", &num_pd));
+        // Will use plughw for hardware devices
+        res = hardware->SetPlayoutDevice(num_pd);
+        VALIDATE;
+      }
+      else if (codecinput == (noCodecs + 13)) {
+        // change the recording device with current call
+        int num_rd(-1);
+
+        res = hardware->GetNumOfRecordingDevices(num_rd);
+        VALIDATE;
+
+        char dn[128] = { 0 };
+        char guid[128] = { 0 };
+
+        printf("Recording devices (%d): \n", num_rd);
+        for (j = 0; j < num_rd; ++j) {
+          res = hardware->GetRecordingDeviceName(j, dn, guid);
+          VALIDATE;
+          printf("  %d: %s \n", j, dn);
+        }
+
+        printf("Select recording device: ");
+        ASSERT_EQ(1, scanf("%d", &num_rd));
+        printf("Setting sound devices \n");
+        // Will use plughw for hardware devices
+        res = hardware->SetRecordingDevice(num_rd);
+        VALIDATE;
+      }
+      else if (codecinput == (noCodecs + 14)) {
+        // Remote AGC
+        AGC1 = !AGC1;
+        res = apm->SetRxAgcStatus(chan, AGC1);
+        VALIDATE;
+        if (AGC1)
+          printf("\n Receive-side AGC is now on! \n");
+        else
+          printf("\n Receive-side AGC is now off! \n");
+      }
+      else if (codecinput == (noCodecs + 15)) {
+        // Remote NS
+        NS1 = !NS1;
+        res = apm->SetRxNsStatus(chan, NS);
+        VALIDATE;
+        if (NS1)
+          printf("\n Receive-side NS is now on! \n");
+        else
+          printf("\n Receive-side NS is now off! \n");
+      }
+      else if (codecinput == (noCodecs + 16)) {
+        AgcModes agcmode;
+        bool enable;
+        res = apm->GetAgcStatus(enable, agcmode);
+        VALIDATE
+            printf("\n AGC enable is %d, mode is %d \n", enable, agcmode);
+      }
+      else if (codecinput == (noCodecs + 17)) {
+        // Toggle Mute on Microphone
+        res = volume->GetInputMute(chan, muted);
+        VALIDATE;
+        muted = !muted;
+        res = volume->SetInputMute(chan, muted);
+        VALIDATE;
+        if (muted)
+          printf("\n Microphone is now on mute! \n");
+        else
+          printf("\n Microphone is no longer on mute! \n");
+
+      }
+      else if (codecinput == (noCodecs + 18)) {
+        // Toggle the call on hold
+        OnHoldModes mode;
+        res = base1->GetOnHoldStatus(chan, on_hold, mode);
+        VALIDATE;
+        on_hold = !on_hold;
+        mode = kHoldSendAndPlay;
+        res = base1->SetOnHoldStatus(chan, on_hold, mode);
+        VALIDATE;
+        if (on_hold)
+          printf("\n Call now on hold! \n");
+        else
+          printf("\n Call now not on hold! \n");
+      }
+
+      else if (codecinput == (noCodecs + 19)) {
+        // Get the last error code and print to screen
+        int err_code = 0;
+        err_code = base1->LastError();
+        if (err_code != -1)
+          printf("\n The last error code was %i.\n", err_code);
+      }
+      else if (codecinput == (noCodecs + 20)) {
+        typing_detection= !typing_detection;
+        res = apm->SetTypingDetectionStatus(typing_detection);
+        VALIDATE;
+        if (typing_detection)
+          printf("\n Typing detection is now on!\n");
+        else
+          printf("\n Typing detection is now off!\n");
+      }
+      else if (codecinput == (noCodecs + 21)) {
+        int stop_record = 1;
+        int file_source = 1;
+        printf("\n Select source of recorded file. ");
+        printf("\n 1. Record from microphone to file ");
+        printf("\n 2. Record from playout to file ");
+        printf("\n Enter your selection: \n");
+        ASSERT_EQ(1, scanf("%i", &file_source));
+        if (file_source == 1) {
+          printf("\n Start recording microphone as %s \n",
+                 mic_filename.c_str());
+          res = file->StartRecordingMicrophone(mic_filename.c_str());
+          VALIDATE;
+        }
+        else {
+          printf("\n Start recording playout as %s \n", play_filename.c_str());
+          res = file->StartRecordingPlayout(chan, play_filename.c_str());
+          VALIDATE;
+        }
+        while (stop_record != 0) {
+          printf("\n Type 0 to stop recording file \n");
+          ASSERT_EQ(1, scanf("%i", &stop_record));
+        }
+        if (file_source == 1) {
+          res = file->StopRecordingMicrophone();
+          VALIDATE;
+        }
+        else {
+          res = file->StopRecordingPlayout(chan);
+          VALIDATE;
+        }
+        printf("\n File finished recording \n");
+      }
+      else if (codecinput == (noCodecs + 22)) {
+        int file_type = 1;
+        int stop_play = 1;
+        printf("\n Select a file to play locally in a loop.");
+        printf("\n 1. Play %s", mic_filename.c_str());
+        printf("\n 2. Play %s", play_filename.c_str());
+        printf("\n Enter your selection\n");
+        ASSERT_EQ(1, scanf("%i", &file_type));
+        if (file_type == 1)  {
+          printf("\n Start playing %s locally in a loop\n",
+                 mic_filename.c_str());
+          res = file->StartPlayingFileLocally(chan, mic_filename.c_str(), true);
+          VALIDATE;
+        }
+        else {
+          printf("\n Start playing %s locally in a loop\n",
+                 play_filename.c_str());
+          res = file->StartPlayingFileLocally(chan, play_filename.c_str(),
+                                              true);
+          VALIDATE;
+        }
+        while (stop_play != 0) {
+          printf("\n Type 0 to stop playing file\n");
+          ASSERT_EQ(1, scanf("%i", &stop_play));
+        }
+        res = file->StopPlayingFileLocally(chan);
+        VALIDATE;
+      }
+      else if (codecinput == (noCodecs + 23)) {
+        int file_type = 1;
+        int stop_play = 1;
+        printf("\n Select a file to play as microphone in a loop.");
+        printf("\n 1. Play %s", mic_filename.c_str());
+        printf("\n 2. Play %s", play_filename.c_str());
+        printf("\n Enter your selection\n");
+        ASSERT_EQ(1, scanf("%i", &file_type));
+        if (file_type == 1)  {
+          printf("\n Start playing %s as mic in a loop\n",
+                 mic_filename.c_str());
+          res = file->StartPlayingFileAsMicrophone(chan, mic_filename.c_str(),
+                                                   true);
+          VALIDATE;
+        }
+        else {
+          printf("\n Start playing %s as mic in a loop\n",
+                 play_filename.c_str());
+          res = file->StartPlayingFileAsMicrophone(chan, play_filename.c_str(),
+                                                   true);
+          VALIDATE;
+        }
+        while (stop_play != 0) {
+          printf("\n Type 0 to stop playing file\n");
+          ASSERT_EQ(1, scanf("%i", &stop_play));
+        }
+        res = file->StopPlayingFileAsMicrophone(chan);
+        VALIDATE;
+      }
+      else if (codecinput == (noCodecs + 24)) {
+        if (channel_index < kMaxNumChannels) {
+          res = base1->StartReceive(channels[channel_index]);
+          VALIDATE;
+          res = base1->StartPlayout(channels[channel_index]);
+          VALIDATE;
+          res = base1->StartSend(channels[channel_index]);
+          VALIDATE;
+          res = file->StartPlayingFileAsMicrophone(channels[channel_index],
+                                                   audio_filename.c_str(),
+                                                   true,
+                                                   false);
+          VALIDATE;
+          channel_index++;
+          printf("Using %d additional channels\n", channel_index);
+        } else {
+          printf("Max number of channels reached\n");
+        }
+      }
+      else if (codecinput == (noCodecs + 25)) {
+        if (channel_index > 0) {
+          channel_index--;
+          res = file->StopPlayingFileAsMicrophone(channels[channel_index]);
+          VALIDATE;
+          res = base1->StopSend(channels[channel_index]);
+          VALIDATE;
+          res = base1->StopPlayout(channels[channel_index]);
+          VALIDATE;
+          res = base1->StopReceive(channels[channel_index]);
+          VALIDATE;
+          printf("Using %d additional channels\n", channel_index);
+        } else {
+          printf("All additional channels stopped\n");
+        }
+      }
+      else
+        break;
+    }
+
+    if (send) {
+      printf("Stop Send \n");
+      res = base1->StopSend(chan);
+      VALIDATE;
+    }
+
+    if (receive) {
+      printf("Stop Playout \n");
+      res = base1->StopPlayout(chan);
+      VALIDATE;
+
+#ifndef EXTERNAL_TRANSPORT
+      printf("Stop Listen \n");
+      res = base1->StopReceive(chan);
+      VALIDATE;
+#endif
+    }
+
+    while (channel_index > 0) {
+      --channel_index;
+      res = file->StopPlayingFileAsMicrophone(channels[channel_index]);
+      VALIDATE;
+      res = base1->StopSend(channels[channel_index]);
+      VALIDATE;
+      res = base1->StopPlayout(channels[channel_index]);
+      VALIDATE;
+      res = base1->StopReceive(channels[channel_index]);
+      VALIDATE;
+    }
+
+    printf("\n1. New call \n");
+    printf("2. Quit \n");
+    printf("Select action: ");
+    ASSERT_EQ(1, scanf("%i", &i));
+    newcall = (1 == i);
+    // Call loop
+  }
+
+  printf("Delete channels \n");
+  res = base1->DeleteChannel(chan);
+  VALIDATE;
+
+  for (i = 0; i < kMaxNumChannels; ++i) {
+    channels[i] = base1->DeleteChannel(channels[i]);
+    VALIDATE;
+  }
+}
diff --git a/src/voice_engine/test/voice_engine_tests.gypi b/src/voice_engine/test/voice_engine_tests.gypi
new file mode 100644
index 0000000..5e23589
--- /dev/null
+++ b/src/voice_engine/test/voice_engine_tests.gypi
@@ -0,0 +1,163 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'targets': [
+    # Auto test - command line test for all platforms
+    {
+      'target_name': 'voe_auto_test',
+      'type': 'executable',
+      'dependencies': [
+        'voice_engine_core',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+        '<(webrtc_root)/test/test.gyp:test_support',
+        '<(DEPTH)/testing/gtest.gyp:gtest',
+        '<(DEPTH)/testing/gmock.gyp:gmock',
+        '<(webrtc_root)/test/libtest/libtest.gyp:libtest',
+      ],
+      'include_dirs': [
+        'auto_test',
+        'auto_test/fixtures',
+        '<(webrtc_root)/modules/interface',
+        # TODO(phoglund): We only depend on voice_engine_defines.h here -
+        # move that file to interface and then remove this dependency.
+        '<(webrtc_root)/voice_engine',
+        '<(webrtc_root)/modules/audio_device/main/interface',
+      ],
+      'sources': [
+        'auto_test/automated_mode.cc',
+        'auto_test/extended/agc_config_test.cc',
+        'auto_test/extended/ec_metrics_test.cc',
+        'auto_test/fakes/fake_external_transport.cc',
+        'auto_test/fakes/fake_external_transport.h',
+        'auto_test/fixtures/after_initialization_fixture.cc',
+        'auto_test/fixtures/after_initialization_fixture.h',
+        'auto_test/fixtures/after_streaming_fixture.cc',
+        'auto_test/fixtures/after_streaming_fixture.h',
+        'auto_test/fixtures/before_initialization_fixture.cc',
+        'auto_test/fixtures/before_initialization_fixture.h',
+        'auto_test/fuzz/rtp_fuzz_test.cc',
+        'auto_test/standard/audio_processing_test.cc',
+        'auto_test/standard/call_report_test.cc',
+        'auto_test/standard/codec_before_streaming_test.cc',
+        'auto_test/standard/codec_test.cc',
+        'auto_test/standard/dtmf_test.cc',
+        'auto_test/standard/encryption_test.cc',
+        'auto_test/standard/external_media_test.cc',
+        'auto_test/standard/file_before_streaming_test.cc',
+        'auto_test/standard/file_test.cc',
+        'auto_test/standard/hardware_before_initializing_test.cc',
+        'auto_test/standard/hardware_before_streaming_test.cc',
+        'auto_test/standard/hardware_test.cc',
+        'auto_test/standard/manual_hold_test.cc',
+        'auto_test/standard/mixing_test.cc',
+        'auto_test/standard/neteq_stats_test.cc',
+        'auto_test/standard/neteq_test.cc',
+        'auto_test/standard/network_before_streaming_test.cc',
+        'auto_test/standard/network_test.cc',
+        'auto_test/standard/rtp_rtcp_before_streaming_test.cc',
+        'auto_test/standard/rtp_rtcp_test.cc',
+        'auto_test/standard/voe_base_misc_test.cc',
+        'auto_test/standard/video_sync_test.cc',
+        'auto_test/standard/volume_test.cc',
+        'auto_test/resource_manager.cc',
+        'auto_test/voe_cpu_test.cc',
+        'auto_test/voe_cpu_test.h',
+        'auto_test/voe_extended_test.cc',
+        'auto_test/voe_extended_test.h',
+        'auto_test/voe_standard_test.cc',
+        'auto_test/voe_standard_test.h',
+        'auto_test/voe_stress_test.cc',
+        'auto_test/voe_stress_test.h',
+        'auto_test/voe_test_defines.h',
+        'auto_test/voe_test_interface.h',
+        'auto_test/voe_unit_test.cc',
+        'auto_test/voe_unit_test.h',
+      ],
+      'conditions': [
+        ['OS=="android"', {
+          # some tests are not supported on android yet, exclude these tests.
+          'sources!': [
+            'auto_test/standard/hardware_before_streaming_test.cc',
+          ],
+        }],
+      ],
+    },
+    {
+      # command line test that should work on linux/mac/win
+      'target_name': 'voe_cmd_test',
+      'type': 'executable',
+      'dependencies': [
+        '<(webrtc_root)/test/test.gyp:test_support',
+        '<(DEPTH)/testing/gtest.gyp:gtest',
+        'voice_engine_core',
+        '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+      ],
+      'sources': [
+        'cmd_test/voe_cmd_test.cc',
+      ],
+    },
+  ],
+  'conditions': [
+    # TODO(kjellander): Support UseoFMFC on VS2010.
+    # http://code.google.com/p/webrtc/issues/detail?id=709
+    ['OS=="win" and MSVS_VERSION < "2010"', {
+      'targets': [
+        # WinTest - GUI test for Windows
+        {
+          'target_name': 'voe_ui_win_test',
+          'type': 'executable',
+          'dependencies': [
+            'voice_engine_core',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+            '<(webrtc_root)/test/test.gyp:test_support',
+          ],
+          'include_dirs': [
+            'win_test',
+          ],
+          'sources': [
+            'win_test/Resource.h',
+            'win_test/WinTest.cc',
+            'win_test/WinTest.h',
+            'win_test/WinTest.rc',
+            'win_test/WinTestDlg.cc',
+            'win_test/WinTestDlg.h',
+            'win_test/res/WinTest.ico',
+            'win_test/res/WinTest.rc2',
+            'win_test/stdafx.cc',
+            'win_test/stdafx.h',
+          ],
+          'configurations': {
+            'Common_Base': {
+              'msvs_configuration_attributes': {
+                'conditions': [
+                  ['component=="shared_library"', {
+                    'UseOfMFC': '2',  # Shared DLL
+                  },{
+                    'UseOfMFC': '1',  # Static
+                  }],
+                ],
+              },
+            },
+          },
+          'msvs_settings': {
+            'VCLinkerTool': {
+              'SubSystem': '2',   # Windows
+            },
+          },
+        },
+      ],
+    }],
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/voice_engine/test/win_test/Resource.h b/src/voice_engine/test/win_test/Resource.h
new file mode 100644
index 0000000..5ae9c5f
--- /dev/null
+++ b/src/voice_engine/test/win_test/Resource.h
@@ -0,0 +1,241 @@
+//{{NO_DEPENDENCIES}}
+// Microsoft Visual C++ generated include file.
+// Used by WinTest.rc
+//
+#define IDM_ABOUTBOX                    0x0010
+#define IDD_ABOUTBOX                    100
+#define IDS_ABOUTBOX                    101
+#define IDD_WINTEST_DIALOG              102
+#define IDR_MAINFRAME                   128
+#define IDD_DTMF_DIALOG                 129
+#define IDC_BUTTON_CREATE_1             1000
+#define IDC_BUTTON_DELETE_1             1001
+#define IDC_EDIT_1                      1002
+#define IDC_BUTTON_CREATE_2             1003
+#define IDC_BUTTON_DELETE_2             1004
+#define IDC_EDIT_2                      1005
+#define IDC_EDIT_MESSAGE                1006
+#define IDC_BUTTON_START_LISTEN_1       1007
+#define IDC_COMBO_IP_1                  1008
+#define IDC_EDIT_TX_PORT_1              1009
+#define IDC_EDIT_RX_PORT_1              1010
+#define IDC_COMBO_CODEC_1               1011
+#define IDC_BUTTON_STOP_LISTEN_1        1012
+#define IDC_STATIC_LISTEN               1013
+#define IDC_BUTTON_START_PLAYOUT_1      1014
+#define IDC_BUTTON_STOP_PLAYOUT_1       1015
+#define IDC_STATIC_PLAY                 1016
+#define IDC_BUTTON_START_SEND_1         1017
+#define IDC_BUTTON_STOP_SEND_1          1018
+#define IDC_STATIC_SEND                 1019
+#define IDC_COMBO_IP_2                  1020
+#define IDC_STATIC_IP                   1021
+#define IDC_STATIC_PORTS                1022
+#define IDC_STATIC_CODEC                1023
+#define IDC_STATIC_CHANNEL              1024
+#define IDC_STATIC_ID                   1025
+#define IDC_EDIT_TX_PORT_2              1026
+#define IDC_EDIT_RX_PORT_2              1027
+#define IDC_COMBO_CODEC_2               1028
+#define IDC_BUTTON_START_LISTEN_2       1029
+#define IDC_BUTTON_STOP_LISTEN_2        1030
+#define IDC_BUTTON_START_PLAYOUT_2      1031
+#define IDC_BUTTON_STOP_PLAYOUT_2       1032
+#define IDC_BUTTON_START_SEND_2         1033
+#define IDC_BUTTON_STOP_SEND_2          1034
+#define IDC_BUTTON_START_SEND_3         1035
+#define IDC_BUTTON_TEST_1_1             1035
+#define IDC_BUTTON_TEST_1               1035
+#define IDC_EDIT_RESULT                 1036
+#define IDC_EDIT_N_FAILS                1037
+#define IDC_STATIC_ERROR                1038
+#define IDC_EDIT_LAST_ERROR             1039
+#define IDC_STATIC_LAST_ERROR           1040
+#define IDC_STATIC_PLAY_FILE            1041
+#define IDC_STATIC_EXTERNAL             1042
+#define IDC_CHECK_EXT_TRANS_1           1043
+#define IDC_CHECK2                      1044
+#define IDC_CHECK_PLAY_FILE_IN_1        1044
+#define IDC_CHECK_PLAY_FILE_OUT_1       1045
+#define IDC_CHECK_PLAY_FILE_IN_2        1046
+#define IDC_CHECK_PLAY_FILE_OUT_2       1047
+#define IDC_CHECK_EXT_TRANS_2           1048
+#define IDC_STATIC_ALL_CHANNELS         1049
+#define IDC_CHECK_PLAY_FILE_IN          1050
+#define IDC_CHECK_PLAY_FILE_OUT         1051
+#define IDC_CHECK_EXT_MEDIA_IN_1        1051
+#define IDC_COMBO_REC_DEVICE            1052
+#define IDC_STATIC_REC_DEVICE           1053
+#define IDC_COMBO_PLAY_DEVICE2          1054
+#define IDC_COMBO_PLAY_DEVICE           1054
+#define IDC_STATIC_PLAY_DEVICE          1055
+#define IDC_CHECK_EXT_MEDIA_PLAY_1      1056
+#define IDC_CHECK_EXT_MEDIA_OUT_1       1056
+#define IDC_STATIC_PLAY_FILE2           1057
+#define IDC_SLIDER_INPUT_VOLUME         1058
+#define IDC_STATIC_MIC_VOLUME           1059
+#define IDC_SLIDER_OUTPUT_VOLUME        1060
+#define IDC_STATIC_SPK_VOLUME2          1061
+#define IDC_STATIC_SPK_VOLUME           1061
+#define IDC_CHECK_PLAY_FILE_IN2         1062
+#define IDC_CHECK_AGC                   1062
+#define IDC_STATIC_MIC_VOLUME2          1063
+#define IDC_STATIC_AUDIO_LEVEL_IN       1063
+#define IDC_PROGRESS_AUDIO_LEVEL_IN     1064
+#define IDC_CHECK_AGC2                  1065
+#define IDC_CHECK_NS                    1065
+#define IDC_BUTTON_1                    1065
+#define IDC_CHECK_VAD                   1066
+#define IDC_CHECK_EXT_MEDIA_IN_2        1066
+#define IDC_BUTTON_2                    1066
+#define IDC_CHECK_VAD2                  1067
+#define IDC_CHECK_EC                    1067
+#define IDC_BUTTON_3                    1067
+#define IDC_CHECK_VAD_1                 1068
+#define IDC_BUTTON_4                    1068
+#define IDC_CHECK_VAD_2                 1069
+#define IDC_CHECK_EXT_MEDIA_OUT_2       1069
+#define IDC_BUTTON_5                    1069
+#define IDC_CHECK_VAD_3                 1070
+#define IDC_BUTTON_6                    1070
+#define IDC_CHECK_MUTE_IN               1071
+#define IDC_BUTTON_7                    1071
+#define IDC_CHECK_MUTE_IN_1             1072
+#define IDC_BUTTON_8                    1072
+#define IDC_CHECK_MUTE_IN_2             1073
+#define IDC_BUTTON_9                    1073
+#define IDC_CHECK_SRTP_TX_1             1074
+#define IDC_BUTTON_10                   1074
+#define IDC_CHECK_SRTP_RX_1             1075
+#define IDC_BUTTON_11                   1075
+#define IDC_STATIC_PLAY_FILE3           1076
+#define IDC_STATIC_SRTP                 1076
+#define IDC_BUTTON_12                   1076
+#define IDC_CHECK_SRTP_TX_2             1077
+#define IDC_BUTTON_13                   1077
+#define IDC_CHECK_SRTP_RX_2             1078
+#define IDC_BUTTON_14                   1078
+#define IDC_CHECK_EXT_ENCRYPTION_1      1079
+#define IDC_BUTTON_15                   1079
+#define IDC_STATIC_PLAY_FILE4           1080
+#define IDC_BUTTON_16                   1080
+#define IDC_CHECK_EXT_ENCRYPTION_2      1081
+#define IDC_BUTTON_17                   1081
+#define IDC_BUTTON_DTMF_1               1082
+#define IDC_BUTTON_18                   1082
+#define IDC_EDIT_DTMF_EVENT             1083
+#define IDC_CHECK_REC_                  1083
+#define IDC_CHECK_REC_MIC               1083
+#define IDC_STATIC_DTMF_EVENT           1084
+#define IDC_BUTTON_DTMF_2               1084
+#define IDC_STATIC_GROUP_DTMF           1085
+#define IDC_CHECK_CONFERENCE_1          1085
+#define IDC_BUTTON_19                   1086
+#define IDC_CHECK_CONFERENCE_2          1086
+#define IDC_BUTTON_20                   1087
+#define IDC_CHECK_ON_HOLD_1             1087
+#define IDC_BUTTON_21                   1088
+#define IDC_CHECK_ON_HOLD_2             1088
+#define IDC_BUTTON_22                   1089
+#define IDC_CHECK_DTMF_PLAYOUT_RX       1089
+#define IDC_CHECK_EXT_MEDIA_IN          1089
+#define IDC_STATIC_PLAYOUT_RX           1090
+#define IDC_EDIT_GET_OUTPUT             1090
+#define IDC_CHECK_DTMF_PLAY_TONE        1091
+#define IDC_STATIC_LAST_ERROR2          1091
+#define IDC_STATIC_GET                  1091
+#define IDC_STATIC_PLAY_TONE            1092
+#define IDC_CHECK_EXT_MEDIA_OUT         1092
+#define IDC_CHECK_START_STOP_MODE       1093
+#define IDC_BUTTON_SET_TX_TELEPHONE_PT  1093
+#define IDC_PROGRESS_AUDIO_LEVEL_IN2    1093
+#define IDC_PROGRESS_AUDIO_LEVEL_OUT    1093
+#define IDC_EDIT_EVENT_LENGTH           1094
+#define IDC_EDIT_RX_PORT_3              1094
+#define IDC_EDIT_DELAY_ESTIMATE_1       1094
+#define IDC_STATIC_EVENT_LENGTH         1095
+#define IDC_EDIT_PLAYOUT_BUFFER_SIZE    1095
+#define IDC_STATIC_START_STOP_MODE      1096
+#define IDC_EDIT_EVENT_RX_PT            1096
+#define IDC_CHECK_DELAY_ESTIMATE_1      1096
+#define IDC_EDIT_EVENT_ATTENUATION      1097
+#define IDC_CHECK_AGC_1                 1097
+#define IDC_CHECK_EVENT_INBAND          1098
+#define IDC_CHECK_NS_1                  1098
+#define IDC_STATIC_EVENT_ATTENUATION    1099
+#define IDC_STATIC_SRTP2                1099
+#define IDC_STATIC_RX_VQE               1099
+#define IDC_EDIT_EVENT_TX_PT            1100
+#define IDC_CHECK_REC_MIC2              1100
+#define IDC_CHECK_REC_CALL              1100
+#define IDC_CHECK_DTMF_FEEDBACK         1101
+#define IDC_CHECK_REC_CALL2             1101
+#define IDC_CHECK_TYPING_DETECTION      1101
+#define IDC_CHECK_START_STOP_MODE2      1102
+#define IDC_CHECK_DIRECT_FEEDBACK       1102
+#define IDC_CHECK_FEC                   1102
+#define IDC_BUTTON_SET_RX_TELEPHONE_PT_TYPE 1103
+#define IDC_BUTTON_SET_RX_TELEPHONE_PT  1103
+#define IDC_BUTTON_CLEAR_ERROR_CALLBACK 1103
+#define IDC_EDIT_EVENT_CODE             1104
+#define IDC_STATIC_DIRECT_FEEDBACK      1105
+#define IDC_RADIO_SINGLE                1106
+#define IDC_RADIO_MULTI                 1107
+#define IDC_RADIO_START_STOP            1108
+#define IDC_STATIC_MODE                 1109
+#define IDC_STATIC_EVENT_RX_PT          1110
+#define IDC_STATIC_EVENT_TX_PT          1111
+#define IDC_STATIC_PT                   1112
+#define IDC_BUTTON_SEND_TELEPHONE_EVENT 1113
+#define IDC_STATIC_EVENT_CODE           1114
+#define IDC_CHECK_EVENT_DETECTION       1115
+#define IDC_CHECK_DETECT_INBAND         1116
+#define IDC_CHECK_DETECT_OUT_OF_BAND    1117
+#define IDC_STATIC_INBAND_DETECTION     1118
+#define IDC_STATIC_OUT_OF_BAND_DETECTION 1119
+#define IDC_STATIC_EVENT_DETECTION      1120
+#define IDC_STATIC_TELEPHONE_EVENTS     1121
+#define IDC_EDIT_EVENT_CODE2            1122
+#define IDC_EDIT_ON_EVENT               1122
+#define IDC_EDIT_ON_EVENT_OUT_OF_BAND   1122
+#define IDC_STATIC_ON_EVENT             1123
+#define IDC_EDIT_ON_EVENT_INBAND        1123
+#define IDC_STATIC_EVEN                 1124
+#define IDC_STATIC_LINE                 1125
+#define IDC_LIST_CODEC_1                1128
+#define IDC_EDIT2                       1129
+#define IDC_EDIT_CODEC_1                1129
+#define IDC_STATIC_PANNING              1131
+#define IDC_SLIDER_PAN_LEFT             1132
+#define IDC_SLIDER_PAN_RIGHT            1133
+#define IDC_STATIC_LEFT                 1134
+#define IDC_STATIC_LEFT2                1135
+#define IDC_STATIC_RIGHT                1135
+#define IDC_BUTTON_VERSION              1136
+#define IDC_STATIC_PLAYOUT_BUFFER       1137
+#define IDC_CHECK_RXVAD                 1138
+#define IDC_EDIT1                       1139
+#define IDC_EDIT_RXVAD                  1139
+#define IDC_STATIC_RX_PORT              1140
+#define IDC_STATIC_RX_PORT2             1141
+#define IDC_EDIT3                       1142
+#define IDC_EDIT_AUDIO_LAYER            1142
+#define IDC_EDIT_AUDIO_LAYER2           1143
+#define IDC_EDIT_CPU_LOAD               1143
+#define IDC_STATIC_ERROR_CALLBACK       1144
+#define IDC_EDIT_ERROR_CALLBACK         1145
+#define IDC_EDIT_RX_CODEC_1             1146
+#define IDC_STATIC_BYTES_SENT_TEXT      1147
+#define IDC_EDIT_RTCP_STAT              1147
+#define IDC_EDIT_RTCP_STAT_1            1147
+
+// Next default values for new objects
+// 
+#ifdef APSTUDIO_INVOKED
+#ifndef APSTUDIO_READONLY_SYMBOLS
+#define _APS_NEXT_RESOURCE_VALUE        130
+#define _APS_NEXT_COMMAND_VALUE         32771
+#define _APS_NEXT_CONTROL_VALUE         1148
+#define _APS_NEXT_SYMED_VALUE           101
+#endif
+#endif
diff --git a/src/voice_engine/test/win_test/WinTest.aps b/src/voice_engine/test/win_test/WinTest.aps
new file mode 100644
index 0000000..499db5f
--- /dev/null
+++ b/src/voice_engine/test/win_test/WinTest.aps
Binary files differ
diff --git a/src/voice_engine/test/win_test/WinTest.cc b/src/voice_engine/test/win_test/WinTest.cc
new file mode 100644
index 0000000..e0e0248
--- /dev/null
+++ b/src/voice_engine/test/win_test/WinTest.cc
@@ -0,0 +1,75 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "stdafx.h"
+#include "WinTest.h"
+#include "WinTestDlg.h"
+
+#ifdef _DEBUG
+#define new DEBUG_NEW
+#endif
+
+
+// CWinTestApp
+
+BEGIN_MESSAGE_MAP(CWinTestApp, CWinApp)
+	ON_COMMAND(ID_HELP, &CWinApp::OnHelp)
+END_MESSAGE_MAP()
+
+
+// CWinTestApp construction
+
+CWinTestApp::CWinTestApp()
+{
+}
+
+
+// The one and only CWinTestApp object
+
+CWinTestApp theApp;
+
+
+// CWinTestApp initialization
+
+BOOL CWinTestApp::InitInstance()
+{
+	// InitCommonControlsEx() is required on Windows XP if an application
+	// manifest specifies use of ComCtl32.dll version 6 or later to enable
+	// visual styles.  Otherwise, any window creation will fail.
+	INITCOMMONCONTROLSEX InitCtrls;
+	InitCtrls.dwSize = sizeof(InitCtrls);
+	// Set this to include all the common control classes you want to use
+	// in your application.
+	InitCtrls.dwICC = ICC_WIN95_CLASSES;
+	InitCommonControlsEx(&InitCtrls);
+
+	CWinApp::InitInstance();
+
+	// Standard initialization
+	// If you are not using these features and wish to reduce the size
+	// of your final executable, you should remove from the following
+	// the specific initialization routines you do not need
+	// Change the registry key under which our settings are stored
+	SetRegistryKey(_T("Local AppWizard-Generated Applications"));
+
+	CWinTestDlg dlg;
+	m_pMainWnd = &dlg;
+	INT_PTR nResponse = dlg.DoModal();
+	if (nResponse == IDOK)
+	{
+	}
+	else if (nResponse == IDCANCEL)
+	{
+	}
+
+	// Since the dialog has been closed, return FALSE so that we exit the
+	//  application, rather than start the application's message pump.
+	return FALSE;
+}
diff --git a/src/voice_engine/test/win_test/WinTest.h b/src/voice_engine/test/win_test/WinTest.h
new file mode 100644
index 0000000..d012ce6
--- /dev/null
+++ b/src/voice_engine/test/win_test/WinTest.h
@@ -0,0 +1,38 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#pragma once
+
+#ifndef __AFXWIN_H__
+	#error "include 'stdafx.h' before including this file for PCH"
+#endif
+
+#include "resource.h"		// main symbols
+
+
+// CWinTestApp:
+// See WinTest.cpp for the implementation of this class
+//
+
+class CWinTestApp : public CWinApp
+{
+public:
+	CWinTestApp();
+
+// Overrides
+	public:
+	virtual BOOL InitInstance();
+
+// Implementation
+
+	DECLARE_MESSAGE_MAP()
+};
+
+extern CWinTestApp theApp;
diff --git a/src/voice_engine/test/win_test/WinTest.rc b/src/voice_engine/test/win_test/WinTest.rc
new file mode 100644
index 0000000..dfe503f
--- /dev/null
+++ b/src/voice_engine/test/win_test/WinTest.rc
@@ -0,0 +1,394 @@
+// Microsoft Visual C++ generated resource script.

+//

+#include "resource.h"

+

+#define APSTUDIO_READONLY_SYMBOLS

+/////////////////////////////////////////////////////////////////////////////

+//

+// Generated from the TEXTINCLUDE 2 resource.

+//

+#include "afxres.h"

+

+/////////////////////////////////////////////////////////////////////////////

+#undef APSTUDIO_READONLY_SYMBOLS

+

+/////////////////////////////////////////////////////////////////////////////

+// Swedish resources

+

+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_SVE)

+#ifdef _WIN32

+LANGUAGE LANG_SWEDISH, SUBLANG_DEFAULT

+#pragma code_page(1252)

+#endif //_WIN32

+

+#ifdef APSTUDIO_INVOKED

+/////////////////////////////////////////////////////////////////////////////

+//

+// TEXTINCLUDE

+//

+

+1 TEXTINCLUDE 

+BEGIN

+    "resource.h\0"

+END

+

+2 TEXTINCLUDE 

+BEGIN

+    "#include ""afxres.h""\r\n"

+    "\0"

+END

+

+3 TEXTINCLUDE 

+BEGIN

+    "#define _AFX_NO_SPLITTER_RESOURCES\r\n"

+    "#define _AFX_NO_OLE_RESOURCES\r\n"

+    "#define _AFX_NO_TRACKER_RESOURCES\r\n"

+    "#define _AFX_NO_PROPERTY_RESOURCES\r\n"

+    "\r\n"

+    "#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_SVE)\r\n"

+    "LANGUAGE 29, 1\r\n"

+    "#pragma code_page(1252)\r\n"

+    "#include ""res\\WinTest.rc2""  // non-Microsoft Visual C++ edited resources\r\n"

+    "#include ""afxres.rc""     // Standard components\r\n"

+    "#endif\r\n"

+    "\0"

+END

+

+#endif    // APSTUDIO_INVOKED

+

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// Icon

+//

+

+// Icon with lowest ID value placed first to ensure application icon

+// remains consistent on all systems.

+IDR_MAINFRAME           ICON                    "res\\WinTest.ico"

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// Dialog

+//

+

+IDD_ABOUTBOX DIALOGEX 0, 0, 235, 55

+STYLE DS_SETFONT | DS_MODALFRAME | DS_FIXEDSYS | WS_POPUP | WS_CAPTION | WS_SYSMENU

+CAPTION "About WinTest"

+FONT 8, "MS Shell Dlg", 0, 0, 0x1

+BEGIN

+    ICON            IDR_MAINFRAME,IDC_STATIC,11,17,20,20

+    LTEXT           "WinTest Version 1.0",IDC_STATIC,40,10,119,8,SS_NOPREFIX

+    LTEXT           "Copyright (C) 2010",IDC_STATIC,40,25,119,8

+    DEFPUSHBUTTON   "OK",IDOK,178,7,50,16,WS_GROUP

+END

+

+IDD_WINTEST_DIALOG DIALOGEX 0, 0, 796, 278

+STYLE DS_SETFONT | DS_MODALFRAME | DS_FIXEDSYS | WS_MINIMIZEBOX | WS_POPUP | WS_VISIBLE | WS_CAPTION | WS_SYSMENU

+EXSTYLE WS_EX_APPWINDOW

+CAPTION "WinTest"

+FONT 8, "MS Shell Dlg", 0, 0, 0x1

+BEGIN

+    PUSHBUTTON      "Create",IDC_BUTTON_CREATE_1,28,24,32,14

+    PUSHBUTTON      "Delete",IDC_BUTTON_DELETE_1,28,40,32,14

+    EDITTEXT        IDC_EDIT_1,6,32,18,14,ES_AUTOHSCROLL | ES_READONLY

+    PUSHBUTTON      "Create",IDC_BUTTON_CREATE_2,28,72,32,14

+    PUSHBUTTON      "Delete",IDC_BUTTON_DELETE_2,28,88,32,14

+    EDITTEXT        IDC_EDIT_2,6,82,18,14,ES_AUTOHSCROLL | ES_READONLY

+    EDITTEXT        IDC_EDIT_MESSAGE,28,244,764,12,ES_AUTOHSCROLL

+    COMBOBOX        IDC_COMBO_IP_1,64,24,76,30,CBS_DROPDOWN | CBS_SORT | WS_VSCROLL | WS_TABSTOP

+    EDITTEXT        IDC_EDIT_TX_PORT_1,144,24,28,14,ES_AUTOHSCROLL

+    EDITTEXT        IDC_EDIT_RX_PORT_1,144,40,28,14,ES_AUTOHSCROLL

+    COMBOBOX        IDC_COMBO_CODEC_1,176,24,76,156,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP

+    PUSHBUTTON      "Start",IDC_BUTTON_START_LISTEN_1,256,24,32,14

+    PUSHBUTTON      "Stop",IDC_BUTTON_STOP_LISTEN_1,256,40,32,14

+    LTEXT           "Receive",IDC_STATIC_LISTEN,262,8,26,8

+    PUSHBUTTON      "Start",IDC_BUTTON_START_PLAYOUT_1,292,24,32,14

+    PUSHBUTTON      "Stop",IDC_BUTTON_STOP_PLAYOUT_1,292,40,32,14

+    LTEXT           "Playout",IDC_STATIC_PLAY,295,8,25,8

+    PUSHBUTTON      "Start",IDC_BUTTON_START_SEND_1,328,24,32,14

+    PUSHBUTTON      "Stop",IDC_BUTTON_STOP_SEND_1,328,40,32,14

+    LTEXT           "Send",IDC_STATIC_SEND,335,8,17,8

+    COMBOBOX        IDC_COMBO_IP_2,64,72,76,30,CBS_DROPDOWN | CBS_SORT | WS_VSCROLL | WS_TABSTOP

+    LTEXT           "Destination IP address",IDC_STATIC_IP,64,8,73,8

+    LTEXT           "Ports",IDC_STATIC_PORTS,145,8,18,8

+    LTEXT           "Codec",IDC_STATIC_CODEC,177,8,21,8

+    LTEXT           "Channel",IDC_STATIC_CHANNEL,30,8,27,8

+    LTEXT           "ID",IDC_STATIC_ID,12,8,8,8

+    EDITTEXT        IDC_EDIT_TX_PORT_2,144,72,28,14,ES_AUTOHSCROLL

+    EDITTEXT        IDC_EDIT_RX_PORT_2,144,88,28,14,ES_AUTOHSCROLL

+    COMBOBOX        IDC_COMBO_CODEC_2,176,72,76,156,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP

+    PUSHBUTTON      "Start",IDC_BUTTON_START_LISTEN_2,256,72,32,14

+    PUSHBUTTON      "Stop",IDC_BUTTON_STOP_LISTEN_2,256,88,32,14

+    PUSHBUTTON      "Start",IDC_BUTTON_START_PLAYOUT_2,292,72,32,14

+    PUSHBUTTON      "Stop",IDC_BUTTON_STOP_PLAYOUT_2,292,88,32,14

+    PUSHBUTTON      "Start",IDC_BUTTON_START_SEND_2,328,72,32,14

+    PUSHBUTTON      "Stop",IDC_BUTTON_STOP_SEND_2,328,88,32,14

+    PUSHBUTTON      "TEST 1",IDC_BUTTON_TEST_1,756,224,36,14

+    LTEXT           "API",IDC_STATIC,4,247,12,8

+    EDITTEXT        IDC_EDIT_RESULT,28,260,96,12,ES_AUTOHSCROLL

+    LTEXT           "Result",IDC_STATIC,3,263,21,8

+    EDITTEXT        IDC_EDIT_N_FAILS,156,260,30,12,ES_AUTOHSCROLL

+    LTEXT           "#Fails",IDC_STATIC_ERROR,132,263,20,8

+    EDITTEXT        IDC_EDIT_LAST_ERROR,228,260,36,12,ES_AUTOHSCROLL

+    LTEXT           "Last Error",IDC_STATIC_LAST_ERROR,192,262,32,8

+    LTEXT           "Ext. Trans.",IDC_STATIC_EXTERNAL,361,8,37,8

+    CONTROL         "",IDC_CHECK_EXT_TRANS_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,371,33,16,10

+    CONTROL         "In",IDC_CHECK_PLAY_FILE_IN_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,396,24,36,14,WS_EX_DLGMODALFRAME

+    LTEXT           "Play File",IDC_STATIC_PLAY_FILE,401,8,27,8

+    CONTROL         "Out",IDC_CHECK_PLAY_FILE_OUT_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,396,40,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "In",IDC_CHECK_PLAY_FILE_IN_2,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,396,72,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "Out",IDC_CHECK_PLAY_FILE_OUT_2,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,396,88,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "",IDC_CHECK_EXT_TRANS_2,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,371,82,16,10

+    GROUPBOX        "",IDC_STATIC_ALL_CHANNELS,6,107,662,113

+    CONTROL         "PlayFileAsMic",IDC_CHECK_PLAY_FILE_IN,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,396,122,60,14,WS_EX_DLGMODALFRAME

+    COMBOBOX        IDC_COMBO_REC_DEVICE,12,132,184,80,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP

+    LTEXT           "Recording device",IDC_STATIC_REC_DEVICE,12,120,56,8

+    COMBOBOX        IDC_COMBO_PLAY_DEVICE,12,180,184,80,CBS_DROPDOWN | WS_VSCROLL | WS_TABSTOP

+    LTEXT           "Playout device",IDC_STATIC_PLAY_DEVICE,12,167,56,8

+    CONTROL         "In",IDC_CHECK_EXT_MEDIA_IN_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,436,24,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "Out",IDC_CHECK_EXT_MEDIA_OUT_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,436,40,36,14,WS_EX_DLGMODALFRAME

+    LTEXT           "Ext. Media",IDC_STATIC_PLAY_FILE2,437,8,35,8

+    CONTROL         "",IDC_SLIDER_INPUT_VOLUME,"msctls_trackbar32",TBS_BOTH | TBS_NOTICKS | WS_TABSTOP,196,130,72,15

+    LTEXT           "Microphone Volume",IDC_STATIC_MIC_VOLUME,202,120,62,8

+    CONTROL         "",IDC_SLIDER_OUTPUT_VOLUME,"msctls_trackbar32",TBS_BOTH | TBS_NOTICKS | WS_TABSTOP,196,179,72,15

+    LTEXT           "Speaker Volume",IDC_STATIC_SPK_VOLUME,202,167,52,8

+    CONTROL         "AGC",IDC_CHECK_AGC,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,316,122,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "",IDC_PROGRESS_AUDIO_LEVEL_IN,"msctls_progress32",WS_BORDER,268,135,42,6

+    LTEXT           "Audio Level",IDC_STATIC_AUDIO_LEVEL_IN,271,120,38,8,NOT WS_GROUP

+    CONTROL         "NS",IDC_CHECK_NS,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,316,142,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "EC",IDC_CHECK_EC,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,356,122,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "VAD",IDC_CHECK_VAD_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,476,24,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "In",IDC_CHECK_EXT_MEDIA_IN_2,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,436,72,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "Out",IDC_CHECK_EXT_MEDIA_OUT_2,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,436,88,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "VAD",IDC_CHECK_VAD_3,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,476,72,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "Mute",IDC_CHECK_MUTE_IN,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,356,142,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "Mute",IDC_CHECK_MUTE_IN_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,476,40,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "Mute",IDC_CHECK_MUTE_IN_2,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,476,88,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "TX",IDC_CHECK_SRTP_TX_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,516,24,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "RX",IDC_CHECK_SRTP_RX_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,516,40,36,14,WS_EX_DLGMODALFRAME

+    LTEXT           "SRTP",IDC_STATIC_SRTP,525,8,18,8

+    CONTROL         "TX",IDC_CHECK_SRTP_TX_2,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,516,72,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "RX",IDC_CHECK_SRTP_RX_2,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,516,88,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "",IDC_CHECK_EXT_ENCRYPTION_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,564,33,16,10

+    LTEXT           "Encrypt",IDC_STATIC_PLAY_FILE4,556,8,26,8

+    CONTROL         "",IDC_CHECK_EXT_ENCRYPTION_2,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,564,82,16,10

+    PUSHBUTTON      "DTMF>>",IDC_BUTTON_DTMF_1,584,24,36,14

+    CONTROL         "RecMicToFile",IDC_CHECK_REC_MIC,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,396,142,60,14,WS_EX_DLGMODALFRAME

+    PUSHBUTTON      "DTMF>>",IDC_BUTTON_DTMF_2,584,72,36,14

+    CONTROL         "Conf",IDC_CHECK_CONFERENCE_1,"Button",BS_AUTOCHECKBOX | NOT WS_VISIBLE | WS_TABSTOP,584,40,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "Conf",IDC_CHECK_CONFERENCE_2,"Button",BS_AUTOCHECKBOX | NOT WS_VISIBLE | WS_TABSTOP,584,88,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "Hold",IDC_CHECK_ON_HOLD_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,708,24,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "Hold",IDC_CHECK_ON_HOLD_2,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,708,72,36,14,WS_EX_DLGMODALFRAME

+    EDITTEXT        IDC_EDIT_GET_OUTPUT,292,260,500,12,ES_AUTOHSCROLL

+    LTEXT           "Get",IDC_STATIC_GET,276,262,12,8

+    CONTROL         "Ext. Media",IDC_CHECK_EXT_MEDIA_IN,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,460,122,52,14,WS_EX_DLGMODALFRAME

+    CONTROL         "Ext. Media",IDC_CHECK_EXT_MEDIA_OUT,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,460,180,52,14,WS_EX_DLGMODALFRAME

+    LISTBOX         IDC_LIST_CODEC_1,208,40,44,28,LBS_NOINTEGRALHEIGHT | NOT WS_BORDER | WS_VSCROLL | WS_TABSTOP,WS_EX_CLIENTEDGE

+    EDITTEXT        IDC_EDIT_CODEC_1,176,40,28,14,ES_AUTOHSCROLL

+    CONTROL         "",IDC_PROGRESS_AUDIO_LEVEL_OUT,"msctls_progress32",WS_BORDER,268,184,42,6

+    LTEXT           "Panning",IDC_STATIC_PANNING,328,167,26,8

+    CONTROL         "",IDC_SLIDER_PAN_LEFT,"msctls_trackbar32",TBS_VERT | TBS_BOTH | TBS_NOTICKS | WS_TABSTOP,328,175,12,28

+    CONTROL         "",IDC_SLIDER_PAN_RIGHT,"msctls_trackbar32",TBS_VERT | TBS_BOTH | TBS_NOTICKS | WS_TABSTOP,344,175,12,28

+    LTEXT           "L",IDC_STATIC_LEFT,332,200,8,8

+    LTEXT           "R",IDC_STATIC_RIGHT,347,201,8,8

+    PUSHBUTTON      "Version",IDC_BUTTON_VERSION,624,200,36,14

+    EDITTEXT        IDC_EDIT_PLAYOUT_BUFFER_SIZE,363,181,28,12,ES_CENTER | ES_AUTOHSCROLL | ES_READONLY | NOT WS_TABSTOP

+    LTEXT           "Buffer Size",IDC_STATIC_PLAYOUT_BUFFER,361,167,36,8

+    CONTROL         "Delay",IDC_CHECK_DELAY_ESTIMATE_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,624,24,36,14,WS_EX_DLGMODALFRAME

+    EDITTEXT        IDC_EDIT_DELAY_ESTIMATE_1,631,40,24,14,ES_CENTER | ES_AUTOHSCROLL | ES_READONLY | NOT WS_TABSTOP

+    CONTROL         "RxVAD",IDC_CHECK_RXVAD,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,664,24,40,14,WS_EX_DLGMODALFRAME

+    EDITTEXT        IDC_EDIT_RXVAD,671,40,24,14,ES_CENTER | ES_AUTOHSCROLL | ES_READONLY

+    CONTROL         "AGC",IDC_CHECK_AGC_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,748,24,36,14,WS_EX_DLGMODALFRAME

+    CONTROL         "NS",IDC_CHECK_NS_1,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,748,40,36,14,WS_EX_DLGMODALFRAME

+    LTEXT           "RX VQE",IDC_STATIC_RX_VQE,753,8,25,8

+    CONTROL         "RecordCall",IDC_CHECK_REC_CALL,"Button",BS_AUTOCHECKBOX | NOT WS_VISIBLE | WS_TABSTOP,517,156,52,14,WS_EX_DLGMODALFRAME

+    LTEXT           "RX",IDC_STATIC_RX_PORT,133,42,10,8

+    LTEXT           "RX",IDC_STATIC_RX_PORT2,133,91,10,8

+    CONTROL         "TypingDetect",IDC_CHECK_TYPING_DETECTION,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,572,156,60,14,WS_EX_DLGMODALFRAME

+    EDITTEXT        IDC_EDIT_AUDIO_LAYER,28,224,116,14,ES_AUTOHSCROLL | ES_READONLY

+    EDITTEXT        IDC_EDIT_CPU_LOAD,152,224,116,14,ES_AUTOHSCROLL | ES_READONLY

+    CONTROL         "FEC",IDC_CHECK_FEC,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,176,55,28,14,WS_EX_DLGMODALFRAME

+    LTEXT           "=> Callbacks",IDC_STATIC_ERROR_CALLBACK,283,226,43,8

+    EDITTEXT        IDC_EDIT_ERROR_CALLBACK,328,224,312,14,ES_AUTOHSCROLL

+    PUSHBUTTON      "Clear",IDC_BUTTON_CLEAR_ERROR_CALLBACK,644,224,24,14

+    EDITTEXT        IDC_EDIT_RX_CODEC_1,256,56,216,12,ES_AUTOHSCROLL | ES_READONLY

+    EDITTEXT        IDC_EDIT_RTCP_STAT_1,476,56,316,12,ES_AUTOHSCROLL | ES_READONLY

+END

+

+IDD_DTMF_DIALOG DIALOGEX 0, 0, 316, 212

+STYLE DS_SETFONT | DS_MODALFRAME | DS_FIXEDSYS | WS_POPUP | WS_CAPTION | WS_SYSMENU

+CAPTION "Telehone Events"

+FONT 8, "MS Shell Dlg", 400, 0, 0x1

+BEGIN

+    DEFPUSHBUTTON   "OK",IDOK,260,192,50,14

+    PUSHBUTTON      "1",IDC_BUTTON_1,16,20,16,14

+    PUSHBUTTON      "2",IDC_BUTTON_2,36,20,16,14

+    PUSHBUTTON      "3",IDC_BUTTON_3,56,20,16,14

+    PUSHBUTTON      "4",IDC_BUTTON_4,16,36,16,14

+    PUSHBUTTON      "5",IDC_BUTTON_5,36,36,16,14

+    PUSHBUTTON      "6",IDC_BUTTON_6,56,36,16,14

+    PUSHBUTTON      "7",IDC_BUTTON_7,16,52,16,14

+    PUSHBUTTON      "8",IDC_BUTTON_8,36,52,16,14

+    PUSHBUTTON      "9",IDC_BUTTON_9,56,52,16,14

+    PUSHBUTTON      "*",IDC_BUTTON_10,16,68,16,14

+    PUSHBUTTON      "0",IDC_BUTTON_11,36,68,16,14

+    PUSHBUTTON      "#",IDC_BUTTON_12,56,68,16,14

+    PUSHBUTTON      "A",IDC_BUTTON_13,76,20,16,14

+    PUSHBUTTON      "B",IDC_BUTTON_14,76,36,16,14

+    PUSHBUTTON      "C",IDC_BUTTON_15,76,52,16,14

+    PUSHBUTTON      "D",IDC_BUTTON_16,76,68,16,14

+    EDITTEXT        IDC_EDIT_DTMF_EVENT,56,90,16,12,ES_AUTOHSCROLL | ES_READONLY

+    LTEXT           "Event code",IDC_STATIC_DTMF_EVENT,17,91,37,8

+    PUSHBUTTON      "1",IDC_BUTTON_17,16,20,16,14

+    PUSHBUTTON      "2",IDC_BUTTON_18,36,20,16,14

+    PUSHBUTTON      "3",IDC_BUTTON_19,56,20,16,14

+    PUSHBUTTON      "4",IDC_BUTTON_20,16,36,16,14

+    PUSHBUTTON      "A",IDC_BUTTON_21,76,20,16,14

+    GROUPBOX        "DTMF Events",IDC_STATIC_GROUP_DTMF,4,4,188,132

+    CONTROL         "",IDC_CHECK_DTMF_PLAYOUT_RX,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,160,21,12,14

+    LTEXT           "Play out-band RX",IDC_STATIC_PLAYOUT_RX,101,24,56,8

+    CONTROL         "",IDC_CHECK_DTMF_PLAY_TONE,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,160,39,12,14

+    LTEXT           "Play tone locally",IDC_STATIC_PLAY_TONE,101,41,52,8

+    EDITTEXT        IDC_EDIT_EVENT_LENGTH,44,163,28,14,ES_AUTOHSCROLL

+    LTEXT           "Duration",IDC_STATIC_EVENT_LENGTH,12,165,28,8

+    EDITTEXT        IDC_EDIT_EVENT_ATTENUATION,44,183,28,14,ES_AUTOHSCROLL

+    LTEXT           "Volume",IDC_STATIC_EVENT_ATTENUATION,12,186,24,8

+    CONTROL         "Inband",IDC_CHECK_EVENT_INBAND,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,84,163,40,14,WS_EX_DLGMODALFRAME

+    CONTROL         "Feedback",IDC_CHECK_DTMF_FEEDBACK,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,16,112,48,14,WS_EX_DLGMODALFRAME

+    CONTROL         "",IDC_CHECK_DIRECT_FEEDBACK,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,96,112,12,14

+    LTEXT           "Direct",IDC_STATIC_DIRECT_FEEDBACK,72,115,20,8

+    CONTROL         "Single",IDC_RADIO_SINGLE,"Button",BS_AUTORADIOBUTTON | WS_GROUP,112,68,35,10

+    CONTROL         "Sequence",IDC_RADIO_MULTI,"Button",BS_AUTORADIOBUTTON,112,80,47,10

+    CONTROL         "Start/Stop",IDC_RADIO_START_STOP,"Button",BS_AUTORADIOBUTTON,112,92,49,10

+    GROUPBOX        "Mode",IDC_STATIC_MODE,100,56,68,52

+    EDITTEXT        IDC_EDIT_EVENT_RX_PT,220,20,24,14,ES_AUTOHSCROLL

+    EDITTEXT        IDC_EDIT_EVENT_TX_PT,220,41,24,14,ES_AUTOHSCROLL

+    LTEXT           "RX",IDC_STATIC_EVENT_RX_PT,208,22,10,8

+    LTEXT           "TX",IDC_STATIC_EVENT_TX_PT,208,42,9,8

+    PUSHBUTTON      "Set",IDC_BUTTON_SET_TX_TELEPHONE_PT,248,41,24,14

+    PUSHBUTTON      "Set",IDC_BUTTON_SET_RX_TELEPHONE_PT,248,20,24,14

+    GROUPBOX        "Payload Type",IDC_STATIC_PT,200,4,80,56

+    EDITTEXT        IDC_EDIT_EVENT_CODE,128,163,28,14,ES_AUTOHSCROLL

+    LTEXT           "Event code",IDC_STATIC_EVENT_CODE,125,152,37,8

+    PUSHBUTTON      "Send",IDC_BUTTON_SEND_TELEPHONE_EVENT,160,163,24,14

+    CONTROL         "On/Off",IDC_CHECK_EVENT_DETECTION,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,208,80,40,14,WS_EX_DLGMODALFRAME

+    CONTROL         "",IDC_CHECK_DETECT_INBAND,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,208,100,12,14

+    CONTROL         "",IDC_CHECK_DETECT_OUT_OF_BAND,"Button",BS_AUTOCHECKBOX | WS_TABSTOP,208,116,12,14

+    LTEXT           "Inband",IDC_STATIC_INBAND_DETECTION,220,103,24,8

+    LTEXT           "Outband",IDC_STATIC_OUT_OF_BAND_DETECTION,220,120,29,8

+    GROUPBOX        "Event Detection",IDC_STATIC_EVENT_DETECTION,200,68,108,68

+    GROUPBOX        "Telephone Events",IDC_STATIC_TELEPHONE_EVENTS,4,140,188,64

+    EDITTEXT        IDC_EDIT_ON_EVENT_OUT_OF_BAND,252,117,48,14,ES_AUTOHSCROLL

+    EDITTEXT        IDC_EDIT_ON_EVENT_INBAND,252,101,48,14,ES_AUTOHSCROLL

+    LTEXT           "=> Detections",IDC_STATIC_EVEN,253,90,48,8

+END

+

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// Version

+//

+

+VS_VERSION_INFO VERSIONINFO

+ FILEVERSION 1,0,0,0

+ PRODUCTVERSION 1,0,0,0

+ FILEFLAGSMASK 0x3fL

+#ifdef _DEBUG

+ FILEFLAGS 0x1L

+#else

+ FILEFLAGS 0x0L

+#endif

+ FILEOS 0x4L

+ FILETYPE 0x1L

+ FILESUBTYPE 0x0L

+BEGIN

+    BLOCK "StringFileInfo"

+    BEGIN

+        BLOCK "040904e4"

+        BEGIN

+            VALUE "FileDescription", "WebRTC VoiceEngine Test"

+            VALUE "FileVersion", "1.0.0.0"

+            VALUE "InternalName", "WinTest.exe"

+            VALUE "LegalCopyright", "Copyright (c) 2011 The WebRTC project authors. All Rights Reserved."

+            VALUE "OriginalFilename", "WinTest.exe"

+            VALUE "ProductName", "WebRTC VoiceEngine"

+            VALUE "ProductVersion", "1.0.0.0"

+        END

+    END

+    BLOCK "VarFileInfo"

+    BEGIN

+        VALUE "Translation", 0x409, 1252

+    END

+END

+

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// DESIGNINFO

+//

+

+#ifdef APSTUDIO_INVOKED

+GUIDELINES DESIGNINFO 

+BEGIN

+    IDD_ABOUTBOX, DIALOG

+    BEGIN

+        LEFTMARGIN, 7

+        RIGHTMARGIN, 228

+        TOPMARGIN, 7

+        BOTTOMMARGIN, 48

+    END

+

+    IDD_WINTEST_DIALOG, DIALOG

+    BEGIN

+        LEFTMARGIN, 7

+        RIGHTMARGIN, 789

+        TOPMARGIN, 7

+        BOTTOMMARGIN, 271

+    END

+

+    IDD_DTMF_DIALOG, DIALOG

+    BEGIN

+        LEFTMARGIN, 7

+        RIGHTMARGIN, 309

+        TOPMARGIN, 7

+        BOTTOMMARGIN, 205

+    END

+END

+#endif    // APSTUDIO_INVOKED

+

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// String Table

+//

+

+STRINGTABLE 

+BEGIN

+    IDS_ABOUTBOX            "&About WinTest..."

+END

+

+#endif    // Swedish resources

+/////////////////////////////////////////////////////////////////////////////

+

+

+

+#ifndef APSTUDIO_INVOKED

+/////////////////////////////////////////////////////////////////////////////

+//

+// Generated from the TEXTINCLUDE 3 resource.

+//

+#define _AFX_NO_SPLITTER_RESOURCES

+#define _AFX_NO_OLE_RESOURCES

+#define _AFX_NO_TRACKER_RESOURCES

+#define _AFX_NO_PROPERTY_RESOURCES

+

+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_SVE)

+LANGUAGE 29, 1

+#pragma code_page(1252)

+#include "res\WinTest.rc2"  // non-Microsoft Visual C++ edited resources

+#include "afxres.rc"     // Standard components

+#endif

+

+/////////////////////////////////////////////////////////////////////////////

+#endif    // not APSTUDIO_INVOKED

+

diff --git a/src/voice_engine/test/win_test/WinTestDlg.cc b/src/voice_engine/test/win_test/WinTestDlg.cc
new file mode 100644
index 0000000..b11c09f
--- /dev/null
+++ b/src/voice_engine/test/win_test/WinTestDlg.cc
@@ -0,0 +1,3584 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include <math.h>
+#include "stdafx.h"
+#include "WinTest.h"
+#include "WinTestDlg.h"
+#include "testsupport/fileutils.h"
+
+#ifdef _DEBUG
+#define new DEBUG_NEW
+#endif
+
+using namespace webrtc;
+
+unsigned char key[30] = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9};
+
+// Hack to convert char to TCHAR, using two buffers to be able to
+// call twice in the same statement
+TCHAR convertTemp1[256] = {0};
+TCHAR convertTemp2[256] = {0};
+bool convertBufferSwitch(false);
+TCHAR* CharToTchar(const char* str, int len)
+{
+#ifdef _UNICODE
+  TCHAR* temp = convertBufferSwitch ? convertTemp1 : convertTemp2;
+  convertBufferSwitch = !convertBufferSwitch;
+  memset(temp, 0, sizeof(convertTemp1));
+  MultiByteToWideChar(CP_UTF8, 0, str, len, temp, 256);
+  return temp;
+#else
+  return str;
+#endif
+}
+
+// Hack to convert TCHAR to char
+char convertTemp3[256] = {0};
+char* TcharToChar(TCHAR* str, int len)
+{
+#ifdef _UNICODE
+  memset(convertTemp3, 0, sizeof(convertTemp3));
+  WideCharToMultiByte(CP_UTF8, 0, str, len, convertTemp3, 256, 0, 0);
+  return convertTemp3;
+#else
+  return str;
+#endif
+}
+
+// ----------------------------------------------------------------------------
+//    VoEConnectionObserver
+// ----------------------------------------------------------------------------
+
+class ConnectionObserver : public  VoEConnectionObserver
+{
+public:
+    ConnectionObserver();
+    virtual void OnPeriodicDeadOrAlive(const int channel, const bool alive);
+};
+
+ConnectionObserver::ConnectionObserver()
+{
+}
+
+void ConnectionObserver::OnPeriodicDeadOrAlive(const int channel, const bool alive)
+{
+    CString str;
+    str.Format(_T("OnPeriodicDeadOrAlive(channel=%d) => alive=%d"), channel, alive);
+    OutputDebugString(str);
+}
+
+// ----------------------------------------------------------------------------
+//    VoiceEngineObserver
+// ----------------------------------------------------------------------------
+
+void CWinTestDlg::CallbackOnError(const int channel, const int errCode)
+{
+    _nErrorCallbacks++;
+
+    CString str;
+    str.Format(_T("[#%d] CallbackOnError(channel=%d) => errCode = %d"), _nErrorCallbacks, channel, errCode);
+    if (errCode == VE_RECEIVE_PACKET_TIMEOUT)
+    {
+        str += _T(" <=> VE_RECEIVE_PACKET_TIMEOUT");
+    }
+    else if (errCode == VE_PACKET_RECEIPT_RESTARTED)
+    {
+        str += _T(" <=> VE_PACKET_RECEIPT_RESTARTED");
+    }
+    else if (errCode == VE_RUNTIME_PLAY_WARNING)
+    {
+        str += _T(" <=> VE_RUNTIME_PLAY_WARNING");
+    }
+    else if (errCode == VE_RUNTIME_REC_WARNING)
+    {
+        str += _T(" <=> VE_RUNTIME_REC_WARNING");
+    }
+    else if (errCode == VE_RUNTIME_PLAY_ERROR)
+    {
+        str += _T(" <=> VE_RUNTIME_PLAY_ERROR");
+    }
+    else if (errCode == VE_RUNTIME_REC_ERROR)
+    {
+        str += _T(" <=> VE_RUNTIME_REC_ERROR");
+    }
+    else if (errCode == VE_SATURATION_WARNING)
+    {
+        str += _T(" <=> VE_SATURATION_WARNING");
+    }
+    else if (errCode == VE_TYPING_NOISE_WARNING)
+    {
+        str += _T(" <=> VE_TYPING_NOISE_WARNING");
+    }
+    else if (errCode == VE_REC_DEVICE_REMOVED)
+    {
+        str += _T(" <=> VE_REC_DEVICE_REMOVED");
+    }
+    // AfxMessageBox((LPCTSTR)str, MB_OK);
+    SetDlgItemText(IDC_EDIT_ERROR_CALLBACK, (LPCTSTR)str);
+}
+
+// ----------------------------------------------------------------------------
+//    VoERTPObserver
+// ----------------------------------------------------------------------------
+
+void CWinTestDlg::OnIncomingCSRCChanged(const int channel, const unsigned int CSRC, const bool added)
+{
+    CString str;
+    str.Format(_T("OnIncomingCSRCChanged(channel=%d) => CSRC=%u, added=%d"), channel, CSRC, added);
+    SetDlgItemText(IDC_EDIT_ERROR_CALLBACK, (LPCTSTR)str);
+}
+
+void CWinTestDlg::OnIncomingSSRCChanged(const int channel, const unsigned int SSRC)
+{
+    CString str;
+    str.Format(_T("OnIncomingSSRCChanged(channel=%d) => SSRC=%u"), channel, SSRC);
+    SetDlgItemText(IDC_EDIT_ERROR_CALLBACK, (LPCTSTR)str);
+}
+
+// ----------------------------------------------------------------------------
+//    Transport
+// ----------------------------------------------------------------------------
+
+class MyTransport : public Transport
+{
+public:
+    MyTransport(VoENetwork* veNetwork);
+    virtual int SendPacket(int channel, const void *data, int len);
+    virtual int SendRTCPPacket(int channel, const void *data, int len);
+private:
+    VoENetwork* _veNetworkPtr;
+};
+
+MyTransport::MyTransport(VoENetwork* veNetwork) :
+    _veNetworkPtr(veNetwork)
+{
+}
+
+int
+MyTransport::SendPacket(int channel, const void *data, int len)
+{
+    _veNetworkPtr->ReceivedRTPPacket(channel, data, len);
+    return len;
+}
+
+int
+MyTransport::SendRTCPPacket(int channel, const void *data, int len)
+{
+    _veNetworkPtr->ReceivedRTCPPacket(channel, data, len);
+    return len;
+}
+
+// ----------------------------------------------------------------------------
+//    VoEMediaProcess
+// ----------------------------------------------------------------------------
+
+class MediaProcessImpl : public VoEMediaProcess
+{
+public:
+    MediaProcessImpl();
+    virtual void Process(const int channel,
+                         const ProcessingTypes type,
+                         WebRtc_Word16 audio_10ms[],
+                         const int length,
+                         const int samplingFreqHz,
+                         const bool stereo);
+};
+
+MediaProcessImpl::MediaProcessImpl()
+{
+}
+
+void MediaProcessImpl::Process(const int channel,
+                               const ProcessingTypes type,
+                               WebRtc_Word16 audio_10ms[],
+                               const int length,
+                               const int samplingFreqHz,
+                               const bool stereo)
+{
+    int x = rand() % 100;
+
+    for (int i = 0; i < length; i++)
+    {
+        if (channel == -1)
+        {
+            if (type == kPlaybackAllChannelsMixed)
+            {
+                // playout: scale up
+                if (!stereo)
+                {
+                    audio_10ms[i] = (audio_10ms[i] << 2);
+                }
+                else
+                {
+                    audio_10ms[2*i] = (audio_10ms[2*i] << 2);
+                    audio_10ms[2*i+1] = (audio_10ms[2*i+1] << 2);
+                }
+            }
+            else
+            {
+                // recording: emulate packet loss by "dropping" 10% of the packets
+                if (x >= 0 && x < 10)
+                {
+                    if (!stereo)
+                    {
+                        audio_10ms[i] = 0;
+                    }
+                    else
+                    {
+                        audio_10ms[2*i] = 0;
+                        audio_10ms[2*i+1] = 0;
+                    }
+                }
+            }
+        }
+        else
+        {
+            if (type == kPlaybackPerChannel)
+            {
+                // playout: mute
+                if (!stereo)
+                {
+                    audio_10ms[i] = 0;
+                }
+                else
+                {
+                    audio_10ms[2*i] = 0;
+                    audio_10ms[2*i+1] = 0;
+                }
+            }
+            else
+            {
+                // recording: emulate packet loss by "dropping" 50% of the packets
+                if (x >= 0 && x < 50)
+                {
+                    if (!stereo)
+                    {
+                        audio_10ms[i] = 0;
+                    }
+                    else
+                    {
+                        audio_10ms[2*i] = 0;
+                        audio_10ms[2*i+1] = 0;
+                    }
+                }
+            }
+        }
+    }
+}
+
+// ----------------------------------------------------------------------------
+//    Encryptionen
+// ----------------------------------------------------------------------------
+
+class MyEncryption : public Encryption
+{
+public:
+    void encrypt(int channel_no, unsigned char * in_data, unsigned char * out_data, int bytes_in, int* bytes_out);
+    void decrypt(int channel_no, unsigned char * in_data, unsigned char * out_data, int bytes_in, int* bytes_out);
+    void encrypt_rtcp(int channel_no, unsigned char * in_data, unsigned char * out_data, int bytes_in, int* bytes_out);
+    void decrypt_rtcp(int channel_no, unsigned char * in_data, unsigned char * out_data, int bytes_in, int* bytes_out);
+};
+
+void MyEncryption::encrypt(int channel_no, unsigned char * in_data, unsigned char * out_data, int bytes_in, int* bytes_out)
+{
+    // --- Stereo emulation (sample based, 2 bytes per sample)
+
+    const int nBytesPayload = bytes_in-12;
+
+    // RTP header (first 12 bytes)
+    memcpy(out_data, in_data, 12);
+
+    // skip RTP header
+    short* ptrIn = (short*) &in_data[12];
+    short* ptrOut = (short*) &out_data[12];
+
+    // network byte order
+    for (int i = 0; i < nBytesPayload/2; i++)
+    {
+        // produce two output samples for each input sample
+        *ptrOut++ = *ptrIn; // left sample
+        *ptrOut++ = *ptrIn; // right sample
+        ptrIn++;
+    }
+
+    *bytes_out = 12 + 2*nBytesPayload;
+
+    /*
+    for(int i = 0; i < bytes_in; i++)
+        out_data[i] =~ in_data[i];
+    *bytes_out = bytes_in;
+    */
+}
+
+void MyEncryption::decrypt(int channel_no, unsigned char * in_data, unsigned char * out_data, int bytes_in, int* bytes_out)
+{
+    // Do nothing (<=> memcpy)
+    for(int i = 0; i < bytes_in; i++)
+        out_data[i] = in_data[i];
+    *bytes_out = bytes_in;
+}
+
+void MyEncryption::encrypt_rtcp(int channel_no, unsigned char * in_data, unsigned char * out_data, int bytes_in, int* bytes_out)
+{
+    for(int i = 0; i < bytes_in; i++)
+        out_data[i] =~ in_data[i];
+    *bytes_out = bytes_in;
+}
+
+void MyEncryption::decrypt_rtcp(int channel_no, unsigned char * in_data, unsigned char * out_data, int bytes_in, int* bytes_out)
+{
+    for(int i = 0; i < bytes_in; i++)
+        out_data[i] =~ in_data[i];
+    *bytes_out = bytes_in;
+}
+
+// ----------------------------------------------------------------------------
+//    TelephoneEventObserver
+// ----------------------------------------------------------------------------
+
+class TelephoneEventObserver: public VoETelephoneEventObserver
+{
+public:
+    TelephoneEventObserver(CWnd* editControlOut, CWnd* editControlIn);
+    virtual void OnReceivedTelephoneEventInband(int channel, int eventCode,
+                                                bool endOfEvent);
+    virtual void OnReceivedTelephoneEventOutOfBand(int channel, int eventCode,
+                                                   bool endOfEvent);
+private:
+    CWnd* _editControlOutPtr;
+    CWnd* _editControlInPtr;
+};
+
+TelephoneEventObserver::TelephoneEventObserver(CWnd* editControlOut, CWnd* editControlIn) :
+    _editControlOutPtr(editControlOut),
+    _editControlInPtr(editControlIn)
+{
+}
+
+void TelephoneEventObserver::OnReceivedTelephoneEventInband(int channel,
+                                                            int eventCode,
+                                                            bool endOfEvent)
+{
+    CString msg;
+    if (endOfEvent)
+    {
+        msg.AppendFormat(_T("%d [END]"), eventCode);
+        _editControlInPtr->SetWindowText((LPCTSTR)msg);
+    }
+    else
+    {
+        msg.AppendFormat(_T("%d [START]"), eventCode);
+        _editControlInPtr->SetWindowText((LPCTSTR)msg);
+    }
+}
+
+void TelephoneEventObserver::OnReceivedTelephoneEventOutOfBand(int channel,
+                                                               int eventCode,
+                                                               bool endOfEvent)
+{
+    CString msg;
+    if (endOfEvent)
+    {
+        msg.AppendFormat(_T("%d [END]"), eventCode);
+        _editControlOutPtr->SetWindowText((LPCTSTR)msg);
+    }
+    else
+    {
+        msg.AppendFormat(_T("%d [START]"), eventCode);
+        _editControlOutPtr->SetWindowText((LPCTSTR)msg);
+    }
+}
+
+// ----------------------------------------------------------------------------
+//    RxVadCallback
+// ----------------------------------------------------------------------------
+
+class RxCallback : public VoERxVadCallback
+{
+public:
+    RxCallback() : vad_decision(-1) {};
+
+    virtual void OnRxVad(int , int vadDecision)
+    {
+        vad_decision = vadDecision;
+    }
+
+    int vad_decision;
+};
+
+// ----------------------------------------------------------------------------
+//                                 CAboutDlg dialog
+// ----------------------------------------------------------------------------
+
+class CAboutDlg : public CDialog
+{
+public:
+    CAboutDlg();
+
+// Dialog Data
+    enum { IDD = IDD_ABOUTBOX };
+
+    protected:
+    virtual void DoDataExchange(CDataExchange* pDX);    // DDX/DDV support
+
+// Implementation
+protected:
+    DECLARE_MESSAGE_MAP()
+};
+
+CAboutDlg::CAboutDlg() : CDialog(CAboutDlg::IDD)
+{
+}
+
+void CAboutDlg::DoDataExchange(CDataExchange* pDX)
+{
+    CDialog::DoDataExchange(pDX);
+}
+
+BEGIN_MESSAGE_MAP(CAboutDlg, CDialog)
+END_MESSAGE_MAP()
+
+// ----------------------------------------------------------------------------
+//                               CTelephonyEvent dialog
+// ----------------------------------------------------------------------------
+
+class CTelephonyEvent : public CDialog
+{
+    DECLARE_DYNAMIC(CTelephonyEvent)
+
+public:
+    CTelephonyEvent(VoiceEngine* voiceEngine, int channel, CDialog* pParentDialog, CWnd* pParent = NULL);   // standard constructor
+    virtual ~CTelephonyEvent();
+
+// Dialog Data
+    enum { IDD = IDD_DTMF_DIALOG };
+
+protected:
+    virtual void DoDataExchange(CDataExchange* pDX);    // DDX/DDV support
+    virtual BOOL OnInitDialog();
+
+    DECLARE_MESSAGE_MAP()
+public:
+    afx_msg void OnBnClickedButton1();
+    afx_msg void OnBnClickedButton2();
+    afx_msg void OnBnClickedButton3();
+    afx_msg void OnBnClickedButton4();
+    afx_msg void OnBnClickedButton5();
+    afx_msg void OnBnClickedButton6();
+    afx_msg void OnBnClickedButton7();
+    afx_msg void OnBnClickedButton8();
+    afx_msg void OnBnClickedButton9();
+    afx_msg void OnBnClickedButton10();
+    afx_msg void OnBnClickedButton11();
+    afx_msg void OnBnClickedButton12();
+    afx_msg void OnBnClickedButtonA();
+    afx_msg void OnBnClickedButtonB();
+    afx_msg void OnBnClickedButtonC();
+    afx_msg void OnBnClickedButtonD();
+    afx_msg void OnBnClickedCheckDtmfPlayoutRx();
+    afx_msg void OnBnClickedCheckDtmfPlayTone();
+    afx_msg void OnBnClickedCheckStartStopMode();
+    afx_msg void OnBnClickedCheckEventInband();
+    afx_msg void OnBnClickedCheckDtmfFeedback();
+    afx_msg void OnBnClickedCheckDirectFeedback();
+    afx_msg void OnBnClickedRadioSingle();
+    afx_msg void OnBnClickedRadioMulti();
+    afx_msg void OnBnClickedRadioStartStop();
+    afx_msg void OnBnClickedButtonSetRxTelephonePt();
+    afx_msg void OnBnClickedButtonSetTxTelephonePt();
+    afx_msg void OnBnClickedButtonSendTelephoneEvent();
+    afx_msg void OnBnClickedCheckDetectInband();
+    afx_msg void OnBnClickedCheckDetectOutOfBand();
+    afx_msg void OnBnClickedCheckEventDetection();
+
+private:
+    void SendTelephoneEvent(unsigned char eventCode);
+
+private:
+    VoiceEngine*                _vePtr;
+    VoEBase*                    _veBasePtr;
+    VoEDtmf*                    _veDTMFPtr;
+    VoECodec*                   _veCodecPtr;
+    int                         _channel;
+    CString                     _strMsg;
+    CDialog*                    _parentDialogPtr;
+    TelephoneEventObserver*     _telephoneEventObserverPtr;
+    bool                        _PlayDtmfToneLocally;
+    bool                        _modeStartStop;
+    bool                        _modeSingle;
+    bool                        _modeSequence;
+    bool                        _playingDTMFTone;
+    bool                        _outOfBandEventDetection;
+    bool                        _inbandEventDetection;
+};
+
+IMPLEMENT_DYNAMIC(CTelephonyEvent, CDialog)
+
+CTelephonyEvent::CTelephonyEvent(VoiceEngine* voiceEngine,
+                                 int channel,
+                                 CDialog* pParentDialog,
+                                 CWnd* pParent /*=NULL*/)
+    : _vePtr(voiceEngine),
+      _channel(channel),
+      _PlayDtmfToneLocally(false),
+      _modeStartStop(false),
+      _modeSingle(true),
+      _modeSequence(false),
+      _playingDTMFTone(false),
+      _outOfBandEventDetection(true),
+      _inbandEventDetection(false),
+      _parentDialogPtr(pParentDialog),
+      _telephoneEventObserverPtr(NULL),
+      CDialog(CTelephonyEvent::IDD, pParent)
+{
+    _veBasePtr = VoEBase::GetInterface(_vePtr);
+    _veDTMFPtr = VoEDtmf::GetInterface(_vePtr);
+    _veCodecPtr = VoECodec::GetInterface(_vePtr);
+}
+
+CTelephonyEvent::~CTelephonyEvent()
+{
+    _veDTMFPtr->Release();
+    _veCodecPtr->Release();
+    _veBasePtr->Release();
+
+    if (_telephoneEventObserverPtr)
+    {
+        _veDTMFPtr->DeRegisterTelephoneEventDetection(_channel);
+        delete _telephoneEventObserverPtr;
+        _telephoneEventObserverPtr = NULL;
+    }
+}
+
+void CTelephonyEvent::DoDataExchange(CDataExchange* pDX)
+{
+    CDialog::DoDataExchange(pDX);
+}
+
+
+BEGIN_MESSAGE_MAP(CTelephonyEvent, CDialog)
+    ON_BN_CLICKED(IDC_BUTTON_1, &CTelephonyEvent::OnBnClickedButton1)
+    ON_BN_CLICKED(IDC_BUTTON_2, &CTelephonyEvent::OnBnClickedButton2)
+    ON_BN_CLICKED(IDC_BUTTON_3, &CTelephonyEvent::OnBnClickedButton3)
+    ON_BN_CLICKED(IDC_BUTTON_4, &CTelephonyEvent::OnBnClickedButton4)
+    ON_BN_CLICKED(IDC_BUTTON_5, &CTelephonyEvent::OnBnClickedButton5)
+    ON_BN_CLICKED(IDC_BUTTON_6, &CTelephonyEvent::OnBnClickedButton6)
+    ON_BN_CLICKED(IDC_BUTTON_7, &CTelephonyEvent::OnBnClickedButton7)
+    ON_BN_CLICKED(IDC_BUTTON_8, &CTelephonyEvent::OnBnClickedButton8)
+    ON_BN_CLICKED(IDC_BUTTON_9, &CTelephonyEvent::OnBnClickedButton9)
+    ON_BN_CLICKED(IDC_BUTTON_10, &CTelephonyEvent::OnBnClickedButton10)
+    ON_BN_CLICKED(IDC_BUTTON_11, &CTelephonyEvent::OnBnClickedButton11)
+    ON_BN_CLICKED(IDC_BUTTON_12, &CTelephonyEvent::OnBnClickedButton12)
+    ON_BN_CLICKED(IDC_BUTTON_13, &CTelephonyEvent::OnBnClickedButtonA)
+    ON_BN_CLICKED(IDC_BUTTON_14, &CTelephonyEvent::OnBnClickedButtonB)
+    ON_BN_CLICKED(IDC_BUTTON_15, &CTelephonyEvent::OnBnClickedButtonC)
+    ON_BN_CLICKED(IDC_BUTTON_16, &CTelephonyEvent::OnBnClickedButtonD)
+    ON_BN_CLICKED(IDC_CHECK_DTMF_PLAYOUT_RX, &CTelephonyEvent::OnBnClickedCheckDtmfPlayoutRx)
+    ON_BN_CLICKED(IDC_CHECK_DTMF_PLAY_TONE, &CTelephonyEvent::OnBnClickedCheckDtmfPlayTone)
+    ON_BN_CLICKED(IDC_CHECK_EVENT_INBAND, &CTelephonyEvent::OnBnClickedCheckEventInband)
+    ON_BN_CLICKED(IDC_CHECK_DTMF_FEEDBACK, &CTelephonyEvent::OnBnClickedCheckDtmfFeedback)
+    ON_BN_CLICKED(IDC_CHECK_DIRECT_FEEDBACK, &CTelephonyEvent::OnBnClickedCheckDirectFeedback)
+    ON_BN_CLICKED(IDC_RADIO_SINGLE, &CTelephonyEvent::OnBnClickedRadioSingle)
+    ON_BN_CLICKED(IDC_RADIO_MULTI, &CTelephonyEvent::OnBnClickedRadioMulti)
+    ON_BN_CLICKED(IDC_RADIO_START_STOP, &CTelephonyEvent::OnBnClickedRadioStartStop)
+    ON_BN_CLICKED(IDC_BUTTON_SET_RX_TELEPHONE_PT, &CTelephonyEvent::OnBnClickedButtonSetRxTelephonePt)
+    ON_BN_CLICKED(IDC_BUTTON_SET_TX_TELEPHONE_PT, &CTelephonyEvent::OnBnClickedButtonSetTxTelephonePt)
+    ON_BN_CLICKED(IDC_BUTTON_SEND_TELEPHONE_EVENT, &CTelephonyEvent::OnBnClickedButtonSendTelephoneEvent)
+    ON_BN_CLICKED(IDC_CHECK_DETECT_INBAND, &CTelephonyEvent::OnBnClickedCheckDetectInband)
+    ON_BN_CLICKED(IDC_CHECK_DETECT_OUT_OF_BAND, &CTelephonyEvent::OnBnClickedCheckDetectOutOfBand)
+    ON_BN_CLICKED(IDC_CHECK_EVENT_DETECTION, &CTelephonyEvent::OnBnClickedCheckEventDetection)
+END_MESSAGE_MAP()
+
+
+// CTelephonyEvent message handlers
+
+BOOL CTelephonyEvent::OnInitDialog()
+{
+    CDialog::OnInitDialog();
+
+    CString str;
+    GetWindowText(str);
+    str.AppendFormat(_T(" [channel = %d]"), _channel);
+    SetWindowText(str);
+
+    // Update dialog with latest playout state
+    bool enabled(false);
+    _veDTMFPtr->GetDtmfPlayoutStatus(_channel, enabled);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_DTMF_PLAYOUT_RX);
+    button->SetCheck(enabled ? BST_CHECKED : BST_UNCHECKED);
+
+    // Update dialog with latest feedback state
+    bool directFeedback(false);
+    _veDTMFPtr->GetDtmfFeedbackStatus(enabled, directFeedback);
+    button = (CButton*)GetDlgItem(IDC_CHECK_DTMF_FEEDBACK);
+    button->SetCheck(enabled ? BST_CHECKED : BST_UNCHECKED);
+    button = (CButton*)GetDlgItem(IDC_CHECK_DIRECT_FEEDBACK);
+    button->SetCheck(directFeedback ? BST_CHECKED : BST_UNCHECKED);
+
+    // Default event length is 160 ms
+    SetDlgItemInt(IDC_EDIT_EVENT_LENGTH, 160);
+
+    // Default event attenuation is 10 (<-> -10dBm0)
+    SetDlgItemInt(IDC_EDIT_EVENT_ATTENUATION, 10);
+
+    // Current event-detection status
+    TelephoneEventDetectionMethods detectionMethod(kOutOfBand);
+    if (_veDTMFPtr->GetTelephoneEventDetectionStatus(_channel, enabled, detectionMethod) == 0)
+    {
+        // DTMF detection is supported
+        if (enabled)
+        {
+            button = (CButton*)GetDlgItem(IDC_CHECK_EVENT_DETECTION);
+            button->SetCheck(BST_CHECKED);
+        }
+        if (detectionMethod == kOutOfBand || detectionMethod == kInAndOutOfBand)
+        {
+            button = (CButton*)GetDlgItem(IDC_CHECK_DETECT_OUT_OF_BAND);
+            button->SetCheck(BST_CHECKED);
+        }
+        if (detectionMethod == kInBand || detectionMethod == kInAndOutOfBand)
+        {
+            button = (CButton*)GetDlgItem(IDC_CHECK_DETECT_INBAND);
+            button->SetCheck(BST_CHECKED);
+        }
+    }
+    else
+    {
+        // DTMF detection is not supported
+        GetDlgItem(IDC_CHECK_EVENT_DETECTION)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_DETECT_OUT_OF_BAND)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_DETECT_INBAND)->EnableWindow(FALSE);
+        GetDlgItem(IDC_EDIT_ON_EVENT_INBAND)->EnableWindow(FALSE);
+        GetDlgItem(IDC_EDIT_ON_EVENT_OUT_OF_BAND)->EnableWindow(FALSE);
+    }
+
+    // Telephone-event PTs
+    unsigned char pt(0);
+    _veDTMFPtr->GetSendTelephoneEventPayloadType(_channel, pt);
+    SetDlgItemInt(IDC_EDIT_EVENT_TX_PT, pt);
+
+    CodecInst codec;
+    strcpy_s(codec.plname, 32, "telephone-event"); codec.channels = 1; codec.plfreq = 8000;
+    _veCodecPtr->GetRecPayloadType(_channel, codec);
+    SetDlgItemInt(IDC_EDIT_EVENT_RX_PT, codec.pltype);
+
+    if (_modeSingle)
+    {
+        ((CButton*)GetDlgItem(IDC_RADIO_SINGLE))->SetCheck(BST_CHECKED);
+    }
+    else if (_modeStartStop)
+    {
+        ((CButton*)GetDlgItem(IDC_RADIO_START_STOP))->SetCheck(BST_CHECKED);
+    }
+    else if (_modeSequence)
+    {
+        ((CButton*)GetDlgItem(IDC_RADIO_MULTI))->SetCheck(BST_CHECKED);
+    }
+
+    return TRUE;  // return TRUE  unless you set the focus to a control
+}
+void CTelephonyEvent::SendTelephoneEvent(unsigned char eventCode)
+{
+    BOOL ret;
+    int lengthMs(0);
+    int attenuationDb(0);
+    bool outBand(false);
+    int res(0);
+
+    // tone length
+    if (!_modeStartStop)
+    {
+        lengthMs = GetDlgItemInt(IDC_EDIT_EVENT_LENGTH, &ret);
+        if (ret == FALSE)
+        {
+            // use default length if edit field is empty
+            lengthMs = 160;
+        }
+    }
+
+    // attenuation
+    attenuationDb = GetDlgItemInt(IDC_EDIT_EVENT_ATTENUATION, &ret);
+    if (ret == FALSE)
+    {
+        // use default length if edit field is empty
+        attenuationDb = 10;
+    }
+
+    // out-band or in-band
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_EVENT_INBAND);
+    int check = button->GetCheck();
+    outBand = (check == BST_UNCHECKED);
+
+    if (eventCode < 16)
+        SetDlgItemInt(IDC_EDIT_DTMF_EVENT, eventCode);
+
+    if (_PlayDtmfToneLocally)
+    {
+        // --- PlayDtmfTone
+
+        if (_modeSingle)
+        {
+            TEST2(_veDTMFPtr->PlayDtmfTone(eventCode, lengthMs, attenuationDb) == 0,
+                _T("PlayDtmfTone(eventCode=%u, lengthMs=%d, attenuationDb=%d)"), eventCode, lengthMs, attenuationDb);
+        }
+        else if (_modeStartStop)
+        {
+            if (!_playingDTMFTone)
+            {
+                TEST2((res = _veDTMFPtr->StartPlayingDtmfTone(eventCode, attenuationDb)) == 0,
+                    _T("StartPlayingDtmfTone(eventCode=%u, attenuationDb=%d)"), eventCode, attenuationDb);
+            }
+            else
+            {
+                TEST2((res = _veDTMFPtr->StopPlayingDtmfTone()) == 0,
+                    _T("StopPlayingDTMFTone()"));
+            }
+            if (res == 0)
+                _playingDTMFTone = !_playingDTMFTone;
+        }
+        else if (_modeSequence)
+        {
+            int nTones(1);
+            int sleepMs(0);
+            int lenMult(1);
+            if (eventCode == 1)
+            {
+                nTones = 2;
+                sleepMs = lengthMs;
+                lenMult = 1;
+            }
+            else if (eventCode == 2)
+            {
+                nTones = 2;
+                sleepMs = lengthMs/2;
+                lenMult = 2;
+            }
+            else if (eventCode == 3)
+            {
+                nTones = 3;
+                sleepMs = 0;
+                lenMult = 1;
+            }
+            for (int i = 0; i < nTones; i++)
+            {
+                TEST2(_veDTMFPtr->PlayDtmfTone(eventCode, lengthMs, attenuationDb) == 0,
+                    _T("PlayDtmfTone(eventCode=%u, outBand=%d, lengthMs=%d, attenuationDb=%d)"), eventCode, lengthMs, attenuationDb);
+                Sleep(sleepMs);
+                lengthMs = lenMult*lengthMs;
+                eventCode++;
+            }
+        }
+    }
+    else
+    {
+        // --- SendTelephoneEvent
+
+        if (_modeSingle)
+        {
+            TEST2(_veDTMFPtr->SendTelephoneEvent(_channel, eventCode, outBand, lengthMs, attenuationDb) == 0,
+                _T("SendTelephoneEvent(channel=%d, eventCode=%u, outBand=%d, lengthMs=%d, attenuationDb=%d)"), _channel, eventCode, outBand, lengthMs, attenuationDb);
+        }
+        else if (_modeStartStop)
+        {
+            TEST2(false, _T("*** NOT IMPLEMENTED ***"));
+        }
+        else if (_modeSequence)
+        {
+            int nTones(1);
+            int sleepMs(0);
+            int lenMult(1);
+            if (eventCode == 1)
+            {
+                nTones = 2;
+                sleepMs = lengthMs;
+                lenMult = 1;
+            }
+            else if (eventCode == 2)
+            {
+                eventCode = 1;
+                nTones = 2;
+                sleepMs = lengthMs/2;
+                lenMult = 2;
+            }
+            else if (eventCode == 3)
+            {
+                eventCode = 1;
+                nTones = 3;
+                sleepMs = 0;
+                lenMult = 1;
+            }
+            for (int i = 0; i < nTones; i++)
+            {
+                TEST2(_veDTMFPtr->SendTelephoneEvent(_channel, eventCode, outBand, lengthMs, attenuationDb) == 0,
+                    _T("SendTelephoneEvent(channel=%d, eventCode=%u, outBand=%d, lengthMs=%d, attenuationDb=%d)"), _channel, eventCode, outBand, lengthMs, attenuationDb);
+                Sleep(sleepMs);
+                lengthMs = lenMult*lengthMs;
+                eventCode++;
+            }
+        }
+    }
+}
+
+void CTelephonyEvent::OnBnClickedButtonSendTelephoneEvent()
+{
+    BOOL ret;
+    unsigned char eventCode(0);
+
+    eventCode = (unsigned char)GetDlgItemInt(IDC_EDIT_EVENT_CODE, &ret);
+    if (ret == FALSE)
+    {
+        return;
+    }
+    SendTelephoneEvent(eventCode);
+}
+
+void CTelephonyEvent::OnBnClickedButton1()
+{
+    SendTelephoneEvent(1);
+}
+
+void CTelephonyEvent::OnBnClickedButton2()
+{
+    SendTelephoneEvent(2);
+}
+
+void CTelephonyEvent::OnBnClickedButton3()
+{
+    SendTelephoneEvent(3);
+}
+
+void CTelephonyEvent::OnBnClickedButton4()
+{
+    SendTelephoneEvent(4);
+}
+
+void CTelephonyEvent::OnBnClickedButton5()
+{
+    SendTelephoneEvent(5);
+}
+
+void CTelephonyEvent::OnBnClickedButton6()
+{
+    SendTelephoneEvent(6);
+}
+
+void CTelephonyEvent::OnBnClickedButton7()
+{
+    SendTelephoneEvent(7);
+}
+
+void CTelephonyEvent::OnBnClickedButton8()
+{
+    SendTelephoneEvent(8);
+}
+
+void CTelephonyEvent::OnBnClickedButton9()
+{
+    SendTelephoneEvent(9);
+}
+
+void CTelephonyEvent::OnBnClickedButton10()
+{
+    // *
+    SendTelephoneEvent(10);
+}
+
+void CTelephonyEvent::OnBnClickedButton11()
+{
+    SendTelephoneEvent(0);
+}
+
+void CTelephonyEvent::OnBnClickedButton12()
+{
+    // #
+    SendTelephoneEvent(11);
+}
+
+void CTelephonyEvent::OnBnClickedButtonA()
+{
+    SendTelephoneEvent(12);
+}
+
+void CTelephonyEvent::OnBnClickedButtonB()
+{
+    SendTelephoneEvent(13);
+}
+
+void CTelephonyEvent::OnBnClickedButtonC()
+{
+    SendTelephoneEvent(14);
+}
+
+void CTelephonyEvent::OnBnClickedButtonD()
+{
+    SendTelephoneEvent(15);
+}
+
+void CTelephonyEvent::OnBnClickedCheckDtmfPlayoutRx()
+{
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_DTMF_PLAYOUT_RX);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    TEST2(_veDTMFPtr->SetDtmfPlayoutStatus(_channel, enable) == 0, _T("SetDtmfPlayoutStatus(channel=%d, enable=%d)"), _channel, enable);
+}
+
+void CTelephonyEvent::OnBnClickedCheckDtmfPlayTone()
+{
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_DTMF_PLAY_TONE);
+    int check = button->GetCheck();
+    _PlayDtmfToneLocally = (check == BST_CHECKED);
+}
+
+void CTelephonyEvent::OnBnClickedRadioSingle()
+{
+    _modeStartStop = false;
+    _modeSingle = true;
+    _modeSequence = false;
+}
+
+void CTelephonyEvent::OnBnClickedRadioMulti()
+{
+    _modeStartStop = false;
+    _modeSingle = false;
+    _modeSequence = true;
+}
+
+void CTelephonyEvent::OnBnClickedRadioStartStop()
+{
+    // CButton* button = (CButton*)GetDlgItem(IDC_RADIO_START_STOP);
+    // int check = button->GetCheck();
+    _modeStartStop = true;
+    _modeSingle = false;
+    _modeSequence = false;
+    // GetDlgItem(IDC_EDIT_EVENT_LENGTH)->EnableWindow();
+}
+
+void CTelephonyEvent::OnBnClickedCheckEventInband()
+{
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_EVENT_INBAND);
+    int check = button->GetCheck();
+    GetDlgItem(IDC_EDIT_EVENT_CODE)->EnableWindow(check?FALSE:TRUE);
+    GetDlgItem(IDC_BUTTON_SEND_TELEPHONE_EVENT)->EnableWindow(check?FALSE:TRUE);
+}
+
+void CTelephonyEvent::OnBnClickedCheckDtmfFeedback()
+{
+    CButton* button(NULL);
+
+    // Retrieve feedback state
+    button = (CButton*)GetDlgItem(IDC_CHECK_DTMF_FEEDBACK);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+
+    // Retrieve direct-feedback setting
+    button = (CButton*)GetDlgItem(IDC_CHECK_DIRECT_FEEDBACK);
+    check = button->GetCheck();
+    const bool directFeedback = (check == BST_CHECKED);
+
+    // GetDlgItem(IDC_CHECK_DIRECT_FEEDBACK)->EnableWindow(enable ? TRUE : FALSE);
+
+    TEST2(_veDTMFPtr->SetDtmfFeedbackStatus(enable, directFeedback) == 0,
+        _T("SetDtmfFeedbackStatus(enable=%d, directFeedback=%d)"), enable, directFeedback);
+}
+
+void CTelephonyEvent::OnBnClickedCheckDirectFeedback()
+{
+    CButton* button(NULL);
+
+    // Retrieve feedback state
+    button = (CButton*)GetDlgItem(IDC_CHECK_DTMF_FEEDBACK);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+
+    // Retrieve new direct-feedback setting
+    button = (CButton*)GetDlgItem(IDC_CHECK_DIRECT_FEEDBACK);
+    check = button->GetCheck();
+    const bool directFeedback = (check == BST_CHECKED);
+
+    TEST2(_veDTMFPtr->SetDtmfFeedbackStatus(enable, directFeedback) == 0,
+        _T("SetDtmfFeedbackStatus(enable=%d, directFeedback=%d)"), enable, directFeedback);
+}
+
+void CTelephonyEvent::OnBnClickedButtonSetRxTelephonePt()
+{
+    BOOL ret;
+    int pt = GetDlgItemInt(IDC_EDIT_EVENT_RX_PT, &ret);
+    if (ret == FALSE)
+        return;
+    CodecInst codec;
+    strcpy_s(codec.plname, 32, "telephone-event");
+    codec.pltype = pt; codec.channels = 1; codec.plfreq = 8000;
+    TEST2(_veCodecPtr->SetRecPayloadType(_channel, codec) == 0,
+        _T("SetSendTelephoneEventPayloadType(channel=%d, codec.pltype=%u)"), _channel, codec.pltype);
+}
+
+void CTelephonyEvent::OnBnClickedButtonSetTxTelephonePt()
+{
+    BOOL ret;
+    int pt = GetDlgItemInt(IDC_EDIT_EVENT_TX_PT, &ret);
+    if (ret == FALSE)
+        return;
+    TEST2(_veDTMFPtr->SetSendTelephoneEventPayloadType(_channel, pt) == 0,
+        _T("SetSendTelephoneEventPayloadType(channel=%d, type=%u)"), _channel, pt);
+}
+
+void CTelephonyEvent::OnBnClickedCheckDetectInband()
+{
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_DETECT_INBAND);
+    int check = button->GetCheck();
+    _inbandEventDetection = (check == BST_CHECKED);
+
+    bool enabled(false);
+    TelephoneEventDetectionMethods detectionMethod;
+    _veDTMFPtr->GetTelephoneEventDetectionStatus(_channel, enabled, detectionMethod);
+    if (enabled)
+    {
+        // deregister
+        _veDTMFPtr->DeRegisterTelephoneEventDetection(_channel);
+        delete _telephoneEventObserverPtr;
+        _telephoneEventObserverPtr = NULL;
+        SetDlgItemText(IDC_EDIT_ON_EVENT_INBAND,_T(""));
+        SetDlgItemText(IDC_EDIT_ON_EVENT_OUT_OF_BAND,_T(""));
+    }
+    OnBnClickedCheckEventDetection();
+}
+
+void CTelephonyEvent::OnBnClickedCheckDetectOutOfBand()
+{
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_DETECT_OUT_OF_BAND);
+    int check = button->GetCheck();
+    _outOfBandEventDetection = (check == BST_CHECKED);
+
+    bool enabled(false);
+    TelephoneEventDetectionMethods detectionMethod;
+    _veDTMFPtr->GetTelephoneEventDetectionStatus(_channel, enabled, detectionMethod);
+    if (enabled)
+    {
+        // deregister
+        _veDTMFPtr->DeRegisterTelephoneEventDetection(_channel);
+        delete _telephoneEventObserverPtr;
+        _telephoneEventObserverPtr = NULL;
+        SetDlgItemText(IDC_EDIT_ON_EVENT_INBAND,_T(""));
+        SetDlgItemText(IDC_EDIT_ON_EVENT_OUT_OF_BAND,_T(""));
+    }
+    OnBnClickedCheckEventDetection();
+}
+
+void CTelephonyEvent::OnBnClickedCheckEventDetection()
+{
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_EVENT_DETECTION);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+
+    if (enable)
+    {
+        TelephoneEventDetectionMethods method(kInBand);
+        if (_inbandEventDetection && !_outOfBandEventDetection)
+            method = kInBand;
+        else if (!_inbandEventDetection && _outOfBandEventDetection)
+            method = kOutOfBand;
+        else if (_inbandEventDetection && _outOfBandEventDetection)
+            method = kInAndOutOfBand;
+
+        CWnd* wndOut = GetDlgItem(IDC_EDIT_ON_EVENT_OUT_OF_BAND);
+        CWnd* wndIn = GetDlgItem(IDC_EDIT_ON_EVENT_INBAND);
+        _telephoneEventObserverPtr = new TelephoneEventObserver(wndOut, wndIn);
+
+        TEST2(_veDTMFPtr->RegisterTelephoneEventDetection(_channel, method, *_telephoneEventObserverPtr) == 0,
+            _T("RegisterTelephoneEventDetection(channel=%d, detectionMethod=%d)"), _channel, method);
+    }
+    else
+    {
+        TEST2(_veDTMFPtr->DeRegisterTelephoneEventDetection(_channel) == 0,
+            _T("DeRegisterTelephoneEventDetection(channel=%d)"), _channel);
+        delete _telephoneEventObserverPtr;
+        _telephoneEventObserverPtr = NULL;
+        SetDlgItemText(IDC_EDIT_ON_EVENT_INBAND,_T(""));
+        SetDlgItemText(IDC_EDIT_ON_EVENT_OUT_OF_BAND,_T(""));
+    }
+}
+
+// ============================================================================
+//                                 CWinTestDlg dialog
+// ============================================================================
+
+CWinTestDlg::CWinTestDlg(CWnd* pParent /*=NULL*/)
+    : CDialog(CWinTestDlg::IDD, pParent),
+    _failCount(0),
+    _vePtr(NULL),
+    _veBasePtr(NULL),
+    _veCodecPtr(NULL),
+    _veNetworkPtr(NULL),
+    _veFilePtr(NULL),
+    _veHardwarePtr(NULL),
+    _veExternalMediaPtr(NULL),
+    _veApmPtr(NULL),
+    _veEncryptionPtr(NULL),
+    _veRtpRtcpPtr(NULL),
+    _transportPtr(NULL),
+    _encryptionPtr(NULL),
+    _externalMediaPtr(NULL),
+    _externalTransport(false),
+    _externalTransportBuild(false),
+    _checkPlayFileIn(0),
+    _checkPlayFileIn1(0),
+    _checkPlayFileIn2(0),
+    _checkPlayFileOut1(0),
+    _checkPlayFileOut2(0),
+    _checkAGC(0),
+    _checkAGC1(0),
+    _checkNS(0),
+    _checkNS1(0),
+    _checkEC(0),
+    _checkVAD1(0),
+    _checkVAD2(0),
+    _checkSrtpTx1(0),
+    _checkSrtpTx2(0),
+    _checkSrtpRx1(0),
+    _checkSrtpRx2(0),
+    _checkConference1(0),
+    _checkConference2(0),
+    _checkOnHold1(0),
+    _checkOnHold2(0),
+    _strComboIp1(_T("")),
+    _strComboIp2(_T("")),
+    _delayEstimate1(false),
+    _delayEstimate2(false),
+    _rxVad(false),
+    _nErrorCallbacks(0),
+    _timerTicks(0)
+{
+    m_hIcon = AfxGetApp()->LoadIcon(IDR_MAINFRAME);
+
+    _vePtr = VoiceEngine::Create();
+
+    VoiceEngine::SetTraceFilter(kTraceNone);
+    // VoiceEngine::SetTraceFilter(kTraceAll);
+    // VoiceEngine::SetTraceFilter(kTraceStream | kTraceStateInfo | kTraceWarning | kTraceError | kTraceCritical | kTraceApiCall | kTraceModuleCall | kTraceMemory | kTraceDebug | kTraceInfo);
+    // VoiceEngine::SetTraceFilter(kTraceStateInfo | kTraceWarning | kTraceError | kTraceCritical | kTraceApiCall | kTraceModuleCall | kTraceMemory | kTraceInfo);
+
+    VoiceEngine::SetTraceFile("ve_win_test.txt");
+    VoiceEngine::SetTraceCallback(NULL);
+
+    if (_vePtr)
+    {
+        _veExternalMediaPtr = VoEExternalMedia::GetInterface(_vePtr);
+        _veVolumeControlPtr = VoEVolumeControl::GetInterface(_vePtr);
+        _veEncryptionPtr = VoEEncryption::GetInterface(_vePtr);
+        _veVideoSyncPtr = VoEVideoSync::GetInterface(_vePtr);
+        _veNetworkPtr = VoENetwork::GetInterface(_vePtr);
+        _veFilePtr = VoEFile::GetInterface(_vePtr);
+        _veApmPtr = VoEAudioProcessing::GetInterface(_vePtr);
+
+        _veBasePtr = VoEBase::GetInterface(_vePtr);
+        _veCodecPtr = VoECodec::GetInterface(_vePtr);
+        _veHardwarePtr = VoEHardware::GetInterface(_vePtr);
+        _veRtpRtcpPtr = VoERTP_RTCP::GetInterface(_vePtr);
+        _transportPtr = new MyTransport(_veNetworkPtr);
+        _encryptionPtr = new MyEncryption();
+        _externalMediaPtr = new MediaProcessImpl();
+        _connectionObserverPtr = new ConnectionObserver();
+        _rxVadObserverPtr = new RxCallback();
+    }
+
+    _veBasePtr->RegisterVoiceEngineObserver(*this);
+
+    std::string resource_path = webrtc::test::ProjectRootPath();
+    if (resource_path == webrtc::test::kCannotFindProjectRootDir) {
+        _long_audio_file_path = "./";
+    } else {
+        _long_audio_file_path = resource_path + "data\\voice_engine\\";
+    }
+}
+
+CWinTestDlg::~CWinTestDlg()
+{
+    if (_connectionObserverPtr) delete _connectionObserverPtr;
+    if (_externalMediaPtr) delete _externalMediaPtr;
+    if (_transportPtr) delete _transportPtr;
+    if (_encryptionPtr) delete _encryptionPtr;
+    if (_rxVadObserverPtr) delete _rxVadObserverPtr;
+
+    if (_veExternalMediaPtr) _veExternalMediaPtr->Release();
+    if (_veEncryptionPtr) _veEncryptionPtr->Release();
+    if (_veVideoSyncPtr) _veVideoSyncPtr->Release();
+    if (_veVolumeControlPtr) _veVolumeControlPtr->Release();
+
+    if (_veBasePtr) _veBasePtr->Terminate();
+    if (_veBasePtr) _veBasePtr->Release();
+
+    if (_veCodecPtr) _veCodecPtr->Release();
+    if (_veNetworkPtr) _veNetworkPtr->Release();
+    if (_veFilePtr) _veFilePtr->Release();
+    if (_veHardwarePtr) _veHardwarePtr->Release();
+    if (_veApmPtr) _veApmPtr->Release();
+    if (_veRtpRtcpPtr) _veRtpRtcpPtr->Release();
+    if (_vePtr)
+    {
+        VoiceEngine::Delete(_vePtr);
+    }
+    VoiceEngine::SetTraceFilter(kTraceNone);
+}
+
+void CWinTestDlg::DoDataExchange(CDataExchange* pDX)
+{
+    CDialog::DoDataExchange(pDX);
+    DDX_CBString(pDX, IDC_COMBO_IP_1, _strComboIp1);
+    DDX_CBString(pDX, IDC_COMBO_IP_2, _strComboIp2);
+}
+
+BEGIN_MESSAGE_MAP(CWinTestDlg, CDialog)
+    ON_WM_SYSCOMMAND()
+    ON_WM_PAINT()
+    ON_WM_QUERYDRAGICON()
+    ON_WM_TIMER()
+    //}}AFX_MSG_MAP
+    ON_BN_CLICKED(IDC_BUTTON_CREATE_1, &CWinTestDlg::OnBnClickedButtonCreate1)
+    ON_BN_CLICKED(IDC_BUTTON_DELETE_1, &CWinTestDlg::OnBnClickedButtonDelete1)
+    ON_BN_CLICKED(IDC_BUTTON_CREATE_2, &CWinTestDlg::OnBnClickedButtonCreate2)
+    ON_BN_CLICKED(IDC_BUTTON_DELETE_2, &CWinTestDlg::OnBnClickedButtonDelete2)
+    ON_CBN_SELCHANGE(IDC_COMBO_CODEC_1, &CWinTestDlg::OnCbnSelchangeComboCodec1)
+    ON_BN_CLICKED(IDC_BUTTON_START_LISTEN_1, &CWinTestDlg::OnBnClickedButtonStartListen1)
+    ON_BN_CLICKED(IDC_BUTTON_STOP_LISTEN_1, &CWinTestDlg::OnBnClickedButtonStopListen1)
+    ON_BN_CLICKED(IDC_BUTTON_START_PLAYOUT_1, &CWinTestDlg::OnBnClickedButtonStartPlayout1)
+    ON_BN_CLICKED(IDC_BUTTON_STOP_PLAYOUT_1, &CWinTestDlg::OnBnClickedButtonStopPlayout1)
+    ON_BN_CLICKED(IDC_BUTTON_START_SEND_1, &CWinTestDlg::OnBnClickedButtonStartSend1)
+    ON_BN_CLICKED(IDC_BUTTON_STOP_SEND_1, &CWinTestDlg::OnBnClickedButtonStopSend1)
+    ON_CBN_SELCHANGE(IDC_COMBO_IP_2, &CWinTestDlg::OnCbnSelchangeComboIp2)
+    ON_CBN_SELCHANGE(IDC_COMBO_IP_1, &CWinTestDlg::OnCbnSelchangeComboIp1)
+    ON_CBN_SELCHANGE(IDC_COMBO_CODEC_2, &CWinTestDlg::OnCbnSelchangeComboCodec2)
+    ON_BN_CLICKED(IDC_BUTTON_START_LISTEN_2, &CWinTestDlg::OnBnClickedButtonStartListen2)
+    ON_BN_CLICKED(IDC_BUTTON_STOP_LISTEN_2, &CWinTestDlg::OnBnClickedButtonStopListen2)
+    ON_BN_CLICKED(IDC_BUTTON_START_PLAYOUT_2, &CWinTestDlg::OnBnClickedButtonStartPlayout2)
+    ON_BN_CLICKED(IDC_BUTTON_STOP_PLAYOUT_2, &CWinTestDlg::OnBnClickedButtonStopPlayout2)
+    ON_BN_CLICKED(IDC_BUTTON_START_SEND_2, &CWinTestDlg::OnBnClickedButtonStartSend2)
+    ON_BN_CLICKED(IDC_BUTTON_STOP_SEND_2, &CWinTestDlg::OnBnClickedButtonStopSend2)
+    ON_BN_CLICKED(IDC_CHECK_EXT_TRANS_1, &CWinTestDlg::OnBnClickedCheckExtTrans1)
+    ON_BN_CLICKED(IDC_CHECK_PLAY_FILE_IN_1, &CWinTestDlg::OnBnClickedCheckPlayFileIn1)
+    ON_BN_CLICKED(IDC_CHECK_PLAY_FILE_OUT_1, &CWinTestDlg::OnBnClickedCheckPlayFileOut1)
+    ON_BN_CLICKED(IDC_CHECK_EXT_TRANS_2, &CWinTestDlg::OnBnClickedCheckExtTrans2)
+    ON_BN_CLICKED(IDC_CHECK_PLAY_FILE_IN_2, &CWinTestDlg::OnBnClickedCheckPlayFileIn2)
+    ON_BN_CLICKED(IDC_CHECK_PLAY_FILE_OUT_2, &CWinTestDlg::OnBnClickedCheckPlayFileOut2)
+    ON_BN_CLICKED(IDC_CHECK_PLAY_FILE_IN, &CWinTestDlg::OnBnClickedCheckPlayFileIn)
+    ON_CBN_SELCHANGE(IDC_COMBO_REC_DEVICE, &CWinTestDlg::OnCbnSelchangeComboRecDevice)
+    ON_CBN_SELCHANGE(IDC_COMBO_PLAY_DEVICE, &CWinTestDlg::OnCbnSelchangeComboPlayDevice)
+    ON_BN_CLICKED(IDC_CHECK_EXT_MEDIA_IN_1, &CWinTestDlg::OnBnClickedCheckExtMediaIn1)
+    ON_BN_CLICKED(IDC_CHECK_EXT_MEDIA_OUT_1, &CWinTestDlg::OnBnClickedCheckExtMediaOut1)
+    ON_NOTIFY(NM_RELEASEDCAPTURE, IDC_SLIDER_INPUT_VOLUME, &CWinTestDlg::OnNMReleasedcaptureSliderInputVolume)
+    ON_NOTIFY(NM_RELEASEDCAPTURE, IDC_SLIDER_OUTPUT_VOLUME, &CWinTestDlg::OnNMReleasedcaptureSliderOutputVolume)
+    ON_BN_CLICKED(IDC_CHECK_AGC, &CWinTestDlg::OnBnClickedCheckAgc)
+    ON_BN_CLICKED(IDC_CHECK_NS, &CWinTestDlg::OnBnClickedCheckNs)
+    ON_BN_CLICKED(IDC_CHECK_EC, &CWinTestDlg::OnBnClickedCheckEc)
+    ON_BN_CLICKED(IDC_CHECK_VAD_1, &CWinTestDlg::OnBnClickedCheckVad1)
+    ON_BN_CLICKED(IDC_CHECK_VAD_3, &CWinTestDlg::OnBnClickedCheckVad2)
+    ON_BN_CLICKED(IDC_CHECK_EXT_MEDIA_IN_2, &CWinTestDlg::OnBnClickedCheckExtMediaIn2)
+    ON_BN_CLICKED(IDC_CHECK_EXT_MEDIA_OUT_2, &CWinTestDlg::OnBnClickedCheckExtMediaOut2)
+    ON_BN_CLICKED(IDC_CHECK_MUTE_IN, &CWinTestDlg::OnBnClickedCheckMuteIn)
+    ON_BN_CLICKED(IDC_CHECK_MUTE_IN_1, &CWinTestDlg::OnBnClickedCheckMuteIn1)
+    ON_BN_CLICKED(IDC_CHECK_MUTE_IN_2, &CWinTestDlg::OnBnClickedCheckMuteIn2)
+    ON_BN_CLICKED(IDC_CHECK_SRTP_TX_1, &CWinTestDlg::OnBnClickedCheckSrtpTx1)
+    ON_BN_CLICKED(IDC_CHECK_SRTP_RX_1, &CWinTestDlg::OnBnClickedCheckSrtpRx1)
+    ON_BN_CLICKED(IDC_CHECK_SRTP_TX_2, &CWinTestDlg::OnBnClickedCheckSrtpTx2)
+    ON_BN_CLICKED(IDC_CHECK_SRTP_RX_2, &CWinTestDlg::OnBnClickedCheckSrtpRx2)
+    ON_BN_CLICKED(IDC_CHECK_EXT_ENCRYPTION_1, &CWinTestDlg::OnBnClickedCheckExtEncryption1)
+    ON_BN_CLICKED(IDC_CHECK_EXT_ENCRYPTION_2, &CWinTestDlg::OnBnClickedCheckExtEncryption2)
+    ON_BN_CLICKED(IDC_BUTTON_DTMF_1, &CWinTestDlg::OnBnClickedButtonDtmf1)
+    ON_BN_CLICKED(IDC_CHECK_REC_MIC, &CWinTestDlg::OnBnClickedCheckRecMic)
+    ON_BN_CLICKED(IDC_BUTTON_DTMF_2, &CWinTestDlg::OnBnClickedButtonDtmf2)
+    ON_BN_CLICKED(IDC_BUTTON_TEST_1, &CWinTestDlg::OnBnClickedButtonTest1)
+    ON_BN_CLICKED(IDC_CHECK_CONFERENCE_1, &CWinTestDlg::OnBnClickedCheckConference1)
+    ON_BN_CLICKED(IDC_CHECK_CONFERENCE_2, &CWinTestDlg::OnBnClickedCheckConference2)
+    ON_BN_CLICKED(IDC_CHECK_ON_HOLD_1, &CWinTestDlg::OnBnClickedCheckOnHold1)
+    ON_BN_CLICKED(IDC_CHECK_ON_HOLD_2, &CWinTestDlg::OnBnClickedCheckOnHold2)
+    ON_BN_CLICKED(IDC_CHECK_EXT_MEDIA_IN, &CWinTestDlg::OnBnClickedCheckExtMediaIn)
+    ON_BN_CLICKED(IDC_CHECK_EXT_MEDIA_OUT, &CWinTestDlg::OnBnClickedCheckExtMediaOut)
+    ON_LBN_SELCHANGE(IDC_LIST_CODEC_1, &CWinTestDlg::OnLbnSelchangeListCodec1)
+    ON_NOTIFY(NM_RELEASEDCAPTURE, IDC_SLIDER_PAN_LEFT, &CWinTestDlg::OnNMReleasedcaptureSliderPanLeft)
+    ON_NOTIFY(NM_RELEASEDCAPTURE, IDC_SLIDER_PAN_RIGHT, &CWinTestDlg::OnNMReleasedcaptureSliderPanRight)
+    ON_BN_CLICKED(IDC_BUTTON_VERSION, &CWinTestDlg::OnBnClickedButtonVersion)
+    ON_BN_CLICKED(IDC_CHECK_DELAY_ESTIMATE_1, &CWinTestDlg::OnBnClickedCheckDelayEstimate1)
+    ON_BN_CLICKED(IDC_CHECK_RXVAD, &CWinTestDlg::OnBnClickedCheckRxvad)
+    ON_BN_CLICKED(IDC_CHECK_AGC_1, &CWinTestDlg::OnBnClickedCheckAgc1)
+    ON_BN_CLICKED(IDC_CHECK_NS_1, &CWinTestDlg::OnBnClickedCheckNs1)
+    ON_BN_CLICKED(IDC_CHECK_REC_CALL, &CWinTestDlg::OnBnClickedCheckRecCall)
+    ON_BN_CLICKED(IDC_CHECK_TYPING_DETECTION, &CWinTestDlg::OnBnClickedCheckTypingDetection)
+    ON_BN_CLICKED(IDC_CHECK_FEC, &CWinTestDlg::OnBnClickedCheckFEC)
+    ON_BN_CLICKED(IDC_BUTTON_CLEAR_ERROR_CALLBACK, &CWinTestDlg::OnBnClickedButtonClearErrorCallback)
+END_MESSAGE_MAP()
+
+BOOL CWinTestDlg::UpdateTest(bool failed, const CString& strMsg)
+{
+    if (failed)
+    {
+        SetDlgItemText(IDC_EDIT_MESSAGE, strMsg);
+        _strErr.Format(_T("FAILED (error=%d)"), _veBasePtr->LastError());
+        SetDlgItemText(IDC_EDIT_RESULT, _strErr);
+        _failCount++;
+        SetDlgItemInt(IDC_EDIT_N_FAILS, _failCount);
+        SetDlgItemInt(IDC_EDIT_LAST_ERROR, _veBasePtr->LastError());
+    }
+    else
+    {
+        SetDlgItemText(IDC_EDIT_MESSAGE, strMsg);
+        SetDlgItemText(IDC_EDIT_RESULT, _T("OK"));
+    }
+    return TRUE;
+}
+
+
+// CWinTestDlg message handlers
+
+BOOL CWinTestDlg::OnInitDialog()
+{
+    CDialog::OnInitDialog();
+
+    // Add "About..." menu item to system menu.
+
+    // IDM_ABOUTBOX must be in the system command range.
+    ASSERT((IDM_ABOUTBOX & 0xFFF0) == IDM_ABOUTBOX);
+    ASSERT(IDM_ABOUTBOX < 0xF000);
+
+    CMenu* pSysMenu = GetSystemMenu(FALSE);
+    if (pSysMenu != NULL)
+    {
+        CString strAboutMenu;
+        strAboutMenu.LoadString(IDS_ABOUTBOX);
+        if (!strAboutMenu.IsEmpty())
+        {
+            pSysMenu->AppendMenu(MF_SEPARATOR);
+            pSysMenu->AppendMenu(MF_STRING, IDM_ABOUTBOX, strAboutMenu);
+        }
+    }
+
+    // Set the icon for this dialog.  The framework does this automatically
+    //  when the application's main window is not a dialog
+    SetIcon(m_hIcon, TRUE);            // Set big icon
+    SetIcon(m_hIcon, FALSE);        // Set small icon
+
+    // char version[1024];
+    // _veBasePtr->GetVersion(version);
+    // AfxMessageBox(version, MB_OK);
+
+    if (_veBasePtr->Init() != 0)
+    {
+         AfxMessageBox(_T("Init() failed "), MB_OKCANCEL);
+    }
+
+    int ch = _veBasePtr->CreateChannel();
+    if (_veBasePtr->SetSendDestination(ch, 1234, "127.0.0.1") == -1)
+    {
+        if (_veBasePtr->LastError() == VE_EXTERNAL_TRANSPORT_ENABLED)
+        {
+            _strMsg.Format(_T("*** External transport build ***"));
+            SetDlgItemText(IDC_EDIT_MESSAGE, _strMsg);
+            _externalTransportBuild = true;
+        }
+    }
+    _veBasePtr->DeleteChannel(ch);
+
+    // --- Add (preferred) local IPv4 address in title
+
+    if (_veNetworkPtr)
+    {
+        char localIP[64];
+        _veNetworkPtr->GetLocalIP(localIP);
+        CString str;
+        GetWindowText(str);
+        str.AppendFormat(_T("  [Local IPv4 address: %s]"), CharToTchar(localIP, 64));
+        SetWindowText(str);
+    }
+
+    // --- Volume sliders
+
+    if (_veVolumeControlPtr)
+    {
+        unsigned int volume(0);
+        CSliderCtrl* slider(NULL);
+
+        slider = (CSliderCtrl*)GetDlgItem(IDC_SLIDER_INPUT_VOLUME);
+        slider->SetRangeMin(0);
+        slider->SetRangeMax(255);
+        _veVolumeControlPtr->GetMicVolume(volume);
+        slider->SetPos(volume);
+
+        slider = (CSliderCtrl*)GetDlgItem(IDC_SLIDER_OUTPUT_VOLUME);
+        slider->SetRangeMin(0);
+        slider->SetRangeMax(255);
+        _veVolumeControlPtr->GetSpeakerVolume(volume);
+        slider->SetPos(volume);
+    }
+
+    // --- Panning sliders
+
+    if (_veVolumeControlPtr)
+    {
+        float lVol(0.0);
+        float rVol(0.0);
+        int leftVol, rightVol;
+        CSliderCtrl* slider(NULL);
+
+        _veVolumeControlPtr->GetOutputVolumePan(-1, lVol, rVol);
+
+        leftVol = (int)(lVol*10.0f);    // [0,10]
+        rightVol = (int)(rVol*10.0f);    // [0,10]
+
+        slider = (CSliderCtrl*)GetDlgItem(IDC_SLIDER_PAN_LEFT);
+        slider->SetRange(0,10);
+        slider->SetPos(10-leftVol);        // pos 0 <=> max pan 1.0 (top of slider)
+
+        slider = (CSliderCtrl*)GetDlgItem(IDC_SLIDER_PAN_RIGHT);
+        slider->SetRange(0,10);
+        slider->SetPos(10-rightVol);
+    }
+
+    // --- APM settings
+
+    bool enable(false);
+    CButton* button(NULL);
+
+    AgcModes agcMode(kAgcDefault);
+    if (_veApmPtr->GetAgcStatus(enable, agcMode) == 0)
+    {
+        button = (CButton*)GetDlgItem(IDC_CHECK_AGC);
+        enable ? button->SetCheck(BST_CHECKED) : button->SetCheck(BST_UNCHECKED);
+    }
+    else
+    {
+        // AGC is not supported
+        GetDlgItem(IDC_CHECK_AGC)->EnableWindow(FALSE);
+    }
+
+    NsModes nsMode(kNsDefault);
+    if (_veApmPtr->GetNsStatus(enable, nsMode) == 0)
+    {
+        button = (CButton*)GetDlgItem(IDC_CHECK_NS);
+        enable ? button->SetCheck(BST_CHECKED) : button->SetCheck(BST_UNCHECKED);
+    }
+    else
+    {
+        // NS is not supported
+        GetDlgItem(IDC_CHECK_NS)->EnableWindow(FALSE);
+    }
+
+    EcModes ecMode(kEcDefault);
+    if (_veApmPtr->GetEcStatus(enable, ecMode) == 0)
+    {
+        button = (CButton*)GetDlgItem(IDC_CHECK_EC);
+        enable ? button->SetCheck(BST_CHECKED) : button->SetCheck(BST_UNCHECKED);
+    }
+    else
+    {
+        // EC is not supported
+        GetDlgItem(IDC_CHECK_EC)->EnableWindow(FALSE);
+    }
+
+    // --- First channel section
+
+    GetDlgItem(IDC_COMBO_IP_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_EDIT_TX_PORT_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_EDIT_RX_PORT_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_COMBO_CODEC_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_LIST_CODEC_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_EDIT_CODEC_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_DELETE_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_START_LISTEN_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_STOP_LISTEN_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_START_PLAYOUT_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_STOP_PLAYOUT_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_START_SEND_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_STOP_SEND_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_EXT_TRANS_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_PLAY_FILE_IN_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_PLAY_FILE_OUT_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_EXT_MEDIA_IN_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_EXT_MEDIA_OUT_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_VAD_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_MUTE_IN_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_SRTP_TX_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_SRTP_RX_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_EXT_ENCRYPTION_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_DTMF_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_CONFERENCE_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_ON_HOLD_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_DELAY_ESTIMATE_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_RXVAD)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_AGC_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_NS_1)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_FEC)->EnableWindow(FALSE);
+
+    CComboBox* comboIP(NULL);
+    comboIP = (CComboBox*)GetDlgItem(IDC_COMBO_IP_1);
+    comboIP->AddString(_T("127.0.0.1"));
+    comboIP->SetCurSel(0);
+
+    SetDlgItemInt(IDC_EDIT_TX_PORT_1, 1111);
+    SetDlgItemInt(IDC_EDIT_RX_PORT_1, 1111);
+
+    // --- Add supported codecs to the codec combo box
+
+    CComboBox* comboCodec(NULL);
+    comboCodec = (CComboBox*)GetDlgItem(IDC_COMBO_CODEC_1);
+    comboCodec->ResetContent();
+
+    int numCodecs = _veCodecPtr->NumOfCodecs();
+    for (int idx = 0; idx < numCodecs; idx++)
+    {
+        CodecInst codec;
+        _veCodecPtr->GetCodec(idx, codec);
+        if ((_stricmp(codec.plname, "CNNB") != 0) &&
+            (_stricmp(codec.plname, "CNWB") != 0))
+        {
+            CString strCodec;
+            if (_stricmp(codec.plname, "G7221") == 0)
+                strCodec.Format(_T("%s (%d/%d/%d)"), CharToTchar(codec.plname, 32), codec.pltype, codec.plfreq/1000, codec.rate/1000);
+            else
+                strCodec.Format(_T("%s (%d/%d)"), CharToTchar(codec.plname, 32), codec.pltype, codec.plfreq/1000);
+            comboCodec->AddString(strCodec);
+        }
+        if (idx == 0)
+        {
+            SetDlgItemInt(IDC_EDIT_CODEC_1, codec.pltype);
+        }
+    }
+    comboCodec->SetCurSel(0);
+
+    CListBox* list = (CListBox*)GetDlgItem(IDC_LIST_CODEC_1);
+    list->AddString(_T("pltype"));
+    list->AddString(_T("plfreq"));
+    list->AddString(_T("pacsize"));
+    list->AddString(_T("channels"));
+    list->AddString(_T("rate"));
+    list->SetCurSel(0);
+
+    // --- Add available audio devices to the combo boxes
+
+    CComboBox* comboRecDevice(NULL);
+    CComboBox* comboPlayDevice(NULL);
+    comboRecDevice = (CComboBox*)GetDlgItem(IDC_COMBO_REC_DEVICE);
+    comboPlayDevice = (CComboBox*)GetDlgItem(IDC_COMBO_PLAY_DEVICE);
+    comboRecDevice->ResetContent();
+    comboPlayDevice->ResetContent();
+
+    if (_veHardwarePtr)
+    {
+        int numPlayout(0);
+        int numRecording(0);
+        char nameStr[128];
+        char guidStr[128];
+        CString strDevice;
+        AudioLayers audioLayer;
+
+        _veHardwarePtr->GetAudioDeviceLayer(audioLayer);
+        if (kAudioWindowsWave == audioLayer)
+        {
+            strDevice.FormatMessage(_T("Audio Layer: Windows Wave API"));
+        }
+        else if (kAudioWindowsCore == audioLayer)
+        {
+            strDevice.FormatMessage(_T("Audio Layer: Windows Core API"));
+        }
+        else
+        {
+            strDevice.FormatMessage(_T("Audio Layer: ** UNKNOWN **"));
+        }
+        SetDlgItemText(IDC_EDIT_AUDIO_LAYER, (LPCTSTR)strDevice);
+
+        _veHardwarePtr->GetNumOfRecordingDevices(numRecording);
+
+        for (int idx = 0; idx < numRecording; idx++)
+        {
+            _veHardwarePtr->GetRecordingDeviceName(idx, nameStr, guidStr);
+      strDevice.Format(_T("%s"), CharToTchar(nameStr, 128));
+            comboRecDevice->AddString(strDevice);
+        }
+        // Select default (communication) device in the combo box
+        _veHardwarePtr->GetRecordingDeviceName(-1, nameStr, guidStr);
+    CString tmp = CString(nameStr);
+        int nIndex = comboRecDevice->SelectString(-1, tmp);
+        ASSERT(nIndex != CB_ERR);
+
+        _veHardwarePtr->GetNumOfPlayoutDevices(numPlayout);
+
+        for (int idx = 0; idx < numPlayout; idx++)
+        {
+            _veHardwarePtr->GetPlayoutDeviceName(idx, nameStr, guidStr);
+      strDevice.Format(_T("%s"), CharToTchar(nameStr, 128));
+            comboPlayDevice->AddString(strDevice);
+        }
+        // Select default (communication) device in the combo box
+        _veHardwarePtr->GetPlayoutDeviceName(-1, nameStr, guidStr);
+        nIndex = comboPlayDevice->SelectString(-1, CString(nameStr));
+        ASSERT(nIndex != CB_ERR);
+    }
+
+    // --- Second channel section
+
+    GetDlgItem(IDC_COMBO_IP_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_EDIT_TX_PORT_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_EDIT_RX_PORT_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_COMBO_CODEC_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_DELETE_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_START_LISTEN_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_STOP_LISTEN_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_START_PLAYOUT_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_STOP_PLAYOUT_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_START_SEND_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_STOP_SEND_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_EXT_TRANS_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_PLAY_FILE_IN_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_PLAY_FILE_OUT_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_EXT_MEDIA_IN_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_EXT_MEDIA_OUT_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_VAD_3)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_MUTE_IN_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_SRTP_TX_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_SRTP_RX_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_EXT_ENCRYPTION_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_BUTTON_DTMF_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_CONFERENCE_2)->EnableWindow(FALSE);
+    GetDlgItem(IDC_CHECK_ON_HOLD_2)->EnableWindow(FALSE);
+
+    comboIP = (CComboBox*)GetDlgItem(IDC_COMBO_IP_2);
+    comboIP->AddString(_T("127.0.0.1"));
+    comboIP->SetCurSel(0);
+
+    SetDlgItemInt(IDC_EDIT_TX_PORT_2, 2222);
+    SetDlgItemInt(IDC_EDIT_RX_PORT_2, 2222);
+
+    comboCodec = (CComboBox*)GetDlgItem(IDC_COMBO_CODEC_2);
+    comboCodec->ResetContent();
+
+    if (_veCodecPtr)
+    {
+        numCodecs = _veCodecPtr->NumOfCodecs();
+        for (int idx = 0; idx < numCodecs; idx++)
+        {
+            CodecInst codec;
+            _veCodecPtr->GetCodec(idx, codec);
+            CString strCodec;
+            strCodec.Format(_T("%s (%d/%d)"), CharToTchar(codec.plname, 32), codec.pltype, codec.plfreq/1000);
+            comboCodec->AddString(strCodec);
+        }
+        comboCodec->SetCurSel(0);
+    }
+
+    // --- Start windows timer
+
+    SetTimer(0, 1000, NULL);
+
+    return TRUE;  // return TRUE  unless you set the focus to a control
+}
+
+void CWinTestDlg::OnSysCommand(UINT nID, LPARAM lParam)
+{
+    if ((nID & 0xFFF0) == IDM_ABOUTBOX)
+    {
+        CAboutDlg dlgAbout;
+        dlgAbout.DoModal();
+    }
+    else if (nID == SC_CLOSE)
+    {
+        BOOL ret;
+        int channel(0);
+        channel = GetDlgItemInt(IDC_EDIT_1, &ret);
+        if (ret == TRUE)
+        {
+            _veBasePtr->DeleteChannel(channel);
+        }
+        channel = GetDlgItemInt(IDC_EDIT_2, &ret);
+        if (ret == TRUE)
+        {
+            _veBasePtr->DeleteChannel(channel);
+        }
+
+        CDialog::OnSysCommand(nID, lParam);
+    }
+    else
+    {
+        CDialog::OnSysCommand(nID, lParam);
+    }
+
+}
+
+// If you add a minimize button to your dialog, you will need the code below
+//  to draw the icon.  For MFC applications using the document/view model,
+//  this is automatically done for you by the framework.
+
+void CWinTestDlg::OnPaint()
+{
+    if (IsIconic())
+    {
+        CPaintDC dc(this); // device context for painting
+
+        SendMessage(WM_ICONERASEBKGND, reinterpret_cast<WPARAM>(dc.GetSafeHdc()), 0);
+
+        // Center icon in client rectangle
+        int cxIcon = GetSystemMetrics(SM_CXICON);
+        int cyIcon = GetSystemMetrics(SM_CYICON);
+        CRect rect;
+        GetClientRect(&rect);
+        int x = (rect.Width() - cxIcon + 1) / 2;
+        int y = (rect.Height() - cyIcon + 1) / 2;
+
+        // Draw the icon
+        dc.DrawIcon(x, y, m_hIcon);
+    }
+    else
+    {
+        CDialog::OnPaint();
+    }
+}
+
+// The system calls this function to obtain the cursor to display while the user drags
+//  the minimized window.
+HCURSOR CWinTestDlg::OnQueryDragIcon()
+{
+    return static_cast<HCURSOR>(m_hIcon);
+}
+
+
+void CWinTestDlg::OnBnClickedButtonCreate1()
+{
+    int channel(0);
+    TEST((channel = _veBasePtr->CreateChannel()) >= 0, _T("CreateChannel(channel=%d)"), channel);
+    if (channel >= 0)
+    {
+        _veRtpRtcpPtr->RegisterRTPObserver(channel, *this);
+
+        SetDlgItemInt(IDC_EDIT_1, channel);
+        GetDlgItem(IDC_BUTTON_CREATE_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_DELETE_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_COMBO_IP_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_EDIT_TX_PORT_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_EDIT_RX_PORT_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_COMBO_CODEC_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_LIST_CODEC_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_EDIT_CODEC_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_START_LISTEN_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_START_PLAYOUT_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_START_SEND_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_EXT_TRANS_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_PLAY_FILE_IN_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_PLAY_FILE_OUT_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_EXT_MEDIA_IN_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_EXT_MEDIA_OUT_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_VAD_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_MUTE_IN_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_SRTP_TX_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_SRTP_RX_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_EXT_ENCRYPTION_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_DTMF_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_ON_HOLD_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_DELAY_ESTIMATE_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_RXVAD)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_AGC_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_NS_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_FEC)->EnableWindow(TRUE);
+
+        // Always set send codec to default codec <=> index 0.
+        CodecInst codec;
+        _veCodecPtr->GetCodec(0, codec);
+        _veCodecPtr->SetSendCodec(channel, codec);
+    }
+}
+
+void CWinTestDlg::OnBnClickedButtonCreate2()
+{
+    int channel(0);
+    TEST((channel = _veBasePtr->CreateChannel()) >=0 , _T("CreateChannel(%d)"), channel);
+    if (channel >= 0)
+    {
+        _veRtpRtcpPtr->RegisterRTPObserver(channel, *this);
+
+        SetDlgItemInt(IDC_EDIT_2, channel);
+        GetDlgItem(IDC_BUTTON_CREATE_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_DELETE_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_COMBO_IP_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_EDIT_TX_PORT_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_EDIT_RX_PORT_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_COMBO_CODEC_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_START_LISTEN_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_START_PLAYOUT_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_START_SEND_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_EXT_TRANS_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_PLAY_FILE_IN_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_PLAY_FILE_OUT_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_EXT_MEDIA_IN_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_EXT_MEDIA_OUT_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_VAD_3)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_MUTE_IN_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_SRTP_TX_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_SRTP_RX_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_EXT_ENCRYPTION_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_DTMF_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_CONFERENCE_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_CHECK_ON_HOLD_2)->EnableWindow(TRUE);
+
+        // Always set send codec to default codec <=> index 0.
+        CodecInst codec;
+        _veCodecPtr->GetCodec(0, codec);
+        _veCodecPtr->SetSendCodec(channel, codec);
+    }
+}
+
+void CWinTestDlg::OnBnClickedButtonDelete1()
+{
+    BOOL ret;
+    int channel = GetDlgItemInt(IDC_EDIT_1, &ret);
+    if (ret == TRUE)
+    {
+        _delayEstimate1 = false;
+        _rxVad = false;
+        _veRtpRtcpPtr->DeRegisterRTPObserver(channel);
+        TEST(_veBasePtr->DeleteChannel(channel) == 0, _T("DeleteChannel(channel=%d)"), channel);
+        SetDlgItemText(IDC_EDIT_1, _T(""));
+        GetDlgItem(IDC_BUTTON_CREATE_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_DELETE_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_COMBO_IP_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_EDIT_TX_PORT_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_EDIT_RX_PORT_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_COMBO_CODEC_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_LIST_CODEC_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_EDIT_CODEC_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_START_LISTEN_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_START_PLAYOUT_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_START_SEND_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_STOP_LISTEN_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_STOP_PLAYOUT_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_STOP_SEND_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_DTMF_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_EXT_TRANS_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_PLAY_FILE_IN_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_PLAY_FILE_OUT_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_EXT_MEDIA_IN_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_EXT_MEDIA_OUT_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_VAD_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_MUTE_IN_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_SRTP_TX_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_SRTP_RX_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_EXT_ENCRYPTION_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_CONFERENCE_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_ON_HOLD_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_DELAY_ESTIMATE_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_AGC_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_NS_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_RXVAD)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_FEC)->EnableWindow(FALSE);
+        SetDlgItemText(IDC_EDIT_RXVAD, _T(""));
+        GetDlgItem(IDC_EDIT_RXVAD)->EnableWindow(FALSE);
+        CButton* button = (CButton*)GetDlgItem(IDC_CHECK_EXT_TRANS_1);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_PLAY_FILE_IN_1);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_PLAY_FILE_OUT_1);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_EXT_MEDIA_IN_1);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_EXT_MEDIA_OUT_1);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_VAD_1);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_MUTE_IN_1);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_SRTP_TX_1);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_SRTP_RX_1);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_EXT_ENCRYPTION_1);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_CONFERENCE_1);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_ON_HOLD_1);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_DELAY_ESTIMATE_1);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_AGC_1);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_NS_1);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_RXVAD);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_FEC);
+        button->SetCheck(BST_UNCHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedButtonDelete2()
+{
+    BOOL ret;
+    int channel = GetDlgItemInt(IDC_EDIT_2, &ret);
+    if (ret == TRUE)
+    {
+        _delayEstimate2 = false;
+        _veRtpRtcpPtr->DeRegisterRTPObserver(channel);
+        TEST(_veBasePtr->DeleteChannel(channel) == 0, _T("DeleteChannel(%d)"), channel);
+        SetDlgItemText(IDC_EDIT_2, _T(""));
+        GetDlgItem(IDC_BUTTON_CREATE_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_DELETE_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_COMBO_IP_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_EDIT_TX_PORT_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_EDIT_RX_PORT_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_COMBO_CODEC_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_START_LISTEN_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_START_PLAYOUT_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_START_SEND_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_STOP_LISTEN_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_STOP_PLAYOUT_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_STOP_SEND_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_EXT_TRANS_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_PLAY_FILE_IN_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_PLAY_FILE_OUT_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_EXT_MEDIA_IN_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_EXT_MEDIA_OUT_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_MUTE_IN_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_VAD_3)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_SRTP_TX_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_SRTP_RX_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_EXT_ENCRYPTION_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_CONFERENCE_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_DTMF_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_CHECK_ON_HOLD_2)->EnableWindow(FALSE);
+        CButton* button = (CButton*)GetDlgItem(IDC_CHECK_EXT_TRANS_2);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_PLAY_FILE_IN_2);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_PLAY_FILE_OUT_2);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_EXT_MEDIA_IN_2);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_EXT_MEDIA_OUT_2);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_VAD_3);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_MUTE_IN_2);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_SRTP_TX_2);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_SRTP_RX_2);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_EXT_ENCRYPTION_2);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_CONFERENCE_2);
+        button->SetCheck(BST_UNCHECKED);
+        button = (CButton*)GetDlgItem(IDC_CHECK_ON_HOLD_2);
+        button->SetCheck(BST_UNCHECKED);
+    }
+}
+
+void CWinTestDlg::OnCbnSelchangeComboIp1()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CString str;
+    int port = GetDlgItemInt(IDC_EDIT_TX_PORT_1);
+    CComboBox* comboIP = (CComboBox*)GetDlgItem(IDC_COMBO_IP_1);
+    int n = comboIP->GetLBTextLen(0);
+    comboIP->GetLBText(0, str.GetBuffer(n));
+    TEST(_veBasePtr->SetSendDestination(channel, port, TcharToChar(str.GetBuffer(n), -1)) == 0,
+        _T("SetSendDestination(channel=%d, port=%d, ip=%s)"), channel, port, str.GetBuffer(n));
+    str.ReleaseBuffer();
+}
+
+void CWinTestDlg::OnCbnSelchangeComboIp2()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    CString str;
+    int port = GetDlgItemInt(IDC_EDIT_TX_PORT_2);
+    CComboBox* comboIP = (CComboBox*)GetDlgItem(IDC_COMBO_IP_2);
+    int n = comboIP->GetLBTextLen(0);
+    comboIP->GetLBText(0, str.GetBuffer(n));
+    TEST(_veBasePtr->SetSendDestination(channel, port, TcharToChar(str.GetBuffer(n), -1)) == 0,
+        _T("SetSendDestination(channel=%d, port=%d, ip=%s)"), channel, port, str.GetBuffer(n));
+    str.ReleaseBuffer();
+}
+
+void CWinTestDlg::OnCbnSelchangeComboCodec1()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+
+    CodecInst codec;
+    CComboBox* comboCodec(NULL);
+    comboCodec = (CComboBox*)GetDlgItem(IDC_COMBO_CODEC_1);
+    int index = comboCodec->GetCurSel();
+    _veCodecPtr->GetCodec(index, codec);
+    if (strncmp(codec.plname, "ISAC", 4) == 0)
+    {
+        // Set iSAC to adaptive mode by default.
+        codec.rate = -1;
+    }
+    TEST(_veCodecPtr->SetSendCodec(channel, codec) == 0,
+        _T("SetSendCodec(channel=%d, plname=%s, pltype=%d, plfreq=%d, rate=%d, pacsize=%d, channels=%d)"),
+        channel, CharToTchar(codec.plname, 32), codec.pltype, codec.plfreq, codec.rate, codec.pacsize, codec.channels);
+
+    CListBox* list = (CListBox*)GetDlgItem(IDC_LIST_CODEC_1);
+    list->SetCurSel(0);
+    SetDlgItemInt(IDC_EDIT_CODEC_1, codec.pltype);
+}
+
+void CWinTestDlg::OnLbnSelchangeListCodec1()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+
+    CListBox* list = (CListBox*)GetDlgItem(IDC_LIST_CODEC_1);
+    int listIdx = list->GetCurSel();
+    if (listIdx < 0)
+        return;
+    CString str;
+    list->GetText(listIdx, str);
+
+    CodecInst codec;
+    _veCodecPtr->GetSendCodec(channel, codec);
+
+    int value = GetDlgItemInt(IDC_EDIT_CODEC_1);
+    if (str == _T("pltype"))
+    {
+        codec.pltype = value;
+    }
+    else if (str == _T("plfreq"))
+    {
+        codec.plfreq = value;
+    }
+    else if (str == _T("pacsize"))
+    {
+        codec.pacsize = value;
+    }
+    else if (str == _T("channels"))
+    {
+        codec.channels = value;
+    }
+    else if (str == _T("rate"))
+    {
+        codec.rate = value;
+    }
+    TEST(_veCodecPtr->SetSendCodec(channel, codec) == 0,
+        _T("SetSendCodec(channel=%d, plname=%s, pltype=%d, plfreq=%d, rate=%d, pacsize=%d, channels=%d)"),
+        channel, CharToTchar(codec.plname, 32), codec.pltype, codec.plfreq, codec.rate, codec.pacsize, codec.channels);
+}
+
+void CWinTestDlg::OnCbnSelchangeComboCodec2()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+
+    CodecInst codec;
+    CComboBox* comboCodec(NULL);
+    comboCodec = (CComboBox*)GetDlgItem(IDC_COMBO_CODEC_2);
+    int index = comboCodec->GetCurSel();
+    _veCodecPtr->GetCodec(index, codec);
+    TEST(_veCodecPtr->SetSendCodec(channel, codec) == 0,
+        _T("SetSendCodec(channel=%d, plname=%s, pltype=%d, plfreq=%d, rate=%d, pacsize=%d, channels=%d)"),
+        channel, CharToTchar(codec.plname, 32), codec.pltype, codec.plfreq, codec.rate, codec.pacsize, codec.channels);
+}
+
+void CWinTestDlg::OnBnClickedButtonStartListen1()
+{
+    int ret1(0);
+    int ret2(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    int port = GetDlgItemInt(IDC_EDIT_RX_PORT_1);
+    TEST((ret1 = _veBasePtr->SetLocalReceiver(channel, port)) == 0, _T("SetLocalReceiver(channel=%d, port=%d)"), channel, port);
+    TEST((ret2 = _veBasePtr->StartReceive(channel)) == 0, _T("StartReceive(channel=%d)"), channel);
+    if (ret1 == 0 && ret2 == 0)
+    {
+        GetDlgItem(IDC_BUTTON_START_LISTEN_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_STOP_LISTEN_1)->EnableWindow(TRUE);
+    }
+}
+
+void CWinTestDlg::OnBnClickedButtonStartListen2()
+{
+    int ret1(0);
+    int ret2(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    int port = GetDlgItemInt(IDC_EDIT_RX_PORT_2);
+    TEST((ret1 = _veBasePtr->SetLocalReceiver(channel, port)) == 0, _T("SetLocalReceiver(channel=%d, port=%d)"), channel, port);
+    TEST((ret2 = _veBasePtr->StartReceive(channel)) == 0, _T("StartReceive(channel=%d)"), channel);
+    if (ret1 == 0 && ret2 == 0)
+    {
+        GetDlgItem(IDC_BUTTON_START_LISTEN_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_STOP_LISTEN_2)->EnableWindow(TRUE);
+    }
+}
+
+void CWinTestDlg::OnBnClickedButtonStopListen1()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    TEST((ret = _veBasePtr->StopReceive(channel)) == 0, _T("StopListen(channel=%d)"), channel);
+    if (ret == 0)
+    {
+        GetDlgItem(IDC_BUTTON_START_LISTEN_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_STOP_LISTEN_1)->EnableWindow(FALSE);
+    }
+}
+
+void CWinTestDlg::OnBnClickedButtonStopListen2()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    TEST((ret = _veBasePtr->StopReceive(channel)) == 0, _T("StopListen(channel=%d)"), channel);
+    if (ret == 0)
+    {
+        GetDlgItem(IDC_BUTTON_START_LISTEN_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_STOP_LISTEN_2)->EnableWindow(FALSE);
+    }
+}
+
+void CWinTestDlg::OnBnClickedButtonStartPlayout1()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    TEST((ret = _veBasePtr->StartPlayout(channel)) == 0, _T("StartPlayout(channel=%d)"), channel);
+    if (ret == 0)
+    {
+        GetDlgItem(IDC_BUTTON_START_PLAYOUT_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_STOP_PLAYOUT_1)->EnableWindow(TRUE);
+    }
+}
+
+void CWinTestDlg::OnBnClickedButtonStartPlayout2()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    TEST((ret = _veBasePtr->StartPlayout(channel)) == 0, _T("StartPlayout(channel=%d)"), channel);
+    if (ret == 0)
+    {
+        GetDlgItem(IDC_BUTTON_START_PLAYOUT_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_STOP_PLAYOUT_2)->EnableWindow(TRUE);
+    }
+}
+
+void CWinTestDlg::OnBnClickedButtonStopPlayout1()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    TEST((ret = _veBasePtr->StopPlayout(channel)) == 0, _T("StopPlayout(channel=%d)"), channel);
+    if (ret == 0)
+    {
+        GetDlgItem(IDC_BUTTON_START_PLAYOUT_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_STOP_PLAYOUT_1)->EnableWindow(FALSE);
+    }
+}
+
+void CWinTestDlg::OnBnClickedButtonStopPlayout2()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    TEST((ret = _veBasePtr->StopPlayout(channel)) == 0, _T("StopPlayout(channel=%d)"));
+    if (ret == 0)
+    {
+        GetDlgItem(IDC_BUTTON_START_PLAYOUT_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_STOP_PLAYOUT_2)->EnableWindow(FALSE);
+    }
+}
+
+void CWinTestDlg::OnBnClickedButtonStartSend1()
+{
+    UpdateData(TRUE);  // update IP address
+
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    if (!_externalTransport)
+    {
+        CString str;
+        int port = GetDlgItemInt(IDC_EDIT_TX_PORT_1);
+    TEST(_veBasePtr->SetSendDestination(channel, port, TcharToChar(_strComboIp1.GetBuffer(7), -1)) == 0,
+      _T("SetSendDestination(channel=%d, port=%d, ip=%s)"), channel, port, _strComboIp1.GetBuffer(7));
+        str.ReleaseBuffer();
+    }
+
+	//_veVideoSyncPtr->SetInitTimestamp(0,0);
+    // OnCbnSelchangeComboCodec1();
+
+    TEST((ret = _veBasePtr->StartSend(channel)) == 0, _T("StartSend(channel=%d)"), channel);
+    if (ret == 0)
+    {
+        GetDlgItem(IDC_BUTTON_START_SEND_1)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_STOP_SEND_1)->EnableWindow(TRUE);
+    }
+}
+
+void CWinTestDlg::OnBnClickedButtonStartSend2()
+{
+    UpdateData(TRUE);  // update IP address
+
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    if (!_externalTransport)
+    {
+        CString str;
+        int port = GetDlgItemInt(IDC_EDIT_TX_PORT_2);
+        TEST(_veBasePtr->SetSendDestination(channel, port, TcharToChar(_strComboIp2.GetBuffer(7), -1)) == 0,
+            _T("SetSendDestination(channel=%d, port=%d, ip=%s)"), channel, port, _strComboIp2.GetBuffer(7));
+        str.ReleaseBuffer();
+    }
+
+    // OnCbnSelchangeComboCodec2();
+
+    TEST((ret = _veBasePtr->StartSend(channel)) == 0, _T("StartSend(channel=%d)"), channel);
+    if (ret == 0)
+    {
+        GetDlgItem(IDC_BUTTON_START_SEND_2)->EnableWindow(FALSE);
+        GetDlgItem(IDC_BUTTON_STOP_SEND_2)->EnableWindow(TRUE);
+    }
+}
+
+void CWinTestDlg::OnBnClickedButtonStopSend1()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    TEST((ret = _veBasePtr->StopSend(channel)) == 0, _T("StopSend(channel=%d)"), channel);
+    if (ret == 0)
+    {
+        GetDlgItem(IDC_BUTTON_START_SEND_1)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_STOP_SEND_1)->EnableWindow(FALSE);
+    }
+}
+
+void CWinTestDlg::OnBnClickedButtonStopSend2()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    TEST((ret = _veBasePtr->StopSend(channel)) == 0, _T("StopSend(channel=%d)"), channel);
+    if (ret == 0)
+    {
+        GetDlgItem(IDC_BUTTON_START_SEND_2)->EnableWindow(TRUE);
+        GetDlgItem(IDC_BUTTON_STOP_SEND_2)->EnableWindow(FALSE);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckExtTrans1()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_EXT_TRANS_1);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        TEST((ret = _veNetworkPtr->RegisterExternalTransport(channel, *_transportPtr)) == 0,
+            _T("RegisterExternalTransport(channel=%d, transport=0x%x)"), channel, _transportPtr);
+    }
+    else
+    {
+        TEST((ret = _veNetworkPtr->DeRegisterExternalTransport(channel)) == 0,
+            _T("DeRegisterExternalTransport(channel=%d)"), channel);
+    }
+    if (ret == 0)
+    {
+        _externalTransport = enable;
+    }
+    else
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckExtTrans2()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_EXT_TRANS_2);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        TEST((ret = _veNetworkPtr->RegisterExternalTransport(channel, *_transportPtr)) == 0,
+            _T("RegisterExternalTransport(channel=%d, transport=0x%x)"), channel, _transportPtr);
+    }
+    else
+    {
+        TEST((ret = _veNetworkPtr->DeRegisterExternalTransport(channel)) == 0,
+            _T("DeRegisterExternalTransport(channel=%d)"), channel);
+    }
+    if (ret == 0)
+    {
+        _externalTransport = enable;
+    }
+    else
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckPlayFileIn1()
+{
+    std::string micFile = _long_audio_file_path + "audio_short16.pcm";
+
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_PLAY_FILE_IN_1);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        bool mix;
+        const bool loop(true);
+        const FileFormats format = kFileFormatPcm16kHzFile;
+        const float scale(1.0);
+
+        (_checkPlayFileIn1 %2 == 0) ? mix = true : mix = false;
+        TEST((ret = _veFilePtr->StartPlayingFileAsMicrophone(channel,
+            micFile.c_str(), loop, mix, format, scale) == 0),
+            _T("StartPlayingFileAsMicrophone(channel=%d, file=%s, loop=%d, ")
+            _T("mix=%d, format=%d, scale=%2.1f)"),
+            channel, CharToTchar(micFile.c_str(), -1),
+            loop, mix, format, scale);
+        _checkPlayFileIn1++;
+    }
+    else
+    {
+        TEST((ret = _veFilePtr->StopPlayingFileAsMicrophone(channel) == 0),
+            _T("StopPlayingFileAsMicrophone(channel=%d)"), channel);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckPlayFileIn2()
+{
+    std::string micFile = _long_audio_file_path + "audio_long16.pcm";
+
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_PLAY_FILE_IN_2);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        bool mix;
+        const bool loop(true);
+        const FileFormats format = kFileFormatPcm16kHzFile;
+        const float scale(1.0);
+
+        (_checkPlayFileIn2 %2 == 0) ? mix = true : mix = false;
+        TEST((ret = _veFilePtr->StartPlayingFileAsMicrophone(channel,
+            micFile.c_str(), loop, mix, format, scale) == 0),
+            _T("StartPlayingFileAsMicrophone(channel=%d, file=%s, loop=%d, ")
+            _T("mix=%d, format=%d, scale=%2.1f)"),
+            channel, CharToTchar(micFile.c_str(), -1),
+            loop, mix, format, scale);
+        _checkPlayFileIn2++;
+    }
+    else
+    {
+        TEST((ret = _veFilePtr->StopPlayingFileAsMicrophone(channel) == 0),
+            _T("StopPlayingFileAsMicrophone(channel=%d)"), channel);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckPlayFileOut1()
+{
+    const FileFormats formats[8]  = {{kFileFormatPcm16kHzFile},
+                                          {kFileFormatWavFile},
+                                          {kFileFormatWavFile},
+                                          {kFileFormatWavFile},
+                                          {kFileFormatWavFile},
+                                          {kFileFormatWavFile},
+                                          {kFileFormatWavFile},
+                                          {kFileFormatWavFile}};
+    // File path is relative to the location of 'voice_engine.gyp'.
+    const char spkrFiles[8][64] = {{"audio_short16.pcm"},
+                                   {"audio_tiny8.wav"},
+                                   {"audio_tiny11.wav"},
+                                   {"audio_tiny16.wav"},
+                                   {"audio_tiny22.wav"},
+                                   {"audio_tiny32.wav"},
+                                   {"audio_tiny44.wav"},
+                                   {"audio_tiny48.wav"}};
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_PLAY_FILE_OUT_1);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        const bool loop(true);
+        const float volumeScaling(1.0);
+        const int startPointMs(0);
+        const int stopPointMs(0);
+        const FileFormats format = formats[_checkPlayFileOut1 % 8];
+        std::string spkrFile = _long_audio_file_path +
+                               spkrFiles[_checkPlayFileOut1 % 8];
+
+        CString str;
+        if (_checkPlayFileOut1 % 8 == 0)
+        {
+            str = _T("kFileFormatPcm16kHzFile");
+        }
+        else
+        {
+            str = _T("kFileFormatWavFile");
+        }
+        // (_checkPlayFileOut1 %2 == 0) ? mix = true : mix = false;
+        TEST((ret = _veFilePtr->StartPlayingFileLocally(channel,
+            spkrFile.c_str(), loop, format, volumeScaling,
+            startPointMs,stopPointMs) == 0),
+            _T("StartPlayingFileLocally(channel=%d, file=%s, loop=%d, ")
+            _T("format=%s, scale=%2.1f, start=%d, stop=%d)"),
+            channel, CharToTchar(spkrFile.c_str(), -1),
+            loop, str, volumeScaling, startPointMs, stopPointMs);
+        _checkPlayFileOut1++;
+    }
+    else
+    {
+        TEST((ret = _veFilePtr->StopPlayingFileLocally(channel) == 0),
+            _T("StopPlayingFileLocally(channel=%d)"), channel);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckPlayFileOut2()
+{
+    std::string spkrFile = _long_audio_file_path + "audio_long16.pcm";
+
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_PLAY_FILE_OUT_2);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        const bool loop(true);
+        const FileFormats format = kFileFormatPcm16kHzFile;
+        const float volumeScaling(1.0);
+        const int startPointMs(0);
+        const int stopPointMs(0);
+
+        // (_checkPlayFileOut2 %2 == 0) ? mix = true : mix = false;
+        TEST((ret = _veFilePtr->StartPlayingFileLocally(channel,
+            spkrFile.c_str(), loop, format, volumeScaling,
+            startPointMs,stopPointMs) == 0),
+            _T("StartPlayingFileLocally(channel=%d, file=%s, loop=%d, ")
+            _T("format=%d, scale=%2.1f, start=%d, stop=%d)"),
+            channel, CharToTchar(spkrFile.c_str(), -1),
+            loop, format, volumeScaling, startPointMs, stopPointMs);
+        // _checkPlayFileIn2++;
+    }
+    else
+    {
+        TEST((ret = _veFilePtr->StopPlayingFileLocally(channel) == 0),
+            _T("StopPlayingFileLocally(channel=%d)"), channel);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckExtMediaIn1()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* buttonExtTrans = (CButton*)GetDlgItem(IDC_CHECK_EXT_MEDIA_IN_1);
+    int check = buttonExtTrans->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        TEST(_veExternalMediaPtr->RegisterExternalMediaProcessing(channel, kRecordingPerChannel, *_externalMediaPtr) == 0,
+            _T("RegisterExternalMediaProcessing(channel=%d, kRecordingPerChannel, processObject=0x%x)"), channel, _externalMediaPtr);
+    }
+    else
+    {
+        TEST(_veExternalMediaPtr->DeRegisterExternalMediaProcessing(channel, kRecordingPerChannel) == 0,
+            _T("DeRegisterExternalMediaProcessing(channel=%d, kRecordingPerChannel)"), channel);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckExtMediaIn2()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    CButton* buttonExtTrans = (CButton*)GetDlgItem(IDC_CHECK_EXT_MEDIA_IN_2);
+    int check = buttonExtTrans->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        TEST(_veExternalMediaPtr->RegisterExternalMediaProcessing(channel, kRecordingPerChannel, *_externalMediaPtr) == 0,
+            _T("RegisterExternalMediaProcessing(channel=%d, kRecordingPerChannel, processObject=0x%x)"), channel, _externalMediaPtr);
+    }
+    else
+    {
+        TEST(_veExternalMediaPtr->DeRegisterExternalMediaProcessing(channel, kRecordingPerChannel) == 0,
+            _T("DeRegisterExternalMediaProcessing(channel=%d, kRecordingPerChannel)"), channel);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckExtMediaOut1()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* buttonExtTrans = (CButton*)GetDlgItem(IDC_CHECK_EXT_MEDIA_OUT_1);
+    int check = buttonExtTrans->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        TEST(_veExternalMediaPtr->RegisterExternalMediaProcessing(channel, kPlaybackPerChannel, *_externalMediaPtr) == 0,
+            _T("RegisterExternalMediaProcessing(channel=%d, kPlaybackPerChannel, processObject=0x%x)"), channel, _externalMediaPtr);
+    }
+    else
+    {
+        TEST(_veExternalMediaPtr->DeRegisterExternalMediaProcessing(channel, kPlaybackPerChannel) == 0,
+            _T("DeRegisterExternalMediaProcessing(channel=%d, kPlaybackPerChannel)"), channel);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckExtMediaOut2()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    CButton* buttonExtTrans = (CButton*)GetDlgItem(IDC_CHECK_EXT_MEDIA_OUT_2);
+    int check = buttonExtTrans->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        TEST(_veExternalMediaPtr->RegisterExternalMediaProcessing(channel, kPlaybackPerChannel, *_externalMediaPtr) == 0,
+            _T("RegisterExternalMediaProcessing(channel=%d, kPlaybackPerChannel, processObject=0x%x)"), channel, _externalMediaPtr);
+    }
+    else
+    {
+        TEST(_veExternalMediaPtr->DeRegisterExternalMediaProcessing(channel, kPlaybackPerChannel) == 0,
+            _T("DeRegisterExternalMediaProcessing(channel=%d, kPlaybackPerChannel)"), channel);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckVad1()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_VAD_1);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        CString str;
+        VadModes mode(kVadConventional);
+        if (_checkVAD1 % 4 == 0)
+        {
+            mode = kVadConventional;
+            str = _T("kVadConventional");
+        }
+        else if (_checkVAD1 % 4 == 1)
+        {
+            mode = kVadAggressiveLow;
+            str = _T("kVadAggressiveLow");
+        }
+        else if (_checkVAD1 % 4 == 2)
+        {
+            mode = kVadAggressiveMid;
+            str = _T("kVadAggressiveMid");
+        }
+        else if (_checkVAD1 % 4 == 3)
+        {
+            mode = kVadAggressiveHigh;
+            str = _T("kVadAggressiveHigh");
+        }
+        const bool disableDTX(false);
+        TEST((ret = _veCodecPtr->SetVADStatus(channel, true, mode, disableDTX) == 0),
+            _T("SetVADStatus(channel=%d, enable=%d, mode=%s, disableDTX=%d)"), channel, enable, str, disableDTX);
+        _checkVAD1++;
+    }
+    else
+    {
+        TEST((ret = _veCodecPtr->SetVADStatus(channel, false)) == 0, _T("SetVADStatus(channel=%d, enable=%d)"), channel, false);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckVad2()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_VAD_2);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        CString str;
+        VadModes mode(kVadConventional);
+        if (_checkVAD2 % 4 == 0)
+        {
+            mode = kVadConventional;
+            str = _T("kVadConventional");
+        }
+        else if (_checkVAD2 % 4 == 1)
+        {
+            mode = kVadAggressiveLow;
+            str = _T("kVadAggressiveLow");
+        }
+        else if (_checkVAD2 % 4 == 2)
+        {
+            mode = kVadAggressiveMid;
+            str = _T("kVadAggressiveMid");
+        }
+        else if (_checkVAD2 % 4 == 3)
+        {
+            mode = kVadAggressiveHigh;
+            str = _T("kVadAggressiveHigh");
+        }
+        const bool disableDTX(false);
+        TEST((ret = _veCodecPtr->SetVADStatus(channel, true, mode, disableDTX)) == 0,
+            _T("SetVADStatus(channel=%d, enable=%d, mode=%s, disableDTX=%d)"), channel, enable, str, disableDTX);
+        _checkVAD2++;
+    }
+    else
+    {
+        TEST((ret = _veCodecPtr->SetVADStatus(channel, false) == 0), _T("SetVADStatus(channel=%d, enable=%d)"), channel, false);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckMuteIn1()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* buttonMute = (CButton*)GetDlgItem(IDC_CHECK_MUTE_IN_1);
+    int check = buttonMute->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    TEST(_veVolumeControlPtr->SetInputMute(channel, enable) == 0,
+        _T("SetInputMute(channel=%d, enable=%d)"), channel, enable);
+}
+
+void CWinTestDlg::OnBnClickedCheckMuteIn2()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    CButton* buttonMute = (CButton*)GetDlgItem(IDC_CHECK_MUTE_IN_2);
+    int check = buttonMute->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    TEST(_veVolumeControlPtr->SetInputMute(channel, enable) == 0,
+        _T("SetInputMute(channel=%d, enable=%d)"), channel, enable);
+}
+
+void CWinTestDlg::OnBnClickedCheckSrtpTx1()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_SRTP_TX_1);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    bool useForRTCP = false;
+    if (enable)
+    {
+        (_checkSrtpTx1++ %2 == 0) ? useForRTCP = false : useForRTCP = true;
+        TEST((ret = _veEncryptionPtr->EnableSRTPSend(channel,
+            kCipherAes128CounterMode, 30, kAuthHmacSha1, 20, 4, kEncryptionAndAuthentication, key, useForRTCP)) == 0,
+            _T("EnableSRTPSend(channel=%d, kCipherAes128CounterMode, 30, kAuthHmacSha1, 20, 4, kEncryptionAndAuthentication, key, useForRTCP=%d)"),
+            channel, useForRTCP);
+    }
+    else
+    {
+        TEST((ret = _veEncryptionPtr->DisableSRTPSend(channel) == 0), _T("DisableSRTPSend(channel=%d)"), channel);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckSrtpTx2()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_SRTP_TX_2);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    bool useForRTCP = false;
+    if (enable)
+    {
+        (_checkSrtpTx2++ %2 == 0) ? useForRTCP = false : useForRTCP = true;
+        TEST((ret = _veEncryptionPtr->EnableSRTPSend(channel,
+            kCipherAes128CounterMode, 30, kAuthHmacSha1, 20, 4, kEncryptionAndAuthentication, key, useForRTCP)) == 0,
+            _T("EnableSRTPSend(channel=%d, kCipherAes128CounterMode, 30, kAuthHmacSha1, 20, 4, kEncryptionAndAuthentication, key, useForRTCP=%d)"),
+            channel, useForRTCP);
+    }
+    else
+    {
+        TEST((ret = _veEncryptionPtr->DisableSRTPSend(channel) == 0), _T("DisableSRTPSend(channel=%d)"), channel);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckSrtpRx1()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_SRTP_RX_1);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    bool useForRTCP(false);
+    if (enable)
+    {
+        (_checkSrtpRx1++ %2 == 0) ? useForRTCP = false : useForRTCP = true;
+        TEST((ret = _veEncryptionPtr->EnableSRTPReceive(channel,
+            kCipherAes128CounterMode, 30, kAuthHmacSha1, 20, 4, kEncryptionAndAuthentication, key, useForRTCP)) == 0,
+            _T("EnableSRTPReceive(channel=%d, kCipherAes128CounterMode, 30, kAuthHmacSha1, 20, 4, kEncryptionAndAuthentication, key, useForRTCP=%d)"),
+            channel, useForRTCP);
+    }
+    else
+    {
+        TEST((ret = _veEncryptionPtr->DisableSRTPReceive(channel) == 0), _T("DisableSRTPReceive(channel=%d)"), channel);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckSrtpRx2()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_SRTP_RX_2);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    bool useForRTCP(false);
+    if (enable)
+    {
+        (_checkSrtpRx2++ %2 == 0) ? useForRTCP = false : useForRTCP = true;
+        TEST((ret = _veEncryptionPtr->EnableSRTPReceive(channel,
+            kCipherAes128CounterMode, 30, kAuthHmacSha1, 20, 4, kEncryptionAndAuthentication, key, useForRTCP)) == 0,
+            _T("EnableSRTPReceive(channel=%d, kCipherAes128CounterMode, 30, kAuthHmacSha1, 20, 4, kEncryptionAndAuthentication, key, useForRTCP=%d)"),
+            channel, useForRTCP);
+    }
+    else
+    {
+        TEST((ret = _veEncryptionPtr->DisableSRTPReceive(channel)) == 0, _T("DisableSRTPReceive(channel=%d)"), channel);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckExtEncryption1()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_EXT_ENCRYPTION_1);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        TEST((ret = _veEncryptionPtr->RegisterExternalEncryption(channel, *_encryptionPtr)) == 0,
+            _T("RegisterExternalEncryption(channel=%d, encryption=0x%x)"), channel, _encryptionPtr);
+    }
+    else
+    {
+        TEST((ret = _veEncryptionPtr->DeRegisterExternalEncryption(channel)) == 0,
+            _T("DeRegisterExternalEncryption(channel=%d)"), channel);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckExtEncryption2()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_EXT_ENCRYPTION_2);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        TEST((ret = _veEncryptionPtr->RegisterExternalEncryption(channel, *_encryptionPtr)) == 0,
+            _T("RegisterExternalEncryption(channel=%d, encryption=0x%x)"), channel, _encryptionPtr);
+    }
+    else
+    {
+        TEST((ret = _veEncryptionPtr->DeRegisterExternalEncryption(channel)) == 0,
+            _T("DeRegisterExternalEncryption(channel=%d)"), channel);
+    }
+    if (ret == -1)
+    {
+        // restore inital state since API call failed
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+    }
+}
+
+void CWinTestDlg::OnBnClickedButtonDtmf1()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CTelephonyEvent dlgTelephoneEvent(_vePtr, channel, this);
+    dlgTelephoneEvent.DoModal();
+}
+
+void CWinTestDlg::OnBnClickedButtonDtmf2()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    CTelephonyEvent dlgTelephoneEvent(_vePtr, channel, this);
+    dlgTelephoneEvent.DoModal();
+}
+
+void CWinTestDlg::OnBnClickedCheckConference1()
+{
+    // Not supported yet
+}
+
+void CWinTestDlg::OnBnClickedCheckConference2()
+{
+   // Not supported yet
+}
+
+void CWinTestDlg::OnBnClickedCheckOnHold1()
+{
+    SHORT shiftKeyIsPressed = ::GetAsyncKeyState(VK_SHIFT);
+
+    CString str;
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_ON_HOLD_1);
+    int check = button->GetCheck();
+
+    if (shiftKeyIsPressed)
+    {
+        bool enabled(false);
+        OnHoldModes mode(kHoldSendAndPlay);
+        TEST(_veBasePtr->GetOnHoldStatus(channel, enabled, mode) == 0,
+            _T("GetOnHoldStatus(channel=%d, enabled=?, mode=?)"), channel);
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+
+        switch (mode)
+        {
+        case kHoldSendAndPlay:
+            str = _T("kHoldSendAndPlay");
+            break;
+        case kHoldSendOnly:
+            str = _T("kHoldSendOnly");
+            break;
+        case kHoldPlayOnly:
+            str = _T("kHoldPlayOnly");
+            break;
+        default:
+            break;
+        }
+        PRINT_GET_RESULT(_T("enabled=%d, mode=%s"), enabled, str);
+        return;
+    }
+
+    int ret(0);
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        OnHoldModes mode(kHoldSendAndPlay);
+        if (_checkOnHold1 % 3 == 0)
+        {
+            mode = kHoldSendAndPlay;
+            str = _T("kHoldSendAndPlay");
+        }
+        else if (_checkOnHold1 % 3 == 1)
+        {
+            mode = kHoldSendOnly;
+            str = _T("kHoldSendOnly");
+        }
+        else if (_checkOnHold1 % 3 == 2)
+        {
+            mode = kHoldPlayOnly;
+            str = _T("kHoldPlayOnly");
+        }
+        TEST((ret = _veBasePtr->SetOnHoldStatus(channel, enable, mode)) == 0,
+            _T("SetOnHoldStatus(channel=%d, enable=%d, mode=%s)"), channel, enable, str);
+        _checkOnHold1++;
+    }
+    else
+    {
+        TEST((ret = _veBasePtr->SetOnHoldStatus(channel, enable)) == 0,
+            _T("SetOnHoldStatus(channel=%d, enable=%d)"), channel, enable);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckOnHold2()
+{
+    int ret(0);
+    int channel = GetDlgItemInt(IDC_EDIT_2);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_ON_HOLD_2);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        CString str;
+        OnHoldModes mode(kHoldSendAndPlay);
+        if (_checkOnHold1 % 3 == 0)
+        {
+            mode = kHoldSendAndPlay;
+            str = _T("kHoldSendAndPlay");
+        }
+        else if (_checkOnHold1 % 3 == 1)
+        {
+            mode = kHoldSendOnly;
+            str = _T("kHoldSendOnly");
+        }
+        else if (_checkOnHold1 % 3 == 2)
+        {
+            mode = kHoldPlayOnly;
+            str = _T("kHoldPlayOnly");
+        }
+        TEST((ret = _veBasePtr->SetOnHoldStatus(channel, enable, mode)) == 0,
+            _T("SetOnHoldStatus(channel=%d, enable=%d, mode=%s)"), channel, enable, str);
+        _checkOnHold1++;
+    }
+    else
+    {
+        TEST((ret = _veBasePtr->SetOnHoldStatus(channel, enable)) == 0,
+            _T("SetOnHoldStatus(channel=%d, enable=%d)"), channel, enable);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckDelayEstimate1()
+{
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_DELAY_ESTIMATE_1);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+
+    if (enable)
+    {
+        _delayEstimate1 = true;
+        SetDlgItemInt(IDC_EDIT_DELAY_ESTIMATE_1, 0);
+    }
+    else
+    {
+        _delayEstimate1 = false;
+        SetDlgItemText(IDC_EDIT_DELAY_ESTIMATE_1, _T(""));
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckRxvad()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_RXVAD);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+
+    if (enable)
+    {
+        _rxVad = true;
+        _veApmPtr->RegisterRxVadObserver(channel, *_rxVadObserverPtr);
+        SetDlgItemInt(IDC_EDIT_RXVAD, 0);
+    }
+    else
+    {
+        _rxVad = false;
+        _veApmPtr->DeRegisterRxVadObserver(channel);
+        SetDlgItemText(IDC_EDIT_RXVAD, _T(""));
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckAgc1()
+{
+    SHORT shiftKeyIsPressed = ::GetAsyncKeyState(VK_SHIFT);
+
+    CString str;
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_AGC_1);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+
+    if (shiftKeyIsPressed)
+    {
+        bool enabled(false);
+        AgcModes mode(kAgcAdaptiveDigital);
+        TEST(_veApmPtr->GetRxAgcStatus(channel, enabled, mode) == 0,
+            _T("GetRxAgcStatus(channel=%d, enabled=?, mode=?)"), channel);
+        button->SetCheck((check == BST_CHECKED) ? BST_UNCHECKED : BST_CHECKED);
+
+        switch (mode)
+        {
+        case kAgcAdaptiveAnalog:
+            str = _T("kAgcAdaptiveAnalog");
+            break;
+        case kAgcAdaptiveDigital:
+            str = _T("kAgcAdaptiveDigital");
+            break;
+        case kAgcFixedDigital:
+            str = _T("kAgcFixedDigital");
+            break;
+        default:
+            break;
+        }
+        PRINT_GET_RESULT(_T("enabled=%d, mode=%s"), enabled, str);
+        return;
+    }
+
+    if (enable)
+    {
+        CString str;
+        AgcModes mode(kAgcDefault);
+        if (_checkAGC1 % 3 == 0)
+        {
+            mode = kAgcDefault;
+            str = _T("kAgcDefault");
+        }
+        else if (_checkAGC1 % 3 == 1)
+        {
+            mode = kAgcAdaptiveDigital;
+            str = _T("kAgcAdaptiveDigital");
+        }
+        else if (_checkAGC1 % 3 == 2)
+        {
+            mode = kAgcFixedDigital;
+            str = _T("kAgcFixedDigital");
+        }
+        TEST(_veApmPtr->SetRxAgcStatus(channel, true, mode) == 0, _T("SetRxAgcStatus(channel=%d, enable=%d, %s)"), channel, enable, str);
+        _checkAGC1++;
+    }
+    else
+    {
+        TEST(_veApmPtr->SetRxAgcStatus(channel, false, kAgcUnchanged) == 0, _T("SetRxAgcStatus(channel=%d, enable=%d)"), channel, enable);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckNs1()
+{
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    CButton* buttonNS = (CButton*)GetDlgItem(IDC_CHECK_NS_1);
+    int check = buttonNS->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        CString str;
+        NsModes mode(kNsDefault);
+        if (_checkNS1 % 6 == 0)
+        {
+            mode = kNsDefault;
+            str = _T("kNsDefault");
+        }
+        else if (_checkNS1 % 6 == 1)
+        {
+            mode = kNsConference;
+            str = _T("kNsConference");
+        }
+        else if (_checkNS1 % 6 == 2)
+        {
+            mode = kNsLowSuppression;
+            str = _T("kNsLowSuppression");
+        }
+        else if (_checkNS1 % 6 == 3)
+        {
+            mode = kNsModerateSuppression;
+            str = _T("kNsModerateSuppression");
+        }
+        else if (_checkNS1 % 6 == 4)
+        {
+            mode = kNsHighSuppression;
+            str = _T("kNsHighSuppression");
+        }
+        else if (_checkNS1 % 6 == 5)
+        {
+            mode = kNsVeryHighSuppression;
+            str = _T("kNsVeryHighSuppression");
+        }
+        TEST(_veApmPtr->SetRxNsStatus(channel, true, mode) == 0, _T("SetRxNsStatus(channel=%d, enable=%d, %s)"), channel, enable, str);
+        _checkNS1++;
+    }
+    else
+    {
+        TEST(_veApmPtr->SetRxNsStatus(channel, false, kNsUnchanged) == 0, _T("SetRxNsStatus(channel=%d, enable=%d)"), enable, channel);
+    }
+}
+
+// ----------------------------------------------------------------------------
+//                         Channel-independent Operations
+// ----------------------------------------------------------------------------
+
+void CWinTestDlg::OnBnClickedCheckPlayFileIn()
+{
+    std::string micFile = _long_audio_file_path + "audio_short16.pcm";
+    // std::string micFile = _long_audio_file_path + "audio_long16noise.pcm";
+
+    int channel(-1);
+    CButton* buttonExtTrans = (CButton*)GetDlgItem(IDC_CHECK_PLAY_FILE_IN);
+    int check = buttonExtTrans->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        bool mix;
+        const bool loop(true);
+        const FileFormats format = kFileFormatPcm16kHzFile;
+        const float scale(1.0);
+
+        (_checkPlayFileIn %2 == 0) ? mix = true : mix = false;
+        TEST(_veFilePtr->StartPlayingFileAsMicrophone(channel,
+            micFile.c_str(), loop, mix, format, scale) == 0,
+            _T("StartPlayingFileAsMicrophone(channel=%d, file=%s, ")
+            _T("loop=%d, mix=%d, format=%d, scale=%2.1f)"),
+            channel, CharToTchar(micFile.c_str(), -1),
+            loop, mix, format, scale);
+        _checkPlayFileIn++;
+    }
+    else
+    {
+        TEST(_veFilePtr->StopPlayingFileAsMicrophone(channel) == 0,
+            _T("StopPlayingFileAsMicrophone(channel=%d)"), channel);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckRecMic()
+{
+    std::string micFile = webrtc::test::OutputPath() +
+                          "rec_mic_mono_16kHz.pcm";
+
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_REC_MIC);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        TEST(_veFilePtr->StartRecordingMicrophone(micFile.c_str(), NULL) == 0,
+            _T("StartRecordingMicrophone(file=%s)"),
+            CharToTchar(micFile.c_str(), -1));
+    }
+    else
+    {
+        TEST(_veFilePtr->StopRecordingMicrophone() == 0,
+            _T("StopRecordingMicrophone()"));
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckAgc()
+{
+    CButton* buttonAGC = (CButton*)GetDlgItem(IDC_CHECK_AGC);
+    int check = buttonAGC->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        CString str;
+        AgcModes mode(kAgcDefault);
+        if (_checkAGC % 4 == 0)
+        {
+            mode = kAgcDefault;
+            str = _T("kAgcDefault");
+        }
+        else if (_checkAGC % 4 == 1)
+        {
+            mode = kAgcAdaptiveAnalog;
+            str = _T("kAgcAdaptiveAnalog");
+        }
+        else if (_checkAGC % 4 == 2)
+        {
+            mode = kAgcAdaptiveDigital;
+            str = _T("kAgcAdaptiveDigital");
+        }
+        else if (_checkAGC % 4 == 3)
+        {
+            mode = kAgcFixedDigital;
+            str = _T("kAgcFixedDigital");
+        }
+        TEST(_veApmPtr->SetAgcStatus(true, mode) == 0, _T("SetAgcStatus(enable=%d, %s)"), enable, str);
+        _checkAGC++;
+    }
+    else
+    {
+        TEST(_veApmPtr->SetAgcStatus(false, kAgcUnchanged) == 0, _T("SetAgcStatus(enable=%d)"), enable);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckNs()
+{
+    CButton* buttonNS = (CButton*)GetDlgItem(IDC_CHECK_NS);
+    int check = buttonNS->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        CString str;
+        NsModes mode(kNsDefault);
+        if (_checkNS % 6 == 0)
+        {
+            mode = kNsDefault;
+            str = _T("kNsDefault");
+        }
+        else if (_checkNS % 6 == 1)
+        {
+            mode = kNsConference;
+            str = _T("kNsConference");
+        }
+        else if (_checkNS % 6 == 2)
+        {
+            mode = kNsLowSuppression;
+            str = _T("kNsLowSuppression");
+        }
+        else if (_checkNS % 6 == 3)
+        {
+            mode = kNsModerateSuppression;
+            str = _T("kNsModerateSuppression");
+        }
+        else if (_checkNS % 6 == 4)
+        {
+            mode = kNsHighSuppression;
+            str = _T("kNsHighSuppression");
+        }
+        else if (_checkNS % 6 == 5)
+        {
+            mode = kNsVeryHighSuppression;
+            str = _T("kNsVeryHighSuppression");
+        }
+        TEST(_veApmPtr->SetNsStatus(true, mode) == 0, _T("SetNsStatus(enable=%d, %s)"), enable, str);
+        _checkNS++;
+    }
+    else
+    {
+        TEST(_veApmPtr->SetNsStatus(false, kNsUnchanged) == 0, _T("SetNsStatus(enable=%d)"), enable);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckEc()
+{
+    CButton* buttonEC = (CButton*)GetDlgItem(IDC_CHECK_EC);
+    int check = buttonEC->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        CString str;
+        EcModes mode(kEcDefault);
+        if (_checkEC % 4 == 0)
+        {
+            mode = kEcDefault;
+            str = _T("kEcDefault");
+        }
+        else if (_checkEC % 4 == 1)
+        {
+            mode = kEcConference;
+            str = _T("kEcConference");
+        }
+        else if (_checkEC % 4 == 2)
+        {
+            mode = kEcAec;
+            str = _T("kEcAec");
+        }
+        else if (_checkEC % 4 == 3)
+        {
+            mode = kEcAecm;
+            str = _T("kEcAecm");
+        }
+        TEST(_veApmPtr->SetEcStatus(true, mode) == 0, _T("SetEcStatus(enable=%d, %s)"), enable, str);
+        _checkEC++;
+    }
+    else
+    {
+        TEST(_veApmPtr->SetEcStatus(false, kEcUnchanged) == 0, _T("SetEcStatus(enable=%d)"), enable);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckMuteIn()
+{
+    CButton* buttonMute = (CButton*)GetDlgItem(IDC_CHECK_MUTE_IN);
+    int check = buttonMute->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    const int channel(-1);
+    TEST(_veVolumeControlPtr->SetInputMute(channel, enable) == 0,
+        _T("SetInputMute(channel=%d, enable=%d)"), channel, enable);
+}
+
+void CWinTestDlg::OnBnClickedCheckExtMediaIn()
+{
+    const int channel(-1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_EXT_MEDIA_IN);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        TEST(_veExternalMediaPtr->RegisterExternalMediaProcessing(channel, kRecordingAllChannelsMixed, *_externalMediaPtr) == 0,
+            _T("RegisterExternalMediaProcessing(channel=%d, kRecordingAllChannelsMixed, processObject=0x%x)"), channel, _externalMediaPtr);
+    }
+    else
+    {
+        TEST(_veExternalMediaPtr->DeRegisterExternalMediaProcessing(channel, kRecordingAllChannelsMixed) == 0,
+            _T("DeRegisterExternalMediaProcessing(channel=%d, kRecordingAllChannelsMixed)"), channel);
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckExtMediaOut()
+{
+    const int channel(-1);
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_EXT_MEDIA_OUT);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    if (enable)
+    {
+        TEST(_veExternalMediaPtr->RegisterExternalMediaProcessing(channel, kPlaybackAllChannelsMixed, *_externalMediaPtr) == 0,
+            _T("RegisterExternalMediaProcessing(channel=%d, kPlaybackAllChannelsMixed, processObject=0x%x)"), channel, _externalMediaPtr);
+    }
+    else
+    {
+        TEST(_veExternalMediaPtr->DeRegisterExternalMediaProcessing(channel, kPlaybackAllChannelsMixed) == 0,
+            _T("DeRegisterExternalMediaProcessing(channel=%d, kPlaybackAllChannelsMixed)"), channel);
+    }
+}
+
+void CWinTestDlg::OnCbnSelchangeComboRecDevice()
+{
+    CComboBox* comboCodec(NULL);
+    comboCodec = (CComboBox*)GetDlgItem(IDC_COMBO_REC_DEVICE);
+    int index = comboCodec->GetCurSel();
+    TEST(_veHardwarePtr->SetRecordingDevice(index) == 0,
+        _T("SetRecordingDevice(index=%d)"), index);
+}
+
+void CWinTestDlg::OnCbnSelchangeComboPlayDevice()
+{
+    CComboBox* comboCodec(NULL);
+    comboCodec = (CComboBox*)GetDlgItem(IDC_COMBO_PLAY_DEVICE);
+    int index = comboCodec->GetCurSel();
+    TEST(_veHardwarePtr->SetPlayoutDevice(index) == 0,
+        _T("SetPlayoutDevice(index=%d)"), index);
+}
+
+void CWinTestDlg::OnNMReleasedcaptureSliderInputVolume(NMHDR *pNMHDR, LRESULT *pResult)
+{
+    CSliderCtrl* slider = (CSliderCtrl*)GetDlgItem(IDC_SLIDER_INPUT_VOLUME);
+    slider->SetRangeMin(0);
+    slider->SetRangeMax(255);
+    int pos = slider->GetPos();
+
+    TEST(_veVolumeControlPtr->SetMicVolume(pos) == 0, _T("SetMicVolume(volume=%d)"), pos);
+
+    *pResult = 0;
+}
+
+void CWinTestDlg::OnNMReleasedcaptureSliderOutputVolume(NMHDR *pNMHDR, LRESULT *pResult)
+{
+    CSliderCtrl* slider = (CSliderCtrl*)GetDlgItem(IDC_SLIDER_OUTPUT_VOLUME);
+    slider->SetRangeMin(0);
+    slider->SetRangeMax(255);
+    int pos = slider->GetPos();
+
+    TEST(_veVolumeControlPtr->SetSpeakerVolume(pos) == 0, _T("SetSpeakerVolume(volume=%d)"), pos);
+
+    *pResult = 0;
+}
+
+void CWinTestDlg::OnNMReleasedcaptureSliderPanLeft(NMHDR *pNMHDR, LRESULT *pResult)
+{
+    CSliderCtrl* slider = (CSliderCtrl*)GetDlgItem(IDC_SLIDER_PAN_LEFT);
+    slider->SetRange(0,10);
+    int pos = 10 - slider->GetPos();    // 0 <=> lower end, 10 <=> upper end
+
+    float left(0.0);
+    float right(0.0);
+    const int channel(-1);
+
+    // Only left channel will be modified
+    _veVolumeControlPtr->GetOutputVolumePan(channel, left, right);
+
+    left = (float)((float)pos/10.0f);
+
+    TEST(_veVolumeControlPtr->SetOutputVolumePan(channel, left, right) == 0,
+        _T("SetOutputVolumePan(channel=%d, left=%2.1f, right=%2.1f)"), channel, left, right);
+
+    *pResult = 0;
+}
+
+void CWinTestDlg::OnNMReleasedcaptureSliderPanRight(NMHDR *pNMHDR, LRESULT *pResult)
+{
+    CSliderCtrl* slider = (CSliderCtrl*)GetDlgItem(IDC_SLIDER_PAN_RIGHT);
+    slider->SetRange(0,10);
+    int pos = 10 - slider->GetPos();    // 0 <=> lower end, 10 <=> upper end
+
+    float left(0.0);
+    float right(0.0);
+    const int channel(-1);
+
+    // Only right channel will be modified
+    _veVolumeControlPtr->GetOutputVolumePan(channel, left, right);
+
+    right = (float)((float)pos/10.0f);
+
+    TEST(_veVolumeControlPtr->SetOutputVolumePan(channel, left, right) == 0,
+        _T("SetOutputVolumePan(channel=%d, left=%2.1f, right=%2.1f)"), channel, left, right);
+
+    *pResult = 0;
+}
+
+void CWinTestDlg::OnBnClickedButtonVersion()
+{
+    if (_veBasePtr)
+    {
+        char version[1024];
+        if (_veBasePtr->GetVersion(version) == 0)
+        {
+            AfxMessageBox(CString(version), MB_OK);
+        }
+        else
+        {
+            AfxMessageBox(_T("FAILED!"), MB_OK);
+        }
+    }
+}
+
+void CWinTestDlg::OnBnClickedCheckRecCall()
+{
+    // Not supported
+}
+
+void CWinTestDlg::OnBnClickedCheckTypingDetection()
+{
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_TYPING_DETECTION);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    TEST(_veApmPtr->SetTypingDetectionStatus(enable) == 0, _T("SetTypingDetectionStatus(enable=%d)"), enable);
+}
+
+void CWinTestDlg::OnBnClickedCheckFEC()
+{
+    CButton* button = (CButton*)GetDlgItem(IDC_CHECK_FEC);
+    int channel = GetDlgItemInt(IDC_EDIT_1);
+    int check = button->GetCheck();
+    const bool enable = (check == BST_CHECKED);
+    TEST(_veRtpRtcpPtr->SetFECStatus(channel, enable) == 0, _T("SetFECStatus(enable=%d)"), enable);
+}
+
+// ----------------------------------------------------------------------------
+//                                   Message Handlers
+// ----------------------------------------------------------------------------
+
+void CWinTestDlg::OnTimer(UINT_PTR nIDEvent)
+{
+    CString str;
+
+    unsigned int svol(0);
+    unsigned int mvol(0);
+
+    _timerTicks++;
+
+    // Get speaker and microphone volumes
+    _veVolumeControlPtr->GetSpeakerVolume(svol);
+    _veVolumeControlPtr->GetMicVolume(mvol);
+
+    // Update speaker volume slider
+    CSliderCtrl* sliderSpkr = (CSliderCtrl*)GetDlgItem(IDC_SLIDER_OUTPUT_VOLUME);
+    sliderSpkr->SetRangeMin(0);
+    sliderSpkr->SetRangeMax(255);
+    sliderSpkr->SetPos(svol);
+
+    // Update microphone volume slider
+    CSliderCtrl* sliderMic = (CSliderCtrl*)GetDlgItem(IDC_SLIDER_INPUT_VOLUME);
+    sliderMic->SetRangeMin(0);
+    sliderMic->SetRangeMax(255);
+    sliderMic->SetPos(mvol);
+
+    unsigned int micLevel;
+    unsigned int combinedOutputLevel;
+
+    // Get audio levels
+    _veVolumeControlPtr->GetSpeechInputLevel(micLevel);
+    _veVolumeControlPtr->GetSpeechOutputLevel(-1, combinedOutputLevel);
+
+    // Update audio level controls
+    CProgressCtrl* progressMic = (CProgressCtrl*)GetDlgItem(IDC_PROGRESS_AUDIO_LEVEL_IN);
+    progressMic->SetRange(0,9);
+    progressMic->SetStep(1);
+    progressMic->SetPos(micLevel);
+    CProgressCtrl* progressOut = (CProgressCtrl*)GetDlgItem(IDC_PROGRESS_AUDIO_LEVEL_OUT);
+    progressOut->SetRange(0,9);
+    progressOut->SetStep(1);
+    progressOut->SetPos(combinedOutputLevel);
+
+    // Update playout delay (buffer size)
+    if (_veVideoSyncPtr)
+    {
+        int bufferMs(0);
+        _veVideoSyncPtr->GetPlayoutBufferSize(bufferMs);
+        SetDlgItemInt(IDC_EDIT_PLAYOUT_BUFFER_SIZE, bufferMs);
+    }
+
+    if (_delayEstimate1 && _veVideoSyncPtr)
+    {
+        const int channel = GetDlgItemInt(IDC_EDIT_1);
+        int delayMs(0);
+        _veVideoSyncPtr->GetDelayEstimate(channel, delayMs);
+        SetDlgItemInt(IDC_EDIT_DELAY_ESTIMATE_1, delayMs);
+    }
+
+    if (_rxVad && _veApmPtr && _rxVadObserverPtr)
+    {
+        SetDlgItemInt(IDC_EDIT_RXVAD, _rxVadObserverPtr->vad_decision);
+    }
+
+    if (_veHardwarePtr)
+    {
+        int load1, load2;
+        _veHardwarePtr->GetSystemCPULoad(load1);
+        _veHardwarePtr->GetCPULoad(load2);
+        str.Format(_T("CPU load (system/VoE): %d/%d [%%]"), load1, load2);
+        SetDlgItemText(IDC_EDIT_CPU_LOAD, (LPCTSTR)str);
+    }
+
+    BOOL ret;
+    int channel = GetDlgItemInt(IDC_EDIT_1, &ret);
+
+    if (_veCodecPtr)
+    {
+        if (ret == TRUE)
+        {
+            CodecInst codec;
+            if (_veCodecPtr->GetRecCodec(channel, codec) == 0)
+            {
+        str.Format(_T("RX codec: %s, freq=%d, pt=%d, rate=%d, size=%d"), CharToTchar(codec.plname, 32), codec.plfreq, codec.pltype, codec.rate, codec.pacsize);
+                SetDlgItemText(IDC_EDIT_RX_CODEC_1, (LPCTSTR)str);
+            }
+        }
+    }
+
+    if (_veRtpRtcpPtr)
+    {
+        if (ret == TRUE)
+        {
+            CallStatistics stats;
+            if (_veRtpRtcpPtr->GetRTCPStatistics(channel, stats) == 0)
+            {
+                str.Format(_T("RTCP | RTP: cum=%u, ext=%d, frac=%u, jitter=%u | TX=%d, RX=%d, RTT=%d"),
+                    stats.cumulativeLost, stats.extendedMax, stats.fractionLost, stats.jitterSamples, stats.packetsSent, stats.packetsReceived, stats.rttMs);
+                SetDlgItemText(IDC_EDIT_RTCP_STAT_1, (LPCTSTR)str);
+            }
+        }
+    }
+
+    SetTimer(0, 1000, NULL);
+    CDialog::OnTimer(nIDEvent);
+}
+
+void CWinTestDlg::OnBnClickedButtonClearErrorCallback()
+{
+    _nErrorCallbacks = 0;
+    SetDlgItemText(IDC_EDIT_ERROR_CALLBACK, _T(""));
+}
+
+// ----------------------------------------------------------------------------
+//                                       TEST
+// ----------------------------------------------------------------------------
+
+void CWinTestDlg::OnBnClickedButtonTest1()
+{
+    // add tests here...
+}
+
diff --git a/src/voice_engine/test/win_test/WinTestDlg.h b/src/voice_engine/test/win_test/WinTestDlg.h
new file mode 100644
index 0000000..412c220
--- /dev/null
+++ b/src/voice_engine/test/win_test/WinTestDlg.h
@@ -0,0 +1,281 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#pragma once
+
+#if (_MSC_VER >= 1400)
+#define PRINT_GET_RESULT(...) \
+    { \
+        _strMsg.Format(__VA_ARGS__); \
+        SetDlgItemText(IDC_EDIT_GET_OUTPUT, _strMsg); \
+    } \
+
+#define TEST(x, ...) \
+    if (!(x)) \
+    { \
+        _strMsg.Format(__VA_ARGS__); \
+        SetDlgItemText(IDC_EDIT_MESSAGE, _strMsg); \
+        _strErr.Format(_T("FAILED (error=%d)"), _veBasePtr->LastError()); \
+        SetDlgItemText(IDC_EDIT_RESULT, _strErr); \
+        _failCount++; \
+        SetDlgItemInt(IDC_EDIT_N_FAILS, _failCount); \
+        SetDlgItemInt(IDC_EDIT_LAST_ERROR, _veBasePtr->LastError()); \
+    } \
+    else \
+    { \
+        _strMsg.Format(__VA_ARGS__); \
+        SetDlgItemText(IDC_EDIT_MESSAGE, _strMsg); \
+        SetDlgItemText(IDC_EDIT_RESULT, _T("OK")); \
+    } \
+
+#define TEST2(x, ...) \
+    if (!(x)) \
+    { \
+        _strMsg.Format(__VA_ARGS__); \
+        ((CWinTestDlg*)_parentDialogPtr)->UpdateTest(true, _strMsg); \
+    } \
+    else \
+    { \
+        _strMsg.Format(__VA_ARGS__); \
+        ((CWinTestDlg*)_parentDialogPtr)->UpdateTest(false, _strMsg); \
+    }
+#else
+#define TEST(x, exp) \
+    if (!(x)) \
+    { \
+        _strMsg.Format(exp); \
+        SetDlgItemText(IDC_EDIT_MESSAGE, _strMsg); \
+        _strErr.Format("FAILED (error=%d)", _veBasePtr->LastError()); \
+        SetDlgItemText(IDC_EDIT_RESULT, _strErr); \
+        _failCount++; \
+        SetDlgItemInt(IDC_EDIT_N_FAILS, _failCount); \
+        SetDlgItemInt(IDC_EDIT_LAST_ERROR, _veBasePtr->LastError()); \
+    } \
+    else \
+    { \
+        _strMsg.Format(exp); \
+        SetDlgItemText(IDC_EDIT_MESSAGE, _strMsg); \
+        SetDlgItemText(IDC_EDIT_RESULT, _T("OK")); \
+    } \
+
+#define TEST2(x, exp) \
+    if (!(x)) \
+    { \
+        _strMsg.Format(exp); \
+        ((CWinTestDlg*)_parentDialogPtr)->UpdateTest(true, _strMsg); \
+    } \
+    else \
+    { \
+        _strMsg.Format(exp); \
+        ((CWinTestDlg*)_parentDialogPtr)->UpdateTest(false, _strMsg); \
+    }
+#endif
+
+#include <string>
+
+#include "voe_base.h"
+#include "voe_rtp_rtcp.h"
+#include "voe_codec.h"
+#include "voe_dtmf.h"
+#include "voe_encryption.h"
+#include "voe_external_media.h"
+#include "voe_file.h"
+#include "voe_hardware.h"
+#include "voe_network.h"
+#include "voe_video_sync.h"
+#include "voe_volume_control.h"
+
+#include "voe_audio_processing.h"
+#include "voe_rtp_rtcp.h"
+#include "voe_errors.h"
+
+class MediaProcessImpl;
+class ConnectionObserver;
+class MyEncryption;
+class RxCallback;
+class MyTransport;
+
+using namespace webrtc;
+
+#define MAX_NUM_OF_CHANNELS    10
+
+// CWinTestDlg dialog
+class CWinTestDlg : public CDialog,
+                    public VoiceEngineObserver,
+                    public VoERTPObserver
+{
+// Construction
+public:
+    CWinTestDlg(CWnd* pParent = NULL);    // standard constructor
+    virtual ~CWinTestDlg();
+
+// Dialog Data
+    enum { IDD = IDD_WINTEST_DIALOG };
+
+    BOOL UpdateTest(bool failed, const CString& strMsg);
+
+protected:
+    virtual void DoDataExchange(CDataExchange* pDX);    // DDX/DDV support
+
+protected:  // VoiceEngineObserver
+    virtual void CallbackOnError(const int channel, const int errCode);
+
+protected:    // VoERTPObserver
+    virtual void OnIncomingCSRCChanged(
+        const int channel, const unsigned int CSRC, const bool added);
+    virtual void OnIncomingSSRCChanged(
+        const int channel, const unsigned int SSRC);
+
+// Implementation
+protected:
+    HICON m_hIcon;
+
+    // Generated message map functions
+    virtual BOOL OnInitDialog();
+    afx_msg void OnSysCommand(UINT nID, LPARAM lParam);
+    afx_msg void OnPaint();
+    afx_msg HCURSOR OnQueryDragIcon();
+    DECLARE_MESSAGE_MAP()
+public:
+    afx_msg void OnBnClickedButtonCreate1();
+    afx_msg void OnBnClickedButtonDelete1();
+
+private:
+    VoiceEngine*    _vePtr;
+
+    VoECodec*               _veCodecPtr;
+    VoEExternalMedia*       _veExternalMediaPtr;
+    VoEVolumeControl*       _veVolumeControlPtr;
+    VoEEncryption*          _veEncryptionPtr;
+    VoEHardware*            _veHardwarePtr;
+    VoEVideoSync*           _veVideoSyncPtr;
+    VoENetwork*             _veNetworkPtr;
+    VoEFile*                _veFilePtr;
+    VoEAudioProcessing*     _veApmPtr;
+    VoEBase*                _veBasePtr;
+    VoERTP_RTCP*            _veRtpRtcpPtr;
+
+    MyTransport*            _transportPtr;
+    MediaProcessImpl*       _externalMediaPtr;
+    ConnectionObserver*     _connectionObserverPtr;
+    MyEncryption*           _encryptionPtr;
+    RxCallback*             _rxVadObserverPtr;
+
+private:
+    int                     _failCount;
+    CString                 _strMsg;
+    CString                 _strErr;
+    bool                    _externalTransport;
+    bool                    _externalTransportBuild;
+    int                     _checkPlayFileIn;
+    int                     _checkPlayFileIn1;
+    int                     _checkPlayFileIn2;
+    int                     _checkPlayFileOut1;
+    int                     _checkPlayFileOut2;
+    int                     _checkAGC;
+    int                     _checkAGC1;
+    int                     _checkNS;
+    int                     _checkNS1;
+    int                     _checkEC;
+    int                     _checkVAD1;
+    int                     _checkVAD2;
+    int                     _checkSrtpTx1;
+    int                     _checkSrtpTx2;
+    int                     _checkSrtpRx1;
+    int                     _checkSrtpRx2;
+    int                     _checkConference1;
+    int                     _checkConference2;
+    int                     _checkOnHold1;
+    int                     _checkOnHold2;
+    bool                    _delayEstimate1;
+    bool                    _delayEstimate2;
+    bool                    _rxVad;
+    int                     _nErrorCallbacks;
+    int                     _timerTicks;
+    std::string             _long_audio_file_path;
+
+public:
+    afx_msg void OnBnClickedButtonCreate2();
+    afx_msg void OnBnClickedButtonDelete2();
+    afx_msg void OnCbnSelchangeComboCodec1();
+    afx_msg void OnBnClickedButtonStartListen1();
+    afx_msg void OnBnClickedButtonStopListen1();
+    afx_msg void OnBnClickedButtonStartPlayout1();
+    afx_msg void OnBnClickedButtonStopPlayout1();
+    afx_msg void OnBnClickedButtonStartSend1();
+    afx_msg void OnBnClickedButtonStopSend1();
+    afx_msg void OnCbnSelchangeComboIp2();
+    afx_msg void OnCbnSelchangeComboIp1();
+    afx_msg void OnCbnSelchangeComboCodec2();
+    afx_msg void OnBnClickedButtonStartListen2();
+    afx_msg void OnBnClickedButtonStopListen2();
+    afx_msg void OnBnClickedButtonStartPlayout2();
+    afx_msg void OnBnClickedButtonStopPlayout2();
+    afx_msg void OnBnClickedButtonStartSend2();
+    afx_msg void OnBnClickedButtonStopSend2();
+    afx_msg void OnBnClickedButtonTest11();
+    afx_msg void OnBnClickedCheckExtTrans1();
+    afx_msg void OnBnClickedCheckPlayFileIn1();
+    afx_msg void OnBnClickedCheckPlayFileOut1();
+    afx_msg void OnBnClickedCheckExtTrans2();
+    afx_msg void OnBnClickedCheckPlayFileIn2();
+    afx_msg void OnBnClickedCheckPlayFileOut2();
+    afx_msg void OnBnClickedCheckPlayFileIn();
+    afx_msg void OnBnClickedCheckPlayFileOut();
+    afx_msg void OnCbnSelchangeComboRecDevice();
+    afx_msg void OnCbnSelchangeComboPlayDevice();
+    afx_msg void OnBnClickedCheckExtMediaIn1();
+    afx_msg void OnBnClickedCheckExtMediaOut1();
+    afx_msg void OnNMReleasedcaptureSliderInputVolume(NMHDR *pNMHDR, LRESULT *pResult);
+    afx_msg void OnNMReleasedcaptureSliderOutputVolume(NMHDR *pNMHDR, LRESULT *pResult);
+    afx_msg void OnTimer(UINT_PTR nIDEvent);
+    afx_msg void OnBnClickedCheckAgc();
+    CString _strComboIp1;
+    CString _strComboIp2;
+    afx_msg void OnBnClickedCheckNs();
+    afx_msg void OnBnClickedCheckEc();
+    afx_msg void OnBnClickedCheckVad1();
+    afx_msg void OnBnClickedCheckVad2();
+    afx_msg void OnBnClickedCheckExtMediaIn2();
+    afx_msg void OnBnClickedCheckExtMediaOut2();
+    afx_msg void OnBnClickedCheckMuteIn();
+    afx_msg void OnBnClickedCheckMuteIn1();
+    afx_msg void OnBnClickedCheckMuteIn2();
+    afx_msg void OnBnClickedCheckSrtpTx1();
+    afx_msg void OnBnClickedCheckSrtpRx1();
+    afx_msg void OnBnClickedCheckSrtpTx2();
+    afx_msg void OnBnClickedCheckSrtpRx2();
+    afx_msg void OnBnClickedCheckExtEncryption1();
+    afx_msg void OnBnClickedCheckExtEncryption2();
+    afx_msg void OnBnClickedButtonDtmf1();
+    afx_msg void OnBnClickedCheckRecMic();
+    afx_msg void OnBnClickedButtonDtmf2();
+    afx_msg void OnBnClickedButtonTest1();
+    afx_msg void OnBnClickedCheckConference1();
+    afx_msg void OnBnClickedCheckConference2();
+    afx_msg void OnBnClickedCheckOnHold1();
+    afx_msg void OnBnClickedCheckOnHold2();
+    afx_msg void OnBnClickedCheckExtMediaIn();
+    afx_msg void OnBnClickedCheckExtMediaOut();
+    afx_msg void OnLbnSelchangeListCodec1();
+    afx_msg void OnNMReleasedcaptureSliderPanLeft(NMHDR *pNMHDR, LRESULT *pResult);
+    afx_msg void OnNMReleasedcaptureSliderPanRight(NMHDR *pNMHDR, LRESULT *pResult);
+    afx_msg void OnBnClickedButtonVersion();
+    afx_msg void OnBnClickedCheckDelayEstimate1();
+    afx_msg void OnBnClickedCheckRxvad();
+    afx_msg void OnBnClickedCheckAgc1();
+    afx_msg void OnBnClickedCheckNs1();
+    afx_msg void OnBnClickedCheckRecCall();
+    afx_msg void OnBnClickedCheckTypingDetection();
+    afx_msg void OnBnClickedCheckFEC();
+    afx_msg void OnBnClickedButtonClearErrorCallback();
+    afx_msg void OnBnClickedCheckBwe1();
+};
+#pragma once
diff --git a/src/voice_engine/test/win_test/res/WinTest.ico b/src/voice_engine/test/win_test/res/WinTest.ico
new file mode 100644
index 0000000..8a84ca3
--- /dev/null
+++ b/src/voice_engine/test/win_test/res/WinTest.ico
Binary files differ
diff --git a/src/voice_engine/test/win_test/res/WinTest.rc2 b/src/voice_engine/test/win_test/res/WinTest.rc2
new file mode 100644
index 0000000..044bf7e
--- /dev/null
+++ b/src/voice_engine/test/win_test/res/WinTest.rc2
@@ -0,0 +1,13 @@
+//

+// WinTest.RC2 - resources Microsoft Visual C++ does not edit directly

+//

+

+#ifdef APSTUDIO_INVOKED

+#error this file is not editable by Microsoft Visual C++

+#endif //APSTUDIO_INVOKED

+

+

+/////////////////////////////////////////////////////////////////////////////

+// Add manually edited resources here...

+

+/////////////////////////////////////////////////////////////////////////////

diff --git a/src/voice_engine/test/win_test/stdafx.cc b/src/voice_engine/test/win_test/stdafx.cc
new file mode 100644
index 0000000..6cdb906
--- /dev/null
+++ b/src/voice_engine/test/win_test/stdafx.cc
@@ -0,0 +1,17 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// stdafx.cpp : source file that includes just the standard includes
+// WinTest.pch will be the pre-compiled header
+// stdafx.obj will contain the pre-compiled type information
+
+#include "stdafx.h"
+
+
diff --git a/src/voice_engine/test/win_test/stdafx.h b/src/voice_engine/test/win_test/stdafx.h
new file mode 100644
index 0000000..b4d875c
--- /dev/null
+++ b/src/voice_engine/test/win_test/stdafx.h
@@ -0,0 +1,82 @@
+/*
+ *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// stdafx.h : include file for standard system include files,
+// or project specific include files that are used frequently,
+// but are changed infrequently
+
+#pragma once
+
+#ifndef _SECURE_ATL
+#define _SECURE_ATL 1
+#endif
+
+#ifndef VC_EXTRALEAN
+#define VC_EXTRALEAN		// Exclude rarely-used stuff from Windows headers
+#endif
+
+// Modify the following defines if you have to target a platform prior to the ones specified below.
+// Refer to MSDN for the latest info on corresponding values for different platforms.
+#ifndef WINVER				// Allow use of features specific to Windows XP or later.
+#define WINVER 0x0501		// Change this to the appropriate value to target other versions of Windows.
+#endif
+
+#ifndef _WIN32_WINNT		// Allow use of features specific to Windows XP or later.                   
+#define _WIN32_WINNT 0x0501	// Change this to the appropriate value to target other versions of Windows.
+#endif						
+
+#ifndef _WIN32_WINDOWS		// Allow use of features specific to Windows 98 or later.
+#define _WIN32_WINDOWS 0x0410 // Change this to the appropriate value to target Windows Me or later.
+#endif
+
+#ifndef _WIN32_IE			// Allow use of features specific to IE 6.0 or later.
+#define _WIN32_IE 0x0600	// Change this to the appropriate value to target other versions of IE.
+#endif
+
+#define _ATL_CSTRING_EXPLICIT_CONSTRUCTORS	// some CString constructors will be explicit
+
+// turns off MFC's hiding of some common and often safely ignored warning messages
+#define _AFX_ALL_WARNINGS
+
+#include <afxwin.h>         // MFC core and standard components
+#include <afxext.h>         // MFC extensions
+
+
+
+
+
+#ifndef _AFX_NO_OLE_SUPPORT
+#include <afxdtctl.h>		// MFC support for Internet Explorer 4 Common Controls
+#endif
+#ifndef _AFX_NO_AFXCMN_SUPPORT
+#include <afxcmn.h>			// MFC support for Windows Common Controls
+#endif // _AFX_NO_AFXCMN_SUPPORT
+
+
+
+
+
+
+
+
+
+#ifdef _UNICODE
+#if defined _M_IX86
+#pragma comment(linker,"/manifestdependency:\"type='win32' name='Microsoft.Windows.Common-Controls' version='6.0.0.0' processorArchitecture='x86' publicKeyToken='6595b64144ccf1df' language='*'\"")
+#elif defined _M_IA64
+#pragma comment(linker,"/manifestdependency:\"type='win32' name='Microsoft.Windows.Common-Controls' version='6.0.0.0' processorArchitecture='ia64' publicKeyToken='6595b64144ccf1df' language='*'\"")
+#elif defined _M_X64
+#pragma comment(linker,"/manifestdependency:\"type='win32' name='Microsoft.Windows.Common-Controls' version='6.0.0.0' processorArchitecture='amd64' publicKeyToken='6595b64144ccf1df' language='*'\"")
+#else
+#pragma comment(linker,"/manifestdependency:\"type='win32' name='Microsoft.Windows.Common-Controls' version='6.0.0.0' processorArchitecture='*' publicKeyToken='6595b64144ccf1df' language='*'\"")
+#endif
+#endif
+
+
diff --git a/src/voice_engine/transmit_mixer.cc b/src/voice_engine/transmit_mixer.cc
new file mode 100644
index 0000000..6153c5b
--- /dev/null
+++ b/src/voice_engine/transmit_mixer.cc
@@ -0,0 +1,1487 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "transmit_mixer.h"
+
+#include "audio_frame_operations.h"
+#include "channel.h"
+#include "channel_manager.h"
+#include "critical_section_wrapper.h"
+#include "event_wrapper.h"
+#include "statistics.h"
+#include "trace.h"
+#include "utility.h"
+#include "voe_base_impl.h"
+#include "voe_external_media.h"
+
+#define WEBRTC_ABS(a) (((a) < 0) ? -(a) : (a))
+
+namespace webrtc {
+
+namespace voe {
+
+// Used for downmixing before resampling.
+// TODO(andrew): audio_device should advertise the maximum sample rate it can
+//               provide.
+static const int kMaxMonoDeviceDataSizeSamples = 960;  // 10 ms, 96 kHz, mono.
+
+void
+TransmitMixer::OnPeriodicProcess()
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::OnPeriodicProcess()");
+
+#if defined(WEBRTC_VOICE_ENGINE_TYPING_DETECTION)
+    if (_typingNoiseWarning > 0)
+    {
+        CriticalSectionScoped cs(&_callbackCritSect);
+        if (_voiceEngineObserverPtr)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                         "TransmitMixer::OnPeriodicProcess() => "
+                         "CallbackOnError(VE_TYPING_NOISE_WARNING)");
+            _voiceEngineObserverPtr->CallbackOnError(-1,
+                                                     VE_TYPING_NOISE_WARNING);
+        }
+        _typingNoiseWarning = 0;
+    }
+#endif
+
+    if (_saturationWarning > 0)
+    {
+        CriticalSectionScoped cs(&_callbackCritSect);
+        if (_voiceEngineObserverPtr)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                         "TransmitMixer::OnPeriodicProcess() =>"
+                         " CallbackOnError(VE_SATURATION_WARNING)");
+            _voiceEngineObserverPtr->CallbackOnError(-1, VE_SATURATION_WARNING);
+       }
+        _saturationWarning = 0;
+    }
+
+    if (_noiseWarning > 0)
+    {
+        CriticalSectionScoped cs(&_callbackCritSect);
+        if (_voiceEngineObserverPtr)
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                         "TransmitMixer::OnPeriodicProcess() =>"
+                         "CallbackOnError(VE_NOISE_WARNING)");
+            _voiceEngineObserverPtr->CallbackOnError(-1, VE_NOISE_WARNING);
+        }
+        _noiseWarning = 0;
+    }
+}
+
+
+void TransmitMixer::PlayNotification(const WebRtc_Word32 id,
+                                     const WebRtc_UWord32 durationMs)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::PlayNotification(id=%d, durationMs=%d)",
+                 id, durationMs);
+
+    // Not implement yet
+}
+
+void TransmitMixer::RecordNotification(const WebRtc_Word32 id,
+                                       const WebRtc_UWord32 durationMs)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId,-1),
+                 "TransmitMixer::RecordNotification(id=%d, durationMs=%d)",
+                 id, durationMs);
+
+    // Not implement yet
+}
+
+void TransmitMixer::PlayFileEnded(const WebRtc_Word32 id)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::PlayFileEnded(id=%d)", id);
+
+    assert(id == _filePlayerId);
+
+    CriticalSectionScoped cs(&_critSect);
+
+    _filePlaying = false;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::PlayFileEnded() =>"
+                 "file player module is shutdown");
+}
+
+void
+TransmitMixer::RecordFileEnded(const WebRtc_Word32 id)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::RecordFileEnded(id=%d)", id);
+
+    if (id == _fileRecorderId)
+    {
+        CriticalSectionScoped cs(&_critSect);
+        _fileRecording = false;
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                     "TransmitMixer::RecordFileEnded() => fileRecorder module"
+                     "is shutdown");
+    } else if (id == _fileCallRecorderId)
+    {
+        CriticalSectionScoped cs(&_critSect);
+        _fileCallRecording = false;
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_instanceId, -1),
+                     "TransmitMixer::RecordFileEnded() => fileCallRecorder"
+                     "module is shutdown");
+    }
+}
+
+WebRtc_Word32
+TransmitMixer::Create(TransmitMixer*& mixer, const WebRtc_UWord32 instanceId)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, -1),
+                 "TransmitMixer::Create(instanceId=%d)", instanceId);
+    mixer = new TransmitMixer(instanceId);
+    if (mixer == NULL)
+    {
+        WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(instanceId, -1),
+                     "TransmitMixer::Create() unable to allocate memory"
+                     "for mixer");
+        return -1;
+    }
+    return 0;
+}
+
+void
+TransmitMixer::Destroy(TransmitMixer*& mixer)
+{
+    if (mixer)
+    {
+        delete mixer;
+        mixer = NULL;
+    }
+}
+
+TransmitMixer::TransmitMixer(const WebRtc_UWord32 instanceId) :
+    _engineStatisticsPtr(NULL),
+    _channelManagerPtr(NULL),
+    _audioProcessingModulePtr(NULL),
+    _voiceEngineObserverPtr(NULL),
+    _processThreadPtr(NULL),
+    _filePlayerPtr(NULL),
+    _fileRecorderPtr(NULL),
+    _fileCallRecorderPtr(NULL),
+    // Avoid conflict with other channels by adding 1024 - 1026,
+    // won't use as much as 1024 channels.
+    _filePlayerId(instanceId + 1024),
+    _fileRecorderId(instanceId + 1025),
+    _fileCallRecorderId(instanceId + 1026),
+    _filePlaying(false),
+    _fileRecording(false),
+    _fileCallRecording(false),
+    _audioLevel(),
+    _critSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
+    _timeActive(0),
+    _timeSinceLastTyping(0),
+    _penaltyCounter(0),
+    _typingNoiseWarning(0),
+    _timeWindow(10), // 10ms slots accepted to count as a hit
+    _costPerTyping(100), // Penalty added for a typing + activity coincide
+    _reportingThreshold(300), // Threshold for _penaltyCounter
+    _penaltyDecay(1), // how much we reduce _penaltyCounter every 10 ms.
+    _typeEventDelay(2), // how "old" event we check for
+#endif
+    _saturationWarning(0),
+    _noiseWarning(0),
+    _instanceId(instanceId),
+    _mixFileWithMicrophone(false),
+    _captureLevel(0),
+    _externalMedia(false),
+    _externalMediaCallbackPtr(NULL),
+    _mute(false),
+    _remainingMuteMicTimeMs(0),
+    _mixingFrequency(0),
+    stereo_codec_(false),
+    swap_stereo_channels_(false)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::TransmitMixer() - ctor");
+}
+
+TransmitMixer::~TransmitMixer()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::~TransmitMixer() - dtor");
+    _monitorModule.DeRegisterObserver();
+    if (_processThreadPtr)
+    {
+        _processThreadPtr->DeRegisterModule(&_monitorModule);
+    }
+    if (_externalMedia)
+    {
+        DeRegisterExternalMediaProcessing();
+    }
+    {
+        CriticalSectionScoped cs(&_critSect);
+        if (_fileRecorderPtr)
+        {
+            _fileRecorderPtr->RegisterModuleFileCallback(NULL);
+            _fileRecorderPtr->StopRecording();
+            FileRecorder::DestroyFileRecorder(_fileRecorderPtr);
+            _fileRecorderPtr = NULL;
+        }
+        if (_fileCallRecorderPtr)
+        {
+            _fileCallRecorderPtr->RegisterModuleFileCallback(NULL);
+            _fileCallRecorderPtr->StopRecording();
+            FileRecorder::DestroyFileRecorder(_fileCallRecorderPtr);
+            _fileCallRecorderPtr = NULL;
+        }
+        if (_filePlayerPtr)
+        {
+            _filePlayerPtr->RegisterModuleFileCallback(NULL);
+            _filePlayerPtr->StopPlayingFile();
+            FilePlayer::DestroyFilePlayer(_filePlayerPtr);
+            _filePlayerPtr = NULL;
+        }
+    }
+    delete &_critSect;
+    delete &_callbackCritSect;
+}
+
+WebRtc_Word32
+TransmitMixer::SetEngineInformation(ProcessThread& processThread,
+                                    Statistics& engineStatistics,
+                                    ChannelManager& channelManager)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::SetEngineInformation()");
+
+    _processThreadPtr = &processThread;
+    _engineStatisticsPtr = &engineStatistics;
+    _channelManagerPtr = &channelManager;
+
+    if (_processThreadPtr->RegisterModule(&_monitorModule) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                     "TransmitMixer::SetEngineInformation() failed to"
+                     "register the monitor module");
+    } else
+    {
+        _monitorModule.RegisterObserver(*this);
+    }
+
+    return 0;
+}
+
+WebRtc_Word32
+TransmitMixer::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::RegisterVoiceEngineObserver()");
+    CriticalSectionScoped cs(&_callbackCritSect);
+
+    if (_voiceEngineObserverPtr)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "RegisterVoiceEngineObserver() observer already enabled");
+        return -1;
+    }
+    _voiceEngineObserverPtr = &observer;
+    return 0;
+}
+
+WebRtc_Word32
+TransmitMixer::SetAudioProcessingModule(AudioProcessing* audioProcessingModule)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::SetAudioProcessingModule("
+                 "audioProcessingModule=0x%x)",
+                 audioProcessingModule);
+    _audioProcessingModulePtr = audioProcessingModule;
+    return 0;
+}
+
+void TransmitMixer::CheckForSendCodecChanges() {
+  ScopedChannel sc(*_channelManagerPtr);
+  void* iterator = NULL;
+  Channel* channel = sc.GetFirstChannel(iterator);
+  _mixingFrequency = 8000;
+  stereo_codec_ = false;
+  while (channel != NULL) {
+    if (channel->Sending()) {
+      CodecInst codec;
+      channel->GetSendCodec(codec);
+
+      if (codec.channels == 2)
+        stereo_codec_ = true;
+      if (codec.plfreq > _mixingFrequency)
+        _mixingFrequency = codec.plfreq;
+    }
+    channel = sc.GetNextChannel(iterator);
+  }
+}
+
+WebRtc_Word32
+TransmitMixer::PrepareDemux(const void* audioSamples,
+                            const WebRtc_UWord32 nSamples,
+                            const WebRtc_UWord8 nChannels,
+                            const WebRtc_UWord32 samplesPerSec,
+                            const WebRtc_UWord16 totalDelayMS,
+                            const WebRtc_Word32 clockDrift,
+                            const WebRtc_UWord16 currentMicLevel)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::PrepareDemux(nSamples=%u, nChannels=%u,"
+                 "samplesPerSec=%u, totalDelayMS=%u, clockDrift=%u,"
+                 "currentMicLevel=%u)", nSamples, nChannels, samplesPerSec,
+                 totalDelayMS, clockDrift, currentMicLevel);
+
+    CheckForSendCodecChanges();
+
+    // --- Resample input audio and create/store the initial audio frame
+
+    if (GenerateAudioFrame(static_cast<const WebRtc_Word16*>(audioSamples),
+                           nSamples,
+                           nChannels,
+                           samplesPerSec) == -1)
+    {
+        return -1;
+    }
+
+    // --- Near-end Voice Quality Enhancement (APM) processing
+
+    APMProcessStream(totalDelayMS, clockDrift, currentMicLevel);
+
+    if (swap_stereo_channels_ && stereo_codec_)
+      // Only bother swapping if we're using a stereo codec.
+      AudioFrameOperations::SwapStereoChannels(&_audioFrame);
+
+    // --- Annoying typing detection (utilizes the APM/VAD decision)
+
+#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
+    TypingDetection();
+#endif
+
+    // --- Mute during DTMF tone if direct feedback is enabled
+
+    if (_remainingMuteMicTimeMs > 0)
+    {
+        AudioFrameOperations::Mute(_audioFrame);
+        _remainingMuteMicTimeMs -= 10;
+        if (_remainingMuteMicTimeMs < 0)
+        {
+            _remainingMuteMicTimeMs = 0;
+        }
+    }
+
+    // --- Mute signal
+
+    if (_mute)
+    {
+        AudioFrameOperations::Mute(_audioFrame);
+    }
+
+    // --- Measure audio level of speech after APM processing
+
+    _audioLevel.ComputeLevel(_audioFrame);
+
+    // --- Mix with file (does not affect the mixing frequency)
+
+    if (_filePlaying)
+    {
+        MixOrReplaceAudioWithFile(_mixingFrequency);
+    }
+
+    // --- Record to file
+
+    if (_fileRecording)
+    {
+        RecordAudioToFile(_mixingFrequency);
+    }
+
+    // --- External media processing
+
+    if (_externalMedia)
+    {
+        CriticalSectionScoped cs(&_callbackCritSect);
+        const bool isStereo = (_audioFrame.num_channels_ == 2);
+        if (_externalMediaCallbackPtr)
+        {
+            _externalMediaCallbackPtr->Process(
+                -1,
+                kRecordingAllChannelsMixed,
+                (WebRtc_Word16*) _audioFrame.data_,
+                _audioFrame.samples_per_channel_,
+                _audioFrame.sample_rate_hz_,
+                isStereo);
+        }
+    }
+
+    return 0;
+}
+
+WebRtc_Word32
+TransmitMixer::DemuxAndMix()
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::DemuxAndMix()");
+
+    ScopedChannel sc(*_channelManagerPtr);
+    void* iterator(NULL);
+    Channel* channelPtr = sc.GetFirstChannel(iterator);
+    while (channelPtr != NULL)
+    {
+        if (channelPtr->InputIsOnHold())
+        {
+            channelPtr->UpdateLocalTimeStamp();
+        } else if (channelPtr->Sending())
+        {
+            // load temporary audioframe with current (mixed) microphone signal
+            AudioFrame tmpAudioFrame = _audioFrame;
+
+            channelPtr->Demultiplex(tmpAudioFrame);
+            channelPtr->PrepareEncodeAndSend(_mixingFrequency);
+        }
+        channelPtr = sc.GetNextChannel(iterator);
+    }
+    return 0;
+}
+
+WebRtc_Word32
+TransmitMixer::EncodeAndSend()
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::EncodeAndSend()");
+
+    ScopedChannel sc(*_channelManagerPtr);
+    void* iterator(NULL);
+    Channel* channelPtr = sc.GetFirstChannel(iterator);
+    while (channelPtr != NULL)
+    {
+        if (channelPtr->Sending() && !channelPtr->InputIsOnHold())
+        {
+            channelPtr->EncodeAndSend();
+        }
+        channelPtr = sc.GetNextChannel(iterator);
+    }
+    return 0;
+}
+
+WebRtc_UWord32 TransmitMixer::CaptureLevel() const
+{
+    return _captureLevel;
+}
+
+void
+TransmitMixer::UpdateMuteMicrophoneTime(const WebRtc_UWord32 lengthMs)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+               "TransmitMixer::UpdateMuteMicrophoneTime(lengthMs=%d)",
+               lengthMs);
+    _remainingMuteMicTimeMs = lengthMs;
+}
+
+WebRtc_Word32
+TransmitMixer::StopSend()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+               "TransmitMixer::StopSend()");
+    _audioLevel.Clear();
+    return 0;
+}
+
+int TransmitMixer::StartPlayingFileAsMicrophone(const char* fileName,
+                                                const bool loop,
+                                                const FileFormats format,
+                                                const int startPosition,
+                                                const float volumeScaling,
+                                                const int stopPosition,
+                                                const CodecInst* codecInst)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::StartPlayingFileAsMicrophone("
+                 "fileNameUTF8[]=%s,loop=%d, format=%d, volumeScaling=%5.3f,"
+                 " startPosition=%d, stopPosition=%d)", fileName, loop,
+                 format, volumeScaling, startPosition, stopPosition);
+
+    if (_filePlaying)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_ALREADY_PLAYING, kTraceWarning,
+            "StartPlayingFileAsMicrophone() is already playing");
+        return 0;
+    }
+
+    CriticalSectionScoped cs(&_critSect);
+
+    // Destroy the old instance
+    if (_filePlayerPtr)
+    {
+        _filePlayerPtr->RegisterModuleFileCallback(NULL);
+        FilePlayer::DestroyFilePlayer(_filePlayerPtr);
+        _filePlayerPtr = NULL;
+    }
+
+    // Dynamically create the instance
+    _filePlayerPtr
+        = FilePlayer::CreateFilePlayer(_filePlayerId,
+                                       (const FileFormats) format);
+
+    if (_filePlayerPtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "StartPlayingFileAsMicrophone() filePlayer format isnot correct");
+        return -1;
+    }
+
+    const WebRtc_UWord32 notificationTime(0);
+
+    if (_filePlayerPtr->StartPlayingFile(
+        fileName,
+        loop,
+        startPosition,
+        volumeScaling,
+        notificationTime,
+        stopPosition,
+        (const CodecInst*) codecInst) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "StartPlayingFile() failed to start file playout");
+        _filePlayerPtr->StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(_filePlayerPtr);
+        _filePlayerPtr = NULL;
+        return -1;
+    }
+
+    _filePlayerPtr->RegisterModuleFileCallback(this);
+    _filePlaying = true;
+
+    return 0;
+}
+
+int TransmitMixer::StartPlayingFileAsMicrophone(InStream* stream,
+                                                const FileFormats format,
+                                                const int startPosition,
+                                                const float volumeScaling,
+                                                const int stopPosition,
+                                                const CodecInst* codecInst)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "TransmitMixer::StartPlayingFileAsMicrophone(format=%d,"
+                 " volumeScaling=%5.3f, startPosition=%d, stopPosition=%d)",
+                 format, volumeScaling, startPosition, stopPosition);
+
+    if (stream == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "StartPlayingFileAsMicrophone() NULL as input stream");
+        return -1;
+    }
+
+    if (_filePlaying)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_ALREADY_PLAYING, kTraceWarning,
+            "StartPlayingFileAsMicrophone() is already playing");
+        return 0;
+    }
+
+    CriticalSectionScoped cs(&_critSect);
+
+    // Destroy the old instance
+    if (_filePlayerPtr)
+    {
+        _filePlayerPtr->RegisterModuleFileCallback(NULL);
+        FilePlayer::DestroyFilePlayer(_filePlayerPtr);
+        _filePlayerPtr = NULL;
+    }
+
+    // Dynamically create the instance
+    _filePlayerPtr
+        = FilePlayer::CreateFilePlayer(_filePlayerId,
+                                       (const FileFormats) format);
+
+    if (_filePlayerPtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceWarning,
+            "StartPlayingFileAsMicrophone() filePlayer format isnot correct");
+        return -1;
+    }
+
+    const WebRtc_UWord32 notificationTime(0);
+
+    if (_filePlayerPtr->StartPlayingFile(
+        (InStream&) *stream,
+        startPosition,
+        volumeScaling,
+        notificationTime,
+        stopPosition,
+        (const CodecInst*) codecInst) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "StartPlayingFile() failed to start file playout");
+        _filePlayerPtr->StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(_filePlayerPtr);
+        _filePlayerPtr = NULL;
+        return -1;
+    }
+    _filePlayerPtr->RegisterModuleFileCallback(this);
+    _filePlaying = true;
+
+    return 0;
+}
+
+int TransmitMixer::StopPlayingFileAsMicrophone()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId,-1),
+                 "TransmitMixer::StopPlayingFileAsMicrophone()");
+
+    if (!_filePlaying)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceWarning,
+            "StopPlayingFileAsMicrophone() isnot playing");
+        return 0;
+    }
+
+    CriticalSectionScoped cs(&_critSect);
+
+    if (_filePlayerPtr->StopPlayingFile() != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_CANNOT_STOP_PLAYOUT, kTraceError,
+            "StopPlayingFile() couldnot stop playing file");
+        return -1;
+    }
+
+    _filePlayerPtr->RegisterModuleFileCallback(NULL);
+    FilePlayer::DestroyFilePlayer(_filePlayerPtr);
+    _filePlayerPtr = NULL;
+    _filePlaying = false;
+
+    return 0;
+}
+
+int TransmitMixer::IsPlayingFileAsMicrophone() const
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::IsPlayingFileAsMicrophone()");
+    return _filePlaying;
+}
+
+int TransmitMixer::ScaleFileAsMicrophonePlayout(const float scale)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::ScaleFileAsMicrophonePlayout(scale=%5.3f)",
+                 scale);
+
+    CriticalSectionScoped cs(&_critSect);
+
+    if (!_filePlaying)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_OPERATION, kTraceError,
+            "ScaleFileAsMicrophonePlayout() isnot playing file");
+        return -1;
+    }
+
+    if ((_filePlayerPtr == NULL) ||
+        (_filePlayerPtr->SetAudioScaling(scale) != 0))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_ARGUMENT, kTraceError,
+            "SetAudioScaling() failed to scale playout");
+        return -1;
+    }
+
+    return 0;
+}
+
+int TransmitMixer::StartRecordingMicrophone(const char* fileName,
+                                            const CodecInst* codecInst)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::StartRecordingMicrophone(fileName=%s)",
+                 fileName);
+
+    if (_fileRecording)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                     "StartRecordingMicrophone() is already recording");
+        return 0;
+    }
+
+    FileFormats format;
+    const WebRtc_UWord32 notificationTime(0); // Not supported in VoE
+    CodecInst dummyCodec = { 100, "L16", 16000, 320, 1, 320000 };
+
+    if (codecInst != NULL &&
+      (codecInst->channels < 0 || codecInst->channels > 2))
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_ARGUMENT, kTraceError,
+            "StartRecordingMicrophone() invalid compression");
+        return (-1);
+    }
+    if (codecInst == NULL)
+    {
+        format = kFileFormatPcm16kHzFile;
+        codecInst = &dummyCodec;
+    } else if ((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
+    {
+        format = kFileFormatWavFile;
+    } else
+    {
+        format = kFileFormatCompressedFile;
+    }
+
+    CriticalSectionScoped cs(&_critSect);
+
+    // Destroy the old instance
+    if (_fileRecorderPtr)
+    {
+        _fileRecorderPtr->RegisterModuleFileCallback(NULL);
+        FileRecorder::DestroyFileRecorder(_fileRecorderPtr);
+        _fileRecorderPtr = NULL;
+    }
+
+    _fileRecorderPtr =
+        FileRecorder::CreateFileRecorder(_fileRecorderId,
+                                         (const FileFormats) format);
+    if (_fileRecorderPtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "StartRecordingMicrophone() fileRecorder format isnot correct");
+        return -1;
+    }
+
+    if (_fileRecorderPtr->StartRecordingAudioFile(
+        fileName,
+        (const CodecInst&) *codecInst,
+        notificationTime) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "StartRecordingAudioFile() failed to start file recording");
+        _fileRecorderPtr->StopRecording();
+        FileRecorder::DestroyFileRecorder(_fileRecorderPtr);
+        _fileRecorderPtr = NULL;
+        return -1;
+    }
+    _fileRecorderPtr->RegisterModuleFileCallback(this);
+    _fileRecording = true;
+
+    return 0;
+}
+
+int TransmitMixer::StartRecordingMicrophone(OutStream* stream,
+                                            const CodecInst* codecInst)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+               "TransmitMixer::StartRecordingMicrophone()");
+
+    if (_fileRecording)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                   "StartRecordingMicrophone() is already recording");
+        return 0;
+    }
+
+    FileFormats format;
+    const WebRtc_UWord32 notificationTime(0); // Not supported in VoE
+    CodecInst dummyCodec = { 100, "L16", 16000, 320, 1, 320000 };
+
+    if (codecInst != NULL && codecInst->channels != 1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_ARGUMENT, kTraceError,
+            "StartRecordingMicrophone() invalid compression");
+        return (-1);
+    }
+    if (codecInst == NULL)
+    {
+        format = kFileFormatPcm16kHzFile;
+        codecInst = &dummyCodec;
+    } else if ((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
+    {
+        format = kFileFormatWavFile;
+    } else
+    {
+        format = kFileFormatCompressedFile;
+    }
+
+    CriticalSectionScoped cs(&_critSect);
+
+    // Destroy the old instance
+    if (_fileRecorderPtr)
+    {
+        _fileRecorderPtr->RegisterModuleFileCallback(NULL);
+        FileRecorder::DestroyFileRecorder(_fileRecorderPtr);
+        _fileRecorderPtr = NULL;
+    }
+
+    _fileRecorderPtr =
+        FileRecorder::CreateFileRecorder(_fileRecorderId,
+                                         (const FileFormats) format);
+    if (_fileRecorderPtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "StartRecordingMicrophone() fileRecorder format isnot correct");
+        return -1;
+    }
+
+    if (_fileRecorderPtr->StartRecordingAudioFile(*stream,
+                                                  *codecInst,
+                                                  notificationTime) != 0)
+    {
+    _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
+      "StartRecordingAudioFile() failed to start file recording");
+    _fileRecorderPtr->StopRecording();
+    FileRecorder::DestroyFileRecorder(_fileRecorderPtr);
+    _fileRecorderPtr = NULL;
+    return -1;
+    }
+
+    _fileRecorderPtr->RegisterModuleFileCallback(this);
+    _fileRecording = true;
+
+    return 0;
+}
+
+
+int TransmitMixer::StopRecordingMicrophone()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::StopRecordingMicrophone()");
+
+    if (!_fileRecording)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                   "StopRecordingMicrophone() isnot recording");
+        return 0;
+    }
+
+    CriticalSectionScoped cs(&_critSect);
+
+    if (_fileRecorderPtr->StopRecording() != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_STOP_RECORDING_FAILED, kTraceError,
+            "StopRecording(), could not stop recording");
+        return -1;
+    }
+    _fileRecorderPtr->RegisterModuleFileCallback(NULL);
+    FileRecorder::DestroyFileRecorder(_fileRecorderPtr);
+    _fileRecorderPtr = NULL;
+    _fileRecording = false;
+
+    return 0;
+}
+
+int TransmitMixer::StartRecordingCall(const char* fileName,
+                                      const CodecInst* codecInst)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::StartRecordingCall(fileName=%s)", fileName);
+
+    if (_fileCallRecording)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                     "StartRecordingCall() is already recording");
+        return 0;
+    }
+
+    FileFormats format;
+    const WebRtc_UWord32 notificationTime(0); // Not supported in VoE
+    CodecInst dummyCodec = { 100, "L16", 16000, 320, 1, 320000 };
+
+    if (codecInst != NULL && codecInst->channels != 1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_ARGUMENT, kTraceError,
+            "StartRecordingCall() invalid compression");
+        return (-1);
+    }
+    if (codecInst == NULL)
+    {
+        format = kFileFormatPcm16kHzFile;
+        codecInst = &dummyCodec;
+    } else if ((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
+    {
+        format = kFileFormatWavFile;
+    } else
+    {
+        format = kFileFormatCompressedFile;
+    }
+
+    CriticalSectionScoped cs(&_critSect);
+
+    // Destroy the old instance
+    if (_fileCallRecorderPtr)
+    {
+        _fileCallRecorderPtr->RegisterModuleFileCallback(NULL);
+        FileRecorder::DestroyFileRecorder(_fileCallRecorderPtr);
+        _fileCallRecorderPtr = NULL;
+    }
+
+    _fileCallRecorderPtr
+        = FileRecorder::CreateFileRecorder(_fileCallRecorderId,
+                                           (const FileFormats) format);
+    if (_fileCallRecorderPtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "StartRecordingCall() fileRecorder format isnot correct");
+        return -1;
+    }
+
+    if (_fileCallRecorderPtr->StartRecordingAudioFile(
+        fileName,
+        (const CodecInst&) *codecInst,
+        notificationTime) != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_FILE, kTraceError,
+            "StartRecordingAudioFile() failed to start file recording");
+        _fileCallRecorderPtr->StopRecording();
+        FileRecorder::DestroyFileRecorder(_fileCallRecorderPtr);
+        _fileCallRecorderPtr = NULL;
+        return -1;
+    }
+    _fileCallRecorderPtr->RegisterModuleFileCallback(this);
+    _fileCallRecording = true;
+
+    return 0;
+}
+
+int TransmitMixer::StartRecordingCall(OutStream* stream,
+                                      const  CodecInst* codecInst)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::StartRecordingCall()");
+
+    if (_fileCallRecording)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                     "StartRecordingCall() is already recording");
+        return 0;
+    }
+
+    FileFormats format;
+    const WebRtc_UWord32 notificationTime(0); // Not supported in VoE
+    CodecInst dummyCodec = { 100, "L16", 16000, 320, 1, 320000 };
+
+    if (codecInst != NULL && codecInst->channels != 1)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_BAD_ARGUMENT, kTraceError,
+            "StartRecordingCall() invalid compression");
+        return (-1);
+    }
+    if (codecInst == NULL)
+    {
+        format = kFileFormatPcm16kHzFile;
+        codecInst = &dummyCodec;
+    } else if ((STR_CASE_CMP(codecInst->plname,"L16") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMU") == 0) ||
+        (STR_CASE_CMP(codecInst->plname,"PCMA") == 0))
+    {
+        format = kFileFormatWavFile;
+    } else
+    {
+        format = kFileFormatCompressedFile;
+    }
+
+    CriticalSectionScoped cs(&_critSect);
+
+    // Destroy the old instance
+    if (_fileCallRecorderPtr)
+    {
+        _fileCallRecorderPtr->RegisterModuleFileCallback(NULL);
+        FileRecorder::DestroyFileRecorder(_fileCallRecorderPtr);
+        _fileCallRecorderPtr = NULL;
+    }
+
+    _fileCallRecorderPtr =
+        FileRecorder::CreateFileRecorder(_fileCallRecorderId,
+                                         (const FileFormats) format);
+    if (_fileCallRecorderPtr == NULL)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_INVALID_ARGUMENT, kTraceError,
+            "StartRecordingCall() fileRecorder format isnot correct");
+        return -1;
+    }
+
+    if (_fileCallRecorderPtr->StartRecordingAudioFile(*stream,
+                                                      *codecInst,
+                                                      notificationTime) != 0)
+    {
+    _engineStatisticsPtr->SetLastError(VE_BAD_FILE, kTraceError,
+     "StartRecordingAudioFile() failed to start file recording");
+    _fileCallRecorderPtr->StopRecording();
+    FileRecorder::DestroyFileRecorder(_fileCallRecorderPtr);
+    _fileCallRecorderPtr = NULL;
+    return -1;
+    }
+
+    _fileCallRecorderPtr->RegisterModuleFileCallback(this);
+    _fileCallRecording = true;
+
+    return 0;
+}
+
+int TransmitMixer::StopRecordingCall()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::StopRecordingCall()");
+
+    if (!_fileCallRecording)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
+                     "StopRecordingCall() file isnot recording");
+        return -1;
+    }
+
+    CriticalSectionScoped cs(&_critSect);
+
+    if (_fileCallRecorderPtr->StopRecording() != 0)
+    {
+        _engineStatisticsPtr->SetLastError(
+            VE_STOP_RECORDING_FAILED, kTraceError,
+            "StopRecording(), could not stop recording");
+        return -1;
+    }
+
+    _fileCallRecorderPtr->RegisterModuleFileCallback(NULL);
+    FileRecorder::DestroyFileRecorder(_fileCallRecorderPtr);
+    _fileCallRecorderPtr = NULL;
+    _fileCallRecording = false;
+
+    return 0;
+}
+
+void
+TransmitMixer::SetMixWithMicStatus(bool mix)
+{
+    _mixFileWithMicrophone = mix;
+}
+
+int TransmitMixer::RegisterExternalMediaProcessing(
+    VoEMediaProcess& proccess_object)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::RegisterExternalMediaProcessing()");
+
+    CriticalSectionScoped cs(&_callbackCritSect);
+    _externalMediaCallbackPtr = &proccess_object;
+    _externalMedia = true;
+
+    return 0;
+}
+
+int TransmitMixer::DeRegisterExternalMediaProcessing()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::DeRegisterExternalMediaProcessing()");
+
+    CriticalSectionScoped cs(&_callbackCritSect);
+    _externalMedia = false;
+    _externalMediaCallbackPtr = NULL;
+
+    return 0;
+}
+
+int
+TransmitMixer::SetMute(bool enable)
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_instanceId, -1),
+                 "TransmitMixer::SetMute(enable=%d)", enable);
+    _mute = enable;
+    return 0;
+}
+
+bool
+TransmitMixer::Mute() const
+{
+    return _mute;
+}
+
+WebRtc_Word8 TransmitMixer::AudioLevel() const
+{
+    // Speech + file level [0,9]
+    return _audioLevel.Level();
+}
+
+WebRtc_Word16 TransmitMixer::AudioLevelFullRange() const
+{
+    // Speech + file level [0,32767]
+    return _audioLevel.LevelFullRange();
+}
+
+bool TransmitMixer::IsRecordingCall()
+{
+    return _fileCallRecording;
+}
+
+bool TransmitMixer::IsRecordingMic()
+{
+
+    return _fileRecording;
+}
+
+// TODO(andrew): use RemixAndResample for this.
+int TransmitMixer::GenerateAudioFrame(const int16_t audio[],
+                                      int samples_per_channel,
+                                      int num_channels,
+                                      int sample_rate_hz)
+{
+    const int16_t* audio_ptr = audio;
+    int16_t mono_audio[kMaxMonoDeviceDataSizeSamples];
+    assert(samples_per_channel <= kMaxMonoDeviceDataSizeSamples);
+    // If no stereo codecs are in use, we downmix a stereo stream from the
+    // device early in the chain, before resampling.
+    if (num_channels == 2 && !stereo_codec_) {
+      AudioFrameOperations::StereoToMono(audio, samples_per_channel,
+                                         mono_audio);
+      audio_ptr = mono_audio;
+      num_channels = 1;
+    }
+
+    ResamplerType resampler_type = (num_channels == 1) ?
+            kResamplerSynchronous : kResamplerSynchronousStereo;
+
+    if (_audioResampler.ResetIfNeeded(sample_rate_hz,
+                                      _mixingFrequency,
+                                      resampler_type) != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
+                     "TransmitMixer::GenerateAudioFrame() unable to resample");
+        return -1;
+    }
+    if (_audioResampler.Push(audio_ptr,
+                             samples_per_channel * num_channels,
+                             _audioFrame.data_,
+                             AudioFrame::kMaxDataSizeSamples,
+                             _audioFrame.samples_per_channel_) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_instanceId, -1),
+                     "TransmitMixer::GenerateAudioFrame() resampling failed");
+        return -1;
+    }
+
+    _audioFrame.samples_per_channel_ /= num_channels;
+    _audioFrame.id_ = _instanceId;
+    _audioFrame.timestamp_ = -1;
+    _audioFrame.sample_rate_hz_ = _mixingFrequency;
+    _audioFrame.speech_type_ = AudioFrame::kNormalSpeech;
+    _audioFrame.vad_activity_ = AudioFrame::kVadUnknown;
+    _audioFrame.num_channels_ = num_channels;
+
+    return 0;
+}
+
+WebRtc_Word32 TransmitMixer::RecordAudioToFile(
+    const WebRtc_UWord32 mixingFrequency)
+{
+    CriticalSectionScoped cs(&_critSect);
+    if (_fileRecorderPtr == NULL)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                     "TransmitMixer::RecordAudioToFile() filerecorder doesnot"
+                     "exist");
+        return -1;
+    }
+
+    if (_fileRecorderPtr->RecordAudioToFile(_audioFrame) != 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                     "TransmitMixer::RecordAudioToFile() file recording"
+                     "failed");
+        return -1;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 TransmitMixer::MixOrReplaceAudioWithFile(
+    const int mixingFrequency)
+{
+    scoped_array<WebRtc_Word16> fileBuffer(new WebRtc_Word16[640]);
+
+    int fileSamples(0);
+    {
+        CriticalSectionScoped cs(&_critSect);
+        if (_filePlayerPtr == NULL)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                         VoEId(_instanceId, -1),
+                         "TransmitMixer::MixOrReplaceAudioWithFile()"
+                         "fileplayer doesnot exist");
+            return -1;
+        }
+
+        if (_filePlayerPtr->Get10msAudioFromFile(fileBuffer.get(),
+                                                 fileSamples,
+                                                 mixingFrequency) == -1)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                         "TransmitMixer::MixOrReplaceAudioWithFile() file"
+                         " mixing failed");
+            return -1;
+        }
+    }
+
+    assert(_audioFrame.samples_per_channel_ == fileSamples);
+
+    if (_mixFileWithMicrophone)
+    {
+        // Currently file stream is always mono.
+        // TODO(xians): Change the code when FilePlayer supports real stereo.
+        Utility::MixWithSat(_audioFrame.data_,
+                            _audioFrame.num_channels_,
+                            fileBuffer.get(),
+                            1,
+                            fileSamples);
+    } else
+    {
+        // Replace ACM audio with file.
+        // Currently file stream is always mono.
+        // TODO(xians): Change the code when FilePlayer supports real stereo.
+        _audioFrame.UpdateFrame(-1,
+                                -1,
+                                fileBuffer.get(),
+                                fileSamples,
+                                mixingFrequency,
+                                AudioFrame::kNormalSpeech,
+                                AudioFrame::kVadUnknown,
+                                1);
+    }
+    return 0;
+}
+
+WebRtc_Word32 TransmitMixer::APMProcessStream(
+    const WebRtc_UWord16 totalDelayMS,
+    const WebRtc_Word32 clockDrift,
+    const WebRtc_UWord16 currentMicLevel)
+{
+    WebRtc_UWord16 captureLevel(currentMicLevel);
+
+    // Check if the number of incoming channels has changed. This has taken
+    // both the capture device and send codecs into account.
+    if (_audioFrame.num_channels_ !=
+        _audioProcessingModulePtr->num_input_channels())
+    {
+        if (_audioProcessingModulePtr->set_num_channels(
+                _audioFrame.num_channels_,
+                _audioFrame.num_channels_))
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                         "AudioProcessing::set_num_channels(%d, %d) => error",
+                         _audioFrame.num_channels_,
+                         _audioProcessingModulePtr->num_output_channels());
+        }
+    }
+
+    // If the frequency has changed we need to change APM settings
+    // Sending side is "master"
+    if (_audioProcessingModulePtr->sample_rate_hz() !=
+        _audioFrame.sample_rate_hz_)
+    {
+        if (_audioProcessingModulePtr->set_sample_rate_hz(
+                _audioFrame.sample_rate_hz_))
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                         "AudioProcessing::set_sample_rate_hz(%u) => error",
+                         _audioFrame.sample_rate_hz_);
+        }
+    }
+
+    if (_audioProcessingModulePtr->set_stream_delay_ms(totalDelayMS) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                     "AudioProcessing::set_stream_delay_ms(%u) => error",
+                     totalDelayMS);
+    }
+    if (_audioProcessingModulePtr->gain_control()->set_stream_analog_level(
+            captureLevel) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                     "AudioProcessing::set_stream_analog_level(%u) => error",
+                     captureLevel);
+    }
+    if (_audioProcessingModulePtr->echo_cancellation()->
+            is_drift_compensation_enabled())
+    {
+        if (_audioProcessingModulePtr->echo_cancellation()->
+                set_stream_drift_samples(clockDrift) == -1)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                "AudioProcessing::set_stream_drift_samples(%u) => error",
+                clockDrift);
+        }
+    }
+    if (_audioProcessingModulePtr->ProcessStream(&_audioFrame) == -1)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                     "AudioProcessing::ProcessStream() => error");
+    }
+    captureLevel =
+        _audioProcessingModulePtr->gain_control()->stream_analog_level();
+
+    // Store new capture level (only updated when analog AGC is enabled)
+    _captureLevel = captureLevel;
+
+    // Log notifications
+    if (_audioProcessingModulePtr->gain_control()->stream_is_saturated())
+    {
+        if (_saturationWarning == 1)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                       "TransmitMixer::APMProcessStream() pending "
+                       "saturation warning exists");
+        }
+        _saturationWarning = 1; // triggers callback from moduleprocess thread
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                   "TransmitMixer::APMProcessStream() VE_SATURATION_WARNING "
+                   "message has been posted for callback");
+    }
+
+    return 0;
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
+int TransmitMixer::TypingDetection()
+{
+
+    // We let the VAD determine if we're using this feature or not.
+    if (_audioFrame.vad_activity_ == AudioFrame::kVadUnknown)
+    {
+        return (0);
+    }
+
+    int keyPressed = EventWrapper::KeyPressed();
+
+    if (keyPressed < 0)
+    {
+        return (-1);
+    }
+
+    if (_audioFrame.vad_activity_ == AudioFrame::kVadActive)
+        _timeActive++;
+    else
+        _timeActive = 0;
+
+    // Keep track if time since last typing event
+    if (keyPressed)
+    {
+      _timeSinceLastTyping = 0;
+    }
+    else
+    {
+      ++_timeSinceLastTyping;
+    }
+
+    if ((_timeSinceLastTyping < _typeEventDelay)
+        && (_audioFrame.vad_activity_ == AudioFrame::kVadActive)
+        && (_timeActive < _timeWindow))
+    {
+        _penaltyCounter += _costPerTyping;
+        if (_penaltyCounter > _reportingThreshold)
+        {
+            if (_typingNoiseWarning == 1)
+            {
+                WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                           VoEId(_instanceId, -1),
+                           "TransmitMixer::TypingDetection() pending "
+                               "noise-saturation warning exists");
+            }
+            // triggers callback from the module process thread
+            _typingNoiseWarning = 1;
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice, VoEId(_instanceId, -1),
+                       "TransmitMixer::TypingDetection() "
+                       "VE_TYPING_NOISE_WARNING message has been posted for"
+                       "callback");
+        }
+    }
+
+    if (_penaltyCounter > 0)
+        _penaltyCounter-=_penaltyDecay;
+
+    return (0);
+}
+#endif
+
+int TransmitMixer::GetMixingFrequency()
+{
+    assert(_mixingFrequency!=0);
+    return (_mixingFrequency);
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
+int TransmitMixer::TimeSinceLastTyping(int &seconds)
+{
+  // We check in VoEAudioProcessingImpl that this is only called when
+  // typing detection is active.
+
+  // Round to whole seconds
+  seconds = (_timeSinceLastTyping + 50) / 100;
+  return(0);
+}
+#endif
+
+#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
+int TransmitMixer::SetTypingDetectionParameters(int timeWindow,
+                                                int costPerTyping,
+                                                int reportingThreshold,
+                                                int penaltyDecay,
+                                                int typeEventDelay)
+{
+  if(timeWindow != 0)
+    _timeWindow = timeWindow;
+  if(costPerTyping != 0)
+    _costPerTyping = costPerTyping;
+  if(reportingThreshold != 0)
+    _reportingThreshold = reportingThreshold;
+  if(penaltyDecay != 0)
+    _penaltyDecay = penaltyDecay;
+  if(typeEventDelay != 0)
+    _typeEventDelay = typeEventDelay;
+
+
+  return(0);
+}
+#endif
+
+void TransmitMixer::EnableStereoChannelSwapping(bool enable) {
+  swap_stereo_channels_ = enable;
+}
+
+bool TransmitMixer::IsStereoChannelSwappingEnabled() {
+  return swap_stereo_channels_;
+}
+
+}  //  namespace voe
+
+}  //  namespace webrtc
diff --git a/src/voice_engine/transmit_mixer.h b/src/voice_engine/transmit_mixer.h
new file mode 100644
index 0000000..da87218
--- /dev/null
+++ b/src/voice_engine/transmit_mixer.h
@@ -0,0 +1,244 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_TRANSMIT_MIXER_H
+#define WEBRTC_VOICE_ENGINE_TRANSMIT_MIXER_H
+
+#include "common_types.h"
+#include "voe_base.h"
+#include "file_player.h"
+#include "file_recorder.h"
+#include "level_indicator.h"
+#include "module_common_types.h"
+#include "monitor_module.h"
+#include "resampler.h"
+#include "voice_engine_defines.h"
+
+
+namespace webrtc {
+
+class AudioProcessing;
+class ProcessThread;
+class VoEExternalMedia;
+class VoEMediaProcess;
+
+namespace voe {
+
+class ChannelManager;
+class MixedAudio;
+class Statistics;
+
+class TransmitMixer : public MonitorObserver,
+                      public FileCallback
+
+{
+public:
+    static WebRtc_Word32 Create(TransmitMixer*& mixer,
+                                const WebRtc_UWord32 instanceId);
+
+    static void Destroy(TransmitMixer*& mixer);
+
+    WebRtc_Word32 SetEngineInformation(ProcessThread& processThread,
+                                       Statistics& engineStatistics,
+                                       ChannelManager& channelManager);
+
+    WebRtc_Word32 SetAudioProcessingModule(
+        AudioProcessing* audioProcessingModule);
+
+    WebRtc_Word32 PrepareDemux(const void* audioSamples,
+                               const WebRtc_UWord32 nSamples,
+                               const WebRtc_UWord8  nChannels,
+                               const WebRtc_UWord32 samplesPerSec,
+                               const WebRtc_UWord16 totalDelayMS,
+                               const WebRtc_Word32  clockDrift,
+                               const WebRtc_UWord16 currentMicLevel);
+
+
+    WebRtc_Word32 DemuxAndMix();
+
+    WebRtc_Word32 EncodeAndSend();
+
+    WebRtc_UWord32 CaptureLevel() const;
+
+    WebRtc_Word32 StopSend();
+
+    // VoEDtmf
+    void UpdateMuteMicrophoneTime(const WebRtc_UWord32 lengthMs);
+
+    // VoEExternalMedia
+    int RegisterExternalMediaProcessing(VoEMediaProcess& proccess_object);
+
+    int DeRegisterExternalMediaProcessing();
+
+    int GetMixingFrequency();
+
+    // VoEVolumeControl
+    int SetMute(const bool enable);
+
+    bool Mute() const;
+
+    WebRtc_Word8 AudioLevel() const;
+
+    WebRtc_Word16 AudioLevelFullRange() const;
+
+    bool IsRecordingCall();
+
+    bool IsRecordingMic();
+
+    int StartPlayingFileAsMicrophone(const char* fileName,
+                                     const bool loop,
+                                     const FileFormats format,
+                                     const int startPosition,
+                                     const float volumeScaling,
+                                     const int stopPosition,
+                                     const CodecInst* codecInst);
+
+    int StartPlayingFileAsMicrophone(InStream* stream,
+                                     const FileFormats format,
+                                     const int startPosition,
+                                     const float volumeScaling,
+                                     const int stopPosition,
+                                     const CodecInst* codecInst);
+
+    int StopPlayingFileAsMicrophone();
+
+    int IsPlayingFileAsMicrophone() const;
+
+    int ScaleFileAsMicrophonePlayout(const float scale);
+
+    int StartRecordingMicrophone(const char* fileName,
+                                 const CodecInst* codecInst);
+
+    int StartRecordingMicrophone(OutStream* stream,
+                                 const CodecInst* codecInst);
+
+    int StopRecordingMicrophone();
+
+    int StartRecordingCall(const char* fileName, const CodecInst* codecInst);
+
+    int StartRecordingCall(OutStream* stream, const CodecInst* codecInst);
+
+    int StopRecordingCall();
+
+    void SetMixWithMicStatus(bool mix);
+
+    WebRtc_Word32 RegisterVoiceEngineObserver(VoiceEngineObserver& observer);
+
+    virtual ~TransmitMixer();
+
+    // MonitorObserver
+    void OnPeriodicProcess();
+
+
+    // FileCallback
+    void PlayNotification(const WebRtc_Word32 id,
+                          const WebRtc_UWord32 durationMs);
+
+    void RecordNotification(const WebRtc_Word32 id,
+                            const WebRtc_UWord32 durationMs);
+
+    void PlayFileEnded(const WebRtc_Word32 id);
+
+    void RecordFileEnded(const WebRtc_Word32 id);
+
+#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
+    // Typing detection
+    int TimeSinceLastTyping(int &seconds);
+    int SetTypingDetectionParameters(int timeWindow,
+                                     int costPerTyping,
+                                     int reportingThreshold,
+                                     int penaltyDecay,
+                                     int typeEventDelay);
+#endif
+
+  void EnableStereoChannelSwapping(bool enable);
+  bool IsStereoChannelSwappingEnabled();
+
+private:
+    TransmitMixer(const WebRtc_UWord32 instanceId);
+
+    void CheckForSendCodecChanges();
+
+    int GenerateAudioFrame(const int16_t audioSamples[],
+                           int nSamples,
+                           int nChannels,
+                           int samplesPerSec);
+    WebRtc_Word32 RecordAudioToFile(const WebRtc_UWord32 mixingFrequency);
+
+    WebRtc_Word32 MixOrReplaceAudioWithFile(
+        const int mixingFrequency);
+
+    WebRtc_Word32 APMProcessStream(const WebRtc_UWord16 totalDelayMS,
+                                   const WebRtc_Word32 clockDrift,
+                                   const WebRtc_UWord16 currentMicLevel);
+
+#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
+    int TypingDetection();
+#endif
+
+    // uses
+    Statistics* _engineStatisticsPtr;
+    ChannelManager* _channelManagerPtr;
+    AudioProcessing* _audioProcessingModulePtr;
+    VoiceEngineObserver* _voiceEngineObserverPtr;
+    ProcessThread* _processThreadPtr;
+
+    // owns
+    MonitorModule _monitorModule;
+    AudioFrame _audioFrame;
+    Resampler _audioResampler;		// ADM sample rate -> mixing rate
+    FilePlayer*	_filePlayerPtr;
+    FileRecorder* _fileRecorderPtr;
+    FileRecorder* _fileCallRecorderPtr;
+    int _filePlayerId;
+    int _fileRecorderId;
+    int _fileCallRecorderId;
+    bool _filePlaying;
+    bool _fileRecording;
+    bool _fileCallRecording;
+    voe::AudioLevel _audioLevel;
+    // protect file instances and their variables in MixedParticipants()
+    CriticalSectionWrapper& _critSect;
+    CriticalSectionWrapper& _callbackCritSect;
+
+#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
+    WebRtc_Word32 _timeActive;
+    WebRtc_Word32 _timeSinceLastTyping;
+    WebRtc_Word32 _penaltyCounter;
+    WebRtc_UWord32 _typingNoiseWarning;
+
+    // Tunable treshold values
+    int _timeWindow; // nr of10ms slots accepted to count as a hit.
+    int _costPerTyping; // Penalty added for a typing + activity coincide.
+    int _reportingThreshold; // Threshold for _penaltyCounter.
+    int _penaltyDecay; // How much we reduce _penaltyCounter every 10 ms.
+    int _typeEventDelay; // How old typing events we allow
+
+#endif
+    WebRtc_UWord32 _saturationWarning;
+    WebRtc_UWord32 _noiseWarning;
+
+    int _instanceId;
+    bool _mixFileWithMicrophone;
+    WebRtc_UWord32 _captureLevel;
+    bool _externalMedia;
+    VoEMediaProcess* _externalMediaCallbackPtr;
+    bool _mute;
+    WebRtc_Word32 _remainingMuteMicTimeMs;
+    int _mixingFrequency;
+    bool stereo_codec_;
+    bool swap_stereo_channels_;
+};
+
+#endif // WEBRTC_VOICE_ENGINE_TRANSMIT_MIXER_H
+
+}  //  namespace voe
+
+}  // namespace webrtc
diff --git a/src/voice_engine/utility.cc b/src/voice_engine/utility.cc
new file mode 100644
index 0000000..1ef108e
--- /dev/null
+++ b/src/voice_engine/utility.cc
@@ -0,0 +1,125 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "utility.h"
+
+#include "module.h"
+#include "trace.h"
+#include "signal_processing_library.h"
+
+namespace webrtc
+{
+
+namespace voe
+{
+enum{kMaxTargetLen = 2*32*10}; // stereo 32KHz 10ms
+
+void Utility::MixWithSat(WebRtc_Word16 target[],
+                         int target_channel,
+                         const WebRtc_Word16 source[],
+                         int source_channel,
+                         int source_len)
+{
+    assert((target_channel == 1) || (target_channel == 2));
+    assert((source_channel == 1) || (source_channel == 2));
+    assert(source_len <= kMaxTargetLen);
+
+    if ((target_channel == 2) && (source_channel == 1))
+    {
+        // Convert source from mono to stereo.
+        WebRtc_Word32 left = 0;
+        WebRtc_Word32 right = 0;
+        for (int i = 0; i < source_len; ++i) {
+            left  = source[i] + target[i*2];
+            right = source[i] + target[i*2 + 1];
+            target[i*2]     = WebRtcSpl_SatW32ToW16(left);
+            target[i*2 + 1] = WebRtcSpl_SatW32ToW16(right);
+        }
+    }
+    else if ((target_channel == 1) && (source_channel == 2))
+    {
+        // Convert source from stereo to mono.
+        WebRtc_Word32 temp = 0;
+        for (int i = 0; i < source_len/2; ++i) {
+          temp = ((source[i*2] + source[i*2 + 1])>>1) + target[i];
+          target[i] = WebRtcSpl_SatW32ToW16(temp);
+        }
+    }
+    else
+    {
+        WebRtc_Word32 temp = 0;
+        for (int i = 0; i < source_len; ++i) {
+          temp = source[i] + target[i];
+          target[i] = WebRtcSpl_SatW32ToW16(temp);
+        }
+    }
+}
+
+void Utility::MixSubtractWithSat(WebRtc_Word16 target[],
+                                 const WebRtc_Word16 source[],
+                                 WebRtc_UWord16 len)
+{
+    WebRtc_Word32 temp(0);
+    for (int i = 0; i < len; i++)
+    {
+        temp = target[i] - source[i];
+        if (temp > 32767)
+            target[i] = 32767;
+        else if (temp < -32768)
+            target[i] = -32768;
+        else
+            target[i] = (WebRtc_Word16) temp;
+    }
+}
+
+void Utility::MixAndScaleWithSat(WebRtc_Word16 target[],
+                                 const WebRtc_Word16 source[], float scale,
+                                 WebRtc_UWord16 len)
+{
+    WebRtc_Word32 temp(0);
+    for (int i = 0; i < len; i++)
+    {
+        temp = (WebRtc_Word32) (target[i] + scale * source[i]);
+        if (temp > 32767)
+            target[i] = 32767;
+        else if (temp < -32768)
+            target[i] = -32768;
+        else
+            target[i] = (WebRtc_Word16) temp;
+    }
+}
+
+void Utility::Scale(WebRtc_Word16 vector[], float scale, WebRtc_UWord16 len)
+{
+    for (int i = 0; i < len; i++)
+    {
+        vector[i] = (WebRtc_Word16) (scale * vector[i]);
+    }
+}
+
+void Utility::ScaleWithSat(WebRtc_Word16 vector[], float scale,
+                           WebRtc_UWord16 len)
+{
+    WebRtc_Word32 temp(0);
+    for (int i = 0; i < len; i++)
+    {
+        temp = (WebRtc_Word32) (scale * vector[i]);
+        if (temp > 32767)
+            vector[i] = 32767;
+        else if (temp < -32768)
+            vector[i] = -32768;
+        else
+            vector[i] = (WebRtc_Word16) temp;
+    }
+}
+
+} // namespace voe
+
+} // namespace webrtc
diff --git a/src/voice_engine/utility.h b/src/voice_engine/utility.h
new file mode 100644
index 0000000..a8af8bd
--- /dev/null
+++ b/src/voice_engine/utility.h
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  Contains functions often used by different parts of VoiceEngine.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_UTILITY_H
+#define WEBRTC_VOICE_ENGINE_UTILITY_H
+
+#include "typedefs.h"
+#include "voice_engine_defines.h"
+
+namespace webrtc
+{
+
+class Module;
+
+namespace voe
+{
+
+class Utility
+{
+public:
+    static void MixWithSat(WebRtc_Word16 target[],
+                           int target_channel,
+                           const WebRtc_Word16 source[],
+                           int source_channel,
+                           int source_len);
+
+    static void MixSubtractWithSat(WebRtc_Word16 target[],
+                                   const WebRtc_Word16 source[],
+                                   WebRtc_UWord16 len);
+
+    static void MixAndScaleWithSat(WebRtc_Word16 target[],
+                                   const WebRtc_Word16 source[],
+                                   float scale,
+                                   WebRtc_UWord16 len);
+
+    static void Scale(WebRtc_Word16 vector[], float scale, WebRtc_UWord16 len);
+
+    static void ScaleWithSat(WebRtc_Word16 vector[],
+                             float scale,
+                             WebRtc_UWord16 len);
+};
+
+} // namespace voe
+
+} // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_UTILITY_H
diff --git a/src/voice_engine/voe_audio_processing_impl.cc b/src/voice_engine/voe_audio_processing_impl.cc
new file mode 100644
index 0000000..3012687
--- /dev/null
+++ b/src/voice_engine/voe_audio_processing_impl.cc
@@ -0,0 +1,1154 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_audio_processing_impl.h"
+
+#include "audio_processing.h"
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "trace.h"
+#include "transmit_mixer.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+// TODO(andrew): move to a common place.
+#define WEBRTC_TRACE_VOICE_API()                                   \
+  do {                                                             \
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice,                       \
+                 VoEId(_shared->instance_id(), -1), __FUNCTION__); \
+  } while (0)
+
+#define WEBRTC_VOICE_INIT_CHECK()                        \
+  do {                                                   \
+    if (!_shared->statistics().Initialized()) {          \
+      _shared->SetLastError(VE_NOT_INITED, kTraceError); \
+      return -1;                                         \
+    }                                                    \
+  } while (0)
+
+#define WEBRTC_VOICE_INIT_CHECK_BOOL()                   \
+  do {                                                   \
+    if (!_shared->statistics().Initialized()) {          \
+      _shared->SetLastError(VE_NOT_INITED, kTraceError); \
+      return false;                                      \
+    }                                                    \
+  } while (0)
+
+
+namespace webrtc {
+
+#if defined(WEBRTC_ANDROID) || defined(MAC_IPHONE) || defined(MAC_IPHONE_SIM)
+static const EcModes kDefaultEcMode = kEcAecm;
+#else
+static const EcModes kDefaultEcMode = kEcAec;
+#endif
+
+VoEAudioProcessing* VoEAudioProcessing::GetInterface(VoiceEngine* voiceEngine) {
+#ifndef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
+  return NULL;
+#else
+  if (NULL == voiceEngine) {
+    return NULL;
+  }
+  VoiceEngineImpl* s = reinterpret_cast<VoiceEngineImpl*>(voiceEngine);
+  s->AddRef();
+  return s;
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
+VoEAudioProcessingImpl::VoEAudioProcessingImpl(voe::SharedData* shared)
+    : _isAecMode(kDefaultEcMode == kEcAec),
+      _shared(shared) {
+  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "VoEAudioProcessingImpl::VoEAudioProcessingImpl() - ctor");
+}
+
+VoEAudioProcessingImpl::~VoEAudioProcessingImpl() {
+  WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "VoEAudioProcessingImpl::~VoEAudioProcessingImpl() - dtor");
+}
+
+int VoEAudioProcessingImpl::SetNsStatus(bool enable, NsModes mode) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "SetNsStatus(enable=%d, mode=%d)", enable, mode);
+#ifdef WEBRTC_VOICE_ENGINE_NR
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+  NoiseSuppression::Level nsLevel(
+      (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_NS_DEFAULT_MODE);
+  switch (mode) {
+    case kNsDefault:
+      nsLevel = (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_NS_DEFAULT_MODE;
+      break;
+    case kNsUnchanged:
+      nsLevel = _shared->audio_processing()->noise_suppression()->level();
+      break;
+    case kNsConference:
+      nsLevel = NoiseSuppression::kHigh;
+      break;
+    case kNsLowSuppression:
+      nsLevel = NoiseSuppression::kLow;
+      break;
+    case kNsModerateSuppression:
+      nsLevel = NoiseSuppression::kModerate;
+      break;
+    case kNsHighSuppression:
+      nsLevel = NoiseSuppression::kHigh;
+      break;
+    case kNsVeryHighSuppression:
+      nsLevel = NoiseSuppression::kVeryHigh;
+      break;
+  }
+
+  if (_shared->audio_processing()->noise_suppression()->
+          set_level(nsLevel) != 0) {
+    _shared->SetLastError(VE_APM_ERROR, kTraceError,
+        "SetNsStatus() failed to set Ns mode");
+    return -1;
+  }
+  if (_shared->audio_processing()->noise_suppression()->Enable(enable) != 0) {
+    _shared->SetLastError(VE_APM_ERROR, kTraceError,
+        "SetNsStatus() failed to set Ns state");
+    return -1;
+  }
+
+  return 0;
+#else
+  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "SetNsStatus() Ns is not supported");
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetNsStatus(bool& enabled, NsModes& mode) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetNsStatus(enabled=?, mode=?)");
+#ifdef WEBRTC_VOICE_ENGINE_NR
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+  bool enable(false);
+  NoiseSuppression::Level nsLevel(
+      (NoiseSuppression::Level)WEBRTC_VOICE_ENGINE_NS_DEFAULT_MODE);
+
+  enable = _shared->audio_processing()->noise_suppression()->is_enabled();
+  nsLevel = _shared->audio_processing()->noise_suppression()->level();
+
+  enabled = enable;
+
+  switch (nsLevel) {
+    case NoiseSuppression::kLow:
+      mode = kNsLowSuppression;
+      break;
+    case NoiseSuppression::kModerate:
+      mode = kNsModerateSuppression;
+      break;
+    case NoiseSuppression::kHigh:
+      mode = kNsHighSuppression;
+      break;
+    case NoiseSuppression::kVeryHigh:
+      mode = kNsVeryHighSuppression;
+      break;
+  }
+
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetNsStatus() => enabled=% d, mode=%d", enabled, mode);
+  return 0;
+#else
+  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "GetNsStatus() Ns is not supported");
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::SetAgcStatus(bool enable, AgcModes mode) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "SetAgcStatus(enable=%d, mode=%d)", enable, mode);
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+#if defined(MAC_IPHONE) || defined(ATA) || defined(WEBRTC_ANDROID)
+  if (mode == kAgcAdaptiveAnalog) {
+    _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+        "SetAgcStatus() invalid Agc mode for mobile device");
+    return -1;
+  }
+#endif
+
+  GainControl::Mode agcMode(
+     (GainControl::Mode)WEBRTC_VOICE_ENGINE_AGC_DEFAULT_MODE);
+  switch (mode) {
+    case kAgcDefault:
+      agcMode = (GainControl::Mode)WEBRTC_VOICE_ENGINE_AGC_DEFAULT_MODE;
+      break;
+    case kAgcUnchanged:
+      agcMode = _shared->audio_processing()->gain_control()->mode();;
+      break;
+    case kAgcFixedDigital:
+      agcMode = GainControl::kFixedDigital;
+      break;
+    case kAgcAdaptiveAnalog:
+      agcMode = GainControl::kAdaptiveAnalog;
+      break;
+    case kAgcAdaptiveDigital:
+      agcMode = GainControl::kAdaptiveDigital;
+      break;
+  }
+
+  if (_shared->audio_processing()->gain_control()->set_mode(agcMode) != 0) {
+    _shared->SetLastError(VE_APM_ERROR, kTraceError,
+        "SetAgcStatus() failed to set Agc mode");
+    return -1;
+  }
+  if (_shared->audio_processing()->gain_control()->Enable(enable) != 0) {
+    _shared->SetLastError(VE_APM_ERROR, kTraceError,
+        "SetAgcStatus() failed to set Agc state");
+    return -1;
+  }
+
+  if (agcMode != GainControl::kFixedDigital) {
+    // Set Agc state in the ADM when adaptive Agc mode has been selected.
+    // Note that we also enable the ADM Agc when Adaptive Digital mode is
+    // used since we want to be able to provide the APM with updated mic
+    // levels when the user modifies the mic level manually.
+    if (_shared->audio_device()->SetAGC(enable) != 0) {
+      _shared->SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR,
+          kTraceWarning, "SetAgcStatus() failed to set Agc mode");
+    }
+  }
+
+  return 0;
+#else
+  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "SetAgcStatus() Agc is not supported");
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetAgcStatus(bool& enabled, AgcModes& mode) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetAgcStatus(enabled=?, mode=?)");
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+  bool enable(false);
+  GainControl::Mode agcMode(
+    (GainControl::Mode)WEBRTC_VOICE_ENGINE_AGC_DEFAULT_MODE);
+
+  enable = _shared->audio_processing()->gain_control()->is_enabled();
+  agcMode = _shared->audio_processing()->gain_control()->mode();
+
+  enabled = enable;
+
+  switch (agcMode) {
+    case GainControl::kFixedDigital:
+      mode = kAgcFixedDigital;
+      break;
+    case GainControl::kAdaptiveAnalog:
+      mode = kAgcAdaptiveAnalog;
+      break;
+    case GainControl::kAdaptiveDigital:
+      mode = kAgcAdaptiveDigital;
+      break;
+  }
+
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetAgcStatus() => enabled=%d, mode=%d", enabled, mode);
+  return 0;
+#else
+  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "GetAgcStatus() Agc is not supported");
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::SetAgcConfig(const AgcConfig config) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "SetAgcConfig()");
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+  if (_shared->audio_processing()->gain_control()->set_target_level_dbfs(
+      config.targetLeveldBOv) != 0) {
+    _shared->SetLastError(VE_APM_ERROR, kTraceError,
+        "SetAgcConfig() failed to set target peak |level|"
+        " (or envelope) of the Agc");
+    return -1;
+  }
+  if (_shared->audio_processing()->gain_control()->set_compression_gain_db(
+        config.digitalCompressionGaindB) != 0) {
+    _shared->SetLastError(VE_APM_ERROR, kTraceError,
+        "SetAgcConfig() failed to set the range in |gain| "
+        "the digital compression stage may apply");
+    return -1;
+  }
+  if (_shared->audio_processing()->gain_control()->enable_limiter(
+        config.limiterEnable) != 0) {
+    _shared->SetLastError(VE_APM_ERROR, kTraceError,
+        "SetAgcConfig() failed to set hard limiter to the signal");
+    return -1;
+  }
+
+  return 0;
+#else
+  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "SetAgcConfig() EC is not supported");
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetAgcConfig(AgcConfig& config) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetAgcConfig(config=?)");
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+  config.targetLeveldBOv =
+    _shared->audio_processing()->gain_control()->target_level_dbfs();
+  config.digitalCompressionGaindB =
+    _shared->audio_processing()->gain_control()->compression_gain_db();
+  config.limiterEnable =
+    _shared->audio_processing()->gain_control()->is_limiter_enabled();
+
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetAgcConfig() => targetLeveldBOv=%u, "
+                  "digitalCompressionGaindB=%u, limiterEnable=%d",
+               config.targetLeveldBOv,
+               config.digitalCompressionGaindB,
+               config.limiterEnable);
+
+  return 0;
+#else
+  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "GetAgcConfig() EC is not supported");
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::SetRxNsStatus(int channel,
+                                          bool enable,
+                                          NsModes mode) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "SetRxNsStatus(channel=%d, enable=%d, mode=%d)",
+               channel, (int)enable, (int)mode);
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+  voe::ScopedChannel sc(_shared->channel_manager(), channel);
+  voe::Channel* channelPtr = sc.ChannelPtr();
+  if (channelPtr == NULL) {
+    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+        "SetRxNsStatus() failed to locate channel");
+    return -1;
+  }
+  return channelPtr->SetRxNsStatus(enable, mode);
+#else
+  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "SetRxNsStatus() AGC is not supported");
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetRxNsStatus(int channel,
+                                          bool& enabled,
+                                          NsModes& mode) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetRxNsStatus(channel=%d, enable=?, mode=?)", channel);
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+  voe::ScopedChannel sc(_shared->channel_manager(), channel);
+  voe::Channel* channelPtr = sc.ChannelPtr();
+  if (channelPtr == NULL) {
+    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+        "GetRxNsStatus() failed to locate channel");
+    return -1;
+  }
+  return channelPtr->GetRxNsStatus(enabled, mode);
+#else
+  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "GetRxNsStatus() Agc is not supported");
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::SetRxAgcStatus(int channel,
+                                           bool enable,
+                                           AgcModes mode) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "SetRxAgcStatus(channel=%d, enable=%d, mode=%d)",
+               channel, (int)enable, (int)mode);
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+  voe::ScopedChannel sc(_shared->channel_manager(), channel);
+  voe::Channel* channelPtr = sc.ChannelPtr();
+  if (channelPtr == NULL) {
+    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+        "SetRxAgcStatus() failed to locate channel");
+    return -1;
+  }
+  return channelPtr->SetRxAgcStatus(enable, mode);
+#else
+  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "SetRxAgcStatus() Agc is not supported");
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetRxAgcStatus(int channel,
+                                           bool& enabled,
+                                           AgcModes& mode) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetRxAgcStatus(channel=%d, enable=?, mode=?)", channel);
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+  voe::ScopedChannel sc(_shared->channel_manager(), channel);
+  voe::Channel* channelPtr = sc.ChannelPtr();
+  if (channelPtr == NULL) {
+    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+        "GetRxAgcStatus() failed to locate channel");
+    return -1;
+  }
+  return channelPtr->GetRxAgcStatus(enabled, mode);
+#else
+  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "GetRxAgcStatus() Agc is not supported");
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::SetRxAgcConfig(int channel,
+                                           const AgcConfig config) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "SetRxAgcConfig(channel=%d)", channel);
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+  voe::ScopedChannel sc(_shared->channel_manager(), channel);
+  voe::Channel* channelPtr = sc.ChannelPtr();
+  if (channelPtr == NULL) {
+    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+      "SetRxAgcConfig() failed to locate channel");
+    return -1;
+  }
+  return channelPtr->SetRxAgcConfig(config);
+#else
+  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "SetRxAgcConfig() Agc is not supported");
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetRxAgcConfig(int channel, AgcConfig& config) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetRxAgcConfig(channel=%d)", channel);
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+  voe::ScopedChannel sc(_shared->channel_manager(), channel);
+  voe::Channel* channelPtr = sc.ChannelPtr();
+  if (channelPtr == NULL) {
+    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+        "GetRxAgcConfig() failed to locate channel");
+    return -1;
+  }
+  return channelPtr->GetRxAgcConfig(config);
+#else
+  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "GetRxAgcConfig() Agc is not supported");
+  return -1;
+#endif
+}
+
+bool VoEAudioProcessing::DriftCompensationSupported() {
+#if defined(WEBRTC_DRIFT_COMPENSATION_SUPPORTED)
+  return true;
+#else
+  return false;
+#endif
+}
+
+int VoEAudioProcessingImpl::EnableDriftCompensation(bool enable) {
+  WEBRTC_TRACE_VOICE_API();
+  WEBRTC_VOICE_INIT_CHECK();
+
+  if (!DriftCompensationSupported()) {
+    _shared->SetLastError(VE_APM_ERROR, kTraceWarning,
+        "Drift compensation is not supported on this platform.");
+    return -1;
+  }
+
+  EchoCancellation* aec = _shared->audio_processing()->echo_cancellation();
+  if (aec->enable_drift_compensation(enable) != 0) {
+    _shared->SetLastError(VE_APM_ERROR, kTraceError,
+        "aec->enable_drift_compensation() failed");
+    return -1;
+  }
+  return 0;
+}
+
+bool VoEAudioProcessingImpl::DriftCompensationEnabled() {
+  WEBRTC_TRACE_VOICE_API();
+  WEBRTC_VOICE_INIT_CHECK_BOOL();
+
+  EchoCancellation* aec = _shared->audio_processing()->echo_cancellation();
+  return aec->is_drift_compensation_enabled();
+}
+
+int VoEAudioProcessingImpl::SetEcStatus(bool enable, EcModes mode) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "SetEcStatus(enable=%d, mode=%d)", enable, mode);
+#ifdef WEBRTC_VOICE_ENGINE_ECHO
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+  // AEC mode
+  if ((mode == kEcDefault) ||
+      (mode == kEcConference) ||
+      (mode == kEcAec) ||
+      ((mode == kEcUnchanged) &&
+       (_isAecMode == true))) {
+    if (enable) {
+      // Disable the AECM before enable the AEC
+      if (_shared->audio_processing()->echo_control_mobile()->is_enabled()) {
+        _shared->SetLastError(VE_APM_ERROR, kTraceWarning,
+            "SetEcStatus() disable AECM before enabling AEC");
+        if (_shared->audio_processing()->echo_control_mobile()->
+            Enable(false) != 0) {
+          _shared->SetLastError(VE_APM_ERROR, kTraceError,
+              "SetEcStatus() failed to disable AECM");
+          return -1;
+        }
+      }
+    }
+    if (_shared->audio_processing()->echo_cancellation()->Enable(enable) != 0) {
+      _shared->SetLastError(VE_APM_ERROR, kTraceError,
+          "SetEcStatus() failed to set AEC state");
+      return -1;
+    }
+    if (mode == kEcConference) {
+      if (_shared->audio_processing()->echo_cancellation()->
+          set_suppression_level(EchoCancellation::kHighSuppression) != 0) {
+        _shared->SetLastError(VE_APM_ERROR, kTraceError,
+            "SetEcStatus() failed to set aggressiveness to high");
+        return -1;
+      }
+    } else {
+      if (_shared->audio_processing()->echo_cancellation()->
+          set_suppression_level(
+            EchoCancellation::kModerateSuppression) != 0) {
+        _shared->SetLastError(VE_APM_ERROR, kTraceError,
+            "SetEcStatus() failed to set aggressiveness to moderate");
+        return -1;
+      }
+    }
+
+    _isAecMode = true;
+  } else if ((mode == kEcAecm) ||
+             ((mode == kEcUnchanged) &&
+              (_isAecMode == false))) {
+    if (enable) {
+      // Disable the AEC before enable the AECM
+      if (_shared->audio_processing()->echo_cancellation()->is_enabled()) {
+        _shared->SetLastError(VE_APM_ERROR, kTraceWarning,
+            "SetEcStatus() disable AEC before enabling AECM");
+        if (_shared->audio_processing()->echo_cancellation()->
+            Enable(false) != 0) {
+          _shared->SetLastError(VE_APM_ERROR, kTraceError,
+              "SetEcStatus() failed to disable AEC");
+          return -1;
+        }
+      }
+    }
+    if (_shared->audio_processing()->echo_control_mobile()->
+        Enable(enable) != 0) {
+      _shared->SetLastError(VE_APM_ERROR, kTraceError,
+          "SetEcStatus() failed to set AECM state");
+      return -1;
+    }
+    _isAecMode = false;
+  } else {
+    _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+                                   "SetEcStatus() invalid EC mode");
+    return -1;
+  }
+
+  return 0;
+#else
+  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "SetEcStatus() EC is not supported");
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetEcStatus(bool& enabled, EcModes& mode) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetEcStatus()");
+#ifdef WEBRTC_VOICE_ENGINE_ECHO
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+  if (_isAecMode == true) {
+    mode = kEcAec;
+    enabled = _shared->audio_processing()->echo_cancellation()->is_enabled();
+  } else {
+    mode = kEcAecm;
+    enabled = _shared->audio_processing()->echo_control_mobile()->
+              is_enabled();
+  }
+
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetEcStatus() => enabled=%i, mode=%i",
+               enabled, (int)mode);
+  return 0;
+#else
+  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "GetEcStatus() EC is not supported");
+  return -1;
+#endif
+}
+
+void VoEAudioProcessingImpl::SetDelayOffsetMs(int offset) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "SetDelayOffsetMs(offset = %d)", offset);
+  _shared->audio_processing()->set_delay_offset_ms(offset);
+}
+
+int VoEAudioProcessingImpl::DelayOffsetMs() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "DelayOffsetMs()");
+  return _shared->audio_processing()->delay_offset_ms();
+}
+
+int VoEAudioProcessingImpl::SetAecmMode(AecmModes mode, bool enableCNG) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "SetAECMMode(mode = %d)", mode);
+#ifdef WEBRTC_VOICE_ENGINE_ECHO
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+  EchoControlMobile::RoutingMode aecmMode(
+      EchoControlMobile::kQuietEarpieceOrHeadset);
+
+  switch (mode) {
+    case kAecmQuietEarpieceOrHeadset:
+      aecmMode = EchoControlMobile::kQuietEarpieceOrHeadset;
+      break;
+    case kAecmEarpiece:
+      aecmMode = EchoControlMobile::kEarpiece;
+      break;
+    case kAecmLoudEarpiece:
+      aecmMode = EchoControlMobile::kLoudEarpiece;
+      break;
+    case kAecmSpeakerphone:
+      aecmMode = EchoControlMobile::kSpeakerphone;
+      break;
+    case kAecmLoudSpeakerphone:
+      aecmMode = EchoControlMobile::kLoudSpeakerphone;
+      break;
+  }
+
+
+  if (_shared->audio_processing()->echo_control_mobile()->
+      set_routing_mode(aecmMode) != 0) {
+    _shared->SetLastError(VE_APM_ERROR, kTraceError,
+        "SetAECMMode() failed to set AECM routing mode");
+    return -1;
+  }
+  if (_shared->audio_processing()->echo_control_mobile()->
+      enable_comfort_noise(enableCNG) != 0) {
+    _shared->SetLastError(VE_APM_ERROR, kTraceError,
+        "SetAECMMode() failed to set comfort noise state for AECM");
+    return -1;
+  }
+
+  return 0;
+#else
+  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "SetAECMMode() EC is not supported");
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetAecmMode(AecmModes& mode, bool& enabledCNG) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetAECMMode(mode=?)");
+#ifdef WEBRTC_VOICE_ENGINE_ECHO
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+  enabledCNG = false;
+
+  EchoControlMobile::RoutingMode aecmMode =
+      _shared->audio_processing()->echo_control_mobile()->routing_mode();
+  enabledCNG = _shared->audio_processing()->echo_control_mobile()->
+      is_comfort_noise_enabled();
+
+  switch (aecmMode) {
+    case EchoControlMobile::kQuietEarpieceOrHeadset:
+      mode = kAecmQuietEarpieceOrHeadset;
+      break;
+    case EchoControlMobile::kEarpiece:
+      mode = kAecmEarpiece;
+      break;
+    case EchoControlMobile::kLoudEarpiece:
+      mode = kAecmLoudEarpiece;
+      break;
+    case EchoControlMobile::kSpeakerphone:
+      mode = kAecmSpeakerphone;
+      break;
+    case EchoControlMobile::kLoudSpeakerphone:
+      mode = kAecmLoudSpeakerphone;
+      break;
+  }
+
+  return 0;
+#else
+  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "GetAECMMode() EC is not supported");
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::EnableHighPassFilter(bool enable) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "EnableHighPassFilter(%d)", enable);
+  if (_shared->audio_processing()->high_pass_filter()->Enable(enable) !=
+      AudioProcessing::kNoError) {
+    _shared->SetLastError(VE_APM_ERROR, kTraceError,
+        "HighPassFilter::Enable() failed.");
+    return -1;
+  }
+
+  return 0;
+}
+
+bool VoEAudioProcessingImpl::IsHighPassFilterEnabled() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "IsHighPassFilterEnabled()");
+  return _shared->audio_processing()->high_pass_filter()->is_enabled();
+}
+
+int VoEAudioProcessingImpl::RegisterRxVadObserver(
+  int channel,
+  VoERxVadCallback& observer) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "RegisterRxVadObserver()");
+  ANDROID_NOT_SUPPORTED(_shared->statistics());
+  IPHONE_NOT_SUPPORTED();
+
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+  voe::ScopedChannel sc(_shared->channel_manager(), channel);
+  voe::Channel* channelPtr = sc.ChannelPtr();
+  if (channelPtr == NULL) {
+    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+        "RegisterRxVadObserver() failed to locate channel");
+    return -1;
+  }
+  return channelPtr->RegisterRxVadObserver(observer);
+}
+
+int VoEAudioProcessingImpl::DeRegisterRxVadObserver(int channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "DeRegisterRxVadObserver()");
+  ANDROID_NOT_SUPPORTED(_shared->statistics());
+  IPHONE_NOT_SUPPORTED();
+
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+  voe::ScopedChannel sc(_shared->channel_manager(), channel);
+  voe::Channel* channelPtr = sc.ChannelPtr();
+  if (channelPtr == NULL) {
+    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+        "DeRegisterRxVadObserver() failed to locate channel");
+    return -1;
+  }
+
+  return channelPtr->DeRegisterRxVadObserver();
+}
+
+int VoEAudioProcessingImpl::VoiceActivityIndicator(int channel) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "VoiceActivityIndicator(channel=%d)", channel);
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+  voe::ScopedChannel sc(_shared->channel_manager(), channel);
+  voe::Channel* channelPtr = sc.ChannelPtr();
+  if (channelPtr == NULL) {
+    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+        "DeRegisterRxVadObserver() failed to locate channel");
+    return -1;
+  }
+  int activity(-1);
+  channelPtr->VoiceActivityIndicator(activity);
+
+  return activity;
+}
+
+int VoEAudioProcessingImpl::SetEcMetricsStatus(bool enable) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "SetEcMetricsStatus(enable=%d)", enable);
+  ANDROID_NOT_SUPPORTED(_shared->statistics());
+  IPHONE_NOT_SUPPORTED();
+
+#ifdef WEBRTC_VOICE_ENGINE_ECHO
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+  if ((_shared->audio_processing()->echo_cancellation()->enable_metrics(enable)
+       != 0) ||
+      (_shared->audio_processing()->echo_cancellation()->enable_delay_logging(
+         enable) != 0)) {
+    _shared->SetLastError(VE_APM_ERROR, kTraceError,
+        "SetEcMetricsStatus() unable to set EC metrics mode");
+    return -1;
+  }
+  return 0;
+#else
+  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "SetEcStatus() EC is not supported");
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetEcMetricsStatus(bool& enabled) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetEcMetricsStatus(enabled=?)");
+  ANDROID_NOT_SUPPORTED(_shared->statistics());
+  IPHONE_NOT_SUPPORTED();
+
+#ifdef WEBRTC_VOICE_ENGINE_ECHO
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+  bool echo_mode =
+    _shared->audio_processing()->echo_cancellation()->are_metrics_enabled();
+  bool delay_mode = _shared->audio_processing()->echo_cancellation()->
+      is_delay_logging_enabled();
+
+  if (echo_mode != delay_mode) {
+    _shared->SetLastError(VE_APM_ERROR, kTraceError,
+        "GetEcMetricsStatus() delay logging and echo mode are not the same");
+    return -1;
+  }
+
+  enabled = echo_mode;
+
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetEcMetricsStatus() => enabled=%d", enabled);
+  return 0;
+#else
+  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "SetEcStatus() EC is not supported");
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetEchoMetrics(int& ERL,
+                                           int& ERLE,
+                                           int& RERL,
+                                           int& A_NLP) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetEchoMetrics(ERL=?, ERLE=?, RERL=?, A_NLP=?)");
+  ANDROID_NOT_SUPPORTED(_shared->statistics());
+  IPHONE_NOT_SUPPORTED();
+
+#ifdef WEBRTC_VOICE_ENGINE_ECHO
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+  if (!_shared->audio_processing()->echo_cancellation()->is_enabled()) {
+    _shared->SetLastError(VE_APM_ERROR, kTraceWarning,
+        "GetEchoMetrics() AudioProcessingModule AEC is not enabled");
+    return -1;
+  }
+
+  // Get Echo Metrics from Audio Processing Module.
+  EchoCancellation::Metrics echoMetrics;
+  if (_shared->audio_processing()->echo_cancellation()->GetMetrics(
+          &echoMetrics)) {
+    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetEchoMetrics(), AudioProcessingModule metrics error");
+    return -1;
+  }
+
+  // Echo quality metrics.
+  ERL = echoMetrics.echo_return_loss.instant;
+  ERLE = echoMetrics.echo_return_loss_enhancement.instant;
+  RERL = echoMetrics.residual_echo_return_loss.instant;
+  A_NLP = echoMetrics.a_nlp.instant;
+
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetEchoMetrics() => ERL=%d, ERLE=%d, RERL=%d, A_NLP=%d",
+               ERL, ERLE, RERL, A_NLP);
+  return 0;
+#else
+  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "SetEcStatus() EC is not supported");
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetEcDelayMetrics(int& delay_median,
+                                              int& delay_std) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetEcDelayMetrics(median=?, std=?)");
+  ANDROID_NOT_SUPPORTED(_shared->statistics());
+  IPHONE_NOT_SUPPORTED();
+
+#ifdef WEBRTC_VOICE_ENGINE_ECHO
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+  if (!_shared->audio_processing()->echo_cancellation()->is_enabled()) {
+    _shared->SetLastError(VE_APM_ERROR, kTraceWarning,
+        "GetEcDelayMetrics() AudioProcessingModule AEC is not enabled");
+    return -1;
+  }
+
+  int median = 0;
+  int std = 0;
+  // Get delay-logging values from Audio Processing Module.
+  if (_shared->audio_processing()->echo_cancellation()->GetDelayMetrics(
+        &median, &std)) {
+    WEBRTC_TRACE(kTraceError, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetEcDelayMetrics(), AudioProcessingModule delay-logging "
+                 "error");
+    return -1;
+  }
+
+  // EC delay-logging metrics
+  delay_median = median;
+  delay_std = std;
+
+  WEBRTC_TRACE(kTraceStateInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetEcDelayMetrics() => delay_median=%d, delay_std=%d",
+               delay_median, delay_std);
+  return 0;
+#else
+  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "SetEcStatus() EC is not supported");
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::StartDebugRecording(const char* fileNameUTF8) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "StartDebugRecording()");
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+  return _shared->audio_processing()->StartDebugRecording(fileNameUTF8);
+
+}
+
+int VoEAudioProcessingImpl::StopDebugRecording() {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "StopDebugRecording()");
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+  return _shared->audio_processing()->StopDebugRecording();
+}
+
+int VoEAudioProcessingImpl::SetTypingDetectionStatus(bool enable) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "SetTypingDetectionStatus()");
+  ANDROID_NOT_SUPPORTED(_shared->statistics());
+  IPHONE_NOT_SUPPORTED();
+#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+
+  // Just use the VAD state to determine if we should enable typing detection
+  // or not
+
+  if (_shared->audio_processing()->voice_detection()->Enable(enable)) {
+    _shared->SetLastError(VE_APM_ERROR, kTraceWarning,
+        "SetTypingDetectionStatus() failed to set VAD state");
+    return -1;
+  }
+  if (_shared->audio_processing()->voice_detection()->set_likelihood(
+          VoiceDetection::kVeryLowLikelihood)) {
+    _shared->SetLastError(VE_APM_ERROR, kTraceWarning,
+        "SetTypingDetectionStatus() failed to set VAD likelihood to low");
+    return -1;
+  }
+
+  return 0;
+#else
+  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "SetTypingDetectionStatus is not supported");
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::GetTypingDetectionStatus(bool& enabled) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetTypingDetectionStatus()");
+  ANDROID_NOT_SUPPORTED(_shared->statistics());
+  IPHONE_NOT_SUPPORTED();
+
+#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+  // Just use the VAD state to determine if we should enable typing
+  // detection or not
+
+  enabled = _shared->audio_processing()->voice_detection()->is_enabled();
+
+  return 0;
+#else
+  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "SetTypingDetectionStatus is not supported");
+  return -1;
+#endif
+}
+
+
+int VoEAudioProcessingImpl::TimeSinceLastTyping(int &seconds) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "TimeSinceLastTyping()");
+  ANDROID_NOT_SUPPORTED(_shared->statistics());
+  IPHONE_NOT_SUPPORTED();
+
+#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+  // Check if typing detection is enabled
+  bool enabled = _shared->audio_processing()->voice_detection()->is_enabled();
+  if (enabled)
+  {
+    _shared->transmit_mixer()->TimeSinceLastTyping(seconds);
+    return 0;
+  }
+  else
+  {
+    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "SetTypingDetectionStatus is not enabled");
+  return -1;
+  }
+#else
+  _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "SetTypingDetectionStatus is not supported");
+  return -1;
+#endif
+}
+
+int VoEAudioProcessingImpl::SetTypingDetectionParameters(int timeWindow,
+                                                         int costPerTyping,
+                                                         int reportingThreshold,
+                                                         int penaltyDecay,
+                                                         int typeEventDelay) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "SetTypingDetectionParameters()");
+  ANDROID_NOT_SUPPORTED(_shared->statistics());
+  IPHONE_NOT_SUPPORTED();
+
+#ifdef WEBRTC_VOICE_ENGINE_TYPING_DETECTION
+  if (!_shared->statistics().Initialized()) {
+    _shared->statistics().SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+  return (_shared->transmit_mixer()->SetTypingDetectionParameters(timeWindow,
+      costPerTyping, reportingThreshold, penaltyDecay, typeEventDelay));
+
+#else
+  _shared->statistics().SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "SetTypingDetectionParameters is not supported");
+  return -1;
+#endif
+
+}
+
+void VoEAudioProcessingImpl::EnableStereoChannelSwapping(bool enable) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "EnableStereoChannelSwapping(enable=%d)", enable);
+  _shared->transmit_mixer()->EnableStereoChannelSwapping(enable);
+}
+
+bool VoEAudioProcessingImpl::IsStereoChannelSwappingEnabled() {
+  WEBRTC_TRACE_VOICE_API();
+  return _shared->transmit_mixer()->IsStereoChannelSwappingEnabled();
+}
+
+#endif  // #ifdef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
+
+}   // namespace webrtc
diff --git a/src/voice_engine/voe_audio_processing_impl.h b/src/voice_engine/voe_audio_processing_impl.h
new file mode 100644
index 0000000..3d6b64d
--- /dev/null
+++ b/src/voice_engine/voe_audio_processing_impl.h
@@ -0,0 +1,113 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_AUDIO_PROCESSING_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_AUDIO_PROCESSING_IMPL_H
+
+#include "voe_audio_processing.h"
+
+#include "shared_data.h"
+
+namespace webrtc {
+
+class VoEAudioProcessingImpl : public VoEAudioProcessing {
+ public:
+  virtual int SetNsStatus(bool enable, NsModes mode = kNsUnchanged);
+
+  virtual int GetNsStatus(bool& enabled, NsModes& mode);
+
+  virtual int SetAgcStatus(bool enable, AgcModes mode = kAgcUnchanged);
+
+  virtual int GetAgcStatus(bool& enabled, AgcModes& mode);
+
+  virtual int SetAgcConfig(const AgcConfig config);
+
+  virtual int GetAgcConfig(AgcConfig& config);
+
+  virtual int SetRxNsStatus(int channel,
+                            bool enable,
+                            NsModes mode = kNsUnchanged);
+
+  virtual int GetRxNsStatus(int channel, bool& enabled, NsModes& mode);
+
+  virtual int SetRxAgcStatus(int channel,
+                             bool enable,
+                             AgcModes mode = kAgcUnchanged);
+
+  virtual int GetRxAgcStatus(int channel, bool& enabled, AgcModes& mode);
+
+  virtual int SetRxAgcConfig(int channel, const AgcConfig config);
+
+  virtual int GetRxAgcConfig(int channel, AgcConfig& config);
+
+  virtual int SetEcStatus(bool enable, EcModes mode = kEcUnchanged);
+  virtual int GetEcStatus(bool& enabled, EcModes& mode);
+  virtual int EnableDriftCompensation(bool enable);
+  virtual bool DriftCompensationEnabled();
+
+  virtual void SetDelayOffsetMs(int offset);
+  virtual int DelayOffsetMs();
+
+  virtual int SetAecmMode(AecmModes mode = kAecmSpeakerphone,
+                          bool enableCNG = true);
+
+  virtual int GetAecmMode(AecmModes& mode, bool& enabledCNG);
+
+  virtual int EnableHighPassFilter(bool enable);
+  virtual bool IsHighPassFilterEnabled();
+
+  virtual int RegisterRxVadObserver(int channel,
+                                    VoERxVadCallback& observer);
+
+  virtual int DeRegisterRxVadObserver(int channel);
+
+  virtual int VoiceActivityIndicator(int channel);
+
+  virtual int SetEcMetricsStatus(bool enable);
+
+  virtual int GetEcMetricsStatus(bool& enabled);
+
+  virtual int GetEchoMetrics(int& ERL, int& ERLE, int& RERL, int& A_NLP);
+
+  virtual int GetEcDelayMetrics(int& delay_median, int& delay_std);
+
+  virtual int StartDebugRecording(const char* fileNameUTF8);
+
+  virtual int StopDebugRecording();
+
+  virtual int SetTypingDetectionStatus(bool enable);
+
+  virtual int GetTypingDetectionStatus(bool& enabled);
+
+  virtual int TimeSinceLastTyping(int &seconds);
+
+  // TODO(niklase) Remove default argument as soon as libJingle is updated!
+  virtual int SetTypingDetectionParameters(int timeWindow,
+                                           int costPerTyping,
+                                           int reportingThreshold,
+                                           int penaltyDecay,
+                                           int typeEventDelay = 0);
+
+  virtual void EnableStereoChannelSwapping(bool enable);
+  virtual bool IsStereoChannelSwappingEnabled();
+
+ protected:
+  VoEAudioProcessingImpl(voe::SharedData* shared);
+  virtual ~VoEAudioProcessingImpl();
+
+ private:
+  bool _isAecMode;
+  voe::SharedData* _shared;
+};
+
+}  //  namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_VOE_AUDIO_PROCESSING_IMPL_H
+
diff --git a/src/voice_engine/voe_audio_processing_unittest.cc b/src/voice_engine/voe_audio_processing_unittest.cc
new file mode 100644
index 0000000..8c66d88
--- /dev/null
+++ b/src/voice_engine/voe_audio_processing_unittest.cc
@@ -0,0 +1,67 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voice_engine/include/voe_audio_processing.h"
+
+#include "gtest/gtest.h"
+#include "voice_engine/include/voe_base.h"
+
+namespace webrtc {
+namespace voe {
+namespace {
+
+class VoEAudioProcessingTest : public ::testing::Test {
+ protected:
+  VoEAudioProcessingTest()
+      : voe_(VoiceEngine::Create()),
+        base_(VoEBase::GetInterface(voe_)),
+        audioproc_(VoEAudioProcessing::GetInterface(voe_)) {
+  }
+
+  virtual ~VoEAudioProcessingTest() {
+    base_->Terminate();
+    audioproc_->Release();
+    base_->Release();
+    VoiceEngine::Delete(voe_);
+  }
+
+  VoiceEngine* voe_;
+  VoEBase* base_;
+  VoEAudioProcessing* audioproc_;
+};
+
+TEST_F(VoEAudioProcessingTest, FailureIfNotInitialized) {
+  EXPECT_EQ(-1, audioproc_->EnableDriftCompensation(true));
+  EXPECT_EQ(-1, audioproc_->EnableDriftCompensation(false));
+  EXPECT_FALSE(audioproc_->DriftCompensationEnabled());
+}
+
+// TODO(andrew): Investigate race conditions triggered by this test:
+// https://code.google.com/p/webrtc/issues/detail?id=788
+TEST_F(VoEAudioProcessingTest, DISABLED_DriftCompensationIsEnabledIfSupported) {
+  ASSERT_EQ(0, base_->Init());
+  // TODO(andrew): Ideally, DriftCompensationSupported() would be mocked.
+  bool supported = VoEAudioProcessing::DriftCompensationSupported();
+  if (supported) {
+    EXPECT_EQ(0, audioproc_->EnableDriftCompensation(true));
+    EXPECT_TRUE(audioproc_->DriftCompensationEnabled());
+    EXPECT_EQ(0, audioproc_->EnableDriftCompensation(false));
+    EXPECT_FALSE(audioproc_->DriftCompensationEnabled());
+  } else {
+    EXPECT_EQ(-1, audioproc_->EnableDriftCompensation(true));
+    EXPECT_FALSE(audioproc_->DriftCompensationEnabled());
+    EXPECT_EQ(-1, audioproc_->EnableDriftCompensation(false));
+    EXPECT_FALSE(audioproc_->DriftCompensationEnabled());
+  }
+}
+
+}  // namespace
+}  // namespace voe
+}  // namespace webrtc
diff --git a/src/voice_engine/voe_base_impl.cc b/src/voice_engine/voe_base_impl.cc
new file mode 100644
index 0000000..2ab249d
--- /dev/null
+++ b/src/voice_engine/voe_base_impl.cc
@@ -0,0 +1,1623 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_base_impl.h"
+
+#include "audio_coding_module.h"
+#include "audio_device_impl.h"
+#include "audio_processing.h"
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "file_wrapper.h"
+#include "output_mixer.h"
+#include "signal_processing_library.h"
+#include "trace.h"
+#include "transmit_mixer.h"
+#include "utility.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+#if (defined(_WIN32) && defined(_DLL) && (_MSC_VER == 1400))
+// Fix for VS 2005 MD/MDd link problem
+#include <stdio.h>
+extern "C"
+    { FILE _iob[3] = {   __iob_func()[0], __iob_func()[1], __iob_func()[2]}; }
+#endif
+
+namespace webrtc
+{
+
+VoEBase* VoEBase::GetInterface(VoiceEngine* voiceEngine)
+{
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* s = reinterpret_cast<VoiceEngineImpl*>(voiceEngine);
+    s->AddRef();
+    return s;
+}
+
+VoEBaseImpl::VoEBaseImpl(voe::SharedData* shared) :
+    _voiceEngineObserverPtr(NULL),
+    _callbackCritSect(*CriticalSectionWrapper::CreateCriticalSection()),
+    _voiceEngineObserver(false), _oldVoEMicLevel(0), _oldMicLevel(0),
+    _shared(shared)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoEBaseImpl() - ctor");
+}
+
+VoEBaseImpl::~VoEBaseImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "~VoEBaseImpl() - dtor");
+
+    TerminateInternal();
+
+    delete &_callbackCritSect;
+}
+
+void VoEBaseImpl::OnErrorIsReported(const ErrorCode error)
+{
+    CriticalSectionScoped cs(&_callbackCritSect);
+    if (_voiceEngineObserver)
+    {
+        if (_voiceEngineObserverPtr)
+        {
+            int errCode(0);
+            if (error == AudioDeviceObserver::kRecordingError)
+            {
+                errCode = VE_RUNTIME_REC_ERROR;
+                WEBRTC_TRACE(kTraceInfo, kTraceVoice,
+                    VoEId(_shared->instance_id(), -1),
+                    "VoEBaseImpl::OnErrorIsReported() => VE_RUNTIME_REC_ERROR");
+            }
+            else if (error == AudioDeviceObserver::kPlayoutError)
+            {
+                errCode = VE_RUNTIME_PLAY_ERROR;
+                WEBRTC_TRACE(kTraceInfo, kTraceVoice,
+                    VoEId(_shared->instance_id(), -1),
+                    "VoEBaseImpl::OnErrorIsReported() => "
+                    "VE_RUNTIME_PLAY_ERROR");
+            }
+            // Deliver callback (-1 <=> no channel dependency)
+            _voiceEngineObserverPtr->CallbackOnError(-1, errCode);
+        }
+    }
+}
+
+void VoEBaseImpl::OnWarningIsReported(const WarningCode warning)
+{
+    CriticalSectionScoped cs(&_callbackCritSect);
+    if (_voiceEngineObserver)
+    {
+        if (_voiceEngineObserverPtr)
+        {
+            int warningCode(0);
+            if (warning == AudioDeviceObserver::kRecordingWarning)
+            {
+                warningCode = VE_RUNTIME_REC_WARNING;
+                WEBRTC_TRACE(kTraceInfo, kTraceVoice,
+                    VoEId(_shared->instance_id(), -1),
+                    "VoEBaseImpl::OnErrorIsReported() => "
+                    "VE_RUNTIME_REC_WARNING");
+            }
+            else if (warning == AudioDeviceObserver::kPlayoutWarning)
+            {
+                warningCode = VE_RUNTIME_PLAY_WARNING;
+                WEBRTC_TRACE(kTraceInfo, kTraceVoice,
+                    VoEId(_shared->instance_id(), -1),
+                    "VoEBaseImpl::OnErrorIsReported() => "
+                    "VE_RUNTIME_PLAY_WARNING");
+            }
+            // Deliver callback (-1 <=> no channel dependency)
+            _voiceEngineObserverPtr->CallbackOnError(-1, warningCode);
+        }
+    }
+}
+
+WebRtc_Word32 VoEBaseImpl::RecordedDataIsAvailable(
+        const void* audioSamples,
+        const WebRtc_UWord32 nSamples,
+        const WebRtc_UWord8 nBytesPerSample,
+        const WebRtc_UWord8 nChannels,
+        const WebRtc_UWord32 samplesPerSec,
+        const WebRtc_UWord32 totalDelayMS,
+        const WebRtc_Word32 clockDrift,
+        const WebRtc_UWord32 currentMicLevel,
+        WebRtc_UWord32& newMicLevel)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoEBaseImpl::RecordedDataIsAvailable(nSamples=%u, "
+                     "nBytesPerSample=%u, nChannels=%u, samplesPerSec=%u, "
+                     "totalDelayMS=%u, clockDrift=%d, currentMicLevel=%u)",
+                 nSamples, nBytesPerSample, nChannels, samplesPerSec,
+                 totalDelayMS, clockDrift, currentMicLevel);
+
+    assert(_shared->transmit_mixer() != NULL);
+    assert(_shared->audio_device() != NULL);
+
+    bool isAnalogAGC(false);
+    WebRtc_UWord32 maxVolume(0);
+    WebRtc_UWord16 currentVoEMicLevel(0);
+    WebRtc_UWord32 newVoEMicLevel(0);
+
+    if (_shared->audio_processing() &&
+        (_shared->audio_processing()->gain_control()->mode()
+                    == GainControl::kAdaptiveAnalog))
+    {
+        isAnalogAGC = true;
+    }
+
+    // Will only deal with the volume in adaptive analog mode
+    if (isAnalogAGC)
+    {
+        // Scale from ADM to VoE level range
+        if (_shared->audio_device()->MaxMicrophoneVolume(&maxVolume) == 0)
+        {
+            if (0 != maxVolume)
+            {
+                currentVoEMicLevel = (WebRtc_UWord16) ((currentMicLevel
+                        * kMaxVolumeLevel + (int) (maxVolume / 2))
+                        / (maxVolume));
+            }
+        }
+        // We learned that on certain systems (e.g Linux) the currentVoEMicLevel
+        // can be greater than the maxVolumeLevel therefore
+        // we are going to cap the currentVoEMicLevel to the maxVolumeLevel
+        // and change the maxVolume to currentMicLevel if it turns out that
+        // the currentVoEMicLevel is indeed greater than the maxVolumeLevel.
+        if (currentVoEMicLevel > kMaxVolumeLevel)
+        {
+            currentVoEMicLevel = kMaxVolumeLevel;
+            maxVolume = currentMicLevel;
+        }
+    }
+
+    // Keep track if the MicLevel has been changed by the AGC, if not,
+    // use the old value AGC returns to let AGC continue its trend,
+    // so eventually the AGC is able to change the mic level. This handles
+    // issues with truncation introduced by the scaling.
+    if (_oldMicLevel == currentMicLevel)
+    {
+        currentVoEMicLevel = (WebRtc_UWord16) _oldVoEMicLevel;
+    }
+
+    // Perform channel-independent operations
+    // (APM, mix with file, record to file, mute, etc.)
+    _shared->transmit_mixer()->PrepareDemux(audioSamples, nSamples, nChannels,
+        samplesPerSec, static_cast<WebRtc_UWord16>(totalDelayMS), clockDrift,
+        currentVoEMicLevel);
+
+    // Copy the audio frame to each sending channel and perform
+    // channel-dependent operations (file mixing, mute, etc.) to prepare
+    // for encoding.
+    _shared->transmit_mixer()->DemuxAndMix();
+    // Do the encoding and packetize+transmit the RTP packet when encoding
+    // is done.
+    _shared->transmit_mixer()->EncodeAndSend();
+
+    // Will only deal with the volume in adaptive analog mode
+    if (isAnalogAGC)
+    {
+        // Scale from VoE to ADM level range
+        newVoEMicLevel = _shared->transmit_mixer()->CaptureLevel();
+        if (newVoEMicLevel != currentVoEMicLevel)
+        {
+            // Add (kMaxVolumeLevel/2) to round the value
+            newMicLevel = (WebRtc_UWord32) ((newVoEMicLevel * maxVolume
+                    + (int) (kMaxVolumeLevel / 2)) / (kMaxVolumeLevel));
+        }
+        else
+        {
+            // Pass zero if the level is unchanged
+            newMicLevel = 0;
+        }
+
+        // Keep track of the value AGC returns
+        _oldVoEMicLevel = newVoEMicLevel;
+        _oldMicLevel = currentMicLevel;
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 VoEBaseImpl::NeedMorePlayData(
+        const WebRtc_UWord32 nSamples,
+        const WebRtc_UWord8 nBytesPerSample,
+        const WebRtc_UWord8 nChannels,
+        const WebRtc_UWord32 samplesPerSec,
+        void* audioSamples,
+        WebRtc_UWord32& nSamplesOut)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoEBaseImpl::NeedMorePlayData(nSamples=%u, "
+                     "nBytesPerSample=%d, nChannels=%d, samplesPerSec=%u)",
+                 nSamples, nBytesPerSample, nChannels, samplesPerSec);
+
+    assert(_shared->output_mixer() != NULL);
+
+    // TODO(andrew): if the device is running in mono, we should tell the mixer
+    // here so that it will only request mono from AudioCodingModule.
+    // Perform mixing of all active participants (channel-based mixing)
+    _shared->output_mixer()->MixActiveChannels();
+
+    // Additional operations on the combined signal
+    _shared->output_mixer()->DoOperationsOnCombinedSignal();
+
+    // Retrieve the final output mix (resampled to match the ADM)
+    _shared->output_mixer()->GetMixedAudio(samplesPerSec, nChannels,
+        &_audioFrame);
+
+    assert(static_cast<int>(nSamples) == _audioFrame.samples_per_channel_);
+    assert(samplesPerSec ==
+        static_cast<WebRtc_UWord32>(_audioFrame.sample_rate_hz_));
+
+    // Deliver audio (PCM) samples to the ADM
+    memcpy(
+           (WebRtc_Word16*) audioSamples,
+           (const WebRtc_Word16*) _audioFrame.data_,
+           sizeof(WebRtc_Word16) * (_audioFrame.samples_per_channel_
+                   * _audioFrame.num_channels_));
+
+    nSamplesOut = _audioFrame.samples_per_channel_;
+
+    return 0;
+}
+
+int VoEBaseImpl::RegisterVoiceEngineObserver(VoiceEngineObserver& observer)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "RegisterVoiceEngineObserver(observer=0x%d)", &observer);
+    CriticalSectionScoped cs(&_callbackCritSect);
+    if (_voiceEngineObserverPtr)
+    {
+        _shared->SetLastError(VE_INVALID_OPERATION, kTraceError,
+            "RegisterVoiceEngineObserver() observer already enabled");
+        return -1;
+    }
+
+    // Register the observer in all active channels
+    voe::ScopedChannel sc(_shared->channel_manager());
+    void* iterator(NULL);
+    voe::Channel* channelPtr = sc.GetFirstChannel(iterator);
+    while (channelPtr != NULL)
+    {
+        channelPtr->RegisterVoiceEngineObserver(observer);
+        channelPtr = sc.GetNextChannel(iterator);
+    }
+    _shared->transmit_mixer()->RegisterVoiceEngineObserver(observer);
+
+    _voiceEngineObserverPtr = &observer;
+    _voiceEngineObserver = true;
+
+    return 0;
+}
+
+int VoEBaseImpl::DeRegisterVoiceEngineObserver()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "DeRegisterVoiceEngineObserver()");
+    CriticalSectionScoped cs(&_callbackCritSect);
+    if (!_voiceEngineObserverPtr)
+    {
+        _shared->SetLastError(VE_INVALID_OPERATION, kTraceError,
+            "DeRegisterVoiceEngineObserver() observer already disabled");
+        return 0;
+    }
+
+    _voiceEngineObserver = false;
+    _voiceEngineObserverPtr = NULL;
+
+    // Deregister the observer in all active channels
+    voe::ScopedChannel sc(_shared->channel_manager());
+    void* iterator(NULL);
+    voe::Channel* channelPtr = sc.GetFirstChannel(iterator);
+    while (channelPtr != NULL)
+    {
+        channelPtr->DeRegisterVoiceEngineObserver();
+        channelPtr = sc.GetNextChannel(iterator);
+    }
+
+    return 0;
+}
+
+int VoEBaseImpl::Init(AudioDeviceModule* external_adm)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+        "Init(external_adm=0x%p)", external_adm);
+    CriticalSectionScoped cs(_shared->crit_sec());
+
+    if (_shared->statistics().Initialized())
+    {
+        return 0;
+    }
+
+    if (_shared->process_thread())
+    {
+        if (_shared->process_thread()->Start() != 0)
+        {
+            _shared->SetLastError(VE_THREAD_ERROR, kTraceError,
+                "Init() failed to start module process thread");
+            return -1;
+        }
+    }
+
+    // Create an internal ADM if the user has not added an external
+    // ADM implementation as input to Init().
+    if (external_adm == NULL)
+    {
+        // Create the internal ADM implementation.
+        _shared->set_audio_device(AudioDeviceModuleImpl::Create(
+            VoEId(_shared->instance_id(), -1), _shared->audio_device_layer()));
+
+        if (_shared->audio_device() == NULL)
+        {
+            _shared->SetLastError(VE_NO_MEMORY, kTraceCritical,
+                "Init() failed to create the ADM");
+            return -1;
+        }
+    }
+    else
+    {
+        // Use the already existing external ADM implementation.
+        _shared->set_audio_device(external_adm);
+        WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
+            "An external ADM implementation will be used in VoiceEngine");
+    }
+
+    // Register the ADM to the process thread, which will drive the error
+    // callback mechanism
+    if (_shared->process_thread() &&
+        _shared->process_thread()->RegisterModule(_shared->audio_device()) != 0)
+    {
+        _shared->SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR, kTraceError,
+            "Init() failed to register the ADM");
+        return -1;
+    }
+
+    bool available(false);
+
+    // --------------------
+    // Reinitialize the ADM
+
+    // Register the AudioObserver implementation
+    if (_shared->audio_device()->RegisterEventObserver(this) != 0) {
+      _shared->SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR, kTraceWarning,
+          "Init() failed to register event observer for the ADM");
+    }
+
+    // Register the AudioTransport implementation
+    if (_shared->audio_device()->RegisterAudioCallback(this) != 0) {
+      _shared->SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR, kTraceWarning,
+          "Init() failed to register audio callback for the ADM");
+    }
+
+    // ADM initialization
+    if (_shared->audio_device()->Init() != 0)
+    {
+        _shared->SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR, kTraceError,
+            "Init() failed to initialize the ADM");
+        return -1;
+    }
+
+    // Initialize the default speaker
+    if (_shared->audio_device()->SetPlayoutDevice(
+            WEBRTC_VOICE_ENGINE_DEFAULT_DEVICE) != 0)
+    {
+        _shared->SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR, kTraceInfo,
+            "Init() failed to set the default output device");
+    }
+    if (_shared->audio_device()->SpeakerIsAvailable(&available) != 0)
+    {
+        _shared->SetLastError(VE_CANNOT_ACCESS_SPEAKER_VOL, kTraceInfo,
+            "Init() failed to check speaker availability, trying to "
+            "initialize speaker anyway");
+    }
+    else if (!available)
+    {
+        _shared->SetLastError(VE_CANNOT_ACCESS_SPEAKER_VOL, kTraceInfo,
+            "Init() speaker not available, trying to initialize speaker "
+            "anyway");
+    }
+    if (_shared->audio_device()->InitSpeaker() != 0)
+    {
+        _shared->SetLastError(VE_CANNOT_ACCESS_SPEAKER_VOL, kTraceInfo,
+            "Init() failed to initialize the speaker");
+    }
+
+    // Initialize the default microphone
+    if (_shared->audio_device()->SetRecordingDevice(
+            WEBRTC_VOICE_ENGINE_DEFAULT_DEVICE) != 0)
+    {
+        _shared->SetLastError(VE_SOUNDCARD_ERROR, kTraceInfo,
+            "Init() failed to set the default input device");
+    }
+    if (_shared->audio_device()->MicrophoneIsAvailable(&available) != 0)
+    {
+        _shared->SetLastError(VE_CANNOT_ACCESS_MIC_VOL, kTraceInfo,
+            "Init() failed to check microphone availability, trying to "
+            "initialize microphone anyway");
+    }
+    else if (!available)
+    {
+        _shared->SetLastError(VE_CANNOT_ACCESS_MIC_VOL, kTraceInfo,
+            "Init() microphone not available, trying to initialize "
+            "microphone anyway");
+    }
+    if (_shared->audio_device()->InitMicrophone() != 0)
+    {
+        _shared->SetLastError(VE_CANNOT_ACCESS_MIC_VOL, kTraceInfo,
+            "Init() failed to initialize the microphone");
+    }
+
+    // Set number of channels
+    if (_shared->audio_device()->StereoPlayoutIsAvailable(&available) != 0) {
+      _shared->SetLastError(VE_SOUNDCARD_ERROR, kTraceWarning,
+          "Init() failed to query stereo playout mode");
+    }
+    if (_shared->audio_device()->SetStereoPlayout(available) != 0)
+    {
+        _shared->SetLastError(VE_SOUNDCARD_ERROR, kTraceWarning,
+            "Init() failed to set mono/stereo playout mode");
+    }
+
+    // TODO(andrew): These functions don't tell us whether stereo recording
+    // is truly available. We simply set the AudioProcessing input to stereo
+    // here, because we have to wait until receiving the first frame to
+    // determine the actual number of channels anyway.
+    //
+    // These functions may be changed; tracked here:
+    // http://code.google.com/p/webrtc/issues/detail?id=204
+    _shared->audio_device()->StereoRecordingIsAvailable(&available);
+    if (_shared->audio_device()->SetStereoRecording(available) != 0)
+    {
+        _shared->SetLastError(VE_SOUNDCARD_ERROR, kTraceWarning,
+            "Init() failed to set mono/stereo recording mode");
+    }
+
+    // APM initialization done after sound card since we need
+    // to know if we support stereo recording or not.
+
+    // Create the AudioProcessing Module if it does not exist.
+
+    if (_shared->audio_processing() == NULL)
+    {
+        _shared->set_audio_processing(AudioProcessing::Create(
+                VoEId(_shared->instance_id(), -1)));
+        if (_shared->audio_processing() == NULL)
+        {
+            _shared->SetLastError(VE_NO_MEMORY, kTraceCritical,
+                "Init() failed to create the AP module");
+            return -1;
+        }
+        // Ensure that mixers in both directions has access to the created APM
+        _shared->transmit_mixer()->SetAudioProcessingModule(
+            _shared->audio_processing());
+        _shared->output_mixer()->SetAudioProcessingModule(
+            _shared->audio_processing());
+
+        if (_shared->audio_processing()->echo_cancellation()->
+                set_device_sample_rate_hz(
+                        kVoiceEngineAudioProcessingDeviceSampleRateHz))
+        {
+            _shared->SetLastError(VE_APM_ERROR, kTraceError,
+                "Init() failed to set the device sample rate to 48K for AP "
+                " module");
+            return -1;
+        }
+        // Using 8 kHz as inital Fs. Might be changed already at first call.
+        if (_shared->audio_processing()->set_sample_rate_hz(8000))
+        {
+            _shared->SetLastError(VE_APM_ERROR, kTraceError,
+                "Init() failed to set the sample rate to 8K for AP module");
+            return -1;
+        }
+
+        // Assume mono until the audio frames are received from the capture
+        // device, at which point this can be updated.
+        if (_shared->audio_processing()->set_num_channels(1, 1) != 0)
+        {
+            _shared->SetLastError(VE_SOUNDCARD_ERROR, kTraceError,
+                "Init() failed to set channels for the primary audio stream");
+            return -1;
+        }
+
+        if (_shared->audio_processing()->set_num_reverse_channels(1) != 0)
+        {
+            _shared->SetLastError(VE_SOUNDCARD_ERROR, kTraceError,
+                "Init() failed to set channels for the primary audio stream");
+            return -1;
+        }
+        // high-pass filter
+        if (_shared->audio_processing()->high_pass_filter()->Enable(
+                WEBRTC_VOICE_ENGINE_HP_DEFAULT_STATE) != 0)
+        {
+            _shared->SetLastError(VE_APM_ERROR, kTraceError,
+                "Init() failed to set the high-pass filter for AP module");
+            return -1;
+        }
+        // Echo Cancellation
+        if (_shared->audio_processing()->echo_cancellation()->
+                enable_drift_compensation(false) != 0)
+        {
+            _shared->SetLastError(VE_APM_ERROR, kTraceError,
+                "Init() failed to set drift compensation for AP module");
+            return -1;
+        }
+        if (_shared->audio_processing()->echo_cancellation()->Enable(
+                WEBRTC_VOICE_ENGINE_EC_DEFAULT_STATE))
+        {
+            _shared->SetLastError(VE_APM_ERROR, kTraceError,
+                "Init() failed to set echo cancellation state for AP module");
+            return -1;
+        }
+        // Noise Reduction
+        if (_shared->audio_processing()->noise_suppression()->set_level(
+                (NoiseSuppression::Level) WEBRTC_VOICE_ENGINE_NS_DEFAULT_MODE)
+                != 0)
+        {
+            _shared->SetLastError(VE_APM_ERROR, kTraceError,
+                "Init() failed to set noise reduction level for AP module");
+            return -1;
+        }
+        if (_shared->audio_processing()->noise_suppression()->Enable(
+                WEBRTC_VOICE_ENGINE_NS_DEFAULT_STATE) != 0)
+        {
+            _shared->SetLastError(VE_APM_ERROR, kTraceError,
+                "Init() failed to set noise reduction state for AP module");
+            return -1;
+        }
+        // Automatic Gain control
+        if (_shared->audio_processing()->gain_control()->
+                set_analog_level_limits(kMinVolumeLevel,kMaxVolumeLevel) != 0)
+        {
+            _shared->SetLastError(VE_APM_ERROR, kTraceError,
+                "Init() failed to set AGC analog level for AP module");
+            return -1;
+        }
+        if (_shared->audio_processing()->gain_control()->set_mode(
+                (GainControl::Mode) WEBRTC_VOICE_ENGINE_AGC_DEFAULT_MODE)
+                != 0)
+        {
+            _shared->SetLastError(VE_APM_ERROR, kTraceError,
+                "Init() failed to set AGC mode for AP module");
+            return -1;
+        }
+        if (_shared->audio_processing()->gain_control()->Enable(
+                WEBRTC_VOICE_ENGINE_AGC_DEFAULT_STATE)
+                != 0)
+        {
+            _shared->SetLastError(VE_APM_ERROR, kTraceError,
+                "Init() failed to set AGC state for AP module");
+            return -1;
+        }
+        // VAD
+        if (_shared->audio_processing()->voice_detection()->Enable(
+                WEBRTC_VOICE_ENGINE_VAD_DEFAULT_STATE)
+                != 0)
+        {
+            _shared->SetLastError(VE_APM_ERROR, kTraceError,
+                "Init() failed to set VAD state for AP module");
+            return -1;
+        }
+    }
+
+  // Set default AGC mode for the ADM
+#ifdef WEBRTC_VOICE_ENGINE_AGC
+    bool enable(false);
+    if (_shared->audio_processing()->gain_control()->mode()
+            != GainControl::kFixedDigital)
+    {
+        enable = _shared->audio_processing()->gain_control()->is_enabled();
+        // Only set the AGC mode for the ADM when Adaptive AGC mode is selected
+        if (_shared->audio_device()->SetAGC(enable) != 0)
+        {
+            _shared->SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR,
+                kTraceError, "Init() failed to set default AGC mode in ADM 0");
+        }
+    }
+#endif
+
+    return _shared->statistics().SetInitialized();
+}
+
+int VoEBaseImpl::Terminate()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "Terminate()");
+    CriticalSectionScoped cs(_shared->crit_sec());
+    return TerminateInternal();
+}
+
+int VoEBaseImpl::MaxNumOfChannels()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "MaxNumOfChannels()");
+    WebRtc_Word32 maxNumOfChannels =
+        _shared->channel_manager().MaxNumOfChannels();
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "MaxNumOfChannels() => %d", maxNumOfChannels);
+    return (maxNumOfChannels);
+}
+
+int VoEBaseImpl::CreateChannel()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "CreateChannel()");
+    CriticalSectionScoped cs(_shared->crit_sec());
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    WebRtc_Word32 channelId = -1;
+
+    if (!_shared->channel_manager().CreateChannel(channelId))
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_CREATED, kTraceError,
+            "CreateChannel() failed to allocate memory for channel");
+        return -1;
+    }
+
+    bool destroyChannel(false);
+    {
+        voe::ScopedChannel sc(_shared->channel_manager(), channelId);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _shared->SetLastError(VE_CHANNEL_NOT_CREATED, kTraceError,
+                "CreateChannel() failed to allocate memory for channel");
+            return -1;
+        }
+        else if (channelPtr->SetEngineInformation(_shared->statistics(),
+                                                  *_shared->output_mixer(),
+                                                  *_shared->transmit_mixer(),
+                                                  *_shared->process_thread(),
+                                                  *_shared->audio_device(),
+                                                  _voiceEngineObserverPtr,
+                                                  &_callbackCritSect) != 0)
+        {
+            destroyChannel = true;
+            _shared->SetLastError(VE_CHANNEL_NOT_CREATED, kTraceError,
+                "CreateChannel() failed to associate engine and channel."
+                " Destroying channel.");
+        }
+        else if (channelPtr->Init() != 0)
+        {
+            destroyChannel = true;
+            _shared->SetLastError(VE_CHANNEL_NOT_CREATED, kTraceError,
+                "CreateChannel() failed to initialize channel. Destroying"
+                " channel.");
+        }
+    }
+    if (destroyChannel)
+    {
+        _shared->channel_manager().DestroyChannel(channelId);
+        return -1;
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "CreateChannel() => %d", channelId);
+    return channelId;
+}
+
+int VoEBaseImpl::DeleteChannel(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "DeleteChannel(channel=%d)", channel);
+    CriticalSectionScoped cs(_shared->crit_sec());
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    {
+        voe::ScopedChannel sc(_shared->channel_manager(), channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                "DeleteChannel() failed to locate channel");
+            return -1;
+        }
+    }
+
+    if (_shared->channel_manager().DestroyChannel(channel) != 0)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "DeleteChannel() failed to destroy channel");
+        return -1;
+    }
+
+    if (StopSend() != 0)
+    {
+        return -1;
+    }
+
+    if (StopPlayout() != 0)
+    {
+        return -1;
+    }
+
+    return 0;
+}
+
+int VoEBaseImpl::SetLocalReceiver(int channel, int port, int RTCPport,
+                                  const char ipAddr[64],
+                                  const char multiCastAddr[64])
+{
+    //  Inititialize local receive sockets (RTP and RTCP).
+    //
+    //  The sockets are always first closed and then created again by this
+    //  function call. The created sockets are by default also used for
+    // transmission (unless source port is set in SetSendDestination).
+    //
+    //  Note that, sockets can also be created automatically if a user calls
+    //  SetSendDestination and StartSend without having called SetLocalReceiver
+    // first. The sockets are then created at the first packet transmission.
+
+    CriticalSectionScoped cs(_shared->crit_sec());
+    if (ipAddr == NULL && multiCastAddr == NULL)
+    {
+        WEBRTC_TRACE(kTraceApiCall, kTraceVoice,
+            VoEId(_shared->instance_id(), -1),
+            "SetLocalReceiver(channel=%d, port=%d, RTCPport=%d)",
+            channel, port, RTCPport);
+    }
+    else if (ipAddr != NULL && multiCastAddr == NULL)
+    {
+        WEBRTC_TRACE(kTraceApiCall, kTraceVoice,
+          VoEId(_shared->instance_id(), -1),
+          "SetLocalReceiver(channel=%d, port=%d, RTCPport=%d, ipAddr=%s)",
+          channel, port, RTCPport, ipAddr);
+    }
+    else if (ipAddr == NULL && multiCastAddr != NULL)
+    {
+        WEBRTC_TRACE(kTraceApiCall, kTraceVoice,
+            VoEId(_shared->instance_id(), -1),
+            "SetLocalReceiver(channel=%d, port=%d, RTCPport=%d, "
+            "multiCastAddr=%s)", channel, port, RTCPport, multiCastAddr);
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceApiCall, kTraceVoice,
+            VoEId(_shared->instance_id(), -1),
+            "SetLocalReceiver(channel=%d, port=%d, RTCPport=%d, "
+            "ipAddr=%s, multiCastAddr=%s)", channel, port, RTCPport, ipAddr,
+            multiCastAddr);
+    }
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if ((port < 0) || (port > 65535))
+    {
+        _shared->SetLastError(VE_INVALID_PORT_NMBR, kTraceError,
+            "SetLocalReceiver() invalid RTP port");
+        return -1;
+    }
+    if (((RTCPport != kVoEDefault) && (RTCPport < 0)) || ((RTCPport
+            != kVoEDefault) && (RTCPport > 65535)))
+    {
+        _shared->SetLastError(VE_INVALID_PORT_NMBR, kTraceError,
+            "SetLocalReceiver() invalid RTCP port");
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetLocalReceiver() failed to locate channel");
+        return -1;
+    }
+
+    // Cast RTCP port. In the RTP module 0 corresponds to RTP port + 1 in
+    // the module, which is the default.
+    WebRtc_UWord16 rtcpPortUW16(0);
+    if (RTCPport != kVoEDefault)
+    {
+        rtcpPortUW16 = static_cast<WebRtc_UWord16> (RTCPport);
+    }
+
+    return channelPtr->SetLocalReceiver(port, rtcpPortUW16, ipAddr,
+                                        multiCastAddr);
+#else
+    _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED,
+        kTraceWarning, "SetLocalReceiver() VoE is built for external "
+        "transport");
+    return -1;
+#endif
+}
+
+int VoEBaseImpl::GetLocalReceiver(int channel, int& port, int& RTCPport,
+                                  char ipAddr[64])
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetLocalReceiver(channel=%d, ipAddr[]=?)", channel);
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetLocalReceiver() failed to locate channel");
+        return -1;
+    }
+    WebRtc_Word32 ret = channelPtr->GetLocalReceiver(port, RTCPport, ipAddr);
+    if (ipAddr != NULL)
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+            VoEId(_shared->instance_id(), -1),
+            "GetLocalReceiver() => port=%d, RTCPport=%d, ipAddr=%s",
+            port, RTCPport, ipAddr);
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+          VoEId(_shared->instance_id(), -1),
+          "GetLocalReceiver() => port=%d, RTCPport=%d", port, RTCPport);
+    }
+    return ret;
+#else
+    _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "SetLocalReceiver() VoE is built for external transport");
+    return -1;
+#endif
+}
+
+int VoEBaseImpl::SetSendDestination(int channel, int port, const char* ipaddr,
+                                    int sourcePort, int RTCPport)
+{
+    WEBRTC_TRACE(
+                 kTraceApiCall,
+                 kTraceVoice,
+                 VoEId(_shared->instance_id(), -1),
+                 "SetSendDestination(channel=%d, port=%d, ipaddr=%s,"
+                 "sourcePort=%d, RTCPport=%d)",
+                 channel, port, ipaddr, sourcePort, RTCPport);
+    CriticalSectionScoped cs(_shared->crit_sec());
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetSendDestination() failed to locate channel");
+        return -1;
+    }
+    if ((port < 0) || (port > 65535))
+    {
+        _shared->SetLastError(VE_INVALID_PORT_NMBR, kTraceError,
+            "SetSendDestination() invalid RTP port");
+        return -1;
+    }
+    if (((RTCPport != kVoEDefault) && (RTCPport < 0)) || ((RTCPport
+            != kVoEDefault) && (RTCPport > 65535)))
+    {
+        _shared->SetLastError(VE_INVALID_PORT_NMBR, kTraceError,
+            "SetSendDestination() invalid RTCP port");
+        return -1;
+    }
+    if (((sourcePort != kVoEDefault) && (sourcePort < 0)) || ((sourcePort
+            != kVoEDefault) && (sourcePort > 65535)))
+    {
+        _shared->SetLastError(VE_INVALID_PORT_NMBR, kTraceError,
+            "SetSendDestination() invalid source port");
+        return -1;
+    }
+
+    // Cast RTCP port. In the RTP module 0 corresponds to RTP port + 1 in the
+    // module, which is the default.
+    WebRtc_UWord16 rtcpPortUW16(0);
+    if (RTCPport != kVoEDefault)
+    {
+        rtcpPortUW16 = static_cast<WebRtc_UWord16> (RTCPport);
+        WEBRTC_TRACE(
+                     kTraceInfo,
+                     kTraceVoice,
+                     VoEId(_shared->instance_id(), channel),
+                     "SetSendDestination() non default RTCP port %u will be "
+                     "utilized",
+                     rtcpPortUW16);
+    }
+
+    return channelPtr->SetSendDestination(port, ipaddr, sourcePort,
+                                          rtcpPortUW16);
+#else
+    _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "SetSendDestination() VoE is built for external transport");
+    return -1;
+#endif
+}
+
+int VoEBaseImpl::GetSendDestination(int channel, int& port, char ipAddr[64],
+                                    int& sourcePort, int& RTCPport)
+{
+    WEBRTC_TRACE(
+                 kTraceApiCall,
+                 kTraceVoice,
+                 VoEId(_shared->instance_id(), -1),
+                 "GetSendDestination(channel=%d, ipAddr[]=?, sourcePort=?,"
+                 "RTCPport=?)",
+                 channel);
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetSendDestination() failed to locate channel");
+        return -1;
+    }
+    WebRtc_Word32 ret = channelPtr->GetSendDestination(port, ipAddr,
+                                                       sourcePort, RTCPport);
+    if (ipAddr != NULL)
+    {
+        WEBRTC_TRACE(
+                     kTraceStateInfo,
+                     kTraceVoice,
+                     VoEId(_shared->instance_id(), -1),
+                     "GetSendDestination() => port=%d, RTCPport=%d, ipAddr=%s, "
+                     "sourcePort=%d, RTCPport=%d",
+                     port, RTCPport, ipAddr, sourcePort, RTCPport);
+    }
+    else
+    {
+        WEBRTC_TRACE(
+                     kTraceStateInfo,
+                     kTraceVoice,
+                     VoEId(_shared->instance_id(), -1),
+                     "GetSendDestination() => port=%d, RTCPport=%d, "
+                     "sourcePort=%d, RTCPport=%d",
+                     port, RTCPport, sourcePort, RTCPport);
+    }
+    return ret;
+#else
+    _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "GetSendDestination() VoE is built for external transport");
+    return -1;
+#endif
+}
+
+int VoEBaseImpl::StartReceive(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "StartReceive(channel=%d)", channel);
+    CriticalSectionScoped cs(_shared->crit_sec());
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "StartReceive() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->StartReceiving();
+}
+
+int VoEBaseImpl::StopReceive(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "StopListen(channel=%d)", channel);
+    CriticalSectionScoped cs(_shared->crit_sec());
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetLocalReceiver() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->StopReceiving();
+}
+
+int VoEBaseImpl::StartPlayout(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "StartPlayout(channel=%d)", channel);
+    CriticalSectionScoped cs(_shared->crit_sec());
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "StartPlayout() failed to locate channel");
+        return -1;
+    }
+    if (channelPtr->Playing())
+    {
+        return 0;
+    }
+    if (StartPlayout() != 0)
+    {
+        _shared->SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR, kTraceError,
+            "StartPlayout() failed to start playout");
+        return -1;
+    }
+    return channelPtr->StartPlayout();
+}
+
+int VoEBaseImpl::StopPlayout(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "StopPlayout(channel=%d)", channel);
+    CriticalSectionScoped cs(_shared->crit_sec());
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "StopPlayout() failed to locate channel");
+        return -1;
+    }
+    if (channelPtr->StopPlayout() != 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+            VoEId(_shared->instance_id(), -1),
+            "StopPlayout() failed to stop playout for channel %d", channel);
+    }
+    return StopPlayout();
+}
+
+int VoEBaseImpl::StartSend(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "StartSend(channel=%d)", channel);
+    CriticalSectionScoped cs(_shared->crit_sec());
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "StartSend() failed to locate channel");
+        return -1;
+    }
+    if (channelPtr->Sending())
+    {
+        return 0;
+    }
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!channelPtr->ExternalTransport()
+            && !channelPtr->SendSocketsInitialized())
+    {
+        _shared->SetLastError(VE_DESTINATION_NOT_INITED, kTraceError,
+            "StartSend() must set send destination first");
+        return -1;
+    }
+#endif
+    if (StartSend() != 0)
+    {
+        _shared->SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR, kTraceError,
+            "StartSend() failed to start recording");
+        return -1;
+    }
+    return channelPtr->StartSend();
+}
+
+int VoEBaseImpl::StopSend(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "StopSend(channel=%d)", channel);
+    CriticalSectionScoped cs(_shared->crit_sec());
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "StopSend() failed to locate channel");
+        return -1;
+    }
+    if (channelPtr->StopSend() != 0)
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+            VoEId(_shared->instance_id(), -1),
+            "StopSend() failed to stop sending for channel %d", channel);
+    }
+    return StopSend();
+}
+
+int VoEBaseImpl::GetVersion(char version[1024])
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetVersion(version=?)");
+    assert(kVoiceEngineVersionMaxMessageSize == 1024);
+
+    if (version == NULL)
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError);
+        return (-1);
+    }
+
+    char versionBuf[kVoiceEngineVersionMaxMessageSize];
+    char* versionPtr = versionBuf;
+
+    WebRtc_Word32 len = 0;
+    WebRtc_Word32 accLen = 0;
+
+    len = AddVoEVersion(versionPtr);
+    if (len == -1)
+    {
+        return -1;
+    }
+    versionPtr += len;
+    accLen += len;
+    assert(accLen < kVoiceEngineVersionMaxMessageSize);
+
+    len = AddBuildInfo(versionPtr);
+    if (len == -1)
+    {
+        return -1;
+    }
+    versionPtr += len;
+    accLen += len;
+    assert(accLen < kVoiceEngineVersionMaxMessageSize);
+
+#ifdef WEBRTC_EXTERNAL_TRANSPORT
+    len = AddExternalTransportBuild(versionPtr);
+    if (len == -1)
+    {
+         return -1;
+    }
+    versionPtr += len;
+    accLen += len;
+    assert(accLen < kVoiceEngineVersionMaxMessageSize);
+#endif
+#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+    len = AddExternalRecAndPlayoutBuild(versionPtr);
+    if (len == -1)
+    {
+        return -1;
+    }
+    versionPtr += len;
+    accLen += len;
+    assert(accLen < kVoiceEngineVersionMaxMessageSize);
+ #endif
+
+    memcpy(version, versionBuf, accLen);
+    version[accLen] = '\0';
+
+    // to avoid the truncation in the trace, split the string into parts
+    char partOfVersion[256];
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1), "GetVersion() =>");
+    for (int partStart = 0; partStart < accLen;)
+    {
+        memset(partOfVersion, 0, sizeof(partOfVersion));
+        int partEnd = partStart + 180;
+        while (version[partEnd] != '\n' && version[partEnd] != '\0')
+        {
+            partEnd--;
+        }
+        if (partEnd < accLen)
+        {
+            memcpy(partOfVersion, &version[partStart], partEnd - partStart);
+        }
+        else
+        {
+            memcpy(partOfVersion, &version[partStart], accLen - partStart);
+        }
+        partStart = partEnd;
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+            VoEId(_shared->instance_id(), -1), "%s", partOfVersion);
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 VoEBaseImpl::AddBuildInfo(char* str) const
+{
+    return sprintf(str, "Build: svn:%s %s\n", WEBRTC_SVNREVISION, BUILDINFO);
+}
+
+WebRtc_Word32 VoEBaseImpl::AddVoEVersion(char* str) const
+{
+    return sprintf(str, "VoiceEngine 4.1.0\n");
+}
+
+#ifdef WEBRTC_EXTERNAL_TRANSPORT
+WebRtc_Word32 VoEBaseImpl::AddExternalTransportBuild(char* str) const
+{
+    return sprintf(str, "External transport build\n");
+}
+#endif
+
+#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+WebRtc_Word32 VoEBaseImpl::AddExternalRecAndPlayoutBuild(char* str) const
+{
+    return sprintf(str, "External recording and playout build\n");
+}
+#endif
+
+int VoEBaseImpl::LastError()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "LastError()");
+    return (_shared->statistics().LastError());
+}
+
+
+int VoEBaseImpl::SetNetEQPlayoutMode(int channel, NetEqModes mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetNetEQPlayoutMode(channel=%i, mode=%i)", channel, mode);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetNetEQPlayoutMode() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetNetEQPlayoutMode(mode);
+}
+
+int VoEBaseImpl::GetNetEQPlayoutMode(int channel, NetEqModes& mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetNetEQPlayoutMode(channel=%i, mode=?)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetNetEQPlayoutMode() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetNetEQPlayoutMode(mode);
+}
+
+int VoEBaseImpl::SetNetEQBGNMode(int channel, NetEqBgnModes mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetNetEQBGNMode(channel=%i, mode=%i)", channel, mode);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetNetEQBGNMode() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetNetEQBGNMode(mode);
+}
+
+int VoEBaseImpl::GetNetEQBGNMode(int channel, NetEqBgnModes& mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetNetEQBGNMode(channel=%i, mode=?)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetNetEQBGNMode() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetNetEQBGNMode(mode);
+}
+
+int VoEBaseImpl::SetOnHoldStatus(int channel, bool enable, OnHoldModes mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetOnHoldStatus(channel=%d, enable=%d, mode=%d)", channel,
+                 enable, mode);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetOnHoldStatus() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetOnHoldStatus(enable, mode);
+}
+
+int VoEBaseImpl::GetOnHoldStatus(int channel, bool& enabled, OnHoldModes& mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetOnHoldStatus(channel=%d, enabled=?, mode=?)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetOnHoldStatus() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetOnHoldStatus(enabled, mode);
+}
+
+WebRtc_Word32 VoEBaseImpl::StartPlayout()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoEBaseImpl::StartPlayout()");
+    if (_shared->audio_device()->Playing())
+    {
+        return 0;
+    }
+    if (!_shared->ext_playout())
+    {
+        if (_shared->audio_device()->InitPlayout() != 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "StartPlayout() failed to initialize playout");
+            return -1;
+        }
+        if (_shared->audio_device()->StartPlayout() != 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "StartPlayout() failed to start playout");
+            return -1;
+        }
+    }
+    return 0;
+}
+
+WebRtc_Word32 VoEBaseImpl::StopPlayout()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoEBaseImpl::StopPlayout()");
+
+    WebRtc_Word32 numOfChannels = _shared->channel_manager().NumOfChannels();
+    if (numOfChannels <= 0)
+    {
+        return 0;
+    }
+
+    WebRtc_UWord16 nChannelsPlaying(0);
+    WebRtc_Word32* channelsArray = new WebRtc_Word32[numOfChannels];
+
+    // Get number of playing channels
+    _shared->channel_manager().GetChannelIds(channelsArray, numOfChannels);
+    for (int i = 0; i < numOfChannels; i++)
+    {
+        voe::ScopedChannel sc(_shared->channel_manager(), channelsArray[i]);
+        voe::Channel* chPtr = sc.ChannelPtr();
+        if (chPtr)
+        {
+            if (chPtr->Playing())
+            {
+                nChannelsPlaying++;
+            }
+        }
+    }
+    delete[] channelsArray;
+
+    // Stop audio-device playing if no channel is playing out
+    if (nChannelsPlaying == 0)
+    {
+        if (_shared->audio_device()->StopPlayout() != 0)
+        {
+            _shared->SetLastError(VE_CANNOT_STOP_PLAYOUT, kTraceError,
+                "StopPlayout() failed to stop playout");
+            return -1;
+        }
+    }
+    return 0;
+}
+
+WebRtc_Word32 VoEBaseImpl::StartSend()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoEBaseImpl::StartSend()");
+    if (_shared->audio_device()->Recording())
+    {
+        return 0;
+    }
+    if (!_shared->ext_recording())
+    {
+        if (_shared->audio_device()->InitRecording() != 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "StartSend() failed to initialize recording");
+            return -1;
+        }
+        if (_shared->audio_device()->StartRecording() != 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "StartSend() failed to start recording");
+            return -1;
+        }
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 VoEBaseImpl::StopSend()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoEBaseImpl::StopSend()");
+
+    if (_shared->NumOfSendingChannels() == 0 &&
+        !_shared->transmit_mixer()->IsRecordingMic())
+    {
+        // Stop audio-device recording if no channel is recording
+        if (_shared->audio_device()->StopRecording() != 0)
+        {
+            _shared->SetLastError(VE_CANNOT_STOP_RECORDING, kTraceError,
+                "StopSend() failed to stop recording");
+            return -1;
+        }
+        _shared->transmit_mixer()->StopSend();
+    }
+
+    return 0;
+}
+
+WebRtc_Word32 VoEBaseImpl::TerminateInternal()
+{
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoEBaseImpl::TerminateInternal()");
+
+    // Delete any remaining channel objects
+    WebRtc_Word32 numOfChannels = _shared->channel_manager().NumOfChannels();
+    if (numOfChannels > 0)
+    {
+        WebRtc_Word32* channelsArray = new WebRtc_Word32[numOfChannels];
+        _shared->channel_manager().GetChannelIds(channelsArray, numOfChannels);
+        for (int i = 0; i < numOfChannels; i++)
+        {
+            DeleteChannel(channelsArray[i]);
+        }
+        delete[] channelsArray;
+    }
+
+    if (_shared->process_thread())
+    {
+        if (_shared->audio_device())
+        {
+            if (_shared->process_thread()->
+                    DeRegisterModule(_shared->audio_device()) != 0)
+            {
+                _shared->SetLastError(VE_THREAD_ERROR, kTraceError,
+                    "TerminateInternal() failed to deregister ADM");
+            }
+        }
+        if (_shared->process_thread()->Stop() != 0)
+        {
+            _shared->SetLastError(VE_THREAD_ERROR, kTraceError,
+                "TerminateInternal() failed to stop module process thread");
+        }
+    }
+
+    // Audio Device Module
+
+    if (_shared->audio_device() != NULL)
+    {
+        if (_shared->audio_device()->StopPlayout() != 0)
+        {
+            _shared->SetLastError(VE_SOUNDCARD_ERROR, kTraceWarning,
+                "TerminateInternal() failed to stop playout");
+        }
+        if (_shared->audio_device()->StopRecording() != 0)
+        {
+            _shared->SetLastError(VE_SOUNDCARD_ERROR, kTraceWarning,
+                "TerminateInternal() failed to stop recording");
+        }
+        if (_shared->audio_device()->RegisterEventObserver(NULL) != 0) {
+          _shared->SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR, kTraceWarning,
+              "TerminateInternal() failed to de-register event observer "
+              "for the ADM");
+        }
+        if (_shared->audio_device()->RegisterAudioCallback(NULL) != 0) {
+          _shared->SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR, kTraceWarning,
+              "TerminateInternal() failed to de-register audio callback "
+              "for the ADM");
+        }
+        if (_shared->audio_device()->Terminate() != 0)
+        {
+            _shared->SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR, kTraceError,
+                "TerminateInternal() failed to terminate the ADM");
+        }
+       
+        _shared->set_audio_device(NULL);
+    }
+
+    // AP module
+
+    if (_shared->audio_processing() != NULL)
+    {
+        _shared->transmit_mixer()->SetAudioProcessingModule(NULL);
+        _shared->set_audio_processing(NULL);
+    }
+
+    return _shared->statistics().SetUnInitialized();
+}
+
+} // namespace webrtc
diff --git a/src/voice_engine/voe_base_impl.h b/src/voice_engine/voe_base_impl.h
new file mode 100644
index 0000000..0eb44fa
--- /dev/null
+++ b/src/voice_engine/voe_base_impl.h
@@ -0,0 +1,148 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_BASE_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_BASE_IMPL_H
+
+#include "voe_base.h"
+
+#include "module_common_types.h"
+#include "shared_data.h"
+
+namespace webrtc
+{
+
+class ProcessThread;
+
+class VoEBaseImpl: public VoEBase,
+                   public AudioTransport,
+                   public AudioDeviceObserver
+{
+public:
+    virtual int RegisterVoiceEngineObserver(VoiceEngineObserver& observer);
+
+    virtual int DeRegisterVoiceEngineObserver();
+
+    virtual int Init(AudioDeviceModule* external_adm = NULL);
+
+    virtual int Terminate();
+
+    virtual int MaxNumOfChannels();
+
+    virtual int CreateChannel();
+
+    virtual int DeleteChannel(int channel);
+
+    virtual int SetLocalReceiver(int channel, int port,
+                                 int RTCPport = kVoEDefault,
+                                 const char ipAddr[64] = NULL,
+                                 const char multiCastAddr[64] = NULL);
+
+    virtual int GetLocalReceiver(int channel, int& port, int& RTCPport,
+                                 char ipAddr[64]);
+
+    virtual int SetSendDestination(int channel, int port,
+                                   const char ipAddr[64],
+                                   int sourcePort = kVoEDefault,
+                                   int RTCPport = kVoEDefault);
+
+    virtual int GetSendDestination(int channel,
+                                   int& port,
+                                   char ipAddr[64],
+                                   int& sourcePort,
+                                   int& RTCPport);
+
+    virtual int StartReceive(int channel);
+
+    virtual int StartPlayout(int channel);
+
+    virtual int StartSend(int channel);
+
+    virtual int StopReceive(int channel);
+
+    virtual int StopPlayout(int channel);
+
+    virtual int StopSend(int channel);
+
+    virtual int SetNetEQPlayoutMode(int channel, NetEqModes mode);
+
+    virtual int GetNetEQPlayoutMode(int channel, NetEqModes& mode);
+
+    virtual int SetNetEQBGNMode(int channel, NetEqBgnModes mode);
+
+    virtual int GetNetEQBGNMode(int channel, NetEqBgnModes& mode);
+
+
+    virtual int SetOnHoldStatus(int channel,
+                                bool enable,
+                                OnHoldModes mode = kHoldSendAndPlay);
+
+    virtual int GetOnHoldStatus(int channel, bool& enabled, OnHoldModes& mode);
+
+    virtual int GetVersion(char version[1024]);
+
+    virtual int LastError();
+
+    // AudioTransport
+    virtual WebRtc_Word32
+        RecordedDataIsAvailable(const void* audioSamples,
+                                const WebRtc_UWord32 nSamples,
+                                const WebRtc_UWord8 nBytesPerSample,
+                                const WebRtc_UWord8 nChannels,
+                                const WebRtc_UWord32 samplesPerSec,
+                                const WebRtc_UWord32 totalDelayMS,
+                                const WebRtc_Word32 clockDrift,
+                                const WebRtc_UWord32 currentMicLevel,
+                                WebRtc_UWord32& newMicLevel);
+
+    virtual WebRtc_Word32 NeedMorePlayData(const WebRtc_UWord32 nSamples,
+                                           const WebRtc_UWord8 nBytesPerSample,
+                                           const WebRtc_UWord8 nChannels,
+                                           const WebRtc_UWord32 samplesPerSec,
+                                           void* audioSamples,
+                                           WebRtc_UWord32& nSamplesOut);
+
+    // AudioDeviceObserver
+    virtual void OnErrorIsReported(const ErrorCode error);
+    virtual void OnWarningIsReported(const WarningCode warning);
+
+protected:
+    VoEBaseImpl(voe::SharedData* shared);
+    virtual ~VoEBaseImpl();
+
+private:
+    WebRtc_Word32 StartPlayout();
+    WebRtc_Word32 StopPlayout();
+    WebRtc_Word32 StartSend();
+    WebRtc_Word32 StopSend();
+    WebRtc_Word32 TerminateInternal();
+
+    WebRtc_Word32 AddBuildInfo(char* str) const;
+    WebRtc_Word32 AddVoEVersion(char* str) const;
+#ifdef WEBRTC_EXTERNAL_TRANSPORT
+    WebRtc_Word32 AddExternalTransportBuild(char* str) const;
+#endif
+#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+    WebRtc_Word32 AddExternalRecAndPlayoutBuild(char* str) const;
+#endif
+    VoiceEngineObserver* _voiceEngineObserverPtr;
+    CriticalSectionWrapper& _callbackCritSect;
+
+    bool _voiceEngineObserver;
+    WebRtc_UWord32 _oldVoEMicLevel;
+    WebRtc_UWord32 _oldMicLevel;
+    AudioFrame _audioFrame;
+    voe::SharedData* _shared;
+
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_VOE_BASE_IMPL_H
diff --git a/src/voice_engine/voe_call_report_impl.cc b/src/voice_engine/voe_call_report_impl.cc
new file mode 100644
index 0000000..ef4c39c
--- /dev/null
+++ b/src/voice_engine/voe_call_report_impl.cc
@@ -0,0 +1,411 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_call_report_impl.h"
+
+#include "audio_processing.h"
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "file_wrapper.h"
+#include "trace.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+namespace webrtc
+{
+
+VoECallReport* VoECallReport::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_CALL_REPORT_API
+    return NULL;
+#else
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* s = reinterpret_cast<VoiceEngineImpl*>(voiceEngine);
+    s->AddRef();
+    return s;
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_CALL_REPORT_API
+
+VoECallReportImpl::VoECallReportImpl(voe::SharedData* shared) :
+    _file(*FileWrapper::Create()), _shared(shared)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoECallReportImpl() - ctor");
+}
+
+VoECallReportImpl::~VoECallReportImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "~VoECallReportImpl() - dtor");
+    delete &_file;
+}
+
+int VoECallReportImpl::ResetCallReportStatistics(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "ResetCallReportStatistics(channel=%d)", channel);
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    assert(_shared->audio_processing() != NULL);
+
+    bool echoMode =
+        _shared->audio_processing()->echo_cancellation()->are_metrics_enabled();
+
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "  current AudioProcessingModule echo metric state %d)",
+                 echoMode);
+    // Reset the APM statistics
+    if (_shared->audio_processing()->echo_cancellation()->enable_metrics(true)
+        != 0)
+    {
+        _shared->SetLastError(VE_APM_ERROR, kTraceError,
+            "ResetCallReportStatistics() unable to "
+            "set the AudioProcessingModule echo metrics state");
+        return -1;
+    }
+    // Restore metric states
+    _shared->audio_processing()->echo_cancellation()->enable_metrics(echoMode);
+
+    // Reset channel dependent statistics
+    if (channel != -1)
+    {
+        voe::ScopedChannel sc(_shared->channel_manager(), channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                "ResetCallReportStatistics() failed to locate channel");
+            return -1;
+        }
+        channelPtr->ResetDeadOrAliveCounters();
+        channelPtr->ResetRTCPStatistics();
+    }
+    else
+    {
+        WebRtc_Word32 numOfChannels =
+            _shared->channel_manager().NumOfChannels();
+        if (numOfChannels <= 0)
+        {
+            return 0;
+        }
+        WebRtc_Word32* channelsArray = new WebRtc_Word32[numOfChannels];
+        _shared->channel_manager().GetChannelIds(channelsArray, numOfChannels);
+        for (int i = 0; i < numOfChannels; i++)
+        {
+            voe::ScopedChannel sc(_shared->channel_manager(), channelsArray[i]);
+            voe::Channel* channelPtr = sc.ChannelPtr();
+            if (channelPtr)
+            {
+                channelPtr->ResetDeadOrAliveCounters();
+                channelPtr->ResetRTCPStatistics();
+            }
+        }
+        delete[] channelsArray;
+    }
+
+    return 0;
+}
+
+int VoECallReportImpl::GetEchoMetricSummary(EchoStatistics& stats)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetEchoMetricSummary()");
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    assert(_shared->audio_processing() != NULL);
+
+    return (GetEchoMetricSummaryInternal(stats));
+}
+
+int VoECallReportImpl::GetEchoMetricSummaryInternal(EchoStatistics& stats)
+{
+    // Retrieve echo metrics from the AudioProcessingModule
+    int ret(0);
+    bool mode(false);
+    EchoCancellation::Metrics metrics;
+
+    // Ensure that echo metrics is enabled
+
+    mode =
+        _shared->audio_processing()->echo_cancellation()->are_metrics_enabled();
+    if (mode != false)
+    {
+        ret = _shared->audio_processing()->echo_cancellation()->
+              GetMetrics(&metrics);
+        if (ret != 0)
+        {
+            WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "  AudioProcessingModule GetMetrics() => error");
+        }
+    }
+    else
+    {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+            VoEId(_shared->instance_id(), -1),
+            "  AudioProcessingModule echo metrics is not enabled");
+    }
+
+    if ((ret != 0) || (mode == false))
+    {
+        // Mark complete struct as invalid (-100 dB)
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice,
+            VoEId(_shared->instance_id(), -1),
+            "  unable to retrieve echo metrics from the AudioProcessingModule");
+        stats.erl.min = -100;
+        stats.erl.max = -100;
+        stats.erl.average = -100;
+        stats.erle.min = -100;
+        stats.erle.max = -100;
+        stats.erle.average = -100;
+        stats.rerl.min = -100;
+        stats.rerl.max = -100;
+        stats.rerl.average = -100;
+        stats.a_nlp.min = -100;
+        stats.a_nlp.max = -100;
+        stats.a_nlp.average = -100;
+    }
+    else
+    {
+
+        // Deliver output results to user
+        stats.erl.min = metrics.echo_return_loss.minimum;
+        stats.erl.max = metrics.echo_return_loss.maximum;
+        stats.erl.average = metrics.echo_return_loss.average;
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+            VoEId(_shared->instance_id(), -1), "  erl: min=%d, max=%d, avg=%d",
+            stats.erl.min, stats.erl.max, stats.erl.average);
+
+        stats.erle.min = metrics.echo_return_loss_enhancement.minimum;
+        stats.erle.max = metrics.echo_return_loss_enhancement.maximum;
+        stats.erle.average = metrics.echo_return_loss_enhancement.average;
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+            VoEId(_shared->instance_id(), -1), "  erle: min=%d, max=%d, avg=%d",
+            stats.erle.min, stats.erle.max, stats.erle.average);
+
+        stats.rerl.min = metrics.residual_echo_return_loss.minimum;
+        stats.rerl.max = metrics.residual_echo_return_loss.maximum;
+        stats.rerl.average = metrics.residual_echo_return_loss.average;
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+            VoEId(_shared->instance_id(), -1), "  rerl: min=%d, max=%d, avg=%d",
+            stats.rerl.min, stats.rerl.max, stats.rerl.average);
+
+        stats.a_nlp.min = metrics.a_nlp.minimum;
+        stats.a_nlp.max = metrics.a_nlp.maximum;
+        stats.a_nlp.average = metrics.a_nlp.average;
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+            VoEId(_shared->instance_id(), -1),
+            "  a_nlp: min=%d, max=%d, avg=%d",
+            stats.a_nlp.min, stats.a_nlp.max, stats.a_nlp.average);
+    }
+    return 0;
+}
+
+int VoECallReportImpl::GetRoundTripTimeSummary(int channel, StatVal& delaysMs)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetRoundTripTimeSummary()");
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRoundTripTimeSummary() failed to locate channel");
+        return -1;
+    }
+
+    return channelPtr->GetRoundTripTimeSummary(delaysMs);
+}
+
+int VoECallReportImpl::GetDeadOrAliveSummary(int channel,
+                                             int& numOfDeadDetections,
+                                             int& numOfAliveDetections)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetDeadOrAliveSummary(channel=%d)", channel);
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    return (GetDeadOrAliveSummaryInternal(channel, numOfDeadDetections,
+                                          numOfAliveDetections));
+}
+
+int VoECallReportImpl::GetDeadOrAliveSummaryInternal(int channel,
+                                                     int& numOfDeadDetections,
+                                                     int& numOfAliveDetections)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetDeadOrAliveSummary(channel=%d)", channel);
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRoundTripTimeSummary() failed to locate channel");
+        return -1;
+    }
+
+    return channelPtr->GetDeadOrAliveCounters(numOfDeadDetections,
+                                              numOfAliveDetections);
+}
+
+int VoECallReportImpl::WriteReportToFile(const char* fileNameUTF8)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "WriteReportToFile(fileNameUTF8=%s)", fileNameUTF8);
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    if (NULL == fileNameUTF8)
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "WriteReportToFile() invalid filename");
+        return -1;
+    }
+
+    if (_file.Open())
+    {
+        _file.CloseFile();
+    }
+
+    // Open text file in write mode
+    if (_file.OpenFile(fileNameUTF8, false, false, true) != 0)
+    {
+        _shared->SetLastError(VE_BAD_FILE, kTraceError,
+            "WriteReportToFile() unable to open the file");
+        return -1;
+    }
+
+    // Summarize information and add it to the open file
+    //
+    _file.WriteText("WebRtc VoiceEngine Call Report\n");
+    _file.WriteText("==============================\n");
+    _file.WriteText("\nNetwork Packet Round Trip Time (RTT)\n");
+    _file.WriteText("------------------------------------\n\n");
+
+    WebRtc_Word32 numOfChannels = _shared->channel_manager().NumOfChannels();
+    if (numOfChannels <= 0)
+    {
+        return 0;
+    }
+    WebRtc_Word32* channelsArray = new WebRtc_Word32[numOfChannels];
+    _shared->channel_manager().GetChannelIds(channelsArray, numOfChannels);
+    for (int ch = 0; ch < numOfChannels; ch++)
+    {
+        voe::ScopedChannel sc(_shared->channel_manager(), channelsArray[ch]);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr)
+        {
+            StatVal delaysMs;
+            _file.WriteText("channel %d:\n", ch);
+            channelPtr->GetRoundTripTimeSummary(delaysMs);
+            _file.WriteText("  min:%5d [ms]\n", delaysMs.min);
+            _file.WriteText("  max:%5d [ms]\n", delaysMs.max);
+            _file.WriteText("  avg:%5d [ms]\n", delaysMs.average);
+        }
+    }
+
+    _file.WriteText("\nDead-or-Alive Connection Detections\n");
+    _file.WriteText("------------------------------------\n\n");
+
+    for (int ch = 0; ch < numOfChannels; ch++)
+    {
+        voe::ScopedChannel sc(_shared->channel_manager(), channelsArray[ch]);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr)
+        {
+            int nDead(0);
+            int nAlive(0);
+            _file.WriteText("channel %d:\n", ch);
+            GetDeadOrAliveSummary(ch, nDead, nAlive);
+            _file.WriteText("  #dead :%6d\n", nDead);
+            _file.WriteText("  #alive:%6d\n", nAlive);
+        }
+    }
+
+    delete[] channelsArray;
+
+    EchoStatistics echo;
+    GetEchoMetricSummary(echo);
+
+    _file.WriteText("\nEcho Metrics\n");
+    _file.WriteText("------------\n\n");
+
+    _file.WriteText("erl:\n");
+    _file.WriteText("  min:%5d [dB]\n", echo.erl.min);
+    _file.WriteText("  max:%5d [dB]\n", echo.erl.max);
+    _file.WriteText("  avg:%5d [dB]\n", echo.erl.average);
+    _file.WriteText("\nerle:\n");
+    _file.WriteText("  min:%5d [dB]\n", echo.erle.min);
+    _file.WriteText("  max:%5d [dB]\n", echo.erle.max);
+    _file.WriteText("  avg:%5d [dB]\n", echo.erle.average);
+    _file.WriteText("rerl:\n");
+    _file.WriteText("  min:%5d [dB]\n", echo.rerl.min);
+    _file.WriteText("  max:%5d [dB]\n", echo.rerl.max);
+    _file.WriteText("  avg:%5d [dB]\n", echo.rerl.average);
+    _file.WriteText("a_nlp:\n");
+    _file.WriteText("  min:%5d [dB]\n", echo.a_nlp.min);
+    _file.WriteText("  max:%5d [dB]\n", echo.a_nlp.max);
+    _file.WriteText("  avg:%5d [dB]\n", echo.a_nlp.average);
+
+    _file.WriteText("\n<END>");
+
+    _file.Flush();
+    _file.CloseFile();
+
+    return 0;
+}
+
+#endif  // WEBRTC_VOICE_ENGINE_CALL_REPORT_API
+
+} // namespace webrtc
diff --git a/src/voice_engine/voe_call_report_impl.h b/src/voice_engine/voe_call_report_impl.h
new file mode 100644
index 0000000..fcc708a
--- /dev/null
+++ b/src/voice_engine/voe_call_report_impl.h
@@ -0,0 +1,57 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_CALL_REPORT_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_CALL_REPORT_IMPL_H
+
+#include "voe_call_report.h"
+
+#include "shared_data.h"
+
+
+namespace webrtc
+{
+class FileWrapper;
+
+class VoECallReportImpl: public VoECallReport
+{
+public:
+    virtual int ResetCallReportStatistics(int channel);
+
+    virtual int GetEchoMetricSummary(EchoStatistics& stats);
+
+    virtual int GetRoundTripTimeSummary(int channel,
+                                        StatVal& delaysMs);
+
+    virtual int GetDeadOrAliveSummary(int channel, int& numOfDeadDetections,
+                                      int& numOfAliveDetections);
+
+    virtual int WriteReportToFile(const char* fileNameUTF8);
+
+protected:
+    VoECallReportImpl(voe::SharedData* shared);
+    virtual ~VoECallReportImpl();
+
+private:
+    int GetDeadOrAliveSummaryInternal(int channel,
+                                      int& numOfDeadDetections,
+                                      int& numOfAliveDetections);
+
+    int GetEchoMetricSummaryInternal(EchoStatistics& stats);
+
+    int GetSpeechAndNoiseSummaryInternal(LevelStatistics& stats);
+
+    FileWrapper& _file;
+    voe::SharedData* _shared;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_VOE_CALL_REPORT_IMPL_H
diff --git a/src/voice_engine/voe_codec_impl.cc b/src/voice_engine/voe_codec_impl.cc
new file mode 100644
index 0000000..b1879bb
--- /dev/null
+++ b/src/voice_engine/voe_codec_impl.cc
@@ -0,0 +1,662 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_codec_impl.h"
+
+#include "audio_coding_module.h"
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "trace.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+namespace webrtc
+{
+
+VoECodec* VoECodec::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_CODEC_API
+    return NULL;
+#else
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* s = reinterpret_cast<VoiceEngineImpl*>(voiceEngine);
+    s->AddRef();
+    return s;
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_CODEC_API
+
+VoECodecImpl::VoECodecImpl(voe::SharedData* shared) : _shared(shared)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoECodecImpl() - ctor");
+}
+
+VoECodecImpl::~VoECodecImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "~VoECodecImpl() - dtor");
+}
+
+int VoECodecImpl::NumOfCodecs()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "NumOfCodecs()");
+
+    // Number of supported codecs in the ACM
+    WebRtc_UWord8 nSupportedCodecs = AudioCodingModule::NumberOfCodecs();
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "NumOfCodecs() => %u", nSupportedCodecs);
+    return (nSupportedCodecs);
+}
+
+int VoECodecImpl::GetCodec(int index, CodecInst& codec)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetCodec(index=%d, codec=?)", index);
+    CodecInst acmCodec;
+    if (AudioCodingModule::Codec(index, (CodecInst&) acmCodec)
+            == -1)
+    {
+        _shared->SetLastError(VE_INVALID_LISTNR, kTraceError,
+            "GetCodec() invalid index");
+        return -1;
+    }
+    ACMToExternalCodecRepresentation(codec, acmCodec);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "GetCodec() => plname=%s, pacsize=%d, plfreq=%d, pltype=%d, "
+        "channels=%d, rate=%d", codec.plname, codec.pacsize,
+        codec.plfreq, codec.pltype, codec.channels, codec.rate);
+    return 0;
+}
+
+int VoECodecImpl::SetSendCodec(int channel, const CodecInst& codec)
+{
+    CodecInst copyCodec;
+    ExternalToACMCodecRepresentation(copyCodec, codec);
+
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetSendCodec(channel=%d, codec)", channel);
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "codec: plname=%s, pacsize=%d, plfreq=%d, pltype=%d, "
+                 "channels=%d, rate=%d", codec.plname, codec.pacsize,
+                 codec.plfreq, codec.pltype, codec.channels, codec.rate);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    // External sanity checks performed outside the ACM
+    if ((STR_CASE_CMP(copyCodec.plname, "L16") == 0) &&
+            (copyCodec.pacsize >= 960))
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "SetSendCodec() invalid L16 packet size");
+        return -1;
+    }
+    if (!STR_CASE_CMP(copyCodec.plname, "CN")
+            || !STR_CASE_CMP(copyCodec.plname, "TELEPHONE-EVENT")
+            || !STR_CASE_CMP(copyCodec.plname, "RED"))
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "SetSendCodec() invalid codec name");
+        return -1;
+    }
+    if ((copyCodec.channels != 1) && (copyCodec.channels != 2))
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "SetSendCodec() invalid number of channels");
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetSendCodec() failed to locate channel");
+        return -1;
+    }
+    if (!AudioCodingModule::IsCodecValid(
+            (CodecInst&) copyCodec))
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "SetSendCodec() invalid codec");
+        return -1;
+    }
+    if (channelPtr->SetSendCodec(copyCodec) != 0)
+    {
+        _shared->SetLastError(VE_CANNOT_SET_SEND_CODEC, kTraceError,
+            "SetSendCodec() failed to set send codec");
+        return -1;
+    }
+
+    return 0;
+}
+
+int VoECodecImpl::GetSendCodec(int channel, CodecInst& codec)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetSendCodec(channel=%d, codec=?)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetSendCodec() failed to locate channel");
+        return -1;
+    }
+    CodecInst acmCodec;
+    if (channelPtr->GetSendCodec(acmCodec) != 0)
+    {
+        _shared->SetLastError(VE_CANNOT_GET_SEND_CODEC, kTraceError,
+            "GetSendCodec() failed to get send codec");
+        return -1;
+    }
+    ACMToExternalCodecRepresentation(codec, acmCodec);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "GetSendCodec() => plname=%s, pacsize=%d, plfreq=%d, "
+        "channels=%d, rate=%d", codec.plname, codec.pacsize,
+        codec.plfreq, codec.channels, codec.rate);
+    return 0;
+}
+
+int VoECodecImpl::GetRecCodec(int channel, CodecInst& codec)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetRecCodec(channel=%d, codec=?)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRecCodec() failed to locate channel");
+        return -1;
+    }
+    CodecInst acmCodec;
+    if (channelPtr->GetRecCodec(acmCodec) != 0)
+    {
+        _shared->SetLastError(VE_CANNOT_GET_REC_CODEC, kTraceError,
+            "GetRecCodec() failed to get received codec");
+        return -1;
+    }
+    ACMToExternalCodecRepresentation(codec, acmCodec);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "GetRecCodec() => plname=%s, pacsize=%d, plfreq=%d, "
+        "channels=%d, rate=%d", codec.plname, codec.pacsize,
+        codec.plfreq, codec.channels, codec.rate);
+    return 0;
+}
+
+int VoECodecImpl::SetAMREncFormat(int channel, AmrMode mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetAMREncFormat(channel=%d, mode=%d)", channel, mode);
+#ifdef WEBRTC_CODEC_GSMAMR
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetAMREncFormat() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetAMREncFormat(mode);
+#else
+    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "SetAMREncFormat() AMR codec is not supported");
+    return -1;
+#endif
+}
+
+int VoECodecImpl::SetAMRDecFormat(int channel, AmrMode mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetAMRDecFormat(channel=%i, mode=%i)", channel, mode);
+#ifdef WEBRTC_CODEC_GSMAMR
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetAMRDecFormat() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetAMRDecFormat(mode);
+#else
+    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "SetAMRDecFormat() AMR codec is not supported");
+    return -1;
+#endif
+}
+
+int VoECodecImpl::SetAMRWbEncFormat(int channel, AmrMode mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetAMRWbEncFormat(channel=%d, mode=%d)", channel, mode);
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+#ifdef WEBRTC_CODEC_GSMAMRWB
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetAMRWbEncFormat() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetAMRWbEncFormat(mode);
+#else
+    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "SetAMRWbEncFormat() AMR-wb codec is not supported");
+    return -1;
+#endif
+}
+
+int VoECodecImpl::SetAMRWbDecFormat(int channel, AmrMode mode)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetAMRWbDecFormat(channel=%i, mode=%i)", channel, mode);
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+#ifdef WEBRTC_CODEC_GSMAMRWB
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetAMRWbDecFormat() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetAMRWbDecFormat(mode);
+#else
+    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "SetAMRWbDecFormat() AMR-wb codec is not supported");
+    return -1;
+#endif
+}
+
+int VoECodecImpl::SetRecPayloadType(int channel, const CodecInst& codec)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetRecPayloadType(channel=%d, codec)", channel);
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "codec: plname=%s, plfreq=%d, pltype=%d, channels=%u, "
+               "pacsize=%d, rate=%d", codec.plname, codec.plfreq, codec.pltype,
+               codec.channels, codec.pacsize, codec.rate);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRecPayloadType() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetRecPayloadType(codec);
+}
+
+int VoECodecImpl::GetRecPayloadType(int channel, CodecInst& codec)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetRecPayloadType(channel=%d, codec)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRecPayloadType() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRecPayloadType(codec);
+}
+
+int VoECodecImpl::SetSendCNPayloadType(int channel, int type,
+                                       PayloadFrequencies frequency)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetSendCNPayloadType(channel=%d, type=%d, frequency=%d)",
+                 channel, type, frequency);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (type < 96 || type > 127)
+    {
+        // Only allow dynamic range: 96 to 127
+        _shared->SetLastError(VE_INVALID_PLTYPE, kTraceError,
+            "SetSendCNPayloadType() invalid payload type");
+        return -1;
+    }
+    if ((frequency != kFreq16000Hz) && (frequency != kFreq32000Hz))
+    {
+        // It is not possible to modify the payload type for CN/8000.
+        // We only allow modification of the CN payload type for CN/16000
+        // and CN/32000.
+        _shared->SetLastError(VE_INVALID_PLFREQ, kTraceError,
+            "SetSendCNPayloadType() invalid payload frequency");
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetSendCNPayloadType() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetSendCNPayloadType(type, frequency);
+}
+
+int VoECodecImpl::SetISACInitTargetRate(int channel, int rateBps,
+                                        bool useFixedFrameSize)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetISACInitTargetRate(channel=%d, rateBps=%d, "
+                 "useFixedFrameSize=%d)", channel, rateBps, useFixedFrameSize);
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+#ifdef WEBRTC_CODEC_ISAC
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetISACInitTargetRate() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetISACInitTargetRate(rateBps, useFixedFrameSize);
+#else
+    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "SetISACInitTargetRate() iSAC codec is not supported");
+    return -1;
+#endif
+}
+
+int VoECodecImpl::SetISACMaxRate(int channel, int rateBps)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetISACMaxRate(channel=%d, rateBps=%d)", channel, rateBps);
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+#ifdef WEBRTC_CODEC_ISAC
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetISACMaxRate() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetISACMaxRate(rateBps);
+#else
+    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "SetISACMaxRate() iSAC codec is not supported");
+    return -1;
+#endif
+}
+
+int VoECodecImpl::SetISACMaxPayloadSize(int channel, int sizeBytes)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetISACMaxPayloadSize(channel=%d, sizeBytes=%d)", channel,
+                 sizeBytes);
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+#ifdef WEBRTC_CODEC_ISAC
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetISACMaxPayloadSize() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetISACMaxPayloadSize(sizeBytes);
+#else
+    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "SetISACMaxPayloadSize() iSAC codec is not supported");
+    return -1;
+#endif
+    return 0;
+}
+
+int VoECodecImpl::SetVADStatus(int channel, bool enable, VadModes mode,
+                               bool disableDTX)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetVADStatus(channel=%i, enable=%i, mode=%i, disableDTX=%i)",
+                 channel, enable, mode, disableDTX);
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetVADStatus failed to locate channel");
+        return -1;
+    }
+
+    ACMVADMode vadMode(VADNormal);
+    switch (mode)
+    {
+        case kVadConventional:
+            vadMode = VADNormal;
+            break;
+        case kVadAggressiveLow:
+            vadMode = VADLowBitrate;
+            break;
+        case kVadAggressiveMid:
+            vadMode = VADAggr;
+            break;
+        case kVadAggressiveHigh:
+            vadMode = VADVeryAggr;
+            break;
+    }
+    return channelPtr->SetVADStatus(enable, vadMode, disableDTX);
+}
+
+int VoECodecImpl::GetVADStatus(int channel, bool& enabled, VadModes& mode,
+                               bool& disabledDTX)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetVADStatus(channel=%i)", channel);
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetVADStatus failed to locate channel");
+        return -1;
+    }
+
+    ACMVADMode vadMode;
+    int ret = channelPtr->GetVADStatus(enabled, vadMode, disabledDTX);
+
+    if (ret != 0)
+    {
+        _shared->SetLastError(VE_INVALID_OPERATION, kTraceError,
+            "GetVADStatus failed to get VAD mode");
+        return -1;
+    }
+    switch (vadMode)
+    {
+        case VADNormal:
+            mode = kVadConventional;
+            break;
+        case VADLowBitrate:
+            mode = kVadAggressiveLow;
+            break;
+        case VADAggr:
+            mode = kVadAggressiveMid;
+            break;
+        case VADVeryAggr:
+            mode = kVadAggressiveHigh;
+            break;
+    }
+
+    return 0;
+}
+
+void VoECodecImpl::ACMToExternalCodecRepresentation(CodecInst& toInst,
+                                                    const CodecInst& fromInst)
+{
+    toInst = fromInst;
+    if (STR_CASE_CMP(fromInst.plname,"SILK") == 0)
+    {
+        if (fromInst.plfreq == 12000)
+        {
+            if (fromInst.pacsize == 320)
+            {
+                toInst.pacsize = 240;
+            }
+            else if (fromInst.pacsize == 640)
+            {
+                toInst.pacsize = 480;
+            }
+            else if (fromInst.pacsize == 960)
+            {
+                toInst.pacsize = 720;
+            }
+        }
+        else if (fromInst.plfreq == 24000)
+        {
+            if (fromInst.pacsize == 640)
+            {
+                toInst.pacsize = 480;
+            }
+            else if (fromInst.pacsize == 1280)
+            {
+                toInst.pacsize = 960;
+            }
+            else if (fromInst.pacsize == 1920)
+            {
+                toInst.pacsize = 1440;
+            }
+        }
+    }
+}
+
+void VoECodecImpl::ExternalToACMCodecRepresentation(CodecInst& toInst,
+                                                    const CodecInst& fromInst)
+{
+    toInst = fromInst;
+    if (STR_CASE_CMP(fromInst.plname,"SILK") == 0)
+    {
+        if (fromInst.plfreq == 12000)
+        {
+            if (fromInst.pacsize == 240)
+            {
+                toInst.pacsize = 320;
+            }
+            else if (fromInst.pacsize == 480)
+            {
+                toInst.pacsize = 640;
+            }
+            else if (fromInst.pacsize == 720)
+            {
+                toInst.pacsize = 960;
+            }
+        }
+        else if (fromInst.plfreq == 24000)
+        {
+            if (fromInst.pacsize == 480)
+            {
+                toInst.pacsize = 640;
+            }
+            else if (fromInst.pacsize == 960)
+            {
+                toInst.pacsize = 1280;
+            }
+            else if (fromInst.pacsize == 1440)
+            {
+                toInst.pacsize = 1920;
+            }
+        }
+    }
+}
+
+#endif  // WEBRTC_VOICE_ENGINE_CODEC_API
+
+} // namespace webrtc
diff --git a/src/voice_engine/voe_codec_impl.h b/src/voice_engine/voe_codec_impl.h
new file mode 100644
index 0000000..eb955ec
--- /dev/null
+++ b/src/voice_engine/voe_codec_impl.h
@@ -0,0 +1,89 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_CODEC_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_CODEC_IMPL_H
+
+#include "voe_codec.h"
+
+#include "shared_data.h"
+
+namespace webrtc
+{
+
+class VoECodecImpl: public VoECodec
+{
+public:
+    virtual int NumOfCodecs();
+
+    virtual int GetCodec(int index, CodecInst& codec);
+
+    virtual int SetSendCodec(int channel, const CodecInst& codec);
+
+    virtual int GetSendCodec(int channel, CodecInst& codec);
+
+    virtual int GetRecCodec(int channel, CodecInst& codec);
+
+    virtual int SetAMREncFormat(int channel,
+                                AmrMode mode = kRfc3267BwEfficient);
+
+    virtual int SetAMRDecFormat(int channel,
+                                AmrMode mode = kRfc3267BwEfficient);
+
+    virtual int SetAMRWbEncFormat(int channel,
+                                  AmrMode mode = kRfc3267BwEfficient);
+
+    virtual int SetAMRWbDecFormat(int channel,
+                                  AmrMode mode = kRfc3267BwEfficient);
+
+    virtual int SetSendCNPayloadType(
+        int channel, int type,
+        PayloadFrequencies frequency = kFreq16000Hz);
+
+    virtual int SetRecPayloadType(int channel,
+                                  const CodecInst& codec);
+
+    virtual int GetRecPayloadType(int channel, CodecInst& codec);
+
+    virtual int SetISACInitTargetRate(int channel,
+                                      int rateBps,
+                                      bool useFixedFrameSize = false);
+
+    virtual int SetISACMaxRate(int channel, int rateBps);
+
+    virtual int SetISACMaxPayloadSize(int channel, int sizeBytes);
+
+    virtual int SetVADStatus(int channel,
+                             bool enable,
+                             VadModes mode = kVadConventional,
+                             bool disableDTX = false);
+
+    virtual int GetVADStatus(int channel,
+                             bool& enabled,
+                             VadModes& mode,
+                             bool& disabledDTX);
+
+protected:
+    VoECodecImpl(voe::SharedData* shared);
+    virtual ~VoECodecImpl();
+
+private:
+    void ACMToExternalCodecRepresentation(CodecInst& toInst,
+                                          const CodecInst& fromInst);
+
+    void ExternalToACMCodecRepresentation(CodecInst& toInst,
+                                          const CodecInst& fromInst);
+
+    voe::SharedData* _shared;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_VOE_CODEC_IMPL_H
diff --git a/src/voice_engine/voe_dtmf_impl.cc b/src/voice_engine/voe_dtmf_impl.cc
new file mode 100644
index 0000000..e7c22bf
--- /dev/null
+++ b/src/voice_engine/voe_dtmf_impl.cc
@@ -0,0 +1,443 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_dtmf_impl.h"
+
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "output_mixer.h"
+#include "trace.h"
+#include "transmit_mixer.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+namespace webrtc {
+
+VoEDtmf* VoEDtmf::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_DTMF_API
+    return NULL;
+#else
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* s = reinterpret_cast<VoiceEngineImpl*>(voiceEngine);
+    s->AddRef();
+    return s;
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_DTMF_API
+
+VoEDtmfImpl::VoEDtmfImpl(voe::SharedData* shared) :
+    _dtmfFeedback(true),
+    _dtmfDirectFeedback(false),
+    _shared(shared)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoEDtmfImpl::VoEDtmfImpl() - ctor");
+}
+
+VoEDtmfImpl::~VoEDtmfImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoEDtmfImpl::~VoEDtmfImpl() - dtor");
+}
+
+int VoEDtmfImpl::SendTelephoneEvent(int channel,
+                                    int eventCode,
+                                    bool outOfBand,
+                                    int lengthMs,
+                                    int attenuationDb)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SendTelephoneEvent(channel=%d, eventCode=%d, outOfBand=%d,"
+                 "length=%d, attenuationDb=%d)",
+                 channel, eventCode, (int)outOfBand, lengthMs, attenuationDb);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SendTelephoneEvent() failed to locate channel");
+        return -1;
+    }
+    if (!channelPtr->Sending())
+    {
+        _shared->SetLastError(VE_NOT_SENDING, kTraceError,
+            "SendTelephoneEvent() sending is not active");
+        return -1;
+    }
+
+    // Sanity check
+    const int maxEventCode = outOfBand ?
+        static_cast<int>(kMaxTelephoneEventCode) :
+        static_cast<int>(kMaxDtmfEventCode);
+    const bool testFailed = ((eventCode < 0) ||
+        (eventCode > maxEventCode) ||
+        (lengthMs < kMinTelephoneEventDuration) ||
+        (lengthMs > kMaxTelephoneEventDuration) ||
+        (attenuationDb < kMinTelephoneEventAttenuation) ||
+        (attenuationDb > kMaxTelephoneEventAttenuation));
+    if (testFailed)
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "SendTelephoneEvent() invalid parameter(s)");
+        return -1;
+    }
+
+    const bool isDtmf =
+        (eventCode >= 0) && (eventCode <= kMaxDtmfEventCode);
+    const bool playDtmfToneDirect =
+        isDtmf && (_dtmfFeedback && _dtmfDirectFeedback);
+
+    if (playDtmfToneDirect)
+    {
+        // Mute the microphone signal while playing back the tone directly.
+        // This is to reduce the risk of introducing echo from the added output.
+        _shared->transmit_mixer()->UpdateMuteMicrophoneTime(lengthMs);
+
+        // Play out local feedback tone directly (same approach for both inband
+        // and outband).
+        // Reduce the length of the the tone with 80ms to reduce risk of echo.
+        // For non-direct feedback, outband and inband cases are handled
+        // differently.
+        _shared->output_mixer()->PlayDtmfTone(eventCode, lengthMs - 80,
+                                            attenuationDb);
+    }
+
+    if (outOfBand)
+    {
+        // The RTP/RTCP module will always deliver OnPlayTelephoneEvent when
+        // an event is transmitted. It is up to the VoE to utilize it or not.
+        // This flag ensures that feedback/playout is enabled; however, the
+        // channel object must still parse out the Dtmf events (0-15) from
+        // all possible events (0-255).
+        const bool playDTFMEvent = (_dtmfFeedback && !_dtmfDirectFeedback);
+
+        return channelPtr->SendTelephoneEventOutband(eventCode,
+                                                     lengthMs,
+                                                     attenuationDb,
+                                                     playDTFMEvent);
+    }
+    else
+    {
+        // For Dtmf tones, we want to ensure that inband tones are played out
+        // in sync with the transmitted audio. This flag is utilized by the
+        // channel object to determine if the queued Dtmf e vent shall also
+        // be fed to the output mixer in the same step as input audio is
+        // replaced by inband Dtmf tones.
+        const bool playDTFMEvent =
+            (isDtmf && _dtmfFeedback && !_dtmfDirectFeedback);
+
+        return channelPtr->SendTelephoneEventInband(eventCode,
+                                                    lengthMs,
+                                                    attenuationDb,
+                                                    playDTFMEvent);
+    }
+}
+
+int VoEDtmfImpl::SetSendTelephoneEventPayloadType(int channel,
+                                                  unsigned char type)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetSendTelephoneEventPayloadType(channel=%d, type=%u)",
+                 channel, type);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetSendTelephoneEventPayloadType() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetSendTelephoneEventPayloadType(type);
+}
+
+int VoEDtmfImpl::GetSendTelephoneEventPayloadType(int channel,
+                                                  unsigned char& type)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetSendTelephoneEventPayloadType(channel=%d)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetSendTelephoneEventPayloadType() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetSendTelephoneEventPayloadType(type);
+}
+
+int VoEDtmfImpl::PlayDtmfTone(int eventCode,
+                              int lengthMs,
+                              int attenuationDb)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "PlayDtmfTone(eventCode=%d, lengthMs=%d, attenuationDb=%d)",
+                 eventCode, lengthMs, attenuationDb);
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (!_shared->audio_device()->Playing())
+    {
+        _shared->SetLastError(VE_NOT_PLAYING, kTraceError,
+            "PlayDtmfTone() no channel is playing out");
+        return -1;
+    }
+    if ((eventCode < kMinDtmfEventCode) ||
+        (eventCode > kMaxDtmfEventCode) ||
+        (lengthMs < kMinTelephoneEventDuration) ||
+        (lengthMs > kMaxTelephoneEventDuration) ||
+        (attenuationDb <kMinTelephoneEventAttenuation) ||
+        (attenuationDb > kMaxTelephoneEventAttenuation))
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+        "PlayDtmfTone() invalid tone parameter(s)");
+        return -1;
+    }
+    return _shared->output_mixer()->PlayDtmfTone(eventCode, lengthMs,
+                                               attenuationDb);
+}
+
+int VoEDtmfImpl::StartPlayingDtmfTone(int eventCode,
+                                      int attenuationDb)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "StartPlayingDtmfTone(eventCode=%d, attenuationDb=%d)",
+                 eventCode, attenuationDb);
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (!_shared->audio_device()->Playing())
+    {
+        _shared->SetLastError(VE_NOT_PLAYING, kTraceError,
+            "StartPlayingDtmfTone() no channel is playing out");
+        return -1;
+    }
+    if ((eventCode < kMinDtmfEventCode) ||
+        (eventCode > kMaxDtmfEventCode) ||
+        (attenuationDb < kMinTelephoneEventAttenuation) ||
+        (attenuationDb > kMaxTelephoneEventAttenuation))
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "StartPlayingDtmfTone() invalid tone parameter(s)");
+        return -1;
+    }
+    return _shared->output_mixer()->StartPlayingDtmfTone(eventCode,
+                                                       attenuationDb);
+}
+
+int VoEDtmfImpl::StopPlayingDtmfTone()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "StopPlayingDtmfTone()");
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    return _shared->output_mixer()->StopPlayingDtmfTone();
+}
+
+int VoEDtmfImpl::RegisterTelephoneEventDetection(
+    int channel,
+    TelephoneEventDetectionMethods detectionMethod,
+    VoETelephoneEventObserver& observer)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+      "RegisterTelephoneEventDetection(channel=%d, detectionMethod=%d,"
+      "observer=0x%x)", channel, detectionMethod, &observer);
+#ifdef WEBRTC_DTMF_DETECTION
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "RegisterTelephoneEventDetection() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->RegisterTelephoneEventDetection(detectionMethod,
+                                                       observer);
+#else
+    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "SetTelephoneEventDetectionStatus() Dtmf detection is not supported");
+    return -1;
+#endif
+}
+
+int VoEDtmfImpl::DeRegisterTelephoneEventDetection(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+            "DeRegisterTelephoneEventDetection(channel=%d)", channel);
+#ifdef WEBRTC_DTMF_DETECTION
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "DeRegisterTelephoneEventDe tection() failed to locate channel");
+            return -1;
+    }
+    return channelPtr->DeRegisterTelephoneEventDetection();
+#else
+    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "DeRegisterTelephoneEventDetection() Dtmf detection is not supported");
+    return -1;
+#endif
+}
+
+
+int VoEDtmfImpl::GetTelephoneEventDetectionStatus(
+    int channel,
+    bool& enabled,
+    TelephoneEventDetectionMethods& detectionMethod)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetTelephoneEventDetectionStatus(channel=%d)", channel);
+#ifdef WEBRTC_DTMF_DETECTION
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetTelephoneEventDetectionStatus() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetTelephoneEventDetectionStatus(enabled,
+                                                        detectionMethod);
+#else
+    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "GetTelephoneEventDetectionStatus() Dtmf detection is not supported");
+    return -1;
+#endif
+}
+
+int VoEDtmfImpl::SetDtmfFeedbackStatus(bool enable, bool directFeedback)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetDtmfFeedbackStatus(enable=%d, directFeeback=%d)",
+                 (int)enable, (int)directFeedback);
+
+    CriticalSectionScoped sc(_shared->crit_sec());
+
+    _dtmfFeedback = enable;
+    _dtmfDirectFeedback = directFeedback;
+
+    return 0;
+}
+
+int VoEDtmfImpl::GetDtmfFeedbackStatus(bool& enabled, bool& directFeedback)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetDtmfFeedbackStatus()");
+
+    CriticalSectionScoped sc(_shared->crit_sec());
+
+    enabled = _dtmfFeedback;
+    directFeedback = _dtmfDirectFeedback;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "GetDtmfFeedbackStatus() => enabled=%d, directFeedback=%d",
+        enabled, directFeedback);
+    return 0;
+}
+
+int VoEDtmfImpl::SetDtmfPlayoutStatus(int channel, bool enable)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetDtmfPlayoutStatus(channel=%d, enable=%d)",
+                 channel, enable);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetDtmfPlayoutStatus() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetDtmfPlayoutStatus(enable);
+}
+
+int VoEDtmfImpl::GetDtmfPlayoutStatus(int channel, bool& enabled)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetDtmfPlayoutStatus(channel=%d, enabled=?)", channel);
+    IPHONE_NOT_SUPPORTED();
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetDtmfPlayoutStatus() failed to locate channel");
+        return -1;
+    }
+    enabled = channelPtr->DtmfPlayoutStatus();
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "GetDtmfPlayoutStatus() => enabled=%d", enabled);
+    return 0;
+}
+
+#endif  // #ifdef WEBRTC_VOICE_ENGINE_DTMF_API
+
+}  // namespace webrtc
diff --git a/src/voice_engine/voe_dtmf_impl.h b/src/voice_engine/voe_dtmf_impl.h
new file mode 100644
index 0000000..ad3874b
--- /dev/null
+++ b/src/voice_engine/voe_dtmf_impl.h
@@ -0,0 +1,79 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_DTMF_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_DTMF_IMPL_H
+
+#include "voe_dtmf.h"
+
+#include "shared_data.h"
+
+namespace webrtc
+{
+
+class VoEDtmfImpl : public VoEDtmf
+{
+public:
+    virtual int SendTelephoneEvent(
+        int channel,
+        int eventCode,
+        bool outOfBand = true,
+        int lengthMs = 160,
+        int attenuationDb = 10);
+
+    virtual int SetSendTelephoneEventPayloadType(int channel,
+                                                 unsigned char type);
+
+    virtual int GetSendTelephoneEventPayloadType(int channel,
+                                                 unsigned char& type);
+
+    virtual int SetDtmfFeedbackStatus(bool enable,
+        bool directFeedback = false);
+
+    virtual int GetDtmfFeedbackStatus(bool& enabled, bool& directFeedback);
+
+    virtual int PlayDtmfTone(int eventCode,
+                             int lengthMs = 200,
+                             int attenuationDb = 10);
+
+    virtual int StartPlayingDtmfTone(int eventCode,
+                                     int attenuationDb = 10);
+
+    virtual int StopPlayingDtmfTone();
+
+    virtual int RegisterTelephoneEventDetection(
+        int channel,
+        TelephoneEventDetectionMethods detectionMethod,
+        VoETelephoneEventObserver& observer);
+
+    virtual int DeRegisterTelephoneEventDetection(int channel);
+
+    virtual int GetTelephoneEventDetectionStatus(
+        int channel,
+        bool& enabled,
+        TelephoneEventDetectionMethods& detectionMethod);
+
+    virtual int SetDtmfPlayoutStatus(int channel, bool enable);
+
+    virtual int GetDtmfPlayoutStatus(int channel, bool& enabled);
+
+protected:
+    VoEDtmfImpl(voe::SharedData* shared);
+    virtual ~VoEDtmfImpl();
+
+private:
+    bool _dtmfFeedback;
+    bool _dtmfDirectFeedback;
+    voe::SharedData* _shared;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_VOE_DTMF_IMPL_H
diff --git a/src/voice_engine/voe_encryption_impl.cc b/src/voice_engine/voe_encryption_impl.cc
new file mode 100644
index 0000000..4ac8ada
--- /dev/null
+++ b/src/voice_engine/voe_encryption_impl.cc
@@ -0,0 +1,244 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_encryption_impl.h"
+
+
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "trace.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+namespace webrtc {
+
+VoEEncryption* VoEEncryption::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
+    return NULL;
+#else
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* s = reinterpret_cast<VoiceEngineImpl*>(voiceEngine);
+    s->AddRef();
+    return s;
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
+
+VoEEncryptionImpl::VoEEncryptionImpl(voe::SharedData* shared) : _shared(shared)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoEEncryptionImpl::VoEEncryptionImpl() - ctor");
+}
+
+VoEEncryptionImpl::~VoEEncryptionImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoEEncryptionImpl::~VoEEncryptionImpl() - dtor");
+}
+
+int VoEEncryptionImpl::EnableSRTPSend(
+    int channel,
+    CipherTypes cipherType,
+    int cipherKeyLength,
+    AuthenticationTypes authType,
+    int authKeyLength,
+    int authTagLength,
+    SecurityLevels level,
+    const unsigned char key[kVoiceEngineMaxSrtpKeyLength],
+    bool useForRTCP)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "EnableSRTPSend(channel=%i, cipherType=%i, cipherKeyLength=%i,"
+                 " authType=%i, authKeyLength=%i, authTagLength=%i, level=%i, "
+                 "key=?, useForRTCP=%d)",
+                 channel, cipherType, cipherKeyLength, authType,
+                 authKeyLength, authTagLength, level, useForRTCP);
+#ifdef WEBRTC_SRTP
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "EnableSRTPSend() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->EnableSRTPSend(cipherType,
+                                      cipherKeyLength,
+                                      authType,
+                                      authKeyLength,
+                                      authTagLength,
+                                      level,
+                                      key,
+                                      useForRTCP);
+#else
+   _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+       "EnableSRTPSend() SRTP is not supported");
+    return -1;
+#endif
+}
+
+int VoEEncryptionImpl::DisableSRTPSend(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "DisableSRTPSend(channel=%i)",channel);
+#ifdef WEBRTC_SRTP
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "DisableSRTPSend() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->DisableSRTPSend();
+#else
+   _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+       "DisableSRTPSend() SRTP is not supported");
+    return -1;
+#endif
+}
+
+int VoEEncryptionImpl::EnableSRTPReceive(
+    int channel,
+    CipherTypes cipherType,
+    int cipherKeyLength,
+    AuthenticationTypes authType,
+    int authKeyLength,
+    int authTagLength,
+    SecurityLevels level,
+    const unsigned char key[kVoiceEngineMaxSrtpKeyLength],
+		bool useForRTCP)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "EnableSRTPReceive(channel=%i, cipherType=%i, "
+                 "cipherKeyLength=%i, authType=%i, authKeyLength=%i, "
+                 "authTagLength=%i, level=%i, key=?, useForRTCP=%d)",
+                 channel, cipherType, cipherKeyLength, authType,
+                 authKeyLength, authTagLength, level, useForRTCP);
+#ifdef WEBRTC_SRTP
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "EnableSRTPReceive() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->EnableSRTPReceive(cipherType,
+                                         cipherKeyLength,
+	                                 authType,
+	                                 authKeyLength,
+	                                 authTagLength,
+	                                 level,
+	                                 key,
+	                                 useForRTCP);
+#else
+   _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+       "EnableSRTPReceive() SRTP is not supported");
+    return -1;
+#endif
+}
+
+int VoEEncryptionImpl::DisableSRTPReceive(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "DisableSRTPReceive(channel=%i)", channel);
+#ifdef WEBRTC_SRTP
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "DisableSRTPReceive() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->DisableSRTPReceive();
+#else
+    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "DisableSRTPReceive() SRTP is not supported");
+    return -1;
+#endif
+}
+
+int VoEEncryptionImpl::RegisterExternalEncryption(int channel,
+                                                  Encryption& encryption)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "RegisterExternalEncryption(channel=%d, encryption=0x%x)",
+                 channel, &encryption);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "RegisterExternalEncryption() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->RegisterExternalEncryption(encryption);
+}
+
+int VoEEncryptionImpl::DeRegisterExternalEncryption(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "DeRegisterExternalEncryption(channel=%d)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "DeRegisterExternalEncryption() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->DeRegisterExternalEncryption();
+}
+
+#endif  // #ifdef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
+
+// EOF
+}  // namespace webrtc
diff --git a/src/voice_engine/voe_encryption_impl.h b/src/voice_engine/voe_encryption_impl.h
new file mode 100644
index 0000000..76124d4
--- /dev/null
+++ b/src/voice_engine/voe_encryption_impl.h
@@ -0,0 +1,67 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_ENCRYPTION_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_ENCRYPTION_IMPL_H
+
+#include "voe_encryption.h"
+
+#include "shared_data.h"
+
+namespace webrtc {
+
+class VoEEncryptionImpl : public VoEEncryption
+{
+public:
+    // SRTP
+    virtual int EnableSRTPSend(
+        int channel,
+        CipherTypes cipherType,
+        int cipherKeyLength,
+        AuthenticationTypes authType,
+        int authKeyLength,
+        int authTagLength,
+        SecurityLevels level,
+        const unsigned char key[kVoiceEngineMaxSrtpKeyLength],
+        bool useForRTCP = false);
+
+    virtual int DisableSRTPSend(int channel);
+
+    virtual int EnableSRTPReceive(
+        int channel,
+        CipherTypes cipherType,
+        int cipherKeyLength,
+        AuthenticationTypes authType,
+        int authKeyLength,
+        int authTagLength,
+        SecurityLevels level,
+        const unsigned char key[kVoiceEngineMaxSrtpKeyLength],
+        bool useForRTCP = false);
+
+    virtual int DisableSRTPReceive(int channel);
+
+    // External encryption
+    virtual int RegisterExternalEncryption(
+        int channel,
+        Encryption& encryption);
+
+    virtual int DeRegisterExternalEncryption(int channel);
+
+protected:
+    VoEEncryptionImpl(voe::SharedData* shared);
+    virtual ~VoEEncryptionImpl();
+
+private:
+    voe::SharedData* _shared;
+};
+
+}   // namespace webrtc
+
+#endif  // #ifndef WEBRTC_VOICE_ENGINE_VOE_ENCRYPTION_IMPL_H
diff --git a/src/voice_engine/voe_external_media_impl.cc b/src/voice_engine/voe_external_media_impl.cc
new file mode 100644
index 0000000..f62fa24
--- /dev/null
+++ b/src/voice_engine/voe_external_media_impl.cc
@@ -0,0 +1,349 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_external_media_impl.h"
+
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "output_mixer.h"
+#include "trace.h"
+#include "transmit_mixer.h"
+#include "voice_engine_impl.h"
+#include "voe_errors.h"
+
+namespace webrtc {
+
+VoEExternalMedia* VoEExternalMedia::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
+    return NULL;
+#else
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* s = reinterpret_cast<VoiceEngineImpl*>(voiceEngine);
+    s->AddRef();
+    return s;
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
+
+VoEExternalMediaImpl::VoEExternalMediaImpl(voe::SharedData* shared)
+    : playout_delay_ms_(0), shared_(shared)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(shared_->instance_id(), -1),
+                 "VoEExternalMediaImpl() - ctor");
+}
+
+VoEExternalMediaImpl::~VoEExternalMediaImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(shared_->instance_id(), -1),
+                 "~VoEExternalMediaImpl() - dtor");
+}
+
+int VoEExternalMediaImpl::RegisterExternalMediaProcessing(
+    int channel,
+    ProcessingTypes type,
+    VoEMediaProcess& processObject)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(shared_->instance_id(), -1),
+                 "RegisterExternalMediaProcessing(channel=%d, type=%d, "
+                 "processObject=0x%x)", channel, type, &processObject);
+    ANDROID_NOT_SUPPORTED(shared_->statistics());
+    IPHONE_NOT_SUPPORTED();
+    if (!shared_->statistics().Initialized())
+    {
+        shared_->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    switch (type)
+    {
+        case kPlaybackPerChannel:
+        case kRecordingPerChannel:
+        {
+            voe::ScopedChannel sc(shared_->channel_manager(), channel);
+            voe::Channel* channelPtr = sc.ChannelPtr();
+            if (channelPtr == NULL)
+            {
+                shared_->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                    "RegisterExternalMediaProcessing() failed to locate "
+                    "channel");
+                return -1;
+            }
+            return channelPtr->RegisterExternalMediaProcessing(type,
+                                                               processObject);
+        }
+        case kPlaybackAllChannelsMixed:
+        {
+            return shared_->output_mixer()->RegisterExternalMediaProcessing(
+                processObject);
+        }
+        case kRecordingAllChannelsMixed:
+        {
+            return shared_->transmit_mixer()->RegisterExternalMediaProcessing(
+                processObject);
+        }
+    }
+    return -1;
+}
+
+int VoEExternalMediaImpl::DeRegisterExternalMediaProcessing(
+    int channel,
+    ProcessingTypes type)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(shared_->instance_id(), -1),
+                 "DeRegisterExternalMediaProcessing(channel=%d)", channel);
+    ANDROID_NOT_SUPPORTED(shared_->statistics());
+    IPHONE_NOT_SUPPORTED();
+    if (!shared_->statistics().Initialized())
+    {
+        shared_->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    switch (type)
+    {
+        case kPlaybackPerChannel:
+        case kRecordingPerChannel:
+        {
+            voe::ScopedChannel sc(shared_->channel_manager(), channel);
+            voe::Channel* channelPtr = sc.ChannelPtr();
+            if (channelPtr == NULL)
+            {
+                shared_->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                    "RegisterExternalMediaProcessing() "
+                    "failed to locate channel");
+                return -1;
+            }
+            return channelPtr->DeRegisterExternalMediaProcessing(type);
+        }
+        case kPlaybackAllChannelsMixed:
+        {
+            return shared_->output_mixer()->
+                DeRegisterExternalMediaProcessing();
+        }
+        case kRecordingAllChannelsMixed:
+        {
+            return shared_->transmit_mixer()->
+                DeRegisterExternalMediaProcessing();
+        }
+    }
+    return -1;
+}
+
+int VoEExternalMediaImpl::SetExternalRecordingStatus(bool enable)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(shared_->instance_id(), -1),
+                 "SetExternalRecordingStatus(enable=%d)", enable);
+    ANDROID_NOT_SUPPORTED(shared_->statistics());
+    IPHONE_NOT_SUPPORTED();
+#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+    if (shared_->audio_device()->Recording())
+    {
+        shared_->SetLastError(VE_ALREADY_SENDING, kTraceError,
+            "SetExternalRecordingStatus() cannot set state while sending");
+        return -1;
+    }
+    shared_->set_ext_recording(enable);
+    return 0;
+#else
+    shared_->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "SetExternalRecordingStatus() external recording is not supported");
+    return -1;
+#endif
+}
+
+int VoEExternalMediaImpl::ExternalRecordingInsertData(
+        const WebRtc_Word16 speechData10ms[],
+        int lengthSamples,
+        int samplingFreqHz,
+        int current_delay_ms)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(shared_->instance_id(), -1),
+                 "ExternalRecordingInsertData(speechData10ms=0x%x,"
+                 " lengthSamples=%u, samplingFreqHz=%d, current_delay_ms=%d)",
+                 &speechData10ms[0], lengthSamples, samplingFreqHz,
+              current_delay_ms);
+    ANDROID_NOT_SUPPORTED(shared_->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+    if (!shared_->statistics().Initialized())
+    {
+        shared_->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (!shared_->ext_recording())
+    {
+       shared_->SetLastError(VE_INVALID_OPERATION, kTraceError,
+           "ExternalRecordingInsertData() external recording is not enabled");
+        return -1;
+    }
+    if (shared_->NumOfSendingChannels() == 0)
+    {
+        shared_->SetLastError(VE_ALREADY_SENDING, kTraceError,
+            "SetExternalRecordingStatus() no channel is sending");
+        return -1;
+    }
+    if ((16000 != samplingFreqHz) && (32000 != samplingFreqHz) &&
+        (48000 != samplingFreqHz) && (44000 != samplingFreqHz))
+    {
+         shared_->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+             "SetExternalRecordingStatus() invalid sample rate");
+        return -1;
+    }
+    if ((0 == lengthSamples) ||
+        ((lengthSamples % (samplingFreqHz / 100)) != 0))
+    {
+         shared_->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+             "SetExternalRecordingStatus() invalid buffer size");
+        return -1;
+    }
+    if (current_delay_ms < 0)
+    {
+        shared_->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "SetExternalRecordingStatus() invalid delay)");
+        return -1;
+    }
+
+    WebRtc_UWord16 blockSize = samplingFreqHz / 100;
+    WebRtc_UWord32 nBlocks = lengthSamples / blockSize;
+    WebRtc_Word16 totalDelayMS = 0;
+    WebRtc_UWord16 playoutDelayMS = 0;
+
+    for (WebRtc_UWord32 i = 0; i < nBlocks; i++)
+    {
+        if (!shared_->ext_playout())
+        {
+            // Use real playout delay if external playout is not enabled.
+            if (shared_->audio_device()->PlayoutDelay(&playoutDelayMS) != 0) {
+              shared_->SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR, kTraceWarning,
+                  "PlayoutDelay() unable to get the playout delay");
+            }
+            totalDelayMS = current_delay_ms + playoutDelayMS;
+        }
+        else
+        {
+            // Use stored delay value given the last call
+            // to ExternalPlayoutGetData.
+            totalDelayMS = current_delay_ms + playout_delay_ms_;
+            // Compensate for block sizes larger than 10ms
+            totalDelayMS -= (WebRtc_Word16)(i*10);
+            if (totalDelayMS < 0)
+                totalDelayMS = 0;
+        }
+        shared_->transmit_mixer()->PrepareDemux(
+            (const WebRtc_Word8*)(&speechData10ms[i*blockSize]),
+            blockSize,
+            1,
+            samplingFreqHz,
+            totalDelayMS,
+            0,
+            0);
+
+        shared_->transmit_mixer()->DemuxAndMix();
+        shared_->transmit_mixer()->EncodeAndSend();
+    }
+    return 0;
+#else
+       shared_->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "ExternalRecordingInsertData() external recording is not supported");
+    return -1;
+#endif
+}
+
+int VoEExternalMediaImpl::SetExternalPlayoutStatus(bool enable)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(shared_->instance_id(), -1),
+                 "SetExternalPlayoutStatus(enable=%d)", enable);
+    ANDROID_NOT_SUPPORTED(shared_->statistics());
+    IPHONE_NOT_SUPPORTED();
+#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+    if (shared_->audio_device()->Playing())
+    {
+        shared_->SetLastError(VE_ALREADY_SENDING, kTraceError,
+            "SetExternalPlayoutStatus() cannot set state while playing");
+        return -1;
+    }
+    shared_->set_ext_playout(enable);
+    return 0;
+#else
+    shared_->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "SetExternalPlayoutStatus() external playout is not supported");
+    return -1;
+#endif
+}
+
+int VoEExternalMediaImpl::ExternalPlayoutGetData(
+    WebRtc_Word16 speechData10ms[],
+    int samplingFreqHz,
+    int current_delay_ms,
+    int& lengthSamples)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(shared_->instance_id(), -1),
+                 "ExternalPlayoutGetData(speechData10ms=0x%x, samplingFreqHz=%d"
+                 ",  current_delay_ms=%d)", &speechData10ms[0], samplingFreqHz,
+                 current_delay_ms);
+    ANDROID_NOT_SUPPORTED(shared_->statistics());
+    IPHONE_NOT_SUPPORTED();
+#ifdef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+    if (!shared_->statistics().Initialized())
+    {
+        shared_->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (!shared_->ext_playout())
+    {
+       shared_->SetLastError(VE_INVALID_OPERATION, kTraceError,
+           "ExternalPlayoutGetData() external playout is not enabled");
+        return -1;
+    }
+    if ((16000 != samplingFreqHz) && (32000 != samplingFreqHz) &&
+        (48000 != samplingFreqHz) && (44000 != samplingFreqHz))
+    {
+        shared_->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "ExternalPlayoutGetData() invalid sample rate");
+        return -1;
+    }
+    if (current_delay_ms < 0)
+    {
+        shared_->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "ExternalPlayoutGetData() invalid delay)");
+        return -1;
+    }
+
+    AudioFrame audioFrame;
+
+    // Retrieve mixed output at the specified rate
+    shared_->output_mixer()->MixActiveChannels();
+    shared_->output_mixer()->DoOperationsOnCombinedSignal();
+    shared_->output_mixer()->GetMixedAudio(samplingFreqHz, 1, &audioFrame);
+
+    // Deliver audio (PCM) samples to the external sink
+    memcpy(speechData10ms,
+           audioFrame.data_,
+           sizeof(WebRtc_Word16)*(audioFrame.samples_per_channel_));
+    lengthSamples = audioFrame.samples_per_channel_;
+
+    // Store current playout delay (to be used by ExternalRecordingInsertData).
+    playout_delay_ms_ = current_delay_ms;
+
+    return 0;
+#else
+    shared_->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+       "ExternalPlayoutGetData() external playout is not supported");
+    return -1;
+#endif
+}
+
+#endif  // WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
+
+}  // namespace webrtc
diff --git a/src/voice_engine/voe_external_media_impl.h b/src/voice_engine/voe_external_media_impl.h
new file mode 100644
index 0000000..c922392
--- /dev/null
+++ b/src/voice_engine/voe_external_media_impl.h
@@ -0,0 +1,58 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_EXTERNAL_MEDIA_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_EXTERNAL_MEDIA_IMPL_H
+
+#include "voe_external_media.h"
+
+#include "shared_data.h"
+
+namespace webrtc {
+
+class VoEExternalMediaImpl : public VoEExternalMedia
+{
+public:
+    virtual int RegisterExternalMediaProcessing(
+        int channel,
+        ProcessingTypes type,
+        VoEMediaProcess& processObject);
+
+    virtual int DeRegisterExternalMediaProcessing(
+        int channel,
+        ProcessingTypes type);
+
+    virtual int SetExternalRecordingStatus(bool enable);
+
+    virtual int SetExternalPlayoutStatus(bool enable);
+
+    virtual int ExternalRecordingInsertData(
+        const WebRtc_Word16 speechData10ms[],
+        int lengthSamples,
+        int samplingFreqHz,
+        int current_delay_ms);
+
+    virtual int ExternalPlayoutGetData(WebRtc_Word16 speechData10ms[],
+                                       int samplingFreqHz,
+                                       int current_delay_ms,
+                                       int& lengthSamples);
+
+protected:
+    VoEExternalMediaImpl(voe::SharedData* shared);
+    virtual ~VoEExternalMediaImpl();
+
+private:
+    int playout_delay_ms_;
+    voe::SharedData* shared_;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_VOE_EXTERNAL_MEDIA_IMPL_H
diff --git a/src/voice_engine/voe_file_impl.cc b/src/voice_engine/voe_file_impl.cc
new file mode 100644
index 0000000..8f0061f
--- /dev/null
+++ b/src/voice_engine/voe_file_impl.cc
@@ -0,0 +1,1419 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_file_impl.h"
+
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "file_wrapper.h"
+#include "media_file.h"
+#include "output_mixer.h"
+#include "trace.h"
+#include "transmit_mixer.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+namespace webrtc {
+
+VoEFile* VoEFile::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_FILE_API
+    return NULL;
+#else
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* s = reinterpret_cast<VoiceEngineImpl*>(voiceEngine);
+    s->AddRef();
+    return s;
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_FILE_API
+
+VoEFileImpl::VoEFileImpl(voe::SharedData* shared) : _shared(shared)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoEFileImpl::VoEFileImpl() - ctor");
+}
+
+VoEFileImpl::~VoEFileImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoEFileImpl::~VoEFileImpl() - dtor");
+}
+
+int VoEFileImpl::StartPlayingFileLocally(
+    int channel,
+    const char fileNameUTF8[1024],
+    bool loop, FileFormats format,
+    float volumeScaling,
+    int startPointMs,
+    int stopPointMs)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "StartPlayingFileLocally(channel=%d, fileNameUTF8[]=%s, "
+                 "loop=%d, format=%d, volumeScaling=%5.3f, startPointMs=%d,"
+                 " stopPointMs=%d)",
+                 channel, fileNameUTF8, loop, format, volumeScaling,
+                 startPointMs, stopPointMs);
+    assert(1024 == FileWrapper::kMaxFileNameSize);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "StartPlayingFileLocally() failed to locate channel");
+        return -1;
+    }
+
+    return channelPtr->StartPlayingFileLocally(fileNameUTF8,
+                                               loop,
+                                               format,
+                                               startPointMs,
+                                               volumeScaling,
+                                               stopPointMs,
+                                               NULL);
+}
+
+int VoEFileImpl::StartPlayingFileLocally(int channel,
+                                         InStream* stream,
+                                         FileFormats format,
+                                         float volumeScaling,
+                                         int startPointMs,
+                                         int stopPointMs)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "StartPlayingFileLocally(channel=%d, stream, format=%d, "
+                 "volumeScaling=%5.3f, startPointMs=%d, stopPointMs=%d)",
+                 channel, format, volumeScaling, startPointMs, stopPointMs);
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "StartPlayingFileLocally() failed to locate channel");
+        return -1;
+    }
+
+    return channelPtr->StartPlayingFileLocally(stream,
+                                               format,
+                                               startPointMs,
+                                               volumeScaling,
+                                               stopPointMs,
+                                               NULL);
+}
+
+int VoEFileImpl::StopPlayingFileLocally(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "StopPlayingFileLocally()");
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "StopPlayingFileLocally() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->StopPlayingFileLocally();
+}
+
+int VoEFileImpl::IsPlayingFileLocally(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "IsPlayingFileLocally(channel=%d)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "StopPlayingFileLocally() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->IsPlayingFileLocally();
+}
+
+int VoEFileImpl::ScaleLocalFilePlayout(int channel, float scale)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "ScaleLocalFilePlayout(channel=%d, scale=%5.3f)",
+                 channel, scale);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "StopPlayingFileLocally() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->ScaleLocalFilePlayout(scale);
+}
+
+int VoEFileImpl::StartPlayingFileAsMicrophone(int channel,
+                                              const char fileNameUTF8[1024],
+                                              bool loop,
+                                              bool mixWithMicrophone,
+                                              FileFormats format,
+                                              float volumeScaling)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "StartPlayingFileAsMicrophone(channel=%d, fileNameUTF8=%s, "
+                 "loop=%d, mixWithMicrophone=%d, format=%d, "
+                 "volumeScaling=%5.3f)",
+                 channel, fileNameUTF8, loop, mixWithMicrophone, format,
+                 volumeScaling);
+    assert(1024 == FileWrapper::kMaxFileNameSize);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    const WebRtc_UWord32 startPointMs(0);
+    const WebRtc_UWord32 stopPointMs(0);
+
+    if (channel == -1)
+    {
+        int res = _shared->transmit_mixer()->StartPlayingFileAsMicrophone(
+            fileNameUTF8,
+            loop,
+            format,
+            startPointMs,
+            volumeScaling,
+            stopPointMs,
+            NULL);
+        if (res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "StartPlayingFileAsMicrophone() failed to start playing file");
+            return(-1);
+        }
+        else
+        {
+            _shared->transmit_mixer()->SetMixWithMicStatus(mixWithMicrophone);
+            return(0);
+        }
+    }
+    else
+    {
+        // Add file after demultiplexing <=> affects one channel only
+        voe::ScopedChannel sc(_shared->channel_manager(), channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                "StartPlayingFileAsMicrophone() failed to locate channel");
+            return -1;
+        }
+
+        int res = channelPtr->StartPlayingFileAsMicrophone(fileNameUTF8,
+                                                           loop,
+                                                           format,
+                                                           startPointMs,
+                                                           volumeScaling,
+                                                           stopPointMs,
+                                                           NULL);
+        if (res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "StartPlayingFileAsMicrophone() failed to start playing file");
+            return -1;
+        }
+        else
+        {
+            channelPtr->SetMixWithMicStatus(mixWithMicrophone);
+            return 0;
+        }
+    }
+}
+
+int VoEFileImpl::StartPlayingFileAsMicrophone(int channel,
+                                              InStream* stream,
+                                              bool mixWithMicrophone,
+                                              FileFormats format,
+                                              float volumeScaling)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "StartPlayingFileAsMicrophone(channel=%d, stream,"
+                 " mixWithMicrophone=%d, format=%d, volumeScaling=%5.3f)",
+                 channel, mixWithMicrophone, format, volumeScaling);
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    const WebRtc_UWord32 startPointMs(0);
+    const WebRtc_UWord32 stopPointMs(0);
+
+    if (channel == -1)
+    {
+        int res = _shared->transmit_mixer()->StartPlayingFileAsMicrophone(
+            stream,
+            format,
+            startPointMs,
+            volumeScaling,
+            stopPointMs,
+            NULL);
+        if (res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "StartPlayingFileAsMicrophone() failed to start "
+                "playing stream");
+            return(-1);
+        }
+        else
+        {
+            _shared->transmit_mixer()->SetMixWithMicStatus(mixWithMicrophone);
+            return(0);
+        }
+    }
+    else
+    {
+        // Add file after demultiplexing <=> affects one channel only
+        voe::ScopedChannel sc(_shared->channel_manager(), channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                "StartPlayingFileAsMicrophone() failed to locate channel");
+            return -1;
+        }
+
+        int res = channelPtr->StartPlayingFileAsMicrophone(
+            stream, format, startPointMs, volumeScaling, stopPointMs, NULL);
+        if (res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "StartPlayingFileAsMicrophone() failed to start "
+                "playing stream");
+            return -1;
+        }
+        else
+        {
+            channelPtr->SetMixWithMicStatus(mixWithMicrophone);
+            return 0;
+        }
+    }
+}
+
+int VoEFileImpl::StopPlayingFileAsMicrophone(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "StopPlayingFileAsMicrophone(channel=%d)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (channel == -1)
+    {
+        // Stop adding file before demultiplexing <=> affects all channels
+        return _shared->transmit_mixer()->StopPlayingFileAsMicrophone();
+    }
+    else
+    {
+        // Stop adding file after demultiplexing <=> affects one channel only
+        voe::ScopedChannel sc(_shared->channel_manager(), channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                "StopPlayingFileAsMicrophone() failed to locate channel");
+            return -1;
+        }
+        return channelPtr->StopPlayingFileAsMicrophone();
+    }
+}
+
+int VoEFileImpl::IsPlayingFileAsMicrophone(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "IsPlayingFileAsMicrophone(channel=%d)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (channel == -1)
+    {
+        return _shared->transmit_mixer()->IsPlayingFileAsMicrophone();
+    }
+    else
+    {
+        // Stop adding file after demultiplexing <=> affects one channel only
+        voe::ScopedChannel sc(_shared->channel_manager(), channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                "IsPlayingFileAsMicrophone() failed to locate channel");
+            return -1;
+        }
+        return channelPtr->IsPlayingFileAsMicrophone();
+    }
+}
+
+int VoEFileImpl::ScaleFileAsMicrophonePlayout(int channel, float scale)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "ScaleFileAsMicrophonePlayout(channel=%d, scale=%5.3f)",
+                 channel, scale);
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (channel == -1)
+    {
+        return _shared->transmit_mixer()->ScaleFileAsMicrophonePlayout(scale);
+    }
+    else
+    {
+        // Stop adding file after demultiplexing <=> affects one channel only
+        voe::ScopedChannel sc(_shared->channel_manager(), channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                "IsPlayingFileAsMicrophone() failed to locate channel");
+            return -1;
+        }
+        return channelPtr->ScaleFileAsMicrophonePlayout(scale);
+    }
+}
+
+int VoEFileImpl::StartRecordingPlayout(
+    int channel, const char* fileNameUTF8, CodecInst* compression,
+    int maxSizeBytes)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "StartRecordingPlayout(channel=%d, fileNameUTF8=%s, "
+                 "compression, maxSizeBytes=%d)",
+                 channel, fileNameUTF8, maxSizeBytes);
+    assert(1024 == FileWrapper::kMaxFileNameSize);
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (channel == -1)
+    {
+        return _shared->output_mixer()->StartRecordingPlayout
+          (fileNameUTF8, compression);
+    }
+    else
+    {
+        // Add file after demultiplexing <=> affects one channel only
+        voe::ScopedChannel sc(_shared->channel_manager(), channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                "StartRecordingPlayout() failed to locate channel");
+            return -1;
+        }
+        return channelPtr->StartRecordingPlayout(fileNameUTF8, compression);
+    }
+}
+
+int VoEFileImpl::StartRecordingPlayout(
+    int channel, OutStream* stream, CodecInst* compression)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "StartRecordingPlayout(channel=%d, stream, compression)",
+                 channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (channel == -1)
+    {
+        return _shared->output_mixer()->
+            StartRecordingPlayout(stream, compression);
+    }
+    else
+    {
+        voe::ScopedChannel sc(_shared->channel_manager(), channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                "StartRecordingPlayout() failed to locate channel");
+            return -1;
+        }
+        return channelPtr->StartRecordingPlayout(stream, compression);
+    }
+}
+
+int VoEFileImpl::StopRecordingPlayout(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "StopRecordingPlayout(channel=%d)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (channel == -1)
+    {
+        return _shared->output_mixer()->StopRecordingPlayout();
+    }
+    else
+    {
+        voe::ScopedChannel sc(_shared->channel_manager(), channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                "StopRecordingPlayout() failed to locate channel");
+            return -1;
+        }
+        return channelPtr->StopRecordingPlayout();
+    }
+}
+
+int VoEFileImpl::StartRecordingMicrophone(
+    const char* fileNameUTF8, CodecInst* compression, int maxSizeBytes)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "StartRecordingMicrophone(fileNameUTF8=%s, compression, "
+                 "maxSizeBytes=%d)", fileNameUTF8, maxSizeBytes);
+    assert(1024 == FileWrapper::kMaxFileNameSize);
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (_shared->transmit_mixer()->StartRecordingMicrophone(fileNameUTF8,
+                                                          compression))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice,
+            VoEId(_shared->instance_id(), -1),
+            "StartRecordingMicrophone() failed to start recording");
+        return -1;
+    }
+    if (_shared->audio_device()->Recording())
+    {
+        return 0;
+    }
+    if (!_shared->ext_recording())
+    {
+        if (_shared->audio_device()->InitRecording() != 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "StartRecordingMicrophone() failed to initialize recording");
+            return -1;
+        }
+        if (_shared->audio_device()->StartRecording() != 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "StartRecordingMicrophone() failed to start recording");
+            return -1;
+        }
+    }
+    return 0;
+}
+
+int VoEFileImpl::StartRecordingMicrophone(
+    OutStream* stream, CodecInst* compression)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "StartRecordingMicrophone(stream, compression)");
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (_shared->transmit_mixer()->StartRecordingMicrophone(stream,
+                                                          compression) == -1)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice,
+            VoEId(_shared->instance_id(), -1),
+            "StartRecordingMicrophone() failed to start recording");
+        return -1;
+    }
+    if (_shared->audio_device()->Recording())
+    {
+        return 0;
+    }
+    if (!_shared->ext_recording())
+    {
+        if (_shared->audio_device()->InitRecording() != 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "StartRecordingMicrophone() failed to initialize recording");
+            return -1;
+        }
+        if (_shared->audio_device()->StartRecording() != 0)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "StartRecordingMicrophone() failed to start recording");
+            return -1;
+        }
+    }
+    return 0;
+}
+
+int VoEFileImpl::StopRecordingMicrophone()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "StopRecordingMicrophone()");
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    int err = 0;
+
+    // TODO(xians): consider removing Start/StopRecording() in
+    // Start/StopRecordingMicrophone() if no channel is recording.
+    if (_shared->NumOfSendingChannels() == 0 &&
+        _shared->audio_device()->Recording())
+    {
+        // Stop audio-device recording if no channel is recording
+        if (_shared->audio_device()->StopRecording() != 0)
+        {
+            _shared->SetLastError(VE_CANNOT_STOP_RECORDING, kTraceError,
+                "StopRecordingMicrophone() failed to stop recording");
+            err = -1;
+        }
+    }
+
+    if (_shared->transmit_mixer()->StopRecordingMicrophone() != 0)
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "StopRecordingMicrophone() failed to stop recording to mixer");
+        err = -1;
+    }
+
+    return err;
+}
+
+// TODO(andrew): a cursory inspection suggests there's a large amount of
+// overlap in these convert functions which could be refactored to a helper.
+int VoEFileImpl::ConvertPCMToWAV(const char* fileNameInUTF8,
+                                 const char* fileNameOutUTF8)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "ConvertPCMToWAV(fileNameInUTF8=%s, fileNameOutUTF8=%s)",
+                 fileNameInUTF8, fileNameOutUTF8);
+
+    // Create file player object
+    FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(
+        -1,
+        kFileFormatPcm16kHzFile));
+
+    int res=playerObj.StartPlayingFile(fileNameInUTF8,false,0,1.0,0,0, NULL);
+    if (res)
+    {
+        _shared->SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertPCMToWAV failed to create player object");
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        return -1;
+    }
+
+    // Create file recorder object
+    FileRecorder& recObj(*FileRecorder::CreateFileRecorder(
+        -1, kFileFormatWavFile));
+
+    CodecInst codecInst;
+    strncpy(codecInst.plname,"L16",32);
+            codecInst.channels = 1;
+            codecInst.rate     = 256000;
+            codecInst.plfreq   = 16000;
+            codecInst.pltype   = 94;
+            codecInst.pacsize  = 160;
+
+    res = recObj.StartRecordingAudioFile(fileNameOutUTF8,codecInst,0);
+    if (res)
+    {
+        _shared->SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertPCMToWAV failed to create recorder object");
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        recObj.StopRecording();
+        FileRecorder::DestroyFileRecorder(&recObj);
+        return -1;
+    }
+
+    // Run throught the file
+    AudioFrame audioFrame;
+    WebRtc_Word16 decodedData[160];
+    int decLength=0;
+    const WebRtc_UWord32 frequency = 16000;
+
+    while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
+    {
+        if(decLength!=frequency/100)
+        {
+            // This is an OK way to end
+            break;
+        }
+
+        res=audioFrame.UpdateFrame(-1, 0, decodedData,
+                                  (WebRtc_UWord16)decLength,
+                                   frequency, AudioFrame::kNormalSpeech,
+                                   AudioFrame::kVadActive);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "ConvertPCMToWAV failed during conversion (audio frame)");
+            break;
+        }
+
+        res=recObj.RecordAudioToFile(audioFrame);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "ConvertPCMToWAV failed during conversion (write frame)");
+        }
+    }
+
+    playerObj.StopPlayingFile();
+    recObj.StopRecording();
+    FilePlayer::DestroyFilePlayer(&playerObj);
+    FileRecorder::DestroyFileRecorder(&recObj);
+
+    return res;
+}
+
+int VoEFileImpl::ConvertPCMToWAV(InStream* streamIn, OutStream* streamOut)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "ConvertPCMToWAV(streamIn, streamOut)");
+
+    if ((streamIn == NULL) || (streamOut == NULL))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice,
+            VoEId(_shared->instance_id(), -1), "invalid stream handles");
+        return (-1);
+    }
+
+    // Create file player object
+    FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(-1,
+        kFileFormatPcm16kHzFile));
+    int res = playerObj.StartPlayingFile(*streamIn,0,1.0,0,0,NULL);
+    if (res)
+    {
+        _shared->SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertPCMToWAV failed to create player object");
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        return -1;
+    }
+
+    // Create file recorder object
+    FileRecorder& recObj(*FileRecorder::CreateFileRecorder(-1,
+        kFileFormatWavFile));
+    CodecInst codecInst;
+    strncpy(codecInst.plname, "L16", 32);
+            codecInst.channels = 1;
+            codecInst.rate     = 256000;
+            codecInst.plfreq   = 16000;
+            codecInst.pltype   = 94;
+            codecInst.pacsize  = 160;
+    res = recObj.StartRecordingAudioFile(*streamOut,codecInst,0);
+    if (res)
+    {
+        _shared->SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertPCMToWAV failed to create recorder object");
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        recObj.StopRecording();
+        FileRecorder::DestroyFileRecorder(&recObj);
+        return -1;
+    }
+
+    // Run throught the file
+    AudioFrame audioFrame;
+    WebRtc_Word16 decodedData[160];
+    int decLength=0;
+    const WebRtc_UWord32 frequency = 16000;
+
+    while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
+    {
+        if(decLength!=frequency/100)
+        {
+            // This is an OK way to end
+            break;
+        }
+
+        res=audioFrame.UpdateFrame(-1, 0, decodedData,
+                                  (WebRtc_UWord16)decLength, frequency,
+                                   AudioFrame::kNormalSpeech,
+                                   AudioFrame::kVadActive);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "ConvertPCMToWAV failed during conversion "
+                "(create audio frame)");
+            break;
+        }
+
+        res=recObj.RecordAudioToFile(audioFrame);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "ConvertPCMToWAV failed during conversion (write frame)");
+        }
+    }
+
+    playerObj.StopPlayingFile();
+    recObj.StopRecording();
+    FilePlayer::DestroyFilePlayer(&playerObj);
+    FileRecorder::DestroyFileRecorder(&recObj);
+
+    return res;
+}
+
+int VoEFileImpl::ConvertWAVToPCM(const char* fileNameInUTF8,
+                                 const char* fileNameOutUTF8)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "ConvertWAVToPCM(fileNameInUTF8=%s, fileNameOutUTF8=%s)",
+                 fileNameInUTF8, fileNameOutUTF8);
+
+    // Create file player object
+    FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(-1,
+                                                        kFileFormatWavFile));
+    int res = playerObj.StartPlayingFile(fileNameInUTF8,false,0,1.0,0,0,NULL);
+    if (res)
+    {
+        _shared->SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertWAVToPCM failed to create player object");
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        return -1;
+    }
+
+    // Create file recorder object
+    FileRecorder& recObj(*FileRecorder::CreateFileRecorder(
+        -1, kFileFormatPcm16kHzFile));
+
+    CodecInst codecInst;
+    strncpy(codecInst.plname,"L16",32);
+            codecInst.channels = 1;
+            codecInst.rate     = 256000;
+            codecInst.plfreq   = 16000;
+            codecInst.pltype   = 94;
+            codecInst.pacsize  = 160;
+
+    res = recObj.StartRecordingAudioFile(fileNameOutUTF8,codecInst,0);
+    if (res)
+    {
+        _shared->SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertWAVToPCM failed to create recorder object");
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        recObj.StopRecording();
+        FileRecorder::DestroyFileRecorder(&recObj);
+        return -1;
+    }
+
+    // Run throught the file
+    AudioFrame audioFrame;
+    WebRtc_Word16 decodedData[160];
+    int decLength=0;
+    const WebRtc_UWord32 frequency = 16000;
+
+    while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
+    {
+        if(decLength!=frequency/100)
+        {
+            // This is an OK way to end
+            break;
+        }
+
+        res=audioFrame.UpdateFrame(-1, 0, decodedData,
+                                   (WebRtc_UWord16)decLength,
+                                   frequency, AudioFrame::kNormalSpeech,
+                                   AudioFrame::kVadActive);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "ConvertWAVToPCM failed during conversion (audio frame)");
+            break;
+        }
+
+        res=recObj.RecordAudioToFile(audioFrame);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "ConvertWAVToPCM failed during conversion (write frame)");
+        }
+    }
+
+    playerObj.StopPlayingFile();
+    recObj.StopRecording();
+    FilePlayer::DestroyFilePlayer(&playerObj);
+    FileRecorder::DestroyFileRecorder(&recObj);
+
+    return res;
+}
+
+int VoEFileImpl::ConvertWAVToPCM(InStream* streamIn, OutStream* streamOut)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "ConvertWAVToPCM(streamIn, streamOut)");
+
+    if ((streamIn == NULL) || (streamOut == NULL))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice,
+          VoEId(_shared->instance_id(), -1), "invalid stream handles");
+        return (-1);
+    }
+
+    // Create file player object
+    FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(-1,
+                                                        kFileFormatWavFile));
+    int res = playerObj.StartPlayingFile(*streamIn,0,1.0,0,0,NULL);
+    if (res)
+    {
+        _shared->SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertWAVToPCM failed to create player object");
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        return -1;
+    }
+
+    // Create file recorder object
+    FileRecorder& recObj(*FileRecorder::CreateFileRecorder(
+        -1, kFileFormatPcm16kHzFile));
+
+    CodecInst codecInst;
+    strncpy(codecInst.plname,"L16",32);
+            codecInst.channels = 1;
+            codecInst.rate     = 256000;
+            codecInst.plfreq   = 16000;
+            codecInst.pltype   = 94;
+            codecInst.pacsize  = 160;
+
+    res = recObj.StartRecordingAudioFile(*streamOut,codecInst,0);
+    if (res)
+    {
+        _shared->SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertWAVToPCM failed to create recorder object");
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        recObj.StopRecording();
+        FileRecorder::DestroyFileRecorder(&recObj);
+        return -1;
+    }
+
+    // Run throught the file
+    AudioFrame audioFrame;
+    WebRtc_Word16 decodedData[160];
+    int decLength=0;
+    const WebRtc_UWord32 frequency = 16000;
+
+    while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
+    {
+        if(decLength!=frequency/100)
+        {
+            // This is an OK way to end
+            break;
+        }
+
+        res=audioFrame.UpdateFrame(-1, 0, decodedData,
+                                  (WebRtc_UWord16)decLength, frequency,
+                                   AudioFrame::kNormalSpeech,
+                                   AudioFrame::kVadActive);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "ConvertWAVToPCM failed during conversion (audio frame)");
+            break;
+        }
+
+        res=recObj.RecordAudioToFile(audioFrame);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "ConvertWAVToPCM failed during conversion (write frame)");
+        }
+    }
+
+    playerObj.StopPlayingFile();
+    recObj.StopRecording();
+    FilePlayer::DestroyFilePlayer(&playerObj);
+    FileRecorder::DestroyFileRecorder(&recObj);
+
+    return res;
+}
+
+int VoEFileImpl::ConvertPCMToCompressed(const char* fileNameInUTF8,
+                                        const char* fileNameOutUTF8,
+                                        CodecInst* compression)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "ConvertPCMToCompressed(fileNameInUTF8=%s, fileNameOutUTF8=%s"
+                 ",  compression)", fileNameInUTF8, fileNameOutUTF8);
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "  compression: plname=%s, plfreq=%d, pacsize=%d",
+                 compression->plname, compression->plfreq,
+                 compression->pacsize);
+
+    // Create file player object
+    FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(
+        -1,
+        kFileFormatPcm16kHzFile));
+    int res = playerObj.StartPlayingFile(fileNameInUTF8,false,0,1.0,0,0, NULL);
+    if (res)
+    {
+        _shared->SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertPCMToCompressed failed to create player object");
+        // Clean up and shutdown the file player
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        return -1;
+    }
+
+    // Create file recorder object
+    FileRecorder& recObj(*FileRecorder::CreateFileRecorder(
+        -1,
+        kFileFormatCompressedFile));
+    res = recObj.StartRecordingAudioFile(fileNameOutUTF8, *compression,0);
+    if (res)
+    {
+        _shared->SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertPCMToCompressed failed to create recorder object");
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        recObj.StopRecording();
+        FileRecorder::DestroyFileRecorder(&recObj);
+        return -1;
+    }
+
+    // Run throught the file
+    AudioFrame audioFrame;
+    WebRtc_Word16 decodedData[160];
+    int decLength=0;
+    const WebRtc_UWord32 frequency = 16000;
+
+    while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
+    {
+        if(decLength!=frequency/100)
+        {
+            // This is an OK way to end
+            break;
+        }
+        res=audioFrame.UpdateFrame(-1, 0, decodedData,
+                                  (WebRtc_UWord16)decLength,
+                                  frequency, AudioFrame::kNormalSpeech,
+                                  AudioFrame::kVadActive);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "ConvertPCMToCompressed failed during conversion "
+                "(audio frame)");
+            break;
+        }
+
+        res=recObj.RecordAudioToFile(audioFrame);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "ConvertPCMToCompressed failed during conversion "
+                "(write frame)");
+        }
+    }
+
+    playerObj.StopPlayingFile();
+    recObj.StopRecording();
+    FilePlayer::DestroyFilePlayer(&playerObj);
+    FileRecorder::DestroyFileRecorder(&recObj);
+
+    return res;
+}
+
+int VoEFileImpl::ConvertPCMToCompressed(InStream* streamIn,
+                                        OutStream* streamOut,
+                                        CodecInst* compression)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "ConvertPCMToCompressed(streamIn, streamOut, compression)");
+
+    if ((streamIn == NULL) || (streamOut == NULL))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice,
+            VoEId(_shared->instance_id(), -1), "invalid stream handles");
+        return (-1);
+    }
+
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "  compression: plname=%s, plfreq=%d, pacsize=%d",
+                 compression->plname, compression->plfreq,
+                 compression->pacsize);
+
+    // Create file player object
+    FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(
+        -1, kFileFormatPcm16kHzFile));
+
+    int res = playerObj.StartPlayingFile(*streamIn,0,1.0,0,0,NULL);
+    if (res)
+    {
+        _shared->SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertPCMToCompressed failed to create player object");
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        return -1;
+    }
+
+    // Create file recorder object
+    FileRecorder& recObj(*FileRecorder::CreateFileRecorder(
+        -1, kFileFormatCompressedFile));
+    res = recObj.StartRecordingAudioFile(*streamOut,*compression,0);
+    if (res)
+    {
+        _shared->SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertPCMToCompressed failed to create recorder object");
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        recObj.StopRecording();
+        FileRecorder::DestroyFileRecorder(&recObj);
+        return -1;
+    }
+
+    // Run throught the file
+    AudioFrame audioFrame;
+    WebRtc_Word16 decodedData[160];
+    int decLength=0;
+    const WebRtc_UWord32 frequency = 16000;
+
+    while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
+    {
+        if(decLength!=frequency/100)
+        {
+            // This is an OK way to end
+            break;
+        }
+        res=audioFrame.UpdateFrame(-1, 0, decodedData,
+                                  (WebRtc_UWord16)decLength,
+                                   frequency, AudioFrame::kNormalSpeech,
+                                   AudioFrame::kVadActive);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "ConvertPCMToCompressed failed during conversion "
+                "(audio frame)");
+            break;
+        }
+
+        res=recObj.RecordAudioToFile(audioFrame);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "ConvertPCMToCompressed failed during conversion "
+                "(write frame)");
+        }
+    }
+
+    playerObj.StopPlayingFile();
+    recObj.StopRecording();
+    FilePlayer::DestroyFilePlayer(&playerObj);
+    FileRecorder::DestroyFileRecorder(&recObj);
+
+    return res;
+}
+
+int VoEFileImpl::ConvertCompressedToPCM(const char* fileNameInUTF8,
+                                        const char* fileNameOutUTF8)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "ConvertCompressedToPCM(fileNameInUTF8=%s,"
+                 " fileNameOutUTF8=%s)",
+                 fileNameInUTF8, fileNameOutUTF8);
+
+    // Create file player object
+    FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(
+        -1, kFileFormatCompressedFile));
+
+    int res = playerObj.StartPlayingFile(fileNameInUTF8,false,0,1.0,0,0,NULL);
+    if (res)
+    {
+        _shared->SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertCompressedToPCM failed to create player object");
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        return -1;
+    }
+
+    // Create file recorder object
+    FileRecorder& recObj(*FileRecorder::CreateFileRecorder(
+        -1, kFileFormatPcm16kHzFile));
+
+    CodecInst codecInst;
+    strncpy(codecInst.plname,"L16",32);
+            codecInst.channels = 1;
+            codecInst.rate     = 256000;
+            codecInst.plfreq   = 16000;
+            codecInst.pltype   = 94;
+            codecInst.pacsize  = 160;
+
+    res = recObj.StartRecordingAudioFile(fileNameOutUTF8,codecInst,0);
+    if (res)
+    {
+        _shared->SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertCompressedToPCM failed to create recorder object");
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        recObj.StopRecording();
+        FileRecorder::DestroyFileRecorder(&recObj);
+        return -1;
+    }
+
+    // Run throught the file
+    AudioFrame audioFrame;
+    WebRtc_Word16 decodedData[160];
+    int decLength=0;
+    const WebRtc_UWord32 frequency = 16000;
+
+    while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
+    {
+        if(decLength!=frequency/100)
+        {
+            // This is an OK way to end
+            break;
+        }
+        res=audioFrame.UpdateFrame(-1, 0, decodedData,
+                                  (WebRtc_UWord16)decLength,
+                                   frequency,
+                                   AudioFrame::kNormalSpeech,
+                                   AudioFrame::kVadActive);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "ConvertCompressedToPCM failed during conversion "
+                "(create audio frame)");
+            break;
+        }
+
+        res=recObj.RecordAudioToFile(audioFrame);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "ConvertCompressedToPCM failed during conversion "
+                "(write frame)");
+        }
+    }
+
+    playerObj.StopPlayingFile();
+    recObj.StopRecording();
+    FilePlayer::DestroyFilePlayer(&playerObj);
+    FileRecorder::DestroyFileRecorder(&recObj);
+
+    return res;
+}
+
+int VoEFileImpl::ConvertCompressedToPCM(InStream* streamIn,
+                                        OutStream* streamOut)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "ConvertCompressedToPCM(file, file);");
+
+    if ((streamIn == NULL) || (streamOut == NULL))
+    {
+        WEBRTC_TRACE(kTraceError, kTraceVoice,
+            VoEId(_shared->instance_id(), -1), "invalid stream handles");
+        return (-1);
+    }
+
+    // Create file player object
+    FilePlayer& playerObj(*FilePlayer::CreateFilePlayer(
+        -1, kFileFormatCompressedFile));
+    int res;
+
+    res = playerObj.StartPlayingFile(*streamIn,0,1.0,0,0,NULL);
+    if (res)
+    {
+        _shared->SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertCompressedToPCM failed to create player object");
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        return -1;
+    }
+
+    // Create file recorder object
+    FileRecorder& recObj(*FileRecorder::CreateFileRecorder(
+        -1, kFileFormatPcm16kHzFile));
+
+    CodecInst codecInst;
+    strncpy(codecInst.plname,"L16",32);
+            codecInst.channels = 1;
+            codecInst.rate     = 256000;
+            codecInst.plfreq   = 16000;
+            codecInst.pltype   = 94;
+            codecInst.pacsize  = 160;
+
+    res = recObj.StartRecordingAudioFile(*streamOut,codecInst,0);
+    if (res)
+    {
+        _shared->SetLastError(VE_BAD_FILE, kTraceError,
+            "ConvertCompressedToPCM failed to create recorder object");
+        playerObj.StopPlayingFile();
+        FilePlayer::DestroyFilePlayer(&playerObj);
+        recObj.StopRecording();
+        FileRecorder::DestroyFileRecorder(&recObj);
+        return -1;
+    }
+
+    // Run throught the file
+    AudioFrame audioFrame;
+    WebRtc_Word16 decodedData[160];
+    int decLength=0;
+    const WebRtc_UWord32 frequency = 16000;
+
+    while(!playerObj.Get10msAudioFromFile(decodedData,decLength,frequency))
+    {
+        if(decLength!=frequency/100)
+        {
+            // This is an OK way to end
+            break;
+        }
+        res=audioFrame.UpdateFrame(-1, 0, decodedData,
+                                  (WebRtc_UWord16)decLength,
+                                   frequency,
+                                   AudioFrame::kNormalSpeech,
+                                   AudioFrame::kVadActive);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "ConvertCompressedToPCM failed during conversion "
+                "(audio frame)");
+            break;
+        }
+
+        res=recObj.RecordAudioToFile(audioFrame);
+        if(res)
+        {
+            WEBRTC_TRACE(kTraceError, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "ConvertCompressedToPCM failed during conversion "
+                "(write frame)");
+        }
+    }
+
+    playerObj.StopPlayingFile();
+    recObj.StopRecording();
+    FilePlayer::DestroyFilePlayer(&playerObj);
+    FileRecorder::DestroyFileRecorder(&recObj);
+
+    return res;
+}
+
+
+int VoEFileImpl::GetFileDuration(const char* fileNameUTF8,
+                                 int& durationMs,
+                                 FileFormats format)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetFileDuration(fileNameUTF8=%s, format=%d)",
+                 fileNameUTF8, format);
+
+    // Create a dummy file module for this
+    MediaFile * fileModule=MediaFile::CreateMediaFile(-1);
+
+    // Temp container of the right format
+    WebRtc_UWord32 duration;
+    int res=fileModule->FileDurationMs(fileNameUTF8,duration,format);
+    if (res)
+    {
+        _shared->SetLastError(VE_BAD_FILE, kTraceError,
+            "GetFileDuration() failed measure file duration");
+        return -1;
+    }
+    durationMs = duration;
+    MediaFile::DestroyMediaFile(fileModule);
+    fileModule = NULL;
+
+    return(res);
+}
+
+int VoEFileImpl::GetPlaybackPosition(int channel, int& positionMs)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetPlaybackPosition(channel=%d)", channel);
+
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetPlaybackPosition() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetLocalPlayoutPosition(positionMs);
+}
+
+#endif  // #ifdef WEBRTC_VOICE_ENGINE_FILE_API
+
+}  // namespace webrtc
diff --git a/src/voice_engine/voe_file_impl.h b/src/voice_engine/voe_file_impl.h
new file mode 100644
index 0000000..dcb5642
--- /dev/null
+++ b/src/voice_engine/voe_file_impl.h
@@ -0,0 +1,141 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_FILE_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_FILE_IMPL_H
+
+#include "voe_file.h"
+#include "shared_data.h"
+
+namespace webrtc {
+
+class VoEFileImpl : public VoEFile
+{
+public:
+    // Playout file locally
+
+    virtual int StartPlayingFileLocally(
+        int channel,
+        const char fileNameUTF8[1024],
+        bool loop = false,
+        FileFormats format = kFileFormatPcm16kHzFile,
+        float volumeScaling = 1.0,
+        int startPointMs = 0,
+        int stopPointMs = 0);
+
+    virtual int StartPlayingFileLocally(
+        int channel,
+        InStream* stream,
+        FileFormats format = kFileFormatPcm16kHzFile,
+        float volumeScaling = 1.0,
+        int startPointMs = 0, int stopPointMs = 0);
+
+    virtual int StopPlayingFileLocally(int channel);
+
+    virtual int IsPlayingFileLocally(int channel);
+
+    virtual int ScaleLocalFilePlayout(int channel, float scale);
+
+    // Use file as microphone input
+
+    virtual int StartPlayingFileAsMicrophone(
+        int channel,
+        const char fileNameUTF8[1024],
+        bool loop = false ,
+        bool mixWithMicrophone = false,
+        FileFormats format = kFileFormatPcm16kHzFile,
+        float volumeScaling = 1.0);
+
+    virtual int StartPlayingFileAsMicrophone(
+        int channel,
+        InStream* stream,
+        bool mixWithMicrophone = false,
+        FileFormats format = kFileFormatPcm16kHzFile,
+        float volumeScaling = 1.0);
+
+    virtual int StopPlayingFileAsMicrophone(int channel);
+
+    virtual int IsPlayingFileAsMicrophone(int channel);
+
+    virtual int ScaleFileAsMicrophonePlayout(int channel, float scale);
+
+    // Record speaker signal to file
+
+    virtual int StartRecordingPlayout(int channel,
+                                      const char* fileNameUTF8,
+                                      CodecInst* compression = NULL,
+                                      int maxSizeBytes = -1);
+
+    virtual int StartRecordingPlayout(int channel,
+                                      OutStream* stream,
+                                      CodecInst* compression = NULL);
+
+    virtual int StopRecordingPlayout(int channel);
+
+    // Record microphone signal to file
+
+    virtual int StartRecordingMicrophone(const char* fileNameUTF8,
+                                         CodecInst* compression = NULL,
+                                         int maxSizeBytes = -1);
+
+    virtual int StartRecordingMicrophone(OutStream* stream,
+                                         CodecInst* compression = NULL);
+
+    virtual int StopRecordingMicrophone();
+
+    // Conversion between different file formats
+
+    virtual int ConvertPCMToWAV(const char* fileNameInUTF8,
+                                const char* fileNameOutUTF8);
+
+    virtual int ConvertPCMToWAV(InStream* streamIn,
+                                OutStream* streamOut);
+
+    virtual int ConvertWAVToPCM(const char* fileNameInUTF8,
+                                const char* fileNameOutUTF8);
+
+    virtual int ConvertWAVToPCM(InStream* streamIn,
+                                OutStream* streamOut);
+
+    virtual int ConvertPCMToCompressed(const char* fileNameInUTF8,
+                                       const char* fileNameOutUTF8,
+                                       CodecInst* compression);
+
+    virtual int ConvertPCMToCompressed(InStream* streamIn,
+                                       OutStream* streamOut,
+                                       CodecInst* compression);
+
+    virtual int ConvertCompressedToPCM(const char* fileNameInUTF8,
+                                       const char* fileNameOutUTF8);
+
+    virtual int ConvertCompressedToPCM(InStream* streamIn,
+                                       OutStream* streamOut);
+
+    // Misc file functions
+
+    virtual int GetFileDuration(
+        const char* fileNameUTF8,
+        int& durationMs,
+        FileFormats format = kFileFormatPcm16kHzFile);
+
+    virtual int GetPlaybackPosition(int channel, int& positionMs);
+
+protected:
+    VoEFileImpl(voe::SharedData* shared);
+    virtual ~VoEFileImpl();
+
+private:
+    voe::SharedData* _shared;
+};
+
+}  // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_VOE_FILE_IMPL_H
+
diff --git a/src/voice_engine/voe_hardware_impl.cc b/src/voice_engine/voe_hardware_impl.cc
new file mode 100644
index 0000000..851967e
--- /dev/null
+++ b/src/voice_engine/voe_hardware_impl.cc
@@ -0,0 +1,823 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_hardware_impl.h"
+
+#include <cassert>
+
+#include "cpu_wrapper.h"
+#include "critical_section_wrapper.h"
+#include "trace.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+namespace webrtc
+{
+
+VoEHardware* VoEHardware::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_HARDWARE_API
+    return NULL;
+#else
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* s = reinterpret_cast<VoiceEngineImpl*>(voiceEngine);
+    s->AddRef();
+    return s;
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_HARDWARE_API
+
+VoEHardwareImpl::VoEHardwareImpl(voe::SharedData* shared) :
+    _cpu(NULL), _shared(shared)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoEHardwareImpl() - ctor");
+
+    _cpu = CpuWrapper::CreateCpu();
+    if (_cpu)
+    {
+        _cpu->CpuUsage(); // init cpu usage
+    }
+}
+
+VoEHardwareImpl::~VoEHardwareImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "~VoEHardwareImpl() - dtor");
+
+    if (_cpu)
+    {
+        delete _cpu;
+        _cpu = NULL;
+    }
+}
+
+int VoEHardwareImpl::SetAudioDeviceLayer(AudioLayers audioLayer)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetAudioDeviceLayer(audioLayer=%d)", audioLayer);
+
+    // Don't allow a change if VoE is initialized
+    if (_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_ALREADY_INITED, kTraceError);
+        return -1;
+    }
+
+    // Map to AudioDeviceModule::AudioLayer
+    AudioDeviceModule::AudioLayer
+        wantedLayer(AudioDeviceModule::kPlatformDefaultAudio);
+    switch (audioLayer)
+    {
+        case kAudioPlatformDefault:
+            // already set above
+            break;
+        case kAudioWindowsCore:
+            wantedLayer = AudioDeviceModule::kWindowsCoreAudio;
+            break;
+        case kAudioWindowsWave:
+            wantedLayer = AudioDeviceModule::kWindowsWaveAudio;
+            break;
+        case kAudioLinuxAlsa:
+            wantedLayer = AudioDeviceModule::kLinuxAlsaAudio;
+            break;
+        case kAudioLinuxPulse:
+            wantedLayer = AudioDeviceModule::kLinuxPulseAudio;
+            break;
+    }
+
+    // Save the audio device layer for Init()
+    _shared->set_audio_device_layer(wantedLayer);
+
+    return 0;
+}
+
+int VoEHardwareImpl::GetAudioDeviceLayer(AudioLayers& audioLayer)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetAudioDeviceLayer(devices=?)");
+
+    // Can always be called regardless of VoE state
+
+    AudioDeviceModule::AudioLayer
+        activeLayer(AudioDeviceModule::kPlatformDefaultAudio);
+
+    if (_shared->audio_device())
+    {
+        // Get active audio layer from ADM
+        if (_shared->audio_device()->ActiveAudioLayer(&activeLayer) != 0)
+        {
+            _shared->SetLastError(VE_UNDEFINED_SC_ERR, kTraceError,
+                "  Audio Device error");
+            return -1;
+        }
+    }
+    else
+    {
+        // Return VoE's internal layer setting
+        activeLayer = _shared->audio_device_layer();
+    }
+
+    // Map to AudioLayers
+    switch (activeLayer)
+    {
+        case AudioDeviceModule::kPlatformDefaultAudio:
+            audioLayer = kAudioPlatformDefault;
+            break;
+        case AudioDeviceModule::kWindowsCoreAudio:
+            audioLayer = kAudioWindowsCore;
+            break;
+        case AudioDeviceModule::kWindowsWaveAudio:
+            audioLayer = kAudioWindowsWave;
+            break;
+        case AudioDeviceModule::kLinuxAlsaAudio:
+            audioLayer = kAudioLinuxAlsa;
+            break;
+        case AudioDeviceModule::kLinuxPulseAudio:
+            audioLayer = kAudioLinuxPulse;
+            break;
+        default:
+            _shared->SetLastError(VE_UNDEFINED_SC_ERR, kTraceError,
+                "  unknown audio layer");
+    }
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "  Output: audioLayer=%d", audioLayer);
+
+    return 0;
+}
+int VoEHardwareImpl::GetNumOfRecordingDevices(int& devices)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetNumOfRecordingDevices(devices=?)");
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    devices = static_cast<int> (_shared->audio_device()->RecordingDevices());
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1), "  Output: devices=%d", devices);
+
+    return 0;
+}
+
+int VoEHardwareImpl::GetNumOfPlayoutDevices(int& devices)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetNumOfPlayoutDevices(devices=?)");
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    devices = static_cast<int> (_shared->audio_device()->PlayoutDevices());
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "  Output: devices=%d", devices);
+
+    return 0;
+}
+
+int VoEHardwareImpl::GetRecordingDeviceName(int index,
+                                            char strNameUTF8[128],
+                                            char strGuidUTF8[128])
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetRecordingDeviceName(index=%d)", index);
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (strNameUTF8 == NULL)
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "GetRecordingDeviceName() invalid argument");
+        return -1;
+    }
+
+    // Note that strGuidUTF8 is allowed to be NULL
+
+    // Init len variable to length of supplied vectors
+    const WebRtc_UWord16 strLen = 128;
+
+    // Check if length has been changed in module
+    assert(strLen == kAdmMaxDeviceNameSize);
+    assert(strLen == kAdmMaxGuidSize);
+
+    char name[strLen];
+    char guid[strLen];
+
+    // Get names from module
+    if (_shared->audio_device()->RecordingDeviceName(index, name, guid) != 0)
+    {
+        _shared->SetLastError(VE_CANNOT_RETRIEVE_DEVICE_NAME, kTraceError,
+            "GetRecordingDeviceName() failed to get device name");
+        return -1;
+    }
+
+    // Copy to vectors supplied by user
+    strncpy(strNameUTF8, name, strLen);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "  Output: strNameUTF8=%s", strNameUTF8);
+
+    if (strGuidUTF8 != NULL)
+    {
+        strncpy(strGuidUTF8, guid, strLen);
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+            VoEId(_shared->instance_id(), -1),
+            "  Output: strGuidUTF8=%s", strGuidUTF8);
+    }
+
+    return 0;
+}
+
+int VoEHardwareImpl::GetPlayoutDeviceName(int index,
+                                          char strNameUTF8[128],
+                                          char strGuidUTF8[128])
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetPlayoutDeviceName(index=%d)", index);
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (strNameUTF8 == NULL)
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "GetPlayoutDeviceName() invalid argument");
+        return -1;
+    }
+
+    // Note that strGuidUTF8 is allowed to be NULL
+
+    // Init len variable to length of supplied vectors
+    const WebRtc_UWord16 strLen = 128;
+
+    // Check if length has been changed in module
+    assert(strLen == kAdmMaxDeviceNameSize);
+    assert(strLen == kAdmMaxGuidSize);
+
+    char name[strLen];
+    char guid[strLen];
+
+    // Get names from module
+    if (_shared->audio_device()->PlayoutDeviceName(index, name, guid) != 0)
+    {
+        _shared->SetLastError(VE_CANNOT_RETRIEVE_DEVICE_NAME, kTraceError,
+            "GetPlayoutDeviceName() failed to get device name");
+        return -1;
+    }
+
+    // Copy to vectors supplied by user
+    strncpy(strNameUTF8, name, strLen);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "  Output: strNameUTF8=%s", strNameUTF8);
+
+    if (strGuidUTF8 != NULL)
+    {
+        strncpy(strGuidUTF8, guid, strLen);
+        WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+            VoEId(_shared->instance_id(), -1),
+            "  Output: strGuidUTF8=%s", strGuidUTF8);
+    }
+
+    return 0;
+}
+
+int VoEHardwareImpl::SetRecordingDevice(int index,
+                                        StereoChannel recordingChannel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetRecordingDevice(index=%d, recordingChannel=%d)",
+                 index, (int) recordingChannel);
+    CriticalSectionScoped cs(_shared->crit_sec());
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    bool isRecording(false);
+
+    // Store state about activated recording to be able to restore it after the
+    // recording device has been modified.
+    if (_shared->audio_device()->Recording())
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                     "SetRecordingDevice() device is modified while recording"
+                     " is active...");
+        isRecording = true;
+        if (_shared->audio_device()->StopRecording() == -1)
+        {
+            _shared->SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR, kTraceError,
+                "SetRecordingDevice() unable to stop recording");
+            return -1;
+        }
+    }
+
+    // We let the module do the index sanity
+
+    // Set recording channel
+    AudioDeviceModule::ChannelType recCh =
+        AudioDeviceModule::kChannelBoth;
+    switch (recordingChannel)
+    {
+        case kStereoLeft:
+            recCh = AudioDeviceModule::kChannelLeft;
+            break;
+        case kStereoRight:
+            recCh = AudioDeviceModule::kChannelRight;
+            break;
+        case kStereoBoth:
+            // default setting kChannelBoth (<=> mono)
+            break;
+    }
+
+    if (_shared->audio_device()->SetRecordingChannel(recCh) != 0) {
+      _shared->SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR, kTraceWarning,
+          "SetRecordingChannel() unable to set the recording channel");
+    }
+
+    // Map indices to unsigned since underlying functions need that
+    WebRtc_UWord16 indexU = static_cast<WebRtc_UWord16> (index);
+
+    WebRtc_Word32 res(0);
+
+    if (index == -1)
+    {
+        res = _shared->audio_device()->SetRecordingDevice(
+            AudioDeviceModule::kDefaultCommunicationDevice);
+    }
+    else if (index == -2)
+    {
+        res = _shared->audio_device()->SetRecordingDevice(
+            AudioDeviceModule::kDefaultDevice);
+    }
+    else
+    {
+        res = _shared->audio_device()->SetRecordingDevice(indexU);
+    }
+
+    if (res != 0)
+    {
+        _shared->SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR, kTraceError,
+            "SetRecordingDevice() unable to set the recording device");
+        return -1;
+    }
+
+    // Init microphone, so user can do volume settings etc
+    if (_shared->audio_device()->InitMicrophone() == -1)
+    {
+        _shared->SetLastError(VE_CANNOT_ACCESS_MIC_VOL, kTraceWarning,
+            "SetRecordingDevice() cannot access microphone");
+    }
+
+    // Set number of channels
+    bool available = false;
+    if (_shared->audio_device()->StereoRecordingIsAvailable(&available) != 0) {
+      _shared->SetLastError(VE_SOUNDCARD_ERROR, kTraceWarning,
+          "StereoRecordingIsAvailable() failed to query stereo recording");
+    }
+
+    if (_shared->audio_device()->SetStereoRecording(available) != 0)
+    {
+        _shared->SetLastError(VE_SOUNDCARD_ERROR, kTraceWarning,
+            "SetRecordingDevice() failed to set mono recording mode");
+    }
+
+    // Restore recording if it was enabled already when calling this function.
+    if (isRecording)
+    {
+        if (!_shared->ext_recording())
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "SetRecordingDevice() recording is now being restored...");
+            if (_shared->audio_device()->InitRecording() != 0)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceVoice,
+                    VoEId(_shared->instance_id(), -1),
+                    "SetRecordingDevice() failed to initialize recording");
+                return -1;
+            }
+            if (_shared->audio_device()->StartRecording() != 0)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceVoice,
+                             VoEId(_shared->instance_id(), -1),
+                             "SetRecordingDevice() failed to start recording");
+                return -1;
+            }
+        }
+    }
+
+    return 0;
+}
+
+int VoEHardwareImpl::SetPlayoutDevice(int index)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetPlayoutDevice(index=%d)", index);
+    CriticalSectionScoped cs(_shared->crit_sec());
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    bool isPlaying(false);
+
+    // Store state about activated playout to be able to restore it after the
+    // playout device has been modified.
+    if (_shared->audio_device()->Playing())
+    {
+        WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                     "SetPlayoutDevice() device is modified while playout is "
+                     "active...");
+        isPlaying = true;
+        if (_shared->audio_device()->StopPlayout() == -1)
+        {
+            _shared->SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR, kTraceError,
+                "SetPlayoutDevice() unable to stop playout");
+            return -1;
+        }
+    }
+
+    // We let the module do the index sanity
+
+    // Map indices to unsigned since underlying functions need that
+    WebRtc_UWord16 indexU = static_cast<WebRtc_UWord16> (index);
+
+    WebRtc_Word32 res(0);
+
+    if (index == -1)
+    {
+        res = _shared->audio_device()->SetPlayoutDevice(
+            AudioDeviceModule::kDefaultCommunicationDevice);
+    }
+    else if (index == -2)
+    {
+        res = _shared->audio_device()->SetPlayoutDevice(
+            AudioDeviceModule::kDefaultDevice);
+    }
+    else
+    {
+        res = _shared->audio_device()->SetPlayoutDevice(indexU);
+    }
+
+    if (res != 0)
+    {
+        _shared->SetLastError(VE_SOUNDCARD_ERROR, kTraceError,
+            "SetPlayoutDevice() unable to set the playout device");
+        return -1;
+    }
+
+    // Init speaker, so user can do volume settings etc
+    if (_shared->audio_device()->InitSpeaker() == -1)
+    {
+        _shared->SetLastError(VE_CANNOT_ACCESS_SPEAKER_VOL, kTraceWarning,
+            "SetPlayoutDevice() cannot access speaker");
+    }
+
+    // Set number of channels
+    bool available = false;
+    _shared->audio_device()->StereoPlayoutIsAvailable(&available);
+    if (_shared->audio_device()->SetStereoPlayout(available) != 0)
+    {
+        _shared->SetLastError(VE_SOUNDCARD_ERROR, kTraceWarning,
+            "SetPlayoutDevice() failed to set stereo playout mode");
+    }
+
+    // Restore playout if it was enabled already when calling this function.
+    if (isPlaying)
+    {
+        if (!_shared->ext_playout())
+        {
+            WEBRTC_TRACE(kTraceInfo, kTraceVoice,
+                VoEId(_shared->instance_id(), -1),
+                "SetPlayoutDevice() playout is now being restored...");
+            if (_shared->audio_device()->InitPlayout() != 0)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceVoice,
+                  VoEId(_shared->instance_id(), -1),
+                  "SetPlayoutDevice() failed to initialize playout");
+                return -1;
+            }
+            if (_shared->audio_device()->StartPlayout() != 0)
+            {
+                WEBRTC_TRACE(kTraceError, kTraceVoice,
+                             VoEId(_shared->instance_id(), -1),
+                             "SetPlayoutDevice() failed to start playout");
+                return -1;
+            }
+        }
+    }
+
+    return 0;
+}
+
+int VoEHardwareImpl::GetRecordingDeviceStatus(bool& isAvailable)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetRecordingDeviceStatus()");
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    // We let the module do isRecording sanity
+
+    bool available(false);
+
+    // Check availability
+    if (_shared->audio_device()->RecordingIsAvailable(&available) != 0)
+    {
+        _shared->SetLastError(VE_UNDEFINED_SC_REC_ERR, kTraceError,
+            "  Audio Device error");
+        return -1;
+    }
+
+    isAvailable = available;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "  Output: isAvailable = %d)", (int) isAvailable);
+
+    return 0;
+}
+
+int VoEHardwareImpl::GetPlayoutDeviceStatus(bool& isAvailable)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetPlayoutDeviceStatus()");
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    // We let the module do isPlaying sanity
+
+    bool available(false);
+
+    // Check availability
+    if (_shared->audio_device()->PlayoutIsAvailable(&available) != 0)
+    {
+        _shared->SetLastError(VE_PLAY_UNDEFINED_SC_ERR, kTraceError,
+            "  Audio Device error");
+        return -1;
+    }
+
+    isAvailable = available;
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "  Output: isAvailable = %d)", (int) isAvailable);
+
+    return 0;
+}
+
+int VoEHardwareImpl::ResetAudioDevice()
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "ResetAudioDevice()");
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+#if defined(MAC_IPHONE)
+    if (_shared->audio_device()->ResetAudioDevice() < 0)
+    {
+        _shared->SetLastError(VE_SOUNDCARD_ERROR, kTraceError,
+            "  Failed to reset sound device");
+        return -1;
+    }
+#else
+    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "  no support for resetting sound device");
+    return -1;
+#endif
+
+    return 0;
+}
+
+int VoEHardwareImpl::AudioDeviceControl(unsigned int par1, unsigned int par2,
+                                        unsigned int par3)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "AudioDeviceControl(%i, %i, %i)", par1, par2, par3);
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "  no support for resetting sound device");
+    return -1;
+}
+
+int VoEHardwareImpl::SetLoudspeakerStatus(bool enable)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetLoudspeakerStatus(enable=%i)", (int) enable);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+#if defined(WEBRTC_ANDROID)
+    if (_shared->audio_device()->SetLoudspeakerStatus(enable) < 0)
+    {
+        _shared->SetLastError(VE_IGNORED_FUNCTION, kTraceError,
+            "  Failed to set loudspeaker status");
+        return -1;
+    }
+
+    return 0;
+#else
+    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "  no support for setting loudspeaker status");
+    return -1;
+#endif
+}
+
+int VoEHardwareImpl::GetLoudspeakerStatus(bool& enabled)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetLoudspeakerStatus()");
+    IPHONE_NOT_SUPPORTED();
+
+#if defined(WEBRTC_ANDROID)
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    if (_shared->audio_device()->GetLoudspeakerStatus(&enabled) < 0)
+    {
+        _shared->SetLastError(VE_IGNORED_FUNCTION, kTraceError,
+            "  Failed to get loudspeaker status");
+        return -1;
+    }
+
+    return 0;
+#else
+    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+      "  no support for setting loudspeaker status");
+    return -1;
+#endif
+}
+
+int VoEHardwareImpl::GetCPULoad(int& loadPercent)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetCPULoad()");
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    // Get CPU load from ADM
+    WebRtc_UWord16 load(0);
+    if (_shared->audio_device()->CPULoad(&load) != 0)
+    {
+        _shared->SetLastError(VE_CPU_INFO_ERROR, kTraceError,
+            "  error getting system CPU load");
+        return -1;
+    }
+
+    loadPercent = static_cast<int> (load);
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "  Output: loadPercent = %d", loadPercent);
+
+    return 0;
+}
+
+int VoEHardwareImpl::GetSystemCPULoad(int& loadPercent)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetSystemCPULoad(loadPercent=?)");
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    // Check if implemented for this platform
+    if (!_cpu)
+    {
+        _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+            "  no support for getting system CPU load");
+        return -1;
+    }
+
+    // Get CPU load
+    WebRtc_Word32 load = _cpu->CpuUsage();
+    if (load < 0)
+    {
+        _shared->SetLastError(VE_CPU_INFO_ERROR, kTraceError,
+            "  error getting system CPU load");
+        return -1;
+    }
+
+    loadPercent = static_cast<int> (load);
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "  Output: loadPercent = %d", loadPercent);
+
+    return 0;
+}
+
+int VoEHardwareImpl::EnableBuiltInAEC(bool enable)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+        "%s", __FUNCTION__);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    return _shared->audio_device()->EnableBuiltInAEC(enable);
+}
+
+bool VoEHardwareImpl::BuiltInAECIsEnabled() const
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+        "%s", __FUNCTION__);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return false;
+    }
+
+    return _shared->audio_device()->BuiltInAECIsEnabled();
+}
+
+#endif  // WEBRTC_VOICE_ENGINE_HARDWARE_API
+
+} // namespace webrtc
diff --git a/src/voice_engine/voe_hardware_impl.h b/src/voice_engine/voe_hardware_impl.h
new file mode 100644
index 0000000..c801228
--- /dev/null
+++ b/src/voice_engine/voe_hardware_impl.h
@@ -0,0 +1,79 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_HARDWARE_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_HARDWARE_IMPL_H
+
+#include "voe_hardware.h"
+
+#include "shared_data.h"
+
+namespace webrtc
+{
+class CpuWrapper;
+
+class VoEHardwareImpl: public VoEHardware
+{
+public:
+    virtual int GetNumOfRecordingDevices(int& devices);
+
+    virtual int GetNumOfPlayoutDevices(int& devices);
+
+    virtual int GetRecordingDeviceName(int index,
+                                       char strNameUTF8[128],
+                                       char strGuidUTF8[128]);
+
+    virtual int GetPlayoutDeviceName(int index,
+                                     char strNameUTF8[128],
+                                     char strGuidUTF8[128]);
+
+    virtual int GetRecordingDeviceStatus(bool& isAvailable);
+
+    virtual int GetPlayoutDeviceStatus(bool& isAvailable);
+
+    virtual int SetRecordingDevice(
+        int index,
+        StereoChannel recordingChannel = kStereoBoth);
+
+    virtual int SetPlayoutDevice(int index);
+
+    virtual int SetAudioDeviceLayer(AudioLayers audioLayer);
+
+    virtual int GetAudioDeviceLayer(AudioLayers& audioLayer);
+
+    virtual int GetCPULoad(int& loadPercent);
+
+    virtual int GetSystemCPULoad(int& loadPercent);
+
+    virtual int ResetAudioDevice();
+
+    virtual int AudioDeviceControl(unsigned int par1,
+                                   unsigned int par2,
+                                   unsigned int par3);
+
+    virtual int SetLoudspeakerStatus(bool enable);
+
+    virtual int GetLoudspeakerStatus(bool& enabled);
+
+    virtual int EnableBuiltInAEC(bool enable);
+    virtual bool BuiltInAECIsEnabled() const;
+
+protected:
+    VoEHardwareImpl(voe::SharedData* shared);
+    virtual ~VoEHardwareImpl();
+
+private:
+    CpuWrapper* _cpu;
+    voe::SharedData* _shared;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_VOE_HARDWARE_IMPL_H
diff --git a/src/voice_engine/voe_neteq_stats_impl.cc b/src/voice_engine/voe_neteq_stats_impl.cc
new file mode 100644
index 0000000..c82f414
--- /dev/null
+++ b/src/voice_engine/voe_neteq_stats_impl.cc
@@ -0,0 +1,79 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_neteq_stats_impl.h"
+
+#include "audio_coding_module.h"
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "trace.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+
+namespace webrtc {
+
+VoENetEqStats* VoENetEqStats::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_NETEQ_STATS_API
+    return NULL;
+#else
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* s = reinterpret_cast<VoiceEngineImpl*>(voiceEngine);
+    s->AddRef();
+    return s;
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_NETEQ_STATS_API
+
+VoENetEqStatsImpl::VoENetEqStatsImpl(voe::SharedData* shared) : _shared(shared)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoENetEqStatsImpl::VoENetEqStatsImpl() - ctor");
+}
+
+VoENetEqStatsImpl::~VoENetEqStatsImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoENetEqStatsImpl::~VoENetEqStatsImpl() - dtor");
+}
+
+int VoENetEqStatsImpl::GetNetworkStatistics(int channel,
+                                            NetworkStatistics& stats)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetNetworkStatistics(channel=%d, stats=?)", channel);
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetNetworkStatistics() failed to locate channel");
+        return -1;
+    }
+
+    return channelPtr->GetNetworkStatistics(stats);
+}
+
+#endif  // #ifdef WEBRTC_VOICE_ENGINE_NETEQ_STATS_API
+
+}   // namespace webrtc
diff --git a/src/voice_engine/voe_neteq_stats_impl.h b/src/voice_engine/voe_neteq_stats_impl.h
new file mode 100644
index 0000000..1b077b3
--- /dev/null
+++ b/src/voice_engine/voe_neteq_stats_impl.h
@@ -0,0 +1,36 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_NETEQ_STATS_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_NETEQ_STATS_IMPL_H
+
+#include "voe_neteq_stats.h"
+
+#include "shared_data.h"
+
+namespace webrtc {
+
+class VoENetEqStatsImpl : public VoENetEqStats
+{
+public:
+    virtual int GetNetworkStatistics(int channel,
+                                     NetworkStatistics& stats);
+
+protected:
+    VoENetEqStatsImpl(voe::SharedData* shared);
+    virtual ~VoENetEqStatsImpl();
+
+private:
+    voe::SharedData* _shared;
+};
+
+}  // namespace webrtc
+
+#endif    // WEBRTC_VOICE_ENGINE_VOE_NETEQ_STATS_IMPL_H
diff --git a/src/voice_engine/voe_network_impl.cc b/src/voice_engine/voe_network_impl.cc
new file mode 100644
index 0000000..174abca
--- /dev/null
+++ b/src/voice_engine/voe_network_impl.cc
@@ -0,0 +1,872 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_network_impl.h"
+
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "trace.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+namespace webrtc
+{
+
+VoENetwork* VoENetwork::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_NETWORK_API
+    return NULL;
+#else
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* s = reinterpret_cast<VoiceEngineImpl*>(voiceEngine);
+    s->AddRef();
+    return s;
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_NETWORK_API
+
+VoENetworkImpl::VoENetworkImpl(voe::SharedData* shared) : _shared(shared)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoENetworkImpl() - ctor");
+}
+
+VoENetworkImpl::~VoENetworkImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "~VoENetworkImpl() - dtor");
+}
+
+int VoENetworkImpl::RegisterExternalTransport(int channel,
+                                              Transport& transport)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetExternalTransport(channel=%d, transport=0x%x)",
+                 channel, &transport);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetExternalTransport() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->RegisterExternalTransport(transport);
+}
+
+int VoENetworkImpl::DeRegisterExternalTransport(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "DeRegisterExternalTransport(channel=%d)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "DeRegisterExternalTransport() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->DeRegisterExternalTransport();
+}
+
+int VoENetworkImpl::ReceivedRTPPacket(int channel,
+                                      const void* data,
+                                      unsigned int length)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "ReceivedRTPPacket(channel=%d, length=%u)", channel, length);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if ((length < 12) || (length > 807))
+    {
+        _shared->SetLastError(VE_INVALID_PACKET, kTraceError,
+            "ReceivedRTPPacket() invalid packet length");
+        return -1;
+    }
+    if (NULL == data)
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "ReceivedRTPPacket() invalid data vector");
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "ReceivedRTPPacket() failed to locate channel");
+        return -1;
+    }
+
+    if (!channelPtr->ExternalTransport())
+    {
+        _shared->SetLastError(VE_INVALID_OPERATION, kTraceError,
+            "ReceivedRTPPacket() external transport is not enabled");
+        return -1;
+    }
+    return channelPtr->ReceivedRTPPacket((const WebRtc_Word8*) data, length);
+}
+
+int VoENetworkImpl::ReceivedRTCPPacket(int channel, const void* data,
+                                       unsigned int length)
+{
+    WEBRTC_TRACE(kTraceStream, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "ReceivedRTCPPacket(channel=%d, length=%u)", channel, length);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (length < 4)
+    {
+        _shared->SetLastError(VE_INVALID_PACKET, kTraceError,
+            "ReceivedRTCPPacket() invalid packet length");
+        return -1;
+    }
+    if (NULL == data)
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "ReceivedRTCPPacket() invalid data vector");
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "ReceivedRTCPPacket() failed to locate channel");
+        return -1;
+    }
+    if (!channelPtr->ExternalTransport())
+    {
+        _shared->SetLastError(VE_INVALID_OPERATION, kTraceError,
+            "ReceivedRTCPPacket() external transport is not enabled");
+        return -1;
+    }
+    return channelPtr->ReceivedRTCPPacket((const WebRtc_Word8*) data, length);
+}
+
+int VoENetworkImpl::GetSourceInfo(int channel,
+                                  int& rtpPort,
+                                  int& rtcpPort,
+                                  char ipAddr[64])
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetSourceInfo(channel=%d, rtpPort=?, rtcpPort=?, ipAddr[]=?)",
+                 channel);
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (NULL == ipAddr)
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "GetSourceInfo() invalid IP-address buffer");
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetSourceInfo() failed to locate channel");
+        return -1;
+    }
+    if (channelPtr->ExternalTransport())
+    {
+        _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "GetSourceInfo() external transport is enabled");
+        return -1;
+    }
+    return channelPtr->GetSourceInfo(rtpPort, rtcpPort, ipAddr);
+#else
+    _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "GetSourceInfo() VoE is built for external transport");
+    return -1;
+#endif
+}
+
+int VoENetworkImpl::GetLocalIP(char ipAddr[64], bool ipv6)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetLocalIP(ipAddr[]=?, ipv6=%d)", ipv6);
+    IPHONE_NOT_SUPPORTED();
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (NULL == ipAddr)
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "GetLocalIP() invalid IP-address buffer");
+        return -1;
+    }
+
+    // Create a temporary socket module to ensure that this method can be
+    // called also when no channels are created.
+    WebRtc_UWord8 numSockThreads(1);
+    UdpTransport* socketPtr =
+        UdpTransport::Create(
+            -1,
+            numSockThreads);
+    if (NULL == socketPtr)
+    {
+        _shared->SetLastError(VE_SOCKET_TRANSPORT_MODULE_ERROR, kTraceError,
+            "GetLocalIP() failed to create socket module");
+        return -1;
+    }
+
+    // Use a buffer big enough for IPv6 addresses and initialize it with zeros.
+    char localIPAddr[256] = {0};
+
+    if (ipv6)
+    {
+        char localIP[16];
+        if (socketPtr->LocalHostAddressIPV6(localIP) != 0)
+        {
+            _shared->SetLastError(VE_INVALID_IP_ADDRESS, kTraceError,
+                "GetLocalIP() failed to retrieve local IP - 1");
+            UdpTransport::Destroy(socketPtr);
+            return -1;
+        }
+        // Convert 128-bit address to character string (a:b:c:d:e:f:g:h)
+        sprintf(localIPAddr,
+                "%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x%.2x:%.2x"
+                "%.2x:%.2x%.2x",
+                localIP[0], localIP[1], localIP[2], localIP[3], localIP[4],
+                localIP[5], localIP[6], localIP[7], localIP[8], localIP[9],
+                localIP[10], localIP[11], localIP[12], localIP[13],
+                localIP[14], localIP[15]);
+    }
+    else
+    {
+        WebRtc_UWord32 localIP(0);
+        // Read local IP (as 32-bit address) from the socket module
+        if (socketPtr->LocalHostAddress(localIP) != 0)
+        {
+            _shared->SetLastError(VE_INVALID_IP_ADDRESS, kTraceError,
+                "GetLocalIP() failed to retrieve local IP - 2");
+            UdpTransport::Destroy(socketPtr);
+            return -1;
+        }
+        // Convert 32-bit address to character string (x.y.z.w)
+        sprintf(localIPAddr, "%d.%d.%d.%d", (int) ((localIP >> 24) & 0x0ff),
+                (int) ((localIP >> 16) & 0x0ff),
+                (int) ((localIP >> 8) & 0x0ff),
+                (int) (localIP & 0x0ff));
+    }
+
+    strcpy(ipAddr, localIPAddr);
+
+    UdpTransport::Destroy(socketPtr);
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "GetLocalIP() => ipAddr=%s", ipAddr);
+    return 0;
+#else
+    _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "GetLocalIP() VoE is built for external transport");
+    return -1;
+#endif
+}
+
+int VoENetworkImpl::EnableIPv6(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "EnableIPv6(channel=%d)", channel);
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "EnableIPv6() failed to locate channel");
+        return -1;
+    }
+    if (channelPtr->ExternalTransport())
+    {
+        _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "EnableIPv6() external transport is enabled");
+        return -1;
+    }
+    return channelPtr->EnableIPv6();
+#else
+    _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "EnableIPv6() VoE is built for external transport");
+    return -1;
+#endif
+}
+
+bool VoENetworkImpl::IPv6IsEnabled(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "IPv6IsEnabled(channel=%d)", channel);
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return false;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "IPv6IsEnabled() failed to locate channel");
+        return false;
+    }
+    if (channelPtr->ExternalTransport())
+    {
+        _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "IPv6IsEnabled() external transport is enabled");
+        return false;
+    }
+    return channelPtr->IPv6IsEnabled();
+#else
+    _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "IPv6IsEnabled() VoE is built for external transport");
+    return false;
+#endif
+}
+
+int VoENetworkImpl::SetSourceFilter(int channel,
+                                    int rtpPort,
+                                    int rtcpPort,
+                                    const char ipAddr[64])
+{
+    (ipAddr == NULL) ? WEBRTC_TRACE(kTraceApiCall, kTraceVoice,
+                                    VoEId(_shared->instance_id(), -1),
+                                    "SetSourceFilter(channel=%d, rtpPort=%d,"
+                                    " rtcpPort=%d)",
+                                    channel, rtpPort, rtcpPort)
+                     : WEBRTC_TRACE(kTraceApiCall, kTraceVoice,
+                                    VoEId(_shared->instance_id(), -1),
+                                    "SetSourceFilter(channel=%d, rtpPort=%d,"
+                                    " rtcpPort=%d, ipAddr=%s)",
+                                    channel, rtpPort, rtcpPort, ipAddr);
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if ((rtpPort < 0) || (rtpPort > 65535))
+    {
+        _shared->SetLastError(VE_INVALID_PORT_NMBR, kTraceError,
+            "SetSourceFilter() invalid RTP port");
+        return -1;
+    }
+    if ((rtcpPort < 0) || (rtcpPort > 65535))
+    {
+        _shared->SetLastError(VE_INVALID_PORT_NMBR, kTraceError,
+            "SetSourceFilter() invalid RTCP port");
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetSourceFilter() failed to locate channel");
+        return -1;
+    }
+    if (channelPtr->ExternalTransport())
+    {
+        _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "SetSourceFilter() external transport is enabled");
+        return -1;
+    }
+    return channelPtr->SetSourceFilter(rtpPort, rtcpPort, ipAddr);
+#else
+    _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "SetSourceFilter() VoE is built for external transport");
+    return -1;
+#endif
+}
+
+int VoENetworkImpl::GetSourceFilter(int channel,
+                                    int& rtpPort,
+                                    int& rtcpPort,
+                                    char ipAddr[64])
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetSourceFilter(channel=%d, rtpPort=?, rtcpPort=?, "
+                 "ipAddr[]=?)",
+                 channel);
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (NULL == ipAddr)
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "GetSourceFilter() invalid IP-address buffer");
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetSourceFilter() failed to locate channel");
+        return -1;
+    }
+    if (channelPtr->ExternalTransport())
+    {
+        _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "GetSourceFilter() external transport is enabled");
+        return -1;
+    }
+    return channelPtr->GetSourceFilter(rtpPort, rtcpPort, ipAddr);
+#else
+    _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "GetSourceFilter() VoE is built for external transport");
+    return -1;
+#endif
+}
+
+int VoENetworkImpl::SetSendTOS(int channel,
+                               int DSCP,
+                               int priority,
+                               bool useSetSockopt)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetSendTOS(channel=%d, DSCP=%d, useSetSockopt=%d)",
+                 channel, DSCP, useSetSockopt);
+
+#if !defined(_WIN32) && !defined(WEBRTC_LINUX) && !defined(WEBRTC_MAC)
+    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceWarning,
+        "SetSendTOS() is not supported on this platform");
+    return -1;
+#endif
+
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if ((DSCP < 0) || (DSCP > 63))
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "SetSendTOS() Invalid DSCP value");
+        return -1;
+    }
+#if defined(_WIN32) || defined(WEBRTC_LINUX)
+    if ((priority < -1) || (priority > 7))
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "SetSendTOS() Invalid priority value");
+        return -1;
+    }
+#else
+    if (-1 != priority)
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "SetSendTOS() priority not supported");
+        return -1;
+    }
+#endif
+#if defined(_WIN32)
+    if ((priority >= 0) && useSetSockopt)
+    {
+        // On Windows, priority and useSetSockopt cannot be combined
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "SetSendTOS() priority and useSetSockopt conflict");
+        return -1;
+    }
+#endif
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetSendTOS() failed to locate channel");
+        return -1;
+    }
+    if (channelPtr->ExternalTransport())
+    {
+        _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "SetSendTOS() external transport is enabled");
+        return -1;
+    }
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+    useSetSockopt = true;
+    WEBRTC_TRACE(kTraceInfo, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "   force useSetSockopt=true since there is no alternative"
+                 " implementation");
+#endif
+
+    return channelPtr->SetSendTOS(DSCP, priority, useSetSockopt);
+#else
+    _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "SetSendTOS() VoE is built for external transport");
+    return -1;
+#endif
+}
+
+int VoENetworkImpl::GetSendTOS(int channel,
+                               int& DSCP,
+                               int& priority,
+                               bool& useSetSockopt)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetSendTOS(channel=%d)", channel);
+
+#if !defined(_WIN32) && !defined(WEBRTC_LINUX) && !defined(WEBRTC_MAC)
+    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceWarning,
+        "GetSendTOS() is not supported on this platform");
+    return -1;
+#endif
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetSendTOS() failed to locate channel");
+        return -1;
+    }
+    if (channelPtr->ExternalTransport())
+    {
+        _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "GetSendTOS() external transport is enabled");
+        return -1;
+    }
+    return channelPtr->GetSendTOS(DSCP, priority, useSetSockopt);
+#else
+    _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "GetSendTOS() VoE is built for external transport");
+    return -1;
+#endif
+}
+
+int VoENetworkImpl::SetSendGQoS(int channel,
+                                bool enable,
+                                int serviceType,
+                                int overrideDSCP)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetSendGQOS(channel=%d, enable=%d, serviceType=%d,"
+                 " overrideDSCP=%d)",
+                 channel, (int) enable, serviceType, overrideDSCP);
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+#if !defined(_WIN32)
+    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceWarning,
+        "SetSendGQOS() is not supported on this platform");
+    return -1;
+#elif !defined(WEBRTC_EXTERNAL_TRANSPORT)
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetSendGQOS() failed to locate channel");
+        return -1;
+    }
+    if (channelPtr->ExternalTransport())
+    {
+        _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "SetSendGQOS() external transport is enabled");
+        return -1;
+    }
+    return channelPtr->SetSendGQoS(enable, serviceType, overrideDSCP);
+#else
+    _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "SetSendGQOS() VoE is built for external transport");
+    return -1;
+#endif
+}
+
+int VoENetworkImpl::GetSendGQoS(int channel,
+                                bool& enabled,
+                                int& serviceType,
+                                int& overrideDSCP)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetSendGQOS(channel=%d)", channel);
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+#if !defined(_WIN32)
+    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceWarning,
+        "GetSendGQOS() is not supported on this platform");
+    return -1;
+#elif !defined(WEBRTC_EXTERNAL_TRANSPORT)
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetSendGQOS() failed to locate channel");
+        return -1;
+    }
+    if (channelPtr->ExternalTransport())
+    {
+        _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED, kTraceError,
+            "GetSendGQOS() external transport is enabled");
+        return -1;
+    }
+    return channelPtr->GetSendGQoS(enabled, serviceType, overrideDSCP);
+#else
+    _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "GetSendGQOS() VoE is built for external transport");
+    return -1;
+#endif
+}
+
+int VoENetworkImpl::SetPacketTimeoutNotification(int channel,
+                                                 bool enable,
+                                                 int timeoutSeconds)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetPacketTimeoutNotification(channel=%d, enable=%d, "
+                 "timeoutSeconds=%d)",
+                 channel, (int) enable, timeoutSeconds);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (enable &&
+        ((timeoutSeconds < kVoiceEngineMinPacketTimeoutSec) ||
+        (timeoutSeconds > kVoiceEngineMaxPacketTimeoutSec)))
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "SetPacketTimeoutNotification() invalid timeout size");
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetPacketTimeoutNotification() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetPacketTimeoutNotification(enable, timeoutSeconds);
+}
+
+int VoENetworkImpl::GetPacketTimeoutNotification(int channel,
+                                                 bool& enabled,
+                                                 int& timeoutSeconds)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetPacketTimeoutNotification(channel=%d, enabled=?,"
+                 " timeoutSeconds=?)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetPacketTimeoutNotification() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetPacketTimeoutNotification(enabled, timeoutSeconds);
+}
+
+int VoENetworkImpl::RegisterDeadOrAliveObserver(int channel,
+                                                VoEConnectionObserver&
+                                                observer)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "RegisterDeadOrAliveObserver(channel=%d, observer=0x%x)",
+                 channel, &observer);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "RegisterDeadOrAliveObserver() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->RegisterDeadOrAliveObserver(observer);
+}
+
+int VoENetworkImpl::DeRegisterDeadOrAliveObserver(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "DeRegisterDeadOrAliveObserver(channel=%d)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "DeRegisterDeadOrAliveObserver() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->DeRegisterDeadOrAliveObserver();
+}
+
+int VoENetworkImpl::SetPeriodicDeadOrAliveStatus(int channel, bool enable,
+                                                 int sampleTimeSeconds)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetPeriodicDeadOrAliveStatus(channel=%d, enable=%d,"
+                 " sampleTimeSeconds=%d)",
+                 channel, enable, sampleTimeSeconds);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (enable &&
+        ((sampleTimeSeconds < kVoiceEngineMinSampleTimeSec) ||
+        (sampleTimeSeconds > kVoiceEngineMaxSampleTimeSec)))
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "SetPeriodicDeadOrAliveStatus() invalid sample time");
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetPeriodicDeadOrAliveStatus() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetPeriodicDeadOrAliveStatus(enable, sampleTimeSeconds);
+}
+
+int VoENetworkImpl::GetPeriodicDeadOrAliveStatus(int channel,
+                                                 bool& enabled,
+                                                 int& sampleTimeSeconds)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetPeriodicDeadOrAliveStatus(channel=%d, enabled=?,"
+                 " sampleTimeSeconds=?)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetPeriodicDeadOrAliveStatus() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetPeriodicDeadOrAliveStatus(enabled,
+                                                    sampleTimeSeconds);
+}
+
+int VoENetworkImpl::SendUDPPacket(int channel,
+                                  const void* data,
+                                  unsigned int length,
+                                  int& transmittedBytes,
+                                  bool useRtcpSocket)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SendUDPPacket(channel=%d, data=0x%x, length=%u, useRTCP=%d)",
+                 channel, data, length, useRtcpSocket);
+#ifndef WEBRTC_EXTERNAL_TRANSPORT
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (NULL == data)
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "SendUDPPacket() invalid data buffer");
+        return -1;
+    }
+    if (0 == length)
+    {
+        _shared->SetLastError(VE_INVALID_PACKET, kTraceError,
+            "SendUDPPacket() invalid packet size");
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SendUDPPacket() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SendUDPPacket(data,
+                                     length,
+                                     transmittedBytes,
+                                     useRtcpSocket);
+#else
+    _shared->SetLastError(VE_EXTERNAL_TRANSPORT_ENABLED, kTraceWarning,
+        "SendUDPPacket() VoE is built for external transport");
+    return -1;
+#endif
+}
+
+#endif  // WEBRTC_VOICE_ENGINE_NETWORK_API
+
+} // namespace webrtc
diff --git a/src/voice_engine/voe_network_impl.h b/src/voice_engine/voe_network_impl.h
new file mode 100644
index 0000000..b159c81
--- /dev/null
+++ b/src/voice_engine/voe_network_impl.h
@@ -0,0 +1,114 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_NETWORK_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_NETWORK_IMPL_H
+
+#include "voe_network.h"
+
+#include "shared_data.h"
+
+
+namespace webrtc
+{
+
+class VoENetworkImpl: public VoENetwork
+{
+public:
+    virtual int RegisterExternalTransport(int channel, Transport& transport);
+
+    virtual int DeRegisterExternalTransport(int channel);
+
+    virtual int ReceivedRTPPacket(int channel,
+                                  const void* data,
+                                  unsigned int length);
+
+    virtual int ReceivedRTCPPacket(int channel,
+                                   const void* data,
+                                   unsigned int length);
+
+    virtual int GetSourceInfo(int channel,
+                              int& rtpPort,
+                              int& rtcpPort,
+                              char ipAddr[64]);
+
+    virtual int GetLocalIP(char ipAddr[64], bool ipv6 = false);
+
+    virtual int EnableIPv6(int channel);
+
+    virtual bool IPv6IsEnabled(int channel);
+
+    virtual int SetSourceFilter(int channel,
+                                int rtpPort,
+                                int rtcpPort,
+                                const char ipAddr[64] = 0);
+
+    virtual int GetSourceFilter(int channel,
+                                int& rtpPort,
+                                int& rtcpPort,
+                                char ipAddr[64]);
+
+    virtual int SetSendTOS(int channel,
+                           int DSCP,
+                           int priority = -1,
+                           bool useSetSockopt = false);
+
+    virtual int GetSendTOS(int channel,
+                           int& DSCP,
+                           int& priority,
+                           bool& useSetSockopt);
+
+    virtual int SetSendGQoS(int channel,
+                            bool enable,
+                            int serviceType,
+                            int overrideDSCP);
+
+    virtual int GetSendGQoS(int channel,
+                            bool& enabled,
+                            int& serviceType,
+                            int& overrideDSCP);
+
+    virtual int SetPacketTimeoutNotification(int channel,
+                                             bool enable,
+                                             int timeoutSeconds = 2);
+
+    virtual int GetPacketTimeoutNotification(int channel,
+                                             bool& enabled,
+                                             int& timeoutSeconds);
+
+    virtual int RegisterDeadOrAliveObserver(int channel,
+                                            VoEConnectionObserver& observer);
+
+    virtual int DeRegisterDeadOrAliveObserver(int channel);
+
+    virtual int SetPeriodicDeadOrAliveStatus(int channel,
+                                             bool enable,
+                                             int sampleTimeSeconds = 2);
+
+    virtual int GetPeriodicDeadOrAliveStatus(int channel,
+                                             bool& enabled,
+                                             int& sampleTimeSeconds);
+
+    virtual int SendUDPPacket(int channel,
+                              const void* data,
+                              unsigned int length,
+                              int& transmittedBytes,
+                              bool useRtcpSocket = false);
+
+protected:
+    VoENetworkImpl(voe::SharedData* shared);
+    virtual ~VoENetworkImpl();
+private:
+    voe::SharedData* _shared;
+};
+
+} // namespace webrtc
+
+#endif  // WEBRTC_VOICE_ENGINE_VOE_NETWORK_IMPL_H
diff --git a/src/voice_engine/voe_rtp_rtcp_impl.cc b/src/voice_engine/voe_rtp_rtcp_impl.cc
new file mode 100644
index 0000000..d21f722
--- /dev/null
+++ b/src/voice_engine/voe_rtp_rtcp_impl.cc
@@ -0,0 +1,667 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_rtp_rtcp_impl.h"
+#include "trace.h"
+#include "file_wrapper.h"
+#include "critical_section_wrapper.h"
+#include "voice_engine_impl.h"
+#include "voe_errors.h"
+
+#include "channel.h"
+#include "transmit_mixer.h"
+
+namespace webrtc {
+
+VoERTP_RTCP* VoERTP_RTCP::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_RTP_RTCP_API
+    return NULL;
+#else
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* s = reinterpret_cast<VoiceEngineImpl*>(voiceEngine);
+    s->AddRef();
+    return s;
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_RTP_RTCP_API
+
+VoERTP_RTCPImpl::VoERTP_RTCPImpl(voe::SharedData* shared) : _shared(shared)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoERTP_RTCPImpl::VoERTP_RTCPImpl() - ctor");
+}
+
+VoERTP_RTCPImpl::~VoERTP_RTCPImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoERTP_RTCPImpl::~VoERTP_RTCPImpl() - dtor");
+}
+
+int VoERTP_RTCPImpl::RegisterRTPObserver(int channel, VoERTPObserver& observer)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "RegisterRTPObserver(channel=%d observer=0x%x)",
+                 channel, &observer);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "RegisterRTPObserver() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->RegisterRTPObserver(observer);
+}
+
+int VoERTP_RTCPImpl::DeRegisterRTPObserver(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "DeRegisterRTPObserver(channel=%d)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "DeRegisterRTPObserver() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->DeRegisterRTPObserver();
+}
+
+int VoERTP_RTCPImpl::RegisterRTCPObserver(int channel, VoERTCPObserver& observer)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "RegisterRTCPObserver(channel=%d observer=0x%x)",
+                 channel, &observer);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "RegisterRTPObserver() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->RegisterRTCPObserver(observer);
+}
+
+int VoERTP_RTCPImpl::DeRegisterRTCPObserver(int channel)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "DeRegisterRTCPObserver(channel=%d)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "DeRegisterRTCPObserver() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->DeRegisterRTCPObserver();
+}
+
+int VoERTP_RTCPImpl::SetLocalSSRC(int channel, unsigned int ssrc)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetLocalSSRC(channel=%d, %lu)", channel, ssrc);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetLocalSSRC() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetLocalSSRC(ssrc);
+}
+
+int VoERTP_RTCPImpl::GetLocalSSRC(int channel, unsigned int& ssrc)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetLocalSSRC(channel=%d, ssrc=?)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetLocalSSRC() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetLocalSSRC(ssrc);
+}
+
+int VoERTP_RTCPImpl::GetRemoteSSRC(int channel, unsigned int& ssrc)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetRemoteSSRC(channel=%d, ssrc=?)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRemoteSSRC() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRemoteSSRC(ssrc);
+}
+
+int VoERTP_RTCPImpl::GetRemoteCSRCs(int channel, unsigned int arrCSRC[15])
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetRemoteCSRCs(channel=%d, arrCSRC=?)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRemoteCSRCs() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRemoteCSRCs(arrCSRC);
+}
+
+
+int VoERTP_RTCPImpl::SetRTPAudioLevelIndicationStatus(int channel,
+                                                      bool enable,
+                                                      unsigned char ID)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetRTPAudioLevelIndicationStatus(channel=%d, enable=%d,"
+                 " ID=%u)", channel, enable, ID);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (ID < kVoiceEngineMinRtpExtensionId ||
+        ID > kVoiceEngineMaxRtpExtensionId)
+    {
+        // [RFC5285] The 4-bit ID is the local identifier of this element in
+        // the range 1-14 inclusive.
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "SetRTPAudioLevelIndicationStatus() invalid ID parameter");
+        return -1;
+    }
+
+    // Set state and ID for the specified channel.
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetRTPAudioLevelIndicationStatus() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetRTPAudioLevelIndicationStatus(enable, ID);
+}
+
+int VoERTP_RTCPImpl::GetRTPAudioLevelIndicationStatus(int channel,
+                                                      bool& enabled,
+                                                      unsigned char& ID)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetRTPAudioLevelIndicationStatus(channel=%d, enable=?, ID=?)",
+                 channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRTPAudioLevelIndicationStatus() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRTPAudioLevelIndicationStatus(enabled, ID);
+}
+
+int VoERTP_RTCPImpl::SetRTCPStatus(int channel, bool enable)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetRTCPStatus(channel=%d, enable=%d)", channel, enable);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetRTCPStatus() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetRTCPStatus(enable);
+}
+
+int VoERTP_RTCPImpl::GetRTCPStatus(int channel, bool& enabled)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetRTCPStatus(channel=%d)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRTCPStatus() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRTCPStatus(enabled);
+}
+
+int VoERTP_RTCPImpl::SetRTCP_CNAME(int channel, const char cName[256])
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetRTCP_CNAME(channel=%d, cName=%s)", channel, cName);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetRTCP_CNAME() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetRTCP_CNAME(cName);
+}
+
+int VoERTP_RTCPImpl::GetRTCP_CNAME(int channel, char cName[256])
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetRTCP_CNAME(channel=%d, cName=?)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRTCP_CNAME() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRTCP_CNAME(cName);
+}
+
+int VoERTP_RTCPImpl::GetRemoteRTCP_CNAME(int channel, char cName[256])
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetRemoteRTCP_CNAME(channel=%d, cName=?)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRemoteRTCP_CNAME() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRemoteRTCP_CNAME(cName);
+}
+
+int VoERTP_RTCPImpl::GetRemoteRTCPData(
+    int channel,
+    unsigned int& NTPHigh, // from sender info in SR
+    unsigned int& NTPLow, // from sender info in SR
+    unsigned int& timestamp, // from sender info in SR
+    unsigned int& playoutTimestamp, // derived locally
+    unsigned int* jitter, // from report block 1 in SR/RR
+    unsigned short* fractionLost) // from report block 1 in SR/RR
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetRemoteRTCPData(channel=%d,...)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRemoteRTCP_CNAME() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRemoteRTCPData(NTPHigh,
+                                         NTPLow,
+                                         timestamp,
+                                         playoutTimestamp,
+                                         jitter,
+                                         fractionLost);
+}
+
+int VoERTP_RTCPImpl::SendApplicationDefinedRTCPPacket(
+    int channel,
+    const unsigned char subType,
+    unsigned int name,
+    const char* data,
+    unsigned short dataLengthInBytes)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SendApplicationDefinedRTCPPacket(channel=%d, subType=%u,"
+                 "name=%u, data=?, dataLengthInBytes=%u)",
+                 channel, subType, name, dataLengthInBytes);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SendApplicationDefinedRTCPPacket() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SendApplicationDefinedRTCPPacket(subType,
+                                                        name,
+                                                        data,
+                                                        dataLengthInBytes);
+}
+
+int VoERTP_RTCPImpl::GetRTPStatistics(int channel,
+                                      unsigned int& averageJitterMs,
+                                      unsigned int& maxJitterMs,
+                                      unsigned int& discardedPackets)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetRTPStatistics(channel=%d,....)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRTPStatistics() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRTPStatistics(averageJitterMs,
+                                        maxJitterMs,
+                                        discardedPackets);
+}
+
+int VoERTP_RTCPImpl::GetRTCPStatistics(int channel, CallStatistics& stats)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetRTCPStatistics(channel=%d)", channel);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetRTPStatistics() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRTPStatistics(stats);
+}
+
+int VoERTP_RTCPImpl::GetRemoteRTCPSenderInfo(int channel,
+                                             SenderInfo* sender_info) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetRemoteRTCPSenderInfo(channel=%d)", channel);
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+  voe::ScopedChannel sc(_shared->channel_manager(), channel);
+  voe::Channel* channel_ptr = sc.ChannelPtr();
+  if (channel_ptr == NULL) {
+    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+        "GetRemoteRTCPSenderInfo() failed to locate channel");
+    return -1;
+  }
+  return channel_ptr->GetRemoteRTCPSenderInfo(sender_info);
+}
+
+int VoERTP_RTCPImpl::GetRemoteRTCPReportBlocks(
+    int channel, std::vector<ReportBlock>* report_blocks) {
+  WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetRemoteRTCPReportBlocks(channel=%d)", channel);
+  if (!_shared->statistics().Initialized()) {
+    _shared->SetLastError(VE_NOT_INITED, kTraceError);
+    return -1;
+  }
+  voe::ScopedChannel sc(_shared->channel_manager(), channel);
+  voe::Channel* channel_ptr = sc.ChannelPtr();
+  if (channel_ptr == NULL) {
+    _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+        "GetRemoteRTCPReportBlocks() failed to locate channel");
+    return -1;
+  }
+  return channel_ptr->GetRemoteRTCPReportBlocks(report_blocks);
+}
+
+int VoERTP_RTCPImpl::SetFECStatus(int channel, bool enable, int redPayloadtype)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetFECStatus(channel=%d, enable=%d, redPayloadtype=%d)",
+                 channel, enable, redPayloadtype);
+#ifdef WEBRTC_CODEC_RED
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetFECStatus() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetFECStatus(enable, redPayloadtype);
+#else
+    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "SetFECStatus() RED is not supported");
+    return -1;
+#endif
+}
+
+int VoERTP_RTCPImpl::GetFECStatus(int channel,
+                                  bool& enabled,
+                                  int& redPayloadtype)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetFECStatus(channel=%d, enabled=?, redPayloadtype=?)",
+                 channel);
+#ifdef WEBRTC_CODEC_RED
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetFECStatus() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetFECStatus(enabled, redPayloadtype);
+#else
+    _shared->SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,
+        "GetFECStatus() RED is not supported");
+    return -1;
+#endif
+}
+
+int VoERTP_RTCPImpl::StartRTPDump(int channel,
+                                  const char fileNameUTF8[1024],
+                                  RTPDirections direction)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "StartRTPDump(channel=%d, fileNameUTF8=%s, direction=%d)",
+                 channel, fileNameUTF8, direction);
+    assert(1024 == FileWrapper::kMaxFileNameSize);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "StartRTPDump() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->StartRTPDump(fileNameUTF8, direction);
+}
+
+int VoERTP_RTCPImpl::StopRTPDump(int channel, RTPDirections direction)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "StopRTPDump(channel=%d, direction=%d)", channel, direction);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "StopRTPDump() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->StopRTPDump(direction);
+}
+
+int VoERTP_RTCPImpl::RTPDumpIsActive(int channel, RTPDirections direction)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "RTPDumpIsActive(channel=%d, direction=%d)",
+                 channel, direction);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "StopRTPDump() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->RTPDumpIsActive(direction);
+}
+
+int VoERTP_RTCPImpl::InsertExtraRTPPacket(int channel,
+                                          unsigned char payloadType,
+                                          bool markerBit,
+                                          const char* payloadData,
+                                          unsigned short payloadSize)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "InsertExtraRTPPacket(channel=%d, payloadType=%u,"
+                 " markerBit=%u, payloadSize=%u)",
+                 channel, payloadType, markerBit, payloadSize);
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "StopRTPDump() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->InsertExtraRTPPacket(payloadType,
+                                            markerBit,
+                                            payloadData,
+                                            payloadSize);
+}
+
+#endif  // #ifdef WEBRTC_VOICE_ENGINE_RTP_RTCP_API
+
+}  // namespace webrtc
diff --git a/src/voice_engine/voe_rtp_rtcp_impl.h b/src/voice_engine/voe_rtp_rtcp_impl.h
new file mode 100644
index 0000000..721499c
--- /dev/null
+++ b/src/voice_engine/voe_rtp_rtcp_impl.h
@@ -0,0 +1,126 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_RTP_RTCP_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_RTP_RTCP_IMPL_H
+
+#include "voe_rtp_rtcp.h"
+
+#include "shared_data.h"
+
+namespace webrtc {
+
+class VoERTP_RTCPImpl : public VoERTP_RTCP
+{
+public:
+    // Registration of observers for RTP and RTCP callbacks
+    virtual int RegisterRTPObserver(int channel, VoERTPObserver& observer);
+
+    virtual int DeRegisterRTPObserver(int channel);
+
+    virtual int RegisterRTCPObserver(int channel, VoERTCPObserver& observer);
+
+    virtual int DeRegisterRTCPObserver(int channel);
+
+    // RTCP
+    virtual int SetRTCPStatus(int channel, bool enable);
+
+    virtual int GetRTCPStatus(int channel, bool& enabled);
+
+    virtual int SetRTCP_CNAME(int channel, const char cName[256]);
+
+    virtual int GetRTCP_CNAME(int channel, char cName[256]);
+
+    virtual int GetRemoteRTCP_CNAME(int channel, char cName[256]);
+
+    virtual int GetRemoteRTCPData(int channel,
+                                  unsigned int& NTPHigh,
+                                  unsigned int& NTPLow,
+                                  unsigned int& timestamp,
+                                  unsigned int& playoutTimestamp,
+                                  unsigned int* jitter = NULL,
+                                  unsigned short* fractionLost = NULL);
+
+    virtual int SendApplicationDefinedRTCPPacket(
+        int channel,
+        const unsigned char subType,
+        unsigned int name,
+        const char* data,
+        unsigned short dataLengthInBytes);
+
+    // SSRC
+    virtual int SetLocalSSRC(int channel, unsigned int ssrc);
+
+    virtual int GetLocalSSRC(int channel, unsigned int& ssrc);
+
+    virtual int GetRemoteSSRC(int channel, unsigned int& ssrc);
+
+    // RTP Header Extension for Client-to-Mixer Audio Level Indication
+    virtual int SetRTPAudioLevelIndicationStatus(int channel,
+                                                 bool enable,
+                                                 unsigned char ID);
+
+    virtual int GetRTPAudioLevelIndicationStatus(int channel,
+                                                 bool& enabled,
+                                                 unsigned char& ID);
+
+    // CSRC 
+    virtual int GetRemoteCSRCs(int channel, unsigned int arrCSRC[15]);
+
+    // Statistics
+    virtual int GetRTPStatistics(int channel,
+                                 unsigned int& averageJitterMs,
+                                 unsigned int& maxJitterMs,
+                                 unsigned int& discardedPackets);
+
+    virtual int GetRTCPStatistics(int channel, CallStatistics& stats);
+
+    virtual int GetRemoteRTCPSenderInfo(int channel, SenderInfo* sender_info);
+
+    virtual int GetRemoteRTCPReportBlocks(
+        int channel, std::vector<ReportBlock>* report_blocks);
+
+    // FEC
+    virtual int SetFECStatus(int channel,
+                             bool enable,
+                             int redPayloadtype = -1);
+
+    virtual int GetFECStatus(int channel, bool& enabled, int& redPayloadtype);
+
+    // Store RTP and RTCP packets and dump to file (compatible with rtpplay)
+    virtual int StartRTPDump(int channel,
+                             const char fileNameUTF8[1024],
+                             RTPDirections direction = kRtpIncoming);
+
+    virtual int StopRTPDump(int channel,
+                            RTPDirections direction = kRtpIncoming);
+
+    virtual int RTPDumpIsActive(int channel,
+                                RTPDirections direction = kRtpIncoming);
+
+    // Insert (and transmits) extra RTP packet into active RTP audio stream
+    virtual int InsertExtraRTPPacket(int channel,
+                                     unsigned char payloadType,
+                                     bool markerBit,
+                                     const char* payloadData,
+                                     unsigned short payloadSize);
+
+protected:
+    VoERTP_RTCPImpl(voe::SharedData* shared);
+    virtual ~VoERTP_RTCPImpl();
+
+private:
+    voe::SharedData* _shared;
+};
+
+}  // namespace webrtc
+
+#endif    // WEBRTC_VOICE_ENGINE_VOE_RTP_RTCP_IMPL_H
+
diff --git a/src/voice_engine/voe_video_sync_impl.cc b/src/voice_engine/voe_video_sync_impl.cc
new file mode 100644
index 0000000..a509f70
--- /dev/null
+++ b/src/voice_engine/voe_video_sync_impl.cc
@@ -0,0 +1,222 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_video_sync_impl.h"
+
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "trace.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+namespace webrtc {
+
+VoEVideoSync* VoEVideoSync::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
+    return NULL;
+#else
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* s = reinterpret_cast<VoiceEngineImpl*>(voiceEngine);
+    s->AddRef();
+    return s;
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
+
+VoEVideoSyncImpl::VoEVideoSyncImpl(voe::SharedData* shared) : _shared(shared)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoEVideoSyncImpl::VoEVideoSyncImpl() - ctor");
+}
+
+VoEVideoSyncImpl::~VoEVideoSyncImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "VoEVideoSyncImpl::~VoEVideoSyncImpl() - dtor");
+}
+
+int VoEVideoSyncImpl::GetPlayoutTimestamp(int channel, unsigned int& timestamp)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetPlayoutTimestamp(channel=%d, timestamp=?)", channel);
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetPlayoutTimestamp() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetPlayoutTimestamp(timestamp);
+}
+
+int VoEVideoSyncImpl::SetInitTimestamp(int channel,
+                                       unsigned int timestamp)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetInitTimestamp(channel=%d, timestamp=%lu)",
+                 channel, timestamp);
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetInitTimestamp() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetInitTimestamp(timestamp);
+}
+
+int VoEVideoSyncImpl::SetInitSequenceNumber(int channel,
+                                            short sequenceNumber)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetInitSequenceNumber(channel=%d, sequenceNumber=%hd)",
+                 channel, sequenceNumber);
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetInitSequenceNumber() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetInitSequenceNumber(sequenceNumber);
+}
+
+int VoEVideoSyncImpl::SetMinimumPlayoutDelay(int channel,int delayMs)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "SetMinimumPlayoutDelay(channel=%d, delayMs=%d)",
+                 channel, delayMs);
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetMinimumPlayoutDelay() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetMinimumPlayoutDelay(delayMs);
+}
+
+int VoEVideoSyncImpl::GetDelayEstimate(int channel, int& delayMs)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetDelayEstimate(channel=%d, delayMs=?)", channel);
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetDelayEstimate() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetDelayEstimate(delayMs);
+}
+
+int VoEVideoSyncImpl::GetPlayoutBufferSize(int& bufferMs)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetPlayoutBufferSize(bufferMs=?)");
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    AudioDeviceModule::BufferType type
+        (AudioDeviceModule::kFixedBufferSize);
+    WebRtc_UWord16 sizeMS(0);
+    if (_shared->audio_device()->PlayoutBuffer(&type, &sizeMS) != 0)
+    {
+        _shared->SetLastError(VE_AUDIO_DEVICE_MODULE_ERROR, kTraceError,
+            "GetPlayoutBufferSize() failed to read buffer size");
+        return -1;
+    }
+    bufferMs = sizeMS;
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "GetPlayoutBufferSize() => bufferMs=%d", bufferMs);
+    return 0;
+}
+
+int VoEVideoSyncImpl::GetRtpRtcp(int channel, RtpRtcp* &rtpRtcpModule)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+                 "GetRtpRtcp(channel=%i)", channel);
+    
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetPlayoutTimestamp() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetRtpRtcp(rtpRtcpModule);
+}
+
+
+#endif  // #ifdef WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
+
+}  // namespace webrtc
diff --git a/src/voice_engine/voe_video_sync_impl.h b/src/voice_engine/voe_video_sync_impl.h
new file mode 100644
index 0000000..1b75f05
--- /dev/null
+++ b/src/voice_engine/voe_video_sync_impl.h
@@ -0,0 +1,47 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_VIDEO_SYNC_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_VIDEO_SYNC_IMPL_H
+
+#include "voe_video_sync.h"
+
+#include "shared_data.h"
+
+namespace webrtc {
+
+class VoEVideoSyncImpl : public VoEVideoSync
+{
+public:
+    virtual int GetPlayoutBufferSize(int& bufferMs);
+
+    virtual int SetMinimumPlayoutDelay(int channel, int delayMs);
+
+    virtual int GetDelayEstimate(int channel, int& delayMs);
+
+    virtual int SetInitTimestamp(int channel, unsigned int timestamp);
+
+    virtual int SetInitSequenceNumber(int channel, short sequenceNumber);
+
+    virtual int GetPlayoutTimestamp(int channel, unsigned int& timestamp);
+
+    virtual int GetRtpRtcp(int channel, RtpRtcp* &rtpRtcpModule);
+
+protected:
+    VoEVideoSyncImpl(voe::SharedData* shared);
+    virtual ~VoEVideoSyncImpl();
+
+private:
+    voe::SharedData* _shared;
+};
+
+}   // namespace webrtc
+
+#endif    // WEBRTC_VOICE_ENGINE_VOE_VIDEO_SYNC_IMPL_H
diff --git a/src/voice_engine/voe_volume_control_impl.cc b/src/voice_engine/voe_volume_control_impl.cc
new file mode 100644
index 0000000..f821ab3
--- /dev/null
+++ b/src/voice_engine/voe_volume_control_impl.cc
@@ -0,0 +1,640 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voe_volume_control_impl.h"
+
+#include "channel.h"
+#include "critical_section_wrapper.h"
+#include "output_mixer.h"
+#include "trace.h"
+#include "transmit_mixer.h"
+#include "voe_errors.h"
+#include "voice_engine_impl.h"
+
+namespace webrtc {
+
+VoEVolumeControl* VoEVolumeControl::GetInterface(VoiceEngine* voiceEngine)
+{
+#ifndef WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
+    return NULL;
+#else
+    if (NULL == voiceEngine)
+    {
+        return NULL;
+    }
+    VoiceEngineImpl* s = reinterpret_cast<VoiceEngineImpl*>(voiceEngine);
+    s->AddRef();
+    return s;
+#endif
+}
+
+#ifdef WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
+
+VoEVolumeControlImpl::VoEVolumeControlImpl(voe::SharedData* shared)
+    : _shared(shared)
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "VoEVolumeControlImpl::VoEVolumeControlImpl() - ctor");
+}
+
+VoEVolumeControlImpl::~VoEVolumeControlImpl()
+{
+    WEBRTC_TRACE(kTraceMemory, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "VoEVolumeControlImpl::~VoEVolumeControlImpl() - dtor");
+}
+
+int VoEVolumeControlImpl::SetSpeakerVolume(unsigned int volume)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "SetSpeakerVolume(volume=%u)", volume);
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (volume > kMaxVolumeLevel)
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "SetSpeakerVolume() invalid argument");
+        return -1;
+    }
+
+    WebRtc_UWord32 maxVol(0);
+    WebRtc_UWord32 spkrVol(0);
+
+    // scale: [0,kMaxVolumeLevel] -> [0,MaxSpeakerVolume]
+    if (_shared->audio_device()->MaxSpeakerVolume(&maxVol) != 0)
+    {
+        _shared->SetLastError(VE_MIC_VOL_ERROR, kTraceError,
+            "SetSpeakerVolume() failed to get max volume");
+        return -1;
+    }
+    // Round the value and avoid floating computation.
+    spkrVol = (WebRtc_UWord32)((volume * maxVol +
+        (int)(kMaxVolumeLevel / 2)) / (kMaxVolumeLevel));
+
+    // set the actual volume using the audio mixer
+    if (_shared->audio_device()->SetSpeakerVolume(spkrVol) != 0)
+    {
+        _shared->SetLastError(VE_MIC_VOL_ERROR, kTraceError,
+            "SetSpeakerVolume() failed to set speaker volume");
+        return -1;
+    }
+    return 0;
+}
+
+int VoEVolumeControlImpl::GetSpeakerVolume(unsigned int& volume)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetSpeakerVolume()");
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    WebRtc_UWord32 spkrVol(0);
+    WebRtc_UWord32 maxVol(0);
+
+    if (_shared->audio_device()->SpeakerVolume(&spkrVol) != 0)
+    {
+        _shared->SetLastError(VE_GET_MIC_VOL_ERROR, kTraceError,
+            "GetSpeakerVolume() unable to get speaker volume");
+        return -1;
+    }
+
+    // scale: [0, MaxSpeakerVolume] -> [0, kMaxVolumeLevel]
+    if (_shared->audio_device()->MaxSpeakerVolume(&maxVol) != 0)
+    {
+        _shared->SetLastError(VE_GET_MIC_VOL_ERROR, kTraceError,
+            "GetSpeakerVolume() unable to get max speaker volume");
+        return -1;
+    }
+    // Round the value and avoid floating computation.
+    volume = (WebRtc_UWord32) ((spkrVol * kMaxVolumeLevel +
+        (int)(maxVol / 2)) / (maxVol));
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "GetSpeakerVolume() => volume=%d", volume);
+    return 0;
+}
+
+int VoEVolumeControlImpl::SetSystemOutputMute(bool enable)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetSystemOutputMute(enabled=%d)", enable);
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    if (_shared->audio_device()->SetSpeakerMute(enable) != 0)
+    {
+        _shared->SetLastError(VE_GET_MIC_VOL_ERROR, kTraceError,
+            "SpeakerMute() unable to Set speaker mute");
+        return -1;
+    }
+
+    return 0;
+}
+
+int VoEVolumeControlImpl::GetSystemOutputMute(bool& enabled)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetSystemOutputMute(enabled=?)");
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    if (_shared->audio_device()->SpeakerMute(&enabled) != 0)
+    {
+        _shared->SetLastError(VE_GET_MIC_VOL_ERROR, kTraceError,
+            "SpeakerMute() unable to get speaker mute state");
+        return -1;
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "GetSystemOutputMute() => %d", enabled);
+    return 0;
+}
+
+int VoEVolumeControlImpl::SetMicVolume(unsigned int volume)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "SetMicVolume(volume=%u)", volume);
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (volume > kMaxVolumeLevel)
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "SetMicVolume() invalid argument");
+        return -1;
+    }
+
+    WebRtc_UWord32 maxVol(0);
+    WebRtc_UWord32 micVol(0);
+
+    // scale: [0, kMaxVolumeLevel] -> [0,MaxMicrophoneVolume]
+    if (_shared->audio_device()->MaxMicrophoneVolume(&maxVol) != 0)
+    {
+        _shared->SetLastError(VE_MIC_VOL_ERROR, kTraceError,
+            "SetMicVolume() failed to get max volume");
+        return -1;
+    }
+
+    if (volume == kMaxVolumeLevel) {
+      // On Linux running pulse, users are able to set the volume above 100%
+      // through the volume control panel, where the +100% range is digital
+      // scaling. WebRTC does not support setting the volume above 100%, and
+      // simply ignores changing the volume if the user tries to set it to
+      // |kMaxVolumeLevel| while the current volume is higher than |maxVol|.
+      if (_shared->audio_device()->MicrophoneVolume(&micVol) != 0) {
+        _shared->SetLastError(VE_GET_MIC_VOL_ERROR, kTraceError,
+            "SetMicVolume() unable to get microphone volume");
+        return -1;
+      }
+      if (micVol >= maxVol)
+        return 0;
+    }
+
+    // Round the value and avoid floating point computation.
+    micVol = (WebRtc_UWord32) ((volume * maxVol +
+        (int)(kMaxVolumeLevel / 2)) / (kMaxVolumeLevel));
+
+    // set the actual volume using the audio mixer
+    if (_shared->audio_device()->SetMicrophoneVolume(micVol) != 0)
+    {
+        _shared->SetLastError(VE_MIC_VOL_ERROR, kTraceError,
+            "SetMicVolume() failed to set mic volume");
+        return -1;
+    }
+    return 0;
+}
+
+int VoEVolumeControlImpl::GetMicVolume(unsigned int& volume)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetMicVolume()");
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    WebRtc_UWord32 micVol(0);
+    WebRtc_UWord32 maxVol(0);
+
+    if (_shared->audio_device()->MicrophoneVolume(&micVol) != 0)
+    {
+        _shared->SetLastError(VE_GET_MIC_VOL_ERROR, kTraceError,
+            "GetMicVolume() unable to get microphone volume");
+        return -1;
+    }
+
+    // scale: [0, MaxMicrophoneVolume] -> [0, kMaxVolumeLevel]
+    if (_shared->audio_device()->MaxMicrophoneVolume(&maxVol) != 0)
+    {
+        _shared->SetLastError(VE_GET_MIC_VOL_ERROR, kTraceError,
+            "GetMicVolume() unable to get max microphone volume");
+        return -1;
+    }
+    if (micVol < maxVol) {
+      // Round the value and avoid floating point calculation.
+      volume = (WebRtc_UWord32) ((micVol * kMaxVolumeLevel +
+          (int)(maxVol / 2)) / (maxVol));
+    } else {
+      // Truncate the value to the kMaxVolumeLevel.
+      volume = kMaxVolumeLevel;
+    }
+
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "GetMicVolume() => volume=%d", volume);
+    return 0;
+}
+
+int VoEVolumeControlImpl::SetInputMute(int channel, bool enable)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "SetInputMute(channel=%d, enable=%d)", channel, enable);
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (channel == -1)
+    {
+        // Mute before demultiplexing <=> affects all channels
+        return _shared->transmit_mixer()->SetMute(enable);
+    }
+    else
+    {
+        // Mute after demultiplexing <=> affects one channel only
+        voe::ScopedChannel sc(_shared->channel_manager(), channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                "SetInputMute() failed to locate channel");
+            return -1;
+        }
+        return channelPtr->SetMute(enable);
+    }
+    return 0;
+}
+
+int VoEVolumeControlImpl::GetInputMute(int channel, bool& enabled)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetInputMute(channel=%d)", channel);
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (channel == -1)
+    {
+        enabled = _shared->transmit_mixer()->Mute();
+    }
+    else
+    {
+        voe::ScopedChannel sc(_shared->channel_manager(), channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                "SetInputMute() failed to locate channel");
+            return -1;
+        }
+        enabled = channelPtr->Mute();
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "GetInputMute() => enabled = %d", (int)enabled);
+    return 0;
+}
+
+int VoEVolumeControlImpl::SetSystemInputMute(bool enable)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "SetSystemInputMute(enabled=%d)", enable);
+
+    if (!_shared->statistics().Initialized())
+    {
+            _shared->SetLastError(VE_NOT_INITED, kTraceError);
+            return -1;
+    }
+
+    if (_shared->audio_device()->SetMicrophoneMute(enable) != 0)
+    {
+        _shared->SetLastError(VE_GET_MIC_VOL_ERROR, kTraceError,
+            "MicrophoneMute() unable to set microphone mute state");
+        return -1;
+    }
+
+    return 0;
+}
+
+int VoEVolumeControlImpl::GetSystemInputMute(bool& enabled)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetSystemInputMute(enabled=?)");
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    if (_shared->audio_device()->MicrophoneMute(&enabled) != 0)
+    {
+        _shared->SetLastError(VE_GET_MIC_VOL_ERROR, kTraceError,
+            "MicrophoneMute() unable to get microphone mute state");
+        return -1;
+    }
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "GetSystemInputMute() => %d", enabled);
+	return 0;
+}
+
+int VoEVolumeControlImpl::GetSpeechInputLevel(unsigned int& level)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetSpeechInputLevel()");
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    WebRtc_Word8 currentLevel = _shared->transmit_mixer()->AudioLevel();
+    level = static_cast<unsigned int> (currentLevel);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "GetSpeechInputLevel() => %d", level);
+    return 0;
+}
+
+int VoEVolumeControlImpl::GetSpeechOutputLevel(int channel,
+                                               unsigned int& level)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetSpeechOutputLevel(channel=%d, level=?)", channel);
+	
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (channel == -1)
+    {
+        return _shared->output_mixer()->GetSpeechOutputLevel(
+            (WebRtc_UWord32&)level);
+    }
+    else
+    {
+        voe::ScopedChannel sc(_shared->channel_manager(), channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                "GetSpeechOutputLevel() failed to locate channel");
+            return -1;
+        }
+        channelPtr->GetSpeechOutputLevel((WebRtc_UWord32&)level);
+    }
+    return 0;
+}
+
+int VoEVolumeControlImpl::GetSpeechInputLevelFullRange(unsigned int& level)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetSpeechInputLevelFullRange(level=?)");
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    WebRtc_Word16 currentLevel = _shared->transmit_mixer()->
+        AudioLevelFullRange();
+    level = static_cast<unsigned int> (currentLevel);
+    WEBRTC_TRACE(kTraceStateInfo, kTraceVoice,
+        VoEId(_shared->instance_id(), -1),
+        "GetSpeechInputLevelFullRange() => %d", level);
+    return 0;
+}
+
+int VoEVolumeControlImpl::GetSpeechOutputLevelFullRange(int channel,
+                                                        unsigned int& level)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetSpeechOutputLevelFullRange(channel=%d, level=?)", channel);
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (channel == -1)
+    {
+        return _shared->output_mixer()->GetSpeechOutputLevelFullRange(
+            (WebRtc_UWord32&)level);
+    }
+    else
+    {
+        voe::ScopedChannel sc(_shared->channel_manager(), channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                "GetSpeechOutputLevelFullRange() failed to locate channel");
+            return -1;
+        }
+        channelPtr->GetSpeechOutputLevelFullRange((WebRtc_UWord32&)level);
+    }
+    return 0;
+}
+
+int VoEVolumeControlImpl::SetChannelOutputVolumeScaling(int channel,
+                                                        float scaling)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "SetChannelOutputVolumeScaling(channel=%d, scaling=%3.2f)",
+               channel, scaling);
+    IPHONE_NOT_SUPPORTED();
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    if (scaling < kMinOutputVolumeScaling ||
+        scaling > kMaxOutputVolumeScaling)
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "SetChannelOutputVolumeScaling() invalid parameter");
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "SetChannelOutputVolumeScaling() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->SetChannelOutputVolumeScaling(scaling);
+}
+
+int VoEVolumeControlImpl::GetChannelOutputVolumeScaling(int channel,
+                                                        float& scaling)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetChannelOutputVolumeScaling(channel=%d, scaling=?)", channel);
+    IPHONE_NOT_SUPPORTED();
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+    voe::ScopedChannel sc(_shared->channel_manager(), channel);
+    voe::Channel* channelPtr = sc.ChannelPtr();
+    if (channelPtr == NULL)
+    {
+        _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+            "GetChannelOutputVolumeScaling() failed to locate channel");
+        return -1;
+    }
+    return channelPtr->GetChannelOutputVolumeScaling(scaling);
+}
+
+int VoEVolumeControlImpl::SetOutputVolumePan(int channel,
+                                             float left,
+                                             float right)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "SetOutputVolumePan(channel=%d, left=%2.1f, right=%2.1f)",
+               channel, left, right);
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    bool available(false);
+    _shared->audio_device()->StereoPlayoutIsAvailable(&available);
+    if (!available)
+    {
+        _shared->SetLastError(VE_FUNC_NO_STEREO, kTraceError,
+            "SetOutputVolumePan() stereo playout not supported");
+        return -1;
+    }
+    if ((left < kMinOutputVolumePanning)  ||
+        (left > kMaxOutputVolumePanning)  ||
+        (right < kMinOutputVolumePanning) ||
+        (right > kMaxOutputVolumePanning))
+    {
+        _shared->SetLastError(VE_INVALID_ARGUMENT, kTraceError,
+            "SetOutputVolumePan() invalid parameter");
+        return -1;
+    }
+
+    if (channel == -1)
+    {
+        // Master balance (affectes the signal after output mixing)
+        return _shared->output_mixer()->SetOutputVolumePan(left, right);
+    }
+    else
+    {
+        // Per-channel balance (affects the signal before output mixing)
+        voe::ScopedChannel sc(_shared->channel_manager(), channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                "SetOutputVolumePan() failed to locate channel");
+            return -1;
+        }
+        return channelPtr->SetOutputVolumePan(left, right);
+    }
+    return 0;
+}
+
+int VoEVolumeControlImpl::GetOutputVolumePan(int channel,
+                                             float& left,
+                                             float& right)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, VoEId(_shared->instance_id(), -1),
+               "GetOutputVolumePan(channel=%d, left=?, right=?)", channel);
+    ANDROID_NOT_SUPPORTED(_shared->statistics());
+    IPHONE_NOT_SUPPORTED();
+
+    if (!_shared->statistics().Initialized())
+    {
+        _shared->SetLastError(VE_NOT_INITED, kTraceError);
+        return -1;
+    }
+
+    bool available(false);
+    _shared->audio_device()->StereoPlayoutIsAvailable(&available);
+    if (!available)
+    {
+        _shared->SetLastError(VE_FUNC_NO_STEREO, kTraceError,
+            "GetOutputVolumePan() stereo playout not supported");
+        return -1;
+    }
+
+    if (channel == -1)
+    {
+        return _shared->output_mixer()->GetOutputVolumePan(left, right);
+    }
+    else
+    {
+        voe::ScopedChannel sc(_shared->channel_manager(), channel);
+        voe::Channel* channelPtr = sc.ChannelPtr();
+        if (channelPtr == NULL)
+        {
+            _shared->SetLastError(VE_CHANNEL_NOT_VALID, kTraceError,
+                "GetOutputVolumePan() failed to locate channel");
+            return -1;
+        }
+        return channelPtr->GetOutputVolumePan(left, right);
+    }
+    return 0;
+}
+
+#endif  // #ifdef WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
+
+}  // namespace webrtc
diff --git a/src/voice_engine/voe_volume_control_impl.h b/src/voice_engine/voe_volume_control_impl.h
new file mode 100644
index 0000000..9e1cc5a
--- /dev/null
+++ b/src/voice_engine/voe_volume_control_impl.h
@@ -0,0 +1,72 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOE_VOLUME_CONTROL_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOE_VOLUME_CONTROL_IMPL_H
+
+#include "voe_volume_control.h"
+
+#include "shared_data.h"
+
+namespace webrtc {
+
+class VoEVolumeControlImpl : public VoEVolumeControl
+{
+public:
+    virtual int SetSpeakerVolume(unsigned int volume);
+
+    virtual int GetSpeakerVolume(unsigned int& volume);
+
+    virtual int SetSystemOutputMute(bool enable);
+
+    virtual int GetSystemOutputMute(bool& enabled);
+
+    virtual int SetMicVolume(unsigned int volume);
+
+    virtual int GetMicVolume(unsigned int& volume);
+
+    virtual int SetInputMute(int channel, bool enable);
+
+    virtual int GetInputMute(int channel, bool& enabled);
+
+    virtual int SetSystemInputMute(bool enable);
+
+    virtual int GetSystemInputMute(bool& enabled);
+
+    virtual int GetSpeechInputLevel(unsigned int& level);
+
+    virtual int GetSpeechOutputLevel(int channel, unsigned int& level);
+
+    virtual int GetSpeechInputLevelFullRange(unsigned int& level);
+
+    virtual int GetSpeechOutputLevelFullRange(int channel,
+                                              unsigned int& level);
+
+    virtual int SetChannelOutputVolumeScaling(int channel, float scaling);
+
+    virtual int GetChannelOutputVolumeScaling(int channel, float& scaling);
+
+    virtual int SetOutputVolumePan(int channel, float left, float right);
+
+    virtual int GetOutputVolumePan(int channel, float& left, float& right);
+
+
+protected:
+    VoEVolumeControlImpl(voe::SharedData* shared);
+    virtual ~VoEVolumeControlImpl();
+
+private:
+    voe::SharedData* _shared;
+};
+
+}   // namespace webrtc
+
+#endif    // WEBRTC_VOICE_ENGINE_VOE_VOLUME_CONTROL_IMPL_H
+
diff --git a/src/voice_engine/voice_engine.gyp b/src/voice_engine/voice_engine.gyp
new file mode 100644
index 0000000..ebf13ca
--- /dev/null
+++ b/src/voice_engine/voice_engine.gyp
@@ -0,0 +1,23 @@
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'includes': [
+    '../build/common.gypi',
+    'voice_engine_core.gypi',
+  ],
+
+  # Test targets, excluded when building with Chromium.
+  'conditions': [
+    ['include_tests==1', {
+      'includes': [
+        'test/voice_engine_tests.gypi',
+      ],
+    }],
+  ],
+}
diff --git a/src/voice_engine/voice_engine_core.gypi b/src/voice_engine/voice_engine_core.gypi
new file mode 100644
index 0000000..354993a
--- /dev/null
+++ b/src/voice_engine/voice_engine_core.gypi
@@ -0,0 +1,158 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'variables': {
+    'voice_engine_dependencies': [
+      '<(webrtc_root)/common_audio/common_audio.gyp:resampler',
+      '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
+      '<(webrtc_root)/modules/modules.gyp:audio_coding_module',
+      '<(webrtc_root)/modules/modules.gyp:audio_conference_mixer',
+      '<(webrtc_root)/modules/modules.gyp:audio_device',
+      '<(webrtc_root)/modules/modules.gyp:audio_processing',
+      '<(webrtc_root)/modules/modules.gyp:media_file',
+      '<(webrtc_root)/modules/modules.gyp:rtp_rtcp',
+      '<(webrtc_root)/modules/modules.gyp:udp_transport',
+      '<(webrtc_root)/modules/modules.gyp:webrtc_utility',
+      '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+    ],
+  },
+  'targets': [
+    {
+      'target_name': 'voice_engine_core',
+      'type': '<(library)',
+      'dependencies': [
+        '<@(voice_engine_dependencies)',
+      ],
+      'include_dirs': [
+        'include',
+        '<(webrtc_root)/modules/audio_device/main/source',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          'include',
+        ],
+      },
+      'sources': [
+        '../common_types.h',
+        '../engine_configurations.h',
+        '../typedefs.h',
+        'include/voe_audio_processing.h',
+        'include/voe_base.h',
+        'include/voe_call_report.h',
+        'include/voe_codec.h',
+        'include/voe_dtmf.h',
+        'include/voe_encryption.h',
+        'include/voe_errors.h',
+        'include/voe_external_media.h',
+        'include/voe_file.h',
+        'include/voe_hardware.h',
+        'include/voe_neteq_stats.h',
+        'include/voe_network.h',
+        'include/voe_rtp_rtcp.h',
+        'include/voe_video_sync.h',
+        'include/voe_volume_control.h',
+        'channel.cc',
+        'channel.h',
+        'channel_manager.cc',
+        'channel_manager.h',
+        'channel_manager_base.cc',
+        'channel_manager_base.h',
+        'dtmf_inband.cc',
+        'dtmf_inband.h',
+        'dtmf_inband_queue.cc',
+        'dtmf_inband_queue.h',
+        'level_indicator.cc',
+        'level_indicator.h',
+        'monitor_module.cc',
+        'monitor_module.h',
+        'output_mixer.cc',
+        'output_mixer.h',
+        'output_mixer_internal.cc',
+        'output_mixer_internal.h',
+        'shared_data.cc',
+        'shared_data.h',
+        'statistics.cc',
+        'statistics.h',
+        'transmit_mixer.cc',
+        'transmit_mixer.h',
+        'utility.cc',
+        'utility.h',
+        'voe_audio_processing_impl.cc',
+        'voe_audio_processing_impl.h',
+        'voe_base_impl.cc',
+        'voe_base_impl.h',
+        'voe_call_report_impl.cc',
+        'voe_call_report_impl.h',
+        'voe_codec_impl.cc',
+        'voe_codec_impl.h',
+        'voe_dtmf_impl.cc',
+        'voe_dtmf_impl.h',
+        'voe_encryption_impl.cc',
+        'voe_encryption_impl.h',
+        'voe_external_media_impl.cc',
+        'voe_external_media_impl.h',
+        'voe_file_impl.cc',
+        'voe_file_impl.h',
+        'voe_hardware_impl.cc',
+        'voe_hardware_impl.h',
+        'voe_neteq_stats_impl.cc',
+        'voe_neteq_stats_impl.h',
+        'voe_network_impl.cc',
+        'voe_network_impl.h',
+        'voe_rtp_rtcp_impl.cc',
+        'voe_rtp_rtcp_impl.h',
+        'voe_video_sync_impl.cc',
+        'voe_video_sync_impl.h',
+        'voe_volume_control_impl.cc',
+        'voe_volume_control_impl.h',
+        'voice_engine_defines.h',
+        'voice_engine_impl.cc',
+        'voice_engine_impl.h',
+      ],
+    },
+  ],
+  'conditions': [
+    ['OS=="win"', {
+      'defines': ['WEBRTC_DRIFT_COMPENSATION_SUPPORTED',],
+    }],
+    ['include_tests==1', {
+      'targets': [
+        {
+          'target_name': 'voice_engine_unittests',
+          'type': 'executable',
+          'dependencies': [
+            'voice_engine_core',
+            '<(DEPTH)/testing/gtest.gyp:gtest',
+            '<(webrtc_root)/test/test.gyp:test_support_main',
+            # The rest are to satisfy the unittests' include chain.
+            # This would be unnecessary if we used qualified includes.
+            '<(webrtc_root)/common_audio/common_audio.gyp:resampler',
+            '<(webrtc_root)/modules/modules.gyp:audio_device',
+            '<(webrtc_root)/modules/modules.gyp:audio_processing',
+            '<(webrtc_root)/modules/modules.gyp:audio_coding_module',
+            '<(webrtc_root)/modules/modules.gyp:audio_conference_mixer',
+            '<(webrtc_root)/modules/modules.gyp:media_file',
+            '<(webrtc_root)/modules/modules.gyp:rtp_rtcp',
+            '<(webrtc_root)/modules/modules.gyp:udp_transport',
+            '<(webrtc_root)/modules/modules.gyp:webrtc_utility',
+            '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
+          ],
+          'include_dirs': [
+            'include',
+          ],
+          'sources': [
+            'channel_unittest.cc',
+            'output_mixer_unittest.cc',
+            'voe_audio_processing_unittest.cc',
+          ],
+        },
+      ], # targets
+    }], # include_tests
+  ], # conditions
+}
diff --git a/src/voice_engine/voice_engine_defines.h b/src/voice_engine/voice_engine_defines.h
new file mode 100644
index 0000000..7d4c729
--- /dev/null
+++ b/src/voice_engine/voice_engine_defines.h
@@ -0,0 +1,588 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+/*
+ *  This file contains common constants for VoiceEngine, as well as
+ *  platform specific settings and include files.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOICE_ENGINE_DEFINES_H
+#define WEBRTC_VOICE_ENGINE_VOICE_ENGINE_DEFINES_H
+
+#include "common_types.h"
+#include "engine_configurations.h"
+
+// ----------------------------------------------------------------------------
+//  Enumerators
+// ----------------------------------------------------------------------------
+
+namespace webrtc
+{
+
+// VolumeControl
+enum { kMinVolumeLevel = 0 };
+enum { kMaxVolumeLevel = 255 };
+// Min scale factor for per-channel volume scaling
+const float kMinOutputVolumeScaling = 0.0f;
+// Max scale factor for per-channel volume scaling
+const float kMaxOutputVolumeScaling = 10.0f;
+// Min scale factor for output volume panning
+const float kMinOutputVolumePanning = 0.0f;
+// Max scale factor for output volume panning
+const float kMaxOutputVolumePanning = 1.0f;
+
+// DTMF
+enum { kMinDtmfEventCode = 0 };                 // DTMF digit "0"
+enum { kMaxDtmfEventCode = 15 };                // DTMF digit "D"
+enum { kMinTelephoneEventCode = 0 };            // RFC4733 (Section 2.3.1)
+enum { kMaxTelephoneEventCode = 255 };          // RFC4733 (Section 2.3.1)
+enum { kMinTelephoneEventDuration = 100 };
+enum { kMaxTelephoneEventDuration = 60000 };    // Actual limit is 2^16
+enum { kMinTelephoneEventAttenuation = 0 };     // 0 dBm0
+enum { kMaxTelephoneEventAttenuation = 36 };    // -36 dBm0
+enum { kMinTelephoneEventSeparationMs = 100 };  // Min delta time between two
+                                                // telephone events
+enum { kVoiceEngineMaxIpPacketSizeBytes = 1500 };       // assumes Ethernet
+
+enum { kVoiceEngineMaxModuleVersionSize = 960 };
+
+// Base
+enum { kVoiceEngineVersionMaxMessageSize = 1024 };
+
+// Encryption
+// SRTP uses 30 bytes key length
+enum { kVoiceEngineMaxSrtpKeyLength = 30 };
+// SRTP minimum key/tag length for encryption level
+enum { kVoiceEngineMinSrtpEncryptLength = 16 };
+// SRTP maximum key/tag length for encryption level
+enum { kVoiceEngineMaxSrtpEncryptLength = 256 };
+// SRTP maximum key/tag length for authentication level,
+// HMAC SHA1 authentication type
+enum { kVoiceEngineMaxSrtpAuthSha1Length = 20 };
+// SRTP maximum tag length for authentication level,
+// null authentication type
+enum { kVoiceEngineMaxSrtpTagAuthNullLength = 12 };
+// SRTP maximum key length for authentication level,
+// null authentication type
+enum { kVoiceEngineMaxSrtpKeyAuthNullLength = 256 };
+
+// Audio processing
+enum { kVoiceEngineAudioProcessingDeviceSampleRateHz = 48000 };
+
+// Codec
+// Min init target rate for iSAC-wb
+enum { kVoiceEngineMinIsacInitTargetRateBpsWb = 10000 };
+// Max init target rate for iSAC-wb
+enum { kVoiceEngineMaxIsacInitTargetRateBpsWb = 32000 };
+// Min init target rate for iSAC-swb
+enum { kVoiceEngineMinIsacInitTargetRateBpsSwb = 10000 };
+// Max init target rate for iSAC-swb
+enum { kVoiceEngineMaxIsacInitTargetRateBpsSwb = 56000 };
+// Lowest max rate for iSAC-wb
+enum { kVoiceEngineMinIsacMaxRateBpsWb = 32000 };
+// Highest max rate for iSAC-wb
+enum { kVoiceEngineMaxIsacMaxRateBpsWb = 53400 };
+// Lowest max rate for iSAC-swb
+enum { kVoiceEngineMinIsacMaxRateBpsSwb = 32000 };
+// Highest max rate for iSAC-swb
+enum { kVoiceEngineMaxIsacMaxRateBpsSwb = 107000 };
+// Lowest max payload size for iSAC-wb
+enum { kVoiceEngineMinIsacMaxPayloadSizeBytesWb = 120 };
+// Highest max payload size for iSAC-wb
+enum { kVoiceEngineMaxIsacMaxPayloadSizeBytesWb = 400 };
+// Lowest max payload size for iSAC-swb
+enum { kVoiceEngineMinIsacMaxPayloadSizeBytesSwb = 120 };
+// Highest max payload size for iSAC-swb
+enum { kVoiceEngineMaxIsacMaxPayloadSizeBytesSwb = 600 };
+
+// VideoSync
+// Lowest minimum playout delay
+enum { kVoiceEngineMinMinPlayoutDelayMs = 0 };
+// Highest minimum playout delay
+enum { kVoiceEngineMaxMinPlayoutDelayMs = 1000 };
+
+// Network
+// Min packet-timeout time for received RTP packets
+enum { kVoiceEngineMinPacketTimeoutSec = 1 };
+// Max packet-timeout time for received RTP packets
+enum { kVoiceEngineMaxPacketTimeoutSec = 150 };
+// Min sample time for dead-or-alive detection
+enum { kVoiceEngineMinSampleTimeSec = 1 };
+// Max sample time for dead-or-alive detection
+enum { kVoiceEngineMaxSampleTimeSec = 150 };
+
+// RTP/RTCP
+// Min 4-bit ID for RTP extension (see section 4.2 in RFC 5285)
+enum { kVoiceEngineMinRtpExtensionId = 1 };
+// Max 4-bit ID for RTP extension
+enum { kVoiceEngineMaxRtpExtensionId = 14 };
+
+} // namespace webrtc
+
+// TODO(andrew): we shouldn't be using the precompiler for this.
+// Use enums or bools as appropriate.
+#define WEBRTC_AUDIO_PROCESSING_OFF false
+
+#define WEBRTC_VOICE_ENGINE_HP_DEFAULT_STATE true
+    // AudioProcessing HP is ON
+#define WEBRTC_VOICE_ENGINE_NS_DEFAULT_STATE  WEBRTC_AUDIO_PROCESSING_OFF
+    // AudioProcessing NS off
+#define WEBRTC_VOICE_ENGINE_AGC_DEFAULT_STATE true
+    // AudioProcessing AGC on
+#define WEBRTC_VOICE_ENGINE_EC_DEFAULT_STATE  WEBRTC_AUDIO_PROCESSING_OFF
+    // AudioProcessing EC off
+#define WEBRTC_VOICE_ENGINE_VAD_DEFAULT_STATE WEBRTC_AUDIO_PROCESSING_OFF
+    // AudioProcessing off
+#define WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_STATE WEBRTC_AUDIO_PROCESSING_OFF
+    // AudioProcessing RX AGC off
+#define WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_STATE WEBRTC_AUDIO_PROCESSING_OFF
+    // AudioProcessing RX NS off
+#define WEBRTC_VOICE_ENGINE_RX_HP_DEFAULT_STATE WEBRTC_AUDIO_PROCESSING_OFF
+    // AudioProcessing RX High Pass Filter off
+
+#define WEBRTC_VOICE_ENGINE_NS_DEFAULT_MODE NoiseSuppression::kModerate
+    // AudioProcessing NS moderate suppression
+#define WEBRTC_VOICE_ENGINE_AGC_DEFAULT_MODE GainControl::kAdaptiveAnalog
+    // AudioProcessing AGC analog digital combined
+#define WEBRTC_VOICE_ENGINE_RX_AGC_DEFAULT_MODE GainControl::kAdaptiveDigital
+    // AudioProcessing AGC mode
+#define WEBRTC_VOICE_ENGINE_RX_NS_DEFAULT_MODE NoiseSuppression::kModerate
+    // AudioProcessing RX NS mode
+
+// Macros
+// Comparison of two strings without regard to case
+#define STR_CASE_CMP(x,y) ::_stricmp(x,y)
+// Compares characters of two strings without regard to case
+#define STR_NCASE_CMP(x,y,n) ::_strnicmp(x,y,n)
+
+// ----------------------------------------------------------------------------
+//  Build information macros
+// ----------------------------------------------------------------------------
+
+#if defined(_DEBUG)
+#define BUILDMODE "d"
+#elif defined(DEBUG)
+#define BUILDMODE "d"
+#elif defined(NDEBUG)
+#define BUILDMODE "r"
+#else
+#define BUILDMODE "?"
+#endif
+
+#define BUILDTIME __TIME__
+#define BUILDDATE __DATE__
+
+// Example: "Oct 10 2002 12:05:30 r"
+#define BUILDINFO BUILDDATE " " BUILDTIME " " BUILDMODE
+
+// ----------------------------------------------------------------------------
+//  Macros
+// ----------------------------------------------------------------------------
+
+#if (defined(_DEBUG) && defined(_WIN32) && (_MSC_VER >= 1400))
+  #include <windows.h>
+  #include <stdio.h>
+  #define DEBUG_PRINT(...)      \
+  {                             \
+    char msg[256];              \
+    sprintf(msg, __VA_ARGS__);  \
+    OutputDebugStringA(msg);    \
+  }
+#else
+  // special fix for visual 2003
+  #define DEBUG_PRINT(exp)      ((void)0)
+#endif  // defined(_DEBUG) && defined(_WIN32)
+
+#define CHECK_CHANNEL(channel)  if (CheckChannel(channel) == -1) return -1;
+
+// ----------------------------------------------------------------------------
+//  Default Trace filter
+// ----------------------------------------------------------------------------
+
+#define WEBRTC_VOICE_ENGINE_DEFAULT_TRACE_FILTER \
+    kTraceStateInfo | kTraceWarning | kTraceError | kTraceCritical | \
+    kTraceApiCall
+
+// ----------------------------------------------------------------------------
+//  Inline functions
+// ----------------------------------------------------------------------------
+
+namespace webrtc
+{
+
+inline int VoEId(const int veId, const int chId)
+{
+    if (chId == -1)
+    {
+        const int dummyChannel(99);
+        return (int) ((veId << 16) + dummyChannel);
+    }
+    return (int) ((veId << 16) + chId);
+}
+
+inline int VoEModuleId(const int veId, const int chId)
+{
+    return (int) ((veId << 16) + chId);
+}
+
+// Convert module ID to internal VoE channel ID
+inline int VoEChannelId(const int moduleId)
+{
+    return (int) (moduleId & 0xffff);
+}
+
+} // namespace webrtc
+
+// ----------------------------------------------------------------------------
+//  Platform settings
+// ----------------------------------------------------------------------------
+
+// *** WINDOWS ***
+
+#if defined(_WIN32)
+
+  #pragma comment( lib, "winmm.lib" )
+
+  #ifndef WEBRTC_EXTERNAL_TRANSPORT
+    #pragma comment( lib, "ws2_32.lib" )
+  #endif
+
+// ----------------------------------------------------------------------------
+//  Enumerators
+// ----------------------------------------------------------------------------
+
+namespace webrtc
+{
+// Max number of supported channels
+enum { kVoiceEngineMaxNumOfChannels = 32 };
+// Max number of channels which can be played out simultaneously
+enum { kVoiceEngineMaxNumOfActiveChannels = 16 };
+} // namespace webrtc
+
+// ----------------------------------------------------------------------------
+//  Defines
+// ----------------------------------------------------------------------------
+
+  #include <windows.h>
+  #include <mmsystem.h> // timeGetTime
+
+  #define GET_TIME_IN_MS() ::timeGetTime()
+  #define SLEEP(x) ::Sleep(x)
+  // Comparison of two strings without regard to case
+  #define STR_CASE_CMP(x,y) ::_stricmp(x,y)
+  // Compares characters of two strings without regard to case
+  #define STR_NCASE_CMP(x,y,n) ::_strnicmp(x,y,n)
+
+// Default device for Windows PC
+  #define WEBRTC_VOICE_ENGINE_DEFAULT_DEVICE \
+    AudioDeviceModule::kDefaultCommunicationDevice
+
+#endif  // #if (defined(_WIN32)
+
+// *** LINUX ***
+
+#ifdef WEBRTC_LINUX
+
+#include <pthread.h>
+#include <sys/types.h>
+#include <sys/socket.h>
+#include <netinet/in.h>
+#include <arpa/inet.h>
+#ifndef QNX
+  #include <linux/net.h>
+#ifndef ANDROID
+  #include <sys/soundcard.h>
+#endif // ANDROID
+#endif // QNX
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <errno.h>
+#include <sys/stat.h>
+#include <sys/ioctl.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <sched.h>
+#include <time.h>
+#include <sys/time.h>
+
+#define DWORD unsigned long int
+#define WINAPI
+#define LPVOID void *
+#define FALSE 0
+#define TRUE 1
+#define UINT unsigned int
+#define UCHAR unsigned char
+#define TCHAR char
+#ifdef QNX
+#define _stricmp stricmp
+#else
+#define _stricmp strcasecmp
+#endif
+#define GetLastError() errno
+#define WSAGetLastError() errno
+#define LPCTSTR const char*
+#define LPCSTR const char*
+#define wsprintf sprintf
+#define TEXT(a) a
+#define _ftprintf fprintf
+#define _tcslen strlen
+#define FAR
+#define __cdecl
+#define LPSOCKADDR struct sockaddr *
+
+namespace
+{
+    void Sleep(unsigned long x)
+    {
+        timespec t;
+        t.tv_sec = x/1000;
+        t.tv_nsec = (x-(x/1000)*1000)*1000000;
+        nanosleep(&t,NULL);
+    }
+
+    DWORD timeGetTime()
+    {
+        struct timeval tv;
+        struct timezone tz;
+        unsigned long val;
+
+        gettimeofday(&tv, &tz);
+        val= tv.tv_sec*1000+ tv.tv_usec/1000;
+        return(val);
+    }
+}
+
+#define SLEEP(x) ::Sleep(x)
+#define GET_TIME_IN_MS timeGetTime
+
+// Default device for Linux and Android
+#define WEBRTC_VOICE_ENGINE_DEFAULT_DEVICE 0
+
+#ifdef ANDROID
+
+// ----------------------------------------------------------------------------
+//  Enumerators
+// ----------------------------------------------------------------------------
+
+namespace webrtc
+{
+  // Max number of supported channels
+  enum { kVoiceEngineMaxNumOfChannels = 2 };
+  // Max number of channels which can be played out simultaneously
+  enum { kVoiceEngineMaxNumOfActiveChannels = 2 };
+} // namespace webrtc
+
+// ----------------------------------------------------------------------------
+//  Defines
+// ----------------------------------------------------------------------------
+
+  // Always excluded for Android builds
+  #undef WEBRTC_CODEC_ISAC
+  #undef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+  #undef WEBRTC_CONFERENCING
+  #undef WEBRTC_TYPING_DETECTION
+
+  // Default audio processing states
+  #undef  WEBRTC_VOICE_ENGINE_NS_DEFAULT_STATE
+  #undef  WEBRTC_VOICE_ENGINE_AGC_DEFAULT_STATE
+  #undef  WEBRTC_VOICE_ENGINE_EC_DEFAULT_STATE
+  #define WEBRTC_VOICE_ENGINE_NS_DEFAULT_STATE  WEBRTC_AUDIO_PROCESSING_OFF
+  #define WEBRTC_VOICE_ENGINE_AGC_DEFAULT_STATE WEBRTC_AUDIO_PROCESSING_OFF
+  #define WEBRTC_VOICE_ENGINE_EC_DEFAULT_STATE  WEBRTC_AUDIO_PROCESSING_OFF
+
+  // Default audio processing modes
+  #undef  WEBRTC_VOICE_ENGINE_NS_DEFAULT_MODE
+  #undef  WEBRTC_VOICE_ENGINE_AGC_DEFAULT_MODE
+  #define WEBRTC_VOICE_ENGINE_NS_DEFAULT_MODE  \
+      NoiseSuppression::kModerate
+  #define WEBRTC_VOICE_ENGINE_AGC_DEFAULT_MODE \
+      GainControl::kAdaptiveDigital
+
+  #define ANDROID_NOT_SUPPORTED(stat)                         \
+      stat.SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError,   \
+                        "API call not supported");            \
+      return -1;
+
+#else // LINUX PC
+// ----------------------------------------------------------------------------
+//  Enumerators
+// ----------------------------------------------------------------------------
+
+namespace webrtc
+{
+  // Max number of supported channels
+  enum { kVoiceEngineMaxNumOfChannels = 32 };
+  // Max number of channels which can be played out simultaneously
+  enum { kVoiceEngineMaxNumOfActiveChannels = 16 };
+} // namespace webrtc
+
+// ----------------------------------------------------------------------------
+//  Defines
+// ----------------------------------------------------------------------------
+
+  #define ANDROID_NOT_SUPPORTED(stat)
+
+#endif // ANDROID - LINUX PC
+
+#else
+#define ANDROID_NOT_SUPPORTED(stat)
+#endif  // #ifdef WEBRTC_LINUX
+
+// *** WEBRTC_MAC ***
+// including iPhone
+
+#ifdef WEBRTC_MAC
+
+#include <pthread.h>
+#include <sys/types.h>
+#include <sys/socket.h>
+#include <netinet/in.h>
+#include <arpa/inet.h>
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <errno.h>
+#include <sys/stat.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <sched.h>
+#include <sys/time.h>
+#include <time.h>
+#include <AudioUnit/AudioUnit.h>
+#if !defined(MAC_IPHONE) && !defined(MAC_IPHONE_SIM)
+  #include <CoreServices/CoreServices.h>
+  #include <CoreAudio/CoreAudio.h>
+  #include <AudioToolbox/DefaultAudioOutput.h>
+  #include <AudioToolbox/AudioConverter.h>
+  #include <CoreAudio/HostTime.h>
+#endif
+
+#define DWORD unsigned long int
+#define WINAPI
+#define LPVOID void *
+#define FALSE 0
+#define TRUE 1
+#define SOCKADDR_IN struct sockaddr_in
+#define UINT unsigned int
+#define UCHAR unsigned char
+#define TCHAR char
+#define _stricmp strcasecmp
+#define GetLastError() errno
+#define WSAGetLastError() errno
+#define LPCTSTR const char*
+#define wsprintf sprintf
+#define TEXT(a) a
+#define _ftprintf fprintf
+#define _tcslen strlen
+#define FAR
+#define __cdecl
+#define LPSOCKADDR struct sockaddr *
+#define LPCSTR const char*
+#define ULONG unsigned long
+
+namespace
+{
+    void Sleep(unsigned long x)
+    {
+        timespec t;
+        t.tv_sec = x/1000;
+        t.tv_nsec = (x-(x/1000)*1000)*1000000;
+        nanosleep(&t,NULL);
+    }
+
+    DWORD WebRtcTimeGetTime()
+    {
+        struct timeval tv;
+        struct timezone tz;
+        unsigned long val;
+
+        gettimeofday(&tv, &tz);
+        val= tv.tv_sec*1000+ tv.tv_usec/1000;
+        return(val);
+    }
+}
+
+#define SLEEP(x) ::Sleep(x)
+#define GET_TIME_IN_MS WebRtcTimeGetTime
+
+// Default device for Mac and iPhone
+#define WEBRTC_VOICE_ENGINE_DEFAULT_DEVICE 0
+
+// iPhone specific
+#if defined(MAC_IPHONE) || defined(MAC_IPHONE_SIM)
+
+// ----------------------------------------------------------------------------
+//  Enumerators
+// ----------------------------------------------------------------------------
+
+namespace webrtc
+{
+  // Max number of supported channels
+  enum { kVoiceEngineMaxNumOfChannels = 2 };
+  // Max number of channels which can be played out simultaneously
+  enum { kVoiceEngineMaxNumOfActiveChannels = 2 };
+} // namespace webrtc
+
+// ----------------------------------------------------------------------------
+//  Defines
+// ----------------------------------------------------------------------------
+
+  // Always excluded for iPhone builds
+  #undef WEBRTC_CODEC_ISAC
+  #undef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+
+  #undef  WEBRTC_VOICE_ENGINE_NS_DEFAULT_STATE
+  #undef  WEBRTC_VOICE_ENGINE_AGC_DEFAULT_STATE
+  #undef  WEBRTC_VOICE_ENGINE_EC_DEFAULT_STATE
+  #define WEBRTC_VOICE_ENGINE_NS_DEFAULT_STATE  WEBRTC_AUDIO_PROCESSING_OFF
+  #define WEBRTC_VOICE_ENGINE_AGC_DEFAULT_STATE WEBRTC_AUDIO_PROCESSING_OFF
+  #define WEBRTC_VOICE_ENGINE_EC_DEFAULT_STATE  WEBRTC_AUDIO_PROCESSING_OFF
+
+  #undef  WEBRTC_VOICE_ENGINE_NS_DEFAULT_MODE
+  #undef  WEBRTC_VOICE_ENGINE_AGC_DEFAULT_MODE
+  #define WEBRTC_VOICE_ENGINE_NS_DEFAULT_MODE \
+      NoiseSuppression::kModerate
+  #define WEBRTC_VOICE_ENGINE_AGC_DEFAULT_MODE \
+      GainControl::kAdaptiveDigital
+
+  #define IPHONE_NOT_SUPPORTED() \
+    _engineStatistics.SetLastError(VE_FUNC_NOT_SUPPORTED, kTraceError, \
+                                   "API call not supported"); \
+    return -1;
+
+#else // Non-iPhone
+
+// ----------------------------------------------------------------------------
+//  Enumerators
+// ----------------------------------------------------------------------------
+
+namespace webrtc
+{
+  // Max number of supported channels
+  enum { kVoiceEngineMaxNumOfChannels = 32 };
+  // Max number of channels which can be played out simultaneously
+  enum { kVoiceEngineMaxNumOfActiveChannels = 16 };
+} // namespace webrtc
+
+// ----------------------------------------------------------------------------
+//  Defines
+// ----------------------------------------------------------------------------
+
+  #define IPHONE_NOT_SUPPORTED()
+#endif
+
+#else
+#define IPHONE_NOT_SUPPORTED()
+#endif  // #ifdef WEBRTC_MAC
+
+
+
+#endif // WEBRTC_VOICE_ENGINE_VOICE_ENGINE_DEFINES_H
diff --git a/src/voice_engine/voice_engine_impl.cc b/src/voice_engine/voice_engine_impl.cc
new file mode 100644
index 0000000..d0f06db
--- /dev/null
+++ b/src/voice_engine/voice_engine_impl.cc
@@ -0,0 +1,160 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "voice_engine_impl.h"
+#include "trace.h"
+
+#ifdef WEBRTC_ANDROID
+extern "C"
+{
+extern WebRtc_Word32 SetAndroidAudioDeviceObjects(
+    void* javaVM, void* env, void* context);
+} // extern "C"
+#endif
+
+namespace webrtc
+{
+
+// Counter to be ensure that we can add a correct ID in all static trace
+// methods. It is not the nicest solution, especially not since we already
+// have a counter in VoEBaseImpl. In other words, there is room for
+// improvement here.
+static WebRtc_Word32 gVoiceEngineInstanceCounter = 0;
+
+extern "C"
+{
+WEBRTC_DLLEXPORT VoiceEngine* GetVoiceEngine();
+
+VoiceEngine* GetVoiceEngine()
+{
+    VoiceEngineImpl* self = new VoiceEngineImpl();
+    VoiceEngine* ve = reinterpret_cast<VoiceEngine*>(self);
+    if (ve != NULL)
+    {
+        self->AddRef();  // First reference.  Released in VoiceEngine::Delete.
+        gVoiceEngineInstanceCounter++;
+    }
+    return ve;
+}
+} // extern "C"
+
+int VoiceEngineImpl::AddRef() {
+  return ++_ref_count;
+}
+
+// This implements the Release() method for all the inherited interfaces.
+int VoiceEngineImpl::Release() {
+  int new_ref = --_ref_count;
+  assert(new_ref >= 0);
+  if (new_ref == 0) {
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice, -1,
+                 "VoiceEngineImpl self deleting (voiceEngine=0x%p)",
+                 this);
+
+    delete this;
+  }
+
+  return new_ref;
+}
+
+VoiceEngine* VoiceEngine::Create()
+{
+#if (defined _WIN32)
+    HMODULE hmod_ = LoadLibrary(TEXT("VoiceEngineTestingDynamic.dll"));
+
+    if (hmod_)
+    {
+        typedef VoiceEngine* (*PfnGetVoiceEngine)(void);
+        PfnGetVoiceEngine pfn = (PfnGetVoiceEngine)GetProcAddress(
+                hmod_,"GetVoiceEngine");
+        if (pfn)
+        {
+            VoiceEngine* self = pfn();
+            return (self);
+        }
+    }
+#endif
+
+    return GetVoiceEngine();
+}
+
+int VoiceEngine::SetTraceFilter(const unsigned int filter)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice,
+                 VoEId(gVoiceEngineInstanceCounter, -1),
+                 "SetTraceFilter(filter=0x%x)", filter);
+
+    // Remember old filter
+    WebRtc_UWord32 oldFilter = 0;
+    Trace::LevelFilter(oldFilter);
+
+    // Set new filter
+    WebRtc_Word32 ret = Trace::SetLevelFilter(filter);
+
+    // If previous log was ignored, log again after changing filter
+    if (kTraceNone == oldFilter)
+    {
+        WEBRTC_TRACE(kTraceApiCall, kTraceVoice, -1,
+                     "SetTraceFilter(filter=0x%x)", filter);
+    }
+
+    return (ret);
+}
+
+int VoiceEngine::SetTraceFile(const char* fileNameUTF8,
+                              const bool addFileCounter)
+{
+    int ret = Trace::SetTraceFile(fileNameUTF8, addFileCounter);
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice,
+                 VoEId(gVoiceEngineInstanceCounter, -1),
+                 "SetTraceFile(fileNameUTF8=%s, addFileCounter=%d)",
+                 fileNameUTF8, addFileCounter);
+    return (ret);
+}
+
+int VoiceEngine::SetTraceCallback(TraceCallback* callback)
+{
+    WEBRTC_TRACE(kTraceApiCall, kTraceVoice,
+                 VoEId(gVoiceEngineInstanceCounter, -1),
+                 "SetTraceCallback(callback=0x%x)", callback);
+    return (Trace::SetTraceCallback(callback));
+}
+
+bool VoiceEngine::Delete(VoiceEngine*& voiceEngine)
+{
+    if (voiceEngine == NULL)
+        return false;
+
+    VoiceEngineImpl* s = reinterpret_cast<VoiceEngineImpl*>(voiceEngine);
+    // Release the reference that was added in GetVoiceEngine.
+    int ref = s->Release();
+    voiceEngine = NULL;
+
+    if (ref != 0) {
+        WEBRTC_TRACE(kTraceWarning, kTraceVoice, -1,
+            "VoiceEngine::Delete did not release the very last reference.  "
+            "%d references remain.", ref);
+    }
+
+    return true;
+}
+
+int VoiceEngine::SetAndroidObjects(void* javaVM, void* env, void* context)
+{
+#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_ANDROID_OPENSLES)
+    // modules/audio_device/main/source/android/audio_device_android_jni.cc
+    // contains the actual implementation.
+    return SetAndroidAudioDeviceObjects(javaVM, env, context);
+#else
+    return -1;
+#endif
+}
+
+} //namespace webrtc
diff --git a/src/voice_engine/voice_engine_impl.h b/src/voice_engine/voice_engine_impl.h
new file mode 100644
index 0000000..7db77be
--- /dev/null
+++ b/src/voice_engine/voice_engine_impl.h
@@ -0,0 +1,164 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef WEBRTC_VOICE_ENGINE_VOICE_ENGINE_IMPL_H
+#define WEBRTC_VOICE_ENGINE_VOICE_ENGINE_IMPL_H
+
+#include "atomic32.h"
+#include "engine_configurations.h"
+#include "voe_base_impl.h"
+
+#ifdef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
+#include "voe_audio_processing_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_CALL_REPORT_API
+#include "voe_call_report_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_CODEC_API
+#include "voe_codec_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_DTMF_API
+#include "voe_dtmf_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
+#include "voe_encryption_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
+#include "voe_external_media_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_FILE_API
+#include "voe_file_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_HARDWARE_API
+#include "voe_hardware_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_NETEQ_STATS_API
+#include "voe_neteq_stats_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_NETWORK_API
+#include "voe_network_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_RTP_RTCP_API
+#include "voe_rtp_rtcp_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
+#include "voe_video_sync_impl.h"
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
+#include "voe_volume_control_impl.h"
+#endif
+
+namespace webrtc
+{
+
+class VoiceEngineImpl : public voe::SharedData,  // Must be the first base class
+#ifdef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
+                        public VoEAudioProcessingImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_CALL_REPORT_API
+                        public VoECallReportImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_CODEC_API
+                        public VoECodecImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_DTMF_API
+                        public VoEDtmfImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
+                        public VoEEncryptionImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
+                        public VoEExternalMediaImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_FILE_API
+                        public VoEFileImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_HARDWARE_API
+                        public VoEHardwareImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_NETEQ_STATS_API
+                        public VoENetEqStatsImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_NETWORK_API
+                        public VoENetworkImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_RTP_RTCP_API
+                        public VoERTP_RTCPImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
+                        public VoEVideoSyncImpl,
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
+                        public VoEVolumeControlImpl,
+#endif
+                        public VoEBaseImpl
+{
+public:
+    VoiceEngineImpl() : 
+#ifdef WEBRTC_VOICE_ENGINE_AUDIO_PROCESSING_API
+        VoEAudioProcessingImpl(this),
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_CALL_REPORT_API
+        VoECallReportImpl(this),
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_CODEC_API
+        VoECodecImpl(this),
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_DTMF_API
+        VoEDtmfImpl(this),
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_ENCRYPTION_API
+        VoEEncryptionImpl(this),
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_EXTERNAL_MEDIA_API
+        VoEExternalMediaImpl(this),
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_FILE_API
+        VoEFileImpl(this),
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_HARDWARE_API
+        VoEHardwareImpl(this),
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_NETEQ_STATS_API
+        VoENetEqStatsImpl(this),
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_NETWORK_API
+        VoENetworkImpl(this),
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_RTP_RTCP_API
+        VoERTP_RTCPImpl(this),
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_VIDEO_SYNC_API
+        VoEVideoSyncImpl(this),
+#endif
+#ifdef WEBRTC_VOICE_ENGINE_VOLUME_CONTROL_API
+        VoEVolumeControlImpl(this),
+#endif
+        VoEBaseImpl(this),
+        _ref_count(0)
+    {
+    }
+    virtual ~VoiceEngineImpl()
+    {
+        assert(_ref_count.Value() == 0);
+    }
+
+    int AddRef();
+
+    // This implements the Release() method for all the inherited interfaces.
+    virtual int Release();
+
+private:
+    Atomic32 _ref_count;
+};
+
+} // namespace webrtc
+
+#endif // WEBRTC_VOICE_ENGINE_VOICE_ENGINE_IMPL_H
diff --git a/test/OWNERS b/test/OWNERS
deleted file mode 100644
index 777963e..0000000
--- a/test/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-phoglund@webrtc.org

-kjellander@webrtc.org

-ivinnichenko@webrtc.org

-amyfong@webrtc.org

diff --git a/test/data/audio_processing/aec_far.pcm b/test/data/audio_processing/aec_far.pcm
deleted file mode 100644
index fd6afc0..0000000
--- a/test/data/audio_processing/aec_far.pcm
+++ /dev/null
Binary files differ
diff --git a/test/data/audio_processing/aec_near.pcm b/test/data/audio_processing/aec_near.pcm
deleted file mode 100644
index 02c016c..0000000
--- a/test/data/audio_processing/aec_near.pcm
+++ /dev/null
Binary files differ
diff --git a/test/data/audio_processing/output_data_fixed.pb b/test/data/audio_processing/output_data_fixed.pb
deleted file mode 100644
index 81bc5af..0000000
--- a/test/data/audio_processing/output_data_fixed.pb
+++ /dev/null
Binary files differ
diff --git a/test/data/audio_processing/output_data_float.pb b/test/data/audio_processing/output_data_float.pb
deleted file mode 100644
index ccd7509..0000000
--- a/test/data/audio_processing/output_data_float.pb
+++ /dev/null
Binary files differ
diff --git a/test/functional_test/README b/test/functional_test/README
deleted file mode 100644
index a855135..0000000
--- a/test/functional_test/README
+++ /dev/null
@@ -1,41 +0,0 @@
-This test client is a simple functional test for WebRTC enabled Chrome build.
-
-The following is necessary to run the test:
-- A WebRTC Chrome binary.
-- A peerconnection_server binary (make peerconnection_server).
-
-It can be used in two scenarios:
-1. Single client calling itself with the server test page
-(peerconnection/samples/server/server_test.html) in loopback mode as a fake
-client.
-2. Call between two clients.
-
-To start the test for scenario (1):
-1. Start peerconnection_server.
-2. Start the WebRTC Chrome build: $ <path_to_chome_binary>/chrome
---enable-media-stream --enable-p2papi --user-data-dir=<path_to_data>
-<path_to_data> is where Chrome looks for all its states, use for example
-"temp/chrome_webrtc_data". If you don't always start the browser from the same
-directory, use an absolute path instead.
-3. Open the server test page, ensure loopback is enabled, choose a name (for
-example "loopback") and connect to the server.
-4. Open the test page, connect to the server, select the loopback peer, click
-call.
-
-To start the test for scenario (2):
-1. Start peerconnection_server.
-2. Start the WebRTC Chrome build, see scenario (1).
-3. Open the test page, connect to the server.
-4. On another machine, start the WebRTC Chrome build.
-5. Open the test page, connect to the server, select the other peer, click call.
-
-Note 1: There is currently a limitation so that the camera device can only be
-accessed once, even if in the same browser instance. Hence the need to use two
-machines for scenario (2).
-
-Note 2: The web page must normally be on a web server to be able to access the
-camera for security reasons.
-See http://blog.chromium.org/2008/12/security-in-depth-local-web-pages.html
-for more details on this topic. This can be overridden with the flag
---allow-file-access-from-files, in which case running it over the file://
-URI scheme works.
diff --git a/test/functional_test/webrtc_test.html b/test/functional_test/webrtc_test.html
deleted file mode 100644
index e2d8939..0000000
--- a/test/functional_test/webrtc_test.html
+++ /dev/null
@@ -1,594 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN">
-
-<!--
-Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
-
-Use of this source code is governed by a BSD-style license
-that can be found in the LICENSE file in the root of the source
-tree. An additional intellectual property rights grant can be found
-in the file PATENTS. All contributing project authors may
-be found in the AUTHORS file in the root of the source tree.
--->
-
-<html>
-
-<head>
-<title>WebRTC Test</title>
-
-<style type="text/css">
-body, input, button, select, table {
-  font-family:"Lucida Grande", "Lucida Sans", Verdana, Arial, sans-serif;
-  font-size: 13 px;
-}
-body, input:enable, button:enable, select:enable, table {
-  color: rgb(51, 51, 51);
-}
-h1 {font-size: 40 px;}
-</style>
-
-<script type="text/javascript">
-
-// TODO: Catch more exceptions
-
-var server;
-var myId = -1;
-var myName;
-var remoteId = -1;
-var remoteName;
-var request = null;
-var hangingGet = null;
-var pc = null;
-var localStream = null;
-var disconnecting = false;
-var callState = 0; // 0 - Not started, 1 - Call ongoing
-
-
-// General
-
-function toggleExtraButtons() {
-  document.getElementById("createPcBtn").hidden =
-    !document.getElementById("createPcBtn").hidden;
-  document.getElementById("test1Btn").hidden =
-    !document.getElementById("test1Btn").hidden;
-}
-
-function trace(txt) {
-  var elem = document.getElementById("debug");
-  elem.innerHTML += txt + "<br>";
-}
-
-function trace_warning(txt) {
-  var wtxt = "<b>" + txt + "</b>";
-  trace(wtxt);
-}
-
-function trace_exception(e, txt) {
-  var etxt = "<b>" + txt + "</b> (" + e.name + " / " + e.message + ")";
-  trace(etxt);
-}
-
-function setCallState(state) {
-  trace("Changing call state: " + callState + " -> " + state);
-  callState = state;
-}
-
-function checkPeerConnection() {
-  if (!pc) {
-    trace_warning("No PeerConnection object exists");
-    return 0;
-  }
-  return 1;
-}
-
-
-// Local stream generation
-
-function gotStream(s) {
-  var url = webkitURL.createObjectURL(s);
-  document.getElementById("localView").src = url;
-  trace("User has granted access to local media. url = " + url);
-  localStream = s;
-}
-
-function gotStreamFailed(error) {
-  alert("Failed to get access to local media. Error code was " + error.code +
-    ".");
-  trace_warning("Failed to get access to local media. Error code was " +
-    error.code);
-}
-
-function getUserMedia() {
-  try {
-    navigator.webkitGetUserMedia("video,audio", gotStream, gotStreamFailed);
-    trace("Requested access to local media");
-  } catch (e) {
-    trace_exception(e, "getUserMedia error");
-  }
-}
-
-
-// Peer list and remote peer handling
-
-function peerExists(id) {
-  try {
-    var peerList = document.getElementById("peers");
-    for (var i = 0; i < peerList.length; i++) {
-      if (parseInt(peerList.options[i].value) == id)
-        return true;
-    }
-  } catch (e) {
-    trace_exception(e, "Error searching for peer");
-  }
-  return false;
-}
-
-function addPeer(id, pname) {
-  var peerList = document.getElementById("peers");
-  var option = document.createElement("option");
-  option.text = pname;
-  option.value = id;
-  try {
-    // For IE earlier than version 8
-    peerList.add(option, x.options[null]);
-  } catch (e) {
-    peerList.add(option, null);
-  }
-}
-
-function removePeer(id) {
-  try {
-    var peerList = document.getElementById("peers");
-    for (var i = 0; i < peerList.length; i++) {
-      if (parseInt(peerList.options[i].value) == id) {
-        peerList.remove(i);
-        break;
-      }
-    }
-  } catch (e) {
-    trace_exception(e, "Error removing peer");
-  }
-}
-
-function clearPeerList() {
-  var peerList = document.getElementById("peers");
-  while (peerList.length > 0)
-    peerList.remove(0);
-}
-
-function setSelectedPeer(id) {
-  try {
-    var peerList = document.getElementById("peers");
-    for (var i = 0; i < peerList.length; i++) {
-      if (parseInt(peerList.options[i].value) == id) {
-        peerList.options[i].selected = true;
-        return true;
-      }
-    }
-  } catch (e) {
-    trace_exception(e, "Error setting selected peer");
-  }
-  return false;
-}
-
-function getPeerName(id) {
-  try {
-    var peerList = document.getElementById("peers");
-    for (var i = 0; i < peerList.length; i++) {
-      if (parseInt(peerList.options[i].value) == id) {
-        return peerList.options[i].text;
-      }
-    }
-  } catch (e) {
-    trace_exception(e, "Error finding peer name");
-    return;
-  }
-  return;
-}
-
-function storeRemoteInfo() {
-  try {
-    var peerList = document.getElementById("peers");
-    if (peerList.selectedIndex < 0) {
-      alert("Please select a peer.");
-      return false;
-    } else
-      remoteId = parseInt(peerList.options[peerList.selectedIndex].value);
-      remoteName = peerList.options[peerList.selectedIndex].text;
-  } catch (e) {
-    trace_exception(e, "Error storing remote peer info");
-    return false;
-  }
-  return true;
-}
-
-
-// Call control
-
-function createPeerConnection() {
-  if (pc) {
-    trace_warning("PeerConnection object already exists");
-  }
-  trace("Creating PeerConnection object");
-  try {
-    pc = new webkitPeerConnection("STUN stun.l.google.com:19302",
-      onSignalingMessage);
-  pc.onaddstream = onAddStream;
-  pc.onremovestream = onRemoveStream;
-  } catch (e) {
-    trace_exception(e, "Create PeerConnection error");
-  }
-}
-
-function doCall() {
-  if (!storeRemoteInfo())
-    return;
-  document.getElementById("call").disabled = true;
-  document.getElementById("peers").disabled = true;
-  createPeerConnection();
-  trace("Adding stream");
-  pc.addStream(localStream);
-  document.getElementById("hangup").disabled = false;
-  setCallState(1);
-}
-
-function hangUp() {
-  document.getElementById("hangup").disabled = true;
-  trace("Sending BYE to " + remoteName + " (ID " + remoteId + ")");
-  sendToPeer(remoteId, "BYE");
-  closeCall();
-}
-
-function closeCall() {
-  trace("Stopping showing remote stream");
-  document.getElementById("remoteView").src = "dummy";
-  if (pc) {
-    trace("Stopping call [pc.close()]");
-    pc.close();
-    pc = null;
-  } else
-    trace("No pc object to close");
-  remoteId = -1;
-  document.getElementById("call").disabled = false;
-  document.getElementById("peers").disabled = false;
-  setCallState(0);
-}
-
-
-// PeerConnection callbacks
-
-function onAddStream(e) {
-  var stream = e.stream;
-  var url = webkitURL.createObjectURL(stream);
-  document.getElementById("remoteView").src = url;
-  trace("Started showing remote stream. url = " + url);
-}
-
-function onRemoveStream(e) {
-  // Currently if we get this callback, call has ended.
-  document.getElementById("remoteView").src = "";
-  trace("Stopped showing remote stream");
-}
-
-function onSignalingMessage(msg) {
-  trace("Sending message to " + remoteName + " (ID " + remoteId + "):\n" + msg);
-  sendToPeer(remoteId, msg);
-}
-
-// TODO: Add callbacks onconnecting, onopen and onstatechange.
-
-
-// Server interaction
-
-function handleServerNotification(data) {
-  trace("Server notification: " + data);
-  var parsed = data.split(",");
-  if (parseInt(parsed[2]) == 1) { // New peer
-    var peerId = parseInt(parsed[1]);
-    if (!peerExists(peerId)) {
-      var peerList = document.getElementById("peers");
-      if (peerList.length == 1 && peerList.options[0].value == -1)
-        clearPeerList();
-      addPeer(peerId, parsed[0]);
-      document.getElementById("peers").disabled = false;
-      document.getElementById("call").disabled = false;
-    }
-  } else if (parseInt(parsed[2]) == 0) { // Removed peer
-    removePeer(parseInt(parsed[1]));
-    if (document.getElementById("peers").length == 0) {
-      document.getElementById("peers").disabled = true;
-      addPeer(-1, "No other peer connected");
-    }
-  }
-}
-
-function handlePeerMessage(peer_id, msg) {
-  var peerName = getPeerName(peer_id);
-  if (peerName == undefined) {
-    trace_warning("Received message from unknown peer (ID " + peer_id +
-      "), ignoring message:");
-    trace(msg);
-    return;
-  }
-  trace("Received message from " + peerName + " (ID " + peer_id + "):\n" + msg);
-  // Assuming we receive the message from the peer we want to communicate with.
-  // TODO: Only accept messages from peer we communicate with with if call is
-  // ongoing.
-  if (msg.search("BYE") == 0) {
-    // Other side has hung up.
-    document.getElementById("hangup").disabled = true;
-    closeCall()
-  } else {
-    if (!pc) {
-      // Other side is calling us, startup
-      if (!setSelectedPeer(peer_id)) {
-        trace_warning("Recevied message from unknown peer, ignoring");
-        return;
-      }
-      if (!storeRemoteInfo())
-        return;
-      document.getElementById("call").disabled = true;
-      document.getElementById("peers").disabled = true;
-      createPeerConnection();
-      try {
-        pc.processSignalingMessage(msg);
-      } catch (e) {
-        trace_exception(e, "Process signaling message error");
-      }
-      trace("Adding stream");
-      pc.addStream(localStream);
-      document.getElementById("hangup").disabled = false;
-    } else {
-      try {
-        pc.processSignalingMessage(msg);
-      } catch (e) {
-        trace_exception(e, "Process signaling message error");
-      }
-    }
-  }
-}
-
-function getIntHeader(r, name) {
-  var val = r.getResponseHeader(name);
-  trace("header value: " + val);
-  return val != null && val.length ? parseInt(val) : -1;
-}
-
-function hangingGetCallback() {
-  try {
-    if (hangingGet.readyState != 4 || disconnecting)
-      return;
-    if (hangingGet.status != 200) {
-      trace_warning("server error, status: " + hangingGet.status + ", text: " +
-        hangingGet.statusText);
-      disconnect();
-    } else {
-      var peer_id = getIntHeader(hangingGet, "Pragma");
-      if (peer_id == myId) {
-        handleServerNotification(hangingGet.responseText);
-      } else {
-        handlePeerMessage(peer_id, hangingGet.responseText);
-      }
-    }
-
-    if (hangingGet) {
-      hangingGet.abort();
-      hangingGet = null;
-    }
-
-    if (myId != -1)
-      window.setTimeout(startHangingGet, 0);
-  } catch (e) {
-    trace_exception(e, "Hanging get error");
-  }
-}
-
-function onHangingGetTimeout() {
-  trace("hanging get timeout. issuing again");
-  hangingGet.abort();
-  hangingGet = null;
-  if (myId != -1)
-    window.setTimeout(startHangingGet, 0);
-}
-
-function startHangingGet() {
-  try {
-    hangingGet = new XMLHttpRequest();
-    hangingGet.onreadystatechange = hangingGetCallback;
-    hangingGet.ontimeout = onHangingGetTimeout;
-    hangingGet.open("GET", server + "/wait?peer_id=" + myId, true);
-    hangingGet.send();  
-  } catch (e) {
-    trace_exception(e, "Start hanging get error");
-  }
-}
-
-function sendToPeer(peer_id, data) {
-  if (myId == -1) {
-    alert("Not connected.");
-    return;
-  }
-  if (peer_id == myId) {
-    alert("Can't send a message to oneself.");
-    return;
-  }
-  var r = new XMLHttpRequest();
-  r.open("POST", server + "/message?peer_id=" + myId + "&to=" + peer_id, false);
-  r.setRequestHeader("Content-Type", "text/plain");
-  r.send(data);
-  r = null;
-}
-
-function signInCallback() {
-  try {
-    if (request.readyState == 4) {
-      if (request.status == 200) {
-        var peers = request.responseText.split("\n");
-        myId = parseInt(peers[0].split(",")[1]);
-        trace("My id: " + myId);
-        clearPeerList();
-        var added = 0;
-        for (var i = 1; i < peers.length; ++i) {
-          if (peers[i].length > 0) {
-            trace("Peer " + i + ": " + peers[i]);
-            var parsed = peers[i].split(",");
-            addPeer(parseInt(parsed[1]), parsed[0]);
-            ++added;
-          }
-        }
-        if (added == 0)
-          addPeer(-1, "No other peer connected");
-        else {
-          document.getElementById("peers").disabled = false;
-          document.getElementById("call").disabled = false;
-        }
-        startHangingGet();
-        request = null;
-        document.getElementById("connect").disabled = true;
-        document.getElementById("disconnect").disabled = false;
-      }
-    }
-  } catch (e) {
-    trace_exception(e, "Sign in error");
-    document.getElementById("connect").disabled = false;
-  }
-}
-
-function signIn() {
-  try {
-    request = new XMLHttpRequest();
-    request.onreadystatechange = signInCallback;
-    request.open("GET", server + "/sign_in?" + myName, true);
-    request.send();
-  } catch (e) {
-    trace_exception(e, "Start sign in error");
-    document.getElementById("connect").disabled = false;
-  }
-}
-
-function connect() {
-  myName = document.getElementById("local").value.toLowerCase();
-  server = document.getElementById("server").value.toLowerCase();
-  if (myName.length == 0) {
-    alert("I need a name please.");
-    document.getElementById("local").focus();
-  } else {
-    // TODO: Disable connect button here, but we need a timeout and check if we
-    // have connected, if so enable it again.
-    signIn();
-  }
-}
-
-function disconnect() {
-  if (callState == 1)
-    hangUp();
-
-  disconnecting = true;
-  
-  if (request) {
-    request.abort();
-    request = null;
-  }
-
-  if (hangingGet) {
-    hangingGet.abort();
-    hangingGet = null;
-  }
-
-  if (myId != -1) {
-    request = new XMLHttpRequest();
-    request.open("GET", server + "/sign_out?peer_id=" + myId, false);
-    request.send();
-    request = null;
-    myId = -1;
-  }
-
-  clearPeerList();
-  addPeer(-1, "Not connected");
-  document.getElementById("connect").disabled = false;
-  document.getElementById("disconnect").disabled = true;
-  document.getElementById("peers").disabled = true;
-  document.getElementById("call").disabled = true;
-
-  disconnecting = false;
-}
-
-
-// Window event handling
-
-window.onload = getUserMedia;
-window.onbeforeunload = disconnect;
-
-
-</script>
-</head>
-
-<body>
-<h1>WebRTC</h1>
-You must have a WebRTC capable browser in order to make calls using this test
-page.<br>&nbsp;
-
-<table border="0">
-<tr>
- <td>Local Preview</td>
- <td>Remote Video</td>
-</tr>
-<tr>
- <td>
-  <video width="320" height="240" id="localView" autoplay="autoplay"></video>
- </td>
- <td>
-  <video width="640" height="480" id="remoteView" autoplay="autoplay"></video>
- </td>
-</tr>
-</table>
-
-<table border="0">
-<tr>
- <td valign="top">
-  <table border="0" cellpaddning="0" cellspacing="0">
-  <tr>
-   <td>Server:</td>
-   <td>
-    <input type="text" id="server" size="30" value="http://localhost:8888"/>
-   </td>
-  </tr>
-  <tr>
-   <td>Name:</td><td><input type="text" id="local" size="30" value="name"/></td>
-  </tr>
-  </table>
- </td>
- <td valign="top">
-  <button id="connect" onclick="connect();">Connect</button><br>
-  <button id="disconnect" onclick="disconnect();" disabled="true">Disconnect
-  </button>
- </td>
- <td>&nbsp;&nbsp;&nbsp;</td>
- <td valign="top">
-  Connected peers:<br>
-  <select id="peers" size="5" disabled="true">
-   <option value="-1">Not connected</option>
-  </select>
-  </td>
- <td valign="top">
-  <!--input type="text" id="peer_id" size="3" value="1"/><br-->
-  <button id="call" onclick="doCall();" disabled="true">Call</button><br>
-  <button id="hangup" onclick="hangUp();" disabled="true">Hang up</button><br>
- </td>
- <td>&nbsp;&nbsp;&nbsp;</td>
- <td valign="top">
-  <button onclick="toggleExtraButtons();">Toggle extra buttons (debug)</button>
-  <br>
-  <button id="createPcBtn" onclick="createPeerConnection();" hidden="true">
-  Create peer connection</button>
- </td>
-</tr>
-</table>
-
-<button onclick="document.getElementById('debug').innerHTML='';">Clear log
-</button>
-<pre id="debug"></pre>
-
-</body>
-
-</html>
-
diff --git a/test/metrics.gyp b/test/metrics.gyp
deleted file mode 100644
index 70483f9..0000000
--- a/test/metrics.gyp
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS.  All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-{
-  'includes': [
-    '../src/build/common.gypi',
-  ],
-  'targets': [
-    {
-      # The metrics code must be kept in its own GYP file in order to
-      # avoid a circular dependency error due to the dependency on libyuv.
-      # If the code would be put in test.gyp a circular dependency error during
-      # GYP generation would occur, because the libyuv.gypi unittest target
-      # depends on test_support_main. See issue #160 for more info.
-      'target_name': 'metrics',
-      'type': '<(library)',
-      'dependencies': [
-        '<(webrtc_root)/common_video/common_video.gyp:webrtc_libyuv',
-      ],
-      'include_dirs': [
-        '.',
-      ],
-      'sources': [
-        'testsupport/metrics/video_metrics.h',
-        'testsupport/metrics/video_metrics.cc',
-      ],
-    },
-    {
-      'target_name': 'metrics_unittests',
-      'type': 'executable',
-      'dependencies': [
-        'metrics',
-        '<(webrtc_root)/../test/test.gyp:test_support_main',
-        '<(webrtc_root)/../testing/gtest.gyp:gtest',
-      ],
-      'sources': [
-        'testsupport/metrics/video_metrics_unittest.cc',
-      ],
-    },
-  ],
-}
diff --git a/test/test.gyp b/test/test.gyp
deleted file mode 100644
index 86a57ff..0000000
--- a/test/test.gyp
+++ /dev/null
@@ -1,78 +0,0 @@
-# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
-#
-# Use of this source code is governed by a BSD-style license
-# that can be found in the LICENSE file in the root of the source
-# tree. An additional intellectual property rights grant can be found
-# in the file PATENTS.  All contributing project authors may
-# be found in the AUTHORS file in the root of the source tree.
-
-# TODO(andrew): consider moving test_support to src/base/test.
-{
-  'includes': [
-    '../src/build/common.gypi',
-  ],
-  'targets': [
-    {
-      'target_name': 'test_support',
-      'type': 'static_library',
-      'include_dirs': [
-        '.',
-      ],
-      'direct_dependent_settings': {
-        'include_dirs': [
-          '.', # Some includes are hierarchical
-        ],
-      },
-      'dependencies': [
-        '<(webrtc_root)/../testing/gtest.gyp:gtest',
-        '<(webrtc_root)/../testing/gmock.gyp:gmock',
-      ],
-      'all_dependent_settings': {
-        'include_dirs': [
-          '.',
-        ],
-      },
-      'sources': [
-        'test_suite.cc',
-        'test_suite.h',
-        'testsupport/fileutils.h',
-        'testsupport/fileutils.cc',
-        'testsupport/frame_reader.h',
-        'testsupport/frame_reader.cc',
-        'testsupport/frame_writer.h',
-        'testsupport/frame_writer.cc',
-        'testsupport/packet_reader.h',
-        'testsupport/packet_reader.cc',
-        'testsupport/mock/mock_frame_reader.h',
-        'testsupport/mock/mock_frame_writer.h',
-      ],
-    },
-    {
-      # Depend on this target when you want to have test_support but also the
-      # main method needed for gtest to execute!
-      'target_name': 'test_support_main',
-      'type': 'static_library',
-      'dependencies': [
-        'test_support',
-      ],
-      'sources': [
-        'run_all_unittests.cc',
-      ],
-    },
-    {
-      'target_name': 'test_support_unittests',
-      'type': 'executable',
-      'dependencies': [
-        'test_support_main',
-        '<(webrtc_root)/../testing/gtest.gyp:gtest',
-      ],
-      'sources': [
-        'testsupport/unittest_utils.h',
-        'testsupport/fileutils_unittest.cc',
-        'testsupport/frame_reader_unittest.cc',
-        'testsupport/frame_writer_unittest.cc',
-        'testsupport/packet_reader_unittest.cc',
-      ],
-    },
-  ],
-}
diff --git a/test/testsupport/fileutils.cc b/test/testsupport/fileutils.cc
deleted file mode 100644
index 1e6bbca..0000000
--- a/test/testsupport/fileutils.cc
+++ /dev/null
@@ -1,167 +0,0 @@
-/*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "testsupport/fileutils.h"
-
-#ifdef WIN32
-#include <direct.h>
-#define GET_CURRENT_DIR _getcwd
-#else
-#include <unistd.h>
-#define GET_CURRENT_DIR getcwd
-#endif
-
-#include <sys/stat.h>  // To check for directory existence.
-#ifndef S_ISDIR  // Not defined in stat.h on Windows.
-#define S_ISDIR(mode) (((mode) & S_IFMT) == S_IFDIR)
-#endif
-
-#include <cstdio>
-
-#include "typedefs.h"  // For architecture defines
-
-namespace webrtc {
-namespace test {
-
-#ifdef WIN32
-static const char* kPathDelimiter = "\\";
-#else
-static const char* kPathDelimiter = "/";
-#endif
-// The file we're looking for to identify the project root dir.
-static const char* kProjectRootFileName = "DEPS";
-static const char* kOutputDirName = "out";
-static const char* kFallbackPath = "./";
-static const char* kResourcesDirName = "resources";
-const char* kCannotFindProjectRootDir = "ERROR_CANNOT_FIND_PROJECT_ROOT_DIR";
-
-std::string ProjectRootPath() {
-  std::string working_dir = WorkingDir();
-  if (working_dir == kFallbackPath) {
-    return kCannotFindProjectRootDir;
-  }
-  // Check for our file that verifies the root dir.
-  std::string current_path(working_dir);
-  FILE* file = NULL;
-  int path_delimiter_index = current_path.find_last_of(kPathDelimiter);
-  while (path_delimiter_index > -1) {
-    std::string root_filename = current_path + kPathDelimiter +
-        kProjectRootFileName;
-    file = fopen(root_filename.c_str(), "r");
-    if (file != NULL) {
-      fclose(file);
-      return current_path + kPathDelimiter;
-    }
-    // Move up one directory in the directory tree.
-    current_path = current_path.substr(0, path_delimiter_index);
-    path_delimiter_index = current_path.find_last_of(kPathDelimiter);
-  }
-  // Reached the root directory.
-  fprintf(stderr, "Cannot find project root directory!\n");
-  return kCannotFindProjectRootDir;
-}
-
-std::string OutputPath() {
-  std::string path = ProjectRootPath();
-  if (path == kCannotFindProjectRootDir) {
-    return kFallbackPath;
-  }
-  path += kOutputDirName;
-  if (!CreateDirectory(path)) {
-    return kFallbackPath;
-  }
-  return path + kPathDelimiter;
-}
-
-std::string WorkingDir() {
-  char path_buffer[FILENAME_MAX];
-  if (!GET_CURRENT_DIR(path_buffer, sizeof(path_buffer))) {
-    fprintf(stderr, "Cannot get current directory!\n");
-    return kFallbackPath;
-  } else {
-    return std::string(path_buffer);
-  }
-}
-
-bool CreateDirectory(std::string directory_name) {
-  struct stat path_info = {0};
-  // Check if the path exists already:
-  if (stat(directory_name.c_str(), &path_info) == 0) {
-    if (!S_ISDIR(path_info.st_mode)) {
-      fprintf(stderr, "Path %s exists but is not a directory! Remove this "
-              "file and re-run to create the directory.\n",
-              directory_name.c_str());
-      return false;
-    }
-  } else {
-#ifdef WIN32
-    return _mkdir(directory_name.c_str()) == 0;
-#else
-    return mkdir(directory_name.c_str(),  S_IRWXU | S_IRWXG | S_IRWXO) == 0;
-#endif
-  }
-  return true;
-}
-
-bool FileExists(std::string file_name) {
-  struct stat file_info = {0};
-  return stat(file_name.c_str(), &file_info) == 0;
-}
-
-std::string ResourcePath(std::string name, std::string extension) {
-  std::string platform = "win";
-#ifdef WEBRTC_LINUX
-  platform = "linux";
-#endif  // WEBRTC_LINUX
-#ifdef WEBRTC_MAC
-  platform = "mac";
-#endif  // WEBRTC_MAC
-
-#ifdef WEBRTC_ARCH_64_BITS
-  std::string architecture = "64";
-#else
-  std::string architecture = "32";
-#endif  // WEBRTC_ARCH_64_BITS
-
-  std::string resources_path = ProjectRootPath() + kResourcesDirName +
-      kPathDelimiter;
-  std::string resource_file = resources_path + name + "_" + platform + "_" +
-      architecture + "." + extension;
-  if (FileExists(resource_file)) {
-    return resource_file;
-  }
-  // Try without architecture.
-  resource_file = resources_path + name + "_" + platform + "." + extension;
-  if (FileExists(resource_file)) {
-    return resource_file;
-  }
-  // Try without platform.
-  resource_file = resources_path + name + "_" + architecture + "." + extension;
-  if (FileExists(resource_file)) {
-    return resource_file;
-  }
-  // Fall back on name without architecture or platform.
-  return resources_path + name + "." + extension;
-}
-
-size_t GetFileSize(std::string filename) {
-  FILE* f = fopen(filename.c_str(), "rb");
-  size_t size = 0;
-  if (f != NULL) {
-    if (fseek(f, 0, SEEK_END) == 0) {
-      size = ftell(f);
-    }
-    fclose(f);
-  }
-  return size;
-}
-
-}  // namespace test
-}  // namespace webrtc
diff --git a/test/testsupport/fileutils.h b/test/testsupport/fileutils.h
deleted file mode 100644
index c89ac29..0000000
--- a/test/testsupport/fileutils.h
+++ /dev/null
@@ -1,143 +0,0 @@
-/*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#include <cstdio>
-
-// File utilities for testing purposes.
-//
-// The ProjectRootPath() method is a convenient way of getting an absolute
-// path to the project source tree root directory. Using this, it is easy to
-// refer to test resource files in a portable way.
-//
-// Notice that even if Windows platforms use backslash as path delimiter, it is
-// also supported to use slash, so there's no need for #ifdef checks in test
-// code for setting up the paths to the resource files.
-//
-// Example use:
-// Assume we have the following code being used in a test source file:
-// const std::string kInputFile = webrtc::test::ProjectRootPath() +
-//     "test/data/voice_engine/audio_long16.wav";
-// // Use the kInputFile for the tests...
-//
-// Then here's some example outputs for different platforms:
-// Linux:
-// * Source tree located in /home/user/webrtc/trunk
-// * Test project located in /home/user/webrtc/trunk/src/testproject
-// * Test binary compiled as:
-//   /home/user/webrtc/trunk/out/Debug/testproject_unittests
-// Then ProjectRootPath() will return /home/user/webrtc/trunk/ no matter if
-// the test binary is executed from standing in either of:
-// /home/user/webrtc/trunk
-// or
-// /home/user/webrtc/trunk/out/Debug
-// (or any other directory below the trunk for that matter).
-//
-// Windows:
-// * Source tree located in C:\Users\user\webrtc\trunk
-// * Test project located in C:\Users\user\webrtc\trunk\src\testproject
-// * Test binary compiled as:
-//   C:\Users\user\webrtc\trunk\src\testproject\Debug\testproject_unittests.exe
-// Then ProjectRootPath() will return C:\Users\user\webrtc\trunk\ when the
-// test binary is executed from inside Visual Studio.
-// It will also return the same path if the test is executed from a command
-// prompt standing in C:\Users\user\webrtc\trunk\src\testproject\Debug
-//
-// Mac:
-// * Source tree located in /Users/user/webrtc/trunk
-// * Test project located in /Users/user/webrtc/trunk/src/testproject
-// * Test binary compiled as:
-//   /Users/user/webrtc/trunk/xcodebuild/Debug/testproject_unittests
-// Then ProjectRootPath() will return /Users/user/webrtc/trunk/ no matter if
-// the test binary is executed from standing in either of:
-// /Users/user/webrtc/trunk
-// or
-// /Users/user/webrtc/trunk/out/Debug
-// (or any other directory below the trunk for that matter).
-
-#ifndef WEBRTC_TEST_TESTSUPPORT_FILEUTILS_H_
-#define WEBRTC_TEST_TESTSUPPORT_FILEUTILS_H_
-
-#include <string>
-
-namespace webrtc {
-namespace test {
-
-// This is the "directory" returned if the ProjectPath() function fails
-// to find the project root.
-extern const char* kCannotFindProjectRootDir;
-
-// Finds the root dir of the project, to be able to set correct paths to
-// resource files used by tests.
-// The implementation is simple: it just looks for the file defined by
-// kProjectRootFileName, starting in the current directory (the working
-// directory) and then steps upward until it is found (or it is at the root of
-// the file system).
-// If the current working directory is above the project root dir, it will not
-// be found.
-//
-// If symbolic links occur in the path they will be resolved and the actual
-// directory will be returned.
-//
-// Returns the absolute path to the project root dir (usually the trunk dir)
-// WITH a trailing path delimiter.
-// If the project root is not found, the string specified by
-// kCannotFindProjectRootDir is returned.
-std::string ProjectRootPath();
-
-// Creates and returns the absolute path to the output directory where log files
-// and other test artifacts should be put. The output directory is always a
-// directory named "out" at the top-level of the project, i.e. a subfolder to
-// the path returned by ProjectRootPath().
-//
-// Details described for ProjectRootPath() apply here too.
-//
-// Returns the path WITH a trailing path delimiter. If the project root is not
-// found, the current working directory ("./") is returned as a fallback.
-std::string OutputPath();
-
-// Returns a path to a resource file for the currently executing platform.
-// Adapts to what filenames are currently present in the
-// [project-root]/resources/ dir.
-// Returns an absolute path according to this priority list (the directory
-// part of the path is left out for readability):
-// 1. [name]_[platform]_[architecture].[extension]
-// 2. [name]_[platform].[extension]
-// 3. [name]_[architecture].[extension]
-// 4. [name].[extension]
-// Where
-// * platform is either of "win", "mac" or "linux".
-// * architecture is either of "32" or "64".
-//
-// Arguments:
-//    name - Name of the resource file. If a plain filename (no directory path)
-//           is supplied, the file is assumed to be located in resources/
-//           If a directory path is prepended to the filename, a subdirectory
-//           hierarchy reflecting that path is assumed to be present.
-//    extension - File extension, without the dot, i.e. "bmp" or "yuv".
-std::string ResourcePath(std::string name, std::string extension);
-
-// Gets the current working directory for the executing program.
-// Returns "./" if for some reason it is not possible to find the working
-// directory.
-std::string WorkingDir();
-
-// Creates a directory if it not already exists.
-// Returns true if successful. Will print an error message to stderr and return
-// false if a file with the same name already exists.
-bool CreateDirectory(std::string directory_name);
-
-// File size of the supplied file in bytes. Will return 0 if the file is
-// empty or if the file does not exist/is readable.
-size_t GetFileSize(std::string filename);
-
-}  // namespace test
-}  // namespace webrtc
-
-#endif  // WEBRTC_TEST_TESTSUPPORT_FILEUTILS_H_
diff --git a/test/testsupport/fileutils_unittest.cc b/test/testsupport/fileutils_unittest.cc
deleted file mode 100644
index a500a07..0000000
--- a/test/testsupport/fileutils_unittest.cc
+++ /dev/null
@@ -1,191 +0,0 @@
-/*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "testsupport/fileutils.h"
-
-#include <cstdio>
-#include <list>
-#include <string>
-
-#include "gtest/gtest.h"
-
-#ifdef WIN32
-static const char* kPathDelimiter = "\\";
-#else
-static const char* kPathDelimiter = "/";
-#endif
-
-static const std::string kDummyDir = "file_utils_unittest_dummy_dir";
-static const std::string kResourcesDir = "resources";
-static const std::string kTestName = "fileutils_unittest";
-static const std::string kExtension = "tmp";
-
-typedef std::list<std::string> FileList;
-
-namespace webrtc {
-
-// Test fixture to restore the working directory between each test, since some
-// of them change it with chdir during execution (not restored by the
-// gtest framework).
-class FileUtilsTest : public testing::Test {
- protected:
-  FileUtilsTest() {
-  }
-  virtual ~FileUtilsTest() {}
-  // Runs before the first test
-  static void SetUpTestCase() {
-    original_working_dir_ = webrtc::test::WorkingDir();
-    std::string resources_path = original_working_dir_ + kPathDelimiter +
-        kResourcesDir + kPathDelimiter;
-    webrtc::test::CreateDirectory(resources_path);
-
-    files_.push_back(resources_path + kTestName + "." + kExtension);
-    files_.push_back(resources_path + kTestName + "_32." + kExtension);
-    files_.push_back(resources_path + kTestName + "_64." + kExtension);
-    files_.push_back(resources_path + kTestName + "_linux." + kExtension);
-    files_.push_back(resources_path + kTestName + "_mac." + kExtension);
-    files_.push_back(resources_path + kTestName + "_win." + kExtension);
-    files_.push_back(resources_path + kTestName + "_linux_32." + kExtension);
-    files_.push_back(resources_path + kTestName + "_mac_32." + kExtension);
-    files_.push_back(resources_path + kTestName + "_win_32." + kExtension);
-    files_.push_back(resources_path + kTestName + "_linux_64." + kExtension);
-    files_.push_back(resources_path + kTestName + "_mac_64." + kExtension);
-    files_.push_back(resources_path + kTestName + "_win_64." + kExtension);
-
-    // Now that the resources dir exists, write some empty test files into it.
-    for (FileList::iterator file_it = files_.begin();
-        file_it != files_.end(); ++file_it) {
-      FILE* file = fopen(file_it->c_str(), "wb");
-      ASSERT_TRUE(file != NULL) << "Failed to write file: " << file_it->c_str();
-      ASSERT_GT(fprintf(file, "%s",  "Dummy data"), 0);
-      fclose(file);
-    }
-    // Create a dummy subdir that can be chdir'ed into for testing purposes.
-    empty_dummy_dir_ = original_working_dir_ + kPathDelimiter + kDummyDir;
-    webrtc::test::CreateDirectory(empty_dummy_dir_);
-  }
-  static void TearDownTestCase() {
-    // Clean up all resource files written
-    for (FileList::iterator file_it = files_.begin();
-            file_it != files_.end(); ++file_it) {
-      remove(file_it->c_str());
-    }
-    std::remove(empty_dummy_dir_.c_str());
-  }
-  void SetUp() {
-    ASSERT_EQ(chdir(original_working_dir_.c_str()), 0);
-  }
-  void TearDown() {
-    ASSERT_EQ(chdir(original_working_dir_.c_str()), 0);
-  }
- protected:
-  static FileList files_;
-  static std::string empty_dummy_dir_;
- private:
-  static std::string original_working_dir_;
-};
-
-FileList FileUtilsTest::files_;
-std::string FileUtilsTest::original_working_dir_ = "";
-std::string FileUtilsTest::empty_dummy_dir_ = "";
-
-// Tests that the project root path is returned for the default working
-// directory that is automatically set when the test executable is launched.
-// The test is not fully testing the implementation, since we cannot be sure
-// of where the executable was launched from.
-// The test will fail if the top level directory is not named "trunk".
-TEST_F(FileUtilsTest, ProjectRootPathFromUnchangedWorkingDir) {
-  std::string path = webrtc::test::ProjectRootPath();
-  std::string expected_end = "trunk";
-  expected_end = kPathDelimiter + expected_end + kPathDelimiter;
-  ASSERT_EQ(path.length() - expected_end.length(), path.find(expected_end));
-}
-
-// Similar to the above test, but for the output dir
-TEST_F(FileUtilsTest, OutputPathFromUnchangedWorkingDir) {
-  std::string path = webrtc::test::OutputPath();
-  std::string expected_end = "out";
-  expected_end = kPathDelimiter + expected_end + kPathDelimiter;
-  ASSERT_EQ(path.length() - expected_end.length(), path.find(expected_end));
-}
-
-// Tests setting the current working directory to a directory three levels
-// deeper from the current one. Then testing that the project path returned
-// is still the same, when the function under test is called again.
-TEST_F(FileUtilsTest, ProjectRootPathFromDeeperWorkingDir) {
-  std::string path = webrtc::test::ProjectRootPath();
-  std::string original_working_dir = path;  // This is the correct project root
-  // Change to a subdirectory path.
-  ASSERT_EQ(0, chdir(empty_dummy_dir_.c_str()));
-  ASSERT_EQ(original_working_dir, webrtc::test::ProjectRootPath());
-}
-
-// Similar to the above test, but for the output dir
-TEST_F(FileUtilsTest, OutputPathFromDeeperWorkingDir) {
-  std::string path = webrtc::test::OutputPath();
-  std::string original_working_dir = path;
-  ASSERT_EQ(0, chdir(empty_dummy_dir_.c_str()));
-  ASSERT_EQ(original_working_dir, webrtc::test::OutputPath());
-}
-
-// Tests with current working directory set to a directory higher up in the
-// directory tree than the project root dir. This case shall return a specified
-// error string as a directory (which will be an invalid path).
-TEST_F(FileUtilsTest, ProjectRootPathFromRootWorkingDir) {
-  // Change current working dir to the root of the current file system
-  // (this will always be "above" our project root dir).
-  ASSERT_EQ(0, chdir(kPathDelimiter));
-  ASSERT_EQ(webrtc::test::kCannotFindProjectRootDir,
-            webrtc::test::ProjectRootPath());
-}
-
-// Similar to the above test, but for the output dir
-TEST_F(FileUtilsTest, OutputPathFromRootWorkingDir) {
-  ASSERT_EQ(0, chdir(kPathDelimiter));
-  ASSERT_EQ("./", webrtc::test::OutputPath());
-}
-
-// Only tests that the code executes
-TEST_F(FileUtilsTest, CreateDirectory) {
-  std::string directory = "fileutils-unittest-empty-dir";
-  // Make sure it's removed if a previous test has failed:
-  std::remove(directory.c_str());
-  ASSERT_TRUE(webrtc::test::CreateDirectory(directory));
-  std::remove(directory.c_str());
-}
-
-TEST_F(FileUtilsTest, WorkingDirReturnsValue) {
-  // Hard to cover all platforms. Just test that it returns something without
-  // crashing:
-  std::string working_dir = webrtc::test::WorkingDir();
-  ASSERT_GT(working_dir.length(), 0u);
-}
-
-// Due to multiple platforms, it is hard to make a complete test for
-// ResourcePath. Manual testing has been performed by removing files and
-// verified the result confirms with the specified documentation for the
-// function.
-TEST_F(FileUtilsTest, ResourcePathReturnsValue) {
-  std::string resource = webrtc::test::ResourcePath(kTestName, kExtension);
-  ASSERT_GT(resource.find(kTestName), 0u);
-  ASSERT_GT(resource.find(kExtension), 0u);
-  ASSERT_EQ(0, chdir(kPathDelimiter));
-  ASSERT_EQ("./", webrtc::test::OutputPath());
-}
-
-TEST_F(FileUtilsTest, GetFileSizeExistingFile) {
-  ASSERT_GT(webrtc::test::GetFileSize(files_.front()), 0u);
-}
-
-TEST_F(FileUtilsTest, GetFileSizeNonExistingFile) {
-  ASSERT_EQ(0u, webrtc::test::GetFileSize("non-existing-file.tmp"));
-}
-
-}  // namespace webrtc
diff --git a/test/testsupport/frame_writer.h b/test/testsupport/frame_writer.h
deleted file mode 100644
index abc5d35..0000000
--- a/test/testsupport/frame_writer.h
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_TEST_TESTSUPPORT_FRAME_WRITER_H_
-#define WEBRTC_TEST_TESTSUPPORT_FRAME_WRITER_H_
-
-#include <cstdio>
-#include <string>
-
-#include "typedefs.h"
-
-namespace webrtc {
-namespace test {
-
-// Handles writing of video files.
-class FrameWriter {
- public:
-  virtual ~FrameWriter() {}
-
-  // Initializes the file handler, i.e. opens the input and output files etc.
-  // This must be called before reading or writing frames has started.
-  // Returns false if an error has occurred, in addition to printing to stderr.
-  virtual bool Init() = 0;
-
-  // Writes a frame of the configured frame length to the output file.
-  // Returns true if the write was successful, false otherwise.
-  virtual bool WriteFrame(WebRtc_UWord8* frame_buffer) = 0;
-
-  // Closes the output file if open. Essentially makes this class impossible
-  // to use anymore. Will also be invoked by the destructor.
-  virtual void Close() = 0;
-
-  // Frame length in bytes of a single frame image.
-  virtual int FrameLength() = 0;
-};
-
-class FrameWriterImpl : public FrameWriter {
- public:
-  // Creates a file handler. The input file is assumed to exist and be readable
-  // and the output file must be writable.
-  // Parameters:
-  //   output_filename         The file to write. Will be overwritten if already
-  //                           existing.
-  //   frame_length_in_bytes   The size of each frame.
-  //                           For YUV: 3*width*height/2
-  FrameWriterImpl(std::string output_filename, int frame_length_in_bytes);
-  virtual ~FrameWriterImpl();
-  bool Init();
-  bool WriteFrame(WebRtc_UWord8* frame_buffer);
-  void Close();
-  int FrameLength() { return frame_length_in_bytes_; }
-
- private:
-  std::string output_filename_;
-  int frame_length_in_bytes_;
-  int number_of_frames_;
-  FILE* output_file_;
-};
-
-}  // namespace test
-}  // namespace webrtc
-
-#endif  // WEBRTC_TEST_TESTSUPPORT_FRAME_WRITER_H_
diff --git a/test/testsupport/metrics/video_metrics.cc b/test/testsupport/metrics/video_metrics.cc
deleted file mode 100644
index 9e61ec8..0000000
--- a/test/testsupport/metrics/video_metrics.cc
+++ /dev/null
@@ -1,187 +0,0 @@
-/*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "testsupport/metrics/video_metrics.h"
-
-#include <algorithm> // min_element, max_element
-#include <cassert>
-#include <cstdio>
-
-#include "common_video/libyuv/include/libyuv.h"
-
-namespace webrtc {
-namespace test {
-
-// Used for calculating min and max values
-static bool LessForFrameResultValue (const FrameResult& s1,
-                                     const FrameResult& s2) {
-    return s1.value < s2.value;
-}
-
-enum VideoMetricsType { kPSNR, kSSIM, kBoth };
-
-// Calculates metrics for a frame and adds statistics to the result for it.
-void CalculateFrame(VideoMetricsType video_metrics_type,
-                    uint8_t* ref,
-                    uint8_t* test,
-                    int width,
-                    int height,
-                    int frame_number,
-                    QualityMetricsResult* result) {
-  FrameResult frame_result;
-  frame_result.frame_number = frame_number;
-  switch (video_metrics_type) {
-    case kPSNR:
-      frame_result.value = I420PSNR(ref, test, width, height);
-      break;
-    case kSSIM:
-      frame_result.value = I420SSIM(ref, test, width, height);
-      break;
-    default:
-      assert(false);
-  }
-  result->frames.push_back(frame_result);
-}
-
-// Calculates average, min and max values for the supplied struct, if non-NULL.
-void CalculateStats(QualityMetricsResult* result) {
-  if (result == NULL || result->frames.size() == 0) {
-    return;
-  }
-  // Calculate average
-  std::vector<FrameResult>::iterator iter;
-  double metrics_values_sum = 0.0;
-  for (iter = result->frames.begin(); iter != result->frames.end(); ++iter) {
-    metrics_values_sum += iter->value;
-  }
-  result->average = metrics_values_sum / result->frames.size();
-
-  // Calculate min/max statistics
-  iter = min_element(result->frames.begin(), result->frames.end(),
-                     LessForFrameResultValue);
-  result->min = iter->value;
-  result->min_frame_number = iter->frame_number;
-  iter = max_element(result->frames.begin(), result->frames.end(),
-                     LessForFrameResultValue);
-  result->max = iter->value;
-  result->max_frame_number = iter->frame_number;
-}
-
-// Single method that handles all combinations of video metrics calculation, to
-// minimize code duplication. Either psnr_result or ssim_result may be NULL,
-// depending on which VideoMetricsType is targeted.
-int CalculateMetrics(VideoMetricsType video_metrics_type,
-                     const char* ref_filename,
-                     const char* test_filename,
-                     int width,
-                     int height,
-                     QualityMetricsResult* psnr_result,
-                     QualityMetricsResult* ssim_result) {
-  assert(ref_filename != NULL);
-  assert(test_filename != NULL);
-  assert(width > 0);
-  assert(height > 0);
-
-  FILE* ref_fp = fopen(ref_filename, "rb");
-  if (ref_fp == NULL) {
-    // cannot open reference file
-    fprintf(stderr, "Cannot open file %s\n", ref_filename);
-    return -1;
-  }
-  FILE* test_fp = fopen(test_filename, "rb");
-  if (test_fp == NULL) {
-    // cannot open test file
-    fprintf(stderr, "Cannot open file %s\n", test_filename);
-    fclose(ref_fp);
-    return -2;
-  }
-  int frame_number = 0;
-
-  // Allocating size for one I420 frame.
-  const int frame_length = 3 * width * height >> 1;
-  uint8_t* ref = new uint8_t[frame_length];
-  uint8_t* test = new uint8_t[frame_length];
-
-  int ref_bytes = fread(ref, 1, frame_length, ref_fp);
-  int test_bytes = fread(test, 1, frame_length, test_fp);
-  while (ref_bytes == frame_length && test_bytes == frame_length) {
-    switch (video_metrics_type) {
-      case kPSNR:
-        CalculateFrame(kPSNR, ref, test, width, height, frame_number,
-                       psnr_result);
-        break;
-      case kSSIM:
-        CalculateFrame(kSSIM, ref, test, width, height, frame_number,
-                       ssim_result);
-        break;
-      case kBoth:
-        CalculateFrame(kPSNR, ref, test, width, height, frame_number,
-                       psnr_result);
-        CalculateFrame(kSSIM, ref, test, width, height, frame_number,
-                       ssim_result);
-        break;
-      default:
-        assert(false);
-    }
-    frame_number++;
-    ref_bytes = fread(ref, 1, frame_length, ref_fp);
-    test_bytes = fread(test, 1, frame_length, test_fp);
-  }
-  int return_code = 0;
-  if (frame_number == 0) {
-    fprintf(stderr, "Tried to measure video metrics from empty files "
-            "(reference file: %s  test file: %s)\n", ref_filename,
-            test_filename);
-    return_code = -3;
-  } else {
-    CalculateStats(psnr_result);
-    CalculateStats(ssim_result);
-  }
-  delete [] ref;
-  delete [] test;
-  fclose(ref_fp);
-  fclose(test_fp);
-  return return_code;
-}
-
-int I420MetricsFromFiles(const char* ref_filename,
-                         const char* test_filename,
-                         int width,
-                         int height,
-                         QualityMetricsResult* psnr_result,
-                         QualityMetricsResult* ssim_result) {
-  assert(psnr_result != NULL);
-  assert(ssim_result != NULL);
-  return CalculateMetrics(kBoth, ref_filename, test_filename, width, height,
-                          psnr_result, ssim_result);
-}
-
-int I420PSNRFromFiles(const char* ref_filename,
-                      const char* test_filename,
-                      int width,
-                      int height,
-                      QualityMetricsResult* result) {
-  assert(result != NULL);
-  return CalculateMetrics(kPSNR, ref_filename, test_filename, width, height,
-                          result, NULL);
-}
-
-int I420SSIMFromFiles(const char* ref_filename,
-                      const char* test_filename,
-                      int width,
-                      int height,
-                      QualityMetricsResult* result) {
-  assert(result != NULL);
-  return CalculateMetrics(kSSIM, ref_filename, test_filename, width, height,
-                          NULL, result);
-}
-
-}  // namespace test
-}  // namespace webrtc
diff --git a/test/testsupport/packet_reader_unittest.cc b/test/testsupport/packet_reader_unittest.cc
deleted file mode 100644
index 6719e4c..0000000
--- a/test/testsupport/packet_reader_unittest.cc
+++ /dev/null
@@ -1,123 +0,0 @@
-/*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#include "testsupport/packet_reader.h"
-
-#include "gtest/gtest.h"
-#include "testsupport/unittest_utils.h"
-
-namespace webrtc {
-namespace test {
-
-class PacketReaderTest: public PacketRelatedTest {
- protected:
-  PacketReaderTest() {}
-  virtual ~PacketReaderTest() {}
-  void SetUp() {
-    reader_ = new PacketReader();
-  }
-  void TearDown() {
-    delete reader_;
-  }
-  void VerifyPacketData(int expected_length,
-                        int actual_length,
-                        WebRtc_UWord8* original_data_pointer,
-                        WebRtc_UWord8* new_data_pointer) {
-    EXPECT_EQ(expected_length, actual_length);
-    EXPECT_EQ(*original_data_pointer, *new_data_pointer);
-    EXPECT_EQ(0, memcmp(original_data_pointer, new_data_pointer,
-                        actual_length));
-  }
-  PacketReader* reader_;
-};
-
-// Test lack of initialization
-TEST_F(PacketReaderTest, Uninitialized) {
-  WebRtc_UWord8* data_pointer = NULL;
-  EXPECT_EQ(-1, reader_->NextPacket(&data_pointer));
-  EXPECT_EQ(NULL, data_pointer);
-}
-
-TEST_F(PacketReaderTest, InitializeZeroLengthArgument) {
-  reader_->InitializeReading(packet_data_, 0, kPacketSizeInBytes);
-  ASSERT_EQ(0, reader_->NextPacket(&packet_data_pointer_));
-}
-
-// Test with something smaller than one packet
-TEST_F(PacketReaderTest, NormalSmallData) {
-  const int kDataLengthInBytes = 1499;
-  WebRtc_UWord8 data[kDataLengthInBytes];
-  WebRtc_UWord8* data_pointer = data;
-  memset(data, 1, kDataLengthInBytes);
-
-  reader_->InitializeReading(data, kDataLengthInBytes, kPacketSizeInBytes);
-  int length_to_read = reader_->NextPacket(&data_pointer);
-  VerifyPacketData(kDataLengthInBytes, length_to_read, data, data_pointer);
-  EXPECT_EQ(0, data_pointer - data);  // pointer hasn't moved
-
-  // Reading another one shall result in 0 bytes:
-  length_to_read = reader_->NextPacket(&data_pointer);
-  EXPECT_EQ(0, length_to_read);
-  EXPECT_EQ(kDataLengthInBytes, data_pointer - data);
-}
-
-// Test with data length that exactly matches one packet
-TEST_F(PacketReaderTest, NormalOnePacketData) {
-  WebRtc_UWord8 data[kPacketSizeInBytes];
-  WebRtc_UWord8* data_pointer = data;
-  memset(data, 1, kPacketSizeInBytes);
-
-  reader_->InitializeReading(data, kPacketSizeInBytes, kPacketSizeInBytes);
-  int length_to_read = reader_->NextPacket(&data_pointer);
-  VerifyPacketData(kPacketSizeInBytes, length_to_read, data, data_pointer);
-  EXPECT_EQ(0, data_pointer - data);  // pointer hasn't moved
-
-  // Reading another one shall result in 0 bytes:
-  length_to_read = reader_->NextPacket(&data_pointer);
-  EXPECT_EQ(0, length_to_read);
-  EXPECT_EQ(kPacketSizeInBytes, data_pointer - data);
-}
-
-// Test with data length that will result in 3 packets
-TEST_F(PacketReaderTest, NormalLargeData) {
-  reader_->InitializeReading(packet_data_, kPacketDataLength,
-                             kPacketSizeInBytes);
-
-  int length_to_read = reader_->NextPacket(&packet_data_pointer_);
-  VerifyPacketData(kPacketSizeInBytes, length_to_read,
-                   packet1_, packet_data_pointer_);
-
-  length_to_read = reader_->NextPacket(&packet_data_pointer_);
-  VerifyPacketData(kPacketSizeInBytes, length_to_read,
-                   packet2_, packet_data_pointer_);
-
-  length_to_read = reader_->NextPacket(&packet_data_pointer_);
-  VerifyPacketData(1u, length_to_read,
-                   packet3_, packet_data_pointer_);
-
-  // Reading another one shall result in 0 bytes:
-  length_to_read = reader_->NextPacket(&packet_data_pointer_);
-  EXPECT_EQ(0, length_to_read);
-  EXPECT_EQ(kPacketDataLength, packet_data_pointer_ - packet_data_);
-}
-
-// Test with empty data.
-TEST_F(PacketReaderTest, EmptyData) {
-  const int kDataLengthInBytes = 0;
-  WebRtc_UWord8* data = new WebRtc_UWord8[kDataLengthInBytes];
-  reader_->InitializeReading(data, kDataLengthInBytes, kPacketSizeInBytes);
-  EXPECT_EQ(kDataLengthInBytes, reader_->NextPacket(&data));
-  // Do it again to make sure nothing changes
-  EXPECT_EQ(kDataLengthInBytes, reader_->NextPacket(&data));
-  delete[] data;
-}
-
-}  // namespace test
-}  // namespace webrtc
diff --git a/test/testsupport/unittest_utils.h b/test/testsupport/unittest_utils.h
deleted file mode 100644
index 963a5d3..0000000
--- a/test/testsupport/unittest_utils.h
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- *  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
- *
- *  Use of this source code is governed by a BSD-style license
- *  that can be found in the LICENSE file in the root of the source
- *  tree. An additional intellectual property rights grant can be found
- *  in the file PATENTS.  All contributing project authors may
- *  be found in the AUTHORS file in the root of the source tree.
- */
-
-#ifndef WEBRTC_TEST_TESTSUPPORT_UNITTEST_UTILS_H_
-#define WEBRTC_TEST_TESTSUPPORT_UNITTEST_UTILS_H_
-
-namespace webrtc {
-namespace test {
-
-const int kPacketSizeInBytes = 1500;
-const int kPacketDataLength = kPacketSizeInBytes * 2 + 1;
-const int kPacketDataNumberOfPackets = 3;
-
-// A base test fixture for packet related tests. Contains
-// two full prepared packets with 1s, 2s in their data and a third packet with
-// a single 3 in it (size=1).
-// A packet data structure is also available, that contains these three packets
-// in order.
-class PacketRelatedTest: public testing::Test {
- protected:
-  // Tree packet byte arrays with data used for verification:
-  WebRtc_UWord8 packet1_[kPacketSizeInBytes];
-  WebRtc_UWord8 packet2_[kPacketSizeInBytes];
-  WebRtc_UWord8 packet3_[1];
-  // Construct a data structure containing these packets
-  WebRtc_UWord8 packet_data_[kPacketDataLength];
-  WebRtc_UWord8* packet_data_pointer_;
-
-  PacketRelatedTest() {
-    packet_data_pointer_ = packet_data_;
-
-    memset(packet1_, 1, kPacketSizeInBytes);
-    memset(packet2_, 2, kPacketSizeInBytes);
-    memset(packet3_, 3, 1);
-    // Fill the packet_data:
-    memcpy(packet_data_pointer_, packet1_, kPacketSizeInBytes);
-    memcpy(packet_data_pointer_ + kPacketSizeInBytes, packet2_,
-           kPacketSizeInBytes);
-    memcpy(packet_data_pointer_ + kPacketSizeInBytes * 2, packet3_, 1);
-  }
-  virtual ~PacketRelatedTest() {}
-  void SetUp() {
-    // Initialize the random generator with 0 to get deterministic behavior
-    srand(0);
-  }
-  void TearDown() {}
-};
-
-}  // namespace test
-}  // namespace webrtc
-
-#endif  // WEBRTC_TEST_TESTSUPPORT_UNITTEST_UTILS_H_
diff --git a/third_party/google-gflags/LICENSE b/third_party/google-gflags/LICENSE
new file mode 100644
index 0000000..d15b0c2
--- /dev/null
+++ b/third_party/google-gflags/LICENSE
@@ -0,0 +1,28 @@
+Copyright (c) 2006, Google Inc.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+    * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+    * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+    * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/third_party/google-gflags/README.webrtc b/third_party/google-gflags/README.webrtc
new file mode 100644
index 0000000..9a993ce
--- /dev/null
+++ b/third_party/google-gflags/README.webrtc
@@ -0,0 +1,13 @@
+URL: http://code.google.com/p/google-gflags/
+Version: 1.5
+License: New BSD
+License File: LICENSE
+
+Description:
+The gflags package contains a library that implements commandline
+flags processing. As such it's a replacement for getopt(). It has
+increased flexibility, including built-in support for C++ types like
+string, and the ability to define flags in the source file in which
+they're used.
+
+Local Modifications: None
diff --git a/third_party/google-gflags/gen/arch/android/arm/include/gflags/gflags.h b/third_party/google-gflags/gen/arch/android/arm/include/gflags/gflags.h
new file mode 100644
index 0000000..0c2f997
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/android/arm/include/gflags/gflags.h
@@ -0,0 +1,585 @@
+// Copyright (c) 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// ---
+// Author: Ray Sidney
+// Revamped and reorganized by Craig Silverstein
+//
+// This is the file that should be included by any file which declares
+// or defines a command line flag or wants to parse command line flags
+// or print a program usage message (which will include information about
+// flags).  Executive summary, in the form of an example foo.cc file:
+//
+//    #include "foo.h"         // foo.h has a line "DECLARE_int32(start);"
+//    #include "validators.h"  // hypothetical file defining ValidateIsFile()
+//
+//    DEFINE_int32(end, 1000, "The last record to read");
+//
+//    DEFINE_string(filename, "my_file.txt", "The file to read");
+//    // Crash if the specified file does not exist.
+//    static bool dummy = RegisterFlagValidator(&FLAGS_filename,
+//                                              &ValidateIsFile);
+//
+//    DECLARE_bool(verbose); // some other file has a DEFINE_bool(verbose, ...)
+//
+//    void MyFunc() {
+//      if (FLAGS_verbose) printf("Records %d-%d\n", FLAGS_start, FLAGS_end);
+//    }
+//
+// Then, at the command-line:
+//    ./foo --noverbose --start=5 --end=100
+//
+// For more details, see
+//    doc/gflags.html
+//
+// --- A note about thread-safety:
+//
+// We describe many functions in this routine as being thread-hostile,
+// thread-compatible, or thread-safe.  Here are the meanings we use:
+//
+// thread-safe: it is safe for multiple threads to call this routine
+//   (or, when referring to a class, methods of this class)
+//   concurrently.
+// thread-hostile: it is not safe for multiple threads to call this
+//   routine (or methods of this class) concurrently.  In gflags,
+//   most thread-hostile routines are intended to be called early in,
+//   or even before, main() -- that is, before threads are spawned.
+// thread-compatible: it is safe for multiple threads to read from
+//   this variable (when applied to variables), or to call const
+//   methods of this class (when applied to classes), as long as no
+//   other thread is writing to the variable or calling non-const
+//   methods of this class.
+
+#ifndef GOOGLE_GFLAGS_H_
+#define GOOGLE_GFLAGS_H_
+
+#include <string>
+#include <vector>
+
+// We care a lot about number of bits things take up.  Unfortunately,
+// systems define their bit-specific ints in a lot of different ways.
+// We use our own way, and have a typedef to get there.
+// Note: these commands below may look like "#if 1" or "#if 0", but
+// that's because they were constructed that way at ./configure time.
+// Look at gflags.h.in to see how they're calculated (based on your config).
+#if 1
+#include <stdint.h>             // the normal place uint16_t is defined
+#endif
+#if 1
+#include <sys/types.h>          // the normal place u_int16_t is defined
+#endif
+#if 1
+#include <inttypes.h>           // a third place for uint16_t or u_int16_t
+#endif
+
+namespace google {
+
+#if 1      // the C99 format
+typedef int32_t int32;
+typedef uint32_t uint32;
+typedef int64_t int64;
+typedef uint64_t uint64;
+#elif 1   // the BSD format
+typedef int32_t int32;
+typedef u_int32_t uint32;
+typedef int64_t int64;
+typedef u_int64_t uint64;
+#elif 0     // the windows (vc7) format
+typedef __int32 int32;
+typedef unsigned __int32 uint32;
+typedef __int64 int64;
+typedef unsigned __int64 uint64;
+#else
+#error Do not know how to define a 32-bit integer quantity on your system
+#endif
+
+// --------------------------------------------------------------------
+// To actually define a flag in a file, use DEFINE_bool,
+// DEFINE_string, etc. at the bottom of this file.  You may also find
+// it useful to register a validator with the flag.  This ensures that
+// when the flag is parsed from the commandline, or is later set via
+// SetCommandLineOption, we call the validation function. It is _not_
+// called when you assign the value to the flag directly using the = operator.
+//
+// The validation function should return true if the flag value is valid, and
+// false otherwise. If the function returns false for the new setting of the
+// flag, the flag will retain its current value. If it returns false for the
+// default value, ParseCommandLineFlags() will die.
+//
+// This function is safe to call at global construct time (as in the
+// example below).
+//
+// Example use:
+//    static bool ValidatePort(const char* flagname, int32 value) {
+//       if (value > 0 && value < 32768)   // value is ok
+//         return true;
+//       printf("Invalid value for --%s: %d\n", flagname, (int)value);
+//       return false;
+//    }
+//    DEFINE_int32(port, 0, "What port to listen on");
+//    static bool dummy = RegisterFlagValidator(&FLAGS_port, &ValidatePort);
+
+// Returns true if successfully registered, false if not (because the
+// first argument doesn't point to a command-line flag, or because a
+// validator is already registered for this flag).
+bool RegisterFlagValidator(const bool* flag,
+                           bool (*validate_fn)(const char*, bool));
+bool RegisterFlagValidator(const int32* flag,
+                           bool (*validate_fn)(const char*, int32));
+bool RegisterFlagValidator(const int64* flag,
+                           bool (*validate_fn)(const char*, int64));
+bool RegisterFlagValidator(const uint64* flag,
+                           bool (*validate_fn)(const char*, uint64));
+bool RegisterFlagValidator(const double* flag,
+                           bool (*validate_fn)(const char*, double));
+bool RegisterFlagValidator(const std::string* flag,
+                           bool (*validate_fn)(const char*, const std::string&));
+
+
+// --------------------------------------------------------------------
+// These methods are the best way to get access to info about the
+// list of commandline flags.  Note that these routines are pretty slow.
+//   GetAllFlags: mostly-complete info about the list, sorted by file.
+//   ShowUsageWithFlags: pretty-prints the list to stdout (what --help does)
+//   ShowUsageWithFlagsRestrict: limit to filenames with restrict as a substr
+//
+// In addition to accessing flags, you can also access argv[0] (the program
+// name) and argv (the entire commandline), which we sock away a copy of.
+// These variables are static, so you should only set them once.
+
+struct CommandLineFlagInfo {
+  std::string name;           // the name of the flag
+  std::string type;           // the type of the flag: int32, etc
+  std::string description;    // the "help text" associated with the flag
+  std::string current_value;  // the current value, as a string
+  std::string default_value;  // the default value, as a string
+  std::string filename;       // 'cleaned' version of filename holding the flag
+  bool has_validator_fn;      // true if RegisterFlagValidator called on flag
+  bool is_default;            // true if the flag has the default value and
+                              // has not been set explicitly from the cmdline
+                              // or via SetCommandLineOption
+};
+
+// Using this inside of a validator is a recipe for a deadlock.
+// TODO(wojtekm) Fix locking when validators are running, to make it safe to
+// call validators during ParseAllFlags.
+// Also make sure then to uncomment the corresponding unit test in
+// commandlineflags_unittest.sh
+extern void GetAllFlags(std::vector<CommandLineFlagInfo>* OUTPUT);
+// These two are actually defined in commandlineflags_reporting.cc.
+extern void ShowUsageWithFlags(const char *argv0);  // what --help does
+extern void ShowUsageWithFlagsRestrict(const char *argv0, const char *restrict);
+
+// Create a descriptive string for a flag.
+// Goes to some trouble to make pretty line breaks.
+extern std::string DescribeOneFlag(const CommandLineFlagInfo& flag);
+
+// Thread-hostile; meant to be called before any threads are spawned.
+extern void SetArgv(int argc, const char** argv);
+// The following functions are thread-safe as long as SetArgv() is
+// only called before any threads start.
+extern const std::vector<std::string>& GetArgvs();  // all of argv as a vector
+extern const char* GetArgv();                // all of argv as a string
+extern const char* GetArgv0();               // only argv0
+extern uint32 GetArgvSum();                  // simple checksum of argv
+extern const char* ProgramInvocationName();  // argv0, or "UNKNOWN" if not set
+extern const char* ProgramInvocationShortName();   // basename(argv0)
+// ProgramUsage() is thread-safe as long as SetUsageMessage() is only
+// called before any threads start.
+extern const char* ProgramUsage();           // string set by SetUsageMessage()
+
+
+// --------------------------------------------------------------------
+// Normally you access commandline flags by just saying "if (FLAGS_foo)"
+// or whatever, and set them by calling "FLAGS_foo = bar" (or, more
+// commonly, via the DEFINE_foo macro).  But if you need a bit more
+// control, we have programmatic ways to get/set the flags as well.
+// These programmatic ways to access flags are thread-safe, but direct
+// access is only thread-compatible.
+
+// Return true iff the flagname was found.
+// OUTPUT is set to the flag's value, or unchanged if we return false.
+extern bool GetCommandLineOption(const char* name, std::string* OUTPUT);
+
+// Return true iff the flagname was found. OUTPUT is set to the flag's
+// CommandLineFlagInfo or unchanged if we return false.
+extern bool GetCommandLineFlagInfo(const char* name,
+                                   CommandLineFlagInfo* OUTPUT);
+
+// Return the CommandLineFlagInfo of the flagname.  exit() if name not found.
+// Example usage, to check if a flag's value is currently the default value:
+//   if (GetCommandLineFlagInfoOrDie("foo").is_default) ...
+extern CommandLineFlagInfo GetCommandLineFlagInfoOrDie(const char* name);
+
+enum FlagSettingMode {
+  // update the flag's value (can call this multiple times).
+  SET_FLAGS_VALUE,
+  // update the flag's value, but *only if* it has not yet been updated
+  // with SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef".
+  SET_FLAG_IF_DEFAULT,
+  // set the flag's default value to this.  If the flag has not yet updated
+  // yet (via SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef")
+  // change the flag's current value to the new default value as well.
+  SET_FLAGS_DEFAULT
+};
+
+// Set a particular flag ("command line option").  Returns a string
+// describing the new value that the option has been set to.  The
+// return value API is not well-specified, so basically just depend on
+// it to be empty if the setting failed for some reason -- the name is
+// not a valid flag name, or the value is not a valid value -- and
+// non-empty else.
+
+// SetCommandLineOption uses set_mode == SET_FLAGS_VALUE (the common case)
+extern std::string SetCommandLineOption(const char* name, const char* value);
+extern std::string SetCommandLineOptionWithMode(const char* name, const char* value,
+                                                FlagSettingMode set_mode);
+
+
+// --------------------------------------------------------------------
+// Saves the states (value, default value, whether the user has set
+// the flag, registered validators, etc) of all flags, and restores
+// them when the FlagSaver is destroyed.  This is very useful in
+// tests, say, when you want to let your tests change the flags, but
+// make sure that they get reverted to the original states when your
+// test is complete.
+//
+// Example usage:
+//   void TestFoo() {
+//     FlagSaver s1;
+//     FLAG_foo = false;
+//     FLAG_bar = "some value";
+//
+//     // test happens here.  You can return at any time
+//     // without worrying about restoring the FLAG values.
+//   }
+//
+// Note: This class is marked with __attribute__((unused)) because all the
+// work is done in the constructor and destructor, so in the standard
+// usage example above, the compiler would complain that it's an
+// unused variable.
+//
+// This class is thread-safe.
+
+class FlagSaver {
+ public:
+  FlagSaver();
+  ~FlagSaver();
+
+ private:
+  class FlagSaverImpl* impl_;   // we use pimpl here to keep API steady
+
+  FlagSaver(const FlagSaver&);  // no copying!
+  void operator=(const FlagSaver&);
+} __attribute__ ((unused));
+
+// --------------------------------------------------------------------
+// Some deprecated or hopefully-soon-to-be-deprecated functions.
+
+// This is often used for logging.  TODO(csilvers): figure out a better way
+extern std::string CommandlineFlagsIntoString();
+// Usually where this is used, a FlagSaver should be used instead.
+extern bool ReadFlagsFromString(const std::string& flagfilecontents,
+                                const char* prog_name,
+                                bool errors_are_fatal); // uses SET_FLAGS_VALUE
+
+// These let you manually implement --flagfile functionality.
+// DEPRECATED.
+extern bool AppendFlagsIntoFile(const std::string& filename, const char* prog_name);
+extern bool SaveCommandFlags();  // actually defined in google.cc !
+extern bool ReadFromFlagsFile(const std::string& filename, const char* prog_name,
+                              bool errors_are_fatal);   // uses SET_FLAGS_VALUE
+
+
+// --------------------------------------------------------------------
+// Useful routines for initializing flags from the environment.
+// In each case, if 'varname' does not exist in the environment
+// return defval.  If 'varname' does exist but is not valid
+// (e.g., not a number for an int32 flag), abort with an error.
+// Otherwise, return the value.  NOTE: for booleans, for true use
+// 't' or 'T' or 'true' or '1', for false 'f' or 'F' or 'false' or '0'.
+
+extern bool BoolFromEnv(const char *varname, bool defval);
+extern int32 Int32FromEnv(const char *varname, int32 defval);
+extern int64 Int64FromEnv(const char *varname, int64 defval);
+extern uint64 Uint64FromEnv(const char *varname, uint64 defval);
+extern double DoubleFromEnv(const char *varname, double defval);
+extern const char *StringFromEnv(const char *varname, const char *defval);
+
+
+// --------------------------------------------------------------------
+// The next two functions parse commandlineflags from main():
+
+// Set the "usage" message for this program.  For example:
+//   string usage("This program does nothing.  Sample usage:\n");
+//   usage += argv[0] + " <uselessarg1> <uselessarg2>";
+//   SetUsageMessage(usage);
+// Do not include commandline flags in the usage: we do that for you!
+// Thread-hostile; meant to be called before any threads are spawned.
+extern void SetUsageMessage(const std::string& usage);
+
+// Looks for flags in argv and parses them.  Rearranges argv to put
+// flags first, or removes them entirely if remove_flags is true.
+// If a flag is defined more than once in the command line or flag
+// file, the last definition is used.  Returns the index (into argv)
+// of the first non-flag argument.
+// See top-of-file for more details on this function.
+#ifndef SWIG   // In swig, use ParseCommandLineFlagsScript() instead.
+extern uint32 ParseCommandLineFlags(int *argc, char*** argv,
+                                    bool remove_flags);
+#endif
+
+
+// Calls to ParseCommandLineNonHelpFlags and then to
+// HandleCommandLineHelpFlags can be used instead of a call to
+// ParseCommandLineFlags during initialization, in order to allow for
+// changing default values for some FLAGS (via
+// e.g. SetCommandLineOptionWithMode calls) between the time of
+// command line parsing and the time of dumping help information for
+// the flags as a result of command line parsing.  If a flag is
+// defined more than once in the command line or flag file, the last
+// definition is used.  Returns the index (into argv) of the first
+// non-flag argument.  (If remove_flags is true, will always return 1.)
+extern uint32 ParseCommandLineNonHelpFlags(int *argc, char*** argv,
+                                           bool remove_flags);
+// This is actually defined in commandlineflags_reporting.cc.
+// This function is misnamed (it also handles --version, etc.), but
+// it's too late to change that now. :-(
+extern void HandleCommandLineHelpFlags();   // in commandlineflags_reporting.cc
+
+// Allow command line reparsing.  Disables the error normally
+// generated when an unknown flag is found, since it may be found in a
+// later parse.  Thread-hostile; meant to be called before any threads
+// are spawned.
+extern void AllowCommandLineReparsing();
+
+// Reparse the flags that have not yet been recognized.  Only flags
+// registered since the last parse will be recognized.  Any flag value
+// must be provided as part of the argument using "=", not as a
+// separate command line argument that follows the flag argument.
+// Intended for handling flags from dynamically loaded libraries,
+// since their flags are not registered until they are loaded.
+// Returns the index (into the original argv) of the first non-flag
+// argument.  (If remove_flags is true, will always return 1.)
+extern uint32 ReparseCommandLineNonHelpFlags();
+
+// Clean up memory allocated by flags.  This is only needed to reduce
+// the quantity of "potentially leaked" reports emitted by memory
+// debugging tools such as valgrind.  It is not required for normal
+// operation, or for the perftools heap-checker.  It must only be called
+// when the process is about to exit, and all threads that might
+// access flags are quiescent.  Referencing flags after this is called
+// will have unexpected consequences.  This is not safe to run when
+// multiple threads might be running: the function is thread-hostile.
+extern void ShutDownCommandLineFlags();
+
+
+// --------------------------------------------------------------------
+// Now come the command line flag declaration/definition macros that
+// will actually be used.  They're kind of hairy.  A major reason
+// for this is initialization: we want people to be able to access
+// variables in global constructors and have that not crash, even if
+// their global constructor runs before the global constructor here.
+// (Obviously, we can't guarantee the flags will have the correct
+// default value in that case, but at least accessing them is safe.)
+// The only way to do that is have flags point to a static buffer.
+// So we make one, using a union to ensure proper alignment, and
+// then use placement-new to actually set up the flag with the
+// correct default value.  In the same vein, we have to worry about
+// flag access in global destructors, so FlagRegisterer has to be
+// careful never to destroy the flag-values it constructs.
+//
+// Note that when we define a flag variable FLAGS_<name>, we also
+// preemptively define a junk variable, FLAGS_no<name>.  This is to
+// cause a link-time error if someone tries to define 2 flags with
+// names like "logging" and "nologging".  We do this because a bool
+// flag FLAG can be set from the command line to true with a "-FLAG"
+// argument, and to false with a "-noFLAG" argument, and so this can
+// potentially avert confusion.
+//
+// We also put flags into their own namespace.  It is purposefully
+// named in an opaque way that people should have trouble typing
+// directly.  The idea is that DEFINE puts the flag in the weird
+// namespace, and DECLARE imports the flag from there into the current
+// namespace.  The net result is to force people to use DECLARE to get
+// access to a flag, rather than saying "extern bool FLAGS_whatever;"
+// or some such instead.  We want this so we can put extra
+// functionality (like sanity-checking) in DECLARE if we want, and
+// make sure it is picked up everywhere.
+//
+// We also put the type of the variable in the namespace, so that
+// people can't DECLARE_int32 something that they DEFINE_bool'd
+// elsewhere.
+
+class FlagRegisterer {
+ public:
+  FlagRegisterer(const char* name, const char* type,
+                 const char* help, const char* filename,
+                 void* current_storage, void* defvalue_storage);
+};
+
+extern bool FlagsTypeWarn(const char *name);
+
+// If your application #defines STRIP_FLAG_HELP to a non-zero value
+// before #including this file, we remove the help message from the
+// binary file. This can reduce the size of the resulting binary
+// somewhat, and may also be useful for security reasons.
+
+extern const char kStrippedFlagHelp[];
+
+}
+
+#ifndef SWIG  // In swig, ignore the main flag declarations
+
+#if defined(STRIP_FLAG_HELP) && STRIP_FLAG_HELP > 0
+// Need this construct to avoid the 'defined but not used' warning.
+#define MAYBE_STRIPPED_HELP(txt) (false ? (txt) : ::google::kStrippedFlagHelp)
+#else
+#define MAYBE_STRIPPED_HELP(txt) txt
+#endif
+
+// Each command-line flag has two variables associated with it: one
+// with the current value, and one with the default value.  However,
+// we have a third variable, which is where value is assigned; it's a
+// constant.  This guarantees that FLAG_##value is initialized at
+// static initialization time (e.g. before program-start) rather than
+// than global construction time (which is after program-start but
+// before main), at least when 'value' is a compile-time constant.  We
+// use a small trick for the "default value" variable, and call it
+// FLAGS_no<name>.  This serves the second purpose of assuring a
+// compile error if someone tries to define a flag named no<name>
+// which is illegal (--foo and --nofoo both affect the "foo" flag).
+#define DEFINE_VARIABLE(type, shorttype, name, value, help) \
+  namespace fL##shorttype {                                     \
+    static const type FLAGS_nono##name = value;                 \
+    type FLAGS_##name = FLAGS_nono##name;                       \
+    type FLAGS_no##name = FLAGS_nono##name;                     \
+    static ::google::FlagRegisterer o_##name(      \
+      #name, #type, MAYBE_STRIPPED_HELP(help), __FILE__,        \
+      &FLAGS_##name, &FLAGS_no##name);                          \
+  }                                                             \
+  using fL##shorttype::FLAGS_##name
+
+#define DECLARE_VARIABLE(type, shorttype, name) \
+  namespace fL##shorttype {                     \
+    extern type FLAGS_##name;                   \
+  }                                             \
+  using fL##shorttype::FLAGS_##name
+
+// For DEFINE_bool, we want to do the extra check that the passed-in
+// value is actually a bool, and not a string or something that can be
+// coerced to a bool.  These declarations (no definition needed!) will
+// help us do that, and never evaluate From, which is important.
+// We'll use 'sizeof(IsBool(val))' to distinguish. This code requires
+// that the compiler have different sizes for bool & double. Since
+// this is not guaranteed by the standard, we check it with a
+// compile-time assert (msg[-1] will give a compile-time error).
+namespace fLB {
+struct CompileAssert {};
+typedef CompileAssert expected_sizeof_double_neq_sizeof_bool[
+                      (sizeof(double) != sizeof(bool)) ? 1 : -1];
+template<typename From> double IsBoolFlag(const From& from);
+bool IsBoolFlag(bool from);
+}  // namespace fLB
+
+#define DECLARE_bool(name)          DECLARE_VARIABLE(bool, B, name)
+#define DEFINE_bool(name, val, txt)                                       \
+  namespace fLB {                                                         \
+    typedef ::fLB::CompileAssert FLAG_##name##_value_is_not_a_bool[       \
+            (sizeof(::fLB::IsBoolFlag(val)) != sizeof(double)) ? 1 : -1]; \
+  }                                                                       \
+  DEFINE_VARIABLE(bool, B, name, val, txt)
+
+#define DECLARE_int32(name)         DECLARE_VARIABLE(::google::int32, I, name)
+#define DEFINE_int32(name,val,txt)  DEFINE_VARIABLE(::google::int32, I, name, val, txt)
+
+#define DECLARE_int64(name)         DECLARE_VARIABLE(::google::int64, I64, name)
+#define DEFINE_int64(name,val,txt)  DEFINE_VARIABLE(::google::int64, I64, name, val, txt)
+
+#define DECLARE_uint64(name)        DECLARE_VARIABLE(::google::uint64, U64, name)
+#define DEFINE_uint64(name,val,txt) DEFINE_VARIABLE(::google::uint64, U64, name, val, txt)
+
+#define DECLARE_double(name)          DECLARE_VARIABLE(double, D, name)
+#define DEFINE_double(name, val, txt) DEFINE_VARIABLE(double, D, name, val, txt)
+
+// Strings are trickier, because they're not a POD, so we can't
+// construct them at static-initialization time (instead they get
+// constructed at global-constructor time, which is much later).  To
+// try to avoid crashes in that case, we use a char buffer to store
+// the string, which we can static-initialize, and then placement-new
+// into it later.  It's not perfect, but the best we can do.
+
+namespace fLS {
+// The meaning of "string" might be different between now and when the
+// macros below get invoked (e.g., if someone is experimenting with
+// other string implementations that get defined after this file is
+// included).  Save the current meaning now and use it in the macros.
+typedef std::string clstring;
+
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const char *value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const clstring &value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           int value);
+}  // namespace fLS
+
+#define DECLARE_string(name)  namespace fLS { extern ::fLS::clstring& FLAGS_##name; } \
+                              using fLS::FLAGS_##name
+
+// We need to define a var named FLAGS_no##name so people don't define
+// --string and --nostring.  And we need a temporary place to put val
+// so we don't have to evaluate it twice.  Two great needs that go
+// great together!
+// The weird 'using' + 'extern' inside the fLS namespace is to work around
+// an unknown compiler bug/issue with the gcc 4.2.1 on SUSE 10.  See
+//    http://code.google.com/p/google-gflags/issues/detail?id=20
+#define DEFINE_string(name, val, txt)                                       \
+  namespace fLS {                                                           \
+    using ::fLS::clstring;                                                  \
+    static union { void* align; char s[sizeof(clstring)]; } s_##name[2];    \
+    clstring* const FLAGS_no##name = ::fLS::                                \
+                                   dont_pass0toDEFINE_string(s_##name[0].s, \
+                                                             val);          \
+    static ::google::FlagRegisterer o_##name(                  \
+        #name, "string", MAYBE_STRIPPED_HELP(txt), __FILE__,                \
+        s_##name[0].s, new (s_##name[1].s) clstring(*FLAGS_no##name));      \
+    extern clstring& FLAGS_##name;                                          \
+    using fLS::FLAGS_##name;                                                \
+    clstring& FLAGS_##name = *FLAGS_no##name;                               \
+  }                                                                         \
+  using fLS::FLAGS_##name
+
+#endif  // SWIG
+
+#endif  // GOOGLE_GFLAGS_H_
diff --git a/third_party/google-gflags/gen/arch/android/arm/include/gflags/gflags_completions.h b/third_party/google-gflags/gen/arch/android/arm/include/gflags/gflags_completions.h
new file mode 100644
index 0000000..9d9ce7a
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/android/arm/include/gflags/gflags_completions.h
@@ -0,0 +1,121 @@
+// Copyright (c) 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// ---
+// Author: Dave Nicponski
+//
+// Implement helpful bash-style command line flag completions
+//
+// ** Functional API:
+// HandleCommandLineCompletions() should be called early during
+// program startup, but after command line flag code has been
+// initialized, such as the beginning of HandleCommandLineHelpFlags().
+// It checks the value of the flag --tab_completion_word.  If this
+// flag is empty, nothing happens here.  If it contains a string,
+// however, then HandleCommandLineCompletions() will hijack the
+// process, attempting to identify the intention behind this
+// completion.  Regardless of the outcome of this deduction, the
+// process will be terminated, similar to --helpshort flag
+// handling.
+//
+// ** Overview of Bash completions:
+// Bash can be told to programatically determine completions for the
+// current 'cursor word'.  It does this by (in this case) invoking a
+// command with some additional arguments identifying the command
+// being executed, the word being completed, and the previous word
+// (if any).  Bash then expects a sequence of output lines to be
+// printed to stdout.  If these lines all contain a common prefix
+// longer than the cursor word, bash will replace the cursor word
+// with that common prefix, and display nothing.  If there isn't such
+// a common prefix, bash will display the lines in pages using 'more'.
+//
+// ** Strategy taken for command line completions:
+// If we can deduce either the exact flag intended, or a common flag
+// prefix, we'll output exactly that.  Otherwise, if information
+// must be displayed to the user, we'll take the opportunity to add
+// some helpful information beyond just the flag name (specifically,
+// we'll include the default flag value and as much of the flag's
+// description as can fit on a single terminal line width, as specified
+// by the flag --tab_completion_columns).  Furthermore, we'll try to
+// make bash order the output such that the most useful or relevent
+// flags are the most likely to be shown at the top.
+//
+// ** Additional features:
+// To assist in finding that one really useful flag, substring matching
+// was implemented.  Before pressing a <TAB> to get completion for the
+// current word, you can append one or more '?' to the flag to do
+// substring matching.  Here's the semantics:
+//   --foo<TAB>     Show me all flags with names prefixed by 'foo'
+//   --foo?<TAB>    Show me all flags with 'foo' somewhere in the name
+//   --foo??<TAB>   Same as prior case, but also search in module
+//                  definition path for 'foo'
+//   --foo???<TAB>  Same as prior case, but also search in flag
+//                  descriptions for 'foo'
+// Finally, we'll trim the output to a relatively small number of
+// flags to keep bash quiet about the verbosity of output.  If one
+// really wanted to see all possible matches, appending a '+' to the
+// search word will force the exhaustive list of matches to be printed.
+//
+// ** How to have bash accept completions from a binary:
+// Bash requires that it be informed about each command that programmatic
+// completion should be enabled for.  Example addition to a .bashrc
+// file would be (your path to gflags_completions.sh file may differ):
+
+/*
+$ complete -o bashdefault -o default -o nospace -C                        \
+ '/usr/local/bin/gflags_completions.sh --tab_completion_columns $COLUMNS' \
+  time  env  binary_name  another_binary  [...]
+*/
+
+// This would allow the following to work:
+//   $ /path/to/binary_name --vmodule<TAB>
+// Or:
+//   $ ./bin/path/another_binary --gfs_u<TAB>
+// (etc)
+//
+// Sadly, it appears that bash gives no easy way to force this behavior for
+// all commands.  That's where the "time" in the above example comes in.
+// If you haven't specifically added a command to the list of completion
+// supported commands, you can still get completions by prefixing the
+// entire command with "env".
+//   $ env /some/brand/new/binary --vmod<TAB>
+// Assuming that "binary" is a newly compiled binary, this should still
+// produce the expected completion output.
+
+
+#ifndef GOOGLE_GFLAGS_COMPLETIONS_H_
+#define GOOGLE_GFLAGS_COMPLETIONS_H_
+
+namespace google {
+
+void HandleCommandLineCompletions(void);
+
+}
+
+#endif  // GOOGLE_GFLAGS_COMPLETIONS_H_
diff --git a/third_party/google-gflags/gen/arch/android/arm/include/private/config.h b/third_party/google-gflags/gen/arch/android/arm/include/private/config.h
new file mode 100644
index 0000000..98d8e1a
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/android/arm/include/private/config.h
@@ -0,0 +1,110 @@
+/* src/config.h.  Generated from config.h.in by configure.  */
+/* src/config.h.in.  Generated from configure.ac by autoheader.  */
+
+/* Always the empty-string on non-windows systems. On windows, should be
+   "__declspec(dllexport)". This way, when we compile the dll, we export our
+   functions/classes. It's safe to define this here because config.h is only
+   used internally, to compile the DLL, and every DLL source file #includes
+   "config.h" before anything else. */
+#define GFLAGS_DLL_DECL /**/
+
+/* Namespace for Google classes */
+#define GOOGLE_NAMESPACE ::google
+
+/* Define to 1 if you have the <dlfcn.h> header file. */
+#define HAVE_DLFCN_H 1
+
+/* Define to 1 if you have the <fnmatch.h> header file. */
+#define HAVE_FNMATCH_H 1
+
+/* Define to 1 if you have the <inttypes.h> header file. */
+#define HAVE_INTTYPES_H 1
+
+/* Define to 1 if you have the <memory.h> header file. */
+#define HAVE_MEMORY_H 1
+
+/* define if the compiler implements namespaces */
+#define HAVE_NAMESPACES 1
+
+/* Define if you have POSIX threads libraries and header files. */
+#define HAVE_PTHREAD 1
+
+/* Define to 1 if you have the `putenv' function. */
+#define HAVE_PUTENV 1
+
+/* Define to 1 if you have the `setenv' function. */
+#define HAVE_SETENV 1
+
+/* Define to 1 if you have the <stdint.h> header file. */
+#define HAVE_STDINT_H 1
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#define HAVE_STDLIB_H 1
+
+/* Define to 1 if you have the <strings.h> header file. */
+#define HAVE_STRINGS_H 1
+
+/* Define to 1 if you have the <string.h> header file. */
+#define HAVE_STRING_H 1
+
+/* Define to 1 if you have the `strtoll' function. */
+#define HAVE_STRTOLL 1
+
+/* Define to 1 if you have the `strtoq' function. */
+#define HAVE_STRTOQ 1
+
+/* Define to 1 if you have the <sys/stat.h> header file. */
+#define HAVE_SYS_STAT_H 1
+
+/* Define to 1 if you have the <sys/types.h> header file. */
+#define HAVE_SYS_TYPES_H 1
+
+/* Define to 1 if you have the <unistd.h> header file. */
+#define HAVE_UNISTD_H 1
+
+/* define if your compiler has __attribute__ */
+#define HAVE___ATTRIBUTE__ 1
+
+/* Define to the sub-directory in which libtool stores uninstalled libraries.
+   */
+#define LT_OBJDIR ".libs/"
+
+/* Name of package */
+#define PACKAGE "gflags"
+
+/* Define to the address where bug reports for this package should be sent. */
+#define PACKAGE_BUGREPORT "opensource@google.com"
+
+/* Define to the full name of this package. */
+#define PACKAGE_NAME "gflags"
+
+/* Define to the full name and version of this package. */
+#define PACKAGE_STRING "gflags 1.5"
+
+/* Define to the one symbol short name of this package. */
+#define PACKAGE_TARNAME "gflags"
+
+/* Define to the home page for this package. */
+#define PACKAGE_URL ""
+
+/* Define to the version of this package. */
+#define PACKAGE_VERSION "1.5"
+
+/* Define to necessary symbol if this constant uses a non-standard name on
+   your system. */
+/* #undef PTHREAD_CREATE_JOINABLE */
+
+/* Define to 1 if you have the ANSI C header files. */
+#define STDC_HEADERS 1
+
+/* the namespace where STL code like vector<> is defined */
+#define STL_NAMESPACE std
+
+/* Version number of package */
+#define VERSION "1.5"
+
+/* Stops putting the code inside the Google namespace */
+#define _END_GOOGLE_NAMESPACE_ }
+
+/* Puts following code inside the Google namespace */
+#define _START_GOOGLE_NAMESPACE_ namespace google {
diff --git a/third_party/google-gflags/gen/arch/linux/arm/include/gflags/gflags.h b/third_party/google-gflags/gen/arch/linux/arm/include/gflags/gflags.h
new file mode 100644
index 0000000..0c2f997
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/linux/arm/include/gflags/gflags.h
@@ -0,0 +1,585 @@
+// Copyright (c) 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// ---
+// Author: Ray Sidney
+// Revamped and reorganized by Craig Silverstein
+//
+// This is the file that should be included by any file which declares
+// or defines a command line flag or wants to parse command line flags
+// or print a program usage message (which will include information about
+// flags).  Executive summary, in the form of an example foo.cc file:
+//
+//    #include "foo.h"         // foo.h has a line "DECLARE_int32(start);"
+//    #include "validators.h"  // hypothetical file defining ValidateIsFile()
+//
+//    DEFINE_int32(end, 1000, "The last record to read");
+//
+//    DEFINE_string(filename, "my_file.txt", "The file to read");
+//    // Crash if the specified file does not exist.
+//    static bool dummy = RegisterFlagValidator(&FLAGS_filename,
+//                                              &ValidateIsFile);
+//
+//    DECLARE_bool(verbose); // some other file has a DEFINE_bool(verbose, ...)
+//
+//    void MyFunc() {
+//      if (FLAGS_verbose) printf("Records %d-%d\n", FLAGS_start, FLAGS_end);
+//    }
+//
+// Then, at the command-line:
+//    ./foo --noverbose --start=5 --end=100
+//
+// For more details, see
+//    doc/gflags.html
+//
+// --- A note about thread-safety:
+//
+// We describe many functions in this routine as being thread-hostile,
+// thread-compatible, or thread-safe.  Here are the meanings we use:
+//
+// thread-safe: it is safe for multiple threads to call this routine
+//   (or, when referring to a class, methods of this class)
+//   concurrently.
+// thread-hostile: it is not safe for multiple threads to call this
+//   routine (or methods of this class) concurrently.  In gflags,
+//   most thread-hostile routines are intended to be called early in,
+//   or even before, main() -- that is, before threads are spawned.
+// thread-compatible: it is safe for multiple threads to read from
+//   this variable (when applied to variables), or to call const
+//   methods of this class (when applied to classes), as long as no
+//   other thread is writing to the variable or calling non-const
+//   methods of this class.
+
+#ifndef GOOGLE_GFLAGS_H_
+#define GOOGLE_GFLAGS_H_
+
+#include <string>
+#include <vector>
+
+// We care a lot about number of bits things take up.  Unfortunately,
+// systems define their bit-specific ints in a lot of different ways.
+// We use our own way, and have a typedef to get there.
+// Note: these commands below may look like "#if 1" or "#if 0", but
+// that's because they were constructed that way at ./configure time.
+// Look at gflags.h.in to see how they're calculated (based on your config).
+#if 1
+#include <stdint.h>             // the normal place uint16_t is defined
+#endif
+#if 1
+#include <sys/types.h>          // the normal place u_int16_t is defined
+#endif
+#if 1
+#include <inttypes.h>           // a third place for uint16_t or u_int16_t
+#endif
+
+namespace google {
+
+#if 1      // the C99 format
+typedef int32_t int32;
+typedef uint32_t uint32;
+typedef int64_t int64;
+typedef uint64_t uint64;
+#elif 1   // the BSD format
+typedef int32_t int32;
+typedef u_int32_t uint32;
+typedef int64_t int64;
+typedef u_int64_t uint64;
+#elif 0     // the windows (vc7) format
+typedef __int32 int32;
+typedef unsigned __int32 uint32;
+typedef __int64 int64;
+typedef unsigned __int64 uint64;
+#else
+#error Do not know how to define a 32-bit integer quantity on your system
+#endif
+
+// --------------------------------------------------------------------
+// To actually define a flag in a file, use DEFINE_bool,
+// DEFINE_string, etc. at the bottom of this file.  You may also find
+// it useful to register a validator with the flag.  This ensures that
+// when the flag is parsed from the commandline, or is later set via
+// SetCommandLineOption, we call the validation function. It is _not_
+// called when you assign the value to the flag directly using the = operator.
+//
+// The validation function should return true if the flag value is valid, and
+// false otherwise. If the function returns false for the new setting of the
+// flag, the flag will retain its current value. If it returns false for the
+// default value, ParseCommandLineFlags() will die.
+//
+// This function is safe to call at global construct time (as in the
+// example below).
+//
+// Example use:
+//    static bool ValidatePort(const char* flagname, int32 value) {
+//       if (value > 0 && value < 32768)   // value is ok
+//         return true;
+//       printf("Invalid value for --%s: %d\n", flagname, (int)value);
+//       return false;
+//    }
+//    DEFINE_int32(port, 0, "What port to listen on");
+//    static bool dummy = RegisterFlagValidator(&FLAGS_port, &ValidatePort);
+
+// Returns true if successfully registered, false if not (because the
+// first argument doesn't point to a command-line flag, or because a
+// validator is already registered for this flag).
+bool RegisterFlagValidator(const bool* flag,
+                           bool (*validate_fn)(const char*, bool));
+bool RegisterFlagValidator(const int32* flag,
+                           bool (*validate_fn)(const char*, int32));
+bool RegisterFlagValidator(const int64* flag,
+                           bool (*validate_fn)(const char*, int64));
+bool RegisterFlagValidator(const uint64* flag,
+                           bool (*validate_fn)(const char*, uint64));
+bool RegisterFlagValidator(const double* flag,
+                           bool (*validate_fn)(const char*, double));
+bool RegisterFlagValidator(const std::string* flag,
+                           bool (*validate_fn)(const char*, const std::string&));
+
+
+// --------------------------------------------------------------------
+// These methods are the best way to get access to info about the
+// list of commandline flags.  Note that these routines are pretty slow.
+//   GetAllFlags: mostly-complete info about the list, sorted by file.
+//   ShowUsageWithFlags: pretty-prints the list to stdout (what --help does)
+//   ShowUsageWithFlagsRestrict: limit to filenames with restrict as a substr
+//
+// In addition to accessing flags, you can also access argv[0] (the program
+// name) and argv (the entire commandline), which we sock away a copy of.
+// These variables are static, so you should only set them once.
+
+struct CommandLineFlagInfo {
+  std::string name;           // the name of the flag
+  std::string type;           // the type of the flag: int32, etc
+  std::string description;    // the "help text" associated with the flag
+  std::string current_value;  // the current value, as a string
+  std::string default_value;  // the default value, as a string
+  std::string filename;       // 'cleaned' version of filename holding the flag
+  bool has_validator_fn;      // true if RegisterFlagValidator called on flag
+  bool is_default;            // true if the flag has the default value and
+                              // has not been set explicitly from the cmdline
+                              // or via SetCommandLineOption
+};
+
+// Using this inside of a validator is a recipe for a deadlock.
+// TODO(wojtekm) Fix locking when validators are running, to make it safe to
+// call validators during ParseAllFlags.
+// Also make sure then to uncomment the corresponding unit test in
+// commandlineflags_unittest.sh
+extern void GetAllFlags(std::vector<CommandLineFlagInfo>* OUTPUT);
+// These two are actually defined in commandlineflags_reporting.cc.
+extern void ShowUsageWithFlags(const char *argv0);  // what --help does
+extern void ShowUsageWithFlagsRestrict(const char *argv0, const char *restrict);
+
+// Create a descriptive string for a flag.
+// Goes to some trouble to make pretty line breaks.
+extern std::string DescribeOneFlag(const CommandLineFlagInfo& flag);
+
+// Thread-hostile; meant to be called before any threads are spawned.
+extern void SetArgv(int argc, const char** argv);
+// The following functions are thread-safe as long as SetArgv() is
+// only called before any threads start.
+extern const std::vector<std::string>& GetArgvs();  // all of argv as a vector
+extern const char* GetArgv();                // all of argv as a string
+extern const char* GetArgv0();               // only argv0
+extern uint32 GetArgvSum();                  // simple checksum of argv
+extern const char* ProgramInvocationName();  // argv0, or "UNKNOWN" if not set
+extern const char* ProgramInvocationShortName();   // basename(argv0)
+// ProgramUsage() is thread-safe as long as SetUsageMessage() is only
+// called before any threads start.
+extern const char* ProgramUsage();           // string set by SetUsageMessage()
+
+
+// --------------------------------------------------------------------
+// Normally you access commandline flags by just saying "if (FLAGS_foo)"
+// or whatever, and set them by calling "FLAGS_foo = bar" (or, more
+// commonly, via the DEFINE_foo macro).  But if you need a bit more
+// control, we have programmatic ways to get/set the flags as well.
+// These programmatic ways to access flags are thread-safe, but direct
+// access is only thread-compatible.
+
+// Return true iff the flagname was found.
+// OUTPUT is set to the flag's value, or unchanged if we return false.
+extern bool GetCommandLineOption(const char* name, std::string* OUTPUT);
+
+// Return true iff the flagname was found. OUTPUT is set to the flag's
+// CommandLineFlagInfo or unchanged if we return false.
+extern bool GetCommandLineFlagInfo(const char* name,
+                                   CommandLineFlagInfo* OUTPUT);
+
+// Return the CommandLineFlagInfo of the flagname.  exit() if name not found.
+// Example usage, to check if a flag's value is currently the default value:
+//   if (GetCommandLineFlagInfoOrDie("foo").is_default) ...
+extern CommandLineFlagInfo GetCommandLineFlagInfoOrDie(const char* name);
+
+enum FlagSettingMode {
+  // update the flag's value (can call this multiple times).
+  SET_FLAGS_VALUE,
+  // update the flag's value, but *only if* it has not yet been updated
+  // with SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef".
+  SET_FLAG_IF_DEFAULT,
+  // set the flag's default value to this.  If the flag has not yet updated
+  // yet (via SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef")
+  // change the flag's current value to the new default value as well.
+  SET_FLAGS_DEFAULT
+};
+
+// Set a particular flag ("command line option").  Returns a string
+// describing the new value that the option has been set to.  The
+// return value API is not well-specified, so basically just depend on
+// it to be empty if the setting failed for some reason -- the name is
+// not a valid flag name, or the value is not a valid value -- and
+// non-empty else.
+
+// SetCommandLineOption uses set_mode == SET_FLAGS_VALUE (the common case)
+extern std::string SetCommandLineOption(const char* name, const char* value);
+extern std::string SetCommandLineOptionWithMode(const char* name, const char* value,
+                                                FlagSettingMode set_mode);
+
+
+// --------------------------------------------------------------------
+// Saves the states (value, default value, whether the user has set
+// the flag, registered validators, etc) of all flags, and restores
+// them when the FlagSaver is destroyed.  This is very useful in
+// tests, say, when you want to let your tests change the flags, but
+// make sure that they get reverted to the original states when your
+// test is complete.
+//
+// Example usage:
+//   void TestFoo() {
+//     FlagSaver s1;
+//     FLAG_foo = false;
+//     FLAG_bar = "some value";
+//
+//     // test happens here.  You can return at any time
+//     // without worrying about restoring the FLAG values.
+//   }
+//
+// Note: This class is marked with __attribute__((unused)) because all the
+// work is done in the constructor and destructor, so in the standard
+// usage example above, the compiler would complain that it's an
+// unused variable.
+//
+// This class is thread-safe.
+
+class FlagSaver {
+ public:
+  FlagSaver();
+  ~FlagSaver();
+
+ private:
+  class FlagSaverImpl* impl_;   // we use pimpl here to keep API steady
+
+  FlagSaver(const FlagSaver&);  // no copying!
+  void operator=(const FlagSaver&);
+} __attribute__ ((unused));
+
+// --------------------------------------------------------------------
+// Some deprecated or hopefully-soon-to-be-deprecated functions.
+
+// This is often used for logging.  TODO(csilvers): figure out a better way
+extern std::string CommandlineFlagsIntoString();
+// Usually where this is used, a FlagSaver should be used instead.
+extern bool ReadFlagsFromString(const std::string& flagfilecontents,
+                                const char* prog_name,
+                                bool errors_are_fatal); // uses SET_FLAGS_VALUE
+
+// These let you manually implement --flagfile functionality.
+// DEPRECATED.
+extern bool AppendFlagsIntoFile(const std::string& filename, const char* prog_name);
+extern bool SaveCommandFlags();  // actually defined in google.cc !
+extern bool ReadFromFlagsFile(const std::string& filename, const char* prog_name,
+                              bool errors_are_fatal);   // uses SET_FLAGS_VALUE
+
+
+// --------------------------------------------------------------------
+// Useful routines for initializing flags from the environment.
+// In each case, if 'varname' does not exist in the environment
+// return defval.  If 'varname' does exist but is not valid
+// (e.g., not a number for an int32 flag), abort with an error.
+// Otherwise, return the value.  NOTE: for booleans, for true use
+// 't' or 'T' or 'true' or '1', for false 'f' or 'F' or 'false' or '0'.
+
+extern bool BoolFromEnv(const char *varname, bool defval);
+extern int32 Int32FromEnv(const char *varname, int32 defval);
+extern int64 Int64FromEnv(const char *varname, int64 defval);
+extern uint64 Uint64FromEnv(const char *varname, uint64 defval);
+extern double DoubleFromEnv(const char *varname, double defval);
+extern const char *StringFromEnv(const char *varname, const char *defval);
+
+
+// --------------------------------------------------------------------
+// The next two functions parse commandlineflags from main():
+
+// Set the "usage" message for this program.  For example:
+//   string usage("This program does nothing.  Sample usage:\n");
+//   usage += argv[0] + " <uselessarg1> <uselessarg2>";
+//   SetUsageMessage(usage);
+// Do not include commandline flags in the usage: we do that for you!
+// Thread-hostile; meant to be called before any threads are spawned.
+extern void SetUsageMessage(const std::string& usage);
+
+// Looks for flags in argv and parses them.  Rearranges argv to put
+// flags first, or removes them entirely if remove_flags is true.
+// If a flag is defined more than once in the command line or flag
+// file, the last definition is used.  Returns the index (into argv)
+// of the first non-flag argument.
+// See top-of-file for more details on this function.
+#ifndef SWIG   // In swig, use ParseCommandLineFlagsScript() instead.
+extern uint32 ParseCommandLineFlags(int *argc, char*** argv,
+                                    bool remove_flags);
+#endif
+
+
+// Calls to ParseCommandLineNonHelpFlags and then to
+// HandleCommandLineHelpFlags can be used instead of a call to
+// ParseCommandLineFlags during initialization, in order to allow for
+// changing default values for some FLAGS (via
+// e.g. SetCommandLineOptionWithMode calls) between the time of
+// command line parsing and the time of dumping help information for
+// the flags as a result of command line parsing.  If a flag is
+// defined more than once in the command line or flag file, the last
+// definition is used.  Returns the index (into argv) of the first
+// non-flag argument.  (If remove_flags is true, will always return 1.)
+extern uint32 ParseCommandLineNonHelpFlags(int *argc, char*** argv,
+                                           bool remove_flags);
+// This is actually defined in commandlineflags_reporting.cc.
+// This function is misnamed (it also handles --version, etc.), but
+// it's too late to change that now. :-(
+extern void HandleCommandLineHelpFlags();   // in commandlineflags_reporting.cc
+
+// Allow command line reparsing.  Disables the error normally
+// generated when an unknown flag is found, since it may be found in a
+// later parse.  Thread-hostile; meant to be called before any threads
+// are spawned.
+extern void AllowCommandLineReparsing();
+
+// Reparse the flags that have not yet been recognized.  Only flags
+// registered since the last parse will be recognized.  Any flag value
+// must be provided as part of the argument using "=", not as a
+// separate command line argument that follows the flag argument.
+// Intended for handling flags from dynamically loaded libraries,
+// since their flags are not registered until they are loaded.
+// Returns the index (into the original argv) of the first non-flag
+// argument.  (If remove_flags is true, will always return 1.)
+extern uint32 ReparseCommandLineNonHelpFlags();
+
+// Clean up memory allocated by flags.  This is only needed to reduce
+// the quantity of "potentially leaked" reports emitted by memory
+// debugging tools such as valgrind.  It is not required for normal
+// operation, or for the perftools heap-checker.  It must only be called
+// when the process is about to exit, and all threads that might
+// access flags are quiescent.  Referencing flags after this is called
+// will have unexpected consequences.  This is not safe to run when
+// multiple threads might be running: the function is thread-hostile.
+extern void ShutDownCommandLineFlags();
+
+
+// --------------------------------------------------------------------
+// Now come the command line flag declaration/definition macros that
+// will actually be used.  They're kind of hairy.  A major reason
+// for this is initialization: we want people to be able to access
+// variables in global constructors and have that not crash, even if
+// their global constructor runs before the global constructor here.
+// (Obviously, we can't guarantee the flags will have the correct
+// default value in that case, but at least accessing them is safe.)
+// The only way to do that is have flags point to a static buffer.
+// So we make one, using a union to ensure proper alignment, and
+// then use placement-new to actually set up the flag with the
+// correct default value.  In the same vein, we have to worry about
+// flag access in global destructors, so FlagRegisterer has to be
+// careful never to destroy the flag-values it constructs.
+//
+// Note that when we define a flag variable FLAGS_<name>, we also
+// preemptively define a junk variable, FLAGS_no<name>.  This is to
+// cause a link-time error if someone tries to define 2 flags with
+// names like "logging" and "nologging".  We do this because a bool
+// flag FLAG can be set from the command line to true with a "-FLAG"
+// argument, and to false with a "-noFLAG" argument, and so this can
+// potentially avert confusion.
+//
+// We also put flags into their own namespace.  It is purposefully
+// named in an opaque way that people should have trouble typing
+// directly.  The idea is that DEFINE puts the flag in the weird
+// namespace, and DECLARE imports the flag from there into the current
+// namespace.  The net result is to force people to use DECLARE to get
+// access to a flag, rather than saying "extern bool FLAGS_whatever;"
+// or some such instead.  We want this so we can put extra
+// functionality (like sanity-checking) in DECLARE if we want, and
+// make sure it is picked up everywhere.
+//
+// We also put the type of the variable in the namespace, so that
+// people can't DECLARE_int32 something that they DEFINE_bool'd
+// elsewhere.
+
+class FlagRegisterer {
+ public:
+  FlagRegisterer(const char* name, const char* type,
+                 const char* help, const char* filename,
+                 void* current_storage, void* defvalue_storage);
+};
+
+extern bool FlagsTypeWarn(const char *name);
+
+// If your application #defines STRIP_FLAG_HELP to a non-zero value
+// before #including this file, we remove the help message from the
+// binary file. This can reduce the size of the resulting binary
+// somewhat, and may also be useful for security reasons.
+
+extern const char kStrippedFlagHelp[];
+
+}
+
+#ifndef SWIG  // In swig, ignore the main flag declarations
+
+#if defined(STRIP_FLAG_HELP) && STRIP_FLAG_HELP > 0
+// Need this construct to avoid the 'defined but not used' warning.
+#define MAYBE_STRIPPED_HELP(txt) (false ? (txt) : ::google::kStrippedFlagHelp)
+#else
+#define MAYBE_STRIPPED_HELP(txt) txt
+#endif
+
+// Each command-line flag has two variables associated with it: one
+// with the current value, and one with the default value.  However,
+// we have a third variable, which is where value is assigned; it's a
+// constant.  This guarantees that FLAG_##value is initialized at
+// static initialization time (e.g. before program-start) rather than
+// than global construction time (which is after program-start but
+// before main), at least when 'value' is a compile-time constant.  We
+// use a small trick for the "default value" variable, and call it
+// FLAGS_no<name>.  This serves the second purpose of assuring a
+// compile error if someone tries to define a flag named no<name>
+// which is illegal (--foo and --nofoo both affect the "foo" flag).
+#define DEFINE_VARIABLE(type, shorttype, name, value, help) \
+  namespace fL##shorttype {                                     \
+    static const type FLAGS_nono##name = value;                 \
+    type FLAGS_##name = FLAGS_nono##name;                       \
+    type FLAGS_no##name = FLAGS_nono##name;                     \
+    static ::google::FlagRegisterer o_##name(      \
+      #name, #type, MAYBE_STRIPPED_HELP(help), __FILE__,        \
+      &FLAGS_##name, &FLAGS_no##name);                          \
+  }                                                             \
+  using fL##shorttype::FLAGS_##name
+
+#define DECLARE_VARIABLE(type, shorttype, name) \
+  namespace fL##shorttype {                     \
+    extern type FLAGS_##name;                   \
+  }                                             \
+  using fL##shorttype::FLAGS_##name
+
+// For DEFINE_bool, we want to do the extra check that the passed-in
+// value is actually a bool, and not a string or something that can be
+// coerced to a bool.  These declarations (no definition needed!) will
+// help us do that, and never evaluate From, which is important.
+// We'll use 'sizeof(IsBool(val))' to distinguish. This code requires
+// that the compiler have different sizes for bool & double. Since
+// this is not guaranteed by the standard, we check it with a
+// compile-time assert (msg[-1] will give a compile-time error).
+namespace fLB {
+struct CompileAssert {};
+typedef CompileAssert expected_sizeof_double_neq_sizeof_bool[
+                      (sizeof(double) != sizeof(bool)) ? 1 : -1];
+template<typename From> double IsBoolFlag(const From& from);
+bool IsBoolFlag(bool from);
+}  // namespace fLB
+
+#define DECLARE_bool(name)          DECLARE_VARIABLE(bool, B, name)
+#define DEFINE_bool(name, val, txt)                                       \
+  namespace fLB {                                                         \
+    typedef ::fLB::CompileAssert FLAG_##name##_value_is_not_a_bool[       \
+            (sizeof(::fLB::IsBoolFlag(val)) != sizeof(double)) ? 1 : -1]; \
+  }                                                                       \
+  DEFINE_VARIABLE(bool, B, name, val, txt)
+
+#define DECLARE_int32(name)         DECLARE_VARIABLE(::google::int32, I, name)
+#define DEFINE_int32(name,val,txt)  DEFINE_VARIABLE(::google::int32, I, name, val, txt)
+
+#define DECLARE_int64(name)         DECLARE_VARIABLE(::google::int64, I64, name)
+#define DEFINE_int64(name,val,txt)  DEFINE_VARIABLE(::google::int64, I64, name, val, txt)
+
+#define DECLARE_uint64(name)        DECLARE_VARIABLE(::google::uint64, U64, name)
+#define DEFINE_uint64(name,val,txt) DEFINE_VARIABLE(::google::uint64, U64, name, val, txt)
+
+#define DECLARE_double(name)          DECLARE_VARIABLE(double, D, name)
+#define DEFINE_double(name, val, txt) DEFINE_VARIABLE(double, D, name, val, txt)
+
+// Strings are trickier, because they're not a POD, so we can't
+// construct them at static-initialization time (instead they get
+// constructed at global-constructor time, which is much later).  To
+// try to avoid crashes in that case, we use a char buffer to store
+// the string, which we can static-initialize, and then placement-new
+// into it later.  It's not perfect, but the best we can do.
+
+namespace fLS {
+// The meaning of "string" might be different between now and when the
+// macros below get invoked (e.g., if someone is experimenting with
+// other string implementations that get defined after this file is
+// included).  Save the current meaning now and use it in the macros.
+typedef std::string clstring;
+
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const char *value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const clstring &value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           int value);
+}  // namespace fLS
+
+#define DECLARE_string(name)  namespace fLS { extern ::fLS::clstring& FLAGS_##name; } \
+                              using fLS::FLAGS_##name
+
+// We need to define a var named FLAGS_no##name so people don't define
+// --string and --nostring.  And we need a temporary place to put val
+// so we don't have to evaluate it twice.  Two great needs that go
+// great together!
+// The weird 'using' + 'extern' inside the fLS namespace is to work around
+// an unknown compiler bug/issue with the gcc 4.2.1 on SUSE 10.  See
+//    http://code.google.com/p/google-gflags/issues/detail?id=20
+#define DEFINE_string(name, val, txt)                                       \
+  namespace fLS {                                                           \
+    using ::fLS::clstring;                                                  \
+    static union { void* align; char s[sizeof(clstring)]; } s_##name[2];    \
+    clstring* const FLAGS_no##name = ::fLS::                                \
+                                   dont_pass0toDEFINE_string(s_##name[0].s, \
+                                                             val);          \
+    static ::google::FlagRegisterer o_##name(                  \
+        #name, "string", MAYBE_STRIPPED_HELP(txt), __FILE__,                \
+        s_##name[0].s, new (s_##name[1].s) clstring(*FLAGS_no##name));      \
+    extern clstring& FLAGS_##name;                                          \
+    using fLS::FLAGS_##name;                                                \
+    clstring& FLAGS_##name = *FLAGS_no##name;                               \
+  }                                                                         \
+  using fLS::FLAGS_##name
+
+#endif  // SWIG
+
+#endif  // GOOGLE_GFLAGS_H_
diff --git a/third_party/google-gflags/gen/arch/linux/arm/include/gflags/gflags_completions.h b/third_party/google-gflags/gen/arch/linux/arm/include/gflags/gflags_completions.h
new file mode 100644
index 0000000..9d9ce7a
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/linux/arm/include/gflags/gflags_completions.h
@@ -0,0 +1,121 @@
+// Copyright (c) 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// ---
+// Author: Dave Nicponski
+//
+// Implement helpful bash-style command line flag completions
+//
+// ** Functional API:
+// HandleCommandLineCompletions() should be called early during
+// program startup, but after command line flag code has been
+// initialized, such as the beginning of HandleCommandLineHelpFlags().
+// It checks the value of the flag --tab_completion_word.  If this
+// flag is empty, nothing happens here.  If it contains a string,
+// however, then HandleCommandLineCompletions() will hijack the
+// process, attempting to identify the intention behind this
+// completion.  Regardless of the outcome of this deduction, the
+// process will be terminated, similar to --helpshort flag
+// handling.
+//
+// ** Overview of Bash completions:
+// Bash can be told to programatically determine completions for the
+// current 'cursor word'.  It does this by (in this case) invoking a
+// command with some additional arguments identifying the command
+// being executed, the word being completed, and the previous word
+// (if any).  Bash then expects a sequence of output lines to be
+// printed to stdout.  If these lines all contain a common prefix
+// longer than the cursor word, bash will replace the cursor word
+// with that common prefix, and display nothing.  If there isn't such
+// a common prefix, bash will display the lines in pages using 'more'.
+//
+// ** Strategy taken for command line completions:
+// If we can deduce either the exact flag intended, or a common flag
+// prefix, we'll output exactly that.  Otherwise, if information
+// must be displayed to the user, we'll take the opportunity to add
+// some helpful information beyond just the flag name (specifically,
+// we'll include the default flag value and as much of the flag's
+// description as can fit on a single terminal line width, as specified
+// by the flag --tab_completion_columns).  Furthermore, we'll try to
+// make bash order the output such that the most useful or relevent
+// flags are the most likely to be shown at the top.
+//
+// ** Additional features:
+// To assist in finding that one really useful flag, substring matching
+// was implemented.  Before pressing a <TAB> to get completion for the
+// current word, you can append one or more '?' to the flag to do
+// substring matching.  Here's the semantics:
+//   --foo<TAB>     Show me all flags with names prefixed by 'foo'
+//   --foo?<TAB>    Show me all flags with 'foo' somewhere in the name
+//   --foo??<TAB>   Same as prior case, but also search in module
+//                  definition path for 'foo'
+//   --foo???<TAB>  Same as prior case, but also search in flag
+//                  descriptions for 'foo'
+// Finally, we'll trim the output to a relatively small number of
+// flags to keep bash quiet about the verbosity of output.  If one
+// really wanted to see all possible matches, appending a '+' to the
+// search word will force the exhaustive list of matches to be printed.
+//
+// ** How to have bash accept completions from a binary:
+// Bash requires that it be informed about each command that programmatic
+// completion should be enabled for.  Example addition to a .bashrc
+// file would be (your path to gflags_completions.sh file may differ):
+
+/*
+$ complete -o bashdefault -o default -o nospace -C                        \
+ '/usr/local/bin/gflags_completions.sh --tab_completion_columns $COLUMNS' \
+  time  env  binary_name  another_binary  [...]
+*/
+
+// This would allow the following to work:
+//   $ /path/to/binary_name --vmodule<TAB>
+// Or:
+//   $ ./bin/path/another_binary --gfs_u<TAB>
+// (etc)
+//
+// Sadly, it appears that bash gives no easy way to force this behavior for
+// all commands.  That's where the "time" in the above example comes in.
+// If you haven't specifically added a command to the list of completion
+// supported commands, you can still get completions by prefixing the
+// entire command with "env".
+//   $ env /some/brand/new/binary --vmod<TAB>
+// Assuming that "binary" is a newly compiled binary, this should still
+// produce the expected completion output.
+
+
+#ifndef GOOGLE_GFLAGS_COMPLETIONS_H_
+#define GOOGLE_GFLAGS_COMPLETIONS_H_
+
+namespace google {
+
+void HandleCommandLineCompletions(void);
+
+}
+
+#endif  // GOOGLE_GFLAGS_COMPLETIONS_H_
diff --git a/third_party/google-gflags/gen/arch/linux/arm/include/private/config.h b/third_party/google-gflags/gen/arch/linux/arm/include/private/config.h
new file mode 100644
index 0000000..98d8e1a
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/linux/arm/include/private/config.h
@@ -0,0 +1,110 @@
+/* src/config.h.  Generated from config.h.in by configure.  */
+/* src/config.h.in.  Generated from configure.ac by autoheader.  */
+
+/* Always the empty-string on non-windows systems. On windows, should be
+   "__declspec(dllexport)". This way, when we compile the dll, we export our
+   functions/classes. It's safe to define this here because config.h is only
+   used internally, to compile the DLL, and every DLL source file #includes
+   "config.h" before anything else. */
+#define GFLAGS_DLL_DECL /**/
+
+/* Namespace for Google classes */
+#define GOOGLE_NAMESPACE ::google
+
+/* Define to 1 if you have the <dlfcn.h> header file. */
+#define HAVE_DLFCN_H 1
+
+/* Define to 1 if you have the <fnmatch.h> header file. */
+#define HAVE_FNMATCH_H 1
+
+/* Define to 1 if you have the <inttypes.h> header file. */
+#define HAVE_INTTYPES_H 1
+
+/* Define to 1 if you have the <memory.h> header file. */
+#define HAVE_MEMORY_H 1
+
+/* define if the compiler implements namespaces */
+#define HAVE_NAMESPACES 1
+
+/* Define if you have POSIX threads libraries and header files. */
+#define HAVE_PTHREAD 1
+
+/* Define to 1 if you have the `putenv' function. */
+#define HAVE_PUTENV 1
+
+/* Define to 1 if you have the `setenv' function. */
+#define HAVE_SETENV 1
+
+/* Define to 1 if you have the <stdint.h> header file. */
+#define HAVE_STDINT_H 1
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#define HAVE_STDLIB_H 1
+
+/* Define to 1 if you have the <strings.h> header file. */
+#define HAVE_STRINGS_H 1
+
+/* Define to 1 if you have the <string.h> header file. */
+#define HAVE_STRING_H 1
+
+/* Define to 1 if you have the `strtoll' function. */
+#define HAVE_STRTOLL 1
+
+/* Define to 1 if you have the `strtoq' function. */
+#define HAVE_STRTOQ 1
+
+/* Define to 1 if you have the <sys/stat.h> header file. */
+#define HAVE_SYS_STAT_H 1
+
+/* Define to 1 if you have the <sys/types.h> header file. */
+#define HAVE_SYS_TYPES_H 1
+
+/* Define to 1 if you have the <unistd.h> header file. */
+#define HAVE_UNISTD_H 1
+
+/* define if your compiler has __attribute__ */
+#define HAVE___ATTRIBUTE__ 1
+
+/* Define to the sub-directory in which libtool stores uninstalled libraries.
+   */
+#define LT_OBJDIR ".libs/"
+
+/* Name of package */
+#define PACKAGE "gflags"
+
+/* Define to the address where bug reports for this package should be sent. */
+#define PACKAGE_BUGREPORT "opensource@google.com"
+
+/* Define to the full name of this package. */
+#define PACKAGE_NAME "gflags"
+
+/* Define to the full name and version of this package. */
+#define PACKAGE_STRING "gflags 1.5"
+
+/* Define to the one symbol short name of this package. */
+#define PACKAGE_TARNAME "gflags"
+
+/* Define to the home page for this package. */
+#define PACKAGE_URL ""
+
+/* Define to the version of this package. */
+#define PACKAGE_VERSION "1.5"
+
+/* Define to necessary symbol if this constant uses a non-standard name on
+   your system. */
+/* #undef PTHREAD_CREATE_JOINABLE */
+
+/* Define to 1 if you have the ANSI C header files. */
+#define STDC_HEADERS 1
+
+/* the namespace where STL code like vector<> is defined */
+#define STL_NAMESPACE std
+
+/* Version number of package */
+#define VERSION "1.5"
+
+/* Stops putting the code inside the Google namespace */
+#define _END_GOOGLE_NAMESPACE_ }
+
+/* Puts following code inside the Google namespace */
+#define _START_GOOGLE_NAMESPACE_ namespace google {
diff --git a/third_party/google-gflags/gen/arch/linux/ia32/include/gflags/gflags.h b/third_party/google-gflags/gen/arch/linux/ia32/include/gflags/gflags.h
new file mode 100644
index 0000000..0c2f997
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/linux/ia32/include/gflags/gflags.h
@@ -0,0 +1,585 @@
+// Copyright (c) 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// ---
+// Author: Ray Sidney
+// Revamped and reorganized by Craig Silverstein
+//
+// This is the file that should be included by any file which declares
+// or defines a command line flag or wants to parse command line flags
+// or print a program usage message (which will include information about
+// flags).  Executive summary, in the form of an example foo.cc file:
+//
+//    #include "foo.h"         // foo.h has a line "DECLARE_int32(start);"
+//    #include "validators.h"  // hypothetical file defining ValidateIsFile()
+//
+//    DEFINE_int32(end, 1000, "The last record to read");
+//
+//    DEFINE_string(filename, "my_file.txt", "The file to read");
+//    // Crash if the specified file does not exist.
+//    static bool dummy = RegisterFlagValidator(&FLAGS_filename,
+//                                              &ValidateIsFile);
+//
+//    DECLARE_bool(verbose); // some other file has a DEFINE_bool(verbose, ...)
+//
+//    void MyFunc() {
+//      if (FLAGS_verbose) printf("Records %d-%d\n", FLAGS_start, FLAGS_end);
+//    }
+//
+// Then, at the command-line:
+//    ./foo --noverbose --start=5 --end=100
+//
+// For more details, see
+//    doc/gflags.html
+//
+// --- A note about thread-safety:
+//
+// We describe many functions in this routine as being thread-hostile,
+// thread-compatible, or thread-safe.  Here are the meanings we use:
+//
+// thread-safe: it is safe for multiple threads to call this routine
+//   (or, when referring to a class, methods of this class)
+//   concurrently.
+// thread-hostile: it is not safe for multiple threads to call this
+//   routine (or methods of this class) concurrently.  In gflags,
+//   most thread-hostile routines are intended to be called early in,
+//   or even before, main() -- that is, before threads are spawned.
+// thread-compatible: it is safe for multiple threads to read from
+//   this variable (when applied to variables), or to call const
+//   methods of this class (when applied to classes), as long as no
+//   other thread is writing to the variable or calling non-const
+//   methods of this class.
+
+#ifndef GOOGLE_GFLAGS_H_
+#define GOOGLE_GFLAGS_H_
+
+#include <string>
+#include <vector>
+
+// We care a lot about number of bits things take up.  Unfortunately,
+// systems define their bit-specific ints in a lot of different ways.
+// We use our own way, and have a typedef to get there.
+// Note: these commands below may look like "#if 1" or "#if 0", but
+// that's because they were constructed that way at ./configure time.
+// Look at gflags.h.in to see how they're calculated (based on your config).
+#if 1
+#include <stdint.h>             // the normal place uint16_t is defined
+#endif
+#if 1
+#include <sys/types.h>          // the normal place u_int16_t is defined
+#endif
+#if 1
+#include <inttypes.h>           // a third place for uint16_t or u_int16_t
+#endif
+
+namespace google {
+
+#if 1      // the C99 format
+typedef int32_t int32;
+typedef uint32_t uint32;
+typedef int64_t int64;
+typedef uint64_t uint64;
+#elif 1   // the BSD format
+typedef int32_t int32;
+typedef u_int32_t uint32;
+typedef int64_t int64;
+typedef u_int64_t uint64;
+#elif 0     // the windows (vc7) format
+typedef __int32 int32;
+typedef unsigned __int32 uint32;
+typedef __int64 int64;
+typedef unsigned __int64 uint64;
+#else
+#error Do not know how to define a 32-bit integer quantity on your system
+#endif
+
+// --------------------------------------------------------------------
+// To actually define a flag in a file, use DEFINE_bool,
+// DEFINE_string, etc. at the bottom of this file.  You may also find
+// it useful to register a validator with the flag.  This ensures that
+// when the flag is parsed from the commandline, or is later set via
+// SetCommandLineOption, we call the validation function. It is _not_
+// called when you assign the value to the flag directly using the = operator.
+//
+// The validation function should return true if the flag value is valid, and
+// false otherwise. If the function returns false for the new setting of the
+// flag, the flag will retain its current value. If it returns false for the
+// default value, ParseCommandLineFlags() will die.
+//
+// This function is safe to call at global construct time (as in the
+// example below).
+//
+// Example use:
+//    static bool ValidatePort(const char* flagname, int32 value) {
+//       if (value > 0 && value < 32768)   // value is ok
+//         return true;
+//       printf("Invalid value for --%s: %d\n", flagname, (int)value);
+//       return false;
+//    }
+//    DEFINE_int32(port, 0, "What port to listen on");
+//    static bool dummy = RegisterFlagValidator(&FLAGS_port, &ValidatePort);
+
+// Returns true if successfully registered, false if not (because the
+// first argument doesn't point to a command-line flag, or because a
+// validator is already registered for this flag).
+bool RegisterFlagValidator(const bool* flag,
+                           bool (*validate_fn)(const char*, bool));
+bool RegisterFlagValidator(const int32* flag,
+                           bool (*validate_fn)(const char*, int32));
+bool RegisterFlagValidator(const int64* flag,
+                           bool (*validate_fn)(const char*, int64));
+bool RegisterFlagValidator(const uint64* flag,
+                           bool (*validate_fn)(const char*, uint64));
+bool RegisterFlagValidator(const double* flag,
+                           bool (*validate_fn)(const char*, double));
+bool RegisterFlagValidator(const std::string* flag,
+                           bool (*validate_fn)(const char*, const std::string&));
+
+
+// --------------------------------------------------------------------
+// These methods are the best way to get access to info about the
+// list of commandline flags.  Note that these routines are pretty slow.
+//   GetAllFlags: mostly-complete info about the list, sorted by file.
+//   ShowUsageWithFlags: pretty-prints the list to stdout (what --help does)
+//   ShowUsageWithFlagsRestrict: limit to filenames with restrict as a substr
+//
+// In addition to accessing flags, you can also access argv[0] (the program
+// name) and argv (the entire commandline), which we sock away a copy of.
+// These variables are static, so you should only set them once.
+
+struct CommandLineFlagInfo {
+  std::string name;           // the name of the flag
+  std::string type;           // the type of the flag: int32, etc
+  std::string description;    // the "help text" associated with the flag
+  std::string current_value;  // the current value, as a string
+  std::string default_value;  // the default value, as a string
+  std::string filename;       // 'cleaned' version of filename holding the flag
+  bool has_validator_fn;      // true if RegisterFlagValidator called on flag
+  bool is_default;            // true if the flag has the default value and
+                              // has not been set explicitly from the cmdline
+                              // or via SetCommandLineOption
+};
+
+// Using this inside of a validator is a recipe for a deadlock.
+// TODO(wojtekm) Fix locking when validators are running, to make it safe to
+// call validators during ParseAllFlags.
+// Also make sure then to uncomment the corresponding unit test in
+// commandlineflags_unittest.sh
+extern void GetAllFlags(std::vector<CommandLineFlagInfo>* OUTPUT);
+// These two are actually defined in commandlineflags_reporting.cc.
+extern void ShowUsageWithFlags(const char *argv0);  // what --help does
+extern void ShowUsageWithFlagsRestrict(const char *argv0, const char *restrict);
+
+// Create a descriptive string for a flag.
+// Goes to some trouble to make pretty line breaks.
+extern std::string DescribeOneFlag(const CommandLineFlagInfo& flag);
+
+// Thread-hostile; meant to be called before any threads are spawned.
+extern void SetArgv(int argc, const char** argv);
+// The following functions are thread-safe as long as SetArgv() is
+// only called before any threads start.
+extern const std::vector<std::string>& GetArgvs();  // all of argv as a vector
+extern const char* GetArgv();                // all of argv as a string
+extern const char* GetArgv0();               // only argv0
+extern uint32 GetArgvSum();                  // simple checksum of argv
+extern const char* ProgramInvocationName();  // argv0, or "UNKNOWN" if not set
+extern const char* ProgramInvocationShortName();   // basename(argv0)
+// ProgramUsage() is thread-safe as long as SetUsageMessage() is only
+// called before any threads start.
+extern const char* ProgramUsage();           // string set by SetUsageMessage()
+
+
+// --------------------------------------------------------------------
+// Normally you access commandline flags by just saying "if (FLAGS_foo)"
+// or whatever, and set them by calling "FLAGS_foo = bar" (or, more
+// commonly, via the DEFINE_foo macro).  But if you need a bit more
+// control, we have programmatic ways to get/set the flags as well.
+// These programmatic ways to access flags are thread-safe, but direct
+// access is only thread-compatible.
+
+// Return true iff the flagname was found.
+// OUTPUT is set to the flag's value, or unchanged if we return false.
+extern bool GetCommandLineOption(const char* name, std::string* OUTPUT);
+
+// Return true iff the flagname was found. OUTPUT is set to the flag's
+// CommandLineFlagInfo or unchanged if we return false.
+extern bool GetCommandLineFlagInfo(const char* name,
+                                   CommandLineFlagInfo* OUTPUT);
+
+// Return the CommandLineFlagInfo of the flagname.  exit() if name not found.
+// Example usage, to check if a flag's value is currently the default value:
+//   if (GetCommandLineFlagInfoOrDie("foo").is_default) ...
+extern CommandLineFlagInfo GetCommandLineFlagInfoOrDie(const char* name);
+
+enum FlagSettingMode {
+  // update the flag's value (can call this multiple times).
+  SET_FLAGS_VALUE,
+  // update the flag's value, but *only if* it has not yet been updated
+  // with SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef".
+  SET_FLAG_IF_DEFAULT,
+  // set the flag's default value to this.  If the flag has not yet updated
+  // yet (via SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef")
+  // change the flag's current value to the new default value as well.
+  SET_FLAGS_DEFAULT
+};
+
+// Set a particular flag ("command line option").  Returns a string
+// describing the new value that the option has been set to.  The
+// return value API is not well-specified, so basically just depend on
+// it to be empty if the setting failed for some reason -- the name is
+// not a valid flag name, or the value is not a valid value -- and
+// non-empty else.
+
+// SetCommandLineOption uses set_mode == SET_FLAGS_VALUE (the common case)
+extern std::string SetCommandLineOption(const char* name, const char* value);
+extern std::string SetCommandLineOptionWithMode(const char* name, const char* value,
+                                                FlagSettingMode set_mode);
+
+
+// --------------------------------------------------------------------
+// Saves the states (value, default value, whether the user has set
+// the flag, registered validators, etc) of all flags, and restores
+// them when the FlagSaver is destroyed.  This is very useful in
+// tests, say, when you want to let your tests change the flags, but
+// make sure that they get reverted to the original states when your
+// test is complete.
+//
+// Example usage:
+//   void TestFoo() {
+//     FlagSaver s1;
+//     FLAG_foo = false;
+//     FLAG_bar = "some value";
+//
+//     // test happens here.  You can return at any time
+//     // without worrying about restoring the FLAG values.
+//   }
+//
+// Note: This class is marked with __attribute__((unused)) because all the
+// work is done in the constructor and destructor, so in the standard
+// usage example above, the compiler would complain that it's an
+// unused variable.
+//
+// This class is thread-safe.
+
+class FlagSaver {
+ public:
+  FlagSaver();
+  ~FlagSaver();
+
+ private:
+  class FlagSaverImpl* impl_;   // we use pimpl here to keep API steady
+
+  FlagSaver(const FlagSaver&);  // no copying!
+  void operator=(const FlagSaver&);
+} __attribute__ ((unused));
+
+// --------------------------------------------------------------------
+// Some deprecated or hopefully-soon-to-be-deprecated functions.
+
+// This is often used for logging.  TODO(csilvers): figure out a better way
+extern std::string CommandlineFlagsIntoString();
+// Usually where this is used, a FlagSaver should be used instead.
+extern bool ReadFlagsFromString(const std::string& flagfilecontents,
+                                const char* prog_name,
+                                bool errors_are_fatal); // uses SET_FLAGS_VALUE
+
+// These let you manually implement --flagfile functionality.
+// DEPRECATED.
+extern bool AppendFlagsIntoFile(const std::string& filename, const char* prog_name);
+extern bool SaveCommandFlags();  // actually defined in google.cc !
+extern bool ReadFromFlagsFile(const std::string& filename, const char* prog_name,
+                              bool errors_are_fatal);   // uses SET_FLAGS_VALUE
+
+
+// --------------------------------------------------------------------
+// Useful routines for initializing flags from the environment.
+// In each case, if 'varname' does not exist in the environment
+// return defval.  If 'varname' does exist but is not valid
+// (e.g., not a number for an int32 flag), abort with an error.
+// Otherwise, return the value.  NOTE: for booleans, for true use
+// 't' or 'T' or 'true' or '1', for false 'f' or 'F' or 'false' or '0'.
+
+extern bool BoolFromEnv(const char *varname, bool defval);
+extern int32 Int32FromEnv(const char *varname, int32 defval);
+extern int64 Int64FromEnv(const char *varname, int64 defval);
+extern uint64 Uint64FromEnv(const char *varname, uint64 defval);
+extern double DoubleFromEnv(const char *varname, double defval);
+extern const char *StringFromEnv(const char *varname, const char *defval);
+
+
+// --------------------------------------------------------------------
+// The next two functions parse commandlineflags from main():
+
+// Set the "usage" message for this program.  For example:
+//   string usage("This program does nothing.  Sample usage:\n");
+//   usage += argv[0] + " <uselessarg1> <uselessarg2>";
+//   SetUsageMessage(usage);
+// Do not include commandline flags in the usage: we do that for you!
+// Thread-hostile; meant to be called before any threads are spawned.
+extern void SetUsageMessage(const std::string& usage);
+
+// Looks for flags in argv and parses them.  Rearranges argv to put
+// flags first, or removes them entirely if remove_flags is true.
+// If a flag is defined more than once in the command line or flag
+// file, the last definition is used.  Returns the index (into argv)
+// of the first non-flag argument.
+// See top-of-file for more details on this function.
+#ifndef SWIG   // In swig, use ParseCommandLineFlagsScript() instead.
+extern uint32 ParseCommandLineFlags(int *argc, char*** argv,
+                                    bool remove_flags);
+#endif
+
+
+// Calls to ParseCommandLineNonHelpFlags and then to
+// HandleCommandLineHelpFlags can be used instead of a call to
+// ParseCommandLineFlags during initialization, in order to allow for
+// changing default values for some FLAGS (via
+// e.g. SetCommandLineOptionWithMode calls) between the time of
+// command line parsing and the time of dumping help information for
+// the flags as a result of command line parsing.  If a flag is
+// defined more than once in the command line or flag file, the last
+// definition is used.  Returns the index (into argv) of the first
+// non-flag argument.  (If remove_flags is true, will always return 1.)
+extern uint32 ParseCommandLineNonHelpFlags(int *argc, char*** argv,
+                                           bool remove_flags);
+// This is actually defined in commandlineflags_reporting.cc.
+// This function is misnamed (it also handles --version, etc.), but
+// it's too late to change that now. :-(
+extern void HandleCommandLineHelpFlags();   // in commandlineflags_reporting.cc
+
+// Allow command line reparsing.  Disables the error normally
+// generated when an unknown flag is found, since it may be found in a
+// later parse.  Thread-hostile; meant to be called before any threads
+// are spawned.
+extern void AllowCommandLineReparsing();
+
+// Reparse the flags that have not yet been recognized.  Only flags
+// registered since the last parse will be recognized.  Any flag value
+// must be provided as part of the argument using "=", not as a
+// separate command line argument that follows the flag argument.
+// Intended for handling flags from dynamically loaded libraries,
+// since their flags are not registered until they are loaded.
+// Returns the index (into the original argv) of the first non-flag
+// argument.  (If remove_flags is true, will always return 1.)
+extern uint32 ReparseCommandLineNonHelpFlags();
+
+// Clean up memory allocated by flags.  This is only needed to reduce
+// the quantity of "potentially leaked" reports emitted by memory
+// debugging tools such as valgrind.  It is not required for normal
+// operation, or for the perftools heap-checker.  It must only be called
+// when the process is about to exit, and all threads that might
+// access flags are quiescent.  Referencing flags after this is called
+// will have unexpected consequences.  This is not safe to run when
+// multiple threads might be running: the function is thread-hostile.
+extern void ShutDownCommandLineFlags();
+
+
+// --------------------------------------------------------------------
+// Now come the command line flag declaration/definition macros that
+// will actually be used.  They're kind of hairy.  A major reason
+// for this is initialization: we want people to be able to access
+// variables in global constructors and have that not crash, even if
+// their global constructor runs before the global constructor here.
+// (Obviously, we can't guarantee the flags will have the correct
+// default value in that case, but at least accessing them is safe.)
+// The only way to do that is have flags point to a static buffer.
+// So we make one, using a union to ensure proper alignment, and
+// then use placement-new to actually set up the flag with the
+// correct default value.  In the same vein, we have to worry about
+// flag access in global destructors, so FlagRegisterer has to be
+// careful never to destroy the flag-values it constructs.
+//
+// Note that when we define a flag variable FLAGS_<name>, we also
+// preemptively define a junk variable, FLAGS_no<name>.  This is to
+// cause a link-time error if someone tries to define 2 flags with
+// names like "logging" and "nologging".  We do this because a bool
+// flag FLAG can be set from the command line to true with a "-FLAG"
+// argument, and to false with a "-noFLAG" argument, and so this can
+// potentially avert confusion.
+//
+// We also put flags into their own namespace.  It is purposefully
+// named in an opaque way that people should have trouble typing
+// directly.  The idea is that DEFINE puts the flag in the weird
+// namespace, and DECLARE imports the flag from there into the current
+// namespace.  The net result is to force people to use DECLARE to get
+// access to a flag, rather than saying "extern bool FLAGS_whatever;"
+// or some such instead.  We want this so we can put extra
+// functionality (like sanity-checking) in DECLARE if we want, and
+// make sure it is picked up everywhere.
+//
+// We also put the type of the variable in the namespace, so that
+// people can't DECLARE_int32 something that they DEFINE_bool'd
+// elsewhere.
+
+class FlagRegisterer {
+ public:
+  FlagRegisterer(const char* name, const char* type,
+                 const char* help, const char* filename,
+                 void* current_storage, void* defvalue_storage);
+};
+
+extern bool FlagsTypeWarn(const char *name);
+
+// If your application #defines STRIP_FLAG_HELP to a non-zero value
+// before #including this file, we remove the help message from the
+// binary file. This can reduce the size of the resulting binary
+// somewhat, and may also be useful for security reasons.
+
+extern const char kStrippedFlagHelp[];
+
+}
+
+#ifndef SWIG  // In swig, ignore the main flag declarations
+
+#if defined(STRIP_FLAG_HELP) && STRIP_FLAG_HELP > 0
+// Need this construct to avoid the 'defined but not used' warning.
+#define MAYBE_STRIPPED_HELP(txt) (false ? (txt) : ::google::kStrippedFlagHelp)
+#else
+#define MAYBE_STRIPPED_HELP(txt) txt
+#endif
+
+// Each command-line flag has two variables associated with it: one
+// with the current value, and one with the default value.  However,
+// we have a third variable, which is where value is assigned; it's a
+// constant.  This guarantees that FLAG_##value is initialized at
+// static initialization time (e.g. before program-start) rather than
+// than global construction time (which is after program-start but
+// before main), at least when 'value' is a compile-time constant.  We
+// use a small trick for the "default value" variable, and call it
+// FLAGS_no<name>.  This serves the second purpose of assuring a
+// compile error if someone tries to define a flag named no<name>
+// which is illegal (--foo and --nofoo both affect the "foo" flag).
+#define DEFINE_VARIABLE(type, shorttype, name, value, help) \
+  namespace fL##shorttype {                                     \
+    static const type FLAGS_nono##name = value;                 \
+    type FLAGS_##name = FLAGS_nono##name;                       \
+    type FLAGS_no##name = FLAGS_nono##name;                     \
+    static ::google::FlagRegisterer o_##name(      \
+      #name, #type, MAYBE_STRIPPED_HELP(help), __FILE__,        \
+      &FLAGS_##name, &FLAGS_no##name);                          \
+  }                                                             \
+  using fL##shorttype::FLAGS_##name
+
+#define DECLARE_VARIABLE(type, shorttype, name) \
+  namespace fL##shorttype {                     \
+    extern type FLAGS_##name;                   \
+  }                                             \
+  using fL##shorttype::FLAGS_##name
+
+// For DEFINE_bool, we want to do the extra check that the passed-in
+// value is actually a bool, and not a string or something that can be
+// coerced to a bool.  These declarations (no definition needed!) will
+// help us do that, and never evaluate From, which is important.
+// We'll use 'sizeof(IsBool(val))' to distinguish. This code requires
+// that the compiler have different sizes for bool & double. Since
+// this is not guaranteed by the standard, we check it with a
+// compile-time assert (msg[-1] will give a compile-time error).
+namespace fLB {
+struct CompileAssert {};
+typedef CompileAssert expected_sizeof_double_neq_sizeof_bool[
+                      (sizeof(double) != sizeof(bool)) ? 1 : -1];
+template<typename From> double IsBoolFlag(const From& from);
+bool IsBoolFlag(bool from);
+}  // namespace fLB
+
+#define DECLARE_bool(name)          DECLARE_VARIABLE(bool, B, name)
+#define DEFINE_bool(name, val, txt)                                       \
+  namespace fLB {                                                         \
+    typedef ::fLB::CompileAssert FLAG_##name##_value_is_not_a_bool[       \
+            (sizeof(::fLB::IsBoolFlag(val)) != sizeof(double)) ? 1 : -1]; \
+  }                                                                       \
+  DEFINE_VARIABLE(bool, B, name, val, txt)
+
+#define DECLARE_int32(name)         DECLARE_VARIABLE(::google::int32, I, name)
+#define DEFINE_int32(name,val,txt)  DEFINE_VARIABLE(::google::int32, I, name, val, txt)
+
+#define DECLARE_int64(name)         DECLARE_VARIABLE(::google::int64, I64, name)
+#define DEFINE_int64(name,val,txt)  DEFINE_VARIABLE(::google::int64, I64, name, val, txt)
+
+#define DECLARE_uint64(name)        DECLARE_VARIABLE(::google::uint64, U64, name)
+#define DEFINE_uint64(name,val,txt) DEFINE_VARIABLE(::google::uint64, U64, name, val, txt)
+
+#define DECLARE_double(name)          DECLARE_VARIABLE(double, D, name)
+#define DEFINE_double(name, val, txt) DEFINE_VARIABLE(double, D, name, val, txt)
+
+// Strings are trickier, because they're not a POD, so we can't
+// construct them at static-initialization time (instead they get
+// constructed at global-constructor time, which is much later).  To
+// try to avoid crashes in that case, we use a char buffer to store
+// the string, which we can static-initialize, and then placement-new
+// into it later.  It's not perfect, but the best we can do.
+
+namespace fLS {
+// The meaning of "string" might be different between now and when the
+// macros below get invoked (e.g., if someone is experimenting with
+// other string implementations that get defined after this file is
+// included).  Save the current meaning now and use it in the macros.
+typedef std::string clstring;
+
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const char *value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const clstring &value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           int value);
+}  // namespace fLS
+
+#define DECLARE_string(name)  namespace fLS { extern ::fLS::clstring& FLAGS_##name; } \
+                              using fLS::FLAGS_##name
+
+// We need to define a var named FLAGS_no##name so people don't define
+// --string and --nostring.  And we need a temporary place to put val
+// so we don't have to evaluate it twice.  Two great needs that go
+// great together!
+// The weird 'using' + 'extern' inside the fLS namespace is to work around
+// an unknown compiler bug/issue with the gcc 4.2.1 on SUSE 10.  See
+//    http://code.google.com/p/google-gflags/issues/detail?id=20
+#define DEFINE_string(name, val, txt)                                       \
+  namespace fLS {                                                           \
+    using ::fLS::clstring;                                                  \
+    static union { void* align; char s[sizeof(clstring)]; } s_##name[2];    \
+    clstring* const FLAGS_no##name = ::fLS::                                \
+                                   dont_pass0toDEFINE_string(s_##name[0].s, \
+                                                             val);          \
+    static ::google::FlagRegisterer o_##name(                  \
+        #name, "string", MAYBE_STRIPPED_HELP(txt), __FILE__,                \
+        s_##name[0].s, new (s_##name[1].s) clstring(*FLAGS_no##name));      \
+    extern clstring& FLAGS_##name;                                          \
+    using fLS::FLAGS_##name;                                                \
+    clstring& FLAGS_##name = *FLAGS_no##name;                               \
+  }                                                                         \
+  using fLS::FLAGS_##name
+
+#endif  // SWIG
+
+#endif  // GOOGLE_GFLAGS_H_
diff --git a/third_party/google-gflags/gen/arch/linux/ia32/include/gflags/gflags_completions.h b/third_party/google-gflags/gen/arch/linux/ia32/include/gflags/gflags_completions.h
new file mode 100644
index 0000000..9d9ce7a
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/linux/ia32/include/gflags/gflags_completions.h
@@ -0,0 +1,121 @@
+// Copyright (c) 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// ---
+// Author: Dave Nicponski
+//
+// Implement helpful bash-style command line flag completions
+//
+// ** Functional API:
+// HandleCommandLineCompletions() should be called early during
+// program startup, but after command line flag code has been
+// initialized, such as the beginning of HandleCommandLineHelpFlags().
+// It checks the value of the flag --tab_completion_word.  If this
+// flag is empty, nothing happens here.  If it contains a string,
+// however, then HandleCommandLineCompletions() will hijack the
+// process, attempting to identify the intention behind this
+// completion.  Regardless of the outcome of this deduction, the
+// process will be terminated, similar to --helpshort flag
+// handling.
+//
+// ** Overview of Bash completions:
+// Bash can be told to programatically determine completions for the
+// current 'cursor word'.  It does this by (in this case) invoking a
+// command with some additional arguments identifying the command
+// being executed, the word being completed, and the previous word
+// (if any).  Bash then expects a sequence of output lines to be
+// printed to stdout.  If these lines all contain a common prefix
+// longer than the cursor word, bash will replace the cursor word
+// with that common prefix, and display nothing.  If there isn't such
+// a common prefix, bash will display the lines in pages using 'more'.
+//
+// ** Strategy taken for command line completions:
+// If we can deduce either the exact flag intended, or a common flag
+// prefix, we'll output exactly that.  Otherwise, if information
+// must be displayed to the user, we'll take the opportunity to add
+// some helpful information beyond just the flag name (specifically,
+// we'll include the default flag value and as much of the flag's
+// description as can fit on a single terminal line width, as specified
+// by the flag --tab_completion_columns).  Furthermore, we'll try to
+// make bash order the output such that the most useful or relevent
+// flags are the most likely to be shown at the top.
+//
+// ** Additional features:
+// To assist in finding that one really useful flag, substring matching
+// was implemented.  Before pressing a <TAB> to get completion for the
+// current word, you can append one or more '?' to the flag to do
+// substring matching.  Here's the semantics:
+//   --foo<TAB>     Show me all flags with names prefixed by 'foo'
+//   --foo?<TAB>    Show me all flags with 'foo' somewhere in the name
+//   --foo??<TAB>   Same as prior case, but also search in module
+//                  definition path for 'foo'
+//   --foo???<TAB>  Same as prior case, but also search in flag
+//                  descriptions for 'foo'
+// Finally, we'll trim the output to a relatively small number of
+// flags to keep bash quiet about the verbosity of output.  If one
+// really wanted to see all possible matches, appending a '+' to the
+// search word will force the exhaustive list of matches to be printed.
+//
+// ** How to have bash accept completions from a binary:
+// Bash requires that it be informed about each command that programmatic
+// completion should be enabled for.  Example addition to a .bashrc
+// file would be (your path to gflags_completions.sh file may differ):
+
+/*
+$ complete -o bashdefault -o default -o nospace -C                        \
+ '/usr/local/bin/gflags_completions.sh --tab_completion_columns $COLUMNS' \
+  time  env  binary_name  another_binary  [...]
+*/
+
+// This would allow the following to work:
+//   $ /path/to/binary_name --vmodule<TAB>
+// Or:
+//   $ ./bin/path/another_binary --gfs_u<TAB>
+// (etc)
+//
+// Sadly, it appears that bash gives no easy way to force this behavior for
+// all commands.  That's where the "time" in the above example comes in.
+// If you haven't specifically added a command to the list of completion
+// supported commands, you can still get completions by prefixing the
+// entire command with "env".
+//   $ env /some/brand/new/binary --vmod<TAB>
+// Assuming that "binary" is a newly compiled binary, this should still
+// produce the expected completion output.
+
+
+#ifndef GOOGLE_GFLAGS_COMPLETIONS_H_
+#define GOOGLE_GFLAGS_COMPLETIONS_H_
+
+namespace google {
+
+void HandleCommandLineCompletions(void);
+
+}
+
+#endif  // GOOGLE_GFLAGS_COMPLETIONS_H_
diff --git a/third_party/google-gflags/gen/arch/linux/ia32/include/private/config.h b/third_party/google-gflags/gen/arch/linux/ia32/include/private/config.h
new file mode 100644
index 0000000..98d8e1a
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/linux/ia32/include/private/config.h
@@ -0,0 +1,110 @@
+/* src/config.h.  Generated from config.h.in by configure.  */
+/* src/config.h.in.  Generated from configure.ac by autoheader.  */
+
+/* Always the empty-string on non-windows systems. On windows, should be
+   "__declspec(dllexport)". This way, when we compile the dll, we export our
+   functions/classes. It's safe to define this here because config.h is only
+   used internally, to compile the DLL, and every DLL source file #includes
+   "config.h" before anything else. */
+#define GFLAGS_DLL_DECL /**/
+
+/* Namespace for Google classes */
+#define GOOGLE_NAMESPACE ::google
+
+/* Define to 1 if you have the <dlfcn.h> header file. */
+#define HAVE_DLFCN_H 1
+
+/* Define to 1 if you have the <fnmatch.h> header file. */
+#define HAVE_FNMATCH_H 1
+
+/* Define to 1 if you have the <inttypes.h> header file. */
+#define HAVE_INTTYPES_H 1
+
+/* Define to 1 if you have the <memory.h> header file. */
+#define HAVE_MEMORY_H 1
+
+/* define if the compiler implements namespaces */
+#define HAVE_NAMESPACES 1
+
+/* Define if you have POSIX threads libraries and header files. */
+#define HAVE_PTHREAD 1
+
+/* Define to 1 if you have the `putenv' function. */
+#define HAVE_PUTENV 1
+
+/* Define to 1 if you have the `setenv' function. */
+#define HAVE_SETENV 1
+
+/* Define to 1 if you have the <stdint.h> header file. */
+#define HAVE_STDINT_H 1
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#define HAVE_STDLIB_H 1
+
+/* Define to 1 if you have the <strings.h> header file. */
+#define HAVE_STRINGS_H 1
+
+/* Define to 1 if you have the <string.h> header file. */
+#define HAVE_STRING_H 1
+
+/* Define to 1 if you have the `strtoll' function. */
+#define HAVE_STRTOLL 1
+
+/* Define to 1 if you have the `strtoq' function. */
+#define HAVE_STRTOQ 1
+
+/* Define to 1 if you have the <sys/stat.h> header file. */
+#define HAVE_SYS_STAT_H 1
+
+/* Define to 1 if you have the <sys/types.h> header file. */
+#define HAVE_SYS_TYPES_H 1
+
+/* Define to 1 if you have the <unistd.h> header file. */
+#define HAVE_UNISTD_H 1
+
+/* define if your compiler has __attribute__ */
+#define HAVE___ATTRIBUTE__ 1
+
+/* Define to the sub-directory in which libtool stores uninstalled libraries.
+   */
+#define LT_OBJDIR ".libs/"
+
+/* Name of package */
+#define PACKAGE "gflags"
+
+/* Define to the address where bug reports for this package should be sent. */
+#define PACKAGE_BUGREPORT "opensource@google.com"
+
+/* Define to the full name of this package. */
+#define PACKAGE_NAME "gflags"
+
+/* Define to the full name and version of this package. */
+#define PACKAGE_STRING "gflags 1.5"
+
+/* Define to the one symbol short name of this package. */
+#define PACKAGE_TARNAME "gflags"
+
+/* Define to the home page for this package. */
+#define PACKAGE_URL ""
+
+/* Define to the version of this package. */
+#define PACKAGE_VERSION "1.5"
+
+/* Define to necessary symbol if this constant uses a non-standard name on
+   your system. */
+/* #undef PTHREAD_CREATE_JOINABLE */
+
+/* Define to 1 if you have the ANSI C header files. */
+#define STDC_HEADERS 1
+
+/* the namespace where STL code like vector<> is defined */
+#define STL_NAMESPACE std
+
+/* Version number of package */
+#define VERSION "1.5"
+
+/* Stops putting the code inside the Google namespace */
+#define _END_GOOGLE_NAMESPACE_ }
+
+/* Puts following code inside the Google namespace */
+#define _START_GOOGLE_NAMESPACE_ namespace google {
diff --git a/third_party/google-gflags/gen/arch/linux/x64/include/gflags/gflags.h b/third_party/google-gflags/gen/arch/linux/x64/include/gflags/gflags.h
new file mode 100644
index 0000000..0c2f997
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/linux/x64/include/gflags/gflags.h
@@ -0,0 +1,585 @@
+// Copyright (c) 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// ---
+// Author: Ray Sidney
+// Revamped and reorganized by Craig Silverstein
+//
+// This is the file that should be included by any file which declares
+// or defines a command line flag or wants to parse command line flags
+// or print a program usage message (which will include information about
+// flags).  Executive summary, in the form of an example foo.cc file:
+//
+//    #include "foo.h"         // foo.h has a line "DECLARE_int32(start);"
+//    #include "validators.h"  // hypothetical file defining ValidateIsFile()
+//
+//    DEFINE_int32(end, 1000, "The last record to read");
+//
+//    DEFINE_string(filename, "my_file.txt", "The file to read");
+//    // Crash if the specified file does not exist.
+//    static bool dummy = RegisterFlagValidator(&FLAGS_filename,
+//                                              &ValidateIsFile);
+//
+//    DECLARE_bool(verbose); // some other file has a DEFINE_bool(verbose, ...)
+//
+//    void MyFunc() {
+//      if (FLAGS_verbose) printf("Records %d-%d\n", FLAGS_start, FLAGS_end);
+//    }
+//
+// Then, at the command-line:
+//    ./foo --noverbose --start=5 --end=100
+//
+// For more details, see
+//    doc/gflags.html
+//
+// --- A note about thread-safety:
+//
+// We describe many functions in this routine as being thread-hostile,
+// thread-compatible, or thread-safe.  Here are the meanings we use:
+//
+// thread-safe: it is safe for multiple threads to call this routine
+//   (or, when referring to a class, methods of this class)
+//   concurrently.
+// thread-hostile: it is not safe for multiple threads to call this
+//   routine (or methods of this class) concurrently.  In gflags,
+//   most thread-hostile routines are intended to be called early in,
+//   or even before, main() -- that is, before threads are spawned.
+// thread-compatible: it is safe for multiple threads to read from
+//   this variable (when applied to variables), or to call const
+//   methods of this class (when applied to classes), as long as no
+//   other thread is writing to the variable or calling non-const
+//   methods of this class.
+
+#ifndef GOOGLE_GFLAGS_H_
+#define GOOGLE_GFLAGS_H_
+
+#include <string>
+#include <vector>
+
+// We care a lot about number of bits things take up.  Unfortunately,
+// systems define their bit-specific ints in a lot of different ways.
+// We use our own way, and have a typedef to get there.
+// Note: these commands below may look like "#if 1" or "#if 0", but
+// that's because they were constructed that way at ./configure time.
+// Look at gflags.h.in to see how they're calculated (based on your config).
+#if 1
+#include <stdint.h>             // the normal place uint16_t is defined
+#endif
+#if 1
+#include <sys/types.h>          // the normal place u_int16_t is defined
+#endif
+#if 1
+#include <inttypes.h>           // a third place for uint16_t or u_int16_t
+#endif
+
+namespace google {
+
+#if 1      // the C99 format
+typedef int32_t int32;
+typedef uint32_t uint32;
+typedef int64_t int64;
+typedef uint64_t uint64;
+#elif 1   // the BSD format
+typedef int32_t int32;
+typedef u_int32_t uint32;
+typedef int64_t int64;
+typedef u_int64_t uint64;
+#elif 0     // the windows (vc7) format
+typedef __int32 int32;
+typedef unsigned __int32 uint32;
+typedef __int64 int64;
+typedef unsigned __int64 uint64;
+#else
+#error Do not know how to define a 32-bit integer quantity on your system
+#endif
+
+// --------------------------------------------------------------------
+// To actually define a flag in a file, use DEFINE_bool,
+// DEFINE_string, etc. at the bottom of this file.  You may also find
+// it useful to register a validator with the flag.  This ensures that
+// when the flag is parsed from the commandline, or is later set via
+// SetCommandLineOption, we call the validation function. It is _not_
+// called when you assign the value to the flag directly using the = operator.
+//
+// The validation function should return true if the flag value is valid, and
+// false otherwise. If the function returns false for the new setting of the
+// flag, the flag will retain its current value. If it returns false for the
+// default value, ParseCommandLineFlags() will die.
+//
+// This function is safe to call at global construct time (as in the
+// example below).
+//
+// Example use:
+//    static bool ValidatePort(const char* flagname, int32 value) {
+//       if (value > 0 && value < 32768)   // value is ok
+//         return true;
+//       printf("Invalid value for --%s: %d\n", flagname, (int)value);
+//       return false;
+//    }
+//    DEFINE_int32(port, 0, "What port to listen on");
+//    static bool dummy = RegisterFlagValidator(&FLAGS_port, &ValidatePort);
+
+// Returns true if successfully registered, false if not (because the
+// first argument doesn't point to a command-line flag, or because a
+// validator is already registered for this flag).
+bool RegisterFlagValidator(const bool* flag,
+                           bool (*validate_fn)(const char*, bool));
+bool RegisterFlagValidator(const int32* flag,
+                           bool (*validate_fn)(const char*, int32));
+bool RegisterFlagValidator(const int64* flag,
+                           bool (*validate_fn)(const char*, int64));
+bool RegisterFlagValidator(const uint64* flag,
+                           bool (*validate_fn)(const char*, uint64));
+bool RegisterFlagValidator(const double* flag,
+                           bool (*validate_fn)(const char*, double));
+bool RegisterFlagValidator(const std::string* flag,
+                           bool (*validate_fn)(const char*, const std::string&));
+
+
+// --------------------------------------------------------------------
+// These methods are the best way to get access to info about the
+// list of commandline flags.  Note that these routines are pretty slow.
+//   GetAllFlags: mostly-complete info about the list, sorted by file.
+//   ShowUsageWithFlags: pretty-prints the list to stdout (what --help does)
+//   ShowUsageWithFlagsRestrict: limit to filenames with restrict as a substr
+//
+// In addition to accessing flags, you can also access argv[0] (the program
+// name) and argv (the entire commandline), which we sock away a copy of.
+// These variables are static, so you should only set them once.
+
+struct CommandLineFlagInfo {
+  std::string name;           // the name of the flag
+  std::string type;           // the type of the flag: int32, etc
+  std::string description;    // the "help text" associated with the flag
+  std::string current_value;  // the current value, as a string
+  std::string default_value;  // the default value, as a string
+  std::string filename;       // 'cleaned' version of filename holding the flag
+  bool has_validator_fn;      // true if RegisterFlagValidator called on flag
+  bool is_default;            // true if the flag has the default value and
+                              // has not been set explicitly from the cmdline
+                              // or via SetCommandLineOption
+};
+
+// Using this inside of a validator is a recipe for a deadlock.
+// TODO(wojtekm) Fix locking when validators are running, to make it safe to
+// call validators during ParseAllFlags.
+// Also make sure then to uncomment the corresponding unit test in
+// commandlineflags_unittest.sh
+extern void GetAllFlags(std::vector<CommandLineFlagInfo>* OUTPUT);
+// These two are actually defined in commandlineflags_reporting.cc.
+extern void ShowUsageWithFlags(const char *argv0);  // what --help does
+extern void ShowUsageWithFlagsRestrict(const char *argv0, const char *restrict);
+
+// Create a descriptive string for a flag.
+// Goes to some trouble to make pretty line breaks.
+extern std::string DescribeOneFlag(const CommandLineFlagInfo& flag);
+
+// Thread-hostile; meant to be called before any threads are spawned.
+extern void SetArgv(int argc, const char** argv);
+// The following functions are thread-safe as long as SetArgv() is
+// only called before any threads start.
+extern const std::vector<std::string>& GetArgvs();  // all of argv as a vector
+extern const char* GetArgv();                // all of argv as a string
+extern const char* GetArgv0();               // only argv0
+extern uint32 GetArgvSum();                  // simple checksum of argv
+extern const char* ProgramInvocationName();  // argv0, or "UNKNOWN" if not set
+extern const char* ProgramInvocationShortName();   // basename(argv0)
+// ProgramUsage() is thread-safe as long as SetUsageMessage() is only
+// called before any threads start.
+extern const char* ProgramUsage();           // string set by SetUsageMessage()
+
+
+// --------------------------------------------------------------------
+// Normally you access commandline flags by just saying "if (FLAGS_foo)"
+// or whatever, and set them by calling "FLAGS_foo = bar" (or, more
+// commonly, via the DEFINE_foo macro).  But if you need a bit more
+// control, we have programmatic ways to get/set the flags as well.
+// These programmatic ways to access flags are thread-safe, but direct
+// access is only thread-compatible.
+
+// Return true iff the flagname was found.
+// OUTPUT is set to the flag's value, or unchanged if we return false.
+extern bool GetCommandLineOption(const char* name, std::string* OUTPUT);
+
+// Return true iff the flagname was found. OUTPUT is set to the flag's
+// CommandLineFlagInfo or unchanged if we return false.
+extern bool GetCommandLineFlagInfo(const char* name,
+                                   CommandLineFlagInfo* OUTPUT);
+
+// Return the CommandLineFlagInfo of the flagname.  exit() if name not found.
+// Example usage, to check if a flag's value is currently the default value:
+//   if (GetCommandLineFlagInfoOrDie("foo").is_default) ...
+extern CommandLineFlagInfo GetCommandLineFlagInfoOrDie(const char* name);
+
+enum FlagSettingMode {
+  // update the flag's value (can call this multiple times).
+  SET_FLAGS_VALUE,
+  // update the flag's value, but *only if* it has not yet been updated
+  // with SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef".
+  SET_FLAG_IF_DEFAULT,
+  // set the flag's default value to this.  If the flag has not yet updated
+  // yet (via SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef")
+  // change the flag's current value to the new default value as well.
+  SET_FLAGS_DEFAULT
+};
+
+// Set a particular flag ("command line option").  Returns a string
+// describing the new value that the option has been set to.  The
+// return value API is not well-specified, so basically just depend on
+// it to be empty if the setting failed for some reason -- the name is
+// not a valid flag name, or the value is not a valid value -- and
+// non-empty else.
+
+// SetCommandLineOption uses set_mode == SET_FLAGS_VALUE (the common case)
+extern std::string SetCommandLineOption(const char* name, const char* value);
+extern std::string SetCommandLineOptionWithMode(const char* name, const char* value,
+                                                FlagSettingMode set_mode);
+
+
+// --------------------------------------------------------------------
+// Saves the states (value, default value, whether the user has set
+// the flag, registered validators, etc) of all flags, and restores
+// them when the FlagSaver is destroyed.  This is very useful in
+// tests, say, when you want to let your tests change the flags, but
+// make sure that they get reverted to the original states when your
+// test is complete.
+//
+// Example usage:
+//   void TestFoo() {
+//     FlagSaver s1;
+//     FLAG_foo = false;
+//     FLAG_bar = "some value";
+//
+//     // test happens here.  You can return at any time
+//     // without worrying about restoring the FLAG values.
+//   }
+//
+// Note: This class is marked with __attribute__((unused)) because all the
+// work is done in the constructor and destructor, so in the standard
+// usage example above, the compiler would complain that it's an
+// unused variable.
+//
+// This class is thread-safe.
+
+class FlagSaver {
+ public:
+  FlagSaver();
+  ~FlagSaver();
+
+ private:
+  class FlagSaverImpl* impl_;   // we use pimpl here to keep API steady
+
+  FlagSaver(const FlagSaver&);  // no copying!
+  void operator=(const FlagSaver&);
+} __attribute__ ((unused));
+
+// --------------------------------------------------------------------
+// Some deprecated or hopefully-soon-to-be-deprecated functions.
+
+// This is often used for logging.  TODO(csilvers): figure out a better way
+extern std::string CommandlineFlagsIntoString();
+// Usually where this is used, a FlagSaver should be used instead.
+extern bool ReadFlagsFromString(const std::string& flagfilecontents,
+                                const char* prog_name,
+                                bool errors_are_fatal); // uses SET_FLAGS_VALUE
+
+// These let you manually implement --flagfile functionality.
+// DEPRECATED.
+extern bool AppendFlagsIntoFile(const std::string& filename, const char* prog_name);
+extern bool SaveCommandFlags();  // actually defined in google.cc !
+extern bool ReadFromFlagsFile(const std::string& filename, const char* prog_name,
+                              bool errors_are_fatal);   // uses SET_FLAGS_VALUE
+
+
+// --------------------------------------------------------------------
+// Useful routines for initializing flags from the environment.
+// In each case, if 'varname' does not exist in the environment
+// return defval.  If 'varname' does exist but is not valid
+// (e.g., not a number for an int32 flag), abort with an error.
+// Otherwise, return the value.  NOTE: for booleans, for true use
+// 't' or 'T' or 'true' or '1', for false 'f' or 'F' or 'false' or '0'.
+
+extern bool BoolFromEnv(const char *varname, bool defval);
+extern int32 Int32FromEnv(const char *varname, int32 defval);
+extern int64 Int64FromEnv(const char *varname, int64 defval);
+extern uint64 Uint64FromEnv(const char *varname, uint64 defval);
+extern double DoubleFromEnv(const char *varname, double defval);
+extern const char *StringFromEnv(const char *varname, const char *defval);
+
+
+// --------------------------------------------------------------------
+// The next two functions parse commandlineflags from main():
+
+// Set the "usage" message for this program.  For example:
+//   string usage("This program does nothing.  Sample usage:\n");
+//   usage += argv[0] + " <uselessarg1> <uselessarg2>";
+//   SetUsageMessage(usage);
+// Do not include commandline flags in the usage: we do that for you!
+// Thread-hostile; meant to be called before any threads are spawned.
+extern void SetUsageMessage(const std::string& usage);
+
+// Looks for flags in argv and parses them.  Rearranges argv to put
+// flags first, or removes them entirely if remove_flags is true.
+// If a flag is defined more than once in the command line or flag
+// file, the last definition is used.  Returns the index (into argv)
+// of the first non-flag argument.
+// See top-of-file for more details on this function.
+#ifndef SWIG   // In swig, use ParseCommandLineFlagsScript() instead.
+extern uint32 ParseCommandLineFlags(int *argc, char*** argv,
+                                    bool remove_flags);
+#endif
+
+
+// Calls to ParseCommandLineNonHelpFlags and then to
+// HandleCommandLineHelpFlags can be used instead of a call to
+// ParseCommandLineFlags during initialization, in order to allow for
+// changing default values for some FLAGS (via
+// e.g. SetCommandLineOptionWithMode calls) between the time of
+// command line parsing and the time of dumping help information for
+// the flags as a result of command line parsing.  If a flag is
+// defined more than once in the command line or flag file, the last
+// definition is used.  Returns the index (into argv) of the first
+// non-flag argument.  (If remove_flags is true, will always return 1.)
+extern uint32 ParseCommandLineNonHelpFlags(int *argc, char*** argv,
+                                           bool remove_flags);
+// This is actually defined in commandlineflags_reporting.cc.
+// This function is misnamed (it also handles --version, etc.), but
+// it's too late to change that now. :-(
+extern void HandleCommandLineHelpFlags();   // in commandlineflags_reporting.cc
+
+// Allow command line reparsing.  Disables the error normally
+// generated when an unknown flag is found, since it may be found in a
+// later parse.  Thread-hostile; meant to be called before any threads
+// are spawned.
+extern void AllowCommandLineReparsing();
+
+// Reparse the flags that have not yet been recognized.  Only flags
+// registered since the last parse will be recognized.  Any flag value
+// must be provided as part of the argument using "=", not as a
+// separate command line argument that follows the flag argument.
+// Intended for handling flags from dynamically loaded libraries,
+// since their flags are not registered until they are loaded.
+// Returns the index (into the original argv) of the first non-flag
+// argument.  (If remove_flags is true, will always return 1.)
+extern uint32 ReparseCommandLineNonHelpFlags();
+
+// Clean up memory allocated by flags.  This is only needed to reduce
+// the quantity of "potentially leaked" reports emitted by memory
+// debugging tools such as valgrind.  It is not required for normal
+// operation, or for the perftools heap-checker.  It must only be called
+// when the process is about to exit, and all threads that might
+// access flags are quiescent.  Referencing flags after this is called
+// will have unexpected consequences.  This is not safe to run when
+// multiple threads might be running: the function is thread-hostile.
+extern void ShutDownCommandLineFlags();
+
+
+// --------------------------------------------------------------------
+// Now come the command line flag declaration/definition macros that
+// will actually be used.  They're kind of hairy.  A major reason
+// for this is initialization: we want people to be able to access
+// variables in global constructors and have that not crash, even if
+// their global constructor runs before the global constructor here.
+// (Obviously, we can't guarantee the flags will have the correct
+// default value in that case, but at least accessing them is safe.)
+// The only way to do that is have flags point to a static buffer.
+// So we make one, using a union to ensure proper alignment, and
+// then use placement-new to actually set up the flag with the
+// correct default value.  In the same vein, we have to worry about
+// flag access in global destructors, so FlagRegisterer has to be
+// careful never to destroy the flag-values it constructs.
+//
+// Note that when we define a flag variable FLAGS_<name>, we also
+// preemptively define a junk variable, FLAGS_no<name>.  This is to
+// cause a link-time error if someone tries to define 2 flags with
+// names like "logging" and "nologging".  We do this because a bool
+// flag FLAG can be set from the command line to true with a "-FLAG"
+// argument, and to false with a "-noFLAG" argument, and so this can
+// potentially avert confusion.
+//
+// We also put flags into their own namespace.  It is purposefully
+// named in an opaque way that people should have trouble typing
+// directly.  The idea is that DEFINE puts the flag in the weird
+// namespace, and DECLARE imports the flag from there into the current
+// namespace.  The net result is to force people to use DECLARE to get
+// access to a flag, rather than saying "extern bool FLAGS_whatever;"
+// or some such instead.  We want this so we can put extra
+// functionality (like sanity-checking) in DECLARE if we want, and
+// make sure it is picked up everywhere.
+//
+// We also put the type of the variable in the namespace, so that
+// people can't DECLARE_int32 something that they DEFINE_bool'd
+// elsewhere.
+
+class FlagRegisterer {
+ public:
+  FlagRegisterer(const char* name, const char* type,
+                 const char* help, const char* filename,
+                 void* current_storage, void* defvalue_storage);
+};
+
+extern bool FlagsTypeWarn(const char *name);
+
+// If your application #defines STRIP_FLAG_HELP to a non-zero value
+// before #including this file, we remove the help message from the
+// binary file. This can reduce the size of the resulting binary
+// somewhat, and may also be useful for security reasons.
+
+extern const char kStrippedFlagHelp[];
+
+}
+
+#ifndef SWIG  // In swig, ignore the main flag declarations
+
+#if defined(STRIP_FLAG_HELP) && STRIP_FLAG_HELP > 0
+// Need this construct to avoid the 'defined but not used' warning.
+#define MAYBE_STRIPPED_HELP(txt) (false ? (txt) : ::google::kStrippedFlagHelp)
+#else
+#define MAYBE_STRIPPED_HELP(txt) txt
+#endif
+
+// Each command-line flag has two variables associated with it: one
+// with the current value, and one with the default value.  However,
+// we have a third variable, which is where value is assigned; it's a
+// constant.  This guarantees that FLAG_##value is initialized at
+// static initialization time (e.g. before program-start) rather than
+// than global construction time (which is after program-start but
+// before main), at least when 'value' is a compile-time constant.  We
+// use a small trick for the "default value" variable, and call it
+// FLAGS_no<name>.  This serves the second purpose of assuring a
+// compile error if someone tries to define a flag named no<name>
+// which is illegal (--foo and --nofoo both affect the "foo" flag).
+#define DEFINE_VARIABLE(type, shorttype, name, value, help) \
+  namespace fL##shorttype {                                     \
+    static const type FLAGS_nono##name = value;                 \
+    type FLAGS_##name = FLAGS_nono##name;                       \
+    type FLAGS_no##name = FLAGS_nono##name;                     \
+    static ::google::FlagRegisterer o_##name(      \
+      #name, #type, MAYBE_STRIPPED_HELP(help), __FILE__,        \
+      &FLAGS_##name, &FLAGS_no##name);                          \
+  }                                                             \
+  using fL##shorttype::FLAGS_##name
+
+#define DECLARE_VARIABLE(type, shorttype, name) \
+  namespace fL##shorttype {                     \
+    extern type FLAGS_##name;                   \
+  }                                             \
+  using fL##shorttype::FLAGS_##name
+
+// For DEFINE_bool, we want to do the extra check that the passed-in
+// value is actually a bool, and not a string or something that can be
+// coerced to a bool.  These declarations (no definition needed!) will
+// help us do that, and never evaluate From, which is important.
+// We'll use 'sizeof(IsBool(val))' to distinguish. This code requires
+// that the compiler have different sizes for bool & double. Since
+// this is not guaranteed by the standard, we check it with a
+// compile-time assert (msg[-1] will give a compile-time error).
+namespace fLB {
+struct CompileAssert {};
+typedef CompileAssert expected_sizeof_double_neq_sizeof_bool[
+                      (sizeof(double) != sizeof(bool)) ? 1 : -1];
+template<typename From> double IsBoolFlag(const From& from);
+bool IsBoolFlag(bool from);
+}  // namespace fLB
+
+#define DECLARE_bool(name)          DECLARE_VARIABLE(bool, B, name)
+#define DEFINE_bool(name, val, txt)                                       \
+  namespace fLB {                                                         \
+    typedef ::fLB::CompileAssert FLAG_##name##_value_is_not_a_bool[       \
+            (sizeof(::fLB::IsBoolFlag(val)) != sizeof(double)) ? 1 : -1]; \
+  }                                                                       \
+  DEFINE_VARIABLE(bool, B, name, val, txt)
+
+#define DECLARE_int32(name)         DECLARE_VARIABLE(::google::int32, I, name)
+#define DEFINE_int32(name,val,txt)  DEFINE_VARIABLE(::google::int32, I, name, val, txt)
+
+#define DECLARE_int64(name)         DECLARE_VARIABLE(::google::int64, I64, name)
+#define DEFINE_int64(name,val,txt)  DEFINE_VARIABLE(::google::int64, I64, name, val, txt)
+
+#define DECLARE_uint64(name)        DECLARE_VARIABLE(::google::uint64, U64, name)
+#define DEFINE_uint64(name,val,txt) DEFINE_VARIABLE(::google::uint64, U64, name, val, txt)
+
+#define DECLARE_double(name)          DECLARE_VARIABLE(double, D, name)
+#define DEFINE_double(name, val, txt) DEFINE_VARIABLE(double, D, name, val, txt)
+
+// Strings are trickier, because they're not a POD, so we can't
+// construct them at static-initialization time (instead they get
+// constructed at global-constructor time, which is much later).  To
+// try to avoid crashes in that case, we use a char buffer to store
+// the string, which we can static-initialize, and then placement-new
+// into it later.  It's not perfect, but the best we can do.
+
+namespace fLS {
+// The meaning of "string" might be different between now and when the
+// macros below get invoked (e.g., if someone is experimenting with
+// other string implementations that get defined after this file is
+// included).  Save the current meaning now and use it in the macros.
+typedef std::string clstring;
+
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const char *value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const clstring &value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           int value);
+}  // namespace fLS
+
+#define DECLARE_string(name)  namespace fLS { extern ::fLS::clstring& FLAGS_##name; } \
+                              using fLS::FLAGS_##name
+
+// We need to define a var named FLAGS_no##name so people don't define
+// --string and --nostring.  And we need a temporary place to put val
+// so we don't have to evaluate it twice.  Two great needs that go
+// great together!
+// The weird 'using' + 'extern' inside the fLS namespace is to work around
+// an unknown compiler bug/issue with the gcc 4.2.1 on SUSE 10.  See
+//    http://code.google.com/p/google-gflags/issues/detail?id=20
+#define DEFINE_string(name, val, txt)                                       \
+  namespace fLS {                                                           \
+    using ::fLS::clstring;                                                  \
+    static union { void* align; char s[sizeof(clstring)]; } s_##name[2];    \
+    clstring* const FLAGS_no##name = ::fLS::                                \
+                                   dont_pass0toDEFINE_string(s_##name[0].s, \
+                                                             val);          \
+    static ::google::FlagRegisterer o_##name(                  \
+        #name, "string", MAYBE_STRIPPED_HELP(txt), __FILE__,                \
+        s_##name[0].s, new (s_##name[1].s) clstring(*FLAGS_no##name));      \
+    extern clstring& FLAGS_##name;                                          \
+    using fLS::FLAGS_##name;                                                \
+    clstring& FLAGS_##name = *FLAGS_no##name;                               \
+  }                                                                         \
+  using fLS::FLAGS_##name
+
+#endif  // SWIG
+
+#endif  // GOOGLE_GFLAGS_H_
diff --git a/third_party/google-gflags/gen/arch/linux/x64/include/gflags/gflags_completions.h b/third_party/google-gflags/gen/arch/linux/x64/include/gflags/gflags_completions.h
new file mode 100644
index 0000000..9d9ce7a
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/linux/x64/include/gflags/gflags_completions.h
@@ -0,0 +1,121 @@
+// Copyright (c) 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// ---
+// Author: Dave Nicponski
+//
+// Implement helpful bash-style command line flag completions
+//
+// ** Functional API:
+// HandleCommandLineCompletions() should be called early during
+// program startup, but after command line flag code has been
+// initialized, such as the beginning of HandleCommandLineHelpFlags().
+// It checks the value of the flag --tab_completion_word.  If this
+// flag is empty, nothing happens here.  If it contains a string,
+// however, then HandleCommandLineCompletions() will hijack the
+// process, attempting to identify the intention behind this
+// completion.  Regardless of the outcome of this deduction, the
+// process will be terminated, similar to --helpshort flag
+// handling.
+//
+// ** Overview of Bash completions:
+// Bash can be told to programatically determine completions for the
+// current 'cursor word'.  It does this by (in this case) invoking a
+// command with some additional arguments identifying the command
+// being executed, the word being completed, and the previous word
+// (if any).  Bash then expects a sequence of output lines to be
+// printed to stdout.  If these lines all contain a common prefix
+// longer than the cursor word, bash will replace the cursor word
+// with that common prefix, and display nothing.  If there isn't such
+// a common prefix, bash will display the lines in pages using 'more'.
+//
+// ** Strategy taken for command line completions:
+// If we can deduce either the exact flag intended, or a common flag
+// prefix, we'll output exactly that.  Otherwise, if information
+// must be displayed to the user, we'll take the opportunity to add
+// some helpful information beyond just the flag name (specifically,
+// we'll include the default flag value and as much of the flag's
+// description as can fit on a single terminal line width, as specified
+// by the flag --tab_completion_columns).  Furthermore, we'll try to
+// make bash order the output such that the most useful or relevent
+// flags are the most likely to be shown at the top.
+//
+// ** Additional features:
+// To assist in finding that one really useful flag, substring matching
+// was implemented.  Before pressing a <TAB> to get completion for the
+// current word, you can append one or more '?' to the flag to do
+// substring matching.  Here's the semantics:
+//   --foo<TAB>     Show me all flags with names prefixed by 'foo'
+//   --foo?<TAB>    Show me all flags with 'foo' somewhere in the name
+//   --foo??<TAB>   Same as prior case, but also search in module
+//                  definition path for 'foo'
+//   --foo???<TAB>  Same as prior case, but also search in flag
+//                  descriptions for 'foo'
+// Finally, we'll trim the output to a relatively small number of
+// flags to keep bash quiet about the verbosity of output.  If one
+// really wanted to see all possible matches, appending a '+' to the
+// search word will force the exhaustive list of matches to be printed.
+//
+// ** How to have bash accept completions from a binary:
+// Bash requires that it be informed about each command that programmatic
+// completion should be enabled for.  Example addition to a .bashrc
+// file would be (your path to gflags_completions.sh file may differ):
+
+/*
+$ complete -o bashdefault -o default -o nospace -C                        \
+ '/usr/local/bin/gflags_completions.sh --tab_completion_columns $COLUMNS' \
+  time  env  binary_name  another_binary  [...]
+*/
+
+// This would allow the following to work:
+//   $ /path/to/binary_name --vmodule<TAB>
+// Or:
+//   $ ./bin/path/another_binary --gfs_u<TAB>
+// (etc)
+//
+// Sadly, it appears that bash gives no easy way to force this behavior for
+// all commands.  That's where the "time" in the above example comes in.
+// If you haven't specifically added a command to the list of completion
+// supported commands, you can still get completions by prefixing the
+// entire command with "env".
+//   $ env /some/brand/new/binary --vmod<TAB>
+// Assuming that "binary" is a newly compiled binary, this should still
+// produce the expected completion output.
+
+
+#ifndef GOOGLE_GFLAGS_COMPLETIONS_H_
+#define GOOGLE_GFLAGS_COMPLETIONS_H_
+
+namespace google {
+
+void HandleCommandLineCompletions(void);
+
+}
+
+#endif  // GOOGLE_GFLAGS_COMPLETIONS_H_
diff --git a/third_party/google-gflags/gen/arch/linux/x64/include/private/config.h b/third_party/google-gflags/gen/arch/linux/x64/include/private/config.h
new file mode 100644
index 0000000..98d8e1a
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/linux/x64/include/private/config.h
@@ -0,0 +1,110 @@
+/* src/config.h.  Generated from config.h.in by configure.  */
+/* src/config.h.in.  Generated from configure.ac by autoheader.  */
+
+/* Always the empty-string on non-windows systems. On windows, should be
+   "__declspec(dllexport)". This way, when we compile the dll, we export our
+   functions/classes. It's safe to define this here because config.h is only
+   used internally, to compile the DLL, and every DLL source file #includes
+   "config.h" before anything else. */
+#define GFLAGS_DLL_DECL /**/
+
+/* Namespace for Google classes */
+#define GOOGLE_NAMESPACE ::google
+
+/* Define to 1 if you have the <dlfcn.h> header file. */
+#define HAVE_DLFCN_H 1
+
+/* Define to 1 if you have the <fnmatch.h> header file. */
+#define HAVE_FNMATCH_H 1
+
+/* Define to 1 if you have the <inttypes.h> header file. */
+#define HAVE_INTTYPES_H 1
+
+/* Define to 1 if you have the <memory.h> header file. */
+#define HAVE_MEMORY_H 1
+
+/* define if the compiler implements namespaces */
+#define HAVE_NAMESPACES 1
+
+/* Define if you have POSIX threads libraries and header files. */
+#define HAVE_PTHREAD 1
+
+/* Define to 1 if you have the `putenv' function. */
+#define HAVE_PUTENV 1
+
+/* Define to 1 if you have the `setenv' function. */
+#define HAVE_SETENV 1
+
+/* Define to 1 if you have the <stdint.h> header file. */
+#define HAVE_STDINT_H 1
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#define HAVE_STDLIB_H 1
+
+/* Define to 1 if you have the <strings.h> header file. */
+#define HAVE_STRINGS_H 1
+
+/* Define to 1 if you have the <string.h> header file. */
+#define HAVE_STRING_H 1
+
+/* Define to 1 if you have the `strtoll' function. */
+#define HAVE_STRTOLL 1
+
+/* Define to 1 if you have the `strtoq' function. */
+#define HAVE_STRTOQ 1
+
+/* Define to 1 if you have the <sys/stat.h> header file. */
+#define HAVE_SYS_STAT_H 1
+
+/* Define to 1 if you have the <sys/types.h> header file. */
+#define HAVE_SYS_TYPES_H 1
+
+/* Define to 1 if you have the <unistd.h> header file. */
+#define HAVE_UNISTD_H 1
+
+/* define if your compiler has __attribute__ */
+#define HAVE___ATTRIBUTE__ 1
+
+/* Define to the sub-directory in which libtool stores uninstalled libraries.
+   */
+#define LT_OBJDIR ".libs/"
+
+/* Name of package */
+#define PACKAGE "gflags"
+
+/* Define to the address where bug reports for this package should be sent. */
+#define PACKAGE_BUGREPORT "opensource@google.com"
+
+/* Define to the full name of this package. */
+#define PACKAGE_NAME "gflags"
+
+/* Define to the full name and version of this package. */
+#define PACKAGE_STRING "gflags 1.5"
+
+/* Define to the one symbol short name of this package. */
+#define PACKAGE_TARNAME "gflags"
+
+/* Define to the home page for this package. */
+#define PACKAGE_URL ""
+
+/* Define to the version of this package. */
+#define PACKAGE_VERSION "1.5"
+
+/* Define to necessary symbol if this constant uses a non-standard name on
+   your system. */
+/* #undef PTHREAD_CREATE_JOINABLE */
+
+/* Define to 1 if you have the ANSI C header files. */
+#define STDC_HEADERS 1
+
+/* the namespace where STL code like vector<> is defined */
+#define STL_NAMESPACE std
+
+/* Version number of package */
+#define VERSION "1.5"
+
+/* Stops putting the code inside the Google namespace */
+#define _END_GOOGLE_NAMESPACE_ }
+
+/* Puts following code inside the Google namespace */
+#define _START_GOOGLE_NAMESPACE_ namespace google {
diff --git a/third_party/google-gflags/gen/arch/mac/ia32/include/gflags/gflags.h b/third_party/google-gflags/gen/arch/mac/ia32/include/gflags/gflags.h
new file mode 100644
index 0000000..0c2f997
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/mac/ia32/include/gflags/gflags.h
@@ -0,0 +1,585 @@
+// Copyright (c) 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// ---
+// Author: Ray Sidney
+// Revamped and reorganized by Craig Silverstein
+//
+// This is the file that should be included by any file which declares
+// or defines a command line flag or wants to parse command line flags
+// or print a program usage message (which will include information about
+// flags).  Executive summary, in the form of an example foo.cc file:
+//
+//    #include "foo.h"         // foo.h has a line "DECLARE_int32(start);"
+//    #include "validators.h"  // hypothetical file defining ValidateIsFile()
+//
+//    DEFINE_int32(end, 1000, "The last record to read");
+//
+//    DEFINE_string(filename, "my_file.txt", "The file to read");
+//    // Crash if the specified file does not exist.
+//    static bool dummy = RegisterFlagValidator(&FLAGS_filename,
+//                                              &ValidateIsFile);
+//
+//    DECLARE_bool(verbose); // some other file has a DEFINE_bool(verbose, ...)
+//
+//    void MyFunc() {
+//      if (FLAGS_verbose) printf("Records %d-%d\n", FLAGS_start, FLAGS_end);
+//    }
+//
+// Then, at the command-line:
+//    ./foo --noverbose --start=5 --end=100
+//
+// For more details, see
+//    doc/gflags.html
+//
+// --- A note about thread-safety:
+//
+// We describe many functions in this routine as being thread-hostile,
+// thread-compatible, or thread-safe.  Here are the meanings we use:
+//
+// thread-safe: it is safe for multiple threads to call this routine
+//   (or, when referring to a class, methods of this class)
+//   concurrently.
+// thread-hostile: it is not safe for multiple threads to call this
+//   routine (or methods of this class) concurrently.  In gflags,
+//   most thread-hostile routines are intended to be called early in,
+//   or even before, main() -- that is, before threads are spawned.
+// thread-compatible: it is safe for multiple threads to read from
+//   this variable (when applied to variables), or to call const
+//   methods of this class (when applied to classes), as long as no
+//   other thread is writing to the variable or calling non-const
+//   methods of this class.
+
+#ifndef GOOGLE_GFLAGS_H_
+#define GOOGLE_GFLAGS_H_
+
+#include <string>
+#include <vector>
+
+// We care a lot about number of bits things take up.  Unfortunately,
+// systems define their bit-specific ints in a lot of different ways.
+// We use our own way, and have a typedef to get there.
+// Note: these commands below may look like "#if 1" or "#if 0", but
+// that's because they were constructed that way at ./configure time.
+// Look at gflags.h.in to see how they're calculated (based on your config).
+#if 1
+#include <stdint.h>             // the normal place uint16_t is defined
+#endif
+#if 1
+#include <sys/types.h>          // the normal place u_int16_t is defined
+#endif
+#if 1
+#include <inttypes.h>           // a third place for uint16_t or u_int16_t
+#endif
+
+namespace google {
+
+#if 1      // the C99 format
+typedef int32_t int32;
+typedef uint32_t uint32;
+typedef int64_t int64;
+typedef uint64_t uint64;
+#elif 1   // the BSD format
+typedef int32_t int32;
+typedef u_int32_t uint32;
+typedef int64_t int64;
+typedef u_int64_t uint64;
+#elif 0     // the windows (vc7) format
+typedef __int32 int32;
+typedef unsigned __int32 uint32;
+typedef __int64 int64;
+typedef unsigned __int64 uint64;
+#else
+#error Do not know how to define a 32-bit integer quantity on your system
+#endif
+
+// --------------------------------------------------------------------
+// To actually define a flag in a file, use DEFINE_bool,
+// DEFINE_string, etc. at the bottom of this file.  You may also find
+// it useful to register a validator with the flag.  This ensures that
+// when the flag is parsed from the commandline, or is later set via
+// SetCommandLineOption, we call the validation function. It is _not_
+// called when you assign the value to the flag directly using the = operator.
+//
+// The validation function should return true if the flag value is valid, and
+// false otherwise. If the function returns false for the new setting of the
+// flag, the flag will retain its current value. If it returns false for the
+// default value, ParseCommandLineFlags() will die.
+//
+// This function is safe to call at global construct time (as in the
+// example below).
+//
+// Example use:
+//    static bool ValidatePort(const char* flagname, int32 value) {
+//       if (value > 0 && value < 32768)   // value is ok
+//         return true;
+//       printf("Invalid value for --%s: %d\n", flagname, (int)value);
+//       return false;
+//    }
+//    DEFINE_int32(port, 0, "What port to listen on");
+//    static bool dummy = RegisterFlagValidator(&FLAGS_port, &ValidatePort);
+
+// Returns true if successfully registered, false if not (because the
+// first argument doesn't point to a command-line flag, or because a
+// validator is already registered for this flag).
+bool RegisterFlagValidator(const bool* flag,
+                           bool (*validate_fn)(const char*, bool));
+bool RegisterFlagValidator(const int32* flag,
+                           bool (*validate_fn)(const char*, int32));
+bool RegisterFlagValidator(const int64* flag,
+                           bool (*validate_fn)(const char*, int64));
+bool RegisterFlagValidator(const uint64* flag,
+                           bool (*validate_fn)(const char*, uint64));
+bool RegisterFlagValidator(const double* flag,
+                           bool (*validate_fn)(const char*, double));
+bool RegisterFlagValidator(const std::string* flag,
+                           bool (*validate_fn)(const char*, const std::string&));
+
+
+// --------------------------------------------------------------------
+// These methods are the best way to get access to info about the
+// list of commandline flags.  Note that these routines are pretty slow.
+//   GetAllFlags: mostly-complete info about the list, sorted by file.
+//   ShowUsageWithFlags: pretty-prints the list to stdout (what --help does)
+//   ShowUsageWithFlagsRestrict: limit to filenames with restrict as a substr
+//
+// In addition to accessing flags, you can also access argv[0] (the program
+// name) and argv (the entire commandline), which we sock away a copy of.
+// These variables are static, so you should only set them once.
+
+struct CommandLineFlagInfo {
+  std::string name;           // the name of the flag
+  std::string type;           // the type of the flag: int32, etc
+  std::string description;    // the "help text" associated with the flag
+  std::string current_value;  // the current value, as a string
+  std::string default_value;  // the default value, as a string
+  std::string filename;       // 'cleaned' version of filename holding the flag
+  bool has_validator_fn;      // true if RegisterFlagValidator called on flag
+  bool is_default;            // true if the flag has the default value and
+                              // has not been set explicitly from the cmdline
+                              // or via SetCommandLineOption
+};
+
+// Using this inside of a validator is a recipe for a deadlock.
+// TODO(wojtekm) Fix locking when validators are running, to make it safe to
+// call validators during ParseAllFlags.
+// Also make sure then to uncomment the corresponding unit test in
+// commandlineflags_unittest.sh
+extern void GetAllFlags(std::vector<CommandLineFlagInfo>* OUTPUT);
+// These two are actually defined in commandlineflags_reporting.cc.
+extern void ShowUsageWithFlags(const char *argv0);  // what --help does
+extern void ShowUsageWithFlagsRestrict(const char *argv0, const char *restrict);
+
+// Create a descriptive string for a flag.
+// Goes to some trouble to make pretty line breaks.
+extern std::string DescribeOneFlag(const CommandLineFlagInfo& flag);
+
+// Thread-hostile; meant to be called before any threads are spawned.
+extern void SetArgv(int argc, const char** argv);
+// The following functions are thread-safe as long as SetArgv() is
+// only called before any threads start.
+extern const std::vector<std::string>& GetArgvs();  // all of argv as a vector
+extern const char* GetArgv();                // all of argv as a string
+extern const char* GetArgv0();               // only argv0
+extern uint32 GetArgvSum();                  // simple checksum of argv
+extern const char* ProgramInvocationName();  // argv0, or "UNKNOWN" if not set
+extern const char* ProgramInvocationShortName();   // basename(argv0)
+// ProgramUsage() is thread-safe as long as SetUsageMessage() is only
+// called before any threads start.
+extern const char* ProgramUsage();           // string set by SetUsageMessage()
+
+
+// --------------------------------------------------------------------
+// Normally you access commandline flags by just saying "if (FLAGS_foo)"
+// or whatever, and set them by calling "FLAGS_foo = bar" (or, more
+// commonly, via the DEFINE_foo macro).  But if you need a bit more
+// control, we have programmatic ways to get/set the flags as well.
+// These programmatic ways to access flags are thread-safe, but direct
+// access is only thread-compatible.
+
+// Return true iff the flagname was found.
+// OUTPUT is set to the flag's value, or unchanged if we return false.
+extern bool GetCommandLineOption(const char* name, std::string* OUTPUT);
+
+// Return true iff the flagname was found. OUTPUT is set to the flag's
+// CommandLineFlagInfo or unchanged if we return false.
+extern bool GetCommandLineFlagInfo(const char* name,
+                                   CommandLineFlagInfo* OUTPUT);
+
+// Return the CommandLineFlagInfo of the flagname.  exit() if name not found.
+// Example usage, to check if a flag's value is currently the default value:
+//   if (GetCommandLineFlagInfoOrDie("foo").is_default) ...
+extern CommandLineFlagInfo GetCommandLineFlagInfoOrDie(const char* name);
+
+enum FlagSettingMode {
+  // update the flag's value (can call this multiple times).
+  SET_FLAGS_VALUE,
+  // update the flag's value, but *only if* it has not yet been updated
+  // with SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef".
+  SET_FLAG_IF_DEFAULT,
+  // set the flag's default value to this.  If the flag has not yet updated
+  // yet (via SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef")
+  // change the flag's current value to the new default value as well.
+  SET_FLAGS_DEFAULT
+};
+
+// Set a particular flag ("command line option").  Returns a string
+// describing the new value that the option has been set to.  The
+// return value API is not well-specified, so basically just depend on
+// it to be empty if the setting failed for some reason -- the name is
+// not a valid flag name, or the value is not a valid value -- and
+// non-empty else.
+
+// SetCommandLineOption uses set_mode == SET_FLAGS_VALUE (the common case)
+extern std::string SetCommandLineOption(const char* name, const char* value);
+extern std::string SetCommandLineOptionWithMode(const char* name, const char* value,
+                                                FlagSettingMode set_mode);
+
+
+// --------------------------------------------------------------------
+// Saves the states (value, default value, whether the user has set
+// the flag, registered validators, etc) of all flags, and restores
+// them when the FlagSaver is destroyed.  This is very useful in
+// tests, say, when you want to let your tests change the flags, but
+// make sure that they get reverted to the original states when your
+// test is complete.
+//
+// Example usage:
+//   void TestFoo() {
+//     FlagSaver s1;
+//     FLAG_foo = false;
+//     FLAG_bar = "some value";
+//
+//     // test happens here.  You can return at any time
+//     // without worrying about restoring the FLAG values.
+//   }
+//
+// Note: This class is marked with __attribute__((unused)) because all the
+// work is done in the constructor and destructor, so in the standard
+// usage example above, the compiler would complain that it's an
+// unused variable.
+//
+// This class is thread-safe.
+
+class FlagSaver {
+ public:
+  FlagSaver();
+  ~FlagSaver();
+
+ private:
+  class FlagSaverImpl* impl_;   // we use pimpl here to keep API steady
+
+  FlagSaver(const FlagSaver&);  // no copying!
+  void operator=(const FlagSaver&);
+} __attribute__ ((unused));
+
+// --------------------------------------------------------------------
+// Some deprecated or hopefully-soon-to-be-deprecated functions.
+
+// This is often used for logging.  TODO(csilvers): figure out a better way
+extern std::string CommandlineFlagsIntoString();
+// Usually where this is used, a FlagSaver should be used instead.
+extern bool ReadFlagsFromString(const std::string& flagfilecontents,
+                                const char* prog_name,
+                                bool errors_are_fatal); // uses SET_FLAGS_VALUE
+
+// These let you manually implement --flagfile functionality.
+// DEPRECATED.
+extern bool AppendFlagsIntoFile(const std::string& filename, const char* prog_name);
+extern bool SaveCommandFlags();  // actually defined in google.cc !
+extern bool ReadFromFlagsFile(const std::string& filename, const char* prog_name,
+                              bool errors_are_fatal);   // uses SET_FLAGS_VALUE
+
+
+// --------------------------------------------------------------------
+// Useful routines for initializing flags from the environment.
+// In each case, if 'varname' does not exist in the environment
+// return defval.  If 'varname' does exist but is not valid
+// (e.g., not a number for an int32 flag), abort with an error.
+// Otherwise, return the value.  NOTE: for booleans, for true use
+// 't' or 'T' or 'true' or '1', for false 'f' or 'F' or 'false' or '0'.
+
+extern bool BoolFromEnv(const char *varname, bool defval);
+extern int32 Int32FromEnv(const char *varname, int32 defval);
+extern int64 Int64FromEnv(const char *varname, int64 defval);
+extern uint64 Uint64FromEnv(const char *varname, uint64 defval);
+extern double DoubleFromEnv(const char *varname, double defval);
+extern const char *StringFromEnv(const char *varname, const char *defval);
+
+
+// --------------------------------------------------------------------
+// The next two functions parse commandlineflags from main():
+
+// Set the "usage" message for this program.  For example:
+//   string usage("This program does nothing.  Sample usage:\n");
+//   usage += argv[0] + " <uselessarg1> <uselessarg2>";
+//   SetUsageMessage(usage);
+// Do not include commandline flags in the usage: we do that for you!
+// Thread-hostile; meant to be called before any threads are spawned.
+extern void SetUsageMessage(const std::string& usage);
+
+// Looks for flags in argv and parses them.  Rearranges argv to put
+// flags first, or removes them entirely if remove_flags is true.
+// If a flag is defined more than once in the command line or flag
+// file, the last definition is used.  Returns the index (into argv)
+// of the first non-flag argument.
+// See top-of-file for more details on this function.
+#ifndef SWIG   // In swig, use ParseCommandLineFlagsScript() instead.
+extern uint32 ParseCommandLineFlags(int *argc, char*** argv,
+                                    bool remove_flags);
+#endif
+
+
+// Calls to ParseCommandLineNonHelpFlags and then to
+// HandleCommandLineHelpFlags can be used instead of a call to
+// ParseCommandLineFlags during initialization, in order to allow for
+// changing default values for some FLAGS (via
+// e.g. SetCommandLineOptionWithMode calls) between the time of
+// command line parsing and the time of dumping help information for
+// the flags as a result of command line parsing.  If a flag is
+// defined more than once in the command line or flag file, the last
+// definition is used.  Returns the index (into argv) of the first
+// non-flag argument.  (If remove_flags is true, will always return 1.)
+extern uint32 ParseCommandLineNonHelpFlags(int *argc, char*** argv,
+                                           bool remove_flags);
+// This is actually defined in commandlineflags_reporting.cc.
+// This function is misnamed (it also handles --version, etc.), but
+// it's too late to change that now. :-(
+extern void HandleCommandLineHelpFlags();   // in commandlineflags_reporting.cc
+
+// Allow command line reparsing.  Disables the error normally
+// generated when an unknown flag is found, since it may be found in a
+// later parse.  Thread-hostile; meant to be called before any threads
+// are spawned.
+extern void AllowCommandLineReparsing();
+
+// Reparse the flags that have not yet been recognized.  Only flags
+// registered since the last parse will be recognized.  Any flag value
+// must be provided as part of the argument using "=", not as a
+// separate command line argument that follows the flag argument.
+// Intended for handling flags from dynamically loaded libraries,
+// since their flags are not registered until they are loaded.
+// Returns the index (into the original argv) of the first non-flag
+// argument.  (If remove_flags is true, will always return 1.)
+extern uint32 ReparseCommandLineNonHelpFlags();
+
+// Clean up memory allocated by flags.  This is only needed to reduce
+// the quantity of "potentially leaked" reports emitted by memory
+// debugging tools such as valgrind.  It is not required for normal
+// operation, or for the perftools heap-checker.  It must only be called
+// when the process is about to exit, and all threads that might
+// access flags are quiescent.  Referencing flags after this is called
+// will have unexpected consequences.  This is not safe to run when
+// multiple threads might be running: the function is thread-hostile.
+extern void ShutDownCommandLineFlags();
+
+
+// --------------------------------------------------------------------
+// Now come the command line flag declaration/definition macros that
+// will actually be used.  They're kind of hairy.  A major reason
+// for this is initialization: we want people to be able to access
+// variables in global constructors and have that not crash, even if
+// their global constructor runs before the global constructor here.
+// (Obviously, we can't guarantee the flags will have the correct
+// default value in that case, but at least accessing them is safe.)
+// The only way to do that is have flags point to a static buffer.
+// So we make one, using a union to ensure proper alignment, and
+// then use placement-new to actually set up the flag with the
+// correct default value.  In the same vein, we have to worry about
+// flag access in global destructors, so FlagRegisterer has to be
+// careful never to destroy the flag-values it constructs.
+//
+// Note that when we define a flag variable FLAGS_<name>, we also
+// preemptively define a junk variable, FLAGS_no<name>.  This is to
+// cause a link-time error if someone tries to define 2 flags with
+// names like "logging" and "nologging".  We do this because a bool
+// flag FLAG can be set from the command line to true with a "-FLAG"
+// argument, and to false with a "-noFLAG" argument, and so this can
+// potentially avert confusion.
+//
+// We also put flags into their own namespace.  It is purposefully
+// named in an opaque way that people should have trouble typing
+// directly.  The idea is that DEFINE puts the flag in the weird
+// namespace, and DECLARE imports the flag from there into the current
+// namespace.  The net result is to force people to use DECLARE to get
+// access to a flag, rather than saying "extern bool FLAGS_whatever;"
+// or some such instead.  We want this so we can put extra
+// functionality (like sanity-checking) in DECLARE if we want, and
+// make sure it is picked up everywhere.
+//
+// We also put the type of the variable in the namespace, so that
+// people can't DECLARE_int32 something that they DEFINE_bool'd
+// elsewhere.
+
+class FlagRegisterer {
+ public:
+  FlagRegisterer(const char* name, const char* type,
+                 const char* help, const char* filename,
+                 void* current_storage, void* defvalue_storage);
+};
+
+extern bool FlagsTypeWarn(const char *name);
+
+// If your application #defines STRIP_FLAG_HELP to a non-zero value
+// before #including this file, we remove the help message from the
+// binary file. This can reduce the size of the resulting binary
+// somewhat, and may also be useful for security reasons.
+
+extern const char kStrippedFlagHelp[];
+
+}
+
+#ifndef SWIG  // In swig, ignore the main flag declarations
+
+#if defined(STRIP_FLAG_HELP) && STRIP_FLAG_HELP > 0
+// Need this construct to avoid the 'defined but not used' warning.
+#define MAYBE_STRIPPED_HELP(txt) (false ? (txt) : ::google::kStrippedFlagHelp)
+#else
+#define MAYBE_STRIPPED_HELP(txt) txt
+#endif
+
+// Each command-line flag has two variables associated with it: one
+// with the current value, and one with the default value.  However,
+// we have a third variable, which is where value is assigned; it's a
+// constant.  This guarantees that FLAG_##value is initialized at
+// static initialization time (e.g. before program-start) rather than
+// than global construction time (which is after program-start but
+// before main), at least when 'value' is a compile-time constant.  We
+// use a small trick for the "default value" variable, and call it
+// FLAGS_no<name>.  This serves the second purpose of assuring a
+// compile error if someone tries to define a flag named no<name>
+// which is illegal (--foo and --nofoo both affect the "foo" flag).
+#define DEFINE_VARIABLE(type, shorttype, name, value, help) \
+  namespace fL##shorttype {                                     \
+    static const type FLAGS_nono##name = value;                 \
+    type FLAGS_##name = FLAGS_nono##name;                       \
+    type FLAGS_no##name = FLAGS_nono##name;                     \
+    static ::google::FlagRegisterer o_##name(      \
+      #name, #type, MAYBE_STRIPPED_HELP(help), __FILE__,        \
+      &FLAGS_##name, &FLAGS_no##name);                          \
+  }                                                             \
+  using fL##shorttype::FLAGS_##name
+
+#define DECLARE_VARIABLE(type, shorttype, name) \
+  namespace fL##shorttype {                     \
+    extern type FLAGS_##name;                   \
+  }                                             \
+  using fL##shorttype::FLAGS_##name
+
+// For DEFINE_bool, we want to do the extra check that the passed-in
+// value is actually a bool, and not a string or something that can be
+// coerced to a bool.  These declarations (no definition needed!) will
+// help us do that, and never evaluate From, which is important.
+// We'll use 'sizeof(IsBool(val))' to distinguish. This code requires
+// that the compiler have different sizes for bool & double. Since
+// this is not guaranteed by the standard, we check it with a
+// compile-time assert (msg[-1] will give a compile-time error).
+namespace fLB {
+struct CompileAssert {};
+typedef CompileAssert expected_sizeof_double_neq_sizeof_bool[
+                      (sizeof(double) != sizeof(bool)) ? 1 : -1];
+template<typename From> double IsBoolFlag(const From& from);
+bool IsBoolFlag(bool from);
+}  // namespace fLB
+
+#define DECLARE_bool(name)          DECLARE_VARIABLE(bool, B, name)
+#define DEFINE_bool(name, val, txt)                                       \
+  namespace fLB {                                                         \
+    typedef ::fLB::CompileAssert FLAG_##name##_value_is_not_a_bool[       \
+            (sizeof(::fLB::IsBoolFlag(val)) != sizeof(double)) ? 1 : -1]; \
+  }                                                                       \
+  DEFINE_VARIABLE(bool, B, name, val, txt)
+
+#define DECLARE_int32(name)         DECLARE_VARIABLE(::google::int32, I, name)
+#define DEFINE_int32(name,val,txt)  DEFINE_VARIABLE(::google::int32, I, name, val, txt)
+
+#define DECLARE_int64(name)         DECLARE_VARIABLE(::google::int64, I64, name)
+#define DEFINE_int64(name,val,txt)  DEFINE_VARIABLE(::google::int64, I64, name, val, txt)
+
+#define DECLARE_uint64(name)        DECLARE_VARIABLE(::google::uint64, U64, name)
+#define DEFINE_uint64(name,val,txt) DEFINE_VARIABLE(::google::uint64, U64, name, val, txt)
+
+#define DECLARE_double(name)          DECLARE_VARIABLE(double, D, name)
+#define DEFINE_double(name, val, txt) DEFINE_VARIABLE(double, D, name, val, txt)
+
+// Strings are trickier, because they're not a POD, so we can't
+// construct them at static-initialization time (instead they get
+// constructed at global-constructor time, which is much later).  To
+// try to avoid crashes in that case, we use a char buffer to store
+// the string, which we can static-initialize, and then placement-new
+// into it later.  It's not perfect, but the best we can do.
+
+namespace fLS {
+// The meaning of "string" might be different between now and when the
+// macros below get invoked (e.g., if someone is experimenting with
+// other string implementations that get defined after this file is
+// included).  Save the current meaning now and use it in the macros.
+typedef std::string clstring;
+
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const char *value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const clstring &value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           int value);
+}  // namespace fLS
+
+#define DECLARE_string(name)  namespace fLS { extern ::fLS::clstring& FLAGS_##name; } \
+                              using fLS::FLAGS_##name
+
+// We need to define a var named FLAGS_no##name so people don't define
+// --string and --nostring.  And we need a temporary place to put val
+// so we don't have to evaluate it twice.  Two great needs that go
+// great together!
+// The weird 'using' + 'extern' inside the fLS namespace is to work around
+// an unknown compiler bug/issue with the gcc 4.2.1 on SUSE 10.  See
+//    http://code.google.com/p/google-gflags/issues/detail?id=20
+#define DEFINE_string(name, val, txt)                                       \
+  namespace fLS {                                                           \
+    using ::fLS::clstring;                                                  \
+    static union { void* align; char s[sizeof(clstring)]; } s_##name[2];    \
+    clstring* const FLAGS_no##name = ::fLS::                                \
+                                   dont_pass0toDEFINE_string(s_##name[0].s, \
+                                                             val);          \
+    static ::google::FlagRegisterer o_##name(                  \
+        #name, "string", MAYBE_STRIPPED_HELP(txt), __FILE__,                \
+        s_##name[0].s, new (s_##name[1].s) clstring(*FLAGS_no##name));      \
+    extern clstring& FLAGS_##name;                                          \
+    using fLS::FLAGS_##name;                                                \
+    clstring& FLAGS_##name = *FLAGS_no##name;                               \
+  }                                                                         \
+  using fLS::FLAGS_##name
+
+#endif  // SWIG
+
+#endif  // GOOGLE_GFLAGS_H_
diff --git a/third_party/google-gflags/gen/arch/mac/ia32/include/gflags/gflags_completions.h b/third_party/google-gflags/gen/arch/mac/ia32/include/gflags/gflags_completions.h
new file mode 100644
index 0000000..9d9ce7a
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/mac/ia32/include/gflags/gflags_completions.h
@@ -0,0 +1,121 @@
+// Copyright (c) 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// ---
+// Author: Dave Nicponski
+//
+// Implement helpful bash-style command line flag completions
+//
+// ** Functional API:
+// HandleCommandLineCompletions() should be called early during
+// program startup, but after command line flag code has been
+// initialized, such as the beginning of HandleCommandLineHelpFlags().
+// It checks the value of the flag --tab_completion_word.  If this
+// flag is empty, nothing happens here.  If it contains a string,
+// however, then HandleCommandLineCompletions() will hijack the
+// process, attempting to identify the intention behind this
+// completion.  Regardless of the outcome of this deduction, the
+// process will be terminated, similar to --helpshort flag
+// handling.
+//
+// ** Overview of Bash completions:
+// Bash can be told to programatically determine completions for the
+// current 'cursor word'.  It does this by (in this case) invoking a
+// command with some additional arguments identifying the command
+// being executed, the word being completed, and the previous word
+// (if any).  Bash then expects a sequence of output lines to be
+// printed to stdout.  If these lines all contain a common prefix
+// longer than the cursor word, bash will replace the cursor word
+// with that common prefix, and display nothing.  If there isn't such
+// a common prefix, bash will display the lines in pages using 'more'.
+//
+// ** Strategy taken for command line completions:
+// If we can deduce either the exact flag intended, or a common flag
+// prefix, we'll output exactly that.  Otherwise, if information
+// must be displayed to the user, we'll take the opportunity to add
+// some helpful information beyond just the flag name (specifically,
+// we'll include the default flag value and as much of the flag's
+// description as can fit on a single terminal line width, as specified
+// by the flag --tab_completion_columns).  Furthermore, we'll try to
+// make bash order the output such that the most useful or relevent
+// flags are the most likely to be shown at the top.
+//
+// ** Additional features:
+// To assist in finding that one really useful flag, substring matching
+// was implemented.  Before pressing a <TAB> to get completion for the
+// current word, you can append one or more '?' to the flag to do
+// substring matching.  Here's the semantics:
+//   --foo<TAB>     Show me all flags with names prefixed by 'foo'
+//   --foo?<TAB>    Show me all flags with 'foo' somewhere in the name
+//   --foo??<TAB>   Same as prior case, but also search in module
+//                  definition path for 'foo'
+//   --foo???<TAB>  Same as prior case, but also search in flag
+//                  descriptions for 'foo'
+// Finally, we'll trim the output to a relatively small number of
+// flags to keep bash quiet about the verbosity of output.  If one
+// really wanted to see all possible matches, appending a '+' to the
+// search word will force the exhaustive list of matches to be printed.
+//
+// ** How to have bash accept completions from a binary:
+// Bash requires that it be informed about each command that programmatic
+// completion should be enabled for.  Example addition to a .bashrc
+// file would be (your path to gflags_completions.sh file may differ):
+
+/*
+$ complete -o bashdefault -o default -o nospace -C                        \
+ '/usr/local/bin/gflags_completions.sh --tab_completion_columns $COLUMNS' \
+  time  env  binary_name  another_binary  [...]
+*/
+
+// This would allow the following to work:
+//   $ /path/to/binary_name --vmodule<TAB>
+// Or:
+//   $ ./bin/path/another_binary --gfs_u<TAB>
+// (etc)
+//
+// Sadly, it appears that bash gives no easy way to force this behavior for
+// all commands.  That's where the "time" in the above example comes in.
+// If you haven't specifically added a command to the list of completion
+// supported commands, you can still get completions by prefixing the
+// entire command with "env".
+//   $ env /some/brand/new/binary --vmod<TAB>
+// Assuming that "binary" is a newly compiled binary, this should still
+// produce the expected completion output.
+
+
+#ifndef GOOGLE_GFLAGS_COMPLETIONS_H_
+#define GOOGLE_GFLAGS_COMPLETIONS_H_
+
+namespace google {
+
+void HandleCommandLineCompletions(void);
+
+}
+
+#endif  // GOOGLE_GFLAGS_COMPLETIONS_H_
diff --git a/third_party/google-gflags/gen/arch/mac/ia32/include/private/config.h b/third_party/google-gflags/gen/arch/mac/ia32/include/private/config.h
new file mode 100644
index 0000000..98d8e1a
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/mac/ia32/include/private/config.h
@@ -0,0 +1,110 @@
+/* src/config.h.  Generated from config.h.in by configure.  */
+/* src/config.h.in.  Generated from configure.ac by autoheader.  */
+
+/* Always the empty-string on non-windows systems. On windows, should be
+   "__declspec(dllexport)". This way, when we compile the dll, we export our
+   functions/classes. It's safe to define this here because config.h is only
+   used internally, to compile the DLL, and every DLL source file #includes
+   "config.h" before anything else. */
+#define GFLAGS_DLL_DECL /**/
+
+/* Namespace for Google classes */
+#define GOOGLE_NAMESPACE ::google
+
+/* Define to 1 if you have the <dlfcn.h> header file. */
+#define HAVE_DLFCN_H 1
+
+/* Define to 1 if you have the <fnmatch.h> header file. */
+#define HAVE_FNMATCH_H 1
+
+/* Define to 1 if you have the <inttypes.h> header file. */
+#define HAVE_INTTYPES_H 1
+
+/* Define to 1 if you have the <memory.h> header file. */
+#define HAVE_MEMORY_H 1
+
+/* define if the compiler implements namespaces */
+#define HAVE_NAMESPACES 1
+
+/* Define if you have POSIX threads libraries and header files. */
+#define HAVE_PTHREAD 1
+
+/* Define to 1 if you have the `putenv' function. */
+#define HAVE_PUTENV 1
+
+/* Define to 1 if you have the `setenv' function. */
+#define HAVE_SETENV 1
+
+/* Define to 1 if you have the <stdint.h> header file. */
+#define HAVE_STDINT_H 1
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#define HAVE_STDLIB_H 1
+
+/* Define to 1 if you have the <strings.h> header file. */
+#define HAVE_STRINGS_H 1
+
+/* Define to 1 if you have the <string.h> header file. */
+#define HAVE_STRING_H 1
+
+/* Define to 1 if you have the `strtoll' function. */
+#define HAVE_STRTOLL 1
+
+/* Define to 1 if you have the `strtoq' function. */
+#define HAVE_STRTOQ 1
+
+/* Define to 1 if you have the <sys/stat.h> header file. */
+#define HAVE_SYS_STAT_H 1
+
+/* Define to 1 if you have the <sys/types.h> header file. */
+#define HAVE_SYS_TYPES_H 1
+
+/* Define to 1 if you have the <unistd.h> header file. */
+#define HAVE_UNISTD_H 1
+
+/* define if your compiler has __attribute__ */
+#define HAVE___ATTRIBUTE__ 1
+
+/* Define to the sub-directory in which libtool stores uninstalled libraries.
+   */
+#define LT_OBJDIR ".libs/"
+
+/* Name of package */
+#define PACKAGE "gflags"
+
+/* Define to the address where bug reports for this package should be sent. */
+#define PACKAGE_BUGREPORT "opensource@google.com"
+
+/* Define to the full name of this package. */
+#define PACKAGE_NAME "gflags"
+
+/* Define to the full name and version of this package. */
+#define PACKAGE_STRING "gflags 1.5"
+
+/* Define to the one symbol short name of this package. */
+#define PACKAGE_TARNAME "gflags"
+
+/* Define to the home page for this package. */
+#define PACKAGE_URL ""
+
+/* Define to the version of this package. */
+#define PACKAGE_VERSION "1.5"
+
+/* Define to necessary symbol if this constant uses a non-standard name on
+   your system. */
+/* #undef PTHREAD_CREATE_JOINABLE */
+
+/* Define to 1 if you have the ANSI C header files. */
+#define STDC_HEADERS 1
+
+/* the namespace where STL code like vector<> is defined */
+#define STL_NAMESPACE std
+
+/* Version number of package */
+#define VERSION "1.5"
+
+/* Stops putting the code inside the Google namespace */
+#define _END_GOOGLE_NAMESPACE_ }
+
+/* Puts following code inside the Google namespace */
+#define _START_GOOGLE_NAMESPACE_ namespace google {
diff --git a/third_party/google-gflags/gen/arch/mac/x64/include/gflags/gflags.h b/third_party/google-gflags/gen/arch/mac/x64/include/gflags/gflags.h
new file mode 100644
index 0000000..0c2f997
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/mac/x64/include/gflags/gflags.h
@@ -0,0 +1,585 @@
+// Copyright (c) 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// ---
+// Author: Ray Sidney
+// Revamped and reorganized by Craig Silverstein
+//
+// This is the file that should be included by any file which declares
+// or defines a command line flag or wants to parse command line flags
+// or print a program usage message (which will include information about
+// flags).  Executive summary, in the form of an example foo.cc file:
+//
+//    #include "foo.h"         // foo.h has a line "DECLARE_int32(start);"
+//    #include "validators.h"  // hypothetical file defining ValidateIsFile()
+//
+//    DEFINE_int32(end, 1000, "The last record to read");
+//
+//    DEFINE_string(filename, "my_file.txt", "The file to read");
+//    // Crash if the specified file does not exist.
+//    static bool dummy = RegisterFlagValidator(&FLAGS_filename,
+//                                              &ValidateIsFile);
+//
+//    DECLARE_bool(verbose); // some other file has a DEFINE_bool(verbose, ...)
+//
+//    void MyFunc() {
+//      if (FLAGS_verbose) printf("Records %d-%d\n", FLAGS_start, FLAGS_end);
+//    }
+//
+// Then, at the command-line:
+//    ./foo --noverbose --start=5 --end=100
+//
+// For more details, see
+//    doc/gflags.html
+//
+// --- A note about thread-safety:
+//
+// We describe many functions in this routine as being thread-hostile,
+// thread-compatible, or thread-safe.  Here are the meanings we use:
+//
+// thread-safe: it is safe for multiple threads to call this routine
+//   (or, when referring to a class, methods of this class)
+//   concurrently.
+// thread-hostile: it is not safe for multiple threads to call this
+//   routine (or methods of this class) concurrently.  In gflags,
+//   most thread-hostile routines are intended to be called early in,
+//   or even before, main() -- that is, before threads are spawned.
+// thread-compatible: it is safe for multiple threads to read from
+//   this variable (when applied to variables), or to call const
+//   methods of this class (when applied to classes), as long as no
+//   other thread is writing to the variable or calling non-const
+//   methods of this class.
+
+#ifndef GOOGLE_GFLAGS_H_
+#define GOOGLE_GFLAGS_H_
+
+#include <string>
+#include <vector>
+
+// We care a lot about number of bits things take up.  Unfortunately,
+// systems define their bit-specific ints in a lot of different ways.
+// We use our own way, and have a typedef to get there.
+// Note: these commands below may look like "#if 1" or "#if 0", but
+// that's because they were constructed that way at ./configure time.
+// Look at gflags.h.in to see how they're calculated (based on your config).
+#if 1
+#include <stdint.h>             // the normal place uint16_t is defined
+#endif
+#if 1
+#include <sys/types.h>          // the normal place u_int16_t is defined
+#endif
+#if 1
+#include <inttypes.h>           // a third place for uint16_t or u_int16_t
+#endif
+
+namespace google {
+
+#if 1      // the C99 format
+typedef int32_t int32;
+typedef uint32_t uint32;
+typedef int64_t int64;
+typedef uint64_t uint64;
+#elif 1   // the BSD format
+typedef int32_t int32;
+typedef u_int32_t uint32;
+typedef int64_t int64;
+typedef u_int64_t uint64;
+#elif 0     // the windows (vc7) format
+typedef __int32 int32;
+typedef unsigned __int32 uint32;
+typedef __int64 int64;
+typedef unsigned __int64 uint64;
+#else
+#error Do not know how to define a 32-bit integer quantity on your system
+#endif
+
+// --------------------------------------------------------------------
+// To actually define a flag in a file, use DEFINE_bool,
+// DEFINE_string, etc. at the bottom of this file.  You may also find
+// it useful to register a validator with the flag.  This ensures that
+// when the flag is parsed from the commandline, or is later set via
+// SetCommandLineOption, we call the validation function. It is _not_
+// called when you assign the value to the flag directly using the = operator.
+//
+// The validation function should return true if the flag value is valid, and
+// false otherwise. If the function returns false for the new setting of the
+// flag, the flag will retain its current value. If it returns false for the
+// default value, ParseCommandLineFlags() will die.
+//
+// This function is safe to call at global construct time (as in the
+// example below).
+//
+// Example use:
+//    static bool ValidatePort(const char* flagname, int32 value) {
+//       if (value > 0 && value < 32768)   // value is ok
+//         return true;
+//       printf("Invalid value for --%s: %d\n", flagname, (int)value);
+//       return false;
+//    }
+//    DEFINE_int32(port, 0, "What port to listen on");
+//    static bool dummy = RegisterFlagValidator(&FLAGS_port, &ValidatePort);
+
+// Returns true if successfully registered, false if not (because the
+// first argument doesn't point to a command-line flag, or because a
+// validator is already registered for this flag).
+bool RegisterFlagValidator(const bool* flag,
+                           bool (*validate_fn)(const char*, bool));
+bool RegisterFlagValidator(const int32* flag,
+                           bool (*validate_fn)(const char*, int32));
+bool RegisterFlagValidator(const int64* flag,
+                           bool (*validate_fn)(const char*, int64));
+bool RegisterFlagValidator(const uint64* flag,
+                           bool (*validate_fn)(const char*, uint64));
+bool RegisterFlagValidator(const double* flag,
+                           bool (*validate_fn)(const char*, double));
+bool RegisterFlagValidator(const std::string* flag,
+                           bool (*validate_fn)(const char*, const std::string&));
+
+
+// --------------------------------------------------------------------
+// These methods are the best way to get access to info about the
+// list of commandline flags.  Note that these routines are pretty slow.
+//   GetAllFlags: mostly-complete info about the list, sorted by file.
+//   ShowUsageWithFlags: pretty-prints the list to stdout (what --help does)
+//   ShowUsageWithFlagsRestrict: limit to filenames with restrict as a substr
+//
+// In addition to accessing flags, you can also access argv[0] (the program
+// name) and argv (the entire commandline), which we sock away a copy of.
+// These variables are static, so you should only set them once.
+
+struct CommandLineFlagInfo {
+  std::string name;           // the name of the flag
+  std::string type;           // the type of the flag: int32, etc
+  std::string description;    // the "help text" associated with the flag
+  std::string current_value;  // the current value, as a string
+  std::string default_value;  // the default value, as a string
+  std::string filename;       // 'cleaned' version of filename holding the flag
+  bool has_validator_fn;      // true if RegisterFlagValidator called on flag
+  bool is_default;            // true if the flag has the default value and
+                              // has not been set explicitly from the cmdline
+                              // or via SetCommandLineOption
+};
+
+// Using this inside of a validator is a recipe for a deadlock.
+// TODO(wojtekm) Fix locking when validators are running, to make it safe to
+// call validators during ParseAllFlags.
+// Also make sure then to uncomment the corresponding unit test in
+// commandlineflags_unittest.sh
+extern void GetAllFlags(std::vector<CommandLineFlagInfo>* OUTPUT);
+// These two are actually defined in commandlineflags_reporting.cc.
+extern void ShowUsageWithFlags(const char *argv0);  // what --help does
+extern void ShowUsageWithFlagsRestrict(const char *argv0, const char *restrict);
+
+// Create a descriptive string for a flag.
+// Goes to some trouble to make pretty line breaks.
+extern std::string DescribeOneFlag(const CommandLineFlagInfo& flag);
+
+// Thread-hostile; meant to be called before any threads are spawned.
+extern void SetArgv(int argc, const char** argv);
+// The following functions are thread-safe as long as SetArgv() is
+// only called before any threads start.
+extern const std::vector<std::string>& GetArgvs();  // all of argv as a vector
+extern const char* GetArgv();                // all of argv as a string
+extern const char* GetArgv0();               // only argv0
+extern uint32 GetArgvSum();                  // simple checksum of argv
+extern const char* ProgramInvocationName();  // argv0, or "UNKNOWN" if not set
+extern const char* ProgramInvocationShortName();   // basename(argv0)
+// ProgramUsage() is thread-safe as long as SetUsageMessage() is only
+// called before any threads start.
+extern const char* ProgramUsage();           // string set by SetUsageMessage()
+
+
+// --------------------------------------------------------------------
+// Normally you access commandline flags by just saying "if (FLAGS_foo)"
+// or whatever, and set them by calling "FLAGS_foo = bar" (or, more
+// commonly, via the DEFINE_foo macro).  But if you need a bit more
+// control, we have programmatic ways to get/set the flags as well.
+// These programmatic ways to access flags are thread-safe, but direct
+// access is only thread-compatible.
+
+// Return true iff the flagname was found.
+// OUTPUT is set to the flag's value, or unchanged if we return false.
+extern bool GetCommandLineOption(const char* name, std::string* OUTPUT);
+
+// Return true iff the flagname was found. OUTPUT is set to the flag's
+// CommandLineFlagInfo or unchanged if we return false.
+extern bool GetCommandLineFlagInfo(const char* name,
+                                   CommandLineFlagInfo* OUTPUT);
+
+// Return the CommandLineFlagInfo of the flagname.  exit() if name not found.
+// Example usage, to check if a flag's value is currently the default value:
+//   if (GetCommandLineFlagInfoOrDie("foo").is_default) ...
+extern CommandLineFlagInfo GetCommandLineFlagInfoOrDie(const char* name);
+
+enum FlagSettingMode {
+  // update the flag's value (can call this multiple times).
+  SET_FLAGS_VALUE,
+  // update the flag's value, but *only if* it has not yet been updated
+  // with SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef".
+  SET_FLAG_IF_DEFAULT,
+  // set the flag's default value to this.  If the flag has not yet updated
+  // yet (via SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef")
+  // change the flag's current value to the new default value as well.
+  SET_FLAGS_DEFAULT
+};
+
+// Set a particular flag ("command line option").  Returns a string
+// describing the new value that the option has been set to.  The
+// return value API is not well-specified, so basically just depend on
+// it to be empty if the setting failed for some reason -- the name is
+// not a valid flag name, or the value is not a valid value -- and
+// non-empty else.
+
+// SetCommandLineOption uses set_mode == SET_FLAGS_VALUE (the common case)
+extern std::string SetCommandLineOption(const char* name, const char* value);
+extern std::string SetCommandLineOptionWithMode(const char* name, const char* value,
+                                                FlagSettingMode set_mode);
+
+
+// --------------------------------------------------------------------
+// Saves the states (value, default value, whether the user has set
+// the flag, registered validators, etc) of all flags, and restores
+// them when the FlagSaver is destroyed.  This is very useful in
+// tests, say, when you want to let your tests change the flags, but
+// make sure that they get reverted to the original states when your
+// test is complete.
+//
+// Example usage:
+//   void TestFoo() {
+//     FlagSaver s1;
+//     FLAG_foo = false;
+//     FLAG_bar = "some value";
+//
+//     // test happens here.  You can return at any time
+//     // without worrying about restoring the FLAG values.
+//   }
+//
+// Note: This class is marked with __attribute__((unused)) because all the
+// work is done in the constructor and destructor, so in the standard
+// usage example above, the compiler would complain that it's an
+// unused variable.
+//
+// This class is thread-safe.
+
+class FlagSaver {
+ public:
+  FlagSaver();
+  ~FlagSaver();
+
+ private:
+  class FlagSaverImpl* impl_;   // we use pimpl here to keep API steady
+
+  FlagSaver(const FlagSaver&);  // no copying!
+  void operator=(const FlagSaver&);
+} __attribute__ ((unused));
+
+// --------------------------------------------------------------------
+// Some deprecated or hopefully-soon-to-be-deprecated functions.
+
+// This is often used for logging.  TODO(csilvers): figure out a better way
+extern std::string CommandlineFlagsIntoString();
+// Usually where this is used, a FlagSaver should be used instead.
+extern bool ReadFlagsFromString(const std::string& flagfilecontents,
+                                const char* prog_name,
+                                bool errors_are_fatal); // uses SET_FLAGS_VALUE
+
+// These let you manually implement --flagfile functionality.
+// DEPRECATED.
+extern bool AppendFlagsIntoFile(const std::string& filename, const char* prog_name);
+extern bool SaveCommandFlags();  // actually defined in google.cc !
+extern bool ReadFromFlagsFile(const std::string& filename, const char* prog_name,
+                              bool errors_are_fatal);   // uses SET_FLAGS_VALUE
+
+
+// --------------------------------------------------------------------
+// Useful routines for initializing flags from the environment.
+// In each case, if 'varname' does not exist in the environment
+// return defval.  If 'varname' does exist but is not valid
+// (e.g., not a number for an int32 flag), abort with an error.
+// Otherwise, return the value.  NOTE: for booleans, for true use
+// 't' or 'T' or 'true' or '1', for false 'f' or 'F' or 'false' or '0'.
+
+extern bool BoolFromEnv(const char *varname, bool defval);
+extern int32 Int32FromEnv(const char *varname, int32 defval);
+extern int64 Int64FromEnv(const char *varname, int64 defval);
+extern uint64 Uint64FromEnv(const char *varname, uint64 defval);
+extern double DoubleFromEnv(const char *varname, double defval);
+extern const char *StringFromEnv(const char *varname, const char *defval);
+
+
+// --------------------------------------------------------------------
+// The next two functions parse commandlineflags from main():
+
+// Set the "usage" message for this program.  For example:
+//   string usage("This program does nothing.  Sample usage:\n");
+//   usage += argv[0] + " <uselessarg1> <uselessarg2>";
+//   SetUsageMessage(usage);
+// Do not include commandline flags in the usage: we do that for you!
+// Thread-hostile; meant to be called before any threads are spawned.
+extern void SetUsageMessage(const std::string& usage);
+
+// Looks for flags in argv and parses them.  Rearranges argv to put
+// flags first, or removes them entirely if remove_flags is true.
+// If a flag is defined more than once in the command line or flag
+// file, the last definition is used.  Returns the index (into argv)
+// of the first non-flag argument.
+// See top-of-file for more details on this function.
+#ifndef SWIG   // In swig, use ParseCommandLineFlagsScript() instead.
+extern uint32 ParseCommandLineFlags(int *argc, char*** argv,
+                                    bool remove_flags);
+#endif
+
+
+// Calls to ParseCommandLineNonHelpFlags and then to
+// HandleCommandLineHelpFlags can be used instead of a call to
+// ParseCommandLineFlags during initialization, in order to allow for
+// changing default values for some FLAGS (via
+// e.g. SetCommandLineOptionWithMode calls) between the time of
+// command line parsing and the time of dumping help information for
+// the flags as a result of command line parsing.  If a flag is
+// defined more than once in the command line or flag file, the last
+// definition is used.  Returns the index (into argv) of the first
+// non-flag argument.  (If remove_flags is true, will always return 1.)
+extern uint32 ParseCommandLineNonHelpFlags(int *argc, char*** argv,
+                                           bool remove_flags);
+// This is actually defined in commandlineflags_reporting.cc.
+// This function is misnamed (it also handles --version, etc.), but
+// it's too late to change that now. :-(
+extern void HandleCommandLineHelpFlags();   // in commandlineflags_reporting.cc
+
+// Allow command line reparsing.  Disables the error normally
+// generated when an unknown flag is found, since it may be found in a
+// later parse.  Thread-hostile; meant to be called before any threads
+// are spawned.
+extern void AllowCommandLineReparsing();
+
+// Reparse the flags that have not yet been recognized.  Only flags
+// registered since the last parse will be recognized.  Any flag value
+// must be provided as part of the argument using "=", not as a
+// separate command line argument that follows the flag argument.
+// Intended for handling flags from dynamically loaded libraries,
+// since their flags are not registered until they are loaded.
+// Returns the index (into the original argv) of the first non-flag
+// argument.  (If remove_flags is true, will always return 1.)
+extern uint32 ReparseCommandLineNonHelpFlags();
+
+// Clean up memory allocated by flags.  This is only needed to reduce
+// the quantity of "potentially leaked" reports emitted by memory
+// debugging tools such as valgrind.  It is not required for normal
+// operation, or for the perftools heap-checker.  It must only be called
+// when the process is about to exit, and all threads that might
+// access flags are quiescent.  Referencing flags after this is called
+// will have unexpected consequences.  This is not safe to run when
+// multiple threads might be running: the function is thread-hostile.
+extern void ShutDownCommandLineFlags();
+
+
+// --------------------------------------------------------------------
+// Now come the command line flag declaration/definition macros that
+// will actually be used.  They're kind of hairy.  A major reason
+// for this is initialization: we want people to be able to access
+// variables in global constructors and have that not crash, even if
+// their global constructor runs before the global constructor here.
+// (Obviously, we can't guarantee the flags will have the correct
+// default value in that case, but at least accessing them is safe.)
+// The only way to do that is have flags point to a static buffer.
+// So we make one, using a union to ensure proper alignment, and
+// then use placement-new to actually set up the flag with the
+// correct default value.  In the same vein, we have to worry about
+// flag access in global destructors, so FlagRegisterer has to be
+// careful never to destroy the flag-values it constructs.
+//
+// Note that when we define a flag variable FLAGS_<name>, we also
+// preemptively define a junk variable, FLAGS_no<name>.  This is to
+// cause a link-time error if someone tries to define 2 flags with
+// names like "logging" and "nologging".  We do this because a bool
+// flag FLAG can be set from the command line to true with a "-FLAG"
+// argument, and to false with a "-noFLAG" argument, and so this can
+// potentially avert confusion.
+//
+// We also put flags into their own namespace.  It is purposefully
+// named in an opaque way that people should have trouble typing
+// directly.  The idea is that DEFINE puts the flag in the weird
+// namespace, and DECLARE imports the flag from there into the current
+// namespace.  The net result is to force people to use DECLARE to get
+// access to a flag, rather than saying "extern bool FLAGS_whatever;"
+// or some such instead.  We want this so we can put extra
+// functionality (like sanity-checking) in DECLARE if we want, and
+// make sure it is picked up everywhere.
+//
+// We also put the type of the variable in the namespace, so that
+// people can't DECLARE_int32 something that they DEFINE_bool'd
+// elsewhere.
+
+class FlagRegisterer {
+ public:
+  FlagRegisterer(const char* name, const char* type,
+                 const char* help, const char* filename,
+                 void* current_storage, void* defvalue_storage);
+};
+
+extern bool FlagsTypeWarn(const char *name);
+
+// If your application #defines STRIP_FLAG_HELP to a non-zero value
+// before #including this file, we remove the help message from the
+// binary file. This can reduce the size of the resulting binary
+// somewhat, and may also be useful for security reasons.
+
+extern const char kStrippedFlagHelp[];
+
+}
+
+#ifndef SWIG  // In swig, ignore the main flag declarations
+
+#if defined(STRIP_FLAG_HELP) && STRIP_FLAG_HELP > 0
+// Need this construct to avoid the 'defined but not used' warning.
+#define MAYBE_STRIPPED_HELP(txt) (false ? (txt) : ::google::kStrippedFlagHelp)
+#else
+#define MAYBE_STRIPPED_HELP(txt) txt
+#endif
+
+// Each command-line flag has two variables associated with it: one
+// with the current value, and one with the default value.  However,
+// we have a third variable, which is where value is assigned; it's a
+// constant.  This guarantees that FLAG_##value is initialized at
+// static initialization time (e.g. before program-start) rather than
+// than global construction time (which is after program-start but
+// before main), at least when 'value' is a compile-time constant.  We
+// use a small trick for the "default value" variable, and call it
+// FLAGS_no<name>.  This serves the second purpose of assuring a
+// compile error if someone tries to define a flag named no<name>
+// which is illegal (--foo and --nofoo both affect the "foo" flag).
+#define DEFINE_VARIABLE(type, shorttype, name, value, help) \
+  namespace fL##shorttype {                                     \
+    static const type FLAGS_nono##name = value;                 \
+    type FLAGS_##name = FLAGS_nono##name;                       \
+    type FLAGS_no##name = FLAGS_nono##name;                     \
+    static ::google::FlagRegisterer o_##name(      \
+      #name, #type, MAYBE_STRIPPED_HELP(help), __FILE__,        \
+      &FLAGS_##name, &FLAGS_no##name);                          \
+  }                                                             \
+  using fL##shorttype::FLAGS_##name
+
+#define DECLARE_VARIABLE(type, shorttype, name) \
+  namespace fL##shorttype {                     \
+    extern type FLAGS_##name;                   \
+  }                                             \
+  using fL##shorttype::FLAGS_##name
+
+// For DEFINE_bool, we want to do the extra check that the passed-in
+// value is actually a bool, and not a string or something that can be
+// coerced to a bool.  These declarations (no definition needed!) will
+// help us do that, and never evaluate From, which is important.
+// We'll use 'sizeof(IsBool(val))' to distinguish. This code requires
+// that the compiler have different sizes for bool & double. Since
+// this is not guaranteed by the standard, we check it with a
+// compile-time assert (msg[-1] will give a compile-time error).
+namespace fLB {
+struct CompileAssert {};
+typedef CompileAssert expected_sizeof_double_neq_sizeof_bool[
+                      (sizeof(double) != sizeof(bool)) ? 1 : -1];
+template<typename From> double IsBoolFlag(const From& from);
+bool IsBoolFlag(bool from);
+}  // namespace fLB
+
+#define DECLARE_bool(name)          DECLARE_VARIABLE(bool, B, name)
+#define DEFINE_bool(name, val, txt)                                       \
+  namespace fLB {                                                         \
+    typedef ::fLB::CompileAssert FLAG_##name##_value_is_not_a_bool[       \
+            (sizeof(::fLB::IsBoolFlag(val)) != sizeof(double)) ? 1 : -1]; \
+  }                                                                       \
+  DEFINE_VARIABLE(bool, B, name, val, txt)
+
+#define DECLARE_int32(name)         DECLARE_VARIABLE(::google::int32, I, name)
+#define DEFINE_int32(name,val,txt)  DEFINE_VARIABLE(::google::int32, I, name, val, txt)
+
+#define DECLARE_int64(name)         DECLARE_VARIABLE(::google::int64, I64, name)
+#define DEFINE_int64(name,val,txt)  DEFINE_VARIABLE(::google::int64, I64, name, val, txt)
+
+#define DECLARE_uint64(name)        DECLARE_VARIABLE(::google::uint64, U64, name)
+#define DEFINE_uint64(name,val,txt) DEFINE_VARIABLE(::google::uint64, U64, name, val, txt)
+
+#define DECLARE_double(name)          DECLARE_VARIABLE(double, D, name)
+#define DEFINE_double(name, val, txt) DEFINE_VARIABLE(double, D, name, val, txt)
+
+// Strings are trickier, because they're not a POD, so we can't
+// construct them at static-initialization time (instead they get
+// constructed at global-constructor time, which is much later).  To
+// try to avoid crashes in that case, we use a char buffer to store
+// the string, which we can static-initialize, and then placement-new
+// into it later.  It's not perfect, but the best we can do.
+
+namespace fLS {
+// The meaning of "string" might be different between now and when the
+// macros below get invoked (e.g., if someone is experimenting with
+// other string implementations that get defined after this file is
+// included).  Save the current meaning now and use it in the macros.
+typedef std::string clstring;
+
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const char *value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const clstring &value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           int value);
+}  // namespace fLS
+
+#define DECLARE_string(name)  namespace fLS { extern ::fLS::clstring& FLAGS_##name; } \
+                              using fLS::FLAGS_##name
+
+// We need to define a var named FLAGS_no##name so people don't define
+// --string and --nostring.  And we need a temporary place to put val
+// so we don't have to evaluate it twice.  Two great needs that go
+// great together!
+// The weird 'using' + 'extern' inside the fLS namespace is to work around
+// an unknown compiler bug/issue with the gcc 4.2.1 on SUSE 10.  See
+//    http://code.google.com/p/google-gflags/issues/detail?id=20
+#define DEFINE_string(name, val, txt)                                       \
+  namespace fLS {                                                           \
+    using ::fLS::clstring;                                                  \
+    static union { void* align; char s[sizeof(clstring)]; } s_##name[2];    \
+    clstring* const FLAGS_no##name = ::fLS::                                \
+                                   dont_pass0toDEFINE_string(s_##name[0].s, \
+                                                             val);          \
+    static ::google::FlagRegisterer o_##name(                  \
+        #name, "string", MAYBE_STRIPPED_HELP(txt), __FILE__,                \
+        s_##name[0].s, new (s_##name[1].s) clstring(*FLAGS_no##name));      \
+    extern clstring& FLAGS_##name;                                          \
+    using fLS::FLAGS_##name;                                                \
+    clstring& FLAGS_##name = *FLAGS_no##name;                               \
+  }                                                                         \
+  using fLS::FLAGS_##name
+
+#endif  // SWIG
+
+#endif  // GOOGLE_GFLAGS_H_
diff --git a/third_party/google-gflags/gen/arch/mac/x64/include/gflags/gflags_completions.h b/third_party/google-gflags/gen/arch/mac/x64/include/gflags/gflags_completions.h
new file mode 100644
index 0000000..9d9ce7a
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/mac/x64/include/gflags/gflags_completions.h
@@ -0,0 +1,121 @@
+// Copyright (c) 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// ---
+// Author: Dave Nicponski
+//
+// Implement helpful bash-style command line flag completions
+//
+// ** Functional API:
+// HandleCommandLineCompletions() should be called early during
+// program startup, but after command line flag code has been
+// initialized, such as the beginning of HandleCommandLineHelpFlags().
+// It checks the value of the flag --tab_completion_word.  If this
+// flag is empty, nothing happens here.  If it contains a string,
+// however, then HandleCommandLineCompletions() will hijack the
+// process, attempting to identify the intention behind this
+// completion.  Regardless of the outcome of this deduction, the
+// process will be terminated, similar to --helpshort flag
+// handling.
+//
+// ** Overview of Bash completions:
+// Bash can be told to programatically determine completions for the
+// current 'cursor word'.  It does this by (in this case) invoking a
+// command with some additional arguments identifying the command
+// being executed, the word being completed, and the previous word
+// (if any).  Bash then expects a sequence of output lines to be
+// printed to stdout.  If these lines all contain a common prefix
+// longer than the cursor word, bash will replace the cursor word
+// with that common prefix, and display nothing.  If there isn't such
+// a common prefix, bash will display the lines in pages using 'more'.
+//
+// ** Strategy taken for command line completions:
+// If we can deduce either the exact flag intended, or a common flag
+// prefix, we'll output exactly that.  Otherwise, if information
+// must be displayed to the user, we'll take the opportunity to add
+// some helpful information beyond just the flag name (specifically,
+// we'll include the default flag value and as much of the flag's
+// description as can fit on a single terminal line width, as specified
+// by the flag --tab_completion_columns).  Furthermore, we'll try to
+// make bash order the output such that the most useful or relevent
+// flags are the most likely to be shown at the top.
+//
+// ** Additional features:
+// To assist in finding that one really useful flag, substring matching
+// was implemented.  Before pressing a <TAB> to get completion for the
+// current word, you can append one or more '?' to the flag to do
+// substring matching.  Here's the semantics:
+//   --foo<TAB>     Show me all flags with names prefixed by 'foo'
+//   --foo?<TAB>    Show me all flags with 'foo' somewhere in the name
+//   --foo??<TAB>   Same as prior case, but also search in module
+//                  definition path for 'foo'
+//   --foo???<TAB>  Same as prior case, but also search in flag
+//                  descriptions for 'foo'
+// Finally, we'll trim the output to a relatively small number of
+// flags to keep bash quiet about the verbosity of output.  If one
+// really wanted to see all possible matches, appending a '+' to the
+// search word will force the exhaustive list of matches to be printed.
+//
+// ** How to have bash accept completions from a binary:
+// Bash requires that it be informed about each command that programmatic
+// completion should be enabled for.  Example addition to a .bashrc
+// file would be (your path to gflags_completions.sh file may differ):
+
+/*
+$ complete -o bashdefault -o default -o nospace -C                        \
+ '/usr/local/bin/gflags_completions.sh --tab_completion_columns $COLUMNS' \
+  time  env  binary_name  another_binary  [...]
+*/
+
+// This would allow the following to work:
+//   $ /path/to/binary_name --vmodule<TAB>
+// Or:
+//   $ ./bin/path/another_binary --gfs_u<TAB>
+// (etc)
+//
+// Sadly, it appears that bash gives no easy way to force this behavior for
+// all commands.  That's where the "time" in the above example comes in.
+// If you haven't specifically added a command to the list of completion
+// supported commands, you can still get completions by prefixing the
+// entire command with "env".
+//   $ env /some/brand/new/binary --vmod<TAB>
+// Assuming that "binary" is a newly compiled binary, this should still
+// produce the expected completion output.
+
+
+#ifndef GOOGLE_GFLAGS_COMPLETIONS_H_
+#define GOOGLE_GFLAGS_COMPLETIONS_H_
+
+namespace google {
+
+void HandleCommandLineCompletions(void);
+
+}
+
+#endif  // GOOGLE_GFLAGS_COMPLETIONS_H_
diff --git a/third_party/google-gflags/gen/arch/mac/x64/include/private/config.h b/third_party/google-gflags/gen/arch/mac/x64/include/private/config.h
new file mode 100644
index 0000000..98d8e1a
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/mac/x64/include/private/config.h
@@ -0,0 +1,110 @@
+/* src/config.h.  Generated from config.h.in by configure.  */
+/* src/config.h.in.  Generated from configure.ac by autoheader.  */
+
+/* Always the empty-string on non-windows systems. On windows, should be
+   "__declspec(dllexport)". This way, when we compile the dll, we export our
+   functions/classes. It's safe to define this here because config.h is only
+   used internally, to compile the DLL, and every DLL source file #includes
+   "config.h" before anything else. */
+#define GFLAGS_DLL_DECL /**/
+
+/* Namespace for Google classes */
+#define GOOGLE_NAMESPACE ::google
+
+/* Define to 1 if you have the <dlfcn.h> header file. */
+#define HAVE_DLFCN_H 1
+
+/* Define to 1 if you have the <fnmatch.h> header file. */
+#define HAVE_FNMATCH_H 1
+
+/* Define to 1 if you have the <inttypes.h> header file. */
+#define HAVE_INTTYPES_H 1
+
+/* Define to 1 if you have the <memory.h> header file. */
+#define HAVE_MEMORY_H 1
+
+/* define if the compiler implements namespaces */
+#define HAVE_NAMESPACES 1
+
+/* Define if you have POSIX threads libraries and header files. */
+#define HAVE_PTHREAD 1
+
+/* Define to 1 if you have the `putenv' function. */
+#define HAVE_PUTENV 1
+
+/* Define to 1 if you have the `setenv' function. */
+#define HAVE_SETENV 1
+
+/* Define to 1 if you have the <stdint.h> header file. */
+#define HAVE_STDINT_H 1
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#define HAVE_STDLIB_H 1
+
+/* Define to 1 if you have the <strings.h> header file. */
+#define HAVE_STRINGS_H 1
+
+/* Define to 1 if you have the <string.h> header file. */
+#define HAVE_STRING_H 1
+
+/* Define to 1 if you have the `strtoll' function. */
+#define HAVE_STRTOLL 1
+
+/* Define to 1 if you have the `strtoq' function. */
+#define HAVE_STRTOQ 1
+
+/* Define to 1 if you have the <sys/stat.h> header file. */
+#define HAVE_SYS_STAT_H 1
+
+/* Define to 1 if you have the <sys/types.h> header file. */
+#define HAVE_SYS_TYPES_H 1
+
+/* Define to 1 if you have the <unistd.h> header file. */
+#define HAVE_UNISTD_H 1
+
+/* define if your compiler has __attribute__ */
+#define HAVE___ATTRIBUTE__ 1
+
+/* Define to the sub-directory in which libtool stores uninstalled libraries.
+   */
+#define LT_OBJDIR ".libs/"
+
+/* Name of package */
+#define PACKAGE "gflags"
+
+/* Define to the address where bug reports for this package should be sent. */
+#define PACKAGE_BUGREPORT "opensource@google.com"
+
+/* Define to the full name of this package. */
+#define PACKAGE_NAME "gflags"
+
+/* Define to the full name and version of this package. */
+#define PACKAGE_STRING "gflags 1.5"
+
+/* Define to the one symbol short name of this package. */
+#define PACKAGE_TARNAME "gflags"
+
+/* Define to the home page for this package. */
+#define PACKAGE_URL ""
+
+/* Define to the version of this package. */
+#define PACKAGE_VERSION "1.5"
+
+/* Define to necessary symbol if this constant uses a non-standard name on
+   your system. */
+/* #undef PTHREAD_CREATE_JOINABLE */
+
+/* Define to 1 if you have the ANSI C header files. */
+#define STDC_HEADERS 1
+
+/* the namespace where STL code like vector<> is defined */
+#define STL_NAMESPACE std
+
+/* Version number of package */
+#define VERSION "1.5"
+
+/* Stops putting the code inside the Google namespace */
+#define _END_GOOGLE_NAMESPACE_ }
+
+/* Puts following code inside the Google namespace */
+#define _START_GOOGLE_NAMESPACE_ namespace google {
diff --git a/third_party/google-gflags/gen/arch/win/ia32/include/gflags/gflags.h b/third_party/google-gflags/gen/arch/win/ia32/include/gflags/gflags.h
new file mode 100644
index 0000000..fdafe2a
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/win/ia32/include/gflags/gflags.h
@@ -0,0 +1,601 @@
+// Copyright (c) 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// ---
+// Author: Ray Sidney
+// Revamped and reorganized by Craig Silverstein
+//
+// This is the file that should be included by any file which declares
+// or defines a command line flag or wants to parse command line flags
+// or print a program usage message (which will include information about
+// flags).  Executive summary, in the form of an example foo.cc file:
+//
+//    #include "foo.h"         // foo.h has a line "DECLARE_int32(start);"
+//    #include "validators.h"  // hypothetical file defining ValidateIsFile()
+//
+//    DEFINE_int32(end, 1000, "The last record to read");
+//
+//    DEFINE_string(filename, "my_file.txt", "The file to read");
+//    // Crash if the specified file does not exist.
+//    static bool dummy = RegisterFlagValidator(&FLAGS_filename,
+//                                              &ValidateIsFile);
+//
+//    DECLARE_bool(verbose); // some other file has a DEFINE_bool(verbose, ...)
+//
+//    void MyFunc() {
+//      if (FLAGS_verbose) printf("Records %d-%d\n", FLAGS_start, FLAGS_end);
+//    }
+//
+// Then, at the command-line:
+//    ./foo --noverbose --start=5 --end=100
+//
+// For more details, see
+//    doc/gflags.html
+//
+// --- A note about thread-safety:
+//
+// We describe many functions in this routine as being thread-hostile,
+// thread-compatible, or thread-safe.  Here are the meanings we use:
+//
+// thread-safe: it is safe for multiple threads to call this routine
+//   (or, when referring to a class, methods of this class)
+//   concurrently.
+// thread-hostile: it is not safe for multiple threads to call this
+//   routine (or methods of this class) concurrently.  In gflags,
+//   most thread-hostile routines are intended to be called early in,
+//   or even before, main() -- that is, before threads are spawned.
+// thread-compatible: it is safe for multiple threads to read from
+//   this variable (when applied to variables), or to call const
+//   methods of this class (when applied to classes), as long as no
+//   other thread is writing to the variable or calling non-const
+//   methods of this class.
+
+#ifndef GOOGLE_GFLAGS_H_
+#define GOOGLE_GFLAGS_H_
+
+#include <string>
+#include <vector>
+
+// We care a lot about number of bits things take up.  Unfortunately,
+// systems define their bit-specific ints in a lot of different ways.
+// We use our own way, and have a typedef to get there.
+// Note: these commands below may look like "#if 1" or "#if 0", but
+// that's because they were constructed that way at ./configure time.
+// Look at gflags.h.in to see how they're calculated (based on your config).
+#if 0
+#include <stdint.h>             // the normal place uint16_t is defined
+#endif
+#if 1
+#include <sys/types.h>          // the normal place u_int16_t is defined
+#endif
+#if 0
+#include <inttypes.h>           // a third place for uint16_t or u_int16_t
+#endif
+
+// Annoying stuff for windows -- makes sure clients can import these functions
+#if defined(_WIN32)
+# ifndef GFLAGS_DLL_DECL
+#   define GFLAGS_DLL_DECL  __declspec(dllimport)
+# endif
+# ifndef GFLAGS_DLL_DECLARE_FLAG
+#   define GFLAGS_DLL_DECLARE_FLAG  __declspec(dllimport)
+# endif
+# ifndef GFLAGS_DLL_DEFINE_FLAG
+#   define GFLAGS_DLL_DEFINE_FLAG   __declspec(dllexport)
+# endif
+#else
+# ifndef GFLAGS_DLL_DECL
+#   define GFLAGS_DLL_DECL
+# endif
+# ifndef GFLAGS_DLL_DECLARE_FLAG
+#   define GFLAGS_DLL_DECLARE_FLAG
+# endif
+# ifndef GFLAGS_DLL_DEFINE_FLAG
+#   define GFLAGS_DLL_DEFINE_FLAG
+# endif
+#endif
+
+namespace google {
+
+#if 0      // the C99 format
+typedef int32_t int32;
+typedef uint32_t uint32;
+typedef int64_t int64;
+typedef uint64_t uint64;
+#elif 0   // the BSD format
+typedef int32_t int32;
+typedef u_int32_t uint32;
+typedef int64_t int64;
+typedef u_int64_t uint64;
+#elif 1     // the windows (vc7) format
+typedef __int32 int32;
+typedef unsigned __int32 uint32;
+typedef __int64 int64;
+typedef unsigned __int64 uint64;
+#else
+#error Do not know how to define a 32-bit integer quantity on your system
+#endif
+
+// --------------------------------------------------------------------
+// To actually define a flag in a file, use DEFINE_bool,
+// DEFINE_string, etc. at the bottom of this file.  You may also find
+// it useful to register a validator with the flag.  This ensures that
+// when the flag is parsed from the commandline, or is later set via
+// SetCommandLineOption, we call the validation function. It is _not_
+// called when you assign the value to the flag directly using the = operator.
+//
+// The validation function should return true if the flag value is valid, and
+// false otherwise. If the function returns false for the new setting of the
+// flag, the flag will retain its current value. If it returns false for the
+// default value, ParseCommandLineFlags() will die.
+//
+// This function is safe to call at global construct time (as in the
+// example below).
+//
+// Example use:
+//    static bool ValidatePort(const char* flagname, int32 value) {
+//       if (value > 0 && value < 32768)   // value is ok
+//         return true;
+//       printf("Invalid value for --%s: %d\n", flagname, (int)value);
+//       return false;
+//    }
+//    DEFINE_int32(port, 0, "What port to listen on");
+//    static bool dummy = RegisterFlagValidator(&FLAGS_port, &ValidatePort);
+
+// Returns true if successfully registered, false if not (because the
+// first argument doesn't point to a command-line flag, or because a
+// validator is already registered for this flag).
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const bool* flag,
+                           bool (*validate_fn)(const char*, bool));
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const int32* flag,
+                           bool (*validate_fn)(const char*, int32));
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const int64* flag,
+                           bool (*validate_fn)(const char*, int64));
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const uint64* flag,
+                           bool (*validate_fn)(const char*, uint64));
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const double* flag,
+                           bool (*validate_fn)(const char*, double));
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const std::string* flag,
+                           bool (*validate_fn)(const char*, const std::string&));
+
+
+// --------------------------------------------------------------------
+// These methods are the best way to get access to info about the
+// list of commandline flags.  Note that these routines are pretty slow.
+//   GetAllFlags: mostly-complete info about the list, sorted by file.
+//   ShowUsageWithFlags: pretty-prints the list to stdout (what --help does)
+//   ShowUsageWithFlagsRestrict: limit to filenames with restrict as a substr
+//
+// In addition to accessing flags, you can also access argv[0] (the program
+// name) and argv (the entire commandline), which we sock away a copy of.
+// These variables are static, so you should only set them once.
+
+struct GFLAGS_DLL_DECL CommandLineFlagInfo {
+  std::string name;           // the name of the flag
+  std::string type;           // the type of the flag: int32, etc
+  std::string description;    // the "help text" associated with the flag
+  std::string current_value;  // the current value, as a string
+  std::string default_value;  // the default value, as a string
+  std::string filename;       // 'cleaned' version of filename holding the flag
+  bool has_validator_fn;      // true if RegisterFlagValidator called on flag
+  bool is_default;            // true if the flag has the default value and
+                              // has not been set explicitly from the cmdline
+                              // or via SetCommandLineOption
+};
+
+// Using this inside of a validator is a recipe for a deadlock.
+// TODO(wojtekm) Fix locking when validators are running, to make it safe to
+// call validators during ParseAllFlags.
+// Also make sure then to uncomment the corresponding unit test in
+// commandlineflags_unittest.sh
+extern GFLAGS_DLL_DECL void GetAllFlags(std::vector<CommandLineFlagInfo>* OUTPUT);
+// These two are actually defined in commandlineflags_reporting.cc.
+extern GFLAGS_DLL_DECL void ShowUsageWithFlags(const char *argv0);  // what --help does
+extern GFLAGS_DLL_DECL void ShowUsageWithFlagsRestrict(const char *argv0, const char *restrict);
+
+// Create a descriptive string for a flag.
+// Goes to some trouble to make pretty line breaks.
+extern GFLAGS_DLL_DECL std::string DescribeOneFlag(const CommandLineFlagInfo& flag);
+
+// Thread-hostile; meant to be called before any threads are spawned.
+extern GFLAGS_DLL_DECL void SetArgv(int argc, const char** argv);
+// The following functions are thread-safe as long as SetArgv() is
+// only called before any threads start.
+extern GFLAGS_DLL_DECL const std::vector<std::string>& GetArgvs();  // all of argv as a vector
+extern GFLAGS_DLL_DECL const char* GetArgv();               // all of argv as a string
+extern GFLAGS_DLL_DECL const char* GetArgv0();              // only argv0
+extern GFLAGS_DLL_DECL uint32 GetArgvSum();                 // simple checksum of argv
+extern GFLAGS_DLL_DECL const char* ProgramInvocationName(); // argv0, or "UNKNOWN" if not set
+extern GFLAGS_DLL_DECL const char* ProgramInvocationShortName();   // basename(argv0)
+// ProgramUsage() is thread-safe as long as SetUsageMessage() is only
+// called before any threads start.
+extern GFLAGS_DLL_DECL const char* ProgramUsage();          // string set by SetUsageMessage()
+
+
+// --------------------------------------------------------------------
+// Normally you access commandline flags by just saying "if (FLAGS_foo)"
+// or whatever, and set them by calling "FLAGS_foo = bar" (or, more
+// commonly, via the DEFINE_foo macro).  But if you need a bit more
+// control, we have programmatic ways to get/set the flags as well.
+// These programmatic ways to access flags are thread-safe, but direct
+// access is only thread-compatible.
+
+// Return true iff the flagname was found.
+// OUTPUT is set to the flag's value, or unchanged if we return false.
+extern GFLAGS_DLL_DECL bool GetCommandLineOption(const char* name, std::string* OUTPUT);
+
+// Return true iff the flagname was found. OUTPUT is set to the flag's
+// CommandLineFlagInfo or unchanged if we return false.
+extern GFLAGS_DLL_DECL bool GetCommandLineFlagInfo(const char* name,
+                                   CommandLineFlagInfo* OUTPUT);
+
+// Return the CommandLineFlagInfo of the flagname.  exit() if name not found.
+// Example usage, to check if a flag's value is currently the default value:
+//   if (GetCommandLineFlagInfoOrDie("foo").is_default) ...
+extern GFLAGS_DLL_DECL CommandLineFlagInfo GetCommandLineFlagInfoOrDie(const char* name);
+
+enum GFLAGS_DLL_DECL FlagSettingMode {
+  // update the flag's value (can call this multiple times).
+  SET_FLAGS_VALUE,
+  // update the flag's value, but *only if* it has not yet been updated
+  // with SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef".
+  SET_FLAG_IF_DEFAULT,
+  // set the flag's default value to this.  If the flag has not yet updated
+  // yet (via SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef")
+  // change the flag's current value to the new default value as well.
+  SET_FLAGS_DEFAULT
+};
+
+// Set a particular flag ("command line option").  Returns a string
+// describing the new value that the option has been set to.  The
+// return value API is not well-specified, so basically just depend on
+// it to be empty if the setting failed for some reason -- the name is
+// not a valid flag name, or the value is not a valid value -- and
+// non-empty else.
+
+// SetCommandLineOption uses set_mode == SET_FLAGS_VALUE (the common case)
+extern GFLAGS_DLL_DECL std::string SetCommandLineOption(const char* name, const char* value);
+extern GFLAGS_DLL_DECL std::string SetCommandLineOptionWithMode(const char* name, const char* value,
+                                                FlagSettingMode set_mode);
+
+
+// --------------------------------------------------------------------
+// Saves the states (value, default value, whether the user has set
+// the flag, registered validators, etc) of all flags, and restores
+// them when the FlagSaver is destroyed.  This is very useful in
+// tests, say, when you want to let your tests change the flags, but
+// make sure that they get reverted to the original states when your
+// test is complete.
+//
+// Example usage:
+//   void TestFoo() {
+//     FlagSaver s1;
+//     FLAG_foo = false;
+//     FLAG_bar = "some value";
+//
+//     // test happens here.  You can return at any time
+//     // without worrying about restoring the FLAG values.
+//   }
+//
+// Note: This class is marked with __attribute__((unused)) because all the
+// work is done in the constructor and destructor, so in the standard
+// usage example above, the compiler would complain that it's an
+// unused variable.
+//
+// This class is thread-safe.
+
+class GFLAGS_DLL_DECL FlagSaver {
+ public:
+  FlagSaver();
+  ~FlagSaver();
+
+ private:
+  class FlagSaverImpl* impl_;   // we use pimpl here to keep API steady
+
+  FlagSaver(const FlagSaver&);  // no copying!
+  void operator=(const FlagSaver&);
+} ;
+
+// --------------------------------------------------------------------
+// Some deprecated or hopefully-soon-to-be-deprecated functions.
+
+// This is often used for logging.  TODO(csilvers): figure out a better way
+extern GFLAGS_DLL_DECL std::string CommandlineFlagsIntoString();
+// Usually where this is used, a FlagSaver should be used instead.
+extern GFLAGS_DLL_DECL bool ReadFlagsFromString(const std::string& flagfilecontents,
+                                const char* prog_name,
+                                bool errors_are_fatal); // uses SET_FLAGS_VALUE
+
+// These let you manually implement --flagfile functionality.
+// DEPRECATED.
+extern GFLAGS_DLL_DECL bool AppendFlagsIntoFile(const std::string& filename, const char* prog_name);
+extern GFLAGS_DLL_DECL bool SaveCommandFlags();  // actually defined in google.cc !
+extern GFLAGS_DLL_DECL bool ReadFromFlagsFile(const std::string& filename, const char* prog_name,
+                              bool errors_are_fatal);   // uses SET_FLAGS_VALUE
+
+
+// --------------------------------------------------------------------
+// Useful routines for initializing flags from the environment.
+// In each case, if 'varname' does not exist in the environment
+// return defval.  If 'varname' does exist but is not valid
+// (e.g., not a number for an int32 flag), abort with an error.
+// Otherwise, return the value.  NOTE: for booleans, for true use
+// 't' or 'T' or 'true' or '1', for false 'f' or 'F' or 'false' or '0'.
+
+extern GFLAGS_DLL_DECL bool BoolFromEnv(const char *varname, bool defval);
+extern GFLAGS_DLL_DECL int32 Int32FromEnv(const char *varname, int32 defval);
+extern GFLAGS_DLL_DECL int64 Int64FromEnv(const char *varname, int64 defval);
+extern GFLAGS_DLL_DECL uint64 Uint64FromEnv(const char *varname, uint64 defval);
+extern GFLAGS_DLL_DECL double DoubleFromEnv(const char *varname, double defval);
+extern GFLAGS_DLL_DECL const char *StringFromEnv(const char *varname, const char *defval);
+
+
+// --------------------------------------------------------------------
+// The next two functions parse commandlineflags from main():
+
+// Set the "usage" message for this program.  For example:
+//   string usage("This program does nothing.  Sample usage:\n");
+//   usage += argv[0] + " <uselessarg1> <uselessarg2>";
+//   SetUsageMessage(usage);
+// Do not include commandline flags in the usage: we do that for you!
+// Thread-hostile; meant to be called before any threads are spawned.
+extern GFLAGS_DLL_DECL void SetUsageMessage(const std::string& usage);
+
+// Looks for flags in argv and parses them.  Rearranges argv to put
+// flags first, or removes them entirely if remove_flags is true.
+// If a flag is defined more than once in the command line or flag
+// file, the last definition is used.
+// See top-of-file for more details on this function.
+#ifndef SWIG   // In swig, use ParseCommandLineFlagsScript() instead.
+extern GFLAGS_DLL_DECL uint32 ParseCommandLineFlags(int *argc, char*** argv,
+                                    bool remove_flags);
+#endif
+
+
+// Calls to ParseCommandLineNonHelpFlags and then to
+// HandleCommandLineHelpFlags can be used instead of a call to
+// ParseCommandLineFlags during initialization, in order to allow for
+// changing default values for some FLAGS (via
+// e.g. SetCommandLineOptionWithMode calls) between the time of
+// command line parsing and the time of dumping help information for
+// the flags as a result of command line parsing.
+// If a flag is defined more than once in the command line or flag
+// file, the last definition is used.
+extern GFLAGS_DLL_DECL uint32 ParseCommandLineNonHelpFlags(int *argc, char*** argv,
+                                           bool remove_flags);
+// This is actually defined in commandlineflags_reporting.cc.
+// This function is misnamed (it also handles --version, etc.), but
+// it's too late to change that now. :-(
+extern GFLAGS_DLL_DECL void HandleCommandLineHelpFlags();   // in commandlineflags_reporting.cc
+
+// Allow command line reparsing.  Disables the error normally
+// generated when an unknown flag is found, since it may be found in a
+// later parse.  Thread-hostile; meant to be called before any threads
+// are spawned.
+extern GFLAGS_DLL_DECL void AllowCommandLineReparsing();
+
+// Reparse the flags that have not yet been recognized.
+// Only flags registered since the last parse will be recognized.
+// Any flag value must be provided as part of the argument using "=",
+// not as a separate command line argument that follows the flag argument.
+// Intended for handling flags from dynamically loaded libraries,
+// since their flags are not registered until they are loaded.
+extern GFLAGS_DLL_DECL uint32 ReparseCommandLineNonHelpFlags();
+
+// Clean up memory allocated by flags.  This is only needed to reduce
+// the quantity of "potentially leaked" reports emitted by memory
+// debugging tools such as valgrind.  It is not required for normal
+// operation, or for the perftools heap-checker.  It must only be called
+// when the process is about to exit, and all threads that might
+// access flags are quiescent.  Referencing flags after this is called
+// will have unexpected consequences.  This is not safe to run when
+// multiple threads might be running: the function is thread-hostile.
+extern GFLAGS_DLL_DECL void ShutDownCommandLineFlags();
+
+
+// --------------------------------------------------------------------
+// Now come the command line flag declaration/definition macros that
+// will actually be used.  They're kind of hairy.  A major reason
+// for this is initialization: we want people to be able to access
+// variables in global constructors and have that not crash, even if
+// their global constructor runs before the global constructor here.
+// (Obviously, we can't guarantee the flags will have the correct
+// default value in that case, but at least accessing them is safe.)
+// The only way to do that is have flags point to a static buffer.
+// So we make one, using a union to ensure proper alignment, and
+// then use placement-new to actually set up the flag with the
+// correct default value.  In the same vein, we have to worry about
+// flag access in global destructors, so FlagRegisterer has to be
+// careful never to destroy the flag-values it constructs.
+//
+// Note that when we define a flag variable FLAGS_<name>, we also
+// preemptively define a junk variable, FLAGS_no<name>.  This is to
+// cause a link-time error if someone tries to define 2 flags with
+// names like "logging" and "nologging".  We do this because a bool
+// flag FLAG can be set from the command line to true with a "-FLAG"
+// argument, and to false with a "-noFLAG" argument, and so this can
+// potentially avert confusion.
+//
+// We also put flags into their own namespace.  It is purposefully
+// named in an opaque way that people should have trouble typing
+// directly.  The idea is that DEFINE puts the flag in the weird
+// namespace, and DECLARE imports the flag from there into the current
+// namespace.  The net result is to force people to use DECLARE to get
+// access to a flag, rather than saying "extern bool FLAGS_whatever;"
+// or some such instead.  We want this so we can put extra
+// functionality (like sanity-checking) in DECLARE if we want, and
+// make sure it is picked up everywhere.
+//
+// We also put the type of the variable in the namespace, so that
+// people can't DECLARE_int32 something that they DEFINE_bool'd
+// elsewhere.
+
+class GFLAGS_DLL_DECL FlagRegisterer {
+ public:
+  FlagRegisterer(const char* name, const char* type,
+                 const char* help, const char* filename,
+                 void* current_storage, void* defvalue_storage);
+};
+
+extern bool FlagsTypeWarn(const char *name);
+
+// If your application #defines STRIP_FLAG_HELP to a non-zero value
+// before #including this file, we remove the help message from the
+// binary file. This can reduce the size of the resulting binary
+// somewhat, and may also be useful for security reasons.
+
+extern const char kStrippedFlagHelp[];
+
+}
+
+#ifndef SWIG  // In swig, ignore the main flag declarations
+
+#if defined(STRIP_FLAG_HELP) && STRIP_FLAG_HELP > 0
+// Need this construct to avoid the 'defined but not used' warning.
+#define MAYBE_STRIPPED_HELP(txt) (false ? (txt) : kStrippedFlagHelp)
+#else
+#define MAYBE_STRIPPED_HELP(txt) txt
+#endif
+
+// Each command-line flag has two variables associated with it: one
+// with the current value, and one with the default value.  However,
+// we have a third variable, which is where value is assigned; it's a
+// constant.  This guarantees that FLAG_##value is initialized at
+// static initialization time (e.g. before program-start) rather than
+// than global construction time (which is after program-start but
+// before main), at least when 'value' is a compile-time constant.  We
+// use a small trick for the "default value" variable, and call it
+// FLAGS_no<name>.  This serves the second purpose of assuring a
+// compile error if someone tries to define a flag named no<name>
+// which is illegal (--foo and --nofoo both affect the "foo" flag).
+#define DEFINE_VARIABLE(type, shorttype, name, value, help) \
+  namespace fL##shorttype {                                     \
+    static const type FLAGS_nono##name = value;                 \
+    /* We always want to export defined variables, dll or no */ \
+    GFLAGS_DLL_DEFINE_FLAG type FLAGS_##name = FLAGS_nono##name; \
+    type FLAGS_no##name = FLAGS_nono##name;                     \
+    static ::google::FlagRegisterer o_##name(                   \
+      #name, #type, MAYBE_STRIPPED_HELP(help), __FILE__,        \
+      &FLAGS_##name, &FLAGS_no##name);                          \
+  }                                                             \
+  using fL##shorttype::FLAGS_##name
+
+#define DECLARE_VARIABLE(type, shorttype, name) \
+  namespace fL##shorttype {                     \
+    /* We always want to import declared variables, dll or no */ \
+    extern GFLAGS_DLL_DECLARE_FLAG type FLAGS_##name; \
+  }                                             \
+  using fL##shorttype::FLAGS_##name
+
+// For DEFINE_bool, we want to do the extra check that the passed-in
+// value is actually a bool, and not a string or something that can be
+// coerced to a bool.  These declarations (no definition needed!) will
+// help us do that, and never evaluate From, which is important.
+// We'll use 'sizeof(IsBool(val))' to distinguish. This code requires
+// that the compiler have different sizes for bool & double. Since
+// this is not guaranteed by the standard, we check it with a
+// compile-time assert (msg[-1] will give a compile-time error).
+namespace fLB {
+struct CompileAssert {};
+typedef CompileAssert expected_sizeof_double_neq_sizeof_bool[
+                      (sizeof(double) != sizeof(bool)) ? 1 : -1];
+template<typename From> GFLAGS_DLL_DECL double IsBoolFlag(const From& from);
+GFLAGS_DLL_DECL bool IsBoolFlag(bool from);
+}  // namespace fLB
+
+#define DECLARE_bool(name)          DECLARE_VARIABLE(bool, B, name)
+#define DEFINE_bool(name, val, txt)                                       \
+  namespace fLB {                                                         \
+    typedef ::fLB::CompileAssert FLAG_##name##_value_is_not_a_bool[       \
+            (sizeof(::fLB::IsBoolFlag(val)) != sizeof(double)) ? 1 : -1]; \
+  }                                                                       \
+  DEFINE_VARIABLE(bool, B, name, val, txt)
+
+#define DECLARE_int32(name)         DECLARE_VARIABLE(::google::int32, I, name)
+#define DEFINE_int32(name,val,txt)  DEFINE_VARIABLE(::google::int32, I, name, val, txt)
+
+#define DECLARE_int64(name)         DECLARE_VARIABLE(::google::int64, I64, name)
+#define DEFINE_int64(name,val,txt)  DEFINE_VARIABLE(::google::int64, I64, name, val, txt)
+
+#define DECLARE_uint64(name)        DECLARE_VARIABLE(::google::uint64, U64, name)
+#define DEFINE_uint64(name,val,txt) DEFINE_VARIABLE(::google::uint64, U64, name, val, txt)
+
+#define DECLARE_double(name)          DECLARE_VARIABLE(double, D, name)
+#define DEFINE_double(name, val, txt) DEFINE_VARIABLE(double, D, name, val, txt)
+
+// Strings are trickier, because they're not a POD, so we can't
+// construct them at static-initialization time (instead they get
+// constructed at global-constructor time, which is much later).  To
+// try to avoid crashes in that case, we use a char buffer to store
+// the string, which we can static-initialize, and then placement-new
+// into it later.  It's not perfect, but the best we can do.
+
+namespace fLS {
+// The meaning of "string" might be different between now and when the
+// macros below get invoked (e.g., if someone is experimenting with
+// other string implementations that get defined after this file is
+// included).  Save the current meaning now and use it in the macros.
+typedef std::string clstring;
+
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const char *value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const clstring &value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           int value);
+}  // namespace fLS
+
+#define DECLARE_string(name)  namespace fLS { extern GFLAGS_DLL_DECLARE_FLAG ::fLS::clstring& FLAGS_##name; } \
+                              using fLS::FLAGS_##name
+
+// We need to define a var named FLAGS_no##name so people don't define
+// --string and --nostring.  And we need a temporary place to put val
+// so we don't have to evaluate it twice.  Two great needs that go
+// great together!
+#define DEFINE_string(name, val, txt)                                       \
+  namespace fLS {                                                           \
+    using ::fLS::clstring;                                                  \
+    static union { void* align; char s[sizeof(clstring)]; } s_##name[2];    \
+    clstring* const FLAGS_no##name = ::fLS::                                \
+                                   dont_pass0toDEFINE_string(s_##name[0].s, \
+                                                             val);          \
+    static ::google::FlagRegisterer o_##name(                  \
+        #name, "string", MAYBE_STRIPPED_HELP(txt), __FILE__,                \
+        s_##name[0].s, new (s_##name[1].s) clstring(*FLAGS_no##name));      \
+    GFLAGS_DLL_DEFINE_FLAG clstring& FLAGS_##name = *FLAGS_no##name;        \
+  }                                                                         \
+  using fLS::FLAGS_##name
+
+#endif  // SWIG
+
+#endif  // GOOGLE_GFLAGS_H_
diff --git a/third_party/google-gflags/gen/arch/win/ia32/include/gflags/gflags_completions.h b/third_party/google-gflags/gen/arch/win/ia32/include/gflags/gflags_completions.h
new file mode 100644
index 0000000..e97de5b3
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/win/ia32/include/gflags/gflags_completions.h
@@ -0,0 +1,130 @@
+// Copyright (c) 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// ---
+// Author: Dave Nicponski
+//
+// Implement helpful bash-style command line flag completions
+//
+// ** Functional API:
+// HandleCommandLineCompletions() should be called early during
+// program startup, but after command line flag code has been
+// initialized, such as the beginning of HandleCommandLineHelpFlags().
+// It checks the value of the flag --tab_completion_word.  If this
+// flag is empty, nothing happens here.  If it contains a string,
+// however, then HandleCommandLineCompletions() will hijack the
+// process, attempting to identify the intention behind this
+// completion.  Regardless of the outcome of this deduction, the
+// process will be terminated, similar to --helpshort flag
+// handling.
+//
+// ** Overview of Bash completions:
+// Bash can be told to programatically determine completions for the
+// current 'cursor word'.  It does this by (in this case) invoking a
+// command with some additional arguments identifying the command
+// being executed, the word being completed, and the previous word
+// (if any).  Bash then expects a sequence of output lines to be
+// printed to stdout.  If these lines all contain a common prefix
+// longer than the cursor word, bash will replace the cursor word
+// with that common prefix, and display nothing.  If there isn't such
+// a common prefix, bash will display the lines in pages using 'more'.
+//
+// ** Strategy taken for command line completions:
+// If we can deduce either the exact flag intended, or a common flag
+// prefix, we'll output exactly that.  Otherwise, if information
+// must be displayed to the user, we'll take the opportunity to add
+// some helpful information beyond just the flag name (specifically,
+// we'll include the default flag value and as much of the flag's
+// description as can fit on a single terminal line width, as specified
+// by the flag --tab_completion_columns).  Furthermore, we'll try to
+// make bash order the output such that the most useful or relevent
+// flags are the most likely to be shown at the top.
+//
+// ** Additional features:
+// To assist in finding that one really useful flag, substring matching
+// was implemented.  Before pressing a <TAB> to get completion for the
+// current word, you can append one or more '?' to the flag to do
+// substring matching.  Here's the semantics:
+//   --foo<TAB>     Show me all flags with names prefixed by 'foo'
+//   --foo?<TAB>    Show me all flags with 'foo' somewhere in the name
+//   --foo??<TAB>   Same as prior case, but also search in module
+//                  definition path for 'foo'
+//   --foo???<TAB>  Same as prior case, but also search in flag
+//                  descriptions for 'foo'
+// Finally, we'll trim the output to a relatively small number of
+// flags to keep bash quiet about the verbosity of output.  If one
+// really wanted to see all possible matches, appending a '+' to the
+// search word will force the exhaustive list of matches to be printed.
+//
+// ** How to have bash accept completions from a binary:
+// Bash requires that it be informed about each command that programmatic
+// completion should be enabled for.  Example addition to a .bashrc
+// file would be (your path to gflags_completions.sh file may differ):
+
+/*
+$ complete -o bashdefault -o default -o nospace -C                        \
+ '/usr/local/bin/gflags_completions.sh --tab_completion_columns $COLUMNS' \
+  time  env  binary_name  another_binary  [...]
+*/
+
+// This would allow the following to work:
+//   $ /path/to/binary_name --vmodule<TAB>
+// Or:
+//   $ ./bin/path/another_binary --gfs_u<TAB>
+// (etc)
+//
+// Sadly, it appears that bash gives no easy way to force this behavior for
+// all commands.  That's where the "time" in the above example comes in.
+// If you haven't specifically added a command to the list of completion
+// supported commands, you can still get completions by prefixing the
+// entire command with "env".
+//   $ env /some/brand/new/binary --vmod<TAB>
+// Assuming that "binary" is a newly compiled binary, this should still
+// produce the expected completion output.
+
+
+#ifndef GOOGLE_GFLAGS_COMPLETIONS_H_
+#define GOOGLE_GFLAGS_COMPLETIONS_H_
+
+// Annoying stuff for windows -- makes sure clients can import these functions
+#ifndef GFLAGS_DLL_DECL
+# ifdef _WIN32
+#   define GFLAGS_DLL_DECL  __declspec(dllimport)
+# else
+#   define GFLAGS_DLL_DECL
+# endif
+#endif
+
+namespace google {
+
+GFLAGS_DLL_DECL void HandleCommandLineCompletions(void);
+
+}
+
+#endif  // GOOGLE_GFLAGS_COMPLETIONS_H_
diff --git a/third_party/google-gflags/gen/arch/win/ia32/include/private/config.h b/third_party/google-gflags/gen/arch/win/ia32/include/private/config.h
new file mode 100644
index 0000000..dcca757
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/win/ia32/include/private/config.h
@@ -0,0 +1,139 @@
+/* src/config.h.in.  Generated from configure.ac by autoheader.  */
+
+/* Sometimes we accidentally #include this config.h instead of the one
+   in .. -- this is particularly true for msys/mingw, which uses the
+   unix config.h but also runs code in the windows directory.
+   */
+#ifdef __MINGW32__
+#include "../config.h"
+#define GOOGLE_GFLAGS_WINDOWS_CONFIG_H_
+#endif
+
+#ifndef GOOGLE_GFLAGS_WINDOWS_CONFIG_H_
+#define GOOGLE_GFLAGS_WINDOWS_CONFIG_H_
+
+/* Always the empty-string on non-windows systems. On windows, should be
+   "__declspec(dllexport)". This way, when we compile the dll, we export our
+   functions/classes. It's safe to define this here because config.h is only
+   used internally, to compile the DLL, and every DLL source file #includes
+   "config.h" before anything else. */
+#ifndef GFLAGS_DLL_DECL
+# define GFLAGS_IS_A_DLL  1   /* not set if you're statically linking */
+# define GFLAGS_DLL_DECL  __declspec(dllexport)
+# define GFLAGS_DLL_DECL_FOR_UNITTESTS  __declspec(dllimport)
+#endif
+
+/* Namespace for Google classes */
+#define GOOGLE_NAMESPACE  ::google
+
+/* Define to 1 if you have the <dlfcn.h> header file. */
+#undef HAVE_DLFCN_H
+
+/* Define to 1 if you have the <fnmatch.h> header file. */
+#undef HAVE_FNMATCH_H
+
+/* Define to 1 if you have the <inttypes.h> header file. */
+#undef HAVE_INTTYPES_H
+
+/* Define to 1 if you have the <memory.h> header file. */
+#undef HAVE_MEMORY_H
+
+/* define if the compiler implements namespaces */
+#define HAVE_NAMESPACES  1
+
+/* Define if you have POSIX threads libraries and header files. */
+#undef HAVE_PTHREAD
+
+/* Define to 1 if you have the `putenv' function. */
+#define HAVE_PUTENV  1
+
+/* Define to 1 if you have the `setenv' function. */
+#undef HAVE_SETENV
+
+/* Define to 1 if you have the <stdint.h> header file. */
+#undef HAVE_STDINT_H
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#define HAVE_STDLIB_H 1
+
+/* Define to 1 if you have the <strings.h> header file. */
+#undef HAVE_STRINGS_H
+
+/* Define to 1 if you have the <string.h> header file. */
+#define HAVE_STRING_H 1
+
+/* Define to 1 if you have the `strtoll' function. */
+#define HAVE_STRTOLL  1
+
+/* Define to 1 if you have the `strtoq' function. */
+#define HAVE_STRTOQ  1
+
+/* Define to 1 if you have the <sys/stat.h> header file. */
+#define HAVE_SYS_STAT_H 1
+
+/* Define to 1 if you have the <sys/types.h> header file. */
+#define HAVE_SYS_TYPES_H 1
+
+/* Define to 1 if you have the <unistd.h> header file. */
+#undef HAVE_UNISTD_H
+
+/* define if your compiler has __attribute__ */
+#undef HAVE___ATTRIBUTE__
+
+/* Define to the sub-directory in which libtool stores uninstalled libraries.
+   */
+#undef LT_OBJDIR
+
+/* Name of package */
+#undef PACKAGE
+
+/* Define to the address where bug reports for this package should be sent. */
+#undef PACKAGE_BUGREPORT
+
+/* Define to the full name of this package. */
+#undef PACKAGE_NAME
+
+/* Define to the full name and version of this package. */
+#undef PACKAGE_STRING
+
+/* Define to the one symbol short name of this package. */
+#undef PACKAGE_TARNAME
+
+/* Define to the home page for this package. */
+#undef PACKAGE_URL
+
+/* Define to the version of this package. */
+#undef PACKAGE_VERSION
+
+/* Define to necessary symbol if this constant uses a non-standard name on
+   your system. */
+#undef PTHREAD_CREATE_JOINABLE
+
+/* Define to 1 if you have the ANSI C header files. */
+#define STDC_HEADERS  1
+
+/* the namespace where STL code like vector<> is defined */
+#define STL_NAMESPACE  std
+
+/* Version number of package */
+#undef VERSION
+
+/* Stops putting the code inside the Google namespace */
+#define _END_GOOGLE_NAMESPACE_  }
+
+/* Puts following code inside the Google namespace */
+#define _START_GOOGLE_NAMESPACE_  namespace google {
+
+// ---------------------------------------------------------------------
+// Extra stuff not found in config.h.in
+
+// This must be defined before the windows.h is included.  It's needed
+// for mutex.h, to give access to the TryLock method.
+#ifndef _WIN32_WINNT
+# define _WIN32_WINNT 0x0400
+#endif
+
+// TODO(csilvers): include windows/port.h in every relevant source file instead?
+#include "windows/port.h"
+
+#endif  /* GOOGLE_GFLAGS_WINDOWS_CONFIG_H_ */
diff --git a/third_party/google-gflags/gen/arch/win/x64/include/gflags/gflags.h b/third_party/google-gflags/gen/arch/win/x64/include/gflags/gflags.h
new file mode 100644
index 0000000..fdafe2a
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/win/x64/include/gflags/gflags.h
@@ -0,0 +1,601 @@
+// Copyright (c) 2006, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// ---
+// Author: Ray Sidney
+// Revamped and reorganized by Craig Silverstein
+//
+// This is the file that should be included by any file which declares
+// or defines a command line flag or wants to parse command line flags
+// or print a program usage message (which will include information about
+// flags).  Executive summary, in the form of an example foo.cc file:
+//
+//    #include "foo.h"         // foo.h has a line "DECLARE_int32(start);"
+//    #include "validators.h"  // hypothetical file defining ValidateIsFile()
+//
+//    DEFINE_int32(end, 1000, "The last record to read");
+//
+//    DEFINE_string(filename, "my_file.txt", "The file to read");
+//    // Crash if the specified file does not exist.
+//    static bool dummy = RegisterFlagValidator(&FLAGS_filename,
+//                                              &ValidateIsFile);
+//
+//    DECLARE_bool(verbose); // some other file has a DEFINE_bool(verbose, ...)
+//
+//    void MyFunc() {
+//      if (FLAGS_verbose) printf("Records %d-%d\n", FLAGS_start, FLAGS_end);
+//    }
+//
+// Then, at the command-line:
+//    ./foo --noverbose --start=5 --end=100
+//
+// For more details, see
+//    doc/gflags.html
+//
+// --- A note about thread-safety:
+//
+// We describe many functions in this routine as being thread-hostile,
+// thread-compatible, or thread-safe.  Here are the meanings we use:
+//
+// thread-safe: it is safe for multiple threads to call this routine
+//   (or, when referring to a class, methods of this class)
+//   concurrently.
+// thread-hostile: it is not safe for multiple threads to call this
+//   routine (or methods of this class) concurrently.  In gflags,
+//   most thread-hostile routines are intended to be called early in,
+//   or even before, main() -- that is, before threads are spawned.
+// thread-compatible: it is safe for multiple threads to read from
+//   this variable (when applied to variables), or to call const
+//   methods of this class (when applied to classes), as long as no
+//   other thread is writing to the variable or calling non-const
+//   methods of this class.
+
+#ifndef GOOGLE_GFLAGS_H_
+#define GOOGLE_GFLAGS_H_
+
+#include <string>
+#include <vector>
+
+// We care a lot about number of bits things take up.  Unfortunately,
+// systems define their bit-specific ints in a lot of different ways.
+// We use our own way, and have a typedef to get there.
+// Note: these commands below may look like "#if 1" or "#if 0", but
+// that's because they were constructed that way at ./configure time.
+// Look at gflags.h.in to see how they're calculated (based on your config).
+#if 0
+#include <stdint.h>             // the normal place uint16_t is defined
+#endif
+#if 1
+#include <sys/types.h>          // the normal place u_int16_t is defined
+#endif
+#if 0
+#include <inttypes.h>           // a third place for uint16_t or u_int16_t
+#endif
+
+// Annoying stuff for windows -- makes sure clients can import these functions
+#if defined(_WIN32)
+# ifndef GFLAGS_DLL_DECL
+#   define GFLAGS_DLL_DECL  __declspec(dllimport)
+# endif
+# ifndef GFLAGS_DLL_DECLARE_FLAG
+#   define GFLAGS_DLL_DECLARE_FLAG  __declspec(dllimport)
+# endif
+# ifndef GFLAGS_DLL_DEFINE_FLAG
+#   define GFLAGS_DLL_DEFINE_FLAG   __declspec(dllexport)
+# endif
+#else
+# ifndef GFLAGS_DLL_DECL
+#   define GFLAGS_DLL_DECL
+# endif
+# ifndef GFLAGS_DLL_DECLARE_FLAG
+#   define GFLAGS_DLL_DECLARE_FLAG
+# endif
+# ifndef GFLAGS_DLL_DEFINE_FLAG
+#   define GFLAGS_DLL_DEFINE_FLAG
+# endif
+#endif
+
+namespace google {
+
+#if 0      // the C99 format
+typedef int32_t int32;
+typedef uint32_t uint32;
+typedef int64_t int64;
+typedef uint64_t uint64;
+#elif 0   // the BSD format
+typedef int32_t int32;
+typedef u_int32_t uint32;
+typedef int64_t int64;
+typedef u_int64_t uint64;
+#elif 1     // the windows (vc7) format
+typedef __int32 int32;
+typedef unsigned __int32 uint32;
+typedef __int64 int64;
+typedef unsigned __int64 uint64;
+#else
+#error Do not know how to define a 32-bit integer quantity on your system
+#endif
+
+// --------------------------------------------------------------------
+// To actually define a flag in a file, use DEFINE_bool,
+// DEFINE_string, etc. at the bottom of this file.  You may also find
+// it useful to register a validator with the flag.  This ensures that
+// when the flag is parsed from the commandline, or is later set via
+// SetCommandLineOption, we call the validation function. It is _not_
+// called when you assign the value to the flag directly using the = operator.
+//
+// The validation function should return true if the flag value is valid, and
+// false otherwise. If the function returns false for the new setting of the
+// flag, the flag will retain its current value. If it returns false for the
+// default value, ParseCommandLineFlags() will die.
+//
+// This function is safe to call at global construct time (as in the
+// example below).
+//
+// Example use:
+//    static bool ValidatePort(const char* flagname, int32 value) {
+//       if (value > 0 && value < 32768)   // value is ok
+//         return true;
+//       printf("Invalid value for --%s: %d\n", flagname, (int)value);
+//       return false;
+//    }
+//    DEFINE_int32(port, 0, "What port to listen on");
+//    static bool dummy = RegisterFlagValidator(&FLAGS_port, &ValidatePort);
+
+// Returns true if successfully registered, false if not (because the
+// first argument doesn't point to a command-line flag, or because a
+// validator is already registered for this flag).
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const bool* flag,
+                           bool (*validate_fn)(const char*, bool));
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const int32* flag,
+                           bool (*validate_fn)(const char*, int32));
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const int64* flag,
+                           bool (*validate_fn)(const char*, int64));
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const uint64* flag,
+                           bool (*validate_fn)(const char*, uint64));
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const double* flag,
+                           bool (*validate_fn)(const char*, double));
+GFLAGS_DLL_DECL bool RegisterFlagValidator(const std::string* flag,
+                           bool (*validate_fn)(const char*, const std::string&));
+
+
+// --------------------------------------------------------------------
+// These methods are the best way to get access to info about the
+// list of commandline flags.  Note that these routines are pretty slow.
+//   GetAllFlags: mostly-complete info about the list, sorted by file.
+//   ShowUsageWithFlags: pretty-prints the list to stdout (what --help does)
+//   ShowUsageWithFlagsRestrict: limit to filenames with restrict as a substr
+//
+// In addition to accessing flags, you can also access argv[0] (the program
+// name) and argv (the entire commandline), which we sock away a copy of.
+// These variables are static, so you should only set them once.
+
+struct GFLAGS_DLL_DECL CommandLineFlagInfo {
+  std::string name;           // the name of the flag
+  std::string type;           // the type of the flag: int32, etc
+  std::string description;    // the "help text" associated with the flag
+  std::string current_value;  // the current value, as a string
+  std::string default_value;  // the default value, as a string
+  std::string filename;       // 'cleaned' version of filename holding the flag
+  bool has_validator_fn;      // true if RegisterFlagValidator called on flag
+  bool is_default;            // true if the flag has the default value and
+                              // has not been set explicitly from the cmdline
+                              // or via SetCommandLineOption
+};
+
+// Using this inside of a validator is a recipe for a deadlock.
+// TODO(wojtekm) Fix locking when validators are running, to make it safe to
+// call validators during ParseAllFlags.
+// Also make sure then to uncomment the corresponding unit test in
+// commandlineflags_unittest.sh
+extern GFLAGS_DLL_DECL void GetAllFlags(std::vector<CommandLineFlagInfo>* OUTPUT);
+// These two are actually defined in commandlineflags_reporting.cc.
+extern GFLAGS_DLL_DECL void ShowUsageWithFlags(const char *argv0);  // what --help does
+extern GFLAGS_DLL_DECL void ShowUsageWithFlagsRestrict(const char *argv0, const char *restrict);
+
+// Create a descriptive string for a flag.
+// Goes to some trouble to make pretty line breaks.
+extern GFLAGS_DLL_DECL std::string DescribeOneFlag(const CommandLineFlagInfo& flag);
+
+// Thread-hostile; meant to be called before any threads are spawned.
+extern GFLAGS_DLL_DECL void SetArgv(int argc, const char** argv);
+// The following functions are thread-safe as long as SetArgv() is
+// only called before any threads start.
+extern GFLAGS_DLL_DECL const std::vector<std::string>& GetArgvs();  // all of argv as a vector
+extern GFLAGS_DLL_DECL const char* GetArgv();               // all of argv as a string
+extern GFLAGS_DLL_DECL const char* GetArgv0();              // only argv0
+extern GFLAGS_DLL_DECL uint32 GetArgvSum();                 // simple checksum of argv
+extern GFLAGS_DLL_DECL const char* ProgramInvocationName(); // argv0, or "UNKNOWN" if not set
+extern GFLAGS_DLL_DECL const char* ProgramInvocationShortName();   // basename(argv0)
+// ProgramUsage() is thread-safe as long as SetUsageMessage() is only
+// called before any threads start.
+extern GFLAGS_DLL_DECL const char* ProgramUsage();          // string set by SetUsageMessage()
+
+
+// --------------------------------------------------------------------
+// Normally you access commandline flags by just saying "if (FLAGS_foo)"
+// or whatever, and set them by calling "FLAGS_foo = bar" (or, more
+// commonly, via the DEFINE_foo macro).  But if you need a bit more
+// control, we have programmatic ways to get/set the flags as well.
+// These programmatic ways to access flags are thread-safe, but direct
+// access is only thread-compatible.
+
+// Return true iff the flagname was found.
+// OUTPUT is set to the flag's value, or unchanged if we return false.
+extern GFLAGS_DLL_DECL bool GetCommandLineOption(const char* name, std::string* OUTPUT);
+
+// Return true iff the flagname was found. OUTPUT is set to the flag's
+// CommandLineFlagInfo or unchanged if we return false.
+extern GFLAGS_DLL_DECL bool GetCommandLineFlagInfo(const char* name,
+                                   CommandLineFlagInfo* OUTPUT);
+
+// Return the CommandLineFlagInfo of the flagname.  exit() if name not found.
+// Example usage, to check if a flag's value is currently the default value:
+//   if (GetCommandLineFlagInfoOrDie("foo").is_default) ...
+extern GFLAGS_DLL_DECL CommandLineFlagInfo GetCommandLineFlagInfoOrDie(const char* name);
+
+enum GFLAGS_DLL_DECL FlagSettingMode {
+  // update the flag's value (can call this multiple times).
+  SET_FLAGS_VALUE,
+  // update the flag's value, but *only if* it has not yet been updated
+  // with SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef".
+  SET_FLAG_IF_DEFAULT,
+  // set the flag's default value to this.  If the flag has not yet updated
+  // yet (via SET_FLAGS_VALUE, SET_FLAG_IF_DEFAULT, or "FLAGS_xxx = nondef")
+  // change the flag's current value to the new default value as well.
+  SET_FLAGS_DEFAULT
+};
+
+// Set a particular flag ("command line option").  Returns a string
+// describing the new value that the option has been set to.  The
+// return value API is not well-specified, so basically just depend on
+// it to be empty if the setting failed for some reason -- the name is
+// not a valid flag name, or the value is not a valid value -- and
+// non-empty else.
+
+// SetCommandLineOption uses set_mode == SET_FLAGS_VALUE (the common case)
+extern GFLAGS_DLL_DECL std::string SetCommandLineOption(const char* name, const char* value);
+extern GFLAGS_DLL_DECL std::string SetCommandLineOptionWithMode(const char* name, const char* value,
+                                                FlagSettingMode set_mode);
+
+
+// --------------------------------------------------------------------
+// Saves the states (value, default value, whether the user has set
+// the flag, registered validators, etc) of all flags, and restores
+// them when the FlagSaver is destroyed.  This is very useful in
+// tests, say, when you want to let your tests change the flags, but
+// make sure that they get reverted to the original states when your
+// test is complete.
+//
+// Example usage:
+//   void TestFoo() {
+//     FlagSaver s1;
+//     FLAG_foo = false;
+//     FLAG_bar = "some value";
+//
+//     // test happens here.  You can return at any time
+//     // without worrying about restoring the FLAG values.
+//   }
+//
+// Note: This class is marked with __attribute__((unused)) because all the
+// work is done in the constructor and destructor, so in the standard
+// usage example above, the compiler would complain that it's an
+// unused variable.
+//
+// This class is thread-safe.
+
+class GFLAGS_DLL_DECL FlagSaver {
+ public:
+  FlagSaver();
+  ~FlagSaver();
+
+ private:
+  class FlagSaverImpl* impl_;   // we use pimpl here to keep API steady
+
+  FlagSaver(const FlagSaver&);  // no copying!
+  void operator=(const FlagSaver&);
+} ;
+
+// --------------------------------------------------------------------
+// Some deprecated or hopefully-soon-to-be-deprecated functions.
+
+// This is often used for logging.  TODO(csilvers): figure out a better way
+extern GFLAGS_DLL_DECL std::string CommandlineFlagsIntoString();
+// Usually where this is used, a FlagSaver should be used instead.
+extern GFLAGS_DLL_DECL bool ReadFlagsFromString(const std::string& flagfilecontents,
+                                const char* prog_name,
+                                bool errors_are_fatal); // uses SET_FLAGS_VALUE
+
+// These let you manually implement --flagfile functionality.
+// DEPRECATED.
+extern GFLAGS_DLL_DECL bool AppendFlagsIntoFile(const std::string& filename, const char* prog_name);
+extern GFLAGS_DLL_DECL bool SaveCommandFlags();  // actually defined in google.cc !
+extern GFLAGS_DLL_DECL bool ReadFromFlagsFile(const std::string& filename, const char* prog_name,
+                              bool errors_are_fatal);   // uses SET_FLAGS_VALUE
+
+
+// --------------------------------------------------------------------
+// Useful routines for initializing flags from the environment.
+// In each case, if 'varname' does not exist in the environment
+// return defval.  If 'varname' does exist but is not valid
+// (e.g., not a number for an int32 flag), abort with an error.
+// Otherwise, return the value.  NOTE: for booleans, for true use
+// 't' or 'T' or 'true' or '1', for false 'f' or 'F' or 'false' or '0'.
+
+extern GFLAGS_DLL_DECL bool BoolFromEnv(const char *varname, bool defval);
+extern GFLAGS_DLL_DECL int32 Int32FromEnv(const char *varname, int32 defval);
+extern GFLAGS_DLL_DECL int64 Int64FromEnv(const char *varname, int64 defval);
+extern GFLAGS_DLL_DECL uint64 Uint64FromEnv(const char *varname, uint64 defval);
+extern GFLAGS_DLL_DECL double DoubleFromEnv(const char *varname, double defval);
+extern GFLAGS_DLL_DECL const char *StringFromEnv(const char *varname, const char *defval);
+
+
+// --------------------------------------------------------------------
+// The next two functions parse commandlineflags from main():
+
+// Set the "usage" message for this program.  For example:
+//   string usage("This program does nothing.  Sample usage:\n");
+//   usage += argv[0] + " <uselessarg1> <uselessarg2>";
+//   SetUsageMessage(usage);
+// Do not include commandline flags in the usage: we do that for you!
+// Thread-hostile; meant to be called before any threads are spawned.
+extern GFLAGS_DLL_DECL void SetUsageMessage(const std::string& usage);
+
+// Looks for flags in argv and parses them.  Rearranges argv to put
+// flags first, or removes them entirely if remove_flags is true.
+// If a flag is defined more than once in the command line or flag
+// file, the last definition is used.
+// See top-of-file for more details on this function.
+#ifndef SWIG   // In swig, use ParseCommandLineFlagsScript() instead.
+extern GFLAGS_DLL_DECL uint32 ParseCommandLineFlags(int *argc, char*** argv,
+                                    bool remove_flags);
+#endif
+
+
+// Calls to ParseCommandLineNonHelpFlags and then to
+// HandleCommandLineHelpFlags can be used instead of a call to
+// ParseCommandLineFlags during initialization, in order to allow for
+// changing default values for some FLAGS (via
+// e.g. SetCommandLineOptionWithMode calls) between the time of
+// command line parsing and the time of dumping help information for
+// the flags as a result of command line parsing.
+// If a flag is defined more than once in the command line or flag
+// file, the last definition is used.
+extern GFLAGS_DLL_DECL uint32 ParseCommandLineNonHelpFlags(int *argc, char*** argv,
+                                           bool remove_flags);
+// This is actually defined in commandlineflags_reporting.cc.
+// This function is misnamed (it also handles --version, etc.), but
+// it's too late to change that now. :-(
+extern GFLAGS_DLL_DECL void HandleCommandLineHelpFlags();   // in commandlineflags_reporting.cc
+
+// Allow command line reparsing.  Disables the error normally
+// generated when an unknown flag is found, since it may be found in a
+// later parse.  Thread-hostile; meant to be called before any threads
+// are spawned.
+extern GFLAGS_DLL_DECL void AllowCommandLineReparsing();
+
+// Reparse the flags that have not yet been recognized.
+// Only flags registered since the last parse will be recognized.
+// Any flag value must be provided as part of the argument using "=",
+// not as a separate command line argument that follows the flag argument.
+// Intended for handling flags from dynamically loaded libraries,
+// since their flags are not registered until they are loaded.
+extern GFLAGS_DLL_DECL uint32 ReparseCommandLineNonHelpFlags();
+
+// Clean up memory allocated by flags.  This is only needed to reduce
+// the quantity of "potentially leaked" reports emitted by memory
+// debugging tools such as valgrind.  It is not required for normal
+// operation, or for the perftools heap-checker.  It must only be called
+// when the process is about to exit, and all threads that might
+// access flags are quiescent.  Referencing flags after this is called
+// will have unexpected consequences.  This is not safe to run when
+// multiple threads might be running: the function is thread-hostile.
+extern GFLAGS_DLL_DECL void ShutDownCommandLineFlags();
+
+
+// --------------------------------------------------------------------
+// Now come the command line flag declaration/definition macros that
+// will actually be used.  They're kind of hairy.  A major reason
+// for this is initialization: we want people to be able to access
+// variables in global constructors and have that not crash, even if
+// their global constructor runs before the global constructor here.
+// (Obviously, we can't guarantee the flags will have the correct
+// default value in that case, but at least accessing them is safe.)
+// The only way to do that is have flags point to a static buffer.
+// So we make one, using a union to ensure proper alignment, and
+// then use placement-new to actually set up the flag with the
+// correct default value.  In the same vein, we have to worry about
+// flag access in global destructors, so FlagRegisterer has to be
+// careful never to destroy the flag-values it constructs.
+//
+// Note that when we define a flag variable FLAGS_<name>, we also
+// preemptively define a junk variable, FLAGS_no<name>.  This is to
+// cause a link-time error if someone tries to define 2 flags with
+// names like "logging" and "nologging".  We do this because a bool
+// flag FLAG can be set from the command line to true with a "-FLAG"
+// argument, and to false with a "-noFLAG" argument, and so this can
+// potentially avert confusion.
+//
+// We also put flags into their own namespace.  It is purposefully
+// named in an opaque way that people should have trouble typing
+// directly.  The idea is that DEFINE puts the flag in the weird
+// namespace, and DECLARE imports the flag from there into the current
+// namespace.  The net result is to force people to use DECLARE to get
+// access to a flag, rather than saying "extern bool FLAGS_whatever;"
+// or some such instead.  We want this so we can put extra
+// functionality (like sanity-checking) in DECLARE if we want, and
+// make sure it is picked up everywhere.
+//
+// We also put the type of the variable in the namespace, so that
+// people can't DECLARE_int32 something that they DEFINE_bool'd
+// elsewhere.
+
+class GFLAGS_DLL_DECL FlagRegisterer {
+ public:
+  FlagRegisterer(const char* name, const char* type,
+                 const char* help, const char* filename,
+                 void* current_storage, void* defvalue_storage);
+};
+
+extern bool FlagsTypeWarn(const char *name);
+
+// If your application #defines STRIP_FLAG_HELP to a non-zero value
+// before #including this file, we remove the help message from the
+// binary file. This can reduce the size of the resulting binary
+// somewhat, and may also be useful for security reasons.
+
+extern const char kStrippedFlagHelp[];
+
+}
+
+#ifndef SWIG  // In swig, ignore the main flag declarations
+
+#if defined(STRIP_FLAG_HELP) && STRIP_FLAG_HELP > 0
+// Need this construct to avoid the 'defined but not used' warning.
+#define MAYBE_STRIPPED_HELP(txt) (false ? (txt) : kStrippedFlagHelp)
+#else
+#define MAYBE_STRIPPED_HELP(txt) txt
+#endif
+
+// Each command-line flag has two variables associated with it: one
+// with the current value, and one with the default value.  However,
+// we have a third variable, which is where value is assigned; it's a
+// constant.  This guarantees that FLAG_##value is initialized at
+// static initialization time (e.g. before program-start) rather than
+// than global construction time (which is after program-start but
+// before main), at least when 'value' is a compile-time constant.  We
+// use a small trick for the "default value" variable, and call it
+// FLAGS_no<name>.  This serves the second purpose of assuring a
+// compile error if someone tries to define a flag named no<name>
+// which is illegal (--foo and --nofoo both affect the "foo" flag).
+#define DEFINE_VARIABLE(type, shorttype, name, value, help) \
+  namespace fL##shorttype {                                     \
+    static const type FLAGS_nono##name = value;                 \
+    /* We always want to export defined variables, dll or no */ \
+    GFLAGS_DLL_DEFINE_FLAG type FLAGS_##name = FLAGS_nono##name; \
+    type FLAGS_no##name = FLAGS_nono##name;                     \
+    static ::google::FlagRegisterer o_##name(                   \
+      #name, #type, MAYBE_STRIPPED_HELP(help), __FILE__,        \
+      &FLAGS_##name, &FLAGS_no##name);                          \
+  }                                                             \
+  using fL##shorttype::FLAGS_##name
+
+#define DECLARE_VARIABLE(type, shorttype, name) \
+  namespace fL##shorttype {                     \
+    /* We always want to import declared variables, dll or no */ \
+    extern GFLAGS_DLL_DECLARE_FLAG type FLAGS_##name; \
+  }                                             \
+  using fL##shorttype::FLAGS_##name
+
+// For DEFINE_bool, we want to do the extra check that the passed-in
+// value is actually a bool, and not a string or something that can be
+// coerced to a bool.  These declarations (no definition needed!) will
+// help us do that, and never evaluate From, which is important.
+// We'll use 'sizeof(IsBool(val))' to distinguish. This code requires
+// that the compiler have different sizes for bool & double. Since
+// this is not guaranteed by the standard, we check it with a
+// compile-time assert (msg[-1] will give a compile-time error).
+namespace fLB {
+struct CompileAssert {};
+typedef CompileAssert expected_sizeof_double_neq_sizeof_bool[
+                      (sizeof(double) != sizeof(bool)) ? 1 : -1];
+template<typename From> GFLAGS_DLL_DECL double IsBoolFlag(const From& from);
+GFLAGS_DLL_DECL bool IsBoolFlag(bool from);
+}  // namespace fLB
+
+#define DECLARE_bool(name)          DECLARE_VARIABLE(bool, B, name)
+#define DEFINE_bool(name, val, txt)                                       \
+  namespace fLB {                                                         \
+    typedef ::fLB::CompileAssert FLAG_##name##_value_is_not_a_bool[       \
+            (sizeof(::fLB::IsBoolFlag(val)) != sizeof(double)) ? 1 : -1]; \
+  }                                                                       \
+  DEFINE_VARIABLE(bool, B, name, val, txt)
+
+#define DECLARE_int32(name)         DECLARE_VARIABLE(::google::int32, I, name)
+#define DEFINE_int32(name,val,txt)  DEFINE_VARIABLE(::google::int32, I, name, val, txt)
+
+#define DECLARE_int64(name)         DECLARE_VARIABLE(::google::int64, I64, name)
+#define DEFINE_int64(name,val,txt)  DEFINE_VARIABLE(::google::int64, I64, name, val, txt)
+
+#define DECLARE_uint64(name)        DECLARE_VARIABLE(::google::uint64, U64, name)
+#define DEFINE_uint64(name,val,txt) DEFINE_VARIABLE(::google::uint64, U64, name, val, txt)
+
+#define DECLARE_double(name)          DECLARE_VARIABLE(double, D, name)
+#define DEFINE_double(name, val, txt) DEFINE_VARIABLE(double, D, name, val, txt)
+
+// Strings are trickier, because they're not a POD, so we can't
+// construct them at static-initialization time (instead they get
+// constructed at global-constructor time, which is much later).  To
+// try to avoid crashes in that case, we use a char buffer to store
+// the string, which we can static-initialize, and then placement-new
+// into it later.  It's not perfect, but the best we can do.
+
+namespace fLS {
+// The meaning of "string" might be different between now and when the
+// macros below get invoked (e.g., if someone is experimenting with
+// other string implementations that get defined after this file is
+// included).  Save the current meaning now and use it in the macros.
+typedef std::string clstring;
+
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const char *value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           const clstring &value) {
+  return new(stringspot) clstring(value);
+}
+inline clstring* dont_pass0toDEFINE_string(char *stringspot,
+                                           int value);
+}  // namespace fLS
+
+#define DECLARE_string(name)  namespace fLS { extern GFLAGS_DLL_DECLARE_FLAG ::fLS::clstring& FLAGS_##name; } \
+                              using fLS::FLAGS_##name
+
+// We need to define a var named FLAGS_no##name so people don't define
+// --string and --nostring.  And we need a temporary place to put val
+// so we don't have to evaluate it twice.  Two great needs that go
+// great together!
+#define DEFINE_string(name, val, txt)                                       \
+  namespace fLS {                                                           \
+    using ::fLS::clstring;                                                  \
+    static union { void* align; char s[sizeof(clstring)]; } s_##name[2];    \
+    clstring* const FLAGS_no##name = ::fLS::                                \
+                                   dont_pass0toDEFINE_string(s_##name[0].s, \
+                                                             val);          \
+    static ::google::FlagRegisterer o_##name(                  \
+        #name, "string", MAYBE_STRIPPED_HELP(txt), __FILE__,                \
+        s_##name[0].s, new (s_##name[1].s) clstring(*FLAGS_no##name));      \
+    GFLAGS_DLL_DEFINE_FLAG clstring& FLAGS_##name = *FLAGS_no##name;        \
+  }                                                                         \
+  using fLS::FLAGS_##name
+
+#endif  // SWIG
+
+#endif  // GOOGLE_GFLAGS_H_
diff --git a/third_party/google-gflags/gen/arch/win/x64/include/gflags/gflags_completions.h b/third_party/google-gflags/gen/arch/win/x64/include/gflags/gflags_completions.h
new file mode 100644
index 0000000..e97de5b3
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/win/x64/include/gflags/gflags_completions.h
@@ -0,0 +1,130 @@
+// Copyright (c) 2008, Google Inc.
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//     * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//     * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//     * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+//
+// ---
+// Author: Dave Nicponski
+//
+// Implement helpful bash-style command line flag completions
+//
+// ** Functional API:
+// HandleCommandLineCompletions() should be called early during
+// program startup, but after command line flag code has been
+// initialized, such as the beginning of HandleCommandLineHelpFlags().
+// It checks the value of the flag --tab_completion_word.  If this
+// flag is empty, nothing happens here.  If it contains a string,
+// however, then HandleCommandLineCompletions() will hijack the
+// process, attempting to identify the intention behind this
+// completion.  Regardless of the outcome of this deduction, the
+// process will be terminated, similar to --helpshort flag
+// handling.
+//
+// ** Overview of Bash completions:
+// Bash can be told to programatically determine completions for the
+// current 'cursor word'.  It does this by (in this case) invoking a
+// command with some additional arguments identifying the command
+// being executed, the word being completed, and the previous word
+// (if any).  Bash then expects a sequence of output lines to be
+// printed to stdout.  If these lines all contain a common prefix
+// longer than the cursor word, bash will replace the cursor word
+// with that common prefix, and display nothing.  If there isn't such
+// a common prefix, bash will display the lines in pages using 'more'.
+//
+// ** Strategy taken for command line completions:
+// If we can deduce either the exact flag intended, or a common flag
+// prefix, we'll output exactly that.  Otherwise, if information
+// must be displayed to the user, we'll take the opportunity to add
+// some helpful information beyond just the flag name (specifically,
+// we'll include the default flag value and as much of the flag's
+// description as can fit on a single terminal line width, as specified
+// by the flag --tab_completion_columns).  Furthermore, we'll try to
+// make bash order the output such that the most useful or relevent
+// flags are the most likely to be shown at the top.
+//
+// ** Additional features:
+// To assist in finding that one really useful flag, substring matching
+// was implemented.  Before pressing a <TAB> to get completion for the
+// current word, you can append one or more '?' to the flag to do
+// substring matching.  Here's the semantics:
+//   --foo<TAB>     Show me all flags with names prefixed by 'foo'
+//   --foo?<TAB>    Show me all flags with 'foo' somewhere in the name
+//   --foo??<TAB>   Same as prior case, but also search in module
+//                  definition path for 'foo'
+//   --foo???<TAB>  Same as prior case, but also search in flag
+//                  descriptions for 'foo'
+// Finally, we'll trim the output to a relatively small number of
+// flags to keep bash quiet about the verbosity of output.  If one
+// really wanted to see all possible matches, appending a '+' to the
+// search word will force the exhaustive list of matches to be printed.
+//
+// ** How to have bash accept completions from a binary:
+// Bash requires that it be informed about each command that programmatic
+// completion should be enabled for.  Example addition to a .bashrc
+// file would be (your path to gflags_completions.sh file may differ):
+
+/*
+$ complete -o bashdefault -o default -o nospace -C                        \
+ '/usr/local/bin/gflags_completions.sh --tab_completion_columns $COLUMNS' \
+  time  env  binary_name  another_binary  [...]
+*/
+
+// This would allow the following to work:
+//   $ /path/to/binary_name --vmodule<TAB>
+// Or:
+//   $ ./bin/path/another_binary --gfs_u<TAB>
+// (etc)
+//
+// Sadly, it appears that bash gives no easy way to force this behavior for
+// all commands.  That's where the "time" in the above example comes in.
+// If you haven't specifically added a command to the list of completion
+// supported commands, you can still get completions by prefixing the
+// entire command with "env".
+//   $ env /some/brand/new/binary --vmod<TAB>
+// Assuming that "binary" is a newly compiled binary, this should still
+// produce the expected completion output.
+
+
+#ifndef GOOGLE_GFLAGS_COMPLETIONS_H_
+#define GOOGLE_GFLAGS_COMPLETIONS_H_
+
+// Annoying stuff for windows -- makes sure clients can import these functions
+#ifndef GFLAGS_DLL_DECL
+# ifdef _WIN32
+#   define GFLAGS_DLL_DECL  __declspec(dllimport)
+# else
+#   define GFLAGS_DLL_DECL
+# endif
+#endif
+
+namespace google {
+
+GFLAGS_DLL_DECL void HandleCommandLineCompletions(void);
+
+}
+
+#endif  // GOOGLE_GFLAGS_COMPLETIONS_H_
diff --git a/third_party/google-gflags/gen/arch/win/x64/include/private/config.h b/third_party/google-gflags/gen/arch/win/x64/include/private/config.h
new file mode 100644
index 0000000..dcca757
--- /dev/null
+++ b/third_party/google-gflags/gen/arch/win/x64/include/private/config.h
@@ -0,0 +1,139 @@
+/* src/config.h.in.  Generated from configure.ac by autoheader.  */
+
+/* Sometimes we accidentally #include this config.h instead of the one
+   in .. -- this is particularly true for msys/mingw, which uses the
+   unix config.h but also runs code in the windows directory.
+   */
+#ifdef __MINGW32__
+#include "../config.h"
+#define GOOGLE_GFLAGS_WINDOWS_CONFIG_H_
+#endif
+
+#ifndef GOOGLE_GFLAGS_WINDOWS_CONFIG_H_
+#define GOOGLE_GFLAGS_WINDOWS_CONFIG_H_
+
+/* Always the empty-string on non-windows systems. On windows, should be
+   "__declspec(dllexport)". This way, when we compile the dll, we export our
+   functions/classes. It's safe to define this here because config.h is only
+   used internally, to compile the DLL, and every DLL source file #includes
+   "config.h" before anything else. */
+#ifndef GFLAGS_DLL_DECL
+# define GFLAGS_IS_A_DLL  1   /* not set if you're statically linking */
+# define GFLAGS_DLL_DECL  __declspec(dllexport)
+# define GFLAGS_DLL_DECL_FOR_UNITTESTS  __declspec(dllimport)
+#endif
+
+/* Namespace for Google classes */
+#define GOOGLE_NAMESPACE  ::google
+
+/* Define to 1 if you have the <dlfcn.h> header file. */
+#undef HAVE_DLFCN_H
+
+/* Define to 1 if you have the <fnmatch.h> header file. */
+#undef HAVE_FNMATCH_H
+
+/* Define to 1 if you have the <inttypes.h> header file. */
+#undef HAVE_INTTYPES_H
+
+/* Define to 1 if you have the <memory.h> header file. */
+#undef HAVE_MEMORY_H
+
+/* define if the compiler implements namespaces */
+#define HAVE_NAMESPACES  1
+
+/* Define if you have POSIX threads libraries and header files. */
+#undef HAVE_PTHREAD
+
+/* Define to 1 if you have the `putenv' function. */
+#define HAVE_PUTENV  1
+
+/* Define to 1 if you have the `setenv' function. */
+#undef HAVE_SETENV
+
+/* Define to 1 if you have the <stdint.h> header file. */
+#undef HAVE_STDINT_H
+
+/* Define to 1 if you have the <stdlib.h> header file. */
+#define HAVE_STDLIB_H 1
+
+/* Define to 1 if you have the <strings.h> header file. */
+#undef HAVE_STRINGS_H
+
+/* Define to 1 if you have the <string.h> header file. */
+#define HAVE_STRING_H 1
+
+/* Define to 1 if you have the `strtoll' function. */
+#define HAVE_STRTOLL  1
+
+/* Define to 1 if you have the `strtoq' function. */
+#define HAVE_STRTOQ  1
+
+/* Define to 1 if you have the <sys/stat.h> header file. */
+#define HAVE_SYS_STAT_H 1
+
+/* Define to 1 if you have the <sys/types.h> header file. */
+#define HAVE_SYS_TYPES_H 1
+
+/* Define to 1 if you have the <unistd.h> header file. */
+#undef HAVE_UNISTD_H
+
+/* define if your compiler has __attribute__ */
+#undef HAVE___ATTRIBUTE__
+
+/* Define to the sub-directory in which libtool stores uninstalled libraries.
+   */
+#undef LT_OBJDIR
+
+/* Name of package */
+#undef PACKAGE
+
+/* Define to the address where bug reports for this package should be sent. */
+#undef PACKAGE_BUGREPORT
+
+/* Define to the full name of this package. */
+#undef PACKAGE_NAME
+
+/* Define to the full name and version of this package. */
+#undef PACKAGE_STRING
+
+/* Define to the one symbol short name of this package. */
+#undef PACKAGE_TARNAME
+
+/* Define to the home page for this package. */
+#undef PACKAGE_URL
+
+/* Define to the version of this package. */
+#undef PACKAGE_VERSION
+
+/* Define to necessary symbol if this constant uses a non-standard name on
+   your system. */
+#undef PTHREAD_CREATE_JOINABLE
+
+/* Define to 1 if you have the ANSI C header files. */
+#define STDC_HEADERS  1
+
+/* the namespace where STL code like vector<> is defined */
+#define STL_NAMESPACE  std
+
+/* Version number of package */
+#undef VERSION
+
+/* Stops putting the code inside the Google namespace */
+#define _END_GOOGLE_NAMESPACE_  }
+
+/* Puts following code inside the Google namespace */
+#define _START_GOOGLE_NAMESPACE_  namespace google {
+
+// ---------------------------------------------------------------------
+// Extra stuff not found in config.h.in
+
+// This must be defined before the windows.h is included.  It's needed
+// for mutex.h, to give access to the TryLock method.
+#ifndef _WIN32_WINNT
+# define _WIN32_WINNT 0x0400
+#endif
+
+// TODO(csilvers): include windows/port.h in every relevant source file instead?
+#include "windows/port.h"
+
+#endif  /* GOOGLE_GFLAGS_WINDOWS_CONFIG_H_ */
diff --git a/third_party/google-gflags/google-gflags.gyp b/third_party/google-gflags/google-gflags.gyp
new file mode 100644
index 0000000..211280a
--- /dev/null
+++ b/third_party/google-gflags/google-gflags.gyp
@@ -0,0 +1,70 @@
+# Copyright 2011 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+{
+  'variables': {
+    'gflags_root': '<(DEPTH)/third_party/google-gflags',
+    'gflags_gen_arch_root': '<(gflags_root)/gen/arch/<(OS)/<(target_arch)',
+  },
+  'targets': [
+    {
+      'target_name': 'google-gflags',
+      'type': '<(library)',
+      'include_dirs': [
+        '<(gflags_gen_arch_root)/include/private',  # For config.h
+        '<(gflags_gen_arch_root)/include',  # For configured files.
+        '<(gflags_root)/src',  # For everything else.
+      ],
+      'defines': [
+        # These macros exist so flags and symbols are properly
+        # exported when building DLLs. Since we don't build DLLs, we
+        # need to disable them.
+        'GFLAGS_DLL_DECL=',
+        'GFLAGS_DLL_DECLARE_FLAG=',
+        'GFLAGS_DLL_DEFINE_FLAG=',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '<(gflags_gen_arch_root)/include',  # For configured files.
+          '<(gflags_root)/src',  # For everything else.
+        ],
+        'defines': [
+          'GFLAGS_DLL_DECL=',
+          'GFLAGS_DLL_DECLARE_FLAG=',
+          'GFLAGS_DLL_DEFINE_FLAG=',
+        ],
+      },
+      'sources': [
+        'src/gflags.cc',
+        'src/gflags_completions.cc',
+        'src/gflags_reporting.cc',
+      ],
+      'conditions': [
+        ['OS == "win"', {
+          'sources': [
+            'src/windows/port.cc',
+          ],
+        }],
+        # TODO(andrew): Look into fixing this warning upstream:
+        # http://code.google.com/p/webrtc/issues/detail?id=760
+        ['clang==1', {
+          'cflags!': ['-Wheader-hygiene',],
+          'xcode_settings': {
+            'WARNING_CFLAGS!': ['-Wheader-hygiene',],
+          },
+        }],
+      ],
+    },
+  ],
+}
diff --git a/third_party/libvpx/copy_obj.sh b/third_party/libvpx/copy_obj.sh
new file mode 100755
index 0000000..a79f525
--- /dev/null
+++ b/third_party/libvpx/copy_obj.sh
@@ -0,0 +1,36 @@
+#!/bin/bash -e
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script is used to copy a file from several possible locations.
+#
+# Arguments:
+#
+# -d - Destination.
+# -s - Source file path.
+#
+
+while getopts "d:s:" flag; do
+  if [ "$flag" = "d" ]; then
+    dest=$OPTARG
+  elif [ "$flag" = "s" ]; then
+    srcs="$OPTARG $srcs"
+  fi
+done
+
+for f in $srcs; do
+  if [ -a $f ]; then
+    src=$f
+    break
+  fi
+done
+
+if [ -z "$src" ]; then
+  echo "Unable to locate file."
+  false
+  exit
+fi
+
+cp "$src" "$dest"
diff --git a/third_party/libvpx/generate_gypi.sh b/third_party/libvpx/generate_gypi.sh
new file mode 100755
index 0000000..56c9c4d
--- /dev/null
+++ b/third_party/libvpx/generate_gypi.sh
@@ -0,0 +1,145 @@
+#!/bin/bash -e
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script is used to generate .gypi files needed to build libvpx.
+# Every time libvpx source code is updated just run this script.
+#
+# For example:
+# $ ./generate_gypi.sh
+#
+# And this will update all the .gypi files needed.
+#
+# Configuration for building on each platform is taken from the
+# corresponding vpx_config.h
+#
+
+BASE_DIR=`pwd`
+LIBVPX_SRC_DIR="source/libvpx"
+LIBVPX_CONFIG_DIR="source/config"
+
+# Convert a list of source files into gypi file.
+# $1 - Input file.
+# $2 - Output gypi file.
+function convert_srcs_to_gypi {
+  # Do the following here:
+  # 1. Filter .c, .h, .s, .S and .asm files.
+  # 2. Exclude *_offsets.c.
+  # 3. Exclude vpx_config.c.
+  # 4. Repelace .asm.s to .asm because gyp will do the conversion.
+  local source_list=`grep -E '(\.c|\.h|\.S|\.s|\.asm)$' $1 | grep -v '_offsets\.c' | grep -v 'vpx_config\.c' | sed s/\.asm\.s$/.asm/ | sort`
+
+  # Build the gypi file.
+  echo "# This file is generated. Do not edit." > $2
+  echo "# Copyright (c) 2012 The Chromium Authors. All rights reserved." >> $2
+  echo "# Use of this source code is governed by a BSD-style license that can be" >> $2
+  echo "# found in the LICENSE file." >> $2
+  echo "" >> $2
+  echo "{" >> $2
+  echo "  'sources': [" >> $2
+  for f in $source_list
+  do
+    echo "    '$LIBVPX_SRC_DIR/$f'," >> $2
+  done
+  echo "  ]," >> $2
+  echo "}" >> $2
+}
+
+# Clean files from previous make.
+function make_clean {
+  make clean > /dev/null
+  rm -f libvpx_srcs.txt
+}
+
+# Lint a pair of vpx_config.h and vpx_config.asm to make sure they match.
+# $1 - Header file directory.
+function lint_config {
+  $BASE_DIR/lint_config.sh \
+    -h $BASE_DIR/$LIBVPX_CONFIG_DIR/$1/vpx_config.h \
+    -a $BASE_DIR/$LIBVPX_CONFIG_DIR/$1/vpx_config.asm
+}
+
+# Print the configuration.
+# $1 - Header file directory.
+function print_config {
+  $BASE_DIR/lint_config.sh -p \
+    -h $BASE_DIR/$LIBVPX_CONFIG_DIR/$1/vpx_config.h \
+    -a $BASE_DIR/$LIBVPX_CONFIG_DIR/$1/vpx_config.asm
+}
+
+# Generate vpx_rtcd.h.
+# $1 - Header file directory.
+# $2 - Architecture.
+function gen_rtcd_header {
+  echo "Generate $LIBVPX_CONFIG_DIR/$1/vpx_rtcd.h."
+
+  rm -rf $BASE_DIR/$TEMP_DIR/libvpx.config
+  $BASE_DIR/lint_config.sh -p \
+    -h $BASE_DIR/$LIBVPX_CONFIG_DIR/$1/vpx_config.h \
+    -a $BASE_DIR/$LIBVPX_CONFIG_DIR/$1/vpx_config.asm \
+    -o $BASE_DIR/$TEMP_DIR/libvpx.config
+
+  $BASE_DIR/$LIBVPX_SRC_DIR/build/make/rtcd.sh \
+    --arch=$2 \
+    --sym=vpx_rtcd \
+    --config=$BASE_DIR/$TEMP_DIR/libvpx.config \
+    $BASE_DIR/$LIBVPX_SRC_DIR/vp8/common/rtcd_defs.sh \
+    > $BASE_DIR/$LIBVPX_CONFIG_DIR/$1/vpx_rtcd.h
+
+  rm -rf $BASE_DIR/$TEMP_DIR/libvpx.config
+}
+
+echo "Lint libvpx configuration."
+lint_config linux/ia32
+lint_config linux/x64
+lint_config linux/arm
+lint_config linux/arm-neon
+lint_config win/ia32
+lint_config mac/ia32
+
+echo "Create temporary directory."
+TEMP_DIR="$LIBVPX_SRC_DIR.temp"
+rm -rf $TEMP_DIR
+cp -R $LIBVPX_SRC_DIR $TEMP_DIR
+cd $TEMP_DIR
+
+gen_rtcd_header linux/ia32 x86
+gen_rtcd_header linux/x64 x86_64
+gen_rtcd_header linux/arm armv5te
+gen_rtcd_header linux/arm-neon armv7
+gen_rtcd_header win/ia32 x86
+gen_rtcd_header mac/ia32 x86
+
+echo "Prepare Makefile."
+./configure --target=generic-gnu > /dev/null
+make_clean
+
+echo "Generate X86 source list."
+config=$(print_config linux/ia32)
+make_clean
+make libvpx_srcs.txt target=libs $config > /dev/null
+convert_srcs_to_gypi libvpx_srcs.txt $BASE_DIR/libvpx_srcs_x86.gypi
+
+echo "Generate X86_64 source list."
+config=$(print_config linux/x64)
+make_clean
+make libvpx_srcs.txt target=libs $config > /dev/null
+convert_srcs_to_gypi libvpx_srcs.txt $BASE_DIR/libvpx_srcs_x86_64.gypi
+
+echo "Generate ARM source list."
+config=$(print_config linux/arm)
+make_clean
+make libvpx_srcs.txt target=libs $config > /dev/null
+convert_srcs_to_gypi libvpx_srcs.txt $BASE_DIR/libvpx_srcs_arm.gypi
+
+echo "Generate ARM NEON source list."
+config=$(print_config linux/arm-neon)
+make_clean
+make libvpx_srcs.txt target=libs $config > /dev/null
+convert_srcs_to_gypi libvpx_srcs.txt $BASE_DIR/libvpx_srcs_arm_neon.gypi
+
+echo "Remove temporary directory."
+cd $BASE_DIR
+rm -rf $TEMP_DIR
diff --git a/third_party/libvpx/libvpx.gyp b/third_party/libvpx/libvpx.gyp
new file mode 100644
index 0000000..7323131
--- /dev/null
+++ b/third_party/libvpx/libvpx.gyp
@@ -0,0 +1,498 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'variables': {
+    'conditions': [
+      ['os_posix==1', {
+        'asm_obj_extension': 'o',
+      }],
+      ['OS=="win"', {
+        'asm_obj_extension': 'obj',
+      }],
+
+      ['target_arch=="arm" and arm_neon==1', {
+        'target_arch_full': 'arm-neon',
+      }, {
+        'target_arch_full': '<(target_arch)',
+      }],
+
+      # Conversion to libvpx arch names.
+      ['target_arch=="arm" and arm_neon==1', {
+        'libvpx_arch': 'armv7',
+      }],
+      ['target_arch=="arm" and arm_neon==0', {
+        'libvpx_arch': 'armv6',
+      }],
+      ['target_arch=="ia32"', {
+        'libvpx_arch': 'x86',
+      }],
+      ['target_arch=="x64"', {
+        'libvpx_arch': 'x86_64',
+      }],
+
+      ['os_posix == 1 and OS != "mac"', {
+        'OS_CATEGORY%': 'linux',
+      }, {
+        'OS_CATEGORY%': '<(OS)',
+      }],
+    ],
+
+    # Location of the intermediate output.
+    'shared_generated_dir': '<(SHARED_INTERMEDIATE_DIR)/third_party/libvpx',
+  },
+
+  'conditions': [
+    # TODO(andrew): Hack to ensure we pass -msse2 to gcc on Linux for files
+    # containing SSE intrinsics. This should be handled in the gyp generator
+    # scripts somehow. Clang (default on Mac) doesn't require this.
+    ['target_arch=="ia32" or target_arch=="x64"', {
+      'targets' : [
+        {
+          'target_name': 'libvpx_sse2',
+          'type': 'static_library',
+          'include_dirs': [
+            'source/config/<(OS)/<(target_arch)',
+            'source/libvpx',
+            'source/libvpx/vp8/common',
+            'source/libvpx/vp8/decoder',
+            'source/libvpx/vp8/encoder',
+          ],
+          'sources': [
+            'source/libvpx/vp8/encoder/x86/denoising_sse2.c',
+          ],
+          'conditions': [
+            ['os_posix==1 and OS!="mac"', {
+              'cflags': [ '-msse2', ],
+            }],
+            ['OS=="mac"', {
+              'xcode_settings': {
+                'OTHER_CFLAGS': [ '-msse2', ],
+              },
+            }],
+          ],
+        },
+      ],
+    }],
+    [ 'target_arch!="arm"', {
+      'targets': [
+        {
+          # This libvpx target contains both encoder and decoder.
+          # Encoder is configured to be realtime only.
+          'target_name': 'libvpx',
+          'type': 'static_library',
+          'variables': {
+            'yasm_output_path': '<(SHARED_INTERMEDIATE_DIR)/third_party/libvpx',
+            'OS_CATEGORY%': '<(OS_CATEGORY)',
+            'yasm_flags': [
+              '-D', 'CHROMIUM',
+              '-I', 'source/config/<(OS_CATEGORY)/<(target_arch)',
+              '-I', 'source/libvpx',
+              '-I', '<(shared_generated_dir)', # Generated assembly offsets
+            ],
+          },
+          'dependencies': [
+            'gen_asm_offsets',
+          ],
+          'includes': [
+            '../yasm/yasm_compile.gypi'
+          ],
+          'include_dirs': [
+            'source/config/<(OS_CATEGORY)/<(target_arch)',
+            'source/libvpx',
+            'source/libvpx/vp8/common',
+            'source/libvpx/vp8/decoder',
+            'source/libvpx/vp8/encoder',
+            '<(shared_generated_dir)', # Provides vpx_rtcd.h.
+          ],
+          'direct_dependent_settings': {
+            'include_dirs': [
+              'source/libvpx',
+            ],
+          },
+          # VS2010 does not correctly incrementally link obj files generated
+          # from asm files. This flag disables UseLibraryDependencyInputs to
+          # avoid this problem.
+          'msvs_2010_disable_uldi_when_referenced': 1,
+          'conditions': [
+            [ 'target_arch=="ia32"', {
+              'includes': [
+                'libvpx_srcs_x86.gypi',
+              ],
+              'dependencies': [ 'libvpx_sse2', ],
+            }],
+            [ 'target_arch=="x64"', {
+              'includes': [
+                'libvpx_srcs_x86_64.gypi',
+              ],
+              'dependencies': [ 'libvpx_sse2', ],
+            }],
+            ['clang == 1', {
+              'xcode_settings': {
+                'WARNING_CFLAGS': [
+                  # libvpx heavily relies on implicit enum casting.
+                  '-Wno-conversion',
+                  # libvpx does `if ((a == b))` in some places.
+                  '-Wno-parentheses-equality',
+                ],
+              },
+              'cflags': [
+                '-Wno-conversion',
+                '-Wno-parentheses-equality',
+              ],
+            }],
+            [ 'chromeos == 1', {
+              # ChromeOS needs these files for animated WebM avatars.
+              'sources': [
+                'source/libvpx/libmkv/EbmlIDs.h',
+                'source/libvpx/libmkv/EbmlWriter.c',
+                'source/libvpx/libmkv/EbmlWriter.h',
+              ],
+            }],
+          ],
+        },
+      ],
+    },
+    ],
+    # 'libvpx' target for ARM builds.
+    [ 'target_arch=="arm" ', {
+      'targets': [
+        {
+          # This libvpx target contains both encoder and decoder.
+          # Encoder is configured to be realtime only.
+          'target_name': 'libvpx',
+          'type': 'static_library',
+          'dependencies': [
+            'gen_asm_offsets',
+          ],
+
+          # Copy the script to the output folder so that we can use it with
+          # absolute path.
+          'copies': [{
+            'destination': '<(shared_generated_dir)',
+            'files': [
+              '<(ads2gas_script_path)',
+            ],
+          }],
+
+          # Rule to convert .asm files to .S files.
+          'rules': [
+            {
+              'rule_name': 'convert_asm',
+              'extension': 'asm',
+              'inputs': [ '<(shared_generated_dir)/<(ads2gas_script)', ],
+              'outputs': [
+                '<(shared_generated_dir)/<(RULE_INPUT_ROOT).S',
+              ],
+              'action': [
+                'bash',
+                '-c',
+                'cat <(RULE_INPUT_PATH) | perl <(shared_generated_dir)/<(ads2gas_script) > <(shared_generated_dir)/<(RULE_INPUT_ROOT).S',
+              ],
+              'process_outputs_as_sources': 1,
+              'message': 'Convert libvpx asm file for ARM <(RULE_INPUT_PATH).',
+            },
+          ],
+
+          'variables': {
+            # Location of the assembly conversion script.
+            'ads2gas_script': 'ads2gas.pl',
+            'ads2gas_script_path': 'source/libvpx/build/make/<(ads2gas_script)',
+          },
+          'cflags': [
+            # We need to explicitly tell the GCC assembler to look for
+            # .include directive files from the place where they're
+            # generated to.
+            '-Wa,-I,<!(pwd)/source/config/<(OS_CATEGORY)/<(target_arch_full)',
+            '-Wa,-I,<(shared_generated_dir)',
+          ],
+          'include_dirs': [
+            'source/config/<(OS_CATEGORY)/<(target_arch_full)',
+            'source/libvpx',
+          ],
+          'direct_dependent_settings': {
+            'include_dirs': [
+              'source/libvpx',
+            ],
+          },
+          'conditions': [
+            # Libvpx optimizations for ARMv6 or ARMv7 without NEON.
+            ['arm_neon==0', {
+              'includes': [
+                'libvpx_srcs_arm.gypi',
+              ],
+            }],
+            # Libvpx optimizations for ARMv7 with NEON.
+            ['arm_neon==1', {
+              'includes': [
+                'libvpx_srcs_arm_neon.gypi',
+              ],
+            }],
+            ['OS == "android"', {
+              # TODO(andrew): include_dirs block removed here. Should likely be
+              # upstreamed.
+              'defines': [
+                'ANDROID_CPU_ARM_FEATURE_NEON=4',
+              ],
+            }],
+            [ 'chromeos == 1', {
+              # ChromeOS needs these files for animated WebM avatars.
+              'sources': [
+                'source/libvpx/libmkv/EbmlIDs.h',
+                'source/libvpx/libmkv/EbmlWriter.c',
+                'source/libvpx/libmkv/EbmlWriter.h',
+              ],
+            }],
+          ],
+        },
+      ],
+    }],
+  ],
+  'targets': [
+    {
+      # A tool that runs on host to tract integers from object file.
+      'target_name': 'libvpx_obj_int_extract',
+      'type': 'executable',
+      'toolsets': ['host'],
+      'include_dirs': [
+        'source/config/<(OS_CATEGORY)/<(target_arch_full)',
+        'source/libvpx',
+      ],
+      'sources': [
+        'source/libvpx/build/make/obj_int_extract.c',
+      ]
+    },
+    {
+      # A library that contains assembly offsets needed.
+      'target_name': 'libvpx_asm_offsets',
+      'type': 'static_library',
+      'hard_dependency': 1,
+      'include_dirs': [
+        'source/config/<(OS_CATEGORY)/<(target_arch_full)',
+        'source/libvpx',
+      ],
+      'conditions': [
+        ['asan==1', {
+          'cflags!': [ '-faddress-sanitizer', ],
+          'xcode_settings': {
+            'OTHER_CFLAGS!': [ '-faddress-sanitizer', ],
+          },
+          'ldflags!': [ '-faddress-sanitizer', ],
+        }],
+      ],
+      'sources': [
+        '<(shared_generated_dir)/vpx_rtcd.h',
+        'source/libvpx/vp8/common/asm_com_offsets.c',
+        'source/libvpx/vp8/decoder/asm_dec_offsets.c',
+        'source/libvpx/vp8/encoder/asm_enc_offsets.c',
+      ],
+    },
+    {
+      # A target that takes assembly offsets library and generate the
+      # corresponding assembly files.
+      # This target is a hard dependency because the generated .asm files
+      # are needed all assembly optimized files in libvpx.
+      'target_name': 'gen_asm_offsets',
+      'type': 'none',
+      'hard_dependency': 1,
+      'dependencies': [
+        'libvpx_asm_offsets',
+        'libvpx_obj_int_extract#host',
+      ],
+      'conditions': [
+        ['OS=="win"', {
+          'variables': {
+            'ninja_obj_dir': '<(PRODUCT_DIR)/obj/third_party/libvpx/source/libvpx/vp8',
+          },
+          'actions': [
+            {
+              'action_name': 'copy_enc_offsets_obj',
+              'inputs': [ 'copy_obj.sh' ],
+              'outputs': [ '<(INTERMEDIATE_DIR)/asm_enc_offsets.obj' ],
+              'action': [
+                '<(DEPTH)/third_party/libvpx/copy_obj.sh',
+                '-d', '<@(_outputs)',
+                '-s', '<(PRODUCT_DIR)/obj/libvpx_asm_offsets/asm_enc_offsets.obj',
+                '-s', '<(ninja_obj_dir)/encoder/libvpx_asm_offsets.asm_enc_offsets.obj',
+              ],
+              'process_output_as_sources': 1,
+            },
+            {
+              'action_name': 'copy_dec_offsets_obj',
+              'inputs': [ 'copy_obj.sh' ],
+              'outputs': [ '<(INTERMEDIATE_DIR)/asm_dec_offsets.obj' ],
+              'action': [
+                '<(DEPTH)/third_party/libvpx/copy_obj.sh',
+                '-d', '<@(_outputs)',
+                '-s', '<(PRODUCT_DIR)/obj/libvpx_asm_offsets/asm_dec_offsets.obj',
+                '-s', '<(ninja_obj_dir)/decoder/libvpx_asm_offsets.asm_dec_offsets.obj',
+              ],
+              'process_output_as_sources': 1,
+            },
+            {
+              'action_name': 'copy_com_offsets_obj',
+              'inputs': [ 'copy_obj.sh' ],
+              'outputs': [ '<(INTERMEDIATE_DIR)/asm_com_offsets.obj' ],
+              'action': [
+                '<(DEPTH)/third_party/libvpx/copy_obj.sh',
+                '-d', '<@(_outputs)',
+                '-s', '<(PRODUCT_DIR)/obj/libvpx_asm_offsets/asm_com_offsets.obj',
+                '-s', '<(ninja_obj_dir)/common/libvpx_asm_offsets.asm_com_offsets.obj',
+              ],
+              'process_output_as_sources': 1,
+            },
+          ],
+          'sources': [
+            '<(INTERMEDIATE_DIR)/asm_com_offsets.obj',
+            '<(INTERMEDIATE_DIR)/asm_dec_offsets.obj',
+            '<(INTERMEDIATE_DIR)/asm_enc_offsets.obj',
+          ],
+        }, {
+          'actions': [
+            {
+              # Take archived .a file and unpack it unto .o files.
+              'action_name': 'unpack_lib_posix',
+              'inputs': [
+                'unpack_lib_posix.sh',
+              ],
+              'outputs': [
+                '<(INTERMEDIATE_DIR)/asm_com_offsets.o',
+                '<(INTERMEDIATE_DIR)/asm_dec_offsets.o',
+                '<(INTERMEDIATE_DIR)/asm_enc_offsets.o',
+              ],
+              'action': [
+                '<(DEPTH)/third_party/libvpx/unpack_lib_posix.sh',
+                '-d', '<(INTERMEDIATE_DIR)',
+                '-a', '<(LIB_DIR)/libvpx_asm_offsets.a',
+                '-a', '<(LIB_DIR)/third_party/libvpx/libvpx_asm_offsets.a',
+                '-f', 'asm_com_offsets.o',
+                '-f', 'asm_dec_offsets.o',
+                '-f', 'asm_enc_offsets.o',
+              ],
+              'process_output_as_sources': 1,
+            },
+          ],
+          # Need this otherwise gyp won't run the rule on them.
+          'sources': [
+            '<(INTERMEDIATE_DIR)/asm_com_offsets.o',
+            '<(INTERMEDIATE_DIR)/asm_dec_offsets.o',
+            '<(INTERMEDIATE_DIR)/asm_enc_offsets.o',
+          ],
+        }],
+      ],
+      'rules': [
+        {
+          # Rule to extract integer values for each symbol from an object file.
+          'rule_name': 'obj_int_extract',
+          'extension': '<(asm_obj_extension)',
+          'inputs': [
+            '<(PRODUCT_DIR)/libvpx_obj_int_extract',
+            'obj_int_extract.sh',
+          ],
+          'outputs': [
+            '<(shared_generated_dir)/<(RULE_INPUT_ROOT).asm',
+          ],
+          'variables': {
+            'conditions': [
+              ['target_arch=="arm"', {
+                'asm_format': 'gas',
+              }, {
+                'asm_format': 'rvds',
+              }],
+            ],
+          },
+          'action': [
+            '<(DEPTH)/third_party/libvpx/obj_int_extract.sh',
+            '-e', '<(PRODUCT_DIR)/libvpx_obj_int_extract',
+            '-f', '<(asm_format)',
+            '-b', '<(RULE_INPUT_PATH)',
+            '-o', '<(shared_generated_dir)/<(RULE_INPUT_ROOT).asm',
+          ],
+          'message': 'Generate assembly offsets <(RULE_INPUT_PATH).',
+        },
+      ],
+    },
+    {
+      'target_name': 'simple_encoder',
+      'type': 'executable',
+      'dependencies': [
+        'libvpx',
+      ],
+
+      # Copy the script to the output folder so that we can use it with
+      # absolute path.
+      'copies': [{
+        'destination': '<(shared_generated_dir)/simple_encoder',
+        'files': [
+          'source/libvpx/examples/gen_example_code.sh',
+        ],
+      }],
+
+      # Rule to convert .txt files to .c files.
+      'rules': [
+        {
+          'rule_name': 'generate_example',
+          'extension': 'txt',
+          'inputs': [ '<(shared_generated_dir)/simple_encoder/gen_example_code.sh', ],
+          'outputs': [
+            '<(shared_generated_dir)/<(RULE_INPUT_ROOT).c',
+          ],
+          'action': [
+            'bash',
+            '-c',
+            '<(shared_generated_dir)/simple_encoder/gen_example_code.sh <(RULE_INPUT_PATH) > <(shared_generated_dir)/<(RULE_INPUT_ROOT).c',
+          ],
+          'process_outputs_as_sources': 1,
+          'message': 'Generate libvpx example code <(RULE_INPUT_PATH).',
+        },
+      ],
+      'sources': [
+        'source/libvpx/examples/simple_encoder.txt',
+      ]
+    },
+    {
+      'target_name': 'simple_decoder',
+      'type': 'executable',
+      'dependencies': [
+        'libvpx',
+      ],
+
+      # Copy the script to the output folder so that we can use it with
+      # absolute path.
+      'copies': [{
+        'destination': '<(shared_generated_dir)/simple_decoder',
+        'files': [
+          'source/libvpx/examples/gen_example_code.sh',
+        ],
+      }],
+
+      # Rule to convert .txt files to .c files.
+      'rules': [
+        {
+          'rule_name': 'generate_example',
+          'extension': 'txt',
+          'inputs': [ '<(shared_generated_dir)/simple_decoder/gen_example_code.sh', ],
+          'outputs': [
+            '<(shared_generated_dir)/<(RULE_INPUT_ROOT).c',
+          ],
+          'action': [
+            'bash',
+            '-c',
+            '<(shared_generated_dir)/simple_decoder/gen_example_code.sh <(RULE_INPUT_PATH) > <(shared_generated_dir)/<(RULE_INPUT_ROOT).c',
+          ],
+          'process_outputs_as_sources': 1,
+          'message': 'Generate libvpx example code <(RULE_INPUT_PATH).',
+        },
+      ],
+      'sources': [
+        'source/libvpx/examples/simple_decoder.txt',
+      ]
+    },
+  ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/third_party/libvpx/libvpx_srcs_arm.gypi b/third_party/libvpx/libvpx_srcs_arm.gypi
new file mode 100644
index 0000000..3fc2c41
--- /dev/null
+++ b/third_party/libvpx/libvpx_srcs_arm.gypi
@@ -0,0 +1,177 @@
+# This file is generated. Do not edit.
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'sources': [
+    'source/libvpx/vp8/common/alloccommon.c',
+    'source/libvpx/vp8/common/alloccommon.h',
+    'source/libvpx/vp8/common/arm/dequantize_arm.c',
+    'source/libvpx/vp8/common/arm/filter_arm.c',
+    'source/libvpx/vp8/common/arm/loopfilter_arm.c',
+    'source/libvpx/vp8/common/arm/reconintra_arm.c',
+    'source/libvpx/vp8/common/arm/variance_arm.c',
+    'source/libvpx/vp8/common/blockd.c',
+    'source/libvpx/vp8/common/blockd.h',
+    'source/libvpx/vp8/common/coefupdateprobs.h',
+    'source/libvpx/vp8/common/common.h',
+    'source/libvpx/vp8/common/debugmodes.c',
+    'source/libvpx/vp8/common/default_coef_probs.h',
+    'source/libvpx/vp8/common/dequantize.c',
+    'source/libvpx/vp8/common/entropy.c',
+    'source/libvpx/vp8/common/entropy.h',
+    'source/libvpx/vp8/common/entropymode.c',
+    'source/libvpx/vp8/common/entropymode.h',
+    'source/libvpx/vp8/common/entropymv.c',
+    'source/libvpx/vp8/common/entropymv.h',
+    'source/libvpx/vp8/common/extend.c',
+    'source/libvpx/vp8/common/extend.h',
+    'source/libvpx/vp8/common/filter.c',
+    'source/libvpx/vp8/common/filter.h',
+    'source/libvpx/vp8/common/findnearmv.c',
+    'source/libvpx/vp8/common/findnearmv.h',
+    'source/libvpx/vp8/common/generic/systemdependent.c',
+    'source/libvpx/vp8/common/header.h',
+    'source/libvpx/vp8/common/idct_blk.c',
+    'source/libvpx/vp8/common/idctllm.c',
+    'source/libvpx/vp8/common/invtrans.h',
+    'source/libvpx/vp8/common/loopfilter.c',
+    'source/libvpx/vp8/common/loopfilter_filters.c',
+    'source/libvpx/vp8/common/loopfilter.h',
+    'source/libvpx/vp8/common/mbpitch.c',
+    'source/libvpx/vp8/common/mfqe.c',
+    'source/libvpx/vp8/common/modecont.c',
+    'source/libvpx/vp8/common/modecont.h',
+    'source/libvpx/vp8/common/mv.h',
+    'source/libvpx/vp8/common/onyxc_int.h',
+    'source/libvpx/vp8/common/onyxd.h',
+    'source/libvpx/vp8/common/onyx.h',
+    'source/libvpx/vp8/common/postproc.c',
+    'source/libvpx/vp8/common/postproc.h',
+    'source/libvpx/vp8/common/ppflags.h',
+    'source/libvpx/vp8/common/pragmas.h',
+    'source/libvpx/vp8/common/quant_common.c',
+    'source/libvpx/vp8/common/quant_common.h',
+    'source/libvpx/vp8/common/reconinter.c',
+    'source/libvpx/vp8/common/reconinter.h',
+    'source/libvpx/vp8/common/reconintra4x4.c',
+    'source/libvpx/vp8/common/reconintra4x4.h',
+    'source/libvpx/vp8/common/reconintra.c',
+    'source/libvpx/vp8/common/rtcd.c',
+    'source/libvpx/vp8/common/sad_c.c',
+    'source/libvpx/vp8/common/setupintrarecon.c',
+    'source/libvpx/vp8/common/setupintrarecon.h',
+    'source/libvpx/vp8/common/swapyv12buffer.c',
+    'source/libvpx/vp8/common/swapyv12buffer.h',
+    'source/libvpx/vp8/common/systemdependent.h',
+    'source/libvpx/vp8/common/threading.h',
+    'source/libvpx/vp8/common/treecoder.c',
+    'source/libvpx/vp8/common/treecoder.h',
+    'source/libvpx/vp8/common/variance_c.c',
+    'source/libvpx/vp8/common/variance.h',
+    'source/libvpx/vp8/common/vp8_entropymodedata.h',
+    'source/libvpx/vp8/decoder/dboolhuff.c',
+    'source/libvpx/vp8/decoder/dboolhuff.h',
+    'source/libvpx/vp8/decoder/decodemv.c',
+    'source/libvpx/vp8/decoder/decodemv.h',
+    'source/libvpx/vp8/decoder/decoderthreading.h',
+    'source/libvpx/vp8/decoder/decodframe.c',
+    'source/libvpx/vp8/decoder/detokenize.c',
+    'source/libvpx/vp8/decoder/detokenize.h',
+    'source/libvpx/vp8/decoder/ec_types.h',
+    'source/libvpx/vp8/decoder/error_concealment.c',
+    'source/libvpx/vp8/decoder/error_concealment.h',
+    'source/libvpx/vp8/decoder/onyxd_if.c',
+    'source/libvpx/vp8/decoder/onyxd_int.h',
+    'source/libvpx/vp8/decoder/threading.c',
+    'source/libvpx/vp8/decoder/treereader.h',
+    'source/libvpx/vp8/encoder/arm/armv5te/boolhuff_armv5te.asm',
+    'source/libvpx/vp8/encoder/arm/armv5te/vp8_packtokens_armv5.asm',
+    'source/libvpx/vp8/encoder/arm/armv5te/vp8_packtokens_mbrow_armv5.asm',
+    'source/libvpx/vp8/encoder/arm/armv5te/vp8_packtokens_partitions_armv5.asm',
+    'source/libvpx/vp8/encoder/arm/boolhuff_arm.c',
+    'source/libvpx/vp8/encoder/arm/dct_arm.c',
+    'source/libvpx/vp8/encoder/arm/quantize_arm.c',
+    'source/libvpx/vp8/encoder/bitstream.c',
+    'source/libvpx/vp8/encoder/bitstream.h',
+    'source/libvpx/vp8/encoder/block.h',
+    'source/libvpx/vp8/encoder/boolhuff.h',
+    'source/libvpx/vp8/encoder/dct.c',
+    'source/libvpx/vp8/encoder/dct_value_cost.h',
+    'source/libvpx/vp8/encoder/dct_value_tokens.h',
+    'source/libvpx/vp8/encoder/defaultcoefcounts.h',
+    'source/libvpx/vp8/encoder/denoising.c',
+    'source/libvpx/vp8/encoder/denoising.h',
+    'source/libvpx/vp8/encoder/encodeframe.c',
+    'source/libvpx/vp8/encoder/encodeframe.h',
+    'source/libvpx/vp8/encoder/encodeintra.c',
+    'source/libvpx/vp8/encoder/encodeintra.h',
+    'source/libvpx/vp8/encoder/encodemb.c',
+    'source/libvpx/vp8/encoder/encodemb.h',
+    'source/libvpx/vp8/encoder/encodemv.c',
+    'source/libvpx/vp8/encoder/encodemv.h',
+    'source/libvpx/vp8/encoder/ethreading.c',
+    'source/libvpx/vp8/encoder/firstpass.h',
+    'source/libvpx/vp8/encoder/lookahead.c',
+    'source/libvpx/vp8/encoder/lookahead.h',
+    'source/libvpx/vp8/encoder/mcomp.c',
+    'source/libvpx/vp8/encoder/mcomp.h',
+    'source/libvpx/vp8/encoder/modecosts.c',
+    'source/libvpx/vp8/encoder/modecosts.h',
+    'source/libvpx/vp8/encoder/mr_dissim.c',
+    'source/libvpx/vp8/encoder/mr_dissim.h',
+    'source/libvpx/vp8/encoder/onyx_if.c',
+    'source/libvpx/vp8/encoder/onyx_int.h',
+    'source/libvpx/vp8/encoder/pickinter.c',
+    'source/libvpx/vp8/encoder/pickinter.h',
+    'source/libvpx/vp8/encoder/picklpf.c',
+    'source/libvpx/vp8/encoder/psnr.c',
+    'source/libvpx/vp8/encoder/psnr.h',
+    'source/libvpx/vp8/encoder/quantize.c',
+    'source/libvpx/vp8/encoder/quantize.h',
+    'source/libvpx/vp8/encoder/ratectrl.c',
+    'source/libvpx/vp8/encoder/ratectrl.h',
+    'source/libvpx/vp8/encoder/rdopt.c',
+    'source/libvpx/vp8/encoder/rdopt.h',
+    'source/libvpx/vp8/encoder/segmentation.c',
+    'source/libvpx/vp8/encoder/segmentation.h',
+    'source/libvpx/vp8/encoder/tokenize.c',
+    'source/libvpx/vp8/encoder/tokenize.h',
+    'source/libvpx/vp8/encoder/treewriter.c',
+    'source/libvpx/vp8/encoder/treewriter.h',
+    'source/libvpx/vp8/vp8_cx_iface.c',
+    'source/libvpx/vp8/vp8_dx_iface.c',
+    'source/libvpx/vpx/internal/vpx_codec_internal.h',
+    'source/libvpx/vpx_mem/include/vpx_mem_intrnl.h',
+    'source/libvpx/vpx_mem/vpx_mem.c',
+    'source/libvpx/vpx_mem/vpx_mem.h',
+    'source/libvpx/vpx_ports/arm_cpudetect.c',
+    'source/libvpx/vpx_ports/arm.h',
+    'source/libvpx/vpx_ports/asm_offsets.h',
+    'source/libvpx/vpx_ports/mem.h',
+    'source/libvpx/vpx_ports/vpx_timer.h',
+    'source/libvpx/vpx_scale/generic/gen_scalers.c',
+    'source/libvpx/vpx_scale/generic/vpxscale.c',
+    'source/libvpx/vpx_scale/generic/yv12config.c',
+    'source/libvpx/vpx_scale/generic/yv12extend.c',
+    'source/libvpx/vpx_scale/generic/yv12extend_generic.h',
+    'source/libvpx/vpx_scale/scale_mode.h',
+    'source/libvpx/vpx_scale/vpxscale.h',
+    'source/libvpx/vpx_scale/yv12config.h',
+    'source/libvpx/vpx/src/vpx_codec.c',
+    'source/libvpx/vpx/src/vpx_decoder.c',
+    'source/libvpx/vpx/src/vpx_encoder.c',
+    'source/libvpx/vpx/src/vpx_image.c',
+    'source/libvpx/vpx/vp8cx.h',
+    'source/libvpx/vpx/vp8dx.h',
+    'source/libvpx/vpx/vp8.h',
+    'source/libvpx/vpx/vpx_codec.h',
+    'source/libvpx/vpx/vpx_codec_impl_bottom.h',
+    'source/libvpx/vpx/vpx_codec_impl_top.h',
+    'source/libvpx/vpx/vpx_decoder.h',
+    'source/libvpx/vpx/vpx_encoder.h',
+    'source/libvpx/vpx/vpx_image.h',
+    'source/libvpx/vpx/vpx_integer.h',
+  ],
+}
diff --git a/third_party/libvpx/libvpx_srcs_arm_neon.gypi b/third_party/libvpx/libvpx_srcs_arm_neon.gypi
new file mode 100644
index 0000000..1d5174f
--- /dev/null
+++ b/third_party/libvpx/libvpx_srcs_arm_neon.gypi
@@ -0,0 +1,248 @@
+# This file is generated. Do not edit.
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'sources': [
+    'source/libvpx/vp8/common/alloccommon.c',
+    'source/libvpx/vp8/common/alloccommon.h',
+    'source/libvpx/vp8/common/arm/armv6/bilinearfilter_v6.asm',
+    'source/libvpx/vp8/common/arm/armv6/copymem16x16_v6.asm',
+    'source/libvpx/vp8/common/arm/armv6/copymem8x4_v6.asm',
+    'source/libvpx/vp8/common/arm/armv6/copymem8x8_v6.asm',
+    'source/libvpx/vp8/common/arm/armv6/dc_only_idct_add_v6.asm',
+    'source/libvpx/vp8/common/arm/armv6/dequant_idct_v6.asm',
+    'source/libvpx/vp8/common/arm/armv6/dequantize_v6.asm',
+    'source/libvpx/vp8/common/arm/armv6/filter_v6.asm',
+    'source/libvpx/vp8/common/arm/armv6/idct_blk_v6.c',
+    'source/libvpx/vp8/common/arm/armv6/idct_v6.asm',
+    'source/libvpx/vp8/common/arm/armv6/intra4x4_predict_v6.asm',
+    'source/libvpx/vp8/common/arm/armv6/iwalsh_v6.asm',
+    'source/libvpx/vp8/common/arm/armv6/loopfilter_v6.asm',
+    'source/libvpx/vp8/common/arm/armv6/simpleloopfilter_v6.asm',
+    'source/libvpx/vp8/common/arm/armv6/sixtappredict8x4_v6.asm',
+    'source/libvpx/vp8/common/arm/armv6/vp8_sad16x16_armv6.asm',
+    'source/libvpx/vp8/common/arm/armv6/vp8_variance16x16_armv6.asm',
+    'source/libvpx/vp8/common/arm/armv6/vp8_variance8x8_armv6.asm',
+    'source/libvpx/vp8/common/arm/armv6/vp8_variance_halfpixvar16x16_h_armv6.asm',
+    'source/libvpx/vp8/common/arm/armv6/vp8_variance_halfpixvar16x16_hv_armv6.asm',
+    'source/libvpx/vp8/common/arm/armv6/vp8_variance_halfpixvar16x16_v_armv6.asm',
+    'source/libvpx/vp8/common/arm/bilinearfilter_arm.c',
+    'source/libvpx/vp8/common/arm/bilinearfilter_arm.h',
+    'source/libvpx/vp8/common/arm/dequantize_arm.c',
+    'source/libvpx/vp8/common/arm/filter_arm.c',
+    'source/libvpx/vp8/common/arm/loopfilter_arm.c',
+    'source/libvpx/vp8/common/arm/neon/bilinearpredict16x16_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/bilinearpredict4x4_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/bilinearpredict8x4_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/bilinearpredict8x8_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/buildintrapredictorsmby_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/copymem16x16_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/copymem8x4_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/copymem8x8_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/dc_only_idct_add_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/dequant_idct_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/dequantizeb_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/idct_blk_neon.c',
+    'source/libvpx/vp8/common/arm/neon/idct_dequant_0_2x_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/idct_dequant_full_2x_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/iwalsh_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/loopfilter_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/loopfiltersimplehorizontaledge_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/loopfiltersimpleverticaledge_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/mbloopfilter_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/sad16_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/sad8_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/save_reg_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/shortidct4x4llm_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/sixtappredict16x16_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/sixtappredict4x4_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/sixtappredict8x4_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/sixtappredict8x8_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/variance_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/vp8_subpixelvariance16x16_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/vp8_subpixelvariance16x16s_neon.asm',
+    'source/libvpx/vp8/common/arm/neon/vp8_subpixelvariance8x8_neon.asm',
+    'source/libvpx/vp8/common/arm/reconintra_arm.c',
+    'source/libvpx/vp8/common/arm/variance_arm.c',
+    'source/libvpx/vp8/common/blockd.c',
+    'source/libvpx/vp8/common/blockd.h',
+    'source/libvpx/vp8/common/coefupdateprobs.h',
+    'source/libvpx/vp8/common/common.h',
+    'source/libvpx/vp8/common/debugmodes.c',
+    'source/libvpx/vp8/common/default_coef_probs.h',
+    'source/libvpx/vp8/common/dequantize.c',
+    'source/libvpx/vp8/common/entropy.c',
+    'source/libvpx/vp8/common/entropy.h',
+    'source/libvpx/vp8/common/entropymode.c',
+    'source/libvpx/vp8/common/entropymode.h',
+    'source/libvpx/vp8/common/entropymv.c',
+    'source/libvpx/vp8/common/entropymv.h',
+    'source/libvpx/vp8/common/extend.c',
+    'source/libvpx/vp8/common/extend.h',
+    'source/libvpx/vp8/common/filter.c',
+    'source/libvpx/vp8/common/filter.h',
+    'source/libvpx/vp8/common/findnearmv.c',
+    'source/libvpx/vp8/common/findnearmv.h',
+    'source/libvpx/vp8/common/generic/systemdependent.c',
+    'source/libvpx/vp8/common/header.h',
+    'source/libvpx/vp8/common/idct_blk.c',
+    'source/libvpx/vp8/common/idctllm.c',
+    'source/libvpx/vp8/common/invtrans.h',
+    'source/libvpx/vp8/common/loopfilter.c',
+    'source/libvpx/vp8/common/loopfilter_filters.c',
+    'source/libvpx/vp8/common/loopfilter.h',
+    'source/libvpx/vp8/common/mbpitch.c',
+    'source/libvpx/vp8/common/mfqe.c',
+    'source/libvpx/vp8/common/modecont.c',
+    'source/libvpx/vp8/common/modecont.h',
+    'source/libvpx/vp8/common/mv.h',
+    'source/libvpx/vp8/common/onyxc_int.h',
+    'source/libvpx/vp8/common/onyxd.h',
+    'source/libvpx/vp8/common/onyx.h',
+    'source/libvpx/vp8/common/postproc.c',
+    'source/libvpx/vp8/common/postproc.h',
+    'source/libvpx/vp8/common/ppflags.h',
+    'source/libvpx/vp8/common/pragmas.h',
+    'source/libvpx/vp8/common/quant_common.c',
+    'source/libvpx/vp8/common/quant_common.h',
+    'source/libvpx/vp8/common/reconinter.c',
+    'source/libvpx/vp8/common/reconinter.h',
+    'source/libvpx/vp8/common/reconintra4x4.c',
+    'source/libvpx/vp8/common/reconintra4x4.h',
+    'source/libvpx/vp8/common/reconintra.c',
+    'source/libvpx/vp8/common/rtcd.c',
+    'source/libvpx/vp8/common/sad_c.c',
+    'source/libvpx/vp8/common/setupintrarecon.c',
+    'source/libvpx/vp8/common/setupintrarecon.h',
+    'source/libvpx/vp8/common/swapyv12buffer.c',
+    'source/libvpx/vp8/common/swapyv12buffer.h',
+    'source/libvpx/vp8/common/systemdependent.h',
+    'source/libvpx/vp8/common/threading.h',
+    'source/libvpx/vp8/common/treecoder.c',
+    'source/libvpx/vp8/common/treecoder.h',
+    'source/libvpx/vp8/common/variance_c.c',
+    'source/libvpx/vp8/common/variance.h',
+    'source/libvpx/vp8/common/vp8_entropymodedata.h',
+    'source/libvpx/vp8/decoder/dboolhuff.c',
+    'source/libvpx/vp8/decoder/dboolhuff.h',
+    'source/libvpx/vp8/decoder/decodemv.c',
+    'source/libvpx/vp8/decoder/decodemv.h',
+    'source/libvpx/vp8/decoder/decoderthreading.h',
+    'source/libvpx/vp8/decoder/decodframe.c',
+    'source/libvpx/vp8/decoder/detokenize.c',
+    'source/libvpx/vp8/decoder/detokenize.h',
+    'source/libvpx/vp8/decoder/ec_types.h',
+    'source/libvpx/vp8/decoder/error_concealment.c',
+    'source/libvpx/vp8/decoder/error_concealment.h',
+    'source/libvpx/vp8/decoder/onyxd_if.c',
+    'source/libvpx/vp8/decoder/onyxd_int.h',
+    'source/libvpx/vp8/decoder/threading.c',
+    'source/libvpx/vp8/decoder/treereader.h',
+    'source/libvpx/vp8/encoder/arm/armv5te/boolhuff_armv5te.asm',
+    'source/libvpx/vp8/encoder/arm/armv5te/vp8_packtokens_armv5.asm',
+    'source/libvpx/vp8/encoder/arm/armv5te/vp8_packtokens_mbrow_armv5.asm',
+    'source/libvpx/vp8/encoder/arm/armv5te/vp8_packtokens_partitions_armv5.asm',
+    'source/libvpx/vp8/encoder/arm/armv6/vp8_fast_quantize_b_armv6.asm',
+    'source/libvpx/vp8/encoder/arm/armv6/vp8_mse16x16_armv6.asm',
+    'source/libvpx/vp8/encoder/arm/armv6/vp8_short_fdct4x4_armv6.asm',
+    'source/libvpx/vp8/encoder/arm/armv6/vp8_subtract_armv6.asm',
+    'source/libvpx/vp8/encoder/arm/armv6/walsh_v6.asm',
+    'source/libvpx/vp8/encoder/arm/boolhuff_arm.c',
+    'source/libvpx/vp8/encoder/arm/dct_arm.c',
+    'source/libvpx/vp8/encoder/arm/neon/fastquantizeb_neon.asm',
+    'source/libvpx/vp8/encoder/arm/neon/picklpf_arm.c',
+    'source/libvpx/vp8/encoder/arm/neon/shortfdct_neon.asm',
+    'source/libvpx/vp8/encoder/arm/neon/subtract_neon.asm',
+    'source/libvpx/vp8/encoder/arm/neon/vp8_memcpy_neon.asm',
+    'source/libvpx/vp8/encoder/arm/neon/vp8_mse16x16_neon.asm',
+    'source/libvpx/vp8/encoder/arm/neon/vp8_shortwalsh4x4_neon.asm',
+    'source/libvpx/vp8/encoder/arm/quantize_arm.c',
+    'source/libvpx/vp8/encoder/bitstream.c',
+    'source/libvpx/vp8/encoder/bitstream.h',
+    'source/libvpx/vp8/encoder/block.h',
+    'source/libvpx/vp8/encoder/boolhuff.h',
+    'source/libvpx/vp8/encoder/dct.c',
+    'source/libvpx/vp8/encoder/dct_value_cost.h',
+    'source/libvpx/vp8/encoder/dct_value_tokens.h',
+    'source/libvpx/vp8/encoder/defaultcoefcounts.h',
+    'source/libvpx/vp8/encoder/denoising.c',
+    'source/libvpx/vp8/encoder/denoising.h',
+    'source/libvpx/vp8/encoder/encodeframe.c',
+    'source/libvpx/vp8/encoder/encodeframe.h',
+    'source/libvpx/vp8/encoder/encodeintra.c',
+    'source/libvpx/vp8/encoder/encodeintra.h',
+    'source/libvpx/vp8/encoder/encodemb.c',
+    'source/libvpx/vp8/encoder/encodemb.h',
+    'source/libvpx/vp8/encoder/encodemv.c',
+    'source/libvpx/vp8/encoder/encodemv.h',
+    'source/libvpx/vp8/encoder/ethreading.c',
+    'source/libvpx/vp8/encoder/firstpass.h',
+    'source/libvpx/vp8/encoder/lookahead.c',
+    'source/libvpx/vp8/encoder/lookahead.h',
+    'source/libvpx/vp8/encoder/mcomp.c',
+    'source/libvpx/vp8/encoder/mcomp.h',
+    'source/libvpx/vp8/encoder/modecosts.c',
+    'source/libvpx/vp8/encoder/modecosts.h',
+    'source/libvpx/vp8/encoder/mr_dissim.c',
+    'source/libvpx/vp8/encoder/mr_dissim.h',
+    'source/libvpx/vp8/encoder/onyx_if.c',
+    'source/libvpx/vp8/encoder/onyx_int.h',
+    'source/libvpx/vp8/encoder/pickinter.c',
+    'source/libvpx/vp8/encoder/pickinter.h',
+    'source/libvpx/vp8/encoder/picklpf.c',
+    'source/libvpx/vp8/encoder/psnr.c',
+    'source/libvpx/vp8/encoder/psnr.h',
+    'source/libvpx/vp8/encoder/quantize.c',
+    'source/libvpx/vp8/encoder/quantize.h',
+    'source/libvpx/vp8/encoder/ratectrl.c',
+    'source/libvpx/vp8/encoder/ratectrl.h',
+    'source/libvpx/vp8/encoder/rdopt.c',
+    'source/libvpx/vp8/encoder/rdopt.h',
+    'source/libvpx/vp8/encoder/segmentation.c',
+    'source/libvpx/vp8/encoder/segmentation.h',
+    'source/libvpx/vp8/encoder/tokenize.c',
+    'source/libvpx/vp8/encoder/tokenize.h',
+    'source/libvpx/vp8/encoder/treewriter.c',
+    'source/libvpx/vp8/encoder/treewriter.h',
+    'source/libvpx/vp8/vp8_cx_iface.c',
+    'source/libvpx/vp8/vp8_dx_iface.c',
+    'source/libvpx/vpx/internal/vpx_codec_internal.h',
+    'source/libvpx/vpx_mem/include/vpx_mem_intrnl.h',
+    'source/libvpx/vpx_mem/vpx_mem.c',
+    'source/libvpx/vpx_mem/vpx_mem.h',
+    'source/libvpx/vpx_ports/arm_cpudetect.c',
+    'source/libvpx/vpx_ports/arm.h',
+    'source/libvpx/vpx_ports/asm_offsets.h',
+    'source/libvpx/vpx_ports/mem.h',
+    'source/libvpx/vpx_ports/vpx_timer.h',
+    'source/libvpx/vpx_scale/arm/neon/vp8_vpxyv12_copyframe_func_neon.asm',
+    'source/libvpx/vpx_scale/arm/neon/vp8_vpxyv12_copysrcframe_func_neon.asm',
+    'source/libvpx/vpx_scale/arm/neon/vp8_vpxyv12_copy_y_neon.asm',
+    'source/libvpx/vpx_scale/arm/neon/vp8_vpxyv12_extendframeborders_neon.asm',
+    'source/libvpx/vpx_scale/arm/neon/yv12extend_arm.c',
+    'source/libvpx/vpx_scale/generic/gen_scalers.c',
+    'source/libvpx/vpx_scale/generic/vpxscale.c',
+    'source/libvpx/vpx_scale/generic/yv12config.c',
+    'source/libvpx/vpx_scale/generic/yv12extend.c',
+    'source/libvpx/vpx_scale/generic/yv12extend_generic.h',
+    'source/libvpx/vpx_scale/scale_mode.h',
+    'source/libvpx/vpx_scale/vpxscale.h',
+    'source/libvpx/vpx_scale/yv12config.h',
+    'source/libvpx/vpx/src/vpx_codec.c',
+    'source/libvpx/vpx/src/vpx_decoder.c',
+    'source/libvpx/vpx/src/vpx_encoder.c',
+    'source/libvpx/vpx/src/vpx_image.c',
+    'source/libvpx/vpx/vp8cx.h',
+    'source/libvpx/vpx/vp8dx.h',
+    'source/libvpx/vpx/vp8.h',
+    'source/libvpx/vpx/vpx_codec.h',
+    'source/libvpx/vpx/vpx_codec_impl_bottom.h',
+    'source/libvpx/vpx/vpx_codec_impl_top.h',
+    'source/libvpx/vpx/vpx_decoder.h',
+    'source/libvpx/vpx/vpx_encoder.h',
+    'source/libvpx/vpx/vpx_image.h',
+    'source/libvpx/vpx/vpx_integer.h',
+  ],
+}
diff --git a/third_party/libvpx/libvpx_srcs_x86.gypi b/third_party/libvpx/libvpx_srcs_x86.gypi
new file mode 100644
index 0000000..f4737f4
--- /dev/null
+++ b/third_party/libvpx/libvpx_srcs_x86.gypi
@@ -0,0 +1,216 @@
+# This file is generated. Do not edit.
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'sources': [
+    'source/libvpx/vp8/common/alloccommon.c',
+    'source/libvpx/vp8/common/alloccommon.h',
+    'source/libvpx/vp8/common/blockd.c',
+    'source/libvpx/vp8/common/blockd.h',
+    'source/libvpx/vp8/common/coefupdateprobs.h',
+    'source/libvpx/vp8/common/common.h',
+    'source/libvpx/vp8/common/debugmodes.c',
+    'source/libvpx/vp8/common/default_coef_probs.h',
+    'source/libvpx/vp8/common/dequantize.c',
+    'source/libvpx/vp8/common/entropy.c',
+    'source/libvpx/vp8/common/entropy.h',
+    'source/libvpx/vp8/common/entropymode.c',
+    'source/libvpx/vp8/common/entropymode.h',
+    'source/libvpx/vp8/common/entropymv.c',
+    'source/libvpx/vp8/common/entropymv.h',
+    'source/libvpx/vp8/common/extend.c',
+    'source/libvpx/vp8/common/extend.h',
+    'source/libvpx/vp8/common/filter.c',
+    'source/libvpx/vp8/common/filter.h',
+    'source/libvpx/vp8/common/findnearmv.c',
+    'source/libvpx/vp8/common/findnearmv.h',
+    'source/libvpx/vp8/common/generic/systemdependent.c',
+    'source/libvpx/vp8/common/header.h',
+    'source/libvpx/vp8/common/idct_blk.c',
+    'source/libvpx/vp8/common/idctllm.c',
+    'source/libvpx/vp8/common/invtrans.h',
+    'source/libvpx/vp8/common/loopfilter.c',
+    'source/libvpx/vp8/common/loopfilter_filters.c',
+    'source/libvpx/vp8/common/loopfilter.h',
+    'source/libvpx/vp8/common/mbpitch.c',
+    'source/libvpx/vp8/common/mfqe.c',
+    'source/libvpx/vp8/common/modecont.c',
+    'source/libvpx/vp8/common/modecont.h',
+    'source/libvpx/vp8/common/mv.h',
+    'source/libvpx/vp8/common/onyxc_int.h',
+    'source/libvpx/vp8/common/onyxd.h',
+    'source/libvpx/vp8/common/onyx.h',
+    'source/libvpx/vp8/common/postproc.c',
+    'source/libvpx/vp8/common/postproc.h',
+    'source/libvpx/vp8/common/ppflags.h',
+    'source/libvpx/vp8/common/pragmas.h',
+    'source/libvpx/vp8/common/quant_common.c',
+    'source/libvpx/vp8/common/quant_common.h',
+    'source/libvpx/vp8/common/reconinter.c',
+    'source/libvpx/vp8/common/reconinter.h',
+    'source/libvpx/vp8/common/reconintra4x4.c',
+    'source/libvpx/vp8/common/reconintra4x4.h',
+    'source/libvpx/vp8/common/reconintra.c',
+    'source/libvpx/vp8/common/rtcd.c',
+    'source/libvpx/vp8/common/sad_c.c',
+    'source/libvpx/vp8/common/setupintrarecon.c',
+    'source/libvpx/vp8/common/setupintrarecon.h',
+    'source/libvpx/vp8/common/swapyv12buffer.c',
+    'source/libvpx/vp8/common/swapyv12buffer.h',
+    'source/libvpx/vp8/common/systemdependent.h',
+    'source/libvpx/vp8/common/threading.h',
+    'source/libvpx/vp8/common/treecoder.c',
+    'source/libvpx/vp8/common/treecoder.h',
+    'source/libvpx/vp8/common/variance_c.c',
+    'source/libvpx/vp8/common/variance.h',
+    'source/libvpx/vp8/common/vp8_entropymodedata.h',
+    'source/libvpx/vp8/common/x86/dequantize_mmx.asm',
+    'source/libvpx/vp8/common/x86/filter_x86.c',
+    'source/libvpx/vp8/common/x86/filter_x86.h',
+    'source/libvpx/vp8/common/x86/idct_blk_mmx.c',
+    'source/libvpx/vp8/common/x86/idct_blk_sse2.c',
+    'source/libvpx/vp8/common/x86/idctllm_mmx.asm',
+    'source/libvpx/vp8/common/x86/idctllm_sse2.asm',
+    'source/libvpx/vp8/common/x86/iwalsh_mmx.asm',
+    'source/libvpx/vp8/common/x86/iwalsh_sse2.asm',
+    'source/libvpx/vp8/common/x86/loopfilter_mmx.asm',
+    'source/libvpx/vp8/common/x86/loopfilter_sse2.asm',
+    'source/libvpx/vp8/common/x86/loopfilter_x86.c',
+    'source/libvpx/vp8/common/x86/mfqe_sse2.asm',
+    'source/libvpx/vp8/common/x86/postproc_mmx.asm',
+    'source/libvpx/vp8/common/x86/postproc_sse2.asm',
+    'source/libvpx/vp8/common/x86/postproc_x86.c',
+    'source/libvpx/vp8/common/x86/recon_mmx.asm',
+    'source/libvpx/vp8/common/x86/recon_sse2.asm',
+    'source/libvpx/vp8/common/x86/recon_wrapper_sse2.c',
+    'source/libvpx/vp8/common/x86/sad_mmx.asm',
+    'source/libvpx/vp8/common/x86/sad_sse2.asm',
+    'source/libvpx/vp8/common/x86/sad_sse3.asm',
+    'source/libvpx/vp8/common/x86/sad_sse4.asm',
+    'source/libvpx/vp8/common/x86/sad_ssse3.asm',
+    'source/libvpx/vp8/common/x86/subpixel_mmx.asm',
+    'source/libvpx/vp8/common/x86/subpixel_sse2.asm',
+    'source/libvpx/vp8/common/x86/subpixel_ssse3.asm',
+    'source/libvpx/vp8/common/x86/variance_impl_mmx.asm',
+    'source/libvpx/vp8/common/x86/variance_impl_sse2.asm',
+    'source/libvpx/vp8/common/x86/variance_impl_ssse3.asm',
+    'source/libvpx/vp8/common/x86/variance_mmx.c',
+    'source/libvpx/vp8/common/x86/variance_sse2.c',
+    'source/libvpx/vp8/common/x86/variance_ssse3.c',
+    'source/libvpx/vp8/common/x86/vp8_asm_stubs.c',
+    'source/libvpx/vp8/decoder/dboolhuff.c',
+    'source/libvpx/vp8/decoder/dboolhuff.h',
+    'source/libvpx/vp8/decoder/decodemv.c',
+    'source/libvpx/vp8/decoder/decodemv.h',
+    'source/libvpx/vp8/decoder/decoderthreading.h',
+    'source/libvpx/vp8/decoder/decodframe.c',
+    'source/libvpx/vp8/decoder/detokenize.c',
+    'source/libvpx/vp8/decoder/detokenize.h',
+    'source/libvpx/vp8/decoder/ec_types.h',
+    'source/libvpx/vp8/decoder/error_concealment.c',
+    'source/libvpx/vp8/decoder/error_concealment.h',
+    'source/libvpx/vp8/decoder/onyxd_if.c',
+    'source/libvpx/vp8/decoder/onyxd_int.h',
+    'source/libvpx/vp8/decoder/threading.c',
+    'source/libvpx/vp8/decoder/treereader.h',
+    'source/libvpx/vp8/encoder/bitstream.c',
+    'source/libvpx/vp8/encoder/bitstream.h',
+    'source/libvpx/vp8/encoder/block.h',
+    'source/libvpx/vp8/encoder/boolhuff.c',
+    'source/libvpx/vp8/encoder/boolhuff.h',
+    'source/libvpx/vp8/encoder/dct.c',
+    'source/libvpx/vp8/encoder/dct_value_cost.h',
+    'source/libvpx/vp8/encoder/dct_value_tokens.h',
+    'source/libvpx/vp8/encoder/defaultcoefcounts.h',
+    'source/libvpx/vp8/encoder/denoising.c',
+    'source/libvpx/vp8/encoder/denoising.h',
+    'source/libvpx/vp8/encoder/encodeframe.c',
+    'source/libvpx/vp8/encoder/encodeframe.h',
+    'source/libvpx/vp8/encoder/encodeintra.c',
+    'source/libvpx/vp8/encoder/encodeintra.h',
+    'source/libvpx/vp8/encoder/encodemb.c',
+    'source/libvpx/vp8/encoder/encodemb.h',
+    'source/libvpx/vp8/encoder/encodemv.c',
+    'source/libvpx/vp8/encoder/encodemv.h',
+    'source/libvpx/vp8/encoder/ethreading.c',
+    'source/libvpx/vp8/encoder/firstpass.h',
+    'source/libvpx/vp8/encoder/lookahead.c',
+    'source/libvpx/vp8/encoder/lookahead.h',
+    'source/libvpx/vp8/encoder/mcomp.c',
+    'source/libvpx/vp8/encoder/mcomp.h',
+    'source/libvpx/vp8/encoder/modecosts.c',
+    'source/libvpx/vp8/encoder/modecosts.h',
+    'source/libvpx/vp8/encoder/mr_dissim.c',
+    'source/libvpx/vp8/encoder/mr_dissim.h',
+    'source/libvpx/vp8/encoder/onyx_if.c',
+    'source/libvpx/vp8/encoder/onyx_int.h',
+    'source/libvpx/vp8/encoder/pickinter.c',
+    'source/libvpx/vp8/encoder/pickinter.h',
+    'source/libvpx/vp8/encoder/picklpf.c',
+    'source/libvpx/vp8/encoder/psnr.c',
+    'source/libvpx/vp8/encoder/psnr.h',
+    'source/libvpx/vp8/encoder/quantize.c',
+    'source/libvpx/vp8/encoder/quantize.h',
+    'source/libvpx/vp8/encoder/ratectrl.c',
+    'source/libvpx/vp8/encoder/ratectrl.h',
+    'source/libvpx/vp8/encoder/rdopt.c',
+    'source/libvpx/vp8/encoder/rdopt.h',
+    'source/libvpx/vp8/encoder/segmentation.c',
+    'source/libvpx/vp8/encoder/segmentation.h',
+    'source/libvpx/vp8/encoder/tokenize.c',
+    'source/libvpx/vp8/encoder/tokenize.h',
+    'source/libvpx/vp8/encoder/treewriter.c',
+    'source/libvpx/vp8/encoder/treewriter.h',
+    'source/libvpx/vp8/encoder/x86/dct_mmx.asm',
+    'source/libvpx/vp8/encoder/x86/dct_sse2.asm',
+    # Manually removed; refer to libvpx.gyp
+    #'source/libvpx/vp8/encoder/x86/denoising_sse2.c',
+    'source/libvpx/vp8/encoder/x86/encodeopt.asm',
+    'source/libvpx/vp8/encoder/x86/fwalsh_sse2.asm',
+    'source/libvpx/vp8/encoder/x86/quantize_mmx.asm',
+    'source/libvpx/vp8/encoder/x86/quantize_sse2.asm',
+    'source/libvpx/vp8/encoder/x86/quantize_sse4.asm',
+    'source/libvpx/vp8/encoder/x86/quantize_ssse3.asm',
+    'source/libvpx/vp8/encoder/x86/subtract_mmx.asm',
+    'source/libvpx/vp8/encoder/x86/subtract_sse2.asm',
+    'source/libvpx/vp8/encoder/x86/vp8_enc_stubs_mmx.c',
+    'source/libvpx/vp8/encoder/x86/vp8_enc_stubs_sse2.c',
+    'source/libvpx/vp8/vp8_cx_iface.c',
+    'source/libvpx/vp8/vp8_dx_iface.c',
+    'source/libvpx/vpx/internal/vpx_codec_internal.h',
+    'source/libvpx/vpx_mem/include/vpx_mem_intrnl.h',
+    'source/libvpx/vpx_mem/vpx_mem.c',
+    'source/libvpx/vpx_mem/vpx_mem.h',
+    'source/libvpx/vpx_ports/asm_offsets.h',
+    'source/libvpx/vpx_ports/emms.asm',
+    'source/libvpx/vpx_ports/mem.h',
+    'source/libvpx/vpx_ports/vpx_timer.h',
+    'source/libvpx/vpx_ports/x86_abi_support.asm',
+    'source/libvpx/vpx_ports/x86_cpuid.c',
+    'source/libvpx/vpx_ports/x86.h',
+    'source/libvpx/vpx_scale/generic/gen_scalers.c',
+    'source/libvpx/vpx_scale/generic/vpxscale.c',
+    'source/libvpx/vpx_scale/generic/yv12config.c',
+    'source/libvpx/vpx_scale/generic/yv12extend.c',
+    'source/libvpx/vpx_scale/generic/yv12extend_generic.h',
+    'source/libvpx/vpx_scale/scale_mode.h',
+    'source/libvpx/vpx_scale/vpxscale.h',
+    'source/libvpx/vpx_scale/yv12config.h',
+    'source/libvpx/vpx/src/vpx_codec.c',
+    'source/libvpx/vpx/src/vpx_decoder.c',
+    'source/libvpx/vpx/src/vpx_encoder.c',
+    'source/libvpx/vpx/src/vpx_image.c',
+    'source/libvpx/vpx/vp8cx.h',
+    'source/libvpx/vpx/vp8dx.h',
+    'source/libvpx/vpx/vp8.h',
+    'source/libvpx/vpx/vpx_codec.h',
+    'source/libvpx/vpx/vpx_codec_impl_bottom.h',
+    'source/libvpx/vpx/vpx_codec_impl_top.h',
+    'source/libvpx/vpx/vpx_decoder.h',
+    'source/libvpx/vpx/vpx_encoder.h',
+    'source/libvpx/vpx/vpx_image.h',
+    'source/libvpx/vpx/vpx_integer.h',
+  ],
+}
diff --git a/third_party/libvpx/libvpx_srcs_x86_64.gypi b/third_party/libvpx/libvpx_srcs_x86_64.gypi
new file mode 100644
index 0000000..70b4a8d
--- /dev/null
+++ b/third_party/libvpx/libvpx_srcs_x86_64.gypi
@@ -0,0 +1,218 @@
+# This file is generated. Do not edit.
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'sources': [
+    'source/libvpx/vp8/common/alloccommon.c',
+    'source/libvpx/vp8/common/alloccommon.h',
+    'source/libvpx/vp8/common/blockd.c',
+    'source/libvpx/vp8/common/blockd.h',
+    'source/libvpx/vp8/common/coefupdateprobs.h',
+    'source/libvpx/vp8/common/common.h',
+    'source/libvpx/vp8/common/debugmodes.c',
+    'source/libvpx/vp8/common/default_coef_probs.h',
+    'source/libvpx/vp8/common/dequantize.c',
+    'source/libvpx/vp8/common/entropy.c',
+    'source/libvpx/vp8/common/entropy.h',
+    'source/libvpx/vp8/common/entropymode.c',
+    'source/libvpx/vp8/common/entropymode.h',
+    'source/libvpx/vp8/common/entropymv.c',
+    'source/libvpx/vp8/common/entropymv.h',
+    'source/libvpx/vp8/common/extend.c',
+    'source/libvpx/vp8/common/extend.h',
+    'source/libvpx/vp8/common/filter.c',
+    'source/libvpx/vp8/common/filter.h',
+    'source/libvpx/vp8/common/findnearmv.c',
+    'source/libvpx/vp8/common/findnearmv.h',
+    'source/libvpx/vp8/common/generic/systemdependent.c',
+    'source/libvpx/vp8/common/header.h',
+    'source/libvpx/vp8/common/idct_blk.c',
+    'source/libvpx/vp8/common/idctllm.c',
+    'source/libvpx/vp8/common/invtrans.h',
+    'source/libvpx/vp8/common/loopfilter.c',
+    'source/libvpx/vp8/common/loopfilter_filters.c',
+    'source/libvpx/vp8/common/loopfilter.h',
+    'source/libvpx/vp8/common/mbpitch.c',
+    'source/libvpx/vp8/common/mfqe.c',
+    'source/libvpx/vp8/common/modecont.c',
+    'source/libvpx/vp8/common/modecont.h',
+    'source/libvpx/vp8/common/mv.h',
+    'source/libvpx/vp8/common/onyxc_int.h',
+    'source/libvpx/vp8/common/onyxd.h',
+    'source/libvpx/vp8/common/onyx.h',
+    'source/libvpx/vp8/common/postproc.c',
+    'source/libvpx/vp8/common/postproc.h',
+    'source/libvpx/vp8/common/ppflags.h',
+    'source/libvpx/vp8/common/pragmas.h',
+    'source/libvpx/vp8/common/quant_common.c',
+    'source/libvpx/vp8/common/quant_common.h',
+    'source/libvpx/vp8/common/reconinter.c',
+    'source/libvpx/vp8/common/reconinter.h',
+    'source/libvpx/vp8/common/reconintra4x4.c',
+    'source/libvpx/vp8/common/reconintra4x4.h',
+    'source/libvpx/vp8/common/reconintra.c',
+    'source/libvpx/vp8/common/rtcd.c',
+    'source/libvpx/vp8/common/sad_c.c',
+    'source/libvpx/vp8/common/setupintrarecon.c',
+    'source/libvpx/vp8/common/setupintrarecon.h',
+    'source/libvpx/vp8/common/swapyv12buffer.c',
+    'source/libvpx/vp8/common/swapyv12buffer.h',
+    'source/libvpx/vp8/common/systemdependent.h',
+    'source/libvpx/vp8/common/threading.h',
+    'source/libvpx/vp8/common/treecoder.c',
+    'source/libvpx/vp8/common/treecoder.h',
+    'source/libvpx/vp8/common/variance_c.c',
+    'source/libvpx/vp8/common/variance.h',
+    'source/libvpx/vp8/common/vp8_entropymodedata.h',
+    'source/libvpx/vp8/common/x86/dequantize_mmx.asm',
+    'source/libvpx/vp8/common/x86/filter_x86.c',
+    'source/libvpx/vp8/common/x86/filter_x86.h',
+    'source/libvpx/vp8/common/x86/idct_blk_mmx.c',
+    'source/libvpx/vp8/common/x86/idct_blk_sse2.c',
+    'source/libvpx/vp8/common/x86/idctllm_mmx.asm',
+    'source/libvpx/vp8/common/x86/idctllm_sse2.asm',
+    'source/libvpx/vp8/common/x86/iwalsh_mmx.asm',
+    'source/libvpx/vp8/common/x86/iwalsh_sse2.asm',
+    'source/libvpx/vp8/common/x86/loopfilter_block_sse2.asm',
+    'source/libvpx/vp8/common/x86/loopfilter_mmx.asm',
+    'source/libvpx/vp8/common/x86/loopfilter_sse2.asm',
+    'source/libvpx/vp8/common/x86/loopfilter_x86.c',
+    'source/libvpx/vp8/common/x86/mfqe_sse2.asm',
+    'source/libvpx/vp8/common/x86/postproc_mmx.asm',
+    'source/libvpx/vp8/common/x86/postproc_sse2.asm',
+    'source/libvpx/vp8/common/x86/postproc_x86.c',
+    'source/libvpx/vp8/common/x86/recon_mmx.asm',
+    'source/libvpx/vp8/common/x86/recon_sse2.asm',
+    'source/libvpx/vp8/common/x86/recon_wrapper_sse2.c',
+    'source/libvpx/vp8/common/x86/sad_mmx.asm',
+    'source/libvpx/vp8/common/x86/sad_sse2.asm',
+    'source/libvpx/vp8/common/x86/sad_sse3.asm',
+    'source/libvpx/vp8/common/x86/sad_sse4.asm',
+    'source/libvpx/vp8/common/x86/sad_ssse3.asm',
+    'source/libvpx/vp8/common/x86/subpixel_mmx.asm',
+    'source/libvpx/vp8/common/x86/subpixel_sse2.asm',
+    'source/libvpx/vp8/common/x86/subpixel_ssse3.asm',
+    'source/libvpx/vp8/common/x86/variance_impl_mmx.asm',
+    'source/libvpx/vp8/common/x86/variance_impl_sse2.asm',
+    'source/libvpx/vp8/common/x86/variance_impl_ssse3.asm',
+    'source/libvpx/vp8/common/x86/variance_mmx.c',
+    'source/libvpx/vp8/common/x86/variance_sse2.c',
+    'source/libvpx/vp8/common/x86/variance_ssse3.c',
+    'source/libvpx/vp8/common/x86/vp8_asm_stubs.c',
+    'source/libvpx/vp8/decoder/dboolhuff.c',
+    'source/libvpx/vp8/decoder/dboolhuff.h',
+    'source/libvpx/vp8/decoder/decodemv.c',
+    'source/libvpx/vp8/decoder/decodemv.h',
+    'source/libvpx/vp8/decoder/decoderthreading.h',
+    'source/libvpx/vp8/decoder/decodframe.c',
+    'source/libvpx/vp8/decoder/detokenize.c',
+    'source/libvpx/vp8/decoder/detokenize.h',
+    'source/libvpx/vp8/decoder/ec_types.h',
+    'source/libvpx/vp8/decoder/error_concealment.c',
+    'source/libvpx/vp8/decoder/error_concealment.h',
+    'source/libvpx/vp8/decoder/onyxd_if.c',
+    'source/libvpx/vp8/decoder/onyxd_int.h',
+    'source/libvpx/vp8/decoder/threading.c',
+    'source/libvpx/vp8/decoder/treereader.h',
+    'source/libvpx/vp8/encoder/bitstream.c',
+    'source/libvpx/vp8/encoder/bitstream.h',
+    'source/libvpx/vp8/encoder/block.h',
+    'source/libvpx/vp8/encoder/boolhuff.c',
+    'source/libvpx/vp8/encoder/boolhuff.h',
+    'source/libvpx/vp8/encoder/dct.c',
+    'source/libvpx/vp8/encoder/dct_value_cost.h',
+    'source/libvpx/vp8/encoder/dct_value_tokens.h',
+    'source/libvpx/vp8/encoder/defaultcoefcounts.h',
+    'source/libvpx/vp8/encoder/denoising.c',
+    'source/libvpx/vp8/encoder/denoising.h',
+    'source/libvpx/vp8/encoder/encodeframe.c',
+    'source/libvpx/vp8/encoder/encodeframe.h',
+    'source/libvpx/vp8/encoder/encodeintra.c',
+    'source/libvpx/vp8/encoder/encodeintra.h',
+    'source/libvpx/vp8/encoder/encodemb.c',
+    'source/libvpx/vp8/encoder/encodemb.h',
+    'source/libvpx/vp8/encoder/encodemv.c',
+    'source/libvpx/vp8/encoder/encodemv.h',
+    'source/libvpx/vp8/encoder/ethreading.c',
+    'source/libvpx/vp8/encoder/firstpass.h',
+    'source/libvpx/vp8/encoder/lookahead.c',
+    'source/libvpx/vp8/encoder/lookahead.h',
+    'source/libvpx/vp8/encoder/mcomp.c',
+    'source/libvpx/vp8/encoder/mcomp.h',
+    'source/libvpx/vp8/encoder/modecosts.c',
+    'source/libvpx/vp8/encoder/modecosts.h',
+    'source/libvpx/vp8/encoder/mr_dissim.c',
+    'source/libvpx/vp8/encoder/mr_dissim.h',
+    'source/libvpx/vp8/encoder/onyx_if.c',
+    'source/libvpx/vp8/encoder/onyx_int.h',
+    'source/libvpx/vp8/encoder/pickinter.c',
+    'source/libvpx/vp8/encoder/pickinter.h',
+    'source/libvpx/vp8/encoder/picklpf.c',
+    'source/libvpx/vp8/encoder/psnr.c',
+    'source/libvpx/vp8/encoder/psnr.h',
+    'source/libvpx/vp8/encoder/quantize.c',
+    'source/libvpx/vp8/encoder/quantize.h',
+    'source/libvpx/vp8/encoder/ratectrl.c',
+    'source/libvpx/vp8/encoder/ratectrl.h',
+    'source/libvpx/vp8/encoder/rdopt.c',
+    'source/libvpx/vp8/encoder/rdopt.h',
+    'source/libvpx/vp8/encoder/segmentation.c',
+    'source/libvpx/vp8/encoder/segmentation.h',
+    'source/libvpx/vp8/encoder/tokenize.c',
+    'source/libvpx/vp8/encoder/tokenize.h',
+    'source/libvpx/vp8/encoder/treewriter.c',
+    'source/libvpx/vp8/encoder/treewriter.h',
+    'source/libvpx/vp8/encoder/x86/dct_mmx.asm',
+    'source/libvpx/vp8/encoder/x86/dct_sse2.asm',
+    # Manually removed; refer to libvpx.gyp
+    #'source/libvpx/vp8/encoder/x86/denoising_sse2.c',
+    'source/libvpx/vp8/encoder/x86/encodeopt.asm',
+    'source/libvpx/vp8/encoder/x86/fwalsh_sse2.asm',
+    'source/libvpx/vp8/encoder/x86/quantize_mmx.asm',
+    'source/libvpx/vp8/encoder/x86/quantize_sse2.asm',
+    'source/libvpx/vp8/encoder/x86/quantize_sse4.asm',
+    'source/libvpx/vp8/encoder/x86/quantize_ssse3.asm',
+    'source/libvpx/vp8/encoder/x86/ssim_opt.asm',
+    'source/libvpx/vp8/encoder/x86/subtract_mmx.asm',
+    'source/libvpx/vp8/encoder/x86/subtract_sse2.asm',
+    'source/libvpx/vp8/encoder/x86/vp8_enc_stubs_mmx.c',
+    'source/libvpx/vp8/encoder/x86/vp8_enc_stubs_sse2.c',
+    'source/libvpx/vp8/vp8_cx_iface.c',
+    'source/libvpx/vp8/vp8_dx_iface.c',
+    'source/libvpx/vpx/internal/vpx_codec_internal.h',
+    'source/libvpx/vpx_mem/include/vpx_mem_intrnl.h',
+    'source/libvpx/vpx_mem/vpx_mem.c',
+    'source/libvpx/vpx_mem/vpx_mem.h',
+    'source/libvpx/vpx_ports/asm_offsets.h',
+    'source/libvpx/vpx_ports/emms.asm',
+    'source/libvpx/vpx_ports/mem.h',
+    'source/libvpx/vpx_ports/vpx_timer.h',
+    'source/libvpx/vpx_ports/x86_abi_support.asm',
+    'source/libvpx/vpx_ports/x86_cpuid.c',
+    'source/libvpx/vpx_ports/x86.h',
+    'source/libvpx/vpx_scale/generic/gen_scalers.c',
+    'source/libvpx/vpx_scale/generic/vpxscale.c',
+    'source/libvpx/vpx_scale/generic/yv12config.c',
+    'source/libvpx/vpx_scale/generic/yv12extend.c',
+    'source/libvpx/vpx_scale/generic/yv12extend_generic.h',
+    'source/libvpx/vpx_scale/scale_mode.h',
+    'source/libvpx/vpx_scale/vpxscale.h',
+    'source/libvpx/vpx_scale/yv12config.h',
+    'source/libvpx/vpx/src/vpx_codec.c',
+    'source/libvpx/vpx/src/vpx_decoder.c',
+    'source/libvpx/vpx/src/vpx_encoder.c',
+    'source/libvpx/vpx/src/vpx_image.c',
+    'source/libvpx/vpx/vp8cx.h',
+    'source/libvpx/vpx/vp8dx.h',
+    'source/libvpx/vpx/vp8.h',
+    'source/libvpx/vpx/vpx_codec.h',
+    'source/libvpx/vpx/vpx_codec_impl_bottom.h',
+    'source/libvpx/vpx/vpx_codec_impl_top.h',
+    'source/libvpx/vpx/vpx_decoder.h',
+    'source/libvpx/vpx/vpx_encoder.h',
+    'source/libvpx/vpx/vpx_image.h',
+    'source/libvpx/vpx/vpx_integer.h',
+  ],
+}
diff --git a/third_party/libvpx/lint_config.sh b/third_party/libvpx/lint_config.sh
new file mode 100755
index 0000000..4ff08c7
--- /dev/null
+++ b/third_party/libvpx/lint_config.sh
@@ -0,0 +1,105 @@
+#!/bin/bash -e
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script is used to compare vpx_config.h and vpx_config.asm to
+# verify the two files match.
+#
+# Arguments:
+#
+# -h - C Header file.
+# -a - ASM file.
+# -p - Print the options if correct.
+# -o - Output file.
+#
+# Usage:
+#
+# # Compare the two configuration files and output the final results.
+# ./lint_config.sh -h vpx_config.h -a vpx_config.asm -o libvpx.config -p
+
+print_final="no"
+
+while getopts "h:a:o:p" flag
+do
+  if [ "$flag" = "h" ]; then
+    header_file=$OPTARG
+  elif [ "$flag" = "a" ]; then
+    asm_file=$OPTARG
+  elif [ "$flag" = "o" ]; then
+    out_file=$OPTARG
+  elif [ "$flag" = "p" ]; then
+    print_final="yes"
+  fi
+done
+
+if [ -z "$header_file" ]; then
+  echo "Header file not specified."
+  false
+  exit
+fi
+
+if [ -z "$asm_file" ]; then
+  echo "ASM file not specified."
+  false
+  exit
+fi
+
+# Concat header file and assembly file and select those ended with 0 or 1.
+combined_config="$(cat $header_file $asm_file | grep -E ' +[01] *$')"
+
+# Extra filtering for known exceptions.
+combined_config="$(echo "$combined_config" | grep -v DO1STROUNDING)"
+
+# Remove all spaces.
+combined_config="$(echo "$combined_config" | sed 's/[ \t]//g')"
+
+# Remove #define in the header file.
+combined_config="$(echo "$combined_config" | sed 's/.*define//')"
+
+# Remove equ in the ASM file.
+combined_config="$(echo "$combined_config" | sed 's/\.equ//')" # gas style
+combined_config="$(echo "$combined_config" | sed 's/equ//')" # rvds style
+
+# Remove useless comma in gas style assembly file.
+combined_config="$(echo "$combined_config" | sed 's/,//')"
+
+# Substitute 0 with =no.
+combined_config="$(echo "$combined_config" | sed 's/0$/=no/')"
+
+# Substitute 1 with =yes.
+combined_config="$(echo "$combined_config" | sed 's/1$/=yes/')"
+
+# Find the mismatch variables.
+odd_config="$(echo "$combined_config" | sort | uniq -u)"
+odd_vars="$(echo "$odd_config" | sed 's/=.*//' | uniq)"
+
+for var in $odd_vars; do
+  echo "Error: Configuration mismatch for $var."
+  echo "Header file: $header_file"
+  echo "$(cat -n $header_file | grep "$var[ \t]")"
+  echo "Assembly file: $asm_file"
+  echo "$(cat -n $asm_file | grep "$var[ \t]")"
+  echo ""
+done
+
+if [ -n "$odd_vars" ]; then
+  false
+  exit
+fi
+
+if [ "$print_final" = "no" ]; then
+  exit
+fi
+
+# Do some additional filter to make libvpx happy.
+combined_config="$(echo "$combined_config" | grep -v ARCH_X86=no)"
+combined_config="$(echo "$combined_config" | grep -v ARCH_X86_64=no)"
+
+# Print out the unique configurations.
+if [ -n "$out_file" ]; then
+  echo "$combined_config" | sort | uniq > $out_file
+else
+  echo "$combined_config" | sort | uniq
+fi
diff --git a/third_party/libvpx/obj_int_extract.sh b/third_party/libvpx/obj_int_extract.sh
new file mode 100755
index 0000000..648790a
--- /dev/null
+++ b/third_party/libvpx/obj_int_extract.sh
@@ -0,0 +1,30 @@
+#!/bin/bash -e
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script is used to run obj_int_extract and output the result to a
+# file.
+#
+# Arguments:
+#
+# -e - Executable of obj_int_extract.
+# -f - ASM format.
+# -b - Object binary file.
+# -o - Output file.
+#
+
+while getopts "e:f:b:o:" flag; do
+  if [ "$flag" = "e" ]; then
+    bin_file=$OPTARG
+  elif [ "$flag" = "f" ]; then
+    asm_format=$OPTARG
+  elif [ "$flag" = "b" ]; then
+    obj_file=$OPTARG
+  elif [ "$flag" = "o" ]; then
+    out_file=$OPTARG
+  fi
+done
+
+"$bin_file" "$asm_format" "$obj_file" > "$out_file"
diff --git a/third_party/libvpx/source/config/linux/arm-neon/vpx_config.asm b/third_party/libvpx/source/config/linux/arm-neon/vpx_config.asm
new file mode 100644
index 0000000..8ba484e
--- /dev/null
+++ b/third_party/libvpx/source/config/linux/arm-neon/vpx_config.asm
@@ -0,0 +1,78 @@
+@ This file was created from a .asm file
+@  using the ads2gas.pl script.
+	.equ DO1STROUNDING, 0
+.equ ARCH_ARM ,  1
+.equ ARCH_MIPS ,  0
+.equ ARCH_X86 ,  0
+.equ ARCH_X86_64 ,  0
+.equ ARCH_PPC32 ,  0
+.equ ARCH_PPC64 ,  0
+.equ HAVE_ARMV5TE ,  1
+.equ HAVE_ARMV6 ,  1
+.equ HAVE_ARMV7 ,  1
+.equ HAVE_EDSP ,  1
+.equ HAVE_MEDIA ,  1
+.equ HAVE_NEON ,  1
+.equ HAVE_MIPS32 ,  0
+.equ HAVE_MMX ,  0
+.equ HAVE_SSE ,  0
+.equ HAVE_SSE2 ,  0
+.equ HAVE_SSE3 ,  0
+.equ HAVE_SSSE3 ,  0
+.equ HAVE_SSE4_1 ,  0
+.equ HAVE_ALTIVEC ,  0
+.equ HAVE_VPX_PORTS ,  1
+.equ HAVE_STDINT_H ,  1
+.equ HAVE_ALT_TREE_LAYOUT ,  0
+.equ HAVE_PTHREAD_H ,  1
+.equ HAVE_SYS_MMAN_H ,  1
+.equ HAVE_UNISTD_H ,  1
+.equ CONFIG_EXTERNAL_BUILD ,  0
+.equ CONFIG_INSTALL_DOCS ,  0
+.equ CONFIG_INSTALL_BINS ,  0
+.equ CONFIG_INSTALL_LIBS ,  0
+.equ CONFIG_INSTALL_SRCS ,  0
+.equ CONFIG_DEBUG ,  0
+.equ CONFIG_GPROF ,  0
+.equ CONFIG_GCOV ,  0
+.equ CONFIG_RVCT ,  0
+.equ CONFIG_GCC ,  1
+.equ CONFIG_MSVS ,  0
+.equ CONFIG_PIC ,  1
+.equ CONFIG_BIG_ENDIAN ,  0
+.equ CONFIG_CODEC_SRCS ,  0
+.equ CONFIG_DEBUG_LIBS ,  0
+.equ CONFIG_FAST_UNALIGNED ,  1
+.equ CONFIG_MEM_MANAGER ,  0
+.equ CONFIG_MEM_TRACKER ,  0
+.equ CONFIG_MEM_CHECKS ,  0
+.equ CONFIG_MD5 ,  1
+.equ CONFIG_DEQUANT_TOKENS ,  0
+.equ CONFIG_DC_RECON ,  0
+.equ CONFIG_RUNTIME_CPU_DETECT ,  0
+.equ CONFIG_POSTPROC ,  1
+.equ CONFIG_MULTITHREAD ,  1
+.equ CONFIG_INTERNAL_STATS ,  0
+.equ CONFIG_VP8_ENCODER ,  1
+.equ CONFIG_VP8_DECODER ,  1
+.equ CONFIG_VP8 ,  1
+.equ CONFIG_ENCODERS ,  1
+.equ CONFIG_DECODERS ,  1
+.equ CONFIG_STATIC_MSVCRT ,  0
+.equ CONFIG_SPATIAL_RESAMPLING ,  1
+.equ CONFIG_REALTIME_ONLY ,  1
+.equ CONFIG_ONTHEFLY_BITPACKING ,  0
+.equ CONFIG_ERROR_CONCEALMENT ,  1
+.equ CONFIG_SHARED ,  0
+.equ CONFIG_STATIC ,  1
+.equ CONFIG_SMALL ,  0
+.equ CONFIG_POSTPROC_VISUALIZER ,  0
+.equ CONFIG_OS_SUPPORT ,  1
+.equ CONFIG_UNIT_TESTS ,  0
+.equ CONFIG_MULTI_RES_ENCODING ,  1
+.equ CONFIG_TEMPORAL_DENOISING ,  1
+	.section	.note.GNU-stack,"",%progbits
+@ This file was created from a .asm file
+@  using the ads2gas.pl script.
+	.equ DO1STROUNDING, 0
+	.section	.note.GNU-stack,"",%progbits
diff --git a/third_party/libvpx/source/config/linux/arm-neon/vpx_config.c b/third_party/libvpx/source/config/linux/arm-neon/vpx_config.c
new file mode 100644
index 0000000..b7b3d11
--- /dev/null
+++ b/third_party/libvpx/source/config/linux/arm-neon/vpx_config.c
@@ -0,0 +1,9 @@
+/* Copyright (c) 2011 The WebM project authors. All Rights Reserved. */
+/*  */
+/* Use of this source code is governed by a BSD-style license */
+/* that can be found in the LICENSE file in the root of the source */
+/* tree. An additional intellectual property rights grant can be found */
+/* in the file PATENTS.  All contributing project authors may */
+/* be found in the AUTHORS file in the root of the source tree. */
+static const char* const cfg = "--sdk-path=$ANDROID_NDK_ROOT --target=armv7-android-gcc --enable-pic --enable-error-concealment --disable-install-docs --disable-install-srcs --disable-examples --disable-internal-stats --disable-install-libs --disable-install-bins --enable-realtime-only --enable-postproc --disable-runtime-cpu-detect --enable-multi-res-encoding";
+const char *vpx_codec_build_config(void) {return cfg;}
diff --git a/third_party/libvpx/source/config/linux/arm-neon/vpx_config.h b/third_party/libvpx/source/config/linux/arm-neon/vpx_config.h
new file mode 100644
index 0000000..5e7b345
--- /dev/null
+++ b/third_party/libvpx/source/config/linux/arm-neon/vpx_config.h
@@ -0,0 +1,82 @@
+/* Copyright (c) 2011 The WebM project authors. All Rights Reserved. */
+/*  */
+/* Use of this source code is governed by a BSD-style license */
+/* that can be found in the LICENSE file in the root of the source */
+/* tree. An additional intellectual property rights grant can be found */
+/* in the file PATENTS.  All contributing project authors may */
+/* be found in the AUTHORS file in the root of the source tree. */
+/* This file automatically generated by configure. Do not edit! */
+#ifndef VPX_CONFIG_H
+#define VPX_CONFIG_H
+#define RESTRICT    
+#define ARCH_ARM 1
+#define ARCH_MIPS 0
+#define ARCH_X86 0
+#define ARCH_X86_64 0
+#define ARCH_PPC32 0
+#define ARCH_PPC64 0
+#define HAVE_ARMV5TE 1
+#define HAVE_ARMV6 1
+#define HAVE_ARMV7 1
+#define HAVE_EDSP 1
+#define HAVE_MEDIA 1
+#define HAVE_NEON 1
+#define HAVE_MIPS32 0
+#define HAVE_MMX 0
+#define HAVE_SSE 0
+#define HAVE_SSE2 0
+#define HAVE_SSE3 0
+#define HAVE_SSSE3 0
+#define HAVE_SSE4_1 0
+#define HAVE_ALTIVEC 0
+#define HAVE_VPX_PORTS 1
+#define HAVE_STDINT_H 1
+#define HAVE_ALT_TREE_LAYOUT 0
+#define HAVE_PTHREAD_H 1
+#define HAVE_SYS_MMAN_H 1
+#define HAVE_UNISTD_H 1
+#define CONFIG_EXTERNAL_BUILD 0
+#define CONFIG_INSTALL_DOCS 0
+#define CONFIG_INSTALL_BINS 0
+#define CONFIG_INSTALL_LIBS 0
+#define CONFIG_INSTALL_SRCS 0
+#define CONFIG_DEBUG 0
+#define CONFIG_GPROF 0
+#define CONFIG_GCOV 0
+#define CONFIG_RVCT 0
+#define CONFIG_GCC 1
+#define CONFIG_MSVS 0
+#define CONFIG_PIC 1
+#define CONFIG_BIG_ENDIAN 0
+#define CONFIG_CODEC_SRCS 0
+#define CONFIG_DEBUG_LIBS 0
+#define CONFIG_FAST_UNALIGNED 1
+#define CONFIG_MEM_MANAGER 0
+#define CONFIG_MEM_TRACKER 0
+#define CONFIG_MEM_CHECKS 0
+#define CONFIG_MD5 1
+#define CONFIG_DEQUANT_TOKENS 0
+#define CONFIG_DC_RECON 0
+#define CONFIG_RUNTIME_CPU_DETECT 0
+#define CONFIG_POSTPROC 1
+#define CONFIG_MULTITHREAD 1
+#define CONFIG_INTERNAL_STATS 0
+#define CONFIG_VP8_ENCODER 1
+#define CONFIG_VP8_DECODER 1
+#define CONFIG_VP8 1
+#define CONFIG_ENCODERS 1
+#define CONFIG_DECODERS 1
+#define CONFIG_STATIC_MSVCRT 0
+#define CONFIG_SPATIAL_RESAMPLING 1
+#define CONFIG_REALTIME_ONLY 1
+#define CONFIG_ONTHEFLY_BITPACKING 0
+#define CONFIG_ERROR_CONCEALMENT 1
+#define CONFIG_SHARED 0
+#define CONFIG_STATIC 1
+#define CONFIG_SMALL 0
+#define CONFIG_POSTPROC_VISUALIZER 0
+#define CONFIG_OS_SUPPORT 1
+#define CONFIG_UNIT_TESTS 0
+#define CONFIG_MULTI_RES_ENCODING 1
+#define CONFIG_TEMPORAL_DENOISING 1
+#endif /* VPX_CONFIG_H */
diff --git a/third_party/libvpx/source/config/linux/arm-neon/vpx_rtcd.h b/third_party/libvpx/source/config/linux/arm-neon/vpx_rtcd.h
new file mode 100644
index 0000000..ae86195
--- /dev/null
+++ b/third_party/libvpx/source/config/linux/arm-neon/vpx_rtcd.h
@@ -0,0 +1,506 @@
+#ifndef VPX_RTCD_
+#define VPX_RTCD_
+
+#ifdef RTCD_C
+#define RTCD_EXTERN
+#else
+#define RTCD_EXTERN extern
+#endif
+
+#include "vp8/common/blockd.h"
+
+struct blockd;
+struct macroblockd;
+struct loop_filter_info;
+
+/* Encoder forward decls */
+struct block;
+struct macroblock;
+struct variance_vtable;
+union int_mv;
+struct yv12_buffer_config;
+
+void vp8_dequantize_b_c(struct blockd*, short *dqc);
+void vp8_dequantize_b_v6(struct blockd*, short *dqc);
+void vp8_dequantize_b_neon(struct blockd*, short *dqc);
+#define vp8_dequantize_b vp8_dequantize_b_neon
+
+void vp8_dequant_idct_add_c(short *input, short *dq, unsigned char *output, int stride);
+void vp8_dequant_idct_add_v6(short *input, short *dq, unsigned char *output, int stride);
+void vp8_dequant_idct_add_neon(short *input, short *dq, unsigned char *output, int stride);
+#define vp8_dequant_idct_add vp8_dequant_idct_add_neon
+
+void vp8_dequant_idct_add_y_block_c(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+void vp8_dequant_idct_add_y_block_v6(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+void vp8_dequant_idct_add_y_block_neon(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+#define vp8_dequant_idct_add_y_block vp8_dequant_idct_add_y_block_neon
+
+void vp8_dequant_idct_add_uv_block_c(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+void vp8_dequant_idct_add_uv_block_v6(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+void vp8_dequant_idct_add_uv_block_neon(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+#define vp8_dequant_idct_add_uv_block vp8_dequant_idct_add_uv_block_neon
+
+void vp8_loop_filter_mbv_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbv_armv6(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbv_neon(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+#define vp8_loop_filter_mbv vp8_loop_filter_mbv_neon
+
+void vp8_loop_filter_bv_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bv_armv6(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bv_neon(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+#define vp8_loop_filter_bv vp8_loop_filter_bv_neon
+
+void vp8_loop_filter_mbh_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbh_armv6(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbh_neon(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+#define vp8_loop_filter_mbh vp8_loop_filter_mbh_neon
+
+void vp8_loop_filter_bh_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bh_armv6(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bh_neon(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+#define vp8_loop_filter_bh vp8_loop_filter_bh_neon
+
+void vp8_loop_filter_simple_vertical_edge_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_vertical_edge_armv6(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_mbvs_neon(unsigned char *y, int ystride, const unsigned char *blimit);
+#define vp8_loop_filter_simple_mbv vp8_loop_filter_mbvs_neon
+
+void vp8_loop_filter_simple_horizontal_edge_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_horizontal_edge_armv6(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_mbhs_neon(unsigned char *y, int ystride, const unsigned char *blimit);
+#define vp8_loop_filter_simple_mbh vp8_loop_filter_mbhs_neon
+
+void vp8_loop_filter_bvs_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bvs_armv6(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bvs_neon(unsigned char *y, int ystride, const unsigned char *blimit);
+#define vp8_loop_filter_simple_bv vp8_loop_filter_bvs_neon
+
+void vp8_loop_filter_bhs_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bhs_armv6(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bhs_neon(unsigned char *y, int ystride, const unsigned char *blimit);
+#define vp8_loop_filter_simple_bh vp8_loop_filter_bhs_neon
+
+void vp8_short_idct4x4llm_c(short *input, unsigned char *pred, int pitch, unsigned char *dst, int dst_stride);
+void vp8_short_idct4x4llm_v6_dual(short *input, unsigned char *pred, int pitch, unsigned char *dst, int dst_stride);
+void vp8_short_idct4x4llm_neon(short *input, unsigned char *pred, int pitch, unsigned char *dst, int dst_stride);
+#define vp8_short_idct4x4llm vp8_short_idct4x4llm_neon
+
+void vp8_short_inv_walsh4x4_1_c(short *input, short *output);
+#define vp8_short_inv_walsh4x4_1 vp8_short_inv_walsh4x4_1_c
+
+void vp8_short_inv_walsh4x4_c(short *input, short *output);
+void vp8_short_inv_walsh4x4_v6(short *input, short *output);
+void vp8_short_inv_walsh4x4_neon(short *input, short *output);
+#define vp8_short_inv_walsh4x4 vp8_short_inv_walsh4x4_neon
+
+void vp8_dc_only_idct_add_c(short input, unsigned char *pred, int pred_stride, unsigned char *dst, int dst_stride);
+void vp8_dc_only_idct_add_v6(short input, unsigned char *pred, int pred_stride, unsigned char *dst, int dst_stride);
+void vp8_dc_only_idct_add_neon(short input, unsigned char *pred, int pred_stride, unsigned char *dst, int dst_stride);
+#define vp8_dc_only_idct_add vp8_dc_only_idct_add_neon
+
+void vp8_copy_mem16x16_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem16x16_v6(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem16x16_neon(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+#define vp8_copy_mem16x16 vp8_copy_mem16x16_neon
+
+void vp8_copy_mem8x8_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem8x8_v6(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem8x8_neon(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+#define vp8_copy_mem8x8 vp8_copy_mem8x8_neon
+
+void vp8_copy_mem8x4_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem8x4_v6(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem8x4_neon(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+#define vp8_copy_mem8x4 vp8_copy_mem8x4_neon
+
+void vp8_build_intra_predictors_mby_s_c(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+#define vp8_build_intra_predictors_mby_s vp8_build_intra_predictors_mby_s_c
+
+void vp8_build_intra_predictors_mbuv_s_c(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+#define vp8_build_intra_predictors_mbuv_s vp8_build_intra_predictors_mbuv_s_c
+
+void vp8_intra4x4_predict_c(unsigned char *Above, unsigned char *yleft, int left_stride, B_PREDICTION_MODE b_mode, unsigned char *dst, int dst_stride, unsigned char top_left);
+void vp8_intra4x4_predict_armv6(unsigned char *Above, unsigned char *yleft, int left_stride, B_PREDICTION_MODE b_mode, unsigned char *dst, int dst_stride, unsigned char top_left);
+#define vp8_intra4x4_predict vp8_intra4x4_predict_armv6
+
+void vp8_mbpost_proc_down_c(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+#define vp8_mbpost_proc_down vp8_mbpost_proc_down_c
+
+void vp8_mbpost_proc_across_ip_c(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+#define vp8_mbpost_proc_across_ip vp8_mbpost_proc_across_ip_c
+
+void vp8_post_proc_down_and_across_c(unsigned char *src, unsigned char *dst, int src_pitch, int dst_pitch, int rows, int cols, int flimit);
+#define vp8_post_proc_down_and_across vp8_post_proc_down_and_across_c
+
+void vp8_plane_add_noise_c(unsigned char *s, char *noise, char blackclamp[16], char whiteclamp[16], char bothclamp[16], unsigned int w, unsigned int h, int pitch);
+#define vp8_plane_add_noise vp8_plane_add_noise_c
+
+void vp8_blend_mb_inner_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_mb_inner vp8_blend_mb_inner_c
+
+void vp8_blend_mb_outer_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_mb_outer vp8_blend_mb_outer_c
+
+void vp8_blend_b_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_b vp8_blend_b_c
+
+void vp8_filter_by_weight16x16_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+#define vp8_filter_by_weight16x16 vp8_filter_by_weight16x16_c
+
+void vp8_filter_by_weight8x8_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+#define vp8_filter_by_weight8x8 vp8_filter_by_weight8x8_c
+
+void vp8_filter_by_weight4x4_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+#define vp8_filter_by_weight4x4 vp8_filter_by_weight4x4_c
+
+void vp8_sixtap_predict16x16_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict16x16_armv6(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict16x16_neon(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+#define vp8_sixtap_predict16x16 vp8_sixtap_predict16x16_neon
+
+void vp8_sixtap_predict8x8_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x8_armv6(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x8_neon(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+#define vp8_sixtap_predict8x8 vp8_sixtap_predict8x8_neon
+
+void vp8_sixtap_predict8x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x4_armv6(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x4_neon(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+#define vp8_sixtap_predict8x4 vp8_sixtap_predict8x4_neon
+
+void vp8_sixtap_predict4x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict4x4_armv6(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict4x4_neon(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+#define vp8_sixtap_predict4x4 vp8_sixtap_predict4x4_neon
+
+void vp8_bilinear_predict16x16_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict16x16_armv6(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict16x16_neon(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+#define vp8_bilinear_predict16x16 vp8_bilinear_predict16x16_neon
+
+void vp8_bilinear_predict8x8_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x8_armv6(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x8_neon(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+#define vp8_bilinear_predict8x8 vp8_bilinear_predict8x8_neon
+
+void vp8_bilinear_predict8x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x4_armv6(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x4_neon(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+#define vp8_bilinear_predict8x4 vp8_bilinear_predict8x4_neon
+
+void vp8_bilinear_predict4x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict4x4_armv6(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict4x4_neon(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+#define vp8_bilinear_predict4x4 vp8_bilinear_predict4x4_neon
+
+unsigned int vp8_variance4x4_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance4x4 vp8_variance4x4_c
+
+unsigned int vp8_variance8x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x8_armv6(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x8_neon(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance8x8 vp8_variance8x8_neon
+
+unsigned int vp8_variance8x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x16_neon(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance8x16 vp8_variance8x16_neon
+
+unsigned int vp8_variance16x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x8_neon(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance16x8 vp8_variance16x8_neon
+
+unsigned int vp8_variance16x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x16_armv6(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x16_neon(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance16x16 vp8_variance16x16_neon
+
+unsigned int vp8_sub_pixel_variance4x4_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_variance4x4 vp8_sub_pixel_variance4x4_c
+
+unsigned int vp8_sub_pixel_variance8x8_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x8_armv6(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x8_neon(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_variance8x8 vp8_sub_pixel_variance8x8_neon
+
+unsigned int vp8_sub_pixel_variance8x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_variance8x16 vp8_sub_pixel_variance8x16_c
+
+unsigned int vp8_sub_pixel_variance16x8_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_variance16x8 vp8_sub_pixel_variance16x8_c
+
+unsigned int vp8_sub_pixel_variance16x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x16_armv6(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x16_neon(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_variance16x16 vp8_sub_pixel_variance16x16_neon
+
+unsigned int vp8_variance_halfpixvar16x16_h_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_h_armv6(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_h_neon(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance_halfpixvar16x16_h vp8_variance_halfpixvar16x16_h_neon
+
+unsigned int vp8_variance_halfpixvar16x16_v_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_v_armv6(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_v_neon(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance_halfpixvar16x16_v vp8_variance_halfpixvar16x16_v_neon
+
+unsigned int vp8_variance_halfpixvar16x16_hv_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_hv_armv6(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_hv_neon(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance_halfpixvar16x16_hv vp8_variance_halfpixvar16x16_hv_neon
+
+unsigned int vp8_sad4x4_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad4x4_neon(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+#define vp8_sad4x4 vp8_sad4x4_neon
+
+unsigned int vp8_sad8x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad8x8_neon(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+#define vp8_sad8x8 vp8_sad8x8_neon
+
+unsigned int vp8_sad8x16_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad8x16_neon(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+#define vp8_sad8x16 vp8_sad8x16_neon
+
+unsigned int vp8_sad16x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad16x8_neon(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+#define vp8_sad16x8 vp8_sad16x8_neon
+
+unsigned int vp8_sad16x16_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad16x16_armv6(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad16x16_neon(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+#define vp8_sad16x16 vp8_sad16x16_neon
+
+void vp8_sad4x4x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+#define vp8_sad4x4x3 vp8_sad4x4x3_c
+
+void vp8_sad8x8x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+#define vp8_sad8x8x3 vp8_sad8x8x3_c
+
+void vp8_sad8x16x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+#define vp8_sad8x16x3 vp8_sad8x16x3_c
+
+void vp8_sad16x8x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+#define vp8_sad16x8x3 vp8_sad16x8x3_c
+
+void vp8_sad16x16x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+#define vp8_sad16x16x3 vp8_sad16x16x3_c
+
+void vp8_sad4x4x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+#define vp8_sad4x4x8 vp8_sad4x4x8_c
+
+void vp8_sad8x8x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+#define vp8_sad8x8x8 vp8_sad8x8x8_c
+
+void vp8_sad8x16x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+#define vp8_sad8x16x8 vp8_sad8x16x8_c
+
+void vp8_sad16x8x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+#define vp8_sad16x8x8 vp8_sad16x8x8_c
+
+void vp8_sad16x16x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+#define vp8_sad16x16x8 vp8_sad16x16x8_c
+
+void vp8_sad4x4x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+#define vp8_sad4x4x4d vp8_sad4x4x4d_c
+
+void vp8_sad8x8x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+#define vp8_sad8x8x4d vp8_sad8x8x4d_c
+
+void vp8_sad8x16x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+#define vp8_sad8x16x4d vp8_sad8x16x4d_c
+
+void vp8_sad16x8x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+#define vp8_sad16x8x4d vp8_sad16x8x4d_c
+
+void vp8_sad16x16x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+#define vp8_sad16x16x4d vp8_sad16x16x4d_c
+
+unsigned int vp8_get_mb_ss_c(const short *);
+#define vp8_get_mb_ss vp8_get_mb_ss_c
+
+unsigned int vp8_sub_pixel_mse16x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_mse16x16 vp8_sub_pixel_mse16x16_c
+
+unsigned int vp8_mse16x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_mse16x16_armv6(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_mse16x16_neon(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_mse16x16 vp8_mse16x16_neon
+
+unsigned int vp8_get4x4sse_cs_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride);
+unsigned int vp8_get4x4sse_cs_neon(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride);
+#define vp8_get4x4sse_cs vp8_get4x4sse_cs_neon
+
+void vp8_short_fdct4x4_c(short *input, short *output, int pitch);
+void vp8_short_fdct4x4_armv6(short *input, short *output, int pitch);
+void vp8_short_fdct4x4_neon(short *input, short *output, int pitch);
+#define vp8_short_fdct4x4 vp8_short_fdct4x4_neon
+
+void vp8_short_fdct8x4_c(short *input, short *output, int pitch);
+void vp8_short_fdct8x4_armv6(short *input, short *output, int pitch);
+void vp8_short_fdct8x4_neon(short *input, short *output, int pitch);
+#define vp8_short_fdct8x4 vp8_short_fdct8x4_neon
+
+void vp8_short_walsh4x4_c(short *input, short *output, int pitch);
+void vp8_short_walsh4x4_armv6(short *input, short *output, int pitch);
+void vp8_short_walsh4x4_neon(short *input, short *output, int pitch);
+#define vp8_short_walsh4x4 vp8_short_walsh4x4_neon
+
+void vp8_regular_quantize_b_c(struct block *, struct blockd *);
+#define vp8_regular_quantize_b vp8_regular_quantize_b_c
+
+void vp8_fast_quantize_b_c(struct block *, struct blockd *);
+void vp8_fast_quantize_b_armv6(struct block *, struct blockd *);
+void vp8_fast_quantize_b_neon(struct block *, struct blockd *);
+#define vp8_fast_quantize_b vp8_fast_quantize_b_neon
+
+void vp8_regular_quantize_b_pair_c(struct block *b1, struct block *b2, struct blockd *d1, struct blockd *d2);
+#define vp8_regular_quantize_b_pair vp8_regular_quantize_b_pair_c
+
+void vp8_fast_quantize_b_pair_c(struct block *b1, struct block *b2, struct blockd *d1, struct blockd *d2);
+void vp8_fast_quantize_b_pair_neon(struct block *b1, struct block *b2, struct blockd *d1, struct blockd *d2);
+#define vp8_fast_quantize_b_pair vp8_fast_quantize_b_pair_neon
+
+void vp8_quantize_mb_c(struct macroblock *);
+void vp8_quantize_mb_neon(struct macroblock *);
+#define vp8_quantize_mb vp8_quantize_mb_neon
+
+void vp8_quantize_mby_c(struct macroblock *);
+void vp8_quantize_mby_neon(struct macroblock *);
+#define vp8_quantize_mby vp8_quantize_mby_neon
+
+void vp8_quantize_mbuv_c(struct macroblock *);
+void vp8_quantize_mbuv_neon(struct macroblock *);
+#define vp8_quantize_mbuv vp8_quantize_mbuv_neon
+
+int vp8_block_error_c(short *coeff, short *dqcoeff);
+#define vp8_block_error vp8_block_error_c
+
+int vp8_mbblock_error_c(struct macroblock *mb, int dc);
+#define vp8_mbblock_error vp8_mbblock_error_c
+
+int vp8_mbuverror_c(struct macroblock *mb);
+#define vp8_mbuverror vp8_mbuverror_c
+
+void vp8_subtract_b_c(struct block *be, struct blockd *bd, int pitch);
+void vp8_subtract_b_armv6(struct block *be, struct blockd *bd, int pitch);
+void vp8_subtract_b_neon(struct block *be, struct blockd *bd, int pitch);
+#define vp8_subtract_b vp8_subtract_b_neon
+
+void vp8_subtract_mby_c(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+void vp8_subtract_mby_armv6(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+void vp8_subtract_mby_neon(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+#define vp8_subtract_mby vp8_subtract_mby_neon
+
+void vp8_subtract_mbuv_c(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+void vp8_subtract_mbuv_armv6(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+void vp8_subtract_mbuv_neon(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+#define vp8_subtract_mbuv vp8_subtract_mbuv_neon
+
+int vp8_full_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+#define vp8_full_search_sad vp8_full_search_sad_c
+
+int vp8_refining_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+#define vp8_refining_search_sad vp8_refining_search_sad_c
+
+int vp8_diamond_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, union int_mv *best_mv, int search_param, int sad_per_bit, int *num00, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+#define vp8_diamond_search_sad vp8_diamond_search_sad_c
+
+void vp8_yv12_copy_partial_frame_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+void vp8_yv12_copy_partial_frame_neon(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_partial_frame vp8_yv12_copy_partial_frame_neon
+
+int vp8_denoiser_filter_c(struct yv12_buffer_config* mc_running_avg, struct yv12_buffer_config* running_avg, struct macroblock* signal, unsigned int motion_magnitude2, int y_offset, int uv_offset);
+#define vp8_denoiser_filter vp8_denoiser_filter_c
+
+void vp8_horizontal_line_4_5_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_4_5_scale vp8_horizontal_line_4_5_scale_c
+
+void vp8_vertical_band_4_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_4_5_scale vp8_vertical_band_4_5_scale_c
+
+void vp8_last_vertical_band_4_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_4_5_scale vp8_last_vertical_band_4_5_scale_c
+
+void vp8_horizontal_line_2_3_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_2_3_scale vp8_horizontal_line_2_3_scale_c
+
+void vp8_vertical_band_2_3_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_3_scale vp8_vertical_band_2_3_scale_c
+
+void vp8_last_vertical_band_2_3_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_2_3_scale vp8_last_vertical_band_2_3_scale_c
+
+void vp8_horizontal_line_3_5_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_3_5_scale vp8_horizontal_line_3_5_scale_c
+
+void vp8_vertical_band_3_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_3_5_scale vp8_vertical_band_3_5_scale_c
+
+void vp8_last_vertical_band_3_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_3_5_scale vp8_last_vertical_band_3_5_scale_c
+
+void vp8_horizontal_line_3_4_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_3_4_scale vp8_horizontal_line_3_4_scale_c
+
+void vp8_vertical_band_3_4_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_3_4_scale vp8_vertical_band_3_4_scale_c
+
+void vp8_last_vertical_band_3_4_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_3_4_scale vp8_last_vertical_band_3_4_scale_c
+
+void vp8_horizontal_line_1_2_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_1_2_scale vp8_horizontal_line_1_2_scale_c
+
+void vp8_vertical_band_1_2_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_1_2_scale vp8_vertical_band_1_2_scale_c
+
+void vp8_last_vertical_band_1_2_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_1_2_scale vp8_last_vertical_band_1_2_scale_c
+
+void vp8_horizontal_line_5_4_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_5_4_scale vp8_horizontal_line_5_4_scale_c
+
+void vp8_vertical_band_5_4_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_5_4_scale vp8_vertical_band_5_4_scale_c
+
+void vp8_horizontal_line_5_3_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_5_3_scale vp8_horizontal_line_5_3_scale_c
+
+void vp8_vertical_band_5_3_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_5_3_scale vp8_vertical_band_5_3_scale_c
+
+void vp8_horizontal_line_2_1_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_2_1_scale vp8_horizontal_line_2_1_scale_c
+
+void vp8_vertical_band_2_1_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_1_scale vp8_vertical_band_2_1_scale_c
+
+void vp8_vertical_band_2_1_scale_i_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_1_scale_i vp8_vertical_band_2_1_scale_i_c
+
+void vp8_yv12_extend_frame_borders_c(struct yv12_buffer_config *ybf);
+void vp8_yv12_extend_frame_borders_neon(struct yv12_buffer_config *ybf);
+#define vp8_yv12_extend_frame_borders vp8_yv12_extend_frame_borders_neon
+
+void vp8_yv12_copy_frame_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+void vp8_yv12_copy_frame_neon(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_frame vp8_yv12_copy_frame_neon
+
+void vp8_yv12_copy_y_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+void vp8_yv12_copy_y_neon(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_y vp8_yv12_copy_y_neon
+
+void vpx_rtcd(void);
+#include "vpx_config.h"
+
+#ifdef RTCD_C
+#include "vpx_ports/arm.h"
+static void setup_rtcd_internal(void)
+{
+    int flags = arm_cpu_caps();
+
+    (void)flags;
+
+
+}
+#endif
+#endif
diff --git a/third_party/libvpx/source/config/linux/arm-neon/vpx_version.h b/third_party/libvpx/source/config/linux/arm-neon/vpx_version.h
new file mode 100644
index 0000000..f6b740c
--- /dev/null
+++ b/third_party/libvpx/source/config/linux/arm-neon/vpx_version.h
@@ -0,0 +1,7 @@
+#define VERSION_MAJOR  1
+#define VERSION_MINOR  1
+#define VERSION_PATCH  0
+#define VERSION_EXTRA  "6-gdd6134b"
+#define VERSION_PACKED ((VERSION_MAJOR<<16)|(VERSION_MINOR<<8)|(VERSION_PATCH))
+#define VERSION_STRING_NOSP "v1.1.0-6-gdd6134b"
+#define VERSION_STRING      " v1.1.0-6-gdd6134b"
diff --git a/third_party/libvpx/source/config/linux/arm/vpx_config.asm b/third_party/libvpx/source/config/linux/arm/vpx_config.asm
new file mode 100644
index 0000000..cb75d90
--- /dev/null
+++ b/third_party/libvpx/source/config/linux/arm/vpx_config.asm
@@ -0,0 +1,78 @@
+@ This file was created from a .asm file
+@  using the ads2gas.pl script.
+	.equ DO1STROUNDING, 0
+.equ ARCH_ARM ,  1
+.equ ARCH_MIPS ,  0
+.equ ARCH_X86 ,  0
+.equ ARCH_X86_64 ,  0
+.equ ARCH_PPC32 ,  0
+.equ ARCH_PPC64 ,  0
+.equ HAVE_ARMV5TE ,  1
+.equ HAVE_ARMV6 ,  0
+.equ HAVE_ARMV7 ,  0
+.equ HAVE_EDSP ,  1
+.equ HAVE_MEDIA ,  0
+.equ HAVE_NEON ,  0
+.equ HAVE_MIPS32 ,  0
+.equ HAVE_MMX ,  0
+.equ HAVE_SSE ,  0
+.equ HAVE_SSE2 ,  0
+.equ HAVE_SSE3 ,  0
+.equ HAVE_SSSE3 ,  0
+.equ HAVE_SSE4_1 ,  0
+.equ HAVE_ALTIVEC ,  0
+.equ HAVE_VPX_PORTS ,  1
+.equ HAVE_STDINT_H ,  1
+.equ HAVE_ALT_TREE_LAYOUT ,  0
+.equ HAVE_PTHREAD_H ,  1
+.equ HAVE_SYS_MMAN_H ,  1
+.equ HAVE_UNISTD_H ,  1
+.equ CONFIG_EXTERNAL_BUILD ,  0
+.equ CONFIG_INSTALL_DOCS ,  0
+.equ CONFIG_INSTALL_BINS ,  0
+.equ CONFIG_INSTALL_LIBS ,  0
+.equ CONFIG_INSTALL_SRCS ,  0
+.equ CONFIG_DEBUG ,  0
+.equ CONFIG_GPROF ,  0
+.equ CONFIG_GCOV ,  0
+.equ CONFIG_RVCT ,  0
+.equ CONFIG_GCC ,  1
+.equ CONFIG_MSVS ,  0
+.equ CONFIG_PIC ,  1
+.equ CONFIG_BIG_ENDIAN ,  0
+.equ CONFIG_CODEC_SRCS ,  0
+.equ CONFIG_DEBUG_LIBS ,  0
+.equ CONFIG_FAST_UNALIGNED ,  1
+.equ CONFIG_MEM_MANAGER ,  0
+.equ CONFIG_MEM_TRACKER ,  0
+.equ CONFIG_MEM_CHECKS ,  0
+.equ CONFIG_MD5 ,  1
+.equ CONFIG_DEQUANT_TOKENS ,  0
+.equ CONFIG_DC_RECON ,  0
+.equ CONFIG_RUNTIME_CPU_DETECT ,  0
+.equ CONFIG_POSTPROC ,  1
+.equ CONFIG_MULTITHREAD ,  1
+.equ CONFIG_INTERNAL_STATS ,  0
+.equ CONFIG_VP8_ENCODER ,  1
+.equ CONFIG_VP8_DECODER ,  1
+.equ CONFIG_VP8 ,  1
+.equ CONFIG_ENCODERS ,  1
+.equ CONFIG_DECODERS ,  1
+.equ CONFIG_STATIC_MSVCRT ,  0
+.equ CONFIG_SPATIAL_RESAMPLING ,  1
+.equ CONFIG_REALTIME_ONLY ,  1
+.equ CONFIG_ONTHEFLY_BITPACKING ,  0
+.equ CONFIG_ERROR_CONCEALMENT ,  1
+.equ CONFIG_SHARED ,  0
+.equ CONFIG_STATIC ,  1
+.equ CONFIG_SMALL ,  0
+.equ CONFIG_POSTPROC_VISUALIZER ,  0
+.equ CONFIG_OS_SUPPORT ,  1
+.equ CONFIG_UNIT_TESTS ,  0
+.equ CONFIG_MULTI_RES_ENCODING ,  1
+.equ CONFIG_TEMPORAL_DENOISING ,  1
+	.section	.note.GNU-stack,"",%progbits
+@ This file was created from a .asm file
+@  using the ads2gas.pl script.
+	.equ DO1STROUNDING, 0
+	.section	.note.GNU-stack,"",%progbits
diff --git a/third_party/libvpx/source/config/linux/arm/vpx_config.c b/third_party/libvpx/source/config/linux/arm/vpx_config.c
new file mode 100644
index 0000000..a45bce6
--- /dev/null
+++ b/third_party/libvpx/source/config/linux/arm/vpx_config.c
@@ -0,0 +1,9 @@
+/* Copyright (c) 2011 The WebM project authors. All Rights Reserved. */
+/*  */
+/* Use of this source code is governed by a BSD-style license */
+/* that can be found in the LICENSE file in the root of the source */
+/* tree. An additional intellectual property rights grant can be found */
+/* in the file PATENTS.  All contributing project authors may */
+/* be found in the AUTHORS file in the root of the source tree. */
+static const char* const cfg = "--sdk-path=$ANDROID_NDK_ROOT --target=armv5te-android-gcc --enable-pic --enable-error-concealment --disable-install-docs --disable-install-srcs --disable-examples --disable-internal-stats --disable-install-libs --disable-install-bins --enable-realtime-only --enable-postproc --enable-multi-res-encoding";
+const char *vpx_codec_build_config(void) {return cfg;}
diff --git a/third_party/libvpx/source/config/linux/arm/vpx_config.h b/third_party/libvpx/source/config/linux/arm/vpx_config.h
new file mode 100644
index 0000000..aa06b42
--- /dev/null
+++ b/third_party/libvpx/source/config/linux/arm/vpx_config.h
@@ -0,0 +1,82 @@
+/* Copyright (c) 2011 The WebM project authors. All Rights Reserved. */
+/*  */
+/* Use of this source code is governed by a BSD-style license */
+/* that can be found in the LICENSE file in the root of the source */
+/* tree. An additional intellectual property rights grant can be found */
+/* in the file PATENTS.  All contributing project authors may */
+/* be found in the AUTHORS file in the root of the source tree. */
+/* This file automatically generated by configure. Do not edit! */
+#ifndef VPX_CONFIG_H
+#define VPX_CONFIG_H
+#define RESTRICT    
+#define ARCH_ARM 1
+#define ARCH_MIPS 0
+#define ARCH_X86 0
+#define ARCH_X86_64 0
+#define ARCH_PPC32 0
+#define ARCH_PPC64 0
+#define HAVE_ARMV5TE 1
+#define HAVE_ARMV6 0
+#define HAVE_ARMV7 0
+#define HAVE_EDSP 1
+#define HAVE_MEDIA 0
+#define HAVE_NEON 0
+#define HAVE_MIPS32 0
+#define HAVE_MMX 0
+#define HAVE_SSE 0
+#define HAVE_SSE2 0
+#define HAVE_SSE3 0
+#define HAVE_SSSE3 0
+#define HAVE_SSE4_1 0
+#define HAVE_ALTIVEC 0
+#define HAVE_VPX_PORTS 1
+#define HAVE_STDINT_H 1
+#define HAVE_ALT_TREE_LAYOUT 0
+#define HAVE_PTHREAD_H 1
+#define HAVE_SYS_MMAN_H 1
+#define HAVE_UNISTD_H 1
+#define CONFIG_EXTERNAL_BUILD 0
+#define CONFIG_INSTALL_DOCS 0
+#define CONFIG_INSTALL_BINS 0
+#define CONFIG_INSTALL_LIBS 0
+#define CONFIG_INSTALL_SRCS 0
+#define CONFIG_DEBUG 0
+#define CONFIG_GPROF 0
+#define CONFIG_GCOV 0
+#define CONFIG_RVCT 0
+#define CONFIG_GCC 1
+#define CONFIG_MSVS 0
+#define CONFIG_PIC 1
+#define CONFIG_BIG_ENDIAN 0
+#define CONFIG_CODEC_SRCS 0
+#define CONFIG_DEBUG_LIBS 0
+#define CONFIG_FAST_UNALIGNED 1
+#define CONFIG_MEM_MANAGER 0
+#define CONFIG_MEM_TRACKER 0
+#define CONFIG_MEM_CHECKS 0
+#define CONFIG_MD5 1
+#define CONFIG_DEQUANT_TOKENS 0
+#define CONFIG_DC_RECON 0
+#define CONFIG_RUNTIME_CPU_DETECT 0
+#define CONFIG_POSTPROC 1
+#define CONFIG_MULTITHREAD 1
+#define CONFIG_INTERNAL_STATS 0
+#define CONFIG_VP8_ENCODER 1
+#define CONFIG_VP8_DECODER 1
+#define CONFIG_VP8 1
+#define CONFIG_ENCODERS 1
+#define CONFIG_DECODERS 1
+#define CONFIG_STATIC_MSVCRT 0
+#define CONFIG_SPATIAL_RESAMPLING 1
+#define CONFIG_REALTIME_ONLY 1
+#define CONFIG_ONTHEFLY_BITPACKING 0
+#define CONFIG_ERROR_CONCEALMENT 1
+#define CONFIG_SHARED 0
+#define CONFIG_STATIC 1
+#define CONFIG_SMALL 0
+#define CONFIG_POSTPROC_VISUALIZER 0
+#define CONFIG_OS_SUPPORT 1
+#define CONFIG_UNIT_TESTS 0
+#define CONFIG_MULTI_RES_ENCODING 1
+#define CONFIG_TEMPORAL_DENOISING 1
+#endif /* VPX_CONFIG_H */
diff --git a/third_party/libvpx/source/config/linux/arm/vpx_rtcd.h b/third_party/libvpx/source/config/linux/arm/vpx_rtcd.h
new file mode 100644
index 0000000..e5a262d
--- /dev/null
+++ b/third_party/libvpx/source/config/linux/arm/vpx_rtcd.h
@@ -0,0 +1,406 @@
+#ifndef VPX_RTCD_
+#define VPX_RTCD_
+
+#ifdef RTCD_C
+#define RTCD_EXTERN
+#else
+#define RTCD_EXTERN extern
+#endif
+
+#include "vp8/common/blockd.h"
+
+struct blockd;
+struct macroblockd;
+struct loop_filter_info;
+
+/* Encoder forward decls */
+struct block;
+struct macroblock;
+struct variance_vtable;
+union int_mv;
+struct yv12_buffer_config;
+
+void vp8_dequantize_b_c(struct blockd*, short *dqc);
+#define vp8_dequantize_b vp8_dequantize_b_c
+
+void vp8_dequant_idct_add_c(short *input, short *dq, unsigned char *output, int stride);
+#define vp8_dequant_idct_add vp8_dequant_idct_add_c
+
+void vp8_dequant_idct_add_y_block_c(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+#define vp8_dequant_idct_add_y_block vp8_dequant_idct_add_y_block_c
+
+void vp8_dequant_idct_add_uv_block_c(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+#define vp8_dequant_idct_add_uv_block vp8_dequant_idct_add_uv_block_c
+
+void vp8_loop_filter_mbv_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+#define vp8_loop_filter_mbv vp8_loop_filter_mbv_c
+
+void vp8_loop_filter_bv_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+#define vp8_loop_filter_bv vp8_loop_filter_bv_c
+
+void vp8_loop_filter_mbh_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+#define vp8_loop_filter_mbh vp8_loop_filter_mbh_c
+
+void vp8_loop_filter_bh_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+#define vp8_loop_filter_bh vp8_loop_filter_bh_c
+
+void vp8_loop_filter_simple_vertical_edge_c(unsigned char *y, int ystride, const unsigned char *blimit);
+#define vp8_loop_filter_simple_mbv vp8_loop_filter_simple_vertical_edge_c
+
+void vp8_loop_filter_simple_horizontal_edge_c(unsigned char *y, int ystride, const unsigned char *blimit);
+#define vp8_loop_filter_simple_mbh vp8_loop_filter_simple_horizontal_edge_c
+
+void vp8_loop_filter_bvs_c(unsigned char *y, int ystride, const unsigned char *blimit);
+#define vp8_loop_filter_simple_bv vp8_loop_filter_bvs_c
+
+void vp8_loop_filter_bhs_c(unsigned char *y, int ystride, const unsigned char *blimit);
+#define vp8_loop_filter_simple_bh vp8_loop_filter_bhs_c
+
+void vp8_short_idct4x4llm_c(short *input, unsigned char *pred, int pitch, unsigned char *dst, int dst_stride);
+#define vp8_short_idct4x4llm vp8_short_idct4x4llm_c
+
+void vp8_short_inv_walsh4x4_1_c(short *input, short *output);
+#define vp8_short_inv_walsh4x4_1 vp8_short_inv_walsh4x4_1_c
+
+void vp8_short_inv_walsh4x4_c(short *input, short *output);
+#define vp8_short_inv_walsh4x4 vp8_short_inv_walsh4x4_c
+
+void vp8_dc_only_idct_add_c(short input, unsigned char *pred, int pred_stride, unsigned char *dst, int dst_stride);
+#define vp8_dc_only_idct_add vp8_dc_only_idct_add_c
+
+void vp8_copy_mem16x16_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+#define vp8_copy_mem16x16 vp8_copy_mem16x16_c
+
+void vp8_copy_mem8x8_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+#define vp8_copy_mem8x8 vp8_copy_mem8x8_c
+
+void vp8_copy_mem8x4_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+#define vp8_copy_mem8x4 vp8_copy_mem8x4_c
+
+void vp8_build_intra_predictors_mby_s_c(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+#define vp8_build_intra_predictors_mby_s vp8_build_intra_predictors_mby_s_c
+
+void vp8_build_intra_predictors_mbuv_s_c(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+#define vp8_build_intra_predictors_mbuv_s vp8_build_intra_predictors_mbuv_s_c
+
+void vp8_intra4x4_predict_c(unsigned char *Above, unsigned char *yleft, int left_stride, B_PREDICTION_MODE b_mode, unsigned char *dst, int dst_stride, unsigned char top_left);
+#define vp8_intra4x4_predict vp8_intra4x4_predict_c
+
+void vp8_mbpost_proc_down_c(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+#define vp8_mbpost_proc_down vp8_mbpost_proc_down_c
+
+void vp8_mbpost_proc_across_ip_c(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+#define vp8_mbpost_proc_across_ip vp8_mbpost_proc_across_ip_c
+
+void vp8_post_proc_down_and_across_c(unsigned char *src, unsigned char *dst, int src_pitch, int dst_pitch, int rows, int cols, int flimit);
+#define vp8_post_proc_down_and_across vp8_post_proc_down_and_across_c
+
+void vp8_plane_add_noise_c(unsigned char *s, char *noise, char blackclamp[16], char whiteclamp[16], char bothclamp[16], unsigned int w, unsigned int h, int pitch);
+#define vp8_plane_add_noise vp8_plane_add_noise_c
+
+void vp8_blend_mb_inner_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_mb_inner vp8_blend_mb_inner_c
+
+void vp8_blend_mb_outer_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_mb_outer vp8_blend_mb_outer_c
+
+void vp8_blend_b_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_b vp8_blend_b_c
+
+void vp8_filter_by_weight16x16_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+#define vp8_filter_by_weight16x16 vp8_filter_by_weight16x16_c
+
+void vp8_filter_by_weight8x8_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+#define vp8_filter_by_weight8x8 vp8_filter_by_weight8x8_c
+
+void vp8_filter_by_weight4x4_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+#define vp8_filter_by_weight4x4 vp8_filter_by_weight4x4_c
+
+void vp8_sixtap_predict16x16_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+#define vp8_sixtap_predict16x16 vp8_sixtap_predict16x16_c
+
+void vp8_sixtap_predict8x8_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+#define vp8_sixtap_predict8x8 vp8_sixtap_predict8x8_c
+
+void vp8_sixtap_predict8x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+#define vp8_sixtap_predict8x4 vp8_sixtap_predict8x4_c
+
+void vp8_sixtap_predict4x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+#define vp8_sixtap_predict4x4 vp8_sixtap_predict4x4_c
+
+void vp8_bilinear_predict16x16_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+#define vp8_bilinear_predict16x16 vp8_bilinear_predict16x16_c
+
+void vp8_bilinear_predict8x8_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+#define vp8_bilinear_predict8x8 vp8_bilinear_predict8x8_c
+
+void vp8_bilinear_predict8x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+#define vp8_bilinear_predict8x4 vp8_bilinear_predict8x4_c
+
+void vp8_bilinear_predict4x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+#define vp8_bilinear_predict4x4 vp8_bilinear_predict4x4_c
+
+unsigned int vp8_variance4x4_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance4x4 vp8_variance4x4_c
+
+unsigned int vp8_variance8x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance8x8 vp8_variance8x8_c
+
+unsigned int vp8_variance8x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance8x16 vp8_variance8x16_c
+
+unsigned int vp8_variance16x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance16x8 vp8_variance16x8_c
+
+unsigned int vp8_variance16x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance16x16 vp8_variance16x16_c
+
+unsigned int vp8_sub_pixel_variance4x4_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_variance4x4 vp8_sub_pixel_variance4x4_c
+
+unsigned int vp8_sub_pixel_variance8x8_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_variance8x8 vp8_sub_pixel_variance8x8_c
+
+unsigned int vp8_sub_pixel_variance8x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_variance8x16 vp8_sub_pixel_variance8x16_c
+
+unsigned int vp8_sub_pixel_variance16x8_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_variance16x8 vp8_sub_pixel_variance16x8_c
+
+unsigned int vp8_sub_pixel_variance16x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_variance16x16 vp8_sub_pixel_variance16x16_c
+
+unsigned int vp8_variance_halfpixvar16x16_h_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance_halfpixvar16x16_h vp8_variance_halfpixvar16x16_h_c
+
+unsigned int vp8_variance_halfpixvar16x16_v_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance_halfpixvar16x16_v vp8_variance_halfpixvar16x16_v_c
+
+unsigned int vp8_variance_halfpixvar16x16_hv_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance_halfpixvar16x16_hv vp8_variance_halfpixvar16x16_hv_c
+
+unsigned int vp8_sad4x4_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+#define vp8_sad4x4 vp8_sad4x4_c
+
+unsigned int vp8_sad8x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+#define vp8_sad8x8 vp8_sad8x8_c
+
+unsigned int vp8_sad8x16_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+#define vp8_sad8x16 vp8_sad8x16_c
+
+unsigned int vp8_sad16x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+#define vp8_sad16x8 vp8_sad16x8_c
+
+unsigned int vp8_sad16x16_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+#define vp8_sad16x16 vp8_sad16x16_c
+
+void vp8_sad4x4x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+#define vp8_sad4x4x3 vp8_sad4x4x3_c
+
+void vp8_sad8x8x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+#define vp8_sad8x8x3 vp8_sad8x8x3_c
+
+void vp8_sad8x16x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+#define vp8_sad8x16x3 vp8_sad8x16x3_c
+
+void vp8_sad16x8x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+#define vp8_sad16x8x3 vp8_sad16x8x3_c
+
+void vp8_sad16x16x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+#define vp8_sad16x16x3 vp8_sad16x16x3_c
+
+void vp8_sad4x4x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+#define vp8_sad4x4x8 vp8_sad4x4x8_c
+
+void vp8_sad8x8x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+#define vp8_sad8x8x8 vp8_sad8x8x8_c
+
+void vp8_sad8x16x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+#define vp8_sad8x16x8 vp8_sad8x16x8_c
+
+void vp8_sad16x8x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+#define vp8_sad16x8x8 vp8_sad16x8x8_c
+
+void vp8_sad16x16x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+#define vp8_sad16x16x8 vp8_sad16x16x8_c
+
+void vp8_sad4x4x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+#define vp8_sad4x4x4d vp8_sad4x4x4d_c
+
+void vp8_sad8x8x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+#define vp8_sad8x8x4d vp8_sad8x8x4d_c
+
+void vp8_sad8x16x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+#define vp8_sad8x16x4d vp8_sad8x16x4d_c
+
+void vp8_sad16x8x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+#define vp8_sad16x8x4d vp8_sad16x8x4d_c
+
+void vp8_sad16x16x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+#define vp8_sad16x16x4d vp8_sad16x16x4d_c
+
+unsigned int vp8_get_mb_ss_c(const short *);
+#define vp8_get_mb_ss vp8_get_mb_ss_c
+
+unsigned int vp8_sub_pixel_mse16x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_mse16x16 vp8_sub_pixel_mse16x16_c
+
+unsigned int vp8_mse16x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_mse16x16 vp8_mse16x16_c
+
+unsigned int vp8_get4x4sse_cs_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride);
+#define vp8_get4x4sse_cs vp8_get4x4sse_cs_c
+
+void vp8_short_fdct4x4_c(short *input, short *output, int pitch);
+#define vp8_short_fdct4x4 vp8_short_fdct4x4_c
+
+void vp8_short_fdct8x4_c(short *input, short *output, int pitch);
+#define vp8_short_fdct8x4 vp8_short_fdct8x4_c
+
+void vp8_short_walsh4x4_c(short *input, short *output, int pitch);
+#define vp8_short_walsh4x4 vp8_short_walsh4x4_c
+
+void vp8_regular_quantize_b_c(struct block *, struct blockd *);
+#define vp8_regular_quantize_b vp8_regular_quantize_b_c
+
+void vp8_fast_quantize_b_c(struct block *, struct blockd *);
+#define vp8_fast_quantize_b vp8_fast_quantize_b_c
+
+void vp8_regular_quantize_b_pair_c(struct block *b1, struct block *b2, struct blockd *d1, struct blockd *d2);
+#define vp8_regular_quantize_b_pair vp8_regular_quantize_b_pair_c
+
+void vp8_fast_quantize_b_pair_c(struct block *b1, struct block *b2, struct blockd *d1, struct blockd *d2);
+#define vp8_fast_quantize_b_pair vp8_fast_quantize_b_pair_c
+
+void vp8_quantize_mb_c(struct macroblock *);
+#define vp8_quantize_mb vp8_quantize_mb_c
+
+void vp8_quantize_mby_c(struct macroblock *);
+#define vp8_quantize_mby vp8_quantize_mby_c
+
+void vp8_quantize_mbuv_c(struct macroblock *);
+#define vp8_quantize_mbuv vp8_quantize_mbuv_c
+
+int vp8_block_error_c(short *coeff, short *dqcoeff);
+#define vp8_block_error vp8_block_error_c
+
+int vp8_mbblock_error_c(struct macroblock *mb, int dc);
+#define vp8_mbblock_error vp8_mbblock_error_c
+
+int vp8_mbuverror_c(struct macroblock *mb);
+#define vp8_mbuverror vp8_mbuverror_c
+
+void vp8_subtract_b_c(struct block *be, struct blockd *bd, int pitch);
+#define vp8_subtract_b vp8_subtract_b_c
+
+void vp8_subtract_mby_c(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+#define vp8_subtract_mby vp8_subtract_mby_c
+
+void vp8_subtract_mbuv_c(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+#define vp8_subtract_mbuv vp8_subtract_mbuv_c
+
+int vp8_full_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+#define vp8_full_search_sad vp8_full_search_sad_c
+
+int vp8_refining_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+#define vp8_refining_search_sad vp8_refining_search_sad_c
+
+int vp8_diamond_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, union int_mv *best_mv, int search_param, int sad_per_bit, int *num00, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+#define vp8_diamond_search_sad vp8_diamond_search_sad_c
+
+void vp8_yv12_copy_partial_frame_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_partial_frame vp8_yv12_copy_partial_frame_c
+
+int vp8_denoiser_filter_c(struct yv12_buffer_config* mc_running_avg, struct yv12_buffer_config* running_avg, struct macroblock* signal, unsigned int motion_magnitude2, int y_offset, int uv_offset);
+#define vp8_denoiser_filter vp8_denoiser_filter_c
+
+void vp8_horizontal_line_4_5_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_4_5_scale vp8_horizontal_line_4_5_scale_c
+
+void vp8_vertical_band_4_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_4_5_scale vp8_vertical_band_4_5_scale_c
+
+void vp8_last_vertical_band_4_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_4_5_scale vp8_last_vertical_band_4_5_scale_c
+
+void vp8_horizontal_line_2_3_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_2_3_scale vp8_horizontal_line_2_3_scale_c
+
+void vp8_vertical_band_2_3_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_3_scale vp8_vertical_band_2_3_scale_c
+
+void vp8_last_vertical_band_2_3_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_2_3_scale vp8_last_vertical_band_2_3_scale_c
+
+void vp8_horizontal_line_3_5_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_3_5_scale vp8_horizontal_line_3_5_scale_c
+
+void vp8_vertical_band_3_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_3_5_scale vp8_vertical_band_3_5_scale_c
+
+void vp8_last_vertical_band_3_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_3_5_scale vp8_last_vertical_band_3_5_scale_c
+
+void vp8_horizontal_line_3_4_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_3_4_scale vp8_horizontal_line_3_4_scale_c
+
+void vp8_vertical_band_3_4_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_3_4_scale vp8_vertical_band_3_4_scale_c
+
+void vp8_last_vertical_band_3_4_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_3_4_scale vp8_last_vertical_band_3_4_scale_c
+
+void vp8_horizontal_line_1_2_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_1_2_scale vp8_horizontal_line_1_2_scale_c
+
+void vp8_vertical_band_1_2_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_1_2_scale vp8_vertical_band_1_2_scale_c
+
+void vp8_last_vertical_band_1_2_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_1_2_scale vp8_last_vertical_band_1_2_scale_c
+
+void vp8_horizontal_line_5_4_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_5_4_scale vp8_horizontal_line_5_4_scale_c
+
+void vp8_vertical_band_5_4_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_5_4_scale vp8_vertical_band_5_4_scale_c
+
+void vp8_horizontal_line_5_3_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_5_3_scale vp8_horizontal_line_5_3_scale_c
+
+void vp8_vertical_band_5_3_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_5_3_scale vp8_vertical_band_5_3_scale_c
+
+void vp8_horizontal_line_2_1_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_2_1_scale vp8_horizontal_line_2_1_scale_c
+
+void vp8_vertical_band_2_1_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_1_scale vp8_vertical_band_2_1_scale_c
+
+void vp8_vertical_band_2_1_scale_i_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_1_scale_i vp8_vertical_band_2_1_scale_i_c
+
+void vp8_yv12_extend_frame_borders_c(struct yv12_buffer_config *ybf);
+#define vp8_yv12_extend_frame_borders vp8_yv12_extend_frame_borders_c
+
+void vp8_yv12_copy_frame_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_frame vp8_yv12_copy_frame_c
+
+void vp8_yv12_copy_y_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_y vp8_yv12_copy_y_c
+
+void vpx_rtcd(void);
+#include "vpx_config.h"
+
+#ifdef RTCD_C
+#include "vpx_ports/arm.h"
+static void setup_rtcd_internal(void)
+{
+    int flags = arm_cpu_caps();
+
+    (void)flags;
+
+
+}
+#endif
+#endif
diff --git a/third_party/libvpx/source/config/linux/arm/vpx_version.h b/third_party/libvpx/source/config/linux/arm/vpx_version.h
new file mode 100644
index 0000000..f6b740c
--- /dev/null
+++ b/third_party/libvpx/source/config/linux/arm/vpx_version.h
@@ -0,0 +1,7 @@
+#define VERSION_MAJOR  1
+#define VERSION_MINOR  1
+#define VERSION_PATCH  0
+#define VERSION_EXTRA  "6-gdd6134b"
+#define VERSION_PACKED ((VERSION_MAJOR<<16)|(VERSION_MINOR<<8)|(VERSION_PATCH))
+#define VERSION_STRING_NOSP "v1.1.0-6-gdd6134b"
+#define VERSION_STRING      " v1.1.0-6-gdd6134b"
diff --git a/third_party/libvpx/source/config/linux/ia32/vpx_config.asm b/third_party/libvpx/source/config/linux/ia32/vpx_config.asm
new file mode 100644
index 0000000..6c39f37
--- /dev/null
+++ b/third_party/libvpx/source/config/linux/ia32/vpx_config.asm
@@ -0,0 +1,67 @@
+ARCH_ARM equ 0
+ARCH_MIPS equ 0
+ARCH_X86 equ 1
+ARCH_X86_64 equ 0
+ARCH_PPC32 equ 0
+ARCH_PPC64 equ 0
+HAVE_EDSP equ 0
+HAVE_MEDIA equ 0
+HAVE_NEON equ 0
+HAVE_MIPS32 equ 0
+HAVE_MMX equ 1
+HAVE_SSE equ 1
+HAVE_SSE2 equ 1
+HAVE_SSE3 equ 1
+HAVE_SSSE3 equ 1
+HAVE_SSE4_1 equ 1
+HAVE_ALTIVEC equ 0
+HAVE_VPX_PORTS equ 1
+HAVE_STDINT_H equ 1
+HAVE_ALT_TREE_LAYOUT equ 0
+HAVE_PTHREAD_H equ 1
+HAVE_SYS_MMAN_H equ 1
+HAVE_UNISTD_H equ 1
+CONFIG_EXTERNAL_BUILD equ 0
+CONFIG_INSTALL_DOCS equ 0
+CONFIG_INSTALL_BINS equ 0
+CONFIG_INSTALL_LIBS equ 0
+CONFIG_INSTALL_SRCS equ 0
+CONFIG_DEBUG equ 0
+CONFIG_GPROF equ 0
+CONFIG_GCOV equ 0
+CONFIG_RVCT equ 0
+CONFIG_GCC equ 1
+CONFIG_MSVS equ 0
+CONFIG_PIC equ 1
+CONFIG_BIG_ENDIAN equ 0
+CONFIG_CODEC_SRCS equ 0
+CONFIG_DEBUG_LIBS equ 0
+CONFIG_FAST_UNALIGNED equ 1
+CONFIG_MEM_MANAGER equ 0
+CONFIG_MEM_TRACKER equ 0
+CONFIG_MEM_CHECKS equ 0
+CONFIG_MD5 equ 1
+CONFIG_DEQUANT_TOKENS equ 0
+CONFIG_DC_RECON equ 0
+CONFIG_RUNTIME_CPU_DETECT equ 1
+CONFIG_POSTPROC equ 1
+CONFIG_MULTITHREAD equ 1
+CONFIG_INTERNAL_STATS equ 0
+CONFIG_VP8_ENCODER equ 1
+CONFIG_VP8_DECODER equ 1
+CONFIG_VP8 equ 1
+CONFIG_ENCODERS equ 1
+CONFIG_DECODERS equ 1
+CONFIG_STATIC_MSVCRT equ 0
+CONFIG_SPATIAL_RESAMPLING equ 1
+CONFIG_REALTIME_ONLY equ 1
+CONFIG_ONTHEFLY_BITPACKING equ 0
+CONFIG_ERROR_CONCEALMENT equ 1
+CONFIG_SHARED equ 0
+CONFIG_STATIC equ 1
+CONFIG_SMALL equ 0
+CONFIG_POSTPROC_VISUALIZER equ 0
+CONFIG_OS_SUPPORT equ 1
+CONFIG_UNIT_TESTS equ 0
+CONFIG_MULTI_RES_ENCODING equ 1
+CONFIG_TEMPORAL_DENOISING equ 1
diff --git a/third_party/libvpx/source/config/linux/ia32/vpx_config.c b/third_party/libvpx/source/config/linux/ia32/vpx_config.c
new file mode 100644
index 0000000..f843e57
--- /dev/null
+++ b/third_party/libvpx/source/config/linux/ia32/vpx_config.c
@@ -0,0 +1,9 @@
+/* Copyright (c) 2011 The WebM project authors. All Rights Reserved. */
+/*  */
+/* Use of this source code is governed by a BSD-style license */
+/* that can be found in the LICENSE file in the root of the source */
+/* tree. An additional intellectual property rights grant can be found */
+/* in the file PATENTS.  All contributing project authors may */
+/* be found in the AUTHORS file in the root of the source tree. */
+static const char* const cfg = "--target=x86-linux-gcc --enable-pic --enable-error-concealment --disable-install-docs --disable-install-srcs --disable-examples --disable-internal-stats --disable-install-libs --disable-install-bins --enable-realtime-only --enable-multi-res-encoding";
+const char *vpx_codec_build_config(void) {return cfg;}
diff --git a/third_party/libvpx/source/config/linux/ia32/vpx_config.h b/third_party/libvpx/source/config/linux/ia32/vpx_config.h
new file mode 100644
index 0000000..f079b8d
--- /dev/null
+++ b/third_party/libvpx/source/config/linux/ia32/vpx_config.h
@@ -0,0 +1,79 @@
+/* Copyright (c) 2011 The WebM project authors. All Rights Reserved. */
+/*  */
+/* Use of this source code is governed by a BSD-style license */
+/* that can be found in the LICENSE file in the root of the source */
+/* tree. An additional intellectual property rights grant can be found */
+/* in the file PATENTS.  All contributing project authors may */
+/* be found in the AUTHORS file in the root of the source tree. */
+/* This file automatically generated by configure. Do not edit! */
+#ifndef VPX_CONFIG_H
+#define VPX_CONFIG_H
+#define RESTRICT    
+#define ARCH_ARM 0
+#define ARCH_MIPS 0
+#define ARCH_X86 1
+#define ARCH_X86_64 0
+#define ARCH_PPC32 0
+#define ARCH_PPC64 0
+#define HAVE_EDSP 0
+#define HAVE_MEDIA 0
+#define HAVE_NEON 0
+#define HAVE_MIPS32 0
+#define HAVE_MMX 1
+#define HAVE_SSE 1
+#define HAVE_SSE2 1
+#define HAVE_SSE3 1
+#define HAVE_SSSE3 1
+#define HAVE_SSE4_1 1
+#define HAVE_ALTIVEC 0
+#define HAVE_VPX_PORTS 1
+#define HAVE_STDINT_H 1
+#define HAVE_ALT_TREE_LAYOUT 0
+#define HAVE_PTHREAD_H 1
+#define HAVE_SYS_MMAN_H 1
+#define HAVE_UNISTD_H 1
+#define CONFIG_EXTERNAL_BUILD 0
+#define CONFIG_INSTALL_DOCS 0
+#define CONFIG_INSTALL_BINS 0
+#define CONFIG_INSTALL_LIBS 0
+#define CONFIG_INSTALL_SRCS 0
+#define CONFIG_DEBUG 0
+#define CONFIG_GPROF 0
+#define CONFIG_GCOV 0
+#define CONFIG_RVCT 0
+#define CONFIG_GCC 1
+#define CONFIG_MSVS 0
+#define CONFIG_PIC 1
+#define CONFIG_BIG_ENDIAN 0
+#define CONFIG_CODEC_SRCS 0
+#define CONFIG_DEBUG_LIBS 0
+#define CONFIG_FAST_UNALIGNED 1
+#define CONFIG_MEM_MANAGER 0
+#define CONFIG_MEM_TRACKER 0
+#define CONFIG_MEM_CHECKS 0
+#define CONFIG_MD5 1
+#define CONFIG_DEQUANT_TOKENS 0
+#define CONFIG_DC_RECON 0
+#define CONFIG_RUNTIME_CPU_DETECT 1
+#define CONFIG_POSTPROC 1
+#define CONFIG_MULTITHREAD 1
+#define CONFIG_INTERNAL_STATS 0
+#define CONFIG_VP8_ENCODER 1
+#define CONFIG_VP8_DECODER 1
+#define CONFIG_VP8 1
+#define CONFIG_ENCODERS 1
+#define CONFIG_DECODERS 1
+#define CONFIG_STATIC_MSVCRT 0
+#define CONFIG_SPATIAL_RESAMPLING 1
+#define CONFIG_REALTIME_ONLY 1
+#define CONFIG_ONTHEFLY_BITPACKING 0
+#define CONFIG_ERROR_CONCEALMENT 1
+#define CONFIG_SHARED 0
+#define CONFIG_STATIC 1
+#define CONFIG_SMALL 0
+#define CONFIG_POSTPROC_VISUALIZER 0
+#define CONFIG_OS_SUPPORT 1
+#define CONFIG_UNIT_TESTS 0
+#define CONFIG_MULTI_RES_ENCODING 1
+#define CONFIG_TEMPORAL_DENOISING 1
+#endif /* VPX_CONFIG_H */
diff --git a/third_party/libvpx/source/config/linux/ia32/vpx_rtcd.h b/third_party/libvpx/source/config/linux/ia32/vpx_rtcd.h
new file mode 100644
index 0000000..6630af4
--- /dev/null
+++ b/third_party/libvpx/source/config/linux/ia32/vpx_rtcd.h
@@ -0,0 +1,898 @@
+#ifndef VPX_RTCD_
+#define VPX_RTCD_
+
+#ifdef RTCD_C
+#define RTCD_EXTERN
+#else
+#define RTCD_EXTERN extern
+#endif
+
+#include "vp8/common/blockd.h"
+
+struct blockd;
+struct macroblockd;
+struct loop_filter_info;
+
+/* Encoder forward decls */
+struct block;
+struct macroblock;
+struct variance_vtable;
+union int_mv;
+struct yv12_buffer_config;
+
+void vp8_dequantize_b_c(struct blockd*, short *dqc);
+void vp8_dequantize_b_mmx(struct blockd*, short *dqc);
+RTCD_EXTERN void (*vp8_dequantize_b)(struct blockd*, short *dqc);
+
+void vp8_dequant_idct_add_c(short *input, short *dq, unsigned char *output, int stride);
+void vp8_dequant_idct_add_mmx(short *input, short *dq, unsigned char *output, int stride);
+RTCD_EXTERN void (*vp8_dequant_idct_add)(short *input, short *dq, unsigned char *output, int stride);
+
+void vp8_dequant_idct_add_y_block_c(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+void vp8_dequant_idct_add_y_block_mmx(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+void vp8_dequant_idct_add_y_block_sse2(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+RTCD_EXTERN void (*vp8_dequant_idct_add_y_block)(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+
+void vp8_dequant_idct_add_uv_block_c(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+void vp8_dequant_idct_add_uv_block_mmx(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+void vp8_dequant_idct_add_uv_block_sse2(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+RTCD_EXTERN void (*vp8_dequant_idct_add_uv_block)(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+
+void vp8_loop_filter_mbv_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbv_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbv_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+RTCD_EXTERN void (*vp8_loop_filter_mbv)(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+
+void vp8_loop_filter_bv_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bv_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bv_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+RTCD_EXTERN void (*vp8_loop_filter_bv)(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+
+void vp8_loop_filter_mbh_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbh_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbh_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+RTCD_EXTERN void (*vp8_loop_filter_mbh)(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+
+void vp8_loop_filter_bh_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bh_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bh_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+RTCD_EXTERN void (*vp8_loop_filter_bh)(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+
+void vp8_loop_filter_simple_vertical_edge_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_vertical_edge_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_vertical_edge_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+RTCD_EXTERN void (*vp8_loop_filter_simple_mbv)(unsigned char *y, int ystride, const unsigned char *blimit);
+
+void vp8_loop_filter_simple_horizontal_edge_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_horizontal_edge_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_horizontal_edge_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+RTCD_EXTERN void (*vp8_loop_filter_simple_mbh)(unsigned char *y, int ystride, const unsigned char *blimit);
+
+void vp8_loop_filter_bvs_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bvs_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bvs_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+RTCD_EXTERN void (*vp8_loop_filter_simple_bv)(unsigned char *y, int ystride, const unsigned char *blimit);
+
+void vp8_loop_filter_bhs_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bhs_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bhs_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+RTCD_EXTERN void (*vp8_loop_filter_simple_bh)(unsigned char *y, int ystride, const unsigned char *blimit);
+
+void vp8_short_idct4x4llm_c(short *input, unsigned char *pred, int pitch, unsigned char *dst, int dst_stride);
+void vp8_short_idct4x4llm_mmx(short *input, unsigned char *pred, int pitch, unsigned char *dst, int dst_stride);
+RTCD_EXTERN void (*vp8_short_idct4x4llm)(short *input, unsigned char *pred, int pitch, unsigned char *dst, int dst_stride);
+
+void vp8_short_inv_walsh4x4_1_c(short *input, short *output);
+#define vp8_short_inv_walsh4x4_1 vp8_short_inv_walsh4x4_1_c
+
+void vp8_short_inv_walsh4x4_c(short *input, short *output);
+void vp8_short_inv_walsh4x4_mmx(short *input, short *output);
+void vp8_short_inv_walsh4x4_sse2(short *input, short *output);
+RTCD_EXTERN void (*vp8_short_inv_walsh4x4)(short *input, short *output);
+
+void vp8_dc_only_idct_add_c(short input, unsigned char *pred, int pred_stride, unsigned char *dst, int dst_stride);
+void vp8_dc_only_idct_add_mmx(short input, unsigned char *pred, int pred_stride, unsigned char *dst, int dst_stride);
+RTCD_EXTERN void (*vp8_dc_only_idct_add)(short input, unsigned char *pred, int pred_stride, unsigned char *dst, int dst_stride);
+
+void vp8_copy_mem16x16_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem16x16_mmx(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem16x16_sse2(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_copy_mem16x16)(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+
+void vp8_copy_mem8x8_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem8x8_mmx(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_copy_mem8x8)(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+
+void vp8_copy_mem8x4_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem8x4_mmx(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_copy_mem8x4)(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+
+void vp8_build_intra_predictors_mby_s_c(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+void vp8_build_intra_predictors_mby_s_sse2(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+void vp8_build_intra_predictors_mby_s_ssse3(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+RTCD_EXTERN void (*vp8_build_intra_predictors_mby_s)(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+
+void vp8_build_intra_predictors_mbuv_s_c(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+void vp8_build_intra_predictors_mbuv_s_sse2(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+void vp8_build_intra_predictors_mbuv_s_ssse3(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+RTCD_EXTERN void (*vp8_build_intra_predictors_mbuv_s)(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+
+void vp8_intra4x4_predict_c(unsigned char *Above, unsigned char *yleft, int left_stride, B_PREDICTION_MODE b_mode, unsigned char *dst, int dst_stride, unsigned char top_left);
+#define vp8_intra4x4_predict vp8_intra4x4_predict_c
+
+void vp8_mbpost_proc_down_c(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+void vp8_mbpost_proc_down_mmx(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+void vp8_mbpost_proc_down_xmm(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+RTCD_EXTERN void (*vp8_mbpost_proc_down)(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+
+void vp8_mbpost_proc_across_ip_c(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+void vp8_mbpost_proc_across_ip_xmm(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+RTCD_EXTERN void (*vp8_mbpost_proc_across_ip)(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+
+void vp8_post_proc_down_and_across_c(unsigned char *src, unsigned char *dst, int src_pitch, int dst_pitch, int rows, int cols, int flimit);
+void vp8_post_proc_down_and_across_mmx(unsigned char *src, unsigned char *dst, int src_pitch, int dst_pitch, int rows, int cols, int flimit);
+void vp8_post_proc_down_and_across_xmm(unsigned char *src, unsigned char *dst, int src_pitch, int dst_pitch, int rows, int cols, int flimit);
+RTCD_EXTERN void (*vp8_post_proc_down_and_across)(unsigned char *src, unsigned char *dst, int src_pitch, int dst_pitch, int rows, int cols, int flimit);
+
+void vp8_plane_add_noise_c(unsigned char *s, char *noise, char blackclamp[16], char whiteclamp[16], char bothclamp[16], unsigned int w, unsigned int h, int pitch);
+void vp8_plane_add_noise_mmx(unsigned char *s, char *noise, char blackclamp[16], char whiteclamp[16], char bothclamp[16], unsigned int w, unsigned int h, int pitch);
+void vp8_plane_add_noise_wmt(unsigned char *s, char *noise, char blackclamp[16], char whiteclamp[16], char bothclamp[16], unsigned int w, unsigned int h, int pitch);
+RTCD_EXTERN void (*vp8_plane_add_noise)(unsigned char *s, char *noise, char blackclamp[16], char whiteclamp[16], char bothclamp[16], unsigned int w, unsigned int h, int pitch);
+
+void vp8_blend_mb_inner_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_mb_inner vp8_blend_mb_inner_c
+
+void vp8_blend_mb_outer_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_mb_outer vp8_blend_mb_outer_c
+
+void vp8_blend_b_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_b vp8_blend_b_c
+
+void vp8_filter_by_weight16x16_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+void vp8_filter_by_weight16x16_sse2(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+RTCD_EXTERN void (*vp8_filter_by_weight16x16)(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+
+void vp8_filter_by_weight8x8_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+void vp8_filter_by_weight8x8_sse2(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+RTCD_EXTERN void (*vp8_filter_by_weight8x8)(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+
+void vp8_filter_by_weight4x4_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+#define vp8_filter_by_weight4x4 vp8_filter_by_weight4x4_c
+
+void vp8_sixtap_predict16x16_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict16x16_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict16x16_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict16x16_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict16x16)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_sixtap_predict8x8_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x8_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x8_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x8_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict8x8)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_sixtap_predict8x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x4_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x4_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict8x4)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_sixtap_predict4x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict4x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict4x4_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict4x4)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_bilinear_predict16x16_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict16x16_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict16x16_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict16x16_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_bilinear_predict16x16)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_bilinear_predict8x8_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x8_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x8_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x8_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_bilinear_predict8x8)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_bilinear_predict8x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_bilinear_predict8x4)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_bilinear_predict4x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict4x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_bilinear_predict4x4)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+unsigned int vp8_variance4x4_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance4x4_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance4x4_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance4x4)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_variance8x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x8_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x8_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance8x8)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_variance8x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x16_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x16_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance8x16)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_variance16x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x8_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x8_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance16x8)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_variance16x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x16_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x16_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance16x16)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_sub_pixel_variance4x4_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance4x4_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance4x4_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_variance4x4)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_sub_pixel_variance8x8_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x8_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x8_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_variance8x8)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_sub_pixel_variance8x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x16_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x16_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_variance8x16)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_sub_pixel_variance16x8_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x8_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x8_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x8_ssse3(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_variance16x8)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_sub_pixel_variance16x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x16_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x16_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x16_ssse3(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_variance16x16)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_variance_halfpixvar16x16_h_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_h_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_h_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance_halfpixvar16x16_h)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_variance_halfpixvar16x16_v_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_v_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_v_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance_halfpixvar16x16_v)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_variance_halfpixvar16x16_hv_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_hv_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_hv_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance_halfpixvar16x16_hv)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_sad4x4_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad4x4_mmx(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad4x4_wmt(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+RTCD_EXTERN unsigned int (*vp8_sad4x4)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+
+unsigned int vp8_sad8x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad8x8_mmx(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad8x8_wmt(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+RTCD_EXTERN unsigned int (*vp8_sad8x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+
+unsigned int vp8_sad8x16_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad8x16_mmx(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad8x16_wmt(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+RTCD_EXTERN unsigned int (*vp8_sad8x16)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+
+unsigned int vp8_sad16x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad16x8_mmx(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad16x8_wmt(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+RTCD_EXTERN unsigned int (*vp8_sad16x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+
+unsigned int vp8_sad16x16_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad16x16_mmx(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad16x16_wmt(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad16x16_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+RTCD_EXTERN unsigned int (*vp8_sad16x16)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+
+void vp8_sad4x4x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad4x4x3_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad4x4x3)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x8x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x8x3_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x8x3)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x16x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x16x3_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x16x3)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x8x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x8x3_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x8x3_ssse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x8x3)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x16x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x16x3_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x16x3_ssse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x16x3)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad4x4x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad4x4x8_sse4(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad4x4x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad8x8x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad8x8x8_sse4(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad8x8x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad8x16x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad8x16x8_sse4(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad8x16x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad16x8x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad16x8x8_sse4(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad16x8x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad16x16x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad16x16x8_sse4(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad16x16x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad4x4x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+void vp8_sad4x4x4d_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad4x4x4d)(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x8x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x8x4d_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x8x4d)(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x16x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x16x4d_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x16x4d)(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x8x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x8x4d_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x8x4d)(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x16x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x16x4d_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x16x4d)(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+
+unsigned int vp8_get_mb_ss_c(const short *);
+unsigned int vp8_get_mb_ss_mmx(const short *);
+unsigned int vp8_get_mb_ss_sse2(const short *);
+RTCD_EXTERN unsigned int (*vp8_get_mb_ss)(const short *);
+
+unsigned int vp8_sub_pixel_mse16x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_mse16x16_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_mse16x16_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_mse16x16)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_mse16x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_mse16x16_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_mse16x16_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_mse16x16)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_get4x4sse_cs_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride);
+unsigned int vp8_get4x4sse_cs_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride);
+RTCD_EXTERN unsigned int (*vp8_get4x4sse_cs)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride);
+
+void vp8_copy32xn_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+void vp8_copy32xn_sse2(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+void vp8_copy32xn_sse3(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+RTCD_EXTERN void (*vp8_copy32xn)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+
+void vp8_short_fdct4x4_c(short *input, short *output, int pitch);
+void vp8_short_fdct4x4_mmx(short *input, short *output, int pitch);
+void vp8_short_fdct4x4_sse2(short *input, short *output, int pitch);
+RTCD_EXTERN void (*vp8_short_fdct4x4)(short *input, short *output, int pitch);
+
+void vp8_short_fdct8x4_c(short *input, short *output, int pitch);
+void vp8_short_fdct8x4_mmx(short *input, short *output, int pitch);
+void vp8_short_fdct8x4_sse2(short *input, short *output, int pitch);
+RTCD_EXTERN void (*vp8_short_fdct8x4)(short *input, short *output, int pitch);
+
+void vp8_short_walsh4x4_c(short *input, short *output, int pitch);
+void vp8_short_walsh4x4_sse2(short *input, short *output, int pitch);
+RTCD_EXTERN void (*vp8_short_walsh4x4)(short *input, short *output, int pitch);
+
+void vp8_regular_quantize_b_c(struct block *, struct blockd *);
+void vp8_regular_quantize_b_sse2(struct block *, struct blockd *);
+void vp8_regular_quantize_b_sse4(struct block *, struct blockd *);
+RTCD_EXTERN void (*vp8_regular_quantize_b)(struct block *, struct blockd *);
+
+void vp8_fast_quantize_b_c(struct block *, struct blockd *);
+void vp8_fast_quantize_b_sse2(struct block *, struct blockd *);
+void vp8_fast_quantize_b_ssse3(struct block *, struct blockd *);
+RTCD_EXTERN void (*vp8_fast_quantize_b)(struct block *, struct blockd *);
+
+void vp8_regular_quantize_b_pair_c(struct block *b1, struct block *b2, struct blockd *d1, struct blockd *d2);
+#define vp8_regular_quantize_b_pair vp8_regular_quantize_b_pair_c
+
+void vp8_fast_quantize_b_pair_c(struct block *b1, struct block *b2, struct blockd *d1, struct blockd *d2);
+#define vp8_fast_quantize_b_pair vp8_fast_quantize_b_pair_c
+
+void vp8_quantize_mb_c(struct macroblock *);
+#define vp8_quantize_mb vp8_quantize_mb_c
+
+void vp8_quantize_mby_c(struct macroblock *);
+#define vp8_quantize_mby vp8_quantize_mby_c
+
+void vp8_quantize_mbuv_c(struct macroblock *);
+#define vp8_quantize_mbuv vp8_quantize_mbuv_c
+
+int vp8_block_error_c(short *coeff, short *dqcoeff);
+int vp8_block_error_mmx(short *coeff, short *dqcoeff);
+int vp8_block_error_xmm(short *coeff, short *dqcoeff);
+RTCD_EXTERN int (*vp8_block_error)(short *coeff, short *dqcoeff);
+
+int vp8_mbblock_error_c(struct macroblock *mb, int dc);
+int vp8_mbblock_error_mmx(struct macroblock *mb, int dc);
+int vp8_mbblock_error_xmm(struct macroblock *mb, int dc);
+RTCD_EXTERN int (*vp8_mbblock_error)(struct macroblock *mb, int dc);
+
+int vp8_mbuverror_c(struct macroblock *mb);
+int vp8_mbuverror_mmx(struct macroblock *mb);
+int vp8_mbuverror_xmm(struct macroblock *mb);
+RTCD_EXTERN int (*vp8_mbuverror)(struct macroblock *mb);
+
+void vp8_subtract_b_c(struct block *be, struct blockd *bd, int pitch);
+void vp8_subtract_b_mmx(struct block *be, struct blockd *bd, int pitch);
+void vp8_subtract_b_sse2(struct block *be, struct blockd *bd, int pitch);
+RTCD_EXTERN void (*vp8_subtract_b)(struct block *be, struct blockd *bd, int pitch);
+
+void vp8_subtract_mby_c(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+void vp8_subtract_mby_mmx(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+void vp8_subtract_mby_sse2(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+RTCD_EXTERN void (*vp8_subtract_mby)(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+
+void vp8_subtract_mbuv_c(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+void vp8_subtract_mbuv_mmx(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+void vp8_subtract_mbuv_sse2(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+RTCD_EXTERN void (*vp8_subtract_mbuv)(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+
+int vp8_full_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_full_search_sadx3(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_full_search_sadx8(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+RTCD_EXTERN int (*vp8_full_search_sad)(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+
+int vp8_refining_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_refining_search_sadx4(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+RTCD_EXTERN int (*vp8_refining_search_sad)(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+
+int vp8_diamond_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, union int_mv *best_mv, int search_param, int sad_per_bit, int *num00, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_diamond_search_sadx4(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, union int_mv *best_mv, int search_param, int sad_per_bit, int *num00, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+RTCD_EXTERN int (*vp8_diamond_search_sad)(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, union int_mv *best_mv, int search_param, int sad_per_bit, int *num00, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+
+void vp8_yv12_copy_partial_frame_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_partial_frame vp8_yv12_copy_partial_frame_c
+
+int vp8_denoiser_filter_c(struct yv12_buffer_config* mc_running_avg, struct yv12_buffer_config* running_avg, struct macroblock* signal, unsigned int motion_magnitude2, int y_offset, int uv_offset);
+int vp8_denoiser_filter_sse2(struct yv12_buffer_config* mc_running_avg, struct yv12_buffer_config* running_avg, struct macroblock* signal, unsigned int motion_magnitude2, int y_offset, int uv_offset);
+RTCD_EXTERN int (*vp8_denoiser_filter)(struct yv12_buffer_config* mc_running_avg, struct yv12_buffer_config* running_avg, struct macroblock* signal, unsigned int motion_magnitude2, int y_offset, int uv_offset);
+
+void vp8_horizontal_line_4_5_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_4_5_scale vp8_horizontal_line_4_5_scale_c
+
+void vp8_vertical_band_4_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_4_5_scale vp8_vertical_band_4_5_scale_c
+
+void vp8_last_vertical_band_4_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_4_5_scale vp8_last_vertical_band_4_5_scale_c
+
+void vp8_horizontal_line_2_3_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_2_3_scale vp8_horizontal_line_2_3_scale_c
+
+void vp8_vertical_band_2_3_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_3_scale vp8_vertical_band_2_3_scale_c
+
+void vp8_last_vertical_band_2_3_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_2_3_scale vp8_last_vertical_band_2_3_scale_c
+
+void vp8_horizontal_line_3_5_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_3_5_scale vp8_horizontal_line_3_5_scale_c
+
+void vp8_vertical_band_3_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_3_5_scale vp8_vertical_band_3_5_scale_c
+
+void vp8_last_vertical_band_3_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_3_5_scale vp8_last_vertical_band_3_5_scale_c
+
+void vp8_horizontal_line_3_4_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_3_4_scale vp8_horizontal_line_3_4_scale_c
+
+void vp8_vertical_band_3_4_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_3_4_scale vp8_vertical_band_3_4_scale_c
+
+void vp8_last_vertical_band_3_4_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_3_4_scale vp8_last_vertical_band_3_4_scale_c
+
+void vp8_horizontal_line_1_2_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_1_2_scale vp8_horizontal_line_1_2_scale_c
+
+void vp8_vertical_band_1_2_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_1_2_scale vp8_vertical_band_1_2_scale_c
+
+void vp8_last_vertical_band_1_2_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_1_2_scale vp8_last_vertical_band_1_2_scale_c
+
+void vp8_horizontal_line_5_4_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_5_4_scale vp8_horizontal_line_5_4_scale_c
+
+void vp8_vertical_band_5_4_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_5_4_scale vp8_vertical_band_5_4_scale_c
+
+void vp8_horizontal_line_5_3_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_5_3_scale vp8_horizontal_line_5_3_scale_c
+
+void vp8_vertical_band_5_3_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_5_3_scale vp8_vertical_band_5_3_scale_c
+
+void vp8_horizontal_line_2_1_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_2_1_scale vp8_horizontal_line_2_1_scale_c
+
+void vp8_vertical_band_2_1_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_1_scale vp8_vertical_band_2_1_scale_c
+
+void vp8_vertical_band_2_1_scale_i_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_1_scale_i vp8_vertical_band_2_1_scale_i_c
+
+void vp8_yv12_extend_frame_borders_c(struct yv12_buffer_config *ybf);
+#define vp8_yv12_extend_frame_borders vp8_yv12_extend_frame_borders_c
+
+void vp8_yv12_copy_frame_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_frame vp8_yv12_copy_frame_c
+
+void vp8_yv12_copy_y_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_y vp8_yv12_copy_y_c
+
+void vpx_rtcd(void);
+
+#ifdef RTCD_C
+#include "vpx_ports/x86.h"
+static void setup_rtcd_internal(void)
+{
+    int flags = x86_simd_caps();
+
+    (void)flags;
+
+    vp8_dequantize_b = vp8_dequantize_b_c;
+    if (flags & HAS_MMX) vp8_dequantize_b = vp8_dequantize_b_mmx;
+
+    vp8_dequant_idct_add = vp8_dequant_idct_add_c;
+    if (flags & HAS_MMX) vp8_dequant_idct_add = vp8_dequant_idct_add_mmx;
+
+    vp8_dequant_idct_add_y_block = vp8_dequant_idct_add_y_block_c;
+    if (flags & HAS_MMX) vp8_dequant_idct_add_y_block = vp8_dequant_idct_add_y_block_mmx;
+    if (flags & HAS_SSE2) vp8_dequant_idct_add_y_block = vp8_dequant_idct_add_y_block_sse2;
+
+    vp8_dequant_idct_add_uv_block = vp8_dequant_idct_add_uv_block_c;
+    if (flags & HAS_MMX) vp8_dequant_idct_add_uv_block = vp8_dequant_idct_add_uv_block_mmx;
+    if (flags & HAS_SSE2) vp8_dequant_idct_add_uv_block = vp8_dequant_idct_add_uv_block_sse2;
+
+    vp8_loop_filter_mbv = vp8_loop_filter_mbv_c;
+    if (flags & HAS_MMX) vp8_loop_filter_mbv = vp8_loop_filter_mbv_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_mbv = vp8_loop_filter_mbv_sse2;
+
+    vp8_loop_filter_bv = vp8_loop_filter_bv_c;
+    if (flags & HAS_MMX) vp8_loop_filter_bv = vp8_loop_filter_bv_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_bv = vp8_loop_filter_bv_sse2;
+
+    vp8_loop_filter_mbh = vp8_loop_filter_mbh_c;
+    if (flags & HAS_MMX) vp8_loop_filter_mbh = vp8_loop_filter_mbh_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_mbh = vp8_loop_filter_mbh_sse2;
+
+    vp8_loop_filter_bh = vp8_loop_filter_bh_c;
+    if (flags & HAS_MMX) vp8_loop_filter_bh = vp8_loop_filter_bh_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_bh = vp8_loop_filter_bh_sse2;
+
+    vp8_loop_filter_simple_mbv = vp8_loop_filter_simple_vertical_edge_c;
+    if (flags & HAS_MMX) vp8_loop_filter_simple_mbv = vp8_loop_filter_simple_vertical_edge_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_simple_mbv = vp8_loop_filter_simple_vertical_edge_sse2;
+
+    vp8_loop_filter_simple_mbh = vp8_loop_filter_simple_horizontal_edge_c;
+    if (flags & HAS_MMX) vp8_loop_filter_simple_mbh = vp8_loop_filter_simple_horizontal_edge_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_simple_mbh = vp8_loop_filter_simple_horizontal_edge_sse2;
+
+    vp8_loop_filter_simple_bv = vp8_loop_filter_bvs_c;
+    if (flags & HAS_MMX) vp8_loop_filter_simple_bv = vp8_loop_filter_bvs_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_simple_bv = vp8_loop_filter_bvs_sse2;
+
+    vp8_loop_filter_simple_bh = vp8_loop_filter_bhs_c;
+    if (flags & HAS_MMX) vp8_loop_filter_simple_bh = vp8_loop_filter_bhs_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_simple_bh = vp8_loop_filter_bhs_sse2;
+
+    vp8_short_idct4x4llm = vp8_short_idct4x4llm_c;
+    if (flags & HAS_MMX) vp8_short_idct4x4llm = vp8_short_idct4x4llm_mmx;
+
+
+    vp8_short_inv_walsh4x4 = vp8_short_inv_walsh4x4_c;
+    if (flags & HAS_MMX) vp8_short_inv_walsh4x4 = vp8_short_inv_walsh4x4_mmx;
+    if (flags & HAS_SSE2) vp8_short_inv_walsh4x4 = vp8_short_inv_walsh4x4_sse2;
+
+    vp8_dc_only_idct_add = vp8_dc_only_idct_add_c;
+    if (flags & HAS_MMX) vp8_dc_only_idct_add = vp8_dc_only_idct_add_mmx;
+
+    vp8_copy_mem16x16 = vp8_copy_mem16x16_c;
+    if (flags & HAS_MMX) vp8_copy_mem16x16 = vp8_copy_mem16x16_mmx;
+    if (flags & HAS_SSE2) vp8_copy_mem16x16 = vp8_copy_mem16x16_sse2;
+
+    vp8_copy_mem8x8 = vp8_copy_mem8x8_c;
+    if (flags & HAS_MMX) vp8_copy_mem8x8 = vp8_copy_mem8x8_mmx;
+
+    vp8_copy_mem8x4 = vp8_copy_mem8x4_c;
+    if (flags & HAS_MMX) vp8_copy_mem8x4 = vp8_copy_mem8x4_mmx;
+
+    vp8_build_intra_predictors_mby_s = vp8_build_intra_predictors_mby_s_c;
+    if (flags & HAS_SSE2) vp8_build_intra_predictors_mby_s = vp8_build_intra_predictors_mby_s_sse2;
+    if (flags & HAS_SSSE3) vp8_build_intra_predictors_mby_s = vp8_build_intra_predictors_mby_s_ssse3;
+
+    vp8_build_intra_predictors_mbuv_s = vp8_build_intra_predictors_mbuv_s_c;
+    if (flags & HAS_SSE2) vp8_build_intra_predictors_mbuv_s = vp8_build_intra_predictors_mbuv_s_sse2;
+    if (flags & HAS_SSSE3) vp8_build_intra_predictors_mbuv_s = vp8_build_intra_predictors_mbuv_s_ssse3;
+
+
+    vp8_mbpost_proc_down = vp8_mbpost_proc_down_c;
+    if (flags & HAS_MMX) vp8_mbpost_proc_down = vp8_mbpost_proc_down_mmx;
+    if (flags & HAS_SSE2) vp8_mbpost_proc_down = vp8_mbpost_proc_down_xmm;
+
+    vp8_mbpost_proc_across_ip = vp8_mbpost_proc_across_ip_c;
+    if (flags & HAS_SSE2) vp8_mbpost_proc_across_ip = vp8_mbpost_proc_across_ip_xmm;
+
+    vp8_post_proc_down_and_across = vp8_post_proc_down_and_across_c;
+    if (flags & HAS_MMX) vp8_post_proc_down_and_across = vp8_post_proc_down_and_across_mmx;
+    if (flags & HAS_SSE2) vp8_post_proc_down_and_across = vp8_post_proc_down_and_across_xmm;
+
+    vp8_plane_add_noise = vp8_plane_add_noise_c;
+    if (flags & HAS_MMX) vp8_plane_add_noise = vp8_plane_add_noise_mmx;
+    if (flags & HAS_SSE2) vp8_plane_add_noise = vp8_plane_add_noise_wmt;
+
+
+
+
+    vp8_filter_by_weight16x16 = vp8_filter_by_weight16x16_c;
+    if (flags & HAS_SSE2) vp8_filter_by_weight16x16 = vp8_filter_by_weight16x16_sse2;
+
+    vp8_filter_by_weight8x8 = vp8_filter_by_weight8x8_c;
+    if (flags & HAS_SSE2) vp8_filter_by_weight8x8 = vp8_filter_by_weight8x8_sse2;
+
+
+    vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_c;
+    if (flags & HAS_MMX) vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_mmx;
+    if (flags & HAS_SSE2) vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_sse2;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_ssse3;
+
+    vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_c;
+    if (flags & HAS_MMX) vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_mmx;
+    if (flags & HAS_SSE2) vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_sse2;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_ssse3;
+
+    vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_c;
+    if (flags & HAS_MMX) vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_mmx;
+    if (flags & HAS_SSE2) vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_sse2;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_ssse3;
+
+    vp8_sixtap_predict4x4 = vp8_sixtap_predict4x4_c;
+    if (flags & HAS_MMX) vp8_sixtap_predict4x4 = vp8_sixtap_predict4x4_mmx;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict4x4 = vp8_sixtap_predict4x4_ssse3;
+
+    vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_c;
+    if (flags & HAS_MMX) vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_mmx;
+    if (flags & HAS_SSE2) vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_sse2;
+    if (flags & HAS_SSSE3) vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_ssse3;
+
+    vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_c;
+    if (flags & HAS_MMX) vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_mmx;
+    if (flags & HAS_SSE2) vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_sse2;
+    if (flags & HAS_SSSE3) vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_ssse3;
+
+    vp8_bilinear_predict8x4 = vp8_bilinear_predict8x4_c;
+    if (flags & HAS_MMX) vp8_bilinear_predict8x4 = vp8_bilinear_predict8x4_mmx;
+
+    vp8_bilinear_predict4x4 = vp8_bilinear_predict4x4_c;
+    if (flags & HAS_MMX) vp8_bilinear_predict4x4 = vp8_bilinear_predict4x4_mmx;
+
+    vp8_variance4x4 = vp8_variance4x4_c;
+    if (flags & HAS_MMX) vp8_variance4x4 = vp8_variance4x4_mmx;
+    if (flags & HAS_SSE2) vp8_variance4x4 = vp8_variance4x4_wmt;
+
+    vp8_variance8x8 = vp8_variance8x8_c;
+    if (flags & HAS_MMX) vp8_variance8x8 = vp8_variance8x8_mmx;
+    if (flags & HAS_SSE2) vp8_variance8x8 = vp8_variance8x8_wmt;
+
+    vp8_variance8x16 = vp8_variance8x16_c;
+    if (flags & HAS_MMX) vp8_variance8x16 = vp8_variance8x16_mmx;
+    if (flags & HAS_SSE2) vp8_variance8x16 = vp8_variance8x16_wmt;
+
+    vp8_variance16x8 = vp8_variance16x8_c;
+    if (flags & HAS_MMX) vp8_variance16x8 = vp8_variance16x8_mmx;
+    if (flags & HAS_SSE2) vp8_variance16x8 = vp8_variance16x8_wmt;
+
+    vp8_variance16x16 = vp8_variance16x16_c;
+    if (flags & HAS_MMX) vp8_variance16x16 = vp8_variance16x16_mmx;
+    if (flags & HAS_SSE2) vp8_variance16x16 = vp8_variance16x16_wmt;
+
+    vp8_sub_pixel_variance4x4 = vp8_sub_pixel_variance4x4_c;
+    if (flags & HAS_MMX) vp8_sub_pixel_variance4x4 = vp8_sub_pixel_variance4x4_mmx;
+    if (flags & HAS_SSE2) vp8_sub_pixel_variance4x4 = vp8_sub_pixel_variance4x4_wmt;
+
+    vp8_sub_pixel_variance8x8 = vp8_sub_pixel_variance8x8_c;
+    if (flags & HAS_MMX) vp8_sub_pixel_variance8x8 = vp8_sub_pixel_variance8x8_mmx;
+    if (flags & HAS_SSE2) vp8_sub_pixel_variance8x8 = vp8_sub_pixel_variance8x8_wmt;
+
+    vp8_sub_pixel_variance8x16 = vp8_sub_pixel_variance8x16_c;
+    if (flags & HAS_MMX) vp8_sub_pixel_variance8x16 = vp8_sub_pixel_variance8x16_mmx;
+    if (flags & HAS_SSE2) vp8_sub_pixel_variance8x16 = vp8_sub_pixel_variance8x16_wmt;
+
+    vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_c;
+    if (flags & HAS_MMX) vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_mmx;
+    if (flags & HAS_SSE2) vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_wmt;
+    if (flags & HAS_SSSE3) vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_ssse3;
+
+    vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_c;
+    if (flags & HAS_MMX) vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_mmx;
+    if (flags & HAS_SSE2) vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_wmt;
+    if (flags & HAS_SSSE3) vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_ssse3;
+
+    vp8_variance_halfpixvar16x16_h = vp8_variance_halfpixvar16x16_h_c;
+    if (flags & HAS_MMX) vp8_variance_halfpixvar16x16_h = vp8_variance_halfpixvar16x16_h_mmx;
+    if (flags & HAS_SSE2) vp8_variance_halfpixvar16x16_h = vp8_variance_halfpixvar16x16_h_wmt;
+
+    vp8_variance_halfpixvar16x16_v = vp8_variance_halfpixvar16x16_v_c;
+    if (flags & HAS_MMX) vp8_variance_halfpixvar16x16_v = vp8_variance_halfpixvar16x16_v_mmx;
+    if (flags & HAS_SSE2) vp8_variance_halfpixvar16x16_v = vp8_variance_halfpixvar16x16_v_wmt;
+
+    vp8_variance_halfpixvar16x16_hv = vp8_variance_halfpixvar16x16_hv_c;
+    if (flags & HAS_MMX) vp8_variance_halfpixvar16x16_hv = vp8_variance_halfpixvar16x16_hv_mmx;
+    if (flags & HAS_SSE2) vp8_variance_halfpixvar16x16_hv = vp8_variance_halfpixvar16x16_hv_wmt;
+
+    vp8_sad4x4 = vp8_sad4x4_c;
+    if (flags & HAS_MMX) vp8_sad4x4 = vp8_sad4x4_mmx;
+    if (flags & HAS_SSE2) vp8_sad4x4 = vp8_sad4x4_wmt;
+
+    vp8_sad8x8 = vp8_sad8x8_c;
+    if (flags & HAS_MMX) vp8_sad8x8 = vp8_sad8x8_mmx;
+    if (flags & HAS_SSE2) vp8_sad8x8 = vp8_sad8x8_wmt;
+
+    vp8_sad8x16 = vp8_sad8x16_c;
+    if (flags & HAS_MMX) vp8_sad8x16 = vp8_sad8x16_mmx;
+    if (flags & HAS_SSE2) vp8_sad8x16 = vp8_sad8x16_wmt;
+
+    vp8_sad16x8 = vp8_sad16x8_c;
+    if (flags & HAS_MMX) vp8_sad16x8 = vp8_sad16x8_mmx;
+    if (flags & HAS_SSE2) vp8_sad16x8 = vp8_sad16x8_wmt;
+
+    vp8_sad16x16 = vp8_sad16x16_c;
+    if (flags & HAS_MMX) vp8_sad16x16 = vp8_sad16x16_mmx;
+    if (flags & HAS_SSE2) vp8_sad16x16 = vp8_sad16x16_wmt;
+    if (flags & HAS_SSE3) vp8_sad16x16 = vp8_sad16x16_sse3;
+
+    vp8_sad4x4x3 = vp8_sad4x4x3_c;
+    if (flags & HAS_SSE3) vp8_sad4x4x3 = vp8_sad4x4x3_sse3;
+
+    vp8_sad8x8x3 = vp8_sad8x8x3_c;
+    if (flags & HAS_SSE3) vp8_sad8x8x3 = vp8_sad8x8x3_sse3;
+
+    vp8_sad8x16x3 = vp8_sad8x16x3_c;
+    if (flags & HAS_SSE3) vp8_sad8x16x3 = vp8_sad8x16x3_sse3;
+
+    vp8_sad16x8x3 = vp8_sad16x8x3_c;
+    if (flags & HAS_SSE3) vp8_sad16x8x3 = vp8_sad16x8x3_sse3;
+    if (flags & HAS_SSSE3) vp8_sad16x8x3 = vp8_sad16x8x3_ssse3;
+
+    vp8_sad16x16x3 = vp8_sad16x16x3_c;
+    if (flags & HAS_SSE3) vp8_sad16x16x3 = vp8_sad16x16x3_sse3;
+    if (flags & HAS_SSSE3) vp8_sad16x16x3 = vp8_sad16x16x3_ssse3;
+
+    vp8_sad4x4x8 = vp8_sad4x4x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad4x4x8 = vp8_sad4x4x8_sse4;
+
+    vp8_sad8x8x8 = vp8_sad8x8x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad8x8x8 = vp8_sad8x8x8_sse4;
+
+    vp8_sad8x16x8 = vp8_sad8x16x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad8x16x8 = vp8_sad8x16x8_sse4;
+
+    vp8_sad16x8x8 = vp8_sad16x8x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad16x8x8 = vp8_sad16x8x8_sse4;
+
+    vp8_sad16x16x8 = vp8_sad16x16x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad16x16x8 = vp8_sad16x16x8_sse4;
+
+    vp8_sad4x4x4d = vp8_sad4x4x4d_c;
+    if (flags & HAS_SSE3) vp8_sad4x4x4d = vp8_sad4x4x4d_sse3;
+
+    vp8_sad8x8x4d = vp8_sad8x8x4d_c;
+    if (flags & HAS_SSE3) vp8_sad8x8x4d = vp8_sad8x8x4d_sse3;
+
+    vp8_sad8x16x4d = vp8_sad8x16x4d_c;
+    if (flags & HAS_SSE3) vp8_sad8x16x4d = vp8_sad8x16x4d_sse3;
+
+    vp8_sad16x8x4d = vp8_sad16x8x4d_c;
+    if (flags & HAS_SSE3) vp8_sad16x8x4d = vp8_sad16x8x4d_sse3;
+
+    vp8_sad16x16x4d = vp8_sad16x16x4d_c;
+    if (flags & HAS_SSE3) vp8_sad16x16x4d = vp8_sad16x16x4d_sse3;
+
+    vp8_get_mb_ss = vp8_get_mb_ss_c;
+    if (flags & HAS_MMX) vp8_get_mb_ss = vp8_get_mb_ss_mmx;
+    if (flags & HAS_SSE2) vp8_get_mb_ss = vp8_get_mb_ss_sse2;
+
+    vp8_sub_pixel_mse16x16 = vp8_sub_pixel_mse16x16_c;
+    if (flags & HAS_MMX) vp8_sub_pixel_mse16x16 = vp8_sub_pixel_mse16x16_mmx;
+    if (flags & HAS_SSE2) vp8_sub_pixel_mse16x16 = vp8_sub_pixel_mse16x16_wmt;
+
+    vp8_mse16x16 = vp8_mse16x16_c;
+    if (flags & HAS_MMX) vp8_mse16x16 = vp8_mse16x16_mmx;
+    if (flags & HAS_SSE2) vp8_mse16x16 = vp8_mse16x16_wmt;
+
+    vp8_get4x4sse_cs = vp8_get4x4sse_cs_c;
+    if (flags & HAS_MMX) vp8_get4x4sse_cs = vp8_get4x4sse_cs_mmx;
+
+    vp8_copy32xn = vp8_copy32xn_c;
+    if (flags & HAS_SSE2) vp8_copy32xn = vp8_copy32xn_sse2;
+    if (flags & HAS_SSE3) vp8_copy32xn = vp8_copy32xn_sse3;
+
+    vp8_short_fdct4x4 = vp8_short_fdct4x4_c;
+    if (flags & HAS_MMX) vp8_short_fdct4x4 = vp8_short_fdct4x4_mmx;
+    if (flags & HAS_SSE2) vp8_short_fdct4x4 = vp8_short_fdct4x4_sse2;
+
+    vp8_short_fdct8x4 = vp8_short_fdct8x4_c;
+    if (flags & HAS_MMX) vp8_short_fdct8x4 = vp8_short_fdct8x4_mmx;
+    if (flags & HAS_SSE2) vp8_short_fdct8x4 = vp8_short_fdct8x4_sse2;
+
+    vp8_short_walsh4x4 = vp8_short_walsh4x4_c;
+    if (flags & HAS_SSE2) vp8_short_walsh4x4 = vp8_short_walsh4x4_sse2;
+
+    vp8_regular_quantize_b = vp8_regular_quantize_b_c;
+    if (flags & HAS_SSE2) vp8_regular_quantize_b = vp8_regular_quantize_b_sse2;
+    if (flags & HAS_SSE4_1) vp8_regular_quantize_b = vp8_regular_quantize_b_sse4;
+
+    vp8_fast_quantize_b = vp8_fast_quantize_b_c;
+    if (flags & HAS_SSE2) vp8_fast_quantize_b = vp8_fast_quantize_b_sse2;
+    if (flags & HAS_SSSE3) vp8_fast_quantize_b = vp8_fast_quantize_b_ssse3;
+
+
+
+
+
+
+    vp8_block_error = vp8_block_error_c;
+    if (flags & HAS_MMX) vp8_block_error = vp8_block_error_mmx;
+    if (flags & HAS_SSE2) vp8_block_error = vp8_block_error_xmm;
+
+    vp8_mbblock_error = vp8_mbblock_error_c;
+    if (flags & HAS_MMX) vp8_mbblock_error = vp8_mbblock_error_mmx;
+    if (flags & HAS_SSE2) vp8_mbblock_error = vp8_mbblock_error_xmm;
+
+    vp8_mbuverror = vp8_mbuverror_c;
+    if (flags & HAS_MMX) vp8_mbuverror = vp8_mbuverror_mmx;
+    if (flags & HAS_SSE2) vp8_mbuverror = vp8_mbuverror_xmm;
+
+    vp8_subtract_b = vp8_subtract_b_c;
+    if (flags & HAS_MMX) vp8_subtract_b = vp8_subtract_b_mmx;
+    if (flags & HAS_SSE2) vp8_subtract_b = vp8_subtract_b_sse2;
+
+    vp8_subtract_mby = vp8_subtract_mby_c;
+    if (flags & HAS_MMX) vp8_subtract_mby = vp8_subtract_mby_mmx;
+    if (flags & HAS_SSE2) vp8_subtract_mby = vp8_subtract_mby_sse2;
+
+    vp8_subtract_mbuv = vp8_subtract_mbuv_c;
+    if (flags & HAS_MMX) vp8_subtract_mbuv = vp8_subtract_mbuv_mmx;
+    if (flags & HAS_SSE2) vp8_subtract_mbuv = vp8_subtract_mbuv_sse2;
+
+    vp8_full_search_sad = vp8_full_search_sad_c;
+    if (flags & HAS_SSE3) vp8_full_search_sad = vp8_full_search_sadx3;
+    if (flags & HAS_SSE4_1) vp8_full_search_sad = vp8_full_search_sadx8;
+
+    vp8_refining_search_sad = vp8_refining_search_sad_c;
+    if (flags & HAS_SSE3) vp8_refining_search_sad = vp8_refining_search_sadx4;
+
+    vp8_diamond_search_sad = vp8_diamond_search_sad_c;
+    if (flags & HAS_SSE3) vp8_diamond_search_sad = vp8_diamond_search_sadx4;
+
+
+    vp8_denoiser_filter = vp8_denoiser_filter_c;
+    if (flags & HAS_SSE2) vp8_denoiser_filter = vp8_denoiser_filter_sse2;
+}
+#endif
+#endif
diff --git a/third_party/libvpx/source/config/linux/ia32/vpx_version.h b/third_party/libvpx/source/config/linux/ia32/vpx_version.h
new file mode 100644
index 0000000..f6b740c
--- /dev/null
+++ b/third_party/libvpx/source/config/linux/ia32/vpx_version.h
@@ -0,0 +1,7 @@
+#define VERSION_MAJOR  1
+#define VERSION_MINOR  1
+#define VERSION_PATCH  0
+#define VERSION_EXTRA  "6-gdd6134b"
+#define VERSION_PACKED ((VERSION_MAJOR<<16)|(VERSION_MINOR<<8)|(VERSION_PATCH))
+#define VERSION_STRING_NOSP "v1.1.0-6-gdd6134b"
+#define VERSION_STRING      " v1.1.0-6-gdd6134b"
diff --git a/third_party/libvpx/source/config/linux/x64/vpx_config.asm b/third_party/libvpx/source/config/linux/x64/vpx_config.asm
new file mode 100644
index 0000000..cb005db
--- /dev/null
+++ b/third_party/libvpx/source/config/linux/x64/vpx_config.asm
@@ -0,0 +1,67 @@
+ARCH_ARM equ 0
+ARCH_MIPS equ 0
+ARCH_X86 equ 0
+ARCH_X86_64 equ 1
+ARCH_PPC32 equ 0
+ARCH_PPC64 equ 0
+HAVE_EDSP equ 0
+HAVE_MEDIA equ 0
+HAVE_NEON equ 0
+HAVE_MIPS32 equ 0
+HAVE_MMX equ 1
+HAVE_SSE equ 1
+HAVE_SSE2 equ 1
+HAVE_SSE3 equ 1
+HAVE_SSSE3 equ 1
+HAVE_SSE4_1 equ 1
+HAVE_ALTIVEC equ 0
+HAVE_VPX_PORTS equ 1
+HAVE_STDINT_H equ 1
+HAVE_ALT_TREE_LAYOUT equ 0
+HAVE_PTHREAD_H equ 1
+HAVE_SYS_MMAN_H equ 1
+HAVE_UNISTD_H equ 1
+CONFIG_EXTERNAL_BUILD equ 0
+CONFIG_INSTALL_DOCS equ 0
+CONFIG_INSTALL_BINS equ 0
+CONFIG_INSTALL_LIBS equ 0
+CONFIG_INSTALL_SRCS equ 0
+CONFIG_DEBUG equ 0
+CONFIG_GPROF equ 0
+CONFIG_GCOV equ 0
+CONFIG_RVCT equ 0
+CONFIG_GCC equ 1
+CONFIG_MSVS equ 0
+CONFIG_PIC equ 1
+CONFIG_BIG_ENDIAN equ 0
+CONFIG_CODEC_SRCS equ 0
+CONFIG_DEBUG_LIBS equ 0
+CONFIG_FAST_UNALIGNED equ 1
+CONFIG_MEM_MANAGER equ 0
+CONFIG_MEM_TRACKER equ 0
+CONFIG_MEM_CHECKS equ 0
+CONFIG_MD5 equ 1
+CONFIG_DEQUANT_TOKENS equ 0
+CONFIG_DC_RECON equ 0
+CONFIG_RUNTIME_CPU_DETECT equ 1
+CONFIG_POSTPROC equ 1
+CONFIG_MULTITHREAD equ 1
+CONFIG_INTERNAL_STATS equ 0
+CONFIG_VP8_ENCODER equ 1
+CONFIG_VP8_DECODER equ 1
+CONFIG_VP8 equ 1
+CONFIG_ENCODERS equ 1
+CONFIG_DECODERS equ 1
+CONFIG_STATIC_MSVCRT equ 0
+CONFIG_SPATIAL_RESAMPLING equ 1
+CONFIG_REALTIME_ONLY equ 1
+CONFIG_ONTHEFLY_BITPACKING equ 0
+CONFIG_ERROR_CONCEALMENT equ 1
+CONFIG_SHARED equ 0
+CONFIG_STATIC equ 1
+CONFIG_SMALL equ 0
+CONFIG_POSTPROC_VISUALIZER equ 0
+CONFIG_OS_SUPPORT equ 1
+CONFIG_UNIT_TESTS equ 0
+CONFIG_MULTI_RES_ENCODING equ 1
+CONFIG_TEMPORAL_DENOISING equ 1
diff --git a/third_party/libvpx/source/config/linux/x64/vpx_config.c b/third_party/libvpx/source/config/linux/x64/vpx_config.c
new file mode 100644
index 0000000..776c7e0
--- /dev/null
+++ b/third_party/libvpx/source/config/linux/x64/vpx_config.c
@@ -0,0 +1,9 @@
+/* Copyright (c) 2011 The WebM project authors. All Rights Reserved. */
+/*  */
+/* Use of this source code is governed by a BSD-style license */
+/* that can be found in the LICENSE file in the root of the source */
+/* tree. An additional intellectual property rights grant can be found */
+/* in the file PATENTS.  All contributing project authors may */
+/* be found in the AUTHORS file in the root of the source tree. */
+static const char* const cfg = "--target=x86_64-linux-gcc --enable-pic --enable-error-concealment --disable-install-docs --disable-install-srcs --disable-examples --disable-internal-stats --disable-install-libs --disable-install-bins --enable-realtime-only --enable-multi-res-encoding";
+const char *vpx_codec_build_config(void) {return cfg;}
diff --git a/third_party/libvpx/source/config/linux/x64/vpx_config.h b/third_party/libvpx/source/config/linux/x64/vpx_config.h
new file mode 100644
index 0000000..001ab19
--- /dev/null
+++ b/third_party/libvpx/source/config/linux/x64/vpx_config.h
@@ -0,0 +1,79 @@
+/* Copyright (c) 2011 The WebM project authors. All Rights Reserved. */
+/*  */
+/* Use of this source code is governed by a BSD-style license */
+/* that can be found in the LICENSE file in the root of the source */
+/* tree. An additional intellectual property rights grant can be found */
+/* in the file PATENTS.  All contributing project authors may */
+/* be found in the AUTHORS file in the root of the source tree. */
+/* This file automatically generated by configure. Do not edit! */
+#ifndef VPX_CONFIG_H
+#define VPX_CONFIG_H
+#define RESTRICT    
+#define ARCH_ARM 0
+#define ARCH_MIPS 0
+#define ARCH_X86 0
+#define ARCH_X86_64 1
+#define ARCH_PPC32 0
+#define ARCH_PPC64 0
+#define HAVE_EDSP 0
+#define HAVE_MEDIA 0
+#define HAVE_NEON 0
+#define HAVE_MIPS32 0
+#define HAVE_MMX 1
+#define HAVE_SSE 1
+#define HAVE_SSE2 1
+#define HAVE_SSE3 1
+#define HAVE_SSSE3 1
+#define HAVE_SSE4_1 1
+#define HAVE_ALTIVEC 0
+#define HAVE_VPX_PORTS 1
+#define HAVE_STDINT_H 1
+#define HAVE_ALT_TREE_LAYOUT 0
+#define HAVE_PTHREAD_H 1
+#define HAVE_SYS_MMAN_H 1
+#define HAVE_UNISTD_H 1
+#define CONFIG_EXTERNAL_BUILD 0
+#define CONFIG_INSTALL_DOCS 0
+#define CONFIG_INSTALL_BINS 0
+#define CONFIG_INSTALL_LIBS 0
+#define CONFIG_INSTALL_SRCS 0
+#define CONFIG_DEBUG 0
+#define CONFIG_GPROF 0
+#define CONFIG_GCOV 0
+#define CONFIG_RVCT 0
+#define CONFIG_GCC 1
+#define CONFIG_MSVS 0
+#define CONFIG_PIC 1
+#define CONFIG_BIG_ENDIAN 0
+#define CONFIG_CODEC_SRCS 0
+#define CONFIG_DEBUG_LIBS 0
+#define CONFIG_FAST_UNALIGNED 1
+#define CONFIG_MEM_MANAGER 0
+#define CONFIG_MEM_TRACKER 0
+#define CONFIG_MEM_CHECKS 0
+#define CONFIG_MD5 1
+#define CONFIG_DEQUANT_TOKENS 0
+#define CONFIG_DC_RECON 0
+#define CONFIG_RUNTIME_CPU_DETECT 1
+#define CONFIG_POSTPROC 1
+#define CONFIG_MULTITHREAD 1
+#define CONFIG_INTERNAL_STATS 0
+#define CONFIG_VP8_ENCODER 1
+#define CONFIG_VP8_DECODER 1
+#define CONFIG_VP8 1
+#define CONFIG_ENCODERS 1
+#define CONFIG_DECODERS 1
+#define CONFIG_STATIC_MSVCRT 0
+#define CONFIG_SPATIAL_RESAMPLING 1
+#define CONFIG_REALTIME_ONLY 1
+#define CONFIG_ONTHEFLY_BITPACKING 0
+#define CONFIG_ERROR_CONCEALMENT 1
+#define CONFIG_SHARED 0
+#define CONFIG_STATIC 1
+#define CONFIG_SMALL 0
+#define CONFIG_POSTPROC_VISUALIZER 0
+#define CONFIG_OS_SUPPORT 1
+#define CONFIG_UNIT_TESTS 0
+#define CONFIG_MULTI_RES_ENCODING 1
+#define CONFIG_TEMPORAL_DENOISING 1
+#endif /* VPX_CONFIG_H */
diff --git a/third_party/libvpx/source/config/linux/x64/vpx_rtcd.h b/third_party/libvpx/source/config/linux/x64/vpx_rtcd.h
new file mode 100644
index 0000000..9e24e14
--- /dev/null
+++ b/third_party/libvpx/source/config/linux/x64/vpx_rtcd.h
@@ -0,0 +1,723 @@
+#ifndef VPX_RTCD_
+#define VPX_RTCD_
+
+#ifdef RTCD_C
+#define RTCD_EXTERN
+#else
+#define RTCD_EXTERN extern
+#endif
+
+#include "vp8/common/blockd.h"
+
+struct blockd;
+struct macroblockd;
+struct loop_filter_info;
+
+/* Encoder forward decls */
+struct block;
+struct macroblock;
+struct variance_vtable;
+union int_mv;
+struct yv12_buffer_config;
+
+void vp8_dequantize_b_c(struct blockd*, short *dqc);
+void vp8_dequantize_b_mmx(struct blockd*, short *dqc);
+#define vp8_dequantize_b vp8_dequantize_b_mmx
+
+void vp8_dequant_idct_add_c(short *input, short *dq, unsigned char *output, int stride);
+void vp8_dequant_idct_add_mmx(short *input, short *dq, unsigned char *output, int stride);
+#define vp8_dequant_idct_add vp8_dequant_idct_add_mmx
+
+void vp8_dequant_idct_add_y_block_c(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+void vp8_dequant_idct_add_y_block_mmx(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+void vp8_dequant_idct_add_y_block_sse2(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+#define vp8_dequant_idct_add_y_block vp8_dequant_idct_add_y_block_sse2
+
+void vp8_dequant_idct_add_uv_block_c(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+void vp8_dequant_idct_add_uv_block_mmx(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+void vp8_dequant_idct_add_uv_block_sse2(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+#define vp8_dequant_idct_add_uv_block vp8_dequant_idct_add_uv_block_sse2
+
+void vp8_loop_filter_mbv_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbv_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbv_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+#define vp8_loop_filter_mbv vp8_loop_filter_mbv_sse2
+
+void vp8_loop_filter_bv_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bv_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bv_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+#define vp8_loop_filter_bv vp8_loop_filter_bv_sse2
+
+void vp8_loop_filter_mbh_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbh_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbh_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+#define vp8_loop_filter_mbh vp8_loop_filter_mbh_sse2
+
+void vp8_loop_filter_bh_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bh_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bh_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+#define vp8_loop_filter_bh vp8_loop_filter_bh_sse2
+
+void vp8_loop_filter_simple_vertical_edge_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_vertical_edge_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_vertical_edge_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+#define vp8_loop_filter_simple_mbv vp8_loop_filter_simple_vertical_edge_sse2
+
+void vp8_loop_filter_simple_horizontal_edge_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_horizontal_edge_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_horizontal_edge_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+#define vp8_loop_filter_simple_mbh vp8_loop_filter_simple_horizontal_edge_sse2
+
+void vp8_loop_filter_bvs_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bvs_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bvs_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+#define vp8_loop_filter_simple_bv vp8_loop_filter_bvs_sse2
+
+void vp8_loop_filter_bhs_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bhs_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bhs_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+#define vp8_loop_filter_simple_bh vp8_loop_filter_bhs_sse2
+
+void vp8_short_idct4x4llm_c(short *input, unsigned char *pred, int pitch, unsigned char *dst, int dst_stride);
+void vp8_short_idct4x4llm_mmx(short *input, unsigned char *pred, int pitch, unsigned char *dst, int dst_stride);
+#define vp8_short_idct4x4llm vp8_short_idct4x4llm_mmx
+
+void vp8_short_inv_walsh4x4_1_c(short *input, short *output);
+#define vp8_short_inv_walsh4x4_1 vp8_short_inv_walsh4x4_1_c
+
+void vp8_short_inv_walsh4x4_c(short *input, short *output);
+void vp8_short_inv_walsh4x4_mmx(short *input, short *output);
+void vp8_short_inv_walsh4x4_sse2(short *input, short *output);
+#define vp8_short_inv_walsh4x4 vp8_short_inv_walsh4x4_sse2
+
+void vp8_dc_only_idct_add_c(short input, unsigned char *pred, int pred_stride, unsigned char *dst, int dst_stride);
+void vp8_dc_only_idct_add_mmx(short input, unsigned char *pred, int pred_stride, unsigned char *dst, int dst_stride);
+#define vp8_dc_only_idct_add vp8_dc_only_idct_add_mmx
+
+void vp8_copy_mem16x16_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem16x16_mmx(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem16x16_sse2(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+#define vp8_copy_mem16x16 vp8_copy_mem16x16_sse2
+
+void vp8_copy_mem8x8_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem8x8_mmx(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+#define vp8_copy_mem8x8 vp8_copy_mem8x8_mmx
+
+void vp8_copy_mem8x4_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem8x4_mmx(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+#define vp8_copy_mem8x4 vp8_copy_mem8x4_mmx
+
+void vp8_build_intra_predictors_mby_s_c(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+void vp8_build_intra_predictors_mby_s_sse2(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+void vp8_build_intra_predictors_mby_s_ssse3(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+RTCD_EXTERN void (*vp8_build_intra_predictors_mby_s)(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+
+void vp8_build_intra_predictors_mbuv_s_c(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+void vp8_build_intra_predictors_mbuv_s_sse2(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+void vp8_build_intra_predictors_mbuv_s_ssse3(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+RTCD_EXTERN void (*vp8_build_intra_predictors_mbuv_s)(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+
+void vp8_intra4x4_predict_c(unsigned char *Above, unsigned char *yleft, int left_stride, B_PREDICTION_MODE b_mode, unsigned char *dst, int dst_stride, unsigned char top_left);
+#define vp8_intra4x4_predict vp8_intra4x4_predict_c
+
+void vp8_mbpost_proc_down_c(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+void vp8_mbpost_proc_down_mmx(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+void vp8_mbpost_proc_down_xmm(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+#define vp8_mbpost_proc_down vp8_mbpost_proc_down_xmm
+
+void vp8_mbpost_proc_across_ip_c(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+void vp8_mbpost_proc_across_ip_xmm(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+#define vp8_mbpost_proc_across_ip vp8_mbpost_proc_across_ip_xmm
+
+void vp8_post_proc_down_and_across_c(unsigned char *src, unsigned char *dst, int src_pitch, int dst_pitch, int rows, int cols, int flimit);
+void vp8_post_proc_down_and_across_mmx(unsigned char *src, unsigned char *dst, int src_pitch, int dst_pitch, int rows, int cols, int flimit);
+void vp8_post_proc_down_and_across_xmm(unsigned char *src, unsigned char *dst, int src_pitch, int dst_pitch, int rows, int cols, int flimit);
+#define vp8_post_proc_down_and_across vp8_post_proc_down_and_across_xmm
+
+void vp8_plane_add_noise_c(unsigned char *s, char *noise, char blackclamp[16], char whiteclamp[16], char bothclamp[16], unsigned int w, unsigned int h, int pitch);
+void vp8_plane_add_noise_mmx(unsigned char *s, char *noise, char blackclamp[16], char whiteclamp[16], char bothclamp[16], unsigned int w, unsigned int h, int pitch);
+void vp8_plane_add_noise_wmt(unsigned char *s, char *noise, char blackclamp[16], char whiteclamp[16], char bothclamp[16], unsigned int w, unsigned int h, int pitch);
+#define vp8_plane_add_noise vp8_plane_add_noise_wmt
+
+void vp8_blend_mb_inner_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_mb_inner vp8_blend_mb_inner_c
+
+void vp8_blend_mb_outer_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_mb_outer vp8_blend_mb_outer_c
+
+void vp8_blend_b_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_b vp8_blend_b_c
+
+void vp8_filter_by_weight16x16_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+void vp8_filter_by_weight16x16_sse2(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+#define vp8_filter_by_weight16x16 vp8_filter_by_weight16x16_sse2
+
+void vp8_filter_by_weight8x8_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+void vp8_filter_by_weight8x8_sse2(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+#define vp8_filter_by_weight8x8 vp8_filter_by_weight8x8_sse2
+
+void vp8_filter_by_weight4x4_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+#define vp8_filter_by_weight4x4 vp8_filter_by_weight4x4_c
+
+void vp8_sixtap_predict16x16_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict16x16_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict16x16_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict16x16_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict16x16)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_sixtap_predict8x8_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x8_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x8_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x8_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict8x8)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_sixtap_predict8x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x4_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x4_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict8x4)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_sixtap_predict4x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict4x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict4x4_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict4x4)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_bilinear_predict16x16_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict16x16_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict16x16_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict16x16_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_bilinear_predict16x16)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_bilinear_predict8x8_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x8_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x8_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x8_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_bilinear_predict8x8)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_bilinear_predict8x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+#define vp8_bilinear_predict8x4 vp8_bilinear_predict8x4_mmx
+
+void vp8_bilinear_predict4x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict4x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+#define vp8_bilinear_predict4x4 vp8_bilinear_predict4x4_mmx
+
+unsigned int vp8_variance4x4_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance4x4_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance4x4_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance4x4 vp8_variance4x4_wmt
+
+unsigned int vp8_variance8x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x8_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x8_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance8x8 vp8_variance8x8_wmt
+
+unsigned int vp8_variance8x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x16_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x16_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance8x16 vp8_variance8x16_wmt
+
+unsigned int vp8_variance16x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x8_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x8_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance16x8 vp8_variance16x8_wmt
+
+unsigned int vp8_variance16x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x16_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x16_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance16x16 vp8_variance16x16_wmt
+
+unsigned int vp8_sub_pixel_variance4x4_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance4x4_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance4x4_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_variance4x4 vp8_sub_pixel_variance4x4_wmt
+
+unsigned int vp8_sub_pixel_variance8x8_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x8_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x8_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_variance8x8 vp8_sub_pixel_variance8x8_wmt
+
+unsigned int vp8_sub_pixel_variance8x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x16_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x16_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_variance8x16 vp8_sub_pixel_variance8x16_wmt
+
+unsigned int vp8_sub_pixel_variance16x8_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x8_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x8_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x8_ssse3(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_variance16x8)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_sub_pixel_variance16x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x16_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x16_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x16_ssse3(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_variance16x16)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_variance_halfpixvar16x16_h_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_h_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_h_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance_halfpixvar16x16_h vp8_variance_halfpixvar16x16_h_wmt
+
+unsigned int vp8_variance_halfpixvar16x16_v_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_v_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_v_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance_halfpixvar16x16_v vp8_variance_halfpixvar16x16_v_wmt
+
+unsigned int vp8_variance_halfpixvar16x16_hv_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_hv_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_hv_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance_halfpixvar16x16_hv vp8_variance_halfpixvar16x16_hv_wmt
+
+unsigned int vp8_sad4x4_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad4x4_mmx(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad4x4_wmt(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+#define vp8_sad4x4 vp8_sad4x4_wmt
+
+unsigned int vp8_sad8x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad8x8_mmx(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad8x8_wmt(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+#define vp8_sad8x8 vp8_sad8x8_wmt
+
+unsigned int vp8_sad8x16_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad8x16_mmx(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad8x16_wmt(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+#define vp8_sad8x16 vp8_sad8x16_wmt
+
+unsigned int vp8_sad16x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad16x8_mmx(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad16x8_wmt(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+#define vp8_sad16x8 vp8_sad16x8_wmt
+
+unsigned int vp8_sad16x16_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad16x16_mmx(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad16x16_wmt(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad16x16_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+RTCD_EXTERN unsigned int (*vp8_sad16x16)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+
+void vp8_sad4x4x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad4x4x3_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad4x4x3)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x8x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x8x3_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x8x3)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x16x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x16x3_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x16x3)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x8x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x8x3_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x8x3_ssse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x8x3)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x16x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x16x3_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x16x3_ssse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x16x3)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad4x4x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad4x4x8_sse4(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad4x4x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad8x8x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad8x8x8_sse4(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad8x8x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad8x16x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad8x16x8_sse4(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad8x16x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad16x8x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad16x8x8_sse4(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad16x8x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad16x16x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad16x16x8_sse4(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad16x16x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad4x4x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+void vp8_sad4x4x4d_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad4x4x4d)(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x8x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x8x4d_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x8x4d)(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x16x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x16x4d_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x16x4d)(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x8x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x8x4d_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x8x4d)(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x16x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x16x4d_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x16x4d)(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+
+unsigned int vp8_get_mb_ss_c(const short *);
+unsigned int vp8_get_mb_ss_mmx(const short *);
+unsigned int vp8_get_mb_ss_sse2(const short *);
+#define vp8_get_mb_ss vp8_get_mb_ss_sse2
+
+unsigned int vp8_sub_pixel_mse16x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_mse16x16_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_mse16x16_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_mse16x16 vp8_sub_pixel_mse16x16_wmt
+
+unsigned int vp8_mse16x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_mse16x16_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_mse16x16_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_mse16x16 vp8_mse16x16_wmt
+
+unsigned int vp8_get4x4sse_cs_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride);
+unsigned int vp8_get4x4sse_cs_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride);
+#define vp8_get4x4sse_cs vp8_get4x4sse_cs_mmx
+
+void vp8_copy32xn_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+void vp8_copy32xn_sse2(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+void vp8_copy32xn_sse3(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+RTCD_EXTERN void (*vp8_copy32xn)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+
+void vp8_short_fdct4x4_c(short *input, short *output, int pitch);
+void vp8_short_fdct4x4_mmx(short *input, short *output, int pitch);
+void vp8_short_fdct4x4_sse2(short *input, short *output, int pitch);
+#define vp8_short_fdct4x4 vp8_short_fdct4x4_sse2
+
+void vp8_short_fdct8x4_c(short *input, short *output, int pitch);
+void vp8_short_fdct8x4_mmx(short *input, short *output, int pitch);
+void vp8_short_fdct8x4_sse2(short *input, short *output, int pitch);
+#define vp8_short_fdct8x4 vp8_short_fdct8x4_sse2
+
+void vp8_short_walsh4x4_c(short *input, short *output, int pitch);
+void vp8_short_walsh4x4_sse2(short *input, short *output, int pitch);
+#define vp8_short_walsh4x4 vp8_short_walsh4x4_sse2
+
+void vp8_regular_quantize_b_c(struct block *, struct blockd *);
+void vp8_regular_quantize_b_sse2(struct block *, struct blockd *);
+void vp8_regular_quantize_b_sse4(struct block *, struct blockd *);
+RTCD_EXTERN void (*vp8_regular_quantize_b)(struct block *, struct blockd *);
+
+void vp8_fast_quantize_b_c(struct block *, struct blockd *);
+void vp8_fast_quantize_b_sse2(struct block *, struct blockd *);
+void vp8_fast_quantize_b_ssse3(struct block *, struct blockd *);
+RTCD_EXTERN void (*vp8_fast_quantize_b)(struct block *, struct blockd *);
+
+void vp8_regular_quantize_b_pair_c(struct block *b1, struct block *b2, struct blockd *d1, struct blockd *d2);
+#define vp8_regular_quantize_b_pair vp8_regular_quantize_b_pair_c
+
+void vp8_fast_quantize_b_pair_c(struct block *b1, struct block *b2, struct blockd *d1, struct blockd *d2);
+#define vp8_fast_quantize_b_pair vp8_fast_quantize_b_pair_c
+
+void vp8_quantize_mb_c(struct macroblock *);
+#define vp8_quantize_mb vp8_quantize_mb_c
+
+void vp8_quantize_mby_c(struct macroblock *);
+#define vp8_quantize_mby vp8_quantize_mby_c
+
+void vp8_quantize_mbuv_c(struct macroblock *);
+#define vp8_quantize_mbuv vp8_quantize_mbuv_c
+
+int vp8_block_error_c(short *coeff, short *dqcoeff);
+int vp8_block_error_mmx(short *coeff, short *dqcoeff);
+int vp8_block_error_xmm(short *coeff, short *dqcoeff);
+#define vp8_block_error vp8_block_error_xmm
+
+int vp8_mbblock_error_c(struct macroblock *mb, int dc);
+int vp8_mbblock_error_mmx(struct macroblock *mb, int dc);
+int vp8_mbblock_error_xmm(struct macroblock *mb, int dc);
+#define vp8_mbblock_error vp8_mbblock_error_xmm
+
+int vp8_mbuverror_c(struct macroblock *mb);
+int vp8_mbuverror_mmx(struct macroblock *mb);
+int vp8_mbuverror_xmm(struct macroblock *mb);
+#define vp8_mbuverror vp8_mbuverror_xmm
+
+void vp8_subtract_b_c(struct block *be, struct blockd *bd, int pitch);
+void vp8_subtract_b_mmx(struct block *be, struct blockd *bd, int pitch);
+void vp8_subtract_b_sse2(struct block *be, struct blockd *bd, int pitch);
+#define vp8_subtract_b vp8_subtract_b_sse2
+
+void vp8_subtract_mby_c(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+void vp8_subtract_mby_mmx(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+void vp8_subtract_mby_sse2(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+#define vp8_subtract_mby vp8_subtract_mby_sse2
+
+void vp8_subtract_mbuv_c(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+void vp8_subtract_mbuv_mmx(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+void vp8_subtract_mbuv_sse2(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+#define vp8_subtract_mbuv vp8_subtract_mbuv_sse2
+
+int vp8_full_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_full_search_sadx3(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_full_search_sadx8(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+RTCD_EXTERN int (*vp8_full_search_sad)(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+
+int vp8_refining_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_refining_search_sadx4(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+RTCD_EXTERN int (*vp8_refining_search_sad)(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+
+int vp8_diamond_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, union int_mv *best_mv, int search_param, int sad_per_bit, int *num00, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_diamond_search_sadx4(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, union int_mv *best_mv, int search_param, int sad_per_bit, int *num00, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+RTCD_EXTERN int (*vp8_diamond_search_sad)(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, union int_mv *best_mv, int search_param, int sad_per_bit, int *num00, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+
+void vp8_yv12_copy_partial_frame_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_partial_frame vp8_yv12_copy_partial_frame_c
+
+int vp8_denoiser_filter_c(struct yv12_buffer_config* mc_running_avg, struct yv12_buffer_config* running_avg, struct macroblock* signal, unsigned int motion_magnitude2, int y_offset, int uv_offset);
+int vp8_denoiser_filter_sse2(struct yv12_buffer_config* mc_running_avg, struct yv12_buffer_config* running_avg, struct macroblock* signal, unsigned int motion_magnitude2, int y_offset, int uv_offset);
+#define vp8_denoiser_filter vp8_denoiser_filter_sse2
+
+void vp8_horizontal_line_4_5_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_4_5_scale vp8_horizontal_line_4_5_scale_c
+
+void vp8_vertical_band_4_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_4_5_scale vp8_vertical_band_4_5_scale_c
+
+void vp8_last_vertical_band_4_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_4_5_scale vp8_last_vertical_band_4_5_scale_c
+
+void vp8_horizontal_line_2_3_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_2_3_scale vp8_horizontal_line_2_3_scale_c
+
+void vp8_vertical_band_2_3_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_3_scale vp8_vertical_band_2_3_scale_c
+
+void vp8_last_vertical_band_2_3_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_2_3_scale vp8_last_vertical_band_2_3_scale_c
+
+void vp8_horizontal_line_3_5_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_3_5_scale vp8_horizontal_line_3_5_scale_c
+
+void vp8_vertical_band_3_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_3_5_scale vp8_vertical_band_3_5_scale_c
+
+void vp8_last_vertical_band_3_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_3_5_scale vp8_last_vertical_band_3_5_scale_c
+
+void vp8_horizontal_line_3_4_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_3_4_scale vp8_horizontal_line_3_4_scale_c
+
+void vp8_vertical_band_3_4_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_3_4_scale vp8_vertical_band_3_4_scale_c
+
+void vp8_last_vertical_band_3_4_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_3_4_scale vp8_last_vertical_band_3_4_scale_c
+
+void vp8_horizontal_line_1_2_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_1_2_scale vp8_horizontal_line_1_2_scale_c
+
+void vp8_vertical_band_1_2_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_1_2_scale vp8_vertical_band_1_2_scale_c
+
+void vp8_last_vertical_band_1_2_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_1_2_scale vp8_last_vertical_band_1_2_scale_c
+
+void vp8_horizontal_line_5_4_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_5_4_scale vp8_horizontal_line_5_4_scale_c
+
+void vp8_vertical_band_5_4_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_5_4_scale vp8_vertical_band_5_4_scale_c
+
+void vp8_horizontal_line_5_3_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_5_3_scale vp8_horizontal_line_5_3_scale_c
+
+void vp8_vertical_band_5_3_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_5_3_scale vp8_vertical_band_5_3_scale_c
+
+void vp8_horizontal_line_2_1_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_2_1_scale vp8_horizontal_line_2_1_scale_c
+
+void vp8_vertical_band_2_1_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_1_scale vp8_vertical_band_2_1_scale_c
+
+void vp8_vertical_band_2_1_scale_i_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_1_scale_i vp8_vertical_band_2_1_scale_i_c
+
+void vp8_yv12_extend_frame_borders_c(struct yv12_buffer_config *ybf);
+#define vp8_yv12_extend_frame_borders vp8_yv12_extend_frame_borders_c
+
+void vp8_yv12_copy_frame_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_frame vp8_yv12_copy_frame_c
+
+void vp8_yv12_copy_y_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_y vp8_yv12_copy_y_c
+
+void vpx_rtcd(void);
+
+#ifdef RTCD_C
+#include "vpx_ports/x86.h"
+static void setup_rtcd_internal(void)
+{
+    int flags = x86_simd_caps();
+
+    (void)flags;
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    vp8_build_intra_predictors_mby_s = vp8_build_intra_predictors_mby_s_sse2;
+    if (flags & HAS_SSSE3) vp8_build_intra_predictors_mby_s = vp8_build_intra_predictors_mby_s_ssse3;
+
+    vp8_build_intra_predictors_mbuv_s = vp8_build_intra_predictors_mbuv_s_sse2;
+    if (flags & HAS_SSSE3) vp8_build_intra_predictors_mbuv_s = vp8_build_intra_predictors_mbuv_s_ssse3;
+
+
+
+
+
+
+
+
+
+
+
+
+    vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_sse2;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_ssse3;
+
+    vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_sse2;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_ssse3;
+
+    vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_sse2;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_ssse3;
+
+    vp8_sixtap_predict4x4 = vp8_sixtap_predict4x4_mmx;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict4x4 = vp8_sixtap_predict4x4_ssse3;
+
+    vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_sse2;
+    if (flags & HAS_SSSE3) vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_ssse3;
+
+    vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_sse2;
+    if (flags & HAS_SSSE3) vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_ssse3;
+
+
+
+
+
+
+
+
+
+
+
+    vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_wmt;
+    if (flags & HAS_SSSE3) vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_ssse3;
+
+    vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_wmt;
+    if (flags & HAS_SSSE3) vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_ssse3;
+
+
+
+
+
+
+
+
+    vp8_sad16x16 = vp8_sad16x16_wmt;
+    if (flags & HAS_SSE3) vp8_sad16x16 = vp8_sad16x16_sse3;
+
+    vp8_sad4x4x3 = vp8_sad4x4x3_c;
+    if (flags & HAS_SSE3) vp8_sad4x4x3 = vp8_sad4x4x3_sse3;
+
+    vp8_sad8x8x3 = vp8_sad8x8x3_c;
+    if (flags & HAS_SSE3) vp8_sad8x8x3 = vp8_sad8x8x3_sse3;
+
+    vp8_sad8x16x3 = vp8_sad8x16x3_c;
+    if (flags & HAS_SSE3) vp8_sad8x16x3 = vp8_sad8x16x3_sse3;
+
+    vp8_sad16x8x3 = vp8_sad16x8x3_c;
+    if (flags & HAS_SSE3) vp8_sad16x8x3 = vp8_sad16x8x3_sse3;
+    if (flags & HAS_SSSE3) vp8_sad16x8x3 = vp8_sad16x8x3_ssse3;
+
+    vp8_sad16x16x3 = vp8_sad16x16x3_c;
+    if (flags & HAS_SSE3) vp8_sad16x16x3 = vp8_sad16x16x3_sse3;
+    if (flags & HAS_SSSE3) vp8_sad16x16x3 = vp8_sad16x16x3_ssse3;
+
+    vp8_sad4x4x8 = vp8_sad4x4x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad4x4x8 = vp8_sad4x4x8_sse4;
+
+    vp8_sad8x8x8 = vp8_sad8x8x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad8x8x8 = vp8_sad8x8x8_sse4;
+
+    vp8_sad8x16x8 = vp8_sad8x16x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad8x16x8 = vp8_sad8x16x8_sse4;
+
+    vp8_sad16x8x8 = vp8_sad16x8x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad16x8x8 = vp8_sad16x8x8_sse4;
+
+    vp8_sad16x16x8 = vp8_sad16x16x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad16x16x8 = vp8_sad16x16x8_sse4;
+
+    vp8_sad4x4x4d = vp8_sad4x4x4d_c;
+    if (flags & HAS_SSE3) vp8_sad4x4x4d = vp8_sad4x4x4d_sse3;
+
+    vp8_sad8x8x4d = vp8_sad8x8x4d_c;
+    if (flags & HAS_SSE3) vp8_sad8x8x4d = vp8_sad8x8x4d_sse3;
+
+    vp8_sad8x16x4d = vp8_sad8x16x4d_c;
+    if (flags & HAS_SSE3) vp8_sad8x16x4d = vp8_sad8x16x4d_sse3;
+
+    vp8_sad16x8x4d = vp8_sad16x8x4d_c;
+    if (flags & HAS_SSE3) vp8_sad16x8x4d = vp8_sad16x8x4d_sse3;
+
+    vp8_sad16x16x4d = vp8_sad16x16x4d_c;
+    if (flags & HAS_SSE3) vp8_sad16x16x4d = vp8_sad16x16x4d_sse3;
+
+
+
+
+
+    vp8_copy32xn = vp8_copy32xn_sse2;
+    if (flags & HAS_SSE3) vp8_copy32xn = vp8_copy32xn_sse3;
+
+
+
+
+    vp8_regular_quantize_b = vp8_regular_quantize_b_sse2;
+    if (flags & HAS_SSE4_1) vp8_regular_quantize_b = vp8_regular_quantize_b_sse4;
+
+    vp8_fast_quantize_b = vp8_fast_quantize_b_sse2;
+    if (flags & HAS_SSSE3) vp8_fast_quantize_b = vp8_fast_quantize_b_ssse3;
+
+
+
+
+
+
+
+
+
+
+
+
+    vp8_full_search_sad = vp8_full_search_sad_c;
+    if (flags & HAS_SSE3) vp8_full_search_sad = vp8_full_search_sadx3;
+    if (flags & HAS_SSE4_1) vp8_full_search_sad = vp8_full_search_sadx8;
+
+    vp8_refining_search_sad = vp8_refining_search_sad_c;
+    if (flags & HAS_SSE3) vp8_refining_search_sad = vp8_refining_search_sadx4;
+
+    vp8_diamond_search_sad = vp8_diamond_search_sad_c;
+    if (flags & HAS_SSE3) vp8_diamond_search_sad = vp8_diamond_search_sadx4;
+}
+#endif
+#endif
diff --git a/third_party/libvpx/source/config/linux/x64/vpx_version.h b/third_party/libvpx/source/config/linux/x64/vpx_version.h
new file mode 100644
index 0000000..f6b740c
--- /dev/null
+++ b/third_party/libvpx/source/config/linux/x64/vpx_version.h
@@ -0,0 +1,7 @@
+#define VERSION_MAJOR  1
+#define VERSION_MINOR  1
+#define VERSION_PATCH  0
+#define VERSION_EXTRA  "6-gdd6134b"
+#define VERSION_PACKED ((VERSION_MAJOR<<16)|(VERSION_MINOR<<8)|(VERSION_PATCH))
+#define VERSION_STRING_NOSP "v1.1.0-6-gdd6134b"
+#define VERSION_STRING      " v1.1.0-6-gdd6134b"
diff --git a/third_party/libvpx/source/config/mac/ia32/vpx_config.asm b/third_party/libvpx/source/config/mac/ia32/vpx_config.asm
new file mode 100644
index 0000000..6c39f37
--- /dev/null
+++ b/third_party/libvpx/source/config/mac/ia32/vpx_config.asm
@@ -0,0 +1,67 @@
+ARCH_ARM equ 0
+ARCH_MIPS equ 0
+ARCH_X86 equ 1
+ARCH_X86_64 equ 0
+ARCH_PPC32 equ 0
+ARCH_PPC64 equ 0
+HAVE_EDSP equ 0
+HAVE_MEDIA equ 0
+HAVE_NEON equ 0
+HAVE_MIPS32 equ 0
+HAVE_MMX equ 1
+HAVE_SSE equ 1
+HAVE_SSE2 equ 1
+HAVE_SSE3 equ 1
+HAVE_SSSE3 equ 1
+HAVE_SSE4_1 equ 1
+HAVE_ALTIVEC equ 0
+HAVE_VPX_PORTS equ 1
+HAVE_STDINT_H equ 1
+HAVE_ALT_TREE_LAYOUT equ 0
+HAVE_PTHREAD_H equ 1
+HAVE_SYS_MMAN_H equ 1
+HAVE_UNISTD_H equ 1
+CONFIG_EXTERNAL_BUILD equ 0
+CONFIG_INSTALL_DOCS equ 0
+CONFIG_INSTALL_BINS equ 0
+CONFIG_INSTALL_LIBS equ 0
+CONFIG_INSTALL_SRCS equ 0
+CONFIG_DEBUG equ 0
+CONFIG_GPROF equ 0
+CONFIG_GCOV equ 0
+CONFIG_RVCT equ 0
+CONFIG_GCC equ 1
+CONFIG_MSVS equ 0
+CONFIG_PIC equ 1
+CONFIG_BIG_ENDIAN equ 0
+CONFIG_CODEC_SRCS equ 0
+CONFIG_DEBUG_LIBS equ 0
+CONFIG_FAST_UNALIGNED equ 1
+CONFIG_MEM_MANAGER equ 0
+CONFIG_MEM_TRACKER equ 0
+CONFIG_MEM_CHECKS equ 0
+CONFIG_MD5 equ 1
+CONFIG_DEQUANT_TOKENS equ 0
+CONFIG_DC_RECON equ 0
+CONFIG_RUNTIME_CPU_DETECT equ 1
+CONFIG_POSTPROC equ 1
+CONFIG_MULTITHREAD equ 1
+CONFIG_INTERNAL_STATS equ 0
+CONFIG_VP8_ENCODER equ 1
+CONFIG_VP8_DECODER equ 1
+CONFIG_VP8 equ 1
+CONFIG_ENCODERS equ 1
+CONFIG_DECODERS equ 1
+CONFIG_STATIC_MSVCRT equ 0
+CONFIG_SPATIAL_RESAMPLING equ 1
+CONFIG_REALTIME_ONLY equ 1
+CONFIG_ONTHEFLY_BITPACKING equ 0
+CONFIG_ERROR_CONCEALMENT equ 1
+CONFIG_SHARED equ 0
+CONFIG_STATIC equ 1
+CONFIG_SMALL equ 0
+CONFIG_POSTPROC_VISUALIZER equ 0
+CONFIG_OS_SUPPORT equ 1
+CONFIG_UNIT_TESTS equ 0
+CONFIG_MULTI_RES_ENCODING equ 1
+CONFIG_TEMPORAL_DENOISING equ 1
diff --git a/third_party/libvpx/source/config/mac/ia32/vpx_config.c b/third_party/libvpx/source/config/mac/ia32/vpx_config.c
new file mode 100644
index 0000000..775ad67
--- /dev/null
+++ b/third_party/libvpx/source/config/mac/ia32/vpx_config.c
@@ -0,0 +1,9 @@
+/* Copyright (c) 2011 The WebM project authors. All Rights Reserved. */
+/*  */
+/* Use of this source code is governed by a BSD-style license */
+/* that can be found in the LICENSE file in the root of the source */
+/* tree. An additional intellectual property rights grant can be found */
+/* in the file PATENTS.  All contributing project authors may */
+/* be found in the AUTHORS file in the root of the source tree. */
+static const char* const cfg = "--target=x86-darwin9-gcc --enable-pic --enable-error-concealment --disable-install-docs --disable-install-srcs --disable-internal-stats --disable-install-libs --disable-install-bins --enable-realtime-only --enable-multi-res-encoding";
+const char *vpx_codec_build_config(void) {return cfg;}
diff --git a/third_party/libvpx/source/config/mac/ia32/vpx_config.h b/third_party/libvpx/source/config/mac/ia32/vpx_config.h
new file mode 100644
index 0000000..f079b8d
--- /dev/null
+++ b/third_party/libvpx/source/config/mac/ia32/vpx_config.h
@@ -0,0 +1,79 @@
+/* Copyright (c) 2011 The WebM project authors. All Rights Reserved. */
+/*  */
+/* Use of this source code is governed by a BSD-style license */
+/* that can be found in the LICENSE file in the root of the source */
+/* tree. An additional intellectual property rights grant can be found */
+/* in the file PATENTS.  All contributing project authors may */
+/* be found in the AUTHORS file in the root of the source tree. */
+/* This file automatically generated by configure. Do not edit! */
+#ifndef VPX_CONFIG_H
+#define VPX_CONFIG_H
+#define RESTRICT    
+#define ARCH_ARM 0
+#define ARCH_MIPS 0
+#define ARCH_X86 1
+#define ARCH_X86_64 0
+#define ARCH_PPC32 0
+#define ARCH_PPC64 0
+#define HAVE_EDSP 0
+#define HAVE_MEDIA 0
+#define HAVE_NEON 0
+#define HAVE_MIPS32 0
+#define HAVE_MMX 1
+#define HAVE_SSE 1
+#define HAVE_SSE2 1
+#define HAVE_SSE3 1
+#define HAVE_SSSE3 1
+#define HAVE_SSE4_1 1
+#define HAVE_ALTIVEC 0
+#define HAVE_VPX_PORTS 1
+#define HAVE_STDINT_H 1
+#define HAVE_ALT_TREE_LAYOUT 0
+#define HAVE_PTHREAD_H 1
+#define HAVE_SYS_MMAN_H 1
+#define HAVE_UNISTD_H 1
+#define CONFIG_EXTERNAL_BUILD 0
+#define CONFIG_INSTALL_DOCS 0
+#define CONFIG_INSTALL_BINS 0
+#define CONFIG_INSTALL_LIBS 0
+#define CONFIG_INSTALL_SRCS 0
+#define CONFIG_DEBUG 0
+#define CONFIG_GPROF 0
+#define CONFIG_GCOV 0
+#define CONFIG_RVCT 0
+#define CONFIG_GCC 1
+#define CONFIG_MSVS 0
+#define CONFIG_PIC 1
+#define CONFIG_BIG_ENDIAN 0
+#define CONFIG_CODEC_SRCS 0
+#define CONFIG_DEBUG_LIBS 0
+#define CONFIG_FAST_UNALIGNED 1
+#define CONFIG_MEM_MANAGER 0
+#define CONFIG_MEM_TRACKER 0
+#define CONFIG_MEM_CHECKS 0
+#define CONFIG_MD5 1
+#define CONFIG_DEQUANT_TOKENS 0
+#define CONFIG_DC_RECON 0
+#define CONFIG_RUNTIME_CPU_DETECT 1
+#define CONFIG_POSTPROC 1
+#define CONFIG_MULTITHREAD 1
+#define CONFIG_INTERNAL_STATS 0
+#define CONFIG_VP8_ENCODER 1
+#define CONFIG_VP8_DECODER 1
+#define CONFIG_VP8 1
+#define CONFIG_ENCODERS 1
+#define CONFIG_DECODERS 1
+#define CONFIG_STATIC_MSVCRT 0
+#define CONFIG_SPATIAL_RESAMPLING 1
+#define CONFIG_REALTIME_ONLY 1
+#define CONFIG_ONTHEFLY_BITPACKING 0
+#define CONFIG_ERROR_CONCEALMENT 1
+#define CONFIG_SHARED 0
+#define CONFIG_STATIC 1
+#define CONFIG_SMALL 0
+#define CONFIG_POSTPROC_VISUALIZER 0
+#define CONFIG_OS_SUPPORT 1
+#define CONFIG_UNIT_TESTS 0
+#define CONFIG_MULTI_RES_ENCODING 1
+#define CONFIG_TEMPORAL_DENOISING 1
+#endif /* VPX_CONFIG_H */
diff --git a/third_party/libvpx/source/config/mac/ia32/vpx_rtcd.h b/third_party/libvpx/source/config/mac/ia32/vpx_rtcd.h
new file mode 100644
index 0000000..6630af4
--- /dev/null
+++ b/third_party/libvpx/source/config/mac/ia32/vpx_rtcd.h
@@ -0,0 +1,898 @@
+#ifndef VPX_RTCD_
+#define VPX_RTCD_
+
+#ifdef RTCD_C
+#define RTCD_EXTERN
+#else
+#define RTCD_EXTERN extern
+#endif
+
+#include "vp8/common/blockd.h"
+
+struct blockd;
+struct macroblockd;
+struct loop_filter_info;
+
+/* Encoder forward decls */
+struct block;
+struct macroblock;
+struct variance_vtable;
+union int_mv;
+struct yv12_buffer_config;
+
+void vp8_dequantize_b_c(struct blockd*, short *dqc);
+void vp8_dequantize_b_mmx(struct blockd*, short *dqc);
+RTCD_EXTERN void (*vp8_dequantize_b)(struct blockd*, short *dqc);
+
+void vp8_dequant_idct_add_c(short *input, short *dq, unsigned char *output, int stride);
+void vp8_dequant_idct_add_mmx(short *input, short *dq, unsigned char *output, int stride);
+RTCD_EXTERN void (*vp8_dequant_idct_add)(short *input, short *dq, unsigned char *output, int stride);
+
+void vp8_dequant_idct_add_y_block_c(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+void vp8_dequant_idct_add_y_block_mmx(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+void vp8_dequant_idct_add_y_block_sse2(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+RTCD_EXTERN void (*vp8_dequant_idct_add_y_block)(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+
+void vp8_dequant_idct_add_uv_block_c(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+void vp8_dequant_idct_add_uv_block_mmx(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+void vp8_dequant_idct_add_uv_block_sse2(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+RTCD_EXTERN void (*vp8_dequant_idct_add_uv_block)(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+
+void vp8_loop_filter_mbv_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbv_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbv_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+RTCD_EXTERN void (*vp8_loop_filter_mbv)(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+
+void vp8_loop_filter_bv_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bv_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bv_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+RTCD_EXTERN void (*vp8_loop_filter_bv)(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+
+void vp8_loop_filter_mbh_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbh_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbh_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+RTCD_EXTERN void (*vp8_loop_filter_mbh)(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+
+void vp8_loop_filter_bh_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bh_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bh_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+RTCD_EXTERN void (*vp8_loop_filter_bh)(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+
+void vp8_loop_filter_simple_vertical_edge_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_vertical_edge_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_vertical_edge_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+RTCD_EXTERN void (*vp8_loop_filter_simple_mbv)(unsigned char *y, int ystride, const unsigned char *blimit);
+
+void vp8_loop_filter_simple_horizontal_edge_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_horizontal_edge_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_horizontal_edge_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+RTCD_EXTERN void (*vp8_loop_filter_simple_mbh)(unsigned char *y, int ystride, const unsigned char *blimit);
+
+void vp8_loop_filter_bvs_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bvs_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bvs_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+RTCD_EXTERN void (*vp8_loop_filter_simple_bv)(unsigned char *y, int ystride, const unsigned char *blimit);
+
+void vp8_loop_filter_bhs_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bhs_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bhs_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+RTCD_EXTERN void (*vp8_loop_filter_simple_bh)(unsigned char *y, int ystride, const unsigned char *blimit);
+
+void vp8_short_idct4x4llm_c(short *input, unsigned char *pred, int pitch, unsigned char *dst, int dst_stride);
+void vp8_short_idct4x4llm_mmx(short *input, unsigned char *pred, int pitch, unsigned char *dst, int dst_stride);
+RTCD_EXTERN void (*vp8_short_idct4x4llm)(short *input, unsigned char *pred, int pitch, unsigned char *dst, int dst_stride);
+
+void vp8_short_inv_walsh4x4_1_c(short *input, short *output);
+#define vp8_short_inv_walsh4x4_1 vp8_short_inv_walsh4x4_1_c
+
+void vp8_short_inv_walsh4x4_c(short *input, short *output);
+void vp8_short_inv_walsh4x4_mmx(short *input, short *output);
+void vp8_short_inv_walsh4x4_sse2(short *input, short *output);
+RTCD_EXTERN void (*vp8_short_inv_walsh4x4)(short *input, short *output);
+
+void vp8_dc_only_idct_add_c(short input, unsigned char *pred, int pred_stride, unsigned char *dst, int dst_stride);
+void vp8_dc_only_idct_add_mmx(short input, unsigned char *pred, int pred_stride, unsigned char *dst, int dst_stride);
+RTCD_EXTERN void (*vp8_dc_only_idct_add)(short input, unsigned char *pred, int pred_stride, unsigned char *dst, int dst_stride);
+
+void vp8_copy_mem16x16_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem16x16_mmx(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem16x16_sse2(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_copy_mem16x16)(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+
+void vp8_copy_mem8x8_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem8x8_mmx(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_copy_mem8x8)(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+
+void vp8_copy_mem8x4_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem8x4_mmx(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_copy_mem8x4)(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+
+void vp8_build_intra_predictors_mby_s_c(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+void vp8_build_intra_predictors_mby_s_sse2(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+void vp8_build_intra_predictors_mby_s_ssse3(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+RTCD_EXTERN void (*vp8_build_intra_predictors_mby_s)(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+
+void vp8_build_intra_predictors_mbuv_s_c(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+void vp8_build_intra_predictors_mbuv_s_sse2(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+void vp8_build_intra_predictors_mbuv_s_ssse3(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+RTCD_EXTERN void (*vp8_build_intra_predictors_mbuv_s)(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+
+void vp8_intra4x4_predict_c(unsigned char *Above, unsigned char *yleft, int left_stride, B_PREDICTION_MODE b_mode, unsigned char *dst, int dst_stride, unsigned char top_left);
+#define vp8_intra4x4_predict vp8_intra4x4_predict_c
+
+void vp8_mbpost_proc_down_c(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+void vp8_mbpost_proc_down_mmx(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+void vp8_mbpost_proc_down_xmm(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+RTCD_EXTERN void (*vp8_mbpost_proc_down)(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+
+void vp8_mbpost_proc_across_ip_c(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+void vp8_mbpost_proc_across_ip_xmm(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+RTCD_EXTERN void (*vp8_mbpost_proc_across_ip)(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+
+void vp8_post_proc_down_and_across_c(unsigned char *src, unsigned char *dst, int src_pitch, int dst_pitch, int rows, int cols, int flimit);
+void vp8_post_proc_down_and_across_mmx(unsigned char *src, unsigned char *dst, int src_pitch, int dst_pitch, int rows, int cols, int flimit);
+void vp8_post_proc_down_and_across_xmm(unsigned char *src, unsigned char *dst, int src_pitch, int dst_pitch, int rows, int cols, int flimit);
+RTCD_EXTERN void (*vp8_post_proc_down_and_across)(unsigned char *src, unsigned char *dst, int src_pitch, int dst_pitch, int rows, int cols, int flimit);
+
+void vp8_plane_add_noise_c(unsigned char *s, char *noise, char blackclamp[16], char whiteclamp[16], char bothclamp[16], unsigned int w, unsigned int h, int pitch);
+void vp8_plane_add_noise_mmx(unsigned char *s, char *noise, char blackclamp[16], char whiteclamp[16], char bothclamp[16], unsigned int w, unsigned int h, int pitch);
+void vp8_plane_add_noise_wmt(unsigned char *s, char *noise, char blackclamp[16], char whiteclamp[16], char bothclamp[16], unsigned int w, unsigned int h, int pitch);
+RTCD_EXTERN void (*vp8_plane_add_noise)(unsigned char *s, char *noise, char blackclamp[16], char whiteclamp[16], char bothclamp[16], unsigned int w, unsigned int h, int pitch);
+
+void vp8_blend_mb_inner_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_mb_inner vp8_blend_mb_inner_c
+
+void vp8_blend_mb_outer_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_mb_outer vp8_blend_mb_outer_c
+
+void vp8_blend_b_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_b vp8_blend_b_c
+
+void vp8_filter_by_weight16x16_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+void vp8_filter_by_weight16x16_sse2(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+RTCD_EXTERN void (*vp8_filter_by_weight16x16)(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+
+void vp8_filter_by_weight8x8_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+void vp8_filter_by_weight8x8_sse2(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+RTCD_EXTERN void (*vp8_filter_by_weight8x8)(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+
+void vp8_filter_by_weight4x4_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+#define vp8_filter_by_weight4x4 vp8_filter_by_weight4x4_c
+
+void vp8_sixtap_predict16x16_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict16x16_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict16x16_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict16x16_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict16x16)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_sixtap_predict8x8_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x8_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x8_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x8_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict8x8)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_sixtap_predict8x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x4_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x4_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict8x4)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_sixtap_predict4x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict4x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict4x4_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict4x4)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_bilinear_predict16x16_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict16x16_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict16x16_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict16x16_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_bilinear_predict16x16)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_bilinear_predict8x8_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x8_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x8_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x8_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_bilinear_predict8x8)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_bilinear_predict8x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_bilinear_predict8x4)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_bilinear_predict4x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict4x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_bilinear_predict4x4)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+unsigned int vp8_variance4x4_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance4x4_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance4x4_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance4x4)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_variance8x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x8_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x8_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance8x8)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_variance8x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x16_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x16_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance8x16)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_variance16x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x8_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x8_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance16x8)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_variance16x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x16_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x16_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance16x16)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_sub_pixel_variance4x4_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance4x4_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance4x4_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_variance4x4)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_sub_pixel_variance8x8_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x8_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x8_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_variance8x8)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_sub_pixel_variance8x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x16_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x16_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_variance8x16)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_sub_pixel_variance16x8_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x8_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x8_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x8_ssse3(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_variance16x8)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_sub_pixel_variance16x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x16_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x16_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x16_ssse3(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_variance16x16)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_variance_halfpixvar16x16_h_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_h_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_h_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance_halfpixvar16x16_h)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_variance_halfpixvar16x16_v_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_v_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_v_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance_halfpixvar16x16_v)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_variance_halfpixvar16x16_hv_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_hv_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_hv_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance_halfpixvar16x16_hv)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_sad4x4_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad4x4_mmx(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad4x4_wmt(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+RTCD_EXTERN unsigned int (*vp8_sad4x4)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+
+unsigned int vp8_sad8x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad8x8_mmx(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad8x8_wmt(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+RTCD_EXTERN unsigned int (*vp8_sad8x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+
+unsigned int vp8_sad8x16_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad8x16_mmx(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad8x16_wmt(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+RTCD_EXTERN unsigned int (*vp8_sad8x16)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+
+unsigned int vp8_sad16x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad16x8_mmx(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad16x8_wmt(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+RTCD_EXTERN unsigned int (*vp8_sad16x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+
+unsigned int vp8_sad16x16_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad16x16_mmx(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad16x16_wmt(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad16x16_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+RTCD_EXTERN unsigned int (*vp8_sad16x16)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+
+void vp8_sad4x4x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad4x4x3_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad4x4x3)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x8x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x8x3_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x8x3)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x16x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x16x3_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x16x3)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x8x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x8x3_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x8x3_ssse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x8x3)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x16x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x16x3_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x16x3_ssse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x16x3)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad4x4x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad4x4x8_sse4(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad4x4x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad8x8x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad8x8x8_sse4(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad8x8x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad8x16x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad8x16x8_sse4(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad8x16x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad16x8x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad16x8x8_sse4(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad16x8x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad16x16x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad16x16x8_sse4(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad16x16x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad4x4x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+void vp8_sad4x4x4d_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad4x4x4d)(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x8x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x8x4d_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x8x4d)(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x16x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x16x4d_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x16x4d)(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x8x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x8x4d_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x8x4d)(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x16x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x16x4d_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x16x4d)(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+
+unsigned int vp8_get_mb_ss_c(const short *);
+unsigned int vp8_get_mb_ss_mmx(const short *);
+unsigned int vp8_get_mb_ss_sse2(const short *);
+RTCD_EXTERN unsigned int (*vp8_get_mb_ss)(const short *);
+
+unsigned int vp8_sub_pixel_mse16x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_mse16x16_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_mse16x16_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_mse16x16)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_mse16x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_mse16x16_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_mse16x16_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_mse16x16)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_get4x4sse_cs_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride);
+unsigned int vp8_get4x4sse_cs_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride);
+RTCD_EXTERN unsigned int (*vp8_get4x4sse_cs)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride);
+
+void vp8_copy32xn_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+void vp8_copy32xn_sse2(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+void vp8_copy32xn_sse3(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+RTCD_EXTERN void (*vp8_copy32xn)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+
+void vp8_short_fdct4x4_c(short *input, short *output, int pitch);
+void vp8_short_fdct4x4_mmx(short *input, short *output, int pitch);
+void vp8_short_fdct4x4_sse2(short *input, short *output, int pitch);
+RTCD_EXTERN void (*vp8_short_fdct4x4)(short *input, short *output, int pitch);
+
+void vp8_short_fdct8x4_c(short *input, short *output, int pitch);
+void vp8_short_fdct8x4_mmx(short *input, short *output, int pitch);
+void vp8_short_fdct8x4_sse2(short *input, short *output, int pitch);
+RTCD_EXTERN void (*vp8_short_fdct8x4)(short *input, short *output, int pitch);
+
+void vp8_short_walsh4x4_c(short *input, short *output, int pitch);
+void vp8_short_walsh4x4_sse2(short *input, short *output, int pitch);
+RTCD_EXTERN void (*vp8_short_walsh4x4)(short *input, short *output, int pitch);
+
+void vp8_regular_quantize_b_c(struct block *, struct blockd *);
+void vp8_regular_quantize_b_sse2(struct block *, struct blockd *);
+void vp8_regular_quantize_b_sse4(struct block *, struct blockd *);
+RTCD_EXTERN void (*vp8_regular_quantize_b)(struct block *, struct blockd *);
+
+void vp8_fast_quantize_b_c(struct block *, struct blockd *);
+void vp8_fast_quantize_b_sse2(struct block *, struct blockd *);
+void vp8_fast_quantize_b_ssse3(struct block *, struct blockd *);
+RTCD_EXTERN void (*vp8_fast_quantize_b)(struct block *, struct blockd *);
+
+void vp8_regular_quantize_b_pair_c(struct block *b1, struct block *b2, struct blockd *d1, struct blockd *d2);
+#define vp8_regular_quantize_b_pair vp8_regular_quantize_b_pair_c
+
+void vp8_fast_quantize_b_pair_c(struct block *b1, struct block *b2, struct blockd *d1, struct blockd *d2);
+#define vp8_fast_quantize_b_pair vp8_fast_quantize_b_pair_c
+
+void vp8_quantize_mb_c(struct macroblock *);
+#define vp8_quantize_mb vp8_quantize_mb_c
+
+void vp8_quantize_mby_c(struct macroblock *);
+#define vp8_quantize_mby vp8_quantize_mby_c
+
+void vp8_quantize_mbuv_c(struct macroblock *);
+#define vp8_quantize_mbuv vp8_quantize_mbuv_c
+
+int vp8_block_error_c(short *coeff, short *dqcoeff);
+int vp8_block_error_mmx(short *coeff, short *dqcoeff);
+int vp8_block_error_xmm(short *coeff, short *dqcoeff);
+RTCD_EXTERN int (*vp8_block_error)(short *coeff, short *dqcoeff);
+
+int vp8_mbblock_error_c(struct macroblock *mb, int dc);
+int vp8_mbblock_error_mmx(struct macroblock *mb, int dc);
+int vp8_mbblock_error_xmm(struct macroblock *mb, int dc);
+RTCD_EXTERN int (*vp8_mbblock_error)(struct macroblock *mb, int dc);
+
+int vp8_mbuverror_c(struct macroblock *mb);
+int vp8_mbuverror_mmx(struct macroblock *mb);
+int vp8_mbuverror_xmm(struct macroblock *mb);
+RTCD_EXTERN int (*vp8_mbuverror)(struct macroblock *mb);
+
+void vp8_subtract_b_c(struct block *be, struct blockd *bd, int pitch);
+void vp8_subtract_b_mmx(struct block *be, struct blockd *bd, int pitch);
+void vp8_subtract_b_sse2(struct block *be, struct blockd *bd, int pitch);
+RTCD_EXTERN void (*vp8_subtract_b)(struct block *be, struct blockd *bd, int pitch);
+
+void vp8_subtract_mby_c(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+void vp8_subtract_mby_mmx(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+void vp8_subtract_mby_sse2(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+RTCD_EXTERN void (*vp8_subtract_mby)(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+
+void vp8_subtract_mbuv_c(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+void vp8_subtract_mbuv_mmx(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+void vp8_subtract_mbuv_sse2(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+RTCD_EXTERN void (*vp8_subtract_mbuv)(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+
+int vp8_full_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_full_search_sadx3(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_full_search_sadx8(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+RTCD_EXTERN int (*vp8_full_search_sad)(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+
+int vp8_refining_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_refining_search_sadx4(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+RTCD_EXTERN int (*vp8_refining_search_sad)(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+
+int vp8_diamond_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, union int_mv *best_mv, int search_param, int sad_per_bit, int *num00, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_diamond_search_sadx4(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, union int_mv *best_mv, int search_param, int sad_per_bit, int *num00, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+RTCD_EXTERN int (*vp8_diamond_search_sad)(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, union int_mv *best_mv, int search_param, int sad_per_bit, int *num00, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+
+void vp8_yv12_copy_partial_frame_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_partial_frame vp8_yv12_copy_partial_frame_c
+
+int vp8_denoiser_filter_c(struct yv12_buffer_config* mc_running_avg, struct yv12_buffer_config* running_avg, struct macroblock* signal, unsigned int motion_magnitude2, int y_offset, int uv_offset);
+int vp8_denoiser_filter_sse2(struct yv12_buffer_config* mc_running_avg, struct yv12_buffer_config* running_avg, struct macroblock* signal, unsigned int motion_magnitude2, int y_offset, int uv_offset);
+RTCD_EXTERN int (*vp8_denoiser_filter)(struct yv12_buffer_config* mc_running_avg, struct yv12_buffer_config* running_avg, struct macroblock* signal, unsigned int motion_magnitude2, int y_offset, int uv_offset);
+
+void vp8_horizontal_line_4_5_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_4_5_scale vp8_horizontal_line_4_5_scale_c
+
+void vp8_vertical_band_4_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_4_5_scale vp8_vertical_band_4_5_scale_c
+
+void vp8_last_vertical_band_4_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_4_5_scale vp8_last_vertical_band_4_5_scale_c
+
+void vp8_horizontal_line_2_3_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_2_3_scale vp8_horizontal_line_2_3_scale_c
+
+void vp8_vertical_band_2_3_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_3_scale vp8_vertical_band_2_3_scale_c
+
+void vp8_last_vertical_band_2_3_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_2_3_scale vp8_last_vertical_band_2_3_scale_c
+
+void vp8_horizontal_line_3_5_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_3_5_scale vp8_horizontal_line_3_5_scale_c
+
+void vp8_vertical_band_3_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_3_5_scale vp8_vertical_band_3_5_scale_c
+
+void vp8_last_vertical_band_3_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_3_5_scale vp8_last_vertical_band_3_5_scale_c
+
+void vp8_horizontal_line_3_4_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_3_4_scale vp8_horizontal_line_3_4_scale_c
+
+void vp8_vertical_band_3_4_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_3_4_scale vp8_vertical_band_3_4_scale_c
+
+void vp8_last_vertical_band_3_4_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_3_4_scale vp8_last_vertical_band_3_4_scale_c
+
+void vp8_horizontal_line_1_2_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_1_2_scale vp8_horizontal_line_1_2_scale_c
+
+void vp8_vertical_band_1_2_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_1_2_scale vp8_vertical_band_1_2_scale_c
+
+void vp8_last_vertical_band_1_2_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_1_2_scale vp8_last_vertical_band_1_2_scale_c
+
+void vp8_horizontal_line_5_4_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_5_4_scale vp8_horizontal_line_5_4_scale_c
+
+void vp8_vertical_band_5_4_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_5_4_scale vp8_vertical_band_5_4_scale_c
+
+void vp8_horizontal_line_5_3_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_5_3_scale vp8_horizontal_line_5_3_scale_c
+
+void vp8_vertical_band_5_3_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_5_3_scale vp8_vertical_band_5_3_scale_c
+
+void vp8_horizontal_line_2_1_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_2_1_scale vp8_horizontal_line_2_1_scale_c
+
+void vp8_vertical_band_2_1_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_1_scale vp8_vertical_band_2_1_scale_c
+
+void vp8_vertical_band_2_1_scale_i_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_1_scale_i vp8_vertical_band_2_1_scale_i_c
+
+void vp8_yv12_extend_frame_borders_c(struct yv12_buffer_config *ybf);
+#define vp8_yv12_extend_frame_borders vp8_yv12_extend_frame_borders_c
+
+void vp8_yv12_copy_frame_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_frame vp8_yv12_copy_frame_c
+
+void vp8_yv12_copy_y_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_y vp8_yv12_copy_y_c
+
+void vpx_rtcd(void);
+
+#ifdef RTCD_C
+#include "vpx_ports/x86.h"
+static void setup_rtcd_internal(void)
+{
+    int flags = x86_simd_caps();
+
+    (void)flags;
+
+    vp8_dequantize_b = vp8_dequantize_b_c;
+    if (flags & HAS_MMX) vp8_dequantize_b = vp8_dequantize_b_mmx;
+
+    vp8_dequant_idct_add = vp8_dequant_idct_add_c;
+    if (flags & HAS_MMX) vp8_dequant_idct_add = vp8_dequant_idct_add_mmx;
+
+    vp8_dequant_idct_add_y_block = vp8_dequant_idct_add_y_block_c;
+    if (flags & HAS_MMX) vp8_dequant_idct_add_y_block = vp8_dequant_idct_add_y_block_mmx;
+    if (flags & HAS_SSE2) vp8_dequant_idct_add_y_block = vp8_dequant_idct_add_y_block_sse2;
+
+    vp8_dequant_idct_add_uv_block = vp8_dequant_idct_add_uv_block_c;
+    if (flags & HAS_MMX) vp8_dequant_idct_add_uv_block = vp8_dequant_idct_add_uv_block_mmx;
+    if (flags & HAS_SSE2) vp8_dequant_idct_add_uv_block = vp8_dequant_idct_add_uv_block_sse2;
+
+    vp8_loop_filter_mbv = vp8_loop_filter_mbv_c;
+    if (flags & HAS_MMX) vp8_loop_filter_mbv = vp8_loop_filter_mbv_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_mbv = vp8_loop_filter_mbv_sse2;
+
+    vp8_loop_filter_bv = vp8_loop_filter_bv_c;
+    if (flags & HAS_MMX) vp8_loop_filter_bv = vp8_loop_filter_bv_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_bv = vp8_loop_filter_bv_sse2;
+
+    vp8_loop_filter_mbh = vp8_loop_filter_mbh_c;
+    if (flags & HAS_MMX) vp8_loop_filter_mbh = vp8_loop_filter_mbh_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_mbh = vp8_loop_filter_mbh_sse2;
+
+    vp8_loop_filter_bh = vp8_loop_filter_bh_c;
+    if (flags & HAS_MMX) vp8_loop_filter_bh = vp8_loop_filter_bh_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_bh = vp8_loop_filter_bh_sse2;
+
+    vp8_loop_filter_simple_mbv = vp8_loop_filter_simple_vertical_edge_c;
+    if (flags & HAS_MMX) vp8_loop_filter_simple_mbv = vp8_loop_filter_simple_vertical_edge_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_simple_mbv = vp8_loop_filter_simple_vertical_edge_sse2;
+
+    vp8_loop_filter_simple_mbh = vp8_loop_filter_simple_horizontal_edge_c;
+    if (flags & HAS_MMX) vp8_loop_filter_simple_mbh = vp8_loop_filter_simple_horizontal_edge_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_simple_mbh = vp8_loop_filter_simple_horizontal_edge_sse2;
+
+    vp8_loop_filter_simple_bv = vp8_loop_filter_bvs_c;
+    if (flags & HAS_MMX) vp8_loop_filter_simple_bv = vp8_loop_filter_bvs_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_simple_bv = vp8_loop_filter_bvs_sse2;
+
+    vp8_loop_filter_simple_bh = vp8_loop_filter_bhs_c;
+    if (flags & HAS_MMX) vp8_loop_filter_simple_bh = vp8_loop_filter_bhs_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_simple_bh = vp8_loop_filter_bhs_sse2;
+
+    vp8_short_idct4x4llm = vp8_short_idct4x4llm_c;
+    if (flags & HAS_MMX) vp8_short_idct4x4llm = vp8_short_idct4x4llm_mmx;
+
+
+    vp8_short_inv_walsh4x4 = vp8_short_inv_walsh4x4_c;
+    if (flags & HAS_MMX) vp8_short_inv_walsh4x4 = vp8_short_inv_walsh4x4_mmx;
+    if (flags & HAS_SSE2) vp8_short_inv_walsh4x4 = vp8_short_inv_walsh4x4_sse2;
+
+    vp8_dc_only_idct_add = vp8_dc_only_idct_add_c;
+    if (flags & HAS_MMX) vp8_dc_only_idct_add = vp8_dc_only_idct_add_mmx;
+
+    vp8_copy_mem16x16 = vp8_copy_mem16x16_c;
+    if (flags & HAS_MMX) vp8_copy_mem16x16 = vp8_copy_mem16x16_mmx;
+    if (flags & HAS_SSE2) vp8_copy_mem16x16 = vp8_copy_mem16x16_sse2;
+
+    vp8_copy_mem8x8 = vp8_copy_mem8x8_c;
+    if (flags & HAS_MMX) vp8_copy_mem8x8 = vp8_copy_mem8x8_mmx;
+
+    vp8_copy_mem8x4 = vp8_copy_mem8x4_c;
+    if (flags & HAS_MMX) vp8_copy_mem8x4 = vp8_copy_mem8x4_mmx;
+
+    vp8_build_intra_predictors_mby_s = vp8_build_intra_predictors_mby_s_c;
+    if (flags & HAS_SSE2) vp8_build_intra_predictors_mby_s = vp8_build_intra_predictors_mby_s_sse2;
+    if (flags & HAS_SSSE3) vp8_build_intra_predictors_mby_s = vp8_build_intra_predictors_mby_s_ssse3;
+
+    vp8_build_intra_predictors_mbuv_s = vp8_build_intra_predictors_mbuv_s_c;
+    if (flags & HAS_SSE2) vp8_build_intra_predictors_mbuv_s = vp8_build_intra_predictors_mbuv_s_sse2;
+    if (flags & HAS_SSSE3) vp8_build_intra_predictors_mbuv_s = vp8_build_intra_predictors_mbuv_s_ssse3;
+
+
+    vp8_mbpost_proc_down = vp8_mbpost_proc_down_c;
+    if (flags & HAS_MMX) vp8_mbpost_proc_down = vp8_mbpost_proc_down_mmx;
+    if (flags & HAS_SSE2) vp8_mbpost_proc_down = vp8_mbpost_proc_down_xmm;
+
+    vp8_mbpost_proc_across_ip = vp8_mbpost_proc_across_ip_c;
+    if (flags & HAS_SSE2) vp8_mbpost_proc_across_ip = vp8_mbpost_proc_across_ip_xmm;
+
+    vp8_post_proc_down_and_across = vp8_post_proc_down_and_across_c;
+    if (flags & HAS_MMX) vp8_post_proc_down_and_across = vp8_post_proc_down_and_across_mmx;
+    if (flags & HAS_SSE2) vp8_post_proc_down_and_across = vp8_post_proc_down_and_across_xmm;
+
+    vp8_plane_add_noise = vp8_plane_add_noise_c;
+    if (flags & HAS_MMX) vp8_plane_add_noise = vp8_plane_add_noise_mmx;
+    if (flags & HAS_SSE2) vp8_plane_add_noise = vp8_plane_add_noise_wmt;
+
+
+
+
+    vp8_filter_by_weight16x16 = vp8_filter_by_weight16x16_c;
+    if (flags & HAS_SSE2) vp8_filter_by_weight16x16 = vp8_filter_by_weight16x16_sse2;
+
+    vp8_filter_by_weight8x8 = vp8_filter_by_weight8x8_c;
+    if (flags & HAS_SSE2) vp8_filter_by_weight8x8 = vp8_filter_by_weight8x8_sse2;
+
+
+    vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_c;
+    if (flags & HAS_MMX) vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_mmx;
+    if (flags & HAS_SSE2) vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_sse2;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_ssse3;
+
+    vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_c;
+    if (flags & HAS_MMX) vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_mmx;
+    if (flags & HAS_SSE2) vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_sse2;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_ssse3;
+
+    vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_c;
+    if (flags & HAS_MMX) vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_mmx;
+    if (flags & HAS_SSE2) vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_sse2;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_ssse3;
+
+    vp8_sixtap_predict4x4 = vp8_sixtap_predict4x4_c;
+    if (flags & HAS_MMX) vp8_sixtap_predict4x4 = vp8_sixtap_predict4x4_mmx;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict4x4 = vp8_sixtap_predict4x4_ssse3;
+
+    vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_c;
+    if (flags & HAS_MMX) vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_mmx;
+    if (flags & HAS_SSE2) vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_sse2;
+    if (flags & HAS_SSSE3) vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_ssse3;
+
+    vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_c;
+    if (flags & HAS_MMX) vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_mmx;
+    if (flags & HAS_SSE2) vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_sse2;
+    if (flags & HAS_SSSE3) vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_ssse3;
+
+    vp8_bilinear_predict8x4 = vp8_bilinear_predict8x4_c;
+    if (flags & HAS_MMX) vp8_bilinear_predict8x4 = vp8_bilinear_predict8x4_mmx;
+
+    vp8_bilinear_predict4x4 = vp8_bilinear_predict4x4_c;
+    if (flags & HAS_MMX) vp8_bilinear_predict4x4 = vp8_bilinear_predict4x4_mmx;
+
+    vp8_variance4x4 = vp8_variance4x4_c;
+    if (flags & HAS_MMX) vp8_variance4x4 = vp8_variance4x4_mmx;
+    if (flags & HAS_SSE2) vp8_variance4x4 = vp8_variance4x4_wmt;
+
+    vp8_variance8x8 = vp8_variance8x8_c;
+    if (flags & HAS_MMX) vp8_variance8x8 = vp8_variance8x8_mmx;
+    if (flags & HAS_SSE2) vp8_variance8x8 = vp8_variance8x8_wmt;
+
+    vp8_variance8x16 = vp8_variance8x16_c;
+    if (flags & HAS_MMX) vp8_variance8x16 = vp8_variance8x16_mmx;
+    if (flags & HAS_SSE2) vp8_variance8x16 = vp8_variance8x16_wmt;
+
+    vp8_variance16x8 = vp8_variance16x8_c;
+    if (flags & HAS_MMX) vp8_variance16x8 = vp8_variance16x8_mmx;
+    if (flags & HAS_SSE2) vp8_variance16x8 = vp8_variance16x8_wmt;
+
+    vp8_variance16x16 = vp8_variance16x16_c;
+    if (flags & HAS_MMX) vp8_variance16x16 = vp8_variance16x16_mmx;
+    if (flags & HAS_SSE2) vp8_variance16x16 = vp8_variance16x16_wmt;
+
+    vp8_sub_pixel_variance4x4 = vp8_sub_pixel_variance4x4_c;
+    if (flags & HAS_MMX) vp8_sub_pixel_variance4x4 = vp8_sub_pixel_variance4x4_mmx;
+    if (flags & HAS_SSE2) vp8_sub_pixel_variance4x4 = vp8_sub_pixel_variance4x4_wmt;
+
+    vp8_sub_pixel_variance8x8 = vp8_sub_pixel_variance8x8_c;
+    if (flags & HAS_MMX) vp8_sub_pixel_variance8x8 = vp8_sub_pixel_variance8x8_mmx;
+    if (flags & HAS_SSE2) vp8_sub_pixel_variance8x8 = vp8_sub_pixel_variance8x8_wmt;
+
+    vp8_sub_pixel_variance8x16 = vp8_sub_pixel_variance8x16_c;
+    if (flags & HAS_MMX) vp8_sub_pixel_variance8x16 = vp8_sub_pixel_variance8x16_mmx;
+    if (flags & HAS_SSE2) vp8_sub_pixel_variance8x16 = vp8_sub_pixel_variance8x16_wmt;
+
+    vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_c;
+    if (flags & HAS_MMX) vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_mmx;
+    if (flags & HAS_SSE2) vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_wmt;
+    if (flags & HAS_SSSE3) vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_ssse3;
+
+    vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_c;
+    if (flags & HAS_MMX) vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_mmx;
+    if (flags & HAS_SSE2) vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_wmt;
+    if (flags & HAS_SSSE3) vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_ssse3;
+
+    vp8_variance_halfpixvar16x16_h = vp8_variance_halfpixvar16x16_h_c;
+    if (flags & HAS_MMX) vp8_variance_halfpixvar16x16_h = vp8_variance_halfpixvar16x16_h_mmx;
+    if (flags & HAS_SSE2) vp8_variance_halfpixvar16x16_h = vp8_variance_halfpixvar16x16_h_wmt;
+
+    vp8_variance_halfpixvar16x16_v = vp8_variance_halfpixvar16x16_v_c;
+    if (flags & HAS_MMX) vp8_variance_halfpixvar16x16_v = vp8_variance_halfpixvar16x16_v_mmx;
+    if (flags & HAS_SSE2) vp8_variance_halfpixvar16x16_v = vp8_variance_halfpixvar16x16_v_wmt;
+
+    vp8_variance_halfpixvar16x16_hv = vp8_variance_halfpixvar16x16_hv_c;
+    if (flags & HAS_MMX) vp8_variance_halfpixvar16x16_hv = vp8_variance_halfpixvar16x16_hv_mmx;
+    if (flags & HAS_SSE2) vp8_variance_halfpixvar16x16_hv = vp8_variance_halfpixvar16x16_hv_wmt;
+
+    vp8_sad4x4 = vp8_sad4x4_c;
+    if (flags & HAS_MMX) vp8_sad4x4 = vp8_sad4x4_mmx;
+    if (flags & HAS_SSE2) vp8_sad4x4 = vp8_sad4x4_wmt;
+
+    vp8_sad8x8 = vp8_sad8x8_c;
+    if (flags & HAS_MMX) vp8_sad8x8 = vp8_sad8x8_mmx;
+    if (flags & HAS_SSE2) vp8_sad8x8 = vp8_sad8x8_wmt;
+
+    vp8_sad8x16 = vp8_sad8x16_c;
+    if (flags & HAS_MMX) vp8_sad8x16 = vp8_sad8x16_mmx;
+    if (flags & HAS_SSE2) vp8_sad8x16 = vp8_sad8x16_wmt;
+
+    vp8_sad16x8 = vp8_sad16x8_c;
+    if (flags & HAS_MMX) vp8_sad16x8 = vp8_sad16x8_mmx;
+    if (flags & HAS_SSE2) vp8_sad16x8 = vp8_sad16x8_wmt;
+
+    vp8_sad16x16 = vp8_sad16x16_c;
+    if (flags & HAS_MMX) vp8_sad16x16 = vp8_sad16x16_mmx;
+    if (flags & HAS_SSE2) vp8_sad16x16 = vp8_sad16x16_wmt;
+    if (flags & HAS_SSE3) vp8_sad16x16 = vp8_sad16x16_sse3;
+
+    vp8_sad4x4x3 = vp8_sad4x4x3_c;
+    if (flags & HAS_SSE3) vp8_sad4x4x3 = vp8_sad4x4x3_sse3;
+
+    vp8_sad8x8x3 = vp8_sad8x8x3_c;
+    if (flags & HAS_SSE3) vp8_sad8x8x3 = vp8_sad8x8x3_sse3;
+
+    vp8_sad8x16x3 = vp8_sad8x16x3_c;
+    if (flags & HAS_SSE3) vp8_sad8x16x3 = vp8_sad8x16x3_sse3;
+
+    vp8_sad16x8x3 = vp8_sad16x8x3_c;
+    if (flags & HAS_SSE3) vp8_sad16x8x3 = vp8_sad16x8x3_sse3;
+    if (flags & HAS_SSSE3) vp8_sad16x8x3 = vp8_sad16x8x3_ssse3;
+
+    vp8_sad16x16x3 = vp8_sad16x16x3_c;
+    if (flags & HAS_SSE3) vp8_sad16x16x3 = vp8_sad16x16x3_sse3;
+    if (flags & HAS_SSSE3) vp8_sad16x16x3 = vp8_sad16x16x3_ssse3;
+
+    vp8_sad4x4x8 = vp8_sad4x4x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad4x4x8 = vp8_sad4x4x8_sse4;
+
+    vp8_sad8x8x8 = vp8_sad8x8x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad8x8x8 = vp8_sad8x8x8_sse4;
+
+    vp8_sad8x16x8 = vp8_sad8x16x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad8x16x8 = vp8_sad8x16x8_sse4;
+
+    vp8_sad16x8x8 = vp8_sad16x8x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad16x8x8 = vp8_sad16x8x8_sse4;
+
+    vp8_sad16x16x8 = vp8_sad16x16x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad16x16x8 = vp8_sad16x16x8_sse4;
+
+    vp8_sad4x4x4d = vp8_sad4x4x4d_c;
+    if (flags & HAS_SSE3) vp8_sad4x4x4d = vp8_sad4x4x4d_sse3;
+
+    vp8_sad8x8x4d = vp8_sad8x8x4d_c;
+    if (flags & HAS_SSE3) vp8_sad8x8x4d = vp8_sad8x8x4d_sse3;
+
+    vp8_sad8x16x4d = vp8_sad8x16x4d_c;
+    if (flags & HAS_SSE3) vp8_sad8x16x4d = vp8_sad8x16x4d_sse3;
+
+    vp8_sad16x8x4d = vp8_sad16x8x4d_c;
+    if (flags & HAS_SSE3) vp8_sad16x8x4d = vp8_sad16x8x4d_sse3;
+
+    vp8_sad16x16x4d = vp8_sad16x16x4d_c;
+    if (flags & HAS_SSE3) vp8_sad16x16x4d = vp8_sad16x16x4d_sse3;
+
+    vp8_get_mb_ss = vp8_get_mb_ss_c;
+    if (flags & HAS_MMX) vp8_get_mb_ss = vp8_get_mb_ss_mmx;
+    if (flags & HAS_SSE2) vp8_get_mb_ss = vp8_get_mb_ss_sse2;
+
+    vp8_sub_pixel_mse16x16 = vp8_sub_pixel_mse16x16_c;
+    if (flags & HAS_MMX) vp8_sub_pixel_mse16x16 = vp8_sub_pixel_mse16x16_mmx;
+    if (flags & HAS_SSE2) vp8_sub_pixel_mse16x16 = vp8_sub_pixel_mse16x16_wmt;
+
+    vp8_mse16x16 = vp8_mse16x16_c;
+    if (flags & HAS_MMX) vp8_mse16x16 = vp8_mse16x16_mmx;
+    if (flags & HAS_SSE2) vp8_mse16x16 = vp8_mse16x16_wmt;
+
+    vp8_get4x4sse_cs = vp8_get4x4sse_cs_c;
+    if (flags & HAS_MMX) vp8_get4x4sse_cs = vp8_get4x4sse_cs_mmx;
+
+    vp8_copy32xn = vp8_copy32xn_c;
+    if (flags & HAS_SSE2) vp8_copy32xn = vp8_copy32xn_sse2;
+    if (flags & HAS_SSE3) vp8_copy32xn = vp8_copy32xn_sse3;
+
+    vp8_short_fdct4x4 = vp8_short_fdct4x4_c;
+    if (flags & HAS_MMX) vp8_short_fdct4x4 = vp8_short_fdct4x4_mmx;
+    if (flags & HAS_SSE2) vp8_short_fdct4x4 = vp8_short_fdct4x4_sse2;
+
+    vp8_short_fdct8x4 = vp8_short_fdct8x4_c;
+    if (flags & HAS_MMX) vp8_short_fdct8x4 = vp8_short_fdct8x4_mmx;
+    if (flags & HAS_SSE2) vp8_short_fdct8x4 = vp8_short_fdct8x4_sse2;
+
+    vp8_short_walsh4x4 = vp8_short_walsh4x4_c;
+    if (flags & HAS_SSE2) vp8_short_walsh4x4 = vp8_short_walsh4x4_sse2;
+
+    vp8_regular_quantize_b = vp8_regular_quantize_b_c;
+    if (flags & HAS_SSE2) vp8_regular_quantize_b = vp8_regular_quantize_b_sse2;
+    if (flags & HAS_SSE4_1) vp8_regular_quantize_b = vp8_regular_quantize_b_sse4;
+
+    vp8_fast_quantize_b = vp8_fast_quantize_b_c;
+    if (flags & HAS_SSE2) vp8_fast_quantize_b = vp8_fast_quantize_b_sse2;
+    if (flags & HAS_SSSE3) vp8_fast_quantize_b = vp8_fast_quantize_b_ssse3;
+
+
+
+
+
+
+    vp8_block_error = vp8_block_error_c;
+    if (flags & HAS_MMX) vp8_block_error = vp8_block_error_mmx;
+    if (flags & HAS_SSE2) vp8_block_error = vp8_block_error_xmm;
+
+    vp8_mbblock_error = vp8_mbblock_error_c;
+    if (flags & HAS_MMX) vp8_mbblock_error = vp8_mbblock_error_mmx;
+    if (flags & HAS_SSE2) vp8_mbblock_error = vp8_mbblock_error_xmm;
+
+    vp8_mbuverror = vp8_mbuverror_c;
+    if (flags & HAS_MMX) vp8_mbuverror = vp8_mbuverror_mmx;
+    if (flags & HAS_SSE2) vp8_mbuverror = vp8_mbuverror_xmm;
+
+    vp8_subtract_b = vp8_subtract_b_c;
+    if (flags & HAS_MMX) vp8_subtract_b = vp8_subtract_b_mmx;
+    if (flags & HAS_SSE2) vp8_subtract_b = vp8_subtract_b_sse2;
+
+    vp8_subtract_mby = vp8_subtract_mby_c;
+    if (flags & HAS_MMX) vp8_subtract_mby = vp8_subtract_mby_mmx;
+    if (flags & HAS_SSE2) vp8_subtract_mby = vp8_subtract_mby_sse2;
+
+    vp8_subtract_mbuv = vp8_subtract_mbuv_c;
+    if (flags & HAS_MMX) vp8_subtract_mbuv = vp8_subtract_mbuv_mmx;
+    if (flags & HAS_SSE2) vp8_subtract_mbuv = vp8_subtract_mbuv_sse2;
+
+    vp8_full_search_sad = vp8_full_search_sad_c;
+    if (flags & HAS_SSE3) vp8_full_search_sad = vp8_full_search_sadx3;
+    if (flags & HAS_SSE4_1) vp8_full_search_sad = vp8_full_search_sadx8;
+
+    vp8_refining_search_sad = vp8_refining_search_sad_c;
+    if (flags & HAS_SSE3) vp8_refining_search_sad = vp8_refining_search_sadx4;
+
+    vp8_diamond_search_sad = vp8_diamond_search_sad_c;
+    if (flags & HAS_SSE3) vp8_diamond_search_sad = vp8_diamond_search_sadx4;
+
+
+    vp8_denoiser_filter = vp8_denoiser_filter_c;
+    if (flags & HAS_SSE2) vp8_denoiser_filter = vp8_denoiser_filter_sse2;
+}
+#endif
+#endif
diff --git a/third_party/libvpx/source/config/mac/ia32/vpx_version.h b/third_party/libvpx/source/config/mac/ia32/vpx_version.h
new file mode 100644
index 0000000..f6b740c
--- /dev/null
+++ b/third_party/libvpx/source/config/mac/ia32/vpx_version.h
@@ -0,0 +1,7 @@
+#define VERSION_MAJOR  1
+#define VERSION_MINOR  1
+#define VERSION_PATCH  0
+#define VERSION_EXTRA  "6-gdd6134b"
+#define VERSION_PACKED ((VERSION_MAJOR<<16)|(VERSION_MINOR<<8)|(VERSION_PATCH))
+#define VERSION_STRING_NOSP "v1.1.0-6-gdd6134b"
+#define VERSION_STRING      " v1.1.0-6-gdd6134b"
diff --git a/third_party/libvpx/source/config/mac/x64/vpx_config.asm b/third_party/libvpx/source/config/mac/x64/vpx_config.asm
new file mode 100644
index 0000000..cb005db
--- /dev/null
+++ b/third_party/libvpx/source/config/mac/x64/vpx_config.asm
@@ -0,0 +1,67 @@
+ARCH_ARM equ 0
+ARCH_MIPS equ 0
+ARCH_X86 equ 0
+ARCH_X86_64 equ 1
+ARCH_PPC32 equ 0
+ARCH_PPC64 equ 0
+HAVE_EDSP equ 0
+HAVE_MEDIA equ 0
+HAVE_NEON equ 0
+HAVE_MIPS32 equ 0
+HAVE_MMX equ 1
+HAVE_SSE equ 1
+HAVE_SSE2 equ 1
+HAVE_SSE3 equ 1
+HAVE_SSSE3 equ 1
+HAVE_SSE4_1 equ 1
+HAVE_ALTIVEC equ 0
+HAVE_VPX_PORTS equ 1
+HAVE_STDINT_H equ 1
+HAVE_ALT_TREE_LAYOUT equ 0
+HAVE_PTHREAD_H equ 1
+HAVE_SYS_MMAN_H equ 1
+HAVE_UNISTD_H equ 1
+CONFIG_EXTERNAL_BUILD equ 0
+CONFIG_INSTALL_DOCS equ 0
+CONFIG_INSTALL_BINS equ 0
+CONFIG_INSTALL_LIBS equ 0
+CONFIG_INSTALL_SRCS equ 0
+CONFIG_DEBUG equ 0
+CONFIG_GPROF equ 0
+CONFIG_GCOV equ 0
+CONFIG_RVCT equ 0
+CONFIG_GCC equ 1
+CONFIG_MSVS equ 0
+CONFIG_PIC equ 1
+CONFIG_BIG_ENDIAN equ 0
+CONFIG_CODEC_SRCS equ 0
+CONFIG_DEBUG_LIBS equ 0
+CONFIG_FAST_UNALIGNED equ 1
+CONFIG_MEM_MANAGER equ 0
+CONFIG_MEM_TRACKER equ 0
+CONFIG_MEM_CHECKS equ 0
+CONFIG_MD5 equ 1
+CONFIG_DEQUANT_TOKENS equ 0
+CONFIG_DC_RECON equ 0
+CONFIG_RUNTIME_CPU_DETECT equ 1
+CONFIG_POSTPROC equ 1
+CONFIG_MULTITHREAD equ 1
+CONFIG_INTERNAL_STATS equ 0
+CONFIG_VP8_ENCODER equ 1
+CONFIG_VP8_DECODER equ 1
+CONFIG_VP8 equ 1
+CONFIG_ENCODERS equ 1
+CONFIG_DECODERS equ 1
+CONFIG_STATIC_MSVCRT equ 0
+CONFIG_SPATIAL_RESAMPLING equ 1
+CONFIG_REALTIME_ONLY equ 1
+CONFIG_ONTHEFLY_BITPACKING equ 0
+CONFIG_ERROR_CONCEALMENT equ 1
+CONFIG_SHARED equ 0
+CONFIG_STATIC equ 1
+CONFIG_SMALL equ 0
+CONFIG_POSTPROC_VISUALIZER equ 0
+CONFIG_OS_SUPPORT equ 1
+CONFIG_UNIT_TESTS equ 0
+CONFIG_MULTI_RES_ENCODING equ 1
+CONFIG_TEMPORAL_DENOISING equ 1
diff --git a/third_party/libvpx/source/config/mac/x64/vpx_config.c b/third_party/libvpx/source/config/mac/x64/vpx_config.c
new file mode 100644
index 0000000..e764d7c
--- /dev/null
+++ b/third_party/libvpx/source/config/mac/x64/vpx_config.c
@@ -0,0 +1,9 @@
+/* Copyright (c) 2011 The WebM project authors. All Rights Reserved. */
+/*  */
+/* Use of this source code is governed by a BSD-style license */
+/* that can be found in the LICENSE file in the root of the source */
+/* tree. An additional intellectual property rights grant can be found */
+/* in the file PATENTS.  All contributing project authors may */
+/* be found in the AUTHORS file in the root of the source tree. */
+static const char* const cfg = "--target=x86_64-darwin10-gcc --enable-pic --enable-error-concealment --disable-install-docs --disable-install-srcs --disable-internal-stats --disable-install-libs --disable-install-bins --enable-realtime-only --enable-multi-res-encoding";
+const char *vpx_codec_build_config(void) {return cfg;}
diff --git a/third_party/libvpx/source/config/mac/x64/vpx_config.h b/third_party/libvpx/source/config/mac/x64/vpx_config.h
new file mode 100644
index 0000000..001ab19
--- /dev/null
+++ b/third_party/libvpx/source/config/mac/x64/vpx_config.h
@@ -0,0 +1,79 @@
+/* Copyright (c) 2011 The WebM project authors. All Rights Reserved. */
+/*  */
+/* Use of this source code is governed by a BSD-style license */
+/* that can be found in the LICENSE file in the root of the source */
+/* tree. An additional intellectual property rights grant can be found */
+/* in the file PATENTS.  All contributing project authors may */
+/* be found in the AUTHORS file in the root of the source tree. */
+/* This file automatically generated by configure. Do not edit! */
+#ifndef VPX_CONFIG_H
+#define VPX_CONFIG_H
+#define RESTRICT    
+#define ARCH_ARM 0
+#define ARCH_MIPS 0
+#define ARCH_X86 0
+#define ARCH_X86_64 1
+#define ARCH_PPC32 0
+#define ARCH_PPC64 0
+#define HAVE_EDSP 0
+#define HAVE_MEDIA 0
+#define HAVE_NEON 0
+#define HAVE_MIPS32 0
+#define HAVE_MMX 1
+#define HAVE_SSE 1
+#define HAVE_SSE2 1
+#define HAVE_SSE3 1
+#define HAVE_SSSE3 1
+#define HAVE_SSE4_1 1
+#define HAVE_ALTIVEC 0
+#define HAVE_VPX_PORTS 1
+#define HAVE_STDINT_H 1
+#define HAVE_ALT_TREE_LAYOUT 0
+#define HAVE_PTHREAD_H 1
+#define HAVE_SYS_MMAN_H 1
+#define HAVE_UNISTD_H 1
+#define CONFIG_EXTERNAL_BUILD 0
+#define CONFIG_INSTALL_DOCS 0
+#define CONFIG_INSTALL_BINS 0
+#define CONFIG_INSTALL_LIBS 0
+#define CONFIG_INSTALL_SRCS 0
+#define CONFIG_DEBUG 0
+#define CONFIG_GPROF 0
+#define CONFIG_GCOV 0
+#define CONFIG_RVCT 0
+#define CONFIG_GCC 1
+#define CONFIG_MSVS 0
+#define CONFIG_PIC 1
+#define CONFIG_BIG_ENDIAN 0
+#define CONFIG_CODEC_SRCS 0
+#define CONFIG_DEBUG_LIBS 0
+#define CONFIG_FAST_UNALIGNED 1
+#define CONFIG_MEM_MANAGER 0
+#define CONFIG_MEM_TRACKER 0
+#define CONFIG_MEM_CHECKS 0
+#define CONFIG_MD5 1
+#define CONFIG_DEQUANT_TOKENS 0
+#define CONFIG_DC_RECON 0
+#define CONFIG_RUNTIME_CPU_DETECT 1
+#define CONFIG_POSTPROC 1
+#define CONFIG_MULTITHREAD 1
+#define CONFIG_INTERNAL_STATS 0
+#define CONFIG_VP8_ENCODER 1
+#define CONFIG_VP8_DECODER 1
+#define CONFIG_VP8 1
+#define CONFIG_ENCODERS 1
+#define CONFIG_DECODERS 1
+#define CONFIG_STATIC_MSVCRT 0
+#define CONFIG_SPATIAL_RESAMPLING 1
+#define CONFIG_REALTIME_ONLY 1
+#define CONFIG_ONTHEFLY_BITPACKING 0
+#define CONFIG_ERROR_CONCEALMENT 1
+#define CONFIG_SHARED 0
+#define CONFIG_STATIC 1
+#define CONFIG_SMALL 0
+#define CONFIG_POSTPROC_VISUALIZER 0
+#define CONFIG_OS_SUPPORT 1
+#define CONFIG_UNIT_TESTS 0
+#define CONFIG_MULTI_RES_ENCODING 1
+#define CONFIG_TEMPORAL_DENOISING 1
+#endif /* VPX_CONFIG_H */
diff --git a/third_party/libvpx/source/config/mac/x64/vpx_rtcd.h b/third_party/libvpx/source/config/mac/x64/vpx_rtcd.h
new file mode 100644
index 0000000..3144b87
--- /dev/null
+++ b/third_party/libvpx/source/config/mac/x64/vpx_rtcd.h
@@ -0,0 +1,724 @@
+#ifndef VPX_RTCD_
+#define VPX_RTCD_
+
+#ifdef RTCD_C
+#define RTCD_EXTERN
+#else
+#define RTCD_EXTERN extern
+#endif
+
+struct blockd;
+struct macroblockd;
+struct loop_filter_info;
+
+/* Encoder forward decls */
+struct block;
+struct macroblock;
+struct variance_vtable;
+union int_mv;
+struct yv12_buffer_config;
+
+void vp8_dequantize_b_c(struct blockd*, short *dqc);
+void vp8_dequantize_b_mmx(struct blockd*, short *dqc);
+#define vp8_dequantize_b vp8_dequantize_b_mmx
+
+void vp8_dequant_idct_add_c(short *input, short *dq, unsigned char *output, int stride);
+void vp8_dequant_idct_add_mmx(short *input, short *dq, unsigned char *output, int stride);
+#define vp8_dequant_idct_add vp8_dequant_idct_add_mmx
+
+void vp8_dequant_idct_add_y_block_c(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+void vp8_dequant_idct_add_y_block_mmx(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+void vp8_dequant_idct_add_y_block_sse2(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+#define vp8_dequant_idct_add_y_block vp8_dequant_idct_add_y_block_sse2
+
+void vp8_dequant_idct_add_uv_block_c(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+void vp8_dequant_idct_add_uv_block_mmx(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+void vp8_dequant_idct_add_uv_block_sse2(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+#define vp8_dequant_idct_add_uv_block vp8_dequant_idct_add_uv_block_sse2
+
+void vp8_loop_filter_mbv_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbv_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbv_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+#define vp8_loop_filter_mbv vp8_loop_filter_mbv_sse2
+
+void vp8_loop_filter_bv_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bv_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bv_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+#define vp8_loop_filter_bv vp8_loop_filter_bv_sse2
+
+void vp8_loop_filter_mbh_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbh_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbh_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+#define vp8_loop_filter_mbh vp8_loop_filter_mbh_sse2
+
+void vp8_loop_filter_bh_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bh_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bh_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+#define vp8_loop_filter_bh vp8_loop_filter_bh_sse2
+
+void vp8_loop_filter_simple_vertical_edge_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_vertical_edge_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_vertical_edge_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+#define vp8_loop_filter_simple_mbv vp8_loop_filter_simple_vertical_edge_sse2
+
+void vp8_loop_filter_simple_horizontal_edge_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_horizontal_edge_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_horizontal_edge_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+#define vp8_loop_filter_simple_mbh vp8_loop_filter_simple_horizontal_edge_sse2
+
+void vp8_loop_filter_bvs_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bvs_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bvs_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+#define vp8_loop_filter_simple_bv vp8_loop_filter_bvs_sse2
+
+void vp8_loop_filter_bhs_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bhs_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bhs_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+#define vp8_loop_filter_simple_bh vp8_loop_filter_bhs_sse2
+
+void vp8_short_idct4x4llm_c(short *input, unsigned char *pred, int pitch, unsigned char *dst, int dst_stride);
+void vp8_short_idct4x4llm_mmx(short *input, unsigned char *pred, int pitch, unsigned char *dst, int dst_stride);
+#define vp8_short_idct4x4llm vp8_short_idct4x4llm_mmx
+
+void vp8_short_inv_walsh4x4_1_c(short *input, short *output);
+#define vp8_short_inv_walsh4x4_1 vp8_short_inv_walsh4x4_1_c
+
+void vp8_short_inv_walsh4x4_c(short *input, short *output);
+void vp8_short_inv_walsh4x4_mmx(short *input, short *output);
+void vp8_short_inv_walsh4x4_sse2(short *input, short *output);
+#define vp8_short_inv_walsh4x4 vp8_short_inv_walsh4x4_sse2
+
+void vp8_dc_only_idct_add_c(short input, unsigned char *pred, int pred_stride, unsigned char *dst, int dst_stride);
+void vp8_dc_only_idct_add_mmx(short input, unsigned char *pred, int pred_stride, unsigned char *dst, int dst_stride);
+#define vp8_dc_only_idct_add vp8_dc_only_idct_add_mmx
+
+void vp8_copy_mem16x16_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem16x16_mmx(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem16x16_sse2(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+#define vp8_copy_mem16x16 vp8_copy_mem16x16_sse2
+
+void vp8_copy_mem8x8_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem8x8_mmx(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+#define vp8_copy_mem8x8 vp8_copy_mem8x8_mmx
+
+void vp8_copy_mem8x4_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem8x4_mmx(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+#define vp8_copy_mem8x4 vp8_copy_mem8x4_mmx
+
+void vp8_build_intra_predictors_mby_s_c(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+void vp8_build_intra_predictors_mby_s_sse2(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+void vp8_build_intra_predictors_mby_s_ssse3(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+RTCD_EXTERN void (*vp8_build_intra_predictors_mby_s)(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+
+void vp8_build_intra_predictors_mbuv_s_c(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+void vp8_build_intra_predictors_mbuv_s_sse2(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+void vp8_build_intra_predictors_mbuv_s_ssse3(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+RTCD_EXTERN void (*vp8_build_intra_predictors_mbuv_s)(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+
+void vp8_intra4x4_predict_d_c(unsigned char *above, unsigned char *left, int left_stride, int b_mode, unsigned char *dst, int dst_stride, unsigned char top_left);
+#define vp8_intra4x4_predict_d vp8_intra4x4_predict_d_c
+
+void vp8_intra4x4_predict_c(unsigned char *src, int src_stride, int b_mode, unsigned char *dst, int dst_stride);
+#define vp8_intra4x4_predict vp8_intra4x4_predict_c
+
+void vp8_mbpost_proc_down_c(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+void vp8_mbpost_proc_down_mmx(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+void vp8_mbpost_proc_down_xmm(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+#define vp8_mbpost_proc_down vp8_mbpost_proc_down_xmm
+
+void vp8_mbpost_proc_across_ip_c(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+void vp8_mbpost_proc_across_ip_xmm(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+#define vp8_mbpost_proc_across_ip vp8_mbpost_proc_across_ip_xmm
+
+void vp8_post_proc_down_and_across_c(unsigned char *src, unsigned char *dst, int src_pitch, int dst_pitch, int rows, int cols, int flimit);
+void vp8_post_proc_down_and_across_mmx(unsigned char *src, unsigned char *dst, int src_pitch, int dst_pitch, int rows, int cols, int flimit);
+void vp8_post_proc_down_and_across_xmm(unsigned char *src, unsigned char *dst, int src_pitch, int dst_pitch, int rows, int cols, int flimit);
+#define vp8_post_proc_down_and_across vp8_post_proc_down_and_across_xmm
+
+void vp8_plane_add_noise_c(unsigned char *s, char *noise, char blackclamp[16], char whiteclamp[16], char bothclamp[16], unsigned int w, unsigned int h, int pitch);
+void vp8_plane_add_noise_mmx(unsigned char *s, char *noise, char blackclamp[16], char whiteclamp[16], char bothclamp[16], unsigned int w, unsigned int h, int pitch);
+void vp8_plane_add_noise_wmt(unsigned char *s, char *noise, char blackclamp[16], char whiteclamp[16], char bothclamp[16], unsigned int w, unsigned int h, int pitch);
+#define vp8_plane_add_noise vp8_plane_add_noise_wmt
+
+void vp8_blend_mb_inner_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_mb_inner vp8_blend_mb_inner_c
+
+void vp8_blend_mb_outer_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_mb_outer vp8_blend_mb_outer_c
+
+void vp8_blend_b_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_b vp8_blend_b_c
+
+void vp8_filter_by_weight16x16_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+void vp8_filter_by_weight16x16_sse2(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+#define vp8_filter_by_weight16x16 vp8_filter_by_weight16x16_sse2
+
+void vp8_filter_by_weight8x8_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+void vp8_filter_by_weight8x8_sse2(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+#define vp8_filter_by_weight8x8 vp8_filter_by_weight8x8_sse2
+
+void vp8_filter_by_weight4x4_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+#define vp8_filter_by_weight4x4 vp8_filter_by_weight4x4_c
+
+void vp8_sixtap_predict16x16_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict16x16_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict16x16_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict16x16_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict16x16)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_sixtap_predict8x8_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x8_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x8_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x8_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict8x8)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_sixtap_predict8x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x4_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x4_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict8x4)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_sixtap_predict4x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict4x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict4x4_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict4x4)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_bilinear_predict16x16_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict16x16_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict16x16_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict16x16_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_bilinear_predict16x16)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_bilinear_predict8x8_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x8_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x8_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x8_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_bilinear_predict8x8)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_bilinear_predict8x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+#define vp8_bilinear_predict8x4 vp8_bilinear_predict8x4_mmx
+
+void vp8_bilinear_predict4x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict4x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+#define vp8_bilinear_predict4x4 vp8_bilinear_predict4x4_mmx
+
+unsigned int vp8_variance4x4_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance4x4_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance4x4_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance4x4 vp8_variance4x4_wmt
+
+unsigned int vp8_variance8x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x8_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x8_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance8x8 vp8_variance8x8_wmt
+
+unsigned int vp8_variance8x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x16_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x16_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance8x16 vp8_variance8x16_wmt
+
+unsigned int vp8_variance16x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x8_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x8_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance16x8 vp8_variance16x8_wmt
+
+unsigned int vp8_variance16x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x16_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x16_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance16x16 vp8_variance16x16_wmt
+
+unsigned int vp8_sub_pixel_variance4x4_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance4x4_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance4x4_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_variance4x4 vp8_sub_pixel_variance4x4_wmt
+
+unsigned int vp8_sub_pixel_variance8x8_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x8_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x8_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_variance8x8 vp8_sub_pixel_variance8x8_wmt
+
+unsigned int vp8_sub_pixel_variance8x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x16_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x16_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_variance8x16 vp8_sub_pixel_variance8x16_wmt
+
+unsigned int vp8_sub_pixel_variance16x8_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x8_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x8_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x8_ssse3(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_variance16x8)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_sub_pixel_variance16x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x16_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x16_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x16_ssse3(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_variance16x16)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_variance_halfpixvar16x16_h_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_h_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_h_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance_halfpixvar16x16_h vp8_variance_halfpixvar16x16_h_wmt
+
+unsigned int vp8_variance_halfpixvar16x16_v_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_v_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_v_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance_halfpixvar16x16_v vp8_variance_halfpixvar16x16_v_wmt
+
+unsigned int vp8_variance_halfpixvar16x16_hv_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_hv_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_hv_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance_halfpixvar16x16_hv vp8_variance_halfpixvar16x16_hv_wmt
+
+unsigned int vp8_sad4x4_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+unsigned int vp8_sad4x4_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+unsigned int vp8_sad4x4_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+#define vp8_sad4x4 vp8_sad4x4_wmt
+
+unsigned int vp8_sad8x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+unsigned int vp8_sad8x8_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+unsigned int vp8_sad8x8_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+#define vp8_sad8x8 vp8_sad8x8_wmt
+
+unsigned int vp8_sad8x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+unsigned int vp8_sad8x16_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+unsigned int vp8_sad8x16_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+#define vp8_sad8x16 vp8_sad8x16_wmt
+
+unsigned int vp8_sad16x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+unsigned int vp8_sad16x8_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+unsigned int vp8_sad16x8_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+#define vp8_sad16x8 vp8_sad16x8_wmt
+
+unsigned int vp8_sad16x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+unsigned int vp8_sad16x16_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+unsigned int vp8_sad16x16_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+unsigned int vp8_sad16x16_sse3(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+RTCD_EXTERN unsigned int (*vp8_sad16x16)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+
+void vp8_sad4x4x3_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad4x4x3_sse3(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad4x4x3)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x8x3_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x8x3_sse3(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x8x3)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x16x3_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x16x3_sse3(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x16x3)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x8x3_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x8x3_sse3(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x8x3_ssse3(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x8x3)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x16x3_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x16x3_sse3(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x16x3_ssse3(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x16x3)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad4x4x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad4x4x8_sse4(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad4x4x8)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad8x8x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad8x8x8_sse4(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad8x8x8)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad8x16x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad8x16x8_sse4(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad8x16x8)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad16x8x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad16x8x8_sse4(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad16x8x8)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad16x16x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad16x16x8_sse4(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad16x16x8)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad4x4x4d_c(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+void vp8_sad4x4x4d_sse3(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad4x4x4d)(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x8x4d_c(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x8x4d_sse3(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x8x4d)(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x16x4d_c(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x16x4d_sse3(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x16x4d)(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x8x4d_c(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x8x4d_sse3(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x8x4d)(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x16x4d_c(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x16x4d_sse3(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x16x4d)(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+
+unsigned int vp8_get_mb_ss_c(const short *);
+unsigned int vp8_get_mb_ss_mmx(const short *);
+unsigned int vp8_get_mb_ss_sse2(const short *);
+#define vp8_get_mb_ss vp8_get_mb_ss_sse2
+
+unsigned int vp8_sub_pixel_mse16x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_mse16x16_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_mse16x16_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_mse16x16 vp8_sub_pixel_mse16x16_wmt
+
+unsigned int vp8_mse16x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_mse16x16_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_mse16x16_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_mse16x16 vp8_mse16x16_wmt
+
+unsigned int vp8_get4x4sse_cs_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride);
+unsigned int vp8_get4x4sse_cs_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride);
+#define vp8_get4x4sse_cs vp8_get4x4sse_cs_mmx
+
+void vp8_copy32xn_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+void vp8_copy32xn_sse2(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+void vp8_copy32xn_sse3(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+RTCD_EXTERN void (*vp8_copy32xn)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+
+void vp8_short_fdct4x4_c(short *input, short *output, int pitch);
+void vp8_short_fdct4x4_mmx(short *input, short *output, int pitch);
+void vp8_short_fdct4x4_sse2(short *input, short *output, int pitch);
+#define vp8_short_fdct4x4 vp8_short_fdct4x4_sse2
+
+void vp8_short_fdct8x4_c(short *input, short *output, int pitch);
+void vp8_short_fdct8x4_mmx(short *input, short *output, int pitch);
+void vp8_short_fdct8x4_sse2(short *input, short *output, int pitch);
+#define vp8_short_fdct8x4 vp8_short_fdct8x4_sse2
+
+void vp8_short_walsh4x4_c(short *input, short *output, int pitch);
+void vp8_short_walsh4x4_sse2(short *input, short *output, int pitch);
+#define vp8_short_walsh4x4 vp8_short_walsh4x4_sse2
+
+void vp8_regular_quantize_b_c(struct block *, struct blockd *);
+void vp8_regular_quantize_b_sse2(struct block *, struct blockd *);
+void vp8_regular_quantize_b_sse4(struct block *, struct blockd *);
+RTCD_EXTERN void (*vp8_regular_quantize_b)(struct block *, struct blockd *);
+
+void vp8_fast_quantize_b_c(struct block *, struct blockd *);
+void vp8_fast_quantize_b_sse2(struct block *, struct blockd *);
+void vp8_fast_quantize_b_ssse3(struct block *, struct blockd *);
+RTCD_EXTERN void (*vp8_fast_quantize_b)(struct block *, struct blockd *);
+
+void vp8_regular_quantize_b_pair_c(struct block *b1, struct block *b2, struct blockd *d1, struct blockd *d2);
+#define vp8_regular_quantize_b_pair vp8_regular_quantize_b_pair_c
+
+void vp8_fast_quantize_b_pair_c(struct block *b1, struct block *b2, struct blockd *d1, struct blockd *d2);
+#define vp8_fast_quantize_b_pair vp8_fast_quantize_b_pair_c
+
+void vp8_quantize_mb_c(struct macroblock *);
+#define vp8_quantize_mb vp8_quantize_mb_c
+
+void vp8_quantize_mby_c(struct macroblock *);
+#define vp8_quantize_mby vp8_quantize_mby_c
+
+void vp8_quantize_mbuv_c(struct macroblock *);
+#define vp8_quantize_mbuv vp8_quantize_mbuv_c
+
+int vp8_block_error_c(short *coeff, short *dqcoeff);
+int vp8_block_error_mmx(short *coeff, short *dqcoeff);
+int vp8_block_error_xmm(short *coeff, short *dqcoeff);
+#define vp8_block_error vp8_block_error_xmm
+
+int vp8_mbblock_error_c(struct macroblock *mb, int dc);
+int vp8_mbblock_error_mmx(struct macroblock *mb, int dc);
+int vp8_mbblock_error_xmm(struct macroblock *mb, int dc);
+#define vp8_mbblock_error vp8_mbblock_error_xmm
+
+int vp8_mbuverror_c(struct macroblock *mb);
+int vp8_mbuverror_mmx(struct macroblock *mb);
+int vp8_mbuverror_xmm(struct macroblock *mb);
+#define vp8_mbuverror vp8_mbuverror_xmm
+
+void vp8_subtract_b_c(struct block *be, struct blockd *bd, int pitch);
+void vp8_subtract_b_mmx(struct block *be, struct blockd *bd, int pitch);
+void vp8_subtract_b_sse2(struct block *be, struct blockd *bd, int pitch);
+#define vp8_subtract_b vp8_subtract_b_sse2
+
+void vp8_subtract_mby_c(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+void vp8_subtract_mby_mmx(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+void vp8_subtract_mby_sse2(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+#define vp8_subtract_mby vp8_subtract_mby_sse2
+
+void vp8_subtract_mbuv_c(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+void vp8_subtract_mbuv_mmx(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+void vp8_subtract_mbuv_sse2(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+#define vp8_subtract_mbuv vp8_subtract_mbuv_sse2
+
+int vp8_full_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_full_search_sadx3(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_full_search_sadx8(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+RTCD_EXTERN int (*vp8_full_search_sad)(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+
+int vp8_refining_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_refining_search_sadx4(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+RTCD_EXTERN int (*vp8_refining_search_sad)(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+
+int vp8_diamond_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, union int_mv *best_mv, int search_param, int sad_per_bit, int *num00, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_diamond_search_sadx4(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, union int_mv *best_mv, int search_param, int sad_per_bit, int *num00, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+RTCD_EXTERN int (*vp8_diamond_search_sad)(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, union int_mv *best_mv, int search_param, int sad_per_bit, int *num00, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+
+void vp8_yv12_copy_partial_frame_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_partial_frame vp8_yv12_copy_partial_frame_c
+
+int vp8_denoiser_filter_c(struct yv12_buffer_config* mc_running_avg, struct yv12_buffer_config* running_avg, struct macroblock* signal, unsigned int motion_magnitude2, int y_offset, int uv_offset);
+int vp8_denoiser_filter_sse2(struct yv12_buffer_config* mc_running_avg, struct yv12_buffer_config* running_avg, struct macroblock* signal, unsigned int motion_magnitude2, int y_offset, int uv_offset);
+#define vp8_denoiser_filter vp8_denoiser_filter_sse2
+
+void vp8_horizontal_line_4_5_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_4_5_scale vp8_horizontal_line_4_5_scale_c
+
+void vp8_vertical_band_4_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_4_5_scale vp8_vertical_band_4_5_scale_c
+
+void vp8_last_vertical_band_4_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_4_5_scale vp8_last_vertical_band_4_5_scale_c
+
+void vp8_horizontal_line_2_3_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_2_3_scale vp8_horizontal_line_2_3_scale_c
+
+void vp8_vertical_band_2_3_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_3_scale vp8_vertical_band_2_3_scale_c
+
+void vp8_last_vertical_band_2_3_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_2_3_scale vp8_last_vertical_band_2_3_scale_c
+
+void vp8_horizontal_line_3_5_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_3_5_scale vp8_horizontal_line_3_5_scale_c
+
+void vp8_vertical_band_3_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_3_5_scale vp8_vertical_band_3_5_scale_c
+
+void vp8_last_vertical_band_3_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_3_5_scale vp8_last_vertical_band_3_5_scale_c
+
+void vp8_horizontal_line_3_4_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_3_4_scale vp8_horizontal_line_3_4_scale_c
+
+void vp8_vertical_band_3_4_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_3_4_scale vp8_vertical_band_3_4_scale_c
+
+void vp8_last_vertical_band_3_4_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_3_4_scale vp8_last_vertical_band_3_4_scale_c
+
+void vp8_horizontal_line_1_2_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_1_2_scale vp8_horizontal_line_1_2_scale_c
+
+void vp8_vertical_band_1_2_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_1_2_scale vp8_vertical_band_1_2_scale_c
+
+void vp8_last_vertical_band_1_2_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_1_2_scale vp8_last_vertical_band_1_2_scale_c
+
+void vp8_horizontal_line_5_4_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_5_4_scale vp8_horizontal_line_5_4_scale_c
+
+void vp8_vertical_band_5_4_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_5_4_scale vp8_vertical_band_5_4_scale_c
+
+void vp8_horizontal_line_5_3_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_5_3_scale vp8_horizontal_line_5_3_scale_c
+
+void vp8_vertical_band_5_3_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_5_3_scale vp8_vertical_band_5_3_scale_c
+
+void vp8_horizontal_line_2_1_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_2_1_scale vp8_horizontal_line_2_1_scale_c
+
+void vp8_vertical_band_2_1_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_1_scale vp8_vertical_band_2_1_scale_c
+
+void vp8_vertical_band_2_1_scale_i_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_1_scale_i vp8_vertical_band_2_1_scale_i_c
+
+void vp8_yv12_extend_frame_borders_c(struct yv12_buffer_config *ybf);
+#define vp8_yv12_extend_frame_borders vp8_yv12_extend_frame_borders_c
+
+void vp8_yv12_copy_frame_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_frame vp8_yv12_copy_frame_c
+
+void vp8_yv12_copy_y_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_y vp8_yv12_copy_y_c
+void vpx_rtcd(void);
+
+#ifdef RTCD_C
+#include "vpx_ports/x86.h"
+void vpx_rtcd(void)
+{
+    int flags = x86_simd_caps();
+
+    (void)flags;
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    vp8_build_intra_predictors_mby_s = vp8_build_intra_predictors_mby_s_sse2;
+    if (flags & HAS_SSSE3) vp8_build_intra_predictors_mby_s = vp8_build_intra_predictors_mby_s_ssse3;
+
+    vp8_build_intra_predictors_mbuv_s = vp8_build_intra_predictors_mbuv_s_sse2;
+    if (flags & HAS_SSSE3) vp8_build_intra_predictors_mbuv_s = vp8_build_intra_predictors_mbuv_s_ssse3;
+
+
+
+
+
+
+
+
+
+
+
+
+
+    vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_sse2;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_ssse3;
+
+    vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_sse2;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_ssse3;
+
+    vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_sse2;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_ssse3;
+
+    vp8_sixtap_predict4x4 = vp8_sixtap_predict4x4_mmx;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict4x4 = vp8_sixtap_predict4x4_ssse3;
+
+    vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_sse2;
+    if (flags & HAS_SSSE3) vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_ssse3;
+
+    vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_sse2;
+    if (flags & HAS_SSSE3) vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_ssse3;
+
+
+
+
+
+
+
+
+
+
+
+    vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_wmt;
+    if (flags & HAS_SSSE3) vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_ssse3;
+
+    vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_wmt;
+    if (flags & HAS_SSSE3) vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_ssse3;
+
+
+
+
+
+
+
+
+    vp8_sad16x16 = vp8_sad16x16_wmt;
+    if (flags & HAS_SSE3) vp8_sad16x16 = vp8_sad16x16_sse3;
+
+    vp8_sad4x4x3 = vp8_sad4x4x3_c;
+    if (flags & HAS_SSE3) vp8_sad4x4x3 = vp8_sad4x4x3_sse3;
+
+    vp8_sad8x8x3 = vp8_sad8x8x3_c;
+    if (flags & HAS_SSE3) vp8_sad8x8x3 = vp8_sad8x8x3_sse3;
+
+    vp8_sad8x16x3 = vp8_sad8x16x3_c;
+    if (flags & HAS_SSE3) vp8_sad8x16x3 = vp8_sad8x16x3_sse3;
+
+    vp8_sad16x8x3 = vp8_sad16x8x3_c;
+    if (flags & HAS_SSE3) vp8_sad16x8x3 = vp8_sad16x8x3_sse3;
+    if (flags & HAS_SSSE3) vp8_sad16x8x3 = vp8_sad16x8x3_ssse3;
+
+    vp8_sad16x16x3 = vp8_sad16x16x3_c;
+    if (flags & HAS_SSE3) vp8_sad16x16x3 = vp8_sad16x16x3_sse3;
+    if (flags & HAS_SSSE3) vp8_sad16x16x3 = vp8_sad16x16x3_ssse3;
+
+    vp8_sad4x4x8 = vp8_sad4x4x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad4x4x8 = vp8_sad4x4x8_sse4;
+
+    vp8_sad8x8x8 = vp8_sad8x8x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad8x8x8 = vp8_sad8x8x8_sse4;
+
+    vp8_sad8x16x8 = vp8_sad8x16x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad8x16x8 = vp8_sad8x16x8_sse4;
+
+    vp8_sad16x8x8 = vp8_sad16x8x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad16x8x8 = vp8_sad16x8x8_sse4;
+
+    vp8_sad16x16x8 = vp8_sad16x16x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad16x16x8 = vp8_sad16x16x8_sse4;
+
+    vp8_sad4x4x4d = vp8_sad4x4x4d_c;
+    if (flags & HAS_SSE3) vp8_sad4x4x4d = vp8_sad4x4x4d_sse3;
+
+    vp8_sad8x8x4d = vp8_sad8x8x4d_c;
+    if (flags & HAS_SSE3) vp8_sad8x8x4d = vp8_sad8x8x4d_sse3;
+
+    vp8_sad8x16x4d = vp8_sad8x16x4d_c;
+    if (flags & HAS_SSE3) vp8_sad8x16x4d = vp8_sad8x16x4d_sse3;
+
+    vp8_sad16x8x4d = vp8_sad16x8x4d_c;
+    if (flags & HAS_SSE3) vp8_sad16x8x4d = vp8_sad16x8x4d_sse3;
+
+    vp8_sad16x16x4d = vp8_sad16x16x4d_c;
+    if (flags & HAS_SSE3) vp8_sad16x16x4d = vp8_sad16x16x4d_sse3;
+
+
+
+
+
+    vp8_copy32xn = vp8_copy32xn_sse2;
+    if (flags & HAS_SSE3) vp8_copy32xn = vp8_copy32xn_sse3;
+
+
+
+
+    vp8_regular_quantize_b = vp8_regular_quantize_b_sse2;
+    if (flags & HAS_SSE4_1) vp8_regular_quantize_b = vp8_regular_quantize_b_sse4;
+
+    vp8_fast_quantize_b = vp8_fast_quantize_b_sse2;
+    if (flags & HAS_SSSE3) vp8_fast_quantize_b = vp8_fast_quantize_b_ssse3;
+
+
+
+
+
+
+
+
+
+
+
+
+    vp8_full_search_sad = vp8_full_search_sad_c;
+    if (flags & HAS_SSE3) vp8_full_search_sad = vp8_full_search_sadx3;
+    if (flags & HAS_SSE4_1) vp8_full_search_sad = vp8_full_search_sadx8;
+
+    vp8_refining_search_sad = vp8_refining_search_sad_c;
+    if (flags & HAS_SSE3) vp8_refining_search_sad = vp8_refining_search_sadx4;
+
+    vp8_diamond_search_sad = vp8_diamond_search_sad_c;
+    if (flags & HAS_SSE3) vp8_diamond_search_sad = vp8_diamond_search_sadx4;
+}
+#endif
+#endif
diff --git a/third_party/libvpx/source/config/mac/x64/vpx_version.h b/third_party/libvpx/source/config/mac/x64/vpx_version.h
new file mode 100644
index 0000000..f6b740c
--- /dev/null
+++ b/third_party/libvpx/source/config/mac/x64/vpx_version.h
@@ -0,0 +1,7 @@
+#define VERSION_MAJOR  1
+#define VERSION_MINOR  1
+#define VERSION_PATCH  0
+#define VERSION_EXTRA  "6-gdd6134b"
+#define VERSION_PACKED ((VERSION_MAJOR<<16)|(VERSION_MINOR<<8)|(VERSION_PATCH))
+#define VERSION_STRING_NOSP "v1.1.0-6-gdd6134b"
+#define VERSION_STRING      " v1.1.0-6-gdd6134b"
diff --git a/third_party/libvpx/source/config/win/ia32/vpx_config.asm b/third_party/libvpx/source/config/win/ia32/vpx_config.asm
new file mode 100644
index 0000000..456e96f
--- /dev/null
+++ b/third_party/libvpx/source/config/win/ia32/vpx_config.asm
@@ -0,0 +1,67 @@
+ARCH_ARM equ 0
+ARCH_MIPS equ 0
+ARCH_X86 equ 1
+ARCH_X86_64 equ 0
+ARCH_PPC32 equ 0
+ARCH_PPC64 equ 0
+HAVE_EDSP equ 0
+HAVE_MEDIA equ 0
+HAVE_NEON equ 0
+HAVE_MIPS32 equ 0
+HAVE_MMX equ 1
+HAVE_SSE equ 1
+HAVE_SSE2 equ 1
+HAVE_SSE3 equ 1
+HAVE_SSSE3 equ 1
+HAVE_SSE4_1 equ 1
+HAVE_ALTIVEC equ 0
+HAVE_VPX_PORTS equ 1
+HAVE_STDINT_H equ 0
+HAVE_ALT_TREE_LAYOUT equ 0
+HAVE_PTHREAD_H equ 0
+HAVE_SYS_MMAN_H equ 0
+HAVE_UNISTD_H equ 0
+CONFIG_EXTERNAL_BUILD equ 1
+CONFIG_INSTALL_DOCS equ 0
+CONFIG_INSTALL_BINS equ 0
+CONFIG_INSTALL_LIBS equ 0
+CONFIG_INSTALL_SRCS equ 0
+CONFIG_DEBUG equ 0
+CONFIG_GPROF equ 0
+CONFIG_GCOV equ 0
+CONFIG_RVCT equ 0
+CONFIG_GCC equ 0
+CONFIG_MSVS equ 1
+CONFIG_PIC equ 1
+CONFIG_BIG_ENDIAN equ 0
+CONFIG_CODEC_SRCS equ 0
+CONFIG_DEBUG_LIBS equ 0
+CONFIG_FAST_UNALIGNED equ 1
+CONFIG_MEM_MANAGER equ 0
+CONFIG_MEM_TRACKER equ 0
+CONFIG_MEM_CHECKS equ 0
+CONFIG_MD5 equ 1
+CONFIG_DEQUANT_TOKENS equ 0
+CONFIG_DC_RECON equ 0
+CONFIG_RUNTIME_CPU_DETECT equ 1
+CONFIG_POSTPROC equ 1
+CONFIG_MULTITHREAD equ 1
+CONFIG_INTERNAL_STATS equ 0
+CONFIG_VP8_ENCODER equ 1
+CONFIG_VP8_DECODER equ 1
+CONFIG_VP8 equ 1
+CONFIG_ENCODERS equ 1
+CONFIG_DECODERS equ 1
+CONFIG_STATIC_MSVCRT equ 1
+CONFIG_SPATIAL_RESAMPLING equ 1
+CONFIG_REALTIME_ONLY equ 1
+CONFIG_ONTHEFLY_BITPACKING equ 0
+CONFIG_ERROR_CONCEALMENT equ 1
+CONFIG_SHARED equ 0
+CONFIG_STATIC equ 1
+CONFIG_SMALL equ 0
+CONFIG_POSTPROC_VISUALIZER equ 0
+CONFIG_OS_SUPPORT equ 1
+CONFIG_UNIT_TESTS equ 0
+CONFIG_MULTI_RES_ENCODING equ 1
+CONFIG_TEMPORAL_DENOISING equ 1
diff --git a/third_party/libvpx/source/config/win/ia32/vpx_config.c b/third_party/libvpx/source/config/win/ia32/vpx_config.c
new file mode 100644
index 0000000..6c67a42
--- /dev/null
+++ b/third_party/libvpx/source/config/win/ia32/vpx_config.c
@@ -0,0 +1,9 @@
+/* Copyright (c) 2011 The WebM project authors. All Rights Reserved. */
+/*  */
+/* Use of this source code is governed by a BSD-style license */
+/* that can be found in the LICENSE file in the root of the source */
+/* tree. An additional intellectual property rights grant can be found */
+/* in the file PATENTS.  All contributing project authors may */
+/* be found in the AUTHORS file in the root of the source tree. */
+static const char* const cfg = "--target=x86-win32-vs9 --enable-pic --enable-error-concealment --disable-install-docs --disable-install-srcs --disable-examples --disable-internal-stats --disable-install-libs --disable-install-bins --enable-realtime-only --enable-static-msvcrt --as=yasm --enable-multi-res-encoding";
+const char *vpx_codec_build_config(void) {return cfg;}
diff --git a/third_party/libvpx/source/config/win/ia32/vpx_config.h b/third_party/libvpx/source/config/win/ia32/vpx_config.h
new file mode 100644
index 0000000..bdfa8da
--- /dev/null
+++ b/third_party/libvpx/source/config/win/ia32/vpx_config.h
@@ -0,0 +1,79 @@
+/* Copyright (c) 2011 The WebM project authors. All Rights Reserved. */
+/*  */
+/* Use of this source code is governed by a BSD-style license */
+/* that can be found in the LICENSE file in the root of the source */
+/* tree. An additional intellectual property rights grant can be found */
+/* in the file PATENTS.  All contributing project authors may */
+/* be found in the AUTHORS file in the root of the source tree. */
+/* This file automatically generated by configure. Do not edit! */
+#ifndef VPX_CONFIG_H
+#define VPX_CONFIG_H
+#define RESTRICT    
+#define ARCH_ARM 0
+#define ARCH_MIPS 0
+#define ARCH_X86 1
+#define ARCH_X86_64 0
+#define ARCH_PPC32 0
+#define ARCH_PPC64 0
+#define HAVE_EDSP 0
+#define HAVE_MEDIA 0
+#define HAVE_NEON 0
+#define HAVE_MIPS32 0
+#define HAVE_MMX 1
+#define HAVE_SSE 1
+#define HAVE_SSE2 1
+#define HAVE_SSE3 1
+#define HAVE_SSSE3 1
+#define HAVE_SSE4_1 1
+#define HAVE_ALTIVEC 0
+#define HAVE_VPX_PORTS 1
+#define HAVE_STDINT_H 0
+#define HAVE_ALT_TREE_LAYOUT 0
+#define HAVE_PTHREAD_H 0
+#define HAVE_SYS_MMAN_H 0
+#define HAVE_UNISTD_H 0
+#define CONFIG_EXTERNAL_BUILD 1
+#define CONFIG_INSTALL_DOCS 0
+#define CONFIG_INSTALL_BINS 0
+#define CONFIG_INSTALL_LIBS 0
+#define CONFIG_INSTALL_SRCS 0
+#define CONFIG_DEBUG 0
+#define CONFIG_GPROF 0
+#define CONFIG_GCOV 0
+#define CONFIG_RVCT 0
+#define CONFIG_GCC 0
+#define CONFIG_MSVS 1
+#define CONFIG_PIC 1
+#define CONFIG_BIG_ENDIAN 0
+#define CONFIG_CODEC_SRCS 0
+#define CONFIG_DEBUG_LIBS 0
+#define CONFIG_FAST_UNALIGNED 1
+#define CONFIG_MEM_MANAGER 0
+#define CONFIG_MEM_TRACKER 0
+#define CONFIG_MEM_CHECKS 0
+#define CONFIG_MD5 1
+#define CONFIG_DEQUANT_TOKENS 0
+#define CONFIG_DC_RECON 0
+#define CONFIG_RUNTIME_CPU_DETECT 1
+#define CONFIG_POSTPROC 1
+#define CONFIG_MULTITHREAD 1
+#define CONFIG_INTERNAL_STATS 0
+#define CONFIG_VP8_ENCODER 1
+#define CONFIG_VP8_DECODER 1
+#define CONFIG_VP8 1
+#define CONFIG_ENCODERS 1
+#define CONFIG_DECODERS 1
+#define CONFIG_STATIC_MSVCRT 1
+#define CONFIG_SPATIAL_RESAMPLING 1
+#define CONFIG_REALTIME_ONLY 1
+#define CONFIG_ONTHEFLY_BITPACKING 0
+#define CONFIG_ERROR_CONCEALMENT 1
+#define CONFIG_SHARED 0
+#define CONFIG_STATIC 1
+#define CONFIG_SMALL 0
+#define CONFIG_POSTPROC_VISUALIZER 0
+#define CONFIG_OS_SUPPORT 1
+#define CONFIG_UNIT_TESTS 0
+#define CONFIG_MULTI_RES_ENCODING 1
+#define CONFIG_TEMPORAL_DENOISING 1
+#endif /* VPX_CONFIG_H */
diff --git a/third_party/libvpx/source/config/win/ia32/vpx_rtcd.h b/third_party/libvpx/source/config/win/ia32/vpx_rtcd.h
new file mode 100755
index 0000000..6630af4
--- /dev/null
+++ b/third_party/libvpx/source/config/win/ia32/vpx_rtcd.h
@@ -0,0 +1,898 @@
+#ifndef VPX_RTCD_
+#define VPX_RTCD_
+
+#ifdef RTCD_C
+#define RTCD_EXTERN
+#else
+#define RTCD_EXTERN extern
+#endif
+
+#include "vp8/common/blockd.h"
+
+struct blockd;
+struct macroblockd;
+struct loop_filter_info;
+
+/* Encoder forward decls */
+struct block;
+struct macroblock;
+struct variance_vtable;
+union int_mv;
+struct yv12_buffer_config;
+
+void vp8_dequantize_b_c(struct blockd*, short *dqc);
+void vp8_dequantize_b_mmx(struct blockd*, short *dqc);
+RTCD_EXTERN void (*vp8_dequantize_b)(struct blockd*, short *dqc);
+
+void vp8_dequant_idct_add_c(short *input, short *dq, unsigned char *output, int stride);
+void vp8_dequant_idct_add_mmx(short *input, short *dq, unsigned char *output, int stride);
+RTCD_EXTERN void (*vp8_dequant_idct_add)(short *input, short *dq, unsigned char *output, int stride);
+
+void vp8_dequant_idct_add_y_block_c(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+void vp8_dequant_idct_add_y_block_mmx(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+void vp8_dequant_idct_add_y_block_sse2(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+RTCD_EXTERN void (*vp8_dequant_idct_add_y_block)(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+
+void vp8_dequant_idct_add_uv_block_c(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+void vp8_dequant_idct_add_uv_block_mmx(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+void vp8_dequant_idct_add_uv_block_sse2(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+RTCD_EXTERN void (*vp8_dequant_idct_add_uv_block)(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+
+void vp8_loop_filter_mbv_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbv_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbv_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+RTCD_EXTERN void (*vp8_loop_filter_mbv)(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+
+void vp8_loop_filter_bv_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bv_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bv_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+RTCD_EXTERN void (*vp8_loop_filter_bv)(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+
+void vp8_loop_filter_mbh_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbh_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbh_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+RTCD_EXTERN void (*vp8_loop_filter_mbh)(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+
+void vp8_loop_filter_bh_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bh_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bh_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+RTCD_EXTERN void (*vp8_loop_filter_bh)(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+
+void vp8_loop_filter_simple_vertical_edge_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_vertical_edge_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_vertical_edge_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+RTCD_EXTERN void (*vp8_loop_filter_simple_mbv)(unsigned char *y, int ystride, const unsigned char *blimit);
+
+void vp8_loop_filter_simple_horizontal_edge_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_horizontal_edge_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_horizontal_edge_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+RTCD_EXTERN void (*vp8_loop_filter_simple_mbh)(unsigned char *y, int ystride, const unsigned char *blimit);
+
+void vp8_loop_filter_bvs_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bvs_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bvs_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+RTCD_EXTERN void (*vp8_loop_filter_simple_bv)(unsigned char *y, int ystride, const unsigned char *blimit);
+
+void vp8_loop_filter_bhs_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bhs_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bhs_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+RTCD_EXTERN void (*vp8_loop_filter_simple_bh)(unsigned char *y, int ystride, const unsigned char *blimit);
+
+void vp8_short_idct4x4llm_c(short *input, unsigned char *pred, int pitch, unsigned char *dst, int dst_stride);
+void vp8_short_idct4x4llm_mmx(short *input, unsigned char *pred, int pitch, unsigned char *dst, int dst_stride);
+RTCD_EXTERN void (*vp8_short_idct4x4llm)(short *input, unsigned char *pred, int pitch, unsigned char *dst, int dst_stride);
+
+void vp8_short_inv_walsh4x4_1_c(short *input, short *output);
+#define vp8_short_inv_walsh4x4_1 vp8_short_inv_walsh4x4_1_c
+
+void vp8_short_inv_walsh4x4_c(short *input, short *output);
+void vp8_short_inv_walsh4x4_mmx(short *input, short *output);
+void vp8_short_inv_walsh4x4_sse2(short *input, short *output);
+RTCD_EXTERN void (*vp8_short_inv_walsh4x4)(short *input, short *output);
+
+void vp8_dc_only_idct_add_c(short input, unsigned char *pred, int pred_stride, unsigned char *dst, int dst_stride);
+void vp8_dc_only_idct_add_mmx(short input, unsigned char *pred, int pred_stride, unsigned char *dst, int dst_stride);
+RTCD_EXTERN void (*vp8_dc_only_idct_add)(short input, unsigned char *pred, int pred_stride, unsigned char *dst, int dst_stride);
+
+void vp8_copy_mem16x16_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem16x16_mmx(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem16x16_sse2(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_copy_mem16x16)(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+
+void vp8_copy_mem8x8_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem8x8_mmx(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_copy_mem8x8)(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+
+void vp8_copy_mem8x4_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem8x4_mmx(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_copy_mem8x4)(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+
+void vp8_build_intra_predictors_mby_s_c(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+void vp8_build_intra_predictors_mby_s_sse2(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+void vp8_build_intra_predictors_mby_s_ssse3(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+RTCD_EXTERN void (*vp8_build_intra_predictors_mby_s)(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+
+void vp8_build_intra_predictors_mbuv_s_c(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+void vp8_build_intra_predictors_mbuv_s_sse2(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+void vp8_build_intra_predictors_mbuv_s_ssse3(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+RTCD_EXTERN void (*vp8_build_intra_predictors_mbuv_s)(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+
+void vp8_intra4x4_predict_c(unsigned char *Above, unsigned char *yleft, int left_stride, B_PREDICTION_MODE b_mode, unsigned char *dst, int dst_stride, unsigned char top_left);
+#define vp8_intra4x4_predict vp8_intra4x4_predict_c
+
+void vp8_mbpost_proc_down_c(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+void vp8_mbpost_proc_down_mmx(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+void vp8_mbpost_proc_down_xmm(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+RTCD_EXTERN void (*vp8_mbpost_proc_down)(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+
+void vp8_mbpost_proc_across_ip_c(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+void vp8_mbpost_proc_across_ip_xmm(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+RTCD_EXTERN void (*vp8_mbpost_proc_across_ip)(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+
+void vp8_post_proc_down_and_across_c(unsigned char *src, unsigned char *dst, int src_pitch, int dst_pitch, int rows, int cols, int flimit);
+void vp8_post_proc_down_and_across_mmx(unsigned char *src, unsigned char *dst, int src_pitch, int dst_pitch, int rows, int cols, int flimit);
+void vp8_post_proc_down_and_across_xmm(unsigned char *src, unsigned char *dst, int src_pitch, int dst_pitch, int rows, int cols, int flimit);
+RTCD_EXTERN void (*vp8_post_proc_down_and_across)(unsigned char *src, unsigned char *dst, int src_pitch, int dst_pitch, int rows, int cols, int flimit);
+
+void vp8_plane_add_noise_c(unsigned char *s, char *noise, char blackclamp[16], char whiteclamp[16], char bothclamp[16], unsigned int w, unsigned int h, int pitch);
+void vp8_plane_add_noise_mmx(unsigned char *s, char *noise, char blackclamp[16], char whiteclamp[16], char bothclamp[16], unsigned int w, unsigned int h, int pitch);
+void vp8_plane_add_noise_wmt(unsigned char *s, char *noise, char blackclamp[16], char whiteclamp[16], char bothclamp[16], unsigned int w, unsigned int h, int pitch);
+RTCD_EXTERN void (*vp8_plane_add_noise)(unsigned char *s, char *noise, char blackclamp[16], char whiteclamp[16], char bothclamp[16], unsigned int w, unsigned int h, int pitch);
+
+void vp8_blend_mb_inner_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_mb_inner vp8_blend_mb_inner_c
+
+void vp8_blend_mb_outer_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_mb_outer vp8_blend_mb_outer_c
+
+void vp8_blend_b_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_b vp8_blend_b_c
+
+void vp8_filter_by_weight16x16_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+void vp8_filter_by_weight16x16_sse2(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+RTCD_EXTERN void (*vp8_filter_by_weight16x16)(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+
+void vp8_filter_by_weight8x8_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+void vp8_filter_by_weight8x8_sse2(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+RTCD_EXTERN void (*vp8_filter_by_weight8x8)(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+
+void vp8_filter_by_weight4x4_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+#define vp8_filter_by_weight4x4 vp8_filter_by_weight4x4_c
+
+void vp8_sixtap_predict16x16_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict16x16_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict16x16_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict16x16_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict16x16)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_sixtap_predict8x8_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x8_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x8_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x8_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict8x8)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_sixtap_predict8x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x4_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x4_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict8x4)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_sixtap_predict4x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict4x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict4x4_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict4x4)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_bilinear_predict16x16_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict16x16_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict16x16_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict16x16_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_bilinear_predict16x16)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_bilinear_predict8x8_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x8_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x8_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x8_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_bilinear_predict8x8)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_bilinear_predict8x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_bilinear_predict8x4)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_bilinear_predict4x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict4x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_bilinear_predict4x4)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+unsigned int vp8_variance4x4_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance4x4_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance4x4_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance4x4)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_variance8x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x8_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x8_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance8x8)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_variance8x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x16_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x16_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance8x16)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_variance16x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x8_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x8_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance16x8)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_variance16x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x16_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x16_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance16x16)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_sub_pixel_variance4x4_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance4x4_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance4x4_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_variance4x4)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_sub_pixel_variance8x8_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x8_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x8_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_variance8x8)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_sub_pixel_variance8x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x16_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x16_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_variance8x16)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_sub_pixel_variance16x8_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x8_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x8_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x8_ssse3(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_variance16x8)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_sub_pixel_variance16x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x16_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x16_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x16_ssse3(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_variance16x16)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_variance_halfpixvar16x16_h_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_h_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_h_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance_halfpixvar16x16_h)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_variance_halfpixvar16x16_v_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_v_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_v_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance_halfpixvar16x16_v)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_variance_halfpixvar16x16_hv_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_hv_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_hv_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_variance_halfpixvar16x16_hv)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_sad4x4_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad4x4_mmx(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad4x4_wmt(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+RTCD_EXTERN unsigned int (*vp8_sad4x4)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+
+unsigned int vp8_sad8x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad8x8_mmx(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad8x8_wmt(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+RTCD_EXTERN unsigned int (*vp8_sad8x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+
+unsigned int vp8_sad8x16_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad8x16_mmx(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad8x16_wmt(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+RTCD_EXTERN unsigned int (*vp8_sad8x16)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+
+unsigned int vp8_sad16x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad16x8_mmx(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad16x8_wmt(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+RTCD_EXTERN unsigned int (*vp8_sad16x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+
+unsigned int vp8_sad16x16_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad16x16_mmx(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad16x16_wmt(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+unsigned int vp8_sad16x16_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+RTCD_EXTERN unsigned int (*vp8_sad16x16)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int ref_stride, unsigned int max_sad);
+
+void vp8_sad4x4x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad4x4x3_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad4x4x3)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x8x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x8x3_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x8x3)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x16x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x16x3_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x16x3)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x8x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x8x3_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x8x3_ssse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x8x3)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x16x3_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x16x3_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x16x3_ssse3(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x16x3)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad4x4x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad4x4x8_sse4(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad4x4x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad8x8x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad8x8x8_sse4(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad8x8x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad8x16x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad8x16x8_sse4(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad8x16x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad16x8x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad16x8x8_sse4(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad16x8x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad16x16x8_c(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad16x16x8_sse4(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad16x16x8)(const unsigned char *src_ptr, int src_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad4x4x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+void vp8_sad4x4x4d_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad4x4x4d)(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x8x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x8x4d_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x8x4d)(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x16x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x16x4d_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x16x4d)(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x8x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x8x4d_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x8x4d)(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x16x4d_c(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x16x4d_sse3(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x16x4d)(const unsigned char *src_ptr, int src_stride, const unsigned char * const ref_ptr[], int  ref_stride, unsigned int *sad_array);
+
+unsigned int vp8_get_mb_ss_c(const short *);
+unsigned int vp8_get_mb_ss_mmx(const short *);
+unsigned int vp8_get_mb_ss_sse2(const short *);
+RTCD_EXTERN unsigned int (*vp8_get_mb_ss)(const short *);
+
+unsigned int vp8_sub_pixel_mse16x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_mse16x16_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_mse16x16_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_mse16x16)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_mse16x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_mse16x16_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_mse16x16_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_mse16x16)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+
+unsigned int vp8_get4x4sse_cs_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride);
+unsigned int vp8_get4x4sse_cs_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride);
+RTCD_EXTERN unsigned int (*vp8_get4x4sse_cs)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride);
+
+void vp8_copy32xn_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+void vp8_copy32xn_sse2(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+void vp8_copy32xn_sse3(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+RTCD_EXTERN void (*vp8_copy32xn)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+
+void vp8_short_fdct4x4_c(short *input, short *output, int pitch);
+void vp8_short_fdct4x4_mmx(short *input, short *output, int pitch);
+void vp8_short_fdct4x4_sse2(short *input, short *output, int pitch);
+RTCD_EXTERN void (*vp8_short_fdct4x4)(short *input, short *output, int pitch);
+
+void vp8_short_fdct8x4_c(short *input, short *output, int pitch);
+void vp8_short_fdct8x4_mmx(short *input, short *output, int pitch);
+void vp8_short_fdct8x4_sse2(short *input, short *output, int pitch);
+RTCD_EXTERN void (*vp8_short_fdct8x4)(short *input, short *output, int pitch);
+
+void vp8_short_walsh4x4_c(short *input, short *output, int pitch);
+void vp8_short_walsh4x4_sse2(short *input, short *output, int pitch);
+RTCD_EXTERN void (*vp8_short_walsh4x4)(short *input, short *output, int pitch);
+
+void vp8_regular_quantize_b_c(struct block *, struct blockd *);
+void vp8_regular_quantize_b_sse2(struct block *, struct blockd *);
+void vp8_regular_quantize_b_sse4(struct block *, struct blockd *);
+RTCD_EXTERN void (*vp8_regular_quantize_b)(struct block *, struct blockd *);
+
+void vp8_fast_quantize_b_c(struct block *, struct blockd *);
+void vp8_fast_quantize_b_sse2(struct block *, struct blockd *);
+void vp8_fast_quantize_b_ssse3(struct block *, struct blockd *);
+RTCD_EXTERN void (*vp8_fast_quantize_b)(struct block *, struct blockd *);
+
+void vp8_regular_quantize_b_pair_c(struct block *b1, struct block *b2, struct blockd *d1, struct blockd *d2);
+#define vp8_regular_quantize_b_pair vp8_regular_quantize_b_pair_c
+
+void vp8_fast_quantize_b_pair_c(struct block *b1, struct block *b2, struct blockd *d1, struct blockd *d2);
+#define vp8_fast_quantize_b_pair vp8_fast_quantize_b_pair_c
+
+void vp8_quantize_mb_c(struct macroblock *);
+#define vp8_quantize_mb vp8_quantize_mb_c
+
+void vp8_quantize_mby_c(struct macroblock *);
+#define vp8_quantize_mby vp8_quantize_mby_c
+
+void vp8_quantize_mbuv_c(struct macroblock *);
+#define vp8_quantize_mbuv vp8_quantize_mbuv_c
+
+int vp8_block_error_c(short *coeff, short *dqcoeff);
+int vp8_block_error_mmx(short *coeff, short *dqcoeff);
+int vp8_block_error_xmm(short *coeff, short *dqcoeff);
+RTCD_EXTERN int (*vp8_block_error)(short *coeff, short *dqcoeff);
+
+int vp8_mbblock_error_c(struct macroblock *mb, int dc);
+int vp8_mbblock_error_mmx(struct macroblock *mb, int dc);
+int vp8_mbblock_error_xmm(struct macroblock *mb, int dc);
+RTCD_EXTERN int (*vp8_mbblock_error)(struct macroblock *mb, int dc);
+
+int vp8_mbuverror_c(struct macroblock *mb);
+int vp8_mbuverror_mmx(struct macroblock *mb);
+int vp8_mbuverror_xmm(struct macroblock *mb);
+RTCD_EXTERN int (*vp8_mbuverror)(struct macroblock *mb);
+
+void vp8_subtract_b_c(struct block *be, struct blockd *bd, int pitch);
+void vp8_subtract_b_mmx(struct block *be, struct blockd *bd, int pitch);
+void vp8_subtract_b_sse2(struct block *be, struct blockd *bd, int pitch);
+RTCD_EXTERN void (*vp8_subtract_b)(struct block *be, struct blockd *bd, int pitch);
+
+void vp8_subtract_mby_c(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+void vp8_subtract_mby_mmx(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+void vp8_subtract_mby_sse2(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+RTCD_EXTERN void (*vp8_subtract_mby)(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+
+void vp8_subtract_mbuv_c(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+void vp8_subtract_mbuv_mmx(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+void vp8_subtract_mbuv_sse2(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+RTCD_EXTERN void (*vp8_subtract_mbuv)(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+
+int vp8_full_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_full_search_sadx3(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_full_search_sadx8(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+RTCD_EXTERN int (*vp8_full_search_sad)(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+
+int vp8_refining_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_refining_search_sadx4(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+RTCD_EXTERN int (*vp8_refining_search_sad)(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+
+int vp8_diamond_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, union int_mv *best_mv, int search_param, int sad_per_bit, int *num00, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_diamond_search_sadx4(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, union int_mv *best_mv, int search_param, int sad_per_bit, int *num00, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+RTCD_EXTERN int (*vp8_diamond_search_sad)(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, union int_mv *best_mv, int search_param, int sad_per_bit, int *num00, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+
+void vp8_yv12_copy_partial_frame_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_partial_frame vp8_yv12_copy_partial_frame_c
+
+int vp8_denoiser_filter_c(struct yv12_buffer_config* mc_running_avg, struct yv12_buffer_config* running_avg, struct macroblock* signal, unsigned int motion_magnitude2, int y_offset, int uv_offset);
+int vp8_denoiser_filter_sse2(struct yv12_buffer_config* mc_running_avg, struct yv12_buffer_config* running_avg, struct macroblock* signal, unsigned int motion_magnitude2, int y_offset, int uv_offset);
+RTCD_EXTERN int (*vp8_denoiser_filter)(struct yv12_buffer_config* mc_running_avg, struct yv12_buffer_config* running_avg, struct macroblock* signal, unsigned int motion_magnitude2, int y_offset, int uv_offset);
+
+void vp8_horizontal_line_4_5_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_4_5_scale vp8_horizontal_line_4_5_scale_c
+
+void vp8_vertical_band_4_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_4_5_scale vp8_vertical_band_4_5_scale_c
+
+void vp8_last_vertical_band_4_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_4_5_scale vp8_last_vertical_band_4_5_scale_c
+
+void vp8_horizontal_line_2_3_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_2_3_scale vp8_horizontal_line_2_3_scale_c
+
+void vp8_vertical_band_2_3_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_3_scale vp8_vertical_band_2_3_scale_c
+
+void vp8_last_vertical_band_2_3_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_2_3_scale vp8_last_vertical_band_2_3_scale_c
+
+void vp8_horizontal_line_3_5_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_3_5_scale vp8_horizontal_line_3_5_scale_c
+
+void vp8_vertical_band_3_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_3_5_scale vp8_vertical_band_3_5_scale_c
+
+void vp8_last_vertical_band_3_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_3_5_scale vp8_last_vertical_band_3_5_scale_c
+
+void vp8_horizontal_line_3_4_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_3_4_scale vp8_horizontal_line_3_4_scale_c
+
+void vp8_vertical_band_3_4_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_3_4_scale vp8_vertical_band_3_4_scale_c
+
+void vp8_last_vertical_band_3_4_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_3_4_scale vp8_last_vertical_band_3_4_scale_c
+
+void vp8_horizontal_line_1_2_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_1_2_scale vp8_horizontal_line_1_2_scale_c
+
+void vp8_vertical_band_1_2_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_1_2_scale vp8_vertical_band_1_2_scale_c
+
+void vp8_last_vertical_band_1_2_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_1_2_scale vp8_last_vertical_band_1_2_scale_c
+
+void vp8_horizontal_line_5_4_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_5_4_scale vp8_horizontal_line_5_4_scale_c
+
+void vp8_vertical_band_5_4_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_5_4_scale vp8_vertical_band_5_4_scale_c
+
+void vp8_horizontal_line_5_3_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_5_3_scale vp8_horizontal_line_5_3_scale_c
+
+void vp8_vertical_band_5_3_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_5_3_scale vp8_vertical_band_5_3_scale_c
+
+void vp8_horizontal_line_2_1_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_2_1_scale vp8_horizontal_line_2_1_scale_c
+
+void vp8_vertical_band_2_1_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_1_scale vp8_vertical_band_2_1_scale_c
+
+void vp8_vertical_band_2_1_scale_i_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_1_scale_i vp8_vertical_band_2_1_scale_i_c
+
+void vp8_yv12_extend_frame_borders_c(struct yv12_buffer_config *ybf);
+#define vp8_yv12_extend_frame_borders vp8_yv12_extend_frame_borders_c
+
+void vp8_yv12_copy_frame_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_frame vp8_yv12_copy_frame_c
+
+void vp8_yv12_copy_y_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_y vp8_yv12_copy_y_c
+
+void vpx_rtcd(void);
+
+#ifdef RTCD_C
+#include "vpx_ports/x86.h"
+static void setup_rtcd_internal(void)
+{
+    int flags = x86_simd_caps();
+
+    (void)flags;
+
+    vp8_dequantize_b = vp8_dequantize_b_c;
+    if (flags & HAS_MMX) vp8_dequantize_b = vp8_dequantize_b_mmx;
+
+    vp8_dequant_idct_add = vp8_dequant_idct_add_c;
+    if (flags & HAS_MMX) vp8_dequant_idct_add = vp8_dequant_idct_add_mmx;
+
+    vp8_dequant_idct_add_y_block = vp8_dequant_idct_add_y_block_c;
+    if (flags & HAS_MMX) vp8_dequant_idct_add_y_block = vp8_dequant_idct_add_y_block_mmx;
+    if (flags & HAS_SSE2) vp8_dequant_idct_add_y_block = vp8_dequant_idct_add_y_block_sse2;
+
+    vp8_dequant_idct_add_uv_block = vp8_dequant_idct_add_uv_block_c;
+    if (flags & HAS_MMX) vp8_dequant_idct_add_uv_block = vp8_dequant_idct_add_uv_block_mmx;
+    if (flags & HAS_SSE2) vp8_dequant_idct_add_uv_block = vp8_dequant_idct_add_uv_block_sse2;
+
+    vp8_loop_filter_mbv = vp8_loop_filter_mbv_c;
+    if (flags & HAS_MMX) vp8_loop_filter_mbv = vp8_loop_filter_mbv_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_mbv = vp8_loop_filter_mbv_sse2;
+
+    vp8_loop_filter_bv = vp8_loop_filter_bv_c;
+    if (flags & HAS_MMX) vp8_loop_filter_bv = vp8_loop_filter_bv_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_bv = vp8_loop_filter_bv_sse2;
+
+    vp8_loop_filter_mbh = vp8_loop_filter_mbh_c;
+    if (flags & HAS_MMX) vp8_loop_filter_mbh = vp8_loop_filter_mbh_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_mbh = vp8_loop_filter_mbh_sse2;
+
+    vp8_loop_filter_bh = vp8_loop_filter_bh_c;
+    if (flags & HAS_MMX) vp8_loop_filter_bh = vp8_loop_filter_bh_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_bh = vp8_loop_filter_bh_sse2;
+
+    vp8_loop_filter_simple_mbv = vp8_loop_filter_simple_vertical_edge_c;
+    if (flags & HAS_MMX) vp8_loop_filter_simple_mbv = vp8_loop_filter_simple_vertical_edge_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_simple_mbv = vp8_loop_filter_simple_vertical_edge_sse2;
+
+    vp8_loop_filter_simple_mbh = vp8_loop_filter_simple_horizontal_edge_c;
+    if (flags & HAS_MMX) vp8_loop_filter_simple_mbh = vp8_loop_filter_simple_horizontal_edge_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_simple_mbh = vp8_loop_filter_simple_horizontal_edge_sse2;
+
+    vp8_loop_filter_simple_bv = vp8_loop_filter_bvs_c;
+    if (flags & HAS_MMX) vp8_loop_filter_simple_bv = vp8_loop_filter_bvs_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_simple_bv = vp8_loop_filter_bvs_sse2;
+
+    vp8_loop_filter_simple_bh = vp8_loop_filter_bhs_c;
+    if (flags & HAS_MMX) vp8_loop_filter_simple_bh = vp8_loop_filter_bhs_mmx;
+    if (flags & HAS_SSE2) vp8_loop_filter_simple_bh = vp8_loop_filter_bhs_sse2;
+
+    vp8_short_idct4x4llm = vp8_short_idct4x4llm_c;
+    if (flags & HAS_MMX) vp8_short_idct4x4llm = vp8_short_idct4x4llm_mmx;
+
+
+    vp8_short_inv_walsh4x4 = vp8_short_inv_walsh4x4_c;
+    if (flags & HAS_MMX) vp8_short_inv_walsh4x4 = vp8_short_inv_walsh4x4_mmx;
+    if (flags & HAS_SSE2) vp8_short_inv_walsh4x4 = vp8_short_inv_walsh4x4_sse2;
+
+    vp8_dc_only_idct_add = vp8_dc_only_idct_add_c;
+    if (flags & HAS_MMX) vp8_dc_only_idct_add = vp8_dc_only_idct_add_mmx;
+
+    vp8_copy_mem16x16 = vp8_copy_mem16x16_c;
+    if (flags & HAS_MMX) vp8_copy_mem16x16 = vp8_copy_mem16x16_mmx;
+    if (flags & HAS_SSE2) vp8_copy_mem16x16 = vp8_copy_mem16x16_sse2;
+
+    vp8_copy_mem8x8 = vp8_copy_mem8x8_c;
+    if (flags & HAS_MMX) vp8_copy_mem8x8 = vp8_copy_mem8x8_mmx;
+
+    vp8_copy_mem8x4 = vp8_copy_mem8x4_c;
+    if (flags & HAS_MMX) vp8_copy_mem8x4 = vp8_copy_mem8x4_mmx;
+
+    vp8_build_intra_predictors_mby_s = vp8_build_intra_predictors_mby_s_c;
+    if (flags & HAS_SSE2) vp8_build_intra_predictors_mby_s = vp8_build_intra_predictors_mby_s_sse2;
+    if (flags & HAS_SSSE3) vp8_build_intra_predictors_mby_s = vp8_build_intra_predictors_mby_s_ssse3;
+
+    vp8_build_intra_predictors_mbuv_s = vp8_build_intra_predictors_mbuv_s_c;
+    if (flags & HAS_SSE2) vp8_build_intra_predictors_mbuv_s = vp8_build_intra_predictors_mbuv_s_sse2;
+    if (flags & HAS_SSSE3) vp8_build_intra_predictors_mbuv_s = vp8_build_intra_predictors_mbuv_s_ssse3;
+
+
+    vp8_mbpost_proc_down = vp8_mbpost_proc_down_c;
+    if (flags & HAS_MMX) vp8_mbpost_proc_down = vp8_mbpost_proc_down_mmx;
+    if (flags & HAS_SSE2) vp8_mbpost_proc_down = vp8_mbpost_proc_down_xmm;
+
+    vp8_mbpost_proc_across_ip = vp8_mbpost_proc_across_ip_c;
+    if (flags & HAS_SSE2) vp8_mbpost_proc_across_ip = vp8_mbpost_proc_across_ip_xmm;
+
+    vp8_post_proc_down_and_across = vp8_post_proc_down_and_across_c;
+    if (flags & HAS_MMX) vp8_post_proc_down_and_across = vp8_post_proc_down_and_across_mmx;
+    if (flags & HAS_SSE2) vp8_post_proc_down_and_across = vp8_post_proc_down_and_across_xmm;
+
+    vp8_plane_add_noise = vp8_plane_add_noise_c;
+    if (flags & HAS_MMX) vp8_plane_add_noise = vp8_plane_add_noise_mmx;
+    if (flags & HAS_SSE2) vp8_plane_add_noise = vp8_plane_add_noise_wmt;
+
+
+
+
+    vp8_filter_by_weight16x16 = vp8_filter_by_weight16x16_c;
+    if (flags & HAS_SSE2) vp8_filter_by_weight16x16 = vp8_filter_by_weight16x16_sse2;
+
+    vp8_filter_by_weight8x8 = vp8_filter_by_weight8x8_c;
+    if (flags & HAS_SSE2) vp8_filter_by_weight8x8 = vp8_filter_by_weight8x8_sse2;
+
+
+    vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_c;
+    if (flags & HAS_MMX) vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_mmx;
+    if (flags & HAS_SSE2) vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_sse2;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_ssse3;
+
+    vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_c;
+    if (flags & HAS_MMX) vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_mmx;
+    if (flags & HAS_SSE2) vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_sse2;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_ssse3;
+
+    vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_c;
+    if (flags & HAS_MMX) vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_mmx;
+    if (flags & HAS_SSE2) vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_sse2;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_ssse3;
+
+    vp8_sixtap_predict4x4 = vp8_sixtap_predict4x4_c;
+    if (flags & HAS_MMX) vp8_sixtap_predict4x4 = vp8_sixtap_predict4x4_mmx;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict4x4 = vp8_sixtap_predict4x4_ssse3;
+
+    vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_c;
+    if (flags & HAS_MMX) vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_mmx;
+    if (flags & HAS_SSE2) vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_sse2;
+    if (flags & HAS_SSSE3) vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_ssse3;
+
+    vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_c;
+    if (flags & HAS_MMX) vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_mmx;
+    if (flags & HAS_SSE2) vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_sse2;
+    if (flags & HAS_SSSE3) vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_ssse3;
+
+    vp8_bilinear_predict8x4 = vp8_bilinear_predict8x4_c;
+    if (flags & HAS_MMX) vp8_bilinear_predict8x4 = vp8_bilinear_predict8x4_mmx;
+
+    vp8_bilinear_predict4x4 = vp8_bilinear_predict4x4_c;
+    if (flags & HAS_MMX) vp8_bilinear_predict4x4 = vp8_bilinear_predict4x4_mmx;
+
+    vp8_variance4x4 = vp8_variance4x4_c;
+    if (flags & HAS_MMX) vp8_variance4x4 = vp8_variance4x4_mmx;
+    if (flags & HAS_SSE2) vp8_variance4x4 = vp8_variance4x4_wmt;
+
+    vp8_variance8x8 = vp8_variance8x8_c;
+    if (flags & HAS_MMX) vp8_variance8x8 = vp8_variance8x8_mmx;
+    if (flags & HAS_SSE2) vp8_variance8x8 = vp8_variance8x8_wmt;
+
+    vp8_variance8x16 = vp8_variance8x16_c;
+    if (flags & HAS_MMX) vp8_variance8x16 = vp8_variance8x16_mmx;
+    if (flags & HAS_SSE2) vp8_variance8x16 = vp8_variance8x16_wmt;
+
+    vp8_variance16x8 = vp8_variance16x8_c;
+    if (flags & HAS_MMX) vp8_variance16x8 = vp8_variance16x8_mmx;
+    if (flags & HAS_SSE2) vp8_variance16x8 = vp8_variance16x8_wmt;
+
+    vp8_variance16x16 = vp8_variance16x16_c;
+    if (flags & HAS_MMX) vp8_variance16x16 = vp8_variance16x16_mmx;
+    if (flags & HAS_SSE2) vp8_variance16x16 = vp8_variance16x16_wmt;
+
+    vp8_sub_pixel_variance4x4 = vp8_sub_pixel_variance4x4_c;
+    if (flags & HAS_MMX) vp8_sub_pixel_variance4x4 = vp8_sub_pixel_variance4x4_mmx;
+    if (flags & HAS_SSE2) vp8_sub_pixel_variance4x4 = vp8_sub_pixel_variance4x4_wmt;
+
+    vp8_sub_pixel_variance8x8 = vp8_sub_pixel_variance8x8_c;
+    if (flags & HAS_MMX) vp8_sub_pixel_variance8x8 = vp8_sub_pixel_variance8x8_mmx;
+    if (flags & HAS_SSE2) vp8_sub_pixel_variance8x8 = vp8_sub_pixel_variance8x8_wmt;
+
+    vp8_sub_pixel_variance8x16 = vp8_sub_pixel_variance8x16_c;
+    if (flags & HAS_MMX) vp8_sub_pixel_variance8x16 = vp8_sub_pixel_variance8x16_mmx;
+    if (flags & HAS_SSE2) vp8_sub_pixel_variance8x16 = vp8_sub_pixel_variance8x16_wmt;
+
+    vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_c;
+    if (flags & HAS_MMX) vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_mmx;
+    if (flags & HAS_SSE2) vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_wmt;
+    if (flags & HAS_SSSE3) vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_ssse3;
+
+    vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_c;
+    if (flags & HAS_MMX) vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_mmx;
+    if (flags & HAS_SSE2) vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_wmt;
+    if (flags & HAS_SSSE3) vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_ssse3;
+
+    vp8_variance_halfpixvar16x16_h = vp8_variance_halfpixvar16x16_h_c;
+    if (flags & HAS_MMX) vp8_variance_halfpixvar16x16_h = vp8_variance_halfpixvar16x16_h_mmx;
+    if (flags & HAS_SSE2) vp8_variance_halfpixvar16x16_h = vp8_variance_halfpixvar16x16_h_wmt;
+
+    vp8_variance_halfpixvar16x16_v = vp8_variance_halfpixvar16x16_v_c;
+    if (flags & HAS_MMX) vp8_variance_halfpixvar16x16_v = vp8_variance_halfpixvar16x16_v_mmx;
+    if (flags & HAS_SSE2) vp8_variance_halfpixvar16x16_v = vp8_variance_halfpixvar16x16_v_wmt;
+
+    vp8_variance_halfpixvar16x16_hv = vp8_variance_halfpixvar16x16_hv_c;
+    if (flags & HAS_MMX) vp8_variance_halfpixvar16x16_hv = vp8_variance_halfpixvar16x16_hv_mmx;
+    if (flags & HAS_SSE2) vp8_variance_halfpixvar16x16_hv = vp8_variance_halfpixvar16x16_hv_wmt;
+
+    vp8_sad4x4 = vp8_sad4x4_c;
+    if (flags & HAS_MMX) vp8_sad4x4 = vp8_sad4x4_mmx;
+    if (flags & HAS_SSE2) vp8_sad4x4 = vp8_sad4x4_wmt;
+
+    vp8_sad8x8 = vp8_sad8x8_c;
+    if (flags & HAS_MMX) vp8_sad8x8 = vp8_sad8x8_mmx;
+    if (flags & HAS_SSE2) vp8_sad8x8 = vp8_sad8x8_wmt;
+
+    vp8_sad8x16 = vp8_sad8x16_c;
+    if (flags & HAS_MMX) vp8_sad8x16 = vp8_sad8x16_mmx;
+    if (flags & HAS_SSE2) vp8_sad8x16 = vp8_sad8x16_wmt;
+
+    vp8_sad16x8 = vp8_sad16x8_c;
+    if (flags & HAS_MMX) vp8_sad16x8 = vp8_sad16x8_mmx;
+    if (flags & HAS_SSE2) vp8_sad16x8 = vp8_sad16x8_wmt;
+
+    vp8_sad16x16 = vp8_sad16x16_c;
+    if (flags & HAS_MMX) vp8_sad16x16 = vp8_sad16x16_mmx;
+    if (flags & HAS_SSE2) vp8_sad16x16 = vp8_sad16x16_wmt;
+    if (flags & HAS_SSE3) vp8_sad16x16 = vp8_sad16x16_sse3;
+
+    vp8_sad4x4x3 = vp8_sad4x4x3_c;
+    if (flags & HAS_SSE3) vp8_sad4x4x3 = vp8_sad4x4x3_sse3;
+
+    vp8_sad8x8x3 = vp8_sad8x8x3_c;
+    if (flags & HAS_SSE3) vp8_sad8x8x3 = vp8_sad8x8x3_sse3;
+
+    vp8_sad8x16x3 = vp8_sad8x16x3_c;
+    if (flags & HAS_SSE3) vp8_sad8x16x3 = vp8_sad8x16x3_sse3;
+
+    vp8_sad16x8x3 = vp8_sad16x8x3_c;
+    if (flags & HAS_SSE3) vp8_sad16x8x3 = vp8_sad16x8x3_sse3;
+    if (flags & HAS_SSSE3) vp8_sad16x8x3 = vp8_sad16x8x3_ssse3;
+
+    vp8_sad16x16x3 = vp8_sad16x16x3_c;
+    if (flags & HAS_SSE3) vp8_sad16x16x3 = vp8_sad16x16x3_sse3;
+    if (flags & HAS_SSSE3) vp8_sad16x16x3 = vp8_sad16x16x3_ssse3;
+
+    vp8_sad4x4x8 = vp8_sad4x4x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad4x4x8 = vp8_sad4x4x8_sse4;
+
+    vp8_sad8x8x8 = vp8_sad8x8x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad8x8x8 = vp8_sad8x8x8_sse4;
+
+    vp8_sad8x16x8 = vp8_sad8x16x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad8x16x8 = vp8_sad8x16x8_sse4;
+
+    vp8_sad16x8x8 = vp8_sad16x8x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad16x8x8 = vp8_sad16x8x8_sse4;
+
+    vp8_sad16x16x8 = vp8_sad16x16x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad16x16x8 = vp8_sad16x16x8_sse4;
+
+    vp8_sad4x4x4d = vp8_sad4x4x4d_c;
+    if (flags & HAS_SSE3) vp8_sad4x4x4d = vp8_sad4x4x4d_sse3;
+
+    vp8_sad8x8x4d = vp8_sad8x8x4d_c;
+    if (flags & HAS_SSE3) vp8_sad8x8x4d = vp8_sad8x8x4d_sse3;
+
+    vp8_sad8x16x4d = vp8_sad8x16x4d_c;
+    if (flags & HAS_SSE3) vp8_sad8x16x4d = vp8_sad8x16x4d_sse3;
+
+    vp8_sad16x8x4d = vp8_sad16x8x4d_c;
+    if (flags & HAS_SSE3) vp8_sad16x8x4d = vp8_sad16x8x4d_sse3;
+
+    vp8_sad16x16x4d = vp8_sad16x16x4d_c;
+    if (flags & HAS_SSE3) vp8_sad16x16x4d = vp8_sad16x16x4d_sse3;
+
+    vp8_get_mb_ss = vp8_get_mb_ss_c;
+    if (flags & HAS_MMX) vp8_get_mb_ss = vp8_get_mb_ss_mmx;
+    if (flags & HAS_SSE2) vp8_get_mb_ss = vp8_get_mb_ss_sse2;
+
+    vp8_sub_pixel_mse16x16 = vp8_sub_pixel_mse16x16_c;
+    if (flags & HAS_MMX) vp8_sub_pixel_mse16x16 = vp8_sub_pixel_mse16x16_mmx;
+    if (flags & HAS_SSE2) vp8_sub_pixel_mse16x16 = vp8_sub_pixel_mse16x16_wmt;
+
+    vp8_mse16x16 = vp8_mse16x16_c;
+    if (flags & HAS_MMX) vp8_mse16x16 = vp8_mse16x16_mmx;
+    if (flags & HAS_SSE2) vp8_mse16x16 = vp8_mse16x16_wmt;
+
+    vp8_get4x4sse_cs = vp8_get4x4sse_cs_c;
+    if (flags & HAS_MMX) vp8_get4x4sse_cs = vp8_get4x4sse_cs_mmx;
+
+    vp8_copy32xn = vp8_copy32xn_c;
+    if (flags & HAS_SSE2) vp8_copy32xn = vp8_copy32xn_sse2;
+    if (flags & HAS_SSE3) vp8_copy32xn = vp8_copy32xn_sse3;
+
+    vp8_short_fdct4x4 = vp8_short_fdct4x4_c;
+    if (flags & HAS_MMX) vp8_short_fdct4x4 = vp8_short_fdct4x4_mmx;
+    if (flags & HAS_SSE2) vp8_short_fdct4x4 = vp8_short_fdct4x4_sse2;
+
+    vp8_short_fdct8x4 = vp8_short_fdct8x4_c;
+    if (flags & HAS_MMX) vp8_short_fdct8x4 = vp8_short_fdct8x4_mmx;
+    if (flags & HAS_SSE2) vp8_short_fdct8x4 = vp8_short_fdct8x4_sse2;
+
+    vp8_short_walsh4x4 = vp8_short_walsh4x4_c;
+    if (flags & HAS_SSE2) vp8_short_walsh4x4 = vp8_short_walsh4x4_sse2;
+
+    vp8_regular_quantize_b = vp8_regular_quantize_b_c;
+    if (flags & HAS_SSE2) vp8_regular_quantize_b = vp8_regular_quantize_b_sse2;
+    if (flags & HAS_SSE4_1) vp8_regular_quantize_b = vp8_regular_quantize_b_sse4;
+
+    vp8_fast_quantize_b = vp8_fast_quantize_b_c;
+    if (flags & HAS_SSE2) vp8_fast_quantize_b = vp8_fast_quantize_b_sse2;
+    if (flags & HAS_SSSE3) vp8_fast_quantize_b = vp8_fast_quantize_b_ssse3;
+
+
+
+
+
+
+    vp8_block_error = vp8_block_error_c;
+    if (flags & HAS_MMX) vp8_block_error = vp8_block_error_mmx;
+    if (flags & HAS_SSE2) vp8_block_error = vp8_block_error_xmm;
+
+    vp8_mbblock_error = vp8_mbblock_error_c;
+    if (flags & HAS_MMX) vp8_mbblock_error = vp8_mbblock_error_mmx;
+    if (flags & HAS_SSE2) vp8_mbblock_error = vp8_mbblock_error_xmm;
+
+    vp8_mbuverror = vp8_mbuverror_c;
+    if (flags & HAS_MMX) vp8_mbuverror = vp8_mbuverror_mmx;
+    if (flags & HAS_SSE2) vp8_mbuverror = vp8_mbuverror_xmm;
+
+    vp8_subtract_b = vp8_subtract_b_c;
+    if (flags & HAS_MMX) vp8_subtract_b = vp8_subtract_b_mmx;
+    if (flags & HAS_SSE2) vp8_subtract_b = vp8_subtract_b_sse2;
+
+    vp8_subtract_mby = vp8_subtract_mby_c;
+    if (flags & HAS_MMX) vp8_subtract_mby = vp8_subtract_mby_mmx;
+    if (flags & HAS_SSE2) vp8_subtract_mby = vp8_subtract_mby_sse2;
+
+    vp8_subtract_mbuv = vp8_subtract_mbuv_c;
+    if (flags & HAS_MMX) vp8_subtract_mbuv = vp8_subtract_mbuv_mmx;
+    if (flags & HAS_SSE2) vp8_subtract_mbuv = vp8_subtract_mbuv_sse2;
+
+    vp8_full_search_sad = vp8_full_search_sad_c;
+    if (flags & HAS_SSE3) vp8_full_search_sad = vp8_full_search_sadx3;
+    if (flags & HAS_SSE4_1) vp8_full_search_sad = vp8_full_search_sadx8;
+
+    vp8_refining_search_sad = vp8_refining_search_sad_c;
+    if (flags & HAS_SSE3) vp8_refining_search_sad = vp8_refining_search_sadx4;
+
+    vp8_diamond_search_sad = vp8_diamond_search_sad_c;
+    if (flags & HAS_SSE3) vp8_diamond_search_sad = vp8_diamond_search_sadx4;
+
+
+    vp8_denoiser_filter = vp8_denoiser_filter_c;
+    if (flags & HAS_SSE2) vp8_denoiser_filter = vp8_denoiser_filter_sse2;
+}
+#endif
+#endif
diff --git a/third_party/libvpx/source/config/win/ia32/vpx_version.h b/third_party/libvpx/source/config/win/ia32/vpx_version.h
new file mode 100644
index 0000000..f6b740c
--- /dev/null
+++ b/third_party/libvpx/source/config/win/ia32/vpx_version.h
@@ -0,0 +1,7 @@
+#define VERSION_MAJOR  1
+#define VERSION_MINOR  1
+#define VERSION_PATCH  0
+#define VERSION_EXTRA  "6-gdd6134b"
+#define VERSION_PACKED ((VERSION_MAJOR<<16)|(VERSION_MINOR<<8)|(VERSION_PATCH))
+#define VERSION_STRING_NOSP "v1.1.0-6-gdd6134b"
+#define VERSION_STRING      " v1.1.0-6-gdd6134b"
diff --git a/third_party/libvpx/source/config/win/x64/vpx_config.asm b/third_party/libvpx/source/config/win/x64/vpx_config.asm
new file mode 100644
index 0000000..df6f68b
--- /dev/null
+++ b/third_party/libvpx/source/config/win/x64/vpx_config.asm
@@ -0,0 +1,67 @@
+ARCH_ARM equ 0
+ARCH_MIPS equ 0
+ARCH_X86 equ 0
+ARCH_X86_64 equ 1
+ARCH_PPC32 equ 0
+ARCH_PPC64 equ 0
+HAVE_EDSP equ 0
+HAVE_MEDIA equ 0
+HAVE_NEON equ 0
+HAVE_MIPS32 equ 0
+HAVE_MMX equ 1
+HAVE_SSE equ 1
+HAVE_SSE2 equ 1
+HAVE_SSE3 equ 1
+HAVE_SSSE3 equ 1
+HAVE_SSE4_1 equ 1
+HAVE_ALTIVEC equ 0
+HAVE_VPX_PORTS equ 1
+HAVE_STDINT_H equ 0
+HAVE_ALT_TREE_LAYOUT equ 0
+HAVE_PTHREAD_H equ 0
+HAVE_SYS_MMAN_H equ 0
+HAVE_UNISTD_H equ 0
+CONFIG_EXTERNAL_BUILD equ 1
+CONFIG_INSTALL_DOCS equ 0
+CONFIG_INSTALL_BINS equ 0
+CONFIG_INSTALL_LIBS equ 0
+CONFIG_INSTALL_SRCS equ 0
+CONFIG_DEBUG equ 0
+CONFIG_GPROF equ 0
+CONFIG_GCOV equ 0
+CONFIG_RVCT equ 0
+CONFIG_GCC equ 0
+CONFIG_MSVS equ 1
+CONFIG_PIC equ 1
+CONFIG_BIG_ENDIAN equ 0
+CONFIG_CODEC_SRCS equ 0
+CONFIG_DEBUG_LIBS equ 0
+CONFIG_FAST_UNALIGNED equ 1
+CONFIG_MEM_MANAGER equ 0
+CONFIG_MEM_TRACKER equ 0
+CONFIG_MEM_CHECKS equ 0
+CONFIG_MD5 equ 1
+CONFIG_DEQUANT_TOKENS equ 0
+CONFIG_DC_RECON equ 0
+CONFIG_RUNTIME_CPU_DETECT equ 1
+CONFIG_POSTPROC equ 1
+CONFIG_MULTITHREAD equ 1
+CONFIG_INTERNAL_STATS equ 0
+CONFIG_VP8_ENCODER equ 1
+CONFIG_VP8_DECODER equ 1
+CONFIG_VP8 equ 1
+CONFIG_ENCODERS equ 1
+CONFIG_DECODERS equ 1
+CONFIG_STATIC_MSVCRT equ 1
+CONFIG_SPATIAL_RESAMPLING equ 1
+CONFIG_REALTIME_ONLY equ 1
+CONFIG_ONTHEFLY_BITPACKING equ 0
+CONFIG_ERROR_CONCEALMENT equ 1
+CONFIG_SHARED equ 0
+CONFIG_STATIC equ 1
+CONFIG_SMALL equ 0
+CONFIG_POSTPROC_VISUALIZER equ 0
+CONFIG_OS_SUPPORT equ 1
+CONFIG_UNIT_TESTS equ 0
+CONFIG_MULTI_RES_ENCODING equ 1
+CONFIG_TEMPORAL_DENOISING equ 1
diff --git a/third_party/libvpx/source/config/win/x64/vpx_config.c b/third_party/libvpx/source/config/win/x64/vpx_config.c
new file mode 100644
index 0000000..83b5049
--- /dev/null
+++ b/third_party/libvpx/source/config/win/x64/vpx_config.c
@@ -0,0 +1,9 @@
+/* Copyright (c) 2011 The WebM project authors. All Rights Reserved. */
+/*  */
+/* Use of this source code is governed by a BSD-style license */
+/* that can be found in the LICENSE file in the root of the source */
+/* tree. An additional intellectual property rights grant can be found */
+/* in the file PATENTS.  All contributing project authors may */
+/* be found in the AUTHORS file in the root of the source tree. */
+static const char* const cfg = "--target=x86_64-win64-vs9 --enable-pic --enable-error-concealment --disable-install-docs --disable-install-srcs --disable-examples --disable-internal-stats --disable-install-libs --disable-install-bins --enable-realtime-only --enable-static-msvcrt --as=yasm --enable-multi-res-encoding";
+const char *vpx_codec_build_config(void) {return cfg;}
diff --git a/third_party/libvpx/source/config/win/x64/vpx_config.h b/third_party/libvpx/source/config/win/x64/vpx_config.h
new file mode 100644
index 0000000..f8f03d8
--- /dev/null
+++ b/third_party/libvpx/source/config/win/x64/vpx_config.h
@@ -0,0 +1,79 @@
+/* Copyright (c) 2011 The WebM project authors. All Rights Reserved. */
+/*  */
+/* Use of this source code is governed by a BSD-style license */
+/* that can be found in the LICENSE file in the root of the source */
+/* tree. An additional intellectual property rights grant can be found */
+/* in the file PATENTS.  All contributing project authors may */
+/* be found in the AUTHORS file in the root of the source tree. */
+/* This file automatically generated by configure. Do not edit! */
+#ifndef VPX_CONFIG_H
+#define VPX_CONFIG_H
+#define RESTRICT    
+#define ARCH_ARM 0
+#define ARCH_MIPS 0
+#define ARCH_X86 0
+#define ARCH_X86_64 1
+#define ARCH_PPC32 0
+#define ARCH_PPC64 0
+#define HAVE_EDSP 0
+#define HAVE_MEDIA 0
+#define HAVE_NEON 0
+#define HAVE_MIPS32 0
+#define HAVE_MMX 1
+#define HAVE_SSE 1
+#define HAVE_SSE2 1
+#define HAVE_SSE3 1
+#define HAVE_SSSE3 1
+#define HAVE_SSE4_1 1
+#define HAVE_ALTIVEC 0
+#define HAVE_VPX_PORTS 1
+#define HAVE_STDINT_H 0
+#define HAVE_ALT_TREE_LAYOUT 0
+#define HAVE_PTHREAD_H 0
+#define HAVE_SYS_MMAN_H 0
+#define HAVE_UNISTD_H 0
+#define CONFIG_EXTERNAL_BUILD 1
+#define CONFIG_INSTALL_DOCS 0
+#define CONFIG_INSTALL_BINS 0
+#define CONFIG_INSTALL_LIBS 0
+#define CONFIG_INSTALL_SRCS 0
+#define CONFIG_DEBUG 0
+#define CONFIG_GPROF 0
+#define CONFIG_GCOV 0
+#define CONFIG_RVCT 0
+#define CONFIG_GCC 0
+#define CONFIG_MSVS 1
+#define CONFIG_PIC 1
+#define CONFIG_BIG_ENDIAN 0
+#define CONFIG_CODEC_SRCS 0
+#define CONFIG_DEBUG_LIBS 0
+#define CONFIG_FAST_UNALIGNED 1
+#define CONFIG_MEM_MANAGER 0
+#define CONFIG_MEM_TRACKER 0
+#define CONFIG_MEM_CHECKS 0
+#define CONFIG_MD5 1
+#define CONFIG_DEQUANT_TOKENS 0
+#define CONFIG_DC_RECON 0
+#define CONFIG_RUNTIME_CPU_DETECT 1
+#define CONFIG_POSTPROC 1
+#define CONFIG_MULTITHREAD 1
+#define CONFIG_INTERNAL_STATS 0
+#define CONFIG_VP8_ENCODER 1
+#define CONFIG_VP8_DECODER 1
+#define CONFIG_VP8 1
+#define CONFIG_ENCODERS 1
+#define CONFIG_DECODERS 1
+#define CONFIG_STATIC_MSVCRT 1
+#define CONFIG_SPATIAL_RESAMPLING 1
+#define CONFIG_REALTIME_ONLY 1
+#define CONFIG_ONTHEFLY_BITPACKING 0
+#define CONFIG_ERROR_CONCEALMENT 1
+#define CONFIG_SHARED 0
+#define CONFIG_STATIC 1
+#define CONFIG_SMALL 0
+#define CONFIG_POSTPROC_VISUALIZER 0
+#define CONFIG_OS_SUPPORT 1
+#define CONFIG_UNIT_TESTS 0
+#define CONFIG_MULTI_RES_ENCODING 1
+#define CONFIG_TEMPORAL_DENOISING 1
+#endif /* VPX_CONFIG_H */
diff --git a/third_party/libvpx/source/config/win/x64/vpx_rtcd.h b/third_party/libvpx/source/config/win/x64/vpx_rtcd.h
new file mode 100755
index 0000000..3144b87
--- /dev/null
+++ b/third_party/libvpx/source/config/win/x64/vpx_rtcd.h
@@ -0,0 +1,724 @@
+#ifndef VPX_RTCD_
+#define VPX_RTCD_
+
+#ifdef RTCD_C
+#define RTCD_EXTERN
+#else
+#define RTCD_EXTERN extern
+#endif
+
+struct blockd;
+struct macroblockd;
+struct loop_filter_info;
+
+/* Encoder forward decls */
+struct block;
+struct macroblock;
+struct variance_vtable;
+union int_mv;
+struct yv12_buffer_config;
+
+void vp8_dequantize_b_c(struct blockd*, short *dqc);
+void vp8_dequantize_b_mmx(struct blockd*, short *dqc);
+#define vp8_dequantize_b vp8_dequantize_b_mmx
+
+void vp8_dequant_idct_add_c(short *input, short *dq, unsigned char *output, int stride);
+void vp8_dequant_idct_add_mmx(short *input, short *dq, unsigned char *output, int stride);
+#define vp8_dequant_idct_add vp8_dequant_idct_add_mmx
+
+void vp8_dequant_idct_add_y_block_c(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+void vp8_dequant_idct_add_y_block_mmx(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+void vp8_dequant_idct_add_y_block_sse2(short *q, short *dq, unsigned char *dst, int stride, char *eobs);
+#define vp8_dequant_idct_add_y_block vp8_dequant_idct_add_y_block_sse2
+
+void vp8_dequant_idct_add_uv_block_c(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+void vp8_dequant_idct_add_uv_block_mmx(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+void vp8_dequant_idct_add_uv_block_sse2(short *q, short *dq, unsigned char *dst_u, unsigned char *dst_v, int stride, char *eobs);
+#define vp8_dequant_idct_add_uv_block vp8_dequant_idct_add_uv_block_sse2
+
+void vp8_loop_filter_mbv_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbv_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbv_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+#define vp8_loop_filter_mbv vp8_loop_filter_mbv_sse2
+
+void vp8_loop_filter_bv_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bv_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bv_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+#define vp8_loop_filter_bv vp8_loop_filter_bv_sse2
+
+void vp8_loop_filter_mbh_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbh_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_mbh_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+#define vp8_loop_filter_mbh vp8_loop_filter_mbh_sse2
+
+void vp8_loop_filter_bh_c(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bh_mmx(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+void vp8_loop_filter_bh_sse2(unsigned char *y, unsigned char *u, unsigned char *v, int ystride, int uv_stride, struct loop_filter_info *lfi);
+#define vp8_loop_filter_bh vp8_loop_filter_bh_sse2
+
+void vp8_loop_filter_simple_vertical_edge_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_vertical_edge_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_vertical_edge_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+#define vp8_loop_filter_simple_mbv vp8_loop_filter_simple_vertical_edge_sse2
+
+void vp8_loop_filter_simple_horizontal_edge_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_horizontal_edge_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_simple_horizontal_edge_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+#define vp8_loop_filter_simple_mbh vp8_loop_filter_simple_horizontal_edge_sse2
+
+void vp8_loop_filter_bvs_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bvs_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bvs_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+#define vp8_loop_filter_simple_bv vp8_loop_filter_bvs_sse2
+
+void vp8_loop_filter_bhs_c(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bhs_mmx(unsigned char *y, int ystride, const unsigned char *blimit);
+void vp8_loop_filter_bhs_sse2(unsigned char *y, int ystride, const unsigned char *blimit);
+#define vp8_loop_filter_simple_bh vp8_loop_filter_bhs_sse2
+
+void vp8_short_idct4x4llm_c(short *input, unsigned char *pred, int pitch, unsigned char *dst, int dst_stride);
+void vp8_short_idct4x4llm_mmx(short *input, unsigned char *pred, int pitch, unsigned char *dst, int dst_stride);
+#define vp8_short_idct4x4llm vp8_short_idct4x4llm_mmx
+
+void vp8_short_inv_walsh4x4_1_c(short *input, short *output);
+#define vp8_short_inv_walsh4x4_1 vp8_short_inv_walsh4x4_1_c
+
+void vp8_short_inv_walsh4x4_c(short *input, short *output);
+void vp8_short_inv_walsh4x4_mmx(short *input, short *output);
+void vp8_short_inv_walsh4x4_sse2(short *input, short *output);
+#define vp8_short_inv_walsh4x4 vp8_short_inv_walsh4x4_sse2
+
+void vp8_dc_only_idct_add_c(short input, unsigned char *pred, int pred_stride, unsigned char *dst, int dst_stride);
+void vp8_dc_only_idct_add_mmx(short input, unsigned char *pred, int pred_stride, unsigned char *dst, int dst_stride);
+#define vp8_dc_only_idct_add vp8_dc_only_idct_add_mmx
+
+void vp8_copy_mem16x16_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem16x16_mmx(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem16x16_sse2(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+#define vp8_copy_mem16x16 vp8_copy_mem16x16_sse2
+
+void vp8_copy_mem8x8_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem8x8_mmx(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+#define vp8_copy_mem8x8 vp8_copy_mem8x8_mmx
+
+void vp8_copy_mem8x4_c(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+void vp8_copy_mem8x4_mmx(unsigned char *src, int src_pitch, unsigned char *dst, int dst_pitch);
+#define vp8_copy_mem8x4 vp8_copy_mem8x4_mmx
+
+void vp8_build_intra_predictors_mby_s_c(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+void vp8_build_intra_predictors_mby_s_sse2(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+void vp8_build_intra_predictors_mby_s_ssse3(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+RTCD_EXTERN void (*vp8_build_intra_predictors_mby_s)(struct macroblockd *x, unsigned char * yabove_row, unsigned char * yleft, int left_stride, unsigned char * ypred_ptr, int y_stride);
+
+void vp8_build_intra_predictors_mbuv_s_c(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+void vp8_build_intra_predictors_mbuv_s_sse2(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+void vp8_build_intra_predictors_mbuv_s_ssse3(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+RTCD_EXTERN void (*vp8_build_intra_predictors_mbuv_s)(struct macroblockd *x, unsigned char * uabove_row, unsigned char * vabove_row,  unsigned char *uleft, unsigned char *vleft, int left_stride, unsigned char * upred_ptr, unsigned char * vpred_ptr, int pred_stride);
+
+void vp8_intra4x4_predict_d_c(unsigned char *above, unsigned char *left, int left_stride, int b_mode, unsigned char *dst, int dst_stride, unsigned char top_left);
+#define vp8_intra4x4_predict_d vp8_intra4x4_predict_d_c
+
+void vp8_intra4x4_predict_c(unsigned char *src, int src_stride, int b_mode, unsigned char *dst, int dst_stride);
+#define vp8_intra4x4_predict vp8_intra4x4_predict_c
+
+void vp8_mbpost_proc_down_c(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+void vp8_mbpost_proc_down_mmx(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+void vp8_mbpost_proc_down_xmm(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+#define vp8_mbpost_proc_down vp8_mbpost_proc_down_xmm
+
+void vp8_mbpost_proc_across_ip_c(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+void vp8_mbpost_proc_across_ip_xmm(unsigned char *dst, int pitch, int rows, int cols,int flimit);
+#define vp8_mbpost_proc_across_ip vp8_mbpost_proc_across_ip_xmm
+
+void vp8_post_proc_down_and_across_c(unsigned char *src, unsigned char *dst, int src_pitch, int dst_pitch, int rows, int cols, int flimit);
+void vp8_post_proc_down_and_across_mmx(unsigned char *src, unsigned char *dst, int src_pitch, int dst_pitch, int rows, int cols, int flimit);
+void vp8_post_proc_down_and_across_xmm(unsigned char *src, unsigned char *dst, int src_pitch, int dst_pitch, int rows, int cols, int flimit);
+#define vp8_post_proc_down_and_across vp8_post_proc_down_and_across_xmm
+
+void vp8_plane_add_noise_c(unsigned char *s, char *noise, char blackclamp[16], char whiteclamp[16], char bothclamp[16], unsigned int w, unsigned int h, int pitch);
+void vp8_plane_add_noise_mmx(unsigned char *s, char *noise, char blackclamp[16], char whiteclamp[16], char bothclamp[16], unsigned int w, unsigned int h, int pitch);
+void vp8_plane_add_noise_wmt(unsigned char *s, char *noise, char blackclamp[16], char whiteclamp[16], char bothclamp[16], unsigned int w, unsigned int h, int pitch);
+#define vp8_plane_add_noise vp8_plane_add_noise_wmt
+
+void vp8_blend_mb_inner_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_mb_inner vp8_blend_mb_inner_c
+
+void vp8_blend_mb_outer_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_mb_outer vp8_blend_mb_outer_c
+
+void vp8_blend_b_c(unsigned char *y, unsigned char *u, unsigned char *v, int y1, int u1, int v1, int alpha, int stride);
+#define vp8_blend_b vp8_blend_b_c
+
+void vp8_filter_by_weight16x16_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+void vp8_filter_by_weight16x16_sse2(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+#define vp8_filter_by_weight16x16 vp8_filter_by_weight16x16_sse2
+
+void vp8_filter_by_weight8x8_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+void vp8_filter_by_weight8x8_sse2(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+#define vp8_filter_by_weight8x8 vp8_filter_by_weight8x8_sse2
+
+void vp8_filter_by_weight4x4_c(unsigned char *src, int src_stride, unsigned char *dst, int dst_stride, int src_weight);
+#define vp8_filter_by_weight4x4 vp8_filter_by_weight4x4_c
+
+void vp8_sixtap_predict16x16_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict16x16_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict16x16_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict16x16_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict16x16)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_sixtap_predict8x8_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x8_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x8_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x8_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict8x8)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_sixtap_predict8x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x4_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict8x4_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict8x4)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_sixtap_predict4x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict4x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_sixtap_predict4x4_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_sixtap_predict4x4)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_bilinear_predict16x16_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict16x16_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict16x16_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict16x16_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_bilinear_predict16x16)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_bilinear_predict8x8_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x8_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x8_sse2(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x8_ssse3(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+RTCD_EXTERN void (*vp8_bilinear_predict8x8)(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+
+void vp8_bilinear_predict8x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict8x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+#define vp8_bilinear_predict8x4 vp8_bilinear_predict8x4_mmx
+
+void vp8_bilinear_predict4x4_c(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+void vp8_bilinear_predict4x4_mmx(unsigned char *src, int src_pitch, int xofst, int yofst, unsigned char *dst, int dst_pitch);
+#define vp8_bilinear_predict4x4 vp8_bilinear_predict4x4_mmx
+
+unsigned int vp8_variance4x4_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance4x4_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance4x4_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance4x4 vp8_variance4x4_wmt
+
+unsigned int vp8_variance8x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x8_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x8_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance8x8 vp8_variance8x8_wmt
+
+unsigned int vp8_variance8x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x16_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance8x16_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance8x16 vp8_variance8x16_wmt
+
+unsigned int vp8_variance16x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x8_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x8_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance16x8 vp8_variance16x8_wmt
+
+unsigned int vp8_variance16x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x16_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance16x16_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance16x16 vp8_variance16x16_wmt
+
+unsigned int vp8_sub_pixel_variance4x4_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance4x4_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance4x4_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_variance4x4 vp8_sub_pixel_variance4x4_wmt
+
+unsigned int vp8_sub_pixel_variance8x8_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x8_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x8_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_variance8x8 vp8_sub_pixel_variance8x8_wmt
+
+unsigned int vp8_sub_pixel_variance8x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x16_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance8x16_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_variance8x16 vp8_sub_pixel_variance8x16_wmt
+
+unsigned int vp8_sub_pixel_variance16x8_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x8_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x8_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x8_ssse3(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_variance16x8)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_sub_pixel_variance16x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x16_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x16_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_variance16x16_ssse3(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+RTCD_EXTERN unsigned int (*vp8_sub_pixel_variance16x16)(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+
+unsigned int vp8_variance_halfpixvar16x16_h_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_h_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_h_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance_halfpixvar16x16_h vp8_variance_halfpixvar16x16_h_wmt
+
+unsigned int vp8_variance_halfpixvar16x16_v_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_v_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_v_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance_halfpixvar16x16_v vp8_variance_halfpixvar16x16_v_wmt
+
+unsigned int vp8_variance_halfpixvar16x16_hv_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_hv_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_variance_halfpixvar16x16_hv_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_variance_halfpixvar16x16_hv vp8_variance_halfpixvar16x16_hv_wmt
+
+unsigned int vp8_sad4x4_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+unsigned int vp8_sad4x4_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+unsigned int vp8_sad4x4_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+#define vp8_sad4x4 vp8_sad4x4_wmt
+
+unsigned int vp8_sad8x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+unsigned int vp8_sad8x8_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+unsigned int vp8_sad8x8_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+#define vp8_sad8x8 vp8_sad8x8_wmt
+
+unsigned int vp8_sad8x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+unsigned int vp8_sad8x16_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+unsigned int vp8_sad8x16_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+#define vp8_sad8x16 vp8_sad8x16_wmt
+
+unsigned int vp8_sad16x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+unsigned int vp8_sad16x8_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+unsigned int vp8_sad16x8_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+#define vp8_sad16x8 vp8_sad16x8_wmt
+
+unsigned int vp8_sad16x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+unsigned int vp8_sad16x16_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+unsigned int vp8_sad16x16_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+unsigned int vp8_sad16x16_sse3(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+RTCD_EXTERN unsigned int (*vp8_sad16x16)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int max_sad);
+
+void vp8_sad4x4x3_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad4x4x3_sse3(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad4x4x3)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x8x3_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x8x3_sse3(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x8x3)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x16x3_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x16x3_sse3(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x16x3)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x8x3_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x8x3_sse3(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x8x3_ssse3(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x8x3)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x16x3_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x16x3_sse3(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x16x3_ssse3(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x16x3)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad4x4x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad4x4x8_sse4(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad4x4x8)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad8x8x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad8x8x8_sse4(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad8x8x8)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad8x16x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad8x16x8_sse4(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad8x16x8)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad16x8x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad16x8x8_sse4(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad16x8x8)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad16x16x8_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+void vp8_sad16x16x8_sse4(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+RTCD_EXTERN void (*vp8_sad16x16x8)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned short *sad_array);
+
+void vp8_sad4x4x4d_c(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+void vp8_sad4x4x4d_sse3(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad4x4x4d)(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x8x4d_c(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x8x4d_sse3(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x8x4d)(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad8x16x4d_c(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+void vp8_sad8x16x4d_sse3(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad8x16x4d)(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x8x4d_c(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x8x4d_sse3(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x8x4d)(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+
+void vp8_sad16x16x4d_c(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+void vp8_sad16x16x4d_sse3(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+RTCD_EXTERN void (*vp8_sad16x16x4d)(const unsigned char *src_ptr, int source_stride, unsigned char *ref_ptr[4], int  ref_stride, unsigned int *sad_array);
+
+unsigned int vp8_get_mb_ss_c(const short *);
+unsigned int vp8_get_mb_ss_mmx(const short *);
+unsigned int vp8_get_mb_ss_sse2(const short *);
+#define vp8_get_mb_ss vp8_get_mb_ss_sse2
+
+unsigned int vp8_sub_pixel_mse16x16_c(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_mse16x16_mmx(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+unsigned int vp8_sub_pixel_mse16x16_wmt(const unsigned char  *src_ptr, int  source_stride, int  xoffset, int  yoffset, const unsigned char *ref_ptr, int Refstride, unsigned int *sse);
+#define vp8_sub_pixel_mse16x16 vp8_sub_pixel_mse16x16_wmt
+
+unsigned int vp8_mse16x16_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_mse16x16_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+unsigned int vp8_mse16x16_wmt(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride, unsigned int *sse);
+#define vp8_mse16x16 vp8_mse16x16_wmt
+
+unsigned int vp8_get4x4sse_cs_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride);
+unsigned int vp8_get4x4sse_cs_mmx(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int  ref_stride);
+#define vp8_get4x4sse_cs vp8_get4x4sse_cs_mmx
+
+void vp8_copy32xn_c(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+void vp8_copy32xn_sse2(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+void vp8_copy32xn_sse3(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+RTCD_EXTERN void (*vp8_copy32xn)(const unsigned char *src_ptr, int source_stride, const unsigned char *ref_ptr, int ref_stride, int n);
+
+void vp8_short_fdct4x4_c(short *input, short *output, int pitch);
+void vp8_short_fdct4x4_mmx(short *input, short *output, int pitch);
+void vp8_short_fdct4x4_sse2(short *input, short *output, int pitch);
+#define vp8_short_fdct4x4 vp8_short_fdct4x4_sse2
+
+void vp8_short_fdct8x4_c(short *input, short *output, int pitch);
+void vp8_short_fdct8x4_mmx(short *input, short *output, int pitch);
+void vp8_short_fdct8x4_sse2(short *input, short *output, int pitch);
+#define vp8_short_fdct8x4 vp8_short_fdct8x4_sse2
+
+void vp8_short_walsh4x4_c(short *input, short *output, int pitch);
+void vp8_short_walsh4x4_sse2(short *input, short *output, int pitch);
+#define vp8_short_walsh4x4 vp8_short_walsh4x4_sse2
+
+void vp8_regular_quantize_b_c(struct block *, struct blockd *);
+void vp8_regular_quantize_b_sse2(struct block *, struct blockd *);
+void vp8_regular_quantize_b_sse4(struct block *, struct blockd *);
+RTCD_EXTERN void (*vp8_regular_quantize_b)(struct block *, struct blockd *);
+
+void vp8_fast_quantize_b_c(struct block *, struct blockd *);
+void vp8_fast_quantize_b_sse2(struct block *, struct blockd *);
+void vp8_fast_quantize_b_ssse3(struct block *, struct blockd *);
+RTCD_EXTERN void (*vp8_fast_quantize_b)(struct block *, struct blockd *);
+
+void vp8_regular_quantize_b_pair_c(struct block *b1, struct block *b2, struct blockd *d1, struct blockd *d2);
+#define vp8_regular_quantize_b_pair vp8_regular_quantize_b_pair_c
+
+void vp8_fast_quantize_b_pair_c(struct block *b1, struct block *b2, struct blockd *d1, struct blockd *d2);
+#define vp8_fast_quantize_b_pair vp8_fast_quantize_b_pair_c
+
+void vp8_quantize_mb_c(struct macroblock *);
+#define vp8_quantize_mb vp8_quantize_mb_c
+
+void vp8_quantize_mby_c(struct macroblock *);
+#define vp8_quantize_mby vp8_quantize_mby_c
+
+void vp8_quantize_mbuv_c(struct macroblock *);
+#define vp8_quantize_mbuv vp8_quantize_mbuv_c
+
+int vp8_block_error_c(short *coeff, short *dqcoeff);
+int vp8_block_error_mmx(short *coeff, short *dqcoeff);
+int vp8_block_error_xmm(short *coeff, short *dqcoeff);
+#define vp8_block_error vp8_block_error_xmm
+
+int vp8_mbblock_error_c(struct macroblock *mb, int dc);
+int vp8_mbblock_error_mmx(struct macroblock *mb, int dc);
+int vp8_mbblock_error_xmm(struct macroblock *mb, int dc);
+#define vp8_mbblock_error vp8_mbblock_error_xmm
+
+int vp8_mbuverror_c(struct macroblock *mb);
+int vp8_mbuverror_mmx(struct macroblock *mb);
+int vp8_mbuverror_xmm(struct macroblock *mb);
+#define vp8_mbuverror vp8_mbuverror_xmm
+
+void vp8_subtract_b_c(struct block *be, struct blockd *bd, int pitch);
+void vp8_subtract_b_mmx(struct block *be, struct blockd *bd, int pitch);
+void vp8_subtract_b_sse2(struct block *be, struct blockd *bd, int pitch);
+#define vp8_subtract_b vp8_subtract_b_sse2
+
+void vp8_subtract_mby_c(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+void vp8_subtract_mby_mmx(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+void vp8_subtract_mby_sse2(short *diff, unsigned char *src, int src_stride, unsigned char *pred, int pred_stride);
+#define vp8_subtract_mby vp8_subtract_mby_sse2
+
+void vp8_subtract_mbuv_c(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+void vp8_subtract_mbuv_mmx(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+void vp8_subtract_mbuv_sse2(short *diff, unsigned char *usrc, unsigned char *vsrc, int src_stride, unsigned char *upred, unsigned char *vpred, int pred_stride);
+#define vp8_subtract_mbuv vp8_subtract_mbuv_sse2
+
+int vp8_full_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_full_search_sadx3(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_full_search_sadx8(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+RTCD_EXTERN int (*vp8_full_search_sad)(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+
+int vp8_refining_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_refining_search_sadx4(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+RTCD_EXTERN int (*vp8_refining_search_sad)(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, int sad_per_bit, int distance, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+
+int vp8_diamond_search_sad_c(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, union int_mv *best_mv, int search_param, int sad_per_bit, int *num00, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+int vp8_diamond_search_sadx4(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, union int_mv *best_mv, int search_param, int sad_per_bit, int *num00, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+RTCD_EXTERN int (*vp8_diamond_search_sad)(struct macroblock *x, struct block *b, struct blockd *d, union int_mv *ref_mv, union int_mv *best_mv, int search_param, int sad_per_bit, int *num00, struct variance_vtable *fn_ptr, int *mvcost[2], union int_mv *center_mv);
+
+void vp8_yv12_copy_partial_frame_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_partial_frame vp8_yv12_copy_partial_frame_c
+
+int vp8_denoiser_filter_c(struct yv12_buffer_config* mc_running_avg, struct yv12_buffer_config* running_avg, struct macroblock* signal, unsigned int motion_magnitude2, int y_offset, int uv_offset);
+int vp8_denoiser_filter_sse2(struct yv12_buffer_config* mc_running_avg, struct yv12_buffer_config* running_avg, struct macroblock* signal, unsigned int motion_magnitude2, int y_offset, int uv_offset);
+#define vp8_denoiser_filter vp8_denoiser_filter_sse2
+
+void vp8_horizontal_line_4_5_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_4_5_scale vp8_horizontal_line_4_5_scale_c
+
+void vp8_vertical_band_4_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_4_5_scale vp8_vertical_band_4_5_scale_c
+
+void vp8_last_vertical_band_4_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_4_5_scale vp8_last_vertical_band_4_5_scale_c
+
+void vp8_horizontal_line_2_3_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_2_3_scale vp8_horizontal_line_2_3_scale_c
+
+void vp8_vertical_band_2_3_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_3_scale vp8_vertical_band_2_3_scale_c
+
+void vp8_last_vertical_band_2_3_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_2_3_scale vp8_last_vertical_band_2_3_scale_c
+
+void vp8_horizontal_line_3_5_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_3_5_scale vp8_horizontal_line_3_5_scale_c
+
+void vp8_vertical_band_3_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_3_5_scale vp8_vertical_band_3_5_scale_c
+
+void vp8_last_vertical_band_3_5_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_3_5_scale vp8_last_vertical_band_3_5_scale_c
+
+void vp8_horizontal_line_3_4_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_3_4_scale vp8_horizontal_line_3_4_scale_c
+
+void vp8_vertical_band_3_4_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_3_4_scale vp8_vertical_band_3_4_scale_c
+
+void vp8_last_vertical_band_3_4_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_3_4_scale vp8_last_vertical_band_3_4_scale_c
+
+void vp8_horizontal_line_1_2_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_1_2_scale vp8_horizontal_line_1_2_scale_c
+
+void vp8_vertical_band_1_2_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_1_2_scale vp8_vertical_band_1_2_scale_c
+
+void vp8_last_vertical_band_1_2_scale_c(unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_last_vertical_band_1_2_scale vp8_last_vertical_band_1_2_scale_c
+
+void vp8_horizontal_line_5_4_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_5_4_scale vp8_horizontal_line_5_4_scale_c
+
+void vp8_vertical_band_5_4_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_5_4_scale vp8_vertical_band_5_4_scale_c
+
+void vp8_horizontal_line_5_3_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_5_3_scale vp8_horizontal_line_5_3_scale_c
+
+void vp8_vertical_band_5_3_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_5_3_scale vp8_vertical_band_5_3_scale_c
+
+void vp8_horizontal_line_2_1_scale_c(const unsigned char *source, unsigned int source_width, unsigned char *dest, unsigned int dest_width);
+#define vp8_horizontal_line_2_1_scale vp8_horizontal_line_2_1_scale_c
+
+void vp8_vertical_band_2_1_scale_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_1_scale vp8_vertical_band_2_1_scale_c
+
+void vp8_vertical_band_2_1_scale_i_c(unsigned char *source, unsigned int src_pitch, unsigned char *dest, unsigned int dest_pitch, unsigned int dest_width);
+#define vp8_vertical_band_2_1_scale_i vp8_vertical_band_2_1_scale_i_c
+
+void vp8_yv12_extend_frame_borders_c(struct yv12_buffer_config *ybf);
+#define vp8_yv12_extend_frame_borders vp8_yv12_extend_frame_borders_c
+
+void vp8_yv12_copy_frame_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_frame vp8_yv12_copy_frame_c
+
+void vp8_yv12_copy_y_c(struct yv12_buffer_config *src_ybc, struct yv12_buffer_config *dst_ybc);
+#define vp8_yv12_copy_y vp8_yv12_copy_y_c
+void vpx_rtcd(void);
+
+#ifdef RTCD_C
+#include "vpx_ports/x86.h"
+void vpx_rtcd(void)
+{
+    int flags = x86_simd_caps();
+
+    (void)flags;
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    vp8_build_intra_predictors_mby_s = vp8_build_intra_predictors_mby_s_sse2;
+    if (flags & HAS_SSSE3) vp8_build_intra_predictors_mby_s = vp8_build_intra_predictors_mby_s_ssse3;
+
+    vp8_build_intra_predictors_mbuv_s = vp8_build_intra_predictors_mbuv_s_sse2;
+    if (flags & HAS_SSSE3) vp8_build_intra_predictors_mbuv_s = vp8_build_intra_predictors_mbuv_s_ssse3;
+
+
+
+
+
+
+
+
+
+
+
+
+
+    vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_sse2;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict16x16 = vp8_sixtap_predict16x16_ssse3;
+
+    vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_sse2;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict8x8 = vp8_sixtap_predict8x8_ssse3;
+
+    vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_sse2;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict8x4 = vp8_sixtap_predict8x4_ssse3;
+
+    vp8_sixtap_predict4x4 = vp8_sixtap_predict4x4_mmx;
+    if (flags & HAS_SSSE3) vp8_sixtap_predict4x4 = vp8_sixtap_predict4x4_ssse3;
+
+    vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_sse2;
+    if (flags & HAS_SSSE3) vp8_bilinear_predict16x16 = vp8_bilinear_predict16x16_ssse3;
+
+    vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_sse2;
+    if (flags & HAS_SSSE3) vp8_bilinear_predict8x8 = vp8_bilinear_predict8x8_ssse3;
+
+
+
+
+
+
+
+
+
+
+
+    vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_wmt;
+    if (flags & HAS_SSSE3) vp8_sub_pixel_variance16x8 = vp8_sub_pixel_variance16x8_ssse3;
+
+    vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_wmt;
+    if (flags & HAS_SSSE3) vp8_sub_pixel_variance16x16 = vp8_sub_pixel_variance16x16_ssse3;
+
+
+
+
+
+
+
+
+    vp8_sad16x16 = vp8_sad16x16_wmt;
+    if (flags & HAS_SSE3) vp8_sad16x16 = vp8_sad16x16_sse3;
+
+    vp8_sad4x4x3 = vp8_sad4x4x3_c;
+    if (flags & HAS_SSE3) vp8_sad4x4x3 = vp8_sad4x4x3_sse3;
+
+    vp8_sad8x8x3 = vp8_sad8x8x3_c;
+    if (flags & HAS_SSE3) vp8_sad8x8x3 = vp8_sad8x8x3_sse3;
+
+    vp8_sad8x16x3 = vp8_sad8x16x3_c;
+    if (flags & HAS_SSE3) vp8_sad8x16x3 = vp8_sad8x16x3_sse3;
+
+    vp8_sad16x8x3 = vp8_sad16x8x3_c;
+    if (flags & HAS_SSE3) vp8_sad16x8x3 = vp8_sad16x8x3_sse3;
+    if (flags & HAS_SSSE3) vp8_sad16x8x3 = vp8_sad16x8x3_ssse3;
+
+    vp8_sad16x16x3 = vp8_sad16x16x3_c;
+    if (flags & HAS_SSE3) vp8_sad16x16x3 = vp8_sad16x16x3_sse3;
+    if (flags & HAS_SSSE3) vp8_sad16x16x3 = vp8_sad16x16x3_ssse3;
+
+    vp8_sad4x4x8 = vp8_sad4x4x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad4x4x8 = vp8_sad4x4x8_sse4;
+
+    vp8_sad8x8x8 = vp8_sad8x8x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad8x8x8 = vp8_sad8x8x8_sse4;
+
+    vp8_sad8x16x8 = vp8_sad8x16x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad8x16x8 = vp8_sad8x16x8_sse4;
+
+    vp8_sad16x8x8 = vp8_sad16x8x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad16x8x8 = vp8_sad16x8x8_sse4;
+
+    vp8_sad16x16x8 = vp8_sad16x16x8_c;
+    if (flags & HAS_SSE4_1) vp8_sad16x16x8 = vp8_sad16x16x8_sse4;
+
+    vp8_sad4x4x4d = vp8_sad4x4x4d_c;
+    if (flags & HAS_SSE3) vp8_sad4x4x4d = vp8_sad4x4x4d_sse3;
+
+    vp8_sad8x8x4d = vp8_sad8x8x4d_c;
+    if (flags & HAS_SSE3) vp8_sad8x8x4d = vp8_sad8x8x4d_sse3;
+
+    vp8_sad8x16x4d = vp8_sad8x16x4d_c;
+    if (flags & HAS_SSE3) vp8_sad8x16x4d = vp8_sad8x16x4d_sse3;
+
+    vp8_sad16x8x4d = vp8_sad16x8x4d_c;
+    if (flags & HAS_SSE3) vp8_sad16x8x4d = vp8_sad16x8x4d_sse3;
+
+    vp8_sad16x16x4d = vp8_sad16x16x4d_c;
+    if (flags & HAS_SSE3) vp8_sad16x16x4d = vp8_sad16x16x4d_sse3;
+
+
+
+
+
+    vp8_copy32xn = vp8_copy32xn_sse2;
+    if (flags & HAS_SSE3) vp8_copy32xn = vp8_copy32xn_sse3;
+
+
+
+
+    vp8_regular_quantize_b = vp8_regular_quantize_b_sse2;
+    if (flags & HAS_SSE4_1) vp8_regular_quantize_b = vp8_regular_quantize_b_sse4;
+
+    vp8_fast_quantize_b = vp8_fast_quantize_b_sse2;
+    if (flags & HAS_SSSE3) vp8_fast_quantize_b = vp8_fast_quantize_b_ssse3;
+
+
+
+
+
+
+
+
+
+
+
+
+    vp8_full_search_sad = vp8_full_search_sad_c;
+    if (flags & HAS_SSE3) vp8_full_search_sad = vp8_full_search_sadx3;
+    if (flags & HAS_SSE4_1) vp8_full_search_sad = vp8_full_search_sadx8;
+
+    vp8_refining_search_sad = vp8_refining_search_sad_c;
+    if (flags & HAS_SSE3) vp8_refining_search_sad = vp8_refining_search_sadx4;
+
+    vp8_diamond_search_sad = vp8_diamond_search_sad_c;
+    if (flags & HAS_SSE3) vp8_diamond_search_sad = vp8_diamond_search_sadx4;
+}
+#endif
+#endif
diff --git a/third_party/libvpx/source/config/win/x64/vpx_version.h b/third_party/libvpx/source/config/win/x64/vpx_version.h
new file mode 100644
index 0000000..f6b740c
--- /dev/null
+++ b/third_party/libvpx/source/config/win/x64/vpx_version.h
@@ -0,0 +1,7 @@
+#define VERSION_MAJOR  1
+#define VERSION_MINOR  1
+#define VERSION_PATCH  0
+#define VERSION_EXTRA  "6-gdd6134b"
+#define VERSION_PACKED ((VERSION_MAJOR<<16)|(VERSION_MINOR<<8)|(VERSION_PATCH))
+#define VERSION_STRING_NOSP "v1.1.0-6-gdd6134b"
+#define VERSION_STRING      " v1.1.0-6-gdd6134b"
diff --git a/third_party/libvpx/unpack_lib_posix.sh b/third_party/libvpx/unpack_lib_posix.sh
new file mode 100755
index 0000000..5a55f76
--- /dev/null
+++ b/third_party/libvpx/unpack_lib_posix.sh
@@ -0,0 +1,67 @@
+#!/bin/bash -e
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script is used to unpack a .a file into object files.
+#
+# Arguments:
+#
+# d - Output directory.
+# a - List of possible locations of the archive.
+# f - List of files to extract.
+#
+
+while getopts "d:a:f:" flag
+do
+  if [ "$flag" = "d" ]; then
+    out_dir=$OPTARG
+  elif [ "$flag" = "a" ]; then
+    lib_files="$OPTARG $lib_files"
+  elif [ "$flag" = "f" ]; then
+    obj_files="$OPTARG $obj_files"
+  fi
+done
+
+for f in $lib_files; do
+  if [ -a $f ]; then
+    lib_file=$f
+    break
+  fi
+done
+
+if [ -z "$lib_file" ]; then
+  echo "Failed to locate a static library."
+  false
+  exit
+fi
+
+# Find the appropriate ar to use.
+ar="ar"
+if [ -n "$AR_target" ]; then
+  ar=$AR_target
+elif [ -n "$AR" ]; then
+  ar=$AR
+fi
+
+obj_list="$($ar t $lib_file | grep '\.o$')"
+
+function extract_object {
+  for f in $obj_list; do
+    filename="${f##*/}"
+
+    if [ -z "$(echo $filename | grep $1)" ]; then
+      continue
+    fi
+
+    # Only echo this if debugging.
+    # echo "Extract $filename from archive to $out_dir/$1."
+    $ar p $lib_file $filename > $out_dir/$1
+    break
+  done
+}
+
+for f in $obj_files; do
+  extract_object $f
+done
diff --git a/third_party_mods/ace/LICENSE b/third_party_mods/ace/LICENSE
new file mode 100644
index 0000000..9204394
--- /dev/null
+++ b/third_party_mods/ace/LICENSE
@@ -0,0 +1,66 @@
+Copyright and Licensing Information for ACE(TM), TAO(TM), CIAO(TM), DAnCE(TM), 
+and CoSMIC(TM)
+
+ACE(TM), TAO(TM), CIAO(TM), DAnCE>(TM), and CoSMIC(TM) (henceforth referred to 
+as "DOC software") are copyrighted by Douglas C. Schmidt and his research 
+group at Washington University, University of California, Irvine, and 
+Vanderbilt University, Copyright (c) 1993-2009, all rights reserved. Since DOC 
+software is open-source, freely available software, you are free to use, 
+modify, copy, and distribute--perpetually and irrevocably--the DOC software 
+source code and object code produced from the source, as well as copy and 
+distribute modified versions of this software. You must, however, include this 
+copyright statement along with any code built using DOC software that you 
+release. No copyright statement needs to be provided if you just ship binary 
+executables of your software products.
+You can use DOC software in commercial and/or binary software releases and are 
+under no obligation to redistribute any of your source code that is built 
+using DOC software. Note, however, that you may not misappropriate the DOC 
+software code, such as copyrighting it yourself or claiming authorship of the 
+DOC software code, in a way that will prevent DOC software from being 
+distributed freely using an open-source development model. You needn't inform 
+anyone that you're using DOC software in your software, though we encourage 
+you to let us know so we can promote your project in the DOC software success 
+stories.
+
+The ACE, TAO, CIAO, DAnCE, and CoSMIC web sites are maintained by the DOC 
+Group at the Institute for Software Integrated Systems (ISIS) and the Center 
+for Distributed Object Computing of Washington University, St. Louis for the 
+development of open-source software as part of the open-source software 
+community. Submissions are provided by the submitter ``as is'' with no 
+warranties whatsoever, including any warranty of merchantability, 
+noninfringement of third party intellectual property, or fitness for any 
+particular purpose. In no event shall the submitter be liable for any direct, 
+indirect, special, exemplary, punitive, or consequential damages, including 
+without limitation, lost profits, even if advised of the possibility of such 
+damages. Likewise, DOC software is provided as is with no warranties of any 
+kind, including the warranties of design, merchantability, and fitness for a 
+particular purpose, noninfringement, or arising from a course of dealing, 
+usage or trade practice. Washington University, UC Irvine, Vanderbilt 
+University, their employees, and students shall have no liability with respect 
+to the infringement of copyrights, trade secrets or any patents by DOC 
+software or any part thereof. Moreover, in no event will Washington 
+University, UC Irvine, or Vanderbilt University, their employees, or students 
+be liable for any lost revenue or profits or other special, indirect and 
+consequential damages.
+
+DOC software is provided with no support and without any obligation on the 
+part of Washington University, UC Irvine, Vanderbilt University, their 
+employees, or students to assist in its use, correction, modification, or 
+enhancement. A number of companies around the world provide commercial support 
+for DOC software, however. DOC software is Y2K-compliant, as long as the 
+underlying OS platform is Y2K-compliant. Likewise, DOC software is compliant 
+with the new US daylight savings rule passed by Congress as "The Energy Policy 
+Act of 2005," which established new daylight savings times (DST) rules for the 
+United States that expand DST as of March 2007. Since DOC software obtains 
+time/date and calendaring information from operating systems users will not be 
+affected by the new DST rules as long as they upgrade their operating systems 
+accordingly.
+
+The names ACE(TM), TAO(TM), CIAO(TM), DAnCE(TM), CoSMIC(TM), Washington 
+University, UC Irvine, and Vanderbilt University, may not be used to endorse 
+or promote products or services derived from this source without express 
+written permission from Washington University, UC Irvine, or Vanderbilt 
+University. This license grants no permission to call products or services 
+derived from this source ACE(TM), TAO(TM), CIAO(TM), DAnCE(TM), or CoSMIC(TM), 
+nor does it grant permission for the name Washington University, UC Irvine, or 
+Vanderbilt University to appear in their names.
\ No newline at end of file
diff --git a/third_party_mods/chromium/LICENSE b/third_party_mods/chromium/LICENSE
new file mode 100644
index 0000000..8dc3504
--- /dev/null
+++ b/third_party_mods/chromium/LICENSE
@@ -0,0 +1,27 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+//    * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+//    * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+//    * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/third_party_mods/mslpl/LICENSE b/third_party_mods/mslpl/LICENSE
new file mode 100644
index 0000000..9a3d932
--- /dev/null
+++ b/third_party_mods/mslpl/LICENSE
@@ -0,0 +1,64 @@
+This license governs use of code marked as “sample” or “example” available on 
+this web site without a license agreement, as provided under the section above 
+titled “NOTICE SPECIFIC TO SOFTWARE AVAILABLE ON THIS WEB SITE.” If you use 
+such code (the “software”), you accept this license. If you do not accept the 
+license, do not use the software.
+
+1. Definitions
+
+The terms “reproduce,” “reproduction,” “derivative works,” and “distribution” 
+have the same meaning here as under U.S. copyright law.
+
+A “contribution” is the original software, or any additions or changes to the 
+software.
+
+A “contributor” is any person that distributes its contribution under this 
+license.
+
+“Licensed patents” are a contributor’s patent claims that read directly on its 
+contribution.
+
+2. Grant of Rights
+
+(A) Copyright Grant - Subject to the terms of this license, including the 
+license conditions and limitations in section 3, each contributor grants you a 
+non-exclusive, worldwide, royalty-free copyright license to reproduce its 
+contribution, prepare derivative works of its contribution, and distribute its 
+contribution or any derivative works that you create.
+
+(B) Patent Grant - Subject to the terms of this license, including the license 
+conditions and limitations in section 3, each contributor grants you a 
+non-exclusive, worldwide, royalty-free license under its licensed patents to 
+make, have made, use, sell, offer for sale, import, and/or otherwise dispose 
+of its contribution in the software or derivative works of the contribution in 
+the software.
+
+3. Conditions and Limitations
+
+(A) No Trademark License- This license does not grant you rights to use any 
+contributors’ name, logo, or trademarks.
+
+(B) If you bring a patent claim against any contributor over patents that you 
+claim are infringed by the software, your patent license from such contributor 
+to the software ends automatically.
+
+(C) If you distribute any portion of the software, you must retain all 
+copyright, patent, trademark, and attribution notices that are present in the 
+software.
+
+(D) If you distribute any portion of the software in source code form, you may 
+do so only under this license by including a complete copy of this license 
+with your distribution. If you distribute any portion of the software in 
+compiled or object code form, you may only do so under a license that complies 
+with this license.
+
+(E) The software is licensed “as-is.” You bear the risk of using it. The 
+contributors give no express warranties, guarantees or conditions. You may 
+have additional consumer rights under your local laws which this license 
+cannot change. To the extent permitted under your local laws, the contributors 
+exclude the implied warranties of merchantability, fitness for a particular 
+purpose and non-infringement.
+
+(F) Platform Limitation - The licenses granted in sections 2(A) and 2(B) 
+extend only to the software or derivative works that you create that run on a 
+Microsoft Windows operating system product.
diff --git a/third_party_mods/sqrt_floor/LICENSE b/third_party_mods/sqrt_floor/LICENSE
new file mode 100644
index 0000000..e24dfe7
--- /dev/null
+++ b/third_party_mods/sqrt_floor/LICENSE
@@ -0,0 +1,26 @@
+The following email record is related to source files spl_sqrt_floor.c
+and spl_sqrt_floor.s in trunk/src/common_audio/signal_processing/.
+
+
+From: Wilco Dijkstra <Wilco.Dijkstra@ntlworld.com>
+Date: Fri, Jun 24, 2011 at 3:20 AM
+Subject: Re: sqrt routine
+To: Kevin Ma <kma@google.com>
+Hi Kevin,
+Thanks for asking. Those routines are public domain (originally posted to 
+comp.sys.arm a long time ago), so you can use them freely for any purpose.
+Cheers,
+Wilco
+
+----- Original Message -----
+From: "Kevin Ma" <kma@google.com>
+To: <Wilco.Dijkstra@ntlworld.com>
+Sent: Thursday, June 23, 2011 11:44 PM
+Subject: Fwd: sqrt routine
+Hi Wilco,
+I saw your sqrt routine from several web sites, including
+http://www.finesse.demon.co.uk/steven/sqrt.html.
+Just wonder if there's any copyright information with your Successive
+approximation routines, or if I can freely use it for any purpose.
+Thanks.
+Kevin
diff --git a/tools/.gitignore b/tools/.gitignore
new file mode 100644
index 0000000..db9f362
--- /dev/null
+++ b/tools/.gitignore
@@ -0,0 +1,14 @@
+*.pyc
+*~
+.*.sw?
+.DS_Store
+.code_review_password
+.cproject
+.metadata
+.project
+.pydevproject
+.settings
+.status_password
+/third_party/gaeunit
+/third_party/google-visualization-python
+/third_party/oauth2
diff --git a/tools/DEPS b/tools/DEPS
new file mode 100644
index 0000000..8c7429c
--- /dev/null
+++ b/tools/DEPS
@@ -0,0 +1,18 @@
+# Tools has its own dependencies, separate from the production code.
+# Use http rather than https; the latter can cause problems for users behind
+# proxies.
+
+deps = {
+  # Used by quality_tracking.
+  "tools/third_party/gaeunit":
+    "http://code.google.com/p/gaeunit.git@e16d5bd4",
+
+  # Used by quality_tracking.
+  "tools/third_party/oauth2":
+    "http://github.com/simplegeo/python-oauth2.git@a83f4a29",
+
+  # Used by tools/quality_tracking/dashboard and tools/python_charts.
+  "tools/third_party/google-visualization-python":
+    "http://google-visualization-python.googlecode.com/svn/trunk/@15",
+}
+
diff --git a/tools/OWNERS b/tools/OWNERS
new file mode 100644
index 0000000..965de1e
--- /dev/null
+++ b/tools/OWNERS
@@ -0,0 +1,4 @@
+kjellander@webrtc.org
+phoglund@webrtc.org
+niklas.enbom@webrtc.org
+andrew@webrtc.org
diff --git a/tools/PRESUBMIT.py b/tools/PRESUBMIT.py
new file mode 100644
index 0000000..61dffcc
--- /dev/null
+++ b/tools/PRESUBMIT.py
@@ -0,0 +1,62 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+def _LicenseHeader(input_api):
+  """Returns the license header regexp."""
+  # Accept any year number from 2011 to the current year
+  current_year = int(input_api.time.strftime('%Y'))
+  allowed_years = (str(s) for s in reversed(xrange(2011, current_year + 1)))
+  years_re = '(' + '|'.join(allowed_years) + ')'
+  license_header = (
+      r'.*? Copyright \(c\) %(year)s The WebRTC project authors\. '
+        r'All Rights Reserved\.\n'
+      r'.*?\n'
+      r'.*? Use of this source code is governed by a BSD-style license\n'
+      r'.*? that can be found in the LICENSE file in the root of the source\n'
+      r'.*? tree\. An additional intellectual property rights grant can be '
+        r'found\n'
+      r'.*? in the file PATENTS\.  All contributing project authors may\n'
+      r'.*? be found in the AUTHORS file in the root of the source tree\.\n'
+  ) % {
+      'year': years_re,
+  }
+  return license_header
+
+def _CommonChecks(input_api, output_api):
+  """Checks common to both upload and commit."""
+  results = []
+  results.extend(input_api.canned_checks.CheckLongLines(
+      input_api, output_api))
+  results.extend(input_api.canned_checks.CheckChangeHasNoTabs(
+      input_api, output_api))
+  results.extend(input_api.canned_checks.CheckChangeHasNoStrayWhitespace(
+      input_api, output_api))
+  results.extend(input_api.canned_checks.CheckChangeTodoHasOwner(
+      input_api, output_api))
+  results.extend(input_api.canned_checks.CheckLicense(
+      input_api, output_api, _LicenseHeader(input_api)))
+  return results
+
+def CheckChangeOnUpload(input_api, output_api):
+  results = []
+  results.extend(_CommonChecks(input_api, output_api))
+  return results
+
+def CheckChangeOnCommit(input_api, output_api):
+  results = []
+  results.extend(_CommonChecks(input_api, output_api))
+  results.extend(input_api.canned_checks.CheckOwners(input_api, output_api))
+  results.extend(input_api.canned_checks.CheckChangeWasUploaded(
+      input_api, output_api))
+  results.extend(input_api.canned_checks.CheckChangeHasDescription(
+      input_api, output_api))
+  results.extend(input_api.canned_checks.CheckChangeHasBugField(
+      input_api, output_api))
+  results.extend(input_api.canned_checks.CheckChangeHasTestField(
+      input_api, output_api))
+  return results
diff --git a/tools/barcode_tools/DEPS b/tools/barcode_tools/DEPS
new file mode 100644
index 0000000..d0325a6
--- /dev/null
+++ b/tools/barcode_tools/DEPS
@@ -0,0 +1,13 @@
+# This is trimmed down version of the main tools DEPS file which is to be used
+# in Chromiums PyAuto WebRTC video quality measurement test. We will only
+# need the Zxing dependencies as we only use the barcode tools in this test.
+
+deps = {
+  # Used by barcode_tools
+  "barcode_tools/third_party/zxing/core":
+    "http://zxing.googlecode.com/svn/trunk/core@2349",
+
+  # Used by barcode_tools
+  "barcode_tools/third_party/zxing/javase":
+    "http://zxing.googlecode.com/svn/trunk/javase@2349",
+}
diff --git a/tools/barcode_tools/README b/tools/barcode_tools/README
new file mode 100644
index 0000000..880cc7d
--- /dev/null
+++ b/tools/barcode_tools/README
@@ -0,0 +1,17 @@
+This file explains how to set up the Zebra Crossing (Zxing) library in order to
+use it in the barcode encoder and decoder tools. Zxing could be found at:
+https://code.google.com/p/zxing/
+
+After checkout, the relevant files from Zxing should be in third_party/zxing,
+relative to this README.
+
+In order to run barcode_encoder.py and barcode_decoder.py we need to have build
+two jar files: zxing/core/core.jar and zxing/javase/javase.jar. In order to
+build these we have to have Ant already installed. Building is as simple as
+running the build_zxing.py script:
+./build_zxing.py,
+which should automatically call ant with the respective build files from the
+Zxing checkout.
+
+For more information on how to run barcode_encoder.py and barcode_decoder.py
+check the documentation in the main functions inside these tools.
diff --git a/tools/barcode_tools/barcode_decoder.py b/tools/barcode_tools/barcode_decoder.py
new file mode 100755
index 0000000..b016219
--- /dev/null
+++ b/tools/barcode_tools/barcode_decoder.py
@@ -0,0 +1,285 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import optparse
+import os
+import subprocess
+import sys
+
+import helper_functions
+
+_DEFAULT_BARCODE_WIDTH = 352
+
+
+def convert_yuv_to_png_files(yuv_file_name, yuv_frame_width, yuv_frame_height,
+                             output_directory = '.'):
+  """Converts a YUV video file into PNG frames.
+
+  The function uses ffmpeg to convert the YUV file. The output of ffmpeg is in
+  the form frame_xxxx.png, where xxxx is the frame number, starting from 0001.
+
+  Args:
+    yuv_file_name(string): The name of the YUV file.
+    yuv_frame_width(int): The width of one YUV frame.
+    yuv_frame_height(int): The height of one YUV frame.
+    output_directory(string): The output directory where the PNG frames will be
+      stored.
+
+  Return:
+    (bool): True if the conversion was OK.
+  """
+  size_string = str(yuv_frame_width) + 'x' + str(yuv_frame_height)
+  output_files_pattern = os.path.join(output_directory, 'frame_%04d.png')
+  command = ['ffmpeg', '-s', '%s' % size_string, '-i', '%s'
+             % yuv_file_name, '-f', 'image2', '-vcodec', 'png',
+             '%s' % output_files_pattern]
+  try:
+    helper_functions.run_shell_command(
+        command, msg='Error during YUV to PNG conversion')
+  except helper_functions.HelperError, err:
+    print err
+    return False
+  return True
+
+
+def decode_frames(barcode_width, barcode_height, input_directory='.',
+                  path_to_zxing='zxing-read-only'):
+  """Decodes the barcodes overlaid in each frame.
+
+  The function uses the example Java command-line tool from the Zxing
+  distribution to decode the barcode in every PNG frame from the input
+  directory. The frames should be named frame_xxxx.png, where xxxx is the frame
+  number. The frame numbers should be consecutive and should start from 0001.
+  The decoding results in a frame_xxxx.txt file for every successfully decoded
+  barcode. This file contains the decoded barcode as 12-digit string (UPC-A
+  format: 11 digits content + one check digit).
+
+  Args:
+    barcode_width(int): Width of the barcode.
+    barcode_height(int): Height of the barcode.
+    input_directory(string): The input directory from where the PNG frames are
+      read.
+    path_to_zxing(string): The path to Zxing.
+  Return:
+    (bool): True if the decoding went without errors.
+  """
+  jars = helper_functions.form_jars_string(path_to_zxing)
+  command_line_decoder ='com.google.zxing.client.j2se.CommandLineRunner'
+  return helper_functions.perform_action_on_all_files(
+      directory=input_directory, file_pattern='frame_',
+      file_extension='png', start_number=1, action=_decode_barcode_in_file,
+      barcode_width=barcode_width, barcode_height=barcode_height, jars=jars,
+      command_line_decoder=command_line_decoder)
+
+
+def _decode_barcode_in_file(file_name, barcode_width, barcode_height, jars,
+                            command_line_decoder):
+  """Decodes the barcode in the upper left corner of a PNG file.
+
+  Args:
+    file_name(string): File name of the PNG file.
+    barcode_width(int): Width of the barcode (in pixels).
+    barcode_height(int): Height of the barcode (in pixels)
+    jars(string): The Zxing core and javase string.
+    command_line_decoder(string): The ZXing command-line decoding tool.
+
+  Return:
+    (bool): True upon success, False otherwise.
+  """
+  command = ['java', '-cp', '%s' % jars,
+             '%s' % command_line_decoder, '--products_only',
+             '--dump_results', '--brief', '--crop=%d,%d,%d,%d' %
+             (0, 0, barcode_width, barcode_height),
+             '%s' % file_name]
+  try:
+    out = helper_functions.run_shell_command(
+        command, msg='Error during decoding of %s' % file_name)
+    if not 'Success' in out:
+      sys.stderr.write('Barcode in %s cannot be decoded\n' % file_name)
+      return False
+  except helper_functions.HelperError, err:
+    print err
+    return False
+  return True
+
+
+def _generate_stats_file(stats_file_name, input_directory='.'):
+  """Generate statistics file.
+
+  The function generates a statistics file. The contents of the file are in the
+  format <frame_name> <barcode>, where frame name is the name of every frame
+  (effectively the frame number) and barcode is the decoded barcode. The frames
+  and the helper .txt files are removed after they have been used.
+  """
+  file_prefix = os.path.join(input_directory, 'frame_')
+  stats_file = open(stats_file_name, 'w')
+
+  for i in range(1, _count_frames_in(input_directory=input_directory) + 1):
+    frame_number = helper_functions.zero_pad(i)
+    barcode_file_name = file_prefix + frame_number + '.txt'
+    png_frame = file_prefix + frame_number + '.png'
+    entry_frame_number = helper_functions.zero_pad(i-1)
+    entry = 'frame_' + entry_frame_number + ' '
+
+    if os.path.isfile(barcode_file_name):
+      barcode = _read_barcode_from_text_file(barcode_file_name)
+      os.remove(barcode_file_name)
+
+      if _check_barcode(barcode):
+        entry += (helper_functions.zero_pad(int(barcode[0:11])) + '\n')
+      else:
+        entry += 'Barcode error\n'  # Barcode is wrongly detected.
+    else:  # Barcode file doesn't exist.
+      entry += 'Barcode error\n'
+
+    stats_file.write(entry)
+    os.remove(png_frame)
+
+  stats_file.close()
+
+
+def _read_barcode_from_text_file(barcode_file_name):
+  """Reads the decoded barcode for a .txt file.
+
+  Args:
+    barcode_file_name(string): The name of the .txt file.
+  Return:
+    (string): The decoded barcode.
+  """
+  barcode_file = open(barcode_file_name, 'r')
+  barcode = barcode_file.read()
+  barcode_file.close()
+
+  return barcode
+
+
+def _check_barcode(barcode):
+  """Check weather the UPC-A barcode was decoded correctly.
+
+  This function calculates the check digit of the provided barcode and compares
+  it to the check digit that was decoded.
+
+  Args:
+    barcode(string): The barcode (12-digit).
+  Return:
+    (bool): True if the barcode was decoded correctly.
+  """
+  if len(barcode) != 12:
+    return False
+
+  r1 = range(0, 11, 2)  # Odd digits
+  r2 = range(1, 10, 2)  # Even digits except last
+  dsum = 0
+  # Sum all the even digits
+  for i in r1:
+    dsum += int(barcode[i])
+  # Multiply the sum by 3
+  dsum *= 3
+  # Add all the even digits except the check digit (12th digit)
+  for i in r2:
+    dsum += int(barcode[i])
+  # Get the modulo 10
+  dsum = dsum % 10
+  # If not 0 substract from 10
+  if dsum != 0:
+    dsum = 10 - dsum
+  # Compare result and check digit
+  return dsum == int(barcode[11])
+
+
+def _count_frames_in(input_directory = '.'):
+  """Calculates the number of frames in the input directory.
+
+  The function calculates the number of frames in the input directory. The
+  frames should be named frame_xxxx.png, where xxxx is the number of the frame.
+  The numbers should start from 1 and should be consecutive.
+
+  Args:
+    input_directory(string): The input directory.
+  Return:
+    (int): The number of frames.
+  """
+  file_prefix = os.path.join(input_directory, 'frame_')
+  file_exists = True
+  num = 1
+
+  while file_exists:
+    file_name = (file_prefix + helper_functions.zero_pad(num) + '.png')
+    if os.path.isfile(file_name):
+      num += 1
+    else:
+      file_exists = False
+  return num - 1
+
+
+def _parse_args():
+  """Registers the command-line options."""
+  usage = "usage: %prog [options]"
+  parser = optparse.OptionParser(usage=usage)
+
+  parser.add_option('--yuv_frame_width', type='int', default=352,
+                    help=('Width of the YUV file\'s frames. '
+                          'Default: %default'))
+  parser.add_option('--yuv_frame_height', type='int', default=288,
+                    help=('Height of the YUV file\'s frames. '
+                          'Default: %default'))
+  parser.add_option('--barcode_width', type='int',
+                    default=_DEFAULT_BARCODE_WIDTH,
+                    help=('Width of the barcodes. Default: %default'))
+  parser.add_option('--barcode_height', type='int', default=32,
+                    help=('Height of the barcodes. Default: %default'))
+  parser.add_option('--yuv_file', type='string', default='output.yuv',
+                    help=('The YUV file to be decoded. Default: %default'))
+  parser.add_option('--stats_file', type='string', default='stats.txt',
+                    help=('The output stats file. Default: %default'))
+  parser.add_option('--png_output_dir', type='string', default='.',
+                    help=('The output directory for the generated PNG files. '
+                          'Default: %default'))
+  parser.add_option('--png_input_dir', type='string', default='.',
+                    help=('The input directory for the generated PNG files. '
+                          'Default: %default'))
+  parser.add_option('--path_to_zxing', type='string', default='zxing',
+                    help=('The path to Zxing. Default: %default'))
+  options = parser.parse_args()[0]
+  return options
+
+
+def _main():
+  """The main function.
+
+  A simple invocation is:
+  ./tools/barcode_tolls/barcode_decoder.py
+  --yuv_file=<path_and_name_of_overlaid_yuv_video>
+  --yuv_frame_width=352 --yuv_frame_height=288 --barcode_height=32
+  --stats_file=<path_and_name_to_stats_file>
+  """
+  options = _parse_args()
+
+  # The barcodes with will be different than the base frame width only if
+  # explicitly specified at the command line.
+  if options.barcode_width == _DEFAULT_BARCODE_WIDTH:
+    options.barcode_width = options.yuv_frame_width
+
+  script_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
+  zxing_dir = os.path.join(script_dir, 'third_party', 'zxing')
+
+  # Convert the overlaid YUV video into a set of PNG frames.
+  convert_yuv_to_png_files(options.yuv_file, options.yuv_frame_width,
+                           options.yuv_frame_height,
+                           output_directory=options.png_output_dir)
+  # Decode the barcodes from the PNG frames.
+  decode_frames(options.barcode_width, options.barcode_height,
+                input_directory=options.png_input_dir, path_to_zxing=zxing_dir)
+  # Generate statistics file.
+  _generate_stats_file(options.stats_file,
+                       input_directory=options.png_input_dir)
+
+
+if __name__ == '__main__':
+  sys.exit(_main())
diff --git a/tools/barcode_tools/barcode_encoder.py b/tools/barcode_tools/barcode_encoder.py
new file mode 100755
index 0000000..429866e
--- /dev/null
+++ b/tools/barcode_tools/barcode_encoder.py
@@ -0,0 +1,358 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import optparse
+import os
+import subprocess
+import sys
+
+import helper_functions
+
+_DEFAULT_BARCODE_WIDTH = 352
+_DEFAULT_BARCODES_FILE = 'barcodes.yuv'
+
+
+def generate_upca_barcodes(number_of_barcodes, barcode_width, barcode_height,
+                           output_directory='.',
+                           path_to_zxing='zxing-read-only'):
+  """Generates UPC-A barcodes.
+
+  This function generates a number_of_barcodes UPC-A barcodes. The function
+  calls an example Java encoder from the Zxing library. The barcodes are
+  generated as PNG images. The width of the barcodes shouldn't be less than 102
+  pixels because otherwise Zxing can't properly generate the barcodes.
+
+  Args:
+    number_of_barcodes(int): The number of barcodes to generate.
+    barcode_width(int): Width of barcode in pixels.
+    barcode_height(int): Height of barcode in pixels.
+    output_directory(string): Output directory where to store generated
+      barcodes.
+    path_to_zxing(string): The path to Zxing.
+
+  Return:
+    (bool): True if the conversion is successful.
+  """
+  base_file_name = os.path.join(output_directory, "barcode_")
+  jars = helper_functions.form_jars_string(path_to_zxing)
+  command_line_encoder ='com.google.zxing.client.j2se.CommandLineEncoder'
+  barcode_width = str(barcode_width)
+  barcode_height = str(barcode_height)
+
+  errors = False
+  for i in range(number_of_barcodes):
+    suffix = helper_functions.zero_pad(i)
+    # Barcodes starting from 0
+    content = helper_functions.zero_pad(i, 11)
+    output_file_name = base_file_name + suffix + ".png"
+
+    command = ["java", "-cp", jars, command_line_encoder,
+               "--barcode_format=UPC_A", "--height=%s" % barcode_height,
+               "--width=%s" % barcode_width,
+               "--output=%s" % (output_file_name), "%s" % (content)]
+    try:
+      helper_functions.run_shell_command(
+          command, msg=('Error during barcode %s generation' % content))
+    except helper_functions.HelperError, err:
+      print err
+      errors = True
+  return not errors
+
+
+def convert_png_to_yuv_barcodes(input_directory='.', output_directory='.'):
+  """Converts PNG barcodes to YUV barcode images.
+
+  This function reads all the PNG files from the input directory which are in
+  the format frame_xxxx.png, where xxxx is the number of the frame, starting
+  from 0000. The frames should be consecutive numbers. The output YUV file is
+  named frame_xxxx.yuv. The function uses ffmpeg to do the conversion.
+
+  Args:
+    input_directory(string): The input direcotry to read the PNG barcodes from.
+    output_directory(string): The putput directory to write the YUV files to.
+  Return:
+    (bool): True if the conversion was without errors.
+  """
+  return helper_functions.perform_action_on_all_files(
+      input_directory, 'barcode_', 'png', 0, _convert_to_yuv_and_delete,
+      output_directory=output_directory, pattern='barcode_')
+
+
+def _convert_to_yuv_and_delete(output_directory, file_name, pattern):
+  """Converts a PNG file to a YUV file and deletes the PNG file.
+
+  Args:
+    output_directory(string): The output directory for the YUV file.
+    file_name(string): The PNG file name.
+    pattern(string): The file pattern of the PNG/YUV file. The PNG/YUV files are
+      named patternxx..x.png/yuv, where xx..x are digits starting from 00..0.
+  Return:
+    (bool): True upon successful conversion, false otherwise.
+  """
+  # Pattern should be in file name
+  if not pattern in file_name:
+    return False
+  pattern_position = file_name.rfind(pattern)
+
+  # Strip the path to the PNG file and replace the png extension with yuv
+  yuv_file_name = file_name[pattern_position:-3] + 'yuv'
+  yuv_file_name = os.path.join(output_directory, yuv_file_name)
+
+  command = ['ffmpeg', '-i', '%s' % (file_name), '-pix_fmt', 'yuv420p',
+             '%s' % (yuv_file_name)]
+  try:
+    helper_functions.run_shell_command(
+        command, msg=('Error during PNG to YUV conversion of %s' %
+                       file_name));
+    os.remove(file_name)
+  except helper_functions.HelperError, err:
+    print err
+    return False
+  return True
+
+
+def combine_yuv_frames_into_one_file(output_file_name, input_directory='.'):
+  """Combines several YUV frames into one YUV video file.
+
+  The function combines the YUV frames from input_directory into one YUV video
+  file. The frames should be named in the format frame_xxxx.yuv where xxxx
+  stands for the frame number. The numbers have to be consecutive and start from
+  0000. The YUV frames are removed after they have been added to the video.
+
+  Args:
+    output_file_name(string): The name of the file to produce.
+    input_directory(string): The directory from which the YUV frames are read.
+  Return:
+    (bool): True if the frame stitching went OK.
+  """
+  output_file = open(output_file_name, "wb")
+  success = helper_functions.perform_action_on_all_files(
+      input_directory, 'barcode_', 'yuv', 0, _add_to_file_and_delete,
+      output_file=output_file)
+  output_file.close()
+  return success
+
+def _add_to_file_and_delete(output_file, file_name):
+  """Adds the contents of a file to a previously opened file.
+
+  Args:
+    output_file(file): The ouput file, previously opened.
+    file_name(string): The file name of the file to add to the output file.
+
+  Return:
+    (bool): True if successful, False otherwise.
+  """
+  input_file = open(file_name, "rb")
+  input_file_contents = input_file.read()
+  output_file.write(input_file_contents)
+  input_file.close()
+  try:
+    os.remove(file_name)
+  except Exception as e:
+    sys.stderr.write('Error in deleting file %s' % file_name)
+    return False
+  return True
+
+
+def _overlay_barcode_and_base_frames(barcodes_file, base_file, output_file,
+                                     barcodes_component_sizes,
+                                     base_component_sizes):
+  """Overlays the next YUV frame from a file with a barcode.
+
+  Args:
+    barcodes_file(FileObject): The YUV file containing the barcodes (opened).
+    base_file(FileObject): The base YUV file (opened).
+    output_file(FileObject): The output overlaid file (opened).
+    barcodes_component_sizes(list of tuples): The width and height of each Y, U
+      and V plane of the barcodes YUV file.
+    base_component_sizes(list of tuples): The width and height of each Y, U and
+      V plane of the base YUV file.
+  Return:
+    (bool): True if there are more planes (i.e. frames) in the base file, false
+      otherwise.
+  """
+  # We will loop three times - once for the Y, U and V planes
+  for ((barcode_comp_width, barcode_comp_height),
+      (base_comp_width, base_comp_height)) in zip(barcodes_component_sizes,
+                                                  base_component_sizes):
+    for base_row in range(base_comp_height):
+      barcode_plane_traversed = False
+      if (base_row < barcode_comp_height) and not barcode_plane_traversed:
+        barcode_plane = barcodes_file.read(barcode_comp_width)
+        if barcode_plane == "":
+          barcode_plane_traversed = True
+      else:
+        barcode_plane_traversed = True
+      base_plane = base_file.read(base_comp_width)
+
+      if base_plane == "":
+        return False
+
+      if not barcode_plane_traversed:
+        # Substitute part of the base component with the top component
+        output_file.write(barcode_plane)
+        base_plane = base_plane[barcode_comp_width:]
+      output_file.write(base_plane)
+  return True
+
+
+def overlay_yuv_files(barcode_width, barcode_height, base_width, base_height,
+                      barcodes_file_name, base_file_name, output_file_name):
+  """Overlays two YUV files starting from the upper left corner of both.
+
+  Args:
+    barcode_width(int): The width of the barcode (to be overlaid).
+    barcode_height(int): The height of the barcode (to be overlaid).
+    base_width(int): The width of a frame of the base file.
+    base_height(int): The height of a frame of the base file.
+    barcodes_file_name(string): The name of the YUV file containing the YUV
+      barcodes.
+    base_file_name(string): The name of the base YUV file.
+    output_file_name(string): The name of the output file where the overlaid
+      video will be written.
+  """
+  # Component sizes = [Y_sizes, U_sizes, V_sizes]
+  barcodes_component_sizes = [(barcode_width, barcode_height),
+                              (barcode_width/2, barcode_height/2),
+                              (barcode_width/2, barcode_height/2)]
+  base_component_sizes = [(base_width, base_height),
+                          (base_width/2, base_height/2),
+                          (base_width/2, base_height/2)]
+
+  barcodes_file = open(barcodes_file_name, 'rb')
+  base_file = open(base_file_name, 'rb')
+  output_file = open(output_file_name, 'wb')
+
+  data_left = True
+  while data_left:
+    data_left = _overlay_barcode_and_base_frames(barcodes_file, base_file,
+                                                 output_file,
+                                                 barcodes_component_sizes,
+                                                 base_component_sizes)
+
+  barcodes_file.close()
+  base_file.close()
+  output_file.close()
+
+
+def calculate_frames_number_from_yuv(yuv_width, yuv_height, file_name):
+  """Calculates the number of frames of a YUV video.
+
+  Args:
+    yuv_width(int): Width of a frame of the yuv file.
+    yuv_height(int): Height of a frame of the YUV file.
+    file_name(string): The name of the YUV file.
+  Return:
+    (int): The number of frames in the YUV file.
+  """
+  file_size = os.path.getsize(file_name)
+
+  y_plane_size = yuv_width * yuv_height
+  u_plane_size = (yuv_width/2) * (yuv_height/2)  # Equals to V plane size too
+  frame_size = y_plane_size + (2 * u_plane_size)
+  return int(file_size/frame_size)  # Should be int anyway
+
+
+def _parse_args():
+  """Registers the command-line options."""
+  usage = "usage: %prog [options]"
+  parser = optparse.OptionParser(usage=usage)
+
+  parser.add_option('--barcode_width', type='int',
+                    default=_DEFAULT_BARCODE_WIDTH,
+                    help=('Width of the barcodes to be overlaid on top of the'
+                          ' base file. Default: %default'))
+  parser.add_option('--barcode_height', type='int', default=32,
+                    help=('Height of the barcodes to be overlaid on top of the'
+                          ' base file. Default: %default'))
+  parser.add_option('--base_frame_width', type='int', default=352,
+                    help=('Width of the base YUV file\'s frames. '
+                          'Default: %default'))
+  parser.add_option('--base_frame_height', type='int', default=288,
+                    help=('Height of the top YUV file\'s frames. '
+                          'Default: %default'))
+  parser.add_option('--barcodes_yuv', type='string',
+                    default=_DEFAULT_BARCODES_FILE,
+                    help=('The YUV file with the barcodes in YUV. '
+                          'Default: %default'))
+  parser.add_option('--base_yuv', type='string', default='base.yuv',
+                    help=('The base YUV file to be overlaid. '
+                          'Default: %default'))
+  parser.add_option('--output_yuv', type='string', default='output.yuv',
+                    help=('The output YUV file containing the base overlaid'
+                          ' with the barcodes. Default: %default'))
+  parser.add_option('--png_barcodes_output_dir', type='string', default='.',
+                    help=('Output directory where the PNG barcodes will be '
+                          'generated. Default: %default'))
+  parser.add_option('--png_barcodes_input_dir', type='string', default='.',
+                    help=('Input directory from where the PNG barcodes will be '
+                          'read. Default: %default'))
+  parser.add_option('--yuv_barcodes_output_dir', type='string', default='.',
+                    help=('Output directory where the YUV barcodes will be '
+                          'generated. Default: %default'))
+  parser.add_option('--yuv_frames_input_dir', type='string', default='.',
+                    help=('Input directory from where the YUV will be '
+                          'read before combination. Default: %default'))
+  parser.add_option('--zxing_dir', type='string', default='zxing',
+                    help=('Path to the Zxing barcodes library. '
+                          'Default: %default'))
+  options = parser.parse_args()[0]
+  return options
+
+
+def _main():
+  """The main function.
+
+  A simple invocation will be:
+  ./tools/barcode_tools/barcode_encoder.py --barcode_height=32
+  --base_frame_width=352 --base_frame_height=288
+  --base_yuv=<path_and_name_of_base_file>
+  --output_yuv=<path and name_of_output_file>
+  """
+  options = _parse_args()
+  # The barcodes with will be different than the base frame width only if
+  # explicitly specified at the command line.
+  if options.barcode_width == _DEFAULT_BARCODE_WIDTH:
+    options.barcode_width = options.base_frame_width
+  # If the user provides a value for the barcodes YUV video file, we will keep
+  # it. Otherwise we create a temp file which is removed after it has been used.
+  keep_barcodes_yuv_file = False
+  if options.barcodes_yuv != _DEFAULT_BARCODES_FILE:
+    keep_barcodes_yuv_file = True
+
+  # Calculate the number of barcodes - it is equal to the number of frames in
+  # the base file.
+  number_of_barcodes = calculate_frames_number_from_yuv(
+      options.base_frame_width, options.base_frame_height, options.base_yuv)
+
+  script_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
+  zxing_dir = os.path.join(script_dir, 'third_party', 'zxing')
+  # Generate barcodes - will generate them in PNG.
+  generate_upca_barcodes(number_of_barcodes, options.barcode_width,
+                         options.barcode_height,
+                         output_directory=options.png_barcodes_output_dir,
+                         path_to_zxing=zxing_dir)
+  # Convert the PNG barcodes to to YUV format.
+  convert_png_to_yuv_barcodes(options.png_barcodes_input_dir,
+                              options.yuv_barcodes_output_dir)
+  # Combine the YUV barcodes into one YUV file.
+  combine_yuv_frames_into_one_file(options.barcodes_yuv,
+                                   input_directory=options.yuv_frames_input_dir)
+  # Overlay the barcodes over the base file.
+  overlay_yuv_files(options.barcode_width, options.barcode_height,
+                    options.base_frame_width, options.base_frame_height,
+                    options.barcodes_yuv, options.base_yuv, options.output_yuv)
+
+  if not keep_barcodes_yuv_file:
+    # Remove the temporary barcodes YUV file
+    os.remove(options.barcodes_yuv)
+
+
+if __name__ == '__main__':
+  sys.exit(_main())
\ No newline at end of file
diff --git a/tools/barcode_tools/build_zxing.py b/tools/barcode_tools/build_zxing.py
new file mode 100644
index 0000000..62a29ef
--- /dev/null
+++ b/tools/barcode_tools/build_zxing.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import os
+import subprocess
+import sys
+
+
+def run_ant_build_command(path_to_ant_build_file):
+  """Tries to build the passed build file with ant."""
+  ant_suffix = '.bat' if 'win32' in sys.platform else ''
+  cmd = ['ant%s' % ant_suffix, '-buildfile', path_to_ant_build_file]
+  try:
+    process = subprocess.Popen(cmd, stdout=subprocess.PIPE,
+                               stderr=subprocess.PIPE)
+    stdout, stderr = process.communicate()
+    if process.returncode != 0:
+      print 'Failed to execute: %s\nError: %s' % (' '.join(cmd), stderr)
+    else:
+      print stdout
+  except Exception as e:
+    print 'Failed to execute: %s\nError: %s' % (' '.join(cmd), e)
+
+
+def _main():
+  core_build = os.path.join('third_party', 'zxing', 'core', 'build.xml')
+  run_ant_build_command(core_build)
+
+  javase_build = os.path.join('third_party', 'zxing', 'javase', 'build.xml')
+  run_ant_build_command(javase_build)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(_main())
\ No newline at end of file
diff --git a/tools/barcode_tools/helper_functions.py b/tools/barcode_tools/helper_functions.py
new file mode 100644
index 0000000..74ff064
--- /dev/null
+++ b/tools/barcode_tools/helper_functions.py
@@ -0,0 +1,114 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import os
+import subprocess
+import sys
+
+_DEFAULT_PADDING = 4
+
+
+class HelperError(Exception):
+  """Exception raised for errors in the helper."""
+  pass
+
+
+def zero_pad(number, padding=_DEFAULT_PADDING):
+  """Converts an int into a zero padded string.
+
+  Args:
+    number(int): The number to convert.
+    padding(int): The number of chars in the output. Note that if you pass for
+      example number=23456 and padding=4, the output will still be '23456',
+      i.e. it will not be cropped. If you pass number=2 and padding=4, the
+      return value will be '0002'.
+  Return:
+    (string): The zero padded number converted to string.
+  """
+  return str(number).zfill(padding)
+
+
+def run_shell_command(command, msg=None):
+  """Executes a command.
+
+  Args:
+    command(list): Command list to execute.
+    msg(string): Message describing the error in case the command fails.
+
+  Return:
+    (string): The standard output from running the command.
+
+  Raise:
+    HelperError: If command fails.
+  """
+  cmd_list = [str(x) for x in command]
+  cmd = ' '.join(cmd_list)
+
+  process = subprocess.Popen(cmd_list, stdout=subprocess.PIPE,
+                             stderr=subprocess.PIPE)
+  output, error = process.communicate()
+  if process.returncode != 0:
+    if msg:
+      print msg
+    raise HelperError('Failed to run %s: command returned %d and printed '
+                      '%s and %s' % (cmd, process.returncode, output, error))
+  return output.strip()
+
+
+def form_jars_string(path_to_zxing):
+  """Forms the the Zxing core and javase jars argument.
+
+  Args:
+    path_to_zxing(string): The path to the Zxing checkout folder.
+  Return:
+    (string): The newly formed jars argument.
+  """
+  javase_jar = os.path.join(path_to_zxing, "javase", "javase.jar")
+  core_jar = os.path.join(path_to_zxing, "core", "core.jar")
+  delimiter = ':'
+  if os.name != 'posix':
+    delimiter = ';'
+  return javase_jar + delimiter + core_jar
+
+
+def perform_action_on_all_files(directory, file_pattern, file_extension,
+                                start_number, action, **kwargs):
+  """Function that performs a given action on all files matching a pattern.
+
+  It is assumed that the files are named file_patternxxxx.file_extension, where
+  xxxx are digits. The file names start from
+  file_patern0..start_number>.file_extension.
+
+  Args:
+    directory(string): The directory where the files live.
+    file_pattern(string): The name pattern of the files.
+    file_extension(string): The files' extension.
+    start_number(int): From where to start to count frames.
+    action(function): The action to be performed over the files.
+
+  Return:
+    (bool): Whether performing the action over all files was successful or not.
+  """
+  file_prefix = os.path.join(directory, file_pattern)
+  file_exists = True
+  file_number = start_number
+  errors = False
+
+  while file_exists:
+    zero_padded_file_number = zero_pad(file_number)
+    file_name = file_prefix + zero_padded_file_number + '.' + file_extension
+    if os.path.isfile(file_name):
+      if not action(file_name=file_name, **kwargs):
+        errors = True
+      file_number += 1
+    else:
+      file_exists = False
+  return not errors
+
+
diff --git a/tools/barcode_tools/yuv_cropper.py b/tools/barcode_tools/yuv_cropper.py
new file mode 100755
index 0000000..9652c16
--- /dev/null
+++ b/tools/barcode_tools/yuv_cropper.py
@@ -0,0 +1,125 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import optparse
+import os
+import sys
+
+
+def _crop_one_frame(yuv_file, output_file, component_sizes):
+  """Crops one frame.
+
+  This function crops one frame going through all the YUV planes and cropping
+  respective amount of rows.
+
+  Args:
+    yuv_file(file): The opened (for binary reading) YUV file.
+    output_file(file): The opened (for binary writing) file.
+    component_sizes(list of 3 3-ples): The list contains the sizes for all the
+      planes (Y, U, V) of the YUV file plus the crop_height scaled for every
+      plane. The sizes equal width, height and crop_height for the Y plane,
+      and are equal to width/2, height/2 and crop_height/2 for the U and V
+      planes.
+  Return:
+    (bool): True if there are more frames to crop, False otherwise.
+  """
+  for comp_width, comp_height, comp_crop_height in component_sizes:
+    for row in range(comp_height):
+      # Read the plane data for this row.
+      yuv_plane = yuv_file.read(comp_width)
+
+      # If the plane is empty, we have reached the end of the file.
+      if yuv_plane == "":
+        return False
+
+      # Only write the plane data for the rows bigger than crop_height.
+      if row >= comp_crop_height:
+        output_file.write(yuv_plane)
+  return True
+
+
+def crop_frames(yuv_file_name, output_file_name, width, height, crop_height):
+  """Crops rows of pixels from the top of the YUV frames.
+
+  This function goes through all the frames in a video and crops the crop_height
+  top pixel rows of every frame.
+
+  Args:
+    yuv_file_name(string): The name of the YUV file to be cropped.
+    output_file_name(string): The name of the output file where the result will
+      be written.
+    width(int): The width of the original YUV file.
+    height(int): The height of the original YUV file.
+    crop_height(int): The height (the number of pixel rows) to be cropped from
+      the frames.
+  """
+  # Component sizes = [Y_sizes, U_sizes, V_sizes].
+  component_sizes = [(width, height, crop_height),
+                     (width/2, height/2, crop_height/2),
+                     (width/2, height/2, crop_height/2)]
+
+  yuv_file = open(yuv_file_name, 'rb')
+  output_file = open(output_file_name, 'wb')
+
+  data_left = True
+  while data_left:
+    data_left = _crop_one_frame(yuv_file, output_file, component_sizes)
+
+  yuv_file.close()
+  output_file.close()
+
+
+def _parse_args():
+  """Registers the command-line options."""
+  usage = "usage: %prog [options]"
+  parser = optparse.OptionParser(usage=usage)
+
+  parser.add_option('--width', type='int',
+                    default=352,
+                    help=('Width of the YUV file\'s frames. '
+                          'Default: %default'))
+  parser.add_option('--height', type='int', default=288,
+                    help=('Height of the YUV file\'s frames. '
+                          'Default: %default'))
+  parser.add_option('--crop_height', type='int', default=32,
+                    help=('How much of the top of the YUV file to crop. '
+                          'Has to be module of 2. Default: %default'))
+  parser.add_option('--yuv_file', type='string',
+                    help=('The YUV file to be cropped.'))
+  parser.add_option('--output_file', type='string', default='output.yuv',
+                    help=('The output YUV file containing the cropped YUV. '
+                          'Default: %default'))
+  options = parser.parse_args()[0]
+  if not options.yuv_file:
+    parser.error('yuv_file argument missing. Please specify input YUV file!')
+  return options
+
+
+def _main():
+  """A tool to crop rows of pixels from the top part of a YUV file.
+
+  A simple invocation will be:
+  ./yuv_cropper.py --width=640 --height=480 --crop_height=32
+  --yuv_file=<path_and_name_of_yuv_file>
+  --output_yuv=<path and name_of_output_file>
+  """
+  options = _parse_args()
+
+  if os.path.getsize(options.yuv_file) == 0:
+    sys.stderr.write('Error: The YUV file you have passed has size 0. The '
+                     'produced output will also have size 0.\n')
+    return -1
+
+  crop_frames(options.yuv_file, options.output_file, options.width,
+              options.height, options.crop_height)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(_main())
\ No newline at end of file
diff --git a/tools/codereview.settings b/tools/codereview.settings
new file mode 100644
index 0000000..c7ae786
--- /dev/null
+++ b/tools/codereview.settings
@@ -0,0 +1,9 @@
+# This file is used by gcl to get repository specific information.
+CODE_REVIEW_SERVER: webrtc-codereview.appspot.com
+#CC_LIST:
+#VIEW_VC:
+#STATUS:
+TRY_ON_UPLOAD: False
+#TRYSERVER_SVN_URL:
+#GITCL_PREUPLOAD:
+#GITCL_PREDCOMMIT:
diff --git a/tools/coverity/OWNERS b/tools/coverity/OWNERS
new file mode 100644
index 0000000..b44992b
--- /dev/null
+++ b/tools/coverity/OWNERS
@@ -0,0 +1,3 @@
+kjellander@webrtc.org
+phoglund@webrtc.org
+
diff --git a/tools/coverity/coverity.py b/tools/coverity/coverity.py
new file mode 100755
index 0000000..0eb57c3
--- /dev/null
+++ b/tools/coverity/coverity.py
@@ -0,0 +1,324 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+"""
+Runs Coverity Static Analysis on a build of WebRTC.
+
+This script is a modified copy of Chromium's tools/coverity/coverity.py
+Changes made:
+ * Replaced deprecated switches for cov-commit-defects command:
+   * Using --host instead of --remote
+   * Using --stream instead of --product
+   * Removed --cxx (now default enabled)
+ * Changed cleaning of output path, since WebRTC's out dir is located directly
+   in trunk/
+ * Updated some default constants.
+
+The script runs on all WebRTC supported platforms.
+
+On Windows, this script should be run in a Visual Studio Command Prompt, so
+that the INCLUDE, LIB, and PATH environment variables are set properly for
+Visual Studio.
+
+Usage examples:
+  coverity.py
+  coverity.py --dry-run
+  coverity.py --target=debug
+  %comspec% /c ""C:\Program Files\Microsoft Visual Studio 8\VC\vcvarsall.bat"
+      x86 && C:\Python24\python.exe C:\coverity.py"
+
+For a full list of options, pass the '--help' switch.
+
+See http://support.microsoft.com/kb/308569 for running this script as a
+Scheduled Task on Windows XP.
+"""
+
+import optparse
+import os
+import os.path
+import shutil
+import subprocess
+import sys
+import time
+
+# These constants provide default values, but are exposed as command-line
+# flags. See the --help for more info. Note that for historical reasons
+# (the script started out as Windows-only and has legacy usages which pre-date
+# these switches), the constants are all tuned for Windows.
+# Usage of this script on Linux pretty much requires explicit
+# --source-dir, --coverity-bin-dir, --coverity-intermediate-dir, and
+# --coverity-target command line flags.
+
+WEBRTC_SOURCE_DIR = 'C:\\webrtc.latest'
+
+# Relative to WEBRTC_SOURCE_DIR. Only applies to Windows platform.
+WEBRTC_SOLUTION_FILE = 'webrtc.sln'
+
+# Relative to WEBRTC_SOURCE_DIR. Only applies to Windows platform.
+WEBRTC_SOLUTION_DIR = 'build'
+
+COVERITY_BIN_DIR = 'C:\\coverity-integrity-center\\static-analysis\\bin'
+
+COVERITY_INTERMEDIATE_DIR = 'C:\\coverity\\cvbuild\\cr_int'
+
+COVERITY_ANALYZE_OPTIONS = ('--security --concurrency '
+                            '--enable ATOMICITY '
+                            '--enable MISSING_LOCK '
+                            '--enable DELETE_VOID '
+                            '--checker-option PASS_BY_VALUE:size_threshold:16 '
+                            '--checker-option '
+                            'USE_AFTER_FREE:allow_simple_use:false '
+                            '--enable-constraint-fpp '
+                            '--enable-callgraph-metrics')
+
+# Might need to be changed to FQDN
+COVERITY_REMOTE = 'localhost'
+
+COVERITY_PORT = '8080'
+
+COVERITY_STREAM = 'WebRTC-Windows-7-x64'
+
+COVERITY_TARGET = 'Windows'
+
+COVERITY_USER = 'coverityanalyzer'
+# looking for a PASSWORD constant? Look at --coverity-password-file instead.
+
+# Relative to WEBRTC_SOURCE_DIR.  Contains the pid of this script.
+LOCK_FILE = 'coverity.lock'
+
+
+def _ReadPassword(pwfilename):
+  """Reads the coverity password in from a file where it was stashed"""
+  pwfile = open(pwfilename, 'r')
+  password = pwfile.readline()
+  pwfile.close()
+  return password.rstrip()
+
+
+def _RunCommand(cmd, dry_run, shell=False, echo_cmd=True):
+  """Runs the command if dry_run is false, otherwise just prints the command."""
+  if echo_cmd:
+    print cmd
+  if not dry_run:
+    return subprocess.call(cmd, shell=shell)
+  else:
+    return 0
+
+
+def _ReleaseLock(lock_file, lock_filename):
+  """Removes the lockfile. Function-ized so we can bail from anywhere"""
+  os.close(lock_file)
+  os.remove(lock_filename)
+
+
+def run_coverity(options, args):
+  """Runs all the selected tests for the given build type and target."""
+  # Create the lock file to prevent another instance of this script from
+  # running.
+  lock_filename = os.path.join(options.source_dir, LOCK_FILE)
+  try:
+    lock_file = os.open(lock_filename,
+                        os.O_CREAT | os.O_EXCL | os.O_TRUNC | os.O_RDWR)
+  except OSError, err:
+    print 'Failed to open lock file:\n  ' + str(err)
+    return 1
+
+  # Write the pid of this script (the python.exe process) to the lock file.
+  os.write(lock_file, str(os.getpid()))
+
+  options.target = options.target.title()
+
+  start_time = time.time()
+
+  print 'Change directory to ' + options.source_dir
+  os.chdir(options.source_dir)
+
+  # The coverity-password filename may have been a relative path.
+  # If so, assume it's relative to the source directory, which means
+  # the time to read the password is after we do the chdir().
+  coverity_password = _ReadPassword(options.coverity_password_file)
+
+  cmd = 'gclient sync --force'
+  gclient_exit = _RunCommand(cmd, options.dry_run, shell=True)
+  if gclient_exit != 0:
+    print 'gclient aborted with status %s' % gclient_exit
+    _ReleaseLock(lock_file, lock_filename)
+    return 1
+
+  print 'Elapsed time: %ds' % (time.time() - start_time)
+
+  # Do a clean build.  Remove the build output directory first.
+  if sys.platform.startswith('linux'):
+    rm_path = os.path.join(options.source_dir,'out',options.target)
+  elif sys.platform == 'win32':
+    rm_path = os.path.join(options.source_dir,options.solution_dir,
+                           options.target)
+  elif sys.platform == 'darwin':
+    rm_path = os.path.join(options.source_dir,'xcodebuild')
+  else:
+    print 'Platform "%s" unrecognized, aborting' % sys.platform
+    _ReleaseLock(lock_file, lock_filename)
+    return 1
+
+  if options.dry_run:
+    print 'shutil.rmtree(%s)' % repr(rm_path)
+  else:
+    shutil.rmtree(rm_path,True)
+
+  if options.preserve_intermediate_dir:
+      print 'Preserving intermediate directory.'
+  else:
+    if options.dry_run:
+      print 'shutil.rmtree(%s)' % repr(options.coverity_intermediate_dir)
+      print 'os.mkdir(%s)' % repr(options.coverity_intermediate_dir)
+    else:
+      shutil.rmtree(options.coverity_intermediate_dir,True)
+      os.mkdir(options.coverity_intermediate_dir)
+
+  print 'Elapsed time: %ds' % (time.time() - start_time)
+
+  use_shell_during_make = False
+  if sys.platform.startswith('linux'):
+    use_shell_during_make = True
+    _RunCommand('pwd', options.dry_run, shell=True)
+    cmd = '%s/cov-build --dir %s make BUILDTYPE=%s All' % (
+      options.coverity_bin_dir, options.coverity_intermediate_dir,
+      options.target)
+  elif sys.platform == 'win32':
+    cmd = ('%s\\cov-build.exe --dir %s devenv.com %s\\%s /build %s '
+           '/project All.vcproj') % (
+      options.coverity_bin_dir, options.coverity_intermediate_dir,
+      options.source_dir, options.solution_file, options.target)
+  elif sys.platform == 'darwin':
+    use_shell_during_make = True
+    _RunCommand('pwd', options.dry_run, shell=True)
+    cmd = ('%s/cov-build --dir %s xcodebuild -project webrtc.xcodeproj '
+           '-configuration %s -target All') % (
+      options.coverity_bin_dir, options.coverity_intermediate_dir,
+      options.target)
+
+
+  _RunCommand(cmd, options.dry_run, shell=use_shell_during_make)
+  print 'Elapsed time: %ds' % (time.time() - start_time)
+
+  cov_analyze_exe = os.path.join(options.coverity_bin_dir,'cov-analyze')
+  cmd = '%s --dir %s %s' % (cov_analyze_exe,
+                            options.coverity_intermediate_dir,
+                            options.coverity_analyze_options)
+  _RunCommand(cmd, options.dry_run, shell=use_shell_during_make)
+  print 'Elapsed time: %ds' % (time.time() - start_time)
+
+  cov_commit_exe = os.path.join(options.coverity_bin_dir,'cov-commit-defects')
+
+  # On Linux we have started using a Target with a space in it, so we want
+  # to quote it. On the other hand, Windows quoting doesn't work quite the
+  # same way. To be conservative, I'd like to avoid quoting an argument
+  # that doesn't need quoting and which we haven't historically been quoting
+  # on that platform. So, only quote the target if we have to.
+  coverity_target = options.coverity_target
+  if sys.platform != 'win32':
+    coverity_target = '"%s"' % coverity_target
+
+  cmd = ('%s --dir %s --host %s --port %s '
+         '--stream %s '
+         '--target %s '
+         '--user %s '
+         '--password %s') % (cov_commit_exe,
+                             options.coverity_intermediate_dir,
+                             options.coverity_dbhost,
+                             options.coverity_port,
+                             options.coverity_stream,
+                             coverity_target,
+                             options.coverity_user,
+                             coverity_password)
+  # Avoid echoing the Commit command because it has a password in it
+  print 'Commiting defects to Coverity Integrity Manager server...'
+  _RunCommand(cmd, options.dry_run, shell=use_shell_during_make, echo_cmd=False)
+
+  print 'Completed! Total time: %ds' % (time.time() - start_time)
+
+  _ReleaseLock(lock_file, lock_filename)
+
+  return 0
+
+
+def main():
+  option_parser = optparse.OptionParser()
+  option_parser.add_option('', '--dry-run', action='store_true', default=False,
+                           help='print but don\'t run the commands')
+
+  option_parser.add_option('', '--target', default='Release',
+                           help='build target (Debug or Release)')
+
+  option_parser.add_option('', '--source-dir', dest='source_dir',
+                           help='full path to directory ABOVE "src"',
+                           default=WEBRTC_SOURCE_DIR)
+
+  option_parser.add_option('', '--solution-file', dest='solution_file',
+                           help='filename of solution file to build (Win only)',
+                           default=WEBRTC_SOLUTION_FILE)
+
+  option_parser.add_option('', '--solution-dir', dest='solution_dir',
+                           help='build directory for the solution (Win only)',
+                           default=WEBRTC_SOLUTION_DIR)
+
+  option_parser.add_option('', '--coverity-bin-dir', dest='coverity_bin_dir',
+                           default=COVERITY_BIN_DIR)
+
+  option_parser.add_option('', '--coverity-intermediate-dir',
+                           dest='coverity_intermediate_dir',
+                           default=COVERITY_INTERMEDIATE_DIR)
+
+  option_parser.add_option('', '--coverity-analyze-options',
+                           dest='coverity_analyze_options',
+                           help=('all cov-analyze options, e.g. "%s"'
+                                 % COVERITY_ANALYZE_OPTIONS),
+                           default=COVERITY_ANALYZE_OPTIONS)
+
+  option_parser.add_option('', '--coverity-db-host',
+                           dest='coverity_dbhost',
+                           help=('coverity defect db server hostname, e.g. %s'
+                                 % COVERITY_REMOTE),
+                           default=COVERITY_REMOTE)
+
+  option_parser.add_option('', '--coverity-db-port', dest='coverity_port',
+                           help=('port # of coverity web/db server, e.g. %s'
+                                 % COVERITY_PORT),
+                           default=COVERITY_PORT)
+
+  option_parser.add_option('', '--coverity-stream', dest='coverity_stream',
+                           help=('Name of stream reported to Coverity, e.g. %s'
+                                 % COVERITY_STREAM),
+                           default=COVERITY_STREAM)
+
+  option_parser.add_option('', '--coverity-target', dest='coverity_target',
+                           help='Platform Target reported to coverity',
+                           default=COVERITY_TARGET)
+
+  option_parser.add_option('', '--coverity-user', dest='coverity_user',
+                           help='Username used to log into coverity',
+                           default=COVERITY_USER)
+
+  option_parser.add_option('', '--coverity-password-file',
+                           dest='coverity_password_file',
+                           help='file containing the coverity password',
+                           default='coverity-password')
+
+  helpmsg = ('By default, the intermediate dir is emptied before analysis. '
+             'This switch disables that behavior.')
+  option_parser.add_option('', '--preserve-intermediate-dir',
+                           action='store_true', help=helpmsg,
+                           default=False)
+
+  options, args = option_parser.parse_args()
+  return run_coverity(options, args)
+
+
+if '__main__' == __name__:
+  sys.exit(main())
diff --git a/tools/create_supplement_gypi.py b/tools/create_supplement_gypi.py
new file mode 100644
index 0000000..7f996e7
--- /dev/null
+++ b/tools/create_supplement_gypi.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import sys
+
+supplement_gypi = """#!/usr/bin/env python
+# This file is generated by %s.  Not for check-in.
+# Please see the WebRTC DEPS file for details.
+{
+  'variables': {
+    'build_with_chromium': 0,
+  }
+}
+"""
+
+def main(argv):
+  open(argv[1], 'w').write(supplement_gypi % argv[0])
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/tools/e2e_quality/audio/README b/tools/e2e_quality/audio/README
new file mode 100644
index 0000000..5c8b6f8
--- /dev/null
+++ b/tools/e2e_quality/audio/README
@@ -0,0 +1,27 @@
+The tools here run an end-to-end audio quality test on Linux using PulseAudio.
+
+INSTALLATION
+The test depends on PulseAudio virtual devices (null sinks). Without additional
+arguments, run_audio_test.py expects a pair of sinks named "capture" and
+"render". To create these devices at machine startup, place the provided
+default.pa file in ~/.pulse. Alternately, the "pacmd" commands therein can be
+run on the command-line to create the devices.
+
+Similarly, place the provided daemon.conf file in ~/.pulse to use high quality
+resampling in PulseAudio. This will reduce the resampling impact on the outcome
+of the test.
+
+Build all WebRTC targets as usual (or just the audio_e2e_harness target) to
+generate the VoiceEngine harness.
+
+USAGE
+Run run_audio_test.py to start. The script has reasonable defaults and will
+use the expected location of audio_e2e_harness. Some settings will usually
+be provided by the user, particularly the comparison tool command-line and
+regular expression to extract the quality metric.
+
+An example command-line, run from trunk/
+
+tools/e2e_quality/audio/run_audio_test.py \
+--input=data/voice_engine/audio_short16.pcm --output=e2e_audio_out.pcm \
+--codec=L16 --compare="comparison-tool" --regexp="(\d\.\d{3})"
diff --git a/tools/e2e_quality/audio/audio_e2e_harness.cc b/tools/e2e_quality/audio/audio_e2e_harness.cc
new file mode 100644
index 0000000..a4789f2
--- /dev/null
+++ b/tools/e2e_quality/audio/audio_e2e_harness.cc
@@ -0,0 +1,99 @@
+/*
+ *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+ *
+ *  Use of this source code is governed by a BSD-style license
+ *  that can be found in the LICENSE file in the root of the source
+ *  tree. An additional intellectual property rights grant can be found
+ *  in the file PATENTS.  All contributing project authors may
+ *  be found in the AUTHORS file in the root of the source tree.
+ */
+
+// Sets up a simple VoiceEngine loopback call with the default audio devices
+// and runs forever. Some parameters can be configured through command-line
+// flags.
+
+#include "gflags/gflags.h"
+#include "gtest/gtest.h"
+
+#include "src/voice_engine/include/voe_audio_processing.h"
+#include "src/voice_engine/include/voe_base.h"
+#include "src/voice_engine/include/voe_codec.h"
+#include "src/voice_engine/include/voe_hardware.h"
+
+DEFINE_string(render, "render", "render device name");
+DEFINE_string(codec, "ISAC", "codec name");
+DEFINE_int32(rate, 16000, "codec sample rate in Hz");
+
+namespace webrtc {
+namespace {
+
+void RunHarness() {
+  VoiceEngine* voe = VoiceEngine::Create();
+  ASSERT_TRUE(voe != NULL);
+  VoEAudioProcessing* audio = VoEAudioProcessing::GetInterface(voe);
+  ASSERT_TRUE(audio != NULL);
+  VoEBase* base = VoEBase::GetInterface(voe);
+  ASSERT_TRUE(base != NULL);
+  VoECodec* codec = VoECodec::GetInterface(voe);
+  ASSERT_TRUE(codec != NULL);
+  VoEHardware* hardware = VoEHardware::GetInterface(voe);
+  ASSERT_TRUE(hardware != NULL);
+
+  ASSERT_EQ(0, base->Init());
+  int channel = base->CreateChannel();
+  ASSERT_NE(-1, channel);
+  ASSERT_EQ(0, base->SetSendDestination(channel, 1234, "127.0.0.1"));
+  ASSERT_EQ(0, base->SetLocalReceiver(channel, 1234));
+
+  CodecInst codec_params = {0};
+  bool codec_found = false;
+  for (int i = 0; i < codec->NumOfCodecs(); i++) {
+    ASSERT_EQ(0, codec->GetCodec(i, codec_params));
+    if (FLAGS_codec.compare(codec_params.plname) == 0 &&
+        FLAGS_rate == codec_params.plfreq) {
+      codec_found = true;
+      break;
+    }
+  }
+  ASSERT_TRUE(codec_found);
+  ASSERT_EQ(0, codec->SetSendCodec(channel, codec_params));
+
+  int num_devices = 0;
+  ASSERT_EQ(0, hardware->GetNumOfPlayoutDevices(num_devices));
+  char device_name[128] = {0};
+  char guid[128] = {0};
+  bool device_found = false;
+  int device_index;
+  for (device_index = 0; device_index < num_devices; device_index++) {
+    ASSERT_EQ(0, hardware->GetPlayoutDeviceName(device_index, device_name,
+                                                guid));
+    if (FLAGS_render.compare(device_name) == 0) {
+      device_found = true;
+      break;
+    }
+  }
+  ASSERT_TRUE(device_found);
+  ASSERT_EQ(0, hardware->SetPlayoutDevice(device_index));
+
+  // Disable all audio processing.
+  ASSERT_EQ(0, audio->SetAgcStatus(false));
+  ASSERT_EQ(0, audio->SetEcStatus(false));
+  ASSERT_EQ(0, audio->EnableHighPassFilter(false));
+  ASSERT_EQ(0, audio->SetNsStatus(false));
+
+  ASSERT_EQ(0, base->StartReceive(channel));
+  ASSERT_EQ(0, base->StartPlayout(channel));
+  ASSERT_EQ(0, base->StartSend(channel));
+
+  // Run forever...
+  while (1) {
+  }
+}
+
+}  // namespace
+}  // namespace webrtc
+
+int main(int argc, char** argv) {
+  google::ParseCommandLineFlags(&argc, &argv, true);
+  webrtc::RunHarness();
+}
diff --git a/tools/e2e_quality/audio/daemon.conf b/tools/e2e_quality/audio/daemon.conf
new file mode 100644
index 0000000..26c4df4
--- /dev/null
+++ b/tools/e2e_quality/audio/daemon.conf
@@ -0,0 +1 @@
+resample-method = speex-float-9
diff --git a/tools/e2e_quality/audio/default.pa b/tools/e2e_quality/audio/default.pa
new file mode 100755
index 0000000..adef2db
--- /dev/null
+++ b/tools/e2e_quality/audio/default.pa
@@ -0,0 +1,6 @@
+# Place in ~/.pulse/ to add null sinks for the audio end-to-end quality test.
+
+.include /etc/pulse/default.pa
+
+load-module module-null-sink sink_name=render sink_properties=device.description=render format=s16 rate=48000 channels=1
+load-module module-null-sink sink_name=capture sink_properties=device.description=capture format=s16 rate=48000 channels=1
diff --git a/tools/e2e_quality/audio/run_audio_test.py b/tools/e2e_quality/audio/run_audio_test.py
new file mode 100755
index 0000000..9e52df3
--- /dev/null
+++ b/tools/e2e_quality/audio/run_audio_test.py
@@ -0,0 +1,125 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+"""Runs an end-to-end audio quality test on Linux.
+
+Expects the presence of PulseAudio virtual devices (null sinks). These are
+configured as default devices for a VoiceEngine audio call. A PulseAudio
+utility (pacat) is used to play to and record from the virtual devices.
+
+The input reference file is then compared to the output file.
+"""
+
+import optparse
+import os
+import re
+import shlex
+import subprocess
+import sys
+import threading
+import time
+
+def main(argv):
+  parser = optparse.OptionParser()
+  usage = 'Usage: %prog [options]'
+  parser.set_usage(usage)
+  parser.add_option('--input', default='input.pcm', help='input PCM file')
+  parser.add_option('--output', default='output.pcm', help='output PCM file')
+  parser.add_option('--codec', default='ISAC', help='codec name')
+  parser.add_option('--rate', default='16000', help='sample rate in Hz')
+  parser.add_option('--channels', default='1', help='number of channels')
+  parser.add_option('--play_sink', default='capture',
+      help='name of PulseAudio sink to which to play audio')
+  parser.add_option('--rec_sink', default='render',
+      help='name of PulseAudio sink whose monitor will be recorded')
+  parser.add_option('--harness',
+      default=os.path.abspath(os.path.dirname(sys.argv[0]) +
+          '/../../../out/Debug/audio_e2e_harness'),
+      help='path to audio harness executable')
+  parser.add_option('--compare',
+                    help='command-line arguments for comparison tool')
+  parser.add_option('--regexp',
+                    help='regular expression to extract the comparison metric')
+  (options, args) = parser.parse_args(argv[1:])
+
+  # Get the initial default capture device, to restore later.
+  command = ['pacmd', 'list-sources']
+  print ' '.join(command)
+  proc = subprocess.Popen(command, stdout=subprocess.PIPE)
+  output = proc.communicate()[0]
+  if proc.returncode != 0:
+    return proc.returncode
+  default_source = re.search(r'(^  \* index: )([0-9]+$)', output,
+                             re.MULTILINE).group(2)
+
+  # Set the default capture device to be used by VoiceEngine. We unfortunately
+  # need to do this rather than select the devices directly through the harness
+  # because monitor sources don't appear in VoiceEngine except as defaults.
+  #
+  # We pass the render device for VoiceEngine to select because (for unknown
+  # reasons) the virtual device is sometimes not used when the default.
+  command = ['pacmd', 'set-default-source', options.play_sink + '.monitor']
+  print ' '.join(command)
+  retcode = subprocess.call(command, stdout=subprocess.PIPE)
+  if retcode != 0:
+    return retcode
+
+  command = [options.harness, '--render=' + options.rec_sink,
+      '--codec=' + options.codec, '--rate=' + options.rate]
+  print ' '.join(command)
+  voe_proc = subprocess.Popen(command)
+
+  # If recording starts before there is data available, pacat sometimes
+  # inexplicably adds a large delay to the start of the file. We wait here in
+  # an attempt to prevent that, because VoE often takes some time to startup a
+  # call.
+  time.sleep(5)
+
+  format_args = ['--format=s16le', '--rate=' + options.rate,
+      '--channels=' + options.channels, '--raw']
+  command = (['pacat', '-p', '-d', options.play_sink] + format_args +
+      [options.input])
+  print ' '.join(command)
+  play_proc = subprocess.Popen(command)
+
+  command = (['pacat', '-r', '-d', options.rec_sink + '.monitor'] +
+      format_args + [options.output])
+  print ' '.join(command)
+  record_proc = subprocess.Popen(command)
+
+  retcode = play_proc.wait()
+  # If these ended early, an exception will be thrown here.
+  record_proc.kill()
+  voe_proc.kill()
+  if retcode != 0:
+    return retcode
+
+  # Restore the initial default capture device.
+  command = ['pacmd', 'set-default-source', default_source]
+  print ' '.join(command)
+  retcode = subprocess.call(command, stdout=subprocess.PIPE)
+  if retcode != 0:
+    return retcode
+
+  if options.compare and options.regexp:
+    command = shlex.split(options.compare) + [options.input, options.output]
+    print ' '.join(command)
+    proc = subprocess.Popen(command, stdout=subprocess.PIPE)
+    output = proc.communicate()[0]
+    if proc.returncode != 0:
+      return proc.returncode
+
+    # The list should only contain one item.
+    print ''.join(re.findall(options.regexp, output))
+
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/tools/e2e_quality/e2e_quality.gyp b/tools/e2e_quality/e2e_quality.gyp
new file mode 100644
index 0000000..fc04bdf
--- /dev/null
+++ b/tools/e2e_quality/e2e_quality.gyp
@@ -0,0 +1,25 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'includes': [ '../../src/build/common.gypi'],
+  'targets': [
+    {
+      'target_name': 'audio_e2e_harness',
+      'type': 'executable',
+      'dependencies': [
+        '<(webrtc_root)/voice_engine/voice_engine.gyp:voice_engine_core',
+        '<(DEPTH)/testing/gtest.gyp:gtest',
+        '<(DEPTH)/third_party/google-gflags/google-gflags.gyp:google-gflags',
+      ],
+      'sources': [
+        'audio/audio_e2e_harness.cc',
+      ],
+    },
+  ],
+}
diff --git a/tools/matlab/maxUnwrap.m b/tools/matlab/maxUnwrap.m
new file mode 100644
index 0000000..276c952
--- /dev/null
+++ b/tools/matlab/maxUnwrap.m
@@ -0,0 +1,25 @@
+function sequence = maxUnwrap(sequence, max)
+%
+% sequence = maxUnwrap(sequence, max)
+% Unwraps when a wrap around is detected.
+%
+% Arguments
+%
+% sequence: The vector to unwrap.
+% max: The maximum value that the sequence can take,
+%      and after which it will wrap over to 0.
+%
+% Return value
+%
+% sequence: The unwrapped vector.
+%
+
+% Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+%
+% Use of this source code is governed by a BSD-style license
+% that can be found in the LICENSE file in the root of the source
+% tree. An additional intellectual property rights grant can be found
+% in the file PATENTS.  All contributing project authors may
+% be found in the AUTHORS file in the root of the source tree.
+
+sequence = round((unwrap(2 * pi * sequence / max) * max) / (2 * pi));
diff --git a/tools/matlab/parseLog.m b/tools/matlab/parseLog.m
new file mode 100644
index 0000000..5d4c3f7
--- /dev/null
+++ b/tools/matlab/parseLog.m
@@ -0,0 +1,54 @@
+function parsed = parseLog(filename)
+%
+% parsed = parseLog(filename)
+% Parses a DataLog text file, with the filename specified in the string
+% filename, into a struct with each column name as a field, and with the
+% column data stored as a vector in that field.
+%
+% Arguments
+%
+% filename: A string with the name of the file to parse.
+%
+% Return value
+%
+% parsed: A struct containing each column parsed from the input file
+%         as a field and with the column data stored as a vector in that 
+%         field.
+%
+
+% Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+%
+% Use of this source code is governed by a BSD-style license
+% that can be found in the LICENSE file in the root of the source
+% tree. An additional intellectual property rights grant can be found
+% in the file PATENTS.  All contributing project authors may
+% be found in the AUTHORS file in the root of the source tree.
+
+table = importdata(filename, ',', 1);
+if ~isstruct(table)
+  error('Malformed file, possibly empty or lacking data entries')
+end
+
+colheaders = table.textdata;
+if length(colheaders) == 1
+  colheaders = regexp(table.textdata{1}, ',', 'split');
+end
+
+parsed = struct;
+i = 1;
+while i <= length(colheaders)
+  % Checking for a multi-value column.
+  m = regexp(colheaders{i}, '([\w\t]+)\[(\d+)\]', 'tokens');
+  if ~isempty(m)
+    % Parse a multi-value column
+    n = str2double(m{1}{2}) - 1;
+    parsed.(strrep(m{1}{1}, ' ', '_')) = table.data(:, i:i+n);
+    i = i + n + 1;
+  elseif ~isempty(colheaders{i})
+    % Parse a single-value column
+    parsed.(strrep(colheaders{i}, ' ', '_')) = table.data(:, i);
+    i = i + 1;
+  else
+    error('Empty column');
+  end
+end
diff --git a/tools/network_emulator/config.py b/tools/network_emulator/config.py
new file mode 100644
index 0000000..60fa485
--- /dev/null
+++ b/tools/network_emulator/config.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Configuration class for network emulation."""
+
+
+class ConnectionConfig(object):
+  """Configuration containing the characteristics of a network connection."""
+
+  def __init__(self, num, name, receive_bw_kbps, send_bw_kbps, delay_ms,
+               packet_loss_percent, queue_slots):
+    self.num = num
+    self.name = name
+    self.receive_bw_kbps = receive_bw_kbps
+    self.send_bw_kbps = send_bw_kbps
+    self.delay_ms = delay_ms
+    self.packet_loss_percent = packet_loss_percent
+    self.queue_slots = queue_slots
+
+  def __str__(self):
+    """String representing the configuration.
+
+    Returns:
+        A string formatted and padded like this example:
+    12 Name                       375 kbps   375 kbps   10   145 ms  0.1 %
+    """
+    left_aligned_name = self.name.ljust(24, ' ')
+    return '%2s %24s %5s kbps %5s kbps %4s %5s ms  %3s %%' % (
+        self.num, left_aligned_name, self.receive_bw_kbps, self.send_bw_kbps,
+        self.queue_slots, self.delay_ms, self.packet_loss_percent)
diff --git a/tools/network_emulator/emulate.py b/tools/network_emulator/emulate.py
new file mode 100755
index 0000000..e256705
--- /dev/null
+++ b/tools/network_emulator/emulate.py
@@ -0,0 +1,198 @@
+#!/usr/bin/env python
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Script for constraining traffic on the local machine."""
+
+import logging
+import optparse
+import os
+import socket
+import sys
+
+import config
+import network_emulator
+
+_DEFAULT_LOG_LEVEL = logging.INFO
+
+# Default port range to apply network constraints on.
+_DEFAULT_PORT_RANGE = (32768, 65535)
+
+_PRESETS = [
+    config.ConnectionConfig(1, 'Generic, Bad', 95, 95, 250, 2, 100),
+    config.ConnectionConfig(2, 'Generic, Average', 375, 375, 145, 0.1, 100),
+    config.ConnectionConfig(3, 'Generic, Good', 1000, 1000, 35, 0, 100),
+    config.ConnectionConfig(4, '3G, Average Case', 780, 330, 100, 0, 100),
+    config.ConnectionConfig(5, '3G, Good', 850, 420, 90, 0, 100),
+    config.ConnectionConfig(6, '3G, Lossy Network', 780, 330, 100, 1, 100),
+    config.ConnectionConfig(7, 'Cable Modem', 6000, 1000, 2, 0, 10),
+    config.ConnectionConfig(8, 'DSL', 2000, 256, 5, 0, 10),
+    config.ConnectionConfig(9, 'Edge, Average Case', 240, 200, 400, 0, 100),
+    config.ConnectionConfig(10, 'Edge, Good', 250, 200, 350, 0, 100),
+    config.ConnectionConfig(11, 'Edge, Lossy Network', 240, 200, 400, 1, 100),
+    config.ConnectionConfig(12, 'Wifi, Average Case', 40000, 33000, 1, 0, 100),
+    config.ConnectionConfig(13, 'Wifi, Good', 45000, 40000, 1, 0, 100),
+    config.ConnectionConfig(14, 'Wifi, Lossy', 40000, 33000, 1, 0, 100),
+    ]
+_PRESETS_DICT = dict((p.num, p) for p in _PRESETS)
+
+_DEFAULT_PRESET_ID = 2
+_DEFAULT_PRESET = _PRESETS_DICT[_DEFAULT_PRESET_ID]
+
+
+class NonStrippingEpilogOptionParser(optparse.OptionParser):
+  """Custom parser to let us show the epilog without weird line breaking."""
+
+  def format_epilog(self, formatter):
+    return self.epilog
+
+
+def _get_external_ip():
+  """Finds out the machine's external IP by connecting to google.com."""
+  external_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+  external_socket.connect(('google.com', 80))
+  return external_socket.getsockname()[0]
+
+
+def _parse_args():
+  """Define and parse the command-line arguments."""
+  presets_string = '\n'.join(str(p) for p in _PRESETS)
+  parser = NonStrippingEpilogOptionParser(epilog=(
+      '\nAvailable presets:\n'
+      '                              Bandwidth (kbps)                  Packet\n'
+      'ID Name                       Receive     Send    Queue  Delay   loss \n'
+      '-- ----                      ---------   -------- ----- ------- ------\n'
+      '%s\n' % presets_string))
+  parser.add_option('-p', '--preset', type='int', default=2,
+                    help=('ConnectionConfig configuration, specified by ID. '
+                          'Default: %default'))
+  parser.add_option('-r', '--receive-bw', type='int',
+                    default=_DEFAULT_PRESET.receive_bw_kbps,
+                    help=('Receive bandwidth in kilobit/s. Default: %default'))
+  parser.add_option('-s', '--send-bw', type='int',
+                    default=_DEFAULT_PRESET.send_bw_kbps,
+                    help=('Send bandwidth in kilobit/s. Default: %default'))
+  parser.add_option('-d', '--delay', type='int',
+                    default=_DEFAULT_PRESET.delay_ms,
+                    help=('Delay in ms. Default: %default'))
+  parser.add_option('-l', '--packet-loss', type='float',
+                    default=_DEFAULT_PRESET.packet_loss_percent,
+                    help=('Packet loss in %. Default: %default'))
+  parser.add_option('-q', '--queue', type='int',
+                    default=_DEFAULT_PRESET.queue_slots,
+                    help=('Queue size as number of slots. Default: %default'))
+  parser.add_option('--port-range', default='%s,%s' % _DEFAULT_PORT_RANGE,
+                    help=('Range of ports for constrained network. Specify as '
+                          'two comma separated integers. Default: %default'))
+  parser.add_option('--target-ip', default=None,
+                    help=('The interface IP address to apply the rules for. '
+                          'Default: the external facing interface IP address.'))
+  parser.add_option('-v', '--verbose', action='store_true', default=False,
+                    help=('Turn on verbose output. Will print all \'ipfw\' '
+                          'commands that are executed.'))
+
+  options = parser.parse_args()[0]
+
+  # Find preset by ID, if specified:
+  if options.preset and not _PRESETS_DICT.has_key(options.preset):
+    parser.error('Invalid preset: %s' % options.preset)
+
+  # Simple validation of the IP address, if supplied:
+  if options.target_ip:
+    try:
+      socket.inet_aton(options.target_ip)
+    except socket.error:
+      parser.error('Invalid IP address specified: %s' % options.target_ip)
+
+  # Convert port range into the desired tuple format.
+  try:
+    if isinstance(options.port_range, str):
+      options.port_range = tuple(int(port) for port in
+                                 options.port_range.split(','))
+      if len(options.port_range) != 2:
+        parser.error('Invalid port range specified, please specify two '
+                     'integers separated by a comma.')
+  except ValueError:
+    parser.error('Invalid port range specified.')
+
+  _set_logger(options.verbose)
+  return options
+
+
+def _set_logger(verbose):
+  """Setup logging."""
+  log_level = _DEFAULT_LOG_LEVEL
+  if verbose:
+    log_level = logging.DEBUG
+  logging.basicConfig(level=log_level, format='%(message)s')
+
+
+def _main():
+  """Checks arguments, permissions and runs a network emulation."""
+  if os.name != 'posix':
+    print >> sys.stderr, 'This script is only supported on Linux and Mac.'
+    return 1
+
+  options = _parse_args()
+
+  # Build a configuration object. Override any preset configuration settings if
+  # a value of a setting was also given as a flag.
+  connection_config = _PRESETS_DICT[options.preset]
+  if options.receive_bw:
+    connection_config.receive_bw_kbps = options.receive_bw
+  if options.send_bw:
+    connection_config.send_bw_kbps = options.send_bw
+  if options.delay:
+    connection_config.delay_ms = options.delay
+  if options.packet_loss:
+    connection_config.packet_loss_percent = options.packet_loss
+  if options.queue:
+    connection_config.queue_slots = options.queue
+
+  emulator = network_emulator.NetworkEmulator(connection_config,
+                                              options.port_range)
+  try:
+    emulator.check_permissions()
+  except network_emulator.NetworkEmulatorError as e:
+    logging.error('Error: %s\n\nCause: %s', e.msg, e.error)
+    return -1
+
+  if not options.target_ip:
+    external_ip = _get_external_ip()
+  else:
+    external_ip = options.target_ip
+
+  logging.info('Constraining traffic to/from IP: %s', external_ip)
+  try:
+    emulator.emulate(external_ip)
+    logging.info('Started network emulation with the following configuration:\n'
+                 '  Receive bandwidth: %s kbps (%s kB/s)\n'
+                 '  Send bandwidth   : %s kbps (%s kB/s)\n'
+                 '  Delay            : %s ms\n'
+                 '  Packet loss      : %s %%\n'
+                 '  Queue slots      : %s',
+                 connection_config.receive_bw_kbps,
+                 connection_config.receive_bw_kbps/8,
+                 connection_config.send_bw_kbps,
+                 connection_config.send_bw_kbps/8,
+                 connection_config.delay_ms,
+                 connection_config.packet_loss_percent,
+                 connection_config.queue_slots)
+    logging.info('Affected traffic: IP traffic on ports %s-%s',
+                 options.port_range[0], options.port_range[1])
+    raw_input('Press Enter to abort Network Emulation...')
+    logging.info('Flushing all Dummynet rules...')
+    emulator.cleanup()
+    logging.info('Completed Network Emulation.')
+    return 0
+  except network_emulator.NetworkEmulatorError as e:
+    logging.error('Error: %s\n\nCause: %s', e.msg, e.error)
+    return -2
+
+if __name__ == '__main__':
+  sys.exit(_main())
diff --git a/tools/network_emulator/network_emulator.py b/tools/network_emulator/network_emulator.py
new file mode 100644
index 0000000..2876939
--- /dev/null
+++ b/tools/network_emulator/network_emulator.py
@@ -0,0 +1,178 @@
+#!/usr/bin/env python
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Script for constraining traffic on the local machine."""
+
+import logging
+import os
+import subprocess
+import sys
+
+
+class NetworkEmulatorError(BaseException):
+  """Exception raised for errors in the network emulator.
+
+  Attributes:
+    msg: User defined error message.
+    cmd: Command for which the exception was raised.
+    returncode: Return code of running the command.
+    stdout: Output of running the command.
+    stderr: Error output of running the command.
+  """
+
+  def __init__(self, msg, cmd=None, returncode=None, output=None,
+               error=None):
+    BaseException.__init__(self, msg)
+    self.msg = msg
+    self.cmd = cmd
+    self.returncode = returncode
+    self.output = output
+    self.error = error
+
+
+class NetworkEmulator(object):
+  """A network emulator that can constrain the network using Dummynet."""
+
+  def __init__(self, connection_config, port_range):
+    """Constructor.
+
+    Args:
+        connection_config: A config.ConnectionConfig object containing the
+            characteristics for the connection to be emulation.
+        port_range: Tuple containing two integers defining the port range.
+    """
+    self._pipe_counter = 0
+    self._rule_counter = 0
+    self._port_range = port_range
+    self._connection_config = connection_config
+
+  def emulate(self, target_ip):
+    """Starts a network emulation by setting up Dummynet rules.
+
+    Args:
+        target_ip: The IP address of the interface that shall be that have the
+            network constraints applied to it.
+    """
+    receive_pipe_id = self._create_dummynet_pipe(
+        self._connection_config.receive_bw_kbps,
+        self._connection_config.delay_ms,
+        self._connection_config.packet_loss_percent,
+        self._connection_config.queue_slots)
+    logging.debug('Created receive pipe: %s', receive_pipe_id)
+    send_pipe_id = self._create_dummynet_pipe(
+        self._connection_config.send_bw_kbps,
+        self._connection_config.delay_ms,
+        self._connection_config.packet_loss_percent,
+        self._connection_config.queue_slots)
+    logging.debug('Created send pipe: %s', send_pipe_id)
+
+    # Adding the rules will start the emulation.
+    incoming_rule_id = self._create_dummynet_rule(receive_pipe_id, 'any',
+                                                  target_ip, self._port_range)
+    logging.debug('Created incoming rule: %s', incoming_rule_id)
+    outgoing_rule_id = self._create_dummynet_rule(send_pipe_id, target_ip,
+                                                  'any', self._port_range)
+    logging.debug('Created outgoing rule: %s', outgoing_rule_id)
+
+  def check_permissions(self):
+    """Checks if permissions are available to run Dummynet commands.
+
+    Raises:
+      NetworkEmulatorError: If permissions to run Dummynet commands are not
+      available.
+    """
+    if os.geteuid() != 0:
+      self._run_shell_command(
+          ['sudo', '-n', 'ipfw', '-h'],
+          msg=('Cannot run \'ipfw\' command. This script must be run as '
+               'root or have password-less sudo access to this command.'))
+
+  def cleanup(self):
+    """Stops the network emulation by flushing all Dummynet rules.
+
+    Notice that this will flush any rules that may have been created previously
+    before starting the emulation.
+    """
+    self._run_shell_command(['sudo', 'ipfw', '-f', 'flush'],
+                            'Failed to flush Dummynet rules!')
+
+  def _create_dummynet_rule(self, pipe_id, from_address, to_address,
+                            port_range):
+    """Creates a network emulation rule and returns its ID.
+
+    Args:
+        pipe_id: integer ID of the pipe.
+        from_address: The IP address to match source address. May be an IP or
+          'any'.
+        to_address: The IP address to match destination address. May be an IP or
+          'any'.
+        port_range: The range of ports the rule shall be applied on. Must be
+          specified as a tuple of with two integers.
+    Returns:
+        The ID of the rule, starting at 100. The rule ID increments with 100 for
+        each rule being added.
+    """
+    self._rule_counter += 100
+    add_part = ['sudo', 'ipfw', 'add', self._rule_counter, 'pipe', pipe_id,
+                'ip', 'from', from_address, 'to', to_address]
+    self._run_shell_command(add_part + ['src-port', '%s-%s' % port_range],
+                            'Failed to add Dummynet src-port rule.')
+    self._run_shell_command(add_part + ['dst-port', '%s-%s' % port_range],
+                            'Failed to add Dummynet dst-port rule.')
+    return self._rule_counter
+
+  def _create_dummynet_pipe(self, bandwidth_kbps, delay_ms, packet_loss_percent,
+                            queue_slots):
+    """Creates a Dummynet pipe and return its ID.
+
+    Args:
+        bandwidth_kbps: Bandwidth.
+        delay_ms: Delay for a one-way trip of a packet.
+        packet_loss_percent: Float value of packet loss, in percent.
+        queue_slots: Size of the queue.
+    Returns:
+        The ID of the pipe, starting at 1.
+    """
+    self._pipe_counter += 1
+    cmd = ['sudo', 'ipfw', 'pipe', self._pipe_counter, 'config',
+           'bw', str(bandwidth_kbps/8) + 'KByte/s',
+           'delay', '%sms' % delay_ms,
+           'plr', (packet_loss_percent/100.0),
+           'queue', queue_slots]
+    error_message = 'Failed to create Dummynet pipe. '
+    if sys.platform.startswith('linux'):
+      error_message += ('Make sure you have loaded the ipfw_mod.ko module to '
+                        'your kernel (sudo insmod /path/to/ipfw_mod.ko)')
+    self._run_shell_command(cmd, error_message)
+    return self._pipe_counter
+
+  def _run_shell_command(self, command, msg=None):
+    """Executes a command.
+
+    Args:
+      command: Command list to execute.
+      msg: Message describing the error in case the command fails.
+
+    Returns:
+      The standard output from running the command.
+
+    Raises:
+      NetworkEmulatorError: If command fails. Message is set by the msg
+        parameter.
+    """
+    cmd_list = [str(x) for x in command]
+    cmd = ' '.join(cmd_list)
+    logging.debug('Running command: %s', cmd)
+
+    process = subprocess.Popen(cmd_list, stdout=subprocess.PIPE,
+                               stderr=subprocess.PIPE)
+    output, error = process.communicate()
+    if process.returncode != 0:
+      raise NetworkEmulatorError(msg, cmd, process.returncode, output, error)
+    return output.strip()
diff --git a/tools/python_charts/OWNERS b/tools/python_charts/OWNERS
new file mode 100644
index 0000000..0428a4a
--- /dev/null
+++ b/tools/python_charts/OWNERS
@@ -0,0 +1 @@
+kjellander@webrtc.org
diff --git a/tools/python_charts/README b/tools/python_charts/README
new file mode 100644
index 0000000..483c402
--- /dev/null
+++ b/tools/python_charts/README
@@ -0,0 +1,41 @@
+This file describes how to setup Eclipse and then the Python Charts project
+
+Setup Eclipse
+-------------
+These instructions were tested on Linux, but are very similar for Windows and
+Mac.
+1. Ensure you have Python 2.x installed
+2. Download and install Google App Engine SDK for Python from 
+   http://code.google.com/appengine/downloads.html
+3. Note which location you put App Engine in, as this will be needed later on.
+4. Download Eclipse from http://www.eclipse.org. Any distribution will probably
+   do, but if you're going to do mainly web development, you might pick Eclipse
+   IDE for JavaScript Web Developers
+5. Install the PyDev plugin using the Eclipse update site mentioned at 
+   http://pydev.org/download.html
+6. Install the Google Plugin for Eclipse: http://code.google.com/eclipse/
+
+Setup the project
+-----------------
+Generic instructions are available at
+http://code.google.com/appengine/docs/python/gettingstarted/ but the following
+should be enough:
+1. Launch Eclipse and create a workspace
+2. Create a new PyDev Project
+3. In the PyDev Project wizard, uncheck the "Use Default" checkbox for Project
+   contents and browse to your tools/python_charts directory.
+4. Enter a project name. We'll assume PythonCharts in the examples below.
+5. In the radio button of the lower part of the window, select
+   "Add project directory to the PYTHONPATH"
+6. Click Finish
+7. Select the Run > Run Configuration… menu item
+8. Create a new "Python Run" configuration
+9. Select your Python Charts project as project
+10. As Main Module, enter the path to your dev_appserver.py, which is a part
+    of your App Engine installation,
+    e.g. /usr/local/google_appengine/dev_appserver.py
+11. At the Arguments tab, enter the location of your project root.
+    Using Eclipse variables if your project name is PythonCharts:
+    ${workspace_loc:PythonCharts}
+12. Launch the development app server by clicking the Run button.
+13. Launch a browser and go to http://localhost:8080
diff --git a/tools/python_charts/app.yaml b/tools/python_charts/app.yaml
new file mode 100644
index 0000000..ace1b51
--- /dev/null
+++ b/tools/python_charts/app.yaml
@@ -0,0 +1,9 @@
+application: webrtc-python-charts
+version: 1
+runtime: python
+api_version: 1
+
+handlers:
+
+- url: /*
+  script: webrtc/main.py
\ No newline at end of file
diff --git a/tools/python_charts/data/vp8_hw.py b/tools/python_charts/data/vp8_hw.py
new file mode 100644
index 0000000..b8c6cc0
--- /dev/null
+++ b/tools/python_charts/data/vp8_hw.py
@@ -0,0 +1,49 @@
+# Sample output from the video_quality_measurment program, included only for
+# reference. Geneate your own by running with the --python flag and then change
+# the filenames in main.py
+test_configuration = [{'name': 'name',                      'value': 'VP8 hardware test'},
+{'name': 'description',               'value': ''},
+{'name': 'test_number',               'value': '0'},
+{'name': 'input_filename',            'value': 'foreman_cif.yuv'},
+{'name': 'output_filename',           'value': 'foreman_cif_out.yuv'},
+{'name': 'output_dir',                'value': '.'},
+{'name': 'packet_size_in_bytes',      'value': '1500'},
+{'name': 'max_payload_size_in_bytes', 'value': '1440'},
+{'name': 'packet_loss_mode',          'value': 'Uniform'},
+{'name': 'packet_loss_probability',   'value': '0.000000'},
+{'name': 'packet_loss_burst_length',  'value': '1'},
+{'name': 'exclude_frame_types',       'value': 'ExcludeOnlyFirstKeyFrame'},
+{'name': 'frame_length_in_bytes',     'value': '152064'},
+{'name': 'use_single_core',           'value': 'False'},
+{'name': 'keyframe_interval;',        'value': '0'},
+{'name': 'video_codec_type',          'value': 'VP8'},
+{'name': 'width',                     'value': '352'},
+{'name': 'height',                    'value': '288'},
+{'name': 'bit_rate_in_kbps',          'value': '500'},
+]
+frame_data_types = {'frame_number': ('number', 'Frame number'),
+'encoding_successful': ('boolean', 'Encoding successful?'),
+'decoding_successful': ('boolean', 'Decoding successful?'),
+'encode_time': ('number', 'Encode time (us)'),
+'decode_time': ('number', 'Decode time (us)'),
+'encode_return_code': ('number', 'Encode return code'),
+'decode_return_code': ('number', 'Decode return code'),
+'bit_rate': ('number', 'Bit rate (kbps)'),
+'encoded_frame_length': ('number', 'Encoded frame length (bytes)'),
+'frame_type': ('string', 'Frame type'),
+'packets_dropped': ('number', 'Packets dropped'),
+'total_packets': ('number', 'Total packets'),
+'ssim': ('number', 'SSIM'),
+'psnr': ('number', 'PSNR (dB)'),
+}
+frame_data = [{'frame_number': 0, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 94676, 'decode_time': 37942, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 1098, 'encoded_frame_length': 4579, 'frame_type': 'Other', 'packets_dropped': 0, 'total_packets': 4, 'ssim': 0.910364, 'psnr': 35.067258},
+{'frame_number': 1, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 244007, 'decode_time': 39421, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 306, 'encoded_frame_length': 1277, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 1, 'ssim': 0.911859, 'psnr': 35.115193},
+{'frame_number': 2, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 240508, 'decode_time': 38918, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 330, 'encoded_frame_length': 1379, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 1, 'ssim': 0.913597, 'psnr': 35.181604},
+{'frame_number': 3, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 243449, 'decode_time': 39664, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 298, 'encoded_frame_length': 1242, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 1, 'ssim': 0.912378, 'psnr': 35.164710},
+{'frame_number': 4, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 248024, 'decode_time': 39115, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 332, 'encoded_frame_length': 1385, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 1, 'ssim': 0.911471, 'psnr': 35.109488},
+{'frame_number': 5, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 246910, 'decode_time': 39146, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 416, 'encoded_frame_length': 1734, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 2, 'ssim': 0.915231, 'psnr': 35.392300},
+{'frame_number': 6, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 242953, 'decode_time': 38827, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 279, 'encoded_frame_length': 1165, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 1, 'ssim': 0.916130, 'psnr': 35.452889},
+{'frame_number': 7, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 247343, 'decode_time': 41429, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 393, 'encoded_frame_length': 1639, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 2, 'ssim': 0.919356, 'psnr': 35.647128},
+{'frame_number': 8, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 249529, 'decode_time': 40329, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 487, 'encoded_frame_length': 2033, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 2, 'ssim': 0.924705, 'psnr': 36.179837},
+{'frame_number': 9, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 249408, 'decode_time': 41716, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 583, 'encoded_frame_length': 2433, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 2, 'ssim': 0.928433, 'psnr': 36.589875},
+]
diff --git a/tools/python_charts/data/vp8_sw.py b/tools/python_charts/data/vp8_sw.py
new file mode 100644
index 0000000..0f29137
--- /dev/null
+++ b/tools/python_charts/data/vp8_sw.py
@@ -0,0 +1,49 @@
+# Sample output from the video_quality_measurment program, included only for
+# reference. Geneate your own by running with the --python flag and then change
+# the filenames in main.py
+test_configuration = [{'name': 'name',                      'value': 'VP8 software test'},
+{'name': 'description',               'value': ''},
+{'name': 'test_number',               'value': '0'},
+{'name': 'input_filename',            'value': 'foreman_cif.yuv'},
+{'name': 'output_filename',           'value': 'foreman_cif_out.yuv'},
+{'name': 'output_dir',                'value': '.'},
+{'name': 'packet_size_in_bytes',      'value': '1500'},
+{'name': 'max_payload_size_in_bytes', 'value': '1440'},
+{'name': 'packet_loss_mode',          'value': 'Uniform'},
+{'name': 'packet_loss_probability',   'value': '0.000000'},
+{'name': 'packet_loss_burst_length',  'value': '1'},
+{'name': 'exclude_frame_types',       'value': 'ExcludeOnlyFirstKeyFrame'},
+{'name': 'frame_length_in_bytes',     'value': '152064'},
+{'name': 'use_single_core',           'value': 'False'},
+{'name': 'keyframe_interval;',        'value': '0'},
+{'name': 'video_codec_type',          'value': 'VP8'},
+{'name': 'width',                     'value': '352'},
+{'name': 'height',                    'value': '288'},
+{'name': 'bit_rate_in_kbps',          'value': '500'},
+]
+frame_data_types = {'frame_number': ('number', 'Frame number'),
+'encoding_successful': ('boolean', 'Encoding successful?'),
+'decoding_successful': ('boolean', 'Decoding successful?'),
+'encode_time': ('number', 'Encode time (us)'),
+'decode_time': ('number', 'Decode time (us)'),
+'encode_return_code': ('number', 'Encode return code'),
+'decode_return_code': ('number', 'Decode return code'),
+'bit_rate': ('number', 'Bit rate (kbps)'),
+'encoded_frame_length': ('number', 'Encoded frame length (bytes)'),
+'frame_type': ('string', 'Frame type'),
+'packets_dropped': ('number', 'Packets dropped'),
+'total_packets': ('number', 'Total packets'),
+'ssim': ('number', 'SSIM'),
+'psnr': ('number', 'PSNR (dB)'),
+}
+frame_data = [{'frame_number': 0, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 12427, 'decode_time': 4403, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 2270, 'encoded_frame_length': 9459, 'frame_type': 'Other', 'packets_dropped': 0, 'total_packets': 7, 'ssim': 0.947050, 'psnr': 38.332820},
+{'frame_number': 1, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 3292, 'decode_time': 821, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 88, 'encoded_frame_length': 368, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 1, 'ssim': 0.927272, 'psnr': 35.883510},
+{'frame_number': 2, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 4295, 'decode_time': 902, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 130, 'encoded_frame_length': 544, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 1, 'ssim': 0.920539, 'psnr': 35.457107},
+{'frame_number': 3, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 3880, 'decode_time': 767, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 171, 'encoded_frame_length': 714, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 1, 'ssim': 0.917434, 'psnr': 35.389298},
+{'frame_number': 4, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 4471, 'decode_time': 909, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 248, 'encoded_frame_length': 1035, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 1, 'ssim': 0.918892, 'psnr': 35.570229},
+{'frame_number': 5, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 4447, 'decode_time': 976, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 269, 'encoded_frame_length': 1123, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 1, 'ssim': 0.920609, 'psnr': 35.769663},
+{'frame_number': 6, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 4432, 'decode_time': 891, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 271, 'encoded_frame_length': 1132, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 1, 'ssim': 0.922672, 'psnr': 35.913519},
+{'frame_number': 7, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 5026, 'decode_time': 1068, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 366, 'encoded_frame_length': 1529, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 2, 'ssim': 0.925505, 'psnr': 36.246713},
+{'frame_number': 8, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 4877, 'decode_time': 1051, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 369, 'encoded_frame_length': 1538, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 2, 'ssim': 0.926122, 'psnr': 36.305984},
+{'frame_number': 9, 'encoding_successful': True , 'decoding_successful': True , 'encode_time': 4712, 'decode_time': 1087, 'encode_return_code': 0, 'decode_return_code': 0, 'bit_rate': 406, 'encoded_frame_length': 1692, 'frame_type': 'Delta', 'packets_dropped': 0, 'total_packets': 2, 'ssim': 0.927183, 'psnr': 36.379735},
+]
diff --git a/tools/python_charts/gviz_api.py b/tools/python_charts/gviz_api.py
new file mode 120000
index 0000000..c9dca90
--- /dev/null
+++ b/tools/python_charts/gviz_api.py
@@ -0,0 +1 @@
+../../third_party/google-visualization-python/gviz_api.py
\ No newline at end of file
diff --git a/tools/python_charts/templates/chart_page_template.html b/tools/python_charts/templates/chart_page_template.html
new file mode 100644
index 0000000..1cb3951
--- /dev/null
+++ b/tools/python_charts/templates/chart_page_template.html
@@ -0,0 +1,90 @@
+<html>
+  <!--
+  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+
+  Use of this source code is governed by a BSD-style license
+  that can be found in the LICENSE file in the root of the source
+  tree. An additional intellectual property rights grant can be found
+  in the file PATENTS.  All contributing project authors may
+  be found in the AUTHORS file in the root of the source tree.
+
+  Template file to be used to generate Charts for Video Quality Metrics.
+   -->
+  <head>
+    <link href="http://code.google.com/css/codesite.pack.04102009.css"
+          rel="stylesheet" type="text/css" />
+  </head>
+  <script src="https://www.google.com/jsapi" type="text/javascript"></script>
+  <script>
+    google.load('visualization', '1', {packages:['table', 'corechart']});
+
+    google.setOnLoadCallback(drawTable);
+    function drawTable() {
+      /* Build data tables and views */
+      var configurations_data_table =
+        new google.visualization.DataTable(%(json_configurations)s);
+      var ssim_data_table =
+        new google.visualization.DataTable(%(json_ssim_data)s);
+      var psnr_data_table =
+        new google.visualization.DataTable(%(json_psnr_data)s);
+      var packet_loss_data_table =
+        new google.visualization.DataTable(%(json_packet_loss_data)s);
+      var bit_rate_data_table =
+        new google.visualization.DataTable(%(json_bit_rate_data)s);
+
+      /* Display tables and charts */
+      var configurations_table = new google.visualization.Table(
+        document.getElementById('table_div_configurations'));
+      configurations_table.draw(configurations_data_table, {
+        height: 200
+      });
+
+      var ssim_chart = new google.visualization.LineChart(
+        document.getElementById('table_div_ssim'));
+      ssim_chart.draw(ssim_data_table, {
+        colors: ['blue', 'red', 'lightblue', 'pink'],
+        vAxis: {title: 'SSIM'},
+        hAxis: {title: 'Frame'},
+        width: 1200, height: 300,
+      });
+
+      var psnr_chart = new google.visualization.LineChart(
+        document.getElementById('table_div_psnr'));
+      psnr_chart.draw(psnr_data_table, {
+        colors: ['blue', 'red', 'lightblue', 'pink'],
+        vAxis: {title: 'PSNR (dB)'},
+        hAxis: {title: 'Frame'},
+        width: 1200, height: 300,
+      });
+
+      var packet_loss_chart = new google.visualization.LineChart(
+        document.getElementById('table_div_packet_loss'));
+      packet_loss_chart.draw(packet_loss_data_table, {
+        colors: ['blue', 'red', 'lightblue', 'pink'],
+        vAxis: {title: 'Packets dropped'},
+        hAxis: {title: 'Frame'},
+        width: 1200, height: 300,
+      });
+
+      var bit_rate_chart = new google.visualization.LineChart(
+        document.getElementById('table_div_bit_rate'));
+      bit_rate_chart.draw(bit_rate_data_table, {
+        colors: ['blue', 'red', 'lightblue', 'pink', 'green'],
+        vAxis: {title: 'Bit rate'},
+        hAxis: {title: 'Frame'},
+        width: 1200, height: 300,
+      });
+    }
+  </script>
+  <body>
+    <h3>Test Configurations:</h3>
+    <div id="table_div_configurations"></div>
+    <h3>Messages:</h3>
+    <pre>%(messages)s</pre>
+    <h3>Metrics measured per frame:</h3>
+    <div id="table_div_ssim"></div>
+    <div id="table_div_psnr"></div>
+    <div id="table_div_packet_loss"></div>
+    <div id="table_div_bit_rate"></div>
+  </body>
+</html>
diff --git a/tools/python_charts/webrtc/__init__.py b/tools/python_charts/webrtc/__init__.py
new file mode 100644
index 0000000..c1caaa2
--- /dev/null
+++ b/tools/python_charts/webrtc/__init__.py
@@ -0,0 +1,8 @@
+#!/usr/bin/env python
+#  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
\ No newline at end of file
diff --git a/tools/python_charts/webrtc/data_helper.py b/tools/python_charts/webrtc/data_helper.py
new file mode 100644
index 0000000..fce949f
--- /dev/null
+++ b/tools/python_charts/webrtc/data_helper.py
@@ -0,0 +1,183 @@
+#!/usr/bin/env python
+#  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+__author__ = 'kjellander@webrtc.org (Henrik Kjellander)'
+
+class DataHelper(object):
+  """
+  Helper class for managing table data.
+  This class does not verify the consistency of the data tables sent into it.
+  """
+
+  def __init__(self, data_list, table_description, names_list, messages):
+    """ Initializes the DataHelper with data.
+    
+    Args:
+      data_list: List of one or more data lists in the format that the 
+        Google Visualization Python API expects (list of dictionaries, one
+        per row of data). See the gviz_api.DataTable documentation for more 
+        info.
+      table_description: dictionary describing the data types of all
+        columns in the data lists, as defined in the gviz_api.DataTable
+        documentation.
+      names_list: List of strings of what we're going to name the data
+        columns after. Usually different runs of data collection. 
+      messages: List of strings we might append error messages to.
+    """
+    self.data_list = data_list
+    self.table_description = table_description
+    self.names_list = names_list
+    self.messages = messages
+    self.number_of_datasets = len(data_list)
+    self.number_of_frames = len(data_list[0])
+    
+  def CreateData(self, field_name, start_frame=0, end_frame=0):
+    """ Creates a data structure for a specified data field.
+    
+    Creates a data structure (data type description dictionary and a list 
+    of data dictionaries) to be used with the Google Visualization Python 
+    API. The frame_number column is always present and one column per data
+    set is added and its field name is suffixed by _N where N is the number 
+    of the data set (0, 1, 2...)
+    
+    Args:
+      field_name: String name of the field, must be present in the data
+        structure this DataHelper was created with.
+      start_frame: Frame number to start at (zero indexed). Default: 0.
+      end_frame: Frame number to be the last frame. If zero all frames 
+        will be included. Default: 0.
+        
+    Returns:
+      A tuple containing:
+      - a dictionary describing the columns in the data result_data_table below.
+        This description uses the name for each data set specified by 
+        names_list.  
+        
+        Example with two data sets named 'Foreman' and 'Crew':
+        {
+         'frame_number': ('number', 'Frame number'),
+         'ssim_0': ('number', 'Foreman'),
+         'ssim_1': ('number', 'Crew'),
+         }
+      - a list containing dictionaries (one per row) with the frame_number
+        column and one column of the specified field_name column per data 
+        set. 
+        
+        Example with two data sets named 'Foreman' and 'Crew':
+        [
+         {'frame_number': 0, 'ssim_0': 0.98, 'ssim_1': 0.77 },
+         {'frame_number': 1, 'ssim_0': 0.81, 'ssim_1': 0.53 },
+        ]
+    """
+    
+    # Build dictionary that describes the data types
+    result_table_description = {'frame_number': ('string', 'Frame number')} 
+    for dataset_index in range(self.number_of_datasets):
+      column_name = '%s_%s' % (field_name, dataset_index)
+      column_type = self.table_description[field_name][0]
+      column_description = self.names_list[dataset_index]
+      result_table_description[column_name] = (column_type, column_description)
+
+    # Build data table of all the data        
+    result_data_table = []
+    # We're going to have one dictionary per row. 
+    # Create that and copy frame_number values from the first data set
+    for source_row in self.data_list[0]:
+      row_dict = { 'frame_number': source_row['frame_number'] }
+      result_data_table.append(row_dict)
+    
+    # Pick target field data points from the all data tables
+    if end_frame == 0:  # Default to all frames
+      end_frame = self.number_of_frames
+      
+    for dataset_index in range(self.number_of_datasets):
+      for row_number in range(start_frame, end_frame):
+        column_name = '%s_%s' % (field_name, dataset_index)
+        # Stop if any of the data sets are missing the frame
+        try:
+          result_data_table[row_number][column_name] = \
+          self.data_list[dataset_index][row_number][field_name]
+        except IndexError:
+          self.messages.append("Couldn't find frame data for row %d "
+          "for %s" % (row_number, self.names_list[dataset_index])) 
+          break
+    return result_table_description, result_data_table
+
+  def GetOrdering(self, table_description):
+    """ Creates a list of column names, ordered alphabetically except for the
+      frame_number column which always will be the first column.
+     
+      Args:
+        table_description: A dictionary of column definitions as defined by the
+          gviz_api.DataTable documentation.
+      Returns:
+        A list of column names, where frame_number is the first and the
+        remaining columns are sorted alphabetically.
+    """
+    # The JSON data representation generated from gviz_api.DataTable.ToJSon()
+    # must have frame_number as its first column in order for the chart to 
+    # use it as it's X-axis value series.
+    # gviz_api.DataTable orders the columns by name by default, which will 
+    # be incorrect if we have column names that are sorted before frame_number
+    # in our data table.
+    columns_ordering = ['frame_number']
+    # add all other columns:
+    for column in sorted(table_description.keys()):
+      if column != 'frame_number':
+        columns_ordering.append(column)
+    return columns_ordering
+  
+  def CreateConfigurationTable(self, configurations):
+    """ Combines multiple test data configurations for display.
+
+    Args:
+      configurations: List of one ore more configurations. Each configuration
+      is required to be a list of dictionaries with two keys: 'name' and
+      'value'.
+      Example of a single configuration:
+      [
+        {'name': 'name', 'value': 'VP8 software'},
+        {'name': 'test_number', 'value': '0'},
+        {'name': 'input_filename', 'value': 'foreman_cif.yuv'},
+      ]
+    Returns:
+      A tuple containing:
+      - a dictionary describing the columns in the configuration table to be
+        displayed. All columns will have string as data type.
+        Example:
+        {
+          'name': 'string',
+          'test_number': 'string',
+          'input_filename': 'string',
+         }
+      - a list containing dictionaries (one per configuration) with the
+        configuration column names mapped to the value for each test run:
+
+        Example matching the columns above:
+        [
+         {'name': 'VP8 software',
+          'test_number': '12',
+          'input_filename': 'foreman_cif.yuv' },
+         {'name': 'VP8 hardware',
+          'test_number': '5',
+          'input_filename': 'foreman_cif.yuv' },
+        ]
+    """
+    result_description = {}
+    result_data = []
+
+    for configuration in configurations:
+      data = {}
+      result_data.append(data)
+      for dict in configuration:
+        name = dict['name']
+        value = dict['value']
+        result_description[name] = 'string'
+        data[name] = value
+    return result_description, result_data
diff --git a/tools/python_charts/webrtc/data_helper_test.py b/tools/python_charts/webrtc/data_helper_test.py
new file mode 100644
index 0000000..6282f7b
--- /dev/null
+++ b/tools/python_charts/webrtc/data_helper_test.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+#  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+__author__ = 'kjellander@webrtc.org (Henrik Kjellander)'
+
+import unittest
+import webrtc.data_helper
+
+class Test(unittest.TestCase):
+
+  def setUp(self):
+    # Simulate frame data from two different test runs, with 2 frames each.
+    self.frame_data_0 = [{'frame_number': 0, 'ssim': 0.5, 'psnr': 30.5}, 
+                         {'frame_number': 1, 'ssim': 0.55, 'psnr': 30.55}]
+    self.frame_data_1 = [{'frame_number': 0, 'ssim': 0.6, 'psnr': 30.6},
+                         {'frame_number': 0, 'ssim': 0.66, 'psnr': 30.66}]
+    self.all_data = [ self.frame_data_0, self.frame_data_1 ]
+    
+    # Test with frame_number column in a non-first position sice we need to 
+    # support reordering that to be able to use the gviz_api as we want.
+    self.type_description = {
+                             'ssim': ('number', 'SSIM'),
+                             'frame_number': ('number', 'Frame number'),
+                             'psnr': ('number', 'PSRN'),
+    }
+    self.names = ["Test 0", "Test 1"]
+    self.configurations = [
+     [{'name': 'name', 'value': 'Test 0'},
+      {'name': 'test_number', 'value': '13'},
+      {'name': 'input_filename', 'value': 'foreman_cif.yuv'},
+     ],
+     [{'name': 'name', 'value': 'Test 1'},
+      {'name': 'test_number', 'value': '5'},
+      {'name': 'input_filename', 'value': 'foreman_cif.yuv'},
+     ],
+    ]
+
+  def testCreateData(self):
+    messages = []
+    helper = webrtc.data_helper.DataHelper(self.all_data, self.type_description,
+                                           self.names, messages)
+    description, data_table = helper.CreateData('ssim')
+    self.assertEqual(3, len(description))
+    self.assertTrue('frame_number' in description)
+    self.assertTrue('ssim_0' in description)
+    self.assertTrue('number' in description['ssim_0'][0])
+    self.assertTrue('Test 0' in description['ssim_0'][1])
+    self.assertTrue('ssim_1' in description)
+    self.assertTrue('number' in description['ssim_1'][0])
+    self.assertTrue('Test 1' in description['ssim_1'][1])
+
+    self.assertEqual(0, len(messages))
+
+    self.assertEquals(2, len(data_table))
+    row = data_table[0]
+    self.assertEquals(0, row['frame_number'])
+    self.assertEquals(0.5, row['ssim_0'])
+    self.assertEquals(0.6, row['ssim_1'])
+    row = data_table[1]
+    self.assertEquals(1, row['frame_number'])
+    self.assertEquals(0.55, row['ssim_0'])
+    self.assertEquals(0.66, row['ssim_1'])
+    
+    description, data_table = helper.CreateData('psnr') 
+    self.assertEqual(3, len(description))
+    self.assertTrue('frame_number' in description)
+    self.assertTrue('psnr_0' in description)
+    self.assertTrue('psnr_1' in description) 
+    self.assertEqual(0, len(messages)) 
+
+    self.assertEquals(2, len(data_table))
+    row = data_table[0]
+    self.assertEquals(0, row['frame_number'])
+    self.assertEquals(30.5, row['psnr_0'])
+    self.assertEquals(30.6, row['psnr_1'])
+    row = data_table[1]
+    self.assertEquals(1, row['frame_number'])
+    self.assertEquals(30.55, row['psnr_0'])
+    self.assertEquals(30.66, row['psnr_1'])
+  
+  def testGetOrdering(self):
+    """ Tests that the ordering help method returns a list with frame_number 
+       first and the rest sorted alphabetically """
+    messages = []
+    helper = webrtc.data_helper.DataHelper(self.all_data, self.type_description,
+                                           self.names, messages)
+    description, data_table = helper.CreateData('ssim')
+    columns = helper.GetOrdering(description)
+    self.assertEqual(3, len(columns))
+    self.assertEqual(0, len(messages))
+    self.assertEqual('frame_number', columns[0])
+    self.assertEqual('ssim_0', columns[1])
+    self.assertEqual('ssim_1', columns[2])
+    
+  def testCreateConfigurationTable(self):
+    messages = []
+    helper = webrtc.data_helper.DataHelper(self.all_data, self.type_description,
+                                           self.names, messages)
+    description, data = helper.CreateConfigurationTable(self.configurations)
+    self.assertEqual(3, len(description))  # 3 columns
+    self.assertEqual(2, len(data))  # 2 data sets
+    self.assertTrue(description.has_key('name'))
+    self.assertTrue(description.has_key('test_number'))
+    self.assertTrue(description.has_key('input_filename'))
+    self.assertEquals('Test 0', data[0]['name'])
+    self.assertEquals('Test 1', data[1]['name'])
+    
+if __name__ == "__main__":
+  unittest.main()
diff --git a/tools/python_charts/webrtc/main.py b/tools/python_charts/webrtc/main.py
new file mode 100644
index 0000000..82d8831
--- /dev/null
+++ b/tools/python_charts/webrtc/main.py
@@ -0,0 +1,157 @@
+#!/usr/bin/env python
+#  Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+__author__ = 'kjellander@webrtc.org (Henrik Kjellander)'
+
+import os
+import gviz_api
+import webrtc.data_helper
+
+def main():
+  """
+  This Python script displays a web page with test created with the 
+  video_quality_measurement program, which is a tool in WebRTC.
+  
+  The script requires on two external files and one Python library:
+  - A HTML template file with layout and references to the json variables 
+    defined in this script
+  - A data file in Python format, containing the following:
+    - test_configuration - a dictionary of test configuration names and values.
+    - frame_data_types - a dictionary that maps the different metrics to their 
+      data types.
+    - frame_data - a list of dictionaries where each dictionary maps a metric to 
+      it's value. 
+  - The gviz_api.py of the Google Visualization Python API, available at
+    http://code.google.com/p/google-visualization-python/
+  
+  The HTML file is shipped with the script, while the data file must be 
+  generated by running video_quality_measurement with the --python flag
+  specified.
+  """
+  print 'Content-type: text/html\n' # the newline is required!
+
+  page_template_filename = '../templates/chart_page_template.html'
+  # The data files must be located in the project tree for app engine being
+  # able to access them.
+  data_filenames = [ '../data/vp8_sw.py', '../data/vp8_hw.py' ]
+  # Will contain info/error messages to be displayed on the resulting page.
+  messages = []
+  # Load the page HTML template.
+  try:
+    f = open(page_template_filename)
+    page_template = f.read()
+    f.close()
+  except IOError as e:
+    ShowErrorPage('Cannot open page template file: %s<br>Details: %s' % 
+                  (page_template_filename, e))
+    return
+  
+  # Read data from external Python script files. First check that they exist.
+  for filename in data_filenames:
+    if not os.path.exists(filename):
+      messages.append('Cannot open data file: %s' % filename)
+      data_filenames.remove(filename)
+  
+  # Read data from all existing input files.
+  data_list = []
+  test_configurations = []
+  names = []
+  
+  for filename in data_filenames:
+    read_vars = {} # empty dictionary to load the data into.
+    execfile(filename, read_vars, read_vars)
+    
+    test_configuration = read_vars['test_configuration']
+    table_description = read_vars['frame_data_types']
+    table_data = read_vars['frame_data']
+    
+    # Verify the data in the file loaded properly.
+    if not table_description or not table_data:
+      messages.append('Invalid input file: %s. Missing description list or '
+                      'data dictionary variables.' % filename)
+      continue
+    
+    # Frame numbers appear as number type in the data, but Chart API requires
+    # values of the X-axis to be of string type.
+    # Change the frame_number column data type: 
+    table_description['frame_number'] = ('string', 'Frame number')
+    # Convert all the values to string types: 
+    for row in table_data:
+      row['frame_number'] = str(row['frame_number'])
+    
+    # Store the unique data from this file in the high level lists.
+    test_configurations.append(test_configuration)
+    data_list.append(table_data)
+    # Name of the test run must be present.
+    test_name = FindConfiguration(test_configuration, 'name')
+    if not test_name:
+      messages.append('Invalid input file: %s. Missing configuration key ' 
+                      '"name"', filename)
+      continue
+    names.append(test_name)
+    
+  # Create data helper and build data tables for each graph.
+  helper = webrtc.data_helper.DataHelper(data_list, table_description, 
+                                         names, messages)
+    
+  # Loading it into gviz_api.DataTable objects and create JSON strings.
+  description, data = helper.CreateConfigurationTable(test_configurations)
+  configurations = gviz_api.DataTable(description, data)
+  json_configurations = configurations.ToJSon()
+  
+  description, data = helper.CreateData('ssim')
+  ssim = gviz_api.DataTable(description, data)
+  json_ssim_data = ssim.ToJSon(helper.GetOrdering(description))
+  
+  description, data = helper.CreateData('psnr')
+  psnr = gviz_api.DataTable(description, data)
+  json_psnr_data = psnr.ToJSon(helper.GetOrdering(description))
+  
+  description, data = helper.CreateData('packets_dropped')
+  packet_loss = gviz_api.DataTable(description, data)
+  json_packet_loss_data = packet_loss.ToJSon(helper.GetOrdering(description))  
+  
+  description, data = helper.CreateData('bit_rate')
+  # Add a column of data points for the desired bit rate to be plotted.
+  # (uses test configuration from the last data set, assuming it is the same 
+  # for all of them)
+  desired_bit_rate = FindConfiguration(test_configuration, 'bit_rate_in_kbps')
+  if not desired_bit_rate:
+    ShowErrorPage('Cannot configuration field named "bit_rate_in_kbps"')
+    return
+  desired_bit_rate = int(desired_bit_rate)
+  # Add new column data type description.
+  description['desired_bit_rate'] = ('number', 'Desired bit rate (kbps)')
+  for row in data:
+    row['desired_bit_rate'] = desired_bit_rate
+  bit_rate = gviz_api.DataTable(description, data)
+  json_bit_rate_data = bit_rate.ToJSon(helper.GetOrdering(description))
+
+  # Format the messages list with newlines.
+  messages = '\n'.join(messages)
+  
+  # Put the variables as JSon strings into the template.
+  print page_template % vars()
+
+def FindConfiguration(configuration, name):
+  """ Finds a configuration value using it's name. 
+      Returns the first configuration with a matching name. Returns None if no
+      matching configuration is found. """
+  return_value = None
+  for row in configuration:
+    if row['name'] == name:
+      return_value = row['value']
+      break
+  return return_value
+
+def ShowErrorPage(error_message):
+  print '<html><body>%s</body></html>' % error_message
+  
+if __name__ == '__main__':
+  main()
diff --git a/tools/quality_tracking/OWNERS b/tools/quality_tracking/OWNERS
new file mode 100644
index 0000000..323e8e7
--- /dev/null
+++ b/tools/quality_tracking/OWNERS
@@ -0,0 +1,2 @@
+phoglund@webrtc.org
+kjellander@webrtc.org
diff --git a/tools/quality_tracking/README b/tools/quality_tracking/README
new file mode 100644
index 0000000..faf3e7a
--- /dev/null
+++ b/tools/quality_tracking/README
@@ -0,0 +1,31 @@
+This file describes the coverage tracking script and the coverage dashboard.
+
+ABSTRACT:
+The intention of this small tracking system is to track code coverage data
+over time. Since code coverage is continuously recomputed on the build bots,
+the track_coverage.py script is intended to run on the build bot as a cron job
+and extract the data from there. The dashboard doesn't care how often this
+script runs, but running each hour should be more than enough.
+
+The track_coverage.py script uses OAuth to authenticate itself. In order to do
+this, it needs two files: consumer.secret and access.token. The consumer secret
+is known within the organization and is stored in a plain file on the bot 
+running the scripts (we don't want to check in this secret in the code in the
+public repository). The consumer secret is a plain file with a single line
+containing the secret string.
+
+The access.token file is generated by request_oauth_permission.py. It does this
+by going through the three-legged OAuth authorization process. An administrator
+of the dashboard must approve the request from the script. Once that is done,
+access.token will be written and track_coverage.py will be able to report
+results.
+
+HOW TO RUN LOCALLY:
+Follow the following instructions:
+http://code.google.com/appengine/docs/python/gettingstartedpython27/devenvironment.html
+The dashboard can be started on 127.0.0.1:8080 using the dev_appserver.py script 
+as described in the above URL (and in the following 'hello world' page).
+
+HOW TO DEPLOY:
+Follow the following instructions:
+http://code.google.com/appengine/docs/python/gettingstartedpython27/uploading.html
\ No newline at end of file
diff --git a/tools/quality_tracking/constants.py b/tools/quality_tracking/constants.py
new file mode 100644
index 0000000..6fd570c
--- /dev/null
+++ b/tools/quality_tracking/constants.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Contains tweakable constants for quality dashboard utility scripts."""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+# This identifies our application using the information we got when we
+# registered the application on Google appengine.
+DASHBOARD_SERVER = 'webrtc-dashboard.appspot.com'
+DASHBOARD_SERVER_HTTP = 'http://' + DASHBOARD_SERVER
+CONSUMER_KEY = DASHBOARD_SERVER
+CONSUMER_SECRET_FILE = 'consumer.secret'
+ACCESS_TOKEN_FILE = 'access.token'
+
+# OAuth URL:s.
+REQUEST_TOKEN_URL = DASHBOARD_SERVER_HTTP + '/_ah/OAuthGetRequestToken'
+AUTHORIZE_TOKEN_URL = DASHBOARD_SERVER_HTTP + '/_ah/OAuthAuthorizeToken'
+ACCESS_TOKEN_URL = DASHBOARD_SERVER_HTTP + '/_ah/OAuthGetAccessToken'
+
+# The build master URL.
+BUILD_MASTER_SERVER = 'webrtc-cb-linux-master.cbf.corp.google.com:8010'
+BUILD_MASTER_TRANSPOSED_GRID_URL = '/tgrid'
+
+# Build bot constants.
+BUILD_BOT_COVERAGE_WWW_DIRECTORY = '/var/www/coverage'
+
+# Dashboard data input URLs.
+ADD_COVERAGE_DATA_URL = DASHBOARD_SERVER_HTTP + '/add_coverage_data'
+ADD_BUILD_STATUS_DATA_URL = DASHBOARD_SERVER_HTTP + '/add_build_status_data'
diff --git a/tools/quality_tracking/dashboard/add_build_status_data.py b/tools/quality_tracking/dashboard/add_build_status_data.py
new file mode 100644
index 0000000..d26df6f
--- /dev/null
+++ b/tools/quality_tracking/dashboard/add_build_status_data.py
@@ -0,0 +1,171 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Implements a handler for adding build status data."""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+import datetime
+import logging
+
+from google.appengine.ext import db
+
+import oauth_post_request_handler
+
+VALID_STATUSES = ['OK', 'failed', 'building', 'warnings']
+
+
+class OrphanedBuildStatusesExistException(Exception):
+  pass
+
+
+class BuildStatusRoot(db.Model):
+  """Exists solely to be the root parent for all build status data and to keep
+     track of when the last update was made.
+
+     Since all build status data will refer to this as their parent,
+     we can run transactions on the build status data as a whole.
+  """
+  last_updated_at = db.DateTimeProperty()
+
+
+class BuildStatusData(db.Model):
+  """This represents one build status report from the build bot."""
+  bot_name = db.StringProperty(required=True)
+  revision = db.IntegerProperty(required=True)
+  build_number = db.IntegerProperty(required=True)
+  status = db.StringProperty(required=True)
+
+
+def _ensure_build_status_root_exists():
+  root = db.GqlQuery('SELECT * FROM BuildStatusRoot').get()
+  if not root:
+    # Create a new root, but ensure we don't have any orphaned build statuses
+    # (in that case, we would not have a single entity group as we desire).
+    orphans = db.GqlQuery('SELECT * FROM BuildStatusData').get()
+    if orphans:
+      raise OrphanedBuildStatusesExistException('Parent is gone and there are '
+                                                'orphaned build statuses in '
+                                                'the database!')
+    root = BuildStatusRoot()
+    root.put()
+
+  return root
+
+
+def _filter_oauth_parameters(post_keys):
+  return filter(lambda post_key: not post_key.startswith('oauth_'),
+                post_keys)
+
+
+def _parse_status(build_number_and_status):
+  parsed_status = build_number_and_status.split('--')
+  if len(parsed_status) != 2:
+    raise ValueError('Malformed status string %s.' % build_number_and_status)
+
+  parsed_build_number = int(parsed_status[0])
+  status = parsed_status[1]
+
+  if status not in VALID_STATUSES:
+    raise ValueError('Invalid status in %s.' % build_number_and_status)
+
+  return (parsed_build_number, status)
+
+
+def _parse_name(revision_and_bot_name):
+  parsed_name = revision_and_bot_name.split('--')
+  if len(parsed_name) != 2:
+    raise ValueError('Malformed name string %s.' % revision_and_bot_name)
+
+  revision = parsed_name[0]
+  bot_name = parsed_name[1]
+  if '\n' in bot_name:
+    raise ValueError('Bot name %s can not contain newlines.' % bot_name)
+
+  return (int(revision), bot_name)
+
+
+def _delete_all_with_revision(revision, build_status_root):
+  query_result = db.GqlQuery('SELECT * FROM BuildStatusData '
+                             'WHERE revision = :1 AND ANCESTOR IS :2',
+                             revision, build_status_root)
+  for entry in query_result:
+    entry.delete()
+
+
+class AddBuildStatusData(oauth_post_request_handler.OAuthPostRequestHandler):
+  """Used to report build status data.
+
+     Build status data is reported as a POST request. The POST request, aside
+     from the required oauth_* parameters should contain name-value entries that
+     abide by the following rules:
+
+     1) The name should be on the form <revision>--<bot name>, for instance
+        1568--Win32Release.
+     2) The value should be on the form <build number>--<status>, for instance
+        553--OK, 554--building. The status is permitted to be failed, OK or
+        building.
+
+    Data is keyed by revision. This handler will delete all data from a revision
+    if data with that revision is present in the current update, since we
+    assume that more recent data is always better data. We also assume that
+    an update always has complete information on a revision (e.g. the status
+    for all the bots are reported in each update).
+
+    In particular the revision arrangement solves the problem when the latest
+    revision reports 'building' for a bot. Had we not deleted the old revision
+    we would first store a 'building' status for that bot and revision, and
+    later store a 'OK' or 'failed' status for that bot and revision. This is
+    undesirable since we don't want multiple statuses for one bot-revision
+    combination. Now we will effectively update the bot's status instead.
+  """
+
+  def _parse_and_store_data(self):
+    build_status_root = _ensure_build_status_root_exists()
+    build_status_data = _filter_oauth_parameters(self.request.arguments())
+
+    db.run_in_transaction(self._parse_and_store_data_in_transaction,
+                          build_status_root, build_status_data)
+
+  def _parse_and_store_data_in_transaction(self, build_status_root,
+                                           build_status_data):
+
+    encountered_revisions = set()
+    for revision_and_bot_name in build_status_data:
+      build_number_and_status = self.request.get(revision_and_bot_name)
+
+      try:
+        (build_number, status) = _parse_status(build_number_and_status)
+        (revision, bot_name) = _parse_name(revision_and_bot_name)
+      except ValueError as error:
+        logging.warn('Invalid parameter in request: %s.' % error)
+        self.response.set_status(400)
+        return
+
+      if revision not in encountered_revisions:
+        # There's new data on this revision in this update, so clear all status
+        # entries with that revision. Only do this once when we first encounter
+        # the revision.
+        _delete_all_with_revision(revision, build_status_root)
+        encountered_revisions.add(revision)
+
+      # Finally, write the item.
+      item = BuildStatusData(parent=build_status_root,
+                             bot_name=bot_name,
+                             revision=revision,
+                             build_number=build_number,
+                             status=status)
+      item.put()
+
+    request_posix_timestamp = float(self.request.get('oauth_timestamp'))
+    request_datetime = datetime.datetime.fromtimestamp(request_posix_timestamp)
+    build_status_root.last_updated_at = request_datetime
+    build_status_root.put()
+
diff --git a/tools/quality_tracking/dashboard/add_coverage_data.py b/tools/quality_tracking/dashboard/add_coverage_data.py
new file mode 100644
index 0000000..ade5a6a
--- /dev/null
+++ b/tools/quality_tracking/dashboard/add_coverage_data.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Implements a handler for adding coverage data."""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+from datetime import datetime
+import logging
+
+from google.appengine.ext import db
+
+import oauth_post_request_handler
+
+REPORT_CATEGORIES = ('small_medium_tests', 'large_tests')
+
+
+class CoverageData(db.Model):
+  """This represents one coverage report from the build bot."""
+
+  # The date the report was made.
+  date = db.DateTimeProperty(required=True)
+
+  # Coverage percentages.
+  line_coverage = db.FloatProperty(required=True)
+  function_coverage = db.FloatProperty(required=True)
+  branch_coverage = db.FloatProperty()
+
+  # The report category must be one of the REPORT_CATEGORIES.
+  report_category = db.CategoryProperty()
+
+
+class AddCoverageData(oauth_post_request_handler.OAuthPostRequestHandler):
+  """Used to report coverage data.
+
+     Coverage data is reported as a POST request and should contain, aside from
+     the regular oauth_* parameters, these values:
+
+     date: The POSIX timestamp for when the coverage observation was made.
+     report_category: A value in REPORT_CATEGORIES which characterizes the
+         coverage information (e.g. is the coverage from small / medium tests
+         or large tests?)
+
+     line_coverage: Line coverage percentage.
+     function_coverage: Function coverage percentage.
+     branch_coverage: Branch coverage percentage.
+  """
+
+  def _parse_and_store_data(self):
+    try:
+      request_posix_timestamp = float(self.request.get('oauth_timestamp'))
+      parsed_date = datetime.fromtimestamp(request_posix_timestamp)
+
+      line_coverage = self._parse_percentage('line_coverage')
+      function_coverage = self._parse_percentage('function_coverage')
+      branch_coverage = self._parse_percentage('branch_coverage')
+      report_category = self._parse_category('report_category')
+
+    except ValueError as error:
+      logging.warn('Invalid parameter in request: %s.' % error)
+      self.response.set_status(400)
+      return
+
+    item = CoverageData(date=parsed_date,
+                        line_coverage=line_coverage,
+                        function_coverage=function_coverage,
+                        branch_coverage=branch_coverage,
+                        report_category=report_category)
+    item.put()
+
+  def _parse_percentage(self, key):
+    """Parses out a percentage value from the request."""
+    percentage = float(self.request.get(key))
+    if percentage < 0.0 or percentage > 100.0:
+      raise ValueError('%s is not a valid percentage.' % string_value)
+    return percentage
+
+  def _parse_category(self, key):
+    value = self.request.get(key)
+    if value in REPORT_CATEGORIES:
+      return value
+    else:
+      raise ValueError("Invalid category %s." % value)
diff --git a/tools/quality_tracking/dashboard/app.yaml b/tools/quality_tracking/dashboard/app.yaml
new file mode 100644
index 0000000..64324fe
--- /dev/null
+++ b/tools/quality_tracking/dashboard/app.yaml
@@ -0,0 +1,24 @@
+application: webrtc-dashboard
+version: 1
+runtime: python27
+api_version: 1
+threadsafe: false
+
+handlers:
+# Serve stylesheets statically.
+- url: /stylesheets
+  static_dir: stylesheets
+# This magic file is here to prove to the Google Account Domain Management
+# that we own this domain. It needs to stay there so the domain management
+# doesn't get suspicious.
+- url: /google403c95edcde16425.html
+  static_files: static/google403c95edcde16425.html
+  upload: static/google403c95edcde16425.html
+
+# Note: tests should be disabled in production.
+# - url: /test.*
+#  script: gaeunit.py
+
+# Redirect all other requests to our dynamic handlers.
+- url: /.*
+  script: main.app
\ No newline at end of file
diff --git a/tools/quality_tracking/dashboard/dashboard.py b/tools/quality_tracking/dashboard/dashboard.py
new file mode 100644
index 0000000..d18d75c
--- /dev/null
+++ b/tools/quality_tracking/dashboard/dashboard.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Implements the quality tracker dashboard and reporting facilities."""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+import math
+
+from google.appengine.ext.webapp import template
+import webapp2
+
+import load_build_status
+import load_coverage
+
+
+class ShowDashboard(webapp2.RequestHandler):
+  """Shows the dashboard page.
+
+     The page is shown by grabbing data we have stored previously
+     in the App Engine database using the AddCoverageData handler.
+  """
+
+  def get(self):
+    build_status_loader = load_build_status.BuildStatusLoader()
+
+    # Split the build status data in two rows to fit them on the page.
+    build_status_data = build_status_loader.load_build_status_data()
+    split_point = int(math.ceil(len(build_status_data) / 2.0))
+    build_status_data_row_1 = build_status_data[:split_point]
+    build_status_data_row_2 = build_status_data[split_point:]
+
+    last_updated_at = build_status_loader.load_last_modified_at()
+    if last_updated_at is None:
+      self._show_error_page("No data has yet been uploaded to the dashboard.")
+      return
+
+    last_updated_at = last_updated_at.strftime("%Y-%m-%d %H:%M")
+    lkgr = build_status_loader.compute_lkgr()
+
+    coverage_loader = load_coverage.CoverageDataLoader()
+    small_medium_coverage_json_data = (
+        coverage_loader.load_coverage_json_data('small_medium_tests'))
+    large_coverage_json_data = (
+        coverage_loader.load_coverage_json_data('large_tests'))
+
+    page_template_filename = 'templates/dashboard_template.html'
+    self.response.write(template.render(page_template_filename, vars()))
+
+  def _show_error_page(self, error_message):
+    self.response.write('<html><body>%s</body></html>' % error_message)
+
diff --git a/tools/quality_tracking/dashboard/gaeunit.py b/tools/quality_tracking/dashboard/gaeunit.py
new file mode 120000
index 0000000..a93f6bd
--- /dev/null
+++ b/tools/quality_tracking/dashboard/gaeunit.py
@@ -0,0 +1 @@
+../../../third_party/gaeunit/gaeunit.py
\ No newline at end of file
diff --git a/tools/quality_tracking/dashboard/gviz_api.py b/tools/quality_tracking/dashboard/gviz_api.py
new file mode 120000
index 0000000..c9dca90
--- /dev/null
+++ b/tools/quality_tracking/dashboard/gviz_api.py
@@ -0,0 +1 @@
+../../third_party/google-visualization-python/gviz_api.py
\ No newline at end of file
diff --git a/tools/quality_tracking/dashboard/index.yaml b/tools/quality_tracking/dashboard/index.yaml
new file mode 100644
index 0000000..7a20411
--- /dev/null
+++ b/tools/quality_tracking/dashboard/index.yaml
@@ -0,0 +1,16 @@
+indexes:
+
+# AUTOGENERATED
+
+# This index.yaml is automatically updated whenever the dev_appserver
+# detects that a new type of query is run.  If you want to manage the
+# index.yaml file manually, remove the above marker line (the line
+# saying "# AUTOGENERATED").  If you want to manage some indexes
+# manually, move them above the marker line.  The index.yaml file is
+# automatically uploaded to the admin console when you next deploy
+# your application using appcfg.py.
+
+- kind: CoverageData
+  properties:
+  - name: report_category
+  - name: date
diff --git a/tools/quality_tracking/dashboard/lkgr_page.py b/tools/quality_tracking/dashboard/lkgr_page.py
new file mode 100644
index 0000000..a025489
--- /dev/null
+++ b/tools/quality_tracking/dashboard/lkgr_page.py
@@ -0,0 +1,31 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Implements the LKGR page."""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+import webapp2
+
+import load_build_status
+
+class ShowLkgr(webapp2.RequestHandler):
+  """This handler shows the LKGR in the simplest possible way.
+
+     The page is intended to be used by automated tools.
+  """
+  def get(self):
+    build_status_loader = load_build_status.BuildStatusLoader()
+
+    lkgr = build_status_loader.compute_lkgr()
+    if lkgr is None:
+      self.response.out.write('No data has been uploaded to the dashboard.')
+    else:
+      self.response.out.write(lkgr)
diff --git a/tools/quality_tracking/dashboard/load_build_status.py b/tools/quality_tracking/dashboard/load_build_status.py
new file mode 100644
index 0000000..d4265f9
--- /dev/null
+++ b/tools/quality_tracking/dashboard/load_build_status.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Loads build status data for the dashboard."""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+from google.appengine.ext import db
+
+
+def _status_not_ok(status):
+  return status not in ('OK', 'warnings')
+
+
+def _all_ok(statuses):
+  return filter(_status_not_ok, statuses) == []
+
+
+def _get_first_entry(iterable):
+  if not iterable:
+    return None
+  for item in iterable:
+    return item
+
+
+class BuildStatusLoader:
+  """ Loads various build status data from the database."""
+
+  def load_build_status_data(self):
+    """Returns the latest conclusive build status for each bot.
+
+       The statuses OK, failed and warnings are considered to be conclusive.
+
+       The algorithm looks at the 100 most recent status entries, which should
+       give data on roughly the last five revisions if the number of bots stay
+       around 20 (The number 100 should be increased if the number of bots
+       increases significantly). This should give us enough data to get a
+       conclusive build status for all active bots.
+
+       With this limit, the algorithm will adapt automatically if a bot is
+       decommissioned - it will eventually disappear. The limit should not be
+       too high either since we will perhaps remember offline bots too long,
+       which could be confusing. The algorithm also adapts automatically to new
+       bots - these show up immediately if they get a build status for a recent
+       revision.
+
+       Returns:
+           A list of BuildStatusData entities with one entity per bot.
+    """
+
+    build_status_entries = db.GqlQuery('SELECT * '
+                                       'FROM BuildStatusData '
+                                       'ORDER BY revision DESC '
+                                       'LIMIT 100')
+
+    bots_to_latest_conclusive_entry = dict()
+    for entry in build_status_entries:
+      if entry.status == 'building':
+        # The 'building' status it not conclusive, so discard this entry and
+        # pick up the entry for this bot on the next revision instead. That
+        # entry is guaranteed to have a status != 'building' since a bot cannot
+        # be building two revisions simultaneously.
+        continue
+      if bots_to_latest_conclusive_entry.has_key(entry.bot_name):
+        # We've already determined this bot's status.
+        continue
+
+      bots_to_latest_conclusive_entry[entry.bot_name] = entry
+
+    return bots_to_latest_conclusive_entry.values()
+
+  def load_last_modified_at(self):
+    build_status_root = db.GqlQuery('SELECT * '
+                                    'FROM BuildStatusRoot').get()
+    if not build_status_root:
+      # Operating on completely empty database
+      return None
+
+    return build_status_root.last_updated_at
+
+  def compute_lkgr(self):
+    """ Finds the most recent revision for which all bots are green.
+
+        Returns:
+            The last known good revision (as an integer) or None if there
+            is no green revision in the database.
+
+        Implementation note: The data store fetches stuff as we go, so we won't
+        read in the whole status table unless the LKGR is right at the end or
+        we don't have a LKGR. Bots that are offline do not affect the LKGR
+        computation (e.g. they are not considered to be failed).
+    """
+    build_status_entries = db.GqlQuery('SELECT * '
+                                       'FROM BuildStatusData '
+                                       'ORDER BY revision DESC ')
+
+    first_entry = _get_first_entry(build_status_entries)
+    if first_entry is None:
+      # No entries => no LKGR
+      return None
+
+    current_lkgr = first_entry.revision
+    statuses_for_current_lkgr = [first_entry.status]
+
+    for entry in build_status_entries:
+      if current_lkgr == entry.revision:
+        statuses_for_current_lkgr.append(entry.status)
+      else:
+        # Starting on new revision, check previous revision.
+        if _all_ok(statuses_for_current_lkgr):
+          # All bots are green; LKGR found.
+          return current_lkgr
+        else:
+          # Not all bots are green, so start over on the next revision.
+          current_lkgr = entry.revision
+          statuses_for_current_lkgr = [entry.status]
+
+    if _all_ok(statuses_for_current_lkgr):
+      # There was only one revision and it was OK.
+      return current_lkgr
+
+    # There are no all-green revision in the database.
+    return None
diff --git a/tools/quality_tracking/dashboard/load_coverage.py b/tools/quality_tracking/dashboard/load_coverage.py
new file mode 100644
index 0000000..f7b79d7
--- /dev/null
+++ b/tools/quality_tracking/dashboard/load_coverage.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Loads coverage data from the database."""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+import logging
+
+from google.appengine.ext import db
+import gviz_api
+
+
+class CoverageDataLoader:
+  """ Loads coverage data from the database."""
+
+  def load_coverage_json_data(self, report_category):
+    coverage_entries = db.GqlQuery('SELECT * '
+                                   'FROM CoverageData '
+                                   'WHERE report_category = :1 '
+                                   'ORDER BY date ASC', report_category)
+    data = []
+    for coverage_entry in coverage_entries:
+      # Note: The date column must be first in alphabetical order since it is
+      # the primary column. This is a bug in the gviz api (or at least it
+      # doesn't make much sense).
+      data.append({'aa_date': coverage_entry.date,
+                   'line_coverage': coverage_entry.line_coverage,
+                   'function_coverage': coverage_entry.function_coverage,
+                   'branch_coverage': coverage_entry.branch_coverage,
+                  })
+
+    description = {
+        'aa_date': ('datetime', 'Date'),
+        'line_coverage': ('number', 'Line Coverage'),
+        'function_coverage': ('number', 'Function Coverage'),
+        'branch_coverage': ('number', 'Branch Coverage'),
+    }
+    coverage_data = gviz_api.DataTable(description, data)
+    return coverage_data.ToJSon(order_by='date')
diff --git a/tools/quality_tracking/dashboard/main.py b/tools/quality_tracking/dashboard/main.py
new file mode 100644
index 0000000..41d4beb
--- /dev/null
+++ b/tools/quality_tracking/dashboard/main.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Connects all URLs with their respective handlers."""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+from google.appengine.ext.webapp import template
+import webapp2
+
+import add_build_status_data
+import add_coverage_data
+import dashboard
+import lkgr_page
+
+app = webapp2.WSGIApplication([('/', dashboard.ShowDashboard),
+                               ('/lkgr', lkgr_page.ShowLkgr),
+                               ('/add_coverage_data',
+                                add_coverage_data.AddCoverageData),
+                               ('/add_build_status_data',
+                                add_build_status_data.AddBuildStatusData)],
+                              debug=True)
\ No newline at end of file
diff --git a/tools/quality_tracking/dashboard/oauth_post_request_handler.py b/tools/quality_tracking/dashboard/oauth_post_request_handler.py
new file mode 100644
index 0000000..416e1b7
--- /dev/null
+++ b/tools/quality_tracking/dashboard/oauth_post_request_handler.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Provides a OAuth request handler base class."""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+from google.appengine.api import oauth
+import logging
+import webapp2
+
+
+class UserNotAuthenticatedException(Exception):
+  """Gets thrown if a user is not permitted to store data."""
+  pass
+
+
+class OAuthPostRequestHandler(webapp2.RequestHandler):
+  """Works like a normal request handler but adds OAuth authentication.
+
+     This handler will expect a proper OAuth request over POST. This abstract
+     class deals with the authentication but leaves user-defined data handling
+     to its subclasses. Subclasses should not implement the post() method but
+     the _parse_and_store_data() method. Otherwise they may act like regular
+     request handlers. Subclasses should NOT override the get() method.
+
+     The handler will accept an OAuth request if it is correctly formed and
+     the consumer is acting on behalf of an administrator for the dashboard.
+  """
+
+  def post(self):
+    try:
+      self._authenticate_user()
+    except UserNotAuthenticatedException as exception:
+      logging.warn('Failed to authenticate: %s.' % exception)
+      self.response.set_status(403)
+      return
+
+    # Do the actual work.
+    self._parse_and_store_data()
+
+  def _parse_and_store_data(self):
+    """Reads data from POST request and responds accordingly."""
+    raise NotImplementedError('You must override this method!')
+
+  def _authenticate_user(self):
+    try:
+      if oauth.is_current_user_admin():
+        # The user on whose behalf we are acting is indeed an administrator
+        # of this application, so we're good to go.
+        logging.info('Authenticated on behalf of user %s.' %
+                     oauth.get_current_user())
+        return
+      else:
+        raise UserNotAuthenticatedException('We are acting on behalf of '
+                                            'user %s, but that user is not '
+                                            'an administrator.' %
+                                            oauth.get_current_user())
+    except oauth.OAuthRequestError as exception:
+      raise UserNotAuthenticatedException('Invalid OAuth request: %s' %
+                                          exception.__class__.__name__)
diff --git a/tools/quality_tracking/dashboard/static/google403c95edcde16425.html b/tools/quality_tracking/dashboard/static/google403c95edcde16425.html
new file mode 100644
index 0000000..95c7e2d
--- /dev/null
+++ b/tools/quality_tracking/dashboard/static/google403c95edcde16425.html
@@ -0,0 +1 @@
+google-site-verification: google403c95edcde16425.html
\ No newline at end of file
diff --git a/tools/quality_tracking/dashboard/stylesheets/stylesheet.css b/tools/quality_tracking/dashboard/stylesheets/stylesheet.css
new file mode 100644
index 0000000..c6eb679
--- /dev/null
+++ b/tools/quality_tracking/dashboard/stylesheets/stylesheet.css
@@ -0,0 +1,45 @@
+/********************************************************************
+*
+*  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+*
+*  Use of this source code is governed by a BSD-style license
+*  that can be found in the LICENSE file in the root of the source
+*  tree. An additional intellectual property rights grant can be found
+*  in the file PATENTS.  All contributing project authors may
+*  be found in the AUTHORS file in the root of the source tree.
+*
+*********************************************************************/
+
+.status_OK {
+  color: #FFFFFF;
+  background-color: #8fdf5f;
+}
+
+.status_failed {
+  color: #FFFFFF;
+  background-color: #e98080;
+}
+
+.status_building {
+  color: #666666;
+  background-color: #fffc6c;
+}
+
+.status_warnings {
+  color: #000000;
+  background-color: #FFC343;
+}
+
+.last_known_good_revision {
+  font-size: 800%;
+}
+
+.status_cell {
+  width: 100px;
+  text-align: center;
+}
+
+body {
+  margin-left: 35px;
+  margin-top: 25px;
+}
\ No newline at end of file
diff --git a/tools/quality_tracking/dashboard/templates/dashboard_template.html b/tools/quality_tracking/dashboard/templates/dashboard_template.html
new file mode 100644
index 0000000..419116f
--- /dev/null
+++ b/tools/quality_tracking/dashboard/templates/dashboard_template.html
@@ -0,0 +1,113 @@
+<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN"
+    "http://www.w3.org/TR/html4/strict.dtd">
+<html>
+  <!--
+  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+
+  Use of this source code is governed by a BSD-style license
+  that can be found in the LICENSE file in the root of the source
+  tree. An additional intellectual property rights grant can be found
+  in the file PATENTS.  All contributing project authors may
+  be found in the AUTHORS file in the root of the source tree.
+
+  Template file to be used to generate the WebRTC dashboard.
+  -->
+  <head>
+    <title>WebRTC Coverage Dashboard</title>
+    <meta http-equiv="refresh" content="60">
+    <link href="http://code.google.com/css/codesite.pack.04102009.css"
+          rel="stylesheet" type="text/css">
+    <link href="stylesheets/stylesheet.css"
+          rel="stylesheet" type="text/css">
+
+    <script src="https://www.google.com/jsapi" type="text/javascript"></script>
+    <script type="text/javascript">
+      google.load('visualization', '1', {packages:['table', 'corechart']});
+
+      google.setOnLoadCallback(drawTable);
+      function drawTable() {
+        /* Build data tables and views */
+        {% comment %}
+          Disable Django auto-escaping here since that will mess up our
+          coverage table JSON data otherwise.
+        {% endcomment %}
+        {% autoescape off %}
+        var small_medium_coverage_data_table =
+          new google.visualization.DataTable(
+              {{ small_medium_coverage_json_data }});
+        var large_coverage_data_table =
+          new google.visualization.DataTable(
+              {{ large_coverage_json_data }});
+        {% endautoescape %}
+
+        /* Display tables and charts */
+        var small_medium_coverage_chart = new google.visualization.LineChart(
+          document.getElementById('table_div_small_medium_coverage'));
+        small_medium_coverage_chart.draw(small_medium_coverage_data_table, {
+          colors: ['blue', 'red', 'black'],
+          vAxis: {title: 'Coverage (%)'},
+          hAxis: {title: 'Date'},
+          width: 1200, height: 300,
+        });
+
+        var large_coverage_chart = new google.visualization.LineChart(
+          document.getElementById('table_div_large_coverage'));
+        large_coverage_chart.draw(large_coverage_data_table, {
+          colors: ['blue', 'red', 'black'],
+          vAxis: {title: 'Coverage (%)'},
+          hAxis: {title: 'Date'},
+          width: 1200, height: 300,
+        });
+      }
+    </script>
+  </head>
+  <body>
+    <h1>WebRTC Quality Dashboard</h1>
+    <h2>Current Build Status</h2>
+    <div>(as of {{ last_updated_at }} UTC)</div>
+    <table>
+      <tr>
+      {% for entry in build_status_data_row_1 %}
+        <th class="status_cell">{{ entry.bot_name }}</th>
+      {% endfor %}
+      </tr>
+      <tr>
+      {% for entry in build_status_data_row_1 %}
+        <td title="Last built revision {{ entry.revision }}"
+            class="status_cell status_{{entry.status}}">
+          {{entry.status}}
+        </td>
+      {% endfor %}
+      </tr>
+      <tr>
+      {% for entry in build_status_data_row_2 %}
+        <th class="status_cell">{{ entry.bot_name }}</th>
+      {% endfor %}
+      </tr>
+      <tr>
+      {% for entry in build_status_data_row_2 %}
+        <td title="Last built revision {{ entry.revision }}"
+            class="status_cell status_{{entry.status}}">
+          {{entry.status}}
+        </td>
+      {% endfor %}
+      </tr>
+    </table>
+    <p></p>
+
+    <h2>Last Known Good Revision (LKGR)</h2>
+    <div class="last_known_good_revision">
+      {% if lkgr  %}
+        <a href="http://code.google.com/p/webrtc/source/detail?r={{ lkgr }}">
+          {{ lkgr }}</a>
+      {% else %}
+        ????
+      {% endif %}
+    </div>
+
+    <h2>Code Coverage History (Small / Medium Tests)</h2>
+    <div id="table_div_small_medium_coverage"></div>
+    <h2>Code Coverage History (Large Tests)</h2>
+    <div id="table_div_large_coverage"></div>
+  </body>
+</html>
diff --git a/tools/quality_tracking/dashboard/test/load_build_status_test.py b/tools/quality_tracking/dashboard/test/load_build_status_test.py
new file mode 100755
index 0000000..56c9379
--- /dev/null
+++ b/tools/quality_tracking/dashboard/test/load_build_status_test.py
@@ -0,0 +1,114 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+import unittest
+from google.appengine.ext import db
+from google.appengine.ext import testbed
+
+from add_build_status_data import BuildStatusData
+import load_build_status
+
+class LoadBuildStatusTest(unittest.TestCase):
+  def setUp(self):
+     # First, create an instance of the Testbed class.
+    self.testbed = testbed.Testbed()
+    # Then activate the testbed, which prepares the service stubs for use.
+    self.testbed.activate()
+    # Next, declare which service stubs you want to use.
+    self.testbed.init_datastore_v3_stub()
+
+  def test_returns_latest_nonbuilding_entries_when_loading_build_status(self):
+    BuildStatusData(bot_name="Bot1", revision=17,
+                    build_number=499, status="OK").put()
+    BuildStatusData(bot_name="Bot2", revision=17,
+                    build_number=505, status="OK").put()
+    BuildStatusData(bot_name="Bot3", revision=17,
+                    build_number=344, status="failed").put()
+    BuildStatusData(bot_name="Bot1", revision=18,
+                    build_number=499, status="building").put()
+    BuildStatusData(bot_name="Bot2", revision=18,
+                    build_number=505, status="failed").put()
+    BuildStatusData(bot_name="Bot3", revision=18,
+                    build_number=344, status="OK").put()
+
+    loader = load_build_status.BuildStatusLoader()
+    result = loader.load_build_status_data()
+
+    self.assertEqual(3, len(result))
+
+    # We make no guarantees on order, but we can use the fact that the testbed
+    # is deterministic to evaluate that the corrects bots were selected like so:
+    self.assertEqual("Bot1", result[0].bot_name)
+    self.assertEqual(17, result[0].revision)
+    self.assertEqual("OK", result[0].status)
+
+    self.assertEqual("Bot3", result[1].bot_name)
+    self.assertEqual(18, result[1].revision)
+    self.assertEqual("OK", result[1].status)
+
+    self.assertEqual("Bot2", result[2].bot_name)
+    self.assertEqual(18, result[2].revision)
+    self.assertEqual("failed", result[2].status)
+
+  def test_returns_lkgr_for_single_green_revision(self):
+    BuildStatusData(bot_name="Bot1", revision=17,
+                    build_number=499, status="OK").put()
+    BuildStatusData(bot_name="Bot2", revision=17,
+                    build_number=505, status="OK").put()
+    BuildStatusData(bot_name="Bot3", revision=17,
+                    build_number=344, status="OK").put()
+
+    loader = load_build_status.BuildStatusLoader()
+    self.assertEqual(17, loader.compute_lkgr())
+
+  def test_returns_correct_lkgr_with_most_recent_revision_failed(self):
+    BuildStatusData(bot_name="Bot1", revision=17,
+                    build_number=499, status="OK").put()
+    BuildStatusData(bot_name="Bot2", revision=17,
+                    build_number=505, status="OK").put()
+    BuildStatusData(bot_name="Bot3", revision=17,
+                    build_number=344, status="OK").put()
+    BuildStatusData(bot_name="Bot1", revision=18,
+                    build_number=499, status="OK").put()
+    BuildStatusData(bot_name="Bot2", revision=18,
+                    build_number=505, status="failed").put()
+    BuildStatusData(bot_name="Bot3", revision=18,
+                    build_number=344, status="OK").put()
+
+    loader = load_build_status.BuildStatusLoader()
+    self.assertEqual(17, loader.compute_lkgr())
+
+  def test_returns_none_if_no_revisions(self):
+    loader = load_build_status.BuildStatusLoader()
+    self.assertEqual(None, loader.compute_lkgr())
+
+  def test_returns_none_if_no_green_revisions(self):
+    BuildStatusData(bot_name="Bot2", revision=18,
+                    build_number=505, status="failed").put()
+
+    loader = load_build_status.BuildStatusLoader()
+    self.assertEqual(None, loader.compute_lkgr())
+
+  def test_skips_partially_building_revisions(self):
+    BuildStatusData(bot_name="Bot1", revision=18,
+                    build_number=499, status="building").put()
+    BuildStatusData(bot_name="Bot2", revision=18,
+                    build_number=505, status="OK").put()
+    BuildStatusData(bot_name="Bot1", revision=17,
+                    build_number=344, status="OK").put()
+
+    loader = load_build_status.BuildStatusLoader()
+    self.assertEqual(17, loader.compute_lkgr())
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/tools/quality_tracking/dashboard_connection.py b/tools/quality_tracking/dashboard_connection.py
new file mode 100644
index 0000000..9a6e30f
--- /dev/null
+++ b/tools/quality_tracking/dashboard_connection.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Contains utilities for communicating with the dashboard."""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+import httplib
+import shelve
+import urlparse
+import oauth.oauth as oauth
+
+import constants
+
+
+class FailedToReadRequiredInputFile(Exception):
+  pass
+
+
+class FailedToReportToDashboard(Exception):
+  pass
+
+
+class DashboardConnection:
+  """Helper class for pushing data to the dashboard.
+
+     This class deals with most of details for accessing protected resources
+     (i.e. data-writing operations) on the dashboard. Such operations are
+     authenticated using OAuth. This class requires a consumer secret and a
+     access token.
+
+     The access token and consumer secrets are stored as files on disk in the
+     working directory of the scripts. Both files are created by the
+     request_oauth_permission script.
+  """
+
+  def __init__(self, consumer_key):
+    self.consumer_key_ = consumer_key
+
+  def read_required_files(self, consumer_secret_file, access_token_file):
+    """Reads required data for making OAuth requests.
+
+       Args:
+           consumer_secret_file: A shelve file with an entry consumer_secret
+               containing the consumer secret in string form.
+           access_token_file: A shelve file with an entry access_token
+               containing the access token in string form.
+    """
+    self.access_token_string_ = self._read_access_token(access_token_file)
+    self.consumer_secret_ = self._read_consumer_secret(consumer_secret_file)
+
+  def send_post_request(self, url, parameters):
+    """Sends an OAuth request for a protected resource in the dashboard.
+
+       Use this when you want to report new data to the dashboard. You must have
+       called the read_required_files method prior to calling this method, since
+       that method will read in the consumer secret and access token we need to
+       make the OAuth request. These concepts are described in the class
+       description.
+
+       The server is expected to respond with HTTP status 200 and a completely
+       empty response if the call failed. The server may put diagnostic
+       information in the response.
+
+       Args:
+           url: An absolute url within the dashboard domain, for example
+               http://webrtc-dashboard.appspot.com/add_coverage_data.
+           parameters: A dict which maps from POST parameter names to values.
+
+       Raises:
+           FailedToReportToDashboard: If the dashboard didn't respond
+               with HTTP 200 to our request or if the response is non-empty.
+    """
+    consumer = oauth.OAuthConsumer(self.consumer_key_, self.consumer_secret_)
+    access_token = oauth.OAuthToken.from_string(self.access_token_string_)
+
+    oauth_request = oauth.OAuthRequest.from_consumer_and_token(
+                        consumer,
+                        token=access_token,
+                        http_method='POST',
+                        http_url=url,
+                        parameters=parameters)
+
+    signature_method_hmac_sha1 = oauth.OAuthSignatureMethod_HMAC_SHA1()
+    oauth_request.sign_request(signature_method_hmac_sha1, consumer,
+                               access_token)
+
+    connection = httplib.HTTPConnection(constants.DASHBOARD_SERVER)
+
+    headers = {'Content-Type': 'application/x-www-form-urlencoded'}
+    connection.request('POST', url, body=oauth_request.to_postdata(),
+                       headers=headers)
+
+    response = connection.getresponse()
+    connection.close()
+
+    if response.status != 200:
+      message = ('Failed to report to %s: got response %d (%s)' %
+                 (url, response.status, response.reason))
+      raise FailedToReportToDashboard(message)
+
+    # The response content should be empty on success, so check that:
+    response_content = response.read()
+    if response_content:
+      message = ('Dashboard reported the following error: %s.' %
+                 response_content)
+      raise FailedToReportToDashboard(message)
+
+  def _read_access_token(self, filename):
+    return self._read_shelve(filename, 'access_token')
+
+  def _read_consumer_secret(self, filename):
+    return self._read_shelve(filename, 'consumer_secret')
+
+  def _read_shelve(self, filename, key):
+    input_file = shelve.open(filename)
+
+    if not input_file.has_key(key):
+      raise FailedToReadRequiredInputFile('Missing correct %s file in current '
+                                          'directory. You may have to run '
+                                          'request_oauth_permission.py.' %
+                                          filename)
+
+    result = input_file[key]
+    input_file.close()
+
+    return result
diff --git a/tools/quality_tracking/oauth2 b/tools/quality_tracking/oauth2
new file mode 120000
index 0000000..63ab40b
--- /dev/null
+++ b/tools/quality_tracking/oauth2
@@ -0,0 +1 @@
+../third_party/oauth2/oauth2
\ No newline at end of file
diff --git a/tools/quality_tracking/request_oauth_permission.py b/tools/quality_tracking/request_oauth_permission.py
new file mode 100755
index 0000000..fb97738
--- /dev/null
+++ b/tools/quality_tracking/request_oauth_permission.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+"""This script request an access token from the appengine running the dashboard.
+
+   The script is intended to be run manually whenever we wish to change which
+   dashboard administrator we act on behalf of when running the
+   track_coverage.py script. For example, this will be useful if the current
+   dashboard administrator leaves the project. This script can also be used to
+   launch a new dashboard if that is desired.
+
+   This script should be run on the build bot which runs the track_coverage.py
+   script. This script will present a link during its execution, which the new
+   administrator should follow and then click approve on the web page that
+   appears. The new administrator should have admin rights on the coverage
+   dashboard, otherwise the track_* scripts will not work.
+
+   If successful, this script will write the access token to a file access.token
+   in the current directory, which later can be read by the track_* scripts.
+   The token is stored in string form (as reported by the web server) using the
+   shelve module. The consumer secret passed in as an argument to this script
+   will also similarly be stored in a file consumer.secret. The shelve keys
+   will be 'access_token' and 'consumer_secret', respectively.
+"""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+import shelve
+import sys
+import urlparse
+import oauth2 as oauth
+
+import constants
+
+
+class FailedToRequestPermissionException(Exception):
+  pass
+
+
+def _ensure_token_response_is_200(response, queried_url, token_type):
+  if response.status != 200:
+    raise FailedToRequestPermissionException('Failed to request %s from %s: '
+                                             'received status %d, reason %s.' %
+                                             (token_type,
+                                              queried_url,
+                                              response.status,
+                                              response.reason))
+
+
+def _request_unauthorized_token(consumer, request_token_url):
+  """Requests the initial token from the dashboard service.
+
+     Given that the response from the server is correct, we will return a
+     dictionary containing oauth_token and oauth_token_secret mapped to the
+     token and secret value, respectively.
+  """
+  client = oauth.Client(consumer)
+
+  try:
+    response, content = client.request(request_token_url, 'POST')
+  except AttributeError as error:
+    # This catch handler is here since we'll get very confusing messages
+    # if the target server is down for some reason.
+    raise FailedToRequestPermissionException('Failed to request token: '
+                                             'the dashboard is likely down.',
+                                             error)
+
+  _ensure_token_response_is_200(response, request_token_url,
+                                'unauthorized token')
+
+  return dict(urlparse.parse_qsl(content))
+
+
+def _ask_user_to_authorize_us(unauthorized_token):
+  """This function will block until the user enters y + newline."""
+  print 'Go to the following link in your browser:'
+  print '%s?oauth_token=%s' % (constants.AUTHORIZE_TOKEN_URL,
+                               unauthorized_token['oauth_token'])
+
+  accepted = 'n'
+  while accepted.lower() != 'y':
+    accepted = raw_input('Have you authorized me yet? (y/n) ')
+
+
+def _request_access_token(consumer, unauthorized_token):
+  token = oauth.Token(unauthorized_token['oauth_token'],
+                      unauthorized_token['oauth_token_secret'])
+  client = oauth.Client(consumer, token)
+  response, content = client.request(constants.ACCESS_TOKEN_URL, 'POST')
+
+  _ensure_token_response_is_200(response, constants.ACCESS_TOKEN_URL,
+                                'access token')
+
+  return content
+
+
+def _write_access_token_to_file(access_token, filename):
+  output = shelve.open(filename)
+  output['access_token'] = access_token
+  output.close()
+
+  print 'Wrote the access token to the file %s.' % filename
+
+
+def _write_consumer_secret_to_file(consumer_secret, filename):
+  output = shelve.open(filename)
+  output['consumer_secret'] = consumer_secret
+  output.close()
+
+  print 'Wrote the consumer secret to the file %s.' % filename
+
+
+def _main():
+  if len(sys.argv) != 2:
+    print ('Usage: %s <consumer secret>.\n\nThe consumer secret is an OAuth '
+           'concept and is obtained from the Google Accounts domain dashboard.'
+           % sys.argv[0])
+    return
+
+  consumer_secret = sys.argv[1]
+  consumer = oauth.Consumer(constants.CONSUMER_KEY, consumer_secret)
+
+  unauthorized_token = _request_unauthorized_token(consumer,
+                                                   constants.REQUEST_TOKEN_URL)
+
+  _ask_user_to_authorize_us(unauthorized_token)
+
+  access_token_string = _request_access_token(consumer, unauthorized_token)
+
+  _write_access_token_to_file(access_token_string, constants.ACCESS_TOKEN_FILE)
+  _write_consumer_secret_to_file(consumer_secret,
+                                 constants.CONSUMER_SECRET_FILE)
+
+if __name__ == '__main__':
+  _main()
diff --git a/tools/quality_tracking/tgrid_parser.py b/tools/quality_tracking/tgrid_parser.py
new file mode 100644
index 0000000..b255bf6
--- /dev/null
+++ b/tools/quality_tracking/tgrid_parser.py
@@ -0,0 +1,94 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Contains functions for parsing the build master's transposed grid page.
+
+   Compatible with build bot 0.8.4 P1.
+"""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+import re
+
+
+# This is here to work around a buggy build bot status message which makes no
+# sense, but which means the build failed when the slave was lost.
+BB_084_P1_BUGGY_STATUS = 'build<br/>successful<br/>exception<br/>slave<br/>lost'
+
+
+class FailedToParseBuildStatus(Exception):
+  pass
+
+
+def _map_status(status):
+  if status == 'exception' or status == BB_084_P1_BUGGY_STATUS:
+    return 'failed'
+  return status
+
+
+def _parse_builds(revision, html):
+  """Parses the bot list, which is a sequence of <td></td> lines.
+
+     See contract for parse_tgrid_page for more information on how this function
+     behaves.
+
+     Example input:
+     <td class="build success"><a href="builders/Android/builds/119">OK</a></td>
+     The first regular expression group captures Android, second 119, third OK.
+  """
+  result = {}
+
+  for match in re.finditer('<td.*?>.*?<a href="builders/(.+?)/builds/(\d+)">'
+                           '(OK|failed|building|warnings|exception|' +
+                           BB_084_P1_BUGGY_STATUS + ')'
+                           '.*?</a>.*?</td>',
+                           html, re.DOTALL):
+    revision_and_bot_name = revision + "--" + match.group(1)
+    build_number_and_status = match.group(2) + "--" + _map_status(
+                                                          match.group(3))
+
+    result[revision_and_bot_name] = build_number_and_status
+
+  return result
+
+
+def parse_tgrid_page(html):
+  """Parses the build master's tgrid page.
+
+     Example input:
+     <tr>
+       <td valign="bottom" class="sourcestamp">1568</td>
+       LIST OF BOTS
+     </tr>
+     The first regular expression group captures 1568, second group captures
+     everything in LIST OF BOTS. The list of bots is then passed into a
+     separate function for parsing.
+
+     Args:
+         html: The raw HTML from the tgrid page.
+
+     Returns: A dictionary with <svn revision>--<bot name> mapped to
+         <bot build number>--<status>, where status is either OK, failed,
+         building or warnings. The status may be 'exception' in the input, but
+         we simply map that to failed.
+  """
+  result = {}
+
+  for match in re.finditer('<td.*?class="sourcestamp">(\d+).*?</td>(.*?)</tr>',
+                           html, re.DOTALL):
+    revision = match.group(1)
+    builds_for_revision_html = match.group(2)
+    result.update(_parse_builds(revision, builds_for_revision_html))
+
+  if not result:
+    raise FailedToParseBuildStatus('Could not find any build statuses in %s.' %
+                                   html)
+
+  return result
diff --git a/tools/quality_tracking/tgrid_parser_test.py b/tools/quality_tracking/tgrid_parser_test.py
new file mode 100755
index 0000000..c772ac3
--- /dev/null
+++ b/tools/quality_tracking/tgrid_parser_test.py
@@ -0,0 +1,613 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+#  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+#  Use of this source code is governed by a BSD-style license
+#  that can be found in the LICENSE file in the root of the source
+#  tree. An additional intellectual property rights grant can be found
+#  in the file PATENTS.  All contributing project authors may
+#  be found in the AUTHORS file in the root of the source tree.
+
+"""Test the tgrid parser.
+
+   Compatible with build bot 0.8.4 P1.
+"""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+import unittest
+
+import tgrid_parser
+
+
+SAMPLE_FILE = """
+
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+  "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<html xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+        <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
+            <title>Buildbot</title>
+    <link rel="stylesheet" href="default.css" type="text/css" />
+    <link rel="alternate" type="application/rss+xml" title="RSS" href="rss">
+      </head>
+  <body class="interface">
+    <div class="header">
+        <a href=".">Home</a>
+        - <a href="waterfall">Waterfall</a>
+        <a href="grid">Grid</a>
+        <a href="tgrid">T-Grid</a>
+        <a href="console">Console</a>
+        <a href="builders">Builders</a>
+        <a href="one_line_per_build">Recent Builds</a>
+        <a href="buildslaves">Buildslaves</a>
+        <a href="changes">Changesources</a>
+        - <a href="json/help">JSON API</a>
+        - <a href="about">About</a>
+    </div>
+    <hr/>
+
+    <div class="content">
+<h1>Transposed Grid View</h1>
+
+<table class="Grid" border="0" cellspacing="0">
+
+<tr>
+ <td class="title"><a href="http://www.chromium.org">WebRTC</a>
+
+
+ </td>
+   <td valign="middle" style="text-align: center" class="builder idle">
+    <a href="builders/Android">Android</a></td>
+   <td valign="middle" style="text-align: center" class="builder idle">
+    <a href="builders/AndroidNDK">AndroidNDK</a></td>
+   <td valign="middle" style="text-align: center" class="builder idle">
+    <a href="builders/Chrome">Chrome</a></td>
+   <td valign="middle" style="text-align: center" class="builder idle">
+    <a href="builders/ChromeOS">ChromeOS</a></td>
+   <td valign="middle" style="text-align: center" class="builder idle">
+    <a href="builders/Linux32DBG">Linux32DBG</a></td>
+   <td valign="middle" style="text-align: center" class="builder idle">
+    <a href="builders/Linux32Release">Linux32Release</a></td>
+   <td valign="middle" style="text-align: center" class="builder idle">
+    <a href="builders/Linux64DBG">Linux64DBG</a></td>
+   <td valign="middle" style="text-align: center" class="builder idle">
+    <a href="builders/Linux64DBG-GCC4.6">Linux64DBG-GCC4.6</a></td>
+   <td valign="middle" style="text-align: center" class="builder idle">
+    <a href="builders/Linux64Release">Linux64Release</a></td>
+   <td valign="middle" style="text-align: center" class="builder idle">
+    <a href="builders/LinuxClang">LinuxClang</a></td>
+   <td valign="middle" style="text-align: center" class="builder idle">
+    <a href="builders/LinuxValgrind">LinuxValgrind</a></td>
+   <td valign="middle" style="text-align: center" class="builder idle">
+    <a href="builders/LinuxVideoTest">LinuxVideoTest</a></td>
+   <td valign="middle" style="text-align: center" class="builder idle">
+    <a href="builders/MacOS32DBG">MacOS32DBG</a></td>
+   <td valign="middle" style="text-align: center" class="builder building">
+    <a href="builders/MacOS32Release">MacOS32Release</a><br/>(building)</td>
+   <td valign="middle" style="text-align: center" class="builder idle">
+    <a href="builders/Win32Debug">Win32Debug</a></td>
+   <td valign="middle" style="text-align: center" class="builder building">
+    <a href="builders/Win32Release">Win32Release</a><br/>(building)</td>
+ </tr>
+
+ <tr>
+ <td valign="bottom" class="sourcestamp">2006  </td>
+      <td class="build success">
+    <a href="builders/Android/builds/482">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/AndroidNDK/builds/70">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Chrome/builds/243">warnings</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/ChromeOS/builds/933">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux32DBG/builds/936">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux32Release/builds/1050">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux64DBG/builds/1038">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux64DBG-GCC4.6/builds/371">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux64Release/builds/936">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/LinuxClang/builds/610">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/LinuxValgrind/builds/317">OK</a>
+  </td>
+
+      <td class="build">&nbsp;</td>
+
+      <td class="build success">
+    <a href="builders/MacOS32DBG/builds/1052">OK</a>
+  </td>
+
+      <td class="build">&nbsp;</td>
+
+      <td class="build success">
+    <a href="builders/Win32Debug/builds/822">OK</a>
+  </td>
+
+      <td class="build">&nbsp;</td>
+
+  </tr>
+ <tr>
+ <td valign="bottom" class="sourcestamp">2007  </td>
+      <td class="build success">
+    <a href="builders/Android/builds/483">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/AndroidNDK/builds/71">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Chrome/builds/244">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/ChromeOS/builds/934">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux32DBG/builds/937">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux32Release/builds/1051">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux64DBG/builds/1039">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux64DBG-GCC4.6/builds/372">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux64Release/builds/937">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/LinuxClang/builds/611">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/LinuxValgrind/builds/318">OK</a>
+  </td>
+
+      <td class="build failure">
+    <a href="builders/LinuxVideoTest/builds/731">failed<br/>voe_auto_test</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/MacOS32DBG/builds/1053">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/MacOS32Release/builds/309">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Win32Debug/builds/823">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Win32Release/builds/809">OK</a>
+  </td>
+
+  </tr>
+ <tr>
+ <td valign="bottom" class="sourcestamp">2008  </td>
+      <td class="build success">
+    <a href="builders/Android/builds/484">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/AndroidNDK/builds/72">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Chrome/builds/245">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/ChromeOS/builds/935">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux32DBG/builds/938">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux32Release/builds/1052">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux64DBG/builds/1040">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux64DBG-GCC4.6/builds/373">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux64Release/builds/938">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/LinuxClang/builds/612">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/LinuxValgrind/builds/319">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/LinuxVideoTest/builds/732">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/MacOS32DBG/builds/1054">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/MacOS32Release/builds/310">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Win32Debug/builds/824">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Win32Release/builds/810">OK</a>
+  </td>
+
+  </tr>
+ <tr>
+ <td valign="bottom" class="sourcestamp">2010  </td>
+      <td class="build success">
+    <a href="builders/Android/builds/485">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/AndroidNDK/builds/73">OK</a>
+  </td>
+
+      <td class="build">&nbsp;</td>
+
+      <td class="build success">
+    <a href="builders/ChromeOS/builds/936">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux32DBG/builds/939">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux32Release/builds/1053">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux64DBG/builds/1041">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux64DBG-GCC4.6/builds/374">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux64Release/builds/939">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/LinuxClang/builds/613">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/LinuxValgrind/builds/320">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/LinuxVideoTest/builds/733">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/MacOS32DBG/builds/1055">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/MacOS32Release/builds/311">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Win32Debug/builds/825">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Win32Release/builds/811">OK</a>
+  </td>
+
+  </tr>
+ <tr>
+ <td valign="bottom" class="sourcestamp">2011  </td>
+      <td class="build success">
+    <a href="builders/Android/builds/486">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/AndroidNDK/builds/74">OK</a>
+  </td>
+
+      <td class="build">&nbsp;</td>
+
+      <td class="build success">
+    <a href="builders/ChromeOS/builds/937">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux32DBG/builds/940">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux32Release/builds/1054">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux64DBG/builds/1042">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux64DBG-GCC4.6/builds/375">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Linux64Release/builds/940">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/LinuxClang/builds/614">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/LinuxValgrind/builds/321">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/LinuxVideoTest/builds/734">OK</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/MacOS32DBG/builds/1056">OK</a>
+  </td>
+
+      <td class="build running">
+    <a href="builders/MacOS32Release/builds/313">building</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Win32Debug/builds/826">OK</a>
+  </td>
+
+      <td class="build running">
+    <a href="builders/Win32Release/builds/813">building</a>
+  </td>
+
+  </tr>
+  <tr>
+    <td valign="bottom" class="sourcestamp">latest  </td>
+      <td class="build running">
+    <a href="builders/MacOS32Release/builds/313">building</a>
+  </td>
+
+      <td class="build success">
+    <a href="builders/Win32Debug/builds/826">OK</a>
+  </td>
+  </tr>
+</table>
+
+</div><div class="footer" style="clear:both">
+      <hr/>
+      <a href="http://buildbot.net/">BuildBot</a> (0.8.4p1)
+      working for the&nbsp;<a href="http://www.chromium.org">WebRTC
+        </a>&nbsp;project.<br/>
+      Page built: <b>Thu 12 Apr 2012 03:49:32</b> (CDT)
+    </div>
+    </body>
+</html>
+"""
+
+MINIMAL_OK = """
+<tr>
+<td valign="bottom" class="sourcestamp">1570  </td>
+<td class="build success">
+<a href="builders/Android/builds/121">OK</a></td>
+</tr>
+"""
+
+MINIMAL_FAIL = """
+<tr>
+<td valign="bottom" class="sourcestamp">1573  </td>
+<td class="build failure">
+  <a href="builders/LinuxVideoTest/builds/731">failed<br/>voe_auto_test</a>
+</td>
+</tr>
+"""
+
+MINIMAL_BUILDING = """
+<tr>
+<td valign="bottom" class="sourcestamp">1576  </td>
+<td class="build running">
+<a href="builders/Win32Debug/builds/434">building</a></td>
+voe_auto_test</td>
+</tr>
+"""
+
+MINIMAL_WARNED = """
+<tr>
+<td valign="bottom" class="sourcestamp">1576  </td>
+<td class="build warnings">
+<a href="builders/Chrome/builds/109">warnings</a><br />
+make chrome</td>
+</tr>
+"""
+
+MINIMAL_EXCEPTION = """
+<tr>
+<td valign="bottom" class="sourcestamp">1576  </td>
+<td class="build exception">
+<a href="builders/Chrome/builds/109">exception</a><br />
+Sync</td>
+</tr>
+"""
+
+MINIMAL_EXCEPTION_SLAVE_LOST = """
+<tr>
+<td valign="bottom" class="sourcestamp">1576  </td>
+<td class="build retry">
+  <a href="builders/LinuxValgrind/builds/324">build<br/>successful<br/>exception<br/>slave<br/>lost</a>
+</td>
+</tr>
+"""
+
+MINIMAL_IN_TRUNK_SOURCESTAMP = """
+<tr>
+<td valign="bottom" class="sourcestamp">1576 in trunk </td>
+<td class="build retry">
+  <a href="builders/LinuxValgrind/builds/324">build<br/>successful<br/>exception<br/>slave<br/>lost</a>
+</td>
+</tr>
+"""
+
+class TGridParserTest(unittest.TestCase):
+  def test_parser_throws_exception_on_empty_html(self):
+    self.assertRaises(tgrid_parser.FailedToParseBuildStatus,
+                      tgrid_parser.parse_tgrid_page, '');
+
+  def test_parser_finds_successful_bot(self):
+    result = tgrid_parser.parse_tgrid_page(MINIMAL_OK)
+
+    self.assertEqual(1, len(result), 'There is only one bot in the sample.')
+    first_mapping = result.items()[0]
+
+    self.assertEqual('1570--Android', first_mapping[0])
+    self.assertEqual('121--OK', first_mapping[1])
+
+  def test_parser_finds_failed_bot(self):
+    result = tgrid_parser.parse_tgrid_page(MINIMAL_FAIL)
+
+    self.assertEqual(1, len(result), 'There is only one bot in the sample.')
+    first_mapping = result.items()[0]
+
+    self.assertEqual('1573--LinuxVideoTest', first_mapping[0])
+    self.assertEqual('731--failed', first_mapping[1])
+
+  def test_parser_finds_building_bot(self):
+    result = tgrid_parser.parse_tgrid_page(MINIMAL_BUILDING)
+
+    self.assertEqual(1, len(result), 'There is only one bot in the sample.')
+    first_mapping = result.items()[0]
+
+    self.assertEqual('1576--Win32Debug', first_mapping[0])
+    self.assertEqual('434--building', first_mapping[1])
+
+  def test_parser_finds_warnings(self):
+    result = tgrid_parser.parse_tgrid_page(MINIMAL_WARNED)
+
+    self.assertEqual(1, len(result), 'There is only one bot in the sample.')
+    first_mapping = result.items()[0]
+
+    self.assertEqual('1576--Chrome', first_mapping[0])
+    self.assertEqual('109--warnings', first_mapping[1])
+
+  def test_parser_finds_exception_and_maps_to_failed(self):
+    result = tgrid_parser.parse_tgrid_page(MINIMAL_EXCEPTION)
+
+    self.assertEqual(1, len(result), 'There is only one bot in the sample.')
+    first_mapping = result.items()[0]
+
+    self.assertEqual('1576--Chrome', first_mapping[0])
+    self.assertEqual('109--failed', first_mapping[1])
+
+  def test_parser_finds_exception_slave_lost_and_maps_to_failed(self):
+    # This is to work around a bug in build bot 0.8.4p1 where it may say that
+    # the build was successful AND the slave was lost. In this case the build
+    # is not actually successful, so treat it as such.
+    result = tgrid_parser.parse_tgrid_page(MINIMAL_EXCEPTION_SLAVE_LOST)
+
+    self.assertEqual(1, len(result), 'There is only one bot in the sample.')
+    first_mapping = result.items()[0]
+
+    self.assertEqual('1576--LinuxValgrind', first_mapping[0])
+    self.assertEqual('324--failed', first_mapping[1])
+
+  def test_parser_finds_exception_slave_lost_and_maps_to_failed(self):
+    # Sometimes the transposed grid says "in trunk" in the source stamp, so
+    # make sure we deal with that.
+    result = tgrid_parser.parse_tgrid_page(MINIMAL_IN_TRUNK_SOURCESTAMP)
+
+    self.assertEqual(1, len(result), 'There is only one bot in the sample.')
+    first_mapping = result.items()[0]
+
+    self.assertEqual('1576--LinuxValgrind', first_mapping[0])
+    self.assertEqual('324--failed', first_mapping[1])
+
+  def test_parser_finds_all_bots_and_revisions_except_forced_builds(self):
+    result = tgrid_parser.parse_tgrid_page(SAMPLE_FILE)
+
+    # 5*16 = 80 bots in sample. There's also five empty results because some
+    # bots did not run for some revisions, so 80 - 5 = 75 results. There are
+    # two additional statuses under an explicit 'latest' revision, which should
+    # be ignored since that means the build was forced.
+    self.assertEqual(75, len(result))
+
+    # Make some samples
+    self.assertTrue(result.has_key('2006--ChromeOS'))
+    self.assertEquals('933--OK', result['2006--ChromeOS'])
+
+    self.assertTrue(result.has_key('2006--Chrome'))
+    self.assertEquals('243--warnings', result['2006--Chrome'])
+
+    self.assertTrue(result.has_key('2006--LinuxClang'))
+    self.assertEquals('610--OK', result['2006--LinuxClang'])
+
+    # This one happened to not get reported in revision 2006, but it should be
+    # there in the next revision:
+    self.assertFalse(result.has_key('2006--Win32Release'))
+    self.assertTrue(result.has_key('2007--Win32Release'))
+    self.assertEquals('809--OK', result['2007--Win32Release'])
+
+    self.assertTrue(result.has_key('2007--ChromeOS'))
+    self.assertEquals('934--OK', result['2007--ChromeOS'])
+
+    self.assertTrue(result.has_key('2007--LinuxVideoTest'))
+    self.assertEquals('731--failed', result['2007--LinuxVideoTest'])
+
+    self.assertTrue(result.has_key('2011--Win32Release'))
+    self.assertEquals('813--building', result['2011--Win32Release'])
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/tools/quality_tracking/track_build_status.py b/tools/quality_tracking/track_build_status.py
new file mode 100644
index 0000000..e07e00a
--- /dev/null
+++ b/tools/quality_tracking/track_build_status.py
@@ -0,0 +1,114 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+"""This script checks the current build status on the master and submits
+   it to the dashboard. It is adapted to build bot version 0.7.12.
+"""
+
+import httplib
+
+import constants
+import dashboard_connection
+import tgrid_parser
+
+# Bots that must be green in order to increment the LKGR revision.
+BOTS = ['Win32Debug',
+        'Win32Release',
+        'Mac32Debug',
+        'Mac32Release',
+        'Linux32Debug',
+        'Linux32Release',
+        'Linux64Debug',
+        'Linux64Release',
+        'LinuxClang',
+        'Linux64Debug-GCC4.6',
+        'LinuxMemcheck',
+        'LinuxTsan',
+        'LinuxAsan',
+        'WinLargeTests',
+        'MacLargeTests',
+        'LinuxLargeTests',
+        'CrOS',
+        'Android',
+        'AndroidNDK',
+       ]
+
+
+class FailedToGetStatusFromMaster(Exception):
+  pass
+
+
+def _download_and_parse_build_status():
+  connection = httplib.HTTPConnection(constants.BUILD_MASTER_SERVER)
+  connection.request('GET', constants.BUILD_MASTER_TRANSPOSED_GRID_URL)
+  response = connection.getresponse()
+
+  if response.status != 200:
+    raise FailedToGetStatusFromMaster(('Failed to get build status from master:'
+                                       ' got status %d, reason %s.' %
+                                       (response.status, response.reason)))
+
+  full_response = response.read()
+  connection.close()
+
+  return tgrid_parser.parse_tgrid_page(full_response)
+
+
+def _is_chrome_only_build(revision_to_bot_name):
+  """Figures out if a revision-to-bot-name mapping represents a Chrome build.
+
+  We assume here that Chrome revisions are always > 100000, whereas WebRTC
+  revisions will not reach that number in the foreseeable future.
+  """
+  revision = int(revision_to_bot_name.split('--')[0])
+  bot_name = revision_to_bot_name.split('--')[1]
+  return 'Chrome' in bot_name and revision > 100000
+
+
+def _filter_undesired_bots(bot_to_status_mapping, desired_bot_names):
+  """Returns the desired bots for the builds status from the dictionary.
+
+  Args:
+    bot_to_status_mapping: Dictionary mapping bot name with revision to status.
+    desired_bot_names: List of bot names that will be the only bots returned in
+      the resulting dictionary.
+  Returns: A dictionary only containing the desired bots.
+  """
+  result = {}
+  for revision_to_bot_name, status in bot_to_status_mapping.iteritems():
+    bot_name = revision_to_bot_name.split('--')[1]
+    if bot_name in desired_bot_names:
+      result[revision_to_bot_name] = status
+  return result
+
+
+def _filter_chrome_only_builds(bot_to_status_mapping):
+  """Filters chrome-only builds from the system so LKGR doesn't get confused."""
+  return dict((revision_to_bot_name, status)
+              for revision_to_bot_name, status
+              in bot_to_status_mapping.iteritems()
+              if not _is_chrome_only_build(revision_to_bot_name))
+
+
+def _main():
+  dashboard = dashboard_connection.DashboardConnection(constants.CONSUMER_KEY)
+  dashboard.read_required_files(constants.CONSUMER_SECRET_FILE,
+                                constants.ACCESS_TOKEN_FILE)
+
+  bot_to_status_mapping = _download_and_parse_build_status()
+  bot_to_status_mapping = _filter_undesired_bots(bot_to_status_mapping, BOTS)
+  bot_to_status_mapping = _filter_chrome_only_builds(bot_to_status_mapping)
+
+  dashboard.send_post_request(constants.ADD_BUILD_STATUS_DATA_URL,
+                              bot_to_status_mapping)
+
+
+if __name__ == '__main__':
+  _main()
diff --git a/tools/quality_tracking/track_build_status_test.py b/tools/quality_tracking/track_build_status_test.py
new file mode 100644
index 0000000..87a904e
--- /dev/null
+++ b/tools/quality_tracking/track_build_status_test.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+"""Unit test for the build status tracker script."""
+
+
+import copy
+import unittest
+
+import track_build_status
+
+
+NORMAL_BOT_TO_STATUS_MAPPING = {'1455--ChromeOS': '455--OK',
+                                '1455--Chrome': '900--failed',
+                                '1455--Linux32DBG': '344--OK',
+                                '1456--ChromeOS': '456--OK'}
+
+
+class TrackBuildStatusTest(unittest.TestCase):
+
+  def test_that_filter_chrome_only_builds_filter_properly(self):
+    bot_to_status_mapping = copy.deepcopy(NORMAL_BOT_TO_STATUS_MAPPING)
+    bot_to_status_mapping['133445--Chrome'] = '901--OK'
+    bot_to_status_mapping['133441--ChromeBloat'] = '344--OK'
+
+    result = track_build_status._filter_chrome_only_builds(
+        bot_to_status_mapping)
+
+    self.assertEquals(NORMAL_BOT_TO_STATUS_MAPPING, result)
+
+  def test_ensure_filter_chrome_only_builds_doesnt_filter_too_much(self):
+    result = track_build_status._filter_chrome_only_builds(
+        NORMAL_BOT_TO_STATUS_MAPPING)
+
+    self.assertEquals(NORMAL_BOT_TO_STATUS_MAPPING, result)
+
+  def test_get_desired_bots(self):
+    bot_to_status_mapping = copy.deepcopy(NORMAL_BOT_TO_STATUS_MAPPING)
+    desired_bot_names = ['Linux32DBG']
+    result = track_build_status._get_desired_bots(bot_to_status_mapping,
+                                                  desired_bot_names)
+    self.assertEquals(1, len(result))
+    self.assertTrue(desired_bot_names[0] in result.keys()[0])
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/tools/quality_tracking/track_coverage.py b/tools/quality_tracking/track_coverage.py
new file mode 100755
index 0000000..248fad2
--- /dev/null
+++ b/tools/quality_tracking/track_coverage.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python
+#-*- coding: utf-8 -*-
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+"""This script grabs and reports coverage information.
+
+   It grabs coverage information from the latest Linux 32-bit build and
+   pushes it to the coverage tracker, enabling us to track code coverage
+   over time. This script is intended to run on the 32-bit Linux slave.
+
+   This script requires an access.token file in the current directory, as
+   generated by the request_oauth_permission.py script. It also expects a file
+   customer.secret with a single line containing the customer secret. The
+   customer secret is an OAuth concept and is received when one registers the
+   application with the App Engine running the dashboard.
+
+   The script assumes that all coverage data is stored under
+   /home/<build bot user>/www.
+"""
+
+__author__ = 'phoglund@webrtc.org (Patrik Höglund)'
+
+import os
+import re
+import sys
+import time
+
+import constants
+import dashboard_connection
+
+
+class FailedToParseCoverageHtml(Exception):
+  pass
+
+
+class CouldNotFindCoverageDirectory(Exception):
+  pass
+
+
+def _find_latest_build_coverage(www_directory_contents, coverage_www_dir,
+                                directory_prefix):
+  """Finds the most recent coverage directory in the directory listing.
+
+     We assume here that build numbers keep rising and never wrap around.
+
+     Args:
+       www_directory_contents: A list of entries in the coverage directory.
+       coverage_www_dir: The coverage directory on the bot.
+       directory_prefix: Coverage directories have the form <prefix><number>,
+           and the prefix is different on different bots. The prefix is
+           generally the builder name, such as Linux32DBG.
+
+     Returns:
+       The most recent directory name.
+
+     Raises:
+       CouldNotFindCoverageDirectory: if we failed to find coverage data.
+  """
+
+  found_build_numbers = []
+  for entry in www_directory_contents:
+    match = re.match(directory_prefix + '(\d+)', entry)
+    if match is not None:
+      found_build_numbers.append(int(match.group(1)))
+
+  if not found_build_numbers:
+    raise CouldNotFindCoverageDirectory('Error: Found no directories %s* '
+                                        'in directory %s.' %
+                                         (directory_prefix, coverage_www_dir))
+
+  most_recent = max(found_build_numbers)
+  return directory_prefix + str(most_recent)
+
+
+def _grab_coverage_percentage(label, index_html_contents):
+  """Extracts coverage from a LCOV coverage report.
+
+     Grabs coverage by assuming that the label in the coverage HTML report
+     is close to the actual number and that the number is followed by a space
+     and a percentage sign.
+  """
+  match = re.search('<td[^>]*>' + label + '</td>.*?(\d+\.\d) %',
+                    index_html_contents, re.DOTALL)
+  if match is None:
+    raise FailedToParseCoverageHtml('Missing coverage at label "%s".' % label)
+
+  try:
+    return float(match.group(1))
+  except ValueError:
+    raise FailedToParseCoverageHtml('%s is not a float.' % match.group(1))
+
+
+def _report_coverage_to_dashboard(dashboard, line_coverage, function_coverage,
+                                  branch_coverage, report_category):
+  parameters = {'line_coverage': '%f' % line_coverage,
+                'function_coverage': '%f' % function_coverage,
+                'branch_coverage': '%f' % branch_coverage,
+                'report_category': report_category,
+               }
+
+  dashboard.send_post_request(constants.ADD_COVERAGE_DATA_URL, parameters)
+
+
+def _main(report_category, directory_prefix):
+  """Grabs coverage data from disk on a bot and publishes it.
+
+     Args:
+       report_category: The kind of coverage to report. The dashboard
+           application decides what is acceptable here (see
+           dashboard/add_coverage_data.py for more information).
+      directory_prefix: This bot's coverage directory prefix. Generally a bot's
+          coverage directories will have the form <prefix><build number>,
+          like Linux32DBG_345.
+  """
+  dashboard = dashboard_connection.DashboardConnection(constants.CONSUMER_KEY)
+  dashboard.read_required_files(constants.CONSUMER_SECRET_FILE,
+                                constants.ACCESS_TOKEN_FILE)
+
+  coverage_www_dir = constants.BUILD_BOT_COVERAGE_WWW_DIRECTORY
+  www_dir_contents = os.listdir(coverage_www_dir)
+  latest_build_directory = _find_latest_build_coverage(www_dir_contents,
+                                                       coverage_www_dir,
+                                                       directory_prefix)
+
+  index_html_path = os.path.join(coverage_www_dir, latest_build_directory,
+                                 'index.html')
+  index_html_file = open(index_html_path)
+  whole_file = index_html_file.read()
+
+  line_coverage = _grab_coverage_percentage('Lines:', whole_file)
+  function_coverage = _grab_coverage_percentage('Functions:', whole_file)
+  branch_coverage = _grab_coverage_percentage('Branches:', whole_file)
+
+  _report_coverage_to_dashboard(dashboard, line_coverage, function_coverage,
+      branch_coverage, report_category)
+
+
+def _parse_args():
+  if len(sys.argv) != 3:
+    print ('Usage: %s <coverage category> <directory prefix>\n\n'
+           'The coverage category describes the kind of coverage you are '
+           'uploading. Known acceptable values are small_medium_tests and'
+           'large_tests. The directory prefix is what the directories in %s '
+           'are prefixed on this bot (such as Linux32DBG_).' %
+               (sys.argv[0], constants.BUILD_BOT_COVERAGE_WWW_DIRECTORY))
+    return (None, None)
+  return (sys.argv[1], sys.argv[2])
+
+
+if __name__ == '__main__':
+  report_category, directory_prefix = _parse_args()
+  if report_category:
+    _main(report_category, directory_prefix)
+
diff --git a/tools/refactoring/addfileheader.py b/tools/refactoring/addfileheader.py
new file mode 100644
index 0000000..01c8a8b
--- /dev/null
+++ b/tools/refactoring/addfileheader.py
@@ -0,0 +1,163 @@
+#!/usr/bin/env python
+
+import stringmanipulation
+import filemanagement
+import sys
+
+extensions = ['.h','.cc','.c','.cpp']
+
+ignore_these = ['my_ignore_header.h']
+
+if((len(sys.argv) != 2) and (len(sys.argv) != 3)):
+    print 'parameters are: directory [--commit]'
+    quit()
+
+directory = sys.argv[1];
+if(not filemanagement.pathexist(directory)):
+    print 'path ' + directory + ' does not exist'
+    quit()
+
+if((len(sys.argv) == 3) and (sys.argv[2] != '--commit')):
+    print 'parameters are: parent directory extension new extension [--commit]'
+    quit()
+
+commit = False
+if(len(sys.argv) == 3):
+    commit = True
+
+files_to_fix = []
+for extension in extensions:
+    files_to_fix.extend(filemanagement.listallfilesinfolder(directory,\
+                                                       extension))
+
+# Just steal the header from the template
+def fileheaderasstring():
+    template_file_name = 'license_template.txt'
+    if (not filemanagement.fileexist(template_file_name)):
+        print 'File ' + template_file_name + ' not found!'
+        quit()
+    template_file = open(template_file_name,'r')
+    return_string = ''
+    for line in template_file:
+        return_string += line
+    return return_string
+
+# Just steal the header from the template
+def fileheaderasarray():
+    template_file_name = 'license_template.txt'
+    if (not filemanagement.fileexist(template_file_name)):
+        print 'File ' + template_file_name + ' not found!'
+        quit()
+    template_file = open(template_file_name,'r')
+    return_value = []
+    for line in template_file:
+        return_value.append(line)
+    return return_value
+
+
+def findheader(path, file_name):
+    full_file_name = path + file_name
+    if (not filemanagement.fileexist(full_file_name)):
+        print 'File ' + file_name + ' not found!'
+        print 'Unexpected error!'
+        quit()
+    file_handle = open(full_file_name)
+    template_file_content = fileheaderasarray()
+    compare_content = []
+    # load the same number of lines from file as the fileheader
+    for index in range(len(template_file_content)):
+        line = file_handle.readline()
+        if (line == ''):
+            return False
+        compare_content.append(line)
+
+    while (True):
+        found = True
+        for index in range(len(template_file_content)):
+            line1 = template_file_content[index]
+            line2 = compare_content[index]
+            if(line1 != line2):
+                found = False
+                break
+        if (found):
+            return True
+        compare_content = compare_content[1:len(compare_content)]
+        line = file_handle.readline()
+        if (line == ''):
+            return False
+        compare_content.append(line)
+    return False
+
+# Used to store temporary result before flushing to real file when finished
+def temporaryfilename(old_file_name):
+    return old_file_name + '.deleteme'
+
+def updatefile(path, old_file_name):
+    full_old_file_name = path + old_file_name
+    if (not filemanagement.fileexist(full_old_file_name)):
+        print 'File ' + full_old_file_name + ' is not found.'
+        print 'Should not happen! Ever!'
+        quit()
+
+    full_temporary_file_name = path + temporaryfilename(old_file_name)
+
+    # Make sure that the files are closed by putting them out of scope
+    old_file = open(full_old_file_name,'r')
+    temporary_file = open(full_temporary_file_name,'w')
+
+    temporary_file.writelines(fileheaderasstring())
+    remove_whitespaces = True
+    for line in old_file:
+        if (remove_whitespaces and (len(line.split()) == 0)):
+            continue
+        else:
+            remove_whitespaces = False
+        temporary_file.writelines(line)
+    old_file.close()
+    temporary_file.close()
+
+    filemanagement.copyfile(full_old_file_name,full_temporary_file_name)
+    filemanagement.deletefile(full_temporary_file_name)
+
+
+failed_files = []
+skipped_files = []
+for index in range(len(files_to_fix)):
+    if(commit):
+        print (100*index)/len(files_to_fix)
+    path_dir = files_to_fix[index][0]
+    filename = files_to_fix[index][1]
+    is_ignore = False
+    for ignore_names in ignore_these:
+        if(filename == ignore_names):
+            is_ignore = True
+            break
+    if(is_ignore):
+        continue
+
+# Let the word copyright be our sanity, i.e. make sure there is only one
+# copy right occurance or report that there will be no change
+    if(filemanagement.findstringinfile(path_dir,filename,'Copyright') or
+        filemanagement.findstringinfile(path_dir,filename,'copyright') or
+        filemanagement.findstringinfile(path_dir,filename,'COPYRIGHT')):
+        if(findheader(path_dir,filename)):
+            skipped_files.append(path_dir + filename)
+        else:
+            failed_files.append(path_dir + filename)
+        continue
+
+    if (not commit):
+        print 'File ' + path_dir + filename + ' will be updated'
+        continue
+    updatefile(path_dir,filename)
+
+tense = 'will be'
+if (commit):
+    tense = 'has been'
+if (len(skipped_files) > 0):
+    print str(len(skipped_files)) + ' file(s) ' + tense + ' skipped since they already have the correct header'
+
+if (len(failed_files) > 0):
+    print 'Following files seem to have an invalid file header:'
+for line in failed_files:
+    print line
diff --git a/tools/refactoring/filemanagement.py b/tools/refactoring/filemanagement.py
new file mode 100644
index 0000000..4ff64ce
--- /dev/null
+++ b/tools/refactoring/filemanagement.py
@@ -0,0 +1,72 @@
+import fnmatch
+import os
+import stringmanipulation
+
+def fileexist( file_name ):
+    return os.path.isfile(file_name)
+
+def pathexist( path ):
+    return os.path.exists(path)
+
+def fixpath( path ):
+    return_value = path
+    if( return_value[len(return_value) - 1] != '/'):
+        return_value = return_value + '/'
+    return return_value
+
+def listallfilesinfolder( path, extension ):
+    matches = []
+    signature = '*' + extension
+    for root, dirnames, filenames in os.walk(path):
+        for filename in fnmatch.filter(filenames, signature):
+            matches.append([fixpath(root), filename])
+    return matches
+
+def copyfile(to_file, from_file):
+    if(not fileexist(from_file)):
+        return
+    command = 'cp -f ' + from_file + ' ' + to_file
+    os.system(command)
+    #print command
+
+def deletefile(file_to_delete):
+    if(not fileexist(file_to_delete)):
+        return
+    os.system('rm ' + file_to_delete)
+
+# very ugly but works, so keep for now
+def findstringinfile(path,file_name,search_string):
+    command = 'grep \'' + search_string + '\' ' + path + file_name + ' > deleteme.txt'
+    return_value = os.system(command)
+#    print command
+    return (return_value == 0)
+
+def replacestringinfolder( path, old_string, new_string, extension ):
+    if(not stringmanipulation.isextension(extension)):
+        print 'failed to search and replace'
+        return
+    if(len(old_string) == 0):
+        print 'failed to search and replace'
+        return
+    find_command = 'ls '+ path + '/*' + extension
+    sed_command = 'sed -i \'s/' + old_string + '/' + new_string +\
+                     '/g\' *' + extension
+    command_string = find_command + ' | xargs ' + sed_command + ' 2> deleteme.txt'
+    os.system(command_string)
+    #print command_string
+
+#find ./ -name "*.h" -type f  | xargs -P 0 sed -i 's/process_thread_wrapper.h/process_thread.h/g' *.h deleteme.txt
+def replacestringinallsubfolders( old_string, new_string, extension):
+    if(not stringmanipulation.isextension(extension)):
+        print 'failed to search and replace'
+        return
+    if(len(old_string) == 0):
+        print 'failed to search and replace'
+        return
+
+    find_command = 'find ./ -name \"*' + extension + '\" -type f'
+    sed_command = 'sed -i \'s/' + old_string + '/' + new_string +\
+                     '/g\' *' + extension
+    command_string = find_command + ' | xargs -P 0 ' + sed_command + ' 2> deleteme.txt'
+    os.system(command_string)
+    #print command_string
diff --git a/tools/refactoring/fixincludeguards.py b/tools/refactoring/fixincludeguards.py
new file mode 100644
index 0000000..0b56355
--- /dev/null
+++ b/tools/refactoring/fixincludeguards.py
@@ -0,0 +1,145 @@
+#!/usr/bin/env python
+
+import stringmanipulation
+import filemanagement
+import sys
+
+extensions = ['.h']
+
+ignore_these = ['my_ignore_header.h']
+
+if((len(sys.argv) != 2) and (len(sys.argv) != 3)):
+    print 'parameters are: directory [--commit]'
+    quit()
+
+directory = sys.argv[1];
+if(not filemanagement.pathexist(directory)):
+    print 'path ' + directory + ' does not exist'
+    quit()
+
+if((len(sys.argv) == 3) and (sys.argv[2] != '--commit')):
+    print 'parameters are: parent directory extension new extension [--commit]'
+    quit()
+
+commit = False
+if(len(sys.argv) == 3):
+    commit = True
+
+for extension in extensions:
+    files_to_fix = filemanagement.listallfilesinfolder(directory,\
+                                                       extension)
+
+def buildincludeguardname(path,filename):
+    full_file_name = 'WEBRTC_' + path + filename
+    full_file_name = full_file_name.upper()
+    full_file_name = stringmanipulation.replaceoccurances(full_file_name, '/', '_')
+    full_file_name = stringmanipulation.replaceoccurances(full_file_name, '\\', '_')
+    full_file_name = stringmanipulation.replaceoccurances(full_file_name, '.', '_')
+    full_file_name += '_'
+    return full_file_name
+
+def buildnewincludeguardset(path,filename):
+    include_guard_name = buildincludeguardname(path,filename)
+    if(include_guard_name == ''):
+        return []
+    return_value = []
+    return_value.append('#ifndef ' + include_guard_name)
+    return_value.append('#define ' + include_guard_name)
+    return_value.append(include_guard_name)
+    return return_value
+
+def printincludeguardset(include_guard_set):
+    print 'First line: ' + include_guard_set[0]
+    print 'Second line: ' + include_guard_set[1]
+    print 'Last line: ' + include_guard_set[2]
+    return
+
+include_guard_begin_identifier = ['#ifndef', '#if !defined']
+include_guard_second_identifier = ['#define']
+def findincludeguardidentifier(line):
+    for begin_identifier in include_guard_begin_identifier:
+        line = stringmanipulation.removealloccurances(line,begin_identifier)
+    for second_identifier in include_guard_begin_identifier:
+        line = stringmanipulation.removealloccurances(line,second_identifier)
+    removed_prefix = [True,'']
+    line = stringmanipulation.whitespacestoonespace(line)
+    while(removed_prefix[0]):
+        removed_prefix = stringmanipulation.removeprefix(line,' ')
+        line = removed_prefix[1]
+    line = stringmanipulation.removealloccurances(line,'(')
+    if(line == ''):
+        return ''
+    word_pos = stringmanipulation.getword(line,0)
+    return_value = line[0:word_pos[1]]
+    return_value = return_value.rstrip('\r\n')
+    return return_value
+
+def findoldincludeguardset(path,filename):
+    return_value = []
+    full_file_name = path + filename
+    file_pointer = open(full_file_name,'r')
+    include_guard_name = ''
+    for line in file_pointer:
+        if (include_guard_name == ''):
+            for compare_string in include_guard_begin_identifier:
+                if (stringmanipulation.issubstring(compare_string, line) != -1):
+                    include_guard_name = findincludeguardidentifier(line)
+                    if (include_guard_name == ''):
+                        break
+                    line = line.rstrip('\r\n')
+                    return_value.append(line)
+                    break
+        else:
+            for compare_string in include_guard_second_identifier:
+                if (stringmanipulation.issubstring(compare_string, line) != -1):
+                    if (stringmanipulation.issubstring(include_guard_name, line) != -1):
+                        line = line.rstrip('\r\n')
+                        return_value.append(line)
+                        return_value.append(include_guard_name)
+                        return return_value
+            include_guard_name = ''
+            return_value = []
+    return []
+
+failed_files = []
+for index in range(len(files_to_fix)):
+    if(commit):
+        print (100*index)/len(files_to_fix)
+    path_dir = files_to_fix[index][0]
+    filename = files_to_fix[index][1]
+    is_ignore = False
+    for ignore_names in ignore_these:
+        if(filename == ignore_names):
+            is_ignore = True
+            break
+    if(is_ignore):
+        continue
+    old_include_guard_set = findoldincludeguardset(path_dir,filename)
+    if (len(old_include_guard_set) != 3) :
+        failed_files.append('unable to figure out the include guards for ' + filename)
+        continue
+
+    new_include_guard_set = buildnewincludeguardset(path_dir,filename)
+    if (len(new_include_guard_set) != 3) :
+        failed_files.append('unable to figure out new the include guards for ' + filename)
+        continue
+
+    if(not commit):
+        print 'old guard: ' + old_include_guard_set[2]
+        print 'new guard: ' + new_include_guard_set[2]
+        continue
+
+    for index in range(2):
+        # enough to only replace for file. However, no function for that
+        for extension in extensions:
+            filemanagement.replacestringinfolder(path_dir,old_include_guard_set[index],new_include_guard_set[index],extension)
+    # special case for last to avoid complications
+    for extension in extensions:
+        filemanagement.replacestringinfolder(path_dir,' ' + old_include_guard_set[2],' ' + new_include_guard_set[2],extension)
+        filemanagement.replacestringinfolder(path_dir,'\\/\\/' + old_include_guard_set[2],'\\/\\/ ' + new_include_guard_set[2],extension)
+
+
+if(len(failed_files) > 0):
+    print 'Following failures should be investigated manually:'
+for line in failed_files:
+    print line
diff --git a/tools/refactoring/fixnames.py b/tools/refactoring/fixnames.py
new file mode 100644
index 0000000..15381e3
--- /dev/null
+++ b/tools/refactoring/fixnames.py
@@ -0,0 +1,387 @@
+#!/usr/bin/env python
+
+import stringmanipulation
+import filemanagement
+import p4commands
+import sys
+
+name_space_to_ignore = 'GIPS::'
+#only allow one prefix to be removed since allowing multiple will complicate
+# things
+prefix_to_filter = 'gips'
+#words_to_filter = ['Module']
+# it might be dangerous to remove GIPS but keep it default
+words_to_filter = ['Module','GIPS']
+
+# This script finds all the words that should be replaced in an h-file. Once
+# all words that should be replaced are found it does a global search and
+# replace.
+
+extensions_to_edit = ['.cpp','.cc','.h']
+
+#line = '    ~hiGIPSCriticalSectionScoped()'
+#print line
+#position = stringmanipulation.getword(line,11)
+#old_word = line[position[0]:position[0]+position[1]]
+#result = stringmanipulation.removealloccurances(old_word,'gips')
+#new_word = result
+#print old_word
+#print position[0]
+#print position[0]+position[1]
+#print new_word
+#quit()
+
+# Ignore whole line if any item in this table is a substring of the line
+do_not_replace_line_table = []
+do_not_replace_line_table.append('namespace GIPS')
+
+# [old_string,new_string]
+# List of things to remove that are static:
+manual_replace_table = []
+#manual_replace_table.append(['using namespace GIPS;',''])
+#manual_replace_table.append(['CreateGipsEvent','CreateEvent'])
+#manual_replace_table.append(['CreateGIPSTrace','CreateTrace'])
+#manual_replace_table.append(['ReturnGIPSTrace','ReturnTrace'])
+#manual_replace_table.append(['CreateGIPSFile','CreateFile'])
+replace_table = manual_replace_table
+#replace_table.append(['GIPS::','webrtc::'])
+# List of things to not remove that are static, i.e. exceptions:
+# don't replace any of the GIPS_Words since that will affect all files
+# do that in a separate script!
+do_not_replace_table = []
+do_not_replace_table.append('GIPS_CipherTypes')
+do_not_replace_table.append('GIPS_AuthenticationTypes')
+do_not_replace_table.append('GIPS_SecurityLevels')
+do_not_replace_table.append('GIPS_encryption')
+do_not_replace_table.append('~GIPS_encryption')
+do_not_replace_table.append('GIPS_transport')
+do_not_replace_table.append('~GIPS_transport')
+do_not_replace_table.append('GIPSTraceCallback')
+do_not_replace_table.append('~GIPSTraceCallback')
+do_not_replace_table.append('GIPS_RTP_CSRC_SIZE')
+do_not_replace_table.append('GIPS_RTPDirections')
+do_not_replace_table.append('GIPS_RTP_INCOMING')
+do_not_replace_table.append('GIPS_RTP_OUTGOING')
+do_not_replace_table.append('GIPSFrameType')
+do_not_replace_table.append('GIPS_FRAME_EMPTY')
+do_not_replace_table.append('GIPS_AUDIO_FRAME_SPEECH')
+do_not_replace_table.append('GIPS_AUDIO_FRAME_CN')
+do_not_replace_table.append('GIPS_VIDEO_FRAME_KEY')
+do_not_replace_table.append('GIPS_VIDEO_FRAME_DELTA')
+do_not_replace_table.append('GIPS_VIDEO_FRAME_GOLDEN')
+do_not_replace_table.append('GIPS_VIDEO_FRAME_DELTA_KEY')
+do_not_replace_table.append('GIPS_PacketType')
+do_not_replace_table.append('GIPS_PACKET_TYPE_RTP')
+do_not_replace_table.append('GIPS_PACKET_TYPE_KEEP_ALIVE')
+do_not_replace_table.append('GIPS_AudioLayers')
+do_not_replace_table.append('GIPS_AUDIO_PLATFORM_DEFAULT')
+do_not_replace_table.append('GIPS_AUDIO_WINDOWS_WAVE')
+do_not_replace_table.append('GIPS_AUDIO_WINDOWS_CORE')
+do_not_replace_table.append('GIPS_AUDIO_LINUX_ALSA')
+do_not_replace_table.append('GIPS_AUDIO_LINUX_PULSE')
+do_not_replace_table.append('GIPS_AUDIO_FORMAT')
+do_not_replace_table.append('GIPS_PCM_16_16KHZ')
+do_not_replace_table.append('GIPS_PCM_16_8KHZ')
+do_not_replace_table.append('GIPS_G729')
+do_not_replace_table.append('GIPSAMRmode')
+do_not_replace_table.append('GIPS_RFC3267_BWEFFICIENT')
+do_not_replace_table.append('GIPS_RFC3267_OCTETALIGNED')
+do_not_replace_table.append('GIPS_RFC3267_FILESTORAGE')
+do_not_replace_table.append('GIPS_NCModes')
+do_not_replace_table.append('GIPS_NC_OFF')
+do_not_replace_table.append('GIPS_NC_MILD')
+do_not_replace_table.append('GIPS_NC_MODERATE')
+do_not_replace_table.append('GIPS_NC_AGGRESSIVE')
+do_not_replace_table.append('GIPS_NC_VERY_AGGRESSIVE')
+do_not_replace_table.append('GIPS_AGCModes')
+do_not_replace_table.append('GIPS_AGC_OFF')
+do_not_replace_table.append('GIPS_AGC_ANALOG')
+do_not_replace_table.append('GIPS_AGC_DIGITAL')
+do_not_replace_table.append('GIPS_AGC_STANDALONE_DIG')
+do_not_replace_table.append('GIPS_ECModes')
+do_not_replace_table.append('GIPS_EC_UNCHANGED')
+do_not_replace_table.append('GIPS_EC_DEFAULT')
+do_not_replace_table.append('GIPS_EC_CONFERENCE')
+do_not_replace_table.append('GIPS_EC_AEC')
+do_not_replace_table.append('GIPS_EC_AES')
+do_not_replace_table.append('GIPS_EC_AECM')
+do_not_replace_table.append('GIPS_EC_NEC_IAD')
+do_not_replace_table.append('GIPS_AESModes')
+do_not_replace_table.append('GIPS_AES_DEFAULT')
+do_not_replace_table.append('GIPS_AES_NORMAL')
+do_not_replace_table.append('GIPS_AES_HIGH')
+do_not_replace_table.append('GIPS_AES_ATTENUATE')
+do_not_replace_table.append('GIPS_AES_NORMAL_SOFT_TRANS')
+do_not_replace_table.append('GIPS_AES_HIGH_SOFT_TRANS')
+do_not_replace_table.append('GIPS_AES_ATTENUATE_SOFT_TRANS')
+do_not_replace_table.append('GIPS_AECMModes')
+do_not_replace_table.append('GIPS_AECM_QUIET_EARPIECE_OR_HEADSET')
+do_not_replace_table.append('GIPS_AECM_EARPIECE')
+do_not_replace_table.append('GIPS_AECM_LOUD_EARPIECE')
+do_not_replace_table.append('GIPS_AECM_SPEAKERPHONE')
+do_not_replace_table.append('GIPS_AECM_LOUD_SPEAKERPHONE')
+do_not_replace_table.append('AECM_LOUD_SPEAKERPHONE')
+do_not_replace_table.append('GIPS_VAD_CONVENTIONAL')
+do_not_replace_table.append('GIPS_VAD_AGGRESSIVE_LOW')
+do_not_replace_table.append('GIPS_VAD_AGGRESSIVE_MID')
+do_not_replace_table.append('GIPS_VAD_AGGRESSIVE_HIGH')
+do_not_replace_table.append('GIPS_NetEQModes')
+do_not_replace_table.append('GIPS_NETEQ_DEFAULT')
+do_not_replace_table.append('GIPS_NETEQ_STREAMING')
+do_not_replace_table.append('GIPS_NETEQ_FAX')
+do_not_replace_table.append('GIPS_NetEQBGNModes')
+do_not_replace_table.append('GIPS_BGN_ON')
+do_not_replace_table.append('GIPS_BGN_FADE')
+do_not_replace_table.append('GIPS_BGN_OFF')
+do_not_replace_table.append('GIPS_OnHoldModes')
+do_not_replace_table.append('GIPS_HOLD_SEND_AND_PLAY')
+do_not_replace_table.append('GIPS_HOLD_SEND_ONLY')
+do_not_replace_table.append('GIPS_HOLD_PLAY_ONLY')
+do_not_replace_table.append('GIPS_PayloadFrequencies')
+do_not_replace_table.append('GIPS_FREQ_8000_HZ')
+do_not_replace_table.append('GIPS_FREQ_16000_HZ')
+do_not_replace_table.append('GIPS_FREQ_32000_HZ')
+do_not_replace_table.append('GIPS_TelephoneEventDetectionMethods')
+do_not_replace_table.append('GIPS_IN_BAND')
+do_not_replace_table.append('GIPS_OUT_OF_BAND')
+do_not_replace_table.append('GIPS_IN_AND_OUT_OF_BAND')
+do_not_replace_table.append('GIPS_ProcessingTypes')
+do_not_replace_table.append('GIPS_PLAYBACK_PER_CHANNEL')
+do_not_replace_table.append('GIPS_PLAYBACK_ALL_CHANNELS_MIXED')
+do_not_replace_table.append('GIPS_RECORDING_PER_CHANNEL')
+do_not_replace_table.append('GIPS_RECORDING_ALL_CHANNELS_MIXED')
+do_not_replace_table.append('GIPS_StereoChannel')
+do_not_replace_table.append('GIPS_StereoLeft')
+do_not_replace_table.append('GIPS_StereoRight')
+do_not_replace_table.append('GIPS_StereoBoth')
+do_not_replace_table.append('GIPS_stat_val')
+do_not_replace_table.append('GIPS_P56_statistics')
+do_not_replace_table.append('GIPS_echo_statistics')
+do_not_replace_table.append('GIPS_NetworkStatistics')
+do_not_replace_table.append('GIPS_JitterStatistics')
+do_not_replace_table.append('GIPSVideoRawType')
+do_not_replace_table.append('GIPS_VIDEO_I420')
+do_not_replace_table.append('GIPS_VIDEO_YV12')
+do_not_replace_table.append('GIPS_VIDEO_YUY2')
+do_not_replace_table.append('GIPS_VIDEO_UYVY')
+do_not_replace_table.append('GIPS_VIDEO_IYUV')
+do_not_replace_table.append('GIPS_VIDEO_ARGB')
+do_not_replace_table.append('GIPS_VIDEO_RGB24')
+do_not_replace_table.append('GIPS_VIDEO_RGB565')
+do_not_replace_table.append('GIPS_VIDEO_ARGB4444')
+do_not_replace_table.append('GIPS_VIDEO_ARGB1555')
+do_not_replace_table.append('GIPS_VIDEO_MJPG')
+do_not_replace_table.append('GIPS_VIDEO_NV12')
+do_not_replace_table.append('GIPS_VIDEO_NV21')
+do_not_replace_table.append('GIPS_VIDEO_Unknown')
+do_not_replace_table.append('GIPSVideoLayouts')
+do_not_replace_table.append('GIPS_LAYOUT_NONE')
+do_not_replace_table.append('GIPS_LAYOUT_DEFAULT')
+do_not_replace_table.append('GIPS_LAYOUT_ADVANCED1')
+do_not_replace_table.append('GIPS_LAYOUT_ADVANCED2')
+do_not_replace_table.append('GIPS_LAYOUT_ADVANCED3')
+do_not_replace_table.append('GIPS_LAYOUT_ADVANCED4')
+do_not_replace_table.append('GIPS_LAYOUT_FULL')
+do_not_replace_table.append('KGIPSConfigParameterSize')
+do_not_replace_table.append('KGIPSPayloadNameSize')
+do_not_replace_table.append('GIPSVideoCodecH263')
+do_not_replace_table.append('GIPSVideoH264Packetization')
+do_not_replace_table.append('GIPS_H264_SingleMode')
+do_not_replace_table.append('GIPS_H264_NonInterleavedMode')
+do_not_replace_table.append('GIPSVideoCodecComplexity')
+do_not_replace_table.append('GIPSVideoCodec_Complexity_Normal')
+do_not_replace_table.append('GIPSVideoCodec_Comlexity_High')
+do_not_replace_table.append('GIPSVideoCodec_Comlexity_Higher')
+do_not_replace_table.append('GIPSVideoCodec_Comlexity_Max')
+do_not_replace_table.append('GIPSVideoCodecH264')
+do_not_replace_table.append('GIPSVideoH264Packetization')
+do_not_replace_table.append('GIPSVideoCodecComplexity')
+do_not_replace_table.append('GIPSVideoCodecProfile')
+do_not_replace_table.append('KGIPSConfigParameterSize')
+do_not_replace_table.append('KGIPSMaxSVCLayers')
+do_not_replace_table.append('GIPSVideoH264LayerTypes')
+do_not_replace_table.append('GIPS_H264SVC_Base')
+do_not_replace_table.append('GIPS_H264SVC_Extend_2X2')
+do_not_replace_table.append('GIPS_H264SVC_Extend_1X1')
+do_not_replace_table.append('GIPS_H264SVC_Extend_MGS')
+do_not_replace_table.append('GIPS_H264SVC_Extend_1_5')
+do_not_replace_table.append('GIPS_H264SVC_Extend_Custom')
+do_not_replace_table.append('GIPSVideoH264LayersProperties')
+do_not_replace_table.append('GIPSVideoH264LayerTypes')
+do_not_replace_table.append('GIPSVideoH264Layers')
+do_not_replace_table.append('GIPSVideoH264LayersProperties')
+do_not_replace_table.append('GIPSVideoCodecH264SVC')
+do_not_replace_table.append('GIPSVideoCodecComplexity')
+do_not_replace_table.append('GIPSVideoCodecProfile')
+do_not_replace_table.append('GIPSVideoH264Layers')
+do_not_replace_table.append('GIPSVideoCodecVP8')
+do_not_replace_table.append('GIPSVideoCodecComplexity')
+do_not_replace_table.append('GIPSVideoCodecMPEG')
+do_not_replace_table.append('GIPSVideoCodecGeneric')
+do_not_replace_table.append('GIPSVideoCodecType')
+do_not_replace_table.append('GIPSVideoCodec_H263')
+do_not_replace_table.append('GIPSVideoCodec_H264')
+do_not_replace_table.append('GIPSVideoCodec_H264SVC')
+do_not_replace_table.append('GIPSVideoCodec_VP8')
+do_not_replace_table.append('GIPSVideoCodec_MPEG4')
+do_not_replace_table.append('GIPSVideoCodec_I420')
+do_not_replace_table.append('GIPSVideoCodec_RED')
+do_not_replace_table.append('GIPSVideoCodec_ULPFEC')
+do_not_replace_table.append('GIPSVideoCodec_Unknown')
+do_not_replace_table.append('GIPSVideoCodecUnion')
+do_not_replace_table.append('GIPSVideoCodecH263')
+do_not_replace_table.append('GIPSVideoCodecH264')
+do_not_replace_table.append('GIPSVideoCodecH264SVC')
+do_not_replace_table.append('GIPSVideoCodecVP8')
+do_not_replace_table.append('GIPSVideoCodecMPEG4')
+do_not_replace_table.append('GIPSVideoCodecGeneric')
+do_not_replace_table.append('GIPSVideoCodec')
+do_not_replace_table.append('GIPSVideoCodecType')
+do_not_replace_table.append('GIPSVideoCodecUnion')
+do_not_replace_table.append('GIPSAudioFrame')
+do_not_replace_table.append('GIPS_CodecInst')
+do_not_replace_table.append('GIPS_FileFormats')
+do_not_replace_table.append('GIPSTickTime')
+do_not_replace_table.append('GIPS_Word64')
+do_not_replace_table.append('GIPS_UWord64')
+do_not_replace_table.append('GIPS_Word32')
+do_not_replace_table.append('GIPS_UWord32')
+do_not_replace_table.append('GIPS_Word16')
+do_not_replace_table.append('GIPS_UWord16')
+do_not_replace_table.append('GIPS_Word8')
+do_not_replace_table.append('GIPS_UWord8')
+
+if((len(sys.argv) != 2) and (len(sys.argv) != 3)):
+    print 'parameters are: parent directory [--commit]'
+    quit()
+
+if((len(sys.argv) == 3) and (sys.argv[2] != '--commit')):
+    print 'parameters are: parent directory [--commit]'
+    quit()
+
+commit = (len(sys.argv) == 3)
+
+directory = sys.argv[1];
+if(not filemanagement.pathexist(directory)):
+    print 'path ' + directory + ' does not exist'
+    quit()
+
+# APIs are all in h-files
+extension = '.h'
+
+# All h-files
+files_to_modify = filemanagement.listallfilesinfolder(directory,\
+                                                      extension)
+
+def isinmanualremovetable( compare_word ):
+    for old_word, new_word in manual_replace_table:
+        if(old_word == compare_word):
+            return True
+    return False
+
+# Begin
+# This function looks at each line and decides which words should be replaced
+# that is this is the only part of the script that you will ever want to change!
+def findstringstoreplace(line):
+    original_line = line
+# Dont replace compiler directives
+    if(line[0] == '#'):
+        return []
+# Dont allow global removal of namespace gips since it is very intrusive
+    for sub_string_compare in do_not_replace_line_table:
+        index = stringmanipulation.issubstring(line,sub_string_compare)
+        if(index != -1):
+            return []
+
+    return_value = []
+
+    line = stringmanipulation.removeccomment(line)
+    line = stringmanipulation.whitespacestoonespace(line)
+    if(len(line) == 0):
+        return []
+    if(line[0] == '*'):
+        return []
+    index = stringmanipulation.issubstring(line,prefix_to_filter)
+    while index >= 0:
+        dont_store_hit = False
+        word_position = stringmanipulation.getword(line, index)
+        start_of_word = word_position[0]
+        size_of_word = word_position[1]
+        end_of_word = start_of_word + size_of_word
+        old_word = line[start_of_word:end_of_word]
+        if(isinmanualremovetable(old_word)):
+            dont_store_hit = True
+        if((end_of_word + 2 < len(line)) and\
+           name_space_to_ignore == line[start_of_word:end_of_word+2]):
+            dont_store_hit = True
+
+        result = stringmanipulation.removeprefix(old_word,prefix_to_filter)
+        new_word = result[1]
+        for word_to_filter in words_to_filter:
+            new_word = stringmanipulation.removealloccurances(new_word,word_to_filter)
+        result = stringmanipulation.removeprefix(new_word,'_')
+        new_word = result[1]
+        new_word = stringmanipulation.fixabbreviations(new_word)
+        new_word = stringmanipulation.removealloccurances(new_word,'_')
+        if(not dont_store_hit):
+            return_value.append([old_word,new_word])
+# remove the word we found from the string so we dont find it again
+        line = line[0:start_of_word] + line[end_of_word:len(line)]
+        index = stringmanipulation.issubstring(line,'GIPS')
+
+    return return_value
+# End
+
+# loop through all files
+for path, file_name in files_to_modify:
+#    if(file_name != 'GIPSTickUtil.h'):
+#        continue
+    full_file_name = path + file_name
+    file_pointer = open(full_file_name,'r')
+#    print file_name
+#loop through all lines
+    for line in file_pointer:
+#        print line
+        local_replace_string = findstringstoreplace(line)
+        #print local_replace_string
+        if(len(local_replace_string) != 0):
+            replace_table.extend(local_replace_string)
+
+
+# we have built our replace table now
+replace_table = stringmanipulation.removeduplicates( replace_table )
+replace_table = stringmanipulation.ordertablesizefirst( replace_table )
+replace_table = stringmanipulation.complement(replace_table,\
+                                              do_not_replace_table)
+
+def replaceoriginal( path,my_table ):
+    size_of_table = len(my_table)
+    for index in range(len(my_table)):
+        old_name = my_table[index][0]
+        new_name = my_table[index][1]
+        filemanagement.replacestringinfolder(path, old_name, new_name,\
+                                             ".h")
+        print (100*index) / (size_of_table*2)
+
+def replaceall( my_table, extension_list ):
+    size_of_table = len(my_table)
+    for index in range(len(my_table)):
+        old_name = my_table[index][0]
+        new_name = my_table[index][1]
+        new_name = new_name
+        for extension in extensions_to_edit:
+            filemanagement.replacestringinallsubfolders(old_name, new_name,
+                                                        extension)
+        print 100*(size_of_table + index) / (size_of_table*2)
+
+
+if(commit):
+    print 'commiting'
+    replace_table = stringmanipulation.removenochange(replace_table)
+    p4commands.checkoutallfiles()
+    replaceoriginal(directory,replace_table)
+    replaceall(replace_table,extensions_to_edit)
+    p4commands.revertunchangedfiles()
+else:
+    for old_name, new_name in replace_table:
+        print 'Going to replace [' + old_name + '] with [' + new_name + ']'
diff --git a/tools/refactoring/integratefiles.py b/tools/refactoring/integratefiles.py
new file mode 100644
index 0000000..c5cc892
--- /dev/null
+++ b/tools/refactoring/integratefiles.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python
+
+import stringmanipulation
+import filemanagement
+import p4commands
+import sys
+
+extensions = ['.h', '.cpp', '.cc', '.gyp']
+
+ignore_these = ['list_no_stl.h','map_no_stl.h','constructor_magic.h']
+
+exceptions = [
+['GIPSRWLock.h','rw_lock.h'],
+['GIPSCriticalsection.h','critical_section.h'],
+]
+
+if((len(sys.argv) != 4) and (len(sys.argv) != 5)):
+    print 'parameters are: parent directory extension new extension [--commit]'
+    quit()
+
+directory = sys.argv[1];
+if(not filemanagement.pathexist(directory)):
+    print 'path ' + directory + ' does not exist'
+    quit()
+
+old_extension = sys.argv[2]
+if(not stringmanipulation.isextension(old_extension)):
+    print old_extension + ' is not a valid extension'
+    quit()
+
+new_extension = sys.argv[3]
+if(not stringmanipulation.isextension(new_extension)):
+    print new_extension + ' is not a valid extension'
+    quit()
+
+if((len(sys.argv) == 5) and (sys.argv[4] != '--commit')):
+    print 'parameters are: parent directory extension new extension [--commit]'
+    quit()
+
+commit = False
+if(len(sys.argv) == 5):
+    commit = True
+
+files_to_integrate = filemanagement.listallfilesinfolder(directory,\
+                                                         old_extension)
+
+if(commit):
+    p4commands.checkoutallfiles()
+for index in range(len(files_to_integrate)):
+    if(commit):
+        print (100*index)/len(files_to_integrate)
+    path_dir = files_to_integrate[index][0]
+    filename = files_to_integrate[index][1]
+    is_ignore = False
+    for ignore_names in ignore_these:
+        if(filename == ignore_names):
+            is_ignore = True
+            break
+    if(is_ignore):
+        continue
+
+    new_file_name = ''
+    is_exception = False
+    for exception_name,exception_name_new in exceptions:
+        if(filename == exception_name):
+            is_exception = True
+            new_file_name = exception_name_new
+            break
+
+    if(not is_exception):
+        new_file_name = filename
+
+        new_file_name = stringmanipulation.removeallprefix(new_file_name,\
+                                                       'gips')
+        new_file_name = stringmanipulation.removealloccurances(new_file_name,\
+                                                       'module')
+        new_file_name = stringmanipulation.changeextension(new_file_name,\
+                                           old_extension,\
+                                           new_extension)
+        new_file_name = stringmanipulation.fixabbreviations( new_file_name )
+        new_file_name = stringmanipulation.lowercasewithunderscore(new_file_name)
+    if(not commit):
+        print 'File ' + filename + ' will be replaced with ' + new_file_name
+        continue
+    full_new_file_name = path_dir + new_file_name
+    full_old_file_name = path_dir + filename
+    if(full_new_file_name != full_old_file_name):
+        p4commands.integratefile(full_old_file_name,full_new_file_name)
+    else:
+        print 'skipping ' + new_file_name + ' due to no change'
+    for extension in extensions:
+        print 'replacing ' + filename
+        if (extension == ".gyp"):
+            filemanagement.replacestringinallsubfolders(
+                filename,new_file_name,extension)
+        else:
+            filemanagement.replacestringinallsubfolders(
+                '\"' + filename + '\"', '\"' + new_file_name + '\"', extension)
+if(commit):
+    p4commands.revertunchangedfiles()
diff --git a/tools/refactoring/p4commands.py b/tools/refactoring/p4commands.py
new file mode 100644
index 0000000..71ac31b
--- /dev/null
+++ b/tools/refactoring/p4commands.py
@@ -0,0 +1,31 @@
+import os
+import filemanagement
+
+# checks out entire p4 repository
+def checkoutallfiles():
+    os.system('p4 edit //depotGoogle/...')
+    return
+
+# reverts all unchanged files, this is completely innoculus
+def revertunchangedfiles():
+    os.system('p4 revert -a //depotGoogle/...')
+    return
+
+def integratefile( old_name, new_name):
+    if(old_name == new_name):
+        return
+    if(not filemanagement.fileexist(old_name)):
+        return
+    integrate_command = 'p4 integrate -o -f ' +\
+                        old_name +\
+                        ' ' +\
+                        new_name +\
+                        ' > p4summary.txt 2> error.txt'
+    os.system(integrate_command)
+    #print integrate_command
+    delete_command = 'p4 delete -c default ' +\
+                     old_name +\
+                     ' > p4summary.txt 2> error.txt'
+    os.system(delete_command)
+    #print delete_command
+    return
diff --git a/tools/refactoring/removetrace.py b/tools/refactoring/removetrace.py
new file mode 100644
index 0000000..43c622d
--- /dev/null
+++ b/tools/refactoring/removetrace.py
@@ -0,0 +1,161 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# NOTE: This is a hack which disobeys a number of conventions and best
+# practices. It's here just to be easily shared. If it's to remain in the
+# repository it should be refactored.
+
+#!/usr/bin/env python
+
+import stringmanipulation
+import filemanagement
+import sys
+
+trace_remove_key_word = 'kTraceModuleCall'
+
+if((len(sys.argv) != 2) and (len(sys.argv) != 3)):
+    print 'parameters are: parent directory [--commit]'
+    quit()
+
+if((len(sys.argv) == 3) and (sys.argv[2] != '--commit')):
+    print 'parameters are: parent directory [--commit]'
+    quit()
+
+commit = (len(sys.argv) == 3)
+
+directory = sys.argv[1];
+occurances = []
+
+trace_identifier = 'WEBRTC_TRACE('
+extensions = ['.h','.cc','.c','.cpp']
+files_to_fix = []
+for extension in extensions:
+    files_to_fix.extend(filemanagement.listallfilesinfolder(directory,\
+                                                       extension))
+
+# This function identifies the begining of a trace statement
+def istracebegining(line):
+    return stringmanipulation.issubstring(line, trace_identifier) != -1
+
+def endofstatement(line):
+    return stringmanipulation.issubstring(line, ';') != -1
+
+def removekeywordfound(line):
+    return stringmanipulation.issubstring(line, trace_remove_key_word) != -1
+
+# Used to store temporary result before flushing to real file when finished
+def temporaryfilename():
+    return 'deleteme.txt'
+
+
+def find_occurances(path, file_name):
+    full_filename = path + file_name
+    file_handle = open(full_filename,'r')
+    line_is_trace = False
+    last_trace_line = -1
+    for line_nr, line in enumerate(file_handle):
+        if(istracebegining(line)):
+            line_is_trace = True;
+            last_trace_line = line_nr
+
+        if(line_is_trace):
+            if(removekeywordfound(line)):
+                occurances.append(last_trace_line)
+
+        if(endofstatement(line)):
+            line_is_trace = False;
+
+def remove_occurances(path, file_name):
+    full_file_name = path + file_name
+    if (not filemanagement.fileexist(full_file_name)):
+        print 'File ' + full_file_name + ' is not found.'
+        print 'Should not happen! Ever!'
+        quit()
+
+    full_temporary_file_name = path + temporaryfilename()
+    temporary_file = open(full_temporary_file_name,'w')
+    original_file = open(full_file_name,'r')
+    next_occurance_id = 0;
+    removing_statement = False
+    if(len(occurances) == next_occurance_id):
+        return
+    next_occurance = occurances[next_occurance_id]
+    next_occurance_id += 1
+    for line_nr, line in enumerate(original_file):
+        if(line_nr == next_occurance):
+            removing_statement = True
+            if(len(occurances) == next_occurance_id):
+                next_occurance_id = -1
+            else:
+                next_occurance = occurances[next_occurance_id]
+                next_occurance_id += 1
+
+        if (not removing_statement):
+            temporary_file.writelines(line)
+
+        if(endofstatement(line)):
+            removing_statement = False;
+
+    temporary_file.close()
+    original_file.close()
+    filemanagement.copyfile(full_file_name,full_temporary_file_name)
+    filemanagement.deletefile(full_temporary_file_name)
+
+def nextoccurance():
+    if (len(occurances) == 0):
+        return -1
+    return_value = occurances[0]
+    occurances = occurances[1:len(occurances)]
+    return return_value
+
+def would_be_removed_occurances(path, file_name):
+    full_file_name = path + file_name
+    if (not filemanagement.fileexist(full_file_name)):
+        print 'File ' + full_file_name + ' is not found.'
+        print 'Should not happen! Ever!'
+        quit()
+
+    original_file = open(full_file_name,'r')
+    removing_statement = False
+    next_occurance_id = 0;
+    if(len(occurances) == next_occurance_id):
+        return
+    next_occurance = occurances[next_occurance_id]
+    next_occurance_id += 1
+    for line_nr, line in enumerate(original_file):
+        if(line_nr == next_occurance):
+            removing_statement = True
+            if(len(occurances) == next_occurance_id):
+                return
+            next_occurance = occurances[next_occurance_id]
+            next_occurance_id += 1
+
+        if (removing_statement):
+            print line_nr
+
+        if(endofstatement(line)):
+            removing_statement = False;
+            if(next_occurance == -1):
+                break
+    original_file.close()
+
+for index in range(len(files_to_fix)):
+    if(commit):
+        print (100*index)/len(files_to_fix)
+
+    path_dir = files_to_fix[index][0]
+    filename = files_to_fix[index][1]
+
+    #print path_dir + filename
+    occurances = []
+    find_occurances(path_dir, filename)
+
+    if(not commit):
+        would_be_removed_occurances(path_dir, filename)
+        continue
+    remove_occurances(path_dir, filename)
diff --git a/tools/refactoring/stringmanipulation.py b/tools/refactoring/stringmanipulation.py
new file mode 100644
index 0000000..0d9e0ff
--- /dev/null
+++ b/tools/refactoring/stringmanipulation.py
@@ -0,0 +1,303 @@
+import string
+
+# returns tuple, [success,updated_string] where the updated string has
+# has one less (the first) occurance of match string
+def removefirstoccurance( remove_string, match_string ):
+    lowercase_string = remove_string.lower()
+    lowercase_match_string = match_string.lower()
+    lowest_index = lowercase_string.find(lowercase_match_string)
+    if(lowest_index == -1):
+        return [False,remove_string]
+    past_match_index = lowest_index + len(lowercase_match_string)
+    highest_index = len(remove_string)
+    remove_string = remove_string[0:lowest_index] + remove_string[past_match_index: highest_index]
+    return [True,remove_string]
+
+# returns a string with all occurances of match_string removed
+def removealloccurances( remove_string, match_string ):
+    return_value = [True, remove_string]
+    while(return_value[0]):
+        return_value = removefirstoccurance(return_value[1],match_string)
+    return return_value[1]
+
+# removes an occurance of match_string only if it's first in the string
+# returns tuple [succes, new_string]
+def removeprefix( remove_string, match_string ):
+    lowercase_string = remove_string.lower()
+    lowercase_match_string = match_string.lower()
+    lowest_index = lowercase_string.find(lowercase_match_string)
+    if(lowest_index == -1):
+        return [False,remove_string]
+    if(lowest_index != 0):
+        return [False,remove_string]
+    past_match_index = lowest_index + len(lowercase_match_string)
+    highest_index = len(remove_string)
+    remove_string = remove_string[0:lowest_index] + remove_string[past_match_index: highest_index]
+#    print lowest_index
+#    print past_match_index
+    return [True,remove_string]
+
+# removes multiple occurances of match string as long as they are first in
+# the string
+def removeallprefix( remove_string, match_string ):
+    return_value = [True, remove_string]
+    while(return_value[0]):
+        return_value = removeprefix(return_value[1],match_string)
+    return return_value[1]
+
+# returns true if extensionstring is a correct extension
+def isextension( extensionstring ):
+    if(len(extensionstring) < 2):
+        return False
+    if(extensionstring[0] != '.'):
+        return False
+    if(extensionstring[1:len(extensionstring)-1].find('.') != -1):
+        return False
+    return True
+
+# returns the index of start of the last occurance of match_string
+def findlastoccurance( original_string, match_string ):
+    search_index = original_string.find(match_string)
+    found_index = search_index
+    last_index = len(original_string) - 1
+    while((search_index != -1) and (search_index < last_index)):
+        search_index = original_string[search_index+1:last_index].find(match_string)
+        if(search_index != -1):
+            found_index = search_index
+    return found_index
+
+# changes extension from original_extension to new_extension
+def changeextension( original_string, original_extension, new_extension):
+    if(not isextension(original_extension)):
+        return original_string
+    if(not isextension(new_extension)):
+        return original_string
+    index = findlastoccurance(original_string, original_extension)
+    if(index == -1):
+        return original_string
+    return_value = original_string[0:index] + new_extension
+    return return_value
+
+# wanted to do this with str.find however didnt seem to work so do it manually
+# returns the index of the first capital letter
+def findfirstcapitalletter( original_string ):
+    for index in range(len(original_string)):
+        if(original_string[index].lower() != original_string[index]):
+            return index
+    return -1
+
+
+# replaces capital letters with underscore and lower case letter (except very
+# first
+def lowercasewithunderscore( original_string ):
+# ignore the first letter since there should be no underscore in front of it
+    if(len(original_string) < 2):
+        return original_string
+    return_value = original_string[1:len(original_string)]
+    index = findfirstcapitalletter(return_value)
+    while(index != -1):
+        return_value = return_value[0:index] + \
+                       '_' + \
+                       return_value[index].lower() + \
+                       return_value[index+1:len(return_value)]
+        index = findfirstcapitalletter(return_value)
+    return_value = original_string[0].lower() + return_value
+    return return_value
+
+# my table is a duplicate of strings
+def removeduplicates( my_table ):
+    new_table = []
+    for old_string1, new_string1 in my_table:
+        found = 0
+        for old_string2, new_string2 in new_table:
+            if(old_string1 == old_string2):
+                found += 1
+            if(new_string1 == new_string2):
+                if(new_string1 == ''):
+                    found += found
+                else:
+                    found += 1
+            if(found == 1):
+                print 'missmatching set, terminating program'
+                print old_string1
+                print new_string1
+                print old_string2
+                print new_string2
+                quit()
+            if(found == 2):
+                break
+        if(found == 0):
+            new_table.append([old_string1,new_string1])
+    return new_table
+
+def removenochange( my_table ):
+    new_table = []
+    for old_string, new_string in my_table:
+        if(old_string != new_string):
+            new_table.append([old_string,new_string])
+    return new_table
+
+# order table after size of the string (can be used to replace bigger strings
+# first which is useful since smaller strings can be inside the bigger string)
+# E.g. GIPS is a sub string of GIPSVE if we remove GIPS first GIPSVE will never
+# be removed. N is small so no need for fancy sort algorithm. Use selection sort
+def ordertablesizefirst( my_table ):
+    for current_index in range(len(my_table)):
+        biggest_string = 0
+        biggest_string_index = -1
+        for search_index in range(len(my_table)):
+            if(search_index < current_index):
+                continue
+            length_of_string = len(my_table[search_index][0])
+            if(length_of_string > biggest_string):
+                biggest_string = length_of_string
+                biggest_string_index = search_index
+        if(biggest_string_index == -1):
+            print 'sorting algorithm failed, program exit'
+            quit()
+        old_value = my_table[current_index]
+        my_table[current_index] = my_table[biggest_string_index]
+        my_table[biggest_string_index] = old_value
+    return my_table
+
+# returns true if string 1 or 2 is a substring of the other, assuming neither
+# has whitespaces
+def issubstring( string1, string2 ):
+    if(len(string1) == 0):
+        return -1
+    if(len(string2) == 0):
+        return -1
+    large_string = string1
+    small_string = string2
+    if(len(string1) < len(string2)):
+        large_string = string2
+        small_string = string1
+
+    for index in range(len(large_string)):
+        large_sub_string = large_string[index:index+len(small_string)].lower()
+        if(large_sub_string ==\
+           small_string.lower()):
+              return index
+    return -1
+
+#not_part_of_word_table = [' ','(',')','{','}',':','\t','*','&','/','[',']','.',',','\n']
+#def ispartofword( char ):
+#    for item in not_part_of_word_table:
+#        if(char == item):
+#            return False
+#    return True
+
+# must be numerical,_ or charachter
+def ispartofword( char ):
+    if(char.isalpha()):
+        return True
+    if(char.isalnum()):
+        return True
+    if(char == '_'):
+        return True
+    return False
+
+# returns the index of the first letter in the word that the current_index
+# is pointing to and the size of the word
+def getword( line, current_index):
+    if(current_index < 0):
+        return []
+    line = line.rstrip()
+    if(len(line) <= current_index):
+        return []
+    if(line[current_index] == ' '):
+        return []
+    start_pos = current_index
+    while start_pos >= 0:
+        if(not ispartofword(line[start_pos])):
+            start_pos += 1
+            break
+        start_pos -= 1
+    if(start_pos == -1):
+        start_pos = 0
+    end_pos = current_index
+    while end_pos < len(line):
+        if(not ispartofword(line[end_pos])):
+            break
+        end_pos += 1
+    return [start_pos,end_pos - start_pos]
+
+# my table is a tuple [string1,string2] complement_to_table is just a list
+# of strings to compare to string1
+def complement( my_table, complement_to_table ):
+    new_table = []
+    for index in range(len(my_table)):
+        found = False;
+        for compare_string in complement_to_table:
+            if(my_table[index][0].lower() == compare_string.lower()):
+                found = True
+        if(not found):
+            new_table.append(my_table[index])
+    return new_table
+
+def removestringfromhead( line, remove_string):
+    for index in range(len(line)):
+        if(line[index:index+len(remove_string)] != remove_string):
+            return line[index:index+len(line)]
+    return ''
+
+def removeccomment( line ):
+    comment_string = '//'
+    for index in range(len(line)):
+        if(line[index:index+len(comment_string)] == comment_string):
+            return line[0:index]
+    return line
+
+def whitespacestoonespace( line ):
+    return ' '.join(line.split())
+
+def fixabbreviations( original_string ):
+    previouswascapital = (original_string[0].upper() == original_string[0])
+    new_string = ''
+    for index in range(len(original_string)):
+        if(index == 0):
+            new_string += original_string[index]
+            continue
+        if(original_string[index] == '_'):
+            new_string += original_string[index]
+            previouswascapital = False
+            continue
+        if(original_string[index].isdigit()):
+            new_string += original_string[index]
+            previouswascapital = False
+            continue
+        currentiscapital = (original_string[index].upper() == original_string[index])
+        letter_to_add = original_string[index]
+        if(previouswascapital and currentiscapital):
+            letter_to_add = letter_to_add.lower()
+        if(previouswascapital and (not currentiscapital)):
+            old_letter = new_string[len(new_string)-1]
+            new_string = new_string[0:len(new_string)-1]
+            new_string += old_letter.upper()
+        previouswascapital = currentiscapital
+        new_string += letter_to_add
+    return new_string
+
+def replaceoccurances(old_string, replace_string, replace_with_string):
+    if (len(replace_string) == 0):
+        return old_string
+    if (len(old_string) < len(replace_string)):
+        return old_string
+    # Simple implementation, could proably be done smarter
+    new_string = ''
+    for index in range(len(old_string)):
+        #print new_string
+        if(len(replace_string) > (len(old_string) - index)):
+            new_string += old_string[index:index + len(old_string)]
+            break
+        match = (len(replace_string) > 0)
+        for replace_index in range(len(replace_string)):
+            if (replace_string[replace_index] != old_string[index + replace_index]):
+                match = False
+                break
+        if (match):
+            new_string += replace_with_string
+            index =+ len(replace_string)
+        else:
+            new_string += old_string[index]
+    return new_string
diff --git a/tools/refactoring/trim.py b/tools/refactoring/trim.py
new file mode 100644
index 0000000..5539f5f
--- /dev/null
+++ b/tools/refactoring/trim.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+
+import sys
+import fileinput
+
+# Defaults
+TABSIZE = 4
+
+usage = """
+Replaces all TAB characters with %(TABSIZE)d space characters.
+In addition, all trailing space characters are removed.
+usage: trim file ...
+file ... : files are changed in place without taking any backup.
+""" % vars()
+
+def main():
+
+    if len(sys.argv) == 1:
+        sys.stderr.write(usage)
+        sys.exit(2)
+
+    # Iterate over the lines of all files listed in sys.argv[1:]
+    for line in fileinput.input(sys.argv[1:], inplace=True):
+        line = line.replace('\t',' '*TABSIZE);    # replace TABs
+        line = line.rstrip(None)  # remove trailing whitespaces
+        print line                # modify the file
+
+if __name__ == '__main__':
+    main()
diff --git a/tools/refactoring/trimall.py b/tools/refactoring/trimall.py
new file mode 100644
index 0000000..7a1c458
--- /dev/null
+++ b/tools/refactoring/trimall.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+
+import sys
+import fileinput
+import filemanagement
+import p4commands
+
+# Defaults
+TABSIZE = 4
+
+extensions = ['.h','.cc','.c','.cpp']
+
+ignore_these = ['my_ignore_header.h']
+
+usage = """
+Replaces all TAB characters with %(TABSIZE)d space characters.
+In addition, all trailing space characters are removed.
+usage: trim directory
+""" % vars()
+
+if((len(sys.argv) != 2) and (len(sys.argv) != 3)):
+    sys.stderr.write(usage)
+    sys.exit(2)
+
+directory = sys.argv[1];
+if(not filemanagement.pathexist(directory)):
+    sys.stderr.write(usage)
+    sys.exit(2)
+
+if((len(sys.argv) == 3) and (sys.argv[2] != '--commit')):
+    sys.stderr.write(usage)
+    sys.exit(2)
+
+commit = False
+if(len(sys.argv) == 3):
+    commit = True
+
+files_to_fix = []
+for extension in extensions:
+    files_to_fix.extend(filemanagement.listallfilesinfolder(directory,\
+                                                       extension))
+
+def main():
+    if (commit):
+        p4commands.checkoutallfiles()
+    for path,file_name in files_to_fix:
+        full_file_name = path + file_name
+        if (not commit):
+            print full_file_name + ' will be edited'
+            continue
+        for line in fileinput.input(full_file_name, inplace=True):
+            line = line.replace('\t',' '*TABSIZE);    # replace TABs
+            line = line.rstrip(None)  # remove trailing whitespaces
+            print line                # modify the file
+    if (commit):
+        p4commands.revertunchangedfiles()
+
+if __name__ == '__main__':
+    main()
diff --git a/tools/resources/OWNERS b/tools/resources/OWNERS
new file mode 100644
index 0000000..d90e8fb
--- /dev/null
+++ b/tools/resources/OWNERS
@@ -0,0 +1,2 @@
+kjellander@webrtc.org
+phoglund@webrtc.org
diff --git a/tools/resources/update.py b/tools/resources/update.py
new file mode 100755
index 0000000..f202d07
--- /dev/null
+++ b/tools/resources/update.py
@@ -0,0 +1,176 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+__author__ = 'kjellander@webrtc.org (Henrik Kjellander)'
+
+"""Downloads WebRTC resources files from a remote host."""
+
+from optparse import OptionParser
+from urlparse import urljoin
+import os
+import shutil
+import sys
+import tarfile
+import tempfile
+import urllib2
+
+DEPS_KEY = 'webrtc_resources_revision'
+REMOTE_URL_BASE = 'http://commondatastorage.googleapis.com/webrtc-resources'
+VERSION_FILENAME = 'webrtc-resources-version'
+FILENAME_PREFIX = 'webrtc-resources-'
+EXTENSION = '.tgz'
+
+
+def main():
+  """
+  Downloads WebRTC resources files from a remote host.
+
+  This script will download WebRTC resource files used for testing, like audio
+  and video files. It will check the current version in the DEPS file and
+  compare it with the one downloaded (kept in a text file in the download dir).
+  If the DEPS version is different than the one downloaded, the correct version
+  will be downloaded.
+  """
+
+  # Make it possible to skip download using an environment variable:
+  if os.getenv('WEBRTC_SKIP_RESOURCES_DOWNLOAD'):
+    print 'Skipping resources download since WEBRTC_SKIP_RESOURCES_DOWNLOAD set'
+    return
+
+  project_root_dir = os.path.normpath(sys.path[0] + '/../../')
+  deps_file = os.path.join(project_root_dir, 'DEPS')
+  downloads_dir = os.path.join(project_root_dir, 'resources')
+  current_version_file = os.path.join(downloads_dir, VERSION_FILENAME)
+
+  # Ensure the downloads dir is created.
+  if not os.path.isdir(downloads_dir):
+    os.mkdir(downloads_dir)
+
+  # Define and parse arguments.
+  parser = OptionParser()
+  parser.add_option('-f', '--force', action='store_true', dest='force',
+                    help='forces download and removal of existing resources.')
+  parser.add_option('-b', '--base_url', dest='base_url',
+                    help= 'Overrides the default Base URL (%s) and uses the '
+                    'supplied URL instead.' % REMOTE_URL_BASE)
+  (options, unused_args) = parser.parse_args()
+
+  # Download archive if forced or DEPS version is different than our current.
+  current_version = _get_current_version(current_version_file)
+  desired_version = _get_desired_version(deps_file)
+  if desired_version != current_version or options.force:
+    base_url = options.base_url or REMOTE_URL_BASE
+    _perform_download(base_url, desired_version, downloads_dir)
+  else:
+    print 'Already have correct version: %s' % current_version
+
+
+def _get_current_version(current_version_file):
+  """Returns the version already downloaded (if any).
+
+  Args:
+      current_version_file: The filename of the text file containing the
+          currently downloaded version (if any) on local disk.
+  Returns:
+      The version number, or 0 if no downloaded version exists.
+  """
+  current_version = 0
+  if os.path.isfile(current_version_file):
+    f = open(current_version_file)
+    current_version = int(f.read())
+    f.close()
+    print 'Found downloaded resources: version: %s' % current_version
+  return current_version
+
+
+def _get_desired_version(deps_file):
+  """Evaluates the project's DEPS and returns the desired resources version.
+
+  Args:
+      deps_file: Full path to the DEPS file of the project.
+  Returns:
+      The desired resources version.
+  """
+  # Evaluate the DEPS file as Python code to extract the variables defined.
+  locals_dict = {'Var': lambda name: locals_dict['vars'][name],
+           'File': lambda name: name,
+           'From': lambda deps, definition: deps}
+  execfile(deps_file, {}, locals_dict)
+  deps_vars = locals_dict['vars']
+
+  desired_version = int(deps_vars[DEPS_KEY])
+  print 'Version in DEPS file: %d' % desired_version
+  return desired_version
+
+
+def _perform_download(base_url, desired_version, downloads_dir):
+  """Performs the download and extracts the downloaded resources.
+
+  Args:
+      base_url: URL that holds the resource downloads.
+      desired_version: Desired version, which decides the filename.
+  """
+  temp_dir = tempfile.mkdtemp(prefix='webrtc-resources-')
+  try:
+    archive_name = '%s%s%s' % (FILENAME_PREFIX, desired_version, EXTENSION)
+    # urljoin requires base URL to end with slash to construct a proper URL
+    # to our file:
+    if not base_url[-1:] == '/':
+      base_url += '/'
+    remote_archive_url = urljoin(base_url, archive_name)
+    # Download into the temporary directory with display of progress, inspired
+    # by the Stack Overflow post at http://goo.gl/JIrbo
+    temp_filename = os.path.join(temp_dir, archive_name)
+    print 'Downloading: %s' % remote_archive_url
+
+    response = urllib2.urlopen(remote_archive_url)
+    temp_file = open(temp_filename, 'wb')
+    meta = response.info()
+    file_size_kb = int(meta.getheaders('Content-Length')[0]) / 1024
+    print 'Progress: %s : %s kB' % (archive_name, file_size_kb)
+
+    file_size_dl_kb = 0
+    block_size = 65536
+    while True:
+      file_buffer = response.read(block_size)
+      if not file_buffer:
+        break
+      file_size_dl_kb += len(file_buffer) / 1024
+      temp_file.write(file_buffer)
+      status = r'%10d kB [%3.2f%%]' % (file_size_dl_kb,
+                                       file_size_dl_kb * 100. / file_size_kb)
+      status += chr(8) * (len(status) + 1)
+      print status,
+    print
+    temp_file.close()
+
+    # Clean up the existing resources dir.
+    print 'Removing old resources in %s' % downloads_dir
+    shutil.rmtree(downloads_dir)
+    os.mkdir(downloads_dir)
+
+    # Extract the archive.
+    archive = tarfile.open(temp_filename, 'r:gz')
+    archive.extractall(downloads_dir)
+    archive.close()
+    print 'Extracted resource files into %s' % downloads_dir
+
+    # Write the downloaded version to a text file in the resources dir to avoid
+    # re-download of the same version in the future.
+    new_version_filename = os.path.join(downloads_dir, VERSION_FILENAME)
+    version_file = open(new_version_filename, 'w')
+    version_file.write('%d' % desired_version)
+    version_file.close()
+
+  finally:
+    # Clean up the temp dir.
+    shutil.rmtree(temp_dir)
+
+if __name__ == '__main__':
+  main()
diff --git a/tools/valgrind-webrtc/memcheck/OWNERS b/tools/valgrind-webrtc/memcheck/OWNERS
new file mode 100644
index 0000000..72e8ffc
--- /dev/null
+++ b/tools/valgrind-webrtc/memcheck/OWNERS
@@ -0,0 +1 @@
+*
diff --git a/tools/valgrind-webrtc/memcheck/PRESUBMIT.py b/tools/valgrind-webrtc/memcheck/PRESUBMIT.py
new file mode 100644
index 0000000..eb7df99
--- /dev/null
+++ b/tools/valgrind-webrtc/memcheck/PRESUBMIT.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+"""
+Copied from Chrome's src/tools/valgrind/memcheck/PRESUBMIT.py
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
+for more details on the presubmit API built into gcl.
+"""
+
+import os
+import re
+import sys
+
+def CheckChange(input_api, output_api):
+  """Checks the memcheck suppressions files for bad data."""
+
+  # Add the path to the Chrome valgrind dir to the import path:
+  tools_vg_path = os.path.join(input_api.PresubmitLocalPath(), '..', '..',
+                               'valgrind')
+  sys.path.append(tools_vg_path)
+  import suppressions
+
+  sup_regex = re.compile('suppressions.*\.txt$')
+  suppressions = {}
+  errors = []
+  check_for_memcheck = False
+  # skip_next_line has 3 possible values:
+  # - False: don't skip the next line.
+  # - 'skip_suppression_name': the next line is a suppression name, skip.
+  # - 'skip_param': the next line is a system call parameter error, skip.
+  skip_next_line = False
+  for f in filter(lambda x: sup_regex.search(x.LocalPath()),
+                  input_api.AffectedFiles()):
+    for line, line_num in zip(f.NewContents(),
+                              xrange(1, len(f.NewContents()) + 1)):
+      line = line.lstrip()
+      if line.startswith('#') or not line:
+        continue
+
+      if skip_next_line:
+        if skip_next_line == 'skip_suppression_name':
+          if 'insert_a_suppression_name_here' in line:
+            errors.append('"insert_a_suppression_name_here" is not a valid '
+                          'suppression name')
+          if suppressions.has_key(line):
+            if f.LocalPath() == suppressions[line][1]:
+              errors.append('suppression with name "%s" at %s line %s '
+                            'has already been defined at line %s' %
+                            (line, f.LocalPath(), line_num,
+                             suppressions[line][1]))
+            else:
+              errors.append('suppression with name "%s" at %s line %s '
+                            'has already been defined at %s line %s' %
+                            (line, f.LocalPath(), line_num,
+                             suppressions[line][0], suppressions[line][1]))
+          else:
+            suppressions[line] = (f, line_num)
+            check_for_memcheck = True;
+        skip_next_line = False
+        continue
+      if check_for_memcheck:
+        if not line.startswith('Memcheck:'):
+          errors.append('"%s" should be "Memcheck:..." in %s line %s' %
+                        (line, f.LocalPath(), line_num))
+        check_for_memcheck = False;
+      if line == '{':
+        skip_next_line = 'skip_suppression_name'
+        continue
+      if line == "Memcheck:Param":
+        skip_next_line = 'skip_param'
+        continue
+
+      if (line.startswith('fun:') or line.startswith('obj:') or
+          line.startswith('Memcheck:') or line == '}' or
+          line == '...'):
+        continue
+      errors.append('"%s" is probably wrong: %s line %s' % (line, f.LocalPath(),
+                                                            line_num))
+  if errors:
+    return [output_api.PresubmitError('\n'.join(errors))]
+  return []
+
+def CheckChangeOnUpload(input_api, output_api):
+  return CheckChange(input_api, output_api)
+
+def CheckChangeOnCommit(input_api, output_api):
+  return CheckChange(input_api, output_api)
+
+def GetPreferredTrySlaves():
+  # We don't have any memcheck slaves yet, so there's no use for this method.
+  # When we have, the slave name(s) should be put into this list.
+  return []
diff --git a/tools/valgrind-webrtc/memcheck/suppressions.txt b/tools/valgrind-webrtc/memcheck/suppressions.txt
new file mode 100644
index 0000000..34941da
--- /dev/null
+++ b/tools/valgrind-webrtc/memcheck/suppressions.txt
@@ -0,0 +1,24 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# This file is used in addition to the one already maintained in Chrome.
+# It acts as a place holder for future additions for WebRTC.
+# It must exist for the Python wrapper script to work properly.
+
+{
+   bug_716
+   Memcheck:Leak
+   fun:_Znw*
+   fun:_ZN6webrtc11ThreadPosix6CreateEPFbPvES1_NS_14ThreadPriorityEPKc
+   fun:_ZN6webrtc13ThreadWrapper12CreateThreadEPFbPvES1_NS_14ThreadPriorityEPKc
+   fun:_ZN6webrtc18videocapturemodule22VideoCaptureModuleV4L212StartCaptureERKNS_22VideoCaptureCapabilityE
+   fun:_ZN6webrtc11ViECapturer5StartERKNS_17CaptureCapabilityE
+   fun:_ZN6webrtc14ViECaptureImpl12StartCaptureEiRKNS_17CaptureCapabilityE
+   fun:_ZN15TbCaptureDeviceC1ER12TbInterfaces
+   fun:_ZN12_GLOBAL__N_114ViERtpFuzzTest5SetUpEv
+}
\ No newline at end of file
diff --git a/tools/valgrind-webrtc/memcheck/suppressions_mac.txt b/tools/valgrind-webrtc/memcheck/suppressions_mac.txt
new file mode 100644
index 0000000..ec20df4
--- /dev/null
+++ b/tools/valgrind-webrtc/memcheck/suppressions_mac.txt
@@ -0,0 +1,11 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# This file is used in addition to the one already maintained in Chrome.
+# It acts as a place holder for future additions for WebRTC.
+# It must exist for the Python wrapper script to work properly.
diff --git a/tools/valgrind-webrtc/memcheck/suppressions_win32.txt b/tools/valgrind-webrtc/memcheck/suppressions_win32.txt
new file mode 100644
index 0000000..ec20df4
--- /dev/null
+++ b/tools/valgrind-webrtc/memcheck/suppressions_win32.txt
@@ -0,0 +1,11 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# This file is used in addition to the one already maintained in Chrome.
+# It acts as a place holder for future additions for WebRTC.
+# It must exist for the Python wrapper script to work properly.
diff --git a/tools/valgrind-webrtc/tsan/OWNERS b/tools/valgrind-webrtc/tsan/OWNERS
new file mode 100644
index 0000000..72e8ffc
--- /dev/null
+++ b/tools/valgrind-webrtc/tsan/OWNERS
@@ -0,0 +1 @@
+*
diff --git a/tools/valgrind-webrtc/tsan/PRESUBMIT.py b/tools/valgrind-webrtc/tsan/PRESUBMIT.py
new file mode 100644
index 0000000..75f7272
--- /dev/null
+++ b/tools/valgrind-webrtc/tsan/PRESUBMIT.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+import os
+import re
+import sys
+
+"""
+Copied from Chrome's src/tools/valgrind/tsan/PRESUBMIT.py
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
+for more details on the presubmit API built into gcl.
+"""
+
+def CheckChange(input_api, output_api):
+  """Checks the TSan suppressions files for bad suppressions."""
+
+  # Add the path to the Chrome valgrind dir to the import path:
+  tools_vg_path = os.path.join(input_api.PresubmitLocalPath(), '..', '..',
+                               'valgrind')
+  sys.path.append(tools_vg_path)
+  import suppressions
+
+  return suppressions.PresubmitCheck(input_api, output_api)
+
+def CheckChangeOnUpload(input_api, output_api):
+  return CheckChange(input_api, output_api)
+
+def CheckChangeOnCommit(input_api, output_api):
+  return CheckChange(input_api, output_api)
+
+def GetPreferredTrySlaves():
+  # We don't have any tsan slaves yet, so there's no use for this method.
+  # When we have, the slave name(s) should be put into this list.
+  return []
diff --git a/tools/valgrind-webrtc/tsan/suppressions.txt b/tools/valgrind-webrtc/tsan/suppressions.txt
new file mode 100644
index 0000000..ec20df4
--- /dev/null
+++ b/tools/valgrind-webrtc/tsan/suppressions.txt
@@ -0,0 +1,11 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# This file is used in addition to the one already maintained in Chrome.
+# It acts as a place holder for future additions for WebRTC.
+# It must exist for the Python wrapper script to work properly.
diff --git a/tools/valgrind-webrtc/tsan/suppressions_mac.txt b/tools/valgrind-webrtc/tsan/suppressions_mac.txt
new file mode 100644
index 0000000..ec20df4
--- /dev/null
+++ b/tools/valgrind-webrtc/tsan/suppressions_mac.txt
@@ -0,0 +1,11 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# This file is used in addition to the one already maintained in Chrome.
+# It acts as a place holder for future additions for WebRTC.
+# It must exist for the Python wrapper script to work properly.
diff --git a/tools/valgrind-webrtc/tsan/suppressions_win32.txt b/tools/valgrind-webrtc/tsan/suppressions_win32.txt
new file mode 100644
index 0000000..ec20df4
--- /dev/null
+++ b/tools/valgrind-webrtc/tsan/suppressions_win32.txt
@@ -0,0 +1,11 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# This file is used in addition to the one already maintained in Chrome.
+# It acts as a place holder for future additions for WebRTC.
+# It must exist for the Python wrapper script to work properly.
diff --git a/tools/valgrind-webrtc/webrtc_tests.py b/tools/valgrind-webrtc/webrtc_tests.py
new file mode 100755
index 0000000..59db422
--- /dev/null
+++ b/tools/valgrind-webrtc/webrtc_tests.py
@@ -0,0 +1,145 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+'''Runs various WebRTC tests through valgrind_test.py.
+
+This script inherits the chrome_tests.py in Chrome, replacing its tests. We do
+this by taking chrome's faux cmdline test and making that the standard, so that
+we effectively can pass in any binary we feel like. It's also possible to pass
+arguments to the test, provided that the arguments do not contain dashes (these
+can be "escaped" by passing + instead, so -a becomes +a, and --my-option becomes
+++my_option).
+
+Suppression files:
+The Chrome valgrind directory we use as a DEPS dependency contains the following
+suppression files:
+  valgrind/memcheck/suppressions.txt
+  valgrind/memcheck/suppressions_mac.txt
+  valgrind/tsan/suppressions.txt
+  valgrind/tsan/suppressions_mac.txt
+  valgrind/tsan/suppressions_win32.txt
+Since they're referenced from the chrome_tests.py script, we have similar files
+below the directory of this script. When executing, this script will setup both
+Chrome's suppression files and our own, so we can easily maintain WebRTC
+specific suppressions in our own files.
+'''
+
+import optparse
+import os
+import sys
+
+import logging_utils
+import path_utils
+
+import chrome_tests
+
+class WebRTCTests(chrome_tests.ChromeTests):
+  """Class that handles setup of suppressions for WebRTC.
+
+  Everything else is inherited from chrome_tests.ChromeTests.
+  """
+
+  def _DefaultCommand(self, tool, exe=None, valgrind_test_args=None):
+    """Override command-building method so we can add more suppressions."""
+    cmd = chrome_tests.ChromeTests._DefaultCommand(self, tool, exe,
+                                                   valgrind_test_args)
+    # When ChromeTests._DefaultCommand has executed, it has setup suppression
+    # files based on what's found in the memcheck/ or tsan/ subdirectories of
+    # this script's location. If Mac or Windows is executing, additional
+    # platform specific files have also been added.
+    # Since only the ones located below this directory is added, we must also
+    # add the ones maintained by Chrome, located in ../valgrind.
+
+    # The idea is to look for --suppression arguments in the cmd list and add a
+    # modified copy of each suppression file, for the corresponding file in
+    # ../valgrind. If we would simply replace 'valgrind-webrtc' with 'valgrind'
+    # we may produce invalid paths if other parts of the path contain that
+    # string. That's why the code below only replaces the end of the path.
+    old_base, old_dir = _split_script_path()
+    new_dir = old_base + 'valgrind'
+    add_suppressions = []
+    for token in cmd:
+      if '--suppressions' in token:
+        add_suppressions.append(token.replace(old_base + old_dir, new_dir))
+    return add_suppressions + cmd
+
+
+def _split_script_path():
+  """Splits the script's path into a tuple separating the last directory.
+
+    Returns a tuple where the first item is the whole path except the last
+    directory and the second item is the name of the last directory.
+  """
+  script_dir = path_utils.ScriptDir()
+  last_sep_index = script_dir.rfind(os.sep)
+  return script_dir[0:last_sep_index+1], script_dir[last_sep_index+1:]
+
+
+def _main(_):
+  parser = optparse.OptionParser("usage: %prog -b <dir> -t <test> "
+                                 "[-t <test> ...] <arguments to all tests>"
+                                 "NOTE: when passing arguments to all tests, "
+                                 "      replace any - with +.")
+  parser.add_option("-b", "--build_dir",
+                    help="the location of the compiler output")
+  parser.add_option("-t", "--test", action="append", default=[],
+                    help="which test to run, supports test:gtest_filter format "
+                         "as well.")
+  parser.add_option("", "--baseline", action="store_true", default=False,
+                    help="generate baseline data instead of validating")
+  parser.add_option("", "--gtest_filter",
+                    help="additional arguments to --gtest_filter")
+  parser.add_option("", "--gtest_repeat",
+                    help="argument for --gtest_repeat")
+  parser.add_option("-v", "--verbose", action="store_true", default=False,
+                    help="verbose output - enable debug log messages")
+  parser.add_option("", "--tool", dest="valgrind_tool", default="memcheck",
+                    help="specify a valgrind tool to run the tests under")
+  parser.add_option("", "--tool_flags", dest="valgrind_tool_flags", default="",
+                    help="specify custom flags for the selected valgrind tool")
+  parser.add_option("", "--keep_logs", action="store_true", default=False,
+                    help="store memory tool logs in the <tool>.logs directory "
+                         "instead of /tmp.\nThis can be useful for tool "
+                         "developers/maintainers.\nPlease note that the <tool>"
+                         ".logs directory will be clobbered on tool startup.")
+  options, args = parser.parse_args()
+
+  if options.verbose:
+    logging_utils.config_root(logging.DEBUG)
+  else:
+    logging_utils.config_root()
+
+  if not options.test:
+    parser.error("--test not specified")
+
+  if len(options.test) != 1 and options.gtest_filter:
+    parser.error("--gtest_filter and multiple tests don't make sense together")
+
+  # Performs the deferred-argument black magic described in the usage.
+  translated_args = map(lambda arg: arg.replace('+', '-'), args)
+
+  for t in options.test:
+    tests = WebRTCTests(options, translated_args, t)
+    ret = tests.Run()
+    if ret: return ret
+  return 0
+
+if __name__ == "__main__":
+  # Overwrite the ChromeTests tests dictionary. The cmdline option allows the
+  # user to pass any executable as parameter to the test script, so we'll use
+  # that to get our binaries in (hackish but convenient).
+  chrome_tests.ChromeTests._test_list = {
+    "cmdline": chrome_tests.ChromeTests.RunCmdLine,
+  }
+
+  # We do this so the user can write -t <binary> instead of -t cmdline <binary>.
+  sys.argv.insert(sys.argv.index('-t') + 1, 'cmdline')
+  print sys.argv
+  ret = _main(sys.argv)
+  sys.exit(ret)
diff --git a/tools/valgrind-webrtc/webrtc_tests.sh b/tools/valgrind-webrtc/webrtc_tests.sh
new file mode 100755
index 0000000..e33b78c
--- /dev/null
+++ b/tools/valgrind-webrtc/webrtc_tests.sh
@@ -0,0 +1,102 @@
+#!/bin/bash
+
+# Copyright (c) 2011 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+# Set up some paths and re-direct the arguments to webrtc_tests.py
+
+# This script is a copy of the chrome_tests.sh wrapper script with the following
+# changes:
+# - The locate_valgrind.sh of Chromium's Valgrind scripts dir is used to locate
+#   the Valgrind framework install.
+# - webrtc_tests.py is invoked instead of chrome_tests.py.
+# - Chromium's Valgrind scripts directory is added to the PYTHONPATH to make it 
+#   possible to execute the Python scripts properly.
+
+export THISDIR=`dirname $0`
+ARGV_COPY="$@"
+
+# We need to set CHROME_VALGRIND iff using Memcheck or TSan-Valgrind:
+#   tools/valgrind/chrome_tests.sh --tool memcheck
+# or
+#   tools/valgrind/chrome_tests.sh --tool=memcheck
+# (same for "--tool=tsan")
+tool="memcheck"  # Default to memcheck.
+while (( "$#" ))
+do
+  if [[ "$1" == "--tool" ]]
+  then
+    tool="$2"
+    shift
+  elif [[ "$1" =~ --tool=(.*) ]]
+  then
+    tool="${BASH_REMATCH[1]}"
+  fi
+  shift
+done
+
+NEEDS_VALGRIND=0
+NEEDS_DRMEMORY=0
+
+case "$tool" in
+  "memcheck")
+    NEEDS_VALGRIND=1
+    ;;
+  "tsan" | "tsan_rv")
+    NEEDS_VALGRIND=1
+    ;;
+  "drmemory" | "drmemory_light" | "drmemory_full")
+    NEEDS_DRMEMORY=1
+    ;;
+esac
+
+# For WebRTC, we'll use the locate_valgrind.sh script in Chromium's Valgrind 
+# scripts dir to locate the Valgrind framework install
+CHROME_VALGRIND_SCRIPTS=$THISDIR/../valgrind
+
+if [ "$NEEDS_VALGRIND" == "1" ]
+then
+  CHROME_VALGRIND=`sh $CHROME_VALGRIND_SCRIPTS/locate_valgrind.sh`
+  if [ "$CHROME_VALGRIND" = "" ]
+  then
+    # locate_valgrind.sh failed
+    exit 1
+  fi
+  echo "Using valgrind binaries from ${CHROME_VALGRIND}"
+
+  PATH="${CHROME_VALGRIND}/bin:$PATH"
+  # We need to set these variables to override default lib paths hard-coded into
+  # Valgrind binary.
+  export VALGRIND_LIB="$CHROME_VALGRIND/lib/valgrind"
+  export VALGRIND_LIB_INNER="$CHROME_VALGRIND/lib/valgrind"
+fi
+
+if [ "$NEEDS_DRMEMORY" == "1" ]
+then
+  if [ -z "$DRMEMORY_COMMAND" ]
+  then
+    DRMEMORY_PATH="$THISDIR/../../third_party/drmemory"
+    DRMEMORY_SFX="$DRMEMORY_PATH/drmemory-windows-sfx.exe"
+    if [ ! -f "$DRMEMORY_SFX" ]
+    then
+      echo "Can't find Dr. Memory executables."
+      echo "See http://www.chromium.org/developers/how-tos/using-valgrind/dr-memory"
+      echo "for the instructions on how to get them."
+      exit 1
+    fi
+
+    chmod +x "$DRMEMORY_SFX"  # Cygwin won't run it without +x.
+    "$DRMEMORY_SFX" -o"$DRMEMORY_PATH/unpacked" -y
+    export DRMEMORY_COMMAND="$DRMEMORY_PATH/unpacked/bin/drmemory.exe"
+  fi
+fi
+
+# Add Chrome's Valgrind scripts dir to the PYTHON_PATH since it contains 
+# the scripts that are needed for this script to run
+PYTHONPATH=$THISDIR/../python/google:$CHROME_VALGRIND_SCRIPTS python \
+           "$THISDIR/webrtc_tests.py" $ARGV_COPY
diff --git a/webrtc.gyp b/webrtc.gyp
new file mode 100644
index 0000000..f2f68bd
--- /dev/null
+++ b/webrtc.gyp
@@ -0,0 +1,41 @@
+# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
+#
+# Use of this source code is governed by a BSD-style license
+# that can be found in the LICENSE file in the root of the source
+# tree. An additional intellectual property rights grant can be found
+# in the file PATENTS.  All contributing project authors may
+# be found in the AUTHORS file in the root of the source tree.
+
+{
+  'includes': [ 'src/build/common.gypi', ],
+  'variables': {
+    'webrtc_all_dependencies': [
+      'src/common_audio/common_audio.gyp:*',
+      'src/common_video/common_video.gyp:*',
+      'src/modules/modules.gyp:*',
+      'src/system_wrappers/source/system_wrappers.gyp:*',
+      'src/video_engine/video_engine.gyp:*',
+      'src/voice_engine/voice_engine.gyp:*',
+      '<(webrtc_vp8_dir)/vp8.gyp:*',
+    ],
+  },
+  'targets': [
+    {
+      'target_name': 'All',
+      'type': 'none',
+      'dependencies': [
+        '<@(webrtc_all_dependencies)',
+      ],
+      'conditions': [
+        ['include_tests==1', {
+          'dependencies': [
+            'src/test/metrics.gyp:*',
+            'src/test/test.gyp:*',
+            'src/tools/tools.gyp:*',
+            'tools/e2e_quality/e2e_quality.gyp:*',
+          ],
+        }],
+      ],
+    },
+  ],
+}